diff --git a/.core_files.yaml b/.core_files.yaml
index 6fd3a74df92..067a6a2b41d 100644
--- a/.core_files.yaml
+++ b/.core_files.yaml
@@ -14,7 +14,6 @@ core: &core
 base_platforms: &base_platforms
   - homeassistant/components/air_quality/**
   - homeassistant/components/alarm_control_panel/**
-  - homeassistant/components/assist_satellite/**
   - homeassistant/components/binary_sensor/**
   - homeassistant/components/button/**
   - homeassistant/components/calendar/**
@@ -50,7 +49,6 @@ base_platforms: &base_platforms
   - homeassistant/components/tts/**
   - homeassistant/components/update/**
   - homeassistant/components/vacuum/**
-  - homeassistant/components/valve/**
   - homeassistant/components/water_heater/**
   - homeassistant/components/weather/**

@@ -62,7 +60,6 @@ components: &components
   - homeassistant/components/auth/**
   - homeassistant/components/automation/**
   - homeassistant/components/backup/**
-  - homeassistant/components/blueprint/**
   - homeassistant/components/bluetooth/**
   - homeassistant/components/cloud/**
   - homeassistant/components/config/**
@@ -79,7 +76,6 @@ components: &components
   - homeassistant/components/group/**
   - homeassistant/components/hassio/**
   - homeassistant/components/homeassistant/**
-  - homeassistant/components/homeassistant_hardware/**
   - homeassistant/components/http/**
   - homeassistant/components/image/**
   - homeassistant/components/input_boolean/**
@@ -112,7 +108,6 @@ components: &components
   - homeassistant/components/tag/**
   - homeassistant/components/template/**
   - homeassistant/components/timer/**
-  - homeassistant/components/trace/**
   - homeassistant/components/usb/**
   - homeassistant/components/webhook/**
   - homeassistant/components/websocket_api/**
@@ -128,12 +123,9 @@ tests: &tests
   - tests/*.py
   - tests/auth/**
   - tests/backports/**
-  - tests/components/conftest.py
-  - tests/components/diagnostics/**
   - tests/components/history/**
   - tests/components/logbook/**
   - tests/components/recorder/**
-  - tests/components/repairs/**
   - tests/components/sensor/**
   - tests/hassfest/**
   - tests/helpers/**
@@ -153,7 +145,6 @@ requirements: &requirements
   - homeassistant/package_constraints.txt
   - requirements*.txt
   - pyproject.toml
-  - script/licenses.py

 any:
   - *base_platforms
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 00000000000..2bc76723445
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,1735 @@
+# Sorted by hassfest.
+#
+# To sort, run python3 -m script.hassfest -p coverage
+
+[run]
+source = homeassistant
+omit =
+    homeassistant/__main__.py
+    homeassistant/helpers/backports/aiohttp_resolver.py
+    homeassistant/helpers/signal.py
+    homeassistant/scripts/__init__.py
+    homeassistant/scripts/benchmark/__init__.py
+    homeassistant/scripts/check_config.py
+    homeassistant/scripts/ensure_config.py
+    homeassistant/scripts/macos/__init__.py
+
+    # omit pieces of code that rely on external devices being present
+    homeassistant/components/acer_projector/*
+    homeassistant/components/acmeda/__init__.py
+    homeassistant/components/acmeda/base.py
+    homeassistant/components/acmeda/cover.py
+    homeassistant/components/acmeda/errors.py
+    homeassistant/components/acmeda/helpers.py
+    homeassistant/components/acmeda/hub.py
+    homeassistant/components/acmeda/sensor.py
+    homeassistant/components/actiontec/const.py
+    homeassistant/components/actiontec/device_tracker.py
+    homeassistant/components/actiontec/model.py
+    homeassistant/components/adax/__init__.py
+    homeassistant/components/adax/climate.py
+    homeassistant/components/adguard/__init__.py
+    homeassistant/components/adguard/entity.py
+    homeassistant/components/adguard/sensor.py
+    homeassistant/components/adguard/switch.py
+    homeassistant/components/ads/*
+    homeassistant/components/aftership/__init__.py
+    homeassistant/components/aftership/sensor.py
+    homeassistant/components/agent_dvr/alarm_control_panel.py
+    homeassistant/components/agent_dvr/camera.py
+    homeassistant/components/agent_dvr/helpers.py
+    homeassistant/components/airnow/__init__.py
+    homeassistant/components/airnow/coordinator.py
+    homeassistant/components/airnow/sensor.py
+    homeassistant/components/airq/__init__.py
+    homeassistant/components/airq/coordinator.py
+    homeassistant/components/airq/sensor.py
+    homeassistant/components/airthings/__init__.py
+    homeassistant/components/airthings/sensor.py
+    homeassistant/components/airthings_ble/__init__.py
+    homeassistant/components/airthings_ble/sensor.py
+    homeassistant/components/airtouch4/__init__.py
+    homeassistant/components/airtouch4/climate.py
+    homeassistant/components/airtouch4/coordinator.py
+    homeassistant/components/airtouch5/__init__.py
+    homeassistant/components/airtouch5/climate.py
+    homeassistant/components/airtouch5/entity.py
+    homeassistant/components/airvisual/__init__.py
+    homeassistant/components/airvisual/sensor.py
+    homeassistant/components/airvisual_pro/__init__.py
+    homeassistant/components/airvisual_pro/sensor.py
+    homeassistant/components/aladdin_connect/__init__.py
+    homeassistant/components/aladdin_connect/api.py
+    homeassistant/components/aladdin_connect/application_credentials.py
+    homeassistant/components/aladdin_connect/cover.py
+    homeassistant/components/aladdin_connect/sensor.py
+    homeassistant/components/alarmdecoder/__init__.py
+    homeassistant/components/alarmdecoder/alarm_control_panel.py
+    homeassistant/components/alarmdecoder/binary_sensor.py
+    homeassistant/components/alarmdecoder/entity.py
+    homeassistant/components/alarmdecoder/sensor.py
+    homeassistant/components/alpha_vantage/sensor.py
+    homeassistant/components/amazon_polly/*
+    homeassistant/components/ambient_station/__init__.py
+    homeassistant/components/ambient_station/binary_sensor.py
+    homeassistant/components/ambient_station/entity.py
+    homeassistant/components/ambient_station/sensor.py
+    homeassistant/components/amcrest/*
+    homeassistant/components/ampio/*
+    homeassistant/components/android_ip_webcam/switch.py
+    homeassistant/components/anel_pwrctrl/switch.py
homeassistant/components/anthemav/media_player.py + homeassistant/components/apple_tv/__init__.py + homeassistant/components/apple_tv/browse_media.py + homeassistant/components/apple_tv/media_player.py + homeassistant/components/apple_tv/remote.py + homeassistant/components/aprilaire/__init__.py + homeassistant/components/aprilaire/climate.py + homeassistant/components/aprilaire/coordinator.py + homeassistant/components/aprilaire/entity.py + homeassistant/components/aprilaire/select.py + homeassistant/components/aprilaire/sensor.py + homeassistant/components/apsystems/__init__.py + homeassistant/components/apsystems/coordinator.py + homeassistant/components/apsystems/entity.py + homeassistant/components/apsystems/number.py + homeassistant/components/apsystems/sensor.py + homeassistant/components/aqualogic/* + homeassistant/components/aquostv/media_player.py + homeassistant/components/arcam_fmj/__init__.py + homeassistant/components/arcam_fmj/media_player.py + homeassistant/components/arest/binary_sensor.py + homeassistant/components/arest/sensor.py + homeassistant/components/arest/switch.py + homeassistant/components/arris_tg2492lg/* + homeassistant/components/aruba/device_tracker.py + homeassistant/components/arwn/sensor.py + homeassistant/components/aseko_pool_live/__init__.py + homeassistant/components/aseko_pool_live/binary_sensor.py + homeassistant/components/aseko_pool_live/coordinator.py + homeassistant/components/aseko_pool_live/entity.py + homeassistant/components/aseko_pool_live/sensor.py + homeassistant/components/asterisk_cdr/mailbox.py + homeassistant/components/asterisk_mbox/mailbox.py + homeassistant/components/aten_pe/* + homeassistant/components/atome/* + homeassistant/components/aurora/__init__.py + homeassistant/components/aurora/binary_sensor.py + homeassistant/components/aurora/coordinator.py + homeassistant/components/aurora/entity.py + homeassistant/components/aurora/sensor.py + homeassistant/components/avea/light.py + homeassistant/components/avion/light.py + homeassistant/components/awair/coordinator.py + homeassistant/components/azure_service_bus/* + homeassistant/components/baf/__init__.py + homeassistant/components/baf/binary_sensor.py + homeassistant/components/baf/climate.py + homeassistant/components/baf/entity.py + homeassistant/components/baf/fan.py + homeassistant/components/baf/light.py + homeassistant/components/baf/number.py + homeassistant/components/baf/sensor.py + homeassistant/components/baf/switch.py + homeassistant/components/baidu/tts.py + homeassistant/components/bang_olufsen/entity.py + homeassistant/components/bang_olufsen/media_player.py + homeassistant/components/bang_olufsen/util.py + homeassistant/components/bang_olufsen/websocket.py + homeassistant/components/bbox/device_tracker.py + homeassistant/components/bbox/sensor.py + homeassistant/components/beewi_smartclim/sensor.py + homeassistant/components/bitcoin/sensor.py + homeassistant/components/bizkaibus/sensor.py + homeassistant/components/blink/__init__.py + homeassistant/components/blink/alarm_control_panel.py + homeassistant/components/blink/binary_sensor.py + homeassistant/components/blink/camera.py + homeassistant/components/blink/sensor.py + homeassistant/components/blink/switch.py + homeassistant/components/blinksticklight/light.py + homeassistant/components/blockchain/sensor.py + homeassistant/components/bloomsky/* + homeassistant/components/bluesound/* + homeassistant/components/bluetooth_tracker/* + homeassistant/components/bmw_connected_drive/notify.py + 
homeassistant/components/bosch_shc/__init__.py + homeassistant/components/bosch_shc/binary_sensor.py + homeassistant/components/bosch_shc/cover.py + homeassistant/components/bosch_shc/entity.py + homeassistant/components/bosch_shc/sensor.py + homeassistant/components/bosch_shc/switch.py + homeassistant/components/braviatv/button.py + homeassistant/components/braviatv/coordinator.py + homeassistant/components/braviatv/media_player.py + homeassistant/components/braviatv/remote.py + homeassistant/components/bring/coordinator.py + homeassistant/components/bring/todo.py + homeassistant/components/broadlink/climate.py + homeassistant/components/broadlink/light.py + homeassistant/components/broadlink/remote.py + homeassistant/components/broadlink/switch.py + homeassistant/components/broadlink/updater.py + homeassistant/components/brottsplatskartan/sensor.py + homeassistant/components/browser/* + homeassistant/components/brunt/__init__.py + homeassistant/components/brunt/cover.py + homeassistant/components/bsblan/climate.py + homeassistant/components/bt_home_hub_5/device_tracker.py + homeassistant/components/bt_smarthub/device_tracker.py + homeassistant/components/buienradar/sensor.py + homeassistant/components/buienradar/util.py + homeassistant/components/buienradar/weather.py + homeassistant/components/canary/camera.py + homeassistant/components/cert_expiry/helper.py + homeassistant/components/channels/* + homeassistant/components/cisco_ios/device_tracker.py + homeassistant/components/cisco_mobility_express/device_tracker.py + homeassistant/components/cisco_webex_teams/notify.py + homeassistant/components/citybikes/sensor.py + homeassistant/components/clementine/media_player.py + homeassistant/components/clickatell/notify.py + homeassistant/components/clicksend/notify.py + homeassistant/components/clicksend_tts/notify.py + homeassistant/components/cmus/media_player.py + homeassistant/components/coinbase/sensor.py + homeassistant/components/comed_hourly_pricing/sensor.py + homeassistant/components/comelit/__init__.py + homeassistant/components/comelit/alarm_control_panel.py + homeassistant/components/comelit/climate.py + homeassistant/components/comelit/coordinator.py + homeassistant/components/comelit/cover.py + homeassistant/components/comelit/humidifier.py + homeassistant/components/comelit/light.py + homeassistant/components/comelit/sensor.py + homeassistant/components/comelit/switch.py + homeassistant/components/comfoconnect/fan.py + homeassistant/components/concord232/alarm_control_panel.py + homeassistant/components/concord232/binary_sensor.py + homeassistant/components/control4/__init__.py + homeassistant/components/control4/director_utils.py + homeassistant/components/control4/light.py + homeassistant/components/control4/media_player.py + homeassistant/components/coolmaster/coordinator.py + homeassistant/components/cppm_tracker/device_tracker.py + homeassistant/components/crownstone/__init__.py + homeassistant/components/crownstone/devices.py + homeassistant/components/crownstone/entry_manager.py + homeassistant/components/crownstone/helpers.py + homeassistant/components/crownstone/light.py + homeassistant/components/crownstone/listeners.py + homeassistant/components/cups/sensor.py + homeassistant/components/currencylayer/sensor.py + homeassistant/components/daikin/climate.py + homeassistant/components/daikin/sensor.py + homeassistant/components/daikin/switch.py + homeassistant/components/danfoss_air/* + homeassistant/components/ddwrt/device_tracker.py + 
homeassistant/components/decora/light.py + homeassistant/components/decora_wifi/light.py + homeassistant/components/delijn/* + homeassistant/components/deluge/__init__.py + homeassistant/components/deluge/coordinator.py + homeassistant/components/deluge/sensor.py + homeassistant/components/deluge/switch.py + homeassistant/components/denon/media_player.py + homeassistant/components/denonavr/__init__.py + homeassistant/components/denonavr/media_player.py + homeassistant/components/denonavr/receiver.py + homeassistant/components/digital_ocean/* + homeassistant/components/discogs/sensor.py + homeassistant/components/discord/__init__.py + homeassistant/components/discord/notify.py + homeassistant/components/dlib_face_detect/image_processing.py + homeassistant/components/dlib_face_identify/image_processing.py + homeassistant/components/dlink/data.py + homeassistant/components/dominos/* + homeassistant/components/doods/* + homeassistant/components/doorbird/__init__.py + homeassistant/components/doorbird/button.py + homeassistant/components/doorbird/camera.py + homeassistant/components/doorbird/device.py + homeassistant/components/doorbird/entity.py + homeassistant/components/doorbird/util.py + homeassistant/components/doorbird/view.py + homeassistant/components/dormakaba_dkey/__init__.py + homeassistant/components/dormakaba_dkey/binary_sensor.py + homeassistant/components/dormakaba_dkey/entity.py + homeassistant/components/dormakaba_dkey/lock.py + homeassistant/components/dormakaba_dkey/sensor.py + homeassistant/components/dovado/* + homeassistant/components/downloader/__init__.py + homeassistant/components/dte_energy_bridge/sensor.py + homeassistant/components/dublin_bus_transport/sensor.py + homeassistant/components/dunehd/__init__.py + homeassistant/components/dunehd/media_player.py + homeassistant/components/duotecno/__init__.py + homeassistant/components/duotecno/binary_sensor.py + homeassistant/components/duotecno/climate.py + homeassistant/components/duotecno/cover.py + homeassistant/components/duotecno/entity.py + homeassistant/components/duotecno/light.py + homeassistant/components/duotecno/switch.py + homeassistant/components/dwd_weather_warnings/coordinator.py + homeassistant/components/dwd_weather_warnings/sensor.py + homeassistant/components/dweet/* + homeassistant/components/ebox/sensor.py + homeassistant/components/ebusd/* + homeassistant/components/ecoal_boiler/* + homeassistant/components/ecobee/__init__.py + homeassistant/components/ecobee/binary_sensor.py + homeassistant/components/ecobee/climate.py + homeassistant/components/ecobee/notify.py + homeassistant/components/ecobee/sensor.py + homeassistant/components/ecobee/weather.py + homeassistant/components/ecoforest/__init__.py + homeassistant/components/ecoforest/coordinator.py + homeassistant/components/ecoforest/entity.py + homeassistant/components/ecoforest/number.py + homeassistant/components/ecoforest/sensor.py + homeassistant/components/ecoforest/switch.py + homeassistant/components/econet/__init__.py + homeassistant/components/econet/binary_sensor.py + homeassistant/components/econet/climate.py + homeassistant/components/econet/sensor.py + homeassistant/components/econet/water_heater.py + homeassistant/components/ecovacs/controller.py + homeassistant/components/ecovacs/entity.py + homeassistant/components/ecovacs/image.py + homeassistant/components/ecovacs/number.py + homeassistant/components/ecovacs/util.py + homeassistant/components/ecovacs/vacuum.py + homeassistant/components/ecowitt/__init__.py + 
homeassistant/components/ecowitt/binary_sensor.py + homeassistant/components/ecowitt/entity.py + homeassistant/components/ecowitt/sensor.py + homeassistant/components/eddystone_temperature/sensor.py + homeassistant/components/edimax/switch.py + homeassistant/components/edl21/__init__.py + homeassistant/components/edl21/sensor.py + homeassistant/components/egardia/* + homeassistant/components/electrasmart/__init__.py + homeassistant/components/electrasmart/climate.py + homeassistant/components/electric_kiwi/__init__.py + homeassistant/components/electric_kiwi/api.py + homeassistant/components/electric_kiwi/coordinator.py + homeassistant/components/electric_kiwi/oauth2.py + homeassistant/components/electric_kiwi/select.py + homeassistant/components/eliqonline/sensor.py + homeassistant/components/elkm1/__init__.py + homeassistant/components/elkm1/alarm_control_panel.py + homeassistant/components/elkm1/binary_sensor.py + homeassistant/components/elkm1/climate.py + homeassistant/components/elkm1/light.py + homeassistant/components/elkm1/sensor.py + homeassistant/components/elkm1/switch.py + homeassistant/components/elmax/__init__.py + homeassistant/components/elmax/alarm_control_panel.py + homeassistant/components/elmax/binary_sensor.py + homeassistant/components/elmax/coordinator.py + homeassistant/components/elmax/cover.py + homeassistant/components/elmax/switch.py + homeassistant/components/elv/* + homeassistant/components/elvia/__init__.py + homeassistant/components/elvia/importer.py + homeassistant/components/emby/media_player.py + homeassistant/components/emoncms/sensor.py + homeassistant/components/emoncms_history/* + homeassistant/components/emonitor/__init__.py + homeassistant/components/emonitor/sensor.py + homeassistant/components/enigma2/media_player.py + homeassistant/components/enocean/__init__.py + homeassistant/components/enocean/binary_sensor.py + homeassistant/components/enocean/device.py + homeassistant/components/enocean/dongle.py + homeassistant/components/enocean/light.py + homeassistant/components/enocean/sensor.py + homeassistant/components/enocean/switch.py + homeassistant/components/enphase_envoy/__init__.py + homeassistant/components/enphase_envoy/binary_sensor.py + homeassistant/components/enphase_envoy/coordinator.py + homeassistant/components/enphase_envoy/entity.py + homeassistant/components/enphase_envoy/number.py + homeassistant/components/enphase_envoy/select.py + homeassistant/components/enphase_envoy/sensor.py + homeassistant/components/enphase_envoy/switch.py + homeassistant/components/entur_public_transport/* + homeassistant/components/environment_canada/__init__.py + homeassistant/components/environment_canada/camera.py + homeassistant/components/environment_canada/sensor.py + homeassistant/components/environment_canada/weather.py + homeassistant/components/envisalink/* + homeassistant/components/ephember/climate.py + homeassistant/components/epic_games_store/__init__.py + homeassistant/components/epic_games_store/coordinator.py + homeassistant/components/epion/__init__.py + homeassistant/components/epion/coordinator.py + homeassistant/components/epion/sensor.py + homeassistant/components/epson/__init__.py + homeassistant/components/epson/media_player.py + homeassistant/components/eq3btsmart/__init__.py + homeassistant/components/eq3btsmart/climate.py + homeassistant/components/eq3btsmart/entity.py + homeassistant/components/escea/__init__.py + homeassistant/components/escea/climate.py + homeassistant/components/escea/discovery.py + 
homeassistant/components/etherscan/sensor.py + homeassistant/components/eufy/* + homeassistant/components/eufylife_ble/__init__.py + homeassistant/components/eufylife_ble/sensor.py + homeassistant/components/everlights/light.py + homeassistant/components/evohome/* + homeassistant/components/ezviz/__init__.py + homeassistant/components/ezviz/alarm_control_panel.py + homeassistant/components/ezviz/binary_sensor.py + homeassistant/components/ezviz/button.py + homeassistant/components/ezviz/camera.py + homeassistant/components/ezviz/coordinator.py + homeassistant/components/ezviz/entity.py + homeassistant/components/ezviz/image.py + homeassistant/components/ezviz/light.py + homeassistant/components/ezviz/number.py + homeassistant/components/ezviz/select.py + homeassistant/components/ezviz/sensor.py + homeassistant/components/ezviz/siren.py + homeassistant/components/ezviz/switch.py + homeassistant/components/ezviz/update.py + homeassistant/components/faa_delays/__init__.py + homeassistant/components/faa_delays/binary_sensor.py + homeassistant/components/faa_delays/coordinator.py + homeassistant/components/familyhub/camera.py + homeassistant/components/ffmpeg/camera.py + homeassistant/components/fibaro/__init__.py + homeassistant/components/fibaro/binary_sensor.py + homeassistant/components/fibaro/climate.py + homeassistant/components/fibaro/cover.py + homeassistant/components/fibaro/event.py + homeassistant/components/fibaro/light.py + homeassistant/components/fibaro/lock.py + homeassistant/components/fibaro/sensor.py + homeassistant/components/fibaro/switch.py + homeassistant/components/fints/sensor.py + homeassistant/components/fireservicerota/__init__.py + homeassistant/components/fireservicerota/binary_sensor.py + homeassistant/components/fireservicerota/sensor.py + homeassistant/components/fireservicerota/switch.py + homeassistant/components/firmata/__init__.py + homeassistant/components/firmata/binary_sensor.py + homeassistant/components/firmata/board.py + homeassistant/components/firmata/entity.py + homeassistant/components/firmata/light.py + homeassistant/components/firmata/pin.py + homeassistant/components/firmata/sensor.py + homeassistant/components/firmata/switch.py + homeassistant/components/fivem/__init__.py + homeassistant/components/fivem/binary_sensor.py + homeassistant/components/fivem/coordinator.py + homeassistant/components/fivem/entity.py + homeassistant/components/fivem/sensor.py + homeassistant/components/fixer/sensor.py + homeassistant/components/fjaraskupan/__init__.py + homeassistant/components/fjaraskupan/binary_sensor.py + homeassistant/components/fjaraskupan/coordinator.py + homeassistant/components/fjaraskupan/fan.py + homeassistant/components/fjaraskupan/light.py + homeassistant/components/fjaraskupan/number.py + homeassistant/components/fjaraskupan/sensor.py + homeassistant/components/fleetgo/device_tracker.py + homeassistant/components/flexit/climate.py + homeassistant/components/flexit_bacnet/climate.py + homeassistant/components/flic/binary_sensor.py + homeassistant/components/flick_electric/__init__.py + homeassistant/components/flick_electric/sensor.py + homeassistant/components/flock/notify.py + homeassistant/components/flume/__init__.py + homeassistant/components/flume/binary_sensor.py + homeassistant/components/flume/coordinator.py + homeassistant/components/flume/entity.py + homeassistant/components/flume/sensor.py + homeassistant/components/flume/util.py + homeassistant/components/folder_watcher/__init__.py + homeassistant/components/foobot/sensor.py + 
homeassistant/components/fortios/device_tracker.py + homeassistant/components/foscam/__init__.py + homeassistant/components/foscam/camera.py + homeassistant/components/foscam/coordinator.py + homeassistant/components/foscam/entity.py + homeassistant/components/foursquare/* + homeassistant/components/free_mobile/notify.py + homeassistant/components/freebox/camera.py + homeassistant/components/freebox/home_base.py + homeassistant/components/freebox/switch.py + homeassistant/components/fritz/coordinator.py + homeassistant/components/fritz/entity.py + homeassistant/components/fritz/services.py + homeassistant/components/fritz/switch.py + homeassistant/components/fritzbox_callmonitor/__init__.py + homeassistant/components/fritzbox_callmonitor/base.py + homeassistant/components/fritzbox_callmonitor/sensor.py + homeassistant/components/frontier_silicon/__init__.py + homeassistant/components/frontier_silicon/browse_media.py + homeassistant/components/frontier_silicon/media_player.py + homeassistant/components/futurenow/light.py + homeassistant/components/garadget/cover.py + homeassistant/components/garages_amsterdam/__init__.py + homeassistant/components/garages_amsterdam/binary_sensor.py + homeassistant/components/garages_amsterdam/entity.py + homeassistant/components/garages_amsterdam/sensor.py + homeassistant/components/gc100/* + homeassistant/components/geniushub/* + homeassistant/components/geocaching/__init__.py + homeassistant/components/geocaching/coordinator.py + homeassistant/components/geocaching/oauth.py + homeassistant/components/geocaching/sensor.py + homeassistant/components/github/coordinator.py + homeassistant/components/gitlab_ci/sensor.py + homeassistant/components/gitter/sensor.py + homeassistant/components/glances/sensor.py + homeassistant/components/goodwe/__init__.py + homeassistant/components/goodwe/button.py + homeassistant/components/goodwe/coordinator.py + homeassistant/components/goodwe/number.py + homeassistant/components/goodwe/select.py + homeassistant/components/goodwe/sensor.py + homeassistant/components/google_cloud/tts.py + homeassistant/components/google_maps/device_tracker.py + homeassistant/components/google_pubsub/__init__.py + homeassistant/components/gpsd/__init__.py + homeassistant/components/gpsd/sensor.py + homeassistant/components/greenwave/light.py + homeassistant/components/growatt_server/__init__.py + homeassistant/components/growatt_server/sensor.py + homeassistant/components/growatt_server/sensor_types/* + homeassistant/components/gstreamer/media_player.py + homeassistant/components/gtfs/sensor.py + homeassistant/components/guardian/__init__.py + homeassistant/components/guardian/binary_sensor.py + homeassistant/components/guardian/button.py + homeassistant/components/guardian/coordinator.py + homeassistant/components/guardian/sensor.py + homeassistant/components/guardian/switch.py + homeassistant/components/guardian/util.py + homeassistant/components/guardian/valve.py + homeassistant/components/habitica/__init__.py + homeassistant/components/habitica/coordinator.py + homeassistant/components/habitica/sensor.py + homeassistant/components/harman_kardon_avr/media_player.py + homeassistant/components/harmony/data.py + homeassistant/components/harmony/remote.py + homeassistant/components/harmony/util.py + homeassistant/components/haveibeenpwned/sensor.py + homeassistant/components/heatmiser/climate.py + homeassistant/components/hikvision/binary_sensor.py + homeassistant/components/hikvisioncam/switch.py + 
homeassistant/components/hisense_aehw4a1/__init__.py + homeassistant/components/hisense_aehw4a1/climate.py + homeassistant/components/hitron_coda/device_tracker.py + homeassistant/components/hive/__init__.py + homeassistant/components/hive/alarm_control_panel.py + homeassistant/components/hive/binary_sensor.py + homeassistant/components/hive/climate.py + homeassistant/components/hive/light.py + homeassistant/components/hive/sensor.py + homeassistant/components/hive/switch.py + homeassistant/components/hive/water_heater.py + homeassistant/components/hko/__init__.py + homeassistant/components/hko/coordinator.py + homeassistant/components/hko/weather.py + homeassistant/components/hlk_sw16/__init__.py + homeassistant/components/hlk_sw16/switch.py + homeassistant/components/home_connect/entity.py + homeassistant/components/home_connect/light.py + homeassistant/components/home_connect/switch.py + homeassistant/components/homematic/__init__.py + homeassistant/components/homematic/binary_sensor.py + homeassistant/components/homematic/climate.py + homeassistant/components/homematic/cover.py + homeassistant/components/homematic/entity.py + homeassistant/components/homematic/light.py + homeassistant/components/homematic/lock.py + homeassistant/components/homematic/notify.py + homeassistant/components/homematic/sensor.py + homeassistant/components/homematic/switch.py + homeassistant/components/horizon/media_player.py + homeassistant/components/hp_ilo/sensor.py + homeassistant/components/huawei_lte/__init__.py + homeassistant/components/huawei_lte/binary_sensor.py + homeassistant/components/huawei_lte/device_tracker.py + homeassistant/components/huawei_lte/notify.py + homeassistant/components/huawei_lte/sensor.py + homeassistant/components/huawei_lte/switch.py + homeassistant/components/hunterdouglas_powerview/__init__.py + homeassistant/components/hunterdouglas_powerview/button.py + homeassistant/components/hunterdouglas_powerview/coordinator.py + homeassistant/components/hunterdouglas_powerview/cover.py + homeassistant/components/hunterdouglas_powerview/entity.py + homeassistant/components/hunterdouglas_powerview/number.py + homeassistant/components/hunterdouglas_powerview/select.py + homeassistant/components/hunterdouglas_powerview/sensor.py + homeassistant/components/hunterdouglas_powerview/shade_data.py + homeassistant/components/hunterdouglas_powerview/util.py + homeassistant/components/huum/__init__.py + homeassistant/components/huum/climate.py + homeassistant/components/hvv_departures/__init__.py + homeassistant/components/hvv_departures/binary_sensor.py + homeassistant/components/hvv_departures/sensor.py + homeassistant/components/ialarm/alarm_control_panel.py + homeassistant/components/iammeter/const.py + homeassistant/components/iammeter/sensor.py + homeassistant/components/iaqualink/binary_sensor.py + homeassistant/components/iaqualink/climate.py + homeassistant/components/iaqualink/light.py + homeassistant/components/iaqualink/sensor.py + homeassistant/components/iaqualink/switch.py + homeassistant/components/icloud/__init__.py + homeassistant/components/icloud/account.py + homeassistant/components/icloud/device_tracker.py + homeassistant/components/icloud/sensor.py + homeassistant/components/idteck_prox/* + homeassistant/components/ifttt/__init__.py + homeassistant/components/ifttt/alarm_control_panel.py + homeassistant/components/iglo/light.py + homeassistant/components/ihc/* + homeassistant/components/incomfort/__init__.py + homeassistant/components/incomfort/climate.py + 
homeassistant/components/incomfort/water_heater.py + homeassistant/components/insteon/binary_sensor.py + homeassistant/components/insteon/climate.py + homeassistant/components/insteon/cover.py + homeassistant/components/insteon/fan.py + homeassistant/components/insteon/insteon_entity.py + homeassistant/components/insteon/light.py + homeassistant/components/insteon/schemas.py + homeassistant/components/insteon/switch.py + homeassistant/components/insteon/utils.py + homeassistant/components/intellifire/__init__.py + homeassistant/components/intellifire/binary_sensor.py + homeassistant/components/intellifire/climate.py + homeassistant/components/intellifire/coordinator.py + homeassistant/components/intellifire/entity.py + homeassistant/components/intellifire/fan.py + homeassistant/components/intellifire/light.py + homeassistant/components/intellifire/number.py + homeassistant/components/intellifire/sensor.py + homeassistant/components/intellifire/switch.py + homeassistant/components/intesishome/* + homeassistant/components/ios/__init__.py + homeassistant/components/ios/notify.py + homeassistant/components/ios/sensor.py + homeassistant/components/iperf3/* + homeassistant/components/iqvia/__init__.py + homeassistant/components/iqvia/sensor.py + homeassistant/components/irish_rail_transport/sensor.py + homeassistant/components/iss/__init__.py + homeassistant/components/iss/sensor.py + homeassistant/components/ista_ecotrend/coordinator.py + homeassistant/components/isy994/__init__.py + homeassistant/components/isy994/binary_sensor.py + homeassistant/components/isy994/button.py + homeassistant/components/isy994/climate.py + homeassistant/components/isy994/cover.py + homeassistant/components/isy994/entity.py + homeassistant/components/isy994/fan.py + homeassistant/components/isy994/helpers.py + homeassistant/components/isy994/light.py + homeassistant/components/isy994/lock.py + homeassistant/components/isy994/models.py + homeassistant/components/isy994/number.py + homeassistant/components/isy994/select.py + homeassistant/components/isy994/sensor.py + homeassistant/components/isy994/services.py + homeassistant/components/isy994/switch.py + homeassistant/components/isy994/util.py + homeassistant/components/itach/remote.py + homeassistant/components/itunes/media_player.py + homeassistant/components/izone/__init__.py + homeassistant/components/izone/climate.py + homeassistant/components/izone/discovery.py + homeassistant/components/joaoapps_join/* + homeassistant/components/juicenet/__init__.py + homeassistant/components/juicenet/device.py + homeassistant/components/juicenet/entity.py + homeassistant/components/juicenet/number.py + homeassistant/components/juicenet/sensor.py + homeassistant/components/juicenet/switch.py + homeassistant/components/justnimbus/coordinator.py + homeassistant/components/justnimbus/entity.py + homeassistant/components/justnimbus/sensor.py + homeassistant/components/kaiterra/* + homeassistant/components/kankun/switch.py + homeassistant/components/keba/* + homeassistant/components/keenetic_ndms2/__init__.py + homeassistant/components/keenetic_ndms2/binary_sensor.py + homeassistant/components/keenetic_ndms2/device_tracker.py + homeassistant/components/keenetic_ndms2/router.py + homeassistant/components/kef/* + homeassistant/components/keyboard/* + homeassistant/components/keyboard_remote/* + homeassistant/components/keymitt_ble/__init__.py + homeassistant/components/keymitt_ble/coordinator.py + homeassistant/components/keymitt_ble/entity.py + 
homeassistant/components/keymitt_ble/switch.py + homeassistant/components/kitchen_sink/weather.py + homeassistant/components/kiwi/lock.py + homeassistant/components/kodi/__init__.py + homeassistant/components/kodi/browse_media.py + homeassistant/components/kodi/media_player.py + homeassistant/components/kodi/notify.py + homeassistant/components/konnected/__init__.py + homeassistant/components/konnected/panel.py + homeassistant/components/konnected/switch.py + homeassistant/components/kostal_plenticore/__init__.py + homeassistant/components/kostal_plenticore/coordinator.py + homeassistant/components/kostal_plenticore/helper.py + homeassistant/components/kostal_plenticore/select.py + homeassistant/components/kostal_plenticore/sensor.py + homeassistant/components/kostal_plenticore/switch.py + homeassistant/components/kwb/sensor.py + homeassistant/components/lacrosse/sensor.py + homeassistant/components/lannouncer/notify.py + homeassistant/components/launch_library/__init__.py + homeassistant/components/launch_library/sensor.py + homeassistant/components/lcn/climate.py + homeassistant/components/lcn/helpers.py + homeassistant/components/lcn/services.py + homeassistant/components/ld2410_ble/__init__.py + homeassistant/components/ld2410_ble/binary_sensor.py + homeassistant/components/ld2410_ble/coordinator.py + homeassistant/components/ld2410_ble/sensor.py + homeassistant/components/led_ble/__init__.py + homeassistant/components/led_ble/light.py + homeassistant/components/lg_netcast/media_player.py + homeassistant/components/lg_soundbar/__init__.py + homeassistant/components/lg_soundbar/media_player.py + homeassistant/components/lightwave/* + homeassistant/components/limitlessled/light.py + homeassistant/components/linksys_smart/device_tracker.py + homeassistant/components/linode/* + homeassistant/components/linux_battery/sensor.py + homeassistant/components/lirc/* + homeassistant/components/livisi/__init__.py + homeassistant/components/livisi/binary_sensor.py + homeassistant/components/livisi/climate.py + homeassistant/components/livisi/coordinator.py + homeassistant/components/livisi/entity.py + homeassistant/components/livisi/switch.py + homeassistant/components/llamalab_automate/notify.py + homeassistant/components/logi_circle/__init__.py + homeassistant/components/logi_circle/camera.py + homeassistant/components/logi_circle/sensor.py + homeassistant/components/london_underground/sensor.py + homeassistant/components/lookin/__init__.py + homeassistant/components/lookin/climate.py + homeassistant/components/lookin/coordinator.py + homeassistant/components/lookin/entity.py + homeassistant/components/lookin/light.py + homeassistant/components/lookin/media_player.py + homeassistant/components/lookin/sensor.py + homeassistant/components/loqed/sensor.py + homeassistant/components/luci/device_tracker.py + homeassistant/components/lupusec/__init__.py + homeassistant/components/lupusec/alarm_control_panel.py + homeassistant/components/lupusec/binary_sensor.py + homeassistant/components/lupusec/entity.py + homeassistant/components/lupusec/switch.py + homeassistant/components/lutron/__init__.py + homeassistant/components/lutron/binary_sensor.py + homeassistant/components/lutron/cover.py + homeassistant/components/lutron/entity.py + homeassistant/components/lutron/event.py + homeassistant/components/lutron/fan.py + homeassistant/components/lutron/light.py + homeassistant/components/lutron/switch.py + homeassistant/components/lutron_caseta/__init__.py + 
homeassistant/components/lutron_caseta/binary_sensor.py + homeassistant/components/lutron_caseta/cover.py + homeassistant/components/lutron_caseta/fan.py + homeassistant/components/lutron_caseta/light.py + homeassistant/components/lutron_caseta/switch.py + homeassistant/components/lw12wifi/light.py + homeassistant/components/lyric/__init__.py + homeassistant/components/lyric/api.py + homeassistant/components/lyric/climate.py + homeassistant/components/lyric/sensor.py + homeassistant/components/mailgun/notify.py + homeassistant/components/mastodon/notify.py + homeassistant/components/matrix/__init__.py + homeassistant/components/matrix/notify.py + homeassistant/components/matter/__init__.py + homeassistant/components/matter/fan.py + homeassistant/components/meater/__init__.py + homeassistant/components/meater/sensor.py + homeassistant/components/medcom_ble/__init__.py + homeassistant/components/medcom_ble/sensor.py + homeassistant/components/mediaroom/media_player.py + homeassistant/components/melcloud/__init__.py + homeassistant/components/melcloud/climate.py + homeassistant/components/melcloud/sensor.py + homeassistant/components/melcloud/water_heater.py + homeassistant/components/melnor/__init__.py + homeassistant/components/message_bird/notify.py + homeassistant/components/met/weather.py + homeassistant/components/met_eireann/__init__.py + homeassistant/components/met_eireann/weather.py + homeassistant/components/meteo_france/__init__.py + homeassistant/components/meteo_france/sensor.py + homeassistant/components/meteo_france/weather.py + homeassistant/components/meteoalarm/* + homeassistant/components/meteoclimatic/__init__.py + homeassistant/components/meteoclimatic/sensor.py + homeassistant/components/meteoclimatic/weather.py + homeassistant/components/microbees/__init__.py + homeassistant/components/microbees/api.py + homeassistant/components/microbees/application_credentials.py + homeassistant/components/microbees/binary_sensor.py + homeassistant/components/microbees/button.py + homeassistant/components/microbees/climate.py + homeassistant/components/microbees/coordinator.py + homeassistant/components/microbees/cover.py + homeassistant/components/microbees/entity.py + homeassistant/components/microbees/light.py + homeassistant/components/microbees/sensor.py + homeassistant/components/microbees/switch.py + homeassistant/components/microsoft/tts.py + homeassistant/components/mikrotik/coordinator.py + homeassistant/components/mill/climate.py + homeassistant/components/mill/sensor.py + homeassistant/components/minio/minio_helper.py + homeassistant/components/mjpeg/camera.py + homeassistant/components/mjpeg/util.py + homeassistant/components/mochad/__init__.py + homeassistant/components/mochad/light.py + homeassistant/components/mochad/switch.py + homeassistant/components/modem_callerid/button.py + homeassistant/components/modem_callerid/sensor.py + homeassistant/components/moehlenhoff_alpha2/climate.py + homeassistant/components/moehlenhoff_alpha2/coordinator.py + homeassistant/components/monzo/__init__.py + homeassistant/components/monzo/api.py + homeassistant/components/motion_blinds/__init__.py + homeassistant/components/motion_blinds/coordinator.py + homeassistant/components/motion_blinds/cover.py + homeassistant/components/motion_blinds/entity.py + homeassistant/components/motion_blinds/sensor.py + homeassistant/components/motionblinds_ble/__init__.py + homeassistant/components/motionblinds_ble/button.py + homeassistant/components/motionblinds_ble/cover.py + 
homeassistant/components/motionblinds_ble/entity.py + homeassistant/components/motionblinds_ble/select.py + homeassistant/components/motionblinds_ble/sensor.py + homeassistant/components/motionmount/__init__.py + homeassistant/components/motionmount/binary_sensor.py + homeassistant/components/motionmount/entity.py + homeassistant/components/motionmount/number.py + homeassistant/components/motionmount/select.py + homeassistant/components/motionmount/sensor.py + homeassistant/components/mpd/media_player.py + homeassistant/components/mqtt_room/sensor.py + homeassistant/components/msteams/notify.py + homeassistant/components/mullvad/__init__.py + homeassistant/components/mullvad/binary_sensor.py + homeassistant/components/mutesync/__init__.py + homeassistant/components/mutesync/binary_sensor.py + homeassistant/components/mvglive/sensor.py + homeassistant/components/mycroft/* + homeassistant/components/mysensors/__init__.py + homeassistant/components/mysensors/climate.py + homeassistant/components/mysensors/cover.py + homeassistant/components/mysensors/gateway.py + homeassistant/components/mysensors/handler.py + homeassistant/components/mysensors/helpers.py + homeassistant/components/mysensors/light.py + homeassistant/components/mysensors/switch.py + homeassistant/components/mystrom/binary_sensor.py + homeassistant/components/mystrom/light.py + homeassistant/components/mystrom/sensor.py + homeassistant/components/mystrom/switch.py + homeassistant/components/myuplink/__init__.py + homeassistant/components/myuplink/api.py + homeassistant/components/myuplink/application_credentials.py + homeassistant/components/myuplink/coordinator.py + homeassistant/components/myuplink/entity.py + homeassistant/components/myuplink/helpers.py + homeassistant/components/myuplink/sensor.py + homeassistant/components/nad/media_player.py + homeassistant/components/nanoleaf/__init__.py + homeassistant/components/nanoleaf/button.py + homeassistant/components/nanoleaf/coordinator.py + homeassistant/components/nanoleaf/entity.py + homeassistant/components/nanoleaf/event.py + homeassistant/components/nanoleaf/light.py + homeassistant/components/neato/__init__.py + homeassistant/components/neato/api.py + homeassistant/components/neato/button.py + homeassistant/components/neato/camera.py + homeassistant/components/neato/entity.py + homeassistant/components/neato/hub.py + homeassistant/components/neato/sensor.py + homeassistant/components/neato/switch.py + homeassistant/components/neato/vacuum.py + homeassistant/components/nederlandse_spoorwegen/sensor.py + homeassistant/components/netdata/sensor.py + homeassistant/components/netgear/__init__.py + homeassistant/components/netgear/button.py + homeassistant/components/netgear/device_tracker.py + homeassistant/components/netgear/entity.py + homeassistant/components/netgear/router.py + homeassistant/components/netgear/sensor.py + homeassistant/components/netgear/switch.py + homeassistant/components/netgear/update.py + homeassistant/components/netgear_lte/__init__.py + homeassistant/components/netgear_lte/notify.py + homeassistant/components/netio/switch.py + homeassistant/components/neurio_energy/sensor.py + homeassistant/components/nexia/climate.py + homeassistant/components/nexia/entity.py + homeassistant/components/nexia/switch.py + homeassistant/components/nextcloud/__init__.py + homeassistant/components/nextcloud/binary_sensor.py + homeassistant/components/nextcloud/coordinator.py + homeassistant/components/nextcloud/entity.py + homeassistant/components/nextcloud/sensor.py + 
homeassistant/components/nextcloud/update.py + homeassistant/components/nfandroidtv/__init__.py + homeassistant/components/nfandroidtv/notify.py + homeassistant/components/nibe_heatpump/__init__.py + homeassistant/components/nibe_heatpump/binary_sensor.py + homeassistant/components/nibe_heatpump/select.py + homeassistant/components/nibe_heatpump/sensor.py + homeassistant/components/nibe_heatpump/switch.py + homeassistant/components/nibe_heatpump/water_heater.py + homeassistant/components/niko_home_control/light.py + homeassistant/components/nilu/air_quality.py + homeassistant/components/nissan_leaf/* + homeassistant/components/nmap_tracker/__init__.py + homeassistant/components/nmap_tracker/device_tracker.py + homeassistant/components/nmbs/sensor.py + homeassistant/components/noaa_tides/sensor.py + homeassistant/components/nobo_hub/__init__.py + homeassistant/components/nobo_hub/climate.py + homeassistant/components/nobo_hub/select.py + homeassistant/components/nobo_hub/sensor.py + homeassistant/components/norway_air/air_quality.py + homeassistant/components/notify_events/notify.py + homeassistant/components/notion/__init__.py + homeassistant/components/notion/binary_sensor.py + homeassistant/components/notion/coordinator.py + homeassistant/components/notion/sensor.py + homeassistant/components/notion/util.py + homeassistant/components/nsw_fuel_station/sensor.py + homeassistant/components/nuki/__init__.py + homeassistant/components/nuki/coordinator.py + homeassistant/components/nuki/lock.py + homeassistant/components/nx584/alarm_control_panel.py + homeassistant/components/oasa_telematics/sensor.py + homeassistant/components/obihai/__init__.py + homeassistant/components/obihai/button.py + homeassistant/components/obihai/connectivity.py + homeassistant/components/obihai/sensor.py + homeassistant/components/octoprint/__init__.py + homeassistant/components/octoprint/coordinator.py + homeassistant/components/oem/climate.py + homeassistant/components/ohmconnect/sensor.py + homeassistant/components/ombi/* + homeassistant/components/omnilogic/__init__.py + homeassistant/components/omnilogic/coordinator.py + homeassistant/components/omnilogic/sensor.py + homeassistant/components/omnilogic/switch.py + homeassistant/components/ondilo_ico/__init__.py + homeassistant/components/ondilo_ico/api.py + homeassistant/components/ondilo_ico/coordinator.py + homeassistant/components/ondilo_ico/sensor.py + homeassistant/components/onkyo/media_player.py + homeassistant/components/onvif/__init__.py + homeassistant/components/onvif/binary_sensor.py + homeassistant/components/onvif/camera.py + homeassistant/components/onvif/device.py + homeassistant/components/onvif/event.py + homeassistant/components/onvif/parsers.py + homeassistant/components/onvif/sensor.py + homeassistant/components/onvif/util.py + homeassistant/components/open_meteo/weather.py + homeassistant/components/openevse/sensor.py + homeassistant/components/openexchangerates/__init__.py + homeassistant/components/openexchangerates/coordinator.py + homeassistant/components/openexchangerates/sensor.py + homeassistant/components/opengarage/__init__.py + homeassistant/components/opengarage/binary_sensor.py + homeassistant/components/opengarage/cover.py + homeassistant/components/opengarage/entity.py + homeassistant/components/opengarage/sensor.py + homeassistant/components/openhardwaremonitor/sensor.py + homeassistant/components/openhome/__init__.py + homeassistant/components/openhome/media_player.py + homeassistant/components/opensensemap/air_quality.py + 
homeassistant/components/opentherm_gw/__init__.py + homeassistant/components/opentherm_gw/binary_sensor.py + homeassistant/components/opentherm_gw/climate.py + homeassistant/components/opentherm_gw/sensor.py + homeassistant/components/openuv/__init__.py + homeassistant/components/openuv/binary_sensor.py + homeassistant/components/openuv/coordinator.py + homeassistant/components/openuv/sensor.py + homeassistant/components/openweathermap/__init__.py + homeassistant/components/openweathermap/coordinator.py + homeassistant/components/openweathermap/repairs.py + homeassistant/components/openweathermap/sensor.py + homeassistant/components/openweathermap/weather.py + homeassistant/components/opnsense/__init__.py + homeassistant/components/opnsense/device_tracker.py + homeassistant/components/opower/__init__.py + homeassistant/components/opower/coordinator.py + homeassistant/components/opower/sensor.py + homeassistant/components/opple/light.py + homeassistant/components/oru/* + homeassistant/components/orvibo/switch.py + homeassistant/components/osoenergy/__init__.py + homeassistant/components/osoenergy/binary_sensor.py + homeassistant/components/osoenergy/entity.py + homeassistant/components/osoenergy/sensor.py + homeassistant/components/osoenergy/water_heater.py + homeassistant/components/osramlightify/light.py + homeassistant/components/otp/sensor.py + homeassistant/components/overkiz/__init__.py + homeassistant/components/overkiz/alarm_control_panel.py + homeassistant/components/overkiz/binary_sensor.py + homeassistant/components/overkiz/button.py + homeassistant/components/overkiz/climate.py + homeassistant/components/overkiz/climate_entities/* + homeassistant/components/overkiz/coordinator.py + homeassistant/components/overkiz/cover.py + homeassistant/components/overkiz/cover_entities/* + homeassistant/components/overkiz/entity.py + homeassistant/components/overkiz/executor.py + homeassistant/components/overkiz/light.py + homeassistant/components/overkiz/lock.py + homeassistant/components/overkiz/number.py + homeassistant/components/overkiz/select.py + homeassistant/components/overkiz/sensor.py + homeassistant/components/overkiz/siren.py + homeassistant/components/overkiz/switch.py + homeassistant/components/overkiz/water_heater.py + homeassistant/components/overkiz/water_heater_entities/* + homeassistant/components/ovo_energy/__init__.py + homeassistant/components/ovo_energy/sensor.py + homeassistant/components/panasonic_bluray/media_player.py + homeassistant/components/panasonic_viera/media_player.py + homeassistant/components/pandora/media_player.py + homeassistant/components/pencom/switch.py + homeassistant/components/permobil/__init__.py + homeassistant/components/permobil/binary_sensor.py + homeassistant/components/permobil/coordinator.py + homeassistant/components/permobil/entity.py + homeassistant/components/permobil/sensor.py + homeassistant/components/philips_js/__init__.py + homeassistant/components/philips_js/coordinator.py + homeassistant/components/philips_js/light.py + homeassistant/components/philips_js/media_player.py + homeassistant/components/philips_js/remote.py + homeassistant/components/philips_js/switch.py + homeassistant/components/pi_hole/sensor.py + homeassistant/components/picotts/tts.py + homeassistant/components/pilight/base_class.py + homeassistant/components/pilight/binary_sensor.py + homeassistant/components/pilight/light.py + homeassistant/components/pilight/switch.py + homeassistant/components/ping/__init__.py + homeassistant/components/ping/helpers.py + 
homeassistant/components/pioneer/media_player.py + homeassistant/components/plaato/__init__.py + homeassistant/components/plaato/binary_sensor.py + homeassistant/components/plaato/entity.py + homeassistant/components/plaato/sensor.py + homeassistant/components/plex/cast.py + homeassistant/components/plex/media_player.py + homeassistant/components/plex/view.py + homeassistant/components/plum_lightpad/light.py + homeassistant/components/pocketcasts/sensor.py + homeassistant/components/point/__init__.py + homeassistant/components/point/alarm_control_panel.py + homeassistant/components/point/binary_sensor.py + homeassistant/components/point/sensor.py + homeassistant/components/powerwall/__init__.py + homeassistant/components/progettihwsw/__init__.py + homeassistant/components/progettihwsw/binary_sensor.py + homeassistant/components/progettihwsw/switch.py + homeassistant/components/proliphix/climate.py + homeassistant/components/prowl/notify.py + homeassistant/components/proxmoxve/* + homeassistant/components/proxy/camera.py + homeassistant/components/pulseaudio_loopback/switch.py + homeassistant/components/purpleair/coordinator.py + homeassistant/components/pushbullet/api.py + homeassistant/components/pushbullet/notify.py + homeassistant/components/pushbullet/sensor.py + homeassistant/components/pushover/notify.py + homeassistant/components/pushsafer/notify.py + homeassistant/components/qbittorrent/__init__.py + homeassistant/components/qbittorrent/coordinator.py + homeassistant/components/qbittorrent/sensor.py + homeassistant/components/qnap/__init__.py + homeassistant/components/qnap/coordinator.py + homeassistant/components/qnap/sensor.py + homeassistant/components/qrcode/image_processing.py + homeassistant/components/quantum_gateway/device_tracker.py + homeassistant/components/qvr_pro/* + homeassistant/components/rabbitair/__init__.py + homeassistant/components/rabbitair/coordinator.py + homeassistant/components/rabbitair/entity.py + homeassistant/components/rabbitair/fan.py + homeassistant/components/rachio/__init__.py + homeassistant/components/rachio/binary_sensor.py + homeassistant/components/rachio/coordinator.py + homeassistant/components/rachio/device.py + homeassistant/components/rachio/entity.py + homeassistant/components/rachio/switch.py + homeassistant/components/rachio/webhooks.py + homeassistant/components/radio_browser/__init__.py + homeassistant/components/radiotherm/__init__.py + homeassistant/components/radiotherm/climate.py + homeassistant/components/radiotherm/coordinator.py + homeassistant/components/radiotherm/data.py + homeassistant/components/radiotherm/entity.py + homeassistant/components/radiotherm/switch.py + homeassistant/components/radiotherm/util.py + homeassistant/components/raincloud/* + homeassistant/components/rainmachine/__init__.py + homeassistant/components/rainmachine/binary_sensor.py + homeassistant/components/rainmachine/button.py + homeassistant/components/rainmachine/coordinator.py + homeassistant/components/rainmachine/select.py + homeassistant/components/rainmachine/sensor.py + homeassistant/components/rainmachine/switch.py + homeassistant/components/rainmachine/update.py + homeassistant/components/rainmachine/util.py + homeassistant/components/raspyrfm/* + homeassistant/components/recollect_waste/sensor.py + homeassistant/components/recorder/repack.py + homeassistant/components/recswitch/switch.py + homeassistant/components/reddit/sensor.py + homeassistant/components/refoss/__init__.py + homeassistant/components/refoss/bridge.py + 
homeassistant/components/refoss/coordinator.py + homeassistant/components/refoss/entity.py + homeassistant/components/refoss/sensor.py + homeassistant/components/refoss/switch.py + homeassistant/components/refoss/util.py + homeassistant/components/rejseplanen/sensor.py + homeassistant/components/remember_the_milk/__init__.py + homeassistant/components/remote_rpi_gpio/* + homeassistant/components/renson/__init__.py + homeassistant/components/renson/binary_sensor.py + homeassistant/components/renson/button.py + homeassistant/components/renson/coordinator.py + homeassistant/components/renson/entity.py + homeassistant/components/renson/fan.py + homeassistant/components/renson/number.py + homeassistant/components/renson/sensor.py + homeassistant/components/renson/switch.py + homeassistant/components/renson/time.py + homeassistant/components/reolink/binary_sensor.py + homeassistant/components/reolink/button.py + homeassistant/components/reolink/camera.py + homeassistant/components/reolink/entity.py + homeassistant/components/reolink/host.py + homeassistant/components/reolink/light.py + homeassistant/components/reolink/number.py + homeassistant/components/reolink/select.py + homeassistant/components/reolink/sensor.py + homeassistant/components/reolink/siren.py + homeassistant/components/reolink/switch.py + homeassistant/components/reolink/update.py + homeassistant/components/repetier/__init__.py + homeassistant/components/repetier/sensor.py + homeassistant/components/rest/notify.py + homeassistant/components/rest/switch.py + homeassistant/components/ridwell/__init__.py + homeassistant/components/ridwell/calendar.py + homeassistant/components/ridwell/coordinator.py + homeassistant/components/ridwell/switch.py + homeassistant/components/ring/camera.py + homeassistant/components/ripple/sensor.py + homeassistant/components/roborock/coordinator.py + homeassistant/components/rocketchat/notify.py + homeassistant/components/romy/__init__.py + homeassistant/components/romy/binary_sensor.py + homeassistant/components/romy/coordinator.py + homeassistant/components/romy/entity.py + homeassistant/components/romy/sensor.py + homeassistant/components/romy/vacuum.py + homeassistant/components/roomba/__init__.py + homeassistant/components/roomba/binary_sensor.py + homeassistant/components/roomba/braava.py + homeassistant/components/roomba/irobot_base.py + homeassistant/components/roomba/roomba.py + homeassistant/components/roomba/sensor.py + homeassistant/components/roomba/vacuum.py + homeassistant/components/roon/__init__.py + homeassistant/components/roon/event.py + homeassistant/components/roon/media_browser.py + homeassistant/components/roon/media_player.py + homeassistant/components/roon/server.py + homeassistant/components/route53/* + homeassistant/components/rpi_camera/* + homeassistant/components/rtorrent/sensor.py + homeassistant/components/russound_rio/media_player.py + homeassistant/components/russound_rnet/media_player.py + homeassistant/components/ruuvi_gateway/__init__.py + homeassistant/components/ruuvi_gateway/bluetooth.py + homeassistant/components/ruuvi_gateway/coordinator.py + homeassistant/components/rympro/__init__.py + homeassistant/components/rympro/coordinator.py + homeassistant/components/rympro/sensor.py + homeassistant/components/sabnzbd/__init__.py + homeassistant/components/sabnzbd/coordinator.py + homeassistant/components/sabnzbd/sensor.py + homeassistant/components/saj/sensor.py + homeassistant/components/satel_integra/* + homeassistant/components/schluter/* + 
homeassistant/components/screenlogic/binary_sensor.py + homeassistant/components/screenlogic/climate.py + homeassistant/components/screenlogic/coordinator.py + homeassistant/components/screenlogic/entity.py + homeassistant/components/screenlogic/light.py + homeassistant/components/screenlogic/number.py + homeassistant/components/screenlogic/sensor.py + homeassistant/components/screenlogic/switch.py + homeassistant/components/scsgate/* + homeassistant/components/sendgrid/notify.py + homeassistant/components/sense/__init__.py + homeassistant/components/sense/binary_sensor.py + homeassistant/components/sense/sensor.py + homeassistant/components/senz/__init__.py + homeassistant/components/senz/api.py + homeassistant/components/senz/climate.py + homeassistant/components/serial/sensor.py + homeassistant/components/serial_pm/sensor.py + homeassistant/components/sesame/lock.py + homeassistant/components/seven_segments/image_processing.py + homeassistant/components/shodan/sensor.py + homeassistant/components/sia/__init__.py + homeassistant/components/sia/alarm_control_panel.py + homeassistant/components/sia/binary_sensor.py + homeassistant/components/sia/hub.py + homeassistant/components/sia/sia_entity_base.py + homeassistant/components/sia/utils.py + homeassistant/components/simplepush/__init__.py + homeassistant/components/simplepush/notify.py + homeassistant/components/simplisafe/__init__.py + homeassistant/components/simplisafe/alarm_control_panel.py + homeassistant/components/simplisafe/binary_sensor.py + homeassistant/components/simplisafe/button.py + homeassistant/components/simplisafe/lock.py + homeassistant/components/simplisafe/sensor.py + homeassistant/components/sinch/* + homeassistant/components/sisyphus/* + homeassistant/components/sky_hub/* + homeassistant/components/skybeacon/sensor.py + homeassistant/components/skybell/__init__.py + homeassistant/components/skybell/camera.py + homeassistant/components/skybell/light.py + homeassistant/components/skybell/sensor.py + homeassistant/components/skybell/switch.py + homeassistant/components/slack/__init__.py + homeassistant/components/slack/notify.py + homeassistant/components/slack/sensor.py + homeassistant/components/slide/* + homeassistant/components/slimproto/__init__.py + homeassistant/components/slimproto/media_player.py + homeassistant/components/sma/__init__.py + homeassistant/components/sma/sensor.py + homeassistant/components/smappee/__init__.py + homeassistant/components/smappee/api.py + homeassistant/components/smappee/binary_sensor.py + homeassistant/components/smappee/sensor.py + homeassistant/components/smappee/switch.py + homeassistant/components/smarty/* + homeassistant/components/sms/__init__.py + homeassistant/components/sms/coordinator.py + homeassistant/components/sms/gateway.py + homeassistant/components/sms/notify.py + homeassistant/components/sms/sensor.py + homeassistant/components/smtp/notify.py + homeassistant/components/snapcast/__init__.py + homeassistant/components/snapcast/media_player.py + homeassistant/components/snapcast/server.py + homeassistant/components/snmp/device_tracker.py + homeassistant/components/snmp/sensor.py + homeassistant/components/snmp/switch.py + homeassistant/components/snooz/__init__.py + homeassistant/components/solaredge/__init__.py + homeassistant/components/solaredge/coordinator.py + homeassistant/components/solaredge_local/sensor.py + homeassistant/components/solax/__init__.py + homeassistant/components/solax/sensor.py + homeassistant/components/soma/__init__.py + 
homeassistant/components/soma/cover.py + homeassistant/components/soma/sensor.py + homeassistant/components/soma/utils.py + homeassistant/components/somfy_mylink/__init__.py + homeassistant/components/somfy_mylink/cover.py + homeassistant/components/sonos/__init__.py + homeassistant/components/sonos/alarms.py + homeassistant/components/sonos/entity.py + homeassistant/components/sonos/favorites.py + homeassistant/components/sonos/helpers.py + homeassistant/components/sonos/household_coordinator.py + homeassistant/components/sonos/media.py + homeassistant/components/sonos/media_browser.py + homeassistant/components/sonos/media_player.py + homeassistant/components/sonos/speaker.py + homeassistant/components/sonos/switch.py + homeassistant/components/sony_projector/switch.py + homeassistant/components/spc/__init__.py + homeassistant/components/spc/alarm_control_panel.py + homeassistant/components/spc/binary_sensor.py + homeassistant/components/spider/__init__.py + homeassistant/components/spider/climate.py + homeassistant/components/spider/sensor.py + homeassistant/components/spider/switch.py + homeassistant/components/splunk/* + homeassistant/components/spotify/__init__.py + homeassistant/components/spotify/browse_media.py + homeassistant/components/spotify/media_player.py + homeassistant/components/spotify/system_health.py + homeassistant/components/spotify/util.py + homeassistant/components/squeezebox/__init__.py + homeassistant/components/squeezebox/browse_media.py + homeassistant/components/squeezebox/media_player.py + homeassistant/components/starline/__init__.py + homeassistant/components/starline/account.py + homeassistant/components/starline/binary_sensor.py + homeassistant/components/starline/button.py + homeassistant/components/starline/device_tracker.py + homeassistant/components/starline/entity.py + homeassistant/components/starline/lock.py + homeassistant/components/starline/sensor.py + homeassistant/components/starline/switch.py + homeassistant/components/starlingbank/sensor.py + homeassistant/components/starlink/__init__.py + homeassistant/components/starlink/binary_sensor.py + homeassistant/components/starlink/button.py + homeassistant/components/starlink/coordinator.py + homeassistant/components/starlink/device_tracker.py + homeassistant/components/starlink/sensor.py + homeassistant/components/starlink/switch.py + homeassistant/components/starlink/time.py + homeassistant/components/steam_online/sensor.py + homeassistant/components/stiebel_eltron/* + homeassistant/components/stookalert/__init__.py + homeassistant/components/stookalert/binary_sensor.py + homeassistant/components/stookwijzer/__init__.py + homeassistant/components/stookwijzer/sensor.py + homeassistant/components/stream/__init__.py + homeassistant/components/stream/core.py + homeassistant/components/stream/fmp4utils.py + homeassistant/components/stream/hls.py + homeassistant/components/stream/worker.py + homeassistant/components/streamlabswater/__init__.py + homeassistant/components/streamlabswater/binary_sensor.py + homeassistant/components/streamlabswater/coordinator.py + homeassistant/components/streamlabswater/sensor.py + homeassistant/components/suez_water/__init__.py + homeassistant/components/suez_water/sensor.py + homeassistant/components/supervisord/sensor.py + homeassistant/components/supla/* + homeassistant/components/surepetcare/__init__.py + homeassistant/components/surepetcare/binary_sensor.py + homeassistant/components/surepetcare/coordinator.py + homeassistant/components/surepetcare/entity.py + 
homeassistant/components/surepetcare/sensor.py + homeassistant/components/swiss_hydrological_data/sensor.py + homeassistant/components/swiss_public_transport/__init__.py + homeassistant/components/swiss_public_transport/coordinator.py + homeassistant/components/swiss_public_transport/sensor.py + homeassistant/components/swisscom/device_tracker.py + homeassistant/components/switchbee/__init__.py + homeassistant/components/switchbee/button.py + homeassistant/components/switchbee/climate.py + homeassistant/components/switchbee/coordinator.py + homeassistant/components/switchbee/cover.py + homeassistant/components/switchbee/entity.py + homeassistant/components/switchbee/light.py + homeassistant/components/switchbee/switch.py + homeassistant/components/switchbot/__init__.py + homeassistant/components/switchbot/binary_sensor.py + homeassistant/components/switchbot/coordinator.py + homeassistant/components/switchbot/cover.py + homeassistant/components/switchbot/entity.py + homeassistant/components/switchbot/humidifier.py + homeassistant/components/switchbot/light.py + homeassistant/components/switchbot/lock.py + homeassistant/components/switchbot/sensor.py + homeassistant/components/switchbot/switch.py + homeassistant/components/switchbot_cloud/climate.py + homeassistant/components/switchbot_cloud/coordinator.py + homeassistant/components/switchbot_cloud/entity.py + homeassistant/components/switchbot_cloud/sensor.py + homeassistant/components/switchbot_cloud/switch.py + homeassistant/components/switchmate/switch.py + homeassistant/components/syncthing/__init__.py + homeassistant/components/syncthing/sensor.py + homeassistant/components/syncthru/__init__.py + homeassistant/components/syncthru/sensor.py + homeassistant/components/synology_chat/notify.py + homeassistant/components/synology_dsm/__init__.py + homeassistant/components/synology_dsm/binary_sensor.py + homeassistant/components/synology_dsm/button.py + homeassistant/components/synology_dsm/camera.py + homeassistant/components/synology_dsm/common.py + homeassistant/components/synology_dsm/coordinator.py + homeassistant/components/synology_dsm/entity.py + homeassistant/components/synology_dsm/sensor.py + homeassistant/components/synology_dsm/service.py + homeassistant/components/synology_dsm/switch.py + homeassistant/components/synology_dsm/update.py + homeassistant/components/synology_srm/device_tracker.py + homeassistant/components/syslog/notify.py + homeassistant/components/system_bridge/__init__.py + homeassistant/components/system_bridge/binary_sensor.py + homeassistant/components/system_bridge/coordinator.py + homeassistant/components/system_bridge/entity.py + homeassistant/components/system_bridge/media_player.py + homeassistant/components/system_bridge/notify.py + homeassistant/components/system_bridge/sensor.py + homeassistant/components/system_bridge/update.py + homeassistant/components/tado/__init__.py + homeassistant/components/tado/binary_sensor.py + homeassistant/components/tado/climate.py + homeassistant/components/tado/device_tracker.py + homeassistant/components/tado/sensor.py + homeassistant/components/tado/water_heater.py + homeassistant/components/tami4/button.py + homeassistant/components/tank_utility/sensor.py + homeassistant/components/tapsaff/binary_sensor.py + homeassistant/components/tautulli/__init__.py + homeassistant/components/tautulli/coordinator.py + homeassistant/components/tautulli/sensor.py + homeassistant/components/ted5000/sensor.py + homeassistant/components/telegram/notify.py + 
homeassistant/components/telegram_bot/__init__.py + homeassistant/components/telegram_bot/polling.py + homeassistant/components/telegram_bot/webhooks.py + homeassistant/components/tellduslive/__init__.py + homeassistant/components/tellduslive/binary_sensor.py + homeassistant/components/tellduslive/cover.py + homeassistant/components/tellduslive/entry.py + homeassistant/components/tellduslive/light.py + homeassistant/components/tellduslive/sensor.py + homeassistant/components/tellduslive/switch.py + homeassistant/components/tellstick/* + homeassistant/components/telnet/switch.py + homeassistant/components/temper/sensor.py + homeassistant/components/tensorflow/image_processing.py + homeassistant/components/tfiac/climate.py + homeassistant/components/thermoworks_smoke/sensor.py + homeassistant/components/thingspeak/* + homeassistant/components/thinkingcleaner/* + homeassistant/components/thomson/device_tracker.py + homeassistant/components/tibber/__init__.py + homeassistant/components/tibber/coordinator.py + homeassistant/components/tibber/sensor.py + homeassistant/components/tikteck/light.py + homeassistant/components/tile/__init__.py + homeassistant/components/tile/device_tracker.py + homeassistant/components/time_date/sensor.py + homeassistant/components/tmb/sensor.py + homeassistant/components/todoist/calendar.py + homeassistant/components/tolo/__init__.py + homeassistant/components/tolo/binary_sensor.py + homeassistant/components/tolo/button.py + homeassistant/components/tolo/climate.py + homeassistant/components/tolo/fan.py + homeassistant/components/tolo/light.py + homeassistant/components/tolo/number.py + homeassistant/components/tolo/select.py + homeassistant/components/tolo/sensor.py + homeassistant/components/tolo/switch.py + homeassistant/components/toon/__init__.py + homeassistant/components/toon/binary_sensor.py + homeassistant/components/toon/climate.py + homeassistant/components/toon/coordinator.py + homeassistant/components/toon/helpers.py + homeassistant/components/toon/models.py + homeassistant/components/toon/oauth2.py + homeassistant/components/toon/sensor.py + homeassistant/components/toon/switch.py + homeassistant/components/torque/sensor.py + homeassistant/components/totalconnect/__init__.py + homeassistant/components/touchline/climate.py + homeassistant/components/tplink_lte/* + homeassistant/components/tplink_omada/__init__.py + homeassistant/components/tplink_omada/binary_sensor.py + homeassistant/components/tplink_omada/controller.py + homeassistant/components/tplink_omada/update.py + homeassistant/components/traccar/device_tracker.py + homeassistant/components/traccar_server/__init__.py + homeassistant/components/traccar_server/coordinator.py + homeassistant/components/traccar_server/device_tracker.py + homeassistant/components/traccar_server/entity.py + homeassistant/components/traccar_server/helpers.py + homeassistant/components/traccar_server/sensor.py + homeassistant/components/tradfri/__init__.py + homeassistant/components/tradfri/base_class.py + homeassistant/components/tradfri/coordinator.py + homeassistant/components/tradfri/cover.py + homeassistant/components/tradfri/fan.py + homeassistant/components/tradfri/light.py + homeassistant/components/tradfri/sensor.py + homeassistant/components/tradfri/switch.py + homeassistant/components/trafikverket_weatherstation/__init__.py + homeassistant/components/trafikverket_weatherstation/coordinator.py + homeassistant/components/trafikverket_weatherstation/sensor.py + homeassistant/components/transmission/__init__.py 
+ homeassistant/components/transmission/coordinator.py + homeassistant/components/transmission/sensor.py + homeassistant/components/transmission/switch.py + homeassistant/components/travisci/sensor.py + homeassistant/components/tuya/__init__.py + homeassistant/components/tuya/alarm_control_panel.py + homeassistant/components/tuya/base.py + homeassistant/components/tuya/binary_sensor.py + homeassistant/components/tuya/button.py + homeassistant/components/tuya/camera.py + homeassistant/components/tuya/climate.py + homeassistant/components/tuya/cover.py + homeassistant/components/tuya/fan.py + homeassistant/components/tuya/humidifier.py + homeassistant/components/tuya/light.py + homeassistant/components/tuya/number.py + homeassistant/components/tuya/select.py + homeassistant/components/tuya/sensor.py + homeassistant/components/tuya/siren.py + homeassistant/components/tuya/switch.py + homeassistant/components/tuya/util.py + homeassistant/components/tuya/vacuum.py + homeassistant/components/twilio_call/notify.py + homeassistant/components/twilio_sms/notify.py + homeassistant/components/twitter/notify.py + homeassistant/components/ubus/device_tracker.py + homeassistant/components/ue_smart_radio/media_player.py + homeassistant/components/ukraine_alarm/__init__.py + homeassistant/components/ukraine_alarm/binary_sensor.py + homeassistant/components/unifi_direct/device_tracker.py + homeassistant/components/unifiled/* + homeassistant/components/upb/__init__.py + homeassistant/components/upb/light.py + homeassistant/components/upc_connect/* + homeassistant/components/upcloud/__init__.py + homeassistant/components/upcloud/binary_sensor.py + homeassistant/components/upcloud/switch.py + homeassistant/components/upnp/__init__.py + homeassistant/components/upnp/device.py + homeassistant/components/upnp/sensor.py + homeassistant/components/v2c/__init__.py + homeassistant/components/v2c/binary_sensor.py + homeassistant/components/v2c/coordinator.py + homeassistant/components/v2c/entity.py + homeassistant/components/v2c/number.py + homeassistant/components/v2c/switch.py + homeassistant/components/vallox/__init__.py + homeassistant/components/vallox/coordinator.py + homeassistant/components/vasttrafik/sensor.py + homeassistant/components/velbus/__init__.py + homeassistant/components/velbus/binary_sensor.py + homeassistant/components/velbus/button.py + homeassistant/components/velbus/climate.py + homeassistant/components/velbus/cover.py + homeassistant/components/velbus/entity.py + homeassistant/components/velbus/light.py + homeassistant/components/velbus/select.py + homeassistant/components/velbus/sensor.py + homeassistant/components/velbus/switch.py + homeassistant/components/velux/__init__.py + homeassistant/components/velux/cover.py + homeassistant/components/velux/light.py + homeassistant/components/venstar/climate.py + homeassistant/components/venstar/coordinator.py + homeassistant/components/venstar/sensor.py + homeassistant/components/verisure/__init__.py + homeassistant/components/verisure/alarm_control_panel.py + homeassistant/components/verisure/binary_sensor.py + homeassistant/components/verisure/camera.py + homeassistant/components/verisure/coordinator.py + homeassistant/components/verisure/lock.py + homeassistant/components/verisure/sensor.py + homeassistant/components/verisure/switch.py + homeassistant/components/versasense/* + homeassistant/components/vesync/__init__.py + homeassistant/components/vesync/fan.py + homeassistant/components/vesync/light.py + homeassistant/components/vesync/sensor.py 
+ homeassistant/components/vesync/switch.py + homeassistant/components/viaggiatreno/sensor.py + homeassistant/components/vicare/__init__.py + homeassistant/components/vicare/button.py + homeassistant/components/vicare/climate.py + homeassistant/components/vicare/entity.py + homeassistant/components/vicare/number.py + homeassistant/components/vicare/sensor.py + homeassistant/components/vicare/types.py + homeassistant/components/vicare/utils.py + homeassistant/components/vicare/water_heater.py + homeassistant/components/vilfo/__init__.py + homeassistant/components/vilfo/sensor.py + homeassistant/components/vivotek/camera.py + homeassistant/components/vlc/media_player.py + homeassistant/components/vlc_telnet/__init__.py + homeassistant/components/vlc_telnet/media_player.py + homeassistant/components/vodafone_station/__init__.py + homeassistant/components/vodafone_station/button.py + homeassistant/components/vodafone_station/coordinator.py + homeassistant/components/vodafone_station/device_tracker.py + homeassistant/components/vodafone_station/sensor.py + homeassistant/components/volkszaehler/sensor.py + homeassistant/components/volumio/__init__.py + homeassistant/components/volumio/browse_media.py + homeassistant/components/volumio/media_player.py + homeassistant/components/volvooncall/__init__.py + homeassistant/components/volvooncall/binary_sensor.py + homeassistant/components/volvooncall/device_tracker.py + homeassistant/components/volvooncall/lock.py + homeassistant/components/volvooncall/sensor.py + homeassistant/components/volvooncall/switch.py + homeassistant/components/vulcan/__init__.py + homeassistant/components/vulcan/calendar.py + homeassistant/components/vulcan/fetch_data.py + homeassistant/components/w800rf32/* + homeassistant/components/waqi/sensor.py + homeassistant/components/waterfurnace/* + homeassistant/components/watson_iot/* + homeassistant/components/watson_tts/tts.py + homeassistant/components/watttime/__init__.py + homeassistant/components/watttime/sensor.py + homeassistant/components/weatherflow/__init__.py + homeassistant/components/weatherflow/sensor.py + homeassistant/components/weatherflow_cloud/__init__.py + homeassistant/components/weatherflow_cloud/coordinator.py + homeassistant/components/weatherflow_cloud/weather.py + homeassistant/components/wiffi/__init__.py + homeassistant/components/wiffi/binary_sensor.py + homeassistant/components/wiffi/sensor.py + homeassistant/components/wiffi/wiffi_strings.py + homeassistant/components/wirelesstag/* + homeassistant/components/wolflink/__init__.py + homeassistant/components/wolflink/sensor.py + homeassistant/components/worldtidesinfo/sensor.py + homeassistant/components/worxlandroid/sensor.py + homeassistant/components/x10/light.py + homeassistant/components/xbox/__init__.py + homeassistant/components/xbox/api.py + homeassistant/components/xbox/base_sensor.py + homeassistant/components/xbox/binary_sensor.py + homeassistant/components/xbox/browse_media.py + homeassistant/components/xbox/coordinator.py + homeassistant/components/xbox/media_player.py + homeassistant/components/xbox/remote.py + homeassistant/components/xbox/sensor.py + homeassistant/components/xeoma/camera.py + homeassistant/components/xiaomi/camera.py + homeassistant/components/xiaomi_aqara/__init__.py + homeassistant/components/xiaomi_aqara/binary_sensor.py + homeassistant/components/xiaomi_aqara/cover.py + homeassistant/components/xiaomi_aqara/light.py + homeassistant/components/xiaomi_aqara/lock.py + homeassistant/components/xiaomi_aqara/sensor.py + 
homeassistant/components/xiaomi_aqara/switch.py + homeassistant/components/xiaomi_miio/__init__.py + homeassistant/components/xiaomi_miio/air_quality.py + homeassistant/components/xiaomi_miio/alarm_control_panel.py + homeassistant/components/xiaomi_miio/binary_sensor.py + homeassistant/components/xiaomi_miio/button.py + homeassistant/components/xiaomi_miio/device.py + homeassistant/components/xiaomi_miio/device_tracker.py + homeassistant/components/xiaomi_miio/fan.py + homeassistant/components/xiaomi_miio/gateway.py + homeassistant/components/xiaomi_miio/humidifier.py + homeassistant/components/xiaomi_miio/light.py + homeassistant/components/xiaomi_miio/number.py + homeassistant/components/xiaomi_miio/remote.py + homeassistant/components/xiaomi_miio/sensor.py + homeassistant/components/xiaomi_miio/switch.py + homeassistant/components/xiaomi_miio/typing.py + homeassistant/components/xiaomi_tv/media_player.py + homeassistant/components/xmpp/notify.py + homeassistant/components/xs1/* + homeassistant/components/yale_smart_alarm/__init__.py + homeassistant/components/yale_smart_alarm/alarm_control_panel.py + homeassistant/components/yale_smart_alarm/entity.py + homeassistant/components/yalexs_ble/__init__.py + homeassistant/components/yalexs_ble/binary_sensor.py + homeassistant/components/yalexs_ble/entity.py + homeassistant/components/yalexs_ble/lock.py + homeassistant/components/yalexs_ble/sensor.py + homeassistant/components/yalexs_ble/util.py + homeassistant/components/yamaha_musiccast/__init__.py + homeassistant/components/yamaha_musiccast/media_player.py + homeassistant/components/yamaha_musiccast/number.py + homeassistant/components/yamaha_musiccast/select.py + homeassistant/components/yamaha_musiccast/switch.py + homeassistant/components/yandex_transport/sensor.py + homeassistant/components/yardian/__init__.py + homeassistant/components/yardian/coordinator.py + homeassistant/components/yardian/switch.py + homeassistant/components/yeelightsunflower/light.py + homeassistant/components/yi/camera.py + homeassistant/components/yolink/__init__.py + homeassistant/components/yolink/api.py + homeassistant/components/yolink/binary_sensor.py + homeassistant/components/yolink/climate.py + homeassistant/components/yolink/coordinator.py + homeassistant/components/yolink/cover.py + homeassistant/components/yolink/entity.py + homeassistant/components/yolink/light.py + homeassistant/components/yolink/lock.py + homeassistant/components/yolink/number.py + homeassistant/components/yolink/sensor.py + homeassistant/components/yolink/services.py + homeassistant/components/yolink/siren.py + homeassistant/components/yolink/switch.py + homeassistant/components/yolink/valve.py + homeassistant/components/zabbix/* + homeassistant/components/zamg/coordinator.py + homeassistant/components/zengge/light.py + homeassistant/components/zeroconf/usage.py + homeassistant/components/zestimate/sensor.py + homeassistant/components/zha/core/cluster_handlers/* + homeassistant/components/zha/core/device.py + homeassistant/components/zha/core/gateway.py + homeassistant/components/zha/core/helpers.py + homeassistant/components/zha/light.py + homeassistant/components/zha/websocket_api.py + homeassistant/components/zhong_hong/climate.py + homeassistant/components/ziggo_mediabox_xl/media_player.py + homeassistant/components/zoneminder/* + homeassistant/components/zwave_me/__init__.py + homeassistant/components/zwave_me/binary_sensor.py + homeassistant/components/zwave_me/button.py + homeassistant/components/zwave_me/climate.py + 
homeassistant/components/zwave_me/cover.py + homeassistant/components/zwave_me/fan.py + homeassistant/components/zwave_me/helpers.py + homeassistant/components/zwave_me/light.py + homeassistant/components/zwave_me/lock.py + homeassistant/components/zwave_me/number.py + homeassistant/components/zwave_me/sensor.py + homeassistant/components/zwave_me/siren.py + homeassistant/components/zwave_me/switch.py + + +[report] +# Regexes for lines to exclude from consideration +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain about missing debug-only code: + def __repr__ + + # Don't complain if tests don't hit defensive assertion code: + raise AssertionError + raise NotImplementedError + + # TYPE_CHECKING and @overload blocks are never executed during pytest run + if TYPE_CHECKING: + @overload diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 44c38afdec6..2b15a65ff1d 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -2,7 +2,7 @@ "name": "Home Assistant Dev", "context": "..", "dockerFile": "../Dockerfile.dev", - "postCreateCommand": "git config --global --add safe.directory ${containerWorkspaceFolder} && script/setup", + "postCreateCommand": "script/setup", "postStartCommand": "script/bootstrap", "containerEnv": { "PYTHONASYNCIODEBUG": "1" @@ -12,12 +12,7 @@ }, // Port 5683 udp is used by Shelly integration "appPort": ["8123:8123", "5683:5683/udp"], - "runArgs": [ - "-e", - "GIT_EDITOR=code --wait", - "--security-opt", - "label=disable" - ], + "runArgs": ["-e", "GIT_EDITOR=code --wait"], "customizations": { "vscode": { "extensions": [ @@ -58,13 +53,7 @@ ], "[python]": { "editor.defaultFormatter": "charliermarsh.ruff" - }, - "json.schemas": [ - { - "fileMatch": ["homeassistant/components/*/manifest.json"], - "url": "./script/json_schemas/manifest_schema.json" - } - ] + } } } } diff --git a/.dockerignore b/.dockerignore index cf975f4215f..7fde7f33fa5 100644 --- a/.dockerignore +++ b/.dockerignore @@ -7,7 +7,6 @@ docs # Development .devcontainer .vscode -.tool-versions # Test related files tests diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index 9deb34d20e9..ad3205c51c8 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1 +1,2 @@ -custom: https://www.openhomefoundation.org +custom: https://www.nabucasa.com +github: balloob diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 23365feffb7..d69b1ac0c7d 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -74,6 +74,7 @@ If the code communicates with devices, web services, or third-party tools: - [ ] New or updated dependencies have been added to `requirements_all.txt`. Updated by running `python3 -m script.gen_requirements_all`. - [ ] For the updated dependencies - a link to the changelog, or at minimum a diff between library versions is added to the PR description. +- [ ] Untested files have been added to `.coveragerc`. 
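For reference, the [report] exclude_lines entries in the .coveragerc hunk above are plain regexes matched against source lines; when a matched line introduces a block (a def, a decorator, or an `if TYPE_CHECKING:` clause), coverage.py drops the whole block from the report. A minimal illustrative module (hypothetical names, not a file from the tree) showing what each pattern skips:

# sketch.py - hypothetical module illustrating the exclude_lines patterns above
from typing import TYPE_CHECKING, overload

if TYPE_CHECKING:  # whole clause excluded: never executed during a pytest run
    from datetime import datetime


class Meter:
    """Toy class; only the names of the excluded constructs matter here."""

    def __init__(self, value: int) -> None:
        self._value = value

    def __repr__(self) -> str:  # excluded by the "def __repr__" pattern
        return f"Meter({self._value!r})"

    @overload
    def read(self, as_text: bool) -> str: ...  # "@overload" stubs excluded
    @overload
    def read(self) -> int: ...

    def read(self, as_text: bool = False):
        if as_text:
            return str(self._value)
        return self._value

    def reset(self) -> None:
        raise NotImplementedError  # excluded: defensive placeholder code

    def debug_dump(self) -> None:  # pragma: no cover
        print(self._value)  # excluded together with the def line above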
ssdp_confirm(None) --> ssdp_confirm({}) --> create_entry() # 2: user(None): scan --> user({...}) --> create_entry() - @staticmethod - @callback - def async_get_options_flow( - config_entry: ConfigEntry, - ) -> UpnpOptionsFlowHandler: - """Get the options flow for this handler.""" - return UpnpOptionsFlowHandler() - @property def _discoveries(self) -> dict[str, SsdpServiceInfo]: """Get current discoveries.""" @@ -265,14 +249,9 @@ class UpnpFlowHandler(ConfigFlow, domain=DOMAIN): CONFIG_ENTRY_HOST: discovery.ssdp_headers["_host"], CONFIG_ENTRY_LOCATION: get_preferred_location(discovery.ssdp_all_locations), } - options = { - CONFIG_ENTRY_FORCE_POLL: False, - } await self.async_set_unique_id(user_input["unique_id"], raise_on_progress=False) - return self.async_create_entry( - title=user_input["title"], data=data, options=options - ) + return self.async_create_entry(title=user_input["title"], data=data) async def _async_create_entry_from_discovery( self, @@ -294,30 +273,4 @@ class UpnpFlowHandler(ConfigFlow, domain=DOMAIN): CONFIG_ENTRY_MAC_ADDRESS: mac_address, CONFIG_ENTRY_HOST: discovery.ssdp_headers["_host"], } - options = { - CONFIG_ENTRY_FORCE_POLL: False, - } - return self.async_create_entry(title=title, data=data, options=options) - - -class UpnpOptionsFlowHandler(OptionsFlow): - """Handle an options flow.""" - - async def async_step_init( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle options flow.""" - if user_input is not None: - return self.async_create_entry(title="", data=user_input) - - data_schema = vol.Schema( - { - vol.Optional( - CONFIG_ENTRY_FORCE_POLL, - default=self.config_entry.options.get( - CONFIG_ENTRY_FORCE_POLL, DEFAULT_CONFIG_ENTRY_FORCE_POLL - ), - ): bool, - } - ) - return self.async_show_form(step_id="init", data_schema=data_schema) + return self.async_create_entry(title=title, data=data) diff --git a/homeassistant/components/upnp/const.py b/homeassistant/components/upnp/const.py index d85675d8a4d..e7b44329546 100644 --- a/homeassistant/components/upnp/const.py +++ b/homeassistant/components/upnp/const.py @@ -21,10 +21,8 @@ TIMESTAMP = "timestamp" DATA_PACKETS = "packets" DATA_RATE_PACKETS_PER_SECOND = f"{DATA_PACKETS}/{UnitOfTime.SECONDS}" WAN_STATUS = "wan_status" -PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4 = "port_mapping_number_of_entries" ROUTER_IP = "ip" ROUTER_UPTIME = "uptime" -CONFIG_ENTRY_FORCE_POLL = "force_poll" CONFIG_ENTRY_ST = "st" CONFIG_ENTRY_UDN = "udn" CONFIG_ENTRY_ORIGINAL_UDN = "original_udn" @@ -34,6 +32,5 @@ CONFIG_ENTRY_HOST = "host" IDENTIFIER_HOST = "upnp_host" IDENTIFIER_SERIAL_NUMBER = "upnp_serial_number" DEFAULT_SCAN_INTERVAL = timedelta(seconds=30).total_seconds() -DEFAULT_CONFIG_ENTRY_FORCE_POLL = False ST_IGD_V1 = "urn:schemas-upnp-org:device:InternetGatewayDevice:1" ST_IGD_V2 = "urn:schemas-upnp-org:device:InternetGatewayDevice:2" diff --git a/homeassistant/components/upnp/coordinator.py b/homeassistant/components/upnp/coordinator.py index 37ff700bfe2..72e14ecc4ff 100644 --- a/homeassistant/components/upnp/coordinator.py +++ b/homeassistant/components/upnp/coordinator.py @@ -1,7 +1,5 @@ """UPnP/IGD coordinator.""" -from collections import defaultdict -from collections.abc import Callable from datetime import datetime, timedelta from async_upnp_client.exceptions import UpnpCommunicationError @@ -29,7 +27,6 @@ class UpnpDataUpdateCoordinator( """Initialize.""" self.device = device self.device_entry = device_entry - self._features_by_entity_id: defaultdict[str, set[str]] = defaultdict(set) 
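The removed options flow above only stores CONFIG_ENTRY_FORCE_POLL in entry.options; something still has to read it back and push it into the Device (the integration's actual __init__.py is outside this excerpt). A hedged sketch of that wiring, reusing the async_set_force_poll helper from the device.py hunk further down; the hass.data layout and function names here are assumptions, not the integration's real setup code:

# Hypothetical consumer of the force_poll option (illustration only).
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

DOMAIN = "upnp"
CONFIG_ENTRY_FORCE_POLL = "force_poll"       # from const.py above
DEFAULT_CONFIG_ENTRY_FORCE_POLL = False      # from const.py above


async def _async_options_updated(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Re-apply options whenever the user saves the options form."""
    force_poll = entry.options.get(
        CONFIG_ENTRY_FORCE_POLL, DEFAULT_CONFIG_ENTRY_FORCE_POLL
    )
    device = hass.data[DOMAIN][entry.entry_id]  # hypothetical storage layout
    await device.async_set_force_poll(force_poll)


def register_options_listener(entry: ConfigEntry) -> None:
    """Sketch of the async_setup_entry side: re-run on options changes."""
    entry.async_on_unload(entry.add_update_listener(_async_options_updated))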
super().__init__( hass, @@ -38,34 +35,12 @@ class UpnpDataUpdateCoordinator( update_interval=update_interval, ) - def register_entity(self, key: str, entity_id: str) -> Callable[[], None]: - """Register an entity.""" - self._features_by_entity_id[key].add(entity_id) - - def unregister_entity() -> None: - """Unregister entity.""" - self._features_by_entity_id[key].remove(entity_id) - - if not self._features_by_entity_id[key]: - del self._features_by_entity_id[key] - - return unregister_entity - - @property - def _entity_description_keys(self) -> list[str] | None: - """Return a list of entity description keys for which data is required.""" - if not self._features_by_entity_id: - # Must be the first update, no entities attached/enabled yet. - return None - - return list(self._features_by_entity_id) - async def _async_update_data( self, ) -> dict[str, str | datetime | int | float | None]: """Update data.""" try: - return await self.device.async_get_data(self._entity_description_keys) + return await self.device.async_get_data() except UpnpCommunicationError as exception: LOGGER.debug( "Caught exception when updating device: %s, exception: %s", diff --git a/homeassistant/components/upnp/device.py b/homeassistant/components/upnp/device.py index 7067d1d2e1a..bb0bcfc6a6e 100644 --- a/homeassistant/components/upnp/device.py +++ b/homeassistant/components/upnp/device.py @@ -8,12 +8,9 @@ from ipaddress import ip_address from typing import Any from urllib.parse import urlparse -from async_upnp_client.aiohttp import AiohttpNotifyServer, AiohttpSessionRequester +from async_upnp_client.aiohttp import AiohttpSessionRequester from async_upnp_client.client_factory import UpnpFactory -from async_upnp_client.const import AddressTupleVXType -from async_upnp_client.exceptions import UpnpCommunicationError -from async_upnp_client.profiles.igd import IgdDevice, IgdStateItem -from async_upnp_client.utils import async_get_local_ip +from async_upnp_client.profiles.igd import IgdDevice from getmac import get_mac_address from homeassistant.core import HomeAssistant @@ -30,28 +27,12 @@ from .const import ( PACKETS_PER_SEC_SENT, PACKETS_RECEIVED, PACKETS_SENT, - PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4, ROUTER_IP, ROUTER_UPTIME, TIMESTAMP, WAN_STATUS, ) -TYPE_STATE_ITEM_MAPPING = { - BYTES_RECEIVED: IgdStateItem.BYTES_RECEIVED, - BYTES_SENT: IgdStateItem.BYTES_SENT, - KIBIBYTES_PER_SEC_RECEIVED: IgdStateItem.KIBIBYTES_PER_SEC_RECEIVED, - KIBIBYTES_PER_SEC_SENT: IgdStateItem.KIBIBYTES_PER_SEC_SENT, - PACKETS_PER_SEC_RECEIVED: IgdStateItem.PACKETS_PER_SEC_RECEIVED, - PACKETS_PER_SEC_SENT: IgdStateItem.PACKETS_PER_SEC_SENT, - PACKETS_RECEIVED: IgdStateItem.PACKETS_RECEIVED, - PACKETS_SENT: IgdStateItem.PACKETS_SENT, - ROUTER_IP: IgdStateItem.EXTERNAL_IP_ADDRESS, - ROUTER_UPTIME: IgdStateItem.UPTIME, - WAN_STATUS: IgdStateItem.CONNECTION_STATUS, - PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4: IgdStateItem.PORT_MAPPING_NUMBER_OF_ENTRIES, -} - def get_preferred_location(locations: set[str]) -> str: """Get the preferred location (an IPv4 location) from a set of locations.""" @@ -83,43 +64,26 @@ async def async_get_mac_address_from_host(hass: HomeAssistant, host: str) -> str return mac_address -async def async_create_device( - hass: HomeAssistant, location: str, force_poll: bool -) -> Device: +async def async_create_device(hass: HomeAssistant, location: str) -> Device: """Create UPnP/IGD device.""" session = async_get_clientsession(hass, verify_ssl=False) requester = AiohttpSessionRequester(session, with_sleep=True, timeout=20) - # Create 
UPnP device. factory = UpnpFactory(requester, non_strict=True) upnp_device = await factory.async_create_device(location) - # Create notify server. - _, local_ip = await async_get_local_ip(location) - source: AddressTupleVXType = (local_ip, 0) - notify_server = AiohttpNotifyServer( - requester=requester, - source=source, - ) - await notify_server.async_start_server() - _LOGGER.debug("Started event handler at %s", notify_server.callback_url) - # Create profile wrapper. - igd_device = IgdDevice(upnp_device, notify_server.event_handler) - return Device(hass, igd_device, force_poll) + igd_device = IgdDevice(upnp_device, None) + return Device(hass, igd_device) class Device: """Home Assistant representation of a UPnP/IGD device.""" - def __init__( - self, hass: HomeAssistant, igd_device: IgdDevice, force_poll: bool - ) -> None: + def __init__(self, hass: HomeAssistant, igd_device: IgdDevice) -> None: """Initialize UPnP/IGD device.""" self.hass = hass self._igd_device = igd_device - self._force_poll = force_poll - self.coordinator: ( DataUpdateCoordinator[dict[str, str | datetime | int | float | None]] | None ) = None @@ -187,57 +151,11 @@ class Device: """Get string representation.""" return f"IGD Device: {self.name}/{self.udn}::{self.device_type}" - @property - def force_poll(self) -> bool: - """Get force_poll.""" - return self._force_poll - - async def async_set_force_poll(self, force_poll: bool) -> None: - """Set force_poll, and (un)subscribe if needed.""" - self._force_poll = force_poll - - if self._force_poll: - # No need for subscriptions, as eventing will never be used. - await self.async_unsubscribe_services() - elif not self._force_poll and not self._igd_device.is_subscribed: - await self.async_subscribe_services() - - async def async_subscribe_services(self) -> None: - """Subscribe to services.""" - try: - await self._igd_device.async_subscribe_services(auto_resubscribe=True) - except UpnpCommunicationError as ex: - _LOGGER.debug( - "Error subscribing to services, falling back to forced polling: %s", ex - ) - await self.async_set_force_poll(True) - - async def async_unsubscribe_services(self) -> None: - """Unsubscribe from services.""" - try: - await self._igd_device.async_unsubscribe_services() - except UpnpCommunicationError as ex: - _LOGGER.debug("Error unsubscribing to services: %s", ex) - - async def async_get_data( - self, entity_description_keys: list[str] | None - ) -> dict[str, str | datetime | int | float | None]: + async def async_get_data(self) -> dict[str, str | datetime | int | float | None]: """Get all data from device.""" - if not entity_description_keys: - igd_state_items = None - else: - igd_state_items = { - TYPE_STATE_ITEM_MAPPING[key] for key in entity_description_keys - } - - _LOGGER.debug( - "Getting data for device: %s, state_items: %s, force_poll: %s", - self, - igd_state_items, - self._force_poll, - ) + _LOGGER.debug("Getting data for device: %s", self) igd_state = await self._igd_device.async_get_traffic_and_status_data( - igd_state_items, force_poll=self._force_poll + force_poll=True ) def get_value(value: Any) -> Any: @@ -259,7 +177,4 @@ class Device: KIBIBYTES_PER_SEC_SENT: igd_state.kibibytes_per_sec_sent, PACKETS_PER_SEC_RECEIVED: igd_state.packets_per_sec_received, PACKETS_PER_SEC_SENT: igd_state.packets_per_sec_sent, - PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4: get_value( - igd_state.port_mapping_number_of_entries - ), } diff --git a/homeassistant/components/upnp/icons.json b/homeassistant/components/upnp/icons.json index b6451f0fca8..1d4ebaf183d 100644 --- 
a/homeassistant/components/upnp/icons.json +++ b/homeassistant/components/upnp/icons.json @@ -33,9 +33,6 @@ }, "packet_upload_speed": { "default": "mdi:server-network" - }, - "port_mapping_number_of_entries_ipv4": { - "default": "mdi:server-network" } } } diff --git a/homeassistant/components/upnp/manifest.json b/homeassistant/components/upnp/manifest.json index b0b4fe35b39..b2972fc7790 100644 --- a/homeassistant/components/upnp/manifest.json +++ b/homeassistant/components/upnp/manifest.json @@ -8,7 +8,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.41.0", "getmac==0.9.4"], + "requirements": ["async-upnp-client==0.39.0", "getmac==0.9.4"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:InternetGatewayDevice:1" diff --git a/homeassistant/components/upnp/sensor.py b/homeassistant/components/upnp/sensor.py index aae2f8308c1..df7128830b3 100644 --- a/homeassistant/components/upnp/sensor.py +++ b/homeassistant/components/upnp/sensor.py @@ -33,7 +33,6 @@ from .const import ( PACKETS_PER_SEC_SENT, PACKETS_RECEIVED, PACKETS_SENT, - PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4, ROUTER_IP, ROUTER_UPTIME, WAN_STATUS, @@ -89,7 +88,6 @@ SENSOR_DESCRIPTIONS: tuple[UpnpSensorEntityDescription, ...] = ( UpnpSensorEntityDescription( key=ROUTER_UPTIME, translation_key="uptime", - device_class=SensorDeviceClass.DURATION, native_unit_of_measurement=UnitOfTime.SECONDS, entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, @@ -101,12 +99,6 @@ SENSOR_DESCRIPTIONS: tuple[UpnpSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - UpnpSensorEntityDescription( - key=PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4, - translation_key="port_mapping_number_of_entries_ipv4", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), UpnpSensorEntityDescription( key=BYTES_RECEIVED, translation_key="download_speed", @@ -167,8 +159,8 @@ async def async_setup_entry( if coordinator.data.get(entity_description.key) is not None ] + LOGGER.debug("Adding sensor entities: %s", entities) async_add_entities(entities) - LOGGER.debug("Added sensor entities: %s", entities) class UpnpSensor(UpnpEntity, SensorEntity): @@ -182,13 +174,3 @@ class UpnpSensor(UpnpEntity, SensorEntity): if (key := self.entity_description.value_key) is None: return None return self.coordinator.data[key] - - async def async_added_to_hass(self) -> None: - """Subscribe to updates.""" - await super().async_added_to_hass() - - # Register self at coordinator. 
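Pulled together from the register_entity hunk in coordinator.py above and the async_added_to_hass hunk around this point in sensor.py, a condensed standalone sketch of the registration pattern being removed (simplified classes, not the real coordinator or entity):

# Standalone sketch of per-entity feature registration (simplified).
from collections import defaultdict
from collections.abc import Callable


class FeatureRegistry:
    """Track which entity description keys currently have live entities."""

    def __init__(self) -> None:
        self._features_by_entity_id: defaultdict[str, set[str]] = defaultdict(set)

    def register_entity(self, key: str, entity_id: str) -> Callable[[], None]:
        """Register an entity and return a callback that undoes it."""
        self._features_by_entity_id[key].add(entity_id)

        def unregister_entity() -> None:
            self._features_by_entity_id[key].remove(entity_id)
            if not self._features_by_entity_id[key]:
                del self._features_by_entity_id[key]

        return unregister_entity

    @property
    def entity_description_keys(self) -> list[str] | None:
        """None means no entities are attached yet, so fetch everything."""
        if not self._features_by_entity_id:
            return None
        return list(self._features_by_entity_id)


registry = FeatureRegistry()
unregister = registry.register_entity("wan_status", "sensor.router_wan_status")
assert registry.entity_description_keys == ["wan_status"]
unregister()
assert registry.entity_description_keys is None

On the entity side, async_added_to_hass passes the returned callback to self.async_on_remove(...), so the coordinator stops requesting that state item once the entity is removed.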
- key = self.entity_description.key - entity_id = self.entity_id - unregister = self.coordinator.register_entity(key, entity_id) - self.async_on_remove(unregister) diff --git a/homeassistant/components/upnp/strings.json b/homeassistant/components/upnp/strings.json index bb414fa95f8..7ce1798c351 100644 --- a/homeassistant/components/upnp/strings.json +++ b/homeassistant/components/upnp/strings.json @@ -21,8 +21,7 @@ "step": { "init": { "data": { - "scan_interval": "Update interval (seconds, minimal 30)", - "force_poll": "Force polling of all data" + "scan_interval": "Update interval (seconds, minimal 30)" } } } @@ -66,9 +65,6 @@ }, "wan_status": { "name": "WAN status" - }, - "port_mapping_number_of_entries_ipv4": { - "name": "Number of port mapping entries (IPv4)" } } } diff --git a/homeassistant/components/usb/__init__.py b/homeassistant/components/usb/__init__.py index 2da72d16ac6..d4201d7f284 100644 --- a/homeassistant/components/usb/__init__.py +++ b/homeassistant/components/usb/__init__.py @@ -16,7 +16,7 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.components import websocket_api -from homeassistant.components.websocket_api import ActiveConnection +from homeassistant.components.websocket_api.connection import ActiveConnection from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP from homeassistant.core import ( CALLBACK_TYPE, diff --git a/homeassistant/components/usgs_earthquakes_feed/geo_location.py b/homeassistant/components/usgs_earthquakes_feed/geo_location.py index aa9817eab7d..33455dc11a9 100644 --- a/homeassistant/components/usgs_earthquakes_feed/geo_location.py +++ b/homeassistant/components/usgs_earthquakes_feed/geo_location.py @@ -276,17 +276,17 @@ class UsgsEarthquakesEvent(GeolocationEvent): @property def extra_state_attributes(self) -> dict[str, Any]: """Return the device state attributes.""" - return { - key: value - for key, value in ( - (ATTR_EXTERNAL_ID, self._external_id), - (ATTR_PLACE, self._place), - (ATTR_MAGNITUDE, self._magnitude), - (ATTR_TIME, self._time), - (ATTR_UPDATED, self._updated), - (ATTR_STATUS, self._status), - (ATTR_TYPE, self._type), - (ATTR_ALERT, self._alert), - ) - if value or isinstance(value, bool) - } + attributes = {} + for key, value in ( + (ATTR_EXTERNAL_ID, self._external_id), + (ATTR_PLACE, self._place), + (ATTR_MAGNITUDE, self._magnitude), + (ATTR_TIME, self._time), + (ATTR_UPDATED, self._updated), + (ATTR_STATUS, self._status), + (ATTR_TYPE, self._type), + (ATTR_ALERT, self._alert), + ): + if value or isinstance(value, bool): + attributes[key] = value + return attributes diff --git a/homeassistant/components/utility_meter/icons.json b/homeassistant/components/utility_meter/icons.json index 2539b73d168..3c447b4a810 100644 --- a/homeassistant/components/utility_meter/icons.json +++ b/homeassistant/components/utility_meter/icons.json @@ -12,11 +12,7 @@ } }, "services": { - "reset": { - "service": "mdi:numeric-0-box-outline" - }, - "calibrate": { - "service": "mdi:auto-fix" - } + "reset": "mdi:numeric-0-box-outline", + "calibrate": "mdi:auto-fix" } } diff --git a/homeassistant/components/utility_meter/manifest.json b/homeassistant/components/utility_meter/manifest.json index 31a2d4e9584..25e803e6a2d 100644 --- a/homeassistant/components/utility_meter/manifest.json +++ b/homeassistant/components/utility_meter/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["croniter"], "quality_scale": "internal", - "requirements": ["cronsim==2.6"] + 
"requirements": ["croniter==2.0.2"] } diff --git a/homeassistant/components/utility_meter/select.py b/homeassistant/components/utility_meter/select.py index 5815ce7ec95..d5b1206d046 100644 --- a/homeassistant/components/utility_meter/select.py +++ b/homeassistant/components/utility_meter/select.py @@ -6,7 +6,7 @@ import logging from homeassistant.components.select import SelectEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_NAME, CONF_UNIQUE_ID +from homeassistant.const import CONF_UNIQUE_ID from homeassistant.core import HomeAssistant from homeassistant.helpers.device import async_device_info_to_link_from_entity from homeassistant.helpers.device_registry import DeviceInfo @@ -36,9 +36,9 @@ async def async_setup_entry( ) tariff_select = TariffSelect( - name=name, - tariffs=tariffs, - unique_id=unique_id, + name, + tariffs, + unique_id, device_info=device_info, ) async_add_entities([tariff_select]) @@ -62,15 +62,13 @@ async def async_setup_platform( conf_meter_unique_id: str | None = hass.data[DATA_UTILITY][meter].get( CONF_UNIQUE_ID ) - conf_meter_name = hass.data[DATA_UTILITY][meter].get(CONF_NAME, meter) async_add_entities( [ TariffSelect( - name=conf_meter_name, - tariffs=discovery_info[CONF_TARIFFS], - yaml_slug=meter, - unique_id=conf_meter_unique_id, + meter, + discovery_info[CONF_TARIFFS], + conf_meter_unique_id, ) ] ) @@ -84,16 +82,12 @@ class TariffSelect(SelectEntity, RestoreEntity): def __init__( self, name, - tariffs: list[str], - *, - yaml_slug: str | None = None, - unique_id: str | None = None, + tariffs, + unique_id, device_info: DeviceInfo | None = None, ) -> None: """Initialize a tariff selector.""" self._attr_name = name - if yaml_slug: # Backwards compatibility with YAML configuration entries - self.entity_id = f"select.{yaml_slug}" self._attr_unique_id = unique_id self._attr_device_info = device_info self._current_tariff: str | None = None diff --git a/homeassistant/components/utility_meter/sensor.py b/homeassistant/components/utility_meter/sensor.py index 19ef3c1f3a8..6b8c07c7ef7 100644 --- a/homeassistant/components/utility_meter/sensor.py +++ b/homeassistant/components/utility_meter/sensor.py @@ -9,7 +9,7 @@ from decimal import Decimal, DecimalException, InvalidOperation import logging from typing import Any, Self -from cronsim import CronSim +from croniter import croniter import voluptuous as vol from homeassistant.components.sensor import ( @@ -379,13 +379,14 @@ class UtilityMeterSensor(RestoreSensor): self.entity_id = suggested_entity_id self._parent_meter = parent_meter self._sensor_source_id = source_entity + self._state = None self._last_period = Decimal(0) self._last_reset = dt_util.utcnow() self._last_valid_state = None self._collecting = None - self._attr_name = name + self._name = name self._input_device_class = None - self._attr_native_unit_of_measurement = None + self._unit_of_measurement = None self._period = meter_type if meter_type is not None: # For backwards compatibility reasons we convert the period and offset into a cron pattern @@ -404,22 +405,12 @@ class UtilityMeterSensor(RestoreSensor): self._tariff = tariff self._tariff_entity = tariff_entity self._next_reset = None - self.scheduler = ( - CronSim( - self._cron_pattern, - dt_util.now( - dt_util.get_default_time_zone() - ), # we need timezone for DST purposes (see issue #102984) - ) - if self._cron_pattern - else None - ) def start(self, attributes: Mapping[str, Any]) -> None: """Initialize unit and state upon source initial update.""" 
self._input_device_class = attributes.get(ATTR_DEVICE_CLASS) - self._attr_native_unit_of_measurement = attributes.get(ATTR_UNIT_OF_MEASUREMENT) - self._attr_native_value = 0 + self._unit_of_measurement = attributes.get(ATTR_UNIT_OF_MEASUREMENT) + self._state = 0 self.async_write_ha_state() @staticmethod @@ -494,13 +485,13 @@ class UtilityMeterSensor(RestoreSensor): ) return - if self.native_value is None: + if self._state is None: # First state update initializes the utility_meter sensors for sensor in self.hass.data[DATA_UTILITY][self._parent_meter][ DATA_TARIFF_SENSORS ]: sensor.start(new_state_attributes) - if self.native_unit_of_measurement is None: + if self._unit_of_measurement is None: _LOGGER.warning( "Source sensor %s has no unit of measurement. Please %s", self._sensor_source_id, @@ -511,12 +502,10 @@ class UtilityMeterSensor(RestoreSensor): adjustment := self.calculate_adjustment(old_state, new_state) ) is not None and (self._sensor_net_consumption or adjustment >= 0): # If net_consumption is off, the adjustment must be non-negative - self._attr_native_value += adjustment # type: ignore[operator] # self._attr_native_value will be set to by the start function if it is None, therefore it always has a valid Decimal value at this line + self._state += adjustment # type: ignore[operator] # self._state will be set to by the start function if it is None, therefore it always has a valid Decimal value at this line self._input_device_class = new_state_attributes.get(ATTR_DEVICE_CLASS) - self._attr_native_unit_of_measurement = new_state_attributes.get( - ATTR_UNIT_OF_MEASUREMENT - ) + self._unit_of_measurement = new_state_attributes.get(ATTR_UNIT_OF_MEASUREMENT) self._last_valid_state = new_state_val self.async_write_ha_state() @@ -545,7 +534,7 @@ class UtilityMeterSensor(RestoreSensor): _LOGGER.debug( "%s - %s - source <%s>", - self.name, + self._name, COLLECTING if self._collecting is not None else PAUSED, self._sensor_source_id, ) @@ -554,10 +543,11 @@ class UtilityMeterSensor(RestoreSensor): async def _program_reset(self): """Program the reset of the utility meter.""" - if self.scheduler: - self._next_reset = next(self.scheduler) - - _LOGGER.debug("Next reset of %s is %s", self.entity_id, self._next_reset) + if self._cron_pattern is not None: + tz = dt_util.get_default_time_zone() + self._next_reset = croniter(self._cron_pattern, dt_util.now(tz)).get_next( + datetime + ) # we need timezone for DST purposes (see issue #102984) self.async_on_remove( async_track_point_in_time( self.hass, @@ -585,16 +575,14 @@ class UtilityMeterSensor(RestoreSensor): return _LOGGER.debug("Reset utility meter <%s>", self.entity_id) self._last_reset = dt_util.utcnow() - self._last_period = ( - Decimal(self.native_value) if self.native_value else Decimal(0) - ) - self._attr_native_value = 0 + self._last_period = Decimal(self._state) if self._state else Decimal(0) + self._state = 0 self.async_write_ha_state() async def async_calibrate(self, value): """Calibrate the Utility Meter with a given value.""" - _LOGGER.debug("Calibrate %s = %s type(%s)", self.name, value, type(value)) - self._attr_native_value = Decimal(str(value)) + _LOGGER.debug("Calibrate %s = %s type(%s)", self._name, value, type(value)) + self._state = Decimal(str(value)) self.async_write_ha_state() async def async_added_to_hass(self): @@ -610,11 +598,10 @@ class UtilityMeterSensor(RestoreSensor): ) if (last_sensor_data := await self.async_get_last_sensor_data()) is not None: - self._attr_native_value = last_sensor_data.native_value + # new 
introduced in 2022.04 + self._state = last_sensor_data.native_value self._input_device_class = last_sensor_data.input_device_class - self._attr_native_unit_of_measurement = ( - last_sensor_data.native_unit_of_measurement - ) + self._unit_of_measurement = last_sensor_data.native_unit_of_measurement self._last_period = last_sensor_data.last_period self._last_reset = last_sensor_data.last_reset self._last_valid_state = last_sensor_data.last_valid_state @@ -622,6 +609,39 @@ class UtilityMeterSensor(RestoreSensor): # Null lambda to allow cancelling the collection on tariff change self._collecting = lambda: None + elif state := await self.async_get_last_state(): + # legacy to be removed on 2022.10 (we are keeping this to avoid utility_meter counter losses) + try: + self._state = Decimal(state.state) + except InvalidOperation: + _LOGGER.error( + "Could not restore state <%s>. Resetting utility_meter.%s", + state.state, + self.name, + ) + else: + self._unit_of_measurement = state.attributes.get( + ATTR_UNIT_OF_MEASUREMENT + ) + self._last_period = ( + Decimal(state.attributes[ATTR_LAST_PERIOD]) + if state.attributes.get(ATTR_LAST_PERIOD) + and is_number(state.attributes[ATTR_LAST_PERIOD]) + else Decimal(0) + ) + self._last_valid_state = ( + Decimal(state.attributes[ATTR_LAST_VALID_STATE]) + if state.attributes.get(ATTR_LAST_VALID_STATE) + and is_number(state.attributes[ATTR_LAST_VALID_STATE]) + else None + ) + self._last_reset = dt_util.as_utc( + dt_util.parse_datetime(state.attributes.get(ATTR_LAST_RESET)) + ) + if state.attributes.get(ATTR_STATUS) == COLLECTING: + # Null lambda to allow cancelling the collection on tariff change + self._collecting = lambda: None + @callback def async_source_tracking(event): """Wait for source to be ready, then start meter.""" @@ -646,7 +666,7 @@ class UtilityMeterSensor(RestoreSensor): _LOGGER.debug( "<%s> collecting %s from %s", self.name, - self.native_unit_of_measurement, + self._unit_of_measurement, self._sensor_source_id, ) self._collecting = async_track_state_change_event( @@ -661,15 +681,22 @@ class UtilityMeterSensor(RestoreSensor): self._collecting() self._collecting = None + @property + def name(self): + """Return the name of the sensor.""" + return self._name + + @property + def native_value(self): + """Return the state of the sensor.""" + return self._state + @property def device_class(self): """Return the device class of the sensor.""" if self._input_device_class is not None: return self._input_device_class - if ( - self.native_unit_of_measurement - in DEVICE_CLASS_UNITS[SensorDeviceClass.ENERGY] - ): + if self._unit_of_measurement in DEVICE_CLASS_UNITS[SensorDeviceClass.ENERGY]: return SensorDeviceClass.ENERGY return None @@ -682,6 +709,11 @@ class UtilityMeterSensor(RestoreSensor): else SensorStateClass.TOTAL_INCREASING ) + @property + def native_unit_of_measurement(self): + """Return the unit the value is expressed in.""" + return self._unit_of_measurement + @property def extra_state_attributes(self): """Return the state attributes of the sensor.""" diff --git a/homeassistant/components/uvc/camera.py b/homeassistant/components/uvc/camera.py index a6f0202ee25..cd9594c7d31 100644 --- a/homeassistant/components/uvc/camera.py +++ b/homeassistant/components/uvc/camera.py @@ -5,11 +5,9 @@ from __future__ import annotations from datetime import datetime import logging import re -from typing import Any, cast +import requests from uvcclient import camera as uvc_camera, nvr -from uvcclient.camera import UVCCameraClient -from uvcclient.nvr import UVCRemote 
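The restore block in this hunk has two tiers: native sensor data saved by RestoreSensor (introduced in 2022.04) is preferred, and only if that is missing is the plain legacy state string parsed back into a Decimal, guarded against InvalidOperation. A pure-function sketch of just that decision; the helper name is hypothetical, not part of the integration:

# Sketch of the two-tier restore decision (hypothetical helper).
from __future__ import annotations

from decimal import Decimal, InvalidOperation


def restore_meter_value(
    native_value: Decimal | None, legacy_state: str | None
) -> Decimal | None:
    """Return the restored reading: new-style data first, legacy second."""
    if native_value is not None:
        return native_value  # RestoreSensor extra data (2022.04 and later)
    if legacy_state is None:
        return None
    try:
        return Decimal(legacy_state)  # legacy: the state string was the reading
    except InvalidOperation:
        return None  # unparsable (e.g. "unknown"): start the meter from scratch


assert restore_meter_value(None, "12.5") == Decimal("12.5")
assert restore_meter_value(Decimal("3"), "unknown") == Decimal("3")
assert restore_meter_value(None, "unknown") is None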
import voluptuous as vol from homeassistant.components.camera import ( @@ -59,11 +57,11 @@ def setup_platform( ssl = config[CONF_SSL] try: - nvrconn = nvr.UVCRemote(addr, port, key, ssl=ssl) # Exceptions may be raised in all method calls to the nvr library. + nvrconn = nvr.UVCRemote(addr, port, key, ssl=ssl) cameras = nvrconn.index() - identifier = nvrconn.camera_identifier + identifier = "id" if nvrconn.server_version >= (3, 2, 0) else "uuid" # Filter out airCam models, which are not supported in the latest # version of UnifiVideo and which are EOL by Ubiquiti cameras = [ @@ -77,12 +75,15 @@ def setup_platform( except nvr.NvrError as ex: _LOGGER.error("NVR refuses to talk to me: %s", str(ex)) raise PlatformNotReady from ex + except requests.exceptions.ConnectionError as ex: + _LOGGER.error("Unable to connect to NVR: %s", str(ex)) + raise PlatformNotReady from ex add_entities( - ( + [ UnifiVideoCamera(nvrconn, camera[identifier], camera["name"], password) for camera in cameras - ), + ], True, ) @@ -91,19 +92,24 @@ class UnifiVideoCamera(Camera): """A Ubiquiti Unifi Video Camera.""" _attr_should_poll = True # Cameras default to False - _attr_brand = "Ubiquiti" - _attr_is_streaming = False - _caminfo: dict[str, Any] - def __init__(self, camera: UVCRemote, uuid: str, name: str, password: str) -> None: + def __init__(self, camera, uuid, name, password): """Initialize an Unifi camera.""" super().__init__() self._nvr = camera - self._uuid = self._attr_unique_id = uuid - self._attr_name = name + self._uuid = uuid + self._name = name self._password = password - self._connect_addr: str | None = None - self._camera: UVCCameraClient | None = None + self._attr_is_streaming = False + self._connect_addr = None + self._camera = None + self._motion_status = False + self._caminfo = None + + @property + def name(self): + """Return the name of this camera.""" + return self._name @property def supported_features(self) -> CameraEntityFeature: @@ -116,7 +122,7 @@ class UnifiVideoCamera(Camera): return CameraEntityFeature(0) @property - def extra_state_attributes(self) -> dict[str, Any]: + def extra_state_attributes(self): """Return the camera state attributes.""" attr = {} if self.motion_detection_enabled: @@ -139,14 +145,24 @@ class UnifiVideoCamera(Camera): @property def motion_detection_enabled(self) -> bool: """Camera Motion Detection Status.""" - return bool(self._caminfo["recordingSettings"]["motionRecordEnabled"]) + return self._caminfo["recordingSettings"]["motionRecordEnabled"] @property - def model(self) -> str: - """Return the model of this camera.""" - return cast(str, self._caminfo["model"]) + def unique_id(self) -> str: + """Return a unique identifier for this client.""" + return self._uuid - def _login(self) -> bool: + @property + def brand(self): + """Return the brand of this camera.""" + return "Ubiquiti" + + @property + def model(self): + """Return the model of this camera.""" + return self._caminfo["model"] + + def _login(self): """Login to the camera.""" caminfo = self._caminfo if self._connect_addr: @@ -154,7 +170,6 @@ class UnifiVideoCamera(Camera): else: addrs = [caminfo["host"], caminfo["internalHost"]] - client_cls: type[uvc_camera.UVCCameraClient] if self._nvr.server_version >= (3, 2, 0): client_cls = uvc_camera.UVCCameraClientV320 else: @@ -163,14 +178,15 @@ class UnifiVideoCamera(Camera): if caminfo["username"] is None: caminfo["username"] = "ubnt" - assert isinstance(caminfo["username"], str) - camera = None for addr in addrs: try: camera = client_cls(addr, caminfo["username"], 
self._password) camera.login() - _LOGGER.debug("Logged into UVC camera %s via %s", self._attr_name, addr) + _LOGGER.debug( + "Logged into UVC camera %(name)s via %(addr)s", + {"name": self._name, "addr": addr}, + ) self._connect_addr = addr break except OSError: @@ -181,7 +197,7 @@ class UnifiVideoCamera(Camera): pass if not self._connect_addr: _LOGGER.error("Unable to login to camera") - return False + return None self._camera = camera self._caminfo = caminfo @@ -194,13 +210,11 @@ class UnifiVideoCamera(Camera): if not self._camera and not self._login(): return None - def _get_image(retry: bool = True) -> bytes | None: - assert self._camera is not None + def _get_image(retry=True): try: return self._camera.get_snapshot() except uvc_camera.CameraConnectError: _LOGGER.error("Unable to contact camera") - return None except uvc_camera.CameraAuthError: if retry: self._login() @@ -210,12 +224,13 @@ class UnifiVideoCamera(Camera): return _get_image() - def set_motion_detection(self, mode: bool) -> None: + def set_motion_detection(self, mode): """Set motion detection on or off.""" set_mode = "motion" if mode is True else "none" try: self._nvr.set_recordmode(self._uuid, set_mode) + self._motion_status = mode except nvr.NvrError as err: _LOGGER.error("Unable to set recordmode to %s", set_mode) _LOGGER.debug(err) @@ -228,19 +243,16 @@ class UnifiVideoCamera(Camera): """Disable motion detection in camera.""" self.set_motion_detection(False) - async def stream_source(self) -> str | None: + async def stream_source(self): """Return the source of the stream.""" for channel in self._caminfo["channels"]: if channel["isRtspEnabled"]: - return cast( - str, - next( - ( - uri - for i, uri in enumerate(channel["rtspUris"]) - if re.search(self._nvr._host, uri) # noqa: SLF001 - ) - ), + return next( + ( + uri + for i, uri in enumerate(channel["rtspUris"]) + if re.search(self._nvr._host, uri) # noqa: SLF001 + ) ) return None diff --git a/homeassistant/components/uvc/manifest.json b/homeassistant/components/uvc/manifest.json index c72b865b5ef..57e798c3fa6 100644 --- a/homeassistant/components/uvc/manifest.json +++ b/homeassistant/components/uvc/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/uvc", "iot_class": "local_polling", "loggers": ["uvcclient"], - "requirements": ["uvcclient==0.12.1"] + "requirements": ["uvcclient==0.11.0"] } diff --git a/homeassistant/components/v2c/icons.json b/homeassistant/components/v2c/icons.json index 6b0a41bf752..1b76b669956 100644 --- a/homeassistant/components/v2c/icons.json +++ b/homeassistant/components/v2c/icons.json @@ -21,15 +21,6 @@ }, "battery_power": { "default": "mdi:home-battery" - }, - "ssid": { - "default": "mdi:wifi" - }, - "ip_address": { - "default": "mdi:ip" - }, - "signal_status": { - "default": "mdi:signal" } }, "switch": { diff --git a/homeassistant/components/v2c/manifest.json b/homeassistant/components/v2c/manifest.json index 3a6eab0f335..ffe4b52ee6e 100644 --- a/homeassistant/components/v2c/manifest.json +++ b/homeassistant/components/v2c/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/v2c", "iot_class": "local_polling", - "requirements": ["pytrydan==0.8.0"] + "requirements": ["pytrydan==0.7.0"] } diff --git a/homeassistant/components/v2c/number.py b/homeassistant/components/v2c/number.py index 1540b098cf1..2ff70226132 100644 --- a/homeassistant/components/v2c/number.py +++ b/homeassistant/components/v2c/number.py @@ -13,7 +13,6 @@ from 
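# Illustrative standalone sketch, not part of the patch above: the UVC setup and
# login code keys both the camera identifier field and the client class off the
# NVR's version tuple. pick_camera_identifier is a hypothetical helper name.
def pick_camera_identifier(server_version: tuple[int, int, int]) -> str:
    """NVR firmware 3.2.0 and newer lists cameras by their "id" field; older firmware uses the "uuid" field."""
    return "id" if server_version >= (3, 2, 0) else "uuid"


assert pick_camera_identifier((3, 2, 0)) == "id"
assert pick_camera_identifier((3, 1, 9)) == "uuid"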
homeassistant.components.number import ( NumberEntity, NumberEntityDescription, ) -from homeassistant.const import EntityCategory, UnitOfElectricCurrent from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -38,34 +37,11 @@ TRYDAN_NUMBER_SETTINGS = ( key="intensity", translation_key="intensity", device_class=NumberDeviceClass.CURRENT, - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, native_min_value=MIN_INTENSITY, native_max_value=MAX_INTENSITY, value_fn=lambda evse_data: evse_data.intensity, update_fn=lambda evse, value: evse.intensity(value), ), - V2CSettingsNumberEntityDescription( - key="min_intensity", - translation_key="min_intensity", - device_class=NumberDeviceClass.CURRENT, - entity_category=EntityCategory.CONFIG, - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - native_min_value=MIN_INTENSITY, - native_max_value=MAX_INTENSITY, - value_fn=lambda evse_data: evse_data.min_intensity, - update_fn=lambda evse, value: evse.min_intensity(value), - ), - V2CSettingsNumberEntityDescription( - key="max_intensity", - translation_key="max_intensity", - device_class=NumberDeviceClass.CURRENT, - entity_category=EntityCategory.CONFIG, - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - native_min_value=MIN_INTENSITY, - native_max_value=MAX_INTENSITY, - value_fn=lambda evse_data: evse_data.max_intensity, - update_fn=lambda evse, value: evse.max_intensity(value), - ), ) diff --git a/homeassistant/components/v2c/sensor.py b/homeassistant/components/v2c/sensor.py index 97853740e9d..fc0cc0bfaa8 100644 --- a/homeassistant/components/v2c/sensor.py +++ b/homeassistant/components/v2c/sensor.py @@ -15,13 +15,7 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import ( - EntityCategory, - UnitOfElectricPotential, - UnitOfEnergy, - UnitOfPower, - UnitOfTime, -) +from homeassistant.const import UnitOfEnergy, UnitOfPower, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -51,20 +45,12 @@ TRYDAN_SENSORS = ( V2CSensorEntityDescription( key="charge_power", translation_key="charge_power", + icon="mdi:ev-station", native_unit_of_measurement=UnitOfPower.WATT, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.POWER, value_fn=lambda evse_data: evse_data.charge_power, ), - V2CSensorEntityDescription( - key="voltage_installation", - translation_key="voltage_installation", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - state_class=SensorStateClass.MEASUREMENT, - device_class=SensorDeviceClass.VOLTAGE, - value_fn=lambda evse_data: evse_data.voltage_installation, - entity_registry_enabled_default=False, - ), V2CSensorEntityDescription( key="charge_energy", translation_key="charge_energy", @@ -100,7 +86,6 @@ TRYDAN_SENSORS = ( V2CSensorEntityDescription( key="meter_error", translation_key="meter_error", - entity_category=EntityCategory.DIAGNOSTIC, value_fn=lambda evse_data: get_meter_value(evse_data.slave_error), entity_registry_enabled_default=False, device_class=SensorDeviceClass.ENUM, @@ -115,28 +100,6 @@ TRYDAN_SENSORS = ( value_fn=lambda evse_data: evse_data.battery_power, entity_registry_enabled_default=False, ), - V2CSensorEntityDescription( - key="ssid", - translation_key="ssid", - entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda evse_data: evse_data.SSID, - 
entity_registry_enabled_default=False, - ), - V2CSensorEntityDescription( - key="ip_address", - translation_key="ip_address", - entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda evse_data: evse_data.IP, - entity_registry_enabled_default=False, - ), - V2CSensorEntityDescription( - key="signal_status", - translation_key="signal_status", - entity_category=EntityCategory.DIAGNOSTIC, - state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda evse_data: evse_data.signal_status, - entity_registry_enabled_default=False, - ), ) diff --git a/homeassistant/components/v2c/strings.json b/homeassistant/components/v2c/strings.json index d52b8f066f9..3342652cfb4 100644 --- a/homeassistant/components/v2c/strings.json +++ b/homeassistant/components/v2c/strings.json @@ -33,21 +33,12 @@ "number": { "intensity": { "name": "Intensity" - }, - "max_intensity": { - "name": "Max intensity" - }, - "min_intensity": { - "name": "Min intensity" } }, "sensor": { "charge_power": { "name": "Charge power" }, - "voltage_installation": { - "name": "Installation voltage" - }, "charge_energy": { "name": "Charge energy" }, @@ -102,15 +93,6 @@ "empty_message": "Empty message", "undefined_error": "Undefined error" } - }, - "ssid": { - "name": "SSID" - }, - "ip_address": { - "name": "IP address" - }, - "signal_status": { - "name": "Signal status" } }, "switch": { diff --git a/homeassistant/components/v2c/switch.py b/homeassistant/components/v2c/switch.py index cca7da70e48..cd89e954275 100644 --- a/homeassistant/components/v2c/switch.py +++ b/homeassistant/components/v2c/switch.py @@ -111,12 +111,12 @@ class V2CSwitchEntity(V2CBaseEntity, SwitchEntity): """Return the state of the EVSE switch.""" return self.entity_description.value_fn(self.data) - async def async_turn_on(self, **kwargs: Any) -> None: + async def async_turn_on(self): """Turn on the EVSE switch.""" await self.entity_description.turn_on_fn(self.coordinator.evse) await self.coordinator.async_request_refresh() - async def async_turn_off(self, **kwargs: Any) -> None: + async def async_turn_off(self): """Turn off the EVSE switch.""" await self.entity_description.turn_off_fn(self.coordinator.evse) await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/vacuum/__init__.py b/homeassistant/components/vacuum/__init__.py index a81dbeacee1..90018e2d8cc 100644 --- a/homeassistant/components/vacuum/__init__.py +++ b/homeassistant/components/vacuum/__init__.py @@ -4,11 +4,10 @@ from __future__ import annotations from datetime import timedelta from enum import IntFlag -from functools import partial +from functools import cached_property, partial import logging from typing import Any -from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -24,24 +23,16 @@ from homeassistant.const import ( # noqa: F401 # STATE_PAUSED/IDLE are API ) from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.deprecation import ( - DeprecatedConstantEnum, - all_with_deprecated_constants, - check_if_deprecated_constant, - dir_with_deprecated_constants, -) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.icon import icon_for_battery_level from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass -from homeassistant.util.hass_dict import HassKey from .const import DOMAIN, 
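# Illustrative standalone sketch, not part of the patch above: the V2C number and
# sensor platforms describe each entity declaratively and attach its read
# behaviour as a callable on a frozen description. EvseData and Description are
# hypothetical stand-ins for the real dataclasses.
from collections.abc import Callable
from dataclasses import dataclass


@dataclass(frozen=True)
class EvseData:
    intensity: int
    charge_power: float


@dataclass(frozen=True)
class Description:
    key: str
    value_fn: Callable[[EvseData], float]


DESCRIPTIONS = (
    Description(key="intensity", value_fn=lambda data: data.intensity),
    Description(key="charge_power", value_fn=lambda data: data.charge_power),
)

data = EvseData(intensity=16, charge_power=3680.0)
assert [d.value_fn(data) for d in DESCRIPTIONS] == [16, 3680.0]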
STATE_CLEANING, STATE_DOCKED, STATE_ERROR, STATE_RETURNING _LOGGER = logging.getLogger(__name__) -DATA_COMPONENT: HassKey[EntityComponent[StateVacuumEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -91,38 +82,20 @@ class VacuumEntityFeature(IntFlag): # These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. # Please use the VacuumEntityFeature enum instead. -_DEPRECATED_SUPPORT_TURN_ON = DeprecatedConstantEnum( - VacuumEntityFeature.TURN_ON, "2025.10" -) -_DEPRECATED_SUPPORT_TURN_OFF = DeprecatedConstantEnum( - VacuumEntityFeature.TURN_OFF, "2025.10" -) -_DEPRECATED_SUPPORT_PAUSE = DeprecatedConstantEnum(VacuumEntityFeature.PAUSE, "2025.10") -_DEPRECATED_SUPPORT_STOP = DeprecatedConstantEnum(VacuumEntityFeature.STOP, "2025.10") -_DEPRECATED_SUPPORT_RETURN_HOME = DeprecatedConstantEnum( - VacuumEntityFeature.RETURN_HOME, "2025.10" -) -_DEPRECATED_SUPPORT_FAN_SPEED = DeprecatedConstantEnum( - VacuumEntityFeature.FAN_SPEED, "2025.10" -) -_DEPRECATED_SUPPORT_BATTERY = DeprecatedConstantEnum( - VacuumEntityFeature.BATTERY, "2025.10" -) -_DEPRECATED_SUPPORT_STATUS = DeprecatedConstantEnum( - VacuumEntityFeature.STATUS, "2025.10" -) -_DEPRECATED_SUPPORT_SEND_COMMAND = DeprecatedConstantEnum( - VacuumEntityFeature.SEND_COMMAND, "2025.10" -) -_DEPRECATED_SUPPORT_LOCATE = DeprecatedConstantEnum( - VacuumEntityFeature.LOCATE, "2025.10" -) -_DEPRECATED_SUPPORT_CLEAN_SPOT = DeprecatedConstantEnum( - VacuumEntityFeature.CLEAN_SPOT, "2025.10" -) -_DEPRECATED_SUPPORT_MAP = DeprecatedConstantEnum(VacuumEntityFeature.MAP, "2025.10") -_DEPRECATED_SUPPORT_STATE = DeprecatedConstantEnum(VacuumEntityFeature.STATE, "2025.10") -_DEPRECATED_SUPPORT_START = DeprecatedConstantEnum(VacuumEntityFeature.START, "2025.10") +SUPPORT_TURN_ON = 1 +SUPPORT_TURN_OFF = 2 +SUPPORT_PAUSE = 4 +SUPPORT_STOP = 8 +SUPPORT_RETURN_HOME = 16 +SUPPORT_FAN_SPEED = 32 +SUPPORT_BATTERY = 64 +SUPPORT_STATUS = 128 +SUPPORT_SEND_COMMAND = 256 +SUPPORT_LOCATE = 512 +SUPPORT_CLEAN_SPOT = 1024 +SUPPORT_MAP = 2048 +SUPPORT_STATE = 4096 +SUPPORT_START = 8192 # mypy: disallow-any-generics @@ -135,7 +108,7 @@ def is_on(hass: HomeAssistant, entity_id: str) -> bool: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the vacuum component.""" - component = hass.data[DATA_COMPONENT] = EntityComponent[StateVacuumEntity]( + component = hass.data[DOMAIN] = EntityComponent[StateVacuumEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) @@ -143,37 +116,37 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_START, - None, + {}, "async_start", [VacuumEntityFeature.START], ) component.async_register_entity_service( SERVICE_PAUSE, - None, + {}, "async_pause", [VacuumEntityFeature.PAUSE], ) component.async_register_entity_service( SERVICE_RETURN_TO_BASE, - None, + {}, "async_return_to_base", [VacuumEntityFeature.RETURN_HOME], ) component.async_register_entity_service( SERVICE_CLEAN_SPOT, - None, + {}, "async_clean_spot", [VacuumEntityFeature.CLEAN_SPOT], ) component.async_register_entity_service( SERVICE_LOCATE, - None, + {}, "async_locate", [VacuumEntityFeature.LOCATE], ) component.async_register_entity_service( SERVICE_STOP, - None, + {}, "async_stop", [VacuumEntityFeature.STOP], ) @@ -198,12 +171,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: 
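# Illustrative standalone sketch, not part of the patch above: the vacuum platform
# keeps plain integer SUPPORT_* constants alongside the VacuumEntityFeature
# IntFlag, and both are combined and tested with bitwise operations. Feature is a
# hypothetical stand-in enum reusing a few of the same values.
from enum import IntFlag


class Feature(IntFlag):
    TURN_ON = 1
    TURN_OFF = 2
    PAUSE = 4
    START = 8192


supported = Feature.START | Feature.PAUSE

# Feature checks are plain bit tests, so the legacy integer constants stay compatible.
assert supported & Feature.PAUSE
assert not supported & Feature.TURN_ON
assert supported & 8192  # the old SUPPORT_START value still matches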
"""Set up a config entry.""" - return await hass.data[DATA_COMPONENT].async_setup_entry(entry) + component: EntityComponent[StateVacuumEntity] = hass.data[DOMAIN] + return await component.async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - return await hass.data[DATA_COMPONENT].async_unload_entry(entry) + component: EntityComponent[StateVacuumEntity] = hass.data[DOMAIN] + return await component.async_unload_entry(entry) class StateVacuumEntityDescription(EntityDescription, frozen_or_thawed=True): @@ -406,13 +381,3 @@ class StateVacuumEntity( This method must be run in the event loop. """ await self.hass.async_add_executor_job(self.pause) - - -# As we import deprecated constants from the const module, we need to add these two functions -# otherwise this module will be logged for using deprecated constants and not the custom component -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/vacuum/icons.json b/homeassistant/components/vacuum/icons.json index 4169729efec..25f0cfd03ef 100644 --- a/homeassistant/components/vacuum/icons.json +++ b/homeassistant/components/vacuum/icons.json @@ -5,41 +5,17 @@ } }, "services": { - "clean_spot": { - "service": "mdi:target-variant" - }, - "locate": { - "service": "mdi:map-marker" - }, - "pause": { - "service": "mdi:pause" - }, - "return_to_base": { - "service": "mdi:home-import-outline" - }, - "send_command": { - "service": "mdi:send" - }, - "set_fan_speed": { - "service": "mdi:fan" - }, - "start": { - "service": "mdi:play" - }, - "start_pause": { - "service": "mdi:play-pause" - }, - "stop": { - "service": "mdi:stop" - }, - "toggle": { - "service": "mdi:play-pause" - }, - "turn_off": { - "service": "mdi:stop" - }, - "turn_on": { - "service": "mdi:play" - } + "clean_spot": "mdi:target-variant", + "locate": "mdi:map-marker", + "pause": "mdi:pause", + "return_to_base": "mdi:home-import-outline", + "send_command": "mdi:send", + "set_fan_speed": "mdi:fan", + "start": "mdi:play", + "start_pause": "mdi:play-pause", + "stop": "mdi:stop", + "toggle": "mdi:play-pause", + "turn_off": "mdi:stop", + "turn_on": "mdi:play" } } diff --git a/homeassistant/components/vallox/__init__.py b/homeassistant/components/vallox/__init__.py index ceb34bc6ff9..292786e4c0e 100644 --- a/homeassistant/components/vallox/__init__.py +++ b/homeassistant/components/vallox/__init__.py @@ -13,6 +13,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_NAME, Platform from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( DEFAULT_FAN_SPEED_AWAY, @@ -20,7 +22,6 @@ from .const import ( DEFAULT_FAN_SPEED_HOME, DEFAULT_NAME, DOMAIN, - I18N_KEY_TO_VALLOX_PROFILE, ) from .coordinator import ValloxDataUpdateCoordinator @@ -60,18 +61,6 @@ SERVICE_SCHEMA_SET_PROFILE_FAN_SPEED = vol.Schema( } ) -ATTR_PROFILE = "profile" -ATTR_DURATION = "duration" - -SERVICE_SCHEMA_SET_PROFILE = vol.Schema( - { - vol.Required(ATTR_PROFILE): vol.In(I18N_KEY_TO_VALLOX_PROFILE), - 
vol.Optional(ATTR_DURATION): vol.All( - vol.Coerce(int), vol.Clamp(min=1, max=65535) - ), - } -) - class ServiceMethodDetails(NamedTuple): """Details for SERVICE_TO_METHOD mapping.""" @@ -83,7 +72,6 @@ class ServiceMethodDetails(NamedTuple): SERVICE_SET_PROFILE_FAN_SPEED_HOME = "set_profile_fan_speed_home" SERVICE_SET_PROFILE_FAN_SPEED_AWAY = "set_profile_fan_speed_away" SERVICE_SET_PROFILE_FAN_SPEED_BOOST = "set_profile_fan_speed_boost" -SERVICE_SET_PROFILE = "set_profile" SERVICE_TO_METHOD = { SERVICE_SET_PROFILE_FAN_SPEED_HOME: ServiceMethodDetails( @@ -98,9 +86,6 @@ SERVICE_TO_METHOD = { method="async_set_profile_fan_speed_boost", schema=SERVICE_SCHEMA_SET_PROFILE_FAN_SPEED, ), - SERVICE_SET_PROFILE: ServiceMethodDetails( - method="async_set_profile", schema=SERVICE_SCHEMA_SET_PROFILE - ), } @@ -198,22 +183,6 @@ class ValloxServiceHandler: return False return True - async def async_set_profile( - self, profile: str, duration: int | None = None - ) -> bool: - """Activate profile for given duration.""" - _LOGGER.debug("Activating profile %s for %s min", profile, duration) - try: - await self._client.set_profile( - I18N_KEY_TO_VALLOX_PROFILE[profile], duration - ) - except ValloxApiException as err: - _LOGGER.error( - "Error setting profile %d for duration %s: %s", profile, duration, err - ) - return False - return True - async def async_handle(self, call: ServiceCall) -> None: """Dispatch a service call.""" service_details = SERVICE_TO_METHOD.get(call.service) @@ -232,3 +201,24 @@ class ValloxServiceHandler: # be observed by all parties involved. if result: await self._coordinator.async_request_refresh() + + +class ValloxEntity(CoordinatorEntity[ValloxDataUpdateCoordinator]): + """Representation of a Vallox entity.""" + + _attr_has_entity_name = True + + def __init__(self, name: str, coordinator: ValloxDataUpdateCoordinator) -> None: + """Initialize a Vallox entity.""" + super().__init__(coordinator) + + self._device_uuid = self.coordinator.data.uuid + assert self.coordinator.config_entry is not None + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, str(self._device_uuid))}, + manufacturer=DEFAULT_NAME, + model=self.coordinator.data.model, + name=name, + sw_version=self.coordinator.data.sw_version, + configuration_url=f"http://{self.coordinator.config_entry.data[CONF_HOST]}", + ) diff --git a/homeassistant/components/vallox/binary_sensor.py b/homeassistant/components/vallox/binary_sensor.py index 4a0efc7b101..20593fa4402 100644 --- a/homeassistant/components/vallox/binary_sensor.py +++ b/homeassistant/components/vallox/binary_sensor.py @@ -13,9 +13,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
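# Illustrative standalone sketch, not part of the patch above: the set_profile
# service schema validates the profile against a fixed mapping and coerces and
# clamps the optional duration. PROFILES is a placeholder for the real profile
# mapping; the voluptuous validators are the same ones used above.
import voluptuous as vol

PROFILES = {"home": 1, "away": 2, "boost": 3}

SCHEMA = vol.Schema(
    {
        vol.Required("profile"): vol.In(PROFILES),
        vol.Optional("duration"): vol.All(
            vol.Coerce(int), vol.Clamp(min=1, max=65535)
        ),
    }
)

assert SCHEMA({"profile": "boost", "duration": "30"}) == {"profile": "boost", "duration": 30}
assert SCHEMA({"profile": "home"}) == {"profile": "home"}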
import ValloxEntity from .const import DOMAIN from .coordinator import ValloxDataUpdateCoordinator -from .entity import ValloxEntity class ValloxBinarySensorEntity(ValloxEntity, BinarySensorEntity): diff --git a/homeassistant/components/vallox/config_flow.py b/homeassistant/components/vallox/config_flow.py index 30d1d153d9e..3660c641b7c 100644 --- a/homeassistant/components/vallox/config_flow.py +++ b/homeassistant/components/vallox/config_flow.py @@ -86,18 +86,20 @@ class ValloxConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfiguration of the Vallox device host address.""" - reconfigure_entry = self._get_reconfigure_entry() + entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + assert entry + if not user_input: return self.async_show_form( step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( - CONFIG_SCHEMA, {CONF_HOST: reconfigure_entry.data.get(CONF_HOST)} + CONFIG_SCHEMA, {CONF_HOST: entry.data.get(CONF_HOST)} ), ) updated_host = user_input[CONF_HOST] - if reconfigure_entry.data.get(CONF_HOST) != updated_host: + if entry.data.get(CONF_HOST) != updated_host: self._async_abort_entries_match({CONF_HOST: updated_host}) errors: dict[str, str] = {} @@ -113,7 +115,9 @@ class ValloxConfigFlow(ConfigFlow, domain=DOMAIN): errors[CONF_HOST] = "unknown" else: return self.async_update_reload_and_abort( - reconfigure_entry, data_updates={CONF_HOST: updated_host} + entry, + data={**entry.data, CONF_HOST: updated_host}, + reason="reconfigure_successful", ) return self.async_show_form( diff --git a/homeassistant/components/vallox/const.py b/homeassistant/components/vallox/const.py index 418f57a22c8..a2494c594f5 100644 --- a/homeassistant/components/vallox/const.py +++ b/homeassistant/components/vallox/const.py @@ -22,15 +22,14 @@ DEFAULT_FAN_SPEED_HOME = 50 DEFAULT_FAN_SPEED_AWAY = 25 DEFAULT_FAN_SPEED_BOOST = 65 -I18N_KEY_TO_VALLOX_PROFILE = { - "home": VALLOX_PROFILE.HOME, - "away": VALLOX_PROFILE.AWAY, - "boost": VALLOX_PROFILE.BOOST, - "fireplace": VALLOX_PROFILE.FIREPLACE, - "extra": VALLOX_PROFILE.EXTRA, +VALLOX_PROFILE_TO_PRESET_MODE_SETTABLE = { + VALLOX_PROFILE.HOME: "Home", + VALLOX_PROFILE.AWAY: "Away", + VALLOX_PROFILE.BOOST: "Boost", + VALLOX_PROFILE.FIREPLACE: "Fireplace", } -VALLOX_PROFILE_TO_PRESET_MODE = { +VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE = { VALLOX_PROFILE.HOME: "Home", VALLOX_PROFILE.AWAY: "Away", VALLOX_PROFILE.BOOST: "Boost", @@ -38,8 +37,8 @@ VALLOX_PROFILE_TO_PRESET_MODE = { VALLOX_PROFILE.EXTRA: "Extra", } -PRESET_MODE_TO_VALLOX_PROFILE = { - value: key for (key, value) in VALLOX_PROFILE_TO_PRESET_MODE.items() +PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE = { + value: key for (key, value) in VALLOX_PROFILE_TO_PRESET_MODE_SETTABLE.items() } VALLOX_CELL_STATE_TO_STR = { diff --git a/homeassistant/components/vallox/date.py b/homeassistant/components/vallox/date.py index 33c3ebb253c..0236117fd0f 100644 --- a/homeassistant/components/vallox/date.py +++ b/homeassistant/components/vallox/date.py @@ -12,9 +12,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
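# Illustrative standalone sketch, not part of the patch above: the Vallox
# constants keep a profile-to-preset mapping and derive the reverse lookup from
# it, so the two directions cannot drift apart. The numeric profile values here
# are placeholders.
PROFILE_TO_PRESET = {10: "Home", 20: "Away", 30: "Boost", 40: "Fireplace"}

PRESET_TO_PROFILE = {preset: profile for profile, preset in PROFILE_TO_PRESET.items()}

assert PRESET_TO_PROFILE["Boost"] == 30
assert PROFILE_TO_PRESET[PRESET_TO_PROFILE["Away"]] == "Away"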
import ValloxEntity from .const import DOMAIN from .coordinator import ValloxDataUpdateCoordinator -from .entity import ValloxEntity class ValloxFilterChangeDateEntity(ValloxEntity, DateEntity): diff --git a/homeassistant/components/vallox/entity.py b/homeassistant/components/vallox/entity.py deleted file mode 100644 index b0657c561a8..00000000000 --- a/homeassistant/components/vallox/entity.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Support for Vallox ventilation units.""" - -from __future__ import annotations - -from homeassistant.const import CONF_HOST -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity - -from .const import DEFAULT_NAME, DOMAIN -from .coordinator import ValloxDataUpdateCoordinator - - -class ValloxEntity(CoordinatorEntity[ValloxDataUpdateCoordinator]): - """Representation of a Vallox entity.""" - - _attr_has_entity_name = True - - def __init__(self, name: str, coordinator: ValloxDataUpdateCoordinator) -> None: - """Initialize a Vallox entity.""" - super().__init__(coordinator) - - self._device_uuid = self.coordinator.data.uuid - assert self.coordinator.config_entry is not None - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, str(self._device_uuid))}, - manufacturer=DEFAULT_NAME, - model=self.coordinator.data.model, - name=name, - sw_version=self.coordinator.data.sw_version, - configuration_url=f"http://{self.coordinator.config_entry.data[CONF_HOST]}", - ) diff --git a/homeassistant/components/vallox/fan.py b/homeassistant/components/vallox/fan.py index 5fac46177cb..a5bdf0983ae 100644 --- a/homeassistant/components/vallox/fan.py +++ b/homeassistant/components/vallox/fan.py @@ -14,6 +14,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType +from . 
import ValloxEntity from .const import ( DOMAIN, METRIC_KEY_MODE, @@ -22,11 +23,10 @@ from .const import ( METRIC_KEY_PROFILE_FAN_SPEED_HOME, MODE_OFF, MODE_ON, - PRESET_MODE_TO_VALLOX_PROFILE, - VALLOX_PROFILE_TO_PRESET_MODE, + PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE, + VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE, ) from .coordinator import ValloxDataUpdateCoordinator -from .entity import ValloxEntity class ExtraStateAttributeDetails(NamedTuple): @@ -77,13 +77,7 @@ class ValloxFanEntity(ValloxEntity, FanEntity): """Representation of the fan.""" _attr_name = None - _attr_supported_features = ( - FanEntityFeature.PRESET_MODE - | FanEntityFeature.SET_SPEED - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) - _enable_turn_on_off_backwards_compatibility = False + _attr_supported_features = FanEntityFeature.PRESET_MODE | FanEntityFeature.SET_SPEED def __init__( self, @@ -97,7 +91,7 @@ class ValloxFanEntity(ValloxEntity, FanEntity): self._client = client self._attr_unique_id = str(self._device_uuid) - self._attr_preset_modes = list(PRESET_MODE_TO_VALLOX_PROFILE) + self._attr_preset_modes = list(PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE) @property def is_on(self) -> bool: @@ -108,7 +102,7 @@ class ValloxFanEntity(ValloxEntity, FanEntity): def preset_mode(self) -> str | None: """Return the current preset mode.""" vallox_profile = self.coordinator.data.profile - return VALLOX_PROFILE_TO_PRESET_MODE.get(vallox_profile) + return VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE.get(vallox_profile) @property def percentage(self) -> int | None: @@ -204,7 +198,7 @@ class ValloxFanEntity(ValloxEntity, FanEntity): return False try: - profile = PRESET_MODE_TO_VALLOX_PROFILE[preset_mode] + profile = PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE[preset_mode] await self._client.set_profile(profile) except ValloxApiException as err: @@ -220,7 +214,7 @@ class ValloxFanEntity(ValloxEntity, FanEntity): Returns true if speed has been changed, false otherwise. """ vallox_profile = ( - PRESET_MODE_TO_VALLOX_PROFILE[preset_mode] + PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE[preset_mode] if preset_mode is not None else self.coordinator.data.profile ) diff --git a/homeassistant/components/vallox/icons.json b/homeassistant/components/vallox/icons.json index 9123d1bfe9b..67b41d216d2 100644 --- a/homeassistant/components/vallox/icons.json +++ b/homeassistant/components/vallox/icons.json @@ -37,17 +37,8 @@ } }, "services": { - "set_profile_fan_speed_home": { - "service": "mdi:home" - }, - "set_profile_fan_speed_away": { - "service": "mdi:walk" - }, - "set_profile_fan_speed_boost": { - "service": "mdi:speedometer" - }, - "set_profile": { - "service": "mdi:fan" - } + "set_profile_fan_speed_home": "mdi:home", + "set_profile_fan_speed_away": "mdi:walk", + "set_profile_fan_speed_boost": "mdi:speedometer" } } diff --git a/homeassistant/components/vallox/number.py b/homeassistant/components/vallox/number.py index 96bc07b5a93..93190da1f16 100644 --- a/homeassistant/components/vallox/number.py +++ b/homeassistant/components/vallox/number.py @@ -16,9 +16,9 @@ from homeassistant.const import EntityCategory, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
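# Illustrative standalone sketch, not part of the patch above: when setting the
# fan speed, the code resolves the target profile from an explicit preset if one
# was given and otherwise falls back to the currently reported profile.
# resolve_profile is a hypothetical helper; the mapping values are placeholders.
PRESET_TO_PROFILE = {"Home": 10, "Away": 20, "Boost": 30}


def resolve_profile(preset_mode: str | None, current_profile: int) -> int:
    return PRESET_TO_PROFILE[preset_mode] if preset_mode is not None else current_profile


assert resolve_profile("Boost", current_profile=10) == 30
assert resolve_profile(None, current_profile=10) == 10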
import ValloxEntity from .const import DOMAIN from .coordinator import ValloxDataUpdateCoordinator -from .entity import ValloxEntity class ValloxNumberEntity(ValloxEntity, NumberEntity): diff --git a/homeassistant/components/vallox/sensor.py b/homeassistant/components/vallox/sensor.py index 7165947861a..0bb509a9c5a 100644 --- a/homeassistant/components/vallox/sensor.py +++ b/homeassistant/components/vallox/sensor.py @@ -25,15 +25,15 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.util import dt as dt_util +from . import ValloxEntity from .const import ( DOMAIN, METRIC_KEY_MODE, MODE_ON, VALLOX_CELL_STATE_TO_STR, - VALLOX_PROFILE_TO_PRESET_MODE, + VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE, ) from .coordinator import ValloxDataUpdateCoordinator -from .entity import ValloxEntity class ValloxSensorEntity(ValloxEntity, SensorEntity): @@ -78,7 +78,7 @@ class ValloxProfileSensor(ValloxSensorEntity): def native_value(self) -> StateType: """Return the value reported by the sensor.""" vallox_profile = self.coordinator.data.profile - return VALLOX_PROFILE_TO_PRESET_MODE.get(vallox_profile) + return VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE.get(vallox_profile) # There is a quirk with respect to the fan speed reporting. The device keeps on reporting the last diff --git a/homeassistant/components/vallox/services.yaml b/homeassistant/components/vallox/services.yaml index f2a55032b93..e6bd3edad11 100644 --- a/homeassistant/components/vallox/services.yaml +++ b/homeassistant/components/vallox/services.yaml @@ -27,24 +27,3 @@ set_profile_fan_speed_boost: min: 0 max: 100 unit_of_measurement: "%" - -set_profile: - fields: - profile: - required: true - selector: - select: - translation_key: "profile" - options: - - "home" - - "away" - - "boost" - - "fireplace" - - "extra" - duration: - required: false - selector: - number: - min: 1 - max: 65535 - unit_of_measurement: "minutes" diff --git a/homeassistant/components/vallox/strings.json b/homeassistant/components/vallox/strings.json index 8a30ed4ad01..4df57b81bb5 100644 --- a/homeassistant/components/vallox/strings.json +++ b/homeassistant/components/vallox/strings.json @@ -133,31 +133,6 @@ "description": "[%key:component::vallox::services::set_profile_fan_speed_home::fields::fan_speed::description%]" } } - }, - "set_profile": { - "name": "Activate profile for duration", - "description": "Activate a profile and optionally set duration.", - "fields": { - "profile": { - "name": "Profile", - "description": "Profile to activate" - }, - "duration": { - "name": "Duration", - "description": "Activation duration, if omitted device uses stored duration. Duration of 65535 activates profile without timeout. Duration only applies to Boost, Fireplace and Extra profiles." - } - } - } - }, - "selector": { - "profile": { - "options": { - "home": "Home", - "away": "Away", - "boost": "Boost", - "fireplace": "Fireplace", - "extra": "Extra" - } } } } diff --git a/homeassistant/components/vallox/switch.py b/homeassistant/components/vallox/switch.py index 20b270f8f18..d70de89606d 100644 --- a/homeassistant/components/vallox/switch.py +++ b/homeassistant/components/vallox/switch.py @@ -13,9 +13,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import ValloxEntity from .const import DOMAIN from .coordinator import ValloxDataUpdateCoordinator -from .entity import ValloxEntity class ValloxSwitchEntity(ValloxEntity, SwitchEntity): diff --git a/homeassistant/components/valve/__init__.py b/homeassistant/components/valve/__init__.py index 7df6f8eac51..e97a68c2e82 100644 --- a/homeassistant/components/valve/__init__.py +++ b/homeassistant/components/valve/__init__.py @@ -11,7 +11,7 @@ from typing import Any, final import voluptuous as vol from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( # noqa: F401 +from homeassistant.const import ( SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE, SERVICE_SET_VALVE_POSITION, @@ -27,13 +27,10 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType -from homeassistant.util.hass_dict import HassKey - -from .const import DOMAIN, ValveState _LOGGER = logging.getLogger(__name__) -DATA_COMPONENT: HassKey[EntityComponent[ValveEntity]] = HassKey(DOMAIN) +DOMAIN = "valve" ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -67,21 +64,18 @@ ATTR_POSITION = "position" async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track states and offer events for valves.""" - component = hass.data[DATA_COMPONENT] = EntityComponent[ValveEntity]( + component = hass.data[DOMAIN] = EntityComponent[ValveEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) component.async_register_entity_service( - SERVICE_OPEN_VALVE, None, "async_handle_open_valve", [ValveEntityFeature.OPEN] + SERVICE_OPEN_VALVE, {}, "async_handle_open_valve", [ValveEntityFeature.OPEN] ) component.async_register_entity_service( - SERVICE_CLOSE_VALVE, - None, - "async_handle_close_valve", - [ValveEntityFeature.CLOSE], + SERVICE_CLOSE_VALVE, {}, "async_handle_close_valve", [ValveEntityFeature.CLOSE] ) component.async_register_entity_service( @@ -96,12 +90,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) component.async_register_entity_service( - SERVICE_STOP_VALVE, None, "async_stop_valve", [ValveEntityFeature.STOP] + SERVICE_STOP_VALVE, {}, "async_stop_valve", [ValveEntityFeature.STOP] ) component.async_register_entity_service( SERVICE_TOGGLE, - None, + {}, "async_toggle", [ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE], ) @@ -111,12 +105,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - return await hass.data[DATA_COMPONENT].async_setup_entry(entry) + component: EntityComponent[ValveEntity] = hass.data[DOMAIN] + return await component.async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - return await hass.data[DATA_COMPONENT].async_unload_entry(entry) + component: EntityComponent[ValveEntity] = hass.data[DOMAIN] + return await component.async_unload_entry(entry) @dataclass(frozen=True, kw_only=True) @@ -174,18 +170,18 @@ class ValveEntity(Entity): reports_position = self.reports_position if self.is_opening: self.__is_last_toggle_direction_open = True - return ValveState.OPENING + return STATE_OPENING if self.is_closing: self.__is_last_toggle_direction_open = False - return 
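# Illustrative standalone sketch, not part of the patch above: each valve service
# is registered together with the feature flags an entity must support before
# the service may target it. ValveFeature, FakeValve and dispatch_service are
# hypothetical stand-ins demonstrating that gating.
from enum import IntFlag


class ValveFeature(IntFlag):
    OPEN = 1
    CLOSE = 2
    STOP = 8


SERVICES = {
    "open_valve": ("open", ValveFeature.OPEN),
    "stop_valve": ("stop", ValveFeature.STOP),
}


class FakeValve:
    supported_features = ValveFeature.OPEN | ValveFeature.CLOSE

    def open(self) -> str:
        return "opening"


def dispatch_service(entity: FakeValve, service: str) -> str:
    method_name, required = SERVICES[service]
    if not entity.supported_features & required:
        raise ValueError(f"{service} is not supported by this entity")
    return getattr(entity, method_name)()


assert dispatch_service(FakeValve(), "open_valve") == "opening"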
ValveState.CLOSING + return STATE_CLOSING if reports_position is True: if (current_valve_position := self.current_valve_position) is None: return None position_zero = current_valve_position == 0 - return ValveState.CLOSED if position_zero else ValveState.OPEN + return STATE_CLOSED if position_zero else STATE_OPEN if (closed := self.is_closed) is None: return None - return ValveState.CLOSED if closed else ValveState.OPEN + return STATE_CLOSED if closed else STATE_OPEN @final @property @@ -227,8 +223,7 @@ class ValveEntity(Entity): async def async_handle_open_valve(self) -> None: """Open the valve.""" if self.supported_features & ValveEntityFeature.SET_POSITION: - await self.async_set_valve_position(100) - return + return await self.async_set_valve_position(100) await self.async_open_valve() def close_valve(self) -> None: @@ -243,8 +238,7 @@ class ValveEntity(Entity): async def async_handle_close_valve(self) -> None: """Close the valve.""" if self.supported_features & ValveEntityFeature.SET_POSITION: - await self.async_set_valve_position(0) - return + return await self.async_set_valve_position(0) await self.async_close_valve() async def async_toggle(self) -> None: diff --git a/homeassistant/components/valve/const.py b/homeassistant/components/valve/const.py deleted file mode 100644 index 5f590b5015a..00000000000 --- a/homeassistant/components/valve/const.py +++ /dev/null @@ -1,14 +0,0 @@ -"""Constants for the Valve entity platform.""" - -from enum import StrEnum - -DOMAIN = "valve" - - -class ValveState(StrEnum): - """State of Valve entities.""" - - OPENING = "opening" - CLOSING = "closing" - CLOSED = "closed" - OPEN = "open" diff --git a/homeassistant/components/valve/icons.json b/homeassistant/components/valve/icons.json index c9c6b632dcb..349196658d4 100644 --- a/homeassistant/components/valve/icons.json +++ b/homeassistant/components/valve/icons.json @@ -1,36 +1,20 @@ { "entity_component": { "_": { - "default": "mdi:valve-open", - "state": { - "closed": "mdi:valve-closed" - } + "default": "mdi:pipe-valve" }, "gas": { "default": "mdi:meter-gas" }, "water": { - "default": "mdi:valve-open", - "state": { - "closed": "mdi:valve-closed" - } + "default": "mdi:pipe-valve" } }, "services": { - "close_valve": { - "service": "mdi:valve-closed" - }, - "open_valve": { - "service": "mdi:valve-open" - }, - "set_valve_position": { - "service": "mdi:valve" - }, - "stop_valve": { - "service": "mdi:stop" - }, - "toggle": { - "service": "mdi:valve-open" - } + "close_valve": "mdi:valve-closed", + "open_valve": "mdi:valve-open", + "set_valve_position": "mdi:valve", + "stop_valve": "mdi:stop", + "toggle": "mdi:valve-open" } } diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index ca8cfb0f2a7..479b7f02024 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -89,9 +89,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True def check_entry_id(interface: str) -> str: - for config_entry in hass.config_entries.async_entries(DOMAIN): - if "port" in config_entry.data and config_entry.data["port"] == interface: - return config_entry.entry_id + for entry in hass.config_entries.async_entries(DOMAIN): + if "port" in entry.data and entry.data["port"] == interface: + return entry.entry_id raise vol.Invalid( "The interface provided is not defined as a port in a Velbus integration" ) @@ -119,10 +119,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> 
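# Illustrative standalone sketch, not part of the patch above: the valve state
# property derives a single state string from is_opening/is_closing, then the
# reported position, then the is_closed flag. derive_valve_state is a
# hypothetical, simplified helper showing that ordering.
def derive_valve_state(
    is_opening: bool,
    is_closing: bool,
    position: int | None,
    is_closed: bool | None,
) -> str | None:
    if is_opening:
        return "opening"
    if is_closing:
        return "closing"
    if position is not None:
        return "closed" if position == 0 else "open"
    if is_closed is None:
        return None
    return "closed" if is_closed else "open"


assert derive_valve_state(False, False, 0, None) == "closed"
assert derive_valve_state(False, False, 57, None) == "open"
assert derive_valve_state(True, False, None, None) == "opening"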
bool: async def set_memo_text(call: ServiceCall) -> None: """Handle Memo Text service call.""" memo_text = call.data[CONF_MEMO_TEXT] + memo_text.hass = hass await ( hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"] .get_module(call.data[CONF_ADDRESS]) - .set_memo_text(memo_text) + .set_memo_text(memo_text.async_render()) ) hass.services.async_register( @@ -135,7 +136,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: vol.Required(CONF_ADDRESS): vol.All( vol.Coerce(int), vol.Range(min=0, max=255) ), - vol.Optional(CONF_MEMO_TEXT, default=""): cv.string, + vol.Optional(CONF_MEMO_TEXT, default=""): cv.template, } ), ) diff --git a/homeassistant/components/velbus/climate.py b/homeassistant/components/velbus/climate.py index ed47d8b0a91..34a565c2b37 100644 --- a/homeassistant/components/velbus/climate.py +++ b/homeassistant/components/velbus/climate.py @@ -14,7 +14,6 @@ from homeassistant.components.climate import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN, PRESET_MODES @@ -40,7 +39,8 @@ class VelbusClimate(VelbusEntity, ClimateEntity): ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _attr_hvac_modes = [HVACMode.HEAT, HVACMode.COOL] + _attr_hvac_mode = HVACMode.HEAT + _attr_hvac_modes = [HVACMode.HEAT] _attr_preset_modes = list(PRESET_MODES) _enable_turn_on_off_backwards_compatibility = False @@ -66,11 +66,6 @@ class VelbusClimate(VelbusEntity, ClimateEntity): """Return the current temperature.""" return self._channel.get_state() - @property - def hvac_mode(self) -> HVACMode: - """Return the current hvac mode based on cool_mode message.""" - return HVACMode.COOL if self._channel.get_cool_mode() else HVACMode.HEAT - @api_call async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperatures.""" @@ -84,15 +79,3 @@ class VelbusClimate(VelbusEntity, ClimateEntity): """Set the new preset mode.""" await self._channel.set_preset(PRESET_MODES[preset_mode]) self.async_write_ha_state() - - @api_call - async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: - """Set the new hvac mode.""" - if hvac_mode not in self._attr_hvac_modes: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="invalid_hvac_mode", - translation_placeholders={"hvac_mode": str(hvac_mode)}, - ) - await self._channel.set_mode(hvac_mode) - self.async_write_ha_state() diff --git a/homeassistant/components/velbus/cover.py b/homeassistant/components/velbus/cover.py index 8b9d927f3d7..823d682d339 100644 --- a/homeassistant/components/velbus/cover.py +++ b/homeassistant/components/velbus/cover.py @@ -66,16 +66,12 @@ class VelbusCover(VelbusEntity, CoverEntity): @property def is_opening(self) -> bool: """Return if the cover is opening.""" - if opening := self._channel.is_opening(): - self._assumed_closed = False - return opening + return self._channel.is_opening() @property def is_closing(self) -> bool: """Return if the cover is closing.""" - if closing := self._channel.is_closing(): - self._assumed_closed = True - return closing + return self._channel.is_closing() @property def current_cover_position(self) -> int | None: @@ -93,11 +89,13 @@ class VelbusCover(VelbusEntity, CoverEntity): 
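# Illustrative standalone sketch, not part of the patch above: the removed Velbus
# climate code derives the HVAC mode from the channel's cool-mode flag and
# rejects unsupported modes before forwarding them. Both helpers below are
# hypothetical stand-ins for that logic.
SUPPORTED_MODES = ("heat", "cool")


def current_hvac_mode(cool_mode_active: bool) -> str:
    return "cool" if cool_mode_active else "heat"


def validate_hvac_mode(hvac_mode: str) -> str:
    if hvac_mode not in SUPPORTED_MODES:
        raise ValueError(f"Climate mode {hvac_mode} is not supported.")
    return hvac_mode


assert current_hvac_mode(True) == "cool"
assert validate_hvac_mode("heat") == "heat"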
async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" await self._channel.open() + self._assumed_closed = False @api_call async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" await self._channel.close() + self._assumed_closed = True @api_call async def async_stop_cover(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/velbus/icons.json b/homeassistant/components/velbus/icons.json index a46f5e5fbf1..a806782d189 100644 --- a/homeassistant/components/velbus/icons.json +++ b/homeassistant/components/velbus/icons.json @@ -1,16 +1,8 @@ { "services": { - "sync_clock": { - "service": "mdi:clock" - }, - "scan": { - "service": "mdi:magnify" - }, - "clear_cache": { - "service": "mdi:delete" - }, - "set_memo_text": { - "service": "mdi:note-text" - } + "sync_clock": "mdi:clock", + "scan": "mdi:magnify", + "clear_cache": "mdi:delete", + "set_memo_text": "mdi:note-text" } } diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index 5443afeef77..f778533cad8 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.10.0"], + "requirements": ["velbus-aio==2024.5.1"], "usb": [ { "vid": "10CF", diff --git a/homeassistant/components/velbus/strings.json b/homeassistant/components/velbus/strings.json index 55c7fda84ac..948c079444d 100644 --- a/homeassistant/components/velbus/strings.json +++ b/homeassistant/components/velbus/strings.json @@ -17,11 +17,6 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } }, - "exceptions": { - "invalid_hvac_mode": { - "message": "Climate mode {hvac_mode} is not supported." 
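# Illustrative standalone sketch, not part of the patch above: the icons.json
# changes switch service icons between two shapes, a bare string and a mapping
# with a "service" key. read_service_icon is a hypothetical reader that accepts
# either shape.
NEW_STYLE = {"sync_clock": {"service": "mdi:clock"}}
OLD_STYLE = {"sync_clock": "mdi:clock"}


def read_service_icon(services: dict, name: str) -> str | None:
    entry = services.get(name)
    if isinstance(entry, dict):
        return entry.get("service")
    return entry


assert read_service_icon(NEW_STYLE, "sync_clock") == "mdi:clock"
assert read_service_icon(OLD_STYLE, "sync_clock") == "mdi:clock"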
- } - }, "services": { "sync_clock": { "name": "Sync clock", diff --git a/homeassistant/components/velux/__init__.py b/homeassistant/components/velux/__init__.py index 2f1cab67c16..4b89fc66a84 100644 --- a/homeassistant/components/velux/__init__.py +++ b/homeassistant/components/velux/__init__.py @@ -1,13 +1,48 @@ """Support for VELUX KLF 200 devices.""" -from pyvlx import PyVLX, PyVLXException +from pyvlx import Node, PyVLX, PyVLXException +import voluptuous as vol -from homeassistant.config_entries import ConfigEntry +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, EVENT_HOMEASSISTANT_STOP -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant, ServiceCall, callback +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.typing import ConfigType from .const import DOMAIN, LOGGER, PLATFORMS +CONFIG_SCHEMA = vol.Schema( + vol.All( + cv.deprecated(DOMAIN), + { + DOMAIN: vol.Schema( + { + vol.Required(CONF_HOST): cv.string, + vol.Required(CONF_PASSWORD): cv.string, + } + ) + }, + ), + extra=vol.ALLOW_EXTRA, +) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the velux component.""" + if DOMAIN not in config: + return True + + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=config[DOMAIN], + ) + ) + + return True + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up the velux component.""" @@ -66,3 +101,29 @@ class VeluxModule: LOGGER.debug("Velux interface started") await self.pyvlx.load_scenes() await self.pyvlx.load_nodes() + + +class VeluxEntity(Entity): + """Abstraction for al Velux entities.""" + + _attr_should_poll = False + + def __init__(self, node: Node) -> None: + """Initialize the Velux device.""" + self.node = node + self._attr_unique_id = node.serial_number + self._attr_name = node.name if node.name else f"#{node.node_id}" + + @callback + def async_register_callbacks(self): + """Register callbacks to update hass after device was changed.""" + + async def after_update_callback(device): + """Call after device was updated.""" + self.async_write_ha_state() + + self.node.register_device_updated_cb(after_update_callback) + + async def async_added_to_hass(self): + """Store register state change callback.""" + self.async_register_callbacks() diff --git a/homeassistant/components/velux/config_flow.py b/homeassistant/components/velux/config_flow.py index f4bfa13b4d5..c0d4ec8035b 100644 --- a/homeassistant/components/velux/config_flow.py +++ b/homeassistant/components/velux/config_flow.py @@ -1,11 +1,15 @@ """Config flow for Velux integration.""" +from typing import Any + from pyvlx import PyVLX, PyVLXException import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from .const import DOMAIN, LOGGER @@ -20,6 +24,59 @@ DATA_SCHEMA = vol.Schema( class VeluxConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for velux.""" + async def async_step_import(self, config: dict[str, Any]) -> ConfigFlowResult: + """Import a config entry.""" + + def 
create_repair(error: str | None = None) -> None: + if error: + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_yaml_import_issue_{error}", + breaks_in_ha_version="2024.9.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key=f"deprecated_yaml_import_issue_{error}", + ) + else: + async_create_issue( + self.hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2024.9.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Velux", + }, + ) + + for entry in self._async_current_entries(): + if entry.data[CONF_HOST] == config[CONF_HOST]: + create_repair() + return self.async_abort(reason="already_configured") + + pyvlx = PyVLX(host=config[CONF_HOST], password=config[CONF_PASSWORD]) + try: + await pyvlx.connect() + await pyvlx.disconnect() + except (PyVLXException, ConnectionError): + create_repair("cannot_connect") + return self.async_abort(reason="cannot_connect") + except Exception: # noqa: BLE001 + create_repair("unknown") + return self.async_abort(reason="unknown") + + create_repair() + return self.async_create_entry( + title=config[CONF_HOST], + data=config, + ) + async def async_step_user( self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/velux/cover.py b/homeassistant/components/velux/cover.py index 90745f601b4..c8688e4d186 100644 --- a/homeassistant/components/velux/cover.py +++ b/homeassistant/components/velux/cover.py @@ -18,8 +18,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .entity import VeluxEntity +from . 
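# Illustrative standalone sketch, not part of the patch above: the Velux YAML
# import step aborts (and files a repair issue) when an entry with the same host
# already exists; the duplicate check itself is a scan over the current entries.
# find_existing_entry is a hypothetical helper and the entry dicts are placeholders.
def find_existing_entry(entries: list[dict], host: str) -> dict | None:
    for entry in entries:
        if entry.get("host") == host:
            return entry
    return None


entries = [{"host": "192.168.1.50", "password": "velux123"}]
assert find_existing_entry(entries, "192.168.1.50") is not None
assert find_existing_entry(entries, "192.168.1.51") is None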
import DOMAIN, VeluxEntity PARALLEL_UPDATES = 1 @@ -30,7 +29,7 @@ async def async_setup_entry( """Set up cover(s) for Velux platform.""" module = hass.data[DOMAIN][config.entry_id] async_add_entities( - VeluxCover(node, config.entry_id) + VeluxCover(node) for node in module.pyvlx.nodes if isinstance(node, OpeningDevice) ) @@ -42,9 +41,9 @@ class VeluxCover(VeluxEntity, CoverEntity): _is_blind = False node: OpeningDevice - def __init__(self, node: OpeningDevice, config_entry_id: str) -> None: + def __init__(self, node: OpeningDevice) -> None: """Initialize VeluxCover.""" - super().__init__(node, config_entry_id) + super().__init__(node) self._attr_device_class = CoverDeviceClass.WINDOW if isinstance(node, Awning): self._attr_device_class = CoverDeviceClass.AWNING @@ -95,16 +94,6 @@ class VeluxCover(VeluxEntity, CoverEntity): """Return if the cover is closed.""" return self.node.position.closed - @property - def is_opening(self) -> bool: - """Return if the cover is opening or not.""" - return self.node.is_opening - - @property - def is_closing(self) -> bool: - """Return if the cover is closing or not.""" - return self.node.is_closing - async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" await self.node.close(wait_for_completion=False) diff --git a/homeassistant/components/velux/entity.py b/homeassistant/components/velux/entity.py deleted file mode 100644 index 674ba5dde45..00000000000 --- a/homeassistant/components/velux/entity.py +++ /dev/null @@ -1,36 +0,0 @@ -"""Support for VELUX KLF 200 devices.""" - -from pyvlx import Node - -from homeassistant.core import callback -from homeassistant.helpers.entity import Entity - - -class VeluxEntity(Entity): - """Abstraction for al Velux entities.""" - - _attr_should_poll = False - - def __init__(self, node: Node, config_entry_id: str) -> None: - """Initialize the Velux device.""" - self.node = node - self._attr_unique_id = ( - node.serial_number - if node.serial_number - else f"{config_entry_id}_{node.node_id}" - ) - self._attr_name = node.name if node.name else f"#{node.node_id}" - - @callback - def async_register_callbacks(self): - """Register callbacks to update hass after device was changed.""" - - async def after_update_callback(device): - """Call after device was updated.""" - self.async_write_ha_state() - - self.node.register_device_updated_cb(after_update_callback) - - async def async_added_to_hass(self): - """Store register state change callback.""" - self.async_register_callbacks() diff --git a/homeassistant/components/velux/icons.json b/homeassistant/components/velux/icons.json index 78cb5b14838..a16e7b50093 100644 --- a/homeassistant/components/velux/icons.json +++ b/homeassistant/components/velux/icons.json @@ -1,7 +1,5 @@ { "services": { - "reboot_gateway": { - "service": "mdi:restart" - } + "reboot_gateway": "mdi:restart" } } diff --git a/homeassistant/components/velux/light.py b/homeassistant/components/velux/light.py index 14f12a01060..bbe9822648e 100644 --- a/homeassistant/components/velux/light.py +++ b/homeassistant/components/velux/light.py @@ -11,8 +11,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .entity import VeluxEntity +from . 
import DOMAIN, VeluxEntity PARALLEL_UPDATES = 1 @@ -24,7 +23,7 @@ async def async_setup_entry( module = hass.data[DOMAIN][config.entry_id] async_add_entities( - VeluxLight(node, config.entry_id) + VeluxLight(node) for node in module.pyvlx.nodes if isinstance(node, LighteningDevice) ) diff --git a/homeassistant/components/velux/scene.py b/homeassistant/components/velux/scene.py index 54888413613..30858b25002 100644 --- a/homeassistant/components/velux/scene.py +++ b/homeassistant/components/velux/scene.py @@ -9,7 +9,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import DOMAIN PARALLEL_UPDATES = 1 diff --git a/homeassistant/components/velux/strings.json b/homeassistant/components/velux/strings.json index 5b7b459a3f7..3964c22efe2 100644 --- a/homeassistant/components/velux/strings.json +++ b/homeassistant/components/velux/strings.json @@ -17,6 +17,16 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } }, + "issues": { + "deprecated_yaml_import_issue_cannot_connect": { + "title": "The Velux YAML configuration import cannot connect to server", + "description": "Configuring Velux using YAML is being removed but there was an connection error importing your YAML configuration.\n\nMake sure your home assistant can reach the KLF 200." + }, + "deprecated_yaml_import_issue_unknown": { + "title": "The Velux YAML configuration import failed with unknown error raised by pyvlx", + "description": "Configuring Velux using YAML is being removed but there was an unknown error importing your YAML configuration.\n\nCheck your configuration or have a look at the documentation of the integration." 
+ } + }, "services": { "reboot_gateway": { "name": "Reboot gateway", diff --git a/homeassistant/components/venstar/__init__.py b/homeassistant/components/venstar/__init__.py index 3243c7a6f47..cbcfd3dff90 100644 --- a/homeassistant/components/venstar/__init__.py +++ b/homeassistant/components/venstar/__init__.py @@ -13,7 +13,9 @@ from homeassistant.const import ( CONF_USERNAME, Platform, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, VENSTAR_TIMEOUT from .coordinator import VenstarDataUpdateCoordinator @@ -57,3 +59,35 @@ async def async_unload_entry(hass: HomeAssistant, config: ConfigEntry) -> bool: if unload_ok: hass.data[DOMAIN].pop(config.entry_id) return unload_ok + + +class VenstarEntity(CoordinatorEntity[VenstarDataUpdateCoordinator]): + """Representation of a Venstar entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + venstar_data_coordinator: VenstarDataUpdateCoordinator, + config: ConfigEntry, + ) -> None: + """Initialize the data object.""" + super().__init__(venstar_data_coordinator) + self._config = config + self._client = venstar_data_coordinator.client + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self.async_write_ha_state() + + @property + def device_info(self) -> DeviceInfo: + """Return the device information for this entity.""" + return DeviceInfo( + identifiers={(DOMAIN, self._config.entry_id)}, + name=self._client.name, + manufacturer="Venstar", + model=f"{self._client.model}-{self._client.get_type()}", + sw_version="{}.{}".format(*(self._client.get_firmware_ver())), + ) diff --git a/homeassistant/components/venstar/binary_sensor.py b/homeassistant/components/venstar/binary_sensor.py index 315df09b625..38bdc208d15 100644 --- a/homeassistant/components/venstar/binary_sensor.py +++ b/homeassistant/components/venstar/binary_sensor.py @@ -8,8 +8,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import VenstarEntity from .const import DOMAIN -from .entity import VenstarEntity async def async_setup_entry( diff --git a/homeassistant/components/venstar/climate.py b/homeassistant/components/venstar/climate.py index 2865d64201e..ea833dc3183 100644 --- a/homeassistant/components/venstar/climate.py +++ b/homeassistant/components/venstar/climate.py @@ -36,6 +36,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from . 
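# Illustrative standalone sketch, not part of the patch above: both Venstar
# entity variants build the device sw_version string from the firmware tuple
# returned by the client; the two formattings shown in the diff are equivalent.
# The tuple value here is a placeholder.
firmware_version = (5, 28)

assert "{}.{}".format(*firmware_version) == "5.28"
assert f"{firmware_version[0]}.{firmware_version[1]}" == "5.28"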
import VenstarEntity from .const import ( _LOGGER, ATTR_FAN_STATE, @@ -46,7 +47,6 @@ from .const import ( HOLD_MODE_TEMPERATURE, ) from .coordinator import VenstarDataUpdateCoordinator -from .entity import VenstarEntity PLATFORM_SCHEMA = CLIMATE_PLATFORM_SCHEMA.extend( { diff --git a/homeassistant/components/venstar/config_flow.py b/homeassistant/components/venstar/config_flow.py index 929f5718c19..289f7936676 100644 --- a/homeassistant/components/venstar/config_flow.py +++ b/homeassistant/components/venstar/config_flow.py @@ -15,6 +15,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.typing import ConfigType from .const import _LOGGER, DOMAIN, VENSTAR_TIMEOUT @@ -84,7 +85,7 @@ class VenstarConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: ConfigType) -> ConfigFlowResult: """Import entry from configuration.yaml.""" self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) return await self.async_step_user( diff --git a/homeassistant/components/venstar/entity.py b/homeassistant/components/venstar/entity.py deleted file mode 100644 index b8a4b971a7f..00000000000 --- a/homeassistant/components/venstar/entity.py +++ /dev/null @@ -1,44 +0,0 @@ -"""The venstar component.""" - -from __future__ import annotations - -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import callback -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity - -from .const import DOMAIN -from .coordinator import VenstarDataUpdateCoordinator - - -class VenstarEntity(CoordinatorEntity[VenstarDataUpdateCoordinator]): - """Representation of a Venstar entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - venstar_data_coordinator: VenstarDataUpdateCoordinator, - config: ConfigEntry, - ) -> None: - """Initialize the data object.""" - super().__init__(venstar_data_coordinator) - self._config = config - self._client = venstar_data_coordinator.client - - @callback - def _handle_coordinator_update(self) -> None: - """Handle updated data from the coordinator.""" - self.async_write_ha_state() - - @property - def device_info(self) -> DeviceInfo: - """Return the device information for this entity.""" - firmware_version = self._client.get_firmware_ver() - return DeviceInfo( - identifiers={(DOMAIN, self._config.entry_id)}, - name=self._client.name, - manufacturer="Venstar", - model=f"{self._client.model}-{self._client.get_type()}", - sw_version=f"{firmware_version[0]}.{firmware_version[1]}", - ) diff --git a/homeassistant/components/venstar/sensor.py b/homeassistant/components/venstar/sensor.py index 94180f6ad79..ee4ad43ade6 100644 --- a/homeassistant/components/venstar/sensor.py +++ b/homeassistant/components/venstar/sensor.py @@ -23,9 +23,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import VenstarEntity from .const import DOMAIN from .coordinator import VenstarDataUpdateCoordinator -from .entity import VenstarEntity RUNTIME_HEAT1 = "heat1" RUNTIME_HEAT2 = "heat2" @@ -75,7 +75,7 @@ class VenstarSensorEntityDescription(SensorEntityDescription): value_fn: Callable[[VenstarDataUpdateCoordinator, str], Any] name_fn: Callable[[str], str] | None - uom_fn: Callable[[VenstarDataUpdateCoordinator], str | None] + uom_fn: Callable[[Any], str | None] async def async_setup_entry( @@ -99,18 +99,11 @@ async def async_setup_entry( ) runtimes = coordinator.runtimes[-1] - for sensor_name in runtimes: - if sensor_name in RUNTIME_DEVICES: - entities.append( - VenstarSensor( - coordinator, config_entry, RUNTIME_ENTITY, sensor_name - ) - ) - entities.extend( - VenstarSensor(coordinator, config_entry, description, sensor_name) - for description in CONSUMABLE_ENTITIES - if description.key == sensor_name - ) + entities.extend( + VenstarSensor(coordinator, config_entry, RUNTIME_ENTITY, sensor_name) + for sensor_name in runtimes + if sensor_name in RUNTIME_DEVICES + ) for description in INFO_ENTITIES: try: @@ -231,27 +224,6 @@ RUNTIME_ENTITY = VenstarSensorEntityDescription( name_fn=lambda sensor_name: f"{RUNTIME_ATTRIBUTES[sensor_name]} Runtime", ) -CONSUMABLE_ENTITIES: tuple[VenstarSensorEntityDescription, ...] = ( - VenstarSensorEntityDescription( - key="filterHours", - state_class=SensorStateClass.MEASUREMENT, - uom_fn=lambda _: UnitOfTime.HOURS, - value_fn=lambda coordinator, sensor_name: ( - coordinator.runtimes[-1][sensor_name] / 100 - ), - name_fn=None, - translation_key="filter_install_time", - ), - VenstarSensorEntityDescription( - key="filterDays", - state_class=SensorStateClass.MEASUREMENT, - uom_fn=lambda _: UnitOfTime.DAYS, - value_fn=lambda coordinator, sensor_name: coordinator.runtimes[-1][sensor_name], - name_fn=None, - translation_key="filter_usage", - ), -) - INFO_ENTITIES: tuple[VenstarSensorEntityDescription, ...] 
= ( VenstarSensorEntityDescription( key="schedulepart", diff --git a/homeassistant/components/venstar/strings.json b/homeassistant/components/venstar/strings.json index fdc75162651..952353dcbfe 100644 --- a/homeassistant/components/venstar/strings.json +++ b/homeassistant/components/venstar/strings.json @@ -25,12 +25,6 @@ }, "entity": { "sensor": { - "filter_install_time": { - "name": "Filter installation time" - }, - "filter_usage": { - "name": "Filter usage" - }, "schedule_part": { "name": "Schedule Part", "state": { diff --git a/homeassistant/components/vera/__init__.py b/homeassistant/components/vera/__init__.py index b8f0b702ebe..722a6b86d4b 100644 --- a/homeassistant/components/vera/__init__.py +++ b/homeassistant/components/vera/__init__.py @@ -5,6 +5,7 @@ from __future__ import annotations import asyncio from collections import defaultdict import logging +from typing import Any import pyvera as veraApi from requests.exceptions import RequestException @@ -13,6 +14,10 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( + ATTR_ARMED, + ATTR_BATTERY_LEVEL, + ATTR_LAST_TRIP_TIME, + ATTR_TRIPPED, CONF_EXCLUDE, CONF_LIGHTS, EVENT_HOMEASSISTANT_STOP, @@ -21,7 +26,10 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType +from homeassistant.util import slugify +from homeassistant.util.dt import utc_from_timestamp from .common import ( ControllerData, @@ -31,7 +39,7 @@ from .common import ( set_controller_data, ) from .config_flow import fix_device_id_list, new_options -from .const import CONF_CONTROLLER, DOMAIN +from .const import CONF_CONTROLLER, CONF_LEGACY_UNIQUE_ID, DOMAIN, VERA_ID_FORMAT _LOGGER = logging.getLogger(__name__) @@ -196,3 +204,83 @@ def map_vera_device( ), None, ) + + +class VeraDevice[_DeviceTypeT: veraApi.VeraDevice](Entity): + """Representation of a Vera device entity.""" + + def __init__( + self, vera_device: _DeviceTypeT, controller_data: ControllerData + ) -> None: + """Initialize the device.""" + self.vera_device = vera_device + self.controller = controller_data.controller + + self._name = self.vera_device.name + # Append device id to prevent name clashes in HA. 
+ self.vera_id = VERA_ID_FORMAT.format( + slugify(vera_device.name), vera_device.vera_device_id + ) + + if controller_data.config_entry.data.get(CONF_LEGACY_UNIQUE_ID): + self._unique_id = str(self.vera_device.vera_device_id) + else: + self._unique_id = f"vera_{controller_data.config_entry.unique_id}_{self.vera_device.vera_device_id}" + + async def async_added_to_hass(self) -> None: + """Subscribe to updates.""" + self.controller.register(self.vera_device, self._update_callback) + + def _update_callback(self, _device: _DeviceTypeT) -> None: + """Update the state.""" + self.schedule_update_ha_state(True) + + def update(self): + """Force a refresh from the device if the device is unavailable.""" + refresh_needed = self.vera_device.should_poll or not self.available + _LOGGER.debug("%s: update called (refresh=%s)", self._name, refresh_needed) + if refresh_needed: + self.vera_device.refresh() + + @property + def name(self) -> str: + """Return the name of the device.""" + return self._name + + @property + def extra_state_attributes(self) -> dict[str, Any] | None: + """Return the state attributes of the device.""" + attr = {} + + if self.vera_device.has_battery: + attr[ATTR_BATTERY_LEVEL] = self.vera_device.battery_level + + if self.vera_device.is_armable: + armed = self.vera_device.is_armed + attr[ATTR_ARMED] = "True" if armed else "False" + + if self.vera_device.is_trippable: + if (last_tripped := self.vera_device.last_trip) is not None: + utc_time = utc_from_timestamp(int(last_tripped)) + attr[ATTR_LAST_TRIP_TIME] = utc_time.isoformat() + else: + attr[ATTR_LAST_TRIP_TIME] = None + tripped = self.vera_device.is_tripped + attr[ATTR_TRIPPED] = "True" if tripped else "False" + + attr["Vera Device Id"] = self.vera_device.vera_device_id + + return attr + + @property + def available(self): + """If device communications have failed return false.""" + return not self.vera_device.comm_failure + + @property + def unique_id(self) -> str: + """Return a unique ID. + + The Vera assigns a unique and immutable ID number to each device. + """ + return self._unique_id diff --git a/homeassistant/components/vera/binary_sensor.py b/homeassistant/components/vera/binary_sensor.py index 3438ee81d4a..d90f6a78858 100644 --- a/homeassistant/components/vera/binary_sensor.py +++ b/homeassistant/components/vera/binary_sensor.py @@ -10,8 +10,8 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import VeraDevice from .common import ControllerData, get_controller_data -from .entity import VeraEntity async def async_setup_entry( @@ -30,7 +30,7 @@ async def async_setup_entry( ) -class VeraBinarySensor(VeraEntity[veraApi.VeraBinarySensor], BinarySensorEntity): +class VeraBinarySensor(VeraDevice[veraApi.VeraBinarySensor], BinarySensorEntity): """Representation of a Vera Binary Sensor.""" _attr_is_on = False @@ -39,7 +39,7 @@ class VeraBinarySensor(VeraEntity[veraApi.VeraBinarySensor], BinarySensorEntity) self, vera_device: veraApi.VeraBinarySensor, controller_data: ControllerData ) -> None: """Initialize the binary_sensor.""" - VeraEntity.__init__(self, vera_device, controller_data) + VeraDevice.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) def update(self) -> None: diff --git a/homeassistant/components/vera/climate.py b/homeassistant/components/vera/climate.py index 01fe26be6bc..79a6c2566e0 100644 --- a/homeassistant/components/vera/climate.py +++ b/homeassistant/components/vera/climate.py @@ -19,8 +19,8 @@ from homeassistant.const import ATTR_TEMPERATURE, Platform, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import VeraDevice from .common import ControllerData, get_controller_data -from .entity import VeraEntity FAN_OPERATION_LIST = [FAN_ON, FAN_AUTO] @@ -43,7 +43,7 @@ async def async_setup_entry( ) -class VeraThermostat(VeraEntity[veraApi.VeraThermostat], ClimateEntity): +class VeraThermostat(VeraDevice[veraApi.VeraThermostat], ClimateEntity): """Representation of a Vera Thermostat.""" _attr_hvac_modes = SUPPORT_HVAC @@ -60,7 +60,7 @@ class VeraThermostat(VeraEntity[veraApi.VeraThermostat], ClimateEntity): self, vera_device: veraApi.VeraThermostat, controller_data: ControllerData ) -> None: """Initialize the Vera device.""" - VeraEntity.__init__(self, vera_device, controller_data) + VeraDevice.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) @property diff --git a/homeassistant/components/vera/config_flow.py b/homeassistant/components/vera/config_flow.py index f2b182cc270..181849f46a1 100644 --- a/homeassistant/components/vera/config_flow.py +++ b/homeassistant/components/vera/config_flow.py @@ -76,6 +76,10 @@ def options_data(user_input: dict[str, str]) -> dict[str, list[int]]: class OptionsFlowHandler(OptionsFlow): """Options for the component.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Init object.""" + self.config_entry = config_entry + async def async_step_init( self, user_input: dict[str, str] | None = None, @@ -100,7 +104,7 @@ class VeraFlowHandler(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler() + return OptionsFlowHandler(config_entry) async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -123,7 +127,7 @@ class VeraFlowHandler(ConfigFlow, domain=DOMAIN): ), ) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, config: dict[str, Any]) -> ConfigFlowResult: """Handle a flow initialized by import.""" # If there are entities with the legacy unique_id, then this imported config @@ -142,7 +146,7 @@ class VeraFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_finish( { - **import_data, + **config, CONF_SOURCE: SOURCE_IMPORT, 
CONF_LEGACY_UNIQUE_ID: use_legacy_unique_id, } diff --git a/homeassistant/components/vera/cover.py b/homeassistant/components/vera/cover.py index b5b57f43c0c..542680925f2 100644 --- a/homeassistant/components/vera/cover.py +++ b/homeassistant/components/vera/cover.py @@ -12,8 +12,8 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import VeraDevice from .common import ControllerData, get_controller_data -from .entity import VeraEntity async def async_setup_entry( @@ -32,14 +32,14 @@ async def async_setup_entry( ) -class VeraCover(VeraEntity[veraApi.VeraCurtain], CoverEntity): +class VeraCover(VeraDevice[veraApi.VeraCurtain], CoverEntity): """Representation a Vera Cover.""" def __init__( self, vera_device: veraApi.VeraCurtain, controller_data: ControllerData ) -> None: """Initialize the Vera device.""" - VeraEntity.__init__(self, vera_device, controller_data) + VeraDevice.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) @property @@ -61,11 +61,10 @@ class VeraCover(VeraEntity[veraApi.VeraCurtain], CoverEntity): self.schedule_update_ha_state() @property - def is_closed(self) -> bool | None: + def is_closed(self) -> bool: """Return if the cover is closed.""" if self.current_cover_position is not None: return self.current_cover_position == 0 - return None def open_cover(self, **kwargs: Any) -> None: """Open the cover.""" diff --git a/homeassistant/components/vera/entity.py b/homeassistant/components/vera/entity.py deleted file mode 100644 index 84e21e54983..00000000000 --- a/homeassistant/components/vera/entity.py +++ /dev/null @@ -1,103 +0,0 @@ -"""Support for Vera devices.""" - -from __future__ import annotations - -import logging -from typing import Any - -import pyvera as veraApi - -from homeassistant.const import ( - ATTR_ARMED, - ATTR_BATTERY_LEVEL, - ATTR_LAST_TRIP_TIME, - ATTR_TRIPPED, -) -from homeassistant.helpers.entity import Entity -from homeassistant.util import slugify -from homeassistant.util.dt import utc_from_timestamp - -from .common import ControllerData -from .const import CONF_LEGACY_UNIQUE_ID, VERA_ID_FORMAT - -_LOGGER = logging.getLogger(__name__) - - -class VeraEntity[_DeviceTypeT: veraApi.VeraDevice](Entity): - """Representation of a Vera device entity.""" - - def __init__( - self, vera_device: _DeviceTypeT, controller_data: ControllerData - ) -> None: - """Initialize the device.""" - self.vera_device = vera_device - self.controller = controller_data.controller - - self._name = self.vera_device.name - # Append device id to prevent name clashes in HA. 
- self.vera_id = VERA_ID_FORMAT.format( - slugify(vera_device.name), vera_device.vera_device_id - ) - - if controller_data.config_entry.data.get(CONF_LEGACY_UNIQUE_ID): - self._unique_id = str(self.vera_device.vera_device_id) - else: - self._unique_id = f"vera_{controller_data.config_entry.unique_id}_{self.vera_device.vera_device_id}" - - async def async_added_to_hass(self) -> None: - """Subscribe to updates.""" - self.controller.register(self.vera_device, self._update_callback) - - def _update_callback(self, _device: _DeviceTypeT) -> None: - """Update the state.""" - self.schedule_update_ha_state(True) - - def update(self): - """Force a refresh from the device if the device is unavailable.""" - refresh_needed = self.vera_device.should_poll or not self.available - _LOGGER.debug("%s: update called (refresh=%s)", self._name, refresh_needed) - if refresh_needed: - self.vera_device.refresh() - - @property - def name(self) -> str: - """Return the name of the device.""" - return self._name - - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes of the device.""" - attr = {} - - if self.vera_device.has_battery: - attr[ATTR_BATTERY_LEVEL] = self.vera_device.battery_level - - if self.vera_device.is_armable: - armed = self.vera_device.is_armed - attr[ATTR_ARMED] = "True" if armed else "False" - - if self.vera_device.is_trippable: - if (last_tripped := self.vera_device.last_trip) is not None: - utc_time = utc_from_timestamp(int(last_tripped)) - attr[ATTR_LAST_TRIP_TIME] = utc_time.isoformat() - else: - attr[ATTR_LAST_TRIP_TIME] = None - tripped = self.vera_device.is_tripped - attr[ATTR_TRIPPED] = "True" if tripped else "False" - - attr["Vera Device Id"] = self.vera_device.vera_device_id - - return attr - - @property - def available(self): - """If device communications have failed return false.""" - return not self.vera_device.comm_failure - - @property - def unique_id(self) -> str: - """Return a unique ID. - - The Vera assigns a unique and immutable ID number to each device. - """ - return self._unique_id diff --git a/homeassistant/components/vera/light.py b/homeassistant/components/vera/light.py index e512676de9a..86e5dfa6a91 100644 --- a/homeassistant/components/vera/light.py +++ b/homeassistant/components/vera/light.py @@ -19,8 +19,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.color as color_util +from . 
import VeraDevice from .common import ControllerData, get_controller_data -from .entity import VeraEntity async def async_setup_entry( @@ -39,7 +39,7 @@ async def async_setup_entry( ) -class VeraLight(VeraEntity[veraApi.VeraDimmer], LightEntity): +class VeraLight(VeraDevice[veraApi.VeraDimmer], LightEntity): """Representation of a Vera Light, including dimmable.""" _attr_is_on = False @@ -50,7 +50,7 @@ class VeraLight(VeraEntity[veraApi.VeraDimmer], LightEntity): self, vera_device: veraApi.VeraDimmer, controller_data: ControllerData ) -> None: """Initialize the light.""" - VeraEntity.__init__(self, vera_device, controller_data) + VeraDevice.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) @property diff --git a/homeassistant/components/vera/lock.py b/homeassistant/components/vera/lock.py index 18f0b9de3e2..01509aa8388 100644 --- a/homeassistant/components/vera/lock.py +++ b/homeassistant/components/vera/lock.py @@ -12,8 +12,8 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import VeraDevice from .common import ControllerData, get_controller_data -from .entity import VeraEntity ATTR_LAST_USER_NAME = "changed_by_name" ATTR_LOW_BATTERY = "low_battery" @@ -35,14 +35,14 @@ async def async_setup_entry( ) -class VeraLock(VeraEntity[veraApi.VeraLock], LockEntity): +class VeraLock(VeraDevice[veraApi.VeraLock], LockEntity): """Representation of a Vera lock.""" def __init__( self, vera_device: veraApi.VeraLock, controller_data: ControllerData ) -> None: """Initialize the Vera device.""" - VeraEntity.__init__(self, vera_device, controller_data) + VeraDevice.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) def lock(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/vera/manifest.json b/homeassistant/components/vera/manifest.json index 211162bcbdc..17b7144fc3d 100644 --- a/homeassistant/components/vera/manifest.json +++ b/homeassistant/components/vera/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/vera", "iot_class": "local_polling", "loggers": ["pyvera"], - "requirements": ["pyvera==0.3.15"] + "requirements": ["pyvera==0.3.13"] } diff --git a/homeassistant/components/vera/sensor.py b/homeassistant/components/vera/sensor.py index 95f1fa0bd89..97e6d6d6314 100644 --- a/homeassistant/components/vera/sensor.py +++ b/homeassistant/components/vera/sensor.py @@ -23,8 +23,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import VeraDevice from .common import ControllerData, get_controller_data -from .entity import VeraEntity SCAN_INTERVAL = timedelta(seconds=5) @@ -45,7 +45,7 @@ async def async_setup_entry( ) -class VeraSensor(VeraEntity[veraApi.VeraSensor], SensorEntity): +class VeraSensor(VeraDevice[veraApi.VeraSensor], SensorEntity): """Representation of a Vera Sensor.""" def __init__( @@ -54,7 +54,7 @@ class VeraSensor(VeraEntity[veraApi.VeraSensor], SensorEntity): """Initialize the sensor.""" self._temperature_units: str | None = None self.last_changed_time = None - VeraEntity.__init__(self, vera_device, controller_data) + VeraDevice.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) if self.vera_device.category == veraApi.CATEGORY_TEMPERATURE_SENSOR: self._attr_device_class = SensorDeviceClass.TEMPERATURE diff --git a/homeassistant/components/vera/switch.py b/homeassistant/components/vera/switch.py index ad7fbe68458..3e594685d6b 100644 --- a/homeassistant/components/vera/switch.py +++ b/homeassistant/components/vera/switch.py @@ -12,8 +12,8 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import VeraDevice from .common import ControllerData, get_controller_data -from .entity import VeraEntity async def async_setup_entry( @@ -32,7 +32,7 @@ async def async_setup_entry( ) -class VeraSwitch(VeraEntity[veraApi.VeraSwitch], SwitchEntity): +class VeraSwitch(VeraDevice[veraApi.VeraSwitch], SwitchEntity): """Representation of a Vera Switch.""" _attr_is_on = False @@ -41,7 +41,7 @@ class VeraSwitch(VeraEntity[veraApi.VeraSwitch], SwitchEntity): self, vera_device: veraApi.VeraSwitch, controller_data: ControllerData ) -> None: """Initialize the Vera device.""" - VeraEntity.__init__(self, vera_device, controller_data) + VeraDevice.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) def turn_on(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/verisure/__init__.py b/homeassistant/components/verisure/__init__.py index e635ab712be..9e5f0ca2703 100644 --- a/homeassistant/components/verisure/__init__.py +++ b/homeassistant/components/verisure/__init__.py @@ -12,6 +12,7 @@ from homeassistant.const import CONF_EMAIL, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import entity_registry as er +import homeassistant.helpers.config_validation as cv from homeassistant.helpers.storage import STORAGE_DIR from .const import CONF_LOCK_DEFAULT_CODE, DOMAIN, LOGGER @@ -26,6 +27,8 @@ PLATFORMS = [ Platform.SWITCH, ] +CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Verisure from a config entry.""" @@ -108,6 +111,6 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.config_entries.async_update_entry(entry, version=2) - LOGGER.debug("Migration to version %s successful", entry.version) + LOGGER.info("Migration to version %s successful", entry.version) return True diff --git a/homeassistant/components/verisure/alarm_control_panel.py b/homeassistant/components/verisure/alarm_control_panel.py index 5f34b587163..fc7e7551145 100644 --- a/homeassistant/components/verisure/alarm_control_panel.py +++ b/homeassistant/components/verisure/alarm_control_panel.py @@ -7,10 +7,10 @@ import asyncio 
from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, - AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_ALARM_ARMING, STATE_ALARM_DISARMING from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -86,7 +86,7 @@ class VerisureAlarm( async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - self._attr_alarm_state = AlarmControlPanelState.DISARMING + self._attr_state = STATE_ALARM_DISARMING self.async_write_ha_state() await self._async_set_arm_state( "DISARMED", self.coordinator.verisure.disarm(code) @@ -94,7 +94,7 @@ class VerisureAlarm( async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - self._attr_alarm_state = AlarmControlPanelState.ARMING + self._attr_state = STATE_ALARM_ARMING self.async_write_ha_state() await self._async_set_arm_state( "ARMED_HOME", self.coordinator.verisure.arm_home(code) @@ -102,7 +102,7 @@ class VerisureAlarm( async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - self._attr_alarm_state = AlarmControlPanelState.ARMING + self._attr_state = STATE_ALARM_ARMING self.async_write_ha_state() await self._async_set_arm_state( "ARMED_AWAY", self.coordinator.verisure.arm_away(code) @@ -111,7 +111,7 @@ class VerisureAlarm( @callback def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" - self._attr_alarm_state = ALARM_STATE_TO_HA.get( + self._attr_state = ALARM_STATE_TO_HA.get( self.coordinator.data["alarm"]["statusType"] ) self._attr_changed_by = self.coordinator.data["alarm"].get("name") diff --git a/homeassistant/components/verisure/camera.py b/homeassistant/components/verisure/camera.py index 70cd436d24c..72f5ab93c70 100644 --- a/homeassistant/components/verisure/camera.py +++ b/homeassistant/components/verisure/camera.py @@ -33,7 +33,7 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( SERVICE_CAPTURE_SMARTCAM, - None, + {}, VerisureSmartcam.capture_smartcam.__name__, ) @@ -110,7 +110,9 @@ class VerisureSmartcam(CoordinatorEntity[VerisureDataUpdateCoordinator], Camera) return LOGGER.debug("Download new image %s", new_image_id) - new_image_path = os.path.join(self._directory_path, f"{new_image_id}.jpg") + new_image_path = os.path.join( + self._directory_path, "{}{}".format(new_image_id, ".jpg") + ) new_image_url = new_image["contentUrl"] self.coordinator.verisure.download_image(new_image_url, new_image_path) LOGGER.debug("Old image_id=%s", self._image_id) @@ -121,7 +123,9 @@ class VerisureSmartcam(CoordinatorEntity[VerisureDataUpdateCoordinator], Camera) def delete_image(self, _=None) -> None: """Delete an old image.""" - remove_image = os.path.join(self._directory_path, f"{self._image_id}.jpg") + remove_image = os.path.join( + self._directory_path, "{}{}".format(self._image_id, ".jpg") + ) try: os.remove(remove_image) LOGGER.debug("Deleting old image %s", remove_image) diff --git a/homeassistant/components/verisure/config_flow.py b/homeassistant/components/verisure/config_flow.py index 0f1088ccb80..ccf74cd6791 100644 --- a/homeassistant/components/verisure/config_flow.py +++ b/homeassistant/components/verisure/config_flow.py @@ -3,7 +3,7 @@ from __future__ import 
annotations from collections.abc import Mapping -from typing import Any +from typing import Any, cast from verisure import ( Error as VerisureError, @@ -38,16 +38,15 @@ class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 2 email: str + entry: ConfigEntry password: str verisure: Verisure @staticmethod @callback - def async_get_options_flow( - config_entry: ConfigEntry, - ) -> VerisureOptionsFlowHandler: + def async_get_options_flow(config_entry: ConfigEntry) -> VerisureOptionsFlowHandler: """Get the options flow for this handler.""" - return VerisureOptionsFlowHandler() + return VerisureOptionsFlowHandler(config_entry) async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -180,6 +179,10 @@ class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with Verisure.""" + self.entry = cast( + ConfigEntry, + self.hass.config_entries.async_get_entry(self.context["entry_id"]), + ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -227,21 +230,25 @@ class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN): LOGGER.debug("Unexpected response from Verisure, %s", ex) errors["base"] = "unknown" else: - return self.async_update_reload_and_abort( - self._get_reauth_entry(), - data_updates={ + data = self.entry.data.copy() + self.hass.config_entries.async_update_entry( + self.entry, + data={ + **data, CONF_EMAIL: user_input[CONF_EMAIL], CONF_PASSWORD: user_input[CONF_PASSWORD], }, ) + self.hass.async_create_task( + self.hass.config_entries.async_reload(self.entry.entry_id) + ) + return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema( { - vol.Required( - CONF_EMAIL, default=self._get_reauth_entry().data[CONF_EMAIL] - ): str, + vol.Required(CONF_EMAIL, default=self.entry.data[CONF_EMAIL]): str, vol.Required(CONF_PASSWORD): str, } ), @@ -267,13 +274,18 @@ class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN): LOGGER.debug("Unexpected response from Verisure, %s", ex) errors["base"] = "unknown" else: - return self.async_update_reload_and_abort( - self._get_reauth_entry(), - data_updates={ + self.hass.config_entries.async_update_entry( + self.entry, + data={ + **self.entry.data, CONF_EMAIL: self.email, CONF_PASSWORD: self.password, }, ) + self.hass.async_create_task( + self.hass.config_entries.async_reload(self.entry.entry_id) + ) + return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_mfa", @@ -292,6 +304,10 @@ class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN): class VerisureOptionsFlowHandler(OptionsFlow): """Handle Verisure options.""" + def __init__(self, entry: ConfigEntry) -> None: + """Initialize Verisure options flow.""" + self.entry = entry + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -308,7 +324,7 @@ class VerisureOptionsFlowHandler(OptionsFlow): vol.Optional( CONF_LOCK_CODE_DIGITS, description={ - "suggested_value": self.config_entry.options.get( + "suggested_value": self.entry.options.get( CONF_LOCK_CODE_DIGITS, DEFAULT_LOCK_CODE_DIGITS ) }, diff --git a/homeassistant/components/verisure/const.py b/homeassistant/components/verisure/const.py index 4afb93d957f..5b1aa1a0740 100644 --- a/homeassistant/components/verisure/const.py +++ b/homeassistant/components/verisure/const.py @@ -3,7 +3,12 @@ from datetime import timedelta import logging 
-from homeassistant.components.alarm_control_panel import AlarmControlPanelState +from homeassistant.const import ( + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_DISARMED, + STATE_ALARM_PENDING, +) DOMAIN = "verisure" @@ -38,8 +43,8 @@ DEVICE_TYPE_NAME = { } ALARM_STATE_TO_HA = { - "DISARMED": AlarmControlPanelState.DISARMED, - "ARMED_HOME": AlarmControlPanelState.ARMED_HOME, - "ARMED_AWAY": AlarmControlPanelState.ARMED_AWAY, - "PENDING": AlarmControlPanelState.PENDING, + "DISARMED": STATE_ALARM_DISARMED, + "ARMED_HOME": STATE_ALARM_ARMED_HOME, + "ARMED_AWAY": STATE_ALARM_ARMED_AWAY, + "PENDING": STATE_ALARM_PENDING, } diff --git a/homeassistant/components/verisure/icons.json b/homeassistant/components/verisure/icons.json index 809cf004a3f..35f6960b1e8 100644 --- a/homeassistant/components/verisure/icons.json +++ b/homeassistant/components/verisure/icons.json @@ -1,13 +1,7 @@ { "services": { - "capture_smartcam": { - "service": "mdi:camera" - }, - "enable_autolock": { - "service": "mdi:lock" - }, - "disable_autolock": { - "service": "mdi:lock-off" - } + "capture_smartcam": "mdi:camera", + "enable_autolock": "mdi:lock", + "disable_autolock": "mdi:lock-off" } } diff --git a/homeassistant/components/verisure/lock.py b/homeassistant/components/verisure/lock.py index 87f5c53880e..da2bc2ced2b 100644 --- a/homeassistant/components/verisure/lock.py +++ b/homeassistant/components/verisure/lock.py @@ -7,9 +7,9 @@ from typing import Any from verisure import Error as VerisureError -from homeassistant.components.lock import LockEntity, LockState +from homeassistant.components.lock import LockEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_CODE +from homeassistant.const import ATTR_CODE, STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import ( @@ -41,12 +41,12 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( SERVICE_DISABLE_AUTOLOCK, - None, + {}, VerisureDoorlock.disable_autolock.__name__, ) platform.async_register_entity_service( SERVICE_ENABLE_AUTOLOCK, - None, + {}, VerisureDoorlock.enable_autolock.__name__, ) @@ -130,19 +130,19 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt """Send unlock command.""" code = kwargs.get(ATTR_CODE) if code: - await self.async_set_lock_state(code, LockState.UNLOCKED) + await self.async_set_lock_state(code, STATE_UNLOCKED) async def async_lock(self, **kwargs: Any) -> None: """Send lock command.""" code = kwargs.get(ATTR_CODE) if code: - await self.async_set_lock_state(code, LockState.LOCKED) + await self.async_set_lock_state(code, STATE_LOCKED) - async def async_set_lock_state(self, code: str, state: LockState) -> None: + async def async_set_lock_state(self, code: str, state: str) -> None: """Send set lock state command.""" command = ( self.coordinator.verisure.door_lock(self.serial_number, code) - if state == LockState.LOCKED + if state == STATE_LOCKED else self.coordinator.verisure.door_unlock(self.serial_number, code) ) lock_request = await self.hass.async_add_executor_job( @@ -151,7 +151,7 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt ) LOGGER.debug("Verisure doorlock %s", state) transaction_id = lock_request.get("data", {}).get(command["operationName"]) - target_state = "LOCKED" if state == LockState.LOCKED else "UNLOCKED" + 
target_state = "LOCKED" if state == STATE_LOCKED else "UNLOCKED" lock_status = None attempts = 0 while lock_status != "OK": diff --git a/homeassistant/components/verisure/manifest.json b/homeassistant/components/verisure/manifest.json index 153b2ba4006..f6630f0c6e5 100644 --- a/homeassistant/components/verisure/manifest.json +++ b/homeassistant/components/verisure/manifest.json @@ -12,5 +12,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["verisure"], - "requirements": ["vsure==2.6.7"] + "requirements": ["vsure==2.6.6"] } diff --git a/homeassistant/components/versasense/__init__.py b/homeassistant/components/versasense/__init__.py index ed4a8edf32c..f209234f8c2 100644 --- a/homeassistant/components/versasense/__init__.py +++ b/homeassistant/components/versasense/__init__.py @@ -55,7 +55,7 @@ async def _configure_entities(hass, config, consumer): switch_info = {} for mac, device in devices.items(): - _LOGGER.debug("Device connected: %s %s", device.name, mac) + _LOGGER.info("Device connected: %s %s", device.name, mac) hass.data[DOMAIN][mac] = {} for peripheral_id, peripheral in device.peripherals.items(): diff --git a/homeassistant/components/versasense/sensor.py b/homeassistant/components/versasense/sensor.py index 4c861bf5787..59d092ccdc1 100644 --- a/homeassistant/components/versasense/sensor.py +++ b/homeassistant/components/versasense/sensor.py @@ -30,7 +30,7 @@ async def async_setup_platform( ) -> None: """Set up the sensor platform.""" if discovery_info is None: - return + return None consumer = hass.data[DOMAIN][KEY_CONSUMER] diff --git a/homeassistant/components/versasense/switch.py b/homeassistant/components/versasense/switch.py index 10bca79e536..195045882ff 100644 --- a/homeassistant/components/versasense/switch.py +++ b/homeassistant/components/versasense/switch.py @@ -33,7 +33,7 @@ async def async_setup_platform( ) -> None: """Set up actuator platform.""" if discovery_info is None: - return + return None consumer = hass.data[DOMAIN][KEY_CONSUMER] diff --git a/homeassistant/components/vesync/__init__.py b/homeassistant/components/vesync/__init__.py index b6f263f3037..7dceb1b3f8f 100644 --- a/homeassistant/components/vesync/__init__.py +++ b/homeassistant/components/vesync/__init__.py @@ -7,6 +7,7 @@ from pyvesync import VeSync from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from .common import async_process_devices @@ -25,6 +26,8 @@ PLATFORMS = [Platform.FAN, Platform.LIGHT, Platform.SENSOR, Platform.SWITCH] _LOGGER = logging.getLogger(__name__) +CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) + async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up Vesync as config entry.""" @@ -137,6 +140,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) if unload_ok: - hass.data.pop(DOMAIN) + hass.data[DOMAIN].pop(entry.entry_id) return unload_ok diff --git a/homeassistant/components/vesync/common.py b/homeassistant/components/vesync/common.py index 5f7b2a3a29e..33fc88f32d6 100644 --- a/homeassistant/components/vesync/common.py +++ b/homeassistant/components/vesync/common.py @@ -1,8 +1,14 @@ """Common utilities for VeSync 
Component.""" import logging +from typing import Any -from .const import VS_FANS, VS_LIGHTS, VS_SENSORS, VS_SWITCHES +from pyvesync.vesyncbasedevice import VeSyncBaseDevice + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity, ToggleEntity + +from .const import DOMAIN, VS_FANS, VS_LIGHTS, VS_SENSORS, VS_SWITCHES _LOGGER = logging.getLogger(__name__) @@ -21,17 +27,17 @@ async def async_process_devices(hass, manager): devices[VS_FANS].extend(manager.fans) # Expose fan sensors separately devices[VS_SENSORS].extend(manager.fans) - _LOGGER.debug("%d VeSync fans found", len(manager.fans)) + _LOGGER.info("%d VeSync fans found", len(manager.fans)) if manager.bulbs: devices[VS_LIGHTS].extend(manager.bulbs) - _LOGGER.debug("%d VeSync lights found", len(manager.bulbs)) + _LOGGER.info("%d VeSync lights found", len(manager.bulbs)) if manager.outlets: devices[VS_SWITCHES].extend(manager.outlets) # Expose outlets' voltage, power & energy usage as separate sensors devices[VS_SENSORS].extend(manager.outlets) - _LOGGER.debug("%d VeSync outlets found", len(manager.outlets)) + _LOGGER.info("%d VeSync outlets found", len(manager.outlets)) if manager.switches: for switch in manager.switches: @@ -39,6 +45,65 @@ async def async_process_devices(hass, manager): devices[VS_SWITCHES].append(switch) else: devices[VS_LIGHTS].append(switch) - _LOGGER.debug("%d VeSync switches found", len(manager.switches)) + _LOGGER.info("%d VeSync switches found", len(manager.switches)) return devices + + +class VeSyncBaseEntity(Entity): + """Base class for VeSync Entity Representations.""" + + _attr_has_entity_name = True + + def __init__(self, device: VeSyncBaseDevice) -> None: + """Initialize the VeSync device.""" + self.device = device + self._attr_unique_id = self.base_unique_id + + @property + def base_unique_id(self): + """Return the ID of this device.""" + # The unique_id property may be overridden in subclasses, such as in + # sensors. Maintaining base_unique_id allows us to group related + # entities under a single device. 
+ if isinstance(self.device.sub_device_no, int): + return f"{self.device.cid}{self.device.sub_device_no!s}" + return self.device.cid + + @property + def available(self) -> bool: + """Return True if device is available.""" + return self.device.connection_status == "online" + + @property + def device_info(self) -> DeviceInfo: + """Return device information.""" + return DeviceInfo( + identifiers={(DOMAIN, self.base_unique_id)}, + name=self.device.device_name, + model=self.device.device_type, + manufacturer="VeSync", + sw_version=self.device.current_firm_version, + ) + + def update(self) -> None: + """Update vesync device.""" + self.device.update() + + +class VeSyncDevice(VeSyncBaseEntity, ToggleEntity): + """Base class for VeSync Device Representations.""" + + @property + def details(self): + """Provide access to the device details dictionary.""" + return self.device.details + + @property + def is_on(self) -> bool: + """Return True if device is on.""" + return self.device.device_status == "on" + + def turn_off(self, **kwargs: Any) -> None: + """Turn the device off.""" + self.device.turn_off() diff --git a/homeassistant/components/vesync/config_flow.py b/homeassistant/components/vesync/config_flow.py index 6115cb9ee76..15f9f548e35 100644 --- a/homeassistant/components/vesync/config_flow.py +++ b/homeassistant/components/vesync/config_flow.py @@ -1,42 +1,40 @@ """Config flow utilities.""" -from typing import Any +from collections import OrderedDict from pyvesync import VeSync import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback -import homeassistant.helpers.config_validation as cv from .const import DOMAIN -DATA_SCHEMA = vol.Schema( - { - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - } -) - class VeSyncFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 + def __init__(self) -> None: + """Instantiate config flow.""" + self._username = None + self._password = None + self.data_schema = OrderedDict() + self.data_schema[vol.Required(CONF_USERNAME)] = str + self.data_schema[vol.Required(CONF_PASSWORD)] = str + @callback - def _show_form(self, errors: dict[str, str] | None = None) -> ConfigFlowResult: + def _show_form(self, errors=None): """Show form to the user.""" return self.async_show_form( step_id="user", - data_schema=DATA_SCHEMA, + data_schema=vol.Schema(self.data_schema), errors=errors if errors else {}, ) - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_user(self, user_input=None): """Handle a flow start.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") @@ -44,15 +42,15 @@ class VeSyncFlowHandler(ConfigFlow, domain=DOMAIN): if not user_input: return self._show_form() - username = user_input[CONF_USERNAME] - password = user_input[CONF_PASSWORD] + self._username = user_input[CONF_USERNAME] + self._password = user_input[CONF_PASSWORD] - manager = VeSync(username, password) + manager = VeSync(self._username, self._password) login = await self.hass.async_add_executor_job(manager.login) if not login: return self._show_form(errors={"base": "invalid_auth"}) return self.async_create_entry( - title=username, - data={CONF_USERNAME: username, CONF_PASSWORD: password}, + title=self._username, + data={CONF_USERNAME: 
self._username, CONF_PASSWORD: self._password}, ) diff --git a/homeassistant/components/vesync/const.py b/homeassistant/components/vesync/const.py index 48215819ce5..483ab89b02e 100644 --- a/homeassistant/components/vesync/const.py +++ b/homeassistant/components/vesync/const.py @@ -23,7 +23,6 @@ DEV_TYPE_TO_HA = { "Core300S": "fan", "Core400S": "fan", "Core600S": "fan", - "EverestAir": "fan", "Vital200S": "fan", "Vital100S": "fan", "ESD16": "walldimmer", @@ -41,7 +40,6 @@ SKU_TO_BASE_DEVICE = { "LAP-C202S-WUSR": "Core200S", # Alt ID Model Core200S "Core300S": "Core300S", "LAP-C301S-WJP": "Core300S", # Alt ID Model Core300S - "LAP-C301S-WAAA": "Core300S", # Alt ID Model Core300S "Core400S": "Core400S", "LAP-C401S-WJP": "Core400S", # Alt ID Model Core400S "LAP-C401S-WUSR": "Core400S", # Alt ID Model Core400S @@ -56,15 +54,9 @@ SKU_TO_BASE_DEVICE = { "LAP-V201S-WEU": "Vital200S", # Alt ID Model Vital200S "LAP-V201S-WUS": "Vital200S", # Alt ID Model Vital200S "LAP-V201-AUSR": "Vital200S", # Alt ID Model Vital200S - "LAP-V201S-AUSR": "Vital200S", # Alt ID Model Vital200S "Vital100S": "Vital100S", "LAP-V102S-WUS": "Vital100S", # Alt ID Model Vital100S "LAP-V102S-AASR": "Vital100S", # Alt ID Model Vital100S "LAP-V102S-WEU": "Vital100S", # Alt ID Model Vital100S "LAP-V102S-WUK": "Vital100S", # Alt ID Model Vital100S - "EverestAir": "EverestAir", - "LAP-EL551S-AUS": "EverestAir", # Alt ID Model EverestAir - "LAP-EL551S-AEUR": "EverestAir", # Alt ID Model EverestAir - "LAP-EL551S-WEU": "EverestAir", # Alt ID Model EverestAir - "LAP-EL551S-WUS": "EverestAir", # Alt ID Model EverestAir } diff --git a/homeassistant/components/vesync/diagnostics.py b/homeassistant/components/vesync/diagnostics.py index e1c092b1e32..9af8a7fed67 100644 --- a/homeassistant/components/vesync/diagnostics.py +++ b/homeassistant/components/vesync/diagnostics.py @@ -12,8 +12,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceEntry +from .common import VeSyncBaseDevice from .const import DOMAIN, VS_MANAGER -from .entity import VeSyncBaseDevice KEYS_TO_REDACT = {"manager", "uuid", "mac_id"} diff --git a/homeassistant/components/vesync/entity.py b/homeassistant/components/vesync/entity.py deleted file mode 100644 index fd636561e9e..00000000000 --- a/homeassistant/components/vesync/entity.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Common entity for VeSync Component.""" - -from typing import Any - -from pyvesync.vesyncbasedevice import VeSyncBaseDevice - -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity, ToggleEntity - -from .const import DOMAIN - - -class VeSyncBaseEntity(Entity): - """Base class for VeSync Entity Representations.""" - - _attr_has_entity_name = True - - def __init__(self, device: VeSyncBaseDevice) -> None: - """Initialize the VeSync device.""" - self.device = device - self._attr_unique_id = self.base_unique_id - - @property - def base_unique_id(self): - """Return the ID of this device.""" - # The unique_id property may be overridden in subclasses, such as in - # sensors. Maintaining base_unique_id allows us to group related - # entities under a single device. 
- if isinstance(self.device.sub_device_no, int): - return f"{self.device.cid}{self.device.sub_device_no!s}" - return self.device.cid - - @property - def available(self) -> bool: - """Return True if device is available.""" - return self.device.connection_status == "online" - - @property - def device_info(self) -> DeviceInfo: - """Return device information.""" - return DeviceInfo( - identifiers={(DOMAIN, self.base_unique_id)}, - name=self.device.device_name, - model=self.device.device_type, - manufacturer="VeSync", - sw_version=self.device.current_firm_version, - ) - - def update(self) -> None: - """Update vesync device.""" - self.device.update() - - -class VeSyncDevice(VeSyncBaseEntity, ToggleEntity): - """Base class for VeSync Device Representations.""" - - @property - def details(self): - """Provide access to the device details dictionary.""" - return self.device.details - - @property - def is_on(self) -> bool: - """Return True if device is on.""" - return self.device.device_status == "on" - - def turn_off(self, **kwargs: Any) -> None: - """Turn the device off.""" - self.device.turn_off() diff --git a/homeassistant/components/vesync/fan.py b/homeassistant/components/vesync/fan.py index 098a17e90f0..6272c033b4f 100644 --- a/homeassistant/components/vesync/fan.py +++ b/homeassistant/components/vesync/fan.py @@ -17,15 +17,14 @@ from homeassistant.util.percentage import ( ) from homeassistant.util.scaling import int_states_in_range +from .common import VeSyncDevice from .const import DEV_TYPE_TO_HA, DOMAIN, SKU_TO_BASE_DEVICE, VS_DISCOVERY, VS_FANS -from .entity import VeSyncDevice _LOGGER = logging.getLogger(__name__) FAN_MODE_AUTO = "auto" FAN_MODE_SLEEP = "sleep" FAN_MODE_PET = "pet" -FAN_MODE_TURBO = "turbo" PRESET_MODES = { "LV-PUR131S": [FAN_MODE_AUTO, FAN_MODE_SLEEP], @@ -33,7 +32,6 @@ PRESET_MODES = { "Core300S": [FAN_MODE_AUTO, FAN_MODE_SLEEP], "Core400S": [FAN_MODE_AUTO, FAN_MODE_SLEEP], "Core600S": [FAN_MODE_AUTO, FAN_MODE_SLEEP], - "EverestAir": [FAN_MODE_AUTO, FAN_MODE_SLEEP, FAN_MODE_TURBO], "Vital200S": [FAN_MODE_AUTO, FAN_MODE_SLEEP, FAN_MODE_PET], "Vital100S": [FAN_MODE_AUTO, FAN_MODE_SLEEP, FAN_MODE_PET], } @@ -43,7 +41,6 @@ SPEED_RANGE = { # off is not included "Core300S": (1, 3), "Core400S": (1, 4), "Core600S": (1, 4), - "EverestAir": (1, 3), "Vital200S": (1, 4), "Vital100S": (1, 4), } @@ -87,15 +84,8 @@ def _setup_entities(devices, async_add_entities): class VeSyncFanHA(VeSyncDevice, FanEntity): """Representation of a VeSync fan.""" - _attr_supported_features = ( - FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) + _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE _attr_name = None - _attr_translation_key = "vesync" - _enable_turn_on_off_backwards_compatibility = False def __init__(self, fan) -> None: """Initialize the VeSync fan device.""" @@ -129,7 +119,7 @@ class VeSyncFanHA(VeSyncDevice, FanEntity): @property def preset_mode(self) -> str | None: """Get the current preset mode.""" - if self.smartfan.mode in (FAN_MODE_AUTO, FAN_MODE_SLEEP, FAN_MODE_TURBO): + if self.smartfan.mode in (FAN_MODE_AUTO, FAN_MODE_SLEEP): return self.smartfan.mode return None @@ -196,8 +186,6 @@ class VeSyncFanHA(VeSyncDevice, FanEntity): self.smartfan.sleep_mode() elif preset_mode == FAN_MODE_PET: self.smartfan.pet_mode() - elif preset_mode == FAN_MODE_TURBO: - self.smartfan.turbo_mode() self.schedule_update_ha_state() diff --git a/homeassistant/components/vesync/icons.json 
b/homeassistant/components/vesync/icons.json index e4769acc9a5..a4bf4afd410 100644 --- a/homeassistant/components/vesync/icons.json +++ b/homeassistant/components/vesync/icons.json @@ -1,23 +1,5 @@ { - "entity": { - "fan": { - "vesync": { - "state_attributes": { - "preset_mode": { - "state": { - "auto": "mdi:fan-auto", - "sleep": "mdi:sleep", - "pet": "mdi:paw", - "turbo": "mdi:weather-tornado" - } - } - } - } - } - }, "services": { - "update_devices": { - "service": "mdi:update" - } + "update_devices": "mdi:update" } } diff --git a/homeassistant/components/vesync/light.py b/homeassistant/components/vesync/light.py index 6e449f63394..9b15e635903 100644 --- a/homeassistant/components/vesync/light.py +++ b/homeassistant/components/vesync/light.py @@ -14,8 +14,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from .common import VeSyncDevice from .const import DEV_TYPE_TO_HA, DOMAIN, VS_DISCOVERY, VS_LIGHTS -from .entity import VeSyncDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/vesync/manifest.json b/homeassistant/components/vesync/manifest.json index c5926cc224a..ff3f56dd184 100644 --- a/homeassistant/components/vesync/manifest.json +++ b/homeassistant/components/vesync/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/vesync", "iot_class": "cloud_polling", "loggers": ["pyvesync"], - "requirements": ["pyvesync==2.1.12"] + "requirements": ["pyvesync==2.1.10"] } diff --git a/homeassistant/components/vesync/sensor.py b/homeassistant/components/vesync/sensor.py index 79061ec0c4c..81f42f4c2ee 100644 --- a/homeassistant/components/vesync/sensor.py +++ b/homeassistant/components/vesync/sensor.py @@ -30,8 +30,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType +from .common import VeSyncBaseEntity from .const import DEV_TYPE_TO_HA, DOMAIN, SKU_TO_BASE_DEVICE, VS_DISCOVERY, VS_SENSORS -from .entity import VeSyncBaseEntity _LOGGER = logging.getLogger(__name__) @@ -72,7 +72,6 @@ FILTER_LIFE_SUPPORTED = [ "Core300S", "Core400S", "Core600S", - "EverestAir", "Vital100S", "Vital200S", ] @@ -84,14 +83,7 @@ AIR_QUALITY_SUPPORTED = [ "Vital100S", "Vital200S", ] -PM25_SUPPORTED = [ - "Core300S", - "Core400S", - "Core600S", - "EverestAir", - "Vital100S", - "Vital200S", -] +PM25_SUPPORTED = ["Core300S", "Core400S", "Core600S", "Vital100S", "Vital200S"] SENSORS: tuple[VeSyncSensorEntityDescription, ...] 
= ( VeSyncSensorEntityDescription( diff --git a/homeassistant/components/vesync/strings.json b/homeassistant/components/vesync/strings.json index b6e4e2fd957..5ff0aa58722 100644 --- a/homeassistant/components/vesync/strings.json +++ b/homeassistant/components/vesync/strings.json @@ -42,20 +42,6 @@ "current_voltage": { "name": "Current voltage" } - }, - "fan": { - "vesync": { - "state_attributes": { - "preset_mode": { - "state": { - "auto": "Auto", - "sleep": "Sleep", - "pet": "Pet", - "turbo": "Turbo" - } - } - } - } } }, "services": { diff --git a/homeassistant/components/vesync/switch.py b/homeassistant/components/vesync/switch.py index a162a648ad7..1d0c3472d53 100644 --- a/homeassistant/components/vesync/switch.py +++ b/homeassistant/components/vesync/switch.py @@ -9,8 +9,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from .common import VeSyncDevice from .const import DEV_TYPE_TO_HA, DOMAIN, VS_DISCOVERY, VS_SWITCHES -from .entity import VeSyncDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/viaggiatreno/sensor.py b/homeassistant/components/viaggiatreno/sensor.py index cb652270c69..1ea12ed6a41 100644 --- a/homeassistant/components/viaggiatreno/sensor.py +++ b/homeassistant/components/viaggiatreno/sensor.py @@ -174,7 +174,7 @@ class ViaggiaTrenoSensor(SensorEntity): self._state = NO_INFORMATION_STRING self._unit = "" else: - self._state = f"Error: {res['error']}" + self._state = "Error: {}".format(res["error"]) self._unit = "" else: for i in MONITORED_INFO: diff --git a/homeassistant/components/vicare/__init__.py b/homeassistant/components/vicare/__init__.py index d6b9e4b923a..0c87cd6f4fe 100644 --- a/homeassistant/components/vicare/__init__.py +++ b/homeassistant/components/vicare/__init__.py @@ -15,12 +15,10 @@ from PyViCare.PyViCareUtils import ( PyViCareInvalidCredentialsError, ) -from homeassistant.components.climate import DOMAIN as DOMAIN_CLIMATE from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.storage import STORAGE_DIR from .const import ( @@ -31,7 +29,7 @@ from .const import ( UNSUPPORTED_DEVICES, ) from .types import ViCareDevice -from .utils import get_device, get_device_serial +from .utils import get_device _LOGGER = logging.getLogger(__name__) _TOKEN_FILENAME = "vicare_token.save" @@ -49,10 +47,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except (PyViCareInvalidConfigurationError, PyViCareInvalidCredentialsError) as err: raise ConfigEntryAuthFailed("Authentication failed") from err - for device in hass.data[DOMAIN][entry.entry_id][DEVICE_LIST]: - # Migration can be removed in 2025.4.0 - await async_migrate_devices_and_entities(hass, entry, device) - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True @@ -115,74 +109,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok -async def async_migrate_devices_and_entities( - hass: HomeAssistant, entry: ConfigEntry, device: ViCareDevice -) -> None: - """Migrate old entry.""" - device_registry = dr.async_get(hass) - entity_registry = 
er.async_get(hass) - - gateway_serial: str = device.config.getConfig().serial - device_id = device.config.getId() - device_serial: str | None = await hass.async_add_executor_job( - get_device_serial, device.api - ) - device_model = device.config.getModel() - - old_identifier = gateway_serial - new_identifier = ( - f"{gateway_serial}_{device_serial if device_serial is not None else device_id}" - ) - - # Migrate devices - for device_entry in dr.async_entries_for_config_entry( - device_registry, entry.entry_id - ): - if ( - device_entry.identifiers == {(DOMAIN, old_identifier)} - and device_entry.model == device_model - ): - _LOGGER.debug( - "Migrating device %s to new identifier %s", - device_entry.name, - new_identifier, - ) - device_registry.async_update_device( - device_entry.id, - serial_number=device_serial, - new_identifiers={(DOMAIN, new_identifier)}, - ) - - # Migrate entities - for entity_entry in er.async_entries_for_device( - entity_registry, device_entry.id, True - ): - if entity_entry.unique_id.startswith(new_identifier): - # already correct, nothing to do - continue - unique_id_parts = entity_entry.unique_id.split("-") - # replace old prefix `<gateway_serial>` - # with `<gateway_serial>_<device_serial>` - unique_id_parts[0] = new_identifier - # convert climate entity unique id - # from `<prefix>-<circuit id>` - # to `<prefix>-heating-<circuit id>` - if entity_entry.domain == DOMAIN_CLIMATE: - unique_id_parts[len(unique_id_parts) - 1] = ( - f"{entity_entry.translation_key}-{unique_id_parts[len(unique_id_parts)-1]}" - ) - entity_new_unique_id = "-".join(unique_id_parts) - - _LOGGER.debug( - "Migrating entity %s to new unique id %s", - entity_entry.name, - entity_new_unique_id, - ) - entity_registry.async_update_entity( - entity_id=entity_entry.entity_id, new_unique_id=entity_new_unique_id - ) - - def get_supported_devices( devices: list[PyViCareDeviceConfig], ) -> list[PyViCareDeviceConfig]: diff --git a/homeassistant/components/vicare/binary_sensor.py b/homeassistant/components/vicare/binary_sensor.py index 55f0ab96ed0..2df8a2f06d3 100644 --- a/homeassistant/components/vicare/binary_sensor.py +++ b/homeassistant/components/vicare/binary_sensor.py @@ -10,7 +10,7 @@ import logging from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareHeatingDevice import ( - HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, + HeatingDeviceWithComponent as PyViCareHeatingDeviceWithComponent, ) from PyViCare.PyViCareUtils import ( PyViCareInvalidDataError, @@ -31,13 +31,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity from .types import ViCareDevice, ViCareRequiredKeysMixin -from .utils import ( - get_burners, - get_circuits, - get_compressors, - get_device_serial, - is_supported, -) +from .utils import get_burners, get_circuits, get_compressors, is_supported _LOGGER = logging.getLogger(__name__) @@ -118,38 +112,61 @@ def _build_entities( entities: list[ViCareBinarySensor] = [] for device in device_list: - # add device entities + entities.extend(_build_entities_for_device(device.api, device.config)) entities.extend( - ViCareBinarySensor( - description, - get_device_serial(device.api), - device.config, - device.api, + _build_entities_for_component( + get_circuits(device.api), device.config, CIRCUIT_SENSORS ) - for description in GLOBAL_SENSORS - if is_supported(description.key, description, device.api) ) - # add component entities - for component_list, entity_description_list in ( -
(get_circuits(device.api), CIRCUIT_SENSORS), - (get_burners(device.api), BURNER_SENSORS), - (get_compressors(device.api), COMPRESSOR_SENSORS), - ): - entities.extend( - ViCareBinarySensor( - description, - get_device_serial(device.api), - device.config, - device.api, - component, - ) - for component in component_list - for description in entity_description_list - if is_supported(description.key, description, component) + entities.extend( + _build_entities_for_component( + get_burners(device.api), device.config, BURNER_SENSORS ) + ) + entities.extend( + _build_entities_for_component( + get_compressors(device.api), device.config, COMPRESSOR_SENSORS + ) + ) return entities +def _build_entities_for_device( + device: PyViCareDevice, + device_config: PyViCareDeviceConfig, +) -> list[ViCareBinarySensor]: + """Create device specific ViCare binary sensor entities.""" + + return [ + ViCareBinarySensor( + device, + device_config, + description, + ) + for description in GLOBAL_SENSORS + if is_supported(description.key, description, device) + ] + + +def _build_entities_for_component( + components: list[PyViCareHeatingDeviceWithComponent], + device_config: PyViCareDeviceConfig, + entity_descriptions: tuple[ViCareBinarySensorEntityDescription, ...], +) -> list[ViCareBinarySensor]: + """Create component specific ViCare binary sensor entities.""" + + return [ + ViCareBinarySensor( + component, + device_config, + description, + ) + for component in components + for description in entity_descriptions + if is_supported(description.key, description, component) + ] + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -173,16 +190,12 @@ class ViCareBinarySensor(ViCareEntity, BinarySensorEntity): def __init__( self, - description: ViCareBinarySensorEntityDescription, - device_serial: str | None, + api: PyViCareDevice, device_config: PyViCareDeviceConfig, - device: PyViCareDevice, - component: PyViCareHeatingDeviceComponent | None = None, + description: ViCareBinarySensorEntityDescription, ) -> None: """Initialize the sensor.""" - super().__init__( - description.key, device_serial, device_config, device, component - ) + super().__init__(device_config, api, description.key) self.entity_description = description @property diff --git a/homeassistant/components/vicare/button.py b/homeassistant/components/vicare/button.py index 49d142c1edb..c927055dadd 100644 --- a/homeassistant/components/vicare/button.py +++ b/homeassistant/components/vicare/button.py @@ -24,7 +24,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity from .types import ViCareDevice, ViCareRequiredKeysMixinWithSet -from .utils import get_device_serial, is_supported +from .utils import is_supported _LOGGER = logging.getLogger(__name__) @@ -54,10 +54,9 @@ def _build_entities( return [ ViCareButton( - description, - get_device_serial(device.api), - device.config, device.api, + device.config, + description, ) for device in device_list for description in BUTTON_DESCRIPTIONS @@ -88,13 +87,12 @@ class ViCareButton(ViCareEntity, ButtonEntity): def __init__( self, - description: ViCareButtonEntityDescription, - device_serial: str | None, + api: PyViCareDevice, device_config: PyViCareDeviceConfig, - device: PyViCareDevice, + description: ViCareButtonEntityDescription, ) -> None: """Initialize the button.""" - super().__init__(description.key, device_serial, device_config, device) + super().__init__(device_config, api, description.key) 
self.entity_description = description def press(self) -> None: diff --git a/homeassistant/components/vicare/climate.py b/homeassistant/components/vicare/climate.py index 8a116038533..1333327609d 100644 --- a/homeassistant/components/vicare/climate.py +++ b/homeassistant/components/vicare/climate.py @@ -40,7 +40,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity from .types import HeatingProgram, ViCareDevice -from .utils import get_burners, get_circuits, get_compressors, get_device_serial +from .utils import get_burners, get_circuits, get_compressors _LOGGER = logging.getLogger(__name__) @@ -87,10 +87,10 @@ def _build_entities( """Create ViCare climate entities for a device.""" return [ ViCareClimate( - get_device_serial(device.api), - device.config, device.api, circuit, + device.config, + "heating", ) for device in device_list for circuit in get_circuits(device.api) @@ -136,26 +136,25 @@ class ViCareClimate(ViCareEntity, ClimateEntity): _attr_min_temp = VICARE_TEMP_HEATING_MIN _attr_max_temp = VICARE_TEMP_HEATING_MAX _attr_target_temperature_step = PRECISION_WHOLE - _attr_translation_key = "heating" _current_action: bool | None = None _current_mode: str | None = None - _current_program: str | None = None _enable_turn_on_off_backwards_compatibility = False def __init__( self, - device_serial: str | None, - device_config: PyViCareDeviceConfig, - device: PyViCareDevice, + api: PyViCareDevice, circuit: PyViCareHeatingCircuit, + device_config: PyViCareDeviceConfig, + translation_key: str, ) -> None: """Initialize the climate device.""" - super().__init__( - self._attr_translation_key, device_serial, device_config, device, circuit - ) - self._device = device + super().__init__(device_config, api, circuit.id) + self._circuit = circuit self._attributes: dict[str, Any] = {} - self._attributes["vicare_programs"] = self._api.getPrograms() + self._current_program = None + self._attr_translation_key = translation_key + + self._attributes["vicare_programs"] = self._circuit.getPrograms() self._attr_preset_modes = [ preset for heating_program in self._attributes["vicare_programs"] @@ -167,13 +166,11 @@ class ViCareClimate(ViCareEntity, ClimateEntity): try: _room_temperature = None with suppress(PyViCareNotSupportedFeatureError): - self._attributes["room_temperature"] = _room_temperature = ( - self._api.getRoomTemperature() - ) + _room_temperature = self._circuit.getRoomTemperature() _supply_temperature = None with suppress(PyViCareNotSupportedFeatureError): - _supply_temperature = self._api.getSupplyTemperature() + _supply_temperature = self._circuit.getSupplyTemperature() if _room_temperature is not None: self._attr_current_temperature = _room_temperature @@ -183,39 +180,44 @@ class ViCareClimate(ViCareEntity, ClimateEntity): self._attr_current_temperature = None with suppress(PyViCareNotSupportedFeatureError): - self._attributes["active_vicare_program"] = self._current_program = ( - self._api.getActiveProgram() + self._current_program = self._circuit.getActiveProgram() + + with suppress(PyViCareNotSupportedFeatureError): + self._attr_target_temperature = ( + self._circuit.getCurrentDesiredTemperature() ) with suppress(PyViCareNotSupportedFeatureError): - self._attr_target_temperature = self._api.getCurrentDesiredTemperature() + self._current_mode = self._circuit.getActiveMode() - with suppress(PyViCareNotSupportedFeatureError): - self._attributes["active_vicare_mode"] = self._current_mode = ( - 
self._api.getActiveMode() - ) + # Update the generic device attributes + self._attributes = { + "room_temperature": _room_temperature, + "active_vicare_program": self._current_program, + "active_vicare_mode": self._current_mode, + } with suppress(PyViCareNotSupportedFeatureError): self._attributes["heating_curve_slope"] = ( - self._api.getHeatingCurveSlope() + self._circuit.getHeatingCurveSlope() ) with suppress(PyViCareNotSupportedFeatureError): self._attributes["heating_curve_shift"] = ( - self._api.getHeatingCurveShift() + self._circuit.getHeatingCurveShift() ) with suppress(PyViCareNotSupportedFeatureError): - self._attributes["vicare_modes"] = self._api.getModes() + self._attributes["vicare_modes"] = self._circuit.getModes() self._current_action = False # Update the specific device attributes with suppress(PyViCareNotSupportedFeatureError): - for burner in get_burners(self._device): + for burner in get_burners(self._api): self._current_action = self._current_action or burner.getActive() with suppress(PyViCareNotSupportedFeatureError): - for compressor in get_compressors(self._device): + for compressor in get_compressors(self._api): self._current_action = ( self._current_action or compressor.getActive() ) @@ -246,9 +248,9 @@ class ViCareClimate(ViCareEntity, ClimateEntity): raise ValueError(f"Cannot set invalid hvac mode: {hvac_mode}") _LOGGER.debug("Setting hvac mode to %s / %s", hvac_mode, vicare_mode) - self._api.setMode(vicare_mode) + self._circuit.setMode(vicare_mode) - def vicare_mode_from_hvac_mode(self, hvac_mode) -> str | None: + def vicare_mode_from_hvac_mode(self, hvac_mode): """Return the corresponding vicare mode for an hvac_mode.""" if "vicare_modes" not in self._attributes: return None @@ -284,7 +286,7 @@ class ViCareClimate(ViCareEntity, ClimateEntity): def set_temperature(self, **kwargs: Any) -> None: """Set new target temperatures.""" if (temp := kwargs.get(ATTR_TEMPERATURE)) is not None: - self._api.setProgramTemperature(self._current_program, temp) + self._circuit.setProgramTemperature(self._current_program, temp) self._attr_target_temperature = temp @property @@ -313,7 +315,7 @@ class ViCareClimate(ViCareEntity, ClimateEntity): ): _LOGGER.debug("deactivating %s", self._current_program) try: - self._api.deactivateProgram(self._current_program) + self._circuit.deactivateProgram(self._current_program) except PyViCareCommandError as err: raise ServiceValidationError( translation_domain=DOMAIN, @@ -327,7 +329,7 @@ class ViCareClimate(ViCareEntity, ClimateEntity): if target_program in CHANGABLE_HEATING_PROGRAMS: _LOGGER.debug("activating %s", target_program) try: - self._api.activateProgram(target_program) + self._circuit.activateProgram(target_program) except PyViCareCommandError as err: raise ServiceValidationError( translation_domain=DOMAIN, @@ -338,13 +340,13 @@ class ViCareClimate(ViCareEntity, ClimateEntity): ) from err @property - def extra_state_attributes(self) -> dict[str, Any]: + def extra_state_attributes(self): """Show Device Attributes.""" return self._attributes - def set_vicare_mode(self, vicare_mode) -> None: + def set_vicare_mode(self, vicare_mode): """Service function to set vicare modes directly.""" if vicare_mode not in self._attributes["vicare_modes"]: raise ValueError(f"Cannot set invalid vicare mode: {vicare_mode}.") - self._api.setMode(vicare_mode) + self._circuit.setMode(vicare_mode) diff --git a/homeassistant/components/vicare/config_flow.py b/homeassistant/components/vicare/config_flow.py index c711cc06074..67ce4f2c186 100644 --- 
a/homeassistant/components/vicare/config_flow.py +++ b/homeassistant/components/vicare/config_flow.py @@ -13,7 +13,7 @@ from PyViCare.PyViCareUtils import ( import voluptuous as vol from homeassistant.components import dhcp -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import format_mac @@ -50,6 +50,7 @@ class ViCareConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for ViCare.""" VERSION = 1 + entry: ConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -80,6 +81,7 @@ class ViCareConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with ViCare.""" + self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -87,11 +89,11 @@ class ViCareConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Confirm re-authentication with ViCare.""" errors: dict[str, str] = {} + assert self.entry is not None - reauth_entry = self._get_reauth_entry() if user_input: data = { - **reauth_entry.data, + **self.entry.data, **user_input, } @@ -100,12 +102,17 @@ class ViCareConfigFlow(ConfigFlow, domain=DOMAIN): except (PyViCareInvalidConfigurationError, PyViCareInvalidCredentialsError): errors["base"] = "invalid_auth" else: - return self.async_update_reload_and_abort(reauth_entry, data=data) + self.hass.config_entries.async_update_entry( + self.entry, + data=data, + ) + await self.hass.config_entries.async_reload(self.entry.entry_id) + return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", data_schema=self.add_suggested_values_to_schema( - REAUTH_SCHEMA, reauth_entry.data + REAUTH_SCHEMA, self.entry.data ), errors=errors, ) diff --git a/homeassistant/components/vicare/const.py b/homeassistant/components/vicare/const.py index 828a879927d..24ab94778e3 100644 --- a/homeassistant/components/vicare/const.py +++ b/homeassistant/components/vicare/const.py @@ -10,7 +10,6 @@ PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CLIMATE, - Platform.FAN, Platform.NUMBER, Platform.SENSOR, Platform.WATER_HEATER, @@ -19,10 +18,10 @@ PLATFORMS = [ UNSUPPORTED_DEVICES = [ "Heatbox1", "Heatbox2_SRC", - "E3_TCU41_x04", "E3_FloorHeatingCircuitChannel", "E3_FloorHeatingCircuitDistributorBox", "E3_RoomControl_One_522", + "E3_RoomSensor", ] DEVICE_LIST = "device_list" diff --git a/homeassistant/components/vicare/entity.py b/homeassistant/components/vicare/entity.py index 2d858185b9f..1bb2993cd3a 100644 --- a/homeassistant/components/vicare/entity.py +++ b/homeassistant/components/vicare/entity.py @@ -2,9 +2,6 @@ from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig -from PyViCare.PyViCareHeatingDevice import ( - HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, -) from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity @@ -19,28 +16,21 @@ class ViCareEntity(Entity): def __init__( self, - unique_id_suffix: str, - device_serial: str | None, device_config: PyViCareDeviceConfig, device: PyViCareDevice, - component: 
PyViCareHeatingDeviceComponent | None = None, + unique_id_suffix: str, ) -> None: """Initialize the entity.""" - gateway_serial = device_config.getConfig().serial - device_id = device_config.getId() + self._api = device - identifier = f"{gateway_serial}_{device_serial.replace("zigbee-", "zigbee_") if device_serial is not None else device_id}" - - self._api: PyViCareDevice | PyViCareHeatingDeviceComponent = ( - component if component else device - ) - self._attr_unique_id = f"{identifier}-{unique_id_suffix}" - if component: - self._attr_unique_id += f"-{component.id}" + self._attr_unique_id = f"{device_config.getConfig().serial}-{unique_id_suffix}" + # valid for compressors, circuits, burners (HeatingDeviceWithComponent) + if hasattr(device, "id"): + self._attr_unique_id += f"-{device.id}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, identifier)}, - serial_number=device_serial, + identifiers={(DOMAIN, device_config.getConfig().serial)}, + serial_number=device_config.getConfig().serial, name=device_config.getModel(), manufacturer="Viessmann", model=device_config.getModel(), diff --git a/homeassistant/components/vicare/fan.py b/homeassistant/components/vicare/fan.py deleted file mode 100644 index b787de20773..00000000000 --- a/homeassistant/components/vicare/fan.py +++ /dev/null @@ -1,177 +0,0 @@ -"""Viessmann ViCare ventilation device.""" - -from __future__ import annotations - -from contextlib import suppress -import enum -import logging - -from PyViCare.PyViCareDevice import Device as PyViCareDevice -from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig -from PyViCare.PyViCareUtils import ( - PyViCareInvalidDataError, - PyViCareNotSupportedFeatureError, - PyViCareRateLimitError, -) -from PyViCare.PyViCareVentilationDevice import ( - VentilationDevice as PyViCareVentilationDevice, -) -from requests.exceptions import ConnectionError as RequestConnectionError - -from homeassistant.components.fan import FanEntity, FanEntityFeature -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.percentage import ( - ordered_list_item_to_percentage, - percentage_to_ordered_list_item, -) - -from .const import DEVICE_LIST, DOMAIN -from .entity import ViCareEntity -from .utils import get_device_serial - -_LOGGER = logging.getLogger(__name__) - - -class VentilationProgram(enum.StrEnum): - """ViCare preset ventilation programs. 
- - As listed in https://github.com/somm15/PyViCare/blob/6c5b023ca6c8bb2d38141dd1746dc1705ec84ce8/PyViCare/PyViCareVentilationDevice.py#L37 - """ - - LEVEL_ONE = "levelOne" - LEVEL_TWO = "levelTwo" - LEVEL_THREE = "levelThree" - LEVEL_FOUR = "levelFour" - - -class VentilationMode(enum.StrEnum): - """ViCare ventilation modes.""" - - PERMANENT = "permanent" # on, speed controlled by program (levelOne-levelFour) - VENTILATION = "ventilation" # activated by schedule - SENSOR_DRIVEN = "sensor_driven" # activated by schedule, override by sensor - SENSOR_OVERRIDE = "sensor_override" # activated by sensor - - @staticmethod - def to_vicare_mode(mode: str | None) -> str | None: - """Return the mapped ViCare ventilation mode for the Home Assistant mode.""" - if mode: - try: - ventilation_mode = VentilationMode(mode) - except ValueError: - # ignore unsupported / unmapped modes - return None - return HA_TO_VICARE_MODE_VENTILATION.get(ventilation_mode) if mode else None - return None - - @staticmethod - def from_vicare_mode(vicare_mode: str | None) -> str | None: - """Return the mapped Home Assistant mode for the ViCare ventilation mode.""" - for mode in VentilationMode: - if HA_TO_VICARE_MODE_VENTILATION.get(VentilationMode(mode)) == vicare_mode: - return mode - return None - - -HA_TO_VICARE_MODE_VENTILATION = { - VentilationMode.PERMANENT: "permanent", - VentilationMode.VENTILATION: "ventilation", - VentilationMode.SENSOR_DRIVEN: "sensorDriven", - VentilationMode.SENSOR_OVERRIDE: "sensorOverride", -} - -ORDERED_NAMED_FAN_SPEEDS = [ - VentilationProgram.LEVEL_ONE, - VentilationProgram.LEVEL_TWO, - VentilationProgram.LEVEL_THREE, - VentilationProgram.LEVEL_FOUR, -] - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the ViCare fan platform.""" - - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - - async_add_entities( - [ - ViCareFan(get_device_serial(device.api), device.config, device.api) - for device in device_list - if isinstance(device.api, PyViCareVentilationDevice) - ] - ) - - -class ViCareFan(ViCareEntity, FanEntity): - """Representation of the ViCare ventilation device.""" - - _attr_preset_modes = list[str]( - [ - VentilationMode.PERMANENT, - VentilationMode.VENTILATION, - VentilationMode.SENSOR_DRIVEN, - VentilationMode.SENSOR_OVERRIDE, - ] - ) - _attr_speed_count = len(ORDERED_NAMED_FAN_SPEEDS) - _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE - _attr_translation_key = "ventilation" - _enable_turn_on_off_backwards_compatibility = False - - def __init__( - self, - device_serial: str | None, - device_config: PyViCareDeviceConfig, - device: PyViCareDevice, - ) -> None: - """Initialize the fan entity.""" - super().__init__( - self._attr_translation_key, device_serial, device_config, device - ) - - def update(self) -> None: - """Update state of fan.""" - try: - with suppress(PyViCareNotSupportedFeatureError): - self._attr_preset_mode = VentilationMode.from_vicare_mode( - self._api.getActiveMode() - ) - with suppress(PyViCareNotSupportedFeatureError): - self._attr_percentage = ordered_list_item_to_percentage( - ORDERED_NAMED_FAN_SPEEDS, self._api.getActiveProgram() - ) - except RequestConnectionError: - _LOGGER.error("Unable to retrieve data from ViCare server") - except ValueError: - _LOGGER.error("Unable to decode data from ViCare server") - except PyViCareRateLimitError as limit_exception: - _LOGGER.error("Vicare API rate limit exceeded: 
%s", limit_exception) - except PyViCareInvalidDataError as invalid_data_exception: - _LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception) - - @property - def is_on(self) -> bool | None: - """Return true if the entity is on.""" - # Viessmann ventilation unit cannot be turned off - return True - - def set_percentage(self, percentage: int) -> None: - """Set the speed of the fan, as a percentage.""" - if self._attr_preset_mode != str(VentilationMode.PERMANENT): - self.set_preset_mode(VentilationMode.PERMANENT) - - level = percentage_to_ordered_list_item(ORDERED_NAMED_FAN_SPEEDS, percentage) - _LOGGER.debug("changing ventilation level to %s", level) - self._api.setPermanentLevel(level) - - def set_preset_mode(self, preset_mode: str) -> None: - """Set new preset mode.""" - target_mode = VentilationMode.to_vicare_mode(preset_mode) - _LOGGER.debug("changing ventilation mode to %s", target_mode) - self._api.setActiveMode(target_mode) diff --git a/homeassistant/components/vicare/icons.json b/homeassistant/components/vicare/icons.json index 9d0f27a863c..2f40d8a8822 100644 --- a/homeassistant/components/vicare/icons.json +++ b/homeassistant/components/vicare/icons.json @@ -88,8 +88,6 @@ } }, "services": { - "set_vicare_mode": { - "service": "mdi:cog" - } + "set_vicare_mode": "mdi:cog" } } diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json index 8ce996ab81d..97c4b91022d 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/vicare", "iot_class": "cloud_polling", "loggers": ["PyViCare"], - "requirements": ["PyViCare==2.35.0"] + "requirements": ["PyViCare==2.32.0"] } diff --git a/homeassistant/components/vicare/number.py b/homeassistant/components/vicare/number.py index f9af9636941..c0564170274 100644 --- a/homeassistant/components/vicare/number.py +++ b/homeassistant/components/vicare/number.py @@ -33,7 +33,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity from .types import HeatingProgram, ViCareDevice, ViCareRequiredKeysMixin -from .utils import get_circuits, get_device_serial, is_supported +from .utils import get_circuits, is_supported _LOGGER = logging.getLogger(__name__) @@ -50,18 +50,6 @@ class ViCareNumberEntityDescription(NumberEntityDescription, ViCareRequiredKeysM DEVICE_ENTITY_DESCRIPTIONS: tuple[ViCareNumberEntityDescription, ...] = ( - ViCareNumberEntityDescription( - key="dhw_temperature", - translation_key="dhw_temperature", - entity_category=EntityCategory.CONFIG, - device_class=NumberDeviceClass.TEMPERATURE, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_getter=lambda api: api.getDomesticHotWaterConfiguredTemperature(), - value_setter=lambda api, value: api.setDomesticHotWaterTemperature(value), - min_value_getter=lambda api: api.getDomesticHotWaterMinTemperature(), - max_value_getter=lambda api: api.getDomesticHotWaterMaxTemperature(), - native_step=1, - ), ViCareNumberEntityDescription( key="dhw_secondary_temperature", translation_key="dhw_secondary_temperature", @@ -75,34 +63,6 @@ DEVICE_ENTITY_DESCRIPTIONS: tuple[ViCareNumberEntityDescription, ...] 
= ( native_max_value=60, native_step=1, ), - ViCareNumberEntityDescription( - key="dhw_hysteresis_switch_on", - translation_key="dhw_hysteresis_switch_on", - entity_category=EntityCategory.CONFIG, - device_class=NumberDeviceClass.TEMPERATURE, - native_unit_of_measurement=UnitOfTemperature.KELVIN, - value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOn(), - value_setter=lambda api, value: api.setDomesticHotWaterHysteresisSwitchOn( - value - ), - min_value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOnMin(), - max_value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOnMax(), - stepping_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOnStepping(), - ), - ViCareNumberEntityDescription( - key="dhw_hysteresis_switch_off", - translation_key="dhw_hysteresis_switch_off", - entity_category=EntityCategory.CONFIG, - device_class=NumberDeviceClass.TEMPERATURE, - native_unit_of_measurement=UnitOfTemperature.KELVIN, - value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOff(), - value_setter=lambda api, value: api.setDomesticHotWaterHysteresisSwitchOff( - value - ), - min_value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOffMin(), - max_value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOffMax(), - stepping_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOffStepping(), - ), ) @@ -265,72 +225,6 @@ CIRCUIT_ENTITY_DESCRIPTIONS: tuple[ViCareNumberEntityDescription, ...] = ( HeatingProgram.COMFORT_HEATING ), ), - ViCareNumberEntityDescription( - key="normal_cooling_temperature", - translation_key="normal_cooling_temperature", - entity_category=EntityCategory.CONFIG, - device_class=NumberDeviceClass.TEMPERATURE, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_getter=lambda api: api.getDesiredTemperatureForProgram( - HeatingProgram.NORMAL_COOLING - ), - value_setter=lambda api, value: api.setProgramTemperature( - HeatingProgram.NORMAL_COOLING, value - ), - min_value_getter=lambda api: api.getProgramMinTemperature( - HeatingProgram.NORMAL_COOLING - ), - max_value_getter=lambda api: api.getProgramMaxTemperature( - HeatingProgram.NORMAL_COOLING - ), - stepping_getter=lambda api: api.getProgramStepping( - HeatingProgram.NORMAL_COOLING - ), - ), - ViCareNumberEntityDescription( - key="reduced_cooling_temperature", - translation_key="reduced_cooling_temperature", - entity_category=EntityCategory.CONFIG, - device_class=NumberDeviceClass.TEMPERATURE, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_getter=lambda api: api.getDesiredTemperatureForProgram( - HeatingProgram.REDUCED_COOLING - ), - value_setter=lambda api, value: api.setProgramTemperature( - HeatingProgram.REDUCED_COOLING, value - ), - min_value_getter=lambda api: api.getProgramMinTemperature( - HeatingProgram.REDUCED_COOLING - ), - max_value_getter=lambda api: api.getProgramMaxTemperature( - HeatingProgram.REDUCED_COOLING - ), - stepping_getter=lambda api: api.getProgramStepping( - HeatingProgram.REDUCED_COOLING - ), - ), - ViCareNumberEntityDescription( - key="comfort_cooling_temperature", - translation_key="comfort_cooling_temperature", - entity_category=EntityCategory.CONFIG, - device_class=NumberDeviceClass.TEMPERATURE, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_getter=lambda api: api.getDesiredTemperatureForProgram( - HeatingProgram.COMFORT_COOLING - ), - value_setter=lambda api, value: api.setProgramTemperature( - HeatingProgram.COMFORT_COOLING, value - ), - min_value_getter=lambda api: 
api.getProgramMinTemperature( - HeatingProgram.COMFORT_COOLING - ), - max_value_getter=lambda api: api.getProgramMaxTemperature( - HeatingProgram.COMFORT_COOLING - ), - stepping_getter=lambda api: api.getProgramStepping( - HeatingProgram.COMFORT_COOLING - ), - ), ) @@ -339,32 +233,30 @@ def _build_entities( ) -> list[ViCareNumber]: """Create ViCare number entities for a device.""" - entities: list[ViCareNumber] = [] - for device in device_list: - # add device entities - entities.extend( - ViCareNumber( - description, - get_device_serial(device.api), - device.config, - device.api, - ) - for description in DEVICE_ENTITY_DESCRIPTIONS - if is_supported(description.key, description, device.api) + entities: list[ViCareNumber] = [ + ViCareNumber( + device.api, + device.config, + description, ) - # add component entities - entities.extend( + for device in device_list + for description in DEVICE_ENTITY_DESCRIPTIONS + if is_supported(description.key, description, device.api) + ] + + entities.extend( + [ ViCareNumber( - description, - get_device_serial(device.api), - device.config, - device.api, circuit, + device.config, + description, ) + for device in device_list for circuit in get_circuits(device.api) for description in CIRCUIT_ENTITY_DESCRIPTIONS if is_supported(description.key, description, circuit) - ) + ] + ) return entities @@ -391,16 +283,12 @@ class ViCareNumber(ViCareEntity, NumberEntity): def __init__( self, - description: ViCareNumberEntityDescription, - device_serial: str | None, + api: PyViCareHeatingDeviceComponent, device_config: PyViCareDeviceConfig, - device: PyViCareDevice, - component: PyViCareHeatingDeviceComponent | None = None, + description: ViCareNumberEntityDescription, ) -> None: """Initialize the number.""" - super().__init__( - description.key, device_serial, device_config, device, component - ) + super().__init__(device_config, api, description.key) self.entity_description = description @property diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py index 57b7c0bec9a..0e98729e40f 100644 --- a/homeassistant/components/vicare/sensor.py +++ b/homeassistant/components/vicare/sensor.py @@ -10,7 +10,7 @@ import logging from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareHeatingDevice import ( - HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, + HeatingDeviceWithComponent as PyViCareHeatingDeviceWithComponent, ) from PyViCare.PyViCareUtils import ( PyViCareInvalidDataError, @@ -51,13 +51,7 @@ from .const import ( ) from .entity import ViCareEntity from .types import ViCareDevice, ViCareRequiredKeysMixin -from .utils import ( - get_burners, - get_circuits, - get_compressors, - get_device_serial, - is_supported, -) +from .utils import get_burners, get_circuits, get_compressors, is_supported _LOGGER = logging.getLogger(__name__) @@ -177,30 +171,6 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), - ViCareSensorEntityDescription( - key="dhw_storage_temperature", - translation_key="dhw_storage_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_getter=lambda api: api.getDomesticHotWaterStorageTemperature(), - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - ), - ViCareSensorEntityDescription( - key="dhw_storage_top_temperature", - translation_key="dhw_storage_top_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_getter=lambda api: api.getHotWaterStorageTemperatureTop(), - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - ), - ViCareSensorEntityDescription( - key="dhw_storage_bottom_temperature", - translation_key="dhw_storage_bottom_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_getter=lambda api: api.getHotWaterStorageTemperatureBottom(), - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - ), ViCareSensorEntityDescription( key="hotwater_gas_consumption_today", translation_key="hotwater_gas_consumption_today", @@ -430,32 +400,6 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( state_class=SensorStateClass.TOTAL_INCREASING, entity_registry_enabled_default=False, ), - ViCareSensorEntityDescription( - key="energy_consumption_cooling_today", - translation_key="energy_consumption_cooling_today", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - value_getter=lambda api: api.getPowerConsumptionCoolingToday(), - unit_getter=lambda api: api.getPowerConsumptionCoolingUnit(), - state_class=SensorStateClass.TOTAL_INCREASING, - ), - ViCareSensorEntityDescription( - key="energy_consumption_cooling_this_month", - translation_key="energy_consumption_cooling_this_month", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - value_getter=lambda api: api.getPowerConsumptionCoolingThisMonth(), - unit_getter=lambda api: api.getPowerConsumptionCoolingUnit(), - state_class=SensorStateClass.TOTAL_INCREASING, - entity_registry_enabled_default=False, - ), - ViCareSensorEntityDescription( - key="energy_consumption_cooling_this_year", - translation_key="energy_consumption_cooling_this_year", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - value_getter=lambda api: api.getPowerConsumptionCoolingThisYear(), - unit_getter=lambda api: api.getPowerConsumptionCoolingUnit(), - state_class=SensorStateClass.TOTAL_INCREASING, - entity_registry_enabled_default=False, - ), ViCareSensorEntityDescription( key="energy_dhw_summary_consumption_heating_currentday", translation_key="energy_dhw_summary_consumption_heating_currentday", @@ -801,22 +745,9 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( options=["ready", "production"], value_getter=lambda api: _filter_pv_states(api.getPhotovoltaicStatus()), ), - ViCareSensorEntityDescription( - key="room_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - value_getter=lambda api: api.getTemperature(), - ), - ViCareSensorEntityDescription( - key="room_humidity", - device_class=SensorDeviceClass.HUMIDITY, - native_unit_of_measurement=PERCENTAGE, - state_class=SensorStateClass.MEASUREMENT, - value_getter=lambda api: api.getHumidity(), - ), ) + CIRCUIT_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ViCareSensorEntityDescription( key="supply_temperature", @@ -934,38 +865,61 @@ def _build_entities( entities: list[ViCareSensor] = [] for device in device_list: - # add device entities + entities.extend(_build_entities_for_device(device.api, device.config)) entities.extend( - ViCareSensor( - description, - get_device_serial(device.api), - device.config, - device.api, + _build_entities_for_component( + get_circuits(device.api), device.config, CIRCUIT_SENSORS ) - for description in GLOBAL_SENSORS - if is_supported(description.key, description, device.api) ) - # add component entities - for component_list, entity_description_list in ( - (get_circuits(device.api), CIRCUIT_SENSORS), - (get_burners(device.api), BURNER_SENSORS), - (get_compressors(device.api), COMPRESSOR_SENSORS), - ): - entities.extend( - ViCareSensor( - description, - get_device_serial(device.api), - device.config, - device.api, - component, - ) - for component in component_list - for description in entity_description_list - if is_supported(description.key, description, component) + entities.extend( + _build_entities_for_component( + get_burners(device.api), device.config, BURNER_SENSORS ) + ) + entities.extend( + _build_entities_for_component( + get_compressors(device.api), device.config, COMPRESSOR_SENSORS + ) + ) return entities +def _build_entities_for_device( + device: PyViCareDevice, + device_config: PyViCareDeviceConfig, +) -> list[ViCareSensor]: + """Create device specific ViCare sensor entities.""" + + return [ + ViCareSensor( + device, + device_config, + description, + ) + for description in GLOBAL_SENSORS + if is_supported(description.key, description, device) + ] + + +def _build_entities_for_component( + components: list[PyViCareHeatingDeviceWithComponent], + device_config: PyViCareDeviceConfig, + entity_descriptions: tuple[ViCareSensorEntityDescription, ...], +) -> list[ViCareSensor]: + """Create component specific ViCare sensor entities.""" + + return [ + ViCareSensor( + component, + device_config, + description, + ) + for component in components + for description in entity_descriptions + if is_supported(description.key, description, component) + ] + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -978,9 +932,7 @@ async def async_setup_entry( await hass.async_add_executor_job( _build_entities, device_list, - ), - # run update to have device_class set depending on unit_of_measurement - True, + ) ) @@ -991,16 +943,12 @@ class ViCareSensor(ViCareEntity, SensorEntity): def __init__( self, - description: ViCareSensorEntityDescription, - device_serial: str | None, + api, device_config: PyViCareDeviceConfig, - device: PyViCareDevice, - component: PyViCareHeatingDeviceComponent | None = None, + description: ViCareSensorEntityDescription, ) -> None: """Initialize the sensor.""" - super().__init__( - description.key, device_serial, device_config, device, component - ) + super().__init__(device_config, api, description.key) self.entity_description = description @property diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 77e570da779..de92d0ec271 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -65,21 +65,6 @@ "name": "Heating" } }, - "fan": { - "ventilation": { - "name": "Ventilation", - "state_attributes": { - "preset_mode": { - "state": { - "permanent": "permanent", - "ventilation": "schedule", - "sensor_driven": "sensor", - "sensor_override": "schedule with 
sensor-override" - } - } - } - } - }, "number": { "heating_curve_shift": { "name": "Heating curve shift" @@ -97,34 +82,16 @@ "name": "Comfort temperature" }, "normal_heating_temperature": { - "name": "Normal heating temperature" + "name": "[%key:component::vicare::entity::number::normal_temperature::name%]" }, "reduced_heating_temperature": { - "name": "Reduced heating temperature" + "name": "[%key:component::vicare::entity::number::reduced_temperature::name%]" }, "comfort_heating_temperature": { - "name": "Comfort heating temperature" - }, - "normal_cooling_temperature": { - "name": "Normal cooling temperature" - }, - "reduced_cooling_temperature": { - "name": "Reduced cooling temperature" - }, - "comfort_cooling_temperature": { - "name": "Comfort cooling temperature" - }, - "dhw_temperature": { - "name": "DHW temperature" + "name": "[%key:component::vicare::entity::number::comfort_temperature::name%]" }, "dhw_secondary_temperature": { "name": "DHW secondary temperature" - }, - "dhw_hysteresis_switch_on": { - "name": "DHW hysteresis switch on" - }, - "dhw_hysteresis_switch_off": { - "name": "DHW hysteresis switch off" } }, "sensor": { @@ -161,15 +128,6 @@ "hotwater_min_temperature": { "name": "DHW min temperature" }, - "dhw_storage_temperature": { - "name": "DHW storage temperature" - }, - "dhw_storage_top_temperature": { - "name": "DHW storage top temperature" - }, - "dhw_storage_bottom_temperature": { - "name": "DHW storage bottom temperature" - }, "hotwater_gas_consumption_today": { "name": "DHW gas consumption today" }, @@ -243,49 +201,28 @@ "name": "DHW gas consumption last seven days" }, "energy_summary_consumption_heating_currentday": { - "name": "Heating electricity consumption today" + "name": "Heating energy consumption today" }, "energy_summary_consumption_heating_currentmonth": { - "name": "Heating electricity consumption this month" + "name": "Heating energy consumption this month" }, "energy_summary_consumption_heating_currentyear": { - "name": "Heating electricity consumption this year" + "name": "Heating energy consumption this year" }, "energy_summary_consumption_heating_lastsevendays": { - "name": "Heating electricity consumption last seven days" - }, - "energy_consumption_cooling_today": { - "name": "Cooling electricity consumption today" - }, - "energy_consumption_cooling_this_month": { - "name": "Cooling electricity consumption this month" - }, - "energy_consumption_cooling_this_year": { - "name": "Cooling electricity consumption this year" + "name": "Heating energy consumption last seven days" }, "energy_dhw_summary_consumption_heating_currentday": { - "name": "DHW electricity consumption today" + "name": "DHW energy consumption today" }, "energy_dhw_summary_consumption_heating_currentmonth": { - "name": "DHW electricity consumption this month" + "name": "DHW energy consumption this month" }, "energy_dhw_summary_consumption_heating_currentyear": { - "name": "DHW electricity consumption this year" + "name": "DHW energy consumption this year" }, "energy_summary_dhw_consumption_heating_lastsevendays": { - "name": "DHW electricity consumption last seven days" - }, - "power_consumption_today": { - "name": "Electricity consumption today" - }, - "power_consumption_this_week": { - "name": "Electricity consumption this week" - }, - "power_consumption_this_month": { - "name": "Electricity consumption this month" - }, - "power_consumption_this_year": { - "name": "Electricity consumption this year" + "name": "DHW energy consumption last seven days" }, "power_production_current": 
{ "name": "Power production current" @@ -320,6 +257,18 @@ "solar_power_production_this_year": { "name": "Solar energy production this year" }, + "power_consumption_today": { + "name": "Energy consumption today" + }, + "power_consumption_this_week": { + "name": "Power consumption this week" + }, + "power_consumption_this_month": { + "name": "Energy consumption this month" + }, + "power_consumption_this_year": { + "name": "Energy consumption this year" + }, "buffer_top_temperature": { "name": "Buffer top temperature" }, @@ -355,8 +304,8 @@ "ess_discharge_total": { "name": "Battery discharge total" }, - "pcc_transfer_power_exchange": { - "name": "Power exchange with grid" + "pcc_current_power_exchange": { + "name": "Grid power exchange" }, "pcc_energy_consumption": { "name": "Energy import from grid" diff --git a/homeassistant/components/vicare/types.py b/homeassistant/components/vicare/types.py index 98d1c0566ce..7e1ec7f8bee 100644 --- a/homeassistant/components/vicare/types.py +++ b/homeassistant/components/vicare/types.py @@ -1,7 +1,6 @@ """Types for the ViCare integration.""" from collections.abc import Callable -from contextlib import suppress from dataclasses import dataclass import enum from typing import Any @@ -25,14 +24,11 @@ class HeatingProgram(enum.StrEnum): COMFORT = "comfort" COMFORT_HEATING = "comfortHeating" - COMFORT_COOLING = "comfortCooling" ECO = "eco" NORMAL = "normal" NORMAL_HEATING = "normalHeating" - NORMAL_COOLING = "normalCooling" REDUCED = "reduced" REDUCED_HEATING = "reducedHeating" - REDUCED_COOLING = "reducedCooling" STANDBY = "standby" @staticmethod @@ -52,12 +48,8 @@ class HeatingProgram(enum.StrEnum): ) -> str | None: """Return the mapped ViCare heating program for the Home Assistant preset.""" for program in supported_heating_programs: - with suppress(ValueError): - if ( - VICARE_TO_HA_PRESET_HEATING.get(HeatingProgram(program)) - == ha_preset - ): - return program + if VICARE_TO_HA_PRESET_HEATING.get(HeatingProgram(program)) == ha_preset: + return program return None diff --git a/homeassistant/components/vicare/utils.py b/homeassistant/components/vicare/utils.py index 5156ea4a41e..2ba5ddbfb0a 100644 --- a/homeassistant/components/vicare/utils.py +++ b/homeassistant/components/vicare/utils.py @@ -7,12 +7,7 @@ from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareHeatingDevice import ( HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, ) -from PyViCare.PyViCareUtils import ( - PyViCareInvalidDataError, - PyViCareNotSupportedFeatureError, - PyViCareRateLimitError, -) -import requests +from PyViCare.PyViCareUtils import PyViCareNotSupportedFeatureError from homeassistant.config_entries import ConfigEntry @@ -32,23 +27,6 @@ def get_device( )() -def get_device_serial(device: PyViCareDevice) -> str | None: - """Get device serial for device if supported.""" - try: - return device.getSerial() - except PyViCareNotSupportedFeatureError: - _LOGGER.debug("Device does not offer a 'device.serial' data point") - except PyViCareRateLimitError as limit_exception: - _LOGGER.debug("Vicare API rate limit exceeded: %s", limit_exception) - except PyViCareInvalidDataError as invalid_data_exception: - _LOGGER.debug("Invalid data from Vicare server: %s", invalid_data_exception) - except requests.exceptions.ConnectionError: - _LOGGER.debug("Unable to retrieve data from ViCare server") - except ValueError: - _LOGGER.debug("Unable to decode data from ViCare server") - return None - - def is_supported( name: str, entity_description: 
ViCareRequiredKeysMixin, diff --git a/homeassistant/components/vicare/water_heater.py b/homeassistant/components/vicare/water_heater.py index 5e241c9a3be..223217f4e13 100644 --- a/homeassistant/components/vicare/water_heater.py +++ b/homeassistant/components/vicare/water_heater.py @@ -28,7 +28,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity from .types import ViCareDevice -from .utils import get_circuits, get_device_serial +from .utils import get_circuits _LOGGER = logging.getLogger(__name__) @@ -69,10 +69,10 @@ def _build_entities( return [ ViCareWater( - get_device_serial(device.api), - device.config, device.api, circuit, + device.config, + "domestic_hot_water", ) for device in device_list for circuit in get_circuits(device.api) @@ -104,20 +104,20 @@ class ViCareWater(ViCareEntity, WaterHeaterEntity): _attr_min_temp = VICARE_TEMP_WATER_MIN _attr_max_temp = VICARE_TEMP_WATER_MAX _attr_operation_list = list(HA_TO_VICARE_HVAC_DHW) - _attr_translation_key = "domestic_hot_water" - _current_mode: str | None = None def __init__( self, - device_serial: str | None, - device_config: PyViCareDeviceConfig, - device: PyViCareDevice, + api: PyViCareDevice, circuit: PyViCareHeatingCircuit, + device_config: PyViCareDeviceConfig, + translation_key: str, ) -> None: """Initialize the DHW water_heater device.""" - super().__init__(circuit.id, device_serial, device_config, device) + super().__init__(device_config, api, circuit.id) self._circuit = circuit self._attributes: dict[str, Any] = {} + self._current_mode = None + self._attr_translation_key = translation_key def update(self) -> None: """Let HA know there has been an update from the ViCare API.""" @@ -151,8 +151,6 @@ class ViCareWater(ViCareEntity, WaterHeaterEntity): self._attr_target_temperature = temp @property - def current_operation(self) -> str | None: + def current_operation(self): """Return current operation ie. 
heat, cool, idle.""" - if self._current_mode is None: - return None - return VICARE_TO_HA_HVAC_DHW.get(self._current_mode, None) + return VICARE_TO_HA_HVAC_DHW.get(self._current_mode) diff --git a/homeassistant/components/vilfo/__init__.py b/homeassistant/components/vilfo/__init__.py index ca74e74f37a..fe00fa494b5 100644 --- a/homeassistant/components/vilfo/__init__.py +++ b/homeassistant/components/vilfo/__init__.py @@ -105,5 +105,5 @@ class VilfoRouterData: return if self.available and self._unavailable_logged: - _LOGGER.warning("Vilfo Router %s is available again", self.host) + _LOGGER.info("Vilfo Router %s is available again", self.host) self._unavailable_logged = False diff --git a/homeassistant/components/vilfo/config_flow.py b/homeassistant/components/vilfo/config_flow.py index a6cff506f79..b21c63bfb97 100644 --- a/homeassistant/components/vilfo/config_flow.py +++ b/homeassistant/components/vilfo/config_flow.py @@ -1,7 +1,6 @@ """Config flow for Vilfo Router integration.""" import logging -from typing import Any from vilfo import Client as VilfoClient from vilfo.exceptions import ( @@ -10,7 +9,7 @@ from vilfo.exceptions import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST, CONF_ID, CONF_MAC from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -100,9 +99,7 @@ class DomainConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/vizio/config_flow.py b/homeassistant/components/vizio/config_flow.py index 49f6a709565..d8b99595f54 100644 --- a/homeassistant/components/vizio/config_flow.py +++ b/homeassistant/components/vizio/config_flow.py @@ -108,6 +108,10 @@ def _host_is_same(host1: str, host2: str) -> bool: class VizioOptionsConfigFlow(OptionsFlow): """Handle Vizio options.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize vizio options flow.""" + self.config_entry = config_entry + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -180,7 +184,7 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> VizioOptionsConfigFlow: """Get the options flow for this handler.""" - return VizioOptionsConfigFlow() + return VizioOptionsConfigFlow(config_entry) def __init__(self) -> None: """Initialize config flow.""" @@ -281,7 +285,9 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=schema, errors=errors) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import( + self, import_config: dict[str, Any] + ) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" # Check if new config entry matches any existing config entries for entry in self._async_current_entries(): @@ -290,28 +296,28 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): continue if await self.hass.async_add_executor_job( - _host_is_same, entry.data[CONF_HOST], import_data[CONF_HOST] + _host_is_same, entry.data[CONF_HOST], import_config[CONF_HOST] ): updated_options: dict[str, Any] = {} updated_data: 
dict[str, Any] = {} remove_apps = False - if entry.data[CONF_HOST] != import_data[CONF_HOST]: - updated_data[CONF_HOST] = import_data[CONF_HOST] + if entry.data[CONF_HOST] != import_config[CONF_HOST]: + updated_data[CONF_HOST] = import_config[CONF_HOST] - if entry.data[CONF_NAME] != import_data[CONF_NAME]: - updated_data[CONF_NAME] = import_data[CONF_NAME] + if entry.data[CONF_NAME] != import_config[CONF_NAME]: + updated_data[CONF_NAME] = import_config[CONF_NAME] # Update entry.data[CONF_APPS] if import_config[CONF_APPS] differs, and # pop entry.data[CONF_APPS] if import_config[CONF_APPS] is not specified - if entry.data.get(CONF_APPS) != import_data.get(CONF_APPS): - if not import_data.get(CONF_APPS): + if entry.data.get(CONF_APPS) != import_config.get(CONF_APPS): + if not import_config.get(CONF_APPS): remove_apps = True else: - updated_options[CONF_APPS] = import_data[CONF_APPS] + updated_options[CONF_APPS] = import_config[CONF_APPS] - if entry.data.get(CONF_VOLUME_STEP) != import_data[CONF_VOLUME_STEP]: - updated_options[CONF_VOLUME_STEP] = import_data[CONF_VOLUME_STEP] + if entry.data.get(CONF_VOLUME_STEP) != import_config[CONF_VOLUME_STEP]: + updated_options[CONF_VOLUME_STEP] = import_config[CONF_VOLUME_STEP] if updated_options or updated_data or remove_apps: new_data = entry.data.copy() @@ -339,9 +345,9 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): self._must_show_form = True # Store config key/value pairs that are not configurable in user step so they # don't get lost on user step - if import_data.get(CONF_APPS): - self._apps = copy.deepcopy(import_data[CONF_APPS]) - return await self.async_step_user(user_input=import_data) + if import_config.get(CONF_APPS): + self._apps = copy.deepcopy(import_config[CONF_APPS]) + return await self.async_step_user(user_input=import_config) async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo diff --git a/homeassistant/components/vizio/coordinator.py b/homeassistant/components/vizio/coordinator.py index a7ca7d7f9ed..1930828b595 100644 --- a/homeassistant/components/vizio/coordinator.py +++ b/homeassistant/components/vizio/coordinator.py @@ -34,9 +34,10 @@ class VizioAppsDataUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]] self.fail_threshold = 10 self.store = store - async def _async_setup(self) -> None: + async def async_config_entry_first_refresh(self) -> None: """Refresh data for the first time when a config entry is setup.""" self.data = await self.store.async_load() or APPS + await super().async_config_entry_first_refresh() async def _async_update_data(self) -> list[dict[str, Any]]: """Update data via library.""" diff --git a/homeassistant/components/vizio/icons.json b/homeassistant/components/vizio/icons.json index be6f727de6f..ccdaf816bb0 100644 --- a/homeassistant/components/vizio/icons.json +++ b/homeassistant/components/vizio/icons.json @@ -1,7 +1,5 @@ { "services": { - "update_setting": { - "service": "mdi:cog" - } + "update_setting": "mdi:cog" } } diff --git a/homeassistant/components/vizio/media_player.py b/homeassistant/components/vizio/media_player.py index 5711d8fbac9..ba9c92f94f1 100644 --- a/homeassistant/components/vizio/media_player.py +++ b/homeassistant/components/vizio/media_player.py @@ -200,7 +200,7 @@ class VizioDevice(MediaPlayerEntity): return if not self._attr_available: - _LOGGER.warning( + _LOGGER.info( "Restored connection to %s", self._config_entry.data[CONF_HOST] ) self._attr_available = True diff --git a/homeassistant/components/vlc_telnet/__init__.py 
b/homeassistant/components/vlc_telnet/__init__.py index c327b58a644..a61fcafd2cb 100644 --- a/homeassistant/components/vlc_telnet/__init__.py +++ b/homeassistant/components/vlc_telnet/__init__.py @@ -5,9 +5,6 @@ from dataclasses import dataclass from aiovlc.client import Client from aiovlc.exceptions import AuthError, ConnectError -from homeassistant.components.media_player import ( - SCAN_INTERVAL as MEDIAPLAYER_SCAN_INTERVAL, -) from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform from homeassistant.core import HomeAssistant @@ -36,12 +33,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: VlcConfigEntry) -> bool: port = config[CONF_PORT] password = config[CONF_PASSWORD] - vlc = Client( - password=password, - host=host, - port=port, - timeout=int(MEDIAPLAYER_SCAN_INTERVAL.total_seconds() - 1), - ) + vlc = Client(password=password, host=host, port=port) available = True diff --git a/homeassistant/components/vlc_telnet/config_flow.py b/homeassistant/components/vlc_telnet/config_flow.py index 08564937959..6ccb92e5b8b 100644 --- a/homeassistant/components/vlc_telnet/config_flow.py +++ b/homeassistant/components/vlc_telnet/config_flow.py @@ -10,11 +10,11 @@ from aiovlc.client import Client from aiovlc.exceptions import AuthError, ConnectError import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.components.hassio import HassioServiceInfo +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .const import DEFAULT_PORT, DOMAIN @@ -70,6 +70,7 @@ class VLCTelnetConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for VLC media player Telnet.""" VERSION = 1 + entry: ConfigEntry | None = None hassio_discovery: dict[str, Any] | None = None async def async_step_user( @@ -107,19 +108,21 @@ class VLCTelnetConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth flow.""" - self.context["title_placeholders"] = {"host": entry_data[CONF_HOST]} + self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + assert self.entry + self.context["title_placeholders"] = {"host": self.entry.data[CONF_HOST]} return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reauth confirm.""" + assert self.entry errors = {} - reauth_entry = self._get_reauth_entry() if user_input is not None: try: - await validate_input(self.hass, {**reauth_entry.data, **user_input}) + await validate_input(self.hass, {**self.entry.data, **user_input}) except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: @@ -128,14 +131,21 @@ class VLCTelnetConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - return self.async_update_reload_and_abort( - reauth_entry, - data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]}, + self.hass.config_entries.async_update_entry( + self.entry, + data={ + **self.entry.data, + CONF_PASSWORD: user_input[CONF_PASSWORD], + }, ) + self.hass.async_create_task( + self.hass.config_entries.async_reload(self.entry.entry_id) + ) 
+ return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", - description_placeholders={CONF_HOST: reauth_entry.data[CONF_HOST]}, + description_placeholders={CONF_HOST: self.entry.data[CONF_HOST]}, data_schema=STEP_REAUTH_DATA_SCHEMA, errors=errors, ) diff --git a/homeassistant/components/vlc_telnet/manifest.json b/homeassistant/components/vlc_telnet/manifest.json index 5041619e84f..7a5e00cff21 100644 --- a/homeassistant/components/vlc_telnet/manifest.json +++ b/homeassistant/components/vlc_telnet/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/vlc_telnet", "iot_class": "local_polling", "loggers": ["aiovlc"], - "requirements": ["aiovlc==0.5.1"] + "requirements": ["aiovlc==0.3.2"] } diff --git a/homeassistant/components/vlc_telnet/media_player.py b/homeassistant/components/vlc_telnet/media_player.py index b95e987aef8..bd58b2ad23a 100644 --- a/homeassistant/components/vlc_telnet/media_player.py +++ b/homeassistant/components/vlc_telnet/media_player.py @@ -131,7 +131,7 @@ class VlcDevice(MediaPlayerEntity): self._attr_state = MediaPlayerState.IDLE self._attr_available = True - LOGGER.debug("Connected to vlc host: %s", self._vlc.host) + LOGGER.info("Connected to vlc host: %s", self._vlc.host) status = await self._vlc.status() LOGGER.debug("Status: %s", status) @@ -175,13 +175,13 @@ class VlcDevice(MediaPlayerEntity): # Fall back to filename. if data_info := data.get("data"): - media_title = _get_str(data_info, "filename") + self._attr_media_title = _get_str(data_info, "filename") # Strip out auth signatures if streaming local media - if media_title and (pos := media_title.find("?authSig=")) != -1: + if (media_title := self.media_title) and ( + pos := media_title.find("?authSig=") + ) != -1: self._attr_media_title = media_title[:pos] - else: - self._attr_media_title = media_title @catch_vlc_errors async def async_media_seek(self, position: float) -> None: diff --git a/homeassistant/components/vodafone_station/config_flow.py b/homeassistant/components/vodafone_station/config_flow.py index 7a80244f8d6..6b6adb6a18d 100644 --- a/homeassistant/components/vodafone_station/config_flow.py +++ b/homeassistant/components/vodafone_station/config_flow.py @@ -17,6 +17,7 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, + OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback @@ -59,14 +60,13 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Vodafone Station.""" VERSION = 1 + entry: ConfigEntry | None = None @staticmethod @callback - def async_get_options_flow( - config_entry: ConfigEntry, - ) -> VodafoneStationOptionsFlowHandler: + def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the options flow for this handler.""" - return VodafoneStationOptionsFlowHandler() + return VodafoneStationOptionsFlowHandler(config_entry) async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -106,19 +106,21 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth flow.""" - self.context["title_placeholders"] = {"host": entry_data[CONF_HOST]} + self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + assert self.entry + self.context["title_placeholders"] = {"host": self.entry.data[CONF_HOST]} return await 
self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reauth confirm.""" + assert self.entry errors = {} - reauth_entry = self._get_reauth_entry() if user_input is not None: try: - await validate_input(self.hass, {**reauth_entry.data, **user_input}) + await validate_input(self.hass, {**self.entry.data, **user_input}) except aiovodafone_exceptions.AlreadyLogged: errors["base"] = "already_logged" except aiovodafone_exceptions.CannotConnect: @@ -129,22 +131,27 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - return self.async_update_reload_and_abort( - reauth_entry, - data_updates={ + self.hass.config_entries.async_update_entry( + self.entry, + data={ + **self.entry.data, CONF_PASSWORD: user_input[CONF_PASSWORD], }, ) + self.hass.async_create_task( + self.hass.config_entries.async_reload(self.entry.entry_id) + ) + return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", - description_placeholders={CONF_HOST: reauth_entry.data[CONF_HOST]}, + description_placeholders={CONF_HOST: self.entry.data[CONF_HOST]}, data_schema=STEP_REAUTH_DATA_SCHEMA, errors=errors, ) -class VodafoneStationOptionsFlowHandler(OptionsFlow): +class VodafoneStationOptionsFlowHandler(OptionsFlowWithConfigEntry): """Handle a option flow.""" async def async_step_init( @@ -159,7 +166,7 @@ class VodafoneStationOptionsFlowHandler(OptionsFlow): { vol.Optional( CONF_CONSIDER_HOME, - default=self.config_entry.options.get( + default=self.options.get( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds() ), ): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=900)) diff --git a/homeassistant/components/vodafone_station/device_tracker.py b/homeassistant/components/vodafone_station/device_tracker.py index 3e4d7763bff..85ad834cd23 100644 --- a/homeassistant/components/vodafone_station/device_tracker.py +++ b/homeassistant/components/vodafone_station/device_tracker.py @@ -2,7 +2,9 @@ from __future__ import annotations -from homeassistant.components.device_tracker import ScannerEntity +from aiovodafone import VodafoneStationDevice + +from homeassistant.components.device_tracker import ScannerEntity, SourceType from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -61,7 +63,6 @@ class VodafoneStationTracker(CoordinatorEntity[VodafoneStationRouter], ScannerEn """Representation of a Vodafone Station device.""" _attr_translation_key = "device_tracker" - mac_address: str def __init__( self, coordinator: VodafoneStationRouter, device_info: VodafoneStationDeviceInfo @@ -69,22 +70,43 @@ class VodafoneStationTracker(CoordinatorEntity[VodafoneStationRouter], ScannerEn """Initialize a Vodafone Station device.""" super().__init__(coordinator) self._coordinator = coordinator - mac = device_info.device.mac - self._attr_mac_address = mac + device = device_info.device + mac = device.mac + self._device_mac = mac self._attr_unique_id = mac - self._attr_hostname = device_info.device.name or mac.replace(":", "_") + self._attr_name = device.name or mac.replace(":", "_") @property def _device_info(self) -> VodafoneStationDeviceInfo: """Return fresh data for the device.""" - return self.coordinator.data.devices[self.mac_address] + return self.coordinator.data.devices[self._device_mac] + + @property + def 
_device(self) -> VodafoneStationDevice: + """Return fresh data for the device.""" + return self.coordinator.data.devices[self._device_mac].device @property def is_connected(self) -> bool: """Return true if the device is connected to the network.""" return self._device_info.home + @property + def source_type(self) -> SourceType: + """Return the source type.""" + return SourceType.ROUTER + + @property + def hostname(self) -> str | None: + """Return the hostname of device.""" + return self._attr_name + @property def ip_address(self) -> str | None: """Return the primary ip address of the device.""" - return self._device_info.device.ip_address + return self._device.ip_address + + @property + def mac_address(self) -> str: + """Return the mac address of the device.""" + return self._device_mac diff --git a/homeassistant/components/vodafone_station/diagnostics.py b/homeassistant/components/vodafone_station/diagnostics.py deleted file mode 100644 index e306d6caca2..00000000000 --- a/homeassistant/components/vodafone_station/diagnostics.py +++ /dev/null @@ -1,47 +0,0 @@ -"""Diagnostics support for Vodafone Station.""" - -from __future__ import annotations - -from typing import Any - -from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant - -from .const import DOMAIN -from .coordinator import VodafoneStationRouter - -TO_REDACT = {CONF_USERNAME, CONF_PASSWORD} - - -async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry -) -> dict[str, Any]: - """Return diagnostics for a config entry.""" - - coordinator: VodafoneStationRouter = hass.data[DOMAIN][entry.entry_id] - - sensors_data = coordinator.data.sensors - return { - "entry": async_redact_data(entry.as_dict(), TO_REDACT), - "device_info": { - "sys_model_name": sensors_data.get("sys_model_name"), - "sys_firmware_version": sensors_data["sys_firmware_version"], - "sys_hardware_version": sensors_data["sys_hardware_version"], - "sys_cpu_usage": sensors_data["sys_cpu_usage"][:-1], - "sys_memory_usage": sensors_data["sys_memory_usage"][:-1], - "sys_reboot_cause": sensors_data["sys_reboot_cause"], - "last_update success": coordinator.last_update_success, - "last_exception": coordinator.last_exception, - "client_devices": [ - { - "hostname": device_info.device.name, - "connection_type": device_info.device.connection_type, - "connected": device_info.device.connected, - "type": device_info.device.type, - } - for _, device_info in coordinator.data.devices.items() - ], - }, - } diff --git a/homeassistant/components/vodafone_station/manifest.json b/homeassistant/components/vodafone_station/manifest.json index 29cb3c070ab..47137fff26c 100644 --- a/homeassistant/components/vodafone_station/manifest.json +++ b/homeassistant/components/vodafone_station/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_polling", "loggers": ["aiovodafone"], "quality_scale": "silver", - "requirements": ["aiovodafone==0.6.1"] + "requirements": ["aiovodafone==0.6.0"] } diff --git a/homeassistant/components/vodafone_station/sensor.py b/homeassistant/components/vodafone_station/sensor.py index 307fcaf0ea8..2a08a9b2ebe 100644 --- a/homeassistant/components/vodafone_station/sensor.py +++ b/homeassistant/components/vodafone_station/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass from datetime import datetime 
-from typing import Final +from typing import Any, Final from homeassistant.components.sensor import ( SensorDeviceClass, @@ -16,49 +16,32 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfDataRate from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import _LOGGER, DOMAIN, LINE_TYPES from .coordinator import VodafoneStationRouter NOT_AVAILABLE: list = ["", "N/A", "0.0.0.0"] -UPTIME_DEVIATION = 60 @dataclass(frozen=True, kw_only=True) class VodafoneStationEntityDescription(SensorEntityDescription): """Vodafone Station entity description.""" - value: Callable[ - [VodafoneStationRouter, str | datetime | float | None, str], - str | datetime | float | None, - ] = lambda coordinator, last_value, key: coordinator.data.sensors[key] + value: Callable[[Any, Any], Any] = ( + lambda coordinator, key: coordinator.data.sensors[key] + ) is_suitable: Callable[[dict], bool] = lambda val: True -def _calculate_uptime( - coordinator: VodafoneStationRouter, - last_value: str | datetime | float | None, - key: str, -) -> datetime: +def _calculate_uptime(coordinator: VodafoneStationRouter, key: str) -> datetime: """Calculate device uptime.""" - delta_uptime = coordinator.api.convert_uptime(coordinator.data.sensors[key]) - - if ( - not isinstance(last_value, datetime) - or abs((delta_uptime - last_value).total_seconds()) > UPTIME_DEVIATION - ): - return delta_uptime - - return last_value + return coordinator.api.convert_uptime(coordinator.data.sensors[key]) -def _line_connection( - coordinator: VodafoneStationRouter, - last_value: str | datetime | float | None, - key: str, -) -> str | None: +def _line_connection(coordinator: VodafoneStationRouter, key: str) -> str | None: """Identify line type.""" value = coordinator.data.sensors @@ -143,18 +126,14 @@ SENSOR_TYPES: Final = ( translation_key="sys_cpu_usage", native_unit_of_measurement=PERCENTAGE, entity_category=EntityCategory.DIAGNOSTIC, - value=lambda coordinator, last_value, key: float( - coordinator.data.sensors[key][:-1] - ), + value=lambda coordinator, key: float(coordinator.data.sensors[key][:-1]), ), VodafoneStationEntityDescription( key="sys_memory_usage", translation_key="sys_memory_usage", native_unit_of_measurement=PERCENTAGE, entity_category=EntityCategory.DIAGNOSTIC, - value=lambda coordinator, last_value, key: float( - coordinator.data.sensors[key][:-1] - ), + value=lambda coordinator, key: float(coordinator.data.sensors[key][:-1]), ), VodafoneStationEntityDescription( key="sys_reboot_cause", @@ -199,12 +178,10 @@ class VodafoneStationSensorEntity( self.entity_description = description self._attr_device_info = coordinator.device_info self._attr_unique_id = f"{coordinator.serial_number}_{description.key}" - self._old_state: str | datetime | float | None = None @property - def native_value(self) -> str | datetime | float | None: + def native_value(self) -> StateType: """Sensor value.""" - self._old_state = self.entity_description.value( - self.coordinator, self._old_state, self.entity_description.key + return self.entity_description.value( + self.coordinator, self.entity_description.key ) - return self._old_state diff --git a/homeassistant/components/voip/__init__.py b/homeassistant/components/voip/__init__.py index cee0cbb0766..9ab6a8bf0e8 100644 --- 
a/homeassistant/components/voip/__init__.py +++ b/homeassistant/components/voip/__init__.py @@ -20,7 +20,6 @@ from .devices import VoIPDevices from .voip import HassVoipDatagramProtocol PLATFORMS = ( - Platform.ASSIST_SATELLITE, Platform.BINARY_SENSOR, Platform.SELECT, Platform.SWITCH, diff --git a/homeassistant/components/voip/assist_satellite.py b/homeassistant/components/voip/assist_satellite.py deleted file mode 100644 index 0100435d6dc..00000000000 --- a/homeassistant/components/voip/assist_satellite.py +++ /dev/null @@ -1,326 +0,0 @@ -"""Assist satellite entity for VoIP integration.""" - -from __future__ import annotations - -import asyncio -from enum import IntFlag -from functools import partial -import io -import logging -from pathlib import Path -from typing import TYPE_CHECKING, Any, Final -import wave - -from voip_utils import RtpDatagramProtocol - -from homeassistant.components import tts -from homeassistant.components.assist_pipeline import PipelineEvent, PipelineEventType -from homeassistant.components.assist_satellite import ( - AssistSatelliteConfiguration, - AssistSatelliteEntity, - AssistSatelliteEntityDescription, -) -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import Context, HomeAssistant, callback -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .const import CHANNELS, DOMAIN, RATE, RTP_AUDIO_SETTINGS, WIDTH -from .devices import VoIPDevice -from .entity import VoIPEntity - -if TYPE_CHECKING: - from . import DomainData - -_LOGGER = logging.getLogger(__name__) - -_PIPELINE_TIMEOUT_SEC: Final = 30 - - -class Tones(IntFlag): - """Feedback tones for specific events.""" - - LISTENING = 1 - PROCESSING = 2 - ERROR = 4 - - -_TONE_FILENAMES: dict[Tones, str] = { - Tones.LISTENING: "tone.pcm", - Tones.PROCESSING: "processing.pcm", - Tones.ERROR: "error.pcm", -} - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up VoIP Assist satellite entity.""" - domain_data: DomainData = hass.data[DOMAIN] - - @callback - def async_add_device(device: VoIPDevice) -> None: - """Add device.""" - async_add_entities([VoipAssistSatellite(hass, device, config_entry)]) - - domain_data.devices.async_add_new_device_listener(async_add_device) - - entities: list[VoIPEntity] = [ - VoipAssistSatellite(hass, device, config_entry) - for device in domain_data.devices - ] - - async_add_entities(entities) - - -class VoipAssistSatellite(VoIPEntity, AssistSatelliteEntity, RtpDatagramProtocol): - """Assist satellite for VoIP devices.""" - - entity_description = AssistSatelliteEntityDescription(key="assist_satellite") - _attr_translation_key = "assist_satellite" - _attr_name = None - - def __init__( - self, - hass: HomeAssistant, - voip_device: VoIPDevice, - config_entry: ConfigEntry, - tones=Tones.LISTENING | Tones.PROCESSING | Tones.ERROR, - ) -> None: - """Initialize an Assist satellite.""" - VoIPEntity.__init__(self, voip_device) - AssistSatelliteEntity.__init__(self) - RtpDatagramProtocol.__init__(self) - - self.config_entry = config_entry - - self._audio_queue: asyncio.Queue[bytes | None] = asyncio.Queue() - self._audio_chunk_timeout: float = 2.0 - self._run_pipeline_task: asyncio.Task | None = None - self._pipeline_had_error: bool = False - self._tts_done = asyncio.Event() - self._tts_extra_timeout: float = 1.0 - self._tone_bytes: dict[Tones, bytes] = {} - self._tones = tones - self._processing_tone_done = asyncio.Event() - - @property - def 
pipeline_entity_id(self) -> str | None: - """Return the entity ID of the pipeline to use for the next conversation.""" - return self.voip_device.get_pipeline_entity_id(self.hass) - - @property - def vad_sensitivity_entity_id(self) -> str | None: - """Return the entity ID of the VAD sensitivity to use for the next conversation.""" - return self.voip_device.get_vad_sensitivity_entity_id(self.hass) - - @property - def tts_options(self) -> dict[str, Any] | None: - """Options passed for text-to-speech.""" - return { - tts.ATTR_PREFERRED_FORMAT: "wav", - tts.ATTR_PREFERRED_SAMPLE_RATE: 16000, - tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 1, - tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, - } - - async def async_added_to_hass(self) -> None: - """Run when entity about to be added to hass.""" - await super().async_added_to_hass() - self.voip_device.protocol = self - - async def async_will_remove_from_hass(self) -> None: - """Run when entity will be removed from hass.""" - await super().async_will_remove_from_hass() - assert self.voip_device.protocol == self - self.voip_device.protocol = None - - @callback - def async_get_configuration( - self, - ) -> AssistSatelliteConfiguration: - """Get the current satellite configuration.""" - raise NotImplementedError - - async def async_set_configuration( - self, config: AssistSatelliteConfiguration - ) -> None: - """Set the current satellite configuration.""" - raise NotImplementedError - - # ------------------------------------------------------------------------- - # VoIP - # ------------------------------------------------------------------------- - - def on_chunk(self, audio_bytes: bytes) -> None: - """Handle raw audio chunk.""" - if self._run_pipeline_task is None: - # Run pipeline until voice command finishes, then start over - self._clear_audio_queue() - self._tts_done.clear() - self._run_pipeline_task = self.config_entry.async_create_background_task( - self.hass, - self._run_pipeline(), - "voip_pipeline_run", - ) - - self._audio_queue.put_nowait(audio_bytes) - - async def _run_pipeline(self) -> None: - _LOGGER.debug("Starting pipeline") - - self.async_set_context(Context(user_id=self.config_entry.data["user"])) - self.voip_device.set_is_active(True) - - async def stt_stream(): - while True: - async with asyncio.timeout(self._audio_chunk_timeout): - chunk = await self._audio_queue.get() - if not chunk: - break - - yield chunk - - # Play listening tone at the start of each cycle - await self._play_tone(Tones.LISTENING, silence_before=0.2) - - try: - await self.async_accept_pipeline_from_satellite( - audio_stream=stt_stream(), - ) - - if self._pipeline_had_error: - self._pipeline_had_error = False - await self._play_tone(Tones.ERROR) - else: - # Block until TTS is done speaking. - # - # This is set in _send_tts and has a timeout that's based on the - # length of the TTS audio. 
- await self._tts_done.wait() - except TimeoutError: - self.disconnect() # caller hung up - finally: - # Stop audio stream - await self._audio_queue.put(None) - - self.voip_device.set_is_active(False) - self._run_pipeline_task = None - _LOGGER.debug("Pipeline finished") - - def _clear_audio_queue(self) -> None: - """Ensure audio queue is empty.""" - while not self._audio_queue.empty(): - self._audio_queue.get_nowait() - - def on_pipeline_event(self, event: PipelineEvent) -> None: - """Set state based on pipeline stage.""" - if event.type == PipelineEventType.STT_END: - if (self._tones & Tones.PROCESSING) == Tones.PROCESSING: - self._processing_tone_done.clear() - self.config_entry.async_create_background_task( - self.hass, self._play_tone(Tones.PROCESSING), "voip_process_tone" - ) - elif event.type == PipelineEventType.TTS_END: - # Send TTS audio to caller over RTP - if event.data and (tts_output := event.data["tts_output"]): - media_id = tts_output["media_id"] - self.config_entry.async_create_background_task( - self.hass, - self._send_tts(media_id), - "voip_pipeline_tts", - ) - else: - # Empty TTS response - self._tts_done.set() - elif event.type == PipelineEventType.ERROR: - # Play error tone instead of wait for TTS when pipeline is finished. - self._pipeline_had_error = True - _LOGGER.warning(event) - - async def _send_tts(self, media_id: str) -> None: - """Send TTS audio to caller via RTP.""" - try: - if self.transport is None: - return # not connected - - extension, data = await tts.async_get_media_source_audio( - self.hass, - media_id, - ) - - if extension != "wav": - raise ValueError(f"Only WAV audio can be streamed, got {extension}") - - if (self._tones & Tones.PROCESSING) == Tones.PROCESSING: - # Don't overlap TTS and processing beep - _LOGGER.debug("Waiting for processing tone") - await self._processing_tone_done.wait() - - with io.BytesIO(data) as wav_io: - with wave.open(wav_io, "rb") as wav_file: - sample_rate = wav_file.getframerate() - sample_width = wav_file.getsampwidth() - sample_channels = wav_file.getnchannels() - - if ( - (sample_rate != RATE) - or (sample_width != WIDTH) - or (sample_channels != CHANNELS) - ): - raise ValueError( - f"Expected rate/width/channels as {RATE}/{WIDTH}/{CHANNELS}," - f" got {sample_rate}/{sample_width}/{sample_channels}" - ) - - audio_bytes = wav_file.readframes(wav_file.getnframes()) - - _LOGGER.debug("Sending %s byte(s) of audio", len(audio_bytes)) - - # Time out 1 second after TTS audio should be finished - tts_samples = len(audio_bytes) / (WIDTH * CHANNELS) - tts_seconds = tts_samples / RATE - - async with asyncio.timeout(tts_seconds + self._tts_extra_timeout): - # TTS audio is 16Khz 16-bit mono - await self._async_send_audio(audio_bytes) - except TimeoutError: - _LOGGER.warning("TTS timeout") - raise - finally: - # Update satellite state - self.tts_response_finished() - - # Signal pipeline to restart - self._tts_done.set() - - async def _async_send_audio(self, audio_bytes: bytes, **kwargs): - """Send audio in executor.""" - await self.hass.async_add_executor_job( - partial(self.send_audio, audio_bytes, **RTP_AUDIO_SETTINGS, **kwargs) - ) - - async def _play_tone(self, tone: Tones, silence_before: float = 0.0) -> None: - """Play a tone as feedback to the user if it's enabled.""" - if (self._tones & tone) != tone: - return # not enabled - - if tone not in self._tone_bytes: - # Do I/O in executor - self._tone_bytes[tone] = await self.hass.async_add_executor_job( - self._load_pcm, - _TONE_FILENAMES[tone], - ) - - await 
self._async_send_audio( - self._tone_bytes[tone], - silence_before=silence_before, - ) - - if tone == Tones.PROCESSING: - self._processing_tone_done.set() - - def _load_pcm(self, file_name: str) -> bytes: - """Load raw audio (16Khz, 16-bit mono).""" - return (Path(__file__).parent / file_name).read_bytes() diff --git a/homeassistant/components/voip/binary_sensor.py b/homeassistant/components/voip/binary_sensor.py index f38b228c46c..8eeefbd5d94 100644 --- a/homeassistant/components/voip/binary_sensor.py +++ b/homeassistant/components/voip/binary_sensor.py @@ -10,7 +10,6 @@ from homeassistant.components.binary_sensor import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN @@ -43,7 +42,6 @@ class VoIPCallInProgress(VoIPEntity, BinarySensorEntity): """Entity to represent voip call is in progress.""" entity_description = BinarySensorEntityDescription( - entity_registry_enabled_default=False, key="call_in_progress", translation_key="call_in_progress", ) @@ -53,44 +51,10 @@ class VoIPCallInProgress(VoIPEntity, BinarySensorEntity): """Call when entity about to be added to hass.""" await super().async_added_to_hass() - self.async_on_remove( - self.voip_device.async_listen_update(self._is_active_changed) - ) - - await super().async_added_to_hass() - if TYPE_CHECKING: - assert self.registry_entry is not None - ir.async_create_issue( - self.hass, - DOMAIN, - f"assist_in_progress_deprecated_{self.registry_entry.id}", - breaks_in_ha_version="2025.4", - data={ - "entity_id": self.entity_id, - "entity_uuid": self.registry_entry.id, - "integration_name": "VoIP", - }, - is_fixable=True, - severity=ir.IssueSeverity.WARNING, - translation_key="assist_in_progress_deprecated", - translation_placeholders={ - "integration_name": "VoIP", - }, - ) - - async def async_will_remove_from_hass(self) -> None: - """Remove issue.""" - await super().async_will_remove_from_hass() - if TYPE_CHECKING: - assert self.registry_entry is not None - ir.async_delete_issue( - self.hass, - DOMAIN, - f"assist_in_progress_deprecated_{self.registry_entry.id}", - ) + self.async_on_remove(self._device.async_listen_update(self._is_active_changed)) @callback def _is_active_changed(self, device: VoIPDevice) -> None: """Call when active state changed.""" - self._attr_is_on = self.voip_device.is_active + self._attr_is_on = self._device.is_active self.async_write_ha_state() diff --git a/homeassistant/components/voip/config_flow.py b/homeassistant/components/voip/config_flow.py index 63dcb8f86ee..821c7f29a1e 100644 --- a/homeassistant/components/voip/config_flow.py +++ b/homeassistant/components/voip/config_flow.py @@ -47,12 +47,16 @@ class VoIPConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Create the options flow.""" - return VoipOptionsFlowHandler() + return VoipOptionsFlowHandler(config_entry) class VoipOptionsFlowHandler(OptionsFlow): """Handle VoIP options.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + self.config_entry = config_entry + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/voip/devices.py b/homeassistant/components/voip/devices.py index 613d05fc614..4e2dca15308 100644 --- a/homeassistant/components/voip/devices.py +++ 
b/homeassistant/components/voip/devices.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable, Iterator from dataclasses import dataclass, field -from voip_utils import CallInfo, VoipDatagramProtocol +from voip_utils import CallInfo from homeassistant.config_entries import ConfigEntry from homeassistant.core import Event, HomeAssistant, callback @@ -22,7 +22,6 @@ class VoIPDevice: device_id: str is_active: bool = False update_listeners: list[Callable[[VoIPDevice], None]] = field(default_factory=list) - protocol: VoipDatagramProtocol | None = None @callback def set_is_active(self, active: bool) -> None: @@ -57,18 +56,6 @@ class VoIPDevice: return False - def get_pipeline_entity_id(self, hass: HomeAssistant) -> str | None: - """Return entity id for pipeline select.""" - ent_reg = er.async_get(hass) - return ent_reg.async_get_entity_id("select", DOMAIN, f"{self.voip_id}-pipeline") - - def get_vad_sensitivity_entity_id(self, hass: HomeAssistant) -> str | None: - """Return entity id for VAD sensitivity.""" - ent_reg = er.async_get(hass) - return ent_reg.async_get_entity_id( - "select", DOMAIN, f"{self.voip_id}-vad_sensitivity" - ) - class VoIPDevices: """Class to store devices.""" diff --git a/homeassistant/components/voip/entity.py b/homeassistant/components/voip/entity.py index e96784bc218..9e1e067b195 100644 --- a/homeassistant/components/voip/entity.py +++ b/homeassistant/components/voip/entity.py @@ -15,10 +15,10 @@ class VoIPEntity(entity.Entity): _attr_has_entity_name = True _attr_should_poll = False - def __init__(self, voip_device: VoIPDevice) -> None: + def __init__(self, device: VoIPDevice) -> None: """Initialize VoIP entity.""" - self.voip_device = voip_device - self._attr_unique_id = f"{voip_device.voip_id}-{self.entity_description.key}" + self._device = device + self._attr_unique_id = f"{device.voip_id}-{self.entity_description.key}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, voip_device.voip_id)}, + identifiers={(DOMAIN, device.voip_id)}, ) diff --git a/homeassistant/components/voip/manifest.json b/homeassistant/components/voip/manifest.json index 964193fca53..594abc69c13 100644 --- a/homeassistant/components/voip/manifest.json +++ b/homeassistant/components/voip/manifest.json @@ -3,7 +3,7 @@ "name": "Voice over IP", "codeowners": ["@balloob", "@synesthesiam"], "config_flow": true, - "dependencies": ["assist_pipeline", "assist_satellite"], + "dependencies": ["assist_pipeline"], "documentation": "https://www.home-assistant.io/integrations/voip", "iot_class": "local_push", "quality_scale": "internal", diff --git a/homeassistant/components/voip/repairs.py b/homeassistant/components/voip/repairs.py deleted file mode 100644 index 11cacbb7486..00000000000 --- a/homeassistant/components/voip/repairs.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Repairs implementation for the VoIP integration.""" - -from __future__ import annotations - -from homeassistant.components.assist_pipeline.repair_flows import ( - AssistInProgressDeprecatedRepairFlow, -) -from homeassistant.components.repairs import RepairsFlow -from homeassistant.core import HomeAssistant - - -async def async_create_fix_flow( - hass: HomeAssistant, - issue_id: str, - data: dict[str, str | int | float | None] | None, -) -> RepairsFlow: - """Create flow.""" - if issue_id.startswith("assist_in_progress_deprecated"): - return AssistInProgressDeprecatedRepairFlow(data) - # If VoIP adds confirm-only repairs in the future, this should be changed - # to return a ConfirmRepairFlow instead of 
raising a ValueError - raise ValueError(f"unknown repair {issue_id}") diff --git a/homeassistant/components/voip/strings.json b/homeassistant/components/voip/strings.json index c25c22f3f80..8bcbb06d4e2 100644 --- a/homeassistant/components/voip/strings.json +++ b/homeassistant/components/voip/strings.json @@ -37,18 +37,6 @@ } } }, - "issues": { - "assist_in_progress_deprecated": { - "title": "[%key:component::assist_pipeline::issues::assist_in_progress_deprecated::title%]", - "fix_flow": { - "step": { - "confirm_disable_entity": { - "description": "[%key:component::assist_pipeline::issues::assist_in_progress_deprecated::fix_flow::step::confirm_disable_entity::description%]" - } - } - } - } - }, "options": { "step": { "init": { diff --git a/homeassistant/components/voip/voip.py b/homeassistant/components/voip/voip.py index 6f6cf989d3b..5770d9d2b4a 100644 --- a/homeassistant/components/voip/voip.py +++ b/homeassistant/components/voip/voip.py @@ -3,11 +3,15 @@ from __future__ import annotations import asyncio +from collections import deque +from collections.abc import AsyncIterable, MutableSequence, Sequence from functools import partial +import io import logging from pathlib import Path import time from typing import TYPE_CHECKING +import wave from voip_utils import ( CallInfo, @@ -17,19 +21,31 @@ from voip_utils import ( VoipDatagramProtocol, ) +from homeassistant.components import stt, tts from homeassistant.components.assist_pipeline import ( Pipeline, + PipelineEvent, + PipelineEventType, PipelineNotFound, async_get_pipeline, + async_pipeline_from_audio_stream, select as pipeline_select, ) +from homeassistant.components.assist_pipeline.vad import ( + AudioBuffer, + VadSensitivity, + VoiceActivityDetector, + VoiceCommandSegmenter, + WebRtcVad, +) from homeassistant.const import __version__ -from homeassistant.core import HomeAssistant +from homeassistant.core import Context, HomeAssistant +from homeassistant.util.ulid import ulid_now from .const import CHANNELS, DOMAIN, RATE, RTP_AUDIO_SETTINGS, WIDTH if TYPE_CHECKING: - from .devices import VoIPDevices + from .devices import VoIPDevice, VoIPDevices _LOGGER = logging.getLogger(__name__) @@ -42,8 +58,11 @@ def make_protocol( ) -> VoipDatagramProtocol: """Plays a pre-recorded message if pipeline is misconfigured.""" voip_device = devices.async_get_or_create(call_info) - - pipeline_id = pipeline_select.get_chosen_pipeline(hass, DOMAIN, voip_device.voip_id) + pipeline_id = pipeline_select.get_chosen_pipeline( + hass, + DOMAIN, + voip_device.voip_id, + ) try: pipeline: Pipeline | None = async_get_pipeline(hass, pipeline_id) except PipelineNotFound: @@ -62,18 +81,22 @@ def make_protocol( rtcp_state=rtcp_state, ) - if (protocol := voip_device.protocol) is None: - raise ValueError("VoIP satellite not found") + vad_sensitivity = pipeline_select.get_vad_sensitivity( + hass, + DOMAIN, + voip_device.voip_id, + ) - protocol._rtp_input.opus_payload_type = call_info.opus_payload_type # noqa: SLF001 - protocol._rtp_output.opus_payload_type = call_info.opus_payload_type # noqa: SLF001 - - protocol.rtcp_state = rtcp_state - if protocol.rtcp_state is not None: - # Automatically disconnect when BYE is received over RTCP - protocol.rtcp_state.bye_callback = protocol.disconnect - - return protocol + # Pipeline is properly configured + return PipelineRtpDatagramProtocol( + hass, + hass.config.language, + voip_device, + Context(user_id=devices.config_entry.data["user"]), + opus_payload_type=call_info.opus_payload_type, + 
silence_seconds=VadSensitivity.to_seconds(vad_sensitivity), + rtcp_state=rtcp_state, + ) class HassVoipDatagramProtocol(VoipDatagramProtocol): @@ -118,6 +141,364 @@ class HassVoipDatagramProtocol(VoipDatagramProtocol): await self._closed_event.wait() +class PipelineRtpDatagramProtocol(RtpDatagramProtocol): + """Run a voice assistant pipeline in a loop for a VoIP call.""" + + def __init__( + self, + hass: HomeAssistant, + language: str, + voip_device: VoIPDevice, + context: Context, + opus_payload_type: int, + pipeline_timeout: float = 30.0, + audio_timeout: float = 2.0, + buffered_chunks_before_speech: int = 100, + listening_tone_enabled: bool = True, + processing_tone_enabled: bool = True, + error_tone_enabled: bool = True, + tone_delay: float = 0.2, + tts_extra_timeout: float = 1.0, + silence_seconds: float = 1.0, + rtcp_state: RtcpState | None = None, + ) -> None: + """Set up pipeline RTP server.""" + super().__init__( + rate=RATE, + width=WIDTH, + channels=CHANNELS, + opus_payload_type=opus_payload_type, + rtcp_state=rtcp_state, + ) + + self.hass = hass + self.language = language + self.voip_device = voip_device + self.pipeline: Pipeline | None = None + self.pipeline_timeout = pipeline_timeout + self.audio_timeout = audio_timeout + self.buffered_chunks_before_speech = buffered_chunks_before_speech + self.listening_tone_enabled = listening_tone_enabled + self.processing_tone_enabled = processing_tone_enabled + self.error_tone_enabled = error_tone_enabled + self.tone_delay = tone_delay + self.tts_extra_timeout = tts_extra_timeout + self.silence_seconds = silence_seconds + + self._audio_queue: asyncio.Queue[bytes] = asyncio.Queue() + self._context = context + self._conversation_id: str | None = None + self._pipeline_task: asyncio.Task | None = None + self._tts_done = asyncio.Event() + self._session_id: str | None = None + self._tone_bytes: bytes | None = None + self._processing_bytes: bytes | None = None + self._error_bytes: bytes | None = None + self._pipeline_error: bool = False + + def connection_made(self, transport): + """Server is ready.""" + super().connection_made(transport) + self.voip_device.set_is_active(True) + + def connection_lost(self, exc): + """Handle connection is lost or closed.""" + super().connection_lost(exc) + self.voip_device.set_is_active(False) + + def on_chunk(self, audio_bytes: bytes) -> None: + """Handle raw audio chunk.""" + if self._pipeline_task is None: + self._clear_audio_queue() + + # Run pipeline until voice command finishes, then start over + self._pipeline_task = self.hass.async_create_background_task( + self._run_pipeline(), + "voip_pipeline_run", + ) + + self._audio_queue.put_nowait(audio_bytes) + + async def _run_pipeline( + self, + ) -> None: + """Forward audio to pipeline STT and handle TTS.""" + if self._session_id is None: + self._session_id = ulid_now() + + # Play listening tone at the start of each cycle + if self.listening_tone_enabled: + await self._play_listening_tone() + + try: + # Wait for speech before starting pipeline + segmenter = VoiceCommandSegmenter(silence_seconds=self.silence_seconds) + vad = WebRtcVad() + chunk_buffer: deque[bytes] = deque( + maxlen=self.buffered_chunks_before_speech, + ) + speech_detected = await self._wait_for_speech( + segmenter, + vad, + chunk_buffer, + ) + if not speech_detected: + _LOGGER.debug("No speech detected") + return + + _LOGGER.debug("Starting pipeline") + self._tts_done.clear() + + async def stt_stream(): + try: + async for chunk in self._segment_audio( + segmenter, + vad, + chunk_buffer, + ): + 
yield chunk + + if self.processing_tone_enabled: + await self._play_processing_tone() + except TimeoutError: + # Expected after caller hangs up + _LOGGER.debug("Audio timeout") + self._session_id = None + self.disconnect() + finally: + self._clear_audio_queue() + + # Run pipeline with a timeout + async with asyncio.timeout(self.pipeline_timeout): + await async_pipeline_from_audio_stream( + self.hass, + context=self._context, + event_callback=self._event_callback, + stt_metadata=stt.SpeechMetadata( + language="", # set in async_pipeline_from_audio_stream + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=stt_stream(), + pipeline_id=pipeline_select.get_chosen_pipeline( + self.hass, DOMAIN, self.voip_device.voip_id + ), + conversation_id=self._conversation_id, + device_id=self.voip_device.device_id, + tts_audio_output="wav", + ) + + if self._pipeline_error: + self._pipeline_error = False + if self.error_tone_enabled: + await self._play_error_tone() + else: + # Block until TTS is done speaking. + # + # This is set in _send_tts and has a timeout that's based on the + # length of the TTS audio. + await self._tts_done.wait() + + _LOGGER.debug("Pipeline finished") + except PipelineNotFound: + _LOGGER.warning("Pipeline not found") + except TimeoutError: + # Expected after caller hangs up + _LOGGER.debug("Pipeline timeout") + self._session_id = None + self.disconnect() + finally: + # Allow pipeline to run again + self._pipeline_task = None + + async def _wait_for_speech( + self, + segmenter: VoiceCommandSegmenter, + vad: VoiceActivityDetector, + chunk_buffer: MutableSequence[bytes], + ): + """Buffer audio chunks until speech is detected. + + Returns True if speech was detected, False otherwise. + """ + # Timeout if no audio comes in for a while. + # This means the caller hung up. + async with asyncio.timeout(self.audio_timeout): + chunk = await self._audio_queue.get() + + assert vad.samples_per_chunk is not None + vad_buffer = AudioBuffer(vad.samples_per_chunk * WIDTH) + + while chunk: + chunk_buffer.append(chunk) + + segmenter.process_with_vad(chunk, vad, vad_buffer) + if segmenter.in_command: + # Buffer until command starts + if len(vad_buffer) > 0: + chunk_buffer.append(vad_buffer.bytes()) + + return True + + async with asyncio.timeout(self.audio_timeout): + chunk = await self._audio_queue.get() + + return False + + async def _segment_audio( + self, + segmenter: VoiceCommandSegmenter, + vad: VoiceActivityDetector, + chunk_buffer: Sequence[bytes], + ) -> AsyncIterable[bytes]: + """Yield audio chunks until voice command has finished.""" + # Buffered chunks first + for buffered_chunk in chunk_buffer: + yield buffered_chunk + + # Timeout if no audio comes in for a while. + # This means the caller hung up. 
+ async with asyncio.timeout(self.audio_timeout): + chunk = await self._audio_queue.get() + + assert vad.samples_per_chunk is not None + vad_buffer = AudioBuffer(vad.samples_per_chunk * WIDTH) + + while chunk: + if not segmenter.process_with_vad(chunk, vad, vad_buffer): + # Voice command is finished + break + + yield chunk + + async with asyncio.timeout(self.audio_timeout): + chunk = await self._audio_queue.get() + + def _clear_audio_queue(self) -> None: + while not self._audio_queue.empty(): + self._audio_queue.get_nowait() + + def _event_callback(self, event: PipelineEvent): + if not event.data: + return + + if event.type == PipelineEventType.INTENT_END: + # Capture conversation id + self._conversation_id = event.data["intent_output"]["conversation_id"] + elif event.type == PipelineEventType.TTS_END: + # Send TTS audio to caller over RTP + tts_output = event.data["tts_output"] + if tts_output: + media_id = tts_output["media_id"] + self.hass.async_create_background_task( + self._send_tts(media_id), + "voip_pipeline_tts", + ) + else: + # Empty TTS response + self._tts_done.set() + elif event.type == PipelineEventType.ERROR: + # Play error tone instead of wait for TTS + self._pipeline_error = True + + async def _send_tts(self, media_id: str) -> None: + """Send TTS audio to caller via RTP.""" + try: + if self.transport is None: + return + + extension, data = await tts.async_get_media_source_audio( + self.hass, + media_id, + ) + + if extension != "wav": + raise ValueError(f"Only WAV audio can be streamed, got {extension}") + + with io.BytesIO(data) as wav_io: + with wave.open(wav_io, "rb") as wav_file: + sample_rate = wav_file.getframerate() + sample_width = wav_file.getsampwidth() + sample_channels = wav_file.getnchannels() + + if ( + (sample_rate != 16000) + or (sample_width != 2) + or (sample_channels != 1) + ): + raise ValueError( + "Expected rate/width/channels as 16000/2/1," + f" got {sample_rate}/{sample_width}/{sample_channels}" + ) + + audio_bytes = wav_file.readframes(wav_file.getnframes()) + + _LOGGER.debug("Sending %s byte(s) of audio", len(audio_bytes)) + + # Time out 1 second after TTS audio should be finished + tts_samples = len(audio_bytes) / (WIDTH * CHANNELS) + tts_seconds = tts_samples / RATE + + async with asyncio.timeout(tts_seconds + self.tts_extra_timeout): + # TTS audio is 16Khz 16-bit mono + await self._async_send_audio(audio_bytes) + except TimeoutError: + _LOGGER.warning("TTS timeout") + raise + finally: + # Signal pipeline to restart + self._tts_done.set() + + async def _async_send_audio(self, audio_bytes: bytes, **kwargs): + """Send audio in executor.""" + await self.hass.async_add_executor_job( + partial(self.send_audio, audio_bytes, **RTP_AUDIO_SETTINGS, **kwargs) + ) + + async def _play_listening_tone(self) -> None: + """Play a tone to indicate that Home Assistant is listening.""" + if self._tone_bytes is None: + # Do I/O in executor + self._tone_bytes = await self.hass.async_add_executor_job( + self._load_pcm, + "tone.pcm", + ) + + await self._async_send_audio( + self._tone_bytes, + silence_before=self.tone_delay, + ) + + async def _play_processing_tone(self) -> None: + """Play a tone to indicate that Home Assistant is processing the voice command.""" + if self._processing_bytes is None: + # Do I/O in executor + self._processing_bytes = await self.hass.async_add_executor_job( + self._load_pcm, + "processing.pcm", + ) + + await self._async_send_audio(self._processing_bytes) + + async def _play_error_tone(self) -> None: + """Play a tone to indicate a pipeline 
error occurred.""" + if self._error_bytes is None: + # Do I/O in executor + self._error_bytes = await self.hass.async_add_executor_job( + self._load_pcm, + "error.pcm", + ) + + await self._async_send_audio(self._error_bytes) + + def _load_pcm(self, file_name: str) -> bytes: + """Load raw audio (16Khz, 16-bit mono).""" + return (Path(__file__).parent / file_name).read_bytes() + + class PreRecordMessageProtocol(RtpDatagramProtocol): """Plays a pre-recorded message on a loop.""" diff --git a/homeassistant/components/volumio/config_flow.py b/homeassistant/components/volumio/config_flow.py index 7cc58556f3e..8edda1d20b0 100644 --- a/homeassistant/components/volumio/config_flow.py +++ b/homeassistant/components/volumio/config_flow.py @@ -3,7 +3,6 @@ from __future__ import annotations import logging -from typing import Any from pyvolumio import CannotConnectError, Volumio import voluptuous as vol @@ -11,7 +10,7 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_ID, CONF_NAME, CONF_PORT -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -25,7 +24,7 @@ DATA_SCHEMA = vol.Schema( ) -async def validate_input(hass: HomeAssistant, host: str, port: int) -> dict[str, Any]: +async def validate_input(hass, host, port): """Validate the user input allows us to connect.""" volumio = Volumio(host, port, async_get_clientsession(hass)) @@ -40,13 +39,15 @@ class VolumioConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _host: str - _port: int - _name: str - _uuid: str | None + def __init__(self) -> None: + """Initialize flow.""" + self._host: str | None = None + self._port: int | None = None + self._name: str | None = None + self._uuid: str | None = None @callback - def _async_get_entry(self) -> ConfigFlowResult: + def _async_get_entry(self): return self.async_create_entry( title=self._name, data={ @@ -67,9 +68,7 @@ class VolumioConfigFlow(ConfigFlow, domain=DOMAIN): } ) - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: @@ -101,7 +100,7 @@ class VolumioConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle zeroconf discovery.""" self._host = discovery_info.host - self._port = discovery_info.port or 3000 + self._port = discovery_info.port self._name = discovery_info.properties["volumioName"] self._uuid = discovery_info.properties["UUID"] @@ -109,9 +108,7 @@ class VolumioConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_discovery_confirm() - async def async_step_discovery_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_discovery_confirm(self, user_input=None): """Handle user-confirmation of discovered node.""" if user_input is not None: try: diff --git a/homeassistant/components/volvooncall/__init__.py b/homeassistant/components/volvooncall/__init__.py index 9fc07dd92b0..8bade56fa97 100644 --- a/homeassistant/components/volvooncall/__init__.py +++ b/homeassistant/components/volvooncall/__init__.py @@ -1,6 +1,11 @@ """Support for Volvo On Call.""" +import asyncio +import logging + +from aiohttp.client_exceptions import ClientResponseError 
from volvooncall import Connection +from volvooncall.dashboard import Instrument from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -10,17 +15,30 @@ from homeassistant.const import ( CONF_USERNAME, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, + UpdateFailed, +) from .const import ( + CONF_MUTABLE, CONF_SCANDINAVIAN_MILES, + DEFAULT_UPDATE_INTERVAL, DOMAIN, PLATFORMS, + UNIT_SYSTEM_IMPERIAL, UNIT_SYSTEM_METRIC, UNIT_SYSTEM_SCANDINAVIAN_MILES, + VOLVO_DISCOVERY_NEW, ) -from .coordinator import VolvoUpdateCoordinator -from .models import VolvoData +from .errors import InvalidAuth + +_LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -69,3 +87,185 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok + + +class VolvoData: + """Hold component state.""" + + def __init__( + self, + hass: HomeAssistant, + connection: Connection, + entry: ConfigEntry, + ) -> None: + """Initialize the component state.""" + self.hass = hass + self.vehicles: set[str] = set() + self.instruments: set[Instrument] = set() + self.config_entry = entry + self.connection = connection + + def instrument(self, vin, component, attr, slug_attr): + """Return corresponding instrument.""" + return next( + instrument + for instrument in self.instruments + if instrument.vehicle.vin == vin + and instrument.component == component + and instrument.attr == attr + and instrument.slug_attr == slug_attr + ) + + def vehicle_name(self, vehicle): + """Provide a friendly name for a vehicle.""" + if vehicle.registration_number and vehicle.registration_number != "UNKNOWN": + return vehicle.registration_number + if vehicle.vin: + return vehicle.vin + return "Volvo" + + def discover_vehicle(self, vehicle): + """Load relevant platforms.""" + self.vehicles.add(vehicle.vin) + + dashboard = vehicle.dashboard( + mutable=self.config_entry.data[CONF_MUTABLE], + scandinavian_miles=( + self.config_entry.data[CONF_UNIT_SYSTEM] + == UNIT_SYSTEM_SCANDINAVIAN_MILES + ), + usa_units=( + self.config_entry.data[CONF_UNIT_SYSTEM] == UNIT_SYSTEM_IMPERIAL + ), + ) + + for instrument in ( + instrument + for instrument in dashboard.instruments + if instrument.component in PLATFORMS + ): + self.instruments.add(instrument) + async_dispatcher_send(self.hass, VOLVO_DISCOVERY_NEW, [instrument]) + + async def update(self): + """Update status from the online service.""" + try: + await self.connection.update(journal=True) + except ClientResponseError as ex: + if ex.status == 401: + raise ConfigEntryAuthFailed(ex) from ex + raise UpdateFailed(ex) from ex + + for vehicle in self.connection.vehicles: + if vehicle.vin not in self.vehicles: + self.discover_vehicle(vehicle) + + async def auth_is_valid(self): + """Check if provided username/password/region authenticate.""" + try: + await self.connection.get("customeraccounts") + except ClientResponseError as exc: + raise InvalidAuth from exc + + +class VolvoUpdateCoordinator(DataUpdateCoordinator[None]): # pylint: disable=hass-enforce-coordinator-module + """Volvo coordinator.""" + + def 
__init__(self, hass: HomeAssistant, volvo_data: VolvoData) -> None: + """Initialize the data update coordinator.""" + + super().__init__( + hass, + _LOGGER, + name="volvooncall", + update_interval=DEFAULT_UPDATE_INTERVAL, + ) + + self.volvo_data = volvo_data + + async def _async_update_data(self) -> None: + """Fetch data from API endpoint.""" + + async with asyncio.timeout(10): + await self.volvo_data.update() + + +class VolvoEntity(CoordinatorEntity[VolvoUpdateCoordinator]): + """Base class for all VOC entities.""" + + def __init__( + self, + vin: str, + component: str, + attribute: str, + slug_attr: str, + coordinator: VolvoUpdateCoordinator, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self.vin = vin + self.component = component + self.attribute = attribute + self.slug_attr = slug_attr + + @property + def instrument(self): + """Return corresponding instrument.""" + return self.coordinator.volvo_data.instrument( + self.vin, self.component, self.attribute, self.slug_attr + ) + + @property + def icon(self): + """Return the icon.""" + return self.instrument.icon + + @property + def vehicle(self): + """Return vehicle.""" + return self.instrument.vehicle + + @property + def _entity_name(self): + return self.instrument.name + + @property + def _vehicle_name(self): + return self.coordinator.volvo_data.vehicle_name(self.vehicle) + + @property + def name(self): + """Return full name of the entity.""" + return f"{self._vehicle_name} {self._entity_name}" + + @property + def assumed_state(self): + """Return true if unable to access real state of entity.""" + return True + + @property + def device_info(self) -> DeviceInfo: + """Return a unique set of attributes for each vehicle.""" + return DeviceInfo( + identifiers={(DOMAIN, self.vehicle.vin)}, + name=self._vehicle_name, + model=self.vehicle.vehicle_type, + manufacturer="Volvo", + ) + + @property + def extra_state_attributes(self): + """Return device specific state attributes.""" + return dict( + self.instrument.attributes, + model=f"{self.vehicle.vehicle_type}/{self.vehicle.model_year}", + ) + + @property + def unique_id(self) -> str: + """Return a unique ID.""" + slug_override = "" + if self.instrument.slug_override is not None: + slug_override = f"-{self.instrument.slug_override}" + return f"{self.vin}-{self.component}-{self.attribute}{slug_override}" diff --git a/homeassistant/components/volvooncall/binary_sensor.py b/homeassistant/components/volvooncall/binary_sensor.py index e6104f8d87c..604dc2313bf 100644 --- a/homeassistant/components/volvooncall/binary_sensor.py +++ b/homeassistant/components/volvooncall/binary_sensor.py @@ -16,9 +16,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import VolvoEntity, VolvoUpdateCoordinator from .const import DOMAIN, VOLVO_DISCOVERY_NEW -from .coordinator import VolvoUpdateCoordinator -from .entity import VolvoEntity async def async_setup_entry( diff --git a/homeassistant/components/volvooncall/config_flow.py b/homeassistant/components/volvooncall/config_flow.py index ccb0a7f62e1..80358a28ced 100644 --- a/homeassistant/components/volvooncall/config_flow.py +++ b/homeassistant/components/volvooncall/config_flow.py @@ -9,7 +9,7 @@ from typing import Any import voluptuous as vol from volvooncall import Connection -from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_PASSWORD, CONF_REGION, @@ -18,6 +18,7 @@ from homeassistant.const import ( ) from homeassistant.helpers.aiohttp_client import async_get_clientsession +from . import VolvoData from .const import ( CONF_MUTABLE, DOMAIN, @@ -26,7 +27,6 @@ from .const import ( UNIT_SYSTEM_SCANDINAVIAN_MILES, ) from .errors import InvalidAuth -from .models import VolvoData _LOGGER = logging.getLogger(__name__) @@ -35,6 +35,7 @@ class VolvoOnCallConfigFlow(ConfigFlow, domain=DOMAIN): """VolvoOnCall config flow.""" VERSION = 1 + _reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -52,7 +53,7 @@ class VolvoOnCallConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: await self.async_set_unique_id(user_input[CONF_USERNAME]) - if self.source != SOURCE_REAUTH: + if not self._reauth_entry: self._abort_if_unique_id_configured() try: @@ -63,18 +64,21 @@ class VolvoOnCallConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unhandled exception in user step") errors["base"] = "unknown" if not errors: - if self.source == SOURCE_REAUTH: - return self.async_update_reload_and_abort( - self._get_reauth_entry(), data_updates=user_input + if self._reauth_entry: + self.hass.config_entries.async_update_entry( + self._reauth_entry, data=self._reauth_entry.data | user_input ) + await self.hass.config_entries.async_reload( + self._reauth_entry.entry_id + ) + return self.async_abort(reason="reauth_successful") return self.async_create_entry( title=user_input[CONF_USERNAME], data=user_input ) - elif self.source == SOURCE_REAUTH: - reauth_entry = self._get_reauth_entry() + elif self._reauth_entry: for key in defaults: - defaults[key] = reauth_entry.data.get(key) + defaults[key] = self._reauth_entry.data.get(key) user_schema = vol.Schema( { @@ -103,9 +107,12 @@ class VolvoOnCallConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, entry_data: Mapping[str, Any] + self, user_input: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" + self._reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) return await self.async_step_user() async def is_valid(self, user_input): diff --git a/homeassistant/components/volvooncall/coordinator.py b/homeassistant/components/volvooncall/coordinator.py deleted file mode 100644 index 5ac6a58acb0..00000000000 --- a/homeassistant/components/volvooncall/coordinator.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Support for Volvo On Call.""" - -import asyncio -import logging - -from homeassistant.core import HomeAssistant -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator - -from .const import DEFAULT_UPDATE_INTERVAL -from .models import VolvoData - 
-_LOGGER = logging.getLogger(__name__) - - -class VolvoUpdateCoordinator(DataUpdateCoordinator[None]): - """Volvo coordinator.""" - - def __init__(self, hass: HomeAssistant, volvo_data: VolvoData) -> None: - """Initialize the data update coordinator.""" - - super().__init__( - hass, - _LOGGER, - name="volvooncall", - update_interval=DEFAULT_UPDATE_INTERVAL, - ) - - self.volvo_data = volvo_data - - async def _async_update_data(self) -> None: - """Fetch data from API endpoint.""" - - async with asyncio.timeout(10): - await self.volvo_data.update() diff --git a/homeassistant/components/volvooncall/device_tracker.py b/homeassistant/components/volvooncall/device_tracker.py index 96fe5a644bb..51c2f08130b 100644 --- a/homeassistant/components/volvooncall/device_tracker.py +++ b/homeassistant/components/volvooncall/device_tracker.py @@ -4,15 +4,14 @@ from __future__ import annotations from volvooncall.dashboard import Instrument -from homeassistant.components.device_tracker import TrackerEntity +from homeassistant.components.device_tracker import SourceType, TrackerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import VolvoEntity, VolvoUpdateCoordinator from .const import DOMAIN, VOLVO_DISCOVERY_NEW -from .coordinator import VolvoUpdateCoordinator -from .entity import VolvoEntity async def async_setup_entry( @@ -61,6 +60,11 @@ class VolvoTrackerEntity(VolvoEntity, TrackerEntity): _, longitude = self._get_pos() return longitude + @property + def source_type(self) -> SourceType | str: + """Return the source type (GPS).""" + return SourceType.GPS + def _get_pos(self) -> tuple[float, float]: volvo_data = self.coordinator.volvo_data instrument = volvo_data.instrument( diff --git a/homeassistant/components/volvooncall/entity.py b/homeassistant/components/volvooncall/entity.py deleted file mode 100644 index 6ebc4bdc754..00000000000 --- a/homeassistant/components/volvooncall/entity.py +++ /dev/null @@ -1,88 +0,0 @@ -"""Support for Volvo On Call.""" - -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity - -from .const import DOMAIN -from .coordinator import VolvoUpdateCoordinator - - -class VolvoEntity(CoordinatorEntity[VolvoUpdateCoordinator]): - """Base class for all VOC entities.""" - - def __init__( - self, - vin: str, - component: str, - attribute: str, - slug_attr: str, - coordinator: VolvoUpdateCoordinator, - ) -> None: - """Initialize the entity.""" - super().__init__(coordinator) - - self.vin = vin - self.component = component - self.attribute = attribute - self.slug_attr = slug_attr - - @property - def instrument(self): - """Return corresponding instrument.""" - return self.coordinator.volvo_data.instrument( - self.vin, self.component, self.attribute, self.slug_attr - ) - - @property - def icon(self): - """Return the icon.""" - return self.instrument.icon - - @property - def vehicle(self): - """Return vehicle.""" - return self.instrument.vehicle - - @property - def _entity_name(self): - return self.instrument.name - - @property - def _vehicle_name(self): - return self.coordinator.volvo_data.vehicle_name(self.vehicle) - - @property - def name(self): - """Return full name of the entity.""" - return f"{self._vehicle_name} {self._entity_name}" - - @property - def assumed_state(self): - """Return true if 
unable to access real state of entity.""" - return True - - @property - def device_info(self) -> DeviceInfo: - """Return a inique set of attributes for each vehicle.""" - return DeviceInfo( - identifiers={(DOMAIN, self.vehicle.vin)}, - name=self._vehicle_name, - model=self.vehicle.vehicle_type, - manufacturer="Volvo", - ) - - @property - def extra_state_attributes(self): - """Return device specific state attributes.""" - return dict( - self.instrument.attributes, - model=f"{self.vehicle.vehicle_type}/{self.vehicle.model_year}", - ) - - @property - def unique_id(self) -> str: - """Return a unique ID.""" - slug_override = "" - if self.instrument.slug_override is not None: - slug_override = f"-{self.instrument.slug_override}" - return f"{self.vin}-{self.component}-{self.attribute}{slug_override}" diff --git a/homeassistant/components/volvooncall/lock.py b/homeassistant/components/volvooncall/lock.py index cff5df35750..cccd64bce05 100644 --- a/homeassistant/components/volvooncall/lock.py +++ b/homeassistant/components/volvooncall/lock.py @@ -12,9 +12,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import VolvoEntity, VolvoUpdateCoordinator from .const import DOMAIN, VOLVO_DISCOVERY_NEW -from .coordinator import VolvoUpdateCoordinator -from .entity import VolvoEntity async def async_setup_entry( diff --git a/homeassistant/components/volvooncall/models.py b/homeassistant/components/volvooncall/models.py deleted file mode 100644 index 159379a908b..00000000000 --- a/homeassistant/components/volvooncall/models.py +++ /dev/null @@ -1,100 +0,0 @@ -"""Support for Volvo On Call.""" - -from aiohttp.client_exceptions import ClientResponseError -from volvooncall import Connection -from volvooncall.dashboard import Instrument - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_UNIT_SYSTEM -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.update_coordinator import UpdateFailed - -from .const import ( - CONF_MUTABLE, - PLATFORMS, - UNIT_SYSTEM_IMPERIAL, - UNIT_SYSTEM_SCANDINAVIAN_MILES, - VOLVO_DISCOVERY_NEW, -) -from .errors import InvalidAuth - - -class VolvoData: - """Hold component state.""" - - def __init__( - self, - hass: HomeAssistant, - connection: Connection, - entry: ConfigEntry, - ) -> None: - """Initialize the component state.""" - self.hass = hass - self.vehicles: set[str] = set() - self.instruments: set[Instrument] = set() - self.config_entry = entry - self.connection = connection - - def instrument(self, vin, component, attr, slug_attr): - """Return corresponding instrument.""" - return next( - instrument - for instrument in self.instruments - if instrument.vehicle.vin == vin - and instrument.component == component - and instrument.attr == attr - and instrument.slug_attr == slug_attr - ) - - def vehicle_name(self, vehicle): - """Provide a friendly name for a vehicle.""" - if vehicle.registration_number and vehicle.registration_number != "UNKNOWN": - return vehicle.registration_number - if vehicle.vin: - return vehicle.vin - return "Volvo" - - def discover_vehicle(self, vehicle): - """Load relevant platforms.""" - self.vehicles.add(vehicle.vin) - - dashboard = vehicle.dashboard( - mutable=self.config_entry.data[CONF_MUTABLE], - 
scandinavian_miles=( - self.config_entry.data[CONF_UNIT_SYSTEM] - == UNIT_SYSTEM_SCANDINAVIAN_MILES - ), - usa_units=( - self.config_entry.data[CONF_UNIT_SYSTEM] == UNIT_SYSTEM_IMPERIAL - ), - ) - - for instrument in ( - instrument - for instrument in dashboard.instruments - if instrument.component in PLATFORMS - ): - self.instruments.add(instrument) - async_dispatcher_send(self.hass, VOLVO_DISCOVERY_NEW, [instrument]) - - async def update(self): - """Update status from the online service.""" - try: - await self.connection.update(journal=True) - except ClientResponseError as ex: - if ex.status == 401: - raise ConfigEntryAuthFailed(ex) from ex - raise UpdateFailed(ex) from ex - - for vehicle in self.connection.vehicles: - if vehicle.vin not in self.vehicles: - self.discover_vehicle(vehicle) - - async def auth_is_valid(self): - """Check if provided username/password/region authenticate.""" - try: - await self.connection.get("customeraccounts") - except ClientResponseError as exc: - raise InvalidAuth from exc diff --git a/homeassistant/components/volvooncall/sensor.py b/homeassistant/components/volvooncall/sensor.py index 9916d37197b..a46c8671929 100644 --- a/homeassistant/components/volvooncall/sensor.py +++ b/homeassistant/components/volvooncall/sensor.py @@ -10,9 +10,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import VolvoEntity, VolvoUpdateCoordinator from .const import DOMAIN, VOLVO_DISCOVERY_NEW -from .coordinator import VolvoUpdateCoordinator -from .entity import VolvoEntity async def async_setup_entry( diff --git a/homeassistant/components/volvooncall/switch.py b/homeassistant/components/volvooncall/switch.py index 7e60f47fb44..23bc452ef66 100644 --- a/homeassistant/components/volvooncall/switch.py +++ b/homeassistant/components/volvooncall/switch.py @@ -12,9 +12,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import VolvoEntity, VolvoUpdateCoordinator from .const import DOMAIN, VOLVO_DISCOVERY_NEW -from .coordinator import VolvoUpdateCoordinator -from .entity import VolvoEntity async def async_setup_entry( diff --git a/homeassistant/components/vulcan/calendar.py b/homeassistant/components/vulcan/calendar.py index a89b6b4a116..e068a772345 100644 --- a/homeassistant/components/vulcan/calendar.py +++ b/homeassistant/components/vulcan/calendar.py @@ -133,7 +133,7 @@ class VulcanCalendarEntity(CalendarEntity): events = await get_lessons(self.client) if not self.available: - _LOGGER.warning("Restored connection with API") + _LOGGER.info("Restored connection with API") self._attr_available = True if events == []: diff --git a/homeassistant/components/vulcan/config_flow.py b/homeassistant/components/vulcan/config_flow.py index f02adba9f75..560d777b517 100644 --- a/homeassistant/components/vulcan/config_flow.py +++ b/homeassistant/components/vulcan/config_flow.py @@ -2,7 +2,7 @@ from collections.abc import Mapping import logging -from typing import TYPE_CHECKING, Any +from typing import Any from aiohttp import ClientConnectionError import voluptuous as vol @@ -16,7 +16,6 @@ from vulcan import ( UnauthorizedCertificateException, Vulcan, ) -from vulcan.model import Student from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PIN, CONF_REGION, CONF_TOKEN @@ -39,32 +38,26 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - account: Account - keystore: Keystore - def __init__(self) -> None: """Initialize config flow.""" - self.students: list[Student] | None = None + self.account = None + self.keystore = None + self.students = None - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_user(self, user_input=None): """Handle config flow.""" if self._async_current_entries(): return await self.async_step_add_next_config_entry() return await self.async_step_auth() - async def async_step_auth( - self, - user_input: dict[str, str] | None = None, - errors: dict[str, str] | None = None, - ) -> ConfigFlowResult: + async def async_step_auth(self, user_input=None, errors=None): """Authorize integration.""" if user_input is not None: try: credentials = await register( + self.hass, user_input[CONF_TOKEN], user_input[CONF_REGION], user_input[CONF_PIN], @@ -112,20 +105,16 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_select_student( - self, user_input: dict[str, str] | None = None - ) -> ConfigFlowResult: + async def async_step_select_student(self, user_input=None): """Allow user to select student.""" - errors: dict[str, str] = {} - students: dict[str, str] = {} + errors = {} + students = {} if self.students is not None: for student in self.students: students[str(student.pupil.id)] = ( f"{student.pupil.first_name} {student.pupil.last_name}" ) if user_input is not None: - if TYPE_CHECKING: - assert self.keystore is not None student_id = user_input["student"] await self.async_set_unique_id(str(student_id)) self._abort_if_unique_id_configured() @@ -144,25 +133,17 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_select_saved_credentials( - self, - user_input: dict[str, str] | None = None, - errors: dict[str, str] | None = None, - ) -> ConfigFlowResult: + async def async_step_select_saved_credentials(self, user_input=None, errors=None): """Allow user to select saved credentials.""" - credentials: 
dict[str, Any] = {} + credentials = {} for entry in self.hass.config_entries.async_entries(DOMAIN): credentials[entry.entry_id] = entry.data["account"]["UserName"] if user_input is not None: - existing_entry = self.hass.config_entries.async_get_entry( - user_input["credentials"] - ) - if TYPE_CHECKING: - assert existing_entry is not None - keystore = Keystore.load(existing_entry.data["keystore"]) - account = Account.load(existing_entry.data["account"]) + entry = self.hass.config_entries.async_get_entry(user_input["credentials"]) + keystore = Keystore.load(entry.data["keystore"]) + account = Account.load(entry.data["account"]) client = Vulcan(keystore, account, async_get_clientsession(self.hass)) try: students = await client.get_students() @@ -206,14 +187,12 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_add_next_config_entry( - self, user_input: dict[str, bool] | None = None - ) -> ConfigFlowResult: + async def async_step_add_next_config_entry(self, user_input=None): """Flow initialized when user is adding next entry of that integration.""" existing_entries = self.hass.config_entries.async_entries(DOMAIN) - errors: dict[str, str] = {} + errors = {} if user_input is not None: if not user_input["use_saved_credentials"]: @@ -267,14 +246,13 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): """Perform reauth upon an API authentication error.""" return await self.async_step_reauth_confirm() - async def async_step_reauth_confirm( - self, user_input: dict[str, str] | None = None - ) -> ConfigFlowResult: + async def async_step_reauth_confirm(self, user_input=None): """Reauthorize integration.""" errors = {} if user_input is not None: try: credentials = await register( + self.hass, user_input[CONF_TOKEN], user_input[CONF_REGION], user_input[CONF_PIN], diff --git a/homeassistant/components/vulcan/register.py b/homeassistant/components/vulcan/register.py index a3dec97f622..67cceb8d7b8 100644 --- a/homeassistant/components/vulcan/register.py +++ b/homeassistant/components/vulcan/register.py @@ -1,11 +1,9 @@ """Support for register Vulcan account.""" -from typing import Any - from vulcan import Account, Keystore -async def register(token: str, symbol: str, pin: str) -> dict[str, Any]: +async def register(hass, token, symbol, pin): """Register integration and save credentials.""" keystore = await Keystore.create(device_model="Home Assistant") account = await Account.register(keystore, token, symbol, pin) diff --git a/homeassistant/components/wake_on_lan/__init__.py b/homeassistant/components/wake_on_lan/__init__.py index efd72c4564c..37837da683a 100644 --- a/homeassistant/components/wake_on_lan/__init__.py +++ b/homeassistant/components/wake_on_lan/__init__.py @@ -6,13 +6,12 @@ import logging import voluptuous as vol import wakeonlan -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC from homeassistant.core import HomeAssistant, ServiceCall import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN, PLATFORMS +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -44,7 +43,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: if broadcast_port is not None: service_kwargs["port"] = broadcast_port - _LOGGER.debug( + _LOGGER.info( "Send magic packet to mac %s (broadcast: %s, port: %s)", mac_address, broadcast_address, @@ -63,21 +62,3 @@ async def 
async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) return True - - -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up a Wake on LAN component entry.""" - - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_on_unload(entry.add_update_listener(update_listener)) - return True - - -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Unload a config entry.""" - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: - """Handle options update.""" - await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/wake_on_lan/button.py b/homeassistant/components/wake_on_lan/button.py deleted file mode 100644 index 4d6b19bdd8e..00000000000 --- a/homeassistant/components/wake_on_lan/button.py +++ /dev/null @@ -1,83 +0,0 @@ -"""Support for button entity in wake on lan.""" - -from __future__ import annotations - -from functools import partial -import logging -from typing import Any - -import wakeonlan - -from homeassistant.components.button import ButtonEntity -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -_LOGGER = logging.getLogger(__name__) - - -async def async_setup_entry( - hass: HomeAssistant, - entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the Wake on LAN button entry.""" - broadcast_address: str | None = entry.options.get(CONF_BROADCAST_ADDRESS) - broadcast_port: int | None = entry.options.get(CONF_BROADCAST_PORT) - mac_address: str = entry.options[CONF_MAC] - name: str = entry.title - - async_add_entities( - [ - WolButton( - name, - mac_address, - broadcast_address, - broadcast_port, - ) - ] - ) - - -class WolButton(ButtonEntity): - """Representation of a wake on lan button.""" - - _attr_name = None - - def __init__( - self, - name: str, - mac_address: str, - broadcast_address: str | None, - broadcast_port: int | None, - ) -> None: - """Initialize the WOL button.""" - self._mac_address = mac_address - self._broadcast_address = broadcast_address - self._broadcast_port = broadcast_port - self._attr_unique_id = dr.format_mac(mac_address) - self._attr_device_info = dr.DeviceInfo( - connections={(dr.CONNECTION_NETWORK_MAC, self._attr_unique_id)}, - default_name=name, - ) - - async def async_press(self) -> None: - """Press the button.""" - service_kwargs: dict[str, Any] = {} - if self._broadcast_address is not None: - service_kwargs["ip_address"] = self._broadcast_address - if self._broadcast_port is not None: - service_kwargs["port"] = self._broadcast_port - - _LOGGER.debug( - "Send magic packet to mac %s (broadcast: %s, port: %s)", - self._mac_address, - self._broadcast_address, - self._broadcast_port, - ) - - await self.hass.async_add_executor_job( - partial(wakeonlan.send_magic_packet, self._mac_address, **service_kwargs) - ) diff --git a/homeassistant/components/wake_on_lan/config_flow.py b/homeassistant/components/wake_on_lan/config_flow.py deleted file mode 100644 index fb54dd146e5..00000000000 --- a/homeassistant/components/wake_on_lan/config_flow.py +++ /dev/null @@ -1,80 +0,0 @@ -"""Config flow for Wake on lan integration.""" - -from 
collections.abc import Mapping -from typing import Any - -import voluptuous as vol - -from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.schema_config_entry_flow import ( - SchemaCommonFlowHandler, - SchemaConfigFlowHandler, - SchemaFlowFormStep, -) -from homeassistant.helpers.selector import ( - NumberSelector, - NumberSelectorConfig, - NumberSelectorMode, - TextSelector, -) - -from .const import DEFAULT_NAME, DOMAIN - - -async def validate( - handler: SchemaCommonFlowHandler, user_input: dict[str, Any] -) -> dict[str, Any]: - """Validate input setup.""" - user_input = await validate_options(handler, user_input) - - user_input[CONF_MAC] = dr.format_mac(user_input[CONF_MAC]) - - # Mac address needs to be unique - handler.parent_handler._async_abort_entries_match({CONF_MAC: user_input[CONF_MAC]}) # noqa: SLF001 - - return user_input - - -async def validate_options( - handler: SchemaCommonFlowHandler, user_input: dict[str, Any] -) -> dict[str, Any]: - """Validate input options.""" - if CONF_BROADCAST_PORT in user_input: - # Convert float to int for broadcast port - user_input[CONF_BROADCAST_PORT] = int(user_input[CONF_BROADCAST_PORT]) - return user_input - - -DATA_SCHEMA = {vol.Required(CONF_MAC): TextSelector()} -OPTIONS_SCHEMA = { - vol.Optional(CONF_BROADCAST_ADDRESS): TextSelector(), - vol.Optional(CONF_BROADCAST_PORT): NumberSelector( - NumberSelectorConfig(min=0, max=65535, step=1, mode=NumberSelectorMode.BOX) - ), -} - - -CONFIG_FLOW = { - "user": SchemaFlowFormStep( - schema=vol.Schema(DATA_SCHEMA).extend(OPTIONS_SCHEMA), - validate_user_input=validate, - ) -} -OPTIONS_FLOW = { - "init": SchemaFlowFormStep( - vol.Schema(OPTIONS_SCHEMA), validate_user_input=validate_options - ), -} - - -class WakeonLanConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): - """Handle a config flow for Wake on Lan.""" - - config_flow = CONFIG_FLOW - options_flow = OPTIONS_FLOW - - def async_config_entry_title(self, options: Mapping[str, Any]) -> str: - """Return config entry title.""" - mac: str = options[CONF_MAC] - return f"{DEFAULT_NAME} {mac}" diff --git a/homeassistant/components/wake_on_lan/const.py b/homeassistant/components/wake_on_lan/const.py index 20b9573cfde..2560ef40382 100644 --- a/homeassistant/components/wake_on_lan/const.py +++ b/homeassistant/components/wake_on_lan/const.py @@ -1,11 +1,3 @@ """Constants for the Wake-On-LAN component.""" -from homeassistant.const import Platform - DOMAIN = "wake_on_lan" -PLATFORMS = [Platform.BUTTON] - -CONF_OFF_ACTION = "turn_off" - -DEFAULT_NAME = "Wake on LAN" -DEFAULT_PING_TIMEOUT = 1 diff --git a/homeassistant/components/wake_on_lan/icons.json b/homeassistant/components/wake_on_lan/icons.json index f083b0342f4..6426c478157 100644 --- a/homeassistant/components/wake_on_lan/icons.json +++ b/homeassistant/components/wake_on_lan/icons.json @@ -1,7 +1,5 @@ { "services": { - "send_magic_packet": { - "service": "mdi:cube-send" - } + "send_magic_packet": "mdi:cube-send" } } diff --git a/homeassistant/components/wake_on_lan/manifest.json b/homeassistant/components/wake_on_lan/manifest.json index c716a851ae4..a62980b3010 100644 --- a/homeassistant/components/wake_on_lan/manifest.json +++ b/homeassistant/components/wake_on_lan/manifest.json @@ -2,7 +2,6 @@ "domain": "wake_on_lan", "name": "Wake on LAN", "codeowners": ["@ntilley905"], - "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/wake_on_lan", 
"iot_class": "local_push", "requirements": ["wakeonlan==2.1.0"] diff --git a/homeassistant/components/wake_on_lan/strings.json b/homeassistant/components/wake_on_lan/strings.json index 89bc30e405a..8395bc7503a 100644 --- a/homeassistant/components/wake_on_lan/strings.json +++ b/homeassistant/components/wake_on_lan/strings.json @@ -1,56 +1,20 @@ { - "config": { - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" - }, - "step": { - "user": { - "data": { - "mac": "MAC address", - "broadcast_address": "Broadcast address", - "broadcast_port": "Broadcast port" - }, - "data_description": { - "mac": "MAC address of the device to wake up.", - "broadcast_address": "The IP address of the host to send the magic packet to. Defaults to `255.255.255.255` and is normally not changed.", - "broadcast_port": "The port to send the magic packet to. Defaults to `9` and is normally not changed." - } - } - } - }, - "options": { - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" - }, - "step": { - "init": { - "data": { - "broadcast_address": "[%key:component::wake_on_lan::config::step::user::data::broadcast_address%]", - "broadcast_port": "[%key:component::wake_on_lan::config::step::user::data::broadcast_port%]" - }, - "data_description": { - "broadcast_address": "[%key:component::wake_on_lan::config::step::user::data_description::broadcast_address%]", - "broadcast_port": "[%key:component::wake_on_lan::config::step::user::data_description::broadcast_port%]" - } - } - } - }, "services": { "send_magic_packet": { "name": "Send magic packet", "description": "Sends a 'magic packet' to wake up a device with 'Wake-On-LAN' capabilities.", "fields": { "mac": { - "name": "[%key:component::wake_on_lan::config::step::user::data::mac%]", - "description": "[%key:component::wake_on_lan::config::step::user::data_description::mac%]" + "name": "MAC address", + "description": "MAC address of the device to wake up." }, "broadcast_address": { - "name": "[%key:component::wake_on_lan::config::step::user::data::broadcast_address%]", - "description": "[%key:component::wake_on_lan::config::step::user::data_description::broadcast_address%]" + "name": "Broadcast address", + "description": "Broadcast IP where to send the magic packet." }, "broadcast_port": { - "name": "[%key:component::wake_on_lan::config::step::user::data::broadcast_port%]", - "description": "[%key:component::wake_on_lan::config::step::user::data_description::broadcast_port%]" + "name": "Broadcast port", + "description": "Port where to send the magic packet." 
} } } diff --git a/homeassistant/components/wake_on_lan/switch.py b/homeassistant/components/wake_on_lan/switch.py index fcf8936d498..cf38d05de38 100644 --- a/homeassistant/components/wake_on_lan/switch.py +++ b/homeassistant/components/wake_on_lan/switch.py @@ -27,10 +27,15 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .const import CONF_OFF_ACTION, DEFAULT_NAME, DEFAULT_PING_TIMEOUT, DOMAIN +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) +CONF_OFF_ACTION = "turn_off" + +DEFAULT_NAME = "Wake on LAN" +DEFAULT_PING_TIMEOUT = 1 + PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( { vol.Required(CONF_MAC): cv.string, @@ -43,10 +48,10 @@ PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( ) -async def async_setup_platform( +def setup_platform( hass: HomeAssistant, config: ConfigType, - async_add_entities: AddEntitiesCallback, + add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up a wake on lan switch.""" @@ -57,7 +62,7 @@ async def async_setup_platform( name: str = config[CONF_NAME] off_action: list[Any] | None = config.get(CONF_OFF_ACTION) - async_add_entities( + add_entities( [ WolSwitch( hass, @@ -113,7 +118,7 @@ class WolSwitch(SwitchEntity): if self._broadcast_port is not None: service_kwargs["port"] = self._broadcast_port - _LOGGER.debug( + _LOGGER.info( "Send magic packet to mac %s (broadcast: %s, port: %s)", self._mac_address, self._broadcast_address, diff --git a/homeassistant/components/wake_word/__init__.py b/homeassistant/components/wake_word/__init__.py index 8b3a5bbf331..5ce592aacd8 100644 --- a/homeassistant/components/wake_word/__init__.py +++ b/homeassistant/components/wake_word/__init__.py @@ -19,7 +19,6 @@ from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util -from homeassistant.util.hass_dict import HassKey from .const import DOMAIN from .models import DetectionResult, WakeWord @@ -36,7 +35,6 @@ __all__ = [ _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) -DATA_COMPONENT: HassKey[EntityComponent[WakeWordDetectionEntity]] = HassKey(DOMAIN) TIMEOUT_FETCH_WAKE_WORDS = 10 @@ -52,16 +50,16 @@ def async_get_wake_word_detection_entity( hass: HomeAssistant, entity_id: str ) -> WakeWordDetectionEntity | None: """Return wake word entity.""" - return hass.data[DATA_COMPONENT].get_entity(entity_id) + component: EntityComponent[WakeWordDetectionEntity] = hass.data[DOMAIN] + + return component.get_entity(entity_id) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up wake word.""" websocket_api.async_register_command(hass, websocket_entity_info) - component = hass.data[DATA_COMPONENT] = EntityComponent[WakeWordDetectionEntity]( - _LOGGER, DOMAIN, hass - ) + component = hass.data[DOMAIN] = EntityComponent(_LOGGER, DOMAIN, hass) component.register_shutdown() return True @@ -69,12 +67,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - return await hass.data[DATA_COMPONENT].async_setup_entry(entry) + component: EntityComponent = hass.data[DOMAIN] + return await component.async_setup_entry(entry) async def 
async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - return await hass.data[DATA_COMPONENT].async_unload_entry(entry) + component: EntityComponent = hass.data[DOMAIN] + return await component.async_unload_entry(entry) class WakeWordDetectionEntity(RestoreEntity): @@ -137,11 +137,13 @@ class WakeWordDetectionEntity(RestoreEntity): } ) @websocket_api.async_response +@callback async def websocket_entity_info( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Get info about wake word entity.""" - entity = hass.data[DATA_COMPONENT].get_entity(msg["entity_id"]) + component: EntityComponent[WakeWordDetectionEntity] = hass.data[DOMAIN] + entity = component.get_entity(msg["entity_id"]) if entity is None: connection.send_error( diff --git a/homeassistant/components/wallbox/__init__.py b/homeassistant/components/wallbox/__init__.py index b2f8ac7fd5d..4ea2cf98be1 100644 --- a/homeassistant/components/wallbox/__init__.py +++ b/homeassistant/components/wallbox/__init__.py @@ -10,7 +10,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from .const import CONF_STATION, DOMAIN, UPDATE_INTERVAL -from .coordinator import InvalidAuth, WallboxCoordinator, async_validate_input +from .coordinator import InvalidAuth, WallboxCoordinator PLATFORMS = [Platform.LOCK, Platform.NUMBER, Platform.SENSOR, Platform.SWITCH] @@ -22,16 +22,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.data[CONF_PASSWORD], jwtTokenDrift=UPDATE_INTERVAL, ) - try: - await async_validate_input(hass, wallbox) - except InvalidAuth as ex: - raise ConfigEntryAuthFailed from ex - wallbox_coordinator = WallboxCoordinator( entry.data[CONF_STATION], wallbox, hass, ) + + try: + await wallbox_coordinator.async_validate_input() + + except InvalidAuth as ex: + raise ConfigEntryAuthFailed from ex + await wallbox_coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = wallbox_coordinator diff --git a/homeassistant/components/wallbox/config_flow.py b/homeassistant/components/wallbox/config_flow.py index bdc51eef963..44c47149554 100644 --- a/homeassistant/components/wallbox/config_flow.py +++ b/homeassistant/components/wallbox/config_flow.py @@ -8,12 +8,12 @@ from typing import Any import voluptuous as vol from wallbox import Wallbox -from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from .const import CONF_STATION, DOMAIN -from .coordinator import InvalidAuth, async_validate_input +from .coordinator import InvalidAuth, WallboxCoordinator COMPONENT_DOMAIN = DOMAIN @@ -32,8 +32,9 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. """ wallbox = Wallbox(data["username"], data["password"]) + wallbox_coordinator = WallboxCoordinator(data["station"], wallbox, hass) - await async_validate_input(hass, wallbox) + await wallbox_coordinator.async_validate_input() # Return info that you want to store in the config entry. 
return {"title": "Wallbox Portal"} @@ -42,10 +43,18 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, class WallboxConfigFlow(ConfigFlow, domain=COMPONENT_DOMAIN): """Handle a config flow for Wallbox.""" + def __init__(self) -> None: + """Start the Wallbox config flow.""" + self._reauth_entry: ConfigEntry | None = None + async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" + self._reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_user() async def async_step_user( @@ -62,13 +71,18 @@ class WallboxConfigFlow(ConfigFlow, domain=COMPONENT_DOMAIN): try: await self.async_set_unique_id(user_input["station"]) - if self.source != SOURCE_REAUTH: + if not self._reauth_entry: self._abort_if_unique_id_configured() info = await validate_input(self.hass, user_input) return self.async_create_entry(title=info["title"], data=user_input) - reauth_entry = self._get_reauth_entry() - if user_input["station"] == reauth_entry.data[CONF_STATION]: - return self.async_update_reload_and_abort(reauth_entry, data=user_input) + if user_input["station"] == self._reauth_entry.data[CONF_STATION]: + self.hass.config_entries.async_update_entry( + self._reauth_entry, data=user_input, unique_id=user_input["station"] + ) + self.hass.async_create_task( + self.hass.config_entries.async_reload(self._reauth_entry.entry_id) + ) + return self.async_abort(reason="reauth_successful") errors["base"] = "reauth_invalid" except ConnectionError: errors["base"] = "cannot_connect" diff --git a/homeassistant/components/wallbox/const.py b/homeassistant/components/wallbox/const.py index c38b8967776..69633cbda22 100644 --- a/homeassistant/components/wallbox/const.py +++ b/homeassistant/components/wallbox/const.py @@ -22,15 +22,11 @@ CHARGER_CURRENCY_KEY = "currency" CHARGER_DATA_KEY = "config_data" CHARGER_DEPOT_PRICE_KEY = "depot_price" CHARGER_ENERGY_PRICE_KEY = "energy_price" -CHARGER_FEATURES_KEY = "features" CHARGER_SERIAL_NUMBER_KEY = "serial_number" CHARGER_PART_NUMBER_KEY = "part_number" -CHARGER_PLAN_KEY = "plan" -CHARGER_POWER_BOOST_KEY = "POWER_BOOST" CHARGER_SOFTWARE_KEY = "software" CHARGER_MAX_AVAILABLE_POWER_KEY = "max_available_power" CHARGER_MAX_CHARGING_CURRENT_KEY = "max_charging_current" -CHARGER_MAX_ICP_CURRENT_KEY = "icp_max_current" CHARGER_PAUSE_RESUME_KEY = "paused" CHARGER_LOCKED_UNLOCKED_KEY = "locked" CHARGER_NAME_KEY = "name" diff --git a/homeassistant/components/wallbox/coordinator.py b/homeassistant/components/wallbox/coordinator.py index 99c565d9c0c..e24ccd28440 100644 --- a/homeassistant/components/wallbox/coordinator.py +++ b/homeassistant/components/wallbox/coordinator.py @@ -19,12 +19,8 @@ from .const import ( CHARGER_CURRENCY_KEY, CHARGER_DATA_KEY, CHARGER_ENERGY_PRICE_KEY, - CHARGER_FEATURES_KEY, CHARGER_LOCKED_UNLOCKED_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, - CHARGER_MAX_ICP_CURRENT_KEY, - CHARGER_PLAN_KEY, - CHARGER_POWER_BOOST_KEY, CHARGER_STATUS_DESCRIPTION_KEY, CHARGER_STATUS_ID_KEY, CODE_KEY, @@ -89,21 +85,6 @@ def _require_authentication[_WallboxCoordinatorT: WallboxCoordinator, **_P]( return require_authentication -def _validate(wallbox: Wallbox) -> None: - """Authenticate using Wallbox API.""" - try: - wallbox.authenticate() - except requests.exceptions.HTTPError as wallbox_connection_error: - if wallbox_connection_error.response.status_code == 403: - raise InvalidAuth from wallbox_connection_error - raise 
ConnectionError from wallbox_connection_error - - -async def async_validate_input(hass: HomeAssistant, wallbox: Wallbox) -> None: - """Get new sensor data for Wallbox component.""" - await hass.async_add_executor_job(_validate, wallbox) - - class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Wallbox Coordinator class.""" @@ -123,6 +104,19 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Authenticate using Wallbox API.""" self._wallbox.authenticate() + def _validate(self) -> None: + """Authenticate using Wallbox API.""" + try: + self._wallbox.authenticate() + except requests.exceptions.HTTPError as wallbox_connection_error: + if wallbox_connection_error.response.status_code == 403: + raise InvalidAuth from wallbox_connection_error + raise ConnectionError from wallbox_connection_error + + async def async_validate_input(self) -> None: + """Get new sensor data for Wallbox component.""" + await self.hass.async_add_executor_job(self._validate) + @_require_authentication def _get_data(self) -> dict[str, Any]: """Get new sensor data for Wallbox component.""" @@ -136,16 +130,6 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]): data[CHARGER_ENERGY_PRICE_KEY] = data[CHARGER_DATA_KEY][ CHARGER_ENERGY_PRICE_KEY ] - # Only show max_icp_current if power_boost is available in the wallbox unit: - if ( - data[CHARGER_DATA_KEY].get(CHARGER_MAX_ICP_CURRENT_KEY, 0) > 0 - and CHARGER_POWER_BOOST_KEY - in data[CHARGER_DATA_KEY][CHARGER_PLAN_KEY][CHARGER_FEATURES_KEY] - ): - data[CHARGER_MAX_ICP_CURRENT_KEY] = data[CHARGER_DATA_KEY][ - CHARGER_MAX_ICP_CURRENT_KEY - ] - data[CHARGER_CURRENCY_KEY] = ( f"{data[CHARGER_DATA_KEY][CHARGER_CURRENCY_KEY][CODE_KEY]}/kWh" ) @@ -176,21 +160,6 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]): ) await self.async_request_refresh() - @_require_authentication - def _set_icp_current(self, icp_current: float) -> None: - """Set maximum icp current for Wallbox.""" - try: - self._wallbox.setIcpMaxCurrent(self._station, icp_current) - except requests.exceptions.HTTPError as wallbox_connection_error: - if wallbox_connection_error.response.status_code == 403: - raise InvalidAuth from wallbox_connection_error - raise - - async def async_set_icp_current(self, icp_current: float) -> None: - """Set maximum icp current for Wallbox.""" - await self.hass.async_add_executor_job(self._set_icp_current, icp_current) - await self.async_request_refresh() - @_require_authentication def _set_energy_cost(self, energy_cost: float) -> None: """Set energy cost for Wallbox.""" diff --git a/homeassistant/components/wallbox/entity.py b/homeassistant/components/wallbox/entity.py index 3fe1865af4a..489e81ed6b0 100644 --- a/homeassistant/components/wallbox/entity.py +++ b/homeassistant/components/wallbox/entity.py @@ -34,8 +34,7 @@ class WallboxEntity(CoordinatorEntity[WallboxCoordinator]): }, name=f"Wallbox {self.coordinator.data[CHARGER_NAME_KEY]}", manufacturer="Wallbox", - model=self.coordinator.data[CHARGER_NAME_KEY].split(" SN")[0], - model_id=self.coordinator.data[CHARGER_DATA_KEY][CHARGER_PART_NUMBER_KEY], + model=self.coordinator.data[CHARGER_DATA_KEY][CHARGER_PART_NUMBER_KEY], sw_version=self.coordinator.data[CHARGER_DATA_KEY][CHARGER_SOFTWARE_KEY][ CHARGER_CURRENT_VERSION_KEY ], diff --git a/homeassistant/components/wallbox/manifest.json b/homeassistant/components/wallbox/manifest.json index 63102646508..ce9008ef8bb 100644 --- a/homeassistant/components/wallbox/manifest.json +++ b/homeassistant/components/wallbox/manifest.json 
@@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/wallbox", "iot_class": "cloud_polling", "loggers": ["wallbox"], - "requirements": ["wallbox==0.7.0"] + "requirements": ["wallbox==0.6.0"] } diff --git a/homeassistant/components/wallbox/number.py b/homeassistant/components/wallbox/number.py index 24cdd16f99d..8ae4c473299 100644 --- a/homeassistant/components/wallbox/number.py +++ b/homeassistant/components/wallbox/number.py @@ -21,7 +21,6 @@ from .const import ( CHARGER_ENERGY_PRICE_KEY, CHARGER_MAX_AVAILABLE_POWER_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, - CHARGER_MAX_ICP_CURRENT_KEY, CHARGER_PART_NUMBER_KEY, CHARGER_SERIAL_NUMBER_KEY, DOMAIN, @@ -68,16 +67,6 @@ NUMBER_TYPES: dict[str, WallboxNumberEntityDescription] = { set_value_fn=lambda coordinator: coordinator.async_set_energy_cost, native_step=0.01, ), - CHARGER_MAX_ICP_CURRENT_KEY: WallboxNumberEntityDescription( - key=CHARGER_MAX_ICP_CURRENT_KEY, - translation_key="maximum_icp_current", - max_value_fn=lambda coordinator: cast( - float, coordinator.data[CHARGER_MAX_AVAILABLE_POWER_KEY] - ), - min_value_fn=lambda _: 6, - set_value_fn=lambda coordinator: coordinator.async_set_icp_current, - native_step=1, - ), } diff --git a/homeassistant/components/wallbox/sensor.py b/homeassistant/components/wallbox/sensor.py index 18d8afb5612..eadbc04dca2 100644 --- a/homeassistant/components/wallbox/sensor.py +++ b/homeassistant/components/wallbox/sensor.py @@ -38,7 +38,6 @@ from .const import ( CHARGER_ENERGY_PRICE_KEY, CHARGER_MAX_AVAILABLE_POWER_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, - CHARGER_MAX_ICP_CURRENT_KEY, CHARGER_SERIAL_NUMBER_KEY, CHARGER_STATE_OF_CHARGE_KEY, CHARGER_STATUS_DESCRIPTION_KEY, @@ -146,13 +145,6 @@ SENSOR_TYPES: dict[str, WallboxSensorEntityDescription] = { device_class=SensorDeviceClass.CURRENT, state_class=SensorStateClass.MEASUREMENT, ), - CHARGER_MAX_ICP_CURRENT_KEY: WallboxSensorEntityDescription( - key=CHARGER_MAX_ICP_CURRENT_KEY, - translation_key=CHARGER_MAX_ICP_CURRENT_KEY, - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - ), } diff --git a/homeassistant/components/wallbox/strings.json b/homeassistant/components/wallbox/strings.json index f4378b328d8..dd96cebf605 100644 --- a/homeassistant/components/wallbox/strings.json +++ b/homeassistant/components/wallbox/strings.json @@ -38,9 +38,6 @@ }, "energy_price": { "name": "Energy price" - }, - "maximum_icp_current": { - "name": "Maximum ICP current" } }, "sensor": { @@ -82,9 +79,6 @@ }, "max_charging_current": { "name": "Max charging current" - }, - "icp_max_current": { - "name": "Max ICP current" } }, "switch": { diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index 4bfe1ce4481..731a513fb66 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -5,14 +5,15 @@ from __future__ import annotations from datetime import timedelta from enum import IntFlag import functools as ft +from functools import cached_property import logging from typing import Any, final -from propcache import cached_property import voluptuous as vol from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( + ATTR_ENTITY_ID, ATTR_TEMPERATURE, PRECISION_TENTHS, PRECISION_WHOLE, @@ -34,13 +35,11 @@ from homeassistant.helpers.deprecation import ( from homeassistant.helpers.entity import Entity, EntityDescription from 
homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.temperature import display_temp as show_temp -from homeassistant.helpers.typing import ConfigType, VolDictType -from homeassistant.util.hass_dict import HassKey +from homeassistant.helpers.typing import ConfigType from homeassistant.util.unit_conversion import TemperatureConverter from .const import DOMAIN -DATA_COMPONENT: HassKey[EntityComponent[WaterHeaterEntity]] = HassKey(DOMAIN) ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE @@ -95,32 +94,45 @@ CONVERTIBLE_ATTRIBUTE = [ATTR_TEMPERATURE] _LOGGER = logging.getLogger(__name__) -SET_AWAY_MODE_SCHEMA: VolDictType = { - vol.Required(ATTR_AWAY_MODE): cv.boolean, -} -SET_TEMPERATURE_SCHEMA: VolDictType = { - vol.Required(ATTR_TEMPERATURE, "temperature"): vol.Coerce(float), - vol.Optional(ATTR_OPERATION_MODE): cv.string, -} -SET_OPERATION_MODE_SCHEMA: VolDictType = { - vol.Required(ATTR_OPERATION_MODE): cv.string, -} +ON_OFF_SERVICE_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids}) + +SET_AWAY_MODE_SCHEMA = vol.Schema( + { + vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, + vol.Required(ATTR_AWAY_MODE): cv.boolean, + } +) +SET_TEMPERATURE_SCHEMA = vol.Schema( + vol.All( + { + vol.Required(ATTR_TEMPERATURE, "temperature"): vol.Coerce(float), + vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, + vol.Optional(ATTR_OPERATION_MODE): cv.string, + } + ) +) +SET_OPERATION_MODE_SCHEMA = vol.Schema( + { + vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, + vol.Required(ATTR_OPERATION_MODE): cv.string, + } +) # mypy: disallow-any-generics async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up water_heater devices.""" - component = hass.data[DATA_COMPONENT] = EntityComponent[WaterHeaterEntity]( + component = hass.data[DOMAIN] = EntityComponent[WaterHeaterEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) component.async_register_entity_service( - SERVICE_TURN_ON, None, "async_turn_on", [WaterHeaterEntityFeature.ON_OFF] + SERVICE_TURN_ON, {}, "async_turn_on", [WaterHeaterEntityFeature.ON_OFF] ) component.async_register_entity_service( - SERVICE_TURN_OFF, None, "async_turn_off", [WaterHeaterEntityFeature.ON_OFF] + SERVICE_TURN_OFF, {}, "async_turn_off", [WaterHeaterEntityFeature.ON_OFF] ) component.async_register_entity_service( SERVICE_SET_AWAY_MODE, SET_AWAY_MODE_SCHEMA, async_service_away_mode @@ -133,18 +145,26 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: SET_OPERATION_MODE_SCHEMA, "async_handle_set_operation_mode", ) + component.async_register_entity_service( + SERVICE_TURN_OFF, ON_OFF_SERVICE_SCHEMA, "async_turn_off" + ) + component.async_register_entity_service( + SERVICE_TURN_ON, ON_OFF_SERVICE_SCHEMA, "async_turn_on" + ) return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - return await hass.data[DATA_COMPONENT].async_setup_entry(entry) + component: EntityComponent[WaterHeaterEntity] = hass.data[DOMAIN] + return await component.async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - return await hass.data[DATA_COMPONENT].async_unload_entry(entry) + component: EntityComponent[WaterHeaterEntity] = hass.data[DOMAIN] + return await component.async_unload_entry(entry) class WaterHeaterEntityEntityDescription(EntityDescription, 
frozen_or_thawed=True): diff --git a/homeassistant/components/water_heater/icons.json b/homeassistant/components/water_heater/icons.json index bc80128c6a3..af6996374c5 100644 --- a/homeassistant/components/water_heater/icons.json +++ b/homeassistant/components/water_heater/icons.json @@ -22,20 +22,10 @@ } }, "services": { - "set_away_mode": { - "service": "mdi:account-arrow-right" - }, - "set_operation_mode": { - "service": "mdi:water-boiler" - }, - "set_temperature": { - "service": "mdi:thermometer" - }, - "turn_off": { - "service": "mdi:water-boiler-off" - }, - "turn_on": { - "service": "mdi:water-boiler" - } + "set_away_mode": "mdi:account-arrow-right", + "set_operation_mode": "mdi:water-boiler", + "set_temperature": "mdi:thermometer", + "turn_off": "mdi:water-boiler-off", + "turn_on": "mdi:water-boiler" } } diff --git a/homeassistant/components/water_heater/strings.json b/homeassistant/components/water_heater/strings.json index 07e132a0b5b..741b277d84d 100644 --- a/homeassistant/components/water_heater/strings.json +++ b/homeassistant/components/water_heater/strings.json @@ -1,5 +1,4 @@ { - "title": "Water heater", "device_automation": { "action_type": { "turn_on": "[%key:common::device_automation::action_type::turn_on%]", @@ -8,7 +7,7 @@ }, "entity_component": { "_": { - "name": "[%key:component::water_heater::title%]", + "name": "Water heater", "state": { "off": "[%key:common::state::off%]", "eco": "Eco", diff --git a/homeassistant/components/watttime/__init__.py b/homeassistant/components/watttime/__init__.py index ed2bdd4ebac..6b32cf723a3 100644 --- a/homeassistant/components/watttime/__init__.py +++ b/homeassistant/components/watttime/__init__.py @@ -58,7 +58,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, LOGGER, - config_entry=entry, name=entry.title, update_interval=DEFAULT_UPDATE_INTERVAL, update_method=async_update_data, diff --git a/homeassistant/components/watttime/config_flow.py b/homeassistant/components/watttime/config_flow.py index ad676e166c5..db68738b302 100644 --- a/homeassistant/components/watttime/config_flow.py +++ b/homeassistant/components/watttime/config_flow.py @@ -126,11 +126,9 @@ class WattTimeConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow( - config_entry: ConfigEntry, - ) -> WattTimeOptionsFlowHandler: + def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Define the config flow to handle options.""" - return WattTimeOptionsFlowHandler() + return WattTimeOptionsFlowHandler(config_entry) async def async_step_coordinates( self, user_input: dict[str, Any] | None = None @@ -243,6 +241,10 @@ class WattTimeConfigFlow(ConfigFlow, domain=DOMAIN): class WattTimeOptionsFlowHandler(OptionsFlow): """Handle a WattTime options flow.""" + def __init__(self, entry: ConfigEntry) -> None: + """Initialize.""" + self.entry = entry + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -256,7 +258,7 @@ class WattTimeOptionsFlowHandler(OptionsFlow): { vol.Required( CONF_SHOW_ON_MAP, - default=self.config_entry.options.get(CONF_SHOW_ON_MAP, True), + default=self.entry.options.get(CONF_SHOW_ON_MAP, True), ): bool } ), diff --git a/homeassistant/components/waze_travel_time/__init__.py b/homeassistant/components/waze_travel_time/__init__.py index 1abcf9d391d..83b2e2aa7c7 100644 --- a/homeassistant/components/waze_travel_time/__init__.py +++ b/homeassistant/components/waze_travel_time/__init__.py 
@@ -1,7 +1,6 @@ """The waze_travel_time component.""" import asyncio -from collections.abc import Collection import logging from pywaze.route_calculator import CalcRoutesResponse, WazeRouteCalculator, WRCError @@ -29,13 +28,10 @@ from .const import ( CONF_AVOID_SUBSCRIPTION_ROADS, CONF_AVOID_TOLL_ROADS, CONF_DESTINATION, - CONF_EXCL_FILTER, - CONF_INCL_FILTER, CONF_ORIGIN, CONF_REALTIME, CONF_UNITS, CONF_VEHICLE_TYPE, - DEFAULT_FILTER, DEFAULT_VEHICLE_TYPE, DOMAIN, METRIC_UNITS, @@ -90,7 +86,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b """Load the saved entities.""" if SEMAPHORE not in hass.data.setdefault(DOMAIN, {}): hass.data.setdefault(DOMAIN, {})[SEMAPHORE] = asyncio.Semaphore(1) - await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) async def async_get_travel_times_service(service: ServiceCall) -> ServiceResponse: @@ -129,14 +124,11 @@ async def async_get_travel_times( avoid_subscription_roads: bool, avoid_ferries: bool, realtime: bool, - incl_filters: Collection[str] | None = None, - excl_filters: Collection[str] | None = None, + incl_filter: str | None = None, + excl_filter: str | None = None, ) -> list[CalcRoutesResponse] | None: """Get all available routes.""" - incl_filters = incl_filters or () - excl_filters = excl_filters or () - _LOGGER.debug( "Getting update for origin: %s destination: %s", origin, @@ -155,46 +147,28 @@ async def async_get_travel_times( real_time=realtime, alternatives=3, ) - _LOGGER.debug("Got routes: %s", routes) - incl_routes: list[CalcRoutesResponse] = [] - - def should_include_route(route: CalcRoutesResponse) -> bool: - if len(incl_filters) < 1: - return True - should_include = any( - street_name in incl_filters or "" in incl_filters - for street_name in route.street_names - ) - if not should_include: - _LOGGER.debug( - "Excluding route [%s], because no inclusive filter matched any streetname", - route.name, + if incl_filter not in {None, ""}: + routes = [ + r + for r in routes + if any( + incl_filter.lower() == street_name.lower() # type: ignore[union-attr] + for street_name in r.street_names ) - return False - return True + ] - incl_routes = [route for route in routes if should_include_route(route)] + if excl_filter not in {None, ""}: + routes = [ + r + for r in routes + if not any( + excl_filter.lower() == street_name.lower() # type: ignore[union-attr] + for street_name in r.street_names + ) + ] - filtered_routes: list[CalcRoutesResponse] = [] - - def should_exclude_route(route: CalcRoutesResponse) -> bool: - for street_name in route.street_names: - for excl_filter in excl_filters: - if excl_filter == street_name: - _LOGGER.debug( - "Excluding route, because exclusive filter [%s] matched streetname: %s", - excl_filter, - route.name, - ) - return True - return False - - filtered_routes = [ - route for route in incl_routes if not should_exclude_route(route) - ] - - if len(filtered_routes) < 1: + if len(routes) < 1: _LOGGER.warning("No routes found") return None except WRCError as exp: @@ -202,36 +176,9 @@ async def async_get_travel_times( return None else: - return filtered_routes + return routes async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) - - -async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: - """Migrate an old config entry.""" - - if config_entry.version == 1: - _LOGGER.debug( - "Migrating from 
version %s.%s", - config_entry.version, - config_entry.minor_version, - ) - options = dict(config_entry.options) - if (incl_filters := options.pop(CONF_INCL_FILTER, None)) not in {None, ""}: - options[CONF_INCL_FILTER] = [incl_filters] - else: - options[CONF_INCL_FILTER] = DEFAULT_FILTER - if (excl_filters := options.pop(CONF_EXCL_FILTER, None)) not in {None, ""}: - options[CONF_EXCL_FILTER] = [excl_filters] - else: - options[CONF_EXCL_FILTER] = DEFAULT_FILTER - hass.config_entries.async_update_entry(config_entry, options=options, version=2) - _LOGGER.debug( - "Migration to version %s.%s successful", - config_entry.version, - config_entry.minor_version, - ) - return True diff --git a/homeassistant/components/waze_travel_time/config_flow.py b/homeassistant/components/waze_travel_time/config_flow.py index 6ab6a4b121c..12dc8336f92 100644 --- a/homeassistant/components/waze_travel_time/config_flow.py +++ b/homeassistant/components/waze_travel_time/config_flow.py @@ -7,7 +7,6 @@ from typing import Any import voluptuous as vol from homeassistant.config_entries import ( - SOURCE_RECONFIGURE, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -21,8 +20,6 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, SelectSelectorMode, TextSelector, - TextSelectorConfig, - TextSelectorType, ) from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM @@ -37,7 +34,6 @@ from .const import ( CONF_REALTIME, CONF_UNITS, CONF_VEHICLE_TYPE, - DEFAULT_FILTER, DEFAULT_NAME, DEFAULT_OPTIONS, DOMAIN, @@ -50,18 +46,8 @@ from .helpers import is_valid_config_entry OPTIONS_SCHEMA = vol.Schema( { - vol.Optional(CONF_INCL_FILTER): TextSelector( - TextSelectorConfig( - type=TextSelectorType.TEXT, - multiple=True, - ), - ), - vol.Optional(CONF_EXCL_FILTER): TextSelector( - TextSelectorConfig( - type=TextSelectorType.TEXT, - multiple=True, - ), - ), + vol.Optional(CONF_INCL_FILTER, default=""): TextSelector(), + vol.Optional(CONF_EXCL_FILTER, default=""): TextSelector(), vol.Optional(CONF_REALTIME): BooleanSelector(), vol.Required(CONF_VEHICLE_TYPE): SelectSelector( SelectSelectorConfig( @@ -102,7 +88,7 @@ CONFIG_SCHEMA = vol.Schema( ) -def default_options(hass: HomeAssistant) -> dict[str, str | bool | list[str]]: +def default_options(hass: HomeAssistant) -> dict[str, str | bool]: """Get the default options.""" defaults = DEFAULT_OPTIONS.copy() if hass.config.units is US_CUSTOMARY_SYSTEM: @@ -113,13 +99,13 @@ def default_options(hass: HomeAssistant) -> dict[str, str | bool | list[str]]: class WazeOptionsFlow(OptionsFlow): """Handle an options flow for Waze Travel Time.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize waze options flow.""" + self.config_entry = config_entry + async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Handle the initial step.""" if user_input is not None: - if user_input.get(CONF_INCL_FILTER) is None: - user_input[CONF_INCL_FILTER] = DEFAULT_FILTER - if user_input.get(CONF_EXCL_FILTER) is None: - user_input[CONF_EXCL_FILTER] = DEFAULT_FILTER return self.async_create_entry( title="", data=user_input, @@ -136,7 +122,11 @@ class WazeOptionsFlow(OptionsFlow): class WazeConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Waze Travel Time.""" - VERSION = 2 + VERSION = 1 + + def __init__(self) -> None: + """Init Config Flow.""" + self._entry: ConfigEntry | None = None @staticmethod @callback @@ -144,7 +134,7 @@ class WazeConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> WazeOptionsFlow: """Get the options flow 
for this handler.""" - return WazeOptionsFlow() + return WazeOptionsFlow(config_entry) async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -161,11 +151,12 @@ class WazeConfigFlow(ConfigFlow, domain=DOMAIN): user_input[CONF_DESTINATION], user_input[CONF_REGION], ): - if self.source == SOURCE_RECONFIGURE: + if self._entry: return self.async_update_reload_and_abort( - self._get_reconfigure_entry(), + self._entry, title=user_input[CONF_NAME], data=user_input, + reason="reconfigure_successful", ) return self.async_create_entry( title=user_input.get(CONF_NAME, DEFAULT_NAME), @@ -184,10 +175,13 @@ class WazeConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reconfigure( - self, user_input: dict[str, Any] | None = None + self, _: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfiguration.""" - data = self._get_reconfigure_entry().data.copy() + self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + assert self._entry + + data = self._entry.data.copy() data[CONF_REGION] = data[CONF_REGION].lower() return self.async_show_form( diff --git a/homeassistant/components/waze_travel_time/const.py b/homeassistant/components/waze_travel_time/const.py index 7c77f43574d..84e41c3963f 100644 --- a/homeassistant/components/waze_travel_time/const.py +++ b/homeassistant/components/waze_travel_time/const.py @@ -22,7 +22,6 @@ DEFAULT_VEHICLE_TYPE = "car" DEFAULT_AVOID_TOLL_ROADS = False DEFAULT_AVOID_SUBSCRIPTION_ROADS = False DEFAULT_AVOID_FERRIES = False -DEFAULT_FILTER = [""] IMPERIAL_UNITS = "imperial" METRIC_UNITS = "metric" @@ -31,13 +30,11 @@ UNITS = [METRIC_UNITS, IMPERIAL_UNITS] REGIONS = ["us", "na", "eu", "il", "au"] VEHICLE_TYPES = ["car", "taxi", "motorcycle"] -DEFAULT_OPTIONS: dict[str, str | bool | list[str]] = { +DEFAULT_OPTIONS: dict[str, str | bool] = { CONF_REALTIME: DEFAULT_REALTIME, CONF_VEHICLE_TYPE: DEFAULT_VEHICLE_TYPE, CONF_UNITS: METRIC_UNITS, CONF_AVOID_FERRIES: DEFAULT_AVOID_FERRIES, CONF_AVOID_SUBSCRIPTION_ROADS: DEFAULT_AVOID_SUBSCRIPTION_ROADS, CONF_AVOID_TOLL_ROADS: DEFAULT_AVOID_TOLL_ROADS, - CONF_INCL_FILTER: DEFAULT_FILTER, - CONF_EXCL_FILTER: DEFAULT_FILTER, } diff --git a/homeassistant/components/waze_travel_time/icons.json b/homeassistant/components/waze_travel_time/icons.json index 98e6f26774c..fa95e8fdd8a 100644 --- a/homeassistant/components/waze_travel_time/icons.json +++ b/homeassistant/components/waze_travel_time/icons.json @@ -7,8 +7,6 @@ } }, "services": { - "get_travel_times": { - "service": "mdi:timelapse" - } + "get_travel_times": "mdi:timelapse" } } diff --git a/homeassistant/components/waze_travel_time/sensor.py b/homeassistant/components/waze_travel_time/sensor.py index c2d3ee12cf8..7663b4a102e 100644 --- a/homeassistant/components/waze_travel_time/sensor.py +++ b/homeassistant/components/waze_travel_time/sensor.py @@ -183,8 +183,8 @@ class WazeTravelTimeData: ) if self.origin is not None and self.destination is not None: # Grab options on every update - incl_filter = self.config_entry.options[CONF_INCL_FILTER] - excl_filter = self.config_entry.options[CONF_EXCL_FILTER] + incl_filter = self.config_entry.options.get(CONF_INCL_FILTER) + excl_filter = self.config_entry.options.get(CONF_EXCL_FILTER) realtime = self.config_entry.options[CONF_REALTIME] vehicle_type = self.config_entry.options[CONF_VEHICLE_TYPE] avoid_toll_roads = self.config_entry.options[CONF_AVOID_TOLL_ROADS] diff --git a/homeassistant/components/waze_travel_time/strings.json 
b/homeassistant/components/waze_travel_time/strings.json index f053f033307..6b0b4184af7 100644 --- a/homeassistant/components/waze_travel_time/strings.json +++ b/homeassistant/components/waze_travel_time/strings.json @@ -23,12 +23,12 @@ "options": { "step": { "init": { - "description": "Some options will allow you to force the integration to use a particular route or avoid a particular route in its time travel calculation.", + "description": "The `substring` inputs will allow you to force the integration to use a particular route or avoid a particular route in its time travel calculation.", "data": { "units": "Units", "vehicle_type": "Vehicle Type", - "incl_filter": "Exact streetname which must be part of the selected route", - "excl_filter": "Exact streetname which must NOT be part of the selected route", + "incl_filter": "Streetname which must be part of the Selected Route", + "excl_filter": "Streetname which must NOT be part of the Selected Route", "realtime": "Realtime Travel Time?", "avoid_toll_roads": "Avoid Toll Roads?", "avoid_ferries": "Avoid Ferries?", @@ -100,7 +100,7 @@ }, "avoid_subscription_roads": { "name": "[%key:component::waze_travel_time::options::step::init::data::avoid_subscription_roads%]", - "description": "Whether to avoid subscription roads." + "description": "Whether to avoid subscription roads. " } } } diff --git a/homeassistant/components/weather/__init__.py b/homeassistant/components/weather/__init__.py index 557765795ee..468c023b470 100644 --- a/homeassistant/components/weather/__init__.py +++ b/homeassistant/components/weather/__init__.py @@ -6,11 +6,10 @@ import abc from collections.abc import Callable, Iterable from contextlib import suppress from datetime import timedelta -from functools import partial +from functools import cached_property, partial import logging from typing import Any, Final, Generic, Literal, Required, TypedDict, cast, final -from propcache import cached_property from typing_extensions import TypeVar import voluptuous as vol @@ -45,7 +44,7 @@ from homeassistant.util.dt import utcnow from homeassistant.util.json import JsonValueType from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM -from .const import ( # noqa: F401 +from .const import ( ATTR_WEATHER_APPARENT_TEMPERATURE, ATTR_WEATHER_CLOUD_COVERAGE, ATTR_WEATHER_DEW_POINT, @@ -63,9 +62,7 @@ from .const import ( # noqa: F401 ATTR_WEATHER_WIND_GUST_SPEED, ATTR_WEATHER_WIND_SPEED, ATTR_WEATHER_WIND_SPEED_UNIT, - DATA_COMPONENT, DOMAIN, - INTENT_GET_WEATHER, UNIT_CONVERSIONS, VALID_UNITS, WeatherEntityFeature, @@ -198,7 +195,7 @@ class Forecast(TypedDict, total=False): async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the weather component.""" - component = hass.data[DATA_COMPONENT] = EntityComponent[WeatherEntity]( + component = hass.data[DOMAIN] = EntityComponent[WeatherEntity]( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) component.async_register_entity_service( @@ -219,12 +216,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - return await hass.data[DATA_COMPONENT].async_setup_entry(entry) + component: EntityComponent[WeatherEntity] = hass.data[DOMAIN] + return await component.async_setup_entry(entry) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - return await hass.data[DATA_COMPONENT].async_unload_entry(entry) + component: 
EntityComponent[WeatherEntity] = hass.data[DOMAIN] + return await component.async_unload_entry(entry) class WeatherEntityDescription(EntityDescription, frozen_or_thawed=True): @@ -923,6 +922,7 @@ class WeatherEntity(Entity, PostInit, cached_properties=CACHED_PROPERTIES_WITH_A forecast_type: Literal["daily", "hourly", "twice_daily"], ) -> None: """Start subscription to forecast_type.""" + return None @callback def _async_subscription_ended( @@ -930,6 +930,7 @@ class WeatherEntity(Entity, PostInit, cached_properties=CACHED_PROPERTIES_WITH_A forecast_type: Literal["daily", "hourly", "twice_daily"], ) -> None: """End subscription to forecast_type.""" + return None @final @callback diff --git a/homeassistant/components/weather/const.py b/homeassistant/components/weather/const.py index f532b891e3e..0b5246ab31c 100644 --- a/homeassistant/components/weather/const.py +++ b/homeassistant/components/weather/const.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Callable from enum import IntFlag -from typing import TYPE_CHECKING, Final +from typing import Final from homeassistant.const import ( UnitOfLength, @@ -13,7 +13,6 @@ from homeassistant.const import ( UnitOfSpeed, UnitOfTemperature, ) -from homeassistant.util.hass_dict import HassKey from homeassistant.util.unit_conversion import ( DistanceConverter, PressureConverter, @@ -21,11 +20,6 @@ from homeassistant.util.unit_conversion import ( TemperatureConverter, ) -if TYPE_CHECKING: - from homeassistant.helpers.entity_component import EntityComponent - - from . import WeatherEntity - class WeatherEntityFeature(IntFlag): """Supported features of the update entity.""" @@ -54,9 +48,6 @@ ATTR_WEATHER_CLOUD_COVERAGE = "cloud_coverage" ATTR_WEATHER_UV_INDEX = "uv_index" DOMAIN: Final = "weather" -DATA_COMPONENT: HassKey[EntityComponent[WeatherEntity]] = HassKey(DOMAIN) - -INTENT_GET_WEATHER = "HassGetWeather" VALID_UNITS_PRESSURE: set[str] = { UnitOfPressure.HPA, diff --git a/homeassistant/components/weather/icons.json b/homeassistant/components/weather/icons.json index 04b3c1d3df8..cc53861e700 100644 --- a/homeassistant/components/weather/icons.json +++ b/homeassistant/components/weather/icons.json @@ -21,11 +21,7 @@ } }, "services": { - "get_forecast": { - "service": "mdi:weather-cloudy-clock" - }, - "get_forecasts": { - "service": "mdi:weather-cloudy-clock" - } + "get_forecast": "mdi:weather-cloudy-clock", + "get_forecasts": "mdi:weather-cloudy-clock" } } diff --git a/homeassistant/components/weather/intent.py b/homeassistant/components/weather/intent.py index 078108d7afe..e00a386b619 100644 --- a/homeassistant/components/weather/intent.py +++ b/homeassistant/components/weather/intent.py @@ -7,7 +7,9 @@ import voluptuous as vol from homeassistant.core import HomeAssistant, State from homeassistant.helpers import intent -from . import DOMAIN, INTENT_GET_WEATHER +from . 
import DOMAIN + +INTENT_GET_WEATHER = "HassGetWeather" async def async_setup_intents(hass: HomeAssistant) -> None: diff --git a/homeassistant/components/weather/strings.json b/homeassistant/components/weather/strings.json index 85d331f5bd0..77c9cce864b 100644 --- a/homeassistant/components/weather/strings.json +++ b/homeassistant/components/weather/strings.json @@ -111,12 +111,12 @@ }, "issues": { "deprecated_service_weather_get_forecast": { - "title": "Detected use of deprecated service weather.get_forecast", + "title": "Detected use of deprecated service `weather.get_forecast`", "fix_flow": { "step": { "confirm": { "title": "[%key:component::weather::issues::deprecated_service_weather_get_forecast::title%]", - "description": "Use `weather.get_forecasts` instead which supports multiple entities.\n\nPlease replace this service and adjust your automations and scripts and select **Submit** to close this issue." + "description": "Use `weather.get_forecasts` instead which supports multiple entities.\n\nPlease replace this service and adjust your automations and scripts and select **submit** to close this issue." } } } diff --git a/homeassistant/components/weather/websocket_api.py b/homeassistant/components/weather/websocket_api.py index a96c4fa9973..98adbd1bd02 100644 --- a/homeassistant/components/weather/websocket_api.py +++ b/homeassistant/components/weather/websocket_api.py @@ -9,9 +9,10 @@ import voluptuous as vol from homeassistant.components import websocket_api from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.entity_component import EntityComponent from homeassistant.util.json import JsonValueType -from .const import DATA_COMPONENT, DOMAIN, VALID_UNITS, WeatherEntityFeature +from .const import DOMAIN, VALID_UNITS, WeatherEntityFeature FORECAST_TYPE_TO_FLAG = { "daily": WeatherEntityFeature.FORECAST_DAILY, @@ -55,10 +56,13 @@ async def ws_subscribe_forecast( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Subscribe to weather forecasts.""" + from . import WeatherEntity # pylint: disable=import-outside-toplevel + + component: EntityComponent[WeatherEntity] = hass.data[DOMAIN] entity_id: str = msg["entity_id"] forecast_type: Literal["daily", "hourly", "twice_daily"] = msg["forecast_type"] - if not (entity := hass.data[DATA_COMPONENT].get_entity(msg["entity_id"])): + if not (entity := component.get_entity(msg["entity_id"])): connection.send_error( msg["id"], "invalid_entity_id", diff --git a/homeassistant/components/weatherflow/strings.json b/homeassistant/components/weatherflow/strings.json index cf23f02d781..d075ee34a05 100644 --- a/homeassistant/components/weatherflow/strings.json +++ b/homeassistant/components/weatherflow/strings.json @@ -2,7 +2,7 @@ "config": { "step": { "user": { - "description": "Unable to discover Tempest WeatherFlow devices. Select **Submit** to try again.", + "description": "Unable to discover Tempest WeatherFlow devices. Click submit to try again.", "data": { "host": "[%key:common::config_flow::data::host%]" }, @@ -13,11 +13,11 @@ }, "error": { "address_in_use": "Unable to open local UDP port 50222.", - "cannot_connect": "UDP discovery error.", - "no_device_found": "[%key:common::config_flow::abort::no_devices_found%]" + "cannot_connect": "UDP discovery error." 
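The ws_subscribe_forecast handler above gates forecast subscriptions on the entity's supported-feature bits via FORECAST_TYPE_TO_FLAG. A standalone sketch of that IntFlag check; the flag values here are assumed for illustration, not copied from the component.

from enum import IntFlag


class WeatherEntityFeature(IntFlag):
    """Illustrative forecast feature flags (values assumed, not authoritative)."""

    FORECAST_DAILY = 1
    FORECAST_TWICE_DAILY = 2
    FORECAST_HOURLY = 4


FORECAST_TYPE_TO_FLAG = {
    "daily": WeatherEntityFeature.FORECAST_DAILY,
    "hourly": WeatherEntityFeature.FORECAST_HOURLY,
    "twice_daily": WeatherEntityFeature.FORECAST_TWICE_DAILY,
}


def supports_forecast(supported_features: int, forecast_type: str) -> bool:
    """Return True if the advertised feature bits include the requested type."""
    return bool(supported_features & FORECAST_TYPE_TO_FLAG[forecast_type])


features = WeatherEntityFeature.FORECAST_DAILY | WeatherEntityFeature.FORECAST_HOURLY
assert supports_forecast(features, "hourly")
assert not supports_forecast(features, "twice_daily")

A subscription request for an unsupported forecast type can then be rejected before any listener is registered.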
}, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]" } }, "entity": { diff --git a/homeassistant/components/weatherflow_cloud/__init__.py b/homeassistant/components/weatherflow_cloud/__init__.py index 8dc26f9b9c6..a40386100e7 100644 --- a/homeassistant/components/weatherflow_cloud/__init__.py +++ b/homeassistant/components/weatherflow_cloud/__init__.py @@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant from .const import DOMAIN from .coordinator import WeatherFlowCloudDataUpdateCoordinator -PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.WEATHER] +PLATFORMS: list[Platform] = [Platform.WEATHER] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/weatherflow_cloud/config_flow.py b/homeassistant/components/weatherflow_cloud/config_flow.py index bdd3003e6b6..e8972c320ed 100644 --- a/homeassistant/components/weatherflow_cloud/config_flow.py +++ b/homeassistant/components/weatherflow_cloud/config_flow.py @@ -33,15 +33,9 @@ class WeatherFlowCloudConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 async def async_step_reauth( - self, entry_data: Mapping[str, Any] + self, user_input: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow for reauth.""" - return await self.async_step_reauth_confirm() - - async def async_step_reauth_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a flow initiated by reauthentication.""" errors = {} if user_input is not None: @@ -49,14 +43,18 @@ class WeatherFlowCloudConfigFlow(ConfigFlow, domain=DOMAIN): errors = await _validate_api_token(api_token) if not errors: # Update the existing entry and abort - return self.async_update_reload_and_abort( - self._get_reauth_entry(), - data={CONF_API_TOKEN: api_token}, - reload_even_if_entry_is_unchanged=False, - ) + if existing_entry := self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ): + return self.async_update_reload_and_abort( + existing_entry, + data={CONF_API_TOKEN: api_token}, + reason="reauth_successful", + reload_even_if_entry_is_unchanged=False, + ) return self.async_show_form( - step_id="reauth_confirm", + step_id="reauth", data_schema=vol.Schema({vol.Required(CONF_API_TOKEN): str}), errors=errors, ) diff --git a/homeassistant/components/weatherflow_cloud/coordinator.py b/homeassistant/components/weatherflow_cloud/coordinator.py index 8b8a916262f..78b4f3be223 100644 --- a/homeassistant/components/weatherflow_cloud/coordinator.py +++ b/homeassistant/components/weatherflow_cloud/coordinator.py @@ -21,11 +21,12 @@ class WeatherFlowCloudDataUpdateCoordinator( def __init__(self, hass: HomeAssistant, api_token: str) -> None: """Initialize global WeatherFlow forecast data updater.""" self.weather_api = WeatherFlowRestAPI(api_token=api_token) + super().__init__( hass, LOGGER, name=DOMAIN, - update_interval=timedelta(seconds=60), + update_interval=timedelta(minutes=15), ) async def _async_update_data(self) -> dict[int, WeatherFlowDataREST]: diff --git a/homeassistant/components/weatherflow_cloud/entity.py b/homeassistant/components/weatherflow_cloud/entity.py deleted file mode 100644 index 46077ab0870..00000000000 --- a/homeassistant/components/weatherflow_cloud/entity.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Base entity class for WeatherFlow Cloud 
integration.""" - -from weatherflow4py.models.rest.unified import WeatherFlowDataREST - -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity - -from .const import ATTR_ATTRIBUTION, DOMAIN, MANUFACTURER -from .coordinator import WeatherFlowCloudDataUpdateCoordinator - - -class WeatherFlowCloudEntity(CoordinatorEntity[WeatherFlowCloudDataUpdateCoordinator]): - """Base entity class to use for everything.""" - - _attr_attribution = ATTR_ATTRIBUTION - _attr_has_entity_name = True - - def __init__( - self, - coordinator: WeatherFlowCloudDataUpdateCoordinator, - station_id: int, - ) -> None: - """Class initializer.""" - super().__init__(coordinator) - self.station_id = station_id - - self._attr_device_info = DeviceInfo( - name=self.station.station.name, - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, str(station_id))}, - manufacturer=MANUFACTURER, - configuration_url=f"https://tempestwx.com/station/{station_id}/grid", - ) - - @property - def station(self) -> WeatherFlowDataREST: - """Individual Station data.""" - return self.coordinator.data[self.station_id] diff --git a/homeassistant/components/weatherflow_cloud/icons.json b/homeassistant/components/weatherflow_cloud/icons.json deleted file mode 100644 index 19e6ac56821..00000000000 --- a/homeassistant/components/weatherflow_cloud/icons.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "entity": { - "sensor": { - "air_temperature": { - "default": "mdi:thermometer" - }, - "air_density": { - "default": "mdi:format-line-weight" - }, - "feels_like": { - "default": "mdi:thermometer" - }, - "heat_index": { - "default": "mdi:sun-thermometer" - }, - "wet_bulb_temperature": { - "default": "mdi:thermometer-water" - }, - "wet_bulb_globe_temperature": { - "default": "mdi:thermometer-water" - }, - "lightning_strike_count": { - "default": "mdi:lightning-bolt" - }, - "lightning_strike_count_last_1hr": { - "default": "mdi:lightning-bolt" - }, - "lightning_strike_count_last_3hr": { - "default": "mdi:lightning-bolt" - }, - "lightning_strike_last_distance": { - "default": "mdi:lightning-bolt" - }, - "lightning_strike_last_epoch": { - "default": "mdi:lightning-bolt" - }, - "wind_chill": { - "default": "mdi:snowflake-thermometer" - } - } - } -} diff --git a/homeassistant/components/weatherflow_cloud/manifest.json b/homeassistant/components/weatherflow_cloud/manifest.json index 98c98cfbac7..93df04d833c 100644 --- a/homeassistant/components/weatherflow_cloud/manifest.json +++ b/homeassistant/components/weatherflow_cloud/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/weatherflow_cloud", "iot_class": "cloud_polling", - "loggers": ["weatherflow4py"], - "requirements": ["weatherflow4py==1.0.6"] + "requirements": ["weatherflow4py==0.2.21"] } diff --git a/homeassistant/components/weatherflow_cloud/sensor.py b/homeassistant/components/weatherflow_cloud/sensor.py deleted file mode 100644 index aeab955878f..00000000000 --- a/homeassistant/components/weatherflow_cloud/sensor.py +++ /dev/null @@ -1,210 +0,0 @@ -"""Sensors for cloud based weatherflow.""" - -from __future__ import annotations - -from collections.abc import Callable -from dataclasses import dataclass -from datetime import UTC, datetime - -from weatherflow4py.models.rest.observation import Observation - -from homeassistant.components.sensor import ( - SensorDeviceClass, - SensorEntity, - SensorEntityDescription, - SensorStateClass, -) -from 
homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfLength, UnitOfPressure, UnitOfTemperature -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import StateType - -from .const import DOMAIN -from .coordinator import WeatherFlowCloudDataUpdateCoordinator -from .entity import WeatherFlowCloudEntity - - -@dataclass(frozen=True, kw_only=True) -class WeatherFlowCloudSensorEntityDescription( - SensorEntityDescription, -): - """Describes a weatherflow sensor.""" - - value_fn: Callable[[Observation], StateType | datetime] - - -WF_SENSORS: tuple[WeatherFlowCloudSensorEntityDescription, ...] = ( - # Air Sensors - WeatherFlowCloudSensorEntityDescription( - key="air_density", - translation_key="air_density", - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=5, - value_fn=lambda data: data.air_density, - native_unit_of_measurement="kg/m³", - ), - # Temp Sensors - WeatherFlowCloudSensorEntityDescription( - key="air_temperature", - translation_key="air_temperature", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=1, - value_fn=lambda data: data.air_temperature, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), - WeatherFlowCloudSensorEntityDescription( - key="dew_point", - translation_key="dew_point", - value_fn=lambda data: data.dew_point, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=1, - ), - WeatherFlowCloudSensorEntityDescription( - key="feels_like", - translation_key="feels_like", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=1, - value_fn=lambda data: data.feels_like, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), - WeatherFlowCloudSensorEntityDescription( - key="heat_index", - translation_key="heat_index", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=1, - value_fn=lambda data: data.heat_index, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), - WeatherFlowCloudSensorEntityDescription( - key="wind_chill", - translation_key="wind_chill", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=1, - value_fn=lambda data: data.wind_chill, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), - WeatherFlowCloudSensorEntityDescription( - key="wet_bulb_temperature", - translation_key="wet_bulb_temperature", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=1, - value_fn=lambda data: data.wet_bulb_temperature, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), - WeatherFlowCloudSensorEntityDescription( - key="wet_bulb_globe_temperature", - translation_key="wet_bulb_globe_temperature", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=1, - value_fn=lambda data: data.wet_bulb_globe_temperature, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), - # Pressure Sensors - WeatherFlowCloudSensorEntityDescription( - key="barometric_pressure", - translation_key="barometric_pressure", - value_fn=lambda data: data.barometric_pressure, - 
native_unit_of_measurement=UnitOfPressure.MBAR, - device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=3, - ), - WeatherFlowCloudSensorEntityDescription( - key="sea_level_pressure", - translation_key="sea_level_pressure", - value_fn=lambda data: data.sea_level_pressure, - native_unit_of_measurement=UnitOfPressure.MBAR, - device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=3, - ), - # Lightning Sensors - WeatherFlowCloudSensorEntityDescription( - key="lightning_strike_count", - translation_key="lightning_strike_count", - state_class=SensorStateClass.TOTAL, - value_fn=lambda data: data.lightning_strike_count, - ), - WeatherFlowCloudSensorEntityDescription( - key="lightning_strike_count_last_1hr", - translation_key="lightning_strike_count_last_1hr", - state_class=SensorStateClass.TOTAL, - value_fn=lambda data: data.lightning_strike_count_last_1hr, - ), - WeatherFlowCloudSensorEntityDescription( - key="lightning_strike_count_last_3hr", - translation_key="lightning_strike_count_last_3hr", - state_class=SensorStateClass.TOTAL, - value_fn=lambda data: data.lightning_strike_count_last_3hr, - ), - WeatherFlowCloudSensorEntityDescription( - key="lightning_strike_last_distance", - translation_key="lightning_strike_last_distance", - state_class=SensorStateClass.MEASUREMENT, - device_class=SensorDeviceClass.DISTANCE, - native_unit_of_measurement=UnitOfLength.KILOMETERS, - value_fn=lambda data: data.lightning_strike_last_distance, - ), - WeatherFlowCloudSensorEntityDescription( - key="lightning_strike_last_epoch", - translation_key="lightning_strike_last_epoch", - device_class=SensorDeviceClass.TIMESTAMP, - value_fn=( - lambda data: datetime.fromtimestamp( - data.lightning_strike_last_epoch, tz=UTC - ) - if data.lightning_strike_last_epoch is not None - else None - ), - ), -) - - -async def async_setup_entry( - hass: HomeAssistant, - entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up WeatherFlow sensors based on a config entry.""" - - coordinator: WeatherFlowCloudDataUpdateCoordinator = hass.data[DOMAIN][ - entry.entry_id - ] - - async_add_entities( - WeatherFlowCloudSensor(coordinator, sensor_description, station_id) - for station_id in coordinator.data - for sensor_description in WF_SENSORS - ) - - -class WeatherFlowCloudSensor(WeatherFlowCloudEntity, SensorEntity): - """Implementation of a WeatherFlow sensor.""" - - entity_description: WeatherFlowCloudSensorEntityDescription - - def __init__( - self, - coordinator: WeatherFlowCloudDataUpdateCoordinator, - description: WeatherFlowCloudSensorEntityDescription, - station_id: int, - ) -> None: - """Initialize the sensor.""" - # Initialize the Entity Class - super().__init__(coordinator, station_id) - self.entity_description = description - self._attr_unique_id = f"{station_id}_{description.key}" - - @property - def native_value(self) -> StateType | datetime: - """Return the state of the sensor.""" - return self.entity_description.value_fn(self.station.observation.obs[0]) diff --git a/homeassistant/components/weatherflow_cloud/strings.json b/homeassistant/components/weatherflow_cloud/strings.json index f707cbb0353..782b0dcf960 100644 --- a/homeassistant/components/weatherflow_cloud/strings.json +++ b/homeassistant/components/weatherflow_cloud/strings.json @@ -7,7 +7,7 @@ "api_token": "Personal api token" } }, - "reauth_confirm": { + "reauth": { "description": 
"Reauthenticate with WeatherFlow", "data": { "api_token": "[%key:component::weatherflow_cloud::config::step::user::data::api_token%]" @@ -23,65 +23,5 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } - }, - "entity": { - "sensor": { - "air_density": { - "name": "Air density" - }, - "barometric_pressure": { - "name": "Pressure barometric" - }, - "sea_level_pressure": { - "name": "Pressure sea level" - }, - - "dew_point": { - "name": "Dew point" - }, - "lightning_strike_count": { - "name": "Lightning count" - }, - "lightning_strike_count_last_1hr": { - "name": "Lightning count last 1 hr" - }, - "lightning_strike_count_last_3hr": { - "name": "Lightning count last 3 hr" - }, - "lightning_strike_last_distance": { - "name": "Lightning last distance" - }, - "lightning_strike_last_epoch": { - "name": "Lightning last strike" - }, - - "wind_chill": { - "name": "Wind chill" - }, - "wind_direction": { - "name": "Wind direction" - }, - "wind_direction_cardinal": { - "name": "Wind direction (cardinal)" - }, - "wind_gust": { - "name": "Wind gust" - }, - "wind_lull": { - "name": "Wind lull" - }, - "feels_like": { - "name": "Feels like" - }, - "heat_index": { - "name": "Heat index" - }, - "wet_bulb_temperature": { - "name": "Wet bulb temperature" - }, - "wet_bulb_globe_temperature": { - "name": "Wet bulb globe temperature" - } - } } } diff --git a/homeassistant/components/weatherflow_cloud/weather.py b/homeassistant/components/weatherflow_cloud/weather.py index c475f2974a9..47e2b6a28df 100644 --- a/homeassistant/components/weatherflow_cloud/weather.py +++ b/homeassistant/components/weatherflow_cloud/weather.py @@ -17,11 +17,11 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN, STATE_MAP +from .const import ATTR_ATTRIBUTION, DOMAIN, MANUFACTURER, STATE_MAP from .coordinator import WeatherFlowCloudDataUpdateCoordinator -from .entity import WeatherFlowCloudEntity async def async_setup_entry( @@ -43,11 +43,13 @@ async def async_setup_entry( class WeatherFlowWeather( - WeatherFlowCloudEntity, - SingleCoordinatorWeatherEntity[WeatherFlowCloudDataUpdateCoordinator], + SingleCoordinatorWeatherEntity[WeatherFlowCloudDataUpdateCoordinator] ): """Implementation of a WeatherFlow weather condition.""" + _attr_attribution = ATTR_ATTRIBUTION + _attr_has_entity_name = True + _attr_native_temperature_unit = UnitOfTemperature.CELSIUS _attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS _attr_native_pressure_unit = UnitOfPressure.MBAR @@ -63,9 +65,19 @@ class WeatherFlowWeather( station_id: int, ) -> None: """Initialise the platform with a data instance and station.""" - super().__init__(coordinator, station_id) + super().__init__(coordinator) + + self.station_id = station_id self._attr_unique_id = f"weatherflow_forecast_{station_id}" + self._attr_device_info = DeviceInfo( + name=self.local_data.station.name, + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, f"{station_id}")}, + manufacturer=MANUFACTURER, + configuration_url=f"https://tempestwx.com/station/{station_id}/grid", + ) + @property def local_data(self) -> WeatherFlowDataREST: """Return the local weather data object for this station.""" @@ -86,6 +98,7 @@ class WeatherFlowWeather( 
"""Return the Air Pressure @ Station.""" return self.local_data.weather.current_conditions.station_pressure + # @property def humidity(self) -> float | None: """Return the humidity.""" diff --git a/homeassistant/components/weatherkit/manifest.json b/homeassistant/components/weatherkit/manifest.json index f86745f330f..a6dd40d5993 100644 --- a/homeassistant/components/weatherkit/manifest.json +++ b/homeassistant/components/weatherkit/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/weatherkit", "iot_class": "cloud_polling", - "requirements": ["apple_weatherkit==1.1.3"] + "requirements": ["apple_weatherkit==1.1.2"] } diff --git a/homeassistant/components/webhook/manifest.json b/homeassistant/components/webhook/manifest.json index 43f5321d9f6..c2795e8ac17 100644 --- a/homeassistant/components/webhook/manifest.json +++ b/homeassistant/components/webhook/manifest.json @@ -4,6 +4,5 @@ "codeowners": ["@home-assistant/core"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/webhook", - "integration_type": "system", "quality_scale": "internal" } diff --git a/homeassistant/components/webmin/config_flow.py b/homeassistant/components/webmin/config_flow.py index 64f8c684dfa..5fa3aefb048 100644 --- a/homeassistant/components/webmin/config_flow.py +++ b/homeassistant/components/webmin/config_flow.py @@ -26,7 +26,7 @@ from homeassistant.helpers.schema_config_entry_flow import ( SchemaFlowFormStep, ) -from .const import DEFAULT_PORT, DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, LOGGER +from .const import DEFAULT_PORT, DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN from .helpers import get_instance_from_options, get_sorted_mac_addresses @@ -45,17 +45,17 @@ async def validate_user_input( raise SchemaFlowError("invalid_auth") from err raise SchemaFlowError("cannot_connect") from err except Fault as fault: - LOGGER.exception(f"Fault {fault.faultCode}: {fault.faultString}") - raise SchemaFlowError("unknown") from fault + raise SchemaFlowError( + f"Fault {fault.faultCode}: {fault.faultString}" + ) from fault except ClientConnectionError as err: raise SchemaFlowError("cannot_connect") from err except Exception as err: raise SchemaFlowError("unknown") from err - if len(mac_addresses := get_sorted_mac_addresses(data)) > 0: - await cast(SchemaConfigFlowHandler, handler.parent_handler).async_set_unique_id( - mac_addresses[0] - ) + await cast(SchemaConfigFlowHandler, handler.parent_handler).async_set_unique_id( + get_sorted_mac_addresses(data)[0] + ) return user_input diff --git a/homeassistant/components/webmin/coordinator.py b/homeassistant/components/webmin/coordinator.py index 45261787e75..dab5e495c1a 100644 --- a/homeassistant/components/webmin/coordinator.py +++ b/homeassistant/components/webmin/coordinator.py @@ -23,7 +23,6 @@ class WebminUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """The Webmin data update coordinator.""" mac_address: str - unique_id: str def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: """Initialize the Webmin data update coordinator.""" @@ -42,19 +41,14 @@ class WebminUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): async def async_setup(self) -> None: """Provide needed data to the device info.""" mac_addresses = get_sorted_mac_addresses(self.data) - if len(mac_addresses) > 0: - self.mac_address = mac_addresses[0] - self.unique_id = self.mac_address - self.device_info[ATTR_CONNECTIONS] = { - (CONNECTION_NETWORK_MAC, format_mac(mac_address)) - for 
mac_address in mac_addresses - } - self.device_info[ATTR_IDENTIFIERS] = { - (DOMAIN, format_mac(mac_address)) for mac_address in mac_addresses - } - else: - assert self.config_entry - self.unique_id = self.config_entry.entry_id + self.mac_address = mac_addresses[0] + self.device_info[ATTR_CONNECTIONS] = { + (CONNECTION_NETWORK_MAC, format_mac(mac_address)) + for mac_address in mac_addresses + } + self.device_info[ATTR_IDENTIFIERS] = { + (DOMAIN, format_mac(mac_address)) for mac_address in mac_addresses + } async def _async_update_data(self) -> dict[str, Any]: data = await self.instance.update() diff --git a/homeassistant/components/webmin/sensor.py b/homeassistant/components/webmin/sensor.py index 785140393a2..cf1a9845c02 100644 --- a/homeassistant/components/webmin/sensor.py +++ b/homeassistant/components/webmin/sensor.py @@ -235,7 +235,7 @@ class WebminSensor(CoordinatorEntity[WebminUpdateCoordinator], SensorEntity): super().__init__(coordinator) self.entity_description = description self._attr_device_info = coordinator.device_info - self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" + self._attr_unique_id = f"{coordinator.mac_address}_{description.key}" @property def native_value(self) -> int | float: diff --git a/homeassistant/components/webostv/__init__.py b/homeassistant/components/webostv/__init__.py index 499d0a85518..36950b0e02a 100644 --- a/homeassistant/components/webostv/__init__.py +++ b/homeassistant/components/webostv/__init__.py @@ -40,7 +40,7 @@ from .const import ( WEBOSTV_EXCEPTIONS, ) -CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) +CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) CALL_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.comp_entity_ids}) diff --git a/homeassistant/components/webostv/config_flow.py b/homeassistant/components/webostv/config_flow.py index 45395bd282a..f380e49f8a3 100644 --- a/homeassistant/components/webostv/config_flow.py +++ b/homeassistant/components/webostv/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Mapping import logging -from typing import Any, Self +from typing import Any from urllib.parse import urlparse from aiowebostv import WebOsTvPairError @@ -47,6 +47,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): self._host: str = "" self._name: str = "" self._uuid: str | None = None + self._entry: ConfigEntry | None = None @staticmethod @callback @@ -91,6 +92,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): """Display pairing form.""" self._async_check_configured_entry() + self.context[CONF_HOST] = self._host self.context["title_placeholders"] = {"name": self._name} errors = {} @@ -128,27 +130,27 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(uuid) self._abort_if_unique_id_configured({CONF_HOST: self._host}) - if self.hass.config_entries.flow.async_has_matching_flow(self): - return self.async_abort(reason="already_in_progress") + for progress in self._async_in_progress(): + if progress.get("context", {}).get(CONF_HOST) == self._host: + return self.async_abort(reason="already_in_progress") self._uuid = uuid return await self.async_step_pairing() - def is_matching(self, other_flow: Self) -> bool: - """Return True if other_flow is matching this flow.""" - return other_flow._host == self._host # noqa: SLF001 - async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an WebOsTvPairError.""" self._host = entry_data[CONF_HOST] + self._entry = 
self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" + assert self._entry is not None + if user_input is not None: try: client = await async_control_connect(self._host, None) @@ -157,9 +159,8 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): except WEBOSTV_EXCEPTIONS: return self.async_abort(reason="reauth_unsuccessful") - reauth_entry = self._get_reauth_entry() - update_client_key(self.hass, reauth_entry, client) - await self.hass.config_entries.async_reload(reauth_entry.entry_id) + update_client_key(self.hass, self._entry, client) + await self.hass.config_entries.async_reload(self._entry.entry_id) return self.async_abort(reason="reauth_successful") return self.async_show_form(step_id="reauth_confirm") @@ -170,6 +171,8 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" + self.config_entry = config_entry + self.options = config_entry.options self.host = config_entry.data[CONF_HOST] self.key = config_entry.data[CONF_CLIENT_SECRET] @@ -186,8 +189,7 @@ class OptionsFlowHandler(OptionsFlow): if not sources_list: errors["base"] = "cannot_retrieve" - option_sources = self.config_entry.options.get(CONF_SOURCES, []) - sources = [s for s in option_sources if s in sources_list] + sources = [s for s in self.options.get(CONF_SOURCES, []) if s in sources_list] if not sources: sources = sources_list diff --git a/homeassistant/components/webostv/device_trigger.py b/homeassistant/components/webostv/device_trigger.py index f16b1cec4f5..17d92b1abf3 100644 --- a/homeassistant/components/webostv/device_trigger.py +++ b/homeassistant/components/webostv/device_trigger.py @@ -4,8 +4,8 @@ from __future__ import annotations import voluptuous as vol -from homeassistant.components.device_automation import ( - DEVICE_TRIGGER_BASE_SCHEMA, +from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA +from homeassistant.components.device_automation.exceptions import ( InvalidDeviceAutomationConfig, ) from homeassistant.const import CONF_DEVICE_ID, CONF_PLATFORM, CONF_TYPE diff --git a/homeassistant/components/webostv/icons.json b/homeassistant/components/webostv/icons.json index edc058d099f..deb9729a99f 100644 --- a/homeassistant/components/webostv/icons.json +++ b/homeassistant/components/webostv/icons.json @@ -1,13 +1,7 @@ { "services": { - "button": { - "service": "mdi:button-pointer" - }, - "command": { - "service": "mdi:console" - }, - "select_sound_output": { - "service": "mdi:volume-source" - } + "button": "mdi:button-pointer", + "command": "mdi:console", + "select_sound_output": "mdi:volume-source" } } diff --git a/homeassistant/components/webostv/manifest.json b/homeassistant/components/webostv/manifest.json index 679bad9b9f5..ed8e1a6cc6e 100644 --- a/homeassistant/components/webostv/manifest.json +++ b/homeassistant/components/webostv/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_push", "loggers": ["aiowebostv"], "quality_scale": "platinum", - "requirements": ["aiowebostv==0.4.2"], + "requirements": ["aiowebostv==0.4.0"], "ssdp": [ { "st": "urn:lge-com:service:webos-second-screen:1" diff --git a/homeassistant/components/webostv/media_player.py b/homeassistant/components/webostv/media_player.py index 239780e3f01..6aef47515db 100644 --- a/homeassistant/components/webostv/media_player.py 
+++ b/homeassistant/components/webostv/media_player.py @@ -239,8 +239,7 @@ class LgWebOSMediaPlayerEntity(RestoreEntity, MediaPlayerEntity): self._attr_assumed_state = True if ( - self._client.is_on - and self._client.media_state is not None + self._client.media_state is not None and self._client.media_state.get("foregroundAppInfo") is not None ): self._attr_assumed_state = False @@ -422,13 +421,13 @@ class LgWebOSMediaPlayerEntity(RestoreEntity, MediaPlayerEntity): partial_match_channel_id = channel["channelId"] if perfect_match_channel_id is not None: - _LOGGER.debug( + _LOGGER.info( "Switching to channel <%s> with perfect match", perfect_match_channel_id, ) await self._client.set_channel(perfect_match_channel_id) elif partial_match_channel_id is not None: - _LOGGER.debug( + _LOGGER.info( "Switching to channel <%s> with partial match", partial_match_channel_id, ) diff --git a/homeassistant/components/webostv/strings.json b/homeassistant/components/webostv/strings.json index 3ceab5f50a3..1d045d48ba5 100644 --- a/homeassistant/components/webostv/strings.json +++ b/homeassistant/components/webostv/strings.json @@ -3,7 +3,7 @@ "flow_title": "LG webOS Smart TV", "step": { "user": { - "description": "Turn on TV, fill the following fields and select **Submit**", + "description": "Turn on TV, fill the following fields click submit", "data": { "host": "[%key:common::config_flow::data::host%]", "name": "[%key:common::config_flow::data::name%]" @@ -14,7 +14,7 @@ }, "pairing": { "title": "webOS TV Pairing", - "description": "Select **Submit** and accept the pairing request on your TV.\n\n![Image](/static/images/config_webos.png)" + "description": "Click submit and accept the pairing request on your TV.\n\n![Image](/static/images/config_webos.png)" }, "reauth_confirm": { "title": "[%key:component::webostv::config::step::pairing::title%]", @@ -22,7 +22,7 @@ } }, "error": { - "cannot_connect": "Failed to connect, please turn on your TV or check the IP address" + "cannot_connect": "Failed to connect, please turn on your TV or check ip address" }, "abort": { "error_pairing": "Connected to LG webOS TV but not paired", diff --git a/homeassistant/components/websocket_api/commands.py b/homeassistant/components/websocket_api/commands.py index cfa132b71eb..f66930c8d00 100644 --- a/homeassistant/components/websocket_api/commands.py +++ b/homeassistant/components/websocket_api/commands.py @@ -36,10 +36,6 @@ from homeassistant.exceptions import ( ) from homeassistant.helpers import config_validation as cv, entity, template from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entityfilter import ( - INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA, - convert_include_exclude_filter, -) from homeassistant.helpers.event import ( TrackTemplate, TrackTemplateResult, @@ -370,17 +366,14 @@ def _send_handle_get_states_response( @callback def _forward_entity_changes( send_message: Callable[[str | bytes | dict[str, Any]], None], - entity_ids: set[str] | None, - entity_filter: Callable[[str], bool] | None, + entity_ids: set[str], user: User, message_id_as_bytes: bytes, event: Event[EventStateChangedData], ) -> None: """Forward entity state changed events to websocket.""" entity_id = event.data["entity_id"] - if (entity_ids and entity_id not in entity_ids) or ( - entity_filter and not entity_filter(entity_id) - ): + if entity_ids and entity_id not in entity_ids: return # We have to lookup the permissions again because the user might have # changed since the subscription was created. 
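The subscribe_entities path in these hunks filters every state_changed event twice: first against the optional entity_ids set, then against the caller's read permissions, which are re-evaluated per event because the user's policy may change after the subscription was created. A standalone sketch of that per-event gate, with a plain callable standing in for Home Assistant's permission object:

from collections.abc import Callable


def make_entity_change_forwarder(
    send_message: Callable[[dict], None],
    entity_ids: set[str],
    can_read: Callable[[str], bool],
) -> Callable[[dict], None]:
    """Return a callback that forwards state-changed events selectively."""

    def forward(event: dict) -> None:
        entity_id = event["entity_id"]
        # Skip entities the subscriber did not ask for.
        if entity_ids and entity_id not in entity_ids:
            return
        # Permissions are checked per event, not once at subscribe time,
        # so policy changes take effect without resubscribing.
        if not can_read(entity_id):
            return
        send_message(event)

    return forward


sent: list[dict] = []
forward = make_entity_change_forwarder(
    sent.append,
    {"light.kitchen"},
    lambda entity_id: entity_id.startswith("light."),
)
forward({"entity_id": "light.kitchen", "new_state": "on"})
forward({"entity_id": "sensor.out_of_scope", "new_state": "5"})
assert [e["entity_id"] for e in sent] == ["light.kitchen"]

Re-checking access inside the callback costs a lookup per event, but revoked permissions take effect immediately instead of lingering for the life of the subscription.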
@@ -388,7 +381,7 @@ def _forward_entity_changes( if ( not user.is_admin and not permissions.access_all_entities(POLICY_READ) - and not permissions.check_entity(entity_id, POLICY_READ) + and not permissions.check_entity(event.data["entity_id"], POLICY_READ) ): return send_message(messages.cached_state_diff_message(message_id_as_bytes, event)) @@ -399,55 +392,43 @@ def _forward_entity_changes( { vol.Required("type"): "subscribe_entities", vol.Optional("entity_ids"): cv.entity_ids, - **INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.schema, } ) def handle_subscribe_entities( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Handle subscribe entities command.""" - entity_ids = set(msg.get("entity_ids", [])) or None - _filter = convert_include_exclude_filter(msg) - entity_filter = None if _filter.empty_filter else _filter.get_filter() + entity_ids = set(msg.get("entity_ids", [])) # We must never await between sending the states and listening for # state changed events or we will introduce a race condition # where some states are missed states = _async_get_allowed_states(hass, connection) - msg_id = msg["id"] - message_id_as_bytes = str(msg_id).encode() - connection.subscriptions[msg_id] = hass.bus.async_listen( + message_id_as_bytes = str(msg["id"]).encode() + connection.subscriptions[msg["id"]] = hass.bus.async_listen( EVENT_STATE_CHANGED, partial( _forward_entity_changes, connection.send_message, entity_ids, - entity_filter, connection.user, message_id_as_bytes, ), ) - connection.send_result(msg_id) + connection.send_result(msg["id"]) # JSON serialize here so we can recover if it blows up due to the # state machine containing unserializable data. This command is required # to succeed for the UI to show. try: - if entity_ids or entity_filter: - serialized_states = [ - state.as_compressed_state_json - for state in states - if (not entity_ids or state.entity_id in entity_ids) - and (not entity_filter or entity_filter(state.entity_id)) - ] - else: - # Fast path when not filtering - serialized_states = [state.as_compressed_state_json for state in states] + serialized_states = [ + state.as_compressed_state_json + for state in states + if not entity_ids or state.entity_id in entity_ids + ] except (ValueError, TypeError): pass else: - _send_handle_entities_init_response( - connection, message_id_as_bytes, serialized_states - ) + _send_handle_entities_init_response(connection, msg["id"], serialized_states) return serialized_states = [] @@ -462,22 +443,18 @@ def handle_subscribe_entities( ), ) - _send_handle_entities_init_response( - connection, message_id_as_bytes, serialized_states - ) + _send_handle_entities_init_response(connection, msg["id"], serialized_states) def _send_handle_entities_init_response( - connection: ActiveConnection, - message_id_as_bytes: bytes, - serialized_states: list[bytes], + connection: ActiveConnection, msg_id: int, serialized_states: list[bytes] ) -> None: """Send handle entities init response.""" connection.send_message( b"".join( ( b'{"id":', - message_id_as_bytes, + str(msg_id).encode(), b',"type":"event","event":{"a":{', b",".join(serialized_states), b"}}}", @@ -859,9 +836,9 @@ def handle_fire_event( @decorators.websocket_command( { vol.Required("type"): "validate_config", - vol.Optional("triggers"): cv.match_all, - vol.Optional("conditions"): cv.match_all, - vol.Optional("actions"): cv.match_all, + vol.Optional("trigger"): cv.match_all, + vol.Optional("condition"): cv.match_all, + vol.Optional("action"): cv.match_all, } ) 
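The validate_config command above walks a fixed table of message keys and validators and reports a per-key verdict. A simplified, synchronous sketch of that dispatch with toy validators in place of the config-validation helpers (the real handler awaits async validators and catches voluptuous errors):

from collections.abc import Callable
from typing import Any


def _require_platform(conf: dict[str, Any]) -> dict[str, Any]:
    """Toy validator: every block must declare a platform."""
    if "platform" not in conf:
        raise ValueError("required key 'platform' not provided")
    return conf


VALIDATORS: tuple[tuple[str, Callable[[dict[str, Any]], dict[str, Any]]], ...] = (
    ("trigger", _require_platform),
    ("condition", _require_platform),
    ("action", _require_platform),
)


def handle_validate_config(msg: dict[str, Any]) -> dict[str, dict[str, Any]]:
    """Validate each present key independently and collect the results."""
    result: dict[str, dict[str, Any]] = {}
    for key, validator in VALIDATORS:
        if key not in msg:
            continue
        try:
            validator(msg[key])
        except ValueError as err:
            result[key] = {"valid": False, "error": str(err)}
        else:
            result[key] = {"valid": True, "error": None}
    return result


assert handle_validate_config({"trigger": {"platform": "state"}, "action": {}}) == {
    "trigger": {"valid": True, "error": None},
    "action": {"valid": False, "error": "required key 'platform' not provided"},
}

Keys absent from the message are simply skipped, so callers can validate any subset of trigger, condition, and action in one round trip.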
@decorators.async_response @@ -876,13 +853,9 @@ async def handle_validate_config( result = {} for key, schema, validator in ( - ("triggers", cv.TRIGGER_SCHEMA, trigger.async_validate_trigger_config), - ( - "conditions", - cv.CONDITIONS_SCHEMA, - condition.async_validate_conditions_config, - ), - ("actions", cv.SCRIPT_SCHEMA, script.async_validate_actions_config), + ("trigger", cv.TRIGGER_SCHEMA, trigger.async_validate_trigger_config), + ("condition", cv.CONDITIONS_SCHEMA, condition.async_validate_conditions_config), + ("action", cv.SCRIPT_SCHEMA, script.async_validate_actions_config), ): if key not in msg: continue diff --git a/homeassistant/components/websocket_api/connection.py b/homeassistant/components/websocket_api/connection.py index 62f1adc39b9..ef70df4a123 100644 --- a/homeassistant/components/websocket_api/connection.py +++ b/homeassistant/components/websocket_api/connection.py @@ -16,12 +16,6 @@ from homeassistant.helpers.http import current_request from homeassistant.util.json import JsonValueType from . import const, messages -from .messages import ( - error_message, - event_message, - message_to_json_bytes, - result_message, -) from .util import describe_request if TYPE_CHECKING: @@ -132,12 +126,12 @@ class ActiveConnection: @callback def send_result(self, msg_id: int, result: Any | None = None) -> None: """Send a result message.""" - self.send_message(message_to_json_bytes(result_message(msg_id, result))) + self.send_message(messages.result_message(msg_id, result)) @callback def send_event(self, msg_id: int, event: Any | None = None) -> None: """Send a event message.""" - self.send_message(message_to_json_bytes(event_message(msg_id, event))) + self.send_message(messages.event_message(msg_id, event)) @callback def send_error( @@ -151,15 +145,13 @@ class ActiveConnection: ) -> None: """Send an error message.""" self.send_message( - message_to_json_bytes( - error_message( - msg_id, - code, - message, - translation_key=translation_key, - translation_domain=translation_domain, - translation_placeholders=translation_placeholders, - ) + messages.error_message( + msg_id, + code, + message, + translation_key=translation_key, + translation_domain=translation_domain, + translation_placeholders=translation_placeholders, ) ) @@ -231,7 +223,7 @@ class ActiveConnection: try: if schema is False: if len(msg) > 2: - raise vol.Invalid("extra keys not allowed") # noqa: TRY301 + raise vol.Invalid("extra keys not allowed") handler(self.hass, self, msg) else: handler(self.hass, self, schema(msg)) diff --git a/homeassistant/components/websocket_api/decorators.py b/homeassistant/components/websocket_api/decorators.py index 2c8a6cc02f1..b9924bc91d1 100644 --- a/homeassistant/components/websocket_api/decorators.py +++ b/homeassistant/components/websocket_api/decorators.py @@ -145,7 +145,7 @@ def websocket_command( def decorate(func: const.WebSocketCommandHandler) -> const.WebSocketCommandHandler: """Decorate ws command function.""" - if is_dict and len(schema) == 1: # type: ignore[arg-type] # type only empty schema + if is_dict and len(schema) == 1: # type only empty schema func._ws_schema = False # type: ignore[attr-defined] # noqa: SLF001 elif is_dict: func._ws_schema = messages.BASE_COMMAND_MESSAGE_SCHEMA.extend(schema) # type: ignore[attr-defined] # noqa: SLF001 diff --git a/homeassistant/components/websocket_api/http.py b/homeassistant/components/websocket_api/http.py index e7d57aebab6..c65c4c65988 100644 --- a/homeassistant/components/websocket_api/http.py +++ 
b/homeassistant/components/websocket_api/http.py @@ -11,7 +11,6 @@ import logging from typing import TYPE_CHECKING, Any, Final from aiohttp import WSMsgType, web -from aiohttp.http_websocket import WebSocketWriter from homeassistant.components.http import KEY_HASS, HomeAssistantView from homeassistant.const import EVENT_HOMEASSISTANT_STOP @@ -36,8 +35,6 @@ from .error import Disconnect from .messages import message_to_json_bytes from .util import describe_request -CLOSE_MSG_TYPES = {WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING} - if TYPE_CHECKING: from .connection import ActiveConnection @@ -127,9 +124,7 @@ class WebSocketHandler: return "finished connection" async def _writer( - self, - connection: ActiveConnection, - send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]], + self, send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]] ) -> None: """Write outgoing messages.""" # Variables are set locally to avoid lookups in the loop @@ -139,7 +134,7 @@ class WebSocketHandler: loop = self._loop is_debug_log_enabled = partial(logger.isEnabledFor, logging.DEBUG) debug = logger.debug - can_coalesce = connection.can_coalesce + can_coalesce = self._connection and self._connection.can_coalesce ready_message_count = len(message_queue) # Exceptions if Socket disconnected or cancelled by connection handler try: @@ -153,7 +148,7 @@ class WebSocketHandler: if not can_coalesce: # coalesce may be enabled later in the connection - can_coalesce = connection.can_coalesce + can_coalesce = self._connection and self._connection.can_coalesce if not can_coalesce or ready_message_count == 1: message = message_queue.popleft() @@ -303,23 +298,19 @@ class WebSocketHandler: request = self._request wsock = self._wsock logger = self._logger + debug = logger.debug hass = self._hass + is_enabled_for = logger.isEnabledFor + logging_debug = logging.DEBUG try: async with asyncio.timeout(10): await wsock.prepare(request) - except ConnectionResetError: - # Likely the client disconnected before we prepared the websocket - logger.debug( - "%s: Connection reset by peer while preparing WebSocket", - self.description, - ) - return wsock except TimeoutError: - logger.warning("Timeout preparing request from %s", request.remote) + self._logger.warning("Timeout preparing request from %s", request.remote) return wsock - logger.debug("%s: Connected from %s", self.description, request.remote) + debug("%s: Connected from %s", self.description, request.remote) self._handle_task = asyncio.current_task() unsub_stop = hass.bus.async_listen( @@ -330,29 +321,138 @@ class WebSocketHandler: if TYPE_CHECKING: assert writer is not None - send_bytes_text = partial(writer.send_frame, opcode=WSMsgType.TEXT) + send_bytes_text = partial(writer.send, binary=False) auth = AuthPhase( logger, hass, self._send_message, self._cancel, request, send_bytes_text ) - connection: ActiveConnection | None = None - disconnect_warn: str | None = None + connection = None + disconnect_warn = None try: - connection = await self._async_handle_auth_phase(auth, send_bytes_text) - self._async_increase_writer_limit(writer) - await self._async_websocket_command_phase(connection) - except asyncio.CancelledError: - logger.debug("%s: Connection cancelled", self.description) - raise - except Disconnect as ex: - if disconnect_msg := str(ex): - disconnect_warn = disconnect_msg + await send_bytes_text(AUTH_REQUIRED_MESSAGE) + + # Auth Phase + try: + msg = await wsock.receive(10) + except TimeoutError as err: + disconnect_warn = "Did not receive auth message 
within 10 seconds" + raise Disconnect from err + + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): + raise Disconnect + + if msg.type != WSMsgType.TEXT: + disconnect_warn = "Received non-Text message." + raise Disconnect + + try: + auth_msg_data = json_loads(msg.data) + except ValueError as err: + disconnect_warn = "Received invalid JSON." + raise Disconnect from err + + if is_enabled_for(logging_debug): + debug("%s: Received %s", self.description, auth_msg_data) + connection = await auth.async_handle(auth_msg_data) + # As the webserver is now started before the start + # event we do not want to block for websocket responses + # + # We only start the writer queue after the auth phase is completed + # since there is no need to queue messages before the auth phase + self._connection = connection + self._writer_task = create_eager_task(self._writer(send_bytes_text)) + hass.data[DATA_CONNECTIONS] = hass.data.get(DATA_CONNECTIONS, 0) + 1 + async_dispatcher_send(hass, SIGNAL_WEBSOCKET_CONNECTED) + + self._authenticated = True + # + # + # Our websocket implementation is backed by a deque + # + # As back-pressure builds, the queue will back up and use more memory + # until we disconnect the client when the queue size reaches + # MAX_PENDING_MSG. When we are generating a high volume of websocket messages, + # we hit a bottleneck in aiohttp where it will wait for + # the buffer to drain before sending the next message and messages + # start backing up in the queue. + # + # https://github.com/aio-libs/aiohttp/issues/1367 added drains + # to the websocket writer to handle malicious clients and network issues. + # The drain causes multiple problems for us since the buffer cannot be + # drained fast enough when we deliver a high volume or large messages: + # + # - We end up disconnecting the client. The client will then reconnect, + # and the cycle repeats itself, which results in a significant amount of + # CPU usage. + # + # - Messages latency increases because messages cannot be moved into + # the TCP buffer because it is blocked waiting for the drain to happen because + # of the low default limit of 16KiB. By increasing the limit, we instead + # rely on the underlying TCP buffer and stack to deliver the messages which + # can typically happen much faster. + # + # After the auth phase is completed, and we are not concerned about + # the user being a malicious client, we set the limit to force a drain + # to 1MiB. 1MiB is the maximum expected size of the serialized entity + # registry, which is the largest message we usually send. + # + # https://github.com/aio-libs/aiohttp/commit/b3c80ee3f7d5d8f0b8bc27afe52e4d46621eaf99 + # added a way to set the limit, but there is no way to actually + # reach the code to set the limit, so we have to set it directly. + # + writer._limit = 2**20 # noqa: SLF001 + async_handle_str = connection.async_handle + async_handle_binary = connection.async_handle_binary + + # Command phase + while not wsock.closed: + msg = await wsock.receive() + + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): + break + + if msg.type is WSMsgType.BINARY: + if len(msg.data) < 1: + disconnect_warn = "Received invalid binary message." + break + handler = msg.data[0] + payload = msg.data[1:] + async_handle_binary(handler, payload) + continue + + if msg.type is not WSMsgType.TEXT: + disconnect_warn = "Received non-Text message." + break + + try: + command_msg_data = json_loads(msg.data) + except ValueError: + disconnect_warn = "Received invalid JSON." 
+ break + + if is_enabled_for(logging_debug): + debug("%s: Received %s", self.description, command_msg_data) + + # command_msg_data is always deserialized from JSON as a list + if type(command_msg_data) is not list: # noqa: E721 + async_handle_str(command_msg_data) + continue + + for split_msg in command_msg_data: + async_handle_str(split_msg) + + except asyncio.CancelledError: + debug("%s: Connection cancelled", self.description) + raise + + except Disconnect as ex: + debug("%s: Connection closed by client: %s", self.description, ex) - logger.debug("%s: Connection closed by client: %s", self.description, ex) except Exception: - logger.exception( + self._logger.exception( "%s: Unexpected error inside websocket API", self.description ) + finally: unsub_stop() @@ -365,175 +465,38 @@ class WebSocketHandler: if self._ready_future and not self._ready_future.done(): self._ready_future.set_result(len(self._message_queue)) - await self._async_cleanup_writer_and_close(disconnect_warn, connection) + # If the writer gets canceled we still need to close the websocket + # so we have another finally block to make sure we close the websocket + # if the writer gets canceled. + try: + if self._writer_task: + await self._writer_task + finally: + try: + # Make sure all error messages are written before closing + await wsock.close() + finally: + if disconnect_warn is None: + debug("%s: Disconnected", self.description) + else: + self._logger.warning( + "%s: Disconnected: %s", self.description, disconnect_warn + ) + + if connection is not None: + hass.data[DATA_CONNECTIONS] -= 1 + self._connection = None + + async_dispatcher_send(hass, SIGNAL_WEBSOCKET_DISCONNECTED) + + # Break reference cycles to make sure GC can happen sooner + self._wsock = None # type: ignore[assignment] + self._request = None # type: ignore[assignment] + self._hass = None # type: ignore[assignment] + self._logger = None # type: ignore[assignment] + self._message_queue = None # type: ignore[assignment] + self._handle_task = None + self._writer_task = None + self._ready_future = None return wsock - - async def _async_handle_auth_phase( - self, - auth: AuthPhase, - send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]], - ) -> ActiveConnection: - """Handle the auth phase of the websocket connection.""" - await send_bytes_text(AUTH_REQUIRED_MESSAGE) - - # Auth Phase - try: - msg = await self._wsock.receive(10) - except TimeoutError as err: - raise Disconnect("Did not receive auth message within 10 seconds") from err - - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): - raise Disconnect("Received close message during auth phase") - - if msg.type is not WSMsgType.TEXT: - raise Disconnect("Received non-Text message during auth phase") - - try: - auth_msg_data = json_loads(msg.data) - except ValueError as err: - raise Disconnect("Received invalid JSON during auth phase") from err - - if self._logger.isEnabledFor(logging.DEBUG): - self._logger.debug("%s: Received %s", self.description, auth_msg_data) - connection = await auth.async_handle(auth_msg_data) - # As the webserver is now started before the start - # event we do not want to block for websocket responses - # - # We only start the writer queue after the auth phase is completed - # since there is no need to queue messages before the auth phase - self._connection = connection - self._writer_task = create_eager_task(self._writer(connection, send_bytes_text)) - self._hass.data[DATA_CONNECTIONS] = self._hass.data.get(DATA_CONNECTIONS, 0) + 1 - 
async_dispatcher_send(self._hass, SIGNAL_WEBSOCKET_CONNECTED) - - self._authenticated = True - return connection - - @callback - def _async_increase_writer_limit(self, writer: WebSocketWriter) -> None: - # - # - # Our websocket implementation is backed by a deque - # - # As back-pressure builds, the queue will back up and use more memory - # until we disconnect the client when the queue size reaches - # MAX_PENDING_MSG. When we are generating a high volume of websocket messages, - # we hit a bottleneck in aiohttp where it will wait for - # the buffer to drain before sending the next message and messages - # start backing up in the queue. - # - # https://github.com/aio-libs/aiohttp/issues/1367 added drains - # to the websocket writer to handle malicious clients and network issues. - # The drain causes multiple problems for us since the buffer cannot be - # drained fast enough when we deliver a high volume or large messages: - # - # - We end up disconnecting the client. The client will then reconnect, - # and the cycle repeats itself, which results in a significant amount of - # CPU usage. - # - # - Messages latency increases because messages cannot be moved into - # the TCP buffer because it is blocked waiting for the drain to happen because - # of the low default limit of 16KiB. By increasing the limit, we instead - # rely on the underlying TCP buffer and stack to deliver the messages which - # can typically happen much faster. - # - # After the auth phase is completed, and we are not concerned about - # the user being a malicious client, we set the limit to force a drain - # to 1MiB. 1MiB is the maximum expected size of the serialized entity - # registry, which is the largest message we usually send. - # - # https://github.com/aio-libs/aiohttp/commit/b3c80ee3f7d5d8f0b8bc27afe52e4d46621eaf99 - # added a way to set the limit, but there is no way to actually - # reach the code to set the limit, so we have to set it directly. 
- # - writer._limit = 2**20 # noqa: SLF001 - - async def _async_websocket_command_phase( - self, connection: ActiveConnection - ) -> None: - """Handle the command phase of the websocket connection.""" - wsock = self._wsock - async_handle_str = connection.async_handle - async_handle_binary = connection.async_handle_binary - _debug_enabled = partial(self._logger.isEnabledFor, logging.DEBUG) - - # Command phase - while not wsock.closed: - msg = await wsock.receive() - msg_type = msg.type - msg_data = msg.data - - if msg_type in CLOSE_MSG_TYPES: - break - - if msg_type is WSMsgType.BINARY: - if len(msg_data) < 1: - raise Disconnect("Received invalid binary message.") - - handler = msg_data[0] - payload = msg_data[1:] - async_handle_binary(handler, payload) - continue - - if msg_type is not WSMsgType.TEXT: - raise Disconnect("Received non-Text message.") - - try: - command_msg_data = json_loads(msg_data) - except ValueError as ex: - raise Disconnect("Received invalid JSON.") from ex - - if _debug_enabled(): - self._logger.debug( - "%s: Received %s", self.description, command_msg_data - ) - - # command_msg_data is always deserialized from JSON as a list - if type(command_msg_data) is not list: # noqa: E721 - async_handle_str(command_msg_data) - continue - - for split_msg in command_msg_data: - async_handle_str(split_msg) - - async def _async_cleanup_writer_and_close( - self, disconnect_warn: str | None, connection: ActiveConnection | None - ) -> None: - """Cleanup the writer and close the websocket.""" - # If the writer gets canceled we still need to close the websocket - # so we have another finally block to make sure we close the websocket - # if the writer gets canceled. - wsock = self._wsock - hass = self._hass - logger = self._logger - try: - if self._writer_task: - await self._writer_task - finally: - try: - # Make sure all error messages are written before closing - await wsock.close() - finally: - if disconnect_warn is None: - logger.debug("%s: Disconnected", self.description) - else: - logger.warning( - "%s: Disconnected: %s", self.description, disconnect_warn - ) - - if connection is not None: - hass.data[DATA_CONNECTIONS] -= 1 - self._connection = None - - async_dispatcher_send(hass, SIGNAL_WEBSOCKET_DISCONNECTED) - - # Break reference cycles to make sure GC can happen sooner - self._wsock = None # type: ignore[assignment] - self._request = None # type: ignore[assignment] - self._hass = None # type: ignore[assignment] - self._logger = None # type: ignore[assignment] - self._message_queue = None # type: ignore[assignment] - self._handle_task = None - self._writer_task = None - self._ready_future = None diff --git a/homeassistant/components/websocket_api/manifest.json b/homeassistant/components/websocket_api/manifest.json index 315411ea4cf..116bd0ccee8 100644 --- a/homeassistant/components/websocket_api/manifest.json +++ b/homeassistant/components/websocket_api/manifest.json @@ -1,6 +1,7 @@ { "domain": "websocket_api", "name": "Home Assistant WebSocket API", + "after_dependencies": ["recorder"], "codeowners": ["@home-assistant/core"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/websocket_api", diff --git a/homeassistant/components/websocket_api/messages.py b/homeassistant/components/websocket_api/messages.py index 0a8200c5700..238f8be0c3b 100644 --- a/homeassistant/components/websocket_api/messages.py +++ b/homeassistant/components/websocket_api/messages.py @@ -224,12 +224,9 @@ def _state_diff_event( if (old_attributes := old_state.attributes) 
!= ( new_attributes := new_state.attributes ): - if added := { - key: value - for key, value in new_attributes.items() - if key not in old_attributes or old_attributes[key] != value - }: - additions[COMPRESSED_STATE_ATTRIBUTES] = added + for key, value in new_attributes.items(): + if old_attributes.get(key) != value: + additions.setdefault(COMPRESSED_STATE_ATTRIBUTES, {})[key] = value if removed := old_attributes.keys() - new_attributes: # sets are not JSON serializable by default so we convert to list # here if there are any values to avoid jumping into the json_encoder_default diff --git a/homeassistant/components/websocket_api/strings.json b/homeassistant/components/websocket_api/strings.json index afef732b8f5..10b95637b6b 100644 --- a/homeassistant/components/websocket_api/strings.json +++ b/homeassistant/components/websocket_api/strings.json @@ -1,7 +1,7 @@ { "exceptions": { "child_service_not_found": { - "message": "Action {domain}.{service} uses action {child_domain}.{child_service} which was not found." + "message": "Service {domain}.{service} called service {child_domain}.{child_service} which was not found." } } } diff --git a/homeassistant/components/weheat/__init__.py b/homeassistant/components/weheat/__init__.py deleted file mode 100644 index d924d6ceaab..00000000000 --- a/homeassistant/components/weheat/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -"""The Weheat integration.""" - -from __future__ import annotations - -from weheat.abstractions.discovery import HeatPumpDiscovery -from weheat.exceptions import UnauthorizedException - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ACCESS_TOKEN, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers.config_entry_oauth2_flow import ( - OAuth2Session, - async_get_config_entry_implementation, -) - -from .const import API_URL, LOGGER -from .coordinator import WeheatDataUpdateCoordinator - -PLATFORMS: list[Platform] = [Platform.SENSOR] - -type WeheatConfigEntry = ConfigEntry[list[WeheatDataUpdateCoordinator]] - - -async def async_setup_entry(hass: HomeAssistant, entry: WeheatConfigEntry) -> bool: - """Set up Weheat from a config entry.""" - implementation = await async_get_config_entry_implementation(hass, entry) - - session = OAuth2Session(hass, entry, implementation) - - token = session.token[CONF_ACCESS_TOKEN] - entry.runtime_data = [] - - # fetch a list of the heat pumps the entry can access - try: - discovered_heat_pumps = await HeatPumpDiscovery.discover_active(API_URL, token) - except UnauthorizedException as error: - raise ConfigEntryAuthFailed from error - - for pump_info in discovered_heat_pumps: - LOGGER.debug("Adding %s", pump_info) - # for each pump, add a coordinator - new_coordinator = WeheatDataUpdateCoordinator(hass, session, pump_info) - - await new_coordinator.async_config_entry_first_refresh() - - entry.runtime_data.append(new_coordinator) - - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - - return True - - -async def async_unload_entry(hass: HomeAssistant, entry: WeheatConfigEntry) -> bool: - """Unload a config entry.""" - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/weheat/api.py b/homeassistant/components/weheat/api.py deleted file mode 100644 index b1f5c0b3eff..00000000000 --- a/homeassistant/components/weheat/api.py +++ /dev/null @@ -1,28 +0,0 @@ -"""API for Weheat bound to Home Assistant 
OAuth.""" - -from aiohttp import ClientSession -from weheat.abstractions import AbstractAuth - -from homeassistant.const import CONF_ACCESS_TOKEN -from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session - -from .const import API_URL - - -class AsyncConfigEntryAuth(AbstractAuth): - """Provide Weheat authentication tied to an OAuth2 based config entry.""" - - def __init__( - self, - websession: ClientSession, - oauth_session: OAuth2Session, - ) -> None: - """Initialize Weheat auth.""" - super().__init__(websession, host=API_URL) - self._oauth_session = oauth_session - - async def async_get_access_token(self) -> str: - """Return a valid access token.""" - await self._oauth_session.async_ensure_token_valid() - - return self._oauth_session.token[CONF_ACCESS_TOKEN] diff --git a/homeassistant/components/weheat/application_credentials.py b/homeassistant/components/weheat/application_credentials.py deleted file mode 100644 index 3f85d4b0558..00000000000 --- a/homeassistant/components/weheat/application_credentials.py +++ /dev/null @@ -1,11 +0,0 @@ -"""application_credentials platform the Weheat integration.""" - -from homeassistant.components.application_credentials import AuthorizationServer -from homeassistant.core import HomeAssistant - -from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN - - -async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: - """Return authorization server.""" - return AuthorizationServer(authorize_url=OAUTH2_AUTHORIZE, token_url=OAUTH2_TOKEN) diff --git a/homeassistant/components/weheat/config_flow.py b/homeassistant/components/weheat/config_flow.py deleted file mode 100644 index b1a0b5dd4ea..00000000000 --- a/homeassistant/components/weheat/config_flow.py +++ /dev/null @@ -1,62 +0,0 @@ -"""Config flow for Weheat.""" - -from collections.abc import Mapping -import logging -from typing import Any - -from weheat.abstractions.user import get_user_id_from_token - -from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult -from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN -from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler - -from .const import API_URL, DOMAIN, ENTRY_TITLE, OAUTH2_SCOPES - - -class OAuth2FlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): - """Config flow to handle Weheat OAuth2 authentication.""" - - DOMAIN = DOMAIN - - @property - def logger(self) -> logging.Logger: - """Return logger.""" - return logging.getLogger(__name__) - - @property - def extra_authorize_data(self) -> dict[str, str]: - """Extra data that needs to be appended to the authorize url.""" - return { - "scope": " ".join(OAUTH2_SCOPES), - } - - async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: - """Override the create entry method to change to the step to find the heat pumps.""" - # get the user id and use that as unique id for this entry - user_id = await get_user_id_from_token( - API_URL, data[CONF_TOKEN][CONF_ACCESS_TOKEN] - ) - await self.async_set_unique_id(user_id) - if self.source != SOURCE_REAUTH: - self._abort_if_unique_id_configured() - - return self.async_create_entry(title=ENTRY_TITLE, data=data) - - self._abort_if_unique_id_mismatch(reason="wrong_account") - return self.async_update_reload_and_abort( - self._get_reauth_entry(), data_updates=data - ) - - async def async_step_reauth( - self, entry_data: Mapping[str, Any] - ) -> ConfigFlowResult: - """Perform reauth upon an API authentication error.""" - return await self.async_step_reauth_confirm() - 
- async def async_step_reauth_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Confirm reauth dialog.""" - if user_input is None: - return self.async_show_form(step_id="reauth_confirm") - return await self.async_step_user() diff --git a/homeassistant/components/weheat/const.py b/homeassistant/components/weheat/const.py deleted file mode 100644 index e33fd983572..00000000000 --- a/homeassistant/components/weheat/const.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Constants for the Weheat integration.""" - -from logging import Logger, getLogger - -DOMAIN = "weheat" -MANUFACTURER = "Weheat" -ENTRY_TITLE = "Weheat cloud" -ERROR_DESCRIPTION = "error_description" - -OAUTH2_AUTHORIZE = ( - "https://auth.weheat.nl/auth/realms/Weheat/protocol/openid-connect/auth/" -) -OAUTH2_TOKEN = ( - "https://auth.weheat.nl/auth/realms/Weheat/protocol/openid-connect/token/" -) -API_URL = "https://api.weheat.nl" -OAUTH2_SCOPES = ["openid", "offline_access"] - - -UPDATE_INTERVAL = 30 - -LOGGER: Logger = getLogger(__package__) - -DISPLAY_PRECISION_WATTS = 0 -DISPLAY_PRECISION_COP = 1 -DISPLAY_PRECISION_WATER_TEMP = 1 diff --git a/homeassistant/components/weheat/coordinator.py b/homeassistant/components/weheat/coordinator.py deleted file mode 100644 index a50e9daec18..00000000000 --- a/homeassistant/components/weheat/coordinator.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Define a custom coordinator for the Weheat heatpump integration.""" - -from datetime import timedelta - -from weheat.abstractions.discovery import HeatPumpDiscovery -from weheat.abstractions.heat_pump import HeatPump -from weheat.exceptions import ( - ApiException, - BadRequestException, - ForbiddenException, - NotFoundException, - ServiceException, - UnauthorizedException, -) - -from homeassistant.const import CONF_ACCESS_TOKEN -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed - -from .const import API_URL, DOMAIN, LOGGER, UPDATE_INTERVAL - -EXCEPTIONS = ( - ServiceException, - NotFoundException, - ForbiddenException, - BadRequestException, - ApiException, -) - - -class WeheatDataUpdateCoordinator(DataUpdateCoordinator[HeatPump]): - """A custom coordinator for the Weheat heatpump integration.""" - - def __init__( - self, - hass: HomeAssistant, - session: OAuth2Session, - heat_pump: HeatPumpDiscovery.HeatPumpInfo, - ) -> None: - """Initialize the data coordinator.""" - super().__init__( - hass, - logger=LOGGER, - name=DOMAIN, - update_interval=timedelta(seconds=UPDATE_INTERVAL), - ) - self.heat_pump_info = heat_pump - self._heat_pump_data = HeatPump(API_URL, heat_pump.uuid) - - self.session = session - - @property - def heatpump_id(self) -> str: - """Return the heat pump id.""" - return self.heat_pump_info.uuid - - @property - def readable_name(self) -> str | None: - """Return the readable name of the heat pump.""" - if self.heat_pump_info.name: - return self.heat_pump_info.name - return self.heat_pump_info.model - - @property - def model(self) -> str: - """Return the model of the heat pump.""" - return self.heat_pump_info.model - - def fetch_data(self) -> HeatPump: - """Get the data from the API.""" - try: - self._heat_pump_data.get_status(self.session.token[CONF_ACCESS_TOKEN]) - except UnauthorizedException as error: - raise ConfigEntryAuthFailed from error - except EXCEPTIONS as error: - raise 
UpdateFailed(error) from error - - return self._heat_pump_data - - async def _async_update_data(self) -> HeatPump: - """Fetch data from the API.""" - await self.session.async_ensure_token_valid() - - return await self.hass.async_add_executor_job(self.fetch_data) diff --git a/homeassistant/components/weheat/entity.py b/homeassistant/components/weheat/entity.py deleted file mode 100644 index 079db596e19..00000000000 --- a/homeassistant/components/weheat/entity.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Base entity for Weheat.""" - -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity - -from .const import DOMAIN, MANUFACTURER -from .coordinator import WeheatDataUpdateCoordinator - - -class WeheatEntity(CoordinatorEntity[WeheatDataUpdateCoordinator]): - """Defines a base Weheat entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - coordinator: WeheatDataUpdateCoordinator, - ) -> None: - """Initialize the Weheat entity.""" - super().__init__(coordinator) - - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, coordinator.heatpump_id)}, - name=coordinator.readable_name, - manufacturer=MANUFACTURER, - model=coordinator.model, - ) diff --git a/homeassistant/components/weheat/icons.json b/homeassistant/components/weheat/icons.json deleted file mode 100644 index 6fdae84cfff..00000000000 --- a/homeassistant/components/weheat/icons.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "entity": { - "sensor": { - "power_output": { - "default": "mdi:heat-wave" - }, - "power_input": { - "default": "mdi:lightning-bolt" - }, - "cop": { - "default": "mdi:speedometer" - }, - "ch_inlet_temperature": { - "default": "mdi:radiator" - }, - "outside_temperature": { - "default": "mdi:home-thermometer-outline" - }, - "thermostat_room_temperature": { - "default": "mdi:home-thermometer" - }, - "thermostat_room_temperature_setpoint": { - "default": "mdi:home-thermometer" - }, - "heat_pump_state": { - "default": "mdi:state-machine" - }, - "electricity_used": { - "default": "mdi:flash" - } - } - } -} diff --git a/homeassistant/components/weheat/manifest.json b/homeassistant/components/weheat/manifest.json deleted file mode 100644 index ef89a2f1acb..00000000000 --- a/homeassistant/components/weheat/manifest.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "domain": "weheat", - "name": "Weheat", - "codeowners": ["@jesperraemaekers"], - "config_flow": true, - "dependencies": ["application_credentials"], - "documentation": "https://www.home-assistant.io/integrations/weheat", - "iot_class": "cloud_polling", - "requirements": ["weheat==2024.11.02"] -} diff --git a/homeassistant/components/weheat/sensor.py b/homeassistant/components/weheat/sensor.py deleted file mode 100644 index ef5be9030b9..00000000000 --- a/homeassistant/components/weheat/sensor.py +++ /dev/null @@ -1,212 +0,0 @@ -"""Platform for sensor integration.""" - -from collections.abc import Callable -from dataclasses import dataclass - -from weheat.abstractions.heat_pump import HeatPump - -from homeassistant.components.sensor import ( - SensorDeviceClass, - SensorEntity, - SensorEntityDescription, - SensorStateClass, -) -from homeassistant.const import UnitOfEnergy, UnitOfPower, UnitOfTemperature -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import StateType - -from . 
import WeheatConfigEntry -from .const import ( - DISPLAY_PRECISION_COP, - DISPLAY_PRECISION_WATER_TEMP, - DISPLAY_PRECISION_WATTS, -) -from .coordinator import WeheatDataUpdateCoordinator -from .entity import WeheatEntity - - -@dataclass(frozen=True, kw_only=True) -class WeHeatSensorEntityDescription(SensorEntityDescription): - """Describes Weheat sensor entity.""" - - value_fn: Callable[[HeatPump], StateType] - - -SENSORS = [ - WeHeatSensorEntityDescription( - translation_key="power_output", - key="power_output", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=DISPLAY_PRECISION_WATTS, - value_fn=lambda status: status.power_output, - ), - WeHeatSensorEntityDescription( - translation_key="power_input", - key="power_input", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=DISPLAY_PRECISION_WATTS, - value_fn=lambda status: status.power_input, - ), - WeHeatSensorEntityDescription( - translation_key="cop", - key="cop", - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=DISPLAY_PRECISION_COP, - value_fn=lambda status: status.cop, - ), - WeHeatSensorEntityDescription( - translation_key="water_inlet_temperature", - key="water_inlet_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, - value_fn=lambda status: status.water_inlet_temperature, - ), - WeHeatSensorEntityDescription( - translation_key="water_outlet_temperature", - key="water_outlet_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, - value_fn=lambda status: status.water_outlet_temperature, - ), - WeHeatSensorEntityDescription( - translation_key="ch_inlet_temperature", - key="ch_inlet_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, - value_fn=lambda status: status.water_house_in_temperature, - ), - WeHeatSensorEntityDescription( - translation_key="outside_temperature", - key="outside_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, - value_fn=lambda status: status.air_inlet_temperature, - ), - WeHeatSensorEntityDescription( - translation_key="thermostat_water_setpoint", - key="thermostat_water_setpoint", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, - value_fn=lambda status: status.thermostat_water_setpoint, - ), - WeHeatSensorEntityDescription( - translation_key="thermostat_room_temperature", - key="thermostat_room_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, - value_fn=lambda status: 
status.thermostat_room_temperature, - ), - WeHeatSensorEntityDescription( - translation_key="thermostat_room_temperature_setpoint", - key="thermostat_room_temperature_setpoint", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, - value_fn=lambda status: status.thermostat_room_temperature_setpoint, - ), - WeHeatSensorEntityDescription( - translation_key="heat_pump_state", - key="heat_pump_state", - name=None, - device_class=SensorDeviceClass.ENUM, - options=[s.name.lower() for s in HeatPump.State], - value_fn=( - lambda status: status.heat_pump_state.name.lower() - if status.heat_pump_state - else None - ), - ), - WeHeatSensorEntityDescription( - translation_key="electricity_used", - key="electricity_used", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda status: status.energy_total, - ), -] - - -DHW_SENSORS = [ - WeHeatSensorEntityDescription( - translation_key="dhw_top_temperature", - key="dhw_top_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, - value_fn=lambda status: status.dhw_top_temperature, - ), - WeHeatSensorEntityDescription( - translation_key="dhw_bottom_temperature", - key="dhw_bottom_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, - value_fn=lambda status: status.dhw_bottom_temperature, - ), -] - - -async def async_setup_entry( - hass: HomeAssistant, - entry: WeheatConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the sensors for weheat heat pump.""" - entities = [ - WeheatHeatPumpSensor(coordinator, entity_description) - for entity_description in SENSORS - for coordinator in entry.runtime_data - ] - entities.extend( - WeheatHeatPumpSensor(coordinator, entity_description) - for entity_description in DHW_SENSORS - for coordinator in entry.runtime_data - if coordinator.heat_pump_info.has_dhw - ) - - async_add_entities(entities) - - -class WeheatHeatPumpSensor(WeheatEntity, SensorEntity): - """Defines a Weheat heat pump sensor.""" - - coordinator: WeheatDataUpdateCoordinator - entity_description: WeHeatSensorEntityDescription - - def __init__( - self, - coordinator: WeheatDataUpdateCoordinator, - entity_description: WeHeatSensorEntityDescription, - ) -> None: - """Pass coordinator to CoordinatorEntity.""" - super().__init__(coordinator) - - self.entity_description = entity_description - - self._attr_unique_id = f"{coordinator.heatpump_id}_{entity_description.key}" - - @property - def native_value(self) -> StateType: - """Return the state of the sensor.""" - return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/weheat/strings.json b/homeassistant/components/weheat/strings.json deleted file mode 100644 index 0733024cbed..00000000000 --- a/homeassistant/components/weheat/strings.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "config": { - "step": { - "pick_implementation": { - "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" - }, - "find_devices": { - "title": "Select your heat pump" 
- }, - "reauth_confirm": { - "title": "Re-authenticate with WeHeat", - "description": "You need to re-authenticate with WeHeat to continue" - } - }, - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", - "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", - "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", - "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", - "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", - "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", - "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "no_devices_found": "Could not find any heat pumps on this account", - "wrong_account": "You can only reauthenticate this account with the same user." - }, - "create_entry": { - "default": "[%key:common::config_flow::create_entry::authenticated%]" - } - }, - "entity": { - "sensor": { - "power_output": { - "name": "Output power" - }, - "power_input": { - "name": "Input power" - }, - "cop": { - "name": "COP" - }, - "water_inlet_temperature": { - "name": "Water inlet temperature" - }, - "water_outlet_temperature": { - "name": "Water outlet temperature" - }, - "ch_inlet_temperature": { - "name": "Central heating inlet temperature" - }, - "outside_temperature": { - "name": "Outside temperature" - }, - "thermostat_water_setpoint": { - "name": "Water target temperature" - }, - "thermostat_room_temperature": { - "name": "Current room temperature" - }, - "thermostat_room_temperature_setpoint": { - "name": "Room temperature setpoint" - }, - "dhw_top_temperature": { - "name": "DHW top temperature" - }, - "dhw_bottom_temperature": { - "name": "DHW bottom temperature" - }, - "heat_pump_state": { - "state": { - "standby": "[%key:common::state::standby%]", - "water_check": "Checking water temperature", - "heating": "Heating", - "cooling": "Cooling", - "dhw": "Heating DHW", - "legionella_prevention": "Legionella prevention", - "defrosting": "Defrosting", - "self_test": "Self test", - "manual_control": "Manual control" - } - }, - "electricity_used": { - "name": "Electricity used" - } - } - } -} diff --git a/homeassistant/components/wemo/config_flow.py b/homeassistant/components/wemo/config_flow.py index 361c58953c5..10a9bf5604b 100644 --- a/homeassistant/components/wemo/config_flow.py +++ b/homeassistant/components/wemo/config_flow.py @@ -32,12 +32,16 @@ class WemoFlow(DiscoveryFlowHandler, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the options flow for this handler.""" - return WemoOptionsFlow() + return WemoOptionsFlow(config_entry) class WemoOptionsFlow(OptionsFlow): """Options flow for the WeMo component.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + self.config_entry = config_entry + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/wemo/coordinator.py b/homeassistant/components/wemo/coordinator.py index 1f25c12f7ca..a186b666470 100644 --- 
a/homeassistant/components/wemo/coordinator.py +++ b/homeassistant/components/wemo/coordinator.py @@ -275,7 +275,6 @@ def _device_info(wemo: WeMoDevice) -> DeviceInfo: identifiers={(DOMAIN, wemo.serial_number)}, manufacturer="Belkin", model=wemo.model_name, - model_id=wemo.model, name=wemo.name, sw_version=wemo.firmware_version, ) diff --git a/homeassistant/components/wemo/entity.py b/homeassistant/components/wemo/entity.py index 16ab3ae1173..db64aa3137e 100644 --- a/homeassistant/components/wemo/entity.py +++ b/homeassistant/components/wemo/entity.py @@ -2,11 +2,11 @@ from __future__ import annotations -from collections.abc import Generator import contextlib import logging from pywemo.exceptions import ActionException +from typing_extensions import Generator from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity diff --git a/homeassistant/components/wemo/fan.py b/homeassistant/components/wemo/fan.py index f9d3270aaa0..e1b9aaf2388 100644 --- a/homeassistant/components/wemo/fan.py +++ b/homeassistant/components/wemo/fan.py @@ -67,21 +67,16 @@ async def async_setup_entry( # This will call WemoHumidifier.reset_filter_life() platform.async_register_entity_service( - SERVICE_RESET_FILTER_LIFE, None, WemoHumidifier.reset_filter_life.__name__ + SERVICE_RESET_FILTER_LIFE, {}, WemoHumidifier.reset_filter_life.__name__ ) class WemoHumidifier(WemoBinaryStateEntity, FanEntity): """Representation of a WeMo humidifier.""" - _attr_supported_features = ( - FanEntityFeature.SET_SPEED - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) + _attr_supported_features = FanEntityFeature.SET_SPEED wemo: Humidifier _last_fan_on_mode: FanMode - _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: DeviceCoordinator) -> None: """Initialize the WeMo switch.""" diff --git a/homeassistant/components/wemo/icons.json b/homeassistant/components/wemo/icons.json index af5024afcff..c5ddf5912d6 100644 --- a/homeassistant/components/wemo/icons.json +++ b/homeassistant/components/wemo/icons.json @@ -1,10 +1,6 @@ { "services": { - "set_humidity": { - "service": "mdi:water-percent" - }, - "reset_filter_life": { - "service": "mdi:refresh" - } + "set_humidity": "mdi:water-percent", + "reset_filter_life": "mdi:refresh" } } diff --git a/homeassistant/components/whirlpool/config_flow.py b/homeassistant/components/whirlpool/config_flow.py index 069a5ca1e4f..7c39b1fbb29 100644 --- a/homeassistant/components/whirlpool/config_flow.py +++ b/homeassistant/components/whirlpool/config_flow.py @@ -12,7 +12,7 @@ from whirlpool.appliancesmanager import AppliancesManager from whirlpool.auth import Auth from whirlpool.backendselector import BackendSelector -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -71,11 +71,14 @@ class WhirlpoolConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Whirlpool Sixth Sense.""" VERSION = 1 + entry: ConfigEntry | None async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with Whirlpool Sixth Sense.""" + + self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def 
async_step_reauth_confirm( @@ -85,10 +88,10 @@ class WhirlpoolConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input: - reauth_entry = self._get_reauth_entry() + assert self.entry is not None password = user_input[CONF_PASSWORD] brand = user_input[CONF_BRAND] - data = {**reauth_entry.data, CONF_PASSWORD: password, CONF_BRAND: brand} + data = {**self.entry.data, CONF_PASSWORD: password, CONF_BRAND: brand} try: await validate_input(self.hass, data) @@ -97,7 +100,9 @@ class WhirlpoolConfigFlow(ConfigFlow, domain=DOMAIN): except (CannotConnect, TimeoutError): errors["base"] = "cannot_connect" else: - return self.async_update_reload_and_abort(reauth_entry, data=data) + self.hass.config_entries.async_update_entry(self.entry, data=data) + await self.hass.config_entries.async_reload(self.entry.entry_id) + return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/whirlpool/strings.json b/homeassistant/components/whirlpool/strings.json index 09257652ece..4b4673b771e 100644 --- a/homeassistant/components/whirlpool/strings.json +++ b/homeassistant/components/whirlpool/strings.json @@ -27,8 +27,7 @@ } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", diff --git a/homeassistant/components/whois/__init__.py b/homeassistant/components/whois/__init__.py index 07116825f29..b9f5938d93b 100644 --- a/homeassistant/components/whois/__init__.py +++ b/homeassistant/components/whois/__init__.py @@ -35,7 +35,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator: DataUpdateCoordinator[Domain | None] = DataUpdateCoordinator( hass, LOGGER, - config_entry=entry, name=f"{DOMAIN}_APK", update_interval=SCAN_INTERVAL, update_method=_async_query_domain, diff --git a/homeassistant/components/wiffi/__init__.py b/homeassistant/components/wiffi/__init__.py index 6cf216011f2..c465bc0d2ca 100644 --- a/homeassistant/components/wiffi/__init__.py +++ b/homeassistant/components/wiffi/__init__.py @@ -7,19 +7,26 @@ import logging from wiffi import WiffiTcpServer from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PORT, Platform +from homeassistant.const import CONF_PORT, CONF_TIMEOUT, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import ( + async_dispatcher_connect, + async_dispatcher_send, +) +from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_track_time_interval +from homeassistant.util.dt import utcnow from .const import ( CHECK_ENTITIES_SIGNAL, CREATE_ENTITY_SIGNAL, + DEFAULT_TIMEOUT, DOMAIN, UPDATE_ENTITY_SIGNAL, ) -from .entity import generate_unique_id _LOGGER = logging.getLogger(__name__) @@ -71,6 +78,11 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok +def generate_unique_id(device, metric): + """Generate a unique string for the entity.""" + return 
f"{device.mac_address.replace(':', '')}-{metric.name}" + + class WiffiIntegrationApi: """API object for wiffi handling. Stored in hass.data.""" @@ -123,3 +135,78 @@ class WiffiIntegrationApi: def _periodic_tick(self, now=None): """Check if any entity has timed out because it has not been updated.""" async_dispatcher_send(self._hass, CHECK_ENTITIES_SIGNAL) + + +class WiffiEntity(Entity): + """Common functionality for all wiffi entities.""" + + _attr_should_poll = False + + def __init__(self, device, metric, options): + """Initialize the base elements of a wiffi entity.""" + self._id = generate_unique_id(device, metric) + self._attr_unique_id = self._id + self._attr_device_info = DeviceInfo( + connections={(dr.CONNECTION_NETWORK_MAC, device.mac_address)}, + identifiers={(DOMAIN, device.mac_address)}, + manufacturer="stall.biz", + model=device.moduletype, + name=f"{device.moduletype} {device.mac_address}", + sw_version=device.sw_version, + configuration_url=device.configuration_url, + ) + self._attr_name = metric.description + self._expiration_date = None + self._value = None + self._timeout = options.get(CONF_TIMEOUT, DEFAULT_TIMEOUT) + + async def async_added_to_hass(self): + """Entity has been added to hass.""" + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"{UPDATE_ENTITY_SIGNAL}-{self._id}", + self._update_value_callback, + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, CHECK_ENTITIES_SIGNAL, self._check_expiration_date + ) + ) + + def reset_expiration_date(self): + """Reset value expiration date. + + Will be called by derived classes after a value update has been received. + """ + self._expiration_date = utcnow() + timedelta(minutes=self._timeout) + + @callback + def _update_value_callback(self, device, metric): + """Update the value of the entity.""" + + @callback + def _check_expiration_date(self): + """Periodically check if entity value has been updated. + + If there are no more updates from the wiffi device, the value will be + set to unavailable. + """ + if ( + self._value is not None + and self._expiration_date is not None + and utcnow() > self._expiration_date + ): + self._value = None + self.async_write_ha_state() + + def _is_measurement_entity(self): + """Measurement entities have a value in present time.""" + return ( + not self._attr_name.endswith("_gestern") and not self._is_metered_entity() + ) + + def _is_metered_entity(self): + """Metered entities have a value that keeps increasing until reset.""" + return self._attr_name.endswith("_pro_h") or self._attr_name.endswith("_heute") diff --git a/homeassistant/components/wiffi/binary_sensor.py b/homeassistant/components/wiffi/binary_sensor.py index b7431b2555c..23aebd122f2 100644 --- a/homeassistant/components/wiffi/binary_sensor.py +++ b/homeassistant/components/wiffi/binary_sensor.py @@ -6,8 +6,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import WiffiEntity from .const import CREATE_ENTITY_SIGNAL -from .entity import WiffiEntity async def async_setup_entry( @@ -17,7 +17,7 @@ async def async_setup_entry( ) -> None: """Set up platform for a new integration. - Called by the HA framework after async_forward_entry_setups has been called + Called by the HA framework after async_forward_entry_setup has been called during initialization of a new integration (= wiffi). 
""" diff --git a/homeassistant/components/wiffi/config_flow.py b/homeassistant/components/wiffi/config_flow.py index 308923597cd..17262dd0276 100644 --- a/homeassistant/components/wiffi/config_flow.py +++ b/homeassistant/components/wiffi/config_flow.py @@ -6,17 +6,11 @@ Used by UI to setup a wiffi integration. from __future__ import annotations import errno -from typing import Any import voluptuous as vol from wiffi import WiffiTcpServer -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow from homeassistant.const import CONF_PORT, CONF_TIMEOUT from homeassistant.core import callback @@ -34,11 +28,9 @@ class WiffiFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Create Wiffi server setup option flow.""" - return OptionsFlowHandler() + return OptionsFlowHandler(config_entry) - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_user(self, user_input=None): """Handle the start of the config flow. Called after wiffi integration has been selected in the 'add integration @@ -79,9 +71,11 @@ class WiffiFlowHandler(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Wiffi server setup option flow.""" - async def async_step_init( - self, user_input: dict[str, int] | None = None - ) -> ConfigFlowResult: + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + self.config_entry = config_entry + + async def async_step_init(self, user_input=None): """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/wiffi/entity.py b/homeassistant/components/wiffi/entity.py deleted file mode 100644 index fd774c930c8..00000000000 --- a/homeassistant/components/wiffi/entity.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Component for wiffi support.""" - -from datetime import timedelta - -from homeassistant.const import CONF_TIMEOUT -from homeassistant.core import callback -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity import Entity -from homeassistant.util.dt import utcnow - -from .const import CHECK_ENTITIES_SIGNAL, DEFAULT_TIMEOUT, DOMAIN, UPDATE_ENTITY_SIGNAL - - -def generate_unique_id(device, metric): - """Generate a unique string for the entity.""" - return f"{device.mac_address.replace(':', '')}-{metric.name}" - - -class WiffiEntity(Entity): - """Common functionality for all wiffi entities.""" - - _attr_should_poll = False - - def __init__(self, device, metric, options): - """Initialize the base elements of a wiffi entity.""" - self._id = generate_unique_id(device, metric) - self._attr_unique_id = self._id - self._attr_device_info = DeviceInfo( - connections={(dr.CONNECTION_NETWORK_MAC, device.mac_address)}, - identifiers={(DOMAIN, device.mac_address)}, - manufacturer="stall.biz", - model=device.moduletype, - name=f"{device.moduletype} {device.mac_address}", - sw_version=device.sw_version, - configuration_url=device.configuration_url, - ) - self._attr_name = metric.description - self._expiration_date = None - self._value = None - self._timeout = options.get(CONF_TIMEOUT, DEFAULT_TIMEOUT) - - async def async_added_to_hass(self): - """Entity has been 
added to hass.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{UPDATE_ENTITY_SIGNAL}-{self._id}", - self._update_value_callback, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, CHECK_ENTITIES_SIGNAL, self._check_expiration_date - ) - ) - - def reset_expiration_date(self): - """Reset value expiration date. - - Will be called by derived classes after a value update has been received. - """ - self._expiration_date = utcnow() + timedelta(minutes=self._timeout) - - @callback - def _update_value_callback(self, device, metric): - """Update the value of the entity.""" - - @callback - def _check_expiration_date(self): - """Periodically check if entity value has been updated. - - If there are no more updates from the wiffi device, the value will be - set to unavailable. - """ - if ( - self._value is not None - and self._expiration_date is not None - and utcnow() > self._expiration_date - ): - self._value = None - self.async_write_ha_state() - - def _is_measurement_entity(self): - """Measurement entities have a value in present time.""" - return ( - not self._attr_name.endswith("_gestern") and not self._is_metered_entity() - ) - - def _is_metered_entity(self): - """Metered entities have a value that keeps increasing until reset.""" - return self._attr_name.endswith("_pro_h") or self._attr_name.endswith("_heute") diff --git a/homeassistant/components/wiffi/sensor.py b/homeassistant/components/wiffi/sensor.py index 699a760685a..7b64628085a 100644 --- a/homeassistant/components/wiffi/sensor.py +++ b/homeassistant/components/wiffi/sensor.py @@ -11,8 +11,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import WiffiEntity from .const import CREATE_ENTITY_SIGNAL -from .entity import WiffiEntity from .wiffi_strings import ( WIFFI_UOM_DEGREE, WIFFI_UOM_LUX, @@ -45,7 +45,7 @@ async def async_setup_entry( ) -> None: """Set up platform for a new integration. - Called by the HA framework after async_forward_entry_setups has been called + Called by the HA framework after async_forward_entry_setup has been called during initialization of a new integration (= wiffi). """ diff --git a/homeassistant/components/wilight/__init__.py b/homeassistant/components/wilight/__init__.py index 5242f84ab93..067197c8a14 100644 --- a/homeassistant/components/wilight/__init__.py +++ b/homeassistant/components/wilight/__init__.py @@ -1,13 +1,20 @@ """The WiLight integration.""" +from typing import Any + +from pywilight.wilight_device import PyWiLightDevice + from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity -from .const import DOMAIN from .parent_device import WiLightParent +DOMAIN = "wilight" + # List the platforms that you want to support. PLATFORMS = [Platform.COVER, Platform.FAN, Platform.LIGHT, Platform.SWITCH] @@ -41,3 +48,51 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: del hass.data[DOMAIN][entry.entry_id] return unload_ok + + +class WiLightDevice(Entity): + """Representation of a WiLight device. + + Contains the common logic for WiLight entities. 
+ """ + + _attr_should_poll = False + _attr_has_entity_name = True + + def __init__(self, api_device: PyWiLightDevice, index: str, item_name: str) -> None: + """Initialize the device.""" + # WiLight specific attributes for every component type + self._device_id = api_device.device_id + self._client = api_device.client + self._index = index + self._status: dict[str, Any] = {} + + self._attr_unique_id = f"{self._device_id}_{index}" + self._attr_device_info = DeviceInfo( + name=item_name, + identifiers={(DOMAIN, self._attr_unique_id)}, + model=api_device.model, + manufacturer="WiLight", + sw_version=api_device.swversion, + via_device=(DOMAIN, self._device_id), + ) + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return bool(self._client.is_connected) + + @callback + def handle_event_callback(self, states: dict[str, Any]) -> None: + """Propagate changes through ha.""" + self._status = states + self.async_write_ha_state() + + async def async_update(self) -> None: + """Synchronize state with api_device.""" + await self._client.status(self._index) + + async def async_added_to_hass(self) -> None: + """Register update callback.""" + self._client.register_status_callback(self.handle_event_callback, self._index) + await self._client.status(self._index) diff --git a/homeassistant/components/wilight/config_flow.py b/homeassistant/components/wilight/config_flow.py index 74663d61d8f..52b3b426c20 100644 --- a/homeassistant/components/wilight/config_flow.py +++ b/homeassistant/components/wilight/config_flow.py @@ -1,6 +1,5 @@ """Config flow to configure WiLight.""" -from typing import Any from urllib.parse import urlparse import pywilight @@ -9,7 +8,7 @@ from homeassistant.components import ssdp from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST -from .const import DOMAIN +from . 
import DOMAIN CONF_SERIAL_NUMBER = "serial_number" CONF_MODEL_NAME = "model_name" @@ -25,14 +24,13 @@ class WiLightFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - _title: str - - def __init__(self) -> None: + def __init__(self): """Initialize the WiLight flow.""" self._host = None self._serial_number = None + self._title = None self._model_name = None - self._wilight_components: list[str] = [] + self._wilight_components = [] self._components_text = "" def _wilight_update(self, host, serial_number, model_name): @@ -91,9 +89,7 @@ class WiLightFlowHandler(ConfigFlow, domain=DOMAIN): self.context["title_placeholders"] = {"name": self._title} return await self.async_step_confirm() - async def async_step_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_confirm(self, user_input=None): """Handle user-confirmation of discovered WiLight.""" if user_input is not None: return self._get_entry() diff --git a/homeassistant/components/wilight/const.py b/homeassistant/components/wilight/const.py deleted file mode 100644 index 29de5093b70..00000000000 --- a/homeassistant/components/wilight/const.py +++ /dev/null @@ -1,3 +0,0 @@ -"""The WiLight integration.""" - -DOMAIN = "wilight" diff --git a/homeassistant/components/wilight/cover.py b/homeassistant/components/wilight/cover.py index 8a5cb45d909..4ae4692db40 100644 --- a/homeassistant/components/wilight/cover.py +++ b/homeassistant/components/wilight/cover.py @@ -20,8 +20,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .entity import WiLightDevice +from . import DOMAIN, WiLightDevice from .parent_device import WiLightParent diff --git a/homeassistant/components/wilight/entity.py b/homeassistant/components/wilight/entity.py deleted file mode 100644 index b8edf44b495..00000000000 --- a/homeassistant/components/wilight/entity.py +++ /dev/null @@ -1,59 +0,0 @@ -"""The WiLight integration.""" - -from typing import Any - -from pywilight.wilight_device import PyWiLightDevice - -from homeassistant.core import callback -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity - -from .const import DOMAIN - - -class WiLightDevice(Entity): - """Representation of a WiLight device. - - Contains the common logic for WiLight entities. 
- """ - - _attr_should_poll = False - _attr_has_entity_name = True - - def __init__(self, api_device: PyWiLightDevice, index: str, item_name: str) -> None: - """Initialize the device.""" - # WiLight specific attributes for every component type - self._device_id = api_device.device_id - self._client = api_device.client - self._index = index - self._status: dict[str, Any] = {} - - self._attr_unique_id = f"{self._device_id}_{index}" - self._attr_device_info = DeviceInfo( - name=item_name, - identifiers={(DOMAIN, self._attr_unique_id)}, - model=api_device.model, - manufacturer="WiLight", - sw_version=api_device.swversion, - via_device=(DOMAIN, self._device_id), - ) - - @property - def available(self) -> bool: - """Return True if entity is available.""" - return bool(self._client.is_connected) - - @callback - def handle_event_callback(self, states: dict[str, Any]) -> None: - """Propagate changes through ha.""" - self._status = states - self.async_write_ha_state() - - async def async_update(self) -> None: - """Synchronize state with api_device.""" - await self._client.status(self._index) - - async def async_added_to_hass(self) -> None: - """Register update callback.""" - self._client.register_status_callback(self.handle_event_callback, self._index) - await self._client.status(self._index) diff --git a/homeassistant/components/wilight/fan.py b/homeassistant/components/wilight/fan.py index 71f1098603b..5c05575c4f8 100644 --- a/homeassistant/components/wilight/fan.py +++ b/homeassistant/components/wilight/fan.py @@ -25,8 +25,7 @@ from homeassistant.util.percentage import ( percentage_to_ordered_list_item, ) -from .const import DOMAIN -from .entity import WiLightDevice +from . import DOMAIN, WiLightDevice from .parent_device import WiLightParent ORDERED_NAMED_FAN_SPEEDS = [WL_SPEED_LOW, WL_SPEED_MEDIUM, WL_SPEED_HIGH] @@ -58,13 +57,7 @@ class WiLightFan(WiLightDevice, FanEntity): _attr_name = None _attr_speed_count = len(ORDERED_NAMED_FAN_SPEEDS) - _attr_supported_features = ( - FanEntityFeature.SET_SPEED - | FanEntityFeature.DIRECTION - | FanEntityFeature.TURN_ON - | FanEntityFeature.TURN_OFF - ) - _enable_turn_on_off_backwards_compatibility = False + _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION def __init__(self, api_device: PyWiLightDevice, index: str, item_name: str) -> None: """Initialize the device.""" diff --git a/homeassistant/components/wilight/icons.json b/homeassistant/components/wilight/icons.json index 48bcae2a301..3c5d0112de1 100644 --- a/homeassistant/components/wilight/icons.json +++ b/homeassistant/components/wilight/icons.json @@ -10,14 +10,8 @@ } }, "services": { - "set_watering_time": { - "service": "mdi:timer" - }, - "set_pause_time": { - "service": "mdi:timer-pause" - }, - "set_trigger": { - "service": "mdi:gesture-tap-button" - } + "set_watering_time": "mdi:timer", + "set_pause_time": "mdi:timer-pause", + "set_trigger": "mdi:gesture-tap-button" } } diff --git a/homeassistant/components/wilight/light.py b/homeassistant/components/wilight/light.py index fbe2499798d..1a51ecd884e 100644 --- a/homeassistant/components/wilight/light.py +++ b/homeassistant/components/wilight/light.py @@ -17,8 +17,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .entity import WiLightDevice +from . 
import DOMAIN, WiLightDevice from .parent_device import WiLightParent diff --git a/homeassistant/components/wilight/parent_device.py b/homeassistant/components/wilight/parent_device.py index 6e71649d8fc..6e96274f0a4 100644 --- a/homeassistant/components/wilight/parent_device.py +++ b/homeassistant/components/wilight/parent_device.py @@ -78,7 +78,7 @@ class WiLightParent: EVENT_HOMEASSISTANT_STOP, lambda x: client.stop() ) - _LOGGER.debug("Connected to WiLight device: %s", api_device.device_id) + _LOGGER.info("Connected to WiLight device: %s", api_device.device_id) await connect(api_device) diff --git a/homeassistant/components/wilight/switch.py b/homeassistant/components/wilight/switch.py index f2a1ce8b0c5..94e39492626 100644 --- a/homeassistant/components/wilight/switch.py +++ b/homeassistant/components/wilight/switch.py @@ -14,8 +14,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .entity import WiLightDevice +from . import DOMAIN, WiLightDevice from .parent_device import WiLightParent from .support import wilight_to_hass_trigger, wilight_trigger as wl_trigger diff --git a/homeassistant/components/wirelesstag/__init__.py b/homeassistant/components/wirelesstag/__init__.py index a32e940073b..710255153c2 100644 --- a/homeassistant/components/wirelesstag/__init__.py +++ b/homeassistant/components/wirelesstag/__init__.py @@ -6,23 +6,50 @@ from requests.exceptions import ConnectTimeout, HTTPError import voluptuous as vol from wirelesstagpy import WirelessTags from wirelesstagpy.exceptions import WirelessTagsException +from wirelesstagpy.sensortag import SensorTag from homeassistant.components import persistent_notification -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import ( + ATTR_BATTERY_LEVEL, + ATTR_VOLTAGE, + CONF_PASSWORD, + CONF_USERNAME, + PERCENTAGE, + SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + UnitOfElectricPotential, +) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import dispatcher_send +from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN, SIGNAL_BINARY_EVENT_UPDATE, SIGNAL_TAG_UPDATE - _LOGGER = logging.getLogger(__name__) + +# Strength of signal in dBm +ATTR_TAG_SIGNAL_STRENGTH = "signal_strength" +# Indicates if tag is out of range or not +ATTR_TAG_OUT_OF_RANGE = "out_of_range" +# Number in percents from max power of tag receiver +ATTR_TAG_POWER_CONSUMPTION = "power_consumption" + + NOTIFICATION_ID = "wirelesstag_notification" NOTIFICATION_TITLE = "Wireless Sensor Tag Setup" +DOMAIN = "wirelesstag" DEFAULT_ENTITY_NAMESPACE = "wirelesstag" +# Template for signal - first parameter is tag_id, +# second, tag manager mac address +SIGNAL_TAG_UPDATE = "wirelesstag.tag_info_updated_{}_{}" + +# Template for signal - tag_id, sensor type and +# tag manager mac address +SIGNAL_BINARY_EVENT_UPDATE = "wirelesstag.binary_event_updated_{}_{}_{}" + CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( @@ -102,6 +129,22 @@ class WirelessTagPlatform: self.api.start_monitoring(push_callback) +def async_migrate_unique_id( + hass: HomeAssistant, tag: SensorTag, domain: str, key: str +) -> None: + """Migrate old unique id to new one with use of tag's uuid.""" + registry = er.async_get(hass) + 
new_unique_id = f"{tag.uuid}_{key}" + + if registry.async_get_entity_id(domain, DOMAIN, new_unique_id): + return + + old_unique_id = f"{tag.tag_id}_{key}" + if entity_id := registry.async_get_entity_id(domain, DOMAIN, old_unique_id): + _LOGGER.debug("Updating unique id for %s %s", key, entity_id) + registry.async_update_entity(entity_id, new_unique_id=new_unique_id) + + def setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Wireless Sensor Tag component.""" conf = config[DOMAIN] @@ -126,3 +169,76 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: return False return True + + +class WirelessTagBaseSensor(Entity): + """Base class for HA implementation for Wireless Sensor Tag.""" + + def __init__(self, api, tag): + """Initialize a base sensor for Wireless Sensor Tag platform.""" + self._api = api + self._tag = tag + self._uuid = self._tag.uuid + self.tag_id = self._tag.tag_id + self.tag_manager_mac = self._tag.tag_manager_mac + self._name = self._tag.name + self._state = None + + @property + def name(self): + """Return the name of the sensor.""" + return self._name + + @property + def principal_value(self): + """Return base value. + + Subclasses need override based on type of sensor. + """ + return 0 + + def updated_state_value(self): + """Return formatted value. + + The default implementation formats principal value. + """ + return self.decorate_value(self.principal_value) + + def decorate_value(self, value): + """Decorate input value to be well presented for end user.""" + return f"{value:.1f}" + + @property + def available(self): + """Return True if entity is available.""" + return self._tag.is_alive + + def update(self): + """Update state.""" + if not self.should_poll: + return + + updated_tags = self._api.load_tags() + if (updated_tag := updated_tags[self._uuid]) is None: + _LOGGER.error('Unable to update tag: "%s"', self.name) + return + + self._tag = updated_tag + self._state = self.updated_state_value() + + @property + def extra_state_attributes(self): + """Return the state attributes.""" + return { + ATTR_BATTERY_LEVEL: int(self._tag.battery_remaining * 100), + ATTR_VOLTAGE: ( + f"{self._tag.battery_volts:.2f}{UnitOfElectricPotential.VOLT}" + ), + ATTR_TAG_SIGNAL_STRENGTH: ( + f"{self._tag.signal_strength}{SIGNAL_STRENGTH_DECIBELS_MILLIWATT}" + ), + ATTR_TAG_OUT_OF_RANGE: not self._tag.is_in_range, + ATTR_TAG_POWER_CONSUMPTION: ( + f"{self._tag.power_consumption:.2f}{PERCENTAGE}" + ), + } diff --git a/homeassistant/components/wirelesstag/binary_sensor.py b/homeassistant/components/wirelesstag/binary_sensor.py index 9e8075dd874..052f6547dd2 100644 --- a/homeassistant/components/wirelesstag/binary_sensor.py +++ b/homeassistant/components/wirelesstag/binary_sensor.py @@ -15,9 +15,12 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .const import DOMAIN, SIGNAL_BINARY_EVENT_UPDATE -from .entity import WirelessTagBaseSensor -from .util import async_migrate_unique_id +from . 
import ( + DOMAIN as WIRELESSTAG_DOMAIN, + SIGNAL_BINARY_EVENT_UPDATE, + WirelessTagBaseSensor, + async_migrate_unique_id, +) # On means in range, Off means out of range SENSOR_PRESENCE = "presence" @@ -81,7 +84,7 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the platform for a WirelessTags.""" - platform = hass.data[DOMAIN] + platform = hass.data[WIRELESSTAG_DOMAIN] sensors = [] tags = platform.tags diff --git a/homeassistant/components/wirelesstag/const.py b/homeassistant/components/wirelesstag/const.py deleted file mode 100644 index c1384606bf1..00000000000 --- a/homeassistant/components/wirelesstag/const.py +++ /dev/null @@ -1,11 +0,0 @@ -"""Support for Wireless Sensor Tags.""" - -DOMAIN = "wirelesstag" - -# Template for signal - first parameter is tag_id, -# second, tag manager mac address -SIGNAL_TAG_UPDATE = "wirelesstag.tag_info_updated_{}_{}" - -# Template for signal - tag_id, sensor type and -# tag manager mac address -SIGNAL_BINARY_EVENT_UPDATE = "wirelesstag.binary_event_updated_{}_{}_{}" diff --git a/homeassistant/components/wirelesstag/entity.py b/homeassistant/components/wirelesstag/entity.py deleted file mode 100644 index 31f8ee99d0d..00000000000 --- a/homeassistant/components/wirelesstag/entity.py +++ /dev/null @@ -1,95 +0,0 @@ -"""Support for Wireless Sensor Tags.""" - -import logging - -from homeassistant.const import ( - ATTR_BATTERY_LEVEL, - ATTR_VOLTAGE, - PERCENTAGE, - SIGNAL_STRENGTH_DECIBELS_MILLIWATT, - UnitOfElectricPotential, -) -from homeassistant.helpers.entity import Entity - -_LOGGER = logging.getLogger(__name__) - - -# Strength of signal in dBm -ATTR_TAG_SIGNAL_STRENGTH = "signal_strength" -# Indicates if tag is out of range or not -ATTR_TAG_OUT_OF_RANGE = "out_of_range" -# Number in percents from max power of tag receiver -ATTR_TAG_POWER_CONSUMPTION = "power_consumption" - - -class WirelessTagBaseSensor(Entity): - """Base class for HA implementation for Wireless Sensor Tag.""" - - def __init__(self, api, tag): - """Initialize a base sensor for Wireless Sensor Tag platform.""" - self._api = api - self._tag = tag - self._uuid = self._tag.uuid - self.tag_id = self._tag.tag_id - self.tag_manager_mac = self._tag.tag_manager_mac - self._name = self._tag.name - self._state = None - - @property - def name(self): - """Return the name of the sensor.""" - return self._name - - @property - def principal_value(self): - """Return base value. - - Subclasses need override based on type of sensor. - """ - return 0 - - def updated_state_value(self): - """Return formatted value. - - The default implementation formats principal value. 
- """ - return self.decorate_value(self.principal_value) - - def decorate_value(self, value): - """Decorate input value to be well presented for end user.""" - return f"{value:.1f}" - - @property - def available(self): - """Return True if entity is available.""" - return self._tag.is_alive - - def update(self): - """Update state.""" - if not self.should_poll: - return - - updated_tags = self._api.load_tags() - if (updated_tag := updated_tags[self._uuid]) is None: - _LOGGER.error('Unable to update tag: "%s"', self.name) - return - - self._tag = updated_tag - self._state = self.updated_state_value() - - @property - def extra_state_attributes(self): - """Return the state attributes.""" - return { - ATTR_BATTERY_LEVEL: int(self._tag.battery_remaining * 100), - ATTR_VOLTAGE: ( - f"{self._tag.battery_volts:.2f}{UnitOfElectricPotential.VOLT}" - ), - ATTR_TAG_SIGNAL_STRENGTH: ( - f"{self._tag.signal_strength}{SIGNAL_STRENGTH_DECIBELS_MILLIWATT}" - ), - ATTR_TAG_OUT_OF_RANGE: not self._tag.is_in_range, - ATTR_TAG_POWER_CONSUMPTION: ( - f"{self._tag.power_consumption:.2f}{PERCENTAGE}" - ), - } diff --git a/homeassistant/components/wirelesstag/sensor.py b/homeassistant/components/wirelesstag/sensor.py index 7a3cbe5efe2..87906bdc2ae 100644 --- a/homeassistant/components/wirelesstag/sensor.py +++ b/homeassistant/components/wirelesstag/sensor.py @@ -20,9 +20,12 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .const import DOMAIN, SIGNAL_TAG_UPDATE -from .entity import WirelessTagBaseSensor -from .util import async_migrate_unique_id +from . import ( + DOMAIN as WIRELESSTAG_DOMAIN, + SIGNAL_TAG_UPDATE, + WirelessTagBaseSensor, + async_migrate_unique_id, +) _LOGGER = logging.getLogger(__name__) @@ -78,7 +81,7 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the sensor platform.""" - platform = hass.data[DOMAIN] + platform = hass.data[WIRELESSTAG_DOMAIN] sensors = [] tags = platform.tags for tag in tags.values(): @@ -110,7 +113,9 @@ class WirelessTagSensor(WirelessTagBaseSensor, SensorEntity): # sensor.wirelesstag_bedroom_temperature # and not as sensor.bedroom for temperature and # sensor.bedroom_2 for humidity - self.entity_id = f"sensor.{DOMAIN}_{self.underscored_name}_{self._sensor_type}" + self.entity_id = ( + f"sensor.{WIRELESSTAG_DOMAIN}_{self.underscored_name}_{self._sensor_type}" + ) async def async_added_to_hass(self) -> None: """Register callbacks.""" diff --git a/homeassistant/components/wirelesstag/switch.py b/homeassistant/components/wirelesstag/switch.py index cae5d63988c..239461df4ea 100644 --- a/homeassistant/components/wirelesstag/switch.py +++ b/homeassistant/components/wirelesstag/switch.py @@ -17,9 +17,11 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .const import DOMAIN -from .entity import WirelessTagBaseSensor -from .util import async_migrate_unique_id +from . import ( + DOMAIN as WIRELESSTAG_DOMAIN, + WirelessTagBaseSensor, + async_migrate_unique_id, +) SWITCH_TYPES: tuple[SwitchEntityDescription, ...] 
= ( SwitchEntityDescription( @@ -62,7 +64,7 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up switches for a Wireless Sensor Tags.""" - platform = hass.data[DOMAIN] + platform = hass.data[WIRELESSTAG_DOMAIN] tags = platform.load_tags() monitored_conditions = config[CONF_MONITORED_CONDITIONS] diff --git a/homeassistant/components/wirelesstag/util.py b/homeassistant/components/wirelesstag/util.py deleted file mode 100644 index 1b5d6551fc4..00000000000 --- a/homeassistant/components/wirelesstag/util.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Support for Wireless Sensor Tags.""" - -import logging - -from wirelesstagpy.sensortag import SensorTag - -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .const import DOMAIN - -_LOGGER = logging.getLogger(__name__) - - -def async_migrate_unique_id( - hass: HomeAssistant, tag: SensorTag, domain: str, key: str -) -> None: - """Migrate old unique id to new one with use of tag's uuid.""" - registry = er.async_get(hass) - new_unique_id = f"{tag.uuid}_{key}" - - if registry.async_get_entity_id(domain, DOMAIN, new_unique_id): - return - - old_unique_id = f"{tag.tag_id}_{key}" - if entity_id := registry.async_get_entity_id(domain, DOMAIN, old_unique_id): - _LOGGER.debug("Updating unique id for %s %s", key, entity_id) - registry.async_update_entity(entity_id, new_unique_id=new_unique_id) diff --git a/homeassistant/components/withings/__init__.py b/homeassistant/components/withings/__init__.py index 1c196bd4b92..908548084ae 100644 --- a/homeassistant/components/withings/__init__.py +++ b/homeassistant/components/withings/__init__.py @@ -48,7 +48,6 @@ from .coordinator import ( WithingsActivityDataUpdateCoordinator, WithingsBedPresenceDataUpdateCoordinator, WithingsDataUpdateCoordinator, - WithingsDeviceDataUpdateCoordinator, WithingsGoalsDataUpdateCoordinator, WithingsMeasurementDataUpdateCoordinator, WithingsSleepDataUpdateCoordinator, @@ -74,7 +73,6 @@ class WithingsData: goals_coordinator: WithingsGoalsDataUpdateCoordinator activity_coordinator: WithingsActivityDataUpdateCoordinator workout_coordinator: WithingsWorkoutDataUpdateCoordinator - device_coordinator: WithingsDeviceDataUpdateCoordinator coordinators: set[WithingsDataUpdateCoordinator] = field(default_factory=set) def __post_init__(self) -> None: @@ -86,7 +84,6 @@ class WithingsData: self.goals_coordinator, self.activity_coordinator, self.workout_coordinator, - self.device_coordinator, } @@ -125,7 +122,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: WithingsConfigEntry) -> goals_coordinator=WithingsGoalsDataUpdateCoordinator(hass, client), activity_coordinator=WithingsActivityDataUpdateCoordinator(hass, client), workout_coordinator=WithingsWorkoutDataUpdateCoordinator(hass, client), - device_coordinator=WithingsDeviceDataUpdateCoordinator(hass, client), ) for coordinator in withings_data.coordinators: diff --git a/homeassistant/components/withings/config_flow.py b/homeassistant/components/withings/config_flow.py index d7f07ccc184..5eb4e08595a 100644 --- a/homeassistant/components/withings/config_flow.py +++ b/homeassistant/components/withings/config_flow.py @@ -9,8 +9,8 @@ from typing import Any from aiowithings import AuthScope from homeassistant.components.webhook import async_generate_id -from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult -from homeassistant.const import CONF_NAME, CONF_TOKEN, CONF_WEBHOOK_ID +from homeassistant.config_entries import 
ConfigEntry, ConfigFlowResult +from homeassistant.const import CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.helpers import config_entry_oauth2_flow from .const import DEFAULT_TITLE, DOMAIN @@ -23,6 +23,8 @@ class WithingsFlowHandler( DOMAIN = DOMAIN + reauth_entry: ConfigEntry | None = None + @property def logger(self) -> logging.Logger: """Return logger.""" @@ -40,6 +42,9 @@ class WithingsFlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" + self.reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -47,17 +52,14 @@ class WithingsFlowHandler( ) -> ConfigFlowResult: """Confirm reauth dialog.""" if user_input is None: - return self.async_show_form( - step_id="reauth_confirm", - description_placeholders={CONF_NAME: self._get_reauth_entry().title}, - ) + return self.async_show_form(step_id="reauth_confirm") return await self.async_step_user() async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an entry for the flow, or update existing entry.""" user_id = str(data[CONF_TOKEN]["userid"]) - await self.async_set_unique_id(user_id) - if self.source != SOURCE_REAUTH: + if not self.reauth_entry: + await self.async_set_unique_id(user_id) self._abort_if_unique_id_configured() return self.async_create_entry( @@ -65,7 +67,9 @@ class WithingsFlowHandler( data={**data, CONF_WEBHOOK_ID: async_generate_id()}, ) - self._abort_if_unique_id_mismatch(reason="wrong_account") - return self.async_update_reload_and_abort( - self._get_reauth_entry(), data_updates=data - ) + if self.reauth_entry.unique_id == user_id: + return self.async_update_reload_and_abort( + self.reauth_entry, data={**self.reauth_entry.data, **data} + ) + + return self.async_abort(reason="wrong_account") diff --git a/homeassistant/components/withings/coordinator.py b/homeassistant/components/withings/coordinator.py index 79419ae23ff..361a20acafd 100644 --- a/homeassistant/components/withings/coordinator.py +++ b/homeassistant/components/withings/coordinator.py @@ -8,7 +8,6 @@ from typing import TYPE_CHECKING from aiowithings import ( Activity, - Device, Goals, MeasurementPosition, MeasurementType, @@ -292,17 +291,3 @@ class WithingsWorkoutDataUpdateCoordinator( self._previous_data = latest_workout self._last_valid_update = latest_workout.end_date return self._previous_data - - -class WithingsDeviceDataUpdateCoordinator( - WithingsDataUpdateCoordinator[dict[str, Device]] -): - """Withings device coordinator.""" - - coordinator_name: str = "device" - _default_update_interval = timedelta(hours=1) - - async def _internal_update_data(self) -> dict[str, Device]: - """Update coordinator data.""" - devices = await self._client.get_devices() - return {device.device_id: device for device in devices} diff --git a/homeassistant/components/withings/entity.py b/homeassistant/components/withings/entity.py index 5c548fdb260..a5cb62b72a2 100644 --- a/homeassistant/components/withings/entity.py +++ b/homeassistant/components/withings/entity.py @@ -4,16 +4,11 @@ from __future__ import annotations from typing import Any -from aiowithings import Device - from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN -from .coordinator import ( - WithingsDataUpdateCoordinator, - WithingsDeviceDataUpdateCoordinator, -) +from .coordinator 
import WithingsDataUpdateCoordinator class WithingsEntity[_T: WithingsDataUpdateCoordinator[Any]](CoordinatorEntity[_T]): @@ -33,35 +28,3 @@ class WithingsEntity[_T: WithingsDataUpdateCoordinator[Any]](CoordinatorEntity[_ identifiers={(DOMAIN, str(coordinator.config_entry.unique_id))}, manufacturer="Withings", ) - - -class WithingsDeviceEntity(WithingsEntity[WithingsDeviceDataUpdateCoordinator]): - """Base class for withings device entities.""" - - def __init__( - self, - coordinator: WithingsDeviceDataUpdateCoordinator, - device_id: str, - key: str, - ) -> None: - """Initialize the Withings entity.""" - super().__init__(coordinator, key) - self._attr_unique_id = f"{device_id}_{key}" - self.device_id = device_id - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, device_id)}, - manufacturer="Withings", - name=self.device.raw_model, - model=self.device.raw_model, - via_device=(DOMAIN, str(coordinator.config_entry.unique_id)), - ) - - @property - def available(self) -> bool: - """Return True if entity is available.""" - return super().available and self.device_id in self.coordinator.data - - @property - def device(self) -> Device: - """Return the Withings device.""" - return self.coordinator.data[self.device_id] diff --git a/homeassistant/components/withings/icons.json b/homeassistant/components/withings/icons.json index 79ff7489bf8..f6fb5e74136 100644 --- a/homeassistant/components/withings/icons.json +++ b/homeassistant/components/withings/icons.json @@ -136,14 +136,6 @@ }, "workout_duration": { "default": "mdi:timer" - }, - "battery": { - "default": "mdi:battery-off", - "state": { - "low": "mdi:battery-20", - "medium": "mdi:battery-50", - "high": "mdi:battery" - } } } } diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index f9e8328ae53..4c97f43fd80 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -9,5 +9,5 @@ "iot_class": "cloud_push", "loggers": ["aiowithings"], "quality_scale": "platinum", - "requirements": ["aiowithings==3.1.3"] + "requirements": ["aiowithings==3.0.1"] } diff --git a/homeassistant/components/withings/sensor.py b/homeassistant/components/withings/sensor.py index 1005b5995a5..20fd72845ae 100644 --- a/homeassistant/components/withings/sensor.py +++ b/homeassistant/components/withings/sensor.py @@ -9,7 +9,6 @@ from typing import Any from aiowithings import ( Activity, - Device, Goals, MeasurementPosition, MeasurementType, @@ -24,7 +23,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( PERCENTAGE, Platform, @@ -35,8 +33,8 @@ from homeassistant.const import ( UnitOfTime, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback +import homeassistant.helpers.entity_registry as er from homeassistant.helpers.typing import StateType from homeassistant.util import dt as dt_util @@ -53,13 +51,12 @@ from .const import ( from .coordinator import ( WithingsActivityDataUpdateCoordinator, WithingsDataUpdateCoordinator, - WithingsDeviceDataUpdateCoordinator, WithingsGoalsDataUpdateCoordinator, WithingsMeasurementDataUpdateCoordinator, WithingsSleepDataUpdateCoordinator, WithingsWorkoutDataUpdateCoordinator, ) -from .entity import WithingsDeviceEntity, WithingsEntity +from 
.entity import WithingsEntity @dataclass(frozen=True, kw_only=True) @@ -653,24 +650,6 @@ WORKOUT_SENSORS = [ ] -@dataclass(frozen=True, kw_only=True) -class WithingsDeviceSensorEntityDescription(SensorEntityDescription): - """Immutable class for describing withings data.""" - - value_fn: Callable[[Device], StateType] - - -DEVICE_SENSORS = [ - WithingsDeviceSensorEntityDescription( - key="battery", - translation_key="battery", - options=["low", "medium", "high"], - device_class=SensorDeviceClass.ENUM, - value_fn=lambda device: device.battery, - ) -] - - def get_current_goals(goals: Goals) -> set[str]: """Return a list of present goals.""" result = set() @@ -821,52 +800,9 @@ async def async_setup_entry( _async_add_workout_entities ) - device_coordinator = withings_data.device_coordinator - - current_devices: set[str] = set() - - def _async_device_listener() -> None: - """Add device entities.""" - received_devices = set(device_coordinator.data) - new_devices = received_devices - current_devices - old_devices = current_devices - received_devices - if new_devices: - device_registry = dr.async_get(hass) - for device_id in new_devices: - if device := device_registry.async_get_device({(DOMAIN, device_id)}): - if any( - ( - config_entry := hass.config_entries.async_get_entry( - config_entry_id - ) - ) - and config_entry.state == ConfigEntryState.LOADED - for config_entry_id in device.config_entries - ): - continue - async_add_entities( - WithingsDeviceSensor(device_coordinator, description, device_id) - for description in DEVICE_SENSORS - ) - current_devices.add(device_id) - - if old_devices: - device_registry = dr.async_get(hass) - for device_id in old_devices: - if device := device_registry.async_get_device({(DOMAIN, device_id)}): - device_registry.async_update_device( - device.id, remove_config_entry_id=entry.entry_id - ) - current_devices.remove(device_id) - - device_coordinator.async_add_listener(_async_device_listener) - - _async_device_listener() - if not entities: LOGGER.warning( - "No data found for Withings entry %s, sensors will be added when new data is available", - entry.title, + "No data found for Withings entry %s, sensors will be added when new data is available" ) async_add_entities(entities) @@ -987,24 +923,3 @@ class WithingsWorkoutSensor( if not self.coordinator.data: return None return self.entity_description.value_fn(self.coordinator.data) - - -class WithingsDeviceSensor(WithingsDeviceEntity, SensorEntity): - """Implementation of a Withings workout sensor.""" - - entity_description: WithingsDeviceSensorEntityDescription - - def __init__( - self, - coordinator: WithingsDeviceDataUpdateCoordinator, - entity_description: WithingsDeviceSensorEntityDescription, - device_id: str, - ) -> None: - """Initialize sensor.""" - super().__init__(coordinator, device_id, entity_description.key) - self.entity_description = entity_description - - @property - def native_value(self) -> StateType: - """Return the state of the entity.""" - return self.entity_description.value_fn(self.device) diff --git a/homeassistant/components/withings/strings.json b/homeassistant/components/withings/strings.json index 775ef5cdaab..fb86b16c3be 100644 --- a/homeassistant/components/withings/strings.json +++ b/homeassistant/components/withings/strings.json @@ -20,9 +20,7 @@ "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": 
"[%key:common::config_flow::abort::oauth2_failed%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "wrong_account": "Authenticated account does not match the account to be reauthenticated. Please log in with the correct account." + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" }, "create_entry": { "default": "Successfully authenticated with Withings." @@ -309,14 +307,6 @@ }, "workout_duration": { "name": "Last workout duration" - }, - "battery": { - "name": "[%key:component::sensor::entity_component::battery::name%]", - "state": { - "low": "Low", - "medium": "Medium", - "high": "High" - } } } } diff --git a/homeassistant/components/wiz/__init__.py b/homeassistant/components/wiz/__init__.py index 0e986aaefa2..79c317f178b 100644 --- a/homeassistant/components/wiz/__init__.py +++ b/homeassistant/components/wiz/__init__.py @@ -31,8 +31,6 @@ from .const import ( from .discovery import async_discover_devices, async_trigger_discovery from .models import WizData -type WizConfigEntry = ConfigEntry[WizData] - _LOGGER = logging.getLogger(__name__) PLATFORMS = [ @@ -103,7 +101,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass=hass, logger=_LOGGER, - config_entry=entry, name=entry.title, update_interval=timedelta(seconds=15), update_method=_async_update, @@ -138,7 +135,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await bulb.start_push(_async_push_update) bulb.set_discovery_callback(lambda bulb: async_trigger_discovery(hass, [bulb])) - entry.runtime_data = WizData(coordinator=coordinator, bulb=bulb, scenes=scenes) + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = WizData( + coordinator=coordinator, bulb=bulb, scenes=scenes + ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(_async_update_listener)) @@ -148,5 +147,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - await entry.runtime_data.bulb.async_close() + data: WizData = hass.data[DOMAIN].pop(entry.entry_id) + await data.bulb.async_close() return unload_ok diff --git a/homeassistant/components/wiz/binary_sensor.py b/homeassistant/components/wiz/binary_sensor.py index 3411ee200b9..b58e120a9dd 100644 --- a/homeassistant/components/wiz/binary_sensor.py +++ b/homeassistant/components/wiz/binary_sensor.py @@ -10,13 +10,13 @@ from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import WizConfigEntry from .const import DOMAIN, SIGNAL_WIZ_PIR from .entity import WizEntity from .models import WizData @@ -26,16 +26,17 @@ OCCUPANCY_UNIQUE_ID = "{}_occupancy" async def async_setup_entry( hass: HomeAssistant, - entry: WizConfigEntry, + entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the WiZ binary sensor platform.""" - mac = entry.runtime_data.bulb.mac + wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] + mac = wiz_data.bulb.mac if er.async_get(hass).async_get_entity_id( Platform.BINARY_SENSOR, DOMAIN, OCCUPANCY_UNIQUE_ID.format(mac) ): - async_add_entities([WizOccupancyEntity(entry.runtime_data, entry.title)]) + async_add_entities([WizOccupancyEntity(wiz_data, entry.title)]) return cancel_dispatcher: Callable[[], None] | None = None @@ -46,7 +47,7 @@ async def async_setup_entry( assert cancel_dispatcher is not None cancel_dispatcher() cancel_dispatcher = None - async_add_entities([WizOccupancyEntity(entry.runtime_data, entry.title)]) + async_add_entities([WizOccupancyEntity(wiz_data, entry.title)]) cancel_dispatcher = async_dispatcher_connect( hass, SIGNAL_WIZ_PIR.format(mac), _async_add_occupancy_sensor diff --git a/homeassistant/components/wiz/diagnostics.py b/homeassistant/components/wiz/diagnostics.py index c58751c7fc0..5f617ebafe9 100644 --- a/homeassistant/components/wiz/diagnostics.py +++ b/homeassistant/components/wiz/diagnostics.py @@ -5,21 +5,24 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import WizConfigEntry +from .const import DOMAIN +from .models import WizData TO_REDACT = {"roomId", "homeId"} async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: WizConfigEntry + hass: HomeAssistant, entry: ConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" + wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] return { "entry": { "title": entry.title, "data": dict(entry.data), }, - "data": async_redact_data(entry.runtime_data.bulb.diagnostics, TO_REDACT), + "data": async_redact_data(wiz_data.bulb.diagnostics, TO_REDACT), } diff --git a/homeassistant/components/wiz/light.py b/homeassistant/components/wiz/light.py index a3f36d580d2..aece184720d 100644 --- a/homeassistant/components/wiz/light.py +++ b/homeassistant/components/wiz/light.py @@ -19,6 +19,7 @@ from homeassistant.components.light import ( LightEntityFeature, filter_supported_color_modes, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.color import ( @@ -26,7 +27,7 @@ from homeassistant.util.color import ( color_temperature_mired_to_kelvin, ) -from . 
import WizConfigEntry +from .const import DOMAIN from .entity import WizToggleEntity from .models import WizData @@ -60,12 +61,13 @@ def _async_pilot_builder(**kwargs: Any) -> PilotBuilder: async def async_setup_entry( hass: HomeAssistant, - entry: WizConfigEntry, + entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the WiZ Platform from config_flow.""" - if entry.runtime_data.bulb.bulbtype.bulb_type != BulbClass.SOCKET: - async_add_entities([WizBulbEntity(entry.runtime_data, entry.title)]) + wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] + if wiz_data.bulb.bulbtype.bulb_type != BulbClass.SOCKET: + async_add_entities([WizBulbEntity(wiz_data, entry.title)]) class WizBulbEntity(WizToggleEntity, LightEntity): diff --git a/homeassistant/components/wiz/number.py b/homeassistant/components/wiz/number.py index 0591e854d7d..46708ac001e 100644 --- a/homeassistant/components/wiz/number.py +++ b/homeassistant/components/wiz/number.py @@ -13,11 +13,12 @@ from homeassistant.components.number import ( NumberEntityDescription, NumberMode, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import WizConfigEntry +from .const import DOMAIN from .entity import WizEntity from .models import WizData @@ -67,16 +68,15 @@ NUMBERS: tuple[WizNumberEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - entry: WizConfigEntry, + entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the wiz speed number.""" + wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] async_add_entities( - WizSpeedNumber(entry.runtime_data, entry.title, description) + WizSpeedNumber(wiz_data, entry.title, description) for description in NUMBERS - if getattr( - entry.runtime_data.bulb.bulbtype.features, description.required_feature - ) + if getattr(wiz_data.bulb.bulbtype.features, description.required_feature) ) diff --git a/homeassistant/components/wiz/sensor.py b/homeassistant/components/wiz/sensor.py index eb77686a5cf..aae443e60d0 100644 --- a/homeassistant/components/wiz/sensor.py +++ b/homeassistant/components/wiz/sensor.py @@ -8,6 +8,7 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, @@ -16,7 +17,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import WizConfigEntry +from .const import DOMAIN from .entity import WizEntity from .models import WizData @@ -44,18 +45,18 @@ POWER_SENSORS: tuple[SensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - entry: WizConfigEntry, + entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the wiz sensor.""" + wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] entities = [ - WizSensor(entry.runtime_data, entry.title, description) - for description in SENSORS + WizSensor(wiz_data, entry.title, description) for description in SENSORS ] - if entry.runtime_data.coordinator.data is not None: + if wiz_data.coordinator.data is not None: entities.extend( [ - WizPowerSensor(entry.runtime_data, entry.title, description) + WizPowerSensor(wiz_data, entry.title, description) for description in POWER_SENSORS ] ) diff --git a/homeassistant/components/wiz/switch.py b/homeassistant/components/wiz/switch.py index 4c089d2d6d2..d94bf12da9f 100644 --- a/homeassistant/components/wiz/switch.py +++ b/homeassistant/components/wiz/switch.py @@ -8,22 +8,24 @@ from pywizlight import PilotBuilder from pywizlight.bulblibrary import BulbClass from homeassistant.components.switch import SwitchEntity +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import WizConfigEntry +from .const import DOMAIN from .entity import WizToggleEntity from .models import WizData async def async_setup_entry( hass: HomeAssistant, - entry: WizConfigEntry, + entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the WiZ switch platform.""" - if entry.runtime_data.bulb.bulbtype.bulb_type == BulbClass.SOCKET: - async_add_entities([WizSocketEntity(entry.runtime_data, entry.title)]) + wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] + if wiz_data.bulb.bulbtype.bulb_type == BulbClass.SOCKET: + async_add_entities([WizSocketEntity(wiz_data, entry.title)]) class WizSocketEntity(WizToggleEntity, SwitchEntity): diff --git a/homeassistant/components/wled/__init__.py b/homeassistant/components/wled/__init__.py index b4834347694..ba87fb58122 100644 --- a/homeassistant/components/wled/__init__.py +++ b/homeassistant/components/wled/__init__.py @@ -5,12 +5,9 @@ from __future__ import annotations from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.typing import ConfigType -from homeassistant.util.hass_dict import HassKey -from .const import DOMAIN -from .coordinator import WLEDDataUpdateCoordinator, WLEDReleasesDataUpdateCoordinator +from .const import LOGGER +from .coordinator import WLEDDataUpdateCoordinator PLATFORMS = ( Platform.BUTTON, @@ -24,26 +21,23 @@ PLATFORMS = ( type WLEDConfigEntry = ConfigEntry[WLEDDataUpdateCoordinator] -WLED_KEY: HassKey[WLEDReleasesDataUpdateCoordinator] = HassKey(DOMAIN) -CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the WLED integration. - - We set up a single coordinator for fetching WLED releases, which - is used across all WLED devices (and config entries) to avoid - fetching the same data multiple times for each. 
- """ - hass.data[WLED_KEY] = WLEDReleasesDataUpdateCoordinator(hass) - await hass.data[WLED_KEY].async_request_refresh() - return True - async def async_setup_entry(hass: HomeAssistant, entry: WLEDConfigEntry) -> bool: """Set up WLED from a config entry.""" - entry.runtime_data = WLEDDataUpdateCoordinator(hass, entry=entry) - await entry.runtime_data.async_config_entry_first_refresh() + coordinator = WLEDDataUpdateCoordinator(hass, entry=entry) + await coordinator.async_config_entry_first_refresh() + + if coordinator.data.info.leds.cct: + LOGGER.error( + ( + "WLED device '%s' has a CCT channel, which is not supported by " + "this integration" + ), + entry.title, + ) + return False + + entry.runtime_data = coordinator # Set up all platforms for this device/entry. await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/wled/config_flow.py b/homeassistant/components/wled/config_flow.py index 812a0500d1a..c40753b686a 100644 --- a/homeassistant/components/wled/config_flow.py +++ b/homeassistant/components/wled/config_flow.py @@ -12,7 +12,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlow, + OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_HOST, CONF_MAC from homeassistant.core import callback @@ -30,11 +30,9 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow( - config_entry: ConfigEntry, - ) -> WLEDOptionsFlowHandler: + def async_get_options_flow(config_entry: ConfigEntry) -> WLEDOptionsFlowHandler: """Get the options flow for this handler.""" - return WLEDOptionsFlowHandler() + return WLEDOptionsFlowHandler(config_entry) async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -48,9 +46,9 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): except WLEDConnectionError: errors["base"] = "cannot_connect" else: - await self.async_set_unique_id( - device.info.mac_address, raise_on_progress=False - ) + if device.info.leds.cct: + return self.async_abort(reason="cct_unsupported") + await self.async_set_unique_id(device.info.mac_address) self._abort_if_unique_id_configured( updates={CONF_HOST: user_input[CONF_HOST]} ) @@ -60,6 +58,8 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): CONF_HOST: user_input[CONF_HOST], }, ) + else: + user_input = {} return self.async_show_form( step_id="user", @@ -84,6 +84,9 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): except WLEDConnectionError: return self.async_abort(reason="cannot_connect") + if self.discovered_device.info.leds.cct: + return self.async_abort(reason="cct_unsupported") + await self.async_set_unique_id(self.discovered_device.info.mac_address) self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host}) @@ -119,7 +122,7 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): return await wled.update() -class WLEDOptionsFlowHandler(OptionsFlow): +class WLEDOptionsFlowHandler(OptionsFlowWithConfigEntry): """Handle WLED options.""" async def async_step_init( @@ -135,7 +138,7 @@ class WLEDOptionsFlowHandler(OptionsFlow): { vol.Optional( CONF_KEEP_MAIN_LIGHT, - default=self.config_entry.options.get( + default=self.options.get( CONF_KEEP_MAIN_LIGHT, DEFAULT_KEEP_MAIN_LIGHT ), ): bool, diff --git a/homeassistant/components/wled/const.py b/homeassistant/components/wled/const.py index 69ff6ccb1fa..f698347537c 100644 --- a/homeassistant/components/wled/const.py +++ b/homeassistant/components/wled/const.py @@ -3,23 +3,17 @@ from datetime import 
timedelta import logging -from wled import LightCapability - -from homeassistant.components.light import ColorMode - # Integration domain DOMAIN = "wled" LOGGER = logging.getLogger(__package__) SCAN_INTERVAL = timedelta(seconds=10) -RELEASES_SCAN_INTERVAL = timedelta(hours=3) # Options CONF_KEEP_MAIN_LIGHT = "keep_master_light" DEFAULT_KEEP_MAIN_LIGHT = False # Attributes -ATTR_CCT = "cct" ATTR_COLOR_PRIMARY = "color_primary" ATTR_DURATION = "duration" ATTR_FADE = "fade" @@ -30,76 +24,3 @@ ATTR_SOFTWARE_VERSION = "sw_version" ATTR_SPEED = "speed" ATTR_TARGET_BRIGHTNESS = "target_brightness" ATTR_UDP_PORT = "udp_port" - -# Static values -COLOR_TEMP_K_MIN = 2000 -COLOR_TEMP_K_MAX = 6535 - - -LIGHT_CAPABILITIES_COLOR_MODE_MAPPING: dict[LightCapability, list[ColorMode]] = { - LightCapability.NONE: [ - ColorMode.ONOFF, - ], - LightCapability.RGB_COLOR: [ - ColorMode.RGB, - ], - LightCapability.WHITE_CHANNEL: [ - ColorMode.BRIGHTNESS, - ], - LightCapability.RGB_COLOR | LightCapability.WHITE_CHANNEL: [ - ColorMode.RGBW, - ], - LightCapability.COLOR_TEMPERATURE: [ - ColorMode.COLOR_TEMP, - ], - LightCapability.RGB_COLOR | LightCapability.COLOR_TEMPERATURE: [ - ColorMode.RGBWW, - ], - LightCapability.WHITE_CHANNEL | LightCapability.COLOR_TEMPERATURE: [ - ColorMode.COLOR_TEMP, - ], - LightCapability.RGB_COLOR - | LightCapability.WHITE_CHANNEL - | LightCapability.COLOR_TEMPERATURE: [ - ColorMode.COLOR_TEMP, - ColorMode.RGBW, - ], - LightCapability.MANUAL_WHITE: [ - ColorMode.BRIGHTNESS, - ], - LightCapability.RGB_COLOR | LightCapability.MANUAL_WHITE: [ - ColorMode.RGBW, - ], - LightCapability.WHITE_CHANNEL | LightCapability.MANUAL_WHITE: [ - ColorMode.BRIGHTNESS, - ], - LightCapability.RGB_COLOR - | LightCapability.WHITE_CHANNEL - | LightCapability.MANUAL_WHITE: [ - ColorMode.RGBW, - ColorMode.WHITE, - ], - LightCapability.COLOR_TEMPERATURE | LightCapability.MANUAL_WHITE: [ - ColorMode.COLOR_TEMP, - ColorMode.WHITE, - ], - LightCapability.RGB_COLOR - | LightCapability.COLOR_TEMPERATURE - | LightCapability.MANUAL_WHITE: [ - ColorMode.RGBW, - ColorMode.COLOR_TEMP, - ], - LightCapability.WHITE_CHANNEL - | LightCapability.COLOR_TEMPERATURE - | LightCapability.MANUAL_WHITE: [ - ColorMode.COLOR_TEMP, - ColorMode.WHITE, - ], - LightCapability.RGB_COLOR - | LightCapability.WHITE_CHANNEL - | LightCapability.COLOR_TEMPERATURE - | LightCapability.MANUAL_WHITE: [ - ColorMode.RGBW, - ColorMode.COLOR_TEMP, - ], -} diff --git a/homeassistant/components/wled/coordinator.py b/homeassistant/components/wled/coordinator.py index 8e2855e9f05..f6219c63cb8 100644 --- a/homeassistant/components/wled/coordinator.py +++ b/homeassistant/components/wled/coordinator.py @@ -2,14 +2,7 @@ from __future__ import annotations -from wled import ( - WLED, - Device as WLEDDevice, - Releases, - WLEDConnectionClosedError, - WLEDError, - WLEDReleases, -) +from wled import WLED, Device as WLEDDevice, WLEDConnectionClosedError, WLEDError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP @@ -22,7 +15,6 @@ from .const import ( DEFAULT_KEEP_MAIN_LIGHT, DOMAIN, LOGGER, - RELEASES_SCAN_INTERVAL, SCAN_INTERVAL, ) @@ -49,7 +41,6 @@ class WLEDDataUpdateCoordinator(DataUpdateCoordinator[WLEDDevice]): super().__init__( hass, LOGGER, - config_entry=entry, name=DOMAIN, update_interval=SCAN_INTERVAL, ) @@ -110,38 +101,17 @@ class WLEDDataUpdateCoordinator(DataUpdateCoordinator[WLEDDevice]): async def _async_update_data(self) -> WLEDDevice: """Fetch data from WLED.""" try: - device = 
await self.wled.update() + device = await self.wled.update(full_update=not self.last_update_success) except WLEDError as error: raise UpdateFailed(f"Invalid response from API: {error}") from error # If the device supports a WebSocket, try activating it. if ( device.info.websocket is not None + and device.info.leds.cct is not True and not self.wled.connected and not self.unsub ): self._use_websocket() return device - - -class WLEDReleasesDataUpdateCoordinator(DataUpdateCoordinator[Releases]): - """Class to manage fetching WLED releases.""" - - def __init__(self, hass: HomeAssistant) -> None: - """Initialize global WLED releases updater.""" - self.wled = WLEDReleases(session=async_get_clientsession(hass)) - super().__init__( - hass, - LOGGER, - config_entry=None, - name=DOMAIN, - update_interval=RELEASES_SCAN_INTERVAL, - ) - - async def _async_update_data(self) -> Releases: - """Fetch release data from WLED.""" - try: - return await self.wled.releases() - except WLEDError as error: - raise UpdateFailed(f"Invalid response from GitHub API: {error}") from error diff --git a/homeassistant/components/wled/diagnostics.py b/homeassistant/components/wled/diagnostics.py index 732cd3602a0..e81760e0f72 100644 --- a/homeassistant/components/wled/diagnostics.py +++ b/homeassistant/components/wled/diagnostics.py @@ -17,23 +17,31 @@ async def async_get_config_entry_diagnostics( coordinator = entry.runtime_data data: dict[str, Any] = { - "info": async_redact_data(coordinator.data.info.to_dict(), "wifi"), - "state": coordinator.data.state.to_dict(), + "info": async_redact_data(coordinator.data.info.__dict__, "wifi"), + "state": coordinator.data.state.__dict__, "effects": { - effect.effect_id: effect.name - for effect in coordinator.data.effects.values() + effect.effect_id: effect.name for effect in coordinator.data.effects }, "palettes": { - palette.palette_id: palette.name - for palette in coordinator.data.palettes.values() + palette.palette_id: palette.name for palette in coordinator.data.palettes }, "playlists": { - playlist.playlist_id: playlist.name - for playlist in coordinator.data.playlists.values() + playlist.playlist_id: { + "name": playlist.name, + "repeat": playlist.repeat, + "shuffle": playlist.shuffle, + "end": playlist.end.preset_id if playlist.end else None, + } + for playlist in coordinator.data.playlists }, "presets": { - preset.preset_id: preset.name - for preset in coordinator.data.presets.values() + preset.preset_id: { + "name": preset.name, + "quick_label": preset.quick_label, + "on": preset.on, + "transition": preset.transition, + } + for preset in coordinator.data.presets }, } return data diff --git a/homeassistant/components/wled/helpers.py b/homeassistant/components/wled/helpers.py index 216dba67c94..0dd29fdc2a3 100644 --- a/homeassistant/components/wled/helpers.py +++ b/homeassistant/components/wled/helpers.py @@ -35,13 +35,3 @@ def wled_exception_handler[_WLEDEntityT: WLEDEntity, **_P]( raise HomeAssistantError("Invalid response from WLED API") from error return handler - - -def kelvin_to_255(k: int, min_k: int, max_k: int) -> int: - """Map color temperature in K from minK-maxK to 0-255.""" - return int((k - min_k) / (max_k - min_k) * 255) - - -def kelvin_to_255_reverse(v: int, min_k: int, max_k: int) -> int: - """Map color temperature from 0-255 to minK-maxK K.""" - return int(v / 255 * (max_k - min_k) + min_k) diff --git a/homeassistant/components/wled/light.py b/homeassistant/components/wled/light.py index b4edf10dc58..36ebd024de3 100644 --- 
a/homeassistant/components/wled/light.py +++ b/homeassistant/components/wled/light.py @@ -7,7 +7,6 @@ from typing import Any, cast from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -20,18 +19,10 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import WLEDConfigEntry -from .const import ( - ATTR_CCT, - ATTR_COLOR_PRIMARY, - ATTR_ON, - ATTR_SEGMENT_ID, - COLOR_TEMP_K_MAX, - COLOR_TEMP_K_MIN, - LIGHT_CAPABILITIES_COLOR_MODE_MAPPING, -) +from .const import ATTR_COLOR_PRIMARY, ATTR_ON, ATTR_SEGMENT_ID from .coordinator import WLEDDataUpdateCoordinator from .entity import WLEDEntity -from .helpers import kelvin_to_255, kelvin_to_255_reverse, wled_exception_handler +from .helpers import wled_exception_handler PARALLEL_UPDATES = 1 @@ -113,8 +104,6 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): _attr_supported_features = LightEntityFeature.EFFECT | LightEntityFeature.TRANSITION _attr_translation_key = "segment" - _attr_min_color_temp_kelvin = COLOR_TEMP_K_MIN - _attr_max_color_temp_kelvin = COLOR_TEMP_K_MAX def __init__( self, @@ -123,6 +112,8 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): ) -> None: """Initialize WLED segment light.""" super().__init__(coordinator=coordinator) + self._rgbw = coordinator.data.info.leds.rgbw + self._wv = coordinator.data.info.leds.wv self._segment = segment # Segment 0 uses a simpler name, which is more natural for when using @@ -136,24 +127,18 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): f"{self.coordinator.data.info.mac_address}_{self._segment}" ) - if ( - coordinator.data.info.leds.segment_light_capabilities is not None - and ( - color_modes := LIGHT_CAPABILITIES_COLOR_MODE_MAPPING.get( - coordinator.data.info.leds.segment_light_capabilities[segment] - ) - ) - is not None - ): - self._attr_color_mode = color_modes[0] - self._attr_supported_color_modes = set(color_modes) + self._attr_color_mode = ColorMode.RGB + self._attr_supported_color_modes = {ColorMode.RGB} + if self._rgbw and self._wv: + self._attr_color_mode = ColorMode.RGBW + self._attr_supported_color_modes = {ColorMode.RGBW} @property def available(self) -> bool: """Return True if entity is available.""" try: self.coordinator.data.state.segments[self._segment] - except KeyError: + except IndexError: return False return super().available @@ -161,29 +146,20 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): @property def rgb_color(self) -> tuple[int, int, int] | None: """Return the color value.""" - if not (color := self.coordinator.data.state.segments[self._segment].color): - return None - return color.primary[:3] + return self.coordinator.data.state.segments[self._segment].color_primary[:3] @property def rgbw_color(self) -> tuple[int, int, int, int] | None: """Return the color value.""" - if not (color := self.coordinator.data.state.segments[self._segment].color): - return None - return cast(tuple[int, int, int, int], color.primary) - - @property - def color_temp_kelvin(self) -> int | None: - """Return the CT color value in K.""" - cct = self.coordinator.data.state.segments[self._segment].cct - return kelvin_to_255_reverse(cct, COLOR_TEMP_K_MIN, COLOR_TEMP_K_MAX) + return cast( + tuple[int, int, int, int], + self.coordinator.data.state.segments[self._segment].color_primary, + ) @property def effect(self) -> str | None: """Return the current effect of the light.""" - return self.coordinator.data.effects[ - 
int(self.coordinator.data.state.segments[self._segment].effect_id) - ].name + return self.coordinator.data.state.segments[self._segment].effect.name @property def brightness(self) -> int | None: @@ -202,7 +178,7 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): @property def effect_list(self) -> list[str]: """Return the list of supported effects.""" - return [effect.name for effect in self.coordinator.data.effects.values()] + return [effect.name for effect in self.coordinator.data.effects] @property def is_on(self) -> bool: @@ -247,11 +223,6 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): if ATTR_RGBW_COLOR in kwargs: data[ATTR_COLOR_PRIMARY] = kwargs[ATTR_RGBW_COLOR] - if ATTR_COLOR_TEMP_KELVIN in kwargs: - data[ATTR_CCT] = kelvin_to_255( - kwargs[ATTR_COLOR_TEMP_KELVIN], COLOR_TEMP_K_MIN, COLOR_TEMP_K_MAX - ) - if ATTR_TRANSITION in kwargs: # WLED uses 100ms per unit, so 10 = 1 second. data[ATTR_TRANSITION] = round(kwargs[ATTR_TRANSITION] * 10) @@ -287,11 +258,7 @@ def async_update_segments( async_add_entities: AddEntitiesCallback, ) -> None: """Update segments.""" - segment_ids = { - light.segment_id - for light in coordinator.data.state.segments.values() - if light.segment_id is not None - } + segment_ids = {light.segment_id for light in coordinator.data.state.segments} new_entities: list[WLEDMainLight | WLEDSegmentLight] = [] # More than 1 segment now? No main? Add main controls diff --git a/homeassistant/components/wled/manifest.json b/homeassistant/components/wled/manifest.json index 71939127356..a01bbcabdd6 100644 --- a/homeassistant/components/wled/manifest.json +++ b/homeassistant/components/wled/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_push", "quality_scale": "platinum", - "requirements": ["wled==0.20.2"], + "requirements": ["wled==0.18.0"], "zeroconf": ["_wled._tcp.local."] } diff --git a/homeassistant/components/wled/number.py b/homeassistant/components/wled/number.py index 225d783bfdb..5af466360bb 100644 --- a/homeassistant/components/wled/number.py +++ b/homeassistant/components/wled/number.py @@ -44,7 +44,7 @@ async def async_setup_entry( class WLEDNumberEntityDescription(NumberEntityDescription): """Class describing WLED number entities.""" - value_fn: Callable[[Segment], int | None] + value_fn: Callable[[Segment], float | None] NUMBERS = [ @@ -64,7 +64,7 @@ NUMBERS = [ native_step=1, native_min_value=0, native_max_value=255, - value_fn=lambda segment: int(segment.intensity), + value_fn=lambda segment: segment.intensity, ), ] @@ -100,7 +100,7 @@ class WLEDNumber(WLEDEntity, NumberEntity): """Return True if entity is available.""" try: self.coordinator.data.state.segments[self._segment] - except KeyError: + except IndexError: return False return super().available @@ -133,11 +133,7 @@ def async_update_segments( async_add_entities: AddEntitiesCallback, ) -> None: """Update segments.""" - segment_ids = { - segment.segment_id - for segment in coordinator.data.state.segments.values() - if segment.segment_id is not None - } + segment_ids = {segment.segment_id for segment in coordinator.data.state.segments} new_entities: list[WLEDNumber] = [] diff --git a/homeassistant/components/wled/select.py b/homeassistant/components/wled/select.py index a645b04573c..20b14531ac7 100644 --- a/homeassistant/components/wled/select.py +++ b/homeassistant/components/wled/select.py @@ -4,7 +4,7 @@ from __future__ import annotations from functools import partial -from wled import LiveDataOverride +from wled import Live, Playlist, Preset from 
homeassistant.components.select import SelectEntity from homeassistant.const import EntityCategory @@ -56,17 +56,17 @@ class WLEDLiveOverrideSelect(WLEDEntity, SelectEntity): super().__init__(coordinator=coordinator) self._attr_unique_id = f"{coordinator.data.info.mac_address}_live_override" - self._attr_options = [str(live.value) for live in LiveDataOverride] + self._attr_options = [str(live.value) for live in Live] @property def current_option(self) -> str: """Return the current selected live override.""" - return str(self.coordinator.data.state.live_data_override.value) + return str(self.coordinator.data.state.lor.value) @wled_exception_handler async def async_select_option(self, option: str) -> None: """Set WLED state to the selected live override state.""" - await self.coordinator.wled.live(live=LiveDataOverride(int(option))) + await self.coordinator.wled.live(live=Live(int(option))) class WLEDPresetSelect(WLEDEntity, SelectEntity): @@ -79,9 +79,7 @@ class WLEDPresetSelect(WLEDEntity, SelectEntity): super().__init__(coordinator=coordinator) self._attr_unique_id = f"{coordinator.data.info.mac_address}_preset" - self._attr_options = [ - preset.name for preset in self.coordinator.data.presets.values() - ] + self._attr_options = [preset.name for preset in self.coordinator.data.presets] @property def available(self) -> bool: @@ -91,13 +89,9 @@ class WLEDPresetSelect(WLEDEntity, SelectEntity): @property def current_option(self) -> str | None: """Return the current selected preset.""" - if not self.coordinator.data.state.preset_id: + if not isinstance(self.coordinator.data.state.preset, Preset): return None - if preset := self.coordinator.data.presets.get( - self.coordinator.data.state.preset_id - ): - return preset.name - return None + return self.coordinator.data.state.preset.name @wled_exception_handler async def async_select_option(self, option: str) -> None: @@ -116,7 +110,7 @@ class WLEDPlaylistSelect(WLEDEntity, SelectEntity): self._attr_unique_id = f"{coordinator.data.info.mac_address}_playlist" self._attr_options = [ - playlist.name for playlist in self.coordinator.data.playlists.values() + playlist.name for playlist in self.coordinator.data.playlists ] @property @@ -127,13 +121,9 @@ class WLEDPlaylistSelect(WLEDEntity, SelectEntity): @property def current_option(self) -> str | None: """Return the currently selected playlist.""" - if not self.coordinator.data.state.playlist_id: + if not isinstance(self.coordinator.data.state.playlist, Playlist): return None - if playlist := self.coordinator.data.playlists.get( - self.coordinator.data.state.playlist_id - ): - return playlist.name - return None + return self.coordinator.data.state.playlist.name @wled_exception_handler async def async_select_option(self, option: str) -> None: @@ -160,7 +150,7 @@ class WLEDPaletteSelect(WLEDEntity, SelectEntity): self._attr_unique_id = f"{coordinator.data.info.mac_address}_palette_{segment}" self._attr_options = [ - palette.name for palette in self.coordinator.data.palettes.values() + palette.name for palette in self.coordinator.data.palettes ] self._segment = segment @@ -169,7 +159,7 @@ class WLEDPaletteSelect(WLEDEntity, SelectEntity): """Return True if entity is available.""" try: self.coordinator.data.state.segments[self._segment] - except KeyError: + except IndexError: return False return super().available @@ -177,9 +167,7 @@ class WLEDPaletteSelect(WLEDEntity, SelectEntity): @property def current_option(self) -> str | None: """Return the current selected color palette.""" - return 
self.coordinator.data.palettes[ - int(self.coordinator.data.state.segments[self._segment].palette_id) - ].name + return self.coordinator.data.state.segments[self._segment].palette.name @wled_exception_handler async def async_select_option(self, option: str) -> None: @@ -194,11 +182,7 @@ def async_update_segments( async_add_entities: AddEntitiesCallback, ) -> None: """Update segments.""" - segment_ids = { - segment.segment_id - for segment in coordinator.data.state.segments.values() - if segment.segment_id is not None - } + segment_ids = {segment.segment_id for segment in coordinator.data.state.segments} new_entities: list[WLEDPaletteSelect] = [] diff --git a/homeassistant/components/wled/sensor.py b/homeassistant/components/wled/sensor.py index 4f97c367612..7d18665a085 100644 --- a/homeassistant/components/wled/sensor.py +++ b/homeassistant/components/wled/sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from datetime import datetime +from datetime import datetime, timedelta from wled import Device as WLEDDevice @@ -71,7 +71,7 @@ SENSORS: tuple[WLEDSensorEntityDescription, ...] = ( device_class=SensorDeviceClass.TIMESTAMP, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, - value_fn=lambda device: (utcnow() - device.info.uptime), + value_fn=lambda device: (utcnow() - timedelta(seconds=device.info.uptime)), ), WLEDSensorEntityDescription( key="free_heap", diff --git a/homeassistant/components/wled/strings.json b/homeassistant/components/wled/strings.json index 50dc0129369..9581641f545 100644 --- a/homeassistant/components/wled/strings.json +++ b/homeassistant/components/wled/strings.json @@ -21,7 +21,8 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "cct_unsupported": "This WLED device uses CCT channels, which is not supported by this integration" } }, "options": { diff --git a/homeassistant/components/wled/switch.py b/homeassistant/components/wled/switch.py index 643834dcdec..7ec75b956c0 100644 --- a/homeassistant/components/wled/switch.py +++ b/homeassistant/components/wled/switch.py @@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import WLEDConfigEntry -from .const import ATTR_DURATION, ATTR_TARGET_BRIGHTNESS, ATTR_UDP_PORT +from .const import ATTR_DURATION, ATTR_FADE, ATTR_TARGET_BRIGHTNESS, ATTR_UDP_PORT from .coordinator import WLEDDataUpdateCoordinator from .entity import WLEDEntity from .helpers import wled_exception_handler @@ -62,6 +62,7 @@ class WLEDNightlightSwitch(WLEDEntity, SwitchEntity): state = self.coordinator.data.state return { ATTR_DURATION: state.nightlight.duration, + ATTR_FADE: state.nightlight.fade, ATTR_TARGET_BRIGHTNESS: state.nightlight.target_brightness, } @@ -170,7 +171,7 @@ class WLEDReverseSwitch(WLEDEntity, SwitchEntity): """Return True if entity is available.""" try: self.coordinator.data.state.segments[self._segment] - except KeyError: + except IndexError: return False return super().available @@ -198,11 +199,7 @@ def async_update_segments( async_add_entities: AddEntitiesCallback, ) -> None: """Update segments.""" - segment_ids = { - segment.segment_id - for segment in coordinator.data.state.segments.values() - if segment.segment_id is not None - } + segment_ids = {segment.segment_id for segment in coordinator.data.state.segments} new_entities: list[WLEDReverseSwitch] = [] diff --git a/homeassistant/components/wled/update.py b/homeassistant/components/wled/update.py index 384b394ac50..05df5fcf54f 100644 --- a/homeassistant/components/wled/update.py +++ b/homeassistant/components/wled/update.py @@ -12,8 +12,8 @@ from homeassistant.components.update import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import WLED_KEY, WLEDConfigEntry -from .coordinator import WLEDDataUpdateCoordinator, WLEDReleasesDataUpdateCoordinator +from . import WLEDConfigEntry +from .coordinator import WLEDDataUpdateCoordinator from .entity import WLEDEntity from .helpers import wled_exception_handler @@ -24,7 +24,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up WLED update based on a config entry.""" - async_add_entities([WLEDUpdateEntity(entry.runtime_data, hass.data[WLED_KEY])]) + async_add_entities([WLEDUpdateEntity(entry.runtime_data)]) class WLEDUpdateEntity(WLEDEntity, UpdateEntity): @@ -36,33 +36,11 @@ class WLEDUpdateEntity(WLEDEntity, UpdateEntity): ) _attr_title = "WLED" - def __init__( - self, - coordinator: WLEDDataUpdateCoordinator, - releases_coordinator: WLEDReleasesDataUpdateCoordinator, - ) -> None: + def __init__(self, coordinator: WLEDDataUpdateCoordinator) -> None: """Initialize the update entity.""" super().__init__(coordinator=coordinator) - self.releases_coordinator = releases_coordinator self._attr_unique_id = coordinator.data.info.mac_address - async def async_added_to_hass(self) -> None: - """When entity is added to hass. - - Register extra update listener for the releases coordinator. - """ - await super().async_added_to_hass() - self.async_on_remove( - self.releases_coordinator.async_add_listener( - self._handle_coordinator_update - ) - ) - - @property - def available(self) -> bool: - """Return if entity is available.""" - return super().available and self.releases_coordinator.last_update_success - @property def installed_version(self) -> str | None: """Version currently installed and in use.""" @@ -76,17 +54,17 @@ class WLEDUpdateEntity(WLEDEntity, UpdateEntity): # If we already run a pre-release, we consider being on the beta channel. 
# Offer beta version upgrade, unless stable is newer if ( - (beta := self.releases_coordinator.data.beta) is not None + (beta := self.coordinator.data.info.version_latest_beta) is not None and (current := self.coordinator.data.info.version) is not None and (current.alpha or current.beta or current.release_candidate) and ( - (stable := self.releases_coordinator.data.stable) is None - or (stable is not None and stable < beta and current > stable) + (stable := self.coordinator.data.info.version_latest_stable) is None + or (stable is not None and stable < beta) ) ): return str(beta) - if (stable := self.releases_coordinator.data.stable) is not None: + if (stable := self.coordinator.data.info.version_latest_stable) is not None: return str(stable) return None diff --git a/homeassistant/components/wmspro/__init__.py b/homeassistant/components/wmspro/__init__.py deleted file mode 100644 index 37bf1495a56..00000000000 --- a/homeassistant/components/wmspro/__init__.py +++ /dev/null @@ -1,66 +0,0 @@ -"""The WMS WebControl pro API integration.""" - -from __future__ import annotations - -import aiohttp -from wmspro.webcontrol import WebControlPro - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.typing import UNDEFINED - -from .const import DOMAIN, MANUFACTURER - -PLATFORMS: list[Platform] = [Platform.COVER, Platform.LIGHT, Platform.SCENE] - -type WebControlProConfigEntry = ConfigEntry[WebControlPro] - - -async def async_setup_entry( - hass: HomeAssistant, entry: WebControlProConfigEntry -) -> bool: - """Set up wmspro from a config entry.""" - host = entry.data[CONF_HOST] - session = async_get_clientsession(hass) - hub = WebControlPro(host, session) - - try: - await hub.ping() - except aiohttp.ClientError as err: - raise ConfigEntryNotReady(f"Error while connecting to {host}") from err - - entry.runtime_data = hub - - device_registry = dr.async_get(hass) - device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - connections={(dr.CONNECTION_NETWORK_MAC, entry.unique_id)} - if entry.unique_id - else UNDEFINED, - identifiers={(DOMAIN, entry.entry_id)}, - manufacturer=MANUFACTURER, - model="WMS WebControl pro", - configuration_url=f"http://{hub.host}/system", - ) - - try: - await hub.refresh() - for dest in hub.dests.values(): - await dest.refresh() - except aiohttp.ClientError as err: - raise ConfigEntryNotReady(f"Error while refreshing from {host}") from err - - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - - return True - - -async def async_unload_entry( - hass: HomeAssistant, entry: WebControlProConfigEntry -) -> bool: - """Unload a config entry.""" - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/wmspro/config_flow.py b/homeassistant/components/wmspro/config_flow.py deleted file mode 100644 index 2ce58ec9eca..00000000000 --- a/homeassistant/components/wmspro/config_flow.py +++ /dev/null @@ -1,111 +0,0 @@ -"""Config flow for WMS WebControl pro API integration.""" - -from __future__ import annotations - -import ipaddress -import logging -from typing import Any - -import aiohttp -import voluptuous as vol -from wmspro.webcontrol import WebControlPro - -from homeassistant.components 
import dhcp -from homeassistant.components.dhcp import DhcpServiceInfo -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import format_mac - -from .const import DOMAIN, SUGGESTED_HOST - -_LOGGER = logging.getLogger(__name__) - -STEP_USER_DATA_SCHEMA = vol.Schema( - { - vol.Required(CONF_HOST): str, - } -) - - -class WebControlProConfigFlow(ConfigFlow, domain=DOMAIN): - """Handle a config flow for wmspro.""" - - VERSION = 1 - - async def async_step_dhcp( - self, discovery_info: dhcp.DhcpServiceInfo - ) -> ConfigFlowResult: - """Handle the DHCP discovery step.""" - unique_id = format_mac(discovery_info.macaddress) - await self.async_set_unique_id(unique_id) - - entry = self.hass.config_entries.async_entry_for_domain_unique_id( - DOMAIN, unique_id - ) - if entry: - try: # Check if current host is a valid IP address - ipaddress.ip_address(entry.data[CONF_HOST]) - except ValueError: # Do not touch name-based host - return self.async_abort(reason="already_configured") - else: # Update existing host with new IP address - self._abort_if_unique_id_configured( - updates={CONF_HOST: discovery_info.ip} - ) - - for entry in self.hass.config_entries.async_entries(DOMAIN): - if not entry.unique_id and entry.data[CONF_HOST] in ( - discovery_info.hostname, - discovery_info.ip, - ): - self.hass.config_entries.async_update_entry(entry, unique_id=unique_id) - return self.async_abort(reason="already_configured") - - return await self.async_step_user() - - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle the user-based step.""" - errors: dict[str, str] = {} - if user_input is not None: - self._async_abort_entries_match(user_input) - host = user_input[CONF_HOST] - session = async_get_clientsession(self.hass) - hub = WebControlPro(host, session) - try: - pong = await hub.ping() - except aiohttp.ClientError: - errors["base"] = "cannot_connect" - except Exception: - _LOGGER.exception("Unexpected exception") - errors["base"] = "unknown" - else: - if not pong: - errors["base"] = "cannot_connect" - else: - await hub.refresh() - rooms = set(hub.rooms.keys()) - for entry in self.hass.config_entries.async_loaded_entries(DOMAIN): - if ( - entry.runtime_data - and entry.runtime_data.rooms - and set(entry.runtime_data.rooms.keys()) == rooms - ): - return self.async_abort(reason="already_configured") - return self.async_create_entry(title=host, data=user_input) - - if self.source == dhcp.DOMAIN: - discovery_info: DhcpServiceInfo = self.init_data - data_values = {CONF_HOST: discovery_info.ip} - else: - data_values = {CONF_HOST: SUGGESTED_HOST} - - self.context["title_placeholders"] = data_values - data_schema = self.add_suggested_values_to_schema( - STEP_USER_DATA_SCHEMA, data_values - ) - - return self.async_show_form( - step_id="user", data_schema=data_schema, errors=errors - ) diff --git a/homeassistant/components/wmspro/const.py b/homeassistant/components/wmspro/const.py deleted file mode 100644 index d92534d9e46..00000000000 --- a/homeassistant/components/wmspro/const.py +++ /dev/null @@ -1,9 +0,0 @@ -"""Constants for the WMS WebControl pro API integration.""" - -DOMAIN = "wmspro" -SUGGESTED_HOST = "webcontrol" - -ATTRIBUTION = "Data provided by WMS WebControl pro API" -MANUFACTURER = "WAREMA Renkhoff SE" - -BRIGHTNESS_SCALE = (1, 100) diff --git 
a/homeassistant/components/wmspro/cover.py b/homeassistant/components/wmspro/cover.py deleted file mode 100644 index a36b34642b7..00000000000 --- a/homeassistant/components/wmspro/cover.py +++ /dev/null @@ -1,77 +0,0 @@ -"""Support for covers connected with WMS WebControl pro.""" - -from __future__ import annotations - -from datetime import timedelta -from typing import Any - -from wmspro.const import ( - WMS_WebControl_pro_API_actionDescription, - WMS_WebControl_pro_API_actionType, -) - -from homeassistant.components.cover import ATTR_POSITION, CoverDeviceClass, CoverEntity -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . import WebControlProConfigEntry -from .entity import WebControlProGenericEntity - -SCAN_INTERVAL = timedelta(seconds=5) -PARALLEL_UPDATES = 1 - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: WebControlProConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the WMS based covers from a config entry.""" - hub = config_entry.runtime_data - - entities: list[WebControlProGenericEntity] = [] - for dest in hub.dests.values(): - if dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive): - entities.append(WebControlProAwning(config_entry.entry_id, dest)) # noqa: PERF401 - - async_add_entities(entities) - - -class WebControlProAwning(WebControlProGenericEntity, CoverEntity): - """Representation of a WMS based awning.""" - - _attr_device_class = CoverDeviceClass.AWNING - - @property - def current_cover_position(self) -> int | None: - """Return current position of cover.""" - action = self._dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive) - return 100 - action["percentage"] - - async def async_set_cover_position(self, **kwargs: Any) -> None: - """Move the cover to a specific position.""" - action = self._dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive) - await action(percentage=100 - kwargs[ATTR_POSITION]) - - @property - def is_closed(self) -> bool | None: - """Return if the cover is closed.""" - return self.current_cover_position == 0 - - async def async_open_cover(self, **kwargs: Any) -> None: - """Open the cover.""" - action = self._dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive) - await action(percentage=0) - - async def async_close_cover(self, **kwargs: Any) -> None: - """Close the cover.""" - action = self._dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive) - await action(percentage=100) - - async def async_stop_cover(self, **kwargs: Any) -> None: - """Stop the device if in motion.""" - action = self._dest.action( - WMS_WebControl_pro_API_actionDescription.ManualCommand, - WMS_WebControl_pro_API_actionType.Stop, - ) - await action() diff --git a/homeassistant/components/wmspro/diagnostics.py b/homeassistant/components/wmspro/diagnostics.py deleted file mode 100644 index c35cecc5ab5..00000000000 --- a/homeassistant/components/wmspro/diagnostics.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Diagnostics support for WMS WebControl pro API integration.""" - -from __future__ import annotations - -from typing import Any - -from homeassistant.core import HomeAssistant - -from . 
import WebControlProConfigEntry - - -async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: WebControlProConfigEntry -) -> dict[str, Any]: - """Return diagnostics for a config entry.""" - return entry.runtime_data.diag() diff --git a/homeassistant/components/wmspro/entity.py b/homeassistant/components/wmspro/entity.py deleted file mode 100644 index 0bbbc69a294..00000000000 --- a/homeassistant/components/wmspro/entity.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Generic entity for the WMS WebControl pro API integration.""" - -from __future__ import annotations - -from wmspro.destination import Destination - -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity - -from .const import ATTRIBUTION, DOMAIN, MANUFACTURER - - -class WebControlProGenericEntity(Entity): - """Foundation of all WMS based entities.""" - - _attr_attribution = ATTRIBUTION - _attr_has_entity_name = True - _attr_name = None - - def __init__(self, config_entry_id: str, dest: Destination) -> None: - """Initialize the entity with destination channel.""" - dest_id_str = str(dest.id) - self._dest = dest - self._attr_unique_id = dest_id_str - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, dest_id_str)}, - manufacturer=MANUFACTURER, - model=dest.animationType.name, - name=dest.name, - serial_number=dest_id_str, - suggested_area=dest.room.name, - via_device=(DOMAIN, config_entry_id), - configuration_url=f"http://{dest.host}/control", - ) - - async def async_update(self) -> None: - """Update the entity.""" - await self._dest.refresh() - - @property - def available(self) -> bool: - """Return if entity is available.""" - return self._dest.available diff --git a/homeassistant/components/wmspro/light.py b/homeassistant/components/wmspro/light.py deleted file mode 100644 index 9242982bcf9..00000000000 --- a/homeassistant/components/wmspro/light.py +++ /dev/null @@ -1,89 +0,0 @@ -"""Support for lights connected with WMS WebControl pro.""" - -from __future__ import annotations - -from datetime import timedelta -from typing import Any - -from wmspro.const import WMS_WebControl_pro_API_actionDescription - -from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.color import brightness_to_value, value_to_brightness - -from . 
import WebControlProConfigEntry -from .const import BRIGHTNESS_SCALE -from .entity import WebControlProGenericEntity - -SCAN_INTERVAL = timedelta(seconds=5) -PARALLEL_UPDATES = 1 - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: WebControlProConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the WMS based lights from a config entry.""" - hub = config_entry.runtime_data - - entities: list[WebControlProGenericEntity] = [] - for dest in hub.dests.values(): - if dest.action(WMS_WebControl_pro_API_actionDescription.LightDimming): - entities.append(WebControlProDimmer(config_entry.entry_id, dest)) - elif dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch): - entities.append(WebControlProLight(config_entry.entry_id, dest)) - - async_add_entities(entities) - - -class WebControlProLight(WebControlProGenericEntity, LightEntity): - """Representation of a WMS based light.""" - - _attr_color_mode = ColorMode.ONOFF - _attr_supported_color_modes = {ColorMode.ONOFF} - - @property - def is_on(self) -> bool: - """Return true if light is on.""" - action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch) - return action["onOffState"] - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the light on.""" - action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch) - await action(onOffState=True) - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the light off.""" - action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch) - await action(onOffState=False) - - -class WebControlProDimmer(WebControlProLight): - """Representation of a WMS-based dimmable light.""" - - _attr_color_mode = ColorMode.BRIGHTNESS - _attr_supported_color_modes = {ColorMode.BRIGHTNESS} - - @property - def brightness(self) -> int: - """Return the brightness of this light between 1..255.""" - action = self._dest.action( - WMS_WebControl_pro_API_actionDescription.LightDimming - ) - return value_to_brightness(BRIGHTNESS_SCALE, action["percentage"]) - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the dimmer on.""" - if ATTR_BRIGHTNESS not in kwargs: - await super().async_turn_on(**kwargs) - return - - action = self._dest.action( - WMS_WebControl_pro_API_actionDescription.LightDimming - ) - await action( - percentage=brightness_to_value(BRIGHTNESS_SCALE, kwargs[ATTR_BRIGHTNESS]) - ) diff --git a/homeassistant/components/wmspro/manifest.json b/homeassistant/components/wmspro/manifest.json deleted file mode 100644 index dd65be3e7e7..00000000000 --- a/homeassistant/components/wmspro/manifest.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "domain": "wmspro", - "name": "WMS WebControl pro", - "codeowners": ["@mback2k"], - "config_flow": true, - "dhcp": [ - { - "macaddress": "0023D5*" - }, - { - "registered_devices": true - } - ], - "documentation": "https://www.home-assistant.io/integrations/wmspro", - "integration_type": "hub", - "iot_class": "local_polling", - "requirements": ["pywmspro==0.2.1"] -} diff --git a/homeassistant/components/wmspro/scene.py b/homeassistant/components/wmspro/scene.py deleted file mode 100644 index de18106b7f0..00000000000 --- a/homeassistant/components/wmspro/scene.py +++ /dev/null @@ -1,64 +0,0 @@ -"""Support for scenes provided by WMS WebControl pro.""" - -from __future__ import annotations - -from typing import Any - -from wmspro.scene import Scene as WMS_Scene - -from homeassistant.components.scene import Scene -from homeassistant.core import 
HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . import WebControlProConfigEntry -from .const import ATTRIBUTION, DOMAIN, MANUFACTURER - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: WebControlProConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the WMS based scenes from a config entry.""" - hub = config_entry.runtime_data - - async_add_entities( - WebControlProScene(config_entry.entry_id, scene) - for scene in hub.scenes.values() - ) - - -class WebControlProScene(Scene): - """Representation of a WMS based scene.""" - - _attr_attribution = ATTRIBUTION - _attr_has_entity_name = True - - def __init__(self, config_entry_id: str, scene: WMS_Scene) -> None: - """Initialize the entity with the configured scene.""" - super().__init__() - - # Scene information - self._scene = scene - self._attr_name = scene.name - self._attr_unique_id = str(scene.id) - - # Room information - room = scene.room - room_name = room.name - room_id_str = str(room.id) - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, room_id_str)}, - manufacturer=MANUFACTURER, - model="Room", - name=room_name, - serial_number=room_id_str, - suggested_area=room_name, - via_device=(DOMAIN, config_entry_id), - configuration_url=f"http://{scene.host}/control", - ) - - async def async_activate(self, **kwargs: Any) -> None: - """Activate scene. Try to get entities into requested state.""" - await self._scene() diff --git a/homeassistant/components/wmspro/strings.json b/homeassistant/components/wmspro/strings.json deleted file mode 100644 index 9b6d129905b..00000000000 --- a/homeassistant/components/wmspro/strings.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "config": { - "flow_title": "{host}", - "step": { - "user": { - "data": { - "host": "[%key:common::config_flow::data::host%]" - }, - "data_description": { - "host": "The hostname or IP address of your WMS WebControl pro." 
- } - } - }, - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", - "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "unknown": "[%key:common::config_flow::error::unknown%]" - }, - "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "unknown": "[%key:common::config_flow::error::unknown%]" - } - } -} diff --git a/homeassistant/components/wolflink/__init__.py b/homeassistant/components/wolflink/__init__.py index 49197ed7d26..ad1759ba2cb 100644 --- a/homeassistant/components/wolflink/__init__.py +++ b/homeassistant/components/wolflink/__init__.py @@ -11,7 +11,6 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -31,7 +30,6 @@ PLATFORMS = [Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Wolf SmartSet Service from a config entry.""" - username = entry.data[CONF_USERNAME] password = entry.data[CONF_PASSWORD] device_name = entry.data[DEVICE_NAME] @@ -100,7 +98,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, - config_entry=entry, name=DOMAIN, update_method=async_update_data, update_interval=timedelta(seconds=60), @@ -128,32 +125,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok -async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Migrate old entry.""" - # convert unique_id to string - if entry.version == 1 and entry.minor_version == 1: - if isinstance(entry.unique_id, int): - hass.config_entries.async_update_entry( - entry, unique_id=str(entry.unique_id) - ) - device_registry = dr.async_get(hass) - for device in dr.async_entries_for_config_entry( - device_registry, entry.entry_id - ): - new_identifiers = set() - for identifier in device.identifiers: - if identifier[0] == DOMAIN: - new_identifiers.add((DOMAIN, str(identifier[1]))) - else: - new_identifiers.add(identifier) - device_registry.async_update_device( - device.id, new_identifiers=new_identifiers - ) - hass.config_entries.async_update_entry(entry, minor_version=2) - - return True - - async def fetch_parameters(client: WolfClient, gateway_id: int, device_id: int): """Fetch all available parameters with usage of WolfClient. 
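The wolflink hunk above removes `async_migrate_entry`, the hook that upgraded version 1.1 config entries by stringifying an integer unique ID, rewriting the matching device-registry identifiers, and bumping `minor_version` to 2. The condensed sketch below closely mirrors that removed function, with explanatory comments added; it is illustrative only, the `DOMAIN` value is a placeholder rather than a real integration, and it is not part of the diff itself.

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr

DOMAIN = "example"  # placeholder domain used only for this sketch


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Migrate a version 1.1 entry whose unique_id was stored as an int."""
    if entry.version == 1 and entry.minor_version == 1:
        if isinstance(entry.unique_id, int):
            # Re-save the entry with a string unique ID, matching what the
            # updated config flow writes via str(device_id).
            hass.config_entries.async_update_entry(
                entry, unique_id=str(entry.unique_id)
            )
        # Rewrite device identifiers so (DOMAIN, 1234) becomes (DOMAIN, "1234").
        device_registry = dr.async_get(hass)
        for device in dr.async_entries_for_config_entry(
            device_registry, entry.entry_id
        ):
            new_identifiers = set()
            for identifier in device.identifiers:
                if identifier[0] == DOMAIN:
                    new_identifiers.add((DOMAIN, str(identifier[1])))
                else:
                    new_identifiers.add(identifier)
            device_registry.async_update_device(
                device.id, new_identifiers=new_identifiers
            )
        # Bump minor_version so the migration runs only once per entry.
        hass.config_entries.async_update_entry(entry, minor_version=2)

    return True

In the config_flow.py hunk that follows, the counterpart change appears on the removed side: `MINOR_VERSION = 2` and `await self.async_set_unique_id(str(device_id))` are what made newly created entries use string IDs, which is why entries created before that change needed the one-time rewrite sketched here.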
diff --git a/homeassistant/components/wolflink/config_flow.py b/homeassistant/components/wolflink/config_flow.py index 54c6db4cb07..6e218bfd1ce 100644 --- a/homeassistant/components/wolflink/config_flow.py +++ b/homeassistant/components/wolflink/config_flow.py @@ -4,11 +4,10 @@ import logging from httpcore import ConnectError import voluptuous as vol -from wolf_comm.models import Device from wolf_comm.token_auth import InvalidAuth from wolf_comm.wolf_client import WolfClient -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import DEVICE_GATEWAY, DEVICE_ID, DEVICE_NAME, DOMAIN @@ -24,18 +23,14 @@ class WolfLinkConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Wolf SmartSet Service.""" VERSION = 1 - MINOR_VERSION = 2 - - fetched_systems: list[Device] def __init__(self) -> None: """Initialize with empty username and password.""" - self.username: str | None = None - self.password: str | None = None + self.username = None + self.password = None + self.fetched_systems = None - async def async_step_user( - self, user_input: dict[str, str] | None = None - ) -> ConfigFlowResult: + async def async_step_user(self, user_input=None): """Handle the initial step to get connection parameters.""" errors = {} if user_input is not None: @@ -59,18 +54,16 @@ class WolfLinkConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=USER_SCHEMA, errors=errors ) - async def async_step_device( - self, user_input: dict[str, str] | None = None - ) -> ConfigFlowResult: + async def async_step_device(self, user_input=None): """Allow user to select device from devices connected to specified account.""" - errors: dict[str, str] = {} + errors = {} if user_input is not None: device_name = user_input[DEVICE_NAME] system = [ device for device in self.fetched_systems if device.name == device_name ] device_id = system[0].id - await self.async_set_unique_id(str(device_id)) + await self.async_set_unique_id(device_id) self._abort_if_unique_id_configured() return self.async_create_entry( title=user_input[DEVICE_NAME], diff --git a/homeassistant/components/wolflink/manifest.json b/homeassistant/components/wolflink/manifest.json index 4bfc0e6dd83..6a98dcd6ca4 100644 --- a/homeassistant/components/wolflink/manifest.json +++ b/homeassistant/components/wolflink/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/wolflink", "iot_class": "cloud_polling", "loggers": ["wolf_comm"], - "requirements": ["wolf-comm==0.0.15"] + "requirements": ["wolf-comm==0.0.9"] } diff --git a/homeassistant/components/wolflink/sensor.py b/homeassistant/components/wolflink/sensor.py index 1f6e6c42464..3179a9ff6bd 100644 --- a/homeassistant/components/wolflink/sensor.py +++ b/homeassistant/components/wolflink/sensor.py @@ -63,7 +63,7 @@ class WolfLinkSensor(CoordinatorEntity, SensorEntity): self._attr_unique_id = f"{device_id}:{wolf_object.parameter_id}" self._state = None self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, str(device_id))}, + identifiers={(DOMAIN, device_id)}, configuration_url="https://www.wolf-smartset.com/", manufacturer=MANUFACTURER, ) diff --git a/homeassistant/components/workday/binary_sensor.py b/homeassistant/components/workday/binary_sensor.py index f4a2541a1d7..5df8e6c3d75 100644 --- a/homeassistant/components/workday/binary_sensor.py +++ b/homeassistant/components/workday/binary_sensor.py @@ -6,7 +6,6 @@ from 
datetime import date, datetime, timedelta from typing import Final from holidays import ( - PUBLIC, HolidayBase, __version__ as python_holidays_version, country_holidays, @@ -36,7 +35,6 @@ from homeassistant.util import dt as dt_util, slugify from .const import ( ALLOWED_DAYS, CONF_ADD_HOLIDAYS, - CONF_CATEGORY, CONF_EXCLUDES, CONF_OFFSET, CONF_PROVINCE, @@ -71,28 +69,17 @@ def validate_dates(holiday_list: list[str]) -> list[str]: def _get_obj_holidays( - country: str | None, - province: str | None, - year: int, - language: str | None, - categories: list[str] | None, + country: str | None, province: str | None, year: int, language: str | None ) -> HolidayBase: """Get the object for the requested country and year.""" if not country: return HolidayBase() - set_categories = None - if categories: - category_list = [PUBLIC] - category_list.extend(categories) - set_categories = tuple(category_list) - obj_holidays: HolidayBase = country_holidays( country, subdiv=province, - years=[year, year + 1], + years=year, language=language, - categories=set_categories, ) if (supported_languages := obj_holidays.supported_languages) and language == "en": for lang in supported_languages: @@ -102,7 +89,6 @@ def _get_obj_holidays( subdiv=province, years=year, language=lang, - categories=set_categories, ) LOGGER.debug("Changing language from %s to %s", language, lang) return obj_holidays @@ -121,15 +107,13 @@ async def async_setup_entry( sensor_name: str = entry.options[CONF_NAME] workdays: list[str] = entry.options[CONF_WORKDAYS] language: str | None = entry.options.get(CONF_LANGUAGE) - categories: list[str] | None = entry.options.get(CONF_CATEGORY) year: int = (dt_util.now() + timedelta(days=days_offset)).year obj_holidays: HolidayBase = await hass.async_add_executor_job( - _get_obj_holidays, country, province, year, language, categories + _get_obj_holidays, country, province, year, language ) calc_add_holidays: list[str] = validate_dates(add_holidays) calc_remove_holidays: list[str] = validate_dates(remove_holidays) - next_year = dt_util.now().year + 1 # Add custom holidays try: @@ -153,28 +137,26 @@ async def async_setup_entry( LOGGER.debug("Removed %s by name '%s'", holiday, remove_holiday) except KeyError as unmatched: LOGGER.warning("No holiday found matching %s", unmatched) - if _date := dt_util.parse_date(remove_holiday): - if _date.year <= next_year: - # Only check and raise issues for current and next year - async_create_issue( - hass, - DOMAIN, - f"bad_date_holiday-{entry.entry_id}-{slugify(remove_holiday)}", - is_fixable=True, - is_persistent=False, - severity=IssueSeverity.WARNING, - translation_key="bad_date_holiday", - translation_placeholders={ - CONF_COUNTRY: country if country else "-", - "title": entry.title, - CONF_REMOVE_HOLIDAYS: remove_holiday, - }, - data={ - "entry_id": entry.entry_id, - "country": country, - "named_holiday": remove_holiday, - }, - ) + if dt_util.parse_date(remove_holiday): + async_create_issue( + hass, + DOMAIN, + f"bad_date_holiday-{entry.entry_id}-{slugify(remove_holiday)}", + is_fixable=True, + is_persistent=False, + severity=IssueSeverity.WARNING, + translation_key="bad_date_holiday", + translation_placeholders={ + CONF_COUNTRY: country if country else "-", + "title": entry.title, + CONF_REMOVE_HOLIDAYS: remove_holiday, + }, + data={ + "entry_id": entry.entry_id, + "country": country, + "named_holiday": remove_holiday, + }, + ) else: async_create_issue( hass, diff --git a/homeassistant/components/workday/config_flow.py 
b/homeassistant/components/workday/config_flow.py index 4d93fccb1a7..a66a9c51588 100644 --- a/homeassistant/components/workday/config_flow.py +++ b/homeassistant/components/workday/config_flow.py @@ -5,14 +5,14 @@ from __future__ import annotations from functools import partial from typing import Any -from holidays import PUBLIC, HolidayBase, country_holidays, list_supported_countries +from holidays import HolidayBase, country_holidays, list_supported_countries import voluptuous as vol from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlow, + OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE, CONF_NAME from homeassistant.core import callback @@ -36,7 +36,6 @@ from homeassistant.util import dt as dt_util from .const import ( ALLOWED_DAYS, CONF_ADD_HOLIDAYS, - CONF_CATEGORY, CONF_EXCLUDES, CONF_OFFSET, CONF_PROVINCE, @@ -87,29 +86,7 @@ def add_province_and_language_to_schema( ), } - category_schema = {} - # PUBLIC will always be included and can therefore not be set/removed - _categories = [x for x in _country.supported_categories if x != PUBLIC] - if _categories: - category_schema = { - vol.Optional(CONF_CATEGORY): SelectSelector( - SelectSelectorConfig( - options=_categories, - mode=SelectSelectorMode.DROPDOWN, - multiple=True, - translation_key=CONF_CATEGORY, - ) - ), - } - - return vol.Schema( - { - **DATA_SCHEMA_OPT.schema, - **language_schema, - **province_schema, - **category_schema, - } - ) + return vol.Schema({**DATA_SCHEMA_OPT.schema, **language_schema, **province_schema}) def _is_valid_date_range(check_date: str, error: type[HomeAssistantError]) -> bool: @@ -219,7 +196,7 @@ class WorkdayConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> WorkdayOptionsFlowHandler: """Get the options flow for this handler.""" - return WorkdayOptionsFlowHandler() + return WorkdayOptionsFlowHandler(config_entry) async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -279,8 +256,6 @@ class WorkdayConfigFlow(ConfigFlow, domain=DOMAIN): CONF_REMOVE_HOLIDAYS: combined_input[CONF_REMOVE_HOLIDAYS], CONF_PROVINCE: combined_input.get(CONF_PROVINCE), } - if CONF_CATEGORY in combined_input: - abort_match[CONF_CATEGORY] = combined_input[CONF_CATEGORY] LOGGER.debug("abort_check in options with %s", combined_input) self._async_abort_entries_match(abort_match) @@ -305,12 +280,12 @@ class WorkdayConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, description_placeholders={ "name": self.data[CONF_NAME], - "country": self.data.get(CONF_COUNTRY, "-"), + "country": self.data.get(CONF_COUNTRY), }, ) -class WorkdayOptionsFlowHandler(OptionsFlow): +class WorkdayOptionsFlowHandler(OptionsFlowWithConfigEntry): """Handle Workday options.""" async def async_step_init( @@ -320,7 +295,7 @@ class WorkdayOptionsFlowHandler(OptionsFlow): errors: dict[str, str] = {} if user_input is not None: - combined_input: dict[str, Any] = {**self.config_entry.options, **user_input} + combined_input: dict[str, Any] = {**self.options, **user_input} if CONF_PROVINCE not in user_input: # Province not present, delete old value (if present) too combined_input.pop(CONF_PROVINCE, None) @@ -339,40 +314,40 @@ class WorkdayOptionsFlowHandler(OptionsFlow): errors["remove_holidays"] = "remove_holiday_range_error" else: LOGGER.debug("abort_check in options with %s", combined_input) - abort_match = { - CONF_COUNTRY: self.config_entry.options.get(CONF_COUNTRY), - CONF_EXCLUDES: combined_input[CONF_EXCLUDES], - CONF_OFFSET: 
combined_input[CONF_OFFSET], - CONF_WORKDAYS: combined_input[CONF_WORKDAYS], - CONF_ADD_HOLIDAYS: combined_input[CONF_ADD_HOLIDAYS], - CONF_REMOVE_HOLIDAYS: combined_input[CONF_REMOVE_HOLIDAYS], - CONF_PROVINCE: combined_input.get(CONF_PROVINCE), - } - if CONF_CATEGORY in combined_input: - abort_match[CONF_CATEGORY] = combined_input[CONF_CATEGORY] try: - self._async_abort_entries_match(abort_match) + self._async_abort_entries_match( + { + CONF_COUNTRY: self._config_entry.options.get(CONF_COUNTRY), + CONF_EXCLUDES: combined_input[CONF_EXCLUDES], + CONF_OFFSET: combined_input[CONF_OFFSET], + CONF_WORKDAYS: combined_input[CONF_WORKDAYS], + CONF_ADD_HOLIDAYS: combined_input[CONF_ADD_HOLIDAYS], + CONF_REMOVE_HOLIDAYS: combined_input[CONF_REMOVE_HOLIDAYS], + CONF_PROVINCE: combined_input.get(CONF_PROVINCE), + } + ) except AbortFlow as err: errors = {"base": err.reason} else: return self.async_create_entry(data=combined_input) - options = self.config_entry.options schema: vol.Schema = await self.hass.async_add_executor_job( add_province_and_language_to_schema, DATA_SCHEMA_OPT, - options.get(CONF_COUNTRY), + self.options.get(CONF_COUNTRY), ) - new_schema = self.add_suggested_values_to_schema(schema, user_input or options) + new_schema = self.add_suggested_values_to_schema( + schema, user_input or self.options + ) LOGGER.debug("Errors have occurred in options %s", errors) return self.async_show_form( step_id="init", data_schema=new_schema, errors=errors, description_placeholders={ - "name": options[CONF_NAME], - "country": options.get(CONF_COUNTRY), + "name": self.options[CONF_NAME], + "country": self.options.get(CONF_COUNTRY), }, ) diff --git a/homeassistant/components/workday/const.py b/homeassistant/components/workday/const.py index 76580ae642f..6a46f1e824b 100644 --- a/homeassistant/components/workday/const.py +++ b/homeassistant/components/workday/const.py @@ -19,7 +19,6 @@ CONF_EXCLUDES = "excludes" CONF_OFFSET = "days_offset" CONF_ADD_HOLIDAYS = "add_holidays" CONF_REMOVE_HOLIDAYS = "remove_holidays" -CONF_CATEGORY = "category" # By default, Monday - Friday are workdays DEFAULT_WORKDAYS = ["mon", "tue", "wed", "thu", "fri"] diff --git a/homeassistant/components/workday/diagnostics.py b/homeassistant/components/workday/diagnostics.py deleted file mode 100644 index 84e5073ca5b..00000000000 --- a/homeassistant/components/workday/diagnostics.py +++ /dev/null @@ -1,18 +0,0 @@ -"""Diagnostics support for Workday.""" - -from __future__ import annotations - -from typing import Any - -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant - - -async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry -) -> dict[str, Any]: - """Return diagnostics for a config entry.""" - - return { - "config_entry": entry, - } diff --git a/homeassistant/components/workday/icons.json b/homeassistant/components/workday/icons.json index ec5c64dce97..10d3c93a288 100644 --- a/homeassistant/components/workday/icons.json +++ b/homeassistant/components/workday/icons.json @@ -1,7 +1,5 @@ { "services": { - "check_date": { - "service": "mdi:calendar-check" - } + "check_date": "mdi:calendar-check" } } diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index b02db734729..1148f46e2d1 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - 
"requirements": ["holidays==0.60"] + "requirements": ["holidays==0.51"] } diff --git a/homeassistant/components/workday/strings.json b/homeassistant/components/workday/strings.json index f3b966e28ea..0e618beaf82 100644 --- a/homeassistant/components/workday/strings.json +++ b/homeassistant/components/workday/strings.json @@ -20,8 +20,7 @@ "add_holidays": "Add holidays", "remove_holidays": "Remove Holidays", "province": "Subdivision of country", - "language": "Language for named holidays", - "category": "Additional category as holiday" + "language": "Language for named holidays" }, "data_description": { "excludes": "List of workdays to exclude, notice the keyword `holiday` and read the documentation on how to use it correctly", @@ -30,8 +29,7 @@ "add_holidays": "Add custom holidays as YYYY-MM-DD or as range using `,` as separator", "remove_holidays": "Remove holidays as YYYY-MM-DD, as range using `,` as separator or by using partial of name", "province": "State, territory, province or region of country", - "language": "Language to use when configuring named holiday exclusions", - "category": "Select additional categories to include as holidays" + "language": "Language to use when configuring named holiday exclusions" } } }, @@ -53,8 +51,7 @@ "add_holidays": "[%key:component::workday::config::step::options::data::add_holidays%]", "remove_holidays": "[%key:component::workday::config::step::options::data::remove_holidays%]", "province": "[%key:component::workday::config::step::options::data::province%]", - "language": "[%key:component::workday::config::step::options::data::language%]", - "category": "[%key:component::workday::config::step::options::data::category%]" + "language": "[%key:component::workday::config::step::options::data::language%]" }, "data_description": { "excludes": "[%key:component::workday::config::step::options::data_description::excludes%]", @@ -63,8 +60,7 @@ "add_holidays": "[%key:component::workday::config::step::options::data_description::add_holidays%]", "remove_holidays": "[%key:component::workday::config::step::options::data_description::remove_holidays%]", "province": "[%key:component::workday::config::step::options::data_description::province%]", - "language": "[%key:component::workday::config::step::options::data_description::language%]", - "category": "[%key:component::workday::config::step::options::data_description::category%]" + "language": "[%key:component::workday::config::step::options::data_description::language%]" } } }, @@ -82,24 +78,6 @@ "none": "No subdivision" } }, - "category": { - "options": { - "armed_forces": "Armed forces", - "bank": "Bank", - "government": "Government", - "half_day": "Half day", - "optional": "Optional", - "public": "Public", - "school": "School", - "unofficial": "Unofficial", - "workday": "Workday", - "chinese": "Chinese", - "christian": "Christian", - "hebrew": "Hebrew", - "hindu": "Hindu", - "islamic": "Islamic" - } - }, "days": { "options": { "mon": "[%key:common::time::monday%]", diff --git a/homeassistant/components/worldclock/__init__.py b/homeassistant/components/worldclock/__init__.py index ad01c45917a..978eaac8968 100644 --- a/homeassistant/components/worldclock/__init__.py +++ b/homeassistant/components/worldclock/__init__.py @@ -1,25 +1 @@ """The worldclock component.""" - -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant - -from .const import PLATFORMS - - -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up Worldclock from a 
config entry.""" - - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_on_unload(entry.add_update_listener(update_listener)) - - return True - - -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Unload World clock config entry.""" - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: - """Handle options update.""" - await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/worldclock/config_flow.py b/homeassistant/components/worldclock/config_flow.py deleted file mode 100644 index eebf0d59dcb..00000000000 --- a/homeassistant/components/worldclock/config_flow.py +++ /dev/null @@ -1,107 +0,0 @@ -"""Config flow for World clock.""" - -from __future__ import annotations - -from collections.abc import Mapping -from typing import Any, cast -import zoneinfo - -import voluptuous as vol - -from homeassistant.const import CONF_NAME, CONF_TIME_ZONE -from homeassistant.helpers.schema_config_entry_flow import ( - SchemaCommonFlowHandler, - SchemaConfigFlowHandler, - SchemaFlowFormStep, -) -from homeassistant.helpers.selector import ( - SelectOptionDict, - SelectSelector, - SelectSelectorConfig, - SelectSelectorMode, - TextSelector, -) - -from .const import CONF_TIME_FORMAT, DEFAULT_NAME, DEFAULT_TIME_STR_FORMAT, DOMAIN - -TIME_STR_OPTIONS = [ - SelectOptionDict( - value=DEFAULT_TIME_STR_FORMAT, label=f"14:05 ({DEFAULT_TIME_STR_FORMAT})" - ), - SelectOptionDict(value="%I:%M %p", label="11:05 AM (%I:%M %p)"), - SelectOptionDict(value="%Y-%m-%d %H:%M", label="2024-01-01 14:05 (%Y-%m-%d %H:%M)"), - SelectOptionDict( - value="%a, %b %d, %Y %I:%M %p", - label="Mon, Jan 01, 2024 11:05 AM (%a, %b %d, %Y %I:%M %p)", - ), -] - - -async def validate_duplicate( - handler: SchemaCommonFlowHandler, user_input: dict[str, Any] -) -> dict[str, Any]: - """Validate already existing entry.""" - handler.parent_handler._async_abort_entries_match({**handler.options, **user_input}) # noqa: SLF001 - - return user_input - - -async def get_schema(handler: SchemaCommonFlowHandler) -> vol.Schema: - """Get available timezones.""" - get_timezones: list[str] = list( - await handler.parent_handler.hass.async_add_executor_job( - zoneinfo.available_timezones - ) - ) - return vol.Schema( - { - vol.Required(CONF_NAME, default=DEFAULT_NAME): TextSelector(), - vol.Required(CONF_TIME_ZONE): SelectSelector( - SelectSelectorConfig( - options=get_timezones, mode=SelectSelectorMode.DROPDOWN, sort=True - ) - ), - } - ).extend(DATA_SCHEMA_OPTIONS.schema) - - -DATA_SCHEMA_OPTIONS = vol.Schema( - { - vol.Optional(CONF_TIME_FORMAT, default=DEFAULT_TIME_STR_FORMAT): SelectSelector( - SelectSelectorConfig( - options=TIME_STR_OPTIONS, - custom_value=True, - mode=SelectSelectorMode.DROPDOWN, - ) - ) - } -) - - -CONFIG_FLOW = { - "user": SchemaFlowFormStep( - schema=get_schema, - validate_user_input=validate_duplicate, - ), - "import": SchemaFlowFormStep( - schema=get_schema, - validate_user_input=validate_duplicate, - ), -} -OPTIONS_FLOW = { - "init": SchemaFlowFormStep( - DATA_SCHEMA_OPTIONS, - validate_user_input=validate_duplicate, - ) -} - - -class WorldclockConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): - """Handle a config flow for Worldclock.""" - - config_flow = CONFIG_FLOW - options_flow = OPTIONS_FLOW - - def async_config_entry_title(self, options: Mapping[str, Any]) -> str: - """Return config entry title.""" - return cast(str, 
options[CONF_NAME]) diff --git a/homeassistant/components/worldclock/const.py b/homeassistant/components/worldclock/const.py deleted file mode 100644 index fafa3dbc52f..00000000000 --- a/homeassistant/components/worldclock/const.py +++ /dev/null @@ -1,11 +0,0 @@ -"""Constants for world clock component.""" - -from homeassistant.const import Platform - -DOMAIN = "worldclock" -PLATFORMS = [Platform.SENSOR] - -CONF_TIME_FORMAT = "time_format" - -DEFAULT_NAME = "Worldclock Sensor" -DEFAULT_TIME_STR_FORMAT = "%H:%M" diff --git a/homeassistant/components/worldclock/manifest.json b/homeassistant/components/worldclock/manifest.json index bc7ee3cd939..61600e4f924 100644 --- a/homeassistant/components/worldclock/manifest.json +++ b/homeassistant/components/worldclock/manifest.json @@ -2,7 +2,6 @@ "domain": "worldclock", "name": "Worldclock", "codeowners": ["@fabaff"], - "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/worldclock", "iot_class": "local_push", "quality_scale": "internal" diff --git a/homeassistant/components/worldclock/sensor.py b/homeassistant/components/worldclock/sensor.py index f4879ca08c4..d9b4aa90f07 100644 --- a/homeassistant/components/worldclock/sensor.py +++ b/homeassistant/components/worldclock/sensor.py @@ -10,17 +10,17 @@ from homeassistant.components.sensor import ( PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorEntity, ) -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_NAME, CONF_TIME_ZONE -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util -from .const import CONF_TIME_FORMAT, DEFAULT_NAME, DEFAULT_TIME_STR_FORMAT, DOMAIN +CONF_TIME_FORMAT = "time_format" + +DEFAULT_NAME = "Worldclock Sensor" +DEFAULT_TIME_STR_FORMAT = "%H:%M" PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { @@ -38,44 +38,13 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the World clock sensor.""" - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=config, - ) - ) - - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2025.2.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Worldclock", - }, - ) - - -async def async_setup_entry( - hass: HomeAssistant, - entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the World clock sensor entry.""" - time_zone = await dt_util.async_get_time_zone(entry.options[CONF_TIME_ZONE]) + time_zone = dt_util.get_time_zone(config[CONF_TIME_ZONE]) async_add_entities( [ WorldClockSensor( time_zone, - entry.options[CONF_NAME], - entry.options[CONF_TIME_FORMAT], - entry.entry_id, + config[CONF_NAME], + config[CONF_TIME_FORMAT], ) ], True, @@ -86,22 +55,12 @@ class WorldClockSensor(SensorEntity): """Representation of a World clock sensor.""" _attr_icon = "mdi:clock" - 
_attr_has_entity_name = True - _attr_name = None - def __init__( - self, time_zone: tzinfo | None, name: str, time_format: str, unique_id: str - ) -> None: + def __init__(self, time_zone: tzinfo | None, name: str, time_format: str) -> None: """Initialize the sensor.""" + self._attr_name = name self._time_zone = time_zone self._time_format = time_format - self._attr_unique_id = unique_id - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, unique_id)}, - name=name, - entry_type=DeviceEntryType.SERVICE, - manufacturer="Worldclock", - ) async def async_update(self) -> None: """Get the time and updates the states.""" diff --git a/homeassistant/components/worldclock/strings.json b/homeassistant/components/worldclock/strings.json deleted file mode 100644 index 2f6b8d67a7c..00000000000 --- a/homeassistant/components/worldclock/strings.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" - }, - "step": { - "user": { - "data": { - "name": "[%key:common::config_flow::data::name%]", - "time_zone": "Timezone", - "time_format": "Time format" - }, - "data_description": { - "time_zone": "Select timezone from list", - "time_format": "Select a pre-defined format from the list or define your own format." - } - } - } - }, - "options": { - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" - }, - "step": { - "init": { - "data": { - "time_format": "[%key:component::worldclock::config::step::user::data::time_format%]" - }, - "data_description": { - "time_format": "[%key:component::worldclock::config::step::user::data_description::time_format%]" - } - } - } - } -} diff --git a/homeassistant/components/ws66i/__init__.py b/homeassistant/components/ws66i/__init__.py index 83ad7bbf070..1993f38e0ab 100644 --- a/homeassistant/components/ws66i/__init__.py +++ b/homeassistant/components/ws66i/__init__.py @@ -52,7 +52,7 @@ def _find_zones(hass: HomeAssistant, ws66i: WS66i) -> list[int]: zone_id = (amp_num * 10) + zone_num zone_list.append(zone_id) - _LOGGER.debug("Detected %d amp(s)", amp_num - 1) + _LOGGER.info("Detected %d amp(s)", amp_num - 1) return zone_list diff --git a/homeassistant/components/ws66i/config_flow.py b/homeassistant/components/ws66i/config_flow.py index 120b7738d2e..b0cf6717e4d 100644 --- a/homeassistant/components/ws66i/config_flow.py +++ b/homeassistant/components/ws66i/config_flow.py @@ -8,12 +8,7 @@ from typing import Any from pyws66i import WS66i, get_ws66i import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow from homeassistant.const import CONF_IP_ADDRESS from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError @@ -49,7 +44,7 @@ FIRST_ZONE = 11 @callback -def _sources_from_config(data: dict[str, str]) -> dict[str, str]: +def _sources_from_config(data): sources_config = { str(idx + 1): data.get(source) for idx, source in enumerate(SOURCES) } @@ -99,9 +94,7 @@ class WS66iConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: @@ -130,13 +123,11 @@ class WS66iConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: 
ConfigEntry, ) -> Ws66iOptionsFlowHandler: """Define the config flow to handle options.""" - return Ws66iOptionsFlowHandler() + return Ws66iOptionsFlowHandler(config_entry) @callback -def _key_for_source( - index: int, source: str, previous_sources: dict[str, str] -) -> vol.Required: +def _key_for_source(index, source, previous_sources): return vol.Required( source, description={"suggested_value": previous_sources[str(index)]} ) @@ -145,9 +136,11 @@ def _key_for_source( class Ws66iOptionsFlowHandler(OptionsFlow): """Handle a WS66i options flow.""" - async def async_step_init( - self, user_input: dict[str, str] | None = None - ) -> ConfigFlowResult: + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize.""" + self.config_entry = config_entry + + async def async_step_init(self, user_input=None): """Manage the options.""" if user_input is not None: return self.async_create_entry( diff --git a/homeassistant/components/wsdot/sensor.py b/homeassistant/components/wsdot/sensor.py index 73714b75c95..3aae6746ea9 100644 --- a/homeassistant/components/wsdot/sensor.py +++ b/homeassistant/components/wsdot/sensor.py @@ -6,7 +6,6 @@ from datetime import datetime, timedelta, timezone from http import HTTPStatus import logging import re -from typing import Any import requests import voluptuous as vol @@ -126,7 +125,7 @@ class WashingtonStateTravelTimeSensor(WashingtonStateTransportSensor): self._state = self._data.get(ATTR_CURRENT_TIME) @property - def extra_state_attributes(self) -> dict[str, Any] | None: + def extra_state_attributes(self): """Return other details about the sensor state.""" if self._data is not None: attrs = {} @@ -141,7 +140,6 @@ class WashingtonStateTravelTimeSensor(WashingtonStateTransportSensor): self._data.get(ATTR_TIME_UPDATED) ) return attrs - return None def _parse_wsdot_timestamp(timestamp): diff --git a/homeassistant/components/wyoming/__init__.py b/homeassistant/components/wyoming/__init__.py index d639933ece6..00d587e2bb4 100644 --- a/homeassistant/components/wyoming/__init__.py +++ b/homeassistant/components/wyoming/__init__.py @@ -14,11 +14,11 @@ from .const import ATTR_SPEAKER, DOMAIN from .data import WyomingService from .devices import SatelliteDevice from .models import DomainDataItem +from .satellite import WyomingSatellite _LOGGER = logging.getLogger(__name__) SATELLITE_PLATFORMS = [ - Platform.ASSIST_SATELLITE, Platform.BINARY_SENSOR, Platform.SELECT, Platform.SWITCH, @@ -47,29 +47,51 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.async_on_unload(entry.add_update_listener(update_listener)) if (satellite_info := service.info.satellite) is not None: - # Create satellite device - dev_reg = dr.async_get(hass) + # Create satellite device, etc. + item.satellite = _make_satellite(hass, entry, service) - # Use config entry id since only one satellite per entry is supported - satellite_id = entry.entry_id - device = dev_reg.async_get_or_create( - config_entry_id=entry.entry_id, - identifiers={(DOMAIN, satellite_id)}, - name=satellite_info.name, - suggested_area=satellite_info.area, - ) - - item.device = SatelliteDevice( - satellite_id=satellite_id, - device_id=device.id, - ) - - # Set up satellite entity, sensors, switches, etc. + # Set up satellite sensors, switches, etc. 
await hass.config_entries.async_forward_entry_setups(entry, SATELLITE_PLATFORMS) + # Start satellite communication + entry.async_create_background_task( + hass, + item.satellite.run(), + f"Satellite {satellite_info.name}", + ) + + entry.async_on_unload(item.satellite.stop) + return True +def _make_satellite( + hass: HomeAssistant, config_entry: ConfigEntry, service: WyomingService +) -> WyomingSatellite: + """Create Wyoming satellite/device from config entry and Wyoming service.""" + satellite_info = service.info.satellite + assert satellite_info is not None + + dev_reg = dr.async_get(hass) + + # Use config entry id since only one satellite per entry is supported + satellite_id = config_entry.entry_id + + device = dev_reg.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, satellite_id)}, + name=satellite_info.name, + suggested_area=satellite_info.area, + ) + + satellite_device = SatelliteDevice( + satellite_id=satellite_id, + device_id=device.id, + ) + + return WyomingSatellite(hass, config_entry, service, satellite_device) + + async def update_listener(hass: HomeAssistant, entry: ConfigEntry): """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) @@ -80,7 +102,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: item: DomainDataItem = hass.data[DOMAIN][entry.entry_id] platforms = list(item.service.platforms) - if item.device is not None: + if item.satellite is not None: platforms += SATELLITE_PLATFORMS unload_ok = await hass.config_entries.async_unload_platforms(entry, platforms) diff --git a/homeassistant/components/wyoming/binary_sensor.py b/homeassistant/components/wyoming/binary_sensor.py index 24ee073ec4d..4f2c0bb170a 100644 --- a/homeassistant/components/wyoming/binary_sensor.py +++ b/homeassistant/components/wyoming/binary_sensor.py @@ -28,16 +28,15 @@ async def async_setup_entry( item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] # Setup is only forwarded for satellites - assert item.device is not None + assert item.satellite is not None - async_add_entities([WyomingSatelliteAssistInProgress(item.device)]) + async_add_entities([WyomingSatelliteAssistInProgress(item.satellite.device)]) class WyomingSatelliteAssistInProgress(WyomingSatelliteEntity, BinarySensorEntity): """Entity to represent Assist is in progress for satellite.""" entity_description = BinarySensorEntityDescription( - entity_registry_enabled_default=False, key="assist_in_progress", translation_key="assist_in_progress", ) diff --git a/homeassistant/components/wyoming/config_flow.py b/homeassistant/components/wyoming/config_flow.py index 5fdcb1a5484..8461d9e83ac 100644 --- a/homeassistant/components/wyoming/config_flow.py +++ b/homeassistant/components/wyoming/config_flow.py @@ -8,10 +8,9 @@ from urllib.parse import urlparse import voluptuous as vol -from homeassistant.components import zeroconf +from homeassistant.components import hassio, zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_PORT -from homeassistant.helpers.service_info.hassio import HassioServiceInfo +from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT from .const import DOMAIN from .data import WyomingService @@ -31,7 +30,7 @@ class WyomingConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _hassio_discovery: HassioServiceInfo + _hassio_discovery: hassio.HassioServiceInfo _service: WyomingService | None = None _name: str | None = None @@ -62,7 +61,7 @@ 
class WyomingConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="no_services") async def async_step_hassio( - self, discovery_info: HassioServiceInfo + self, discovery_info: hassio.HassioServiceInfo ) -> ConfigFlowResult: """Handle Supervisor add-on discovery.""" _LOGGER.debug("Supervisor discovery info: %s", discovery_info) @@ -124,6 +123,7 @@ class WyomingConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(unique_id) self._abort_if_unique_id_configured() + self.context[CONF_NAME] = self._name self.context["title_placeholders"] = {"name": self._name} self._service = service diff --git a/homeassistant/components/wyoming/conversation.py b/homeassistant/components/wyoming/conversation.py deleted file mode 100644 index 9a17559c1f8..00000000000 --- a/homeassistant/components/wyoming/conversation.py +++ /dev/null @@ -1,194 +0,0 @@ -"""Support for Wyoming intent recognition services.""" - -import logging - -from wyoming.asr import Transcript -from wyoming.client import AsyncTcpClient -from wyoming.handle import Handled, NotHandled -from wyoming.info import HandleProgram, IntentProgram -from wyoming.intent import Intent, NotRecognized - -from homeassistant.components import conversation -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.helpers import intent -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util import ulid - -from .const import DOMAIN -from .data import WyomingService -from .error import WyomingError -from .models import DomainDataItem - -_LOGGER = logging.getLogger(__name__) - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up Wyoming conversation.""" - item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities( - [ - WyomingConversationEntity(config_entry, item.service), - ] - ) - - -class WyomingConversationEntity( - conversation.ConversationEntity, conversation.AbstractConversationAgent -): - """Wyoming conversation agent.""" - - _attr_has_entity_name = True - - def __init__( - self, - config_entry: ConfigEntry, - service: WyomingService, - ) -> None: - """Set up provider.""" - super().__init__() - - self.service = service - - self._intent_service: IntentProgram | None = None - self._handle_service: HandleProgram | None = None - - for maybe_intent in self.service.info.intent: - if maybe_intent.installed: - self._intent_service = maybe_intent - break - - for maybe_handle in self.service.info.handle: - if maybe_handle.installed: - self._handle_service = maybe_handle - break - - model_languages: set[str] = set() - - if self._intent_service is not None: - for intent_model in self._intent_service.models: - if intent_model.installed: - model_languages.update(intent_model.languages) - - self._attr_name = self._intent_service.name - self._attr_supported_features = ( - conversation.ConversationEntityFeature.CONTROL - ) - elif self._handle_service is not None: - for handle_model in self._handle_service.models: - if handle_model.installed: - model_languages.update(handle_model.languages) - - self._attr_name = self._handle_service.name - - self._supported_languages = list(model_languages) - self._attr_unique_id = f"{config_entry.entry_id}-conversation" - - @property - def supported_languages(self) -> list[str]: - """Return a list of supported languages.""" - return self._supported_languages - - async def async_process( 
- self, user_input: conversation.ConversationInput - ) -> conversation.ConversationResult: - """Process a sentence.""" - conversation_id = user_input.conversation_id or ulid.ulid_now() - intent_response = intent.IntentResponse(language=user_input.language) - - try: - async with AsyncTcpClient(self.service.host, self.service.port) as client: - await client.write_event( - Transcript( - user_input.text, context={"conversation_id": conversation_id} - ).event() - ) - - while True: - event = await client.read_event() - if event is None: - _LOGGER.debug("Connection lost") - intent_response.async_set_error( - intent.IntentResponseErrorCode.UNKNOWN, - "Connection to service was lost", - ) - return conversation.ConversationResult( - response=intent_response, - conversation_id=user_input.conversation_id, - ) - - if Intent.is_type(event.type): - # Success - recognized_intent = Intent.from_event(event) - _LOGGER.debug("Recognized intent: %s", recognized_intent) - - intent_type = recognized_intent.name - intent_slots = { - e.name: {"value": e.value} - for e in recognized_intent.entities - } - intent_response = await intent.async_handle( - self.hass, - DOMAIN, - intent_type, - intent_slots, - text_input=user_input.text, - language=user_input.language, - ) - - if (not intent_response.speech) and recognized_intent.text: - intent_response.async_set_speech(recognized_intent.text) - - break - - if NotRecognized.is_type(event.type): - not_recognized = NotRecognized.from_event(event) - intent_response.async_set_error( - intent.IntentResponseErrorCode.NO_INTENT_MATCH, - not_recognized.text, - ) - break - - if Handled.is_type(event.type): - # Success - handled = Handled.from_event(event) - intent_response.async_set_speech(handled.text) - break - - if NotHandled.is_type(event.type): - not_handled = NotHandled.from_event(event) - intent_response.async_set_error( - intent.IntentResponseErrorCode.FAILED_TO_HANDLE, - not_handled.text, - ) - break - - except (OSError, WyomingError) as err: - _LOGGER.exception("Unexpected error while communicating with service") - intent_response.async_set_error( - intent.IntentResponseErrorCode.UNKNOWN, - f"Error communicating with service: {err}", - ) - return conversation.ConversationResult( - response=intent_response, - conversation_id=user_input.conversation_id, - ) - except intent.IntentError as err: - _LOGGER.exception("Unexpected error while handling intent") - intent_response.async_set_error( - intent.IntentResponseErrorCode.FAILED_TO_HANDLE, - f"Error handling intent: {err}", - ) - return conversation.ConversationResult( - response=intent_response, - conversation_id=user_input.conversation_id, - ) - - # Success - return conversation.ConversationResult( - response=intent_response, conversation_id=conversation_id - ) diff --git a/homeassistant/components/wyoming/data.py b/homeassistant/components/wyoming/data.py index a16062ab058..e333a740741 100644 --- a/homeassistant/components/wyoming/data.py +++ b/homeassistant/components/wyoming/data.py @@ -37,10 +37,6 @@ class WyomingService: self.platforms.append(Platform.TTS) if any(wake.installed for wake in info.wake): self.platforms.append(Platform.WAKE_WORD) - if any(intent.installed for intent in info.intent) or any( - handle.installed for handle in info.handle - ): - self.platforms.append(Platform.CONVERSATION) def has_services(self) -> bool: """Return True if services are installed that Home Assistant can use.""" @@ -48,8 +44,6 @@ class WyomingService: any(asr for asr in self.info.asr if asr.installed) or any(tts for tts in 
self.info.tts if tts.installed) or any(wake for wake in self.info.wake if wake.installed) - or any(intent for intent in self.info.intent if intent.installed) - or any(handle for handle in self.info.handle if handle.installed) or ((self.info.satellite is not None) and self.info.satellite.installed) ) @@ -76,16 +70,6 @@ class WyomingService: if wake_installed: return wake_installed[0].name - # intent recognition (text -> intent) - intent_installed = [intent for intent in self.info.intent if intent.installed] - if intent_installed: - return intent_installed[0].name - - # intent handling (text -> text) - handle_installed = [handle for handle in self.info.handle if handle.installed] - if handle_installed: - return handle_installed[0].name - return None @classmethod @@ -116,7 +100,7 @@ async def load_wyoming_info( while True: event = await client.read_event() if event is None: - raise WyomingError( # noqa: TRY301 + raise WyomingError( "Connection closed unexpectedly", ) diff --git a/homeassistant/components/wyoming/devices.py b/homeassistant/components/wyoming/devices.py index 2e00b31fd34..2ca66f3b21a 100644 --- a/homeassistant/components/wyoming/devices.py +++ b/homeassistant/components/wyoming/devices.py @@ -5,7 +5,6 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er @@ -24,7 +23,6 @@ class SatelliteDevice: noise_suppression_level: int = 0 auto_gain: int = 0 volume_multiplier: float = 1.0 - vad_sensitivity: VadSensitivity = VadSensitivity.DEFAULT _is_active_listener: Callable[[], None] | None = None _is_muted_listener: Callable[[], None] | None = None @@ -79,14 +77,6 @@ class SatelliteDevice: if self._audio_settings_listener is not None: self._audio_settings_listener() - @callback - def set_vad_sensitivity(self, vad_sensitivity: VadSensitivity) -> None: - """Set VAD sensitivity.""" - if vad_sensitivity != self.vad_sensitivity: - self.vad_sensitivity = vad_sensitivity - if self._audio_settings_listener is not None: - self._audio_settings_listener() - @callback def set_is_active_listener(self, is_active_listener: Callable[[], None]) -> None: """Listen for updates to is_active.""" @@ -150,10 +140,3 @@ class SatelliteDevice: return ent_reg.async_get_entity_id( "number", DOMAIN, f"{self.satellite_id}-volume_multiplier" ) - - def get_vad_sensitivity_entity_id(self, hass: HomeAssistant) -> str | None: - """Return entity id for VAD sensitivity.""" - ent_reg = er.async_get(hass) - return ent_reg.async_get_entity_id( - "select", DOMAIN, f"{self.satellite_id}-vad_sensitivity" - ) diff --git a/homeassistant/components/wyoming/entity.py b/homeassistant/components/wyoming/entity.py index 1ce105fb860..4591283036f 100644 --- a/homeassistant/components/wyoming/entity.py +++ b/homeassistant/components/wyoming/entity.py @@ -6,7 +6,7 @@ from homeassistant.helpers import entity from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from .const import DOMAIN -from .devices import SatelliteDevice +from .satellite import SatelliteDevice class WyomingSatelliteEntity(entity.Entity): diff --git a/homeassistant/components/wyoming/manifest.json b/homeassistant/components/wyoming/manifest.json index b837d2a9e76..30104a88dce 100644 --- a/homeassistant/components/wyoming/manifest.json +++ b/homeassistant/components/wyoming/manifest.json @@ -3,12 +3,7 @@ "name": 
"Wyoming Protocol", "codeowners": ["@balloob", "@synesthesiam"], "config_flow": true, - "dependencies": [ - "assist_satellite", - "assist_pipeline", - "intent", - "conversation" - ], + "dependencies": ["assist_pipeline", "intent", "conversation"], "documentation": "https://www.home-assistant.io/integrations/wyoming", "integration_type": "service", "iot_class": "local_push", diff --git a/homeassistant/components/wyoming/models.py b/homeassistant/components/wyoming/models.py index b819d06f916..066af144d78 100644 --- a/homeassistant/components/wyoming/models.py +++ b/homeassistant/components/wyoming/models.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from .data import WyomingService -from .devices import SatelliteDevice +from .satellite import WyomingSatellite @dataclass @@ -11,4 +11,4 @@ class DomainDataItem: """Domain data item.""" service: WyomingService - device: SatelliteDevice | None = None + satellite: WyomingSatellite | None = None diff --git a/homeassistant/components/wyoming/number.py b/homeassistant/components/wyoming/number.py index d9a58cc3333..5e769eeb06d 100644 --- a/homeassistant/components/wyoming/number.py +++ b/homeassistant/components/wyoming/number.py @@ -30,12 +30,13 @@ async def async_setup_entry( item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] # Setup is only forwarded for satellites - assert item.device is not None + assert item.satellite is not None + device = item.satellite.device async_add_entities( [ - WyomingSatelliteAutoGainNumber(item.device), - WyomingSatelliteVolumeMultiplierNumber(item.device), + WyomingSatelliteAutoGainNumber(device), + WyomingSatelliteVolumeMultiplierNumber(device), ] ) diff --git a/homeassistant/components/wyoming/assist_satellite.py b/homeassistant/components/wyoming/satellite.py similarity index 82% rename from homeassistant/components/wyoming/assist_satellite.py rename to homeassistant/components/wyoming/satellite.py index 615084bcbf3..5af0c54abad 100644 --- a/homeassistant/components/wyoming/assist_satellite.py +++ b/homeassistant/components/wyoming/satellite.py @@ -1,14 +1,14 @@ -"""Assist satellite entity for Wyoming integration.""" - -from __future__ import annotations +"""Support for Wyoming satellite services.""" import asyncio -from collections.abc import AsyncGenerator import io import logging -from typing import Any, Final +import time +from typing import Final +from uuid import uuid4 import wave +from typing_extensions import AsyncGenerator from wyoming.asr import Transcribe, Transcript from wyoming.audio import AudioChunk, AudioChunkConverter, AudioStart, AudioStop from wyoming.client import AsyncTcpClient @@ -18,28 +18,19 @@ from wyoming.info import Describe, Info from wyoming.ping import Ping, Pong from wyoming.pipeline import PipelineStage, RunPipeline from wyoming.satellite import PauseSatellite, RunSatellite -from wyoming.snd import Played from wyoming.timer import TimerCancelled, TimerFinished, TimerStarted, TimerUpdated from wyoming.tts import Synthesize, SynthesizeVoice from wyoming.vad import VoiceStarted, VoiceStopped from wyoming.wake import Detect, Detection -from homeassistant.components import assist_pipeline, intent, tts -from homeassistant.components.assist_pipeline import PipelineEvent -from homeassistant.components.assist_satellite import ( - AssistSatelliteConfiguration, - AssistSatelliteEntity, - AssistSatelliteEntityDescription, -) +from homeassistant.components import assist_pipeline, intent, stt, tts +from homeassistant.components.assist_pipeline import select as pipeline_select 
from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.core import Context, HomeAssistant, callback from .const import DOMAIN from .data import WyomingService from .devices import SatelliteDevice -from .entity import WyomingSatelliteEntity -from .models import DomainDataItem _LOGGER = logging.getLogger(__name__) @@ -49,6 +40,7 @@ _RESTART_SECONDS: Final = 3 _PING_TIMEOUT: Final = 5 _PING_SEND_DELAY: Final = 2 _PIPELINE_FINISH_TIMEOUT: Final = 1 +_CONVERSATION_TIMEOUT_SEC: Final = 5 * 60 # 5 minutes # Wyoming stage -> Assist stage _STAGES: dict[PipelineStage, assist_pipeline.PipelineStage] = { @@ -59,46 +51,21 @@ _STAGES: dict[PipelineStage, assist_pipeline.PipelineStage] = { } -async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up Wyoming Assist satellite entity.""" - domain_data: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] - assert domain_data.device is not None - - async_add_entities( - [ - WyomingAssistSatellite( - hass, domain_data.service, domain_data.device, config_entry - ) - ] - ) - - -class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity): - """Assist satellite for Wyoming devices.""" - - entity_description = AssistSatelliteEntityDescription(key="assist_satellite") - _attr_translation_key = "assist_satellite" - _attr_name = None +class WyomingSatellite: + """Remote voice satellite running the Wyoming protocol.""" def __init__( self, hass: HomeAssistant, + config_entry: ConfigEntry, service: WyomingService, device: SatelliteDevice, - config_entry: ConfigEntry, ) -> None: - """Initialize an Assist satellite.""" - WyomingSatelliteEntity.__init__(self, device) - AssistSatelliteEntity.__init__(self) - + """Initialize satellite.""" + self.hass = hass + self.config_entry = config_entry self.service = service self.device = device - self.config_entry = config_entry - self.is_running = True self._client: AsyncTcpClient | None = None @@ -116,160 +83,6 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity): self.device.set_pipeline_listener(self._pipeline_changed) self.device.set_audio_settings_listener(self._audio_settings_changed) - @property - def pipeline_entity_id(self) -> str | None: - """Return the entity ID of the pipeline to use for the next conversation.""" - return self.device.get_pipeline_entity_id(self.hass) - - @property - def vad_sensitivity_entity_id(self) -> str | None: - """Return the entity ID of the VAD sensitivity to use for the next conversation.""" - return self.device.get_vad_sensitivity_entity_id(self.hass) - - @property - def tts_options(self) -> dict[str, Any] | None: - """Options passed for text-to-speech.""" - return { - tts.ATTR_PREFERRED_FORMAT: "wav", - tts.ATTR_PREFERRED_SAMPLE_RATE: 16000, - tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 1, - tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, - } - - async def async_added_to_hass(self) -> None: - """Run when entity about to be added to hass.""" - await super().async_added_to_hass() - self.start_satellite() - - async def async_will_remove_from_hass(self) -> None: - """Run when entity will be removed from hass.""" - await super().async_will_remove_from_hass() - self.stop_satellite() - - @callback - def async_get_configuration( - self, - ) -> AssistSatelliteConfiguration: - """Get the current satellite configuration.""" - raise
NotImplementedError - - async def async_set_configuration( - self, config: AssistSatelliteConfiguration - ) -> None: - """Set the current satellite configuration.""" - raise NotImplementedError - - def on_pipeline_event(self, event: PipelineEvent) -> None: - """Set state based on pipeline stage.""" - assert self._client is not None - - if event.type == assist_pipeline.PipelineEventType.RUN_END: - # Pipeline run is complete - self._is_pipeline_running = False - self._pipeline_ended_event.set() - self.device.set_is_active(False) - elif event.type == assist_pipeline.PipelineEventType.WAKE_WORD_START: - self.hass.add_job(self._client.write_event(Detect().event())) - elif event.type == assist_pipeline.PipelineEventType.WAKE_WORD_END: - # Wake word detection - # Inform client of wake word detection - if event.data and (wake_word_output := event.data.get("wake_word_output")): - detection = Detection( - name=wake_word_output["wake_word_id"], - timestamp=wake_word_output.get("timestamp"), - ) - self.hass.add_job(self._client.write_event(detection.event())) - elif event.type == assist_pipeline.PipelineEventType.STT_START: - # Speech-to-text - self.device.set_is_active(True) - - if event.data: - self.hass.add_job( - self._client.write_event( - Transcribe(language=event.data["metadata"]["language"]).event() - ) - ) - elif event.type == assist_pipeline.PipelineEventType.STT_VAD_START: - # User started speaking - if event.data: - self.hass.add_job( - self._client.write_event( - VoiceStarted(timestamp=event.data["timestamp"]).event() - ) - ) - elif event.type == assist_pipeline.PipelineEventType.STT_VAD_END: - # User stopped speaking - if event.data: - self.hass.add_job( - self._client.write_event( - VoiceStopped(timestamp=event.data["timestamp"]).event() - ) - ) - elif event.type == assist_pipeline.PipelineEventType.STT_END: - # Speech-to-text transcript - if event.data: - # Inform client of transript - stt_text = event.data["stt_output"]["text"] - self.hass.add_job( - self._client.write_event(Transcript(text=stt_text).event()) - ) - elif event.type == assist_pipeline.PipelineEventType.TTS_START: - # Text-to-speech text - if event.data: - # Inform client of text - self.hass.add_job( - self._client.write_event( - Synthesize( - text=event.data["tts_input"], - voice=SynthesizeVoice( - name=event.data.get("voice"), - language=event.data.get("language"), - ), - ).event() - ) - ) - elif event.type == assist_pipeline.PipelineEventType.TTS_END: - # TTS stream - if event.data and (tts_output := event.data["tts_output"]): - media_id = tts_output["media_id"] - self.hass.add_job(self._stream_tts(media_id)) - elif event.type == assist_pipeline.PipelineEventType.ERROR: - # Pipeline error - if event.data: - self.hass.add_job( - self._client.write_event( - Error( - text=event.data["message"], code=event.data["code"] - ).event() - ) - ) - - # ------------------------------------------------------------------------- - - def start_satellite(self) -> None: - """Start satellite task.""" - self.is_running = True - - self.config_entry.async_create_background_task( - self.hass, self.run(), "wyoming satellite run" - ) - - def stop_satellite(self) -> None: - """Signal satellite task to stop running.""" - # Stop existing pipeline - self._audio_queue.put_nowait(None) - - # Tell satellite to stop running - self._send_pause() - - # Stop task loop - self.is_running = False - - # Unblock waiting for unmuted - self._muted_changed_event.set() - - # ------------------------------------------------------------------------- - async def 
run(self) -> None: """Run and maintain a connection to satellite.""" _LOGGER.debug("Running satellite task") @@ -296,9 +109,6 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity): except Exception as err: # noqa: BLE001 _LOGGER.debug("%s: %s", err.__class__.__name__, str(err)) - # Stop any existing pipeline - self._audio_queue.put_nowait(None) - # Ensure sensor is off (before restart) self.device.set_is_active(False) @@ -312,6 +122,17 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity): await self.on_stopped() + def stop(self) -> None: + """Signal satellite task to stop running.""" + # Tell satellite to stop running + self._send_pause() + + # Stop task loop + self.is_running = False + + # Unblock waiting for unmuted + self._muted_changed_event.set() + async def on_restart(self) -> None: """Block until pipeline loop will be restarted.""" _LOGGER.warning( @@ -329,7 +150,7 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity): await asyncio.sleep(_RECONNECT_SECONDS) async def on_muted(self) -> None: - """Block until device may be unmuted again.""" + """Block until device may be unmuted again.""" await self._muted_changed_event.wait() async def on_stopped(self) -> None: @@ -430,7 +251,6 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity): done, pending = await asyncio.wait( pending, return_when=asyncio.FIRST_COMPLETED ) - if pipeline_ended_task in done: # Pipeline run end event was received _LOGGER.debug("Pipeline finished") @@ -481,7 +301,7 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity): elif AudioStop.is_type(client_event.type) and self._is_pipeline_running: # Stop pipeline _LOGGER.debug("Client requested pipeline to stop") - self._audio_queue.put_nowait(None) + self._audio_queue.put_nowait(b"") elif Info.is_type(client_event.type): client_info = Info.from_event(client_event) _LOGGER.debug("Updated client info: %s", client_info) @@ -508,9 +328,6 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity): break _LOGGER.debug("Client detected wake word: %s", wake_word_phrase) - elif Played.is_type(client_event.type): - # TTS response has finished playing on satellite - self.tts_response_finished() else: _LOGGER.debug("Unexpected event from satellite: %s", client_event) @@ -535,20 +352,69 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity): if end_stage is None: raise ValueError(f"Invalid end stage: {end_stage}") + pipeline_id = pipeline_select.get_chosen_pipeline( + self.hass, + DOMAIN, + self.device.satellite_id, + ) + pipeline = assist_pipeline.async_get_pipeline(self.hass, pipeline_id) + assert pipeline is not None + # We will push audio in through a queue self._audio_queue = asyncio.Queue() + stt_stream = self._stt_stream() + + # Start pipeline running + _LOGGER.debug( + "Starting pipeline %s from %s to %s", + pipeline.name, + start_stage, + end_stage, + ) + + # Reset conversation id, if necessary + if (self._conversation_id_time is None) or ( + (time.monotonic() - self._conversation_id_time) > _CONVERSATION_TIMEOUT_SEC + ): + self._conversation_id = None + + if self._conversation_id is None: + self._conversation_id = str(uuid4()) + + # Update timeout + self._conversation_id_time = time.monotonic() self._is_pipeline_running = True self._pipeline_ended_event.clear() self.config_entry.async_create_background_task( self.hass, - self.async_accept_pipeline_from_satellite( - audio_stream=self._stt_stream(), +
assist_pipeline.async_pipeline_from_audio_stream( + self.hass, + context=Context(), + event_callback=self._event_callback, + stt_metadata=stt.SpeechMetadata( + language=pipeline.language, + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=stt_stream, start_stage=start_stage, end_stage=end_stage, + tts_audio_output="wav", + pipeline_id=pipeline_id, + audio_settings=assist_pipeline.AudioSettings( + noise_suppression_level=self.device.noise_suppression_level, + auto_gain_dbfs=self.device.auto_gain, + volume_multiplier=self.device.volume_multiplier, + ), + device_id=self.device.device_id, wake_word_phrase=wake_word_phrase, + conversation_id=self._conversation_id, ), - "wyoming satellite pipeline", + name="wyoming satellite pipeline", ) async def _send_delayed_ping(self) -> None: @@ -561,6 +427,91 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity): except ConnectionError: pass # handled with timeout + def _event_callback(self, event: assist_pipeline.PipelineEvent) -> None: + """Translate pipeline events into Wyoming events.""" + assert self._client is not None + + if event.type == assist_pipeline.PipelineEventType.RUN_END: + # Pipeline run is complete + self._is_pipeline_running = False + self._pipeline_ended_event.set() + self.device.set_is_active(False) + elif event.type == assist_pipeline.PipelineEventType.WAKE_WORD_START: + self.hass.add_job(self._client.write_event(Detect().event())) + elif event.type == assist_pipeline.PipelineEventType.WAKE_WORD_END: + # Wake word detection + # Inform client of wake word detection + if event.data and (wake_word_output := event.data.get("wake_word_output")): + detection = Detection( + name=wake_word_output["wake_word_id"], + timestamp=wake_word_output.get("timestamp"), + ) + self.hass.add_job(self._client.write_event(detection.event())) + elif event.type == assist_pipeline.PipelineEventType.STT_START: + # Speech-to-text + self.device.set_is_active(True) + + if event.data: + self.hass.add_job( + self._client.write_event( + Transcribe(language=event.data["metadata"]["language"]).event() + ) + ) + elif event.type == assist_pipeline.PipelineEventType.STT_VAD_START: + # User started speaking + if event.data: + self.hass.add_job( + self._client.write_event( + VoiceStarted(timestamp=event.data["timestamp"]).event() + ) + ) + elif event.type == assist_pipeline.PipelineEventType.STT_VAD_END: + # User stopped speaking + if event.data: + self.hass.add_job( + self._client.write_event( + VoiceStopped(timestamp=event.data["timestamp"]).event() + ) + ) + elif event.type == assist_pipeline.PipelineEventType.STT_END: + # Speech-to-text transcript + if event.data: + # Inform client of transcript + stt_text = event.data["stt_output"]["text"] + self.hass.add_job( + self._client.write_event(Transcript(text=stt_text).event()) + ) + elif event.type == assist_pipeline.PipelineEventType.TTS_START: + # Text-to-speech text + if event.data: + # Inform client of text + self.hass.add_job( + self._client.write_event( + Synthesize( + text=event.data["tts_input"], + voice=SynthesizeVoice( + name=event.data.get("voice"), + language=event.data.get("language"), + ), + ).event() + ) + ) + elif event.type == assist_pipeline.PipelineEventType.TTS_END: + # TTS stream + if event.data and (tts_output := event.data["tts_output"]): + media_id = tts_output["media_id"] +
self.hass.add_job(self._stream_tts(media_id)) + elif event.type == assist_pipeline.PipelineEventType.ERROR: + # Pipeline error + if event.data: + self.hass.add_job( + self._client.write_event( + Error( + text=event.data["message"], code=event.data["code"] + ).event() + ) + ) + async def _connect(self) -> None: """Connect to satellite over TCP.""" await self._disconnect() @@ -621,16 +572,16 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity): async def _stt_stream(self) -> AsyncGenerator[bytes]: """Yield audio chunks from a queue.""" - is_first_chunk = True - while chunk := await self._audio_queue.get(): - if chunk is None: - break + try: + is_first_chunk = True + while chunk := await self._audio_queue.get(): + if is_first_chunk: + is_first_chunk = False + _LOGGER.debug("Receiving audio from satellite") - if is_first_chunk: - is_first_chunk = False - _LOGGER.debug("Receiving audio from satellite") - - yield chunk + yield chunk + except asyncio.CancelledError: + pass # ignore @callback def _handle_timer( diff --git a/homeassistant/components/wyoming/select.py b/homeassistant/components/wyoming/select.py index bbcaab81710..99f26c3e440 100644 --- a/homeassistant/components/wyoming/select.py +++ b/homeassistant/components/wyoming/select.py @@ -4,11 +4,7 @@ from __future__ import annotations from typing import TYPE_CHECKING, Final -from homeassistant.components.assist_pipeline.select import ( - AssistPipelineSelect, - VadSensitivitySelect, -) -from homeassistant.components.assist_pipeline.vad import VadSensitivity +from homeassistant.components.assist_pipeline.select import AssistPipelineSelect from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory @@ -42,13 +38,13 @@ async def async_setup_entry( item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] # Setup is only forwarded for satellites - assert item.device is not None + assert item.satellite is not None + device = item.satellite.device async_add_entities( [ - WyomingSatellitePipelineSelect(hass, item.device), - WyomingSatelliteNoiseSuppressionLevelSelect(item.device), - WyomingSatelliteVadSensitivitySelect(hass, item.device), + WyomingSatellitePipelineSelect(hass, device), + WyomingSatelliteNoiseSuppressionLevelSelect(device), ] ) @@ -96,21 +92,3 @@ class WyomingSatelliteNoiseSuppressionLevelSelect( self._attr_current_option = option self.async_write_ha_state() self._device.set_noise_suppression_level(_NOISE_SUPPRESSION_LEVEL[option]) - - -class WyomingSatelliteVadSensitivitySelect( - WyomingSatelliteEntity, VadSensitivitySelect -): - """VAD sensitivity selector for Wyoming satellites.""" - - def __init__(self, hass: HomeAssistant, device: SatelliteDevice) -> None: - """Initialize a VAD sensitivity selector.""" - self.device = device - - WyomingSatelliteEntity.__init__(self, device) - VadSensitivitySelect.__init__(self, hass, device.satellite_id) - - async def async_select_option(self, option: str) -> None: - """Select an option.""" - await super().async_select_option(option) - self.device.set_vad_sensitivity(VadSensitivity(option)) diff --git a/homeassistant/components/wyoming/strings.json b/homeassistant/components/wyoming/strings.json index 4a1a4c3a246..f2768e45eb8 100644 --- a/homeassistant/components/wyoming/strings.json +++ b/homeassistant/components/wyoming/strings.json @@ -46,14 +46,6 @@ "high": "High", "max": "Max" } - }, - "vad_sensitivity": { - "name": 
"[%key:component::assist_pipeline::entity::select::vad_sensitivity::name%]", - "state": { - "default": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::default%]", - "aggressive": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::aggressive%]", - "relaxed": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::relaxed%]" - } } }, "switch": { diff --git a/homeassistant/components/wyoming/switch.py b/homeassistant/components/wyoming/switch.py index 308429331c3..c012c60bc5a 100644 --- a/homeassistant/components/wyoming/switch.py +++ b/homeassistant/components/wyoming/switch.py @@ -27,9 +27,9 @@ async def async_setup_entry( item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] # Setup is only forwarded for satellites - assert item.device is not None + assert item.satellite is not None - async_add_entities([WyomingSatelliteMuteSwitch(item.device)]) + async_add_entities([WyomingSatelliteMuteSwitch(item.satellite.device)]) class WyomingSatelliteMuteSwitch( @@ -51,7 +51,7 @@ class WyomingSatelliteMuteSwitch( # Default to off self._attr_is_on = (state is not None) and (state.state == STATE_ON) - self._device.set_is_muted(self._attr_is_on) + self._device.is_muted = self._attr_is_on async def async_turn_on(self, **kwargs: Any) -> None: """Turn on.""" diff --git a/homeassistant/components/wyoming/wake_word.py b/homeassistant/components/wyoming/wake_word.py index 64dfd60c068..6eba0f7ca6d 100644 --- a/homeassistant/components/wyoming/wake_word.py +++ b/homeassistant/components/wyoming/wake_word.py @@ -89,7 +89,6 @@ class WyomingWakeWordProvider(wake_word.WakeWordDetectionEntity): """Get the next chunk from audio stream.""" async for chunk_bytes in stream: return chunk_bytes - return None try: async with AsyncTcpClient(self.service.host, self.service.port) as client: diff --git a/homeassistant/components/x10/light.py b/homeassistant/components/x10/light.py index 23343cb0f8d..29c15f66993 100644 --- a/homeassistant/components/x10/light.py +++ b/homeassistant/components/x10/light.py @@ -54,7 +54,7 @@ def setup_platform( try: x10_command("info") except CalledProcessError as err: - _LOGGER.warning("Assuming that the device is CM17A: %s", err.output) + _LOGGER.info("Assuming that the device is CM17A: %s", err.output) is_cm11a = False add_entities(X10Light(light, is_cm11a) for light in config[CONF_DEVICES]) diff --git a/homeassistant/components/xbox/entity.py b/homeassistant/components/xbox/base_sensor.py similarity index 97% rename from homeassistant/components/xbox/entity.py rename to homeassistant/components/xbox/base_sensor.py index d4a63b71b39..f252385d4ca 100644 --- a/homeassistant/components/xbox/entity.py +++ b/homeassistant/components/xbox/base_sensor.py @@ -11,7 +11,7 @@ from .const import DOMAIN from .coordinator import PresenceData, XboxUpdateCoordinator -class XboxBaseEntity(CoordinatorEntity[XboxUpdateCoordinator]): +class XboxBaseSensorEntity(CoordinatorEntity[XboxUpdateCoordinator]): """Base Sensor for the Xbox Integration.""" def __init__( diff --git a/homeassistant/components/xbox/binary_sensor.py b/homeassistant/components/xbox/binary_sensor.py index af95834425a..0f0b9799d3d 100644 --- a/homeassistant/components/xbox/binary_sensor.py +++ b/homeassistant/components/xbox/binary_sensor.py @@ -10,9 +10,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback +from .base_sensor import 
XboxBaseSensorEntity from .const import DOMAIN from .coordinator import XboxUpdateCoordinator -from .entity import XboxBaseEntity PRESENCE_ATTRIBUTES = ["online", "in_party", "in_game", "in_multiplayer"] @@ -32,7 +32,7 @@ async def async_setup_entry( update_friends() -class XboxBinarySensorEntity(XboxBaseEntity, BinarySensorEntity): +class XboxBinarySensorEntity(XboxBaseSensorEntity, BinarySensorEntity): """Representation of a Xbox presence state.""" @property diff --git a/homeassistant/components/xbox/config_flow.py b/homeassistant/components/xbox/config_flow.py index 86157be5d7f..e1434aac67c 100644 --- a/homeassistant/components/xbox/config_flow.py +++ b/homeassistant/components/xbox/config_flow.py @@ -1,9 +1,7 @@ """Config flow for xbox.""" import logging -from typing import Any -from homeassistant.config_entries import ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN @@ -27,9 +25,7 @@ class OAuth2FlowHandler( scopes = ["Xboxlive.signin", "Xboxlive.offline_access"] return {"scope": " ".join(scopes)} - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_user(self, user_input=None): """Handle a flow start.""" await self.async_set_unique_id(DOMAIN) diff --git a/homeassistant/components/xbox/media_source.py b/homeassistant/components/xbox/media_source.py index 4478502b4ca..a63f3b2027b 100644 --- a/homeassistant/components/xbox/media_source.py +++ b/homeassistant/components/xbox/media_source.py @@ -13,7 +13,7 @@ from xbox.webapi.api.provider.screenshots.models import ScreenshotResponse from xbox.webapi.api.provider.smartglass.models import InstalledPackage from homeassistant.components.media_player import MediaClass -from homeassistant.components.media_source import ( +from homeassistant.components.media_source.models import ( BrowseMediaSource, MediaSource, MediaSourceItem, diff --git a/homeassistant/components/xbox/sensor.py b/homeassistant/components/xbox/sensor.py index f269e0a5bb9..ff6591d5b3e 100644 --- a/homeassistant/components/xbox/sensor.py +++ b/homeassistant/components/xbox/sensor.py @@ -10,9 +10,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback +from .base_sensor import XboxBaseSensorEntity from .const import DOMAIN from .coordinator import XboxUpdateCoordinator -from .entity import XboxBaseEntity SENSOR_ATTRIBUTES = ["status", "gamer_score", "account_tier", "gold_tenure"] @@ -34,7 +34,7 @@ async def async_setup_entry( update_friends() -class XboxSensorEntity(XboxBaseEntity, SensorEntity): +class XboxSensorEntity(XboxBaseSensorEntity, SensorEntity): """Representation of a Xbox presence state.""" @property diff --git a/homeassistant/components/xeoma/manifest.json b/homeassistant/components/xeoma/manifest.json index d66177ca214..a73b4bb8671 100644 --- a/homeassistant/components/xeoma/manifest.json +++ b/homeassistant/components/xeoma/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/xeoma", "iot_class": "local_polling", "loggers": ["pyxeoma"], - "requirements": ["pyxeoma==1.4.2"] + "requirements": ["pyxeoma==1.4.1"] } diff --git a/homeassistant/components/xiaomi/camera.py b/homeassistant/components/xiaomi/camera.py index cb8d5f39dec..323a0f8a157 100644 --- a/homeassistant/components/xiaomi/camera.py +++ b/homeassistant/components/xiaomi/camera.py @@ -80,6 +80,7 @@ class 
XiaomiCamera(Camera): self._manager = get_ffmpeg_manager(hass) self._name = config[CONF_NAME] self.host = config[CONF_HOST] + self.host.hass = hass self._model = config[CONF_MODEL] self.port = config[CONF_PORT] self.path = config[CONF_PATH] @@ -140,7 +141,7 @@ class XiaomiCamera(Camera): videos = [v for v in ftp.nlst() if ".tmp" not in v] if not videos: - _LOGGER.debug('Video folder "%s" is empty; delaying', latest_dir) + _LOGGER.info('Video folder "%s" is empty; delaying', latest_dir) return False if self._model == MODEL_XIAOFANG: diff --git a/homeassistant/components/xiaomi/device_tracker.py b/homeassistant/components/xiaomi/device_tracker.py index 9d4a29d2c78..b3983e76aaa 100644 --- a/homeassistant/components/xiaomi/device_tracker.py +++ b/homeassistant/components/xiaomi/device_tracker.py @@ -9,7 +9,7 @@ import requests import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN as DEVICE_TRACKER_DOMAIN, + DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -31,7 +31,7 @@ PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( def get_scanner(hass: HomeAssistant, config: ConfigType) -> XiaomiDeviceScanner | None: """Validate the configuration and return a Xiaomi Device Scanner.""" - scanner = XiaomiDeviceScanner(config[DEVICE_TRACKER_DOMAIN]) + scanner = XiaomiDeviceScanner(config[DOMAIN]) return scanner if scanner.success_init else None @@ -139,7 +139,7 @@ def _retrieve_list(host, token, **kwargs): _LOGGER.exception("No list in response from mi router. %s", result) return None else: - _LOGGER.warning( + _LOGGER.info( "Receive wrong Xiaomi code %s, expected 0 in response %s", xiaomi_code, result, @@ -172,6 +172,7 @@ def _get_token(host, username, password): ) _LOGGER.exception(error_message, url, data, result) return None - - _LOGGER.error("Invalid response: [%s] at url: [%s] with data [%s]", res, url, data) - return None + else: + _LOGGER.error( + "Invalid response: [%s] at url: [%s] with data [%s]", res, url, data + ) diff --git a/homeassistant/components/xiaomi_aqara/__init__.py b/homeassistant/components/xiaomi_aqara/__init__.py index b7f4aa1942e..ee7948a237e 100644 --- a/homeassistant/components/xiaomi_aqara/__init__.py +++ b/homeassistant/components/xiaomi_aqara/__init__.py @@ -1,7 +1,9 @@ """Support for Xiaomi Gateways.""" import asyncio +from datetime import timedelta import logging +from typing import Any import voluptuous as vol from xiaomi_gateway import AsyncXiaomiGatewayMulticast, XiaomiGateway @@ -9,8 +11,11 @@ from xiaomi_gateway import AsyncXiaomiGatewayMulticast, XiaomiGateway from homeassistant.components import persistent_notification from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.const import ( + ATTR_BATTERY_LEVEL, ATTR_DEVICE_ID, + ATTR_VOLTAGE, CONF_HOST, + CONF_MAC, CONF_PORT, CONF_PROTOCOL, EVENT_HOMEASSISTANT_STOP, @@ -19,7 +24,11 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import device_registry as dr import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.device_registry import DeviceInfo, format_mac +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.helpers.typing import ConfigType +from homeassistant.util.dt import utcnow from .const import ( CONF_INTERFACE, @@ -49,6 +58,8 @@ ATTR_GW_MAC = "gw_mac" ATTR_RINGTONE_ID = "ringtone_id" ATTR_RINGTONE_VOL = 
"ringtone_vol" +TIME_TILL_UNAVAILABLE = timedelta(minutes=150) + SERVICE_PLAY_RINGTONE = "play_ringtone" SERVICE_STOP_RINGTONE = "stop_ringtone" SERVICE_ADD_DEVICE = "add_device" @@ -234,6 +245,152 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> return unload_ok +class XiaomiDevice(Entity): + """Representation a base Xiaomi device.""" + + _attr_should_poll = False + + def __init__(self, device, device_type, xiaomi_hub, config_entry): + """Initialize the Xiaomi device.""" + self._state = None + self._is_available = True + self._sid = device["sid"] + self._model = device["model"] + self._protocol = device["proto"] + self._name = f"{device_type}_{self._sid}" + self._device_name = f"{self._model}_{self._sid}" + self._type = device_type + self._write_to_hub = xiaomi_hub.write_to_hub + self._get_from_hub = xiaomi_hub.get_from_hub + self._extra_state_attributes = {} + self._remove_unavailability_tracker = None + self._xiaomi_hub = xiaomi_hub + self.parse_data(device["data"], device["raw_data"]) + self.parse_voltage(device["data"]) + + if hasattr(self, "_data_key") and self._data_key: + self._unique_id = f"{self._data_key}{self._sid}" + else: + self._unique_id = f"{self._type}{self._sid}" + + self._gateway_id = config_entry.unique_id + if config_entry.data[CONF_MAC] == format_mac(self._sid): + # this entity belongs to the gateway itself + self._is_gateway = True + self._device_id = config_entry.unique_id + else: + # this entity is connected through zigbee + self._is_gateway = False + self._device_id = self._sid + + async def async_added_to_hass(self): + """Start unavailability tracking.""" + self._xiaomi_hub.callbacks[self._sid].append(self.push_data) + self._async_track_unavailable() + + @property + def name(self): + """Return the name of the device.""" + return self._name + + @property + def unique_id(self) -> str: + """Return a unique ID.""" + return self._unique_id + + @property + def device_id(self): + """Return the device id of the Xiaomi Aqara device.""" + return self._device_id + + @property + def device_info(self) -> DeviceInfo: + """Return the device info of the Xiaomi Aqara device.""" + if self._is_gateway: + device_info = DeviceInfo( + identifiers={(DOMAIN, self._device_id)}, + model=self._model, + ) + else: + device_info = DeviceInfo( + connections={(dr.CONNECTION_ZIGBEE, self._device_id)}, + identifiers={(DOMAIN, self._device_id)}, + manufacturer="Xiaomi Aqara", + model=self._model, + name=self._device_name, + sw_version=self._protocol, + via_device=(DOMAIN, self._gateway_id), + ) + + return device_info + + @property + def available(self): + """Return True if entity is available.""" + return self._is_available + + @property + def extra_state_attributes(self): + """Return the state attributes.""" + return self._extra_state_attributes + + @callback + def _async_set_unavailable(self, now): + """Set state to UNAVAILABLE.""" + self._remove_unavailability_tracker = None + self._is_available = False + self.async_write_ha_state() + + @callback + def _async_track_unavailable(self): + if self._remove_unavailability_tracker: + self._remove_unavailability_tracker() + self._remove_unavailability_tracker = async_track_point_in_utc_time( + self.hass, self._async_set_unavailable, utcnow() + TIME_TILL_UNAVAILABLE + ) + if not self._is_available: + self._is_available = True + return True + return False + + def push_data(self, data: dict[str, Any], raw_data: dict[Any, Any]) -> None: + """Push from Hub running in another thread.""" + 
self.hass.loop.call_soon_threadsafe(self.async_push_data, data, raw_data) + + @callback + def async_push_data(self, data: dict[str, Any], raw_data: dict[Any, Any]) -> None: + """Push from Hub handled in the event loop.""" + _LOGGER.debug("PUSH >> %s: %s", self, data) + was_unavailable = self._async_track_unavailable() + is_data = self.parse_data(data, raw_data) + is_voltage = self.parse_voltage(data) + if is_data or is_voltage or was_unavailable: + self.async_write_ha_state() + + def parse_voltage(self, data): + """Parse battery level data sent by gateway.""" + if "voltage" in data: + voltage_key = "voltage" + elif "battery_voltage" in data: + voltage_key = "battery_voltage" + else: + return False + + max_volt = 3300 + min_volt = 2800 + voltage = data[voltage_key] + self._extra_state_attributes[ATTR_VOLTAGE] = round(voltage / 1000.0, 2) + voltage = min(voltage, max_volt) + voltage = max(voltage, min_volt) + percent = ((voltage - min_volt) / (max_volt - min_volt)) * 100 + self._extra_state_attributes[ATTR_BATTERY_LEVEL] = round(percent, 1) + return True + + def parse_data(self, data, raw_data): + """Parse data sent by gateway.""" + raise NotImplementedError + + def _add_gateway_to_schema(hass, schema): """Extend a voluptuous schema with a gateway validator.""" diff --git a/homeassistant/components/xiaomi_aqara/binary_sensor.py b/homeassistant/components/xiaomi_aqara/binary_sensor.py index ad91dda2173..cee2980fe07 100644 --- a/homeassistant/components/xiaomi_aqara/binary_sensor.py +++ b/homeassistant/components/xiaomi_aqara/binary_sensor.py @@ -12,8 +12,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later from homeassistant.helpers.restore_state import RestoreEntity +from . 
import XiaomiDevice from .const import DOMAIN, GATEWAYS_KEY -from .entity import XiaomiDevice _LOGGER = logging.getLogger(__name__) @@ -202,8 +202,6 @@ class XiaomiNatgasSensor(XiaomiBinarySensor): return True return False - return False - class XiaomiMotionSensor(XiaomiBinarySensor): """Representation of a XiaomiMotionSensor.""" @@ -300,8 +298,6 @@ class XiaomiMotionSensor(XiaomiBinarySensor): self._state = True return True - return False - class XiaomiDoorSensor(XiaomiBinarySensor, RestoreEntity): """Representation of a XiaomiDoorSensor.""" @@ -361,8 +357,6 @@ class XiaomiDoorSensor(XiaomiBinarySensor, RestoreEntity): return True return False - return False - class XiaomiWaterLeakSensor(XiaomiBinarySensor): """Representation of a XiaomiWaterLeakSensor.""" @@ -407,8 +401,6 @@ class XiaomiWaterLeakSensor(XiaomiBinarySensor): return True return False - return False - class XiaomiSmokeSensor(XiaomiBinarySensor): """Representation of a XiaomiSmokeSensor.""" @@ -451,8 +443,6 @@ class XiaomiSmokeSensor(XiaomiBinarySensor): return True return False - return False - class XiaomiVibration(XiaomiBinarySensor): """Representation of a Xiaomi Vibration Sensor.""" diff --git a/homeassistant/components/xiaomi_aqara/config_flow.py b/homeassistant/components/xiaomi_aqara/config_flow.py index 6252e6849d0..8f391c8ddf3 100644 --- a/homeassistant/components/xiaomi_aqara/config_flow.py +++ b/homeassistant/components/xiaomi_aqara/config_flow.py @@ -2,7 +2,6 @@ import logging from socket import gaierror -from typing import Any import voluptuous as vol from xiaomi_gateway import MULTICAST_PORT, XiaomiGateway, XiaomiGatewayDiscovery @@ -50,14 +49,13 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - selected_gateway: XiaomiGateway - gateways: dict[str, XiaomiGateway] - - def __init__(self) -> None: + def __init__(self): """Initialize.""" - self.host: str | None = None + self.host = None self.interface = DEFAULT_INTERFACE - self.sid: str | None = None + self.sid = None + self.gateways = None + self.selected_gateway = None @callback def async_show_form_step_user(self, errors): @@ -68,11 +66,9 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=schema, errors=errors) - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" - errors: dict[str, str] = {} + errors = {} if user_input is None: return self.async_show_form_step_user(errors) @@ -129,11 +125,9 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "discovery_error" return self.async_show_form_step_user(errors) - async def async_step_select( - self, user_input: dict[str, str] | None = None - ) -> ConfigFlowResult: + async def async_step_select(self, user_input=None): """Handle multiple aqara gateways found.""" - errors: dict[str, str] = {} + errors = {} if user_input is not None: ip_adress = user_input["select_ip"] self.selected_gateway = self.gateways[ip_adress] @@ -191,9 +185,7 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_user() - async def async_step_settings( - self, user_input: dict[str, str] | None = None - ) -> ConfigFlowResult: + async def async_step_settings(self, user_input=None): """Specify settings and connect aqara gateway.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/xiaomi_aqara/cover.py 
b/homeassistant/components/xiaomi_aqara/cover.py index e073ef6b683..64c9f6f208a 100644 --- a/homeassistant/components/xiaomi_aqara/cover.py +++ b/homeassistant/components/xiaomi_aqara/cover.py @@ -7,8 +7,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import XiaomiDevice from .const import DOMAIN, GATEWAYS_KEY -from .entity import XiaomiDevice ATTR_CURTAIN_LEVEL = "curtain_level" diff --git a/homeassistant/components/xiaomi_aqara/entity.py b/homeassistant/components/xiaomi_aqara/entity.py deleted file mode 100644 index db47015c0cf..00000000000 --- a/homeassistant/components/xiaomi_aqara/entity.py +++ /dev/null @@ -1,166 +0,0 @@ -"""Support for Xiaomi Gateways.""" - -from datetime import timedelta -import logging -from typing import Any - -from homeassistant.const import ATTR_BATTERY_LEVEL, ATTR_VOLTAGE, CONF_MAC -from homeassistant.core import callback -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo, format_mac -from homeassistant.helpers.entity import Entity -from homeassistant.helpers.event import async_track_point_in_utc_time -from homeassistant.util.dt import utcnow - -from .const import DOMAIN - -_LOGGER = logging.getLogger(__name__) - -TIME_TILL_UNAVAILABLE = timedelta(minutes=150) - - -class XiaomiDevice(Entity): - """Representation a base Xiaomi device.""" - - _attr_should_poll = False - - def __init__(self, device, device_type, xiaomi_hub, config_entry): - """Initialize the Xiaomi device.""" - self._state = None - self._is_available = True - self._sid = device["sid"] - self._model = device["model"] - self._protocol = device["proto"] - self._name = f"{device_type}_{self._sid}" - self._device_name = f"{self._model}_{self._sid}" - self._type = device_type - self._write_to_hub = xiaomi_hub.write_to_hub - self._get_from_hub = xiaomi_hub.get_from_hub - self._extra_state_attributes = {} - self._remove_unavailability_tracker = None - self._xiaomi_hub = xiaomi_hub - self.parse_data(device["data"], device["raw_data"]) - self.parse_voltage(device["data"]) - - if hasattr(self, "_data_key") and self._data_key: - self._unique_id = f"{self._data_key}{self._sid}" - else: - self._unique_id = f"{self._type}{self._sid}" - - self._gateway_id = config_entry.unique_id - if config_entry.data[CONF_MAC] == format_mac(self._sid): - # this entity belongs to the gateway itself - self._is_gateway = True - self._device_id = config_entry.unique_id - else: - # this entity is connected through zigbee - self._is_gateway = False - self._device_id = self._sid - - async def async_added_to_hass(self): - """Start unavailability tracking.""" - self._xiaomi_hub.callbacks[self._sid].append(self.push_data) - self._async_track_unavailable() - - @property - def name(self): - """Return the name of the device.""" - return self._name - - @property - def unique_id(self) -> str: - """Return a unique ID.""" - return self._unique_id - - @property - def device_id(self): - """Return the device id of the Xiaomi Aqara device.""" - return self._device_id - - @property - def device_info(self) -> DeviceInfo: - """Return the device info of the Xiaomi Aqara device.""" - if self._is_gateway: - device_info = DeviceInfo( - identifiers={(DOMAIN, self._device_id)}, - connections={(dr.CONNECTION_NETWORK_MAC, self._device_id)}, - model=self._model, - ) - else: - device_info = DeviceInfo( - connections={(dr.CONNECTION_ZIGBEE, self._device_id)}, - 
identifiers={(DOMAIN, self._device_id)}, - manufacturer="Xiaomi Aqara", - model=self._model, - name=self._device_name, - sw_version=self._protocol, - via_device=(DOMAIN, self._gateway_id), - ) - - return device_info - - @property - def available(self): - """Return True if entity is available.""" - return self._is_available - - @property - def extra_state_attributes(self): - """Return the state attributes.""" - return self._extra_state_attributes - - @callback - def _async_set_unavailable(self, now): - """Set state to UNAVAILABLE.""" - self._remove_unavailability_tracker = None - self._is_available = False - self.async_write_ha_state() - - @callback - def _async_track_unavailable(self): - if self._remove_unavailability_tracker: - self._remove_unavailability_tracker() - self._remove_unavailability_tracker = async_track_point_in_utc_time( - self.hass, self._async_set_unavailable, utcnow() + TIME_TILL_UNAVAILABLE - ) - if not self._is_available: - self._is_available = True - return True - return False - - def push_data(self, data: dict[str, Any], raw_data: dict[Any, Any]) -> None: - """Push from Hub running in another thread.""" - self.hass.loop.call_soon_threadsafe(self.async_push_data, data, raw_data) - - @callback - def async_push_data(self, data: dict[str, Any], raw_data: dict[Any, Any]) -> None: - """Push from Hub handled in the event loop.""" - _LOGGER.debug("PUSH >> %s: %s", self, data) - was_unavailable = self._async_track_unavailable() - is_data = self.parse_data(data, raw_data) - is_voltage = self.parse_voltage(data) - if is_data or is_voltage or was_unavailable: - self.async_write_ha_state() - - def parse_voltage(self, data): - """Parse battery level data sent by gateway.""" - if "voltage" in data: - voltage_key = "voltage" - elif "battery_voltage" in data: - voltage_key = "battery_voltage" - else: - return False - - max_volt = 3300 - min_volt = 2800 - voltage = data[voltage_key] - self._extra_state_attributes[ATTR_VOLTAGE] = round(voltage / 1000.0, 2) - voltage = min(voltage, max_volt) - voltage = max(voltage, min_volt) - percent = ((voltage - min_volt) / (max_volt - min_volt)) * 100 - self._extra_state_attributes[ATTR_BATTERY_LEVEL] = round(percent, 1) - return True - - def parse_data(self, data, raw_data): - """Parse data sent by gateway.""" - raise NotImplementedError diff --git a/homeassistant/components/xiaomi_aqara/icons.json b/homeassistant/components/xiaomi_aqara/icons.json index 62149b0dd40..4975414833d 100644 --- a/homeassistant/components/xiaomi_aqara/icons.json +++ b/homeassistant/components/xiaomi_aqara/icons.json @@ -1,16 +1,8 @@ { "services": { - "add_device": { - "service": "mdi:cellphone-link" - }, - "play_ringtone": { - "service": "mdi:music" - }, - "remove_device": { - "service": "mdi:cellphone-link" - }, - "stop_ringtone": { - "service": "mdi:music-off" - } + "add_device": "mdi:cellphone-link", + "play_ringtone": "mdi:music", + "remove_device": "mdi:cellphone-link", + "stop_ringtone": "mdi:music-off" } } diff --git a/homeassistant/components/xiaomi_aqara/light.py b/homeassistant/components/xiaomi_aqara/light.py index c8057f1df4a..fc19a22eb5f 100644 --- a/homeassistant/components/xiaomi_aqara/light.py +++ b/homeassistant/components/xiaomi_aqara/light.py @@ -16,8 +16,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.color as color_util +from . 
import XiaomiDevice from .const import DOMAIN, GATEWAYS_KEY -from .entity import XiaomiDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/xiaomi_aqara/lock.py b/homeassistant/components/xiaomi_aqara/lock.py index 5e538f25699..8499864576a 100644 --- a/homeassistant/components/xiaomi_aqara/lock.py +++ b/homeassistant/components/xiaomi_aqara/lock.py @@ -2,14 +2,15 @@ from __future__ import annotations -from homeassistant.components.lock import LockEntity, LockState +from homeassistant.components.lock import LockEntity from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later +from . import XiaomiDevice from .const import DOMAIN, GATEWAYS_KEY -from .entity import XiaomiDevice FINGER_KEY = "fing_verified" PASSWORD_KEY = "psw_verified" @@ -49,7 +50,7 @@ class XiaomiAqaraLock(LockEntity, XiaomiDevice): def is_locked(self) -> bool | None: """Return true if lock is locked.""" if self._state is not None: - return self._state == LockState.LOCKED + return self._state == STATE_LOCKED return None @property @@ -65,7 +66,7 @@ class XiaomiAqaraLock(LockEntity, XiaomiDevice): @callback def clear_unlock_state(self, _): """Clear unlock state automatically.""" - self._state = LockState.LOCKED + self._state = STATE_LOCKED self.async_write_ha_state() def parse_data(self, data, raw_data): @@ -78,7 +79,7 @@ class XiaomiAqaraLock(LockEntity, XiaomiDevice): if (value := data.get(key)) is not None: self._changed_by = int(value) self._verified_wrong_times = 0 - self._state = LockState.UNLOCKED + self._state = STATE_UNLOCKED async_call_later( self.hass, UNLOCK_MAINTAIN_TIME, self.clear_unlock_state ) diff --git a/homeassistant/components/xiaomi_aqara/sensor.py b/homeassistant/components/xiaomi_aqara/sensor.py index 49358276a48..4b354a6e730 100644 --- a/homeassistant/components/xiaomi_aqara/sensor.py +++ b/homeassistant/components/xiaomi_aqara/sensor.py @@ -22,8 +22,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import XiaomiDevice from .const import BATTERY_MODELS, DOMAIN, GATEWAYS_KEY, POWER_MODELS -from .entity import XiaomiDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/xiaomi_aqara/switch.py b/homeassistant/components/xiaomi_aqara/switch.py index f66cf8c7603..b6bd2ca1e6a 100644 --- a/homeassistant/components/xiaomi_aqara/switch.py +++ b/homeassistant/components/xiaomi_aqara/switch.py @@ -8,8 +8,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import XiaomiDevice from .const import DOMAIN, GATEWAYS_KEY -from .entity import XiaomiDevice _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/xiaomi_ble/__init__.py b/homeassistant/components/xiaomi_ble/__init__.py index fae5e4d0c91..4a9753bfe85 100644 --- a/homeassistant/components/xiaomi_ble/__init__.py +++ b/homeassistant/components/xiaomi_ble/__init__.py @@ -2,12 +2,12 @@ from __future__ import annotations -from functools import partial import logging from typing import cast from xiaomi_ble import EncryptionScheme, SensorUpdate, XiaomiBluetoothDeviceData +from homeassistant import config_entries from homeassistant.components.bluetooth import ( DOMAIN as BLUETOOTH_DOMAIN, BluetoothScanningMode, @@ -29,7 +29,6 @@ from .const import ( XiaomiBleEvent, ) from .coordinator import XiaomiActiveBluetoothProcessorCoordinator -from .types import XiaomiBLEConfigEntry PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.EVENT, Platform.SENSOR] @@ -38,14 +37,16 @@ _LOGGER = logging.getLogger(__name__) def process_service_info( hass: HomeAssistant, - entry: XiaomiBLEConfigEntry, - device_registry: DeviceRegistry, + entry: config_entries.ConfigEntry, + data: XiaomiBluetoothDeviceData, service_info: BluetoothServiceInfoBleak, + device_registry: DeviceRegistry, ) -> SensorUpdate: """Process a BluetoothServiceInfoBleak, running side effects and returning sensor data.""" - coordinator = entry.runtime_data - data = coordinator.device_data update = data.update(service_info) + coordinator: XiaomiActiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ + entry.entry_id + ] discovered_event_classes = coordinator.discovered_event_classes if entry.data.get(CONF_SLEEPY_DEVICE, False) != data.sleepy_device: hass.config_entries.async_update_entry( @@ -164,29 +165,38 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return await data.async_poll(connectable_device) device_registry = dr.async_get(hass) - coordinator = XiaomiActiveBluetoothProcessorCoordinator( - hass, - _LOGGER, - address=address, - mode=BluetoothScanningMode.PASSIVE, - update_method=partial(process_service_info, hass, entry, device_registry), - needs_poll_method=_needs_poll, - device_data=data, - discovered_event_classes=set(entry.data.get(CONF_DISCOVERED_EVENT_CLASSES, [])), - poll_method=_async_poll, - # We will take advertisements from non-connectable devices - # since we will trade the BLEDevice for a connectable one - # if we need to poll it - connectable=False, - entry=entry, + coordinator = hass.data.setdefault(DOMAIN, {})[entry.entry_id] = ( + XiaomiActiveBluetoothProcessorCoordinator( + hass, + _LOGGER, + address=address, + mode=BluetoothScanningMode.PASSIVE, + update_method=lambda service_info: process_service_info( + hass, entry, data, service_info, device_registry + ), + needs_poll_method=_needs_poll, + device_data=data, + discovered_event_classes=set( + entry.data.get(CONF_DISCOVERED_EVENT_CLASSES, []) + ), + poll_method=_async_poll, + # We will take advertisements from non-connectable devices + # since we will trade the BLEDevice for a connectable one + # if we need to poll it + connectable=False, + entry=entry, + ) ) - entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - # only start after all platforms have had a chance to subscribe - entry.async_on_unload(coordinator.async_start()) + entry.async_on_unload( + coordinator.async_start() + ) # only start after all platforms have had a chance to subscribe return True 
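The setup hunk above moves the Xiaomi BLE coordinator out of entry.runtime_data and back into the hass.data[DOMAIN][entry.entry_id] dictionary, which is why the unload hunk that follows has to pop the stored object again. The sketch below is illustrative only and not part of this patch; DOMAIN, PLATFORMS and the object() stand-in for the coordinator are placeholders rather than names taken from the diff. It contrasts the two storage patterns under those assumptions.

# Illustrative sketch only -- not part of the patch. DOMAIN, PLATFORMS and the
# object() coordinator stand-in are placeholders, not identifiers from this diff.
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

DOMAIN = "example"         # placeholder integration domain
PLATFORMS: list[str] = []  # placeholder platform list


async def async_setup_entry_runtime_data(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Pattern removed above: keep the coordinator on the config entry itself."""
    coordinator = object()  # stands in for the integration's coordinator object
    entry.runtime_data = coordinator
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry_runtime_data(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """runtime_data needs no manual cleanup; unloading the platforms is enough."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_setup_entry_hass_data(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Pattern restored above: index the coordinator under hass.data by entry_id."""
    coordinator = object()
    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry_hass_data(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """With hass.data storage the stored object must be popped on unload."""
    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        hass.data[DOMAIN].pop(entry.entry_id)
    return unload_ok

The runtime_data variant also allows a typed alias such as the removed XiaomiBLEConfigEntry (ConfigEntry[XiaomiActiveBluetoothProcessorCoordinator]) so platforms can read entry.runtime_data without extra annotation, whereas with the dictionary pattern each platform re-annotates the coordinator type when it reads hass.data, as the binary_sensor, event and sensor hunks below do.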
-async def async_unload_entry(hass: HomeAssistant, entry: XiaomiBLEConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + hass.data[DOMAIN].pop(entry.entry_id) + + return unload_ok diff --git a/homeassistant/components/xiaomi_ble/binary_sensor.py b/homeassistant/components/xiaomi_ble/binary_sensor.py index b853f83b967..8734f45c405 100644 --- a/homeassistant/components/xiaomi_ble/binary_sensor.py +++ b/homeassistant/components/xiaomi_ble/binary_sensor.py @@ -8,6 +8,7 @@ from xiaomi_ble.parser import ( SensorUpdate, ) +from homeassistant import config_entries from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, @@ -21,9 +22,12 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sensor import sensor_device_info_to_hass_device_info -from .coordinator import XiaomiPassiveBluetoothDataProcessor +from .const import DOMAIN +from .coordinator import ( + XiaomiActiveBluetoothProcessorCoordinator, + XiaomiPassiveBluetoothDataProcessor, +) from .device import device_key_to_bluetooth_entity_key -from .types import XiaomiBLEConfigEntry BINARY_SENSOR_DESCRIPTIONS = { XiaomiBinarySensorDeviceClass.BATTERY: BinarySensorEntityDescription( @@ -50,10 +54,6 @@ BINARY_SENSOR_DESCRIPTIONS = { key=XiaomiBinarySensorDeviceClass.MOTION, device_class=BinarySensorDeviceClass.MOTION, ), - XiaomiBinarySensorDeviceClass.OCCUPANCY: BinarySensorEntityDescription( - key=XiaomiBinarySensorDeviceClass.OCCUPANCY, - device_class=BinarySensorDeviceClass.OCCUPANCY, - ), XiaomiBinarySensorDeviceClass.OPENING: BinarySensorEntityDescription( key=XiaomiBinarySensorDeviceClass.OPENING, device_class=BinarySensorDeviceClass.OPENING, @@ -134,11 +134,13 @@ def sensor_update_to_bluetooth_data_update( async def async_setup_entry( hass: HomeAssistant, - entry: XiaomiBLEConfigEntry, + entry: config_entries.ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Xiaomi BLE sensors.""" - coordinator = entry.runtime_data + coordinator: XiaomiActiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ + entry.entry_id + ] processor = XiaomiPassiveBluetoothDataProcessor( sensor_update_to_bluetooth_data_update ) diff --git a/homeassistant/components/xiaomi_ble/config_flow.py b/homeassistant/components/xiaomi_ble/config_flow.py index df2de381d39..8209c9565bd 100644 --- a/homeassistant/components/xiaomi_ble/config_flow.py +++ b/homeassistant/components/xiaomi_ble/config_flow.py @@ -4,16 +4,10 @@ from __future__ import annotations from collections.abc import Mapping import dataclasses -import logging from typing import Any import voluptuous as vol -from xiaomi_ble import ( - XiaomiBluetoothDeviceData as DeviceData, - XiaomiCloudException, - XiaomiCloudInvalidAuthenticationException, - XiaomiCloudTokenFetch, -) +from xiaomi_ble import XiaomiBluetoothDeviceData as DeviceData from xiaomi_ble.parser import EncryptionScheme from homeassistant.components import onboarding @@ -23,18 +17,14 @@ from homeassistant.components.bluetooth import ( async_discovered_service_info, async_process_advertisements, ) -from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_ADDRESS, CONF_PASSWORD, CONF_USERNAME -from 
homeassistant.data_entry_flow import AbortFlow -from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_ADDRESS from .const import DOMAIN # How long to wait for additional advertisement packets if we don't have the right ones ADDITIONAL_DISCOVERY_TIMEOUT = 60 -_LOGGER = logging.getLogger(__name__) - @dataclasses.dataclass class Discovery: @@ -114,7 +104,7 @@ class XiaomiConfigFlow(ConfigFlow, domain=DOMAIN): if device.encryption_scheme == EncryptionScheme.MIBEACON_LEGACY: return await self.async_step_get_encryption_key_legacy() if device.encryption_scheme == EncryptionScheme.MIBEACON_4_5: - return await self.async_step_get_encryption_key_4_5_choose_method() + return await self.async_step_get_encryption_key_4_5() return await self.async_step_bluetooth_confirm() async def async_step_get_encryption_key_legacy( @@ -185,67 +175,6 @@ class XiaomiConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_cloud_auth( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle the cloud auth step.""" - assert self._discovery_info - - errors: dict[str, str] = {} - description_placeholders: dict[str, str] = {} - if user_input is not None: - session = async_get_clientsession(self.hass) - fetcher = XiaomiCloudTokenFetch( - user_input[CONF_USERNAME], user_input[CONF_PASSWORD], session - ) - try: - device_details = await fetcher.get_device_info( - self._discovery_info.address - ) - except XiaomiCloudInvalidAuthenticationException as ex: - _LOGGER.debug("Authentication failed: %s", ex, exc_info=True) - errors = {"base": "auth_failed"} - description_placeholders = {"error_detail": str(ex)} - except XiaomiCloudException as ex: - _LOGGER.debug("Failed to connect to MI API: %s", ex, exc_info=True) - raise AbortFlow( - "api_error", description_placeholders={"error_detail": str(ex)} - ) from ex - else: - if device_details: - return await self.async_step_get_encryption_key_4_5( - {"bindkey": device_details.bindkey} - ) - errors = {"base": "api_device_not_found"} - - user_input = user_input or {} - return self.async_show_form( - step_id="cloud_auth", - errors=errors, - data_schema=vol.Schema( - { - vol.Required( - CONF_USERNAME, default=user_input.get(CONF_USERNAME) - ): str, - vol.Required(CONF_PASSWORD): str, - } - ), - description_placeholders={ - **self.context["title_placeholders"], - **description_placeholders, - }, - ) - - async def async_step_get_encryption_key_4_5_choose_method( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Choose method to get the bind key for a version 4/5 device.""" - return self.async_show_menu( - step_id="get_encryption_key_4_5_choose_method", - menu_options=["cloud_auth", "get_encryption_key_4_5"], - description_placeholders=self.context["title_placeholders"], - ) - async def async_step_bluetooth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -302,7 +231,7 @@ class XiaomiConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_get_encryption_key_legacy() if discovery.device.encryption_scheme == EncryptionScheme.MIBEACON_4_5: - return await self.async_step_get_encryption_key_4_5_choose_method() + return await self.async_step_get_encryption_key_4_5() return self._async_get_or_create_entry() @@ -335,6 +264,9 @@ class XiaomiConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow 
initialized by a reauth event.""" + entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + assert entry is not None + device: DeviceData = entry_data["device"] self._discovered_device = device @@ -344,7 +276,7 @@ class XiaomiConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_get_encryption_key_legacy() if device.encryption_scheme == EncryptionScheme.MIBEACON_4_5: - return await self.async_step_get_encryption_key_4_5_choose_method() + return await self.async_step_get_encryption_key_4_5() # Otherwise there wasn't actually encryption so abort return self.async_abort(reason="reauth_successful") @@ -357,10 +289,10 @@ class XiaomiConfigFlow(ConfigFlow, domain=DOMAIN): if bindkey: data["bindkey"] = bindkey - if self.source == SOURCE_REAUTH: - return self.async_update_reload_and_abort( - self._get_reauth_entry(), data=data - ) + if entry_id := self.context.get("entry_id"): + entry = self.hass.config_entries.async_get_entry(entry_id) + assert entry is not None + return self.async_update_reload_and_abort(entry, data=data) return self.async_create_entry( title=self.context["title_placeholders"]["name"], diff --git a/homeassistant/components/xiaomi_ble/coordinator.py b/homeassistant/components/xiaomi_ble/coordinator.py index 69fc427013a..1cd49e851ea 100644 --- a/homeassistant/components/xiaomi_ble/coordinator.py +++ b/homeassistant/components/xiaomi_ble/coordinator.py @@ -16,11 +16,11 @@ from homeassistant.components.bluetooth.active_update_processor import ( from homeassistant.components.bluetooth.passive_update_processor import ( PassiveBluetoothDataProcessor, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.debounce import Debouncer from .const import CONF_SLEEPY_DEVICE -from .types import XiaomiBLEConfigEntry class XiaomiActiveBluetoothProcessorCoordinator( @@ -45,7 +45,7 @@ class XiaomiActiveBluetoothProcessorCoordinator( ] | None = None, poll_debouncer: Debouncer[Coroutine[Any, Any, None]] | None = None, - entry: XiaomiBLEConfigEntry, + entry: ConfigEntry, connectable: bool = True, ) -> None: """Initialize the Xiaomi Bluetooth Active Update Processor Coordinator.""" diff --git a/homeassistant/components/xiaomi_ble/event.py b/homeassistant/components/xiaomi_ble/event.py index 7265bcd112c..e39a4adb3c7 100644 --- a/homeassistant/components/xiaomi_ble/event.py +++ b/homeassistant/components/xiaomi_ble/event.py @@ -9,6 +9,7 @@ from homeassistant.components.event import ( EventEntity, EventEntityDescription, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -28,7 +29,7 @@ from .const import ( EVENT_TYPE, XiaomiBleEvent, ) -from .types import XiaomiBLEConfigEntry +from .coordinator import XiaomiActiveBluetoothProcessorCoordinator DESCRIPTIONS_BY_EVENT_CLASS = { EVENT_CLASS_BUTTON: EventEntityDescription( @@ -182,11 +183,13 @@ class XiaomiEventEntity(EventEntity): async def async_setup_entry( hass: HomeAssistant, - entry: XiaomiBLEConfigEntry, + entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Xiaomi event.""" - coordinator = entry.runtime_data + coordinator: XiaomiActiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ + entry.entry_id + ] address = coordinator.address ent_reg = er.async_get(hass) async_add_entities( diff --git 
a/homeassistant/components/xiaomi_ble/manifest.json b/homeassistant/components/xiaomi_ble/manifest.json index 26dd82c73bc..1e0a09015ee 100644 --- a/homeassistant/components/xiaomi_ble/manifest.json +++ b/homeassistant/components/xiaomi_ble/manifest.json @@ -24,5 +24,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/xiaomi_ble", "iot_class": "local_push", - "requirements": ["xiaomi-ble==0.33.0"] + "requirements": ["xiaomi-ble==0.30.0"] } diff --git a/homeassistant/components/xiaomi_ble/sensor.py b/homeassistant/components/xiaomi_ble/sensor.py index ba8f64383ee..65b33c3c559 100644 --- a/homeassistant/components/xiaomi_ble/sensor.py +++ b/homeassistant/components/xiaomi_ble/sensor.py @@ -7,6 +7,7 @@ from typing import cast from xiaomi_ble import DeviceClass, SensorUpdate, Units from xiaomi_ble.parser import ExtendedSensorDeviceClass +from homeassistant import config_entries from homeassistant.components.bluetooth.passive_update_processor import ( PassiveBluetoothDataUpdate, PassiveBluetoothProcessorEntity, @@ -34,9 +35,12 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sensor import sensor_device_info_to_hass_device_info -from .coordinator import XiaomiPassiveBluetoothDataProcessor +from .const import DOMAIN +from .coordinator import ( + XiaomiActiveBluetoothProcessorCoordinator, + XiaomiPassiveBluetoothDataProcessor, +) from .device import device_key_to_bluetooth_entity_key -from .types import XiaomiBLEConfigEntry SENSOR_DESCRIPTIONS = { (DeviceClass.BATTERY, Units.PERCENTAGE): SensorEntityDescription( @@ -48,8 +52,8 @@ SENSOR_DESCRIPTIONS = { ), (DeviceClass.CONDUCTIVITY, Units.CONDUCTIVITY): SensorEntityDescription( key=str(Units.CONDUCTIVITY), - device_class=SensorDeviceClass.CONDUCTIVITY, - native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS_PER_CM, + device_class=None, + native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS, state_class=SensorStateClass.MEASUREMENT, ), ( @@ -155,24 +159,6 @@ SENSOR_DESCRIPTIONS = { (ExtendedSensorDeviceClass.LOCK_METHOD, None): SensorEntityDescription( key=str(ExtendedSensorDeviceClass.LOCK_METHOD), icon="mdi:key-variant" ), - # Duration of detected status (in minutes) for Occpancy Sensor - ( - ExtendedSensorDeviceClass.DURATION_DETECTED, - Units.TIME_MINUTES, - ): SensorEntityDescription( - key=str(ExtendedSensorDeviceClass.DURATION_DETECTED), - native_unit_of_measurement=UnitOfTime.MINUTES, - state_class=SensorStateClass.MEASUREMENT, - ), - # Duration of cleared status (in minutes) for Occpancy Sensor - ( - ExtendedSensorDeviceClass.DURATION_CLEARED, - Units.TIME_MINUTES, - ): SensorEntityDescription( - key=str(ExtendedSensorDeviceClass.DURATION_CLEARED), - native_unit_of_measurement=UnitOfTime.MINUTES, - state_class=SensorStateClass.MEASUREMENT, - ), } @@ -207,11 +193,13 @@ def sensor_update_to_bluetooth_data_update( async def async_setup_entry( hass: HomeAssistant, - entry: XiaomiBLEConfigEntry, + entry: config_entries.ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Xiaomi BLE sensors.""" - coordinator = entry.runtime_data + coordinator: XiaomiActiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ + entry.entry_id + ] processor = XiaomiPassiveBluetoothDataProcessor( sensor_update_to_bluetooth_data_update ) diff --git a/homeassistant/components/xiaomi_ble/strings.json b/homeassistant/components/xiaomi_ble/strings.json index 4ea4a47c61e..048c9bd92e2 100644 
--- a/homeassistant/components/xiaomi_ble/strings.json +++ b/homeassistant/components/xiaomi_ble/strings.json @@ -25,35 +25,18 @@ "data": { "bindkey": "Bindkey" } - }, - "cloud_auth": { - "description": "Please provide your Mi app username and password. This data won't be saved and only used to retrieve the device encryption key. Usernames and passwords are case sensitive.", - "data": { - "username": "[%key:common::config_flow::data::username%]", - "password": "[%key:common::config_flow::data::password%]" - } - }, - "get_encryption_key_4_5_choose_method": { - "description": "A Mi device can be set up in Home Assistant in two different ways.\n\nYou can enter the bindkey yourself, or Home Assistant can import them from your Mi account.", - "menu_options": { - "cloud_auth": "Mi account (recommended)", - "get_encryption_key_4_5": "Enter encryption key manually" - } } }, "error": { "decryption_failed": "The provided bindkey did not work, sensor data could not be decrypted. Please check it and try again.", "expected_24_characters": "Expected a 24 character hexadecimal bindkey.", - "expected_32_characters": "Expected a 32 character hexadecimal bindkey.", - "auth_failed": "Authentication failed: {error_detail}", - "api_device_not_found": "The device was not found in your Mi account." + "expected_32_characters": "Expected a 32 character hexadecimal bindkey." }, "abort": { "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "api_error": "Error while communicating with Mi API: {error_detail}" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } }, "device_automation": { diff --git a/homeassistant/components/xiaomi_ble/types.py b/homeassistant/components/xiaomi_ble/types.py deleted file mode 100644 index f0de8af9d06..00000000000 --- a/homeassistant/components/xiaomi_ble/types.py +++ /dev/null @@ -1,10 +0,0 @@ -"""Support for xiaomi ble.""" - -from typing import TYPE_CHECKING - -from homeassistant.config_entries import ConfigEntry - -if TYPE_CHECKING: - from .coordinator import XiaomiActiveBluetoothProcessorCoordinator - -type XiaomiBLEConfigEntry = ConfigEntry[XiaomiActiveBluetoothProcessorCoordinator] diff --git a/homeassistant/components/xiaomi_miio/__init__.py b/homeassistant/components/xiaomi_miio/__init__.py index d841045d235..bea8d9b402f 100644 --- a/homeassistant/components/xiaomi_miio/__init__.py +++ b/homeassistant/components/xiaomi_miio/__init__.py @@ -56,7 +56,6 @@ from .const import ( MODEL_FAN_P9, MODEL_FAN_P10, MODEL_FAN_P11, - MODEL_FAN_P18, MODEL_FAN_ZA5, MODELS_AIR_MONITOR, MODELS_FAN, @@ -119,7 +118,6 @@ MODEL_TO_CLASS_MAP = { MODEL_FAN_P9: FanMiot, MODEL_FAN_P10: FanMiot, MODEL_FAN_P11: FanMiot, - MODEL_FAN_P18: FanMiot, MODEL_FAN_P5: FanP5, MODEL_FAN_ZA5: FanZA5, } @@ -188,9 +186,7 @@ def _async_update_data_default(hass, device): except DeviceException as ex: if getattr(ex, "code", None) != -9999: raise UpdateFailed(ex) from ex - _LOGGER.error( - "Got exception while fetching the state, trying again: %s", ex - ) + _LOGGER.info("Got exception while fetching the state, trying again: %s", ex) # Try to fetch the data a second time after error code -9999 try: return await _async_fetch_data() @@ -277,9 +273,7 @@ def _async_update_data_vacuum( except DeviceException as ex: if 
getattr(ex, "code", None) != -9999: raise UpdateFailed(ex) from ex - _LOGGER.error( - "Got exception while fetching the state, trying again: %s", ex - ) + _LOGGER.info("Got exception while fetching the state, trying again: %s", ex) # Try to fetch the data a second time after error code -9999 try: @@ -308,7 +302,6 @@ async def async_create_miio_device_and_coordinator( "zhimi.fan.za3": True, "zhimi.fan.za5": True, "zhimi.airpurifier.za1": True, - "dmaker.fan.1c": True, } lazy_discover = LAZY_DISCOVER_FOR_MODEL.get(model, False) @@ -388,7 +381,6 @@ async def async_create_miio_device_and_coordinator( coordinator = coordinator_class( hass, _LOGGER, - config_entry=entry, name=name, update_method=update_method(hass, device), # Polling interval. Will only be polled if there are subscribers. @@ -454,7 +446,6 @@ async def async_setup_gateway_entry(hass: HomeAssistant, entry: ConfigEntry) -> coordinator_dict[sub_device.sid] = DataUpdateCoordinator( hass, _LOGGER, - config_entry=entry, name=name, update_method=update_data_factory(sub_device), # Polling interval. Will only be polled if there are subscribers. diff --git a/homeassistant/components/xiaomi_miio/air_quality.py b/homeassistant/components/xiaomi_miio/air_quality.py index 199d9161353..80dd751a98c 100644 --- a/homeassistant/components/xiaomi_miio/air_quality.py +++ b/homeassistant/components/xiaomi_miio/air_quality.py @@ -18,7 +18,7 @@ from .const import ( MODEL_AIRQUALITYMONITOR_S1, MODEL_AIRQUALITYMONITOR_V1, ) -from .entity import XiaomiMiioEntity +from .device import XiaomiMiioEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/xiaomi_miio/alarm_control_panel.py b/homeassistant/components/xiaomi_miio/alarm_control_panel.py index 9c06198bc7e..58d5ed247ad 100644 --- a/homeassistant/components/xiaomi_miio/alarm_control_panel.py +++ b/homeassistant/components/xiaomi_miio/alarm_control_panel.py @@ -10,9 +10,13 @@ from miio import DeviceException from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, - AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMED, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -102,11 +106,11 @@ class XiaomiGatewayAlarm(AlarmControlPanelEntity): self._attr_available = True if state == XIAOMI_STATE_ARMED_VALUE: - self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY + self._attr_state = STATE_ALARM_ARMED_AWAY elif state == XIAOMI_STATE_DISARMED_VALUE: - self._attr_alarm_state = AlarmControlPanelState.DISARMED + self._attr_state = STATE_ALARM_DISARMED elif state == XIAOMI_STATE_ARMING_VALUE: - self._attr_alarm_state = AlarmControlPanelState.ARMING + self._attr_state = STATE_ALARM_ARMING else: _LOGGER.warning( "New state (%s) doesn't match expected values: %s/%s/%s", @@ -115,6 +119,6 @@ class XiaomiGatewayAlarm(AlarmControlPanelEntity): XIAOMI_STATE_DISARMED_VALUE, XIAOMI_STATE_ARMING_VALUE, ) - self._attr_alarm_state = None + self._attr_state = None - _LOGGER.debug("State value: %s", self._attr_alarm_state) + _LOGGER.debug("State value: %s", self._attr_state) diff --git a/homeassistant/components/xiaomi_miio/binary_sensor.py b/homeassistant/components/xiaomi_miio/binary_sensor.py index a5ab7e56e6b..7729ce27d29 100644 --- 
a/homeassistant/components/xiaomi_miio/binary_sensor.py +++ b/homeassistant/components/xiaomi_miio/binary_sensor.py @@ -32,7 +32,7 @@ from .const import ( MODELS_VACUUM_WITH_MOP, MODELS_VACUUM_WITH_SEPARATE_MOP, ) -from .entity import XiaomiCoordinatedMiioEntity +from .device import XiaomiCoordinatedMiioEntity _LOGGER = logging.getLogger(__name__) @@ -56,13 +56,13 @@ class XiaomiMiioBinarySensorDescription(BinarySensorEntityDescription): BINARY_SENSOR_TYPES = ( XiaomiMiioBinarySensorDescription( key=ATTR_NO_WATER, - translation_key=ATTR_NO_WATER, + name="Water tank empty", icon="mdi:water-off-outline", entity_category=EntityCategory.DIAGNOSTIC, ), XiaomiMiioBinarySensorDescription( key=ATTR_WATER_TANK_DETACHED, - translation_key=ATTR_WATER_TANK_DETACHED, + name="Water tank", icon="mdi:car-coolant-level", device_class=BinarySensorDeviceClass.CONNECTIVITY, value=lambda value: not value, @@ -70,13 +70,13 @@ BINARY_SENSOR_TYPES = ( ), XiaomiMiioBinarySensorDescription( key=ATTR_PTC_STATUS, - translation_key=ATTR_PTC_STATUS, + name="Auxiliary heat status", device_class=BinarySensorDeviceClass.POWER, entity_category=EntityCategory.DIAGNOSTIC, ), XiaomiMiioBinarySensorDescription( key=ATTR_POWERSUPPLY_ATTACHED, - translation_key=ATTR_POWERSUPPLY_ATTACHED, + name="Power supply", device_class=BinarySensorDeviceClass.PLUG, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -88,7 +88,7 @@ FAN_ZA5_BINARY_SENSORS = (ATTR_POWERSUPPLY_ATTACHED,) VACUUM_SENSORS = { ATTR_MOP_ATTACHED: XiaomiMiioBinarySensorDescription( key=ATTR_WATER_BOX_ATTACHED, - translation_key=ATTR_WATER_BOX_ATTACHED, + name="Mop attached", icon="mdi:square-rounded", parent_key=VacuumCoordinatorDataAttributes.status, entity_registry_enabled_default=True, @@ -97,7 +97,7 @@ VACUUM_SENSORS = { ), ATTR_WATER_BOX_ATTACHED: XiaomiMiioBinarySensorDescription( key=ATTR_WATER_BOX_ATTACHED, - translation_key=ATTR_WATER_BOX_ATTACHED, + name="Water box attached", icon="mdi:water", parent_key=VacuumCoordinatorDataAttributes.status, entity_registry_enabled_default=True, @@ -106,7 +106,7 @@ VACUUM_SENSORS = { ), ATTR_WATER_SHORTAGE: XiaomiMiioBinarySensorDescription( key=ATTR_WATER_SHORTAGE, - translation_key=ATTR_WATER_SHORTAGE, + name="Water shortage", icon="mdi:water", parent_key=VacuumCoordinatorDataAttributes.status, entity_registry_enabled_default=True, @@ -119,7 +119,7 @@ VACUUM_SENSORS_SEPARATE_MOP = { **VACUUM_SENSORS, ATTR_MOP_ATTACHED: XiaomiMiioBinarySensorDescription( key=ATTR_MOP_ATTACHED, - translation_key=ATTR_MOP_ATTACHED, + name="Mop attached", icon="mdi:square-rounded", parent_key=VacuumCoordinatorDataAttributes.status, entity_registry_enabled_default=True, @@ -190,8 +190,7 @@ async def async_setup_entry( elif model in MODELS_HUMIDIFIER_MJJSQ: sensors = HUMIDIFIER_MJJSQ_BINARY_SENSORS elif model in MODELS_VACUUM: - _setup_vacuum_sensors(hass, config_entry, async_add_entities) - return + return _setup_vacuum_sensors(hass, config_entry, async_add_entities) for description in BINARY_SENSOR_TYPES: if description.key not in sensors: diff --git a/homeassistant/components/xiaomi_miio/button.py b/homeassistant/components/xiaomi_miio/button.py index 9a64941f398..38e6afa5ffb 100644 --- a/homeassistant/components/xiaomi_miio/button.py +++ b/homeassistant/components/xiaomi_miio/button.py @@ -24,7 +24,7 @@ from .const import ( MODEL_AIRFRESH_T2017, MODELS_VACUUM, ) -from .entity import XiaomiCoordinatedMiioEntity +from .device import XiaomiCoordinatedMiioEntity # Fans ATTR_RESET_DUST_FILTER = "reset_dust_filter" @@ -51,7 +51,7 @@ BUTTON_TYPES = 
( # Fans XiaomiMiioButtonDescription( key=ATTR_RESET_DUST_FILTER, - translation_key=ATTR_RESET_DUST_FILTER, + name="Reset dust filter", icon="mdi:air-filter", method_press="reset_dust_filter", method_press_error_message="Resetting the dust filter lifetime failed", @@ -59,7 +59,7 @@ BUTTON_TYPES = ( ), XiaomiMiioButtonDescription( key=ATTR_RESET_UPPER_FILTER, - translation_key=ATTR_RESET_UPPER_FILTER, + name="Reset upper filter", icon="mdi:air-filter", method_press="reset_upper_filter", method_press_error_message="Resetting the upper filter lifetime failed.", @@ -68,7 +68,7 @@ BUTTON_TYPES = ( # Vacuums XiaomiMiioButtonDescription( key=ATTR_RESET_VACUUM_MAIN_BRUSH, - translation_key=ATTR_RESET_VACUUM_MAIN_BRUSH, + name="Reset main brush", icon="mdi:brush", method_press=METHOD_VACUUM_RESET_CONSUMABLE, method_press_params=Consumable.MainBrush, @@ -77,7 +77,7 @@ BUTTON_TYPES = ( ), XiaomiMiioButtonDescription( key=ATTR_RESET_VACUUM_SIDE_BRUSH, - translation_key=ATTR_RESET_VACUUM_SIDE_BRUSH, + name="Reset side brush", icon="mdi:brush", method_press=METHOD_VACUUM_RESET_CONSUMABLE, method_press_params=Consumable.SideBrush, @@ -86,7 +86,7 @@ BUTTON_TYPES = ( ), XiaomiMiioButtonDescription( key=ATTR_RESET_VACUUM_FILTER, - translation_key=ATTR_RESET_VACUUM_FILTER, + name="Reset filter", icon="mdi:air-filter", method_press=METHOD_VACUUM_RESET_CONSUMABLE, method_press_params=Consumable.Filter, @@ -95,7 +95,7 @@ BUTTON_TYPES = ( ), XiaomiMiioButtonDescription( key=ATTR_RESET_VACUUM_SENSOR_DIRTY, - translation_key=ATTR_RESET_VACUUM_SENSOR_DIRTY, + name="Reset sensor dirty", icon="mdi:eye-outline", method_press=METHOD_VACUUM_RESET_CONSUMABLE, method_press_params=Consumable.SensorDirty, diff --git a/homeassistant/components/xiaomi_miio/config_flow.py b/homeassistant/components/xiaomi_miio/config_flow.py index b068f4a1e61..c689ede27eb 100644 --- a/homeassistant/components/xiaomi_miio/config_flow.py +++ b/homeassistant/components/xiaomi_miio/config_flow.py @@ -13,6 +13,7 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -63,6 +64,10 @@ DEVICE_CLOUD_CONFIG = vol.Schema( class OptionsFlowHandler(OptionsFlow): """Options for the component.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Init object.""" + self.config_entry = config_entry + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -78,7 +83,14 @@ class OptionsFlowHandler(OptionsFlow): not cloud_username or not cloud_password or not cloud_country ): errors["base"] = "cloud_credentials_incomplete" - self.config_entry.async_start_reauth(self.hass) + # trigger re-auth flow + self.hass.async_create_task( + self.hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH}, + data=self.config_entry.data, + ) + ) if not errors: return self.async_create_entry(title="", data=user_input) @@ -118,7 +130,7 @@ class XiaomiMiioFlowHandler(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler() + return OptionsFlowHandler(config_entry) async def async_step_reauth( self, entry_data: Mapping[str, Any] @@ -225,9 +237,7 @@ class XiaomiMiioFlowHandler(ConfigFlow, domain=DOMAIN): step_id="cloud", data_schema=DEVICE_CLOUD_CONFIG, errors=errors ) - miio_cloud = await self.hass.async_add_executor_job( - MiCloud, cloud_username, cloud_password - ) + 
miio_cloud = MiCloud(cloud_username, cloud_password) try: if not await self.hass.async_add_executor_job(miio_cloud.login): errors["base"] = "cloud_login_error" diff --git a/homeassistant/components/xiaomi_miio/const.py b/homeassistant/components/xiaomi_miio/const.py index 2b9cdb2ffdd..24b494f3d08 100644 --- a/homeassistant/components/xiaomi_miio/const.py +++ b/homeassistant/components/xiaomi_miio/const.py @@ -60,7 +60,6 @@ MODEL_AIRPURIFIER_2H = "zhimi.airpurifier.mc2" MODEL_AIRPURIFIER_2S = "zhimi.airpurifier.mc1" MODEL_AIRPURIFIER_3 = "zhimi.airpurifier.ma4" MODEL_AIRPURIFIER_3C = "zhimi.airpurifier.mb4" -MODEL_AIRPURIFIER_3C_REV_A = "zhimi.airp.mb4a" MODEL_AIRPURIFIER_3H = "zhimi.airpurifier.mb3" MODEL_AIRPURIFIER_M1 = "zhimi.airpurifier.m1" MODEL_AIRPURIFIER_M2 = "zhimi.airpurifier.m2" @@ -94,7 +93,6 @@ MODEL_AIRFRESH_T2017 = "dmaker.airfresh.t2017" MODEL_FAN_1C = "dmaker.fan.1c" MODEL_FAN_P10 = "dmaker.fan.p10" MODEL_FAN_P11 = "dmaker.fan.p11" -MODEL_FAN_P18 = "dmaker.fan.p18" MODEL_FAN_P5 = "dmaker.fan.p5" MODEL_FAN_P9 = "dmaker.fan.p9" MODEL_FAN_SA1 = "zhimi.fan.sa1" @@ -119,7 +117,6 @@ MODELS_FAN_MIOT = [ MODEL_FAN_1C, MODEL_FAN_P10, MODEL_FAN_P11, - MODEL_FAN_P18, MODEL_FAN_P9, MODEL_FAN_ZA5, ] @@ -127,7 +124,6 @@ MODELS_FAN_MIOT = [ MODELS_PURIFIER_MIOT = [ MODEL_AIRPURIFIER_3, MODEL_AIRPURIFIER_3C, - MODEL_AIRPURIFIER_3C_REV_A, MODEL_AIRPURIFIER_3H, MODEL_AIRPURIFIER_PROH, MODEL_AIRPURIFIER_PROH_EU, @@ -493,7 +489,7 @@ FEATURE_FLAGS_FAN_P9 = ( | FEATURE_SET_DELAY_OFF_COUNTDOWN ) -FEATURE_FLAGS_FAN_P10_P11_P18 = ( +FEATURE_FLAGS_FAN_P10_P11 = ( FEATURE_SET_BUZZER | FEATURE_SET_CHILD_LOCK | FEATURE_SET_OSCILLATION_ANGLE diff --git a/homeassistant/components/xiaomi_miio/device.py b/homeassistant/components/xiaomi_miio/device.py index beeb7e95e54..e90a86ab7e9 100644 --- a/homeassistant/components/xiaomi_miio/device.py +++ b/homeassistant/components/xiaomi_miio/device.py @@ -1,11 +1,24 @@ """Code to handle a Xiaomi Device.""" +import datetime +from enum import Enum +from functools import partial import logging +from typing import Any from construct.core import ChecksumError from miio import Device, DeviceException -from .const import AuthException, SetupException +from homeassistant.const import ATTR_CONNECTIONS, CONF_MAC, CONF_MODEL +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from .const import DOMAIN, AuthException, SetupException _LOGGER = logging.getLogger(__name__) @@ -53,3 +66,131 @@ class ConnectXiaomiDevice: self._device_info.firmware_version, self._device_info.hardware_version, ) + + +class XiaomiMiioEntity(Entity): + """Representation of a base Xiaomi Miio Entity.""" + + def __init__(self, name, device, entry, unique_id): + """Initialize the Xiaomi Miio Device.""" + self._device = device + self._model = entry.data[CONF_MODEL] + self._mac = entry.data[CONF_MAC] + self._device_id = entry.unique_id + self._unique_id = unique_id + self._name = name + self._available = None + + @property + def unique_id(self): + """Return an unique ID.""" + return self._unique_id + + @property + def name(self): + """Return the name of this entity, if any.""" + return self._name + + @property + def device_info(self) -> DeviceInfo: + """Return the device info.""" + device_info = DeviceInfo( + identifiers={(DOMAIN, self._device_id)}, + manufacturer="Xiaomi", + 
model=self._model, + name=self._name, + ) + + if self._mac is not None: + device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_NETWORK_MAC, self._mac)} + + return device_info + + +class XiaomiCoordinatedMiioEntity[_T: DataUpdateCoordinator[Any]]( + CoordinatorEntity[_T] +): + """Representation of a base a coordinated Xiaomi Miio Entity.""" + + _attr_has_entity_name = True + + def __init__(self, device, entry, unique_id, coordinator): + """Initialize the coordinated Xiaomi Miio Device.""" + super().__init__(coordinator) + self._device = device + self._model = entry.data[CONF_MODEL] + self._mac = entry.data[CONF_MAC] + self._device_id = entry.unique_id + self._device_name = entry.title + self._unique_id = unique_id + + @property + def unique_id(self): + """Return an unique ID.""" + return self._unique_id + + @property + def device_info(self) -> DeviceInfo: + """Return the device info.""" + device_info = DeviceInfo( + identifiers={(DOMAIN, self._device_id)}, + manufacturer="Xiaomi", + model=self._model, + name=self._device_name, + ) + + if self._mac is not None: + device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_NETWORK_MAC, self._mac)} + + return device_info + + async def _try_command(self, mask_error, func, *args, **kwargs): + """Call a miio device command handling error messages.""" + try: + result = await self.hass.async_add_executor_job( + partial(func, *args, **kwargs) + ) + except DeviceException as exc: + if self.available: + _LOGGER.error(mask_error, exc) + + return False + + _LOGGER.debug("Response received from miio device: %s", result) + return True + + @classmethod + def _extract_value_from_attribute(cls, state, attribute): + value = getattr(state, attribute) + if isinstance(value, Enum): + return value.value + if isinstance(value, datetime.timedelta): + return cls._parse_time_delta(value) + if isinstance(value, datetime.time): + return cls._parse_datetime_time(value) + if isinstance(value, datetime.datetime): + return cls._parse_datetime_datetime(value) + + if value is None: + _LOGGER.debug("Attribute %s is None, this is unexpected", attribute) + + return value + + @staticmethod + def _parse_time_delta(timedelta: datetime.timedelta) -> int: + return int(timedelta.total_seconds()) + + @staticmethod + def _parse_datetime_time(initial_time: datetime.time) -> str: + time = datetime.datetime.now().replace( + hour=initial_time.hour, minute=initial_time.minute, second=0, microsecond=0 + ) + + if time < datetime.datetime.now(): + time += datetime.timedelta(days=1) + + return time.isoformat() + + @staticmethod + def _parse_datetime_datetime(time: datetime.datetime) -> str: + return time.isoformat() diff --git a/homeassistant/components/xiaomi_miio/device_tracker.py b/homeassistant/components/xiaomi_miio/device_tracker.py index 1dfc5e53410..9acdb1cc53e 100644 --- a/homeassistant/components/xiaomi_miio/device_tracker.py +++ b/homeassistant/components/xiaomi_miio/device_tracker.py @@ -8,7 +8,7 @@ from miio import DeviceException, WifiRepeater import voluptuous as vol from homeassistant.components.device_tracker import ( - DOMAIN as DEVICE_TRACKER_DOMAIN, + DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) @@ -32,17 +32,15 @@ def get_scanner( ) -> XiaomiMiioDeviceScanner | None: """Return a Xiaomi MiIO device scanner.""" scanner = None - config = config[DEVICE_TRACKER_DOMAIN] + host = config[DOMAIN][CONF_HOST] + token = config[DOMAIN][CONF_TOKEN] - host = config[CONF_HOST] - token = config[CONF_TOKEN] - - _LOGGER.debug("Initializing with host %s (token %s...)", host, 
token[:5]) + _LOGGER.info("Initializing with host %s (token %s...)", host, token[:5]) try: device = WifiRepeater(host, token) device_info = device.info() - _LOGGER.debug( + _LOGGER.info( "%s %s %s detected", device_info.model, device_info.firmware_version, @@ -73,7 +71,7 @@ class XiaomiMiioDeviceScanner(DeviceScanner): return [device["mac"] for device in station_info.associated_stations] - async def async_get_device_name(self, device: str) -> str | None: + async def async_get_device_name(self, device): """Return None. The repeater doesn't provide the name of the associated device. diff --git a/homeassistant/components/xiaomi_miio/entity.py b/homeassistant/components/xiaomi_miio/entity.py deleted file mode 100644 index 0343a7526d7..00000000000 --- a/homeassistant/components/xiaomi_miio/entity.py +++ /dev/null @@ -1,193 +0,0 @@ -"""Code to handle a Xiaomi Device.""" - -import datetime -from enum import Enum -from functools import partial -import logging -from typing import Any - -from miio import DeviceException - -from homeassistant.const import ATTR_CONNECTIONS, CONF_MAC, CONF_MODEL -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) - -from .const import ATTR_AVAILABLE, DOMAIN - -_LOGGER = logging.getLogger(__name__) - - -class XiaomiMiioEntity(Entity): - """Representation of a base Xiaomi Miio Entity.""" - - def __init__(self, name, device, entry, unique_id): - """Initialize the Xiaomi Miio Device.""" - self._device = device - self._model = entry.data[CONF_MODEL] - self._mac = entry.data[CONF_MAC] - self._device_id = entry.unique_id - self._unique_id = unique_id - self._name = name - self._available = None - - @property - def unique_id(self): - """Return an unique ID.""" - return self._unique_id - - @property - def name(self): - """Return the name of this entity, if any.""" - return self._name - - @property - def device_info(self) -> DeviceInfo: - """Return the device info.""" - device_info = DeviceInfo( - identifiers={(DOMAIN, self._device_id)}, - manufacturer="Xiaomi", - model=self._model, - name=self._name, - ) - - if self._mac is not None: - device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_NETWORK_MAC, self._mac)} - - return device_info - - -class XiaomiCoordinatedMiioEntity[_T: DataUpdateCoordinator[Any]]( - CoordinatorEntity[_T] -): - """Representation of a base a coordinated Xiaomi Miio Entity.""" - - _attr_has_entity_name = True - - def __init__(self, device, entry, unique_id, coordinator): - """Initialize the coordinated Xiaomi Miio Device.""" - super().__init__(coordinator) - self._device = device - self._model = entry.data[CONF_MODEL] - self._mac = entry.data[CONF_MAC] - self._device_id = entry.unique_id - self._device_name = entry.title - self._unique_id = unique_id - - @property - def unique_id(self): - """Return an unique ID.""" - return self._unique_id - - @property - def device_info(self) -> DeviceInfo: - """Return the device info.""" - device_info = DeviceInfo( - identifiers={(DOMAIN, self._device_id)}, - manufacturer="Xiaomi", - model=self._model, - name=self._device_name, - ) - - if self._mac is not None: - device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_NETWORK_MAC, self._mac)} - - return device_info - - async def _try_command(self, mask_error, func, *args, **kwargs): - """Call a miio device command handling error messages.""" - try: - result = await 
self.hass.async_add_executor_job( - partial(func, *args, **kwargs) - ) - except DeviceException as exc: - if self.available: - _LOGGER.error(mask_error, exc) - - return False - - _LOGGER.debug("Response received from miio device: %s", result) - return True - - @classmethod - def _extract_value_from_attribute(cls, state, attribute): - value = getattr(state, attribute) - if isinstance(value, Enum): - return value.value - if isinstance(value, datetime.timedelta): - return cls._parse_time_delta(value) - if isinstance(value, datetime.time): - return cls._parse_datetime_time(value) - if isinstance(value, datetime.datetime): - return cls._parse_datetime_datetime(value) - - if value is None: - _LOGGER.debug("Attribute %s is None, this is unexpected", attribute) - - return value - - @staticmethod - def _parse_time_delta(timedelta: datetime.timedelta) -> int: - return int(timedelta.total_seconds()) - - @staticmethod - def _parse_datetime_time(initial_time: datetime.time) -> str: - time = datetime.datetime.now().replace( - hour=initial_time.hour, minute=initial_time.minute, second=0, microsecond=0 - ) - - if time < datetime.datetime.now(): - time += datetime.timedelta(days=1) - - return time.isoformat() - - @staticmethod - def _parse_datetime_datetime(time: datetime.datetime) -> str: - return time.isoformat() - - -class XiaomiGatewayDevice(CoordinatorEntity, Entity): - """Representation of a base Xiaomi Gateway Device.""" - - def __init__(self, coordinator, sub_device, entry): - """Initialize the Xiaomi Gateway Device.""" - super().__init__(coordinator) - self._sub_device = sub_device - self._entry = entry - self._unique_id = sub_device.sid - self._name = f"{sub_device.name} ({sub_device.sid})" - - @property - def unique_id(self): - """Return an unique ID.""" - return self._unique_id - - @property - def name(self): - """Return the name of this entity, if any.""" - return self._name - - @property - def device_info(self) -> DeviceInfo: - """Return the device info of the gateway.""" - return DeviceInfo( - identifiers={(DOMAIN, self._sub_device.sid)}, - via_device=(DOMAIN, self._entry.unique_id), - manufacturer="Xiaomi", - name=self._sub_device.name, - model=self._sub_device.model, - sw_version=self._sub_device.firmware_version, - hw_version=self._sub_device.zigbee_model, - ) - - @property - def available(self): - """Return if entity is available.""" - if self.coordinator.data is None: - return False - - return self.coordinator.data[ATTR_AVAILABLE] diff --git a/homeassistant/components/xiaomi_miio/fan.py b/homeassistant/components/xiaomi_miio/fan.py index 81ca38eb053..4e0e271b071 100644 --- a/homeassistant/components/xiaomi_miio/fan.py +++ b/homeassistant/components/xiaomi_miio/fan.py @@ -60,7 +60,7 @@ from .const import ( FEATURE_FLAGS_FAN_1C, FEATURE_FLAGS_FAN_P5, FEATURE_FLAGS_FAN_P9, - FEATURE_FLAGS_FAN_P10_P11_P18, + FEATURE_FLAGS_FAN_P10_P11, FEATURE_FLAGS_FAN_ZA5, FEATURE_RESET_FILTER, FEATURE_SET_EXTRA_FEATURES, @@ -71,7 +71,6 @@ from .const import ( MODEL_AIRPURIFIER_2H, MODEL_AIRPURIFIER_2S, MODEL_AIRPURIFIER_3C, - MODEL_AIRPURIFIER_3C_REV_A, MODEL_AIRPURIFIER_4, MODEL_AIRPURIFIER_4_LITE_RMA1, MODEL_AIRPURIFIER_4_LITE_RMB1, @@ -85,7 +84,6 @@ from .const import ( MODEL_FAN_P9, MODEL_FAN_P10, MODEL_FAN_P11, - MODEL_FAN_P18, MODEL_FAN_ZA5, MODELS_FAN_MIIO, MODELS_FAN_MIOT, @@ -93,15 +91,15 @@ from .const import ( SERVICE_RESET_FILTER, SERVICE_SET_EXTRA_FEATURES, ) -from .entity import XiaomiCoordinatedMiioEntity +from .device import XiaomiCoordinatedMiioEntity from .typing import 
ServiceMethodDetails _LOGGER = logging.getLogger(__name__) DATA_KEY = "fan.xiaomi_miio" -ATTR_MODE_NATURE = "nature" -ATTR_MODE_NORMAL = "normal" +ATTR_MODE_NATURE = "Nature" +ATTR_MODE_NORMAL = "Normal" # Air Purifier ATTR_BRIGHTNESS = "brightness" @@ -118,10 +116,6 @@ ATTR_BUTTON_PRESSED = "button_pressed" # Air Fresh A1 ATTR_FAVORITE_SPEED = "favorite_speed" -# Air Purifier 3C -ATTR_FAVORITE_RPM = "favorite_rpm" -ATTR_MOTOR_SPEED = "motor_speed" - # Map attributes to properties of the state object AVAILABLE_ATTRIBUTES_AIRPURIFIER_COMMON = { ATTR_EXTRA_FEATURES: "extra_features", @@ -221,7 +215,7 @@ async def async_setup_entry( coordinator = hass.data[DOMAIN][config_entry.entry_id][KEY_COORDINATOR] device = hass.data[DOMAIN][config_entry.entry_id][KEY_DEVICE] - if model in (MODEL_AIRPURIFIER_3C, MODEL_AIRPURIFIER_3C_REV_A): + if model == MODEL_AIRPURIFIER_3C: entity = XiaomiAirPurifierMB4( device, config_entry, @@ -300,7 +294,6 @@ class XiaomiGenericDevice(XiaomiCoordinatedMiioEntity, FanEntity): """Representation of a generic Xiaomi device.""" _attr_name = None - _enable_turn_on_off_backwards_compatibility = False def __init__(self, device, entry, unique_id, coordinator): """Initialize the generic Xiaomi device.""" @@ -486,9 +479,6 @@ class XiaomiAirPurifier(XiaomiGenericAirPurifier): self._preset_modes = PRESET_MODES_AIRPURIFIER self._attr_supported_features = FanEntityFeature.PRESET_MODE self._speed_count = 1 - self._attr_supported_features |= ( - FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON - ) self._state = self.coordinator.data.is_on self._state_attrs.update( @@ -613,68 +603,24 @@ class XiaomiAirPurifierMiot(XiaomiAirPurifier): class XiaomiAirPurifierMB4(XiaomiGenericAirPurifier): """Representation of a Xiaomi Air Purifier MB4.""" - def __init__(self, device, entry, unique_id, coordinator) -> None: + def __init__(self, device, entry, unique_id, coordinator): """Initialize Air Purifier MB4.""" super().__init__(device, entry, unique_id, coordinator) self._device_features = FEATURE_FLAGS_AIRPURIFIER_3C self._preset_modes = PRESET_MODES_AIRPURIFIER_3C - self._attr_supported_features = ( - FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) + self._attr_supported_features = FanEntityFeature.PRESET_MODE self._state = self.coordinator.data.is_on self._mode = self.coordinator.data.mode.value - self._favorite_rpm: int | None = None - self._speed_range = (300, 2200) - self._motor_speed = 0 @property def operation_mode_class(self): """Hold operation mode class.""" return AirpurifierMiotOperationMode - @property - def percentage(self) -> int | None: - """Return the current percentage based speed.""" - # show the actual fan speed in silent or auto preset mode - if self._mode != self.operation_mode_class["Favorite"].value: - return ranged_value_to_percentage(self._speed_range, self._motor_speed) - if self._favorite_rpm is None: - return None - if self._state: - return ranged_value_to_percentage(self._speed_range, self._favorite_rpm) - - return None - - async def async_set_percentage(self, percentage: int) -> None: - """Set the percentage of the fan. 
This method is a coroutine.""" - if percentage == 0: - await self.async_turn_off() - return - - favorite_rpm = int( - round(percentage_to_ranged_value(self._speed_range, percentage), -1) - ) - if not favorite_rpm: - return - if await self._try_command( - "Setting fan level of the miio device failed.", - self._device.set_favorite_rpm, - favorite_rpm, - ): - self._favorite_rpm = favorite_rpm - self._mode = self.operation_mode_class["Favorite"].value - self.async_write_ha_state() - async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode of the fan.""" - if not self._state: - await self.async_turn_on() - if await self._try_command( "Setting operation mode of the miio device failed.", self._device.set_mode, @@ -688,14 +634,6 @@ class XiaomiAirPurifierMB4(XiaomiGenericAirPurifier): """Fetch state from the device.""" self._state = self.coordinator.data.is_on self._mode = self.coordinator.data.mode.value - self._favorite_rpm = getattr(self.coordinator.data, ATTR_FAVORITE_RPM, None) - self._motor_speed = min( - self._speed_range[1], - max( - self._speed_range[0], - getattr(self.coordinator.data, ATTR_MOTOR_SPEED, 0), - ), - ) self.async_write_ha_state() @@ -725,10 +663,7 @@ class XiaomiAirFresh(XiaomiGenericAirPurifier): self._speed_count = 4 self._preset_modes = PRESET_MODES_AIRFRESH self._attr_supported_features = ( - FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON + FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE ) self._state = self.coordinator.data.is_on @@ -821,10 +756,7 @@ class XiaomiAirFreshA1(XiaomiGenericAirPurifier): self._device_features = FEATURE_FLAGS_AIRFRESH_A1 self._preset_modes = PRESET_MODES_AIRFRESH_A1 self._attr_supported_features = ( - FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON + FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE ) self._state = self.coordinator.data.is_on @@ -899,8 +831,6 @@ class XiaomiAirFreshT2017(XiaomiAirFreshA1): class XiaomiGenericFan(XiaomiGenericDevice): """Representation of a generic Xiaomi Fan.""" - _attr_translation_key = "generic_fan" - def __init__(self, device, entry, unique_id, coordinator): """Initialize the fan.""" super().__init__(device, entry, unique_id, coordinator) @@ -913,16 +843,14 @@ class XiaomiGenericFan(XiaomiGenericDevice): self._device_features = FEATURE_FLAGS_FAN_1C elif self._model == MODEL_FAN_P9: self._device_features = FEATURE_FLAGS_FAN_P9 - elif self._model in (MODEL_FAN_P10, MODEL_FAN_P11, MODEL_FAN_P18): - self._device_features = FEATURE_FLAGS_FAN_P10_P11_P18 + elif self._model in (MODEL_FAN_P10, MODEL_FAN_P11): + self._device_features = FEATURE_FLAGS_FAN_P10_P11 else: self._device_features = FEATURE_FLAGS_FAN self._attr_supported_features = ( FanEntityFeature.SET_SPEED | FanEntityFeature.OSCILLATE | FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON ) if self._model != MODEL_FAN_1C: self._attr_supported_features |= FanEntityFeature.DIRECTION diff --git a/homeassistant/components/xiaomi_miio/gateway.py b/homeassistant/components/xiaomi_miio/gateway.py index dd5deec2296..39e8ce503a4 100644 --- a/homeassistant/components/xiaomi_miio/gateway.py +++ b/homeassistant/components/xiaomi_miio/gateway.py @@ -8,11 +8,17 @@ from micloud.micloudexception import MiCloudAccessDenied from miio import DeviceException, gateway from miio.gateway.gateway import GATEWAY_MODEL_EU +from 
homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.update_coordinator import CoordinatorEntity + from .const import ( + ATTR_AVAILABLE, CONF_CLOUD_COUNTRY, CONF_CLOUD_PASSWORD, CONF_CLOUD_SUBDEVICES, CONF_CLOUD_USERNAME, + DOMAIN, AuthException, SetupException, ) @@ -87,7 +93,7 @@ class ConnectXiaomiGateway: try: self._gateway_device.discover_devices() except DeviceException as error: - _LOGGER.error( + _LOGGER.info( ( "DeviceException during getting subdevices of xiaomi gateway" " with host %s, trying cloud to obtain subdevices: %s" @@ -128,3 +134,46 @@ class ConnectXiaomiGateway: "DeviceException during setup of xiaomi gateway with host" f" {self._host}" ) from error + + +class XiaomiGatewayDevice(CoordinatorEntity, Entity): + """Representation of a base Xiaomi Gateway Device.""" + + def __init__(self, coordinator, sub_device, entry): + """Initialize the Xiaomi Gateway Device.""" + super().__init__(coordinator) + self._sub_device = sub_device + self._entry = entry + self._unique_id = sub_device.sid + self._name = f"{sub_device.name} ({sub_device.sid})" + + @property + def unique_id(self): + """Return an unique ID.""" + return self._unique_id + + @property + def name(self): + """Return the name of this entity, if any.""" + return self._name + + @property + def device_info(self) -> DeviceInfo: + """Return the device info of the gateway.""" + return DeviceInfo( + identifiers={(DOMAIN, self._sub_device.sid)}, + via_device=(DOMAIN, self._entry.unique_id), + manufacturer="Xiaomi", + name=self._sub_device.name, + model=self._sub_device.model, + sw_version=self._sub_device.firmware_version, + hw_version=self._sub_device.zigbee_model, + ) + + @property + def available(self): + """Return if entity is available.""" + if self.coordinator.data is None: + return False + + return self.coordinator.data[ATTR_AVAILABLE] diff --git a/homeassistant/components/xiaomi_miio/humidifier.py b/homeassistant/components/xiaomi_miio/humidifier.py index 4701345756a..8367b063102 100644 --- a/homeassistant/components/xiaomi_miio/humidifier.py +++ b/homeassistant/components/xiaomi_miio/humidifier.py @@ -37,7 +37,7 @@ from .const import ( MODELS_HUMIDIFIER_MIOT, MODELS_HUMIDIFIER_MJJSQ, ) -from .entity import XiaomiCoordinatedMiioEntity +from .device import XiaomiCoordinatedMiioEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/xiaomi_miio/icons.json b/homeassistant/components/xiaomi_miio/icons.json index cc0800f1d9d..bbd3f6607d7 100644 --- a/homeassistant/components/xiaomi_miio/icons.json +++ b/homeassistant/components/xiaomi_miio/icons.json @@ -1,90 +1,28 @@ { - "entity": { - "fan": { - "generic_fan": { - "state_attributes": { - "preset_mode": { - "state": { - "nature": "mdi:leaf", - "normal": "mdi:weather-windy" - } - } - } - } - } - }, "services": { - "fan_reset_filter": { - "service": "mdi:refresh" - }, - "fan_set_extra_features": { - "service": "mdi:cog" - }, - "light_set_scene": { - "service": "mdi:palette" - }, - "light_set_delayed_turn_off": { - "service": "mdi:timer" - }, - "light_reminder_on": { - "service": "mdi:alarm" - }, - "light_reminder_off": { - "service": "mdi:alarm-off" - }, - "light_night_light_mode_on": { - "service": "mdi:weather-night" - }, - "light_night_light_mode_off": { - "service": "mdi:weather-sunny" - }, - "light_eyecare_mode_on": { - "service": "mdi:eye" - }, - "light_eyecare_mode_off": { - "service": "mdi:eye-off" - }, - "remote_learn_command": { - "service": "mdi:remote" - }, 
- "remote_set_led_on": { - "service": "mdi:led-on" - }, - "remote_set_led_off": { - "service": "mdi:led-off" - }, - "switch_set_wifi_led_on": { - "service": "mdi:wifi" - }, - "switch_set_wifi_led_off": { - "service": "mdi:wifi-off" - }, - "switch_set_power_price": { - "service": "mdi:currency-usd" - }, - "switch_set_power_mode": { - "service": "mdi:power" - }, - "vacuum_remote_control_start": { - "service": "mdi:play" - }, - "vacuum_remote_control_stop": { - "service": "mdi:stop" - }, - "vacuum_remote_control_move": { - "service": "mdi:remote" - }, - "vacuum_remote_control_move_step": { - "service": "mdi:remote" - }, - "vacuum_clean_zone": { - "service": "mdi:map-marker" - }, - "vacuum_goto": { - "service": "mdi:map-marker" - }, - "vacuum_clean_segment": { - "service": "mdi:map-marker" - } + "fan_reset_filter": "mdi:refresh", + "fan_set_extra_features": "mdi:cog", + "light_set_scene": "mdi:palette", + "light_set_delayed_turn_off": "mdi:timer", + "light_reminder_on": "mdi:alarm", + "light_reminder_off": "mdi:alarm-off", + "light_night_light_mode_on": "mdi:weather-night", + "light_night_light_mode_off": "mdi:weather-sunny", + "light_eyecare_mode_on": "mdi:eye", + "light_eyecare_mode_off": "mdi:eye-off", + "remote_learn_command": "mdi:remote", + "remote_set_led_on": "mdi:led-on", + "remote_set_led_off": "mdi:led-off", + "switch_set_wifi_led_on": "mdi:wifi", + "switch_set_wifi_led_off": "mdi:wifi-off", + "switch_set_power_price": "mdi:currency-usd", + "switch_set_power_mode": "mdi:power", + "vacuum_remote_control_start": "mdi:play", + "vacuum_remote_control_stop": "mdi:stop", + "vacuum_remote_control_move": "mdi:remote", + "vacuum_remote_control_move_step": "mdi:remote", + "vacuum_clean_zone": "mdi:map-marker", + "vacuum_goto": "mdi:map-marker", + "vacuum_clean_segment": "mdi:map-marker" } } diff --git a/homeassistant/components/xiaomi_miio/light.py b/homeassistant/components/xiaomi_miio/light.py index 8ccc798a2e1..35537e82b2e 100644 --- a/homeassistant/components/xiaomi_miio/light.py +++ b/homeassistant/components/xiaomi_miio/light.py @@ -66,7 +66,8 @@ from .const import ( SERVICE_SET_DELAYED_TURN_OFF, SERVICE_SET_SCENE, ) -from .entity import XiaomiGatewayDevice, XiaomiMiioEntity +from .device import XiaomiMiioEntity +from .gateway import XiaomiGatewayDevice from .typing import ServiceMethodDetails _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/xiaomi_miio/number.py b/homeassistant/components/xiaomi_miio/number.py index a3c501aad3f..a0ae0ea5078 100644 --- a/homeassistant/components/xiaomi_miio/number.py +++ b/homeassistant/components/xiaomi_miio/number.py @@ -50,7 +50,7 @@ from .const import ( FEATURE_FLAGS_FAN_1C, FEATURE_FLAGS_FAN_P5, FEATURE_FLAGS_FAN_P9, - FEATURE_FLAGS_FAN_P10_P11_P18, + FEATURE_FLAGS_FAN_P10_P11, FEATURE_FLAGS_FAN_ZA5, FEATURE_SET_DELAY_OFF_COUNTDOWN, FEATURE_SET_FAN_LEVEL, @@ -72,7 +72,6 @@ from .const import ( MODEL_AIRHUMIDIFIER_CB1, MODEL_AIRPURIFIER_2S, MODEL_AIRPURIFIER_3C, - MODEL_AIRPURIFIER_3C_REV_A, MODEL_AIRPURIFIER_4, MODEL_AIRPURIFIER_4_LITE_RMA1, MODEL_AIRPURIFIER_4_LITE_RMB1, @@ -87,7 +86,6 @@ from .const import ( MODEL_FAN_P9, MODEL_FAN_P10, MODEL_FAN_P11, - MODEL_FAN_P18, MODEL_FAN_SA1, MODEL_FAN_V2, MODEL_FAN_V3, @@ -98,7 +96,7 @@ from .const import ( MODELS_PURIFIER_MIIO, MODELS_PURIFIER_MIOT, ) -from .entity import XiaomiCoordinatedMiioEntity +from .device import XiaomiCoordinatedMiioEntity ATTR_DELAY_OFF_COUNTDOWN = "delay_off_countdown" ATTR_FAN_LEVEL = "fan_level" @@ -141,7 +139,7 @@ class FavoriteLevelValues: 
NUMBER_TYPES = { FEATURE_SET_MOTOR_SPEED: XiaomiMiioNumberDescription( key=ATTR_MOTOR_SPEED, - translation_key=ATTR_MOTOR_SPEED, + name="Motor speed", icon="mdi:fast-forward-outline", native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, native_min_value=200, @@ -153,7 +151,7 @@ NUMBER_TYPES = { ), FEATURE_SET_FAVORITE_LEVEL: XiaomiMiioNumberDescription( key=ATTR_FAVORITE_LEVEL, - translation_key=ATTR_FAVORITE_LEVEL, + name="Favorite level", icon="mdi:star-cog", native_min_value=0, native_max_value=17, @@ -163,7 +161,7 @@ NUMBER_TYPES = { ), FEATURE_SET_FAN_LEVEL: XiaomiMiioNumberDescription( key=ATTR_FAN_LEVEL, - translation_key=ATTR_FAN_LEVEL, + name="Fan level", icon="mdi:fan", native_min_value=1, native_max_value=3, @@ -173,7 +171,7 @@ NUMBER_TYPES = { ), FEATURE_SET_VOLUME: XiaomiMiioNumberDescription( key=ATTR_VOLUME, - translation_key=ATTR_VOLUME, + name="Volume", icon="mdi:volume-high", native_min_value=0, native_max_value=100, @@ -183,7 +181,7 @@ NUMBER_TYPES = { ), FEATURE_SET_OSCILLATION_ANGLE: XiaomiMiioNumberDescription( key=ATTR_OSCILLATION_ANGLE, - translation_key=ATTR_OSCILLATION_ANGLE, + name="Oscillation angle", icon="mdi:angle-acute", native_unit_of_measurement=DEGREE, native_min_value=1, @@ -194,7 +192,7 @@ NUMBER_TYPES = { ), FEATURE_SET_DELAY_OFF_COUNTDOWN: XiaomiMiioNumberDescription( key=ATTR_DELAY_OFF_COUNTDOWN, - translation_key=ATTR_DELAY_OFF_COUNTDOWN, + name="Delay off countdown", icon="mdi:fan-off", native_unit_of_measurement=UnitOfTime.MINUTES, native_min_value=0, @@ -205,7 +203,7 @@ NUMBER_TYPES = { ), FEATURE_SET_LED_BRIGHTNESS: XiaomiMiioNumberDescription( key=ATTR_LED_BRIGHTNESS, - translation_key=ATTR_LED_BRIGHTNESS, + name="LED brightness", icon="mdi:brightness-6", native_min_value=0, native_max_value=100, @@ -215,7 +213,7 @@ NUMBER_TYPES = { ), FEATURE_SET_LED_BRIGHTNESS_LEVEL: XiaomiMiioNumberDescription( key=ATTR_LED_BRIGHTNESS_LEVEL, - translation_key=ATTR_LED_BRIGHTNESS_LEVEL, + name="LED brightness", icon="mdi:brightness-6", native_min_value=0, native_max_value=8, @@ -225,7 +223,7 @@ NUMBER_TYPES = { ), FEATURE_SET_FAVORITE_RPM: XiaomiMiioNumberDescription( key=ATTR_FAVORITE_RPM, - translation_key=ATTR_FAVORITE_RPM, + name="Favorite motor speed", icon="mdi:star-cog", native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, native_min_value=300, @@ -246,7 +244,6 @@ MODEL_TO_FEATURES_MAP = { MODEL_AIRHUMIDIFIER_CB1: FEATURE_FLAGS_AIRHUMIDIFIER_CA_AND_CB, MODEL_AIRPURIFIER_2S: FEATURE_FLAGS_AIRPURIFIER_2S, MODEL_AIRPURIFIER_3C: FEATURE_FLAGS_AIRPURIFIER_3C, - MODEL_AIRPURIFIER_3C_REV_A: FEATURE_FLAGS_AIRPURIFIER_3C, MODEL_AIRPURIFIER_PRO: FEATURE_FLAGS_AIRPURIFIER_PRO, MODEL_AIRPURIFIER_PRO_V7: FEATURE_FLAGS_AIRPURIFIER_PRO_V7, MODEL_AIRPURIFIER_V1: FEATURE_FLAGS_AIRPURIFIER_V1, @@ -257,9 +254,8 @@ MODEL_TO_FEATURES_MAP = { MODEL_AIRPURIFIER_4_PRO: FEATURE_FLAGS_AIRPURIFIER_4, MODEL_AIRPURIFIER_ZA1: FEATURE_FLAGS_AIRPURIFIER_ZA1, MODEL_FAN_1C: FEATURE_FLAGS_FAN_1C, - MODEL_FAN_P10: FEATURE_FLAGS_FAN_P10_P11_P18, - MODEL_FAN_P11: FEATURE_FLAGS_FAN_P10_P11_P18, - MODEL_FAN_P18: FEATURE_FLAGS_FAN_P10_P11_P18, + MODEL_FAN_P10: FEATURE_FLAGS_FAN_P10_P11, + MODEL_FAN_P11: FEATURE_FLAGS_FAN_P10_P11, MODEL_FAN_P5: FEATURE_FLAGS_FAN_P5, MODEL_FAN_P9: FEATURE_FLAGS_FAN_P9, MODEL_FAN_SA1: FEATURE_FLAGS_FAN, @@ -277,7 +273,6 @@ OSCILLATION_ANGLE_VALUES = { MODEL_FAN_P9: OscillationAngleValues(max_value=150, min_value=30, step=30), MODEL_FAN_P10: OscillationAngleValues(max_value=140, min_value=30, step=30), MODEL_FAN_P11: OscillationAngleValues(max_value=140, 
min_value=30, step=30), - MODEL_FAN_P18: OscillationAngleValues(max_value=140, min_value=30, step=30), } FAVORITE_LEVEL_VALUES = { diff --git a/homeassistant/components/xiaomi_miio/remote.py b/homeassistant/components/xiaomi_miio/remote.py index 9c83f3f4674..959bf0a7bee 100644 --- a/homeassistant/components/xiaomi_miio/remote.py +++ b/homeassistant/components/xiaomi_miio/remote.py @@ -77,7 +77,7 @@ async def async_setup_platform( token = config[CONF_TOKEN] # Create handler - _LOGGER.debug("Initializing with host %s (token %s...)", host, token[:5]) + _LOGGER.info("Initializing with host %s (token %s...)", host, token[:5]) # The Chuang Mi IR Remote Controller wants to be re-discovered every # 5 minutes. As long as polling is disabled the device should be @@ -89,7 +89,7 @@ async def async_setup_platform( device_info = await hass.async_add_executor_job(device.info) model = device_info.model unique_id = f"{model}-{device_info.mac_address}" - _LOGGER.debug( + _LOGGER.info( "%s %s %s detected", model, device_info.firmware_version, @@ -170,12 +170,12 @@ async def async_setup_platform( ) platform.async_register_entity_service( SERVICE_SET_REMOTE_LED_ON, - None, + {}, async_service_led_on_handler, ) platform.async_register_entity_service( SERVICE_SET_REMOTE_LED_OFF, - None, + {}, async_service_led_off_handler, ) diff --git a/homeassistant/components/xiaomi_miio/select.py b/homeassistant/components/xiaomi_miio/select.py index eb0d6bca205..b785adef15a 100644 --- a/homeassistant/components/xiaomi_miio/select.py +++ b/homeassistant/components/xiaomi_miio/select.py @@ -63,7 +63,7 @@ from .const import ( MODEL_FAN_ZA3, MODEL_FAN_ZA4, ) -from .entity import XiaomiCoordinatedMiioEntity +from .device import XiaomiCoordinatedMiioEntity ATTR_DISPLAY_ORIENTATION = "display_orientation" ATTR_LED_BRIGHTNESS = "led_brightness" diff --git a/homeassistant/components/xiaomi_miio/sensor.py b/homeassistant/components/xiaomi_miio/sensor.py index 3f6f4e9b50b..ab992a8fe96 100644 --- a/homeassistant/components/xiaomi_miio/sensor.py +++ b/homeassistant/components/xiaomi_miio/sensor.py @@ -62,7 +62,6 @@ from .const import ( MODEL_AIRHUMIDIFIER_CA1, MODEL_AIRHUMIDIFIER_CB1, MODEL_AIRPURIFIER_3C, - MODEL_AIRPURIFIER_3C_REV_A, MODEL_AIRPURIFIER_4, MODEL_AIRPURIFIER_4_LITE_RMA1, MODEL_AIRPURIFIER_4_LITE_RMB1, @@ -90,7 +89,8 @@ from .const import ( ROBOROCK_GENERIC, ROCKROBO_GENERIC, ) -from .entity import XiaomiCoordinatedMiioEntity, XiaomiGatewayDevice, XiaomiMiioEntity +from .device import XiaomiCoordinatedMiioEntity, XiaomiMiioEntity +from .gateway import XiaomiGatewayDevice _LOGGER = logging.getLogger(__name__) @@ -162,31 +162,34 @@ class XiaomiMiioSensorDescription(SensorEntityDescription): SENSOR_TYPES = { ATTR_TEMPERATURE: XiaomiMiioSensorDescription( key=ATTR_TEMPERATURE, + name="Temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, ), ATTR_HUMIDITY: XiaomiMiioSensorDescription( key=ATTR_HUMIDITY, + name="Humidity", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.HUMIDITY, state_class=SensorStateClass.MEASUREMENT, ), ATTR_PRESSURE: XiaomiMiioSensorDescription( key=ATTR_PRESSURE, + name="Pressure", native_unit_of_measurement=UnitOfPressure.HPA, device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE, state_class=SensorStateClass.MEASUREMENT, ), ATTR_LOAD_POWER: XiaomiMiioSensorDescription( key=ATTR_LOAD_POWER, - translation_key=ATTR_LOAD_POWER, + name="Load power", 
native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, ), ATTR_WATER_LEVEL: XiaomiMiioSensorDescription( key=ATTR_WATER_LEVEL, - translation_key=ATTR_WATER_LEVEL, + name="Water level", native_unit_of_measurement=PERCENTAGE, icon="mdi:water-check", state_class=SensorStateClass.MEASUREMENT, @@ -194,7 +197,7 @@ SENSOR_TYPES = { ), ATTR_ACTUAL_SPEED: XiaomiMiioSensorDescription( key=ATTR_ACTUAL_SPEED, - translation_key=ATTR_ACTUAL_SPEED, + name="Actual speed", native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -202,7 +205,7 @@ SENSOR_TYPES = { ), ATTR_CONTROL_SPEED: XiaomiMiioSensorDescription( key=ATTR_CONTROL_SPEED, - translation_key=ATTR_CONTROL_SPEED, + name="Control speed", native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -210,7 +213,7 @@ SENSOR_TYPES = { ), ATTR_FAVORITE_SPEED: XiaomiMiioSensorDescription( key=ATTR_FAVORITE_SPEED, - translation_key=ATTR_FAVORITE_SPEED, + name="Favorite speed", native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -218,7 +221,7 @@ SENSOR_TYPES = { ), ATTR_MOTOR_SPEED: XiaomiMiioSensorDescription( key=ATTR_MOTOR_SPEED, - translation_key=ATTR_MOTOR_SPEED, + name="Motor speed", native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -226,7 +229,7 @@ SENSOR_TYPES = { ), ATTR_MOTOR2_SPEED: XiaomiMiioSensorDescription( key=ATTR_MOTOR2_SPEED, - translation_key=ATTR_MOTOR2_SPEED, + name="Second motor speed", native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -234,7 +237,7 @@ SENSOR_TYPES = { ), ATTR_USE_TIME: XiaomiMiioSensorDescription( key=ATTR_USE_TIME, - translation_key=ATTR_USE_TIME, + name="Use time", native_unit_of_measurement=UnitOfTime.SECONDS, icon="mdi:progress-clock", device_class=SensorDeviceClass.DURATION, @@ -244,52 +247,54 @@ SENSOR_TYPES = { ), ATTR_ILLUMINANCE: XiaomiMiioSensorDescription( key=ATTR_ILLUMINANCE, - translation_key=ATTR_ILLUMINANCE, + name="Illuminance", native_unit_of_measurement=UNIT_LUMEN, state_class=SensorStateClass.MEASUREMENT, ), ATTR_ILLUMINANCE_LUX: XiaomiMiioSensorDescription( key=ATTR_ILLUMINANCE, + name="Illuminance", native_unit_of_measurement=LIGHT_LUX, device_class=SensorDeviceClass.ILLUMINANCE, state_class=SensorStateClass.MEASUREMENT, ), ATTR_AIR_QUALITY: XiaomiMiioSensorDescription( key=ATTR_AIR_QUALITY, - translation_key=ATTR_AIR_QUALITY, native_unit_of_measurement="AQI", icon="mdi:cloud", state_class=SensorStateClass.MEASUREMENT, ), ATTR_TVOC: XiaomiMiioSensorDescription( key=ATTR_TVOC, - translation_key=ATTR_TVOC, + name="TVOC", state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, ), ATTR_PM10: XiaomiMiioSensorDescription( key=ATTR_PM10, + name="PM10", native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, device_class=SensorDeviceClass.PM10, state_class=SensorStateClass.MEASUREMENT, ), ATTR_PM25: XiaomiMiioSensorDescription( key=ATTR_AQI, - translation_key=ATTR_AQI, + name="PM2.5", native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, device_class=SensorDeviceClass.PM25, state_class=SensorStateClass.MEASUREMENT, ), ATTR_PM25_2: XiaomiMiioSensorDescription( key=ATTR_PM25, + name="PM2.5", 
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, device_class=SensorDeviceClass.PM25, state_class=SensorStateClass.MEASUREMENT, ), ATTR_FILTER_LIFE_REMAINING: XiaomiMiioSensorDescription( key=ATTR_FILTER_LIFE_REMAINING, - translation_key=ATTR_FILTER_LIFE_REMAINING, + name="Filter lifetime remaining", native_unit_of_measurement=PERCENTAGE, icon="mdi:air-filter", state_class=SensorStateClass.MEASUREMENT, @@ -298,7 +303,7 @@ SENSOR_TYPES = { ), ATTR_FILTER_USE: XiaomiMiioSensorDescription( key=ATTR_FILTER_HOURS_USED, - translation_key=ATTR_FILTER_HOURS_USED, + name="Filter use", native_unit_of_measurement=UnitOfTime.HOURS, icon="mdi:clock-outline", device_class=SensorDeviceClass.DURATION, @@ -307,7 +312,7 @@ SENSOR_TYPES = { ), ATTR_FILTER_LEFT_TIME: XiaomiMiioSensorDescription( key=ATTR_FILTER_LEFT_TIME, - translation_key=ATTR_FILTER_LEFT_TIME, + name="Filter lifetime left", native_unit_of_measurement=UnitOfTime.DAYS, icon="mdi:clock-outline", device_class=SensorDeviceClass.DURATION, @@ -316,7 +321,7 @@ SENSOR_TYPES = { ), ATTR_DUST_FILTER_LIFE_REMAINING: XiaomiMiioSensorDescription( key=ATTR_DUST_FILTER_LIFE_REMAINING, - translation_key=ATTR_DUST_FILTER_LIFE_REMAINING, + name="Dust filter lifetime remaining", native_unit_of_measurement=PERCENTAGE, icon="mdi:air-filter", state_class=SensorStateClass.MEASUREMENT, @@ -325,7 +330,7 @@ SENSOR_TYPES = { ), ATTR_DUST_FILTER_LIFE_REMAINING_DAYS: XiaomiMiioSensorDescription( key=ATTR_DUST_FILTER_LIFE_REMAINING_DAYS, - translation_key=ATTR_DUST_FILTER_LIFE_REMAINING_DAYS, + name="Dust filter lifetime remaining days", native_unit_of_measurement=UnitOfTime.DAYS, icon="mdi:clock-outline", device_class=SensorDeviceClass.DURATION, @@ -334,7 +339,7 @@ SENSOR_TYPES = { ), ATTR_UPPER_FILTER_LIFE_REMAINING: XiaomiMiioSensorDescription( key=ATTR_UPPER_FILTER_LIFE_REMAINING, - translation_key=ATTR_UPPER_FILTER_LIFE_REMAINING, + name="Upper filter lifetime remaining", native_unit_of_measurement=PERCENTAGE, icon="mdi:air-filter", state_class=SensorStateClass.MEASUREMENT, @@ -343,7 +348,7 @@ SENSOR_TYPES = { ), ATTR_UPPER_FILTER_LIFE_REMAINING_DAYS: XiaomiMiioSensorDescription( key=ATTR_UPPER_FILTER_LIFE_REMAINING_DAYS, - translation_key=ATTR_UPPER_FILTER_LIFE_REMAINING_DAYS, + name="Upper filter lifetime remaining days", native_unit_of_measurement=UnitOfTime.DAYS, icon="mdi:clock-outline", device_class=SensorDeviceClass.DURATION, @@ -352,13 +357,14 @@ SENSOR_TYPES = { ), ATTR_CARBON_DIOXIDE: XiaomiMiioSensorDescription( key=ATTR_CARBON_DIOXIDE, + name="Carbon dioxide", native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, device_class=SensorDeviceClass.CO2, state_class=SensorStateClass.MEASUREMENT, ), ATTR_PURIFY_VOLUME: XiaomiMiioSensorDescription( key=ATTR_PURIFY_VOLUME, - translation_key=ATTR_PURIFY_VOLUME, + name="Purify volume", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, device_class=SensorDeviceClass.VOLUME, state_class=SensorStateClass.TOTAL_INCREASING, @@ -367,6 +373,7 @@ SENSOR_TYPES = { ), ATTR_BATTERY: XiaomiMiioSensorDescription( key=ATTR_BATTERY, + name="Battery", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.BATTERY, state_class=SensorStateClass.MEASUREMENT, @@ -561,7 +568,6 @@ MODEL_TO_SENSORS_MAP: dict[str, tuple[str, ...]] = { MODEL_AIRHUMIDIFIER_CA1: HUMIDIFIER_CA1_CB1_SENSORS, MODEL_AIRHUMIDIFIER_CB1: HUMIDIFIER_CA1_CB1_SENSORS, MODEL_AIRPURIFIER_3C: PURIFIER_3C_SENSORS, - MODEL_AIRPURIFIER_3C_REV_A: PURIFIER_3C_SENSORS, MODEL_AIRPURIFIER_4_LITE_RMA1: PURIFIER_4_LITE_SENSORS, 
MODEL_AIRPURIFIER_4_LITE_RMB1: PURIFIER_4_LITE_SENSORS, MODEL_AIRPURIFIER_4: PURIFIER_4_SENSORS, @@ -581,7 +587,7 @@ VACUUM_SENSORS = { f"dnd_{ATTR_DND_START}": XiaomiMiioSensorDescription( key=ATTR_DND_START, icon="mdi:minus-circle-off", - translation_key="dnd_start", + name="DnD start", device_class=SensorDeviceClass.TIMESTAMP, parent_key=VacuumCoordinatorDataAttributes.dnd_status, entity_registry_enabled_default=False, @@ -590,7 +596,7 @@ VACUUM_SENSORS = { f"dnd_{ATTR_DND_END}": XiaomiMiioSensorDescription( key=ATTR_DND_END, icon="mdi:minus-circle-off", - translation_key="dnd_end", + name="DnD end", device_class=SensorDeviceClass.TIMESTAMP, parent_key=VacuumCoordinatorDataAttributes.dnd_status, entity_registry_enabled_default=False, @@ -599,7 +605,7 @@ VACUUM_SENSORS = { f"last_clean_{ATTR_LAST_CLEAN_START}": XiaomiMiioSensorDescription( key=ATTR_LAST_CLEAN_START, icon="mdi:clock-time-twelve", - translation_key="last_clean_start", + name="Last clean start", device_class=SensorDeviceClass.TIMESTAMP, parent_key=VacuumCoordinatorDataAttributes.last_clean_details, entity_category=EntityCategory.DIAGNOSTIC, @@ -609,7 +615,7 @@ VACUUM_SENSORS = { icon="mdi:clock-time-twelve", device_class=SensorDeviceClass.TIMESTAMP, parent_key=VacuumCoordinatorDataAttributes.last_clean_details, - translation_key="last_clean_end", + name="Last clean end", entity_category=EntityCategory.DIAGNOSTIC, ), f"last_clean_{ATTR_LAST_CLEAN_TIME}": XiaomiMiioSensorDescription( @@ -618,7 +624,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_LAST_CLEAN_TIME, parent_key=VacuumCoordinatorDataAttributes.last_clean_details, - translation_key=ATTR_LAST_CLEAN_TIME, + name="Last clean duration", entity_category=EntityCategory.DIAGNOSTIC, ), f"last_clean_{ATTR_LAST_CLEAN_AREA}": XiaomiMiioSensorDescription( @@ -626,7 +632,7 @@ VACUUM_SENSORS = { icon="mdi:texture-box", key=ATTR_LAST_CLEAN_AREA, parent_key=VacuumCoordinatorDataAttributes.last_clean_details, - translation_key=ATTR_LAST_CLEAN_AREA, + name="Last clean area", entity_category=EntityCategory.DIAGNOSTIC, ), f"current_{ATTR_STATUS_CLEAN_TIME}": XiaomiMiioSensorDescription( @@ -635,7 +641,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_STATUS_CLEAN_TIME, parent_key=VacuumCoordinatorDataAttributes.status, - translation_key=ATTR_STATUS_CLEAN_TIME, + name="Current clean duration", entity_category=EntityCategory.DIAGNOSTIC, ), f"current_{ATTR_LAST_CLEAN_AREA}": XiaomiMiioSensorDescription( @@ -644,7 +650,7 @@ VACUUM_SENSORS = { key=ATTR_STATUS_CLEAN_AREA, parent_key=VacuumCoordinatorDataAttributes.status, entity_category=EntityCategory.DIAGNOSTIC, - translation_key=ATTR_STATUS_CLEAN_AREA, + name="Current clean area", ), f"clean_history_{ATTR_CLEAN_HISTORY_TOTAL_DURATION}": XiaomiMiioSensorDescription( native_unit_of_measurement=UnitOfTime.SECONDS, @@ -652,7 +658,7 @@ VACUUM_SENSORS = { icon="mdi:timer-sand", key=ATTR_CLEAN_HISTORY_TOTAL_DURATION, parent_key=VacuumCoordinatorDataAttributes.clean_history_status, - translation_key=ATTR_CLEAN_HISTORY_TOTAL_DURATION, + name="Total duration", entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -661,7 +667,7 @@ VACUUM_SENSORS = { icon="mdi:texture-box", key=ATTR_CLEAN_HISTORY_TOTAL_AREA, parent_key=VacuumCoordinatorDataAttributes.clean_history_status, - translation_key=ATTR_CLEAN_HISTORY_TOTAL_AREA, + name="Total clean area", entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -671,7 +677,7 @@ VACUUM_SENSORS = { 
state_class=SensorStateClass.TOTAL_INCREASING, key=ATTR_CLEAN_HISTORY_COUNT, parent_key=VacuumCoordinatorDataAttributes.clean_history_status, - translation_key=ATTR_CLEAN_HISTORY_COUNT, + name="Total clean count", entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -681,7 +687,7 @@ VACUUM_SENSORS = { state_class=SensorStateClass.TOTAL_INCREASING, key=ATTR_CLEAN_HISTORY_DUST_COLLECTION_COUNT, parent_key=VacuumCoordinatorDataAttributes.clean_history_status, - translation_key=ATTR_CLEAN_HISTORY_DUST_COLLECTION_COUNT, + name="Total dust collection count", entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -691,7 +697,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_CONSUMABLE_STATUS_MAIN_BRUSH_LEFT, parent_key=VacuumCoordinatorDataAttributes.consumable_status, - translation_key=ATTR_CONSUMABLE_STATUS_MAIN_BRUSH_LEFT, + name="Main brush left", entity_category=EntityCategory.DIAGNOSTIC, ), f"consumable_{ATTR_CONSUMABLE_STATUS_SIDE_BRUSH_LEFT}": XiaomiMiioSensorDescription( @@ -700,7 +706,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_CONSUMABLE_STATUS_SIDE_BRUSH_LEFT, parent_key=VacuumCoordinatorDataAttributes.consumable_status, - translation_key=ATTR_CONSUMABLE_STATUS_SIDE_BRUSH_LEFT, + name="Side brush left", entity_category=EntityCategory.DIAGNOSTIC, ), f"consumable_{ATTR_CONSUMABLE_STATUS_FILTER_LEFT}": XiaomiMiioSensorDescription( @@ -709,7 +715,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_CONSUMABLE_STATUS_FILTER_LEFT, parent_key=VacuumCoordinatorDataAttributes.consumable_status, - translation_key=ATTR_CONSUMABLE_STATUS_FILTER_LEFT, + name="Filter left", entity_category=EntityCategory.DIAGNOSTIC, ), f"consumable_{ATTR_CONSUMABLE_STATUS_SENSOR_DIRTY_LEFT}": XiaomiMiioSensorDescription( @@ -718,7 +724,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_CONSUMABLE_STATUS_SENSOR_DIRTY_LEFT, parent_key=VacuumCoordinatorDataAttributes.consumable_status, - translation_key=ATTR_CONSUMABLE_STATUS_SENSOR_DIRTY_LEFT, + name="Sensor dirty left", entity_category=EntityCategory.DIAGNOSTIC, ), } diff --git a/homeassistant/components/xiaomi_miio/strings.json b/homeassistant/components/xiaomi_miio/strings.json index 31fe547b162..a9588855818 100644 --- a/homeassistant/components/xiaomi_miio/strings.json +++ b/homeassistant/components/xiaomi_miio/strings.json @@ -93,235 +93,6 @@ "high": "High" } } - }, - "fan": { - "generic_fan": { - "state_attributes": { - "preset_mode": { - "state": { - "nature": "Nature", - "normal": "Normal" - } - } - } - } - }, - "binary_sensor": { - "no_water": { - "name": "Water tank empty" - }, - "water_tank_detached": { - "name": "Water tank" - }, - "ptc_status": { - "name": "Auxiliary heat status" - }, - "powersupply_attached": { - "name": "Power supply" - }, - "is_water_box_attached": { - "name": "Mop attached" - }, - "is_water_shortage": { - "name": "Water shortage" - }, - "is_water_box_carriage_attached": { - "name": "[%key:component::xiaomi_miio::entity::binary_sensor::is_water_box_attached::name%]" - } - }, - "button": { - "reset_dust_filter": { - "name": "Reset dust filter" - }, - "reset_upper_filter": { - "name": "Reset upper filter" - }, - "reset_vacuum_main_brush": { - "name": "Reset main brush" - }, - "reset_vacuum_side_brush": { - "name": "Reset side brush" - }, - "reset_vacuum_filter": { - "name": "Reset filter" - }, - "reset_vacuum_sensor_dirty": { - "name": "Reset sensor dirty" - } - }, - 
"number": { - "motor_speed": { - "name": "Motor speed" - }, - "favorite_level": { - "name": "Favorite level" - }, - "fan_level": { - "name": "Fan level" - }, - "volume": { - "name": "Volume" - }, - "angle": { - "name": "Oscillation angle" - }, - "delay_off_countdown": { - "name": "Delay off countdown" - }, - "led_brightness": { - "name": "LED brightness" - }, - "led_brightness_level": { - "name": "LED brightness" - }, - "favorite_rpm": { - "name": "Favorite motor speed" - } - }, - "sensor": { - "load_power": { - "name": "Load power" - }, - "water_level": { - "name": "Water level" - }, - "actual_speed": { - "name": "Actual speed" - }, - "control_speed": { - "name": "Control speed" - }, - "favorite_speed": { - "name": "Favorite speed" - }, - "motor_speed": { - "name": "[%key:component::xiaomi_miio::entity::number::motor_speed::name%]" - }, - "motor2_speed": { - "name": "Second motor speed" - }, - "use_time": { - "name": "Use time" - }, - "illuminance": { - "name": "[%key:component::sensor::entity_component::illuminance::name%]" - }, - "air_quality": { - "name": "Air quality" - }, - "tvoc": { - "name": "TVOC" - }, - "air_quality_index": { - "name": "Air quality index" - }, - "filter_life_remaining": { - "name": "Filter lifetime remaining" - }, - "filter_hours_used": { - "name": "Filter use" - }, - "filter_left_time": { - "name": "Filter lifetime left" - }, - "dust_filter_life_remaining": { - "name": "Dust filter lifetime remaining" - }, - "dust_filter_life_remaining_days": { - "name": "Dust filter lifetime remaining days" - }, - "upper_filter_life_remaining": { - "name": "Upper filter lifetime remaining" - }, - "upper_filter_life_remaining_days": { - "name": "Upper filter lifetime remaining days" - }, - "purify_volume": { - "name": "Purify volume" - }, - "dnd_start": { - "name": "DnD start" - }, - "dnd_end": { - "name": "DnD end" - }, - "last_clean_start": { - "name": "Last clean start" - }, - "last_clean_end": { - "name": "Last clean end" - }, - "duration": { - "name": "Last clean duration" - }, - "area": { - "name": "Last clean area" - }, - "clean_time": { - "name": "Current clean duration" - }, - "clean_area": { - "name": "Current clean area" - }, - "total_duration": { - "name": "Total duration" - }, - "total_area": { - "name": "Total clean area" - }, - "count": { - "name": "Total clean count" - }, - "dust_collection_count": { - "name": "Total dust collection count" - }, - "main_brush_left": { - "name": "Main brush left" - }, - "side_brush_left": { - "name": "Side brush left" - }, - "filter_left": { - "name": "Filter left" - }, - "sensor_dirty_left": { - "name": "Sensor dirty left" - } - }, - "switch": { - "buzzer": { - "name": "Buzzer" - }, - "child_lock": { - "name": "Child lock" - }, - "display": { - "name": "Display" - }, - "dry": { - "name": "Dry mode" - }, - "clean_mode": { - "name": "Clean mode" - }, - "led": { - "name": "LED" - }, - "learn_mode": { - "name": "Learn mode" - }, - "auto_detect": { - "name": "Auto detect" - }, - "ionizer": { - "name": "Ionizer" - }, - "anion": { - "name": "[%key:component::xiaomi_miio::entity::switch::ionizer::name%]" - }, - "ptc": { - "name": "Auxiliary heat" - } } }, "services": { @@ -439,7 +210,7 @@ }, "remote_learn_command": { "name": "Remote learn command", - "description": "Learns an IR command, select **Perform action**, point the remote at the IR device, and the learned command will be shown as a notification in Overview.", + "description": "Learns an IR command, press \"Call Service\", point the remote at the IR device, and the learned command 
will be shown as a notification in Overview.", "fields": { "slot": { "name": "Slot", diff --git a/homeassistant/components/xiaomi_miio/switch.py b/homeassistant/components/xiaomi_miio/switch.py index 02f4d4e94e5..797a98d9fa1 100644 --- a/homeassistant/components/xiaomi_miio/switch.py +++ b/homeassistant/components/xiaomi_miio/switch.py @@ -59,7 +59,7 @@ from .const import ( FEATURE_FLAGS_FAN_1C, FEATURE_FLAGS_FAN_P5, FEATURE_FLAGS_FAN_P9, - FEATURE_FLAGS_FAN_P10_P11_P18, + FEATURE_FLAGS_FAN_P10_P11, FEATURE_FLAGS_FAN_ZA5, FEATURE_SET_ANION, FEATURE_SET_AUTO_DETECT, @@ -84,7 +84,6 @@ from .const import ( MODEL_AIRPURIFIER_2H, MODEL_AIRPURIFIER_2S, MODEL_AIRPURIFIER_3C, - MODEL_AIRPURIFIER_3C_REV_A, MODEL_AIRPURIFIER_4, MODEL_AIRPURIFIER_4_LITE_RMA1, MODEL_AIRPURIFIER_4_LITE_RMB1, @@ -99,7 +98,6 @@ from .const import ( MODEL_FAN_P9, MODEL_FAN_P10, MODEL_FAN_P11, - MODEL_FAN_P18, MODEL_FAN_ZA1, MODEL_FAN_ZA3, MODEL_FAN_ZA4, @@ -115,7 +113,8 @@ from .const import ( SERVICE_SET_WIFI_LED_ON, SUCCESS, ) -from .entity import XiaomiCoordinatedMiioEntity, XiaomiGatewayDevice, XiaomiMiioEntity +from .device import XiaomiCoordinatedMiioEntity, XiaomiMiioEntity +from .gateway import XiaomiGatewayDevice from .typing import ServiceMethodDetails _LOGGER = logging.getLogger(__name__) @@ -201,7 +200,6 @@ MODEL_TO_FEATURES_MAP = { MODEL_AIRPURIFIER_2H: FEATURE_FLAGS_AIRPURIFIER_2S, MODEL_AIRPURIFIER_2S: FEATURE_FLAGS_AIRPURIFIER_2S, MODEL_AIRPURIFIER_3C: FEATURE_FLAGS_AIRPURIFIER_3C, - MODEL_AIRPURIFIER_3C_REV_A: FEATURE_FLAGS_AIRPURIFIER_3C, MODEL_AIRPURIFIER_PRO: FEATURE_FLAGS_AIRPURIFIER_PRO, MODEL_AIRPURIFIER_PRO_V7: FEATURE_FLAGS_AIRPURIFIER_PRO_V7, MODEL_AIRPURIFIER_V1: FEATURE_FLAGS_AIRPURIFIER_V1, @@ -212,9 +210,8 @@ MODEL_TO_FEATURES_MAP = { MODEL_AIRPURIFIER_4_PRO: FEATURE_FLAGS_AIRPURIFIER_4, MODEL_AIRPURIFIER_ZA1: FEATURE_FLAGS_AIRPURIFIER_ZA1, MODEL_FAN_1C: FEATURE_FLAGS_FAN_1C, - MODEL_FAN_P10: FEATURE_FLAGS_FAN_P10_P11_P18, - MODEL_FAN_P11: FEATURE_FLAGS_FAN_P10_P11_P18, - MODEL_FAN_P18: FEATURE_FLAGS_FAN_P10_P11_P18, + MODEL_FAN_P10: FEATURE_FLAGS_FAN_P10_P11, + MODEL_FAN_P11: FEATURE_FLAGS_FAN_P10_P11, MODEL_FAN_P5: FEATURE_FLAGS_FAN_P5, MODEL_FAN_P9: FEATURE_FLAGS_FAN_P9, MODEL_FAN_ZA1: FEATURE_FLAGS_FAN, @@ -239,7 +236,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_BUZZER, feature=FEATURE_SET_BUZZER, - translation_key=ATTR_BUZZER, + name="Buzzer", icon="mdi:volume-high", method_on="async_set_buzzer_on", method_off="async_set_buzzer_off", @@ -248,7 +245,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_CHILD_LOCK, feature=FEATURE_SET_CHILD_LOCK, - translation_key=ATTR_CHILD_LOCK, + name="Child lock", icon="mdi:lock", method_on="async_set_child_lock_on", method_off="async_set_child_lock_off", @@ -257,7 +254,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_DISPLAY, feature=FEATURE_SET_DISPLAY, - translation_key=ATTR_DISPLAY, + name="Display", icon="mdi:led-outline", method_on="async_set_display_on", method_off="async_set_display_off", @@ -266,7 +263,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_DRY, feature=FEATURE_SET_DRY, - translation_key=ATTR_DRY, + name="Dry mode", icon="mdi:hair-dryer", method_on="async_set_dry_on", method_off="async_set_dry_off", @@ -275,7 +272,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_CLEAN, feature=FEATURE_SET_CLEAN, - translation_key=ATTR_CLEAN, + name="Clean mode", icon="mdi:shimmer", method_on="async_set_clean_on", method_off="async_set_clean_off", @@ -285,7 +282,7 @@ SWITCH_TYPES = ( 
XiaomiMiioSwitchDescription( key=ATTR_LED, feature=FEATURE_SET_LED, - translation_key=ATTR_LED, + name="LED", icon="mdi:led-outline", method_on="async_set_led_on", method_off="async_set_led_off", @@ -294,7 +291,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_LEARN_MODE, feature=FEATURE_SET_LEARN_MODE, - translation_key=ATTR_LEARN_MODE, + name="Learn mode", icon="mdi:school-outline", method_on="async_set_learn_mode_on", method_off="async_set_learn_mode_off", @@ -303,7 +300,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_AUTO_DETECT, feature=FEATURE_SET_AUTO_DETECT, - translation_key=ATTR_AUTO_DETECT, + name="Auto detect", method_on="async_set_auto_detect_on", method_off="async_set_auto_detect_off", entity_category=EntityCategory.CONFIG, @@ -311,7 +308,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_IONIZER, feature=FEATURE_SET_IONIZER, - translation_key=ATTR_IONIZER, + name="Ionizer", icon="mdi:shimmer", method_on="async_set_ionizer_on", method_off="async_set_ionizer_off", @@ -320,7 +317,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_ANION, feature=FEATURE_SET_ANION, - translation_key=ATTR_ANION, + name="Ionizer", icon="mdi:shimmer", method_on="async_set_anion_on", method_off="async_set_anion_off", @@ -329,7 +326,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_PTC, feature=FEATURE_SET_PTC, - translation_key=ATTR_PTC, + name="Auxiliary heat", icon="mdi:radiator", method_on="async_set_ptc_on", method_off="async_set_ptc_off", diff --git a/homeassistant/components/xiaomi_miio/vacuum.py b/homeassistant/components/xiaomi_miio/vacuum.py index b720cc90d2c..ef6f94c162f 100644 --- a/homeassistant/components/xiaomi_miio/vacuum.py +++ b/homeassistant/components/xiaomi_miio/vacuum.py @@ -41,7 +41,7 @@ from .const import ( SERVICE_START_REMOTE_CONTROL, SERVICE_STOP_REMOTE_CONTROL, ) -from .entity import XiaomiCoordinatedMiioEntity +from .device import XiaomiCoordinatedMiioEntity _LOGGER = logging.getLogger(__name__) @@ -104,13 +104,13 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_START_REMOTE_CONTROL, - None, + {}, MiroboVacuum.async_remote_control_start.__name__, ) platform.async_register_entity_service( SERVICE_STOP_REMOTE_CONTROL, - None, + {}, MiroboVacuum.async_remote_control_stop.__name__, ) diff --git a/homeassistant/components/xmpp/notify.py b/homeassistant/components/xmpp/notify.py index 3fb5dd166a1..824f996c675 100644 --- a/homeassistant/components/xmpp/notify.py +++ b/homeassistant/components/xmpp/notify.py @@ -190,13 +190,13 @@ async def async_send_message( # noqa: C901 _LOGGER.debug("Timeout set to %ss", timeout) url = await self.upload_file(timeout=timeout) - _LOGGER.debug("Upload success") + _LOGGER.info("Upload success") for recipient in recipients: if room: - _LOGGER.debug("Sending file to %s", room) + _LOGGER.info("Sending file to %s", room) message = self.Message(sto=room, stype="groupchat") else: - _LOGGER.debug("Sending file to %s", recipient) + _LOGGER.info("Sending file to %s", recipient) message = self.Message(sto=recipient, stype="chat") message["body"] = url message["oob"]["url"] = url @@ -264,7 +264,7 @@ async def async_send_message( # noqa: C901 uploaded via XEP_0363 and HTTP and returns the resulting URL """ - _LOGGER.debug("Getting file from %s", url) + _LOGGER.info("Getting file from %s", url) def get_url(url): """Return result for GET request to url.""" @@ -295,7 +295,7 @@ async def async_send_message( # noqa: C901 _LOGGER.debug("Got %s extension", extension) filename = 
self.get_random_filename(None, extension=extension) - _LOGGER.debug("Uploading file from URL, %s", filename) + _LOGGER.info("Uploading file from URL, %s", filename) return await self["xep_0363"].upload_file( filename, @@ -305,20 +305,16 @@ async def async_send_message( # noqa: C901 timeout=timeout, ) - def _read_upload_file(self, path: str) -> bytes: - """Read file from path.""" - with open(path, "rb") as upfile: - _LOGGER.debug("Reading file %s", path) - return upfile.read() - - async def upload_file_from_path(self, path: str, timeout=None): + async def upload_file_from_path(self, path, timeout=None): """Upload a file from a local file path via XEP_0363.""" - _LOGGER.debug("Uploading file from path, %s", path) + _LOGGER.info("Uploading file from path, %s", path) if not hass.config.is_allowed_path(path): raise PermissionError("Could not access file. Path not allowed") - input_file = await hass.async_add_executor_job(self._read_upload_file, path) + with open(path, "rb") as upfile: + _LOGGER.debug("Reading file %s", path) + input_file = upfile.read() filesize = len(input_file) _LOGGER.debug("Filesize is %s bytes", filesize) @@ -374,6 +370,6 @@ async def async_send_message( # noqa: C901 @staticmethod def discard_ssl_invalid_cert(event): """Do nothing if ssl certificate is invalid.""" - _LOGGER.debug("Ignoring invalid SSL certificate as requested") + _LOGGER.info("Ignoring invalid SSL certificate as requested") SendNotificationBot() diff --git a/homeassistant/components/xs1/__init__.py b/homeassistant/components/xs1/__init__.py index 6f7197817d7..e24fbc0181e 100644 --- a/homeassistant/components/xs1/__init__.py +++ b/homeassistant/components/xs1/__init__.py @@ -1,5 +1,6 @@ """Support for the EZcontrol XS1 gateway.""" +import asyncio import logging import voluptuous as vol @@ -16,6 +17,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType _LOGGER = logging.getLogger(__name__) @@ -42,6 +44,11 @@ CONFIG_SCHEMA = vol.Schema( PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.SWITCH] +# Lock used to limit the amount of concurrent update requests +# as the XS1 Gateway can only handle a very +# small amount of concurrent requests +UPDATE_LOCK = asyncio.Lock() + def setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up XS1 integration.""" @@ -81,3 +88,16 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: discovery.load_platform(hass, platform, DOMAIN, {}, config) return True + + +class XS1DeviceEntity(Entity): + """Representation of a base XS1 device.""" + + def __init__(self, device): + """Initialize the XS1 device.""" + self.device = device + + async def async_update(self): + """Retrieve latest device state.""" + async with UPDATE_LOCK: + await self.hass.async_add_executor_job(self.device.update) diff --git a/homeassistant/components/xs1/climate.py b/homeassistant/components/xs1/climate.py index c7d580631d3..e594f32adff 100644 --- a/homeassistant/components/xs1/climate.py +++ b/homeassistant/components/xs1/climate.py @@ -16,8 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ACTUATORS, DOMAIN as COMPONENT_DOMAIN, SENSORS -from .entity import XS1DeviceEntity +from . 
import ACTUATORS, DOMAIN as COMPONENT_DOMAIN, SENSORS, XS1DeviceEntity MIN_TEMP = 8 MAX_TEMP = 25 diff --git a/homeassistant/components/xs1/entity.py b/homeassistant/components/xs1/entity.py deleted file mode 100644 index 7239a6fd446..00000000000 --- a/homeassistant/components/xs1/entity.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Support for the EZcontrol XS1 gateway.""" - -import asyncio - -from homeassistant.helpers.entity import Entity - -# Lock used to limit the amount of concurrent update requests -# as the XS1 Gateway can only handle a very -# small amount of concurrent requests -UPDATE_LOCK = asyncio.Lock() - - -class XS1DeviceEntity(Entity): - """Representation of a base XS1 device.""" - - def __init__(self, device): - """Initialize the XS1 device.""" - self.device = device - - async def async_update(self): - """Retrieve latest device state.""" - async with UPDATE_LOCK: - await self.hass.async_add_executor_job(self.device.update) diff --git a/homeassistant/components/xs1/sensor.py b/homeassistant/components/xs1/sensor.py index b3895d67d82..e98fd33743b 100644 --- a/homeassistant/components/xs1/sensor.py +++ b/homeassistant/components/xs1/sensor.py @@ -9,8 +9,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ACTUATORS, DOMAIN as COMPONENT_DOMAIN, SENSORS -from .entity import XS1DeviceEntity +from . import ACTUATORS, DOMAIN as COMPONENT_DOMAIN, SENSORS, XS1DeviceEntity def setup_platform( diff --git a/homeassistant/components/xs1/switch.py b/homeassistant/components/xs1/switch.py index a8f66390a6d..c2af652d6ad 100644 --- a/homeassistant/components/xs1/switch.py +++ b/homeassistant/components/xs1/switch.py @@ -11,8 +11,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import ACTUATORS, DOMAIN as COMPONENT_DOMAIN -from .entity import XS1DeviceEntity +from . 
import ACTUATORS, DOMAIN as COMPONENT_DOMAIN, XS1DeviceEntity def setup_platform( diff --git a/homeassistant/components/yale/__init__.py b/homeassistant/components/yale/__init__.py deleted file mode 100644 index 1cbd9c87b57..00000000000 --- a/homeassistant/components/yale/__init__.py +++ /dev/null @@ -1,81 +0,0 @@ -"""Support for Yale devices.""" - -from __future__ import annotations - -from pathlib import Path -from typing import cast - -from aiohttp import ClientResponseError -from yalexs.const import Brand -from yalexs.exceptions import YaleApiError -from yalexs.manager.const import CONF_BRAND -from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation -from yalexs.manager.gateway import Config as YaleXSConfig - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import EVENT_HOMEASSISTANT_STOP -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import config_entry_oauth2_flow, device_registry as dr - -from .const import DOMAIN, PLATFORMS -from .data import YaleData -from .gateway import YaleGateway -from .util import async_create_yale_clientsession - -type YaleConfigEntry = ConfigEntry[YaleData] - - -async def async_setup_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool: - """Set up yale from a config entry.""" - session = async_create_yale_clientsession(hass) - implementation = ( - await config_entry_oauth2_flow.async_get_config_entry_implementation( - hass, entry - ) - ) - oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) - yale_gateway = YaleGateway(Path(hass.config.config_dir), session, oauth_session) - try: - await async_setup_yale(hass, entry, yale_gateway) - except (RequireValidation, InvalidAuth) as err: - raise ConfigEntryAuthFailed from err - except TimeoutError as err: - raise ConfigEntryNotReady("Timed out connecting to yale api") from err - except (YaleApiError, ClientResponseError, CannotConnect) as err: - raise ConfigEntryNotReady from err - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True - - -async def async_unload_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool: - """Unload a config entry.""" - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -async def async_setup_yale( - hass: HomeAssistant, entry: YaleConfigEntry, yale_gateway: YaleGateway -) -> None: - """Set up the yale component.""" - config = cast(YaleXSConfig, entry.data) - await yale_gateway.async_setup({**config, CONF_BRAND: Brand.YALE_GLOBAL}) - await yale_gateway.async_authenticate() - await yale_gateway.async_refresh_access_token_if_needed() - data = entry.runtime_data = YaleData(hass, yale_gateway) - entry.async_on_unload( - hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, data.async_stop) - ) - entry.async_on_unload(data.async_stop) - await data.async_setup() - - -async def async_remove_config_entry_device( - hass: HomeAssistant, config_entry: YaleConfigEntry, device_entry: dr.DeviceEntry -) -> bool: - """Remove yale config entry from a device if its no longer present.""" - return not any( - identifier - for identifier in device_entry.identifiers - if identifier[0] == DOMAIN - and config_entry.runtime_data.get_device(identifier[1]) - ) diff --git a/homeassistant/components/yale/application_credentials.py b/homeassistant/components/yale/application_credentials.py deleted file mode 100644 index 31b5b7a92c7..00000000000 --- 
a/homeassistant/components/yale/application_credentials.py +++ /dev/null @@ -1,15 +0,0 @@ -"""application_credentials platform the yale integration.""" - -from homeassistant.components.application_credentials import AuthorizationServer -from homeassistant.core import HomeAssistant - -OAUTH2_AUTHORIZE = "https://oauth.aaecosystem.com/authorize" -OAUTH2_TOKEN = "https://oauth.aaecosystem.com/access_token" - - -async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: - """Return authorization server.""" - return AuthorizationServer( - authorize_url=OAUTH2_AUTHORIZE, - token_url=OAUTH2_TOKEN, - ) diff --git a/homeassistant/components/yale/binary_sensor.py b/homeassistant/components/yale/binary_sensor.py deleted file mode 100644 index dbb00ad7d42..00000000000 --- a/homeassistant/components/yale/binary_sensor.py +++ /dev/null @@ -1,188 +0,0 @@ -"""Support for Yale binary sensors.""" - -from __future__ import annotations - -from collections.abc import Callable -from dataclasses import dataclass -from datetime import datetime, timedelta -from functools import partial -import logging - -from yalexs.activity import Activity, ActivityType -from yalexs.doorbell import DoorbellDetail -from yalexs.lock import LockDetail, LockDoorStatus -from yalexs.manager.const import ACTIVITY_UPDATE_INTERVAL -from yalexs.util import update_lock_detail_from_activity - -from homeassistant.components.binary_sensor import ( - BinarySensorDeviceClass, - BinarySensorEntity, - BinarySensorEntityDescription, -) -from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_call_later - -from . import YaleConfigEntry, YaleData -from .entity import YaleDescriptionEntity -from .util import ( - retrieve_ding_activity, - retrieve_doorbell_motion_activity, - retrieve_online_state, - retrieve_time_based_activity, -) - -_LOGGER = logging.getLogger(__name__) - -TIME_TO_RECHECK_DETECTION = timedelta( - seconds=ACTIVITY_UPDATE_INTERVAL.total_seconds() * 3 -) - - -@dataclass(frozen=True, kw_only=True) -class YaleDoorbellBinarySensorEntityDescription(BinarySensorEntityDescription): - """Describes Yale binary_sensor entity.""" - - value_fn: Callable[[YaleData, DoorbellDetail | LockDetail], Activity | None] - is_time_based: bool - - -SENSOR_TYPE_DOOR = BinarySensorEntityDescription( - key="open", - device_class=BinarySensorDeviceClass.DOOR, -) - -SENSOR_TYPES_VIDEO_DOORBELL = ( - YaleDoorbellBinarySensorEntityDescription( - key="motion", - device_class=BinarySensorDeviceClass.MOTION, - value_fn=retrieve_doorbell_motion_activity, - is_time_based=True, - ), - YaleDoorbellBinarySensorEntityDescription( - key="image capture", - translation_key="image_capture", - value_fn=partial( - retrieve_time_based_activity, {ActivityType.DOORBELL_IMAGE_CAPTURE} - ), - is_time_based=True, - ), - YaleDoorbellBinarySensorEntityDescription( - key="online", - device_class=BinarySensorDeviceClass.CONNECTIVITY, - entity_category=EntityCategory.DIAGNOSTIC, - value_fn=retrieve_online_state, - is_time_based=False, - ), -) - - -SENSOR_TYPES_DOORBELL: tuple[YaleDoorbellBinarySensorEntityDescription, ...] 
= ( - YaleDoorbellBinarySensorEntityDescription( - key="ding", - translation_key="ding", - device_class=BinarySensorDeviceClass.OCCUPANCY, - value_fn=retrieve_ding_activity, - is_time_based=True, - ), -) - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: YaleConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the Yale binary sensors.""" - data = config_entry.runtime_data - entities: list[BinarySensorEntity] = [] - - for lock in data.locks: - detail = data.get_device_detail(lock.device_id) - if detail.doorsense: - entities.append(YaleDoorBinarySensor(data, lock, SENSOR_TYPE_DOOR)) - - if detail.doorbell: - entities.extend( - YaleDoorbellBinarySensor(data, lock, description) - for description in SENSOR_TYPES_DOORBELL - ) - - entities.extend( - YaleDoorbellBinarySensor(data, doorbell, description) - for description in SENSOR_TYPES_DOORBELL + SENSOR_TYPES_VIDEO_DOORBELL - for doorbell in data.doorbells - ) - async_add_entities(entities) - - -class YaleDoorBinarySensor(YaleDescriptionEntity, BinarySensorEntity): - """Representation of an Yale Door binary sensor.""" - - _attr_device_class = BinarySensorDeviceClass.DOOR - description: BinarySensorEntityDescription - - @callback - def _update_from_data(self) -> None: - """Get the latest state of the sensor and update activity.""" - if door_activity := self._get_latest({ActivityType.DOOR_OPERATION}): - update_lock_detail_from_activity(self._detail, door_activity) - if door_activity.was_pushed: - self._detail.set_online(True) - - if bridge_activity := self._get_latest({ActivityType.BRIDGE_OPERATION}): - update_lock_detail_from_activity(self._detail, bridge_activity) - self._attr_available = self._detail.bridge_is_online - self._attr_is_on = self._detail.door_state == LockDoorStatus.OPEN - - -class YaleDoorbellBinarySensor(YaleDescriptionEntity, BinarySensorEntity): - """Representation of an Yale binary sensor.""" - - entity_description: YaleDoorbellBinarySensorEntityDescription - _check_for_off_update_listener: Callable[[], None] | None = None - - @callback - def _update_from_data(self) -> None: - """Get the latest state of the sensor.""" - self._cancel_any_pending_updates() - self._attr_is_on = bool( - self.entity_description.value_fn(self._data, self._detail) - ) - - if self.entity_description.is_time_based: - self._attr_available = retrieve_online_state(self._data, self._detail) - self._schedule_update_to_recheck_turn_off_sensor() - else: - self._attr_available = True - - @callback - def _async_scheduled_update(self, now: datetime) -> None: - """Timer callback for sensor update.""" - self._check_for_off_update_listener = None - self._update_from_data() - if not self.is_on: - self.async_write_ha_state() - - def _schedule_update_to_recheck_turn_off_sensor(self) -> None: - """Schedule an update to recheck the sensor to see if it is ready to turn off.""" - # If the sensor is already off there is nothing to do - if not self.is_on: - return - self._check_for_off_update_listener = async_call_later( - self.hass, TIME_TO_RECHECK_DETECTION, self._async_scheduled_update - ) - - def _cancel_any_pending_updates(self) -> None: - """Cancel any updates to recheck a sensor to see if it is ready to turn off.""" - if not self._check_for_off_update_listener: - return - _LOGGER.debug("%s: canceled pending update", self.entity_id) - self._check_for_off_update_listener() - self._check_for_off_update_listener = None - - async def async_will_remove_from_hass(self) -> None: - """When removing cancel any scheduled 
updates.""" - self._cancel_any_pending_updates() - await super().async_will_remove_from_hass() diff --git a/homeassistant/components/yale/button.py b/homeassistant/components/yale/button.py deleted file mode 100644 index b04ad638f0c..00000000000 --- a/homeassistant/components/yale/button.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Support for Yale buttons.""" - -from homeassistant.components.button import ButtonEntity -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . import YaleConfigEntry -from .entity import YaleEntity - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: YaleConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up Yale lock wake buttons.""" - data = config_entry.runtime_data - async_add_entities(YaleWakeLockButton(data, lock, "wake") for lock in data.locks) - - -class YaleWakeLockButton(YaleEntity, ButtonEntity): - """Representation of a Yale lock wake button.""" - - _attr_translation_key = "wake" - - async def async_press(self) -> None: - """Wake the device.""" - await self._data.async_status_async(self._device_id, self._hyper_bridge) - - @callback - def _update_from_data(self) -> None: - """Nothing to update as buttons are stateless.""" diff --git a/homeassistant/components/yale/camera.py b/homeassistant/components/yale/camera.py deleted file mode 100644 index 217e8f5f6fd..00000000000 --- a/homeassistant/components/yale/camera.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Support for Yale doorbell camera.""" - -from __future__ import annotations - -import logging - -from aiohttp import ClientSession -from yalexs.activity import ActivityType -from yalexs.doorbell import Doorbell -from yalexs.util import update_doorbell_image_from_activity - -from homeassistant.components.camera import Camera -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import aiohttp_client -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .
import YaleConfigEntry, YaleData -from .const import DEFAULT_NAME, DEFAULT_TIMEOUT -from .entity import YaleEntity - -_LOGGER = logging.getLogger(__name__) - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: YaleConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up Yale cameras.""" - data = config_entry.runtime_data - # Create an aiohttp session instead of using the default one since the - # default one is likely to trigger yale's WAF if another integration - # is also using Cloudflare - session = aiohttp_client.async_create_clientsession(hass) - async_add_entities( - YaleCamera(data, doorbell, session, DEFAULT_TIMEOUT) - for doorbell in data.doorbells - ) - - -class YaleCamera(YaleEntity, Camera): - """An implementation of an Yale security camera.""" - - _attr_translation_key = "camera" - _attr_motion_detection_enabled = True - _attr_brand = DEFAULT_NAME - _image_url: str | None = None - _image_content: bytes | None = None - - def __init__( - self, data: YaleData, device: Doorbell, session: ClientSession, timeout: int - ) -> None: - """Initialize an Yale security camera.""" - super().__init__(data, device, "camera") - self._timeout = timeout - self._session = session - self._attr_model = self._detail.model - - @property - def is_recording(self) -> bool: - """Return true if the device is recording.""" - return self._device.has_subscription - - async def _async_update(self): - """Update device.""" - _LOGGER.debug("async_update called %s", self._detail.device_name) - await self._data.refresh_camera_by_id(self._device_id) - self._update_from_data() - - @callback - def _update_from_data(self) -> None: - """Get the latest state of the sensor.""" - if doorbell_activity := self._get_latest( - {ActivityType.DOORBELL_MOTION, ActivityType.DOORBELL_IMAGE_CAPTURE} - ): - update_doorbell_image_from_activity(self._detail, doorbell_activity) - - async def async_camera_image( - self, width: int | None = None, height: int | None = None - ) -> bytes | None: - """Return bytes of camera image.""" - self._update_from_data() - - if self._image_url is not self._detail.image_url: - self._image_content = await self._data.async_get_doorbell_image( - self._device_id, self._session, timeout=self._timeout - ) - self._image_url = self._detail.image_url - - return self._image_content diff --git a/homeassistant/components/yale/config_flow.py b/homeassistant/components/yale/config_flow.py deleted file mode 100644 index fecf286fdd6..00000000000 --- a/homeassistant/components/yale/config_flow.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Config flow for Yale integration.""" - -from collections.abc import Mapping -import logging -from typing import Any - -import jwt - -from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult -from homeassistant.helpers import config_entry_oauth2_flow - -from .const import DOMAIN - -_LOGGER = logging.getLogger(__name__) - - -class YaleConfigFlow(config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN): - """Handle a config flow for Yale.""" - - VERSION = 1 - DOMAIN = DOMAIN - - @property - def logger(self) -> logging.Logger: - """Return logger.""" - return _LOGGER - - async def async_step_reauth( - self, entry_data: Mapping[str, Any] - ) -> ConfigFlowResult: - """Handle configuration by re-auth.""" - return await self.async_step_user() - - def _async_get_user_id_from_access_token(self, encoded: str) -> str: - """Get user ID from access token.""" - decoded = jwt.decode( - encoded, - "", - verify=False, - 
options={"verify_signature": False}, - algorithms=["HS256"], - ) - return decoded["userId"] - - async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: - """Create an entry for the flow.""" - user_id = self._async_get_user_id_from_access_token( - data["token"]["access_token"] - ) - await self.async_set_unique_id(user_id) - if self.source == SOURCE_REAUTH: - self._abort_if_unique_id_mismatch(reason="reauth_invalid_user") - return self.async_update_reload_and_abort( - self._get_reauth_entry(), data=data - ) - self._abort_if_unique_id_configured() - return await super().async_oauth_create_entry(data) diff --git a/homeassistant/components/yale/const.py b/homeassistant/components/yale/const.py deleted file mode 100644 index 3da4fb1dfb4..00000000000 --- a/homeassistant/components/yale/const.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Constants for Yale devices.""" - -from homeassistant.const import Platform - -DEFAULT_TIMEOUT = 25 - -CONF_ACCESS_TOKEN_CACHE_FILE = "access_token_cache_file" -CONF_BRAND = "brand" -CONF_LOGIN_METHOD = "login_method" -CONF_INSTALL_ID = "install_id" - -VERIFICATION_CODE_KEY = "verification_code" - -MANUFACTURER = "Yale Home Inc." - -DEFAULT_NAME = "Yale" -DOMAIN = "yale" - -OPERATION_METHOD_AUTORELOCK = "autorelock" -OPERATION_METHOD_REMOTE = "remote" -OPERATION_METHOD_KEYPAD = "keypad" -OPERATION_METHOD_MANUAL = "manual" -OPERATION_METHOD_TAG = "tag" -OPERATION_METHOD_MOBILE_DEVICE = "mobile" - -ATTR_OPERATION_AUTORELOCK = "autorelock" -ATTR_OPERATION_METHOD = "method" -ATTR_OPERATION_REMOTE = "remote" -ATTR_OPERATION_KEYPAD = "keypad" -ATTR_OPERATION_MANUAL = "manual" -ATTR_OPERATION_TAG = "tag" - -LOGIN_METHODS = ["phone", "email"] -DEFAULT_LOGIN_METHOD = "email" - -PLATFORMS = [ - Platform.BINARY_SENSOR, - Platform.BUTTON, - Platform.CAMERA, - Platform.EVENT, - Platform.LOCK, - Platform.SENSOR, -] diff --git a/homeassistant/components/yale/data.py b/homeassistant/components/yale/data.py deleted file mode 100644 index 12736f7733d..00000000000 --- a/homeassistant/components/yale/data.py +++ /dev/null @@ -1,52 +0,0 @@ -"""Support for Yale devices.""" - -from __future__ import annotations - -from yalexs.lock import LockDetail -from yalexs.manager.data import YaleXSData -from yalexs_ble import YaleXSBLEDiscovery - -from homeassistant.config_entries import SOURCE_INTEGRATION_DISCOVERY -from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import discovery_flow - -from .gateway import YaleGateway - -YALEXS_BLE_DOMAIN = "yalexs_ble" - - -@callback -def _async_trigger_ble_lock_discovery( - hass: HomeAssistant, locks_with_offline_keys: list[LockDetail] -) -> None: - """Update keys for the yalexs-ble integration if available.""" - for lock_detail in locks_with_offline_keys: - discovery_flow.async_create_flow( - hass, - YALEXS_BLE_DOMAIN, - context={"source": SOURCE_INTEGRATION_DISCOVERY}, - data=YaleXSBLEDiscovery( - { - "name": lock_detail.device_name, - "address": lock_detail.mac_address, - "serial": lock_detail.serial_number, - "key": lock_detail.offline_key, - "slot": lock_detail.offline_slot, - } - ), - ) - - -class YaleData(YaleXSData): - """yale data object.""" - - def __init__(self, hass: HomeAssistant, yale_gateway: YaleGateway) -> None: - """Init yale data object.""" - self._hass = hass - super().__init__(yale_gateway, HomeAssistantError) - - @callback - def async_offline_key_discovered(self, detail: LockDetail) -> None: - """Handle offline key discovery.""" - 
_async_trigger_ble_lock_discovery(self._hass, [detail]) diff --git a/homeassistant/components/yale/diagnostics.py b/homeassistant/components/yale/diagnostics.py deleted file mode 100644 index 7e7f6179e7a..00000000000 --- a/homeassistant/components/yale/diagnostics.py +++ /dev/null @@ -1,50 +0,0 @@ -"""Diagnostics support for yale.""" - -from __future__ import annotations - -from typing import Any - -from yalexs.const import Brand - -from homeassistant.components.diagnostics import async_redact_data -from homeassistant.core import HomeAssistant - -from . import YaleConfigEntry - -TO_REDACT = { - "HouseID", - "OfflineKeys", - "installUserID", - "invitations", - "key", - "pins", - "pubsubChannel", - "recentImage", - "remoteOperateSecret", - "users", - "zWaveDSK", - "contentToken", -} - - -async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: YaleConfigEntry -) -> dict[str, Any]: - """Return diagnostics for a config entry.""" - data = entry.runtime_data - - return { - "locks": { - lock.device_id: async_redact_data( - data.get_device_detail(lock.device_id).raw, TO_REDACT - ) - for lock in data.locks - }, - "doorbells": { - doorbell.device_id: async_redact_data( - data.get_device_detail(doorbell.device_id).raw, TO_REDACT - ) - for doorbell in data.doorbells - }, - "brand": Brand.YALE_GLOBAL.value, - } diff --git a/homeassistant/components/yale/entity.py b/homeassistant/components/yale/entity.py deleted file mode 100644 index 152070c0be3..00000000000 --- a/homeassistant/components/yale/entity.py +++ /dev/null @@ -1,115 +0,0 @@ -"""Base class for Yale entity.""" - -from abc import abstractmethod - -from yalexs.activity import Activity, ActivityType -from yalexs.doorbell import Doorbell, DoorbellDetail -from yalexs.keypad import KeypadDetail -from yalexs.lock import Lock, LockDetail -from yalexs.util import get_configuration_url - -from homeassistant.const import ATTR_CONNECTIONS -from homeassistant.core import callback -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity, EntityDescription - -from . 
import DOMAIN, YaleData -from .const import MANUFACTURER - -DEVICE_TYPES = ["keypad", "lock", "camera", "doorbell", "door", "bell"] - - -class YaleEntity(Entity): - """Base implementation for Yale device.""" - - _attr_should_poll = False - _attr_has_entity_name = True - - def __init__( - self, data: YaleData, device: Doorbell | Lock | KeypadDetail, unique_id: str - ) -> None: - """Initialize an Yale device.""" - super().__init__() - self._data = data - self._stream = data.activity_stream - self._device = device - detail = self._detail - self._device_id = device.device_id - self._attr_unique_id = f"{device.device_id}_{unique_id}" - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self._device_id)}, - manufacturer=MANUFACTURER, - model=detail.model, - name=device.device_name, - sw_version=detail.firmware_version, - suggested_area=_remove_device_types(device.device_name, DEVICE_TYPES), - configuration_url=get_configuration_url(data.brand), - ) - if isinstance(detail, LockDetail) and (mac := detail.mac_address): - self._attr_device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_BLUETOOTH, mac)} - - @property - def _detail(self) -> DoorbellDetail | LockDetail: - return self._data.get_device_detail(self._device.device_id) - - @property - def _hyper_bridge(self) -> bool: - """Check if the lock has a paired hyper bridge.""" - return bool(self._detail.bridge and self._detail.bridge.hyper_bridge) - - @callback - def _get_latest(self, activity_types: set[ActivityType]) -> Activity | None: - """Get the latest activity for the device.""" - return self._stream.get_latest_device_activity(self._device_id, activity_types) - - @callback - def _update_from_data_and_write_state(self) -> None: - self._update_from_data() - self.async_write_ha_state() - - @abstractmethod - def _update_from_data(self) -> None: - """Update the entity state from the data object.""" - - async def async_added_to_hass(self) -> None: - """Subscribe to updates.""" - self.async_on_remove( - self._data.async_subscribe_device_id( - self._device_id, self._update_from_data_and_write_state - ) - ) - self.async_on_remove( - self._stream.async_subscribe_device_id( - self._device_id, self._update_from_data_and_write_state - ) - ) - self._update_from_data() - - -class YaleDescriptionEntity(YaleEntity): - """An Yale entity with a description.""" - - def __init__( - self, - data: YaleData, - device: Doorbell | Lock | KeypadDetail, - description: EntityDescription, - ) -> None: - """Initialize an Yale entity with a description.""" - super().__init__(data, device, description.key) - self.entity_description = description - - -def _remove_device_types(name: str, device_types: list[str]) -> str: - """Strip device types from a string. - - Yale stores the name as Master Bed Lock - or Master Bed Door. We can come up with a - reasonable suggestion by removing the supported - device types from the string. 
- """ - lower_name = name.lower() - for device_type in device_types: - lower_name = lower_name.removesuffix(f" {device_type}") - return name[: len(lower_name)] diff --git a/homeassistant/components/yale/event.py b/homeassistant/components/yale/event.py deleted file mode 100644 index 935ba7376f8..00000000000 --- a/homeassistant/components/yale/event.py +++ /dev/null @@ -1,98 +0,0 @@ -"""Support for yale events.""" - -from __future__ import annotations - -from collections.abc import Callable -from dataclasses import dataclass -from typing import TYPE_CHECKING - -from yalexs.activity import Activity -from yalexs.doorbell import DoorbellDetail -from yalexs.lock import LockDetail - -from homeassistant.components.event import ( - EventDeviceClass, - EventEntity, - EventEntityDescription, -) -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . import YaleConfigEntry, YaleData -from .entity import YaleDescriptionEntity -from .util import ( - retrieve_ding_activity, - retrieve_doorbell_motion_activity, - retrieve_online_state, -) - - -@dataclass(kw_only=True, frozen=True) -class YaleEventEntityDescription(EventEntityDescription): - """Describe yale event entities.""" - - value_fn: Callable[[YaleData, DoorbellDetail | LockDetail], Activity | None] - - -TYPES_VIDEO_DOORBELL: tuple[YaleEventEntityDescription, ...] = ( - YaleEventEntityDescription( - key="motion", - translation_key="motion", - device_class=EventDeviceClass.MOTION, - event_types=["motion"], - value_fn=retrieve_doorbell_motion_activity, - ), -) - - -TYPES_DOORBELL: tuple[YaleEventEntityDescription, ...] = ( - YaleEventEntityDescription( - key="doorbell", - translation_key="doorbell", - device_class=EventDeviceClass.DOORBELL, - event_types=["ring"], - value_fn=retrieve_ding_activity, - ), -) - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: YaleConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the yale event platform.""" - data = config_entry.runtime_data - entities: list[YaleEventEntity] = [ - YaleEventEntity(data, lock, description) - for description in TYPES_DOORBELL - for lock in data.locks - if (detail := data.get_device_detail(lock.device_id)) and detail.doorbell - ] - entities.extend( - YaleEventEntity(data, doorbell, description) - for description in TYPES_DOORBELL + TYPES_VIDEO_DOORBELL - for doorbell in data.doorbells - ) - async_add_entities(entities) - - -class YaleEventEntity(YaleDescriptionEntity, EventEntity): - """An yale event entity.""" - - entity_description: YaleEventEntityDescription - _last_activity: Activity | None = None - - @callback - def _update_from_data(self) -> None: - """Update from data.""" - self._attr_available = retrieve_online_state(self._data, self._detail) - current_activity = self.entity_description.value_fn(self._data, self._detail) - if not current_activity or current_activity == self._last_activity: - return - self._last_activity = current_activity - event_types = self.entity_description.event_types - if TYPE_CHECKING: - assert event_types is not None - self._trigger_event(event_type=event_types[0]) - self.async_write_ha_state() diff --git a/homeassistant/components/yale/gateway.py b/homeassistant/components/yale/gateway.py deleted file mode 100644 index cd7796182d2..00000000000 --- a/homeassistant/components/yale/gateway.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Handle Yale connection setup and authentication.""" - -import logging -from pathlib import Path - -from 
aiohttp import ClientSession -from yalexs.authenticator_common import Authentication, AuthenticationState -from yalexs.manager.gateway import Gateway - -from homeassistant.helpers import config_entry_oauth2_flow - -_LOGGER = logging.getLogger(__name__) - - -class YaleGateway(Gateway): - """Handle the connection to Yale.""" - - def __init__( - self, - config_path: Path, - aiohttp_session: ClientSession, - oauth_session: config_entry_oauth2_flow.OAuth2Session, - ) -> None: - """Init the connection.""" - super().__init__(config_path, aiohttp_session) - self._oauth_session = oauth_session - - async def async_get_access_token(self) -> str: - """Get access token.""" - await self._oauth_session.async_ensure_token_valid() - return self._oauth_session.token["access_token"] - - async def async_refresh_access_token_if_needed(self) -> None: - """Refresh the access token if needed.""" - await self._oauth_session.async_ensure_token_valid() - - async def async_authenticate(self) -> Authentication: - """Authenticate with the details provided to setup.""" - await self._oauth_session.async_ensure_token_valid() - self.authentication = Authentication( - AuthenticationState.AUTHENTICATED, None, None, None - ) - return self.authentication diff --git a/homeassistant/components/yale/icons.json b/homeassistant/components/yale/icons.json deleted file mode 100644 index b654b6d912a..00000000000 --- a/homeassistant/components/yale/icons.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "entity": { - "binary_sensor": { - "image_capture": { - "default": "mdi:file-image" - } - } - } -} diff --git a/homeassistant/components/yale/lock.py b/homeassistant/components/yale/lock.py deleted file mode 100644 index b911c92ba0f..00000000000 --- a/homeassistant/components/yale/lock.py +++ /dev/null @@ -1,147 +0,0 @@ -"""Support for Yale lock.""" - -from __future__ import annotations - -from collections.abc import Callable, Coroutine -import logging -from typing import Any - -from aiohttp import ClientResponseError -from yalexs.activity import ActivityType, ActivityTypes -from yalexs.lock import Lock, LockStatus -from yalexs.util import get_latest_activity, update_lock_detail_from_activity - -from homeassistant.components.lock import ATTR_CHANGED_BY, LockEntity, LockEntityFeature -from homeassistant.const import ATTR_BATTERY_LEVEL -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.restore_state import RestoreEntity -import homeassistant.util.dt as dt_util - -from . 
import YaleConfigEntry, YaleData -from .entity import YaleEntity - -_LOGGER = logging.getLogger(__name__) - -LOCK_JAMMED_ERR = 531 - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: YaleConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up Yale locks.""" - data = config_entry.runtime_data - async_add_entities(YaleLock(data, lock) for lock in data.locks) - - -class YaleLock(YaleEntity, RestoreEntity, LockEntity): - """Representation of an Yale lock.""" - - _attr_name = None - _lock_status: LockStatus | None = None - - def __init__(self, data: YaleData, device: Lock) -> None: - """Initialize the lock.""" - super().__init__(data, device, "lock") - if self._detail.unlatch_supported: - self._attr_supported_features = LockEntityFeature.OPEN - - async def async_lock(self, **kwargs: Any) -> None: - """Lock the device.""" - if self._data.push_updates_connected: - await self._data.async_lock_async(self._device_id, self._hyper_bridge) - return - await self._call_lock_operation(self._data.async_lock) - - async def async_open(self, **kwargs: Any) -> None: - """Open/unlatch the device.""" - if self._data.push_updates_connected: - await self._data.async_unlatch_async(self._device_id, self._hyper_bridge) - return - await self._call_lock_operation(self._data.async_unlatch) - - async def async_unlock(self, **kwargs: Any) -> None: - """Unlock the device.""" - if self._data.push_updates_connected: - await self._data.async_unlock_async(self._device_id, self._hyper_bridge) - return - await self._call_lock_operation(self._data.async_unlock) - - async def _call_lock_operation( - self, lock_operation: Callable[[str], Coroutine[Any, Any, list[ActivityTypes]]] - ) -> None: - try: - activities = await lock_operation(self._device_id) - except ClientResponseError as err: - if err.status == LOCK_JAMMED_ERR: - self._detail.lock_status = LockStatus.JAMMED - self._detail.lock_status_datetime = dt_util.utcnow() - else: - raise - else: - for lock_activity in activities: - update_lock_detail_from_activity(self._detail, lock_activity) - - if self._update_lock_status_from_detail(): - _LOGGER.debug( - "async_signal_device_id_update (from lock operation): %s", - self._device_id, - ) - self._data.async_signal_device_id_update(self._device_id) - - def _update_lock_status_from_detail(self) -> bool: - self._attr_available = self._detail.bridge_is_online - - if self._lock_status != self._detail.lock_status: - self._lock_status = self._detail.lock_status - return True - return False - - @callback - def _update_from_data(self) -> None: - """Get the latest state of the sensor and update activity.""" - detail = self._detail - if lock_activity := self._get_latest({ActivityType.LOCK_OPERATION}): - self._attr_changed_by = lock_activity.operated_by - lock_activity_without_operator = self._get_latest( - {ActivityType.LOCK_OPERATION_WITHOUT_OPERATOR} - ) - if latest_activity := get_latest_activity( - lock_activity_without_operator, lock_activity - ): - if latest_activity.was_pushed: - self._detail.set_online(True) - update_lock_detail_from_activity(detail, latest_activity) - - if bridge_activity := self._get_latest({ActivityType.BRIDGE_OPERATION}): - update_lock_detail_from_activity(detail, bridge_activity) - - self._update_lock_status_from_detail() - lock_status = self._lock_status - if lock_status is None or lock_status is LockStatus.UNKNOWN: - self._attr_is_locked = None - else: - self._attr_is_locked = lock_status is LockStatus.LOCKED - self._attr_is_jammed = lock_status is LockStatus.JAMMED - 
self._attr_is_locking = lock_status is LockStatus.LOCKING - self._attr_is_unlocking = lock_status in ( - LockStatus.UNLOCKING, - LockStatus.UNLATCHING, - ) - self._attr_extra_state_attributes = {ATTR_BATTERY_LEVEL: detail.battery_level} - if keypad := detail.keypad: - self._attr_extra_state_attributes["keypad_battery_level"] = ( - keypad.battery_level - ) - - async def async_added_to_hass(self) -> None: - """Restore ATTR_CHANGED_BY on startup since it is likely no longer in the activity log.""" - await super().async_added_to_hass() - - if not (last_state := await self.async_get_last_state()): - return - - if ATTR_CHANGED_BY in last_state.attributes: - self._attr_changed_by = last_state.attributes[ATTR_CHANGED_BY] diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json deleted file mode 100644 index 34f3a7a1728..00000000000 --- a/homeassistant/components/yale/manifest.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "domain": "yale", - "name": "Yale", - "codeowners": ["@bdraco"], - "config_flow": true, - "dependencies": ["application_credentials", "cloud"], - "dhcp": [ - { - "hostname": "yale-connect-plus", - "macaddress": "00177A*" - } - ], - "documentation": "https://www.home-assistant.io/integrations/yale", - "iot_class": "cloud_push", - "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.0"] -} diff --git a/homeassistant/components/yale/sensor.py b/homeassistant/components/yale/sensor.py deleted file mode 100644 index bb3d4317277..00000000000 --- a/homeassistant/components/yale/sensor.py +++ /dev/null @@ -1,211 +0,0 @@ -"""Support for Yale sensors.""" - -from __future__ import annotations - -from collections.abc import Callable -from dataclasses import dataclass -from typing import Any, cast - -from yalexs.activity import ActivityType, LockOperationActivity -from yalexs.doorbell import Doorbell -from yalexs.keypad import KeypadDetail -from yalexs.lock import LockDetail - -from homeassistant.components.sensor import ( - RestoreSensor, - SensorDeviceClass, - SensorEntity, - SensorEntityDescription, - SensorStateClass, -) -from homeassistant.const import ( - ATTR_ENTITY_PICTURE, - PERCENTAGE, - STATE_UNAVAILABLE, - EntityCategory, -) -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . 
import YaleConfigEntry -from .const import ( - ATTR_OPERATION_AUTORELOCK, - ATTR_OPERATION_KEYPAD, - ATTR_OPERATION_MANUAL, - ATTR_OPERATION_METHOD, - ATTR_OPERATION_REMOTE, - ATTR_OPERATION_TAG, - OPERATION_METHOD_AUTORELOCK, - OPERATION_METHOD_KEYPAD, - OPERATION_METHOD_MANUAL, - OPERATION_METHOD_MOBILE_DEVICE, - OPERATION_METHOD_REMOTE, - OPERATION_METHOD_TAG, -) -from .entity import YaleDescriptionEntity, YaleEntity - - -def _retrieve_device_battery_state(detail: LockDetail) -> int: - """Get the latest state of the sensor.""" - return detail.battery_level - - -def _retrieve_linked_keypad_battery_state(detail: KeypadDetail) -> int | None: - """Get the latest state of the sensor.""" - return detail.battery_percentage - - -@dataclass(frozen=True, kw_only=True) -class YaleSensorEntityDescription[T: LockDetail | KeypadDetail]( - SensorEntityDescription -): - """Mixin for required keys.""" - - value_fn: Callable[[T], int | None] - - -SENSOR_TYPE_DEVICE_BATTERY = YaleSensorEntityDescription[LockDetail]( - key="device_battery", - entity_category=EntityCategory.DIAGNOSTIC, - state_class=SensorStateClass.MEASUREMENT, - value_fn=_retrieve_device_battery_state, -) - -SENSOR_TYPE_KEYPAD_BATTERY = YaleSensorEntityDescription[KeypadDetail]( - key="linked_keypad_battery", - entity_category=EntityCategory.DIAGNOSTIC, - state_class=SensorStateClass.MEASUREMENT, - value_fn=_retrieve_linked_keypad_battery_state, -) - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: YaleConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the Yale sensors.""" - data = config_entry.runtime_data - entities: list[SensorEntity] = [] - - for device in data.locks: - detail = data.get_device_detail(device.device_id) - entities.append(YaleOperatorSensor(data, device, "lock_operator")) - if SENSOR_TYPE_DEVICE_BATTERY.value_fn(detail): - entities.append( - YaleBatterySensor[LockDetail](data, device, SENSOR_TYPE_DEVICE_BATTERY) - ) - if keypad := detail.keypad: - entities.append( - YaleBatterySensor[KeypadDetail]( - data, keypad, SENSOR_TYPE_KEYPAD_BATTERY - ) - ) - - entities.extend( - YaleBatterySensor[Doorbell](data, device, SENSOR_TYPE_DEVICE_BATTERY) - for device in data.doorbells - if SENSOR_TYPE_DEVICE_BATTERY.value_fn(data.get_device_detail(device.device_id)) - ) - - async_add_entities(entities) - - -class YaleOperatorSensor(YaleEntity, RestoreSensor): - """Representation of an Yale lock operation sensor.""" - - _attr_translation_key = "operator" - _operated_remote: bool | None = None - _operated_keypad: bool | None = None - _operated_manual: bool | None = None - _operated_tag: bool | None = None - _operated_autorelock: bool | None = None - - @callback - def _update_from_data(self) -> None: - """Get the latest state of the sensor and update activity.""" - self._attr_available = True - if lock_activity := self._get_latest({ActivityType.LOCK_OPERATION}): - lock_activity = cast(LockOperationActivity, lock_activity) - self._attr_native_value = lock_activity.operated_by - self._operated_remote = lock_activity.operated_remote - self._operated_keypad = lock_activity.operated_keypad - self._operated_manual = lock_activity.operated_manual - self._operated_tag = lock_activity.operated_tag - self._operated_autorelock = lock_activity.operated_autorelock - self._attr_entity_picture = lock_activity.operator_thumbnail_url - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return the device specific state attributes.""" - attributes: dict[str, Any] = {} - - if 
self._operated_remote is not None: - attributes[ATTR_OPERATION_REMOTE] = self._operated_remote - if self._operated_keypad is not None: - attributes[ATTR_OPERATION_KEYPAD] = self._operated_keypad - if self._operated_manual is not None: - attributes[ATTR_OPERATION_MANUAL] = self._operated_manual - if self._operated_tag is not None: - attributes[ATTR_OPERATION_TAG] = self._operated_tag - if self._operated_autorelock is not None: - attributes[ATTR_OPERATION_AUTORELOCK] = self._operated_autorelock - - if self._operated_remote: - attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_REMOTE - elif self._operated_keypad: - attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_KEYPAD - elif self._operated_manual: - attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_MANUAL - elif self._operated_tag: - attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_TAG - elif self._operated_autorelock: - attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_AUTORELOCK - else: - attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_MOBILE_DEVICE - - return attributes - - async def async_added_to_hass(self) -> None: - """Restore ATTR_CHANGED_BY on startup since it is likely no longer in the activity log.""" - await super().async_added_to_hass() - - last_state = await self.async_get_last_state() - last_sensor_state = await self.async_get_last_sensor_data() - if ( - not last_state - or not last_sensor_state - or last_state.state == STATE_UNAVAILABLE - ): - return - - self._attr_native_value = last_sensor_state.native_value - last_attrs = last_state.attributes - if ATTR_ENTITY_PICTURE in last_attrs: - self._attr_entity_picture = last_attrs[ATTR_ENTITY_PICTURE] - if ATTR_OPERATION_REMOTE in last_attrs: - self._operated_remote = last_attrs[ATTR_OPERATION_REMOTE] - if ATTR_OPERATION_KEYPAD in last_attrs: - self._operated_keypad = last_attrs[ATTR_OPERATION_KEYPAD] - if ATTR_OPERATION_MANUAL in last_attrs: - self._operated_manual = last_attrs[ATTR_OPERATION_MANUAL] - if ATTR_OPERATION_TAG in last_attrs: - self._operated_tag = last_attrs[ATTR_OPERATION_TAG] - if ATTR_OPERATION_AUTORELOCK in last_attrs: - self._operated_autorelock = last_attrs[ATTR_OPERATION_AUTORELOCK] - - -class YaleBatterySensor[T: LockDetail | KeypadDetail]( - YaleDescriptionEntity, SensorEntity -): - """Representation of an Yale sensor.""" - - entity_description: YaleSensorEntityDescription[T] - _attr_device_class = SensorDeviceClass.BATTERY - _attr_native_unit_of_measurement = PERCENTAGE - - @callback - def _update_from_data(self) -> None: - """Get the latest state of the sensor.""" - self._attr_native_value = self.entity_description.value_fn(self._detail) - self._attr_available = self._attr_native_value is not None diff --git a/homeassistant/components/yale/strings.json b/homeassistant/components/yale/strings.json deleted file mode 100644 index 3fb1345a3b0..00000000000 --- a/homeassistant/components/yale/strings.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "config": { - "step": { - "pick_implementation": { - "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" - } - }, - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", - "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", - "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", - "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", - "no_url_available": 
"[%key:common::config_flow::abort::oauth2_no_url_available%]", - "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", - "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", - "reauth_invalid_user": "Reauthenticate must use the same account." - }, - "create_entry": { - "default": "[%key:common::config_flow::create_entry::authenticated%]" - } - }, - "entity": { - "binary_sensor": { - "ding": { - "name": "Doorbell ding" - }, - "image_capture": { - "name": "Image capture" - } - }, - "button": { - "wake": { - "name": "Wake" - } - }, - "camera": { - "camera": { - "name": "[%key:component::camera::title%]" - } - }, - "sensor": { - "operator": { - "name": "Operator" - } - }, - "event": { - "doorbell": { - "state_attributes": { - "event_type": { - "state": { - "ring": "Ring" - } - } - } - }, - "motion": { - "state_attributes": { - "event_type": { - "state": { - "motion": "Motion" - } - } - } - } - } - } -} diff --git a/homeassistant/components/yale/util.py b/homeassistant/components/yale/util.py deleted file mode 100644 index 3462c576fd9..00000000000 --- a/homeassistant/components/yale/util.py +++ /dev/null @@ -1,78 +0,0 @@ -"""Yale util functions.""" - -from __future__ import annotations - -from datetime import datetime, timedelta -from functools import partial - -import aiohttp -from yalexs.activity import ACTION_DOORBELL_CALL_MISSED, Activity, ActivityType -from yalexs.doorbell import DoorbellDetail -from yalexs.lock import LockDetail -from yalexs.manager.const import ACTIVITY_UPDATE_INTERVAL - -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import aiohttp_client - -from . 
import YaleData -TIME_TO_DECLARE_DETECTION = timedelta(seconds=ACTIVITY_UPDATE_INTERVAL.total_seconds()) - - -@callback -def async_create_yale_clientsession(hass: HomeAssistant) -> aiohttp.ClientSession: - """Create an aiohttp session for the yale integration.""" - # Create an aiohttp session instead of using the default one since the - # default one is likely to trigger yale's WAF if another integration - # is also using Cloudflare - return aiohttp_client.async_create_clientsession(hass) - - -def retrieve_time_based_activity( - activities: set[ActivityType], data: YaleData, detail: DoorbellDetail | LockDetail -) -> Activity | None: - """Get the latest state of the sensor.""" - stream = data.activity_stream - if latest := stream.get_latest_device_activity(detail.device_id, activities): - return _activity_time_based(latest) - return None - - -_RING_ACTIVITIES = {ActivityType.DOORBELL_DING} - - -def retrieve_ding_activity( - data: YaleData, detail: DoorbellDetail | LockDetail -) -> Activity | None: - """Get the ring/ding state.""" - stream = data.activity_stream - latest = stream.get_latest_device_activity(detail.device_id, _RING_ACTIVITIES) - if latest is None or ( - data.push_updates_connected and latest.action == ACTION_DOORBELL_CALL_MISSED - ): - return None - return _activity_time_based(latest) - - -retrieve_doorbell_motion_activity = partial( - retrieve_time_based_activity, {ActivityType.DOORBELL_MOTION} -) - - -def _activity_time_based(latest: Activity) -> Activity | None: - """Get the latest state of the sensor.""" - start = latest.activity_start_time - end = latest.activity_end_time + TIME_TO_DECLARE_DETECTION - if start <= datetime.now() <= end: - return latest - return None - - -def retrieve_online_state(data: YaleData, detail: DoorbellDetail | LockDetail) -> bool: - """Get the latest state of the sensor.""" - # The doorbell will go into standby mode when there is no motion - # for a short while.
It will wake by itself when needed so we need - # to consider it available or we will not report motion or dings - if isinstance(detail, DoorbellDetail): - return detail.is_online or detail.is_standby - return detail.bridge_is_online diff --git a/homeassistant/components/yale_home/manifest.json b/homeassistant/components/yale_home/manifest.json index c497fa3fe34..0e45b0da7d0 100644 --- a/homeassistant/components/yale_home/manifest.json +++ b/homeassistant/components/yale_home/manifest.json @@ -2,5 +2,5 @@ "domain": "yale_home", "name": "Yale Home", "integration_type": "virtual", - "supported_by": "yale" + "supported_by": "august" } diff --git a/homeassistant/components/yale_smart_alarm/__init__.py b/homeassistant/components/yale_smart_alarm/__init__.py index c543de89b84..1ef68d98a13 100644 --- a/homeassistant/components/yale_smart_alarm/__init__.py +++ b/homeassistant/components/yale_smart_alarm/__init__.py @@ -6,6 +6,7 @@ from homeassistant.components.lock import CONF_DEFAULT_CODE, DOMAIN as LOCK_DOMA from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_CODE from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import entity_registry as er from .const import LOGGER, PLATFORMS @@ -18,6 +19,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool """Set up Yale from a config entry.""" coordinator = YaleDataUpdateCoordinator(hass, entry) + if not await hass.async_add_executor_job(coordinator.get_updates): + raise ConfigEntryAuthFailed + await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator @@ -59,6 +63,6 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.config_entries.async_update_entry(entry, version=2) - LOGGER.debug("Migration to version %s successful", entry.version) + LOGGER.info("Migration to version %s successful", entry.version) return True diff --git a/homeassistant/components/yale_smart_alarm/alarm_control_panel.py b/homeassistant/components/yale_smart_alarm/alarm_control_panel.py index 0f5b7d0b8e5..2fc56a9e5dd 100644 --- a/homeassistant/components/yale_smart_alarm/alarm_control_panel.py +++ b/homeassistant/components/yale_smart_alarm/alarm_control_panel.py @@ -13,12 +13,12 @@ from yalesmartalarmclient.const import ( from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, - AlarmControlPanelState, ) from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType from .
import YaleConfigEntry from .const import DOMAIN, STATE_MAP, YALE_ALL_ERRORS @@ -106,6 +106,6 @@ class YaleAlarmDevice(YaleAlarmEntity, AlarmControlPanelEntity): return super().available @property - def alarm_state(self) -> AlarmControlPanelState | None: + def state(self) -> StateType: """Return the state of the alarm.""" return STATE_MAP.get(self.coordinator.data["alarm"]) diff --git a/homeassistant/components/yale_smart_alarm/binary_sensor.py b/homeassistant/components/yale_smart_alarm/binary_sensor.py index 8e68b1f0cb4..a1b94b907de 100644 --- a/homeassistant/components/yale_smart_alarm/binary_sensor.py +++ b/homeassistant/components/yale_smart_alarm/binary_sensor.py @@ -49,13 +49,9 @@ async def async_setup_entry( """Set up the Yale binary sensor entry.""" coordinator = entry.runtime_data - sensors: list[YaleDoorSensor | YaleDoorBatterySensor | YaleProblemSensor] = [ + sensors: list[YaleDoorSensor | YaleProblemSensor] = [ YaleDoorSensor(coordinator, data) for data in coordinator.data["door_windows"] ] - sensors.extend( - YaleDoorBatterySensor(coordinator, data) - for data in coordinator.data["door_windows"] - ) sensors.extend( YaleProblemSensor(coordinator, description) for description in SENSOR_TYPES ) @@ -74,27 +70,6 @@ class YaleDoorSensor(YaleEntity, BinarySensorEntity): return bool(self.coordinator.data["sensor_map"][self._attr_unique_id] == "open") -class YaleDoorBatterySensor(YaleEntity, BinarySensorEntity): - """Representation of a Yale door sensor battery status.""" - - _attr_device_class = BinarySensorDeviceClass.BATTERY - - def __init__( - self, - coordinator: YaleDataUpdateCoordinator, - data: dict, - ) -> None: - """Initiate Yale door battery Sensor.""" - super().__init__(coordinator, data) - self._attr_unique_id = f"{data["address"]}-battery" - - @property - def is_on(self) -> bool: - """Return true if the battery is low.""" - state: bool = self.coordinator.data["sensor_battery_map"][self._attr_unique_id] - return state - - class YaleProblemSensor(YaleAlarmEntity, BinarySensorEntity): """Representation of a Yale problem sensor.""" diff --git a/homeassistant/components/yale_smart_alarm/config_flow.py b/homeassistant/components/yale_smart_alarm/config_flow.py index c71b7b33a08..644160a8d93 100644 --- a/homeassistant/components/yale_smart_alarm/config_flow.py +++ b/homeassistant/components/yale_smart_alarm/config_flow.py @@ -23,8 +23,10 @@ from .const import ( CONF_AREA_ID, CONF_LOCK_CODE_DIGITS, DEFAULT_AREA_ID, + DEFAULT_LOCK_CODE_DIGITS, DEFAULT_NAME, DOMAIN, + LOGGER, YALE_BASE_ERRORS, ) @@ -38,67 +40,66 @@ DATA_SCHEMA = vol.Schema( DATA_SCHEMA_AUTH = vol.Schema( { + vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, } ) -OPTIONS_SCHEMA = vol.Schema( - { - vol.Optional( - CONF_LOCK_CODE_DIGITS, - ): int, - } -) - - -def validate_credentials(username: str, password: str) -> dict[str, Any]: - """Validate credentials.""" - errors: dict[str, str] = {} - try: - YaleSmartAlarmClient(username, password) - except AuthenticationError: - errors = {"base": "invalid_auth"} - except YALE_BASE_ERRORS: - errors = {"base": "cannot_connect"} - return errors - class YaleConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Yale integration.""" VERSION = 2 + entry: ConfigEntry | None + @staticmethod @callback def async_get_options_flow(config_entry: ConfigEntry) -> YaleOptionsFlowHandler: """Get the options flow for this handler.""" - return YaleOptionsFlowHandler() + return YaleOptionsFlowHandler(config_entry) async def async_step_reauth( self, 
entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with Yale.""" + self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - errors: dict[str, str] = {} + errors = {} if user_input is not None: - reauth_entry = self._get_reauth_entry() - username = reauth_entry.data[CONF_USERNAME] + username = user_input[CONF_USERNAME] password = user_input[CONF_PASSWORD] - errors = await self.hass.async_add_executor_job( - validate_credentials, username, password - ) - if not errors: - return self.async_update_reload_and_abort( - reauth_entry, - data_updates={CONF_PASSWORD: password}, + try: + await self.hass.async_add_executor_job( + YaleSmartAlarmClient, username, password ) + except AuthenticationError as error: + LOGGER.error("Authentication failed. Check credentials %s", error) + errors = {"base": "invalid_auth"} + except YALE_BASE_ERRORS as error: + LOGGER.error("Connection to API failed %s", error) + errors = {"base": "cannot_connect"} + + if not errors: + existing_entry = await self.async_set_unique_id(username) + if existing_entry and self.entry: + self.hass.config_entries.async_update_entry( + existing_entry, + data={ + **self.entry.data, + CONF_USERNAME: username, + CONF_PASSWORD: password, + }, + ) + await self.hass.config_entries.async_reload(existing_entry.entry_id) + return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", @@ -106,42 +107,11 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_reconfigure( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle reconfiguration of existing entry.""" - errors: dict[str, str] = {} - - if user_input is not None: - reconfigure_entry = self._get_reconfigure_entry() - username = user_input[CONF_USERNAME] - - errors = await self.hass.async_add_executor_job( - validate_credentials, username, user_input[CONF_PASSWORD] - ) - if ( - username != reconfigure_entry.unique_id - and await self.async_set_unique_id(username) - ): - errors["base"] = "unique_id_exists" - if not errors: - return self.async_update_reload_and_abort( - reconfigure_entry, - unique_id=username, - data_updates=user_input, - ) - - return self.async_show_form( - step_id="reconfigure", - data_schema=DATA_SCHEMA, - errors=errors, - ) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - errors: dict[str, str] = {} + errors = {} if user_input is not None: username = user_input[CONF_USERNAME] @@ -149,9 +119,17 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): name = DEFAULT_NAME area = user_input.get(CONF_AREA_ID, DEFAULT_AREA_ID) - errors = await self.hass.async_add_executor_job( - validate_credentials, username, password - ) + try: + await self.hass.async_add_executor_job( + YaleSmartAlarmClient, username, password + ) + except AuthenticationError as error: + LOGGER.error("Authentication failed. 
Check credentials %s", error) + errors = {"base": "invalid_auth"} + except YALE_BASE_ERRORS as error: + LOGGER.error("Connection to API failed %s", error) + errors = {"base": "cannot_connect"} + if not errors: await self.async_set_unique_id(username) self._abort_if_unique_id_configured() @@ -176,18 +154,32 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): class YaleOptionsFlowHandler(OptionsFlow): """Handle Yale options.""" + def __init__(self, entry: ConfigEntry) -> None: + """Initialize Yale options flow.""" + self.entry = entry + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Manage Yale options.""" + errors: dict[str, Any] = {} - if user_input is not None: + if user_input: return self.async_create_entry(data=user_input) return self.async_show_form( step_id="init", - data_schema=self.add_suggested_values_to_schema( - OPTIONS_SCHEMA, - self.config_entry.options, + data_schema=vol.Schema( + { + vol.Optional( + CONF_LOCK_CODE_DIGITS, + description={ + "suggested_value": self.entry.options.get( + CONF_LOCK_CODE_DIGITS, DEFAULT_LOCK_CODE_DIGITS + ) + }, + ): int, + } ), + errors=errors, ) diff --git a/homeassistant/components/yale_smart_alarm/const.py b/homeassistant/components/yale_smart_alarm/const.py index 14e31268ec9..e7b732c6cf9 100644 --- a/homeassistant/components/yale_smart_alarm/const.py +++ b/homeassistant/components/yale_smart_alarm/const.py @@ -9,8 +9,12 @@ from yalesmartalarmclient.client import ( ) from yalesmartalarmclient.exceptions import AuthenticationError, UnknownError -from homeassistant.components.alarm_control_panel import AlarmControlPanelState -from homeassistant.const import Platform +from homeassistant.const import ( + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_DISARMED, + Platform, +) CONF_AREA_ID = "area_id" CONF_LOCK_CODE_DIGITS = "lock_code_digits" @@ -35,15 +39,13 @@ PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, Platform.LOCK, - Platform.SELECT, Platform.SENSOR, - Platform.SWITCH, ] STATE_MAP = { - YALE_STATE_DISARM: AlarmControlPanelState.DISARMED, - YALE_STATE_ARM_PARTIAL: AlarmControlPanelState.ARMED_HOME, - YALE_STATE_ARM_FULL: AlarmControlPanelState.ARMED_AWAY, + YALE_STATE_DISARM: STATE_ALARM_DISARMED, + YALE_STATE_ARM_PARTIAL: STATE_ALARM_ARMED_HOME, + YALE_STATE_ARM_FULL: STATE_ALARM_ARMED_AWAY, } YALE_BASE_ERRORS = ( diff --git a/homeassistant/components/yale_smart_alarm/coordinator.py b/homeassistant/components/yale_smart_alarm/coordinator.py index 66bd71c9f1e..5307e166e17 100644 --- a/homeassistant/components/yale_smart_alarm/coordinator.py +++ b/homeassistant/components/yale_smart_alarm/coordinator.py @@ -3,9 +3,8 @@ from __future__ import annotations from datetime import timedelta -from typing import TYPE_CHECKING, Any +from typing import Any -from yalesmartalarmclient import YaleLock from yalesmartalarmclient.client import YaleSmartAlarmClient from yalesmartalarmclient.exceptions import AuthenticationError @@ -21,11 +20,10 @@ from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, LOGGER, YALE_BASE_ERRORS class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """A Yale Data Update Coordinator.""" - yale: YaleSmartAlarmClient - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: """Initialize the Yale hub.""" self.entry = entry + self.yale: YaleSmartAlarmClient | None = None super().__init__( hass, LOGGER, @@ -33,36 +31,72 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): 
update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL), always_update=False, ) - self.locks: list[YaleLock] = [] - - async def _async_setup(self) -> None: - """Set up connection to Yale.""" - try: - self.yale = await self.hass.async_add_executor_job( - YaleSmartAlarmClient, - self.entry.data[CONF_USERNAME], - self.entry.data[CONF_PASSWORD], - ) - self.locks = await self.hass.async_add_executor_job(self.yale.get_locks) - except AuthenticationError as error: - raise ConfigEntryAuthFailed from error - except YALE_BASE_ERRORS as error: - raise UpdateFailed from error async def _async_update_data(self) -> dict[str, Any]: """Fetch data from Yale.""" updates = await self.hass.async_add_executor_job(self.get_updates) + locks = [] door_windows = [] temp_sensors = [] for device in updates["cycle"]["device_status"]: state = device["status1"] + if device["type"] == "device_type.door_lock": + lock_status_str = device["minigw_lock_status"] + lock_status = int(str(lock_status_str or 0), 16) + closed = (lock_status & 16) == 16 + locked = (lock_status & 1) == 1 + if not lock_status and "device_status.lock" in state: + device["_state"] = "locked" + device["_state2"] = "unknown" + locks.append(device) + continue + if not lock_status and "device_status.unlock" in state: + device["_state"] = "unlocked" + device["_state2"] = "unknown" + locks.append(device) + continue + if ( + lock_status + and ( + "device_status.lock" in state or "device_status.unlock" in state + ) + and closed + and locked + ): + device["_state"] = "locked" + device["_state2"] = "closed" + locks.append(device) + continue + if ( + lock_status + and ( + "device_status.lock" in state or "device_status.unlock" in state + ) + and closed + and not locked + ): + device["_state"] = "unlocked" + device["_state2"] = "closed" + locks.append(device) + continue + if ( + lock_status + and ( + "device_status.lock" in state or "device_status.unlock" in state + ) + and not closed + ): + device["_state"] = "unlocked" + device["_state2"] = "open" + locks.append(device) + continue + device["_state"] = "unavailable" + locks.append(device) + continue if device["type"] == "device_type.door_contact": - device["_battery"] = False - if "device_status.low_battery" in state: - device["_battery"] = True if "device_status.dc_close" in state: device["_state"] = "closed" door_windows.append(device) @@ -80,46 +114,48 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): _sensor_map = { contact["address"]: contact["_state"] for contact in door_windows } - _sensor_battery_map = { - f"{contact["address"]}-battery": contact["_battery"] - for contact in door_windows - } + _lock_map = {lock["address"]: lock["_state"] for lock in locks} _temp_map = {temp["address"]: temp["status_temp"] for temp in temp_sensors} return { "alarm": updates["arm_status"], + "locks": locks, "door_windows": door_windows, "temp_sensors": temp_sensors, "status": updates["status"], "online": updates["online"], "sensor_map": _sensor_map, - "sensor_battery_map": _sensor_battery_map, "temp_map": _temp_map, + "lock_map": _lock_map, "panel_info": updates["panel_info"], } def get_updates(self) -> dict[str, Any]: """Fetch data from Yale.""" + + if self.yale is None: + try: + self.yale = YaleSmartAlarmClient( + self.entry.data[CONF_USERNAME], self.entry.data[CONF_PASSWORD] + ) + except AuthenticationError as error: + raise ConfigEntryAuthFailed from error + except YALE_BASE_ERRORS as error: + raise UpdateFailed from error + try: arm_status = self.yale.get_armed_status() - data = 
self.yale.get_information() - if TYPE_CHECKING: - assert data.cycle - for device in data.cycle["data"]["device_status"]: - if device["type"] == YaleLock.DEVICE_TYPE: - for lock in self.locks: - if lock.name == device["name"]: - lock.update(device) + data = self.yale.get_all() + cycle = data["CYCLE"] + status = data["STATUS"] + online = data["ONLINE"] + panel_info = data["PANEL INFO"] + except AuthenticationError as error: raise ConfigEntryAuthFailed from error except YALE_BASE_ERRORS as error: raise UpdateFailed from error - cycle = data.cycle["data"] if data.cycle else None - status = data.status["data"] if data.status else None - online = data.online["data"] if data.online else None - panel_info = data.panel_info["data"] if data.panel_info else None - return { "arm_status": arm_status, "cycle": cycle, diff --git a/homeassistant/components/yale_smart_alarm/diagnostics.py b/homeassistant/components/yale_smart_alarm/diagnostics.py index eb7b2be9fb4..82d2ca9a915 100644 --- a/homeassistant/components/yale_smart_alarm/diagnostics.py +++ b/homeassistant/components/yale_smart_alarm/diagnostics.py @@ -2,7 +2,6 @@ from __future__ import annotations -from dataclasses import asdict from typing import Any from homeassistant.components.diagnostics import async_redact_data @@ -30,4 +29,4 @@ async def async_get_config_entry_diagnostics( assert coordinator.yale get_all_data = await hass.async_add_executor_job(coordinator.yale.get_all) - return async_redact_data(asdict(get_all_data), TO_REDACT) + return async_redact_data(get_all_data, TO_REDACT) diff --git a/homeassistant/components/yale_smart_alarm/entity.py b/homeassistant/components/yale_smart_alarm/entity.py index e37dc3562f5..179e20d509d 100644 --- a/homeassistant/components/yale_smart_alarm/entity.py +++ b/homeassistant/components/yale_smart_alarm/entity.py @@ -1,7 +1,5 @@ """Base class for yale_smart_alarm entity.""" -from yalesmartalarmclient import YaleLock - from homeassistant.const import CONF_NAME, CONF_USERNAME from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity import Entity @@ -11,7 +9,7 @@ from .const import DOMAIN, MANUFACTURER, MODEL from .coordinator import YaleDataUpdateCoordinator -class YaleEntity(CoordinatorEntity[YaleDataUpdateCoordinator]): +class YaleEntity(CoordinatorEntity[YaleDataUpdateCoordinator], Entity): """Base implementation for Yale device.""" _attr_has_entity_name = True @@ -25,29 +23,10 @@ class YaleEntity(CoordinatorEntity[YaleDataUpdateCoordinator]): manufacturer=MANUFACTURER, model=MODEL, identifiers={(DOMAIN, data["address"])}, - via_device=(DOMAIN, coordinator.entry.data[CONF_USERNAME]), + via_device=(DOMAIN, self.coordinator.entry.data[CONF_USERNAME]), ) -class YaleLockEntity(CoordinatorEntity[YaleDataUpdateCoordinator]): - """Base implementation for Yale lock device.""" - - _attr_has_entity_name = True - - def __init__(self, coordinator: YaleDataUpdateCoordinator, lock: YaleLock) -> None: - """Initialize an Yale device.""" - super().__init__(coordinator) - self._attr_unique_id: str = lock.sid() - self._attr_device_info = DeviceInfo( - name=lock.name, - manufacturer=MANUFACTURER, - model=MODEL, - identifiers={(DOMAIN, lock.sid())}, - via_device=(DOMAIN, coordinator.entry.data[CONF_USERNAME]), - ) - self.lock_data = lock - - class YaleAlarmEntity(CoordinatorEntity[YaleDataUpdateCoordinator], Entity): """Base implementation for Yale Alarm device.""" diff --git a/homeassistant/components/yale_smart_alarm/icons.json 
b/homeassistant/components/yale_smart_alarm/icons.json index fb83ea88f97..4cb5888a406 100644 --- a/homeassistant/components/yale_smart_alarm/icons.json +++ b/homeassistant/components/yale_smart_alarm/icons.json @@ -4,16 +4,6 @@ "panic": { "default": "mdi:alarm-light" } - }, - "select": { - "volume": { - "default": "mdi:volume-high", - "state": { - "high": "mdi:volume-high", - "low": "mdi:volume-low", - "off": "mdi:volume-off" - } - } } } } diff --git a/homeassistant/components/yale_smart_alarm/lock.py b/homeassistant/components/yale_smart_alarm/lock.py index 243299658ed..3b4d0a19039 100644 --- a/homeassistant/components/yale_smart_alarm/lock.py +++ b/homeassistant/components/yale_smart_alarm/lock.py @@ -2,14 +2,12 @@ from __future__ import annotations -from typing import Any +from typing import TYPE_CHECKING, Any -from yalesmartalarmclient import YaleLock, YaleLockState - -from homeassistant.components.lock import LockEntity, LockState +from homeassistant.components.lock import LockEntity from homeassistant.const import ATTR_CODE from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import YaleConfigEntry @@ -20,13 +18,7 @@ from .const import ( YALE_ALL_ERRORS, ) from .coordinator import YaleDataUpdateCoordinator -from .entity import YaleLockEntity - -LOCK_STATE_MAP = { - YaleLockState.LOCKED: LockState.LOCKED, - YaleLockState.UNLOCKED: LockState.UNLOCKED, - YaleLockState.DOOR_OPEN: LockState.OPEN, -} +from .entity import YaleEntity async def async_setup_entry( @@ -38,61 +30,63 @@ async def async_setup_entry( code_format = entry.options.get(CONF_LOCK_CODE_DIGITS, DEFAULT_LOCK_CODE_DIGITS) async_add_entities( - YaleDoorlock(coordinator, lock, code_format) for lock in coordinator.locks + YaleDoorlock(coordinator, data, code_format) + for data in coordinator.data["locks"] ) -class YaleDoorlock(YaleLockEntity, LockEntity): +class YaleDoorlock(YaleEntity, LockEntity): """Representation of a Yale doorlock.""" _attr_name = None def __init__( - self, coordinator: YaleDataUpdateCoordinator, lock: YaleLock, code_format: int + self, coordinator: YaleDataUpdateCoordinator, data: dict, code_format: int ) -> None: """Initialize the Yale Lock Device.""" - super().__init__(coordinator, lock) + super().__init__(coordinator, data) self._attr_code_format = rf"^\d{{{code_format}}}$" + self.lock_name: str = data["name"] async def async_unlock(self, **kwargs: Any) -> None: """Send unlock command.""" code: str | None = kwargs.get(ATTR_CODE) - return await self.async_set_lock(YaleLockState.UNLOCKED, code) + return await self.async_set_lock("unlocked", code) async def async_lock(self, **kwargs: Any) -> None: """Send lock command.""" - return await self.async_set_lock(YaleLockState.LOCKED, None) + return await self.async_set_lock("locked", None) - async def async_set_lock(self, state: YaleLockState, code: str | None) -> None: + async def async_set_lock(self, command: str, code: str | None) -> None: """Set lock.""" - if state is YaleLockState.UNLOCKED and not code: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="no_code", - ) + if TYPE_CHECKING: + assert self.coordinator.yale, "Connection to API is missing" - lock_state = False try: - if state is YaleLockState.LOCKED: + get_lock = await self.hass.async_add_executor_job( + self.coordinator.yale.lock_api.get, self.lock_name + ) + if 
command == "locked": lock_state = await self.hass.async_add_executor_job( - self.lock_data.close + self.coordinator.yale.lock_api.close_lock, + get_lock, ) - if code and state is YaleLockState.UNLOCKED: + if command == "unlocked": lock_state = await self.hass.async_add_executor_job( - self.lock_data.open, code + self.coordinator.yale.lock_api.open_lock, get_lock, code ) except YALE_ALL_ERRORS as error: raise HomeAssistantError( translation_domain=DOMAIN, translation_key="set_lock", translation_placeholders={ - "name": self.lock_data.name, + "name": self.lock_name, "error": str(error), }, ) from error if lock_state: - self.lock_data.set_state(state) + self.coordinator.data["lock_map"][self._attr_unique_id] = command self.async_write_ha_state() return raise HomeAssistantError( @@ -103,9 +97,4 @@ class YaleDoorlock(YaleLockEntity, LockEntity): @property def is_locked(self) -> bool | None: """Return true if the lock is locked.""" - return LOCK_STATE_MAP.get(self.lock_data.state()) == LockState.LOCKED - - @property - def is_open(self) -> bool | None: - """Return true if the lock is open.""" - return LOCK_STATE_MAP.get(self.lock_data.state()) == LockState.OPEN + return bool(self.coordinator.data["lock_map"][self._attr_unique_id] == "locked") diff --git a/homeassistant/components/yale_smart_alarm/manifest.json b/homeassistant/components/yale_smart_alarm/manifest.json index 9a13cf72db9..ed494505bae 100644 --- a/homeassistant/components/yale_smart_alarm/manifest.json +++ b/homeassistant/components/yale_smart_alarm/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale_smart_alarm", "iot_class": "cloud_polling", "loggers": ["yalesmartalarmclient"], - "requirements": ["yalesmartalarmclient==0.4.3"] + "requirements": ["yalesmartalarmclient==0.3.9"] } diff --git a/homeassistant/components/yale_smart_alarm/select.py b/homeassistant/components/yale_smart_alarm/select.py deleted file mode 100644 index 55b56dd8e54..00000000000 --- a/homeassistant/components/yale_smart_alarm/select.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Select for Yale Alarm.""" - -from __future__ import annotations - -from yalesmartalarmclient import YaleLock, YaleLockVolume - -from homeassistant.components.select import SelectEntity -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . 
import YaleConfigEntry -from .coordinator import YaleDataUpdateCoordinator -from .entity import YaleLockEntity - -VOLUME_OPTIONS = {value.name.lower(): str(value.value) for value in YaleLockVolume} - - -async def async_setup_entry( - hass: HomeAssistant, entry: YaleConfigEntry, async_add_entities: AddEntitiesCallback -) -> None: - """Set up the Yale select entry.""" - - coordinator = entry.runtime_data - - async_add_entities( - YaleLockVolumeSelect(coordinator, lock) - for lock in coordinator.locks - if lock.supports_lock_config() - ) - - -class YaleLockVolumeSelect(YaleLockEntity, SelectEntity): - """Representation of a Yale lock volume select.""" - - _attr_translation_key = "volume" - - def __init__(self, coordinator: YaleDataUpdateCoordinator, lock: YaleLock) -> None: - """Initialize the Yale volume select.""" - super().__init__(coordinator, lock) - self._attr_unique_id = f"{lock.sid()}-volume" - self._attr_current_option = self.lock_data.volume().name.lower() - self._attr_options = [volume.name.lower() for volume in YaleLockVolume] - - async def async_select_option(self, option: str) -> None: - """Change the selected option.""" - convert_to_value = VOLUME_OPTIONS[option] - option_enum = YaleLockVolume(convert_to_value) - if await self.hass.async_add_executor_job( - self.lock_data.set_volume, option_enum - ): - self._attr_current_option = self.lock_data.volume().name.lower() - self.async_write_ha_state() - - @callback - def _handle_coordinator_update(self) -> None: - """Handle updated data from the coordinator.""" - self._attr_current_option = self.lock_data.volume().name.lower() - super()._handle_coordinator_update() diff --git a/homeassistant/components/yale_smart_alarm/strings.json b/homeassistant/components/yale_smart_alarm/strings.json index 7f940e1139e..ce89c9e69ea 100644 --- a/homeassistant/components/yale_smart_alarm/strings.json +++ b/homeassistant/components/yale_smart_alarm/strings.json @@ -2,13 +2,11 @@ "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "unique_id_exists": "Another config entry with this username already exist" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "step": { "user": { @@ -20,14 +18,10 @@ } }, "reauth_confirm": { - "data": { - "password": "[%key:common::config_flow::data::password%]" - } - }, - "reconfigure": { "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", + "name": "[%key:common::config_flow::data::name%]", "area_id": "[%key:component::yale_smart_alarm::config::step::user::data::area_id%]" } } @@ -61,21 +55,6 @@ "panic": { "name": "Panic button" } - }, - "switch": { - "autolock": { - "name": "Autolock" - } - }, - "select": { - "volume": { - "name": "Volume", - "state": { - "high": "High", - "low": "Low", - "off": "[%key:common::state::off%]" - } - } } }, "exceptions": { @@ -88,9 +67,6 @@ "set_lock": { "message": "Could not set lock for {name}: {error}" }, - "no_code": { - "message": "Can not unlock without code" - }, "could_not_change_lock": { "message": "Could not set lock, check 
system ready for lock" }, diff --git a/homeassistant/components/yale_smart_alarm/switch.py b/homeassistant/components/yale_smart_alarm/switch.py deleted file mode 100644 index e8c0817c2de..00000000000 --- a/homeassistant/components/yale_smart_alarm/switch.py +++ /dev/null @@ -1,59 +0,0 @@ -"""Switches for Yale Alarm.""" - -from __future__ import annotations - -from typing import Any - -from yalesmartalarmclient import YaleLock - -from homeassistant.components.switch import SwitchEntity -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . import YaleConfigEntry -from .coordinator import YaleDataUpdateCoordinator -from .entity import YaleLockEntity - - -async def async_setup_entry( - hass: HomeAssistant, entry: YaleConfigEntry, async_add_entities: AddEntitiesCallback -) -> None: - """Set up the Yale switch entry.""" - - coordinator = entry.runtime_data - - async_add_entities( - YaleAutolockSwitch(coordinator, lock) - for lock in coordinator.locks - if lock.supports_lock_config() - ) - - -class YaleAutolockSwitch(YaleLockEntity, SwitchEntity): - """Representation of a Yale autolock switch.""" - - _attr_translation_key = "autolock" - - def __init__(self, coordinator: YaleDataUpdateCoordinator, lock: YaleLock) -> None: - """Initialize the Yale Autolock Switch.""" - super().__init__(coordinator, lock) - self._attr_unique_id = f"{lock.sid()}-autolock" - self._attr_is_on = self.lock_data.autolock() - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the entity on.""" - if await self.hass.async_add_executor_job(self.lock_data.set_autolock, True): - self._attr_is_on = True - self.async_write_ha_state() - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the entity off.""" - if await self.hass.async_add_executor_job(self.lock_data.set_autolock, False): - self._attr_is_on = False - self.async_write_ha_state() - - @callback - def _handle_coordinator_update(self) -> None: - """Handle updated data from the coordinator.""" - self._attr_is_on = self.lock_data.autolock() - super()._handle_coordinator_update() diff --git a/homeassistant/components/yalexs_ble/config_flow.py b/homeassistant/components/yalexs_ble/config_flow.py index 6de74759686..c0df4e26821 100644 --- a/homeassistant/components/yalexs_ble/config_flow.py +++ b/homeassistant/components/yalexs_ble/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Mapping import logging -from typing import Any, Self +from typing import Any from bleak_retry_connector import BleakError, BLEDevice import voluptuous as vol @@ -68,16 +68,12 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _address: str | None = None - _local_name_is_unique = False - active = False - local_name: str | None = None - def __init__(self) -> None: """Initialize the config flow.""" self._discovery_info: BluetoothServiceInfoBleak | None = None self._discovered_devices: dict[str, BluetoothServiceInfoBleak] = {} self._lock_cfg: ValidatedLockConfig | None = None + self._reauth_entry: ConfigEntry | None = None async def async_step_bluetooth( self, discovery_info: BluetoothServiceInfoBleak @@ -85,7 +81,7 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the bluetooth discovery step.""" await self.async_set_unique_id(discovery_info.address) self._abort_if_unique_id_configured() - self.local_name = discovery_info.name + self.context["local_name"] = discovery_info.name self._discovery_info = discovery_info 
self.context["title_placeholders"] = { "name": human_readable_name( @@ -107,8 +103,8 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): ) address = lock_cfg.address - self.local_name = lock_cfg.local_name - self._local_name_is_unique = local_name_is_unique(self.local_name) + local_name = lock_cfg.local_name + hass = self.hass # We do not want to raise on progress as integration_discovery takes # precedence over other discovery flows since we already have the keys. @@ -120,7 +116,7 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured(updates=new_data) for entry in self._async_current_entries(): if ( - self._local_name_is_unique + local_name_is_unique(lock_cfg.local_name) and entry.data.get(CONF_LOCAL_NAME) == lock_cfg.local_name ): return self.async_update_reload_and_abort( @@ -128,14 +124,27 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): ) self._discovery_info = async_find_existing_service_info( - self.hass, self.local_name, address + hass, local_name, address ) if not self._discovery_info: return self.async_abort(reason="no_devices_found") - self._address = address - if self.hass.config_entries.flow.async_has_matching_flow(self): - raise AbortFlow("already_in_progress") + # Integration discovery should abort other flows unless they + # are already in the process of being set up since this discovery + # will already have all the keys and the user can simply confirm. + for progress in self._async_in_progress(include_uninitialized=True): + context = progress["context"] + if ( + local_name_is_unique(local_name) + and context.get("local_name") == local_name + ) or context.get("unique_id") == address: + if context.get("active"): + # The user has already started interacting with this flow + # and entered the keys. We abort the discovery flow since + # we assume they do not want to use the discovered keys for + # some reason. + raise AbortFlow("already_in_progress") + hass.config_entries.flow.async_abort(progress["flow_id"]) self._lock_cfg = lock_cfg self.context["title_placeholders"] = { @@ -145,24 +154,6 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): } return await self.async_step_integration_discovery_confirm() - def is_matching(self, other_flow: Self) -> bool: - """Return True if other_flow is matching this flow.""" - # Integration discovery should abort other flows unless they - # are already in the process of being set up since this discovery - # will already have all the keys and the user can simply confirm. - if ( - self._local_name_is_unique and other_flow.local_name == self.local_name - ) or other_flow.unique_id == self._address: - if other_flow.active: - # The user has already started interacting with this flow - # and entered the keys. We abort the discovery flow since - # we assume they do not want to use the discovered keys for - # some reason. 
- return True - self.hass.config_entries.flow.async_abort(other_flow.flow_id) - - return False - async def async_step_integration_discovery_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -193,6 +184,9 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" + self._reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) return await self.async_step_reauth_validate() async def async_step_reauth_validate( @@ -200,7 +194,8 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle reauth and validation.""" errors = {} - reauth_entry = self._get_reauth_entry() + reauth_entry = self._reauth_entry + assert reauth_entry is not None if user_input is not None: if ( device := async_ble_device_from_address( @@ -217,7 +212,7 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): ) ): return self.async_update_reload_and_abort( - reauth_entry, data_updates=user_input + reauth_entry, data={**reauth_entry.data, **user_input} ) return self.async_show_form( @@ -239,7 +234,7 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: - self.active = True + self.context["active"] = True address = user_input[CONF_ADDRESS] discovery_info = self._discovered_devices[address] local_name = discovery_info.name @@ -312,12 +307,16 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> YaleXSBLEOptionsFlowHandler: """Get the options flow for this handler.""" - return YaleXSBLEOptionsFlowHandler() + return YaleXSBLEOptionsFlowHandler(config_entry) class YaleXSBLEOptionsFlowHandler(OptionsFlow): """Handle YaleXSBLE options.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize YaleXSBLE options flow.""" + self.entry = config_entry + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -339,9 +338,7 @@ class YaleXSBLEOptionsFlowHandler(OptionsFlow): { vol.Optional( CONF_ALWAYS_CONNECTED, - default=self.config_entry.options.get( - CONF_ALWAYS_CONNECTED, False - ), + default=self.entry.options.get(CONF_ALWAYS_CONNECTED, False), ): bool, } ), diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index 1baeaeea63f..293ba87df86 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ -12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.5.0"] + "requirements": ["yalexs-ble==2.4.3"] } diff --git a/homeassistant/components/yamaha/const.py b/homeassistant/components/yamaha/const.py index 1cdb619b6ef..c0f4e34dd50 100644 --- a/homeassistant/components/yamaha/const.py +++ b/homeassistant/components/yamaha/const.py @@ -1,8 +1,6 @@ """Constants for the Yamaha component.""" DOMAIN = "yamaha" -DISCOVER_TIMEOUT = 3 -KNOWN_ZONES = "known_zones" CURSOR_TYPE_DOWN = "down" CURSOR_TYPE_LEFT = "left" CURSOR_TYPE_RETURN = "return" diff --git a/homeassistant/components/yamaha/icons.json b/homeassistant/components/yamaha/icons.json index 40eceda3b3e..f7075508b0d 100644 --- a/homeassistant/components/yamaha/icons.json +++ b/homeassistant/components/yamaha/icons.json @@ -1,13 +1,7 @@ { "services": { - "enable_output": { - "service": 
"mdi:audio-input-stereo-minijack" - }, - "menu_cursor": { - "service": "mdi:cursor-default" - }, - "select_scene": { - "service": "mdi:palette" - } + "enable_output": "mdi:audio-input-stereo-minijack", + "menu_cursor": "mdi:cursor-default", + "select_scene": "mdi:palette" } } diff --git a/homeassistant/components/yamaha/media_player.py b/homeassistant/components/yamaha/media_player.py index c16433b3c37..1be7cb03e17 100644 --- a/homeassistant/components/yamaha/media_player.py +++ b/homeassistant/components/yamaha/media_player.py @@ -7,7 +7,6 @@ from typing import Any import requests import rxv -from rxv import RXV import voluptuous as vol from homeassistant.components.media_player import ( @@ -19,7 +18,6 @@ from homeassistant.components.media_player import ( ) from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant -from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -31,9 +29,6 @@ from .const import ( CURSOR_TYPE_RIGHT, CURSOR_TYPE_SELECT, CURSOR_TYPE_UP, - DISCOVER_TIMEOUT, - DOMAIN, - KNOWN_ZONES, SERVICE_ENABLE_OUTPUT, SERVICE_MENU_CURSOR, SERVICE_SELECT_SCENE, @@ -60,6 +55,7 @@ CURSOR_TYPE_MAP = { CURSOR_TYPE_SELECT: rxv.RXV.menu_sel.__name__, CURSOR_TYPE_UP: rxv.RXV.menu_up.__name__, } +DATA_YAMAHA = "yamaha_known_receivers" DEFAULT_NAME = "Yamaha Receiver" SUPPORT_YAMAHA = ( @@ -103,7 +99,6 @@ class YamahaConfigInfo: self.zone_ignore = config.get(CONF_ZONE_IGNORE) self.zone_names = config.get(CONF_ZONE_NAMES) self.from_discovery = False - _LOGGER.debug("Discovery Info: %s", discovery_info) if discovery_info is not None: self.name = discovery_info.get("name") self.model = discovery_info.get("model_name") @@ -113,27 +108,24 @@ class YamahaConfigInfo: self.from_discovery = True -def _discovery(config_info: YamahaConfigInfo) -> list[RXV]: - """Discover list of zone controllers from configuration in the network.""" +def _discovery(config_info): + """Discover receivers from configuration in the network.""" if config_info.from_discovery: - _LOGGER.debug("Discovery Zones") - zones = rxv.RXV( + receivers = rxv.RXV( config_info.ctrl_url, model_name=config_info.model, friendly_name=config_info.name, unit_desc_url=config_info.desc_url, ).zone_controllers() + _LOGGER.debug("Receivers: %s", receivers) elif config_info.host is None: - _LOGGER.debug("Config No Host Supplied Zones") - zones = [] - for recv in rxv.find(DISCOVER_TIMEOUT): - zones.extend(recv.zone_controllers()) + receivers = [] + for recv in rxv.find(): + receivers.extend(recv.zone_controllers()) else: - _LOGGER.debug("Config Zones") - zones = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers() + receivers = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers() - _LOGGER.debug("Returned _discover zones: %s", zones) - return zones + return receivers async def async_setup_platform( @@ -146,28 +138,21 @@ async def async_setup_platform( # Keep track of configured receivers so that we don't end up # discovering a receiver dynamically that we have static config # for. Map each device from its zone_id . 
- known_zones = hass.data.setdefault(DOMAIN, {KNOWN_ZONES: set()})[KNOWN_ZONES] - _LOGGER.debug("Known receiver zones: %s", known_zones) + known_zones = hass.data.setdefault(DATA_YAMAHA, set()) # Get the Infos for configuration from config (YAML) or Discovery config_info = YamahaConfigInfo(config=config, discovery_info=discovery_info) # Async check if the Receivers are there in the network - try: - zone_ctrls = await hass.async_add_executor_job(_discovery, config_info) - except requests.exceptions.ConnectionError as ex: - raise PlatformNotReady(f"Issue while connecting to {config_info.name}") from ex + receivers = await hass.async_add_executor_job(_discovery, config_info) entities = [] - for zctrl in zone_ctrls: - _LOGGER.debug("Receiver zone: %s serial %s", zctrl.zone, zctrl.serial_number) - if config_info.zone_ignore and zctrl.zone in config_info.zone_ignore: - _LOGGER.debug("Ignore receiver zone: %s %s", config_info.name, zctrl.zone) + for receiver in receivers: + if config_info.zone_ignore and receiver.zone in config_info.zone_ignore: continue - assert config_info.name - entity = YamahaDeviceZone( + entity = YamahaDevice( config_info.name, - zctrl, + receiver, config_info.source_ignore, config_info.source_names, config_info.zone_names, @@ -178,9 +163,7 @@ async def async_setup_platform( known_zones.add(entity.zone_id) entities.append(entity) else: - _LOGGER.debug( - "Ignoring duplicate zone: %s %s", config_info.name, zctrl.zone - ) + _LOGGER.debug("Ignoring duplicate receiver: %s", config_info.name) async_add_entities(entities) @@ -201,53 +184,45 @@ async def async_setup_platform( platform.async_register_entity_service( SERVICE_MENU_CURSOR, {vol.Required(ATTR_CURSOR): vol.In(CURSOR_TYPE_MAP)}, - YamahaDeviceZone.menu_cursor.__name__, + YamahaDevice.menu_cursor.__name__, ) -class YamahaDeviceZone(MediaPlayerEntity): - """Representation of a Yamaha device zone.""" +class YamahaDevice(MediaPlayerEntity): + """Representation of a Yamaha device.""" - _reverse_mapping: dict[str, str] - - def __init__( - self, - name: str, - zctrl: RXV, - source_ignore: list[str] | None, - source_names: dict[str, str] | None, - zone_names: dict[str, str] | None, - ) -> None: + def __init__(self, name, receiver, source_ignore, source_names, zone_names): """Initialize the Yamaha Receiver.""" - self.zctrl = zctrl + self.receiver = receiver self._attr_is_volume_muted = False self._attr_volume_level = 0 self._attr_state = MediaPlayerState.OFF - self._source_ignore: list[str] = source_ignore or [] - self._source_names: dict[str, str] = source_names or {} - self._zone_names: dict[str, str] = zone_names or {} + self._source_ignore = source_ignore or [] + self._source_names = source_names or {} + self._zone_names = zone_names or {} + self._reverse_mapping = None self._playback_support = None self._is_playback_supported = False self._play_status = None self._name = name - self._zone = zctrl.zone - if self.zctrl.serial_number is not None: + self._zone = receiver.zone + if self.receiver.serial_number is not None: # Since not all receivers will have a serial number and set a unique id # the default name of the integration may not be changed # to avoid a breaking change. 
- self._attr_unique_id = f"{self.zctrl.serial_number}_{self._zone}" + self._attr_unique_id = f"{self.receiver.serial_number}_{self._zone}" def update(self) -> None: """Get the latest details from the device.""" try: - self._play_status = self.zctrl.play_status() + self._play_status = self.receiver.play_status() except requests.exceptions.ConnectionError: - _LOGGER.debug("Receiver is offline: %s", self._name) + _LOGGER.info("Receiver is offline: %s", self._name) self._attr_available = False return self._attr_available = True - if self.zctrl.on: + if self.receiver.on: if self._play_status is None: self._attr_state = MediaPlayerState.ON elif self._play_status.playing: @@ -257,27 +232,27 @@ class YamahaDeviceZone(MediaPlayerEntity): else: self._attr_state = MediaPlayerState.OFF - self._attr_is_volume_muted = self.zctrl.mute - self._attr_volume_level = (self.zctrl.volume / 100) + 1 + self._attr_is_volume_muted = self.receiver.mute + self._attr_volume_level = (self.receiver.volume / 100) + 1 if self.source_list is None: self.build_source_list() - current_source = self.zctrl.input + current_source = self.receiver.input self._attr_source = self._source_names.get(current_source, current_source) - self._playback_support = self.zctrl.get_playback_support() - self._is_playback_supported = self.zctrl.is_playback_supported( + self._playback_support = self.receiver.get_playback_support() + self._is_playback_supported = self.receiver.is_playback_supported( self._attr_source ) - surround_programs = self.zctrl.surround_programs() + surround_programs = self.receiver.surround_programs() if surround_programs: - self._attr_sound_mode = self.zctrl.surround_program + self._attr_sound_mode = self.receiver.surround_program self._attr_sound_mode_list = surround_programs else: self._attr_sound_mode = None self._attr_sound_mode_list = None - def build_source_list(self) -> None: + def build_source_list(self): """Build the source list.""" self._reverse_mapping = { alias: source for source, alias in self._source_names.items() @@ -285,12 +260,12 @@ class YamahaDeviceZone(MediaPlayerEntity): self._attr_source_list = sorted( self._source_names.get(source, source) - for source in self.zctrl.inputs() + for source in self.receiver.inputs() if source not in self._source_ignore ) @property - def name(self) -> str: + def name(self): """Return the name of the device.""" name = self._name zone_name = self._zone_names.get(self._zone, self._zone) @@ -300,9 +275,9 @@ class YamahaDeviceZone(MediaPlayerEntity): return name @property - def zone_id(self) -> str: + def zone_id(self): """Return a zone_id to ensure 1 media player per zone.""" - return f"{self.zctrl.ctrl_url}:{self._zone}" + return f"{self.receiver.ctrl_url}:{self._zone}" @property def supported_features(self) -> MediaPlayerEntityFeature: @@ -326,42 +301,42 @@ class YamahaDeviceZone(MediaPlayerEntity): def turn_off(self) -> None: """Turn off media player.""" - self.zctrl.on = False + self.receiver.on = False def set_volume_level(self, volume: float) -> None: """Set volume level, range 0..1.""" - zone_vol = 100 - (volume * 100) - negative_zone_vol = -zone_vol - self.zctrl.volume = negative_zone_vol + receiver_vol = 100 - (volume * 100) + negative_receiver_vol = -receiver_vol + self.receiver.volume = negative_receiver_vol def mute_volume(self, mute: bool) -> None: """Mute (true) or unmute (false) media player.""" - self.zctrl.mute = mute + self.receiver.mute = mute def turn_on(self) -> None: """Turn the media player on.""" - self.zctrl.on = True - self._attr_volume_level = 
(self.zctrl.volume / 100) + 1 + self.receiver.on = True + self._attr_volume_level = (self.receiver.volume / 100) + 1 def media_play(self) -> None: """Send play command.""" - self._call_playback_function(self.zctrl.play, "play") + self._call_playback_function(self.receiver.play, "play") def media_pause(self) -> None: """Send pause command.""" - self._call_playback_function(self.zctrl.pause, "pause") + self._call_playback_function(self.receiver.pause, "pause") def media_stop(self) -> None: """Send stop command.""" - self._call_playback_function(self.zctrl.stop, "stop") + self._call_playback_function(self.receiver.stop, "stop") def media_previous_track(self) -> None: """Send previous track command.""" - self._call_playback_function(self.zctrl.previous, "previous track") + self._call_playback_function(self.receiver.previous, "previous track") def media_next_track(self) -> None: """Send next track command.""" - self._call_playback_function(self.zctrl.next, "next track") + self._call_playback_function(self.receiver.next, "next track") def _call_playback_function(self, function, function_text): try: @@ -371,7 +346,7 @@ class YamahaDeviceZone(MediaPlayerEntity): def select_source(self, source: str) -> None: """Select input source.""" - self.zctrl.input = self._reverse_mapping.get(source, source) + self.receiver.input = self._reverse_mapping.get(source, source) def play_media( self, media_type: MediaType | str, media_id: str, **kwargs: Any @@ -395,43 +370,41 @@ class YamahaDeviceZone(MediaPlayerEntity): menu must be fetched by the receiver from the vtuner service. """ if media_type == "NET RADIO": - self.zctrl.net_radio(media_id) + self.receiver.net_radio(media_id) - def enable_output(self, port: str, enabled: bool) -> None: + def enable_output(self, port, enabled): """Enable or disable an output port..""" - self.zctrl.enable_output(port, enabled) + self.receiver.enable_output(port, enabled) - def menu_cursor(self, cursor: str) -> None: + def menu_cursor(self, cursor): """Press a menu cursor button.""" - getattr(self.zctrl, CURSOR_TYPE_MAP[cursor])() + getattr(self.receiver, CURSOR_TYPE_MAP[cursor])() - def set_scene(self, scene: str) -> None: + def set_scene(self, scene): """Set the current scene.""" try: - self.zctrl.scene = scene + self.receiver.scene = scene except AssertionError: _LOGGER.warning("Scene '%s' does not exist!", scene) def select_sound_mode(self, sound_mode: str) -> None: """Set Sound Mode for Receiver..""" - self.zctrl.surround_program = sound_mode + self.receiver.surround_program = sound_mode @property - def media_artist(self) -> str | None: + def media_artist(self): """Artist of current playing media.""" if self._play_status is not None: return self._play_status.artist - return None @property - def media_album_name(self) -> str | None: + def media_album_name(self): """Album of current playing media.""" if self._play_status is not None: return self._play_status.album - return None @property - def media_content_type(self) -> MediaType | None: + def media_content_type(self): """Content type of current playing media.""" # Loose assumption that if playback is supported, we are playing music if self._is_playback_supported: @@ -439,7 +412,7 @@ class YamahaDeviceZone(MediaPlayerEntity): return None @property - def media_title(self) -> str | None: + def media_title(self): """Artist of current playing media.""" if self._play_status is not None: song = self._play_status.song @@ -451,4 +424,3 @@ class YamahaDeviceZone(MediaPlayerEntity): return f"{station}: {song}" return song or station - 
return None diff --git a/homeassistant/components/yamaha_musiccast/__init__.py b/homeassistant/components/yamaha_musiccast/__init__.py index a2ce98dde56..667b411e6c4 100644 --- a/homeassistant/components/yamaha_musiccast/__init__.py +++ b/homeassistant/components/yamaha_musiccast/__init__.py @@ -2,22 +2,42 @@ from __future__ import annotations +from datetime import timedelta import logging -from aiomusiccast.musiccast_device import MusicCastDevice +from aiomusiccast import MusicCastConnectionException +from aiomusiccast.capabilities import Capability +from aiomusiccast.musiccast_device import MusicCastData, MusicCastDevice from homeassistant.components import ssdp from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST, Platform +from homeassistant.const import ATTR_CONNECTIONS, ATTR_VIA_DEVICE, CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import ( + CONNECTION_NETWORK_MAC, + DeviceInfo, + format_mac, +) +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, + UpdateFailed, +) -from .const import CONF_SERIAL, CONF_UPNP_DESC, DOMAIN -from .coordinator import MusicCastDataUpdateCoordinator +from .const import ( + BRAND, + CONF_SERIAL, + CONF_UPNP_DESC, + DEFAULT_ZONE, + DOMAIN, + ENTITY_CATEGORY_MAPPING, +) PLATFORMS = [Platform.MEDIA_PLAYER, Platform.NUMBER, Platform.SELECT, Platform.SWITCH] _LOGGER = logging.getLogger(__name__) +SCAN_INTERVAL = timedelta(seconds=60) async def get_upnp_desc(hass: HomeAssistant, host: str): @@ -83,3 +103,118 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: """Reload config entry.""" await hass.config_entries.async_reload(entry.entry_id) + + +class MusicCastDataUpdateCoordinator(DataUpdateCoordinator[MusicCastData]): # pylint: disable=hass-enforce-coordinator-module + """Class to manage fetching data from the API.""" + + def __init__(self, hass: HomeAssistant, client: MusicCastDevice) -> None: + """Initialize.""" + self.musiccast = client + + super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL) + self.entities: list[MusicCastDeviceEntity] = [] + + async def _async_update_data(self) -> MusicCastData: + """Update data via library.""" + try: + await self.musiccast.fetch() + except MusicCastConnectionException as exception: + raise UpdateFailed from exception + return self.musiccast.data + + +class MusicCastEntity(CoordinatorEntity[MusicCastDataUpdateCoordinator]): + """Defines a base MusicCast entity.""" + + def __init__( + self, + *, + name: str, + icon: str, + coordinator: MusicCastDataUpdateCoordinator, + enabled_default: bool = True, + ) -> None: + """Initialize the MusicCast entity.""" + super().__init__(coordinator) + self._attr_entity_registry_enabled_default = enabled_default + self._attr_icon = icon + self._attr_name = name + + +class MusicCastDeviceEntity(MusicCastEntity): + """Defines a MusicCast device entity.""" + + _zone_id: str = DEFAULT_ZONE + + @property + def device_id(self): + """Return the ID of the current device.""" + if self._zone_id == DEFAULT_ZONE: + return self.coordinator.data.device_id + return f"{self.coordinator.data.device_id}_{self._zone_id}" + + @property + def device_name(self): + """Return the name of the current device.""" + return 
self.coordinator.data.zones[self._zone_id].name + + @property + def device_info(self) -> DeviceInfo: + """Return device information about this MusicCast device.""" + + device_info = DeviceInfo( + name=self.device_name, + identifiers={ + ( + DOMAIN, + self.device_id, + ) + }, + manufacturer=BRAND, + model=self.coordinator.data.model_name, + sw_version=self.coordinator.data.system_version, + ) + + if self._zone_id == DEFAULT_ZONE: + device_info[ATTR_CONNECTIONS] = { + (CONNECTION_NETWORK_MAC, format_mac(mac)) + for mac in self.coordinator.data.mac_addresses.values() + } + else: + device_info[ATTR_VIA_DEVICE] = (DOMAIN, self.coordinator.data.device_id) + + return device_info + + async def async_added_to_hass(self): + """Run when this Entity has been added to HA.""" + await super().async_added_to_hass() + # All entities should register callbacks to update HA when their state changes + self.coordinator.musiccast.register_callback(self.async_write_ha_state) + + async def async_will_remove_from_hass(self): + """Entity being removed from hass.""" + await super().async_will_remove_from_hass() + self.coordinator.musiccast.remove_callback(self.async_write_ha_state) + + +class MusicCastCapabilityEntity(MusicCastDeviceEntity): + """Base Entity type for all capabilities.""" + + def __init__( + self, + coordinator: MusicCastDataUpdateCoordinator, + capability: Capability, + zone_id: str | None = None, + ) -> None: + """Initialize a capability based entity.""" + if zone_id is not None: + self._zone_id = zone_id + self.capability = capability + super().__init__(name=capability.name, icon="", coordinator=coordinator) + self._attr_entity_category = ENTITY_CATEGORY_MAPPING.get(capability.entity_type) + + @property + def unique_id(self) -> str: + """Return the unique ID for this entity.""" + return f"{self.device_id}_{self.capability.id}" diff --git a/homeassistant/components/yamaha_musiccast/coordinator.py b/homeassistant/components/yamaha_musiccast/coordinator.py deleted file mode 100644 index d5e0c67310a..00000000000 --- a/homeassistant/components/yamaha_musiccast/coordinator.py +++ /dev/null @@ -1,41 +0,0 @@ -"""The MusicCast integration.""" - -from __future__ import annotations - -from datetime import timedelta -import logging -from typing import TYPE_CHECKING - -from aiomusiccast import MusicCastConnectionException -from aiomusiccast.musiccast_device import MusicCastData, MusicCastDevice - -from homeassistant.core import HomeAssistant -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed - -from .const import DOMAIN - -if TYPE_CHECKING: - from .entity import MusicCastDeviceEntity - -_LOGGER = logging.getLogger(__name__) - -SCAN_INTERVAL = timedelta(seconds=60) - - -class MusicCastDataUpdateCoordinator(DataUpdateCoordinator[MusicCastData]): - """Class to manage fetching data from the API.""" - - def __init__(self, hass: HomeAssistant, client: MusicCastDevice) -> None: - """Initialize.""" - self.musiccast = client - - super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL) - self.entities: list[MusicCastDeviceEntity] = [] - - async def _async_update_data(self) -> MusicCastData: - """Update data via library.""" - try: - await self.musiccast.fetch() - except MusicCastConnectionException as exception: - raise UpdateFailed from exception - return self.musiccast.data diff --git a/homeassistant/components/yamaha_musiccast/entity.py b/homeassistant/components/yamaha_musiccast/entity.py deleted file mode 100644 index 4f1add825e4..00000000000 --- 
a/homeassistant/components/yamaha_musiccast/entity.py +++ /dev/null @@ -1,112 +0,0 @@ -"""The MusicCast integration.""" - -from __future__ import annotations - -from aiomusiccast.capabilities import Capability - -from homeassistant.const import ATTR_CONNECTIONS, ATTR_VIA_DEVICE -from homeassistant.helpers.device_registry import ( - CONNECTION_NETWORK_MAC, - DeviceInfo, - format_mac, -) -from homeassistant.helpers.update_coordinator import CoordinatorEntity - -from .const import BRAND, DEFAULT_ZONE, DOMAIN, ENTITY_CATEGORY_MAPPING -from .coordinator import MusicCastDataUpdateCoordinator - - -class MusicCastEntity(CoordinatorEntity[MusicCastDataUpdateCoordinator]): - """Defines a base MusicCast entity.""" - - def __init__( - self, - *, - name: str, - icon: str, - coordinator: MusicCastDataUpdateCoordinator, - enabled_default: bool = True, - ) -> None: - """Initialize the MusicCast entity.""" - super().__init__(coordinator) - self._attr_entity_registry_enabled_default = enabled_default - self._attr_icon = icon - self._attr_name = name - - -class MusicCastDeviceEntity(MusicCastEntity): - """Defines a MusicCast device entity.""" - - _zone_id: str = DEFAULT_ZONE - - @property - def device_id(self): - """Return the ID of the current device.""" - if self._zone_id == DEFAULT_ZONE: - return self.coordinator.data.device_id - return f"{self.coordinator.data.device_id}_{self._zone_id}" - - @property - def device_name(self): - """Return the name of the current device.""" - return self.coordinator.data.zones[self._zone_id].name - - @property - def device_info(self) -> DeviceInfo: - """Return device information about this MusicCast device.""" - - device_info = DeviceInfo( - name=self.device_name, - identifiers={ - ( - DOMAIN, - self.device_id, - ) - }, - manufacturer=BRAND, - model=self.coordinator.data.model_name, - sw_version=self.coordinator.data.system_version, - ) - - if self._zone_id == DEFAULT_ZONE: - device_info[ATTR_CONNECTIONS] = { - (CONNECTION_NETWORK_MAC, format_mac(mac)) - for mac in self.coordinator.data.mac_addresses.values() - } - else: - device_info[ATTR_VIA_DEVICE] = (DOMAIN, self.coordinator.data.device_id) - - return device_info - - async def async_added_to_hass(self): - """Run when this Entity has been added to HA.""" - await super().async_added_to_hass() - # All entities should register callbacks to update HA when their state changes - self.coordinator.musiccast.register_callback(self.async_write_ha_state) - - async def async_will_remove_from_hass(self): - """Entity being removed from hass.""" - await super().async_will_remove_from_hass() - self.coordinator.musiccast.remove_callback(self.async_write_ha_state) - - -class MusicCastCapabilityEntity(MusicCastDeviceEntity): - """Base Entity type for all capabilities.""" - - def __init__( - self, - coordinator: MusicCastDataUpdateCoordinator, - capability: Capability, - zone_id: str | None = None, - ) -> None: - """Initialize a capability based entity.""" - if zone_id is not None: - self._zone_id = zone_id - self.capability = capability - super().__init__(name=capability.name, icon="", coordinator=coordinator) - self._attr_entity_category = ENTITY_CATEGORY_MAPPING.get(capability.entity_type) - - @property - def unique_id(self) -> str: - """Return the unique ID for this entity.""" - return f"{self.device_id}_{self.capability.id}" diff --git a/homeassistant/components/yamaha_musiccast/media_player.py b/homeassistant/components/yamaha_musiccast/media_player.py index 4384cc34836..a068ac6ddca 100644 --- 
a/homeassistant/components/yamaha_musiccast/media_player.py +++ b/homeassistant/components/yamaha_musiccast/media_player.py @@ -27,6 +27,7 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import uuid +from . import MusicCastDataUpdateCoordinator, MusicCastDeviceEntity from .const import ( ATTR_MAIN_SYNC, ATTR_MC_LINK, @@ -37,8 +38,6 @@ from .const import ( MEDIA_CLASS_MAPPING, NULL_GROUP, ) -from .coordinator import MusicCastDataUpdateCoordinator -from .entity import MusicCastDeviceEntity _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/yamaha_musiccast/number.py b/homeassistant/components/yamaha_musiccast/number.py index 02dd6720d91..a5a591379c6 100644 --- a/homeassistant/components/yamaha_musiccast/number.py +++ b/homeassistant/components/yamaha_musiccast/number.py @@ -9,9 +9,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import MusicCastDataUpdateCoordinator -from .entity import MusicCastCapabilityEntity +from . import DOMAIN, MusicCastCapabilityEntity, MusicCastDataUpdateCoordinator async def async_setup_entry( diff --git a/homeassistant/components/yamaha_musiccast/select.py b/homeassistant/components/yamaha_musiccast/select.py index 3a4649b9ae5..b068b956e1b 100644 --- a/homeassistant/components/yamaha_musiccast/select.py +++ b/homeassistant/components/yamaha_musiccast/select.py @@ -9,9 +9,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN, TRANSLATION_KEY_MAPPING -from .coordinator import MusicCastDataUpdateCoordinator -from .entity import MusicCastCapabilityEntity +from . import DOMAIN, MusicCastCapabilityEntity, MusicCastDataUpdateCoordinator +from .const import TRANSLATION_KEY_MAPPING async def async_setup_entry( diff --git a/homeassistant/components/yamaha_musiccast/strings.json b/homeassistant/components/yamaha_musiccast/strings.json index eaa5ac50c80..d0ee6c030a6 100644 --- a/homeassistant/components/yamaha_musiccast/strings.json +++ b/homeassistant/components/yamaha_musiccast/strings.json @@ -20,9 +20,7 @@ "yxc_control_url_missing": "The control URL is not given in the ssdp description." }, "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "no_musiccast_device": "This device seems to be no MusicCast Device.", - "unknown": "[%key:common::config_flow::error::unknown%]" + "no_musiccast_device": "This device seems to be no MusicCast Device." } }, "entity": { diff --git a/homeassistant/components/yamaha_musiccast/switch.py b/homeassistant/components/yamaha_musiccast/switch.py index 49d031a02b5..2ae8388027a 100644 --- a/homeassistant/components/yamaha_musiccast/switch.py +++ b/homeassistant/components/yamaha_musiccast/switch.py @@ -9,9 +9,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import MusicCastDataUpdateCoordinator -from .entity import MusicCastCapabilityEntity +from . 
import DOMAIN, MusicCastCapabilityEntity, MusicCastDataUpdateCoordinator async def async_setup_entry( diff --git a/homeassistant/components/yandex_transport/manifest.json b/homeassistant/components/yandex_transport/manifest.json index 1d1219d5a95..703f81d2823 100644 --- a/homeassistant/components/yandex_transport/manifest.json +++ b/homeassistant/components/yandex_transport/manifest.json @@ -4,5 +4,5 @@ "codeowners": ["@rishatik92", "@devbis"], "documentation": "https://www.home-assistant.io/integrations/yandex_transport", "iot_class": "cloud_polling", - "requirements": ["aioymaps==1.2.5"] + "requirements": ["aioymaps==1.2.2"] } diff --git a/homeassistant/components/yandex_transport/sensor.py b/homeassistant/components/yandex_transport/sensor.py index 95c4785a341..30227e3261e 100644 --- a/homeassistant/components/yandex_transport/sensor.py +++ b/homeassistant/components/yandex_transport/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from datetime import timedelta import logging -from aioymaps import CaptchaError, NoSessionError, YandexMapsRequester +from aioymaps import CaptchaError, YandexMapsRequester import voluptuous as vol from homeassistant.components.sensor import ( @@ -88,7 +88,7 @@ class DiscoverYandexTransport(SensorEntity): closer_time = None try: yandex_reply = await self.requester.get_stop_info(self._stop_id) - except (CaptchaError, NoSessionError) as ex: + except CaptchaError as ex: _LOGGER.error( "%s. You may need to disable the integration for some time", ex, diff --git a/homeassistant/components/yardian/icons.json b/homeassistant/components/yardian/icons.json index 4ca3d83bd15..79bcc32adf2 100644 --- a/homeassistant/components/yardian/icons.json +++ b/homeassistant/components/yardian/icons.json @@ -7,8 +7,6 @@ } }, "services": { - "start_irrigation": { - "service": "mdi:water" - } + "start_irrigation": "mdi:water" } } diff --git a/homeassistant/components/yeelight/config_flow.py b/homeassistant/components/yeelight/config_flow.py index 7a3a0a2f100..d7bf4e25996 100644 --- a/homeassistant/components/yeelight/config_flow.py +++ b/homeassistant/components/yeelight/config_flow.py @@ -3,7 +3,6 @@ from __future__ import annotations import logging -from typing import Any, Self from urllib.parse import urlparse import voluptuous as vol @@ -23,7 +22,6 @@ from homeassistant.const import CONF_DEVICE, CONF_HOST, CONF_ID, CONF_MODEL, CON from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.typing import VolDictType from .const import ( CONF_DETECTED_MODEL, @@ -53,20 +51,17 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _discovered_ip: str = "" - _discovered_model: str - @staticmethod @callback - def async_get_options_flow( - config_entry: ConfigEntry, - ) -> OptionsFlowHandler: + def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Return the options flow.""" - return OptionsFlowHandler() + return OptionsFlowHandler(config_entry) - def __init__(self) -> None: + def __init__(self): """Initialize the config flow.""" - self._discovered_devices: dict[str, Any] = {} + self._discovered_devices = {} + self._discovered_model = None + self._discovered_ip = None async def async_step_homekit( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -87,7 +82,9 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle discovery from zeroconf.""" self._discovered_ip = discovery_info.host 
- await self.async_set_unique_id(f"{int(discovery_info.name[-26:-18]):#018x}") + await self.async_set_unique_id( + "{0:#0{1}x}".format(int(discovery_info.name[-26:-18]), 18) + ) return await self._async_handle_discovery_with_unique_id() async def async_step_ssdp( @@ -98,7 +95,7 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(discovery_info.ssdp_headers["id"]) return await self._async_handle_discovery_with_unique_id() - async def _async_handle_discovery_with_unique_id(self) -> ConfigFlowResult: + async def _async_handle_discovery_with_unique_id(self): """Handle any discovery with a unique id.""" for entry in self._async_current_entries(include_ignore=False): if entry.unique_id != self.unique_id and self.unique_id != entry.data.get( @@ -119,10 +116,12 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="already_configured") return await self._async_handle_discovery() - async def _async_handle_discovery(self) -> ConfigFlowResult: + async def _async_handle_discovery(self): """Handle any discovery.""" - if self.hass.config_entries.flow.async_has_matching_flow(self): - return self.async_abort(reason="already_in_progress") + self.context[CONF_HOST] = self._discovered_ip + for progress in self._async_in_progress(): + if progress.get("context", {}).get(CONF_HOST) == self._discovered_ip: + return self.async_abort(reason="already_in_progress") self._async_abort_entries_match({CONF_HOST: self._discovered_ip}) try: @@ -140,13 +139,7 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): ) return await self.async_step_discovery_confirm() - def is_matching(self, other_flow: Self) -> bool: - """Return True if other_flow is matching this flow.""" - return other_flow._discovered_ip == self._discovered_ip # noqa: SLF001 - - async def async_step_discovery_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_discovery_confirm(self, user_input=None): """Confirm discovery.""" if user_input is not None or not onboarding.async_is_onboarded(self.hass): return self.async_create_entry( @@ -169,9 +162,7 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): step_id="discovery_confirm", description_placeholders=placeholders ) - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: @@ -203,9 +194,7 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_pick_device( - self, user_input: dict[str, str] | None = None - ) -> ConfigFlowResult: + async def async_step_pick_device(self, user_input=None): """Handle the step to pick discovered device.""" if user_input is not None: unique_id = user_input[CONF_DEVICE] @@ -250,25 +239,23 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema({vol.Required(CONF_DEVICE): vol.In(devices_name)}), ) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, user_input=None): """Handle import step.""" - host = import_data[CONF_HOST] + host = user_input[CONF_HOST] try: await self._async_try_connect(host, raise_on_progress=False) except CannotConnect: _LOGGER.error("Failed to import %s: cannot connect", host) return self.async_abort(reason="cannot_connect") - if CONF_NIGHTLIGHT_SWITCH_TYPE in import_data: - import_data[CONF_NIGHTLIGHT_SWITCH] = ( - 
import_data.pop(CONF_NIGHTLIGHT_SWITCH_TYPE) + if CONF_NIGHTLIGHT_SWITCH_TYPE in user_input: + user_input[CONF_NIGHTLIGHT_SWITCH] = ( + user_input.pop(CONF_NIGHTLIGHT_SWITCH_TYPE) == NIGHTLIGHT_SWITCH_TYPE_LIGHT ) self._abort_if_unique_id_configured() - return self.async_create_entry(title=import_data[CONF_NAME], data=import_data) + return self.async_create_entry(title=user_input[CONF_NAME], data=user_input) - async def _async_try_connect( - self, host: str, raise_on_progress: bool = True - ) -> str: + async def _async_try_connect(self, host, raise_on_progress=True): """Set up with options.""" self._async_abort_entries_match({CONF_HOST: host}) @@ -298,12 +285,14 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Yeelight.""" - async def async_step_init( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize the option flow.""" + self._config_entry = config_entry + + async def async_step_init(self, user_input=None): """Handle the initial step.""" - data = self.config_entry.data - options = self.config_entry.options + data = self._config_entry.data + options = self._config_entry.options detected_model = data.get(CONF_DETECTED_MODEL) model = options[CONF_MODEL] or detected_model @@ -312,7 +301,7 @@ class OptionsFlowHandler(OptionsFlow): title="", data={CONF_MODEL: model, **options, **user_input} ) - schema_dict: VolDictType = {} + schema_dict = {} known_models = get_known_models() if is_unknown_model := model not in known_models: known_models.insert(0, model) diff --git a/homeassistant/components/yeelight/device.py b/homeassistant/components/yeelight/device.py index 09086dc91d9..c42fd072728 100644 --- a/homeassistant/components/yeelight/device.py +++ b/homeassistant/components/yeelight/device.py @@ -32,13 +32,13 @@ def async_format_model(model: str) -> str: @callback -def async_format_id(id_: str | None) -> str: +def async_format_id(id_: str) -> str: """Generate a more human readable id.""" return hex(int(id_, 16)) if id_ else "None" @callback -def async_format_model_id(model: str, id_: str | None) -> str: +def async_format_model_id(model: str, id_: str) -> str: """Generate a more human readable name.""" return f"{async_format_model(model)} {async_format_id(id_)}" diff --git a/homeassistant/components/yeelight/icons.json b/homeassistant/components/yeelight/icons.json index 898637e752c..bf0d0c497f0 100644 --- a/homeassistant/components/yeelight/icons.json +++ b/homeassistant/components/yeelight/icons.json @@ -7,29 +7,13 @@ } }, "services": { - "set_mode": { - "service": "mdi:cog" - }, - "set_color_scene": { - "service": "mdi:palette" - }, - "set_hsv_scene": { - "service": "mdi:palette" - }, - "set_color_temp_scene": { - "service": "mdi:palette" - }, - "set_color_flow_scene": { - "service": "mdi:palette" - }, - "set_auto_delay_off_scene": { - "service": "mdi:timer" - }, - "start_flow": { - "service": "mdi:play" - }, - "set_music_mode": { - "service": "mdi:music" - } + "set_mode": "mdi:cog", + "set_color_scene": "mdi:palette", + "set_hsv_scene": "mdi:palette", + "set_color_temp_scene": "mdi:palette", + "set_color_flow_scene": "mdi:palette", + "set_auto_delay_off_scene": "mdi:timer", + "start_flow": "mdi:play", + "set_music_mode": "mdi:music" } } diff --git a/homeassistant/components/yeelight/manifest.json b/homeassistant/components/yeelight/manifest.json index 8d0a2e31185..4c63ab79baf 100644 --- 
a/homeassistant/components/yeelight/manifest.json +++ b/homeassistant/components/yeelight/manifest.json @@ -17,7 +17,7 @@ "iot_class": "local_push", "loggers": ["async_upnp_client", "yeelight"], "quality_scale": "platinum", - "requirements": ["yeelight==0.7.14", "async-upnp-client==0.41.0"], + "requirements": ["yeelight==0.7.14", "async-upnp-client==0.39.0"], "zeroconf": [ { "type": "_miio._udp.local.", diff --git a/homeassistant/components/yeelight/scanner.py b/homeassistant/components/yeelight/scanner.py index ac482504880..6ca12e9bd01 100644 --- a/homeassistant/components/yeelight/scanner.py +++ b/homeassistant/components/yeelight/scanner.py @@ -67,8 +67,7 @@ class YeelightScanner: async def async_setup(self) -> None: """Set up the scanner.""" if self._setup_future is not None: - await self._setup_future - return + return await self._setup_future self._setup_future = self._hass.loop.create_future() connected_futures: list[asyncio.Future[None]] = [] diff --git a/homeassistant/components/yolink/climate.py b/homeassistant/components/yolink/climate.py index 98f1b764498..21e0a71ebcb 100644 --- a/homeassistant/components/yolink/climate.py +++ b/homeassistant/components/yolink/climate.py @@ -77,7 +77,6 @@ class YoLinkClimateEntity(YoLinkEntity, ClimateEntity): self._attr_fan_modes = [FAN_ON, FAN_AUTO] self._attr_min_temp = -10 self._attr_max_temp = 50 - self._attr_hvac_mode = None self._attr_hvac_modes = [ HVACMode.COOL, HVACMode.HEAT, diff --git a/homeassistant/components/yolink/config_flow.py b/homeassistant/components/yolink/config_flow.py index 2e96dcf9f8c..abdac696248 100644 --- a/homeassistant/components/yolink/config_flow.py +++ b/homeassistant/components/yolink/config_flow.py @@ -6,7 +6,7 @@ from collections.abc import Mapping import logging from typing import Any -from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult +from homeassistant.config_entries import ConfigEntry, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN @@ -18,6 +18,7 @@ class OAuth2FlowHandler( """Config flow to handle yolink OAuth2 authentication.""" DOMAIN = DOMAIN + _reauth_entry: ConfigEntry | None = None @property def logger(self) -> logging.Logger: @@ -34,6 +35,9 @@ class OAuth2FlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" + self._reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm(self, user_input=None) -> ConfigFlowResult: @@ -44,10 +48,12 @@ class OAuth2FlowHandler( async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: """Create an oauth config entry or update existing entry for reauth.""" - if self.source == SOURCE_REAUTH: - return self.async_update_reload_and_abort( - self._get_reauth_entry(), data_updates=data + if existing_entry := self._reauth_entry: + self.hass.config_entries.async_update_entry( + existing_entry, data=existing_entry.data | data ) + await self.hass.config_entries.async_reload(existing_entry.entry_id) + return self.async_abort(reason="reauth_successful") return self.async_create_entry(title="YoLink", data=data) async def async_step_user( @@ -55,6 +61,6 @@ class OAuth2FlowHandler( ) -> ConfigFlowResult: """Handle a flow start.""" existing_entry = await self.async_set_unique_id(DOMAIN) - if existing_entry and self.source != SOURCE_REAUTH: + if existing_entry and not self._reauth_entry: return 
self.async_abort(reason="already_configured") return await super().async_step_user(user_input) diff --git a/homeassistant/components/yolink/const.py b/homeassistant/components/yolink/const.py index eb6169eccad..e829fe08d32 100644 --- a/homeassistant/components/yolink/const.py +++ b/homeassistant/components/yolink/const.py @@ -17,19 +17,3 @@ YOLINK_OFFLINE_TIME = 32400 DEV_MODEL_WATER_METER_YS5007 = "YS5007" DEV_MODEL_MULTI_OUTLET_YS6801 = "YS6801" -DEV_MODEL_TH_SENSOR_YS8004_UC = "YS8004-UC" -DEV_MODEL_TH_SENSOR_YS8004_EC = "YS8004-EC" -DEV_MODEL_TH_SENSOR_YS8008_UC = "YS8008-UC" -DEV_MODEL_TH_SENSOR_YS8008_EC = "YS8008-EC" -DEV_MODEL_TH_SENSOR_YS8014_UC = "YS8014-UC" -DEV_MODEL_TH_SENSOR_YS8014_EC = "YS8014-EC" -DEV_MODEL_TH_SENSOR_YS8017_UC = "YS8017-UC" -DEV_MODEL_TH_SENSOR_YS8017_EC = "YS8017-EC" -DEV_MODEL_FLEX_FOB_YS3604_UC = "YS3604-UC" -DEV_MODEL_FLEX_FOB_YS3604_EC = "YS3604-EC" -DEV_MODEL_FLEX_FOB_YS3614_UC = "YS3614-UC" -DEV_MODEL_FLEX_FOB_YS3614_EC = "YS3614-EC" -DEV_MODEL_PLUG_YS6602_UC = "YS6602-UC" -DEV_MODEL_PLUG_YS6602_EC = "YS6602-EC" -DEV_MODEL_PLUG_YS6803_UC = "YS6803-UC" -DEV_MODEL_PLUG_YS6803_EC = "YS6803-EC" diff --git a/homeassistant/components/yolink/device_trigger.py b/homeassistant/components/yolink/device_trigger.py index 6e247bf858e..b7f83623be5 100644 --- a/homeassistant/components/yolink/device_trigger.py +++ b/homeassistant/components/yolink/device_trigger.py @@ -16,12 +16,6 @@ from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType from . import DOMAIN, YOLINK_EVENT -from .const import ( - DEV_MODEL_FLEX_FOB_YS3604_EC, - DEV_MODEL_FLEX_FOB_YS3604_UC, - DEV_MODEL_FLEX_FOB_YS3614_EC, - DEV_MODEL_FLEX_FOB_YS3614_UC, -) CONF_BUTTON_1 = "button_1" CONF_BUTTON_2 = "button_2" @@ -30,7 +24,7 @@ CONF_BUTTON_4 = "button_4" CONF_SHORT_PRESS = "short_press" CONF_LONG_PRESS = "long_press" -FLEX_FOB_4_BUTTONS = { +REMOTE_TRIGGER_TYPES = { f"{CONF_BUTTON_1}_{CONF_SHORT_PRESS}", f"{CONF_BUTTON_1}_{CONF_LONG_PRESS}", f"{CONF_BUTTON_2}_{CONF_SHORT_PRESS}", @@ -41,24 +35,14 @@ FLEX_FOB_4_BUTTONS = { f"{CONF_BUTTON_4}_{CONF_LONG_PRESS}", } -FLEX_FOB_2_BUTTONS = { - f"{CONF_BUTTON_1}_{CONF_SHORT_PRESS}", - f"{CONF_BUTTON_1}_{CONF_LONG_PRESS}", - f"{CONF_BUTTON_2}_{CONF_SHORT_PRESS}", - f"{CONF_BUTTON_2}_{CONF_LONG_PRESS}", -} - TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend( - {vol.Required(CONF_TYPE): vol.In(FLEX_FOB_4_BUTTONS)} + {vol.Required(CONF_TYPE): vol.In(REMOTE_TRIGGER_TYPES)} ) -# YoLink Remotes YS3604/YS3614 -FLEX_FOB_TRIGGER_TYPES: dict[str, set[str]] = { - DEV_MODEL_FLEX_FOB_YS3604_EC: FLEX_FOB_4_BUTTONS, - DEV_MODEL_FLEX_FOB_YS3604_UC: FLEX_FOB_4_BUTTONS, - DEV_MODEL_FLEX_FOB_YS3614_UC: FLEX_FOB_2_BUTTONS, - DEV_MODEL_FLEX_FOB_YS3614_EC: FLEX_FOB_2_BUTTONS, +# YoLink Remotes YS3604/YS3605/YS3606/YS3607 +DEVICE_TRIGGER_TYPES: dict[str, set[str]] = { + ATTR_DEVICE_SMART_REMOTER: REMOTE_TRIGGER_TYPES, } @@ -70,8 +54,7 @@ async def async_get_triggers( registry_device = device_registry.async_get(device_id) if not registry_device or registry_device.model != ATTR_DEVICE_SMART_REMOTER: return [] - if registry_device.model_id not in list(FLEX_FOB_TRIGGER_TYPES.keys()): - return [] + return [ { CONF_DEVICE_ID: device_id, @@ -79,7 +62,7 @@ async def async_get_triggers( CONF_PLATFORM: "device", CONF_TYPE: trigger, } - for trigger in FLEX_FOB_TRIGGER_TYPES[registry_device.model_id] + for trigger in DEVICE_TRIGGER_TYPES[ATTR_DEVICE_SMART_REMOTER] ] diff --git a/homeassistant/components/yolink/entity.py 
b/homeassistant/components/yolink/entity.py index 0f500b72404..d9ca2968493 100644 --- a/homeassistant/components/yolink/entity.py +++ b/homeassistant/components/yolink/entity.py @@ -55,7 +55,6 @@ class YoLinkEntity(CoordinatorEntity[YoLinkCoordinator]): identifiers={(DOMAIN, self.coordinator.device.device_id)}, manufacturer=MANUFACTURER, model=self.coordinator.device.device_type, - model_id=self.coordinator.device.device_model_name, name=self.coordinator.device.device_name, ) diff --git a/homeassistant/components/yolink/icons.json b/homeassistant/components/yolink/icons.json index c58d219a2e0..ee9037c864a 100644 --- a/homeassistant/components/yolink/icons.json +++ b/homeassistant/components/yolink/icons.json @@ -17,9 +17,6 @@ }, "power_failure_alarm_beep": { "default": "mdi:bullhorn" - }, - "water_meter_reading": { - "default": "mdi:gauge" } }, "switch": { @@ -29,8 +26,6 @@ } }, "services": { - "play_on_speaker_hub": { - "service": "mdi:speaker" - } + "play_on_speaker_hub": "mdi:speaker" } } diff --git a/homeassistant/components/yolink/lock.py b/homeassistant/components/yolink/lock.py index d675fd8cf06..177a8808de1 100644 --- a/homeassistant/components/yolink/lock.py +++ b/homeassistant/components/yolink/lock.py @@ -1,11 +1,11 @@ -"""YoLink Lock V1/V2.""" +"""YoLink Lock.""" from __future__ import annotations from typing import Any from yolink.client_request import ClientRequest -from yolink.const import ATTR_DEVICE_LOCK, ATTR_DEVICE_LOCK_V2 +from yolink.const import ATTR_DEVICE_LOCK from homeassistant.components.lock import LockEntity from homeassistant.config_entries import ConfigEntry @@ -27,8 +27,7 @@ async def async_setup_entry( entities = [ YoLinkLockEntity(config_entry, device_coordinator) for device_coordinator in device_coordinators.values() - if device_coordinator.device.device_type - in [ATTR_DEVICE_LOCK, ATTR_DEVICE_LOCK_V2] + if device_coordinator.device.device_type == ATTR_DEVICE_LOCK ] async_add_entities(entities) @@ -51,41 +50,21 @@ class YoLinkLockEntity(YoLinkEntity, LockEntity): def update_entity_state(self, state: dict[str, Any]) -> None: """Update HA Entity State.""" state_value = state.get("state") - if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2: - self._attr_is_locked = ( - state_value["lock"] == "locked" if state_value is not None else None - ) - else: - self._attr_is_locked = ( - state_value == "locked" if state_value is not None else None - ) + self._attr_is_locked = ( + state_value == "locked" if state_value is not None else None + ) self.async_write_ha_state() async def call_lock_state_change(self, state: str) -> None: """Call setState api to change lock state.""" - if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2: - await self.call_device( - ClientRequest("setState", {"state": {"lock": state}}) - ) - else: - await self.call_device(ClientRequest("setState", {"state": state})) + await self.call_device(ClientRequest("setState", {"state": state})) self._attr_is_locked = state == "lock" self.async_write_ha_state() async def async_lock(self, **kwargs: Any) -> None: """Lock device.""" - state_param = ( - "locked" - if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2 - else "lock" - ) - await self.call_lock_state_change(state_param) + await self.call_lock_state_change("lock") async def async_unlock(self, **kwargs: Any) -> None: """Unlock device.""" - state_param = ( - "unlocked" - if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2 - else "unlock" - ) - await self.call_lock_state_change(state_param) + await 
self.call_lock_state_change("unlock") diff --git a/homeassistant/components/yolink/manifest.json b/homeassistant/components/yolink/manifest.json index 78b553d7978..5353d5d5b8c 100644 --- a/homeassistant/components/yolink/manifest.json +++ b/homeassistant/components/yolink/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/yolink", "iot_class": "cloud_push", - "requirements": ["yolink-api==0.4.7"] + "requirements": ["yolink-api==0.4.4"] } diff --git a/homeassistant/components/yolink/sensor.py b/homeassistant/components/yolink/sensor.py index 8f263cdae07..6badeefbdb3 100644 --- a/homeassistant/components/yolink/sensor.py +++ b/homeassistant/components/yolink/sensor.py @@ -40,9 +40,7 @@ from homeassistant.const import ( PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, - UnitOfEnergy, UnitOfLength, - UnitOfPower, UnitOfTemperature, UnitOfVolume, ) @@ -50,21 +48,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import percentage -from .const import ( - DEV_MODEL_PLUG_YS6602_EC, - DEV_MODEL_PLUG_YS6602_UC, - DEV_MODEL_PLUG_YS6803_EC, - DEV_MODEL_PLUG_YS6803_UC, - DEV_MODEL_TH_SENSOR_YS8004_EC, - DEV_MODEL_TH_SENSOR_YS8004_UC, - DEV_MODEL_TH_SENSOR_YS8008_EC, - DEV_MODEL_TH_SENSOR_YS8008_UC, - DEV_MODEL_TH_SENSOR_YS8014_EC, - DEV_MODEL_TH_SENSOR_YS8014_UC, - DEV_MODEL_TH_SENSOR_YS8017_EC, - DEV_MODEL_TH_SENSOR_YS8017_UC, - DOMAIN, -) +from .const import DOMAIN from .coordinator import YoLinkCoordinator from .entity import YoLinkEntity @@ -124,24 +108,6 @@ MCU_DEV_TEMPERATURE_SENSOR = [ ATTR_DEVICE_CO_SMOKE_SENSOR, ] -NONE_HUMIDITY_SENSOR_MODELS = [ - DEV_MODEL_TH_SENSOR_YS8004_EC, - DEV_MODEL_TH_SENSOR_YS8004_UC, - DEV_MODEL_TH_SENSOR_YS8008_EC, - DEV_MODEL_TH_SENSOR_YS8008_UC, - DEV_MODEL_TH_SENSOR_YS8014_EC, - DEV_MODEL_TH_SENSOR_YS8014_UC, - DEV_MODEL_TH_SENSOR_YS8017_UC, - DEV_MODEL_TH_SENSOR_YS8017_EC, -] - -POWER_SUPPORT_MODELS = [ - DEV_MODEL_PLUG_YS6602_UC, - DEV_MODEL_PLUG_YS6602_EC, - DEV_MODEL_PLUG_YS6803_UC, - DEV_MODEL_PLUG_YS6803_EC, -] - def cvt_battery(val: int | None) -> int | None: """Convert battery to percentage.""" @@ -175,10 +141,7 @@ SENSOR_TYPES: tuple[YoLinkSensorEntityDescription, ...] = ( device_class=SensorDeviceClass.HUMIDITY, native_unit_of_measurement=PERCENTAGE, state_class=SensorStateClass.MEASUREMENT, - exists_fn=lambda device: ( - device.device_type in [ATTR_DEVICE_TH_SENSOR] - and device.device_model_name not in NONE_HUMIDITY_SENSOR_MODELS - ), + exists_fn=lambda device: device.device_type in [ATTR_DEVICE_TH_SENSOR], ), YoLinkSensorEntityDescription( key="temperature", @@ -247,32 +210,12 @@ SENSOR_TYPES: tuple[YoLinkSensorEntityDescription, ...] 
= ( key="meter_reading", translation_key="water_meter_reading", device_class=SensorDeviceClass.WATER, + icon="mdi:gauge", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, state_class=SensorStateClass.TOTAL_INCREASING, should_update_entity=lambda value: value is not None, - exists_fn=lambda device: ( - device.device_type in ATTR_DEVICE_WATER_METER_CONTROLLER - ), - ), - YoLinkSensorEntityDescription( - key="power", - translation_key="current_power", - device_class=SensorDeviceClass.POWER, - native_unit_of_measurement=UnitOfPower.WATT, - state_class=SensorStateClass.MEASUREMENT, - should_update_entity=lambda value: value is not None, - exists_fn=lambda device: device.device_model_name in POWER_SUPPORT_MODELS, - value=lambda value: value / 10 if value is not None else None, - ), - YoLinkSensorEntityDescription( - key="watt", - translation_key="power_consumption", - device_class=SensorDeviceClass.ENERGY, - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - state_class=SensorStateClass.TOTAL, - should_update_entity=lambda value: value is not None, - exists_fn=lambda device: device.device_model_name in POWER_SUPPORT_MODELS, - value=lambda value: value / 100 if value is not None else None, + exists_fn=lambda device: device.device_type + in ATTR_DEVICE_WATER_METER_CONTROLLER, ), ) diff --git a/homeassistant/components/yolink/strings.json b/homeassistant/components/yolink/strings.json index 2f9a9454502..bc8fb435e76 100644 --- a/homeassistant/components/yolink/strings.json +++ b/homeassistant/components/yolink/strings.json @@ -19,8 +19,7 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", - "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]" + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" }, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" @@ -52,12 +51,6 @@ "plug_4": { "name": "Plug 4" } }, "sensor": { - "current_power": { - "name": "Current power" - }, - "power_consumption": { - "name": "Power consumption" - }, "power_failure_alarm": { "name": "Power failure alarm", "state": { diff --git a/homeassistant/components/yolink/valve.py b/homeassistant/components/yolink/valve.py index d8c199697c3..a24ad7d385d 100644 --- a/homeassistant/components/yolink/valve.py +++ b/homeassistant/components/yolink/valve.py @@ -37,7 +37,7 @@ DEVICE_TYPES: tuple[YoLinkValveEntityDescription, ...] 
= ( key="valve_state", translation_key="meter_valve_state", device_class=ValveDeviceClass.WATER, - value=lambda value: value != "open" if value is not None else None, + value=lambda value: value == "closed" if value is not None else None, exists_fn=lambda device: device.device_type == ATTR_DEVICE_WATER_METER_CONTROLLER and not device.device_model_name.startswith(DEV_MODEL_WATER_METER_YS5007), diff --git a/homeassistant/components/youless/__init__.py b/homeassistant/components/youless/__init__.py index d475034cc9d..a968d052922 100644 --- a/homeassistant/components/youless/__init__.py +++ b/homeassistant/components/youless/__init__.py @@ -36,7 +36,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, - config_entry=entry, name="youless_gateway", update_method=async_update_data, update_interval=timedelta(seconds=10), diff --git a/homeassistant/components/youless/manifest.json b/homeassistant/components/youless/manifest.json index 1ccc8cda0ff..9a81de38388 100644 --- a/homeassistant/components/youless/manifest.json +++ b/homeassistant/components/youless/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/youless", "iot_class": "local_polling", "loggers": ["youless_api"], - "requirements": ["youless-api==2.1.2"] + "requirements": ["youless-api==2.1.0"] } diff --git a/homeassistant/components/youtube/config_flow.py b/homeassistant/components/youtube/config_flow.py index 48336422585..32b37b93eb2 100644 --- a/homeassistant/components/youtube/config_flow.py +++ b/homeassistant/components/youtube/config_flow.py @@ -12,10 +12,9 @@ from youtubeaio.types import AuthScope, ForbiddenError from youtubeaio.youtube import YouTube from homeassistant.config_entries import ( - SOURCE_REAUTH, ConfigEntry, ConfigFlowResult, - OptionsFlow, + OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.core import callback @@ -46,6 +45,7 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN + reauth_entry: ConfigEntry | None = None _youtube: YouTube | None = None @staticmethod @@ -54,7 +54,7 @@ class OAuth2FlowHandler( config_entry: ConfigEntry, ) -> YouTubeOptionsFlowHandler: """Get the options flow for this handler.""" - return YouTubeOptionsFlowHandler() + return YouTubeOptionsFlowHandler(config_entry) @property def logger(self) -> logging.Logger: @@ -75,6 +75,9 @@ class OAuth2FlowHandler( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" + self.reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -114,19 +117,22 @@ class OAuth2FlowHandler( self._title = own_channel.snippet.title self._data = data - await self.async_set_unique_id(own_channel.channel_id) - if self.source != SOURCE_REAUTH: + if not self.reauth_entry: + await self.async_set_unique_id(own_channel.channel_id) self._abort_if_unique_id_configured() return await self.async_step_channels() - self._abort_if_unique_id_mismatch( + if self.reauth_entry.unique_id == own_channel.channel_id: + self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) + await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) + return self.async_abort(reason="reauth_successful") + + return self.async_abort( reason="wrong_account", description_placeholders={"title": self._title}, ) - return 
self.async_update_reload_and_abort(self._get_reauth_entry(), data=data) - async def async_step_channels( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -159,7 +165,7 @@ class OAuth2FlowHandler( ) -class YouTubeOptionsFlowHandler(OptionsFlow): +class YouTubeOptionsFlowHandler(OptionsFlowWithConfigEntry): """YouTube Options flow handler.""" async def async_step_init( @@ -194,6 +200,6 @@ class YouTubeOptionsFlowHandler(OptionsFlow): ), } ), - self.config_entry.options, + self.options, ), ) diff --git a/homeassistant/components/youtube/const.py b/homeassistant/components/youtube/const.py index da5a554f364..a663c487d0a 100644 --- a/homeassistant/components/youtube/const.py +++ b/homeassistant/components/youtube/const.py @@ -15,7 +15,6 @@ AUTH = "auth" LOGGER = logging.getLogger(__package__) ATTR_TITLE = "title" -ATTR_TOTAL_VIEWS = "total_views" ATTR_LATEST_VIDEO = "latest_video" ATTR_SUBSCRIBER_COUNT = "subscriber_count" ATTR_DESCRIPTION = "description" diff --git a/homeassistant/components/youtube/coordinator.py b/homeassistant/components/youtube/coordinator.py index 0da480f1169..4599342c84d 100644 --- a/homeassistant/components/youtube/coordinator.py +++ b/homeassistant/components/youtube/coordinator.py @@ -22,7 +22,6 @@ from .const import ( ATTR_SUBSCRIBER_COUNT, ATTR_THUMBNAIL, ATTR_TITLE, - ATTR_TOTAL_VIEWS, ATTR_VIDEO_ID, CONF_CHANNELS, DOMAIN, @@ -69,7 +68,6 @@ class YouTubeDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): ATTR_ICON: channel.snippet.thumbnails.get_highest_quality().url, ATTR_LATEST_VIDEO: latest_video, ATTR_SUBSCRIBER_COUNT: channel.statistics.subscriber_count, - ATTR_TOTAL_VIEWS: channel.statistics.view_count, } except UnauthorizedError as err: raise ConfigEntryAuthFailed from err diff --git a/homeassistant/components/youtube/sensor.py b/homeassistant/components/youtube/sensor.py index 8832382508c..bc69f92e8fd 100644 --- a/homeassistant/components/youtube/sensor.py +++ b/homeassistant/components/youtube/sensor.py @@ -20,7 +20,6 @@ from .const import ( ATTR_SUBSCRIBER_COUNT, ATTR_THUMBNAIL, ATTR_TITLE, - ATTR_TOTAL_VIEWS, ATTR_VIDEO_ID, COORDINATOR, DOMAIN, @@ -59,15 +58,6 @@ SENSOR_TYPES = [ entity_picture_fn=lambda channel: channel[ATTR_ICON], attributes_fn=None, ), - YouTubeSensorEntityDescription( - key="views", - translation_key="views", - native_unit_of_measurement="views", - available_fn=lambda _: True, - value_fn=lambda channel: channel[ATTR_TOTAL_VIEWS], - entity_picture_fn=lambda channel: channel[ATTR_ICON], - attributes_fn=None, - ), ] diff --git a/homeassistant/components/youtube/strings.json b/homeassistant/components/youtube/strings.json index 78ca0532459..d664e2f15e7 100644 --- a/homeassistant/components/youtube/strings.json +++ b/homeassistant/components/youtube/strings.json @@ -10,8 +10,7 @@ "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", - "wrong_account": "Wrong account: please authenticate with the right account." 
+ "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" }, "error": { "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", @@ -47,8 +46,7 @@ "published_at": { "name": "Published at" } } }, - "subscribers": { "name": "Subscribers" }, - "views": { "name": "Views" } + "subscribers": { "name": "Subscribers" } } } } diff --git a/homeassistant/components/zabbix/__init__.py b/homeassistant/components/zabbix/__init__.py index d9bab3e6fe4..851af54da32 100644 --- a/homeassistant/components/zabbix/__init__.py +++ b/homeassistant/components/zabbix/__init__.py @@ -34,14 +34,13 @@ from homeassistant.helpers.entityfilter import ( ) from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN - _LOGGER = logging.getLogger(__name__) CONF_PUBLISH_STATES_HOST = "publish_states_host" DEFAULT_SSL = False DEFAULT_PATH = "zabbix" +DOMAIN = "zabbix" TIMEOUT = 5 RETRY_DELAY = 20 @@ -85,7 +84,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: try: zapi = ZabbixAPI(url=url, user=username, password=password) - _LOGGER.debug("Connected to Zabbix API Version %s", zapi.api_version()) + _LOGGER.info("Connected to Zabbix API Version %s", zapi.api_version()) except ZabbixAPIException as login_exception: _LOGGER.error("Unable to login to the Zabbix API: %s", login_exception) return False diff --git a/homeassistant/components/zabbix/const.py b/homeassistant/components/zabbix/const.py deleted file mode 100644 index 5f710381f38..00000000000 --- a/homeassistant/components/zabbix/const.py +++ /dev/null @@ -1,3 +0,0 @@ -"""Constants for Zabbix.""" - -DOMAIN = "zabbix" diff --git a/homeassistant/components/zabbix/sensor.py b/homeassistant/components/zabbix/sensor.py index f5d96f106cb..2187deb22e8 100644 --- a/homeassistant/components/zabbix/sensor.py +++ b/homeassistant/components/zabbix/sensor.py @@ -19,7 +19,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType -from .const import DOMAIN +from .. import zabbix _LOGGER = logging.getLogger(__name__) @@ -52,11 +52,11 @@ def setup_platform( """Set up the Zabbix sensor platform.""" sensors: list[ZabbixTriggerCountSensor] = [] - if not (zapi := hass.data[DOMAIN]): + if not (zapi := hass.data[zabbix.DOMAIN]): _LOGGER.error("Zabbix integration hasn't been loaded? zapi is None") return - _LOGGER.debug("Connected to Zabbix API Version %s", zapi.api_version()) + _LOGGER.info("Connected to Zabbix API Version %s", zapi.api_version()) # The following code seems overly complex. Need to think about this... 
if trigger_conf := config.get(_CONF_TRIGGERS): diff --git a/homeassistant/components/zeroconf/__init__.py b/homeassistant/components/zeroconf/__init__.py index 449c2ccef91..bbc89e77a76 100644 --- a/homeassistant/components/zeroconf/__init__.py +++ b/homeassistant/components/zeroconf/__init__.py @@ -33,8 +33,6 @@ from homeassistant.core import Event, HomeAssistant, callback from homeassistant.data_entry_flow import BaseServiceInfo from homeassistant.helpers import discovery_flow, instance_id import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.discovery_flow import DiscoveryKey -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.network import NoURLAvailableError, get_url from homeassistant.helpers.typing import ConfigType from homeassistant.loader import ( @@ -381,31 +379,11 @@ class ZeroconfDiscovery: self.zeroconf, types, handlers=[self.async_service_update] ) - async_dispatcher_connect( - self.hass, - config_entries.signal_discovered_config_entry_removed(DOMAIN), - self._handle_config_entry_removed, - ) - async def async_stop(self) -> None: """Cancel the service browser and stop processing the queue.""" if self.async_service_browser: await self.async_service_browser.async_cancel() - @callback - def _handle_config_entry_removed( - self, - entry: config_entries.ConfigEntry, - ) -> None: - """Handle config entry changes.""" - for discovery_key in entry.discovery_keys[DOMAIN]: - if discovery_key.version != 1: - continue - _type = discovery_key.key[0] - name = discovery_key.key[1] - _LOGGER.debug("Rediscover service %s.%s", _type, name) - self._async_service_update(self.zeroconf, _type, name) - def _async_dismiss_discoveries(self, name: str) -> None: """Dismiss all discoveries for the given name.""" for flow in self.hass.config_entries.flow.async_progress_by_init_data_type( @@ -430,20 +408,10 @@ class ZeroconfDiscovery: state_change, ) - if state_change is ServiceStateChange.Removed: + if state_change == ServiceStateChange.Removed: self._async_dismiss_discoveries(name) return - self._async_service_update(zeroconf, service_type, name) - - @callback - def _async_service_update( - self, - zeroconf: HaZeroconf, - service_type: str, - name: str, - ) -> None: - """Service state added or changed.""" try: async_service_info = AsyncServiceInfo(service_type, name) except BadTypeInNameException as ex: @@ -485,11 +453,6 @@ class ZeroconfDiscovery: return _LOGGER.debug("Discovered new device %s %s", name, info) props: dict[str, str | None] = info.properties - discovery_key = DiscoveryKey( - domain=DOMAIN, - key=(info.type, info.name), - version=1, - ) domain = None # If we can handle it as a HomeKit discovery, we do that here. 
@@ -504,7 +467,6 @@ class ZeroconfDiscovery: homekit_discovery.domain, {"source": config_entries.SOURCE_HOMEKIT}, info, - discovery_key=discovery_key, ) # Continue on here as homekit_controller # still needs to get updates on devices @@ -540,9 +502,7 @@ class ZeroconfDiscovery: continue matcher_domain = matcher[ATTR_DOMAIN] - # Create a type annotated regular dict since this is a hot path and creating - # a regular dict is slightly cheaper than calling ConfigFlowContext - context: config_entries.ConfigFlowContext = { + context = { "source": config_entries.SOURCE_ZEROCONF, } if domain: @@ -555,7 +515,6 @@ class ZeroconfDiscovery: matcher_domain, context, info, - discovery_key=discovery_key, ) diff --git a/homeassistant/components/zeroconf/manifest.json b/homeassistant/components/zeroconf/manifest.json index 98b09f1a251..0a76af3b9c2 100644 --- a/homeassistant/components/zeroconf/manifest.json +++ b/homeassistant/components/zeroconf/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["zeroconf"], "quality_scale": "internal", - "requirements": ["zeroconf==0.136.0"] + "requirements": ["zeroconf==0.132.2"] } diff --git a/homeassistant/components/zeroconf/usage.py b/homeassistant/components/zeroconf/usage.py index 8ddfdbd592d..b9d51cd3c36 100644 --- a/homeassistant/components/zeroconf/usage.py +++ b/homeassistant/components/zeroconf/usage.py @@ -4,7 +4,7 @@ from typing import Any import zeroconf -from homeassistant.helpers.frame import ReportBehavior, report_usage +from homeassistant.helpers.frame import report from .models import HaZeroconf @@ -16,14 +16,14 @@ def install_multiple_zeroconf_catcher(hass_zc: HaZeroconf) -> None: """ def new_zeroconf_new(self: zeroconf.Zeroconf, *k: Any, **kw: Any) -> HaZeroconf: - report_usage( + report( ( "attempted to create another Zeroconf instance. 
Please use the shared" " Zeroconf via await" " homeassistant.components.zeroconf.async_get_instance(hass)" ), exclude_integrations={"zeroconf"}, - core_behavior=ReportBehavior.LOG, + error_if_core=False, ) return hass_zc diff --git a/homeassistant/components/zerproc/light.py b/homeassistant/components/zerproc/light.py index ed6ed03ad27..71bb38dd80f 100644 --- a/homeassistant/components/zerproc/light.py +++ b/homeassistant/components/zerproc/light.py @@ -147,7 +147,7 @@ class ZerprocLight(LightEntity): self._attr_available = False return if not self.available: - _LOGGER.warning("Reconnected to %s", self._light.address) + _LOGGER.info("Reconnected to %s", self._light.address) self._attr_available = True self._attr_is_on = state.is_on hsv = color_util.color_RGB_to_hsv(*state.color) diff --git a/homeassistant/components/zeversolar/diagnostics.py b/homeassistant/components/zeversolar/diagnostics.py index 6e6ed262f51..b8901a7e793 100644 --- a/homeassistant/components/zeversolar/diagnostics.py +++ b/homeassistant/components/zeversolar/diagnostics.py @@ -31,7 +31,6 @@ async def async_get_config_entry_diagnostics( "num_inverters": data.num_inverters, "serial_number": data.serial_number, "pac": data.pac, - "energy_today": data.energy_today, "status": data.status.value, "meter_status": data.meter_status.value, } diff --git a/homeassistant/components/zeversolar/manifest.json b/homeassistant/components/zeversolar/manifest.json index 18bab34c04e..af197b3aa7c 100644 --- a/homeassistant/components/zeversolar/manifest.json +++ b/homeassistant/components/zeversolar/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/zeversolar", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["zeversolar==0.3.2"] + "requirements": ["zeversolar==0.3.1"] } diff --git a/homeassistant/components/zha/__init__.py b/homeassistant/components/zha/__init__.py index 1897b741d87..ed74cde47e1 100644 --- a/homeassistant/components/zha/__init__.py +++ b/homeassistant/components/zha/__init__.py @@ -1,25 +1,18 @@ """Support for Zigbee Home Automation devices.""" import contextlib +import copy import logging -from zoneinfo import ZoneInfo +import re import voluptuous as vol -from zha.application.const import BAUD_RATES, RadioType -from zha.application.gateway import Gateway -from zha.application.helpers import ZHAData -from zha.zigbee.device import get_device_automation_triggers +from zhaquirks import setup as setup_quirks from zigpy.config import CONF_DATABASE, CONF_DEVICE, CONF_DEVICE_PATH from zigpy.exceptions import NetworkSettingsInconsistent, TransientConnectionError from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONF_TYPE, - EVENT_CORE_CONFIG_UPDATE, - EVENT_HOMEASSISTANT_STOP, - Platform, -) -from homeassistant.core import Event, HomeAssistant, callback +from homeassistant.const import CONF_TYPE, EVENT_HOMEASSISTANT_STOP +from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr import homeassistant.helpers.config_validation as cv @@ -27,7 +20,9 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType from . 
import repairs, websocket_api -from .const import ( +from .core import ZHAGateway +from .core.const import ( + BAUD_RATES, CONF_BAUDRATE, CONF_CUSTOM_QUIRKS_PATH, CONF_DEVICE_CONFIG, @@ -38,14 +33,13 @@ from .const import ( CONF_ZIGPY, DATA_ZHA, DOMAIN, -) -from .helpers import ( + PLATFORMS, SIGNAL_ADD_ENTITIES, - HAZHAData, - ZHAGatewayProxy, - create_zha_config, - get_zha_data, + RadioType, ) +from .core.device import get_device_automation_triggers +from .core.discovery import GROUP_PROBE +from .core.helpers import ZHAData, get_zha_data from .radio_manager import ZhaRadioManager from .repairs.network_settings_inconsistent import warn_on_inconsistent_network_settings from .repairs.wrong_silabs_firmware import ( @@ -80,25 +74,6 @@ CONFIG_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) -PLATFORMS = ( - Platform.ALARM_CONTROL_PANEL, - Platform.BINARY_SENSOR, - Platform.BUTTON, - Platform.CLIMATE, - Platform.COVER, - Platform.DEVICE_TRACKER, - Platform.FAN, - Platform.LIGHT, - Platform.LOCK, - Platform.NUMBER, - Platform.SELECT, - Platform.SENSOR, - Platform.SIREN, - Platform.SWITCH, - Platform.UPDATE, -) - - # Zigbee definitions CENTICELSIUS = "C-100" @@ -108,22 +83,49 @@ _LOGGER = logging.getLogger(__name__) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up ZHA from config.""" - ha_zha_data = HAZHAData(yaml_config=config.get(DOMAIN, {})) - hass.data[DATA_ZHA] = ha_zha_data + zha_data = ZHAData() + zha_data.yaml_config = config.get(DOMAIN, {}) + hass.data[DATA_ZHA] = zha_data return True +def _clean_serial_port_path(path: str) -> str: + """Clean the serial port path, applying corrections where necessary.""" + + if path.startswith("socket://"): + path = path.strip() + + # Removes extraneous brackets from IP addresses (they don't parse in CPython 3.11.4) + if re.match(r"^socket://\[\d+\.\d+\.\d+\.\d+\]:\d+$", path): + path = path.replace("[", "").replace("]", "") + + return path + + async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up ZHA. Will automatically load components to support devices found on the network. 
""" - ha_zha_data: HAZHAData = get_zha_data(hass) - ha_zha_data.config_entry = config_entry - zha_lib_data: ZHAData = create_zha_config(hass, ha_zha_data) - zha_gateway = await Gateway.async_from_config(zha_lib_data) + # Remove brackets around IP addresses, this no longer works in CPython 3.11.4 + # This will be removed in 2023.11.0 + path = config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] + cleaned_path = _clean_serial_port_path(path) + data = copy.deepcopy(dict(config_entry.data)) + + if path != cleaned_path: + _LOGGER.debug("Cleaned serial port path %r -> %r", path, cleaned_path) + data[CONF_DEVICE][CONF_DEVICE_PATH] = cleaned_path + hass.config_entries.async_update_entry(config_entry, data=data) + + zha_data = get_zha_data(hass) + + if zha_data.yaml_config.get(CONF_ENABLE_QUIRKS, True): + await hass.async_add_import_executor_job( + setup_quirks, zha_data.yaml_config.get(CONF_CUSTOM_QUIRKS_PATH) + ) # Load and cache device trigger information early device_registry = dr.async_get(hass) @@ -139,16 +141,19 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b if dev_entry is None: continue - zha_lib_data.device_trigger_cache[dev_entry.id] = ( + zha_data.device_trigger_cache[dev_entry.id] = ( str(dev.ieee), get_device_automation_triggers(dev), ) - ha_zha_data.device_trigger_cache = zha_lib_data.device_trigger_cache - _LOGGER.debug("Trigger cache: %s", zha_lib_data.device_trigger_cache) + _LOGGER.debug("Trigger cache: %s", zha_data.device_trigger_cache) try: - await zha_gateway.async_initialize() + zha_gateway = await ZHAGateway.async_from_config( + hass=hass, + config=zha_data.yaml_config, + config_entry=config_entry, + ) except NetworkSettingsInconsistent as exc: await warn_on_inconsistent_network_settings( hass, @@ -180,8 +185,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b repairs.async_delete_blocking_issues(hass) - ha_zha_data.gateway_proxy = ZHAGatewayProxy(hass, config_entry, zha_gateway) - manufacturer = zha_gateway.state.node_info.manufacturer model = zha_gateway.state.node_info.model @@ -202,24 +205,13 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b websocket_api.async_load_api(hass) async def async_shutdown(_: Event) -> None: - """Handle shutdown tasks.""" - assert ha_zha_data.gateway_proxy is not None - await ha_zha_data.gateway_proxy.shutdown() + await zha_gateway.shutdown() config_entry.async_on_unload( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_shutdown) ) - @callback - def update_config(event: Event) -> None: - """Handle Core config update.""" - zha_gateway.config.local_timezone = ZoneInfo(hass.config.time_zone) - - config_entry.async_on_unload( - hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, update_config) - ) - - await ha_zha_data.gateway_proxy.async_initialize_devices_and_entities() + await zha_gateway.async_initialize_devices_and_entities() await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) async_dispatcher_send(hass, SIGNAL_ADD_ENTITIES) return True @@ -227,12 +219,11 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Unload ZHA config entry.""" - ha_zha_data = get_zha_data(hass) - ha_zha_data.config_entry = None + zha_data = get_zha_data(hass) - if ha_zha_data.gateway_proxy is not None: - await ha_zha_data.gateway_proxy.shutdown() - ha_zha_data.gateway_proxy = None + if zha_data.gateway is not None: + 
await zha_data.gateway.shutdown() + zha_data.gateway = None # clean up any remaining entity metadata # (entities that have been discovered but not yet added to HA) @@ -240,11 +231,15 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> # be in when we get here in failure cases with contextlib.suppress(KeyError): for platform in PLATFORMS: - del ha_zha_data.platforms[platform] + del zha_data.platforms[platform] + GROUP_PROBE.cleanup() websocket_api.async_unload_api(hass) - return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) + # our components don't have unload methods so no need to look at return values + await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) + + return True async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/zha/alarm_control_panel.py b/homeassistant/components/zha/alarm_control_panel.py index 734683e5497..7750e7f280d 100644 --- a/homeassistant/components/zha/alarm_control_panel.py +++ b/homeassistant/components/zha/alarm_control_panel.py @@ -3,43 +3,59 @@ from __future__ import annotations import functools +from typing import TYPE_CHECKING -from zha.application.platforms.alarm_control_panel.const import ( - AlarmState as ZHAAlarmState, -) +from zigpy.zcl.clusters.security import IasAce from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, - AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.const import ( + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_DISARMED, + STATE_ALARM_TRIGGERED, + Platform, +) +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .entity import ZHAEntity -from .helpers import ( +from .core import discovery +from .core.cluster_handlers.security import ( + SIGNAL_ALARM_TRIGGERED, + SIGNAL_ARMED_STATE_CHANGED, + IasAceClusterHandler, +) +from .core.const import ( + CLUSTER_HANDLER_IAS_ACE, + CONF_ALARM_ARM_REQUIRES_CODE, + CONF_ALARM_FAILED_TRIES, + CONF_ALARM_MASTER_CODE, SIGNAL_ADD_ENTITIES, - async_add_entities as zha_async_add_entities, - convert_zha_error_to_ha_error, - get_zha_data, + ZHA_ALARM_OPTIONS, +) +from .core.helpers import async_get_zha_config_value, get_zha_data +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity + +if TYPE_CHECKING: + from .core.device import ZHADevice + +STRICT_MATCH = functools.partial( + ZHA_ENTITIES.strict_match, Platform.ALARM_CONTROL_PANEL ) -ZHA_STATE_TO_ALARM_STATE_MAP = { - ZHAAlarmState.DISARMED.value: AlarmControlPanelState.DISARMED, - ZHAAlarmState.ARMED_HOME.value: AlarmControlPanelState.ARMED_HOME, - ZHAAlarmState.ARMED_AWAY.value: AlarmControlPanelState.ARMED_AWAY, - ZHAAlarmState.ARMED_NIGHT.value: AlarmControlPanelState.ARMED_NIGHT, - ZHAAlarmState.ARMED_VACATION.value: AlarmControlPanelState.ARMED_VACATION, - ZHAAlarmState.ARMED_CUSTOM_BYPASS.value: AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ZHAAlarmState.PENDING.value: AlarmControlPanelState.PENDING, - ZHAAlarmState.ARMING.value: AlarmControlPanelState.ARMING, - ZHAAlarmState.DISARMING.value: AlarmControlPanelState.DISARMING, - ZHAAlarmState.TRIGGERED.value: 
AlarmControlPanelState.TRIGGERED, - ZHAAlarmState.UNKNOWN.value: None, +IAS_ACE_STATE_MAP = { + IasAce.PanelStatus.Panel_Disarmed: STATE_ALARM_DISARMED, + IasAce.PanelStatus.Armed_Stay: STATE_ALARM_ARMED_HOME, + IasAce.PanelStatus.Armed_Night: STATE_ALARM_ARMED_NIGHT, + IasAce.PanelStatus.Armed_Away: STATE_ALARM_ARMED_AWAY, + IasAce.PanelStatus.In_Alarm: STATE_ALARM_TRIGGERED, } @@ -56,16 +72,14 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, - async_add_entities, - ZHAAlarmControlPanel, - entities_to_create, + discovery.async_add_entities, async_add_entities, entities_to_create ), ) config_entry.async_on_unload(unsub) -class ZHAAlarmControlPanel(ZHAEntity, AlarmControlPanelEntity): +@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_ACE) +class ZHAAlarmControlPanel(ZhaEntity, AlarmControlPanelEntity): """Entity for ZHA alarm control devices.""" _attr_translation_key: str = "alarm_control_panel" @@ -77,42 +91,68 @@ class ZHAAlarmControlPanel(ZHAEntity, AlarmControlPanelEntity): | AlarmControlPanelEntityFeature.TRIGGER ) + def __init__( + self, unique_id, zha_device: ZHADevice, cluster_handlers, **kwargs + ) -> None: + """Initialize the ZHA alarm control device.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + cfg_entry = zha_device.gateway.config_entry + self._cluster_handler: IasAceClusterHandler = cluster_handlers[0] + self._cluster_handler.panel_code = async_get_zha_config_value( + cfg_entry, ZHA_ALARM_OPTIONS, CONF_ALARM_MASTER_CODE, "1234" + ) + self._cluster_handler.code_required_arm_actions = async_get_zha_config_value( + cfg_entry, ZHA_ALARM_OPTIONS, CONF_ALARM_ARM_REQUIRES_CODE, False + ) + self._cluster_handler.max_invalid_tries = async_get_zha_config_value( + cfg_entry, ZHA_ALARM_OPTIONS, CONF_ALARM_FAILED_TRIES, 3 + ) + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + self.async_accept_signal( + self._cluster_handler, SIGNAL_ARMED_STATE_CHANGED, self.async_set_armed_mode + ) + self.async_accept_signal( + self._cluster_handler, SIGNAL_ALARM_TRIGGERED, self.async_alarm_trigger + ) + + @callback + def async_set_armed_mode(self) -> None: + """Set the entity state.""" + self.async_write_ha_state() + @property def code_arm_required(self) -> bool: """Whether the code is required for arm actions.""" - return self.entity_data.entity.code_arm_required + return self._cluster_handler.code_required_arm_actions - @convert_zha_error_to_ha_error async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - await self.entity_data.entity.async_alarm_disarm(code) + self._cluster_handler.arm(IasAce.ArmMode.Disarm, code, 0) self.async_write_ha_state() - @convert_zha_error_to_ha_error async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - await self.entity_data.entity.async_alarm_arm_home(code) + self._cluster_handler.arm(IasAce.ArmMode.Arm_Day_Home_Only, code, 0) self.async_write_ha_state() - @convert_zha_error_to_ha_error async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - await self.entity_data.entity.async_alarm_arm_away(code) + self._cluster_handler.arm(IasAce.ArmMode.Arm_All_Zones, code, 0) self.async_write_ha_state() - @convert_zha_error_to_ha_error async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command.""" - await 
self.entity_data.entity.async_alarm_arm_night(code) + self._cluster_handler.arm(IasAce.ArmMode.Arm_Night_Sleep_Only, code, 0) self.async_write_ha_state() - @convert_zha_error_to_ha_error async def async_alarm_trigger(self, code: str | None = None) -> None: """Send alarm trigger command.""" - await self.entity_data.entity.async_alarm_trigger(code) self.async_write_ha_state() @property - def alarm_state(self) -> AlarmControlPanelState | None: + def state(self) -> str | None: """Return the state of the entity.""" - return ZHA_STATE_TO_ALARM_STATE_MAP.get(self.entity_data.entity.state["state"]) + return IAS_ACE_STATE_MAP.get(self._cluster_handler.armed_state) diff --git a/homeassistant/components/zha/api.py b/homeassistant/components/zha/api.py index 60960a3e9fc..db0658eb632 100644 --- a/homeassistant/components/zha/api.py +++ b/homeassistant/components/zha/api.py @@ -4,14 +4,13 @@ from __future__ import annotations from typing import TYPE_CHECKING, Literal -from zha.application.const import RadioType from zigpy.backups import NetworkBackup from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH from zigpy.types import Channels from zigpy.util import pick_optimal_channel -from .const import CONF_RADIO_TYPE, DOMAIN -from .helpers import get_zha_data, get_zha_gateway +from .core.const import CONF_RADIO_TYPE, DOMAIN, RadioType +from .core.helpers import get_zha_gateway from .radio_manager import ZhaRadioManager if TYPE_CHECKING: @@ -23,12 +22,14 @@ def _get_config_entry(hass: HomeAssistant) -> ConfigEntry: """Find the singleton ZHA config entry, if one exists.""" # If ZHA is already running, use its config entry - zha_data = get_zha_data(hass) + try: + zha_gateway = get_zha_gateway(hass) + except ValueError: + pass + else: + return zha_gateway.config_entry - if zha_data.config_entry is not None: - return zha_data.config_entry - - # Otherwise, find an inactive one + # Otherwise, find one entries = hass.config_entries.async_entries(DOMAIN) if len(entries) != 1: diff --git a/homeassistant/components/zha/backup.py b/homeassistant/components/zha/backup.py index a3d9090eaba..e31ae09eeb6 100644 --- a/homeassistant/components/zha/backup.py +++ b/homeassistant/components/zha/backup.py @@ -4,7 +4,7 @@ import logging from homeassistant.core import HomeAssistant -from .helpers import get_zha_gateway +from .core.helpers import get_zha_gateway _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/zha/binary_sensor.py b/homeassistant/components/zha/binary_sensor.py index f45ebf0c5a5..bdd2fd03ca0 100644 --- a/homeassistant/components/zha/binary_sensor.py +++ b/homeassistant/components/zha/binary_sensor.py @@ -3,24 +3,58 @@ from __future__ import annotations import functools +import logging + +from zhaquirks.quirk_ids import DANFOSS_ALLY_THERMOSTAT +from zigpy.quirks.v2 import BinarySensorMetadata +import zigpy.types as t +from zigpy.zcl.clusters.general import OnOff +from zigpy.zcl.clusters.security import IasZone from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.const import STATE_ON, EntityCategory, Platform +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .entity import ZHAEntity -from .helpers import ( +from .core import discovery 
+from .core.const import ( + CLUSTER_HANDLER_ACCELEROMETER, + CLUSTER_HANDLER_BINARY_INPUT, + CLUSTER_HANDLER_HUE_OCCUPANCY, + CLUSTER_HANDLER_OCCUPANCY, + CLUSTER_HANDLER_ON_OFF, + CLUSTER_HANDLER_THERMOSTAT, + CLUSTER_HANDLER_ZONE, + ENTITY_METADATA, SIGNAL_ADD_ENTITIES, - EntityData, - async_add_entities as zha_async_add_entities, - get_zha_data, + SIGNAL_ATTR_UPDATED, ) +from .core.helpers import get_zha_data, validate_device_class +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity + +# Zigbee Cluster Library Zone Type to Home Assistant device class +IAS_ZONE_CLASS_MAPPING = { + IasZone.ZoneType.Motion_Sensor: BinarySensorDeviceClass.MOTION, + IasZone.ZoneType.Contact_Switch: BinarySensorDeviceClass.OPENING, + IasZone.ZoneType.Fire_Sensor: BinarySensorDeviceClass.SMOKE, + IasZone.ZoneType.Water_Sensor: BinarySensorDeviceClass.MOISTURE, + IasZone.ZoneType.Carbon_Monoxide_Sensor: BinarySensorDeviceClass.GAS, + IasZone.ZoneType.Vibration_Movement_Sensor: BinarySensorDeviceClass.VIBRATION, +} + +STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.BINARY_SENSOR) +MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.BINARY_SENSOR) +CONFIG_DIAGNOSTIC_MATCH = functools.partial( + ZHA_ENTITIES.config_diagnostic_match, Platform.BINARY_SENSOR +) + +_LOGGER = logging.getLogger(__name__) async def async_setup_entry( @@ -36,24 +70,312 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, async_add_entities, BinarySensor, entities_to_create + discovery.async_add_entities, async_add_entities, entities_to_create ), ) config_entry.async_on_unload(unsub) -class BinarySensor(ZHAEntity, BinarySensorEntity): +class BinarySensor(ZhaEntity, BinarySensorEntity): """ZHA BinarySensor.""" - def __init__(self, entity_data: EntityData) -> None: + _attribute_name: str + + def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs) -> None: """Initialize the ZHA binary sensor.""" - super().__init__(entity_data) - if self.entity_data.entity.info_object.device_class is not None: - self._attr_device_class = BinarySensorDeviceClass( - self.entity_data.entity.info_object.device_class + self._cluster_handler = cluster_handlers[0] + if ENTITY_METADATA in kwargs: + self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + + def _init_from_quirks_metadata(self, entity_metadata: BinarySensorMetadata) -> None: + """Init this entity from the quirks metadata.""" + super()._init_from_quirks_metadata(entity_metadata) + self._attribute_name = entity_metadata.attribute_name + if entity_metadata.device_class is not None: + self._attr_device_class = validate_device_class( + BinarySensorDeviceClass, + entity_metadata.device_class, + Platform.BINARY_SENSOR.value, + _LOGGER, ) + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + self.async_accept_signal( + self._cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state + ) + @property def is_on(self) -> bool: """Return True if the switch is on based on the state machine.""" - return self.entity_data.entity.is_on + raw_state = self._cluster_handler.cluster.get(self._attribute_name) + if raw_state is None: + return False + return self.parse(raw_state) + + @callback + def async_set_state(self, attr_id, attr_name, value): + """Set the state.""" + self.async_write_ha_state() + + @staticmethod + def parse(value: bool | int) -> 
bool: + """Parse the raw attribute into a bool state.""" + return bool(value) + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ACCELEROMETER) +class Accelerometer(BinarySensor): + """ZHA BinarySensor.""" + + _attribute_name = "acceleration" + _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.MOVING + _attr_translation_key: str = "accelerometer" + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_OCCUPANCY) +class Occupancy(BinarySensor): + """ZHA BinarySensor.""" + + _attribute_name = "occupancy" + _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.OCCUPANCY + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_HUE_OCCUPANCY) +class HueOccupancy(Occupancy): + """ZHA Hue occupancy.""" + + _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.OCCUPANCY + + +@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ON_OFF) +class Opening(BinarySensor): + """ZHA OnOff BinarySensor.""" + + _attribute_name = "on_off" + _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.OPENING + + # Client/out cluster attributes aren't stored in the zigpy database, but are properly stored in the runtime cache. + # We need to manually restore the last state from the sensor state to the runtime cache for now. + @callback + def async_restore_last_state(self, last_state): + """Restore previous state to zigpy cache.""" + self._cluster_handler.cluster.update_attribute( + OnOff.attributes_by_name[self._attribute_name].id, + t.Bool.true if last_state.state == STATE_ON else t.Bool.false, + ) + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_BINARY_INPUT) +class BinaryInput(BinarySensor): + """ZHA BinarySensor.""" + + _attribute_name = "present_value" + _attr_translation_key: str = "binary_input" + + +@STRICT_MATCH( + cluster_handler_names=CLUSTER_HANDLER_ON_OFF, + manufacturers="IKEA of Sweden", + models=lambda model: isinstance(model, str) + and model is not None + and model.find("motion") != -1, +) +@STRICT_MATCH( + cluster_handler_names=CLUSTER_HANDLER_ON_OFF, + manufacturers="Philips", + models={"SML001", "SML002"}, +) +class Motion(Opening): + """ZHA OnOff BinarySensor with motion device class.""" + + _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.MOTION + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ZONE) +class IASZone(BinarySensor): + """ZHA IAS BinarySensor.""" + + _attribute_name = "zone_status" + + @property + def translation_key(self) -> str | None: + """Return the name of the sensor.""" + zone_type = self._cluster_handler.cluster.get("zone_type") + if zone_type in IAS_ZONE_CLASS_MAPPING: + return None + return "ias_zone" + + @property + def device_class(self) -> BinarySensorDeviceClass | None: + """Return device class from component DEVICE_CLASSES.""" + zone_type = self._cluster_handler.cluster.get("zone_type") + return IAS_ZONE_CLASS_MAPPING.get(zone_type) + + @staticmethod + def parse(value: bool | int) -> bool: + """Parse the raw attribute into a bool state.""" + return BinarySensor.parse(value & 3) # use only bit 0 and 1 for alarm state + + +@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ZONE, models={"WL4200", "WL4200S"}) +class SinopeLeakStatus(BinarySensor): + """Sinope water leak sensor.""" + + _attribute_name = "leak_status" + _attr_device_class = BinarySensorDeviceClass.MOISTURE + + +@MULTI_MATCH( + cluster_handler_names="tuya_manufacturer", + manufacturers={ + "_TZE200_htnnfasr", + }, +) +class FrostLock(BinarySensor): + """ZHA BinarySensor.""" + + _attribute_name = 
"frost_lock" + _unique_id_suffix = "frost_lock" + _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.LOCK + _attr_translation_key: str = "frost_lock" + + +@MULTI_MATCH(cluster_handler_names="ikea_airpurifier") +class ReplaceFilter(BinarySensor): + """ZHA BinarySensor.""" + + _attribute_name = "replace_filter" + _unique_id_suffix = "replace_filter" + _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.PROBLEM + _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC + _attr_translation_key: str = "replace_filter" + + +@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) +class AqaraPetFeederErrorDetected(BinarySensor): + """ZHA aqara pet feeder error detected binary sensor.""" + + _attribute_name = "error_detected" + _unique_id_suffix = "error_detected" + _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.PROBLEM + + +@MULTI_MATCH( + cluster_handler_names="opple_cluster", + models={"lumi.plug.mmeu01", "lumi.plug.maeu01"}, +) +class XiaomiPlugConsumerConnected(BinarySensor): + """ZHA Xiaomi plug consumer connected binary sensor.""" + + _attribute_name = "consumer_connected" + _unique_id_suffix = "consumer_connected" + _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.PLUG + _attr_translation_key: str = "consumer_connected" + + +@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"}) +class AqaraThermostatWindowOpen(BinarySensor): + """ZHA Aqara thermostat window open binary sensor.""" + + _attribute_name = "window_open" + _unique_id_suffix = "window_open" + _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.WINDOW + + +@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"}) +class AqaraThermostatValveAlarm(BinarySensor): + """ZHA Aqara thermostat valve alarm binary sensor.""" + + _attribute_name = "valve_alarm" + _unique_id_suffix = "valve_alarm" + _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.PROBLEM + _attr_translation_key: str = "valve_alarm" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} +) +class AqaraThermostatCalibrated(BinarySensor): + """ZHA Aqara thermostat calibrated binary sensor.""" + + _attribute_name = "calibrated" + _unique_id_suffix = "calibrated" + _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC + _attr_translation_key: str = "calibrated" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} +) +class AqaraThermostatExternalSensor(BinarySensor): + """ZHA Aqara thermostat external sensor binary sensor.""" + + _attribute_name = "sensor" + _unique_id_suffix = "sensor" + _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC + _attr_translation_key: str = "external_sensor" + + +@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"}) +class AqaraLinkageAlarmState(BinarySensor): + """ZHA Aqara linkage alarm state binary sensor.""" + + _attribute_name = "linkage_alarm_state" + _unique_id_suffix = "linkage_alarm_state" + _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.SMOKE + _attr_translation_key: str = "linkage_alarm_state" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.curtain.agl001"} +) +class AqaraE1CurtainMotorOpenedByHandBinarySensor(BinarySensor): + """Opened by hand binary sensor.""" + + _unique_id_suffix = "hand_open" + _attribute_name = 
"hand_open" + _attr_translation_key = "hand_open" + _attr_entity_category = EntityCategory.DIAGNOSTIC + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossMountingModeActive(BinarySensor): + """Danfoss TRV proprietary attribute exposing whether in mounting mode.""" + + _unique_id_suffix = "mounting_mode_active" + _attribute_name = "mounting_mode_active" + _attr_translation_key: str = "mounting_mode_active" + _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.OPENING + _attr_entity_category = EntityCategory.DIAGNOSTIC + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossHeatRequired(BinarySensor): + """Danfoss TRV proprietary attribute exposing whether heat is required.""" + + _unique_id_suffix = "heat_required" + _attribute_name = "heat_required" + _attr_translation_key: str = "heat_required" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossPreheatStatus(BinarySensor): + """Danfoss TRV proprietary attribute exposing whether in pre-heating mode.""" + + _unique_id_suffix = "preheat_status" + _attribute_name = "preheat_status" + _attr_translation_key: str = "preheat_status" + _attr_entity_registry_enabled_default = False + _attr_entity_category = EntityCategory.DIAGNOSTIC diff --git a/homeassistant/components/zha/button.py b/homeassistant/components/zha/button.py index ecd5cd51f61..33102062443 100644 --- a/homeassistant/components/zha/button.py +++ b/homeassistant/components/zha/button.py @@ -4,22 +4,33 @@ from __future__ import annotations import functools import logging +from typing import TYPE_CHECKING, Any, Self + +from zigpy.quirks.v2 import WriteAttributeButtonMetadata, ZCLCommandButtonMetadata from homeassistant.components.button import ButtonDeviceClass, ButtonEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform +from homeassistant.const import EntityCategory, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .entity import ZHAEntity -from .helpers import ( - SIGNAL_ADD_ENTITIES, - EntityData, - async_add_entities as zha_async_add_entities, - convert_zha_error_to_ha_error, - get_zha_data, +from .core import discovery +from .core.const import CLUSTER_HANDLER_IDENTIFY, ENTITY_METADATA, SIGNAL_ADD_ENTITIES +from .core.helpers import get_zha_data +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity + +if TYPE_CHECKING: + from .core.cluster_handlers import ClusterHandler + from .core.device import ZHADevice + + +MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.BUTTON) +CONFIG_DIAGNOSTIC_MATCH = functools.partial( + ZHA_ENTITIES.config_diagnostic_match, Platform.BUTTON ) +DEFAULT_DURATION = 5 # seconds _LOGGER = logging.getLogger(__name__) @@ -37,24 +48,172 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, async_add_entities, ZHAButton, entities_to_create + discovery.async_add_entities, + async_add_entities, + entities_to_create, ), ) config_entry.async_on_unload(unsub) -class ZHAButton(ZHAEntity, ButtonEntity): +class ZHAButton(ZhaEntity, ButtonEntity): """Defines a ZHA button.""" - def __init__(self, entity_data: EntityData) -> None: 
-        """Initialize the ZHA binary sensor."""
-        super().__init__(entity_data)
-        if self.entity_data.entity.info_object.device_class is not None:
-            self._attr_device_class = ButtonDeviceClass(
-                self.entity_data.entity.info_object.device_class
-            )
+    _command_name: str
+    _args: list[Any]
+    _kwargs: dict[str, Any]
+
+    def __init__(
+        self,
+        unique_id: str,
+        zha_device: ZHADevice,
+        cluster_handlers: list[ClusterHandler],
+        **kwargs: Any,
+    ) -> None:
+        """Init this button."""
+        self._cluster_handler: ClusterHandler = cluster_handlers[0]
+        if ENTITY_METADATA in kwargs:
+            self._init_from_quirks_metadata(kwargs[ENTITY_METADATA])
+        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)
+
+    def _init_from_quirks_metadata(
+        self, entity_metadata: ZCLCommandButtonMetadata
+    ) -> None:
+        """Init this entity from the quirks metadata."""
+        super()._init_from_quirks_metadata(entity_metadata)
+        self._command_name = entity_metadata.command_name
+        self._args = entity_metadata.args
+        self._kwargs = entity_metadata.kwargs
+
+    def get_args(self) -> list[Any]:
+        """Return the arguments to use in the command."""
+        return list(self._args) if self._args else []
+
+    def get_kwargs(self) -> dict[str, Any]:
+        """Return the keyword arguments to use in the command."""
+        return self._kwargs
 
-    @convert_zha_error_to_ha_error
     async def async_press(self) -> None:
         """Send out an update command."""
-        await self.entity_data.entity.async_press()
+        command = getattr(self._cluster_handler, self._command_name)
+        arguments = self.get_args() or []
+        kwargs = self.get_kwargs() or {}
+        await command(*arguments, **kwargs)
+
+
+@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_IDENTIFY)
+class ZHAIdentifyButton(ZHAButton):
+    """Defines a ZHA identify button."""
+
+    @classmethod
+    def create_entity(
+        cls,
+        unique_id: str,
+        zha_device: ZHADevice,
+        cluster_handlers: list[ClusterHandler],
+        **kwargs: Any,
+    ) -> Self | None:
+        """Entity Factory.
+ + Return entity if it is a supported configuration, otherwise return None + """ + if ZHA_ENTITIES.prevent_entity_creation( + Platform.BUTTON, zha_device.ieee, CLUSTER_HANDLER_IDENTIFY + ): + return None + return cls(unique_id, zha_device, cluster_handlers, **kwargs) + + _attr_device_class = ButtonDeviceClass.IDENTIFY + _attr_entity_category = EntityCategory.DIAGNOSTIC + _command_name = "identify" + _kwargs = {} + _args = [DEFAULT_DURATION] + + +class ZHAAttributeButton(ZhaEntity, ButtonEntity): + """Defines a ZHA button, which writes a value to an attribute.""" + + _attribute_name: str + _attribute_value: Any = None + + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Init this button.""" + self._cluster_handler: ClusterHandler = cluster_handlers[0] + if ENTITY_METADATA in kwargs: + self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + + def _init_from_quirks_metadata( + self, entity_metadata: WriteAttributeButtonMetadata + ) -> None: + """Init this entity from the quirks metadata.""" + super()._init_from_quirks_metadata(entity_metadata) + self._attribute_name = entity_metadata.attribute_name + self._attribute_value = entity_metadata.attribute_value + + async def async_press(self) -> None: + """Write attribute with defined value.""" + await self._cluster_handler.write_attributes_safe( + {self._attribute_name: self._attribute_value} + ) + self.async_write_ha_state() + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="tuya_manufacturer", + manufacturers={ + "_TZE200_htnnfasr", + }, +) +class FrostLockResetButton(ZHAAttributeButton): + """Defines a ZHA frost lock reset button.""" + + _unique_id_suffix = "reset_frost_lock" + _attribute_name = "frost_lock_reset" + _attribute_value = 0 + _attr_device_class = ButtonDeviceClass.RESTART + _attr_entity_category = EntityCategory.CONFIG + _attr_translation_key = "reset_frost_lock" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.motion.ac01"} +) +class NoPresenceStatusResetButton(ZHAAttributeButton): + """Defines a ZHA no presence status reset button.""" + + _unique_id_suffix = "reset_no_presence_status" + _attribute_name = "reset_no_presence_status" + _attribute_value = 1 + _attr_device_class = ButtonDeviceClass.RESTART + _attr_entity_category = EntityCategory.CONFIG + _attr_translation_key = "reset_no_presence_status" + + +@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) +class AqaraPetFeederFeedButton(ZHAAttributeButton): + """Defines a feed button for the aqara c1 pet feeder.""" + + _unique_id_suffix = "feeding" + _attribute_name = "feeding" + _attribute_value = 1 + _attr_translation_key = "feed" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} +) +class AqaraSelfTestButton(ZHAAttributeButton): + """Defines a ZHA self-test button for Aqara smoke sensors.""" + + _unique_id_suffix = "self_test" + _attribute_name = "self_test" + _attribute_value = 1 + _attr_entity_category = EntityCategory.CONFIG + _attr_translation_key = "self_test" diff --git a/homeassistant/components/zha/climate.py b/homeassistant/components/zha/climate.py index fcf5afb5ac5..61c5f28ca8f 100644 --- a/homeassistant/components/zha/climate.py +++ b/homeassistant/components/zha/climate.py @@ -6,62 +6,109 @@ at https://home-assistant.io/components/zha.climate/ from __future__ import 
annotations -from collections.abc import Mapping +from datetime import datetime, timedelta import functools +from random import randint from typing import Any -from zha.application.platforms.climate.const import ( - ClimateEntityFeature as ZHAClimateEntityFeature, - HVACAction as ZHAHVACAction, - HVACMode as ZHAHVACMode, -) +from zigpy.zcl.clusters.hvac import Fan as F, Thermostat as T from homeassistant.components.climate import ( ATTR_HVAC_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, - ATTR_TEMPERATURE, + FAN_AUTO, + FAN_ON, + PRESET_AWAY, + PRESET_BOOST, + PRESET_COMFORT, + PRESET_ECO, + PRESET_NONE, ClimateEntity, ClimateEntityFeature, HVACAction, HVACMode, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import PRECISION_TENTHS, Platform, UnitOfTemperature +from homeassistant.const import ( + ATTR_TEMPERATURE, + PRECISION_TENTHS, + Platform, + UnitOfTemperature, +) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.event import async_track_time_interval +import homeassistant.util.dt as dt_util -from .entity import ZHAEntity -from .helpers import ( +from .core import discovery +from .core.const import ( + CLUSTER_HANDLER_FAN, + CLUSTER_HANDLER_THERMOSTAT, + PRESET_COMPLEX, + PRESET_SCHEDULE, + PRESET_TEMP_MANUAL, SIGNAL_ADD_ENTITIES, - EntityData, - async_add_entities as zha_async_add_entities, - convert_zha_error_to_ha_error, - exclude_none_values, - get_zha_data, + SIGNAL_ATTR_UPDATED, ) +from .core.helpers import get_zha_data +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity -ZHA_TO_HA_HVAC_MODE = { - ZHAHVACMode.OFF: HVACMode.OFF, - ZHAHVACMode.AUTO: HVACMode.AUTO, - ZHAHVACMode.HEAT: HVACMode.HEAT, - ZHAHVACMode.COOL: HVACMode.COOL, - ZHAHVACMode.HEAT_COOL: HVACMode.HEAT_COOL, - ZHAHVACMode.DRY: HVACMode.DRY, - ZHAHVACMode.FAN_ONLY: HVACMode.FAN_ONLY, +ATTR_SYS_MODE = "system_mode" +ATTR_RUNNING_MODE = "running_mode" +ATTR_SETPT_CHANGE_SRC = "setpoint_change_source" +ATTR_SETPT_CHANGE_AMT = "setpoint_change_amount" +ATTR_OCCUPANCY = "occupancy" +ATTR_PI_COOLING_DEMAND = "pi_cooling_demand" +ATTR_PI_HEATING_DEMAND = "pi_heating_demand" +ATTR_OCCP_COOL_SETPT = "occupied_cooling_setpoint" +ATTR_OCCP_HEAT_SETPT = "occupied_heating_setpoint" +ATTR_UNOCCP_HEAT_SETPT = "unoccupied_heating_setpoint" +ATTR_UNOCCP_COOL_SETPT = "unoccupied_cooling_setpoint" + + +STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.CLIMATE) +MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.CLIMATE) +RUNNING_MODE = {0x00: HVACMode.OFF, 0x03: HVACMode.COOL, 0x04: HVACMode.HEAT} + +SEQ_OF_OPERATION = { + 0x00: [HVACMode.OFF, HVACMode.COOL], # cooling only + 0x01: [HVACMode.OFF, HVACMode.COOL], # cooling with reheat + 0x02: [HVACMode.OFF, HVACMode.HEAT], # heating only + 0x03: [HVACMode.OFF, HVACMode.HEAT], # heating with reheat + # cooling and heating 4-pipes + 0x04: [HVACMode.OFF, HVACMode.HEAT_COOL, HVACMode.COOL, HVACMode.HEAT], + # cooling and heating 4-pipes + 0x05: [HVACMode.OFF, HVACMode.HEAT_COOL, HVACMode.COOL, HVACMode.HEAT], + 0x06: [HVACMode.COOL, HVACMode.HEAT, HVACMode.OFF], # centralite specific + 0x07: [HVACMode.HEAT_COOL, HVACMode.OFF], # centralite specific } -ZHA_TO_HA_HVAC_ACTION = { - ZHAHVACAction.OFF: HVACAction.OFF, - ZHAHVACAction.HEATING: HVACAction.HEATING, - ZHAHVACAction.COOLING: HVACAction.COOLING, - 
ZHAHVACAction.DRYING: HVACAction.DRYING, - ZHAHVACAction.IDLE: HVACAction.IDLE, - ZHAHVACAction.FAN: HVACAction.FAN, - ZHAHVACAction.PREHEATING: HVACAction.PREHEATING, +HVAC_MODE_2_SYSTEM = { + HVACMode.OFF: T.SystemMode.Off, + HVACMode.HEAT_COOL: T.SystemMode.Auto, + HVACMode.COOL: T.SystemMode.Cool, + HVACMode.HEAT: T.SystemMode.Heat, + HVACMode.FAN_ONLY: T.SystemMode.Fan_only, + HVACMode.DRY: T.SystemMode.Dry, } +SYSTEM_MODE_2_HVAC = { + T.SystemMode.Off: HVACMode.OFF, + T.SystemMode.Auto: HVACMode.HEAT_COOL, + T.SystemMode.Cool: HVACMode.COOL, + T.SystemMode.Heat: HVACMode.HEAT, + T.SystemMode.Emergency_Heating: HVACMode.HEAT, + T.SystemMode.Pre_cooling: HVACMode.COOL, # this is 'precooling'. is it the same? + T.SystemMode.Fan_only: HVACMode.FAN_ONLY, + T.SystemMode.Dry: HVACMode.DRY, + T.SystemMode.Sleep: HVACMode.OFF, +} + +ZCL_TEMP = 100 + async def async_setup_entry( hass: HomeAssistant, @@ -71,166 +118,708 @@ async def async_setup_entry( """Set up the Zigbee Home Automation sensor from config entry.""" zha_data = get_zha_data(hass) entities_to_create = zha_data.platforms[Platform.CLIMATE] - unsub = async_dispatcher_connect( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, async_add_entities, Thermostat, entities_to_create + discovery.async_add_entities, async_add_entities, entities_to_create ), ) config_entry.async_on_unload(unsub) -class Thermostat(ZHAEntity, ClimateEntity): +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + aux_cluster_handlers=CLUSTER_HANDLER_FAN, + stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, +) +class Thermostat(ZhaEntity, ClimateEntity): """Representation of a ZHA Thermostat device.""" + DEFAULT_MAX_TEMP = 35 + DEFAULT_MIN_TEMP = 7 + _attr_precision = PRECISION_TENTHS _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key: str = "thermostat" _enable_turn_on_off_backwards_compatibility = False - def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: - """Initialize the ZHA thermostat entity.""" - super().__init__(entity_data, **kwargs) - self._attr_hvac_modes = [ - ZHA_TO_HA_HVAC_MODE[mode] for mode in self.entity_data.entity.hvac_modes - ] - self._attr_hvac_mode = ZHA_TO_HA_HVAC_MODE.get( - self.entity_data.entity.hvac_mode + def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): + """Initialize ZHA Thermostat instance.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._thrm = self.cluster_handlers.get(CLUSTER_HANDLER_THERMOSTAT) + self._preset = PRESET_NONE + self._presets = [] + self._supported_flags = ( + ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TURN_ON ) - self._attr_hvac_action = ZHA_TO_HA_HVAC_ACTION.get( - self.entity_data.entity.hvac_action - ) - - features: ClimateEntityFeature = ClimateEntityFeature(0) - zha_features: ZHAClimateEntityFeature = ( - self.entity_data.entity.supported_features - ) - - if ZHAClimateEntityFeature.TARGET_TEMPERATURE in zha_features: - features |= ClimateEntityFeature.TARGET_TEMPERATURE - if ZHAClimateEntityFeature.TARGET_TEMPERATURE_RANGE in zha_features: - features |= ClimateEntityFeature.TARGET_TEMPERATURE_RANGE - if ZHAClimateEntityFeature.TARGET_HUMIDITY in zha_features: - features |= ClimateEntityFeature.TARGET_HUMIDITY - if ZHAClimateEntityFeature.PRESET_MODE in zha_features: - features |= ClimateEntityFeature.PRESET_MODE - if ZHAClimateEntityFeature.FAN_MODE in zha_features: - features |= ClimateEntityFeature.FAN_MODE - if 
ZHAClimateEntityFeature.SWING_MODE in zha_features: - features |= ClimateEntityFeature.SWING_MODE - if ZHAClimateEntityFeature.TURN_OFF in zha_features: - features |= ClimateEntityFeature.TURN_OFF - if ZHAClimateEntityFeature.TURN_ON in zha_features: - features |= ClimateEntityFeature.TURN_ON - - self._attr_supported_features = features + self._fan = self.cluster_handlers.get(CLUSTER_HANDLER_FAN) @property - def extra_state_attributes(self) -> Mapping[str, Any] | None: - """Return entity specific state attributes.""" - state = self.entity_data.entity.state - - return exclude_none_values( - { - "occupancy": state.get("occupancy"), - "occupied_cooling_setpoint": state.get("occupied_cooling_setpoint"), - "occupied_heating_setpoint": state.get("occupied_heating_setpoint"), - "pi_cooling_demand": state.get("pi_cooling_demand"), - "pi_heating_demand": state.get("pi_heating_demand"), - "system_mode": state.get("system_mode"), - "unoccupied_cooling_setpoint": state.get("unoccupied_cooling_setpoint"), - "unoccupied_heating_setpoint": state.get("unoccupied_heating_setpoint"), - } - ) - - @property - def current_temperature(self) -> float | None: + def current_temperature(self): """Return the current temperature.""" - return self.entity_data.entity.current_temperature + if self._thrm.local_temperature is None: + return None + return self._thrm.local_temperature / ZCL_TEMP + + @property + def extra_state_attributes(self): + """Return device specific state attributes.""" + data = {} + if self.hvac_mode: + mode = SYSTEM_MODE_2_HVAC.get(self._thrm.system_mode, "unknown") + data[ATTR_SYS_MODE] = f"[{self._thrm.system_mode}]/{mode}" + if self._thrm.occupancy is not None: + data[ATTR_OCCUPANCY] = self._thrm.occupancy + if self._thrm.occupied_cooling_setpoint is not None: + data[ATTR_OCCP_COOL_SETPT] = self._thrm.occupied_cooling_setpoint + if self._thrm.occupied_heating_setpoint is not None: + data[ATTR_OCCP_HEAT_SETPT] = self._thrm.occupied_heating_setpoint + if self._thrm.pi_heating_demand is not None: + data[ATTR_PI_HEATING_DEMAND] = self._thrm.pi_heating_demand + if self._thrm.pi_cooling_demand is not None: + data[ATTR_PI_COOLING_DEMAND] = self._thrm.pi_cooling_demand + + unoccupied_cooling_setpoint = self._thrm.unoccupied_cooling_setpoint + if unoccupied_cooling_setpoint is not None: + data[ATTR_UNOCCP_COOL_SETPT] = unoccupied_cooling_setpoint + + unoccupied_heating_setpoint = self._thrm.unoccupied_heating_setpoint + if unoccupied_heating_setpoint is not None: + data[ATTR_UNOCCP_HEAT_SETPT] = unoccupied_heating_setpoint + return data @property def fan_mode(self) -> str | None: """Return current FAN mode.""" - return self.entity_data.entity.fan_mode + if self._thrm.running_state is None: + return FAN_AUTO + + if self._thrm.running_state & ( + T.RunningState.Fan_State_On + | T.RunningState.Fan_2nd_Stage_On + | T.RunningState.Fan_3rd_Stage_On + ): + return FAN_ON + return FAN_AUTO @property def fan_modes(self) -> list[str] | None: """Return supported FAN modes.""" - return self.entity_data.entity.fan_modes + if not self._fan: + return None + return [FAN_AUTO, FAN_ON] + + @property + def hvac_action(self) -> HVACAction | None: + """Return the current HVAC action.""" + if ( + self._thrm.pi_heating_demand is None + and self._thrm.pi_cooling_demand is None + ): + return self._rm_rs_action + return self._pi_demand_action + + @property + def _rm_rs_action(self) -> HVACAction | None: + """Return the current HVAC action based on running mode and running state.""" + + if (running_state := self._thrm.running_state) 
is None: + return None + if running_state & ( + T.RunningState.Heat_State_On | T.RunningState.Heat_2nd_Stage_On + ): + return HVACAction.HEATING + if running_state & ( + T.RunningState.Cool_State_On | T.RunningState.Cool_2nd_Stage_On + ): + return HVACAction.COOLING + if running_state & ( + T.RunningState.Fan_State_On + | T.RunningState.Fan_2nd_Stage_On + | T.RunningState.Fan_3rd_Stage_On + ): + return HVACAction.FAN + if running_state & T.RunningState.Idle: + return HVACAction.IDLE + if self.hvac_mode != HVACMode.OFF: + return HVACAction.IDLE + return HVACAction.OFF + + @property + def _pi_demand_action(self) -> HVACAction | None: + """Return the current HVAC action based on pi_demands.""" + + heating_demand = self._thrm.pi_heating_demand + if heating_demand is not None and heating_demand > 0: + return HVACAction.HEATING + cooling_demand = self._thrm.pi_cooling_demand + if cooling_demand is not None and cooling_demand > 0: + return HVACAction.COOLING + + if self.hvac_mode != HVACMode.OFF: + return HVACAction.IDLE + return HVACAction.OFF + + @property + def hvac_mode(self) -> HVACMode | None: + """Return HVAC operation mode.""" + return SYSTEM_MODE_2_HVAC.get(self._thrm.system_mode) + + @property + def hvac_modes(self) -> list[HVACMode]: + """Return the list of available HVAC operation modes.""" + return SEQ_OF_OPERATION.get(self._thrm.ctrl_sequence_of_oper, [HVACMode.OFF]) @property def preset_mode(self) -> str: """Return current preset mode.""" - return self.entity_data.entity.preset_mode + return self._preset @property def preset_modes(self) -> list[str] | None: """Return supported preset modes.""" - return self.entity_data.entity.preset_modes + return self._presets @property - def target_temperature(self) -> float | None: + def supported_features(self) -> ClimateEntityFeature: + """Return the list of supported features.""" + features = self._supported_flags + if HVACMode.HEAT_COOL in self.hvac_modes: + features |= ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + if self._fan is not None: + self._supported_flags |= ClimateEntityFeature.FAN_MODE + return features + + @property + def target_temperature(self): """Return the temperature we try to reach.""" - return self.entity_data.entity.target_temperature + temp = None + if self.hvac_mode == HVACMode.COOL: + if self.preset_mode == PRESET_AWAY: + temp = self._thrm.unoccupied_cooling_setpoint + else: + temp = self._thrm.occupied_cooling_setpoint + elif self.hvac_mode == HVACMode.HEAT: + if self.preset_mode == PRESET_AWAY: + temp = self._thrm.unoccupied_heating_setpoint + else: + temp = self._thrm.occupied_heating_setpoint + if temp is None: + return temp + return round(temp / ZCL_TEMP, 1) @property - def target_temperature_high(self) -> float | None: + def target_temperature_high(self): """Return the upper bound temperature we try to reach.""" - return self.entity_data.entity.target_temperature_high + if self.hvac_mode != HVACMode.HEAT_COOL: + return None + if self.preset_mode == PRESET_AWAY: + temp = self._thrm.unoccupied_cooling_setpoint + else: + temp = self._thrm.occupied_cooling_setpoint + + if temp is None: + return temp + + return round(temp / ZCL_TEMP, 1) @property - def target_temperature_low(self) -> float | None: + def target_temperature_low(self): """Return the lower bound temperature we try to reach.""" - return self.entity_data.entity.target_temperature_low + if self.hvac_mode != HVACMode.HEAT_COOL: + return None + if self.preset_mode == PRESET_AWAY: + temp = self._thrm.unoccupied_heating_setpoint + else: + temp = 
self._thrm.occupied_heating_setpoint + + if temp is None: + return temp + return round(temp / ZCL_TEMP, 1) @property def max_temp(self) -> float: """Return the maximum temperature.""" - return self.entity_data.entity.max_temp + temps = [] + if HVACMode.HEAT in self.hvac_modes: + temps.append(self._thrm.max_heat_setpoint_limit) + if HVACMode.COOL in self.hvac_modes: + temps.append(self._thrm.max_cool_setpoint_limit) + + if not temps: + return self.DEFAULT_MAX_TEMP + return round(max(temps) / ZCL_TEMP, 1) @property def min_temp(self) -> float: """Return the minimum temperature.""" - return self.entity_data.entity.min_temp + temps = [] + if HVACMode.HEAT in self.hvac_modes: + temps.append(self._thrm.min_heat_setpoint_limit) + if HVACMode.COOL in self.hvac_modes: + temps.append(self._thrm.min_cool_setpoint_limit) - @callback - def _handle_entity_events(self, event: Any) -> None: - """Entity state changed.""" - self._attr_hvac_mode = self._attr_hvac_mode = ZHA_TO_HA_HVAC_MODE.get( - self.entity_data.entity.hvac_mode - ) - self._attr_hvac_action = ZHA_TO_HA_HVAC_ACTION.get( - self.entity_data.entity.hvac_action - ) - super()._handle_entity_events(event) + if not temps: + return self.DEFAULT_MIN_TEMP + return round(min(temps) / ZCL_TEMP, 1) + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + self.async_accept_signal( + self._thrm, SIGNAL_ATTR_UPDATED, self.async_attribute_updated + ) + + async def async_attribute_updated(self, attr_id, attr_name, value): + """Handle attribute update from device.""" + if ( + attr_name in (ATTR_OCCP_COOL_SETPT, ATTR_OCCP_HEAT_SETPT) + and self.preset_mode == PRESET_AWAY + ): + # occupancy attribute is an unreportable attribute, but if we get + # an attribute update for an "occupied" setpoint, there's a chance + # occupancy has changed + if await self._thrm.get_occupancy() is True: + self._preset = PRESET_NONE + + self.debug("Attribute '%s' = %s update", attr_name, value) + self.async_write_ha_state() - @convert_zha_error_to_ha_error async def async_set_fan_mode(self, fan_mode: str) -> None: """Set fan mode.""" - await self.entity_data.entity.async_set_fan_mode(fan_mode=fan_mode) - self.async_write_ha_state() + if not self.fan_modes or fan_mode not in self.fan_modes: + self.warning("Unsupported '%s' fan mode", fan_mode) + return + + if fan_mode == FAN_ON: + mode = F.FanMode.On + else: + mode = F.FanMode.Auto + + await self._fan.async_set_speed(mode) - @convert_zha_error_to_ha_error async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set new target operation mode.""" - await self.entity_data.entity.async_set_hvac_mode(hvac_mode=hvac_mode) - self.async_write_ha_state() + if hvac_mode not in self.hvac_modes: + self.warning( + "can't set '%s' mode. 
Supported modes are: %s", + hvac_mode, + self.hvac_modes, + ) + return + + if await self._thrm.async_set_operation_mode(HVAC_MODE_2_SYSTEM[hvac_mode]): + self.async_write_ha_state() - @convert_zha_error_to_ha_error async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" - await self.entity_data.entity.async_set_preset_mode(preset_mode=preset_mode) + if not self.preset_modes or preset_mode not in self.preset_modes: + self.debug("Preset mode '%s' is not supported", preset_mode) + return + + if self.preset_mode not in ( + preset_mode, + PRESET_NONE, + ): + await self.async_preset_handler(self.preset_mode, enable=False) + + if preset_mode != PRESET_NONE: + await self.async_preset_handler(preset_mode, enable=True) + + self._preset = preset_mode self.async_write_ha_state() - @convert_zha_error_to_ha_error async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" - await self.entity_data.entity.async_set_temperature( - target_temp_low=kwargs.get(ATTR_TARGET_TEMP_LOW), - target_temp_high=kwargs.get(ATTR_TARGET_TEMP_HIGH), - temperature=kwargs.get(ATTR_TEMPERATURE), - hvac_mode=kwargs.get(ATTR_HVAC_MODE), - ) + low_temp = kwargs.get(ATTR_TARGET_TEMP_LOW) + high_temp = kwargs.get(ATTR_TARGET_TEMP_HIGH) + temp = kwargs.get(ATTR_TEMPERATURE) + hvac_mode = kwargs.get(ATTR_HVAC_MODE) + + if hvac_mode is not None: + await self.async_set_hvac_mode(hvac_mode) + + is_away = self.preset_mode == PRESET_AWAY + + if self.hvac_mode == HVACMode.HEAT_COOL: + if low_temp is not None: + await self._thrm.async_set_heating_setpoint( + temperature=int(low_temp * ZCL_TEMP), + is_away=is_away, + ) + if high_temp is not None: + await self._thrm.async_set_cooling_setpoint( + temperature=int(high_temp * ZCL_TEMP), + is_away=is_away, + ) + elif temp is not None: + if self.hvac_mode == HVACMode.COOL: + await self._thrm.async_set_cooling_setpoint( + temperature=int(temp * ZCL_TEMP), + is_away=is_away, + ) + elif self.hvac_mode == HVACMode.HEAT: + await self._thrm.async_set_heating_setpoint( + temperature=int(temp * ZCL_TEMP), + is_away=is_away, + ) + else: + self.debug("Not setting temperature for '%s' mode", self.hvac_mode) + return + else: + self.debug("incorrect %s setting for '%s' mode", kwargs, self.hvac_mode) + return + self.async_write_ha_state() + + async def async_preset_handler(self, preset: str, enable: bool = False) -> None: + """Set the preset mode via handler.""" + + handler = getattr(self, f"async_preset_handler_{preset}") + await handler(enable) + + +@MULTI_MATCH( + cluster_handler_names={CLUSTER_HANDLER_THERMOSTAT, "sinope_manufacturer_specific"}, + manufacturers="Sinope Technologies", + stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, +) +class SinopeTechnologiesThermostat(Thermostat): + """Sinope Technologies Thermostat.""" + + manufacturer = 0x119C + update_time_interval = timedelta(minutes=randint(45, 75)) + + def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): + """Initialize ZHA Thermostat instance.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._presets = [PRESET_AWAY, PRESET_NONE] + self._supported_flags |= ClimateEntityFeature.PRESET_MODE + self._manufacturer_ch = self.cluster_handlers["sinope_manufacturer_specific"] + + @property + def _rm_rs_action(self) -> HVACAction: + """Return the current HVAC action based on running mode and running state.""" + + running_mode = self._thrm.running_mode + if running_mode == T.SystemMode.Heat: + return HVACAction.HEATING + if running_mode == 
T.SystemMode.Cool: + return HVACAction.COOLING + + running_state = self._thrm.running_state + if running_state and running_state & ( + T.RunningState.Fan_State_On + | T.RunningState.Fan_2nd_Stage_On + | T.RunningState.Fan_3rd_Stage_On + ): + return HVACAction.FAN + if self.hvac_mode != HVACMode.OFF and running_mode == T.SystemMode.Off: + return HVACAction.IDLE + return HVACAction.OFF + + @callback + def _async_update_time(self, timestamp=None) -> None: + """Update thermostat's time display.""" + + secs_2k = ( + dt_util.now().replace(tzinfo=None) - datetime(2000, 1, 1, 0, 0, 0, 0) + ).total_seconds() + + self.debug("Updating time: %s", secs_2k) + self._manufacturer_ch.cluster.create_catching_task( + self._manufacturer_ch.write_attributes_safe( + {"secs_since_2k": secs_2k}, manufacturer=self.manufacturer + ) + ) + + async def async_added_to_hass(self) -> None: + """Run when about to be added to Hass.""" + await super().async_added_to_hass() + self.async_on_remove( + async_track_time_interval( + self.hass, self._async_update_time, self.update_time_interval + ) + ) + self._async_update_time() + + async def async_preset_handler_away(self, is_away: bool = False) -> None: + """Set occupancy.""" + mfg_code = self._zha_device.manufacturer_code + await self._thrm.write_attributes_safe( + {"set_occupancy": 0 if is_away else 1}, manufacturer=mfg_code + ) + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + aux_cluster_handlers=CLUSTER_HANDLER_FAN, + manufacturers={"Zen Within", "LUX"}, + stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, +) +class ZenWithinThermostat(Thermostat): + """Zen Within Thermostat implementation.""" + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + aux_cluster_handlers=CLUSTER_HANDLER_FAN, + manufacturers="Centralite", + models={"3157100", "3157100-E"}, + stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, +) +class CentralitePearl(ZenWithinThermostat): + """Centralite Pearl Thermostat implementation.""" + + +@STRICT_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + manufacturers={ + "_TZE200_ckud7u2l", + "_TZE200_ywdxldoj", + "_TZE200_cwnjrr72", + "_TZE200_2atgpdho", + "_TZE200_pvvbommb", + "_TZE200_4eeyebrt", + "_TZE200_cpmgn2cf", + "_TZE200_9sfg7gm0", + "_TZE200_8whxpsiw", + "_TYST11_ckud7u2l", + "_TYST11_ywdxldoj", + "_TYST11_cwnjrr72", + "_TYST11_2atgpdho", + }, +) +class MoesThermostat(Thermostat): + """Moes Thermostat implementation.""" + + def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): + """Initialize ZHA Thermostat instance.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._presets = [ + PRESET_NONE, + PRESET_AWAY, + PRESET_SCHEDULE, + PRESET_COMFORT, + PRESET_ECO, + PRESET_BOOST, + PRESET_COMPLEX, + ] + self._supported_flags |= ClimateEntityFeature.PRESET_MODE + + @property + def hvac_modes(self) -> list[HVACMode]: + """Return only the heat mode, because the device can't be turned off.""" + return [HVACMode.HEAT] + + async def async_attribute_updated(self, attr_id, attr_name, value): + """Handle attribute update from device.""" + if attr_name == "operation_preset": + if value == 0: + self._preset = PRESET_AWAY + if value == 1: + self._preset = PRESET_SCHEDULE + if value == 2: + self._preset = PRESET_NONE + if value == 3: + self._preset = PRESET_COMFORT + if value == 4: + self._preset = PRESET_ECO + if value == 5: + self._preset = PRESET_BOOST + if value == 6: + self._preset = PRESET_COMPLEX + await super().async_attribute_updated(attr_id, attr_name, value) + + async 
def async_preset_handler(self, preset: str, enable: bool = False) -> None: + """Set the preset mode.""" + mfg_code = self._zha_device.manufacturer_code + if not enable: + return await self._thrm.write_attributes_safe( + {"operation_preset": 2}, manufacturer=mfg_code + ) + if preset == PRESET_AWAY: + return await self._thrm.write_attributes_safe( + {"operation_preset": 0}, manufacturer=mfg_code + ) + if preset == PRESET_SCHEDULE: + return await self._thrm.write_attributes_safe( + {"operation_preset": 1}, manufacturer=mfg_code + ) + if preset == PRESET_COMFORT: + return await self._thrm.write_attributes_safe( + {"operation_preset": 3}, manufacturer=mfg_code + ) + if preset == PRESET_ECO: + return await self._thrm.write_attributes_safe( + {"operation_preset": 4}, manufacturer=mfg_code + ) + if preset == PRESET_BOOST: + return await self._thrm.write_attributes_safe( + {"operation_preset": 5}, manufacturer=mfg_code + ) + if preset == PRESET_COMPLEX: + return await self._thrm.write_attributes_safe( + {"operation_preset": 6}, manufacturer=mfg_code + ) + + +@STRICT_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + manufacturers={ + "_TZE200_b6wax7g0", + }, +) +class BecaThermostat(Thermostat): + """Beca Thermostat implementation.""" + + def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): + """Initialize ZHA Thermostat instance.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._presets = [ + PRESET_NONE, + PRESET_AWAY, + PRESET_SCHEDULE, + PRESET_ECO, + PRESET_BOOST, + PRESET_TEMP_MANUAL, + ] + self._supported_flags |= ClimateEntityFeature.PRESET_MODE + + @property + def hvac_modes(self) -> list[HVACMode]: + """Return only the heat mode, because the device can't be turned off.""" + return [HVACMode.HEAT] + + async def async_attribute_updated(self, attr_id, attr_name, value): + """Handle attribute update from device.""" + if attr_name == "operation_preset": + if value == 0: + self._preset = PRESET_AWAY + if value == 1: + self._preset = PRESET_SCHEDULE + if value == 2: + self._preset = PRESET_NONE + if value == 4: + self._preset = PRESET_ECO + if value == 5: + self._preset = PRESET_BOOST + if value == 7: + self._preset = PRESET_TEMP_MANUAL + await super().async_attribute_updated(attr_id, attr_name, value) + + async def async_preset_handler(self, preset: str, enable: bool = False) -> None: + """Set the preset mode.""" + mfg_code = self._zha_device.manufacturer_code + if not enable: + return await self._thrm.write_attributes_safe( + {"operation_preset": 2}, manufacturer=mfg_code + ) + if preset == PRESET_AWAY: + return await self._thrm.write_attributes_safe( + {"operation_preset": 0}, manufacturer=mfg_code + ) + if preset == PRESET_SCHEDULE: + return await self._thrm.write_attributes_safe( + {"operation_preset": 1}, manufacturer=mfg_code + ) + if preset == PRESET_ECO: + return await self._thrm.write_attributes_safe( + {"operation_preset": 4}, manufacturer=mfg_code + ) + if preset == PRESET_BOOST: + return await self._thrm.write_attributes_safe( + {"operation_preset": 5}, manufacturer=mfg_code + ) + if preset == PRESET_TEMP_MANUAL: + return await self._thrm.write_attributes_safe( + {"operation_preset": 7}, manufacturer=mfg_code + ) + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + manufacturers="Stelpro", + models={"SORB"}, + stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, +) +class StelproFanHeater(Thermostat): + """Stelpro Fan Heater implementation.""" + + @property + def hvac_modes(self) -> list[HVACMode]: + """Return 
only the heat mode, because the device can't be turned off.""" + return [HVACMode.HEAT] + + +@STRICT_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + manufacturers={ + "_TZE200_7yoranx2", + "_TZE200_e9ba97vf", # TV01-ZG + "_TZE200_hue3yfsn", # TV02-ZG + "_TZE200_husqqvux", # TSL-TRV-TV01ZG + "_TZE200_kds0pmmv", # MOES TRV TV02 + "_TZE200_kly8gjlz", # TV05-ZG + "_TZE200_lnbfnyxd", + "_TZE200_mudxchsu", + }, +) +class ZONNSMARTThermostat(Thermostat): + """ZONNSMART Thermostat implementation. + + Notice that this device uses two holiday presets (2: HolidayMode, + 3: HolidayModeTemp), but only one of them can be set. + """ + + PRESET_HOLIDAY = "holiday" + PRESET_FROST = "frost protect" + + def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): + """Initialize ZHA Thermostat instance.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._presets = [ + PRESET_NONE, + self.PRESET_HOLIDAY, + PRESET_SCHEDULE, + self.PRESET_FROST, + ] + self._supported_flags |= ClimateEntityFeature.PRESET_MODE + + async def async_attribute_updated(self, attr_id, attr_name, value): + """Handle attribute update from device.""" + if attr_name == "operation_preset": + if value == 0: + self._preset = PRESET_SCHEDULE + if value == 1: + self._preset = PRESET_NONE + if value == 2: + self._preset = self.PRESET_HOLIDAY + if value == 3: + self._preset = self.PRESET_HOLIDAY + if value == 4: + self._preset = self.PRESET_FROST + await super().async_attribute_updated(attr_id, attr_name, value) + + async def async_preset_handler(self, preset: str, enable: bool = False) -> None: + """Set the preset mode.""" + mfg_code = self._zha_device.manufacturer_code + if not enable: + return await self._thrm.write_attributes_safe( + {"operation_preset": 1}, manufacturer=mfg_code + ) + if preset == PRESET_SCHEDULE: + return await self._thrm.write_attributes_safe( + {"operation_preset": 0}, manufacturer=mfg_code + ) + if preset == self.PRESET_HOLIDAY: + return await self._thrm.write_attributes_safe( + {"operation_preset": 3}, manufacturer=mfg_code + ) + if preset == self.PRESET_FROST: + return await self._thrm.write_attributes_safe( + {"operation_preset": 4}, manufacturer=mfg_code + ) diff --git a/homeassistant/components/zha/config_flow.py b/homeassistant/components/zha/config_flow.py index f3f7f38772d..9be27f7b37c 100644 --- a/homeassistant/components/zha/config_flow.py +++ b/homeassistant/components/zha/config_flow.py @@ -10,7 +10,6 @@ from typing import Any import serial.tools.list_ports from serial.tools.list_ports_common import ListPortInfo import voluptuous as vol -from zha.application.const import RadioType import zigpy.backups from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH @@ -33,11 +32,16 @@ from homeassistant.config_entries import ( from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.selector import FileSelector, FileSelectorConfig from homeassistant.util import dt as dt_util -from .const import CONF_BAUDRATE, CONF_FLOW_CONTROL, CONF_RADIO_TYPE, DOMAIN +from .core.const import ( + CONF_BAUDRATE, + CONF_FLOW_CONTROL, + CONF_RADIO_TYPE, + DOMAIN, + RadioType, +) from .radio_manager import ( DEVICE_SCHEMA, HARDWARE_DISCOVERY_SCHEMA, @@ -105,26 +109,25 @@ async def list_serial_ports(hass: HomeAssistant) -> list[ListPortInfo]: yellow_radio.description = "Yellow Zigbee module" yellow_radio.manufacturer 
= "Nabu Casa" - if is_hassio(hass): - # Present the multi-PAN addon as a setup option, if it's available - multipan_manager = ( - await silabs_multiprotocol_addon.get_multiprotocol_addon_manager(hass) + # Present the multi-PAN addon as a setup option, if it's available + multipan_manager = await silabs_multiprotocol_addon.get_multiprotocol_addon_manager( + hass + ) + + try: + addon_info = await multipan_manager.async_get_addon_info() + except (AddonError, KeyError): + addon_info = None + + if addon_info is not None and addon_info.state != AddonState.NOT_INSTALLED: + addon_port = ListPortInfo( + device=silabs_multiprotocol_addon.get_zigbee_socket(), + skip_link_detection=True, ) - try: - addon_info = await multipan_manager.async_get_addon_info() - except (AddonError, KeyError): - addon_info = None - - if addon_info is not None and addon_info.state != AddonState.NOT_INSTALLED: - addon_port = ListPortInfo( - device=silabs_multiprotocol_addon.get_zigbee_socket(), - skip_link_detection=True, - ) - - addon_port.description = "Multiprotocol add-on" - addon_port.manufacturer = "Nabu Casa" - ports.append(addon_port) + addon_port.description = "Multiprotocol add-on" + addon_port.manufacturer = "Nabu Casa" + ports.append(addon_port) return ports @@ -133,7 +136,6 @@ class BaseZhaFlow(ConfigEntryBaseFlow): """Mixin for common ZHA flow steps and forms.""" _hass: HomeAssistant - _title: str def __init__(self) -> None: """Initialize flow instance.""" @@ -141,20 +143,22 @@ class BaseZhaFlow(ConfigEntryBaseFlow): self._hass = None # type: ignore[assignment] self._radio_mgr = ZhaRadioManager() + self._title: str | None = None @property - def hass(self) -> HomeAssistant: + def hass(self): """Return hass.""" return self._hass @hass.setter - def hass(self, hass: HomeAssistant) -> None: + def hass(self, hass): """Set hass.""" self._hass = hass self._radio_mgr.hass = hass async def _async_create_radio_entry(self) -> ConfigFlowResult: """Create a config entry with the current flow state.""" + assert self._title is not None assert self._radio_mgr.radio_type is not None assert self._radio_mgr.device_path is not None assert self._radio_mgr.device_settings is not None @@ -682,6 +686,8 @@ class ZhaOptionsFlowHandler(BaseZhaFlow, OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" super().__init__() + self.config_entry = config_entry + self._radio_mgr.device_path = config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] self._radio_mgr.device_settings = config_entry.data[CONF_DEVICE] self._radio_mgr.radio_type = RadioType[config_entry.data[CONF_RADIO_TYPE]] diff --git a/homeassistant/components/zha/const.py b/homeassistant/components/zha/const.py deleted file mode 100644 index 270a3d3fb66..00000000000 --- a/homeassistant/components/zha/const.py +++ /dev/null @@ -1,76 +0,0 @@ -"""Constants for the ZHA integration.""" - -EZSP_OVERWRITE_EUI64 = ( - "i_understand_i_can_update_eui64_only_once_and_i_still_want_to_do_it" -) - -ATTR_ACTIVE_COORDINATOR = "active_coordinator" -ATTR_ATTRIBUTES = "attributes" -ATTR_AVAILABLE = "available" -ATTR_DEVICE_TYPE = "device_type" -ATTR_CLUSTER_NAME = "cluster_name" -ATTR_ENDPOINT_NAMES = "endpoint_names" -ATTR_IEEE = "ieee" -ATTR_LAST_SEEN = "last_seen" -ATTR_LQI = "lqi" -ATTR_MANUFACTURER = "manufacturer" -ATTR_MANUFACTURER_CODE = "manufacturer_code" -ATTR_NEIGHBORS = "neighbors" -ATTR_NWK = "nwk" -ATTR_POWER_SOURCE = "power_source" -ATTR_QUIRK_APPLIED = "quirk_applied" -ATTR_QUIRK_CLASS = "quirk_class" -ATTR_QUIRK_ID = "quirk_id" -ATTR_ROUTES = 
"routes" -ATTR_RSSI = "rssi" -ATTR_SIGNATURE = "signature" -ATTR_SUCCESS = "success" - - -CONF_ALARM_MASTER_CODE = "alarm_master_code" -CONF_ALARM_FAILED_TRIES = "alarm_failed_tries" -CONF_ALARM_ARM_REQUIRES_CODE = "alarm_arm_requires_code" - -CONF_RADIO_TYPE = "radio_type" -CONF_USB_PATH = "usb_path" -CONF_USE_THREAD = "use_thread" -CONF_BAUDRATE = "baudrate" -CONF_FLOW_CONTROL = "flow_control" - -CONF_ENABLE_QUIRKS = "enable_quirks" -CONF_CUSTOM_QUIRKS_PATH = "custom_quirks_path" - -CONF_DEFAULT_LIGHT_TRANSITION = "default_light_transition" -CONF_ENABLE_ENHANCED_LIGHT_TRANSITION = "enhanced_light_transition" -CONF_ENABLE_LIGHT_TRANSITIONING_FLAG = "light_transitioning_flag" -CONF_GROUP_MEMBERS_ASSUME_STATE = "group_members_assume_state" - -CONF_ENABLE_IDENTIFY_ON_JOIN = "enable_identify_on_join" -CONF_CONSIDER_UNAVAILABLE_MAINS = "consider_unavailable_mains" -CONF_CONSIDER_UNAVAILABLE_BATTERY = "consider_unavailable_battery" -CONF_ENABLE_MAINS_STARTUP_POLLING = "enable_mains_startup_polling" - -CONF_ZIGPY = "zigpy_config" -CONF_DEVICE_CONFIG = "device_config" - -CUSTOM_CONFIGURATION = "custom_configuration" - -DATA_ZHA = "zha" -DATA_ZHA_DEVICE_TRIGGER_CACHE = "zha_device_trigger_cache" - -DEFAULT_DATABASE_NAME = "zigbee.db" - -DEVICE_PAIRING_STATUS = "pairing_status" - -DOMAIN = "zha" - -GROUP_ID = "group_id" - - -GROUP_IDS = "group_ids" -GROUP_NAME = "group_name" - -MFG_CLUSTER_ID_START = 0xFC00 - -ZHA_ALARM_OPTIONS = "zha_alarm_options" -ZHA_OPTIONS = "zha_options" diff --git a/homeassistant/components/zha/core/__init__.py b/homeassistant/components/zha/core/__init__.py new file mode 100644 index 00000000000..755eac3c4ce --- /dev/null +++ b/homeassistant/components/zha/core/__init__.py @@ -0,0 +1,6 @@ +"""Core module for Zigbee Home Automation.""" + +from .device import ZHADevice +from .gateway import ZHAGateway + +__all__ = ["ZHADevice", "ZHAGateway"] diff --git a/homeassistant/components/zha/core/cluster_handlers/__init__.py b/homeassistant/components/zha/core/cluster_handlers/__init__.py new file mode 100644 index 00000000000..8833d5c116f --- /dev/null +++ b/homeassistant/components/zha/core/cluster_handlers/__init__.py @@ -0,0 +1,654 @@ +"""Cluster handlers module for Zigbee Home Automation.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable, Coroutine, Iterator +import contextlib +from enum import Enum +import functools +import logging +from typing import TYPE_CHECKING, Any, TypedDict + +import zigpy.exceptions +import zigpy.util +import zigpy.zcl +from zigpy.zcl.foundation import ( + CommandSchema, + ConfigureReportingResponseRecord, + Status, + ZCLAttributeDef, +) + +from homeassistant.const import ATTR_COMMAND +from homeassistant.core import callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.dispatcher import async_dispatcher_send + +from ..const import ( + ATTR_ARGS, + ATTR_ATTRIBUTE_ID, + ATTR_ATTRIBUTE_NAME, + ATTR_CLUSTER_ID, + ATTR_PARAMS, + ATTR_TYPE, + ATTR_UNIQUE_ID, + ATTR_VALUE, + CLUSTER_HANDLER_ZDO, + REPORT_CONFIG_ATTR_PER_REQ, + SIGNAL_ATTR_UPDATED, + ZHA_CLUSTER_HANDLER_MSG, + ZHA_CLUSTER_HANDLER_MSG_BIND, + ZHA_CLUSTER_HANDLER_MSG_CFG_RPT, + ZHA_CLUSTER_HANDLER_MSG_DATA, + ZHA_CLUSTER_HANDLER_READS_PER_REQ, +) +from ..helpers import LogMixin, safe_read + +if TYPE_CHECKING: + from ..endpoint import Endpoint + +_LOGGER = logging.getLogger(__name__) +RETRYABLE_REQUEST_DECORATOR = zigpy.util.retryable_request(tries=3) +UNPROXIED_CLUSTER_METHODS = {"general_command"} + +type 
_FuncType[**_P] = Callable[_P, Awaitable[Any]]
+type _ReturnFuncType[**_P] = Callable[_P, Coroutine[Any, Any, Any]]
+
+
+@contextlib.contextmanager
+def wrap_zigpy_exceptions() -> Iterator[None]:
+    """Wrap zigpy exceptions in `HomeAssistantError` exceptions."""
+    try:
+        yield
+    except TimeoutError as exc:
+        raise HomeAssistantError(
+            "Failed to send request: device did not respond"
+        ) from exc
+    except zigpy.exceptions.ZigbeeException as exc:
+        message = "Failed to send request"
+
+        if str(exc):
+            message = f"{message}: {exc}"
+
+        raise HomeAssistantError(message) from exc
+
+
+def retry_request[**_P](func: _FuncType[_P]) -> _ReturnFuncType[_P]:
+    """Send a request with retries and wrap expected zigpy exceptions."""
+
+    @functools.wraps(func)
+    async def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> Any:
+        with wrap_zigpy_exceptions():
+            return await RETRYABLE_REQUEST_DECORATOR(func)(*args, **kwargs)
+
+    return wrapper
+
+
+class AttrReportConfig(TypedDict, total=True):
+    """Configuration to report for the attributes."""
+
+    # An attribute name
+    attr: str
+    # The config for the attribute reporting configuration consists of a tuple for
+    # (minimum_reported_time_interval_s, maximum_reported_time_interval_s, value_delta)
+    config: tuple[int, int, int | float]
+
+
+def parse_and_log_command(cluster_handler, tsn, command_id, args):
+    """Parse and log a zigbee cluster command."""
+    try:
+        name = cluster_handler.cluster.server_commands[command_id].name
+    except KeyError:
+        name = f"0x{command_id:02X}"
+
+    cluster_handler.debug(
+        "received '%s' command with %s args on cluster_id '%s' tsn '%s'",
+        name,
+        args,
+        cluster_handler.cluster.cluster_id,
+        tsn,
+    )
+    return name
+
+
+class ClusterHandlerStatus(Enum):
+    """Status of a cluster handler."""
+
+    CREATED = 1
+    CONFIGURED = 2
+    INITIALIZED = 3
+
+
+class ClusterHandler(LogMixin):
+    """Base cluster handler for a Zigbee cluster."""
+
+    REPORT_CONFIG: tuple[AttrReportConfig, ...] = ()
+    BIND: bool = True
+
+    # Dict of attributes to read on cluster handler initialization.
+    # Dict keys -- attribute ID or names, with bool value indicating whether a cached
+    # attribute read is acceptable.
+ ZCL_INIT_ATTRS: dict[str, bool] = {} + + def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: + """Initialize ClusterHandler.""" + self._generic_id = f"cluster_handler_0x{cluster.cluster_id:04x}" + self._endpoint: Endpoint = endpoint + self._cluster = cluster + self._id = f"{endpoint.id}:0x{cluster.cluster_id:04x}" + unique_id = endpoint.unique_id.replace("-", ":") + self._unique_id = f"{unique_id}:0x{cluster.cluster_id:04x}" + if not hasattr(self, "_value_attribute") and self.REPORT_CONFIG: + attr_def: ZCLAttributeDef = self.cluster.attributes_by_name[ + self.REPORT_CONFIG[0]["attr"] + ] + self.value_attribute = attr_def.id + self._status = ClusterHandlerStatus.CREATED + self._cluster.add_listener(self) + self.data_cache: dict[str, Enum] = {} + + @classmethod + def matches(cls, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> bool: + """Filter the cluster match for specific devices.""" + return True + + @property + def id(self) -> str: + """Return cluster handler id unique for this device only.""" + return self._id + + @property + def generic_id(self): + """Return the generic id for this cluster handler.""" + return self._generic_id + + @property + def unique_id(self): + """Return the unique id for this cluster handler.""" + return self._unique_id + + @property + def cluster(self): + """Return the zigpy cluster for this cluster handler.""" + return self._cluster + + @property + def name(self) -> str: + """Return friendly name.""" + return self.cluster.ep_attribute or self._generic_id + + @property + def status(self): + """Return the status of the cluster handler.""" + return self._status + + def __hash__(self) -> int: + """Make this a hashable.""" + return hash(self._unique_id) + + @callback + def async_send_signal(self, signal: str, *args: Any) -> None: + """Send a signal through hass dispatcher.""" + self._endpoint.async_send_signal(signal, *args) + + async def bind(self): + """Bind a zigbee cluster. + + This also swallows ZigbeeException exceptions that are thrown when + devices are unreachable. + """ + try: + res = await self.cluster.bind() + self.debug("bound '%s' cluster: %s", self.cluster.ep_attribute, res[0]) + async_dispatcher_send( + self._endpoint.device.hass, + ZHA_CLUSTER_HANDLER_MSG, + { + ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_BIND, + ZHA_CLUSTER_HANDLER_MSG_DATA: { + "cluster_name": self.cluster.name, + "cluster_id": self.cluster.cluster_id, + "success": res[0] == 0, + }, + }, + ) + except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: + self.debug( + "Failed to bind '%s' cluster: %s", + self.cluster.ep_attribute, + str(ex), + exc_info=ex, + ) + async_dispatcher_send( + self._endpoint.device.hass, + ZHA_CLUSTER_HANDLER_MSG, + { + ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_BIND, + ZHA_CLUSTER_HANDLER_MSG_DATA: { + "cluster_name": self.cluster.name, + "cluster_id": self.cluster.cluster_id, + "success": False, + }, + }, + ) + + async def configure_reporting(self) -> None: + """Configure attribute reporting for a cluster. + + This also swallows ZigbeeException exceptions that are thrown when + devices are unreachable. 
+ """ + event_data = {} + kwargs = {} + if ( + self.cluster.cluster_id >= 0xFC00 + and self._endpoint.device.manufacturer_code + ): + kwargs["manufacturer"] = self._endpoint.device.manufacturer_code + + for attr_report in self.REPORT_CONFIG: + attr, config = attr_report["attr"], attr_report["config"] + + try: + attr_name = self.cluster.find_attribute(attr).name + except KeyError: + attr_name = attr + + event_data[attr_name] = { + "min": config[0], + "max": config[1], + "id": attr, + "name": attr_name, + "change": config[2], + "status": None, + } + + to_configure = [*self.REPORT_CONFIG] + chunk, rest = ( + to_configure[:REPORT_CONFIG_ATTR_PER_REQ], + to_configure[REPORT_CONFIG_ATTR_PER_REQ:], + ) + while chunk: + reports = {rec["attr"]: rec["config"] for rec in chunk} + try: + res = await self.cluster.configure_reporting_multiple(reports, **kwargs) + self._configure_reporting_status(reports, res[0], event_data) + except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: + self.debug( + "failed to set reporting on '%s' cluster for: %s", + self.cluster.ep_attribute, + str(ex), + ) + break + chunk, rest = ( + rest[:REPORT_CONFIG_ATTR_PER_REQ], + rest[REPORT_CONFIG_ATTR_PER_REQ:], + ) + + async_dispatcher_send( + self._endpoint.device.hass, + ZHA_CLUSTER_HANDLER_MSG, + { + ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_CFG_RPT, + ZHA_CLUSTER_HANDLER_MSG_DATA: { + "cluster_name": self.cluster.name, + "cluster_id": self.cluster.cluster_id, + "attributes": event_data, + }, + }, + ) + + def _configure_reporting_status( + self, + attrs: dict[str, tuple[int, int, float | int]], + res: list | tuple, + event_data: dict[str, dict[str, Any]], + ) -> None: + """Parse configure reporting result.""" + if isinstance(res, (Exception, ConfigureReportingResponseRecord)): + # assume default response + self.debug( + "attr reporting for '%s' on '%s': %s", + attrs, + self.name, + res, + ) + for attr in attrs: + event_data[attr]["status"] = Status.FAILURE.name + return + if res[0].status == Status.SUCCESS and len(res) == 1: + self.debug( + "Successfully configured reporting for '%s' on '%s' cluster: %s", + attrs, + self.name, + res, + ) + # 2.5.8.1.3 Status Field + # The status field specifies the status of the Configure Reporting operation attempted on this attribute, as detailed in 2.5.7.3. + # Note that attribute status records are not included for successfully configured attributes, in order to save bandwidth. + # In the case of successful configuration of all attributes, only a single attribute status record SHALL be included in the command, + # with the status field set to SUCCESS and the direction and attribute identifier fields omitted. 
+ for attr in attrs: + event_data[attr]["status"] = Status.SUCCESS.name + return + + for record in res: + event_data[self.cluster.find_attribute(record.attrid).name]["status"] = ( + record.status.name + ) + failed = [ + self.cluster.find_attribute(record.attrid).name + for record in res + if record.status != Status.SUCCESS + ] + self.debug( + "Failed to configure reporting for '%s' on '%s' cluster: %s", + failed, + self.name, + res, + ) + success = set(attrs) - set(failed) + self.debug( + "Successfully configured reporting for '%s' on '%s' cluster", + set(attrs) - set(failed), + self.name, + ) + for attr in success: + event_data[attr]["status"] = Status.SUCCESS.name + + async def async_configure(self) -> None: + """Set cluster binding and attribute reporting.""" + if not self._endpoint.device.skip_configuration: + if self.BIND: + self.debug("Performing cluster binding") + await self.bind() + if self.cluster.is_server: + self.debug("Configuring cluster attribute reporting") + await self.configure_reporting() + ch_specific_cfg = getattr( + self, "async_configure_cluster_handler_specific", None + ) + if ch_specific_cfg: + self.debug("Performing cluster handler specific configuration") + await ch_specific_cfg() + self.debug("finished cluster handler configuration") + else: + self.debug("skipping cluster handler configuration") + self._status = ClusterHandlerStatus.CONFIGURED + + async def async_initialize(self, from_cache: bool) -> None: + """Initialize cluster handler.""" + if not from_cache and self._endpoint.device.skip_configuration: + self.debug("Skipping cluster handler initialization") + self._status = ClusterHandlerStatus.INITIALIZED + return + + self.debug("initializing cluster handler: from_cache: %s", from_cache) + cached = [a for a, cached in self.ZCL_INIT_ATTRS.items() if cached] + uncached = [a for a, cached in self.ZCL_INIT_ATTRS.items() if not cached] + uncached.extend([cfg["attr"] for cfg in self.REPORT_CONFIG]) + + if cached: + self.debug("initializing cached cluster handler attributes: %s", cached) + await self._get_attributes( + True, cached, from_cache=True, only_cache=from_cache + ) + if uncached: + self.debug( + "initializing uncached cluster handler attributes: %s - from cache[%s]", + uncached, + from_cache, + ) + await self._get_attributes( + True, uncached, from_cache=from_cache, only_cache=from_cache + ) + + ch_specific_init = getattr( + self, "async_initialize_cluster_handler_specific", None + ) + if ch_specific_init: + self.debug( + "Performing cluster handler specific initialization: %s", uncached + ) + await ch_specific_init(from_cache=from_cache) + + self.debug("finished cluster handler initialization") + self._status = ClusterHandlerStatus.INITIALIZED + + @callback + def cluster_command(self, tsn, command_id, args): + """Handle commands received to this cluster.""" + + @callback + def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: + """Handle attribute updates on this cluster.""" + attr_name = self._get_attribute_name(attrid) + self.debug( + "cluster_handler[%s] attribute_updated - cluster[%s] attr[%s] value[%s]", + self.name, + self.cluster.name, + attr_name, + value, + ) + self.async_send_signal( + f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", + attrid, + attr_name, + value, + ) + + @callback + def zdo_command(self, *args, **kwargs): + """Handle ZDO commands on this cluster.""" + + @callback + def zha_send_event(self, command: str, arg: list | dict | CommandSchema) -> None: + """Relay events to hass.""" + + args: list | dict + if 
isinstance(arg, CommandSchema): + args = [a for a in arg if a is not None] + params = arg.as_dict() + elif isinstance(arg, (list, dict)): + # Quirks can directly send lists and dicts to ZHA this way + args = arg + params = {} + else: + raise TypeError(f"Unexpected zha_send_event {command!r} argument: {arg!r}") + + self._endpoint.send_event( + { + ATTR_UNIQUE_ID: self.unique_id, + ATTR_CLUSTER_ID: self.cluster.cluster_id, + ATTR_COMMAND: command, + # Maintain backwards compatibility with the old zigpy response format + ATTR_ARGS: args, + ATTR_PARAMS: params, + } + ) + + async def async_update(self): + """Retrieve latest state from cluster.""" + + def _get_attribute_name(self, attrid: int) -> str | int: + if attrid not in self.cluster.attributes: + return attrid + + return self.cluster.attributes[attrid].name + + async def get_attribute_value(self, attribute, from_cache=True): + """Get the value for an attribute.""" + manufacturer = None + manufacturer_code = self._endpoint.device.manufacturer_code + if self.cluster.cluster_id >= 0xFC00 and manufacturer_code: + manufacturer = manufacturer_code + result = await safe_read( + self._cluster, + [attribute], + allow_cache=from_cache, + only_cache=from_cache, + manufacturer=manufacturer, + ) + return result.get(attribute) + + async def _get_attributes( + self, + raise_exceptions: bool, + attributes: list[str], + from_cache: bool = True, + only_cache: bool = True, + ) -> dict[int | str, Any]: + """Get the values for a list of attributes.""" + manufacturer = None + manufacturer_code = self._endpoint.device.manufacturer_code + if self.cluster.cluster_id >= 0xFC00 and manufacturer_code: + manufacturer = manufacturer_code + chunk = attributes[:ZHA_CLUSTER_HANDLER_READS_PER_REQ] + rest = attributes[ZHA_CLUSTER_HANDLER_READS_PER_REQ:] + result = {} + while chunk: + try: + self.debug("Reading attributes in chunks: %s", chunk) + read, _ = await self.cluster.read_attributes( + chunk, + allow_cache=from_cache, + only_cache=only_cache, + manufacturer=manufacturer, + ) + result.update(read) + except (TimeoutError, zigpy.exceptions.ZigbeeException) as ex: + self.debug( + "failed to get attributes '%s' on '%s' cluster: %s", + chunk, + self.cluster.ep_attribute, + str(ex), + ) + if raise_exceptions: + raise + chunk = rest[:ZHA_CLUSTER_HANDLER_READS_PER_REQ] + rest = rest[ZHA_CLUSTER_HANDLER_READS_PER_REQ:] + return result + + get_attributes = functools.partialmethod(_get_attributes, False) + + async def write_attributes_safe( + self, attributes: dict[str, Any], manufacturer: int | None = None + ) -> None: + """Wrap `write_attributes` to throw an exception on attribute write failure.""" + + res = await self.write_attributes(attributes, manufacturer=manufacturer) + + for record in res[0]: + if record.status != Status.SUCCESS: + try: + name = self.cluster.attributes[record.attrid].name + value = attributes.get(name, "unknown") + except KeyError: + name = f"0x{record.attrid:04x}" + value = "unknown" + + raise HomeAssistantError( + f"Failed to write attribute {name}={value}: {record.status}", + ) + + def log(self, level, msg, *args, **kwargs): + """Log a message.""" + msg = f"[%s:%s]: {msg}" + args = (self._endpoint.device.nwk, self._id, *args) + _LOGGER.log(level, msg, *args, **kwargs) + + def __getattr__(self, name): + """Get attribute or a decorated cluster command.""" + if ( + hasattr(self._cluster, name) + and callable(getattr(self._cluster, name)) + and name not in UNPROXIED_CLUSTER_METHODS + ): + command = getattr(self._cluster, name) + wrapped_command = 
retry_request(command) + wrapped_command.__name__ = name + + return wrapped_command + return self.__getattribute__(name) + + +class ZDOClusterHandler(LogMixin): + """Cluster handler for ZDO events.""" + + def __init__(self, device) -> None: + """Initialize ZDOClusterHandler.""" + self.name = CLUSTER_HANDLER_ZDO + self._cluster = device.device.endpoints[0] + self._zha_device = device + self._status = ClusterHandlerStatus.CREATED + self._unique_id = f"{device.ieee!s}:{device.name}_ZDO" + self._cluster.add_listener(self) + + @property + def unique_id(self): + """Return the unique id for this cluster handler.""" + return self._unique_id + + @property + def cluster(self): + """Return the aigpy cluster for this cluster handler.""" + return self._cluster + + @property + def status(self): + """Return the status of the cluster handler.""" + return self._status + + @callback + def device_announce(self, zigpy_device): + """Device announce handler.""" + + @callback + def permit_duration(self, duration): + """Permit handler.""" + + async def async_initialize(self, from_cache): + """Initialize cluster handler.""" + self._status = ClusterHandlerStatus.INITIALIZED + + async def async_configure(self): + """Configure cluster handler.""" + self._status = ClusterHandlerStatus.CONFIGURED + + def log(self, level, msg, *args, **kwargs): + """Log a message.""" + msg = f"[%s:ZDO](%s): {msg}" + args = (self._zha_device.nwk, self._zha_device.model, *args) + _LOGGER.log(level, msg, *args, **kwargs) + + +class ClientClusterHandler(ClusterHandler): + """ClusterHandler for Zigbee client (output) clusters.""" + + @callback + def attribute_updated(self, attrid: int, value: Any, timestamp: Any) -> None: + """Handle an attribute updated on this cluster.""" + super().attribute_updated(attrid, value, timestamp) + + try: + attr_name = self._cluster.attributes[attrid].name + except KeyError: + attr_name = "Unknown" + + self.zha_send_event( + SIGNAL_ATTR_UPDATED, + { + ATTR_ATTRIBUTE_ID: attrid, + ATTR_ATTRIBUTE_NAME: attr_name, + ATTR_VALUE: value, + }, + ) + + @callback + def cluster_command(self, tsn, command_id, args): + """Handle a cluster command received on this cluster.""" + if ( + self._cluster.server_commands is not None + and self._cluster.server_commands.get(command_id) is not None + ): + self.zha_send_event(self._cluster.server_commands[command_id].name, args) diff --git a/homeassistant/components/zha/core/cluster_handlers/closures.py b/homeassistant/components/zha/core/cluster_handlers/closures.py new file mode 100644 index 00000000000..e96d6492beb --- /dev/null +++ b/homeassistant/components/zha/core/cluster_handlers/closures.py @@ -0,0 +1,271 @@ +"""Closures cluster handlers module for Zigbee Home Automation.""" + +from __future__ import annotations + +from typing import Any + +import zigpy.types as t +from zigpy.zcl.clusters.closures import ConfigStatus, DoorLock, Shade, WindowCovering + +from homeassistant.core import callback + +from .. import registries +from ..const import REPORT_CONFIG_IMMEDIATE, SIGNAL_ATTR_UPDATED +from . 
import AttrReportConfig, ClientClusterHandler, ClusterHandler + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(DoorLock.cluster_id) +class DoorLockClusterHandler(ClusterHandler): + """Door lock cluster handler.""" + + _value_attribute = 0 + REPORT_CONFIG = ( + AttrReportConfig( + attr=DoorLock.AttributeDefs.lock_state.name, + config=REPORT_CONFIG_IMMEDIATE, + ), + ) + + async def async_update(self): + """Retrieve latest state.""" + result = await self.get_attribute_value( + DoorLock.AttributeDefs.lock_state.name, from_cache=True + ) + if result is not None: + self.async_send_signal( + f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", + DoorLock.AttributeDefs.lock_state.id, + DoorLock.AttributeDefs.lock_state.name, + result, + ) + + @callback + def cluster_command(self, tsn, command_id, args): + """Handle a cluster command received on this cluster.""" + + if ( + self._cluster.client_commands is None + or self._cluster.client_commands.get(command_id) is None + ): + return + + command_name = self._cluster.client_commands[command_id].name + + if command_name == DoorLock.ClientCommandDefs.operation_event_notification.name: + self.zha_send_event( + command_name, + { + "source": args[0].name, + "operation": args[1].name, + "code_slot": (args[2] + 1), # start code slots at 1 + }, + ) + + @callback + def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: + """Handle attribute update from lock cluster.""" + attr_name = self._get_attribute_name(attrid) + self.debug( + "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value + ) + if attrid == self._value_attribute: + self.async_send_signal( + f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value + ) + + async def async_set_user_code(self, code_slot: int, user_code: str) -> None: + """Set the user code for the code slot.""" + + await self.set_pin_code( + code_slot - 1, # start code slots at 1, Zigbee internals use 0 + DoorLock.UserStatus.Enabled, + DoorLock.UserType.Unrestricted, + user_code, + ) + + async def async_enable_user_code(self, code_slot: int) -> None: + """Enable the code slot.""" + + await self.set_user_status(code_slot - 1, DoorLock.UserStatus.Enabled) + + async def async_disable_user_code(self, code_slot: int) -> None: + """Disable the code slot.""" + + await self.set_user_status(code_slot - 1, DoorLock.UserStatus.Disabled) + + async def async_get_user_code(self, code_slot: int) -> int: + """Get the user code from the code slot.""" + + return await self.get_pin_code(code_slot - 1) + + async def async_clear_user_code(self, code_slot: int) -> None: + """Clear the code slot.""" + + await self.clear_pin_code(code_slot - 1) + + async def async_clear_all_user_codes(self) -> None: + """Clear all code slots.""" + + await self.clear_all_pin_codes() + + async def async_set_user_type(self, code_slot: int, user_type: str) -> None: + """Set user type.""" + + await self.set_user_type(code_slot - 1, user_type) + + async def async_get_user_type(self, code_slot: int) -> str: + """Get user type.""" + + return await self.get_user_type(code_slot - 1) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Shade.cluster_id) +class ShadeClusterHandler(ClusterHandler): + """Shade cluster handler.""" + + +@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(WindowCovering.cluster_id) +class WindowCoveringClientClusterHandler(ClientClusterHandler): + """Window client cluster handler.""" + + +@registries.BINDABLE_CLUSTERS.register(WindowCovering.cluster_id) 
+@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(WindowCovering.cluster_id) +class WindowCoveringClusterHandler(ClusterHandler): + """Window cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=WindowCovering.AttributeDefs.current_position_lift_percentage.name, + config=REPORT_CONFIG_IMMEDIATE, + ), + AttrReportConfig( + attr=WindowCovering.AttributeDefs.current_position_tilt_percentage.name, + config=REPORT_CONFIG_IMMEDIATE, + ), + ) + + ZCL_INIT_ATTRS = { + WindowCovering.AttributeDefs.window_covering_type.name: True, + WindowCovering.AttributeDefs.window_covering_mode.name: True, + WindowCovering.AttributeDefs.config_status.name: True, + WindowCovering.AttributeDefs.installed_closed_limit_lift.name: True, + WindowCovering.AttributeDefs.installed_closed_limit_tilt.name: True, + WindowCovering.AttributeDefs.installed_open_limit_lift.name: True, + WindowCovering.AttributeDefs.installed_open_limit_tilt.name: True, + } + + async def async_update(self): + """Retrieve latest state.""" + results = await self.get_attributes( + [ + WindowCovering.AttributeDefs.current_position_lift_percentage.name, + WindowCovering.AttributeDefs.current_position_tilt_percentage.name, + ], + from_cache=False, + only_cache=False, + ) + self.debug( + "read current_position_lift_percentage and current_position_tilt_percentage - results: %s", + results, + ) + if ( + results + and results.get( + WindowCovering.AttributeDefs.current_position_lift_percentage.name + ) + is not None + ): + # the 100 - value is because we need to invert the value before giving it to the entity + self.async_send_signal( + f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", + WindowCovering.AttributeDefs.current_position_lift_percentage.id, + WindowCovering.AttributeDefs.current_position_lift_percentage.name, + 100 + - results.get( + WindowCovering.AttributeDefs.current_position_lift_percentage.name + ), + ) + if ( + results + and results.get( + WindowCovering.AttributeDefs.current_position_tilt_percentage.name + ) + is not None + ): + # the 100 - value is because we need to invert the value before giving it to the entity + self.async_send_signal( + f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", + WindowCovering.AttributeDefs.current_position_tilt_percentage.id, + WindowCovering.AttributeDefs.current_position_tilt_percentage.name, + 100 + - results.get( + WindowCovering.AttributeDefs.current_position_tilt_percentage.name + ), + ) + + @property + def inverted(self): + """Return true if the window covering is inverted.""" + config_status = self.cluster.get( + WindowCovering.AttributeDefs.config_status.name + ) + return ( + config_status is not None + and ConfigStatus.Open_up_commands_reversed in ConfigStatus(config_status) + ) + + @property + def current_position_lift_percentage(self) -> t.uint16_t | None: + """Return the current lift percentage of the window covering.""" + lift_percentage = self.cluster.get( + WindowCovering.AttributeDefs.current_position_lift_percentage.name + ) + if lift_percentage is not None: + # the 100 - value is because we need to invert the value before giving it to the entity + lift_percentage = 100 - lift_percentage + return lift_percentage + + @property + def current_position_tilt_percentage(self) -> t.uint16_t | None: + """Return the current tilt percentage of the window covering.""" + tilt_percentage = self.cluster.get( + WindowCovering.AttributeDefs.current_position_tilt_percentage.name + ) + if tilt_percentage is not None: + # the 100 - value is because we need to invert the value before giving it to the 
entity + tilt_percentage = 100 - tilt_percentage + return tilt_percentage + + @property + def installed_open_limit_lift(self) -> t.uint16_t | None: + """Return the installed open lift limit of the window covering.""" + return self.cluster.get( + WindowCovering.AttributeDefs.installed_open_limit_lift.name + ) + + @property + def installed_closed_limit_lift(self) -> t.uint16_t | None: + """Return the installed closed lift limit of the window covering.""" + return self.cluster.get( + WindowCovering.AttributeDefs.installed_closed_limit_lift.name + ) + + @property + def installed_open_limit_tilt(self) -> t.uint16_t | None: + """Return the installed open tilt limit of the window covering.""" + return self.cluster.get( + WindowCovering.AttributeDefs.installed_open_limit_tilt.name + ) + + @property + def installed_closed_limit_tilt(self) -> t.uint16_t | None: + """Return the installed closed tilt limit of the window covering.""" + return self.cluster.get( + WindowCovering.AttributeDefs.installed_closed_limit_tilt.name + ) + + @property + def window_covering_type(self) -> WindowCovering.WindowCoveringType | None: + """Return the window covering type.""" + return self.cluster.get(WindowCovering.AttributeDefs.window_covering_type.name) diff --git a/homeassistant/components/zha/core/cluster_handlers/general.py b/homeassistant/components/zha/core/cluster_handlers/general.py new file mode 100644 index 00000000000..438fc6b1723 --- /dev/null +++ b/homeassistant/components/zha/core/cluster_handlers/general.py @@ -0,0 +1,690 @@ +"""General cluster handlers module for Zigbee Home Automation.""" + +from __future__ import annotations + +from collections.abc import Coroutine +from typing import TYPE_CHECKING, Any + +from zhaquirks.quirk_ids import TUYA_PLUG_ONOFF +import zigpy.exceptions +import zigpy.types as t +import zigpy.zcl +from zigpy.zcl.clusters.general import ( + Alarms, + AnalogInput, + AnalogOutput, + AnalogValue, + ApplianceControl, + Basic, + BinaryInput, + BinaryOutput, + BinaryValue, + Commissioning, + DeviceTemperature, + GreenPowerProxy, + Groups, + Identify, + LevelControl, + MultistateInput, + MultistateOutput, + MultistateValue, + OnOff, + OnOffConfiguration, + Ota, + Partition, + PollControl, + PowerConfiguration, + PowerProfile, + RSSILocation, + Scenes, + Time, +) +from zigpy.zcl.foundation import Status + +from homeassistant.core import callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.event import async_call_later + +from .. import registries +from ..const import ( + REPORT_CONFIG_ASAP, + REPORT_CONFIG_BATTERY_SAVE, + REPORT_CONFIG_DEFAULT, + REPORT_CONFIG_IMMEDIATE, + REPORT_CONFIG_MAX_INT, + REPORT_CONFIG_MIN_INT, + SIGNAL_ATTR_UPDATED, + SIGNAL_MOVE_LEVEL, + SIGNAL_SET_LEVEL, + SIGNAL_UPDATE_DEVICE, +) +from . 
import ( + AttrReportConfig, + ClientClusterHandler, + ClusterHandler, + parse_and_log_command, +) +from .helpers import is_hue_motion_sensor + +if TYPE_CHECKING: + from ..endpoint import Endpoint + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Alarms.cluster_id) +class AlarmsClusterHandler(ClusterHandler): + """Alarms cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogInput.cluster_id) +class AnalogInputClusterHandler(ClusterHandler): + """Analog Input cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=AnalogInput.AttributeDefs.present_value.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + + +@registries.BINDABLE_CLUSTERS.register(AnalogOutput.cluster_id) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogOutput.cluster_id) +class AnalogOutputClusterHandler(ClusterHandler): + """Analog Output cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=AnalogOutput.AttributeDefs.present_value.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + ZCL_INIT_ATTRS = { + AnalogOutput.AttributeDefs.min_present_value.name: True, + AnalogOutput.AttributeDefs.max_present_value.name: True, + AnalogOutput.AttributeDefs.resolution.name: True, + AnalogOutput.AttributeDefs.relinquish_default.name: True, + AnalogOutput.AttributeDefs.description.name: True, + AnalogOutput.AttributeDefs.engineering_units.name: True, + AnalogOutput.AttributeDefs.application_type.name: True, + } + + @property + def present_value(self) -> float | None: + """Return cached value of present_value.""" + return self.cluster.get(AnalogOutput.AttributeDefs.present_value.name) + + @property + def min_present_value(self) -> float | None: + """Return cached value of min_present_value.""" + return self.cluster.get(AnalogOutput.AttributeDefs.min_present_value.name) + + @property + def max_present_value(self) -> float | None: + """Return cached value of max_present_value.""" + return self.cluster.get(AnalogOutput.AttributeDefs.max_present_value.name) + + @property + def resolution(self) -> float | None: + """Return cached value of resolution.""" + return self.cluster.get(AnalogOutput.AttributeDefs.resolution.name) + + @property + def relinquish_default(self) -> float | None: + """Return cached value of relinquish_default.""" + return self.cluster.get(AnalogOutput.AttributeDefs.relinquish_default.name) + + @property + def description(self) -> str | None: + """Return cached value of description.""" + return self.cluster.get(AnalogOutput.AttributeDefs.description.name) + + @property + def engineering_units(self) -> int | None: + """Return cached value of engineering_units.""" + return self.cluster.get(AnalogOutput.AttributeDefs.engineering_units.name) + + @property + def application_type(self) -> int | None: + """Return cached value of application_type.""" + return self.cluster.get(AnalogOutput.AttributeDefs.application_type.name) + + async def async_set_present_value(self, value: float) -> None: + """Update present_value.""" + await self.write_attributes_safe( + {AnalogOutput.AttributeDefs.present_value.name: value} + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogValue.cluster_id) +class AnalogValueClusterHandler(ClusterHandler): + """Analog Value cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=AnalogValue.AttributeDefs.present_value.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ApplianceControl.cluster_id) +class ApplianceControlClusterHandler(ClusterHandler): + 
"""Appliance Control cluster handler.""" + + +@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(Basic.cluster_id) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Basic.cluster_id) +class BasicClusterHandler(ClusterHandler): + """Cluster handler to interact with the basic cluster.""" + + UNKNOWN = 0 + BATTERY = 3 + BIND: bool = False + + POWER_SOURCES = { + UNKNOWN: "Unknown", + 1: "Mains (single phase)", + 2: "Mains (3 phase)", + BATTERY: "Battery", + 4: "DC source", + 5: "Emergency mains constantly powered", + 6: "Emergency mains and transfer switch", + } + + def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: + """Initialize Basic cluster handler.""" + super().__init__(cluster, endpoint) + if is_hue_motion_sensor(self) and self.cluster.endpoint.endpoint_id == 2: + self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() + self.ZCL_INIT_ATTRS["trigger_indicator"] = True + elif ( + self.cluster.endpoint.manufacturer == "TexasInstruments" + and self.cluster.endpoint.model == "ti.router" + ): + self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() + self.ZCL_INIT_ATTRS["transmit_power"] = True + elif self.cluster.endpoint.model == "lumi.curtain.agl001": + self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() + self.ZCL_INIT_ATTRS["power_source"] = True + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryInput.cluster_id) +class BinaryInputClusterHandler(ClusterHandler): + """Binary Input cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=BinaryInput.AttributeDefs.present_value.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryOutput.cluster_id) +class BinaryOutputClusterHandler(ClusterHandler): + """Binary Output cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=BinaryOutput.AttributeDefs.present_value.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryValue.cluster_id) +class BinaryValueClusterHandler(ClusterHandler): + """Binary Value cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=BinaryValue.AttributeDefs.present_value.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Commissioning.cluster_id) +class CommissioningClusterHandler(ClusterHandler): + """Commissioning cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(DeviceTemperature.cluster_id) +class DeviceTemperatureClusterHandler(ClusterHandler): + """Device Temperature cluster handler.""" + + REPORT_CONFIG = ( + { + "attr": DeviceTemperature.AttributeDefs.current_temperature.name, + "config": (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 50), + }, + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(GreenPowerProxy.cluster_id) +class GreenPowerProxyClusterHandler(ClusterHandler): + """Green Power Proxy cluster handler.""" + + BIND: bool = False + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Groups.cluster_id) +class GroupsClusterHandler(ClusterHandler): + """Groups cluster handler.""" + + BIND: bool = False + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Identify.cluster_id) +class IdentifyClusterHandler(ClusterHandler): + """Identify cluster handler.""" + + BIND: bool = False + + @callback + def cluster_command(self, tsn, command_id, args): + """Handle commands received to this cluster.""" + cmd = parse_and_log_command(self, tsn, command_id, args) + + if cmd == Identify.ServerCommandDefs.trigger_effect.name: + 
self.async_send_signal(f"{self.unique_id}_{cmd}", args[0]) + + +@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(LevelControl.cluster_id) +class LevelControlClientClusterHandler(ClientClusterHandler): + """LevelControl client cluster.""" + + +@registries.BINDABLE_CLUSTERS.register(LevelControl.cluster_id) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(LevelControl.cluster_id) +class LevelControlClusterHandler(ClusterHandler): + """Cluster handler for the LevelControl Zigbee cluster.""" + + CURRENT_LEVEL = 0 + REPORT_CONFIG = ( + AttrReportConfig( + attr=LevelControl.AttributeDefs.current_level.name, + config=REPORT_CONFIG_ASAP, + ), + ) + ZCL_INIT_ATTRS = { + LevelControl.AttributeDefs.on_off_transition_time.name: True, + LevelControl.AttributeDefs.on_level.name: True, + LevelControl.AttributeDefs.on_transition_time.name: True, + LevelControl.AttributeDefs.off_transition_time.name: True, + LevelControl.AttributeDefs.default_move_rate.name: True, + LevelControl.AttributeDefs.start_up_current_level.name: True, + } + + @property + def current_level(self) -> int | None: + """Return cached value of the current_level attribute.""" + return self.cluster.get(LevelControl.AttributeDefs.current_level.name) + + @callback + def cluster_command(self, tsn, command_id, args): + """Handle commands received to this cluster.""" + cmd = parse_and_log_command(self, tsn, command_id, args) + + if cmd in ( + LevelControl.ServerCommandDefs.move_to_level.name, + LevelControl.ServerCommandDefs.move_to_level_with_on_off.name, + ): + self.dispatch_level_change(SIGNAL_SET_LEVEL, args[0]) + elif cmd in ( + LevelControl.ServerCommandDefs.move.name, + LevelControl.ServerCommandDefs.move_with_on_off.name, + ): + # We should dim slowly -- for now, just step once + rate = args[1] + if args[0] == 0xFF: + rate = 10 # Should read default move rate + self.dispatch_level_change(SIGNAL_MOVE_LEVEL, -rate if args[0] else rate) + elif cmd in ( + LevelControl.ServerCommandDefs.step.name, + LevelControl.ServerCommandDefs.step_with_on_off.name, + ): + # Step (technically may change on/off) + self.dispatch_level_change( + SIGNAL_MOVE_LEVEL, -args[1] if args[0] else args[1] + ) + + @callback + def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: + """Handle attribute updates on this cluster.""" + self.debug("received attribute: %s update with value: %s", attrid, value) + if attrid == self.CURRENT_LEVEL: + self.dispatch_level_change(SIGNAL_SET_LEVEL, value) + + def dispatch_level_change(self, command, level): + """Dispatch level change.""" + self.async_send_signal(f"{self.unique_id}_{command}", level) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateInput.cluster_id) +class MultistateInputClusterHandler(ClusterHandler): + """Multistate Input cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=MultistateInput.AttributeDefs.present_value.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateOutput.cluster_id) +class MultistateOutputClusterHandler(ClusterHandler): + """Multistate Output cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=MultistateOutput.AttributeDefs.present_value.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateValue.cluster_id) +class MultistateValueClusterHandler(ClusterHandler): + """Multistate Value cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + 
attr=MultistateValue.AttributeDefs.present_value.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + + +@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(OnOff.cluster_id) +class OnOffClientClusterHandler(ClientClusterHandler): + """OnOff client cluster handler.""" + + +@registries.BINDABLE_CLUSTERS.register(OnOff.cluster_id) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(OnOff.cluster_id) +class OnOffClusterHandler(ClusterHandler): + """Cluster handler for the OnOff Zigbee cluster.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=OnOff.AttributeDefs.on_off.name, config=REPORT_CONFIG_IMMEDIATE + ), + ) + ZCL_INIT_ATTRS = { + OnOff.AttributeDefs.start_up_on_off.name: True, + } + + def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: + """Initialize OnOffClusterHandler.""" + super().__init__(cluster, endpoint) + self._off_listener = None + + if endpoint.device.quirk_id == TUYA_PLUG_ONOFF: + self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() + self.ZCL_INIT_ATTRS["backlight_mode"] = True + self.ZCL_INIT_ATTRS["power_on_state"] = True + self.ZCL_INIT_ATTRS["child_lock"] = True + + @classmethod + def matches(cls, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> bool: + """Filter the cluster match for specific devices.""" + return not ( + cluster.endpoint.device.manufacturer == "Konke" + and cluster.endpoint.device.model + in ("3AFE280100510001", "3AFE170100510001") + ) + + @property + def on_off(self) -> bool | None: + """Return cached value of on/off attribute.""" + return self.cluster.get(OnOff.AttributeDefs.on_off.name) + + async def turn_on(self) -> None: + """Turn the on off cluster on.""" + result = await self.on() + if result[1] is not Status.SUCCESS: + raise HomeAssistantError(f"Failed to turn on: {result[1]}") + self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.true) + + async def turn_off(self) -> None: + """Turn the on off cluster off.""" + result = await self.off() + if result[1] is not Status.SUCCESS: + raise HomeAssistantError(f"Failed to turn off: {result[1]}") + self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.false) + + @callback + def cluster_command(self, tsn, command_id, args): + """Handle commands received to this cluster.""" + cmd = parse_and_log_command(self, tsn, command_id, args) + + if cmd in ( + OnOff.ServerCommandDefs.off.name, + OnOff.ServerCommandDefs.off_with_effect.name, + ): + self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.false) + elif cmd in ( + OnOff.ServerCommandDefs.on.name, + OnOff.ServerCommandDefs.on_with_recall_global_scene.name, + ): + self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.true) + elif cmd == OnOff.ServerCommandDefs.on_with_timed_off.name: + should_accept = args[0] + on_time = args[1] + # 0 is always accept 1 is only accept when already on + if should_accept == 0 or (should_accept == 1 and bool(self.on_off)): + if self._off_listener is not None: + self._off_listener() + self._off_listener = None + self.cluster.update_attribute( + OnOff.AttributeDefs.on_off.id, t.Bool.true + ) + if on_time > 0: + self._off_listener = async_call_later( + self._endpoint.device.hass, + (on_time / 10), # value is in 10ths of a second + self.set_to_off, + ) + elif cmd == "toggle": + self.cluster.update_attribute( + OnOff.AttributeDefs.on_off.id, not bool(self.on_off) + ) + + @callback + def set_to_off(self, *_): + """Set the state to off.""" + self._off_listener = None + self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.false) + + 
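A small standalone sketch of the on_with_timed_off bookkeeping handled above (illustrative only; the helper names are hypothetical and Zigbee framing is ignored): the first command field decides whether the command is accepted, and on_time arrives in tenths of a second before being handed to async_call_later.

def accepts_on_with_timed_off(on_off_control: int, currently_on: bool) -> bool:
    """Mirror the acceptance rule above: 0 = always accept, 1 = only when already on."""
    return on_off_control == 0 or (on_off_control == 1 and currently_on)

def off_delay_seconds(on_time: int) -> float:
    """Convert on_time, expressed in tenths of a second, to seconds."""
    return on_time / 10

assert accepts_on_with_timed_off(0, currently_on=False)
assert not accepts_on_with_timed_off(1, currently_on=False)
assert accepts_on_with_timed_off(1, currently_on=True)
assert off_delay_seconds(300) == 30.0  # a device sending 300 stays on for 30 seconds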
@callback + def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: + """Handle attribute updates on this cluster.""" + if attrid == OnOff.AttributeDefs.on_off.id: + self.async_send_signal( + f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", + attrid, + OnOff.AttributeDefs.on_off.name, + value, + ) + + async def async_update(self): + """Initialize cluster handler.""" + if self.cluster.is_client: + return + from_cache = not self._endpoint.device.is_mains_powered + self.debug("attempting to update onoff state - from cache: %s", from_cache) + await self.get_attribute_value( + OnOff.AttributeDefs.on_off.id, from_cache=from_cache + ) + await super().async_update() + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(OnOffConfiguration.cluster_id) +class OnOffConfigurationClusterHandler(ClusterHandler): + """OnOff Configuration cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Ota.cluster_id) +class OtaClusterHandler(ClusterHandler): + """OTA cluster handler.""" + + BIND: bool = False + + # Some devices have this cluster in the wrong collection (e.g. Third Reality) + ZCL_INIT_ATTRS = { + Ota.AttributeDefs.current_file_version.name: True, + } + + @property + def current_file_version(self) -> int | None: + """Return cached value of current_file_version attribute.""" + return self.cluster.get(Ota.AttributeDefs.current_file_version.name) + + +@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(Ota.cluster_id) +class OtaClientClusterHandler(ClientClusterHandler): + """OTA client cluster handler.""" + + BIND: bool = False + + ZCL_INIT_ATTRS = { + Ota.AttributeDefs.current_file_version.name: True, + } + + @callback + def attribute_updated(self, attrid: int, value: Any, timestamp: Any) -> None: + """Handle an attribute updated on this cluster.""" + # We intentionally avoid the `ClientClusterHandler` attribute update handler: + # it emits a logbook event on every update, which pollutes the logbook + ClusterHandler.attribute_updated(self, attrid, value, timestamp) + + @property + def current_file_version(self) -> int | None: + """Return cached value of current_file_version attribute.""" + return self.cluster.get(Ota.AttributeDefs.current_file_version.name) + + @callback + def cluster_command( + self, tsn: int, command_id: int, args: list[Any] | None + ) -> None: + """Handle OTA commands.""" + if command_id not in self.cluster.server_commands: + return + + signal_id = self._endpoint.unique_id.split("-")[0] + cmd_name = self.cluster.server_commands[command_id].name + + if cmd_name == Ota.ServerCommandDefs.query_next_image.name: + assert args + + current_file_version = args[3] + self.cluster.update_attribute( + Ota.AttributeDefs.current_file_version.id, current_file_version + ) + self.async_send_signal( + SIGNAL_UPDATE_DEVICE.format(signal_id), current_file_version + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Partition.cluster_id) +class PartitionClusterHandler(ClusterHandler): + """Partition cluster handler.""" + + +@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(PollControl.cluster_id) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PollControl.cluster_id) +class PollControlClusterHandler(ClusterHandler): + """Poll Control cluster handler.""" + + CHECKIN_INTERVAL = 55 * 60 * 4 # 55min + CHECKIN_FAST_POLL_TIMEOUT = 2 * 4 # 2s + LONG_POLL = 6 * 4 # 6s + _IGNORED_MANUFACTURER_ID = { + 4476, + } # IKEA + + async def async_configure_cluster_handler_specific(self) -> None: + """Configure cluster handler: set check-in interval.""" + await 
self.write_attributes_safe( + {PollControl.AttributeDefs.checkin_interval.name: self.CHECKIN_INTERVAL} + ) + + @callback + def cluster_command( + self, tsn: int, command_id: int, args: list[Any] | None + ) -> None: + """Handle commands received to this cluster.""" + if command_id in self.cluster.client_commands: + cmd_name = self.cluster.client_commands[command_id].name + else: + cmd_name = command_id + + self.debug("Received %s tsn command '%s': %s", tsn, cmd_name, args) + self.zha_send_event(cmd_name, args) + if cmd_name == PollControl.ClientCommandDefs.checkin.name: + self.cluster.create_catching_task(self.check_in_response(tsn)) + + async def check_in_response(self, tsn: int) -> None: + """Respond to checkin command.""" + await self.checkin_response(True, self.CHECKIN_FAST_POLL_TIMEOUT, tsn=tsn) + if self._endpoint.device.manufacturer_code not in self._IGNORED_MANUFACTURER_ID: + await self.set_long_poll_interval(self.LONG_POLL) + await self.fast_poll_stop() + + @callback + def skip_manufacturer_id(self, manufacturer_code: int) -> None: + """Block a specific manufacturer id from changing default polling.""" + self._IGNORED_MANUFACTURER_ID.add(manufacturer_code) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PowerConfiguration.cluster_id) +class PowerConfigurationClusterHandler(ClusterHandler): + """Cluster handler for the zigbee power configuration cluster.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=PowerConfiguration.AttributeDefs.battery_voltage.name, + config=REPORT_CONFIG_BATTERY_SAVE, + ), + AttrReportConfig( + attr=PowerConfiguration.AttributeDefs.battery_percentage_remaining.name, + config=REPORT_CONFIG_BATTERY_SAVE, + ), + ) + + def async_initialize_cluster_handler_specific(self, from_cache: bool) -> Coroutine: + """Initialize cluster handler specific attrs.""" + attributes = [ + PowerConfiguration.AttributeDefs.battery_size.name, + PowerConfiguration.AttributeDefs.battery_quantity.name, + ] + return self.get_attributes( + attributes, from_cache=from_cache, only_cache=from_cache + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PowerProfile.cluster_id) +class PowerProfileClusterHandler(ClusterHandler): + """Power Profile cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(RSSILocation.cluster_id) +class RSSILocationClusterHandler(ClusterHandler): + """RSSI Location cluster handler.""" + + +@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(Scenes.cluster_id) +class ScenesClientClusterHandler(ClientClusterHandler): + """Scenes cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Scenes.cluster_id) +class ScenesClusterHandler(ClusterHandler): + """Scenes cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Time.cluster_id) +class TimeClusterHandler(ClusterHandler): + """Time cluster handler.""" diff --git a/homeassistant/components/zha/core/cluster_handlers/helpers.py b/homeassistant/components/zha/core/cluster_handlers/helpers.py new file mode 100644 index 00000000000..46557bf23a8 --- /dev/null +++ b/homeassistant/components/zha/core/cluster_handlers/helpers.py @@ -0,0 +1,23 @@ +"""Helpers for use with ZHA Zigbee cluster handlers.""" + +from . 
import ClusterHandler + + +def is_hue_motion_sensor(cluster_handler: ClusterHandler) -> bool: + """Return true if the manufacturer and model match known Hue motion sensor models.""" + return cluster_handler.cluster.endpoint.manufacturer in ( + "Philips", + "Signify Netherlands B.V.", + ) and cluster_handler.cluster.endpoint.model in ( + "SML001", + "SML002", + "SML003", + "SML004", + ) + + +def is_sonoff_presence_sensor(cluster_handler: ClusterHandler) -> bool: + """Return true if the manufacturer and model match known Sonoff sensor models.""" + return cluster_handler.cluster.endpoint.manufacturer in ( + "SONOFF", + ) and cluster_handler.cluster.endpoint.model in ("SNZB-06P",) diff --git a/homeassistant/components/zha/core/cluster_handlers/homeautomation.py b/homeassistant/components/zha/core/cluster_handlers/homeautomation.py new file mode 100644 index 00000000000..b287cb98f6a --- /dev/null +++ b/homeassistant/components/zha/core/cluster_handlers/homeautomation.py @@ -0,0 +1,236 @@ +"""Home automation cluster handlers module for Zigbee Home Automation.""" + +from __future__ import annotations + +import enum + +from zigpy.zcl.clusters.homeautomation import ( + ApplianceEventAlerts, + ApplianceIdentification, + ApplianceStatistics, + Diagnostic, + ElectricalMeasurement, + MeterIdentification, +) + +from .. import registries +from ..const import ( + CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, + REPORT_CONFIG_DEFAULT, + REPORT_CONFIG_OP, + SIGNAL_ATTR_UPDATED, +) +from . import AttrReportConfig, ClusterHandler + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ApplianceEventAlerts.cluster_id) +class ApplianceEventAlertsClusterHandler(ClusterHandler): + """Appliance Event Alerts cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ApplianceIdentification.cluster_id) +class ApplianceIdentificationClusterHandler(ClusterHandler): + """Appliance Identification cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ApplianceStatistics.cluster_id) +class ApplianceStatisticsClusterHandler(ClusterHandler): + """Appliance Statistics cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Diagnostic.cluster_id) +class DiagnosticClusterHandler(ClusterHandler): + """Diagnostic cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ElectricalMeasurement.cluster_id) +class ElectricalMeasurementClusterHandler(ClusterHandler): + """Cluster handler that polls active power level.""" + + CLUSTER_HANDLER_NAME = CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT + + class MeasurementType(enum.IntFlag): + """Measurement types.""" + + ACTIVE_MEASUREMENT = 1 + REACTIVE_MEASUREMENT = 2 + APPARENT_MEASUREMENT = 4 + PHASE_A_MEASUREMENT = 8 + PHASE_B_MEASUREMENT = 16 + PHASE_C_MEASUREMENT = 32 + DC_MEASUREMENT = 64 + HARMONICS_MEASUREMENT = 128 + POWER_QUALITY_MEASUREMENT = 256 + + REPORT_CONFIG = ( + AttrReportConfig( + attr=ElectricalMeasurement.AttributeDefs.active_power.name, + config=REPORT_CONFIG_OP, + ), + AttrReportConfig( + attr=ElectricalMeasurement.AttributeDefs.active_power_max.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=ElectricalMeasurement.AttributeDefs.apparent_power.name, + config=REPORT_CONFIG_OP, + ), + AttrReportConfig( + attr=ElectricalMeasurement.AttributeDefs.rms_current.name, + config=REPORT_CONFIG_OP, + ), + AttrReportConfig( + attr=ElectricalMeasurement.AttributeDefs.rms_current_max.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + 
attr=ElectricalMeasurement.AttributeDefs.rms_voltage.name, + config=REPORT_CONFIG_OP, + ), + AttrReportConfig( + attr=ElectricalMeasurement.AttributeDefs.rms_voltage_max.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=ElectricalMeasurement.AttributeDefs.ac_frequency.name, + config=REPORT_CONFIG_OP, + ), + AttrReportConfig( + attr=ElectricalMeasurement.AttributeDefs.ac_frequency_max.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + ZCL_INIT_ATTRS = { + ElectricalMeasurement.AttributeDefs.ac_current_divisor.name: True, + ElectricalMeasurement.AttributeDefs.ac_current_multiplier.name: True, + ElectricalMeasurement.AttributeDefs.ac_power_divisor.name: True, + ElectricalMeasurement.AttributeDefs.ac_power_multiplier.name: True, + ElectricalMeasurement.AttributeDefs.ac_voltage_divisor.name: True, + ElectricalMeasurement.AttributeDefs.ac_voltage_multiplier.name: True, + ElectricalMeasurement.AttributeDefs.ac_frequency_divisor.name: True, + ElectricalMeasurement.AttributeDefs.ac_frequency_multiplier.name: True, + ElectricalMeasurement.AttributeDefs.measurement_type.name: True, + ElectricalMeasurement.AttributeDefs.power_divisor.name: True, + ElectricalMeasurement.AttributeDefs.power_multiplier.name: True, + ElectricalMeasurement.AttributeDefs.power_factor.name: True, + } + + async def async_update(self): + """Retrieve latest state.""" + self.debug("async_update") + + # This is a polling cluster handler. Don't allow cache. + attrs = [ + a["attr"] + for a in self.REPORT_CONFIG + if a["attr"] not in self.cluster.unsupported_attributes + ] + result = await self.get_attributes(attrs, from_cache=False, only_cache=False) + if result: + for attr, value in result.items(): + self.async_send_signal( + f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", + self.cluster.find_attribute(attr).id, + attr, + value, + ) + + @property + def ac_current_divisor(self) -> int: + """Return ac current divisor.""" + return ( + self.cluster.get( + ElectricalMeasurement.AttributeDefs.ac_current_divisor.name + ) + or 1 + ) + + @property + def ac_current_multiplier(self) -> int: + """Return ac current multiplier.""" + return ( + self.cluster.get( + ElectricalMeasurement.AttributeDefs.ac_current_multiplier.name + ) + or 1 + ) + + @property + def ac_voltage_divisor(self) -> int: + """Return ac voltage divisor.""" + return ( + self.cluster.get( + ElectricalMeasurement.AttributeDefs.ac_voltage_divisor.name + ) + or 1 + ) + + @property + def ac_voltage_multiplier(self) -> int: + """Return ac voltage multiplier.""" + return ( + self.cluster.get( + ElectricalMeasurement.AttributeDefs.ac_voltage_multiplier.name + ) + or 1 + ) + + @property + def ac_frequency_divisor(self) -> int: + """Return ac frequency divisor.""" + return ( + self.cluster.get( + ElectricalMeasurement.AttributeDefs.ac_frequency_divisor.name + ) + or 1 + ) + + @property + def ac_frequency_multiplier(self) -> int: + """Return ac frequency multiplier.""" + return ( + self.cluster.get( + ElectricalMeasurement.AttributeDefs.ac_frequency_multiplier.name + ) + or 1 + ) + + @property + def ac_power_divisor(self) -> int: + """Return active power divisor.""" + return self.cluster.get( + ElectricalMeasurement.AttributeDefs.ac_power_divisor.name, + self.cluster.get(ElectricalMeasurement.AttributeDefs.power_divisor.name) + or 1, + ) + + @property + def ac_power_multiplier(self) -> int: + """Return active power divisor.""" + return self.cluster.get( + ElectricalMeasurement.AttributeDefs.ac_power_multiplier.name, + 
self.cluster.get(ElectricalMeasurement.AttributeDefs.power_multiplier.name) + or 1, + ) + + @property + def measurement_type(self) -> str | None: + """Return Measurement type.""" + if ( + meas_type := self.cluster.get( + ElectricalMeasurement.AttributeDefs.measurement_type.name + ) + ) is None: + return None + + meas_type = self.MeasurementType(meas_type) + return ", ".join( + m.name + for m in self.MeasurementType + if m in meas_type and m.name is not None + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MeterIdentification.cluster_id) +class MeterIdentificationClusterHandler(ClusterHandler): + """Metering Identification cluster handler.""" diff --git a/homeassistant/components/zha/core/cluster_handlers/hvac.py b/homeassistant/components/zha/core/cluster_handlers/hvac.py new file mode 100644 index 00000000000..1230549832b --- /dev/null +++ b/homeassistant/components/zha/core/cluster_handlers/hvac.py @@ -0,0 +1,347 @@ +"""HVAC cluster handlers module for Zigbee Home Automation. + +For more details about this component, please refer to the documentation at +https://home-assistant.io/integrations/zha/ +""" + +from __future__ import annotations + +from typing import Any + +from zigpy.zcl.clusters.hvac import ( + Dehumidification, + Fan, + Pump, + Thermostat, + UserInterface, +) + +from homeassistant.core import callback + +from .. import registries +from ..const import ( + REPORT_CONFIG_MAX_INT, + REPORT_CONFIG_MIN_INT, + REPORT_CONFIG_OP, + SIGNAL_ATTR_UPDATED, +) +from . import AttrReportConfig, ClusterHandler + +REPORT_CONFIG_CLIMATE = (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 25) +REPORT_CONFIG_CLIMATE_DEMAND = (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 5) +REPORT_CONFIG_CLIMATE_DISCRETE = (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 1) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Dehumidification.cluster_id) +class DehumidificationClusterHandler(ClusterHandler): + """Dehumidification cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Fan.cluster_id) +class FanClusterHandler(ClusterHandler): + """Fan cluster handler.""" + + _value_attribute = 0 + + REPORT_CONFIG = ( + AttrReportConfig(attr=Fan.AttributeDefs.fan_mode.name, config=REPORT_CONFIG_OP), + ) + ZCL_INIT_ATTRS = {Fan.AttributeDefs.fan_mode_sequence.name: True} + + @property + def fan_mode(self) -> int | None: + """Return current fan mode.""" + return self.cluster.get(Fan.AttributeDefs.fan_mode.name) + + @property + def fan_mode_sequence(self) -> int | None: + """Return possible fan mode speeds.""" + return self.cluster.get(Fan.AttributeDefs.fan_mode_sequence.name) + + async def async_set_speed(self, value) -> None: + """Set the speed of the fan.""" + await self.write_attributes_safe({Fan.AttributeDefs.fan_mode.name: value}) + + async def async_update(self) -> None: + """Retrieve latest state.""" + await self.get_attribute_value( + Fan.AttributeDefs.fan_mode.name, from_cache=False + ) + + @callback + def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: + """Handle attribute update from fan cluster.""" + attr_name = self._get_attribute_name(attrid) + self.debug( + "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value + ) + if attr_name == "fan_mode": + self.async_send_signal( + f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Pump.cluster_id) +class PumpClusterHandler(ClusterHandler): + """Pump cluster handler.""" + + 
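The REPORT_CONFIG_CLIMATE* tuples above follow the same (min_interval, max_interval, reportable_change) shape used by every REPORT_CONFIG entry in these handlers; configure_reporting() earlier reads them as config[0], config[1], and config[2]. A standalone sketch of that convention follows; the 30/900 interval numbers are placeholders rather than the actual REPORT_CONFIG_MIN_INT / REPORT_CONFIG_MAX_INT constants from ..const.

from typing import NamedTuple

class ReportingConfig(NamedTuple):
    min_interval: int        # device reports no more often than this many seconds
    max_interval: int        # device reports at least this often, even without change
    reportable_change: int   # smallest change that triggers a report before max_interval

# Placeholder interval values; the change thresholds mirror the tuples above.
REPORT_CONFIG_CLIMATE = ReportingConfig(30, 900, 25)
REPORT_CONFIG_CLIMATE_DISCRETE = ReportingConfig(30, 900, 1)

def describe(attr: str, cfg: ReportingConfig) -> str:
    return (
        f"{attr}: report no more than every {cfg.min_interval}s, at least every "
        f"{cfg.max_interval}s, and whenever the value changes by {cfg.reportable_change} or more"
    )

print(describe("local_temperature", REPORT_CONFIG_CLIMATE))
print(describe("occupancy", REPORT_CONFIG_CLIMATE_DISCRETE))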
+@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Thermostat.cluster_id) +class ThermostatClusterHandler(ClusterHandler): + """Thermostat cluster handler.""" + + REPORT_CONFIG: tuple[AttrReportConfig, ...] = ( + AttrReportConfig( + attr=Thermostat.AttributeDefs.local_temperature.name, + config=REPORT_CONFIG_CLIMATE, + ), + AttrReportConfig( + attr=Thermostat.AttributeDefs.occupied_cooling_setpoint.name, + config=REPORT_CONFIG_CLIMATE, + ), + AttrReportConfig( + attr=Thermostat.AttributeDefs.occupied_heating_setpoint.name, + config=REPORT_CONFIG_CLIMATE, + ), + AttrReportConfig( + attr=Thermostat.AttributeDefs.unoccupied_cooling_setpoint.name, + config=REPORT_CONFIG_CLIMATE, + ), + AttrReportConfig( + attr=Thermostat.AttributeDefs.unoccupied_heating_setpoint.name, + config=REPORT_CONFIG_CLIMATE, + ), + AttrReportConfig( + attr=Thermostat.AttributeDefs.running_mode.name, + config=REPORT_CONFIG_CLIMATE, + ), + AttrReportConfig( + attr=Thermostat.AttributeDefs.running_state.name, + config=REPORT_CONFIG_CLIMATE_DEMAND, + ), + AttrReportConfig( + attr=Thermostat.AttributeDefs.system_mode.name, + config=REPORT_CONFIG_CLIMATE, + ), + AttrReportConfig( + attr=Thermostat.AttributeDefs.occupancy.name, + config=REPORT_CONFIG_CLIMATE_DISCRETE, + ), + AttrReportConfig( + attr=Thermostat.AttributeDefs.pi_cooling_demand.name, + config=REPORT_CONFIG_CLIMATE_DEMAND, + ), + AttrReportConfig( + attr=Thermostat.AttributeDefs.pi_heating_demand.name, + config=REPORT_CONFIG_CLIMATE_DEMAND, + ), + ) + ZCL_INIT_ATTRS: dict[str, bool] = { + Thermostat.AttributeDefs.abs_min_heat_setpoint_limit.name: True, + Thermostat.AttributeDefs.abs_max_heat_setpoint_limit.name: True, + Thermostat.AttributeDefs.abs_min_cool_setpoint_limit.name: True, + Thermostat.AttributeDefs.abs_max_cool_setpoint_limit.name: True, + Thermostat.AttributeDefs.ctrl_sequence_of_oper.name: False, + Thermostat.AttributeDefs.max_cool_setpoint_limit.name: True, + Thermostat.AttributeDefs.max_heat_setpoint_limit.name: True, + Thermostat.AttributeDefs.min_cool_setpoint_limit.name: True, + Thermostat.AttributeDefs.min_heat_setpoint_limit.name: True, + Thermostat.AttributeDefs.local_temperature_calibration.name: True, + Thermostat.AttributeDefs.setpoint_change_source.name: True, + } + + @property + def abs_max_cool_setpoint_limit(self) -> int: + """Absolute maximum cooling setpoint.""" + return self.cluster.get( + Thermostat.AttributeDefs.abs_max_cool_setpoint_limit.name, 3200 + ) + + @property + def abs_min_cool_setpoint_limit(self) -> int: + """Absolute minimum cooling setpoint.""" + return self.cluster.get( + Thermostat.AttributeDefs.abs_min_cool_setpoint_limit.name, 1600 + ) + + @property + def abs_max_heat_setpoint_limit(self) -> int: + """Absolute maximum heating setpoint.""" + return self.cluster.get( + Thermostat.AttributeDefs.abs_max_heat_setpoint_limit.name, 3000 + ) + + @property + def abs_min_heat_setpoint_limit(self) -> int: + """Absolute minimum heating setpoint.""" + return self.cluster.get( + Thermostat.AttributeDefs.abs_min_heat_setpoint_limit.name, 700 + ) + + @property + def ctrl_sequence_of_oper(self) -> int: + """Control Sequence of operations attribute.""" + return self.cluster.get( + Thermostat.AttributeDefs.ctrl_sequence_of_oper.name, 0xFF + ) + + @property + def max_cool_setpoint_limit(self) -> int: + """Maximum cooling setpoint.""" + sp_limit = self.cluster.get( + Thermostat.AttributeDefs.max_cool_setpoint_limit.name + ) + if sp_limit is None: + return self.abs_max_cool_setpoint_limit + return sp_limit + + @property + def 
min_cool_setpoint_limit(self) -> int: + """Minimum cooling setpoint.""" + sp_limit = self.cluster.get( + Thermostat.AttributeDefs.min_cool_setpoint_limit.name + ) + if sp_limit is None: + return self.abs_min_cool_setpoint_limit + return sp_limit + + @property + def max_heat_setpoint_limit(self) -> int: + """Maximum heating setpoint.""" + sp_limit = self.cluster.get( + Thermostat.AttributeDefs.max_heat_setpoint_limit.name + ) + if sp_limit is None: + return self.abs_max_heat_setpoint_limit + return sp_limit + + @property + def min_heat_setpoint_limit(self) -> int: + """Minimum heating setpoint.""" + sp_limit = self.cluster.get( + Thermostat.AttributeDefs.min_heat_setpoint_limit.name + ) + if sp_limit is None: + return self.abs_min_heat_setpoint_limit + return sp_limit + + @property + def local_temperature(self) -> int | None: + """Thermostat temperature.""" + return self.cluster.get(Thermostat.AttributeDefs.local_temperature.name) + + @property + def occupancy(self) -> int | None: + """Is occupancy detected.""" + return self.cluster.get(Thermostat.AttributeDefs.occupancy.name) + + @property + def occupied_cooling_setpoint(self) -> int | None: + """Temperature when room is occupied.""" + return self.cluster.get(Thermostat.AttributeDefs.occupied_cooling_setpoint.name) + + @property + def occupied_heating_setpoint(self) -> int | None: + """Temperature when room is occupied.""" + return self.cluster.get(Thermostat.AttributeDefs.occupied_heating_setpoint.name) + + @property + def pi_cooling_demand(self) -> int: + """Cooling demand.""" + return self.cluster.get(Thermostat.AttributeDefs.pi_cooling_demand.name) + + @property + def pi_heating_demand(self) -> int: + """Heating demand.""" + return self.cluster.get(Thermostat.AttributeDefs.pi_heating_demand.name) + + @property + def running_mode(self) -> int | None: + """Thermostat running mode.""" + return self.cluster.get(Thermostat.AttributeDefs.running_mode.name) + + @property + def running_state(self) -> int | None: + """Thermostat running state, state of heat, cool, fan relays.""" + return self.cluster.get(Thermostat.AttributeDefs.running_state.name) + + @property + def system_mode(self) -> int | None: + """System mode.""" + return self.cluster.get(Thermostat.AttributeDefs.system_mode.name) + + @property + def unoccupied_cooling_setpoint(self) -> int | None: + """Temperature when room is not occupied.""" + return self.cluster.get( + Thermostat.AttributeDefs.unoccupied_cooling_setpoint.name + ) + + @property + def unoccupied_heating_setpoint(self) -> int | None: + """Temperature when room is not occupied.""" + return self.cluster.get( + Thermostat.AttributeDefs.unoccupied_heating_setpoint.name + ) + + @callback + def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: + """Handle attribute update cluster.""" + attr_name = self._get_attribute_name(attrid) + self.debug( + "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value + ) + self.async_send_signal( + f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", + attrid, + attr_name, + value, + ) + + async def async_set_operation_mode(self, mode) -> bool: + """Set Operation mode.""" + await self.write_attributes_safe( + {Thermostat.AttributeDefs.system_mode.name: mode} + ) + return True + + async def async_set_heating_setpoint( + self, temperature: int, is_away: bool = False + ) -> bool: + """Set heating setpoint.""" + attr = ( + Thermostat.AttributeDefs.unoccupied_heating_setpoint.name + if is_away + else Thermostat.AttributeDefs.occupied_heating_setpoint.name + ) + await 
self.write_attributes_safe({attr: temperature}) + return True + + async def async_set_cooling_setpoint( + self, temperature: int, is_away: bool = False + ) -> bool: + """Set cooling setpoint.""" + attr = ( + Thermostat.AttributeDefs.unoccupied_cooling_setpoint.name + if is_away + else Thermostat.AttributeDefs.occupied_cooling_setpoint.name + ) + await self.write_attributes_safe({attr: temperature}) + return True + + async def get_occupancy(self) -> bool | None: + """Get unreportable occupancy attribute.""" + res, fail = await self.read_attributes( + [Thermostat.AttributeDefs.occupancy.name] + ) + self.debug("read 'occupancy' attr, success: %s, fail: %s", res, fail) + if Thermostat.AttributeDefs.occupancy.name not in res: + return None + return bool(self.occupancy) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(UserInterface.cluster_id) +class UserInterfaceClusterHandler(ClusterHandler): + """User interface (thermostat) cluster handler.""" + + ZCL_INIT_ATTRS = {UserInterface.AttributeDefs.keypad_lockout.name: True} diff --git a/homeassistant/components/zha/core/cluster_handlers/lighting.py b/homeassistant/components/zha/core/cluster_handlers/lighting.py new file mode 100644 index 00000000000..bde0fdbb0e7 --- /dev/null +++ b/homeassistant/components/zha/core/cluster_handlers/lighting.py @@ -0,0 +1,196 @@ +"""Lighting cluster handlers module for Zigbee Home Automation.""" + +from __future__ import annotations + +from functools import cached_property + +from zigpy.zcl.clusters.lighting import Ballast, Color + +from .. import registries +from ..const import REPORT_CONFIG_DEFAULT +from . import AttrReportConfig, ClientClusterHandler, ClusterHandler + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Ballast.cluster_id) +class BallastClusterHandler(ClusterHandler): + """Ballast cluster handler.""" + + +@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(Color.cluster_id) +class ColorClientClusterHandler(ClientClusterHandler): + """Color client cluster handler.""" + + +@registries.BINDABLE_CLUSTERS.register(Color.cluster_id) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Color.cluster_id) +class ColorClusterHandler(ClusterHandler): + """Color cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=Color.AttributeDefs.current_x.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=Color.AttributeDefs.current_y.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=Color.AttributeDefs.current_hue.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=Color.AttributeDefs.current_saturation.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=Color.AttributeDefs.color_temperature.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + MAX_MIREDS: int = 500 + MIN_MIREDS: int = 153 + ZCL_INIT_ATTRS = { + Color.AttributeDefs.color_mode.name: False, + Color.AttributeDefs.color_temp_physical_min.name: True, + Color.AttributeDefs.color_temp_physical_max.name: True, + Color.AttributeDefs.color_capabilities.name: True, + Color.AttributeDefs.color_loop_active.name: False, + Color.AttributeDefs.enhanced_current_hue.name: False, + Color.AttributeDefs.start_up_color_temperature.name: True, + Color.AttributeDefs.options.name: True, + } + + @cached_property + def color_capabilities(self) -> Color.ColorCapabilities: + """Return ZCL color capabilities of the light.""" + color_capabilities = self.cluster.get( + Color.AttributeDefs.color_capabilities.name + ) + if color_capabilities is None: + return 
Color.ColorCapabilities.XY_attributes + return Color.ColorCapabilities(color_capabilities) + + @property + def color_mode(self) -> int | None: + """Return cached value of the color_mode attribute.""" + return self.cluster.get(Color.AttributeDefs.color_mode.name) + + @property + def color_loop_active(self) -> int | None: + """Return cached value of the color_loop_active attribute.""" + return self.cluster.get(Color.AttributeDefs.color_loop_active.name) + + @property + def color_temperature(self) -> int | None: + """Return cached value of color temperature.""" + return self.cluster.get(Color.AttributeDefs.color_temperature.name) + + @property + def current_x(self) -> int | None: + """Return cached value of the current_x attribute.""" + return self.cluster.get(Color.AttributeDefs.current_x.name) + + @property + def current_y(self) -> int | None: + """Return cached value of the current_y attribute.""" + return self.cluster.get(Color.AttributeDefs.current_y.name) + + @property + def current_hue(self) -> int | None: + """Return cached value of the current_hue attribute.""" + return self.cluster.get(Color.AttributeDefs.current_hue.name) + + @property + def enhanced_current_hue(self) -> int | None: + """Return cached value of the enhanced_current_hue attribute.""" + return self.cluster.get(Color.AttributeDefs.enhanced_current_hue.name) + + @property + def current_saturation(self) -> int | None: + """Return cached value of the current_saturation attribute.""" + return self.cluster.get(Color.AttributeDefs.current_saturation.name) + + @property + def min_mireds(self) -> int: + """Return the coldest color_temp that this cluster handler supports.""" + min_mireds = self.cluster.get( + Color.AttributeDefs.color_temp_physical_min.name, self.MIN_MIREDS + ) + if min_mireds == 0: + self.warning( + ( + "[Min mireds is 0, setting to %s] Please open an issue on the" + " quirks repo to have this device corrected" + ), + self.MIN_MIREDS, + ) + min_mireds = self.MIN_MIREDS + return min_mireds + + @property + def max_mireds(self) -> int: + """Return the warmest color_temp that this cluster handler supports.""" + max_mireds = self.cluster.get( + Color.AttributeDefs.color_temp_physical_max.name, self.MAX_MIREDS + ) + if max_mireds == 0: + self.warning( + ( + "[Max mireds is 0, setting to %s] Please open an issue on the" + " quirks repo to have this device corrected" + ), + self.MAX_MIREDS, + ) + max_mireds = self.MAX_MIREDS + return max_mireds + + @property + def hs_supported(self) -> bool: + """Return True if the cluster handler supports hue and saturation.""" + return ( + self.color_capabilities is not None + and Color.ColorCapabilities.Hue_and_saturation in self.color_capabilities + ) + + @property + def enhanced_hue_supported(self) -> bool: + """Return True if the cluster handler supports enhanced hue and saturation.""" + return ( + self.color_capabilities is not None + and Color.ColorCapabilities.Enhanced_hue in self.color_capabilities + ) + + @property + def xy_supported(self) -> bool: + """Return True if the cluster handler supports xy.""" + return ( + self.color_capabilities is not None + and Color.ColorCapabilities.XY_attributes in self.color_capabilities + ) + + @property + def color_temp_supported(self) -> bool: + """Return True if the cluster handler supports color temperature.""" + return ( + self.color_capabilities is not None + and Color.ColorCapabilities.Color_temperature in self.color_capabilities + ) or self.color_temperature is not None + + @property + def color_loop_supported(self) -> bool: + 
"""Return True if the cluster handler supports color loop.""" + return ( + self.color_capabilities is not None + and Color.ColorCapabilities.Color_loop in self.color_capabilities + ) + + @property + def options(self) -> Color.Options: + """Return ZCL options of the cluster handler.""" + return Color.Options(self.cluster.get(Color.AttributeDefs.options.name, 0)) + + @property + def execute_if_off_supported(self) -> bool: + """Return True if the cluster handler can execute commands when off.""" + return Color.Options.Execute_if_off in self.options diff --git a/homeassistant/components/zha/core/cluster_handlers/lightlink.py b/homeassistant/components/zha/core/cluster_handlers/lightlink.py new file mode 100644 index 00000000000..85ec6905069 --- /dev/null +++ b/homeassistant/components/zha/core/cluster_handlers/lightlink.py @@ -0,0 +1,48 @@ +"""Lightlink cluster handlers module for Zigbee Home Automation.""" + +import zigpy.exceptions +from zigpy.zcl.clusters.lightlink import LightLink +from zigpy.zcl.foundation import GENERAL_COMMANDS, GeneralCommand + +from .. import registries +from . import ClusterHandler, ClusterHandlerStatus + + +@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(LightLink.cluster_id) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(LightLink.cluster_id) +class LightLinkClusterHandler(ClusterHandler): + """Lightlink cluster handler.""" + + BIND: bool = False + + async def async_configure(self) -> None: + """Add Coordinator to LightLink group.""" + + if self._endpoint.device.skip_configuration: + self._status = ClusterHandlerStatus.CONFIGURED + return + + application = self._endpoint.zigpy_endpoint.device.application + try: + coordinator = application.get_device(application.state.node_info.ieee) + except KeyError: + self.warning("Aborting - unable to locate required coordinator device.") + return + + try: + rsp = await self.cluster.get_group_identifiers(0) + except (zigpy.exceptions.ZigbeeException, TimeoutError) as exc: + self.warning("Couldn't get list of groups: %s", str(exc)) + return + + if isinstance(rsp, GENERAL_COMMANDS[GeneralCommand.Default_Response].schema): + groups = [] + else: + groups = rsp.group_info_records + + if groups: + for group in groups: + self.debug("Adding coordinator to 0x%04x group id", group.group_id) + await coordinator.add_to_group(group.group_id) + else: + await coordinator.add_to_group(0x0000, name="Default Lightlink Group") diff --git a/homeassistant/components/zha/core/cluster_handlers/manufacturerspecific.py b/homeassistant/components/zha/core/cluster_handlers/manufacturerspecific.py new file mode 100644 index 00000000000..9d5d68d2c7e --- /dev/null +++ b/homeassistant/components/zha/core/cluster_handlers/manufacturerspecific.py @@ -0,0 +1,515 @@ +"""Manufacturer specific cluster handlers module for Zigbee Home Automation.""" + +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING, Any + +from zhaquirks.inovelli.types import AllLEDEffectType, SingleLEDEffectType +from zhaquirks.quirk_ids import ( + DANFOSS_ALLY_THERMOSTAT, + TUYA_PLUG_MANUFACTURER, + XIAOMI_AQARA_VIBRATION_AQ1, +) +import zigpy.zcl +from zigpy.zcl import clusters +from zigpy.zcl.clusters.closures import DoorLock + +from homeassistant.core import callback + +from .. import registries +from ..const import ( + ATTR_ATTRIBUTE_ID, + ATTR_ATTRIBUTE_NAME, + ATTR_VALUE, + REPORT_CONFIG_ASAP, + REPORT_CONFIG_DEFAULT, + REPORT_CONFIG_IMMEDIATE, + REPORT_CONFIG_MAX_INT, + REPORT_CONFIG_MIN_INT, + SIGNAL_ATTR_UPDATED, + UNKNOWN, +) +from . 
import AttrReportConfig, ClientClusterHandler, ClusterHandler +from .general import MultistateInputClusterHandler +from .homeautomation import DiagnosticClusterHandler +from .hvac import ThermostatClusterHandler, UserInterfaceClusterHandler + +if TYPE_CHECKING: + from ..endpoint import Endpoint + +_LOGGER = logging.getLogger(__name__) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( + registries.SMARTTHINGS_HUMIDITY_CLUSTER +) +class SmartThingsHumidityClusterHandler(ClusterHandler): + """Smart Things Humidity cluster handler.""" + + REPORT_CONFIG = ( + { + "attr": "measured_value", + "config": (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 50), + }, + ) + + +@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(0xFD00) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFD00) +class OsramButtonClusterHandler(ClusterHandler): + """Osram button cluster handler.""" + + REPORT_CONFIG = () + + +@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(registries.PHILLIPS_REMOTE_CLUSTER) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(registries.PHILLIPS_REMOTE_CLUSTER) +class PhillipsRemoteClusterHandler(ClusterHandler): + """Phillips remote cluster handler.""" + + REPORT_CONFIG = () + + +@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(registries.TUYA_MANUFACTURER_CLUSTER) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( + registries.TUYA_MANUFACTURER_CLUSTER +) +class TuyaClusterHandler(ClusterHandler): + """Cluster handler for the Tuya manufacturer Zigbee cluster.""" + + REPORT_CONFIG = () + + def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: + """Initialize TuyaClusterHandler.""" + super().__init__(cluster, endpoint) + if endpoint.device.quirk_id == TUYA_PLUG_MANUFACTURER: + self.ZCL_INIT_ATTRS = { + "backlight_mode": True, + "power_on_state": True, + } + + +@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(0xFCC0) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFCC0) +class OppleRemoteClusterHandler(ClusterHandler): + """Opple cluster handler.""" + + REPORT_CONFIG = () + + def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: + """Initialize Opple cluster handler.""" + super().__init__(cluster, endpoint) + if self.cluster.endpoint.model == "lumi.motion.ac02": + self.ZCL_INIT_ATTRS = { + "detection_interval": True, + "motion_sensitivity": True, + "trigger_indicator": True, + } + elif self.cluster.endpoint.model == "lumi.motion.agl04": + self.ZCL_INIT_ATTRS = { + "detection_interval": True, + "motion_sensitivity": True, + } + elif self.cluster.endpoint.model == "lumi.motion.ac01": + self.ZCL_INIT_ATTRS = { + "presence": True, + "monitoring_mode": True, + "motion_sensitivity": True, + "approach_distance": True, + } + elif self.cluster.endpoint.model in ("lumi.plug.mmeu01", "lumi.plug.maeu01"): + self.ZCL_INIT_ATTRS = { + "power_outage_memory": True, + "consumer_connected": True, + } + elif self.cluster.endpoint.model == "aqara.feeder.acn001": + self.ZCL_INIT_ATTRS = { + "portions_dispensed": True, + "weight_dispensed": True, + "error_detected": True, + "disable_led_indicator": True, + "child_lock": True, + "feeding_mode": True, + "serving_size": True, + "portion_weight": True, + } + elif self.cluster.endpoint.model == "lumi.airrtc.agl001": + self.ZCL_INIT_ATTRS = { + "system_mode": True, + "preset": True, + "window_detection": True, + "valve_detection": True, + "valve_alarm": True, + "child_lock": True, + "away_preset_temperature": True, + "window_open": True, + "calibrated": True, + "schedule": True, + "sensor": 
True, + } + elif self.cluster.endpoint.model == "lumi.sensor_smoke.acn03": + self.ZCL_INIT_ATTRS = { + "buzzer_manual_mute": True, + "smoke_density": True, + "heartbeat_indicator": True, + "buzzer_manual_alarm": True, + "buzzer": True, + "linkage_alarm": True, + } + elif self.cluster.endpoint.model == "lumi.magnet.ac01": + self.ZCL_INIT_ATTRS = { + "detection_distance": True, + } + elif self.cluster.endpoint.model == "lumi.switch.acn047": + self.ZCL_INIT_ATTRS = { + "switch_mode": True, + "switch_type": True, + "startup_on_off": True, + "decoupled_mode": True, + } + elif self.cluster.endpoint.model == "lumi.curtain.agl001": + self.ZCL_INIT_ATTRS = { + "hooks_state": True, + "hooks_lock": True, + "positions_stored": True, + "light_level": True, + "hand_open": True, + } + + async def async_initialize_cluster_handler_specific(self, from_cache: bool) -> None: + """Initialize cluster handler specific.""" + if self.cluster.endpoint.model in ("lumi.motion.ac02", "lumi.motion.agl04"): + interval = self.cluster.get("detection_interval", self.cluster.get(0x0102)) + if interval is not None: + self.debug("Loaded detection interval at startup: %s", interval) + self.cluster.endpoint.ias_zone.reset_s = int(interval) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( + registries.SMARTTHINGS_ACCELERATION_CLUSTER +) +class SmartThingsAccelerationClusterHandler(ClusterHandler): + """Smart Things Acceleration cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig(attr="acceleration", config=REPORT_CONFIG_ASAP), + AttrReportConfig(attr="x_axis", config=REPORT_CONFIG_ASAP), + AttrReportConfig(attr="y_axis", config=REPORT_CONFIG_ASAP), + AttrReportConfig(attr="z_axis", config=REPORT_CONFIG_ASAP), + ) + + @classmethod + def matches(cls, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> bool: + """Filter the cluster match for specific devices.""" + return cluster.endpoint.device.manufacturer in ( + "CentraLite", + "Samjin", + "SmartThings", + ) + + @callback + def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: + """Handle attribute updates on this cluster.""" + try: + attr_name = self._cluster.attributes[attrid].name + except KeyError: + attr_name = UNKNOWN + + if attrid == self.value_attribute: + self.async_send_signal( + f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", + attrid, + attr_name, + value, + ) + return + + self.zha_send_event( + SIGNAL_ATTR_UPDATED, + { + ATTR_ATTRIBUTE_ID: attrid, + ATTR_ATTRIBUTE_NAME: attr_name, + ATTR_VALUE: value, + }, + ) + + +@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(0xFC31) +class InovelliNotificationClientClusterHandler(ClientClusterHandler): + """Inovelli Notification cluster handler.""" + + @callback + def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: + """Handle an attribute updated on this cluster.""" + + @callback + def cluster_command(self, tsn, command_id, args): + """Handle a cluster command received on this cluster.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFC31) +class InovelliConfigEntityClusterHandler(ClusterHandler): + """Inovelli Configuration Entity cluster handler.""" + + REPORT_CONFIG = () + + def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: + """Initialize Inovelli cluster handler.""" + super().__init__(cluster, endpoint) + if self.cluster.endpoint.model == "VZM31-SN": + self.ZCL_INIT_ATTRS = { + "dimming_speed_up_remote": True, + "dimming_speed_up_local": True, + "ramp_rate_off_to_on_local": True, + "ramp_rate_off_to_on_remote": True, + 
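# Inovelli VZM31-SN (Blue Series 2-in-1 switch/dimmer) configuration attributes initialized at startup; the VZM35-SN fan-switch branch below differs mainly in its fan-specific entries. +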
"dimming_speed_down_remote": True, + "dimming_speed_down_local": True, + "ramp_rate_on_to_off_local": True, + "ramp_rate_on_to_off_remote": True, + "minimum_level": True, + "maximum_level": True, + "invert_switch": True, + "auto_off_timer": True, + "default_level_local": True, + "default_level_remote": True, + "state_after_power_restored": True, + "load_level_indicator_timeout": True, + "active_power_reports": True, + "periodic_power_and_energy_reports": True, + "active_energy_reports": True, + "power_type": False, + "switch_type": False, + "increased_non_neutral_output": True, + "button_delay": False, + "smart_bulb_mode": False, + "double_tap_up_enabled": True, + "double_tap_down_enabled": True, + "double_tap_up_level": True, + "double_tap_down_level": True, + "led_color_when_on": True, + "led_color_when_off": True, + "led_intensity_when_on": True, + "led_intensity_when_off": True, + "led_scaling_mode": True, + "aux_switch_scenes": True, + "binding_off_to_on_sync_level": True, + "local_protection": False, + "output_mode": False, + "on_off_led_mode": True, + "firmware_progress_led": True, + "relay_click_in_on_off_mode": True, + "disable_clear_notifications_double_tap": True, + } + elif self.cluster.endpoint.model == "VZM35-SN": + self.ZCL_INIT_ATTRS = { + "dimming_speed_up_remote": True, + "dimming_speed_up_local": True, + "ramp_rate_off_to_on_local": True, + "ramp_rate_off_to_on_remote": True, + "dimming_speed_down_remote": True, + "dimming_speed_down_local": True, + "ramp_rate_on_to_off_local": True, + "ramp_rate_on_to_off_remote": True, + "minimum_level": True, + "maximum_level": True, + "invert_switch": True, + "auto_off_timer": True, + "default_level_local": True, + "default_level_remote": True, + "state_after_power_restored": True, + "load_level_indicator_timeout": True, + "power_type": False, + "switch_type": False, + "non_neutral_aux_med_gear_learn_value": True, + "non_neutral_aux_low_gear_learn_value": True, + "quick_start_time": False, + "button_delay": False, + "smart_fan_mode": False, + "double_tap_up_enabled": True, + "double_tap_down_enabled": True, + "double_tap_up_level": True, + "double_tap_down_level": True, + "led_color_when_on": True, + "led_color_when_off": True, + "led_intensity_when_on": True, + "led_intensity_when_off": True, + "aux_switch_scenes": True, + "local_protection": False, + "output_mode": False, + "on_off_led_mode": True, + "firmware_progress_led": True, + "smart_fan_led_display_levels": True, + } + + async def issue_all_led_effect( + self, + effect_type: AllLEDEffectType | int = AllLEDEffectType.Fast_Blink, + color: int = 200, + level: int = 100, + duration: int = 3, + **kwargs: Any, + ) -> None: + """Issue all LED effect command. + + This command is used to issue an LED effect to all LEDs on the device. + """ + + await self.led_effect(effect_type, color, level, duration, expect_reply=False) + + async def issue_individual_led_effect( + self, + led_number: int = 1, + effect_type: SingleLEDEffectType | int = SingleLEDEffectType.Fast_Blink, + color: int = 200, + level: int = 100, + duration: int = 3, + **kwargs: Any, + ) -> None: + """Issue individual LED effect command. + + This command is used to issue an LED effect to the specified LED on the device. 
+ """ + + await self.individual_led_effect( + led_number, effect_type, color, level, duration, expect_reply=False + ) + + +@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(registries.IKEA_AIR_PURIFIER_CLUSTER) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( + registries.IKEA_AIR_PURIFIER_CLUSTER +) +class IkeaAirPurifierClusterHandler(ClusterHandler): + """IKEA Air Purifier cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig(attr="filter_run_time", config=REPORT_CONFIG_DEFAULT), + AttrReportConfig(attr="replace_filter", config=REPORT_CONFIG_IMMEDIATE), + AttrReportConfig(attr="filter_life_time", config=REPORT_CONFIG_DEFAULT), + AttrReportConfig(attr="disable_led", config=REPORT_CONFIG_IMMEDIATE), + AttrReportConfig(attr="air_quality_25pm", config=REPORT_CONFIG_IMMEDIATE), + AttrReportConfig(attr="child_lock", config=REPORT_CONFIG_IMMEDIATE), + AttrReportConfig(attr="fan_mode", config=REPORT_CONFIG_IMMEDIATE), + AttrReportConfig(attr="fan_speed", config=REPORT_CONFIG_IMMEDIATE), + AttrReportConfig(attr="device_run_time", config=REPORT_CONFIG_DEFAULT), + ) + + @property + def fan_mode(self) -> int | None: + """Return current fan mode.""" + return self.cluster.get("fan_mode") + + @property + def fan_mode_sequence(self) -> int | None: + """Return possible fan mode speeds.""" + return self.cluster.get("fan_mode_sequence") + + async def async_set_speed(self, value) -> None: + """Set the speed of the fan.""" + await self.write_attributes_safe({"fan_mode": value}) + + async def async_update(self) -> None: + """Retrieve latest state.""" + await self.get_attribute_value("fan_mode", from_cache=False) + + @callback + def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: + """Handle attribute update from fan cluster.""" + attr_name = self._get_attribute_name(attrid) + self.debug( + "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value + ) + if attr_name == "fan_mode": + self.async_send_signal( + f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value + ) + + +@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(0xFC80) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFC80) +class IkeaRemoteClusterHandler(ClusterHandler): + """Ikea Matter remote cluster handler.""" + + REPORT_CONFIG = () + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( + DoorLock.cluster_id, XIAOMI_AQARA_VIBRATION_AQ1 +) +class XiaomiVibrationAQ1ClusterHandler(MultistateInputClusterHandler): + """Xiaomi DoorLock Cluster is in fact a MultiStateInput Cluster.""" + + +@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(0xFC11) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFC11) +class SonoffPresenceSenorClusterHandler(ClusterHandler): + """SonoffPresenceSensor cluster handler.""" + + def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: + """Initialize SonoffPresenceSensor cluster handler.""" + super().__init__(cluster, endpoint) + if self.cluster.endpoint.model == "SNZB-06P": + self.ZCL_INIT_ATTRS = {"last_illumination_state": True} + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( + clusters.hvac.Thermostat.cluster_id, DANFOSS_ALLY_THERMOSTAT +) +class DanfossThermostatClusterHandler(ThermostatClusterHandler): + """Thermostat cluster handler for the Danfoss TRV and derivatives.""" + + REPORT_CONFIG = ( + *ThermostatClusterHandler.REPORT_CONFIG, + AttrReportConfig(attr="open_window_detection", config=REPORT_CONFIG_DEFAULT), + AttrReportConfig(attr="heat_required", config=REPORT_CONFIG_ASAP), + 
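# Danfoss-specific reports layered on top of the base ThermostatClusterHandler set; heat_required above uses REPORT_CONFIG_ASAP, presumably so heat-demand changes are pushed with minimal delay, while the remaining attributes use the default reporting config. +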
AttrReportConfig(attr="mounting_mode_active", config=REPORT_CONFIG_DEFAULT), + AttrReportConfig(attr="load_estimate", config=REPORT_CONFIG_DEFAULT), + AttrReportConfig(attr="adaptation_run_status", config=REPORT_CONFIG_DEFAULT), + AttrReportConfig(attr="preheat_status", config=REPORT_CONFIG_DEFAULT), + AttrReportConfig(attr="preheat_time", config=REPORT_CONFIG_DEFAULT), + ) + + ZCL_INIT_ATTRS = { + **ThermostatClusterHandler.ZCL_INIT_ATTRS, + "external_open_window_detected": True, + "window_open_feature": True, + "exercise_day_of_week": True, + "exercise_trigger_time": True, + "mounting_mode_control": False, # Can change + "orientation": True, + "external_measured_room_sensor": False, # Can change + "radiator_covered": True, + "heat_available": True, + "load_balancing_enable": True, + "load_room_mean": False, # Can change + "control_algorithm_scale_factor": True, + "regulation_setpoint_offset": True, + "adaptation_run_control": True, + "adaptation_run_settings": True, + } + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( + clusters.hvac.UserInterface.cluster_id, DANFOSS_ALLY_THERMOSTAT +) +class DanfossUserInterfaceClusterHandler(UserInterfaceClusterHandler): + """Interface cluster handler for the Danfoss TRV and derivatives.""" + + ZCL_INIT_ATTRS = { + **UserInterfaceClusterHandler.ZCL_INIT_ATTRS, + "viewing_direction": True, + } + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( + clusters.homeautomation.Diagnostic.cluster_id, DANFOSS_ALLY_THERMOSTAT +) +class DanfossDiagnosticClusterHandler(DiagnosticClusterHandler): + """Diagnostic cluster handler for the Danfoss TRV and derivatives.""" + + REPORT_CONFIG = ( + *DiagnosticClusterHandler.REPORT_CONFIG, + AttrReportConfig(attr="sw_error_code", config=REPORT_CONFIG_DEFAULT), + AttrReportConfig(attr="motor_step_counter", config=REPORT_CONFIG_DEFAULT), + ) diff --git a/homeassistant/components/zha/core/cluster_handlers/measurement.py b/homeassistant/components/zha/core/cluster_handlers/measurement.py new file mode 100644 index 00000000000..768de8c4c73 --- /dev/null +++ b/homeassistant/components/zha/core/cluster_handlers/measurement.py @@ -0,0 +1,208 @@ +"""Measurement cluster handlers module for Zigbee Home Automation.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import zigpy.zcl +from zigpy.zcl.clusters.measurement import ( + PM25, + CarbonDioxideConcentration, + CarbonMonoxideConcentration, + FlowMeasurement, + FormaldehydeConcentration, + IlluminanceLevelSensing, + IlluminanceMeasurement, + LeafWetness, + OccupancySensing, + PressureMeasurement, + RelativeHumidity, + SoilMoisture, + TemperatureMeasurement, +) + +from .. import registries +from ..const import ( + REPORT_CONFIG_DEFAULT, + REPORT_CONFIG_IMMEDIATE, + REPORT_CONFIG_MAX_INT, + REPORT_CONFIG_MIN_INT, +) +from . 
import AttrReportConfig, ClusterHandler +from .helpers import is_hue_motion_sensor, is_sonoff_presence_sensor + +if TYPE_CHECKING: + from ..endpoint import Endpoint + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(FlowMeasurement.cluster_id) +class FlowMeasurementClusterHandler(ClusterHandler): + """Flow Measurement cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=FlowMeasurement.AttributeDefs.measured_value.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(IlluminanceLevelSensing.cluster_id) +class IlluminanceLevelSensingClusterHandler(ClusterHandler): + """Illuminance Level Sensing cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=IlluminanceLevelSensing.AttributeDefs.level_status.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(IlluminanceMeasurement.cluster_id) +class IlluminanceMeasurementClusterHandler(ClusterHandler): + """Illuminance Measurement cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=IlluminanceMeasurement.AttributeDefs.measured_value.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(OccupancySensing.cluster_id) +class OccupancySensingClusterHandler(ClusterHandler): + """Occupancy Sensing cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=OccupancySensing.AttributeDefs.occupancy.name, + config=REPORT_CONFIG_IMMEDIATE, + ), + ) + + def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: + """Initialize Occupancy cluster handler.""" + super().__init__(cluster, endpoint) + if is_hue_motion_sensor(self): + self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() + self.ZCL_INIT_ATTRS["sensitivity"] = True + if is_sonoff_presence_sensor(self): + self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() + self.ZCL_INIT_ATTRS["ultrasonic_o_to_u_delay"] = True + self.ZCL_INIT_ATTRS["ultrasonic_u_to_o_threshold"] = True + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PressureMeasurement.cluster_id) +class PressureMeasurementClusterHandler(ClusterHandler): + """Pressure measurement cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=PressureMeasurement.AttributeDefs.measured_value.name, + config=REPORT_CONFIG_DEFAULT, + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(RelativeHumidity.cluster_id) +class RelativeHumidityClusterHandler(ClusterHandler): + """Relative Humidity measurement cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=RelativeHumidity.AttributeDefs.measured_value.name, + config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 100), + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(SoilMoisture.cluster_id) +class SoilMoistureClusterHandler(ClusterHandler): + """Soil Moisture measurement cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=SoilMoisture.AttributeDefs.measured_value.name, + config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 100), + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(LeafWetness.cluster_id) +class LeafWetnessClusterHandler(ClusterHandler): + """Leaf Wetness measurement cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=LeafWetness.AttributeDefs.measured_value.name, + config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 100), + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(TemperatureMeasurement.cluster_id) +class 
TemperatureMeasurementClusterHandler(ClusterHandler): + """Temperature measurement cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=TemperatureMeasurement.AttributeDefs.measured_value.name, + config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 50), + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( + CarbonMonoxideConcentration.cluster_id +) +class CarbonMonoxideConcentrationClusterHandler(ClusterHandler): + """Carbon Monoxide measurement cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=CarbonMonoxideConcentration.AttributeDefs.measured_value.name, + config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 0.000001), + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( + CarbonDioxideConcentration.cluster_id +) +class CarbonDioxideConcentrationClusterHandler(ClusterHandler): + """Carbon Dioxide measurement cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=CarbonDioxideConcentration.AttributeDefs.measured_value.name, + config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 0.000001), + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PM25.cluster_id) +class PM25ClusterHandler(ClusterHandler): + """Particulate Matter 2.5 microns or less measurement cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=PM25.AttributeDefs.measured_value.name, + config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 0.1), + ), + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( + FormaldehydeConcentration.cluster_id +) +class FormaldehydeConcentrationClusterHandler(ClusterHandler): + """Formaldehyde measurement cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=FormaldehydeConcentration.AttributeDefs.measured_value.name, + config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 0.000001), + ), + ) diff --git a/homeassistant/components/zha/core/cluster_handlers/protocol.py b/homeassistant/components/zha/core/cluster_handlers/protocol.py new file mode 100644 index 00000000000..e1e3d7a5413 --- /dev/null +++ b/homeassistant/components/zha/core/cluster_handlers/protocol.py @@ -0,0 +1,129 @@ +"""Protocol cluster handlers module for Zigbee Home Automation.""" + +from zigpy.zcl.clusters.protocol import ( + AnalogInputExtended, + AnalogInputRegular, + AnalogOutputExtended, + AnalogOutputRegular, + AnalogValueExtended, + AnalogValueRegular, + BacnetProtocolTunnel, + BinaryInputExtended, + BinaryInputRegular, + BinaryOutputExtended, + BinaryOutputRegular, + BinaryValueExtended, + BinaryValueRegular, + GenericTunnel, + MultistateInputExtended, + MultistateInputRegular, + MultistateOutputExtended, + MultistateOutputRegular, + MultistateValueExtended, + MultistateValueRegular, +) + +from .. import registries +from . 
import ClusterHandler + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogInputExtended.cluster_id) +class AnalogInputExtendedClusterHandler(ClusterHandler): + """Analog Input Extended cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogInputRegular.cluster_id) +class AnalogInputRegularClusterHandler(ClusterHandler): + """Analog Input Regular cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogOutputExtended.cluster_id) +class AnalogOutputExtendedClusterHandler(ClusterHandler): + """Analog Output Regular cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogOutputRegular.cluster_id) +class AnalogOutputRegularClusterHandler(ClusterHandler): + """Analog Output Regular cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogValueExtended.cluster_id) +class AnalogValueExtendedClusterHandler(ClusterHandler): + """Analog Value Extended edition cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogValueRegular.cluster_id) +class AnalogValueRegularClusterHandler(ClusterHandler): + """Analog Value Regular cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BacnetProtocolTunnel.cluster_id) +class BacnetProtocolTunnelClusterHandler(ClusterHandler): + """Bacnet Protocol Tunnel cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryInputExtended.cluster_id) +class BinaryInputExtendedClusterHandler(ClusterHandler): + """Binary Input Extended cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryInputRegular.cluster_id) +class BinaryInputRegularClusterHandler(ClusterHandler): + """Binary Input Regular cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryOutputExtended.cluster_id) +class BinaryOutputExtendedClusterHandler(ClusterHandler): + """Binary Output Extended cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryOutputRegular.cluster_id) +class BinaryOutputRegularClusterHandler(ClusterHandler): + """Binary Output Regular cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryValueExtended.cluster_id) +class BinaryValueExtendedClusterHandler(ClusterHandler): + """Binary Value Extended cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryValueRegular.cluster_id) +class BinaryValueRegularClusterHandler(ClusterHandler): + """Binary Value Regular cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(GenericTunnel.cluster_id) +class GenericTunnelClusterHandler(ClusterHandler): + """Generic Tunnel cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateInputExtended.cluster_id) +class MultiStateInputExtendedClusterHandler(ClusterHandler): + """Multistate Input Extended cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateInputRegular.cluster_id) +class MultiStateInputRegularClusterHandler(ClusterHandler): + """Multistate Input Regular cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( + MultistateOutputExtended.cluster_id +) +class MultiStateOutputExtendedClusterHandler(ClusterHandler): + """Multistate Output Extended cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateOutputRegular.cluster_id) +class MultiStateOutputRegularClusterHandler(ClusterHandler): + """Multistate Output Regular 
cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateValueExtended.cluster_id) +class MultiStateValueExtendedClusterHandler(ClusterHandler): + """Multistate Value Extended cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateValueRegular.cluster_id) +class MultiStateValueRegularClusterHandler(ClusterHandler): + """Multistate Value Regular cluster handler.""" diff --git a/homeassistant/components/zha/core/cluster_handlers/security.py b/homeassistant/components/zha/core/cluster_handlers/security.py new file mode 100644 index 00000000000..8ebe09cef03 --- /dev/null +++ b/homeassistant/components/zha/core/cluster_handlers/security.py @@ -0,0 +1,400 @@ +"""Security cluster handlers module for Zigbee Home Automation. + +For more details about this component, please refer to the documentation at +https://home-assistant.io/integrations/zha/ +""" + +from __future__ import annotations + +from collections.abc import Callable +from typing import TYPE_CHECKING, Any + +import zigpy.zcl +from zigpy.zcl.clusters.security import IasAce as AceCluster, IasWd, IasZone + +from homeassistant.core import callback +from homeassistant.exceptions import HomeAssistantError + +from .. import registries +from ..const import ( + SIGNAL_ATTR_UPDATED, + WARNING_DEVICE_MODE_EMERGENCY, + WARNING_DEVICE_SOUND_HIGH, + WARNING_DEVICE_SQUAWK_MODE_ARMED, + WARNING_DEVICE_STROBE_HIGH, + WARNING_DEVICE_STROBE_YES, +) +from . import ClusterHandler, ClusterHandlerStatus + +if TYPE_CHECKING: + from ..endpoint import Endpoint + +SIGNAL_ARMED_STATE_CHANGED = "zha_armed_state_changed" +SIGNAL_ALARM_TRIGGERED = "zha_armed_triggered" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AceCluster.cluster_id) +class IasAceClusterHandler(ClusterHandler): + """IAS Ancillary Control Equipment cluster handler.""" + + def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: + """Initialize IAS Ancillary Control Equipment cluster handler.""" + super().__init__(cluster, endpoint) + self.command_map: dict[int, Callable[..., Any]] = { + AceCluster.ServerCommandDefs.arm.id: self.arm, + AceCluster.ServerCommandDefs.bypass.id: self._bypass, + AceCluster.ServerCommandDefs.emergency.id: self._emergency, + AceCluster.ServerCommandDefs.fire.id: self._fire, + AceCluster.ServerCommandDefs.panic.id: self._panic, + AceCluster.ServerCommandDefs.get_zone_id_map.id: self._get_zone_id_map, + AceCluster.ServerCommandDefs.get_zone_info.id: self._get_zone_info, + AceCluster.ServerCommandDefs.get_panel_status.id: self._send_panel_status_response, + AceCluster.ServerCommandDefs.get_bypassed_zone_list.id: self._get_bypassed_zone_list, + AceCluster.ServerCommandDefs.get_zone_status.id: self._get_zone_status, + } + self.arm_map: dict[AceCluster.ArmMode, Callable[..., Any]] = { + AceCluster.ArmMode.Disarm: self._disarm, + AceCluster.ArmMode.Arm_All_Zones: self._arm_away, + AceCluster.ArmMode.Arm_Day_Home_Only: self._arm_day, + AceCluster.ArmMode.Arm_Night_Sleep_Only: self._arm_night, + } + self.armed_state: AceCluster.PanelStatus = AceCluster.PanelStatus.Panel_Disarmed + self.invalid_tries: int = 0 + + # These will all be setup by the entity from ZHA configuration + self.panel_code: str = "1234" + self.code_required_arm_actions = False + self.max_invalid_tries: int = 3 + + # where do we store this to handle restarts + self.alarm_status: AceCluster.AlarmStatus = AceCluster.AlarmStatus.No_Alarm + + @callback + def cluster_command(self, tsn, command_id, args) -> None: + """Handle 
commands received to this cluster.""" + self.debug( + "received command %s", self._cluster.server_commands[command_id].name + ) + self.command_map[command_id](*args) + + def arm(self, arm_mode: int, code: str | None, zone_id: int) -> None: + """Handle the IAS ACE arm command.""" + mode = AceCluster.ArmMode(arm_mode) + + self.zha_send_event( + AceCluster.ServerCommandDefs.arm.name, + { + "arm_mode": mode.value, + "arm_mode_description": mode.name, + "code": code, + "zone_id": zone_id, + }, + ) + + zigbee_reply = self.arm_map[mode](code) + self._endpoint.device.hass.async_create_task(zigbee_reply) + + if self.invalid_tries >= self.max_invalid_tries: + self.alarm_status = AceCluster.AlarmStatus.Emergency + self.armed_state = AceCluster.PanelStatus.In_Alarm + self.async_send_signal(f"{self.unique_id}_{SIGNAL_ALARM_TRIGGERED}") + else: + self.async_send_signal(f"{self.unique_id}_{SIGNAL_ARMED_STATE_CHANGED}") + self._send_panel_status_changed() + + def _disarm(self, code: str): + """Test the code and disarm the panel if the code is correct.""" + if ( + code != self.panel_code + and self.armed_state != AceCluster.PanelStatus.Panel_Disarmed + ): + self.debug("Invalid code supplied to IAS ACE") + self.invalid_tries += 1 + zigbee_reply = self.arm_response( + AceCluster.ArmNotification.Invalid_Arm_Disarm_Code + ) + else: + self.invalid_tries = 0 + if ( + self.armed_state == AceCluster.PanelStatus.Panel_Disarmed + and self.alarm_status == AceCluster.AlarmStatus.No_Alarm + ): + self.debug("IAS ACE already disarmed") + zigbee_reply = self.arm_response( + AceCluster.ArmNotification.Already_Disarmed + ) + else: + self.debug("Disarming all IAS ACE zones") + zigbee_reply = self.arm_response( + AceCluster.ArmNotification.All_Zones_Disarmed + ) + + self.armed_state = AceCluster.PanelStatus.Panel_Disarmed + self.alarm_status = AceCluster.AlarmStatus.No_Alarm + return zigbee_reply + + def _arm_day(self, code: str) -> None: + """Arm the panel for day / home zones.""" + return self._handle_arm( + code, + AceCluster.PanelStatus.Armed_Stay, + AceCluster.ArmNotification.Only_Day_Home_Zones_Armed, + ) + + def _arm_night(self, code: str) -> None: + """Arm the panel for night / sleep zones.""" + return self._handle_arm( + code, + AceCluster.PanelStatus.Armed_Night, + AceCluster.ArmNotification.Only_Night_Sleep_Zones_Armed, + ) + + def _arm_away(self, code: str) -> None: + """Arm the panel for away mode.""" + return self._handle_arm( + code, + AceCluster.PanelStatus.Armed_Away, + AceCluster.ArmNotification.All_Zones_Armed, + ) + + def _handle_arm( + self, + code: str, + panel_status: AceCluster.PanelStatus, + armed_type: AceCluster.ArmNotification, + ) -> None: + """Arm the panel with the specified statuses.""" + if self.code_required_arm_actions and code != self.panel_code: + self.debug("Invalid code supplied to IAS ACE") + zigbee_reply = self.arm_response( + AceCluster.ArmNotification.Invalid_Arm_Disarm_Code + ) + else: + self.debug("Arming all IAS ACE zones") + self.armed_state = panel_status + zigbee_reply = self.arm_response(armed_type) + return zigbee_reply + + def _bypass(self, zone_list, code) -> None: + """Handle the IAS ACE bypass command.""" + self.zha_send_event( + AceCluster.ServerCommandDefs.bypass.name, + {"zone_list": zone_list, "code": code}, + ) + + def _emergency(self) -> None: + """Handle the IAS ACE emergency command.""" + self._set_alarm(AceCluster.AlarmStatus.Emergency) + + def _fire(self) -> None: + """Handle the IAS ACE fire command.""" + self._set_alarm(AceCluster.AlarmStatus.Fire) + + def 
_panic(self) -> None: + """Handle the IAS ACE panic command.""" + self._set_alarm(AceCluster.AlarmStatus.Emergency_Panic) + + def _set_alarm(self, status: AceCluster.AlarmStatus) -> None: + """Set the specified alarm status.""" + self.alarm_status = status + self.armed_state = AceCluster.PanelStatus.In_Alarm + self.async_send_signal(f"{self.unique_id}_{SIGNAL_ALARM_TRIGGERED}") + self._send_panel_status_changed() + + def _get_zone_id_map(self): + """Handle the IAS ACE zone id map command.""" + + def _get_zone_info(self, zone_id): + """Handle the IAS ACE zone info command.""" + + def _send_panel_status_response(self) -> None: + """Handle the IAS ACE panel status response command.""" + response = self.panel_status_response( + self.armed_state, + 0x00, + AceCluster.AudibleNotification.Default_Sound, + self.alarm_status, + ) + self._endpoint.device.hass.async_create_task(response) + + def _send_panel_status_changed(self) -> None: + """Handle the IAS ACE panel status changed command.""" + response = self.panel_status_changed( + self.armed_state, + 0x00, + AceCluster.AudibleNotification.Default_Sound, + self.alarm_status, + ) + self._endpoint.device.hass.async_create_task(response) + + def _get_bypassed_zone_list(self): + """Handle the IAS ACE bypassed zone list command.""" + + def _get_zone_status( + self, starting_zone_id, max_zone_ids, zone_status_mask_flag, zone_status_mask + ): + """Handle the IAS ACE zone status command.""" + + +@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(IasWd.cluster_id) +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(IasWd.cluster_id) +class IasWdClusterHandler(ClusterHandler): + """IAS Warning Device cluster handler.""" + + @staticmethod + def set_bit(destination_value, destination_bit, source_value, source_bit): + """Set the specified bit in the value.""" + + if IasWdClusterHandler.get_bit(source_value, source_bit): + return destination_value | (1 << destination_bit) + return destination_value + + @staticmethod + def get_bit(value, bit): + """Get the specified bit from the value.""" + return (value & (1 << bit)) != 0 + + async def issue_squawk( + self, + mode=WARNING_DEVICE_SQUAWK_MODE_ARMED, + strobe=WARNING_DEVICE_STROBE_YES, + squawk_level=WARNING_DEVICE_SOUND_HIGH, + ): + """Issue a squawk command. + + This command uses the WD capabilities to emit a quick audible/visible + pulse called a "squawk". The squawk command has no effect if the WD + is currently active (warning in progress). + """ + value = 0 + value = IasWdClusterHandler.set_bit(value, 0, squawk_level, 0) + value = IasWdClusterHandler.set_bit(value, 1, squawk_level, 1) + + value = IasWdClusterHandler.set_bit(value, 3, strobe, 0) + + value = IasWdClusterHandler.set_bit(value, 4, mode, 0) + value = IasWdClusterHandler.set_bit(value, 5, mode, 1) + value = IasWdClusterHandler.set_bit(value, 6, mode, 2) + value = IasWdClusterHandler.set_bit(value, 7, mode, 3) + + await self.squawk(value) + + async def issue_start_warning( + self, + mode=WARNING_DEVICE_MODE_EMERGENCY, + strobe=WARNING_DEVICE_STROBE_YES, + siren_level=WARNING_DEVICE_SOUND_HIGH, + warning_duration=5, # seconds + strobe_duty_cycle=0x00, + strobe_intensity=WARNING_DEVICE_STROBE_HIGH, + ): + """Issue a start warning command. + + This command starts the WD operation. The WD alerts the surrounding area + by audible (siren) and visual (strobe) signals. + + strobe_duty_cycle indicates the length of the flash cycle. This provides a means + of varying the flash duration for different alarm types (e.g., fire, police, + burglar). 
Valid range is 0-100 in increments of 10. All other values SHALL + be rounded to the nearest valid value. Strobe SHALL calculate duty cycle over + a duration of one second. + + The ON state SHALL precede the OFF state. For example, if Strobe Duty Cycle + Field specifies “40,” then the strobe SHALL flash ON for 4/10ths of a second + and then turn OFF for 6/10ths of a second. + """ + value = 0 + value = IasWdClusterHandler.set_bit(value, 0, siren_level, 0) + value = IasWdClusterHandler.set_bit(value, 1, siren_level, 1) + + value = IasWdClusterHandler.set_bit(value, 2, strobe, 0) + + value = IasWdClusterHandler.set_bit(value, 4, mode, 0) + value = IasWdClusterHandler.set_bit(value, 5, mode, 1) + value = IasWdClusterHandler.set_bit(value, 6, mode, 2) + value = IasWdClusterHandler.set_bit(value, 7, mode, 3) + + await self.start_warning( + value, warning_duration, strobe_duty_cycle, strobe_intensity + ) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(IasZone.cluster_id) +class IASZoneClusterHandler(ClusterHandler): + """Cluster handler for the IASZone Zigbee cluster.""" + + ZCL_INIT_ATTRS = { + IasZone.AttributeDefs.zone_status.name: False, + IasZone.AttributeDefs.zone_state.name: True, + IasZone.AttributeDefs.zone_type.name: True, + } + + @callback + def cluster_command(self, tsn, command_id, args): + """Handle commands received to this cluster.""" + if command_id == IasZone.ClientCommandDefs.status_change_notification.id: + zone_status = args[0] + # update attribute cache with new zone status + self.cluster.update_attribute( + IasZone.AttributeDefs.zone_status.id, zone_status + ) + self.debug("Updated alarm state: %s", zone_status) + elif command_id == IasZone.ClientCommandDefs.enroll.id: + self.debug("Enroll requested") + self._cluster.create_catching_task( + self.enroll_response( + enroll_response_code=IasZone.EnrollResponse.Success, zone_id=0 + ) + ) + + async def async_configure(self): + """Configure IAS device.""" + await self.get_attribute_value( + IasZone.AttributeDefs.zone_type.name, from_cache=False + ) + if self._endpoint.device.skip_configuration: + self.debug("skipping IASZoneClusterHandler configuration") + return + + self.debug("started IASZoneClusterHandler configuration") + + await self.bind() + ieee = self.cluster.endpoint.device.application.state.node_info.ieee + + try: + await self.write_attributes_safe( + {IasZone.AttributeDefs.cie_addr.name: ieee} + ) + self.debug( + "wrote cie_addr: %s to '%s' cluster", + str(ieee), + self._cluster.ep_attribute, + ) + except HomeAssistantError as ex: + self.debug( + "Failed to write cie_addr: %s to '%s' cluster: %s", + str(ieee), + self._cluster.ep_attribute, + str(ex), + ) + + self.debug("Sending pro-active IAS enroll response") + self._cluster.create_catching_task( + self.enroll_response( + enroll_response_code=IasZone.EnrollResponse.Success, zone_id=0 + ) + ) + + self._status = ClusterHandlerStatus.CONFIGURED + self.debug("finished IASZoneClusterHandler configuration") + + @callback + def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: + """Handle attribute updates on this cluster.""" + if attrid == IasZone.AttributeDefs.zone_status.id: + self.async_send_signal( + f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", + attrid, + IasZone.AttributeDefs.zone_status.name, + value, + ) diff --git a/homeassistant/components/zha/core/cluster_handlers/smartenergy.py b/homeassistant/components/zha/core/cluster_handlers/smartenergy.py new file mode 100644 index 00000000000..d167b8b1752 --- /dev/null +++ 
b/homeassistant/components/zha/core/cluster_handlers/smartenergy.py @@ -0,0 +1,388 @@ +"""Smart energy cluster handlers module for Zigbee Home Automation.""" + +from __future__ import annotations + +import enum +from functools import partialmethod +from typing import TYPE_CHECKING + +import zigpy.zcl +from zigpy.zcl.clusters.smartenergy import ( + Calendar, + DeviceManagement, + Drlc, + EnergyManagement, + Events, + KeyEstablishment, + MduPairing, + Messaging, + Metering, + Prepayment, + Price, + Tunneling, +) + +from .. import registries +from ..const import ( + REPORT_CONFIG_ASAP, + REPORT_CONFIG_DEFAULT, + REPORT_CONFIG_OP, + SIGNAL_ATTR_UPDATED, +) +from . import AttrReportConfig, ClusterHandler + +if TYPE_CHECKING: + from ..endpoint import Endpoint + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Calendar.cluster_id) +class CalendarClusterHandler(ClusterHandler): + """Calendar cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(DeviceManagement.cluster_id) +class DeviceManagementClusterHandler(ClusterHandler): + """Device Management cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Drlc.cluster_id) +class DrlcClusterHandler(ClusterHandler): + """Demand Response and Load Control cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(EnergyManagement.cluster_id) +class EnergyManagementClusterHandler(ClusterHandler): + """Energy Management cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Events.cluster_id) +class EventsClusterHandler(ClusterHandler): + """Event cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(KeyEstablishment.cluster_id) +class KeyEstablishmentClusterHandler(ClusterHandler): + """Key Establishment cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MduPairing.cluster_id) +class MduPairingClusterHandler(ClusterHandler): + """Pairing cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Messaging.cluster_id) +class MessagingClusterHandler(ClusterHandler): + """Messaging cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Metering.cluster_id) +class MeteringClusterHandler(ClusterHandler): + """Metering cluster handler.""" + + REPORT_CONFIG = ( + AttrReportConfig( + attr=Metering.AttributeDefs.instantaneous_demand.name, + config=REPORT_CONFIG_OP, + ), + AttrReportConfig( + attr=Metering.AttributeDefs.current_summ_delivered.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=Metering.AttributeDefs.current_tier1_summ_delivered.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=Metering.AttributeDefs.current_tier2_summ_delivered.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=Metering.AttributeDefs.current_tier3_summ_delivered.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=Metering.AttributeDefs.current_tier4_summ_delivered.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=Metering.AttributeDefs.current_tier5_summ_delivered.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=Metering.AttributeDefs.current_tier6_summ_delivered.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=Metering.AttributeDefs.current_summ_received.name, + config=REPORT_CONFIG_DEFAULT, + ), + AttrReportConfig( + attr=Metering.AttributeDefs.status.name, + config=REPORT_CONFIG_ASAP, + ), + ) + ZCL_INIT_ATTRS = { + 
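# Read once while the cluster handler initializes; a True flag appears to mark values static enough to be served from the attribute cache (compare the Danfoss thermostat handler, where entries that can change at runtime are flagged False with "# Can change"). +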
Metering.AttributeDefs.demand_formatting.name: True, + Metering.AttributeDefs.divisor.name: True, + Metering.AttributeDefs.metering_device_type.name: True, + Metering.AttributeDefs.multiplier.name: True, + Metering.AttributeDefs.summation_formatting.name: True, + Metering.AttributeDefs.unit_of_measure.name: True, + } + + METERING_DEVICE_TYPES_ELECTRIC = { + 0, + 7, + 8, + 9, + 10, + 11, + 13, + 14, + 15, + 127, + 134, + 135, + 136, + 137, + 138, + 140, + 141, + 142, + } + METERING_DEVICE_TYPES_GAS = {1, 128} + METERING_DEVICE_TYPES_WATER = {2, 129} + METERING_DEVICE_TYPES_HEATING_COOLING = {3, 5, 6, 130, 132, 133} + + metering_device_type = { + 0: "Electric Metering", + 1: "Gas Metering", + 2: "Water Metering", + 3: "Thermal Metering", # deprecated + 4: "Pressure Metering", + 5: "Heat Metering", + 6: "Cooling Metering", + 7: "End Use Measurement Device (EUMD) for metering electric vehicle charging", + 8: "PV Generation Metering", + 9: "Wind Turbine Generation Metering", + 10: "Water Turbine Generation Metering", + 11: "Micro Generation Metering", + 12: "Solar Hot Water Generation Metering", + 13: "Electric Metering Element/Phase 1", + 14: "Electric Metering Element/Phase 2", + 15: "Electric Metering Element/Phase 3", + 127: "Mirrored Electric Metering", + 128: "Mirrored Gas Metering", + 129: "Mirrored Water Metering", + 130: "Mirrored Thermal Metering", # deprecated + 131: "Mirrored Pressure Metering", + 132: "Mirrored Heat Metering", + 133: "Mirrored Cooling Metering", + 134: "Mirrored End Use Measurement Device (EUMD) for metering electric vehicle charging", + 135: "Mirrored PV Generation Metering", + 136: "Mirrored Wind Turbine Generation Metering", + 137: "Mirrored Water Turbine Generation Metering", + 138: "Mirrored Micro Generation Metering", + 139: "Mirrored Solar Hot Water Generation Metering", + 140: "Mirrored Electric Metering Element/Phase 1", + 141: "Mirrored Electric Metering Element/Phase 2", + 142: "Mirrored Electric Metering Element/Phase 3", + } + + class DeviceStatusElectric(enum.IntFlag): + """Electric Metering Device Status.""" + + NO_ALARMS = 0 + CHECK_METER = 1 + LOW_BATTERY = 2 + TAMPER_DETECT = 4 + POWER_FAILURE = 8 + POWER_QUALITY = 16 + LEAK_DETECT = 32 # Really? 
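+ # Example: with metering_device_type == 0 (Electric Metering) and status == 0x42, the status property further down returns DeviceStatusElectric.SERVICE_DISCONNECT | DeviceStatusElectric.LOW_BATTERY.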
+ SERVICE_DISCONNECT = 64 + RESERVED = 128 + + class DeviceStatusGas(enum.IntFlag): + """Gas Metering Device Status.""" + + NO_ALARMS = 0 + CHECK_METER = 1 + LOW_BATTERY = 2 + TAMPER_DETECT = 4 + NOT_DEFINED = 8 + LOW_PRESSURE = 16 + LEAK_DETECT = 32 + SERVICE_DISCONNECT = 64 + REVERSE_FLOW = 128 + + class DeviceStatusWater(enum.IntFlag): + """Water Metering Device Status.""" + + NO_ALARMS = 0 + CHECK_METER = 1 + LOW_BATTERY = 2 + TAMPER_DETECT = 4 + PIPE_EMPTY = 8 + LOW_PRESSURE = 16 + LEAK_DETECT = 32 + SERVICE_DISCONNECT = 64 + REVERSE_FLOW = 128 + + class DeviceStatusHeatingCooling(enum.IntFlag): + """Heating and Cooling Metering Device Status.""" + + NO_ALARMS = 0 + CHECK_METER = 1 + LOW_BATTERY = 2 + TAMPER_DETECT = 4 + TEMPERATURE_SENSOR = 8 + BURST_DETECT = 16 + LEAK_DETECT = 32 + SERVICE_DISCONNECT = 64 + REVERSE_FLOW = 128 + + class DeviceStatusDefault(enum.IntFlag): + """Metering Device Status.""" + + NO_ALARMS = 0 + + class FormatSelector(enum.IntEnum): + """Format specified selector.""" + + DEMAND = 0 + SUMMATION = 1 + + def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: + """Initialize Metering.""" + super().__init__(cluster, endpoint) + self._format_spec: str | None = None + self._summa_format: str | None = None + + @property + def divisor(self) -> int: + """Return divisor for the value.""" + return self.cluster.get(Metering.AttributeDefs.divisor.name) or 1 + + @property + def device_type(self) -> str | int | None: + """Return metering device type.""" + dev_type = self.cluster.get(Metering.AttributeDefs.metering_device_type.name) + if dev_type is None: + return None + return self.metering_device_type.get(dev_type, dev_type) + + @property + def multiplier(self) -> int: + """Return multiplier for the value.""" + return self.cluster.get(Metering.AttributeDefs.multiplier.name) or 1 + + @property + def status(self) -> int | None: + """Return metering device status.""" + if (status := self.cluster.get(Metering.AttributeDefs.status.name)) is None: + return None + + metering_device_type = self.cluster.get( + Metering.AttributeDefs.metering_device_type.name + ) + if metering_device_type in self.METERING_DEVICE_TYPES_ELECTRIC: + return self.DeviceStatusElectric(status) + if metering_device_type in self.METERING_DEVICE_TYPES_GAS: + return self.DeviceStatusGas(status) + if metering_device_type in self.METERING_DEVICE_TYPES_WATER: + return self.DeviceStatusWater(status) + if metering_device_type in self.METERING_DEVICE_TYPES_HEATING_COOLING: + return self.DeviceStatusHeatingCooling(status) + return self.DeviceStatusDefault(status) + + @property + def unit_of_measurement(self) -> int: + """Return unit of measurement.""" + return self.cluster.get(Metering.AttributeDefs.unit_of_measure.name) + + async def async_initialize_cluster_handler_specific(self, from_cache: bool) -> None: + """Fetch config from device and updates format specifier.""" + + fmting = self.cluster.get( + Metering.AttributeDefs.demand_formatting.name, 0xF9 + ) # 1 digit to the right, 15 digits to the left + self._format_spec = self.get_formatting(fmting) + + fmting = self.cluster.get( + Metering.AttributeDefs.summation_formatting.name, 0xF9 + ) # 1 digit to the right, 15 digits to the left + self._summa_format = self.get_formatting(fmting) + + async def async_update(self) -> None: + """Retrieve latest state.""" + self.debug("async_update") + + attrs = [ + a["attr"] + for a in self.REPORT_CONFIG + if a["attr"] not in self.cluster.unsupported_attributes + ] + result = await self.get_attributes(attrs, 
from_cache=False, only_cache=False) + if result: + for attr, value in result.items(): + self.async_send_signal( + f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", + self.cluster.find_attribute(attr).id, + attr, + value, + ) + + @staticmethod + def get_formatting(formatting: int) -> str: + """Return a formatting string, given the formatting value. + + Bits 0 to 2: Number of Digits to the right of the Decimal Point. + Bits 3 to 6: Number of Digits to the left of the Decimal Point. + Bit 7: If set, suppress leading zeros. + """ + r_digits = int(formatting & 0x07) # digits to the right of decimal point + l_digits = (formatting >> 3) & 0x0F # digits to the left of decimal point + if l_digits == 0: + l_digits = 15 + width = r_digits + l_digits + (1 if r_digits > 0 else 0) + + if formatting & 0x80: + # suppress leading 0 + return f"{{:{width}.{r_digits}f}}" + + return f"{{:0{width}.{r_digits}f}}" + + def _formatter_function( + self, selector: FormatSelector, value: int + ) -> int | float | str: + """Return formatted value for display.""" + value_float = value * self.multiplier / self.divisor + if self.unit_of_measurement == 0: + # Zigbee spec power unit is kW, but we show the value in W + value_watt = value_float * 1000 + if value_watt < 100: + return round(value_watt, 1) + return round(value_watt) + if selector == self.FormatSelector.SUMMATION: + assert self._summa_format + return self._summa_format.format(value_float).lstrip() + assert self._format_spec + return self._format_spec.format(value_float).lstrip() + + demand_formatter = partialmethod(_formatter_function, FormatSelector.DEMAND) + summa_formatter = partialmethod(_formatter_function, FormatSelector.SUMMATION) + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Prepayment.cluster_id) +class PrepaymentClusterHandler(ClusterHandler): + """Prepayment cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Price.cluster_id) +class PriceClusterHandler(ClusterHandler): + """Price cluster handler.""" + + +@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Tunneling.cluster_id) +class TunnelingClusterHandler(ClusterHandler): + """Tunneling cluster handler.""" diff --git a/homeassistant/components/zha/core/const.py b/homeassistant/components/zha/core/const.py new file mode 100644 index 00000000000..2359fe0a1c3 --- /dev/null +++ b/homeassistant/components/zha/core/const.py @@ -0,0 +1,423 @@ +"""All constants related to the ZHA component.""" + +from __future__ import annotations + +import enum +import logging + +import bellows.zigbee.application +import voluptuous as vol +import zigpy.application +import zigpy.types as t +import zigpy_deconz.zigbee.application +import zigpy_xbee.zigbee.application +import zigpy_zigate.zigbee.application +import zigpy_znp.zigbee.application + +from homeassistant.const import Platform +import homeassistant.helpers.config_validation as cv + +ATTR_ACTIVE_COORDINATOR = "active_coordinator" +ATTR_ARGS = "args" +ATTR_ATTRIBUTE = "attribute" +ATTR_ATTRIBUTE_ID = "attribute_id" +ATTR_ATTRIBUTE_NAME = "attribute_name" +ATTR_AVAILABLE = "available" +ATTR_CLUSTER_ID = "cluster_id" +ATTR_CLUSTER_TYPE = "cluster_type" +ATTR_COMMAND_TYPE = "command_type" +ATTR_DEVICE_IEEE = "device_ieee" +ATTR_DEVICE_TYPE = "device_type" +ATTR_ENDPOINTS = "endpoints" +ATTR_ENDPOINT_NAMES = "endpoint_names" +ATTR_ENDPOINT_ID = "endpoint_id" +ATTR_IEEE = "ieee" +ATTR_IN_CLUSTERS = "in_clusters" +ATTR_LAST_SEEN = "last_seen" +ATTR_LEVEL = "level" +ATTR_LQI = "lqi" +ATTR_MANUFACTURER = "manufacturer" +ATTR_MANUFACTURER_CODE = 
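# --- Editor's note: illustrative sketch, not part of the patch -------------
# get_formatting() above turns the ZCL formatting byte into a Python format
# string: bits 0-2 are the digits right of the decimal point, bits 3-6 the
# digits to the left (0 is treated as 15), and bit 7 suppresses leading
# zeros.  Decoding the 0xF9 default used during initialization by hand:
formatting = 0xF9  # 0b1111_1001
r_digits = formatting & 0x07  # 1 digit to the right of the decimal point
l_digits = (formatting >> 3) & 0x0F  # 15 digits to the left
width = r_digits + l_digits + (1 if r_digits > 0 else 0)  # 17, incl. the "."
fmt = f"{{:{width}.{r_digits}f}}" if formatting & 0x80 else f"{{:0{width}.{r_digits}f}}"
print(fmt)  # -> "{:17.1f}" because bit 7 is set (no zero padding)
print(fmt.format(1234.5).lstrip())  # -> "1234.5", as the formatters return it
# ---------------------------------------------------------------------------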
"manufacturer_code" +ATTR_MEMBERS = "members" +ATTR_MODEL = "model" +ATTR_NEIGHBORS = "neighbors" +ATTR_NODE_DESCRIPTOR = "node_descriptor" +ATTR_NWK = "nwk" +ATTR_OUT_CLUSTERS = "out_clusters" +ATTR_PARAMS = "params" +ATTR_POWER_SOURCE = "power_source" +ATTR_PROFILE_ID = "profile_id" +ATTR_QUIRK_APPLIED = "quirk_applied" +ATTR_QUIRK_CLASS = "quirk_class" +ATTR_QUIRK_ID = "quirk_id" +ATTR_ROUTES = "routes" +ATTR_RSSI = "rssi" +ATTR_SIGNATURE = "signature" +ATTR_TYPE = "type" +ATTR_UNIQUE_ID = "unique_id" +ATTR_VALUE = "value" +ATTR_WARNING_DEVICE_DURATION = "duration" +ATTR_WARNING_DEVICE_MODE = "mode" +ATTR_WARNING_DEVICE_STROBE = "strobe" +ATTR_WARNING_DEVICE_STROBE_DUTY_CYCLE = "duty_cycle" +ATTR_WARNING_DEVICE_STROBE_INTENSITY = "intensity" + +BAUD_RATES = [2400, 4800, 9600, 14400, 19200, 38400, 57600, 115200, 128000, 256000] +BINDINGS = "bindings" + +CLUSTER_DETAILS = "cluster_details" + +CLUSTER_HANDLER_ACCELEROMETER = "accelerometer" +CLUSTER_HANDLER_BINARY_INPUT = "binary_input" +CLUSTER_HANDLER_ANALOG_INPUT = "analog_input" +CLUSTER_HANDLER_ANALOG_OUTPUT = "analog_output" +CLUSTER_HANDLER_ATTRIBUTE = "attribute" +CLUSTER_HANDLER_BASIC = "basic" +CLUSTER_HANDLER_COLOR = "light_color" +CLUSTER_HANDLER_COVER = "window_covering" +CLUSTER_HANDLER_DEVICE_TEMPERATURE = "device_temperature" +CLUSTER_HANDLER_DOORLOCK = "door_lock" +CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT = "electrical_measurement" +CLUSTER_HANDLER_EVENT_RELAY = "event_relay" +CLUSTER_HANDLER_FAN = "fan" +CLUSTER_HANDLER_HUMIDITY = "humidity" +CLUSTER_HANDLER_HUE_OCCUPANCY = "philips_occupancy" +CLUSTER_HANDLER_SOIL_MOISTURE = "soil_moisture" +CLUSTER_HANDLER_LEAF_WETNESS = "leaf_wetness" +CLUSTER_HANDLER_IAS_ACE = "ias_ace" +CLUSTER_HANDLER_IAS_WD = "ias_wd" +CLUSTER_HANDLER_IDENTIFY = "identify" +CLUSTER_HANDLER_ILLUMINANCE = "illuminance" +CLUSTER_HANDLER_LEVEL = ATTR_LEVEL +CLUSTER_HANDLER_MULTISTATE_INPUT = "multistate_input" +CLUSTER_HANDLER_OCCUPANCY = "occupancy" +CLUSTER_HANDLER_ON_OFF = "on_off" +CLUSTER_HANDLER_OTA = "ota" +CLUSTER_HANDLER_POWER_CONFIGURATION = "power" +CLUSTER_HANDLER_PRESSURE = "pressure" +CLUSTER_HANDLER_SHADE = "shade" +CLUSTER_HANDLER_SMARTENERGY_METERING = "smartenergy_metering" +CLUSTER_HANDLER_TEMPERATURE = "temperature" +CLUSTER_HANDLER_THERMOSTAT = "thermostat" +CLUSTER_HANDLER_ZDO = "zdo" +CLUSTER_HANDLER_ZONE = ZONE = "ias_zone" +CLUSTER_HANDLER_INOVELLI = "inovelli_vzm31sn_cluster" + +CLUSTER_COMMAND_SERVER = "server" +CLUSTER_COMMANDS_CLIENT = "client_commands" +CLUSTER_COMMANDS_SERVER = "server_commands" +CLUSTER_TYPE_IN = "in" +CLUSTER_TYPE_OUT = "out" + +PLATFORMS = ( + Platform.ALARM_CONTROL_PANEL, + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CLIMATE, + Platform.COVER, + Platform.DEVICE_TRACKER, + Platform.FAN, + Platform.LIGHT, + Platform.LOCK, + Platform.NUMBER, + Platform.SELECT, + Platform.SENSOR, + Platform.SIREN, + Platform.SWITCH, + Platform.UPDATE, +) + +CONF_ALARM_MASTER_CODE = "alarm_master_code" +CONF_ALARM_FAILED_TRIES = "alarm_failed_tries" +CONF_ALARM_ARM_REQUIRES_CODE = "alarm_arm_requires_code" + +CONF_BAUDRATE = "baudrate" +CONF_FLOW_CONTROL = "flow_control" +CONF_CUSTOM_QUIRKS_PATH = "custom_quirks_path" +CONF_DEFAULT_LIGHT_TRANSITION = "default_light_transition" +CONF_DEVICE_CONFIG = "device_config" +CONF_ENABLE_ENHANCED_LIGHT_TRANSITION = "enhanced_light_transition" +CONF_ENABLE_LIGHT_TRANSITIONING_FLAG = "light_transitioning_flag" +CONF_ALWAYS_PREFER_XY_COLOR_MODE = "always_prefer_xy_color_mode" +CONF_GROUP_MEMBERS_ASSUME_STATE = 
"group_members_assume_state" +CONF_ENABLE_IDENTIFY_ON_JOIN = "enable_identify_on_join" +CONF_ENABLE_QUIRKS = "enable_quirks" +CONF_RADIO_TYPE = "radio_type" +CONF_USB_PATH = "usb_path" +CONF_USE_THREAD = "use_thread" +CONF_ZIGPY = "zigpy_config" + +CONF_CONSIDER_UNAVAILABLE_MAINS = "consider_unavailable_mains" +CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS = 60 * 60 * 2 # 2 hours +CONF_CONSIDER_UNAVAILABLE_BATTERY = "consider_unavailable_battery" +CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY = 60 * 60 * 6 # 6 hours + +CONF_ZHA_OPTIONS_SCHEMA = vol.Schema( + { + vol.Optional(CONF_DEFAULT_LIGHT_TRANSITION, default=0): vol.All( + vol.Coerce(float), vol.Range(min=0, max=2**16 / 10) + ), + vol.Required(CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, default=False): cv.boolean, + vol.Required(CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, default=True): cv.boolean, + vol.Required(CONF_ALWAYS_PREFER_XY_COLOR_MODE, default=True): cv.boolean, + vol.Required(CONF_GROUP_MEMBERS_ASSUME_STATE, default=True): cv.boolean, + vol.Required(CONF_ENABLE_IDENTIFY_ON_JOIN, default=True): cv.boolean, + vol.Optional( + CONF_CONSIDER_UNAVAILABLE_MAINS, + default=CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, + ): cv.positive_int, + vol.Optional( + CONF_CONSIDER_UNAVAILABLE_BATTERY, + default=CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, + ): cv.positive_int, + } +) + +CONF_ZHA_ALARM_SCHEMA = vol.Schema( + { + vol.Required(CONF_ALARM_MASTER_CODE, default="1234"): cv.string, + vol.Required(CONF_ALARM_FAILED_TRIES, default=3): cv.positive_int, + vol.Required(CONF_ALARM_ARM_REQUIRES_CODE, default=False): cv.boolean, + } +) + +CUSTOM_CONFIGURATION = "custom_configuration" + +DATA_DEVICE_CONFIG = "zha_device_config" +DATA_ZHA = "zha" +DATA_ZHA_CONFIG = "config" +DATA_ZHA_CORE_EVENTS = "zha_core_events" +DATA_ZHA_DEVICE_TRIGGER_CACHE = "zha_device_trigger_cache" +DATA_ZHA_GATEWAY = "zha_gateway" + +DEBUG_COMP_BELLOWS = "bellows" +DEBUG_COMP_ZHA = "homeassistant.components.zha" +DEBUG_COMP_ZIGPY = "zigpy" +DEBUG_COMP_ZIGPY_ZNP = "zigpy_znp" +DEBUG_COMP_ZIGPY_DECONZ = "zigpy_deconz" +DEBUG_COMP_ZIGPY_XBEE = "zigpy_xbee" +DEBUG_COMP_ZIGPY_ZIGATE = "zigpy_zigate" +DEBUG_LEVEL_CURRENT = "current" +DEBUG_LEVEL_ORIGINAL = "original" +DEBUG_LEVELS = { + DEBUG_COMP_BELLOWS: logging.DEBUG, + DEBUG_COMP_ZHA: logging.DEBUG, + DEBUG_COMP_ZIGPY: logging.DEBUG, + DEBUG_COMP_ZIGPY_ZNP: logging.DEBUG, + DEBUG_COMP_ZIGPY_DECONZ: logging.DEBUG, + DEBUG_COMP_ZIGPY_XBEE: logging.DEBUG, + DEBUG_COMP_ZIGPY_ZIGATE: logging.DEBUG, +} +DEBUG_RELAY_LOGGERS = [DEBUG_COMP_ZHA, DEBUG_COMP_ZIGPY] + +DEFAULT_RADIO_TYPE = "ezsp" +DEFAULT_BAUDRATE = 57600 +DEFAULT_DATABASE_NAME = "zigbee.db" + +DEVICE_PAIRING_STATUS = "pairing_status" + +DISCOVERY_KEY = "zha_discovery_info" + +DOMAIN = "zha" + +ENTITY_METADATA = "entity_metadata" + +GROUP_ID = "group_id" +GROUP_IDS = "group_ids" +GROUP_NAME = "group_name" + +MFG_CLUSTER_ID_START = 0xFC00 + +POWER_MAINS_POWERED = "Mains" +POWER_BATTERY_OR_UNKNOWN = "Battery or Unknown" + +PRESET_SCHEDULE = "Schedule" +PRESET_COMPLEX = "Complex" +PRESET_TEMP_MANUAL = "Temporary manual" + +ZCL_INIT_ATTRS = "ZCL_INIT_ATTRS" + +ZHA_ALARM_OPTIONS = "zha_alarm_options" +ZHA_OPTIONS = "zha_options" + +ZHA_CONFIG_SCHEMAS = { + ZHA_OPTIONS: CONF_ZHA_OPTIONS_SCHEMA, + ZHA_ALARM_OPTIONS: CONF_ZHA_ALARM_SCHEMA, +} + +type _ControllerClsType = type[zigpy.application.ControllerApplication] + + +class RadioType(enum.Enum): + """Possible options for radio type.""" + + ezsp = ( + "EZSP = Silicon Labs EmberZNet protocol: Elelabs, HUSBZB-1, Telegesis", + 
bellows.zigbee.application.ControllerApplication, + ) + znp = ( + "ZNP = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2", + zigpy_znp.zigbee.application.ControllerApplication, + ) + deconz = ( + "deCONZ = dresden elektronik deCONZ protocol: ConBee I/II, RaspBee I/II", + zigpy_deconz.zigbee.application.ControllerApplication, + ) + zigate = ( + "ZiGate = ZiGate Zigbee radios: PiZiGate, ZiGate USB-TTL, ZiGate WiFi", + zigpy_zigate.zigbee.application.ControllerApplication, + ) + xbee = ( + "XBee = Digi XBee Zigbee radios: Digi XBee Series 2, 2C, 3", + zigpy_xbee.zigbee.application.ControllerApplication, + ) + + @classmethod + def list(cls) -> list[str]: + """Return a list of descriptions.""" + return [e.description for e in RadioType] + + @classmethod + def get_by_description(cls, description: str) -> RadioType: + """Get radio by description.""" + for radio in cls: + if radio.description == description: + return radio + raise ValueError + + def __init__(self, description: str, controller_cls: _ControllerClsType) -> None: + """Init instance.""" + self._desc = description + self._ctrl_cls = controller_cls + + @property + def controller(self) -> _ControllerClsType: + """Return controller class.""" + return self._ctrl_cls + + @property + def description(self) -> str: + """Return radio type description.""" + return self._desc + + +REPORT_CONFIG_ATTR_PER_REQ = 3 +REPORT_CONFIG_MAX_INT = 900 +REPORT_CONFIG_MAX_INT_BATTERY_SAVE = 10800 +REPORT_CONFIG_MIN_INT = 30 +REPORT_CONFIG_MIN_INT_ASAP = 1 +REPORT_CONFIG_MIN_INT_IMMEDIATE = 0 +REPORT_CONFIG_MIN_INT_OP = 5 +REPORT_CONFIG_MIN_INT_BATTERY_SAVE = 3600 +REPORT_CONFIG_RPT_CHANGE = 1 +REPORT_CONFIG_DEFAULT = ( + REPORT_CONFIG_MIN_INT, + REPORT_CONFIG_MAX_INT, + REPORT_CONFIG_RPT_CHANGE, +) +REPORT_CONFIG_ASAP = ( + REPORT_CONFIG_MIN_INT_ASAP, + REPORT_CONFIG_MAX_INT, + REPORT_CONFIG_RPT_CHANGE, +) +REPORT_CONFIG_BATTERY_SAVE = ( + REPORT_CONFIG_MIN_INT_BATTERY_SAVE, + REPORT_CONFIG_MAX_INT_BATTERY_SAVE, + REPORT_CONFIG_RPT_CHANGE, +) +REPORT_CONFIG_IMMEDIATE = ( + REPORT_CONFIG_MIN_INT_IMMEDIATE, + REPORT_CONFIG_MAX_INT, + REPORT_CONFIG_RPT_CHANGE, +) +REPORT_CONFIG_OP = ( + REPORT_CONFIG_MIN_INT_OP, + REPORT_CONFIG_MAX_INT, + REPORT_CONFIG_RPT_CHANGE, +) + +SENSOR_ACCELERATION = "acceleration" +SENSOR_BATTERY = "battery" +SENSOR_ELECTRICAL_MEASUREMENT = CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT +SENSOR_GENERIC = "generic" +SENSOR_HUMIDITY = CLUSTER_HANDLER_HUMIDITY +SENSOR_ILLUMINANCE = CLUSTER_HANDLER_ILLUMINANCE +SENSOR_METERING = "metering" +SENSOR_OCCUPANCY = CLUSTER_HANDLER_OCCUPANCY +SENSOR_OPENING = "opening" +SENSOR_PRESSURE = CLUSTER_HANDLER_PRESSURE +SENSOR_TEMPERATURE = CLUSTER_HANDLER_TEMPERATURE +SENSOR_TYPE = "sensor_type" + +SIGNAL_ADD_ENTITIES = "zha_add_new_entities" +SIGNAL_ATTR_UPDATED = "attribute_updated" +SIGNAL_AVAILABLE = "available" +SIGNAL_MOVE_LEVEL = "move_level" +SIGNAL_REMOVE = "remove" +SIGNAL_SET_LEVEL = "set_level" +SIGNAL_STATE_ATTR = "update_state_attribute" +SIGNAL_UPDATE_DEVICE = "{}_zha_update_device" +SIGNAL_GROUP_ENTITY_REMOVED = "group_entity_removed" +SIGNAL_GROUP_MEMBERSHIP_CHANGE = "group_membership_change" + +UNKNOWN = "unknown" +UNKNOWN_MANUFACTURER = "unk_manufacturer" +UNKNOWN_MODEL = "unk_model" + +WARNING_DEVICE_MODE_STOP = 0 +WARNING_DEVICE_MODE_BURGLAR = 1 +WARNING_DEVICE_MODE_FIRE = 2 +WARNING_DEVICE_MODE_EMERGENCY = 3 +WARNING_DEVICE_MODE_POLICE_PANIC = 4 +WARNING_DEVICE_MODE_FIRE_PANIC = 5 +WARNING_DEVICE_MODE_EMERGENCY_PANIC = 6 + +WARNING_DEVICE_STROBE_NO = 0 
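# --- Editor's note: illustrative sketch, not part of the patch -------------
# The REPORT_CONFIG_* tuples above are (min_interval, max_interval,
# reportable_change) triples in the usual ZCL attribute-reporting sense:
# report no more often than min_interval seconds, at least every
# max_interval seconds, and whenever the value changes by reportable_change.
# For example, the Metering handler pairs current_summ_delivered with
# REPORT_CONFIG_DEFAULT, which with the constants above works out to:
REPORT_CONFIG_DEFAULT = (30, 900, 1)  # values as composed above

min_interval, max_interval, reportable_change = REPORT_CONFIG_DEFAULT
print(
    f"report at most every {min_interval}s, at least every {max_interval}s, "
    f"on a change of {reportable_change}"
)
# ---------------------------------------------------------------------------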
+WARNING_DEVICE_STROBE_YES = 1 + +WARNING_DEVICE_SOUND_LOW = 0 +WARNING_DEVICE_SOUND_MEDIUM = 1 +WARNING_DEVICE_SOUND_HIGH = 2 +WARNING_DEVICE_SOUND_VERY_HIGH = 3 + +WARNING_DEVICE_STROBE_LOW = 0x00 +WARNING_DEVICE_STROBE_MEDIUM = 0x01 +WARNING_DEVICE_STROBE_HIGH = 0x02 +WARNING_DEVICE_STROBE_VERY_HIGH = 0x03 + +WARNING_DEVICE_SQUAWK_MODE_ARMED = 0 +WARNING_DEVICE_SQUAWK_MODE_DISARMED = 1 + +ZHA_DISCOVERY_NEW = "zha_discovery_new_{}" +ZHA_CLUSTER_HANDLER_MSG = "zha_channel_message" +ZHA_CLUSTER_HANDLER_MSG_BIND = "zha_channel_bind" +ZHA_CLUSTER_HANDLER_MSG_CFG_RPT = "zha_channel_configure_reporting" +ZHA_CLUSTER_HANDLER_MSG_DATA = "zha_channel_msg_data" +ZHA_CLUSTER_HANDLER_CFG_DONE = "zha_channel_cfg_done" +ZHA_CLUSTER_HANDLER_READS_PER_REQ = 5 +ZHA_EVENT = "zha_event" +ZHA_GW_MSG = "zha_gateway_message" +ZHA_GW_MSG_DEVICE_FULL_INIT = "device_fully_initialized" +ZHA_GW_MSG_DEVICE_INFO = "device_info" +ZHA_GW_MSG_DEVICE_JOINED = "device_joined" +ZHA_GW_MSG_DEVICE_REMOVED = "device_removed" +ZHA_GW_MSG_GROUP_ADDED = "group_added" +ZHA_GW_MSG_GROUP_INFO = "group_info" +ZHA_GW_MSG_GROUP_MEMBER_ADDED = "group_member_added" +ZHA_GW_MSG_GROUP_MEMBER_REMOVED = "group_member_removed" +ZHA_GW_MSG_GROUP_REMOVED = "group_removed" +ZHA_GW_MSG_LOG_ENTRY = "log_entry" +ZHA_GW_MSG_LOG_OUTPUT = "log_output" +ZHA_GW_MSG_RAW_INIT = "raw_device_initialized" + + +class Strobe(t.enum8): + """Strobe enum.""" + + No_Strobe = 0x00 + Strobe = 0x01 + + +EZSP_OVERWRITE_EUI64 = ( + "i_understand_i_can_update_eui64_only_once_and_i_still_want_to_do_it" +) diff --git a/homeassistant/components/zha/core/decorators.py b/homeassistant/components/zha/core/decorators.py new file mode 100644 index 00000000000..d20fb7f2a38 --- /dev/null +++ b/homeassistant/components/zha/core/decorators.py @@ -0,0 +1,56 @@ +"""Decorators for ZHA core registries.""" + +from __future__ import annotations + +from collections.abc import Callable +from typing import Any + + +class DictRegistry[_TypeT: type[Any]](dict[int | str, _TypeT]): + """Dict Registry of items.""" + + def register(self, name: int | str) -> Callable[[_TypeT], _TypeT]: + """Return decorator to register item with a specific name.""" + + def decorator(cluster_handler: _TypeT) -> _TypeT: + """Register decorated cluster handler or item.""" + self[name] = cluster_handler + return cluster_handler + + return decorator + + +class NestedDictRegistry[_TypeT: type[Any]]( + dict[int | str, dict[int | str | None, _TypeT]] +): + """Dict Registry of multiple items per key.""" + + def register( + self, name: int | str, sub_name: int | str | None = None + ) -> Callable[[_TypeT], _TypeT]: + """Return decorator to register item with a specific and a quirk name.""" + + def decorator(cluster_handler: _TypeT) -> _TypeT: + """Register decorated cluster handler or item.""" + if name not in self: + self[name] = {} + self[name][sub_name] = cluster_handler + return cluster_handler + + return decorator + + +class SetRegistry(set[int | str]): + """Set Registry of items.""" + + def register[_TypeT: type[Any]]( + self, name: int | str + ) -> Callable[[_TypeT], _TypeT]: + """Return decorator to register item with a specific name.""" + + def decorator(cluster_handler: _TypeT) -> _TypeT: + """Register decorated cluster handler or item.""" + self.add(name) + return cluster_handler + + return decorator diff --git a/homeassistant/components/zha/core/device.py b/homeassistant/components/zha/core/device.py new file mode 100644 index 00000000000..163674d614c --- /dev/null +++ 
b/homeassistant/components/zha/core/device.py @@ -0,0 +1,1010 @@ +"""Device for Zigbee Home Automation.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Callable +from datetime import timedelta +from enum import Enum +from functools import cached_property +import logging +import random +import time +from typing import TYPE_CHECKING, Any, Self + +from zigpy import types +from zigpy.device import Device as ZigpyDevice +import zigpy.exceptions +from zigpy.profiles import PROFILES +import zigpy.quirks +from zigpy.quirks.v2 import CustomDeviceV2 +from zigpy.types.named import EUI64, NWK +from zigpy.zcl.clusters import Cluster +from zigpy.zcl.clusters.general import Groups, Identify +from zigpy.zcl.foundation import Status as ZclStatus, ZCLCommandDef +import zigpy.zdo.types as zdo_types + +from homeassistant.const import ATTR_COMMAND, ATTR_DEVICE_ID, ATTR_NAME +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceEntry +from homeassistant.helpers.dispatcher import ( + async_dispatcher_connect, + async_dispatcher_send, +) +from homeassistant.helpers.event import async_track_time_interval + +from . import const, discovery +from .cluster_handlers import ClusterHandler, ZDOClusterHandler +from .const import ( + ATTR_ACTIVE_COORDINATOR, + ATTR_ARGS, + ATTR_ATTRIBUTE, + ATTR_AVAILABLE, + ATTR_CLUSTER_ID, + ATTR_CLUSTER_TYPE, + ATTR_COMMAND_TYPE, + ATTR_DEVICE_TYPE, + ATTR_ENDPOINT_ID, + ATTR_ENDPOINT_NAMES, + ATTR_ENDPOINTS, + ATTR_IEEE, + ATTR_LAST_SEEN, + ATTR_LQI, + ATTR_MANUFACTURER, + ATTR_MANUFACTURER_CODE, + ATTR_MODEL, + ATTR_NEIGHBORS, + ATTR_NODE_DESCRIPTOR, + ATTR_NWK, + ATTR_PARAMS, + ATTR_POWER_SOURCE, + ATTR_QUIRK_APPLIED, + ATTR_QUIRK_CLASS, + ATTR_QUIRK_ID, + ATTR_ROUTES, + ATTR_RSSI, + ATTR_SIGNATURE, + ATTR_VALUE, + CLUSTER_COMMAND_SERVER, + CLUSTER_COMMANDS_CLIENT, + CLUSTER_COMMANDS_SERVER, + CLUSTER_TYPE_IN, + CLUSTER_TYPE_OUT, + CONF_CONSIDER_UNAVAILABLE_BATTERY, + CONF_CONSIDER_UNAVAILABLE_MAINS, + CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, + CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, + CONF_ENABLE_IDENTIFY_ON_JOIN, + POWER_BATTERY_OR_UNKNOWN, + POWER_MAINS_POWERED, + SIGNAL_AVAILABLE, + SIGNAL_UPDATE_DEVICE, + UNKNOWN, + UNKNOWN_MANUFACTURER, + UNKNOWN_MODEL, + ZHA_OPTIONS, +) +from .endpoint import Endpoint +from .helpers import LogMixin, async_get_zha_config_value, convert_to_zcl_values + +if TYPE_CHECKING: + from ..websocket_api import ClusterBinding + from .gateway import ZHAGateway + +_LOGGER = logging.getLogger(__name__) +_UPDATE_ALIVE_INTERVAL = (60, 90) +_CHECKIN_GRACE_PERIODS = 2 + + +def get_device_automation_triggers( + device: zigpy.device.Device, +) -> dict[tuple[str, str], dict[str, str]]: + """Get the supported device automation triggers for a zigpy device.""" + return { + ("device_offline", "device_offline"): {"device_event_type": "device_offline"}, + **getattr(device, "device_automation_triggers", {}), + } + + +class DeviceStatus(Enum): + """Status of a device.""" + + CREATED = 1 + INITIALIZED = 2 + + +class ZHADevice(LogMixin): + """ZHA Zigbee device object.""" + + _ha_device_id: str + + def __init__( + self, + hass: HomeAssistant, + zigpy_device: zigpy.device.Device, + zha_gateway: ZHAGateway, + ) -> None: + """Initialize the gateway.""" + self.hass: HomeAssistant = hass + self._zigpy_device: ZigpyDevice = zigpy_device + self._zha_gateway: ZHAGateway = 
zha_gateway + self._available_signal: str = f"{self.name}_{self.ieee}_{SIGNAL_AVAILABLE}" + self._checkins_missed_count: int = 0 + self.unsubs: list[Callable[[], None]] = [] + self.quirk_applied: bool = isinstance( + self._zigpy_device, zigpy.quirks.CustomDevice + ) + self.quirk_class: str = ( + f"{self._zigpy_device.__class__.__module__}." + f"{self._zigpy_device.__class__.__name__}" + ) + self.quirk_id: str | None = getattr(self._zigpy_device, ATTR_QUIRK_ID, None) + + if self.is_mains_powered: + self.consider_unavailable_time: int = async_get_zha_config_value( + self._zha_gateway.config_entry, + ZHA_OPTIONS, + CONF_CONSIDER_UNAVAILABLE_MAINS, + CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, + ) + else: + self.consider_unavailable_time = async_get_zha_config_value( + self._zha_gateway.config_entry, + ZHA_OPTIONS, + CONF_CONSIDER_UNAVAILABLE_BATTERY, + CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, + ) + self._available: bool = self.is_coordinator or ( + self.last_seen is not None + and time.time() - self.last_seen < self.consider_unavailable_time + ) + self._zdo_handler: ZDOClusterHandler = ZDOClusterHandler(self) + self._power_config_ch: ClusterHandler | None = None + self._identify_ch: ClusterHandler | None = None + self._basic_ch: ClusterHandler | None = None + self.status: DeviceStatus = DeviceStatus.CREATED + + self._endpoints: dict[int, Endpoint] = {} + for ep_id, endpoint in zigpy_device.endpoints.items(): + if ep_id != 0: + self._endpoints[ep_id] = Endpoint.new(endpoint, self) + + if not self.is_coordinator: + keep_alive_interval = random.randint(*_UPDATE_ALIVE_INTERVAL) + self.debug( + "starting availability checks - interval: %s", keep_alive_interval + ) + self.unsubs.append( + async_track_time_interval( + self.hass, + self._check_available, + timedelta(seconds=keep_alive_interval), + ) + ) + + @property + def device_id(self) -> str: + """Return the HA device registry device id.""" + return self._ha_device_id + + def set_device_id(self, device_id: str) -> None: + """Set the HA device registry device id.""" + self._ha_device_id = device_id + + @property + def device(self) -> zigpy.device.Device: + """Return underlying Zigpy device.""" + return self._zigpy_device + + @property + def name(self) -> str: + """Return device name.""" + return f"{self.manufacturer} {self.model}" + + @property + def ieee(self) -> EUI64: + """Return ieee address for device.""" + return self._zigpy_device.ieee + + @property + def manufacturer(self) -> str: + """Return manufacturer for device.""" + if self._zigpy_device.manufacturer is None: + return UNKNOWN_MANUFACTURER + return self._zigpy_device.manufacturer + + @property + def model(self) -> str: + """Return model for device.""" + if self._zigpy_device.model is None: + return UNKNOWN_MODEL + return self._zigpy_device.model + + @property + def manufacturer_code(self) -> int | None: + """Return the manufacturer code for the device.""" + if self._zigpy_device.node_desc is None: + return None + + return self._zigpy_device.node_desc.manufacturer_code + + @property + def nwk(self) -> NWK: + """Return nwk for device.""" + return self._zigpy_device.nwk + + @property + def lqi(self): + """Return lqi for device.""" + return self._zigpy_device.lqi + + @property + def rssi(self): + """Return rssi for device.""" + return self._zigpy_device.rssi + + @property + def last_seen(self) -> float | None: + """Return last_seen for device.""" + return self._zigpy_device.last_seen + + @property + def is_mains_powered(self) -> bool | None: + """Return true if device is mains powered.""" 
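# --- Editor's note: illustrative sketch, not part of the patch -------------
# ZHADevice.__init__ above picks the availability window from the config
# entry: mains-powered devices fall back to the 2-hour default and everything
# else to the 6-hour battery default, and the initial availability is simply
# "seen within that window" (coordinators are always treated as available).
# A standalone restatement of that check, with hypothetical names:
import time

_CONSIDER_UNAVAILABLE_MAINS = 60 * 60 * 2  # defaults as defined in const.py
_CONSIDER_UNAVAILABLE_BATTERY = 60 * 60 * 6


def _initially_available(last_seen: float | None, is_mains_powered: bool) -> bool:
    """Mirror the initial availability computed in ZHADevice.__init__."""
    window = (
        _CONSIDER_UNAVAILABLE_MAINS
        if is_mains_powered
        else _CONSIDER_UNAVAILABLE_BATTERY
    )
    return last_seen is not None and time.time() - last_seen < window


print(_initially_available(time.time() - 300, is_mains_powered=True))  # True
print(_initially_available(None, is_mains_powered=False))  # False
# ---------------------------------------------------------------------------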
+ if self._zigpy_device.node_desc is None: + return None + + return self._zigpy_device.node_desc.is_mains_powered + + @property + def device_type(self) -> str: + """Return the logical device type for the device.""" + if self._zigpy_device.node_desc is None: + return UNKNOWN + + return self._zigpy_device.node_desc.logical_type.name + + @property + def power_source(self) -> str: + """Return the power source for the device.""" + return ( + POWER_MAINS_POWERED if self.is_mains_powered else POWER_BATTERY_OR_UNKNOWN + ) + + @property + def is_router(self) -> bool | None: + """Return true if this is a routing capable device.""" + if self._zigpy_device.node_desc is None: + return None + + return self._zigpy_device.node_desc.is_router + + @property + def is_coordinator(self) -> bool | None: + """Return true if this device represents a coordinator.""" + if self._zigpy_device.node_desc is None: + return None + + return self._zigpy_device.node_desc.is_coordinator + + @property + def is_active_coordinator(self) -> bool: + """Return true if this device is the active coordinator.""" + if not self.is_coordinator: + return False + + return self.ieee == self.gateway.state.node_info.ieee + + @property + def is_end_device(self) -> bool | None: + """Return true if this device is an end device.""" + if self._zigpy_device.node_desc is None: + return None + + return self._zigpy_device.node_desc.is_end_device + + @property + def is_groupable(self) -> bool: + """Return true if this device has a group cluster.""" + return self.is_coordinator or ( + self.available and bool(self.async_get_groupable_endpoints()) + ) + + @property + def skip_configuration(self) -> bool: + """Return true if the device should not issue configuration related commands.""" + return self._zigpy_device.skip_configuration or bool(self.is_coordinator) + + @property + def gateway(self): + """Return the gateway for this device.""" + return self._zha_gateway + + @cached_property + def device_automation_commands(self) -> dict[str, list[tuple[str, str]]]: + """Return the a lookup of commands to etype/sub_type.""" + commands: dict[str, list[tuple[str, str]]] = {} + for etype_subtype, trigger in self.device_automation_triggers.items(): + if command := trigger.get(ATTR_COMMAND): + commands.setdefault(command, []).append(etype_subtype) + return commands + + @cached_property + def device_automation_triggers(self) -> dict[tuple[str, str], dict[str, str]]: + """Return the device automation triggers for this device.""" + return get_device_automation_triggers(self._zigpy_device) + + @property + def available_signal(self) -> str: + """Signal to use to subscribe to device availability changes.""" + return self._available_signal + + @property + def available(self): + """Return True if device is available.""" + return self._available + + @available.setter + def available(self, new_availability: bool) -> None: + """Set device availability.""" + self._available = new_availability + + @property + def power_configuration_ch(self) -> ClusterHandler | None: + """Return power configuration cluster handler.""" + return self._power_config_ch + + @power_configuration_ch.setter + def power_configuration_ch(self, cluster_handler: ClusterHandler) -> None: + """Power configuration cluster handler setter.""" + if self._power_config_ch is None: + self._power_config_ch = cluster_handler + + @property + def basic_ch(self) -> ClusterHandler | None: + """Return basic cluster handler.""" + return self._basic_ch + + @basic_ch.setter + def basic_ch(self, cluster_handler: 
ClusterHandler) -> None: + """Set the basic cluster handler.""" + if self._basic_ch is None: + self._basic_ch = cluster_handler + + @property + def identify_ch(self) -> ClusterHandler | None: + """Return power configuration cluster handler.""" + return self._identify_ch + + @identify_ch.setter + def identify_ch(self, cluster_handler: ClusterHandler) -> None: + """Power configuration cluster handler setter.""" + if self._identify_ch is None: + self._identify_ch = cluster_handler + + @property + def zdo_cluster_handler(self) -> ZDOClusterHandler: + """Return ZDO cluster handler.""" + return self._zdo_handler + + @property + def endpoints(self) -> dict[int, Endpoint]: + """Return the endpoints for this device.""" + return self._endpoints + + @property + def zigbee_signature(self) -> dict[str, Any]: + """Get zigbee signature for this device.""" + return { + ATTR_NODE_DESCRIPTOR: str(self._zigpy_device.node_desc), + ATTR_ENDPOINTS: { + signature[0]: signature[1] + for signature in [ + endpoint.zigbee_signature for endpoint in self._endpoints.values() + ] + }, + ATTR_MANUFACTURER: self.manufacturer, + ATTR_MODEL: self.model, + } + + @property + def sw_version(self) -> str | None: + """Return the software version for this device.""" + device_registry = dr.async_get(self.hass) + reg_device: DeviceEntry | None = device_registry.async_get(self.device_id) + if reg_device is None: + return None + return reg_device.sw_version + + @classmethod + def new( + cls, + hass: HomeAssistant, + zigpy_dev: zigpy.device.Device, + gateway: ZHAGateway, + ) -> Self: + """Create new device.""" + zha_dev = cls(hass, zigpy_dev, gateway) + zha_dev.unsubs.append( + async_dispatcher_connect( + hass, + SIGNAL_UPDATE_DEVICE.format(str(zha_dev.ieee)), + zha_dev.async_update_sw_build_id, + ) + ) + discovery.PROBE.discover_device_entities(zha_dev) + return zha_dev + + @callback + def async_update_sw_build_id(self, sw_version: int) -> None: + """Update device sw version.""" + if self.device_id is None: + return + + device_registry = dr.async_get(self.hass) + device_registry.async_update_device( + self.device_id, sw_version=f"0x{sw_version:08x}" + ) + + async def _check_available(self, *_: Any) -> None: + # don't flip the availability state of the coordinator + if self.is_coordinator: + return + if self.last_seen is None: + self.debug("last_seen is None, marking the device unavailable") + self.update_available(False) + return + + difference = time.time() - self.last_seen + if difference < self.consider_unavailable_time: + self.debug( + "Device seen - marking the device available and resetting counter" + ) + self.update_available(True) + self._checkins_missed_count = 0 + return + + if self.hass.data[const.DATA_ZHA].allow_polling: + if ( + self._checkins_missed_count >= _CHECKIN_GRACE_PERIODS + or self.manufacturer == "LUMI" + or not self._endpoints + ): + self.debug( + ( + "last_seen is %s seconds ago and ping attempts have been exhausted," + " marking the device unavailable" + ), + difference, + ) + self.update_available(False) + return + + self._checkins_missed_count += 1 + self.debug( + "Attempting to checkin with device - missed checkins: %s", + self._checkins_missed_count, + ) + if not self.basic_ch: + self.debug("does not have a mandatory basic cluster") + self.update_available(False) + return + res = await self.basic_ch.get_attribute_value( + ATTR_MANUFACTURER, from_cache=False + ) + if res is not None: + self._checkins_missed_count = 0 + + def update_available(self, available: bool) -> None: + """Update device availability 
and signal entities.""" + self.debug( + ( + "Update device availability - device available: %s - new availability:" + " %s - changed: %s" + ), + self.available, + available, + self.available ^ available, + ) + availability_changed = self.available ^ available + self.available = available + if availability_changed and available: + # reinit cluster handlers then signal entities + self.debug( + "Device availability changed and device became available," + " reinitializing cluster handlers" + ) + self.hass.async_create_task(self._async_became_available()) + return + if availability_changed and not available: + self.debug("Device availability changed and device became unavailable") + self.zha_send_event( + { + "device_event_type": "device_offline", + }, + ) + async_dispatcher_send(self.hass, f"{self._available_signal}_entity") + + @callback + def zha_send_event(self, event_data: dict[str, str | int]) -> None: + """Relay events to hass.""" + self.hass.bus.async_fire( + const.ZHA_EVENT, + { + const.ATTR_DEVICE_IEEE: str(self.ieee), + const.ATTR_UNIQUE_ID: str(self.ieee), + ATTR_DEVICE_ID: self.device_id, + **event_data, + }, + ) + + async def _async_became_available(self) -> None: + """Update device availability and signal entities.""" + await self.async_initialize(False) + async_dispatcher_send(self.hass, f"{self._available_signal}_entity") + + @property + def device_info(self) -> dict[str, Any]: + """Return a device description for device.""" + ieee = str(self.ieee) + time_struct = time.localtime(self.last_seen) + update_time = time.strftime("%Y-%m-%dT%H:%M:%S", time_struct) + return { + ATTR_IEEE: ieee, + ATTR_NWK: self.nwk, + ATTR_MANUFACTURER: self.manufacturer, + ATTR_MODEL: self.model, + ATTR_NAME: self.name or ieee, + ATTR_QUIRK_APPLIED: self.quirk_applied, + ATTR_QUIRK_CLASS: self.quirk_class, + ATTR_QUIRK_ID: self.quirk_id, + ATTR_MANUFACTURER_CODE: self.manufacturer_code, + ATTR_POWER_SOURCE: self.power_source, + ATTR_LQI: self.lqi, + ATTR_RSSI: self.rssi, + ATTR_LAST_SEEN: update_time, + ATTR_AVAILABLE: self.available, + ATTR_DEVICE_TYPE: self.device_type, + ATTR_SIGNATURE: self.zigbee_signature, + } + + async def async_configure(self) -> None: + """Configure the device.""" + should_identify = async_get_zha_config_value( + self._zha_gateway.config_entry, + ZHA_OPTIONS, + CONF_ENABLE_IDENTIFY_ON_JOIN, + True, + ) + self.debug("started configuration") + await self._zdo_handler.async_configure() + self._zdo_handler.debug("'async_configure' stage succeeded") + await asyncio.gather( + *(endpoint.async_configure() for endpoint in self._endpoints.values()) + ) + if isinstance(self._zigpy_device, CustomDeviceV2): + self.debug("applying quirks v2 custom device configuration") + await self._zigpy_device.apply_custom_configuration() + async_dispatcher_send( + self.hass, + const.ZHA_CLUSTER_HANDLER_MSG, + { + const.ATTR_TYPE: const.ZHA_CLUSTER_HANDLER_CFG_DONE, + }, + ) + self.debug("completed configuration") + + if ( + should_identify + and self.identify_ch is not None + and not self.skip_configuration + ): + await self.identify_ch.trigger_effect( + effect_id=Identify.EffectIdentifier.Okay, + effect_variant=Identify.EffectVariant.Default, + ) + + async def async_initialize(self, from_cache: bool = False) -> None: + """Initialize cluster handlers.""" + self.debug("started initialization") + await self._zdo_handler.async_initialize(from_cache) + self._zdo_handler.debug("'async_initialize' stage succeeded") + + # We intentionally do not use `gather` here! 
This is so that if, for example, + # three `device.async_initialize()`s are spawned, only three concurrent requests + # will ever be in flight at once. Startup concurrency is managed at the device + # level. + for endpoint in self._endpoints.values(): + try: + await endpoint.async_initialize(from_cache) + except Exception: # noqa: BLE001 + self.debug("Failed to initialize endpoint", exc_info=True) + + self.debug("power source: %s", self.power_source) + self.status = DeviceStatus.INITIALIZED + self.debug("completed initialization") + + @callback + def async_cleanup_handles(self) -> None: + """Unsubscribe the dispatchers and timers.""" + for unsubscribe in self.unsubs: + unsubscribe() + + @property + def zha_device_info(self) -> dict[str, Any]: + """Get ZHA device information.""" + device_info: dict[str, Any] = {} + device_info.update(self.device_info) + device_info[ATTR_ACTIVE_COORDINATOR] = self.is_active_coordinator + device_info["entities"] = [ + { + "entity_id": entity_ref.reference_id, + ATTR_NAME: entity_ref.device_info[ATTR_NAME], + } + for entity_ref in self.gateway.device_registry[self.ieee] + ] + + topology = self.gateway.application_controller.topology + device_info[ATTR_NEIGHBORS] = [ + { + "device_type": neighbor.device_type.name, + "rx_on_when_idle": neighbor.rx_on_when_idle.name, + "relationship": neighbor.relationship.name, + "extended_pan_id": str(neighbor.extended_pan_id), + "ieee": str(neighbor.ieee), + "nwk": str(neighbor.nwk), + "permit_joining": neighbor.permit_joining.name, + "depth": str(neighbor.depth), + "lqi": str(neighbor.lqi), + } + for neighbor in topology.neighbors[self.ieee] + ] + + device_info[ATTR_ROUTES] = [ + { + "dest_nwk": str(route.DstNWK), + "route_status": str(route.RouteStatus.name), + "memory_constrained": bool(route.MemoryConstrained), + "many_to_one": bool(route.ManyToOne), + "route_record_required": bool(route.RouteRecordRequired), + "next_hop": str(route.NextHop), + } + for route in topology.routes[self.ieee] + ] + + # Return endpoint device type Names + names: list[dict[str, str]] = [] + for endpoint in (ep for epid, ep in self.device.endpoints.items() if epid): + profile = PROFILES.get(endpoint.profile_id) + if profile and endpoint.device_type is not None: + # DeviceType provides undefined enums + names.append({ATTR_NAME: profile.DeviceType(endpoint.device_type).name}) + else: + names.append( + { + ATTR_NAME: ( + f"unknown {endpoint.device_type} device_type " + f"of 0x{(endpoint.profile_id or 0xFFFF):04x} profile id" + ) + } + ) + device_info[ATTR_ENDPOINT_NAMES] = names + + device_registry = dr.async_get(self.hass) + reg_device = device_registry.async_get(self.device_id) + if reg_device is not None: + device_info["user_given_name"] = reg_device.name_by_user + device_info["device_reg_id"] = reg_device.id + device_info["area_id"] = reg_device.area_id + return device_info + + @callback + def async_get_clusters(self) -> dict[int, dict[str, dict[int, Cluster]]]: + """Get all clusters for this device.""" + return { + ep_id: { + CLUSTER_TYPE_IN: endpoint.in_clusters, + CLUSTER_TYPE_OUT: endpoint.out_clusters, + } + for (ep_id, endpoint) in self._zigpy_device.endpoints.items() + if ep_id != 0 + } + + @callback + def async_get_groupable_endpoints(self): + """Get device endpoints that have a group 'in' cluster.""" + return [ + ep_id + for (ep_id, clusters) in self.async_get_clusters().items() + if Groups.cluster_id in clusters[CLUSTER_TYPE_IN] + ] + + @callback + def async_get_std_clusters(self): + """Get ZHA and ZLL clusters for this device.""" + + 
return { + ep_id: { + CLUSTER_TYPE_IN: endpoint.in_clusters, + CLUSTER_TYPE_OUT: endpoint.out_clusters, + } + for (ep_id, endpoint) in self._zigpy_device.endpoints.items() + if ep_id != 0 and endpoint.profile_id in PROFILES + } + + @callback + def async_get_cluster( + self, endpoint_id: int, cluster_id: int, cluster_type: str = CLUSTER_TYPE_IN + ) -> Cluster: + """Get zigbee cluster from this entity.""" + clusters: dict[int, dict[str, dict[int, Cluster]]] = self.async_get_clusters() + return clusters[endpoint_id][cluster_type][cluster_id] + + @callback + def async_get_cluster_attributes( + self, endpoint_id, cluster_id, cluster_type=CLUSTER_TYPE_IN + ): + """Get zigbee attributes for specified cluster.""" + cluster = self.async_get_cluster(endpoint_id, cluster_id, cluster_type) + if cluster is None: + return None + return cluster.attributes + + @callback + def async_get_cluster_commands( + self, endpoint_id, cluster_id, cluster_type=CLUSTER_TYPE_IN + ): + """Get zigbee commands for specified cluster.""" + cluster = self.async_get_cluster(endpoint_id, cluster_id, cluster_type) + if cluster is None: + return None + return { + CLUSTER_COMMANDS_CLIENT: cluster.client_commands, + CLUSTER_COMMANDS_SERVER: cluster.server_commands, + } + + async def write_zigbee_attribute( + self, + endpoint_id, + cluster_id, + attribute, + value, + cluster_type=CLUSTER_TYPE_IN, + manufacturer=None, + ): + """Write a value to a zigbee attribute for a cluster in this entity.""" + try: + cluster: Cluster = self.async_get_cluster( + endpoint_id, cluster_id, cluster_type + ) + except KeyError as exc: + raise ValueError( + f"Cluster {cluster_id} not found on endpoint {endpoint_id} while" + f" writing attribute {attribute} with value {value}" + ) from exc + + try: + response = await cluster.write_attributes( + {attribute: value}, manufacturer=manufacturer + ) + except zigpy.exceptions.ZigbeeException as exc: + raise HomeAssistantError( + f"Failed to set attribute: " + f"{ATTR_VALUE}: {value} " + f"{ATTR_ATTRIBUTE}: {attribute} " + f"{ATTR_CLUSTER_ID}: {cluster_id} " + f"{ATTR_ENDPOINT_ID}: {endpoint_id}" + ) from exc + + self.debug( + "set: %s for attr: %s to cluster: %s for ept: %s - res: %s", + value, + attribute, + cluster_id, + endpoint_id, + response, + ) + return response + + async def issue_cluster_command( + self, + endpoint_id: int, + cluster_id: int, + command: int, + command_type: str, + args: list | None, + params: dict[str, Any] | None, + cluster_type: str = CLUSTER_TYPE_IN, + manufacturer: int | None = None, + ) -> None: + """Issue a command against specified zigbee cluster on this device.""" + try: + cluster: Cluster = self.async_get_cluster( + endpoint_id, cluster_id, cluster_type + ) + except KeyError as exc: + raise ValueError( + f"Cluster {cluster_id} not found on endpoint {endpoint_id} while" + f" issuing command {command} with args {args}" + ) from exc + commands: dict[int, ZCLCommandDef] = ( + cluster.server_commands + if command_type == CLUSTER_COMMAND_SERVER + else cluster.client_commands + ) + if args is not None: + self.warning( + ( + "args [%s] are deprecated and should be passed with the params key." 
+ " The parameter names are: %s" + ), + args, + [field.name for field in commands[command].schema.fields], + ) + response = await getattr(cluster, commands[command].name)(*args) + else: + assert params is not None + response = await getattr(cluster, commands[command].name)( + **convert_to_zcl_values(params, commands[command].schema) + ) + self.debug( + "Issued cluster command: %s %s %s %s %s %s %s %s", + f"{ATTR_CLUSTER_ID}: [{cluster_id}]", + f"{ATTR_CLUSTER_TYPE}: [{cluster_type}]", + f"{ATTR_ENDPOINT_ID}: [{endpoint_id}]", + f"{ATTR_COMMAND}: [{command}]", + f"{ATTR_COMMAND_TYPE}: [{command_type}]", + f"{ATTR_ARGS}: [{args}]", + f"{ATTR_PARAMS}: [{params}]", + f"{ATTR_MANUFACTURER}: [{manufacturer}]", + ) + if response is None: + return # client commands don't return a response + if isinstance(response, Exception): + raise HomeAssistantError("Failed to issue cluster command") from response + if response[1] is not ZclStatus.SUCCESS: + raise HomeAssistantError( + f"Failed to issue cluster command with status: {response[1]}" + ) + + async def async_add_to_group(self, group_id: int) -> None: + """Add this device to the provided zigbee group.""" + try: + # A group name is required. However, the spec also explicitly states that + # the group name can be ignored by the receiving device if a device cannot + # store it, so we cannot rely on it existing after being written. This is + # only done to make the ZCL command valid. + await self._zigpy_device.add_to_group(group_id, name=f"0x{group_id:04X}") + except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: + self.debug( + "Failed to add device '%s' to group: 0x%04x ex: %s", + self._zigpy_device.ieee, + group_id, + str(ex), + ) + + async def async_remove_from_group(self, group_id: int) -> None: + """Remove this device from the provided zigbee group.""" + try: + await self._zigpy_device.remove_from_group(group_id) + except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: + self.debug( + "Failed to remove device '%s' from group: 0x%04x ex: %s", + self._zigpy_device.ieee, + group_id, + str(ex), + ) + + async def async_add_endpoint_to_group( + self, endpoint_id: int, group_id: int + ) -> None: + """Add the device endpoint to the provided zigbee group.""" + try: + await self._zigpy_device.endpoints[endpoint_id].add_to_group( + group_id, name=f"0x{group_id:04X}" + ) + except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: + self.debug( + "Failed to add endpoint: %s for device: '%s' to group: 0x%04x ex: %s", + endpoint_id, + self._zigpy_device.ieee, + group_id, + str(ex), + ) + + async def async_remove_endpoint_from_group( + self, endpoint_id: int, group_id: int + ) -> None: + """Remove the device endpoint from the provided zigbee group.""" + try: + await self._zigpy_device.endpoints[endpoint_id].remove_from_group(group_id) + except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: + self.debug( + ( + "Failed to remove endpoint: %s for device '%s' from group: 0x%04x" + " ex: %s" + ), + endpoint_id, + self._zigpy_device.ieee, + group_id, + str(ex), + ) + + async def async_bind_to_group( + self, group_id: int, cluster_bindings: list[ClusterBinding] + ) -> None: + """Directly bind this device to a group for the given clusters.""" + await self._async_group_binding_operation( + group_id, zdo_types.ZDOCmd.Bind_req, cluster_bindings + ) + + async def async_unbind_from_group( + self, group_id: int, cluster_bindings: list[ClusterBinding] + ) -> None: + """Unbind this device from a group for the given clusters.""" + await 
self._async_group_binding_operation( + group_id, zdo_types.ZDOCmd.Unbind_req, cluster_bindings + ) + + async def _async_group_binding_operation( + self, + group_id: int, + operation: zdo_types.ZDOCmd, + cluster_bindings: list[ClusterBinding], + ) -> None: + """Create or remove a direct zigbee binding between a device and a group.""" + + zdo = self._zigpy_device.zdo + op_msg = "0x%04x: %s %s, ep: %s, cluster: %s to group: 0x%04x" + destination_address = zdo_types.MultiAddress() + destination_address.addrmode = types.uint8_t(1) + destination_address.nwk = types.uint16_t(group_id) + + tasks = [] + + for cluster_binding in cluster_bindings: + if cluster_binding.endpoint_id == 0: + continue + if ( + cluster_binding.id + in self._zigpy_device.endpoints[ + cluster_binding.endpoint_id + ].out_clusters + ): + op_params = ( + self.nwk, + operation.name, + str(self.ieee), + cluster_binding.endpoint_id, + cluster_binding.id, + group_id, + ) + zdo.debug(f"processing {op_msg}", *op_params) + tasks.append( + ( + zdo.request( + operation, + self.ieee, + cluster_binding.endpoint_id, + cluster_binding.id, + destination_address, + ), + op_msg, + op_params, + ) + ) + res = await asyncio.gather(*(t[0] for t in tasks), return_exceptions=True) + for outcome, log_msg in zip(res, tasks, strict=False): + if isinstance(outcome, Exception): + fmt = f"{log_msg[1]} failed: %s" + else: + fmt = f"{log_msg[1]} completed: %s" + zdo.debug(fmt, *(log_msg[2] + (outcome,))) + + def log(self, level: int, msg: str, *args: Any, **kwargs: Any) -> None: + """Log a message.""" + msg = f"[%s](%s): {msg}" + args = (self.nwk, self.model, *args) + _LOGGER.log(level, msg, *args, **kwargs) diff --git a/homeassistant/components/zha/core/discovery.py b/homeassistant/components/zha/core/discovery.py new file mode 100644 index 00000000000..3c342d14060 --- /dev/null +++ b/homeassistant/components/zha/core/discovery.py @@ -0,0 +1,661 @@ +"""Device discovery functions for Zigbee Home Automation.""" + +from __future__ import annotations + +from collections import Counter +from collections.abc import Callable +import logging +from typing import TYPE_CHECKING, Any, cast + +from slugify import slugify +from zigpy.quirks.v2 import ( + BinarySensorMetadata, + CustomDeviceV2, + EntityType, + NumberMetadata, + SwitchMetadata, + WriteAttributeButtonMetadata, + ZCLCommandButtonMetadata, + ZCLEnumMetadata, + ZCLSensorMetadata, +) +from zigpy.state import State +from zigpy.zcl import ClusterType +from zigpy.zcl.clusters.general import Ota + +from homeassistant.const import CONF_TYPE, Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.dispatcher import ( + async_dispatcher_connect, + async_dispatcher_send, +) +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.entity_registry import async_entries_for_device +from homeassistant.helpers.typing import ConfigType + +from .. import ( # noqa: F401 + alarm_control_panel, + binary_sensor, + button, + climate, + cover, + device_tracker, + fan, + light, + lock, + number, + select, + sensor, + siren, + switch, + update, +) +from . 
import const as zha_const, registries as zha_regs + +# importing cluster handlers updates registries +from .cluster_handlers import ( # noqa: F401 + ClusterHandler, + closures, + general, + homeautomation, + hvac, + lighting, + lightlink, + manufacturerspecific, + measurement, + protocol, + security, + smartenergy, +) +from .helpers import get_zha_data, get_zha_gateway + +if TYPE_CHECKING: + from ..entity import ZhaEntity + from .device import ZHADevice + from .endpoint import Endpoint + from .group import ZHAGroup + +_LOGGER = logging.getLogger(__name__) + + +QUIRKS_ENTITY_META_TO_ENTITY_CLASS = { + ( + Platform.BUTTON, + WriteAttributeButtonMetadata, + EntityType.CONFIG, + ): button.ZHAAttributeButton, + ( + Platform.BUTTON, + WriteAttributeButtonMetadata, + EntityType.STANDARD, + ): button.ZHAAttributeButton, + (Platform.BUTTON, ZCLCommandButtonMetadata, EntityType.CONFIG): button.ZHAButton, + ( + Platform.BUTTON, + ZCLCommandButtonMetadata, + EntityType.DIAGNOSTIC, + ): button.ZHAButton, + (Platform.BUTTON, ZCLCommandButtonMetadata, EntityType.STANDARD): button.ZHAButton, + ( + Platform.BINARY_SENSOR, + BinarySensorMetadata, + EntityType.CONFIG, + ): binary_sensor.BinarySensor, + ( + Platform.BINARY_SENSOR, + BinarySensorMetadata, + EntityType.DIAGNOSTIC, + ): binary_sensor.BinarySensor, + ( + Platform.BINARY_SENSOR, + BinarySensorMetadata, + EntityType.STANDARD, + ): binary_sensor.BinarySensor, + (Platform.SENSOR, ZCLEnumMetadata, EntityType.DIAGNOSTIC): sensor.EnumSensor, + (Platform.SENSOR, ZCLEnumMetadata, EntityType.STANDARD): sensor.EnumSensor, + (Platform.SENSOR, ZCLSensorMetadata, EntityType.DIAGNOSTIC): sensor.Sensor, + (Platform.SENSOR, ZCLSensorMetadata, EntityType.STANDARD): sensor.Sensor, + (Platform.SELECT, ZCLEnumMetadata, EntityType.CONFIG): select.ZCLEnumSelectEntity, + (Platform.SELECT, ZCLEnumMetadata, EntityType.STANDARD): select.ZCLEnumSelectEntity, + ( + Platform.SELECT, + ZCLEnumMetadata, + EntityType.DIAGNOSTIC, + ): select.ZCLEnumSelectEntity, + ( + Platform.NUMBER, + NumberMetadata, + EntityType.CONFIG, + ): number.ZHANumberConfigurationEntity, + (Platform.NUMBER, NumberMetadata, EntityType.DIAGNOSTIC): number.ZhaNumber, + (Platform.NUMBER, NumberMetadata, EntityType.STANDARD): number.ZhaNumber, + ( + Platform.SWITCH, + SwitchMetadata, + EntityType.CONFIG, + ): switch.ZHASwitchConfigurationEntity, + (Platform.SWITCH, SwitchMetadata, EntityType.STANDARD): switch.Switch, +} + + +@callback +async def async_add_entities( + _async_add_entities: AddEntitiesCallback, + entities: list[ + tuple[ + type[ZhaEntity], + tuple[str, ZHADevice, list[ClusterHandler]], + dict[str, Any], + ] + ], + **kwargs, +) -> None: + """Add entities helper.""" + if not entities: + return + + to_add = [ + ent_cls.create_entity(*args, **{**kwargs, **kw_args}) + for ent_cls, args, kw_args in entities + ] + entities_to_add = [entity for entity in to_add if entity is not None] + _async_add_entities(entities_to_add, update_before_add=False) + entities.clear() + + +class ProbeEndpoint: + """All discovered cluster handlers and entities of an endpoint.""" + + def __init__(self) -> None: + """Initialize instance.""" + self._device_configs: ConfigType = {} + + @callback + def discover_entities(self, endpoint: Endpoint) -> None: + """Process an endpoint on a zigpy device.""" + _LOGGER.debug( + "Discovering entities for endpoint: %s-%s", + str(endpoint.device.ieee), + endpoint.id, + ) + self.discover_by_device_type(endpoint) + self.discover_multi_entities(endpoint) + 
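# --- Editor's note: illustrative sketch, not part of the patch -------------
# QUIRKS_ENTITY_META_TO_ENTITY_CLASS above is keyed by the triple
# (Platform, quirks-v2 metadata class, EntityType), and the quirks v2
# discovery code below picks the entity class with a plain .get() on that
# triple, logging a warning and skipping the entity when no mapping exists.
# A stripped-down stand-in using strings instead of the real classes shows
# the lookup shape (the table contents here are hypothetical):
_DEMO_TABLE = {
    ("sensor", "ZCLSensorMetadata", "STANDARD"): "sensor.Sensor",
    ("switch", "SwitchMetadata", "CONFIG"): "switch.ZHASwitchConfigurationEntity",
}

key = ("sensor", "ZCLSensorMetadata", "STANDARD")
if (entity_class := _DEMO_TABLE.get(key)) is None:
    print("no entity class mapping - skipping entity")
else:
    print(f"creating {entity_class}")
# ---------------------------------------------------------------------------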
self.discover_by_cluster_id(endpoint) + self.discover_multi_entities(endpoint, config_diagnostic_entities=True) + zha_regs.ZHA_ENTITIES.clean_up() + + @callback + def discover_device_entities(self, device: ZHADevice) -> None: + """Discover entities for a ZHA device.""" + _LOGGER.debug( + "Discovering entities for device: %s-%s", + str(device.ieee), + device.name, + ) + + if device.is_coordinator: + self.discover_coordinator_device_entities(device) + return + + self.discover_quirks_v2_entities(device) + zha_regs.ZHA_ENTITIES.clean_up() + + @callback + def discover_quirks_v2_entities(self, device: ZHADevice) -> None: + """Discover entities for a ZHA device exposed by quirks v2.""" + _LOGGER.debug( + "Attempting to discover quirks v2 entities for device: %s-%s", + str(device.ieee), + device.name, + ) + + if not isinstance(device.device, CustomDeviceV2): + _LOGGER.debug( + "Device: %s-%s is not a quirks v2 device - skipping " + "discover_quirks_v2_entities", + str(device.ieee), + device.name, + ) + return + + zigpy_device: CustomDeviceV2 = device.device + + if not zigpy_device.exposes_metadata: + _LOGGER.debug( + "Device: %s-%s does not expose any quirks v2 entities", + str(device.ieee), + device.name, + ) + return + + for ( + cluster_details, + entity_metadata_list, + ) in zigpy_device.exposes_metadata.items(): + endpoint_id, cluster_id, cluster_type = cluster_details + + if endpoint_id not in device.endpoints: + _LOGGER.warning( + "Device: %s-%s does not have an endpoint with id: %s - unable to " + "create entity with cluster details: %s", + str(device.ieee), + device.name, + endpoint_id, + cluster_details, + ) + continue + + endpoint: Endpoint = device.endpoints[endpoint_id] + cluster = ( + endpoint.zigpy_endpoint.in_clusters.get(cluster_id) + if cluster_type is ClusterType.Server + else endpoint.zigpy_endpoint.out_clusters.get(cluster_id) + ) + + if cluster is None: + _LOGGER.warning( + "Device: %s-%s does not have a cluster with id: %s - " + "unable to create entity with cluster details: %s", + str(device.ieee), + device.name, + cluster_id, + cluster_details, + ) + continue + + cluster_handler_id = f"{endpoint.id}:0x{cluster.cluster_id:04x}" + cluster_handler = ( + endpoint.all_cluster_handlers.get(cluster_handler_id) + if cluster_type is ClusterType.Server + else endpoint.client_cluster_handlers.get(cluster_handler_id) + ) + assert cluster_handler + + for entity_metadata in entity_metadata_list: + platform = Platform(entity_metadata.entity_platform.value) + metadata_type = type(entity_metadata) + entity_class = QUIRKS_ENTITY_META_TO_ENTITY_CLASS.get( + (platform, metadata_type, entity_metadata.entity_type) + ) + + if entity_class is None: + _LOGGER.warning( + "Device: %s-%s has an entity with details: %s that does not" + " have an entity class mapping - unable to create entity", + str(device.ieee), + device.name, + { + zha_const.CLUSTER_DETAILS: cluster_details, + zha_const.ENTITY_METADATA: entity_metadata, + }, + ) + continue + + # automatically add the attribute to ZCL_INIT_ATTRS for the cluster + # handler if it is not already in the list + if ( + hasattr(entity_metadata, "attribute_name") + and entity_metadata.attribute_name + not in cluster_handler.ZCL_INIT_ATTRS + ): + init_attrs = cluster_handler.ZCL_INIT_ATTRS.copy() + init_attrs[entity_metadata.attribute_name] = ( + entity_metadata.attribute_initialized_from_cache + ) + cluster_handler.__dict__[zha_const.ZCL_INIT_ATTRS] = init_attrs + + endpoint.async_new_entity( + platform, + entity_class, + endpoint.unique_id, + 
[cluster_handler], + entity_metadata=entity_metadata, + ) + + _LOGGER.debug( + "'%s' platform -> '%s' using %s", + platform, + entity_class.__name__, + [cluster_handler.name], + ) + + @callback + def discover_coordinator_device_entities(self, device: ZHADevice) -> None: + """Discover entities for the coordinator device.""" + _LOGGER.debug( + "Discovering entities for coordinator device: %s-%s", + str(device.ieee), + device.name, + ) + state: State = device.gateway.application_controller.state + platforms: dict[Platform, list] = get_zha_data(device.hass).platforms + + @callback + def process_counters(counter_groups: str) -> None: + for counter_group, counters in getattr(state, counter_groups).items(): + for counter in counters: + platforms[Platform.SENSOR].append( + ( + sensor.DeviceCounterSensor, + ( + f"{slugify(str(device.ieee))}_{counter_groups}_{counter_group}_{counter}", + device, + counter_groups, + counter_group, + counter, + ), + {}, + ) + ) + _LOGGER.debug( + "'%s' platform -> '%s' using %s", + Platform.SENSOR, + sensor.DeviceCounterSensor.__name__, + f"counter groups[{counter_groups}] counter group[{counter_group}] counter[{counter}]", + ) + + process_counters("counters") + process_counters("broadcast_counters") + process_counters("device_counters") + process_counters("group_counters") + + @callback + def discover_by_device_type(self, endpoint: Endpoint) -> None: + """Process an endpoint on a zigpy device.""" + + unique_id = endpoint.unique_id + + platform: str | None = self._device_configs.get(unique_id, {}).get(CONF_TYPE) + if platform is None: + ep_profile_id = endpoint.zigpy_endpoint.profile_id + ep_device_type = endpoint.zigpy_endpoint.device_type + platform = zha_regs.DEVICE_CLASS[ep_profile_id].get(ep_device_type) + + if platform and platform in zha_const.PLATFORMS: + platform = cast(Platform, platform) + + cluster_handlers = endpoint.unclaimed_cluster_handlers() + platform_entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity( + platform, + endpoint.device.manufacturer, + endpoint.device.model, + cluster_handlers, + endpoint.device.quirk_id, + ) + if platform_entity_class is None: + return + endpoint.claim_cluster_handlers(claimed) + endpoint.async_new_entity( + platform, platform_entity_class, unique_id, claimed + ) + + @callback + def discover_by_cluster_id(self, endpoint: Endpoint) -> None: + """Process an endpoint on a zigpy device.""" + + items = zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.items() + single_input_clusters = { + cluster_class: match + for cluster_class, match in items + if not isinstance(cluster_class, int) + } + remaining_cluster_handlers = endpoint.unclaimed_cluster_handlers() + for cluster_handler in remaining_cluster_handlers: + if ( + cluster_handler.cluster.cluster_id + in zha_regs.CLUSTER_HANDLER_ONLY_CLUSTERS + ): + endpoint.claim_cluster_handlers([cluster_handler]) + continue + + platform = zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.get( + cluster_handler.cluster.cluster_id + ) + if platform is None: + for cluster_class, match in single_input_clusters.items(): + if isinstance(cluster_handler.cluster, cluster_class): + platform = match + break + + self.probe_single_cluster(platform, cluster_handler, endpoint) + + # until we can get rid of registries + self.handle_on_off_output_cluster_exception(endpoint) + + @staticmethod + def probe_single_cluster( + platform: Platform | None, + cluster_handler: ClusterHandler, + endpoint: Endpoint, + ) -> None: + """Probe specified cluster for specific component.""" + if platform is None or platform not in 
zha_const.PLATFORMS: + return + cluster_handler_list = [cluster_handler] + unique_id = f"{endpoint.unique_id}-{cluster_handler.cluster.cluster_id}" + + entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity( + platform, + endpoint.device.manufacturer, + endpoint.device.model, + cluster_handler_list, + endpoint.device.quirk_id, + ) + if entity_class is None: + return + endpoint.claim_cluster_handlers(claimed) + endpoint.async_new_entity(platform, entity_class, unique_id, claimed) + + def handle_on_off_output_cluster_exception(self, endpoint: Endpoint) -> None: + """Process output clusters of the endpoint.""" + + profile_id = endpoint.zigpy_endpoint.profile_id + device_type = endpoint.zigpy_endpoint.device_type + if device_type in zha_regs.REMOTE_DEVICE_TYPES.get(profile_id, []): + return + + for cluster_id, cluster in endpoint.zigpy_endpoint.out_clusters.items(): + platform = zha_regs.SINGLE_OUTPUT_CLUSTER_DEVICE_CLASS.get( + cluster.cluster_id + ) + if platform is None: + continue + + cluster_handler_classes = zha_regs.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( + cluster_id, {None: ClusterHandler} + ) + + quirk_id = ( + endpoint.device.quirk_id + if endpoint.device.quirk_id in cluster_handler_classes + else None + ) + + cluster_handler_class = cluster_handler_classes.get( + quirk_id, ClusterHandler + ) + + cluster_handler = cluster_handler_class(cluster, endpoint) + self.probe_single_cluster(platform, cluster_handler, endpoint) + + @staticmethod + @callback + def discover_multi_entities( + endpoint: Endpoint, + config_diagnostic_entities: bool = False, + ) -> None: + """Process an endpoint on and discover multiple entities.""" + + ep_profile_id = endpoint.zigpy_endpoint.profile_id + ep_device_type = endpoint.zigpy_endpoint.device_type + cmpt_by_dev_type = zha_regs.DEVICE_CLASS[ep_profile_id].get(ep_device_type) + + if config_diagnostic_entities: + cluster_handlers = list(endpoint.all_cluster_handlers.values()) + ota_handler_id = f"{endpoint.id}:0x{Ota.cluster_id:04x}" + if ota_handler_id in endpoint.client_cluster_handlers: + cluster_handlers.append( + endpoint.client_cluster_handlers[ota_handler_id] + ) + matches, claimed = zha_regs.ZHA_ENTITIES.get_config_diagnostic_entity( + endpoint.device.manufacturer, + endpoint.device.model, + cluster_handlers, + endpoint.device.quirk_id, + ) + else: + matches, claimed = zha_regs.ZHA_ENTITIES.get_multi_entity( + endpoint.device.manufacturer, + endpoint.device.model, + endpoint.unclaimed_cluster_handlers(), + endpoint.device.quirk_id, + ) + + endpoint.claim_cluster_handlers(claimed) + for platform, ent_n_handler_list in matches.items(): + for entity_and_handler in ent_n_handler_list: + _LOGGER.debug( + "'%s' platform -> '%s' using %s", + platform, + entity_and_handler.entity_class.__name__, + [ch.name for ch in entity_and_handler.claimed_cluster_handlers], + ) + for platform, ent_n_handler_list in matches.items(): + for entity_and_handler in ent_n_handler_list: + if platform == cmpt_by_dev_type: + # for well known device types, + # like thermostats we'll take only 1st class + endpoint.async_new_entity( + platform, + entity_and_handler.entity_class, + endpoint.unique_id, + entity_and_handler.claimed_cluster_handlers, + ) + break + first_ch = entity_and_handler.claimed_cluster_handlers[0] + endpoint.async_new_entity( + platform, + entity_and_handler.entity_class, + f"{endpoint.unique_id}-{first_ch.cluster.cluster_id}", + entity_and_handler.claimed_cluster_handlers, + ) + + def initialize(self, hass: HomeAssistant) -> None: + """Update device overrides 
config.""" + zha_config = get_zha_data(hass).yaml_config + if overrides := zha_config.get(zha_const.CONF_DEVICE_CONFIG): + self._device_configs.update(overrides) + + +class GroupProbe: + """Determine the appropriate component for a group.""" + + _hass: HomeAssistant + + def __init__(self) -> None: + """Initialize instance.""" + self._unsubs: list[Callable[[], None]] = [] + + def initialize(self, hass: HomeAssistant) -> None: + """Initialize the group probe.""" + self._hass = hass + self._unsubs.append( + async_dispatcher_connect( + hass, zha_const.SIGNAL_GROUP_ENTITY_REMOVED, self._reprobe_group + ) + ) + + def cleanup(self) -> None: + """Clean up on when ZHA shuts down.""" + for unsub in self._unsubs[:]: + unsub() + self._unsubs.remove(unsub) + + @callback + def _reprobe_group(self, group_id: int) -> None: + """Reprobe a group for entities after its members change.""" + zha_gateway = get_zha_gateway(self._hass) + if (zha_group := zha_gateway.groups.get(group_id)) is None: + return + self.discover_group_entities(zha_group) + + @callback + def discover_group_entities(self, group: ZHAGroup) -> None: + """Process a group and create any entities that are needed.""" + # only create a group entity if there are 2 or more members in a group + if len(group.members) < 2: + _LOGGER.debug( + "Group: %s:0x%04x has less than 2 members - skipping entity discovery", + group.name, + group.group_id, + ) + return + + entity_domains = GroupProbe.determine_entity_domains(self._hass, group) + + if not entity_domains: + return + + zha_data = get_zha_data(self._hass) + zha_gateway = get_zha_gateway(self._hass) + + for domain in entity_domains: + entity_class = zha_regs.ZHA_ENTITIES.get_group_entity(domain) + if entity_class is None: + continue + zha_data.platforms[domain].append( + ( + entity_class, + ( + group.get_domain_entity_ids(domain), + f"{domain}_zha_group_0x{group.group_id:04x}", + group.group_id, + zha_gateway.coordinator_zha_device, + ), + {}, + ) + ) + async_dispatcher_send(self._hass, zha_const.SIGNAL_ADD_ENTITIES) + + @staticmethod + def determine_entity_domains( + hass: HomeAssistant, group: ZHAGroup + ) -> list[Platform]: + """Determine the entity domains for this group.""" + entity_registry = er.async_get(hass) + + entity_domains: list[Platform] = [] + all_domain_occurrences: list[Platform] = [] + + for member in group.members: + if member.device.is_coordinator: + continue + entities = async_entries_for_device( + entity_registry, + member.device.device_id, + include_disabled_entities=True, + ) + all_domain_occurrences.extend( + [ + cast(Platform, entity.domain) + for entity in entities + if entity.domain in zha_regs.GROUP_ENTITY_DOMAINS + ] + ) + if not all_domain_occurrences: + return entity_domains + # get all domains we care about if there are more than 2 entities of this domain + counts = Counter(all_domain_occurrences) + entity_domains = [domain[0] for domain in counts.items() if domain[1] >= 2] + _LOGGER.debug( + "The entity domains are: %s for group: %s:0x%04x", + entity_domains, + group.name, + group.group_id, + ) + return entity_domains + + +PROBE = ProbeEndpoint() +GROUP_PROBE = GroupProbe() diff --git a/homeassistant/components/zha/core/endpoint.py b/homeassistant/components/zha/core/endpoint.py new file mode 100644 index 00000000000..32483a3bc53 --- /dev/null +++ b/homeassistant/components/zha/core/endpoint.py @@ -0,0 +1,253 @@ +"""Representation of a Zigbee endpoint for zha.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Awaitable, Callable 
+import functools +import logging +from typing import TYPE_CHECKING, Any, Final + +from homeassistant.const import Platform +from homeassistant.core import callback +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.util.async_ import gather_with_limited_concurrency + +from . import const, discovery, registries +from .cluster_handlers import ClusterHandler +from .helpers import get_zha_data + +if TYPE_CHECKING: + from zigpy import Endpoint as ZigpyEndpoint + + from .cluster_handlers import ClientClusterHandler + from .device import ZHADevice + +ATTR_DEVICE_TYPE: Final[str] = "device_type" +ATTR_PROFILE_ID: Final[str] = "profile_id" +ATTR_IN_CLUSTERS: Final[str] = "input_clusters" +ATTR_OUT_CLUSTERS: Final[str] = "output_clusters" + +_LOGGER = logging.getLogger(__name__) + + +class Endpoint: + """Endpoint for a zha device.""" + + def __init__(self, zigpy_endpoint: ZigpyEndpoint, device: ZHADevice) -> None: + """Initialize instance.""" + assert zigpy_endpoint is not None + assert device is not None + self._zigpy_endpoint: ZigpyEndpoint = zigpy_endpoint + self._device: ZHADevice = device + self._all_cluster_handlers: dict[str, ClusterHandler] = {} + self._claimed_cluster_handlers: dict[str, ClusterHandler] = {} + self._client_cluster_handlers: dict[str, ClientClusterHandler] = {} + self._unique_id: str = f"{device.ieee!s}-{zigpy_endpoint.endpoint_id}" + + @property + def device(self) -> ZHADevice: + """Return the device this endpoint belongs to.""" + return self._device + + @property + def all_cluster_handlers(self) -> dict[str, ClusterHandler]: + """All server cluster handlers of an endpoint.""" + return self._all_cluster_handlers + + @property + def claimed_cluster_handlers(self) -> dict[str, ClusterHandler]: + """Cluster handlers in use.""" + return self._claimed_cluster_handlers + + @property + def client_cluster_handlers(self) -> dict[str, ClientClusterHandler]: + """Return a dict of client cluster handlers.""" + return self._client_cluster_handlers + + @property + def zigpy_endpoint(self) -> ZigpyEndpoint: + """Return endpoint of zigpy device.""" + return self._zigpy_endpoint + + @property + def id(self) -> int: + """Return endpoint id.""" + return self._zigpy_endpoint.endpoint_id + + @property + def unique_id(self) -> str: + """Return the unique id for this endpoint.""" + return self._unique_id + + @property + def zigbee_signature(self) -> tuple[int, dict[str, Any]]: + """Get the zigbee signature for the endpoint this pool represents.""" + return ( + self.id, + { + ATTR_PROFILE_ID: f"0x{self._zigpy_endpoint.profile_id:04x}" + if self._zigpy_endpoint.profile_id is not None + else "", + ATTR_DEVICE_TYPE: f"0x{self._zigpy_endpoint.device_type:04x}" + if self._zigpy_endpoint.device_type is not None + else "", + ATTR_IN_CLUSTERS: [ + f"0x{cluster_id:04x}" + for cluster_id in sorted(self._zigpy_endpoint.in_clusters) + ], + ATTR_OUT_CLUSTERS: [ + f"0x{cluster_id:04x}" + for cluster_id in sorted(self._zigpy_endpoint.out_clusters) + ], + }, + ) + + @classmethod + def new(cls, zigpy_endpoint: ZigpyEndpoint, device: ZHADevice) -> Endpoint: + """Create new endpoint and populate cluster handlers.""" + endpoint = cls(zigpy_endpoint, device) + endpoint.add_all_cluster_handlers() + endpoint.add_client_cluster_handlers() + if not device.is_coordinator: + discovery.PROBE.discover_entities(endpoint) + return endpoint + + def add_all_cluster_handlers(self) -> None: + """Create and add cluster handlers for all input clusters.""" + for cluster_id, cluster in 
self.zigpy_endpoint.in_clusters.items(): + cluster_handler_classes = registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( + cluster_id, {None: ClusterHandler} + ) + quirk_id = ( + self.device.quirk_id + if self.device.quirk_id in cluster_handler_classes + else None + ) + cluster_handler_class = cluster_handler_classes.get( + quirk_id, ClusterHandler + ) + + # Allow cluster handler to filter out bad matches + if not cluster_handler_class.matches(cluster, self): + cluster_handler_class = ClusterHandler + + _LOGGER.debug( + "Creating cluster handler for cluster id: %s class: %s", + cluster_id, + cluster_handler_class, + ) + + try: + cluster_handler = cluster_handler_class(cluster, self) + except KeyError as err: + _LOGGER.warning( + "Cluster handler %s for cluster %s on endpoint %s is invalid: %s", + cluster_handler_class, + cluster, + self, + err, + ) + continue + + if cluster_handler.name == const.CLUSTER_HANDLER_POWER_CONFIGURATION: + self._device.power_configuration_ch = cluster_handler + elif cluster_handler.name == const.CLUSTER_HANDLER_IDENTIFY: + self._device.identify_ch = cluster_handler + elif cluster_handler.name == const.CLUSTER_HANDLER_BASIC: + self._device.basic_ch = cluster_handler + self._all_cluster_handlers[cluster_handler.id] = cluster_handler + + def add_client_cluster_handlers(self) -> None: + """Create client cluster handlers for all output clusters if in the registry.""" + for ( + cluster_id, + cluster_handler_class, + ) in registries.CLIENT_CLUSTER_HANDLER_REGISTRY.items(): + cluster = self.zigpy_endpoint.out_clusters.get(cluster_id) + if cluster is not None: + cluster_handler = cluster_handler_class(cluster, self) + self.client_cluster_handlers[cluster_handler.id] = cluster_handler + + async def async_initialize(self, from_cache: bool = False) -> None: + """Initialize claimed cluster handlers.""" + await self._execute_handler_tasks( + "async_initialize", from_cache, max_concurrency=1 + ) + + async def async_configure(self) -> None: + """Configure claimed cluster handlers.""" + await self._execute_handler_tasks("async_configure") + + async def _execute_handler_tasks( + self, func_name: str, *args: Any, max_concurrency: int | None = None + ) -> None: + """Add a throttled cluster handler task and swallow exceptions.""" + cluster_handlers = [ + *self.claimed_cluster_handlers.values(), + *self.client_cluster_handlers.values(), + ] + tasks = [getattr(ch, func_name)(*args) for ch in cluster_handlers] + + gather: Callable[..., Awaitable] + + if max_concurrency is None: + gather = asyncio.gather + else: + gather = functools.partial(gather_with_limited_concurrency, max_concurrency) + + results = await gather(*tasks, return_exceptions=True) + for cluster_handler, outcome in zip(cluster_handlers, results, strict=False): + if isinstance(outcome, Exception): + cluster_handler.debug( + "'%s' stage failed: %s", func_name, str(outcome), exc_info=outcome + ) + else: + cluster_handler.debug("'%s' stage succeeded", func_name) + + def async_new_entity( + self, + platform: Platform, + entity_class: type, + unique_id: str, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Create a new entity.""" + from .device import DeviceStatus # pylint: disable=import-outside-toplevel + + if self.device.status == DeviceStatus.INITIALIZED: + return + + zha_data = get_zha_data(self.device.hass) + zha_data.platforms[platform].append( + (entity_class, (unique_id, self.device, cluster_handlers), kwargs or {}) + ) + + @callback + def async_send_signal(self, signal: str, *args: Any) -> 
None: + """Send a signal through hass dispatcher.""" + async_dispatcher_send(self.device.hass, signal, *args) + + def send_event(self, signal: dict[str, Any]) -> None: + """Broadcast an event from this endpoint.""" + self.device.zha_send_event( + { + const.ATTR_UNIQUE_ID: self.unique_id, + const.ATTR_ENDPOINT_ID: self.id, + **signal, + } + ) + + def claim_cluster_handlers(self, cluster_handlers: list[ClusterHandler]) -> None: + """Claim cluster handlers.""" + self.claimed_cluster_handlers.update({ch.id: ch for ch in cluster_handlers}) + + def unclaimed_cluster_handlers(self) -> list[ClusterHandler]: + """Return a list of available (unclaimed) cluster handlers.""" + claimed = set(self.claimed_cluster_handlers) + available = set(self.all_cluster_handlers) + return [ + self.all_cluster_handlers[cluster_id] + for cluster_id in (available - claimed) + ] diff --git a/homeassistant/components/zha/core/gateway.py b/homeassistant/components/zha/core/gateway.py new file mode 100644 index 00000000000..8b8826e2648 --- /dev/null +++ b/homeassistant/components/zha/core/gateway.py @@ -0,0 +1,882 @@ +"""Virtual gateway for Zigbee Home Automation.""" + +from __future__ import annotations + +import asyncio +import collections +from collections.abc import Callable +from contextlib import suppress +from datetime import timedelta +from enum import Enum +import itertools +import logging +import re +import time +from typing import TYPE_CHECKING, Any, NamedTuple, Self, cast + +from zigpy.application import ControllerApplication +from zigpy.config import ( + CONF_DATABASE, + CONF_DEVICE, + CONF_DEVICE_PATH, + CONF_NWK, + CONF_NWK_CHANNEL, + CONF_NWK_VALIDATE_SETTINGS, +) +import zigpy.device +import zigpy.endpoint +import zigpy.group +from zigpy.state import State +from zigpy.types.named import EUI64 + +from homeassistant import __path__ as HOMEASSISTANT_PATH +from homeassistant.components.system_log import LogEntry +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.typing import ConfigType +from homeassistant.util.async_ import gather_with_limited_concurrency + +from . 
import discovery +from .const import ( + ATTR_IEEE, + ATTR_MANUFACTURER, + ATTR_MODEL, + ATTR_NWK, + ATTR_SIGNATURE, + ATTR_TYPE, + CONF_RADIO_TYPE, + CONF_USE_THREAD, + CONF_ZIGPY, + DATA_ZHA, + DEBUG_COMP_BELLOWS, + DEBUG_COMP_ZHA, + DEBUG_COMP_ZIGPY, + DEBUG_COMP_ZIGPY_DECONZ, + DEBUG_COMP_ZIGPY_XBEE, + DEBUG_COMP_ZIGPY_ZIGATE, + DEBUG_COMP_ZIGPY_ZNP, + DEBUG_LEVEL_CURRENT, + DEBUG_LEVEL_ORIGINAL, + DEBUG_LEVELS, + DEBUG_RELAY_LOGGERS, + DEFAULT_DATABASE_NAME, + DEVICE_PAIRING_STATUS, + DOMAIN, + SIGNAL_ADD_ENTITIES, + SIGNAL_GROUP_MEMBERSHIP_CHANGE, + SIGNAL_REMOVE, + UNKNOWN_MANUFACTURER, + UNKNOWN_MODEL, + ZHA_GW_MSG, + ZHA_GW_MSG_DEVICE_FULL_INIT, + ZHA_GW_MSG_DEVICE_INFO, + ZHA_GW_MSG_DEVICE_JOINED, + ZHA_GW_MSG_DEVICE_REMOVED, + ZHA_GW_MSG_GROUP_ADDED, + ZHA_GW_MSG_GROUP_INFO, + ZHA_GW_MSG_GROUP_MEMBER_ADDED, + ZHA_GW_MSG_GROUP_MEMBER_REMOVED, + ZHA_GW_MSG_GROUP_REMOVED, + ZHA_GW_MSG_LOG_ENTRY, + ZHA_GW_MSG_LOG_OUTPUT, + ZHA_GW_MSG_RAW_INIT, + RadioType, +) +from .device import DeviceStatus, ZHADevice +from .group import GroupMember, ZHAGroup +from .helpers import get_zha_data +from .registries import GROUP_ENTITY_DOMAINS + +if TYPE_CHECKING: + from logging import Filter, LogRecord + + from ..entity import ZhaEntity + from .cluster_handlers import ClusterHandler + + type _LogFilterType = Filter | Callable[[LogRecord], bool] + +_LOGGER = logging.getLogger(__name__) + + +class EntityReference(NamedTuple): + """Describes an entity reference.""" + + reference_id: str + zha_device: ZHADevice + cluster_handlers: dict[str, ClusterHandler] + device_info: DeviceInfo + remove_future: asyncio.Future[Any] + + +class DevicePairingStatus(Enum): + """Status of a device.""" + + PAIRED = 1 + INTERVIEW_COMPLETE = 2 + CONFIGURED = 3 + INITIALIZED = 4 + + +class ZHAGateway: + """Gateway that handles events that happen on the ZHA Zigbee network.""" + + def __init__( + self, hass: HomeAssistant, config: ConfigType, config_entry: ConfigEntry + ) -> None: + """Initialize the gateway.""" + self.hass = hass + self._config = config + self._devices: dict[EUI64, ZHADevice] = {} + self._groups: dict[int, ZHAGroup] = {} + self.application_controller: ControllerApplication = None + self.coordinator_zha_device: ZHADevice = None # type: ignore[assignment] + self._device_registry: collections.defaultdict[EUI64, list[EntityReference]] = ( + collections.defaultdict(list) + ) + self._log_levels: dict[str, dict[str, int]] = { + DEBUG_LEVEL_ORIGINAL: async_capture_log_levels(), + DEBUG_LEVEL_CURRENT: async_capture_log_levels(), + } + self.debug_enabled = False + self._log_relay_handler = LogRelayHandler(hass, self) + self.config_entry = config_entry + self._unsubs: list[Callable[[], None]] = [] + + self.shutting_down = False + self._reload_task: asyncio.Task | None = None + + def get_application_controller_data(self) -> tuple[ControllerApplication, dict]: + """Get an uninitialized instance of a zigpy `ControllerApplication`.""" + radio_type = RadioType[self.config_entry.data[CONF_RADIO_TYPE]] + + app_config = self._config.get(CONF_ZIGPY, {}) + database = self._config.get( + CONF_DATABASE, + self.hass.config.path(DEFAULT_DATABASE_NAME), + ) + app_config[CONF_DATABASE] = database + app_config[CONF_DEVICE] = self.config_entry.data[CONF_DEVICE] + + if CONF_NWK_VALIDATE_SETTINGS not in app_config: + app_config[CONF_NWK_VALIDATE_SETTINGS] = True + + # The bellows UART thread sometimes propagates a cancellation into the main Core + # event loop, when a connection to a TCP coordinator fails in a specific way + if ( + 
CONF_USE_THREAD not in app_config + and radio_type is RadioType.ezsp + and app_config[CONF_DEVICE][CONF_DEVICE_PATH].startswith("socket://") + ): + app_config[CONF_USE_THREAD] = False + + # Local import to avoid circular dependencies + # pylint: disable-next=import-outside-toplevel + from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( + is_multiprotocol_url, + ) + + # Until we have a way to coordinate channels with the Thread half of multi-PAN, + # stick to the old zigpy default of channel 15 instead of dynamically scanning + if ( + is_multiprotocol_url(app_config[CONF_DEVICE][CONF_DEVICE_PATH]) + and app_config.get(CONF_NWK, {}).get(CONF_NWK_CHANNEL) is None + ): + app_config.setdefault(CONF_NWK, {})[CONF_NWK_CHANNEL] = 15 + + return radio_type.controller, radio_type.controller.SCHEMA(app_config) + + @classmethod + async def async_from_config( + cls, hass: HomeAssistant, config: ConfigType, config_entry: ConfigEntry + ) -> Self: + """Create an instance of a gateway from config objects.""" + instance = cls(hass, config, config_entry) + await instance.async_initialize() + return instance + + async def async_initialize(self) -> None: + """Initialize controller and connect radio.""" + discovery.PROBE.initialize(self.hass) + discovery.GROUP_PROBE.initialize(self.hass) + + self.shutting_down = False + + app_controller_cls, app_config = self.get_application_controller_data() + app = await app_controller_cls.new( + config=app_config, + auto_form=False, + start_radio=False, + ) + + try: + await app.startup(auto_form=True) + except Exception: + # Explicitly shut down the controller application on failure + await app.shutdown() + raise + + self.application_controller = app + + zha_data = get_zha_data(self.hass) + zha_data.gateway = self + + self.coordinator_zha_device = self._async_get_or_create_device( + self._find_coordinator_device() + ) + + self.async_load_devices() + self.async_load_groups() + + self.application_controller.add_listener(self) + self.application_controller.groups.add_listener(self) + + def connection_lost(self, exc: Exception) -> None: + """Handle connection lost event.""" + _LOGGER.debug("Connection to the radio was lost: %r", exc) + + if self.shutting_down: + return + + # Ensure we do not queue up multiple resets + if self._reload_task is not None: + _LOGGER.debug("Ignoring reset, one is already running") + return + + self._reload_task = self.hass.async_create_task( + self.hass.config_entries.async_reload(self.config_entry.entry_id) + ) + + def _find_coordinator_device(self) -> zigpy.device.Device: + zigpy_coordinator = self.application_controller.get_device(nwk=0x0000) + + if last_backup := self.application_controller.backups.most_recent_backup(): + with suppress(KeyError): + zigpy_coordinator = self.application_controller.get_device( + ieee=last_backup.node_info.ieee + ) + + return zigpy_coordinator + + @callback + def async_load_devices(self) -> None: + """Restore ZHA devices from zigpy application state.""" + + for zigpy_device in self.application_controller.devices.values(): + zha_device = self._async_get_or_create_device(zigpy_device) + delta_msg = "not known" + if zha_device.last_seen is not None: + delta = round(time.time() - zha_device.last_seen) + delta_msg = f"{timedelta(seconds=delta)!s} ago" + _LOGGER.debug( + ( + "[%s](%s) restored as '%s', last seen: %s," + " consider_unavailable_time: %s seconds" + ), + zha_device.nwk, + zha_device.name, + "available" if zha_device.available else "unavailable", + delta_msg, + 
zha_device.consider_unavailable_time, + ) + + @callback + def async_load_groups(self) -> None: + """Initialize ZHA groups.""" + + for group_id in self.application_controller.groups: + group = self.application_controller.groups[group_id] + zha_group = self._async_get_or_create_group(group) + # we can do this here because the entities are in the + # entity registry tied to the devices + discovery.GROUP_PROBE.discover_group_entities(zha_group) + + @property + def radio_concurrency(self) -> int: + """Maximum configured radio concurrency.""" + return self.application_controller._concurrent_requests_semaphore.max_value # noqa: SLF001 + + async def async_fetch_updated_state_mains(self) -> None: + """Fetch updated state for mains powered devices.""" + _LOGGER.debug("Fetching current state for mains powered devices") + + now = time.time() + + # Only delay startup to poll mains-powered devices that are online + online_devices = [ + dev + for dev in self.devices.values() + if dev.is_mains_powered + and dev.last_seen is not None + and (now - dev.last_seen) < dev.consider_unavailable_time + ] + + # Prioritize devices that have recently been contacted + online_devices.sort(key=lambda dev: cast(float, dev.last_seen), reverse=True) + + # Make sure that we always leave slots for non-startup requests + max_poll_concurrency = max(1, self.radio_concurrency - 4) + + await gather_with_limited_concurrency( + max_poll_concurrency, + *(dev.async_initialize(from_cache=False) for dev in online_devices), + ) + + _LOGGER.debug("completed fetching current state for mains powered devices") + + async def async_initialize_devices_and_entities(self) -> None: + """Initialize devices and load entities.""" + + _LOGGER.debug("Initializing all devices from Zigpy cache") + await asyncio.gather( + *(dev.async_initialize(from_cache=True) for dev in self.devices.values()) + ) + + async def fetch_updated_state() -> None: + """Fetch updated state for mains powered devices.""" + await self.async_fetch_updated_state_mains() + _LOGGER.debug("Allowing polled requests") + self.hass.data[DATA_ZHA].allow_polling = True + + # background the fetching of state for mains powered devices + self.config_entry.async_create_background_task( + self.hass, fetch_updated_state(), "zha.gateway-fetch_updated_state" + ) + + def device_joined(self, device: zigpy.device.Device) -> None: + """Handle device joined. 
+ + At this point, no information about the device is known other than its + address + """ + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: ZHA_GW_MSG_DEVICE_JOINED, + ZHA_GW_MSG_DEVICE_INFO: { + ATTR_NWK: device.nwk, + ATTR_IEEE: str(device.ieee), + DEVICE_PAIRING_STATUS: DevicePairingStatus.PAIRED.name, + }, + }, + ) + + def raw_device_initialized(self, device: zigpy.device.Device) -> None: + """Handle a device initialization without quirks loaded.""" + manuf = device.manufacturer + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: ZHA_GW_MSG_RAW_INIT, + ZHA_GW_MSG_DEVICE_INFO: { + ATTR_NWK: device.nwk, + ATTR_IEEE: str(device.ieee), + DEVICE_PAIRING_STATUS: DevicePairingStatus.INTERVIEW_COMPLETE.name, + ATTR_MODEL: device.model if device.model else UNKNOWN_MODEL, + ATTR_MANUFACTURER: manuf if manuf else UNKNOWN_MANUFACTURER, + ATTR_SIGNATURE: device.get_signature(), + }, + }, + ) + + def device_initialized(self, device: zigpy.device.Device) -> None: + """Handle device joined and basic information discovered.""" + self.hass.async_create_task(self.async_device_initialized(device)) + + def device_left(self, device: zigpy.device.Device) -> None: + """Handle device leaving the network.""" + self.async_update_device(device, False) + + def group_member_removed( + self, zigpy_group: zigpy.group.Group, endpoint: zigpy.endpoint.Endpoint + ) -> None: + """Handle zigpy group member removed event.""" + # need to handle endpoint correctly on groups + zha_group = self._async_get_or_create_group(zigpy_group) + zha_group.info("group_member_removed - endpoint: %s", endpoint) + self._send_group_gateway_message(zigpy_group, ZHA_GW_MSG_GROUP_MEMBER_REMOVED) + async_dispatcher_send( + self.hass, f"{SIGNAL_GROUP_MEMBERSHIP_CHANGE}_0x{zigpy_group.group_id:04x}" + ) + + def group_member_added( + self, zigpy_group: zigpy.group.Group, endpoint: zigpy.endpoint.Endpoint + ) -> None: + """Handle zigpy group member added event.""" + # need to handle endpoint correctly on groups + zha_group = self._async_get_or_create_group(zigpy_group) + zha_group.info("group_member_added - endpoint: %s", endpoint) + self._send_group_gateway_message(zigpy_group, ZHA_GW_MSG_GROUP_MEMBER_ADDED) + async_dispatcher_send( + self.hass, f"{SIGNAL_GROUP_MEMBERSHIP_CHANGE}_0x{zigpy_group.group_id:04x}" + ) + if len(zha_group.members) == 2: + # we need to do this because there wasn't already + # a group entity to remove and re-add + discovery.GROUP_PROBE.discover_group_entities(zha_group) + + def group_added(self, zigpy_group: zigpy.group.Group) -> None: + """Handle zigpy group added event.""" + zha_group = self._async_get_or_create_group(zigpy_group) + zha_group.info("group_added") + # need to dispatch for entity creation here + self._send_group_gateway_message(zigpy_group, ZHA_GW_MSG_GROUP_ADDED) + + def group_removed(self, zigpy_group: zigpy.group.Group) -> None: + """Handle zigpy group removed event.""" + self._send_group_gateway_message(zigpy_group, ZHA_GW_MSG_GROUP_REMOVED) + zha_group = self._groups.pop(zigpy_group.group_id) + zha_group.info("group_removed") + self._cleanup_group_entity_registry_entries(zigpy_group) + + def _send_group_gateway_message( + self, zigpy_group: zigpy.group.Group, gateway_message_type: str + ) -> None: + """Send the gateway event for a zigpy group event.""" + zha_group = self._groups.get(zigpy_group.group_id) + if zha_group is not None: + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: gateway_message_type, + ZHA_GW_MSG_GROUP_INFO: zha_group.group_info, 
+ }, + ) + + async def _async_remove_device( + self, device: ZHADevice, entity_refs: list[EntityReference] | None + ) -> None: + if entity_refs is not None: + remove_tasks: list[asyncio.Future[Any]] = [ + entity_ref.remove_future for entity_ref in entity_refs + ] + if remove_tasks: + await asyncio.wait(remove_tasks) + + device_registry = dr.async_get(self.hass) + reg_device = device_registry.async_get(device.device_id) + if reg_device is not None: + device_registry.async_remove_device(reg_device.id) + + def device_removed(self, device: zigpy.device.Device) -> None: + """Handle device being removed from the network.""" + zha_device = self._devices.pop(device.ieee, None) + entity_refs = self._device_registry.pop(device.ieee, None) + if zha_device is not None: + device_info = zha_device.zha_device_info + zha_device.async_cleanup_handles() + async_dispatcher_send(self.hass, f"{SIGNAL_REMOVE}_{zha_device.ieee!s}") + self.hass.async_create_task( + self._async_remove_device(zha_device, entity_refs), + "ZHAGateway._async_remove_device", + ) + if device_info is not None: + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: ZHA_GW_MSG_DEVICE_REMOVED, + ZHA_GW_MSG_DEVICE_INFO: device_info, + }, + ) + + def get_device(self, ieee: EUI64) -> ZHADevice | None: + """Return ZHADevice for given ieee.""" + return self._devices.get(ieee) + + def get_group(self, group_id: int) -> ZHAGroup | None: + """Return Group for given group id.""" + return self.groups.get(group_id) + + @callback + def async_get_group_by_name(self, group_name: str) -> ZHAGroup | None: + """Get ZHA group by name.""" + for group in self.groups.values(): + if group.name == group_name: + return group + return None + + def get_entity_reference(self, entity_id: str) -> EntityReference | None: + """Return entity reference for given entity_id if found.""" + for entity_reference in itertools.chain.from_iterable( + self.device_registry.values() + ): + if entity_id == entity_reference.reference_id: + return entity_reference + return None + + def remove_entity_reference(self, entity: ZhaEntity) -> None: + """Remove entity reference for given entity_id if found.""" + if entity.zha_device.ieee in self.device_registry: + entity_refs = self.device_registry.get(entity.zha_device.ieee) + self.device_registry[entity.zha_device.ieee] = [ + e + for e in entity_refs # type: ignore[union-attr] + if e.reference_id != entity.entity_id + ] + + def _cleanup_group_entity_registry_entries( + self, zigpy_group: zigpy.group.Group + ) -> None: + """Remove entity registry entries for group entities when the groups are removed from HA.""" + # first we collect the potential unique ids for entities that could be created from this group + possible_entity_unique_ids = [ + f"{domain}_zha_group_0x{zigpy_group.group_id:04x}" + for domain in GROUP_ENTITY_DOMAINS + ] + + # then we get all group entity entries tied to the coordinator + entity_registry = er.async_get(self.hass) + assert self.coordinator_zha_device + all_group_entity_entries = er.async_entries_for_device( + entity_registry, + self.coordinator_zha_device.device_id, + include_disabled_entities=True, + ) + + # then we get the entity entries for this specific group + # by getting the entries that match + entries_to_remove = [ + entry + for entry in all_group_entity_entries + if entry.unique_id in possible_entity_unique_ids + ] + + # then we remove the entries from the entity registry + for entry in entries_to_remove: + _LOGGER.debug( + "cleaning up entity registry entry for entity: %s", entry.entity_id + 
) + entity_registry.async_remove(entry.entity_id) + + @property + def state(self) -> State: + """Return the active coordinator's network state.""" + return self.application_controller.state + + @property + def devices(self) -> dict[EUI64, ZHADevice]: + """Return devices.""" + return self._devices + + @property + def groups(self) -> dict[int, ZHAGroup]: + """Return groups.""" + return self._groups + + @property + def device_registry(self) -> collections.defaultdict[EUI64, list[EntityReference]]: + """Return entities by ieee.""" + return self._device_registry + + def register_entity_reference( + self, + ieee: EUI64, + reference_id: str, + zha_device: ZHADevice, + cluster_handlers: dict[str, ClusterHandler], + device_info: DeviceInfo, + remove_future: asyncio.Future[Any], + ): + """Record the creation of a hass entity associated with ieee.""" + self._device_registry[ieee].append( + EntityReference( + reference_id=reference_id, + zha_device=zha_device, + cluster_handlers=cluster_handlers, + device_info=device_info, + remove_future=remove_future, + ) + ) + + @callback + def async_enable_debug_mode(self, filterer: _LogFilterType | None = None) -> None: + """Enable debug mode for ZHA.""" + self._log_levels[DEBUG_LEVEL_ORIGINAL] = async_capture_log_levels() + async_set_logger_levels(DEBUG_LEVELS) + self._log_levels[DEBUG_LEVEL_CURRENT] = async_capture_log_levels() + + if filterer: + self._log_relay_handler.addFilter(filterer) + + for logger_name in DEBUG_RELAY_LOGGERS: + logging.getLogger(logger_name).addHandler(self._log_relay_handler) + + self.debug_enabled = True + + @callback + def async_disable_debug_mode(self, filterer: _LogFilterType | None = None) -> None: + """Disable debug mode for ZHA.""" + async_set_logger_levels(self._log_levels[DEBUG_LEVEL_ORIGINAL]) + self._log_levels[DEBUG_LEVEL_CURRENT] = async_capture_log_levels() + for logger_name in DEBUG_RELAY_LOGGERS: + logging.getLogger(logger_name).removeHandler(self._log_relay_handler) + if filterer: + self._log_relay_handler.removeFilter(filterer) + self.debug_enabled = False + + @callback + def _async_get_or_create_device( + self, zigpy_device: zigpy.device.Device + ) -> ZHADevice: + """Get or create a ZHA device.""" + if (zha_device := self._devices.get(zigpy_device.ieee)) is None: + zha_device = ZHADevice.new(self.hass, zigpy_device, self) + self._devices[zigpy_device.ieee] = zha_device + + device_registry = dr.async_get(self.hass) + device_registry_device = device_registry.async_get_or_create( + config_entry_id=self.config_entry.entry_id, + connections={(dr.CONNECTION_ZIGBEE, str(zha_device.ieee))}, + identifiers={(DOMAIN, str(zha_device.ieee))}, + name=zha_device.name, + manufacturer=zha_device.manufacturer, + model=zha_device.model, + ) + zha_device.set_device_id(device_registry_device.id) + return zha_device + + @callback + def _async_get_or_create_group(self, zigpy_group: zigpy.group.Group) -> ZHAGroup: + """Get or create a ZHA group.""" + zha_group = self._groups.get(zigpy_group.group_id) + if zha_group is None: + zha_group = ZHAGroup(self.hass, self, zigpy_group) + self._groups[zigpy_group.group_id] = zha_group + return zha_group + + @callback + def async_update_device( + self, sender: zigpy.device.Device, available: bool = True + ) -> None: + """Update device that has just become available.""" + if sender.ieee in self.devices: + device = self.devices[sender.ieee] + # avoid a race condition during new joins + if device.status is DeviceStatus.INITIALIZED: + device.update_available(available) + + async def 
async_device_initialized(self, device: zigpy.device.Device) -> None: + """Handle device joined and basic information discovered (async).""" + zha_device = self._async_get_or_create_device(device) + _LOGGER.debug( + "device - %s:%s entering async_device_initialized - is_new_join: %s", + device.nwk, + device.ieee, + zha_device.status is not DeviceStatus.INITIALIZED, + ) + + if zha_device.status is DeviceStatus.INITIALIZED: + # ZHA already has an initialized device so either the device was assigned a + # new nwk or device was physically reset and added again without being removed + _LOGGER.debug( + "device - %s:%s has been reset and re-added or its nwk address changed", + device.nwk, + device.ieee, + ) + await self._async_device_rejoined(zha_device) + else: + _LOGGER.debug( + "device - %s:%s has joined the ZHA zigbee network", + device.nwk, + device.ieee, + ) + await self._async_device_joined(zha_device) + + device_info = zha_device.zha_device_info + device_info[DEVICE_PAIRING_STATUS] = DevicePairingStatus.INITIALIZED.name + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: ZHA_GW_MSG_DEVICE_FULL_INIT, + ZHA_GW_MSG_DEVICE_INFO: device_info, + }, + ) + + async def _async_device_joined(self, zha_device: ZHADevice) -> None: + zha_device.available = True + device_info = zha_device.device_info + await zha_device.async_configure() + device_info[DEVICE_PAIRING_STATUS] = DevicePairingStatus.CONFIGURED.name + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: ZHA_GW_MSG_DEVICE_FULL_INIT, + ZHA_GW_MSG_DEVICE_INFO: device_info, + }, + ) + await zha_device.async_initialize(from_cache=False) + async_dispatcher_send(self.hass, SIGNAL_ADD_ENTITIES) + + async def _async_device_rejoined(self, zha_device: ZHADevice) -> None: + _LOGGER.debug( + "skipping discovery for previously discovered device - %s:%s", + zha_device.nwk, + zha_device.ieee, + ) + # we don't have to do this on a nwk swap + # but we don't have a way to tell currently + await zha_device.async_configure() + device_info = zha_device.device_info + device_info[DEVICE_PAIRING_STATUS] = DevicePairingStatus.CONFIGURED.name + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: ZHA_GW_MSG_DEVICE_FULL_INIT, + ZHA_GW_MSG_DEVICE_INFO: device_info, + }, + ) + # force async_initialize() to fire so don't explicitly call it + zha_device.available = False + zha_device.update_available(True) + + async def async_create_zigpy_group( + self, + name: str, + members: list[GroupMember] | None, + group_id: int | None = None, + ) -> ZHAGroup | None: + """Create a new Zigpy Zigbee group.""" + + # we start with two to fill any gaps from a user removing existing groups + + if group_id is None: + group_id = 2 + while group_id in self.groups: + group_id += 1 + + # guard against group already existing + if self.async_get_group_by_name(name) is None: + self.application_controller.groups.add_group(group_id, name) + if members is not None: + tasks = [] + for member in members: + _LOGGER.debug( + ( + "Adding member with IEEE: %s and endpoint ID: %s to group:" + " %s:0x%04x" + ), + member.ieee, + member.endpoint_id, + name, + group_id, + ) + tasks.append( + self.devices[member.ieee].async_add_endpoint_to_group( + member.endpoint_id, group_id + ) + ) + await asyncio.gather(*tasks) + return self.groups.get(group_id) + + async def async_remove_zigpy_group(self, group_id: int) -> None: + """Remove a Zigbee group from Zigpy.""" + if not (group := self.groups.get(group_id)): + _LOGGER.debug("Group: 0x%04x could not be found", group_id) + 
return + if group.members: + tasks = [member.async_remove_from_group() for member in group.members] + if tasks: + await asyncio.gather(*tasks) + self.application_controller.groups.pop(group_id) + + async def shutdown(self) -> None: + """Stop ZHA Controller Application.""" + if self.shutting_down: + _LOGGER.debug("Ignoring duplicate shutdown event") + return + + _LOGGER.debug("Shutting down ZHA ControllerApplication") + self.shutting_down = True + + for unsubscribe in self._unsubs: + unsubscribe() + for device in self.devices.values(): + device.async_cleanup_handles() + await self.application_controller.shutdown() + + def handle_message( + self, + sender: zigpy.device.Device, + profile: int, + cluster: int, + src_ep: int, + dst_ep: int, + message: bytes, + ) -> None: + """Handle message from a device Event handler.""" + if sender.ieee in self.devices and not self.devices[sender.ieee].available: + self.async_update_device(sender, available=True) + + +@callback +def async_capture_log_levels() -> dict[str, int]: + """Capture current logger levels for ZHA.""" + return { + DEBUG_COMP_BELLOWS: logging.getLogger(DEBUG_COMP_BELLOWS).getEffectiveLevel(), + DEBUG_COMP_ZHA: logging.getLogger(DEBUG_COMP_ZHA).getEffectiveLevel(), + DEBUG_COMP_ZIGPY: logging.getLogger(DEBUG_COMP_ZIGPY).getEffectiveLevel(), + DEBUG_COMP_ZIGPY_ZNP: logging.getLogger( + DEBUG_COMP_ZIGPY_ZNP + ).getEffectiveLevel(), + DEBUG_COMP_ZIGPY_DECONZ: logging.getLogger( + DEBUG_COMP_ZIGPY_DECONZ + ).getEffectiveLevel(), + DEBUG_COMP_ZIGPY_XBEE: logging.getLogger( + DEBUG_COMP_ZIGPY_XBEE + ).getEffectiveLevel(), + DEBUG_COMP_ZIGPY_ZIGATE: logging.getLogger( + DEBUG_COMP_ZIGPY_ZIGATE + ).getEffectiveLevel(), + } + + +@callback +def async_set_logger_levels(levels: dict[str, int]) -> None: + """Set logger levels for ZHA.""" + logging.getLogger(DEBUG_COMP_BELLOWS).setLevel(levels[DEBUG_COMP_BELLOWS]) + logging.getLogger(DEBUG_COMP_ZHA).setLevel(levels[DEBUG_COMP_ZHA]) + logging.getLogger(DEBUG_COMP_ZIGPY).setLevel(levels[DEBUG_COMP_ZIGPY]) + logging.getLogger(DEBUG_COMP_ZIGPY_ZNP).setLevel(levels[DEBUG_COMP_ZIGPY_ZNP]) + logging.getLogger(DEBUG_COMP_ZIGPY_DECONZ).setLevel(levels[DEBUG_COMP_ZIGPY_DECONZ]) + logging.getLogger(DEBUG_COMP_ZIGPY_XBEE).setLevel(levels[DEBUG_COMP_ZIGPY_XBEE]) + logging.getLogger(DEBUG_COMP_ZIGPY_ZIGATE).setLevel(levels[DEBUG_COMP_ZIGPY_ZIGATE]) + + +class LogRelayHandler(logging.Handler): + """Log handler for error messages.""" + + def __init__(self, hass: HomeAssistant, gateway: ZHAGateway) -> None: + """Initialize a new LogErrorHandler.""" + super().__init__() + self.hass = hass + self.gateway = gateway + hass_path: str = HOMEASSISTANT_PATH[0] + config_dir = self.hass.config.config_dir + self.paths_re = re.compile( + r"(?:{})/(.*)".format( + "|".join([re.escape(x) for x in (hass_path, config_dir)]) + ) + ) + + def emit(self, record: LogRecord) -> None: + """Relay log message via dispatcher.""" + entry = LogEntry( + record, + self.paths_re, + formatter=self.formatter, + figure_out_source=record.levelno >= logging.WARNING, + ) + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + {ATTR_TYPE: ZHA_GW_MSG_LOG_OUTPUT, ZHA_GW_MSG_LOG_ENTRY: entry.to_dict()}, + ) diff --git a/homeassistant/components/zha/core/group.py b/homeassistant/components/zha/core/group.py new file mode 100644 index 00000000000..a6156ab63b7 --- /dev/null +++ b/homeassistant/components/zha/core/group.py @@ -0,0 +1,246 @@ +"""Group for Zigbee Home Automation.""" + +from __future__ import annotations + +import asyncio +import logging +from typing 
import TYPE_CHECKING, Any, NamedTuple + +import zigpy.endpoint +import zigpy.exceptions +import zigpy.group +from zigpy.types.named import EUI64 + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.entity_registry import async_entries_for_device + +from .helpers import LogMixin + +if TYPE_CHECKING: + from .device import ZHADevice + from .gateway import ZHAGateway + +_LOGGER = logging.getLogger(__name__) + + +class GroupMember(NamedTuple): + """Describes a group member.""" + + ieee: EUI64 + endpoint_id: int + + +class GroupEntityReference(NamedTuple): + """Reference to a group entity.""" + + name: str | None + original_name: str | None + entity_id: int + + +class ZHAGroupMember(LogMixin): + """Composite object that represents a device endpoint in a Zigbee group.""" + + def __init__( + self, zha_group: ZHAGroup, zha_device: ZHADevice, endpoint_id: int + ) -> None: + """Initialize the group member.""" + self._zha_group = zha_group + self._zha_device = zha_device + self._endpoint_id = endpoint_id + + @property + def group(self) -> ZHAGroup: + """Return the group this member belongs to.""" + return self._zha_group + + @property + def endpoint_id(self) -> int: + """Return the endpoint id for this group member.""" + return self._endpoint_id + + @property + def endpoint(self) -> zigpy.endpoint.Endpoint: + """Return the endpoint for this group member.""" + return self._zha_device.device.endpoints.get(self.endpoint_id) + + @property + def device(self) -> ZHADevice: + """Return the ZHA device for this group member.""" + return self._zha_device + + @property + def member_info(self) -> dict[str, Any]: + """Get ZHA group info.""" + member_info: dict[str, Any] = {} + member_info["endpoint_id"] = self.endpoint_id + member_info["device"] = self.device.zha_device_info + member_info["entities"] = self.associated_entities + return member_info + + @property + def associated_entities(self) -> list[dict[str, Any]]: + """Return the list of entities that were derived from this endpoint.""" + entity_registry = er.async_get(self._zha_device.hass) + zha_device_registry = self.device.gateway.device_registry + + entity_info = [] + + for entity_ref in zha_device_registry.get(self.device.ieee): + # We have device entities now that don't leverage cluster handlers + if not entity_ref.cluster_handlers: + continue + entity = entity_registry.async_get(entity_ref.reference_id) + handler = list(entity_ref.cluster_handlers.values())[0] + + if ( + entity is None + or handler.cluster.endpoint.endpoint_id != self.endpoint_id + ): + continue + + entity_info.append( + GroupEntityReference( + name=entity.name, + original_name=entity.original_name, + entity_id=entity_ref.reference_id, + )._asdict() + ) + + return entity_info + + async def async_remove_from_group(self) -> None: + """Remove the device endpoint from the provided zigbee group.""" + try: + await self._zha_device.device.endpoints[ + self._endpoint_id + ].remove_from_group(self._zha_group.group_id) + except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: + self.debug( + ( + "Failed to remove endpoint: %s for device '%s' from group: 0x%04x" + " ex: %s" + ), + self._endpoint_id, + self._zha_device.ieee, + self._zha_group.group_id, + str(ex), + ) + + def log(self, level: int, msg: str, *args: Any, **kwargs) -> None: + """Log a message.""" + msg = f"[%s](%s): {msg}" + args = (f"0x{self._zha_group.group_id:04x}", self.endpoint_id, *args) + _LOGGER.log(level, msg, *args, **kwargs) + + +class 
ZHAGroup(LogMixin): + """ZHA Zigbee group object.""" + + def __init__( + self, + hass: HomeAssistant, + zha_gateway: ZHAGateway, + zigpy_group: zigpy.group.Group, + ) -> None: + """Initialize the group.""" + self.hass = hass + self._zha_gateway = zha_gateway + self._zigpy_group = zigpy_group + + @property + def name(self) -> str: + """Return group name.""" + return self._zigpy_group.name + + @property + def group_id(self) -> int: + """Return group name.""" + return self._zigpy_group.group_id + + @property + def endpoint(self) -> zigpy.endpoint.Endpoint: + """Return the endpoint for this group.""" + return self._zigpy_group.endpoint + + @property + def members(self) -> list[ZHAGroupMember]: + """Return the ZHA devices that are members of this group.""" + return [ + ZHAGroupMember(self, self._zha_gateway.devices[member_ieee], endpoint_id) + for (member_ieee, endpoint_id) in self._zigpy_group.members + if member_ieee in self._zha_gateway.devices + ] + + async def async_add_members(self, members: list[GroupMember]) -> None: + """Add members to this group.""" + if len(members) > 1: + tasks = [ + self._zha_gateway.devices[member.ieee].async_add_endpoint_to_group( + member.endpoint_id, self.group_id + ) + for member in members + ] + await asyncio.gather(*tasks) + else: + await self._zha_gateway.devices[ + members[0].ieee + ].async_add_endpoint_to_group(members[0].endpoint_id, self.group_id) + + async def async_remove_members(self, members: list[GroupMember]) -> None: + """Remove members from this group.""" + if len(members) > 1: + tasks = [ + self._zha_gateway.devices[member.ieee].async_remove_endpoint_from_group( + member.endpoint_id, self.group_id + ) + for member in members + ] + await asyncio.gather(*tasks) + else: + await self._zha_gateway.devices[ + members[0].ieee + ].async_remove_endpoint_from_group(members[0].endpoint_id, self.group_id) + + @property + def member_entity_ids(self) -> list[str]: + """Return the ZHA entity ids for all entities for the members of this group.""" + return [ + entity_reference["entity_id"] + for member in self.members + for entity_reference in member.associated_entities + ] + + def get_domain_entity_ids(self, domain: str) -> list[str]: + """Return entity ids from the entity domain for this group.""" + entity_registry = er.async_get(self.hass) + domain_entity_ids: list[str] = [] + + for member in self.members: + if member.device.is_coordinator: + continue + entities = async_entries_for_device( + entity_registry, + member.device.device_id, + include_disabled_entities=True, + ) + domain_entity_ids.extend( + [entity.entity_id for entity in entities if entity.domain == domain] + ) + return domain_entity_ids + + @property + def group_info(self) -> dict[str, Any]: + """Get ZHA group info.""" + group_info: dict[str, Any] = {} + group_info["group_id"] = self.group_id + group_info["name"] = self.name + group_info["members"] = [member.member_info for member in self.members] + return group_info + + def log(self, level: int, msg: str, *args: Any, **kwargs) -> None: + """Log a message.""" + msg = f"[%s](%s): {msg}" + args = (self.name, self.group_id, *args) + _LOGGER.log(level, msg, *args, **kwargs) diff --git a/homeassistant/components/zha/core/helpers.py b/homeassistant/components/zha/core/helpers.py new file mode 100644 index 00000000000..2508dd34fd4 --- /dev/null +++ b/homeassistant/components/zha/core/helpers.py @@ -0,0 +1,523 @@ +"""Helpers for Zigbee Home Automation. 
+ +For more details about this component, please refer to the documentation at +https://home-assistant.io/integrations/zha/ +""" + +from __future__ import annotations + +import binascii +import collections +from collections.abc import Callable, Iterator +import dataclasses +from dataclasses import dataclass +import enum +import logging +import re +from typing import TYPE_CHECKING, Any, overload + +import voluptuous as vol +import zigpy.exceptions +import zigpy.types +import zigpy.util +import zigpy.zcl +from zigpy.zcl.foundation import CommandSchema +import zigpy.zdo.types as zdo_types + +from homeassistant.components.binary_sensor import BinarySensorDeviceClass +from homeassistant.components.number import NumberDeviceClass +from homeassistant.components.sensor import SensorDeviceClass +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + Platform, + UnitOfApparentPower, + UnitOfDataRate, + UnitOfElectricCurrent, + UnitOfElectricPotential, + UnitOfEnergy, + UnitOfFrequency, + UnitOfInformation, + UnitOfIrradiance, + UnitOfLength, + UnitOfMass, + UnitOfPower, + UnitOfPrecipitationDepth, + UnitOfPressure, + UnitOfSoundPressure, + UnitOfSpeed, + UnitOfTemperature, + UnitOfTime, + UnitOfVolume, + UnitOfVolumeFlowRate, + UnitOfVolumetricFlux, +) +from homeassistant.core import HomeAssistant, State, callback +from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers.typing import ConfigType + +from .const import CLUSTER_TYPE_IN, CLUSTER_TYPE_OUT, CUSTOM_CONFIGURATION, DATA_ZHA +from .registries import BINDABLE_CLUSTERS + +if TYPE_CHECKING: + from .device import ZHADevice + from .gateway import ZHAGateway + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class BindingPair: + """Information for binding.""" + + source_cluster: zigpy.zcl.Cluster + target_ieee: zigpy.types.EUI64 + target_ep_id: int + + @property + def destination_address(self) -> zdo_types.MultiAddress: + """Return a ZDO multi address instance.""" + return zdo_types.MultiAddress( + addrmode=3, ieee=self.target_ieee, endpoint=self.target_ep_id + ) + + +async def safe_read( + cluster, attributes, allow_cache=True, only_cache=False, manufacturer=None +): + """Swallow all exceptions from network read. + + If we throw during initialization, setup fails. Rather have an entity that + exists, but is in a maybe wrong state, than no entity. This method should + probably only be used during initialization. 
+ """ + try: + result, _ = await cluster.read_attributes( + attributes, + allow_cache=allow_cache, + only_cache=only_cache, + manufacturer=manufacturer, + ) + except Exception: # noqa: BLE001 + return {} + return result + + +async def get_matched_clusters( + source_zha_device: ZHADevice, target_zha_device: ZHADevice +) -> list[BindingPair]: + """Get matched input/output cluster pairs for 2 devices.""" + source_clusters = source_zha_device.async_get_std_clusters() + target_clusters = target_zha_device.async_get_std_clusters() + clusters_to_bind = [] + + for endpoint_id in source_clusters: + for cluster_id in source_clusters[endpoint_id][CLUSTER_TYPE_OUT]: + if cluster_id not in BINDABLE_CLUSTERS: + continue + if target_zha_device.nwk == 0x0000: + cluster_pair = BindingPair( + source_cluster=source_clusters[endpoint_id][CLUSTER_TYPE_OUT][ + cluster_id + ], + target_ieee=target_zha_device.ieee, + target_ep_id=target_zha_device.device.application.get_endpoint_id( + cluster_id, is_server_cluster=True + ), + ) + clusters_to_bind.append(cluster_pair) + continue + for t_endpoint_id in target_clusters: + if cluster_id in target_clusters[t_endpoint_id][CLUSTER_TYPE_IN]: + cluster_pair = BindingPair( + source_cluster=source_clusters[endpoint_id][CLUSTER_TYPE_OUT][ + cluster_id + ], + target_ieee=target_zha_device.ieee, + target_ep_id=t_endpoint_id, + ) + clusters_to_bind.append(cluster_pair) + return clusters_to_bind + + +def cluster_command_schema_to_vol_schema(schema: CommandSchema) -> vol.Schema: + """Convert a cluster command schema to a voluptuous schema.""" + return vol.Schema( + { + vol.Optional(field.name) + if field.optional + else vol.Required(field.name): schema_type_to_vol(field.type) + for field in schema.fields + } + ) + + +def schema_type_to_vol(field_type: Any) -> Any: + """Convert a schema type to a voluptuous type.""" + if issubclass(field_type, enum.Flag) and field_type.__members__: + return cv.multi_select( + [key.replace("_", " ") for key in field_type.__members__] + ) + if issubclass(field_type, enum.Enum) and field_type.__members__: + return vol.In([key.replace("_", " ") for key in field_type.__members__]) + if ( + issubclass(field_type, zigpy.types.FixedIntType) + or issubclass(field_type, enum.Flag) + or issubclass(field_type, enum.Enum) + ): + return vol.All( + vol.Coerce(int), vol.Range(field_type.min_value, field_type.max_value) + ) + return str + + +def convert_to_zcl_values( + fields: dict[str, Any], schema: CommandSchema +) -> dict[str, Any]: + """Convert user input to ZCL values.""" + converted_fields: dict[str, Any] = {} + for field in schema.fields: + if field.name not in fields: + continue + value = fields[field.name] + if issubclass(field.type, enum.Flag) and isinstance(value, list): + new_value = 0 + + for flag in value: + if isinstance(flag, str): + new_value |= field.type[flag.replace(" ", "_")] + else: + new_value |= flag + + value = field.type(new_value) + elif issubclass(field.type, enum.Enum): + value = ( + field.type[value.replace(" ", "_")] + if isinstance(value, str) + else field.type(value) + ) + else: + value = field.type(value) + _LOGGER.debug( + "Converted ZCL schema field(%s) value from: %s to: %s", + field.name, + fields[field.name], + value, + ) + converted_fields[field.name] = value + return converted_fields + + +@callback +def async_is_bindable_target(source_zha_device, target_zha_device): + """Determine if target is bindable to source.""" + if target_zha_device.nwk == 0x0000: + return True + + source_clusters = 
source_zha_device.async_get_std_clusters() + target_clusters = target_zha_device.async_get_std_clusters() + + for endpoint_id in source_clusters: + for t_endpoint_id in target_clusters: + matches = set( + source_clusters[endpoint_id][CLUSTER_TYPE_OUT].keys() + ).intersection(target_clusters[t_endpoint_id][CLUSTER_TYPE_IN].keys()) + if any(bindable in BINDABLE_CLUSTERS for bindable in matches): + return True + return False + + +@callback +def async_get_zha_config_value[_T]( + config_entry: ConfigEntry, section: str, config_key: str, default: _T +) -> _T: + """Get the value for the specified configuration from the ZHA config entry.""" + return ( + config_entry.options.get(CUSTOM_CONFIGURATION, {}) + .get(section, {}) + .get(config_key, default) + ) + + +def async_cluster_exists(hass: HomeAssistant, cluster_id, skip_coordinator=True): + """Determine if a device containing the specified in cluster is paired.""" + zha_gateway = get_zha_gateway(hass) + zha_devices = zha_gateway.devices.values() + for zha_device in zha_devices: + if skip_coordinator and zha_device.is_coordinator: + continue + clusters_by_endpoint = zha_device.async_get_clusters() + for clusters in clusters_by_endpoint.values(): + if ( + cluster_id in clusters[CLUSTER_TYPE_IN] + or cluster_id in clusters[CLUSTER_TYPE_OUT] + ): + return True + return False + + +@callback +def async_get_zha_device(hass: HomeAssistant, device_id: str) -> ZHADevice: + """Get a ZHA device for the given device registry id.""" + device_registry = dr.async_get(hass) + registry_device = device_registry.async_get(device_id) + if not registry_device: + _LOGGER.error("Device id `%s` not found in registry", device_id) + raise KeyError(f"Device id `{device_id}` not found in registry.") + zha_gateway = get_zha_gateway(hass) + try: + ieee_address = list(registry_device.identifiers)[0][1] + ieee = zigpy.types.EUI64.convert(ieee_address) + except (IndexError, ValueError) as ex: + _LOGGER.error( + "Unable to determine device IEEE for device with device id `%s`", device_id + ) + raise KeyError( + f"Unable to determine device IEEE for device with device id `{device_id}`." + ) from ex + return zha_gateway.devices[ieee] + + +def find_state_attributes(states: list[State], key: str) -> Iterator[Any]: + """Find attributes with matching key from states.""" + for state in states: + if (value := state.attributes.get(key)) is not None: + yield value + + +def mean_int(*args): + """Return the mean of the supplied values.""" + return int(sum(args) / len(args)) + + +def mean_tuple(*args): + """Return the mean values along the columns of the supplied values.""" + return tuple(sum(x) / len(x) for x in zip(*args, strict=False)) + + +def reduce_attribute( + states: list[State], + key: str, + default: Any | None = None, + reduce: Callable[..., Any] = mean_int, +) -> Any: + """Find the first attribute matching key from states. + + If none are found, return default. 
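+    A single matching value is returned unchanged; multiple values are combined
+    with the reduce callable (mean_int by default).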
+ """ + attrs = list(find_state_attributes(states, key)) + + if not attrs: + return default + + if len(attrs) == 1: + return attrs[0] + + return reduce(*attrs) + + +class LogMixin: + """Log helper.""" + + def log(self, level, msg, *args, **kwargs): + """Log with level.""" + raise NotImplementedError + + def debug(self, msg, *args, **kwargs): + """Debug level log.""" + return self.log(logging.DEBUG, msg, *args, **kwargs) + + def info(self, msg, *args, **kwargs): + """Info level log.""" + return self.log(logging.INFO, msg, *args, **kwargs) + + def warning(self, msg, *args, **kwargs): + """Warning method log.""" + return self.log(logging.WARNING, msg, *args, **kwargs) + + def error(self, msg, *args, **kwargs): + """Error level log.""" + return self.log(logging.ERROR, msg, *args, **kwargs) + + +def convert_install_code(value: str) -> zigpy.types.KeyData: + """Convert string to install code bytes and validate length.""" + + try: + code = binascii.unhexlify(value.replace("-", "").lower()) + except binascii.Error as exc: + raise vol.Invalid(f"invalid hex string: {value}") from exc + + if len(code) != 18: # 16 byte code + 2 crc bytes + raise vol.Invalid("invalid length of the install code") + + link_key = zigpy.util.convert_install_code(code) + if link_key is None: + raise vol.Invalid("invalid install code") + + return link_key + + +QR_CODES = ( + # Consciot + r"^([\da-fA-F]{16})\|([\da-fA-F]{36})$", + # Enbrighten + r""" + ^Z: + ([0-9a-fA-F]{16}) # IEEE address + \$I: + ([0-9a-fA-F]{36}) # install code + $ + """, + # Aqara + r""" + \$A: + ([0-9a-fA-F]{16}) # IEEE address + \$I: + ([0-9a-fA-F]{36}) # install code + $ + """, + # Bosch + r""" + ^RB01SG + [0-9a-fA-F]{34} + ([0-9a-fA-F]{16}) # IEEE address + DLK + ([0-9a-fA-F]{36}|[0-9a-fA-F]{32}) # install code / link key + $ + """, +) + + +def qr_to_install_code(qr_code: str) -> tuple[zigpy.types.EUI64, zigpy.types.KeyData]: + """Try to parse the QR code. + + if successful, return a tuple of a EUI64 address and install code. 
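+    Raises vol.Invalid when none of the known QR formats (Consciot, Enbrighten,
+    Aqara, Bosch) match or when the embedded install code fails validation.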
+ """ + + for code_pattern in QR_CODES: + match = re.search(code_pattern, qr_code, re.VERBOSE) + if match is None: + continue + + ieee_hex = binascii.unhexlify(match[1]) + ieee = zigpy.types.EUI64(ieee_hex[::-1]) + + # Bosch supplies (A) device specific link key (DSLK) or (A) install code + crc + if "RB01SG" in code_pattern and len(match[2]) == 32: + link_key_hex = binascii.unhexlify(match[2]) + link_key = zigpy.types.KeyData(link_key_hex) + return ieee, link_key + install_code = match[2] + # install_code sanity check + link_key = convert_install_code(install_code) + return ieee, link_key + + raise vol.Invalid(f"couldn't convert qr code: {qr_code}") + + +@dataclasses.dataclass(kw_only=True, slots=True) +class ZHAData: + """ZHA component data stored in `hass.data`.""" + + yaml_config: ConfigType = dataclasses.field(default_factory=dict) + platforms: collections.defaultdict[Platform, list] = dataclasses.field( + default_factory=lambda: collections.defaultdict(list) + ) + gateway: ZHAGateway | None = dataclasses.field(default=None) + device_trigger_cache: dict[str, tuple[str, dict]] = dataclasses.field( + default_factory=dict + ) + allow_polling: bool = dataclasses.field(default=False) + + +def get_zha_data(hass: HomeAssistant) -> ZHAData: + """Get the global ZHA data object.""" + if DATA_ZHA not in hass.data: + hass.data[DATA_ZHA] = ZHAData() + + return hass.data[DATA_ZHA] + + +def get_zha_gateway(hass: HomeAssistant) -> ZHAGateway: + """Get the ZHA gateway object.""" + if (zha_gateway := get_zha_data(hass).gateway) is None: + raise ValueError("No gateway object exists") + + return zha_gateway + + +UNITS_OF_MEASURE = { + UnitOfApparentPower.__name__: UnitOfApparentPower, + UnitOfPower.__name__: UnitOfPower, + UnitOfEnergy.__name__: UnitOfEnergy, + UnitOfElectricCurrent.__name__: UnitOfElectricCurrent, + UnitOfElectricPotential.__name__: UnitOfElectricPotential, + UnitOfTemperature.__name__: UnitOfTemperature, + UnitOfTime.__name__: UnitOfTime, + UnitOfLength.__name__: UnitOfLength, + UnitOfFrequency.__name__: UnitOfFrequency, + UnitOfPressure.__name__: UnitOfPressure, + UnitOfSoundPressure.__name__: UnitOfSoundPressure, + UnitOfVolume.__name__: UnitOfVolume, + UnitOfVolumeFlowRate.__name__: UnitOfVolumeFlowRate, + UnitOfMass.__name__: UnitOfMass, + UnitOfIrradiance.__name__: UnitOfIrradiance, + UnitOfVolumetricFlux.__name__: UnitOfVolumetricFlux, + UnitOfPrecipitationDepth.__name__: UnitOfPrecipitationDepth, + UnitOfSpeed.__name__: UnitOfSpeed, + UnitOfInformation.__name__: UnitOfInformation, + UnitOfDataRate.__name__: UnitOfDataRate, +} + + +def validate_unit(quirks_unit: enum.Enum) -> enum.Enum: + """Validate and return a unit of measure.""" + return UNITS_OF_MEASURE[type(quirks_unit).__name__](quirks_unit.value) + + +@overload +def validate_device_class( + device_class_enum: type[BinarySensorDeviceClass], + metadata_value, + platform: str, + logger: logging.Logger, +) -> BinarySensorDeviceClass | None: ... + + +@overload +def validate_device_class( + device_class_enum: type[SensorDeviceClass], + metadata_value, + platform: str, + logger: logging.Logger, +) -> SensorDeviceClass | None: ... + + +@overload +def validate_device_class( + device_class_enum: type[NumberDeviceClass], + metadata_value, + platform: str, + logger: logging.Logger, +) -> NumberDeviceClass | None: ... 
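+# The overloads above only narrow the return type per device-class enum; the
+# single implementation below performs the conversion and logs a warning for
+# values the enum does not define. Illustrative call only (the metadata object
+# and its attribute are assumed, not defined in this module):
+#     validate_device_class(SensorDeviceClass, metadata.device_class, "sensor", _LOGGER)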
+ + +def validate_device_class( + device_class_enum: type[ + BinarySensorDeviceClass | SensorDeviceClass | NumberDeviceClass + ], + metadata_value: enum.Enum, + platform: str, + logger: logging.Logger, +) -> BinarySensorDeviceClass | SensorDeviceClass | NumberDeviceClass | None: + """Validate and return a device class.""" + try: + return device_class_enum(metadata_value.value) + except ValueError as ex: + logger.warning( + "Quirks provided an invalid device class: %s for platform %s: %s", + metadata_value, + platform, + ex, + ) + return None diff --git a/homeassistant/components/zha/core/registries.py b/homeassistant/components/zha/core/registries.py new file mode 100644 index 00000000000..9d23b77efaa --- /dev/null +++ b/homeassistant/components/zha/core/registries.py @@ -0,0 +1,516 @@ +"""Mapping registries for Zigbee Home Automation.""" + +from __future__ import annotations + +import collections +from collections.abc import Callable +import dataclasses +from operator import attrgetter +from typing import TYPE_CHECKING + +import attr +from zigpy import zcl +import zigpy.profiles.zha +import zigpy.profiles.zll +from zigpy.types.named import EUI64 + +from homeassistant.const import Platform + +from .decorators import DictRegistry, NestedDictRegistry, SetRegistry + +if TYPE_CHECKING: + from ..entity import ZhaEntity, ZhaGroupEntity + from .cluster_handlers import ClientClusterHandler, ClusterHandler + + +GROUP_ENTITY_DOMAINS = [Platform.LIGHT, Platform.SWITCH, Platform.FAN] + +IKEA_AIR_PURIFIER_CLUSTER = 0xFC7D +PHILLIPS_REMOTE_CLUSTER = 0xFC00 +SMARTTHINGS_ACCELERATION_CLUSTER = 0xFC02 +SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE = 0x8000 +SMARTTHINGS_HUMIDITY_CLUSTER = 0xFC45 +TUYA_MANUFACTURER_CLUSTER = 0xEF00 +VOC_LEVEL_CLUSTER = 0x042E + +REMOTE_DEVICE_TYPES = { + zigpy.profiles.zha.PROFILE_ID: [ + zigpy.profiles.zha.DeviceType.COLOR_CONTROLLER, + zigpy.profiles.zha.DeviceType.COLOR_DIMMER_SWITCH, + zigpy.profiles.zha.DeviceType.COLOR_SCENE_CONTROLLER, + zigpy.profiles.zha.DeviceType.DIMMER_SWITCH, + zigpy.profiles.zha.DeviceType.LEVEL_CONTROL_SWITCH, + zigpy.profiles.zha.DeviceType.NON_COLOR_CONTROLLER, + zigpy.profiles.zha.DeviceType.NON_COLOR_SCENE_CONTROLLER, + zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, + zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT_SWITCH, + zigpy.profiles.zha.DeviceType.REMOTE_CONTROL, + zigpy.profiles.zha.DeviceType.SCENE_SELECTOR, + ], + zigpy.profiles.zll.PROFILE_ID: [ + zigpy.profiles.zll.DeviceType.COLOR_CONTROLLER, + zigpy.profiles.zll.DeviceType.COLOR_SCENE_CONTROLLER, + zigpy.profiles.zll.DeviceType.CONTROL_BRIDGE, + zigpy.profiles.zll.DeviceType.CONTROLLER, + zigpy.profiles.zll.DeviceType.SCENE_CONTROLLER, + ], +} +REMOTE_DEVICE_TYPES = collections.defaultdict(list, REMOTE_DEVICE_TYPES) + +SINGLE_INPUT_CLUSTER_DEVICE_CLASS = { + # this works for now but if we hit conflicts we can break it out to + # a different dict that is keyed by manufacturer + zcl.clusters.general.AnalogOutput.cluster_id: Platform.NUMBER, + zcl.clusters.general.MultistateInput.cluster_id: Platform.SENSOR, + zcl.clusters.general.OnOff.cluster_id: Platform.SWITCH, + zcl.clusters.hvac.Fan.cluster_id: Platform.FAN, +} + +SINGLE_OUTPUT_CLUSTER_DEVICE_CLASS = { + zcl.clusters.general.OnOff.cluster_id: Platform.BINARY_SENSOR, + zcl.clusters.security.IasAce.cluster_id: Platform.ALARM_CONTROL_PANEL, +} + +BINDABLE_CLUSTERS = SetRegistry() + +DEVICE_CLASS = { + zigpy.profiles.zha.PROFILE_ID: { + SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE: Platform.DEVICE_TRACKER, + 
zigpy.profiles.zha.DeviceType.THERMOSTAT: Platform.CLIMATE, + zigpy.profiles.zha.DeviceType.COLOR_DIMMABLE_LIGHT: Platform.LIGHT, + zigpy.profiles.zha.DeviceType.COLOR_TEMPERATURE_LIGHT: Platform.LIGHT, + zigpy.profiles.zha.DeviceType.DIMMABLE_BALLAST: Platform.LIGHT, + zigpy.profiles.zha.DeviceType.DIMMABLE_LIGHT: Platform.LIGHT, + zigpy.profiles.zha.DeviceType.DIMMABLE_PLUG_IN_UNIT: Platform.LIGHT, + zigpy.profiles.zha.DeviceType.EXTENDED_COLOR_LIGHT: Platform.LIGHT, + zigpy.profiles.zha.DeviceType.LEVEL_CONTROLLABLE_OUTPUT: Platform.COVER, + zigpy.profiles.zha.DeviceType.ON_OFF_BALLAST: Platform.SWITCH, + zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT: Platform.LIGHT, + zigpy.profiles.zha.DeviceType.ON_OFF_PLUG_IN_UNIT: Platform.SWITCH, + zigpy.profiles.zha.DeviceType.SHADE: Platform.COVER, + zigpy.profiles.zha.DeviceType.SMART_PLUG: Platform.SWITCH, + zigpy.profiles.zha.DeviceType.IAS_ANCILLARY_CONTROL: Platform.ALARM_CONTROL_PANEL, + zigpy.profiles.zha.DeviceType.IAS_WARNING_DEVICE: Platform.SIREN, + }, + zigpy.profiles.zll.PROFILE_ID: { + zigpy.profiles.zll.DeviceType.COLOR_LIGHT: Platform.LIGHT, + zigpy.profiles.zll.DeviceType.COLOR_TEMPERATURE_LIGHT: Platform.LIGHT, + zigpy.profiles.zll.DeviceType.DIMMABLE_LIGHT: Platform.LIGHT, + zigpy.profiles.zll.DeviceType.DIMMABLE_PLUGIN_UNIT: Platform.LIGHT, + zigpy.profiles.zll.DeviceType.EXTENDED_COLOR_LIGHT: Platform.LIGHT, + zigpy.profiles.zll.DeviceType.ON_OFF_LIGHT: Platform.LIGHT, + zigpy.profiles.zll.DeviceType.ON_OFF_PLUGIN_UNIT: Platform.SWITCH, + }, +} +DEVICE_CLASS = collections.defaultdict(dict, DEVICE_CLASS) + +CLUSTER_HANDLER_ONLY_CLUSTERS = SetRegistry() +CLIENT_CLUSTER_HANDLER_REGISTRY: DictRegistry[type[ClientClusterHandler]] = ( + DictRegistry() +) +ZIGBEE_CLUSTER_HANDLER_REGISTRY: NestedDictRegistry[type[ClusterHandler]] = ( + NestedDictRegistry() +) + +WEIGHT_ATTR = attrgetter("weight") + + +def set_or_callable(value) -> frozenset[str] | Callable: + """Convert single str or None to a set. Pass through callables and sets.""" + if value is None: + return frozenset() + if callable(value): + return value + if isinstance(value, (frozenset, set, list)): + return frozenset(value) + return frozenset([str(value)]) + + +def _get_empty_frozenset() -> frozenset[str]: + return frozenset() + + +@attr.s(frozen=True) +class MatchRule: + """Match a ZHA Entity to a cluster handler name or generic id.""" + + cluster_handler_names: frozenset[str] = attr.ib( + factory=frozenset, converter=set_or_callable + ) + generic_ids: frozenset[str] = attr.ib(factory=frozenset, converter=set_or_callable) + manufacturers: frozenset[str] | Callable = attr.ib( + factory=_get_empty_frozenset, converter=set_or_callable + ) + models: frozenset[str] | Callable = attr.ib( + factory=_get_empty_frozenset, converter=set_or_callable + ) + aux_cluster_handlers: frozenset[str] | Callable = attr.ib( + factory=_get_empty_frozenset, converter=set_or_callable + ) + quirk_ids: frozenset[str] | Callable = attr.ib( + factory=_get_empty_frozenset, converter=set_or_callable + ) + + @property + def weight(self) -> int: + """Return the weight of the matching rule. + + More specific matches should be preferred over less specific. Quirk class + matching rules have priority over model matching rules + and have a priority over manufacturer matching rules and rules matching a + single model/manufacturer get a better priority over rules matching multiple + models/manufacturers. And any model or manufacturers matching rules get better + priority over rules matching only cluster handlers. 
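+        Concretely, a quirk match adds 501 - N to the weight (N being the number
+        of quirk ids, or 1 for a callable), a model match adds 401 - N and a
+        manufacturer match adds 301 - N, while every matched cluster handler name,
+        generic id and aux cluster handler adds 10, 5 and 1 respectively.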
+ But in case of a cluster handler name/cluster handler id matching, we give rules matching + multiple cluster handlers a better priority over rules matching a single cluster handler. + """ + weight = 0 + if self.quirk_ids: + weight += 501 - (1 if callable(self.quirk_ids) else len(self.quirk_ids)) + + if self.models: + weight += 401 - (1 if callable(self.models) else len(self.models)) + + if self.manufacturers: + weight += 301 - ( + 1 if callable(self.manufacturers) else len(self.manufacturers) + ) + + weight += 10 * len(self.cluster_handler_names) + weight += 5 * len(self.generic_ids) + if isinstance(self.aux_cluster_handlers, frozenset): + weight += 1 * len(self.aux_cluster_handlers) + return weight + + def claim_cluster_handlers( + self, cluster_handlers: list[ClusterHandler] + ) -> list[ClusterHandler]: + """Return a list of cluster handlers this rule matches + aux cluster handlers.""" + claimed = [] + if isinstance(self.cluster_handler_names, frozenset): + claimed.extend( + [ch for ch in cluster_handlers if ch.name in self.cluster_handler_names] + ) + if isinstance(self.generic_ids, frozenset): + claimed.extend( + [ch for ch in cluster_handlers if ch.generic_id in self.generic_ids] + ) + if isinstance(self.aux_cluster_handlers, frozenset): + claimed.extend( + [ch for ch in cluster_handlers if ch.name in self.aux_cluster_handlers] + ) + return claimed + + def strict_matched( + self, + manufacturer: str, + model: str, + cluster_handlers: list, + quirk_id: str | None, + ) -> bool: + """Return True if this device matches the criteria.""" + return all(self._matched(manufacturer, model, cluster_handlers, quirk_id)) + + def loose_matched( + self, + manufacturer: str, + model: str, + cluster_handlers: list, + quirk_id: str | None, + ) -> bool: + """Return True if this device matches the criteria.""" + return any(self._matched(manufacturer, model, cluster_handlers, quirk_id)) + + def _matched( + self, + manufacturer: str, + model: str, + cluster_handlers: list, + quirk_id: str | None, + ) -> list: + """Return a list of field matches.""" + if not any(attr.asdict(self).values()): + return [False] + + matches = [] + if self.cluster_handler_names: + cluster_handler_names = {ch.name for ch in cluster_handlers} + matches.append(self.cluster_handler_names.issubset(cluster_handler_names)) + + if self.generic_ids: + all_generic_ids = {ch.generic_id for ch in cluster_handlers} + matches.append(self.generic_ids.issubset(all_generic_ids)) + + if self.manufacturers: + if callable(self.manufacturers): + matches.append(self.manufacturers(manufacturer)) + else: + matches.append(manufacturer in self.manufacturers) + + if self.models: + if callable(self.models): + matches.append(self.models(model)) + else: + matches.append(model in self.models) + + if self.quirk_ids: + if callable(self.quirk_ids): + matches.append(self.quirk_ids(quirk_id)) + else: + matches.append(quirk_id in self.quirk_ids) + + return matches + + +@dataclasses.dataclass +class EntityClassAndClusterHandlers: + """Container for entity class and corresponding cluster handlers.""" + + entity_class: type[ZhaEntity] + claimed_cluster_handlers: list[ClusterHandler] + + +class ZHAEntityRegistry: + """Cluster handler to ZHA Entity mapping.""" + + def __init__(self) -> None: + """Initialize Registry instance.""" + self._strict_registry: dict[Platform, dict[MatchRule, type[ZhaEntity]]] = ( + collections.defaultdict(dict) + ) + self._multi_entity_registry: dict[ + Platform, dict[int | str | None, dict[MatchRule, list[type[ZhaEntity]]]] + ] = 
collections.defaultdict( + lambda: collections.defaultdict(lambda: collections.defaultdict(list)) + ) + self._config_diagnostic_entity_registry: dict[ + Platform, dict[int | str | None, dict[MatchRule, list[type[ZhaEntity]]]] + ] = collections.defaultdict( + lambda: collections.defaultdict(lambda: collections.defaultdict(list)) + ) + self._group_registry: dict[str, type[ZhaGroupEntity]] = {} + self.single_device_matches: dict[Platform, dict[EUI64, list[str]]] = ( + collections.defaultdict(lambda: collections.defaultdict(list)) + ) + + def get_entity( + self, + component: Platform, + manufacturer: str, + model: str, + cluster_handlers: list[ClusterHandler], + quirk_id: str | None, + default: type[ZhaEntity] | None = None, + ) -> tuple[type[ZhaEntity] | None, list[ClusterHandler]]: + """Match a ZHA ClusterHandler to a ZHA Entity class.""" + matches = self._strict_registry[component] + for match in sorted(matches, key=WEIGHT_ATTR, reverse=True): + if match.strict_matched(manufacturer, model, cluster_handlers, quirk_id): + claimed = match.claim_cluster_handlers(cluster_handlers) + return self._strict_registry[component][match], claimed + + return default, [] + + def get_multi_entity( + self, + manufacturer: str, + model: str, + cluster_handlers: list[ClusterHandler], + quirk_id: str | None, + ) -> tuple[ + dict[Platform, list[EntityClassAndClusterHandlers]], list[ClusterHandler] + ]: + """Match ZHA cluster handlers to potentially multiple ZHA Entity classes.""" + result: dict[Platform, list[EntityClassAndClusterHandlers]] = ( + collections.defaultdict(list) + ) + all_claimed: set[ClusterHandler] = set() + for component, stop_match_groups in self._multi_entity_registry.items(): + for stop_match_grp, matches in stop_match_groups.items(): + sorted_matches = sorted(matches, key=WEIGHT_ATTR, reverse=True) + for match in sorted_matches: + if match.strict_matched( + manufacturer, model, cluster_handlers, quirk_id + ): + claimed = match.claim_cluster_handlers(cluster_handlers) + for ent_class in stop_match_groups[stop_match_grp][match]: + ent_n_cluster_handlers = EntityClassAndClusterHandlers( + ent_class, claimed + ) + result[component].append(ent_n_cluster_handlers) + all_claimed |= set(claimed) + if stop_match_grp: + break + + return result, list(all_claimed) + + def get_config_diagnostic_entity( + self, + manufacturer: str, + model: str, + cluster_handlers: list[ClusterHandler], + quirk_id: str | None, + ) -> tuple[ + dict[Platform, list[EntityClassAndClusterHandlers]], list[ClusterHandler] + ]: + """Match ZHA cluster handlers to potentially multiple ZHA Entity classes.""" + result: dict[Platform, list[EntityClassAndClusterHandlers]] = ( + collections.defaultdict(list) + ) + all_claimed: set[ClusterHandler] = set() + for ( + component, + stop_match_groups, + ) in self._config_diagnostic_entity_registry.items(): + for stop_match_grp, matches in stop_match_groups.items(): + sorted_matches = sorted(matches, key=WEIGHT_ATTR, reverse=True) + for match in sorted_matches: + if match.strict_matched( + manufacturer, model, cluster_handlers, quirk_id + ): + claimed = match.claim_cluster_handlers(cluster_handlers) + for ent_class in stop_match_groups[stop_match_grp][match]: + ent_n_cluster_handlers = EntityClassAndClusterHandlers( + ent_class, claimed + ) + result[component].append(ent_n_cluster_handlers) + all_claimed |= set(claimed) + if stop_match_grp: + break + + return result, list(all_claimed) + + def get_group_entity(self, component: str) -> type[ZhaGroupEntity] | None: + """Match a ZHA group to a ZHA 
Entity class.""" + return self._group_registry.get(component) + + def strict_match[_ZhaEntityT: type[ZhaEntity]]( + self, + component: Platform, + cluster_handler_names: set[str] | str | None = None, + generic_ids: set[str] | str | None = None, + manufacturers: Callable | set[str] | str | None = None, + models: Callable | set[str] | str | None = None, + aux_cluster_handlers: Callable | set[str] | str | None = None, + quirk_ids: set[str] | str | None = None, + ) -> Callable[[_ZhaEntityT], _ZhaEntityT]: + """Decorate a strict match rule.""" + + rule = MatchRule( + cluster_handler_names, + generic_ids, + manufacturers, + models, + aux_cluster_handlers, + quirk_ids, + ) + + def decorator(zha_ent: _ZhaEntityT) -> _ZhaEntityT: + """Register a strict match rule. + + All non-empty fields of a match rule must match. + """ + self._strict_registry[component][rule] = zha_ent + return zha_ent + + return decorator + + def multipass_match[_ZhaEntityT: type[ZhaEntity]]( + self, + component: Platform, + cluster_handler_names: set[str] | str | None = None, + generic_ids: set[str] | str | None = None, + manufacturers: Callable | set[str] | str | None = None, + models: Callable | set[str] | str | None = None, + aux_cluster_handlers: Callable | set[str] | str | None = None, + stop_on_match_group: int | str | None = None, + quirk_ids: set[str] | str | None = None, + ) -> Callable[[_ZhaEntityT], _ZhaEntityT]: + """Decorate a loose match rule.""" + + rule = MatchRule( + cluster_handler_names, + generic_ids, + manufacturers, + models, + aux_cluster_handlers, + quirk_ids, + ) + + def decorator(zha_entity: _ZhaEntityT) -> _ZhaEntityT: + """Register a loose match rule. + + All non empty fields of a match rule must match. + """ + # group the rules by cluster handlers + self._multi_entity_registry[component][stop_on_match_group][rule].append( + zha_entity + ) + return zha_entity + + return decorator + + def config_diagnostic_match[_ZhaEntityT: type[ZhaEntity]]( + self, + component: Platform, + cluster_handler_names: set[str] | str | None = None, + generic_ids: set[str] | str | None = None, + manufacturers: Callable | set[str] | str | None = None, + models: Callable | set[str] | str | None = None, + aux_cluster_handlers: Callable | set[str] | str | None = None, + stop_on_match_group: int | str | None = None, + quirk_ids: set[str] | str | None = None, + ) -> Callable[[_ZhaEntityT], _ZhaEntityT]: + """Decorate a loose match rule.""" + + rule = MatchRule( + cluster_handler_names, + generic_ids, + manufacturers, + models, + aux_cluster_handlers, + quirk_ids, + ) + + def decorator(zha_entity: _ZhaEntityT) -> _ZhaEntityT: + """Register a loose match rule. + + All non-empty fields of a match rule must match. 
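+            Matching entity classes are stored per platform and per
+            stop_on_match_group.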
+ """ + # group the rules by cluster handlers + self._config_diagnostic_entity_registry[component][stop_on_match_group][ + rule + ].append(zha_entity) + return zha_entity + + return decorator + + def group_match[_ZhaGroupEntityT: type[ZhaGroupEntity]]( + self, component: Platform + ) -> Callable[[_ZhaGroupEntityT], _ZhaGroupEntityT]: + """Decorate a group match rule.""" + + def decorator(zha_ent: _ZhaGroupEntityT) -> _ZhaGroupEntityT: + """Register a group match rule.""" + self._group_registry[component] = zha_ent + return zha_ent + + return decorator + + def prevent_entity_creation(self, platform: Platform, ieee: EUI64, key: str): + """Return True if the entity should not be created.""" + platform_restrictions = self.single_device_matches[platform] + device_restrictions = platform_restrictions[ieee] + if key in device_restrictions: + return True + device_restrictions.append(key) + return False + + def clean_up(self) -> None: + """Clean up post discovery.""" + self.single_device_matches = collections.defaultdict( + lambda: collections.defaultdict(list) + ) + + +ZHA_ENTITIES = ZHAEntityRegistry() diff --git a/homeassistant/components/zha/cover.py b/homeassistant/components/zha/cover.py index 0d6be2dbb35..718b6fed3a2 100644 --- a/homeassistant/components/zha/cover.py +++ b/homeassistant/components/zha/cover.py @@ -2,17 +2,16 @@ from __future__ import annotations -from collections.abc import Mapping +import asyncio import functools import logging -from typing import Any +from typing import TYPE_CHECKING, Any, cast -from zha.application.platforms.cover import Shade as ZhaShade -from zha.application.platforms.cover.const import ( - CoverEntityFeature as ZHACoverEntityFeature, -) +from zigpy.zcl.clusters.closures import WindowCovering as WindowCoveringCluster +from zigpy.zcl.foundation import Status from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, CoverDeviceClass, @@ -20,22 +19,41 @@ from homeassistant.components.cover import ( CoverEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, State, callback +from homeassistant.const import ( + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, + Platform, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .entity import ZHAEntity -from .helpers import ( +from .core import discovery +from .core.cluster_handlers.closures import WindowCoveringClusterHandler +from .core.const import ( + CLUSTER_HANDLER_COVER, + CLUSTER_HANDLER_LEVEL, + CLUSTER_HANDLER_ON_OFF, + CLUSTER_HANDLER_SHADE, SIGNAL_ADD_ENTITIES, - EntityData, - async_add_entities as zha_async_add_entities, - convert_zha_error_to_ha_error, - get_zha_data, + SIGNAL_ATTR_UPDATED, + SIGNAL_SET_LEVEL, ) +from .core.helpers import get_zha_data +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity + +if TYPE_CHECKING: + from .core.cluster_handlers import ClusterHandler + from .core.device import ZHADevice _LOGGER = logging.getLogger(__name__) +MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.COVER) + async def async_setup_entry( hass: HomeAssistant, @@ -50,143 +68,421 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, 
async_add_entities, ZhaCover, entities_to_create + discovery.async_add_entities, async_add_entities, entities_to_create ), ) config_entry.async_on_unload(unsub) -class ZhaCover(ZHAEntity, CoverEntity): +WCAttrs = WindowCoveringCluster.AttributeDefs +WCT = WindowCoveringCluster.WindowCoveringType +WCCS = WindowCoveringCluster.ConfigStatus + +ZCL_TO_COVER_DEVICE_CLASS = { + WCT.Awning: CoverDeviceClass.AWNING, + WCT.Drapery: CoverDeviceClass.CURTAIN, + WCT.Projector_screen: CoverDeviceClass.SHADE, + WCT.Rollershade: CoverDeviceClass.SHADE, + WCT.Rollershade_two_motors: CoverDeviceClass.SHADE, + WCT.Rollershade_exterior: CoverDeviceClass.SHADE, + WCT.Rollershade_exterior_two_motors: CoverDeviceClass.SHADE, + WCT.Shutter: CoverDeviceClass.SHUTTER, + WCT.Tilt_blind_tilt_only: CoverDeviceClass.BLIND, + WCT.Tilt_blind_tilt_and_lift: CoverDeviceClass.BLIND, +} + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_COVER) +class ZhaCover(ZhaEntity, CoverEntity): """Representation of a ZHA cover.""" - def __init__(self, entity_data: EntityData) -> None: - """Initialize the ZHA cover.""" - super().__init__(entity_data) + _attr_translation_key: str = "cover" - if self.entity_data.entity.info_object.device_class is not None: - self._attr_device_class = CoverDeviceClass( - self.entity_data.entity.info_object.device_class + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Init this cover.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + cluster_handler = self.cluster_handlers.get(CLUSTER_HANDLER_COVER) + assert cluster_handler + self._cover_cluster_handler: WindowCoveringClusterHandler = cast( + WindowCoveringClusterHandler, cluster_handler + ) + if self._cover_cluster_handler.window_covering_type: + self._attr_device_class: CoverDeviceClass | None = ( + ZCL_TO_COVER_DEVICE_CLASS.get( + self._cover_cluster_handler.window_covering_type + ) ) + self._attr_supported_features: CoverEntityFeature = ( + self._determine_supported_features() + ) + self._target_lift_position: int | None = None + self._target_tilt_position: int | None = None + self._determine_initial_state() - features = CoverEntityFeature(0) - zha_features: ZHACoverEntityFeature = self.entity_data.entity.supported_features + def _determine_supported_features(self) -> CoverEntityFeature: + """Determine the supported cover features.""" + supported_features: CoverEntityFeature = ( + CoverEntityFeature.OPEN + | CoverEntityFeature.CLOSE + | CoverEntityFeature.STOP + | CoverEntityFeature.SET_POSITION + ) + if ( + self._cover_cluster_handler.window_covering_type + and self._cover_cluster_handler.window_covering_type + in ( + WCT.Shutter, + WCT.Tilt_blind_tilt_only, + WCT.Tilt_blind_tilt_and_lift, + ) + ): + supported_features |= CoverEntityFeature.SET_TILT_POSITION + supported_features |= CoverEntityFeature.OPEN_TILT + supported_features |= CoverEntityFeature.CLOSE_TILT + supported_features |= CoverEntityFeature.STOP_TILT + return supported_features - if ZHACoverEntityFeature.OPEN in zha_features: - features |= CoverEntityFeature.OPEN - if ZHACoverEntityFeature.CLOSE in zha_features: - features |= CoverEntityFeature.CLOSE - if ZHACoverEntityFeature.SET_POSITION in zha_features: - features |= CoverEntityFeature.SET_POSITION - if ZHACoverEntityFeature.STOP in zha_features: - features |= CoverEntityFeature.STOP - if ZHACoverEntityFeature.OPEN_TILT in zha_features: - features |= CoverEntityFeature.OPEN_TILT - if 
ZHACoverEntityFeature.CLOSE_TILT in zha_features: - features |= CoverEntityFeature.CLOSE_TILT - if ZHACoverEntityFeature.STOP_TILT in zha_features: - features |= CoverEntityFeature.STOP_TILT - if ZHACoverEntityFeature.SET_TILT_POSITION in zha_features: - features |= CoverEntityFeature.SET_TILT_POSITION + def _determine_initial_state(self) -> None: + """Determine the initial state of the cover.""" + if ( + self._cover_cluster_handler.window_covering_type + and self._cover_cluster_handler.window_covering_type + in ( + WCT.Shutter, + WCT.Tilt_blind_tilt_only, + WCT.Tilt_blind_tilt_and_lift, + ) + ): + self._determine_state( + self.current_cover_tilt_position, is_lift_update=False + ) + if ( + self._cover_cluster_handler.window_covering_type + == WCT.Tilt_blind_tilt_and_lift + ): + state = self._state + self._determine_state(self.current_cover_position) + if state == STATE_OPEN and self._state == STATE_CLOSED: + # let the tilt state override the lift state + self._state = STATE_OPEN + else: + self._determine_state(self.current_cover_position) - self._attr_supported_features = features + def _determine_state(self, position_or_tilt, is_lift_update=True) -> None: + """Determine the state of the cover. - @property - def extra_state_attributes(self) -> Mapping[str, Any] | None: - """Return entity specific state attributes.""" - state = self.entity_data.entity.state - return { - "target_lift_position": state.get("target_lift_position"), - "target_tilt_position": state.get("target_tilt_position"), - } + In HA None is unknown, 0 is closed, 100 is fully open. + In ZCL 0 is fully open, 100 is fully closed. + Keep in mind the values have already been flipped to match HA + in the WindowCovering cluster handler + """ + if is_lift_update: + target = self._target_lift_position + current = self.current_cover_position + else: + target = self._target_tilt_position + current = self.current_cover_tilt_position + + if position_or_tilt == 100: + self._state = STATE_CLOSED + return + if target is not None and target != current: + # we are mid transition and shouldn't update the state + return + self._state = STATE_OPEN + + async def async_added_to_hass(self) -> None: + """Run when the cover entity is about to be added to hass.""" + await super().async_added_to_hass() + self.async_accept_signal( + self._cover_cluster_handler, SIGNAL_ATTR_UPDATED, self.zcl_attribute_updated + ) @property def is_closed(self) -> bool | None: - """Return True if the cover is closed.""" - return self.entity_data.entity.is_closed + """Return True if the cover is closed. + + In HA None is unknown, 0 is closed, 100 is fully open. + In ZCL 0 is fully open, 100 is fully closed. + Keep in mind the values have already been flipped to match HA + in the WindowCovering cluster handler + """ + if self.current_cover_position is None: + return None + return self.current_cover_position == 0 @property def is_opening(self) -> bool: """Return if the cover is opening or not.""" - return self.entity_data.entity.is_opening + return self._state == STATE_OPENING @property def is_closing(self) -> bool: """Return if the cover is closing or not.""" - return self.entity_data.entity.is_closing + return self._state == STATE_CLOSING @property def current_cover_position(self) -> int | None: - """Return the current position of ZHA cover.""" - return self.entity_data.entity.current_cover_position + """Return the current position of ZHA cover. + + In HA None is unknown, 0 is closed, 100 is fully open. + In ZCL 0 is fully open, 100 is fully closed. 
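+        (A ZCL lift value of 30 therefore corresponds to an HA position of 70.)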
+ Keep in mind the values have already been flipped to match HA + in the WindowCovering cluster handler + """ + return self._cover_cluster_handler.current_position_lift_percentage @property def current_cover_tilt_position(self) -> int | None: """Return the current tilt position of the cover.""" - return self.entity_data.entity.current_cover_tilt_position + return self._cover_cluster_handler.current_position_tilt_percentage - @convert_zha_error_to_ha_error - async def async_open_cover(self, **kwargs: Any) -> None: - """Open the cover.""" - await self.entity_data.entity.async_open_cover() - self.async_write_ha_state() - - @convert_zha_error_to_ha_error - async def async_open_cover_tilt(self, **kwargs: Any) -> None: - """Open the cover tilt.""" - await self.entity_data.entity.async_open_cover_tilt() - self.async_write_ha_state() - - @convert_zha_error_to_ha_error - async def async_close_cover(self, **kwargs: Any) -> None: - """Close the cover.""" - await self.entity_data.entity.async_close_cover() - self.async_write_ha_state() - - @convert_zha_error_to_ha_error - async def async_close_cover_tilt(self, **kwargs: Any) -> None: - """Close the cover tilt.""" - await self.entity_data.entity.async_close_cover_tilt() - self.async_write_ha_state() - - @convert_zha_error_to_ha_error - async def async_set_cover_position(self, **kwargs: Any) -> None: - """Move the cover to a specific position.""" - await self.entity_data.entity.async_set_cover_position( - position=kwargs.get(ATTR_POSITION) - ) - self.async_write_ha_state() - - @convert_zha_error_to_ha_error - async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: - """Move the cover tilt to a specific position.""" - await self.entity_data.entity.async_set_cover_tilt_position( - tilt_position=kwargs.get(ATTR_TILT_POSITION) - ) - self.async_write_ha_state() - - @convert_zha_error_to_ha_error - async def async_stop_cover(self, **kwargs: Any) -> None: - """Stop the cover.""" - await self.entity_data.entity.async_stop_cover() - self.async_write_ha_state() - - @convert_zha_error_to_ha_error - async def async_stop_cover_tilt(self, **kwargs: Any) -> None: - """Stop the cover tilt.""" - await self.entity_data.entity.async_stop_cover_tilt() + @callback + def zcl_attribute_updated(self, attr_id, attr_name, value): + """Handle position update from cluster handler.""" + if attr_id in ( + WCAttrs.current_position_lift_percentage.id, + WCAttrs.current_position_tilt_percentage.id, + ): + value = ( + self.current_cover_position + if attr_id == WCAttrs.current_position_lift_percentage.id + else self.current_cover_tilt_position + ) + self._determine_state( + value, + is_lift_update=attr_id == WCAttrs.current_position_lift_percentage.id, + ) self.async_write_ha_state() @callback - def restore_external_state_attributes(self, state: State) -> None: - """Restore entity state.""" + def async_update_state(self, state): + """Handle state update from HA operations below.""" + _LOGGER.debug("async_update_state=%s", state) + self._state = state + self.async_write_ha_state() - # Shades are a subtype of cover that do not need external state restored - if isinstance(self.entity_data.entity, ZhaShade): - return + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the cover.""" + res = await self._cover_cluster_handler.up_open() + if res[1] is not Status.SUCCESS: + raise HomeAssistantError(f"Failed to open cover: {res[1]}") + self.async_update_state(STATE_OPENING) - # Same as `light`, some entity state is not derived from ZCL attributes - 
self.entity_data.entity.restore_external_state_attributes( - state=state.state, - target_lift_position=state.attributes.get("target_lift_position"), - target_tilt_position=state.attributes.get("target_tilt_position"), + async def async_open_cover_tilt(self, **kwargs: Any) -> None: + """Open the cover tilt.""" + # 0 is open in ZCL + res = await self._cover_cluster_handler.go_to_tilt_percentage(0) + if res[1] is not Status.SUCCESS: + raise HomeAssistantError(f"Failed to open cover tilt: {res[1]}") + self.async_update_state(STATE_OPENING) + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close the cover.""" + res = await self._cover_cluster_handler.down_close() + if res[1] is not Status.SUCCESS: + raise HomeAssistantError(f"Failed to close cover: {res[1]}") + self.async_update_state(STATE_CLOSING) + + async def async_close_cover_tilt(self, **kwargs: Any) -> None: + """Close the cover tilt.""" + # 100 is closed in ZCL + res = await self._cover_cluster_handler.go_to_tilt_percentage(100) + if res[1] is not Status.SUCCESS: + raise HomeAssistantError(f"Failed to close cover tilt: {res[1]}") + self.async_update_state(STATE_CLOSING) + + async def async_set_cover_position(self, **kwargs: Any) -> None: + """Move the cover to a specific position.""" + self._target_lift_position = kwargs[ATTR_POSITION] + assert self._target_lift_position is not None + assert self.current_cover_position is not None + # the 100 - value is because we need to invert the value before giving it to ZCL + res = await self._cover_cluster_handler.go_to_lift_percentage( + 100 - self._target_lift_position ) + if res[1] is not Status.SUCCESS: + raise HomeAssistantError(f"Failed to set cover position: {res[1]}") + self.async_update_state( + STATE_CLOSING + if self._target_lift_position < self.current_cover_position + else STATE_OPENING + ) + + async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: + """Move the cover tilt to a specific position.""" + self._target_tilt_position = kwargs[ATTR_TILT_POSITION] + assert self._target_tilt_position is not None + assert self.current_cover_tilt_position is not None + # the 100 - value is because we need to invert the value before giving it to ZCL + res = await self._cover_cluster_handler.go_to_tilt_percentage( + 100 - self._target_tilt_position + ) + if res[1] is not Status.SUCCESS: + raise HomeAssistantError(f"Failed to set cover tilt position: {res[1]}") + self.async_update_state( + STATE_CLOSING + if self._target_tilt_position < self.current_cover_tilt_position + else STATE_OPENING + ) + + async def async_stop_cover(self, **kwargs: Any) -> None: + """Stop the cover.""" + res = await self._cover_cluster_handler.stop() + if res[1] is not Status.SUCCESS: + raise HomeAssistantError(f"Failed to stop cover: {res[1]}") + self._target_lift_position = self.current_cover_position + self._determine_state(self.current_cover_position) + self.async_write_ha_state() + + async def async_stop_cover_tilt(self, **kwargs: Any) -> None: + """Stop the cover tilt.""" + res = await self._cover_cluster_handler.stop() + if res[1] is not Status.SUCCESS: + raise HomeAssistantError(f"Failed to stop cover: {res[1]}") + self._target_tilt_position = self.current_cover_tilt_position + self._determine_state(self.current_cover_tilt_position, is_lift_update=False) + self.async_write_ha_state() + + +@MULTI_MATCH( + cluster_handler_names={ + CLUSTER_HANDLER_LEVEL, + CLUSTER_HANDLER_ON_OFF, + CLUSTER_HANDLER_SHADE, + } +) +class Shade(ZhaEntity, CoverEntity): + """ZHA Shade.""" + + 
_attr_device_class = CoverDeviceClass.SHADE + _attr_translation_key: str = "shade" + + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs, + ) -> None: + """Initialize the ZHA light.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._on_off_cluster_handler = self.cluster_handlers[CLUSTER_HANDLER_ON_OFF] + self._level_cluster_handler = self.cluster_handlers[CLUSTER_HANDLER_LEVEL] + self._position: int | None = None + self._is_open: bool | None = None + + @property + def current_cover_position(self) -> int | None: + """Return current position of cover. + + None is unknown, 0 is closed, 100 is fully open. + """ + return self._position + + @property + def is_closed(self) -> bool | None: + """Return True if shade is closed.""" + if self._is_open is None: + return None + return not self._is_open + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + self.async_accept_signal( + self._on_off_cluster_handler, + SIGNAL_ATTR_UPDATED, + self.async_set_open_closed, + ) + self.async_accept_signal( + self._level_cluster_handler, SIGNAL_SET_LEVEL, self.async_set_level + ) + + @callback + def async_restore_last_state(self, last_state): + """Restore previous state.""" + self._is_open = last_state.state == STATE_OPEN + if ATTR_CURRENT_POSITION in last_state.attributes: + self._position = last_state.attributes[ATTR_CURRENT_POSITION] + + @callback + def async_set_open_closed(self, attr_id: int, attr_name: str, value: bool) -> None: + """Set open/closed state.""" + self._is_open = bool(value) + self.async_write_ha_state() + + @callback + def async_set_level(self, value: int) -> None: + """Set the reported position.""" + value = max(0, min(255, value)) + self._position = int(value * 100 / 255) + self.async_write_ha_state() + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the window cover.""" + res = await self._on_off_cluster_handler.on() + if res[1] != Status.SUCCESS: + raise HomeAssistantError(f"Failed to open cover: {res[1]}") + + self._is_open = True + self.async_write_ha_state() + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close the window cover.""" + res = await self._on_off_cluster_handler.off() + if res[1] != Status.SUCCESS: + raise HomeAssistantError(f"Failed to close cover: {res[1]}") + + self._is_open = False + self.async_write_ha_state() + + async def async_set_cover_position(self, **kwargs: Any) -> None: + """Move the roller shutter to a specific position.""" + new_pos = kwargs[ATTR_POSITION] + res = await self._level_cluster_handler.move_to_level_with_on_off( + new_pos * 255 / 100, 1 + ) + + if res[1] != Status.SUCCESS: + raise HomeAssistantError(f"Failed to set cover position: {res[1]}") + + self._position = new_pos + self.async_write_ha_state() + + async def async_stop_cover(self, **kwargs: Any) -> None: + """Stop the cover.""" + res = await self._level_cluster_handler.stop() + if res[1] != Status.SUCCESS: + raise HomeAssistantError(f"Failed to stop cover: {res[1]}") + + +@MULTI_MATCH( + cluster_handler_names={CLUSTER_HANDLER_LEVEL, CLUSTER_HANDLER_ON_OFF}, + manufacturers="Keen Home Inc", +) +class KeenVent(Shade): + """Keen vent cover.""" + + _attr_device_class = CoverDeviceClass.DAMPER + _attr_translation_key: str = "keen_vent" + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the cover.""" + position = self._position or 100 + await asyncio.gather( + 
self._level_cluster_handler.move_to_level_with_on_off( + position * 255 / 100, 1 + ), + self._on_off_cluster_handler.on(), + ) + + self._is_open = True + self._position = position + self.async_write_ha_state() diff --git a/homeassistant/components/zha/device_action.py b/homeassistant/components/zha/device_action.py index b4b40880734..a0f16d61f41 100644 --- a/homeassistant/components/zha/device_action.py +++ b/homeassistant/components/zha/device_action.py @@ -5,25 +5,20 @@ from __future__ import annotations from typing import Any import voluptuous as vol -from zha.exceptions import ZHAException -from zha.zigbee.cluster_handlers.const import ( - CLUSTER_HANDLER_IAS_WD, - CLUSTER_HANDLER_INOVELLI, -) -from zha.zigbee.cluster_handlers.manufacturerspecific import ( - AllLEDEffectType, - SingleLEDEffectType, -) from homeassistant.components.device_automation import InvalidDeviceAutomationConfig from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_TYPE from homeassistant.core import Context, HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType, TemplateVarsType -from .const import DOMAIN -from .helpers import async_get_zha_device_proxy +from . import DOMAIN +from .core.cluster_handlers.manufacturerspecific import ( + AllLEDEffectType, + SingleLEDEffectType, +) +from .core.const import CLUSTER_HANDLER_IAS_WD, CLUSTER_HANDLER_INOVELLI +from .core.helpers import async_get_zha_device from .websocket_api import SERVICE_WARNING_DEVICE_SQUAWK, SERVICE_WARNING_DEVICE_WARN # mypy: disallow-any-generics @@ -149,7 +144,7 @@ async def async_get_actions( ) -> list[dict[str, str]]: """List device actions.""" try: - zha_device = async_get_zha_device_proxy(hass, device_id).device + zha_device = async_get_zha_device(hass, device_id) except (KeyError, AttributeError): return [] cluster_handlers = [ @@ -186,7 +181,7 @@ async def _execute_service_based_action( action_type = config[CONF_TYPE] service_name = SERVICE_NAMES[action_type] try: - zha_device = async_get_zha_device_proxy(hass, config[CONF_DEVICE_ID]).device + zha_device = async_get_zha_device(hass, config[CONF_DEVICE_ID]) except (KeyError, AttributeError): return @@ -206,7 +201,7 @@ async def _execute_cluster_handler_command_based_action( action_type = config[CONF_TYPE] cluster_handler_name = CLUSTER_HANDLER_MAPPINGS[action_type] try: - zha_device = async_get_zha_device_proxy(hass, config[CONF_DEVICE_ID]).device + zha_device = async_get_zha_device(hass, config[CONF_DEVICE_ID]) except (KeyError, AttributeError): return @@ -229,10 +224,7 @@ async def _execute_cluster_handler_command_based_action( f" {action_type}" ) - try: - await getattr(action_cluster_handler, action_type)(**config) - except ZHAException as err: - raise HomeAssistantError(err) from err + await getattr(action_cluster_handler, action_type)(**config) ZHA_ACTION_TYPES = { diff --git a/homeassistant/components/zha/device_tracker.py b/homeassistant/components/zha/device_tracker.py index fc374f6c44d..9c96fd0e346 100644 --- a/homeassistant/components/zha/device_tracker.py +++ b/homeassistant/components/zha/device_tracker.py @@ -3,21 +3,28 @@ from __future__ import annotations import functools +import time -from homeassistant.components.device_tracker import ScannerEntity +from homeassistant.components.device_tracker import ScannerEntity, SourceType from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform -from 
homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .entity import ZHAEntity -from .helpers import ( +from .core import discovery +from .core.const import ( + CLUSTER_HANDLER_POWER_CONFIGURATION, SIGNAL_ADD_ENTITIES, - async_add_entities as zha_async_add_entities, - get_zha_data, + SIGNAL_ATTR_UPDATED, ) +from .core.helpers import get_zha_data +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity +from .sensor import Battery + +STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.DEVICE_TRACKER) async def async_setup_entry( @@ -33,43 +40,92 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, - async_add_entities, - ZHADeviceScannerEntity, - entities_to_create, + discovery.async_add_entities, async_add_entities, entities_to_create ), ) config_entry.async_on_unload(unsub) -class ZHADeviceScannerEntity(ScannerEntity, ZHAEntity): +@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_POWER_CONFIGURATION) +class ZHADeviceScannerEntity(ScannerEntity, ZhaEntity): """Represent a tracked device.""" _attr_should_poll = True # BaseZhaEntity defaults to False _attr_name: str = "Device scanner" - @property - def is_connected(self) -> bool: - """Return true if the device is connected to the network.""" - return self.entity_data.entity.is_connected + def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): + """Initialize the ZHA device tracker.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._battery_cluster_handler = self.cluster_handlers.get( + CLUSTER_HANDLER_POWER_CONFIGURATION + ) + self._connected = False + self._keepalive_interval = 60 + self._battery_level = None + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + if self._battery_cluster_handler: + self.async_accept_signal( + self._battery_cluster_handler, + SIGNAL_ATTR_UPDATED, + self.async_battery_percentage_remaining_updated, + ) + + async def async_update(self) -> None: + """Handle polling.""" + if self.zha_device.last_seen is None: + self._connected = False + else: + difference = time.time() - self.zha_device.last_seen + if difference > self._keepalive_interval: + self._connected = False + else: + self._connected = True @property - def battery_level(self) -> int | None: + def is_connected(self): + """Return true if the device is connected to the network.""" + return self._connected + + @property + def source_type(self) -> SourceType: + """Return the source type, eg gps or router, of the device.""" + return SourceType.ROUTER + + @callback + def async_battery_percentage_remaining_updated(self, attr_id, attr_name, value): + """Handle tracking.""" + if attr_name != "battery_percentage_remaining": + return + self.debug("battery_percentage_remaining updated: %s", value) + self._connected = True + self._battery_level = Battery.formatter(value) + self.async_write_ha_state() + + @property + def battery_level(self): """Return the battery level of the device. Percentage from 0-100. 
""" - return self.entity_data.entity.battery_level + return self._battery_level - @property # type: ignore[explicit-override, misc] - def device_info(self) -> DeviceInfo: + @property # type: ignore[misc] + def device_info( + self, + ) -> DeviceInfo: """Return device info.""" # We opt ZHA device tracker back into overriding this method because # it doesn't track IP-based devices. - return ZHAEntity.device_info.__get__(self) + # Call Super because ScannerEntity overrode it. + # mypy doesn't know about fget: https://github.com/python/mypy/issues/6185 + return ZhaEntity.device_info.fget(self) # type: ignore[attr-defined] @property def unique_id(self) -> str: """Return unique ID.""" # Call Super because ScannerEntity overrode it. - return ZHAEntity.unique_id.__get__(self) + # mypy doesn't know about fget: https://github.com/python/mypy/issues/6185 + return ZhaEntity.unique_id.fget(self) # type: ignore[attr-defined] diff --git a/homeassistant/components/zha/device_trigger.py b/homeassistant/components/zha/device_trigger.py index 8e8509e62a5..a2ae734b8fc 100644 --- a/homeassistant/components/zha/device_trigger.py +++ b/homeassistant/components/zha/device_trigger.py @@ -1,10 +1,9 @@ """Provides device automations for ZHA devices that emit events.""" import voluptuous as vol -from zha.application.const import ZHA_EVENT -from homeassistant.components.device_automation import ( - DEVICE_TRIGGER_BASE_SCHEMA, +from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA +from homeassistant.components.device_automation.exceptions import ( InvalidDeviceAutomationConfig, ) from homeassistant.components.homeassistant.triggers import event as event_trigger @@ -14,8 +13,9 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN as ZHA_DOMAIN -from .helpers import async_get_zha_device_proxy, get_zha_data +from . 
import DOMAIN as ZHA_DOMAIN +from .core.const import ZHA_EVENT +from .core.helpers import async_get_zha_device, get_zha_data CONF_SUBTYPE = "subtype" DEVICE = "device" @@ -31,7 +31,7 @@ def _get_device_trigger_data(hass: HomeAssistant, device_id: str) -> tuple[str, # First, try checking to see if the device itself is accessible try: - zha_device = async_get_zha_device_proxy(hass, device_id).device + zha_device = async_get_zha_device(hass, device_id) except ValueError: pass else: diff --git a/homeassistant/components/zha/diagnostics.py b/homeassistant/components/zha/diagnostics.py index 234f10d59ae..fff816777c0 100644 --- a/homeassistant/components/zha/diagnostics.py +++ b/homeassistant/components/zha/diagnostics.py @@ -6,36 +6,31 @@ import dataclasses from importlib.metadata import version from typing import Any -from zha.application.const import ( - ATTR_ATTRIBUTE, +from zigpy.config import CONF_NWK_EXTENDED_PAN_ID +from zigpy.profiles import PROFILES +from zigpy.types import Channels +from zigpy.zcl import Cluster + +from homeassistant.components.diagnostics.util import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ID, CONF_NAME, CONF_UNIQUE_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from .core.const import ( + ATTR_ATTRIBUTE_NAME, ATTR_DEVICE_TYPE, ATTR_IEEE, ATTR_IN_CLUSTERS, ATTR_OUT_CLUSTERS, ATTR_PROFILE_ID, ATTR_VALUE, + CONF_ALARM_MASTER_CODE, UNKNOWN, ) -from zha.application.gateway import Gateway -from zha.zigbee.device import Device -from zigpy.config import CONF_NWK_EXTENDED_PAN_ID -from zigpy.profiles import PROFILES -from zigpy.types import Channels -from zigpy.zcl import Cluster - -from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ID, CONF_NAME, CONF_UNIQUE_ID -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from .const import CONF_ALARM_MASTER_CODE -from .helpers import ( - ZHADeviceProxy, - async_get_zha_device_proxy, - get_zha_data, - get_zha_gateway, -) +from .core.device import ZHADevice +from .core.gateway import ZHAGateway +from .core.helpers import async_get_zha_device, get_zha_data, get_zha_gateway KEYS_TO_REDACT = { ATTR_IEEE, @@ -50,15 +45,6 @@ ATTRIBUTES = "attributes" CLUSTER_DETAILS = "cluster_details" UNSUPPORTED_ATTRIBUTES = "unsupported_attributes" -BELLOWS_VERSION = version("bellows") -ZIGPY_VERSION = version("zigpy") -ZIGPY_DECONZ_VERSION = version("zigpy-deconz") -ZIGPY_XBEE_VERSION = version("zigpy-xbee") -ZIGPY_ZNP_VERSION = version("zigpy-znp") -ZIGPY_ZIGATE_VERSION = version("zigpy-zigate") -ZHA_QUIRKS_VERSION = version("zha-quirks") -ZHA_VERSION = version("zha") - def shallow_asdict(obj: Any) -> dict: """Return a shallow copy of a dataclass as a dict.""" @@ -79,7 +65,7 @@ async def async_get_config_entry_diagnostics( ) -> dict[str, Any]: """Return diagnostics for a config entry.""" zha_data = get_zha_data(hass) - gateway: Gateway = get_zha_gateway(hass) + gateway: ZHAGateway = get_zha_gateway(hass) app = gateway.application_controller energy_scan = await app.energy_scan( @@ -95,14 +81,13 @@ async def async_get_config_entry_diagnostics( channel: 100 * energy / 255 for channel, energy in energy_scan.items() }, "versions": { - "bellows": BELLOWS_VERSION, - "zigpy": ZIGPY_VERSION, - "zigpy_deconz": ZIGPY_DECONZ_VERSION, - "zigpy_xbee": ZIGPY_XBEE_VERSION, - 
"zigpy_znp": ZIGPY_ZNP_VERSION, - "zigpy_zigate": ZIGPY_ZIGATE_VERSION, - "zhaquirks": ZHA_QUIRKS_VERSION, - "zha": ZHA_VERSION, + "bellows": version("bellows"), + "zigpy": version("zigpy"), + "zigpy_deconz": version("zigpy-deconz"), + "zigpy_xbee": version("zigpy-xbee"), + "zigpy_znp": version("zigpy_znp"), + "zigpy_zigate": version("zigpy-zigate"), + "zhaquirks": version("zha-quirks"), }, "devices": [ { @@ -121,15 +106,13 @@ async def async_get_device_diagnostics( hass: HomeAssistant, config_entry: ConfigEntry, device: dr.DeviceEntry ) -> dict[str, Any]: """Return diagnostics for a device.""" - zha_device_proxy: ZHADeviceProxy = async_get_zha_device_proxy(hass, device.id) - device_info: dict[str, Any] = zha_device_proxy.zha_device_info - device_info[CLUSTER_DETAILS] = get_endpoint_cluster_attr_data( - zha_device_proxy.device - ) + zha_device: ZHADevice = async_get_zha_device(hass, device.id) + device_info: dict[str, Any] = zha_device.zha_device_info + device_info[CLUSTER_DETAILS] = get_endpoint_cluster_attr_data(zha_device) return async_redact_data(device_info, KEYS_TO_REDACT) -def get_endpoint_cluster_attr_data(zha_device: Device) -> dict: +def get_endpoint_cluster_attr_data(zha_device: ZHADevice) -> dict: """Return endpoint cluster attribute data.""" cluster_details = {} for ep_id, endpoint in zha_device.device.endpoints.items(): @@ -167,15 +150,27 @@ def get_endpoint_cluster_attr_data(zha_device: Device) -> dict: def get_cluster_attr_data(cluster: Cluster) -> dict: """Return cluster attribute data.""" + unsupported_attributes = {} + for u_attr in cluster.unsupported_attributes: + try: + u_attr_def = cluster.find_attribute(u_attr) + unsupported_attributes[f"0x{u_attr_def.id:04x}"] = { + ATTR_ATTRIBUTE_NAME: u_attr_def.name + } + except KeyError: + if isinstance(u_attr, int): + unsupported_attributes[f"0x{u_attr:04x}"] = {} + else: + unsupported_attributes[u_attr] = {} + return { ATTRIBUTES: { f"0x{attr_id:04x}": { - ATTR_ATTRIBUTE: repr(attr_def), - ATTR_VALUE: cluster.get(attr_def.name), + ATTR_ATTRIBUTE_NAME: attr_def.name, + ATTR_VALUE: attr_value, } for attr_id, attr_def in cluster.attributes.items() + if (attr_value := cluster.get(attr_def.name)) is not None }, - UNSUPPORTED_ATTRIBUTES: sorted( - cluster.unsupported_attributes, key=lambda v: (isinstance(v, str), v) - ), + UNSUPPORTED_ATTRIBUTES: unsupported_attributes, } diff --git a/homeassistant/components/zha/entity.py b/homeassistant/components/zha/entity.py index 3e3d0642ca2..f10e377dc46 100644 --- a/homeassistant/components/zha/entity.py +++ b/homeassistant/components/zha/entity.py @@ -4,82 +4,86 @@ from __future__ import annotations import asyncio from collections.abc import Callable -from functools import partial +import functools import logging -from typing import Any +from typing import TYPE_CHECKING, Any, Self -from propcache import cached_property -from zha.mixins import LogMixin +from zigpy.quirks.v2 import EntityMetadata, EntityType -from homeassistant.const import ATTR_MANUFACTURER, ATTR_MODEL, ATTR_NAME, EntityCategory -from homeassistant.core import State, callback +from homeassistant.const import ATTR_NAME, EntityCategory +from homeassistant.core import CALLBACK_TYPE, Event, EventStateChangedData, callback +from homeassistant.helpers import entity +from homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE, DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity import Entity +from 
homeassistant.helpers.dispatcher import ( + async_dispatcher_connect, + async_dispatcher_send, +) +from homeassistant.helpers.event import async_track_state_change_event from homeassistant.helpers.restore_state import RestoreEntity -from homeassistant.helpers.typing import UNDEFINED, UndefinedType -from .const import DOMAIN -from .helpers import SIGNAL_REMOVE_ENTITIES, EntityData, convert_zha_error_to_ha_error +from .core.const import ( + ATTR_MANUFACTURER, + ATTR_MODEL, + DOMAIN, + SIGNAL_GROUP_ENTITY_REMOVED, + SIGNAL_GROUP_MEMBERSHIP_CHANGE, + SIGNAL_REMOVE, +) +from .core.helpers import LogMixin, get_zha_gateway + +if TYPE_CHECKING: + from .core.cluster_handlers import ClusterHandler + from .core.device import ZHADevice _LOGGER = logging.getLogger(__name__) +ENTITY_SUFFIX = "entity_suffix" +DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY = 0.5 -class ZHAEntity(LogMixin, RestoreEntity, Entity): - """ZHA eitity.""" + +class BaseZhaEntity(LogMixin, entity.Entity): + """A base class for ZHA entities.""" + + _unique_id_suffix: str | None = None + """suffix to add to the unique_id of the entity. Used for multi + entities using the same cluster handler/cluster id for the entity.""" _attr_has_entity_name = True _attr_should_poll = False - remove_future: asyncio.Future[Any] - def __init__(self, entity_data: EntityData, *args, **kwargs) -> None: + def __init__(self, unique_id: str, zha_device: ZHADevice, **kwargs: Any) -> None: """Init ZHA entity.""" - super().__init__(*args, **kwargs) - self.entity_data: EntityData = entity_data + self._unique_id: str = unique_id + if self._unique_id_suffix: + self._unique_id += f"-{self._unique_id_suffix}" + self._state: Any = None + self._extra_state_attributes: dict[str, Any] = {} + self._zha_device = zha_device self._unsubs: list[Callable[[], None]] = [] - if self.entity_data.entity.icon is not None: - # Only custom quirks will realistically set an icon - self._attr_icon = self.entity_data.entity.icon - - meta = self.entity_data.entity.info_object - self._attr_unique_id = meta.unique_id - - if meta.entity_category is not None: - self._attr_entity_category = EntityCategory(meta.entity_category) - - self._attr_entity_registry_enabled_default = ( - meta.entity_registry_enabled_default - ) - - if meta.translation_key is not None: - self._attr_translation_key = meta.translation_key - - @cached_property - def name(self) -> str | UndefinedType | None: - """Return the name of the entity.""" - meta = self.entity_data.entity.info_object - original_name = super().name - - if original_name not in (UNDEFINED, None) or meta.fallback_name is None: - return original_name - - # This is to allow local development and to register niche devices, since - # their translation_key will probably never be added to `zha/strings.json`. 
- self._attr_name = meta.fallback_name - return super().name + @property + def unique_id(self) -> str: + """Return a unique ID.""" + return self._unique_id @property - def available(self) -> bool: - """Return entity availability.""" - return self.entity_data.entity.available + def zha_device(self) -> ZHADevice: + """Return the ZHA device this entity is attached to.""" + return self._zha_device + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return device specific state attributes.""" + return self._extra_state_attributes @property def device_info(self) -> DeviceInfo: """Return a device description for device registry.""" - zha_device_info = self.entity_data.device_proxy.device_info + zha_device_info = self._zha_device.device_info ieee = zha_device_info["ieee"] - zha_gateway = self.entity_data.device_proxy.gateway_proxy.gateway + + zha_gateway = get_zha_gateway(self.hass) return DeviceInfo( connections={(CONNECTION_ZIGBEE, ieee)}, @@ -91,67 +95,265 @@ class ZHAEntity(LogMixin, RestoreEntity, Entity): ) @callback - def _handle_entity_events(self, event: Any) -> None: + def async_state_changed(self) -> None: """Entity state changed.""" - self.debug("Handling event from entity: %s", event) self.async_write_ha_state() - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - self.remove_future = self.hass.loop.create_future() - self._unsubs.append( - self.entity_data.entity.on_all_events(self._handle_entity_events) - ) - remove_signal = ( - f"{SIGNAL_REMOVE_ENTITIES}_group_{self.entity_data.group_proxy.group.group_id}" - if self.entity_data.is_group_entity - and self.entity_data.group_proxy is not None - else f"{SIGNAL_REMOVE_ENTITIES}_{self.entity_data.device_proxy.device.ieee}" - ) - self._unsubs.append( - async_dispatcher_connect( - self.hass, - remove_signal, - partial(self.async_remove, force_remove=True), - ) - ) - self.entity_data.device_proxy.gateway_proxy.register_entity_reference( - self.entity_id, - self.entity_data, - self.device_info, - self.remove_future, - ) - - if (state := await self.async_get_last_state()) is None: - return - - self.restore_external_state_attributes(state) + @callback + def async_update_state_attribute(self, key: str, value: Any) -> None: + """Update a single device state attribute.""" + self._extra_state_attributes.update({key: value}) + self.async_write_ha_state() @callback - def restore_external_state_attributes(self, state: State) -> None: - """Restore ephemeral external state from Home Assistant back into ZHA.""" - - # Some operations rely on extra state that is not maintained in the ZCL - # attribute cache. Until ZHA is able to maintain its own persistent state (or - # provides a more generic hook to utilize HA to do this), we directly restore - # them. 
+ def async_set_state(self, attr_id: int, attr_name: str, value: Any) -> None: + """Set the entity state.""" async def async_will_remove_from_hass(self) -> None: """Disconnect entity object when removed.""" for unsub in self._unsubs[:]: unsub() self._unsubs.remove(unsub) - await super().async_will_remove_from_hass() - self.remove_future.set_result(True) - @convert_zha_error_to_ha_error - async def async_update(self) -> None: - """Update the entity.""" - await self.entity_data.entity.async_update() - self.async_write_ha_state() + @callback + def async_accept_signal( + self, + cluster_handler: ClusterHandler | None, + signal: str, + func: Callable[..., Any], + signal_override=False, + ): + """Accept a signal from a cluster handler.""" + unsub = None + if signal_override: + unsub = async_dispatcher_connect(self.hass, signal, func) + else: + assert cluster_handler + unsub = async_dispatcher_connect( + self.hass, f"{cluster_handler.unique_id}_{signal}", func + ) + self._unsubs.append(unsub) def log(self, level: int, msg: str, *args, **kwargs): """Log a message.""" msg = f"%s: {msg}" args = (self.entity_id, *args) _LOGGER.log(level, msg, *args, **kwargs) + + +class ZhaEntity(BaseZhaEntity, RestoreEntity): + """A base class for non group ZHA entities.""" + + remove_future: asyncio.Future[Any] + + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Init ZHA entity.""" + super().__init__(unique_id, zha_device, **kwargs) + + self.cluster_handlers: dict[str, ClusterHandler] = {} + for cluster_handler in cluster_handlers: + self.cluster_handlers[cluster_handler.name] = cluster_handler + + @classmethod + def create_entity( + cls, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> Self | None: + """Entity Factory. 
+ + Return entity if it is a supported configuration, otherwise return None + """ + return cls(unique_id, zha_device, cluster_handlers, **kwargs) + + def _init_from_quirks_metadata(self, entity_metadata: EntityMetadata) -> None: + """Init this entity from the quirks metadata.""" + if entity_metadata.initially_disabled: + self._attr_entity_registry_enabled_default = False + + has_device_class = hasattr(entity_metadata, "device_class") + has_attribute_name = hasattr(entity_metadata, "attribute_name") + has_command_name = hasattr(entity_metadata, "command_name") + if not has_device_class or ( + has_device_class and entity_metadata.device_class is None + ): + if entity_metadata.translation_key: + self._attr_translation_key = entity_metadata.translation_key + elif has_attribute_name: + self._attr_translation_key = entity_metadata.attribute_name + elif has_command_name: + self._attr_translation_key = entity_metadata.command_name + if has_attribute_name: + self._unique_id_suffix = entity_metadata.attribute_name + elif has_command_name: + self._unique_id_suffix = entity_metadata.command_name + if entity_metadata.entity_type is EntityType.CONFIG: + self._attr_entity_category = EntityCategory.CONFIG + elif entity_metadata.entity_type is EntityType.DIAGNOSTIC: + self._attr_entity_category = EntityCategory.DIAGNOSTIC + else: + self._attr_entity_category = None + + @property + def available(self) -> bool: + """Return entity availability.""" + return self._zha_device.available + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + self.remove_future = self.hass.loop.create_future() + self.async_accept_signal( + None, + f"{SIGNAL_REMOVE}_{self.zha_device.ieee}", + functools.partial(self.async_remove, force_remove=True), + signal_override=True, + ) + + if last_state := await self.async_get_last_state(): + self.async_restore_last_state(last_state) + + self.async_accept_signal( + None, + f"{self.zha_device.available_signal}_entity", + self.async_state_changed, + signal_override=True, + ) + self._zha_device.gateway.register_entity_reference( + self._zha_device.ieee, + self.entity_id, + self._zha_device, + self.cluster_handlers, + self.device_info, + self.remove_future, + ) + + async def async_will_remove_from_hass(self) -> None: + """Disconnect entity object when removed.""" + await super().async_will_remove_from_hass() + self.zha_device.gateway.remove_entity_reference(self) + self.remove_future.set_result(True) + + @callback + def async_restore_last_state(self, last_state) -> None: + """Restore previous state.""" + + async def async_update(self) -> None: + """Retrieve latest state.""" + tasks = [ + cluster_handler.async_update() + for cluster_handler in self.cluster_handlers.values() + if hasattr(cluster_handler, "async_update") + ] + if tasks: + await asyncio.gather(*tasks) + + +class ZhaGroupEntity(BaseZhaEntity): + """A base class for ZHA group entities.""" + + # The group name is set in the initializer + _attr_name: str + + def __init__( + self, + entity_ids: list[str], + unique_id: str, + group_id: int, + zha_device: ZHADevice, + **kwargs: Any, + ) -> None: + """Initialize a ZHA group.""" + super().__init__(unique_id, zha_device, **kwargs) + self._available = False + self._group = zha_device.gateway.groups.get(group_id) + self._group_id: int = group_id + self._entity_ids: list[str] = entity_ids + self._async_unsub_state_changed: CALLBACK_TYPE | None = None + self._handled_group_membership = False + self._change_listener_debouncer: Debouncer | None = None + 
self._update_group_from_child_delay = DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY + + self._attr_name = self._group.name + + @property + def available(self) -> bool: + """Return entity availability.""" + return self._available + + @classmethod + def create_entity( + cls, + entity_ids: list[str], + unique_id: str, + group_id: int, + zha_device: ZHADevice, + **kwargs: Any, + ) -> Self | None: + """Group Entity Factory. + + Return entity if it is a supported configuration, otherwise return None + """ + return cls(entity_ids, unique_id, group_id, zha_device, **kwargs) + + async def _handle_group_membership_changed(self): + """Handle group membership changed.""" + # Make sure we don't call remove twice as members are removed + if self._handled_group_membership: + return + + self._handled_group_membership = True + await self.async_remove(force_remove=True) + if len(self._group.members) >= 2: + async_dispatcher_send( + self.hass, SIGNAL_GROUP_ENTITY_REMOVED, self._group_id + ) + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + await super().async_added_to_hass() + await self.async_update() + + self.async_accept_signal( + None, + f"{SIGNAL_GROUP_MEMBERSHIP_CHANGE}_0x{self._group_id:04x}", + self._handle_group_membership_changed, + signal_override=True, + ) + + if self._change_listener_debouncer is None: + self._change_listener_debouncer = Debouncer( + self.hass, + _LOGGER, + cooldown=self._update_group_from_child_delay, + immediate=False, + function=functools.partial(self.async_update_ha_state, True), + ) + self.async_on_remove(self._change_listener_debouncer.async_cancel) + self._async_unsub_state_changed = async_track_state_change_event( + self.hass, self._entity_ids, self.async_state_changed_listener + ) + + @callback + def async_state_changed_listener(self, event: Event[EventStateChangedData]) -> None: + """Handle child updates.""" + # Delay to ensure that we get updates from all members before updating the group + assert self._change_listener_debouncer + self._change_listener_debouncer.async_schedule_call() + + async def async_will_remove_from_hass(self) -> None: + """Handle removal from Home Assistant.""" + await super().async_will_remove_from_hass() + if self._async_unsub_state_changed is not None: + self._async_unsub_state_changed() + self._async_unsub_state_changed = None + + async def async_update(self) -> None: + """Update the state of the group entity.""" diff --git a/homeassistant/components/zha/fan.py b/homeassistant/components/zha/fan.py index 767c0d4cfb7..3677befb76e 100644 --- a/homeassistant/components/zha/fan.py +++ b/homeassistant/components/zha/fan.py @@ -2,26 +2,54 @@ from __future__ import annotations +from abc import abstractmethod import functools +import math from typing import Any -from zha.application.platforms.fan.const import FanEntityFeature as ZHAFanEntityFeature +from zigpy.zcl.clusters import hvac -from homeassistant.components.fan import FanEntity, FanEntityFeature +from homeassistant.components.fan import ( + ATTR_PERCENTAGE, + ATTR_PRESET_MODE, + FanEntity, + FanEntityFeature, +) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .entity import ZHAEntity -from .helpers import ( - 
SIGNAL_ADD_ENTITIES, - EntityData, - async_add_entities as zha_async_add_entities, - convert_zha_error_to_ha_error, - get_zha_data, +from homeassistant.util.percentage import ( + percentage_to_ranged_value, + ranged_value_to_percentage, ) +from homeassistant.util.scaling import int_states_in_range + +from .core import discovery +from .core.cluster_handlers import wrap_zigpy_exceptions +from .core.const import CLUSTER_HANDLER_FAN, SIGNAL_ADD_ENTITIES, SIGNAL_ATTR_UPDATED +from .core.helpers import get_zha_data +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity, ZhaGroupEntity + +# Additional speeds in zigbee's ZCL +# Spec is unclear as to what this value means. On King Of Fans HBUniversal +# receiver, this means Very High. +PRESET_MODE_ON = "on" +# The fan speed is self-regulated +PRESET_MODE_AUTO = "auto" +# When the heated/cooled space is occupied, the fan is always on +PRESET_MODE_SMART = "smart" + +SPEED_RANGE = (1, 3) # off is not included +PRESET_MODES_TO_NAME = {4: PRESET_MODE_ON, 5: PRESET_MODE_AUTO, 6: PRESET_MODE_SMART} + +DEFAULT_ON_PERCENTAGE = 50 + +STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.FAN) +GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, Platform.FAN) +MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.FAN) async def async_setup_entry( @@ -37,65 +65,50 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, async_add_entities, ZhaFan, entities_to_create + discovery.async_add_entities, + async_add_entities, + entities_to_create, ), ) config_entry.async_on_unload(unsub) -class ZhaFan(FanEntity, ZHAEntity): - """Representation of a ZHA fan.""" +class BaseFan(FanEntity): + """Base representation of a ZHA fan.""" + _attr_supported_features = FanEntityFeature.SET_SPEED _attr_translation_key: str = "fan" - _enable_turn_on_off_backwards_compatibility = False - - def __init__(self, entity_data: EntityData) -> None: - """Initialize the ZHA fan.""" - super().__init__(entity_data) - features = FanEntityFeature(0) - zha_features: ZHAFanEntityFeature = self.entity_data.entity.supported_features - - if ZHAFanEntityFeature.DIRECTION in zha_features: - features |= FanEntityFeature.DIRECTION - if ZHAFanEntityFeature.OSCILLATE in zha_features: - features |= FanEntityFeature.OSCILLATE - if ZHAFanEntityFeature.PRESET_MODE in zha_features: - features |= FanEntityFeature.PRESET_MODE - if ZHAFanEntityFeature.SET_SPEED in zha_features: - features |= FanEntityFeature.SET_SPEED - if ZHAFanEntityFeature.TURN_ON in zha_features: - features |= FanEntityFeature.TURN_ON - if ZHAFanEntityFeature.TURN_OFF in zha_features: - features |= FanEntityFeature.TURN_OFF - - self._attr_supported_features = features - - @property - def preset_mode(self) -> str | None: - """Return the current preset mode.""" - return self.entity_data.entity.preset_mode @property def preset_modes(self) -> list[str]: """Return the available preset modes.""" - return self.entity_data.entity.preset_modes + return list(self.preset_modes_to_name.values()) + + @property + def preset_modes_to_name(self) -> dict[int, str]: + """Return a dict from preset mode to name.""" + return PRESET_MODES_TO_NAME + + @property + def preset_name_to_mode(self) -> dict[str, int]: + """Return a dict from preset name to mode.""" + return {v: k for k, v in self.preset_modes_to_name.items()} @property def default_on_percentage(self) -> int: """Return the default on percentage.""" - return self.entity_data.entity.default_on_percentage + 
return DEFAULT_ON_PERCENTAGE @property def speed_range(self) -> tuple[int, int]: """Return the range of speeds the fan supports. Off is not included.""" - return self.entity_data.entity.speed_range + return SPEED_RANGE @property def speed_count(self) -> int: """Return the number of speeds the fan supports.""" - return self.entity_data.entity.speed_count + return int_states_in_range(self.speed_range) - @convert_zha_error_to_ha_error async def async_turn_on( self, percentage: int | None = None, @@ -103,30 +116,201 @@ class ZhaFan(FanEntity, ZHAEntity): **kwargs: Any, ) -> None: """Turn the entity on.""" - await self.entity_data.entity.async_turn_on( - percentage=percentage, preset_mode=preset_mode - ) - self.async_write_ha_state() + if percentage is None: + percentage = self.default_on_percentage + await self.async_set_percentage(percentage) - @convert_zha_error_to_ha_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - await self.entity_data.entity.async_turn_off() - self.async_write_ha_state() + await self.async_set_percentage(0) - @convert_zha_error_to_ha_error async def async_set_percentage(self, percentage: int) -> None: """Set the speed percentage of the fan.""" - await self.entity_data.entity.async_set_percentage(percentage=percentage) - self.async_write_ha_state() + fan_mode = math.ceil(percentage_to_ranged_value(self.speed_range, percentage)) + await self._async_set_fan_mode(fan_mode) - @convert_zha_error_to_ha_error async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode for the fan.""" - await self.entity_data.entity.async_set_preset_mode(preset_mode=preset_mode) - self.async_write_ha_state() + await self._async_set_fan_mode(self.preset_name_to_mode[preset_mode]) + + @abstractmethod + async def _async_set_fan_mode(self, fan_mode: int) -> None: + """Set the fan mode for the fan.""" + + @callback + def async_set_state(self, attr_id, attr_name, value): + """Handle state update from cluster handler.""" + + +@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_FAN) +class ZhaFan(BaseFan, ZhaEntity): + """Representation of a ZHA fan.""" + + def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): + """Init this sensor.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._fan_cluster_handler = self.cluster_handlers.get(CLUSTER_HANDLER_FAN) + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + self.async_accept_signal( + self._fan_cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state + ) @property def percentage(self) -> int | None: """Return the current speed percentage.""" - return self.entity_data.entity.percentage + if ( + self._fan_cluster_handler.fan_mode is None + or self._fan_cluster_handler.fan_mode > self.speed_range[1] + ): + return None + if self._fan_cluster_handler.fan_mode == 0: + return 0 + return ranged_value_to_percentage( + self.speed_range, self._fan_cluster_handler.fan_mode + ) + + @property + def preset_mode(self) -> str | None: + """Return the current preset mode.""" + return self.preset_modes_to_name.get(self._fan_cluster_handler.fan_mode) + + @callback + def async_set_state(self, attr_id, attr_name, value): + """Handle state update from cluster handler.""" + self.async_write_ha_state() + + async def _async_set_fan_mode(self, fan_mode: int) -> None: + """Set the fan mode for the fan.""" + await self._fan_cluster_handler.async_set_speed(fan_mode) + self.async_set_state(0, 
"fan_mode", fan_mode) + + +@GROUP_MATCH() +class FanGroup(BaseFan, ZhaGroupEntity): + """Representation of a fan group.""" + + _attr_translation_key: str = "fan_group" + + def __init__( + self, entity_ids: list[str], unique_id: str, group_id: int, zha_device, **kwargs + ) -> None: + """Initialize a fan group.""" + super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs) + self._available: bool = False + group = self.zha_device.gateway.get_group(self._group_id) + self._fan_cluster_handler = group.endpoint[hvac.Fan.cluster_id] + self._percentage = None + self._preset_mode = None + + @property + def percentage(self) -> int | None: + """Return the current speed percentage.""" + return self._percentage + + @property + def preset_mode(self) -> str | None: + """Return the current preset mode.""" + return self._preset_mode + + async def _async_set_fan_mode(self, fan_mode: int) -> None: + """Set the fan mode for the group.""" + + with wrap_zigpy_exceptions(): + await self._fan_cluster_handler.write_attributes({"fan_mode": fan_mode}) + + self.async_set_state(0, "fan_mode", fan_mode) + + async def async_update(self) -> None: + """Attempt to retrieve on off state from the fan.""" + all_states = [self.hass.states.get(x) for x in self._entity_ids] + states: list[State] = list(filter(None, all_states)) + percentage_states: list[State] = [ + state for state in states if state.attributes.get(ATTR_PERCENTAGE) + ] + preset_mode_states: list[State] = [ + state for state in states if state.attributes.get(ATTR_PRESET_MODE) + ] + self._available = any(state.state != STATE_UNAVAILABLE for state in states) + + if percentage_states: + self._percentage = percentage_states[0].attributes[ATTR_PERCENTAGE] + self._preset_mode = None + elif preset_mode_states: + self._preset_mode = preset_mode_states[0].attributes[ATTR_PRESET_MODE] + self._percentage = None + else: + self._percentage = None + self._preset_mode = None + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await self.async_update() + await super().async_added_to_hass() + + +IKEA_SPEED_RANGE = (1, 10) # off is not included +IKEA_PRESET_MODES_TO_NAME = { + 1: PRESET_MODE_AUTO, + 2: "Speed 1", + 3: "Speed 1.5", + 4: "Speed 2", + 5: "Speed 2.5", + 6: "Speed 3", + 7: "Speed 3.5", + 8: "Speed 4", + 9: "Speed 4.5", + 10: "Speed 5", +} + + +@MULTI_MATCH( + cluster_handler_names="ikea_airpurifier", + models={"STARKVIND Air purifier", "STARKVIND Air purifier table"}, +) +class IkeaFan(ZhaFan): + """Representation of an Ikea fan.""" + + def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs) -> None: + """Init this sensor.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._fan_cluster_handler = self.cluster_handlers.get("ikea_airpurifier") + + @property + def preset_modes_to_name(self) -> dict[int, str]: + """Return a dict from preset mode to name.""" + return IKEA_PRESET_MODES_TO_NAME + + @property + def speed_range(self) -> tuple[int, int]: + """Return the range of speeds the fan supports. 
Off is not included.""" + return IKEA_SPEED_RANGE + + @property + def default_on_percentage(self) -> int: + """Return the default on percentage.""" + return int( + (100 / self.speed_count) * self.preset_name_to_mode[PRESET_MODE_AUTO] + ) + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_FAN, + models={"HBUniversalCFRemote", "HDC52EastwindFan"}, +) +class KofFan(ZhaFan): + """Representation of a fan made by King Of Fans.""" + + _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + + @property + def speed_range(self) -> tuple[int, int]: + """Return the range of speeds the fan supports. Off is not included.""" + return (1, 4) + + @property + def preset_modes_to_name(self) -> dict[int, str]: + """Return a dict from preset mode to name.""" + return {6: PRESET_MODE_SMART} diff --git a/homeassistant/components/zha/helpers.py b/homeassistant/components/zha/helpers.py deleted file mode 100644 index 2440e18cf53..00000000000 --- a/homeassistant/components/zha/helpers.py +++ /dev/null @@ -1,1344 +0,0 @@ -"""Helper functions for the ZHA integration.""" - -from __future__ import annotations - -import asyncio -import collections -from collections.abc import Awaitable, Callable, Coroutine, Mapping -import copy -import dataclasses -import enum -import functools -import itertools -import logging -import re -import time -from types import MappingProxyType -from typing import TYPE_CHECKING, Any, Concatenate, NamedTuple, cast -from zoneinfo import ZoneInfo - -import voluptuous as vol -from zha.application.const import ( - ATTR_CLUSTER_ID, - ATTR_DEVICE_IEEE, - ATTR_TYPE, - ATTR_UNIQUE_ID, - CLUSTER_TYPE_IN, - CLUSTER_TYPE_OUT, - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, - CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, - UNKNOWN_MANUFACTURER, - UNKNOWN_MODEL, - ZHA_CLUSTER_HANDLER_CFG_DONE, - ZHA_CLUSTER_HANDLER_MSG, - ZHA_CLUSTER_HANDLER_MSG_BIND, - ZHA_CLUSTER_HANDLER_MSG_CFG_RPT, - ZHA_CLUSTER_HANDLER_MSG_DATA, - ZHA_EVENT, - ZHA_GW_MSG, - ZHA_GW_MSG_DEVICE_FULL_INIT, - ZHA_GW_MSG_DEVICE_INFO, - ZHA_GW_MSG_DEVICE_JOINED, - ZHA_GW_MSG_DEVICE_REMOVED, - ZHA_GW_MSG_GROUP_ADDED, - ZHA_GW_MSG_GROUP_INFO, - ZHA_GW_MSG_GROUP_MEMBER_ADDED, - ZHA_GW_MSG_GROUP_MEMBER_REMOVED, - ZHA_GW_MSG_GROUP_REMOVED, - ZHA_GW_MSG_RAW_INIT, - RadioType, -) -from zha.application.gateway import ( - ConnectionLostEvent, - DeviceFullInitEvent, - DeviceJoinedEvent, - DeviceLeftEvent, - DeviceRemovedEvent, - Gateway, - GroupEvent, - RawDeviceInitializedEvent, -) -from zha.application.helpers import ( - AlarmControlPanelOptions, - CoordinatorConfiguration, - DeviceOptions, - DeviceOverridesConfiguration, - LightOptions, - QuirksConfiguration, - ZHAConfiguration, - ZHAData, -) -from zha.application.platforms import GroupEntity, PlatformEntity -from zha.event import EventBase -from zha.exceptions import ZHAException -from zha.mixins import LogMixin -from zha.zigbee.cluster_handlers import ClusterBindEvent, ClusterConfigureReportingEvent -from zha.zigbee.device import ClusterHandlerConfigurationComplete, Device, ZHAEvent -from zha.zigbee.group import Group, GroupInfo, GroupMember -from zigpy.config import ( - CONF_DATABASE, - CONF_DEVICE, - CONF_DEVICE_PATH, - CONF_NWK, - CONF_NWK_CHANNEL, -) -import zigpy.exceptions -from zigpy.profiles import PROFILES -import zigpy.types -from zigpy.types import EUI64 -import zigpy.util -import zigpy.zcl -from zigpy.zcl.foundation import CommandSchema - -from homeassistant import __path__ as HOMEASSISTANT_PATH -from 
homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( - is_multiprotocol_url, -) -from homeassistant.components.system_log import LogEntry -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_AREA_ID, - ATTR_DEVICE_ID, - ATTR_ENTITY_ID, - ATTR_MODEL, - ATTR_NAME, - Platform, -) -from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import ( - config_validation as cv, - device_registry as dr, - entity_registry as er, -) -from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType - -from .const import ( - ATTR_ACTIVE_COORDINATOR, - ATTR_ATTRIBUTES, - ATTR_AVAILABLE, - ATTR_CLUSTER_NAME, - ATTR_DEVICE_TYPE, - ATTR_ENDPOINT_NAMES, - ATTR_IEEE, - ATTR_LAST_SEEN, - ATTR_LQI, - ATTR_MANUFACTURER, - ATTR_MANUFACTURER_CODE, - ATTR_NEIGHBORS, - ATTR_NWK, - ATTR_POWER_SOURCE, - ATTR_QUIRK_APPLIED, - ATTR_QUIRK_CLASS, - ATTR_QUIRK_ID, - ATTR_ROUTES, - ATTR_RSSI, - ATTR_SIGNATURE, - ATTR_SUCCESS, - CONF_ALARM_ARM_REQUIRES_CODE, - CONF_ALARM_FAILED_TRIES, - CONF_ALARM_MASTER_CODE, - CONF_BAUDRATE, - CONF_CONSIDER_UNAVAILABLE_BATTERY, - CONF_CONSIDER_UNAVAILABLE_MAINS, - CONF_CUSTOM_QUIRKS_PATH, - CONF_DEFAULT_LIGHT_TRANSITION, - CONF_DEVICE_CONFIG, - CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, - CONF_ENABLE_IDENTIFY_ON_JOIN, - CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, - CONF_ENABLE_MAINS_STARTUP_POLLING, - CONF_ENABLE_QUIRKS, - CONF_FLOW_CONTROL, - CONF_GROUP_MEMBERS_ASSUME_STATE, - CONF_RADIO_TYPE, - CONF_ZIGPY, - CUSTOM_CONFIGURATION, - DATA_ZHA, - DEFAULT_DATABASE_NAME, - DEVICE_PAIRING_STATUS, - DOMAIN, - ZHA_ALARM_OPTIONS, - ZHA_OPTIONS, -) - -if TYPE_CHECKING: - from logging import Filter, LogRecord - - from .entity import ZHAEntity - from .update import ZHAFirmwareUpdateCoordinator - - type _LogFilterType = Filter | Callable[[LogRecord], bool] - -_LOGGER = logging.getLogger(__name__) - -DEBUG_COMP_BELLOWS = "bellows" -DEBUG_COMP_ZHA = "homeassistant.components.zha" -DEBUG_LIB_ZHA = "zha" -DEBUG_COMP_ZIGPY = "zigpy" -DEBUG_COMP_ZIGPY_ZNP = "zigpy_znp" -DEBUG_COMP_ZIGPY_DECONZ = "zigpy_deconz" -DEBUG_COMP_ZIGPY_XBEE = "zigpy_xbee" -DEBUG_COMP_ZIGPY_ZIGATE = "zigpy_zigate" -DEBUG_LEVEL_CURRENT = "current" -DEBUG_LEVEL_ORIGINAL = "original" -DEBUG_LEVELS = { - DEBUG_COMP_BELLOWS: logging.DEBUG, - DEBUG_COMP_ZHA: logging.DEBUG, - DEBUG_COMP_ZIGPY: logging.DEBUG, - DEBUG_COMP_ZIGPY_ZNP: logging.DEBUG, - DEBUG_COMP_ZIGPY_DECONZ: logging.DEBUG, - DEBUG_COMP_ZIGPY_XBEE: logging.DEBUG, - DEBUG_COMP_ZIGPY_ZIGATE: logging.DEBUG, - DEBUG_LIB_ZHA: logging.DEBUG, -} -DEBUG_RELAY_LOGGERS = [DEBUG_COMP_ZHA, DEBUG_COMP_ZIGPY, DEBUG_LIB_ZHA] -ZHA_GW_MSG_LOG_ENTRY = "log_entry" -ZHA_GW_MSG_LOG_OUTPUT = "log_output" -SIGNAL_REMOVE_ENTITIES = "zha_remove_entities" -GROUP_ENTITY_DOMAINS = [Platform.LIGHT, Platform.SWITCH, Platform.FAN] -SIGNAL_ADD_ENTITIES = "zha_add_entities" -ENTITIES = "entities" - -RX_ON_WHEN_IDLE = "rx_on_when_idle" -RELATIONSHIP = "relationship" -EXTENDED_PAN_ID = "extended_pan_id" -PERMIT_JOINING = "permit_joining" -DEPTH = "depth" - -DEST_NWK = "dest_nwk" -ROUTE_STATUS = "route_status" -MEMORY_CONSTRAINED = "memory_constrained" -MANY_TO_ONE = "many_to_one" -ROUTE_RECORD_REQUIRED = "route_record_required" -NEXT_HOP = "next_hop" - -USER_GIVEN_NAME = "user_given_name" -DEVICE_REG_ID = "device_reg_id" - - -class 
GroupEntityReference(NamedTuple): - """Reference to a group entity.""" - - name: str | None - original_name: str | None - entity_id: str - - -class ZHAGroupProxy(LogMixin): - """Proxy class to interact with the ZHA group instances.""" - - def __init__(self, group: Group, gateway_proxy: ZHAGatewayProxy) -> None: - """Initialize the gateway proxy.""" - self.group: Group = group - self.gateway_proxy: ZHAGatewayProxy = gateway_proxy - - @property - def group_info(self) -> dict[str, Any]: - """Return a group description for group.""" - return { - "name": self.group.name, - "group_id": self.group.group_id, - "members": [ - { - "endpoint_id": member.endpoint_id, - "device": self.gateway_proxy.device_proxies[ - member.device.ieee - ].zha_device_info, - "entities": [e._asdict() for e in self.associated_entities(member)], - } - for member in self.group.members - ], - } - - def associated_entities(self, member: GroupMember) -> list[GroupEntityReference]: - """Return the list of entities that were derived from this endpoint.""" - entity_registry = er.async_get(self.gateway_proxy.hass) - entity_refs: collections.defaultdict[EUI64, list[EntityReference]] = ( - self.gateway_proxy.ha_entity_refs - ) - - entity_info = [] - - for entity_ref in entity_refs.get(member.device.ieee): # type: ignore[union-attr] - if not entity_ref.entity_data.is_group_entity: - continue - entity = entity_registry.async_get(entity_ref.ha_entity_id) - - if ( - entity is None - or entity_ref.entity_data.group_proxy is None - or entity_ref.entity_data.group_proxy.group.group_id - != member.group.group_id - ): - continue - - entity_info.append( - GroupEntityReference( - name=entity.name, - original_name=entity.original_name, - entity_id=entity_ref.ha_entity_id, - ) - ) - - return entity_info - - def log(self, level: int, msg: str, *args: Any, **kwargs) -> None: - """Log a message.""" - msg = f"[%s](%s): {msg}" - args = ( - f"0x{self.group.group_id:04x}", - self.group.endpoint.endpoint_id, - *args, - ) - _LOGGER.log(level, msg, *args, **kwargs) - - -class ZHADeviceProxy(EventBase): - """Proxy class to interact with the ZHA device instances.""" - - _ha_device_id: str - - def __init__(self, device: Device, gateway_proxy: ZHAGatewayProxy) -> None: - """Initialize the gateway proxy.""" - super().__init__() - self.device = device - self.gateway_proxy = gateway_proxy - self._unsubs: list[Callable[[], None]] = [] - self._unsubs.append(self.device.on_all_events(self._handle_event_protocol)) - - @property - def device_id(self) -> str: - """Return the HA device registry device id.""" - return self._ha_device_id - - @device_id.setter - def device_id(self, device_id: str) -> None: - """Set the HA device registry device id.""" - self._ha_device_id = device_id - - @property - def device_info(self) -> dict[str, Any]: - """Return a device description for device.""" - ieee = str(self.device.ieee) - time_struct = time.localtime(self.device.last_seen) - update_time = time.strftime("%Y-%m-%dT%H:%M:%S", time_struct) - return { - ATTR_IEEE: ieee, - ATTR_NWK: self.device.nwk, - ATTR_MANUFACTURER: self.device.manufacturer, - ATTR_MODEL: self.device.model, - ATTR_NAME: self.device.name or ieee, - ATTR_QUIRK_APPLIED: self.device.quirk_applied, - ATTR_QUIRK_CLASS: self.device.quirk_class, - ATTR_QUIRK_ID: self.device.quirk_id, - ATTR_MANUFACTURER_CODE: self.device.manufacturer_code, - ATTR_POWER_SOURCE: self.device.power_source, - ATTR_LQI: self.device.lqi, - ATTR_RSSI: self.device.rssi, - ATTR_LAST_SEEN: update_time, - ATTR_AVAILABLE: self.device.available, - 
ATTR_DEVICE_TYPE: self.device.device_type, - ATTR_SIGNATURE: self.device.zigbee_signature, - } - - @property - def zha_device_info(self) -> dict[str, Any]: - """Get ZHA device information.""" - device_info: dict[str, Any] = {} - device_info.update(self.device_info) - device_info[ATTR_ACTIVE_COORDINATOR] = self.device.is_active_coordinator - device_info[ENTITIES] = [ - { - ATTR_ENTITY_ID: entity_ref.ha_entity_id, - ATTR_NAME: entity_ref.ha_device_info[ATTR_NAME], - } - for entity_ref in self.gateway_proxy.ha_entity_refs[self.device.ieee] - ] - - topology = self.gateway_proxy.gateway.application_controller.topology - device_info[ATTR_NEIGHBORS] = [ - { - ATTR_DEVICE_TYPE: neighbor.device_type.name, - RX_ON_WHEN_IDLE: neighbor.rx_on_when_idle.name, - RELATIONSHIP: neighbor.relationship.name, - EXTENDED_PAN_ID: str(neighbor.extended_pan_id), - ATTR_IEEE: str(neighbor.ieee), - ATTR_NWK: str(neighbor.nwk), - PERMIT_JOINING: neighbor.permit_joining.name, - DEPTH: str(neighbor.depth), - ATTR_LQI: str(neighbor.lqi), - } - for neighbor in topology.neighbors[self.device.ieee] - ] - - device_info[ATTR_ROUTES] = [ - { - DEST_NWK: str(route.DstNWK), - ROUTE_STATUS: str(route.RouteStatus.name), - MEMORY_CONSTRAINED: bool(route.MemoryConstrained), - MANY_TO_ONE: bool(route.ManyToOne), - ROUTE_RECORD_REQUIRED: bool(route.RouteRecordRequired), - NEXT_HOP: str(route.NextHop), - } - for route in topology.routes[self.device.ieee] - ] - - # Return endpoint device type Names - names: list[dict[str, str]] = [] - for endpoint in ( - ep for epid, ep in self.device.device.endpoints.items() if epid - ): - profile = PROFILES.get(endpoint.profile_id) - if profile and endpoint.device_type is not None: - # DeviceType provides undefined enums - names.append({ATTR_NAME: profile.DeviceType(endpoint.device_type).name}) - else: - names.append( - { - ATTR_NAME: ( - f"unknown {endpoint.device_type} device_type " - f"of 0x{(endpoint.profile_id or 0xFFFF):04x} profile id" - ) - } - ) - device_info[ATTR_ENDPOINT_NAMES] = names - - device_registry = dr.async_get(self.gateway_proxy.hass) - reg_device = device_registry.async_get(self.device_id) - if reg_device is not None: - device_info[USER_GIVEN_NAME] = reg_device.name_by_user - device_info[DEVICE_REG_ID] = reg_device.id - device_info[ATTR_AREA_ID] = reg_device.area_id - return device_info - - @callback - def handle_zha_event(self, zha_event: ZHAEvent) -> None: - """Handle a ZHA event.""" - self.gateway_proxy.hass.bus.async_fire( - ZHA_EVENT, - { - ATTR_DEVICE_IEEE: str(zha_event.device_ieee), - ATTR_UNIQUE_ID: zha_event.unique_id, - ATTR_DEVICE_ID: self.device_id, - **zha_event.data, - }, - ) - - @callback - def handle_zha_channel_configure_reporting( - self, event: ClusterConfigureReportingEvent - ) -> None: - """Handle a ZHA cluster configure reporting event.""" - async_dispatcher_send( - self.gateway_proxy.hass, - ZHA_CLUSTER_HANDLER_MSG, - { - ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_CFG_RPT, - ZHA_CLUSTER_HANDLER_MSG_DATA: { - ATTR_CLUSTER_NAME: event.cluster_name, - ATTR_CLUSTER_ID: event.cluster_id, - ATTR_ATTRIBUTES: event.attributes, - }, - }, - ) - - @callback - def handle_zha_channel_cfg_done( - self, event: ClusterHandlerConfigurationComplete - ) -> None: - """Handle a ZHA cluster configure reporting event.""" - async_dispatcher_send( - self.gateway_proxy.hass, - ZHA_CLUSTER_HANDLER_MSG, - { - ATTR_TYPE: ZHA_CLUSTER_HANDLER_CFG_DONE, - }, - ) - - @callback - def handle_zha_channel_bind(self, event: ClusterBindEvent) -> None: - """Handle a ZHA cluster bind event.""" - 
async_dispatcher_send( - self.gateway_proxy.hass, - ZHA_CLUSTER_HANDLER_MSG, - { - ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_BIND, - ZHA_CLUSTER_HANDLER_MSG_DATA: { - ATTR_CLUSTER_NAME: event.cluster_name, - ATTR_CLUSTER_ID: event.cluster_id, - ATTR_SUCCESS: event.success, - }, - }, - ) - - -class EntityReference(NamedTuple): - """Describes an entity reference.""" - - ha_entity_id: str - entity_data: EntityData - ha_device_info: dr.DeviceInfo - remove_future: asyncio.Future[Any] - - -class ZHAGatewayProxy(EventBase): - """Proxy class to interact with the ZHA gateway.""" - - def __init__( - self, hass: HomeAssistant, config_entry: ConfigEntry, gateway: Gateway - ) -> None: - """Initialize the gateway proxy.""" - super().__init__() - self.hass = hass - self.config_entry = config_entry - self.gateway = gateway - self.device_proxies: dict[EUI64, ZHADeviceProxy] = {} - self.group_proxies: dict[int, ZHAGroupProxy] = {} - self._ha_entity_refs: collections.defaultdict[EUI64, list[EntityReference]] = ( - collections.defaultdict(list) - ) - self._log_levels: dict[str, dict[str, int]] = { - DEBUG_LEVEL_ORIGINAL: async_capture_log_levels(), - DEBUG_LEVEL_CURRENT: async_capture_log_levels(), - } - self.debug_enabled: bool = False - self._log_relay_handler: LogRelayHandler = LogRelayHandler(hass, self) - self._unsubs: list[Callable[[], None]] = [] - self._unsubs.append(self.gateway.on_all_events(self._handle_event_protocol)) - self._reload_task: asyncio.Task | None = None - config_entry.async_on_unload( - self.hass.bus.async_listen( - er.EVENT_ENTITY_REGISTRY_UPDATED, - self._handle_entity_registry_updated, - ) - ) - - @property - def ha_entity_refs(self) -> collections.defaultdict[EUI64, list[EntityReference]]: - """Return entities by ieee.""" - return self._ha_entity_refs - - def register_entity_reference( - self, - ha_entity_id: str, - entity_data: EntityData, - ha_device_info: dr.DeviceInfo, - remove_future: asyncio.Future[Any], - ) -> None: - """Record the creation of a hass entity associated with ieee.""" - self._ha_entity_refs[entity_data.device_proxy.device.ieee].append( - EntityReference( - ha_entity_id=ha_entity_id, - entity_data=entity_data, - ha_device_info=ha_device_info, - remove_future=remove_future, - ) - ) - - async def _handle_entity_registry_updated( - self, event: Event[er.EventEntityRegistryUpdatedData] - ) -> None: - """Handle when entity registry updated.""" - entity_id = event.data["entity_id"] - entity_entry: er.RegistryEntry | None = er.async_get(self.hass).async_get( - entity_id - ) - if ( - entity_entry is None - or entity_entry.config_entry_id != self.config_entry.entry_id - or entity_entry.device_id is None - ): - return - device_entry: dr.DeviceEntry | None = dr.async_get(self.hass).async_get( - entity_entry.device_id - ) - assert device_entry - - ieee_address = next( - identifier - for domain, identifier in device_entry.identifiers - if domain == DOMAIN - ) - assert ieee_address - - ieee = EUI64.convert(ieee_address) - - assert ieee in self.device_proxies - - zha_device_proxy = self.device_proxies[ieee] - entity_key = (entity_entry.domain, entity_entry.unique_id) - if entity_key not in zha_device_proxy.device.platform_entities: - return - platform_entity = zha_device_proxy.device.platform_entities[entity_key] - if entity_entry.disabled: - platform_entity.disable() - else: - platform_entity.enable() - - async def async_initialize_devices_and_entities(self) -> None: - """Initialize devices and entities.""" - for device in self.gateway.devices.values(): - device_proxy = 
self._async_get_or_create_device_proxy(device) - self._create_entity_metadata(device_proxy) - for group in self.gateway.groups.values(): - group_proxy = self._async_get_or_create_group_proxy(group) - self._create_entity_metadata(group_proxy) - - await self.gateway.async_initialize_devices_and_entities() - - @callback - def handle_connection_lost(self, event: ConnectionLostEvent) -> None: - """Handle a connection lost event.""" - - _LOGGER.debug("Connection to the radio was lost: %r", event) - - # Ensure we do not queue up multiple resets - if self._reload_task is not None: - _LOGGER.debug("Ignoring reset, one is already running") - return - - self._reload_task = self.hass.async_create_task( - self.hass.config_entries.async_reload(self.config_entry.entry_id), - ) - - @callback - def handle_device_joined(self, event: DeviceJoinedEvent) -> None: - """Handle a device joined event.""" - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_DEVICE_JOINED, - ZHA_GW_MSG_DEVICE_INFO: { - ATTR_NWK: event.device_info.nwk, - ATTR_IEEE: str(event.device_info.ieee), - DEVICE_PAIRING_STATUS: event.device_info.pairing_status.name, - }, - }, - ) - - @callback - def handle_device_removed(self, event: DeviceRemovedEvent) -> None: - """Handle a device removed event.""" - zha_device_proxy = self.device_proxies.pop(event.device_info.ieee, None) - entity_refs = self._ha_entity_refs.pop(event.device_info.ieee, None) - if zha_device_proxy is not None: - device_info = zha_device_proxy.zha_device_info - # zha_device_proxy.async_cleanup_handles() - async_dispatcher_send( - self.hass, - f"{SIGNAL_REMOVE_ENTITIES}_{zha_device_proxy.device.ieee!s}", - ) - self.hass.async_create_task( - self._async_remove_device(zha_device_proxy, entity_refs), - "ZHAGateway._async_remove_device", - ) - if device_info is not None: - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_DEVICE_REMOVED, - ZHA_GW_MSG_DEVICE_INFO: device_info, - }, - ) - - @callback - def handle_device_left(self, event: DeviceLeftEvent) -> None: - """Handle a device left event.""" - - @callback - def handle_raw_device_initialized(self, event: RawDeviceInitializedEvent) -> None: - """Handle a raw device initialized event.""" - manuf = event.device_info.manufacturer - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_RAW_INIT, - ZHA_GW_MSG_DEVICE_INFO: { - ATTR_NWK: str(event.device_info.nwk), - ATTR_IEEE: str(event.device_info.ieee), - DEVICE_PAIRING_STATUS: event.device_info.pairing_status.name, - ATTR_MODEL: ( - event.device_info.model - if event.device_info.model - else UNKNOWN_MODEL - ), - ATTR_MANUFACTURER: manuf if manuf else UNKNOWN_MANUFACTURER, - ATTR_SIGNATURE: event.device_info.signature, - }, - }, - ) - - @callback - def handle_device_fully_initialized(self, event: DeviceFullInitEvent) -> None: - """Handle a device fully initialized event.""" - zha_device = self.gateway.get_device(event.device_info.ieee) - zha_device_proxy = self._async_get_or_create_device_proxy(zha_device) - - device_info = zha_device_proxy.zha_device_info - device_info[DEVICE_PAIRING_STATUS] = event.device_info.pairing_status.name - if event.new_join: - self._create_entity_metadata(zha_device_proxy) - async_dispatcher_send(self.hass, SIGNAL_ADD_ENTITIES) - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_DEVICE_FULL_INIT, - ZHA_GW_MSG_DEVICE_INFO: device_info, - }, - ) - - @callback - def handle_group_member_removed(self, event: GroupEvent) -> None: - """Handle a group member 
removed event.""" - zha_group_proxy = self._async_get_or_create_group_proxy(event.group_info) - zha_group_proxy.info("group_member_removed - group_info: %s", event.group_info) - self._update_group_entities(event) - self._send_group_gateway_message( - zha_group_proxy, ZHA_GW_MSG_GROUP_MEMBER_REMOVED - ) - - @callback - def handle_group_member_added(self, event: GroupEvent) -> None: - """Handle a group member added event.""" - zha_group_proxy = self._async_get_or_create_group_proxy(event.group_info) - zha_group_proxy.info("group_member_added - group_info: %s", event.group_info) - self._send_group_gateway_message(zha_group_proxy, ZHA_GW_MSG_GROUP_MEMBER_ADDED) - self._update_group_entities(event) - - @callback - def handle_group_added(self, event: GroupEvent) -> None: - """Handle a group added event.""" - zha_group_proxy = self._async_get_or_create_group_proxy(event.group_info) - zha_group_proxy.info("group_added") - self._update_group_entities(event) - self._send_group_gateway_message(zha_group_proxy, ZHA_GW_MSG_GROUP_ADDED) - - @callback - def handle_group_removed(self, event: GroupEvent) -> None: - """Handle a group removed event.""" - zha_group_proxy = self.group_proxies.pop(event.group_info.group_id) - self._send_group_gateway_message(zha_group_proxy, ZHA_GW_MSG_GROUP_REMOVED) - zha_group_proxy.info("group_removed") - self._cleanup_group_entity_registry_entries(zha_group_proxy) - - @callback - def async_enable_debug_mode(self, filterer: _LogFilterType | None = None) -> None: - """Enable debug mode for ZHA.""" - self._log_levels[DEBUG_LEVEL_ORIGINAL] = async_capture_log_levels() - async_set_logger_levels(DEBUG_LEVELS) - self._log_levels[DEBUG_LEVEL_CURRENT] = async_capture_log_levels() - - if filterer: - self._log_relay_handler.addFilter(filterer) - - for logger_name in DEBUG_RELAY_LOGGERS: - logging.getLogger(logger_name).addHandler(self._log_relay_handler) - - self.debug_enabled = True - - @callback - def async_disable_debug_mode(self, filterer: _LogFilterType | None = None) -> None: - """Disable debug mode for ZHA.""" - async_set_logger_levels(self._log_levels[DEBUG_LEVEL_ORIGINAL]) - self._log_levels[DEBUG_LEVEL_CURRENT] = async_capture_log_levels() - for logger_name in DEBUG_RELAY_LOGGERS: - logging.getLogger(logger_name).removeHandler(self._log_relay_handler) - if filterer: - self._log_relay_handler.removeFilter(filterer) - self.debug_enabled = False - - async def shutdown(self) -> None: - """Shutdown the gateway proxy.""" - for unsub in self._unsubs: - unsub() - await self.gateway.shutdown() - - def get_device_proxy(self, ieee: EUI64) -> ZHADeviceProxy | None: - """Return ZHADevice for given ieee.""" - return self.device_proxies.get(ieee) - - def get_group_proxy(self, group_id: int | str) -> ZHAGroupProxy | None: - """Return Group for given group id.""" - if isinstance(group_id, str): - for group_proxy in self.group_proxies.values(): - if group_proxy.group.name == group_id: - return group_proxy - return None - return self.group_proxies.get(group_id) - - def get_entity_reference(self, entity_id: str) -> EntityReference | None: - """Return entity reference for given entity_id if found.""" - for entity_reference in itertools.chain.from_iterable( - self.ha_entity_refs.values() - ): - if entity_id == entity_reference.ha_entity_id: - return entity_reference - return None - - def remove_entity_reference(self, entity: ZHAEntity) -> None: - """Remove entity reference for given entity_id if found.""" - if entity.zha_device.ieee in self.ha_entity_refs: - entity_refs = 
self.ha_entity_refs.get(entity.zha_device.ieee) - self.ha_entity_refs[entity.zha_device.ieee] = [ - e - for e in entity_refs # type: ignore[union-attr] - if e.ha_entity_id != entity.entity_id - ] - - def _async_get_or_create_device_proxy(self, zha_device: Device) -> ZHADeviceProxy: - """Get or create a ZHA device.""" - if (zha_device_proxy := self.device_proxies.get(zha_device.ieee)) is None: - zha_device_proxy = ZHADeviceProxy(zha_device, self) - self.device_proxies[zha_device_proxy.device.ieee] = zha_device_proxy - - device_registry = dr.async_get(self.hass) - device_registry_device = device_registry.async_get_or_create( - config_entry_id=self.config_entry.entry_id, - connections={(dr.CONNECTION_ZIGBEE, str(zha_device.ieee))}, - identifiers={(DOMAIN, str(zha_device.ieee))}, - name=zha_device.name, - manufacturer=zha_device.manufacturer, - model=zha_device.model, - ) - zha_device_proxy.device_id = device_registry_device.id - return zha_device_proxy - - def _async_get_or_create_group_proxy(self, group_info: GroupInfo) -> ZHAGroupProxy: - """Get or create a ZHA group.""" - zha_group_proxy = self.group_proxies.get(group_info.group_id) - if zha_group_proxy is None: - zha_group_proxy = ZHAGroupProxy( - self.gateway.groups[group_info.group_id], self - ) - self.group_proxies[group_info.group_id] = zha_group_proxy - return zha_group_proxy - - def _create_entity_metadata( - self, proxy_object: ZHADeviceProxy | ZHAGroupProxy - ) -> None: - """Create HA entity metadata.""" - ha_zha_data = get_zha_data(self.hass) - coordinator_proxy = self.device_proxies[ - self.gateway.coordinator_zha_device.ieee - ] - - if isinstance(proxy_object, ZHADeviceProxy): - for entity in proxy_object.device.platform_entities.values(): - ha_zha_data.platforms[Platform(entity.PLATFORM)].append( - EntityData( - entity=entity, device_proxy=proxy_object, group_proxy=None - ) - ) - else: - for entity in proxy_object.group.group_entities.values(): - ha_zha_data.platforms[Platform(entity.PLATFORM)].append( - EntityData( - entity=entity, - device_proxy=coordinator_proxy, - group_proxy=proxy_object, - ) - ) - - def _cleanup_group_entity_registry_entries( - self, zha_group_proxy: ZHAGroupProxy - ) -> None: - """Remove entity registry entries for group entities when the groups are removed from HA.""" - # first we collect the potential unique ids for entities that could be created from this group - possible_entity_unique_ids = [ - f"{domain}_zha_group_0x{zha_group_proxy.group.group_id:04x}" - for domain in GROUP_ENTITY_DOMAINS - ] - - # then we get all group entity entries tied to the coordinator - entity_registry = er.async_get(self.hass) - assert self.gateway.coordinator_zha_device - coordinator_proxy = self.device_proxies[ - self.gateway.coordinator_zha_device.ieee - ] - all_group_entity_entries = er.async_entries_for_device( - entity_registry, - coordinator_proxy.device_id, - include_disabled_entities=True, - ) - - # then we get the entity entries for this specific group - # by getting the entries that match - entries_to_remove = [ - entry - for entry in all_group_entity_entries - if entry.unique_id in possible_entity_unique_ids - ] - - # then we remove the entries from the entity registry - for entry in entries_to_remove: - _LOGGER.debug( - "cleaning up entity registry entry for entity: %s", entry.entity_id - ) - entity_registry.async_remove(entry.entity_id) - - def _update_group_entities(self, group_event: GroupEvent) -> None: - """Update group entities when a group event is received.""" - async_dispatcher_send( - self.hass, - 
f"{SIGNAL_REMOVE_ENTITIES}_group_{group_event.group_info.group_id}", - ) - self._create_entity_metadata( - self.group_proxies[group_event.group_info.group_id] - ) - async_dispatcher_send(self.hass, SIGNAL_ADD_ENTITIES) - - def _send_group_gateway_message( - self, zha_group_proxy: ZHAGroupProxy, gateway_message_type: str - ) -> None: - """Send the gateway event for a zigpy group event.""" - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: gateway_message_type, - ZHA_GW_MSG_GROUP_INFO: zha_group_proxy.group_info, - }, - ) - - async def _async_remove_device( - self, device: ZHADeviceProxy, entity_refs: list[EntityReference] | None - ) -> None: - if entity_refs is not None: - remove_tasks: list[asyncio.Future[Any]] = [ - entity_ref.remove_future for entity_ref in entity_refs - ] - if remove_tasks: - await asyncio.wait(remove_tasks) - - device_registry = dr.async_get(self.hass) - reg_device = device_registry.async_get(device.device_id) - if reg_device is not None: - device_registry.async_remove_device(reg_device.id) - - -@callback -def async_capture_log_levels() -> dict[str, int]: - """Capture current logger levels for ZHA.""" - return { - DEBUG_COMP_BELLOWS: logging.getLogger(DEBUG_COMP_BELLOWS).getEffectiveLevel(), - DEBUG_COMP_ZHA: logging.getLogger(DEBUG_COMP_ZHA).getEffectiveLevel(), - DEBUG_COMP_ZIGPY: logging.getLogger(DEBUG_COMP_ZIGPY).getEffectiveLevel(), - DEBUG_COMP_ZIGPY_ZNP: logging.getLogger( - DEBUG_COMP_ZIGPY_ZNP - ).getEffectiveLevel(), - DEBUG_COMP_ZIGPY_DECONZ: logging.getLogger( - DEBUG_COMP_ZIGPY_DECONZ - ).getEffectiveLevel(), - DEBUG_COMP_ZIGPY_XBEE: logging.getLogger( - DEBUG_COMP_ZIGPY_XBEE - ).getEffectiveLevel(), - DEBUG_COMP_ZIGPY_ZIGATE: logging.getLogger( - DEBUG_COMP_ZIGPY_ZIGATE - ).getEffectiveLevel(), - DEBUG_LIB_ZHA: logging.getLogger(DEBUG_LIB_ZHA).getEffectiveLevel(), - } - - -@callback -def async_set_logger_levels(levels: dict[str, int]) -> None: - """Set logger levels for ZHA.""" - logging.getLogger(DEBUG_COMP_BELLOWS).setLevel(levels[DEBUG_COMP_BELLOWS]) - logging.getLogger(DEBUG_COMP_ZHA).setLevel(levels[DEBUG_COMP_ZHA]) - logging.getLogger(DEBUG_COMP_ZIGPY).setLevel(levels[DEBUG_COMP_ZIGPY]) - logging.getLogger(DEBUG_COMP_ZIGPY_ZNP).setLevel(levels[DEBUG_COMP_ZIGPY_ZNP]) - logging.getLogger(DEBUG_COMP_ZIGPY_DECONZ).setLevel(levels[DEBUG_COMP_ZIGPY_DECONZ]) - logging.getLogger(DEBUG_COMP_ZIGPY_XBEE).setLevel(levels[DEBUG_COMP_ZIGPY_XBEE]) - logging.getLogger(DEBUG_COMP_ZIGPY_ZIGATE).setLevel(levels[DEBUG_COMP_ZIGPY_ZIGATE]) - logging.getLogger(DEBUG_LIB_ZHA).setLevel(levels[DEBUG_LIB_ZHA]) - - -class LogRelayHandler(logging.Handler): - """Log handler for error messages.""" - - def __init__(self, hass: HomeAssistant, gateway: ZHAGatewayProxy) -> None: - """Initialize a new LogErrorHandler.""" - super().__init__() - self.hass = hass - self.gateway = gateway - hass_path: str = HOMEASSISTANT_PATH[0] - config_dir = self.hass.config.config_dir - self.paths_re = re.compile( - rf"(?:{re.escape(hass_path)}|{re.escape(config_dir)})/(.*)" - ) - - def emit(self, record: LogRecord) -> None: - """Relay log message via dispatcher.""" - entry = LogEntry( - record, self.paths_re, figure_out_source=record.levelno >= logging.WARNING - ) - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - {ATTR_TYPE: ZHA_GW_MSG_LOG_OUTPUT, ZHA_GW_MSG_LOG_ENTRY: entry.to_dict()}, - ) - - -@dataclasses.dataclass(kw_only=True, slots=True) -class HAZHAData: - """ZHA data stored in `hass.data`.""" - - yaml_config: ConfigType = dataclasses.field(default_factory=dict) - 
config_entry: ConfigEntry | None = dataclasses.field(default=None) - device_trigger_cache: dict[str, tuple[str, dict]] = dataclasses.field( - default_factory=dict - ) - gateway_proxy: ZHAGatewayProxy | None = dataclasses.field(default=None) - platforms: collections.defaultdict[Platform, list] = dataclasses.field( - default_factory=lambda: collections.defaultdict(list) - ) - update_coordinator: ZHAFirmwareUpdateCoordinator | None = dataclasses.field( - default=None - ) - - -@dataclasses.dataclass(kw_only=True, slots=True) -class EntityData: - """ZHA entity data.""" - - entity: PlatformEntity | GroupEntity - device_proxy: ZHADeviceProxy - group_proxy: ZHAGroupProxy | None = dataclasses.field(default=None) - - @property - def is_group_entity(self) -> bool: - """Return if this is a group entity.""" - return self.group_proxy is not None and isinstance(self.entity, GroupEntity) - - -def get_zha_data(hass: HomeAssistant) -> HAZHAData: - """Get the global ZHA data object.""" - if DATA_ZHA not in hass.data: - hass.data[DATA_ZHA] = HAZHAData() - - return hass.data[DATA_ZHA] - - -def get_zha_gateway(hass: HomeAssistant) -> Gateway: - """Get the ZHA gateway object.""" - if (gateway_proxy := get_zha_data(hass).gateway_proxy) is None: - raise ValueError("No gateway object exists") - - return gateway_proxy.gateway - - -def get_zha_gateway_proxy(hass: HomeAssistant) -> ZHAGatewayProxy: - """Get the ZHA gateway object.""" - if (gateway_proxy := get_zha_data(hass).gateway_proxy) is None: - raise ValueError("No gateway object exists") - - return gateway_proxy - - -def get_config_entry(hass: HomeAssistant) -> ConfigEntry: - """Get the ZHA gateway object.""" - if (gateway_proxy := get_zha_data(hass).gateway_proxy) is None: - raise ValueError("No gateway object exists to retrieve the config entry from.") - - return gateway_proxy.config_entry - - -@callback -def async_get_zha_device_proxy(hass: HomeAssistant, device_id: str) -> ZHADeviceProxy: - """Get a ZHA device for the given device registry id.""" - device_registry = dr.async_get(hass) - registry_device = device_registry.async_get(device_id) - if not registry_device: - _LOGGER.error("Device id `%s` not found in registry", device_id) - raise KeyError(f"Device id `{device_id}` not found in registry.") - zha_gateway_proxy = get_zha_gateway_proxy(hass) - ieee_address = next( - identifier - for domain, identifier in registry_device.identifiers - if domain == DOMAIN - ) - ieee = EUI64.convert(ieee_address) - return zha_gateway_proxy.device_proxies[ieee] - - -def cluster_command_schema_to_vol_schema(schema: CommandSchema) -> vol.Schema: - """Convert a cluster command schema to a voluptuous schema.""" - return vol.Schema( - { - ( - vol.Optional(field.name) if field.optional else vol.Required(field.name) - ): schema_type_to_vol(field.type) - for field in schema.fields - } - ) - - -def schema_type_to_vol(field_type: Any) -> Any: - """Convert a schema type to a voluptuous type.""" - if issubclass(field_type, enum.Flag) and field_type.__members__: - return cv.multi_select( - [key.replace("_", " ") for key in field_type.__members__] - ) - if issubclass(field_type, enum.Enum) and field_type.__members__: - return vol.In([key.replace("_", " ") for key in field_type.__members__]) - if ( - issubclass(field_type, zigpy.types.FixedIntType) - or issubclass(field_type, enum.Flag) - or issubclass(field_type, enum.Enum) - ): - return vol.All( - vol.Coerce(int), vol.Range(field_type.min_value, field_type.max_value) - ) - return str - - -def convert_to_zcl_values( - fields: dict[str, 
Any], schema: CommandSchema -) -> dict[str, Any]: - """Convert user input to ZCL values.""" - converted_fields: dict[str, Any] = {} - for field in schema.fields: - if field.name not in fields: - continue - value = fields[field.name] - if issubclass(field.type, enum.Flag) and isinstance(value, list): - new_value = 0 - - for flag in value: - if isinstance(flag, str): - new_value |= field.type[flag.replace(" ", "_")] - else: - new_value |= flag - - value = field.type(new_value) - elif issubclass(field.type, enum.Enum): - value = ( - field.type[value.replace(" ", "_")] - if isinstance(value, str) - else field.type(value) - ) - else: - value = field.type(value) - _LOGGER.debug( - "Converted ZCL schema field(%s) value from: %s to: %s", - field.name, - fields[field.name], - value, - ) - converted_fields[field.name] = value - return converted_fields - - -def async_cluster_exists(hass: HomeAssistant, cluster_id, skip_coordinator=True): - """Determine if a device containing the specified in cluster is paired.""" - zha_gateway = get_zha_gateway(hass) - zha_devices = zha_gateway.devices.values() - for zha_device in zha_devices: - if skip_coordinator and zha_device.is_coordinator: - continue - clusters_by_endpoint = zha_device.async_get_clusters() - for clusters in clusters_by_endpoint.values(): - if ( - cluster_id in clusters[CLUSTER_TYPE_IN] - or cluster_id in clusters[CLUSTER_TYPE_OUT] - ): - return True - return False - - -@callback -def async_add_entities( - _async_add_entities: AddEntitiesCallback, - entity_class: type[ZHAEntity], - entities: list[EntityData], - **kwargs, -) -> None: - """Add entities helper.""" - if not entities: - return - - entities_to_add: list[ZHAEntity] = [] - for entity_data in entities: - try: - entities_to_add.append(entity_class(entity_data)) - # broad exception to prevent a single entity from preventing an entire platform from loading - # this can potentially be caused by a misbehaving device or a bad quirk. 
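# Illustrative sketch (not part of this diff): schema_type_to_vol above exposes zigpy
# enum/flag members to the UI with underscores shown as spaces, and convert_to_zcl_values
# maps the selected names back, OR-ing flag members together. A stdlib-only round trip
# with a hypothetical flag standing in for a zigpy type (voluptuous omitted for brevity).
import enum


class StrobeOptions(enum.Flag):
    No_strobe = 1
    Use_strobe = 2


# UI side: member names rendered with spaces, as in schema_type_to_vol().
ui_choices = [name.replace("_", " ") for name in StrobeOptions.__members__]
assert ui_choices == ["No strobe", "Use strobe"]

# Conversion side: selected names mapped back and combined, as in convert_to_zcl_values().
value = StrobeOptions(0)
for choice in ["No strobe", "Use strobe"]:
    value |= StrobeOptions[choice.replace(" ", "_")]
assert value == StrobeOptions.No_strobe | StrobeOptions.Use_strobe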
Not ideal but the - # alternative is adding try/catch to each entity class __init__ method with a specific exception - except Exception: # noqa: BLE001 - _LOGGER.exception( - "Error while adding entity from entity data: %s", entity_data - ) - _async_add_entities(entities_to_add, update_before_add=False) - for entity in entities_to_add: - if not entity.enabled: - entity.entity_data.entity.disable() - entities.clear() - - -def _clean_serial_port_path(path: str) -> str: - """Clean the serial port path, applying corrections where necessary.""" - - if path.startswith("socket://"): - path = path.strip() - - # Removes extraneous brackets from IP addresses (they don't parse in CPython 3.11.4) - if re.match(r"^socket://\[\d+\.\d+\.\d+\.\d+\]:\d+$", path): - path = path.replace("[", "").replace("]", "") - - return path - - -CONF_ZHA_OPTIONS_SCHEMA = vol.Schema( - { - vol.Optional(CONF_DEFAULT_LIGHT_TRANSITION, default=0): vol.All( - vol.Coerce(float), vol.Range(min=0, max=2**16 / 10) - ), - vol.Required(CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, default=False): cv.boolean, - vol.Required(CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, default=True): cv.boolean, - vol.Required(CONF_GROUP_MEMBERS_ASSUME_STATE, default=True): cv.boolean, - vol.Required(CONF_ENABLE_IDENTIFY_ON_JOIN, default=True): cv.boolean, - vol.Optional( - CONF_CONSIDER_UNAVAILABLE_MAINS, - default=CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, - ): cv.positive_int, - vol.Optional( - CONF_CONSIDER_UNAVAILABLE_BATTERY, - default=CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, - ): cv.positive_int, - vol.Required(CONF_ENABLE_MAINS_STARTUP_POLLING, default=True): cv.boolean, - }, - extra=vol.REMOVE_EXTRA, -) - -CONF_ZHA_ALARM_SCHEMA = vol.Schema( - { - vol.Required(CONF_ALARM_MASTER_CODE, default="1234"): cv.string, - vol.Required(CONF_ALARM_FAILED_TRIES, default=3): cv.positive_int, - vol.Required(CONF_ALARM_ARM_REQUIRES_CODE, default=False): cv.boolean, - } -) - - -def create_zha_config(hass: HomeAssistant, ha_zha_data: HAZHAData) -> ZHAData: - """Create ZHA lib configuration from HA config objects.""" - - # ensure that we have the necessary HA configuration data - assert ha_zha_data.config_entry is not None - assert ha_zha_data.yaml_config is not None - - # Remove brackets around IP addresses, this no longer works in CPython 3.11.4 - # This will be removed in 2023.11.0 - path = ha_zha_data.config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] - cleaned_path = _clean_serial_port_path(path) - - if path != cleaned_path: - _LOGGER.debug("Cleaned serial port path %r -> %r", path, cleaned_path) - ha_zha_data.config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] = cleaned_path - hass.config_entries.async_update_entry( - ha_zha_data.config_entry, data=ha_zha_data.config_entry.data - ) - - # deep copy the yaml config to avoid modifying the original and to safely - # pass it to the ZHA library - app_config = copy.deepcopy(ha_zha_data.yaml_config.get(CONF_ZIGPY, {})) - database = ha_zha_data.yaml_config.get( - CONF_DATABASE, - hass.config.path(DEFAULT_DATABASE_NAME), - ) - app_config[CONF_DATABASE] = database - app_config[CONF_DEVICE] = ha_zha_data.config_entry.data[CONF_DEVICE] - - radio_type = RadioType[ha_zha_data.config_entry.data[CONF_RADIO_TYPE]] - - # Until we have a way to coordinate channels with the Thread half of multi-PAN, - # stick to the old zigpy default of channel 15 instead of dynamically scanning - if ( - is_multiprotocol_url(app_config[CONF_DEVICE][CONF_DEVICE_PATH]) - and app_config.get(CONF_NWK, {}).get(CONF_NWK_CHANNEL) is None - ): - 
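# Illustrative sketch (not part of this diff): _clean_serial_port_path above strips the
# square brackets that stop IPv4 "socket://" URLs from parsing in CPython 3.11.4+.
# The same regex shown standalone with example inputs; clean_path is a hypothetical name.
import re


def clean_path(path: str) -> str:
    if path.startswith("socket://"):
        path = path.strip()
        # Brackets around IPv4 addresses no longer parse in CPython 3.11.4
        if re.match(r"^socket://\[\d+\.\d+\.\d+\.\d+\]:\d+$", path):
            path = path.replace("[", "").replace("]", "")
    return path


assert clean_path("socket://[192.168.1.200]:6638") == "socket://192.168.1.200:6638"
assert clean_path("/dev/ttyUSB0") == "/dev/ttyUSB0"  # non-socket paths pass through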
app_config.setdefault(CONF_NWK, {})[CONF_NWK_CHANNEL] = 15 - - options: MappingProxyType[str, Any] = ha_zha_data.config_entry.options.get( - CUSTOM_CONFIGURATION, {} - ) - zha_options = CONF_ZHA_OPTIONS_SCHEMA(options.get(ZHA_OPTIONS, {})) - ha_acp_options = CONF_ZHA_ALARM_SCHEMA(options.get(ZHA_ALARM_OPTIONS, {})) - light_options: LightOptions = LightOptions( - default_light_transition=zha_options.get(CONF_DEFAULT_LIGHT_TRANSITION), - enable_enhanced_light_transition=zha_options.get( - CONF_ENABLE_ENHANCED_LIGHT_TRANSITION - ), - enable_light_transitioning_flag=zha_options.get( - CONF_ENABLE_LIGHT_TRANSITIONING_FLAG - ), - group_members_assume_state=zha_options.get(CONF_GROUP_MEMBERS_ASSUME_STATE), - ) - device_options: DeviceOptions = DeviceOptions( - enable_identify_on_join=zha_options.get(CONF_ENABLE_IDENTIFY_ON_JOIN), - consider_unavailable_mains=zha_options.get(CONF_CONSIDER_UNAVAILABLE_MAINS), - consider_unavailable_battery=zha_options.get(CONF_CONSIDER_UNAVAILABLE_BATTERY), - enable_mains_startup_polling=zha_options.get(CONF_ENABLE_MAINS_STARTUP_POLLING), - ) - acp_options: AlarmControlPanelOptions = AlarmControlPanelOptions( - master_code=ha_acp_options.get(CONF_ALARM_MASTER_CODE), - failed_tries=ha_acp_options.get(CONF_ALARM_FAILED_TRIES), - arm_requires_code=ha_acp_options.get(CONF_ALARM_ARM_REQUIRES_CODE), - ) - coord_config: CoordinatorConfiguration = CoordinatorConfiguration( - path=app_config[CONF_DEVICE][CONF_DEVICE_PATH], - baudrate=app_config[CONF_DEVICE][CONF_BAUDRATE], - flow_control=app_config[CONF_DEVICE][CONF_FLOW_CONTROL], - radio_type=radio_type.name, - ) - quirks_config: QuirksConfiguration = QuirksConfiguration( - enabled=ha_zha_data.yaml_config.get(CONF_ENABLE_QUIRKS, True), - custom_quirks_path=ha_zha_data.yaml_config.get(CONF_CUSTOM_QUIRKS_PATH), - ) - overrides_config: dict[str, DeviceOverridesConfiguration] = {} - overrides: dict[str, dict[str, Any]] = cast( - dict[str, dict[str, Any]], ha_zha_data.yaml_config.get(CONF_DEVICE_CONFIG) - ) - if overrides is not None: - for unique_id, override in overrides.items(): - overrides_config[unique_id] = DeviceOverridesConfiguration( - type=override["type"], - ) - - return ZHAData( - zigpy_config=app_config, - config=ZHAConfiguration( - light_options=light_options, - device_options=device_options, - alarm_control_panel_options=acp_options, - coordinator_configuration=coord_config, - quirks_configuration=quirks_config, - device_overrides=overrides_config, - ), - local_timezone=ZoneInfo(hass.config.time_zone), - ) - - -def convert_zha_error_to_ha_error[**_P, _EntityT: ZHAEntity]( - func: Callable[Concatenate[_EntityT, _P], Awaitable[None]], -) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]: - """Decorate ZHA commands and re-raises ZHAException as HomeAssistantError.""" - - @functools.wraps(func) - async def handler(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None: - try: - return await func(self, *args, **kwargs) - except ZHAException as err: - raise HomeAssistantError(err) from err - - return handler - - -def exclude_none_values(obj: Mapping[str, Any]) -> dict[str, Any]: - """Return a new dictionary excluding keys with None values.""" - return {k: v for k, v in obj.items() if v is not None} diff --git a/homeassistant/components/zha/icons.json b/homeassistant/components/zha/icons.json index 5b3b85ced39..9b060e8105a 100644 --- a/homeassistant/components/zha/icons.json +++ b/homeassistant/components/zha/icons.json @@ -45,15 +45,6 @@ "maximum_level": { "default": "mdi:brightness-percent" }, - 
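# Illustrative sketch (not part of this diff): convert_zha_error_to_ha_error in the
# helpers above wraps entity methods so library exceptions surface as HomeAssistantError.
# The same decorator shape with typing.ParamSpec and generic exception types so it runs
# outside Home Assistant; translate_errors and both demo exceptions are hypothetical.
import functools
from collections.abc import Awaitable, Callable
from typing import ParamSpec, TypeVar

P = ParamSpec("P")
T = TypeVar("T")


class LibraryError(Exception):
    """Stand-in for ZHAException."""


class FrontendError(Exception):
    """Stand-in for HomeAssistantError."""


def translate_errors(func: Callable[P, Awaitable[T]]) -> Callable[P, Awaitable[T]]:
    @functools.wraps(func)
    async def handler(*args: P.args, **kwargs: P.kwargs) -> T:
        try:
            return await func(*args, **kwargs)
        except LibraryError as err:
            # Re-raise with the original exception chained, as the helper above does.
            raise FrontendError(err) from err

    return handler


@translate_errors
async def demo() -> None:
    raise LibraryError("device did not respond")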
"default_level_local": { - "default": "mdi:brightness-percent" - }, - "default_level_remote": { - "default": "mdi:brightness-percent" - }, - "state_after_power_restored": { - "default": "mdi:brightness-percent" - }, "auto_off_timer": { "default": "mdi:timer" }, @@ -95,18 +86,6 @@ }, "presence_detection_timeout": { "default": "mdi:timer-edit" - }, - "exercise_trigger_time": { - "default": "mdi:clock" - }, - "external_temperature_sensor": { - "default": "mdi:thermometer" - }, - "load_room_mean": { - "default": "mdi:scale-balance" - }, - "regulation_setpoint_offset": { - "default": "mdi:thermostat" } }, "select": { @@ -115,9 +94,6 @@ }, "keypad_lockout": { "default": "mdi:lock" - }, - "exercise_day_of_week": { - "default": "mdi:wrench-clock" } }, "sensor": { @@ -156,15 +132,6 @@ }, "hooks_state": { "default": "mdi:hook" - }, - "open_window_detected": { - "default": "mdi:window-open" - }, - "load_estimate": { - "default": "mdi:scale-balance" - }, - "preheat_time": { - "default": "mdi:radiator" } }, "switch": { @@ -191,60 +158,21 @@ }, "hooks_locked": { "default": "mdi:lock" - }, - "external_window_sensor": { - "default": "mdi:window-open" - }, - "use_internal_window_detection": { - "default": "mdi:window-open" - }, - "prioritize_external_temperature_sensor": { - "default": "mdi:thermometer" - }, - "heat_available": { - "default": "mdi:water-boiler" - }, - "use_load_balancing": { - "default": "mdi:scale-balance" } } }, "services": { - "permit": { - "service": "mdi:cellphone-link" - }, - "remove": { - "service": "mdi:cellphone-remove" - }, - "reconfigure_device": { - "service": "mdi:cellphone-cog" - }, - "set_zigbee_cluster_attribute": { - "service": "mdi:cog" - }, - "issue_zigbee_cluster_command": { - "service": "mdi:console" - }, - "issue_zigbee_group_command": { - "service": "mdi:console" - }, - "warning_device_squawk": { - "service": "mdi:alert" - }, - "warning_device_warn": { - "service": "mdi:alert" - }, - "clear_lock_user_code": { - "service": "mdi:lock-remove" - }, - "enable_lock_user_code": { - "service": "mdi:lock" - }, - "disable_lock_user_code": { - "service": "mdi:lock-off" - }, - "set_lock_user_code": { - "service": "mdi:lock" - } + "permit": "mdi:cellphone-link", + "remove": "mdi:cellphone-remove", + "reconfigure_device": "mdi:cellphone-cog", + "set_zigbee_cluster_attribute": "mdi:cog", + "issue_zigbee_cluster_command": "mdi:console", + "issue_zigbee_group_command": "mdi:console", + "warning_device_squawk": "mdi:alert", + "warning_device_warn": "mdi:alert", + "clear_lock_user_code": "mdi:lock-remove", + "enable_lock_user_code": "mdi:lock", + "disable_lock_user_code": "mdi:lock-off", + "set_lock_user_code": "mdi:lock" } } diff --git a/homeassistant/components/zha/light.py b/homeassistant/components/zha/light.py index 9a22dfb02e9..6fd08de889f 100644 --- a/homeassistant/components/zha/light.py +++ b/homeassistant/components/zha/light.py @@ -2,58 +2,94 @@ from __future__ import annotations -from collections.abc import Mapping +from collections import Counter +from collections.abc import Callable +from datetime import timedelta import functools +import itertools import logging -from typing import Any +import random +from typing import TYPE_CHECKING, Any -from zha.application.platforms.light.const import ( - ColorMode as ZhaColorMode, - LightEntityFeature as ZhaLightEntityFeature, -) +from zigpy.zcl.clusters.general import Identify, LevelControl, OnOff +from zigpy.zcl.clusters.lighting import Color +from zigpy.zcl.foundation import Status +from homeassistant.components import light from 
homeassistant.components.light import ( - ATTR_BRIGHTNESS, - ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, - ATTR_EFFECT, - ATTR_FLASH, - ATTR_TRANSITION, - ATTR_XY_COLOR, ColorMode, - LightEntity, LightEntityFeature, + brightness_supported, + filter_supported_color_modes, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_ON, Platform -from homeassistant.core import HomeAssistant, State, callback -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .entity import ZHAEntity -from .helpers import ( - SIGNAL_ADD_ENTITIES, - EntityData, - async_add_entities as zha_async_add_entities, - convert_zha_error_to_ha_error, - get_zha_data, +from homeassistant.const import ( + ATTR_SUPPORTED_FEATURES, + STATE_ON, + STATE_UNAVAILABLE, + Platform, ) +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, State, callback +from homeassistant.helpers.debounce import Debouncer +from homeassistant.helpers.dispatcher import ( + async_dispatcher_connect, + async_dispatcher_send, +) +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.event import async_call_later, async_track_time_interval -ZHA_TO_HA_COLOR_MODE = { - ZhaColorMode.UNKNOWN: ColorMode.UNKNOWN, - ZhaColorMode.ONOFF: ColorMode.ONOFF, - ZhaColorMode.BRIGHTNESS: ColorMode.BRIGHTNESS, - ZhaColorMode.COLOR_TEMP: ColorMode.COLOR_TEMP, - ZhaColorMode.XY: ColorMode.XY, -} +from .core import discovery, helpers +from .core.const import ( + CLUSTER_HANDLER_COLOR, + CLUSTER_HANDLER_LEVEL, + CLUSTER_HANDLER_ON_OFF, + CONF_ALWAYS_PREFER_XY_COLOR_MODE, + CONF_DEFAULT_LIGHT_TRANSITION, + CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, + CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, + CONF_GROUP_MEMBERS_ASSUME_STATE, + DATA_ZHA, + SIGNAL_ADD_ENTITIES, + SIGNAL_ATTR_UPDATED, + SIGNAL_SET_LEVEL, + ZHA_OPTIONS, +) +from .core.helpers import LogMixin, async_get_zha_config_value, get_zha_data +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity, ZhaGroupEntity -HA_TO_ZHA_COLOR_MODE = {v: k for k, v in ZHA_TO_HA_COLOR_MODE.items()} - -OFF_BRIGHTNESS = "off_brightness" -OFF_WITH_TRANSITION = "off_with_transition" +if TYPE_CHECKING: + from .core.device import ZHADevice _LOGGER = logging.getLogger(__name__) +DEFAULT_ON_OFF_TRANSITION = 1 # most bulbs default to a 1-second turn on/off transition +DEFAULT_EXTRA_TRANSITION_DELAY_SHORT = 0.25 +DEFAULT_EXTRA_TRANSITION_DELAY_LONG = 2.0 +DEFAULT_LONG_TRANSITION_TIME = 10 +DEFAULT_MIN_BRIGHTNESS = 2 +ASSUME_UPDATE_GROUP_FROM_CHILD_DELAY = 0.05 + +FLASH_EFFECTS = { + light.FLASH_SHORT: Identify.EffectIdentifier.Blink, + light.FLASH_LONG: Identify.EffectIdentifier.Breathe, +} + +STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.LIGHT) +GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, Platform.LIGHT) +SIGNAL_LIGHT_GROUP_STATE_CHANGED = "zha_light_group_state_changed" +SIGNAL_LIGHT_GROUP_TRANSITION_START = "zha_light_group_transition_start" +SIGNAL_LIGHT_GROUP_TRANSITION_FINISHED = "zha_light_group_transition_finished" +SIGNAL_LIGHT_GROUP_ASSUME_GROUP_STATE = "zha_light_group_assume_group_state" +DEFAULT_MIN_TRANSITION_MANUFACTURERS = {"sengled"} + +COLOR_MODES_GROUP_LIGHT = {ColorMode.COLOR_TEMP, ColorMode.XY} +SUPPORT_GROUP_LIGHT = ( + light.LightEntityFeature.EFFECT + | light.LightEntityFeature.FLASH + | light.LightEntityFeature.TRANSITION +) + async def async_setup_entry( hass: HomeAssistant, @@ -68,137 +104,1280 @@ 
async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, async_add_entities, Light, entities_to_create + discovery.async_add_entities, async_add_entities, entities_to_create ), ) config_entry.async_on_unload(unsub) -class Light(LightEntity, ZHAEntity): - """Representation of a ZHA or ZLL light.""" +class BaseLight(LogMixin, light.LightEntity): + """Operations common to all light entities.""" - def __init__(self, entity_data: EntityData) -> None: - """Initialize the ZHA light.""" - super().__init__(entity_data) - color_modes: set[ColorMode] = set() - has_brightness = False - for color_mode in self.entity_data.entity.supported_color_modes: - if color_mode == ZhaColorMode.BRIGHTNESS: - has_brightness = True - if color_mode not in (ZhaColorMode.BRIGHTNESS, ZhaColorMode.ONOFF): - color_modes.add(ZHA_TO_HA_COLOR_MODE[color_mode]) - if color_modes: - self._attr_supported_color_modes = color_modes - elif has_brightness: - color_modes.add(ColorMode.BRIGHTNESS) - self._attr_supported_color_modes = color_modes - else: - color_modes.add(ColorMode.ONOFF) - self._attr_supported_color_modes = color_modes + _FORCE_ON = False + _DEFAULT_MIN_TRANSITION_TIME: float = 0 - features = LightEntityFeature(0) - zha_features: ZhaLightEntityFeature = self.entity_data.entity.supported_features + def __init__(self, *args, **kwargs): + """Initialize the light.""" + self._zha_device: ZHADevice = None + super().__init__(*args, **kwargs) + self._attr_min_mireds: int | None = 153 + self._attr_max_mireds: int | None = 500 + self._attr_color_mode = ColorMode.UNKNOWN # Set by subclasses + self._attr_supported_features: int = 0 + self._attr_state: bool | None + self._off_with_transition: bool = False + self._off_brightness: int | None = None + self._zha_config_transition = self._DEFAULT_MIN_TRANSITION_TIME + self._zha_config_enhanced_light_transition: bool = False + self._zha_config_enable_light_transitioning_flag: bool = True + self._zha_config_always_prefer_xy_color_mode: bool = True + self._on_off_cluster_handler = None + self._level_cluster_handler = None + self._color_cluster_handler = None + self._identify_cluster_handler = None + self._transitioning_individual: bool = False + self._transitioning_group: bool = False + self._transition_listener: Callable[[], None] | None = None - if ZhaLightEntityFeature.EFFECT in zha_features: - features |= LightEntityFeature.EFFECT - if ZhaLightEntityFeature.FLASH in zha_features: - features |= LightEntityFeature.FLASH - if ZhaLightEntityFeature.TRANSITION in zha_features: - features |= LightEntityFeature.TRANSITION - - self._attr_supported_features = features + async def async_will_remove_from_hass(self) -> None: + """Disconnect entity object when removed.""" + self._async_unsub_transition_listener() + await super().async_will_remove_from_hass() @property - def extra_state_attributes(self) -> Mapping[str, Any] | None: - """Return entity specific state attributes.""" - state = self.entity_data.entity.state + def extra_state_attributes(self) -> dict[str, Any]: + """Return state attributes.""" return { - "off_with_transition": state.get("off_with_transition"), - "off_brightness": state.get("off_brightness"), + "off_with_transition": self._off_with_transition, + "off_brightness": self._off_brightness, } @property def is_on(self) -> bool: """Return true if entity is on.""" - return self.entity_data.entity.is_on - - @property - def brightness(self) -> int: - """Return the brightness of this light.""" - return self.entity_data.entity.brightness - - 
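# Illustrative sketch (not part of this diff): the removed wrapper above maps the
# library's ZhaLightEntityFeature flags onto Home Assistant's LightEntityFeature one bit
# at a time. The same translation pattern with two hypothetical stdlib IntFlag enums.
import enum


class LibFeature(enum.IntFlag):
    EFFECT = 1
    FLASH = 2
    TRANSITION = 4


class HAFeature(enum.IntFlag):
    EFFECT = 4
    FLASH = 8
    TRANSITION = 32


def translate_features(lib_features: LibFeature) -> HAFeature:
    ha_features = HAFeature(0)
    for name in ("EFFECT", "FLASH", "TRANSITION"):
        if LibFeature[name] in lib_features:
            ha_features |= HAFeature[name]
    return ha_features


assert translate_features(LibFeature.EFFECT | LibFeature.TRANSITION) == (
    HAFeature.EFFECT | HAFeature.TRANSITION
)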
@property - def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" - return self.entity_data.entity.min_mireds - - @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" - return self.entity_data.entity.max_mireds - - @property - def xy_color(self) -> tuple[float, float] | None: - """Return the xy color value [float, float].""" - return self.entity_data.entity.xy_color - - @property - def color_temp(self) -> int | None: - """Return the CT color value in mireds.""" - return self.entity_data.entity.color_temp - - @property - def color_mode(self) -> ColorMode | None: - """Return the color mode.""" - if self.entity_data.entity.color_mode is None: - return None - return ZHA_TO_HA_COLOR_MODE[self.entity_data.entity.color_mode] - - @property - def effect_list(self) -> list[str] | None: - """Return the list of supported effects.""" - return self.entity_data.entity.effect_list - - @property - def effect(self) -> str | None: - """Return the current effect.""" - return self.entity_data.entity.effect - - @convert_zha_error_to_ha_error - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the entity on.""" - await self.entity_data.entity.async_turn_on( - transition=kwargs.get(ATTR_TRANSITION), - brightness=kwargs.get(ATTR_BRIGHTNESS), - effect=kwargs.get(ATTR_EFFECT), - flash=kwargs.get(ATTR_FLASH), - color_temp=kwargs.get(ATTR_COLOR_TEMP), - xy_color=kwargs.get(ATTR_XY_COLOR), - ) - self.async_write_ha_state() - - @convert_zha_error_to_ha_error - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the entity off.""" - await self.entity_data.entity.async_turn_off( - transition=kwargs.get(ATTR_TRANSITION) - ) - self.async_write_ha_state() + if self._attr_state is None: + return False + return self._attr_state @callback - def restore_external_state_attributes(self, state: State) -> None: - """Restore entity state.""" - self.entity_data.entity.restore_external_state_attributes( - state=(state.state == STATE_ON), - off_with_transition=state.attributes.get(OFF_WITH_TRANSITION), - off_brightness=state.attributes.get(OFF_BRIGHTNESS), - brightness=state.attributes.get(ATTR_BRIGHTNESS), - color_temp=state.attributes.get(ATTR_COLOR_TEMP), - xy_color=state.attributes.get(ATTR_XY_COLOR), - color_mode=( - HA_TO_ZHA_COLOR_MODE[ColorMode(state.attributes[ATTR_COLOR_MODE])] - if state.attributes.get(ATTR_COLOR_MODE) is not None - else None - ), - effect=state.attributes.get(ATTR_EFFECT), + def set_level(self, value: int) -> None: + """Set the brightness of this light between 0..254. 
+ + brightness level 255 is a special value instructing the device to come + on at `on_level` Zigbee attribute value, regardless of the last set + level + """ + if self.is_transitioning: + self.debug( + "received level %s while transitioning - skipping update", + value, + ) + return + value = max(0, min(254, value)) + self._attr_brightness = value + self.async_write_ha_state() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + transition = kwargs.get(light.ATTR_TRANSITION) + duration = ( + transition if transition is not None else self._zha_config_transition + ) or ( + # if 0 is passed in some devices still need the minimum default + self._DEFAULT_MIN_TRANSITION_TIME + ) + brightness = kwargs.get(light.ATTR_BRIGHTNESS) + effect = kwargs.get(light.ATTR_EFFECT) + flash = kwargs.get(light.ATTR_FLASH) + temperature = kwargs.get(light.ATTR_COLOR_TEMP) + xy_color = kwargs.get(light.ATTR_XY_COLOR) + hs_color = kwargs.get(light.ATTR_HS_COLOR) + + execute_if_off_supported = ( + self._GROUP_SUPPORTS_EXECUTE_IF_OFF + if isinstance(self, LightGroup) + else self._color_cluster_handler + and self._color_cluster_handler.execute_if_off_supported + ) + + set_transition_flag = ( + brightness_supported(self._attr_supported_color_modes) + or temperature is not None + or xy_color is not None + or hs_color is not None + ) and self._zha_config_enable_light_transitioning_flag + transition_time = ( + ( + duration + DEFAULT_EXTRA_TRANSITION_DELAY_SHORT + if ( + (brightness is not None or transition is not None) + and brightness_supported(self._attr_supported_color_modes) + or (self._off_with_transition and self._off_brightness is not None) + or temperature is not None + or xy_color is not None + or hs_color is not None + ) + else DEFAULT_ON_OFF_TRANSITION + DEFAULT_EXTRA_TRANSITION_DELAY_SHORT + ) + if set_transition_flag + else 0 + ) + + # If we need to pause attribute report parsing, we'll do so here. + # After successful calls, we later start a timer to unset the flag after + # transition_time. + # - On an error on the first move to level call, we unset the flag immediately + # if no previous timer is running. + # - On an error on subsequent calls, we start the transition timer, + # as a brightness call might have come through. + if set_transition_flag: + self.async_transition_set_flag() + + # If the light is currently off but a turn_on call with a color/temperature is + # sent, the light needs to be turned on first at a low brightness level where + # the light is immediately transitioned to the correct color. Afterwards, the + # transition is only from the low brightness to the new brightness. + # Otherwise, the transition is from the color the light had before being turned + # on to the new color. This can look especially bad with transitions longer than + # a second. We do not want to do this for devices that need to be forced to use + # the on command because we would end up with 4 commands sent: + # move to level, on, color, move to level... We also will not set this + # if the bulb is already in the desired color mode with the desired color + # or color temperature. 
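# Illustrative sketch (not part of this diff): the transition_time expression above picks
# how long attribute-report parsing stays paused. A simplified standalone version of that
# arithmetic (ignoring the off_with_transition and per-branch color cases), mirroring the
# constants defined at the top of this module; example_transition_time is hypothetical.
DEFAULT_ON_OFF_TRANSITION = 1  # seconds
DEFAULT_EXTRA_TRANSITION_DELAY_SHORT = 0.25


def example_transition_time(
    duration: float, changes_level_or_color: bool, flag_enabled: bool
) -> float:
    if not flag_enabled:
        return 0
    if changes_level_or_color:
        return duration + DEFAULT_EXTRA_TRANSITION_DELAY_SHORT
    return DEFAULT_ON_OFF_TRANSITION + DEFAULT_EXTRA_TRANSITION_DELAY_SHORT


# A 3 s brightness fade pauses parsing for 3.25 s; a bare turn_on pauses for 1.25 s.
assert example_transition_time(3, True, True) == 3.25
assert example_transition_time(3, False, True) == 1.25
assert example_transition_time(3, True, False) == 0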
+ new_color_provided_while_off = ( + self._zha_config_enhanced_light_transition + and not self._FORCE_ON + and not self._attr_state + and ( + ( + temperature is not None + and ( + self._attr_color_temp != temperature + or self._attr_color_mode != ColorMode.COLOR_TEMP + ) + ) + or ( + xy_color is not None + and ( + self._attr_xy_color != xy_color + or self._attr_color_mode != ColorMode.XY + ) + ) + or ( + hs_color is not None + and ( + self._attr_hs_color != hs_color + or self._attr_color_mode != ColorMode.HS + ) + ) + ) + and brightness_supported(self._attr_supported_color_modes) + and not execute_if_off_supported + ) + + if ( + brightness is None + and (self._off_with_transition or new_color_provided_while_off) + and self._off_brightness is not None + ): + brightness = self._off_brightness + + if brightness is not None: + level = min(254, brightness) + else: + level = self._attr_brightness or 254 + + t_log = {} + + if new_color_provided_while_off: + # If the light is currently off, we first need to turn it on at a low + # brightness level with no transition. + # After that, we set it to the desired color/temperature with no transition. + result = await self._level_cluster_handler.move_to_level_with_on_off( + level=DEFAULT_MIN_BRIGHTNESS, + transition_time=int(10 * self._DEFAULT_MIN_TRANSITION_TIME), + ) + t_log["move_to_level_with_on_off"] = result + if result[1] is not Status.SUCCESS: + # First 'move to level' call failed, so if the transitioning delay + # isn't running from a previous call, + # the flag can be unset immediately + if set_transition_flag and not self._transition_listener: + self.async_transition_complete() + self.debug("turned on: %s", t_log) + return + # Currently only setting it to "on", as the correct level state will + # be set at the second move_to_level call + self._attr_state = True + + if execute_if_off_supported: + self.debug("handling color commands before turning on/level") + if not await self.async_handle_color_commands( + temperature, + duration, # duration is ignored by lights when off + hs_color, + xy_color, + new_color_provided_while_off, + t_log, + ): + # Color calls before on/level calls failed, + # so if the transitioning delay isn't running from a previous call, + # the flag can be unset immediately + if set_transition_flag and not self._transition_listener: + self.async_transition_complete() + self.debug("turned on: %s", t_log) + return + + if ( + (brightness is not None or transition is not None) + and not new_color_provided_while_off + and brightness_supported(self._attr_supported_color_modes) + ): + result = await self._level_cluster_handler.move_to_level_with_on_off( + level=level, + transition_time=int(10 * duration), + ) + t_log["move_to_level_with_on_off"] = result + if result[1] is not Status.SUCCESS: + # First 'move to level' call failed, so if the transitioning delay + # isn't running from a previous call, the flag can be unset immediately + if set_transition_flag and not self._transition_listener: + self.async_transition_complete() + self.debug("turned on: %s", t_log) + return + self._attr_state = bool(level) + if level: + self._attr_brightness = level + + if ( + (brightness is None and transition is None) + and not new_color_provided_while_off + or (self._FORCE_ON and brightness != 0) + ): + # since FORCE_ON lights don't turn on with move_to_level_with_on_off, + # we should call the on command on the on_off cluster + # if brightness is not 0. 
+ result = await self._on_off_cluster_handler.on() + t_log["on_off"] = result + if result[1] is not Status.SUCCESS: + # 'On' call failed, but as brightness may still transition + # (for FORCE_ON lights), we start the timer to unset the flag after + # the transition_time if necessary. + self.async_transition_start_timer(transition_time) + self.debug("turned on: %s", t_log) + return + self._attr_state = True + + if not execute_if_off_supported: + self.debug("handling color commands after turning on/level") + if not await self.async_handle_color_commands( + temperature, + duration, + hs_color, + xy_color, + new_color_provided_while_off, + t_log, + ): + # Color calls failed, but as brightness may still transition, + # we start the timer to unset the flag + self.async_transition_start_timer(transition_time) + self.debug("turned on: %s", t_log) + return + + if new_color_provided_while_off: + # The light has the correct color, so we can now transition + # it to the correct brightness level. + result = await self._level_cluster_handler.move_to_level( + level=level, transition_time=int(10 * duration) + ) + t_log["move_to_level_if_color"] = result + if result[1] is not Status.SUCCESS: + self.debug("turned on: %s", t_log) + return + self._attr_state = bool(level) + if level: + self._attr_brightness = level + + # Our light is guaranteed to have just started the transitioning process + # if necessary, so we start the delay for the transition (to stop parsing + # attribute reports after the completed transition). + self.async_transition_start_timer(transition_time) + + if effect == light.EFFECT_COLORLOOP: + result = await self._color_cluster_handler.color_loop_set( + update_flags=( + Color.ColorLoopUpdateFlags.Action + | Color.ColorLoopUpdateFlags.Direction + | Color.ColorLoopUpdateFlags.Time + ), + action=Color.ColorLoopAction.Activate_from_current_hue, + direction=Color.ColorLoopDirection.Increment, + time=transition if transition else 7, + start_hue=0, + ) + t_log["color_loop_set"] = result + self._attr_effect = light.EFFECT_COLORLOOP + elif ( + self._attr_effect == light.EFFECT_COLORLOOP + and effect != light.EFFECT_COLORLOOP + ): + result = await self._color_cluster_handler.color_loop_set( + update_flags=Color.ColorLoopUpdateFlags.Action, + action=Color.ColorLoopAction.Deactivate, + direction=Color.ColorLoopDirection.Decrement, + time=0, + start_hue=0, + ) + t_log["color_loop_set"] = result + self._attr_effect = None + + if flash is not None: + result = await self._identify_cluster_handler.trigger_effect( + effect_id=FLASH_EFFECTS[flash], + effect_variant=Identify.EffectVariant.Default, + ) + t_log["trigger_effect"] = result + + self._off_with_transition = False + self._off_brightness = None + self.debug("turned on: %s", t_log) + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + transition = kwargs.get(light.ATTR_TRANSITION) + supports_level = brightness_supported(self._attr_supported_color_modes) + + transition_time = ( + transition or self._DEFAULT_MIN_TRANSITION_TIME + if transition is not None + else DEFAULT_ON_OFF_TRANSITION + ) + DEFAULT_EXTRA_TRANSITION_DELAY_SHORT + + # Start pausing attribute report parsing + if self._zha_config_enable_light_transitioning_flag: + self.async_transition_set_flag() + + # is not none looks odd here, but it will override built in bulb + # transition times if we pass 0 in here + if transition is not None and supports_level: + result = await self._level_cluster_handler.move_to_level_with_on_off( 
+ level=0, + transition_time=int( + 10 * (transition or self._DEFAULT_MIN_TRANSITION_TIME) + ), + ) + else: + result = await self._on_off_cluster_handler.off() + + # Pause parsing attribute reports until transition is complete + if self._zha_config_enable_light_transitioning_flag: + self.async_transition_start_timer(transition_time) + self.debug("turned off: %s", result) + if result[1] is not Status.SUCCESS: + return + self._attr_state = False + + if supports_level and not self._off_with_transition: + # store current brightness so that the next turn_on uses it: + # when using "enhanced turn on" + self._off_brightness = self._attr_brightness + if transition is not None: + # save for when calling turn_on without a brightness: + # current_level is set to 1 after transitioning to level 0, + # needed for correct state with light groups + self._attr_brightness = 1 + self._off_with_transition = transition is not None + + self.async_write_ha_state() + + async def async_handle_color_commands( + self, + temperature, + duration, + hs_color, + xy_color, + new_color_provided_while_off, + t_log, + ): + """Process ZCL color commands.""" + + transition_time = ( + self._DEFAULT_MIN_TRANSITION_TIME + if new_color_provided_while_off + else duration + ) + + if temperature is not None: + result = await self._color_cluster_handler.move_to_color_temp( + color_temp_mireds=temperature, + transition_time=int(10 * transition_time), + ) + t_log["move_to_color_temp"] = result + if result[1] is not Status.SUCCESS: + return False + self._attr_color_mode = ColorMode.COLOR_TEMP + self._attr_color_temp = temperature + self._attr_xy_color = None + self._attr_hs_color = None + + if hs_color is not None: + if ( + not isinstance(self, LightGroup) + and self._color_cluster_handler.enhanced_hue_supported + ): + result = await self._color_cluster_handler.enhanced_move_to_hue_and_saturation( + enhanced_hue=int(hs_color[0] * 65535 / 360), + saturation=int(hs_color[1] * 2.54), + transition_time=int(10 * transition_time), + ) + t_log["enhanced_move_to_hue_and_saturation"] = result + else: + result = await self._color_cluster_handler.move_to_hue_and_saturation( + hue=int(hs_color[0] * 254 / 360), + saturation=int(hs_color[1] * 2.54), + transition_time=int(10 * transition_time), + ) + t_log["move_to_hue_and_saturation"] = result + if result[1] is not Status.SUCCESS: + return False + self._attr_color_mode = ColorMode.HS + self._attr_hs_color = hs_color + self._attr_xy_color = None + self._attr_color_temp = None + xy_color = None # don't set xy_color if it is also present + + if xy_color is not None: + result = await self._color_cluster_handler.move_to_color( + color_x=int(xy_color[0] * 65535), + color_y=int(xy_color[1] * 65535), + transition_time=int(10 * transition_time), + ) + t_log["move_to_color"] = result + if result[1] is not Status.SUCCESS: + return False + self._attr_color_mode = ColorMode.XY + self._attr_xy_color = xy_color + self._attr_color_temp = None + self._attr_hs_color = None + + return True + + @property + def is_transitioning(self) -> bool: + """Return if the light is transitioning.""" + return self._transitioning_individual or self._transitioning_group + + @callback + def async_transition_set_flag(self) -> None: + """Set _transitioning to True.""" + self.debug("setting transitioning flag to True") + self._transitioning_individual = True + self._transitioning_group = False + if isinstance(self, LightGroup): + async_dispatcher_send( + self.hass, + SIGNAL_LIGHT_GROUP_TRANSITION_START, + {"entity_ids": self._entity_ids}, 
+ ) + self._async_unsub_transition_listener() + + @callback + def async_transition_start_timer(self, transition_time) -> None: + """Start a timer to unset _transitioning_individual after transition_time. + + If necessary. + """ + if not transition_time: + return + # For longer transitions, we want to extend the timer a bit more + if transition_time >= DEFAULT_LONG_TRANSITION_TIME: + transition_time += DEFAULT_EXTRA_TRANSITION_DELAY_LONG + self.debug("starting transitioning timer for %s", transition_time) + self._transition_listener = async_call_later( + self._zha_device.hass, + transition_time, + self.async_transition_complete, + ) + + @callback + def _async_unsub_transition_listener(self) -> None: + """Unsubscribe transition listener.""" + if self._transition_listener: + self._transition_listener() + self._transition_listener = None + + @callback + def async_transition_complete(self, _=None) -> None: + """Set _transitioning_individual to False and write HA state.""" + self.debug("transition complete - future attribute reports will write HA state") + self._transitioning_individual = False + self._async_unsub_transition_listener() + self.async_write_ha_state() + if isinstance(self, LightGroup): + async_dispatcher_send( + self.hass, + SIGNAL_LIGHT_GROUP_TRANSITION_FINISHED, + {"entity_ids": self._entity_ids}, + ) + if self._debounced_member_refresh is not None: + self.debug("transition complete - refreshing group member states") + assert self.platform.config_entry + self.platform.config_entry.async_create_background_task( + self.hass, + self._debounced_member_refresh.async_call(), + "zha.light-refresh-debounced-member", + ) + + +@STRICT_MATCH( + cluster_handler_names=CLUSTER_HANDLER_ON_OFF, + aux_cluster_handlers={CLUSTER_HANDLER_COLOR, CLUSTER_HANDLER_LEVEL}, +) +class Light(BaseLight, ZhaEntity): + """Representation of a ZHA or ZLL light.""" + + _attr_supported_color_modes: set[ColorMode] + _attr_translation_key: str = "light" + _REFRESH_INTERVAL = (45, 75) + + def __init__( + self, unique_id, zha_device: ZHADevice, cluster_handlers, **kwargs + ) -> None: + """Initialize the ZHA light.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._on_off_cluster_handler = self.cluster_handlers[CLUSTER_HANDLER_ON_OFF] + self._attr_state = bool(self._on_off_cluster_handler.on_off) + self._level_cluster_handler = self.cluster_handlers.get(CLUSTER_HANDLER_LEVEL) + self._color_cluster_handler = self.cluster_handlers.get(CLUSTER_HANDLER_COLOR) + self._identify_cluster_handler = zha_device.identify_ch + if self._color_cluster_handler: + self._attr_min_mireds: int = self._color_cluster_handler.min_mireds + self._attr_max_mireds: int = self._color_cluster_handler.max_mireds + self._cancel_refresh_handle: CALLBACK_TYPE | None = None + effect_list = [] + + self._zha_config_always_prefer_xy_color_mode = async_get_zha_config_value( + zha_device.gateway.config_entry, + ZHA_OPTIONS, + CONF_ALWAYS_PREFER_XY_COLOR_MODE, + True, + ) + + self._attr_supported_color_modes = {ColorMode.ONOFF} + if self._level_cluster_handler: + self._attr_supported_color_modes.add(ColorMode.BRIGHTNESS) + self._attr_supported_features |= light.LightEntityFeature.TRANSITION + self._attr_brightness = self._level_cluster_handler.current_level + + if self._color_cluster_handler: + if self._color_cluster_handler.color_temp_supported: + self._attr_supported_color_modes.add(ColorMode.COLOR_TEMP) + self._attr_color_temp = self._color_cluster_handler.color_temperature + + if self._color_cluster_handler.xy_supported and 
( + self._zha_config_always_prefer_xy_color_mode + or not self._color_cluster_handler.hs_supported + ): + self._attr_supported_color_modes.add(ColorMode.XY) + curr_x = self._color_cluster_handler.current_x + curr_y = self._color_cluster_handler.current_y + if curr_x is not None and curr_y is not None: + self._attr_xy_color = (curr_x / 65535, curr_y / 65535) + else: + self._attr_xy_color = (0, 0) + + if ( + self._color_cluster_handler.hs_supported + and not self._zha_config_always_prefer_xy_color_mode + ): + self._attr_supported_color_modes.add(ColorMode.HS) + if ( + self._color_cluster_handler.enhanced_hue_supported + and self._color_cluster_handler.enhanced_current_hue is not None + ): + curr_hue = ( + self._color_cluster_handler.enhanced_current_hue * 65535 / 360 + ) + elif self._color_cluster_handler.current_hue is not None: + curr_hue = self._color_cluster_handler.current_hue * 254 / 360 + else: + curr_hue = 0 + + if ( + curr_saturation := self._color_cluster_handler.current_saturation + ) is None: + curr_saturation = 0 + + self._attr_hs_color = ( + int(curr_hue), + int(curr_saturation * 2.54), + ) + + if self._color_cluster_handler.color_loop_supported: + self._attr_supported_features |= light.LightEntityFeature.EFFECT + effect_list.append(light.EFFECT_COLORLOOP) + if self._color_cluster_handler.color_loop_active == 1: + self._attr_effect = light.EFFECT_COLORLOOP + self._attr_supported_color_modes = filter_supported_color_modes( + self._attr_supported_color_modes + ) + if len(self._attr_supported_color_modes) == 1: + self._attr_color_mode = next(iter(self._attr_supported_color_modes)) + else: # Light supports color_temp + hs, determine which mode the light is in + assert self._color_cluster_handler + if ( + self._color_cluster_handler.color_mode + == Color.ColorMode.Color_temperature + ): + self._attr_color_mode = ColorMode.COLOR_TEMP + else: + self._attr_color_mode = ColorMode.XY + + if self._identify_cluster_handler: + self._attr_supported_features |= light.LightEntityFeature.FLASH + + if effect_list: + self._attr_effect_list = effect_list + + self._zha_config_transition = async_get_zha_config_value( + zha_device.gateway.config_entry, + ZHA_OPTIONS, + CONF_DEFAULT_LIGHT_TRANSITION, + 0, + ) + self._zha_config_enhanced_light_transition = async_get_zha_config_value( + zha_device.gateway.config_entry, + ZHA_OPTIONS, + CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, + False, + ) + self._zha_config_enable_light_transitioning_flag = async_get_zha_config_value( + zha_device.gateway.config_entry, + ZHA_OPTIONS, + CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, + True, + ) + + @callback + def async_set_state(self, attr_id, attr_name, value): + """Set the state.""" + if self.is_transitioning: + self.debug( + "received onoff %s while transitioning - skipping update", + value, + ) + return + self._attr_state = bool(value) + if value: + self._off_with_transition = False + self._off_brightness = None + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + self.async_accept_signal( + self._on_off_cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state + ) + if self._level_cluster_handler: + self.async_accept_signal( + self._level_cluster_handler, SIGNAL_SET_LEVEL, self.set_level + ) + refresh_interval = random.randint(*(x * 60 for x in self._REFRESH_INTERVAL)) + self._cancel_refresh_handle = async_track_time_interval( + self.hass, self._refresh, timedelta(seconds=refresh_interval) + ) + self.debug("started 
polling with refresh interval of %s", refresh_interval) + self.async_accept_signal( + None, + SIGNAL_LIGHT_GROUP_STATE_CHANGED, + self._maybe_force_refresh, + signal_override=True, + ) + + @callback + def transition_on(signal): + """Handle a transition start event from a group.""" + if self.entity_id in signal["entity_ids"]: + self.debug( + "group transition started - setting member transitioning flag" + ) + self._transitioning_group = True + + self.async_accept_signal( + None, + SIGNAL_LIGHT_GROUP_TRANSITION_START, + transition_on, + signal_override=True, + ) + + @callback + def transition_off(signal): + """Handle a transition finished event from a group.""" + if self.entity_id in signal["entity_ids"]: + self.debug( + "group transition completed - unsetting member transitioning flag" + ) + self._transitioning_group = False + + self.async_accept_signal( + None, + SIGNAL_LIGHT_GROUP_TRANSITION_FINISHED, + transition_off, + signal_override=True, + ) + + self.async_accept_signal( + None, + SIGNAL_LIGHT_GROUP_ASSUME_GROUP_STATE, + self._assume_group_state, + signal_override=True, + ) + + async def async_will_remove_from_hass(self) -> None: + """Disconnect entity object when removed.""" + assert self._cancel_refresh_handle + self._cancel_refresh_handle() + self._cancel_refresh_handle = None + self.debug("stopped polling during device removal") + await super().async_will_remove_from_hass() + + @callback + def async_restore_last_state(self, last_state): + """Restore previous state.""" + self._attr_state = last_state.state == STATE_ON + if "brightness" in last_state.attributes: + self._attr_brightness = last_state.attributes["brightness"] + if "off_with_transition" in last_state.attributes: + self._off_with_transition = last_state.attributes["off_with_transition"] + if "off_brightness" in last_state.attributes: + self._off_brightness = last_state.attributes["off_brightness"] + if (color_mode := last_state.attributes.get("color_mode")) is not None: + self._attr_color_mode = ColorMode(color_mode) + if "color_temp" in last_state.attributes: + self._attr_color_temp = last_state.attributes["color_temp"] + if "xy_color" in last_state.attributes: + self._attr_xy_color = last_state.attributes["xy_color"] + if "hs_color" in last_state.attributes: + self._attr_hs_color = last_state.attributes["hs_color"] + if "effect" in last_state.attributes: + self._attr_effect = last_state.attributes["effect"] + + async def async_get_state(self) -> None: + """Attempt to retrieve the state from the light.""" + if not self._attr_available: + return + self.debug("polling current state") + + if self._on_off_cluster_handler: + state = await self._on_off_cluster_handler.get_attribute_value( + "on_off", from_cache=False + ) + # check if transition started whilst waiting for polled state + if self.is_transitioning: + return + + if state is not None: + self._attr_state = state + if state: # reset "off with transition" flag if the light is on + self._off_with_transition = False + self._off_brightness = None + + if self._level_cluster_handler: + level = await self._level_cluster_handler.get_attribute_value( + "current_level", from_cache=False + ) + # check if transition started whilst waiting for polled state + if self.is_transitioning: + return + if level is not None: + self._attr_brightness = level + + if self._color_cluster_handler: + attributes = [ + "color_mode", + "current_x", + "current_y", + ] + if ( + not self._zha_config_always_prefer_xy_color_mode + and self._color_cluster_handler.enhanced_hue_supported + ): + 
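# Illustrative sketch (not part of this diff): async_added_to_hass above schedules state
# polling at a random interval inside _REFRESH_INTERVAL (minutes) so many bulbs do not
# all poll at the same moment. The interval arithmetic on its own; names other than
# random/timedelta are hypothetical.
import random
from datetime import timedelta

REFRESH_INTERVAL_MINUTES = (45, 75)  # Light default; HueLight narrows this to (3, 5)

refresh_seconds = random.randint(*(x * 60 for x in REFRESH_INTERVAL_MINUTES))
poll_every = timedelta(seconds=refresh_seconds)
assert timedelta(minutes=45) <= poll_every <= timedelta(minutes=75)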
attributes.append("enhanced_current_hue") + attributes.append("current_saturation") + if ( + self._color_cluster_handler.hs_supported + and not self._color_cluster_handler.enhanced_hue_supported + and not self._zha_config_always_prefer_xy_color_mode + ): + attributes.append("current_hue") + attributes.append("current_saturation") + if self._color_cluster_handler.color_temp_supported: + attributes.append("color_temperature") + if self._color_cluster_handler.color_loop_supported: + attributes.append("color_loop_active") + + results = await self._color_cluster_handler.get_attributes( + attributes, from_cache=False, only_cache=False + ) + + # although rare, a transition might have been started while we were waiting + # for the polled attributes, so abort if we are transitioning, + # as that state will not be accurate + if self.is_transitioning: + return + + if (color_mode := results.get("color_mode")) is not None: + if color_mode == Color.ColorMode.Color_temperature: + self._attr_color_mode = ColorMode.COLOR_TEMP + color_temp = results.get("color_temperature") + if color_temp is not None and color_mode: + self._attr_color_temp = color_temp + self._attr_xy_color = None + self._attr_hs_color = None + elif ( + color_mode == Color.ColorMode.Hue_and_saturation + and not self._zha_config_always_prefer_xy_color_mode + ): + self._attr_color_mode = ColorMode.HS + if self._color_cluster_handler.enhanced_hue_supported: + current_hue = results.get("enhanced_current_hue") + else: + current_hue = results.get("current_hue") + current_saturation = results.get("current_saturation") + if current_hue is not None and current_saturation is not None: + self._attr_hs_color = ( + int(current_hue * 360 / 65535) + if self._color_cluster_handler.enhanced_hue_supported + else int(current_hue * 360 / 254), + int(current_saturation / 2.54), + ) + self._attr_xy_color = None + self._attr_color_temp = None + else: + self._attr_color_mode = ColorMode.XY + color_x = results.get("current_x") + color_y = results.get("current_y") + if color_x is not None and color_y is not None: + self._attr_xy_color = (color_x / 65535, color_y / 65535) + self._attr_color_temp = None + self._attr_hs_color = None + + color_loop_active = results.get("color_loop_active") + if color_loop_active is not None: + if color_loop_active == 1: + self._attr_effect = light.EFFECT_COLORLOOP + else: + self._attr_effect = None + + async def async_update(self) -> None: + """Update to the latest state.""" + if self.is_transitioning: + self.debug("skipping async_update while transitioning") + return + await self.async_get_state() + + async def _refresh(self, time): + """Call async_get_state at an interval.""" + if self.is_transitioning: + self.debug("skipping _refresh while transitioning") + return + if self._zha_device.available and self.hass.data[DATA_ZHA].allow_polling: + self.debug("polling for updated state") + await self.async_get_state() + self.async_write_ha_state() + else: + self.debug( + "skipping polling for updated state, available: %s, allow polled requests: %s", + self._zha_device.available, + self.hass.data[DATA_ZHA].allow_polling, + ) + + async def _maybe_force_refresh(self, signal): + """Force update the state if the signal contains the entity id for this entity.""" + if self.entity_id in signal["entity_ids"]: + if self.is_transitioning: + self.debug("skipping _maybe_force_refresh while transitioning") + return + if self._zha_device.available and self.hass.data[DATA_ZHA].allow_polling: + self.debug("forcing polling for updated state") + await 
self.async_get_state() + self.async_write_ha_state() + else: + self.debug( + "skipping _maybe_force_refresh, available: %s, allow polled requests: %s", + self._zha_device.available, + self.hass.data[DATA_ZHA].allow_polling, + ) + + @callback + def _assume_group_state(self, signal, update_params) -> None: + """Handle an assume group state event from a group.""" + if self.entity_id in signal["entity_ids"] and self._attr_available: + self.debug("member assuming group state with: %s", update_params) + + state = update_params["state"] + brightness = update_params.get(light.ATTR_BRIGHTNESS) + color_mode = update_params.get(light.ATTR_COLOR_MODE) + color_temp = update_params.get(light.ATTR_COLOR_TEMP) + xy_color = update_params.get(light.ATTR_XY_COLOR) + hs_color = update_params.get(light.ATTR_HS_COLOR) + effect = update_params.get(light.ATTR_EFFECT) + + supported_modes = self._attr_supported_color_modes + + # unset "off brightness" and "off with transition" + # if group turned on this light + if state and not self._attr_state: + self._off_with_transition = False + self._off_brightness = None + + # set "off brightness" and "off with transition" + # if group turned off this light, and the light was not already off + # (to not override _off_with_transition) + elif ( + not state and self._attr_state and brightness_supported(supported_modes) + ): + # use individual brightness, instead of possibly averaged + # brightness from group + self._off_brightness = self._attr_brightness + self._off_with_transition = update_params["off_with_transition"] + + # Note: If individual lights have off_with_transition set, but not the + # group, and the group is then turned on without a level, individual lights + # might fall back to brightness level 1. + # Since all lights might need different brightness levels to be turned on, + # we can't use one group call. And making individual calls when turning on + # a ZHA group would cause a lot of traffic. In this case, + # turn_on should either just be called with a level or individual turn_on + # calls can be used. 
+ + # state is always set (light.turn_on/light.turn_off) + self._attr_state = state + + # before assuming a group state attribute, check if the attribute + # was actually set in that call + if brightness is not None and brightness_supported(supported_modes): + self._attr_brightness = brightness + if color_mode is not None and color_mode in supported_modes: + self._attr_color_mode = color_mode + if color_temp is not None and ColorMode.COLOR_TEMP in supported_modes: + self._attr_color_temp = color_temp + if xy_color is not None and ColorMode.XY in supported_modes: + self._attr_xy_color = xy_color + if hs_color is not None and ColorMode.HS in supported_modes: + self._attr_hs_color = hs_color + # the effect is always deactivated in async_turn_on if not provided + if effect is None: + self._attr_effect = None + elif self._attr_effect_list and effect in self._attr_effect_list: + self._attr_effect = effect + + self.async_write_ha_state() + + +@STRICT_MATCH( + cluster_handler_names=CLUSTER_HANDLER_ON_OFF, + aux_cluster_handlers={CLUSTER_HANDLER_COLOR, CLUSTER_HANDLER_LEVEL}, + manufacturers={"Philips", "Signify Netherlands B.V."}, +) +class HueLight(Light): + """Representation of a HUE light which does not report attributes.""" + + _REFRESH_INTERVAL = (3, 5) + + +@STRICT_MATCH( + cluster_handler_names=CLUSTER_HANDLER_ON_OFF, + aux_cluster_handlers={CLUSTER_HANDLER_COLOR, CLUSTER_HANDLER_LEVEL}, + manufacturers={"Jasco", "Jasco Products", "Quotra-Vision", "eWeLight", "eWeLink"}, +) +class ForceOnLight(Light): + """Representation of a light which does not respect on/off for move_to_level_with_on_off commands.""" + + _FORCE_ON = True + + +@STRICT_MATCH( + cluster_handler_names=CLUSTER_HANDLER_ON_OFF, + aux_cluster_handlers={CLUSTER_HANDLER_COLOR, CLUSTER_HANDLER_LEVEL}, + manufacturers=DEFAULT_MIN_TRANSITION_MANUFACTURERS, +) +class MinTransitionLight(Light): + """Representation of a light which does not react to any "move to" calls with 0 as a transition.""" + + # Transitions are counted in 1/10th of a second increments, so this is the smallest + _DEFAULT_MIN_TRANSITION_TIME = 0.1 + + +@GROUP_MATCH() +class LightGroup(BaseLight, ZhaGroupEntity): + """Representation of a light group.""" + + _attr_translation_key: str = "light_group" + + def __init__( + self, + entity_ids: list[str], + unique_id: str, + group_id: int, + zha_device: ZHADevice, + **kwargs: Any, + ) -> None: + """Initialize a light group.""" + super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs) + group = self.zha_device.gateway.get_group(self._group_id) + + self._GROUP_SUPPORTS_EXECUTE_IF_OFF = True + + for member in group.members: + # Ensure we do not send group commands that violate the minimum transition + # time of any members. + if member.device.manufacturer in DEFAULT_MIN_TRANSITION_MANUFACTURERS: + self._DEFAULT_MIN_TRANSITION_TIME = ( + MinTransitionLight._DEFAULT_MIN_TRANSITION_TIME # noqa: SLF001 + ) + + # Check all group members to see if they support execute_if_off. + # If at least one member has a color cluster and doesn't support it, + # it's not used. 
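# Illustrative sketch (not part of this diff): LightGroup.__init__ above derives group
# behavior from its members -- the slowest minimum transition wins, and execute_if_off is
# only used when every color-capable member supports it. The same reduction over plain
# stand-in records; Member and the sample data are hypothetical.
from dataclasses import dataclass


@dataclass
class Member:
    manufacturer: str
    has_color: bool
    execute_if_off_supported: bool


MIN_TRANSITION_MANUFACTURERS = {"sengled"}
members = [
    Member("sengled", has_color=False, execute_if_off_supported=False),
    Member("Signify Netherlands B.V.", has_color=True, execute_if_off_supported=True),
]

default_min_transition = (
    0.1 if any(m.manufacturer in MIN_TRANSITION_MANUFACTURERS for m in members) else 0
)
group_supports_execute_if_off = all(
    m.execute_if_off_supported for m in members if m.has_color
)
assert default_min_transition == 0.1
assert group_supports_execute_if_off is True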
+ for endpoint in member.device._endpoints.values(): # noqa: SLF001 + for cluster_handler in endpoint.all_cluster_handlers.values(): + if ( + cluster_handler.name == CLUSTER_HANDLER_COLOR + and not cluster_handler.execute_if_off_supported + ): + self._GROUP_SUPPORTS_EXECUTE_IF_OFF = False + break + + self._on_off_cluster_handler = group.endpoint[OnOff.cluster_id] + self._level_cluster_handler = group.endpoint[LevelControl.cluster_id] + self._color_cluster_handler = group.endpoint[Color.cluster_id] + self._identify_cluster_handler = group.endpoint[Identify.cluster_id] + self._debounced_member_refresh: Debouncer | None = None + self._zha_config_transition = async_get_zha_config_value( + zha_device.gateway.config_entry, + ZHA_OPTIONS, + CONF_DEFAULT_LIGHT_TRANSITION, + 0, + ) + self._zha_config_enable_light_transitioning_flag = async_get_zha_config_value( + zha_device.gateway.config_entry, + ZHA_OPTIONS, + CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, + True, + ) + self._zha_config_always_prefer_xy_color_mode = async_get_zha_config_value( + zha_device.gateway.config_entry, + ZHA_OPTIONS, + CONF_ALWAYS_PREFER_XY_COLOR_MODE, + True, + ) + self._zha_config_group_members_assume_state = async_get_zha_config_value( + zha_device.gateway.config_entry, + ZHA_OPTIONS, + CONF_GROUP_MEMBERS_ASSUME_STATE, + True, + ) + if self._zha_config_group_members_assume_state: + self._update_group_from_child_delay = ASSUME_UPDATE_GROUP_FROM_CHILD_DELAY + self._zha_config_enhanced_light_transition = False + + self._attr_color_mode = ColorMode.UNKNOWN + self._attr_supported_color_modes = {ColorMode.ONOFF} + + # remove this when all ZHA platforms and base entities are updated + @property + def available(self) -> bool: + """Return entity availability.""" + return self._attr_available + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + if self._debounced_member_refresh is None: + force_refresh_debouncer = Debouncer( + self.hass, + _LOGGER, + cooldown=3, + immediate=True, + function=self._force_member_updates, + ) + self._debounced_member_refresh = force_refresh_debouncer + self.async_on_remove(force_refresh_debouncer.async_cancel) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + # "off with transition" and "off brightness" will get overridden when + # turning on the group, but they are needed for setting the assumed + # member state correctly, so save them here + off_brightness = self._off_brightness if self._off_with_transition else None + await super().async_turn_on(**kwargs) + if self._zha_config_group_members_assume_state: + self._send_member_assume_state_event(True, kwargs, off_brightness) + if self.is_transitioning: # when transitioning, state is refreshed at the end + return + if self._debounced_member_refresh: + await self._debounced_member_refresh.async_call() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + await super().async_turn_off(**kwargs) + if self._zha_config_group_members_assume_state: + self._send_member_assume_state_event(False, kwargs) + if self.is_transitioning: + return + if self._debounced_member_refresh: + await self._debounced_member_refresh.async_call() + + async def async_update(self) -> None: + """Query all members and determine the light group state.""" + self.debug("updating group state") + all_states = [self.hass.states.get(x) for x in self._entity_ids] + states: list[State] = list(filter(None, all_states)) + on_states = [state for state 
in states if state.state == STATE_ON] + + self._attr_state = len(on_states) > 0 + + # reset "off with transition" flag if any member is on + if self._attr_state: + self._off_with_transition = False + self._off_brightness = None + + self._attr_available = any(state.state != STATE_UNAVAILABLE for state in states) + + self._attr_brightness = helpers.reduce_attribute( + on_states, light.ATTR_BRIGHTNESS + ) + + self._attr_xy_color = helpers.reduce_attribute( + on_states, light.ATTR_XY_COLOR, reduce=helpers.mean_tuple + ) + + if not self._zha_config_always_prefer_xy_color_mode: + self._attr_hs_color = helpers.reduce_attribute( + on_states, light.ATTR_HS_COLOR, reduce=helpers.mean_tuple + ) + + self._attr_color_temp = helpers.reduce_attribute( + on_states, light.ATTR_COLOR_TEMP + ) + self._attr_min_mireds = helpers.reduce_attribute( + states, light.ATTR_MIN_MIREDS, default=153, reduce=min + ) + self._attr_max_mireds = helpers.reduce_attribute( + states, light.ATTR_MAX_MIREDS, default=500, reduce=max + ) + + self._attr_effect_list = None + all_effect_lists = list( + helpers.find_state_attributes(states, light.ATTR_EFFECT_LIST) + ) + if all_effect_lists: + # Merge all effects from all effect_lists with a union merge. + self._attr_effect_list = list(set().union(*all_effect_lists)) + + self._attr_effect = None + all_effects = list(helpers.find_state_attributes(on_states, light.ATTR_EFFECT)) + if all_effects: + # Report the most common effect. + effects_count = Counter(itertools.chain(all_effects)) + self._attr_effect = effects_count.most_common(1)[0][0] + + supported_color_modes = {ColorMode.ONOFF} + all_supported_color_modes: list[set[ColorMode]] = list( + helpers.find_state_attributes(states, light.ATTR_SUPPORTED_COLOR_MODES) + ) + if all_supported_color_modes: + # Merge all color modes. + supported_color_modes = filter_supported_color_modes( + set().union(*all_supported_color_modes) + ) + + self._attr_supported_color_modes = supported_color_modes + + self._attr_color_mode = ColorMode.UNKNOWN + all_color_modes = list( + helpers.find_state_attributes(on_states, light.ATTR_COLOR_MODE) + ) + if all_color_modes: + # Report the most common color mode, select brightness and onoff last + color_mode_count = Counter(itertools.chain(all_color_modes)) + if ColorMode.ONOFF in color_mode_count: + if ColorMode.ONOFF in supported_color_modes: + color_mode_count[ColorMode.ONOFF] = -1 + else: + color_mode_count.pop(ColorMode.ONOFF) + if ColorMode.BRIGHTNESS in color_mode_count: + if ColorMode.BRIGHTNESS in supported_color_modes: + color_mode_count[ColorMode.BRIGHTNESS] = 0 + else: + color_mode_count.pop(ColorMode.BRIGHTNESS) + if color_mode_count: + self._attr_color_mode = color_mode_count.most_common(1)[0][0] + else: + self._attr_color_mode = next(iter(supported_color_modes)) + + if self._attr_color_mode == ColorMode.HS and ( + color_mode_count[ColorMode.HS] != len(self._group.members) + or self._zha_config_always_prefer_xy_color_mode + ): # switch to XY if all members do not support HS + self._attr_color_mode = ColorMode.XY + + self._attr_supported_features = LightEntityFeature(0) + for support in helpers.find_state_attributes(states, ATTR_SUPPORTED_FEATURES): + # Merge supported features by emulating support for every feature + # we find. + self._attr_supported_features |= support + # Bitwise-and the supported features with the GroupedLight's features + # so that we don't break in the future when a new feature is added. 
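The merge described above, before the mask is applied just below, is plain IntFlag arithmetic. A small standalone sketch shows how the union of member features is then clamped to what the group entity can actually handle; the SUPPORT_GROUP_LIGHT value here is an assumption for illustration, the real constant is defined elsewhere in this module:

from homeassistant.components.light import LightEntityFeature

# Assumed mask for illustration only.
SUPPORT_GROUP_LIGHT = (
    LightEntityFeature.EFFECT | LightEntityFeature.FLASH | LightEntityFeature.TRANSITION
)

member_features = [
    LightEntityFeature.TRANSITION,                              # member 1
    LightEntityFeature.TRANSITION | LightEntityFeature.EFFECT,  # member 2
]

merged = LightEntityFeature(0)
for features in member_features:
    merged |= features          # emulate support for every feature any member has
merged &= SUPPORT_GROUP_LIGHT   # keep only features the group entity implements
# merged == LightEntityFeature.TRANSITION | LightEntityFeature.EFFECT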
+ self._attr_supported_features &= SUPPORT_GROUP_LIGHT + + async def _force_member_updates(self) -> None: + """Force the update of member entities to ensure the states are correct for bulbs that don't report their state.""" + async_dispatcher_send( + self.hass, + SIGNAL_LIGHT_GROUP_STATE_CHANGED, + {"entity_ids": self._entity_ids}, + ) + + def _send_member_assume_state_event( + self, state, service_kwargs, off_brightness=None + ) -> None: + """Send an assume event to all members of the group.""" + update_params = { + "state": state, + "off_with_transition": self._off_with_transition, + } + + # check if the parameters were actually updated + # in the service call before updating members + if light.ATTR_BRIGHTNESS in service_kwargs: # or off brightness + update_params[light.ATTR_BRIGHTNESS] = self._attr_brightness + elif off_brightness is not None: + # if we turn on the group light with "off brightness", + # pass that to the members + update_params[light.ATTR_BRIGHTNESS] = off_brightness + + if light.ATTR_COLOR_TEMP in service_kwargs: + update_params[light.ATTR_COLOR_MODE] = self._attr_color_mode + update_params[light.ATTR_COLOR_TEMP] = self._attr_color_temp + + if light.ATTR_XY_COLOR in service_kwargs: + update_params[light.ATTR_COLOR_MODE] = self._attr_color_mode + update_params[light.ATTR_XY_COLOR] = self._attr_xy_color + + if light.ATTR_HS_COLOR in service_kwargs: + update_params[light.ATTR_COLOR_MODE] = self._attr_color_mode + update_params[light.ATTR_HS_COLOR] = self._attr_hs_color + + if light.ATTR_EFFECT in service_kwargs: + update_params[light.ATTR_EFFECT] = self._attr_effect + + async_dispatcher_send( + self.hass, + SIGNAL_LIGHT_GROUP_ASSUME_GROUP_STATE, + {"entity_ids": self._entity_ids}, + update_params, ) diff --git a/homeassistant/components/zha/lock.py b/homeassistant/components/zha/lock.py index ebac03eb7b8..fa719075c05 100644 --- a/homeassistant/components/zha/lock.py +++ b/homeassistant/components/zha/lock.py @@ -4,25 +4,35 @@ import functools from typing import Any import voluptuous as vol +from zigpy.zcl.foundation import Status -from homeassistant.components.lock import LockEntity +from homeassistant.components.lock import STATE_LOCKED, STATE_UNLOCKED, LockEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, State, callback +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import ( AddEntitiesCallback, async_get_current_platform, ) +from homeassistant.helpers.typing import StateType -from .entity import ZHAEntity -from .helpers import ( +from .core import discovery +from .core.const import ( + CLUSTER_HANDLER_DOORLOCK, SIGNAL_ADD_ENTITIES, - async_add_entities as zha_async_add_entities, - convert_zha_error_to_ha_error, - get_zha_data, + SIGNAL_ATTR_UPDATED, ) +from .core.helpers import get_zha_data +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity + +# The first state is Zigbee 'Not fully locked' +STATE_LIST = [STATE_UNLOCKED, STATE_LOCKED, STATE_UNLOCKED] +MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.LOCK) + +VALUE_TO_STATE = dict(enumerate(STATE_LIST)) SERVICE_SET_LOCK_USER_CODE = "set_lock_user_code" SERVICE_ENABLE_LOCK_USER_CODE = "enable_lock_user_code" @@ -43,7 +53,7 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - 
zha_async_add_entities, async_add_entities, ZhaDoorLock, entities_to_create + discovery.async_add_entities, async_add_entities, entities_to_create ), ) config_entry.async_on_unload(unsub) @@ -84,57 +94,105 @@ async def async_setup_entry( ) -class ZhaDoorLock(ZHAEntity, LockEntity): +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_DOORLOCK) +class ZhaDoorLock(ZhaEntity, LockEntity): """Representation of a ZHA lock.""" _attr_translation_key: str = "door_lock" + def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): + """Init this sensor.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._doorlock_cluster_handler = self.cluster_handlers.get( + CLUSTER_HANDLER_DOORLOCK + ) + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + self.async_accept_signal( + self._doorlock_cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state + ) + + @callback + def async_restore_last_state(self, last_state): + """Restore previous state.""" + self._state = VALUE_TO_STATE.get(last_state.state, last_state.state) + @property def is_locked(self) -> bool: """Return true if entity is locked.""" - return self.entity_data.entity.is_locked + if self._state is None: + return False + return self._state == STATE_LOCKED + + @property + def extra_state_attributes(self) -> dict[str, StateType]: + """Return state attributes.""" + return self.state_attributes - @convert_zha_error_to_ha_error async def async_lock(self, **kwargs: Any) -> None: """Lock the lock.""" - await self.entity_data.entity.async_lock() + result = await self._doorlock_cluster_handler.lock_door() + if result[0] is not Status.SUCCESS: + self.error("Error with lock_door: %s", result) + return self.async_write_ha_state() - @convert_zha_error_to_ha_error async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock.""" - await self.entity_data.entity.async_unlock() + result = await self._doorlock_cluster_handler.unlock_door() + if result[0] is not Status.SUCCESS: + self.error("Error with unlock_door: %s", result) + return self.async_write_ha_state() - @convert_zha_error_to_ha_error - async def async_set_lock_user_code(self, code_slot: int, user_code: str) -> None: - """Set the user_code to index X on the lock.""" - await self.entity_data.entity.async_set_lock_user_code( - code_slot=code_slot, user_code=user_code - ) - self.async_write_ha_state() - - @convert_zha_error_to_ha_error - async def async_enable_lock_user_code(self, code_slot: int) -> None: - """Enable user_code at index X on the lock.""" - await self.entity_data.entity.async_enable_lock_user_code(code_slot=code_slot) - self.async_write_ha_state() - - @convert_zha_error_to_ha_error - async def async_disable_lock_user_code(self, code_slot: int) -> None: - """Disable user_code at index X on the lock.""" - await self.entity_data.entity.async_disable_lock_user_code(code_slot=code_slot) - self.async_write_ha_state() - - @convert_zha_error_to_ha_error - async def async_clear_lock_user_code(self, code_slot: int) -> None: - """Clear the user_code at index X on the lock.""" - await self.entity_data.entity.async_clear_lock_user_code(code_slot=code_slot) - self.async_write_ha_state() + async def async_update(self) -> None: + """Attempt to retrieve state from the lock.""" + await super().async_update() + await self.async_get_state() @callback - def restore_external_state_attributes(self, state: State) -> None: - """Restore entity state.""" - 
self.entity_data.entity.restore_external_state_attributes( - state=state.state, - ) + def async_set_state(self, attr_id, attr_name, value): + """Handle state update from cluster handler.""" + self._state = VALUE_TO_STATE.get(value, self._state) + self.async_write_ha_state() + + async def async_get_state(self, from_cache=True): + """Attempt to retrieve state from the lock.""" + if self._doorlock_cluster_handler: + state = await self._doorlock_cluster_handler.get_attribute_value( + "lock_state", from_cache=from_cache + ) + if state is not None: + self._state = VALUE_TO_STATE.get(state, self._state) + + async def refresh(self, time): + """Call async_get_state at an interval.""" + await self.async_get_state(from_cache=False) + + async def async_set_lock_user_code(self, code_slot: int, user_code: str) -> None: + """Set the user_code to index X on the lock.""" + if self._doorlock_cluster_handler: + await self._doorlock_cluster_handler.async_set_user_code( + code_slot, user_code + ) + self.debug("User code at slot %s set", code_slot) + + async def async_enable_lock_user_code(self, code_slot: int) -> None: + """Enable user_code at index X on the lock.""" + if self._doorlock_cluster_handler: + await self._doorlock_cluster_handler.async_enable_user_code(code_slot) + self.debug("User code at slot %s enabled", code_slot) + + async def async_disable_lock_user_code(self, code_slot: int) -> None: + """Disable user_code at index X on the lock.""" + if self._doorlock_cluster_handler: + await self._doorlock_cluster_handler.async_disable_user_code(code_slot) + self.debug("User code at slot %s disabled", code_slot) + + async def async_clear_lock_user_code(self, code_slot: int) -> None: + """Clear the user_code at index X on the lock.""" + if self._doorlock_cluster_handler: + await self._doorlock_cluster_handler.async_clear_user_code(code_slot) + self.debug("User code at slot %s cleared", code_slot) diff --git a/homeassistant/components/zha/logbook.py b/homeassistant/components/zha/logbook.py index 3de81e1255d..e63ef565824 100644 --- a/homeassistant/components/zha/logbook.py +++ b/homeassistant/components/zha/logbook.py @@ -5,18 +5,16 @@ from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING -from zha.application.const import ZHA_EVENT - from homeassistant.components.logbook import LOGBOOK_ENTRY_MESSAGE, LOGBOOK_ENTRY_NAME from homeassistant.const import ATTR_COMMAND, ATTR_DEVICE_ID from homeassistant.core import Event, HomeAssistant, callback import homeassistant.helpers.device_registry as dr -from .const import DOMAIN as ZHA_DOMAIN -from .helpers import async_get_zha_device_proxy +from .core.const import DOMAIN as ZHA_DOMAIN, ZHA_EVENT +from .core.helpers import async_get_zha_device if TYPE_CHECKING: - from zha.zigbee.device import Device + from .core.device import ZHADevice @callback @@ -32,7 +30,7 @@ def async_describe_events( """Describe ZHA logbook event.""" device: dr.DeviceEntry | None = None device_name: str = "Unknown device" - zha_device: Device | None = None + zha_device: ZHADevice | None = None event_data = event.data event_type: str | None = None event_subtype: str | None = None @@ -41,9 +39,7 @@ def async_describe_events( device = device_registry.devices[event.data[ATTR_DEVICE_ID]] if device: device_name = device.name_by_user or device.name or "Unknown device" - zha_device = async_get_zha_device_proxy( - hass, event.data[ATTR_DEVICE_ID] - ).device + zha_device = async_get_zha_device(hass, event.data[ATTR_DEVICE_ID]) except (KeyError, 
AttributeError): pass diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 96c9bc030f6..7087ff0b2f0 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -1,7 +1,7 @@ { "domain": "zha", "name": "Zigbee Home Automation", - "after_dependencies": ["hassio", "onboarding", "usb"], + "after_dependencies": ["onboarding", "usb"], "codeowners": ["@dmulcahey", "@adminiuga", "@puddly", "@TheJulianJES"], "config_flow": true, "dependencies": ["file_upload"], @@ -18,10 +18,20 @@ "zigpy_xbee", "zigpy_zigate", "zigpy_znp", - "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.24", "zha==0.0.37"], + "requirements": [ + "bellows==0.39.1", + "pyserial==3.5", + "zha-quirks==0.0.117", + "zigpy-deconz==0.23.2", + "zigpy==0.64.1", + "zigpy-xbee==0.20.1", + "zigpy-zigate==0.12.1", + "zigpy-znp==0.12.2", + "universal-silabs-flasher==0.0.20", + "pyserial-asyncio-fast==0.11" + ], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/number.py b/homeassistant/components/zha/number.py index 263f5262994..9320b4494a4 100644 --- a/homeassistant/components/zha/number.py +++ b/homeassistant/components/zha/number.py @@ -4,25 +4,267 @@ from __future__ import annotations import functools import logging +from typing import TYPE_CHECKING, Any, Self -from homeassistant.components.number import RestoreNumber +from zhaquirks.quirk_ids import DANFOSS_ALLY_THERMOSTAT +from zigpy.quirks.v2 import NumberMetadata +from zigpy.zcl.clusters.hvac import Thermostat + +from homeassistant.components.number import NumberDeviceClass, NumberEntity, NumberMode from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.const import ( + EntityCategory, + Platform, + UnitOfMass, + UnitOfTemperature, + UnitOfTime, +) +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import UndefinedType -from .entity import ZHAEntity -from .helpers import ( +from .core import discovery +from .core.const import ( + CLUSTER_HANDLER_ANALOG_OUTPUT, + CLUSTER_HANDLER_BASIC, + CLUSTER_HANDLER_COLOR, + CLUSTER_HANDLER_INOVELLI, + CLUSTER_HANDLER_LEVEL, + CLUSTER_HANDLER_OCCUPANCY, + CLUSTER_HANDLER_THERMOSTAT, + ENTITY_METADATA, SIGNAL_ADD_ENTITIES, - async_add_entities as zha_async_add_entities, - convert_zha_error_to_ha_error, - get_zha_data, + SIGNAL_ATTR_UPDATED, ) +from .core.helpers import get_zha_data, validate_device_class, validate_unit +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity + +if TYPE_CHECKING: + from .core.cluster_handlers import ClusterHandler + from .core.device import ZHADevice _LOGGER = logging.getLogger(__name__) +STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.NUMBER) +CONFIG_DIAGNOSTIC_MATCH = functools.partial( + ZHA_ENTITIES.config_diagnostic_match, Platform.NUMBER +) + + +UNITS = { + 0: "Square-meters", + 1: "Square-feet", + 2: "Milliamperes", + 3: "Amperes", + 4: "Ohms", + 5: "Volts", + 6: "Kilo-volts", + 7: "Mega-volts", + 8: "Volt-amperes", + 9: "Kilo-volt-amperes", + 10: "Mega-volt-amperes", + 11: "Volt-amperes-reactive", + 12: "Kilo-volt-amperes-reactive", + 13: "Mega-volt-amperes-reactive", + 14: "Degrees-phase", + 15: "Power-factor", + 16: "Joules", + 17: 
"Kilojoules", + 18: "Watt-hours", + 19: "Kilowatt-hours", + 20: "BTUs", + 21: "Therms", + 22: "Ton-hours", + 23: "Joules-per-kilogram-dry-air", + 24: "BTUs-per-pound-dry-air", + 25: "Cycles-per-hour", + 26: "Cycles-per-minute", + 27: "Hertz", + 28: "Grams-of-water-per-kilogram-dry-air", + 29: "Percent-relative-humidity", + 30: "Millimeters", + 31: "Meters", + 32: "Inches", + 33: "Feet", + 34: "Watts-per-square-foot", + 35: "Watts-per-square-meter", + 36: "Lumens", + 37: "Luxes", + 38: "Foot-candles", + 39: "Kilograms", + 40: "Pounds-mass", + 41: "Tons", + 42: "Kilograms-per-second", + 43: "Kilograms-per-minute", + 44: "Kilograms-per-hour", + 45: "Pounds-mass-per-minute", + 46: "Pounds-mass-per-hour", + 47: "Watts", + 48: "Kilowatts", + 49: "Megawatts", + 50: "BTUs-per-hour", + 51: "Horsepower", + 52: "Tons-refrigeration", + 53: "Pascals", + 54: "Kilopascals", + 55: "Bars", + 56: "Pounds-force-per-square-inch", + 57: "Centimeters-of-water", + 58: "Inches-of-water", + 59: "Millimeters-of-mercury", + 60: "Centimeters-of-mercury", + 61: "Inches-of-mercury", + 62: "°C", + 63: "°K", + 64: "°F", + 65: "Degree-days-Celsius", + 66: "Degree-days-Fahrenheit", + 67: "Years", + 68: "Months", + 69: "Weeks", + 70: "Days", + 71: "Hours", + 72: "Minutes", + 73: "Seconds", + 74: "Meters-per-second", + 75: "Kilometers-per-hour", + 76: "Feet-per-second", + 77: "Feet-per-minute", + 78: "Miles-per-hour", + 79: "Cubic-feet", + 80: "Cubic-meters", + 81: "Imperial-gallons", + 82: "Liters", + 83: "Us-gallons", + 84: "Cubic-feet-per-minute", + 85: "Cubic-meters-per-second", + 86: "Imperial-gallons-per-minute", + 87: "Liters-per-second", + 88: "Liters-per-minute", + 89: "Us-gallons-per-minute", + 90: "Degrees-angular", + 91: "Degrees-Celsius-per-hour", + 92: "Degrees-Celsius-per-minute", + 93: "Degrees-Fahrenheit-per-hour", + 94: "Degrees-Fahrenheit-per-minute", + 95: None, + 96: "Parts-per-million", + 97: "Parts-per-billion", + 98: "%", + 99: "Percent-per-second", + 100: "Per-minute", + 101: "Per-second", + 102: "Psi-per-Degree-Fahrenheit", + 103: "Radians", + 104: "Revolutions-per-minute", + 105: "Currency1", + 106: "Currency2", + 107: "Currency3", + 108: "Currency4", + 109: "Currency5", + 110: "Currency6", + 111: "Currency7", + 112: "Currency8", + 113: "Currency9", + 114: "Currency10", + 115: "Square-inches", + 116: "Square-centimeters", + 117: "BTUs-per-pound", + 118: "Centimeters", + 119: "Pounds-mass-per-second", + 120: "Delta-Degrees-Fahrenheit", + 121: "Delta-Degrees-Kelvin", + 122: "Kilohms", + 123: "Megohms", + 124: "Millivolts", + 125: "Kilojoules-per-kilogram", + 126: "Megajoules", + 127: "Joules-per-degree-Kelvin", + 128: "Joules-per-kilogram-degree-Kelvin", + 129: "Kilohertz", + 130: "Megahertz", + 131: "Per-hour", + 132: "Milliwatts", + 133: "Hectopascals", + 134: "Millibars", + 135: "Cubic-meters-per-hour", + 136: "Liters-per-hour", + 137: "Kilowatt-hours-per-square-meter", + 138: "Kilowatt-hours-per-square-foot", + 139: "Megajoules-per-square-meter", + 140: "Megajoules-per-square-foot", + 141: "Watts-per-square-meter-Degree-Kelvin", + 142: "Cubic-feet-per-second", + 143: "Percent-obscuration-per-foot", + 144: "Percent-obscuration-per-meter", + 145: "Milliohms", + 146: "Megawatt-hours", + 147: "Kilo-BTUs", + 148: "Mega-BTUs", + 149: "Kilojoules-per-kilogram-dry-air", + 150: "Megajoules-per-kilogram-dry-air", + 151: "Kilojoules-per-degree-Kelvin", + 152: "Megajoules-per-degree-Kelvin", + 153: "Newton", + 154: "Grams-per-second", + 155: "Grams-per-minute", + 156: "Tons-per-hour", + 157: 
"Kilo-BTUs-per-hour", + 158: "Hundredths-seconds", + 159: "Milliseconds", + 160: "Newton-meters", + 161: "Millimeters-per-second", + 162: "Millimeters-per-minute", + 163: "Meters-per-minute", + 164: "Meters-per-hour", + 165: "Cubic-meters-per-minute", + 166: "Meters-per-second-per-second", + 167: "Amperes-per-meter", + 168: "Amperes-per-square-meter", + 169: "Ampere-square-meters", + 170: "Farads", + 171: "Henrys", + 172: "Ohm-meters", + 173: "Siemens", + 174: "Siemens-per-meter", + 175: "Teslas", + 176: "Volts-per-degree-Kelvin", + 177: "Volts-per-meter", + 178: "Webers", + 179: "Candelas", + 180: "Candelas-per-square-meter", + 181: "Kelvins-per-hour", + 182: "Kelvins-per-minute", + 183: "Joule-seconds", + 185: "Square-meters-per-Newton", + 186: "Kilogram-per-cubic-meter", + 187: "Newton-seconds", + 188: "Newtons-per-meter", + 189: "Watts-per-meter-per-degree-Kelvin", +} + +ICONS = { + 0: "mdi:temperature-celsius", + 1: "mdi:water-percent", + 2: "mdi:gauge", + 3: "mdi:speedometer", + 4: "mdi:percent", + 5: "mdi:air-filter", + 6: "mdi:fan", + 7: "mdi:flash", + 8: "mdi:current-ac", + 9: "mdi:flash", + 10: "mdi:flash", + 11: "mdi:flash", + 12: "mdi:counter", + 13: "mdi:thermometer-lines", + 14: "mdi:timer", + 15: "mdi:palette", + 16: "mdi:brightness-percent", +} + async def async_setup_entry( hass: HomeAssistant, @@ -37,53 +279,875 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, async_add_entities, ZhaNumber, entities_to_create + discovery.async_add_entities, + async_add_entities, + entities_to_create, ), ) config_entry.async_on_unload(unsub) -class ZhaNumber(ZHAEntity, RestoreNumber): +@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ANALOG_OUTPUT) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class ZhaNumber(ZhaEntity, NumberEntity): """Representation of a ZHA Number entity.""" - @property - def name(self) -> str | UndefinedType | None: - """Return the name of the number entity.""" - if (description := self.entity_data.entity.description) is None: - return super().name + _attr_translation_key: str = "number" - # The name of this entity is reported by the device itself. - # For backwards compatibility, we keep the same format as before. This - # should probably be changed in the future to omit the prefix. 
- return f"{super().name} {description}" + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Init this entity.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._analog_output_cluster_handler = self.cluster_handlers[ + CLUSTER_HANDLER_ANALOG_OUTPUT + ] + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + self.async_accept_signal( + self._analog_output_cluster_handler, + SIGNAL_ATTR_UPDATED, + self.async_set_state, + ) @property def native_value(self) -> float | None: """Return the current value.""" - return self.entity_data.entity.native_value + return self._analog_output_cluster_handler.present_value @property def native_min_value(self) -> float: """Return the minimum value.""" - return self.entity_data.entity.native_min_value + min_present_value = self._analog_output_cluster_handler.min_present_value + if min_present_value is not None: + return min_present_value + return 0 @property def native_max_value(self) -> float: """Return the maximum value.""" - return self.entity_data.entity.native_max_value + max_present_value = self._analog_output_cluster_handler.max_present_value + if max_present_value is not None: + return max_present_value + return 1023 @property def native_step(self) -> float | None: """Return the value step.""" - return self.entity_data.entity.native_step + resolution = self._analog_output_cluster_handler.resolution + if resolution is not None: + return resolution + return super().native_step + + @property + def name(self) -> str | UndefinedType | None: + """Return the name of the number entity.""" + description = self._analog_output_cluster_handler.description + if description is not None and len(description) > 0: + return f"{super().name} {description}" + return super().name + + @property + def icon(self) -> str | None: + """Return the icon to be used for this entity.""" + application_type = self._analog_output_cluster_handler.application_type + if application_type is not None: + return ICONS.get(application_type >> 16, super().icon) + return super().icon @property def native_unit_of_measurement(self) -> str | None: """Return the unit the value is expressed in.""" - return self.entity_data.entity.native_unit_of_measurement + engineering_units = self._analog_output_cluster_handler.engineering_units + return UNITS.get(engineering_units) + + @callback + def async_set_state(self, attr_id, attr_name, value): + """Handle value update from cluster handler.""" + self.async_write_ha_state() - @convert_zha_error_to_ha_error async def async_set_native_value(self, value: float) -> None: """Update the current value from HA.""" - await self.entity_data.entity.async_set_native_value(value=value) + await self._analog_output_cluster_handler.async_set_present_value(float(value)) self.async_write_ha_state() + + async def async_update(self) -> None: + """Attempt to retrieve the state of the entity.""" + await super().async_update() + _LOGGER.debug("polling current state") + if self._analog_output_cluster_handler: + value = await self._analog_output_cluster_handler.get_attribute_value( + "present_value", from_cache=False + ) + _LOGGER.debug("read value=%s", value) + + +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class ZHANumberConfigurationEntity(ZhaEntity, NumberEntity): + """Representation of a ZHA number configuration entity.""" + + _attr_entity_category = 
EntityCategory.CONFIG + _attr_native_step: float = 1.0 + _attr_multiplier: float = 1 + _attribute_name: str + + @classmethod + def create_entity( + cls, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> Self | None: + """Entity Factory. + + Return entity if it is a supported configuration, otherwise return None + """ + cluster_handler = cluster_handlers[0] + if ENTITY_METADATA not in kwargs and ( + cls._attribute_name in cluster_handler.cluster.unsupported_attributes + or cls._attribute_name not in cluster_handler.cluster.attributes_by_name + or cluster_handler.cluster.get(cls._attribute_name) is None + ): + _LOGGER.debug( + "%s is not supported - skipping %s entity creation", + cls._attribute_name, + cls.__name__, + ) + return None + + return cls(unique_id, zha_device, cluster_handlers, **kwargs) + + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Init this number configuration entity.""" + self._cluster_handler: ClusterHandler = cluster_handlers[0] + if ENTITY_METADATA in kwargs: + self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + + def _init_from_quirks_metadata(self, entity_metadata: NumberMetadata) -> None: + """Init this entity from the quirks metadata.""" + super()._init_from_quirks_metadata(entity_metadata) + self._attribute_name = entity_metadata.attribute_name + + if entity_metadata.min is not None: + self._attr_native_min_value = entity_metadata.min + if entity_metadata.max is not None: + self._attr_native_max_value = entity_metadata.max + if entity_metadata.step is not None: + self._attr_native_step = entity_metadata.step + if entity_metadata.multiplier is not None: + self._attr_multiplier = entity_metadata.multiplier + if entity_metadata.device_class is not None: + self._attr_device_class = validate_device_class( + NumberDeviceClass, + entity_metadata.device_class, + Platform.NUMBER.value, + _LOGGER, + ) + if entity_metadata.device_class is None and entity_metadata.unit is not None: + self._attr_native_unit_of_measurement = validate_unit( + entity_metadata.unit + ).value + + @property + def native_value(self) -> float: + """Return the current value.""" + return ( + self._cluster_handler.cluster.get(self._attribute_name) + * self._attr_multiplier + ) + + async def async_set_native_value(self, value: float) -> None: + """Update the current value from HA.""" + await self._cluster_handler.write_attributes_safe( + {self._attribute_name: int(value / self._attr_multiplier)} + ) + self.async_write_ha_state() + + async def async_update(self) -> None: + """Attempt to retrieve the state of the entity.""" + await super().async_update() + _LOGGER.debug("polling current state") + if self._cluster_handler: + value = await self._cluster_handler.get_attribute_value( + self._attribute_name, from_cache=False + ) + _LOGGER.debug("read value=%s", value) + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", + models={"lumi.motion.ac02", "lumi.motion.agl04"}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class AqaraMotionDetectionInterval(ZHANumberConfigurationEntity): + """Representation of a ZHA motion detection interval configuration entity.""" + + _unique_id_suffix = "detection_interval" + _attr_native_min_value: float = 2 + _attr_native_max_value: float = 65535 + _attribute_name = "detection_interval" + _attr_translation_key: 
str = "detection_interval" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class OnOffTransitionTimeConfigurationEntity(ZHANumberConfigurationEntity): + """Representation of a ZHA on off transition time configuration entity.""" + + _unique_id_suffix = "on_off_transition_time" + _attr_native_min_value: float = 0x0000 + _attr_native_max_value: float = 0xFFFF + _attribute_name = "on_off_transition_time" + _attr_translation_key: str = "on_off_transition_time" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class OnLevelConfigurationEntity(ZHANumberConfigurationEntity): + """Representation of a ZHA on level configuration entity.""" + + _unique_id_suffix = "on_level" + _attr_native_min_value: float = 0x00 + _attr_native_max_value: float = 0xFF + _attribute_name = "on_level" + _attr_translation_key: str = "on_level" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class OnTransitionTimeConfigurationEntity(ZHANumberConfigurationEntity): + """Representation of a ZHA on transition time configuration entity.""" + + _unique_id_suffix = "on_transition_time" + _attr_native_min_value: float = 0x0000 + _attr_native_max_value: float = 0xFFFE + _attribute_name = "on_transition_time" + _attr_translation_key: str = "on_transition_time" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class OffTransitionTimeConfigurationEntity(ZHANumberConfigurationEntity): + """Representation of a ZHA off transition time configuration entity.""" + + _unique_id_suffix = "off_transition_time" + _attr_native_min_value: float = 0x0000 + _attr_native_max_value: float = 0xFFFE + _attribute_name = "off_transition_time" + _attr_translation_key: str = "off_transition_time" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class DefaultMoveRateConfigurationEntity(ZHANumberConfigurationEntity): + """Representation of a ZHA default move rate configuration entity.""" + + _unique_id_suffix = "default_move_rate" + _attr_native_min_value: float = 0x00 + _attr_native_max_value: float = 0xFE + _attribute_name = "default_move_rate" + _attr_translation_key: str = "default_move_rate" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class StartUpCurrentLevelConfigurationEntity(ZHANumberConfigurationEntity): + """Representation of a ZHA startup current level configuration entity.""" + + _unique_id_suffix = "start_up_current_level" + _attr_native_min_value: float = 0x00 + _attr_native_max_value: float = 0xFF + _attribute_name = "start_up_current_level" + _attr_translation_key: str = "start_up_current_level" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_COLOR) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class StartUpColorTemperatureConfigurationEntity(ZHANumberConfigurationEntity): + """Representation of a ZHA startup color temperature configuration entity.""" + + _unique_id_suffix = "start_up_color_temperature" + _attr_native_min_value: float = 153 + _attr_native_max_value: float = 500 + _attribute_name = "start_up_color_temperature" + _attr_translation_key: str = 
"start_up_color_temperature" + + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Init this ZHA startup color temperature entity.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + if self._cluster_handler: + self._attr_native_min_value: float = self._cluster_handler.min_mireds + self._attr_native_max_value: float = self._cluster_handler.max_mireds + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="tuya_manufacturer", + manufacturers={ + "_TZE200_htnnfasr", + }, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class TimerDurationMinutes(ZHANumberConfigurationEntity): + """Representation of a ZHA timer duration configuration entity.""" + + _unique_id_suffix = "timer_duration" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0x00 + _attr_native_max_value: float = 0x257 + _attr_native_unit_of_measurement: str | None = UNITS[72] + _attribute_name = "timer_duration" + _attr_translation_key: str = "timer_duration" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names="ikea_airpurifier") +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class FilterLifeTime(ZHANumberConfigurationEntity): + """Representation of a ZHA filter lifetime configuration entity.""" + + _unique_id_suffix = "filter_life_time" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0x00 + _attr_native_max_value: float = 0xFFFFFFFF + _attr_native_unit_of_measurement: str | None = UNITS[72] + _attribute_name = "filter_life_time" + _attr_translation_key: str = "filter_life_time" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_BASIC, + manufacturers={"TexasInstruments"}, + models={"ti.router"}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class TiRouterTransmitPower(ZHANumberConfigurationEntity): + """Representation of a ZHA TI transmit power configuration entity.""" + + _unique_id_suffix = "transmit_power" + _attr_native_min_value: float = -20 + _attr_native_max_value: float = 20 + _attribute_name = "transmit_power" + _attr_translation_key: str = "transmit_power" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliRemoteDimmingUpSpeed(ZHANumberConfigurationEntity): + """Inovelli remote dimming up speed configuration entity.""" + + _unique_id_suffix = "dimming_speed_up_remote" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 126 + _attribute_name = "dimming_speed_up_remote" + _attr_translation_key: str = "dimming_speed_up_remote" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliButtonDelay(ZHANumberConfigurationEntity): + """Inovelli button delay configuration entity.""" + + _unique_id_suffix = "button_delay" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 9 + _attribute_name = "button_delay" + _attr_translation_key: str = "button_delay" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliLocalDimmingUpSpeed(ZHANumberConfigurationEntity): + """Inovelli local dimming up speed configuration entity.""" + + _unique_id_suffix = 
"dimming_speed_up_local" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 127 + _attribute_name = "dimming_speed_up_local" + _attr_translation_key: str = "dimming_speed_up_local" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliLocalRampRateOffToOn(ZHANumberConfigurationEntity): + """Inovelli off to on local ramp rate configuration entity.""" + + _unique_id_suffix = "ramp_rate_off_to_on_local" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 127 + _attribute_name = "ramp_rate_off_to_on_local" + _attr_translation_key: str = "ramp_rate_off_to_on_local" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliRemoteDimmingSpeedOffToOn(ZHANumberConfigurationEntity): + """Inovelli off to on remote ramp rate configuration entity.""" + + _unique_id_suffix = "ramp_rate_off_to_on_remote" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 127 + _attribute_name = "ramp_rate_off_to_on_remote" + _attr_translation_key: str = "ramp_rate_off_to_on_remote" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliRemoteDimmingDownSpeed(ZHANumberConfigurationEntity): + """Inovelli remote dimming down speed configuration entity.""" + + _unique_id_suffix = "dimming_speed_down_remote" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 127 + _attribute_name = "dimming_speed_down_remote" + _attr_translation_key: str = "dimming_speed_down_remote" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliLocalDimmingDownSpeed(ZHANumberConfigurationEntity): + """Inovelli local dimming down speed configuration entity.""" + + _unique_id_suffix = "dimming_speed_down_local" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 127 + _attribute_name = "dimming_speed_down_local" + _attr_translation_key: str = "dimming_speed_down_local" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliLocalRampRateOnToOff(ZHANumberConfigurationEntity): + """Inovelli local on to off ramp rate configuration entity.""" + + _unique_id_suffix = "ramp_rate_on_to_off_local" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 127 + _attribute_name = "ramp_rate_on_to_off_local" + _attr_translation_key: str = "ramp_rate_on_to_off_local" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliRemoteDimmingSpeedOnToOff(ZHANumberConfigurationEntity): + """Inovelli remote on to off ramp rate configuration entity.""" + + _unique_id_suffix = "ramp_rate_on_to_off_remote" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 127 + _attribute_name = "ramp_rate_on_to_off_remote" + 
_attr_translation_key: str = "ramp_rate_on_to_off_remote" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliMinimumLoadDimmingLevel(ZHANumberConfigurationEntity): + """Inovelli minimum load dimming level configuration entity.""" + + _unique_id_suffix = "minimum_level" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 1 + _attr_native_max_value: float = 254 + _attribute_name = "minimum_level" + _attr_translation_key: str = "minimum_level" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliMaximumLoadDimmingLevel(ZHANumberConfigurationEntity): + """Inovelli maximum load dimming level configuration entity.""" + + _unique_id_suffix = "maximum_level" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 2 + _attr_native_max_value: float = 255 + _attribute_name = "maximum_level" + _attr_translation_key: str = "maximum_level" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliAutoShutoffTimer(ZHANumberConfigurationEntity): + """Inovelli automatic switch shutoff timer configuration entity.""" + + _unique_id_suffix = "auto_off_timer" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 32767 + _attribute_name = "auto_off_timer" + _attr_translation_key: str = "auto_off_timer" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliQuickStartTime(ZHANumberConfigurationEntity): + """Inovelli fan quick start time configuration entity.""" + + _unique_id_suffix = "quick_start_time" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 10 + _attribute_name = "quick_start_time" + _attr_translation_key: str = "quick_start_time" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliLoadLevelIndicatorTimeout(ZHANumberConfigurationEntity): + """Inovelli load level indicator timeout configuration entity.""" + + _unique_id_suffix = "load_level_indicator_timeout" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 11 + _attribute_name = "load_level_indicator_timeout" + _attr_translation_key: str = "load_level_indicator_timeout" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliDefaultAllLEDOnColor(ZHANumberConfigurationEntity): + """Inovelli default all led color when on configuration entity.""" + + _unique_id_suffix = "led_color_when_on" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 255 + _attribute_name = "led_color_when_on" + _attr_translation_key: str = "led_color_when_on" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliDefaultAllLEDOffColor(ZHANumberConfigurationEntity): + """Inovelli default all led color when off configuration 
entity.""" + + _unique_id_suffix = "led_color_when_off" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 255 + _attribute_name = "led_color_when_off" + _attr_translation_key: str = "led_color_when_off" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliDefaultAllLEDOnIntensity(ZHANumberConfigurationEntity): + """Inovelli default all led intensity when on configuration entity.""" + + _unique_id_suffix = "led_intensity_when_on" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 100 + _attribute_name = "led_intensity_when_on" + _attr_translation_key: str = "led_intensity_when_on" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliDefaultAllLEDOffIntensity(ZHANumberConfigurationEntity): + """Inovelli default all led intensity when off configuration entity.""" + + _unique_id_suffix = "led_intensity_when_off" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 100 + _attribute_name = "led_intensity_when_off" + _attr_translation_key: str = "led_intensity_when_off" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliDoubleTapUpLevel(ZHANumberConfigurationEntity): + """Inovelli double tap up level configuration entity.""" + + _unique_id_suffix = "double_tap_up_level" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 2 + _attr_native_max_value: float = 254 + _attribute_name = "double_tap_up_level" + _attr_translation_key: str = "double_tap_up_level" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliDoubleTapDownLevel(ZHANumberConfigurationEntity): + """Inovelli double tap down level configuration entity.""" + + _unique_id_suffix = "double_tap_down_level" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 254 + _attribute_name = "double_tap_down_level" + _attr_translation_key: str = "double_tap_down_level" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class AqaraPetFeederServingSize(ZHANumberConfigurationEntity): + """Aqara pet feeder serving size configuration entity.""" + + _unique_id_suffix = "serving_size" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 1 + _attr_native_max_value: float = 10 + _attribute_name = "serving_size" + _attr_translation_key: str = "serving_size" + + _attr_mode: NumberMode = NumberMode.BOX + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class AqaraPetFeederPortionWeight(ZHANumberConfigurationEntity): + """Aqara pet feeder portion weight configuration entity.""" + + _unique_id_suffix = "portion_weight" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 1 + _attr_native_max_value: float = 100 + _attribute_name = "portion_weight" + _attr_translation_key: 
str = "portion_weight" + + _attr_mode: NumberMode = NumberMode.BOX + _attr_native_unit_of_measurement: str = UnitOfMass.GRAMS + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class AqaraThermostatAwayTemp(ZHANumberConfigurationEntity): + """Aqara away preset temperature configuration entity.""" + + _unique_id_suffix = "away_preset_temperature" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: float = 5 + _attr_native_max_value: float = 30 + _attr_multiplier: float = 0.01 + _attribute_name = "away_preset_temperature" + _attr_translation_key: str = "away_preset_temperature" + + _attr_mode: NumberMode = NumberMode.SLIDER + _attr_native_unit_of_measurement: str = UnitOfTemperature.CELSIUS + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class ThermostatLocalTempCalibration(ZHANumberConfigurationEntity): + """Local temperature calibration.""" + + _unique_id_suffix = "local_temperature_calibration" + _attr_native_min_value: float = -2.5 + _attr_native_max_value: float = 2.5 + _attr_native_step: float = 0.1 + _attr_multiplier: float = 0.1 + _attribute_name = "local_temperature_calibration" + _attr_translation_key: str = "local_temperature_calibration" + + _attr_mode: NumberMode = NumberMode.SLIDER + _attr_native_unit_of_measurement: str = UnitOfTemperature.CELSIUS + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + models={"TRVZB"}, + stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class SonoffThermostatLocalTempCalibration(ThermostatLocalTempCalibration): + """Local temperature calibration for the Sonoff TRVZB.""" + + _attr_native_min_value: float = -7 + _attr_native_max_value: float = 7 + _attr_native_step: float = 0.2 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_OCCUPANCY, models={"SNZB-06P"} +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class SonoffPresenceSenorTimeout(ZHANumberConfigurationEntity): + """Configuration of Sonoff sensor presence detection timeout.""" + + _unique_id_suffix = "presence_detection_timeout" + _attr_entity_category = EntityCategory.CONFIG + _attr_native_min_value: int = 15 + _attr_native_max_value: int = 60 + _attribute_name = "ultrasonic_o_to_u_delay" + _attr_translation_key: str = "presence_detection_timeout" + + _attr_mode: NumberMode = NumberMode.BOX + + +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class ZCLTemperatureEntity(ZHANumberConfigurationEntity): + """Common entity class for ZCL temperature input.""" + + _attr_native_unit_of_measurement: str = UnitOfTemperature.CELSIUS + _attr_mode: NumberMode = NumberMode.BOX + _attr_native_step: float = 0.01 + _attr_multiplier: float = 0.01 + + +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class ZCLHeatSetpointLimitEntity(ZCLTemperatureEntity): + """Min or max heat setpoint setting on thermostats.""" + + _attr_icon: str = "mdi:thermostat" + _attr_native_step: float = 0.5 + + _min_source = Thermostat.AttributeDefs.abs_min_heat_setpoint_limit.name + _max_source = Thermostat.AttributeDefs.abs_max_heat_setpoint_limit.name + + @property + def native_min_value(self) -> float: + """Return the minimum value.""" + # The spec says 0x954D, which is a 
signed integer, therefore the value is in decimals + min_present_value = self._cluster_handler.cluster.get(self._min_source, -27315) + return min_present_value * self._attr_multiplier + + @property + def native_max_value(self) -> float: + """Return the maximum value.""" + max_present_value = self._cluster_handler.cluster.get(self._max_source, 0x7FFF) + return max_present_value * self._attr_multiplier + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class MaxHeatSetpointLimit(ZCLHeatSetpointLimitEntity): + """Max heat setpoint setting on thermostats. + + Optional thermostat attribute. + """ + + _unique_id_suffix = "max_heat_setpoint_limit" + _attribute_name: str = "max_heat_setpoint_limit" + _attr_translation_key: str = "max_heat_setpoint_limit" + _attr_entity_category = EntityCategory.CONFIG + + _min_source = Thermostat.AttributeDefs.min_heat_setpoint_limit.name + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class MinHeatSetpointLimit(ZCLHeatSetpointLimitEntity): + """Min heat setpoint setting on thermostats. + + Optional thermostat attribute. + """ + + _unique_id_suffix = "min_heat_setpoint_limit" + _attribute_name: str = "min_heat_setpoint_limit" + _attr_translation_key: str = "min_heat_setpoint_limit" + _attr_entity_category = EntityCategory.CONFIG + + _max_source = Thermostat.AttributeDefs.max_heat_setpoint_limit.name + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class DanfossExerciseTriggerTime(ZHANumberConfigurationEntity): + """Danfoss proprietary attribute to set the time to exercise the valve.""" + + _unique_id_suffix = "exercise_trigger_time" + _attribute_name: str = "exercise_trigger_time" + _attr_translation_key: str = "exercise_trigger_time" + _attr_native_min_value: int = 0 + _attr_native_max_value: int = 1439 + _attr_mode: NumberMode = NumberMode.BOX + _attr_native_unit_of_measurement: str = UnitOfTime.MINUTES + _attr_icon: str = "mdi:clock" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class DanfossExternalMeasuredRoomSensor(ZCLTemperatureEntity): + """Danfoss proprietary attribute to communicate the value of the external temperature sensor.""" + + _unique_id_suffix = "external_measured_room_sensor" + _attribute_name: str = "external_measured_room_sensor" + _attr_translation_key: str = "external_temperature_sensor" + _attr_native_min_value: float = -80 + _attr_native_max_value: float = 35 + _attr_icon: str = "mdi:thermometer" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class DanfossLoadRoomMean(ZHANumberConfigurationEntity): + """Danfoss proprietary attribute to set a value for the load.""" + + _unique_id_suffix = "load_room_mean" + _attribute_name: str = "load_room_mean" + _attr_translation_key: str = "load_room_mean" + _attr_native_min_value: int = -8000 + _attr_native_max_value: int = 2000 + _attr_mode: NumberMode = NumberMode.BOX + _attr_icon: str = "mdi:scale-balance" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + 
quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class DanfossRegulationSetpointOffset(ZHANumberConfigurationEntity): + """Danfoss proprietary attribute to set the regulation setpoint offset.""" + + _unique_id_suffix = "regulation_setpoint_offset" + _attribute_name: str = "regulation_setpoint_offset" + _attr_translation_key: str = "regulation_setpoint_offset" + _attr_mode: NumberMode = NumberMode.BOX + _attr_native_unit_of_measurement: str = UnitOfTemperature.CELSIUS + _attr_icon: str = "mdi:thermostat" + _attr_native_min_value: float = -2.5 + _attr_native_max_value: float = 2.5 + _attr_native_step: float = 0.1 + _attr_multiplier = 1 / 10 diff --git a/homeassistant/components/zha/radio_manager.py b/homeassistant/components/zha/radio_manager.py index 82c30b7678a..44b7304c58e 100644 --- a/homeassistant/components/zha/radio_manager.py +++ b/homeassistant/components/zha/radio_manager.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio -from collections.abc import AsyncIterator import contextlib from contextlib import suppress import copy @@ -14,7 +13,6 @@ from typing import Any, Self from bellows.config import CONF_USE_THREAD import voluptuous as vol -from zha.application.const import RadioType from zigpy.application import ControllerApplication import zigpy.backups from zigpy.config import ( @@ -31,13 +29,14 @@ from homeassistant.components import usb from homeassistant.core import HomeAssistant from . import repairs -from .const import ( +from .core.const import ( CONF_RADIO_TYPE, CONF_ZIGPY, DEFAULT_DATABASE_NAME, EZSP_OVERWRITE_EUI64, + RadioType, ) -from .helpers import get_zha_data +from .core.helpers import get_zha_data # Only the common radio types will be autoprobed, ordered by new device popularity. 
# XBee takes too long to probe since it scans through all possible bauds and likely has @@ -158,7 +157,7 @@ class ZhaRadioManager: return mgr @contextlib.asynccontextmanager - async def connect_zigpy_app(self) -> AsyncIterator[ControllerApplication]: + async def connect_zigpy_app(self) -> ControllerApplication: """Connect to the radio with the current config and then clean up.""" assert self.radio_type is not None @@ -178,6 +177,7 @@ class ZhaRadioManager: app_config[CONF_DEVICE] = self.device_settings app_config[CONF_NWK_BACKUP_ENABLED] = False app_config[CONF_USE_THREAD] = False + app_config = self.radio_type.controller.SCHEMA(app_config) app = await self.radio_type.controller.new( app_config, auto_form=False, start_radio=False diff --git a/homeassistant/components/zha/repairs/__init__.py b/homeassistant/components/zha/repairs/__init__.py index 3fcbdb66bbc..3d8f2553baa 100644 --- a/homeassistant/components/zha/repairs/__init__.py +++ b/homeassistant/components/zha/repairs/__init__.py @@ -8,7 +8,7 @@ from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir -from ..const import DOMAIN +from ..core.const import DOMAIN from .network_settings_inconsistent import ( ISSUE_INCONSISTENT_NETWORK_SETTINGS, NetworkSettingsInconsistentFlow, diff --git a/homeassistant/components/zha/repairs/network_settings_inconsistent.py b/homeassistant/components/zha/repairs/network_settings_inconsistent.py index ef38ebc3d47..2598ff8f98a 100644 --- a/homeassistant/components/zha/repairs/network_settings_inconsistent.py +++ b/homeassistant/components/zha/repairs/network_settings_inconsistent.py @@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers import issue_registry as ir -from ..const import DOMAIN +from ..core.const import DOMAIN from ..radio_manager import ZhaRadioManager _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/zha/repairs/wrong_silabs_firmware.py b/homeassistant/components/zha/repairs/wrong_silabs_firmware.py index 4d6d1ae52d8..3cd22c99ec7 100644 --- a/homeassistant/components/zha/repairs/wrong_silabs_firmware.py +++ b/homeassistant/components/zha/repairs/wrong_silabs_firmware.py @@ -19,7 +19,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir -from ..const import DOMAIN +from ..core.const import DOMAIN _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/zha/select.py b/homeassistant/components/zha/select.py index fdb47b550fe..026a85fbfdc 100644 --- a/homeassistant/components/zha/select.py +++ b/homeassistant/components/zha/select.py @@ -2,26 +2,56 @@ from __future__ import annotations +from enum import Enum import functools import logging -from typing import Any +from typing import TYPE_CHECKING, Any, Self + +from zhaquirks.danfoss import thermostat as danfoss_thermostat +from zhaquirks.quirk_ids import ( + DANFOSS_ALLY_THERMOSTAT, + TUYA_PLUG_MANUFACTURER, + TUYA_PLUG_ONOFF, +) +from zhaquirks.xiaomi.aqara.magnet_ac01 import OppleCluster as MagnetAC01OppleCluster +from zhaquirks.xiaomi.aqara.switch_acn047 import OppleCluster as T2RelayOppleCluster +from zigpy import types +from zigpy.quirks.v2 import ZCLEnumMetadata +from zigpy.zcl.clusters.general import OnOff +from zigpy.zcl.clusters.security import IasWd from 
homeassistant.components.select import SelectEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant, State, callback +from homeassistant.const import STATE_UNKNOWN, EntityCategory, Platform +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .entity import ZHAEntity -from .helpers import ( +from .core import discovery +from .core.const import ( + CLUSTER_HANDLER_HUE_OCCUPANCY, + CLUSTER_HANDLER_IAS_WD, + CLUSTER_HANDLER_INOVELLI, + CLUSTER_HANDLER_OCCUPANCY, + CLUSTER_HANDLER_ON_OFF, + CLUSTER_HANDLER_THERMOSTAT, + ENTITY_METADATA, SIGNAL_ADD_ENTITIES, - EntityData, - async_add_entities as zha_async_add_entities, - convert_zha_error_to_ha_error, - get_zha_data, + SIGNAL_ATTR_UPDATED, + Strobe, ) +from .core.helpers import get_zha_data +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity +if TYPE_CHECKING: + from .core.cluster_handlers import ClusterHandler + from .core.device import ZHADevice + + +CONFIG_DIAGNOSTIC_MATCH = functools.partial( + ZHA_ENTITIES.config_diagnostic_match, Platform.SELECT +) _LOGGER = logging.getLogger(__name__) @@ -38,38 +68,731 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, + discovery.async_add_entities, async_add_entities, - ZHAEnumSelectEntity, entities_to_create, ), ) config_entry.async_on_unload(unsub) -class ZHAEnumSelectEntity(ZHAEntity, SelectEntity): +class ZHAEnumSelectEntity(ZhaEntity, SelectEntity): """Representation of a ZHA select entity.""" - def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: - """Initialize the ZHA select entity.""" - super().__init__(entity_data, **kwargs) - self._attr_options = self.entity_data.entity.info_object.options + _attr_entity_category = EntityCategory.CONFIG + _attribute_name: str + _enum: type[Enum] + + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Init this select entity.""" + self._cluster_handler: ClusterHandler = cluster_handlers[0] + self._attribute_name = self._enum.__name__ + self._attr_options = [entry.name.replace("_", " ") for entry in self._enum] + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) @property def current_option(self) -> str | None: """Return the selected entity option to represent the entity state.""" - return self.entity_data.entity.current_option + option = self._cluster_handler.data_cache.get(self._attribute_name) + if option is None: + return None + return option.name.replace("_", " ") - @convert_zha_error_to_ha_error async def async_select_option(self, option: str) -> None: """Change the selected option.""" - await self.entity_data.entity.async_select_option(option=option) + self._cluster_handler.data_cache[self._attribute_name] = self._enum[ + option.replace(" ", "_") + ] self.async_write_ha_state() @callback - def restore_external_state_attributes(self, state: State) -> None: - """Restore entity state.""" - if state.state and state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE): - self.entity_data.entity.restore_external_state_attributes( - state=state.state, + def async_restore_last_state(self, last_state) -> None: + """Restore previous state.""" + if last_state.state and last_state.state != STATE_UNKNOWN: 
+ self._cluster_handler.data_cache[self._attribute_name] = self._enum[ + last_state.state.replace(" ", "_") + ] + + +class ZHANonZCLSelectEntity(ZHAEnumSelectEntity): + """Representation of a ZHA select entity with no ZCL interaction.""" + + @property + def available(self) -> bool: + """Return entity availability.""" + return True + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) +class ZHADefaultToneSelectEntity(ZHANonZCLSelectEntity): + """Representation of a ZHA default siren tone select entity.""" + + _unique_id_suffix = IasWd.Warning.WarningMode.__name__ + _enum = IasWd.Warning.WarningMode + _attr_translation_key: str = "default_siren_tone" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) +class ZHADefaultSirenLevelSelectEntity(ZHANonZCLSelectEntity): + """Representation of a ZHA default siren level select entity.""" + + _unique_id_suffix = IasWd.Warning.SirenLevel.__name__ + _enum = IasWd.Warning.SirenLevel + _attr_translation_key: str = "default_siren_level" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) +class ZHADefaultStrobeLevelSelectEntity(ZHANonZCLSelectEntity): + """Representation of a ZHA default siren strobe level select entity.""" + + _unique_id_suffix = IasWd.StrobeLevel.__name__ + _enum = IasWd.StrobeLevel + _attr_translation_key: str = "default_strobe_level" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) +class ZHADefaultStrobeSelectEntity(ZHANonZCLSelectEntity): + """Representation of a ZHA default siren strobe select entity.""" + + _unique_id_suffix = Strobe.__name__ + _enum = Strobe + _attr_translation_key: str = "default_strobe" + + +class ZCLEnumSelectEntity(ZhaEntity, SelectEntity): + """Representation of a ZHA ZCL enum select entity.""" + + _attribute_name: str + _attr_entity_category = EntityCategory.CONFIG + _enum: type[Enum] + + @classmethod + def create_entity( + cls, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> Self | None: + """Entity Factory. 
+ + Return entity if it is a supported configuration, otherwise return None + """ + cluster_handler = cluster_handlers[0] + if ENTITY_METADATA not in kwargs and ( + cls._attribute_name in cluster_handler.cluster.unsupported_attributes + or cls._attribute_name not in cluster_handler.cluster.attributes_by_name + or cluster_handler.cluster.get(cls._attribute_name) is None + ): + _LOGGER.debug( + "%s is not supported - skipping %s entity creation", + cls._attribute_name, + cls.__name__, ) + return None + + return cls(unique_id, zha_device, cluster_handlers, **kwargs) + + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Init this select entity.""" + self._cluster_handler: ClusterHandler = cluster_handlers[0] + if ENTITY_METADATA in kwargs: + self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) + self._attr_options = [entry.name.replace("_", " ") for entry in self._enum] + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + + def _init_from_quirks_metadata(self, entity_metadata: ZCLEnumMetadata) -> None: + """Init this entity from the quirks metadata.""" + super()._init_from_quirks_metadata(entity_metadata) + self._attribute_name = entity_metadata.attribute_name + self._enum = entity_metadata.enum + + @property + def current_option(self) -> str | None: + """Return the selected entity option to represent the entity state.""" + option = self._cluster_handler.cluster.get(self._attribute_name) + if option is None: + return None + option = self._enum(option) + return option.name.replace("_", " ") + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + await self._cluster_handler.write_attributes_safe( + {self._attribute_name: self._enum[option.replace(" ", "_")]} + ) + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + self.async_accept_signal( + self._cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state + ) + + @callback + def async_set_state(self, attr_id: int, attr_name: str, value: Any): + """Handle state update from cluster handler.""" + self.async_write_ha_state() + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_ON_OFF) +class ZHAStartupOnOffSelectEntity(ZCLEnumSelectEntity): + """Representation of a ZHA startup onoff select entity.""" + + _unique_id_suffix = OnOff.StartUpOnOff.__name__ + _attribute_name = "start_up_on_off" + _enum = OnOff.StartUpOnOff + _attr_translation_key: str = "start_up_on_off" + + +class TuyaPowerOnState(types.enum8): + """Tuya power on state enum.""" + + Off = 0x00 + On = 0x01 + LastState = 0x02 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_ON_OFF, quirk_ids=TUYA_PLUG_ONOFF +) +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="tuya_manufacturer", quirk_ids=TUYA_PLUG_MANUFACTURER +) +class TuyaPowerOnStateSelectEntity(ZCLEnumSelectEntity): + """Representation of a ZHA power on state select entity.""" + + _unique_id_suffix = "power_on_state" + _attribute_name = "power_on_state" + _enum = TuyaPowerOnState + _attr_translation_key: str = "power_on_state" + + +class TuyaBacklightMode(types.enum8): + """Tuya switch backlight mode enum.""" + + Off = 0x00 + LightWhenOn = 0x01 + LightWhenOff = 0x02 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_ON_OFF, quirk_ids=TUYA_PLUG_ONOFF +) +class TuyaBacklightModeSelectEntity(ZCLEnumSelectEntity): + 
"""Representation of a ZHA backlight mode select entity.""" + + _unique_id_suffix = "backlight_mode" + _attribute_name = "backlight_mode" + _enum = TuyaBacklightMode + _attr_translation_key: str = "backlight_mode" + + +class MoesBacklightMode(types.enum8): + """MOES switch backlight mode enum.""" + + Off = 0x00 + LightWhenOn = 0x01 + LightWhenOff = 0x02 + Freeze = 0x03 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="tuya_manufacturer", quirk_ids=TUYA_PLUG_MANUFACTURER +) +class MoesBacklightModeSelectEntity(ZCLEnumSelectEntity): + """Moes devices have a different backlight mode select options.""" + + _unique_id_suffix = "backlight_mode" + _attribute_name = "backlight_mode" + _enum = MoesBacklightMode + _attr_translation_key: str = "backlight_mode" + + +class AqaraMotionSensitivities(types.enum8): + """Aqara motion sensitivities.""" + + Low = 0x01 + Medium = 0x02 + High = 0x03 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", + models={"lumi.motion.ac01", "lumi.motion.ac02", "lumi.motion.agl04"}, +) +class AqaraMotionSensitivity(ZCLEnumSelectEntity): + """Representation of a ZHA motion sensitivity configuration entity.""" + + _unique_id_suffix = "motion_sensitivity" + _attribute_name = "motion_sensitivity" + _enum = AqaraMotionSensitivities + _attr_translation_key: str = "motion_sensitivity" + + +class HueV1MotionSensitivities(types.enum8): + """Hue v1 motion sensitivities.""" + + Low = 0x00 + Medium = 0x01 + High = 0x02 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_HUE_OCCUPANCY, + manufacturers={"Philips", "Signify Netherlands B.V."}, + models={"SML001"}, +) +class HueV1MotionSensitivity(ZCLEnumSelectEntity): + """Representation of a ZHA motion sensitivity configuration entity.""" + + _unique_id_suffix = "motion_sensitivity" + _attribute_name = "sensitivity" + _enum = HueV1MotionSensitivities + _attr_translation_key: str = "motion_sensitivity" + + +class HueV2MotionSensitivities(types.enum8): + """Hue v2 motion sensitivities.""" + + Lowest = 0x00 + Low = 0x01 + Medium = 0x02 + High = 0x03 + Highest = 0x04 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_HUE_OCCUPANCY, + manufacturers={"Philips", "Signify Netherlands B.V."}, + models={"SML002", "SML003", "SML004"}, +) +class HueV2MotionSensitivity(ZCLEnumSelectEntity): + """Representation of a ZHA motion sensitivity configuration entity.""" + + _unique_id_suffix = "motion_sensitivity" + _attribute_name = "sensitivity" + _enum = HueV2MotionSensitivities + _attr_translation_key: str = "motion_sensitivity" + + +class AqaraMonitoringModess(types.enum8): + """Aqara monitoring modes.""" + + Undirected = 0x00 + Left_Right = 0x01 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.motion.ac01"} +) +class AqaraMonitoringMode(ZCLEnumSelectEntity): + """Representation of a ZHA monitoring mode configuration entity.""" + + _unique_id_suffix = "monitoring_mode" + _attribute_name = "monitoring_mode" + _enum = AqaraMonitoringModess + _attr_translation_key: str = "monitoring_mode" + + +class AqaraApproachDistances(types.enum8): + """Aqara approach distances.""" + + Far = 0x00 + Medium = 0x01 + Near = 0x02 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.motion.ac01"} +) +class AqaraApproachDistance(ZCLEnumSelectEntity): + """Representation of a ZHA approach distance configuration entity.""" + + _unique_id_suffix = "approach_distance" + _attribute_name = "approach_distance" + _enum = AqaraApproachDistances + 
_attr_translation_key: str = "approach_distance" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.magnet.ac01"} +) +class AqaraMagnetAC01DetectionDistance(ZCLEnumSelectEntity): + """Representation of a ZHA detection distance configuration entity.""" + + _unique_id_suffix = "detection_distance" + _attribute_name = "detection_distance" + _enum = MagnetAC01OppleCluster.DetectionDistance + _attr_translation_key: str = "detection_distance" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.switch.acn047"} +) +class AqaraT2RelaySwitchMode(ZCLEnumSelectEntity): + """Representation of a ZHA switch mode configuration entity.""" + + _unique_id_suffix = "switch_mode" + _attribute_name = "switch_mode" + _enum = T2RelayOppleCluster.SwitchMode + _attr_translation_key: str = "switch_mode" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.switch.acn047"} +) +class AqaraT2RelaySwitchType(ZCLEnumSelectEntity): + """Representation of a ZHA switch type configuration entity.""" + + _unique_id_suffix = "switch_type" + _attribute_name = "switch_type" + _enum = T2RelayOppleCluster.SwitchType + _attr_translation_key: str = "switch_type" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.switch.acn047"} +) +class AqaraT2RelayStartupOnOff(ZCLEnumSelectEntity): + """Representation of a ZHA startup on off configuration entity.""" + + _unique_id_suffix = "startup_on_off" + _attribute_name = "startup_on_off" + _enum = T2RelayOppleCluster.StartupOnOff + _attr_translation_key: str = "start_up_on_off" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.switch.acn047"} +) +class AqaraT2RelayDecoupledMode(ZCLEnumSelectEntity): + """Representation of a ZHA switch decoupled mode configuration entity.""" + + _unique_id_suffix = "decoupled_mode" + _attribute_name = "decoupled_mode" + _enum = T2RelayOppleCluster.DecoupledMode + _attr_translation_key: str = "decoupled_mode" + + +class InovelliOutputMode(types.enum1): + """Inovelli output mode.""" + + Dimmer = 0x00 + OnOff = 0x01 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliOutputModeEntity(ZCLEnumSelectEntity): + """Inovelli output mode control.""" + + _unique_id_suffix = "output_mode" + _attribute_name = "output_mode" + _enum = InovelliOutputMode + _attr_translation_key: str = "output_mode" + + +class InovelliSwitchType(types.enum8): + """Inovelli switch mode.""" + + Single_Pole = 0x00 + Three_Way_Dumb = 0x01 + Three_Way_AUX = 0x02 + Single_Pole_Full_Sine = 0x03 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM31-SN"} +) +class InovelliSwitchTypeEntity(ZCLEnumSelectEntity): + """Inovelli switch type control.""" + + _unique_id_suffix = "switch_type" + _attribute_name = "switch_type" + _enum = InovelliSwitchType + _attr_translation_key: str = "switch_type" + + +class InovelliFanSwitchType(types.enum1): + """Inovelli fan switch mode.""" + + Load_Only = 0x00 + Three_Way_AUX = 0x01 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} +) +class InovelliFanSwitchTypeEntity(ZCLEnumSelectEntity): + """Inovelli fan switch type control.""" + + _unique_id_suffix = "switch_type" + _attribute_name = "switch_type" + _enum = InovelliFanSwitchType + _attr_translation_key: str = "switch_type" + + +class InovelliLedScalingMode(types.enum1): + """Inovelli led mode.""" + + VZM31SN = 
0x00 + LZW31SN = 0x01 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliLedScalingModeEntity(ZCLEnumSelectEntity): + """Inovelli led mode control.""" + + _unique_id_suffix = "led_scaling_mode" + _attribute_name = "led_scaling_mode" + _enum = InovelliLedScalingMode + _attr_translation_key: str = "led_scaling_mode" + + +class InovelliFanLedScalingMode(types.enum8): + """Inovelli fan led mode.""" + + VZM31SN = 0x00 + Grade_1 = 0x01 + Grade_2 = 0x02 + Grade_3 = 0x03 + Grade_4 = 0x04 + Grade_5 = 0x05 + Grade_6 = 0x06 + Grade_7 = 0x07 + Grade_8 = 0x08 + Grade_9 = 0x09 + Adaptive = 0x0A + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} +) +class InovelliFanLedScalingModeEntity(ZCLEnumSelectEntity): + """Inovelli fan switch led mode control.""" + + _unique_id_suffix = "smart_fan_led_display_levels" + _attribute_name = "smart_fan_led_display_levels" + _enum = InovelliFanLedScalingMode + _attr_translation_key: str = "smart_fan_led_display_levels" + + +class InovelliNonNeutralOutput(types.enum1): + """Inovelli non neutral output selection.""" + + Low = 0x00 + High = 0x01 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliNonNeutralOutputEntity(ZCLEnumSelectEntity): + """Inovelli non neutral output control.""" + + _unique_id_suffix = "increased_non_neutral_output" + _attribute_name = "increased_non_neutral_output" + _enum = InovelliNonNeutralOutput + _attr_translation_key: str = "increased_non_neutral_output" + + +class AqaraFeedingMode(types.enum8): + """Feeding mode.""" + + Manual = 0x00 + Schedule = 0x01 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} +) +class AqaraPetFeederMode(ZCLEnumSelectEntity): + """Representation of an Aqara pet feeder mode configuration entity.""" + + _unique_id_suffix = "feeding_mode" + _attribute_name = "feeding_mode" + _enum = AqaraFeedingMode + _attr_translation_key: str = "feeding_mode" + + +class AqaraThermostatPresetMode(types.enum8): + """Thermostat preset mode.""" + + Manual = 0x00 + Auto = 0x01 + Away = 0x02 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} +) +class AqaraThermostatPreset(ZCLEnumSelectEntity): + """Representation of an Aqara thermostat preset configuration entity.""" + + _unique_id_suffix = "preset" + _attribute_name = "preset" + _enum = AqaraThermostatPresetMode + _attr_translation_key: str = "preset" + + +class SonoffPresenceDetectionSensitivityEnum(types.enum8): + """Enum for detection sensitivity select entity.""" + + Low = 0x01 + Medium = 0x02 + High = 0x03 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_OCCUPANCY, models={"SNZB-06P"} +) +class SonoffPresenceDetectionSensitivity(ZCLEnumSelectEntity): + """Entity to set the detection sensitivity of the Sonoff SNZB-06P.""" + + _unique_id_suffix = "detection_sensitivity" + _attribute_name = "ultrasonic_u_to_o_threshold" + _enum = SonoffPresenceDetectionSensitivityEnum + _attr_translation_key: str = "detection_sensitivity" + + +class KeypadLockoutEnum(types.enum8): + """Keypad lockout options.""" + + Unlock = 0x00 + Lock1 = 0x01 + Lock2 = 0x02 + Lock3 = 0x03 + Lock4 = 0x04 + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names="thermostat_ui") +class KeypadLockout(ZCLEnumSelectEntity): + """Mandatory attribute for thermostat_ui cluster. 
+ + Often only the first two are implemented, and Lock2 to Lock4 should map to Lock1 in the firmware. + This however covers all bases. + """ + + _unique_id_suffix = "keypad_lockout" + _attribute_name: str = "keypad_lockout" + _enum = KeypadLockoutEnum + _attr_translation_key: str = "keypad_lockout" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossExerciseDayOfTheWeek(ZCLEnumSelectEntity): + """Danfoss proprietary attribute for setting the day of the week for exercising.""" + + _unique_id_suffix = "exercise_day_of_week" + _attribute_name = "exercise_day_of_week" + _attr_translation_key: str = "exercise_day_of_week" + _enum = danfoss_thermostat.DanfossExerciseDayOfTheWeekEnum + _attr_icon: str = "mdi:wrench-clock" + + +class DanfossOrientationEnum(types.enum8): + """Vertical or Horizontal.""" + + Horizontal = 0x00 + Vertical = 0x01 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossOrientation(ZCLEnumSelectEntity): + """Danfoss proprietary attribute for setting the orientation of the valve. + + Needed for biasing the internal temperature sensor. + This is implemented as an enum here, but is a boolean on the device. + """ + + _unique_id_suffix = "orientation" + _attribute_name = "orientation" + _attr_translation_key: str = "valve_orientation" + _enum = DanfossOrientationEnum + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossAdaptationRunControl(ZCLEnumSelectEntity): + """Danfoss proprietary attribute for controlling the current adaptation run.""" + + _unique_id_suffix = "adaptation_run_control" + _attribute_name = "adaptation_run_control" + _attr_translation_key: str = "adaptation_run_command" + _enum = danfoss_thermostat.DanfossAdaptationRunControlEnum + + +class DanfossControlAlgorithmScaleFactorEnum(types.enum8): + """The time scale factor for changing the opening of the valve. + + Not all values are given, therefore there are some extrapolated values with a margin of error of about 5 minutes. + This is implemented as an enum here, but is a number on the device. 
+ """ + + quick_5min = 0x01 + + quick_10min = 0x02 # extrapolated + quick_15min = 0x03 # extrapolated + quick_25min = 0x04 # extrapolated + + moderate_30min = 0x05 + + moderate_40min = 0x06 # extrapolated + moderate_50min = 0x07 # extrapolated + moderate_60min = 0x08 # extrapolated + moderate_70min = 0x09 # extrapolated + + slow_80min = 0x0A + + quick_open_disabled = 0x11 # not sure what it does; also requires lower 4 bits to be in [1, 10] I assume + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossControlAlgorithmScaleFactor(ZCLEnumSelectEntity): + """Danfoss proprietary attribute for setting the scale factor of the setpoint filter time constant.""" + + _unique_id_suffix = "control_algorithm_scale_factor" + _attribute_name = "control_algorithm_scale_factor" + _attr_translation_key: str = "setpoint_response_time" + _enum = DanfossControlAlgorithmScaleFactorEnum + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="thermostat_ui", + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossViewingDirection(ZCLEnumSelectEntity): + """Danfoss proprietary attribute for setting the viewing direction of the screen.""" + + _unique_id_suffix = "viewing_direction" + _attribute_name = "viewing_direction" + _attr_translation_key: str = "viewing_direction" + _enum = danfoss_thermostat.DanfossViewingDirectionEnum diff --git a/homeassistant/components/zha/sensor.py b/homeassistant/components/zha/sensor.py index dde000b24b5..99d950dc06a 100644 --- a/homeassistant/components/zha/sensor.py +++ b/homeassistant/components/zha/sensor.py @@ -2,71 +2,115 @@ from __future__ import annotations -from collections.abc import Mapping +import asyncio +from dataclasses import dataclass +from datetime import timedelta +import enum import functools import logging -from typing import Any +import numbers +import random +from typing import TYPE_CHECKING, Any, Self +from zhaquirks.danfoss import thermostat as danfoss_thermostat +from zhaquirks.quirk_ids import DANFOSS_ALLY_THERMOSTAT +from zigpy import types +from zigpy.quirks.v2 import ZCLEnumMetadata, ZCLSensorMetadata +from zigpy.state import Counter, State +from zigpy.zcl.clusters.closures import WindowCovering +from zigpy.zcl.clusters.general import Basic + +from homeassistant.components.climate import HVACAction from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, + SensorEntityDescription, SensorStateClass, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.const import ( + CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + CONCENTRATION_PARTS_PER_BILLION, + CONCENTRATION_PARTS_PER_MILLION, + LIGHT_LUX, + PERCENTAGE, + SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + EntityCategory, + Platform, + UnitOfApparentPower, + UnitOfElectricCurrent, + UnitOfElectricPotential, + UnitOfEnergy, + UnitOfFrequency, + UnitOfMass, + UnitOfPower, + UnitOfPressure, + UnitOfTemperature, + UnitOfTime, + UnitOfVolume, + UnitOfVolumeFlowRate, +) +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import StateType -from .entity import ZHAEntity -from .helpers import ( +from .core import discovery +from .core.const import ( + 
CLUSTER_HANDLER_ANALOG_INPUT, + CLUSTER_HANDLER_BASIC, + CLUSTER_HANDLER_COVER, + CLUSTER_HANDLER_DEVICE_TEMPERATURE, + CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, + CLUSTER_HANDLER_HUMIDITY, + CLUSTER_HANDLER_ILLUMINANCE, + CLUSTER_HANDLER_LEAF_WETNESS, + CLUSTER_HANDLER_POWER_CONFIGURATION, + CLUSTER_HANDLER_PRESSURE, + CLUSTER_HANDLER_SMARTENERGY_METERING, + CLUSTER_HANDLER_SOIL_MOISTURE, + CLUSTER_HANDLER_TEMPERATURE, + CLUSTER_HANDLER_THERMOSTAT, + DATA_ZHA, + ENTITY_METADATA, SIGNAL_ADD_ENTITIES, - EntityData, - async_add_entities as zha_async_add_entities, - exclude_none_values, - get_zha_data, + SIGNAL_ATTR_UPDATED, ) +from .core.helpers import get_zha_data, validate_device_class, validate_unit +from .core.registries import SMARTTHINGS_HUMIDITY_CLUSTER, ZHA_ENTITIES +from .entity import BaseZhaEntity, ZhaEntity + +if TYPE_CHECKING: + from .core.cluster_handlers import ClusterHandler + from .core.device import ZHADevice + +BATTERY_SIZES = { + 0: "No battery", + 1: "Built in", + 2: "Other", + 3: "AA", + 4: "AAA", + 5: "C", + 6: "D", + 7: "CR2", + 8: "CR123A", + 9: "CR2450", + 10: "CR2032", + 11: "CR1632", + 255: "Unknown", +} _LOGGER = logging.getLogger(__name__) -# For backwards compatibility and transparency, all expected extra state attributes are -# explicitly listed below. These should have been sensors themselves but for whatever -# reason were not created as such. They will be migrated to independent sensor entities -# in a future release. -_EXTRA_STATE_ATTRIBUTES: set[str] = { - # Battery - "battery_size", - "battery_quantity", - "battery_voltage", - # Power - "measurement_type", - "apparent_power_max", - "rms_current_max", - "rms_voltage_max", - "ac_frequency_max", - "power_factor_max", - "active_power_max", - # Smart Energy metering - "device_type", - "status", - "zcl_unit_of_measurement", - # Danfoss bitmaps - "In_progress", - "Valve_characteristic_found", - "Valve_characteristic_lost", - "Top_pcb_sensor_error", - "Side_pcb_sensor_error", - "Non_volatile_memory_error", - "Unknown_hw_error", - "Motor_error", - "Invalid_internal_communication", - "Invalid_clock_information", - "Radio_communication_error", - "Encoder_jammed", - "Low_battery", - "Critical_low_battery", -} +CLUSTER_HANDLER_ST_HUMIDITY_CLUSTER = ( + f"cluster_handler_0x{SMARTTHINGS_HUMIDITY_CLUSTER:04x}" +) +STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.SENSOR) +MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.SENSOR) +CONFIG_DIAGNOSTIC_MATCH = functools.partial( + ZHA_ENTITIES.config_diagnostic_match, Platform.SENSOR +) async def async_setup_entry( @@ -82,76 +126,1504 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, async_add_entities, Sensor, entities_to_create + discovery.async_add_entities, + async_add_entities, + entities_to_create, ), ) config_entry.async_on_unload(unsub) # pylint: disable-next=hass-invalid-inheritance # needs fixing -class Sensor(ZHAEntity, SensorEntity): - """ZHA sensor.""" +class Sensor(ZhaEntity, SensorEntity): + """Base ZHA sensor.""" - def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: - """Initialize the ZHA select entity.""" - super().__init__(entity_data, **kwargs) - entity = self.entity_data.entity + _attribute_name: int | str | None = None + _decimals: int = 1 + _divisor: int = 1 + _multiplier: int | float = 1 - if entity.device_class is not None: - self._attr_device_class = SensorDeviceClass(entity.device_class) + @classmethod + def create_entity( + cls, + unique_id: 
str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> Self | None: + """Entity Factory. - if entity.state_class is not None: - self._attr_state_class = SensorStateClass(entity.state_class) - - if hasattr(entity.info_object, "unit") and entity.info_object.unit is not None: - self._attr_native_unit_of_measurement = entity.info_object.unit - - if ( - hasattr(entity, "entity_description") - and entity.entity_description is not None + Return entity if it is a supported configuration, otherwise return None + """ + cluster_handler = cluster_handlers[0] + if ENTITY_METADATA not in kwargs and ( + cls._attribute_name in cluster_handler.cluster.unsupported_attributes + or cls._attribute_name not in cluster_handler.cluster.attributes_by_name ): - entity_description = entity.entity_description + _LOGGER.debug( + "%s is not supported - skipping %s entity creation", + cls._attribute_name, + cls.__name__, + ) + return None - if entity_description.state_class is not None: - self._attr_state_class = SensorStateClass( - entity_description.state_class.value - ) + return cls(unique_id, zha_device, cluster_handlers, **kwargs) - if entity_description.scale is not None: - self._attr_scale = entity_description.scale + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Init this sensor.""" + self._cluster_handler: ClusterHandler = cluster_handlers[0] + if ENTITY_METADATA in kwargs: + self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - if entity_description.native_unit_of_measurement is not None: - self._attr_native_unit_of_measurement = ( - entity_description.native_unit_of_measurement - ) + def _init_from_quirks_metadata(self, entity_metadata: ZCLSensorMetadata) -> None: + """Init this entity from the quirks metadata.""" + super()._init_from_quirks_metadata(entity_metadata) + self._attribute_name = entity_metadata.attribute_name + if entity_metadata.divisor is not None: + self._divisor = entity_metadata.divisor + if entity_metadata.multiplier is not None: + self._multiplier = entity_metadata.multiplier + if entity_metadata.device_class is not None: + self._attr_device_class = validate_device_class( + SensorDeviceClass, + entity_metadata.device_class, + Platform.SENSOR.value, + _LOGGER, + ) + if entity_metadata.device_class is None and entity_metadata.unit is not None: + self._attr_native_unit_of_measurement = validate_unit( + entity_metadata.unit + ).value - if entity_description.device_class is not None: - self._attr_device_class = SensorDeviceClass( - entity_description.device_class.value - ) + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + self.async_accept_signal( + self._cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state + ) @property def native_value(self) -> StateType: """Return the state of the entity.""" - return self.entity_data.entity.native_value - - @property - def extra_state_attributes(self) -> Mapping[str, Any] | None: - """Return entity specific state attributes.""" - entity = self.entity_data.entity - if entity.extra_state_attribute_names is None: + assert self._attribute_name is not None + raw_state = self._cluster_handler.cluster.get(self._attribute_name) + if raw_state is None: return None + return self.formatter(raw_state) - if not entity.extra_state_attribute_names <= _EXTRA_STATE_ATTRIBUTES: - 
_LOGGER.warning( - "Unexpected extra state attributes found for sensor %s: %s", - entity, - entity.extra_state_attribute_names - _EXTRA_STATE_ATTRIBUTES, + @callback + def async_set_state(self, attr_id: int, attr_name: str, value: Any) -> None: + """Handle state update from cluster handler.""" + self.async_write_ha_state() + + def formatter(self, value: int | enum.IntEnum) -> int | float | str | None: + """Numeric pass-through formatter.""" + if self._decimals > 0: + return round( + float(value * self._multiplier) / self._divisor, self._decimals + ) + return round(float(value * self._multiplier) / self._divisor) + + +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class PollableSensor(Sensor): + """Base ZHA sensor that polls for state.""" + + _use_custom_polling: bool = True + + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Init this sensor.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._cancel_refresh_handle: CALLBACK_TYPE | None = None + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + if self._use_custom_polling: + refresh_interval = random.randint(30, 60) + self._cancel_refresh_handle = async_track_time_interval( + self.hass, self._refresh, timedelta(seconds=refresh_interval) + ) + self.debug("started polling with refresh interval of %s", refresh_interval) + + async def async_will_remove_from_hass(self) -> None: + """Disconnect entity object when removed.""" + if self._cancel_refresh_handle is not None: + self._cancel_refresh_handle() + self._cancel_refresh_handle = None + self.debug("stopped polling during device removal") + await super().async_will_remove_from_hass() + + async def _refresh(self, time): + """Call async_update at a constrained random interval.""" + if self._zha_device.available and self.hass.data[DATA_ZHA].allow_polling: + self.debug("polling for updated state") + await self.async_update() + self.async_write_ha_state() + else: + self.debug( + "skipping polling for updated state, available: %s, allow polled requests: %s", + self._zha_device.available, + self.hass.data[DATA_ZHA].allow_polling, ) - return exclude_none_values( - { - name: entity.state.get(name) - for name in entity.extra_state_attribute_names - } + +class DeviceCounterSensor(BaseZhaEntity, SensorEntity): + """Device counter sensor.""" + + _attr_should_poll = True + _attr_state_class: SensorStateClass = SensorStateClass.TOTAL + _attr_entity_category = EntityCategory.DIAGNOSTIC + _attr_entity_registry_enabled_default = False + + @classmethod + def create_entity( + cls, + unique_id: str, + zha_device: ZHADevice, + counter_groups: str, + counter_group: str, + counter: str, + **kwargs: Any, + ) -> Self | None: + """Entity Factory. 
+ + Return entity if it is a supported configuration, otherwise return None + """ + return cls( + unique_id, zha_device, counter_groups, counter_group, counter, **kwargs ) + + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + counter_groups: str, + counter_group: str, + counter: str, + **kwargs: Any, + ) -> None: + """Init this sensor.""" + super().__init__(unique_id, zha_device, **kwargs) + state: State = self._zha_device.gateway.application_controller.state + self._zigpy_counter: Counter = ( + getattr(state, counter_groups).get(counter_group, {}).get(counter, None) + ) + self._attr_name: str = self._zigpy_counter.name + self.remove_future: asyncio.Future + + @property + def available(self) -> bool: + """Return entity availability.""" + return self._zha_device.available + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + self.remove_future = self.hass.loop.create_future() + self._zha_device.gateway.register_entity_reference( + self._zha_device.ieee, + self.entity_id, + self._zha_device, + {}, + self.device_info, + self.remove_future, + ) + + async def async_will_remove_from_hass(self) -> None: + """Disconnect entity object when removed.""" + await super().async_will_remove_from_hass() + self.zha_device.gateway.remove_entity_reference(self) + self.remove_future.set_result(True) + + @property + def native_value(self) -> StateType: + """Return the state of the entity.""" + return self._zigpy_counter.value + + async def async_update(self) -> None: + """Retrieve latest state.""" + self.async_write_ha_state() + + +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class EnumSensor(Sensor): + """Sensor with value from enum.""" + + _attr_device_class: SensorDeviceClass = SensorDeviceClass.ENUM + _enum: type[enum.Enum] + + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Init this sensor.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._attr_options = [e.name for e in self._enum] + + def _init_from_quirks_metadata(self, entity_metadata: ZCLEnumMetadata) -> None: + """Init this entity from the quirks metadata.""" + ZhaEntity._init_from_quirks_metadata(self, entity_metadata) # noqa: SLF001 + self._attribute_name = entity_metadata.attribute_name + self._enum = entity_metadata.enum + + def formatter(self, value: int) -> str | None: + """Use name of enum.""" + assert self._enum is not None + return self._enum(value).name + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_ANALOG_INPUT, + manufacturers="Digi", + stop_on_match_group=CLUSTER_HANDLER_ANALOG_INPUT, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class AnalogInput(Sensor): + """Sensor that displays analog input values.""" + + _attribute_name = "present_value" + _attr_translation_key: str = "analog_input" + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_POWER_CONFIGURATION) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class Battery(Sensor): + """Battery sensor of power configuration cluster.""" + + _attribute_name = "battery_percentage_remaining" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.BATTERY + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _attr_entity_category = EntityCategory.DIAGNOSTIC + _attr_native_unit_of_measurement = PERCENTAGE + + @classmethod + def create_entity( + cls, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: 
list[ClusterHandler], + **kwargs: Any, + ) -> Self | None: + """Entity Factory. + + Unlike any other entity, PowerConfiguration cluster may not support + battery_percent_remaining attribute, but zha-device-handlers takes care of it + so create the entity regardless + """ + if zha_device.is_mains_powered: + return None + return cls(unique_id, zha_device, cluster_handlers, **kwargs) + + @staticmethod + def formatter(value: int) -> int | None: + """Return the state of the entity.""" + # per zcl specs battery percent is reported at 200% ¯\_(ツ)_/¯ + if not isinstance(value, numbers.Number) or value == -1 or value == 255: + return None + return round(value / 2) + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return device state attrs for battery sensors.""" + state_attrs = {} + battery_size = self._cluster_handler.cluster.get("battery_size") + if battery_size is not None: + state_attrs["battery_size"] = BATTERY_SIZES.get(battery_size, "Unknown") + battery_quantity = self._cluster_handler.cluster.get("battery_quantity") + if battery_quantity is not None: + state_attrs["battery_quantity"] = battery_quantity + battery_voltage = self._cluster_handler.cluster.get("battery_voltage") + if battery_voltage is not None: + state_attrs["battery_voltage"] = round(battery_voltage / 10, 2) + return state_attrs + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, + stop_on_match_group=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, + models={"VZM31-SN", "SP 234", "outletv4"}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class ElectricalMeasurement(PollableSensor): + """Active power measurement.""" + + _use_custom_polling: bool = False + _attribute_name = "active_power" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.POWER + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _attr_native_unit_of_measurement: str = UnitOfPower.WATT + _div_mul_prefix: str | None = "ac_power" + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return device state attrs for sensor.""" + attrs = {} + if self._cluster_handler.measurement_type is not None: + attrs["measurement_type"] = self._cluster_handler.measurement_type + + max_attr_name = f"{self._attribute_name}_max" + + try: + max_v = self._cluster_handler.cluster.get(max_attr_name) + except KeyError: + pass + else: + if max_v is not None: + attrs[max_attr_name] = str(self.formatter(max_v)) + + return attrs + + def formatter(self, value: int) -> int | float: + """Return 'normalized' value.""" + if self._div_mul_prefix: + multiplier = getattr( + self._cluster_handler, f"{self._div_mul_prefix}_multiplier" + ) + divisor = getattr(self._cluster_handler, f"{self._div_mul_prefix}_divisor") + else: + multiplier = self._multiplier + divisor = self._divisor + value = float(value * multiplier) / divisor + if value < 100 and divisor > 1: + return round(value, self._decimals) + return round(value) + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, + stop_on_match_group=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class PolledElectricalMeasurement(ElectricalMeasurement): + """Polled active power measurement.""" + + _use_custom_polling: bool = True + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class ElectricalMeasurementApparentPower(PolledElectricalMeasurement): + """Apparent power 
measurement.""" + + _attribute_name = "apparent_power" + _unique_id_suffix = "apparent_power" + _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor + _attr_device_class: SensorDeviceClass = SensorDeviceClass.APPARENT_POWER + _attr_native_unit_of_measurement = UnitOfApparentPower.VOLT_AMPERE + _div_mul_prefix = "ac_power" + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class ElectricalMeasurementRMSCurrent(PolledElectricalMeasurement): + """RMS current measurement.""" + + _attribute_name = "rms_current" + _unique_id_suffix = "rms_current" + _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor + _attr_device_class: SensorDeviceClass = SensorDeviceClass.CURRENT + _attr_native_unit_of_measurement = UnitOfElectricCurrent.AMPERE + _div_mul_prefix = "ac_current" + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class ElectricalMeasurementRMSVoltage(PolledElectricalMeasurement): + """RMS Voltage measurement.""" + + _attribute_name = "rms_voltage" + _unique_id_suffix = "rms_voltage" + _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor + _attr_device_class: SensorDeviceClass = SensorDeviceClass.VOLTAGE + _attr_native_unit_of_measurement = UnitOfElectricPotential.VOLT + _div_mul_prefix = "ac_voltage" + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class ElectricalMeasurementFrequency(PolledElectricalMeasurement): + """Frequency measurement.""" + + _attribute_name = "ac_frequency" + _unique_id_suffix = "ac_frequency" + _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor + _attr_device_class: SensorDeviceClass = SensorDeviceClass.FREQUENCY + _attr_translation_key: str = "ac_frequency" + _attr_native_unit_of_measurement = UnitOfFrequency.HERTZ + _div_mul_prefix = "ac_frequency" + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class ElectricalMeasurementPowerFactor(PolledElectricalMeasurement): + """Power Factor measurement.""" + + _attribute_name = "power_factor" + _unique_id_suffix = "power_factor" + _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor + _attr_device_class: SensorDeviceClass = SensorDeviceClass.POWER_FACTOR + _attr_native_unit_of_measurement = PERCENTAGE + _div_mul_prefix = None + + +@MULTI_MATCH( + generic_ids=CLUSTER_HANDLER_ST_HUMIDITY_CLUSTER, + stop_on_match_group=CLUSTER_HANDLER_HUMIDITY, +) +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_HUMIDITY, + stop_on_match_group=CLUSTER_HANDLER_HUMIDITY, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class Humidity(Sensor): + """Humidity sensor.""" + + _attribute_name = "measured_value" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.HUMIDITY + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _divisor = 100 + _attr_native_unit_of_measurement = PERCENTAGE + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_SOIL_MOISTURE) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class SoilMoisture(Sensor): + """Soil Moisture sensor.""" + + _attribute_name = "measured_value" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.HUMIDITY + _attr_state_class: 
SensorStateClass = SensorStateClass.MEASUREMENT + _attr_translation_key: str = "soil_moisture" + _divisor = 100 + _attr_native_unit_of_measurement = PERCENTAGE + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEAF_WETNESS) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class LeafWetness(Sensor): + """Leaf Wetness sensor.""" + + _attribute_name = "measured_value" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.HUMIDITY + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _attr_translation_key: str = "leaf_wetness" + _divisor = 100 + _attr_native_unit_of_measurement = PERCENTAGE + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ILLUMINANCE) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class Illuminance(Sensor): + """Illuminance Sensor.""" + + _attribute_name = "measured_value" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.ILLUMINANCE + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _attr_native_unit_of_measurement = LIGHT_LUX + + def formatter(self, value: int) -> int | None: + """Convert illumination data.""" + if value == 0: + return 0 + if value == 0xFFFF: + return None + return round(pow(10, ((value - 1) / 10000))) + + +@dataclass(frozen=True, kw_only=True) +class SmartEnergyMeteringEntityDescription(SensorEntityDescription): + """Dataclass that describes a Zigbee smart energy metering entity.""" + + key: str = "instantaneous_demand" + state_class: SensorStateClass | None = SensorStateClass.MEASUREMENT + scale: int = 1 + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, + stop_on_match_group=CLUSTER_HANDLER_SMARTENERGY_METERING, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class SmartEnergyMetering(PollableSensor): + """Metering sensor.""" + + entity_description: SmartEnergyMeteringEntityDescription + _use_custom_polling: bool = False + _attribute_name = "instantaneous_demand" + _attr_translation_key: str = "instantaneous_demand" + + _ENTITY_DESCRIPTION_MAP = { + 0x00: SmartEnergyMeteringEntityDescription( + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + ), + 0x01: SmartEnergyMeteringEntityDescription( + native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, + device_class=None, # volume flow rate is not supported yet + ), + 0x02: SmartEnergyMeteringEntityDescription( + native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE, + device_class=None, # volume flow rate is not supported yet + ), + 0x03: SmartEnergyMeteringEntityDescription( + native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, + device_class=None, # volume flow rate is not supported yet + scale=100, + ), + 0x04: SmartEnergyMeteringEntityDescription( + native_unit_of_measurement=f"{UnitOfVolume.GALLONS}/{UnitOfTime.HOURS}", # US gallons per hour + device_class=None, # volume flow rate is not supported yet + ), + 0x05: SmartEnergyMeteringEntityDescription( + native_unit_of_measurement=f"IMP {UnitOfVolume.GALLONS}/{UnitOfTime.HOURS}", # IMP gallons per hour + device_class=None, # needs to be None as imperial gallons are not supported + ), + 0x06: SmartEnergyMeteringEntityDescription( + native_unit_of_measurement=UnitOfPower.BTU_PER_HOUR, + device_class=None, + state_class=None, + ), + 0x07: SmartEnergyMeteringEntityDescription( + native_unit_of_measurement=f"l/{UnitOfTime.HOURS}", + device_class=None, # volume flow rate is not supported yet + ), + 0x08: 
SmartEnergyMeteringEntityDescription( + native_unit_of_measurement=UnitOfPressure.KPA, + device_class=SensorDeviceClass.PRESSURE, + ), # gauge + 0x09: SmartEnergyMeteringEntityDescription( + native_unit_of_measurement=UnitOfPressure.KPA, + device_class=SensorDeviceClass.PRESSURE, + ), # absolute + 0x0A: SmartEnergyMeteringEntityDescription( + native_unit_of_measurement=f"{UnitOfVolume.CUBIC_FEET}/{UnitOfTime.HOURS}", # cubic feet per hour + device_class=None, # volume flow rate is not supported yet + scale=1000, + ), + 0x0B: SmartEnergyMeteringEntityDescription( + native_unit_of_measurement="unitless", device_class=None, state_class=None + ), + 0x0C: SmartEnergyMeteringEntityDescription( + native_unit_of_measurement=f"{UnitOfEnergy.MEGA_JOULE}/{UnitOfTime.SECONDS}", + device_class=None, # needs to be None as MJ/s is not supported + ), + } + + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Init.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + + entity_description = self._ENTITY_DESCRIPTION_MAP.get( + self._cluster_handler.unit_of_measurement + ) + if entity_description is not None: + self.entity_description = entity_description + + def formatter(self, value: int) -> int | float: + """Pass through cluster handler formatter.""" + return self._cluster_handler.demand_formatter(value) + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return device state attrs for battery sensors.""" + attrs = {} + if self._cluster_handler.device_type is not None: + attrs["device_type"] = self._cluster_handler.device_type + if (status := self._cluster_handler.status) is not None: + if isinstance(status, enum.IntFlag): + attrs["status"] = str( + status.name if status.name is not None else status.value + ) + else: + attrs["status"] = str(status)[len(status.__class__.__name__) + 1 :] + return attrs + + @property + def native_value(self) -> StateType: + """Return the state of the entity.""" + state = super().native_value + if hasattr(self, "entity_description") and state is not None: + return float(state) * self.entity_description.scale + + return state + + +@dataclass(frozen=True, kw_only=True) +class SmartEnergySummationEntityDescription(SmartEnergyMeteringEntityDescription): + """Dataclass that describes a Zigbee smart energy summation entity.""" + + key: str = "summation_delivered" + state_class: SensorStateClass | None = SensorStateClass.TOTAL_INCREASING + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, + stop_on_match_group=CLUSTER_HANDLER_SMARTENERGY_METERING, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class SmartEnergySummation(SmartEnergyMetering): + """Smart Energy Metering summation sensor.""" + + entity_description: SmartEnergySummationEntityDescription + _attribute_name = "current_summ_delivered" + _unique_id_suffix = "summation_delivered" + _attr_translation_key: str = "summation_delivered" + + _ENTITY_DESCRIPTION_MAP = { + 0x00: SmartEnergySummationEntityDescription( + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + ), + 0x01: SmartEnergySummationEntityDescription( + native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, + device_class=SensorDeviceClass.VOLUME, + ), + 0x02: SmartEnergySummationEntityDescription( + native_unit_of_measurement=UnitOfVolume.CUBIC_FEET, + device_class=SensorDeviceClass.VOLUME, + ), + 0x03: SmartEnergySummationEntityDescription( 
+ native_unit_of_measurement=UnitOfVolume.CUBIC_FEET, + device_class=SensorDeviceClass.VOLUME, + scale=100, + ), + 0x04: SmartEnergySummationEntityDescription( + native_unit_of_measurement=UnitOfVolume.GALLONS, # US gallons + device_class=SensorDeviceClass.VOLUME, + ), + 0x05: SmartEnergySummationEntityDescription( + native_unit_of_measurement=f"IMP {UnitOfVolume.GALLONS}", + device_class=None, # needs to be None as imperial gallons are not supported + ), + 0x06: SmartEnergySummationEntityDescription( + native_unit_of_measurement="BTU", device_class=None, state_class=None + ), + 0x07: SmartEnergySummationEntityDescription( + native_unit_of_measurement=UnitOfVolume.LITERS, + device_class=SensorDeviceClass.VOLUME, + ), + 0x08: SmartEnergySummationEntityDescription( + native_unit_of_measurement=UnitOfPressure.KPA, + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + ), # gauge + 0x09: SmartEnergySummationEntityDescription( + native_unit_of_measurement=UnitOfPressure.KPA, + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + ), # absolute + 0x0A: SmartEnergySummationEntityDescription( + native_unit_of_measurement=UnitOfVolume.CUBIC_FEET, + device_class=SensorDeviceClass.VOLUME, + scale=1000, + ), + 0x0B: SmartEnergySummationEntityDescription( + native_unit_of_measurement="unitless", device_class=None, state_class=None + ), + 0x0C: SmartEnergySummationEntityDescription( + native_unit_of_measurement=UnitOfEnergy.MEGA_JOULE, + device_class=SensorDeviceClass.ENERGY, + ), + } + + def formatter(self, value: int) -> int | float: + """Numeric pass-through formatter.""" + if self._cluster_handler.unit_of_measurement != 0: + return self._cluster_handler.summa_formatter(value) + + cooked = ( + float(self._cluster_handler.multiplier * value) + / self._cluster_handler.divisor + ) + return round(cooked, 3) + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, + models={"TS011F", "ZLinky_TIC", "TICMeter"}, + stop_on_match_group=CLUSTER_HANDLER_SMARTENERGY_METERING, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class PolledSmartEnergySummation(SmartEnergySummation): + """Polled Smart Energy Metering summation sensor.""" + + _use_custom_polling: bool = True + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, + models={"ZLinky_TIC", "TICMeter"}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class Tier1SmartEnergySummation(PolledSmartEnergySummation): + """Tier 1 Smart Energy Metering summation sensor.""" + + _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation + _attribute_name = "current_tier1_summ_delivered" + _unique_id_suffix = "tier1_summation_delivered" + _attr_translation_key: str = "tier1_summation_delivered" + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, + models={"ZLinky_TIC", "TICMeter"}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class Tier2SmartEnergySummation(PolledSmartEnergySummation): + """Tier 2 Smart Energy Metering summation sensor.""" + + _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation + _attribute_name = "current_tier2_summ_delivered" + _unique_id_suffix = "tier2_summation_delivered" + _attr_translation_key: str = "tier2_summation_delivered" + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, + models={"ZLinky_TIC", "TICMeter"}, +) +# pylint: disable-next=hass-invalid-inheritance # 
needs fixing +class Tier3SmartEnergySummation(PolledSmartEnergySummation): + """Tier 3 Smart Energy Metering summation sensor.""" + + _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation + _attribute_name = "current_tier3_summ_delivered" + _unique_id_suffix = "tier3_summation_delivered" + _attr_translation_key: str = "tier3_summation_delivered" + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, + models={"ZLinky_TIC", "TICMeter"}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class Tier4SmartEnergySummation(PolledSmartEnergySummation): + """Tier 4 Smart Energy Metering summation sensor.""" + + _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation + _attribute_name = "current_tier4_summ_delivered" + _unique_id_suffix = "tier4_summation_delivered" + _attr_translation_key: str = "tier4_summation_delivered" + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, + models={"ZLinky_TIC", "TICMeter"}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class Tier5SmartEnergySummation(PolledSmartEnergySummation): + """Tier 5 Smart Energy Metering summation sensor.""" + + _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation + _attribute_name = "current_tier5_summ_delivered" + _unique_id_suffix = "tier5_summation_delivered" + _attr_translation_key: str = "tier5_summation_delivered" + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, + models={"ZLinky_TIC", "TICMeter"}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class Tier6SmartEnergySummation(PolledSmartEnergySummation): + """Tier 6 Smart Energy Metering summation sensor.""" + + _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation + _attribute_name = "current_tier6_summ_delivered" + _unique_id_suffix = "tier6_summation_delivered" + _attr_translation_key: str = "tier6_summation_delivered" + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class SmartEnergySummationReceived(PolledSmartEnergySummation): + """Smart Energy Metering summation received sensor.""" + + _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation + _attribute_name = "current_summ_received" + _unique_id_suffix = "summation_received" + _attr_translation_key: str = "summation_received" + + @classmethod + def create_entity( + cls, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> Self | None: + """Entity Factory. + + This attribute only started to be initialized in HA 2024.2.0, + so the entity would be created on the first HA start after the + upgrade for existing devices, as the initialization to see if + an attribute is unsupported happens later in the background. + To avoid creating unnecessary entities for existing devices, + wait until the attribute was properly initialized once for now. 
+ """ + if cluster_handlers[0].cluster.get(cls._attribute_name) is None: + return None + return super().create_entity(unique_id, zha_device, cluster_handlers, **kwargs) + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_PRESSURE) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class Pressure(Sensor): + """Pressure sensor.""" + + _attribute_name = "measured_value" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.PRESSURE + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _decimals = 0 + _attr_native_unit_of_measurement = UnitOfPressure.HPA + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_TEMPERATURE) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class Temperature(Sensor): + """Temperature Sensor.""" + + _attribute_name = "measured_value" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.TEMPERATURE + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _divisor = 100 + _attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_DEVICE_TEMPERATURE) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class DeviceTemperature(Sensor): + """Device Temperature Sensor.""" + + _attribute_name = "current_temperature" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.TEMPERATURE + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _attr_translation_key: str = "device_temperature" + _divisor = 100 + _attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS + _attr_entity_category = EntityCategory.DIAGNOSTIC + + +@MULTI_MATCH(cluster_handler_names="carbon_dioxide_concentration") +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class CarbonDioxideConcentration(Sensor): + """Carbon Dioxide Concentration sensor.""" + + _attribute_name = "measured_value" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.CO2 + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _decimals = 0 + _multiplier = 1e6 + _attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_MILLION + + +@MULTI_MATCH(cluster_handler_names="carbon_monoxide_concentration") +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class CarbonMonoxideConcentration(Sensor): + """Carbon Monoxide Concentration sensor.""" + + _attribute_name = "measured_value" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.CO + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _decimals = 0 + _multiplier = 1e6 + _attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_MILLION + + +@MULTI_MATCH(generic_ids="cluster_handler_0x042e", stop_on_match_group="voc_level") +@MULTI_MATCH(cluster_handler_names="voc_level", stop_on_match_group="voc_level") +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class VOCLevel(Sensor): + """VOC Level sensor.""" + + _attribute_name = "measured_value" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _decimals = 0 + _multiplier = 1e6 + _attr_native_unit_of_measurement = CONCENTRATION_MICROGRAMS_PER_CUBIC_METER + + +@MULTI_MATCH( + cluster_handler_names="voc_level", + models="lumi.airmonitor.acn01", + stop_on_match_group="voc_level", +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class PPBVOCLevel(Sensor): + """VOC Level sensor.""" + + _attribute_name = "measured_value" + _attr_device_class: SensorDeviceClass = ( + 
SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS + ) + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _decimals = 0 + _multiplier = 1 + _attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_BILLION + + +@MULTI_MATCH(cluster_handler_names="pm25") +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class PM25(Sensor): + """Particulate Matter 2.5 microns or less sensor.""" + + _attribute_name = "measured_value" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.PM25 + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _decimals = 0 + _multiplier = 1 + _attr_native_unit_of_measurement = CONCENTRATION_MICROGRAMS_PER_CUBIC_METER + + +@MULTI_MATCH(cluster_handler_names="formaldehyde_concentration") +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class FormaldehydeConcentration(Sensor): + """Formaldehyde Concentration sensor.""" + + _attribute_name = "measured_value" + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _attr_translation_key: str = "formaldehyde" + _decimals = 0 + _multiplier = 1e6 + _attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_MILLION + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class ThermostatHVACAction(Sensor): + """Thermostat HVAC action sensor.""" + + _unique_id_suffix = "hvac_action" + _attr_translation_key: str = "hvac_action" + + @classmethod + def create_entity( + cls, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> Self | None: + """Entity Factory. + + Return entity if it is a supported configuration, otherwise return None + """ + + return cls(unique_id, zha_device, cluster_handlers, **kwargs) + + @property + def native_value(self) -> str | None: + """Return the current HVAC action.""" + if ( + self._cluster_handler.pi_heating_demand is None + and self._cluster_handler.pi_cooling_demand is None + ): + return self._rm_rs_action + return self._pi_demand_action + + @property + def _rm_rs_action(self) -> HVACAction | None: + """Return the current HVAC action based on running mode and running state.""" + + if (running_state := self._cluster_handler.running_state) is None: + return None + + rs_heat = ( + self._cluster_handler.RunningState.Heat_State_On + | self._cluster_handler.RunningState.Heat_2nd_Stage_On + ) + if running_state & rs_heat: + return HVACAction.HEATING + + rs_cool = ( + self._cluster_handler.RunningState.Cool_State_On + | self._cluster_handler.RunningState.Cool_2nd_Stage_On + ) + if running_state & rs_cool: + return HVACAction.COOLING + + running_state = self._cluster_handler.running_state + if running_state and running_state & ( + self._cluster_handler.RunningState.Fan_State_On + | self._cluster_handler.RunningState.Fan_2nd_Stage_On + | self._cluster_handler.RunningState.Fan_3rd_Stage_On + ): + return HVACAction.FAN + + running_state = self._cluster_handler.running_state + if running_state and running_state & self._cluster_handler.RunningState.Idle: + return HVACAction.IDLE + + if self._cluster_handler.system_mode != self._cluster_handler.SystemMode.Off: + return HVACAction.IDLE + return HVACAction.OFF + + @property + def _pi_demand_action(self) -> HVACAction: + """Return the current HVAC action based on pi_demands.""" + + heating_demand = self._cluster_handler.pi_heating_demand + if heating_demand is not None and heating_demand > 0: + return 
HVACAction.HEATING + cooling_demand = self._cluster_handler.pi_cooling_demand + if cooling_demand is not None and cooling_demand > 0: + return HVACAction.COOLING + + if self._cluster_handler.system_mode != self._cluster_handler.SystemMode.Off: + return HVACAction.IDLE + return HVACAction.OFF + + +@MULTI_MATCH( + cluster_handler_names={CLUSTER_HANDLER_THERMOSTAT}, + manufacturers="Sinope Technologies", + stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class SinopeHVACAction(ThermostatHVACAction): + """Sinope Thermostat HVAC action sensor.""" + + @property + def _rm_rs_action(self) -> HVACAction: + """Return the current HVAC action based on running mode and running state.""" + + running_mode = self._cluster_handler.running_mode + if running_mode == self._cluster_handler.RunningMode.Heat: + return HVACAction.HEATING + if running_mode == self._cluster_handler.RunningMode.Cool: + return HVACAction.COOLING + + running_state = self._cluster_handler.running_state + if running_state and running_state & ( + self._cluster_handler.RunningState.Fan_State_On + | self._cluster_handler.RunningState.Fan_2nd_Stage_On + | self._cluster_handler.RunningState.Fan_3rd_Stage_On + ): + return HVACAction.FAN + if ( + self._cluster_handler.system_mode != self._cluster_handler.SystemMode.Off + and running_mode == self._cluster_handler.SystemMode.Off + ): + return HVACAction.IDLE + return HVACAction.OFF + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_BASIC) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class RSSISensor(Sensor): + """RSSI sensor for a device.""" + + _attribute_name = "rssi" + _unique_id_suffix = "rssi" + _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT + _attr_device_class: SensorDeviceClass | None = SensorDeviceClass.SIGNAL_STRENGTH + _attr_native_unit_of_measurement: str | None = SIGNAL_STRENGTH_DECIBELS_MILLIWATT + _attr_entity_category = EntityCategory.DIAGNOSTIC + _attr_entity_registry_enabled_default = False + _attr_should_poll = True # BaseZhaEntity defaults to False + _attr_translation_key: str = "rssi" + + @classmethod + def create_entity( + cls, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> Self | None: + """Entity Factory. 
+ + Return entity if it is a supported configuration, otherwise return None + """ + key = f"{CLUSTER_HANDLER_BASIC}_{cls._unique_id_suffix}" + if ZHA_ENTITIES.prevent_entity_creation(Platform.SENSOR, zha_device.ieee, key): + return None + return cls(unique_id, zha_device, cluster_handlers, **kwargs) + + @property + def native_value(self) -> StateType: + """Return the state of the entity.""" + return getattr(self._zha_device.device, self._attribute_name) + + +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_BASIC) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class LQISensor(RSSISensor): + """LQI sensor for a device.""" + + _attribute_name = "lqi" + _unique_id_suffix = "lqi" + _attr_device_class = None + _attr_native_unit_of_measurement = None + _attr_translation_key = "lqi" + + +@MULTI_MATCH( + cluster_handler_names="tuya_manufacturer", + manufacturers={ + "_TZE200_htnnfasr", + }, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class TimeLeft(Sensor): + """Sensor that displays time left value.""" + + _attribute_name = "timer_time_left" + _unique_id_suffix = "time_left" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.DURATION + _attr_translation_key: str = "timer_time_left" + _attr_native_unit_of_measurement = UnitOfTime.MINUTES + + +@MULTI_MATCH(cluster_handler_names="ikea_airpurifier") +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class IkeaDeviceRunTime(Sensor): + """Sensor that displays device run time (in minutes).""" + + _attribute_name = "device_run_time" + _unique_id_suffix = "device_run_time" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.DURATION + _attr_translation_key: str = "device_run_time" + _attr_native_unit_of_measurement = UnitOfTime.MINUTES + _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC + + +@MULTI_MATCH(cluster_handler_names="ikea_airpurifier") +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class IkeaFilterRunTime(Sensor): + """Sensor that displays run time of the current filter (in minutes).""" + + _attribute_name = "filter_run_time" + _unique_id_suffix = "filter_run_time" + _attr_device_class: SensorDeviceClass = SensorDeviceClass.DURATION + _attr_translation_key: str = "filter_run_time" + _attr_native_unit_of_measurement = UnitOfTime.MINUTES + _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC + + +class AqaraFeedingSource(types.enum8): + """Aqara pet feeder feeding source.""" + + Feeder = 0x01 + HomeAssistant = 0x02 + + +@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class AqaraPetFeederLastFeedingSource(EnumSensor): + """Sensor that displays the last feeding source of pet feeder.""" + + _attribute_name = "last_feeding_source" + _unique_id_suffix = "last_feeding_source" + _attr_translation_key: str = "last_feeding_source" + _enum = AqaraFeedingSource + + +@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class AqaraPetFeederLastFeedingSize(Sensor): + """Sensor that displays the last feeding size of the pet feeder.""" + + _attribute_name = "last_feeding_size" + _unique_id_suffix = "last_feeding_size" + _attr_translation_key: str = "last_feeding_size" + + +@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class 
AqaraPetFeederPortionsDispensed(Sensor):
+    """Sensor that displays the number of portions dispensed by the pet feeder."""
+
+    _attribute_name = "portions_dispensed"
+    _unique_id_suffix = "portions_dispensed"
+    _attr_translation_key: str = "portions_dispensed_today"
+    _attr_state_class: SensorStateClass = SensorStateClass.TOTAL_INCREASING
+
+
+@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"})
+# pylint: disable-next=hass-invalid-inheritance # needs fixing
+class AqaraPetFeederWeightDispensed(Sensor):
+    """Sensor that displays the weight dispensed by the pet feeder."""
+
+    _attribute_name = "weight_dispensed"
+    _unique_id_suffix = "weight_dispensed"
+    _attr_translation_key: str = "weight_dispensed_today"
+    _attr_native_unit_of_measurement = UnitOfMass.GRAMS
+    _attr_state_class: SensorStateClass = SensorStateClass.TOTAL_INCREASING
+
+
+@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"})
+# pylint: disable-next=hass-invalid-inheritance # needs fixing
+class AqaraSmokeDensityDbm(Sensor):
+    """Sensor that displays the smoke density of an Aqara smoke sensor in dB/m."""
+
+    _attribute_name = "smoke_density_dbm"
+    _unique_id_suffix = "smoke_density_dbm"
+    _attr_translation_key: str = "smoke_density"
+    _attr_native_unit_of_measurement = "dB/m"
+    _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT
+    _attr_suggested_display_precision: int = 3
+
+
+class SonoffIlluminationStates(types.enum8):
+    """Enum for displaying last Illumination state."""
+
+    Dark = 0x00
+    Light = 0x01
+
+
+@MULTI_MATCH(cluster_handler_names="sonoff_manufacturer", models={"SNZB-06P"})
+# pylint: disable-next=hass-invalid-inheritance # needs fixing
+class SonoffPresenceSenorIlluminationStatus(EnumSensor):
+    """Sensor that displays the illumination status the last time presence was detected."""
+
+    _attribute_name = "last_illumination_state"
+    _unique_id_suffix = "last_illumination"
+    _attr_translation_key: str = "last_illumination_state"
+    _enum = SonoffIlluminationStates
+
+
+@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT)
+# pylint: disable-next=hass-invalid-inheritance # needs fixing
+class PiHeatingDemand(Sensor):
+    """Sensor that displays the percentage of heating power demanded.
+
+    Optional thermostat attribute.
+    """
+
+    _unique_id_suffix = "pi_heating_demand"
+    _attribute_name = "pi_heating_demand"
+    _attr_translation_key: str = "pi_heating_demand"
+    _attr_native_unit_of_measurement = PERCENTAGE
+    _decimals = 0
+    _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT
+    _attr_entity_category = EntityCategory.DIAGNOSTIC
+
+
+class SetpointChangeSourceEnum(types.enum8):
+    """The source of the setpoint change."""
+
+    Manual = 0x00
+    Schedule = 0x01
+    External = 0x02
+
+
+@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT)
+# pylint: disable-next=hass-invalid-inheritance # needs fixing
+class SetpointChangeSource(EnumSensor):
+    """Sensor that displays the source of the setpoint change.
+
+    Optional thermostat attribute.
+ """ + + _unique_id_suffix = "setpoint_change_source" + _attribute_name = "setpoint_change_source" + _attr_translation_key: str = "setpoint_change_source" + _attr_entity_category = EntityCategory.DIAGNOSTIC + _enum = SetpointChangeSourceEnum + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_COVER) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class WindowCoveringTypeSensor(EnumSensor): + """Sensor that displays the type of a cover device.""" + + _attribute_name: str = WindowCovering.AttributeDefs.window_covering_type.name + _enum = WindowCovering.WindowCoveringType + _unique_id_suffix: str = WindowCovering.AttributeDefs.window_covering_type.name + _attr_translation_key: str = WindowCovering.AttributeDefs.window_covering_type.name + _attr_entity_category = EntityCategory.DIAGNOSTIC + _attr_icon = "mdi:curtains" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_BASIC, models={"lumi.curtain.agl001"} +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class AqaraCurtainMotorPowerSourceSensor(EnumSensor): + """Sensor that displays the power source of the Aqara E1 curtain motor device.""" + + _attribute_name: str = Basic.AttributeDefs.power_source.name + _enum = Basic.PowerSource + _unique_id_suffix: str = Basic.AttributeDefs.power_source.name + _attr_translation_key: str = Basic.AttributeDefs.power_source.name + _attr_entity_category = EntityCategory.DIAGNOSTIC + _attr_icon = "mdi:battery-positive" + + +class AqaraE1HookState(types.enum8): + """Aqara hook state.""" + + Unlocked = 0x00 + Locked = 0x01 + Locking = 0x02 + Unlocking = 0x03 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.curtain.agl001"} +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class AqaraCurtainHookStateSensor(EnumSensor): + """Representation of a ZHA curtain mode configuration entity.""" + + _attribute_name = "hooks_state" + _enum = AqaraE1HookState + _unique_id_suffix = "hooks_state" + _attr_translation_key: str = "hooks_state" + _attr_icon: str = "mdi:hook" + _attr_entity_category = EntityCategory.DIAGNOSTIC + + +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class BitMapSensor(Sensor): + """A sensor with only state attributes. + + The sensor value will be an aggregate of the state attributes. + """ + + _bitmap: types.bitmap8 | types.bitmap16 + + def formatter(self, _value: int) -> str: + """Summary of all attributes.""" + binary_state_attributes = [ + key for (key, elem) in self.extra_state_attributes.items() if elem + ] + + return "something" if binary_state_attributes else "nothing" + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Bitmap.""" + value = self._cluster_handler.cluster.get(self._attribute_name) + + state_attr = {} + + for bit in list(self._bitmap): + if value is None: + state_attr[bit.name] = False + else: + state_attr[bit.name] = bit in self._bitmap(value) + + return state_attr + + +@MULTI_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class DanfossOpenWindowDetection(EnumSensor): + """Danfoss proprietary attribute. + + Sensor that displays whether the TRV detects an open window using the temperature sensor. 
+ """ + + _unique_id_suffix = "open_window_detection" + _attribute_name = "open_window_detection" + _attr_translation_key: str = "open_window_detected" + _attr_icon: str = "mdi:window-open" + _enum = danfoss_thermostat.DanfossOpenWindowDetectionEnum + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class DanfossLoadEstimate(Sensor): + """Danfoss proprietary attribute for communicating its estimate of the radiator load.""" + + _unique_id_suffix = "load_estimate" + _attribute_name = "load_estimate" + _attr_translation_key: str = "load_estimate" + _attr_icon: str = "mdi:scale-balance" + _attr_entity_category = EntityCategory.DIAGNOSTIC + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class DanfossAdaptationRunStatus(BitMapSensor): + """Danfoss proprietary attribute for showing the status of the adaptation run.""" + + _unique_id_suffix = "adaptation_run_status" + _attribute_name = "adaptation_run_status" + _attr_translation_key: str = "adaptation_run_status" + _attr_entity_category = EntityCategory.DIAGNOSTIC + _bitmap = danfoss_thermostat.DanfossAdaptationRunStatusBitmap + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class DanfossPreheatTime(Sensor): + """Danfoss proprietary attribute for communicating the time when it starts pre-heating.""" + + _unique_id_suffix = "preheat_time" + _attribute_name = "preheat_time" + _attr_translation_key: str = "preheat_time" + _attr_icon: str = "mdi:radiator" + _attr_entity_registry_enabled_default = False + _attr_entity_category = EntityCategory.DIAGNOSTIC + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="diagnostic", + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class DanfossSoftwareErrorCode(BitMapSensor): + """Danfoss proprietary attribute for communicating the error code.""" + + _unique_id_suffix = "sw_error_code" + _attribute_name = "sw_error_code" + _attr_translation_key: str = "software_error" + _attr_entity_category = EntityCategory.DIAGNOSTIC + _bitmap = danfoss_thermostat.DanfossSoftwareErrorCodeBitmap + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="diagnostic", + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class DanfossMotorStepCounter(Sensor): + """Danfoss proprietary attribute for communicating the motor step counter.""" + + _unique_id_suffix = "motor_step_counter" + _attribute_name = "motor_step_counter" + _attr_translation_key: str = "motor_stepcount" + _attr_entity_category = EntityCategory.DIAGNOSTIC diff --git a/homeassistant/components/zha/siren.py b/homeassistant/components/zha/siren.py index 9d876d9ca4d..3aab332f746 100644 --- a/homeassistant/components/zha/siren.py +++ b/homeassistant/components/zha/siren.py @@ -2,18 +2,11 @@ from __future__ import annotations +from collections.abc import Callable import functools -from typing import Any +from typing import TYPE_CHECKING, Any, cast -from zha.application.const import ( - WARNING_DEVICE_MODE_BURGLAR, - WARNING_DEVICE_MODE_EMERGENCY, - WARNING_DEVICE_MODE_EMERGENCY_PANIC, - WARNING_DEVICE_MODE_FIRE, - WARNING_DEVICE_MODE_FIRE_PANIC, - 
WARNING_DEVICE_MODE_POLICE_PANIC, -) -from zha.application.platforms.siren import SirenEntityFeature as ZHASirenEntityFeature +from zigpy.zcl.clusters.security import IasWd as WD from homeassistant.components.siren import ( ATTR_DURATION, @@ -24,18 +17,38 @@ from homeassistant.components.siren import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.event import async_call_later -from .entity import ZHAEntity -from .helpers import ( +from .core import discovery +from .core.cluster_handlers.security import IasWdClusterHandler +from .core.const import ( + CLUSTER_HANDLER_IAS_WD, SIGNAL_ADD_ENTITIES, - EntityData, - async_add_entities as zha_async_add_entities, - convert_zha_error_to_ha_error, - get_zha_data, + WARNING_DEVICE_MODE_BURGLAR, + WARNING_DEVICE_MODE_EMERGENCY, + WARNING_DEVICE_MODE_EMERGENCY_PANIC, + WARNING_DEVICE_MODE_FIRE, + WARNING_DEVICE_MODE_FIRE_PANIC, + WARNING_DEVICE_MODE_POLICE_PANIC, + WARNING_DEVICE_MODE_STOP, + WARNING_DEVICE_SOUND_HIGH, + WARNING_DEVICE_STROBE_HIGH, + WARNING_DEVICE_STROBE_NO, + Strobe, ) +from .core.helpers import get_zha_data +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity + +if TYPE_CHECKING: + from .core.cluster_handlers import ClusterHandler + from .core.device import ZHADevice + +MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.SIREN) +DEFAULT_DURATION = 5 # seconds async def async_setup_entry( @@ -51,61 +64,115 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, async_add_entities, ZHASiren, entities_to_create + discovery.async_add_entities, + async_add_entities, + entities_to_create, ), ) config_entry.async_on_unload(unsub) -class ZHASiren(ZHAEntity, SirenEntity): +@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) +class ZHASiren(ZhaEntity, SirenEntity): """Representation of a ZHA siren.""" - _attr_available_tones: list[int | str] | dict[int, str] | None = { - WARNING_DEVICE_MODE_BURGLAR: "Burglar", - WARNING_DEVICE_MODE_FIRE: "Fire", - WARNING_DEVICE_MODE_EMERGENCY: "Emergency", - WARNING_DEVICE_MODE_POLICE_PANIC: "Police Panic", - WARNING_DEVICE_MODE_FIRE_PANIC: "Fire Panic", - WARNING_DEVICE_MODE_EMERGENCY_PANIC: "Emergency Panic", - } + _attr_name: str = "Siren" - def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: - """Initialize the ZHA siren.""" - super().__init__(entity_data, **kwargs) + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs, + ) -> None: + """Init this siren.""" + self._attr_supported_features = ( + SirenEntityFeature.TURN_ON + | SirenEntityFeature.TURN_OFF + | SirenEntityFeature.DURATION + | SirenEntityFeature.VOLUME_SET + | SirenEntityFeature.TONES + ) + self._attr_available_tones: list[int | str] | dict[int, str] | None = { + WARNING_DEVICE_MODE_BURGLAR: "Burglar", + WARNING_DEVICE_MODE_FIRE: "Fire", + WARNING_DEVICE_MODE_EMERGENCY: "Emergency", + WARNING_DEVICE_MODE_POLICE_PANIC: "Police Panic", + WARNING_DEVICE_MODE_FIRE_PANIC: "Fire Panic", + WARNING_DEVICE_MODE_EMERGENCY_PANIC: "Emergency Panic", + } + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._cluster_handler: IasWdClusterHandler 
= cast( + IasWdClusterHandler, cluster_handlers[0] + ) + self._attr_is_on: bool = False + self._off_listener: Callable[[], None] | None = None - features: SirenEntityFeature = SirenEntityFeature(0) - zha_features: ZHASirenEntityFeature = self.entity_data.entity.supported_features - - if ZHASirenEntityFeature.TURN_ON in zha_features: - features |= SirenEntityFeature.TURN_ON - if ZHASirenEntityFeature.TURN_OFF in zha_features: - features |= SirenEntityFeature.TURN_OFF - if ZHASirenEntityFeature.TONES in zha_features: - features |= SirenEntityFeature.TONES - if ZHASirenEntityFeature.VOLUME_SET in zha_features: - features |= SirenEntityFeature.VOLUME_SET - if ZHASirenEntityFeature.DURATION in zha_features: - features |= SirenEntityFeature.DURATION - - self._attr_supported_features = features - - @property - def is_on(self) -> bool: - """Return True if entity is on.""" - return self.entity_data.entity.is_on - - @convert_zha_error_to_ha_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn on siren.""" - await self.entity_data.entity.async_turn_on( - duration=kwargs.get(ATTR_DURATION), - tone=kwargs.get(ATTR_TONE), - volume_level=kwargs.get(ATTR_VOLUME_LEVEL), + if self._off_listener: + self._off_listener() + self._off_listener = None + tone_cache = self._cluster_handler.data_cache.get( + WD.Warning.WarningMode.__name__ + ) + siren_tone = ( + tone_cache.value + if tone_cache is not None + else WARNING_DEVICE_MODE_EMERGENCY + ) + siren_duration = DEFAULT_DURATION + level_cache = self._cluster_handler.data_cache.get( + WD.Warning.SirenLevel.__name__ + ) + siren_level = ( + level_cache.value if level_cache is not None else WARNING_DEVICE_SOUND_HIGH + ) + strobe_cache = self._cluster_handler.data_cache.get(Strobe.__name__) + should_strobe = ( + strobe_cache.value if strobe_cache is not None else Strobe.No_Strobe + ) + strobe_level_cache = self._cluster_handler.data_cache.get( + WD.StrobeLevel.__name__ + ) + strobe_level = ( + strobe_level_cache.value + if strobe_level_cache is not None + else WARNING_DEVICE_STROBE_HIGH + ) + if (duration := kwargs.get(ATTR_DURATION)) is not None: + siren_duration = duration + if (tone := kwargs.get(ATTR_TONE)) is not None: + siren_tone = tone + if (level := kwargs.get(ATTR_VOLUME_LEVEL)) is not None: + siren_level = int(level) + await self._cluster_handler.issue_start_warning( + mode=siren_tone, + warning_duration=siren_duration, + siren_level=siren_level, + strobe=should_strobe, + strobe_duty_cycle=50 if should_strobe else 0, + strobe_intensity=strobe_level, + ) + self._attr_is_on = True + self._off_listener = async_call_later( + self._zha_device.hass, siren_duration, self.async_set_off ) self.async_write_ha_state() - @convert_zha_error_to_ha_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn off siren.""" - await self.entity_data.entity.async_turn_off() + await self._cluster_handler.issue_start_warning( + mode=WARNING_DEVICE_MODE_STOP, strobe=WARNING_DEVICE_STROBE_NO + ) + self._attr_is_on = False + self.async_write_ha_state() + + @callback + def async_set_off(self, _) -> None: + """Set is_on to False and write HA state.""" + self._attr_is_on = False + if self._off_listener: + self._off_listener() + self._off_listener = None self.async_write_ha_state() diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index d0505bf2460..f25fdf1ebe4 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -178,11 +178,11 @@ "title": "Global Options", 
"enhanced_light_transition": "Enable enhanced light color/temperature transition from an off-state", "light_transitioning_flag": "Enable enhanced brightness slider during light transition", + "always_prefer_xy_color_mode": "Always prefer XY color mode", "group_members_assume_state": "Group members assume state of group", "enable_identify_on_join": "Enable identify effect when devices join the network", "default_light_transition": "Default light transition time (seconds)", "consider_unavailable_mains": "Consider mains powered devices unavailable after (seconds)", - "enable_mains_startup_polling": "Refresh state for mains powered devices on startup", "consider_unavailable_battery": "Consider battery powered devices unavailable after (seconds)" }, "zha_alarm_options": { @@ -413,7 +413,7 @@ }, "warning_device_squawk": { "name": "Warning device squawk", - "description": "This action uses the WD capabilities to emit a quick audible/visible pulse called a \"squawk\". The squawk command has no effect if the WD is currently active (warning in progress).", + "description": "This service uses the WD capabilities to emit a quick audible/visible pulse called a \"squawk\". The squawk command has no effect if the WD is currently active (warning in progress).", "fields": { "ieee": { "name": "[%key:component::zha::services::permit::fields::ieee::name%]", @@ -435,7 +435,7 @@ }, "warning_device_warn": { "name": "Warning device starts alert", - "description": "This action starts the operation of the warning device. The warning device alerts the surrounding area by audible (siren) and visual (strobe) signals.", + "description": "This service starts the operation of the warning device. The warning device alerts the surrounding area by audible (siren) and visual (strobe) signals.", "fields": { "ieee": { "name": "[%key:component::zha::services::permit::fields::ieee::name%]", @@ -708,15 +708,6 @@ "maximum_level": { "name": "Maximum load dimming level" }, - "default_level_local": { - "name": "Local default dimming level" - }, - "default_level_remote": { - "name": "Remote default dimming level" - }, - "state_after_power_restored": { - "name": "Start-up default dimming level" - }, "auto_off_timer": { "name": "Automatic switch shutoff timer" }, @@ -776,21 +767,6 @@ }, "regulation_setpoint_offset": { "name": "Regulation setpoint offset" - }, - "irrigation_cycles": { - "name": "Irrigation cycles" - }, - "irrigation_target": { - "name": "Irrigation target" - }, - "irrigation_interval": { - "name": "Irrigation interval" - }, - "valve_countdown_1": { - "name": "Irrigation time 1" - }, - "valve_countdown_2": { - "name": "Irrigation time 2" } }, "select": { @@ -842,9 +818,6 @@ "increased_non_neutral_output": { "name": "Non neutral output" }, - "leading_or_trailing_edge": { - "name": "Dimming mode" - }, "feeding_mode": { "name": "Mode" }, @@ -880,12 +853,6 @@ }, "setpoint_response_time": { "name": "Setpoint response time" - }, - "irrigation_mode": { - "name": "Irrigation mode" - }, - "weather_delay": { - "name": "Weather delay" } }, "sensor": { @@ -931,12 +898,6 @@ "device_temperature": { "name": "Device temperature" }, - "internal_temp_monitor": { - "name": "Internal temperature" - }, - "overheated": { - "name": "Overheat protection" - }, "formaldehyde": { "name": "Formaldehyde concentration" }, @@ -1062,27 +1023,6 @@ }, "motor_stepcount": { "name": "Motor stepcount" - }, - "irrigation_duration": { - "name": "Last irrigation duration" - }, - "irrigation_start_time": { - "name": "Irrigation start time" - }, - 
"irrigation_end_time": { - "name": "Irrigation end time" - }, - "irrigation_duration_1": { - "name": "Irrigation duration 1" - }, - "irriation_duration_2": { - "name": "Irrigation duration 2" - }, - "valve_status_1": { - "name": "Status 1" - }, - "valve_status_2": { - "name": "Status 2" } }, "switch": { @@ -1187,12 +1127,6 @@ }, "adaptation_run_enabled": { "name": "Adaptation run enabled" - }, - "valve_on_off_1": { - "name": "Valve 1" - }, - "valve_on_off_2": { - "name": "Valve 2" } } } diff --git a/homeassistant/components/zha/switch.py b/homeassistant/components/zha/switch.py index cb0268f98e0..f07d3d4c8e3 100644 --- a/homeassistant/components/zha/switch.py +++ b/homeassistant/components/zha/switch.py @@ -4,21 +4,44 @@ from __future__ import annotations import functools import logging -from typing import Any +from typing import TYPE_CHECKING, Any, Self + +from zhaquirks.quirk_ids import DANFOSS_ALLY_THERMOSTAT, TUYA_PLUG_ONOFF +from zigpy.quirks.v2 import SwitchMetadata +from zigpy.zcl.clusters.closures import ConfigStatus, WindowCovering, WindowCoveringMode +from zigpy.zcl.clusters.general import OnOff +from zigpy.zcl.foundation import Status from homeassistant.components.switch import SwitchEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, EntityCategory, Platform +from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .entity import ZHAEntity -from .helpers import ( +from .core import discovery +from .core.const import ( + CLUSTER_HANDLER_BASIC, + CLUSTER_HANDLER_COVER, + CLUSTER_HANDLER_INOVELLI, + CLUSTER_HANDLER_ON_OFF, + CLUSTER_HANDLER_THERMOSTAT, + ENTITY_METADATA, SIGNAL_ADD_ENTITIES, - async_add_entities as zha_async_add_entities, - convert_zha_error_to_ha_error, - get_zha_data, + SIGNAL_ATTR_UPDATED, +) +from .core.helpers import get_zha_data +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity, ZhaGroupEntity + +if TYPE_CHECKING: + from .core.cluster_handlers import ClusterHandler + from .core.device import ZHADevice + +STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.SWITCH) +GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, Platform.SWITCH) +CONFIG_DIAGNOSTIC_MATCH = functools.partial( + ZHA_ENTITIES.config_diagnostic_match, Platform.SWITCH ) _LOGGER = logging.getLogger(__name__) @@ -37,28 +60,752 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, async_add_entities, Switch, entities_to_create + discovery.async_add_entities, async_add_entities, entities_to_create ), ) config_entry.async_on_unload(unsub) -class Switch(ZHAEntity, SwitchEntity): +@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ON_OFF) +class Switch(ZhaEntity, SwitchEntity): """ZHA switch.""" + _attr_translation_key = "switch" + + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> None: + """Initialize the ZHA switch.""" + super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + self._on_off_cluster_handler = self.cluster_handlers[CLUSTER_HANDLER_ON_OFF] + @property def is_on(self) -> bool: """Return if the switch is on based on the statemachine.""" - return self.entity_data.entity.is_on + if 
self._on_off_cluster_handler.on_off is None: + return False + return self._on_off_cluster_handler.on_off - @convert_zha_error_to_ha_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - await self.entity_data.entity.async_turn_on() + await self._on_off_cluster_handler.turn_on() self.async_write_ha_state() - @convert_zha_error_to_ha_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - await self.entity_data.entity.async_turn_off() + await self._on_off_cluster_handler.turn_off() self.async_write_ha_state() + + @callback + def async_set_state(self, attr_id: int, attr_name: str, value: Any): + """Handle state update from cluster handler.""" + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + await super().async_added_to_hass() + self.async_accept_signal( + self._on_off_cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state + ) + + async def async_update(self) -> None: + """Attempt to retrieve on off state from the switch.""" + self.debug("Polling current state") + await self._on_off_cluster_handler.get_attribute_value( + "on_off", from_cache=False + ) + + +@GROUP_MATCH() +class SwitchGroup(ZhaGroupEntity, SwitchEntity): + """Representation of a switch group.""" + + def __init__( + self, + entity_ids: list[str], + unique_id: str, + group_id: int, + zha_device: ZHADevice, + **kwargs: Any, + ) -> None: + """Initialize a switch group.""" + super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs) + self._available: bool + self._state: bool + group = self.zha_device.gateway.get_group(self._group_id) + self._on_off_cluster_handler = group.endpoint[OnOff.cluster_id] + + @property + def is_on(self) -> bool: + """Return if the switch is on based on the statemachine.""" + return bool(self._state) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + result = await self._on_off_cluster_handler.on() + if result[1] is not Status.SUCCESS: + return + self._state = True + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + result = await self._on_off_cluster_handler.off() + if result[1] is not Status.SUCCESS: + return + self._state = False + self.async_write_ha_state() + + async def async_update(self) -> None: + """Query all members and determine the switch group state.""" + all_states = [self.hass.states.get(x) for x in self._entity_ids] + states: list[State] = list(filter(None, all_states)) + on_states = [state for state in states if state.state == STATE_ON] + + self._state = len(on_states) > 0 + self._available = any(state.state != STATE_UNAVAILABLE for state in states) + + +class ZHASwitchConfigurationEntity(ZhaEntity, SwitchEntity): + """Representation of a ZHA switch configuration entity.""" + + _attr_entity_category = EntityCategory.CONFIG + _attribute_name: str + _inverter_attribute_name: str | None = None + _force_inverted: bool = False + _off_value: int = 0 + _on_value: int = 1 + + @classmethod + def create_entity( + cls, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> Self | None: + """Entity Factory. 
+
+        Return entity if it is a supported configuration, otherwise return None
+        """
+        cluster_handler = cluster_handlers[0]
+        if ENTITY_METADATA not in kwargs and (
+            cls._attribute_name in cluster_handler.cluster.unsupported_attributes
+            or cls._attribute_name not in cluster_handler.cluster.attributes_by_name
+            or cluster_handler.cluster.get(cls._attribute_name) is None
+        ):
+            _LOGGER.debug(
+                "%s is not supported - skipping %s entity creation",
+                cls._attribute_name,
+                cls.__name__,
+            )
+            return None
+
+        return cls(unique_id, zha_device, cluster_handlers, **kwargs)
+
+    def __init__(
+        self,
+        unique_id: str,
+        zha_device: ZHADevice,
+        cluster_handlers: list[ClusterHandler],
+        **kwargs: Any,
+    ) -> None:
+        """Init this switch configuration entity."""
+        self._cluster_handler: ClusterHandler = cluster_handlers[0]
+        if ENTITY_METADATA in kwargs:
+            self._init_from_quirks_metadata(kwargs[ENTITY_METADATA])
+        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)
+
+    def _init_from_quirks_metadata(self, entity_metadata: SwitchMetadata) -> None:
+        """Init this entity from the quirks metadata."""
+        super()._init_from_quirks_metadata(entity_metadata)
+        self._attribute_name = entity_metadata.attribute_name
+        if entity_metadata.invert_attribute_name:
+            self._inverter_attribute_name = entity_metadata.invert_attribute_name
+        if entity_metadata.force_inverted:
+            self._force_inverted = entity_metadata.force_inverted
+        self._off_value = entity_metadata.off_value
+        self._on_value = entity_metadata.on_value
+
+    async def async_added_to_hass(self) -> None:
+        """Run when about to be added to hass."""
+        await super().async_added_to_hass()
+        self.async_accept_signal(
+            self._cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state
+        )
+
+    @callback
+    def async_set_state(self, attr_id: int, attr_name: str, value: Any):
+        """Handle state update from cluster handler."""
+        self.async_write_ha_state()
+
+    @property
+    def inverted(self) -> bool:
+        """Return True if the switch is inverted."""
+        if self._inverter_attribute_name:
+            return bool(
+                self._cluster_handler.cluster.get(self._inverter_attribute_name)
+            )
+        return self._force_inverted
+
+    @property
+    def is_on(self) -> bool:
+        """Return if the switch is on based on the statemachine."""
+        if self._on_value != 1:
+            val = self._cluster_handler.cluster.get(self._attribute_name)
+            val = val == self._on_value
+        else:
+            val = bool(self._cluster_handler.cluster.get(self._attribute_name))
+        return (not val) if self.inverted else val
+
+    async def async_turn_on_off(self, state: bool) -> None:
+        """Turn the entity on or off."""
+        if self.inverted:
+            state = not state
+        if state:
+            await self._cluster_handler.write_attributes_safe(
+                {self._attribute_name: self._on_value}
+            )
+        else:
+            await self._cluster_handler.write_attributes_safe(
+                {self._attribute_name: self._off_value}
+            )
+        self.async_write_ha_state()
+
+    async def async_turn_on(self, **kwargs: Any) -> None:
+        """Turn the entity on."""
+        await self.async_turn_on_off(True)
+
+    async def async_turn_off(self, **kwargs: Any) -> None:
+        """Turn the entity off."""
+        await self.async_turn_on_off(False)
+
+    async def async_update(self) -> None:
+        """Attempt to retrieve the state of the entity."""
+        self.debug("Polling current state")
+        value = await self._cluster_handler.get_attribute_value(
+            self._attribute_name, from_cache=False
+        )
+        await self._cluster_handler.get_attribute_value(
+            self._inverter_attribute_name, from_cache=False
+        )
+        self.debug("read value=%s, inverted=%s", value, self.inverted)
+
+
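The configuration-switch logic above composes three pieces of state: the raw attribute value, an optional inverter attribute, and a forced-inversion flag. A minimal standalone Python sketch of that composition (hypothetical helper names, not taken from this module) is:

def resolve_is_on(raw_value, on_value: int, inverted: bool) -> bool:
    # Interpret the raw cluster attribute as an on/off state, honoring inversion.
    val = bool(raw_value) if on_value == 1 else raw_value == on_value
    return (not val) if inverted else val

def resolve_write_value(state: bool, inverted: bool, on_value: int = 1, off_value: int = 0) -> int:
    # Map a requested switch state back to the attribute value that would be written.
    if inverted:
        state = not state
    return on_value if state else off_value

# An inverted entity reports "on" when the raw attribute is 0 and writes 0 to turn on.
assert resolve_is_on(0, on_value=1, inverted=True) is True
assert resolve_write_value(True, inverted=True) == 0
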
+@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="tuya_manufacturer", + manufacturers={ + "_TZE200_b6wax7g0", + }, +) +class OnOffWindowDetectionFunctionConfigurationEntity(ZHASwitchConfigurationEntity): + """Representation of a ZHA window detection configuration entity.""" + + _unique_id_suffix = "on_off_window_opened_detection" + _attribute_name = "window_detection_function" + _inverter_attribute_name = "window_detection_function_inverter" + _attr_translation_key = "window_detection_function" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.motion.ac02"} +) +class P1MotionTriggerIndicatorSwitch(ZHASwitchConfigurationEntity): + """Representation of a ZHA motion triggering configuration entity.""" + + _unique_id_suffix = "trigger_indicator" + _attribute_name = "trigger_indicator" + _attr_translation_key = "trigger_indicator" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", + models={"lumi.plug.mmeu01", "lumi.plug.maeu01"}, +) +class XiaomiPlugPowerOutageMemorySwitch(ZHASwitchConfigurationEntity): + """Representation of a ZHA power outage memory configuration entity.""" + + _unique_id_suffix = "power_outage_memory" + _attribute_name = "power_outage_memory" + _attr_translation_key = "power_outage_memory" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_BASIC, + manufacturers={"Philips", "Signify Netherlands B.V."}, + models={"SML001", "SML002", "SML003", "SML004"}, +) +class HueMotionTriggerIndicatorSwitch(ZHASwitchConfigurationEntity): + """Representation of a ZHA motion triggering configuration entity.""" + + _unique_id_suffix = "trigger_indicator" + _attribute_name = "trigger_indicator" + _attr_translation_key = "trigger_indicator" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="ikea_airpurifier", + models={"STARKVIND Air purifier", "STARKVIND Air purifier table"}, +) +class ChildLock(ZHASwitchConfigurationEntity): + """ZHA BinarySensor.""" + + _unique_id_suffix = "child_lock" + _attribute_name = "child_lock" + _attr_translation_key = "child_lock" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="ikea_airpurifier", + models={"STARKVIND Air purifier", "STARKVIND Air purifier table"}, +) +class DisableLed(ZHASwitchConfigurationEntity): + """ZHA BinarySensor.""" + + _unique_id_suffix = "disable_led" + _attribute_name = "disable_led" + _attr_translation_key = "disable_led" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliInvertSwitch(ZHASwitchConfigurationEntity): + """Inovelli invert switch control.""" + + _unique_id_suffix = "invert_switch" + _attribute_name = "invert_switch" + _attr_translation_key = "invert_switch" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliSmartBulbMode(ZHASwitchConfigurationEntity): + """Inovelli smart bulb mode control.""" + + _unique_id_suffix = "smart_bulb_mode" + _attribute_name = "smart_bulb_mode" + _attr_translation_key = "smart_bulb_mode" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} +) +class InovelliSmartFanMode(ZHASwitchConfigurationEntity): + """Inovelli smart fan mode control.""" + + _unique_id_suffix = "smart_fan_mode" + _attribute_name = "smart_fan_mode" + _attr_translation_key = "smart_fan_mode" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliDoubleTapUpEnabled(ZHASwitchConfigurationEntity): + """Inovelli double tap up enabled.""" + + 
_unique_id_suffix = "double_tap_up_enabled" + _attribute_name = "double_tap_up_enabled" + _attr_translation_key = "double_tap_up_enabled" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliDoubleTapDownEnabled(ZHASwitchConfigurationEntity): + """Inovelli double tap down enabled.""" + + _unique_id_suffix = "double_tap_down_enabled" + _attribute_name = "double_tap_down_enabled" + _attr_translation_key = "double_tap_down_enabled" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliAuxSwitchScenes(ZHASwitchConfigurationEntity): + """Inovelli unique aux switch scenes.""" + + _unique_id_suffix = "aux_switch_scenes" + _attribute_name = "aux_switch_scenes" + _attr_translation_key = "aux_switch_scenes" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliBindingOffToOnSyncLevel(ZHASwitchConfigurationEntity): + """Inovelli send move to level with on/off to bound devices.""" + + _unique_id_suffix = "binding_off_to_on_sync_level" + _attribute_name = "binding_off_to_on_sync_level" + _attr_translation_key = "binding_off_to_on_sync_level" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliLocalProtection(ZHASwitchConfigurationEntity): + """Inovelli local protection control.""" + + _unique_id_suffix = "local_protection" + _attribute_name = "local_protection" + _attr_translation_key = "local_protection" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliOnOffLEDMode(ZHASwitchConfigurationEntity): + """Inovelli only 1 LED mode control.""" + + _unique_id_suffix = "on_off_led_mode" + _attribute_name = "on_off_led_mode" + _attr_translation_key = "one_led_mode" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliFirmwareProgressLED(ZHASwitchConfigurationEntity): + """Inovelli firmware progress LED control.""" + + _unique_id_suffix = "firmware_progress_led" + _attribute_name = "firmware_progress_led" + _attr_translation_key = "firmware_progress_led" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliRelayClickInOnOffMode(ZHASwitchConfigurationEntity): + """Inovelli relay click in on off mode control.""" + + _unique_id_suffix = "relay_click_in_on_off_mode" + _attribute_name = "relay_click_in_on_off_mode" + _attr_translation_key = "relay_click_in_on_off_mode" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, +) +class InovelliDisableDoubleTapClearNotificationsMode(ZHASwitchConfigurationEntity): + """Inovelli disable clear notifications double tap control.""" + + _unique_id_suffix = "disable_clear_notifications_double_tap" + _attribute_name = "disable_clear_notifications_double_tap" + _attr_translation_key = "disable_clear_notifications_double_tap" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} +) +class AqaraPetFeederLEDIndicator(ZHASwitchConfigurationEntity): + """Representation of a LED indicator configuration entity.""" + + _unique_id_suffix = "disable_led_indicator" + _attribute_name = "disable_led_indicator" + _attr_translation_key = "led_indicator" + _force_inverted = True + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} +) +class AqaraPetFeederChildLock(ZHASwitchConfigurationEntity): + """Representation of a child lock configuration entity.""" 
+ + _unique_id_suffix = "child_lock" + _attribute_name = "child_lock" + _attr_translation_key = "child_lock" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_ON_OFF, quirk_ids=TUYA_PLUG_ONOFF +) +class TuyaChildLockSwitch(ZHASwitchConfigurationEntity): + """Representation of a child lock configuration entity.""" + + _unique_id_suffix = "child_lock" + _attribute_name = "child_lock" + _attr_translation_key = "child_lock" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} +) +class AqaraThermostatWindowDetection(ZHASwitchConfigurationEntity): + """Representation of an Aqara thermostat window detection configuration entity.""" + + _unique_id_suffix = "window_detection" + _attribute_name = "window_detection" + _attr_translation_key = "window_detection" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} +) +class AqaraThermostatValveDetection(ZHASwitchConfigurationEntity): + """Representation of an Aqara thermostat valve detection configuration entity.""" + + _unique_id_suffix = "valve_detection" + _attribute_name = "valve_detection" + _attr_translation_key = "valve_detection" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} +) +class AqaraThermostatChildLock(ZHASwitchConfigurationEntity): + """Representation of an Aqara thermostat child lock configuration entity.""" + + _unique_id_suffix = "child_lock" + _attribute_name = "child_lock" + _attr_translation_key = "child_lock" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} +) +class AqaraHeartbeatIndicator(ZHASwitchConfigurationEntity): + """Representation of a heartbeat indicator configuration entity for Aqara smoke sensors.""" + + _unique_id_suffix = "heartbeat_indicator" + _attribute_name = "heartbeat_indicator" + _attr_translation_key = "heartbeat_indicator" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} +) +class AqaraLinkageAlarm(ZHASwitchConfigurationEntity): + """Representation of a linkage alarm configuration entity for Aqara smoke sensors.""" + + _unique_id_suffix = "linkage_alarm" + _attribute_name = "linkage_alarm" + _attr_translation_key = "linkage_alarm" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} +) +class AqaraBuzzerManualMute(ZHASwitchConfigurationEntity): + """Representation of a buzzer manual mute configuration entity for Aqara smoke sensors.""" + + _unique_id_suffix = "buzzer_manual_mute" + _attribute_name = "buzzer_manual_mute" + _attr_translation_key = "buzzer_manual_mute" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} +) +class AqaraBuzzerManualAlarm(ZHASwitchConfigurationEntity): + """Representation of a buzzer manual mute configuration entity for Aqara smoke sensors.""" + + _unique_id_suffix = "buzzer_manual_alarm" + _attribute_name = "buzzer_manual_alarm" + _attr_translation_key = "buzzer_manual_alarm" + + +@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_COVER) +class WindowCoveringInversionSwitch(ZHASwitchConfigurationEntity): + """Representation of a switch that controls inversion for window covering devices. + + This is necessary because this cluster uses 2 attributes to control inversion. 
+ """ + + _unique_id_suffix = "inverted" + _attribute_name = WindowCovering.AttributeDefs.config_status.name + _attr_translation_key = "inverted" + + @classmethod + def create_entity( + cls, + unique_id: str, + zha_device: ZHADevice, + cluster_handlers: list[ClusterHandler], + **kwargs: Any, + ) -> Self | None: + """Entity Factory. + + Return entity if it is a supported configuration, otherwise return None + """ + cluster_handler = cluster_handlers[0] + window_covering_mode_attr = ( + WindowCovering.AttributeDefs.window_covering_mode.name + ) + # this entity needs 2 attributes to function + if ( + cls._attribute_name in cluster_handler.cluster.unsupported_attributes + or cls._attribute_name not in cluster_handler.cluster.attributes_by_name + or cluster_handler.cluster.get(cls._attribute_name) is None + or window_covering_mode_attr + in cluster_handler.cluster.unsupported_attributes + or window_covering_mode_attr + not in cluster_handler.cluster.attributes_by_name + or cluster_handler.cluster.get(window_covering_mode_attr) is None + ): + _LOGGER.debug( + "%s is not supported - skipping %s entity creation", + cls._attribute_name, + cls.__name__, + ) + return None + + return cls(unique_id, zha_device, cluster_handlers, **kwargs) + + @property + def is_on(self) -> bool: + """Return if the switch is on based on the statemachine.""" + config_status = ConfigStatus( + self._cluster_handler.cluster.get(self._attribute_name) + ) + return ConfigStatus.Open_up_commands_reversed in config_status + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + await self._async_on_off(True) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + await self._async_on_off(False) + + async def async_update(self) -> None: + """Attempt to retrieve the state of the entity.""" + self.debug("Polling current state") + await self._cluster_handler.get_attributes( + [ + self._attribute_name, + WindowCovering.AttributeDefs.window_covering_mode.name, + ], + from_cache=False, + only_cache=False, + ) + self.async_write_ha_state() + + async def _async_on_off(self, invert: bool) -> None: + """Turn the entity on or off.""" + name: str = WindowCovering.AttributeDefs.window_covering_mode.name + current_mode: WindowCoveringMode = WindowCoveringMode( + self._cluster_handler.cluster.get(name) + ) + send_command: bool = False + if invert and WindowCoveringMode.Motor_direction_reversed not in current_mode: + current_mode |= WindowCoveringMode.Motor_direction_reversed + send_command = True + elif not invert and WindowCoveringMode.Motor_direction_reversed in current_mode: + current_mode &= ~WindowCoveringMode.Motor_direction_reversed + send_command = True + if send_command: + await self._cluster_handler.write_attributes_safe({name: current_mode}) + await self.async_update() + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names="opple_cluster", models={"lumi.curtain.agl001"} +) +class AqaraE1CurtainMotorHooksLockedSwitch(ZHASwitchConfigurationEntity): + """Representation of a switch that controls whether the curtain motor hooks are locked.""" + + _unique_id_suffix = "hooks_lock" + _attribute_name = "hooks_lock" + _attr_translation_key = "hooks_locked" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossExternalOpenWindowDetected(ZHASwitchConfigurationEntity): + """Danfoss proprietary attribute for communicating an open window.""" + + _unique_id_suffix = "external_open_window_detected" + 
_attribute_name: str = "external_open_window_detected" + _attr_translation_key: str = "external_window_sensor" + _attr_icon: str = "mdi:window-open" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossWindowOpenFeature(ZHASwitchConfigurationEntity): + """Danfoss proprietary attribute enabling open window detection.""" + + _unique_id_suffix = "window_open_feature" + _attribute_name: str = "window_open_feature" + _attr_translation_key: str = "use_internal_window_detection" + _attr_icon: str = "mdi:window-open" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossMountingModeControl(ZHASwitchConfigurationEntity): + """Danfoss proprietary attribute for switching to mounting mode.""" + + _unique_id_suffix = "mounting_mode_control" + _attribute_name: str = "mounting_mode_control" + _attr_translation_key: str = "mounting_mode" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossRadiatorCovered(ZHASwitchConfigurationEntity): + """Danfoss proprietary attribute for communicating full usage of the external temperature sensor.""" + + _unique_id_suffix = "radiator_covered" + _attribute_name: str = "radiator_covered" + _attr_translation_key: str = "prioritize_external_temperature_sensor" + _attr_icon: str = "mdi:thermometer" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossHeatAvailable(ZHASwitchConfigurationEntity): + """Danfoss proprietary attribute for communicating available heat.""" + + _unique_id_suffix = "heat_available" + _attribute_name: str = "heat_available" + _attr_translation_key: str = "heat_available" + _attr_icon: str = "mdi:water-boiler" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossLoadBalancingEnable(ZHASwitchConfigurationEntity): + """Danfoss proprietary attribute for enabling load balancing.""" + + _unique_id_suffix = "load_balancing_enable" + _attribute_name: str = "load_balancing_enable" + _attr_translation_key: str = "use_load_balancing" + _attr_icon: str = "mdi:scale-balance" + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, + quirk_ids={DANFOSS_ALLY_THERMOSTAT}, +) +class DanfossAdaptationRunSettings(ZHASwitchConfigurationEntity): + """Danfoss proprietary attribute for enabling daily adaptation run. + + Actually a bitmap, but only the first bit is used. 
+ """ + + _unique_id_suffix = "adaptation_run_settings" + _attribute_name: str = "adaptation_run_settings" + _attr_translation_key: str = "adaptation_run_enabled" diff --git a/homeassistant/components/zha/update.py b/homeassistant/components/zha/update.py index 151d1c495e8..0cb80d13119 100644 --- a/homeassistant/components/zha/update.py +++ b/homeassistant/components/zha/update.py @@ -5,10 +5,11 @@ from __future__ import annotations import functools import logging import math -from typing import Any +from typing import TYPE_CHECKING, Any -from zha.exceptions import ZHAException -from zigpy.application import ControllerApplication +from zigpy.ota import OtaImageWithMetadata +from zigpy.zcl.clusters.general import Ota +from zigpy.zcl.foundation import Status from homeassistant.components.update import ( UpdateDeviceClass, @@ -16,8 +17,8 @@ from homeassistant.components.update import ( UpdateEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.const import EntityCategory, Platform +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -26,17 +27,24 @@ from homeassistant.helpers.update_coordinator import ( DataUpdateCoordinator, ) -from .entity import ZHAEntity -from .helpers import ( - SIGNAL_ADD_ENTITIES, - EntityData, - async_add_entities as zha_async_add_entities, - get_zha_data, - get_zha_gateway, -) +from .core import discovery +from .core.const import CLUSTER_HANDLER_OTA, SIGNAL_ADD_ENTITIES, SIGNAL_ATTR_UPDATED +from .core.helpers import get_zha_data, get_zha_gateway +from .core.registries import ZHA_ENTITIES +from .entity import ZhaEntity + +if TYPE_CHECKING: + from zigpy.application import ControllerApplication + + from .core.cluster_handlers import ClusterHandler + from .core.device import ZHADevice _LOGGER = logging.getLogger(__name__) +CONFIG_DIAGNOSTIC_MATCH = functools.partial( + ZHA_ENTITIES.config_diagnostic_match, Platform.UPDATE +) + async def async_setup_entry( hass: HomeAssistant, @@ -45,26 +53,26 @@ async def async_setup_entry( ) -> None: """Set up the Zigbee Home Automation update from config entry.""" zha_data = get_zha_data(hass) - if zha_data.update_coordinator is None: - zha_data.update_coordinator = ZHAFirmwareUpdateCoordinator( - hass, get_zha_gateway(hass).application_controller - ) entities_to_create = zha_data.platforms[Platform.UPDATE] + coordinator = ZHAFirmwareUpdateCoordinator( + hass, get_zha_gateway(hass).application_controller + ) + unsub = async_dispatcher_connect( hass, SIGNAL_ADD_ENTITIES, functools.partial( - zha_async_add_entities, + discovery.async_add_entities, async_add_entities, - ZHAFirmwareUpdateEntity, entities_to_create, + coordinator=coordinator, ), ) config_entry.async_on_unload(unsub) -class ZHAFirmwareUpdateCoordinator(DataUpdateCoordinator[None]): # pylint: disable=hass-enforce-class-module +class ZHAFirmwareUpdateCoordinator(DataUpdateCoordinator[None]): # pylint: disable=hass-enforce-coordinator-module """Firmware update coordinator that broadcasts updates network-wide.""" def __init__( @@ -85,91 +93,128 @@ class ZHAFirmwareUpdateCoordinator(DataUpdateCoordinator[None]): # pylint: disa await self.controller_application.ota.broadcast_notify(jitter=100) 
+@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_OTA) class ZHAFirmwareUpdateEntity( - ZHAEntity, CoordinatorEntity[ZHAFirmwareUpdateCoordinator], UpdateEntity + ZhaEntity, CoordinatorEntity[ZHAFirmwareUpdateCoordinator], UpdateEntity ): """Representation of a ZHA firmware update entity.""" + _unique_id_suffix = "firmware_update" + _attr_entity_category = EntityCategory.CONFIG _attr_device_class = UpdateDeviceClass.FIRMWARE _attr_supported_features = ( UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS | UpdateEntityFeature.SPECIFIC_VERSION - | UpdateEntityFeature.RELEASE_NOTES ) - def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: - """Initialize the ZHA siren.""" - zha_data = get_zha_data(entity_data.device_proxy.gateway_proxy.hass) - assert zha_data.update_coordinator is not None + def __init__( + self, + unique_id: str, + zha_device: ZHADevice, + channels: list[ClusterHandler], + coordinator: ZHAFirmwareUpdateCoordinator, + **kwargs: Any, + ) -> None: + """Initialize the ZHA update entity.""" + super().__init__(unique_id, zha_device, channels, **kwargs) + CoordinatorEntity.__init__(self, coordinator) - super().__init__(entity_data, coordinator=zha_data.update_coordinator, **kwargs) - CoordinatorEntity.__init__(self, zha_data.update_coordinator) + self._ota_cluster_handler: ClusterHandler = self.cluster_handlers[ + CLUSTER_HANDLER_OTA + ] + self._attr_installed_version: str | None = self._get_cluster_version() + self._attr_latest_version = self._attr_installed_version + self._latest_firmware: OtaImageWithMetadata | None = None - @property - def installed_version(self) -> str | None: - """Version installed and in use.""" - return self.entity_data.entity.installed_version + def _get_cluster_version(self) -> str | None: + """Synchronize current file version with the cluster.""" - @property - def in_progress(self) -> bool | int | None: - """Update installation progress. + if self._ota_cluster_handler.current_file_version is not None: + return f"0x{self._ota_cluster_handler.current_file_version:08x}" - Needs UpdateEntityFeature.PROGRESS flag to be set for it to be used. + return None - Can either return a boolean (True if in progress, False if not) - or an integer to indicate the progress in from 0 to 100%. 
- """ - if not self.entity_data.entity.in_progress: - return self.entity_data.entity.in_progress + @callback + def attribute_updated(self, attrid: int, name: str, value: Any) -> None: + """Handle attribute updates on the OTA cluster.""" + if attrid == Ota.AttributeDefs.current_file_version.id: + self._attr_installed_version = f"0x{value:08x}" + self.async_write_ha_state() - # Stay in an indeterminate state until we actually send something - if self.entity_data.entity.progress == 0: - return True + @callback + def device_ota_update_available( + self, image: OtaImageWithMetadata, current_file_version: int + ) -> None: + """Handle ota update available signal from Zigpy.""" + self._latest_firmware = image + self._attr_latest_version = f"0x{image.version:08x}" + self._attr_installed_version = f"0x{current_file_version:08x}" - # Rescale 0-100% to 2-100% to avoid 0 and 1 colliding with None, False, and True - return int(math.ceil(2 + 98 * self.entity_data.entity.progress / 100)) + if image.metadata.changelog: + self._attr_release_summary = image.metadata.changelog - @property - def latest_version(self) -> str | None: - """Latest version available for install.""" - return self.entity_data.entity.latest_version + self.async_write_ha_state() - @property - def release_summary(self) -> str | None: - """Summary of the release notes or changelog. + @callback + def _update_progress(self, current: int, total: int, progress: float) -> None: + """Update install progress on event.""" + # If we are not supposed to be updating, do nothing + if self._attr_in_progress is False: + return - This is not suitable for long changelogs, but merely suitable - for a short excerpt update description of max 255 characters. - """ - return self.entity_data.entity.release_summary + # Remap progress to 2-100 to avoid 0 and 1 + self._attr_in_progress = int(math.ceil(2 + 98 * progress / 100)) + self.async_write_ha_state() - async def async_release_notes(self) -> str | None: - """Return full release notes. - - This is suitable for a long changelog that does not fit in the release_summary - property. The returned string can contain markdown. - """ - return self.entity_data.entity.release_notes - - @property - def release_url(self) -> str | None: - """URL to the full release notes of the latest version available.""" - return self.entity_data.entity.release_url - - # We explicitly convert ZHA exceptions to HA exceptions here so there is no need to - # use the `@convert_zha_error_to_ha_error` decorator. 
async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: """Install an update.""" + assert self._latest_firmware is not None + + # Set the progress to an indeterminate state + self._attr_in_progress = True + self.async_write_ha_state() + try: - await self.entity_data.entity.async_install(version=version) - except ZHAException as exc: - raise HomeAssistantError(exc) from exc - finally: + result = await self.zha_device.device.update_firmware( + image=self._latest_firmware, + progress_callback=self._update_progress, + ) + except Exception as ex: + raise HomeAssistantError(f"Update was not successful: {ex}") from ex + + # If we tried to install firmware that is no longer compatible with the device, + # bail out + if result == Status.NO_IMAGE_AVAILABLE: + self._attr_latest_version = self._attr_installed_version self.async_write_ha_state() + # If the update finished but was not successful, we should also throw an error + if result != Status.SUCCESS: + raise HomeAssistantError(f"Update was not successful: {result}") + + # Clear the state + self._latest_firmware = None + self._attr_in_progress = False + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Call when entity is added.""" + await super().async_added_to_hass() + + # OTA events are sent by the device + self.zha_device.device.add_listener(self) + self.async_accept_signal( + self._ota_cluster_handler, SIGNAL_ATTR_UPDATED, self.attribute_updated + ) + + async def async_will_remove_from_hass(self) -> None: + """Call when entity will be removed.""" + await super().async_will_remove_from_hass() + self._attr_in_progress = False + async def async_update(self) -> None: """Update the entity.""" await CoordinatorEntity.async_update(self) diff --git a/homeassistant/components/zha/websocket_api.py b/homeassistant/components/zha/websocket_api.py index 5ffd7117d93..cb95e930b1a 100644 --- a/homeassistant/components/zha/websocket_api.py +++ b/homeassistant/components/zha/websocket_api.py @@ -7,7 +7,28 @@ import logging from typing import TYPE_CHECKING, Any, Literal, NamedTuple, cast import voluptuous as vol -from zha.application.const import ( +import zigpy.backups +from zigpy.config import CONF_DEVICE +from zigpy.config.validators import cv_boolean +from zigpy.types.named import EUI64, KeyData +from zigpy.zcl.clusters.security import IasAce +import zigpy.zdo.types as zdo_types + +from homeassistant.components import websocket_api +from homeassistant.const import ATTR_COMMAND, ATTR_ID, ATTR_NAME +from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.helpers import entity_registry as er +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.service import async_register_admin_service +from homeassistant.helpers.typing import VolDictType, VolSchemaType + +from .api import ( + async_change_channel, + async_get_active_network_settings, + async_get_radio_type, +) +from .core.const import ( ATTR_ARGS, ATTR_ATTRIBUTE, ATTR_CLUSTER_ID, @@ -26,51 +47,13 @@ from zha.application.const import ( ATTR_WARNING_DEVICE_STROBE, ATTR_WARNING_DEVICE_STROBE_DUTY_CYCLE, ATTR_WARNING_DEVICE_STROBE_INTENSITY, + BINDINGS, CLUSTER_COMMAND_SERVER, CLUSTER_COMMANDS_CLIENT, CLUSTER_COMMANDS_SERVER, + CLUSTER_HANDLER_IAS_WD, CLUSTER_TYPE_IN, CLUSTER_TYPE_OUT, - WARNING_DEVICE_MODE_EMERGENCY, - WARNING_DEVICE_SOUND_HIGH, - WARNING_DEVICE_SQUAWK_MODE_ARMED, - WARNING_DEVICE_STROBE_HIGH, - 
WARNING_DEVICE_STROBE_YES, - ZHA_CLUSTER_HANDLER_MSG, -) -from zha.application.gateway import Gateway -from zha.application.helpers import ( - async_is_bindable_target, - convert_install_code, - get_matched_clusters, - qr_to_install_code, -) -from zha.zigbee.cluster_handlers.const import CLUSTER_HANDLER_IAS_WD -from zha.zigbee.device import Device -from zha.zigbee.group import GroupMemberReference -import zigpy.backups -from zigpy.config import CONF_DEVICE -from zigpy.config.validators import cv_boolean -from zigpy.types.named import EUI64, KeyData -from zigpy.zcl.clusters.security import IasAce -import zigpy.zdo.types as zdo_types - -from homeassistant.components import websocket_api -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_COMMAND, ATTR_ID, ATTR_NAME -from homeassistant.core import HomeAssistant, ServiceCall, callback -from homeassistant.helpers import entity_registry as er -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.service import async_register_admin_service -from homeassistant.helpers.typing import VolDictType, VolSchemaType - -from .api import ( - async_change_channel, - async_get_active_network_settings, - async_get_radio_type, -) -from .const import ( CUSTOM_CONFIGURATION, DOMAIN, EZSP_OVERWRITE_EUI64, @@ -78,23 +61,32 @@ from .const import ( GROUP_IDS, GROUP_NAME, MFG_CLUSTER_ID_START, + WARNING_DEVICE_MODE_EMERGENCY, + WARNING_DEVICE_SOUND_HIGH, + WARNING_DEVICE_SQUAWK_MODE_ARMED, + WARNING_DEVICE_STROBE_HIGH, + WARNING_DEVICE_STROBE_YES, ZHA_ALARM_OPTIONS, - ZHA_OPTIONS, + ZHA_CLUSTER_HANDLER_MSG, + ZHA_CONFIG_SCHEMAS, ) -from .helpers import ( - CONF_ZHA_ALARM_SCHEMA, - CONF_ZHA_OPTIONS_SCHEMA, - EntityReference, - ZHAGatewayProxy, +from .core.gateway import EntityReference +from .core.group import GroupMember +from .core.helpers import ( async_cluster_exists, + async_is_bindable_target, cluster_command_schema_to_vol_schema, - get_config_entry, + convert_install_code, + get_matched_clusters, get_zha_gateway, - get_zha_gateway_proxy, + qr_to_install_code, ) if TYPE_CHECKING: - from homeassistant.components.websocket_api import ActiveConnection + from homeassistant.components.websocket_api.connection import ActiveConnection + + from .core.device import ZHADevice + from .core.gateway import ZHAGateway _LOGGER = logging.getLogger(__name__) @@ -113,8 +105,6 @@ ATTR_SOURCE_IEEE = "source_ieee" ATTR_TARGET_IEEE = "target_ieee" ATTR_QR_CODE = "qr_code" -BINDINGS = "bindings" - SERVICE_PERMIT = "permit" SERVICE_REMOVE = "remove" SERVICE_SET_ZIGBEE_CLUSTER_ATTRIBUTE = "set_zigbee_cluster_attribute" @@ -244,12 +234,6 @@ SERVICE_SCHEMAS: dict[str, VolSchemaType] = { } -ZHA_CONFIG_SCHEMAS = { - ZHA_OPTIONS: CONF_ZHA_OPTIONS_SCHEMA, - ZHA_ALARM_OPTIONS: CONF_ZHA_ALARM_SCHEMA, -} - - class ClusterBinding(NamedTuple): """Describes a cluster binding.""" @@ -259,9 +243,9 @@ class ClusterBinding(NamedTuple): endpoint_id: int -def _cv_group_member(value: dict[str, Any]) -> GroupMemberReference: +def _cv_group_member(value: dict[str, Any]) -> GroupMember: """Transform a group member.""" - return GroupMemberReference( + return GroupMember( ieee=value[ATTR_IEEE], endpoint_id=value[ATTR_ENDPOINT_ID], ) @@ -322,7 +306,7 @@ async def websocket_permit_devices( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Permit ZHA zigbee devices.""" - zha_gateway_proxy = get_zha_gateway_proxy(hass) + zha_gateway = 
get_zha_gateway(hass) duration: int = msg[ATTR_DURATION] ieee: EUI64 | None = msg.get(ATTR_IEEE) @@ -337,30 +321,28 @@ async def websocket_permit_devices( @callback def async_cleanup() -> None: """Remove signal listener and turn off debug mode.""" - zha_gateway_proxy.async_disable_debug_mode() + zha_gateway.async_disable_debug_mode() remove_dispatcher_function() connection.subscriptions[msg["id"]] = async_cleanup - zha_gateway_proxy.async_enable_debug_mode() + zha_gateway.async_enable_debug_mode() src_ieee: EUI64 link_key: KeyData if ATTR_SOURCE_IEEE in msg: src_ieee = msg[ATTR_SOURCE_IEEE] link_key = msg[ATTR_INSTALL_CODE] _LOGGER.debug("Allowing join for %s device with link key", src_ieee) - await zha_gateway_proxy.gateway.application_controller.permit_with_link_key( + await zha_gateway.application_controller.permit_with_link_key( time_s=duration, node=src_ieee, link_key=link_key ) elif ATTR_QR_CODE in msg: src_ieee, link_key = msg[ATTR_QR_CODE] _LOGGER.debug("Allowing join for %s device with link key", src_ieee) - await zha_gateway_proxy.gateway.application_controller.permit_with_link_key( + await zha_gateway.application_controller.permit_with_link_key( time_s=duration, node=src_ieee, link_key=link_key ) else: - await zha_gateway_proxy.gateway.application_controller.permit( - time_s=duration, node=ieee - ) + await zha_gateway.application_controller.permit(time_s=duration, node=ieee) connection.send_result(msg[ID]) @@ -371,26 +353,26 @@ async def websocket_get_devices( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA devices.""" - zha_gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - devices = [ - device.zha_device_info for device in zha_gateway_proxy.device_proxies.values() - ] + zha_gateway = get_zha_gateway(hass) + devices = [device.zha_device_info for device in zha_gateway.devices.values()] connection.send_result(msg[ID], devices) @callback -def _get_entity_name(zha_gateway: Gateway, entity_ref: EntityReference) -> str | None: +def _get_entity_name( + zha_gateway: ZHAGateway, entity_ref: EntityReference +) -> str | None: entity_registry = er.async_get(zha_gateway.hass) - entry = entity_registry.async_get(entity_ref.ha_entity_id) + entry = entity_registry.async_get(entity_ref.reference_id) return entry.name if entry else None @callback def _get_entity_original_name( - zha_gateway: Gateway, entity_ref: EntityReference + zha_gateway: ZHAGateway, entity_ref: EntityReference ) -> str | None: entity_registry = er.async_get(zha_gateway.hass) - entry = entity_registry.async_get(entity_ref.ha_entity_id) + entry = entity_registry.async_get(entity_ref.reference_id) return entry.original_name if entry else None @@ -401,36 +383,32 @@ async def websocket_get_groupable_devices( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA devices that can be grouped.""" - zha_gateway_proxy = get_zha_gateway_proxy(hass) + zha_gateway = get_zha_gateway(hass) - devices = [ - device - for device in zha_gateway_proxy.device_proxies.values() - if device.device.is_groupable - ] + devices = [device for device in zha_gateway.devices.values() if device.is_groupable] groupable_devices: list[dict[str, Any]] = [] for device in devices: - entity_refs = zha_gateway_proxy.ha_entity_refs[device.device.ieee] + entity_refs = zha_gateway.device_registry[device.ieee] groupable_devices.extend( { "endpoint_id": ep_id, "entities": [ { - "name": _get_entity_name(zha_gateway_proxy, entity_ref), + "name": _get_entity_name(zha_gateway, 
entity_ref), "original_name": _get_entity_original_name( - zha_gateway_proxy, entity_ref + zha_gateway, entity_ref ), } for entity_ref in entity_refs - if list(entity_ref.entity_data.entity.cluster_handlers.values())[ + if list(entity_ref.cluster_handlers.values())[ 0 ].cluster.endpoint.endpoint_id == ep_id ], "device": device.zha_device_info, } - for ep_id in device.device.async_get_groupable_endpoints() + for ep_id in device.async_get_groupable_endpoints() ) connection.send_result(msg[ID], groupable_devices) @@ -443,8 +421,8 @@ async def websocket_get_groups( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA groups.""" - zha_gateway_proxy = get_zha_gateway_proxy(hass) - groups = [group.group_info for group in zha_gateway_proxy.group_proxies.values()] + zha_gateway = get_zha_gateway(hass) + groups = [group.group_info for group in zha_gateway.groups.values()] connection.send_result(msg[ID], groups) @@ -460,10 +438,10 @@ async def websocket_get_device( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA devices.""" - zha_gateway_proxy = get_zha_gateway_proxy(hass) + zha_gateway = get_zha_gateway(hass) ieee: EUI64 = msg[ATTR_IEEE] - if not (zha_device := zha_gateway_proxy.device_proxies.get(ieee)): + if not (zha_device := zha_gateway.devices.get(ieee)): connection.send_message( websocket_api.error_message( msg[ID], websocket_api.ERR_NOT_FOUND, "ZHA Device not found" @@ -487,10 +465,10 @@ async def websocket_get_group( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA group.""" - zha_gateway_proxy = get_zha_gateway_proxy(hass) + zha_gateway = get_zha_gateway(hass) group_id: int = msg[GROUP_ID] - if not (zha_group := zha_gateway_proxy.group_proxies.get(group_id)): + if not (zha_group := zha_gateway.groups.get(group_id)): connection.send_message( websocket_api.error_message( msg[ID], websocket_api.ERR_NOT_FOUND, "ZHA Group not found" @@ -516,17 +494,13 @@ async def websocket_add_group( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Add a new ZHA group.""" - zha_gateway = get_zha_gateway_proxy(hass) + zha_gateway = get_zha_gateway(hass) group_name: str = msg[GROUP_NAME] group_id: int | None = msg.get(GROUP_ID) - members: list[GroupMemberReference] | None = msg.get(ATTR_MEMBERS) - group = await zha_gateway.gateway.async_create_zigpy_group( - group_name, members, group_id - ) + members: list[GroupMember] | None = msg.get(ATTR_MEMBERS) + group = await zha_gateway.async_create_zigpy_group(group_name, members, group_id) assert group - connection.send_result( - msg[ID], zha_gateway.group_proxies[group.group_id].group_info - ) + connection.send_result(msg[ID], group.group_info) @websocket_api.require_admin @@ -541,18 +515,17 @@ async def websocket_remove_groups( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Remove the specified ZHA groups.""" - zha_gateway = get_zha_gateway_proxy(hass) + zha_gateway = get_zha_gateway(hass) group_ids: list[int] = msg[GROUP_IDS] if len(group_ids) > 1: tasks = [ - zha_gateway.gateway.async_remove_zigpy_group(group_id) - for group_id in group_ids + zha_gateway.async_remove_zigpy_group(group_id) for group_id in group_ids ] await asyncio.gather(*tasks) else: - await zha_gateway.gateway.async_remove_zigpy_group(group_ids[0]) - ret_groups = [group.group_info for group in zha_gateway.group_proxies.values()] + await zha_gateway.async_remove_zigpy_group(group_ids[0]) + ret_groups 
= [group.group_info for group in zha_gateway.groups.values()] connection.send_result(msg[ID], ret_groups) @@ -570,9 +543,8 @@ async def websocket_add_group_members( ) -> None: """Add members to a ZHA group.""" zha_gateway = get_zha_gateway(hass) - zha_gateway_proxy = get_zha_gateway_proxy(hass) group_id: int = msg[GROUP_ID] - members: list[GroupMemberReference] = msg[ATTR_MEMBERS] + members: list[GroupMember] = msg[ATTR_MEMBERS] if not (zha_group := zha_gateway.groups.get(group_id)): connection.send_message( @@ -583,9 +555,8 @@ async def websocket_add_group_members( return await zha_group.async_add_members(members) - ret_group = zha_gateway_proxy.get_group_proxy(group_id) - assert ret_group - connection.send_result(msg[ID], ret_group.group_info) + ret_group = zha_group.group_info + connection.send_result(msg[ID], ret_group) @websocket_api.require_admin @@ -602,9 +573,8 @@ async def websocket_remove_group_members( ) -> None: """Remove members from a ZHA group.""" zha_gateway = get_zha_gateway(hass) - zha_gateway_proxy = get_zha_gateway_proxy(hass) group_id: int = msg[GROUP_ID] - members: list[GroupMemberReference] = msg[ATTR_MEMBERS] + members: list[GroupMember] = msg[ATTR_MEMBERS] if not (zha_group := zha_gateway.groups.get(group_id)): connection.send_message( @@ -615,9 +585,8 @@ async def websocket_remove_group_members( return await zha_group.async_remove_members(members) - ret_group = zha_gateway_proxy.get_group_proxy(group_id) - assert ret_group - connection.send_result(msg[ID], ret_group.group_info) + ret_group = zha_group.group_info + connection.send_result(msg[ID], ret_group) @websocket_api.require_admin @@ -634,7 +603,7 @@ async def websocket_reconfigure_node( """Reconfigure a ZHA nodes entities by its ieee address.""" zha_gateway = get_zha_gateway(hass) ieee: EUI64 = msg[ATTR_IEEE] - device: Device | None = zha_gateway.get_device(ieee) + device: ZHADevice | None = zha_gateway.get_device(ieee) async def forward_messages(data): """Forward events to websocket.""" @@ -896,15 +865,14 @@ async def websocket_get_bindable_devices( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Directly bind devices.""" - zha_gateway_proxy = get_zha_gateway_proxy(hass) + zha_gateway = get_zha_gateway(hass) source_ieee: EUI64 = msg[ATTR_IEEE] - source_device = zha_gateway_proxy.device_proxies.get(source_ieee) - assert source_device is not None + source_device = zha_gateway.get_device(source_ieee) devices = [ device.zha_device_info - for device in zha_gateway_proxy.device_proxies.values() - if async_is_bindable_target(source_device.device, device.device) + for device in zha_gateway.devices.values() + if async_is_bindable_target(source_device, device) ] _LOGGER.debug( @@ -1025,7 +993,7 @@ async def websocket_unbind_group( async def async_binding_operation( - zha_gateway: Gateway, + zha_gateway: ZHAGateway, source_ieee: EUI64, target_ieee: EUI64, operation: zdo_types.ZDOCmd, @@ -1079,7 +1047,7 @@ async def websocket_get_configuration( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA configuration.""" - config_entry: ConfigEntry = get_config_entry(hass) + zha_gateway = get_zha_gateway(hass) import voluptuous_serialize # pylint: disable=import-outside-toplevel def custom_serializer(schema: Any) -> Any: @@ -1102,9 +1070,9 @@ async def websocket_get_configuration( data["schemas"][section] = voluptuous_serialize.convert( schema, custom_serializer=custom_serializer ) - data["data"][section] = 
config_entry.options.get(CUSTOM_CONFIGURATION, {}).get( - section, {} - ) + data["data"][section] = zha_gateway.config_entry.options.get( + CUSTOM_CONFIGURATION, {} + ).get(section, {}) # send default values for unconfigured options for entry in data["schemas"][section]: @@ -1126,8 +1094,8 @@ async def websocket_update_zha_configuration( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Update the ZHA configuration.""" - config_entry: ConfigEntry = get_config_entry(hass) - options = config_entry.options + zha_gateway = get_zha_gateway(hass) + options = zha_gateway.config_entry.options data_to_save = {**options, CUSTOM_CONFIGURATION: msg["data"]} for section, schema in ZHA_CONFIG_SCHEMAS.items(): @@ -1158,8 +1126,10 @@ async def websocket_update_zha_configuration( data_to_save, ) - hass.config_entries.async_update_entry(config_entry, options=data_to_save) - status = await hass.config_entries.async_reload(config_entry.entry_id) + hass.config_entries.async_update_entry( + zha_gateway.config_entry, options=data_to_save + ) + status = await hass.config_entries.async_reload(zha_gateway.config_entry.entry_id) connection.send_result(msg[ID], status) @@ -1172,11 +1142,10 @@ async def websocket_get_network_settings( """Get ZHA network settings.""" backup = async_get_active_network_settings(hass) zha_gateway = get_zha_gateway(hass) - config_entry: ConfigEntry = get_config_entry(hass) connection.send_result( msg[ID], { - "radio_type": async_get_radio_type(hass, config_entry).name, + "radio_type": async_get_radio_type(hass, zha_gateway.config_entry).name, "device": zha_gateway.application_controller.config[CONF_DEVICE], "settings": backup.as_dict(), }, @@ -1311,8 +1280,12 @@ def async_load_api(hass: HomeAssistant) -> None: """Remove a node from the network.""" zha_gateway = get_zha_gateway(hass) ieee: EUI64 = service.data[ATTR_IEEE] + zha_device: ZHADevice | None = zha_gateway.get_device(ieee) + if zha_device is not None and zha_device.is_active_coordinator: + _LOGGER.info("Removing the coordinator (%s) is not allowed", ieee) + return _LOGGER.info("Removing node %s", ieee) - await zha_gateway.async_remove_device(ieee) + await application_controller.remove(ieee) async_register_admin_service( hass, DOMAIN, SERVICE_REMOVE, remove, schema=SERVICE_SCHEMAS[IEEE_SERVICE] diff --git a/homeassistant/components/zhong_hong/manifest.json b/homeassistant/components/zhong_hong/manifest.json index 9da0e9ab72b..06cc06faf0b 100644 --- a/homeassistant/components/zhong_hong/manifest.json +++ b/homeassistant/components/zhong_hong/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/zhong_hong", "iot_class": "local_push", "loggers": ["zhong_hong_hvac"], - "requirements": ["zhong-hong-hvac==1.0.13"] + "requirements": ["zhong-hong-hvac==1.0.12"] } diff --git a/homeassistant/components/ziggo_mediabox_xl/media_player.py b/homeassistant/components/ziggo_mediabox_xl/media_player.py index 6e858b454e9..a81a206b5b2 100644 --- a/homeassistant/components/ziggo_mediabox_xl/media_player.py +++ b/homeassistant/components/ziggo_mediabox_xl/media_player.py @@ -64,7 +64,7 @@ def setup_platform( if mediabox.test_connection(): connection_successful = True elif manual_config: - _LOGGER.error("Can't connect to %s", host) + _LOGGER.info("Can't connect to %s", host) else: _LOGGER.error("Can't connect to %s", host) # When the device is in eco mode it's not connected to the network @@ -77,7 +77,7 @@ def setup_platform( except OSError as error: _LOGGER.error("Can't connect to 
%s: %s", host, error) else: - _LOGGER.warning("Ignoring duplicate Ziggo Mediabox XL %s", host) + _LOGGER.info("Ignoring duplicate Ziggo Mediabox XL %s", host) add_entities(hosts, True) diff --git a/homeassistant/components/zone/icons.json b/homeassistant/components/zone/icons.json index a9829425570..a03163179cb 100644 --- a/homeassistant/components/zone/icons.json +++ b/homeassistant/components/zone/icons.json @@ -1,7 +1,5 @@ { "services": { - "reload": { - "service": "mdi:reload" - } + "reload": "mdi:reload" } } diff --git a/homeassistant/components/zoneminder/camera.py b/homeassistant/components/zoneminder/camera.py index 21513b4bed4..ab938472ed7 100644 --- a/homeassistant/components/zoneminder/camera.py +++ b/homeassistant/components/zoneminder/camera.py @@ -35,7 +35,7 @@ def setup_platform( ) for monitor in monitors: - _LOGGER.debug("Initializing camera %s", monitor.id) + _LOGGER.info("Initializing camera %s", monitor.id) cameras.append(ZoneMinderCamera(monitor, zm_client.verify_ssl)) add_entities(cameras) diff --git a/homeassistant/components/zoneminder/icons.json b/homeassistant/components/zoneminder/icons.json index 3f9f6410a22..8ca180d7399 100644 --- a/homeassistant/components/zoneminder/icons.json +++ b/homeassistant/components/zoneminder/icons.json @@ -1,7 +1,5 @@ { "services": { - "set_run_state": { - "service": "mdi:cog" - } + "set_run_state": "mdi:cog" } } diff --git a/homeassistant/components/zwave_js/__init__.py b/homeassistant/components/zwave_js/__init__.py index 06b8214d941..dedae10400f 100644 --- a/homeassistant/components/zwave_js/__init__.py +++ b/homeassistant/components/zwave_js/__init__.py @@ -100,7 +100,6 @@ from .const import ( DATA_CLIENT, DOMAIN, EVENT_DEVICE_ADDED_TO_REGISTRY, - EVENT_VALUE_UPDATED, LIB_LOGGER, LOGGER, LR_ADDON_VERSION, @@ -354,7 +353,7 @@ class ControllerEvents: self.discovered_value_ids: dict[str, set[str]] = defaultdict(set) self.driver_events = driver_events self.dev_reg = driver_events.dev_reg - self.registered_unique_ids: dict[str, dict[Platform, set[str]]] = defaultdict( + self.registered_unique_ids: dict[str, dict[str, set[str]]] = defaultdict( lambda: defaultdict(set) ) self.node_events = NodeEvents(hass, self) @@ -624,7 +623,7 @@ class NodeEvents: ) # add listeners to handle new values that get added later - for event in ("value added", EVENT_VALUE_UPDATED, "metadata updated"): + for event in ("value added", "value updated", "metadata updated"): self.config_entry.async_on_unload( node.on( event, @@ -723,7 +722,7 @@ class NodeEvents: # add listener for value updated events self.config_entry.async_on_unload( disc_info.node.on( - EVENT_VALUE_UPDATED, + "value updated", lambda event: self.async_on_value_updated_fire_event( value_updates_disc_info, event["value"] ), diff --git a/homeassistant/components/zwave_js/api.py b/homeassistant/components/zwave_js/api.py index bd49e85b601..8f81790708f 100644 --- a/homeassistant/components/zwave_js/api.py +++ b/homeassistant/components/zwave_js/api.py @@ -13,10 +13,8 @@ from zwave_js_server.client import Client from zwave_js_server.const import ( CommandClass, ExclusionStrategy, - InclusionState, InclusionStrategy, LogLevel, - NodeStatus, Protocols, ProvisioningEntryStatus, QRCodeVersion, @@ -43,7 +41,6 @@ from zwave_js_server.model.controller.firmware import ( ControllerFirmwareUpdateResult, ) from zwave_js_server.model.driver import Driver -from zwave_js_server.model.endpoint import Endpoint from zwave_js_server.model.log_config import LogConfig from zwave_js_server.model.log_message import 
LogMessage from zwave_js_server.model.node import Node, NodeStatistics @@ -56,7 +53,6 @@ from zwave_js_server.model.utils import ( async_parse_qr_code_string, async_try_parse_dsk_from_qr_code_string, ) -from zwave_js_server.model.value import ConfigurationValueFormat from zwave_js_server.util.node import async_set_config_parameter from homeassistant.components import websocket_api @@ -77,11 +73,6 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from .config_validation import BITMASK_SCHEMA from .const import ( - ATTR_COMMAND_CLASS, - ATTR_ENDPOINT, - ATTR_METHOD_NAME, - ATTR_PARAMETERS, - ATTR_WAIT_FOR_RESULT, CONF_DATA_COLLECTION_OPTED_IN, DATA_CLIENT, EVENT_DEVICE_ADDED_TO_REGISTRY, @@ -107,8 +98,6 @@ PROPERTY = "property" PROPERTY_KEY = "property_key" ENDPOINT = "endpoint" VALUE = "value" -VALUE_SIZE = "value_size" -VALUE_FORMAT = "value_format" # constants for log config commands CONFIG = "config" @@ -419,8 +408,6 @@ def async_register_api(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_rebuild_node_routes) websocket_api.async_register_command(hass, websocket_set_config_parameter) websocket_api.async_register_command(hass, websocket_get_config_parameters) - websocket_api.async_register_command(hass, websocket_get_raw_config_parameter) - websocket_api.async_register_command(hass, websocket_set_raw_config_parameter) websocket_api.async_register_command(hass, websocket_subscribe_log_updates) websocket_api.async_register_command(hass, websocket_update_log_config) websocket_api.async_register_command(hass, websocket_get_log_config) @@ -448,8 +435,6 @@ def async_register_api(hass: HomeAssistant) -> None: ) websocket_api.async_register_command(hass, websocket_subscribe_node_statistics) websocket_api.async_register_command(hass, websocket_hard_reset_controller) - websocket_api.async_register_command(hass, websocket_node_capabilities) - websocket_api.async_register_command(hass, websocket_invoke_cc_api) hass.http.register_view(FirmwareUploadView(dr.async_get(hass))) @@ -708,30 +693,6 @@ async def websocket_add_node( ) ) - @callback - def forward_node_added( - node: Node, low_security: bool, low_security_reason: str | None - ) -> None: - interview_unsubs = [ - node.on("interview started", forward_event), - node.on("interview completed", forward_event), - node.on("interview stage completed", forward_stage), - node.on("interview failed", forward_event), - ] - unsubs.extend(interview_unsubs) - node_details = { - "node_id": node.node_id, - "status": node.status, - "ready": node.ready, - "low_security": low_security, - "low_security_reason": low_security_reason, - } - connection.send_message( - websocket_api.event_message( - msg[ID], {"event": "node added", "node": node_details} - ) - ) - @callback def forward_requested_grant(event: dict) -> None: connection.send_message( @@ -766,10 +727,24 @@ async def websocket_add_node( @callback def node_added(event: dict) -> None: - forward_node_added( - event["node"], - event["result"].get("lowSecurity", False), - event["result"].get("lowSecurityReason"), + node = event["node"] + interview_unsubs = [ + node.on("interview started", forward_event), + node.on("interview completed", forward_event), + node.on("interview stage completed", forward_stage), + node.on("interview failed", forward_event), + ] + unsubs.extend(interview_unsubs) + node_details = { + "node_id": node.node_id, + "status": node.status, + "ready": node.ready, + "low_security": event["result"].get("lowSecurity", False), + } + 
connection.send_message( + websocket_api.event_message( + msg[ID], {"event": "node added", "node": node_details} + ) ) @callback @@ -801,39 +776,25 @@ async def websocket_add_node( ] msg[DATA_UNSUBSCRIBE] = unsubs - if controller.inclusion_state == InclusionState.INCLUDING: - connection.send_result( - msg[ID], - True, # Inclusion is already in progress + try: + result = await controller.async_begin_inclusion( + INCLUSION_STRATEGY_NOT_SMART_START[inclusion_strategy.value], + force_security=force_security, + provisioning=provisioning, + dsk=dsk, ) - # Check for nodes that have been added but not fully included - for node in controller.nodes.values(): - if node.status != NodeStatus.DEAD and not node.ready: - forward_node_added( - node, - not node.is_secure, - None, - ) - else: - try: - result = await controller.async_begin_inclusion( - INCLUSION_STRATEGY_NOT_SMART_START[inclusion_strategy.value], - force_security=force_security, - provisioning=provisioning, - dsk=dsk, - ) - except ValueError as err: - connection.send_error( - msg[ID], - ERR_INVALID_FORMAT, - err.args[0], - ) - return + except ValueError as err: + connection.send_error( + msg[ID], + ERR_INVALID_FORMAT, + err.args[0], + ) + return - connection.send_result( - msg[ID], - result, - ) + connection.send_result( + msg[ID], + result, + ) @websocket_api.require_admin @@ -1752,7 +1713,6 @@ async def websocket_get_config_parameters( "unit": metadata.unit, "writeable": metadata.writeable, "readable": metadata.readable, - "default": metadata.default, }, "value": zwave_value.value, } @@ -1765,72 +1725,6 @@ async def websocket_get_config_parameters( ) -@websocket_api.require_admin -@websocket_api.websocket_command( - { - vol.Required(TYPE): "zwave_js/set_raw_config_parameter", - vol.Required(DEVICE_ID): str, - vol.Required(PROPERTY): int, - vol.Required(VALUE): int, - vol.Required(VALUE_SIZE): vol.All(vol.Coerce(int), vol.Range(min=1, max=4)), - vol.Required(VALUE_FORMAT): vol.Coerce(ConfigurationValueFormat), - } -) -@websocket_api.async_response -@async_handle_failed_command -@async_get_node -async def websocket_set_raw_config_parameter( - hass: HomeAssistant, - connection: ActiveConnection, - msg: dict[str, Any], - node: Node, -) -> None: - """Set a custom config parameter value for a Z-Wave node.""" - result = await node.async_set_raw_config_parameter_value( - msg[VALUE], - msg[PROPERTY], - value_size=msg[VALUE_SIZE], - value_format=msg[VALUE_FORMAT], - ) - - connection.send_result( - msg[ID], - { - STATUS: result.status, - }, - ) - - -@websocket_api.require_admin -@websocket_api.websocket_command( - { - vol.Required(TYPE): "zwave_js/get_raw_config_parameter", - vol.Required(DEVICE_ID): str, - vol.Required(PROPERTY): int, - } -) -@websocket_api.async_response -@async_handle_failed_command -@async_get_node -async def websocket_get_raw_config_parameter( - hass: HomeAssistant, - connection: ActiveConnection, - msg: dict[str, Any], - node: Node, -) -> None: - """Get a custom config parameter value for a Z-Wave node.""" - value = await node.async_get_raw_config_parameter_value( - msg[PROPERTY], - ) - - connection.send_result( - msg[ID], - { - VALUE: value, - }, - ) - - def filename_is_present_if_logging_to_file(obj: dict) -> dict: """Validate that filename is provided if log_to_file is True.""" if obj.get(LOG_TO_FILE, False) and FILENAME not in obj: @@ -2604,81 +2498,3 @@ async def websocket_hard_reset_controller( ) ] await driver.async_hard_reset() - - -@websocket_api.websocket_command( - { - vol.Required(TYPE): "zwave_js/node_capabilities", 
- vol.Required(DEVICE_ID): str, - } -) -@websocket_api.async_response -@async_handle_failed_command -@async_get_node -async def websocket_node_capabilities( - hass: HomeAssistant, - connection: ActiveConnection, - msg: dict[str, Any], - node: Node, -) -> None: - """Get node endpoints with their support command classes.""" - # consumers expect snake_case at the moment - # remove that addition when consumers are updated - connection.send_result( - msg[ID], - { - idx: [ - command_class.to_dict() | {"is_secure": command_class.is_secure} - for command_class in endpoint.command_classes - ] - for idx, endpoint in node.endpoints.items() - }, - ) - - -@websocket_api.require_admin -@websocket_api.websocket_command( - { - vol.Required(TYPE): "zwave_js/invoke_cc_api", - vol.Required(DEVICE_ID): str, - vol.Required(ATTR_COMMAND_CLASS): vol.All( - vol.Coerce(int), vol.Coerce(CommandClass) - ), - vol.Optional(ATTR_ENDPOINT): vol.Coerce(int), - vol.Required(ATTR_METHOD_NAME): cv.string, - vol.Required(ATTR_PARAMETERS): list, - vol.Optional(ATTR_WAIT_FOR_RESULT): cv.boolean, - } -) -@websocket_api.async_response -@async_handle_failed_command -@async_get_node -async def websocket_invoke_cc_api( - hass: HomeAssistant, - connection: ActiveConnection, - msg: dict[str, Any], - node: Node, -) -> None: - """Call invokeCCAPI on the node or provided endpoint.""" - command_class: CommandClass = msg[ATTR_COMMAND_CLASS] - method_name: str = msg[ATTR_METHOD_NAME] - parameters: list[Any] = msg[ATTR_PARAMETERS] - - node_or_endpoint: Node | Endpoint = node - if (endpoint := msg.get(ATTR_ENDPOINT)) is not None: - node_or_endpoint = node.endpoints[endpoint] - - try: - result = await node_or_endpoint.async_invoke_cc_api( - command_class, - method_name, - *parameters, - wait_for_result=msg.get(ATTR_WAIT_FOR_RESULT, False), - ) - except BaseZwaveJSServerError as err: - connection.send_error(msg[ID], err.__class__.__name__, str(err)) - else: - connection.send_result( - msg[ID], - result, - ) diff --git a/homeassistant/components/zwave_js/binary_sensor.py b/homeassistant/components/zwave_js/binary_sensor.py index 0f1495fc6e6..bd5ce2d810b 100644 --- a/homeassistant/components/zwave_js/binary_sensor.py +++ b/homeassistant/components/zwave_js/binary_sensor.py @@ -248,16 +248,6 @@ BOOLEAN_SENSOR_MAPPINGS: dict[int, BinarySensorEntityDescription] = { } -@callback -def is_valid_notification_binary_sensor( - info: ZwaveDiscoveryInfo, -) -> bool | NotificationZWaveJSEntityDescription: - """Return if the notification CC Value is valid as binary sensor.""" - if not info.primary_value.metadata.states: - return False - return len(info.primary_value.metadata.states) > 1 - - async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -274,18 +264,16 @@ async def async_setup_entry( entities: list[BinarySensorEntity] = [] if info.platform_hint == "notification": - # ensure the notification CC Value is valid as binary sensor - if not is_valid_notification_binary_sensor(info): - return # Get all sensors from Notification CC states for state_key in info.primary_value.metadata.states: # ignore idle key (0) if state_key == "0": continue - # get (optional) description for this state + notification_description: ( NotificationZWaveJSEntityDescription | None ) = None + for description in NOTIFICATION_SENSOR_MAPPINGS: if ( int(description.key) @@ -301,6 +289,7 @@ async def async_setup_entry( and notification_description.off_state == state_key ): continue + entities.append( ZWaveNotificationBinarySensor( config_entry, driver, info, 
state_key, notification_description diff --git a/homeassistant/components/zwave_js/climate.py b/homeassistant/components/zwave_js/climate.py index c7ab579c2cb..14a3fe579c4 100644 --- a/homeassistant/components/zwave_js/climate.py +++ b/homeassistant/components/zwave_js/climate.py @@ -24,6 +24,8 @@ from homeassistant.components.climate import ( ATTR_HVAC_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, + DEFAULT_MAX_TEMP, + DEFAULT_MIN_TEMP, DOMAIN as CLIMATE_DOMAIN, PRESET_NONE, ClimateEntity, @@ -419,7 +421,7 @@ class ZWaveClimate(ZWaveBaseEntity, ClimateEntity): @property def min_temp(self) -> float: """Return the minimum temperature.""" - min_temp = 0.0 # Not using DEFAULT_MIN_TEMP to allow wider range + min_temp = DEFAULT_MIN_TEMP base_unit: str = UnitOfTemperature.CELSIUS try: temp = self._setpoint_value_or_raise(self._current_mode_setpoint_enums[0]) @@ -435,7 +437,7 @@ class ZWaveClimate(ZWaveBaseEntity, ClimateEntity): @property def max_temp(self) -> float: """Return the maximum temperature.""" - max_temp = 50.0 # Not using DEFAULT_MAX_TEMP to allow wider range + max_temp = DEFAULT_MAX_TEMP base_unit: str = UnitOfTemperature.CELSIUS try: temp = self._setpoint_value_or_raise(self._current_mode_setpoint_enums[0]) diff --git a/homeassistant/components/zwave_js/config_flow.py b/homeassistant/components/zwave_js/config_flow.py index 36f208e18d5..e73fa9fc3a7 100644 --- a/homeassistant/components/zwave_js/config_flow.py +++ b/homeassistant/components/zwave_js/config_flow.py @@ -18,6 +18,8 @@ from homeassistant.components.hassio import ( AddonInfo, AddonManager, AddonState, + HassioServiceInfo, + is_hassio, ) from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import ( @@ -27,7 +29,6 @@ from homeassistant.config_entries import ( ConfigEntryBaseFlow, ConfigEntryState, ConfigFlow, - ConfigFlowContext, ConfigFlowResult, OptionsFlow, OptionsFlowManager, @@ -37,8 +38,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.data_entry_flow import AbortFlow, FlowManager from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.hassio import is_hassio -from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.typing import VolDictType from . import disconnect_client @@ -193,7 +192,7 @@ class BaseZwaveJSFlow(ConfigEntryBaseFlow, ABC): @property @abstractmethod - def flow_manager(self) -> FlowManager[ConfigFlowContext, ConfigFlowResult]: + def flow_manager(self) -> FlowManager[ConfigFlowResult]: """Return the flow manager of the flow.""" async def async_step_install_addon( @@ -347,12 +346,11 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): VERSION = 1 - _title: str - def __init__(self) -> None: """Set up flow instance.""" super().__init__() self.use_addon = False + self._title: str | None = None self._usb_discovery = False @property @@ -366,7 +364,19 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Return the options flow.""" - return OptionsFlowHandler() + return OptionsFlowHandler(config_entry) + + async def async_step_import(self, data: dict[str, Any]) -> ConfigFlowResult: + """Handle imported data. + + This step will be used when importing data + during Z-Wave to Z-Wave JS migration. + """ + # Note that the data comes from the zwave integration. + # So we don't use our constants here. 
+ self.s0_legacy_key = data.get("network_key") + self.usb_path = data.get("usb_path") + return await self.async_step_user() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -396,7 +406,6 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): return await self.async_step_manual({CONF_URL: self.ws_address}) assert self.ws_address - assert self.unique_id return self.async_show_form( step_id="zeroconf_confirm", description_placeholders={ @@ -725,9 +734,10 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow): """Handle an options flow for Z-Wave JS.""" - def __init__(self) -> None: + def __init__(self, config_entry: ConfigEntry) -> None: """Set up the options flow.""" super().__init__() + self.config_entry = config_entry self.original_addon_config: dict[str, Any] | None = None self.revert_reason: str | None = None diff --git a/homeassistant/components/zwave_js/config_validation.py b/homeassistant/components/zwave_js/config_validation.py index 30bc2f16789..6c060f90ce5 100644 --- a/homeassistant/components/zwave_js/config_validation.py +++ b/homeassistant/components/zwave_js/config_validation.py @@ -34,8 +34,6 @@ def boolean(value: Any) -> bool: VALUE_SCHEMA = vol.Any( boolean, - float, - int, vol.Coerce(int), vol.Coerce(float), BITMASK_SCHEMA, diff --git a/homeassistant/components/zwave_js/const.py b/homeassistant/components/zwave_js/const.py index fd81cd7e7de..a04f9247548 100644 --- a/homeassistant/components/zwave_js/const.py +++ b/homeassistant/components/zwave_js/const.py @@ -42,7 +42,6 @@ DATA_CLIENT = "client" DATA_OLD_SERVER_LOG_LEVEL = "old_server_log_level" EVENT_DEVICE_ADDED_TO_REGISTRY = f"{DOMAIN}_device_added_to_registry" -EVENT_VALUE_UPDATED = "value updated" LOGGER = logging.getLogger(__package__) LIB_LOGGER = logging.getLogger("zwave_js_server") diff --git a/homeassistant/components/zwave_js/cover.py b/homeassistant/components/zwave_js/cover.py index 218c5cc82fe..363b32cedda 100644 --- a/homeassistant/components/zwave_js/cover.py +++ b/homeassistant/components/zwave_js/cover.py @@ -19,7 +19,6 @@ from zwave_js_server.const.command_class.multilevel_switch import ( from zwave_js_server.const.command_class.window_covering import ( NO_POSITION_PROPERTY_KEYS, NO_POSITION_SUFFIX, - WINDOW_COVERING_LEVEL_CHANGE_DOWN_PROPERTY, WINDOW_COVERING_LEVEL_CHANGE_UP_PROPERTY, SlatStates, ) @@ -342,20 +341,6 @@ class ZWaveWindowCovering(CoverPositionMixin, CoverTiltMixin): super().__init__(config_entry, driver, info) pos_value: ZwaveValue | None = None tilt_value: ZwaveValue | None = None - self._up_value = cast( - ZwaveValue, - self.get_zwave_value( - WINDOW_COVERING_LEVEL_CHANGE_UP_PROPERTY, - value_property_key=info.primary_value.property_key, - ), - ) - self._down_value = cast( - ZwaveValue, - self.get_zwave_value( - WINDOW_COVERING_LEVEL_CHANGE_DOWN_PROPERTY, - value_property_key=info.primary_value.property_key, - ), - ) # If primary value is for position, we have to search for a tilt value if info.primary_value.property_key in COVER_POSITION_PROPERTY_KEYS: @@ -417,18 +402,6 @@ class ZWaveWindowCovering(CoverPositionMixin, CoverTiltMixin): """Return range of valid tilt positions.""" return abs(SlatStates.CLOSED_2 - SlatStates.CLOSED_1) - async def async_open_cover(self, **kwargs: Any) -> None: - """Open the cover.""" - await self._async_set_value(self._up_value, True) - - async def async_close_cover(self, **kwargs: Any) -> None: - """Close the cover.""" - await 
self._async_set_value(self._down_value, True) - - async def async_stop_cover(self, **kwargs: Any) -> None: - """Stop the cover.""" - await self._async_set_value(self._up_value, False) - class ZwaveMotorizedBarrier(ZWaveBaseEntity, CoverEntity): """Representation of a Z-Wave motorized barrier device.""" diff --git a/homeassistant/components/zwave_js/device_condition.py b/homeassistant/components/zwave_js/device_condition.py index 8a50c838eec..dcd42d4d85d 100644 --- a/homeassistant/components/zwave_js/device_condition.py +++ b/homeassistant/components/zwave_js/device_condition.py @@ -8,7 +8,9 @@ import voluptuous as vol from zwave_js_server.const import CommandClass from zwave_js_server.model.value import ConfigurationValue -from homeassistant.components.device_automation import InvalidDeviceAutomationConfig +from homeassistant.components.device_automation.exceptions import ( + InvalidDeviceAutomationConfig, +) from homeassistant.const import CONF_CONDITION, CONF_DEVICE_ID, CONF_DOMAIN, CONF_TYPE from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError diff --git a/homeassistant/components/zwave_js/device_trigger.py b/homeassistant/components/zwave_js/device_trigger.py index 661d4557694..49027d4d43b 100644 --- a/homeassistant/components/zwave_js/device_trigger.py +++ b/homeassistant/components/zwave_js/device_trigger.py @@ -7,8 +7,8 @@ from typing import Any import voluptuous as vol from zwave_js_server.const import CommandClass -from homeassistant.components.device_automation import ( - DEVICE_TRIGGER_BASE_SCHEMA, +from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA +from homeassistant.components.device_automation.exceptions import ( InvalidDeviceAutomationConfig, ) from homeassistant.components.homeassistant.triggers import event, state diff --git a/homeassistant/components/zwave_js/diagnostics.py b/homeassistant/components/zwave_js/diagnostics.py index 5515100b20b..dde455bd9b6 100644 --- a/homeassistant/components/zwave_js/diagnostics.py +++ b/homeassistant/components/zwave_js/diagnostics.py @@ -12,7 +12,8 @@ from zwave_js_server.model.node import Node from zwave_js_server.model.value import ValueDataType from zwave_js_server.util.node import dump_node_state -from homeassistant.components.diagnostics import REDACTED, async_redact_data +from homeassistant.components.diagnostics import REDACTED +from homeassistant.components.diagnostics.util import async_redact_data from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_URL from homeassistant.core import HomeAssistant @@ -80,7 +81,7 @@ def get_device_entities( er.async_get(hass), device.id, include_disabled_entities=True ) entities = [] - for entry in sorted(entity_entries): + for entry in entity_entries: # Skip entities that are not part of this integration if entry.config_entry_id != config_entry.entry_id: continue diff --git a/homeassistant/components/zwave_js/discovery.py b/homeassistant/components/zwave_js/discovery.py index 5c79c668afc..0b66567c036 100644 --- a/homeassistant/components/zwave_js/discovery.py +++ b/homeassistant/components/zwave_js/discovery.py @@ -2,12 +2,12 @@ from __future__ import annotations -from collections.abc import Generator from dataclasses import asdict, dataclass, field from enum import StrEnum from typing import TYPE_CHECKING, Any, cast from awesomeversion import AwesomeVersion +from typing_extensions import Generator from zwave_js_server.const import ( CURRENT_STATE_PROPERTY, 
CURRENT_VALUE_PROPERTY, @@ -238,12 +238,6 @@ SWITCH_BINARY_CURRENT_VALUE_SCHEMA = ZWaveValueDiscoverySchema( command_class={CommandClass.SWITCH_BINARY}, property={CURRENT_VALUE_PROPERTY} ) -COLOR_SWITCH_CURRENT_VALUE_SCHEMA = ZWaveValueDiscoverySchema( - command_class={CommandClass.SWITCH_COLOR}, - property={CURRENT_COLOR_PROPERTY}, - property_key={None}, -) - SIREN_TONE_SCHEMA = ZWaveValueDiscoverySchema( command_class={CommandClass.SOUND_SWITCH}, property={TONE_ID_PROPERTY}, @@ -585,15 +579,6 @@ DISCOVERY_SCHEMAS = [ ), entity_registry_enabled_default=False, ), - # ZVIDAR Z-CM-V01 (SmartWings/Deyi WM25L/V Z-Wave Motor for Roller Shade) - ZWaveDiscoverySchema( - platform=Platform.COVER, - hint="shade", - manufacturer_id={0x045A}, - product_id={0x0507}, - product_type={0x0904}, - primary_value=SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, - ), # Vision Security ZL7432 In Wall Dual Relay Switch ZWaveDiscoverySchema( platform=Platform.SWITCH, @@ -768,6 +753,33 @@ DISCOVERY_SCHEMAS = [ }, ), ), + # HomeSeer HSM-200 v1 + ZWaveDiscoverySchema( + platform=Platform.LIGHT, + hint="black_is_off", + manufacturer_id={0x001E}, + product_id={0x0001}, + product_type={0x0004}, + primary_value=ZWaveValueDiscoverySchema( + command_class={CommandClass.SWITCH_COLOR}, + property={CURRENT_COLOR_PROPERTY}, + property_key={None}, + ), + absent_values=[SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA], + ), + # Logic Group ZDB5100 + ZWaveDiscoverySchema( + platform=Platform.LIGHT, + hint="black_is_off", + manufacturer_id={0x0234}, + product_id={0x0121}, + product_type={0x0003}, + primary_value=ZWaveValueDiscoverySchema( + command_class={CommandClass.SWITCH_COLOR}, + property={CURRENT_COLOR_PROPERTY}, + property_key={None}, + ), + ), # ====== START OF GENERIC MAPPING SCHEMAS ======= # locks # Door Lock CC @@ -885,6 +897,17 @@ DISCOVERY_SCHEMAS = [ type={ValueType.BOOLEAN}, ), ), + ZWaveDiscoverySchema( + platform=Platform.BINARY_SENSOR, + hint="notification", + primary_value=ZWaveValueDiscoverySchema( + command_class={ + CommandClass.NOTIFICATION, + }, + type={ValueType.NUMBER}, + ), + allow_multi=True, + ), # binary sensor for Indicator CC ZWaveDiscoverySchema( platform=Platform.BINARY_SENSOR, @@ -946,6 +969,19 @@ DISCOVERY_SCHEMAS = [ ), data_template=NumericSensorDataTemplate(), ), + # special list sensors (Notification CC) + ZWaveDiscoverySchema( + platform=Platform.SENSOR, + hint="list_sensor", + primary_value=ZWaveValueDiscoverySchema( + command_class={ + CommandClass.NOTIFICATION, + }, + type={ValueType.NUMBER}, + ), + allow_multi=True, + entity_registry_enabled_default=False, + ), # number for Indicator CC (exclude property keys 3-5) ZWaveDiscoverySchema( platform=Platform.NUMBER, @@ -969,6 +1005,11 @@ DISCOVERY_SCHEMAS = [ ), entity_category=EntityCategory.CONFIG, ), + # binary switches + ZWaveDiscoverySchema( + platform=Platform.SWITCH, + primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, + ), # switch for Indicator CC ZWaveDiscoverySchema( platform=Platform.SWITCH, @@ -1056,51 +1097,15 @@ DISCOVERY_SCHEMAS = [ device_class_generic={"Thermostat"}, primary_value=SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, ), - # Handle the different combinations of Binary Switch, Multilevel Switch and Color Switch - # to create switches and/or (colored) lights. 
The goal is to: - # - couple Color Switch CC with Multilevel Switch CC if possible - # - couple Color Switch CC with Binary Switch CC as the first fallback - # - use Color Switch CC standalone as the last fallback - # - # Multilevel Switch CC (+ Color Switch CC) -> Dimmable light with or without color support. + # lights + # primary value is the currentValue (brightness) + # catch any device with multilevel CC as light + # NOTE: keep this at the bottom of the discovery scheme, + # to handle all others that need the multilevel CC first ZWaveDiscoverySchema( platform=Platform.LIGHT, primary_value=SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, ), - # Binary Switch CC when Multilevel Switch and Color Switch CC exist -> - # On/Off switch, assign color to light entity instead - ZWaveDiscoverySchema( - platform=Platform.SWITCH, - primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, - required_values=[ - SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, - COLOR_SWITCH_CURRENT_VALUE_SCHEMA, - ], - ), - # Binary Switch CC and Color Switch CC -> - # Colored light that uses Binary Switch CC for turning on/off. - ZWaveDiscoverySchema( - platform=Platform.LIGHT, - hint="color_onoff", - primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, - required_values=[COLOR_SWITCH_CURRENT_VALUE_SCHEMA], - ), - # Binary Switch CC without Color Switch CC -> On/Off switch - ZWaveDiscoverySchema( - platform=Platform.SWITCH, - primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, - absent_values=[COLOR_SWITCH_CURRENT_VALUE_SCHEMA], - ), - # Colored light (legacy device) that can only be controlled through Color Switch CC. - ZWaveDiscoverySchema( - platform=Platform.LIGHT, - hint="color_onoff", - primary_value=COLOR_SWITCH_CURRENT_VALUE_SCHEMA, - absent_values=[ - SWITCH_BINARY_CURRENT_VALUE_SCHEMA, - SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, - ], - ), # light for Basic CC with target ZWaveDiscoverySchema( platform=Platform.LIGHT, @@ -1183,7 +1188,6 @@ DISCOVERY_SCHEMAS = [ type={ValueType.NUMBER}, any_available_states={(0, "idle")}, ), - allow_multi=True, ), # event # stateful = False @@ -1206,43 +1210,6 @@ DISCOVERY_SCHEMAS = [ ), entity_category=EntityCategory.DIAGNOSTIC, ), - ZWaveDiscoverySchema( - platform=Platform.BINARY_SENSOR, - hint="notification", - primary_value=ZWaveValueDiscoverySchema( - command_class={ - CommandClass.NOTIFICATION, - }, - type={ValueType.NUMBER}, - ), - # set allow-multi to true because some of the notification sensors - # can not be mapped to a binary sensor and must be handled as a regular sensor - allow_multi=True, - ), - # alarmType, alarmLevel (Notification CC) - ZWaveDiscoverySchema( - platform=Platform.SENSOR, - hint="notification_alarm", - primary_value=ZWaveValueDiscoverySchema( - command_class={ - CommandClass.NOTIFICATION, - }, - property={"alarmType", "alarmLevel"}, - type={ValueType.NUMBER}, - ), - entity_registry_enabled_default=False, - ), - # fallback sensors within Notification CC - ZWaveDiscoverySchema( - platform=Platform.SENSOR, - hint="notification", - primary_value=ZWaveValueDiscoverySchema( - command_class={ - CommandClass.NOTIFICATION, - }, - type={ValueType.NUMBER}, - ), - ), ] @@ -1262,11 +1229,8 @@ def async_discover_single_value( value: ZwaveValue, device: DeviceEntry, discovered_value_ids: dict[str, set[str]] ) -> Generator[ZwaveDiscoveryInfo]: """Run discovery on a single ZWave value and return matching schema info.""" + discovered_value_ids[device.id].add(value.value_id) for schema in DISCOVERY_SCHEMAS: - # abort if attribute(s) already discovered - if value.value_id in 
discovered_value_ids[device.id]: - continue - # check manufacturer_id, product_id, product_type if ( ( @@ -1325,20 +1289,14 @@ def async_discover_single_value( # check additional required values if schema.required_values is not None and not all( - any( - check_value(val, val_scheme, primary_value=value) - for val in value.node.values.values() - ) + any(check_value(val, val_scheme) for val in value.node.values.values()) for val_scheme in schema.required_values ): continue # check for values that may not be present if schema.absent_values is not None and any( - any( - check_value(val, val_scheme, primary_value=value) - for val in value.node.values.values() - ) + any(check_value(val, val_scheme) for val in value.node.values.values()) for val_scheme in schema.absent_values ): continue @@ -1376,12 +1334,10 @@ def async_discover_single_value( entity_category=schema.entity_category, ) - # prevent re-discovery of the (primary) value if not allowed if not schema.allow_multi: - discovered_value_ids[device.id].add(value.value_id) - - # prevent re-discovery of the (primary) value after all schemas have been checked - discovered_value_ids[device.id].add(value.value_id) + # return early since this value may not be discovered + # by other schemas/platforms + return if value.command_class == CommandClass.CONFIGURATION: yield from async_discover_single_configuration_value( @@ -1457,11 +1413,7 @@ def async_discover_single_configuration_value( @callback -def check_value( - value: ZwaveValue, - schema: ZWaveValueDiscoverySchema, - primary_value: ZwaveValue | None = None, -) -> bool: +def check_value(value: ZwaveValue, schema: ZWaveValueDiscoverySchema) -> bool: """Check if value matches scheme.""" # check command_class if ( @@ -1472,14 +1424,6 @@ def check_value( # check endpoint if schema.endpoint is not None and value.endpoint not in schema.endpoint: return False - # If the schema does not require an endpoint, make sure the value is on the - # same endpoint as the primary value - if ( - schema.endpoint is None - and primary_value is not None - and value.endpoint != primary_value.endpoint - ): - return False # check property if schema.property is not None and value.property_ not in schema.property: return False diff --git a/homeassistant/components/zwave_js/entity.py b/homeassistant/components/zwave_js/entity.py index d1ab9009308..4a6f87cc032 100644 --- a/homeassistant/components/zwave_js/entity.py +++ b/homeassistant/components/zwave_js/entity.py @@ -22,10 +22,11 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import UNDEFINED -from .const import DOMAIN, EVENT_VALUE_UPDATED, LOGGER +from .const import DOMAIN, LOGGER from .discovery import ZwaveDiscoveryInfo from .helpers import get_device_id, get_unique_id, get_valueless_base_unique_id +EVENT_VALUE_UPDATED = "value updated" EVENT_VALUE_REMOVED = "value removed" EVENT_DEAD = "dead" EVENT_ALIVE = "alive" @@ -334,6 +335,5 @@ class ZWaveBaseEntity(Entity): value, new_value, options=options, wait_for_result=wait_for_result ) except BaseZwaveJSServerError as err: - raise HomeAssistantError( - f"Unable to set value {value.value_id}: {err}" - ) from err + LOGGER.error("Unable to set value %s: %s", value.value_id, err) + raise HomeAssistantError from err diff --git a/homeassistant/components/zwave_js/fan.py b/homeassistant/components/zwave_js/fan.py index 37d3fc57886..925a48512d8 100644 --- a/homeassistant/components/zwave_js/fan.py +++ 
b/homeassistant/components/zwave_js/fan.py @@ -78,12 +78,7 @@ async def async_setup_entry( class ZwaveFan(ZWaveBaseEntity, FanEntity): """Representation of a Z-Wave fan.""" - _attr_supported_features = ( - FanEntityFeature.SET_SPEED - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) - _enable_turn_on_off_backwards_compatibility = False + _attr_supported_features = FanEntityFeature.SET_SPEED def __init__( self, config_entry: ConfigEntry, driver: Driver, info: ZwaveDiscoveryInfo @@ -254,11 +249,7 @@ class ValueMappingZwaveFan(ZwaveFan): @property def supported_features(self) -> FanEntityFeature: """Flag supported features.""" - flags = ( - FanEntityFeature.SET_SPEED - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) + flags = FanEntityFeature.SET_SPEED if self.has_fan_value_mapping and self.fan_value_mapping.presets: flags |= FanEntityFeature.PRESET_MODE @@ -391,13 +382,7 @@ class ZwaveThermostatFan(ZWaveBaseEntity, FanEntity): @property def supported_features(self) -> FanEntityFeature: """Flag supported features.""" - if not self._fan_off: - return FanEntityFeature.PRESET_MODE - return ( - FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_ON - | FanEntityFeature.TURN_OFF - ) + return FanEntityFeature.PRESET_MODE @property def fan_state(self) -> str | None: diff --git a/homeassistant/components/zwave_js/helpers.py b/homeassistant/components/zwave_js/helpers.py index 5885527e01c..737b8deff34 100644 --- a/homeassistant/components/zwave_js/helpers.py +++ b/homeassistant/components/zwave_js/helpers.py @@ -343,18 +343,20 @@ def async_get_nodes_from_area_id( } ) # Add devices in an area that are Z-Wave JS devices - nodes.update( - async_get_node_from_device_id(hass, device.id, dev_reg) - for device in dr.async_entries_for_area(dev_reg, area_id) - if any( - cast( - ConfigEntry, - hass.config_entries.async_get_entry(config_entry_id), - ).domain - == DOMAIN - for config_entry_id in device.config_entries - ) - ) + for device in dr.async_entries_for_area(dev_reg, area_id): + if next( + ( + config_entry_id + for config_entry_id in device.config_entries + if cast( + ConfigEntry, + hass.config_entries.async_get_entry(config_entry_id), + ).domain + == DOMAIN + ), + None, + ): + nodes.add(async_get_node_from_device_id(hass, device.id, dev_reg)) return nodes diff --git a/homeassistant/components/zwave_js/icons.json b/homeassistant/components/zwave_js/icons.json index b52255e09d1..2956cf2c6e0 100644 --- a/homeassistant/components/zwave_js/icons.json +++ b/homeassistant/components/zwave_js/icons.json @@ -57,41 +57,17 @@ } }, "services": { - "bulk_set_partial_config_parameters": { - "service": "mdi:cogs" - }, - "clear_lock_usercode": { - "service": "mdi:eraser" - }, - "invoke_cc_api": { - "service": "mdi:api" - }, - "multicast_set_value": { - "service": "mdi:list-box" - }, - "ping": { - "service": "mdi:crosshairs-gps" - }, - "refresh_notifications": { - "service": "mdi:bell" - }, - "refresh_value": { - "service": "mdi:refresh" - }, - "reset_meter": { - "service": "mdi:meter-electric" - }, - "set_config_parameter": { - "service": "mdi:cog" - }, - "set_lock_configuration": { - "service": "mdi:shield-lock" - }, - "set_lock_usercode": { - "service": "mdi:lock-smart" - }, - "set_value": { - "service": "mdi:form-textbox" - } + "bulk_set_partial_config_parameters": "mdi:cogs", + "clear_lock_usercode": "mdi:eraser", + "invoke_cc_api": "mdi:api", + "multicast_set_value": "mdi:list-box", + "ping": "mdi:crosshairs-gps", + "refresh_notifications": "mdi:bell", + "refresh_value": "mdi:refresh", + 
"reset_meter": "mdi:meter-electric", + "set_config_parameter": "mdi:cog", + "set_lock_configuration": "mdi:shield-lock", + "set_lock_usercode": "mdi:lock-smart", + "set_value": "mdi:form-textbox" } } diff --git a/homeassistant/components/zwave_js/light.py b/homeassistant/components/zwave_js/light.py index 4a044ca3f52..020f1b66b3d 100644 --- a/homeassistant/components/zwave_js/light.py +++ b/homeassistant/components/zwave_js/light.py @@ -76,8 +76,8 @@ async def async_setup_entry( driver = client.driver assert driver is not None # Driver is ready before platforms are loaded. - if info.platform_hint == "color_onoff": - async_add_entities([ZwaveColorOnOffLight(config_entry, driver, info)]) + if info.platform_hint == "black_is_off": + async_add_entities([ZwaveBlackIsOffLight(config_entry, driver, info)]) else: async_add_entities([ZwaveLight(config_entry, driver, info)]) @@ -111,10 +111,9 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): self._supports_color = False self._supports_rgbw = False self._supports_color_temp = False - self._supports_dimming = False - self._color_mode: str | None = None self._hs_color: tuple[float, float] | None = None self._rgbw_color: tuple[int, int, int, int] | None = None + self._color_mode: str | None = None self._color_temp: int | None = None self._min_mireds = 153 # 6500K as a safe default self._max_mireds = 370 # 2700K as a safe default @@ -130,28 +129,15 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): ) self._supported_color_modes: set[ColorMode] = set() - self._target_brightness: Value | None = None - # get additional (optional) values and set features - if self.info.primary_value.command_class == CommandClass.SWITCH_BINARY: - # This light can not be dimmed separately from the color channels - self._target_brightness = self.get_zwave_value( - TARGET_VALUE_PROPERTY, - CommandClass.SWITCH_BINARY, - add_to_watched_value_ids=False, - ) - self._supports_dimming = False - elif self.info.primary_value.command_class == CommandClass.SWITCH_MULTILEVEL: - # This light can be dimmed separately from the color channels - self._target_brightness = self.get_zwave_value( - TARGET_VALUE_PROPERTY, - CommandClass.SWITCH_MULTILEVEL, - add_to_watched_value_ids=False, - ) - self._supports_dimming = True - elif self.info.primary_value.command_class == CommandClass.BASIC: - # If the command class is Basic, we must generate a name that includes - # the command class name to avoid ambiguity + # If the command class is Basic, we must geenerate a name that includes + # the command class name to avoid ambiguity + self._target_brightness = self.get_zwave_value( + TARGET_VALUE_PROPERTY, + CommandClass.SWITCH_MULTILEVEL, + add_to_watched_value_ids=False, + ) + if self.info.primary_value.command_class == CommandClass.BASIC: self._attr_name = self.generate_name( include_value_name=True, alternate_value_name="Basic" ) @@ -160,13 +146,6 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): CommandClass.BASIC, add_to_watched_value_ids=False, ) - self._supports_dimming = True - - self._current_color = self.get_zwave_value( - CURRENT_COLOR_PROPERTY, - CommandClass.SWITCH_COLOR, - value_property_key=None, - ) self._target_color = self.get_zwave_value( TARGET_COLOR_PROPERTY, CommandClass.SWITCH_COLOR, @@ -237,7 +216,7 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): @property def rgbw_color(self) -> tuple[int, int, int, int] | None: - """Return the RGBW color.""" + """Return the hs color.""" return self._rgbw_color @property @@ -264,39 +243,11 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): 
"""Turn the device on.""" transition = kwargs.get(ATTR_TRANSITION) - brightness = kwargs.get(ATTR_BRIGHTNESS) - - hs_color = kwargs.get(ATTR_HS_COLOR) - color_temp = kwargs.get(ATTR_COLOR_TEMP) - rgbw = kwargs.get(ATTR_RGBW_COLOR) - - new_colors = self._get_new_colors(hs_color, color_temp, rgbw) - if new_colors is not None: - await self._async_set_colors(new_colors, transition) - - # set brightness (or turn on if dimming is not supported) - await self._async_set_brightness(brightness, transition) - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the light off.""" - await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) - - def _get_new_colors( - self, - hs_color: tuple[float, float] | None, - color_temp: int | None, - rgbw: tuple[int, int, int, int] | None, - brightness_scale: float | None = None, - ) -> dict[ColorComponent, int] | None: - """Determine the new color dict to set.""" # RGB/HS color + hs_color = kwargs.get(ATTR_HS_COLOR) if hs_color is not None and self._supports_color: red, green, blue = color_util.color_hs_to_RGB(*hs_color) - if brightness_scale is not None: - red = round(red * brightness_scale) - green = round(green * brightness_scale) - blue = round(blue * brightness_scale) colors = { ColorComponent.RED: red, ColorComponent.GREEN: green, @@ -306,9 +257,10 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): # turn of white leds when setting rgb colors[ColorComponent.WARM_WHITE] = 0 colors[ColorComponent.COLD_WHITE] = 0 - return colors + await self._async_set_colors(colors, transition) # Color temperature + color_temp = kwargs.get(ATTR_COLOR_TEMP) if color_temp is not None and self._supports_color_temp: # Limit color temp to min/max values cold = max( @@ -323,18 +275,20 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): ), ) warm = 255 - cold - colors = { - ColorComponent.WARM_WHITE: warm, - ColorComponent.COLD_WHITE: cold, - } - if self._supports_color: - # turn off color leds when setting color temperature - colors[ColorComponent.RED] = 0 - colors[ColorComponent.GREEN] = 0 - colors[ColorComponent.BLUE] = 0 - return colors + await self._async_set_colors( + { + # turn off color leds when setting color temperature + ColorComponent.RED: 0, + ColorComponent.GREEN: 0, + ColorComponent.BLUE: 0, + ColorComponent.WARM_WHITE: warm, + ColorComponent.COLD_WHITE: cold, + }, + transition, + ) # RGBW + rgbw = kwargs.get(ATTR_RGBW_COLOR) if rgbw is not None and self._supports_rgbw: rgbw_channels = { ColorComponent.RED: rgbw[0], @@ -346,15 +300,17 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): if self._cold_white: rgbw_channels[ColorComponent.COLD_WHITE] = rgbw[3] + await self._async_set_colors(rgbw_channels, transition) - return rgbw_channels + # set brightness + await self._async_set_brightness(kwargs.get(ATTR_BRIGHTNESS), transition) - return None + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the light off.""" + await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) async def _async_set_colors( - self, - colors: dict[ColorComponent, int], - transition: float | None = None, + self, colors: dict[ColorComponent, int], transition: float | None = None ) -> None: """Set (multiple) defined colors to given value(s).""" # prefer the (new) combined color property @@ -405,14 +361,9 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): zwave_transition = {TRANSITION_DURATION_OPTION: "default"} # setting a value requires setting targetValue - if self._supports_dimming: - await self._async_set_value( - self._target_brightness, 
zwave_brightness, zwave_transition - ) - else: - await self._async_set_value( - self._target_brightness, zwave_brightness > 0, zwave_transition - ) + await self._async_set_value( + self._target_brightness, zwave_brightness, zwave_transition + ) # We do an optimistic state update when setting to a previous value # to avoid waiting for the value to be updated from the device which is # typically delayed and causes a confusing UX. @@ -476,8 +427,15 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): """Calculate light colors.""" (red_val, green_val, blue_val, ww_val, cw_val) = self._get_color_values() - if self._current_color and isinstance(self._current_color.value, dict): - multi_color = self._current_color.value + # prefer the (new) combined color property + # https://github.com/zwave-js/node-zwave-js/pull/1782 + combined_color_val = self.get_zwave_value( + CURRENT_COLOR_PROPERTY, + CommandClass.SWITCH_COLOR, + value_property_key=None, + ) + if combined_color_val and isinstance(combined_color_val.value, dict): + multi_color = combined_color_val.value else: multi_color = {} @@ -528,10 +486,11 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): self._color_mode = ColorMode.RGBW -class ZwaveColorOnOffLight(ZwaveLight): - """Representation of a colored Z-Wave light with an optional binary switch to turn on/off. +class ZwaveBlackIsOffLight(ZwaveLight): + """Representation of a Z-Wave light where setting the color to black turns it off. - Dimming for RGB lights is realized by scaling the color channels. + Currently only supports lights with RGB, no color temperature, and no white + channels. """ def __init__( @@ -540,137 +499,61 @@ class ZwaveColorOnOffLight(ZwaveLight): """Initialize the light.""" super().__init__(config_entry, driver, info) - self._last_on_color: dict[ColorComponent, int] | None = None - self._last_brightness: int | None = None + self._last_color: dict[str, int] | None = None + self._supported_color_modes.discard(ColorMode.BRIGHTNESS) @property - def brightness(self) -> int | None: - """Return the brightness of this light between 0..255. + def brightness(self) -> int: + """Return the brightness of this light between 0..255.""" + return 255 - Z-Wave multilevel switches use a range of [0, 99] to control brightness. 
- """ + @property + def is_on(self) -> bool | None: + """Return true if device is on (brightness above 0).""" if self.info.primary_value.value is None: return None - if self._target_brightness and self.info.primary_value.value is False: - # Binary switch exists and is turned off - return 0 - - # Brightness is encoded in the color channels by scaling them lower than 255 - color_values = [ - v.value - for v in self._get_color_values() - if v is not None and v.value is not None - ] - return max(color_values) if color_values else 0 + return any(value != 0 for value in self.info.primary_value.value.values()) async def async_turn_on(self, **kwargs: Any) -> None: """Turn the device on.""" - if ( kwargs.get(ATTR_RGBW_COLOR) is not None or kwargs.get(ATTR_COLOR_TEMP) is not None + or kwargs.get(ATTR_HS_COLOR) is not None ): - # RGBW and color temp are not supported in this mode, - # delegate to the parent class await super().async_turn_on(**kwargs) return transition = kwargs.get(ATTR_TRANSITION) - brightness = kwargs.get(ATTR_BRIGHTNESS) - hs_color = kwargs.get(ATTR_HS_COLOR) - new_colors: dict[ColorComponent, int] | None = None - scale: float | None = None - - if brightness is None and hs_color is None: - # Turned on without specifying brightness or color - if self._last_on_color is not None: - if self._target_brightness: - # Color is already set, use the binary switch to turn on - await self._async_set_brightness(None, transition) - return - - # Preserve the previous color - new_colors = self._last_on_color - elif self._supports_color: - # Turned on for the first time. Make it white - new_colors = { + # turn on light to last color if known, otherwise set to white + if self._last_color is not None: + await self._async_set_colors( + { + ColorComponent.RED: self._last_color["red"], + ColorComponent.GREEN: self._last_color["green"], + ColorComponent.BLUE: self._last_color["blue"], + }, + transition, + ) + else: + await self._async_set_colors( + { ColorComponent.RED: 255, ColorComponent.GREEN: 255, ColorComponent.BLUE: 255, - } - elif brightness is not None: - # If brightness gets set, preserve the color and mix it with the new brightness - if self.color_mode == ColorMode.HS: - scale = brightness / 255 - if ( - self._last_on_color is not None - and None not in self._last_on_color.values() - ): - # Changed brightness from 0 to >0 - old_brightness = max(self._last_on_color.values()) - new_scale = brightness / old_brightness - scale = new_scale - new_colors = {} - for color, value in self._last_on_color.items(): - new_colors[color] = round(value * new_scale) - elif hs_color is None and self._color_mode == ColorMode.HS: - hs_color = self._hs_color - elif hs_color is not None and brightness is None: - # Turned on by using the color controls - current_brightness = self.brightness - if current_brightness == 0 and self._last_brightness is not None: - # Use the last brightness value if the light is currently off - scale = self._last_brightness / 255 - elif current_brightness is not None: - scale = current_brightness / 255 - - # Reset last color until turning off again - self._last_on_color = None - - if new_colors is None: - new_colors = self._get_new_colors( - hs_color=hs_color, color_temp=None, rgbw=None, brightness_scale=scale + }, + transition, ) - if new_colors is not None: - await self._async_set_colors(new_colors, transition) - - # Turn the binary switch on if there is one - await self._async_set_brightness(brightness, transition) - async def async_turn_off(self, **kwargs: Any) -> None: """Turn the 
light off.""" - - # Remember last color and brightness to restore it when turning on - self._last_brightness = self.brightness - if self._current_color and isinstance(self._current_color.value, dict): - red = self._current_color.value.get(COLOR_SWITCH_COMBINED_RED) - green = self._current_color.value.get(COLOR_SWITCH_COMBINED_GREEN) - blue = self._current_color.value.get(COLOR_SWITCH_COMBINED_BLUE) - - last_color: dict[ColorComponent, int] = {} - if red is not None: - last_color[ColorComponent.RED] = red - if green is not None: - last_color[ColorComponent.GREEN] = green - if blue is not None: - last_color[ColorComponent.BLUE] = blue - - if last_color: - self._last_on_color = last_color - - if self._target_brightness: - # Turn off the binary switch only - await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) - else: - # turn off all color channels - colors = { + self._last_color = self.info.primary_value.value + await self._async_set_colors( + { ColorComponent.RED: 0, ColorComponent.GREEN: 0, ColorComponent.BLUE: 0, - } - - await self._async_set_colors( - colors, - kwargs.get(ATTR_TRANSITION), - ) + }, + kwargs.get(ATTR_TRANSITION), + ) + await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) diff --git a/homeassistant/components/zwave_js/lock.py b/homeassistant/components/zwave_js/lock.py index c14517f4b03..5eb89e17402 100644 --- a/homeassistant/components/zwave_js/lock.py +++ b/homeassistant/components/zwave_js/lock.py @@ -19,8 +19,9 @@ from zwave_js_server.const.command_class.lock import ( from zwave_js_server.exceptions import BaseZwaveJSServerError from zwave_js_server.util.lock import clear_usercode, set_configuration, set_usercode -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockEntity, LockState +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockEntity from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, entity_platform @@ -48,12 +49,12 @@ PARALLEL_UPDATES = 0 STATE_TO_ZWAVE_MAP: dict[int, dict[str, int | bool]] = { CommandClass.DOOR_LOCK: { - LockState.UNLOCKED: DoorLockMode.UNSECURED, - LockState.LOCKED: DoorLockMode.SECURED, + STATE_UNLOCKED: DoorLockMode.UNSECURED, + STATE_LOCKED: DoorLockMode.SECURED, }, CommandClass.LOCK: { - LockState.UNLOCKED: False, - LockState.LOCKED: True, + STATE_UNLOCKED: False, + STATE_LOCKED: True, }, } UNIT16_SCHEMA = vol.All(vol.Coerce(int), vol.Range(min=0, max=65535)) @@ -139,7 +140,7 @@ class ZWaveLock(ZWaveBaseEntity, LockEntity): == self.info.primary_value.value ) - async def _set_lock_state(self, target_state: LockState, **kwargs: Any) -> None: + async def _set_lock_state(self, target_state: str, **kwargs: Any) -> None: """Set the lock state.""" target_value = self.get_zwave_value( LOCK_CMD_CLASS_TO_PROPERTY_MAP[ @@ -154,11 +155,11 @@ class ZWaveLock(ZWaveBaseEntity, LockEntity): async def async_lock(self, **kwargs: Any) -> None: """Lock the lock.""" - await self._set_lock_state(LockState.LOCKED) + await self._set_lock_state(STATE_LOCKED) async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock.""" - await self._set_lock_state(LockState.UNLOCKED) + await self._set_lock_state(STATE_UNLOCKED) async def async_set_lock_usercode(self, code_slot: int, usercode: str) -> None: """Set the usercode to index X on the lock.""" @@ -195,19 +196,15 @@ 
class ZWaveLock(ZWaveBaseEntity, LockEntity): ) -> None: """Set the lock configuration.""" params: dict[str, Any] = {"operation_type": operation_type} - params.update( - { - attr: val - for attr, val in ( - ("lock_timeout_configuration", lock_timeout), - ("auto_relock_time", auto_relock_time), - ("hold_and_release_time", hold_and_release_time), - ("twist_assist", twist_assist), - ("block_to_block", block_to_block), - ) - if val is not None - } - ) + for attr, val in ( + ("lock_timeout_configuration", lock_timeout), + ("auto_relock_time", auto_relock_time), + ("hold_and_release_time", hold_and_release_time), + ("twist_assist", twist_assist), + ("block_to_block", block_to_block), + ): + if val is not None: + params[attr] = val configuration = DoorLockCCConfigurationSetOptions(**params) result = await set_configuration( self.info.node.endpoints[self.info.primary_value.endpoint or 0], diff --git a/homeassistant/components/zwave_js/manifest.json b/homeassistant/components/zwave_js/manifest.json index 3631bf1163b..f394537803a 100644 --- a/homeassistant/components/zwave_js/manifest.json +++ b/homeassistant/components/zwave_js/manifest.json @@ -1,7 +1,6 @@ { "domain": "zwave_js", "name": "Z-Wave", - "after_dependencies": ["hassio"], "codeowners": ["@home-assistant/z-wave"], "config_flow": true, "dependencies": ["http", "repairs", "usb", "websocket_api"], @@ -10,7 +9,7 @@ "iot_class": "local_push", "loggers": ["zwave_js_server"], "quality_scale": "platinum", - "requirements": ["pyserial==3.5", "zwave-js-server-python==0.59.1"], + "requirements": ["pyserial==3.5", "zwave-js-server-python==0.57.0"], "usb": [ { "vid": "0658", diff --git a/homeassistant/components/zwave_js/migrate.py b/homeassistant/components/zwave_js/migrate.py index ac749cb516b..bde53137dc1 100644 --- a/homeassistant/components/zwave_js/migrate.py +++ b/homeassistant/components/zwave_js/migrate.py @@ -6,16 +6,20 @@ from dataclasses import dataclass import logging from zwave_js_server.model.driver import Driver -from zwave_js_server.model.node import Node from zwave_js_server.model.value import Value as ZwaveValue -from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.device_registry import DeviceEntry +from homeassistant.helpers.entity_registry import ( + EntityRegistry, + RegistryEntry, + async_entries_for_device, +) from .const import DOMAIN from .discovery import ZwaveDiscoveryInfo -from .helpers import get_unique_id, get_valueless_base_unique_id +from .helpers import get_unique_id _LOGGER = logging.getLogger(__name__) @@ -58,10 +62,10 @@ class ValueID: @callback def async_migrate_old_entity( hass: HomeAssistant, - ent_reg: er.EntityRegistry, + ent_reg: EntityRegistry, registered_unique_ids: set[str], - platform: Platform, - device: dr.DeviceEntry, + platform: str, + device: DeviceEntry, unique_id: str, ) -> None: """Migrate existing entity if current one can't be found and an old one exists.""" @@ -73,8 +77,8 @@ def async_migrate_old_entity( # Look for existing entities in the registry that could be the same value but on # a different endpoint - existing_entity_entries: list[er.RegistryEntry] = [] - for entry in er.async_entries_for_device(ent_reg, device.id): + existing_entity_entries: list[RegistryEntry] = [] + for entry in async_entries_for_device(ent_reg, device.id): # If entity is not in the domain for this 
discovery info or entity has already # been processed, skip it if entry.domain != platform or entry.unique_id in registered_unique_ids: @@ -105,40 +109,35 @@ def async_migrate_old_entity( @callback def async_migrate_unique_id( - ent_reg: er.EntityRegistry, - platform: Platform, - old_unique_id: str, - new_unique_id: str, + ent_reg: EntityRegistry, platform: str, old_unique_id: str, new_unique_id: str ) -> None: """Check if entity with old unique ID exists, and if so migrate it to new ID.""" - if not (entity_id := ent_reg.async_get_entity_id(platform, DOMAIN, old_unique_id)): - return - - _LOGGER.debug( - "Migrating entity %s from old unique ID '%s' to new unique ID '%s'", - entity_id, - old_unique_id, - new_unique_id, - ) - try: - ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) - except ValueError: + if entity_id := ent_reg.async_get_entity_id(platform, DOMAIN, old_unique_id): _LOGGER.debug( - ( - "Entity %s can't be migrated because the unique ID is taken; " - "Cleaning it up since it is likely no longer valid" - ), + "Migrating entity %s from old unique ID '%s' to new unique ID '%s'", entity_id, + old_unique_id, + new_unique_id, ) - ent_reg.async_remove(entity_id) + try: + ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) + except ValueError: + _LOGGER.debug( + ( + "Entity %s can't be migrated because the unique ID is taken; " + "Cleaning it up since it is likely no longer valid" + ), + entity_id, + ) + ent_reg.async_remove(entity_id) @callback def async_migrate_discovered_value( hass: HomeAssistant, - ent_reg: er.EntityRegistry, + ent_reg: EntityRegistry, registered_unique_ids: set[str], - device: dr.DeviceEntry, + device: DeviceEntry, driver: Driver, disc_info: ZwaveDiscoveryInfo, ) -> None: @@ -161,7 +160,7 @@ def async_migrate_discovered_value( ] if ( - disc_info.platform == Platform.BINARY_SENSOR + disc_info.platform == "binary_sensor" and disc_info.platform_hint == "notification" ): for state_key in disc_info.primary_value.metadata.states: @@ -212,24 +211,6 @@ def async_migrate_discovered_value( registered_unique_ids.add(new_unique_id) -@callback -def async_migrate_statistics_sensors( - hass: HomeAssistant, driver: Driver, node: Node, key_map: dict[str, str] -) -> None: - """Migrate statistics sensors to new unique IDs. - - - Migrate camel case keys in unique IDs to snake keys. 
- """ - ent_reg = er.async_get(hass) - base_unique_id = f"{get_valueless_base_unique_id(driver, node)}.statistics" - for new_key, old_key in key_map.items(): - if new_key == old_key: - continue - old_unique_id = f"{base_unique_id}_{old_key}" - new_unique_id = f"{base_unique_id}_{new_key}" - async_migrate_unique_id(ent_reg, Platform.SENSOR, old_unique_id, new_unique_id) - - @callback def get_old_value_ids(value: ZwaveValue) -> list[str]: """Get old value IDs so we can migrate entity unique ID.""" diff --git a/homeassistant/components/zwave_js/sensor.py b/homeassistant/components/zwave_js/sensor.py index b259711d21b..e43c620ff54 100644 --- a/homeassistant/components/zwave_js/sensor.py +++ b/homeassistant/components/zwave_js/sensor.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Callable, Mapping from dataclasses import dataclass +from datetime import datetime from typing import Any import voluptuous as vol @@ -15,10 +16,10 @@ from zwave_js_server.const.command_class.meter import ( ) from zwave_js_server.exceptions import BaseZwaveJSServerError from zwave_js_server.model.controller import Controller -from zwave_js_server.model.controller.statistics import ControllerStatistics +from zwave_js_server.model.controller.statistics import ControllerStatisticsDataType from zwave_js_server.model.driver import Driver from zwave_js_server.model.node import Node as ZwaveNode -from zwave_js_server.model.node.statistics import NodeStatistics +from zwave_js_server.model.node.statistics import NodeStatisticsDataType from zwave_js_server.util.command_class.meter import get_meter_type from homeassistant.components.sensor import ( @@ -51,7 +52,6 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import UNDEFINED, StateType -from .binary_sensor import is_valid_notification_binary_sensor from .const import ( ATTR_METER_TYPE, ATTR_METER_TYPE_NAME, @@ -90,7 +90,6 @@ from .discovery_data_template import ( ) from .entity import ZWaveBaseEntity from .helpers import get_device_info, get_valueless_base_unique_id -from .migrate import async_migrate_statistics_sensors PARALLEL_UPDATES = 0 @@ -329,172 +328,152 @@ ENTITY_DESCRIPTION_KEY_MAP = { } -def convert_nested_attr( - statistics: ControllerStatistics | NodeStatistics, key: str +def convert_dict_of_dicts( + statistics: ControllerStatisticsDataType | NodeStatisticsDataType, key: str ) -> Any: - """Convert a string that represents a nested attr to a value.""" - data = statistics - for _key in key.split("."): - if data is None: - return None # type: ignore[unreachable] - data = getattr(data, _key) - return data + """Convert a dictionary of dictionaries to a value.""" + keys = key.split(".") + return statistics.get(keys[0], {}).get(keys[1], {}).get(keys[2]) # type: ignore[attr-defined] @dataclass(frozen=True, kw_only=True) class ZWaveJSStatisticsSensorEntityDescription(SensorEntityDescription): """Class to represent a Z-Wave JS statistics sensor entity description.""" - convert: Callable[[ControllerStatistics | NodeStatistics, str], Any] = getattr + convert: Callable[ + [ControllerStatisticsDataType | NodeStatisticsDataType, str], Any + ] = lambda statistics, key: statistics.get(key) entity_registry_enabled_default: bool = False # Controller statistics descriptions ENTITY_DESCRIPTION_CONTROLLER_STATISTICS_LIST = [ ZWaveJSStatisticsSensorEntityDescription( - key="messages_tx", + key="messagesTX", 
translation_key="successful_messages", translation_placeholders={"direction": "TX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="messages_rx", + key="messagesRX", translation_key="successful_messages", translation_placeholders={"direction": "RX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="messages_dropped_tx", + key="messagesDroppedTX", translation_key="messages_dropped", translation_placeholders={"direction": "TX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="messages_dropped_rx", + key="messagesDroppedRX", translation_key="messages_dropped", translation_placeholders={"direction": "RX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="nak", translation_key="nak", state_class=SensorStateClass.TOTAL + key="NAK", translation_key="nak", state_class=SensorStateClass.TOTAL ), ZWaveJSStatisticsSensorEntityDescription( - key="can", translation_key="can", state_class=SensorStateClass.TOTAL + key="CAN", translation_key="can", state_class=SensorStateClass.TOTAL ), ZWaveJSStatisticsSensorEntityDescription( - key="timeout_ack", + key="timeoutACK", translation_key="timeout_ack", state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="timeout_response", + key="timeoutResponse", translation_key="timeout_response", state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="timeout_callback", + key="timeoutCallback", translation_key="timeout_callback", state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="background_rssi.channel_0.average", + key="backgroundRSSI.channel0.average", translation_key="average_background_rssi", translation_placeholders={"channel": "0"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, - convert=convert_nested_attr, + convert=convert_dict_of_dicts, ), ZWaveJSStatisticsSensorEntityDescription( - key="background_rssi.channel_0.current", + key="backgroundRSSI.channel0.current", translation_key="current_background_rssi", translation_placeholders={"channel": "0"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, state_class=SensorStateClass.MEASUREMENT, - convert=convert_nested_attr, + convert=convert_dict_of_dicts, ), ZWaveJSStatisticsSensorEntityDescription( - key="background_rssi.channel_1.average", + key="backgroundRSSI.channel1.average", translation_key="average_background_rssi", translation_placeholders={"channel": "1"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, - convert=convert_nested_attr, + convert=convert_dict_of_dicts, ), ZWaveJSStatisticsSensorEntityDescription( - key="background_rssi.channel_1.current", + key="backgroundRSSI.channel1.current", translation_key="current_background_rssi", translation_placeholders={"channel": "1"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, state_class=SensorStateClass.MEASUREMENT, - convert=convert_nested_attr, + convert=convert_dict_of_dicts, ), ZWaveJSStatisticsSensorEntityDescription( - key="background_rssi.channel_2.average", + key="backgroundRSSI.channel2.average", translation_key="average_background_rssi", translation_placeholders={"channel": "2"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, 
device_class=SensorDeviceClass.SIGNAL_STRENGTH, - convert=convert_nested_attr, + convert=convert_dict_of_dicts, ), ZWaveJSStatisticsSensorEntityDescription( - key="background_rssi.channel_2.current", + key="backgroundRSSI.channel2.current", translation_key="current_background_rssi", translation_placeholders={"channel": "2"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, state_class=SensorStateClass.MEASUREMENT, - convert=convert_nested_attr, + convert=convert_dict_of_dicts, ), ] -CONTROLLER_STATISTICS_KEY_MAP: dict[str, str] = { - "messages_tx": "messagesTX", - "messages_rx": "messagesRX", - "messages_dropped_tx": "messagesDroppedTX", - "messages_dropped_rx": "messagesDroppedRX", - "nak": "NAK", - "can": "CAN", - "timeout_ack": "timeoutAck", - "timeout_response": "timeoutResponse", - "timeout_callback": "timeoutCallback", - "background_rssi.channel_0.average": "backgroundRSSI.channel0.average", - "background_rssi.channel_0.current": "backgroundRSSI.channel0.current", - "background_rssi.channel_1.average": "backgroundRSSI.channel1.average", - "background_rssi.channel_1.current": "backgroundRSSI.channel1.current", - "background_rssi.channel_2.average": "backgroundRSSI.channel2.average", - "background_rssi.channel_2.current": "backgroundRSSI.channel2.current", -} - # Node statistics descriptions ENTITY_DESCRIPTION_NODE_STATISTICS_LIST = [ ZWaveJSStatisticsSensorEntityDescription( - key="commands_rx", + key="commandsRX", translation_key="successful_commands", translation_placeholders={"direction": "RX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="commands_tx", + key="commandsTX", translation_key="successful_commands", translation_placeholders={"direction": "TX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="commands_dropped_rx", + key="commandsDroppedRX", translation_key="commands_dropped", translation_placeholders={"direction": "RX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="commands_dropped_tx", + key="commandsDroppedTX", translation_key="commands_dropped", translation_placeholders={"direction": "TX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="timeout_response", + key="timeoutResponse", translation_key="timeout_response", state_class=SensorStateClass.TOTAL, ), @@ -513,24 +492,20 @@ ENTITY_DESCRIPTION_NODE_STATISTICS_LIST = [ state_class=SensorStateClass.MEASUREMENT, ), ZWaveJSStatisticsSensorEntityDescription( - key="last_seen", + key="lastSeen", translation_key="last_seen", device_class=SensorDeviceClass.TIMESTAMP, + convert=( + lambda statistics, key: ( + datetime.fromisoformat(dt) # type: ignore[arg-type] + if (dt := statistics.get(key)) + else None + ) + ), entity_registry_enabled_default=True, ), ] -NODE_STATISTICS_KEY_MAP: dict[str, str] = { - "commands_rx": "commandsRX", - "commands_tx": "commandsTX", - "commands_dropped_rx": "commandsDroppedRX", - "commands_dropped_tx": "commandsDroppedTX", - "timeout_response": "timeoutResponse", - "rtt": "rtt", - "rssi": "rssi", - "last_seen": "lastSeen", -} - def get_entity_description( data: NumericSensorDataTemplateData, @@ -581,10 +556,7 @@ async def async_setup_entry( data.unit_of_measurement, ) ) - elif info.platform_hint == "notification": - # prevent duplicate entities for values that are already represented as binary sensors - if is_valid_notification_binary_sensor(info): - return + elif 
info.platform_hint == "list_sensor": entities.append( ZWaveListSensor(config_entry, driver, info, entity_description) ) @@ -616,14 +588,6 @@ async def async_setup_entry( @callback def async_add_statistics_sensors(node: ZwaveNode) -> None: """Add statistics sensors.""" - async_migrate_statistics_sensors( - hass, - driver, - node, - CONTROLLER_STATISTICS_KEY_MAP - if driver.controller.own_node == node - else NODE_STATISTICS_KEY_MAP, - ) async_add_entities( [ ZWaveStatisticsSensor( @@ -786,9 +750,10 @@ class ZWaveMeterSensor(ZWaveNumericSensor): CommandClass.METER, "reset", *args, wait_for_result=False ) except BaseZwaveJSServerError as err: - raise HomeAssistantError( - f"Failed to reset meters on node {node} endpoint {endpoint}: {err}" - ) from err + LOGGER.error( + "Failed to reset meters on node %s endpoint %s: %s", node, endpoint, err + ) + raise HomeAssistantError from err LOGGER.debug( "Meters on node %s endpoint %s reset with the following options: %s", node, @@ -1037,7 +1002,7 @@ class ZWaveStatisticsSensor(SensorEntity): def statistics_updated(self, event_data: dict) -> None: """Call when statistics updated event is received.""" self._attr_native_value = self.entity_description.convert( - event_data["statistics_updated"], self.entity_description.key + event_data["statistics"], self.entity_description.key ) self.async_write_ha_state() @@ -1063,5 +1028,5 @@ class ZWaveStatisticsSensor(SensorEntity): # Set initial state self._attr_native_value = self.entity_description.convert( - self.statistics_src.statistics, self.entity_description.key + self.statistics_src.statistics.data, self.entity_description.key ) diff --git a/homeassistant/components/zwave_js/services.py b/homeassistant/components/zwave_js/services.py index d1cb66ceafc..66d09714723 100644 --- a/homeassistant/components/zwave_js/services.py +++ b/homeassistant/components/zwave_js/services.py @@ -3,11 +3,12 @@ from __future__ import annotations import asyncio -from collections.abc import Collection, Generator, Sequence +from collections.abc import Collection, Sequence import logging import math from typing import Any +from typing_extensions import Generator import voluptuous as vol from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import SET_VALUE_SUCCESS, CommandClass, CommandStatus @@ -48,12 +49,6 @@ _LOGGER = logging.getLogger(__name__) type _NodeOrEndpointType = ZwaveNode | Endpoint -TARGET_VALIDATORS = { - vol.Optional(ATTR_AREA_ID): vol.All(cv.ensure_list, [cv.string]), - vol.Optional(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, -} - def parameter_name_does_not_need_bitmask( val: dict[str, int | str | list[str]], @@ -267,7 +262,13 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - **TARGET_VALIDATORS, + vol.Optional(ATTR_AREA_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_DEVICE_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Optional(const.ATTR_ENDPOINT, default=0): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Any( vol.Coerce(int), cv.string @@ -305,7 +306,13 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - **TARGET_VALIDATORS, + vol.Optional(ATTR_AREA_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_DEVICE_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Optional(const.ATTR_ENDPOINT, default=0): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): 
vol.Coerce(int), vol.Required(const.ATTR_CONFIG_VALUE): vol.Any( @@ -350,7 +357,13 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - **TARGET_VALIDATORS, + vol.Optional(ATTR_AREA_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_DEVICE_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Required(const.ATTR_COMMAND_CLASS): vol.Coerce(int), vol.Required(const.ATTR_PROPERTY): vol.Any( vol.Coerce(int), str @@ -379,7 +392,13 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - **TARGET_VALIDATORS, + vol.Optional(ATTR_AREA_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_DEVICE_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Optional(const.ATTR_BROADCAST, default=False): cv.boolean, vol.Required(const.ATTR_COMMAND_CLASS): vol.Coerce(int), vol.Required(const.ATTR_PROPERTY): vol.Any( @@ -410,7 +429,15 @@ class ZWaveServices: self.async_ping, schema=vol.Schema( vol.All( - TARGET_VALIDATORS, + { + vol.Optional(ATTR_AREA_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_DEVICE_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, + }, cv.has_at_least_one_key( ATTR_DEVICE_ID, ATTR_ENTITY_ID, ATTR_AREA_ID ), @@ -427,7 +454,13 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - **TARGET_VALIDATORS, + vol.Optional(ATTR_AREA_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_DEVICE_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Required(const.ATTR_COMMAND_CLASS): vol.All( vol.Coerce(int), vol.Coerce(CommandClass) ), @@ -451,7 +484,13 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - **TARGET_VALIDATORS, + vol.Optional(ATTR_AREA_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_DEVICE_ID): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Required(const.ATTR_NOTIFICATION_TYPE): vol.All( vol.Coerce(int), vol.Coerce(NotificationType) ), @@ -529,15 +568,8 @@ class ZWaveServices: for node_or_endpoint, result in get_valid_responses_from_results( nodes_or_endpoints_list, _results ): - if value_size is None: - # async_set_config_parameter still returns (Value, SetConfigParameterResult) - zwave_value = result[0] - cmd_status = result[1] - else: - # async_set_raw_config_parameter_value now returns just SetConfigParameterResult - cmd_status = result - zwave_value = f"parameter {property_or_property_name}" - + zwave_value = result[0] + cmd_status = result[1] if cmd_status.status == CommandStatus.ACCEPTED: msg = "Set configuration parameter %s on Node %s with value %s" else: diff --git a/homeassistant/components/zwave_js/services.yaml b/homeassistant/components/zwave_js/services.yaml index acf6e9a0665..81809e3fbeb 100644 --- a/homeassistant/components/zwave_js/services.yaml +++ b/homeassistant/components/zwave_js/services.yaml @@ -51,6 +51,16 @@ set_lock_configuration: min: 0 max: 65535 unit_of_measurement: sec + outside_handles_can_open_door_configuration: + required: false + example: [true, true, true, false] + selector: + object: + inside_handles_can_open_door_configuration: + required: false + example: [true, true, true, false] + selector: + object: auto_relock_time: required: false example: 1 @@ -79,28 +89,10 @@ set_lock_configuration: boolean: set_config_parameter: + target: + entity: + integration: zwave_js fields: - area_id: - example: living_room - selector: - 
area: - device: - - integration: zwave_js - multiple: true - device_id: - example: "8f4219cfa57e23f6f669c4616c2205e2" - selector: - device: - filter: - - integration: zwave_js - multiple: true - entity_id: - example: sensor.living_room_temperature - selector: - entity: - filter: - - integration: zwave_js - multiple: true endpoint: example: 1 default: 0 @@ -135,28 +127,10 @@ set_config_parameter: max: 3 bulk_set_partial_config_parameters: + target: + entity: + integration: zwave_js fields: - area_id: - example: living_room - selector: - area: - device: - - integration: zwave_js - multiple: true - device_id: - example: "8f4219cfa57e23f6f669c4616c2205e2" - selector: - device: - filter: - - integration: zwave_js - multiple: true - entity_id: - example: sensor.living_room_temperature - selector: - entity: - filter: - - integration: zwave_js - multiple: true endpoint: example: 1 default: 0 @@ -195,28 +169,10 @@ refresh_value: boolean: set_value: + target: + entity: + integration: zwave_js fields: - area_id: - example: living_room - selector: - area: - device: - - integration: zwave_js - multiple: true - device_id: - example: "8f4219cfa57e23f6f669c4616c2205e2" - selector: - device: - filter: - - integration: zwave_js - multiple: true - entity_id: - example: sensor.living_room_temperature - selector: - entity: - filter: - - integration: zwave_js - multiple: true command_class: example: 117 required: true @@ -252,28 +208,10 @@ set_value: boolean: multicast_set_value: + target: + entity: + integration: zwave_js fields: - area_id: - example: living_room - selector: - area: - device: - - integration: zwave_js - multiple: true - device_id: - example: "8f4219cfa57e23f6f669c4616c2205e2" - selector: - device: - filter: - - integration: zwave_js - multiple: true - entity_id: - example: sensor.living_room_temperature - selector: - entity: - filter: - - integration: zwave_js - multiple: true broadcast: example: true required: false @@ -310,55 +248,16 @@ multicast_set_value: object: ping: - fields: - area_id: - example: living_room - selector: - area: - device: - - integration: zwave_js - multiple: true - device_id: - example: "8f4219cfa57e23f6f669c4616c2205e2" - selector: - device: - filter: - - integration: zwave_js - multiple: true - entity_id: - example: sensor.living_room_temperature - selector: - entity: - filter: - - integration: zwave_js - multiple: true + target: + entity: + integration: zwave_js reset_meter: + target: + entity: + domain: sensor + integration: zwave_js fields: - area_id: - example: living_room - selector: - area: - entity: - - integration: zwave_js - domain: sensor - multiple: true - device_id: - example: "8f4219cfa57e23f6f669c4616c2205e2" - selector: - device: - entity: - - integration: zwave_js - domain: sensor - multiple: true - entity_id: - example: sensor.living_room_temperature - selector: - entity: - filter: - - integration: zwave_js - domain: sensor - multiple: true meter_type: example: 1 required: false @@ -371,28 +270,10 @@ reset_meter: text: invoke_cc_api: + target: + entity: + integration: zwave_js fields: - area_id: - example: living_room - selector: - area: - device: - - integration: zwave_js - multiple: true - device_id: - example: "8f4219cfa57e23f6f669c4616c2205e2" - selector: - device: - filter: - - integration: zwave_js - multiple: true - entity_id: - example: sensor.living_room_temperature - selector: - entity: - filter: - - integration: zwave_js - multiple: true command_class: example: 132 required: true @@ -415,28 +296,10 @@ invoke_cc_api: object: 
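For illustration, a service call that uses the `target` selector added above for `invoke_cc_api` might look like the following sketch; the entity ID, method name, and parameter values are hypothetical placeholders and not taken from this change, so check the Z-Wave JS Command Class API docs for the methods your device actually supports:

```yaml
# Hypothetical script/automation step calling the Wake Up CC API on a zwave_js entity.
service: zwave_js.invoke_cc_api
target:
  entity_id: sensor.living_room_temperature   # placeholder entity from the old examples
data:
  command_class: 132          # Wake Up CC (matches the example value in services.yaml)
  endpoint: 0                 # root endpoint, the default declared above
  method_name: setInterval    # assumed CC API method name
  parameters: [3600, 1]       # assumed arguments for the chosen method
```

Declaring `target:` with an `integration: zwave_js` entity filter in services.yaml lets Home Assistant render its standard entity/device/area picker for these services, which is why the separate `area_id`, `device_id`, and `entity_id` fields are dropped from each service definition in this hunk.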
refresh_notifications: + target: + entity: + integration: zwave_js fields: - area_id: - example: living_room - selector: - area: - device: - - integration: zwave_js - multiple: true - device_id: - example: "8f4219cfa57e23f6f669c4616c2205e2" - selector: - device: - filter: - - integration: zwave_js - multiple: true - entity_id: - example: sensor.living_room_temperature - selector: - entity: - filter: - - integration: zwave_js - multiple: true notification_type: example: 1 required: true diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index 28789bbf9f4..7c65f1804b1 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -265,22 +265,10 @@ "bulk_set_partial_config_parameters": { "description": "Allows for bulk setting partial parameters. Useful when multiple partial parameters have to be set at the same time.", "fields": { - "area_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" - }, - "device_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" - }, "endpoint": { "description": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::description%]", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, - "entity_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" - }, "parameter": { "description": "[%key:component::zwave_js::services::set_config_parameter::fields::parameter::description%]", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::parameter::name%]" @@ -303,28 +291,16 @@ "name": "Clear lock user code" }, "invoke_cc_api": { - "description": "Calls a Command Class API on a node. Some Command Classes can't be fully controlled via the `set_value` action and require direct calls to the Command Class API.", + "description": "Calls a Command Class API on a node. Some Command Classes can't be fully controlled via the `set_value` service and require direct calls to the Command Class API.", "fields": { - "area_id": { - "description": "The area(s) to target for this service. If an area is specified, all zwave_js devices and entities in that area will be targeted for this service.", - "name": "Area ID(s)" - }, "command_class": { "description": "The ID of the command class that you want to issue a command to.", "name": "[%key:component::zwave_js::services::set_value::fields::command_class::name%]" }, - "device_id": { - "description": "The device(s) to target for this service.", - "name": "Device ID(s)" - }, "endpoint": { "description": "The endpoint to call the API on. If an endpoint is specified, that endpoint will be targeted for all nodes associated with the target areas, devices, and/or entities. 
If an endpoint is not specified, the root endpoint (0) will be targeted for nodes associated with target areas and devices, and the endpoint for the primary value of each entity will be targeted.", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, - "entity_id": { - "description": "The entity ID(s) to target for this service.", - "name": "Entity ID(s)" - }, "method_name": { "description": "The name of the API method to call. Refer to the Z-Wave JS Command Class API documentation (https://zwave-js.github.io/node-zwave-js/#/api/CCs/index) for available methods.", "name": "Method name" @@ -337,12 +313,8 @@ "name": "Invoke a Command Class API on a node (advanced)" }, "multicast_set_value": { - "description": "Changes any value that Z-Wave JS recognizes on multiple Z-Wave devices using multicast, so all devices receive the message simultaneously. This action has minimal validation so only use this action if you know what you are doing.", + "description": "Changes any value that Z-Wave JS recognizes on multiple Z-Wave devices using multicast, so all devices receive the message simultaneously. This service has minimal validation so only use this service if you know what you are doing.", "fields": { - "area_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" - }, "broadcast": { "description": "Whether command should be broadcast to all devices on the network.", "name": "Broadcast?" @@ -351,18 +323,10 @@ "description": "[%key:component::zwave_js::services::set_value::fields::command_class::description%]", "name": "[%key:component::zwave_js::services::set_value::fields::command_class::name%]" }, - "device_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" - }, "endpoint": { "description": "[%key:component::zwave_js::services::set_value::fields::endpoint::description%]", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, - "entity_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" - }, "options": { "description": "[%key:component::zwave_js::services::set_value::fields::options::description%]", "name": "[%key:component::zwave_js::services::set_value::fields::options::name%]" @@ -384,37 +348,11 @@ }, "ping": { "description": "Forces Z-Wave JS to try to reach a node. This can be used to update the status of the node in Z-Wave JS when you think it doesn't accurately reflect reality, e.g. 
reviving a failed/dead node or marking the node as asleep.", - "fields": { - "area_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" - }, - "device_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" - }, - "entity_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" - } - }, "name": "Ping a node" }, "refresh_notifications": { "description": "Refreshes notifications on a node based on notification type and optionally notification event.", "fields": { - "area_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" - }, - "device_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" - }, - "entity_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" - }, "notification_event": { "description": "The Notification Event number as defined in the Z-Wave specs.", "name": "Notification Event" @@ -443,18 +381,6 @@ "reset_meter": { "description": "Resets the meters on a node.", "fields": { - "area_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" - }, - "device_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" - }, - "entity_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" - }, "meter_type": { "description": "The type of meter to reset. Not all meters support the ability to pick a meter type to reset.", "name": "Meter type" @@ -469,26 +395,14 @@ "set_config_parameter": { "description": "Changes the configuration parameters of your Z-Wave devices.", "fields": { - "area_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" - }, "bitmask": { "description": "Target a specific bitmask (see the documentation for more information). 
Cannot be combined with value_size or value_format.", "name": "Bitmask" }, - "device_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" - }, "endpoint": { "description": "The configuration parameter's endpoint.", "name": "Endpoint" }, - "entity_id": { - "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", - "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" - }, "parameter": { "description": "The name (or ID) of the configuration parameter you want to configure.", "name": "Parameter" @@ -523,6 +437,10 @@ "description": "Duration in seconds the latch stays retracted.", "name": "Hold and release time" }, + "inside_handles_can_open_door_configuration": { + "description": "A list of four booleans which indicate which inside handles can open the door.", + "name": "Inside handles can open door configuration" + }, "lock_timeout": { "description": "Seconds until lock mode times out. Should only be used if operation type is `timed`.", "name": "Lock timeout" @@ -531,6 +449,10 @@ "description": "The operation type of the lock.", "name": "Operation Type" }, + "outside_handles_can_open_door_configuration": { + "description": "A list of four booleans which indicate which outside handles can open the door.", + "name": "Outside handles can open door configuration" + }, "twist_assist": { "description": "Enable Twist Assist.", "name": "Twist assist" @@ -553,28 +475,16 @@ "name": "Set lock user code" }, "set_value": { - "description": "Changes any value that Z-Wave JS recognizes on a Z-Wave device. This action has minimal validation so only use this action if you know what you are doing.", + "description": "Changes any value that Z-Wave JS recognizes on a Z-Wave device. This service has minimal validation so only use this service if you know what you are doing.", "fields": { - "area_id": { - "description": "The area(s) to target for this service. If an area is specified, all zwave_js devices and entities in that area will be targeted for this service.", - "name": "Area ID(s)" - }, "command_class": { "description": "The ID of the command class for the value.", "name": "Command class" }, - "device_id": { - "description": "The device(s) to target for this service.", - "name": "Device ID(s)" - }, "endpoint": { "description": "The endpoint for the value.", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, - "entity_id": { - "description": "The entity ID(s) to target for this service.", - "name": "Entity ID(s)" - }, "options": { "description": "Set value options map. Refer to the Z-Wave JS documentation for more information on what options can be set.", "name": "Options" @@ -592,7 +502,7 @@ "name": "[%key:component::zwave_js::services::set_config_parameter::fields::value::name%]" }, "wait_for_result": { - "description": "Whether or not to wait for a response from the node. If not included in the payload, the integration will decide whether to wait or not. If set to `true`, note that the action can take a while if setting a value on an asleep battery device.", + "description": "Whether or not to wait for a response from the node. If not included in the payload, the integration will decide whether to wait or not. If set to `true`, note that the service call can take a while if setting a value on an asleep battery device.", "name": "Wait for result?" 
} }, diff --git a/homeassistant/components/zwave_js/triggers/value_updated.py b/homeassistant/components/zwave_js/triggers/value_updated.py index d6378ea27d5..4814eba0757 100644 --- a/homeassistant/components/zwave_js/triggers/value_updated.py +++ b/homeassistant/components/zwave_js/triggers/value_updated.py @@ -32,7 +32,6 @@ from ..const import ( ATTR_PROPERTY_KEY_NAME, ATTR_PROPERTY_NAME, DOMAIN, - EVENT_VALUE_UPDATED, ) from ..helpers import async_get_nodes_from_targets, get_device_id from .trigger_helpers import async_bypass_dynamic_config_validation @@ -129,9 +128,14 @@ async def async_attach_trigger( (prev_value, prev_value_raw, from_value), (curr_value, curr_value_raw, to_value), ): - if match not in (MATCH_ALL, value_to_eval, raw_value_to_eval) and not ( - isinstance(match, list) - and (value_to_eval in match or raw_value_to_eval in match) + if ( + match != MATCH_ALL + and value_to_eval != match + and not ( + isinstance(match, list) + and (value_to_eval in match or raw_value_to_eval in match) + ) + and raw_value_to_eval != match ): return @@ -185,7 +189,7 @@ async def async_attach_trigger( # We need to store the current value and device for the callback unsubs.append( node.on( - EVENT_VALUE_UPDATED, + "value updated", functools.partial(async_on_value_updated, value, device), ) ) diff --git a/homeassistant/components/zwave_js/update.py b/homeassistant/components/zwave_js/update.py index d060abe007d..02c59d220e1 100644 --- a/homeassistant/components/zwave_js/update.py +++ b/homeassistant/components/zwave_js/update.py @@ -155,8 +155,7 @@ class ZWaveNodeFirmwareUpdate(UpdateEntity): progress: NodeFirmwareUpdateProgress = event["firmware_update_progress"] if not self._latest_version_firmware: return - self._attr_in_progress = True - self._attr_update_percentage = int(progress.progress) + self._attr_in_progress = int(progress.progress) self.async_write_ha_state() @callback @@ -182,7 +181,6 @@ class ZWaveNodeFirmwareUpdate(UpdateEntity): self._result = None self._finished_event.clear() self._attr_in_progress = False - self._attr_update_percentage = None if write_state: self.async_write_ha_state() @@ -269,7 +267,6 @@ class ZWaveNodeFirmwareUpdate(UpdateEntity): assert firmware self._unsub_firmware_events_and_reset_progress(False) self._attr_in_progress = True - self._attr_update_percentage = None self.async_write_ha_state() self._progress_unsub = self.node.on( diff --git a/homeassistant/components/zwave_me/__init__.py b/homeassistant/components/zwave_me/__init__.py index 36ee62eec53..7e00924c221 100644 --- a/homeassistant/components/zwave_me/__init__.py +++ b/homeassistant/components/zwave_me/__init__.py @@ -1,16 +1,21 @@ """The Z-Wave-Me WS integration.""" +import logging + from zwave_me_ws import ZWaveMe, ZWaveMeData from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_TOKEN, CONF_URL -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.dispatcher import dispatcher_send +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send +from homeassistant.helpers.entity import Entity from .const import DOMAIN, PLATFORMS, ZWaveMePlatform +_LOGGER = logging.getLogger(__name__) ZWAVE_ME_PLATFORMS = [platform.value for platform in ZWaveMePlatform] @@ -106,3 +111,66 @@ async def 
async_setup_platforms( controller.platforms_inited = True await hass.async_add_executor_job(controller.zwave_api.get_devices) + + +class ZWaveMeEntity(Entity): + """Representation of a ZWaveMe device.""" + + def __init__(self, controller, device): + """Initialize the device.""" + self.controller = controller + self.device = device + self._attr_name = device.title + self._attr_unique_id: str = ( + f"{self.controller.config.unique_id}-{self.device.id}" + ) + self._attr_should_poll = False + + @property + def device_info(self) -> DeviceInfo: + """Return device specific attributes.""" + return DeviceInfo( + identifiers={(DOMAIN, self.device.deviceIdentifier)}, + name=self._attr_name, + manufacturer=self.device.manufacturer, + sw_version=self.device.firmware, + suggested_area=self.device.locationName, + ) + + async def async_added_to_hass(self) -> None: + """Connect to an updater.""" + self.async_on_remove( + async_dispatcher_connect( + self.hass, f"ZWAVE_ME_INFO_{self.device.id}", self.get_new_data + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"ZWAVE_ME_UNAVAILABLE_{self.device.id}", + self.set_unavailable_status, + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, f"ZWAVE_ME_DESTROY_{self.device.id}", self.delete_entity + ) + ) + + @callback + def get_new_data(self, new_data: ZWaveMeData) -> None: + """Update info in the HAss.""" + self.device = new_data + self._attr_available = not new_data.isFailed + self.async_write_ha_state() + + @callback + def set_unavailable_status(self): + """Update status in the HAss.""" + self._attr_available = False + self.async_write_ha_state() + + @callback + def delete_entity(self) -> None: + """Remove this entity.""" + self.hass.async_create_task(self.async_remove(force_remove=True)) diff --git a/homeassistant/components/zwave_me/binary_sensor.py b/homeassistant/components/zwave_me/binary_sensor.py index d121c17770b..3be8f912b6d 100644 --- a/homeassistant/components/zwave_me/binary_sensor.py +++ b/homeassistant/components/zwave_me/binary_sensor.py @@ -14,9 +14,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ZWaveMeController +from . import ZWaveMeController, ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform -from .entity import ZWaveMeEntity BINARY_SENSORS_MAP: dict[str, BinarySensorEntityDescription] = { "generic": BinarySensorEntityDescription( diff --git a/homeassistant/components/zwave_me/button.py b/homeassistant/components/zwave_me/button.py index 50ddf01aeab..f7f1d5d7945 100644 --- a/homeassistant/components/zwave_me/button.py +++ b/homeassistant/components/zwave_me/button.py @@ -6,8 +6,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform -from .entity import ZWaveMeEntity DEVICE_NAME = ZWaveMePlatform.BUTTON diff --git a/homeassistant/components/zwave_me/climate.py b/homeassistant/components/zwave_me/climate.py index de6f606745f..02112e51617 100644 --- a/homeassistant/components/zwave_me/climate.py +++ b/homeassistant/components/zwave_me/climate.py @@ -17,8 +17,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform -from .entity import ZWaveMeEntity TEMPERATURE_DEFAULT_STEP = 0.5 diff --git a/homeassistant/components/zwave_me/cover.py b/homeassistant/components/zwave_me/cover.py index c9359402c01..c2eec09496d 100644 --- a/homeassistant/components/zwave_me/cover.py +++ b/homeassistant/components/zwave_me/cover.py @@ -14,8 +14,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform -from .entity import ZWaveMeEntity DEVICE_NAME = ZWaveMePlatform.COVER diff --git a/homeassistant/components/zwave_me/entity.py b/homeassistant/components/zwave_me/entity.py deleted file mode 100644 index a02c893d54a..00000000000 --- a/homeassistant/components/zwave_me/entity.py +++ /dev/null @@ -1,73 +0,0 @@ -"""The Z-Wave-Me WS integration.""" - -from zwave_me_ws import ZWaveMeData - -from homeassistant.core import callback -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity import Entity - -from .const import DOMAIN - - -class ZWaveMeEntity(Entity): - """Representation of a ZWaveMe device.""" - - def __init__(self, controller, device): - """Initialize the device.""" - self.controller = controller - self.device = device - self._attr_name = device.title - self._attr_unique_id: str = ( - f"{self.controller.config.unique_id}-{self.device.id}" - ) - self._attr_should_poll = False - - @property - def device_info(self) -> DeviceInfo: - """Return device specific attributes.""" - return DeviceInfo( - identifiers={(DOMAIN, self.device.deviceIdentifier)}, - name=self._attr_name, - manufacturer=self.device.manufacturer, - sw_version=self.device.firmware, - suggested_area=self.device.locationName, - ) - - async def async_added_to_hass(self) -> None: - """Connect to an updater.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, f"ZWAVE_ME_INFO_{self.device.id}", self.get_new_data - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"ZWAVE_ME_UNAVAILABLE_{self.device.id}", - self.set_unavailable_status, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, f"ZWAVE_ME_DESTROY_{self.device.id}", self.delete_entity - ) - ) - - @callback - def get_new_data(self, new_data: ZWaveMeData) -> None: - """Update info in the HAss.""" - self.device = new_data - self._attr_available = not new_data.isFailed - self.async_write_ha_state() - - @callback - def set_unavailable_status(self): - """Update status in the HAss.""" - self._attr_available = False - self.async_write_ha_state() - - @callback - def delete_entity(self) -> None: - """Remove this entity.""" - 
self.hass.async_create_task(self.async_remove(force_remove=True)) diff --git a/homeassistant/components/zwave_me/fan.py b/homeassistant/components/zwave_me/fan.py index 1016586ab55..25ccec9a0fb 100644 --- a/homeassistant/components/zwave_me/fan.py +++ b/homeassistant/components/zwave_me/fan.py @@ -10,8 +10,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform -from .entity import ZWaveMeEntity DEVICE_NAME = ZWaveMePlatform.FAN @@ -44,12 +44,7 @@ async def async_setup_entry( class ZWaveMeFan(ZWaveMeEntity, FanEntity): """Representation of a ZWaveMe Fan.""" - _attr_supported_features = ( - FanEntityFeature.SET_SPEED - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) - _enable_turn_on_off_backwards_compatibility = False + _attr_supported_features = FanEntityFeature.SET_SPEED @property def percentage(self) -> int: diff --git a/homeassistant/components/zwave_me/light.py b/homeassistant/components/zwave_me/light.py index ef3eca5d389..2289fe7b115 100644 --- a/homeassistant/components/zwave_me/light.py +++ b/homeassistant/components/zwave_me/light.py @@ -17,9 +17,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ZWaveMeController +from . import ZWaveMeController, ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform -from .entity import ZWaveMeEntity async def async_setup_entry( @@ -85,8 +84,8 @@ class ZWaveMeRGB(ZWaveMeEntity, LightEntity): self.device.id, f"exact?level={round(brightness / 2.55)}" ) return - red, green, blue = color if any(color) else (255, 255, 255) - cmd = f"exact?red={red}&green={green}&blue={blue}" + cmd = "exact?red={}&green={}&blue={}" + cmd = cmd.format(*color) if any(color) else cmd.format(*(255, 255, 255)) self.controller.zwave_api.send_command(self.device.id, cmd) @property diff --git a/homeassistant/components/zwave_me/lock.py b/homeassistant/components/zwave_me/lock.py index 0bcc8f092ae..6218dac1627 100644 --- a/homeassistant/components/zwave_me/lock.py +++ b/homeassistant/components/zwave_me/lock.py @@ -12,8 +12,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform -from .entity import ZWaveMeEntity DEVICE_NAME = ZWaveMePlatform.LOCK diff --git a/homeassistant/components/zwave_me/number.py b/homeassistant/components/zwave_me/number.py index 9a98a4f8d00..272e833d678 100644 --- a/homeassistant/components/zwave_me/number.py +++ b/homeassistant/components/zwave_me/number.py @@ -6,8 +6,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform -from .entity import ZWaveMeEntity DEVICE_NAME = ZWaveMePlatform.NUMBER diff --git a/homeassistant/components/zwave_me/sensor.py b/homeassistant/components/zwave_me/sensor.py index be0b0bae284..20470e6e62b 100644 --- a/homeassistant/components/zwave_me/sensor.py +++ b/homeassistant/components/zwave_me/sensor.py @@ -28,9 +28,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ZWaveMeController +from . import ZWaveMeController, ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform -from .entity import ZWaveMeEntity @dataclass(frozen=True) diff --git a/homeassistant/components/zwave_me/siren.py b/homeassistant/components/zwave_me/siren.py index 443b2cc7b37..a1bf8081616 100644 --- a/homeassistant/components/zwave_me/siren.py +++ b/homeassistant/components/zwave_me/siren.py @@ -8,8 +8,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform -from .entity import ZWaveMeEntity DEVICE_NAME = ZWaveMePlatform.SIREN diff --git a/homeassistant/components/zwave_me/switch.py b/homeassistant/components/zwave_me/switch.py index 05cf06484e9..4c11f079b12 100644 --- a/homeassistant/components/zwave_me/switch.py +++ b/homeassistant/components/zwave_me/switch.py @@ -13,8 +13,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import ZWaveMeEntity from .const import DOMAIN, ZWaveMePlatform -from .entity import ZWaveMeEntity _LOGGER = logging.getLogger(__name__) DEVICE_NAME = ZWaveMePlatform.SWITCH diff --git a/homeassistant/config.py b/homeassistant/config.py index cab4d0c7aff..ff679d4df51 100644 --- a/homeassistant/config.py +++ b/homeassistant/config.py @@ -17,23 +17,62 @@ import re import shutil from types import ModuleType from typing import TYPE_CHECKING, Any +from urllib.parse import urlparse from awesomeversion import AwesomeVersion import voluptuous as vol from voluptuous.humanize import MAX_VALIDATION_ERROR_ITEM_LENGTH from yaml.error import MarkedYAMLError -from .const import CONF_PACKAGES, CONF_PLATFORM, __version__ -from .core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback -from .core_config import _PACKAGE_DEFINITION_SCHEMA, _PACKAGES_CONFIG_SCHEMA +from . 
import auth +from .auth import mfa_modules as auth_mfa_modules, providers as auth_providers +from .const import ( + ATTR_ASSUMED_STATE, + ATTR_FRIENDLY_NAME, + ATTR_HIDDEN, + CONF_ALLOWLIST_EXTERNAL_DIRS, + CONF_ALLOWLIST_EXTERNAL_URLS, + CONF_AUTH_MFA_MODULES, + CONF_AUTH_PROVIDERS, + CONF_COUNTRY, + CONF_CURRENCY, + CONF_CUSTOMIZE, + CONF_CUSTOMIZE_DOMAIN, + CONF_CUSTOMIZE_GLOB, + CONF_DEBUG, + CONF_ELEVATION, + CONF_EXTERNAL_URL, + CONF_ID, + CONF_INTERNAL_URL, + CONF_LANGUAGE, + CONF_LATITUDE, + CONF_LEGACY_TEMPLATES, + CONF_LONGITUDE, + CONF_MEDIA_DIRS, + CONF_NAME, + CONF_PACKAGES, + CONF_PLATFORM, + CONF_RADIUS, + CONF_TEMPERATURE_UNIT, + CONF_TIME_ZONE, + CONF_TYPE, + CONF_UNIT_SYSTEM, + LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, + __version__, +) +from .core import DOMAIN as HA_DOMAIN, ConfigSource, HomeAssistant, callback from .exceptions import ConfigValidationError, HomeAssistantError -from .helpers import config_validation as cv +from .generated.currencies import HISTORIC_CURRENCIES +from .helpers import config_validation as cv, issue_registry as ir +from .helpers.entity_values import EntityValues from .helpers.translation import async_get_exception_message from .helpers.typing import ConfigType from .loader import ComponentProtocol, Integration, IntegrationNotFound from .requirements import RequirementsNotFound, async_get_integration_with_requirements from .util.async_ import create_eager_task +from .util.hass_dict import HassKey from .util.package import is_docker_env +from .util.unit_system import get_unit_system, validate_unit_system from .util.yaml import SECRET_YAML, Secrets, YamlTypeError, load_yaml_dict from .util.yaml.objects import NodeStrClass @@ -44,6 +83,7 @@ RE_ASCII = re.compile(r"\033\[[^m]*m") YAML_CONFIG_FILE = "configuration.yaml" VERSION_FILE = ".HA_VERSION" CONFIG_DIR_NAME = ".homeassistant" +DATA_CUSTOMIZE: HassKey[EntityValues] = HassKey("hass_customize") AUTOMATION_CONFIG_PATH = "automations.yaml" SCRIPT_CONFIG_PATH = "scripts.yaml" @@ -132,6 +172,201 @@ class IntegrationConfigInfo: exception_info_list: list[ConfigExceptionInfo] +def _no_duplicate_auth_provider( + configs: Sequence[dict[str, Any]], +) -> Sequence[dict[str, Any]]: + """No duplicate auth provider config allowed in a list. + + Each type of auth provider can only have one config without optional id. + Unique id is required if same type of auth provider used multiple times. + """ + config_keys: set[tuple[str, str | None]] = set() + for config in configs: + key = (config[CONF_TYPE], config.get(CONF_ID)) + if key in config_keys: + raise vol.Invalid( + f"Duplicate auth provider {config[CONF_TYPE]} found. " + "Please add unique IDs " + "if you want to have the same auth provider twice" + ) + config_keys.add(key) + return configs + + +def _no_duplicate_auth_mfa_module( + configs: Sequence[dict[str, Any]], +) -> Sequence[dict[str, Any]]: + """No duplicate auth mfa module item allowed in a list. + + Each type of mfa module can only have one config without optional id. + A global unique id is required if same type of mfa module used multiple + times. + Note: this is different than auth provider + """ + config_keys: set[str] = set() + for config in configs: + key = config.get(CONF_ID, config[CONF_TYPE]) + if key in config_keys: + raise vol.Invalid( + f"Duplicate mfa module {config[CONF_TYPE]} found. 
" + "Please add unique IDs " + "if you want to have the same mfa module twice" + ) + config_keys.add(key) + return configs + + +def _filter_bad_internal_external_urls(conf: dict) -> dict: + """Filter internal/external URL with a path.""" + for key in CONF_INTERNAL_URL, CONF_EXTERNAL_URL: + if key in conf and urlparse(conf[key]).path not in ("", "/"): + # We warn but do not fix, because if this was incorrectly configured, + # adjusting this value might impact security. + _LOGGER.warning( + "Invalid %s set. It's not allowed to have a path (/bla)", key + ) + + return conf + + +# Schema for all packages element +PACKAGES_CONFIG_SCHEMA = vol.Schema({cv.string: vol.Any(dict, list)}) + +# Schema for individual package definition +PACKAGE_DEFINITION_SCHEMA = vol.Schema({cv.string: vol.Any(dict, list, None)}) + +CUSTOMIZE_DICT_SCHEMA = vol.Schema( + { + vol.Optional(ATTR_FRIENDLY_NAME): cv.string, + vol.Optional(ATTR_HIDDEN): cv.boolean, + vol.Optional(ATTR_ASSUMED_STATE): cv.boolean, + }, + extra=vol.ALLOW_EXTRA, +) + +CUSTOMIZE_CONFIG_SCHEMA = vol.Schema( + { + vol.Optional(CONF_CUSTOMIZE, default={}): vol.Schema( + {cv.entity_id: CUSTOMIZE_DICT_SCHEMA} + ), + vol.Optional(CONF_CUSTOMIZE_DOMAIN, default={}): vol.Schema( + {cv.string: CUSTOMIZE_DICT_SCHEMA} + ), + vol.Optional(CONF_CUSTOMIZE_GLOB, default={}): vol.Schema( + {cv.string: CUSTOMIZE_DICT_SCHEMA} + ), + } +) + + +def _raise_issue_if_historic_currency(hass: HomeAssistant, currency: str) -> None: + if currency not in HISTORIC_CURRENCIES: + ir.async_delete_issue(hass, HA_DOMAIN, "historic_currency") + return + + ir.async_create_issue( + hass, + HA_DOMAIN, + "historic_currency", + is_fixable=False, + learn_more_url="homeassistant://config/general", + severity=ir.IssueSeverity.WARNING, + translation_key="historic_currency", + translation_placeholders={"currency": currency}, + ) + + +def _raise_issue_if_no_country(hass: HomeAssistant, country: str | None) -> None: + if country is not None: + ir.async_delete_issue(hass, HA_DOMAIN, "country_not_configured") + return + + ir.async_create_issue( + hass, + HA_DOMAIN, + "country_not_configured", + is_fixable=False, + learn_more_url="homeassistant://config/general", + severity=ir.IssueSeverity.WARNING, + translation_key="country_not_configured", + ) + + +def _validate_currency(data: Any) -> Any: + try: + return cv.currency(data) + except vol.InInvalid: + with suppress(vol.InInvalid): + return cv.historic_currency(data) + raise + + +CORE_CONFIG_SCHEMA = vol.All( + CUSTOMIZE_CONFIG_SCHEMA.extend( + { + CONF_NAME: vol.Coerce(str), + CONF_LATITUDE: cv.latitude, + CONF_LONGITUDE: cv.longitude, + CONF_ELEVATION: vol.Coerce(int), + CONF_RADIUS: cv.positive_int, + vol.Remove(CONF_TEMPERATURE_UNIT): cv.temperature_unit, + CONF_UNIT_SYSTEM: validate_unit_system, + CONF_TIME_ZONE: cv.time_zone, + vol.Optional(CONF_INTERNAL_URL): cv.url, + vol.Optional(CONF_EXTERNAL_URL): cv.url, + vol.Optional(CONF_ALLOWLIST_EXTERNAL_DIRS): vol.All( + cv.ensure_list, [vol.IsDir()] + ), + vol.Optional(LEGACY_CONF_WHITELIST_EXTERNAL_DIRS): vol.All( + cv.ensure_list, [vol.IsDir()] + ), + vol.Optional(CONF_ALLOWLIST_EXTERNAL_URLS): vol.All( + cv.ensure_list, [cv.url] + ), + vol.Optional(CONF_PACKAGES, default={}): PACKAGES_CONFIG_SCHEMA, + vol.Optional(CONF_AUTH_PROVIDERS): vol.All( + cv.ensure_list, + [ + auth_providers.AUTH_PROVIDER_SCHEMA.extend( + { + CONF_TYPE: vol.NotIn( + ["insecure_example"], + ( + "The insecure_example auth provider" + " is for testing only." 
+ ), + ) + } + ) + ], + _no_duplicate_auth_provider, + ), + vol.Optional(CONF_AUTH_MFA_MODULES): vol.All( + cv.ensure_list, + [ + auth_mfa_modules.MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend( + { + CONF_TYPE: vol.NotIn( + ["insecure_example"], + "The insecure_example mfa module is for testing only.", + ) + } + ) + ], + _no_duplicate_auth_mfa_module, + ), + vol.Optional(CONF_MEDIA_DIRS): cv.schema_with_slug_keys(vol.IsDir()), + vol.Remove(CONF_LEGACY_TEMPLATES): cv.boolean, + vol.Optional(CONF_CURRENCY): _validate_currency, + vol.Optional(CONF_COUNTRY): cv.country, + vol.Optional(CONF_LANGUAGE): cv.language, + vol.Optional(CONF_DEBUG): cv.boolean, + } + ), + _filter_bad_internal_external_urls, +) + + def get_default_config_dir() -> str: """Put together the default configuration directory based on the OS.""" data_dir = os.path.expanduser("~") @@ -246,14 +481,12 @@ async def async_hass_config_yaml(hass: HomeAssistant) -> dict: for invalid_domain in invalid_domains: config.pop(invalid_domain) - core_config = config.get(HOMEASSISTANT_DOMAIN, {}) + core_config = config.get(HA_DOMAIN, {}) try: await merge_packages_config(hass, config, core_config.get(CONF_PACKAGES, {})) except vol.Invalid as exc: suffix = "" - if annotation := find_annotation( - config, [HOMEASSISTANT_DOMAIN, CONF_PACKAGES, *exc.path] - ): + if annotation := find_annotation(config, [HA_DOMAIN, CONF_PACKAGES, *exc.path]): suffix = f" at {_relpath(hass, annotation[0])}, line {annotation[1]}" _LOGGER.error( "Invalid package configuration '%s'%s: %s", CONF_PACKAGES, suffix, exc @@ -476,7 +709,7 @@ def stringify_invalid( ) else: message_prefix = f"Invalid config for '{domain}'" - if domain != HOMEASSISTANT_DOMAIN and link: + if domain != HA_DOMAIN and link: message_suffix = f", please check the docs at {link}" else: message_suffix = "" @@ -559,7 +792,7 @@ def format_homeassistant_error( if annotation := find_annotation(config, [domain]): message_prefix += f" at {_relpath(hass, annotation[0])}, line {annotation[1]}" message = f"{message_prefix}: {str(exc) or repr(exc)}" - if domain != HOMEASSISTANT_DOMAIN and link: + if domain != HA_DOMAIN and link: message += f", please check the docs at {link}" return message @@ -577,14 +810,135 @@ def format_schema_error( return humanize_error(hass, exc, domain, config, link) +async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> None: + """Process the [homeassistant] section from the configuration. + + This method is a coroutine. + """ + config = CORE_CONFIG_SCHEMA(config) + + # Only load auth during startup. 
+ if not hasattr(hass, "auth"): + if (auth_conf := config.get(CONF_AUTH_PROVIDERS)) is None: + auth_conf = [{"type": "homeassistant"}] + + mfa_conf = config.get( + CONF_AUTH_MFA_MODULES, + [{"type": "totp", "id": "totp", "name": "Authenticator app"}], + ) + + setattr( + hass, "auth", await auth.auth_manager_from_config(hass, auth_conf, mfa_conf) + ) + + await hass.config.async_load() + + hac = hass.config + + if any( + k in config + for k in ( + CONF_LATITUDE, + CONF_LONGITUDE, + CONF_NAME, + CONF_ELEVATION, + CONF_TIME_ZONE, + CONF_UNIT_SYSTEM, + CONF_EXTERNAL_URL, + CONF_INTERNAL_URL, + CONF_CURRENCY, + CONF_COUNTRY, + CONF_LANGUAGE, + CONF_RADIUS, + ) + ): + hac.config_source = ConfigSource.YAML + + for key, attr in ( + (CONF_LATITUDE, "latitude"), + (CONF_LONGITUDE, "longitude"), + (CONF_NAME, "location_name"), + (CONF_ELEVATION, "elevation"), + (CONF_INTERNAL_URL, "internal_url"), + (CONF_EXTERNAL_URL, "external_url"), + (CONF_MEDIA_DIRS, "media_dirs"), + (CONF_CURRENCY, "currency"), + (CONF_COUNTRY, "country"), + (CONF_LANGUAGE, "language"), + (CONF_RADIUS, "radius"), + ): + if key in config: + setattr(hac, attr, config[key]) + + if config.get(CONF_DEBUG): + hac.debug = True + + _raise_issue_if_historic_currency(hass, hass.config.currency) + _raise_issue_if_no_country(hass, hass.config.country) + + if CONF_TIME_ZONE in config: + await hac.async_set_time_zone(config[CONF_TIME_ZONE]) + + if CONF_MEDIA_DIRS not in config: + if is_docker_env(): + hac.media_dirs = {"local": "/media"} + else: + hac.media_dirs = {"local": hass.config.path("media")} + + # Init whitelist external dir + hac.allowlist_external_dirs = {hass.config.path("www"), *hac.media_dirs.values()} + if CONF_ALLOWLIST_EXTERNAL_DIRS in config: + hac.allowlist_external_dirs.update(set(config[CONF_ALLOWLIST_EXTERNAL_DIRS])) + + elif LEGACY_CONF_WHITELIST_EXTERNAL_DIRS in config: + _LOGGER.warning( + "Key %s has been replaced with %s. 
Please update your config", + LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, + CONF_ALLOWLIST_EXTERNAL_DIRS, + ) + hac.allowlist_external_dirs.update( + set(config[LEGACY_CONF_WHITELIST_EXTERNAL_DIRS]) + ) + + # Init whitelist external URL list – make sure to add / to every URL that doesn't + # already have it so that we can properly test "path ownership" + if CONF_ALLOWLIST_EXTERNAL_URLS in config: + hac.allowlist_external_urls.update( + url if url.endswith("/") else f"{url}/" + for url in config[CONF_ALLOWLIST_EXTERNAL_URLS] + ) + + # Customize + cust_exact = dict(config[CONF_CUSTOMIZE]) + cust_domain = dict(config[CONF_CUSTOMIZE_DOMAIN]) + cust_glob = OrderedDict(config[CONF_CUSTOMIZE_GLOB]) + + for name, pkg in config[CONF_PACKAGES].items(): + if (pkg_cust := pkg.get(HA_DOMAIN)) is None: + continue + + try: + pkg_cust = CUSTOMIZE_CONFIG_SCHEMA(pkg_cust) + except vol.Invalid: + _LOGGER.warning("Package %s contains invalid customize", name) + continue + + cust_exact.update(pkg_cust[CONF_CUSTOMIZE]) + cust_domain.update(pkg_cust[CONF_CUSTOMIZE_DOMAIN]) + cust_glob.update(pkg_cust[CONF_CUSTOMIZE_GLOB]) + + hass.data[DATA_CUSTOMIZE] = EntityValues(cust_exact, cust_domain, cust_glob) + + if CONF_UNIT_SYSTEM in config: + hac.units = get_unit_system(config[CONF_UNIT_SYSTEM]) + + def _log_pkg_error( hass: HomeAssistant, package: str, component: str | None, config: dict, message: str ) -> None: """Log an error while merging packages.""" message_prefix = f"Setup of package '{package}'" - if annotation := find_annotation( - config, [HOMEASSISTANT_DOMAIN, CONF_PACKAGES, package] - ): + if annotation := find_annotation(config, [HA_DOMAIN, CONF_PACKAGES, package]): message_prefix += f" at {_relpath(hass, annotation[0])}, line {annotation[1]}" _LOGGER.error("%s failed: %s", message_prefix, message) @@ -593,7 +947,7 @@ def _log_pkg_error( def _identify_config_schema(module: ComponentProtocol) -> str | None: """Extract the schema and identify list or dict based.""" if not isinstance(module.CONFIG_SCHEMA, vol.Schema): - return None # type: ignore[unreachable] + return None schema = module.CONFIG_SCHEMA.schema @@ -641,7 +995,7 @@ def _identify_config_schema(module: ComponentProtocol) -> str | None: def _validate_package_definition(name: str, conf: Any) -> None: """Validate basic package definition properties.""" cv.slug(name) - _PACKAGE_DEFINITION_SCHEMA(conf) + PACKAGE_DEFINITION_SCHEMA(conf) def _recursive_merge(conf: dict[str, Any], package: dict[str, Any]) -> str | None: @@ -680,7 +1034,7 @@ async def merge_packages_config( vol.Invalid if whole package config is invalid. """ - _PACKAGES_CONFIG_SCHEMA(packages) + PACKAGES_CONFIG_SCHEMA(packages) invalid_packages = [] for pack_name, pack_conf in packages.items(): @@ -699,7 +1053,7 @@ async def merge_packages_config( continue for comp_name, comp_conf in pack_conf.items(): - if comp_name == HOMEASSISTANT_DOMAIN: + if comp_name == HA_DOMAIN: continue try: domain = cv.domain_key(comp_name) @@ -844,7 +1198,7 @@ def _get_log_message_and_stack_print_pref( # Generate the log message from the English translations log_message = async_get_exception_message( - HOMEASSISTANT_DOMAIN, + HA_DOMAIN, platform_exception.translation_key, translation_placeholders=placeholders, ) @@ -905,7 +1259,7 @@ def async_drop_config_annotations( # Don't drop annotations from the homeassistant integration because it may # have configuration for other integrations as packages. 
- if integration.domain in config and integration.domain != HOMEASSISTANT_DOMAIN: + if integration.domain in config and integration.domain != HA_DOMAIN: drop_config_annotations_rec(config[integration.domain]) return config @@ -957,7 +1311,7 @@ def async_handle_component_errors( raise ConfigValidationError( translation_key, [platform_exception.exception for platform_exception in config_exception_info], - translation_domain=HOMEASSISTANT_DOMAIN, + translation_domain=HA_DOMAIN, translation_placeholders=placeholders, ) @@ -1177,9 +1531,7 @@ async def async_process_component_config( # No custom config validator, proceed with schema validation if hasattr(component, "CONFIG_SCHEMA"): try: - return IntegrationConfigInfo( - await cv.async_validate(hass, component.CONFIG_SCHEMA, config), [] - ) + return IntegrationConfigInfo(component.CONFIG_SCHEMA(config), []) except vol.Invalid as exc: exc_info = ConfigExceptionInfo( exc, @@ -1214,9 +1566,7 @@ async def async_process_component_config( # Validate component specific platform schema platform_path = f"{p_name}.{domain}" try: - p_validated = await cv.async_validate( - hass, component_platform_schema, p_config - ) + p_validated = component_platform_schema(p_config) except vol.Invalid as exc: exc_info = ConfigExceptionInfo( exc, diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index f1748c6b7fb..c8d671e1fe1 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -3,43 +3,27 @@ from __future__ import annotations import asyncio -from collections import UserDict, defaultdict -from collections.abc import ( - Callable, - Coroutine, - Generator, - Hashable, - Iterable, - Mapping, - ValuesView, -) +from collections import UserDict +from collections.abc import Callable, Coroutine, Hashable, Iterable, Mapping, ValuesView from contextvars import ContextVar from copy import deepcopy -from datetime import datetime from enum import Enum, StrEnum import functools -from functools import cache +from functools import cached_property import logging from random import randint from types import MappingProxyType from typing import TYPE_CHECKING, Any, Generic, Self, cast from async_interrupt import interrupt -from propcache import cached_property -from typing_extensions import TypeVar -import voluptuous as vol +from typing_extensions import Generator, TypeVar from . 
import data_entry_flow, loader from .components import persistent_notification -from .const import ( - CONF_NAME, - EVENT_HOMEASSISTANT_STARTED, - EVENT_HOMEASSISTANT_STOP, - Platform, -) +from .const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, Platform from .core import ( CALLBACK_TYPE, - DOMAIN as HOMEASSISTANT_DOMAIN, + DOMAIN as HA_DOMAIN, CoreState, Event, HassJob, @@ -47,7 +31,7 @@ from .core import ( HomeAssistant, callback, ) -from .data_entry_flow import FLOW_NOT_COMPLETE_STEPS, FlowContext, FlowResult +from .data_entry_flow import FLOW_NOT_COMPLETE_STEPS, FlowResult from .exceptions import ( ConfigEntryAuthFailed, ConfigEntryError, @@ -56,15 +40,14 @@ from .exceptions import ( ) from .helpers import device_registry, entity_registry, issue_registry as ir, storage from .helpers.debounce import Debouncer -from .helpers.discovery_flow import DiscoveryKey from .helpers.dispatcher import SignalType, async_dispatcher_send_internal from .helpers.event import ( RANDOM_MICROSECOND_MAX, RANDOM_MICROSECOND_MIN, async_call_later, ) -from .helpers.frame import ReportBehavior, report, report_usage -from .helpers.json import json_bytes, json_bytes_sorted, json_fragment +from .helpers.frame import report +from .helpers.json import json_bytes, json_fragment from .helpers.typing import UNDEFINED, ConfigType, DiscoveryInfoType, UndefinedType from .loader import async_suggest_report_issue from .setup import ( @@ -78,16 +61,15 @@ from .setup import ( from .util import ulid as ulid_util from .util.async_ import create_eager_task from .util.decorator import Registry -from .util.dt import utc_from_timestamp, utcnow from .util.enum import try_parse_enum if TYPE_CHECKING: from .components.bluetooth import BluetoothServiceInfoBleak from .components.dhcp import DhcpServiceInfo + from .components.hassio import HassioServiceInfo from .components.ssdp import SsdpServiceInfo from .components.usb import UsbServiceInfo from .components.zeroconf import ZeroconfServiceInfo - from .helpers.service_info.hassio import HassioServiceInfo from .helpers.service_info.mqtt import MqttServiceInfo @@ -113,6 +95,11 @@ SOURCE_ZEROCONF = "zeroconf" # source and while it exists normal discoveries with the same unique id are ignored. SOURCE_IGNORE = "ignore" +# This is used when a user uses the "Stop Ignoring" button in the UI (the +# config_entries/ignore_flow websocket command). It's triggered after the +# "ignore" config entry has been removed and unloaded. +SOURCE_UNIGNORE = "unignore" + # This is used to signal that re-authentication is required by the user. 
SOURCE_REAUTH = "reauth" @@ -123,15 +110,12 @@ HANDLERS: Registry[str, type[ConfigFlow]] = Registry() STORAGE_KEY = "core.config_entries" STORAGE_VERSION = 1 -STORAGE_VERSION_MINOR = 4 +STORAGE_VERSION_MINOR = 2 SAVE_DELAY = 1 DISCOVERY_COOLDOWN = 1 -ISSUE_UNIQUE_ID_COLLISION = "config_entry_unique_id_collision" -UNIQUE_ID_COLLISION_TITLE_LIMIT = 5 - _DataT = TypeVar("_DataT", default=Any) @@ -179,13 +163,12 @@ DISCOVERY_SOURCES = { SOURCE_DHCP, SOURCE_DISCOVERY, SOURCE_HARDWARE, - SOURCE_HASSIO, SOURCE_HOMEKIT, SOURCE_IMPORT, SOURCE_INTEGRATION_DISCOVERY, SOURCE_MQTT, SOURCE_SSDP, - SOURCE_SYSTEM, + SOURCE_UNIGNORE, SOURCE_USB, SOURCE_ZEROCONF, } @@ -198,15 +181,6 @@ SIGNAL_CONFIG_ENTRY_CHANGED = SignalType["ConfigEntryChange", "ConfigEntry"]( "config_entry_changed" ) - -@cache -def signal_discovered_config_entry_removed( - discovery_domain: str, -) -> SignalType[ConfigEntry]: - """Format signal.""" - return SignalType(f"{discovery_domain}_discovered_config_entry_removed") - - NO_RESET_TRIES_STATES = { ConfigEntryState.SETUP_RETRY, ConfigEntryState.SETUP_IN_PROGRESS, @@ -259,13 +233,14 @@ type UpdateListenerType = Callable[ [HomeAssistant, ConfigEntry], Coroutine[Any, Any, None] ] -STATE_KEYS = { +FROZEN_CONFIG_ENTRY_ATTRS = { + "entry_id", + "domain", "state", "reason", "error_reason_translation_key", "error_reason_translation_placeholders", } -FROZEN_CONFIG_ENTRY_ATTRS = {"entry_id", "domain", *STATE_KEYS} UPDATE_ENTRY_CONFIG_ENTRY_ATTRS = { "unique_id", "title", @@ -278,19 +253,7 @@ UPDATE_ENTRY_CONFIG_ENTRY_ATTRS = { } -class ConfigFlowContext(FlowContext, total=False): - """Typed context dict for config flow.""" - - alternative_domain: str - configuration_url: str - confirm_only: bool - discovery_key: DiscoveryKey - entry_id: str - title_placeholders: Mapping[str, str] - unique_id: str | None - - -class ConfigFlowResult(FlowResult[ConfigFlowContext, str], total=False): +class ConfigFlowResult(FlowResult, total=False): """Typed result dict for config flow.""" minor_version: int @@ -298,16 +261,6 @@ class ConfigFlowResult(FlowResult[ConfigFlowContext, str], total=False): version: int -def _validate_item(*, disabled_by: ConfigEntryDisabler | Any | None = None) -> None: - """Validate config entry item.""" - - # Deprecated in 2022.1, stopped working in 2024.10 - if disabled_by is not None and not isinstance(disabled_by, ConfigEntryDisabler): - raise TypeError( - f"disabled_by must be a ConfigEntryDisabler value, got {disabled_by}" - ) - - class ConfigEntry(Generic[_DataT]): """Hold a configuration entry.""" @@ -337,25 +290,20 @@ class ConfigEntry(Generic[_DataT]): _on_unload: list[Callable[[], Coroutine[Any, Any, None] | None]] | None setup_lock: asyncio.Lock _reauth_lock: asyncio.Lock + _reconfigure_lock: asyncio.Lock _tasks: set[asyncio.Future[Any]] _background_tasks: set[asyncio.Future[Any]] _integration_for_domain: loader.Integration | None _tries: int - created_at: datetime - modified_at: datetime - discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]] def __init__( self, *, - created_at: datetime | None = None, data: Mapping[str, Any], disabled_by: ConfigEntryDisabler | None = None, - discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]], domain: str, entry_id: str | None = None, minor_version: int, - modified_at: datetime | None = None, options: Mapping[str, Any] | None, pref_disable_new_entities: bool | None = None, pref_disable_polling: bool | None = None, @@ -407,7 +355,18 @@ class ConfigEntry(Generic[_DataT]): _setter(self, "unique_id", unique_id) # Config 
entry is disabled - _validate_item(disabled_by=disabled_by) + if isinstance(disabled_by, str) and not isinstance( + disabled_by, ConfigEntryDisabler + ): + report( # type: ignore[unreachable] + ( + "uses str for config entry disabled_by. This is deprecated and will" + " stop working in Home Assistant 2022.3, it should be updated to" + " use ConfigEntryDisabler instead" + ), + error_if_core=False, + ) + disabled_by = ConfigEntryDisabler(disabled_by) _setter(self, "disabled_by", disabled_by) # Supports unload @@ -440,15 +399,14 @@ class ConfigEntry(Generic[_DataT]): _setter(self, "setup_lock", asyncio.Lock()) # Reauth lock to prevent concurrent reauth flows _setter(self, "_reauth_lock", asyncio.Lock()) + # Reconfigure lock to prevent concurrent reconfigure flows + _setter(self, "_reconfigure_lock", asyncio.Lock()) _setter(self, "_tasks", set()) _setter(self, "_background_tasks", set()) _setter(self, "_integration_for_domain", None) _setter(self, "_tries", 0) - _setter(self, "created_at", created_at or utcnow()) - _setter(self, "modified_at", modified_at or utcnow()) - _setter(self, "discovery_keys", discovery_keys) def __repr__(self) -> str: """Representation of ConfigEntry.""" @@ -460,15 +418,30 @@ class ConfigEntry(Generic[_DataT]): def __setattr__(self, key: str, value: Any) -> None: """Set an attribute.""" if key in UPDATE_ENTRY_CONFIG_ENTRY_ATTRS: - raise AttributeError( - f"{key} cannot be changed directly, use async_update_entry instead" + if key == "unique_id": + # Setting unique_id directly will corrupt internal state + # There is no deprecation period for this key + # as changing them will corrupt internal state + # so we raise an error here + raise AttributeError( + "unique_id cannot be changed directly, use async_update_entry instead" + ) + report( + f'sets "{key}" directly to update a config entry. 
This is deprecated and will' + " stop working in Home Assistant 2024.9, it should be updated to use" + " async_update_entry instead", + error_if_core=False, ) - if key in FROZEN_CONFIG_ENTRY_ATTRS: + + elif key in FROZEN_CONFIG_ENTRY_ATTRS: + # These attributes are frozen and cannot be changed + # There is no deprecation period for these + # as changing them will corrupt internal state + # so we raise an error here raise AttributeError(f"{key} cannot be changed") super().__setattr__(key, value) - self.clear_state_cache() - self.clear_storage_cache() + self.clear_cache() @property def supports_options(self) -> bool: @@ -494,18 +467,16 @@ class ConfigEntry(Generic[_DataT]): ) return self._supports_reconfigure or False - def clear_state_cache(self) -> None: - """Clear cached properties that are included in as_json_fragment.""" + def clear_cache(self) -> None: + """Clear cached properties.""" self.__dict__.pop("as_json_fragment", None) @cached_property def as_json_fragment(self) -> json_fragment: - """Return JSON fragment of a config entry that is used for the API.""" + """Return JSON fragment of a config entry.""" json_repr = { - "created_at": self.created_at.timestamp(), "entry_id": self.entry_id, "domain": self.domain, - "modified_at": self.modified_at.timestamp(), "title": self.title, "source": self.source, "state": self.state.value, @@ -522,15 +493,6 @@ class ConfigEntry(Generic[_DataT]): } return json_fragment(json_bytes(json_repr)) - def clear_storage_cache(self) -> None: - """Clear cached properties that are included in as_storage_fragment.""" - self.__dict__.pop("as_storage_fragment", None) - - @cached_property - def as_storage_fragment(self) -> json_fragment: - """Return a storage fragment for this entry.""" - return json_fragment(json_bytes_sorted(self.as_dict())) - async def async_setup( self, hass: HomeAssistant, @@ -538,21 +500,10 @@ class ConfigEntry(Generic[_DataT]): integration: loader.Integration | None = None, ) -> None: """Set up an entry.""" + current_entry.set(self) if self.source == SOURCE_IGNORE or self.disabled_by: return - current_entry.set(self) - try: - await self.__async_setup_with_context(hass, integration) - finally: - current_entry.set(None) - - async def __async_setup_with_context( - self, - hass: HomeAssistant, - integration: loader.Integration | None, - ) -> None: - """Set up an entry, with current_entry set.""" if integration is None and not (integration := self._integration_for_domain): integration = await loader.async_get_integration(hass, self.domain) self._integration_for_domain = integration @@ -872,11 +823,6 @@ class ConfigEntry(Generic[_DataT]): async def async_remove(self, hass: HomeAssistant) -> None: """Invoke remove callback on component.""" - old_modified_at = self.modified_at - object.__setattr__(self, "modified_at", utcnow()) - self.clear_state_cache() - self.clear_storage_cache() - if self.source == SOURCE_IGNORE: return @@ -908,8 +854,6 @@ class ConfigEntry(Generic[_DataT]): self.title, integration.domain, ) - # Restore modified_at - object.__setattr__(self, "modified_at", old_modified_at) @callback def _async_set_state( @@ -932,10 +876,7 @@ class ConfigEntry(Generic[_DataT]): "error_reason_translation_placeholders", error_reason_translation_placeholders, ) - self.clear_state_cache() - # Storage cache is not cleared here because the state is not stored - # in storage and we do not want to clear the cache on every state change - # since state changes are frequent. 
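# A minimal sketch of the update path the attribute guard above enforces: managed entry
# fields (title, data, options, unique_id, ...) go through async_update_entry rather
# than direct assignment. The entry_id value and new title are assumptions for
# illustration.
from homeassistant.core import HomeAssistant, callback


@callback
def rename_entry(hass: HomeAssistant, entry_id: str) -> None:
    """Rename a config entry the supported way."""
    entry = hass.config_entries.async_get_entry(entry_id)
    if entry is None:
        return
    # entry.title = "New name"  # rejected by __setattr__ above
    hass.config_entries.async_update_entry(entry, title="New name")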
+ self.clear_cache() async_dispatcher_send_internal( hass, SIGNAL_CONFIG_ENTRY_CHANGED, ConfigEntryChange.UPDATED, self ) @@ -1001,14 +942,11 @@ class ConfigEntry(Generic[_DataT]): def as_dict(self) -> dict[str, Any]: """Return dictionary version of this entry.""" return { - "created_at": self.created_at.isoformat(), "data": dict(self.data), - "discovery_keys": dict(self.discovery_keys), "disabled_by": self.disabled_by, "domain": self.domain, "entry_id": self.entry_id, "minor_version": self.minor_version, - "modified_at": self.modified_at.isoformat(), "options": dict(self.options), "pref_disable_new_entities": self.pref_disable_new_entities, "pref_disable_polling": self.pref_disable_polling, @@ -1057,7 +995,7 @@ class ConfigEntry(Generic[_DataT]): def async_start_reauth( self, hass: HomeAssistant, - context: ConfigFlowContext | None = None, + context: dict[str, Any] | None = None, data: dict[str, Any] | None = None, ) -> None: """Start a reauth flow.""" @@ -1075,7 +1013,7 @@ class ConfigEntry(Generic[_DataT]): async def _async_init_reauth( self, hass: HomeAssistant, - context: ConfigFlowContext | None = None, + context: dict[str, Any] | None = None, data: dict[str, Any] | None = None, ) -> None: """Start a reauth flow.""" @@ -1087,12 +1025,12 @@ class ConfigEntry(Generic[_DataT]): return result = await hass.config_entries.flow.async_init( self.domain, - context=ConfigFlowContext( - source=SOURCE_REAUTH, - entry_id=self.entry_id, - title_placeholders={"name": self.title}, - unique_id=self.unique_id, - ) + context={ + "source": SOURCE_REAUTH, + "entry_id": self.entry_id, + "title_placeholders": {"name": self.title}, + "unique_id": self.unique_id, + } | (context or {}), data=self.data | (data or {}), ) @@ -1103,7 +1041,7 @@ class ConfigEntry(Generic[_DataT]): issue_id = f"config_entry_reauth_{self.domain}_{self.entry_id}" ir.async_create_issue( hass, - HOMEASSISTANT_DOMAIN, + HA_DOMAIN, issue_id, data={"flow_id": result["flow_id"]}, is_fixable=False, @@ -1113,6 +1051,49 @@ class ConfigEntry(Generic[_DataT]): translation_placeholders={"name": self.title}, ) + @callback + def async_start_reconfigure( + self, + hass: HomeAssistant, + context: dict[str, Any] | None = None, + data: dict[str, Any] | None = None, + ) -> None: + """Start a reconfigure flow.""" + # We will check this again in the task when we hold the lock, + # but we also check it now to try to avoid creating the task. 
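# A minimal sketch of how integrations usually reach async_start_reauth (and, by the
# same machinery, a reconfigure/reauth issue): most code never calls it directly, it
# raises ConfigEntryAuthFailed and lets the core start the flow. The coordinator and
# the token check are assumptions for illustration.
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator


class ExampleCoordinator(DataUpdateCoordinator[dict]):
    """Hypothetical coordinator for a cloud service."""

    async def _async_update_data(self) -> dict:
        token_valid = False  # placeholder for a real API check
        if not token_valid:
            # The core turns this into a reauth flow for the owning entry.
            raise ConfigEntryAuthFailed("Token expired")
        return {}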
+ if any(self.async_get_active_flows(hass, {SOURCE_RECONFIGURE, SOURCE_REAUTH})): + # Reconfigure or reauth flow already in progress for this entry + return + hass.async_create_task( + self._async_init_reconfigure(hass, context, data), + f"config entry reconfigure {self.title} {self.domain} {self.entry_id}", + ) + + async def _async_init_reconfigure( + self, + hass: HomeAssistant, + context: dict[str, Any] | None = None, + data: dict[str, Any] | None = None, + ) -> None: + """Start a reconfigure flow.""" + async with self._reconfigure_lock: + if any( + self.async_get_active_flows(hass, {SOURCE_RECONFIGURE, SOURCE_REAUTH}) + ): + # Reconfigure or reauth flow already in progress for this entry + return + await hass.config_entries.flow.async_init( + self.domain, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": self.entry_id, + "title_placeholders": {"name": self.title}, + "unique_id": self.unique_id, + } + | (context or {}), + data=self.data | (data or {}), + ) + @callback def async_get_active_flows( self, hass: HomeAssistant, sources: set[str] @@ -1202,9 +1183,7 @@ def _report_non_awaited_platform_forwards(entry: ConfigEntry, what: str) -> None ) -class ConfigEntriesFlowManager( - data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult] -): +class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): """Manage all the config entry flows that are in progress.""" _flow_result = ConfigFlowResult @@ -1219,12 +1198,8 @@ class ConfigEntriesFlowManager( super().__init__(hass) self.config_entries = config_entries self._hass_config = hass_config - self._pending_import_flows: defaultdict[ - str, dict[str, asyncio.Future[None]] - ] = defaultdict(dict) - self._initialize_futures: defaultdict[str, set[asyncio.Future[None]]] = ( - defaultdict(set) - ) + self._pending_import_flows: dict[str, dict[str, asyncio.Future[None]]] = {} + self._initialize_futures: dict[str, list[asyncio.Future[None]]] = {} self._discovery_debouncer = Debouncer[None]( hass, _LOGGER, @@ -1244,47 +1219,26 @@ class ConfigEntriesFlowManager( @callback def _async_has_other_discovery_flows(self, flow_id: str) -> bool: """Check if there are any other discovery flows in progress.""" - for flow in self._progress.values(): - if flow.flow_id != flow_id and flow.context["source"] in DISCOVERY_SOURCES: - return True - return False + return any( + flow.context["source"] in DISCOVERY_SOURCES and flow.flow_id != flow_id + for flow in self._progress.values() + ) async def async_init( - self, - handler: str, - *, - context: ConfigFlowContext | None = None, - data: Any = None, + self, handler: str, *, context: dict[str, Any] | None = None, data: Any = None ) -> ConfigFlowResult: """Start a configuration flow.""" if not context or "source" not in context: raise KeyError("Context not set or doesn't have a source set") - # reauth/reconfigure flows should be linked to a config entry - if (source := context["source"]) in { - SOURCE_REAUTH, - SOURCE_RECONFIGURE, - } and "entry_id" not in context: - # Deprecated in 2024.12, should fail in 2025.12 - report( - f"initialises a {source} flow without a link to the config entry", - error_if_integration=False, - error_if_core=True, - ) - flow_id = ulid_util.ulid_now() # Avoid starting a config flow on an integration that only supports # a single config entry, but which already has an entry if ( - source not in {SOURCE_IGNORE, SOURCE_REAUTH, SOURCE_RECONFIGURE} - and ( - self.config_entries.async_has_entries(handler, include_ignore=False) - or ( - 
self.config_entries.async_has_entries(handler, include_ignore=True) - and source != SOURCE_USER - ) - ) + context.get("source") + not in {SOURCE_IGNORE, SOURCE_REAUTH, SOURCE_UNIGNORE, SOURCE_RECONFIGURE} + and self.config_entries.async_has_entries(handler, include_ignore=False) and await _support_single_config_entry_only(self.hass, handler) ): return ConfigFlowResult( @@ -1292,17 +1246,18 @@ class ConfigEntriesFlowManager( flow_id=flow_id, handler=handler, reason="single_instance_allowed", - translation_domain=HOMEASSISTANT_DOMAIN, + translation_domain=HA_DOMAIN, ) loop = self.hass.loop - if source == SOURCE_IMPORT: - self._pending_import_flows[handler][flow_id] = loop.create_future() + if context["source"] == SOURCE_IMPORT: + self._pending_import_flows.setdefault(handler, {})[flow_id] = ( + loop.create_future() + ) cancel_init_future = loop.create_future() - handler_init_futures = self._initialize_futures[handler] - handler_init_futures.add(cancel_init_future) + self._initialize_futures.setdefault(handler, []).append(cancel_init_future) try: async with interrupt( cancel_init_future, @@ -1313,13 +1268,8 @@ class ConfigEntriesFlowManager( except FlowCancelledError as ex: raise asyncio.CancelledError from ex finally: - handler_init_futures.remove(cancel_init_future) - if not handler_init_futures: - del self._initialize_futures[handler] - if handler in self._pending_import_flows: - self._pending_import_flows[handler].pop(flow_id, None) - if not self._pending_import_flows[handler]: - del self._pending_import_flows[handler] + self._initialize_futures[handler].remove(cancel_init_future) + self._pending_import_flows.get(handler, {}).pop(flow_id, None) if result["type"] != data_entry_flow.FlowResultType.ABORT: await self.async_post_init(flow, result) @@ -1330,7 +1280,7 @@ class ConfigEntriesFlowManager( self, flow_id: str, handler: str, - context: ConfigFlowContext, + context: dict, data: Any, ) -> tuple[ConfigFlow, ConfigFlowResult]: """Run the init in a task to allow it to be canceled at shutdown.""" @@ -1346,18 +1296,11 @@ class ConfigEntriesFlowManager( try: result = await self._async_handle_step(flow, flow.init_step, data) finally: - self._set_pending_import_done(flow) + init_done = self._pending_import_flows.get(handler, {}).get(flow_id) + if init_done and not init_done.done(): + init_done.set_result(None) return flow, result - def _set_pending_import_done(self, flow: ConfigFlow) -> None: - """Set pending import flow as done.""" - if ( - (handler_import_flows := self._pending_import_flows.get(flow.handler)) - and (init_done := handler_import_flows.get(flow.flow_id)) - and not init_done.done() - ): - init_done.set_result(None) - @callback def async_shutdown(self) -> None: """Cancel any initializing flows.""" @@ -1368,21 +1311,19 @@ class ConfigEntriesFlowManager( async def async_finish_flow( self, - flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult], + flow: data_entry_flow.FlowHandler[ConfigFlowResult], result: ConfigFlowResult, ) -> ConfigFlowResult: - """Finish a config flow and add an entry. - - This method is called when a flow step returns FlowResultType.ABORT or - FlowResultType.CREATE_ENTRY. - """ + """Finish a config flow and add an entry.""" flow = cast(ConfigFlow, flow) # Mark the step as done. # We do this to avoid a circular dependency where async_finish_flow sets up a # new entry, which needs the integration to be set up, which is waiting for # init to be done. 
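# A minimal sketch of where SOURCE_IMPORT flows (tracked by the pending-import futures
# above) normally come from: an integration's YAML setup forwarding configuration into
# a config entry. DOMAIN and the config shape are assumptions for illustration.
from homeassistant import config_entries
from homeassistant.core import HomeAssistant
from homeassistant.helpers.typing import ConfigType

DOMAIN = "example_hub"  # hypothetical


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Forward YAML configuration into a config entry via an import flow."""
    if DOMAIN in config:
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN,
                context={"source": config_entries.SOURCE_IMPORT},
                data=dict(config[DOMAIN]),
            )
        )
    return True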
- self._set_pending_import_done(flow) + init_done = self._pending_import_flows.get(flow.handler, {}).get(flow.flow_id) + if init_done and not init_done.done(): + init_done.set_result(None) # Remove notification if no other discovery config entries in progress if not self._async_has_other_discovery_flows(flow.flow_id): @@ -1394,44 +1335,9 @@ class ConfigEntriesFlowManager( entry := self.config_entries.async_get_entry(entry_id) ) is not None: issue_id = f"config_entry_reauth_{entry.domain}_{entry.entry_id}" - ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) + ir.async_delete_issue(self.hass, HA_DOMAIN, issue_id) if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY: - # If there's an ignored config entry with a matching unique ID, - # update the discovery key. - if ( - (discovery_key := flow.context.get("discovery_key")) - and (unique_id := flow.unique_id) is not None - and ( - entry := self.config_entries.async_entry_for_domain_unique_id( - result["handler"], unique_id - ) - ) - and discovery_key - not in ( - known_discovery_keys := entry.discovery_keys.get( - discovery_key.domain, () - ) - ) - ): - new_discovery_keys = MappingProxyType( - entry.discovery_keys - | { - discovery_key.domain: tuple( - [*known_discovery_keys, discovery_key][-10:] - ) - } - ) - _LOGGER.debug( - "Updating discovery keys for %s entry %s %s -> %s", - entry.domain, - unique_id, - entry.discovery_keys, - new_discovery_keys, - ) - self.config_entries.async_update_entry( - entry, discovery_keys=new_discovery_keys - ) return result # Avoid adding a config entry for a integration @@ -1446,7 +1352,7 @@ class ConfigEntriesFlowManager( flow_id=flow.flow_id, handler=flow.handler, reason="single_instance_allowed", - translation_domain=HOMEASSISTANT_DOMAIN, + translation_domain=HA_DOMAIN, ) # Check if config entry exists with unique ID. Unload it. @@ -1463,7 +1369,6 @@ class ConfigEntriesFlowManager( or progress_unique_id == DEFAULT_DISCOVERY_UNIQUE_ID ): self.async_abort(progress_flow_id) - continue # Abort any flows in progress for the same handler # when integration allows only one config entry @@ -1489,15 +1394,8 @@ class ConfigEntriesFlowManager( if existing_entry is not None and existing_entry.state.recoverable: await self.config_entries.async_unload(existing_entry.entry_id) - discovery_key = flow.context.get("discovery_key") - discovery_keys = ( - MappingProxyType({discovery_key.domain: (discovery_key,)}) - if discovery_key - else MappingProxyType({}) - ) entry = ConfigEntry( data=result["data"], - discovery_keys=discovery_keys, domain=result["handler"], minor_version=result["minor_version"], options=result["options"], @@ -1507,24 +1405,16 @@ class ConfigEntriesFlowManager( version=result["version"], ) - if existing_entry is not None: - # Unload and remove the existing entry - await self.config_entries._async_remove(existing_entry.entry_id) # noqa: SLF001 await self.config_entries.async_add(entry) if existing_entry is not None: - # Clean up devices and entities belonging to the existing entry - self.config_entries._async_clean_up(existing_entry) # noqa: SLF001 + await self.config_entries.async_remove(existing_entry.entry_id) result["result"] = entry return result async def async_create_flow( - self, - handler_key: str, - *, - context: ConfigFlowContext | None = None, - data: Any = None, + self, handler_key: str, *, context: dict | None = None, data: Any = None ) -> ConfigFlow: """Create a flow for specified handler. 
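# A minimal sketch of the flow-side counterpart to the unique-id handling above:
# setting a unique ID and aborting if it is already configured. The domain, schema and
# "serial" field are assumptions for illustration.
import voluptuous as vol

from homeassistant import config_entries
from homeassistant.config_entries import ConfigFlowResult


class ExampleConfigFlow(config_entries.ConfigFlow, domain="example_hub"):
    """Hypothetical config flow keyed on a device serial number."""

    async def async_step_user(self, user_input: dict | None = None) -> ConfigFlowResult:
        if user_input is not None:
            await self.async_set_unique_id(user_input["serial"])
            self._abort_if_unique_id_configured()
            return self.async_create_entry(title=user_input["serial"], data=user_input)
        return self.async_show_form(
            step_id="user", data_schema=vol.Schema({vol.Required("serial"): str})
        )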
@@ -1542,7 +1432,7 @@ class ConfigEntriesFlowManager( async def async_post_init( self, - flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult], + flow: data_entry_flow.FlowHandler[ConfigFlowResult], result: ConfigFlowResult, ) -> None: """After a flow is initialised trigger new flow notifications.""" @@ -1578,35 +1468,6 @@ class ConfigEntriesFlowManager( notification_id=DISCOVERY_NOTIFICATION_ID, ) - @callback - def async_has_matching_discovery_flow( - self, handler: str, match_context: ConfigFlowContext, data: Any - ) -> bool: - """Check if an existing matching discovery flow is in progress. - - A flow with the same handler, context, and data. - - If match_context is passed, only return flows with a context that is a - superset of match_context. - """ - if not (flows := self._handler_progress_index.get(handler)): - return False - match_items = match_context.items() - for progress in flows: - if match_items <= progress.context.items() and progress.init_data == data: - return True - return False - - @callback - def async_has_matching_flow(self, flow: ConfigFlow) -> bool: - """Check if an existing matching flow is in progress.""" - if not (flows := self._handler_progress_index.get(flow.handler)): - return False - for other_flow in set(flows): - if other_flow is not flow and flow.is_matching(other_flow): # type: ignore[arg-type] - return True - return False - class ConfigEntryItems(UserDict[str, ConfigEntry]): """Container for config items, maps config_entry_id -> entry. @@ -1621,7 +1482,7 @@ class ConfigEntryItems(UserDict[str, ConfigEntry]): super().__init__() self._hass = hass self._domain_index: dict[str, list[ConfigEntry]] = {} - self._domain_unique_id_index: dict[str, dict[str, list[ConfigEntry]]] = {} + self._domain_unique_id_index: dict[str, dict[str, ConfigEntry]] = {} def values(self) -> ValuesView[ConfigEntry]: """Return the underlying values to avoid __iter__ overhead.""" @@ -1630,7 +1491,6 @@ class ConfigEntryItems(UserDict[str, ConfigEntry]): def __setitem__(self, entry_id: str, entry: ConfigEntry) -> None: """Add an item.""" data = self.data - self.check_unique_id(entry) if entry_id in data: # This is likely a bug in a test that is adding the same entry twice. # In the future, once we have fixed the tests, this will raise HomeAssistantError. @@ -1639,50 +1499,32 @@ class ConfigEntryItems(UserDict[str, ConfigEntry]): data[entry_id] = entry self._index_entry(entry) - def check_unique_id(self, entry: ConfigEntry) -> None: - """Check config entry unique id. 
- - For a string unique id (this is the correct case): return - For a hashable non string unique id: log warning - For a non-hashable unique id: raise error - """ - if (unique_id := entry.unique_id) is None: - return - if isinstance(unique_id, str): - # Unique id should be a string - return - if isinstance(unique_id, Hashable): # type: ignore[unreachable] - # Checks for other non-string was added in HA Core 2024.10 - # In HA Core 2025.10, we should remove the error and instead fail - report_issue = async_suggest_report_issue( - self._hass, integration_domain=entry.domain - ) - _LOGGER.error( - ( - "Config entry '%s' from integration %s has an invalid unique_id" - " '%s' of type %s when a string is expected, please %s" - ), - entry.title, - entry.domain, - entry.unique_id, - type(entry.unique_id).__name__, - report_issue, - ) - else: - # Guard against integrations using unhashable unique_id - # In HA Core 2024.11, the guard was changed from warning to failing - raise HomeAssistantError( - f"The entry unique id {unique_id} is not a string." - ) - def _index_entry(self, entry: ConfigEntry) -> None: """Index an entry.""" - self.check_unique_id(entry) self._domain_index.setdefault(entry.domain, []).append(entry) if entry.unique_id is not None: - self._domain_unique_id_index.setdefault(entry.domain, {}).setdefault( - entry.unique_id, [] - ).append(entry) + unique_id_hash = entry.unique_id + # Guard against integrations using unhashable unique_id + # In HA Core 2024.9, we should remove the guard and instead fail + if not isinstance(entry.unique_id, Hashable): + unique_id_hash = str(entry.unique_id) # type: ignore[unreachable] + report_issue = async_suggest_report_issue( + self._hass, integration_domain=entry.domain + ) + _LOGGER.error( + ( + "Config entry '%s' from integration %s has an invalid unique_id" + " '%s', please %s" + ), + entry.title, + entry.domain, + entry.unique_id, + report_issue, + ) + + self._domain_unique_id_index.setdefault(entry.domain, {})[ + unique_id_hash + ] = entry def _unindex_entry(self, entry_id: str) -> None: """Unindex an entry.""" @@ -1692,9 +1534,10 @@ class ConfigEntryItems(UserDict[str, ConfigEntry]): if not self._domain_index[domain]: del self._domain_index[domain] if (unique_id := entry.unique_id) is not None: - self._domain_unique_id_index[domain][unique_id].remove(entry) - if not self._domain_unique_id_index[domain][unique_id]: - del self._domain_unique_id_index[domain][unique_id] + # Check type first to avoid expensive isinstance call + if type(unique_id) is not str and not isinstance(unique_id, Hashable): # noqa: E721 + unique_id = str(entry.unique_id) # type: ignore[unreachable] + del self._domain_unique_id_index[domain][unique_id] if not self._domain_unique_id_index[domain]: del self._domain_unique_id_index[domain] @@ -1710,11 +1553,9 @@ class ConfigEntryItems(UserDict[str, ConfigEntry]): """ entry_id = entry.entry_id self._unindex_entry(entry_id) - self.check_unique_id(entry) object.__setattr__(entry, "unique_id", new_unique_id) self._index_entry(entry) - entry.clear_state_cache() - entry.clear_storage_cache() + entry.clear_cache() def get_entries_for_domain(self, domain: str) -> list[ConfigEntry]: """Get entries for a domain.""" @@ -1724,16 +1565,10 @@ class ConfigEntryItems(UserDict[str, ConfigEntry]): self, domain: str, unique_id: str ) -> ConfigEntry | None: """Get entry by domain and unique id.""" - if unique_id is None: - return None # type: ignore[unreachable] - if not isinstance(unique_id, Hashable): - raise HomeAssistantError( - f"The entry 
unique id {unique_id} is not a string." - ) - entries = self._domain_unique_id_index.get(domain, {}).get(unique_id) - if not entries: - return None - return entries[0] + # Check type first to avoid expensive isinstance call + if type(unique_id) is not str and not isinstance(unique_id, Hashable): # noqa: E721 + unique_id = str(unique_id) # type: ignore[unreachable] + return self._domain_unique_id_index.get(domain, {}).get(unique_id) class ConfigEntryStore(storage.Store[dict[str, list[dict[str, Any]]]]): @@ -1756,39 +1591,25 @@ class ConfigEntryStore(storage.Store[dict[str, list[dict[str, Any]]]]): ) -> dict[str, Any]: """Migrate to the new version.""" data = old_data - if old_major_version == 1: - if old_minor_version < 2: - # Version 1.2 implements migration and freezes the available keys - for entry in data["entries"]: - # Populate keys which were introduced before version 1.2 + if old_major_version == 1 and old_minor_version < 2: + # Version 1.2 implements migration and freezes the available keys + for entry in data["entries"]: + # Populate keys which were introduced before version 1.2 - pref_disable_new_entities = entry.get("pref_disable_new_entities") - if pref_disable_new_entities is None and "system_options" in entry: - pref_disable_new_entities = entry.get("system_options", {}).get( - "disable_new_entities" - ) - - entry.setdefault("disabled_by", entry.get("disabled_by")) - entry.setdefault("minor_version", entry.get("minor_version", 1)) - entry.setdefault("options", entry.get("options", {})) - entry.setdefault( - "pref_disable_new_entities", pref_disable_new_entities + pref_disable_new_entities = entry.get("pref_disable_new_entities") + if pref_disable_new_entities is None and "system_options" in entry: + pref_disable_new_entities = entry.get("system_options", {}).get( + "disable_new_entities" ) - entry.setdefault( - "pref_disable_polling", entry.get("pref_disable_polling") - ) - entry.setdefault("unique_id", entry.get("unique_id")) - if old_minor_version < 3: - # Version 1.3 adds the created_at and modified_at fields - created_at = utc_from_timestamp(0).isoformat() - for entry in data["entries"]: - entry["created_at"] = entry["modified_at"] = created_at - - if old_minor_version < 4: - # Version 1.4 adds discovery_keys - for entry in data["entries"]: - entry["discovery_keys"] = {} + entry.setdefault("disabled_by", entry.get("disabled_by")) + entry.setdefault("minor_version", entry.get("minor_version", 1)) + entry.setdefault("options", entry.get("options", {})) + entry.setdefault("pref_disable_new_entities", pref_disable_new_entities) + entry.setdefault( + "pref_disable_polling", entry.get("pref_disable_polling") + ) + entry.setdefault("unique_id", entry.get("unique_id")) if old_major_version > 1: raise NotImplementedError @@ -1843,12 +1664,12 @@ class ConfigEntries: entries = self._entries.get_entries_for_domain(domain) if include_ignore and include_disabled: return bool(entries) - for entry in entries: - if (include_ignore or entry.source != SOURCE_IGNORE) and ( - include_disabled or not entry.disabled_by - ): - return True - return False + return any( + entry + for entry in entries + if (include_ignore or entry.source != SOURCE_IGNORE) + and (include_disabled or not entry.disabled_by) + ) @callback def async_entries( @@ -1873,16 +1694,6 @@ class ConfigEntries: and (include_disabled or not entry.disabled_by) ] - @callback - def async_loaded_entries(self, domain: str) -> list[ConfigEntry]: - """Return loaded entries for a specific domain. 
- - This will exclude ignored or disabled config entruis. - """ - entries = self._entries.get_entries_for_domain(domain) - - return [entry for entry in entries if entry.state == ConfigEntryState.LOADED] - @callback def async_entry_for_domain_unique_id( self, domain: str, unique_id: str @@ -1898,27 +1709,12 @@ class ConfigEntries: ) self._entries[entry.entry_id] = entry - self.async_update_issues() self._async_dispatch(ConfigEntryChange.ADDED, entry) await self.async_setup(entry.entry_id) self._async_schedule_save() async def async_remove(self, entry_id: str) -> dict[str, Any]: - """Remove, unload and clean up after an entry.""" - unload_success, entry = await self._async_remove(entry_id) - self._async_clean_up(entry) - - for discovery_domain in entry.discovery_keys: - async_dispatcher_send_internal( - self.hass, - signal_discovered_config_entry_removed(discovery_domain), - entry, - ) - - return {"require_restart": not unload_success} - - async def _async_remove(self, entry_id: str) -> tuple[bool, ConfigEntry]: - """Remove and unload an entry.""" + """Remove an entry.""" if (entry := self.async_get_entry(entry_id)) is None: raise UnknownEntry @@ -1931,16 +1727,8 @@ class ConfigEntries: await entry.async_remove(self.hass) del self._entries[entry.entry_id] - self.async_update_issues() self._async_schedule_save() - return (unload_success, entry) - - @callback - def _async_clean_up(self, entry: ConfigEntry) -> None: - """Clean up after an entry.""" - entry_id = entry.entry_id - dev_reg = device_registry.async_get(self.hass) ent_reg = entity_registry.async_get(self.hass) @@ -1956,9 +1744,24 @@ class ConfigEntries: if "flow_id" in progress_flow: self.hass.config_entries.flow.async_abort(progress_flow["flow_id"]) issue_id = f"config_entry_reauth_{entry.domain}_{entry.entry_id}" - ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) + ir.async_delete_issue(self.hass, HA_DOMAIN, issue_id) + + # After we have fully removed an "ignore" config entry we can try and rediscover + # it so that a user is able to immediately start configuring it. We do this by + # starting a new flow with the 'unignore' step. If the integration doesn't + # implement async_step_unignore then this will be a no-op. 
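# A minimal sketch of how an integration can opt in to the SOURCE_UNIGNORE rediscovery
# described above by overriding async_step_unignore (the base class simply aborts).
# The domain and the confirmation step are assumptions for illustration.
from homeassistant import config_entries
from homeassistant.config_entries import ConfigFlowResult


class RediscoverFlow(config_entries.ConfigFlow, domain="example_hub"):
    """Hypothetical flow reacting to an ignore entry being removed."""

    async def async_step_unignore(self, user_input: dict) -> ConfigFlowResult:
        await self.async_set_unique_id(user_input["unique_id"])
        # A real flow would look the device up again here before confirming.
        return await self.async_step_discovery_confirm()

    async def async_step_discovery_confirm(
        self, user_input: dict | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            return self.async_create_entry(title="Example device", data={})
        return self.async_show_form(step_id="discovery_confirm")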
+ if entry.source == SOURCE_IGNORE: + self.hass.async_create_task_internal( + self.hass.config_entries.flow.async_init( + entry.domain, + context={"source": SOURCE_UNIGNORE}, + data={"unique_id": entry.unique_id}, + ), + f"config entry unignore {entry.title} {entry.domain} {entry.unique_id}", + ) self._async_dispatch(ConfigEntryChange.REMOVED, entry) + return {"require_restart": not unload_success} @callback def _async_shutdown(self, event: Event) -> None: @@ -1982,19 +1785,11 @@ class ConfigEntries: entry_id = entry["entry_id"] config_entry = ConfigEntry( - created_at=datetime.fromisoformat(entry["created_at"]), data=entry["data"], disabled_by=try_parse_enum(ConfigEntryDisabler, entry["disabled_by"]), - discovery_keys=MappingProxyType( - { - domain: tuple(DiscoveryKey.from_json_dict(key) for key in keys) - for domain, keys in entry["discovery_keys"].items() - } - ), domain=entry["domain"], entry_id=entry_id, minor_version=entry["minor_version"], - modified_at=datetime.fromisoformat(entry["modified_at"]), options=entry["options"], pref_disable_new_entities=entry["pref_disable_new_entities"], pref_disable_polling=entry["pref_disable_polling"], @@ -2006,7 +1801,6 @@ class ConfigEntries: entries[entry_id] = config_entry self._entries = entries - self.async_update_issues() async def async_setup(self, entry_id: str, _lock: bool = True) -> bool: """Set up a config entry. @@ -2117,7 +1911,19 @@ class ConfigEntries: if (entry := self.async_get_entry(entry_id)) is None: raise UnknownEntry - _validate_item(disabled_by=disabled_by) + if isinstance(disabled_by, str) and not isinstance( + disabled_by, ConfigEntryDisabler + ): + report( # type: ignore[unreachable] + ( + "uses str for config entry disabled_by. This is deprecated and will" + " stop working in Home Assistant 2022.3, it should be updated to" + " use ConfigEntryDisabler instead" + ), + error_if_core=False, + ) + disabled_by = ConfigEntryDisabler(disabled_by) + if entry.disabled_by is disabled_by: return True @@ -2148,8 +1954,6 @@ class ConfigEntries: entry: ConfigEntry, *, data: Mapping[str, Any] | UndefinedType = UNDEFINED, - discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]] - | UndefinedType = UNDEFINED, minor_version: int | UndefinedType = UNDEFINED, options: Mapping[str, Any] | UndefinedType = UNDEFINED, pref_disable_new_entities: bool | UndefinedType = UNDEFINED, @@ -2174,37 +1978,11 @@ class ConfigEntries: _setter = object.__setattr__ if unique_id is not UNDEFINED and entry.unique_id != unique_id: - # Deprecated in 2024.11, should fail in 2025.11 - if ( - # flipr creates duplicates during migration, and asks users to - # remove the duplicate. We don't need warn about it here too. 
- # We should remove the special case for "flipr" in HA Core 2025.4, - # when the flipr migration period ends - entry.domain != "flipr" - and unique_id is not None - and self.async_entry_for_domain_unique_id(entry.domain, unique_id) - is not None - ): - report_issue = async_suggest_report_issue( - self.hass, integration_domain=entry.domain - ) - _LOGGER.error( - ( - "Unique id of config entry '%s' from integration %s changed to" - " '%s' which is already in use, please %s" - ), - entry.title, - entry.domain, - unique_id, - report_issue, - ) # Reindex the entry if the unique_id has changed self._entries.update_unique_id(entry, unique_id) - self.async_update_issues() changed = True for attr, value in ( - ("discovery_keys", discovery_keys), ("minor_version", minor_version), ("pref_disable_new_entities", pref_disable_new_entities), ("pref_disable_polling", pref_disable_polling), @@ -2228,8 +2006,6 @@ class ConfigEntries: if not changed: return False - _setter(entry, "modified_at", utcnow()) - for listener in entry.update_listeners: self.hass.async_create_task( listener(self.hass, entry), @@ -2237,8 +2013,7 @@ class ConfigEntries: ) self._async_schedule_save() - entry.clear_state_cache() - entry.clear_storage_cache() + entry.clear_cache() self._async_dispatch(ConfigEntryChange.UPDATED, entry) return True @@ -2421,10 +2196,7 @@ class ConfigEntries: @callback def _data_to_save(self) -> dict[str, list[dict[str, Any]]]: """Return data to save.""" - # typing does not know that the storage fragment will serialize to a dict - return { - "entries": [entry.as_storage_fragment for entry in self._entries.values()] # type: ignore[misc] - } + return {"entries": [entry.as_dict() for entry in self._entries.values()]} async def async_wait_component(self, entry: ConfigEntry) -> bool: """Wait for an entry's component to load and return if the entry is loaded. @@ -2443,84 +2215,6 @@ class ConfigEntries: return False return entry.state is ConfigEntryState.LOADED - @callback - def async_update_issues(self) -> None: - """Update unique id collision issues.""" - issue_registry = ir.async_get(self.hass) - issues: set[str] = set() - - for issue in issue_registry.issues.values(): - if ( - issue.domain != HOMEASSISTANT_DOMAIN - or not (issue_data := issue.data) - or issue_data.get("issue_type") != ISSUE_UNIQUE_ID_COLLISION - ): - continue - issues.add(issue.issue_id) - - for domain, unique_ids in self._entries._domain_unique_id_index.items(): # noqa: SLF001 - # flipr creates duplicates during migration, and asks users to - # remove the duplicate. We don't need warn about it here too. - # We should remove the special case for "flipr" in HA Core 2025.4, - # when the flipr migration period ends - if domain == "flipr": - continue - for unique_id, entries in unique_ids.items(): - # We might mutate the list of entries, so we need a copy to not mess up - # the index - entries = list(entries) - - # There's no need to raise an issue for ignored entries, we can - # safely remove them once we no longer allow unique id collisions. 
- # Iterate over a copy of the copy to allow mutating while iterating - for entry in list(entries): - if entry.source == SOURCE_IGNORE: - entries.remove(entry) - - if len(entries) < 2: - continue - issue_id = f"{ISSUE_UNIQUE_ID_COLLISION}_{domain}_{unique_id}" - issues.discard(issue_id) - titles = [f"'{entry.title}'" for entry in entries] - translation_placeholders = { - "domain": domain, - "configure_url": f"/config/integrations/integration/{domain}", - "unique_id": str(unique_id), - } - if len(titles) <= UNIQUE_ID_COLLISION_TITLE_LIMIT: - translation_key = "config_entry_unique_id_collision" - translation_placeholders["titles"] = ", ".join(titles) - else: - translation_key = "config_entry_unique_id_collision_many" - translation_placeholders["number_of_entries"] = str(len(titles)) - translation_placeholders["titles"] = ", ".join( - titles[:UNIQUE_ID_COLLISION_TITLE_LIMIT] - ) - translation_placeholders["title_limit"] = str( - UNIQUE_ID_COLLISION_TITLE_LIMIT - ) - - ir.async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - issue_id, - breaks_in_ha_version="2025.11.0", - data={ - "issue_type": ISSUE_UNIQUE_ID_COLLISION, - "unique_id": unique_id, - }, - is_fixable=False, - issue_domain=domain, - severity=ir.IssueSeverity.ERROR, - translation_key=translation_key, - translation_placeholders=translation_placeholders, - ) - - break # Only create one issue per domain - - for issue_id in issues: - ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) - @callback def _async_abort_entries_match( @@ -2542,9 +2236,7 @@ def _async_abort_entries_match( raise data_entry_flow.AbortFlow("already_configured") -class ConfigEntryBaseFlow( - data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult] -): +class ConfigEntryBaseFlow(data_entry_flow.FlowHandler[ConfigFlowResult]): """Base class for config and option flows.""" _flow_result = ConfigFlowResult @@ -2565,7 +2257,7 @@ class ConfigFlow(ConfigEntryBaseFlow): if not self.context: return None - return self.context.get("unique_id") + return cast(str | None, self.context.get("unique_id")) @staticmethod @callback @@ -2591,27 +2283,6 @@ class ConfigFlow(ConfigEntryBaseFlow): self._async_current_entries(include_ignore=False), match_dict ) - @callback - def _abort_if_unique_id_mismatch( - self, - *, - reason: str = "unique_id_mismatch", - description_placeholders: Mapping[str, str] | None = None, - ) -> None: - """Abort if the unique ID does not match the reauth/reconfigure context. - - Requires strings.json entry corresponding to the `reason` parameter - in user visible flows. - """ - if ( - self.source == SOURCE_REAUTH - and self._get_reauth_entry().unique_id != self.unique_id - ) or ( - self.source == SOURCE_RECONFIGURE - and self._get_reconfigure_entry().unique_id != self.unique_id - ): - raise data_entry_flow.AbortFlow(reason, description_placeholders) - @callback def _abort_if_unique_id_configured( self, @@ -2740,23 +2411,14 @@ class ConfigFlow(ConfigEntryBaseFlow): ] async def async_step_ignore(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Ignore this config flow. - - Ignoring a config flow works by creating a config entry with source set to - SOURCE_IGNORE. - - There will only be a single active discovery flow per device, also when the - integration has multiple discovery sources for the same device. This method - is called when the user ignores a discovered device or service, we then store - the key for the flow being ignored. 
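# A minimal sketch of _async_abort_entries_match in use, for integrations that have no
# stable unique ID and deduplicate on configuration data instead. CONF_HOST and the
# domain are assumptions for illustration.
import voluptuous as vol

from homeassistant import config_entries
from homeassistant.config_entries import ConfigFlowResult
from homeassistant.const import CONF_HOST


class HostOnlyFlow(config_entries.ConfigFlow, domain="example_hub"):
    """Hypothetical flow deduplicated on host name."""

    async def async_step_user(self, user_input: dict | None = None) -> ConfigFlowResult:
        if user_input is not None:
            self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
            return self.async_create_entry(title=user_input[CONF_HOST], data=user_input)
        return self.async_show_form(
            step_id="user", data_schema=vol.Schema({vol.Required(CONF_HOST): str})
        )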
- - Once the ignore config entry is created, ConfigEntriesFlowManager.async_finish_flow - will make sure the discovery key is kept up to date since it may not be stable - unlike the unique id. - """ + """Ignore this config flow.""" await self.async_set_unique_id(user_input["unique_id"], raise_on_progress=False) return self.async_create_entry(title=user_input["title"], data={}) + async def async_step_unignore(self, user_input: dict[str, Any]) -> ConfigFlowResult: + """Rediscover a config entry by it's unique_id.""" + return self.async_abort(reason="not_implemented") + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -2889,20 +2551,6 @@ class ConfigFlow(ConfigEntryBaseFlow): options: Mapping[str, Any] | None = None, ) -> ConfigFlowResult: """Finish config flow and create a config entry.""" - if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}: - report_issue = async_suggest_report_issue( - self.hass, integration_domain=self.handler - ) - _LOGGER.warning( - ( - "Detected %s config flow creating a new entry, " - "when it is expected to update an existing entry and abort. " - "This will stop working in %s, please %s" - ), - self.source, - "2025.11", - report_issue, - ) result = super().async_create_entry( title=title, data=data, @@ -2924,30 +2572,11 @@ class ConfigFlow(ConfigEntryBaseFlow): unique_id: str | None | UndefinedType = UNDEFINED, title: str | UndefinedType = UNDEFINED, data: Mapping[str, Any] | UndefinedType = UNDEFINED, - data_updates: Mapping[str, Any] | UndefinedType = UNDEFINED, options: Mapping[str, Any] | UndefinedType = UNDEFINED, - reason: str | UndefinedType = UNDEFINED, + reason: str = "reauth_successful", reload_even_if_entry_is_unchanged: bool = True, ) -> ConfigFlowResult: - """Update config entry, reload config entry and finish config flow. - - :param data: replace the entry data with new data - :param data_updates: add items from data_updates to entry data - existing keys - are overridden - :param options: replace the entry options with new options - :param title: replace the title of the entry - :param unique_id: replace the unique_id of the entry - - :param reason: set the reason for the abort, defaults to - `reauth_successful` or `reconfigure_successful` based on flow source - - :param reload_even_if_entry_is_unchanged: set this to `False` if the entry - should not be reloaded if it is unchanged - """ - if data_updates is not UNDEFINED: - if data is not UNDEFINED: - raise ValueError("Cannot set both data and data_updates") - data = entry.data | data_updates + """Update config entry, reload config entry and finish config flow.""" result = self.hass.config_entries.async_update_entry( entry=entry, unique_id=unique_id, @@ -2957,82 +2586,10 @@ class ConfigFlow(ConfigEntryBaseFlow): ) if reload_even_if_entry_is_unchanged or result: self.hass.config_entries.async_schedule_reload(entry.entry_id) - if reason is UNDEFINED: - reason = "reauth_successful" - if self.source == SOURCE_RECONFIGURE: - reason = "reconfigure_successful" return self.async_abort(reason=reason) - @callback - def async_show_form( - self, - *, - step_id: str | None = None, - data_schema: vol.Schema | None = None, - errors: dict[str, str] | None = None, - description_placeholders: Mapping[str, str | None] | None = None, - last_step: bool | None = None, - preview: str | None = None, - ) -> ConfigFlowResult: - """Return the definition of a form to gather user input. - The step_id parameter is deprecated and will be removed in a future release. 
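# A minimal sketch of a reauth flow finishing through async_update_reload_and_abort,
# which updates the entry, schedules a reload and aborts with "reauth_successful".
# Looking the entry up via context["entry_id"] and the "api_token" field are
# assumptions for illustration.
import voluptuous as vol

from homeassistant import config_entries
from homeassistant.config_entries import ConfigFlowResult


class ReauthFlow(config_entries.ConfigFlow, domain="example_hub"):
    """Hypothetical flow handling reauthentication."""

    async def async_step_reauth(self, entry_data: dict) -> ConfigFlowResult:
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
            assert entry is not None
            return self.async_update_reload_and_abort(
                entry, data={**entry.data, "api_token": user_input["api_token"]}
            )
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema({vol.Required("api_token"): str}),
        )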
- """ - if self.source == SOURCE_REAUTH and "entry_id" in self.context: - # If the integration does not provide a name for the reauth title, - # we append it to the description placeholders. - # We also need to check entry_id as some integrations bypass the - # reauth helpers and create a flow without it. - description_placeholders = dict(description_placeholders or {}) - if description_placeholders.get(CONF_NAME) is None: - description_placeholders[CONF_NAME] = self._get_reauth_entry().title - return super().async_show_form( - step_id=step_id, - data_schema=data_schema, - errors=errors, - description_placeholders=description_placeholders, - last_step=last_step, - preview=preview, - ) - - def is_matching(self, other_flow: Self) -> bool: - """Return True if other_flow is matching this flow.""" - raise NotImplementedError - - @property - def _reauth_entry_id(self) -> str: - """Return reauth entry id.""" - if self.source != SOURCE_REAUTH: - raise ValueError(f"Source is {self.source}, expected {SOURCE_REAUTH}") - return self.context["entry_id"] - - @callback - def _get_reauth_entry(self) -> ConfigEntry: - """Return the reauth config entry linked to the current context.""" - if entry := self.hass.config_entries.async_get_entry(self._reauth_entry_id): - return entry - raise UnknownEntry - - @property - def _reconfigure_entry_id(self) -> str: - """Return reconfigure entry id.""" - if self.source != SOURCE_RECONFIGURE: - raise ValueError(f"Source is {self.source}, expected {SOURCE_RECONFIGURE}") - return self.context["entry_id"] - - @callback - def _get_reconfigure_entry(self) -> ConfigEntry: - """Return the reconfigure config entry linked to the current context.""" - if entry := self.hass.config_entries.async_get_entry( - self._reconfigure_entry_id - ): - return entry - raise UnknownEntry - - -class OptionsFlowManager( - data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult] -): +class OptionsFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): """Flow to set options for a configuration entry.""" _flow_result = ConfigFlowResult @@ -3049,7 +2606,7 @@ class OptionsFlowManager( self, handler_key: str, *, - context: ConfigFlowContext | None = None, + context: dict[str, Any] | None = None, data: dict[str, Any] | None = None, ) -> OptionsFlow: """Create an options flow for a config entry. @@ -3062,14 +2619,11 @@ class OptionsFlowManager( async def async_finish_flow( self, - flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult], + flow: data_entry_flow.FlowHandler[ConfigFlowResult], result: ConfigFlowResult, ) -> ConfigFlowResult: """Finish an options flow and update options for configuration entry. - This method is called when a flow step returns FlowResultType.ABORT or - FlowResultType.CREATE_ENTRY. - Flow.handler and entry_id is the same thing to map flow with entry. 
""" flow = cast(OptionsFlow, flow) @@ -3087,7 +2641,7 @@ class OptionsFlowManager( return result async def _async_setup_preview( - self, flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult] + self, flow: data_entry_flow.FlowHandler[ConfigFlowResult] ) -> None: """Set up preview for an option flow handler.""" entry = self._async_get_config_entry(flow.handler) @@ -3102,9 +2656,6 @@ class OptionsFlow(ConfigEntryBaseFlow): handler: str - _config_entry: ConfigEntry - """For compatibility only - to be removed in 2025.12""" - @callback def _async_abort_entries_match( self, match_dict: dict[str, Any] | None = None @@ -3113,78 +2664,32 @@ class OptionsFlow(ConfigEntryBaseFlow): Requires `already_configured` in strings.json in user visible flows. """ + + config_entry = cast( + ConfigEntry, self.hass.config_entries.async_get_entry(self.handler) + ) _async_abort_entries_match( [ entry - for entry in self.hass.config_entries.async_entries( - self.config_entry.domain - ) - if entry is not self.config_entry and entry.source != SOURCE_IGNORE + for entry in self.hass.config_entries.async_entries(config_entry.domain) + if entry is not config_entry and entry.source != SOURCE_IGNORE ], match_dict, ) - @property - def _config_entry_id(self) -> str: - """Return config entry id. - - Please note that this is not available inside `__init__` method, and - can only be referenced after initialisation. - """ - # This is the same as handler, but that's an implementation detail - if self.handler is None: - raise ValueError( - "The config entry id is not available during initialisation" - ) - return self.handler - - @property - def config_entry(self) -> ConfigEntry: - """Return the config entry linked to the current options flow. - - Please note that this is not available inside `__init__` method, and - can only be referenced after initialisation. - """ - # For compatibility only - to be removed in 2025.12 - if hasattr(self, "_config_entry"): - return self._config_entry - - if self.hass is None: - raise ValueError("The config entry is not available during initialisation") - if entry := self.hass.config_entries.async_get_entry(self._config_entry_id): - return entry - raise UnknownEntry - - @config_entry.setter - def config_entry(self, value: ConfigEntry) -> None: - """Set the config entry value.""" - report_usage( - "sets option flow config_entry explicitly, which is deprecated " - "and will stop working in 2025.12", - core_behavior=ReportBehavior.ERROR, - core_integration_behavior=ReportBehavior.ERROR, - custom_integration_behavior=ReportBehavior.LOG, - ) - self._config_entry = value - class OptionsFlowWithConfigEntry(OptionsFlow): - """Base class for options flows with config entry and options. - - This class is being phased out, and should not be referenced in new code. - It is kept only for backward compatibility, and only for custom integrations. 
- """ + """Base class for options flows with config entry and options.""" def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" self._config_entry = config_entry self._options = deepcopy(dict(config_entry.options)) - report_usage( - "inherits from OptionsFlowWithConfigEntry", - core_behavior=ReportBehavior.ERROR, - core_integration_behavior=ReportBehavior.ERROR, - custom_integration_behavior=ReportBehavior.IGNORE, - ) + + @property + def config_entry(self) -> ConfigEntry: + """Return the config entry.""" + return self._config_entry @property def options(self) -> dict[str, Any]: diff --git a/homeassistant/const.py b/homeassistant/const.py index 4082a076b94..d0f1d4555d4 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -9,7 +9,6 @@ from typing import TYPE_CHECKING, Final from .helpers.deprecation import ( DeprecatedConstant, DeprecatedConstantEnum, - EnumWithDeprecatedMembers, all_with_deprecated_constants, check_if_deprecated_constant, dir_with_deprecated_constants, @@ -24,14 +23,14 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 -MINOR_VERSION: Final = 12 +MINOR_VERSION: Final = 8 PATCH_VERSION: Final = "0.dev0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) -REQUIRED_NEXT_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 0) +REQUIRED_NEXT_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) # Truthy date string triggers showing related deprecation warning messages. -REQUIRED_NEXT_PYTHON_HA_RELEASE: Final = "2025.2" +REQUIRED_NEXT_PYTHON_HA_RELEASE: Final = "" # Format for platform files PLATFORM_FORMAT: Final = "{platform}.{domain}" @@ -42,7 +41,6 @@ class Platform(StrEnum): AIR_QUALITY = "air_quality" ALARM_CONTROL_PANEL = "alarm_control_panel" - ASSIST_SATELLITE = "assist_satellite" BINARY_SENSOR = "binary_sensor" BUTTON = "button" CALENDAR = "calendar" @@ -62,6 +60,7 @@ class Platform(StrEnum): LAWN_MOWER = "lawn_mower" LIGHT = "light" LOCK = "lock" + MAILBOX = "mailbox" MEDIA_PLAYER = "media_player" NOTIFY = "notify" NUMBER = "number" @@ -76,9 +75,9 @@ class Platform(StrEnum): TIME = "time" TODO = "todo" TTS = "tts" - UPDATE = "update" VACUUM = "vacuum" VALVE = "valve" + UPDATE = "update" WAKE_WORD = "wake_word" WATER_HEATER = "water_heater" WEATHER = "weather" @@ -114,7 +113,6 @@ SUN_EVENT_SUNRISE: Final = "sunrise" # #### CONFIG #### CONF_ABOVE: Final = "above" CONF_ACCESS_TOKEN: Final = "access_token" -CONF_ACTION: Final = "action" CONF_ADDRESS: Final = "address" CONF_AFTER: Final = "after" CONF_ALIAS: Final = "alias" @@ -223,7 +221,6 @@ CONF_METHOD: Final = "method" CONF_MINIMUM: Final = "minimum" CONF_MODE: Final = "mode" CONF_MODEL: Final = "model" -CONF_MODEL_ID: Final = "model_id" CONF_MONITORED_CONDITIONS: Final = "monitored_conditions" CONF_MONITORED_VARIABLES: Final = "monitored_variables" CONF_NAME: Final = "name" @@ -283,8 +280,6 @@ CONF_THEN: Final = "then" CONF_TIMEOUT: Final = "timeout" CONF_TIME_ZONE: Final = "time_zone" CONF_TOKEN: Final = "token" -CONF_TRIGGER: Final = "trigger" -CONF_TRIGGERS: Final = "triggers" CONF_TRIGGER_TIME: Final = "trigger_time" CONF_TTL: Final = "ttl" CONF_TYPE: Final = "type" @@ -479,93 +474,25 @@ STATE_PLAYING: Final = "playing" STATE_PAUSED: Final = "paused" STATE_IDLE: Final = "idle" STATE_STANDBY: Final = "standby" +STATE_ALARM_DISARMED: Final = "disarmed" +STATE_ALARM_ARMED_HOME: Final = "armed_home" 
+STATE_ALARM_ARMED_AWAY: Final = "armed_away" +STATE_ALARM_ARMED_NIGHT: Final = "armed_night" +STATE_ALARM_ARMED_VACATION: Final = "armed_vacation" +STATE_ALARM_ARMED_CUSTOM_BYPASS: Final = "armed_custom_bypass" +STATE_ALARM_PENDING: Final = "pending" +STATE_ALARM_ARMING: Final = "arming" +STATE_ALARM_DISARMING: Final = "disarming" +STATE_ALARM_TRIGGERED: Final = "triggered" +STATE_LOCKED: Final = "locked" +STATE_UNLOCKED: Final = "unlocked" +STATE_LOCKING: Final = "locking" +STATE_UNLOCKING: Final = "unlocking" +STATE_JAMMED: Final = "jammed" STATE_UNAVAILABLE: Final = "unavailable" STATE_OK: Final = "ok" STATE_PROBLEM: Final = "problem" -# #### LOCK STATES #### -# STATE_* below are deprecated as of 2024.10 -# use the LockState enum instead. -_DEPRECATED_STATE_LOCKED: Final = DeprecatedConstant( - "locked", - "LockState.LOCKED", - "2025.10", -) -_DEPRECATED_STATE_UNLOCKED: Final = DeprecatedConstant( - "unlocked", - "LockState.UNLOCKED", - "2025.10", -) -_DEPRECATED_STATE_LOCKING: Final = DeprecatedConstant( - "locking", - "LockState.LOCKING", - "2025.10", -) -_DEPRECATED_STATE_UNLOCKING: Final = DeprecatedConstant( - "unlocking", - "LockState.UNLOCKING", - "2025.10", -) -_DEPRECATED_STATE_JAMMED: Final = DeprecatedConstant( - "jammed", - "LockState.JAMMED", - "2025.10", -) - -# #### ALARM CONTROL PANEL STATES #### -# STATE_ALARM_* below are deprecated as of 2024.11 -# use the AlarmControlPanelState enum instead. -_DEPRECATED_STATE_ALARM_DISARMED: Final = DeprecatedConstant( - "disarmed", - "AlarmControlPanelState.DISARMED", - "2025.11", -) -_DEPRECATED_STATE_ALARM_ARMED_HOME: Final = DeprecatedConstant( - "armed_home", - "AlarmControlPanelState.ARMED_HOME", - "2025.11", -) -_DEPRECATED_STATE_ALARM_ARMED_AWAY: Final = DeprecatedConstant( - "armed_away", - "AlarmControlPanelState.ARMED_AWAY", - "2025.11", -) -_DEPRECATED_STATE_ALARM_ARMED_NIGHT: Final = DeprecatedConstant( - "armed_night", - "AlarmControlPanelState.ARMED_NIGHT", - "2025.11", -) -_DEPRECATED_STATE_ALARM_ARMED_VACATION: Final = DeprecatedConstant( - "armed_vacation", - "AlarmControlPanelState.ARMED_VACATION", - "2025.11", -) -_DEPRECATED_STATE_ALARM_ARMED_CUSTOM_BYPASS: Final = DeprecatedConstant( - "armed_custom_bypass", - "AlarmControlPanelState.ARMED_CUSTOM_BYPASS", - "2025.11", -) -_DEPRECATED_STATE_ALARM_PENDING: Final = DeprecatedConstant( - "pending", - "AlarmControlPanelState.PENDING", - "2025.11", -) -_DEPRECATED_STATE_ALARM_ARMING: Final = DeprecatedConstant( - "arming", - "AlarmControlPanelState.ARMING", - "2025.11", -) -_DEPRECATED_STATE_ALARM_DISARMING: Final = DeprecatedConstant( - "disarming", - "AlarmControlPanelState.DISARMING", - "2025.11", -) -_DEPRECATED_STATE_ALARM_TRIGGERED: Final = DeprecatedConstant( - "triggered", - "AlarmControlPanelState.TRIGGERED", - "2025.11", -) - # #### STATE AND EVENT ATTRIBUTES #### # Attribution ATTR_ATTRIBUTION: Final = "attribution" @@ -637,7 +564,6 @@ ATTR_CONNECTIONS: Final = "connections" ATTR_DEFAULT_NAME: Final = "default_name" ATTR_MANUFACTURER: Final = "manufacturer" ATTR_MODEL: Final = "model" -ATTR_MODEL_ID: Final = "model_id" ATTR_SERIAL_NUMBER: Final = "serial_number" ATTR_SUGGESTED_AREA: Final = "suggested_area" ATTR_SW_VERSION: Final = "sw_version" @@ -725,9 +651,6 @@ class UnitOfPower(StrEnum): WATT = "W" KILO_WATT = "kW" - MEGA_WATT = "MW" - GIGA_WATT = "GW" - TERA_WATT = "TW" BTU_PER_HOUR = "BTU/h" @@ -747,38 +670,19 @@ _DEPRECATED_POWER_BTU_PER_HOUR: Final = DeprecatedConstantEnum( ) """Deprecated: please use UnitOfPower.BTU_PER_HOUR.""" - # Reactive power 
units -class UnitOfReactivePower(StrEnum): - """Reactive power units.""" - - VOLT_AMPERE_REACTIVE = "var" - - -_DEPRECATED_POWER_VOLT_AMPERE_REACTIVE: Final = DeprecatedConstantEnum( - UnitOfReactivePower.VOLT_AMPERE_REACTIVE, - "2025.9", -) -"""Deprecated: please use UnitOfReactivePower.VOLT_AMPERE_REACTIVE.""" +POWER_VOLT_AMPERE_REACTIVE: Final = "var" # Energy units class UnitOfEnergy(StrEnum): """Energy units.""" - JOULE = "J" - KILO_JOULE = "kJ" - MEGA_JOULE = "MJ" GIGA_JOULE = "GJ" - WATT_HOUR = "Wh" KILO_WATT_HOUR = "kWh" + MEGA_JOULE = "MJ" MEGA_WATT_HOUR = "MWh" - GIGA_WATT_HOUR = "GWh" - TERA_WATT_HOUR = "TWh" - CALORIE = "cal" - KILO_CALORIE = "kcal" - MEGA_CALORIE = "Mcal" - GIGA_CALORIE = "Gcal" + WATT_HOUR = "Wh" _DEPRECATED_ENERGY_KILO_WATT_HOUR: Final = DeprecatedConstantEnum( @@ -946,7 +850,6 @@ class UnitOfLength(StrEnum): FEET = "ft" YARDS = "yd" MILES = "mi" - NAUTICAL_MILES = "nmi" _DEPRECATED_LENGTH_MILLIMETERS: Final = DeprecatedConstantEnum( @@ -1227,35 +1130,20 @@ _DEPRECATED_MASS_POUNDS: Final = DeprecatedConstantEnum( """Deprecated: please use UnitOfMass.POUNDS""" -class UnitOfConductivity( - StrEnum, - metaclass=EnumWithDeprecatedMembers, - deprecated={ - "SIEMENS": ("UnitOfConductivity.SIEMENS_PER_CM", "2025.11.0"), - "MICROSIEMENS": ("UnitOfConductivity.MICROSIEMENS_PER_CM", "2025.11.0"), - "MILLISIEMENS": ("UnitOfConductivity.MILLISIEMENS_PER_CM", "2025.11.0"), - }, -): +# Conductivity units +class UnitOfConductivity(StrEnum): """Conductivity units.""" - SIEMENS_PER_CM = "S/cm" - MICROSIEMENS_PER_CM = "µS/cm" - MILLISIEMENS_PER_CM = "mS/cm" - - # Deprecated aliases SIEMENS = "S/cm" - """Deprecated: Please use UnitOfConductivity.SIEMENS_PER_CM""" MICROSIEMENS = "µS/cm" - """Deprecated: Please use UnitOfConductivity.MICROSIEMENS_PER_CM""" MILLISIEMENS = "mS/cm" - """Deprecated: Please use UnitOfConductivity.MILLISIEMENS_PER_CM""" _DEPRECATED_CONDUCTIVITY: Final = DeprecatedConstantEnum( - UnitOfConductivity.MICROSIEMENS_PER_CM, - "2025.11", + UnitOfConductivity.MICROSIEMENS, + "2025.6", ) -"""Deprecated: please use UnitOfConductivity.MICROSIEMENS_PER_CM""" +"""Deprecated: please use UnitOfConductivity.MICROSIEMENS""" # Light units LIGHT_LUX: Final = "lx" @@ -1358,25 +1246,16 @@ CONCENTRATION_PARTS_PER_MILLION: Final = "ppm" CONCENTRATION_PARTS_PER_BILLION: Final = "ppb" -class UnitOfBloodGlucoseConcentration(StrEnum): - """Blood glucose concentration units.""" - - MILLIGRAMS_PER_DECILITER = "mg/dL" - MILLIMOLE_PER_LITER = "mmol/L" - - # Speed units class UnitOfSpeed(StrEnum): """Speed units.""" BEAUFORT = "Beaufort" FEET_PER_SECOND = "ft/s" - INCHES_PER_SECOND = "in/s" METERS_PER_SECOND = "m/s" KILOMETERS_PER_HOUR = "km/h" KNOTS = "kn" MILES_PER_HOUR = "mph" - MILLIMETERS_PER_SECOND = "mm/s" _DEPRECATED_SPEED_FEET_PER_SECOND: Final = DeprecatedConstantEnum( diff --git a/homeassistant/core.py b/homeassistant/core.py index cdfb5570b44..c4392f62c52 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -18,12 +18,16 @@ from collections.abc import ( ValuesView, ) import concurrent.futures +from contextlib import suppress from dataclasses import dataclass import datetime import enum import functools +from functools import cached_property import inspect import logging +import os +import pathlib import re import threading import time @@ -39,10 +43,11 @@ from typing import ( cast, overload, ) +from urllib.parse import urlparse -from propcache import cached_property, under_cached_property from typing_extensions import TypeVar import voluptuous as vol +import 
yarl from . import util from .const import ( @@ -50,6 +55,7 @@ from .const import ( ATTR_FRIENDLY_NAME, ATTR_SERVICE, ATTR_SERVICE_DATA, + BASE_PLATFORMS, COMPRESSED_STATE_ATTRIBUTES, COMPRESSED_STATE_CONTEXT, COMPRESSED_STATE_LAST_CHANGED, @@ -71,6 +77,7 @@ from .const import ( MAX_EXPECTED_ENTITY_IDS, MAX_LENGTH_EVENT_EVENT_TYPE, MAX_LENGTH_STATE_STATE, + UnitOfLength, __version__, ) from .exceptions import ( @@ -83,20 +90,17 @@ from .exceptions import ( Unauthorized, ) from .helpers.deprecation import ( - DeferredDeprecatedAlias, DeprecatedConstantEnum, - EnumWithDeprecatedMembers, all_with_deprecated_constants, check_if_deprecated_constant, dir_with_deprecated_constants, ) from .helpers.json import json_bytes, json_fragment -from .helpers.typing import VolSchemaType -from .util import dt as dt_util +from .helpers.typing import UNDEFINED, UndefinedType, VolSchemaType +from .util import dt as dt_util, location from .util.async_ import ( cancelling, create_eager_task, - get_scheduled_timer_handles, run_callback_threadsafe, shutdown_run_callback_threadsafe, ) @@ -107,11 +111,18 @@ from .util.json import JsonObjectType from .util.read_only_dict import ReadOnlyDict from .util.timeout import TimeoutManager from .util.ulid import ulid_at_time, ulid_now +from .util.unit_system import ( + _CONF_UNIT_SYSTEM_IMPERIAL, + _CONF_UNIT_SYSTEM_US_CUSTOMARY, + METRIC_SYSTEM, + UnitSystem, + get_unit_system, +) # Typing imports that create a circular dependency if TYPE_CHECKING: from .auth import AuthManager - from .components.http import HomeAssistantHTTP + from .components.http import ApiConfig, HomeAssistantHTTP from .config_entries import ConfigEntries from .helpers.entity import StateInfo @@ -125,6 +136,10 @@ _SENTINEL = object() _DataT = TypeVar("_DataT", bound=Mapping[str, Any], default=Mapping[str, Any]) type CALLBACK_TYPE = Callable[[], None] +CORE_STORAGE_KEY = "core.config" +CORE_STORAGE_VERSION = 1 +CORE_STORAGE_MINOR_VERSION = 4 + DOMAIN = "homeassistant" # How long to wait to log tasks that are blocking @@ -134,16 +149,7 @@ type ServiceResponse = JsonObjectType | None type EntityServiceResponse = dict[str, ServiceResponse] -class ConfigSource( - enum.StrEnum, - metaclass=EnumWithDeprecatedMembers, - deprecated={ - "DEFAULT": ("core_config.ConfigSource.DEFAULT", "2025.11.0"), - "DISCOVERED": ("core_config.ConfigSource.DISCOVERED", "2025.11.0"), - "STORAGE": ("core_config.ConfigSource.STORAGE", "2025.11.0"), - "YAML": ("core_config.ConfigSource.YAML", "2025.11.0"), - }, -): +class ConfigSource(enum.StrEnum): """Source of core configuration.""" DEFAULT = "default" @@ -162,7 +168,7 @@ class EventStateEventData(TypedDict): class EventStateChangedData(EventStateEventData): """EVENT_STATE_CHANGED data. - A state changed event is fired when on state write the state is changed. + A state changed event is fired when on state write when the state is changed. """ old_state: State | None @@ -171,7 +177,7 @@ class EventStateChangedData(EventStateEventData): class EventStateReportedData(EventStateEventData): """EVENT_STATE_REPORTED data. - A state reported event is fired when on state write the state is unchanged. + A state reported event is fired when on state write when the state is unchanged. """ old_last_reported: datetime.datetime @@ -185,19 +191,6 @@ _DEPRECATED_SOURCE_STORAGE = DeprecatedConstantEnum(ConfigSource.STORAGE, "2025. 
_DEPRECATED_SOURCE_YAML = DeprecatedConstantEnum(ConfigSource.YAML, "2025.1") -def _deprecated_core_config() -> Any: - # pylint: disable-next=import-outside-toplevel - from . import core_config - - return core_config.Config - - -# The Config class was moved to core_config in Home Assistant 2024.11 -_DEPRECATED_Config = DeferredDeprecatedAlias( - _deprecated_core_config, "homeassistant.core_config.Config", "2025.11" -) - - # How long to wait until things that run on startup have to finish. TIMEOUT_EVENT_START = 15 @@ -341,8 +334,6 @@ class HassJob[**_P, _R_co]: we run the job. """ - __slots__ = ("target", "name", "_cancel_on_shutdown", "_cache") - def __init__( self, target: Callable[_P, _R_co], @@ -355,13 +346,12 @@ class HassJob[**_P, _R_co]: self.target: Final = target self.name = name self._cancel_on_shutdown = cancel_on_shutdown - self._cache: dict[str, Any] = {} if job_type: # Pre-set the cached_property so we # avoid the function call - self._cache["job_type"] = job_type + self.__dict__["job_type"] = job_type - @under_cached_property + @cached_property def job_type(self) -> HassJobType: """Return the job type.""" return get_hassjob_callable_job_type(self.target) @@ -437,9 +427,6 @@ class HomeAssistant: # pylint: disable-next=import-outside-toplevel from . import loader - # pylint: disable-next=import-outside-toplevel - from .core_config import Config - # This is a dictionary that any component can store any data on. self.data = HassDict() self.loop = asyncio.get_running_loop() @@ -463,7 +450,9 @@ class HomeAssistant: self.import_executor = InterruptibleThreadPoolExecutor( max_workers=1, thread_name_prefix="ImportExecutor" ) - self.loop_thread_id = getattr(self.loop, "_thread_id") + self.loop_thread_id = getattr( + self.loop, "_thread_ident", getattr(self.loop, "_thread_id") + ) def verify_event_loop_thread(self, what: str) -> None: """Report and raise if we are not running in the event loop thread.""" @@ -656,12 +645,12 @@ class HomeAssistant: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report_usage( + frame.report( "calls `async_add_job`, which is deprecated and will be removed in Home " "Assistant 2025.4; Please review " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" " for replacement options", - core_behavior=frame.ReportBehavior.LOG, + error_if_core=False, ) if target is None: @@ -712,12 +701,12 @@ class HomeAssistant: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report_usage( + frame.report( "calls `async_add_hass_job`, which is deprecated and will be removed in Home " "Assistant 2025.5; Please review " "https://developers.home-assistant.io/blog/2024/04/07/deprecate_add_hass_job" " for replacement options", - core_behavior=frame.ReportBehavior.LOG, + error_if_core=False, ) return self._async_add_hass_job(hassjob, *args, background=background) @@ -986,12 +975,12 @@ class HomeAssistant: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report_usage( + frame.report( "calls `async_run_job`, which is deprecated and will be removed in Home " "Assistant 2025.4; Please review " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" " for replacement options", - core_behavior=frame.ReportBehavior.LOG, + error_if_core=False, ) if asyncio.iscoroutine(target): @@ -1238,7 +1227,8 @@ class HomeAssistant: def 
_cancel_cancellable_timers(self) -> None: """Cancel timer handles marked as cancellable.""" - for handle in get_scheduled_timer_handles(self.loop): + handles: Iterable[asyncio.TimerHandle] = self.loop._scheduled # type: ignore[attr-defined] # noqa: SLF001 + for handle in handles: if ( not handle.cancelled() and (args := handle._args) # noqa: SLF001 @@ -1256,8 +1246,6 @@ class HomeAssistant: class Context: """The context that triggered something.""" - __slots__ = ("id", "user_id", "parent_id", "origin_event", "_cache") - def __init__( self, user_id: str | None = None, @@ -1269,7 +1257,6 @@ class Context: self.user_id = user_id self.parent_id = parent_id self.origin_event: Event[Any] | None = None - self._cache: dict[str, Any] = {} def __eq__(self, other: object) -> bool: """Compare contexts.""" @@ -1283,7 +1270,7 @@ class Context: """Create a deep copy of this context.""" return Context(user_id=self.user_id, parent_id=self.parent_id, id=self.id) - @under_cached_property + @cached_property def _as_dict(self) -> dict[str, str | None]: """Return a dictionary representation of the context. @@ -1300,12 +1287,12 @@ class Context: """Return a ReadOnlyDict representation of the context.""" return self._as_read_only_dict - @under_cached_property + @cached_property def _as_read_only_dict(self) -> ReadOnlyDict[str, str | None]: """Return a ReadOnlyDict representation of the context.""" return ReadOnlyDict(self._as_dict) - @under_cached_property + @cached_property def json_fragment(self) -> json_fragment: """Return a JSON fragment of the context.""" return json_fragment(json_bytes(self._as_dict)) @@ -1321,24 +1308,10 @@ class EventOrigin(enum.Enum): """Return the event.""" return self.value - @cached_property - def idx(self) -> int: - """Return the index of the origin.""" - return next((idx for idx, origin in enumerate(EventOrigin) if origin is self)) - class Event(Generic[_DataT]): """Representation of an event within the bus.""" - __slots__ = ( - "event_type", - "data", - "origin", - "time_fired_timestamp", - "context", - "_cache", - ) - def __init__( self, event_type: EventType[_DataT] | str, @@ -1357,14 +1330,13 @@ class Event(Generic[_DataT]): self.context = context if not context.origin_event: context.origin_event = self - self._cache: dict[str, Any] = {} - @under_cached_property + @cached_property def time_fired(self) -> datetime.datetime: """Return time fired as a timestamp.""" return dt_util.utc_from_timestamp(self.time_fired_timestamp) - @under_cached_property + @cached_property def _as_dict(self) -> dict[str, Any]: """Create a dict representation of this Event. 
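
The hunks above (and several below) swap propcache's under_cached_property, which caches into an explicit self._cache dict so a class can keep __slots__, for functools.cached_property, which caches into the instance __dict__; that is also why the __slots__ tuples and _cache initialisers disappear in the same change. A minimal sketch of the two caching patterns, assuming only the standard library plus the propcache package named in the removed import; the class and attribute names are illustrative only:

from functools import cached_property

from propcache import under_cached_property


class DictBacked:
    """functools.cached_property stores the computed value in self.__dict__."""

    @cached_property
    def expensive(self) -> int:
        return 42  # runs once; afterwards self.__dict__["expensive"] is returned


class SlotsBacked:
    """propcache.under_cached_property stores the computed value in self._cache."""

    # No __dict__ here, so functools.cached_property would fail at access time.
    __slots__ = ("_cache",)

    def __init__(self) -> None:
        self._cache: dict[str, int] = {}

    @under_cached_property
    def expensive(self) -> int:
        return 42  # runs once; afterwards self._cache["expensive"] is returned
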
@@ -1389,7 +1361,7 @@ class Event(Generic[_DataT]): """ return self._as_read_only_dict - @under_cached_property + @cached_property def _as_read_only_dict(self) -> ReadOnlyDict[str, Any]: """Create a ReadOnlyDict representation of this Event.""" as_dict = self._as_dict @@ -1405,7 +1377,7 @@ class Event(Generic[_DataT]): as_dict["context"] = ReadOnlyDict(context) return ReadOnlyDict(as_dict) - @under_cached_property + @cached_property def json_fragment(self) -> json_fragment: """Return an event as a JSON fragment.""" return json_fragment(json_bytes(self._as_dict)) @@ -1635,10 +1607,10 @@ class EventBus: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report_usage( + frame.report( "calls `async_listen` with run_immediately, which is" " deprecated and will be removed in Home Assistant 2025.5", - core_behavior=frame.ReportBehavior.LOG, + error_if_core=False, ) if event_filter is not None and not is_callback_check_partial(event_filter): @@ -1705,10 +1677,10 @@ class EventBus: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report_usage( + frame.report( "calls `async_listen_once` with run_immediately, which is " "deprecated and will be removed in Home Assistant 2025.5", - core_behavior=frame.ReportBehavior.LOG, + error_if_core=False, ) one_time_listener: _OneTimeListener[_DataT] = _OneTimeListener( @@ -1776,21 +1748,6 @@ class State: object_id: Object id of this state. """ - __slots__ = ( - "entity_id", - "state", - "attributes", - "last_changed", - "last_reported", - "last_updated", - "context", - "state_info", - "domain", - "object_id", - "last_updated_timestamp", - "_cache", - ) - def __init__( self, entity_id: str, @@ -1805,7 +1762,6 @@ class State: last_updated_timestamp: float | None = None, ) -> None: """Initialize a new state.""" - self._cache: dict[str, Any] = {} state = str(state) if validate_entity_id and not valid_entity_id(entity_id): @@ -1839,31 +1795,31 @@ class State: last_updated_timestamp = last_updated.timestamp() self.last_updated_timestamp = last_updated_timestamp if self.last_changed == last_updated: - self._cache["last_changed_timestamp"] = last_updated_timestamp + self.__dict__["last_changed_timestamp"] = last_updated_timestamp # If last_reported is the same as last_updated async_set will pass # the same datetime object for both values so we can use an identity # check here. if self.last_reported is last_updated: - self._cache["last_reported_timestamp"] = last_updated_timestamp + self.__dict__["last_reported_timestamp"] = last_updated_timestamp - @under_cached_property + @cached_property def name(self) -> str: """Name of this state.""" return self.attributes.get(ATTR_FRIENDLY_NAME) or self.object_id.replace( "_", " " ) - @under_cached_property + @cached_property def last_changed_timestamp(self) -> float: """Timestamp of last change.""" return self.last_changed.timestamp() - @under_cached_property + @cached_property def last_reported_timestamp(self) -> float: """Timestamp of last report.""" return self.last_reported.timestamp() - @under_cached_property + @cached_property def _as_dict(self) -> dict[str, Any]: """Return a dict representation of the State. 
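
The State hunks above keep the trick of pre-seeding a cached property at construction time (as the HassJob hunk earlier does for job_type): when the value is already known, it is written straight into the cache under the property's name so the getter never runs, and with functools.cached_property that cache is simply the instance __dict__. A minimal standalone sketch of the pattern, assuming nothing beyond the standard library; the class and attribute names are illustrative only:

from functools import cached_property


class Reading:
    def __init__(self, precomputed: float | None = None) -> None:
        if precomputed is not None:
            # Write the value under the property's own name so that
            # cached_property finds it and the getter below is skipped.
            self.__dict__["timestamp"] = precomputed

    @cached_property
    def timestamp(self) -> float:
        # Only reached when nothing was pre-seeded in __init__.
        return 0.0


assert Reading(precomputed=123.4).timestamp == 123.4  # getter skipped
assert Reading().timestamp == 0.0  # getter runs once, then cached
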
@@ -1904,7 +1860,7 @@ class State: """ return self._as_read_only_dict - @under_cached_property + @cached_property def _as_read_only_dict( self, ) -> ReadOnlyDict[str, datetime.datetime | Collection[Any]]: @@ -1919,17 +1875,17 @@ class State: as_dict["context"] = ReadOnlyDict(context) return ReadOnlyDict(as_dict) - @under_cached_property + @cached_property def as_dict_json(self) -> bytes: """Return a JSON string of the State.""" return json_bytes(self._as_dict) - @under_cached_property + @cached_property def json_fragment(self) -> json_fragment: """Return a JSON fragment of the State.""" return json_fragment(self.as_dict_json) - @under_cached_property + @cached_property def as_compressed_state(self) -> CompressedState: """Build a compressed dict of a state for adds. @@ -1957,7 +1913,7 @@ class State: ) return compressed_state - @under_cached_property + @cached_property def as_compressed_state_json(self) -> bytes: """Build a compressed JSON key value pair of a state for adds. @@ -2282,45 +2238,16 @@ class StateMachine: This method must be run in the event loop. """ - self.async_set_internal( - entity_id.lower(), - str(new_state), - attributes or {}, - force_update, - context, - state_info, - timestamp or time.time(), - ) + new_state = str(new_state) + attributes = attributes or {} + old_state = self._states_data.get(entity_id) + if old_state is None: + # If the state is missing, try to convert the entity_id to lowercase + # and try again. + entity_id = entity_id.lower() + old_state = self._states_data.get(entity_id) - @callback - def async_set_internal( - self, - entity_id: str, - new_state: str, - attributes: Mapping[str, Any] | None, - force_update: bool, - context: Context | None, - state_info: StateInfo | None, - timestamp: float, - ) -> None: - """Set the state of an entity, add entity if it does not exist. - - This method is intended to only be used by core internally - and should not be considered a stable API. We will make - breaking changes to this function in the future and it - should not be used in integrations. - - This method must be run in the event loop. - """ - # Most cases the key will be in the dict - # so we optimize for the happy path as - # python 3.11+ has near zero overhead for - # try when it does not raise an exception. 
- old_state: State | None - try: - old_state = self._states_data[entity_id] - except KeyError: - old_state = None + if old_state is None: same_state = False same_attr = False last_changed = None @@ -2340,16 +2267,15 @@ class StateMachine: # timestamp implementation: # https://github.com/python/cpython/blob/c90a862cdcf55dc1753c6466e5fa4a467a13ae24/Modules/_datetimemodule.c#L6387 # https://github.com/python/cpython/blob/c90a862cdcf55dc1753c6466e5fa4a467a13ae24/Modules/_datetimemodule.c#L6323 + if timestamp is None: + timestamp = time.time() now = dt_util.utc_from_timestamp(timestamp) - if context is None: - context = Context(id=ulid_at_time(timestamp)) - if same_state and same_attr: # mypy does not understand this is only possible if old_state is not None old_last_reported = old_state.last_reported # type: ignore[union-attr] old_state.last_reported = now # type: ignore[union-attr] - old_state._cache["last_reported_timestamp"] = timestamp # type: ignore[union-attr] # noqa: SLF001 + old_state.last_reported_timestamp = timestamp # type: ignore[union-attr] # Avoid creating an EventStateReportedData self._bus.async_fire_internal( # type: ignore[misc] EVENT_STATE_REPORTED, @@ -2363,6 +2289,9 @@ class StateMachine: ) return + if context is None: + context = Context(id=ulid_at_time(timestamp)) + if same_attr: if TYPE_CHECKING: assert old_state is not None @@ -2852,6 +2781,452 @@ class ServiceRegistry: return await self._hass.async_add_executor_job(target, service_call) +class _ComponentSet(set[str]): + """Set of loaded components. + + This set contains both top level components and platforms. + + Examples: + `light`, `switch`, `hue`, `mjpeg.camera`, `universal.media_player`, + `homeassistant.scene` + + The top level components set only contains the top level components. + + The all components set contains all components, including platform + based components. + + """ + + def __init__( + self, top_level_components: set[str], all_components: set[str] + ) -> None: + """Initialize the component set.""" + self._top_level_components = top_level_components + self._all_components = all_components + + def add(self, component: str) -> None: + """Add a component to the store.""" + if "." not in component: + self._top_level_components.add(component) + self._all_components.add(component) + else: + platform, _, domain = component.partition(".") + if domain in BASE_PLATFORMS: + self._all_components.add(platform) + return super().add(component) + + def remove(self, component: str) -> None: + """Remove a component from the store.""" + if "." 
in component: + raise ValueError("_ComponentSet does not support removing sub-components") + self._top_level_components.remove(component) + return super().remove(component) + + def discard(self, component: str) -> None: + """Remove a component from the store.""" + raise NotImplementedError("_ComponentSet does not support discard, use remove") + + +class Config: + """Configuration settings for Home Assistant.""" + + _store: Config._ConfigStore + + def __init__(self, hass: HomeAssistant, config_dir: str) -> None: + """Initialize a new config object.""" + # pylint: disable-next=import-outside-toplevel + from .components.zone import DEFAULT_RADIUS + + self.hass = hass + + self.latitude: float = 0 + self.longitude: float = 0 + + self.elevation: int = 0 + """Elevation (always in meters regardless of the unit system).""" + + self.radius: int = DEFAULT_RADIUS + """Radius of the Home Zone (always in meters regardless of the unit system).""" + + self.debug: bool = False + self.location_name: str = "Home" + self.time_zone: str = "UTC" + self.units: UnitSystem = METRIC_SYSTEM + self.internal_url: str | None = None + self.external_url: str | None = None + self.currency: str = "EUR" + self.country: str | None = None + self.language: str = "en" + + self.config_source: ConfigSource = ConfigSource.DEFAULT + + # If True, pip install is skipped for requirements on startup + self.skip_pip: bool = False + + # List of packages to skip when installing requirements on startup + self.skip_pip_packages: list[str] = [] + + # Set of loaded top level components + # This set is updated by _ComponentSet + # and should not be modified directly + self.top_level_components: set[str] = set() + + # Set of all loaded components including platform + # based components + self.all_components: set[str] = set() + + # Set of loaded components + self.components: _ComponentSet = _ComponentSet( + self.top_level_components, self.all_components + ) + + # API (HTTP) server configuration + self.api: ApiConfig | None = None + + # Directory that holds the configuration + self.config_dir: str = config_dir + + # List of allowed external dirs to access + self.allowlist_external_dirs: set[str] = set() + + # List of allowed external URLs that integrations may use + self.allowlist_external_urls: set[str] = set() + + # Dictionary of Media folders that integrations may use + self.media_dirs: dict[str, str] = {} + + # If Home Assistant is running in recovery mode + self.recovery_mode: bool = False + + # Use legacy template behavior + self.legacy_templates: bool = False + + # If Home Assistant is running in safe mode + self.safe_mode: bool = False + + def async_initialize(self) -> None: + """Finish initializing a config object. + + This must be called before the config object is used. + """ + self._store = self._ConfigStore(self.hass) + + def distance(self, lat: float, lon: float) -> float | None: + """Calculate distance from Home Assistant. + + Async friendly. + """ + return self.units.length( + location.distance(self.latitude, self.longitude, lat, lon), + UnitOfLength.METERS, + ) + + def path(self, *path: str) -> str: + """Generate path to the file within the configuration directory. + + Async friendly. 
+ """ + return os.path.join(self.config_dir, *path) + + def is_allowed_external_url(self, url: str) -> bool: + """Check if an external URL is allowed.""" + parsed_url = f"{yarl.URL(url)!s}/" + + return any( + allowed + for allowed in self.allowlist_external_urls + if parsed_url.startswith(allowed) + ) + + def is_allowed_path(self, path: str) -> bool: + """Check if the path is valid for access from outside. + + This function does blocking I/O and should not be called from the event loop. + Use hass.async_add_executor_job to schedule it on the executor. + """ + assert path is not None + + thepath = pathlib.Path(path) + try: + # The file path does not have to exist (it's parent should) + if thepath.exists(): + thepath = thepath.resolve() + else: + thepath = thepath.parent.resolve() + except (FileNotFoundError, RuntimeError, PermissionError): + return False + + for allowed_path in self.allowlist_external_dirs: + try: + thepath.relative_to(allowed_path) + except ValueError: + pass + else: + return True + + return False + + def as_dict(self) -> dict[str, Any]: + """Create a dictionary representation of the configuration. + + Async friendly. + """ + allowlist_external_dirs = list(self.allowlist_external_dirs) + return { + "latitude": self.latitude, + "longitude": self.longitude, + "elevation": self.elevation, + "unit_system": self.units.as_dict(), + "location_name": self.location_name, + "time_zone": self.time_zone, + "components": list(self.components), + "config_dir": self.config_dir, + # legacy, backwards compat + "whitelist_external_dirs": allowlist_external_dirs, + "allowlist_external_dirs": allowlist_external_dirs, + "allowlist_external_urls": list(self.allowlist_external_urls), + "version": __version__, + "config_source": self.config_source, + "recovery_mode": self.recovery_mode, + "state": self.hass.state.value, + "external_url": self.external_url, + "internal_url": self.internal_url, + "currency": self.currency, + "country": self.country, + "language": self.language, + "safe_mode": self.safe_mode, + "debug": self.debug, + "radius": self.radius, + } + + async def async_set_time_zone(self, time_zone_str: str) -> None: + """Help to set the time zone.""" + if time_zone := await dt_util.async_get_time_zone(time_zone_str): + self.time_zone = time_zone_str + dt_util.set_default_time_zone(time_zone) + else: + raise ValueError(f"Received invalid time zone {time_zone_str}") + + def set_time_zone(self, time_zone_str: str) -> None: + """Set the time zone. + + This is a legacy method that should not be used in new code. + Use async_set_time_zone instead. + + It will be removed in Home Assistant 2025.6. 
+ """ + # report is imported here to avoid a circular import + from .helpers.frame import report # pylint: disable=import-outside-toplevel + + report( + "set the time zone using set_time_zone instead of async_set_time_zone" + " which will stop working in Home Assistant 2025.6", + error_if_core=True, + error_if_integration=True, + ) + if time_zone := dt_util.get_time_zone(time_zone_str): + self.time_zone = time_zone_str + dt_util.set_default_time_zone(time_zone) + else: + raise ValueError(f"Received invalid time zone {time_zone_str}") + + async def _async_update( + self, + *, + source: ConfigSource, + latitude: float | None = None, + longitude: float | None = None, + elevation: int | None = None, + unit_system: str | None = None, + location_name: str | None = None, + time_zone: str | None = None, + external_url: str | UndefinedType | None = UNDEFINED, + internal_url: str | UndefinedType | None = UNDEFINED, + currency: str | None = None, + country: str | UndefinedType | None = UNDEFINED, + language: str | None = None, + radius: int | None = None, + ) -> None: + """Update the configuration from a dictionary.""" + self.config_source = source + if latitude is not None: + self.latitude = latitude + if longitude is not None: + self.longitude = longitude + if elevation is not None: + self.elevation = elevation + if unit_system is not None: + try: + self.units = get_unit_system(unit_system) + except ValueError: + self.units = METRIC_SYSTEM + if location_name is not None: + self.location_name = location_name + if time_zone is not None: + await self.async_set_time_zone(time_zone) + if external_url is not UNDEFINED: + self.external_url = external_url + if internal_url is not UNDEFINED: + self.internal_url = internal_url + if currency is not None: + self.currency = currency + if country is not UNDEFINED: + self.country = country + if language is not None: + self.language = language + if radius is not None: + self.radius = radius + + async def async_update(self, **kwargs: Any) -> None: + """Update the configuration from a dictionary.""" + # pylint: disable-next=import-outside-toplevel + from .config import ( + _raise_issue_if_historic_currency, + _raise_issue_if_no_country, + ) + + await self._async_update(source=ConfigSource.STORAGE, **kwargs) + await self._async_store() + self.hass.bus.async_fire_internal(EVENT_CORE_CONFIG_UPDATE, kwargs) + + _raise_issue_if_historic_currency(self.hass, self.currency) + _raise_issue_if_no_country(self.hass, self.country) + + async def async_load(self) -> None: + """Load [homeassistant] core config.""" + if not (data := await self._store.async_load()): + return + + # In 2021.9 we fixed validation to disallow a path (because that's never + # correct) but this data still lives in storage, so we print a warning. + if data.get("external_url") and urlparse(data["external_url"]).path not in ( + "", + "/", + ): + _LOGGER.warning("Invalid external_url set. It's not allowed to have a path") + + if data.get("internal_url") and urlparse(data["internal_url"]).path not in ( + "", + "/", + ): + _LOGGER.warning("Invalid internal_url set. 
It's not allowed to have a path") + + await self._async_update( + source=ConfigSource.STORAGE, + latitude=data.get("latitude"), + longitude=data.get("longitude"), + elevation=data.get("elevation"), + unit_system=data.get("unit_system_v2"), + location_name=data.get("location_name"), + time_zone=data.get("time_zone"), + external_url=data.get("external_url", UNDEFINED), + internal_url=data.get("internal_url", UNDEFINED), + currency=data.get("currency"), + country=data.get("country"), + language=data.get("language"), + radius=data["radius"], + ) + + async def _async_store(self) -> None: + """Store [homeassistant] core config.""" + data = { + "latitude": self.latitude, + "longitude": self.longitude, + "elevation": self.elevation, + # We don't want any integrations to use the name of the unit system + # so we are using the private attribute here + "unit_system_v2": self.units._name, # noqa: SLF001 + "location_name": self.location_name, + "time_zone": self.time_zone, + "external_url": self.external_url, + "internal_url": self.internal_url, + "currency": self.currency, + "country": self.country, + "language": self.language, + "radius": self.radius, + } + await self._store.async_save(data) + + # Circular dependency prevents us from generating the class at top level + # pylint: disable-next=import-outside-toplevel + from .helpers.storage import Store + + class _ConfigStore(Store[dict[str, Any]]): + """Class to help storing Config data.""" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize storage class.""" + super().__init__( + hass, + CORE_STORAGE_VERSION, + CORE_STORAGE_KEY, + private=True, + atomic_writes=True, + minor_version=CORE_STORAGE_MINOR_VERSION, + ) + self._original_unit_system: str | None = None # from old store 1.1 + + async def _async_migrate_func( + self, + old_major_version: int, + old_minor_version: int, + old_data: dict[str, Any], + ) -> dict[str, Any]: + """Migrate to the new version.""" + + # pylint: disable-next=import-outside-toplevel + from .components.zone import DEFAULT_RADIUS + + data = old_data + if old_major_version == 1 and old_minor_version < 2: + # In 1.2, we remove support for "imperial", replaced by "us_customary" + # Using a new key to allow rollback + self._original_unit_system = data.get("unit_system") + data["unit_system_v2"] = self._original_unit_system + if data["unit_system_v2"] == _CONF_UNIT_SYSTEM_IMPERIAL: + data["unit_system_v2"] = _CONF_UNIT_SYSTEM_US_CUSTOMARY + if old_major_version == 1 and old_minor_version < 3: + # In 1.3, we add the key "language", initialize it from the + # owner account. + data["language"] = "en" + try: + owner = await self.hass.auth.async_get_owner() + if owner is not None: + # pylint: disable-next=import-outside-toplevel + from .components.frontend import storage as frontend_store + + # pylint: disable-next=import-outside-toplevel + from .helpers import config_validation as cv + + _, owner_data = await frontend_store.async_user_store( + self.hass, owner.id + ) + + if ( + "language" in owner_data + and "language" in owner_data["language"] + ): + with suppress(vol.InInvalid): + data["language"] = cv.language( + owner_data["language"]["language"] + ) + # pylint: disable-next=broad-except + except Exception: + _LOGGER.exception("Unexpected error during core config migration") + if old_major_version == 1 and old_minor_version < 4: + # In 1.4, we add the key "radius", initialize it with the default. 
+ data.setdefault("radius", DEFAULT_RADIUS) + + if old_major_version > 1: + raise NotImplementedError + return data + + async def async_save(self, data: dict[str, Any]) -> None: + if self._original_unit_system: + data["unit_system"] = self._original_unit_system + return await super().async_save(data) + + # These can be removed if no deprecated constant are in this module anymore __getattr__ = functools.partial(check_if_deprecated_constant, module_globals=globals()) __dir__ = functools.partial( diff --git a/homeassistant/core_config.py b/homeassistant/core_config.py deleted file mode 100644 index 5c773c57bc4..00000000000 --- a/homeassistant/core_config.py +++ /dev/null @@ -1,891 +0,0 @@ -"""Module to help with parsing and generating configuration files.""" - -from __future__ import annotations - -from collections import OrderedDict -from collections.abc import Sequence -from contextlib import suppress -import enum -import logging -import os -import pathlib -from typing import TYPE_CHECKING, Any, Final -from urllib.parse import urlparse - -import voluptuous as vol -from webrtc_models import RTCConfiguration, RTCIceServer -import yarl - -from . import auth -from .auth import mfa_modules as auth_mfa_modules, providers as auth_providers -from .const import ( - ATTR_ASSUMED_STATE, - ATTR_FRIENDLY_NAME, - ATTR_HIDDEN, - BASE_PLATFORMS, - CONF_ALLOWLIST_EXTERNAL_DIRS, - CONF_ALLOWLIST_EXTERNAL_URLS, - CONF_AUTH_MFA_MODULES, - CONF_AUTH_PROVIDERS, - CONF_COUNTRY, - CONF_CURRENCY, - CONF_CUSTOMIZE, - CONF_CUSTOMIZE_DOMAIN, - CONF_CUSTOMIZE_GLOB, - CONF_DEBUG, - CONF_ELEVATION, - CONF_EXTERNAL_URL, - CONF_ID, - CONF_INTERNAL_URL, - CONF_LANGUAGE, - CONF_LATITUDE, - CONF_LEGACY_TEMPLATES, - CONF_LONGITUDE, - CONF_MEDIA_DIRS, - CONF_NAME, - CONF_PACKAGES, - CONF_RADIUS, - CONF_TEMPERATURE_UNIT, - CONF_TIME_ZONE, - CONF_TYPE, - CONF_UNIT_SYSTEM, - CONF_URL, - CONF_USERNAME, - EVENT_CORE_CONFIG_UPDATE, - LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, - UnitOfLength, - __version__, -) -from .core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from .generated.currencies import HISTORIC_CURRENCIES -from .helpers import config_validation as cv, issue_registry as ir -from .helpers.entity_values import EntityValues -from .helpers.frame import ReportBehavior, report_usage -from .helpers.storage import Store -from .helpers.typing import UNDEFINED, UndefinedType -from .util import dt as dt_util, location -from .util.hass_dict import HassKey -from .util.package import is_docker_env -from .util.unit_system import ( - _CONF_UNIT_SYSTEM_IMPERIAL, - _CONF_UNIT_SYSTEM_US_CUSTOMARY, - METRIC_SYSTEM, - UnitSystem, - get_unit_system, - validate_unit_system, -) - -# Typing imports that create a circular dependency -if TYPE_CHECKING: - from .components.http import ApiConfig - -_LOGGER = logging.getLogger(__name__) - -DATA_CUSTOMIZE: HassKey[EntityValues] = HassKey("hass_customize") - -CONF_CREDENTIAL: Final = "credential" -CONF_ICE_SERVERS: Final = "ice_servers" -CONF_WEBRTC: Final = "webrtc" - -CORE_STORAGE_KEY = "core.config" -CORE_STORAGE_VERSION = 1 -CORE_STORAGE_MINOR_VERSION = 4 - - -class ConfigSource(enum.StrEnum): - """Source of core configuration.""" - - DEFAULT = "default" - DISCOVERED = "discovered" - STORAGE = "storage" - YAML = "yaml" - - -def _no_duplicate_auth_provider( - configs: Sequence[dict[str, Any]], -) -> Sequence[dict[str, Any]]: - """No duplicate auth provider config allowed in a list. - - Each type of auth provider can only have one config without optional id. 
- Unique id is required if same type of auth provider used multiple times. - """ - config_keys: set[tuple[str, str | None]] = set() - for config in configs: - key = (config[CONF_TYPE], config.get(CONF_ID)) - if key in config_keys: - raise vol.Invalid( - f"Duplicate auth provider {config[CONF_TYPE]} found. " - "Please add unique IDs " - "if you want to have the same auth provider twice" - ) - config_keys.add(key) - return configs - - -def _no_duplicate_auth_mfa_module( - configs: Sequence[dict[str, Any]], -) -> Sequence[dict[str, Any]]: - """No duplicate auth mfa module item allowed in a list. - - Each type of mfa module can only have one config without optional id. - A global unique id is required if same type of mfa module used multiple - times. - Note: this is different than auth provider - """ - config_keys: set[str] = set() - for config in configs: - key = config.get(CONF_ID, config[CONF_TYPE]) - if key in config_keys: - raise vol.Invalid( - f"Duplicate mfa module {config[CONF_TYPE]} found. " - "Please add unique IDs " - "if you want to have the same mfa module twice" - ) - config_keys.add(key) - return configs - - -def _filter_bad_internal_external_urls(conf: dict) -> dict: - """Filter internal/external URL with a path.""" - for key in CONF_INTERNAL_URL, CONF_EXTERNAL_URL: - if key in conf and urlparse(conf[key]).path not in ("", "/"): - # We warn but do not fix, because if this was incorrectly configured, - # adjusting this value might impact security. - _LOGGER.warning( - "Invalid %s set. It's not allowed to have a path (/bla)", key - ) - - return conf - - -# Schema for all packages element -_PACKAGES_CONFIG_SCHEMA = vol.Schema({cv.string: vol.Any(dict, list)}) - -# Schema for individual package definition -_PACKAGE_DEFINITION_SCHEMA = vol.Schema({cv.string: vol.Any(dict, list, None)}) - -_CUSTOMIZE_DICT_SCHEMA = vol.Schema( - { - vol.Optional(ATTR_FRIENDLY_NAME): cv.string, - vol.Optional(ATTR_HIDDEN): cv.boolean, - vol.Optional(ATTR_ASSUMED_STATE): cv.boolean, - }, - extra=vol.ALLOW_EXTRA, -) - -_CUSTOMIZE_CONFIG_SCHEMA = vol.Schema( - { - vol.Optional(CONF_CUSTOMIZE, default={}): vol.Schema( - {cv.entity_id: _CUSTOMIZE_DICT_SCHEMA} - ), - vol.Optional(CONF_CUSTOMIZE_DOMAIN, default={}): vol.Schema( - {cv.string: _CUSTOMIZE_DICT_SCHEMA} - ), - vol.Optional(CONF_CUSTOMIZE_GLOB, default={}): vol.Schema( - {cv.string: _CUSTOMIZE_DICT_SCHEMA} - ), - } -) - - -def _raise_issue_if_historic_currency(hass: HomeAssistant, currency: str) -> None: - if currency not in HISTORIC_CURRENCIES: - ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "historic_currency") - return - - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - "historic_currency", - is_fixable=False, - learn_more_url="homeassistant://config/general", - severity=ir.IssueSeverity.WARNING, - translation_key="historic_currency", - translation_placeholders={"currency": currency}, - ) - - -def _raise_issue_if_no_country(hass: HomeAssistant, country: str | None) -> None: - if country is not None: - ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "country_not_configured") - return - - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - "country_not_configured", - is_fixable=False, - learn_more_url="homeassistant://config/general", - severity=ir.IssueSeverity.WARNING, - translation_key="country_not_configured", - ) - - -def _validate_currency(data: Any) -> Any: - try: - return cv.currency(data) - except vol.InInvalid: - with suppress(vol.InInvalid): - return cv.historic_currency(data) - raise - - -def 
_validate_stun_or_turn_url(value: Any) -> str: - """Validate an URL.""" - url_in = str(value) - url = urlparse(url_in) - - if url.scheme not in ("stun", "stuns", "turn", "turns"): - raise vol.Invalid("invalid url") - return url_in - - -CORE_CONFIG_SCHEMA = vol.All( - _CUSTOMIZE_CONFIG_SCHEMA.extend( - { - CONF_NAME: vol.Coerce(str), - CONF_LATITUDE: cv.latitude, - CONF_LONGITUDE: cv.longitude, - CONF_ELEVATION: vol.Coerce(int), - CONF_RADIUS: cv.positive_int, - vol.Remove(CONF_TEMPERATURE_UNIT): cv.temperature_unit, - CONF_UNIT_SYSTEM: validate_unit_system, - CONF_TIME_ZONE: cv.time_zone, - vol.Optional(CONF_INTERNAL_URL): cv.url, - vol.Optional(CONF_EXTERNAL_URL): cv.url, - vol.Optional(CONF_ALLOWLIST_EXTERNAL_DIRS): vol.All( - cv.ensure_list, [vol.IsDir()] - ), - vol.Optional(LEGACY_CONF_WHITELIST_EXTERNAL_DIRS): vol.All( - cv.ensure_list, [vol.IsDir()] - ), - vol.Optional(CONF_ALLOWLIST_EXTERNAL_URLS): vol.All( - cv.ensure_list, [cv.url] - ), - vol.Optional(CONF_PACKAGES, default={}): _PACKAGES_CONFIG_SCHEMA, - vol.Optional(CONF_AUTH_PROVIDERS): vol.All( - cv.ensure_list, - [ - auth_providers.AUTH_PROVIDER_SCHEMA.extend( - { - CONF_TYPE: vol.NotIn( - ["insecure_example"], - ( - "The insecure_example auth provider" - " is for testing only." - ), - ) - } - ) - ], - _no_duplicate_auth_provider, - ), - vol.Optional(CONF_AUTH_MFA_MODULES): vol.All( - cv.ensure_list, - [ - auth_mfa_modules.MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend( - { - CONF_TYPE: vol.NotIn( - ["insecure_example"], - "The insecure_example mfa module is for testing only.", - ) - } - ) - ], - _no_duplicate_auth_mfa_module, - ), - vol.Optional(CONF_MEDIA_DIRS): cv.schema_with_slug_keys(vol.IsDir()), - vol.Remove(CONF_LEGACY_TEMPLATES): cv.boolean, - vol.Optional(CONF_CURRENCY): _validate_currency, - vol.Optional(CONF_COUNTRY): cv.country, - vol.Optional(CONF_LANGUAGE): cv.language, - vol.Optional(CONF_DEBUG): cv.boolean, - vol.Optional(CONF_WEBRTC): vol.Schema( - { - vol.Required(CONF_ICE_SERVERS): vol.All( - cv.ensure_list, - [ - vol.Schema( - { - vol.Required(CONF_URL): vol.All( - cv.ensure_list, [_validate_stun_or_turn_url] - ), - vol.Optional(CONF_USERNAME): cv.string, - vol.Optional(CONF_CREDENTIAL): cv.string, - } - ) - ], - ) - } - ), - } - ), - _filter_bad_internal_external_urls, -) - - -async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> None: - """Process the [homeassistant] section from the configuration. - - This method is a coroutine. - """ - # CORE_CONFIG_SCHEMA is not async safe since it uses vol.IsDir - # so we need to run it in an executor job. - config = await hass.async_add_executor_job(CORE_CONFIG_SCHEMA, config) - - # Only load auth during startup. 
- if not hasattr(hass, "auth"): - if (auth_conf := config.get(CONF_AUTH_PROVIDERS)) is None: - auth_conf = [{"type": "homeassistant"}] - - mfa_conf = config.get( - CONF_AUTH_MFA_MODULES, - [{"type": "totp", "id": "totp", "name": "Authenticator app"}], - ) - - setattr( - hass, "auth", await auth.auth_manager_from_config(hass, auth_conf, mfa_conf) - ) - - await hass.config.async_load() - - hac = hass.config - - if any( - k in config - for k in ( - CONF_COUNTRY, - CONF_CURRENCY, - CONF_ELEVATION, - CONF_EXTERNAL_URL, - CONF_INTERNAL_URL, - CONF_LANGUAGE, - CONF_LATITUDE, - CONF_LONGITUDE, - CONF_NAME, - CONF_RADIUS, - CONF_TIME_ZONE, - CONF_UNIT_SYSTEM, - ) - ): - hac.config_source = ConfigSource.YAML - - for key, attr in ( - (CONF_COUNTRY, "country"), - (CONF_CURRENCY, "currency"), - (CONF_ELEVATION, "elevation"), - (CONF_EXTERNAL_URL, "external_url"), - (CONF_INTERNAL_URL, "internal_url"), - (CONF_LANGUAGE, "language"), - (CONF_LATITUDE, "latitude"), - (CONF_LONGITUDE, "longitude"), - (CONF_MEDIA_DIRS, "media_dirs"), - (CONF_NAME, "location_name"), - (CONF_RADIUS, "radius"), - ): - if key in config: - setattr(hac, attr, config[key]) - - if config.get(CONF_DEBUG): - hac.debug = True - - if CONF_WEBRTC in config: - hac.webrtc.ice_servers = [ - RTCIceServer( - server[CONF_URL], - server.get(CONF_USERNAME), - server.get(CONF_CREDENTIAL), - ) - for server in config[CONF_WEBRTC][CONF_ICE_SERVERS] - ] - - _raise_issue_if_historic_currency(hass, hass.config.currency) - _raise_issue_if_no_country(hass, hass.config.country) - - if CONF_TIME_ZONE in config: - await hac.async_set_time_zone(config[CONF_TIME_ZONE]) - - if CONF_MEDIA_DIRS not in config: - if is_docker_env(): - hac.media_dirs = {"local": "/media"} - else: - hac.media_dirs = {"local": hass.config.path("media")} - - # Init whitelist external dir - hac.allowlist_external_dirs = {hass.config.path("www"), *hac.media_dirs.values()} - if CONF_ALLOWLIST_EXTERNAL_DIRS in config: - hac.allowlist_external_dirs.update(set(config[CONF_ALLOWLIST_EXTERNAL_DIRS])) - - elif LEGACY_CONF_WHITELIST_EXTERNAL_DIRS in config: - _LOGGER.warning( - "Key %s has been replaced with %s. Please update your config", - LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, - CONF_ALLOWLIST_EXTERNAL_DIRS, - ) - hac.allowlist_external_dirs.update( - set(config[LEGACY_CONF_WHITELIST_EXTERNAL_DIRS]) - ) - - # Init whitelist external URL list – make sure to add / to every URL that doesn't - # already have it so that we can properly test "path ownership" - if CONF_ALLOWLIST_EXTERNAL_URLS in config: - hac.allowlist_external_urls.update( - url if url.endswith("/") else f"{url}/" - for url in config[CONF_ALLOWLIST_EXTERNAL_URLS] - ) - - # Customize - cust_exact = dict(config[CONF_CUSTOMIZE]) - cust_domain = dict(config[CONF_CUSTOMIZE_DOMAIN]) - cust_glob = OrderedDict(config[CONF_CUSTOMIZE_GLOB]) - - for name, pkg in config[CONF_PACKAGES].items(): - if (pkg_cust := pkg.get(HOMEASSISTANT_DOMAIN)) is None: - continue - - try: - pkg_cust = _CUSTOMIZE_CONFIG_SCHEMA(pkg_cust) - except vol.Invalid: - _LOGGER.warning("Package %s contains invalid customize", name) - continue - - cust_exact.update(pkg_cust[CONF_CUSTOMIZE]) - cust_domain.update(pkg_cust[CONF_CUSTOMIZE_DOMAIN]) - cust_glob.update(pkg_cust[CONF_CUSTOMIZE_GLOB]) - - hass.data[DATA_CUSTOMIZE] = EntityValues(cust_exact, cust_domain, cust_glob) - - if CONF_UNIT_SYSTEM in config: - hac.units = get_unit_system(config[CONF_UNIT_SYSTEM]) - - -class _ComponentSet(set[str]): - """Set of loaded components. 
- - This set contains both top level components and platforms. - - Examples: - `light`, `switch`, `hue`, `mjpeg.camera`, `universal.media_player`, - `homeassistant.scene` - - The top level components set only contains the top level components. - - The all components set contains all components, including platform - based components. - - """ - - def __init__( - self, top_level_components: set[str], all_components: set[str] - ) -> None: - """Initialize the component set.""" - self._top_level_components = top_level_components - self._all_components = all_components - - def add(self, component: str) -> None: - """Add a component to the store.""" - if "." not in component: - self._top_level_components.add(component) - self._all_components.add(component) - else: - platform, _, domain = component.partition(".") - if domain in BASE_PLATFORMS: - self._all_components.add(platform) - return super().add(component) - - def remove(self, component: str) -> None: - """Remove a component from the store.""" - if "." in component: - raise ValueError("_ComponentSet does not support removing sub-components") - self._top_level_components.remove(component) - return super().remove(component) - - def discard(self, component: str) -> None: - """Remove a component from the store.""" - raise NotImplementedError("_ComponentSet does not support discard, use remove") - - -class Config: - """Configuration settings for Home Assistant.""" - - _store: Config._ConfigStore - - def __init__(self, hass: HomeAssistant, config_dir: str) -> None: - """Initialize a new config object.""" - # pylint: disable-next=import-outside-toplevel - from .components.zone import DEFAULT_RADIUS - - self.hass = hass - - self.latitude: float = 0 - self.longitude: float = 0 - - self.elevation: int = 0 - """Elevation (always in meters regardless of the unit system).""" - - self.radius: int = DEFAULT_RADIUS - """Radius of the Home Zone (always in meters regardless of the unit system).""" - - self.debug: bool = False - self.location_name: str = "Home" - self.time_zone: str = "UTC" - self.units: UnitSystem = METRIC_SYSTEM - self.internal_url: str | None = None - self.external_url: str | None = None - self.currency: str = "EUR" - self.country: str | None = None - self.language: str = "en" - - self.config_source: ConfigSource = ConfigSource.DEFAULT - - # If True, pip install is skipped for requirements on startup - self.skip_pip: bool = False - - # List of packages to skip when installing requirements on startup - self.skip_pip_packages: list[str] = [] - - # Set of loaded top level components - # This set is updated by _ComponentSet - # and should not be modified directly - self.top_level_components: set[str] = set() - - # Set of all loaded components including platform - # based components - self.all_components: set[str] = set() - - # Set of loaded components - self.components: _ComponentSet = _ComponentSet( - self.top_level_components, self.all_components - ) - - # API (HTTP) server configuration - self.api: ApiConfig | None = None - - # Directory that holds the configuration - self.config_dir: str = config_dir - - # List of allowed external dirs to access - self.allowlist_external_dirs: set[str] = set() - - # List of allowed external URLs that integrations may use - self.allowlist_external_urls: set[str] = set() - - # Dictionary of Media folders that integrations may use - self.media_dirs: dict[str, str] = {} - - # If Home Assistant is running in recovery mode - self.recovery_mode: bool = False - - # Use legacy template behavior - 
self.legacy_templates: bool = False - - # If Home Assistant is running in safe mode - self.safe_mode: bool = False - - self.webrtc = RTCConfiguration() - - def async_initialize(self) -> None: - """Finish initializing a config object. - - This must be called before the config object is used. - """ - self._store = self._ConfigStore(self.hass) - - def distance(self, lat: float, lon: float) -> float | None: - """Calculate distance from Home Assistant. - - Async friendly. - """ - return self.units.length( - location.distance(self.latitude, self.longitude, lat, lon), - UnitOfLength.METERS, - ) - - def path(self, *path: str) -> str: - """Generate path to the file within the configuration directory. - - Async friendly. - """ - return os.path.join(self.config_dir, *path) - - def is_allowed_external_url(self, url: str) -> bool: - """Check if an external URL is allowed.""" - parsed_url = f"{yarl.URL(url)!s}/" - - return any( - allowed - for allowed in self.allowlist_external_urls - if parsed_url.startswith(allowed) - ) - - def is_allowed_path(self, path: str) -> bool: - """Check if the path is valid for access from outside. - - This function does blocking I/O and should not be called from the event loop. - Use hass.async_add_executor_job to schedule it on the executor. - """ - assert path is not None - - thepath = pathlib.Path(path) - try: - # The file path does not have to exist (it's parent should) - if thepath.exists(): - thepath = thepath.resolve() - else: - thepath = thepath.parent.resolve() - except (FileNotFoundError, RuntimeError, PermissionError): - return False - - for allowed_path in self.allowlist_external_dirs: - try: - thepath.relative_to(allowed_path) - except ValueError: - pass - else: - return True - - return False - - def as_dict(self) -> dict[str, Any]: - """Return a dictionary representation of the configuration. - - Async friendly. - """ - allowlist_external_dirs = list(self.allowlist_external_dirs) - return { - "allowlist_external_dirs": allowlist_external_dirs, - "allowlist_external_urls": list(self.allowlist_external_urls), - "components": list(self.components), - "config_dir": self.config_dir, - "config_source": self.config_source, - "country": self.country, - "currency": self.currency, - "debug": self.debug, - "elevation": self.elevation, - "external_url": self.external_url, - "internal_url": self.internal_url, - "language": self.language, - "latitude": self.latitude, - "location_name": self.location_name, - "longitude": self.longitude, - "radius": self.radius, - "recovery_mode": self.recovery_mode, - "safe_mode": self.safe_mode, - "state": self.hass.state.value, - "time_zone": self.time_zone, - "unit_system": self.units.as_dict(), - "version": __version__, - # legacy, backwards compat - "whitelist_external_dirs": allowlist_external_dirs, - } - - async def async_set_time_zone(self, time_zone_str: str) -> None: - """Help to set the time zone.""" - if time_zone := await dt_util.async_get_time_zone(time_zone_str): - self.time_zone = time_zone_str - dt_util.set_default_time_zone(time_zone) - else: - raise ValueError(f"Received invalid time zone {time_zone_str}") - - def set_time_zone(self, time_zone_str: str) -> None: - """Set the time zone. - - This is a legacy method that should not be used in new code. - Use async_set_time_zone instead. - - It will be removed in Home Assistant 2025.6. 
- """ - report_usage( - "set the time zone using set_time_zone instead of async_set_time_zone" - " which will stop working in Home Assistant 2025.6", - core_integration_behavior=ReportBehavior.ERROR, - custom_integration_behavior=ReportBehavior.ERROR, - ) - if time_zone := dt_util.get_time_zone(time_zone_str): - self.time_zone = time_zone_str - dt_util.set_default_time_zone(time_zone) - else: - raise ValueError(f"Received invalid time zone {time_zone_str}") - - async def _async_update( - self, - *, - country: str | UndefinedType | None = UNDEFINED, - currency: str | None = None, - elevation: int | None = None, - external_url: str | UndefinedType | None = UNDEFINED, - internal_url: str | UndefinedType | None = UNDEFINED, - language: str | None = None, - latitude: float | None = None, - location_name: str | None = None, - longitude: float | None = None, - radius: int | None = None, - source: ConfigSource, - time_zone: str | None = None, - unit_system: str | None = None, - ) -> None: - """Update the configuration from a dictionary.""" - self.config_source = source - if country is not UNDEFINED: - self.country = country - if currency is not None: - self.currency = currency - if elevation is not None: - self.elevation = elevation - if external_url is not UNDEFINED: - self.external_url = external_url - if internal_url is not UNDEFINED: - self.internal_url = internal_url - if language is not None: - self.language = language - if latitude is not None: - self.latitude = latitude - if location_name is not None: - self.location_name = location_name - if longitude is not None: - self.longitude = longitude - if radius is not None: - self.radius = radius - if time_zone is not None: - await self.async_set_time_zone(time_zone) - if unit_system is not None: - try: - self.units = get_unit_system(unit_system) - except ValueError: - self.units = METRIC_SYSTEM - - async def async_update(self, **kwargs: Any) -> None: - """Update the configuration from a dictionary.""" - await self._async_update(source=ConfigSource.STORAGE, **kwargs) - await self._async_store() - self.hass.bus.async_fire_internal(EVENT_CORE_CONFIG_UPDATE, kwargs) - - _raise_issue_if_historic_currency(self.hass, self.currency) - _raise_issue_if_no_country(self.hass, self.country) - - async def async_load(self) -> None: - """Load [homeassistant] core config.""" - if not (data := await self._store.async_load()): - return - - # In 2021.9 we fixed validation to disallow a path (because that's never - # correct) but this data still lives in storage, so we print a warning. - if data.get("external_url") and urlparse(data["external_url"]).path not in ( - "", - "/", - ): - _LOGGER.warning("Invalid external_url set. It's not allowed to have a path") - - if data.get("internal_url") and urlparse(data["internal_url"]).path not in ( - "", - "/", - ): - _LOGGER.warning("Invalid internal_url set. 
It's not allowed to have a path") - - await self._async_update( - source=ConfigSource.STORAGE, - latitude=data.get("latitude"), - longitude=data.get("longitude"), - elevation=data.get("elevation"), - unit_system=data.get("unit_system_v2"), - location_name=data.get("location_name"), - time_zone=data.get("time_zone"), - external_url=data.get("external_url", UNDEFINED), - internal_url=data.get("internal_url", UNDEFINED), - currency=data.get("currency"), - country=data.get("country"), - language=data.get("language"), - radius=data["radius"], - ) - - async def _async_store(self) -> None: - """Store [homeassistant] core config.""" - data = { - "latitude": self.latitude, - "longitude": self.longitude, - "elevation": self.elevation, - # We don't want any integrations to use the name of the unit system - # so we are using the private attribute here - "unit_system_v2": self.units._name, # noqa: SLF001 - "location_name": self.location_name, - "time_zone": self.time_zone, - "external_url": self.external_url, - "internal_url": self.internal_url, - "currency": self.currency, - "country": self.country, - "language": self.language, - "radius": self.radius, - } - await self._store.async_save(data) - - class _ConfigStore(Store[dict[str, Any]]): - """Class to help storing Config data.""" - - def __init__(self, hass: HomeAssistant) -> None: - """Initialize storage class.""" - super().__init__( - hass, - CORE_STORAGE_VERSION, - CORE_STORAGE_KEY, - private=True, - atomic_writes=True, - minor_version=CORE_STORAGE_MINOR_VERSION, - ) - self._original_unit_system: str | None = None # from old store 1.1 - - async def _async_migrate_func( - self, - old_major_version: int, - old_minor_version: int, - old_data: dict[str, Any], - ) -> dict[str, Any]: - """Migrate to the new version.""" - - # pylint: disable-next=import-outside-toplevel - from .components.zone import DEFAULT_RADIUS - - data = old_data - if old_major_version == 1 and old_minor_version < 2: - # In 1.2, we remove support for "imperial", replaced by "us_customary" - # Using a new key to allow rollback - self._original_unit_system = data.get("unit_system") - data["unit_system_v2"] = self._original_unit_system - if data["unit_system_v2"] == _CONF_UNIT_SYSTEM_IMPERIAL: - data["unit_system_v2"] = _CONF_UNIT_SYSTEM_US_CUSTOMARY - if old_major_version == 1 and old_minor_version < 3: - # In 1.3, we add the key "language", initialize it from the - # owner account. - data["language"] = "en" - try: - owner = await self.hass.auth.async_get_owner() - if owner is not None: - # pylint: disable-next=import-outside-toplevel - from .components.frontend import storage as frontend_store - - _, owner_data = await frontend_store.async_user_store( - self.hass, owner.id - ) - - if ( - "language" in owner_data - and "language" in owner_data["language"] - ): - with suppress(vol.InInvalid): - data["language"] = cv.language( - owner_data["language"]["language"] - ) - # pylint: disable-next=broad-except - except Exception: - _LOGGER.exception("Unexpected error during core config migration") - if old_major_version == 1 and old_minor_version < 4: - # In 1.4, we add the key "radius", initialize it with the default. 
- data.setdefault("radius", DEFAULT_RADIUS) - - if old_major_version > 1: - raise NotImplementedError - return data - - async def async_save(self, data: dict[str, Any]) -> None: - if self._original_unit_system: - data["unit_system"] = self._original_unit_system - return await super().async_save(data) diff --git a/homeassistant/data_entry_flow.py b/homeassistant/data_entry_flow.py index 9d041c9b8d3..f632e3e4dde 100644 --- a/homeassistant/data_entry_flow.py +++ b/homeassistant/data_entry_flow.py @@ -26,7 +26,7 @@ from .helpers.deprecation import ( check_if_deprecated_constant, dir_with_deprecated_constants, ) -from .helpers.frame import ReportBehavior, report_usage +from .helpers.frame import report from .loader import async_suggest_report_issue from .util import uuid as uuid_util @@ -46,7 +46,7 @@ class FlowResultType(StrEnum): MENU = "menu" -# RESULT_TYPE_* is deprecated, to be removed in 2025.1 +# RESULT_TYPE_* is deprecated, to be removed in 2022.9 _DEPRECATED_RESULT_TYPE_FORM = DeprecatedConstantEnum(FlowResultType.FORM, "2025.1") _DEPRECATED_RESULT_TYPE_CREATE_ENTRY = DeprecatedConstantEnum( FlowResultType.CREATE_ENTRY, "2025.1" @@ -87,10 +87,7 @@ STEP_ID_OPTIONAL_STEPS = { } -_FlowContextT = TypeVar("_FlowContextT", bound="FlowContext", default="FlowContext") -_FlowResultT = TypeVar( - "_FlowResultT", bound="FlowResult[Any, Any]", default="FlowResult" -) +_FlowResultT = TypeVar("_FlowResultT", bound="FlowResult[Any]", default="FlowResult") _HandlerT = TypeVar("_HandlerT", default=str) @@ -115,7 +112,9 @@ class UnknownStep(FlowError): """Unknown step specified.""" -class InvalidData(vol.Invalid): +# ignore misc is required as vol.Invalid is not typed +# mypy error: Class cannot subclass "Invalid" (has type "Any") +class InvalidData(vol.Invalid): # type: ignore[misc] """Invalid data provided.""" def __init__( @@ -142,17 +141,10 @@ class AbortFlow(FlowError): self.description_placeholders = description_placeholders -class FlowContext(TypedDict, total=False): - """Typed context dict.""" - - show_advanced_options: bool - source: str - - -class FlowResult(TypedDict, Generic[_FlowContextT, _HandlerT], total=False): +class FlowResult(TypedDict, Generic[_HandlerT], total=False): """Typed result dict.""" - context: _FlowContextT + context: dict[str, Any] data_schema: vol.Schema | None data: Mapping[str, Any] description_placeholders: Mapping[str, str | None] | None @@ -199,7 +191,7 @@ def _map_error_to_schema_errors( schema_errors[path_part_str] = error.error_message -class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): +class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): """Manage all the flows that are in progress.""" _flow_result: type[_FlowResultT] = FlowResult # type: ignore[assignment] @@ -211,14 +203,12 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): """Initialize the flow manager.""" self.hass = hass self._preview: set[_HandlerT] = set() - self._progress: dict[ - str, FlowHandler[_FlowContextT, _FlowResultT, _HandlerT] - ] = {} + self._progress: dict[str, FlowHandler[_FlowResultT, _HandlerT]] = {} self._handler_progress_index: defaultdict[ - _HandlerT, set[FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]] + _HandlerT, set[FlowHandler[_FlowResultT, _HandlerT]] ] = defaultdict(set) self._init_data_process_index: defaultdict[ - type, set[FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]] + type, set[FlowHandler[_FlowResultT, _HandlerT]] ] = defaultdict(set) @abc.abstractmethod @@ -226,9 +216,9 @@ class 
FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): self, handler_key: _HandlerT, *, - context: _FlowContextT | None = None, + context: dict[str, Any] | None = None, data: dict[str, Any] | None = None, - ) -> FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]: + ) -> FlowHandler[_FlowResultT, _HandlerT]: """Create a flow for specified handler. Handler key is the domain of the component that we want to set up. @@ -236,23 +226,34 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): @abc.abstractmethod async def async_finish_flow( - self, - flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT], - result: _FlowResultT, + self, flow: FlowHandler[_FlowResultT, _HandlerT], result: _FlowResultT ) -> _FlowResultT: - """Finish a data entry flow. - - This method is called when a flow step returns FlowResultType.ABORT or - FlowResultType.CREATE_ENTRY. - """ + """Finish a data entry flow.""" async def async_post_init( - self, - flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT], - result: _FlowResultT, + self, flow: FlowHandler[_FlowResultT, _HandlerT], result: _FlowResultT ) -> None: """Entry has finished executing its first step asynchronously.""" + @callback + def async_has_matching_flow( + self, handler: _HandlerT, match_context: dict[str, Any], data: Any + ) -> bool: + """Check if an existing matching flow is in progress. + + A flow with the same handler, context, and data. + + If match_context is passed, only return flows with a context that is a + superset of match_context. + """ + if not (flows := self._handler_progress_index.get(handler)): + return False + match_items = match_context.items() + for progress in flows: + if match_items <= progress.context.items() and progress.init_data == data: + return True + return False + @callback def async_get(self, flow_id: str) -> _FlowResultT: """Return a flow in progress as a partial FlowResult.""" @@ -293,18 +294,18 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): ) -> list[_FlowResultT]: """Return flows in progress init matching by data type as a partial FlowResult.""" return self._async_flow_handler_to_flow_result( - [ + ( progress for progress in self._init_data_process_index.get(init_data_type, ()) if matcher(progress.init_data) - ], + ), include_uninitialized, ) @callback def _async_progress_by_handler( self, handler: _HandlerT, match_context: dict[str, Any] | None - ) -> list[FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]]: + ) -> list[FlowHandler[_FlowResultT, _HandlerT]]: """Return the flows in progress by handler. 
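The superset check in `async_has_matching_flow` above leans on `dict.items()` being a set-like view, so "context is a superset of match_context" is a plain subset test over (key, value) pairs, run only against flows already indexed under that handler. A small standalone illustration with made-up values:

# dict items views are set-like, so <= is a subset test over key/value pairs.
match_context = {"source": "dhcp"}
flow_context = {"source": "dhcp", "show_advanced_options": False}

print(match_context.items() <= flow_context.items())       # True: every requested pair matches
print({"source": "user"}.items() <= flow_context.items())  # False: the value differs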
If match_context is specified, only return flows with a context that @@ -323,12 +324,12 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): self, handler: _HandlerT, *, - context: _FlowContextT | None = None, + context: dict[str, Any] | None = None, data: Any = None, ) -> _FlowResultT: """Start a data entry flow.""" if context is None: - context = cast(_FlowContextT, {}) + context = {} flow = await self.async_create_flow(handler, context=context, data=data) if not flow: raise UnknownFlow("Flow was not created") @@ -385,7 +386,7 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): ) is not None and user_input is not None: data_schema = cast(vol.Schema, data_schema) try: - user_input = data_schema(user_input) + user_input = data_schema(user_input) # type: ignore[operator] except vol.Invalid as ex: raised_errors = [ex] if isinstance(ex, vol.MultipleInvalid): @@ -468,7 +469,7 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): @callback def _async_add_flow_progress( - self, flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT] + self, flow: FlowHandler[_FlowResultT, _HandlerT] ) -> None: """Add a flow to in progress.""" if flow.init_data is not None: @@ -478,7 +479,7 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): @callback def _async_remove_flow_from_index( - self, flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT] + self, flow: FlowHandler[_FlowResultT, _HandlerT] ) -> None: """Remove a flow from in progress.""" if flow.init_data is not None: @@ -505,7 +506,7 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): async def _async_handle_step( self, - flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT], + flow: FlowHandler[_FlowResultT, _HandlerT], step_id: str, user_input: dict | BaseServiceInfo | None, ) -> _FlowResultT: @@ -530,12 +531,12 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): if not isinstance(result["type"], FlowResultType): result["type"] = FlowResultType(result["type"]) # type: ignore[unreachable] - report_usage( + report( ( "does not use FlowResultType enum for data entry flow result type. 
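The coercion just above (`result["type"] = FlowResultType(result["type"])`) works because `FlowResultType` is a `StrEnum`: calling the enum with a bare string does a value lookup, and the deprecation warning that follows only fires for handlers that returned the plain string. A standalone sketch, with the enum trimmed to two members purely for illustration:

from enum import StrEnum  # Python 3.11+

class FlowResultType(StrEnum):  # trimmed copy; the real enum has more members
    FORM = "form"
    MENU = "menu"

raw = {"type": "form"}                      # a handler returning the bare string
raw["type"] = FlowResultType(raw["type"])   # value lookup re-wraps it in the enum
print(raw["type"] is FlowResultType.FORM)   # True
print(raw["type"] == "form")                # True - a StrEnum member still compares equal to str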
" - "This is deprecated and will stop working in Home Assistant 2025.1" + "This is deprecated and will stop working in Home Assistant 2022.9" ), - core_behavior=ReportBehavior.LOG, + error_if_core=False, ) if ( @@ -582,7 +583,7 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): return result def _raise_if_step_does_not_exist( - self, flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT], step_id: str + self, flow: FlowHandler[_FlowResultT, _HandlerT], step_id: str ) -> None: """Raise if the step does not exist.""" method = f"async_step_{step_id}" @@ -594,7 +595,7 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): ) async def _async_setup_preview( - self, flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT] + self, flow: FlowHandler[_FlowResultT, _HandlerT] ) -> None: """Set up preview for a flow handler.""" if flow.handler not in self._preview: @@ -604,7 +605,7 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): @callback def _async_flow_handler_to_flow_result( self, - flows: Iterable[FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]], + flows: Iterable[FlowHandler[_FlowResultT, _HandlerT]], include_uninitialized: bool, ) -> list[_FlowResultT]: """Convert a list of FlowHandler to a partial FlowResult that can be serialized.""" @@ -626,7 +627,7 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): ] -class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]): +class FlowHandler(Generic[_FlowResultT, _HandlerT]): """Handle a data entry flow.""" _flow_result: type[_FlowResultT] = FlowResult # type: ignore[assignment] @@ -640,7 +641,7 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]): hass: HomeAssistant = None # type: ignore[assignment] handler: _HandlerT = None # type: ignore[assignment] # Ensure the attribute has a subscriptable, but immutable, default value. 
- context: _FlowContextT = MappingProxyType({}) # type: ignore[assignment] + context: dict[str, Any] = MappingProxyType({}) # type: ignore[assignment] # Set by _async_create_flow callback init_step = "init" @@ -659,12 +660,12 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]): @property def source(self) -> str | None: """Source that initialized the flow.""" - return self.context.get("source", None) # type: ignore[return-value] + return self.context.get("source", None) # type: ignore[no-any-return] @property def show_advanced_options(self) -> bool: """If we should show advanced options.""" - return self.context.get("show_advanced_options", False) # type: ignore[return-value] + return self.context.get("show_advanced_options", False) # type: ignore[no-any-return] def add_suggested_values_to_schema( self, data_schema: vol.Schema, suggested_values: Mapping[str, Any] | None diff --git a/homeassistant/exceptions.py b/homeassistant/exceptions.py index f308cbc5cd8..01e22d16e79 100644 --- a/homeassistant/exceptions.py +++ b/homeassistant/exceptions.py @@ -2,10 +2,12 @@ from __future__ import annotations -from collections.abc import Callable, Generator, Sequence +from collections.abc import Callable, Sequence from dataclasses import dataclass from typing import TYPE_CHECKING, Any +from typing_extensions import Generator + from .util.event_type import EventType if TYPE_CHECKING: diff --git a/homeassistant/generated/amazon_polly.py b/homeassistant/generated/amazon_polly.py deleted file mode 100644 index 1d870bf6c92..00000000000 --- a/homeassistant/generated/amazon_polly.py +++ /dev/null @@ -1,137 +0,0 @@ -"""Automatically generated file. - -To update, run python3 -m script.amazon_polly -""" - -from __future__ import annotations - -from typing import Final - -SUPPORTED_ENGINES: Final[set[str]] = { - "generative", - "long-form", - "neural", - "standard", -} - -SUPPORTED_REGIONS: Final[set[str]] = { - "af-south-1", - "ap-east-1", - "ap-northeast-1", - "ap-northeast-2", - "ap-northeast-3", - "ap-south-1", - "ap-southeast-1", - "ap-southeast-2", - "ca-central-1", - "eu-central-1", - "eu-north-1", - "eu-west-1", - "eu-west-2", - "eu-west-3", - "me-south-1", - "sa-east-1", - "us-east-1", - "us-east-2", - "us-west-1", - "us-west-2", -} - -SUPPORTED_VOICES: Final[set[str]] = { - "Aditi", - "Adriano", - "Amy", - "Andres", - "Aria", - "Arlet", - "Arthur", - "Astrid", - "Ayanda", - "Bianca", - "Brian", - "Burcu", - "Camila", - "Carla", - "Carmen", - "Celine", - "Chantal", - "Conchita", - "Cristiano", - "Daniel", - "Danielle", - "Dora", - "Elin", - "Emma", - "Enrique", - "Ewa", - "Filiz", - "Gabrielle", - "Geraint", - "Giorgio", - "Gregory", - "Gwyneth", - "Hala", - "Hannah", - "Hans", - "Hiujin", - "Ida", - "Ines", - "Isabelle", - "Ivy", - "Jacek", - "Jan", - "Joanna", - "Joey", - "Justin", - "Kajal", - "Karl", - "Kazuha", - "Kendra", - "Kevin", - "Kimberly", - "Laura", - "Lea", - "Liam", - "Lisa", - "Liv", - "Lotte", - "Lucia", - "Lupe", - "Mads", - "Maja", - "Marlene", - "Mathieu", - "Matthew", - "Maxim", - "Mia", - "Miguel", - "Mizuki", - "Naja", - "Niamh", - "Nicole", - "Ola", - "Olivia", - "Pedro", - "Penelope", - "Raveena", - "Remi", - "Ricardo", - "Ruben", - "Russell", - "Ruth", - "Salli", - "Seoyeon", - "Sergio", - "Sofie", - "Stephen", - "Suvi", - "Takumi", - "Tatyana", - "Thiago", - "Tomoko", - "Vicki", - "Vitoria", - "Zayd", - "Zeina", - "Zhiyu", -} diff --git a/homeassistant/generated/application_credentials.py b/homeassistant/generated/application_credentials.py index 
6b3028826dc..bc6b29e4c23 100644 --- a/homeassistant/generated/application_credentials.py +++ b/homeassistant/generated/application_credentials.py @@ -4,18 +4,17 @@ To update, run python3 -m script.hassfest """ APPLICATION_CREDENTIALS = [ + "aladdin_connect", "electric_kiwi", "fitbit", "geocaching", "google", "google_assistant_sdk", "google_mail", - "google_photos", "google_sheets", "google_tasks", "home_connect", "husqvarna_automower", - "iotty", "lametric", "lyric", "microbees", @@ -24,15 +23,11 @@ APPLICATION_CREDENTIALS = [ "neato", "nest", "netatmo", - "point", "senz", "spotify", - "tesla_fleet", "twitch", - "weheat", "withings", "xbox", - "yale", "yolink", "youtube", ] diff --git a/homeassistant/generated/bluetooth.py b/homeassistant/generated/bluetooth.py index a105efc2685..17461225851 100644 --- a/homeassistant/generated/bluetooth.py +++ b/homeassistant/generated/bluetooth.py @@ -8,26 +8,6 @@ from __future__ import annotations from typing import Final BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ - { - "domain": "acaia", - "manufacturer_id": 16962, - }, - { - "domain": "acaia", - "local_name": "ACAIA*", - }, - { - "domain": "acaia", - "local_name": "PYXIS-*", - }, - { - "domain": "acaia", - "local_name": "LUNAR-*", - }, - { - "domain": "acaia", - "local_name": "PROCHBT001", - }, { "domain": "airthings_ble", "manufacturer_id": 820, @@ -157,41 +137,6 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "domain": "govee_ble", "local_name": "B5178*", }, - { - "connectable": False, - "domain": "govee_ble", - "local_name": "GV5121*", - }, - { - "connectable": False, - "domain": "govee_ble", - "local_name": "GV5122*", - }, - { - "connectable": False, - "domain": "govee_ble", - "local_name": "GV5123*", - }, - { - "connectable": False, - "domain": "govee_ble", - "local_name": "GV5124*", - }, - { - "connectable": False, - "domain": "govee_ble", - "local_name": "GV5125*", - }, - { - "connectable": False, - "domain": "govee_ble", - "local_name": "GV5126*", - }, - { - "connectable": False, - "domain": "govee_ble", - "local_name": "GVH5127*", - }, { "connectable": False, "domain": "govee_ble", @@ -276,22 +221,6 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "manufacturer_id": 19506, "service_uuid": "00001801-0000-1000-8000-00805f9b34fb", }, - { - "connectable": False, - "domain": "govee_ble", - "manufacturer_id": 61320, - }, - { - "connectable": False, - "domain": "govee_ble", - "manufacturer_data_start": [ - 236, - 0, - 0, - 1, - ], - "manufacturer_id": 34819, - }, { "domain": "homekit_controller", "manufacturer_data_start": [ @@ -299,11 +228,6 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ ], "manufacturer_id": 76, }, - { - "connectable": True, - "domain": "husqvarna_automower_ble", - "service_uuid": "98bd0001-0b0e-421a-84e5-ddbf75dc6de4", - }, { "domain": "ibeacon", "manufacturer_data_start": [ @@ -346,11 +270,6 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "domain": "inkbird", "local_name": "tps", }, - { - "connectable": True, - "domain": "iron_os", - "service_uuid": "9eae1000-9d0d-48c5-aa55-33e27f9bc533", - }, { "connectable": False, "domain": "kegtron", @@ -705,15 +624,6 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "manufacturer_id": 27, "service_uuid": "0000fff0-0000-1000-8000-00805f9b34fb", }, - { - "connectable": False, - "domain": "thermobeacon", - "manufacturer_data_start": [ - 0, - ], - "manufacturer_id": 48, - "service_uuid": 
"0000fff0-0000-1000-8000-00805f9b34fb", - }, { "connectable": False, "domain": "thermobeacon", diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index ffe61b915c6..23a13bcbfd8 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -9,10 +9,8 @@ FLOWS = { "generic_hygrostat", "generic_thermostat", "group", - "history_stats", "integration", "min_max", - "mold_indicator", "random", "statistics", "switch_as_x", @@ -24,7 +22,6 @@ FLOWS = { ], "integration": [ "abode", - "acaia", "accuweather", "acmeda", "adax", @@ -45,6 +42,7 @@ FLOWS = { "airvisual_pro", "airzone", "airzone_cloud", + "aladdin_connect", "alarmdecoder", "amberelectric", "ambient_network", @@ -55,7 +53,6 @@ FLOWS = { "androidtv_remote", "anova", "anthemav", - "anthropic", "aosmith", "apcupsd", "apple_tv", @@ -72,7 +69,6 @@ FLOWS = { "aurora", "aurora_abb_powerone", "aussie_broadband", - "autarco", "awair", "axis", "azure_data_explorer", @@ -85,7 +81,6 @@ FLOWS = { "blink", "blue_current", "bluemaestro", - "bluesound", "bluetooth", "bmw_connected_drive", "bond", @@ -96,17 +91,14 @@ FLOWS = { "brother", "brottsplatskartan", "brunt", - "bryant_evolution", "bsblan", "bthome", "buienradar", "caldav", - "cambridge_audio", "canary", "cast", "ccm15", "cert_expiry", - "chacon_dio", "cloudflare", "co2signal", "coinbase", @@ -117,7 +109,6 @@ FLOWS = { "cpuspeed", "crownstone", "daikin", - "deako", "deconz", "deluge", "denonavr", @@ -140,7 +131,6 @@ FLOWS = { "drop_connect", "dsmr", "dsmr_reader", - "duke_energy", "dunehd", "duotecno", "dwd_weather_warnings", @@ -156,12 +146,10 @@ FLOWS = { "efergy", "electrasmart", "electric_kiwi", - "elevenlabs", "elgato", "elkm1", "elmax", "elvia", - "emoncms", "emonitor", "emulated_roku", "energenie_power_sockets", @@ -206,14 +194,12 @@ FLOWS = { "fritzbox_callmonitor", "fronius", "frontier_silicon", - "fujitsu_fglair", "fully_kiosk", "fyta", "garages_amsterdam", "gardena_bluetooth", "gdacs", "generic", - "geniushub", "geo_json_events", "geocaching", "geofency", @@ -227,10 +213,8 @@ FLOWS = { "goodwe", "google", "google_assistant_sdk", - "google_cloud", "google_generative_ai_conversation", "google_mail", - "google_photos", "google_sheets", "google_tasks", "google_translate", @@ -259,13 +243,11 @@ FLOWS = { "homewizard", "homeworks", "honeywell", - "html5", "huawei_lte", "hue", "huisbaasje", "hunterdouglas_powerview", "husqvarna_automower", - "husqvarna_automower_ble", "huum", "hvv_departures", "hydrawise", @@ -285,14 +267,10 @@ FLOWS = { "intellifire", "ios", "iotawatt", - "iotty", "ipma", "ipp", "iqvia", - "iron_os", - "iskra", "islamic_prayer_times", - "israel_rail", "iss", "ista_ecotrend", "isy994", @@ -321,26 +299,22 @@ FLOWS = { "lastfm", "launch_library", "laundrify", - "lcn", "ld2410_ble", "leaone", "led_ble", - "lektrico", "lg_netcast", "lg_soundbar", - "lg_thinq", "lidarr", "lifx", "linear_garage_door", - "linkplay", "litejet", "litterrobot", "livisi", "local_calendar", - "local_file", "local_ip", "local_todo", "locative", + "logi_circle", "lookin", "loqed", "luftdaten", @@ -348,9 +322,7 @@ FLOWS = { "lutron", "lutron_caseta", "lyric", - "madvr", "mailgun", - "mastodon", "matter", "mealie", "meater", @@ -373,7 +345,6 @@ FLOWS = { "modem_callerid", "modern_forms", "moehlenhoff_alpha2", - "monarch_money", "monoprice", "monzo", "moon", @@ -382,17 +353,14 @@ FLOWS = { "motionblinds_ble", "motioneye", "motionmount", - "mpd", "mqtt", "mullvad", - "music_assistant", "mutesync", "mysensors", "mystrom", 
"myuplink", "nam", "nanoleaf", - "nasweb", "neato", "nest", "netatmo", @@ -404,18 +372,15 @@ FLOWS = { "nextdns", "nfandroidtv", "nibe_heatpump", - "nice_go", "nightscout", "nina", "nmap_tracker", "nobo_hub", - "nordpool", "notion", "nuheat", "nuki", "nut", "nws", - "nyt_games", "nzbget", "obihai", "octoprint", @@ -424,7 +389,6 @@ FLOWS = { "oncue", "ondilo_ico", "onewire", - "onkyo", "onvif", "open_meteo", "openai_conversation", @@ -445,7 +409,6 @@ FLOWS = { "ovo_energy", "owntracks", "p1_monitor", - "palazzetti", "panasonic_viera", "peco", "pegel_online", @@ -510,7 +473,6 @@ FLOWS = { "rpi_power", "rtsp_to_webrtc", "ruckus_unleashed", - "russound_rio", "ruuvi_gateway", "ruuvitag_ble", "rympro", @@ -526,7 +488,6 @@ FLOWS = { "sensirion_ble", "sensorpro", "sensorpush", - "sensoterra", "sentry", "senz", "seventeentrack", @@ -535,10 +496,8 @@ FLOWS = { "shelly", "shopping_list", "sia", - "simplefin", "simplepush", "simplisafe", - "sky_remote", "skybell", "slack", "sleepiq", @@ -548,9 +507,7 @@ FLOWS = { "smart_meter_texas", "smartthings", "smarttub", - "smarty", "smhi", - "smlight", "sms", "snapcast", "snooz", @@ -564,6 +521,7 @@ FLOWS = { "sonos", "soundtouch", "speedtestdotnet", + "spider", "spotify", "sql", "squeezebox", @@ -600,7 +558,6 @@ FLOWS = { "technove", "tedee", "tellduslive", - "tesla_fleet", "tesla_wall_connector", "teslemetry", "tessie", @@ -617,7 +574,6 @@ FLOWS = { "tomorrowio", "toon", "totalconnect", - "touchline_sl", "tplink", "tplink_omada", "traccar", @@ -629,7 +585,6 @@ FLOWS = { "trafikverket_train", "trafikverket_weatherstation", "transmission", - "triggercmd", "tuya", "twentemilieu", "twilio", @@ -661,7 +616,6 @@ FLOWS = { "volumio", "volvooncall", "vulcan", - "wake_on_lan", "wallbox", "waqi", "watttime", @@ -671,7 +625,6 @@ FLOWS = { "weatherkit", "webmin", "webostv", - "weheat", "wemo", "whirlpool", "whois", @@ -680,17 +633,14 @@ FLOWS = { "withings", "wiz", "wled", - "wmspro", "wolflink", "workday", - "worldclock", "ws66i", "wyoming", "xbox", "xiaomi_aqara", "xiaomi_ble", "xiaomi_miio", - "yale", "yale_smart_alarm", "yalexs_ble", "yamaha_musiccast", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index 7dacf9a0bca..e898f64d128 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -12,6 +12,11 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "domain": "airzone", "macaddress": "E84F25*", }, + { + "domain": "august", + "hostname": "yale-connect-plus", + "macaddress": "00177A*", + }, { "domain": "august", "hostname": "connect", @@ -27,16 +32,15 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "connect", "macaddress": "2C9FFB*", }, - { - "domain": "august", - "hostname": "connect", - "macaddress": "789C85*", - }, { "domain": "august", "hostname": "august*", "macaddress": "E076D0*", }, + { + "domain": "awair", + "macaddress": "70886B1*", + }, { "domain": "axis", "registered_devices": True, @@ -276,18 +280,6 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "polisy*", "macaddress": "000DB9*", }, - { - "domain": "lamarzocco", - "hostname": "gs[0-9][0-9][0-9][0-9][0-9][0-9]", - }, - { - "domain": "lamarzocco", - "hostname": "lm[0-9][0-9][0-9][0-9][0-9][0-9]", - }, - { - "domain": "lamarzocco", - "hostname": "mr[0-9][0-9][0-9][0-9][0-9][0-9]", - }, { "domain": "lametric", "registered_devices": True, @@ -379,15 +371,6 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "gateway*", "macaddress": "F8811A*", }, - { - "domain": "palazzetti", - "hostname": "connbox*", - "macaddress": 
"40F3857*", - }, - { - "domain": "palazzetti", - "registered_devices": True, - }, { "domain": "powerwall", "hostname": "1118431-*", @@ -449,26 +432,6 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "ring*", "macaddress": "0CAE7D*", }, - { - "domain": "ring", - "hostname": "ring*", - "macaddress": "2CAB33*", - }, - { - "domain": "ring", - "hostname": "ring*", - "macaddress": "94E36D*", - }, - { - "domain": "ring", - "hostname": "ring*", - "macaddress": "9C7613*", - }, - { - "domain": "ring", - "hostname": "ring*", - "macaddress": "341513*", - }, { "domain": "roomba", "hostname": "irobot-*", @@ -864,7 +827,7 @@ DHCP: Final[list[dict[str, str | bool]]] = [ }, { "domain": "tplink", - "hostname": "l[59]*", + "hostname": "l5*", "macaddress": "5CE931*", }, { @@ -874,14 +837,9 @@ DHCP: Final[list[dict[str, str | bool]]] = [ }, { "domain": "tplink", - "hostname": "l[59]*", + "hostname": "l5*", "macaddress": "5C628B*", }, - { - "domain": "tplink", - "hostname": "l[59]*", - "macaddress": "14EBB6*", - }, { "domain": "tplink", "hostname": "tp*", @@ -1131,19 +1089,6 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "domain": "wiz", "hostname": "wiz_*", }, - { - "domain": "wmspro", - "macaddress": "0023D5*", - }, - { - "domain": "wmspro", - "registered_devices": True, - }, - { - "domain": "yale", - "hostname": "yale-connect-plus", - "macaddress": "00177A*", - }, { "domain": "yeelight", "hostname": "yeelink-*", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index f007db87868..3371c8de0fa 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -11,12 +11,6 @@ "config_flow": true, "iot_class": "cloud_push" }, - "acaia": { - "name": "Acaia", - "integration_type": "device", - "config_flow": true, - "iot_class": "local_push" - }, "accuweather": { "name": "AccuWeather", "integration_type": "service", @@ -186,6 +180,12 @@ } } }, + "aladdin_connect": { + "name": "Aladdin Connect", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "alarmdecoder": { "name": "AlarmDecoder", "integration_type": "device", @@ -206,6 +206,12 @@ "amazon": { "name": "Amazon", "integrations": { + "alexa": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "cloud_push", + "name": "Amazon Alexa" + }, "amazon_polly": { "integration_type": "hub", "config_flow": false, @@ -315,12 +321,6 @@ "config_flow": true, "iot_class": "local_push" }, - "anthropic": { - "name": "Anthropic Conversation", - "integration_type": "service", - "config_flow": true, - "iot_class": "cloud_polling" - }, "anwb_energie": { "name": "ANWB Energie", "integration_type": "virtual", @@ -402,7 +402,7 @@ "iot_class": "cloud_push" }, "aprilaire": { - "name": "AprilAire", + "name": "Aprilaire", "integration_type": "device", "config_flow": true, "iot_class": "local_push" @@ -413,26 +413,14 @@ "config_flow": false, "iot_class": "cloud_push" }, - "aps": { - "name": "Arizona Public Service (APS)", - "integration_type": "virtual", - "supported_by": "opower" - }, "apsystems": { "name": "APsystems", "integration_type": "device", "config_flow": true, "iot_class": "local_polling" }, - "aqara": { - "name": "Aqara", - "iot_standards": [ - "matter", - "zigbee" - ] - }, "aquacell": { - "name": "AquaCell", + "name": "Aquacell", "integration_type": "device", "config_flow": true, "iot_class": "cloud_polling" @@ -473,11 +461,6 @@ "config_flow": false, "iot_class": "local_polling" }, - "artsound": { - "name": "ArtSound", - 
"integration_type": "virtual", - "supported_by": "linkplay" - }, "aruba": { "name": "Aruba", "integrations": { @@ -513,6 +496,29 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "assist_pipeline": { + "name": "Assist pipeline", + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_push" + }, + "asterisk": { + "name": "Asterisk", + "integrations": { + "asterisk_cdr": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_polling", + "name": "Asterisk Call Detail Records" + }, + "asterisk_mbox": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_push", + "name": "Asterisk Voicemail" + } + } + }, "asuswrt": { "name": "ASUSWRT", "integration_type": "hub", @@ -581,12 +587,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "autarco": { - "name": "Autarco", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling" - }, "avion": { "name": "Avi-on", "integration_type": "hub", @@ -635,6 +635,12 @@ "config_flow": true, "iot_class": "local_push" }, + "bayesian": { + "name": "Bayesian", + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_polling" + }, "bbox": { "name": "Bbox", "integration_type": "hub", @@ -704,6 +710,12 @@ "config_flow": false, "iot_class": "cloud_polling" }, + "bloomsky": { + "name": "BloomSky", + "integration_type": "hub", + "config_flow": false, + "iot_class": "cloud_polling" + }, "blue_current": { "name": "Blue Current", "integration_type": "hub", @@ -719,7 +731,7 @@ "bluesound": { "name": "Bluesound", "integration_type": "hub", - "config_flow": true, + "config_flow": false, "iot_class": "local_polling" }, "bluetooth": { @@ -804,12 +816,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "bryant_evolution": { - "name": "Bryant Evolution", - "integration_type": "device", - "config_flow": true, - "iot_class": "local_polling" - }, "bsblan": { "name": "BSB-Lan", "integration_type": "device", @@ -861,12 +867,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "cambridge_audio": { - "name": "Cambridge Audio", - "integration_type": "device", - "config_flow": true, - "iot_class": "local_push" - }, "canary": { "name": "Canary", "integration_type": "hub", @@ -884,12 +884,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "chacon_dio": { - "name": "Chacon DiO", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_push" - }, "channels": { "name": "Channels", "integration_type": "hub", @@ -958,8 +952,7 @@ "name": "Cloudflare", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_push", - "single_config_entry": true + "iot_class": "cloud_push" }, "cmus": { "name": "cmus", @@ -1110,13 +1103,6 @@ "config_flow": false, "iot_class": "local_polling" }, - "deako": { - "name": "Deako", - "integration_type": "hub", - "config_flow": true, - "iot_class": "local_polling", - "single_config_entry": true - }, "debugpy": { "name": "Remote Python Debugger", "integration_type": "service", @@ -1161,8 +1147,7 @@ "demo": { "integration_type": "hub", "config_flow": false, - "iot_class": "calculated", - "single_config_entry": true + "iot_class": "calculated" }, "denon": { "name": "Denon", @@ -1360,7 +1345,7 @@ "iot_class": "local_push" }, "dsmr": { - "name": "DSMR Smart Meter", + "name": "DSMR Slimme Meter", "integration_type": "hub", "config_flow": true, "iot_class": "local_push" @@ -1389,12 +1374,6 @@ "config_flow": false, "iot_class": "cloud_polling" }, - "duke_energy": { - "name": "Duke Energy", - "integration_type": 
"hub", - "config_flow": true, - "iot_class": "cloud_polling" - }, "dunehd": { "name": "Dune HD", "integration_type": "hub", @@ -1405,8 +1384,7 @@ "name": "Duotecno", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push", - "single_config_entry": true + "iot_class": "local_push" }, "duquesne_light": { "name": "Duquesne Light", @@ -1464,8 +1442,7 @@ "name": "ecobee", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling", - "single_config_entry": true + "iot_class": "cloud_polling" }, "ecoforest": { "name": "Ecoforest", @@ -1533,12 +1510,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "elevenlabs": { - "name": "ElevenLabs", - "integration_type": "service", - "config_flow": true, - "iot_class": "cloud_polling" - }, "elgato": { "name": "Elgato", "integrations": { @@ -1597,7 +1568,7 @@ "integrations": { "emoncms": { "integration_type": "hub", - "config_flow": true, + "config_flow": false, "iot_class": "local_polling", "name": "Emoncms" }, @@ -1663,8 +1634,7 @@ "name": "EnOcean", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push", - "single_config_entry": true + "iot_class": "local_push" }, "enphase_envoy": { "name": "Enphase Envoy", @@ -1828,6 +1798,11 @@ "ffmpeg": { "name": "FFmpeg", "integrations": { + "ffmpeg": { + "integration_type": "hub", + "config_flow": false, + "name": "FFmpeg" + }, "ffmpeg_motion": { "integration_type": "hub", "config_flow": false, @@ -1865,6 +1840,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "filter": { + "name": "Filter", + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_push" + }, "fints": { "name": "FinTS", "integration_type": "service", @@ -2090,22 +2071,10 @@ "config_flow": true, "iot_class": "local_polling" }, - "fujitsu": { - "name": "Fujitsu", - "integrations": { - "fujitsu_anywair": { - "integration_type": "virtual", - "config_flow": false, - "supported_by": "advantage_air", - "name": "Fujitsu anywAIR" - }, - "fujitsu_fglair": { - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling", - "name": "FGLair" - } - } + "fujitsu_anywair": { + "name": "Fujitsu anywAIR", + "integration_type": "virtual", + "supported_by": "advantage_air" }, "fully_kiosk": { "name": "Fully Kiosk Browser", @@ -2161,7 +2130,7 @@ "geniushub": { "name": "Genius Hub", "integration_type": "hub", - "config_flow": true, + "config_flow": false, "iot_class": "local_polling" }, "geo_json_events": { @@ -2273,6 +2242,12 @@ "google": { "name": "Google", "integrations": { + "google_assistant": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "cloud_push", + "name": "Google Assistant" + }, "google_assistant_sdk": { "integration_type": "service", "config_flow": true, @@ -2280,10 +2255,16 @@ "name": "Google Assistant SDK" }, "google_cloud": { - "integration_type": "service", - "config_flow": true, + "integration_type": "hub", + "config_flow": false, "iot_class": "cloud_push", - "name": "Google Cloud" + "name": "Google Cloud Platform" + }, + "google_domains": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "cloud_polling", + "name": "Google Domains" }, "google_generative_ai_conversation": { "integration_type": "service", @@ -2303,12 +2284,6 @@ "iot_class": "cloud_polling", "name": "Google Maps" }, - "google_photos": { - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling", - "name": "Google Photos" - }, "google_pubsub": { "integration_type": "hub", "config_flow": false, @@ -2468,8 
+2443,7 @@ "name": "Home Assistant Supervisor", "integration_type": "hub", "config_flow": false, - "iot_class": "local_polling", - "single_config_entry": true + "iot_class": "local_polling" }, "havana_shade": { "name": "Havana Shade", @@ -2550,6 +2524,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "history_stats": { + "name": "History Stats", + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_polling" + }, "hitron_coda": { "name": "Rogers Hitron CODA", "integration_type": "hub", @@ -2657,9 +2637,8 @@ "html5": { "name": "HTML5 Push Notifications", "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_push", - "single_config_entry": true + "config_flow": false, + "iot_class": "cloud_push" }, "huawei_lte": { "name": "Huawei LTE", @@ -2684,22 +2663,11 @@ "integration_type": "virtual", "supported_by": "motion_blinds" }, - "husqvarna": { - "name": "Husqvarna", - "integrations": { - "husqvarna_automower": { - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_push", - "name": "Husqvarna Automower" - }, - "husqvarna_automower_ble": { - "integration_type": "hub", - "config_flow": true, - "iot_class": "local_polling", - "name": "Husqvarna Automower BLE" - } - } + "husqvarna_automower": { + "name": "Husqvarna Automower", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push" }, "huum": { "name": "Huum", @@ -2741,8 +2709,7 @@ "name": "Jandy iAqualink", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling", - "single_config_entry": true + "iot_class": "cloud_polling" }, "ibm": { "name": "IBM", @@ -2871,8 +2838,7 @@ "name": "Insteon", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push", - "single_config_entry": true + "iot_class": "local_push" }, "intellifire": { "name": "IntelliFire", @@ -2903,12 +2869,6 @@ "config_flow": true, "iot_class": "local_polling" }, - "iotty": { - "name": "iotty", - "integration_type": "device", - "config_flow": true, - "iot_class": "cloud_polling" - }, "iperf3": { "name": "Iperf3", "integration_type": "hub", @@ -2939,18 +2899,6 @@ "config_flow": false, "iot_class": "cloud_polling" }, - "iron_os": { - "name": "IronOS", - "integration_type": "hub", - "config_flow": true, - "iot_class": "local_polling" - }, - "iskra": { - "name": "iskra", - "integration_type": "hub", - "config_flow": true, - "iot_class": "local_polling" - }, "islamic_prayer_times": { "integration_type": "hub", "config_flow": true, @@ -2961,18 +2909,11 @@ "integration_type": "virtual", "supported_by": "motion_blinds" }, - "israel_rail": { - "name": "Israel Railways", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling" - }, "iss": { "name": "International Space Station (ISS)", "integration_type": "service", "config_flow": true, - "iot_class": "cloud_polling", - "single_config_entry": true + "iot_class": "cloud_polling" }, "ista_ecotrend": { "name": "ista EcoTrend", @@ -3111,8 +3052,7 @@ "name": "Everything but the Kitchen Sink", "integration_type": "hub", "config_flow": false, - "iot_class": "calculated", - "single_config_entry": true + "iot_class": "calculated" }, "kiwi": { "name": "KIWI", @@ -3128,7 +3068,7 @@ }, "knocki": { "name": "Knocki", - "integration_type": "hub", + "integration_type": "device", "config_flow": true, "iot_class": "cloud_push" }, @@ -3226,8 +3166,7 @@ "name": "Launch Library", "integration_type": "service", "config_flow": true, - "iot_class": "cloud_polling", - "single_config_entry": true + "iot_class": 
"cloud_polling" }, "laundrify": { "name": "laundrify", @@ -3238,7 +3177,7 @@ "lcn": { "name": "LCN", "integration_type": "hub", - "config_flow": true, + "config_flow": false, "iot_class": "local_push" }, "ld2410_ble": { @@ -3264,12 +3203,6 @@ "integration_type": "virtual", "supported_by": "netatmo" }, - "lektrico": { - "name": "Lektrico Charging Station", - "integration_type": "device", - "config_flow": true, - "iot_class": "local_polling" - }, "leviton": { "name": "Leviton", "iot_standards": [ @@ -3291,12 +3224,6 @@ "iot_class": "local_polling", "name": "LG Soundbars" }, - "lg_thinq": { - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_push", - "name": "LG ThinQ" - }, "webostv": { "integration_type": "hub", "config_flow": true, @@ -3341,12 +3268,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "linkplay": { - "name": "LinkPlay", - "integration_type": "hub", - "config_flow": true, - "iot_class": "local_polling" - }, "linksys_smart": { "name": "Linksys Smart Wi-Fi", "integration_type": "hub", @@ -3375,8 +3296,7 @@ "name": "LiteJet", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push", - "single_config_entry": true + "iot_class": "local_push" }, "litterrobot": { "name": "Litter-Robot", @@ -3404,14 +3324,13 @@ "local_file": { "name": "Local File", "integration_type": "hub", - "config_flow": true, + "config_flow": false, "iot_class": "local_polling" }, "local_ip": { "integration_type": "hub", "config_flow": true, - "iot_class": "local_polling", - "single_config_entry": true + "iot_class": "local_polling" }, "local_todo": { "integration_type": "hub", @@ -3430,6 +3349,12 @@ "config_flow": false, "iot_class": "cloud_push" }, + "logi_circle": { + "name": "Logi Circle", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "logitech": { "name": "Logitech", "integrations": { @@ -3439,6 +3364,12 @@ "iot_class": "local_push", "name": "Logitech Harmony Hub" }, + "ue_smart_radio": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "cloud_polling", + "name": "Logitech UE Smart Radio" + }, "squeezebox": { "integration_type": "hub", "config_flow": true, @@ -3522,18 +3453,18 @@ "integration_type": "virtual", "supported_by": "motion_blinds" }, - "madvr": { - "name": "madVR Envy", - "integration_type": "device", - "config_flow": true, - "iot_class": "local_push" - }, "mailgun": { "name": "Mailgun", "integration_type": "hub", "config_flow": true, "iot_class": "cloud_push" }, + "manual": { + "name": "Manual Alarm Control Panel", + "integration_type": "hub", + "config_flow": false, + "iot_class": "calculated" + }, "marantz": { "name": "Marantz", "integration_type": "virtual", @@ -3552,9 +3483,9 @@ }, "mastodon": { "name": "Mastodon", - "integration_type": "service", - "config_flow": true, - "iot_class": "cloud_polling" + "integration_type": "hub", + "config_flow": false, + "iot_class": "cloud_push" }, "matrix": { "name": "Matrix", @@ -3634,11 +3565,6 @@ "config_flow": false, "iot_class": "cloud_polling" }, - "mercury_nz": { - "name": "Mercury NZ Limited", - "integration_type": "virtual", - "supported_by": "opower" - }, "message_bird": { "name": "MessageBird", "integration_type": "hub", @@ -3775,11 +3701,6 @@ "config_flow": true, "iot_class": "local_polling" }, - "mini_connected": { - "name": "MINI Connected", - "integration_type": "virtual", - "supported_by": "bmw_connected_drive" - }, "minio": { "name": "Minio", "integration_type": "hub", @@ -3832,11 +3753,11 @@ "config_flow": true, "iot_class": 
"local_push" }, - "monarch_money": { - "name": "Monarch Money", + "mold_indicator": { + "name": "Mold Indicator", "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling" + "config_flow": false, + "iot_class": "local_polling" }, "monessen": { "name": "Monessen", @@ -3899,7 +3820,7 @@ "mpd": { "name": "Music Player Daemon (MPD)", "integration_type": "hub", - "config_flow": true, + "config_flow": false, "iot_class": "local_polling" }, "mqtt": { @@ -3950,12 +3871,6 @@ "iot_class": "cloud_polling", "single_config_entry": true }, - "music_assistant": { - "name": "Music Assistant", - "integration_type": "hub", - "config_flow": true, - "iot_class": "local_push" - }, "mutesync": { "name": "mutesync", "integration_type": "hub", @@ -4022,12 +3937,6 @@ "config_flow": true, "iot_class": "local_push" }, - "nasweb": { - "name": "NASweb", - "integration_type": "hub", - "config_flow": true, - "iot_class": "local_push" - }, "neato": { "name": "Neato Botvac", "integration_type": "hub", @@ -4127,12 +4036,6 @@ "config_flow": true, "iot_class": "local_polling" }, - "nice_go": { - "name": "Nice G.O.", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_push" - }, "nightscout": { "name": "Nightscout", "integration_type": "hub", @@ -4193,13 +4096,6 @@ "config_flow": true, "iot_class": "local_push" }, - "nordpool": { - "name": "Nord Pool", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling", - "single_config_entry": true - }, "norway_air": { "name": "Om Luftkvalitet i Norge (Norway Air)", "integration_type": "hub", @@ -4271,18 +4167,11 @@ "config_flow": false, "iot_class": "local_push" }, - "nyt_games": { - "name": "NYT Games", - "integration_type": "service", - "config_flow": true, - "iot_class": "cloud_polling" - }, "nzbget": { "name": "NZBGet", "integration_type": "hub", "config_flow": true, - "iot_class": "local_polling", - "single_config_entry": true + "iot_class": "local_polling" }, "oasa_telematics": { "name": "OASA Telematics", @@ -4330,8 +4219,7 @@ "name": "Hayward Omnilogic", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling", - "single_config_entry": true + "iot_class": "cloud_polling" }, "oncue": { "name": "Oncue by Kohler", @@ -4343,8 +4231,7 @@ "name": "Ondilo ICO", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling", - "single_config_entry": true + "iot_class": "cloud_polling" }, "onewire": { "name": "1-Wire", @@ -4354,8 +4241,8 @@ }, "onkyo": { "name": "Onkyo", - "integration_type": "device", - "config_flow": true, + "integration_type": "hub", + "config_flow": false, "iot_class": "local_push" }, "onvif": { @@ -4552,8 +4439,7 @@ "name": "OwnTracks", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push", - "single_config_entry": true + "iot_class": "local_push" }, "p1_monitor": { "name": "P1 Monitor", @@ -4561,12 +4447,6 @@ "config_flow": true, "iot_class": "local_polling" }, - "palazzetti": { - "name": "Palazzetti", - "integration_type": "device", - "config_flow": true, - "iot_class": "local_polling" - }, "panasonic": { "name": "Panasonic", "integrations": { @@ -4590,6 +4470,16 @@ "config_flow": false, "iot_class": "local_polling" }, + "panel_custom": { + "name": "Custom Panel", + "integration_type": "hub", + "config_flow": false + }, + "panel_iframe": { + "name": "iframe Panel", + "integration_type": "hub", + "config_flow": false + }, "pcs_lighting": { "name": "PCS Lighting", "integration_type": "virtual", @@ -4681,11 +4571,6 @@ "config_flow": 
false, "iot_class": "local_push" }, - "pinecil": { - "name": "Pinecil", - "integration_type": "virtual", - "supported_by": "iron_os" - }, "ping": { "name": "Ping (ICMP)", "integration_type": "hub", @@ -4769,8 +4654,7 @@ "profiler": { "name": "Profiler", "integration_type": "hub", - "config_flow": true, - "single_config_entry": true + "config_flow": true }, "progettihwsw": { "name": "ProgettiHWSW Automation", @@ -4985,8 +4869,7 @@ "name": "Radio Browser", "integration_type": "service", "config_flow": true, - "iot_class": "cloud_polling", - "single_config_entry": true + "iot_class": "cloud_polling" }, "radiotherm": { "name": "Radio Thermostat", @@ -5161,8 +5044,7 @@ "name": "Rhasspy", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push", - "single_config_entry": true + "iot_class": "local_push" }, "ridwell": { "name": "Ridwell", @@ -5236,23 +5118,6 @@ "config_flow": true, "iot_class": "local_push" }, - "roth": { - "name": "Roth", - "integrations": { - "touchline": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling", - "name": "Roth Touchline" - }, - "touchline_sl": { - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling", - "name": "Roth Touchline SL" - } - } - }, "rova": { "name": "ROVA", "integration_type": "hub", @@ -5278,7 +5143,7 @@ "iot_class": "local_push" }, "ruckus_unleashed": { - "name": "Ruckus", + "name": "Ruckus Unleashed", "integration_type": "hub", "config_flow": true, "iot_class": "local_polling" @@ -5288,7 +5153,7 @@ "integrations": { "russound_rio": { "integration_type": "hub", - "config_flow": true, + "config_flow": false, "iot_class": "local_push", "name": "Russound RIO" }, @@ -5461,12 +5326,6 @@ "config_flow": true, "iot_class": "local_push" }, - "sensoterra": { - "name": "Sensoterra", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling" - }, "sentry": { "name": "Sentry", "integration_type": "service", @@ -5568,12 +5427,6 @@ "config_flow": false, "iot_class": "cloud_push" }, - "simplefin": { - "name": "SimpleFin", - "integration_type": "service", - "config_flow": true, - "iot_class": "cloud_polling" - }, "simplepush": { "name": "Simplepush", "integration_type": "hub", @@ -5614,22 +5467,11 @@ "config_flow": false, "iot_class": "local_push" }, - "sky": { - "name": "Sky", - "integrations": { - "sky_hub": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling", - "name": "Sky Hub" - }, - "sky_remote": { - "integration_type": "device", - "config_flow": true, - "iot_class": "assumed_state", - "name": "Sky Remote Control" - } - } + "sky_hub": { + "name": "Sky Hub", + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_polling" }, "skybeacon": { "name": "Skybeacon", @@ -5715,7 +5557,7 @@ "smarty": { "name": "Salda Smarty", "integration_type": "hub", - "config_flow": true, + "config_flow": false, "iot_class": "local_polling" }, "smhi": { @@ -5724,12 +5566,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "smlight": { - "name": "SMLIGHT SLZB", - "integration_type": "device", - "config_flow": true, - "iot_class": "local_push" - }, "sms": { "name": "SMS notifications via GSM-modem", "integration_type": "hub", @@ -5882,6 +5718,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "spider": { + "name": "Itho Daalderop Spider", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "splunk": { "name": "Splunk", "integration_type": "hub", @@ -6126,6 +5968,10 @@ 
"config_flow": true, "iot_class": "cloud_polling" }, + "tag": { + "integration_type": "hub", + "config_flow": false + }, "tailscale": { "name": "Tailscale", "integration_type": "hub", @@ -6264,12 +6110,6 @@ "config_flow": true, "iot_class": "local_polling", "name": "Tesla Wall Connector" - }, - "tesla_fleet": { - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling", - "name": "Tesla Fleet" } } }, @@ -6427,6 +6267,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "touchline": { + "name": "Roth Touchline", + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_polling" + }, "tplink": { "name": "TP-Link", "integrations": { @@ -6529,12 +6375,6 @@ "config_flow": false, "iot_class": "cloud_polling" }, - "triggercmd": { - "name": "TRIGGERcmd", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling" - }, "tuya": { "name": "Tuya", "integration_type": "hub", @@ -6880,7 +6720,7 @@ "wake_on_lan": { "name": "Wake on LAN", "integration_type": "hub", - "config_flow": true, + "config_flow": false, "iot_class": "local_push" }, "wallbox": { @@ -6929,18 +6769,17 @@ } } }, + "webhook": { + "name": "Webhook", + "integration_type": "hub", + "config_flow": false + }, "webmin": { "name": "Webmin", "integration_type": "device", "config_flow": true, "iot_class": "local_polling" }, - "weheat": { - "name": "Weheat", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling" - }, "wemo": { "name": "Belkin WeMo", "integration_type": "hub", @@ -7000,12 +6839,6 @@ "config_flow": true, "iot_class": "local_push" }, - "wmspro": { - "name": "WMS WebControl pro", - "integration_type": "hub", - "config_flow": true, - "iot_class": "local_polling" - }, "wolflink": { "name": "Wolf SmartSet Service", "integration_type": "hub", @@ -7020,7 +6853,7 @@ "worldclock": { "name": "Worldclock", "integration_type": "hub", - "config_flow": true, + "config_flow": false, "iot_class": "local_push" }, "worldtidesinfo": { @@ -7136,14 +6969,8 @@ "yale_home": { "integration_type": "virtual", "config_flow": false, - "supported_by": "yale", + "supported_by": "august", "name": "Yale Home" - }, - "yale": { - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_push", - "name": "Yale" } } }, @@ -7312,12 +7139,6 @@ } }, "helper": { - "bayesian": { - "name": "Bayesian", - "integration_type": "helper", - "config_flow": false, - "iot_class": "local_polling" - }, "counter": { "integration_type": "helper", "config_flow": false @@ -7327,12 +7148,6 @@ "config_flow": true, "iot_class": "calculated" }, - "filter": { - "name": "Filter", - "integration_type": "helper", - "config_flow": false, - "iot_class": "local_push" - }, "generic_hygrostat": { "integration_type": "helper", "config_flow": true, @@ -7348,11 +7163,6 @@ "config_flow": true, "iot_class": "calculated" }, - "history_stats": { - "integration_type": "helper", - "config_flow": true, - "iot_class": "local_polling" - }, "input_boolean": { "integration_type": "helper", "config_flow": false @@ -7382,23 +7192,13 @@ "config_flow": true, "iot_class": "local_push" }, - "manual": { - "name": "Manual Alarm Control Panel", - "integration_type": "helper", - "config_flow": false, - "iot_class": "calculated" - }, "min_max": { "integration_type": "helper", "config_flow": true, "iot_class": "calculated" }, - "mold_indicator": { - "integration_type": "helper", - "config_flow": true, - "iot_class": "calculated" - }, "random": { + "name": "Random", "integration_type": "helper", "config_flow": 
true, "iot_class": "calculated" @@ -7408,6 +7208,7 @@ "config_flow": false }, "statistics": { + "name": "Statistics", "integration_type": "helper", "config_flow": true, "iot_class": "local_polling" @@ -7429,6 +7230,7 @@ "iot_class": "local_polling" }, "timer": { + "name": "Timer", "integration_type": "helper", "config_flow": false }, @@ -7438,6 +7240,7 @@ "iot_class": "calculated" }, "trend": { + "name": "Trend", "integration_type": "helper", "config_flow": true, "iot_class": "calculated" @@ -7466,7 +7269,6 @@ "google_travel_time", "group", "growatt_server", - "history_stats", "holiday", "homekit_controller", "input_boolean", @@ -7483,25 +7285,21 @@ "min_max", "mobile_app", "moehlenhoff_alpha2", - "mold_indicator", "moon", "nextbus", "nmap_tracker", "plant", "proximity", - "random", "rpi_power", "schedule", "season", "shopping_list", - "statistics", "sun", "switch_as_x", + "tag", "threshold", "time_date", - "timer", "tod", - "trend", "uptime", "utility_meter", "version", diff --git a/homeassistant/generated/languages.py b/homeassistant/generated/languages.py index 7e56952f7a5..feedd373fd9 100644 --- a/homeassistant/generated/languages.py +++ b/homeassistant/generated/languages.py @@ -28,7 +28,6 @@ LANGUAGES = { "fi", "fr", "fy", - "ga", "gl", "gsw", "he", @@ -45,7 +44,6 @@ LANGUAGES = { "lb", "lt", "lv", - "mk", "ml", "nb", "nl", diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 1fbd6337fdb..8efe49b7892 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -267,11 +267,6 @@ HOMEKIT = { } ZEROCONF = { - "_PowerView-G3._tcp.local.": [ - { - "domain": "hunterdouglas_powerview", - }, - ], "_Volumio._tcp.local.": [ { "domain": "volumio", @@ -420,11 +415,6 @@ ZEROCONF = { "domain": "forked_daapd", }, ], - "_deako._tcp.local.": [ - { - "domain": "deako", - }, - ], "_devialet-http._tcp.local.": [ { "domain": "devialet", @@ -524,10 +514,6 @@ ZEROCONF = { "domain": "bosch_shc", "name": "bosch shc*", }, - { - "domain": "lektrico", - "name": "lektrico*", - }, { "domain": "loqed", "name": "loqed*", @@ -603,23 +589,12 @@ ZEROCONF = { "name": "gateway*", }, ], - "_linkplay._tcp.local.": [ - { - "domain": "linkplay", - }, - ], "_lookin._tcp.local.": [ { "domain": "lookin", }, ], "_lutron._tcp.local.": [ - { - "domain": "lutron_caseta", - "properties": { - "SYSTYPE": "hwqs*", - }, - }, { "domain": "lutron_caseta", "properties": { @@ -639,11 +614,6 @@ ZEROCONF = { }, }, ], - "_mass._tcp.local.": [ - { - "domain": "music_assistant", - }, - ], "_matter._tcp.local.": [ { "domain": "matter", @@ -676,11 +646,6 @@ ZEROCONF = { "name": "yeelink-*", }, ], - "_musc._tcp.local.": [ - { - "domain": "bluesound", - }, - ], "_nanoleafapi._tcp.local.": [ { "domain": "nanoleaf", @@ -711,6 +676,11 @@ ZEROCONF = { "domain": "plugwise", }, ], + "_powerview-g3._tcp.local.": [ + { + "domain": "hunterdouglas_powerview", + }, + ], "_powerview._tcp.local.": [ { "domain": "hunterdouglas_powerview", @@ -767,19 +737,11 @@ ZEROCONF = { }, ], "_slzb-06._tcp.local.": [ - { - "domain": "smlight", - }, { "domain": "zha", "name": "slzb-06*", }, ], - "_smoip._tcp.local.": [ - { - "domain": "cambridge_audio", - }, - ], "_sonos._tcp.local.": [ { "domain": "sonos", @@ -809,11 +771,6 @@ ZEROCONF = { "name": "smappee50*", }, ], - "_stream-magic._tcp.local.": [ - { - "domain": "cambridge_audio", - }, - ], "_system-bridge._tcp.local.": [ { "domain": "system_bridge", diff --git a/homeassistant/helpers/aiohttp_client.py b/homeassistant/helpers/aiohttp_client.py index 
f01ae325875..5c4ead4e611 100644 --- a/homeassistant/helpers/aiohttp_client.py +++ b/homeassistant/helpers/aiohttp_client.py @@ -5,7 +5,6 @@ from __future__ import annotations import asyncio from collections.abc import Awaitable, Callable from contextlib import suppress -import socket from ssl import SSLContext import sys from types import MappingProxyType @@ -14,7 +13,6 @@ from typing import TYPE_CHECKING, Any import aiohttp from aiohttp import web from aiohttp.hdrs import CONTENT_TYPE, USER_AGENT -from aiohttp.resolver import AsyncResolver from aiohttp.web_exceptions import HTTPBadGateway, HTTPGatewayTimeout from homeassistant import config_entries @@ -25,6 +23,7 @@ from homeassistant.util import ssl as ssl_util from homeassistant.util.hass_dict import HassKey from homeassistant.util.json import json_loads +from .backports.aiohttp_resolver import AsyncResolver from .frame import warn_use from .json import json_dumps @@ -32,11 +31,11 @@ if TYPE_CHECKING: from aiohttp.typedefs import JSONDecoder -DATA_CONNECTOR: HassKey[dict[tuple[bool, int, str], aiohttp.BaseConnector]] = HassKey( +DATA_CONNECTOR: HassKey[dict[tuple[bool, int], aiohttp.BaseConnector]] = HassKey( "aiohttp_connector" ) -DATA_CLIENTSESSION: HassKey[dict[tuple[bool, int, str], aiohttp.ClientSession]] = ( - HassKey("aiohttp_clientsession") +DATA_CLIENTSESSION: HassKey[dict[tuple[bool, int], aiohttp.ClientSession]] = HassKey( + "aiohttp_clientsession" ) SERVER_SOFTWARE = ( @@ -44,13 +43,11 @@ SERVER_SOFTWARE = ( f"aiohttp/{aiohttp.__version__} Python/{sys.version_info[0]}.{sys.version_info[1]}" ) -ENABLE_CLEANUP_CLOSED = (3, 13, 0) <= sys.version_info < ( - 3, - 13, - 1, -) or sys.version_info < (3, 12, 7) -# Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960 -# which first appeared in Python 3.12.7 and 3.13.1 +ENABLE_CLEANUP_CLOSED = not (3, 11, 1) <= sys.version_info < (3, 11, 4) +# Enabling cleanup closed on python 3.11.1+ leaks memory relatively quickly +# see https://github.com/aio-libs/aiohttp/issues/7252 +# aiohttp interacts poorly with https://github.com/python/cpython/pull/98540 +# The issue was fixed in 3.11.4 via https://github.com/python/cpython/pull/104485 WARN_CLOSE_MSG = "closes the Home Assistant aiohttp session" @@ -85,16 +82,13 @@ class HassClientResponse(aiohttp.ClientResponse): @callback @bind_hass def async_get_clientsession( - hass: HomeAssistant, - verify_ssl: bool = True, - family: socket.AddressFamily = socket.AF_UNSPEC, - ssl_cipher: ssl_util.SSLCipherList = ssl_util.SSLCipherList.PYTHON_DEFAULT, + hass: HomeAssistant, verify_ssl: bool = True, family: int = 0 ) -> aiohttp.ClientSession: """Return default aiohttp ClientSession. This method must be run in the event loop. """ - session_key = _make_key(verify_ssl, family, ssl_cipher) + session_key = _make_key(verify_ssl, family) sessions = hass.data.setdefault(DATA_CLIENTSESSION, {}) if session_key not in sessions: @@ -103,7 +97,6 @@ def async_get_clientsession( verify_ssl, auto_cleanup_method=_async_register_default_clientsession_shutdown, family=family, - ssl_cipher=ssl_cipher, ) sessions[session_key] = session else: @@ -118,8 +111,7 @@ def async_create_clientsession( hass: HomeAssistant, verify_ssl: bool = True, auto_cleanup: bool = True, - family: socket.AddressFamily = socket.AF_UNSPEC, - ssl_cipher: ssl_util.SSLCipherList = ssl_util.SSLCipherList.PYTHON_DEFAULT, + family: int = 0, **kwargs: Any, ) -> aiohttp.ClientSession: """Create a new ClientSession with kwargs, i.e. for cookies. 
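Both the client-session and connector helpers in this file cache one object per `_make_key(verify_ssl, family)` tuple in `hass.data`, so callers asking for the same TLS/address-family combination share a pool. A reduced, framework-free sketch of that keyed-cache shape; the cache dict and stand-in factory are illustrative, not part of this diff:

# Illustrative only: the same keyed-cache shape, without aiohttp or hass.data.
_SESSIONS: dict[tuple[bool, int], object] = {}

def make_key(verify_ssl: bool = True, family: int = 0) -> tuple[bool, int]:
    return (verify_ssl, family)

def get_clientsession(verify_ssl: bool = True, family: int = 0) -> object:
    key = make_key(verify_ssl, family)
    if key not in _SESSIONS:
        # In the real helper this builds an aiohttp.ClientSession wired to a
        # connector cached under the same key.
        _SESSIONS[key] = object()
    return _SESSIONS[key]

assert get_clientsession() is get_clientsession()           # same settings share a session
assert get_clientsession() is not get_clientsession(False)  # verify_ssl=False gets its own pool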
@@ -140,7 +132,6 @@ def async_create_clientsession( verify_ssl, auto_cleanup_method=auto_cleanup_method, family=family, - ssl_cipher=ssl_cipher, **kwargs, ) @@ -151,13 +142,12 @@ def _async_create_clientsession( verify_ssl: bool = True, auto_cleanup_method: Callable[[HomeAssistant, aiohttp.ClientSession], None] | None = None, - family: socket.AddressFamily = socket.AF_UNSPEC, - ssl_cipher: ssl_util.SSLCipherList = ssl_util.SSLCipherList.PYTHON_DEFAULT, + family: int = 0, **kwargs: Any, ) -> aiohttp.ClientSession: """Create a new ClientSession with kwargs, i.e. for cookies.""" clientsession = aiohttp.ClientSession( - connector=_async_get_connector(hass, verify_ssl, family, ssl_cipher), + connector=_async_get_connector(hass, verify_ssl, family), json_serialize=json_dumps, response_class=HassClientResponse, **kwargs, @@ -285,53 +275,31 @@ def _async_register_default_clientsession_shutdown( @callback -def _make_key( - verify_ssl: bool = True, - family: socket.AddressFamily = socket.AF_UNSPEC, - ssl_cipher: ssl_util.SSLCipherList = ssl_util.SSLCipherList.PYTHON_DEFAULT, -) -> tuple[bool, socket.AddressFamily, ssl_util.SSLCipherList]: +def _make_key(verify_ssl: bool = True, family: int = 0) -> tuple[bool, int]: """Make a key for connector or session pool.""" - return (verify_ssl, family, ssl_cipher) - - -class HomeAssistantTCPConnector(aiohttp.TCPConnector): - """Home Assistant TCP Connector. - - Same as aiohttp.TCPConnector but with a longer cleanup_closed timeout. - - By default the cleanup_closed timeout is 2 seconds. This is too short - for Home Assistant since we churn through a lot of connections. We set - it to 60 seconds to reduce the overhead of aborting TLS connections - that are likely already closed. - """ - - # abort transport after 60 seconds (cleanup broken connections) - _cleanup_closed_period = 60.0 + return (verify_ssl, family) @callback def _async_get_connector( - hass: HomeAssistant, - verify_ssl: bool = True, - family: socket.AddressFamily = socket.AF_UNSPEC, - ssl_cipher: ssl_util.SSLCipherList = ssl_util.SSLCipherList.PYTHON_DEFAULT, + hass: HomeAssistant, verify_ssl: bool = True, family: int = 0 ) -> aiohttp.BaseConnector: """Return the connector pool for aiohttp. This method must be run in the event loop. 
""" - connector_key = _make_key(verify_ssl, family, ssl_cipher) + connector_key = _make_key(verify_ssl, family) connectors = hass.data.setdefault(DATA_CONNECTOR, {}) if connector_key in connectors: return connectors[connector_key] if verify_ssl: - ssl_context: SSLContext = ssl_util.client_context(ssl_cipher) + ssl_context: SSLContext = ssl_util.get_default_context() else: - ssl_context = ssl_util.client_context_no_verify(ssl_cipher) + ssl_context = ssl_util.get_default_no_verify_context() - connector = HomeAssistantTCPConnector( + connector = aiohttp.TCPConnector( family=family, enable_cleanup_closed=ENABLE_CLEANUP_CLOSED, ssl=ssl_context, diff --git a/homeassistant/helpers/area_registry.py b/homeassistant/helpers/area_registry.py index f74296a9fb1..975750ebbdd 100644 --- a/homeassistant/helpers/area_registry.py +++ b/homeassistant/helpers/area_registry.py @@ -5,12 +5,11 @@ from __future__ import annotations from collections import defaultdict from collections.abc import Iterable import dataclasses -from dataclasses import dataclass, field -from datetime import datetime -from typing import TYPE_CHECKING, Any, Literal, TypedDict +from functools import cached_property +from typing import Any, Literal, TypedDict from homeassistant.core import HomeAssistant, callback -from homeassistant.util.dt import utc_from_timestamp, utcnow +from homeassistant.util import slugify from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey @@ -19,26 +18,20 @@ from .json import json_bytes, json_fragment from .normalized_name_base_registry import ( NormalizedNameBaseRegistryEntry, NormalizedNameBaseRegistryItems, + normalize_name, ) from .registry import BaseRegistry, RegistryIndexType from .singleton import singleton from .storage import Store from .typing import UNDEFINED, UndefinedType -if TYPE_CHECKING: - # mypy cannot workout _cache Protocol with dataclasses - from propcache import cached_property as under_cached_property -else: - from propcache import under_cached_property - - DATA_REGISTRY: HassKey[AreaRegistry] = HassKey("area_registry") EVENT_AREA_REGISTRY_UPDATED: EventType[EventAreaRegistryUpdatedData] = EventType( "area_registry_updated" ) STORAGE_KEY = "core.area_registry" STORAGE_VERSION_MAJOR = 1 -STORAGE_VERSION_MINOR = 7 +STORAGE_VERSION_MINOR = 6 class _AreaStoreData(TypedDict): @@ -51,8 +44,6 @@ class _AreaStoreData(TypedDict): labels: list[str] name: str picture: str | None - created_at: str - modified_at: str class AreasRegistryStoreData(TypedDict): @@ -68,7 +59,7 @@ class EventAreaRegistryUpdatedData(TypedDict): area_id: str -@dataclass(frozen=True, kw_only=True, slots=True) +@dataclasses.dataclass(frozen=True, kw_only=True) class AreaEntry(NormalizedNameBaseRegistryEntry): """Area Registry Entry.""" @@ -76,11 +67,10 @@ class AreaEntry(NormalizedNameBaseRegistryEntry): floor_id: str | None icon: str | None id: str - labels: set[str] = field(default_factory=set) + labels: set[str] = dataclasses.field(default_factory=set) picture: str | None - _cache: dict[str, Any] = field(default_factory=dict, compare=False, init=False) - @under_cached_property + @cached_property def json_fragment(self) -> json_fragment: """Return a JSON representation of this AreaEntry.""" return json_fragment( @@ -93,8 +83,6 @@ class AreaEntry(NormalizedNameBaseRegistryEntry): "labels": list(self.labels), "name": self.name, "picture": self.picture, - "created_at": self.created_at.timestamp(), - "modified_at": self.modified_at.timestamp(), } ) ) @@ -137,12 +125,6 @@ class 
AreaRegistryStore(Store[AreasRegistryStoreData]): for area in old_data["areas"]: area["labels"] = [] - if old_minor_version < 7: - # Version 1.7 adds created_at and modiefied_at - created_at = utc_from_timestamp(0).isoformat() - for area in old_data["areas"]: - area["created_at"] = area["modified_at"] = created_at - if old_major_version > 1: raise NotImplementedError return old_data # type: ignore[return-value] @@ -159,23 +141,22 @@ class AreaRegistryItems(NormalizedNameBaseRegistryItems[AreaEntry]): def _index_entry(self, key: str, entry: AreaEntry) -> None: """Index an entry.""" - super()._index_entry(key, entry) if entry.floor_id is not None: self._floors_index[entry.floor_id][key] = True for label in entry.labels: self._labels_index[label][key] = True + super()._index_entry(key, entry) def _unindex_entry( self, key: str, replacement_entry: AreaEntry | None = None ) -> None: - # always call base class before other indices - super()._unindex_entry(key, replacement_entry) entry = self.data[key] if labels := entry.labels: for label in labels: self._unindex_entry_value(key, label, self._labels_index) if floor_id := entry.floor_id: self._unindex_entry_value(key, floor_id, self._floors_index) + return super()._unindex_entry(key, replacement_entry) def get_areas_for_label(self, label: str) -> list[AreaEntry]: """Get areas for label.""" @@ -231,10 +212,6 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): return area return self.async_create(name) - def _generate_id(self, name: str) -> str: - """Generate area ID.""" - return self.areas.generate_id_from_name(name) - @callback def async_create( self, @@ -248,28 +225,28 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): ) -> AreaEntry: """Create a new area.""" self.hass.verify_event_loop_thread("area_registry.async_create") + normalized_name = normalize_name(name) - if area := self.async_get_area_by_name(name): - raise ValueError( - f"The name {name} ({area.normalized_name}) is already in use" - ) + if self.async_get_area_by_name(name): + raise ValueError(f"The name {name} ({normalized_name}) is already in use") + area_id = self._generate_area_id(name) area = AreaEntry( aliases=aliases or set(), floor_id=floor_id, icon=icon, - id=self._generate_id(name), + id=area_id, labels=labels or set(), name=name, + normalized_name=normalized_name, picture=picture, ) - area_id = area.id - self.areas[area_id] = area + assert area.id is not None + self.areas[area.id] = area self.async_schedule_save() - self.hass.bus.async_fire_internal( EVENT_AREA_REGISTRY_UPDATED, - EventAreaRegistryUpdatedData(action="create", area_id=area_id), + EventAreaRegistryUpdatedData(action="create", area_id=area.id), ) return area @@ -338,28 +315,27 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): """Update name of area.""" old = self.areas[area_id] - new_values: dict[str, Any] = { - attr_name: value - for attr_name, value in ( - ("aliases", aliases), - ("icon", icon), - ("labels", labels), - ("picture", picture), - ("floor_id", floor_id), - ) - if value is not UNDEFINED and value != getattr(old, attr_name) - } + new_values = {} + + for attr_name, value in ( + ("aliases", aliases), + ("icon", icon), + ("labels", labels), + ("picture", picture), + ("floor_id", floor_id), + ): + if value is not UNDEFINED and value != getattr(old, attr_name): + new_values[attr_name] = value if name is not UNDEFINED and name != old.name: new_values["name"] = name + new_values["normalized_name"] = normalize_name(name) if not new_values: return old - new_values["modified_at"] 
= utcnow() - self.hass.verify_event_loop_thread("area_registry.async_update") - new = self.areas[area_id] = dataclasses.replace(old, **new_values) + new = self.areas[area_id] = dataclasses.replace(old, **new_values) # type: ignore[arg-type] self.async_schedule_save() return new @@ -375,6 +351,7 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): if data is not None: for area in data["areas"]: assert area["name"] is not None and area["id"] is not None + normalized_name = normalize_name(area["name"]) areas[area["id"]] = AreaEntry( aliases=set(area["aliases"]), floor_id=area["floor_id"], @@ -382,9 +359,8 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): id=area["id"], labels=set(area["labels"]), name=area["name"], + normalized_name=normalized_name, picture=area["picture"], - created_at=datetime.fromisoformat(area["created_at"]), - modified_at=datetime.fromisoformat(area["modified_at"]), ) self.areas = areas @@ -403,13 +379,20 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): "labels": list(entry.labels), "name": entry.name, "picture": entry.picture, - "created_at": entry.created_at.isoformat(), - "modified_at": entry.modified_at.isoformat(), } for entry in self.areas.values() ] } + def _generate_area_id(self, name: str) -> str: + """Generate area ID.""" + suggestion = suggestion_base = slugify(name) + tries = 1 + while suggestion in self.areas: + tries += 1 + suggestion = f"{suggestion_base}_{tries}" + return suggestion + @callback def _async_setup_cleanup(self) -> None: """Set up the area registry cleanup.""" diff --git a/homeassistant/helpers/backports/__init__.py b/homeassistant/helpers/backports/__init__.py new file mode 100644 index 00000000000..e672fe1d3d2 --- /dev/null +++ b/homeassistant/helpers/backports/__init__.py @@ -0,0 +1 @@ +"""Backports for helpers.""" diff --git a/homeassistant/helpers/backports/aiohttp_resolver.py b/homeassistant/helpers/backports/aiohttp_resolver.py new file mode 100644 index 00000000000..efa4ba4bb85 --- /dev/null +++ b/homeassistant/helpers/backports/aiohttp_resolver.py @@ -0,0 +1,116 @@ +"""Backport of aiohttp's AsyncResolver for Home Assistant. + +This is a backport of the AsyncResolver class from aiohttp 3.10. + +Before aiohttp 3.10, on system with IPv6 support, AsyncResolver would not fallback +to providing A records when AAAA records were not available. + +Additionally, unlike the ThreadedResolver, AsyncResolver +did not handle link-local addresses correctly. +""" + +from __future__ import annotations + +import asyncio +import socket +import sys +from typing import Any, TypedDict + +import aiodns +from aiohttp.abc import AbstractResolver + +# This is a backport of https://github.com/aio-libs/aiohttp/pull/8270 +# This can be removed once aiohttp 3.10 is the minimum supported version. + +_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV +_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) + + +class ResolveResult(TypedDict): + """Resolve result. + + This is the result returned from an AbstractResolver's + resolve method. + + :param hostname: The hostname that was provided. + :param host: The IP address that was resolved. + :param port: The port that was resolved. + :param family: The address family that was resolved. + :param proto: The protocol that was resolved. + :param flags: The flags that were resolved. 
+ """ + + hostname: str + host: str + port: int + family: int + proto: int + flags: int + + +class AsyncResolver(AbstractResolver): + """Use the `aiodns` package to make asynchronous DNS lookups.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Initialize the resolver.""" + if aiodns is None: + raise RuntimeError("Resolver requires aiodns library") + + self._loop = asyncio.get_running_loop() + self._resolver = aiodns.DNSResolver(*args, loop=self._loop, **kwargs) # type: ignore[misc] + + async def resolve( # type: ignore[override] + self, host: str, port: int = 0, family: int = socket.AF_INET + ) -> list[ResolveResult]: + """Resolve a host name to an IP address.""" + try: + resp = await self._resolver.getaddrinfo( + host, + port=port, + type=socket.SOCK_STREAM, + family=family, # type: ignore[arg-type] + flags=socket.AI_ADDRCONFIG, + ) + except aiodns.error.DNSError as exc: + msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" + raise OSError(msg) from exc + hosts: list[ResolveResult] = [] + for node in resp.nodes: + address: tuple[bytes, int] | tuple[bytes, int, int, int] = node.addr + family = node.family + if family == socket.AF_INET6: + if len(address) > 3 and address[3] and _SUPPORTS_SCOPE_ID: + # This is essential for link-local IPv6 addresses. + # LL IPv6 is a VERY rare case. Strictly speaking, we should use + # getnameinfo() unconditionally, but performance makes sense. + result = await self._resolver.getnameinfo( + (address[0].decode("ascii"), *address[1:]), + _NUMERIC_SOCKET_FLAGS, + ) + resolved_host = result.node + else: + resolved_host = address[0].decode("ascii") + port = address[1] + else: # IPv4 + assert family == socket.AF_INET + resolved_host = address[0].decode("ascii") + port = address[1] + hosts.append( + ResolveResult( + hostname=host, + host=resolved_host, + port=port, + family=family, + proto=0, + flags=_NUMERIC_SOCKET_FLAGS, + ) + ) + + if not hosts: + raise OSError("DNS lookup failed") + + return hosts + + async def close(self) -> None: + """Close the resolver.""" + self._resolver.cancel() diff --git a/homeassistant/helpers/category_registry.py b/homeassistant/helpers/category_registry.py index 41fa82084b3..6498859e2ab 100644 --- a/homeassistant/helpers/category_registry.py +++ b/homeassistant/helpers/category_registry.py @@ -5,11 +5,9 @@ from __future__ import annotations from collections.abc import Iterable import dataclasses from dataclasses import dataclass, field -from datetime import datetime -from typing import Any, Literal, TypedDict +from typing import Literal, TypedDict from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey from homeassistant.util.ulid import ulid_now @@ -25,16 +23,13 @@ EVENT_CATEGORY_REGISTRY_UPDATED: EventType[EventCategoryRegistryUpdatedData] = ( ) STORAGE_KEY = "core.category_registry" STORAGE_VERSION_MAJOR = 1 -STORAGE_VERSION_MINOR = 2 class _CategoryStoreData(TypedDict): """Data type for individual category. 
Used in CategoryRegistryStoreData.""" category_id: str - created_at: str icon: str | None - modified_at: str name: str @@ -60,36 +55,10 @@ class CategoryEntry: """Category registry entry.""" category_id: str = field(default_factory=ulid_now) - created_at: datetime = field(default_factory=utcnow) icon: str | None = None - modified_at: datetime = field(default_factory=utcnow) name: str -class CategoryRegistryStore(Store[CategoryRegistryStoreData]): - """Store category registry data.""" - - async def _async_migrate_func( - self, - old_major_version: int, - old_minor_version: int, - old_data: dict[str, dict[str, list[dict[str, Any]]]], - ) -> CategoryRegistryStoreData: - """Migrate to the new version.""" - if old_major_version > STORAGE_VERSION_MAJOR: - raise ValueError("Can't migrate to future version") - - if old_major_version == 1: - if old_minor_version < 2: - # Version 1.2 implements migration and adds created_at and modified_at - created_at = utc_from_timestamp(0).isoformat() - for categories in old_data["categories"].values(): - for category in categories: - category["created_at"] = category["modified_at"] = created_at - - return old_data # type: ignore[return-value] - - class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): """Class to hold a registry of categories by scope.""" @@ -97,12 +66,11 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): """Initialize the category registry.""" self.hass = hass self.categories: dict[str, dict[str, CategoryEntry]] = {} - self._store = CategoryRegistryStore( + self._store = Store( hass, STORAGE_VERSION_MAJOR, STORAGE_KEY, atomic_writes=True, - minor_version=STORAGE_VERSION_MINOR, ) @callback @@ -177,7 +145,7 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): ) -> CategoryEntry: """Update name or icon of the category.""" old = self.categories[scope][category_id] - changes: dict[str, Any] = {} + changes = {} if icon is not UNDEFINED and icon != old.icon: changes["icon"] = icon @@ -189,10 +157,8 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): if not changes: return old - changes["modified_at"] = utcnow() - self.hass.verify_event_loop_thread("category_registry.async_update") - new = self.categories[scope][category_id] = dataclasses.replace(old, **changes) + new = self.categories[scope][category_id] = dataclasses.replace(old, **changes) # type: ignore[arg-type] self.async_schedule_save() self.hass.bus.async_fire_internal( @@ -214,9 +180,7 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): category_entries[scope] = { category["category_id"]: CategoryEntry( category_id=category["category_id"], - created_at=datetime.fromisoformat(category["created_at"]), icon=category["icon"], - modified_at=datetime.fromisoformat(category["modified_at"]), name=category["name"], ) for category in categories @@ -232,9 +196,7 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): scope: [ { "category_id": entry.category_id, - "created_at": entry.created_at.isoformat(), "icon": entry.icon, - "modified_at": entry.modified_at.isoformat(), "name": entry.name, } for entry in entries.values() diff --git a/homeassistant/helpers/check_config.py b/homeassistant/helpers/check_config.py index 4b5e2f277a0..0626e0033c4 100644 --- a/homeassistant/helpers/check_config.py +++ b/homeassistant/helpers/check_config.py @@ -13,6 +13,7 @@ import voluptuous as vol from homeassistant import loader from homeassistant.config import ( # type: ignore[attr-defined] CONF_PACKAGES, + CORE_CONFIG_SCHEMA, 
YAML_CONFIG_FILE, config_per_platform, extract_domain_configs, @@ -21,8 +22,7 @@ from homeassistant.config import ( # type: ignore[attr-defined] load_yaml_config_file, merge_packages_config, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.core_config import CORE_CONFIG_SCHEMA +from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.requirements import ( RequirementsNotFound, @@ -157,10 +157,10 @@ async def async_check_ha_config_file( # noqa: C901 return result.add_error(f"Error loading {config_path}: {err}") # Extract and validate core [homeassistant] config - core_config = config.pop(HOMEASSISTANT_DOMAIN, {}) + core_config = config.pop(HA_DOMAIN, {}) try: core_config = CORE_CONFIG_SCHEMA(core_config) - result[HOMEASSISTANT_DOMAIN] = core_config + result[HA_DOMAIN] = core_config # Merge packages await merge_packages_config( @@ -168,8 +168,8 @@ async def async_check_ha_config_file( # noqa: C901 ) except vol.Invalid as err: result.add_error( - format_schema_error(hass, err, HOMEASSISTANT_DOMAIN, core_config), - HOMEASSISTANT_DOMAIN, + format_schema_error(hass, err, HA_DOMAIN, core_config), + HA_DOMAIN, core_config, ) core_config = {} @@ -234,7 +234,7 @@ async def async_check_ha_config_file( # noqa: C901 config_schema = getattr(component, "CONFIG_SCHEMA", None) if config_schema is not None: try: - validated_config = await cv.async_validate(hass, config_schema, config) + validated_config = config_schema(config) # Don't fail if the validator removed the domain from the config if domain in validated_config: result[domain] = validated_config[domain] @@ -255,9 +255,7 @@ async def async_check_ha_config_file( # noqa: C901 for p_name, p_config in config_per_platform(config, domain): # Validate component specific platform schema try: - p_validated = await cv.async_validate( - hass, component_platform_schema, p_config - ) + p_validated = component_platform_schema(p_config) except vol.Invalid as ex: _comp_error(ex, domain, p_config, p_config) continue diff --git a/homeassistant/helpers/collection.py b/homeassistant/helpers/collection.py index 86d3450c3a0..036aaacf0e9 100644 --- a/homeassistant/helpers/collection.py +++ b/homeassistant/helpers/collection.py @@ -7,7 +7,6 @@ import asyncio from collections.abc import Awaitable, Callable, Coroutine, Iterable from dataclasses import dataclass from functools import partial -from hashlib import md5 from itertools import groupby import logging from operator import attrgetter @@ -26,7 +25,6 @@ from homeassistant.util import slugify from . 
import entity_registry from .entity import Entity from .entity_component import EntityComponent -from .json import json_bytes from .storage import Store from .typing import ConfigType, VolDictType @@ -52,7 +50,6 @@ class CollectionChange: change_type: str item_id: str item: Any - item_hash: str | None = None type ChangeListener = Callable[ @@ -276,9 +273,7 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( await self.notify_changes( [ - CollectionChange( - CHANGE_ADDED, item[CONF_ID], item, self._hash_item(item) - ) + CollectionChange(CHANGE_ADDED, item[CONF_ID], item) for item in raw_storage["items"] ] ) @@ -318,16 +313,7 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( item = self._create_item(item_id, validated_data) self.data[item_id] = item self._async_schedule_save() - await self.notify_changes( - [ - CollectionChange( - CHANGE_ADDED, - item_id, - item, - self._hash_item(self._serialize_item(item_id, item)), - ) - ] - ) + await self.notify_changes([CollectionChange(CHANGE_ADDED, item_id, item)]) return item async def async_update_item(self, item_id: str, updates: dict) -> _ItemT: @@ -345,16 +331,7 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( self.data[item_id] = updated self._async_schedule_save() - await self.notify_changes( - [ - CollectionChange( - CHANGE_UPDATED, - item_id, - updated, - self._hash_item(self._serialize_item(item_id, updated)), - ) - ] - ) + await self.notify_changes([CollectionChange(CHANGE_UPDATED, item_id, updated)]) return self.data[item_id] @@ -388,10 +365,6 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( def _data_to_save(self) -> _StoreT: """Return JSON-compatible date for storing to file.""" - def _hash_item(self, item: dict) -> str: - """Return a hash of the item.""" - return md5(json_bytes(item)).hexdigest() - class DictStorageCollection(StorageCollection[dict, SerializedStorageCollection]): """A specialized StorageCollection where the items are untyped dicts.""" @@ -491,10 +464,6 @@ class _CollectionLifeCycle(Generic[_EntityT]): async def _update_entity(self, change_set: CollectionChange) -> None: if entity := self.entities.get(change_set.item_id): - if change_set.item_hash: - self.ent_reg.async_update_entity_options( - entity.entity_id, "collection", {"hash": change_set.item_hash} - ) await entity.async_update_config(change_set.item) async def _collection_changed(self, change_set: Iterable[CollectionChange]) -> None: @@ -673,8 +642,8 @@ class StorageCollectionWebsocket[_StorageCollectionT: StorageCollection]: } for change in change_set ] - for conn, msg_id in self._subscribers: - conn.send_message(websocket_api.event_message(msg_id, json_msg)) + for connection, msg_id in self._subscribers: + connection.send_message(websocket_api.event_message(msg_id, json_msg)) if not self._subscribers: self._remove_subscription = ( diff --git a/homeassistant/helpers/condition.py b/homeassistant/helpers/condition.py index 86965f86d40..e15b40a78df 100644 --- a/homeassistant/helpers/condition.py +++ b/homeassistant/helpers/condition.py @@ -4,15 +4,15 @@ from __future__ import annotations import asyncio from collections import deque -from collections.abc import Callable, Container, Generator +from collections.abc import Callable, Container from contextlib import contextmanager from datetime import datetime, time as dt_time, timedelta import functools as ft -import logging import re import sys from typing import Any, Protocol, cast +from typing_extensions import Generator 
import voluptuous as vol from homeassistant.components import zone as zone_cmp @@ -61,7 +61,7 @@ import homeassistant.util.dt as dt_util from . import config_validation as cv, entity_registry as er from .sun import get_astral_event_date -from .template import Template, render_complex +from .template import Template, attach as template_attach, render_complex from .trace import ( TraceElement, trace_append_element, @@ -511,6 +511,9 @@ def async_numeric_state_from_config(config: ConfigType) -> ConditionCheckerType: hass: HomeAssistant, variables: TemplateVarsType = None ) -> bool: """Test numeric state condition.""" + if value_template is not None: + value_template.hass = hass + errors = [] for index, entity_id in enumerate(entity_ids): try: @@ -628,6 +631,7 @@ def state_from_config(config: ConfigType) -> ConditionCheckerType: @trace_condition_function def if_state(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool: """Test if condition.""" + template_attach(hass, for_period) errors = [] result: bool = match != ENTITY_MATCH_ANY for index, entity_id in enumerate(entity_ids): @@ -789,6 +793,8 @@ def async_template_from_config(config: ConfigType) -> ConditionCheckerType: @trace_condition_function def template_if(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool: """Validate template based if-condition.""" + value_template.hass = hass + return async_template(hass, value_template, variables) return template_if @@ -1065,46 +1071,6 @@ async def async_validate_conditions_config( return [await async_validate_condition_config(hass, cond) for cond in conditions] -async def async_conditions_from_config( - hass: HomeAssistant, - condition_configs: list[ConfigType], - logger: logging.Logger, - name: str, -) -> Callable[[TemplateVarsType], bool]: - """AND all conditions.""" - checks: list[ConditionCheckerType] = [ - await async_from_config(hass, condition_config) - for condition_config in condition_configs - ] - - def check_conditions(variables: TemplateVarsType = None) -> bool: - """AND all conditions.""" - errors: list[ConditionErrorIndex] = [] - for index, check in enumerate(checks): - try: - with trace_path(["condition", str(index)]): - if check(hass, variables) is False: - return False - except ConditionError as ex: - errors.append( - ConditionErrorIndex( - "condition", index=index, total=len(checks), error=ex - ) - ) - - if errors: - logger.warning( - "Error evaluating condition in '%s':\n%s", - name, - ConditionErrorContainer("condition", errors=errors), - ) - return False - - return True - - return check_conditions - - @callback def async_extract_entities(config: ConfigType | Template) -> set[str]: """Extract entities from a condition.""" diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 2b35ebade76..a28c81e6da9 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -4,9 +4,8 @@ # with PEP 695 syntax. Fixed in Python 3.13. 
# from __future__ import annotations -from collections.abc import Callable, Hashable, Mapping +from collections.abc import Callable, Hashable import contextlib -from contextvars import ContextVar from datetime import ( date as date_sys, datetime as datetime_sys, @@ -14,7 +13,6 @@ from datetime import ( timedelta, ) from enum import Enum, StrEnum -import functools import logging from numbers import Number import os @@ -22,7 +20,6 @@ import re from socket import ( # type: ignore[attr-defined] # private, not in typeshed _GLOBAL_DEFAULT_TIMEOUT, ) -import threading from typing import Any, cast, overload from urllib.parse import urlparse from uuid import UUID @@ -37,7 +34,6 @@ from homeassistant.const import ( ATTR_FLOOR_ID, ATTR_LABEL_ID, CONF_ABOVE, - CONF_ACTION, CONF_ALIAS, CONF_ATTRIBUTE, CONF_BELOW, @@ -81,8 +77,6 @@ from homeassistant.const import ( CONF_TARGET, CONF_THEN, CONF_TIMEOUT, - CONF_TRIGGER, - CONF_TRIGGERS, CONF_UNTIL, CONF_VALUE_TEMPLATE, CONF_VARIABLES, @@ -99,7 +93,6 @@ from homeassistant.const import ( ) from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, - HomeAssistant, async_get_hass, async_get_hass_or_none, split_entity_id, @@ -120,51 +113,6 @@ from .typing import VolDictType, VolSchemaType TIME_PERIOD_ERROR = "offset {} should be format 'HH:MM', 'HH:MM:SS' or 'HH:MM:SS.F'" -class MustValidateInExecutor(HomeAssistantError): - """Raised when validation must happen in an executor thread.""" - - -class _Hass(threading.local): - """Container which makes a HomeAssistant instance available to validators.""" - - hass: HomeAssistant | None = None - - -_hass = _Hass() -"""Set when doing async friendly schema validation.""" - - -def _async_get_hass_or_none() -> HomeAssistant | None: - """Return the HomeAssistant instance or None. - - First tries core.async_get_hass_or_none, then _hass which is - set when doing async friendly schema validation. - """ - return async_get_hass_or_none() or _hass.hass - - -_validating_async: ContextVar[bool] = ContextVar("_validating_async", default=False) -"""Set to True when doing async friendly schema validation.""" - - -def not_async_friendly[**_P, _R](validator: Callable[_P, _R]) -> Callable[_P, _R]: - """Mark a validator as not async friendly. - - This makes validation happen in an executor thread if validation is done by - async_validate, otherwise does nothing. 
- """ - - @functools.wraps(validator) - def _not_async_friendly(*args: _P.args, **kwargs: _P.kwargs) -> _R: - if _validating_async.get() and async_get_hass_or_none(): - # Raise if doing async friendly validation and validation - # is happening in the event loop - raise MustValidateInExecutor - return validator(*args, **kwargs) - - return _not_async_friendly - - class UrlProtocolSchema(StrEnum): """Valid URL protocol schema values.""" @@ -268,7 +216,6 @@ def whitespace(value: Any) -> str: raise vol.Invalid(f"contains non-whitespace: {value}") -@not_async_friendly def isdevice(value: Any) -> str: """Validate that value is a real device.""" try: @@ -310,7 +257,6 @@ def is_regex(value: Any) -> re.Pattern[Any]: return r -@not_async_friendly def isfile(value: Any) -> str: """Validate that the value is an existing file.""" if value is None: @@ -324,7 +270,6 @@ def isfile(value: Any) -> str: return file_in -@not_async_friendly def isdir(value: Any) -> str: """Validate that the value is an existing dir.""" if value is None: @@ -717,19 +662,8 @@ def template(value: Any | None) -> template_helper.Template: raise vol.Invalid("template value is None") if isinstance(value, (list, dict, template_helper.Template)): raise vol.Invalid("template value should be a string") - if not (hass := _async_get_hass_or_none()): - # pylint: disable-next=import-outside-toplevel - from .frame import ReportBehavior, report_usage - report_usage( - ( - "validates schema outside the event loop, " - "which will stop working in HA Core 2025.10" - ), - core_behavior=ReportBehavior.LOG, - ) - - template_value = template_helper.Template(str(value), hass) + template_value = template_helper.Template(str(value), async_get_hass_or_none()) try: template_value.ensure_valid() @@ -746,19 +680,8 @@ def dynamic_template(value: Any | None) -> template_helper.Template: raise vol.Invalid("template value should be a string") if not template_helper.is_template_string(str(value)): raise vol.Invalid("template value does not contain a dynamic template") - if not (hass := _async_get_hass_or_none()): - # pylint: disable-next=import-outside-toplevel - from .frame import ReportBehavior, report_usage - report_usage( - ( - "validates schema outside the event loop, " - "which will stop working in HA Core 2025.10" - ), - core_behavior=ReportBehavior.LOG, - ) - - template_value = template_helper.Template(str(value), hass) + template_value = template_helper.Template(str(value), async_get_hass_or_none()) try: template_value.ensure_valid() @@ -846,9 +769,9 @@ def socket_timeout(value: Any | None) -> object: float_value = float(value) if float_value > 0.0: return float_value + raise vol.Invalid("Invalid socket timeout value. float > 0.0 required.") except Exception as err: raise vol.Invalid(f"Invalid socket timeout: {err}") from err - raise vol.Invalid("Invalid socket timeout value. 
float > 0.0 required.") def url( @@ -874,7 +797,7 @@ def url_no_path(value: Any) -> str: url_in = url(value) if urlparse(url_in).path not in ("", "/"): - raise vol.Invalid("url is not allowed to have a path component") + raise vol.Invalid("url it not allowed to have a path component") return url_in @@ -1115,13 +1038,7 @@ def key_dependency[_KT: Hashable, _VT]( def custom_serializer(schema: Any) -> Any: """Serialize additional types for voluptuous_serialize.""" - return _custom_serializer(schema, allow_section=True) - - -def _custom_serializer(schema: Any, *, allow_section: bool) -> Any: - """Serialize additional types for voluptuous_serialize.""" - from homeassistant import data_entry_flow # pylint: disable=import-outside-toplevel - + from .. import data_entry_flow # pylint: disable=import-outside-toplevel from . import selector # pylint: disable=import-outside-toplevel if schema is positive_time_period_dict: @@ -1134,15 +1051,10 @@ def _custom_serializer(schema: Any, *, allow_section: bool) -> Any: return {"type": "boolean"} if isinstance(schema, data_entry_flow.section): - if not allow_section: - raise ValueError("Nesting expandable sections is not supported") return { "type": "expandable", "schema": voluptuous_serialize.convert( - schema.schema, - custom_serializer=functools.partial( - _custom_serializer, allow_section=False - ), + schema.schema, custom_serializer=custom_serializer ), "expanded": not schema.options["collapsed"], } @@ -1340,56 +1252,37 @@ TARGET_SERVICE_FIELDS = { _HAS_ENTITY_SERVICE_FIELD = has_at_least_one_key(*ENTITY_SERVICE_FIELDS) -def is_entity_service_schema(validator: VolSchemaType) -> bool: - """Check if the passed validator is an entity schema validator. - - The validator must be either of: - - A validator returned by cv._make_entity_service_schema - - A validator returned by cv._make_entity_service_schema, wrapped in a vol.Schema - - A validator returned by cv._make_entity_service_schema, wrapped in a vol.All - Nesting is allowed. - """ - if hasattr(validator, "_entity_service_schema"): - return True - if isinstance(validator, (vol.All)): - return any(is_entity_service_schema(val) for val in validator.validators) - if isinstance(validator, (vol.Schema)): - return is_entity_service_schema(validator.schema) - - return False - - -def _make_entity_service_schema(schema: dict, extra: int) -> VolSchemaType: +def _make_entity_service_schema(schema: dict, extra: int) -> vol.Schema: """Create an entity service schema.""" - validator = vol.All( - vol.Schema( - { - # The frontend stores data here. Don't use in core. - vol.Remove("metadata"): dict, - **schema, - **ENTITY_SERVICE_FIELDS, - }, - extra=extra, - ), - _HAS_ENTITY_SERVICE_FIELD, + return vol.Schema( + vol.All( + vol.Schema( + { + # The frontend stores data here. Don't use in core. + vol.Remove("metadata"): dict, + **schema, + **ENTITY_SERVICE_FIELDS, + }, + extra=extra, + ), + _HAS_ENTITY_SERVICE_FIELD, + ) ) - setattr(validator, "_entity_service_schema", True) - return validator BASE_ENTITY_SCHEMA = _make_entity_service_schema({}, vol.PREVENT_EXTRA) def make_entity_service_schema( - schema: dict | None, *, extra: int = vol.PREVENT_EXTRA -) -> VolSchemaType: + schema: dict, *, extra: int = vol.PREVENT_EXTRA +) -> vol.Schema: """Create an entity service schema.""" if not schema and extra == vol.PREVENT_EXTRA: # If the schema is empty and we don't allow extra keys, we can return # the base schema and avoid compiling a new schema which is the case # for ~50% of services. 
return BASE_ENTITY_SCHEMA - return _make_entity_service_schema(schema or {}, extra) + return _make_entity_service_schema(schema, extra) SCRIPT_CONVERSATION_RESPONSE_SCHEMA = vol.Any(template, None) @@ -1432,30 +1325,11 @@ EVENT_SCHEMA = vol.Schema( } ) - -def _backward_compat_service_schema(value: Any | None) -> Any: - """Backward compatibility for service schemas.""" - - if not isinstance(value, dict): - return value - - # `service` has been renamed to `action` - if CONF_SERVICE in value: - if CONF_ACTION in value: - raise vol.Invalid( - "Cannot specify both 'service' and 'action'. Please use 'action' only." - ) - value[CONF_ACTION] = value.pop(CONF_SERVICE) - - return value - - SERVICE_SCHEMA = vol.All( - _backward_compat_service_schema, vol.Schema( { **SCRIPT_ACTION_BASE_SCHEMA, - vol.Exclusive(CONF_ACTION, "service name"): vol.Any( + vol.Exclusive(CONF_SERVICE, "service name"): vol.Any( service, dynamic_template ), vol.Exclusive(CONF_SERVICE_TEMPLATE, "service name"): vol.Any( @@ -1474,7 +1348,7 @@ SERVICE_SCHEMA = vol.All( vol.Remove("metadata"): dict, } ), - has_at_least_one_key(CONF_ACTION, CONF_SERVICE_TEMPLATE), + has_at_least_one_key(CONF_SERVICE, CONF_SERVICE_TEMPLATE), ) NUMERIC_STATE_THRESHOLD_SCHEMA = vol.Any( @@ -1770,32 +1644,6 @@ CONDITION_ACTION_SCHEMA: vol.Schema = vol.Schema( ) ) - -def _trigger_pre_validator(value: Any | None) -> Any: - """Rewrite trigger `trigger` to `platform`. - - `platform` has been renamed to `trigger` in user documentation and in the automation - editor. The Python trigger implementation still uses `platform`, so we need to - rename `trigger` to `platform. - """ - - if not isinstance(value, Mapping): - # If the value is not a mapping, we let that be handled by the TRIGGER_SCHEMA - return value - - if CONF_TRIGGER in value: - if CONF_PLATFORM in value: - raise vol.Invalid( - "Cannot specify both 'platform' and 'trigger'. Please use 'trigger' only." - ) - value = dict(value) - value[CONF_PLATFORM] = value.pop(CONF_TRIGGER) - elif CONF_PLATFORM not in value: - raise vol.Invalid("required key not provided", [CONF_TRIGGER]) - - return value - - TRIGGER_BASE_SCHEMA = vol.Schema( { vol.Optional(CONF_ALIAS): str, @@ -1810,19 +1658,6 @@ TRIGGER_BASE_SCHEMA = vol.Schema( _base_trigger_validator_schema = TRIGGER_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA) -def _base_trigger_list_flatten(triggers: list[Any]) -> list[Any]: - """Flatten trigger arrays containing 'triggers:' sublists into a single list of triggers.""" - flatlist = [] - for t in triggers: - if CONF_TRIGGERS in t and len(t) == 1: - triggerlist = ensure_list(t[CONF_TRIGGERS]) - flatlist.extend(triggerlist) - else: - flatlist.append(t) - - return flatlist - - # This is first round of validation, we don't want to process the config here already, # just ensure basics as platform and ID are there. 
def _base_trigger_validator(value: Any) -> Any: @@ -1830,11 +1665,7 @@ def _base_trigger_validator(value: Any) -> Any: return value -TRIGGER_SCHEMA = vol.All( - ensure_list, - _base_trigger_list_flatten, - [vol.All(_trigger_pre_validator, _base_trigger_validator)], -) +TRIGGER_SCHEMA = vol.All(ensure_list, [_base_trigger_validator]) _SCRIPT_DELAY_SCHEMA = vol.Schema( { @@ -2013,7 +1844,6 @@ ACTIONS_MAP = { CONF_WAIT_FOR_TRIGGER: SCRIPT_ACTION_WAIT_FOR_TRIGGER, CONF_VARIABLES: SCRIPT_ACTION_VARIABLES, CONF_IF: SCRIPT_ACTION_IF, - CONF_ACTION: SCRIPT_ACTION_CALL_SERVICE, CONF_SERVICE: SCRIPT_ACTION_CALL_SERVICE, CONF_SERVICE_TEMPLATE: SCRIPT_ACTION_CALL_SERVICE, CONF_STOP: SCRIPT_ACTION_STOP, @@ -2069,32 +1899,3 @@ historic_currency = vol.In( country = vol.In(COUNTRIES, msg="invalid ISO 3166 formatted country") language = vol.In(LANGUAGES, msg="invalid RFC 5646 formatted language") - - -async def async_validate( - hass: HomeAssistant, validator: Callable[[Any], Any], value: Any -) -> Any: - """Async friendly schema validation. - - If a validator decorated with @not_async_friendly is called, validation will be - deferred to an executor. If not, validation will happen in the event loop. - """ - _validating_async.set(True) - try: - return validator(value) - except MustValidateInExecutor: - return await hass.async_add_executor_job( - _validate_in_executor, hass, validator, value - ) - finally: - _validating_async.set(False) - - -def _validate_in_executor( - hass: HomeAssistant, validator: Callable[[Any], Any], value: Any -) -> Any: - _hass.hass = hass - try: - return validator(value) - finally: - _hass.hass = None diff --git a/homeassistant/helpers/data_entry_flow.py b/homeassistant/helpers/data_entry_flow.py index adb2062a8ea..2adab32195b 100644 --- a/homeassistant/helpers/data_entry_flow.py +++ b/homeassistant/helpers/data_entry_flow.py @@ -18,7 +18,7 @@ from . 
import config_validation as cv _FlowManagerT = TypeVar( "_FlowManagerT", - bound=data_entry_flow.FlowManager[Any, Any], + bound=data_entry_flow.FlowManager[Any], default=data_entry_flow.FlowManager, ) @@ -47,7 +47,7 @@ class _BaseFlowManagerView(HomeAssistantView, Generic[_FlowManagerT]): data = result.copy() if (schema := data["data_schema"]) is None: - data["data_schema"] = [] # type: ignore[typeddict-item] # json result type + data["data_schema"] = [] else: data["data_schema"] = voluptuous_serialize.convert( schema, custom_serializer=cv.custom_serializer diff --git a/homeassistant/helpers/deprecation.py b/homeassistant/helpers/deprecation.py index 81f7821ec79..65e8f4ef97e 100644 --- a/homeassistant/helpers/deprecation.py +++ b/homeassistant/helpers/deprecation.py @@ -3,8 +3,7 @@ from __future__ import annotations from collections.abc import Callable -from contextlib import suppress -from enum import Enum, EnumType, _EnumDict +from enum import Enum import functools import inspect import logging @@ -165,30 +164,6 @@ def _print_deprecation_warning_internal( breaks_in_ha_version: str | None, *, log_when_no_integration_is_found: bool, -) -> None: - # Suppress ImportError due to use of deprecated enum in core.py - # Can be removed in HA Core 2025.1 - with suppress(ImportError): - _print_deprecation_warning_internal_impl( - obj_name, - module_name, - replacement, - description, - verb, - breaks_in_ha_version, - log_when_no_integration_is_found=log_when_no_integration_is_found, - ) - - -def _print_deprecation_warning_internal_impl( - obj_name: str, - module_name: str, - replacement: str, - description: str, - verb: str, - breaks_in_ha_version: str | None, - *, - log_when_no_integration_is_found: bool, ) -> None: # pylint: disable=import-outside-toplevel from homeassistant.core import async_get_hass_or_none @@ -363,35 +338,3 @@ def all_with_deprecated_constants(module_globals: dict[str, Any]) -> list[str]: for name in module_globals_keys if name.startswith(_PREFIX_DEPRECATED) ] - - -class EnumWithDeprecatedMembers(EnumType): - """Enum with deprecated members.""" - - def __new__( - mcs, # noqa: N804 ruff bug, ruff does not understand this is a metaclass - cls: str, - bases: tuple[type, ...], - classdict: _EnumDict, - *, - deprecated: dict[str, tuple[str, str]], - **kwds: Any, - ) -> Any: - """Create a new class.""" - classdict["__deprecated__"] = deprecated - return super().__new__(mcs, cls, bases, classdict, **kwds) - - def __getattribute__(cls, name: str) -> Any: - """Warn if accessing a deprecated member.""" - deprecated = super().__getattribute__("__deprecated__") - if name in deprecated: - _print_deprecation_warning_internal( - f"{cls.__name__}.{name}", - cls.__module__, - f"{deprecated[name][0]}", - "enum member", - "used", - deprecated[name][1], - log_when_no_integration_is_found=False, - ) - return super().__getattribute__(name) diff --git a/homeassistant/helpers/device_registry.py b/homeassistant/helpers/device_registry.py index faf4257577d..4579739f0e1 100644 --- a/homeassistant/helpers/device_registry.py +++ b/homeassistant/helpers/device_registry.py @@ -4,9 +4,8 @@ from __future__ import annotations from collections import defaultdict from collections.abc import Mapping -from datetime import datetime from enum import StrEnum -from functools import lru_cache, partial +from functools import cached_property, lru_cache, partial import logging import time from typing import TYPE_CHECKING, Any, Literal, TypedDict @@ -24,7 +23,6 @@ from homeassistant.core import ( ) from 
homeassistant.exceptions import HomeAssistantError from homeassistant.loader import async_suggest_report_issue -from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey from homeassistant.util.json import format_unserializable_data @@ -45,14 +43,9 @@ from .singleton import singleton from .typing import UNDEFINED, UndefinedType if TYPE_CHECKING: - # mypy cannot workout _cache Protocol with attrs - from propcache import cached_property as under_cached_property - from homeassistant.config_entries import ConfigEntry from . import entity_registry -else: - from propcache import under_cached_property _LOGGER = logging.getLogger(__name__) @@ -62,7 +55,7 @@ EVENT_DEVICE_REGISTRY_UPDATED: EventType[EventDeviceRegistryUpdatedData] = Event ) STORAGE_KEY = "core.device_registry" STORAGE_VERSION_MAJOR = 1 -STORAGE_VERSION_MINOR = 8 +STORAGE_VERSION_MINOR = 6 CLEANUP_DELAY = 10 @@ -101,7 +94,6 @@ class DeviceInfo(TypedDict, total=False): configuration_url: str | URL | None connections: set[tuple[str, str]] - created_at: str default_manufacturer: str default_model: str default_name: str @@ -109,8 +101,6 @@ class DeviceInfo(TypedDict, total=False): identifiers: set[tuple[str, str]] manufacturer: str | None model: str | None - model_id: str | None - modified_at: str name: str | None serial_number: str | None suggested_area: str | None @@ -137,7 +127,6 @@ DEVICE_INFO_TYPES = { "identifiers", "manufacturer", "model", - "model_id", "name", "serial_number", "suggested_area", @@ -282,7 +271,7 @@ def _validate_configuration_url(value: Any) -> str | None: return url_as_str -@attr.s(frozen=True, slots=True) +@attr.s(frozen=True) class DeviceEntry: """Device Registry Entry.""" @@ -290,7 +279,6 @@ class DeviceEntry: config_entries: set[str] = attr.ib(converter=set, factory=set) configuration_url: str | None = attr.ib(default=None) connections: set[tuple[str, str]] = attr.ib(converter=set, factory=set) - created_at: datetime = attr.ib(factory=utcnow) disabled_by: DeviceEntryDisabler | None = attr.ib(default=None) entry_type: DeviceEntryType | None = attr.ib(default=None) hw_version: str | None = attr.ib(default=None) @@ -299,8 +287,6 @@ class DeviceEntry: labels: set[str] = attr.ib(converter=set, factory=set) manufacturer: str | None = attr.ib(default=None) model: str | None = attr.ib(default=None) - model_id: str | None = attr.ib(default=None) - modified_at: datetime = attr.ib(factory=utcnow) name_by_user: str | None = attr.ib(default=None) name: str | None = attr.ib(default=None) primary_config_entry: str | None = attr.ib(default=None) @@ -310,7 +296,6 @@ class DeviceEntry: via_device_id: str | None = attr.ib(default=None) # This value is not stored, just used to keep track of events to fire. 
is_new: bool = attr.ib(default=False) - _cache: dict[str, Any] = attr.ib(factory=dict, eq=False, init=False) @property def disabled(self) -> bool: @@ -328,7 +313,6 @@ class DeviceEntry: "configuration_url": self.configuration_url, "config_entries": list(self.config_entries), "connections": list(self.connections), - "created_at": self.created_at.timestamp(), "disabled_by": self.disabled_by, "entry_type": self.entry_type, "hw_version": self.hw_version, @@ -337,8 +321,6 @@ class DeviceEntry: "labels": list(self.labels), "manufacturer": self.manufacturer, "model": self.model, - "model_id": self.model_id, - "modified_at": self.modified_at.timestamp(), "name_by_user": self.name_by_user, "name": self.name, "primary_config_entry": self.primary_config_entry, @@ -347,7 +329,7 @@ class DeviceEntry: "via_device_id": self.via_device_id, } - @under_cached_property + @cached_property def json_repr(self) -> bytes | None: """Return a cached JSON representation of the entry.""" try: @@ -363,7 +345,7 @@ class DeviceEntry: ) return None - @under_cached_property + @cached_property def as_storage_fragment(self) -> json_fragment: """Return a json fragment for storage.""" return json_fragment( @@ -373,7 +355,6 @@ class DeviceEntry: "config_entries": list(self.config_entries), "configuration_url": self.configuration_url, "connections": list(self.connections), - "created_at": self.created_at, "disabled_by": self.disabled_by, "entry_type": self.entry_type, "hw_version": self.hw_version, @@ -382,8 +363,6 @@ class DeviceEntry: "labels": list(self.labels), "manufacturer": self.manufacturer, "model": self.model, - "model_id": self.model_id, - "modified_at": self.modified_at, "name_by_user": self.name_by_user, "name": self.name, "primary_config_entry": self.primary_config_entry, @@ -395,7 +374,7 @@ class DeviceEntry: ) -@attr.s(frozen=True, slots=True) +@attr.s(frozen=True) class DeletedDeviceEntry: """Deleted Device Registry Entry.""" @@ -404,9 +383,6 @@ class DeletedDeviceEntry: identifiers: set[tuple[str, str]] = attr.ib() id: str = attr.ib() orphaned_timestamp: float | None = attr.ib() - created_at: datetime = attr.ib(factory=utcnow) - modified_at: datetime = attr.ib(factory=utcnow) - _cache: dict[str, Any] = attr.ib(factory=dict, eq=False, init=False) def to_device_entry( self, @@ -419,13 +395,12 @@ class DeletedDeviceEntry: # type ignores: likely https://github.com/python/mypy/issues/8625 config_entries={config_entry_id}, # type: ignore[arg-type] connections=self.connections & connections, # type: ignore[arg-type] - created_at=self.created_at, identifiers=self.identifiers & identifiers, # type: ignore[arg-type] id=self.id, is_new=True, ) - @under_cached_property + @cached_property def as_storage_fragment(self) -> json_fragment: """Return a json fragment for storage.""" return json_fragment( @@ -433,11 +408,9 @@ class DeletedDeviceEntry: { "config_entries": list(self.config_entries), "connections": list(self.connections), - "created_at": self.created_at, "identifiers": list(self.identifiers), "id": self.id, "orphaned_timestamp": self.orphaned_timestamp, - "modified_at": self.modified_at, } ) ) @@ -505,22 +478,11 @@ class DeviceRegistryStore(storage.Store[dict[str, list[dict[str, Any]]]]): if old_minor_version < 5: # Introduced in 2024.3 for device in old_data["devices"]: - device["labels"] = [] + device["labels"] = device.get("labels", []) if old_minor_version < 6: # Introduced in 2024.7 for device in old_data["devices"]: - device["primary_config_entry"] = None - if old_minor_version < 7: - # Introduced in 2024.8 - 
for device in old_data["devices"]: - device["model_id"] = None - if old_minor_version < 8: - # Introduced in 2024.8 - created_at = utc_from_timestamp(0).isoformat() - for device in old_data["devices"]: - device["created_at"] = device["modified_at"] = created_at - for device in old_data["deleted_devices"]: - device["created_at"] = device["modified_at"] = created_at + device.setdefault("primary_config_entry", None) if old_major_version > 1: raise NotImplementedError @@ -717,7 +679,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): config_entry_id: str, configuration_url: str | URL | None | UndefinedType = UNDEFINED, connections: set[tuple[str, str]] | None | UndefinedType = UNDEFINED, - created_at: str | datetime | UndefinedType = UNDEFINED, # will be ignored default_manufacturer: str | None | UndefinedType = UNDEFINED, default_model: str | None | UndefinedType = UNDEFINED, default_name: str | None | UndefinedType = UNDEFINED, @@ -728,8 +689,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): identifiers: set[tuple[str, str]] | None | UndefinedType = UNDEFINED, manufacturer: str | None | UndefinedType = UNDEFINED, model: str | None | UndefinedType = UNDEFINED, - model_id: str | None | UndefinedType = UNDEFINED, - modified_at: str | datetime | UndefinedType = UNDEFINED, # will be ignored name: str | None | UndefinedType = UNDEFINED, serial_number: str | None | UndefinedType = UNDEFINED, suggested_area: str | None | UndefinedType = UNDEFINED, @@ -776,7 +735,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): ("identifiers", identifiers), ("manufacturer", manufacturer), ("model", model), - ("model_id", model_id), ("name", name), ("serial_number", serial_number), ("suggested_area", suggested_area), @@ -842,6 +800,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): device.id, allow_collisions=True, add_config_entry_id=config_entry_id, + add_config_entry=config_entry, configuration_url=configuration_url, device_info_type=device_info_type, disabled_by=disabled_by, @@ -851,7 +810,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): merge_connections=connections or UNDEFINED, merge_identifiers=identifiers or UNDEFINED, model=model, - model_id=model_id, name=name, serial_number=serial_number, suggested_area=suggested_area, @@ -869,6 +827,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): self, device_id: str, *, + add_config_entry: ConfigEntry | UndefinedType = UNDEFINED, add_config_entry_id: str | UndefinedType = UNDEFINED, # Temporary flag so we don't blow up when collisions are implicitly introduced # by calls to async_get_or_create. Must not be set by integrations. 
@@ -884,7 +843,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): merge_connections: set[tuple[str, str]] | UndefinedType = UNDEFINED, merge_identifiers: set[tuple[str, str]] | UndefinedType = UNDEFINED, model: str | None | UndefinedType = UNDEFINED, - model_id: str | None | UndefinedType = UNDEFINED, name_by_user: str | None | UndefinedType = UNDEFINED, name: str | None | UndefinedType = UNDEFINED, new_connections: set[tuple[str, str]] | UndefinedType = UNDEFINED, @@ -903,11 +861,13 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): config_entries = old.config_entries - if add_config_entry_id is not UNDEFINED: - if self.hass.config_entries.async_get_entry(add_config_entry_id) is None: + if add_config_entry_id is not UNDEFINED and add_config_entry is UNDEFINED: + config_entry = self.hass.config_entries.async_get_entry(add_config_entry_id) + if config_entry is None: raise HomeAssistantError( f"Can't link device to unknown config entry {add_config_entry_id}" ) + add_config_entry = config_entry if not new_connections and not new_identifiers: raise HomeAssistantError( @@ -951,11 +911,11 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): area = ar.async_get(self.hass).async_get_or_create(suggested_area) area_id = area.id - if add_config_entry_id is not UNDEFINED: + if add_config_entry is not UNDEFINED: primary_entry_id = old.primary_config_entry if ( device_info_type == "primary" - and add_config_entry_id != primary_entry_id + and add_config_entry.entry_id != primary_entry_id ): if ( primary_entry_id is None @@ -966,11 +926,11 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): ) or primary_entry.domain in LOW_PRIO_CONFIG_ENTRY_DOMAINS ): - new_values["primary_config_entry"] = add_config_entry_id - old_values["primary_config_entry"] = primary_entry_id + new_values["primary_config_entry"] = add_config_entry.entry_id + old_values["primary_config_entry"] = old.primary_config_entry - if add_config_entry_id not in old.config_entries: - config_entries = old.config_entries | {add_config_entry_id} + if add_config_entry.entry_id not in old.config_entries: + config_entries = old.config_entries | {add_config_entry.entry_id} if ( remove_config_entry_id is not UNDEFINED @@ -1044,7 +1004,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): ("labels", labels), ("manufacturer", manufacturer), ("model", model), - ("model_id", model_id), ("name", name), ("name_by_user", name_by_user), ("serial_number", serial_number), @@ -1062,10 +1021,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): if not new_values: return old - if not RUNTIME_ONLY_ATTRS.issuperset(new_values): - # Change modified_at if we are changing something that we store - new_values["modified_at"] = utcnow() - self.hass.verify_event_loop_thread("device_registry.async_update_device") new = attr.evolve(old, **new_values) self.devices[device_id] = new @@ -1145,7 +1100,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): self.deleted_devices[device_id] = DeletedDeviceEntry( config_entries=device.config_entries, connections=device.connections, - created_at=device.created_at, identifiers=device.identifiers, id=device.id, orphaned_timestamp=None, @@ -1181,7 +1135,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): tuple(conn) # type: ignore[misc] for conn in device["connections"] }, - created_at=datetime.fromisoformat(device["created_at"]), disabled_by=( 
DeviceEntryDisabler(device["disabled_by"]) if device["disabled_by"] @@ -1201,8 +1154,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): labels=set(device["labels"]), manufacturer=device["manufacturer"], model=device["model"], - model_id=device["model_id"], - modified_at=datetime.fromisoformat(device["modified_at"]), name_by_user=device["name_by_user"], name=device["name"], primary_config_entry=device["primary_config_entry"], @@ -1215,10 +1166,8 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): deleted_devices[device["id"]] = DeletedDeviceEntry( config_entries=set(device["config_entries"]), connections={tuple(conn) for conn in device["connections"]}, - created_at=datetime.fromisoformat(device["created_at"]), identifiers={tuple(iden) for iden in device["identifiers"]}, id=device["id"], - modified_at=datetime.fromisoformat(device["modified_at"]), orphaned_timestamp=device["orphaned_timestamp"], ) diff --git a/homeassistant/helpers/discovery.py b/homeassistant/helpers/discovery.py index 7c1b5ac4a64..9f656dad56c 100644 --- a/homeassistant/helpers/discovery.py +++ b/homeassistant/helpers/discovery.py @@ -14,8 +14,8 @@ from typing import Any, TypedDict from homeassistant import core, setup from homeassistant.const import Platform from homeassistant.loader import bind_hass -from homeassistant.util.signal_type import SignalTypeFormat +from ..util.signal_type import SignalTypeFormat from .dispatcher import async_dispatcher_connect, async_dispatcher_send_internal from .typing import ConfigType, DiscoveryInfoType diff --git a/homeassistant/helpers/discovery_flow.py b/homeassistant/helpers/discovery_flow.py index fd41c7ffb44..9ec0b01dc56 100644 --- a/homeassistant/helpers/discovery_flow.py +++ b/homeassistant/helpers/discovery_flow.py @@ -3,49 +3,25 @@ from __future__ import annotations from collections.abc import Coroutine -import dataclasses -from typing import TYPE_CHECKING, Any, NamedTuple, Self +from typing import Any, NamedTuple +from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import EVENT_HOMEASSISTANT_STARTED from homeassistant.core import CoreState, Event, HomeAssistant, callback from homeassistant.loader import bind_hass from homeassistant.util.async_ import gather_with_limited_concurrency from homeassistant.util.hass_dict import HassKey -if TYPE_CHECKING: - from homeassistant.config_entries import ConfigFlowContext, ConfigFlowResult - FLOW_INIT_LIMIT = 20 DISCOVERY_FLOW_DISPATCHER: HassKey[FlowDispatcher] = HassKey( "discovery_flow_dispatcher" ) -@dataclasses.dataclass(kw_only=True, slots=True) -class DiscoveryKey: - """Serializable discovery key.""" - - domain: str - key: str | tuple[str, ...] 
- version: int - - @classmethod - def from_json_dict(cls, json_dict: dict[str, Any]) -> Self: - """Construct from JSON dict.""" - if type(key := json_dict["key"]) is list: - key = tuple(key) - return cls(domain=json_dict["domain"], key=key, version=json_dict["version"]) - - @bind_hass @callback def async_create_flow( - hass: HomeAssistant, - domain: str, - context: ConfigFlowContext, - data: Any, - *, - discovery_key: DiscoveryKey | None = None, + hass: HomeAssistant, domain: str, context: dict[str, Any], data: Any ) -> None: """Create a discovery flow.""" dispatcher: FlowDispatcher | None = None @@ -55,9 +31,6 @@ def async_create_flow( dispatcher = hass.data[DISCOVERY_FLOW_DISPATCHER] = FlowDispatcher(hass) dispatcher.async_setup() - if discovery_key: - context = context | {"discovery_key": discovery_key} - if not dispatcher or dispatcher.started: if init_coro := _async_init_flow(hass, domain, context, data): hass.async_create_background_task( @@ -70,7 +43,7 @@ def async_create_flow( @callback def _async_init_flow( - hass: HomeAssistant, domain: str, context: ConfigFlowContext, data: Any + hass: HomeAssistant, domain: str, context: dict[str, Any], data: Any ) -> Coroutine[None, None, ConfigFlowResult] | None: """Create a discovery flow.""" # Avoid spawning flows that have the same initial discovery data @@ -78,9 +51,7 @@ def _async_init_flow( # which can overload devices since zeroconf/ssdp updates can happen # multiple times in the same minute if ( - hass.config_entries.flow.async_has_matching_discovery_flow( - domain, context, data - ) + hass.config_entries.flow.async_has_matching_flow(domain, context, data) or hass.is_stopping ): return None @@ -98,7 +69,7 @@ class PendingFlowKey(NamedTuple): class PendingFlowValue(NamedTuple): """Value for pending flows.""" - context: ConfigFlowContext + context: dict[str, Any] data: Any @@ -137,7 +108,7 @@ class FlowDispatcher: await gather_with_limited_concurrency(FLOW_INIT_LIMIT, *init_coros) @callback - def async_create(self, domain: str, context: ConfigFlowContext, data: Any) -> None: + def async_create(self, domain: str, context: dict[str, Any], data: Any) -> None: """Create and add or queue a flow.""" key = PendingFlowKey(domain, context["source"]) values = PendingFlowValue(context, data) diff --git a/homeassistant/helpers/dispatcher.py b/homeassistant/helpers/dispatcher.py index a5a790b7ce5..173e441781c 100644 --- a/homeassistant/helpers/dispatcher.py +++ b/homeassistant/helpers/dispatcher.py @@ -151,11 +151,11 @@ def _format_err[*_Ts]( *args: Any, ) -> str: """Format error message.""" - - return ( + return "Exception in {} when dispatching '{}': {}".format( # Functions wrapped in partial do not have a __name__ - f"Exception in {getattr(target, "__name__", None) or target} " - f"when dispatching '{signal}': {args}" + getattr(target, "__name__", None) or str(target), + signal, + args, ) diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index 1f77dd3f95c..cf910a5cba8 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -9,6 +9,7 @@ from collections.abc import Callable, Coroutine, Iterable, Mapping import dataclasses from enum import Enum, IntFlag, auto import functools as ft +from functools import cached_property import logging import math from operator import attrgetter @@ -18,9 +19,9 @@ import time from types import FunctionType from typing import TYPE_CHECKING, Any, Final, Literal, NotRequired, TypedDict, final -from propcache import cached_property import voluptuous as vol +from 
homeassistant.config import DATA_CUSTOMIZE from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_ATTRIBUTION, @@ -48,7 +49,6 @@ from homeassistant.core import ( get_hassjob_callable_job_type, get_release_channel, ) -from homeassistant.core_config import DATA_CUSTOMIZE from homeassistant.exceptions import ( HomeAssistantError, InvalidStateError, @@ -263,6 +263,8 @@ class CalculatedState: attributes: dict[str, Any] # Capability attributes returned by the capability_attributes property capability_attributes: Mapping[str, Any] | None + # Attributes which may be overridden by the entity registry + shadowed_attributes: Mapping[str, Any] class CachedProperties(type): @@ -337,9 +339,7 @@ class CachedProperties(type): Also invalidates the corresponding cached_property by calling delattr on it. """ - if ( - old_val := getattr(o, private_attr_name, _SENTINEL) - ) == val and type(old_val) is type(val): + if getattr(o, private_attr_name, _SENTINEL) == val: return setattr(o, private_attr_name, val) # Invalidate the cache of the cached property @@ -1042,20 +1042,18 @@ class Entity( @callback def _async_calculate_state(self) -> CalculatedState: """Calculate state string and attribute mapping.""" - state, attr, capabilities, _, _ = self.__async_calculate_state() - return CalculatedState(state, attr, capabilities) + return CalculatedState(*self.__async_calculate_state()) def __async_calculate_state( self, - ) -> tuple[str, dict[str, Any], Mapping[str, Any] | None, str | None, int | None]: + ) -> tuple[str, dict[str, Any], Mapping[str, Any] | None, Mapping[str, Any]]: """Calculate state string and attribute mapping. - Returns a tuple: + Returns a tuple (state, attr, capability_attr, shadowed_attr). state - the stringified state attr - the attribute dictionary capability_attr - a mapping with capability attributes - original_device_class - the device class which may be overridden - supported_features - the supported features + shadowed_attr - a mapping with attributes which may be overridden This method is called when writing the state to avoid the overhead of creating a dataclass object. 
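The CachedProperties hunk above drops cache invalidation when an attribute is rewritten with an unchanged value. A minimal, self-contained sketch of that pattern, assuming nothing beyond functools (the class and attribute names are illustrative, not Home Assistant's metaclass):

from functools import cached_property


class IconHolder:
    """Toy stand-in for an object with one cached attribute."""

    def __init__(self, icon: str | None = None) -> None:
        self._attr_icon = icon

    @cached_property
    def icon(self) -> str | None:
        # Recomputed only after the cache entry has been dropped.
        return self._attr_icon

    def set_icon(self, value: str | None) -> None:
        if getattr(self, "_attr_icon", object()) == value:
            return  # unchanged value: keep the cached property as-is
        self._attr_icon = value
        self.__dict__.pop("icon", None)  # invalidate the cached_property


holder = IconHolder("mdi:lightbulb")
assert holder.icon == "mdi:lightbulb"
holder.set_icon("mdi:lamp")
assert holder.icon == "mdi:lamp"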
@@ -1064,6 +1062,7 @@ class Entity( capability_attr = self.capability_attributes attr = capability_attr.copy() if capability_attr else {} + shadowed_attr = {} available = self.available # only call self.available once per update cycle state = self._stringify_state(available) @@ -1082,27 +1081,30 @@ class Entity( if (attribution := self.attribution) is not None: attr[ATTR_ATTRIBUTION] = attribution - original_device_class = self.device_class + shadowed_attr[ATTR_DEVICE_CLASS] = self.device_class if ( - device_class := (entry and entry.device_class) or original_device_class + device_class := (entry and entry.device_class) + or shadowed_attr[ATTR_DEVICE_CLASS] ) is not None: attr[ATTR_DEVICE_CLASS] = str(device_class) if (entity_picture := self.entity_picture) is not None: attr[ATTR_ENTITY_PICTURE] = entity_picture - if (icon := (entry and entry.icon) or self.icon) is not None: + shadowed_attr[ATTR_ICON] = self.icon + if (icon := (entry and entry.icon) or shadowed_attr[ATTR_ICON]) is not None: attr[ATTR_ICON] = icon + shadowed_attr[ATTR_FRIENDLY_NAME] = self._friendly_name_internal() if ( - name := (entry and entry.name) or self._friendly_name_internal() + name := (entry and entry.name) or shadowed_attr[ATTR_FRIENDLY_NAME] ) is not None: attr[ATTR_FRIENDLY_NAME] = name if (supported_features := self.supported_features) is not None: attr[ATTR_SUPPORTED_FEATURES] = supported_features - return (state, attr, capability_attr, original_device_class, supported_features) + return (state, attr, capability_attr, shadowed_attr) @callback def _async_write_ha_state(self) -> None: @@ -1128,15 +1130,14 @@ class Entity( return state_calculate_start = timer() - state, attr, capabilities, original_device_class, supported_features = ( - self.__async_calculate_state() - ) + state, attr, capabilities, shadowed_attr = self.__async_calculate_state() time_now = timer() if entry: # Make sure capabilities in the entity registry are up to date. Capabilities # include capability attributes, device class and supported features - supported_features = supported_features or 0 + original_device_class: str | None = shadowed_attr[ATTR_DEVICE_CLASS] + supported_features: int = attr.get(ATTR_SUPPORTED_FEATURES) or 0 if ( capabilities != entry.capabilities or original_device_class != entry.original_device_class @@ -1187,18 +1188,11 @@ class Entity( report_issue, ) - try: - # Most of the time this will already be - # set and since try is near zero cost - # on py3.11+ its faster to assume it is - # set and catch the exception if it is not. - customize = hass.data[DATA_CUSTOMIZE] - except KeyError: - pass - else: - # Overwrite properties that have been set in the config file. - if custom := customize.get(entity_id): - attr.update(custom) + # Overwrite properties that have been set in the config file. 
+ if (customize := hass.data.get(DATA_CUSTOMIZE)) and ( + custom := customize.get(entity_id) + ): + attr.update(custom) if ( self._context_set is not None @@ -1208,7 +1202,7 @@ class Entity( self._context_set = None try: - hass.states.async_set_internal( + hass.states.async_set( entity_id, state, attr, diff --git a/homeassistant/helpers/entity_component.py b/homeassistant/helpers/entity_component.py index 1be7289401c..0034eb1c6fc 100644 --- a/homeassistant/helpers/entity_component.py +++ b/homeassistant/helpers/entity_component.py @@ -5,6 +5,7 @@ from __future__ import annotations import asyncio from collections.abc import Callable, Iterable from datetime import timedelta +from functools import partial import logging from types import ModuleType from typing import Any, Generic @@ -65,13 +66,10 @@ async def async_update_entity(hass: HomeAssistant, entity_id: str) -> None: class EntityComponent(Generic[_EntityT]): - """The EntityComponent manages platforms that manage entities. - - An example of an entity component is 'light', which manages platforms such - as 'hue.light'. + """The EntityComponent manages platforms that manages entities. This class has the following responsibilities: - - Process the configuration and set up a platform based component, for example light. + - Process the configuration and set up a platform based component. - Manage the platforms and their entities. - Help extract the entities from a service call. - Listen for discovery events for platforms related to the domain. @@ -260,22 +258,31 @@ class EntityComponent(Generic[_EntityT]): def async_register_entity_service( self, name: str, - schema: VolDictType | VolSchemaType | None, + schema: VolDictType | VolSchemaType, func: str | Callable[..., Any], required_features: list[int] | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, ) -> None: """Register an entity service.""" - service.async_register_entity_service( - self.hass, + if isinstance(schema, dict): + schema = cv.make_entity_service_schema(schema) + + service_func: str | HassJob[..., Any] + service_func = func if isinstance(func, str) else HassJob(func) + + self.hass.services.async_register( self.domain, name, - entities=self._entities, - func=func, + partial( + service.entity_service_call, + self.hass, + self._entities, + service_func, + required_features=required_features, + ), + schema, + supports_response, job_type=HassJobType.Coroutinefunction, - required_features=required_features, - schema=schema, - supports_response=supports_response, ) async def async_setup_platform( diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index 62eed213b2a..d868e582f8f 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -6,6 +6,7 @@ import asyncio from collections.abc import Awaitable, Callable, Coroutine, Iterable from contextvars import ContextVar from datetime import timedelta +from functools import partial from logging import Logger, getLogger from typing import TYPE_CHECKING, Any, Protocol @@ -19,6 +20,7 @@ from homeassistant.core import ( CALLBACK_TYPE, DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, + HassJob, HomeAssistant, ServiceCall, SupportsResponse, @@ -39,6 +41,7 @@ from homeassistant.util.async_ import create_eager_task from homeassistant.util.hass_dict import HassKey from . 
import ( + config_validation as cv, device_registry as dev_reg, entity_registry as ent_reg, service, @@ -111,11 +114,7 @@ class EntityPlatformModule(Protocol): class EntityPlatform: - """Manage the entities for a single platform. - - An example of an entity platform is 'hue.light', which is managed by - the entity component 'light'. - """ + """Manage the entities for a single platform.""" def __init__( self, @@ -588,7 +587,7 @@ class EntityPlatform: """Add entities for a single platform without updating. In this case we are not updating the entities before adding them - which means it is likely that we will not have to yield control + which means its unlikely that we will not have to yield control to the event loop so we can await the coros directly without scheduling them as tasks. """ @@ -732,6 +731,7 @@ class EntityPlatform: return suggested_object_id: str | None = None + generate_new_entity_id = False entity_name = entity.name if entity_name is UNDEFINED: @@ -841,39 +841,33 @@ class EntityPlatform: entity.device_entry = device entity.entity_id = entry.entity_id - else: # entity.unique_id is None - generate_new_entity_id = False - # We won't generate an entity ID if the platform has already set one - # We will however make sure that platform cannot pick a registered ID - if entity.entity_id is not None and entity_registry.async_is_registered( - entity.entity_id - ): - # If entity already registered, convert entity id to suggestion - suggested_object_id = split_entity_id(entity.entity_id)[1] - generate_new_entity_id = True + # We won't generate an entity ID if the platform has already set one + # We will however make sure that platform cannot pick a registered ID + elif entity.entity_id is not None and entity_registry.async_is_registered( + entity.entity_id + ): + # If entity already registered, convert entity id to suggestion + suggested_object_id = split_entity_id(entity.entity_id)[1] + generate_new_entity_id = True - # Generate entity ID - if entity.entity_id is None or generate_new_entity_id: - suggested_object_id = ( - suggested_object_id - or entity.suggested_object_id - or DEVICE_DEFAULT_NAME - ) + # Generate entity ID + if entity.entity_id is None or generate_new_entity_id: + suggested_object_id = ( + suggested_object_id or entity.suggested_object_id or DEVICE_DEFAULT_NAME + ) - if self.entity_namespace is not None: - suggested_object_id = ( - f"{self.entity_namespace} {suggested_object_id}" - ) - entity.entity_id = entity_registry.async_generate_entity_id( - self.domain, suggested_object_id, self.entities - ) + if self.entity_namespace is not None: + suggested_object_id = f"{self.entity_namespace} {suggested_object_id}" + entity.entity_id = entity_registry.async_generate_entity_id( + self.domain, suggested_object_id, self.entities + ) - # Make sure it is valid in case an entity set the value themselves - # Avoid calling valid_entity_id if we already know it is valid - # since it already made it in the registry - if not valid_entity_id(entity.entity_id): - entity.add_to_platform_abort() - raise HomeAssistantError(f"Invalid entity ID: {entity.entity_id}") + # Make sure it is valid in case an entity set the value themselves + # Avoid calling valid_entity_id if we already know it is valid + # since it already made it in the registry + if not entity.registry_entry and not valid_entity_id(entity.entity_id): + entity.add_to_platform_abort() + raise HomeAssistantError(f"Invalid entity ID: {entity.entity_id}") already_exists, restored = self._entity_id_already_exists(entity.entity_id) @@ 
-1003,16 +997,24 @@ class EntityPlatform: if self.hass.services.has_service(self.platform_name, name): return - service.async_register_entity_service( - self.hass, + if isinstance(schema, dict): + schema = cv.make_entity_service_schema(schema) + + service_func: str | HassJob[..., Any] + service_func = func if isinstance(func, str) else HassJob(func) + + self.hass.services.async_register( self.platform_name, name, - entities=self.domain_platform_entities, - func=func, - job_type=None, - required_features=required_features, - schema=schema, - supports_response=supports_response, + partial( + service.entity_service_call, + self.hass, + self.domain_platform_entities, + service_func, + required_features=required_features, + ), + schema, + supports_response, ) async def _async_update_entity_states(self) -> None: diff --git a/homeassistant/helpers/entity_registry.py b/homeassistant/helpers/entity_registry.py index 9d50b7ae83b..dabe2e61917 100644 --- a/homeassistant/helpers/entity_registry.py +++ b/homeassistant/helpers/entity_registry.py @@ -14,6 +14,7 @@ from collections import defaultdict from collections.abc import Callable, Container, Hashable, KeysView, Mapping from datetime import datetime, timedelta from enum import StrEnum +from functools import cached_property import logging import time from typing import TYPE_CHECKING, Any, Literal, NotRequired, TypedDict @@ -47,7 +48,6 @@ from homeassistant.core import ( from homeassistant.exceptions import MaxLengthExceeded from homeassistant.loader import async_suggest_report_issue from homeassistant.util import slugify, uuid as uuid_util -from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey from homeassistant.util.json import format_unserializable_data @@ -64,12 +64,7 @@ from .singleton import singleton from .typing import UNDEFINED, UndefinedType if TYPE_CHECKING: - # mypy cannot workout _cache Protocol with attrs - from propcache import cached_property as under_cached_property - from homeassistant.config_entries import ConfigEntry -else: - from propcache import under_cached_property DATA_REGISTRY: HassKey[EntityRegistry] = HassKey("entity_registry") EVENT_ENTITY_REGISTRY_UPDATED: EventType[EventEntityRegistryUpdatedData] = EventType( @@ -79,7 +74,7 @@ EVENT_ENTITY_REGISTRY_UPDATED: EventType[EventEntityRegistryUpdatedData] = Event _LOGGER = logging.getLogger(__name__) STORAGE_VERSION_MAJOR = 1 -STORAGE_VERSION_MINOR = 15 +STORAGE_VERSION_MINOR = 14 STORAGE_KEY = "core.entity_registry" CLEANUP_INTERVAL = 3600 * 24 @@ -166,7 +161,7 @@ def _protect_entity_options( return ReadOnlyDict({key: ReadOnlyDict(val) for key, val in data.items()}) -@attr.s(frozen=True, slots=True) +@attr.s(frozen=True) class RegistryEntry: """Entity Registry Entry.""" @@ -179,7 +174,6 @@ class RegistryEntry: categories: dict[str, str] = attr.ib(factory=dict) capabilities: Mapping[str, Any] | None = attr.ib(default=None) config_entry_id: str | None = attr.ib(default=None) - created_at: datetime = attr.ib(factory=utcnow) device_class: str | None = attr.ib(default=None) device_id: str | None = attr.ib(default=None) domain: str = attr.ib(init=False, repr=False) @@ -193,7 +187,6 @@ class RegistryEntry: ) has_entity_name: bool = attr.ib(default=False) labels: set[str] = attr.ib(factory=set) - modified_at: datetime = attr.ib(factory=utcnow) name: str | None = attr.ib(default=None) options: ReadOnlyEntityOptionsType = attr.ib( default=None, converter=_protect_entity_options @@ 
-205,7 +198,6 @@ class RegistryEntry: supported_features: int = attr.ib(default=0) translation_key: str | None = attr.ib(default=None) unit_of_measurement: str | None = attr.ib(default=None) - _cache: dict[str, Any] = attr.ib(factory=dict, eq=False, init=False) @domain.default def _domain_default(self) -> str: @@ -240,11 +232,8 @@ class RegistryEntry: display_dict["ec"] = ENTITY_CATEGORY_VALUE_TO_INDEX[category] if self.hidden_by is not None: display_dict["hb"] = True - if self.has_entity_name: - display_dict["hn"] = True - name = self.name or self.original_name - if name is not None: - display_dict["en"] = name + if not self.name and self.has_entity_name: + display_dict["en"] = self.original_name if self.domain == "sensor" and (sensor_options := self.options.get("sensor")): if (precision := sensor_options.get("display_precision")) is not None or ( precision := sensor_options.get("suggested_display_precision") @@ -252,7 +241,7 @@ class RegistryEntry: display_dict["dp"] = precision return display_dict - @under_cached_property + @cached_property def display_json_repr(self) -> bytes | None: """Return a cached partial JSON representation of the entry. @@ -272,7 +261,7 @@ class RegistryEntry: return None return json_repr - @under_cached_property + @cached_property def as_partial_dict(self) -> dict[str, Any]: """Return a partial dict representation of the entry.""" # Convert sets and tuples to lists @@ -282,7 +271,6 @@ class RegistryEntry: "area_id": self.area_id, "categories": self.categories, "config_entry_id": self.config_entry_id, - "created_at": self.created_at.timestamp(), "device_id": self.device_id, "disabled_by": self.disabled_by, "entity_category": self.entity_category, @@ -292,7 +280,6 @@ class RegistryEntry: "icon": self.icon, "id": self.id, "labels": list(self.labels), - "modified_at": self.modified_at.timestamp(), "name": self.name, "options": self.options, "original_name": self.original_name, @@ -301,7 +288,7 @@ class RegistryEntry: "unique_id": self.unique_id, } - @under_cached_property + @cached_property def extended_dict(self) -> dict[str, Any]: """Return a extended dict representation of the entry.""" # Convert sets and tuples to lists @@ -316,7 +303,7 @@ class RegistryEntry: "original_icon": self.original_icon, } - @under_cached_property + @cached_property def partial_json_repr(self) -> bytes | None: """Return a cached partial JSON representation of the entry.""" try: @@ -332,7 +319,7 @@ class RegistryEntry: ) return None - @under_cached_property + @cached_property def as_storage_fragment(self) -> json_fragment: """Return a json fragment for storage.""" return json_fragment( @@ -343,7 +330,6 @@ class RegistryEntry: "categories": self.categories, "capabilities": self.capabilities, "config_entry_id": self.config_entry_id, - "created_at": self.created_at, "device_class": self.device_class, "device_id": self.device_id, "disabled_by": self.disabled_by, @@ -354,7 +340,6 @@ class RegistryEntry: "id": self.id, "has_entity_name": self.has_entity_name, "labels": list(self.labels), - "modified_at": self.modified_at, "name": self.name, "options": self.options, "original_device_class": self.original_device_class, @@ -399,7 +384,7 @@ class RegistryEntry: hass.states.async_set(self.entity_id, STATE_UNAVAILABLE, attrs) -@attr.s(frozen=True, slots=True) +@attr.s(frozen=True) class DeletedRegistryEntry: """Deleted Entity Registry Entry.""" @@ -410,26 +395,21 @@ class DeletedRegistryEntry: domain: str = attr.ib(init=False, repr=False) id: str = attr.ib() orphaned_timestamp: float | None = 
attr.ib() - created_at: datetime = attr.ib(factory=utcnow) - modified_at: datetime = attr.ib(factory=utcnow) - _cache: dict[str, Any] = attr.ib(factory=dict, eq=False, init=False) @domain.default def _domain_default(self) -> str: """Compute domain value.""" return split_entity_id(self.entity_id)[0] - @under_cached_property + @cached_property def as_storage_fragment(self) -> json_fragment: """Return a json fragment for storage.""" return json_fragment( json_bytes( { "config_entry_id": self.config_entry_id, - "created_at": self.created_at, "entity_id": self.entity_id, "id": self.id, - "modified_at": self.modified_at, "orphaned_timestamp": self.orphaned_timestamp, "platform": self.platform, "unique_id": self.unique_id, @@ -449,97 +429,88 @@ class EntityRegistryStore(storage.Store[dict[str, list[dict[str, Any]]]]): ) -> dict: """Migrate to the new version.""" data = old_data - if old_major_version == 1: - if old_minor_version < 2: - # Version 1.2 implements migration and freezes the available keys - for entity in data["entities"]: - # Populate keys which were introduced before version 1.2 - entity.setdefault("area_id", None) - entity.setdefault("capabilities", {}) - entity.setdefault("config_entry_id", None) - entity.setdefault("device_class", None) - entity.setdefault("device_id", None) - entity.setdefault("disabled_by", None) - entity.setdefault("entity_category", None) - entity.setdefault("icon", None) - entity.setdefault("name", None) - entity.setdefault("original_icon", None) - entity.setdefault("original_name", None) - entity.setdefault("supported_features", 0) - entity.setdefault("unit_of_measurement", None) + if old_major_version == 1 and old_minor_version < 2: + # Version 1.2 implements migration and freezes the available keys + for entity in data["entities"]: + # Populate keys which were introduced before version 1.2 + entity.setdefault("area_id", None) + entity.setdefault("capabilities", {}) + entity.setdefault("config_entry_id", None) + entity.setdefault("device_class", None) + entity.setdefault("device_id", None) + entity.setdefault("disabled_by", None) + entity.setdefault("entity_category", None) + entity.setdefault("icon", None) + entity.setdefault("name", None) + entity.setdefault("original_icon", None) + entity.setdefault("original_name", None) + entity.setdefault("supported_features", 0) + entity.setdefault("unit_of_measurement", None) - if old_minor_version < 3: - # Version 1.3 adds original_device_class - for entity in data["entities"]: - # Move device_class to original_device_class - entity["original_device_class"] = entity["device_class"] - entity["device_class"] = None + if old_major_version == 1 and old_minor_version < 3: + # Version 1.3 adds original_device_class + for entity in data["entities"]: + # Move device_class to original_device_class + entity["original_device_class"] = entity["device_class"] + entity["device_class"] = None - if old_minor_version < 4: - # Version 1.4 adds id - for entity in data["entities"]: - entity["id"] = uuid_util.random_uuid_hex() + if old_major_version == 1 and old_minor_version < 4: + # Version 1.4 adds id + for entity in data["entities"]: + entity["id"] = uuid_util.random_uuid_hex() - if old_minor_version < 5: - # Version 1.5 adds entity options - for entity in data["entities"]: - entity["options"] = {} + if old_major_version == 1 and old_minor_version < 5: + # Version 1.5 adds entity options + for entity in data["entities"]: + entity["options"] = {} - if old_minor_version < 6: - # Version 1.6 adds hidden_by - for entity in 
data["entities"]: - entity["hidden_by"] = None + if old_major_version == 1 and old_minor_version < 6: + # Version 1.6 adds hidden_by + for entity in data["entities"]: + entity["hidden_by"] = None - if old_minor_version < 7: - # Version 1.7 adds has_entity_name - for entity in data["entities"]: - entity["has_entity_name"] = False + if old_major_version == 1 and old_minor_version < 7: + # Version 1.7 adds has_entity_name + for entity in data["entities"]: + entity["has_entity_name"] = False - if old_minor_version < 8: - # Cleanup after frontend bug which incorrectly updated device_class - # Fixed by frontend PR #13551 - for entity in data["entities"]: - domain = split_entity_id(entity["entity_id"])[0] - if domain in [Platform.BINARY_SENSOR, Platform.COVER]: - continue - entity["device_class"] = None + if old_major_version == 1 and old_minor_version < 8: + # Cleanup after frontend bug which incorrectly updated device_class + # Fixed by frontend PR #13551 + for entity in data["entities"]: + domain = split_entity_id(entity["entity_id"])[0] + if domain in [Platform.BINARY_SENSOR, Platform.COVER]: + continue + entity["device_class"] = None - if old_minor_version < 9: - # Version 1.9 adds translation_key - for entity in data["entities"]: - entity["translation_key"] = None + if old_major_version == 1 and old_minor_version < 9: + # Version 1.9 adds translation_key + for entity in data["entities"]: + entity["translation_key"] = None - if old_minor_version < 10: - # Version 1.10 adds aliases - for entity in data["entities"]: - entity["aliases"] = [] + if old_major_version == 1 and old_minor_version < 10: + # Version 1.10 adds aliases + for entity in data["entities"]: + entity["aliases"] = [] - if old_minor_version < 11: - # Version 1.11 adds deleted_entities - data["deleted_entities"] = data.get("deleted_entities", []) + if old_major_version == 1 and old_minor_version < 11: + # Version 1.11 adds deleted_entities + data["deleted_entities"] = data.get("deleted_entities", []) - if old_minor_version < 12: - # Version 1.12 adds previous_unique_id - for entity in data["entities"]: - entity["previous_unique_id"] = None + if old_major_version == 1 and old_minor_version < 12: + # Version 1.12 adds previous_unique_id + for entity in data["entities"]: + entity["previous_unique_id"] = None - if old_minor_version < 13: - # Version 1.13 adds labels - for entity in data["entities"]: - entity["labels"] = [] + if old_major_version == 1 and old_minor_version < 13: + # Version 1.13 adds labels + for entity in data["entities"]: + entity["labels"] = [] - if old_minor_version < 14: - # Version 1.14 adds categories - for entity in data["entities"]: - entity["categories"] = {} - - if old_minor_version < 15: - # Version 1.15 adds created_at and modified_at - created_at = utc_from_timestamp(0).isoformat() - for entity in data["entities"]: - entity["created_at"] = entity["modified_at"] = created_at - for entity in data["deleted_entities"]: - entity["created_at"] = entity["modified_at"] = created_at + if old_major_version == 1 and old_minor_version < 14: + # Version 1.14 adds categories + for entity in data["entities"]: + entity["categories"] = {} if old_major_version > 1: raise NotImplementedError @@ -866,12 +837,10 @@ class EntityRegistry(BaseRegistry): ) entity_registry_id: str | None = None - created_at = utcnow() deleted_entity = self.deleted_entities.pop((domain, platform, unique_id), None) if deleted_entity is not None: # Restore id entity_registry_id = deleted_entity.id - created_at = deleted_entity.created_at entity_id = 
self.async_generate_entity_id( domain, @@ -896,7 +865,6 @@ class EntityRegistry(BaseRegistry): entry = RegistryEntry( capabilities=none_if_undefined(capabilities), config_entry_id=none_if_undefined(config_entry_id), - created_at=created_at, device_id=none_if_undefined(device_id), disabled_by=disabled_by, entity_category=none_if_undefined(entity_category), @@ -938,7 +906,6 @@ class EntityRegistry(BaseRegistry): orphaned_timestamp = None if config_entry_id else time.time() self.deleted_entities[key] = DeletedRegistryEntry( config_entry_id=config_entry_id, - created_at=entity.created_at, entity_id=entity_id, id=entity.id, orphaned_timestamp=orphaned_timestamp, @@ -1126,8 +1093,6 @@ class EntityRegistry(BaseRegistry): if not new_values: return old - new_values["modified_at"] = utcnow() - self.hass.verify_event_loop_thread("entity_registry.async_update_entity") new = self.entities[entity_id] = attr.evolve(old, **new_values) @@ -1295,7 +1260,6 @@ class EntityRegistry(BaseRegistry): categories=entity["categories"], capabilities=entity["capabilities"], config_entry_id=entity["config_entry_id"], - created_at=datetime.fromisoformat(entity["created_at"]), device_class=entity["device_class"], device_id=entity["device_id"], disabled_by=RegistryEntryDisabler(entity["disabled_by"]) @@ -1312,7 +1276,6 @@ class EntityRegistry(BaseRegistry): id=entity["id"], has_entity_name=entity["has_entity_name"], labels=set(entity["labels"]), - modified_at=datetime.fromisoformat(entity["modified_at"]), name=entity["name"], options=entity["options"], original_device_class=entity["original_device_class"], @@ -1344,10 +1307,8 @@ class EntityRegistry(BaseRegistry): ) deleted_entities[key] = DeletedRegistryEntry( config_entry_id=entity["config_entry_id"], - created_at=datetime.fromisoformat(entity["created_at"]), entity_id=entity["entity_id"], id=entity["id"], - modified_at=datetime.fromisoformat(entity["modified_at"]), orphaned_timestamp=entity["orphaned_timestamp"], platform=entity["platform"], unique_id=entity["unique_id"], diff --git a/homeassistant/helpers/entityfilter.py b/homeassistant/helpers/entityfilter.py index 1eaa0fb1404..24b65cba82a 100644 --- a/homeassistant/helpers/entityfilter.py +++ b/homeassistant/helpers/entityfilter.py @@ -4,8 +4,7 @@ from __future__ import annotations from collections.abc import Callable import fnmatch -from functools import lru_cache, partial -import operator +from functools import lru_cache import re import voluptuous as vol @@ -196,7 +195,7 @@ def _generate_filter_from_sets_and_pattern_lists( # Case 1 - No filter # - All entities included if not have_include and not have_exclude: - return bool + return lambda entity_id: True # Case 2 - Only includes # - Entity listed in entities include: include @@ -281,4 +280,4 @@ def _generate_filter_from_sets_and_pattern_lists( # Case 6 - No Domain and/or glob includes or excludes # - Entity listed in entities include: include # - Otherwise: exclude - return partial(operator.contains, include_e) + return lambda entity_id: entity_id in include_e diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 61a798dbd75..0c77809079e 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -322,26 +322,12 @@ def async_track_state_change_event( for each one, we keep a dict of entity ids that care about the state change events so we can do a fast dict lookup to route events. - The passed in entity_ids will be automatically lower cased. 
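The entityfilter hunk above switches between two equivalent ways to build the "no filter" and "include-only" predicates. A standalone sketch of the bool / operator.contains variant shown on the removed side; names are illustrative, not the helper's real API:

from collections.abc import Callable
from functools import partial
import operator


def make_entity_filter(include: set[str]) -> Callable[[str], bool]:
    if not include:
        # No filter configured: any non-empty entity_id passes, so bool() suffices.
        return bool
    # Allocation-free membership check, equivalent to: lambda entity_id: entity_id in include
    return partial(operator.contains, include)


allow = make_entity_filter({"light.kitchen"})
assert allow("light.kitchen") is True
assert allow("switch.porch") is False
assert make_entity_filter(set())("sensor.anything") is True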
- - EVENT_STATE_CHANGED is fired on each occasion the state is updated - and changed, opposite of EVENT_STATE_REPORTED. """ if not (entity_ids := _async_string_to_lower_list(entity_ids)): return _remove_empty_listener return _async_track_state_change_event(hass, entity_ids, action, job_type) -@callback -def _async_dispatch_entity_id_event_soon( - hass: HomeAssistant, - callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], - event: Event[_StateEventDataT], -) -> None: - """Dispatch to listeners soon to ensure one event loop runs before dispatch.""" - hass.loop.call_soon(_async_dispatch_entity_id_event, hass, callbacks, event) - - @callback def _async_dispatch_entity_id_event( hass: HomeAssistant, @@ -375,7 +361,7 @@ def _async_state_filter( _KEYED_TRACK_STATE_CHANGE = _KeyedEventTracker( key=_TRACK_STATE_CHANGE_DATA, event_type=EVENT_STATE_CHANGED, - dispatcher_callable=_async_dispatch_entity_id_event_soon, + dispatcher_callable=_async_dispatch_entity_id_event, filter_callable=_async_state_filter, ) @@ -387,10 +373,7 @@ def _async_track_state_change_event( action: Callable[[Event[EventStateChangedData]], Any], job_type: HassJobType | None, ) -> CALLBACK_TYPE: - """Faster version of async_track_state_change_event. - - The passed in entity_ids will not be automatically lower cased. - """ + """async_track_state_change_event without lowercasing.""" return _async_track_event( _KEYED_TRACK_STATE_CHANGE, hass, entity_ids, action, job_type ) @@ -410,11 +393,7 @@ def async_track_state_report_event( action: Callable[[Event[EventStateReportedData]], Any], job_type: HassJobType | None = None, ) -> CALLBACK_TYPE: - """Track EVENT_STATE_REPORTED by entity_ids. - - EVENT_STATE_REPORTED is fired on each occasion the state is updated - but not changed, opposite of EVENT_STATE_CHANGED. 
- """ + """Track EVENT_STATE_REPORTED by entity_id without lowercasing.""" return _async_track_event( _KEYED_TRACK_STATE_REPORT, hass, entity_ids, action, job_type ) @@ -987,27 +966,13 @@ class TrackTemplateResultInfo: self.hass = hass self._job = HassJob(action, f"track template result {track_templates}") + for track_template_ in track_templates: + track_template_.template.hass = hass self._track_templates = track_templates self._has_super_template = has_super_template self._last_result: dict[Template, bool | str | TemplateError] = {} - for track_template_ in track_templates: - if track_template_.template.hass: - continue - - # pylint: disable-next=import-outside-toplevel - from .frame import ReportBehavior, report_usage - - report_usage( - ( - "calls async_track_template_result with template without hass, " - "which will stop working in HA Core 2025.10" - ), - core_behavior=ReportBehavior.LOG, - ) - track_template_.template.hass = hass - self._rate_limit = KeyedRateLimit(hass) self._info: dict[Template, RenderInfo] = {} self._track_state_changes: _TrackStateChangeFiltered | None = None diff --git a/homeassistant/helpers/floor_registry.py b/homeassistant/helpers/floor_registry.py index fcfca8e3212..9bf8a2a5d26 100644 --- a/homeassistant/helpers/floor_registry.py +++ b/homeassistant/helpers/floor_registry.py @@ -5,17 +5,17 @@ from __future__ import annotations from collections.abc import Iterable import dataclasses from dataclasses import dataclass -from datetime import datetime -from typing import Any, Literal, TypedDict +from typing import Literal, TypedDict from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.util.dt import utc_from_timestamp, utcnow +from homeassistant.util import slugify from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey from .normalized_name_base_registry import ( NormalizedNameBaseRegistryEntry, NormalizedNameBaseRegistryItems, + normalize_name, ) from .registry import BaseRegistry from .singleton import singleton @@ -28,7 +28,6 @@ EVENT_FLOOR_REGISTRY_UPDATED: EventType[EventFloorRegistryUpdatedData] = EventTy ) STORAGE_KEY = "core.floor_registry" STORAGE_VERSION_MAJOR = 1 -STORAGE_VERSION_MINOR = 2 class _FloorStoreData(TypedDict): @@ -39,8 +38,6 @@ class _FloorStoreData(TypedDict): icon: str | None level: int | None name: str - created_at: str - modified_at: str class FloorRegistryStoreData(TypedDict): @@ -69,29 +66,6 @@ class FloorEntry(NormalizedNameBaseRegistryEntry): level: int | None = None -class FloorRegistryStore(Store[FloorRegistryStoreData]): - """Store floor registry data.""" - - async def _async_migrate_func( - self, - old_major_version: int, - old_minor_version: int, - old_data: dict[str, list[dict[str, Any]]], - ) -> FloorRegistryStoreData: - """Migrate to the new version.""" - if old_major_version > STORAGE_VERSION_MAJOR: - raise ValueError("Can't migrate to future version") - - if old_major_version == 1: - if old_minor_version < 2: - # Version 1.2 implements migration and adds created_at and modified_at - created_at = utc_from_timestamp(0).isoformat() - for floor in old_data["floors"]: - floor["created_at"] = floor["modified_at"] = created_at - - return old_data # type: ignore[return-value] - - class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): """Class to hold a registry of floors.""" @@ -101,12 +75,11 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): def __init__(self, hass: HomeAssistant) -> None: """Initialize the floor registry.""" self.hass = 
hass - self._store = FloorRegistryStore( + self._store = Store( hass, STORAGE_VERSION_MAJOR, STORAGE_KEY, atomic_writes=True, - minor_version=STORAGE_VERSION_MINOR, ) @callback @@ -128,9 +101,15 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): """Get all floors.""" return self.floors.values() + @callback def _generate_id(self, name: str) -> str: """Generate floor ID.""" - return self.floors.generate_id_from_name(name) + suggestion = suggestion_base = slugify(name) + tries = 1 + while suggestion in self.floors: + tries += 1 + suggestion = f"{suggestion_base}_{tries}" + return suggestion @callback def async_create( @@ -143,26 +122,30 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): ) -> FloorEntry: """Create a new floor.""" self.hass.verify_event_loop_thread("floor_registry.async_create") - if floor := self.async_get_floor_by_name(name): raise ValueError( f"The name {name} ({floor.normalized_name}) is already in use" ) + normalized_name = normalize_name(name) + floor = FloorEntry( aliases=aliases or set(), icon=icon, floor_id=self._generate_id(name), name=name, + normalized_name=normalized_name, level=level, ) floor_id = floor.floor_id self.floors[floor_id] = floor self.async_schedule_save() - self.hass.bus.async_fire_internal( EVENT_FLOOR_REGISTRY_UPDATED, - EventFloorRegistryUpdatedData(action="create", floor_id=floor_id), + EventFloorRegistryUpdatedData( + action="create", + floor_id=floor_id, + ), ) return floor @@ -192,7 +175,7 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): ) -> FloorEntry: """Update name of the floor.""" old = self.floors[floor_id] - changes: dict[str, Any] = { + changes = { attr_name: value for attr_name, value in ( ("aliases", aliases), @@ -203,14 +186,13 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): } if name is not UNDEFINED and name != old.name: changes["name"] = name + changes["normalized_name"] = normalize_name(name) if not changes: return old - changes["modified_at"] = utcnow() - self.hass.verify_event_loop_thread("floor_registry.async_update") - new = self.floors[floor_id] = dataclasses.replace(old, **changes) + new = self.floors[floor_id] = dataclasses.replace(old, **changes) # type: ignore[arg-type] self.async_schedule_save() self.hass.bus.async_fire_internal( @@ -230,14 +212,14 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): if data is not None: for floor in data["floors"]: + normalized_name = normalize_name(floor["name"]) floors[floor["floor_id"]] = FloorEntry( aliases=set(floor["aliases"]), icon=floor["icon"], floor_id=floor["floor_id"], name=floor["name"], level=floor["level"], - created_at=datetime.fromisoformat(floor["created_at"]), - modified_at=datetime.fromisoformat(floor["modified_at"]), + normalized_name=normalized_name, ) self.floors = floors @@ -254,8 +236,6 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): "icon": entry.icon, "level": entry.level, "name": entry.name, - "created_at": entry.created_at.isoformat(), - "modified_at": entry.modified_at.isoformat(), } for entry in self.floors.values() ] diff --git a/homeassistant/helpers/frame.py b/homeassistant/helpers/frame.py index eda98099713..8a30c26886e 100644 --- a/homeassistant/helpers/frame.py +++ b/homeassistant/helpers/frame.py @@ -5,16 +5,14 @@ from __future__ import annotations import asyncio from collections.abc import Callable from dataclasses import dataclass -import enum import functools +from functools import cached_property import linecache import logging import sys from types import FrameType from 
typing import Any, cast -from propcache import cached_property - from homeassistant.core import async_get_hass_or_none from homeassistant.exceptions import HomeAssistantError from homeassistant.loader import async_suggest_report_issue @@ -131,65 +129,15 @@ class MissingIntegrationFrame(HomeAssistantError): def report( what: str, - *, - exclude_integrations: set[str] | None = None, + exclude_integrations: set | None = None, error_if_core: bool = True, - error_if_integration: bool = False, level: int = logging.WARNING, log_custom_component_only: bool = False, + error_if_integration: bool = False, ) -> None: """Report incorrect usage. - If error_if_core is True, raise instead of log if an integration is not found - when unwinding the stack frame. - If error_if_integration is True, raise instead of log if an integration is found - when unwinding the stack frame. - """ - core_behavior = ReportBehavior.ERROR if error_if_core else ReportBehavior.LOG - core_integration_behavior = ( - ReportBehavior.ERROR if error_if_integration else ReportBehavior.LOG - ) - custom_integration_behavior = core_integration_behavior - - if log_custom_component_only: - if core_behavior is ReportBehavior.LOG: - core_behavior = ReportBehavior.IGNORE - if core_integration_behavior is ReportBehavior.LOG: - core_integration_behavior = ReportBehavior.IGNORE - - report_usage( - what, - core_behavior=core_behavior, - core_integration_behavior=core_integration_behavior, - custom_integration_behavior=custom_integration_behavior, - exclude_integrations=exclude_integrations, - level=level, - ) - - -class ReportBehavior(enum.Enum): - """Enum for behavior on code usage.""" - - IGNORE = enum.auto() - """Ignore the code usage.""" - LOG = enum.auto() - """Log the code usage.""" - ERROR = enum.auto() - """Raise an error on code usage.""" - - -def report_usage( - what: str, - *, - core_behavior: ReportBehavior = ReportBehavior.ERROR, - core_integration_behavior: ReportBehavior = ReportBehavior.LOG, - custom_integration_behavior: ReportBehavior = ReportBehavior.LOG, - exclude_integrations: set[str] | None = None, - level: int = logging.WARNING, -) -> None: - """Report incorrect code usage. - - Similar to `report` but allows more fine-grained reporting. + Async friendly. """ try: integration_frame = get_integration_frame( @@ -197,20 +145,18 @@ def report_usage( ) except MissingIntegrationFrame as err: msg = f"Detected code that {what}. Please report this issue." 
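The frame.py hunk here folds the ReportBehavior enum back into boolean flags. A simplified, self-contained sketch of that flag handling for the case where no integration frame is found (it skips the stack inspection the real helper performs; the function name is an assumption):

import logging

_LOGGER = logging.getLogger(__name__)


def report_bad_usage(
    what: str,
    *,
    error_if_core: bool = True,
    log_custom_component_only: bool = False,
    level: int = logging.WARNING,
) -> None:
    """Raise when error_if_core is set, otherwise log a warning with stack info."""
    msg = f"Detected code that {what}. Please report this issue."
    if error_if_core:
        raise RuntimeError(msg)
    if not log_custom_component_only:
        _LOGGER.log(level, msg, stack_info=True)


try:
    report_bad_usage("calls a deprecated helper")
except RuntimeError as err:
    print(err)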
- if core_behavior is ReportBehavior.ERROR: + if error_if_core: raise RuntimeError(msg) from err - if core_behavior is ReportBehavior.LOG: + if not log_custom_component_only: _LOGGER.warning(msg, stack_info=True) return - integration_behavior = core_integration_behavior - if integration_frame.custom_integration: - integration_behavior = custom_integration_behavior - - if integration_behavior is not ReportBehavior.IGNORE: - _report_integration( - what, integration_frame, level, integration_behavior is ReportBehavior.ERROR - ) + if ( + error_if_integration + or not log_custom_component_only + or integration_frame.custom_integration + ): + _report_integration(what, integration_frame, level, error_if_integration) def _report_integration( diff --git a/homeassistant/helpers/hassio.py b/homeassistant/helpers/hassio.py deleted file mode 100644 index 51503f709d6..00000000000 --- a/homeassistant/helpers/hassio.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Hass.io helper.""" - -import os - -from homeassistant.core import HomeAssistant, callback - - -@callback -def is_hassio(hass: HomeAssistant) -> bool: - """Return true if Hass.io is loaded. - - Async friendly. - """ - return "hassio" in hass.config.components - - -@callback -def get_supervisor_ip() -> str | None: - """Return the supervisor ip address.""" - if "SUPERVISOR" not in os.environ: - return None - return os.environ["SUPERVISOR"].partition(":")[0] diff --git a/homeassistant/helpers/icon.py b/homeassistant/helpers/icon.py index ce8205eb915..e759719f667 100644 --- a/homeassistant/helpers/icon.py +++ b/homeassistant/helpers/icon.py @@ -7,7 +7,7 @@ from collections.abc import Iterable from functools import lru_cache import logging import pathlib -from typing import Any, cast +from typing import Any from homeassistant.core import HomeAssistant, callback from homeassistant.loader import Integration, async_get_integrations @@ -21,34 +21,12 @@ ICON_CACHE: HassKey[_IconsCache] = HassKey("icon_cache") _LOGGER = logging.getLogger(__name__) -def convert_shorthand_service_icon( - value: str | dict[str, str | dict[str, str]], -) -> dict[str, str | dict[str, str]]: - """Convert shorthand service icon to dict.""" - if isinstance(value, str): - return {"service": value} - return value - - -def _load_icons_file( - icons_file: pathlib.Path, -) -> dict[str, Any]: - """Load and parse an icons.json file.""" - icons = load_json_object(icons_file) - if "services" not in icons: - return icons - services = cast(dict[str, str | dict[str, str | dict[str, str]]], icons["services"]) - for service, service_icons in services.items(): - services[service] = convert_shorthand_service_icon(service_icons) - return icons - - def _load_icons_files( icons_files: dict[str, pathlib.Path], ) -> dict[str, dict[str, Any]]: """Load and parse icons.json files.""" return { - component: _load_icons_file(icons_file) + component: load_json_object(icons_file) for component, icons_file in icons_files.items() } diff --git a/homeassistant/helpers/intent.py b/homeassistant/helpers/intent.py index b38f769b302..1bf78ae3a29 100644 --- a/homeassistant/helpers/intent.py +++ b/homeassistant/helpers/intent.py @@ -7,12 +7,12 @@ import asyncio from collections.abc import Callable, Collection, Coroutine, Iterable import dataclasses from dataclasses import dataclass, field -from enum import Enum, StrEnum, auto +from enum import Enum, auto +from functools import cached_property from itertools import groupby import logging from typing import Any -from propcache import cached_property import voluptuous as vol from 
homeassistant.components.homeassistant.exposed_entities import async_should_expose @@ -54,9 +54,6 @@ INTENT_DECREASE_TIMER = "HassDecreaseTimer" INTENT_PAUSE_TIMER = "HassPauseTimer" INTENT_UNPAUSE_TIMER = "HassUnpauseTimer" INTENT_TIMER_STATUS = "HassTimerStatus" -INTENT_GET_CURRENT_DATE = "HassGetCurrentDate" -INTENT_GET_CURRENT_TIME = "HassGetCurrentTime" -INTENT_RESPOND = "HassRespond" SLOT_SCHEMA = vol.Schema({}, extra=vol.ALLOW_EXTRA) @@ -352,7 +349,6 @@ class MatchTargetsCandidate: """Candidate for async_match_targets.""" state: State - is_exposed: bool entity: entity_registry.RegistryEntry | None = None area: area_registry.AreaEntry | None = None floor: floor_registry.FloorEntry | None = None @@ -516,31 +512,29 @@ def async_match_targets( # noqa: C901 if not states: return MatchTargetsResult(False, MatchFailedReason.DOMAIN) - candidates = [ - MatchTargetsCandidate( - state=state, - is_exposed=( - async_should_expose(hass, constraints.assistant, state.entity_id) - if constraints.assistant - else True - ), - ) - for state in states - ] + if constraints.assistant: + # Filter by exposure + states = [ + s + for s in states + if async_should_expose(hass, constraints.assistant, s.entity_id) + ] + if not states: + return MatchTargetsResult(False, MatchFailedReason.ASSISTANT) if constraints.domains and (not filtered_by_domain): # Filter by domain (if we didn't already do it) - candidates = [c for c in candidates if c.state.domain in constraints.domains] - if not candidates: + states = [s for s in states if s.domain in constraints.domains] + if not states: return MatchTargetsResult(False, MatchFailedReason.DOMAIN) if constraints.states: # Filter by state - candidates = [c for c in candidates if c.state.state in constraints.states] - if not candidates: + states = [s for s in states if s.state in constraints.states] + if not states: return MatchTargetsResult(False, MatchFailedReason.STATE) - # Try to exit early so we can avoid registry lookups + # Exit early so we can avoid registry lookups if not ( constraints.name or constraints.features @@ -548,18 +542,11 @@ def async_match_targets( # noqa: C901 or constraints.area_name or constraints.floor_name ): - if constraints.assistant: - # Check exposure - candidates = [c for c in candidates if c.is_exposed] - if not candidates: - return MatchTargetsResult(False, MatchFailedReason.ASSISTANT) - - return MatchTargetsResult(True, states=[c.state for c in candidates]) + return MatchTargetsResult(True, states=states) # We need entity registry entries now er = entity_registry.async_get(hass) - for candidate in candidates: - candidate.entity = er.async_get(candidate.state.entity_id) + candidates = [MatchTargetsCandidate(s, er.async_get(s.entity_id)) for s in states] if constraints.name: # Filter by entity name or alias @@ -648,12 +635,6 @@ def async_match_targets( # noqa: C901 False, MatchFailedReason.AREA, areas=targeted_areas ) - if constraints.assistant: - # Check exposure - candidates = [c for c in candidates if c.is_exposed] - if not candidates: - return MatchTargetsResult(False, MatchFailedReason.ASSISTANT) - if constraints.name and (not constraints.allow_duplicate_names): # Check for duplicates if not areas_added: @@ -837,7 +818,6 @@ class DynamicServiceIntentHandler(IntentHandler): required_states: set[str] | None = None, description: str | None = None, platforms: set[str] | None = None, - device_classes: set[type[StrEnum]] | None = None, ) -> None: """Create Service Intent Handler.""" self.intent_type = intent_type @@ -847,7 +827,6 @@ class 
DynamicServiceIntentHandler(IntentHandler): self.required_states = required_states self.description = description self.platforms = platforms - self.device_classes = device_classes self.required_slots: _IntentSlotsType = {} if required_slots: @@ -870,38 +849,13 @@ class DynamicServiceIntentHandler(IntentHandler): @cached_property def slot_schema(self) -> dict: """Return a slot schema.""" - domain_validator = ( - vol.In(list(self.required_domains)) if self.required_domains else cv.string - ) slot_schema = { vol.Any("name", "area", "floor"): non_empty_string, - vol.Optional("domain"): vol.All(cv.ensure_list, [domain_validator]), + vol.Optional("domain"): vol.All(cv.ensure_list, [cv.string]), + vol.Optional("device_class"): vol.All(cv.ensure_list, [cv.string]), + vol.Optional("preferred_area_id"): cv.string, + vol.Optional("preferred_floor_id"): cv.string, } - if self.device_classes: - # The typical way to match enums is with vol.Coerce, but we build a - # flat list to make the API simpler to describe programmatically - flattened_device_classes = vol.In( - [ - device_class.value - for device_class_enum in self.device_classes - for device_class in device_class_enum - ] - ) - slot_schema.update( - { - vol.Optional("device_class"): vol.All( - cv.ensure_list, - [flattened_device_classes], - ) - } - ) - - slot_schema.update( - { - vol.Optional("preferred_area_id"): cv.string, - vol.Optional("preferred_floor_id"): cv.string, - } - ) if self.required_slots: slot_schema.update( @@ -954,6 +908,9 @@ class DynamicServiceIntentHandler(IntentHandler): if "domain" in slots: domains = set(slots["domain"]["value"]) + if self.required_domains: + # Must be a subset of intent's required domain(s) + domains.intersection_update(self.required_domains) if "device_class" in slots: device_classes = set(slots["device_class"]["value"]) @@ -1161,7 +1118,6 @@ class ServiceIntentHandler(DynamicServiceIntentHandler): required_states: set[str] | None = None, description: str | None = None, platforms: set[str] | None = None, - device_classes: set[type[StrEnum]] | None = None, ) -> None: """Create service handler.""" super().__init__( @@ -1174,7 +1130,6 @@ class ServiceIntentHandler(DynamicServiceIntentHandler): required_states=required_states, description=description, platforms=platforms, - device_classes=device_classes, ) self.domain = domain self.service = service diff --git a/homeassistant/helpers/json.py b/homeassistant/helpers/json.py index ebb74856429..28b3d509a0c 100644 --- a/homeassistant/helpers/json.py +++ b/homeassistant/helpers/json.py @@ -13,39 +13,13 @@ import orjson from homeassistant.util.file import write_utf8_file, write_utf8_file_atomic from homeassistant.util.json import ( # noqa: F401 - JSON_DECODE_EXCEPTIONS as _JSON_DECODE_EXCEPTIONS, - JSON_ENCODE_EXCEPTIONS as _JSON_ENCODE_EXCEPTIONS, + JSON_DECODE_EXCEPTIONS, + JSON_ENCODE_EXCEPTIONS, SerializationError, format_unserializable_data, - json_loads as _json_loads, + json_loads, ) -from .deprecation import ( - DeprecatedConstant, - all_with_deprecated_constants, - check_if_deprecated_constant, - deprecated_function, - dir_with_deprecated_constants, -) - -_DEPRECATED_JSON_DECODE_EXCEPTIONS = DeprecatedConstant( - _JSON_DECODE_EXCEPTIONS, "homeassistant.util.json.JSON_DECODE_EXCEPTIONS", "2025.8" -) -_DEPRECATED_JSON_ENCODE_EXCEPTIONS = DeprecatedConstant( - _JSON_ENCODE_EXCEPTIONS, "homeassistant.util.json.JSON_ENCODE_EXCEPTIONS", "2025.8" -) -json_loads = deprecated_function( - "homeassistant.util.json.json_loads", breaks_in_ha_version="2025.8" 
-)(_json_loads) - -# These can be removed if no deprecated constant are in this module anymore -__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) -__dir__ = partial( - dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] -) -__all__ = all_with_deprecated_constants(globals()) - - _LOGGER = logging.getLogger(__name__) @@ -162,17 +136,13 @@ def json_dumps(data: Any) -> str: return json_bytes(data).decode("utf-8") -json_bytes_sorted = partial( - orjson.dumps, - option=orjson.OPT_NON_STR_KEYS | orjson.OPT_SORT_KEYS, - default=json_encoder_default, -) -"""Dump json bytes with keys sorted.""" - - def json_dumps_sorted(data: Any) -> str: """Dump json string with keys sorted.""" - return json_bytes_sorted(data).decode("utf-8") + return orjson.dumps( + data, + option=orjson.OPT_NON_STR_KEYS | orjson.OPT_SORT_KEYS, + default=json_encoder_default, + ).decode("utf-8") JSON_DUMP: Final = json_dumps diff --git a/homeassistant/helpers/label_registry.py b/homeassistant/helpers/label_registry.py index 33a05156328..64e884e1428 100644 --- a/homeassistant/helpers/label_registry.py +++ b/homeassistant/helpers/label_registry.py @@ -5,17 +5,17 @@ from __future__ import annotations from collections.abc import Iterable import dataclasses from dataclasses import dataclass -from datetime import datetime -from typing import Any, Literal, TypedDict +from typing import Literal, TypedDict from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.util.dt import utc_from_timestamp, utcnow +from homeassistant.util import slugify from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey from .normalized_name_base_registry import ( NormalizedNameBaseRegistryEntry, NormalizedNameBaseRegistryItems, + normalize_name, ) from .registry import BaseRegistry from .singleton import singleton @@ -28,7 +28,6 @@ EVENT_LABEL_REGISTRY_UPDATED: EventType[EventLabelRegistryUpdatedData] = EventTy ) STORAGE_KEY = "core.label_registry" STORAGE_VERSION_MAJOR = 1 -STORAGE_VERSION_MINOR = 2 class _LabelStoreData(TypedDict): @@ -39,8 +38,6 @@ class _LabelStoreData(TypedDict): icon: str | None label_id: str name: str - created_at: str - modified_at: str class LabelRegistryStoreData(TypedDict): @@ -69,29 +66,6 @@ class LabelEntry(NormalizedNameBaseRegistryEntry): icon: str | None = None -class LabelRegistryStore(Store[LabelRegistryStoreData]): - """Store label registry data.""" - - async def _async_migrate_func( - self, - old_major_version: int, - old_minor_version: int, - old_data: dict[str, list[dict[str, Any]]], - ) -> LabelRegistryStoreData: - """Migrate to the new version.""" - if old_major_version > STORAGE_VERSION_MAJOR: - raise ValueError("Can't migrate to future version") - - if old_major_version == 1: - if old_minor_version < 2: - # Version 1.2 implements migration and adds created_at and modified_at - created_at = utc_from_timestamp(0).isoformat() - for label in old_data["labels"]: - label["created_at"] = label["modified_at"] = created_at - - return old_data # type: ignore[return-value] - - class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): """Class to hold a registry of labels.""" @@ -101,12 +75,11 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): def __init__(self, hass: HomeAssistant) -> None: """Initialize the label registry.""" self.hass = hass - self._store = LabelRegistryStore( + self._store = Store( hass, STORAGE_VERSION_MAJOR, STORAGE_KEY, atomic_writes=True, - minor_version=STORAGE_VERSION_MINOR, ) 
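Both the floor and label registries in this diff return to generating ids with a slugify-plus-counter loop (_generate_id). A minimal standalone sketch of that collision handling; the tiny _slugify below is only an illustration, not homeassistant.util.slugify:

import re


def _slugify(name: str) -> str:
    # Crude stand-in: lower-case and collapse anything non-alphanumeric to "_".
    return re.sub(r"[^a-z0-9]+", "_", name.lower()).strip("_")


def generate_id(name: str, existing: set[str]) -> str:
    suggestion = suggestion_base = _slugify(name)
    tries = 1
    while suggestion in existing:
        tries += 1
        suggestion = f"{suggestion_base}_{tries}"
    return suggestion


ids = {"living_room"}
assert generate_id("Living Room", ids) == "living_room_2"
assert generate_id("Kitchen", ids) == "kitchen"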
 
     @callback
@@ -128,9 +101,15 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]):
         """Get all labels."""
         return self.labels.values()
 
+    @callback
     def _generate_id(self, name: str) -> str:
-        """Generate label ID."""
-        return self.labels.generate_id_from_name(name)
+        """Initialize ID."""
+        suggestion = suggestion_base = slugify(name)
+        tries = 1
+        while suggestion in self.labels:
+            tries += 1
+            suggestion = f"{suggestion_base}_{tries}"
+        return suggestion
 
     @callback
     def async_create(
@@ -143,26 +122,30 @@
     ) -> LabelEntry:
         """Create a new label."""
         self.hass.verify_event_loop_thread("label_registry.async_create")
-
         if label := self.async_get_label_by_name(name):
             raise ValueError(
                 f"The name {name} ({label.normalized_name}) is already in use"
             )
 
+        normalized_name = normalize_name(name)
+
         label = LabelEntry(
             color=color,
             description=description,
             icon=icon,
             label_id=self._generate_id(name),
             name=name,
+            normalized_name=normalized_name,
         )
         label_id = label.label_id
         self.labels[label_id] = label
         self.async_schedule_save()
-
         self.hass.bus.async_fire_internal(
             EVENT_LABEL_REGISTRY_UPDATED,
-            EventLabelRegistryUpdatedData(action="create", label_id=label_id),
+            EventLabelRegistryUpdatedData(
+                action="create",
+                label_id=label_id,
+            ),
         )
         return label
 
@@ -192,7 +175,7 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]):
     ) -> LabelEntry:
         """Update name of label."""
         old = self.labels[label_id]
-        changes: dict[str, Any] = {
+        changes = {
             attr_name: value
             for attr_name, value in (
                 ("color", color),
@@ -204,14 +187,13 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]):
 
         if name is not UNDEFINED and name != old.name:
             changes["name"] = name
+            changes["normalized_name"] = normalize_name(name)
 
         if not changes:
             return old
 
-        changes["modified_at"] = utcnow()
-
         self.hass.verify_event_loop_thread("label_registry.async_update")
-        new = self.labels[label_id] = dataclasses.replace(old, **changes)
+        new = self.labels[label_id] = dataclasses.replace(old, **changes)  # type: ignore[arg-type]
         self.async_schedule_save()
 
         self.hass.bus.async_fire_internal(
@@ -231,14 +213,14 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]):
 
         if data is not None:
             for label in data["labels"]:
+                normalized_name = normalize_name(label["name"])
                 labels[label["label_id"]] = LabelEntry(
                     color=label["color"],
                     description=label["description"],
                     icon=label["icon"],
                     label_id=label["label_id"],
                     name=label["name"],
-                    created_at=datetime.fromisoformat(label["created_at"]),
-                    modified_at=datetime.fromisoformat(label["modified_at"]),
+                    normalized_name=normalized_name,
                 )
 
         self.labels = labels
@@ -255,8 +237,6 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]):
                     "icon": entry.icon,
                     "label_id": entry.label_id,
                     "name": entry.name,
-                    "created_at": entry.created_at.isoformat(),
-                    "modified_at": entry.modified_at.isoformat(),
                 }
                 for entry in self.labels.values()
             ]
diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py
index d322810b0ef..ba307a785ac 100644
--- a/homeassistant/helpers/llm.py
+++ b/homeassistant/helpers/llm.py
@@ -14,16 +14,16 @@ import slugify as unicode_slug
 import voluptuous as vol
 from voluptuous_openapi import UNSUPPORTED, convert
 
-from homeassistant.components.climate import INTENT_GET_TEMPERATURE
-from homeassistant.components.conversation import (
+from homeassistant.components.climate.intent import INTENT_GET_TEMPERATURE
+from homeassistant.components.conversation.trace import (
     ConversationTraceEventType,
     async_conversation_trace_append,
 )
-from homeassistant.components.cover import INTENT_CLOSE_COVER, INTENT_OPEN_COVER
-from homeassistant.components.homeassistant import async_should_expose
+from homeassistant.components.cover.intent import INTENT_CLOSE_COVER, INTENT_OPEN_COVER
+from homeassistant.components.homeassistant.exposed_entities import async_should_expose
 from homeassistant.components.intent import async_device_supports_timers
 from homeassistant.components.script import ATTR_VARIABLES, DOMAIN as SCRIPT_DOMAIN
-from homeassistant.components.weather import INTENT_GET_WEATHER
+from homeassistant.components.weather.intent import INTENT_GET_WEATHER
 from homeassistant.const import (
     ATTR_DOMAIN,
     ATTR_ENTITY_ID,
@@ -167,7 +167,7 @@ class APIInstance:
     async def async_call_tool(self, tool_input: ToolInput) -> JsonObjectType:
         """Call a LLM tool, validate args and return the response."""
         async_conversation_trace_append(
-            ConversationTraceEventType.TOOL_CALL,
+            ConversationTraceEventType.LLM_TOOL_CALL,
             {"tool_name": tool_input.tool_name, "tool_args": tool_input.tool_args},
         )
 
@@ -277,9 +277,6 @@ class AssistAPI(API):
         intent.INTENT_GET_STATE,
         intent.INTENT_NEVERMIND,
         intent.INTENT_TOGGLE,
-        intent.INTENT_GET_CURRENT_DATE,
-        intent.INTENT_GET_CURRENT_TIME,
-        intent.INTENT_RESPOND,
     }
 
     def __init__(self, hass: HomeAssistant) -> None:
@@ -325,7 +322,8 @@ class AssistAPI(API):
             (
                 "When controlling Home Assistant always call the intent tools. "
                 "Use HassTurnOn to lock and HassTurnOff to unlock a lock. "
-                "When controlling a device, prefer passing just name and domain. "
+                "When controlling a device, prefer passing just its name and its domain "
+                "(what comes before the dot in its entity id). "
                 "When controlling an area, prefer passing just area name and domain."
) ] @@ -363,7 +361,7 @@ class AssistAPI(API): prompt.append( "An overview of the areas and the devices in this smart home:" ) - prompt.append(yaml.dump(list(exposed_entities.values()))) + prompt.append(yaml.dump(exposed_entities)) return "\n".join(prompt) @@ -416,9 +414,7 @@ class AssistAPI(API): ): continue - script_tool = ScriptTool(self.hass, state.entity_id) - if script_tool.parameters.schema: - tools.append(script_tool) + tools.append(ScriptTool(self.hass, state.entity_id)) return tools @@ -449,16 +445,11 @@ def _get_exposed_entities( entities = {} for state in hass.states.async_all(): - if not async_should_expose(hass, assistant, state.entity_id): + if state.domain == SCRIPT_DOMAIN: continue - description: str | None = None - if state.domain == SCRIPT_DOMAIN: - description, parameters = _get_cached_script_parameters( - hass, state.entity_id - ) - if parameters.schema: # Only list scripts without input fields here - continue + if not async_should_expose(hass, assistant, state.entity_id): + continue entity_entry = entity_registry.async_get(state.entity_id) names = [state.name] @@ -484,19 +475,15 @@ def _get_exposed_entities( info: dict[str, Any] = { "names": ", ".join(names), - "domain": state.domain, "state": state.state, } - if description: - info["description"] = description - if area_names: info["areas"] = ", ".join(area_names) if attributes := { attr_name: str(attr_value) - if isinstance(attr_value, (Enum, Decimal, int)) + if isinstance(attr_value, (Enum, Decimal)) else attr_value for attr_name, attr_value in state.attributes.items() if attr_name in interesting_attributes @@ -532,7 +519,7 @@ def _selector_serializer(schema: Any) -> Any: # noqa: C901 return convert(cv.CONDITIONS_SCHEMA) if isinstance(schema, selector.ConstantSelector): - return convert(vol.Schema(schema.config["value"])) + return {"enum": [schema.config["value"]]} result: dict[str, Any] if isinstance(schema, selector.ColorTempSelector): @@ -584,7 +571,7 @@ def _selector_serializer(schema: Any) -> Any: # noqa: C901 return result if isinstance(schema, selector.ObjectSelector): - return {"type": "object", "additionalProperties": True} + return {"type": "object"} if isinstance(schema, selector.SelectSelector): options = [ @@ -608,7 +595,7 @@ def _selector_serializer(schema: Any) -> Any: # noqa: C901 return {"type": "string", "format": "time"} if isinstance(schema, selector.TriggerSelector): - return {"type": "array", "items": {"type": "string"}} + return convert(cv.TRIGGER_SCHEMA) if schema.config.get("multiple"): return {"type": "array", "items": {"type": "string"}} @@ -616,83 +603,6 @@ def _selector_serializer(schema: Any) -> Any: # noqa: C901 return {"type": "string"} -def _get_cached_script_parameters( - hass: HomeAssistant, entity_id: str -) -> tuple[str | None, vol.Schema]: - """Get script description and schema.""" - entity_registry = er.async_get(hass) - - description = None - parameters = vol.Schema({}) - entity_entry = entity_registry.async_get(entity_id) - if entity_entry and entity_entry.unique_id: - parameters_cache = hass.data.get(SCRIPT_PARAMETERS_CACHE) - - if parameters_cache is None: - parameters_cache = hass.data[SCRIPT_PARAMETERS_CACHE] = {} - - @callback - def clear_cache(event: Event) -> None: - """Clear script parameter cache on script reload or delete.""" - if ( - event.data[ATTR_DOMAIN] == SCRIPT_DOMAIN - and event.data[ATTR_SERVICE] in parameters_cache - ): - parameters_cache.pop(event.data[ATTR_SERVICE]) - - cancel = hass.bus.async_listen(EVENT_SERVICE_REMOVED, clear_cache) - - @callback - 
def on_homeassistant_close(event: Event) -> None: - """Cleanup.""" - cancel() - - hass.bus.async_listen_once( - EVENT_HOMEASSISTANT_CLOSE, on_homeassistant_close - ) - - if entity_entry.unique_id in parameters_cache: - return parameters_cache[entity_entry.unique_id] - - if service_desc := service.async_get_cached_service_description( - hass, SCRIPT_DOMAIN, entity_entry.unique_id - ): - description = service_desc.get("description") - schema: dict[vol.Marker, Any] = {} - fields = service_desc.get("fields", {}) - - for field, config in fields.items(): - field_description = config.get("description") - if not field_description: - field_description = config.get("name") - key: vol.Marker - if config.get("required"): - key = vol.Required(field, description=field_description) - else: - key = vol.Optional(field, description=field_description) - if "selector" in config: - schema[key] = selector.selector(config["selector"]) - else: - schema[key] = cv.string - - parameters = vol.Schema(schema) - - aliases: list[str] = [] - if entity_entry.name: - aliases.append(entity_entry.name) - if entity_entry.aliases: - aliases.extend(entity_entry.aliases) - if aliases: - if description: - description = description + ". Aliases: " + str(list(aliases)) - else: - description = "Aliases: " + str(list(aliases)) - - parameters_cache[entity_entry.unique_id] = (description, parameters) - - return description, parameters - - class ScriptTool(Tool): """LLM Tool representing a Script.""" @@ -702,14 +612,70 @@ class ScriptTool(Tool): script_entity_id: str, ) -> None: """Init the class.""" - self.name = split_entity_id(script_entity_id)[1] - if self.name[0].isdigit(): - self.name = "_" + self.name - self._entity_id = script_entity_id + entity_registry = er.async_get(hass) - self.description, self.parameters = _get_cached_script_parameters( - hass, script_entity_id - ) + self.name = split_entity_id(script_entity_id)[1] + self.parameters = vol.Schema({}) + entity_entry = entity_registry.async_get(script_entity_id) + if entity_entry and entity_entry.unique_id: + parameters_cache = hass.data.get(SCRIPT_PARAMETERS_CACHE) + + if parameters_cache is None: + parameters_cache = hass.data[SCRIPT_PARAMETERS_CACHE] = {} + + @callback + def clear_cache(event: Event) -> None: + """Clear script parameter cache on script reload or delete.""" + if ( + event.data[ATTR_DOMAIN] == SCRIPT_DOMAIN + and event.data[ATTR_SERVICE] in parameters_cache + ): + parameters_cache.pop(event.data[ATTR_SERVICE]) + + cancel = hass.bus.async_listen(EVENT_SERVICE_REMOVED, clear_cache) + + @callback + def on_homeassistant_close(event: Event) -> None: + """Cleanup.""" + cancel() + + hass.bus.async_listen_once( + EVENT_HOMEASSISTANT_CLOSE, on_homeassistant_close + ) + + if entity_entry.unique_id in parameters_cache: + self.description, self.parameters = parameters_cache[ + entity_entry.unique_id + ] + return + + if service_desc := service.async_get_cached_service_description( + hass, SCRIPT_DOMAIN, entity_entry.unique_id + ): + self.description = service_desc.get("description") + schema: dict[vol.Marker, Any] = {} + fields = service_desc.get("fields", {}) + + for field, config in fields.items(): + description = config.get("description") + if not description: + description = config.get("name") + key: vol.Marker + if config.get("required"): + key = vol.Required(field, description=description) + else: + key = vol.Optional(field, description=description) + if "selector" in config: + schema[key] = selector.selector(config["selector"]) + else: + schema[key] = cv.string + 
+ self.parameters = vol.Schema(schema) + + parameters_cache[entity_entry.unique_id] = ( + self.description, + self.parameters, + ) async def async_call( self, hass: HomeAssistant, tool_input: ToolInput, llm_context: LLMContext @@ -749,7 +715,7 @@ class ScriptTool(Tool): SCRIPT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: self._entity_id, + ATTR_ENTITY_ID: SCRIPT_DOMAIN + "." + self.name, ATTR_VARIABLES: tool_input.tool_args, }, context=llm_context.context, diff --git a/homeassistant/helpers/network.py b/homeassistant/helpers/network.py index e39cc2de547..d5891973e40 100644 --- a/homeassistant/helpers/network.py +++ b/homeassistant/helpers/network.py @@ -6,7 +6,6 @@ from collections.abc import Callable from contextlib import suppress from ipaddress import ip_address -from aiohttp import hdrs from hass_nabucasa import remote import yarl @@ -16,8 +15,6 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.loader import bind_hass from homeassistant.util.network import is_ip_address, is_loopback, normalize_url -from .hassio import is_hassio - TYPE_URL_INTERNAL = "internal_url" TYPE_URL_EXTERNAL = "external_url" SUPERVISOR_NETWORK_HOST = "homeassistant" @@ -44,6 +41,10 @@ def get_supervisor_network_url( hass: HomeAssistant, *, allow_ssl: bool = False ) -> str | None: """Get URL for home assistant within supervisor network.""" + # Local import to avoid circular dependencies + # pylint: disable-next=import-outside-toplevel + from homeassistant.components.hassio import is_hassio + if hass.config.api is None or not is_hassio(hass): return None @@ -178,21 +179,20 @@ def get_url( and request_host is not None and hass.config.api is not None ): + # Local import to avoid circular dependencies + # pylint: disable-next=import-outside-toplevel + from homeassistant.components.hassio import get_host_info, is_hassio + scheme = "https" if hass.config.api.use_ssl else "http" current_url = yarl.URL.build( scheme=scheme, host=request_host, port=hass.config.api.port ) known_hostnames = ["localhost"] - if is_hassio(hass): - # Local import to avoid circular dependencies - # pylint: disable-next=import-outside-toplevel - from homeassistant.components.hassio import get_host_info - - if host_info := get_host_info(hass): - known_hostnames.extend( - [host_info["hostname"], f"{host_info['hostname']}.local"] - ) + if is_hassio(hass) and (host_info := get_host_info(hass)): + known_hostnames.extend( + [host_info["hostname"], f"{host_info['hostname']}.local"] + ) if ( ( @@ -216,18 +216,7 @@ def _get_request_host() -> str | None: """Get the host address of the current request.""" if (request := http.current_request.get()) is None: raise NoURLAvailableError - # partition the host to remove the port - # because the raw host header can contain the port - host = request.headers.get(hdrs.HOST) - if host is None: - return None - # IPv6 addresses are enclosed in brackets - # use same logic as yarl and urllib to extract the host - if "[" in host: - return (host.partition("[")[2]).partition("]")[0] - if ":" in host: - host = host.partition(":")[0] - return host + return yarl.URL(request.url).host @bind_hass diff --git a/homeassistant/helpers/normalized_name_base_registry.py b/homeassistant/helpers/normalized_name_base_registry.py index 983d9e55340..1cffac9ffc5 100644 --- a/homeassistant/helpers/normalized_name_base_registry.py +++ b/homeassistant/helpers/normalized_name_base_registry.py @@ -1,11 +1,8 @@ """Provide a base class for registries that use a normalized name index.""" -from dataclasses import dataclass, 
field -from datetime import datetime +from dataclasses import dataclass from functools import lru_cache -from homeassistant.util import dt as dt_util, slugify - from .registry import BaseRegistryItems @@ -14,13 +11,7 @@ class NormalizedNameBaseRegistryEntry: """Normalized Name Base Registry Entry.""" name: str - normalized_name: str = field(init=False) - created_at: datetime = field(default_factory=dt_util.utcnow) - modified_at: datetime = field(default_factory=dt_util.utcnow) - - def __post_init__(self) -> None: - """Post init.""" - object.__setattr__(self, "normalized_name", normalize_name(self.name)) + normalized_name: str @lru_cache(maxsize=1024) @@ -47,7 +38,7 @@ class NormalizedNameBaseRegistryItems[_VT: NormalizedNameBaseRegistryEntry]( old_entry = self.data[key] if ( replacement_entry is not None - and (normalized_name := replacement_entry.normalized_name) + and (normalized_name := normalize_name(replacement_entry.name)) != old_entry.normalized_name and normalized_name in self._normalized_names ): @@ -57,17 +48,8 @@ class NormalizedNameBaseRegistryItems[_VT: NormalizedNameBaseRegistryEntry]( del self._normalized_names[old_entry.normalized_name] def _index_entry(self, key: str, entry: _VT) -> None: - self._normalized_names[entry.normalized_name] = entry + self._normalized_names[normalize_name(entry.name)] = entry def get_by_name(self, name: str) -> _VT | None: """Get entry by name.""" return self._normalized_names.get(normalize_name(name)) - - def generate_id_from_name(self, name: str) -> str: - """Generate ID from name.""" - suggestion = suggestion_base = slugify(name) - tries = 1 - while suggestion in self: - tries += 1 - suggestion = f"{suggestion_base}_{tries}" - return suggestion diff --git a/homeassistant/helpers/recorder.py b/homeassistant/helpers/recorder.py index 59604944eeb..6155fc9b320 100644 --- a/homeassistant/helpers/recorder.py +++ b/homeassistant/helpers/recorder.py @@ -3,25 +3,13 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Generator -from contextlib import contextmanager from dataclasses import dataclass, field -import functools -import logging -from typing import TYPE_CHECKING, Any +from typing import Any from homeassistant.core import HomeAssistant, callback from homeassistant.util.hass_dict import HassKey -if TYPE_CHECKING: - from sqlalchemy.orm.session import Session - - from homeassistant.components.recorder import Recorder - -_LOGGER = logging.getLogger(__name__) - DOMAIN: HassKey[RecorderData] = HassKey("recorder") -DATA_INSTANCE: HassKey[Recorder] = HassKey("recorder_instance") @dataclass(slots=True) @@ -32,32 +20,20 @@ class RecorderData: db_connected: asyncio.Future[bool] = field(default_factory=asyncio.Future) -@callback def async_migration_in_progress(hass: HomeAssistant) -> bool: """Check to see if a recorder migration is in progress.""" + if "recorder" not in hass.config.components: + return False # pylint: disable-next=import-outside-toplevel from homeassistant.components import recorder return recorder.util.async_migration_in_progress(hass) -@callback -def async_migration_is_live(hass: HomeAssistant) -> bool: - """Check to see if a recorder migration is live.""" - # pylint: disable-next=import-outside-toplevel - from homeassistant.components import recorder - - return recorder.util.async_migration_is_live(hass) - - @callback def async_initialize_recorder(hass: HomeAssistant) -> None: """Initialize recorder data.""" - # pylint: disable-next=import-outside-toplevel - from 
homeassistant.components.recorder.basic_websocket_api import async_setup - hass.data[DOMAIN] = RecorderData() - async_setup(hass) async def async_wait_recorder(hass: HomeAssistant) -> bool: @@ -68,45 +44,3 @@ async def async_wait_recorder(hass: HomeAssistant) -> bool: if DOMAIN not in hass.data: return False return await hass.data[DOMAIN].db_connected - - -@functools.lru_cache(maxsize=1) -def get_instance(hass: HomeAssistant) -> Recorder: - """Get the recorder instance.""" - return hass.data[DATA_INSTANCE] - - -@contextmanager -def session_scope( - *, - hass: HomeAssistant | None = None, - session: Session | None = None, - exception_filter: Callable[[Exception], bool] | None = None, - read_only: bool = False, -) -> Generator[Session]: - """Provide a transactional scope around a series of operations. - - read_only is used to indicate that the session is only used for reading - data and that no commit is required. It does not prevent the session - from writing and is not a security measure. - """ - if session is None and hass is not None: - session = get_instance(hass).get_session() - - if session is None: - raise RuntimeError("Session required") - - need_rollback = False - try: - yield session - if not read_only and session.get_transaction(): - need_rollback = True - session.commit() - except Exception as err: - _LOGGER.exception("Error executing query") - if need_rollback: - session.rollback() - if not exception_filter or not exception_filter(err): - raise - finally: - session.close() diff --git a/homeassistant/helpers/schema_config_entry_flow.py b/homeassistant/helpers/schema_config_entry_flow.py index af8c4c6402d..7463c9945b2 100644 --- a/homeassistant/helpers/schema_config_entry_flow.py +++ b/homeassistant/helpers/schema_config_entry_flow.py @@ -16,6 +16,7 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, + OptionsFlowWithConfigEntry, ) from homeassistant.core import HomeAssistant, callback, split_entity_id from homeassistant.data_entry_flow import UnknownHandler @@ -402,7 +403,7 @@ class SchemaConfigFlowHandler(ConfigFlow, ABC): ) -class SchemaOptionsFlowHandler(OptionsFlow): +class SchemaOptionsFlowHandler(OptionsFlowWithConfigEntry): """Handle a schema based options flow.""" def __init__( @@ -421,8 +422,10 @@ class SchemaOptionsFlowHandler(OptionsFlow): options, which is the union of stored options and user input from the options flow steps. 
""" - self._options = copy.deepcopy(dict(config_entry.options)) - self._common_handler = SchemaCommonFlowHandler(self, options_flow, self.options) + super().__init__(config_entry) + self._common_handler = SchemaCommonFlowHandler( + self, options_flow, self._options + ) self._async_options_flow_finished = async_options_flow_finished for step in options_flow: @@ -435,11 +438,6 @@ class SchemaOptionsFlowHandler(OptionsFlow): if async_setup_preview: setattr(self, "async_setup_preview", async_setup_preview) - @property - def options(self) -> dict[str, Any]: - """Return a mutable copy of the config entry options.""" - return self._options - @staticmethod def _async_step( step_id: str, diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index 86dcd858c1b..84dabb114cd 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -3,20 +3,20 @@ from __future__ import annotations import asyncio -from collections.abc import AsyncGenerator, Callable, Mapping, Sequence +from collections.abc import Callable, Mapping, Sequence from contextlib import asynccontextmanager from contextvars import ContextVar from copy import copy from dataclasses import dataclass from datetime import datetime, timedelta -from functools import partial +from functools import cached_property, partial import itertools import logging from types import MappingProxyType -from typing import Any, Literal, TypedDict, cast, overload +from typing import Any, Literal, TypedDict, cast import async_interrupt -from propcache import cached_property +from typing_extensions import AsyncGenerator import voluptuous as vol from homeassistant import exceptions @@ -76,7 +76,6 @@ from homeassistant.core import ( HassJob, HomeAssistant, ServiceResponse, - State, SupportsResponse, callback, ) @@ -109,7 +108,9 @@ from .trace import ( trace_update_result, ) from .trigger import async_initialize_triggers, async_validate_trigger_config -from .typing import UNDEFINED, ConfigType, TemplateVarsType, UndefinedType +from .typing import UNDEFINED, ConfigType, UndefinedType + +# mypy: allow-untyped-calls, allow-untyped-defs, no-check-untyped-defs SCRIPT_MODE_PARALLEL = "parallel" SCRIPT_MODE_QUEUED = "queued" @@ -177,7 +178,7 @@ def _set_result_unless_done(future: asyncio.Future[None]) -> None: future.set_result(None) -def action_trace_append(variables: dict[str, Any], path: str) -> TraceElement: +def action_trace_append(variables, path): """Append a TraceElement to trace[path].""" trace_element = TraceElement(variables, path) trace_append_element(trace_element, ACTION_TRACE_NODE_MAX_LEN) @@ -430,7 +431,7 @@ class _ScriptRun: if not self._stop.done(): self._script._changed() # noqa: SLF001 - async def _async_get_condition(self, config: ConfigType) -> ConditionCheckerType: + async def _async_get_condition(self, config): return await self._script._async_get_condition(config) # noqa: SLF001 def _log( @@ -438,7 +439,7 @@ class _ScriptRun: ) -> None: self._script._log(msg, *args, level=level, **kwargs) # noqa: SLF001 - def _step_log(self, default_message: str, timeout: float | None = None) -> None: + def _step_log(self, default_message, timeout=None): self._script.last_action = self._action.get(CONF_ALIAS, default_message) _timeout = ( "" if timeout is None else f" (timeout: {timedelta(seconds=timeout)})" @@ -580,7 +581,7 @@ class _ScriptRun: if not isinstance(exception, exceptions.HomeAssistantError): raise exception - def _log_exception(self, exception: Exception) -> None: + def _log_exception(self, exception): 
action_type = cv.determine_script_action(self._action) error = str(exception) @@ -629,7 +630,7 @@ class _ScriptRun: ) raise _AbortScript from ex - async def _async_delay_step(self) -> None: + async def _async_delay_step(self): """Handle delay.""" delay_delta = self._get_pos_time_period_template(CONF_DELAY) @@ -661,7 +662,7 @@ class _ScriptRun: return self._get_pos_time_period_template(CONF_TIMEOUT).total_seconds() return None - async def _async_wait_template_step(self) -> None: + async def _async_wait_template_step(self): """Handle a wait template.""" timeout = self._get_timeout_seconds_from_action() self._step_log("wait template", timeout) @@ -670,6 +671,7 @@ class _ScriptRun: trace_set_result(wait=self._variables["wait"]) wait_template = self._action[CONF_WAIT_TEMPLATE] + wait_template.hass = self._hass # check if condition already okay if condition.async_template(self._hass, wait_template, self._variables, False): @@ -689,9 +691,7 @@ class _ScriptRun: futures.append(done) @callback - def async_script_wait( - entity_id: str, from_s: State | None, to_s: State | None - ) -> None: + def async_script_wait(entity_id, from_s, to_s): """Handle script after template condition is true.""" self._async_set_remaining_time_var(timeout_handle) self._variables["wait"]["completed"] = True @@ -728,7 +728,7 @@ class _ScriptRun: except ScriptStoppedError as ex: raise asyncio.CancelledError from ex - async def _async_call_service_step(self) -> None: + async def _async_call_service_step(self): """Call the service specified in the action.""" self._step_log("call service") @@ -775,14 +775,14 @@ class _ScriptRun: if response_variable: self._variables[response_variable] = response_data - async def _async_device_step(self) -> None: + async def _async_device_step(self): """Perform the device automation specified in the action.""" self._step_log("device automation") await device_action.async_call_action_from_config( self._hass, self._action, self._variables, self._context ) - async def _async_scene_step(self) -> None: + async def _async_scene_step(self): """Activate the scene specified in the action.""" self._step_log("activate scene") trace_set_result(scene=self._action[CONF_SCENE]) @@ -794,7 +794,7 @@ class _ScriptRun: context=self._context, ) - async def _async_event_step(self) -> None: + async def _async_event_step(self): """Fire an event.""" self._step_log(self._action.get(CONF_ALIAS, self._action[CONF_EVENT])) event_data = {} @@ -816,7 +816,7 @@ class _ScriptRun: self._action[CONF_EVENT], event_data, context=self._context ) - async def _async_condition_step(self) -> None: + async def _async_condition_step(self): """Test if condition is matching.""" self._script.last_action = self._action.get( CONF_ALIAS, self._action[CONF_CONDITION] @@ -836,19 +836,12 @@ class _ScriptRun: if not check: raise _ConditionFail - def _test_conditions( - self, - conditions: list[ConditionCheckerType], - name: str, - condition_path: str | None = None, - ) -> bool | None: + def _test_conditions(self, conditions, name, condition_path=None): if condition_path is None: condition_path = name @trace_condition_function - def traced_test_conditions( - hass: HomeAssistant, variables: TemplateVarsType - ) -> bool | None: + def traced_test_conditions(hass, variables): try: with trace_path(condition_path): for idx, cond in enumerate(conditions): @@ -864,7 +857,7 @@ class _ScriptRun: return traced_test_conditions(self._hass, self._variables) @async_trace_path("repeat") - async def _async_repeat_step(self) -> None: # noqa: C901 + async def 
_async_repeat_step(self): # noqa: C901 """Repeat a sequence.""" description = self._action.get(CONF_ALIAS, "sequence") repeat = self._action[CONF_REPEAT] @@ -884,7 +877,7 @@ class _ScriptRun: script = self._script._get_repeat_script(self._step) # noqa: SLF001 warned_too_many_loops = False - async def async_run_sequence(iteration: int, extra_msg: str = "") -> None: + async def async_run_sequence(iteration, extra_msg=""): self._log("Repeating %s: Iteration %i%s", description, iteration, extra_msg) with trace_path("sequence"): await self._async_run_script(script) @@ -1060,7 +1053,7 @@ class _ScriptRun: """If sequence.""" if_data = await self._script._async_get_if_data(self._step) # noqa: SLF001 - test_conditions: bool | None = False + test_conditions = False try: with trace_path("if"): test_conditions = self._test_conditions( @@ -1080,26 +1073,6 @@ class _ScriptRun: with trace_path("else"): await self._async_run_script(if_data["if_else"]) - @overload - def _async_futures_with_timeout( - self, - timeout: float, - ) -> tuple[ - list[asyncio.Future[None]], - asyncio.TimerHandle, - asyncio.Future[None], - ]: ... - - @overload - def _async_futures_with_timeout( - self, - timeout: None, - ) -> tuple[ - list[asyncio.Future[None]], - None, - None, - ]: ... - def _async_futures_with_timeout( self, timeout: float | None, @@ -1126,18 +1099,14 @@ class _ScriptRun: futures.append(timeout_future) return futures, timeout_handle, timeout_future - async def _async_wait_for_trigger_step(self) -> None: + async def _async_wait_for_trigger_step(self): """Wait for a trigger event.""" timeout = self._get_timeout_seconds_from_action() self._step_log("wait for trigger", timeout) variables = {**self._variables} - self._variables["wait"] = { - "remaining": timeout, - "completed": False, - "trigger": None, - } + self._variables["wait"] = {"remaining": timeout, "trigger": None} trace_set_result(wait=self._variables["wait"]) if timeout == 0: @@ -1151,15 +1120,12 @@ class _ScriptRun: done = self._hass.loop.create_future() futures.append(done) - async def async_done( - variables: dict[str, Any], context: Context | None = None - ) -> None: + async def async_done(variables, context=None): self._async_set_remaining_time_var(timeout_handle) - self._variables["wait"]["completed"] = True self._variables["wait"]["trigger"] = variables["trigger"] _set_result_unless_done(done) - def log_cb(level: int, msg: str, **kwargs: Any) -> None: + def log_cb(level, msg, **kwargs): self._log(msg, level=level, **kwargs) remove_triggers = await async_initialize_triggers( @@ -1203,14 +1169,14 @@ class _ScriptRun: unsub() - async def _async_variables_step(self) -> None: + async def _async_variables_step(self): """Set a variable value.""" self._step_log("setting variables") self._variables = self._action[CONF_VARIABLES].async_render( self._hass, self._variables, render_as_defaults=False ) - async def _async_set_conversation_response_step(self) -> None: + async def _async_set_conversation_response_step(self): """Set conversation response.""" self._step_log("setting conversation response") resp: template.Template | None = self._action[CONF_SET_CONVERSATION_RESPONSE] @@ -1222,7 +1188,7 @@ class _ScriptRun: ) trace_set_result(conversation_response=self._conversation_response) - async def _async_stop_step(self) -> None: + async def _async_stop_step(self): """Stop script execution.""" stop = self._action[CONF_STOP] error = self._action.get(CONF_ERROR, False) @@ -1355,7 +1321,7 @@ async def _async_stop_scripts_at_shutdown(hass: HomeAssistant, event: Event) 
-> ) -type _VarsType = dict[str, Any] | Mapping[str, Any] | MappingProxyType[str, Any] +type _VarsType = dict[str, Any] | MappingProxyType def _referenced_extract_ids(data: Any, key: str, found: set[str]) -> None: @@ -1393,7 +1359,7 @@ class ScriptRunResult: conversation_response: str | None | UndefinedType service_response: ServiceResponse - variables: dict[str, Any] + variables: dict class Script: @@ -1434,6 +1400,7 @@ class Script: self._hass = hass self.sequence = sequence + template.attach(hass, self.sequence) self.name = name self.unique_id = f"{domain}.{name}-{id(self)}" self.domain = domain @@ -1447,7 +1414,7 @@ class Script: self._set_logger(logger) self._log_exceptions = log_exceptions - self.last_action: str | None = None + self.last_action = None self.last_triggered: datetime | None = None self._runs: list[_ScriptRun] = [] @@ -1455,7 +1422,7 @@ class Script: self._max_exceeded = max_exceeded if script_mode == SCRIPT_MODE_QUEUED: self._queue_lck = asyncio.Lock() - self._config_cache: dict[frozenset[tuple[str, str]], ConditionCheckerType] = {} + self._config_cache: dict[set[tuple], Callable[..., bool]] = {} self._repeat_script: dict[int, Script] = {} self._choose_data: dict[int, _ChooseData] = {} self._if_data: dict[int, _IfData] = {} @@ -1463,6 +1430,8 @@ class Script: self._sequence_scripts: dict[int, Script] = {} self.variables = variables self._variables_dynamic = template.is_complex(variables) + if self._variables_dynamic: + template.attach(hass, variables) self._copy_variables_on_run = copy_variables @property @@ -1746,11 +1715,9 @@ class Script: variables["context"] = context elif self._copy_variables_on_run: - # This is not the top level script, variables have been turned to a dict - variables = cast(dict[str, Any], copy(run_variables)) + variables = cast(dict, copy(run_variables)) else: - # This is not the top level script, variables have been turned to a dict - variables = cast(dict[str, Any], run_variables) + variables = cast(dict, run_variables) # Prevent non-allowed recursive calls which will cause deadlocks when we try to # stop (restart) or wait for (queued) our own script run. 
@@ -1779,7 +1746,9 @@ class Script: cls = _ScriptRun else: cls = _QueuedScriptRun - run = cls(self._hass, self, variables, context, self._log_exceptions) + run = cls( + self._hass, self, cast(dict, variables), context, self._log_exceptions + ) has_existing_runs = bool(self._runs) self._runs.append(run) if self.script_mode == SCRIPT_MODE_RESTART and has_existing_runs: @@ -1804,9 +1773,7 @@ class Script: self._changed() raise - async def _async_stop( - self, aws: list[asyncio.Task[None]], update_state: bool - ) -> None: + async def _async_stop(self, aws: list[asyncio.Task], update_state: bool) -> None: await asyncio.wait(aws) if update_state: self._changed() @@ -1825,8 +1792,11 @@ class Script: return await asyncio.shield(create_eager_task(self._async_stop(aws, update_state))) - async def _async_get_condition(self, config: ConfigType) -> ConditionCheckerType: - config_cache_key = frozenset((k, str(v)) for k, v in config.items()) + async def _async_get_condition(self, config): + if isinstance(config, template.Template): + config_cache_key = config.template + else: + config_cache_key = frozenset((k, str(v)) for k, v in config.items()) if not (cond := self._config_cache.get(config_cache_key)): cond = await condition.async_from_config(self._hass, config) self._config_cache[config_cache_key] = cond diff --git a/homeassistant/helpers/script_variables.py b/homeassistant/helpers/script_variables.py index 2b4507abd64..043101b9b86 100644 --- a/homeassistant/helpers/script_variables.py +++ b/homeassistant/helpers/script_variables.py @@ -36,6 +36,7 @@ class ScriptVariables: """ if self._has_template is None: self._has_template = template.is_complex(self.variables) + template.attach(hass, self.variables) if not self._has_template: if render_as_defaults: diff --git a/homeassistant/helpers/selector.py b/homeassistant/helpers/selector.py index 025b8de8896..5a542657d10 100644 --- a/homeassistant/helpers/selector.py +++ b/homeassistant/helpers/selector.py @@ -725,7 +725,6 @@ class DurationSelectorConfig(TypedDict, total=False): """Class to represent a duration selector config.""" enable_day: bool - enable_millisecond: bool allow_negative: bool @@ -740,8 +739,6 @@ class DurationSelector(Selector[DurationSelectorConfig]): # Enable day field in frontend. A selection with `days` set is allowed # even if `enable_day` is not set vol.Optional("enable_day"): cv.boolean, - # Enable millisecond field in frontend. - vol.Optional("enable_millisecond"): cv.boolean, # Allow negative durations. Will default to False in HA Core 2025.6.0. vol.Optional("allow_negative"): cv.boolean, } diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index e3da52604cb..35c682437cb 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -20,8 +20,8 @@ from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FLOOR_ID, ATTR_LABEL_ID, - CONF_ACTION, CONF_ENTITY_ID, + CONF_SERVICE, CONF_SERVICE_DATA, CONF_SERVICE_DATA_TEMPLATE, CONF_SERVICE_TEMPLATE, @@ -33,7 +33,6 @@ from homeassistant.core import ( Context, EntityServiceResponse, HassJob, - HassJobType, HomeAssistant, ServiceCall, ServiceResponse, @@ -64,7 +63,7 @@ from . 
import ( ) from .group import expand_entity_ids from .selector import TargetSelector -from .typing import ConfigType, TemplateVarsType, VolDictType, VolSchemaType +from .typing import ConfigType, TemplateVarsType, VolSchemaType if TYPE_CHECKING: from .entity import Entity @@ -359,13 +358,14 @@ def async_prepare_call_from_config( f"Invalid config for calling service: {ex}" ) from ex - if CONF_ACTION in config: - domain_service = config[CONF_ACTION] + if CONF_SERVICE in config: + domain_service = config[CONF_SERVICE] else: domain_service = config[CONF_SERVICE_TEMPLATE] if isinstance(domain_service, template.Template): try: + domain_service.hass = hass domain_service = domain_service.async_render(variables) domain_service = cv.service(domain_service) except TemplateError as ex: @@ -384,8 +384,10 @@ def async_prepare_call_from_config( conf = config[CONF_TARGET] try: if isinstance(conf, template.Template): + conf.hass = hass target.update(conf.async_render(variables)) else: + template.attach(hass, conf) target.update(template.render_complex(conf, variables)) if CONF_ENTITY_ID in target: @@ -411,6 +413,7 @@ def async_prepare_call_from_config( if conf not in config: continue try: + template.attach(hass, config[conf]) render = template.render_complex(config[conf], variables) if not isinstance(render, dict): raise HomeAssistantError( @@ -571,31 +574,19 @@ def async_extract_referenced_entity_ids( # noqa: C901 for area_entry in area_reg.areas.get_areas_for_floor(floor_id) ) - selected.referenced_areas.update(selector.area_ids) + # Find devices for targeted areas selected.referenced_devices.update(selector.device_ids) - if not selected.referenced_areas and not selected.referenced_devices: - return selected - - # Add indirectly referenced by device - selected.indirectly_referenced.update( - entry.entity_id - for device_id in selected.referenced_devices - for entry in entities.get_entries_for_device_id(device_id) - # Do not add entities which are hidden or which are config - # or diagnostic entities. - if (entry.entity_category is None and entry.hidden_by is None) - ) - - # Find devices for targeted areas - referenced_devices_by_area: set[str] = set() + selected.referenced_areas.update(selector.area_ids) if selected.referenced_areas: for area_id in selected.referenced_areas: - referenced_devices_by_area.update( + selected.referenced_devices.update( device_entry.id for device_entry in dev_reg.devices.get_devices_for_area_id(area_id) ) - selected.referenced_devices.update(referenced_devices_by_area) + + if not selected.referenced_areas and not selected.referenced_devices: + return selected # Add indirectly referenced by area selected.indirectly_referenced.update( @@ -607,10 +598,10 @@ def async_extract_referenced_entity_ids( # noqa: C901 # or diagnostic entities. if entry.entity_category is None and entry.hidden_by is None ) - # Add indirectly referenced by area through device + # Add indirectly referenced by device selected.indirectly_referenced.update( entry.entity_id - for device_id in referenced_devices_by_area + for device_id in selected.referenced_devices for entry in entities.get_entries_for_device_id(device_id) # Do not add entities which are hidden or which are config # or diagnostic entities. 
@@ -622,10 +613,11 @@ def async_extract_referenced_entity_ids( # noqa: C901 # by an area and the entity # has no explicitly set area not entry.area_id + # The entity's device matches a targeted device + or device_id in selector.device_ids ) ) ) - return selected @@ -1252,55 +1244,3 @@ class ReloadServiceHelper[_T]: self._service_running = False self._pending_reload_targets -= reload_targets self._service_condition.notify_all() - - -@callback -def async_register_entity_service( - hass: HomeAssistant, - domain: str, - name: str, - *, - entities: dict[str, Entity], - func: str | Callable[..., Any], - job_type: HassJobType | None, - required_features: Iterable[int] | None = None, - schema: VolDictType | VolSchemaType | None, - supports_response: SupportsResponse = SupportsResponse.NONE, -) -> None: - """Help registering an entity service. - - This is called by EntityComponent.async_register_entity_service and - EntityPlatform.async_register_entity_service and should not be called - directly by integrations. - """ - if schema is None or isinstance(schema, dict): - schema = cv.make_entity_service_schema(schema) - elif not cv.is_entity_service_schema(schema): - # pylint: disable-next=import-outside-toplevel - from .frame import ReportBehavior, report_usage - - report_usage( - ( - "registers an entity service with a non entity service schema " - "which will stop working in HA Core 2025.9" - ), - core_behavior=ReportBehavior.LOG, - ) - - service_func: str | HassJob[..., Any] - service_func = func if isinstance(func, str) else HassJob(func) - - hass.services.async_register( - domain, - name, - partial( - entity_service_call, - hass, - entities, - service_func, - required_features=required_features, - ), - schema, - supports_response, - job_type=job_type, - ) diff --git a/homeassistant/helpers/service_info/hassio.py b/homeassistant/helpers/service_info/hassio.py deleted file mode 100644 index 0125fef3017..00000000000 --- a/homeassistant/helpers/service_info/hassio.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Hassio Discovery data.""" - -from dataclasses import dataclass -from typing import Any - -from homeassistant.data_entry_flow import BaseServiceInfo - - -@dataclass(slots=True) -class HassioServiceInfo(BaseServiceInfo): - """Prepared info from hassio entries.""" - - config: dict[str, Any] - name: str - slug: str - uuid: str diff --git a/homeassistant/helpers/state.py b/homeassistant/helpers/state.py index 70f64d5296a..71b1b2658e2 100644 --- a/homeassistant/helpers/state.py +++ b/homeassistant/helpers/state.py @@ -9,16 +9,17 @@ import logging from types import ModuleType from typing import Any -from homeassistant.components.lock import LockState from homeassistant.components.sun import STATE_ABOVE_HORIZON, STATE_BELOW_HORIZON from homeassistant.const import ( STATE_CLOSED, STATE_HOME, + STATE_LOCKED, STATE_NOT_HOME, STATE_OFF, STATE_ON, STATE_OPEN, STATE_UNKNOWN, + STATE_UNLOCKED, ) from homeassistant.core import Context, HomeAssistant, State from homeassistant.loader import IntegrationNotFound, async_get_integration, bind_hass @@ -78,7 +79,7 @@ def state_as_number(state: State) -> float: """ if state.state in ( STATE_ON, - LockState.LOCKED, + STATE_LOCKED, STATE_ABOVE_HORIZON, STATE_OPEN, STATE_HOME, @@ -86,7 +87,7 @@ def state_as_number(state: State) -> float: return 1 if state.state in ( STATE_OFF, - LockState.UNLOCKED, + STATE_UNLOCKED, STATE_UNKNOWN, STATE_BELOW_HORIZON, STATE_CLOSED, diff --git a/homeassistant/helpers/storage.py b/homeassistant/helpers/storage.py index 080599f54d8..7e3c12cfc01 
100644 --- a/homeassistant/helpers/storage.py +++ b/homeassistant/helpers/storage.py @@ -6,6 +6,7 @@ import asyncio from collections.abc import Callable, Iterable, Mapping, Sequence from contextlib import suppress from copy import deepcopy +from functools import cached_property import inspect from json import JSONDecodeError, JSONEncoder import logging @@ -13,8 +14,6 @@ import os from pathlib import Path from typing import Any -from propcache import cached_property - from homeassistant.const import ( EVENT_HOMEASSISTANT_FINAL_WRITE, EVENT_HOMEASSISTANT_STARTED, diff --git a/homeassistant/helpers/system_info.py b/homeassistant/helpers/system_info.py index df4c45cd5ed..69e03904caa 100644 --- a/homeassistant/helpers/system_info.py +++ b/homeassistant/helpers/system_info.py @@ -14,7 +14,6 @@ from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass from homeassistant.util.package import is_docker_env, is_virtual_env -from .hassio import is_hassio from .importlib import async_import_module from .singleton import singleton @@ -53,13 +52,13 @@ async def async_get_system_info(hass: HomeAssistant) -> dict[str, Any]: else: hassio = await async_import_module(hass, "homeassistant.components.hassio") - is_hassio_ = is_hassio(hass) + is_hassio = hassio.is_hassio(hass) info_object = { "installation_type": "Unknown", "version": current_version, "dev": "dev" in current_version, - "hassio": is_hassio_, + "hassio": is_hassio, "virtualenv": is_virtual_env(), "python_version": platform.python_version(), "docker": False, @@ -90,7 +89,7 @@ async def async_get_system_info(hass: HomeAssistant) -> dict[str, Any]: info_object["installation_type"] = "Home Assistant Core" # Enrich with Supervisor information - if is_hassio_: + if is_hassio: if not (info := hassio.get_info(hass)): _LOGGER.warning("No Home Assistant Supervisor info available") info = {} diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 2eab666bbd4..cc619e25aed 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -6,10 +6,9 @@ from ast import literal_eval import asyncio import base64 import collections.abc -from collections.abc import Callable, Generator, Iterable +from collections.abc import Callable, Iterable from contextlib import AbstractContextManager from contextvars import ContextVar -from copy import deepcopy from datetime import date, datetime, time, timedelta from functools import cache, lru_cache, partial, wraps import json @@ -35,7 +34,7 @@ from jinja2.sandbox import ImmutableSandboxedEnvironment from jinja2.utils import Namespace from lru import LRU import orjson -from propcache import under_cached_property +from typing_extensions import Generator import voluptuous as vol from homeassistant.const import ( @@ -53,7 +52,6 @@ from homeassistant.const import ( from homeassistant.core import ( Context, HomeAssistant, - ServiceResponse, State, callback, split_entity_id, @@ -83,7 +81,6 @@ from . 
import ( label_registry, location as loc_helper, ) -from .deprecation import deprecated_function from .singleton import singleton from .translation import async_translate_state from .typing import TemplateVarsType @@ -153,7 +150,6 @@ CACHED_TEMPLATE_STATES = 512 EVAL_CACHE_SIZE = 512 MAX_CUSTOM_TEMPLATE_SIZE = 5 * 1024 * 1024 -MAX_TEMPLATE_OUTPUT = 256 * 1024 # 256KiB CACHED_TEMPLATE_LRU: LRU[State, TemplateState] = LRU(CACHED_TEMPLATE_STATES) CACHED_TEMPLATE_NO_COLLECT_LRU: LRU[State, TemplateState] = LRU(CACHED_TEMPLATE_STATES) @@ -210,24 +206,15 @@ def async_setup(hass: HomeAssistant) -> bool: @bind_hass -@deprecated_function( - "automatic setting of Template.hass introduced by HA Core PR #89242", - breaks_in_ha_version="2025.10", -) def attach(hass: HomeAssistant, obj: Any) -> None: - """Recursively attach hass to all template instances in list and dict.""" - return _attach(hass, obj) - - -def _attach(hass: HomeAssistant, obj: Any) -> None: """Recursively attach hass to all template instances in list and dict.""" if isinstance(obj, list): for child in obj: - _attach(hass, child) + attach(hass, child) elif isinstance(obj, collections.abc.Mapping): for child_key, child_value in obj.items(): - _attach(hass, child_key) - _attach(hass, child_value) + attach(hass, child_key) + attach(hass, child_value) elif isinstance(obj, Template): obj.hass = hass @@ -509,26 +496,10 @@ class Template: ) def __init__(self, template: str, hass: HomeAssistant | None = None) -> None: - """Instantiate a template. - - Note: A valid hass instance should always be passed in. The hass parameter - will be non optional in Home Assistant Core 2025.10. - """ - # pylint: disable-next=import-outside-toplevel - from .frame import ReportBehavior, report_usage - + """Instantiate a template.""" if not isinstance(template, str): raise TypeError("Expected template to be a string") - if not hass: - report_usage( - ( - "creates a template object without passing hass, " - "which will stop working in HA Core 2025.10" - ), - core_behavior=ReportBehavior.LOG, - ) - self.template: str = template.strip() self._compiled_code: CodeType | None = None self._compiled: jinja2.Template | None = None @@ -634,11 +605,6 @@ class Template: except Exception as err: raise TemplateError(err) from err - if len(render_result) > MAX_TEMPLATE_OUTPUT: - raise TemplateError( - f"Template output exceeded maximum size of {MAX_TEMPLATE_OUTPUT} characters" - ) - render_result = render_result.strip() if not parse_result or self.hass and self.hass.config.legacy_templates: @@ -1025,8 +991,6 @@ class DomainStates: class TemplateStateBase(State): """Class to represent a state object in a template.""" - __slots__ = ("_hass", "_collect", "_entity_id", "_state") - _state: State __setitem__ = _readonly @@ -1039,7 +1003,6 @@ class TemplateStateBase(State): self._hass = hass self._collect = collect self._entity_id = entity_id - self._cache: dict[str, Any] = {} def _collect_state(self) -> None: if self._collect and (render_info := _render_info.get()): @@ -1060,7 +1023,7 @@ class TemplateStateBase(State): return self.state_with_unit raise KeyError - @under_cached_property + @property def entity_id(self) -> str: # type: ignore[override] """Wrap State.entity_id. 
@@ -1117,7 +1080,7 @@ class TemplateStateBase(State): return self._state.object_id @property - def name(self) -> str: # type: ignore[override] + def name(self) -> str: """Wrap State.name.""" self._collect_state() return self._state.name @@ -1154,7 +1117,7 @@ class TemplateStateBase(State): class TemplateState(TemplateStateBase): """Class to represent a state object in a template.""" - __slots__ = () + __slots__ = ("_state",) # Inheritance is done so functions that check against State keep working def __init__(self, hass: HomeAssistant, state: State, collect: bool = True) -> None: @@ -1170,8 +1133,6 @@ class TemplateState(TemplateStateBase): class TemplateStateFromEntityId(TemplateStateBase): """Class to represent a state object in a template.""" - __slots__ = () - def __init__( self, hass: HomeAssistant, entity_id: str, collect: bool = True ) -> None: @@ -1281,7 +1242,7 @@ def result_as_boolean(template_result: Any | None) -> bool: True/not 0/'1'/'true'/'yes'/'on'/'enable' are considered truthy False/0/None/'0'/'false'/'no'/'off'/'disable' are considered falsy - All other values are falsy + """ if template_result is None: return False @@ -2152,63 +2113,6 @@ def as_timedelta(value: str) -> timedelta | None: return dt_util.parse_duration(value) -def merge_response(value: ServiceResponse) -> list[Any]: - """Merge action responses into single list. - - Checks that the input is a correct service response: - { - "entity_id": {str: dict[str, Any]}, - } - If response is a single list, it will extend the list with the items - and add the entity_id and value_key to each dictionary for reference. - If response is a dictionary or multiple lists, - it will append the dictionary/lists to the list - and add the entity_id to each dictionary for reference. - """ - if not isinstance(value, dict): - raise TypeError("Response is not a dictionary") - if not value: - # Bail out early if response is an empty dictionary - return [] - - is_single_list = False - response_items: list = [] - input_service_response = deepcopy(value) - for entity_id, entity_response in input_service_response.items(): # pylint: disable=too-many-nested-blocks - if not isinstance(entity_response, dict): - raise TypeError("Response is not a dictionary") - for value_key, type_response in entity_response.items(): - if len(entity_response) == 1 and isinstance(type_response, list): - # Provides special handling for responses such as calendar events - # and weather forecasts where the response contains a single list with multiple - # dictionaries inside. - is_single_list = True - for dict_in_list in type_response: - if isinstance(dict_in_list, dict): - if ATTR_ENTITY_ID in dict_in_list: - raise ValueError( - f"Response dictionary already contains key '{ATTR_ENTITY_ID}'" - ) - dict_in_list[ATTR_ENTITY_ID] = entity_id - dict_in_list["value_key"] = value_key - response_items.extend(type_response) - else: - # Break the loop if not a single list as the logic is then managed in the outer loop - # which handles both dictionaries and in the case of multiple lists. 
- break - - if not is_single_list: - _response = entity_response.copy() - if ATTR_ENTITY_ID in _response: - raise ValueError( - f"Response dictionary already contains key '{ATTR_ENTITY_ID}'" - ) - _response[ATTR_ENTITY_ID] = entity_id - response_items.append(_response) - - return response_items - - def strptime(string, fmt, default=_SENTINEL): """Parse a time string to datetime.""" try: @@ -2424,7 +2328,7 @@ def regex_match(value, find="", ignorecase=False): """Match value using regex.""" if not isinstance(value, str): value = str(value) - flags = re.IGNORECASE if ignorecase else 0 + flags = re.I if ignorecase else 0 return bool(_regex_cache(find, flags).match(value)) @@ -2435,7 +2339,7 @@ def regex_replace(value="", find="", replace="", ignorecase=False): """Replace using regex.""" if not isinstance(value, str): value = str(value) - flags = re.IGNORECASE if ignorecase else 0 + flags = re.I if ignorecase else 0 return _regex_cache(find, flags).sub(replace, value) @@ -2443,7 +2347,7 @@ def regex_search(value, find="", ignorecase=False): """Search using regex.""" if not isinstance(value, str): value = str(value) - flags = re.IGNORECASE if ignorecase else 0 + flags = re.I if ignorecase else 0 return bool(_regex_cache(find, flags).search(value)) @@ -2456,7 +2360,7 @@ def regex_findall(value, find="", ignorecase=False): """Find all matches using regex.""" if not isinstance(value, str): value = str(value) - flags = re.IGNORECASE if ignorecase else 0 + flags = re.I if ignorecase else 0 return _regex_cache(find, flags).findall(value) @@ -2924,7 +2828,6 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["as_timedelta"] = as_timedelta self.globals["as_timestamp"] = forgiving_as_timestamp self.globals["timedelta"] = timedelta - self.globals["merge_response"] = merge_response self.globals["strptime"] = strptime self.globals["urlencode"] = urlencode self.globals["average"] = average @@ -2942,7 +2845,6 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["iif"] = iif self.globals["bool"] = forgiving_boolean self.globals["version"] = version - self.globals["zip"] = zip self.tests["is_number"] = is_number self.tests["list"] = _is_list self.tests["set"] = _is_set diff --git a/homeassistant/helpers/trace.py b/homeassistant/helpers/trace.py index 431a7a7d1f8..6f29ff23bec 100644 --- a/homeassistant/helpers/trace.py +++ b/homeassistant/helpers/trace.py @@ -3,12 +3,14 @@ from __future__ import annotations from collections import deque -from collections.abc import Callable, Coroutine, Generator +from collections.abc import Callable, Coroutine from contextlib import contextmanager from contextvars import ContextVar from functools import wraps from typing import Any +from typing_extensions import Generator + from homeassistant.core import ServiceResponse import homeassistant.util.dt as dt_util @@ -34,7 +36,7 @@ class TraceElement: """Container for trace data.""" self._child_key: str | None = None self._child_run_id: str | None = None - self._error: BaseException | None = None + self._error: Exception | None = None self.path: str = path self._result: dict[str, Any] | None = None self.reuse_by_child = False @@ -52,7 +54,7 @@ class TraceElement: self._child_key = child_key self._child_run_id = child_run_id - def set_error(self, ex: BaseException | None) -> None: + def set_error(self, ex: Exception) -> None: """Set error.""" self._error = ex diff --git a/homeassistant/helpers/trigger.py b/homeassistant/helpers/trigger.py index 67e9010df79..a0abbaa390c 100644 --- 
a/homeassistant/helpers/trigger.py +++ b/homeassistant/helpers/trigger.py @@ -225,7 +225,7 @@ async def _async_get_trigger_platform( try: integration = await async_get_integration(hass, platform) except IntegrationNotFound: - raise vol.Invalid(f"Invalid trigger '{platform}' specified") from None + raise vol.Invalid(f"Invalid platform '{platform}' specified") from None try: return await integration.async_get_platform("trigger") except ImportError: diff --git a/homeassistant/helpers/trigger_template_entity.py b/homeassistant/helpers/trigger_template_entity.py index 7f8ad41d7bb..7b1c4ab8078 100644 --- a/homeassistant/helpers/trigger_template_entity.py +++ b/homeassistant/helpers/trigger_template_entity.py @@ -30,7 +30,7 @@ from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads from . import config_validation as cv from .entity import Entity -from .template import render_complex +from .template import attach as template_attach, render_complex from .typing import ConfigType CONF_AVAILABILITY = "availability" @@ -157,6 +157,11 @@ class TriggerBaseEntity(Entity): """Return extra attributes.""" return self._rendered.get(CONF_ATTRIBUTES) + async def async_added_to_hass(self) -> None: + """Handle being added to Home Assistant.""" + await super().async_added_to_hass() + template_attach(self.hass, self._config) + def _set_unique_id(self, unique_id: str | None) -> None: """Set unique id.""" self._unique_id = unique_id diff --git a/homeassistant/helpers/update_coordinator.py b/homeassistant/helpers/update_coordinator.py index 87d55891e90..8451c69d2b3 100644 --- a/homeassistant/helpers/update_coordinator.py +++ b/homeassistant/helpers/update_coordinator.py @@ -4,7 +4,7 @@ from __future__ import annotations from abc import abstractmethod import asyncio -from collections.abc import Awaitable, Callable, Coroutine, Generator +from collections.abc import Awaitable, Callable, Coroutine from datetime import datetime, timedelta import logging from random import randint @@ -13,9 +13,8 @@ from typing import Any, Generic, Protocol import urllib.error import aiohttp -from propcache import cached_property import requests -from typing_extensions import TypeVar +from typing_extensions import Generator, TypeVar from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP @@ -29,8 +28,6 @@ from homeassistant.util.dt import utcnow from . 
import entity, event from .debounce import Debouncer -from .frame import report_usage -from .typing import UNDEFINED, UndefinedType REQUEST_REFRESH_DEFAULT_COOLDOWN = 10 REQUEST_REFRESH_DEFAULT_IMMEDIATE = True @@ -70,11 +67,9 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): hass: HomeAssistant, logger: logging.Logger, *, - config_entry: config_entries.ConfigEntry | None | UndefinedType = UNDEFINED, name: str, update_interval: timedelta | None = None, update_method: Callable[[], Awaitable[_DataT]] | None = None, - setup_method: Callable[[], Awaitable[None]] | None = None, request_refresh_debouncer: Debouncer[Coroutine[Any, Any, None]] | None = None, always_update: bool = True, ) -> None: @@ -83,16 +78,10 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): self.logger = logger self.name = name self.update_method = update_method - self.setup_method = setup_method self._update_interval_seconds: float | None = None self.update_interval = update_interval self._shutdown_requested = False - if config_entry is UNDEFINED: - self.config_entry = config_entries.current_entry.get() - # This should be deprecated once all core integrations are updated - # to pass in the config entry explicitly. - else: - self.config_entry = config_entry + self.config_entry = config_entries.current_entry.get() self.always_update = always_update # It's None before the first successful update. @@ -285,70 +274,15 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): fails. Additionally logging is handled by config entry setup to ensure that multiple retries do not cause log spam. """ - if self.config_entry is None: - report_usage( - "uses `async_config_entry_first_refresh`, which is only supported " - "for coordinators with a config entry and will stop working in " - "Home Assistant 2025.11" - ) - elif ( - self.config_entry.state - is not config_entries.ConfigEntryState.SETUP_IN_PROGRESS - ): - report_usage( - "uses `async_config_entry_first_refresh`, which is only supported " - f"when entry state is {config_entries.ConfigEntryState.SETUP_IN_PROGRESS}, " - f"but it is in state {self.config_entry.state}, " - "This will stop working in Home Assistant 2025.11", - ) - if await self.__wrap_async_setup(): - await self._async_refresh( - log_failures=False, raise_on_auth_failed=True, raise_on_entry_error=True - ) - if self.last_update_success: - return + await self._async_refresh( + log_failures=False, raise_on_auth_failed=True, raise_on_entry_error=True + ) + if self.last_update_success: + return ex = ConfigEntryNotReady() ex.__cause__ = self.last_exception raise ex - async def __wrap_async_setup(self) -> bool: - """Error handling for _async_setup.""" - try: - await self._async_setup() - except ( - TimeoutError, - requests.exceptions.Timeout, - aiohttp.ClientError, - requests.exceptions.RequestException, - urllib.error.URLError, - UpdateFailed, - ) as err: - self.last_exception = err - - except (ConfigEntryError, ConfigEntryAuthFailed) as err: - self.last_exception = err - self.last_update_success = False - raise - - except Exception as err: # pylint: disable=broad-except - self.last_exception = err - self.logger.exception("Unexpected error fetching %s data", self.name) - else: - return True - - self.last_update_success = False - return False - - async def _async_setup(self) -> None: - """Set up the coordinator. - - Can be overwritten by integrations to load data or resources - only once during the first refresh. 
- """ - if self.setup_method is None: - return None - return await self.setup_method() - async def async_refresh(self) -> None: """Refresh data and log errors.""" await self._async_refresh(log_failures=True) @@ -458,7 +392,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): self.logger.debug( "Finished fetching %s data in %.3f seconds (success: %s)", self.name, - monotonic() - start, # pylint: disable=possibly-used-before-assignment + monotonic() - start, self.last_update_success, ) if not auth_failed and self._listeners and not self.hass.is_stopping: @@ -537,7 +471,7 @@ class BaseCoordinatorEntity[ self.coordinator = coordinator self.coordinator_context = context - @cached_property + @property def should_poll(self) -> bool: """No need to poll. Coordinator notifies entity of updates.""" return False diff --git a/homeassistant/loader.py b/homeassistant/loader.py index d2e04df04c4..9afad610420 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -11,6 +11,7 @@ from collections.abc import Callable, Iterable from contextlib import suppress from dataclasses import dataclass import functools as ft +from functools import cached_property import importlib import logging import os @@ -25,7 +26,6 @@ from awesomeversion import ( AwesomeVersionException, AwesomeVersionStrategy, ) -from propcache import cached_property import voluptuous as vol from . import generated @@ -102,23 +102,6 @@ BLOCKED_CUSTOM_INTEGRATIONS: dict[str, BlockedIntegration] = { "mydolphin_plus": BlockedIntegration( AwesomeVersion("1.0.13"), "crashes Home Assistant" ), - # Added in 2024.7.2 because of - # https://github.com/gcobb321/icloud3/issues/349 - # Note: Current version 3.0.5.2, the fixed version is a guesstimate, - # as no solution is available at time of writing. 
- "icloud3": BlockedIntegration( - AwesomeVersion("3.0.5.3"), "prevents recorder from working" - ), - # Added in 2024.7.2 because of - # https://github.com/custom-components/places/issues/289 - "places": BlockedIntegration( - AwesomeVersion("2.7.1"), "prevents recorder from working" - ), - # Added in 2024.7.2 because of - # https://github.com/enkama/hass-variables/issues/120 - "variable": BlockedIntegration( - AwesomeVersion("3.4.4"), "prevents recorder from working" - ), } DATA_COMPONENTS: HassKey[dict[str, ModuleType | ComponentProtocol]] = HassKey( @@ -206,7 +189,7 @@ class USBMatcherOptional(TypedDict, total=False): class USBMatcher(USBMatcherRequired, USBMatcherOptional): - """Matcher for the USB integration.""" + """Matcher for the bluetooth integration.""" @dataclass(slots=True) @@ -255,7 +238,6 @@ class Manifest(TypedDict, total=False): usb: list[dict[str, str]] homekit: dict[str, list[str]] is_built_in: bool - overwrites_built_in: bool version: str codeowners: list[str] loggers: list[str] @@ -283,7 +265,9 @@ def manifest_from_legacy_module(domain: str, module: ModuleType) -> Manifest: } -def _get_custom_components(hass: HomeAssistant) -> dict[str, Integration]: +async def _async_get_custom_components( + hass: HomeAssistant, +) -> dict[str, Integration]: """Return list of custom integrations.""" if hass.config.recovery_mode or hass.config.safe_mode: return {} @@ -293,14 +277,21 @@ def _get_custom_components(hass: HomeAssistant) -> dict[str, Integration]: except ImportError: return {} - dirs = [ - entry - for path in custom_components.__path__ - for entry in pathlib.Path(path).iterdir() - if entry.is_dir() - ] + def get_sub_directories(paths: list[str]) -> list[pathlib.Path]: + """Return all sub directories in a set of paths.""" + return [ + entry + for path in paths + for entry in pathlib.Path(path).iterdir() + if entry.is_dir() + ] - integrations = _resolve_integrations_from_root( + dirs = await hass.async_add_executor_job( + get_sub_directories, custom_components.__path__ + ) + + integrations = await hass.async_add_executor_job( + _resolve_integrations_from_root, hass, custom_components, [comp.name for comp in dirs], @@ -321,7 +312,7 @@ async def async_get_custom_components( if comps_or_future is None: future = hass.data[DATA_CUSTOM_COMPONENTS] = hass.loop.create_future() - comps = await hass.async_add_executor_job(_get_custom_components, hass) + comps = await _async_get_custom_components(hass) hass.data[DATA_CUSTOM_COMPONENTS] = comps future.set_result(comps) @@ -443,7 +434,6 @@ async def async_get_integration_descriptions( "single_config_entry": integration.manifest.get( "single_config_entry", False ), - "overwrites_built_in": integration.overwrites_built_in, } custom_flows[integration_key][integration.domain] = metadata @@ -755,7 +745,6 @@ class Integration: self.file_path = file_path self.manifest = manifest manifest["is_built_in"] = self.is_built_in - manifest["overwrites_built_in"] = self.overwrites_built_in if self.dependencies: self._all_dependencies_resolved: bool | None = None @@ -903,16 +892,6 @@ class Integration: """Test if package is a built-in integration.""" return self.pkg_path.startswith(PACKAGE_BUILTIN) - @property - def overwrites_built_in(self) -> bool: - """Return if package overwrites a built-in integration.""" - if self.is_built_in: - return False - core_comp_path = ( - pathlib.Path(__file__).parent / "components" / self.domain / "manifest.json" - ) - return core_comp_path.is_file() - @property def version(self) -> AwesomeVersion | None: """Return the 
version of the integration.""" @@ -949,7 +928,7 @@ class Integration: except IntegrationNotFound as err: _LOGGER.error( ( - "Unable to resolve dependencies for %s: unable to resolve" + "Unable to resolve dependencies for %s: we are unable to resolve" " (sub)dependency %s" ), self.domain, @@ -958,7 +937,7 @@ class Integration: except CircularDependency as err: _LOGGER.error( ( - "Unable to resolve dependencies for %s: it contains a circular" + "Unable to resolve dependencies for %s: it contains a circular" " dependency: %s -> %s" ), self.domain, @@ -1556,18 +1535,16 @@ class Components: raise ImportError(f"Unable to load {comp_name}") # Local import to avoid circular dependencies - # pylint: disable-next=import-outside-toplevel - from .helpers.frame import ReportBehavior, report_usage + from .helpers.frame import report # pylint: disable=import-outside-toplevel - report_usage( + report( ( f"accesses hass.components.{comp_name}." - " This is deprecated and will stop working in Home Assistant 2025.3, it" + " This is deprecated and will stop working in Home Assistant 2024.9, it" f" should be updated to import functions used from {comp_name} directly" ), - core_behavior=ReportBehavior.IGNORE, - core_integration_behavior=ReportBehavior.IGNORE, - custom_integration_behavior=ReportBehavior.LOG, + error_if_core=False, + log_custom_component_only=True, ) wrapped = ModuleWrapper(self._hass, component) @@ -1587,18 +1564,16 @@ class Helpers: helper = importlib.import_module(f"homeassistant.helpers.{helper_name}") # Local import to avoid circular dependencies - # pylint: disable-next=import-outside-toplevel - from .helpers.frame import ReportBehavior, report_usage + from .helpers.frame import report # pylint: disable=import-outside-toplevel - report_usage( + report( ( f"accesses hass.helpers.{helper_name}." 
- " This is deprecated and will stop working in Home Assistant 2025.5, it" + " This is deprecated and will stop working in Home Assistant 2024.11, it" f" should be updated to import functions used from {helper_name} directly" ), - core_behavior=ReportBehavior.IGNORE, - core_integration_behavior=ReportBehavior.IGNORE, - custom_integration_behavior=ReportBehavior.LOG, + error_if_core=False, + log_custom_component_only=True, ) wrapped = ModuleWrapper(self._hass, helper) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 5bc539beb86..9aed0850478 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -3,75 +3,67 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohasupervisor==0.2.1 +aiohttp-fast-url-dispatcher==0.3.0 aiohttp-fast-zlib==0.1.1 -aiohttp==3.11.0 +aiohttp==3.9.5 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 -async-interrupt==1.2.0 -async-upnp-client==0.41.0 +async-interrupt==1.1.2 +async-upnp-client==0.39.0 atomicwrites-homeassistant==1.4.1 -attrs==24.2.0 -audioop-lts==0.2.1;python_version>='3.13' -av==13.1.0 +attrs==23.2.0 awesomeversion==24.6.0 -bcrypt==4.2.0 -bleak-retry-connector==3.6.0 -bleak==0.22.3 -bluetooth-adapters==0.20.0 +bcrypt==4.1.2 +bleak-retry-connector==3.5.0 +bleak==0.22.2 +bluetooth-adapters==0.19.3 bluetooth-auto-recovery==1.4.2 -bluetooth-data-tools==1.20.0 -cached-ipaddress==0.8.0 +bluetooth-data-tools==1.19.3 +cached_ipaddress==0.3.0 certifi>=2021.5.30 ciso8601==2.3.1 -cryptography==43.0.1 -dbus-fast==2.24.3 -fnv-hash-fast==1.0.2 -go2rtc-client==0.1.1 -ha-ffmpeg==3.2.2 -habluetooth==3.6.0 -hass-nabucasa==0.84.0 -hassil==2.0.1 -home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241106.2 -home-assistant-intents==2024.11.13 -httpx==0.27.2 +cryptography==42.0.8 +dbus-fast==2.22.1 +fnv-hash-fast==0.5.0 +ha-av==10.1.1 +ha-ffmpeg==3.2.0 +habluetooth==3.1.3 +hass-nabucasa==0.81.1 +hassil==1.7.1 +home-assistant-bluetooth==1.12.2 +home-assistant-frontend==20240626.2 +home-assistant-intents==2024.6.26 +httpx==0.27.0 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.10.11 +orjson==3.9.15 packaging>=23.1 paho-mqtt==1.6.1 -Pillow==11.0.0 -propcache==0.2.0 +Pillow==10.3.0 +pip>=21.3.1 psutil-home-assistant==0.0.1 -PyJWT==2.9.0 -pymicro-vad==1.0.1 +PyJWT==2.8.0 PyNaCl==1.5.0 -pyOpenSSL==24.2.1 +pyOpenSSL==24.1.0 pyserial==3.5 -pyspeex-noise==1.0.2 python-slugify==8.0.4 -PyTurboJPEG==1.7.5 +PyTurboJPEG==1.7.1 pyudev==0.24.1 -PyYAML==6.0.2 +PyYAML==6.0.1 requests==2.32.3 -securetar==2024.2.1 SQLAlchemy==2.0.31 -standard-aifc==3.13.0;python_version>='3.13' -standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 -ulid-transform==1.0.2 +ulid-transform==0.9.0 urllib3>=1.26.5,<2 -uv==0.5.0 -voluptuous-openapi==0.0.5 +voluptuous-openapi==0.0.4 voluptuous-serialize==2.6.0 -voluptuous==0.15.2 -webrtc-models==0.2.0 -yarl==1.17.1 -zeroconf==0.136.0 +voluptuous==0.13.1 +webrtc-noise-gain==1.2.3 +yarl==1.9.4 +zeroconf==0.132.2 # Constrain pycryptodome to avoid vulnerability # see https://github.com/home-assistant/core/pull/16238 @@ -84,9 +76,14 @@ httplib2>=0.19.0 # gRPC is an implicit dependency that we want to make explicit so we manage # upgrades intentionally. It is a large package to build from source and we # want to ensure we have wheels built. 
-grpcio==1.67.1 -grpcio-status==1.67.1 -grpcio-reflection==1.67.1 +grpcio==1.59.0 +grpcio-status==1.59.0 +grpcio-reflection==1.59.0 + +# libcst >=0.4.0 requires a newer Rust than we currently have available, +# thus our wheels builds fail. This pins it to the last working version, +# which at this point satisfies our needs. +libcst==0.3.23 # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -102,11 +99,16 @@ enum34==1000000000.0.0 typing==1000000000.0.0 uuid==1000000000.0.0 +# regex causes segfault with version 2021.8.27 +# https://bitbucket.org/mrabarnett/mrab-regex/issues/421/2021827-results-in-fatal-python-error +# This is fixed in 2021.8.28 +regex==2021.8.28 + # httpx requires httpcore, and httpcore requires anyio and h11, but the version constraints on # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. -anyio==4.6.2.post1 +anyio==4.4.0 h11==0.14.0 httpcore==1.0.5 @@ -115,8 +117,13 @@ httpcore==1.0.5 hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env -numpy==2.1.3 -pandas~=2.2.3 +numpy==1.26.0 + +# Prevent dependency conflicts between sisyphus-control and aioambient +# until upper bounds for sisyphus-control have been updated +# https://github.com/jkeljo/sisyphus-control/issues/6 +python-engineio>=3.13.1,<4.0 +python-socketio>=4.6.0,<5.0 # Constrain multidict to avoid typing issues # https://github.com/home-assistant/core/pull/67046 @@ -127,10 +134,7 @@ backoff>=2.0 # Required to avoid breaking (#101042). # v2 has breaking changes (#99218). -pydantic==1.10.19 - -# Required for Python 3.12.4 compatibility (#119223). -mashumaro>=3.13.1 +pydantic==1.10.17 # Breaks asyncio # https://github.com/pubnub/python/issues/130 @@ -146,7 +150,7 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==5.28.3 +protobuf==4.25.1 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder @@ -160,6 +164,9 @@ websockets>=11.0.1 # pysnmplib is no longer maintained and does not work with newer # python pysnmplib==1000000000.0.0 +# pysnmp is no longer maintained and does not work with newer +# python +pysnmp==1000000000.0.0 # The get-mac package has been replaced with getmac. Installing get-mac alongside getmac # breaks getmac due to them both sharing the same python package name inside 'getmac'. @@ -168,21 +175,24 @@ get-mac==1000000000.0.0 # We want to skip the binary wheels for the 'charset-normalizer' packages. # They are build with mypyc, but causes issues with our wheel builder. # In order to do so, we need to constrain the version. -charset-normalizer==3.4.0 +charset-normalizer==3.2.0 # dacite: Ensure we have a version that is able to handle type unions for -# NAM, Brother, and GIOS. +# Roborock, NAM, Brother, and GIOS. dacite>=1.7.0 -# chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x -chacha20poly1305-reuseable>=0.13.0 +# Musle wheels for pandas 2.2.0 cannot be build for any architecture. 
+pandas==2.1.4 + +# chacha20poly1305-reuseable==0.12.0 is incompatible with cryptography==42.0.x +chacha20poly1305-reuseable>=0.12.1 # pycountry<23.12.11 imports setuptools at run time # https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39 pycountry>=23.12.11 -# scapy==2.6.0 causes CI failures due to a race condition -scapy>=2.6.1 +# scapy<2.5.0 will not work with python3.12 +scapy>=2.5.0 # tuf isn't updated to deal with breaking changes in securesystemslib==1.0. # Only tuf>=4 includes a constraint to <1.0. @@ -191,7 +201,3 @@ tuf>=4.0.0 # https://github.com/jd/tenacity/issues/471 tenacity!=8.4.0 - -# 5.0.0 breaks Timeout as a context manager -# TypeError: 'Timeout' object does not support the context manager protocol -async-timeout==4.0.3 diff --git a/homeassistant/runner.py b/homeassistant/runner.py index 59775655854..a1510336302 100644 --- a/homeassistant/runner.py +++ b/homeassistant/runner.py @@ -3,8 +3,10 @@ from __future__ import annotations import asyncio +from asyncio import events import dataclasses import logging +import os import subprocess import threading from time import monotonic @@ -56,6 +58,22 @@ class RuntimeConfig: safe_mode: bool = False +def can_use_pidfd() -> bool: + """Check if pidfd_open is available. + + Back ported from cpython 3.12 + """ + if not hasattr(os, "pidfd_open"): + return False + try: + pid = os.getpid() + os.close(os.pidfd_open(pid, 0)) + except OSError: + # blocked by security policy like SECCOMP + return False + return True + + class HassEventLoopPolicy(asyncio.DefaultEventLoopPolicy): """Event loop policy for Home Assistant.""" @@ -63,6 +81,23 @@ class HassEventLoopPolicy(asyncio.DefaultEventLoopPolicy): """Init the event loop policy.""" super().__init__() self.debug = debug + self._watcher: asyncio.AbstractChildWatcher | None = None + + def _init_watcher(self) -> None: + """Initialize the watcher for child processes. + + Back ported from cpython 3.12 + """ + with events._lock: # type: ignore[attr-defined] # noqa: SLF001 + if self._watcher is None: # pragma: no branch + if can_use_pidfd(): + self._watcher = asyncio.PidfdChildWatcher() + else: + self._watcher = asyncio.ThreadedChildWatcher() + if threading.current_thread() is threading.main_thread(): + self._watcher.attach_loop( + self._local._loop # type: ignore[attr-defined] # noqa: SLF001 + ) @property def loop_name(self) -> str: @@ -72,6 +107,7 @@ class HassEventLoopPolicy(asyncio.DefaultEventLoopPolicy): def new_event_loop(self) -> asyncio.AbstractEventLoop: """Get the event loop.""" loop: asyncio.AbstractEventLoop = super().new_event_loop() + setattr(loop, "_thread_ident", threading.get_ident()) loop.set_exception_handler(_async_loop_exception_handler) if self.debug: loop.set_debug(True) @@ -140,7 +176,7 @@ def _enable_posix_spawn() -> None: # less efficient. This is a workaround to force posix_spawn() # when using musl since cpython is not aware its supported. 
tag = next(packaging.tags.sys_tags()) - subprocess._USE_POSIX_SPAWN = "musllinux" in tag.platform # type: ignore[misc] # noqa: SLF001 + subprocess._USE_POSIX_SPAWN = "musllinux" in tag.platform # noqa: SLF001 def run(runtime_config: RuntimeConfig) -> int: diff --git a/homeassistant/scripts/auth.py b/homeassistant/scripts/auth.py index b034021e6e7..fff57c7adfe 100644 --- a/homeassistant/scripts/auth.py +++ b/homeassistant/scripts/auth.py @@ -2,10 +2,8 @@ import argparse import asyncio -from collections.abc import Sequence import logging import os -from typing import TYPE_CHECKING from homeassistant import runner from homeassistant.auth import auth_manager_from_config @@ -17,7 +15,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er # mypy: allow-untyped-calls, allow-untyped-defs -def run(args: Sequence[str] | None) -> None: +def run(args): """Handle Home Assistant auth provider script.""" parser = argparse.ArgumentParser(description="Manage Home Assistant users") parser.add_argument("--script", choices=["auth"]) @@ -52,7 +50,7 @@ def run(args: Sequence[str] | None) -> None: asyncio.run(run_command(parser.parse_args(args))) -async def run_command(args: argparse.Namespace) -> None: +async def run_command(args): """Run the command.""" hass = HomeAssistant(os.path.join(os.getcwd(), args.config)) await asyncio.gather(dr.async_load(hass), er.async_load(hass)) @@ -67,13 +65,9 @@ async def run_command(args: argparse.Namespace) -> None: await hass.async_stop() -async def list_users( - hass: HomeAssistant, provider: hass_auth.HassAuthProvider, args: argparse.Namespace -) -> None: +async def list_users(hass, provider, args): """List the users.""" count = 0 - if TYPE_CHECKING: - assert provider.data for user in provider.data.users: count += 1 print(user["username"]) @@ -82,12 +76,8 @@ async def list_users( print("Total users:", count) -async def add_user( - hass: HomeAssistant, provider: hass_auth.HassAuthProvider, args: argparse.Namespace -) -> None: +async def add_user(hass, provider, args): """Create a user.""" - if TYPE_CHECKING: - assert provider.data try: provider.data.add_auth(args.username, args.password) except hass_auth.InvalidUser: @@ -99,12 +89,8 @@ async def add_user( print("Auth created") -async def validate_login( - hass: HomeAssistant, provider: hass_auth.HassAuthProvider, args: argparse.Namespace -) -> None: +async def validate_login(hass, provider, args): """Validate a login.""" - if TYPE_CHECKING: - assert provider.data try: provider.data.validate_login(args.username, args.password) print("Auth valid") @@ -112,12 +98,8 @@ async def validate_login( print("Auth invalid") -async def change_password( - hass: HomeAssistant, provider: hass_auth.HassAuthProvider, args: argparse.Namespace -) -> None: +async def change_password(hass, provider, args): """Change password.""" - if TYPE_CHECKING: - assert provider.data try: provider.data.change_password(args.username, args.new_password) await provider.data.async_save() diff --git a/homeassistant/scripts/benchmark/__init__.py b/homeassistant/scripts/benchmark/__init__.py index b769d385a4f..34bc536502f 100644 --- a/homeassistant/scripts/benchmark/__init__.py +++ b/homeassistant/scripts/benchmark/__init__.py @@ -4,8 +4,10 @@ from __future__ import annotations import argparse import asyncio +import collections from collections.abc import Callable from contextlib import suppress +import json import logging from timeit import default_timer as timer @@ -16,7 +18,7 @@ from homeassistant.helpers.event import ( 
async_track_state_change, async_track_state_change_event, ) -from homeassistant.helpers.json import JSON_DUMP +from homeassistant.helpers.json import JSON_DUMP, JSONEncoder # mypy: allow-untyped-calls, allow-untyped-defs, no-check-untyped-defs # mypy: no-warn-return-any @@ -308,3 +310,48 @@ async def json_serialize_states(hass): start = timer() JSON_DUMP(states) return timer() - start + + +def _create_state_changed_event_from_old_new( + entity_id, event_time_fired, old_state, new_state +): + """Create a state changed event from a old and new state.""" + attributes = {} + if new_state is not None: + attributes = new_state.get("attributes") + attributes_json = json.dumps(attributes, cls=JSONEncoder) + if attributes_json == "null": + attributes_json = "{}" + row = collections.namedtuple( + "Row", + [ + "event_type" + "event_data" + "time_fired" + "context_id" + "context_user_id" + "state" + "entity_id" + "domain" + "attributes" + "state_id", + "old_state_id", + ], + ) + + row.event_type = EVENT_STATE_CHANGED + row.event_data = "{}" + row.attributes = attributes_json + row.time_fired = event_time_fired + row.state = new_state and new_state.get("state") + row.entity_id = entity_id + row.domain = entity_id and core.split_entity_id(entity_id)[0] + row.context_id = None + row.context_user_id = None + row.old_state_id = old_state and 1 + row.state_id = new_state and 1 + + # pylint: disable-next=import-outside-toplevel + from homeassistant.components import logbook + + return logbook.LazyEventPartialState(row, {}) diff --git a/homeassistant/scripts/macos/__init__.py b/homeassistant/scripts/macos/__init__.py index 0bf88da81dc..f629492ec39 100644 --- a/homeassistant/scripts/macos/__init__.py +++ b/homeassistant/scripts/macos/__init__.py @@ -44,7 +44,7 @@ def uninstall_osx(): print("Home Assistant has been uninstalled.") -def run(args: list[str]) -> int: +def run(args): """Handle OSX commandline script.""" commands = "install", "uninstall", "restart" if not args or args[0] not in commands: @@ -63,5 +63,3 @@ def run(args: list[str]) -> int: time.sleep(0.5) install_osx() return 0 - - raise ValueError(f"Invalid command {args[0]}") diff --git a/homeassistant/setup.py b/homeassistant/setup.py index 331389da7c6..9775a3fee45 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from collections import defaultdict -from collections.abc import Awaitable, Callable, Generator, Mapping +from collections.abc import Awaitable, Callable, Mapping import contextlib import contextvars from enum import StrEnum @@ -14,6 +14,8 @@ import time from types import ModuleType from typing import Any, Final, TypedDict +from typing_extensions import Generator + from . 
import config as conf_util, core, loader, requirements from .const import ( BASE_PLATFORMS, # noqa: F401 @@ -29,7 +31,7 @@ from .core import ( callback, ) from .exceptions import DependencyError, HomeAssistantError -from .helpers import issue_registry as ir, singleton, translation +from .helpers import singleton, translation from .helpers.issue_registry import IssueSeverity, async_create_issue from .helpers.typing import ConfigType from .util.async_ import create_eager_task @@ -281,20 +283,6 @@ async def _async_setup_component( integration = await loader.async_get_integration(hass, domain) except loader.IntegrationNotFound: _log_error_setup_error(hass, domain, None, "Integration not found.") - if not hass.config.safe_mode and hass.config_entries.async_entries(domain): - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"integration_not_found.{domain}", - is_fixable=True, - issue_domain=HOMEASSISTANT_DOMAIN, - severity=IssueSeverity.ERROR, - translation_key="integration_not_found", - translation_placeholders={ - "domain": domain, - }, - data={"domain": domain}, - ) return False log_error = partial(_log_error_setup_error, hass, domain, integration) diff --git a/homeassistant/util/__init__.py b/homeassistant/util/__init__.py index c2d825a1676..c9aa2817640 100644 --- a/homeassistant/util/__init__.py +++ b/homeassistant/util/__init__.py @@ -129,11 +129,13 @@ class Throttle: async def throttled_value() -> None: """Stand-in function for when real func is being throttled.""" + return None else: def throttled_value() -> None: # type: ignore[misc] """Stand-in function for when real func is being throttled.""" + return None if self.limit_no_throttle is not None: method = Throttle(self.limit_no_throttle)(method) diff --git a/homeassistant/util/aiohttp.py b/homeassistant/util/aiohttp.py index 5571861f417..2a4616ee634 100644 --- a/homeassistant/util/aiohttp.py +++ b/homeassistant/util/aiohttp.py @@ -28,19 +28,6 @@ class MockStreamReader: return self._content.read(byte_count) -class MockPayloadWriter: - """Small mock to imitate payload writer.""" - - def enable_chunking(self) -> None: - """Enable chunking.""" - - async def write_headers(self, *args: Any, **kwargs: Any) -> None: - """Write headers.""" - - -_MOCK_PAYLOAD_WRITER = MockPayloadWriter() - - class MockRequest: """Mock an aiohttp request.""" @@ -62,14 +49,8 @@ class MockRequest: self.status = status self.headers: CIMultiDict[str] = CIMultiDict(headers or {}) self.query_string = query_string or "" - self.keep_alive = False - self.version = (1, 1) self._content = content self.mock_source = mock_source - self._payload_writer = _MOCK_PAYLOAD_WRITER - - async def _prepare_hook(self, response: Any) -> None: - """Prepare hook.""" @property def query(self) -> MultiDict[str]: @@ -109,7 +90,7 @@ def serialize_response(response: web.Response) -> dict[str, Any]: if (body := response.body) is None: body_decoded = None elif isinstance(body, payload.StringPayload): - body_decoded = body._value.decode(body.encoding or "utf-8") # noqa: SLF001 + body_decoded = body._value.decode(body.encoding) # noqa: SLF001 elif isinstance(body, bytes): body_decoded = body.decode(response.charset or "utf-8") else: diff --git a/homeassistant/util/async_.py b/homeassistant/util/async_.py index d010d8cb341..f2dc1291324 100644 --- a/homeassistant/util/async_.py +++ b/homeassistant/util/async_.py @@ -2,15 +2,7 @@ from __future__ import annotations -from asyncio import ( - AbstractEventLoop, - Future, - Semaphore, - Task, - TimerHandle, - gather, - get_running_loop, -) +from 
asyncio import AbstractEventLoop, Future, Semaphore, Task, gather, get_running_loop from collections.abc import Awaitable, Callable, Coroutine import concurrent.futures import logging @@ -57,7 +49,7 @@ def run_callback_threadsafe[_T, *_Ts]( Return a concurrent.futures.Future to access the result. """ - if (ident := loop.__dict__.get("_thread_id")) and ident == threading.get_ident(): + if (ident := loop.__dict__.get("_thread_ident")) and ident == threading.get_ident(): raise RuntimeError("Cannot be called from within the event loop") future: concurrent.futures.Future[_T] = concurrent.futures.Future() @@ -132,9 +124,3 @@ def shutdown_run_callback_threadsafe(loop: AbstractEventLoop) -> None: python is going to exit. """ setattr(loop, _SHUTDOWN_RUN_CALLBACK_THREADSAFE, True) - - -def get_scheduled_timer_handles(loop: AbstractEventLoop) -> list[TimerHandle]: - """Return a list of scheduled TimerHandles.""" - handles: list[TimerHandle] = loop._scheduled # type: ignore[attr-defined] # noqa: SLF001 - return handles diff --git a/homeassistant/util/color.py b/homeassistant/util/color.py index 0745bc96dfb..ab5c4037f9b 100644 --- a/homeassistant/util/color.py +++ b/homeassistant/util/color.py @@ -244,7 +244,7 @@ def color_RGB_to_xy_brightness( y = Y / (X + Y + Z) # Brightness - Y = min(Y, 1) + Y = 1 if Y > 1 else Y brightness = round(Y * 255) # Check if the given xy value is within the color-reach of the lamp. diff --git a/homeassistant/util/dt.py b/homeassistant/util/dt.py index ee2b6c762d8..30cf7222f3a 100644 --- a/homeassistant/util/dt.py +++ b/homeassistant/util/dt.py @@ -95,7 +95,7 @@ def set_default_time_zone(time_zone: dt.tzinfo) -> None: get_default_time_zone.cache_clear() -def get_time_zone(time_zone_str: str) -> zoneinfo.ZoneInfo | None: +def get_time_zone(time_zone_str: str) -> dt.tzinfo | None: """Get time zone from string. Return None if unable to determine. Must be run in the executor if the ZoneInfo is not already @@ -107,7 +107,7 @@ def get_time_zone(time_zone_str: str) -> zoneinfo.ZoneInfo | None: return None -async def async_get_time_zone(time_zone_str: str) -> zoneinfo.ZoneInfo | None: +async def async_get_time_zone(time_zone_str: str) -> dt.tzinfo | None: """Get time zone from string. Return None if unable to determine. Async friendly. diff --git a/homeassistant/util/executor.py b/homeassistant/util/executor.py index 5f0fdd5c273..47b6d08a197 100644 --- a/homeassistant/util/executor.py +++ b/homeassistant/util/executor.py @@ -63,18 +63,10 @@ def join_or_interrupt_threads( class InterruptibleThreadPoolExecutor(ThreadPoolExecutor): """A ThreadPoolExecutor instance that will not deadlock on shutdown.""" - def shutdown( - self, *args: Any, join_threads_or_timeout: bool = True, **kwargs: Any - ) -> None: - """Shutdown with interrupt support added. - - By default shutdown will wait for threads to finish up - to the timeout before forcefully stopping them. This can - be disabled by setting `join_threads_or_timeout` to False. 
- """ + def shutdown(self, *args: Any, **kwargs: Any) -> None: + """Shutdown with interrupt support added.""" super().shutdown(wait=False, cancel_futures=True) - if join_threads_or_timeout: - self.join_threads_or_timeout() + self.join_threads_or_timeout() def join_threads_or_timeout(self) -> None: """Join threads or timeout.""" diff --git a/homeassistant/util/frozen_dataclass_compat.py b/homeassistant/util/frozen_dataclass_compat.py index 81ce9961a0b..6184e4564eb 100644 --- a/homeassistant/util/frozen_dataclass_compat.py +++ b/homeassistant/util/frozen_dataclass_compat.py @@ -8,10 +8,7 @@ from __future__ import annotations import dataclasses import sys -from typing import TYPE_CHECKING, Any, cast, dataclass_transform - -if TYPE_CHECKING: - from _typeshed import DataclassInstance +from typing import Any, dataclass_transform def _class_fields(cls: type, kw_only: bool) -> list[tuple[str, Any, Any]]: @@ -114,8 +111,6 @@ class FrozenOrThawed(type): """ cls, *_args = args if dataclasses.is_dataclass(cls): - if TYPE_CHECKING: - cls = cast(type[DataclassInstance], cls) return object.__new__(cls) return cls._dataclass(*_args, **kwargs) diff --git a/homeassistant/util/json.py b/homeassistant/util/json.py index fa67f6b1dcc..1479550b615 100644 --- a/homeassistant/util/json.py +++ b/homeassistant/util/json.py @@ -2,6 +2,8 @@ from __future__ import annotations +from collections.abc import Callable +import json import logging from os import PathLike from typing import Any @@ -10,6 +12,8 @@ import orjson from homeassistant.exceptions import HomeAssistantError +from .file import WriteError # noqa: F401 + _SENTINEL = object() _LOGGER = logging.getLogger(__name__) @@ -125,9 +129,63 @@ def load_json_object( raise HomeAssistantError(f"Expected JSON to be parsed as a dict got {type(value)}") +def save_json( + filename: str, + data: list | dict, + private: bool = False, + *, + encoder: type[json.JSONEncoder] | None = None, + atomic_writes: bool = False, +) -> None: + """Save JSON data to a file.""" + # pylint: disable-next=import-outside-toplevel + from homeassistant.helpers.frame import report + + report( + ( + "uses save_json from homeassistant.util.json module." + " This is deprecated and will stop working in Home Assistant 2022.4, it" + " should be updated to use homeassistant.helpers.json module instead" + ), + error_if_core=False, + ) + + # pylint: disable-next=import-outside-toplevel + import homeassistant.helpers.json as json_helper + + json_helper.save_json( + filename, data, private, encoder=encoder, atomic_writes=atomic_writes + ) + + def format_unserializable_data(data: dict[str, Any]) -> str: """Format output of find_paths in a friendly way. Format is comma separated: =() """ return ", ".join(f"{path}={value}({type(value)}" for path, value in data.items()) + + +def find_paths_unserializable_data( + bad_data: Any, *, dump: Callable[[Any], str] = json.dumps +) -> dict[str, Any]: + """Find the paths to unserializable data. + + This method is slow! Only use for error handling. + """ + # pylint: disable-next=import-outside-toplevel + from homeassistant.helpers.frame import report + + report( + ( + "uses find_paths_unserializable_data from homeassistant.util.json module." 
+ " This is deprecated and will stop working in Home Assistant 2022.4, it" + " should be updated to use homeassistant.helpers.json module instead" + ), + error_if_core=False, + ) + + # pylint: disable-next=import-outside-toplevel + import homeassistant.helpers.json as json_helper + + return json_helper.find_paths_unserializable_data(bad_data, dump=dump) diff --git a/homeassistant/util/language.py b/homeassistant/util/language.py index 8a82de9065f..8644f8014b6 100644 --- a/homeassistant/util/language.py +++ b/homeassistant/util/language.py @@ -137,6 +137,9 @@ class Dialect: region_idx = pref_regions.index(self.region) elif dialect.region is not None: region_idx = pref_regions.index(dialect.region) + else: + # Can't happen, but mypy is not smart enough + raise ValueError # More preferred regions are at the front. # Add 1 to boost above a weak match where no regions are set. diff --git a/homeassistant/util/location.py b/homeassistant/util/location.py index c00cf88699e..24c49c5427c 100644 --- a/homeassistant/util/location.py +++ b/homeassistant/util/location.py @@ -163,8 +163,7 @@ async def _get_whoami(session: aiohttp.ClientSession) -> dict[str, Any] | None: """Query whoami.home-assistant.io for location data.""" try: resp = await session.get( - WHOAMI_URL_DEV if HA_VERSION.endswith("0.dev0") else WHOAMI_URL, - timeout=aiohttp.ClientTimeout(total=30), + WHOAMI_URL_DEV if HA_VERSION.endswith("0.dev0") else WHOAMI_URL, timeout=30 ) except (aiohttp.ClientError, TimeoutError): return None diff --git a/homeassistant/util/logging.py b/homeassistant/util/logging.py index 2c4eb744614..d2554ef543c 100644 --- a/homeassistant/util/logging.py +++ b/homeassistant/util/logging.py @@ -196,8 +196,8 @@ def async_create_catching_coro[_T]( trace = traceback.extract_stack() return catch_log_coro_exception( target, - lambda: ( - f"Exception in {target.__name__} called from\n" - + "".join(traceback.format_list(trace[:-1])) + lambda: "Exception in {} called from\n {}".format( + target.__name__, + "".join(traceback.format_list(trace[:-1])), ), ) diff --git a/homeassistant/util/loop.py b/homeassistant/util/loop.py index d7593013046..8a469569601 100644 --- a/homeassistant/util/loop.py +++ b/homeassistant/util/loop.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Callable import functools -from functools import cache import linecache import logging import threading @@ -27,11 +26,6 @@ def _get_line_from_cache(filename: str, lineno: int) -> str: return (linecache.getline(filename, lineno) or "?").strip() -# Set of previously reported blocking calls -# (integration, filename, lineno) -_PREVIOUSLY_REPORTED: set[tuple[str | None, str, int | Any]] = set() - - def raise_for_blocking_call( func: Callable[..., Any], check_allowed: Callable[[dict[str, Any]], bool] | None = None, @@ -48,48 +42,28 @@ def raise_for_blocking_call( offender_filename = offender_frame.f_code.co_filename offender_lineno = offender_frame.f_lineno offender_line = _get_line_from_cache(offender_filename, offender_lineno) - report_key: tuple[str | None, str, int | Any] try: integration_frame = get_integration_frame() except MissingIntegrationFrame: # Did not source from integration? Hard error. - report_key = (None, offender_filename, offender_lineno) - was_reported = report_key in _PREVIOUSLY_REPORTED - _PREVIOUSLY_REPORTED.add(report_key) if not strict_core: - if was_reported: - _LOGGER.debug( - "Detected blocking call to %s with args %s in %s, " - "line %s: %s inside the event loop; " - "This is causing stability issues. 
" - "Please create a bug report at " - "https://github.com/home-assistant/core/issues?q=is%%3Aopen+is%%3Aissue\n" - "%s\n", - func.__name__, - mapped_args.get("args"), - offender_filename, - offender_lineno, - offender_line, - _dev_help_message(func.__name__), - ) - else: - _LOGGER.warning( - "Detected blocking call to %s with args %s in %s, " - "line %s: %s inside the event loop; " - "This is causing stability issues. " - "Please create a bug report at " - "https://github.com/home-assistant/core/issues?q=is%%3Aopen+is%%3Aissue\n" - "%s\n" - "Traceback (most recent call last):\n%s", - func.__name__, - mapped_args.get("args"), - offender_filename, - offender_lineno, - offender_line, - _dev_help_message(func.__name__), - "".join(traceback.format_stack(f=offender_frame)), - ) + _LOGGER.warning( + "Detected blocking call to %s with args %s in %s, " + "line %s: %s inside the event loop; " + "This is causing stability issues. " + "Please create a bug report at " + "https://github.com/home-assistant/core/issues?q=is%%3Aopen+is%%3Aissue\n" + "%s\n" + "Traceback (most recent call last):\n%s", + func.__name__, + mapped_args.get("args"), + offender_filename, + offender_lineno, + offender_line, + _dev_help_message(func.__name__), + "".join(traceback.format_stack(f=offender_frame)), + ) return if found_frame is None: @@ -103,63 +77,39 @@ def raise_for_blocking_call( f"{_dev_help_message(func.__name__)}" ) - report_key = (integration_frame.integration, offender_filename, offender_lineno) - was_reported = report_key in _PREVIOUSLY_REPORTED - _PREVIOUSLY_REPORTED.add(report_key) - report_issue = async_suggest_report_issue( async_get_hass_or_none(), integration_domain=integration_frame.integration, module=integration_frame.module, ) - if was_reported: - _LOGGER.debug( - "Detected blocking call to %s with args %s " - "inside the event loop by %sintegration '%s' " - "at %s, line %s: %s (offender: %s, line %s: %s), please %s\n" - "%s\n", - func.__name__, - mapped_args.get("args"), - "custom " if integration_frame.custom_integration else "", - integration_frame.integration, - integration_frame.relative_filename, - integration_frame.line_number, - integration_frame.line, - offender_filename, - offender_lineno, - offender_line, - report_issue, - _dev_help_message(func.__name__), - ) - else: - _LOGGER.warning( - "Detected blocking call to %s with args %s " - "inside the event loop by %sintegration '%s' " - "at %s, line %s: %s (offender: %s, line %s: %s), please %s\n" - "%s\n" - "Traceback (most recent call last):\n%s", - func.__name__, - mapped_args.get("args"), - "custom " if integration_frame.custom_integration else "", - integration_frame.integration, - integration_frame.relative_filename, - integration_frame.line_number, - integration_frame.line, - offender_filename, - offender_lineno, - offender_line, - report_issue, - _dev_help_message(func.__name__), - "".join(traceback.format_stack(f=integration_frame.frame)), - ) + _LOGGER.warning( + "Detected blocking call to %s with args %s " + "inside the event loop by %sintegration '%s' " + "at %s, line %s: %s (offender: %s, line %s: %s), please %s\n" + "%s\n" + "Traceback (most recent call last):\n%s", + func.__name__, + mapped_args.get("args"), + "custom " if integration_frame.custom_integration else "", + integration_frame.integration, + integration_frame.relative_filename, + integration_frame.line_number, + integration_frame.line, + offender_filename, + offender_lineno, + offender_line, + report_issue, + _dev_help_message(func.__name__), + 
"".join(traceback.format_stack(f=integration_frame.frame)), + ) if strict: raise RuntimeError( - f"Caught blocking call to {func.__name__} with args " - f"{mapped_args.get('args')} inside the event loop by " + "Caught blocking call to {func.__name__} with args " + f"{mapped_args.get('args')} inside the event loop by" f"{'custom ' if integration_frame.custom_integration else ''}" - f"integration '{integration_frame.integration}' at " + "integration '{integration_frame.integration}' at " f"{integration_frame.relative_filename}, line {integration_frame.line_number}:" f" {integration_frame.line}. (offender: {offender_filename}, line " f"{offender_lineno}: {offender_line}), please {report_issue}\n" @@ -167,7 +117,6 @@ def raise_for_blocking_call( ) -@cache def _dev_help_message(what: str) -> str: """Generate help message to guide developers.""" return ( diff --git a/homeassistant/util/package.py b/homeassistant/util/package.py index da0666290a1..067bf5ff36d 100644 --- a/homeassistant/util/package.py +++ b/homeassistant/util/package.py @@ -8,7 +8,6 @@ from importlib.metadata import PackageNotFoundError, version import logging import os from pathlib import Path -import site from subprocess import PIPE, Popen import sys from urllib.parse import urlparse @@ -84,12 +83,6 @@ def is_installed(requirement_str: str) -> bool: return False -_UV_ENV_PYTHON_VARS = ( - "UV_SYSTEM_PYTHON", - "UV_PYTHON", -) - - def install_package( package: str, upgrade: bool = True, @@ -101,44 +94,22 @@ def install_package( Return boolean if install successful. """ + # Not using 'import pip; pip.main([])' because it breaks the logger _LOGGER.info("Attempting install of %s", package) env = os.environ.copy() - args = [ - sys.executable, - "-m", - "uv", - "pip", - "install", - "--quiet", - package, - # We need to use unsafe-first-match for custom components - # which can use a different version of a package than the one - # we have built the wheel for. 
- "--index-strategy", - "unsafe-first-match", - ] + args = [sys.executable, "-m", "pip", "install", "--quiet", package] if timeout: - env["HTTP_TIMEOUT"] = str(timeout) + args += ["--timeout", str(timeout)] if upgrade: args.append("--upgrade") if constraints is not None: args += ["--constraint", constraints] if target: - abs_target = os.path.abspath(target) - args += ["--target", abs_target] - elif ( - not is_virtual_env() - and not (any(var in env for var in _UV_ENV_PYTHON_VARS)) - and (abs_target := site.getusersitepackages()) - ): - # Pip compatibility - # Uv has currently no support for --user - # See https://github.com/astral-sh/uv/issues/2077 - # Using workaround to install to site-packages - # https://github.com/astral-sh/uv/issues/2077#issuecomment-2150406001 - args += ["--python", sys.executable, "--target", abs_target] - - _LOGGER.debug("Running uv pip command: args=%s", args) + assert not is_virtual_env() + # This only works if not running in venv + args += ["--user"] + env["PYTHONUSERBASE"] = os.path.abspath(target) + _LOGGER.debug("Running pip command: args=%s", args) with Popen( args, stdin=PIPE, diff --git a/homeassistant/util/pil.py b/homeassistant/util/pil.py index 6925cd03a4c..733f640ce48 100644 --- a/homeassistant/util/pil.py +++ b/homeassistant/util/pil.py @@ -28,7 +28,7 @@ def draw_box( """ line_width = 3 - font_height = 20 + font_height = 8 y_min, x_min, y_max, x_max = box (left, right, top, bottom) = ( x_min * img_width, @@ -43,8 +43,5 @@ def draw_box( ) if text: draw.text( - (left + line_width, abs(top - line_width - font_height)), - text, - fill=color, - font_size=font_height, + (left + line_width, abs(top - line_width - font_height)), text, fill=color ) diff --git a/homeassistant/util/ssl.py b/homeassistant/util/ssl.py index a22fd0c8fb4..7c1e653ce75 100644 --- a/homeassistant/util/ssl.py +++ b/homeassistant/util/ssl.py @@ -15,7 +15,6 @@ class SSLCipherList(StrEnum): PYTHON_DEFAULT = "python_default" INTERMEDIATE = "intermediate" MODERN = "modern" - INSECURE = "insecure" SSL_CIPHER_LISTS = { @@ -59,12 +58,11 @@ SSL_CIPHER_LISTS = { "ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:" "ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256" ), - SSLCipherList.INSECURE: "DEFAULT:@SECLEVEL=0", } @cache -def _client_context_no_verify(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext: +def _create_no_verify_ssl_context(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext: # This is a copy of aiohttp's create_default_context() function, with the # ssl verify turned off. # https://github.com/aio-libs/aiohttp/blob/33953f110e97eecc707e1402daa8d543f38a189b/aiohttp/connector.py#L911 @@ -82,10 +80,16 @@ def _client_context_no_verify(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext: return sslcontext -@cache -def _client_context( +def create_no_verify_ssl_context( ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT, ) -> ssl.SSLContext: + """Return an SSL context that does not verify the server certificate.""" + + return _create_no_verify_ssl_context(ssl_cipher_list=ssl_cipher_list) + + +@cache +def _client_context(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext: # Reuse environment variable definition from requests, since it's already a # requirement. If the environment variable has no value, fall back to using # certs from certifi package. 
@@ -100,19 +104,17 @@ def _client_context( return sslcontext +def client_context( + ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT, +) -> ssl.SSLContext: + """Return an SSL context for making requests.""" + + return _client_context(ssl_cipher_list=ssl_cipher_list) + + # Create this only once and reuse it -_DEFAULT_SSL_CONTEXT = _client_context(SSLCipherList.PYTHON_DEFAULT) -_DEFAULT_NO_VERIFY_SSL_CONTEXT = _client_context_no_verify(SSLCipherList.PYTHON_DEFAULT) -_NO_VERIFY_SSL_CONTEXTS = { - SSLCipherList.INTERMEDIATE: _client_context_no_verify(SSLCipherList.INTERMEDIATE), - SSLCipherList.MODERN: _client_context_no_verify(SSLCipherList.MODERN), - SSLCipherList.INSECURE: _client_context_no_verify(SSLCipherList.INSECURE), -} -_SSL_CONTEXTS = { - SSLCipherList.INTERMEDIATE: _client_context(SSLCipherList.INTERMEDIATE), - SSLCipherList.MODERN: _client_context(SSLCipherList.MODERN), - SSLCipherList.INSECURE: _client_context(SSLCipherList.INSECURE), -} +_DEFAULT_SSL_CONTEXT = client_context() +_DEFAULT_NO_VERIFY_SSL_CONTEXT = create_no_verify_ssl_context() def get_default_context() -> ssl.SSLContext: @@ -125,27 +127,6 @@ def get_default_no_verify_context() -> ssl.SSLContext: return _DEFAULT_NO_VERIFY_SSL_CONTEXT -def client_context_no_verify( - ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT, -) -> ssl.SSLContext: - """Return a SSL context with no verification with a specific ssl cipher.""" - return _NO_VERIFY_SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_NO_VERIFY_SSL_CONTEXT) - - -def client_context( - ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT, -) -> ssl.SSLContext: - """Return an SSL context for making requests.""" - return _SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_SSL_CONTEXT) - - -def create_no_verify_ssl_context( - ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT, -) -> ssl.SSLContext: - """Return an SSL context that does not verify the server certificate.""" - return _client_context_no_verify(ssl_cipher_list) - - def server_context_modern() -> ssl.SSLContext: """Return an SSL context following the Mozilla recommendations. 
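Both sides of the util/ssl.py hunk above memoize ssl.SSLContext objects instead of rebuilding them for every request: the removed lines pre-build dicts of contexts keyed by cipher list, while the version this patch installs reaches the same goal with @cache-decorated factory functions plus two module-level defaults. The sketch below is only an illustration of that memoization pattern, assuming the standard library plus certifi; CipherProfile, make_verified_context and _MODERN_CIPHERS are made-up names for this example, not Home Assistant APIs.

# Minimal sketch: building an SSLContext loads CA bundles from disk, so create
# one context per cipher profile and hand out the shared instance afterwards.
from enum import StrEnum
from functools import cache
import ssl

import certifi  # CA bundle source, mirroring the helper's certifi fallback


class CipherProfile(StrEnum):
    """Illustrative cipher profiles (stand-in for SSLCipherList)."""

    PYTHON_DEFAULT = "python_default"
    MODERN = "modern"


_MODERN_CIPHERS = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256"


@cache
def make_verified_context(
    profile: CipherProfile = CipherProfile.PYTHON_DEFAULT,
) -> ssl.SSLContext:
    """Create a verifying client context once per profile and cache it."""
    context = ssl.create_default_context(cafile=certifi.where())
    if profile is CipherProfile.MODERN:
        context.set_ciphers(_MODERN_CIPHERS)
    return context


if __name__ == "__main__":
    # Repeated calls with the same profile return the same shared object.
    assert make_verified_context() is make_verified_context()

Either approach keeps certificate loading off the hot path; the dict variant in the removed lines simply pays that cost eagerly at import time, while the cached factories pay it on first use.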
diff --git a/homeassistant/util/timeout.py b/homeassistant/util/timeout.py index ddabdf2746d..72cabffeed6 100644 --- a/homeassistant/util/timeout.py +++ b/homeassistant/util/timeout.py @@ -16,7 +16,7 @@ from .async_ import run_callback_threadsafe ZONE_GLOBAL = "global" -class _State(enum.Enum): +class _State(str, enum.Enum): """States of a task.""" INIT = "INIT" @@ -61,16 +61,18 @@ class _GlobalFreezeContext: def _enter(self) -> None: """Run freeze.""" - if self._manager.freezes_done: - # Global reset - for task in self._manager.global_tasks: - task.pause() + if not self._manager.freezes_done: + return - # Zones reset - for zone in self._manager.zones.values(): - if not zone.freezes_done: - continue - zone.pause() + # Global reset + for task in self._manager.global_tasks: + task.pause() + + # Zones reset + for zone in self._manager.zones.values(): + if not zone.freezes_done: + continue + zone.pause() self._manager.global_freezes.append(self) @@ -160,16 +162,11 @@ class _GlobalTaskContext: self._wait_zone: asyncio.Event = asyncio.Event() self._state: _State = _State.INIT self._cool_down: float = cool_down - self._cancelling = 0 async def __aenter__(self) -> Self: self._manager.global_tasks.append(self) self._start_timer() self._state = _State.ACTIVE - # Remember if the task was already cancelling - # so when we __aexit__ we can decide if we should - # raise asyncio.TimeoutError or let the cancellation propagate - self._cancelling = self._task.cancelling() return self async def __aexit__( @@ -182,15 +179,7 @@ class _GlobalTaskContext: self._manager.global_tasks.remove(self) # Timeout on exit - if exc_type is asyncio.CancelledError and self.state is _State.TIMEOUT: - # The timeout was hit, and the task was cancelled - # so we need to uncancel the task since the cancellation - # should not leak out of the context manager - if self._task.uncancel() > self._cancelling: - # If the task was already cancelling don't raise - # asyncio.TimeoutError and instead return None - # to allow the cancellation to propagate - return None + if exc_type is asyncio.CancelledError and self.state == _State.TIMEOUT: raise TimeoutError self._state = _State.EXIT @@ -279,7 +268,6 @@ class _ZoneTaskContext: self._time_left: float = timeout self._expiration_time: float | None = None self._timeout_handler: asyncio.Handle | None = None - self._cancelling = 0 @property def state(self) -> _State: @@ -294,11 +282,6 @@ class _ZoneTaskContext: if self._zone.freezes_done: self._start_timer() - # Remember if the task was already cancelling - # so when we __aexit__ we can decide if we should - # raise asyncio.TimeoutError or let the cancellation propagate - self._cancelling = self._task.cancelling() - return self async def __aexit__( @@ -311,15 +294,7 @@ class _ZoneTaskContext: self._stop_timer() # Timeout on exit - if exc_type is asyncio.CancelledError and self.state is _State.TIMEOUT: - # The timeout was hit, and the task was cancelled - # so we need to uncancel the task since the cancellation - # should not leak out of the context manager - if self._task.uncancel() > self._cancelling: - # If the task was already cancelling don't raise - # asyncio.TimeoutError and instead return None - # to allow the cancellation to propagate - return None + if exc_type is asyncio.CancelledError and self.state == _State.TIMEOUT: raise TimeoutError self._state = _State.EXIT diff --git a/homeassistant/util/ulid.py b/homeassistant/util/ulid.py index f4895f9d963..65f1b8226c0 100644 --- a/homeassistant/util/ulid.py +++ b/homeassistant/util/ulid.py @@ 
-4,12 +4,10 @@ from __future__ import annotations from ulid_transform import ( bytes_to_ulid, - bytes_to_ulid_or_none, ulid_at_time, ulid_hex, ulid_now, ulid_to_bytes, - ulid_to_bytes_or_none, ) __all__ = [ @@ -19,8 +17,6 @@ __all__ = [ "ulid_to_bytes", "bytes_to_ulid", "ulid_now", - "ulid_to_bytes_or_none", - "bytes_to_ulid_or_none", ] diff --git a/homeassistant/util/unit_conversion.py b/homeassistant/util/unit_conversion.py index 1bf3561e66a..2b9f73afab7 100644 --- a/homeassistant/util/unit_conversion.py +++ b/homeassistant/util/unit_conversion.py @@ -10,7 +10,6 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, UNIT_NOT_RECOGNIZED_TEMPLATE, - UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -48,10 +47,6 @@ _HRS_TO_MINUTES = 60 # 1 hr = 60 minutes _HRS_TO_SECS = _HRS_TO_MINUTES * _MIN_TO_SEC # 1 hr = 60 minutes = 3600 seconds _DAYS_TO_SECS = 24 * _HRS_TO_SECS # 1 day = 24 hours = 86400 seconds -# Energy conversion constants -_WH_TO_J = 3600 # 1 Wh = 3600 J -_WH_TO_CAL = _WH_TO_J / 4.184 # 1 Wh = 860.42065 cal - # Mass conversion constants _POUND_TO_G = 453.59237 _OUNCE_TO_G = _POUND_TO_G / 16 # 16 ounces to a pound @@ -73,6 +68,7 @@ class BaseUnitConverter: """Define the format of a conversion utility.""" UNIT_CLASS: str + NORMALIZED_UNIT: str | None VALID_UNITS: set[str | None] _UNIT_CONVERSION: dict[str | None, float] @@ -129,6 +125,7 @@ class DataRateConverter(BaseUnitConverter): """Utility to convert data rate values.""" UNIT_CLASS = "data_rate" + NORMALIZED_UNIT = UnitOfDataRate.BITS_PER_SECOND # Units in terms of bits _UNIT_CONVERSION: dict[str | None, float] = { UnitOfDataRate.BITS_PER_SECOND: 1, @@ -150,6 +147,7 @@ class DistanceConverter(BaseUnitConverter): """Utility to convert distance values.""" UNIT_CLASS = "distance" + NORMALIZED_UNIT = UnitOfLength.METERS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfLength.METERS: 1, UnitOfLength.MILLIMETERS: 1 / _MM_TO_M, @@ -159,12 +157,10 @@ class DistanceConverter(BaseUnitConverter): UnitOfLength.FEET: 1 / _FOOT_TO_M, UnitOfLength.YARDS: 1 / _YARD_TO_M, UnitOfLength.MILES: 1 / _MILE_TO_M, - UnitOfLength.NAUTICAL_MILES: 1 / _NAUTICAL_MILE_TO_M, } VALID_UNITS = { UnitOfLength.KILOMETERS, UnitOfLength.MILES, - UnitOfLength.NAUTICAL_MILES, UnitOfLength.FEET, UnitOfLength.METERS, UnitOfLength.CENTIMETERS, @@ -174,25 +170,15 @@ class DistanceConverter(BaseUnitConverter): } -class BloodGlucoseConcentrationConverter(BaseUnitConverter): - """Utility to convert blood glucose concentration values.""" - - UNIT_CLASS = "blood_glucose_concentration" - _UNIT_CONVERSION: dict[str | None, float] = { - UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER: 18, - UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER: 1, - } - VALID_UNITS = set(UnitOfBloodGlucoseConcentration) - - class ConductivityConverter(BaseUnitConverter): """Utility to convert electric current values.""" UNIT_CLASS = "conductivity" + NORMALIZED_UNIT = UnitOfConductivity.MICROSIEMENS _UNIT_CONVERSION: dict[str | None, float] = { - UnitOfConductivity.MICROSIEMENS_PER_CM: 1, - UnitOfConductivity.MILLISIEMENS_PER_CM: 1e-3, - UnitOfConductivity.SIEMENS_PER_CM: 1e-6, + UnitOfConductivity.MICROSIEMENS: 1, + UnitOfConductivity.MILLISIEMENS: 1e-3, + UnitOfConductivity.SIEMENS: 1e-6, } VALID_UNITS = set(UnitOfConductivity) @@ -201,6 +187,7 @@ class ElectricCurrentConverter(BaseUnitConverter): """Utility to convert electric current values.""" UNIT_CLASS = "electric_current" + NORMALIZED_UNIT = 
UnitOfElectricCurrent.AMPERE _UNIT_CONVERSION: dict[str | None, float] = { UnitOfElectricCurrent.AMPERE: 1, UnitOfElectricCurrent.MILLIAMPERE: 1e3, @@ -212,6 +199,7 @@ class ElectricPotentialConverter(BaseUnitConverter): """Utility to convert electric potential values.""" UNIT_CLASS = "voltage" + NORMALIZED_UNIT = UnitOfElectricPotential.VOLT _UNIT_CONVERSION: dict[str | None, float] = { UnitOfElectricPotential.VOLT: 1, UnitOfElectricPotential.MILLIVOLT: 1e3, @@ -226,28 +214,28 @@ class EnergyConverter(BaseUnitConverter): """Utility to convert energy values.""" UNIT_CLASS = "energy" + NORMALIZED_UNIT = UnitOfEnergy.KILO_WATT_HOUR _UNIT_CONVERSION: dict[str | None, float] = { - UnitOfEnergy.JOULE: _WH_TO_J * 1e3, - UnitOfEnergy.KILO_JOULE: _WH_TO_J, - UnitOfEnergy.MEGA_JOULE: _WH_TO_J / 1e3, - UnitOfEnergy.GIGA_JOULE: _WH_TO_J / 1e6, - UnitOfEnergy.WATT_HOUR: 1e3, + UnitOfEnergy.WATT_HOUR: 1 * 1000, UnitOfEnergy.KILO_WATT_HOUR: 1, - UnitOfEnergy.MEGA_WATT_HOUR: 1 / 1e3, - UnitOfEnergy.GIGA_WATT_HOUR: 1 / 1e6, - UnitOfEnergy.TERA_WATT_HOUR: 1 / 1e9, - UnitOfEnergy.CALORIE: _WH_TO_CAL * 1e3, - UnitOfEnergy.KILO_CALORIE: _WH_TO_CAL, - UnitOfEnergy.MEGA_CALORIE: _WH_TO_CAL / 1e3, - UnitOfEnergy.GIGA_CALORIE: _WH_TO_CAL / 1e6, + UnitOfEnergy.MEGA_WATT_HOUR: 1 / 1000, + UnitOfEnergy.MEGA_JOULE: 3.6, + UnitOfEnergy.GIGA_JOULE: 3.6 / 1000, + } + VALID_UNITS = { + UnitOfEnergy.WATT_HOUR, + UnitOfEnergy.KILO_WATT_HOUR, + UnitOfEnergy.MEGA_WATT_HOUR, + UnitOfEnergy.MEGA_JOULE, + UnitOfEnergy.GIGA_JOULE, } - VALID_UNITS = set(UnitOfEnergy) class InformationConverter(BaseUnitConverter): """Utility to convert information values.""" UNIT_CLASS = "information" + NORMALIZED_UNIT = UnitOfInformation.BITS # Units in terms of bits _UNIT_CONVERSION: dict[str | None, float] = { UnitOfInformation.BITS: 1, @@ -279,6 +267,7 @@ class MassConverter(BaseUnitConverter): """Utility to convert mass values.""" UNIT_CLASS = "mass" + NORMALIZED_UNIT = UnitOfMass.GRAMS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfMass.MICROGRAMS: 1 * 1000 * 1000, UnitOfMass.MILLIGRAMS: 1 * 1000, @@ -303,19 +292,14 @@ class PowerConverter(BaseUnitConverter): """Utility to convert power values.""" UNIT_CLASS = "power" + NORMALIZED_UNIT = UnitOfPower.WATT _UNIT_CONVERSION: dict[str | None, float] = { UnitOfPower.WATT: 1, UnitOfPower.KILO_WATT: 1 / 1000, - UnitOfPower.MEGA_WATT: 1 / 1e6, - UnitOfPower.GIGA_WATT: 1 / 1e9, - UnitOfPower.TERA_WATT: 1 / 1e12, } VALID_UNITS = { UnitOfPower.WATT, UnitOfPower.KILO_WATT, - UnitOfPower.MEGA_WATT, - UnitOfPower.GIGA_WATT, - UnitOfPower.TERA_WATT, } @@ -323,6 +307,7 @@ class PressureConverter(BaseUnitConverter): """Utility to convert pressure values.""" UNIT_CLASS = "pressure" + NORMALIZED_UNIT = UnitOfPressure.PA _UNIT_CONVERSION: dict[str | None, float] = { UnitOfPressure.PA: 1, UnitOfPressure.HPA: 1 / 100, @@ -353,17 +338,16 @@ class SpeedConverter(BaseUnitConverter): """Utility to convert speed values.""" UNIT_CLASS = "speed" + NORMALIZED_UNIT = UnitOfSpeed.METERS_PER_SECOND _UNIT_CONVERSION: dict[str | None, float] = { UnitOfVolumetricFlux.INCHES_PER_DAY: _DAYS_TO_SECS / _IN_TO_M, UnitOfVolumetricFlux.INCHES_PER_HOUR: _HRS_TO_SECS / _IN_TO_M, UnitOfVolumetricFlux.MILLIMETERS_PER_DAY: _DAYS_TO_SECS / _MM_TO_M, UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR: _HRS_TO_SECS / _MM_TO_M, UnitOfSpeed.FEET_PER_SECOND: 1 / _FOOT_TO_M, - UnitOfSpeed.INCHES_PER_SECOND: 1 / _IN_TO_M, UnitOfSpeed.KILOMETERS_PER_HOUR: _HRS_TO_SECS / _KM_TO_M, UnitOfSpeed.KNOTS: _HRS_TO_SECS / _NAUTICAL_MILE_TO_M, 
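
Note: in both the old and new factor tables above, each value means "how many of this unit equal one normalized unit", so a conversion is a divide followed by a multiply. A tiny illustrative sketch of that arithmetic (the table and helper below are invented, not the converter's real method):

# Factors relative to the normalized unit (kWh), mirroring the table style above.
_FACTORS: dict[str, float] = {"Wh": 1000.0, "kWh": 1.0, "MWh": 1 / 1000}


def convert(value: float, from_unit: str, to_unit: str) -> float:
    """Normalize first, then scale: value / from_factor * to_factor."""
    return value / _FACTORS[from_unit] * _FACTORS[to_unit]


assert convert(1500.0, "Wh", "kWh") == 1.5
print(convert(0.25, "kWh", "Wh"))  # 250.0
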
UnitOfSpeed.METERS_PER_SECOND: 1, - UnitOfSpeed.MILLIMETERS_PER_SECOND: 1 / _MM_TO_M, UnitOfSpeed.MILES_PER_HOUR: _HRS_TO_SECS / _MILE_TO_M, UnitOfSpeed.BEAUFORT: 1, } @@ -372,13 +356,11 @@ class SpeedConverter(BaseUnitConverter): UnitOfVolumetricFlux.INCHES_PER_HOUR, UnitOfVolumetricFlux.MILLIMETERS_PER_DAY, UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, - UnitOfSpeed.INCHES_PER_SECOND, UnitOfSpeed.FEET_PER_SECOND, UnitOfSpeed.KILOMETERS_PER_HOUR, UnitOfSpeed.KNOTS, UnitOfSpeed.METERS_PER_SECOND, UnitOfSpeed.MILES_PER_HOUR, - UnitOfSpeed.MILLIMETERS_PER_SECOND, UnitOfSpeed.BEAUFORT, } @@ -451,6 +433,7 @@ class TemperatureConverter(BaseUnitConverter): """Utility to convert temperature values.""" UNIT_CLASS = "temperature" + NORMALIZED_UNIT = UnitOfTemperature.CELSIUS VALID_UNITS = { UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT, @@ -581,6 +564,7 @@ class UnitlessRatioConverter(BaseUnitConverter): """Utility to convert unitless ratios.""" UNIT_CLASS = "unitless" + NORMALIZED_UNIT = None _UNIT_CONVERSION: dict[str | None, float] = { None: 1, CONCENTRATION_PARTS_PER_BILLION: 1000000000, @@ -597,6 +581,7 @@ class VolumeConverter(BaseUnitConverter): """Utility to convert volume values.""" UNIT_CLASS = "volume" + NORMALIZED_UNIT = UnitOfVolume.CUBIC_METERS # Units in terms of m³ _UNIT_CONVERSION: dict[str | None, float] = { UnitOfVolume.LITERS: 1 / _L_TO_CUBIC_METER, @@ -622,6 +607,7 @@ class VolumeFlowRateConverter(BaseUnitConverter): """Utility to convert volume values.""" UNIT_CLASS = "volume_flow_rate" + NORMALIZED_UNIT = UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR # Units in terms of m³/h _UNIT_CONVERSION: dict[str | None, float] = { UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR: 1, @@ -644,6 +630,7 @@ class DurationConverter(BaseUnitConverter): """Utility to convert duration values.""" UNIT_CLASS = "duration" + NORMALIZED_UNIT = UnitOfTime.SECONDS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfTime.MICROSECONDS: 1000000, UnitOfTime.MILLISECONDS: 1000, diff --git a/homeassistant/util/unit_system.py b/homeassistant/util/unit_system.py index 7f7c7f2b5fd..bd31b4286ab 100644 --- a/homeassistant/util/unit_system.py +++ b/homeassistant/util/unit_system.py @@ -58,21 +58,23 @@ WIND_SPEED_UNITS = SpeedConverter.VALID_UNITS TEMPERATURE_UNITS: set[str] = {UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS} -_VALID_BY_TYPE: dict[str, set[str] | set[str | None]] = { - LENGTH: LENGTH_UNITS, - ACCUMULATED_PRECIPITATION: LENGTH_UNITS, - WIND_SPEED: WIND_SPEED_UNITS, - TEMPERATURE: TEMPERATURE_UNITS, - MASS: MASS_UNITS, - VOLUME: VOLUME_UNITS, - PRESSURE: PRESSURE_UNITS, -} - def _is_valid_unit(unit: str, unit_type: str) -> bool: """Check if the unit is valid for it's type.""" - if units := _VALID_BY_TYPE.get(unit_type): - return unit in units + if unit_type == LENGTH: + return unit in LENGTH_UNITS + if unit_type == ACCUMULATED_PRECIPITATION: + return unit in LENGTH_UNITS + if unit_type == WIND_SPEED: + return unit in WIND_SPEED_UNITS + if unit_type == TEMPERATURE: + return unit in TEMPERATURE_UNITS + if unit_type == MASS: + return unit in MASS_UNITS + if unit_type == VOLUME: + return unit in VOLUME_UNITS + if unit_type == PRESSURE: + return unit in PRESSURE_UNITS return False @@ -238,7 +240,6 @@ METRIC_SYSTEM = UnitSystem( ("distance", UnitOfLength.FEET): UnitOfLength.METERS, ("distance", UnitOfLength.INCHES): UnitOfLength.MILLIMETERS, ("distance", UnitOfLength.MILES): UnitOfLength.KILOMETERS, - ("distance", UnitOfLength.NAUTICAL_MILES): UnitOfLength.KILOMETERS, ("distance", 
UnitOfLength.YARDS): UnitOfLength.METERS, # Convert non-metric volumes of gas meters ("gas", UnitOfVolume.CENTUM_CUBIC_FEET): UnitOfVolume.CUBIC_METERS, @@ -259,7 +260,6 @@ METRIC_SYSTEM = UnitSystem( ("pressure", UnitOfPressure.INHG): UnitOfPressure.HPA, # Convert non-metric speeds except knots to km/h ("speed", UnitOfSpeed.FEET_PER_SECOND): UnitOfSpeed.KILOMETERS_PER_HOUR, - ("speed", UnitOfSpeed.INCHES_PER_SECOND): UnitOfSpeed.MILLIMETERS_PER_SECOND, ("speed", UnitOfSpeed.MILES_PER_HOUR): UnitOfSpeed.KILOMETERS_PER_HOUR, ( "speed", @@ -332,7 +332,6 @@ US_CUSTOMARY_SYSTEM = UnitSystem( ("pressure", UnitOfPressure.MMHG): UnitOfPressure.INHG, # Convert non-USCS speeds, except knots, to mph ("speed", UnitOfSpeed.METERS_PER_SECOND): UnitOfSpeed.MILES_PER_HOUR, - ("speed", UnitOfSpeed.MILLIMETERS_PER_SECOND): UnitOfSpeed.INCHES_PER_SECOND, ("speed", UnitOfSpeed.KILOMETERS_PER_HOUR): UnitOfSpeed.MILES_PER_HOUR, ( "speed", diff --git a/homeassistant/util/yaml/loader.py b/homeassistant/util/yaml/loader.py index 39d38a8f47d..ff9b7cb3601 100644 --- a/homeassistant/util/yaml/loader.py +++ b/homeassistant/util/yaml/loader.py @@ -22,9 +22,10 @@ except ImportError: SafeLoader as FastestAvailableSafeLoader, ) -from propcache import cached_property +from functools import cached_property from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.frame import report from .const import SECRET_YAML from .objects import Input, NodeDictClass, NodeListClass, NodeStrClass @@ -143,6 +144,37 @@ class FastSafeLoader(FastestAvailableSafeLoader, _LoaderMixin): self.secrets = secrets +class SafeLoader(FastSafeLoader): + """Provided for backwards compatibility. Logs when instantiated.""" + + def __init__(*args: Any, **kwargs: Any) -> None: + """Log a warning and call super.""" + SafeLoader.__report_deprecated() + FastSafeLoader.__init__(*args, **kwargs) + + @classmethod + def add_constructor(cls, tag: str, constructor: Callable) -> None: + """Log a warning and call super.""" + SafeLoader.__report_deprecated() + FastSafeLoader.add_constructor(tag, constructor) + + @classmethod + def add_multi_constructor( + cls, tag_prefix: str, multi_constructor: Callable + ) -> None: + """Log a warning and call super.""" + SafeLoader.__report_deprecated() + FastSafeLoader.add_multi_constructor(tag_prefix, multi_constructor) + + @staticmethod + def __report_deprecated() -> None: + """Log deprecation warning.""" + report( + "uses deprecated 'SafeLoader' instead of 'FastSafeLoader', " + "which will stop working in HA Core 2024.6," + ) + + class PythonSafeLoader(yaml.SafeLoader, _LoaderMixin): """Python safe loader.""" @@ -152,27 +184,50 @@ class PythonSafeLoader(yaml.SafeLoader, _LoaderMixin): self.secrets = secrets +class SafeLineLoader(PythonSafeLoader): + """Provided for backwards compatibility. 
Logs when instantiated.""" + + def __init__(*args: Any, **kwargs: Any) -> None: + """Log a warning and call super.""" + SafeLineLoader.__report_deprecated() + PythonSafeLoader.__init__(*args, **kwargs) + + @classmethod + def add_constructor(cls, tag: str, constructor: Callable) -> None: + """Log a warning and call super.""" + SafeLineLoader.__report_deprecated() + PythonSafeLoader.add_constructor(tag, constructor) + + @classmethod + def add_multi_constructor( + cls, tag_prefix: str, multi_constructor: Callable + ) -> None: + """Log a warning and call super.""" + SafeLineLoader.__report_deprecated() + PythonSafeLoader.add_multi_constructor(tag_prefix, multi_constructor) + + @staticmethod + def __report_deprecated() -> None: + """Log deprecation warning.""" + report( + "uses deprecated 'SafeLineLoader' instead of 'PythonSafeLoader', " + "which will stop working in HA Core 2024.6," + ) + + type LoaderType = FastSafeLoader | PythonSafeLoader def load_yaml( fname: str | os.PathLike[str], secrets: Secrets | None = None ) -> JSON_TYPE | None: - """Load a YAML file. - - If opening the file raises an OSError it will be wrapped in a HomeAssistantError, - except for FileNotFoundError which will be re-raised. - """ + """Load a YAML file.""" try: with open(fname, encoding="utf-8") as conf_file: return parse_yaml(conf_file, secrets) except UnicodeDecodeError as exc: _LOGGER.error("Unable to read file %s: %s", fname, exc) raise HomeAssistantError(exc) from exc - except FileNotFoundError: - raise - except OSError as exc: - raise HomeAssistantError(exc) from exc def load_yaml_dict( @@ -293,20 +348,6 @@ def _add_reference_to_node_class( return obj -def _raise_if_no_value[NodeT: yaml.nodes.Node, _R]( - func: Callable[[LoaderType, NodeT], _R], -) -> Callable[[LoaderType, NodeT], _R]: - def wrapper(loader: LoaderType, node: NodeT) -> _R: - if not node.value: - raise HomeAssistantError( - f"{node.start_mark}: {node.tag} needs an argument." - ) - return func(loader, node) - - return wrapper - - -@_raise_if_no_value def _include_yaml(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE: """Load another YAML file and embed it using the !include tag. @@ -322,7 +363,7 @@ def _include_yaml(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE: return _add_reference(loaded_yaml, loader, node) except FileNotFoundError as exc: raise HomeAssistantError( - f"{node.start_mark}: Unable to read file {fname}" + f"{node.start_mark}: Unable to read file {fname}." 
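
Note: the SafeLoader and SafeLineLoader shims restored above follow the common "deprecated alias" shape: a subclass that only reports and then delegates to the real class. A generic stdlib-only sketch of that shape (class names invented; the real shims report via homeassistant.helpers.frame rather than warnings):

import warnings


class FastLoader:
    """Stand-in for the preferred implementation."""

    def __init__(self, stream: str) -> None:
        self.stream = stream


class LegacyLoader(FastLoader):
    """Deprecated alias kept for backwards compatibility; warns on use."""

    def __init__(self, stream: str) -> None:
        warnings.warn(
            "LegacyLoader is deprecated, use FastLoader instead",
            DeprecationWarning,
            stacklevel=2,
        )
        super().__init__(stream)


LegacyLoader("a: 1")  # warns once here, then behaves exactly like FastLoader
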
) from exc @@ -341,7 +382,6 @@ def _find_files(directory: str, pattern: str) -> Iterator[str]: yield filename -@_raise_if_no_value def _include_dir_named_yaml(loader: LoaderType, node: yaml.nodes.Node) -> NodeDictClass: """Load multiple files from directory as a dictionary.""" mapping = NodeDictClass() @@ -359,7 +399,6 @@ def _include_dir_named_yaml(loader: LoaderType, node: yaml.nodes.Node) -> NodeDi return _add_reference_to_node_class(mapping, loader, node) -@_raise_if_no_value def _include_dir_merge_named_yaml( loader: LoaderType, node: yaml.nodes.Node ) -> NodeDictClass: @@ -375,7 +414,6 @@ def _include_dir_merge_named_yaml( return _add_reference_to_node_class(mapping, loader, node) -@_raise_if_no_value def _include_dir_list_yaml( loader: LoaderType, node: yaml.nodes.Node ) -> list[JSON_TYPE]: @@ -389,7 +427,6 @@ def _include_dir_list_yaml( ] -@_raise_if_no_value def _include_dir_merge_list_yaml( loader: LoaderType, node: yaml.nodes.Node ) -> JSON_TYPE: diff --git a/homeassistant/util/yaml/objects.py b/homeassistant/util/yaml/objects.py index 7e4019331c6..d35ba11d25e 100644 --- a/homeassistant/util/yaml/objects.py +++ b/homeassistant/util/yaml/objects.py @@ -29,7 +29,7 @@ class NodeStrClass(str): def __voluptuous_compile__(self, schema: vol.Schema) -> Any: """Needed because vol.Schema.compile does not handle str subclasses.""" - return _compile_scalar(self) # type: ignore[no-untyped-call] + return _compile_scalar(self) class NodeDictClass(dict): diff --git a/mypy.ini b/mypy.ini index 4d33f16d968..d94e5a37194 100644 --- a/mypy.ini +++ b/mypy.ini @@ -8,10 +8,10 @@ platform = linux plugins = pydantic.mypy show_error_codes = true follow_imports = normal +enable_incomplete_feature = NewGenericSyntax local_partial_types = true strict_equality = true no_implicit_optional = true -report_deprecated_as_error = true warn_incomplete_stub = true warn_redundant_casts = true warn_unused_configs = true @@ -85,9 +85,6 @@ disallow_any_generics = true [mypy-homeassistant.helpers.reload] disallow_any_generics = true -[mypy-homeassistant.helpers.script] -disallow_any_generics = true - [mypy-homeassistant.helpers.script_variables] disallow_any_generics = true @@ -705,7 +702,17 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.assist_satellite.*] +[mypy-homeassistant.components.asterisk_cdr.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + +[mypy-homeassistant.components.asterisk_mbox.*] check_untyped_defs = true disallow_incomplete_defs = true disallow_subclassing_any = true @@ -725,16 +732,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.autarco.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.auth.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -865,16 +862,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.bluesound.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true 
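
Note: the _include_yaml and _include_dir_* helpers above are plain PyYAML constructors registered for custom tags. A minimal hedged example of the same mechanism using stock PyYAML (the !upper tag and ExampleLoader are invented for illustration):

import yaml


class ExampleLoader(yaml.SafeLoader):
    """Own subclass so the constructor does not leak onto yaml.SafeLoader itself."""


def _upper_constructor(loader: ExampleLoader, node: yaml.nodes.Node) -> str:
    # Resolve the scalar value of the node and post-process it.
    return str(loader.construct_scalar(node)).upper()


ExampleLoader.add_constructor("!upper", _upper_constructor)

print(yaml.load("greeting: !upper hello", Loader=ExampleLoader))
# {'greeting': 'HELLO'}
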
-disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.bluetooth.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -955,16 +942,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.bryant_evolution.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.bthome.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -995,16 +972,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.cambridge_audio.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.camera.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1175,16 +1142,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.deako.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.deconz.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1456,16 +1413,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.elevenlabs.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.elgato.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1756,16 +1703,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.fujitsu_fglair.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.fully_kiosk.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1776,16 +1713,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.fyta.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.generic_hygrostat.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1846,16 +1773,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.go2rtc.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.goalzero.*] check_untyped_defs = 
true disallow_incomplete_defs = true @@ -1886,26 +1803,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.google_cloud.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - -[mypy-homeassistant.components.google_photos.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.google_sheets.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1916,16 +1813,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.govee_ble.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.gpsd.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2406,16 +2293,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.iotty.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.ipp.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2616,16 +2493,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.lektrico.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.lidarr.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2666,16 +2533,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.linkplay.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.litejet.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2776,7 +2633,7 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.madvr.*] +[mypy-homeassistant.components.mailbox.*] check_untyped_defs = true disallow_incomplete_defs = true disallow_subclassing_any = true @@ -2786,7 +2643,7 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.manual.*] +[mypy-homeassistant.components.map.*] check_untyped_defs = true disallow_incomplete_defs = true disallow_subclassing_any = true @@ -2936,16 +2793,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.mold_indicator.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls 
= true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.monzo.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2996,16 +2843,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.music_assistant.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.my.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3056,16 +2893,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.nasweb.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.neato.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3156,16 +2983,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.nordpool.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.notify.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3236,16 +3053,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.onkyo.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.open_meteo.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3256,16 +3063,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.openai_conversation.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.openexchangerates.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3336,16 +3133,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.panel_custom.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.peco.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3516,16 +3303,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.radio_browser.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - 
[mypy-homeassistant.components.rainforest_raven.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3806,16 +3583,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.script.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.search.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3866,16 +3633,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.sensoterra.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.senz.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3896,16 +3653,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.shell_command.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.shelly.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3996,16 +3743,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.smlight.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.snooz.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -4016,16 +3753,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.solarlog.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.sonarr.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -4046,17 +3773,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.spotify.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true -no_implicit_reexport = true - [mypy-homeassistant.components.sql.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -4067,16 +3783,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.squeezebox.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.ssdp.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -4198,16 +3904,6 @@ 
disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.switch_as_x.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.switchbee.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -4679,16 +4375,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.uvc.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.vacuum.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -4889,16 +4575,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.workday.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.worldclock.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -4992,6 +4668,9 @@ warn_unreachable = true [mypy-homeassistant.components.application_credentials.*] no_implicit_reexport = true +[mypy-homeassistant.components.spotify.*] +no_implicit_reexport = true + [mypy-tests.*] check_untyped_defs = false disallow_incomplete_defs = false diff --git a/pylint/plugins/hass_decorator.py b/pylint/plugins/hass_decorator.py deleted file mode 100644 index 7e509776a86..00000000000 --- a/pylint/plugins/hass_decorator.py +++ /dev/null @@ -1,119 +0,0 @@ -"""Plugin to check decorators.""" - -from __future__ import annotations - -from astroid import nodes -from pylint.checkers import BaseChecker -from pylint.lint import PyLinter - - -class HassDecoratorChecker(BaseChecker): - """Checker for decorators.""" - - name = "hass_decorator" - priority = -1 - msgs = { - "W7471": ( - "A coroutine function should not be decorated with @callback", - "hass-async-callback-decorator", - "Used when a coroutine function has an invalid @callback decorator", - ), - "W7472": ( - "Fixture %s is invalid here, please %s", - "hass-pytest-fixture-decorator", - "Used when a pytest fixture is invalid", - ), - } - - def _get_pytest_fixture_node(self, node: nodes.FunctionDef) -> nodes.Call | None: - for decorator in node.decorators.nodes: - if ( - isinstance(decorator, nodes.Call) - and decorator.func.as_string() == "pytest.fixture" - ): - return decorator - - return None - - def _get_pytest_fixture_node_keyword( - self, decorator: nodes.Call, search_arg: str - ) -> nodes.Keyword | None: - for keyword in decorator.keywords: - if keyword.arg == search_arg: - return keyword - - return None - - def _check_pytest_fixture( - self, node: nodes.FunctionDef, decoratornames: set[str] - ) -> None: - if ( - "_pytest.fixtures.FixtureFunctionMarker" not in decoratornames - or not (root_name := node.root().name).startswith("tests.") - or (decorator := self._get_pytest_fixture_node(node)) is None - or not ( - scope_keyword := self._get_pytest_fixture_node_keyword( - decorator, "scope" - ) - ) - or not isinstance(scope_keyword.value, nodes.Const) - or not (scope := scope_keyword.value.value) - ): - 
return - - parts = root_name.split(".") - test_component: str | None = None - if root_name.startswith("tests.components.") and parts[2] != "conftest": - test_component = parts[2] - - if scope == "session": - if test_component: - self.add_message( - "hass-pytest-fixture-decorator", - node=decorator, - args=("scope `session`", "use `package` or lower"), - ) - return - if not ( - autouse_keyword := self._get_pytest_fixture_node_keyword( - decorator, "autouse" - ) - ) or ( - isinstance(autouse_keyword.value, nodes.Const) - and not autouse_keyword.value.value - ): - self.add_message( - "hass-pytest-fixture-decorator", - node=decorator, - args=( - "scope/autouse combination", - "set `autouse=True` or reduce scope", - ), - ) - return - - test_module = parts[3] if len(parts) > 3 else "" - - if test_component and scope == "package" and test_module != "conftest": - self.add_message( - "hass-pytest-fixture-decorator", - node=decorator, - args=("scope `package`", "use `module` or lower"), - ) - - def visit_asyncfunctiondef(self, node: nodes.AsyncFunctionDef) -> None: - """Apply checks on an AsyncFunctionDef node.""" - if decoratornames := node.decoratornames(): - if "homeassistant.core.callback" in decoratornames: - self.add_message("hass-async-callback-decorator", node=node) - self._check_pytest_fixture(node, decoratornames) - - def visit_functiondef(self, node: nodes.FunctionDef) -> None: - """Apply checks on an AsyncFunctionDef node.""" - if decoratornames := node.decoratornames(): - self._check_pytest_fixture(node, decoratornames) - - -def register(linter: PyLinter) -> None: - """Register the checker.""" - linter.register_checker(HassDecoratorChecker(linter)) diff --git a/pylint/plugins/hass_enforce_class_module.py b/pylint/plugins/hass_enforce_class_module.py deleted file mode 100644 index 09fe61b68c6..00000000000 --- a/pylint/plugins/hass_enforce_class_module.py +++ /dev/null @@ -1,168 +0,0 @@ -"""Plugin for checking if class is in correct module.""" - -from __future__ import annotations - -from astroid import nodes -from pylint.checkers import BaseChecker -from pylint.lint import PyLinter - -from homeassistant.const import Platform - -_BASE_ENTITY_MODULES: set[str] = { - "BaseCoordinatorEntity", - "CoordinatorEntity", - "Entity", - "EntityDescription", - "ManualTriggerEntity", - "RestoreEntity", - "ToggleEntity", - "ToggleEntityDescription", - "TriggerBaseEntity", -} -_MODULES: dict[str, set[str]] = { - "air_quality": {"AirQualityEntity"}, - "alarm_control_panel": { - "AlarmControlPanelEntity", - "AlarmControlPanelEntityDescription", - }, - "assist_satellite": {"AssistSatelliteEntity", "AssistSatelliteEntityDescription"}, - "binary_sensor": {"BinarySensorEntity", "BinarySensorEntityDescription"}, - "button": {"ButtonEntity", "ButtonEntityDescription"}, - "calendar": {"CalendarEntity", "CalendarEntityDescription"}, - "camera": {"Camera", "CameraEntityDescription"}, - "climate": {"ClimateEntity", "ClimateEntityDescription"}, - "coordinator": {"DataUpdateCoordinator"}, - "conversation": {"ConversationEntity"}, - "cover": {"CoverEntity", "CoverEntityDescription"}, - "date": {"DateEntity", "DateEntityDescription"}, - "datetime": {"DateTimeEntity", "DateTimeEntityDescription"}, - "device_tracker": { - "DeviceTrackerEntity", - "ScannerEntity", - "ScannerEntityDescription", - "TrackerEntity", - "TrackerEntityDescription", - }, - "event": {"EventEntity", "EventEntityDescription"}, - "fan": {"FanEntity", "FanEntityDescription"}, - "geo_location": {"GeolocationEvent"}, - "humidifier": {"HumidifierEntity", 
"HumidifierEntityDescription"}, - "image": {"ImageEntity", "ImageEntityDescription"}, - "image_processing": { - "ImageProcessingEntity", - "ImageProcessingFaceEntity", - "ImageProcessingEntityDescription", - }, - "lawn_mower": {"LawnMowerEntity", "LawnMowerEntityDescription"}, - "light": {"LightEntity", "LightEntityDescription"}, - "lock": {"LockEntity", "LockEntityDescription"}, - "media_player": {"MediaPlayerEntity", "MediaPlayerEntityDescription"}, - "notify": {"NotifyEntity", "NotifyEntityDescription"}, - "number": {"NumberEntity", "NumberEntityDescription", "RestoreNumber"}, - "remote": {"RemoteEntity", "RemoteEntityDescription"}, - "select": {"SelectEntity", "SelectEntityDescription"}, - "sensor": {"RestoreSensor", "SensorEntity", "SensorEntityDescription"}, - "siren": {"SirenEntity", "SirenEntityDescription"}, - "stt": {"SpeechToTextEntity"}, - "switch": {"SwitchEntity", "SwitchEntityDescription"}, - "text": {"TextEntity", "TextEntityDescription"}, - "time": {"TimeEntity", "TimeEntityDescription"}, - "todo": {"TodoListEntity"}, - "tts": {"TextToSpeechEntity"}, - "update": {"UpdateEntity", "UpdateEntityDescription"}, - "vacuum": {"StateVacuumEntity", "VacuumEntity", "VacuumEntityDescription"}, - "wake_word": {"WakeWordDetectionEntity"}, - "water_heater": {"WaterHeaterEntity"}, - "weather": { - "CoordinatorWeatherEntity", - "SingleCoordinatorWeatherEntity", - "WeatherEntity", - "WeatherEntityDescription", - }, -} -_ENTITY_COMPONENTS: set[str] = {platform.value for platform in Platform}.union( - { - "alert", - "automation", - "counter", - "dominos", - "input_boolean", - "input_button", - "input_datetime", - "input_number", - "input_select", - "input_text", - "microsoft_face", - "person", - "plant", - "remember_the_milk", - "schedule", - "script", - "tag", - "timer", - } -) - - -_MODULE_CLASSES = { - class_name for classes in _MODULES.values() for class_name in classes -} - - -class HassEnforceClassModule(BaseChecker): - """Checker for class in correct module.""" - - name = "hass_enforce_class_module" - priority = -1 - msgs = { - "C7461": ( - "Derived %s is recommended to be placed in the '%s' module", - "hass-enforce-class-module", - "Used when derived class should be placed in its own module.", - ), - } - - def visit_classdef(self, node: nodes.ClassDef) -> None: - """Check if derived class is placed in its own module.""" - root_name = node.root().name - - # we only want to check components - if not root_name.startswith("homeassistant.components."): - return - parts = root_name.split(".") - current_integration = parts[2] - current_module = parts[3] if len(parts) > 3 else "" - - ancestors = list(node.ancestors()) - - if current_module != "entity" and current_integration not in _ENTITY_COMPONENTS: - top_level_ancestors = list(node.ancestors(recurs=False)) - - for ancestor in top_level_ancestors: - if ancestor.name in _BASE_ENTITY_MODULES and not any( - anc.name in _MODULE_CLASSES for anc in ancestors - ): - self.add_message( - "hass-enforce-class-module", - node=node, - args=(ancestor.name, "entity"), - ) - return - - for expected_module, classes in _MODULES.items(): - if expected_module in (current_module, current_integration): - continue - - for ancestor in ancestors: - if ancestor.name in classes: - self.add_message( - "hass-enforce-class-module", - node=node, - args=(ancestor.name, expected_module), - ) - return - - -def register(linter: PyLinter) -> None: - """Register the checker.""" - linter.register_checker(HassEnforceClassModule(linter)) diff --git 
a/pylint/plugins/hass_enforce_coordinator_module.py b/pylint/plugins/hass_enforce_coordinator_module.py new file mode 100644 index 00000000000..7160a25085d --- /dev/null +++ b/pylint/plugins/hass_enforce_coordinator_module.py @@ -0,0 +1,40 @@ +"""Plugin for checking if coordinator is in its own module.""" + +from __future__ import annotations + +from astroid import nodes +from pylint.checkers import BaseChecker +from pylint.lint import PyLinter + + +class HassEnforceCoordinatorModule(BaseChecker): + """Checker for coordinators own module.""" + + name = "hass_enforce_coordinator_module" + priority = -1 + msgs = { + "C7461": ( + "Derived data update coordinator is recommended to be placed in the 'coordinator' module", + "hass-enforce-coordinator-module", + "Used when derived data update coordinator should be placed in its own module.", + ), + } + + def visit_classdef(self, node: nodes.ClassDef) -> None: + """Check if derived data update coordinator is placed in its own module.""" + root_name = node.root().name + + # we only want to check component update coordinators + if not root_name.startswith("homeassistant.components"): + return + + is_coordinator_module = root_name.endswith(".coordinator") + for ancestor in node.ancestors(): + if ancestor.name == "DataUpdateCoordinator" and not is_coordinator_module: + self.add_message("hass-enforce-coordinator-module", node=node) + return + + +def register(linter: PyLinter) -> None: + """Register the checker.""" + linter.register_checker(HassEnforceCoordinatorModule(linter)) diff --git a/pylint/plugins/hass_enforce_type_hints.py b/pylint/plugins/hass_enforce_type_hints.py index a837650f3b5..f5d5b86635a 100644 --- a/pylint/plugins/hass_enforce_type_hints.py +++ b/pylint/plugins/hass_enforce_type_hints.py @@ -28,8 +28,6 @@ _KNOWN_GENERIC_TYPES: set[str] = { } _KNOWN_GENERIC_TYPES_TUPLE = tuple(_KNOWN_GENERIC_TYPES) -_FORCE_ANNOTATION_PLATFORMS = ["config_flow"] - class _Special(Enum): """Sentinel values.""" @@ -102,14 +100,13 @@ _TEST_FIXTURES: dict[str, list[str] | str] = { "aiohttp_client": "ClientSessionGenerator", "aiohttp_server": "Callable[[], TestServer]", "area_registry": "AreaRegistry", - "async_test_recorder": "RecorderInstanceGenerator", + "async_setup_recorder_instance": "RecorderInstanceGenerator", "caplog": "pytest.LogCaptureFixture", "capsys": "pytest.CaptureFixture[str]", "current_request_with_host": "None", "device_registry": "DeviceRegistry", "enable_bluetooth": "None", "enable_custom_integrations": "None", - "enable_missing_statistics": "bool", "enable_nightly_purge": "bool", "enable_statistics": "bool", "enable_schema_validation": "bool", @@ -149,7 +146,6 @@ _TEST_FIXTURES: dict[str, list[str] | str] = { "mock_tts_get_cache_files": "MagicMock", "mock_tts_init_cache_dir": "MagicMock", "mock_zeroconf": "MagicMock", - "monkeypatch": "pytest.MonkeyPatch", "mqtt_client_mock": "MqttMockPahoClient", "mqtt_mock": "MqttMockHAClient", "mqtt_mock_entry": "MqttMockHAClientGenerator", @@ -1318,7 +1314,7 @@ _INHERITANCE_MATCH: dict[str, list[ClassTypeHintMatch]] = { ), TypeHintMatch( function_name="source_type", - return_type="SourceType", + return_type=["SourceType", "str"], ), ], ), @@ -1763,6 +1759,39 @@ _INHERITANCE_MATCH: dict[str, list[ClassTypeHintMatch]] = { ], ), ], + "mailbox": [ + ClassTypeHintMatch( + base_class="Mailbox", + matches=[ + TypeHintMatch( + function_name="media_type", + return_type="str", + ), + TypeHintMatch( + function_name="can_delete", + return_type="bool", + ), + TypeHintMatch( + function_name="has_media", + 
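
Note: the replacement hass_enforce_coordinator_module checker above only inspects DataUpdateCoordinator ancestors and the module name. A hedged sketch of the layout it nudges integrations toward (integration and class names invented; constructor arguments shown as commonly used, check the current helper for exact parameters):

# homeassistant/components/my_integration/coordinator.py
from datetime import timedelta
import logging

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


class MyIntegrationCoordinator(DataUpdateCoordinator[dict]):
    """Defined in coordinator.py, so hass-enforce-coordinator-module stays quiet."""

    def __init__(self, hass: HomeAssistant) -> None:
        super().__init__(
            hass,
            _LOGGER,
            name="my_integration",
            update_interval=timedelta(minutes=5),
        )

    async def _async_update_data(self) -> dict:
        return {}
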
return_type="bool", + ), + TypeHintMatch( + function_name="async_get_media", + arg_types={1: "str"}, + return_type="bytes", + ), + TypeHintMatch( + function_name="async_get_messages", + return_type="list[dict[str, Any]]", + ), + TypeHintMatch( + function_name="async_delete", + arg_types={1: "str"}, + return_type="bool", + ), + ], + ), + ], "media_player": [ ClassTypeHintMatch( base_class="Entity", @@ -3110,7 +3139,6 @@ class HassTypeHintChecker(BaseChecker): _class_matchers: list[ClassTypeHintMatch] _function_matchers: list[TypeHintMatch] _module_node: nodes.Module - _module_platform: str | None _in_test_module: bool def visit_module(self, node: nodes.Module) -> None: @@ -3118,22 +3146,24 @@ class HassTypeHintChecker(BaseChecker): self._class_matchers = [] self._function_matchers = [] self._module_node = node - self._module_platform = _get_module_platform(node.name) self._in_test_module = node.name.startswith("tests.") - if self._in_test_module or self._module_platform is None: + if ( + self._in_test_module + or (module_platform := _get_module_platform(node.name)) is None + ): return - if self._module_platform in _PLATFORMS: + if module_platform in _PLATFORMS: self._function_matchers.extend(_FUNCTION_MATCH["__any_platform__"]) - if function_matches := _FUNCTION_MATCH.get(self._module_platform): + if function_matches := _FUNCTION_MATCH.get(module_platform): self._function_matchers.extend(function_matches) - if class_matches := _CLASS_MATCH.get(self._module_platform): + if class_matches := _CLASS_MATCH.get(module_platform): self._class_matchers.extend(class_matches) - if property_matches := _INHERITANCE_MATCH.get(self._module_platform): + if property_matches := _INHERITANCE_MATCH.get(module_platform): self._class_matchers.extend(property_matches) self._class_matchers.reverse() @@ -3143,12 +3173,7 @@ class HassTypeHintChecker(BaseChecker): ) -> bool: """Check if we can skip the function validation.""" return ( - # test modules are excluded from ignore_missing_annotations - not self._in_test_module - # some modules have checks forced - and self._module_platform not in _FORCE_ANNOTATION_PLATFORMS - # other modules are only checked ignore_missing_annotations - and self.linter.config.ignore_missing_annotations + self.linter.config.ignore_missing_annotations and node.returns is None and not _has_valid_annotations(annotations) ) diff --git a/pylint/plugins/hass_imports.py b/pylint/plugins/hass_imports.py index c6a869dd7fc..3ec8b6c3cd9 100644 --- a/pylint/plugins/hass_imports.py +++ b/pylint/plugins/hass_imports.py @@ -19,12 +19,6 @@ class ObsoleteImportMatch: _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { - "functools": [ - ObsoleteImportMatch( - reason="replaced by propcache.cached_property", - constant=re.compile(r"^cached_property$"), - ), - ], "homeassistant.backports.enum": [ ObsoleteImportMatch( reason="We can now use the Python 3.11 provided enum.StrEnum instead", @@ -33,7 +27,10 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { ], "homeassistant.backports.functools": [ ObsoleteImportMatch( - reason="replaced by propcache.cached_property", + reason=( + "We can now use the Python 3.12 provided " + "functools.cached_property instead" + ), constant=re.compile(r"^cached_property$"), ), ], @@ -395,33 +392,14 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { constant=re.compile(r"^IMPERIAL_SYSTEM$"), ), ], + "homeassistant.util.json": [ + ObsoleteImportMatch( + reason="moved to homeassistant.helpers.json", + 
constant=re.compile(r"^save_json|find_paths_unserializable_data$"), + ), + ], } -_IGNORE_ROOT_IMPORT = ( - "assist_pipeline", - "automation", - "bluetooth", - "camera", - "cast", - "device_automation", - "device_tracker", - "ffmpeg", - "ffmpeg_motion", - "google_assistant", - "hardware", - "homeassistant", - "homeassistant_hardware", - "http", - "manual", - "plex", - "recorder", - "rest", - "script", - "sensor", - "stream", - "zha", -) - # Blacklist of imports that should be using the namespace @dataclass @@ -488,11 +466,6 @@ class HassImportsFormatChecker(BaseChecker): "hass-helper-namespace-import", "Used when a helper should be used via the namespace", ), - "W7426": ( - "`%s` should be imported using an alias, such as `%s as %s`", - "hass-import-constant-alias", - "Used when a constant should be imported as an alias", - ), } options = () @@ -517,9 +490,8 @@ class HassImportsFormatChecker(BaseChecker): if module.startswith(f"{self.current_package}."): self.add_message("hass-relative-import", node=node) continue - if ( - module.startswith("homeassistant.components.") - and len(module.split(".")) > 3 + if module.startswith("homeassistant.components.") and module.endswith( + "const" ): if ( self.current_package.startswith("tests.components.") @@ -551,85 +523,6 @@ class HassImportsFormatChecker(BaseChecker): if len(split_package) < node.level + 2: self.add_message("hass-absolute-import", node=node) - def _check_for_constant_alias( - self, - node: nodes.ImportFrom, - current_component: str | None, - imported_component: str, - ) -> bool: - """Check for hass-import-constant-alias.""" - if current_component == imported_component: - return True - - # Check for `from homeassistant.components.other import DOMAIN` - for name, alias in node.names: - if name == "DOMAIN" and (alias is None or alias == "DOMAIN"): - self.add_message( - "hass-import-constant-alias", - node=node, - args=( - "DOMAIN", - "DOMAIN", - f"{imported_component.upper()}_DOMAIN", - ), - ) - return False - - return True - - def _check_for_component_root_import( - self, - node: nodes.ImportFrom, - current_component: str | None, - imported_parts: list[str], - imported_component: str, - ) -> bool: - """Check for hass-component-root-import.""" - if ( - current_component == imported_component - or imported_component in _IGNORE_ROOT_IMPORT - ): - return True - - # Check for `from homeassistant.components.other.module import something` - if len(imported_parts) > 3: - self.add_message("hass-component-root-import", node=node) - return False - - # Check for `from homeassistant.components.other import const` - for name, _ in node.names: - if name == "const": - self.add_message("hass-component-root-import", node=node) - return False - - return True - - def _check_for_relative_import( - self, - current_package: str, - node: nodes.ImportFrom, - current_component: str | None, - ) -> bool: - """Check for hass-relative-import.""" - if node.modname == current_package or node.modname.startswith( - f"{current_package}." 
- ): - self.add_message("hass-relative-import", node=node) - return False - - for root in ("homeassistant", "tests"): - if current_package.startswith(f"{root}.components."): - if node.modname == f"{root}.components": - for name in node.names: - if name[0] == current_component: - self.add_message("hass-relative-import", node=node) - return False - elif node.modname.startswith(f"{root}.components.{current_component}."): - self.add_message("hass-relative-import", node=node) - return False - - return True - def visit_importfrom(self, node: nodes.ImportFrom) -> None: """Check for improper 'from _ import _' invocations.""" if not self.current_package: @@ -637,36 +530,35 @@ class HassImportsFormatChecker(BaseChecker): if node.level is not None: self._visit_importfrom_relative(self.current_package, node) return - - # Cache current component - current_component: str | None = None + if node.modname == self.current_package or node.modname.startswith( + f"{self.current_package}." + ): + self.add_message("hass-relative-import", node=node) + return for root in ("homeassistant", "tests"): if self.current_package.startswith(f"{root}.components."): current_component = self.current_package.split(".")[2] - - # Checks for hass-relative-import - if not self._check_for_relative_import( - self.current_package, node, current_component + if node.modname == f"{root}.components": + for name in node.names: + if name[0] == current_component: + self.add_message("hass-relative-import", node=node) + return + if node.modname.startswith(f"{root}.components.{current_component}."): + self.add_message("hass-relative-import", node=node) + return + if node.modname.startswith("homeassistant.components.") and ( + node.modname.endswith(".const") + or "const" in {names[0] for names in node.names} ): + if ( + self.current_package.startswith("tests.components.") + and self.current_package.split(".")[2] == node.modname.split(".")[2] + ): + # Ignore check if the component being tested matches + # the component being imported from + return + self.add_message("hass-component-root-import", node=node) return - - if node.modname.startswith("homeassistant.components."): - imported_parts = node.modname.split(".") - imported_component = imported_parts[2] - - # Checks for hass-component-root-import - if not self._check_for_component_root_import( - node, current_component, imported_parts, imported_component - ): - return - - # Checks for hass-import-constant-alias - if not self._check_for_constant_alias( - node, current_component, imported_component - ): - return - - # Checks for hass-deprecated-import if obsolete_imports := _OBSOLETE_IMPORT.get(node.modname): for name_tuple in node.names: for obsolete_import in obsolete_imports: @@ -676,8 +568,6 @@ class HassImportsFormatChecker(BaseChecker): node=node, args=(import_match.string, obsolete_import.reason), ) - - # Checks for hass-helper-namespace-import if namespace_alias := _FORCE_NAMESPACE_IMPORT.get(node.modname): for name in node.names: if name[0] in namespace_alias.names: diff --git a/pyproject.toml b/pyproject.toml index ebf22a93d7d..f81013aa8b5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,10 @@ [build-system] -requires = ["setuptools==75.1.0"] +requires = ["setuptools==69.2.0", "wheel~=0.43.0"] build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.12.0.dev0" +version = "2024.8.0.dev0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" @@ -19,68 +19,58 @@ classifiers = [ "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", "Topic :: Home Automation", ] requires-python = ">=3.12.0" dependencies = [ "aiodns==3.2.0", - # Integrations may depend on hassio integration without listing it to - # change behavior based on presence of supervisor. Deprecated with #127228 - # Lib can be removed with 2025.11 - "aiohasupervisor==0.2.1", - "aiohttp==3.11.0", + "aiohttp==3.9.5", "aiohttp_cors==0.7.0", + "aiohttp-fast-url-dispatcher==0.3.0", "aiohttp-fast-zlib==0.1.1", "aiozoneinfo==0.2.1", "astral==2.2", - "async-interrupt==1.2.0", - "attrs==24.2.0", + "async-interrupt==1.1.2", + "attrs==23.2.0", "atomicwrites-homeassistant==1.4.1", - "audioop-lts==0.2.1;python_version>='3.13'", "awesomeversion==24.6.0", - "bcrypt==4.2.0", + "bcrypt==4.1.2", "certifi>=2021.5.30", "ciso8601==2.3.1", - "fnv-hash-fast==1.0.2", + "fnv-hash-fast==0.5.0", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration - "hass-nabucasa==0.84.0", + "hass-nabucasa==0.81.1", # When bumping httpx, please check the version pins of # httpcore, anyio, and h11 in gen_requirements_all - "httpx==0.27.2", - "home-assistant-bluetooth==1.13.0", + "httpx==0.27.0", + "home-assistant-bluetooth==1.12.2", "ifaddr==0.2.0", "Jinja2==3.1.4", "lru-dict==1.3.0", - "PyJWT==2.9.0", + "PyJWT==2.8.0", # PyJWT has loose dependency. We want the latest one. - "cryptography==43.0.1", - "Pillow==11.0.0", - "propcache==0.2.0", - "pyOpenSSL==24.2.1", - "orjson==3.10.11", + "cryptography==42.0.8", + "Pillow==10.3.0", + "pyOpenSSL==24.1.0", + "orjson==3.9.15", "packaging>=23.1", + "pip>=21.3.1", "psutil-home-assistant==0.0.1", "python-slugify==8.0.4", - "PyYAML==6.0.2", + "PyYAML==6.0.1", "requests==2.32.3", - "securetar==2024.2.1", "SQLAlchemy==2.0.31", - "standard-aifc==3.13.0;python_version>='3.13'", - "standard-telnetlib==3.13.0;python_version>='3.13'", "typing-extensions>=4.12.2,<5.0", - "ulid-transform==1.0.2", + "ulid-transform==0.9.0", # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 # https://github.com/home-assistant/core/issues/97248 "urllib3>=1.26.5,<2", - "uv==0.5.0", - "voluptuous==0.15.2", + "voluptuous==0.13.1", "voluptuous-serialize==2.6.0", - "voluptuous-openapi==0.0.5", - "yarl==1.17.1", - "webrtc-models==0.2.0", + "voluptuous-openapi==0.0.4", + "yarl==1.9.4", ] [project.urls] @@ -120,8 +110,7 @@ init-hook = """\ load-plugins = [ "pylint.extensions.code_style", "pylint.extensions.typing", - "hass_decorator", - "hass_enforce_class_module", + "hass_enforce_coordinator_module", "hass_enforce_sorted_platforms", "hass_enforce_super_call", "hass_enforce_type_hints", @@ -160,6 +149,7 @@ class-const-naming-style = "any" # inconsistent-return-statements - doesn't handle raise # too-many-ancestors - it's too strict. 
# wrong-import-order - isort guards this +# consider-using-f-string - str.format sometimes more readable # possibly-used-before-assignment - too many errors / not necessarily issues # --- # Pylint CodeStyle plugin @@ -181,8 +171,8 @@ disable = [ "too-many-locals", "too-many-public-methods", "too-many-boolean-expressions", - "too-many-positional-arguments", "wrong-import-order", + "consider-using-f-string", "consider-using-namedtuple-or-dataclass", "consider-using-assignment-expr", "possibly-used-before-assignment", @@ -195,7 +185,6 @@ disable = [ "bidirectional-unicode", # PLE2502 "continue-in-finally", # PLE0116 "duplicate-bases", # PLE0241 - "misplaced-bare-raise", # PLE0704 "format-needs-mapping", # F502 "function-redefined", # F811 # Needed because ruff does not understand type of __all__ generated by a function @@ -323,8 +312,6 @@ disable = [ "no-else-return", # RET505 "broad-except", # BLE001 "protected-access", # SLF001 - "broad-exception-raised", # TRY002 - "consider-using-f-string", # PLC0209 # "no-self-use", # PLR6301 # Optional plugin, not enabled # Handled by mypy @@ -455,7 +442,6 @@ norecursedirs = [ log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s" log_date_format = "%Y-%m-%d %H:%M:%S" asyncio_mode = "auto" -asyncio_default_fixture_loop_scope = "function" filterwarnings = [ "error::sqlalchemy.exc.SAWarning", @@ -471,14 +457,16 @@ filterwarnings = [ # Ignore custom pytest marks "ignore:Unknown pytest.mark.disable_autouse_fixture:pytest.PytestUnknownMarkWarning:tests.components.met", "ignore:Unknown pytest.mark.dataset:pytest.PytestUnknownMarkWarning:tests.components.screenlogic", - # https://github.com/rokam/sunweg/blob/3.1.0/sunweg/plant.py#L96 - v3.1.0 - 2024-10-02 + # https://github.com/rokam/sunweg/blob/3.0.1/sunweg/plant.py#L96 - v3.0.1 - 2024-05-29 "ignore:The '(kwh_per_kwp|performance_rate)' property is deprecated and will return 0:DeprecationWarning:tests.components.sunweg.test_init", # -- design choice 3rd party - # https://github.com/gwww/elkm1/blob/2.2.10/elkm1_lib/util.py#L8-L19 + # https://github.com/gwww/elkm1/blob/2.2.7/elkm1_lib/util.py#L8-L19 "ignore:ssl.TLSVersion.TLSv1 is deprecated:DeprecationWarning:elkm1_lib.util", + # https://github.com/michaeldavie/env_canada/blob/v0.6.2/env_canada/ec_cache.py + "ignore:Inheritance class CacheClientSession from ClientSession is discouraged:DeprecationWarning:env_canada.ec_cache", # https://github.com/allenporter/ical/pull/215 - # https://github.com/allenporter/ical/blob/8.2.0/ical/util.py#L21-L23 + # https://github.com/allenporter/ical/blob/8.0.0/ical/util.py#L20-L22 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:ical.util", # https://github.com/bachya/regenmaschine/blob/2024.03.0/regenmaschine/client.py#L52 "ignore:ssl.TLSVersion.SSLv3 is deprecated:DeprecationWarning:regenmaschine.client", @@ -490,13 +478,13 @@ filterwarnings = [ "ignore:Deprecated call to `pkg_resources.declare_namespace\\(('azure'|'google.*'|'pywinusb'|'repoze'|'xbox'|'zope')\\)`:DeprecationWarning:pkg_resources", # -- tracked upstream / open PRs - # - pyOpenSSL v24.2.1 - # https://github.com/certbot/certbot/issues/9828 - v2.11.0 - # https://github.com/certbot/certbot/issues/9992 + # https://github.com/certbot/certbot/issues/9828 - v2.10.0 "ignore:X509Extension support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - "ignore:CSR support in pyOpenSSL is deprecated. 
You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:josepy.util", - # - other + # https://github.com/influxdata/influxdb-client-python/issues/603 - v1.42.0 + # https://github.com/influxdata/influxdb-client-python/pull/652 + "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb_client.client.write.point", + # https://github.com/beetbox/mediafile/issues/67 - v0.12.0 + "ignore:'imghdr' is deprecated and slated for removal in Python 3.13:DeprecationWarning:mediafile", # https://github.com/foxel/python_ndms2_client/issues/6 - v0.1.3 # https://github.com/foxel/python_ndms2_client/pull/8 "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:ndms2_client.connection", @@ -504,8 +492,6 @@ filterwarnings = [ # -- fixed, waiting for release / update # https://github.com/bachya/aiopurpleair/pull/200 - >=2023.10.0 "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aiopurpleair.helpers.validators", - # https://bugs.launchpad.net/beautifulsoup/+bug/2076897 - >4.12.3 - "ignore:The 'strip_cdata' option of HTMLParser\\(\\) has never done anything and will eventually be removed:DeprecationWarning:bs4.builder._lxml", # https://github.com/DataDog/datadogpy/pull/290 - >=0.23.0 "ignore:invalid escape sequence:SyntaxWarning:.*datadog.dogstatsd.base", # https://github.com/DataDog/datadogpy/pull/566/files - >=0.37.0 @@ -514,9 +500,8 @@ filterwarnings = [ "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:devialet.devialet_api", # https://github.com/httplib2/httplib2/pull/226 - >=0.21.0 "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:httplib2", - # https://github.com/influxdata/influxdb-client-python/issues/603 >=1.45.0 - # https://github.com/influxdata/influxdb-client-python/pull/652 - "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb_client.client.write.point", + # https://github.com/jaraco/jaraco.abode/commit/9e3e789efc96cddcaa15f920686bbeb79a7469e0 - update jaraco.abode to >=5.1.0 + "ignore:`jaraco.functools.call_aside` is deprecated, use `jaraco.functools.invoke` instead:DeprecationWarning:jaraco.abode.helpers.timeline", # https://github.com/majuss/lupupy/pull/15 - >0.3.2 "ignore:\"is not\" with 'str' literal. 
Did you mean \"!=\"?:SyntaxWarning:.*lupupy.devices.alarm", # https://github.com/nextcord/nextcord/pull/1095 - >2.6.1 @@ -531,10 +516,16 @@ filterwarnings = [ "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.miioprotocol", # https://github.com/hunterjm/python-onvif-zeep-async/pull/51 - >3.1.12 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:onvif.client", + # https://github.com/googleapis/python-pubsub/commit/060f00bcea5cd129be3a2d37078535cc97b4f5e8 - >=2.13.12 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:google.pubsub_v1.services.publisher.client", # https://github.com/okunishinishi/python-stringcase/commit/6a5c5bbd3fe5337862abc7fd0853a0f36e18b2e1 - >1.2.0 "ignore:invalid escape sequence:SyntaxWarning:.*stringcase", - # https://github.com/cereal2nd/velbus-aio/pull/126 - >2024.10.0 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:velbusaio.handler", + # https://github.com/mvantellingen/python-zeep/pull/1364 - >4.2.1 + "ignore:'cgi' is deprecated and slated for removal in Python 3.13:DeprecationWarning:zeep.utils", + # https://github.com/timmo001/system-bridge-connector/pull/27 - >=4.1.0 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:systembridgeconnector.version", + # https://github.com/jschlyter/ttls/commit/d64f1251397b8238cf6a35bea64784de25e3386c - >=1.8.1 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:ttls", # -- fixed for Python 3.13 # https://github.com/rhasspy/wyoming/commit/e34af30d455b6f2bb9e5cfb25fad8d276914bc54 - >=1.4.2 @@ -542,9 +533,10 @@ filterwarnings = [ # -- other # Locale changes might take some time to resolve upstream - # https://github.com/Squachen/micloud/blob/v_0.6/micloud/micloud.py#L35 - v0.6 - 2022-12-08 "ignore:'locale.getdefaultlocale' is deprecated and slated for removal in Python 3.15:DeprecationWarning:micloud.micloud", - # https://github.com/MatsNl/pyatag/issues/11 - v0.3.7.1 - 2023-10-09 + # https://github.com/protocolbuffers/protobuf - v4.25.1 + "ignore:Type google._upb._message.(Message|Scalar)MapContainer uses PyType_Spec with a metaclass that has custom tp_new. .* Python 3.14:DeprecationWarning", + # https://github.com/MatsNl/pyatag/issues/11 - v0.3.7.1 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pyatag.gateway", # https://github.com/lidatong/dataclasses-json/issues/328 # https://github.com/lidatong/dataclasses-json/pull/351 @@ -552,19 +544,14 @@ filterwarnings = [ # https://pypi.org/project/emulated-roku/ - v0.3.0 - 2023-12-19 # https://github.com/martonperei/emulated_roku "ignore:loop argument is deprecated:DeprecationWarning:emulated_roku", - # https://github.com/w1ll1am23/pyeconet/blob/v0.1.23/src/pyeconet/api.py#L38 - v0.1.23 - 2024-10-08 - "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:pyeconet.api", - # https://github.com/thecynic/pylutron - v0.2.16 - 2024-10-22 + # https://github.com/thecynic/pylutron - v0.2.13 "ignore:setDaemon\\(\\) is deprecated, set the daemon attribute instead:DeprecationWarning:pylutron", - # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 - 2024-02-24 + # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pynuki.utils", - # https://github.com/lextudio/pysnmp/blob/v7.1.10/pysnmp/smi/compiler.py#L23-L31 - v7.1.10 - 2024-11-04 - "ignore:smiV1Relaxed is deprecated. 
Please use smi_v1_relaxed instead:DeprecationWarning:pysnmp.smi.compiler", - "ignore:getReadersFromUrls is deprecated. Please use get_readers_from_urls instead:DeprecationWarning:pysmi.reader.url", # wrong stacklevel # https://github.com/briis/pyweatherflowudp/blob/v1.4.5/pyweatherflowudp/const.py#L20 - v1.4.5 - 2023-10-10 "ignore:This function will be removed in future versions of pint:DeprecationWarning:pyweatherflowudp.const", # Wrong stacklevel - # https://bugs.launchpad.net/beautifulsoup/+bug/2034451 fixed in >4.12.3 + # https://bugs.launchpad.net/beautifulsoup/+bug/2034451 "ignore:It looks like you're parsing an XML document using an HTML parser:UserWarning:html.parser", # New in aiohttp - v3.9.0 "ignore:It is recommended to use web.AppKey instances for keys:UserWarning:(homeassistant|tests|aiohttp_cors)", @@ -572,9 +559,6 @@ filterwarnings = [ # https://pypi.org/project/aprslib/ - v0.7.2 - 2022-07-10 "ignore:invalid escape sequence:SyntaxWarning:.*aprslib.parsing.common", "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aprslib.parsing.common", - # https://pypi.org/project/panasonic-viera/ - v0.4.2 - 2024-04-24 - # https://github.com/florianholzapfel/panasonic-viera/blob/0.4.2/panasonic_viera/__init__.py#L789 - "ignore:invalid escape sequence:SyntaxWarning:.*panasonic_viera", # https://pypi.org/project/pyblackbird/ - v0.6 - 2023-03-15 # https://github.com/koolsb/pyblackbird/pull/9 -> closed "ignore:invalid escape sequence:SyntaxWarning:.*pyblackbird", @@ -585,10 +569,13 @@ filterwarnings = [ "ignore:invalid escape sequence:SyntaxWarning:.*sanix", # https://pypi.org/project/sleekxmppfs/ - v1.4.1 - 2022-08-18 "ignore:invalid escape sequence:SyntaxWarning:.*sleekxmppfs.thirdparty.mini_dateutil", # codespell:ignore thirdparty + # https://pypi.org/project/vobject/ - v0.9.7 - 2024-03-25 + # https://github.com/py-vobject/vobject + "ignore:invalid escape sequence:SyntaxWarning:.*vobject.base", # - pkg_resources # https://pypi.org/project/aiomusiccast/ - v0.14.8 - 2023-03-20 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:aiomusiccast", - # https://pypi.org/project/habitipy/ - v0.3.3 - 2024-10-28 + # https://pypi.org/project/habitipy/ - v0.3.1 - 2019-01-14 / 2024-04-28 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:habitipy.api", # https://github.com/eavanvalkenburg/pysiaalarm/blob/v3.1.1/src/pysiaalarm/data/data.py#L7 - v3.1.1 - 2023-04-17 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pysiaalarm.data.data", @@ -596,6 +583,9 @@ filterwarnings = [ "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pybotvac.version", # https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pymystrom", + # https://pypi.org/project/velbus-aio/ - v2024.4.1 - 2024-04-07 + # https://github.com/Cereal2nd/velbus-aio/blob/2024.4.1/velbusaio/handler.py#L12 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:velbusaio.handler", # -- Python 3.13 # HomeAssistant @@ -605,11 +595,14 @@ filterwarnings = [ # https://github.com/nextcord/nextcord/issues/1174 # https://github.com/nextcord/nextcord/blob/v2.6.1/nextcord/player.py#L5 "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:nextcord.player", - # https://pypi.org/project/SpeechRecognition/ - v3.11.0 - 2024-05-05 - # 
https://github.com/Uberi/speech_recognition/blob/3.11.0/speech_recognition/__init__.py#L7 + # https://pypi.org/project/pylutron/ - v0.2.12 - 2024-02-12 + # https://github.com/thecynic/pylutron/issues/89 + "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pylutron", + # https://pypi.org/project/SpeechRecognition/ - v3.10.4 - 2024-05-05 + # https://github.com/Uberi/speech_recognition/blob/3.10.4/speech_recognition/__init__.py#L7 "ignore:'aifc' is deprecated and slated for removal in Python 3.13:DeprecationWarning:speech_recognition", - # https://pypi.org/project/voip-utils/ - v0.2.0 - 2024-09-06 - # https://github.com/home-assistant-libs/voip-utils/blob/0.2.0/voip_utils/rtp_audio.py#L3 + # https://pypi.org/project/voip-utils/ - v0.1.0 - 2023-06-28 + # https://github.com/home-assistant-libs/voip-utils/blob/v0.1.0/voip_utils/rtp_audio.py#L2 "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:voip_utils.rtp_audio", # -- Python 3.13 - unmaintained projects, last release about 2+ years @@ -621,17 +614,6 @@ filterwarnings = [ # https://github.com/ssaenger/pyws66i/blob/v1.1/pyws66i/__init__.py#L2 "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pyws66i", - # -- New in Python 3.13 - # https://github.com/kurtmckee/feedparser/pull/389 - >6.0.11 - # https://github.com/kurtmckee/feedparser/issues/481 - "ignore:'count' is passed as positional argument:DeprecationWarning:feedparser.html", - # https://github.com/youknowone/python-deadlib - Backports for aifc, telnetlib - "ignore:aifc was removed in Python 3.13.*'standard-aifc':DeprecationWarning:speech_recognition", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:homeassistant.components.hddtemp.sensor", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:ndms2_client.connection", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:plumlightpad.lightpad", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:pyws66i", - # -- unmaintained projects, last release about 2+ years # https://pypi.org/project/agent-py/ - v0.0.23 - 2020-06-04 "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:agent.a", @@ -642,7 +624,7 @@ filterwarnings = [ # https://pypi.org/project/directv/ - v0.4.0 - 2020-09-12 "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:directv.directv", "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:directv.models", - # https://pypi.org/project/foobot_async/ - v1.0.1 - 2024-08-16 + # https://pypi.org/project/foobot_async/ - v1.0.0 - 2020-11-24 "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:foobot_async", # https://pypi.org/project/httpsig/ - v1.3.0 - 2018-11-28 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:httpsig", @@ -675,6 +657,10 @@ filterwarnings = [ "ignore:\"is\" with 'int' literal. 
Did you mean \"==\"?:SyntaxWarning:.*pyiss", # https://pypi.org/project/PyMetEireann/ - v2021.8.0 - 2021-08-16 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:meteireann", + # https://pypi.org/project/pyowm/ - v3.3.0 - 2022-02-14 + # https://github.com/csparpa/pyowm/issues/435 + # https://github.com/csparpa/pyowm/blob/3.3.0/pyowm/commons/cityidregistry.py#L7 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pyowm.commons.cityidregistry", # https://pypi.org/project/PyPasser/ - v0.0.5 - 2021-10-21 "ignore:invalid escape sequence:SyntaxWarning:.*pypasser.utils", # https://pypi.org/project/pyqwikswitch/ - v0.94 - 2019-08-19 @@ -684,37 +670,16 @@ filterwarnings = [ "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:rx.internal.constants", # https://pypi.org/project/rxv/ - v0.7.0 - 2021-10-10 "ignore:defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead:DeprecationWarning:rxv.ssdp", -] - -[tool.coverage.run] -source = ["homeassistant"] - -[tool.coverage.report] -exclude_lines = [ - # Have to re-enable the standard pragma - "pragma: no cover", - # Don't complain about missing debug-only code: - "def __repr__", - # Don't complain if tests don't hit defensive assertion code: - "raise AssertionError", - "raise NotImplementedError", - # TYPE_CHECKING and @overload blocks are never executed during pytest run - "if TYPE_CHECKING:", - "@overload", + # https://pypi.org/project/webrtcvad/ - v2.0.10 - 2017-01-08 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:webrtcvad", ] [tool.ruff] -required-version = ">=0.6.8" +required-version = ">=0.4.8" [tool.ruff.lint] select = [ "A001", # Variable {name} is shadowing a Python builtin - "ASYNC210", # Async functions should not call blocking HTTP methods - "ASYNC220", # Async functions should not create subprocesses with blocking methods - "ASYNC221", # Async functions should not run processes with blocking methods - "ASYNC222", # Async functions should not wait on processes with blocking methods - "ASYNC230", # Async functions should not open files with blocking methods like open - "ASYNC251", # Async functions should not call time.sleep "B002", # Python does not support the unary prefix increment "B005", # Using .strip() with multi-character strings is misleading "B007", # Loop control variable {name} not used within loop body @@ -735,9 +700,7 @@ select = [ "DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts) "E", # pycodestyle "F", # pyflakes/autoflake - "F541", # f-string without any placeholders "FLY", # flynt - "FURB", # refurb "G", # flake8-logging-format "I", # isort "INP", # flake8-no-pep420 @@ -752,7 +715,6 @@ select = [ "PIE", # flake8-pie "PL", # pylint "PT", # flake8-pytest-style - "PTH", # flake8-pathlib "PYI", # flake8-pyi "RET", # flake8-return "RSE", # flake8-raise @@ -760,7 +722,6 @@ select = [ "RUF006", # Store a reference to the return value of asyncio.create_task "RUF010", # Use explicit conversion flag "RUF013", # PEP 484 prohibits implicit Optional - "RUF017", # Avoid quadratic list summation "RUF018", # Avoid assignment expressions in assert statements "RUF019", # Unnecessary key check before dictionary access # "RUF100", # Unused `noqa` directive; temporarily every now and then to clean them up @@ -787,12 +748,9 @@ select = [ "SLOT", # flake8-slots "T100", # Trace found: {name} used "T20", # flake8-print - "TCH", # flake8-type-checking - "TID", # Tidy 
imports + "TID251", # Banned imports "TRY", # tryceratops "UP", # pyupgrade - "UP031", # Use format specifiers instead of percent format - "UP032", # Use f-string instead of `format` call "W", # pycodestyle ] @@ -822,12 +780,6 @@ ignore = [ "SIM103", # Return the condition {condition} directly "SIM108", # Use ternary operator {contents} instead of if-else-block "SIM115", # Use context handler for opening files - - # Moving imports into type-checking blocks can mess with pytest.patch() - "TCH001", # Move application import {} into a type-checking block - "TCH002", # Move third-party import {} into a type-checking block - "TCH003", # Move standard library import {} into a type-checking block - "TRY003", # Avoid specifying long messages outside the exception class "TRY400", # Use `logging.exception` instead of `logging.error` # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923 @@ -846,7 +798,15 @@ ignore = [ "ISC001", # Disabled because ruff does not understand type of __all__ generated by a function - "PLE0605" + "PLE0605", + + # temporarily disabled + "PT019", + "PYI024", # Use typing.NamedTuple instead of collections.namedtuple + "RET503", + "RET501", + "TRY002", + "TRY301" ] [tool.ruff.lint.flake8-import-conventions.extend-aliases] @@ -872,6 +832,7 @@ voluptuous = "vol" "homeassistant.components.lawn_mower.PLATFORM_SCHEMA" = "LAWN_MOWER_PLATFORM_SCHEMA" "homeassistant.components.light.PLATFORM_SCHEMA" = "LIGHT_PLATFORM_SCHEMA" "homeassistant.components.lock.PLATFORM_SCHEMA" = "LOCK_PLATFORM_SCHEMA" +"homeassistant.components.mailbox.PLATFORM_SCHEMA" = "MAILBOX_PLATFORM_SCHEMA" "homeassistant.components.media_player.PLATFORM_SCHEMA" = "MEDIA_PLAYER_PLATFORM_SCHEMA" "homeassistant.components.notify.PLATFORM_SCHEMA" = "NOTIFY_PLATFORM_SCHEMA" "homeassistant.components.number.PLATFORM_SCHEMA" = "NUMBER_PLATFORM_SCHEMA" @@ -892,7 +853,6 @@ voluptuous = "vol" "homeassistant.components.wake_word.PLATFORM_SCHEMA" = "WAKE_WORD_PLATFORM_SCHEMA" "homeassistant.components.water_heater.PLATFORM_SCHEMA" = "WATER_HEATER_PLATFORM_SCHEMA" "homeassistant.components.weather.PLATFORM_SCHEMA" = "WEATHER_PLATFORM_SCHEMA" -"homeassistant.core.DOMAIN" = "HOMEASSISTANT_DOMAIN" "homeassistant.helpers.area_registry" = "ar" "homeassistant.helpers.category_registry" = "cr" "homeassistant.helpers.config_validation" = "cv" @@ -926,17 +886,5 @@ split-on-trailing-comma = false "homeassistant/scripts/*" = ["T201"] "script/*" = ["T20"] -# Allow relative imports within auth and within components -"homeassistant/auth/*/*" = ["TID252"] -"homeassistant/components/*/*/*" = ["TID252"] -"tests/components/*/*/*" = ["TID252"] - -# Temporary -"homeassistant/**" = ["PTH"] -"tests/**" = ["PTH"] - [tool.ruff.lint.mccabe] max-complexity = 25 - -[tool.ruff.lint.pydocstyle] -property-decorators = ["propcache.cached_property"] diff --git a/requirements.txt b/requirements.txt index b97c8dc57a0..f41fca19ecc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,48 +4,42 @@ # Home Assistant Core aiodns==3.2.0 -aiohasupervisor==0.2.1 -aiohttp==3.11.0 +aiohttp==3.9.5 aiohttp_cors==0.7.0 +aiohttp-fast-url-dispatcher==0.3.0 aiohttp-fast-zlib==0.1.1 aiozoneinfo==0.2.1 astral==2.2 -async-interrupt==1.2.0 -attrs==24.2.0 +async-interrupt==1.1.2 +attrs==23.2.0 atomicwrites-homeassistant==1.4.1 -audioop-lts==0.2.1;python_version>='3.13' awesomeversion==24.6.0 -bcrypt==4.2.0 +bcrypt==4.1.2 certifi>=2021.5.30 ciso8601==2.3.1 -fnv-hash-fast==1.0.2 -hass-nabucasa==0.84.0 -httpx==0.27.2 -home-assistant-bluetooth==1.13.0 
+fnv-hash-fast==0.5.0 +hass-nabucasa==0.81.1 +httpx==0.27.0 +home-assistant-bluetooth==1.12.2 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 -PyJWT==2.9.0 -cryptography==43.0.1 -Pillow==11.0.0 -propcache==0.2.0 -pyOpenSSL==24.2.1 -orjson==3.10.11 +PyJWT==2.8.0 +cryptography==42.0.8 +Pillow==10.3.0 +pyOpenSSL==24.1.0 +orjson==3.9.15 packaging>=23.1 +pip>=21.3.1 psutil-home-assistant==0.0.1 python-slugify==8.0.4 -PyYAML==6.0.2 +PyYAML==6.0.1 requests==2.32.3 -securetar==2024.2.1 SQLAlchemy==2.0.31 -standard-aifc==3.13.0;python_version>='3.13' -standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 -ulid-transform==1.0.2 +ulid-transform==0.9.0 urllib3>=1.26.5,<2 -uv==0.5.0 -voluptuous==0.15.2 +voluptuous==0.13.1 voluptuous-serialize==2.6.0 -voluptuous-openapi==0.0.5 -yarl==1.17.1 -webrtc-models==0.2.0 +voluptuous-openapi==0.0.4 +yarl==1.9.4 diff --git a/requirements_all.txt b/requirements_all.txt index 65ef5f1ebf2..5ed983e4a17 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -4,7 +4,7 @@ -r requirements.txt # homeassistant.components.aemet -AEMET-OpenData==0.5.4 +AEMET-OpenData==0.5.2 # homeassistant.components.honeywell AIOSomecomfort==0.0.25 @@ -12,14 +12,17 @@ AIOSomecomfort==0.0.25 # homeassistant.components.adax Adax-local==0.1.5 +# homeassistant.components.blinksticklight +BlinkStick==1.2.0 + # homeassistant.components.doorbird -DoorBirdPy==3.0.8 +DoorBirdPy==2.1.0 # homeassistant.components.homekit -HAP-python==4.9.2 +HAP-python==4.9.1 # homeassistant.components.tasmota -HATasmota==0.9.2 +HATasmota==0.9.1 # homeassistant.components.mastodon Mastodon.py==1.8.1 @@ -33,10 +36,10 @@ Mastodon.py==1.8.1 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==11.0.0 +Pillow==10.3.0 # homeassistant.components.plex -PlexAPI==4.15.16 +PlexAPI==4.15.13 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 @@ -45,26 +48,29 @@ ProgettiHWSW==0.1.3 # PyBluez==0.22 # homeassistant.components.cast -PyChromecast==14.0.5 +PyChromecast==14.0.1 # homeassistant.components.flick_electric PyFlick==0.0.2 # homeassistant.components.flume -PyFlume==0.6.5 +PyFlume==0.8.7 # homeassistant.components.fronius PyFronius==0.7.3 # homeassistant.components.pyload -PyLoadAPI==1.3.2 +PyLoadAPI==1.2.0 + +# homeassistant.components.mvglive +PyMVGLive==1.1.4 # homeassistant.components.met_eireann PyMetEireann==2021.8.0 # homeassistant.components.met # homeassistant.components.norway_air -PyMetno==0.13.0 +PyMetno==0.12.0 # homeassistant.components.keymitt_ble PyMicroBot==0.0.17 @@ -84,7 +90,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.51.0 +PySwitchbot==0.48.0 # homeassistant.components.switchmate PySwitchmate==0.5.1 @@ -97,10 +103,10 @@ PyTransportNSW==0.1.1 # homeassistant.components.camera # homeassistant.components.stream -PyTurboJPEG==1.7.5 +PyTurboJPEG==1.7.1 # homeassistant.components.vicare -PyViCare==2.35.0 +PyViCare==2.32.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 @@ -109,7 +115,7 @@ PyXiaomiGateway==0.14.3 RachioPy==1.1.0 # homeassistant.components.python_script -RestrictedPython==7.4 +RestrictedPython==7.0 # homeassistant.components.remember_the_milk RtmAPI==0.7.2 @@ -152,7 +158,7 @@ advantage-air==0.4.4 afsapi==0.2.7 # homeassistant.components.agent_dvr -agent-py==0.0.24 +agent-py==0.0.23 # homeassistant.components.geo_json_events aio-geojson-generic-client==0.4 @@ -170,50 +176,47 @@ aio-geojson-nsw-rfs-incidents==0.7 
aio-geojson-usgs-earthquakes==0.3 # homeassistant.components.gdacs -aio-georss-gdacs==0.10 - -# homeassistant.components.acaia -aioacaia==0.1.6 +aio-georss-gdacs==0.9 # homeassistant.components.airq aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.10 +aioairzone-cloud==0.5.3 # homeassistant.components.airzone -aioairzone==0.9.6 +aioairzone==0.7.7 # homeassistant.components.ambient_network # homeassistant.components.ambient_station -aioambient==2024.08.0 +aioambient==2024.01.0 # homeassistant.components.apcupsd aioapcaccess==0.4.2 # homeassistant.components.aquacell -aioaquacell==0.2.0 +aioaquacell==0.1.7 # homeassistant.components.aseko_pool_live -aioaseko==1.0.0 +aioaseko==0.1.1 # homeassistant.components.asuswrt aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.10.3 +aioautomower==2024.6.1 # homeassistant.components.azure_devops -aioazuredevops==2.2.1 +aioazuredevops==2.1.1 # homeassistant.components.baf aiobafi6==0.9.0 # homeassistant.components.aws -aiobotocore==2.13.1 +aiobotocore==2.13.0 # homeassistant.components.comelit -aiocomelit==0.9.1 +aiocomelit==0.9.0 # homeassistant.components.dhcp aiodhcpwatcher==1.0.2 @@ -224,9 +227,6 @@ aiodiscover==2.1.0 # homeassistant.components.dnsip aiodns==3.2.0 -# homeassistant.components.duke_energy -aiodukeenergy==0.2.2 - # homeassistant.components.eafm aioeafm==0.1.2 @@ -243,7 +243,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==27.0.1 +aioesphomeapi==24.6.1 # homeassistant.components.flo aioflo==2021.11.0 @@ -252,8 +252,7 @@ aioflo==2021.11.0 aioftp==0.21.3 # homeassistant.components.github -# homeassistant.components.iron_os -aiogithubapi==24.6.0 +aiogithubapi==23.11.0 # homeassistant.components.guardian aioguardian==2022.07.0 @@ -261,14 +260,11 @@ aioguardian==2022.07.0 # homeassistant.components.harmony aioharmony==0.2.10 -# homeassistant.components.hassio -aiohasupervisor==0.2.1 - # homeassistant.components.homekit_controller -aiohomekit==3.2.6 +aiohomekit==3.1.5 # homeassistant.components.hue -aiohue==4.7.3 +aiohue==4.7.1 # homeassistant.components.imap aioimaplib==1.1.0 @@ -283,10 +279,10 @@ aiokef==0.2.16 aiolifx-effects==0.3.2 # homeassistant.components.lifx -aiolifx-themes==0.5.5 +aiolifx-themes==0.4.15 # homeassistant.components.lifx -aiolifx==1.1.1 +aiolifx==1.0.2 # homeassistant.components.livisi aiolivisi==0.0.19 @@ -295,10 +291,10 @@ aiolivisi==0.0.19 aiolookin==1.0.0 # homeassistant.components.lyric -aiolyric==2.0.1 +aiolyric==1.1.0 # homeassistant.components.mealie -aiomealie==0.9.3 +aiomealie==0.4.0 # homeassistant.components.modern_forms aiomodernforms==0.1.8 @@ -319,16 +315,16 @@ aionut==4.3.3 aiooncue==0.3.7 # homeassistant.components.openexchangerates -aioopenexchangerates==0.6.8 +aioopenexchangerates==0.4.0 # homeassistant.components.nmap_tracker -aiooui==0.1.7 +aiooui==0.1.6 # homeassistant.components.pegel_online aiopegelonline==0.0.10 # homeassistant.components.acmeda -aiopulse==0.4.6 +aiopulse==0.4.4 # homeassistant.components.purpleair aiopurpleair==2022.12.1 @@ -345,10 +341,10 @@ aiopvpc==4.2.2 aiopyarr==23.4.0 # homeassistant.components.qnap_qsw -aioqsw==0.4.1 +aioqsw==0.3.5 # homeassistant.components.rainforest_raven -aioraven==0.7.0 +aioraven==0.6.0 # homeassistant.components.recollect_waste aiorecollect==2023.09.0 @@ -357,10 +353,7 @@ aiorecollect==2023.09.0 aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed -aioruckus==0.42 - -# homeassistant.components.russound_rio 
-aiorussound==4.1.0 +aioruckus==0.34 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -369,7 +362,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==12.0.1 +aioshelly==10.0.1 # homeassistant.components.skybell aioskybell==22.7.0 @@ -383,32 +376,26 @@ aiosolaredge==0.2.0 # homeassistant.components.steamist aiosteamist==1.0.0 -# homeassistant.components.cambridge_audio -aiostreammagic==2.8.5 - # homeassistant.components.switcher_kis -aioswitcher==4.4.0 +aioswitcher==3.4.3 # homeassistant.components.syncthing aiosyncthing==0.5.1 # homeassistant.components.tankerkoenig -aiotankerkoenig==0.4.2 - -# homeassistant.components.tedee -aiotedee==0.2.20 +aiotankerkoenig==0.4.1 # homeassistant.components.tractive -aiotractive==0.6.0 +aiotractive==0.5.6 # homeassistant.components.unifi -aiounifi==80 +aiounifi==79 # homeassistant.components.vlc_telnet -aiovlc==0.5.1 +aiovlc==0.3.2 # homeassistant.components.vodafone_station -aiovodafone==0.6.1 +aiovodafone==0.6.0 # homeassistant.components.waqi aiowaqi==3.1.0 @@ -417,22 +404,22 @@ aiowaqi==3.1.0 aiowatttime==0.1.1 # homeassistant.components.webostv -aiowebostv==0.4.2 +aiowebostv==0.4.0 # homeassistant.components.withings -aiowithings==3.1.3 +aiowithings==3.0.1 # homeassistant.components.yandex_transport -aioymaps==1.2.5 +aioymaps==1.2.2 # homeassistant.components.airgradient -airgradient==0.9.1 +airgradient==0.6.0 # homeassistant.components.airly airly==1.1.0 # homeassistant.components.airthings_ble -airthings-ble==0.9.2 +airthings-ble==0.9.0 # homeassistant.components.airthings airthings-cloud==0.2.0 @@ -441,13 +428,13 @@ airthings-cloud==0.2.0 airtouch4pyapi==1.0.5 # homeassistant.components.airtouch5 -airtouch5py==0.2.10 +airtouch5py==0.2.8 # homeassistant.components.alpha_vantage alpha-vantage==2.3.1 # homeassistant.components.amberelectric -amberelectric==1.1.1 +amberelectric==1.1.0 # homeassistant.components.amcrest amcrest==1.9.8 @@ -456,37 +443,34 @@ amcrest==1.9.8 androidtv[async]==0.0.73 # homeassistant.components.androidtv_remote -androidtvremote2==0.1.2 +androidtvremote2==0.1.1 # homeassistant.components.anel_pwrctrl anel-pwrctrl-homeassistant==0.0.1.dev2 # homeassistant.components.anova -anova-wifi==0.17.0 +anova-wifi==0.14.0 # homeassistant.components.anthemav anthemav==1.4.1 -# homeassistant.components.anthropic -anthropic==0.31.2 - # homeassistant.components.weatherkit -apple_weatherkit==1.1.3 +apple_weatherkit==1.1.2 # homeassistant.components.apprise -apprise==1.9.0 +apprise==1.8.0 # homeassistant.components.aprs aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==2.2.1 +apsystems-ez1==1.3.1 # homeassistant.components.aqualogic aqualogic==2.6 # homeassistant.components.aranet -aranet4==2.4.0 +aranet4==2.3.4 # homeassistant.components.arcam_fmj arcam-fmj==1.5.2 @@ -497,16 +481,19 @@ arris-tg2492lg==2.2.0 # homeassistant.components.ampio asmog==0.0.6 +# homeassistant.components.asterisk_mbox +asterisk_mbox==0.5.0 + # homeassistant.components.dlna_dmr # homeassistant.components.dlna_dms # homeassistant.components.samsungtv # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.41.0 +async-upnp-client==0.39.0 # homeassistant.components.arve -asyncarve==0.1.1 +asyncarve==0.0.9 # homeassistant.components.keyboard_remote asyncinotify==4.0.2 @@ -521,21 +508,11 @@ asyncsleepiq==1.5.2 # atenpdu==0.3.2 # homeassistant.components.aurora -auroranoaa==0.0.5 +auroranoaa==0.0.3 # 
homeassistant.components.aurora_abb_powerone aurorapy==0.2.7 -# homeassistant.components.autarco -autarco==3.1.0 - -# homeassistant.components.husqvarna_automower_ble -automower-ble==0.2.0 - -# homeassistant.components.generic -# homeassistant.components.stream -av==13.1.0 - # homeassistant.components.avea # avea==1.5.1 @@ -543,19 +520,16 @@ av==13.1.0 # avion==0.10 # homeassistant.components.axis -axis==63 - -# homeassistant.components.fujitsu_fglair -ayla-iot-unofficial==1.4.3 +axis==61 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 # homeassistant.components.azure_data_explorer -azure-kusto-data[aio]==4.5.1 +azure-kusto-data[aio]==3.1.0 # homeassistant.components.azure_data_explorer -azure-kusto-ingest==4.5.1 +azure-kusto-ingest==3.1.0 # homeassistant.components.azure_service_bus azure-servicebus==7.10.0 @@ -581,24 +555,27 @@ beautifulsoup4==4.12.3 # homeassistant.components.beewi_smartclim # beewi-smartclim==0.0.10 +# homeassistant.components.zha +bellows==0.39.1 + # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.4 +bimmer-connected[china]==0.15.3 # homeassistant.components.bizkaibus bizkaibus==0.1.1 # homeassistant.components.eq3btsmart # homeassistant.components.esphome -bleak-esphome==1.1.0 +bleak-esphome==1.0.0 # homeassistant.components.bluetooth -bleak-retry-connector==3.6.0 +bleak-retry-connector==3.5.0 # homeassistant.components.bluetooth -bleak==0.22.3 +bleak==0.22.2 # homeassistant.components.blebox -blebox-uniapi==2.5.0 +blebox-uniapi==2.4.2 # homeassistant.components.blink blinkpy==0.23.0 @@ -617,7 +594,7 @@ bluemaestro-ble==0.2.3 # bluepy==1.3.0 # homeassistant.components.bluetooth -bluetooth-adapters==0.20.0 +bluetooth-adapters==0.19.3 # homeassistant.components.bluetooth bluetooth-auto-recovery==1.4.2 @@ -626,7 +603,7 @@ bluetooth-auto-recovery==1.4.2 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.20.0 +bluetooth-data-tools==1.19.3 # homeassistant.components.bond bond-async==0.2.1 @@ -636,19 +613,16 @@ boschshcpy==0.2.91 # homeassistant.components.amazon_polly # homeassistant.components.route53 -boto3==1.34.131 - -# homeassistant.components.aws -botocore==1.34.131 +boto3==1.34.51 # homeassistant.components.bring -bring-api==0.9.1 +bring-api==0.7.1 # homeassistant.components.broadlink broadlink==0.19.0 # homeassistant.components.brother -brother==4.3.1 +brother==4.2.0 # homeassistant.components.brottsplatskartan brottsplatskartan==1.0.5 @@ -672,7 +646,7 @@ btsmarthub-devicelist==0.2.3 buienradar==1.0.6 # homeassistant.components.dhcp -cached-ipaddress==0.8.0 +cached_ipaddress==0.3.0 # homeassistant.components.caldav caldav==1.3.9 @@ -686,9 +660,6 @@ clearpasspy==1.0.2 # homeassistant.components.sinch clx-sdk-xms==1.0.0 -# homeassistant.components.coinbase -coinbase-advanced-py==1.2.2 - # homeassistant.components.coinbase coinbase==2.1.0 @@ -699,7 +670,7 @@ colorlog==6.8.2 colorthief==0.2.1 # homeassistant.components.concord232 -concord232==0.15.1 +concord232==0.15 # homeassistant.components.upc_connect connect-box==0.3.1 @@ -708,7 +679,7 @@ connect-box==0.3.1 construct==2.10.68 # homeassistant.components.utility_meter -cronsim==2.6 +croniter==2.0.2 # homeassistant.components.crownstone crownstone-cloud==1.4.11 @@ -726,10 +697,10 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.24.3 +dbus-fast==2.22.1 # homeassistant.components.debugpy -debugpy==1.8.6 +debugpy==1.8.1 # 
homeassistant.components.decora_wifi # decora-wifi==1.4 @@ -738,7 +709,7 @@ debugpy==1.8.6 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==8.4.1 +deebot-client==8.0.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -752,7 +723,7 @@ deluge-client==1.10.2 demetriek==0.4.0 # homeassistant.components.denonavr -denonavr==1.0.0 +denonavr==0.11.6 # homeassistant.components.devialet devialet==1.4.5 @@ -763,9 +734,6 @@ devolo-home-control-api==0.18.3 # homeassistant.components.devolo_home_network devolo-plc-api==1.4.1 -# homeassistant.components.chacon_dio -dio-chacon-wifi-api==1.2.1 - # homeassistant.components.directv directv==0.4.0 @@ -775,6 +743,9 @@ discogs-client==2.3.0 # homeassistant.components.steamist discovery30303==0.3.2 +# homeassistant.components.dovado +dovado==0.4.1 + # homeassistant.components.dremel_3d_printer dremel3dpy==2.1.1 @@ -782,7 +753,7 @@ dremel3dpy==2.1.1 dropmqttapi==1.0.3 # homeassistant.components.dsmr -dsmr-parser==1.4.2 +dsmr-parser==1.3.1 # homeassistant.components.dwd_weather_warnings dwdwfsapi==1.0.7 @@ -800,7 +771,7 @@ dynalite-panel==0.0.4 eagle100==0.1.1 # homeassistant.components.easyenergy -easyenergy==2.1.2 +easyenergy==2.1.1 # homeassistant.components.ebusd ebusdpy==0.0.17 @@ -811,9 +782,6 @@ ecoaliface==0.4.0 # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 -# homeassistant.components.elevenlabs -elevenlabs==1.6.1 - # homeassistant.components.elgato elgato==5.1.2 @@ -821,7 +789,7 @@ elgato==5.1.2 eliqonline==1.2.2 # homeassistant.components.elkm1 -elkm1-lib==2.2.10 +elkm1-lib==2.2.7 # homeassistant.components.elmax elmax-api==0.0.5 @@ -839,7 +807,7 @@ emulated-roku==0.3.0 energyflip-client==0.2.2 # homeassistant.components.energyzero -energyzero==2.1.1 +energyzero==2.1.0 # homeassistant.components.enocean enocean==0.50 @@ -848,10 +816,10 @@ enocean==0.50 enturclient==0.2.4 # homeassistant.components.environment_canada -env-canada==0.7.2 +env-canada==0.7.0 # homeassistant.components.season -ephem==4.1.6 +ephem==4.1.5 # homeassistant.components.epic_games_store epicstore-api==0.1.7 @@ -863,7 +831,7 @@ epion==0.0.3 epson-projector==0.5.1 # homeassistant.components.eq3btsmart -eq3btsmart==1.4.1 +eq3btsmart==1.1.9 # homeassistant.components.esphome esphome-dashboard-api==1.2.3 @@ -880,9 +848,6 @@ eufylife-ble-client==0.1.8 # homeassistant.components.evohome evohome-async==0.4.20 -# homeassistant.components.bryant_evolution -evolutionhttp==0.0.18 - # homeassistant.components.faa_delays faadelays==2023.9.1 @@ -918,14 +883,14 @@ fjaraskupan==2.3.0 flexit_bacnet==2.2.1 # homeassistant.components.flipr -flipr-api==1.6.1 +flipr-api==1.5.1 # homeassistant.components.flux_led flux-led==1.0.4 # homeassistant.components.homekit # homeassistant.components.recorder -fnv-hash-fast==1.0.2 +fnv-hash-fast==0.5.0 # homeassistant.components.foobot foobot_async==1.0.0 @@ -944,22 +909,22 @@ freesms==0.2.0 # homeassistant.components.fritz # homeassistant.components.fritzbox_callmonitor -fritzconnection[qr]==1.14.0 +fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.6.10 +fyta_cli==0.4.1 # homeassistant.components.google_translate gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.4 +gardena-bluetooth==1.4.2 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 # homeassistant.components.google -gcal-sync==6.2.0 +gcal-sync==6.0.4 # homeassistant.components.geniushub geniushub-client==0.7.1 @@ -977,7 +942,7 @@ georss-generic-client==0.8 
georss-ign-sismologia-client==0.8 # homeassistant.components.qld_bushfire -georss-qld-bushfire-alert-client==0.8 +georss-qld-bushfire-alert-client==0.7 # homeassistant.components.dlna_dmr # homeassistant.components.kef @@ -987,7 +952,7 @@ georss-qld-bushfire-alert-client==0.8 getmac==0.9.4 # homeassistant.components.gios -gios==5.0.0 +gios==4.0.0 # homeassistant.components.gitter gitterpy==0.1.7 @@ -995,9 +960,6 @@ gitterpy==0.1.7 # homeassistant.components.glances glances-api==0.8.0 -# homeassistant.components.go2rtc -go2rtc-client==0.1.1 - # homeassistant.components.goalzero goalzero==0.2.2 @@ -1009,37 +971,31 @@ goodwe==0.3.6 google-api-python-client==2.71.0 # homeassistant.components.google_pubsub -google-cloud-pubsub==2.23.0 +google-cloud-pubsub==2.13.11 # homeassistant.components.google_cloud -google-cloud-speech==2.27.0 - -# homeassistant.components.google_cloud -google-cloud-texttospeech==2.17.2 +google-cloud-texttospeech==2.12.3 # homeassistant.components.google_generative_ai_conversation -google-generativeai==0.8.2 +google-generativeai==0.6.0 # homeassistant.components.nest -google-nest-sdm==6.1.5 - -# homeassistant.components.google_photos -google-photos-library-api==0.12.1 +google-nest-sdm==4.0.5 # homeassistant.components.google_travel_time googlemaps==2.5.1 # homeassistant.components.slide -goslide-api==0.7.0 +goslide-api==0.5.1 # homeassistant.components.tailwind -gotailwind==0.2.4 +gotailwind==0.2.3 # homeassistant.components.govee_ble -govee-ble==0.40.0 +govee-ble==0.31.3 # homeassistant.components.govee_light_local -govee-local-api==1.5.3 +govee-local-api==1.5.0 # homeassistant.components.remote_rpi_gpio gpiozero==1.6.2 @@ -1048,7 +1004,7 @@ gpiozero==1.6.2 gps3==0.33.3 # homeassistant.components.gree -greeclimate==2.1.0 +greeclimate==1.4.1 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 @@ -1057,7 +1013,7 @@ greeneye_monitor==3.0.3 greenwavereality==0.5.1 # homeassistant.components.pure_energie -gridnet==5.0.1 +gridnet==5.0.0 # homeassistant.components.growatt_server growattServer==1.5.0 @@ -1069,13 +1025,17 @@ gspread==5.5.0 gstreamer-player==1.1.2 # homeassistant.components.profiler -guppy3==3.1.4.post1;python_version<'3.13' +guppy3==3.1.4.post1 # homeassistant.components.iaqualink h2==4.1.0 +# homeassistant.components.generic +# homeassistant.components.stream +ha-av==10.1.1 + # homeassistant.components.ffmpeg -ha-ffmpeg==3.2.2 +ha-ffmpeg==3.2.0 # homeassistant.components.iotawatt ha-iotawattpy==0.1.2 @@ -1084,31 +1044,31 @@ ha-iotawattpy==0.1.2 ha-philipsjs==3.2.2 # homeassistant.components.habitica -habitipy==0.3.3 +habitipy==0.3.1 # homeassistant.components.bluetooth -habluetooth==3.6.0 +habluetooth==3.1.3 # homeassistant.components.cloud -hass-nabucasa==0.84.0 +hass-nabucasa==0.81.1 # homeassistant.components.splunk hass-splunk==0.1.1 # homeassistant.components.conversation -hassil==2.0.1 +hassil==1.7.1 # homeassistant.components.jewish_calendar hdate==0.10.9 # homeassistant.components.heatmiser -heatmiserV3==2.0.3 +heatmiserV3==1.1.18 # homeassistant.components.here_travel_time -here-routing==1.0.1 +here-routing==0.2.0 # homeassistant.components.here_travel_time -here-transit==1.2.1 +here-transit==1.2.0 # homeassistant.components.hikvisioncam hikvision==0.4 @@ -1127,19 +1087,19 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.60 +holidays==0.51 # homeassistant.components.frontend -home-assistant-frontend==20241106.2 +home-assistant-frontend==20240626.2 # homeassistant.components.conversation 
-home-assistant-intents==2024.11.13 +home-assistant-intents==2024.6.26 # homeassistant.components.home_connect -homeconnect==0.8.0 +homeconnect==0.7.2 # homeassistant.components.homematicip_cloud -homematicip==1.1.2 +homematicip==1.1.1 # homeassistant.components.horizon horimote==0.4.1 @@ -1148,10 +1108,10 @@ horimote==0.4.1 httplib2==0.20.4 # homeassistant.components.huawei_lte -huawei-lte-api==1.10.0 +huawei-lte-api==1.7.3 # homeassistant.components.huum -huum==0.7.12 +huum==0.7.10 # homeassistant.components.hyperion hyperion-py==0.7.5 @@ -1171,13 +1131,13 @@ ibmiotf==0.3.4 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==8.2.0 +ical==8.0.1 # homeassistant.components.ping icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.6.2 +idasen-ha==2.6.1 # homeassistant.components.network ifaddr==0.2.0 @@ -1189,10 +1149,10 @@ iglo==1.2.7 ihcsdk==2.8.5 # homeassistant.components.imgw_pib -imgw_pib==1.0.6 +imgw_pib==1.0.5 # homeassistant.components.incomfort -incomfort-client==0.6.3-1 +incomfort-client==0.6.2 # homeassistant.components.influxdb influxdb-client==1.24.0 @@ -1201,36 +1161,29 @@ influxdb-client==1.24.0 influxdb==5.3.1 # homeassistant.components.inkbird -inkbird-ble==0.5.8 +inkbird-ble==0.5.6 # homeassistant.components.insteon insteon-frontend-home-assistant==0.5.0 # homeassistant.components.intellifire -intellifire4py==4.1.9 - -# homeassistant.components.iotty -iottycloud==0.2.1 +intellifire4py==2.2.2 # homeassistant.components.iperf3 iperf3==0.1.11 # homeassistant.components.isal -isal==1.7.1 +isal==1.6.1 # homeassistant.components.gogogate2 ismartgate==5.0.1 -# homeassistant.components.israel_rail -israel-rail-api==0.1.2 - # homeassistant.components.abode -jaraco.abode==6.2.1 +jaraco.abode==5.1.2 # homeassistant.components.jellyfin jellyfin-apiclient-python==1.9.2 -# homeassistant.components.command_line # homeassistant.components.rest jsonpath==0.82.2 @@ -1250,28 +1203,25 @@ kegtron-ble==0.4.0 kiwiki-client==0.1.1 # homeassistant.components.knocki -knocki==0.3.5 +knocki==0.2.0 # homeassistant.components.knx -knx-frontend==2024.9.10.221729 +knx-frontend==2024.1.20.105944 # homeassistant.components.konnected konnected==1.2.0 # homeassistant.components.kraken -krakenex==2.2.2 +krakenex==2.1.0 # homeassistant.components.lacrosse_view -lacrosse-view==1.0.3 +lacrosse-view==1.0.1 # homeassistant.components.eufy lakeside==0.13 # homeassistant.components.laundrify -laundrify-aio==1.2.2 - -# homeassistant.components.lcn -lcn-frontend==0.2.2 +laundrify-aio==1.1.2 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1282,9 +1232,6 @@ leaone-ble==0.1.0 # homeassistant.components.led_ble led-ble==1.0.2 -# homeassistant.components.lektrico -lektricowifi==0.0.43 - # homeassistant.components.foscam libpyfoscam==1.2.2 @@ -1312,9 +1259,15 @@ linear-garage-door==0.2.9 # homeassistant.components.linode linode-api==4.1.9b1 +# homeassistant.components.lamarzocco +lmcloud==1.1.13 + # homeassistant.components.google_maps locationsharinglib==5.0.1 +# homeassistant.components.logi_circle +logi-circle==0.2.3 + # homeassistant.components.london_underground london-tube-status==0.5 @@ -1331,10 +1284,10 @@ lupupy==0.3.2 lw12==0.9.2 # homeassistant.components.scrape -lxml==5.3.0 +lxml==5.1.0 # homeassistant.components.matrix -matrix-nio==0.25.2 +matrix-nio==0.24.0 # homeassistant.components.maxcube maxcube-api==0.4.3 @@ -1358,13 +1311,13 @@ melnor-bluetooth==0.0.25 messagebird==1.2.0 # homeassistant.components.meteoalarm 
-meteoalertapi==0.3.1 +meteoalertapi==0.3.0 # homeassistant.components.meteo_france meteofrance-api==1.3.0 # homeassistant.components.mfi -mficlient==0.5.0 +mficlient==0.3.0 # homeassistant.components.xiaomi_miio micloud==0.5 @@ -1376,7 +1329,7 @@ microBeesPy==0.3.2 mill-local==0.3.0 # homeassistant.components.mill -millheater==0.12.2 +millheater==0.11.8 # homeassistant.components.minio minio==7.1.12 @@ -1388,29 +1341,26 @@ moat-ble==0.1.1 moehlenhoff-alpha2==1.3.1 # homeassistant.components.monzo -monzopy==1.4.2 +monzopy==1.3.0 # homeassistant.components.mopeka -mopeka-iot-ble==0.8.0 +mopeka-iot-ble==0.7.0 # homeassistant.components.motion_blinds -motionblinds==0.6.25 +motionblinds==0.6.23 # homeassistant.components.motionblinds_ble -motionblindsble==0.1.2 +motionblindsble==0.1.0 # homeassistant.components.motioneye motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==4.1.1.116.0 +mozart-api==3.4.1.8.6 # homeassistant.components.mullvad mullvad-api==1.0.0 -# homeassistant.components.music_assistant -music-assistant-client==1.0.5 - # homeassistant.components.tts mutagen==1.47.0 @@ -1430,7 +1380,7 @@ nad-receiver==0.3.0 ndms2-client==0.1.2 # homeassistant.components.ness_alarm -nessclient==1.1.2 +nessclient==1.0.0 # homeassistant.components.netdata netdata==1.1.0 @@ -1439,7 +1389,7 @@ netdata==1.1.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==3.3.0 +nettigo-air-monitor==3.2.0 # homeassistant.components.neurio_energy neurio==0.3.1 @@ -1448,19 +1398,16 @@ neurio==0.3.1 nexia==2.0.8 # homeassistant.components.nextcloud -nextcloudmonitor==1.5.1 +nextcloudmonitor==1.5.0 # homeassistant.components.discord nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==3.3.0 +nextdns==3.0.0 # homeassistant.components.nibe_heatpump -nibe==2.11.0 - -# homeassistant.components.nice_go -nice-go==0.3.10 +nibe==2.8.0 # homeassistant.components.niko_home_control niko-home-control==0.2.1 @@ -1494,10 +1441,7 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==2.1.3 - -# homeassistant.components.nyt_games -nyt_games==0.4.4 +numpy==1.26.0 # homeassistant.components.oasa_telematics oasatelematics==0.3 @@ -1509,13 +1453,13 @@ oauth2client==4.1.3 objgraph==3.5.0 # homeassistant.components.garages_amsterdam -odp-amsterdam==6.0.2 +odp-amsterdam==6.0.1 # homeassistant.components.oem oemthermostat==1.1.1 # homeassistant.components.ollama -ollama==0.3.3 +ollama-hass==0.1.7 # homeassistant.components.omnilogic omnilogic==0.4.5 @@ -1533,7 +1477,7 @@ open-garage==0.2.0 open-meteo==0.3.1 # homeassistant.components.openai_conversation -openai==1.35.7 +openai==1.3.8 # homeassistant.components.openerz openerz-api==0.3.0 @@ -1548,7 +1492,7 @@ openhomedevice==2.2.0 opensensemap-api==0.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.7 +openwebifpy==4.2.4 # homeassistant.components.luci openwrt-luci-rpc==1.1.17 @@ -1557,7 +1501,7 @@ openwrt-luci-rpc==1.1.17 openwrt-ubus-rpc==0.0.2 # homeassistant.components.opower -opower==0.8.6 +opower==0.4.7 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -1575,7 +1519,7 @@ ourgroceries==1.5.4 ovoenergy==2.0.0 # homeassistant.components.p1_monitor -p1monitor==3.1.0 +p1monitor==3.0.0 # homeassistant.components.mqtt paho-mqtt==1.6.1 @@ -1584,7 +1528,7 @@ paho-mqtt==1.6.1 panacotta==0.2 # homeassistant.components.panasonic_viera -panasonic-viera==0.4.2 +panasonic-viera==0.3.6 # homeassistant.components.dunehd pdunehd==1.3.2 @@ -1613,7 +1557,7 @@ 
pigpio==1.78 pilight==0.1.1 # homeassistant.components.dominos -pizzapi==0.0.6 +pizzapi==0.0.3 # homeassistant.components.plex plexauth==0.0.6 @@ -1622,7 +1566,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.5.0 +plugwise==0.38.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1643,7 +1587,7 @@ prayer-times-calculator-offline==1.0.3 proliphix==0.4.1 # homeassistant.components.prometheus -prometheus-client==0.21.0 +prometheus-client==0.17.1 # homeassistant.components.proxmoxve proxmoxer==2.0.1 @@ -1654,7 +1598,7 @@ proxmoxer==2.0.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.1.0 +psutil==5.9.8 # homeassistant.components.pulseaudio_loopback pulsectl==23.5.2 @@ -1672,7 +1616,7 @@ pushover_complete==1.1.1 pvo==2.1.1 # homeassistant.components.aosmith -py-aosmith==1.0.10 +py-aosmith==1.0.8 # homeassistant.components.canary py-canary==0.5.4 @@ -1689,14 +1633,11 @@ py-dormakaba-dkey==1.0.5 # homeassistant.components.improv_ble py-improv-ble-client==1.0.3 -# homeassistant.components.madvr -py-madvr2==1.6.32 - # homeassistant.components.melissa py-melissa-climate==2.1.4 # homeassistant.components.nextbus -py-nextbusnext==2.0.5 +py-nextbusnext==1.0.2 # homeassistant.components.nightscout py-nightscout==1.2.2 @@ -1708,11 +1649,14 @@ py-schluter==0.1.7 py-sucks==0.9.10 # homeassistant.components.synology_dsm -py-synologydsm-api==2.5.3 +py-synologydsm-api==2.4.4 # homeassistant.components.zabbix py-zabbix==1.1.7 +# homeassistant.components.seventeentrack +py17track==2021.12.2 + # homeassistant.components.atome pyAtome==0.1.1 @@ -1720,16 +1664,16 @@ pyAtome==0.1.1 pyCEC==0.5.2 # homeassistant.components.control4 -pyControl4==1.2.0 +pyControl4==1.1.0 # homeassistant.components.duotecno -pyDuotecno==2024.10.1 +pyDuotecno==2024.5.1 # homeassistant.components.electrasmart -pyElectra==1.2.4 +pyElectra==1.2.3 # homeassistant.components.emby -pyEmby==1.10 +pyEmby==1.9 # homeassistant.components.hikvision pyHik==0.3.2 @@ -1741,7 +1685,7 @@ pyRFXtrx==0.31.1 pySDCP==1 # homeassistant.components.tibber -pyTibber==0.30.8 +pyTibber==0.28.2 # homeassistant.components.dlink pyW215==0.7.0 @@ -1766,7 +1710,7 @@ pyairnow==1.2.1 pyairvisual==2023.08.1 # homeassistant.components.aprilaire -pyaprilaire==0.7.4 +pyaprilaire==0.7.0 # homeassistant.components.asuswrt pyasuswrt==0.1.21 @@ -1775,10 +1719,10 @@ pyasuswrt==0.1.21 pyatag==0.3.5.3 # homeassistant.components.netatmo -pyatmo==8.1.0 +pyatmo==8.0.3 # homeassistant.components.apple_tv -pyatv==0.15.1 +pyatv==0.14.3 # homeassistant.components.aussie_broadband pyaussiebb==0.0.15 @@ -1792,14 +1736,11 @@ pybbox==0.0.5-alpha # homeassistant.components.blackbird pyblackbird==0.6 -# homeassistant.components.bluesound -pyblu==1.0.4 - # homeassistant.components.neato pybotvac==0.0.25 # homeassistant.components.braviatv -pybravia==0.3.4 +pybravia==0.3.3 # homeassistant.components.nissan_leaf pycarwings2==2.14 @@ -1823,10 +1764,7 @@ pycmus==0.1.1 pycomfoconnect==0.5.1 # homeassistant.components.coolmaster -pycoolmasternet-async==0.2.2 - -# homeassistant.components.radio_browser -pycountry==24.6.1 +pycoolmasternet-async==0.1.5 # homeassistant.components.microsoft pycsspeechtts==1.0.8 @@ -1835,16 +1773,13 @@ pycsspeechtts==1.0.8 # pycups==1.9.73 # homeassistant.components.daikin -pydaikin==2.13.7 +pydaikin==2.11.1 # homeassistant.components.danfoss_air pydanfossair==0.1.0 -# homeassistant.components.deako -pydeako==0.5.4 - # homeassistant.components.deconz -pydeconz==118 +pydeconz==116 # 
homeassistant.components.delijn pydelijn==1.1.0 @@ -1853,13 +1788,13 @@ pydelijn==1.1.0 pydexcom==0.2.3 # homeassistant.components.discovergy -pydiscovergy==3.0.2 +pydiscovergy==3.0.1 # homeassistant.components.doods pydoods==1.0.2 # homeassistant.components.hydrawise -pydrawise==2024.9.0 +pydrawise==2024.6.4 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 @@ -1871,7 +1806,7 @@ pyebox==1.1.4 pyecoforest==0.4.0 # homeassistant.components.econet -pyeconet==0.1.23 +pyeconet==0.1.22 # homeassistant.components.ista_ecotrend pyecotrend-ista==3.3.1 @@ -1889,10 +1824,10 @@ pyegps==0.2.5 pyeiscp==0.0.7 # homeassistant.components.emoncms -pyemoncms==0.1.1 +pyemoncms==0.0.7 # homeassistant.components.enphase_envoy -pyenphase==1.22.0 +pyenphase==1.20.3 # homeassistant.components.envisalink pyenvisalink==4.7 @@ -1910,7 +1845,7 @@ pyevilgenius==2.0.0 pyezviz==0.2.1.2 # homeassistant.components.fibaro -pyfibaro==0.8.0 +pyfibaro==0.7.8 # homeassistant.components.fido pyfido==2.1.2 @@ -1919,7 +1854,7 @@ pyfido==2.1.2 pyfireservicerota==0.0.43 # homeassistant.components.flic -pyflic==2.0.4 +pyflic==2.0.3 # homeassistant.components.futurenow pyfnip==0.2 @@ -1931,7 +1866,7 @@ pyforked-daapd==0.1.14 pyfreedompro==1.1.0 # homeassistant.components.fritzbox -pyfritzhome==0.6.12 +pyfritzhome==0.6.11 # homeassistant.components.ifttt pyfttt==0.3 @@ -1958,7 +1893,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==1.1.2 +pyhomeworks==0.0.6 # homeassistant.components.ialarm pyialarm==2.2.0 @@ -1976,7 +1911,7 @@ pyintesishome==1.8.0 pyipma==3.0.7 # homeassistant.components.ipp -pyipp==0.17.0 +pyipp==0.16.0 # homeassistant.components.iqvia pyiqvia==2022.04.0 @@ -1984,9 +1919,6 @@ pyiqvia==2022.04.0 # homeassistant.components.irish_rail_transport pyirishrail==0.0.2 -# homeassistant.components.iskra -pyiskra==0.1.14 - # homeassistant.components.iss pyiss==1.0.1 @@ -1997,7 +1929,7 @@ pyisy==3.1.14 pyitachip2ir==0.0.7 # homeassistant.components.jvc_projector -pyjvcprojector==1.1.2 +pyjvcprojector==1.0.11 # homeassistant.components.kaleidescape pykaleidescape==1.0.1 @@ -2026,9 +1958,6 @@ pykwb==0.0.8 # homeassistant.components.lacrosse pylacrosse==0.4 -# homeassistant.components.lamarzocco -pylamarzocco==1.2.3 - # homeassistant.components.lastfm pylast==5.1.0 @@ -2042,16 +1971,16 @@ pylgnetcast==0.3.9 pylibrespot-java==0.1.1 # homeassistant.components.litejet -pylitejet==0.6.3 +pylitejet==0.6.2 # homeassistant.components.litterrobot pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.21.1 +pylutron-caseta==0.20.0 # homeassistant.components.lutron -pylutron==0.2.16 +pylutron==0.2.13 # homeassistant.components.mailgun pymailgunner==1.4 @@ -2068,9 +1997,6 @@ pymelcloud==2.5.9 # homeassistant.components.meteoclimatic pymeteoclimatic==0.1.0 -# homeassistant.components.assist_pipeline -pymicro-vad==1.0.1 - # homeassistant.components.xiaomi_tv pymitv==1.4.3 @@ -2078,7 +2004,7 @@ pymitv==1.4.3 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.9 +pymodbus==3.6.8 # homeassistant.components.monoprice pymonoprice==0.4 @@ -2089,9 +2015,6 @@ pymsteams==0.1.12 # homeassistant.components.mysensors pymysensors==0.24.0 -# homeassistant.components.iron_os -pynecil==0.2.1 - # homeassistant.components.netgear pynetgear==0.10.10 @@ -2101,9 +2024,6 @@ pynetio==0.1.9.1 # homeassistant.components.nobo_hub pynobo==1.8.1 -# homeassistant.components.nordpool -pynordpool==0.2.2 - # homeassistant.components.nuki pynuki==1.6.3 @@ -2111,7 +2031,7 @@ 
pynuki==1.6.3 pynws[retry]==1.8.2 # homeassistant.components.nx584 -pynx584==0.8.2 +pynx584==0.5 # homeassistant.components.nzbget pynzbgetapi==0.2.0 @@ -2129,7 +2049,7 @@ pyombi==0.1.10 pyopenuv==2023.02.0 # homeassistant.components.openweathermap -pyopenweathermap==0.2.1 +pyopenweathermap==0.0.9 # homeassistant.components.opnsense pyopnsense==0.4.0 @@ -2141,7 +2061,7 @@ pyoppleio-legacy==1.0.8 pyosoenergyapi==1.1.4 # homeassistant.components.opentherm_gw -pyotgw==2.2.2 +pyotgw==2.2.0 # homeassistant.auth.mfa_modules.notify # homeassistant.auth.mfa_modules.totp @@ -2149,19 +2069,16 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.14.1 +pyoverkiz==1.13.11 # homeassistant.components.onewire pyownet==0.10.0.post1 -# homeassistant.components.palazzetti -pypalazzetti==0.1.11 - # homeassistant.components.elv pypca==0.0.7 # homeassistant.components.lcn -pypck==0.7.24 +pypck==0.7.17 # homeassistant.components.pjlink pypjlink2==1.2.1 @@ -2170,7 +2087,7 @@ pypjlink2==1.2.1 pyplaato==0.0.18 # homeassistant.components.point -pypoint==3.0.0 +pypoint==2.3.2 # homeassistant.components.profiler pyprof2calltree==1.4.5 @@ -2221,26 +2138,25 @@ pysabnzbd==1.1.1 pysaj==0.0.16 # homeassistant.components.schlage -pyschlage==2024.8.0 +pyschlage==2024.6.0 # homeassistant.components.sensibo -pysensibo==1.1.0 +pysensibo==1.0.36 # homeassistant.components.serial -pyserial-asyncio-fast==0.13 +# homeassistant.components.zha +pyserial-asyncio-fast==0.11 # homeassistant.components.acer_projector # homeassistant.components.crownstone # homeassistant.components.usb +# homeassistant.components.zha # homeassistant.components.zwave_js pyserial==3.5 # homeassistant.components.sesame pysesame2==1.0.1 -# homeassistant.components.seventeentrack -pyseventeentrack==1.0.1 - # homeassistant.components.sia pysiaalarm==3.1.1 @@ -2262,17 +2178,11 @@ pysmartapp==0.3.5 # homeassistant.components.smartthings pysmartthings==0.7.8 -# homeassistant.components.smarty -pysmarty2==0.10.1 - # homeassistant.components.edl21 pysml==0.0.12 -# homeassistant.components.smlight -pysmlight==0.1.3 - # homeassistant.components.snmp -pysnmp==6.2.6 +pysnmp-lextudio==6.0.11 # homeassistant.components.snooz pysnooz==0.8.6 @@ -2283,29 +2193,32 @@ pysoma==0.0.12 # homeassistant.components.spc pyspcwebgw==0.7.0 -# homeassistant.components.assist_pipeline -pyspeex-noise==1.0.2 - # homeassistant.components.squeezebox -pysqueezebox==0.10.0 +pysqueezebox==0.7.1 # homeassistant.components.stiebel_eltron pystiebeleltron==0.0.1.dev2 # homeassistant.components.suez_water -pysuezV2==1.3.1 +pysuez==0.2.0 # homeassistant.components.switchbee -pyswitchbee==1.8.3 +pyswitchbee==1.8.0 # homeassistant.components.tautulli pytautulli==23.1.1 +# homeassistant.components.tedee +pytedee-async==0.2.17 + +# homeassistant.components.tfiac +pytfiac==0.4 + # homeassistant.components.thinkingcleaner pythinkingcleaner==0.0.3 # homeassistant.components.motionmount -python-MotionMount==2.2.0 +python-MotionMount==2.0.0 # homeassistant.components.awair python-awair==0.2.4 @@ -2314,7 +2227,7 @@ python-awair==0.2.4 python-blockchain-api==0.0.2 # homeassistant.components.bsblan -python-bsblan==1.2.1 +python-bsblan==0.5.18 # homeassistant.components.clementine python-clementine-remote==1.0.1 @@ -2323,7 +2236,7 @@ python-clementine-remote==1.0.1 python-digitalocean==1.13.2 # homeassistant.components.ecobee -python-ecobee-api==0.2.20 +python-ecobee-api==0.2.18 # homeassistant.components.etherscan python-etherscan-api==0.0.3 @@ -2344,10 +2257,10 @@ python-gc100==1.0.3a0 
python-gitlab==1.6.0 # homeassistant.components.analytics_insights -python-homeassistant-analytics==0.8.0 +python-homeassistant-analytics==0.6.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.3.0 +python-homewizard-energy==v6.0.0 # homeassistant.components.hp_ilo python-hpilo==4.4.3 @@ -2362,16 +2275,13 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.7 - -# homeassistant.components.linkplay -python-linkplay==0.0.20 +python-kasa[speedups]==0.7.0.1 # homeassistant.components.lirc # python-lirc==1.2.3 # homeassistant.components.matter -python-matter-server==6.6.0 +python-matter-server==6.2.0b1 # homeassistant.components.xiaomi_miio python-miio==0.5.12 @@ -2383,10 +2293,10 @@ python-mpd2==3.1.1 python-mystrom==2.2.0 # homeassistant.components.swiss_public_transport -python-opendata-transport==0.5.0 +python-opendata-transport==0.4.0 # homeassistant.components.opensky -python-opensky==1.0.1 +python-opensky==1.0.0 # homeassistant.components.otbr # homeassistant.components.thread @@ -2402,7 +2312,7 @@ python-rabbitair==0.0.8 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==2.7.2 +python-roborock==2.5.0 # homeassistant.components.smarttub python-smarttub==0.0.36 @@ -2411,13 +2321,13 @@ python-smarttub==0.0.36 python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.17.7 +python-tado==0.17.6 # homeassistant.components.technove -python-technove==1.3.1 +python-technove==1.2.2 # homeassistant.components.telegram_bot -python-telegram-bot[socks]==21.5 +python-telegram-bot[socks]==21.0.1 # homeassistant.components.vlc python-vlc==3.0.18122 @@ -2434,9 +2344,6 @@ pytomorrowio==0.3.6 # homeassistant.components.touchline pytouchline==0.7 -# homeassistant.components.touchline_sl -pytouchlinesl==0.1.8 - # homeassistant.components.traccar # homeassistant.components.traccar_server pytraccar==2.1.1 @@ -2448,10 +2355,10 @@ pytradfri[async]==9.0.1 # homeassistant.components.trafikverket_ferry # homeassistant.components.trafikverket_train # homeassistant.components.trafikverket_weatherstation -pytrafikverket==1.0.0 +pytrafikverket==0.3.10 # homeassistant.components.v2c -pytrydan==0.8.0 +pytrydan==0.7.0 # homeassistant.components.usb pyudev==0.24.1 @@ -2463,13 +2370,13 @@ pyuptimerobot==22.2.0 # pyuserinput==0.1.11 # homeassistant.components.vera -pyvera==0.3.15 +pyvera==0.3.13 # homeassistant.components.versasense pyversasense==0.0.6 # homeassistant.components.vesync -pyvesync==2.1.12 +pyvesync==2.1.10 # homeassistant.components.vizio pyvizio==0.1.61 @@ -2498,14 +2405,11 @@ pywilight==0.0.74 # homeassistant.components.wiz pywizlight==0.5.14 -# homeassistant.components.wmspro -pywmspro==0.2.1 - # homeassistant.components.ws66i pyws66i==1.1 # homeassistant.components.xeoma -pyxeoma==1.4.2 +pyxeoma==1.4.1 # homeassistant.components.yardian pyyardian==1.1.1 @@ -2529,7 +2433,7 @@ qnapstats==0.4.0 quantum-gateway==0.0.8 # homeassistant.components.radio_browser -radios==0.3.2 +radios==0.3.1 # homeassistant.components.radiotherm radiotherm==2.1.0 @@ -2544,19 +2448,19 @@ rapt-ble==0.1.2 raspyrfm-client==1.2.8 # homeassistant.components.refoss -refoss-ha==1.2.4 +refoss-ha==1.2.1 # homeassistant.components.rainmachine regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.7 +renault-api==0.2.3 # homeassistant.components.renson renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.11.1 +reolink-aio==0.9.3 # homeassistant.components.idteck_prox 
rfk101py==0.0.1 @@ -2565,7 +2469,7 @@ rfk101py==0.0.1 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell==0.9.12 +ring-doorbell[listen]==0.8.11 # homeassistant.components.fleetgo ritassist==0.9.2 @@ -2597,8 +2501,11 @@ rpi-bad-power==0.1.0 # homeassistant.components.rtsp_to_webrtc rtsp-to-webrtc==0.5.1 +# homeassistant.components.russound_rio +russound-rio==1.0.0 + # homeassistant.components.russound_rnet -russound==0.2.0 +russound==0.1.9 # homeassistant.components.ruuvitag_ble ruuvitag-ble==0.1.2 @@ -2632,7 +2539,7 @@ sendgrid==6.8.2 # homeassistant.components.emulated_kasa # homeassistant.components.sense -sense-energy==0.13.3 +sense-energy==0.12.2 # homeassistant.components.sensirion_ble sensirion-ble==0.1.1 @@ -2641,16 +2548,13 @@ sensirion-ble==0.1.1 sensorpro-ble==0.5.3 # homeassistant.components.sensorpush -sensorpush-ble==1.7.1 - -# homeassistant.components.sensoterra -sensoterra==2.0.1 +sensorpush-ble==1.6.2 # homeassistant.components.sentry sentry-sdk==1.40.3 # homeassistant.components.sfr_box -sfrbox-api==0.0.11 +sfrbox-api==0.0.8 # homeassistant.components.sharkiq sharkiq==1.0.2 @@ -2661,9 +2565,6 @@ sharp_aquos_rc==0.3.2 # homeassistant.components.shodan shodan==1.28.0 -# homeassistant.components.simplefin -simplefin4py==0.0.18 - # homeassistant.components.sighthound simplehound==0.3 @@ -2674,10 +2575,7 @@ simplepush==2.2.3 simplisafe-python==2024.01.0 # homeassistant.components.sisyphus -sisyphus-control==3.1.4 - -# homeassistant.components.sky_remote -skyboxremote==0.0.6 +sisyphus-control==3.1.3 # homeassistant.components.slack slackclient==2.5.0 @@ -2689,19 +2587,19 @@ slixmpp==1.8.5 smart-meter-texas==0.5.5 # homeassistant.components.smhi -smhi-pkg==1.0.18 +smhi-pkg==1.0.16 # homeassistant.components.snapcast snapcast==2.3.6 # homeassistant.components.sonos -soco==0.30.6 +soco==0.30.4 # homeassistant.components.solaredge_local solaredge-local==0.2.3 # homeassistant.components.solarlog -solarlog_cli==0.3.2 +solarlog_cli==0.1.5 # homeassistant.components.solax solax==3.1.1 @@ -2718,8 +2616,11 @@ speak2mary==1.4.0 # homeassistant.components.speedtestdotnet speedtest-cli==2.1.3 +# homeassistant.components.spider +spiderpy==1.6.1 + # homeassistant.components.spotify -spotifyaio==0.8.8 +spotipy==2.23.0 # homeassistant.components.sql sqlparse==0.5.0 @@ -2753,6 +2654,7 @@ streamlabswater==1.0.1 # homeassistant.components.huawei_lte # homeassistant.components.solaredge +# homeassistant.components.thermoworks_smoke # homeassistant.components.traccar stringcase==1.2.0 @@ -2760,7 +2662,7 @@ stringcase==1.2.0 subarulink==0.7.11 # homeassistant.components.sunweg -sunweg==3.0.2 +sunweg==3.0.1 # homeassistant.components.surepetcare surepy==0.9.0 @@ -2769,19 +2671,19 @@ surepy==0.9.0 swisshydrodata==0.1.0 # homeassistant.components.switchbot_cloud -switchbot-api==2.2.1 +switchbot-api==2.1.0 # homeassistant.components.synology_srm synology-srm==0.2.0 # homeassistant.components.system_bridge -systembridgeconnector==4.1.5 +systembridgeconnector==4.0.3 # homeassistant.components.system_bridge -systembridgemodels==4.2.4 +systembridgemodels==4.0.4 # homeassistant.components.tailscale -tailscale==0.6.1 +tailscale==0.6.0 # homeassistant.components.tank_utility tank-utility==1.5.0 @@ -2796,7 +2698,7 @@ tellcore-net==0.4 tellcore-py==1.1.2 # homeassistant.components.tellduslive -tellduslive==0.10.12 +tellduslive==0.10.11 # homeassistant.components.lg_soundbar temescal==0.5 @@ -2807,10 +2709,9 @@ temperusb==1.6.1 # homeassistant.components.tensorflow # tensorflow==2.5.0 -# 
homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.8.4 +tesla-fleet-api==0.6.1 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2818,27 +2719,24 @@ tesla-powerwall==0.5.2 # homeassistant.components.tesla_wall_connector tesla-wall-connector==1.0.2 -# homeassistant.components.teslemetry -teslemetry-stream==0.4.2 - # homeassistant.components.tessie -tessie-api==0.1.1 +tessie-api==0.0.9 # homeassistant.components.tensorflow # tf-models-official==2.5.0 # homeassistant.components.thermobeacon -thermobeacon-ble==0.7.0 +thermobeacon-ble==0.6.2 # homeassistant.components.thermopro thermopro-ble==0.10.0 +# homeassistant.components.thermoworks_smoke +thermoworks-smoke==0.1.8 + # homeassistant.components.thingspeak thingspeak==1.0.0 -# homeassistant.components.lg_thinq -thinqconnect==1.0.0 - # homeassistant.components.tikteck tikteck==0.4 @@ -2864,22 +2762,19 @@ total-connect-client==2024.5 tp-connected==0.0.4 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.3 +tplink-omada-client==1.3.12 # homeassistant.components.transmission transmission-rpc==7.0.3 -# homeassistant.components.triggercmd -triggercmd==0.0.27 - # homeassistant.components.twinkly -ttls==1.8.3 +ttls==1.5.1 # homeassistant.components.thethingsnetwork -ttn_client==1.2.0 +ttn_client==1.0.0 # homeassistant.components.tuya -tuya-device-sharing-sdk==0.2.1 +tuya-device-sharing-sdk==0.1.9 # homeassistant.components.twentemilieu twentemilieu==2.0.1 @@ -2888,22 +2783,19 @@ twentemilieu==2.0.1 twilio==6.32.0 # homeassistant.components.twitch -twitchAPI==4.2.1 - -# homeassistant.components.monarch_money -typedmonarchmoney==0.3.1 +twitchAPI==4.0.0 # homeassistant.components.ukraine_alarm uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.4.0 +uiprotect==4.2.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 # homeassistant.components.unifiprotect -unifi-discovery==1.2.0 +unifi-discovery==1.1.8 # homeassistant.components.unifi_direct unifi_ap==0.0.1 @@ -2912,13 +2804,13 @@ unifi_ap==0.0.1 unifiled==0.11 # homeassistant.components.zha -universal-silabs-flasher==0.0.24 +universal-silabs-flasher==0.0.20 # homeassistant.components.upb -upb-lib==0.5.8 +upb-lib==0.5.7 # homeassistant.components.upcloud -upcloud-api==2.6.0 +upcloud-api==2.5.1 # homeassistant.components.huawei_lte # homeassistant.components.syncthru @@ -2926,7 +2818,7 @@ upcloud-api==2.6.0 url-normalize==1.4.3 # homeassistant.components.uvc -uvcclient==0.12.1 +uvcclient==0.11.0 # homeassistant.components.roborock vacuum-map-parser-roborock==0.1.2 @@ -2935,10 +2827,10 @@ vacuum-map-parser-roborock==0.1.2 vallox-websocket-api==5.3.0 # homeassistant.components.rdw -vehicle==2.2.2 +vehicle==2.2.1 # homeassistant.components.velbus -velbus-aio==2024.10.0 +velbus-aio==2024.5.1 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -2956,7 +2848,7 @@ volkszaehler==0.4.0 volvooncall==0.10.3 # homeassistant.components.verisure -vsure==2.6.7 +vsure==2.6.6 # homeassistant.components.vasttrafik vtjp==0.2.1 @@ -2972,7 +2864,7 @@ vultr==0.1.2 wakeonlan==2.1.0 # homeassistant.components.wallbox -wallbox==0.7.0 +wallbox==0.6.0 # homeassistant.components.folder_watcher watchdog==2.3.1 @@ -2981,19 +2873,13 @@ watchdog==2.3.1 waterfurnace==1.1.0 # homeassistant.components.weatherflow_cloud -weatherflow4py==1.0.6 - -# homeassistant.components.cisco_webex_teams -webexpythonsdk==2.0.1 - -# homeassistant.components.nasweb -webio-api==0.1.8 +weatherflow4py==0.2.21 
# homeassistant.components.webmin webmin-xmlrpc==0.0.2 -# homeassistant.components.weheat -weheat==2024.11.02 +# homeassistant.components.assist_pipeline +webrtc-noise-gain==1.2.3 # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -3008,10 +2894,10 @@ wiffi==1.1.2 wirelesstagpy==0.8.1 # homeassistant.components.wled -wled==0.20.2 +wled==0.18.0 # homeassistant.components.wolflink -wolf-comm==0.0.15 +wolf-comm==0.0.9 # homeassistant.components.wyoming wyoming==1.5.4 @@ -3020,14 +2906,15 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.33.0 +xiaomi-ble==0.30.0 # homeassistant.components.knx -xknx==3.3.0 +xknx==2.12.2 # homeassistant.components.knx -xknxproject==3.8.1 +xknxproject==3.7.1 +# homeassistant.components.bluesound # homeassistant.components.fritz # homeassistant.components.rest # homeassistant.components.startca @@ -3039,16 +2926,14 @@ xmltodict==0.13.0 xs1-api-client==3.0.0 # homeassistant.components.yale_smart_alarm -yalesmartalarmclient==0.4.3 +yalesmartalarmclient==0.3.9 # homeassistant.components.august -# homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.0 +yalexs-ble==2.4.3 # homeassistant.components.august -# homeassistant.components.yale -yalexs==8.10.0 +yalexs==6.4.1 # homeassistant.components.yeelight yeelight==0.7.14 @@ -3057,16 +2942,16 @@ yeelight==0.7.14 yeelightsunflower==0.0.10 # homeassistant.components.yolink -yolink-api==0.4.7 +yolink-api==0.4.4 # homeassistant.components.youless -youless-api==2.1.2 +youless-api==2.1.0 # homeassistant.components.youtube youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.11.04 +yt-dlp==2024.05.27 # homeassistant.components.zamg zamg==0.3.6 @@ -3075,25 +2960,40 @@ zamg==0.3.6 zengge==0.2 # homeassistant.components.zeroconf -zeroconf==0.136.0 +zeroconf==0.132.2 # homeassistant.components.zeversolar -zeversolar==0.3.2 +zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.37 +zha-quirks==0.0.117 # homeassistant.components.zhong_hong -zhong-hong-hvac==1.0.13 +zhong-hong-hvac==1.0.12 # homeassistant.components.ziggo_mediabox_xl ziggo-mediabox-xl==1.1.0 +# homeassistant.components.zha +zigpy-deconz==0.23.2 + +# homeassistant.components.zha +zigpy-xbee==0.20.1 + +# homeassistant.components.zha +zigpy-zigate==0.12.1 + +# homeassistant.components.zha +zigpy-znp==0.12.2 + +# homeassistant.components.zha +zigpy==0.64.1 + # homeassistant.components.zoneminder zm-py==0.5.4 # homeassistant.components.zwave_js -zwave-js-server-python==0.59.1 +zwave-js-server-python==0.57.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test.txt b/requirements_test.txt index 166fd965e2c..e2818b559ea 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -7,18 +7,17 @@ -c homeassistant/package_constraints.txt -r requirements_test_pre_commit.txt -astroid==3.3.5 -coverage==7.6.1 -freezegun==1.5.1 -license-expression==30.4.0 +astroid==3.2.2 +coverage==7.5.3 +freezegun==1.5.0 mock-open==1.4.0 -mypy-dev==1.14.0a2 -pre-commit==4.0.0 -pydantic==1.10.19 -pylint==3.3.1 +mypy-dev==1.11.0a9 +pre-commit==3.7.1 +pydantic==1.10.17 +pylint==3.2.4 pylint-per-file-ignores==1.3.2 -pipdeptree==2.23.4 -pytest-asyncio==0.24.0 +pipdeptree==2.19.0 +pytest-asyncio==0.23.6 pytest-aiohttp==1.0.5 pytest-cov==5.0.0 pytest-freezer==0.4.8 @@ -26,28 +25,29 @@ pytest-github-actions-annotate-failures==0.2.0 pytest-socket==0.7.0 pytest-sugar==1.0.0 pytest-timeout==2.3.1 -pytest-unordered==0.6.1 +pytest-unordered==0.6.0 
pytest-picked==0.5.0 pytest-xdist==3.6.1 -pytest==8.3.3 +pytest==8.2.0 requests-mock==1.12.1 respx==0.21.1 -syrupy==4.7.2 -tqdm==4.66.5 -types-aiofiles==24.1.0.20240626 +syrupy==4.6.1 +tqdm==4.66.4 +types-aiofiles==23.2.0.20240403 types-atomicwrites==1.4.5.1 types-croniter==2.0.0.20240423 -types-beautifulsoup4==4.12.0.20240907 -types-caldav==1.3.0.20240824 +types-beautifulsoup4==4.12.0.20240511 +types-caldav==1.3.0.20240331 types-chardet==0.1.5 types-decorator==5.1.8.20240310 types-paho-mqtt==1.6.0.20240321 -types-pillow==10.2.0.20240822 -types-protobuf==5.28.0.20240924 -types-psutil==6.0.0.20240901 -types-python-dateutil==2.9.0.20241003 +types-pillow==10.2.0.20240511 +types-protobuf==4.24.0.20240106 +types-psutil==5.9.5.20240511 +types-python-dateutil==2.9.0.20240316 types-python-slugify==8.0.2.20240310 -types-pytz==2024.2.0.20241003 -types-PyYAML==6.0.12.20240917 +types-pytz==2024.1.0.20240417 +types-PyYAML==6.0.12.20240311 types-requests==2.31.0.3 types-xmltodict==0.13.0.3 +uv==0.2.13 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b61e65f3c68..d98590b53e6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -4,7 +4,7 @@ -r requirements_test.txt # homeassistant.components.aemet -AEMET-OpenData==0.5.4 +AEMET-OpenData==0.5.2 # homeassistant.components.honeywell AIOSomecomfort==0.0.25 @@ -13,16 +13,13 @@ AIOSomecomfort==0.0.25 Adax-local==0.1.5 # homeassistant.components.doorbird -DoorBirdPy==3.0.8 +DoorBirdPy==2.1.0 # homeassistant.components.homekit -HAP-python==4.9.2 +HAP-python==4.9.1 # homeassistant.components.tasmota -HATasmota==0.9.2 - -# homeassistant.components.mastodon -Mastodon.py==1.8.1 +HATasmota==0.9.1 # homeassistant.components.doods # homeassistant.components.generic @@ -33,35 +30,35 @@ Mastodon.py==1.8.1 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==11.0.0 +Pillow==10.3.0 # homeassistant.components.plex -PlexAPI==4.15.16 +PlexAPI==4.15.13 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 # homeassistant.components.cast -PyChromecast==14.0.5 +PyChromecast==14.0.1 # homeassistant.components.flick_electric PyFlick==0.0.2 # homeassistant.components.flume -PyFlume==0.6.5 +PyFlume==0.8.7 # homeassistant.components.fronius PyFronius==0.7.3 # homeassistant.components.pyload -PyLoadAPI==1.3.2 +PyLoadAPI==1.2.0 # homeassistant.components.met_eireann PyMetEireann==2021.8.0 # homeassistant.components.met # homeassistant.components.norway_air -PyMetno==0.13.0 +PyMetno==0.12.0 # homeassistant.components.keymitt_ble PyMicroBot==0.0.17 @@ -81,7 +78,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.51.0 +PySwitchbot==0.48.0 # homeassistant.components.syncthru PySyncThru==0.7.10 @@ -91,10 +88,10 @@ PyTransportNSW==0.1.1 # homeassistant.components.camera # homeassistant.components.stream -PyTurboJPEG==1.7.5 +PyTurboJPEG==1.7.1 # homeassistant.components.vicare -PyViCare==2.35.0 +PyViCare==2.32.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 @@ -103,7 +100,7 @@ PyXiaomiGateway==0.14.3 RachioPy==1.1.0 # homeassistant.components.python_script -RestrictedPython==7.4 +RestrictedPython==7.0 # homeassistant.components.remember_the_milk RtmAPI==0.7.2 @@ -140,7 +137,7 @@ advantage-air==0.4.4 afsapi==0.2.7 # homeassistant.components.agent_dvr -agent-py==0.0.24 +agent-py==0.0.23 # homeassistant.components.geo_json_events aio-geojson-generic-client==0.4 @@ -158,50 +155,47 @@ 
aio-geojson-nsw-rfs-incidents==0.7 aio-geojson-usgs-earthquakes==0.3 # homeassistant.components.gdacs -aio-georss-gdacs==0.10 - -# homeassistant.components.acaia -aioacaia==0.1.6 +aio-georss-gdacs==0.9 # homeassistant.components.airq aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.10 +aioairzone-cloud==0.5.3 # homeassistant.components.airzone -aioairzone==0.9.6 +aioairzone==0.7.7 # homeassistant.components.ambient_network # homeassistant.components.ambient_station -aioambient==2024.08.0 +aioambient==2024.01.0 # homeassistant.components.apcupsd aioapcaccess==0.4.2 # homeassistant.components.aquacell -aioaquacell==0.2.0 +aioaquacell==0.1.7 # homeassistant.components.aseko_pool_live -aioaseko==1.0.0 +aioaseko==0.1.1 # homeassistant.components.asuswrt aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.10.3 +aioautomower==2024.6.1 # homeassistant.components.azure_devops -aioazuredevops==2.2.1 +aioazuredevops==2.1.1 # homeassistant.components.baf aiobafi6==0.9.0 # homeassistant.components.aws -aiobotocore==2.13.1 +aiobotocore==2.13.0 # homeassistant.components.comelit -aiocomelit==0.9.1 +aiocomelit==0.9.0 # homeassistant.components.dhcp aiodhcpwatcher==1.0.2 @@ -212,9 +206,6 @@ aiodiscover==2.1.0 # homeassistant.components.dnsip aiodns==3.2.0 -# homeassistant.components.duke_energy -aiodukeenergy==0.2.2 - # homeassistant.components.eafm aioeafm==0.1.2 @@ -231,14 +222,13 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==27.0.1 +aioesphomeapi==24.6.1 # homeassistant.components.flo aioflo==2021.11.0 # homeassistant.components.github -# homeassistant.components.iron_os -aiogithubapi==24.6.0 +aiogithubapi==23.11.0 # homeassistant.components.guardian aioguardian==2022.07.0 @@ -246,14 +236,11 @@ aioguardian==2022.07.0 # homeassistant.components.harmony aioharmony==0.2.10 -# homeassistant.components.hassio -aiohasupervisor==0.2.1 - # homeassistant.components.homekit_controller -aiohomekit==3.2.6 +aiohomekit==3.1.5 # homeassistant.components.hue -aiohue==4.7.3 +aiohue==4.7.1 # homeassistant.components.imap aioimaplib==1.1.0 @@ -265,10 +252,10 @@ aiokafka==0.10.0 aiolifx-effects==0.3.2 # homeassistant.components.lifx -aiolifx-themes==0.5.5 +aiolifx-themes==0.4.15 # homeassistant.components.lifx -aiolifx==1.1.1 +aiolifx==1.0.2 # homeassistant.components.livisi aiolivisi==0.0.19 @@ -277,10 +264,10 @@ aiolivisi==0.0.19 aiolookin==1.0.0 # homeassistant.components.lyric -aiolyric==2.0.1 +aiolyric==1.1.0 # homeassistant.components.mealie -aiomealie==0.9.3 +aiomealie==0.4.0 # homeassistant.components.modern_forms aiomodernforms==0.1.8 @@ -301,16 +288,16 @@ aionut==4.3.3 aiooncue==0.3.7 # homeassistant.components.openexchangerates -aioopenexchangerates==0.6.8 +aioopenexchangerates==0.4.0 # homeassistant.components.nmap_tracker -aiooui==0.1.7 +aiooui==0.1.6 # homeassistant.components.pegel_online aiopegelonline==0.0.10 # homeassistant.components.acmeda -aiopulse==0.4.6 +aiopulse==0.4.4 # homeassistant.components.purpleair aiopurpleair==2022.12.1 @@ -327,10 +314,10 @@ aiopvpc==4.2.2 aiopyarr==23.4.0 # homeassistant.components.qnap_qsw -aioqsw==0.4.1 +aioqsw==0.3.5 # homeassistant.components.rainforest_raven -aioraven==0.7.0 +aioraven==0.6.0 # homeassistant.components.recollect_waste aiorecollect==2023.09.0 @@ -339,10 +326,7 @@ aiorecollect==2023.09.0 aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed -aioruckus==0.42 - -# homeassistant.components.russound_rio -aiorussound==4.1.0 
+aioruckus==0.34 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -351,7 +335,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==12.0.1 +aioshelly==10.0.1 # homeassistant.components.skybell aioskybell==22.7.0 @@ -365,32 +349,26 @@ aiosolaredge==0.2.0 # homeassistant.components.steamist aiosteamist==1.0.0 -# homeassistant.components.cambridge_audio -aiostreammagic==2.8.5 - # homeassistant.components.switcher_kis -aioswitcher==4.4.0 +aioswitcher==3.4.3 # homeassistant.components.syncthing aiosyncthing==0.5.1 # homeassistant.components.tankerkoenig -aiotankerkoenig==0.4.2 - -# homeassistant.components.tedee -aiotedee==0.2.20 +aiotankerkoenig==0.4.1 # homeassistant.components.tractive -aiotractive==0.6.0 +aiotractive==0.5.6 # homeassistant.components.unifi -aiounifi==80 +aiounifi==79 # homeassistant.components.vlc_telnet -aiovlc==0.5.1 +aiovlc==0.3.2 # homeassistant.components.vodafone_station -aiovodafone==0.6.1 +aiovodafone==0.6.0 # homeassistant.components.waqi aiowaqi==3.1.0 @@ -399,22 +377,22 @@ aiowaqi==3.1.0 aiowatttime==0.1.1 # homeassistant.components.webostv -aiowebostv==0.4.2 +aiowebostv==0.4.0 # homeassistant.components.withings -aiowithings==3.1.3 +aiowithings==3.0.1 # homeassistant.components.yandex_transport -aioymaps==1.2.5 +aioymaps==1.2.2 # homeassistant.components.airgradient -airgradient==0.9.1 +airgradient==0.6.0 # homeassistant.components.airly airly==1.1.0 # homeassistant.components.airthings_ble -airthings-ble==0.9.2 +airthings-ble==0.9.0 # homeassistant.components.airthings airthings-cloud==0.2.0 @@ -423,88 +401,75 @@ airthings-cloud==0.2.0 airtouch4pyapi==1.0.5 # homeassistant.components.airtouch5 -airtouch5py==0.2.10 +airtouch5py==0.2.8 # homeassistant.components.amberelectric -amberelectric==1.1.1 +amberelectric==1.1.0 # homeassistant.components.androidtv androidtv[async]==0.0.73 # homeassistant.components.androidtv_remote -androidtvremote2==0.1.2 +androidtvremote2==0.1.1 # homeassistant.components.anova -anova-wifi==0.17.0 +anova-wifi==0.14.0 # homeassistant.components.anthemav anthemav==1.4.1 -# homeassistant.components.anthropic -anthropic==0.31.2 - # homeassistant.components.weatherkit -apple_weatherkit==1.1.3 +apple_weatherkit==1.1.2 # homeassistant.components.apprise -apprise==1.9.0 +apprise==1.8.0 # homeassistant.components.aprs aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==2.2.1 +apsystems-ez1==1.3.1 # homeassistant.components.aranet -aranet4==2.4.0 +aranet4==2.3.4 # homeassistant.components.arcam_fmj arcam-fmj==1.5.2 +# homeassistant.components.asterisk_mbox +asterisk_mbox==0.5.0 + # homeassistant.components.dlna_dmr # homeassistant.components.dlna_dms # homeassistant.components.samsungtv # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.41.0 +async-upnp-client==0.39.0 # homeassistant.components.arve -asyncarve==0.1.1 +asyncarve==0.0.9 # homeassistant.components.sleepiq asyncsleepiq==1.5.2 # homeassistant.components.aurora -auroranoaa==0.0.5 +auroranoaa==0.0.3 # homeassistant.components.aurora_abb_powerone aurorapy==0.2.7 -# homeassistant.components.autarco -autarco==3.1.0 - -# homeassistant.components.husqvarna_automower_ble -automower-ble==0.2.0 - -# homeassistant.components.generic -# homeassistant.components.stream -av==13.1.0 - # homeassistant.components.axis -axis==63 - -# homeassistant.components.fujitsu_fglair -ayla-iot-unofficial==1.4.3 +axis==61 # homeassistant.components.azure_event_hub 
azure-eventhub==5.11.1 # homeassistant.components.azure_data_explorer -azure-kusto-data[aio]==4.5.1 +azure-kusto-data[aio]==3.1.0 # homeassistant.components.azure_data_explorer -azure-kusto-ingest==4.5.1 +azure-kusto-ingest==3.1.0 # homeassistant.components.holiday babel==2.15.0 @@ -515,21 +480,24 @@ base36==0.1.1 # homeassistant.components.scrape beautifulsoup4==4.12.3 +# homeassistant.components.zha +bellows==0.39.1 + # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.4 +bimmer-connected[china]==0.15.3 # homeassistant.components.eq3btsmart # homeassistant.components.esphome -bleak-esphome==1.1.0 +bleak-esphome==1.0.0 # homeassistant.components.bluetooth -bleak-retry-connector==3.6.0 +bleak-retry-connector==3.5.0 # homeassistant.components.bluetooth -bleak==0.22.3 +bleak==0.22.2 # homeassistant.components.blebox -blebox-uniapi==2.5.0 +blebox-uniapi==2.4.2 # homeassistant.components.blink blinkpy==0.23.0 @@ -541,7 +509,7 @@ bluecurrent-api==1.2.3 bluemaestro-ble==0.2.3 # homeassistant.components.bluetooth -bluetooth-adapters==0.20.0 +bluetooth-adapters==0.19.3 # homeassistant.components.bluetooth bluetooth-auto-recovery==1.4.2 @@ -550,7 +518,7 @@ bluetooth-auto-recovery==1.4.2 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.20.0 +bluetooth-data-tools==1.19.3 # homeassistant.components.bond bond-async==0.2.1 @@ -558,17 +526,14 @@ bond-async==0.2.1 # homeassistant.components.bosch_shc boschshcpy==0.2.91 -# homeassistant.components.aws -botocore==1.34.131 - # homeassistant.components.bring -bring-api==0.9.1 +bring-api==0.7.1 # homeassistant.components.broadlink broadlink==0.19.0 # homeassistant.components.brother -brother==4.3.1 +brother==4.2.0 # homeassistant.components.brottsplatskartan brottsplatskartan==1.0.5 @@ -583,14 +548,11 @@ bthome-ble==3.9.1 buienradar==1.0.6 # homeassistant.components.dhcp -cached-ipaddress==0.8.0 +cached_ipaddress==0.3.0 # homeassistant.components.caldav caldav==1.3.9 -# homeassistant.components.coinbase -coinbase-advanced-py==1.2.2 - # homeassistant.components.coinbase coinbase==2.1.0 @@ -604,7 +566,7 @@ colorthief==0.2.1 construct==2.10.68 # homeassistant.components.utility_meter -cronsim==2.6 +croniter==2.0.2 # homeassistant.components.crownstone crownstone-cloud==1.4.11 @@ -622,13 +584,13 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.24.3 +dbus-fast==2.22.1 # homeassistant.components.debugpy -debugpy==1.8.6 +debugpy==1.8.1 # homeassistant.components.ecovacs -deebot-client==8.4.1 +deebot-client==8.0.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -642,7 +604,7 @@ deluge-client==1.10.2 demetriek==0.4.0 # homeassistant.components.denonavr -denonavr==1.0.0 +denonavr==0.11.6 # homeassistant.components.devialet devialet==1.4.5 @@ -653,9 +615,6 @@ devolo-home-control-api==0.18.3 # homeassistant.components.devolo_home_network devolo-plc-api==1.4.1 -# homeassistant.components.chacon_dio -dio-chacon-wifi-api==1.2.1 - # homeassistant.components.directv directv==0.4.0 @@ -669,7 +628,7 @@ dremel3dpy==2.1.1 dropmqttapi==1.0.3 # homeassistant.components.dsmr -dsmr-parser==1.4.2 +dsmr-parser==1.3.1 # homeassistant.components.dwd_weather_warnings dwdwfsapi==1.0.7 @@ -684,19 +643,16 @@ dynalite-panel==0.0.4 eagle100==0.1.1 # homeassistant.components.easyenergy -easyenergy==2.1.2 +easyenergy==2.1.1 # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 -# 
homeassistant.components.elevenlabs -elevenlabs==1.6.1 - # homeassistant.components.elgato elgato==5.1.2 # homeassistant.components.elkm1 -elkm1-lib==2.2.10 +elkm1-lib==2.2.7 # homeassistant.components.elmax elmax-api==0.0.5 @@ -711,16 +667,16 @@ emulated-roku==0.3.0 energyflip-client==0.2.2 # homeassistant.components.energyzero -energyzero==2.1.1 +energyzero==2.1.0 # homeassistant.components.enocean enocean==0.50 # homeassistant.components.environment_canada -env-canada==0.7.2 +env-canada==0.7.0 # homeassistant.components.season -ephem==4.1.6 +ephem==4.1.5 # homeassistant.components.epic_games_store epicstore-api==0.1.7 @@ -732,7 +688,7 @@ epion==0.0.3 epson-projector==0.5.1 # homeassistant.components.eq3btsmart -eq3btsmart==1.4.1 +eq3btsmart==1.1.9 # homeassistant.components.esphome esphome-dashboard-api==1.2.3 @@ -743,12 +699,6 @@ eternalegypt==0.0.16 # homeassistant.components.eufylife_ble eufylife-ble-client==0.1.8 -# homeassistant.components.evohome -evohome-async==0.4.20 - -# homeassistant.components.bryant_evolution -evolutionhttp==0.0.18 - # homeassistant.components.faa_delays faadelays==2023.9.1 @@ -777,14 +727,14 @@ fjaraskupan==2.3.0 flexit_bacnet==2.2.1 # homeassistant.components.flipr -flipr-api==1.6.1 +flipr-api==1.5.1 # homeassistant.components.flux_led flux-led==1.0.4 # homeassistant.components.homekit # homeassistant.components.recorder -fnv-hash-fast==1.0.2 +fnv-hash-fast==0.5.0 # homeassistant.components.foobot foobot_async==1.0.0 @@ -797,25 +747,22 @@ freebox-api==1.1.0 # homeassistant.components.fritz # homeassistant.components.fritzbox_callmonitor -fritzconnection[qr]==1.14.0 +fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.6.10 +fyta_cli==0.4.1 # homeassistant.components.google_translate gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.4 +gardena-bluetooth==1.4.2 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 # homeassistant.components.google -gcal-sync==6.2.0 - -# homeassistant.components.geniushub -geniushub-client==0.7.1 +gcal-sync==6.0.4 # homeassistant.components.geocaching geocachingapi==0.2.1 @@ -830,7 +777,7 @@ georss-generic-client==0.8 georss-ign-sismologia-client==0.8 # homeassistant.components.qld_bushfire -georss-qld-bushfire-alert-client==0.8 +georss-qld-bushfire-alert-client==0.7 # homeassistant.components.dlna_dmr # homeassistant.components.kef @@ -840,14 +787,11 @@ georss-qld-bushfire-alert-client==0.8 getmac==0.9.4 # homeassistant.components.gios -gios==5.0.0 +gios==4.0.0 # homeassistant.components.glances glances-api==0.8.0 -# homeassistant.components.go2rtc -go2rtc-client==0.1.1 - # homeassistant.components.goalzero goalzero==0.2.2 @@ -859,46 +803,37 @@ goodwe==0.3.6 google-api-python-client==2.71.0 # homeassistant.components.google_pubsub -google-cloud-pubsub==2.23.0 - -# homeassistant.components.google_cloud -google-cloud-speech==2.27.0 - -# homeassistant.components.google_cloud -google-cloud-texttospeech==2.17.2 +google-cloud-pubsub==2.13.11 # homeassistant.components.google_generative_ai_conversation -google-generativeai==0.8.2 +google-generativeai==0.6.0 # homeassistant.components.nest -google-nest-sdm==6.1.5 - -# homeassistant.components.google_photos -google-photos-library-api==0.12.1 +google-nest-sdm==4.0.5 # homeassistant.components.google_travel_time googlemaps==2.5.1 # homeassistant.components.tailwind -gotailwind==0.2.4 +gotailwind==0.2.3 # homeassistant.components.govee_ble -govee-ble==0.40.0 +govee-ble==0.31.3 # homeassistant.components.govee_light_local 
-govee-local-api==1.5.3 +govee-local-api==1.5.0 # homeassistant.components.gpsd gps3==0.33.3 # homeassistant.components.gree -greeclimate==2.1.0 +greeclimate==1.4.1 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 # homeassistant.components.pure_energie -gridnet==5.0.1 +gridnet==5.0.0 # homeassistant.components.growatt_server growattServer==1.5.0 @@ -907,13 +842,17 @@ growattServer==1.5.0 gspread==5.5.0 # homeassistant.components.profiler -guppy3==3.1.4.post1;python_version<'3.13' +guppy3==3.1.4.post1 # homeassistant.components.iaqualink h2==4.1.0 +# homeassistant.components.generic +# homeassistant.components.stream +ha-av==10.1.1 + # homeassistant.components.ffmpeg -ha-ffmpeg==3.2.2 +ha-ffmpeg==3.2.0 # homeassistant.components.iotawatt ha-iotawattpy==0.1.2 @@ -922,25 +861,25 @@ ha-iotawattpy==0.1.2 ha-philipsjs==3.2.2 # homeassistant.components.habitica -habitipy==0.3.3 +habitipy==0.3.1 # homeassistant.components.bluetooth -habluetooth==3.6.0 +habluetooth==3.1.3 # homeassistant.components.cloud -hass-nabucasa==0.84.0 +hass-nabucasa==0.81.1 # homeassistant.components.conversation -hassil==2.0.1 +hassil==1.7.1 # homeassistant.components.jewish_calendar hdate==0.10.9 # homeassistant.components.here_travel_time -here-routing==1.0.1 +here-routing==0.2.0 # homeassistant.components.here_travel_time -here-transit==1.2.1 +here-transit==1.2.0 # homeassistant.components.hko hko==0.3.2 @@ -953,28 +892,28 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.60 +holidays==0.51 # homeassistant.components.frontend -home-assistant-frontend==20241106.2 +home-assistant-frontend==20240626.2 # homeassistant.components.conversation -home-assistant-intents==2024.11.13 +home-assistant-intents==2024.6.26 # homeassistant.components.home_connect -homeconnect==0.8.0 +homeconnect==0.7.2 # homeassistant.components.homematicip_cloud -homematicip==1.1.2 +homematicip==1.1.1 # homeassistant.components.remember_the_milk httplib2==0.20.4 # homeassistant.components.huawei_lte -huawei-lte-api==1.10.0 +huawei-lte-api==1.7.3 # homeassistant.components.huum -huum==0.7.12 +huum==0.7.10 # homeassistant.components.hyperion hyperion-py==0.7.5 @@ -988,22 +927,22 @@ ibeacon-ble==1.2.0 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==8.2.0 +ical==8.0.1 # homeassistant.components.ping icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.6.2 +idasen-ha==2.6.1 # homeassistant.components.network ifaddr==0.2.0 # homeassistant.components.imgw_pib -imgw_pib==1.0.6 +imgw_pib==1.0.5 # homeassistant.components.incomfort -incomfort-client==0.6.3-1 +incomfort-client==0.6.2 # homeassistant.components.influxdb influxdb-client==1.24.0 @@ -1012,33 +951,26 @@ influxdb-client==1.24.0 influxdb==5.3.1 # homeassistant.components.inkbird -inkbird-ble==0.5.8 +inkbird-ble==0.5.6 # homeassistant.components.insteon insteon-frontend-home-assistant==0.5.0 # homeassistant.components.intellifire -intellifire4py==4.1.9 - -# homeassistant.components.iotty -iottycloud==0.2.1 +intellifire4py==2.2.2 # homeassistant.components.isal -isal==1.7.1 +isal==1.6.1 # homeassistant.components.gogogate2 ismartgate==5.0.1 -# homeassistant.components.israel_rail -israel-rail-api==0.1.2 - # homeassistant.components.abode -jaraco.abode==6.2.1 +jaraco.abode==5.1.2 # homeassistant.components.jellyfin jellyfin-apiclient-python==1.9.2 -# homeassistant.components.command_line # homeassistant.components.rest jsonpath==0.82.2 @@ -1049,25 
+981,22 @@ justnimbus==0.7.4 kegtron-ble==0.4.0 # homeassistant.components.knocki -knocki==0.3.5 +knocki==0.2.0 # homeassistant.components.knx -knx-frontend==2024.9.10.221729 +knx-frontend==2024.1.20.105944 # homeassistant.components.konnected konnected==1.2.0 # homeassistant.components.kraken -krakenex==2.2.2 +krakenex==2.1.0 # homeassistant.components.lacrosse_view -lacrosse-view==1.0.3 +lacrosse-view==1.0.1 # homeassistant.components.laundrify -laundrify-aio==1.2.2 - -# homeassistant.components.lcn -lcn-frontend==0.2.2 +laundrify-aio==1.1.2 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1078,9 +1007,6 @@ leaone-ble==0.1.0 # homeassistant.components.led_ble led-ble==1.0.2 -# homeassistant.components.lektrico -lektricowifi==0.0.43 - # homeassistant.components.foscam libpyfoscam==1.2.2 @@ -1093,6 +1019,12 @@ libsoundtouch==0.8 # homeassistant.components.linear_garage_door linear-garage-door==0.2.9 +# homeassistant.components.lamarzocco +lmcloud==1.1.13 + +# homeassistant.components.logi_circle +logi-circle==0.2.3 + # homeassistant.components.london_underground london-tube-status==0.5 @@ -1106,10 +1038,10 @@ luftdaten==0.7.4 lupupy==0.3.2 # homeassistant.components.scrape -lxml==5.3.0 +lxml==5.1.0 # homeassistant.components.matrix -matrix-nio==0.25.2 +matrix-nio==0.24.0 # homeassistant.components.maxcube maxcube-api==0.4.3 @@ -1133,7 +1065,7 @@ melnor-bluetooth==0.0.25 meteofrance-api==1.3.0 # homeassistant.components.mfi -mficlient==0.5.0 +mficlient==0.3.0 # homeassistant.components.xiaomi_miio micloud==0.5 @@ -1145,7 +1077,7 @@ microBeesPy==0.3.2 mill-local==0.3.0 # homeassistant.components.mill -millheater==0.12.2 +millheater==0.11.8 # homeassistant.components.minio minio==7.1.12 @@ -1157,29 +1089,26 @@ moat-ble==0.1.1 moehlenhoff-alpha2==1.3.1 # homeassistant.components.monzo -monzopy==1.4.2 +monzopy==1.3.0 # homeassistant.components.mopeka -mopeka-iot-ble==0.8.0 +mopeka-iot-ble==0.7.0 # homeassistant.components.motion_blinds -motionblinds==0.6.25 +motionblinds==0.6.23 # homeassistant.components.motionblinds_ble -motionblindsble==0.1.2 +motionblindsble==0.1.0 # homeassistant.components.motioneye motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==4.1.1.116.0 +mozart-api==3.4.1.8.6 # homeassistant.components.mullvad mullvad-api==1.0.0 -# homeassistant.components.music_assistant -music-assistant-client==1.0.5 - # homeassistant.components.tts mutagen==1.47.0 @@ -1196,31 +1125,28 @@ myuplink==0.6.0 ndms2-client==0.1.2 # homeassistant.components.ness_alarm -nessclient==1.1.2 +nessclient==1.0.0 # homeassistant.components.nmap_tracker netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==3.3.0 +nettigo-air-monitor==3.2.0 # homeassistant.components.nexia nexia==2.0.8 # homeassistant.components.nextcloud -nextcloudmonitor==1.5.1 +nextcloudmonitor==1.5.0 # homeassistant.components.discord nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==3.3.0 +nextdns==3.0.0 # homeassistant.components.nibe_heatpump -nibe==2.11.0 - -# homeassistant.components.nice_go -nice-go==0.3.10 +nibe==2.8.0 # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 @@ -1242,10 +1168,7 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==2.1.3 - -# homeassistant.components.nyt_games -nyt_games==0.4.4 +numpy==1.26.0 # homeassistant.components.google oauth2client==4.1.3 @@ -1254,10 +1177,10 @@ oauth2client==4.1.3 objgraph==3.5.0 # 
homeassistant.components.garages_amsterdam -odp-amsterdam==6.0.2 +odp-amsterdam==6.0.1 # homeassistant.components.ollama -ollama==0.3.3 +ollama-hass==0.1.7 # homeassistant.components.omnilogic omnilogic==0.4.5 @@ -1275,7 +1198,7 @@ open-garage==0.2.0 open-meteo==0.3.1 # homeassistant.components.openai_conversation -openai==1.35.7 +openai==1.3.8 # homeassistant.components.openerz openerz-api==0.3.0 @@ -1284,10 +1207,10 @@ openerz-api==0.3.0 openhomedevice==2.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.7 +openwebifpy==4.2.4 # homeassistant.components.opower -opower==0.8.6 +opower==0.4.7 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -1299,13 +1222,13 @@ ourgroceries==1.5.4 ovoenergy==2.0.0 # homeassistant.components.p1_monitor -p1monitor==3.1.0 +p1monitor==3.0.0 # homeassistant.components.mqtt paho-mqtt==1.6.1 # homeassistant.components.panasonic_viera -panasonic-viera==0.4.2 +panasonic-viera==0.3.6 # homeassistant.components.dunehd pdunehd==1.3.2 @@ -1329,7 +1252,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.5.0 +plugwise==0.38.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1344,7 +1267,7 @@ praw==7.5.0 prayer-times-calculator-offline==1.0.3 # homeassistant.components.prometheus -prometheus-client==0.21.0 +prometheus-client==0.17.1 # homeassistant.components.hardware # homeassistant.components.recorder @@ -1352,7 +1275,7 @@ prometheus-client==0.21.0 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.1.0 +psutil==5.9.8 # homeassistant.components.androidtv pure-python-adb[async]==0.3.0.dev0 @@ -1367,7 +1290,7 @@ pushover_complete==1.1.1 pvo==2.1.1 # homeassistant.components.aosmith -py-aosmith==1.0.10 +py-aosmith==1.0.8 # homeassistant.components.canary py-canary==0.5.4 @@ -1384,14 +1307,11 @@ py-dormakaba-dkey==1.0.5 # homeassistant.components.improv_ble py-improv-ble-client==1.0.3 -# homeassistant.components.madvr -py-madvr2==1.6.32 - # homeassistant.components.melissa py-melissa-climate==2.1.4 # homeassistant.components.nextbus -py-nextbusnext==2.0.5 +py-nextbusnext==1.0.2 # homeassistant.components.nightscout py-nightscout==1.2.2 @@ -1400,25 +1320,28 @@ py-nightscout==1.2.2 py-sucks==0.9.10 # homeassistant.components.synology_dsm -py-synologydsm-api==2.5.3 +py-synologydsm-api==2.4.4 + +# homeassistant.components.seventeentrack +py17track==2021.12.2 # homeassistant.components.hdmi_cec pyCEC==0.5.2 # homeassistant.components.control4 -pyControl4==1.2.0 +pyControl4==1.1.0 # homeassistant.components.duotecno -pyDuotecno==2024.10.1 +pyDuotecno==2024.5.1 # homeassistant.components.electrasmart -pyElectra==1.2.4 +pyElectra==1.2.3 # homeassistant.components.rfxtrx pyRFXtrx==0.31.1 # homeassistant.components.tibber -pyTibber==0.30.8 +pyTibber==0.28.2 # homeassistant.components.dlink pyW215==0.7.0 @@ -1437,7 +1360,7 @@ pyairnow==1.2.1 pyairvisual==2023.08.1 # homeassistant.components.aprilaire -pyaprilaire==0.7.4 +pyaprilaire==0.7.0 # homeassistant.components.asuswrt pyasuswrt==0.1.21 @@ -1446,10 +1369,10 @@ pyasuswrt==0.1.21 pyatag==0.3.5.3 # homeassistant.components.netatmo -pyatmo==8.1.0 +pyatmo==8.0.3 # homeassistant.components.apple_tv -pyatv==0.15.1 +pyatv==0.14.3 # homeassistant.components.aussie_broadband pyaussiebb==0.0.15 @@ -1460,14 +1383,11 @@ pybalboa==1.0.2 # homeassistant.components.blackbird pyblackbird==0.6 -# homeassistant.components.bluesound -pyblu==1.0.4 - # homeassistant.components.neato pybotvac==0.0.25 # homeassistant.components.braviatv -pybravia==0.3.4 
+pybravia==0.3.3 # homeassistant.components.cloudflare pycfdns==3.0.0 @@ -1476,31 +1396,25 @@ pycfdns==3.0.0 pycomfoconnect==0.5.1 # homeassistant.components.coolmaster -pycoolmasternet-async==0.2.2 - -# homeassistant.components.radio_browser -pycountry==24.6.1 +pycoolmasternet-async==0.1.5 # homeassistant.components.microsoft pycsspeechtts==1.0.8 # homeassistant.components.daikin -pydaikin==2.13.7 - -# homeassistant.components.deako -pydeako==0.5.4 +pydaikin==2.11.1 # homeassistant.components.deconz -pydeconz==118 +pydeconz==116 # homeassistant.components.dexcom pydexcom==0.2.3 # homeassistant.components.discovergy -pydiscovergy==3.0.2 +pydiscovergy==3.0.1 # homeassistant.components.hydrawise -pydrawise==2024.9.0 +pydrawise==2024.6.4 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 @@ -1509,7 +1423,7 @@ pydroid-ipcam==2.0.0 pyecoforest==0.4.0 # homeassistant.components.econet -pyeconet==0.1.23 +pyeconet==0.1.22 # homeassistant.components.ista_ecotrend pyecotrend-ista==3.3.1 @@ -1520,14 +1434,8 @@ pyefergy==22.5.0 # homeassistant.components.energenie_power_sockets pyegps==0.2.5 -# homeassistant.components.onkyo -pyeiscp==0.0.7 - -# homeassistant.components.emoncms -pyemoncms==0.1.1 - # homeassistant.components.enphase_envoy -pyenphase==1.22.0 +pyenphase==1.20.3 # homeassistant.components.everlights pyeverlights==0.1.0 @@ -1539,7 +1447,7 @@ pyevilgenius==2.0.0 pyezviz==0.2.1.2 # homeassistant.components.fibaro -pyfibaro==0.8.0 +pyfibaro==0.7.8 # homeassistant.components.fido pyfido==2.1.2 @@ -1548,7 +1456,7 @@ pyfido==2.1.2 pyfireservicerota==0.0.43 # homeassistant.components.flic -pyflic==2.0.4 +pyflic==2.0.3 # homeassistant.components.forked_daapd pyforked-daapd==0.1.14 @@ -1557,7 +1465,7 @@ pyforked-daapd==0.1.14 pyfreedompro==1.1.0 # homeassistant.components.fritzbox -pyfritzhome==0.6.12 +pyfritzhome==0.6.11 # homeassistant.components.ifttt pyfttt==0.3 @@ -1578,7 +1486,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==1.1.2 +pyhomeworks==0.0.6 # homeassistant.components.ialarm pyialarm==2.2.0 @@ -1593,14 +1501,11 @@ pyinsteon==1.6.3 pyipma==3.0.7 # homeassistant.components.ipp -pyipp==0.17.0 +pyipp==0.16.0 # homeassistant.components.iqvia pyiqvia==2022.04.0 -# homeassistant.components.iskra -pyiskra==0.1.14 - # homeassistant.components.iss pyiss==1.0.1 @@ -1608,7 +1513,7 @@ pyiss==1.0.1 pyisy==3.1.14 # homeassistant.components.jvc_projector -pyjvcprojector==1.1.2 +pyjvcprojector==1.0.11 # homeassistant.components.kaleidescape pykaleidescape==1.0.1 @@ -1631,9 +1536,6 @@ pykrakenapi==0.1.8 # homeassistant.components.kulersky pykulersky==0.5.2 -# homeassistant.components.lamarzocco -pylamarzocco==1.2.3 - # homeassistant.components.lastfm pylast==5.1.0 @@ -1647,16 +1549,16 @@ pylgnetcast==0.3.9 pylibrespot-java==0.1.1 # homeassistant.components.litejet -pylitejet==0.6.3 +pylitejet==0.6.2 # homeassistant.components.litterrobot pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.21.1 +pylutron-caseta==0.20.0 # homeassistant.components.lutron -pylutron==0.2.16 +pylutron==0.2.13 # homeassistant.components.mailgun pymailgunner==1.4 @@ -1670,14 +1572,11 @@ pymelcloud==2.5.9 # homeassistant.components.meteoclimatic pymeteoclimatic==0.1.0 -# homeassistant.components.assist_pipeline -pymicro-vad==1.0.1 - # homeassistant.components.mochad pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.9 +pymodbus==3.6.8 # homeassistant.components.monoprice pymonoprice==0.4 @@ -1685,18 +1584,12 @@ 
pymonoprice==0.4 # homeassistant.components.mysensors pymysensors==0.24.0 -# homeassistant.components.iron_os -pynecil==0.2.1 - # homeassistant.components.netgear pynetgear==0.10.10 # homeassistant.components.nobo_hub pynobo==1.8.1 -# homeassistant.components.nordpool -pynordpool==0.2.2 - # homeassistant.components.nuki pynuki==1.6.3 @@ -1704,7 +1597,7 @@ pynuki==1.6.3 pynws[retry]==1.8.2 # homeassistant.components.nx584 -pynx584==0.8.2 +pynx584==0.5 # homeassistant.components.nzbget pynzbgetapi==0.2.0 @@ -1719,7 +1612,7 @@ pyoctoprintapi==0.1.12 pyopenuv==2023.02.0 # homeassistant.components.openweathermap -pyopenweathermap==0.2.1 +pyopenweathermap==0.0.9 # homeassistant.components.opnsense pyopnsense==0.4.0 @@ -1728,7 +1621,7 @@ pyopnsense==0.4.0 pyosoenergyapi==1.1.4 # homeassistant.components.opentherm_gw -pyotgw==2.2.2 +pyotgw==2.2.0 # homeassistant.auth.mfa_modules.notify # homeassistant.auth.mfa_modules.totp @@ -1736,16 +1629,13 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.14.1 +pyoverkiz==1.13.11 # homeassistant.components.onewire pyownet==0.10.0.post1 -# homeassistant.components.palazzetti -pypalazzetti==0.1.11 - # homeassistant.components.lcn -pypck==0.7.24 +pypck==0.7.17 # homeassistant.components.pjlink pypjlink2==1.2.1 @@ -1754,7 +1644,7 @@ pypjlink2==1.2.1 pyplaato==0.0.18 # homeassistant.components.point -pypoint==3.0.0 +pypoint==2.3.2 # homeassistant.components.profiler pyprof2calltree==1.4.5 @@ -1790,20 +1680,22 @@ pyrympro==0.0.8 pysabnzbd==1.1.1 # homeassistant.components.schlage -pyschlage==2024.8.0 +pyschlage==2024.6.0 # homeassistant.components.sensibo -pysensibo==1.1.0 +pysensibo==1.0.36 + +# homeassistant.components.serial +# homeassistant.components.zha +pyserial-asyncio-fast==0.11 # homeassistant.components.acer_projector # homeassistant.components.crownstone # homeassistant.components.usb +# homeassistant.components.zha # homeassistant.components.zwave_js pyserial==3.5 -# homeassistant.components.seventeentrack -pyseventeentrack==1.0.1 - # homeassistant.components.sia pysiaalarm==3.1.1 @@ -1822,17 +1714,11 @@ pysmartapp==0.3.5 # homeassistant.components.smartthings pysmartthings==0.7.8 -# homeassistant.components.smarty -pysmarty2==0.10.1 - # homeassistant.components.edl21 pysml==0.0.12 -# homeassistant.components.smlight -pysmlight==0.1.3 - # homeassistant.components.snmp -pysnmp==6.2.6 +pysnmp-lextudio==6.0.11 # homeassistant.components.snooz pysnooz==0.8.6 @@ -1843,32 +1729,32 @@ pysoma==0.0.12 # homeassistant.components.spc pyspcwebgw==0.7.0 -# homeassistant.components.assist_pipeline -pyspeex-noise==1.0.2 - # homeassistant.components.squeezebox -pysqueezebox==0.10.0 +pysqueezebox==0.7.1 # homeassistant.components.suez_water -pysuezV2==1.3.1 +pysuez==0.2.0 # homeassistant.components.switchbee -pyswitchbee==1.8.3 +pyswitchbee==1.8.0 # homeassistant.components.tautulli pytautulli==23.1.1 +# homeassistant.components.tedee +pytedee-async==0.2.17 + # homeassistant.components.motionmount -python-MotionMount==2.2.0 +python-MotionMount==2.0.0 # homeassistant.components.awair python-awair==0.2.4 # homeassistant.components.bsblan -python-bsblan==1.2.1 +python-bsblan==0.5.18 # homeassistant.components.ecobee -python-ecobee-api==0.2.20 +python-ecobee-api==0.2.18 # homeassistant.components.fully_kiosk python-fullykiosk==0.0.14 @@ -1877,10 +1763,10 @@ python-fullykiosk==0.0.14 # python-gammu==3.2.4 # homeassistant.components.analytics_insights -python-homeassistant-analytics==0.8.0 +python-homeassistant-analytics==0.6.0 # 
homeassistant.components.homewizard -python-homewizard-energy==v6.3.0 +python-homewizard-energy==v6.0.0 # homeassistant.components.izone python-izone==1.2.9 @@ -1889,13 +1775,10 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.7 - -# homeassistant.components.linkplay -python-linkplay==0.0.20 +python-kasa[speedups]==0.7.0.1 # homeassistant.components.matter -python-matter-server==6.6.0 +python-matter-server==6.2.0b1 # homeassistant.components.xiaomi_miio python-miio==0.5.12 @@ -1907,10 +1790,10 @@ python-mpd2==3.1.1 python-mystrom==2.2.0 # homeassistant.components.swiss_public_transport -python-opendata-transport==0.5.0 +python-opendata-transport==0.4.0 # homeassistant.components.opensky -python-opensky==1.0.1 +python-opensky==1.0.0 # homeassistant.components.otbr # homeassistant.components.thread @@ -1923,7 +1806,7 @@ python-picnic-api==1.1.0 python-rabbitair==0.0.8 # homeassistant.components.roborock -python-roborock==2.7.2 +python-roborock==2.5.0 # homeassistant.components.smarttub python-smarttub==0.0.36 @@ -1932,13 +1815,13 @@ python-smarttub==0.0.36 python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.17.7 +python-tado==0.17.6 # homeassistant.components.technove -python-technove==1.3.1 +python-technove==1.2.2 # homeassistant.components.telegram_bot -python-telegram-bot[socks]==21.5 +python-telegram-bot[socks]==21.0.1 # homeassistant.components.tile pytile==2023.12.0 @@ -1946,9 +1829,6 @@ pytile==2023.12.0 # homeassistant.components.tomorrowio pytomorrowio==0.3.6 -# homeassistant.components.touchline_sl -pytouchlinesl==0.1.8 - # homeassistant.components.traccar # homeassistant.components.traccar_server pytraccar==2.1.1 @@ -1960,10 +1840,10 @@ pytradfri[async]==9.0.1 # homeassistant.components.trafikverket_ferry # homeassistant.components.trafikverket_train # homeassistant.components.trafikverket_weatherstation -pytrafikverket==1.0.0 +pytrafikverket==0.3.10 # homeassistant.components.v2c -pytrydan==0.8.0 +pytrydan==0.7.0 # homeassistant.components.usb pyudev==0.24.1 @@ -1972,10 +1852,10 @@ pyudev==0.24.1 pyuptimerobot==22.2.0 # homeassistant.components.vera -pyvera==0.3.15 +pyvera==0.3.13 # homeassistant.components.vesync -pyvesync==2.1.12 +pyvesync==2.1.10 # homeassistant.components.vizio pyvizio==0.1.61 @@ -2004,9 +1884,6 @@ pywilight==0.0.74 # homeassistant.components.wiz pywizlight==0.5.14 -# homeassistant.components.wmspro -pywmspro==0.2.1 - # homeassistant.components.ws66i pyws66i==1.1 @@ -2026,7 +1903,7 @@ qingping-ble==0.10.0 qnapstats==0.4.0 # homeassistant.components.radio_browser -radios==0.3.2 +radios==0.3.1 # homeassistant.components.radiotherm radiotherm==2.1.0 @@ -2035,25 +1912,25 @@ radiotherm==2.1.0 rapt-ble==0.1.2 # homeassistant.components.refoss -refoss-ha==1.2.4 +refoss-ha==1.2.1 # homeassistant.components.rainmachine regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.7 +renault-api==0.2.3 # homeassistant.components.renson renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.11.1 +reolink-aio==0.9.3 # homeassistant.components.rflink rflink==0.0.66 # homeassistant.components.ring -ring-doorbell==0.9.12 +ring-doorbell[listen]==0.8.11 # homeassistant.components.roku rokuecp==0.19.3 @@ -2099,7 +1976,7 @@ securetar==2024.2.1 # homeassistant.components.emulated_kasa # homeassistant.components.sense -sense-energy==0.13.3 +sense-energy==0.12.2 # homeassistant.components.sensirion_ble sensirion-ble==0.1.1 @@ -2108,23 +1985,17 @@ 
sensirion-ble==0.1.1 sensorpro-ble==0.5.3 # homeassistant.components.sensorpush -sensorpush-ble==1.7.1 - -# homeassistant.components.sensoterra -sensoterra==2.0.1 +sensorpush-ble==1.6.2 # homeassistant.components.sentry sentry-sdk==1.40.3 # homeassistant.components.sfr_box -sfrbox-api==0.0.11 +sfrbox-api==0.0.8 # homeassistant.components.sharkiq sharkiq==1.0.2 -# homeassistant.components.simplefin -simplefin4py==0.0.18 - # homeassistant.components.sighthound simplehound==0.3 @@ -2134,9 +2005,6 @@ simplepush==2.2.3 # homeassistant.components.simplisafe simplisafe-python==2024.01.0 -# homeassistant.components.sky_remote -skyboxremote==0.0.6 - # homeassistant.components.slack slackclient==2.5.0 @@ -2144,16 +2012,16 @@ slackclient==2.5.0 smart-meter-texas==0.5.5 # homeassistant.components.smhi -smhi-pkg==1.0.18 +smhi-pkg==1.0.16 # homeassistant.components.snapcast snapcast==2.3.6 # homeassistant.components.sonos -soco==0.30.6 +soco==0.30.4 # homeassistant.components.solarlog -solarlog_cli==0.3.2 +solarlog_cli==0.1.5 # homeassistant.components.solax solax==3.1.1 @@ -2170,8 +2038,11 @@ speak2mary==1.4.0 # homeassistant.components.speedtestdotnet speedtest-cli==2.1.3 +# homeassistant.components.spider +spiderpy==1.6.1 + # homeassistant.components.spotify -spotifyaio==0.8.8 +spotipy==2.23.0 # homeassistant.components.sql sqlparse==0.5.0 @@ -2202,6 +2073,7 @@ streamlabswater==1.0.1 # homeassistant.components.huawei_lte # homeassistant.components.solaredge +# homeassistant.components.thermoworks_smoke # homeassistant.components.traccar stringcase==1.2.0 @@ -2209,25 +2081,25 @@ stringcase==1.2.0 subarulink==0.7.11 # homeassistant.components.sunweg -sunweg==3.0.2 +sunweg==3.0.1 # homeassistant.components.surepetcare surepy==0.9.0 # homeassistant.components.switchbot_cloud -switchbot-api==2.2.1 +switchbot-api==2.1.0 # homeassistant.components.system_bridge -systembridgeconnector==4.1.5 +systembridgeconnector==4.0.3 # homeassistant.components.system_bridge -systembridgemodels==4.2.4 +systembridgemodels==4.0.4 # homeassistant.components.tailscale -tailscale==0.6.1 +tailscale==0.6.0 # homeassistant.components.tellduslive -tellduslive==0.10.12 +tellduslive==0.10.11 # homeassistant.components.lg_soundbar temescal==0.5 @@ -2235,10 +2107,9 @@ temescal==0.5 # homeassistant.components.temper temperusb==1.6.1 -# homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.8.4 +tesla-fleet-api==0.6.1 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2246,21 +2117,15 @@ tesla-powerwall==0.5.2 # homeassistant.components.tesla_wall_connector tesla-wall-connector==1.0.2 -# homeassistant.components.teslemetry -teslemetry-stream==0.4.2 - # homeassistant.components.tessie -tessie-api==0.1.1 +tessie-api==0.0.9 # homeassistant.components.thermobeacon -thermobeacon-ble==0.7.0 +thermobeacon-ble==0.6.2 # homeassistant.components.thermopro thermopro-ble==0.10.0 -# homeassistant.components.lg_thinq -thinqconnect==1.0.0 - # homeassistant.components.tilt_ble tilt-ble==0.2.3 @@ -2277,22 +2142,19 @@ toonapi==0.3.0 total-connect-client==2024.5 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.3 +tplink-omada-client==1.3.12 # homeassistant.components.transmission transmission-rpc==7.0.3 -# homeassistant.components.triggercmd -triggercmd==0.0.27 - # homeassistant.components.twinkly -ttls==1.8.3 +ttls==1.5.1 # homeassistant.components.thethingsnetwork -ttn_client==1.2.0 +ttn_client==1.0.0 # homeassistant.components.tuya 
-tuya-device-sharing-sdk==0.2.1 +tuya-device-sharing-sdk==0.1.9 # homeassistant.components.twentemilieu twentemilieu==2.0.1 @@ -2301,31 +2163,28 @@ twentemilieu==2.0.1 twilio==6.32.0 # homeassistant.components.twitch -twitchAPI==4.2.1 - -# homeassistant.components.monarch_money -typedmonarchmoney==0.3.1 +twitchAPI==4.0.0 # homeassistant.components.ukraine_alarm uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.4.0 +uiprotect==4.2.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 # homeassistant.components.unifiprotect -unifi-discovery==1.2.0 +unifi-discovery==1.1.8 # homeassistant.components.zha -universal-silabs-flasher==0.0.24 +universal-silabs-flasher==0.0.20 # homeassistant.components.upb -upb-lib==0.5.8 +upb-lib==0.5.7 # homeassistant.components.upcloud -upcloud-api==2.6.0 +upcloud-api==2.5.1 # homeassistant.components.huawei_lte # homeassistant.components.syncthru @@ -2333,7 +2192,7 @@ upcloud-api==2.6.0 url-normalize==1.4.3 # homeassistant.components.uvc -uvcclient==0.12.1 +uvcclient==0.11.0 # homeassistant.components.roborock vacuum-map-parser-roborock==0.1.2 @@ -2342,10 +2201,10 @@ vacuum-map-parser-roborock==0.1.2 vallox-websocket-api==5.3.0 # homeassistant.components.rdw -vehicle==2.2.2 +vehicle==2.2.1 # homeassistant.components.velbus -velbus-aio==2024.10.0 +velbus-aio==2024.5.1 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -2360,7 +2219,7 @@ voip-utils==0.1.0 volvooncall==0.10.3 # homeassistant.components.verisure -vsure==2.6.7 +vsure==2.6.6 # homeassistant.components.vulcan vulcan-api==2.3.2 @@ -2373,22 +2232,19 @@ vultr==0.1.2 wakeonlan==2.1.0 # homeassistant.components.wallbox -wallbox==0.7.0 +wallbox==0.6.0 # homeassistant.components.folder_watcher watchdog==2.3.1 # homeassistant.components.weatherflow_cloud -weatherflow4py==1.0.6 - -# homeassistant.components.nasweb -webio-api==0.1.8 +weatherflow4py==0.2.21 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 -# homeassistant.components.weheat -weheat==2024.11.02 +# homeassistant.components.assist_pipeline +webrtc-noise-gain==1.2.3 # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -2400,10 +2256,10 @@ whois==0.9.27 wiffi==1.1.2 # homeassistant.components.wled -wled==0.20.2 +wled==0.18.0 # homeassistant.components.wolflink -wolf-comm==0.0.15 +wolf-comm==0.0.9 # homeassistant.components.wyoming wyoming==1.5.4 @@ -2412,14 +2268,15 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.33.0 +xiaomi-ble==0.30.0 # homeassistant.components.knx -xknx==3.3.0 +xknx==2.12.2 # homeassistant.components.knx -xknxproject==3.8.1 +xknxproject==3.7.1 +# homeassistant.components.bluesound # homeassistant.components.fritz # homeassistant.components.rest # homeassistant.components.startca @@ -2428,46 +2285,59 @@ xknxproject==3.8.1 xmltodict==0.13.0 # homeassistant.components.yale_smart_alarm -yalesmartalarmclient==0.4.3 +yalesmartalarmclient==0.3.9 # homeassistant.components.august -# homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.0 +yalexs-ble==2.4.3 # homeassistant.components.august -# homeassistant.components.yale -yalexs==8.10.0 +yalexs==6.4.1 # homeassistant.components.yeelight yeelight==0.7.14 # homeassistant.components.yolink -yolink-api==0.4.7 +yolink-api==0.4.4 # homeassistant.components.youless -youless-api==2.1.2 +youless-api==2.1.0 # homeassistant.components.youtube youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.11.04 +yt-dlp==2024.05.27 # 
homeassistant.components.zamg zamg==0.3.6 # homeassistant.components.zeroconf -zeroconf==0.136.0 +zeroconf==0.132.2 # homeassistant.components.zeversolar -zeversolar==0.3.2 +zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.37 +zha-quirks==0.0.117 + +# homeassistant.components.zha +zigpy-deconz==0.23.2 + +# homeassistant.components.zha +zigpy-xbee==0.20.1 + +# homeassistant.components.zha +zigpy-zigate==0.12.1 + +# homeassistant.components.zha +zigpy-znp==0.12.2 + +# homeassistant.components.zha +zigpy==0.64.1 # homeassistant.components.zwave_js -zwave-js-server-python==0.59.1 +zwave-js-server-python==0.57.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 23f584dd0de..a7e5c20d86c 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.7.3 +ruff==0.4.9 yamllint==1.35.1 diff --git a/script/amazon_polly.py b/script/amazon_polly.py deleted file mode 100644 index fcb0a4b7987..00000000000 --- a/script/amazon_polly.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Helper script to update supported languages for Amazone Polly text-to-speech (TTS). - -N.B. This script requires AWS credentials. -""" - -from dataclasses import dataclass -from pathlib import Path -from typing import Self - -import boto3 - -from .hassfest.serializer import format_python_namespace - - -@dataclass(frozen=True) -class AmazonPollyVoice: - """Amazon Polly Voice.""" - - id: str - name: str - gender: str - language_name: str - language_code: str - supported_engines: set[str] - additional_language_codes: set[str] - - @classmethod - def validate(cls, model: dict[str, str | list[str]]) -> Self: - """Validate data model.""" - return cls( - id=model["Id"], - name=model["Name"], - gender=model["Gender"], - language_name=model["LanguageName"], - language_code=model["LanguageCode"], - supported_engines=set(model["SupportedEngines"]), - additional_language_codes=set(model.get("AdditionalLanguageCodes", [])), - ) - - -def get_all_voices(client: boto3.client) -> list[AmazonPollyVoice]: - """Get list of all supported voices from Amazon Polly.""" - response = client.describe_voices() - return [AmazonPollyVoice.validate(voice) for voice in response["Voices"]] - - -supported_regions = set( - boto3.session.Session().get_available_regions(service_name="polly") -) - -polly_client = boto3.client(service_name="polly", region_name="us-east-1") -voices = get_all_voices(polly_client) -supported_voices = set({v.id for v in voices}) -supported_engines = set().union(*[v.supported_engines for v in voices]) - -Path("homeassistant/generated/amazon_polly.py").write_text( - format_python_namespace( - { - "SUPPORTED_VOICES": supported_voices, - "SUPPORTED_REGIONS": supported_regions, - "SUPPORTED_ENGINES": supported_engines, - }, - annotations={ - "SUPPORTED_VOICES": "Final[set[str]]", - "SUPPORTED_REGIONS": "Final[set[str]]", - "SUPPORTED_ENGINES": "Final[set[str]]", - }, - generator="script.amazon_polly", - ) -) diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 7d53741c661..434b4d0071f 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -6,6 +6,7 @@ from __future__ import annotations import difflib import importlib from operator import itemgetter +import os from pathlib import Path import pkgutil import re @@ -14,7 +15,7 @@ import tomllib 
from typing import Any from homeassistant.util.yaml.loader import load_yaml -from script.hassfest.model import Config, Integration +from script.hassfest.model import Integration # Requirements which can't be installed on all systems because they rely on additional # system packages. Requirements listed in EXCLUDED_REQUIREMENTS_ALL will be commented-out @@ -58,16 +59,8 @@ INCLUDED_REQUIREMENTS_WHEELS = { # will be included in requirements_all_{action}.txt OVERRIDDEN_REQUIREMENTS_ACTIONS = { - "pytest": { - "exclude": set(), - "include": {"python-gammu"}, - "markers": {}, - }, - "wheels_aarch64": { - "exclude": set(), - "include": INCLUDED_REQUIREMENTS_WHEELS, - "markers": {}, - }, + "pytest": {"exclude": set(), "include": {"python-gammu"}}, + "wheels_aarch64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, # Pandas has issues building on armhf, it is expected they # will drop the platform in the near future (they consider it # "flimsy" on 386). The following packages depend on pandas, @@ -75,23 +68,10 @@ OVERRIDDEN_REQUIREMENTS_ACTIONS = { "wheels_armhf": { "exclude": {"env-canada", "noaa-coops", "pyezviz", "pykrakenapi"}, "include": INCLUDED_REQUIREMENTS_WHEELS, - "markers": {}, - }, - "wheels_armv7": { - "exclude": set(), - "include": INCLUDED_REQUIREMENTS_WHEELS, - "markers": {}, - }, - "wheels_amd64": { - "exclude": set(), - "include": INCLUDED_REQUIREMENTS_WHEELS, - "markers": {}, - }, - "wheels_i386": { - "exclude": set(), - "include": INCLUDED_REQUIREMENTS_WHEELS, - "markers": {}, }, + "wheels_armv7": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, + "wheels_amd64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, + "wheels_i386": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, } IGNORE_PIN = ("colorlog>2.1,<3", "urllib3") @@ -102,8 +82,8 @@ URL_PIN = ( ) -CONSTRAINT_PATH = ( - Path(__file__).parent.parent / "homeassistant" / "package_constraints.txt" +CONSTRAINT_PATH = os.path.join( + os.path.dirname(__file__), "../homeassistant/package_constraints.txt" ) CONSTRAINT_BASE = """ # Constrain pycryptodome to avoid vulnerability @@ -117,9 +97,14 @@ httplib2>=0.19.0 # gRPC is an implicit dependency that we want to make explicit so we manage # upgrades intentionally. It is a large package to build from source and we # want to ensure we have wheels built. -grpcio==1.67.1 -grpcio-status==1.67.1 -grpcio-reflection==1.67.1 +grpcio==1.59.0 +grpcio-status==1.59.0 +grpcio-reflection==1.59.0 + +# libcst >=0.4.0 requires a newer Rust than we currently have available, +# thus our wheels builds fail. This pins it to the last working version, +# which at this point satisfies our needs. +libcst==0.3.23 # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -135,11 +120,16 @@ enum34==1000000000.0.0 typing==1000000000.0.0 uuid==1000000000.0.0 +# regex causes segfault with version 2021.8.27 +# https://bitbucket.org/mrabarnett/mrab-regex/issues/421/2021827-results-in-fatal-python-error +# This is fixed in 2021.8.28 +regex==2021.8.28 + # httpx requires httpcore, and httpcore requires anyio and h11, but the version constraints on # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. 
-anyio==4.6.2.post1
+anyio==4.4.0
h11==0.14.0
httpcore==1.0.5
@@ -148,8 +138,13 @@ httpcore==1.0.5
hyperframe>=5.2.0
# Ensure we run compatible with musllinux build env
-numpy==2.1.3
-pandas~=2.2.3
+numpy==1.26.0
+
+# Prevent dependency conflicts between sisyphus-control and aioambient
+# until upper bounds for sisyphus-control have been updated
+# https://github.com/jkeljo/sisyphus-control/issues/6
+python-engineio>=3.13.1,<4.0
+python-socketio>=4.6.0,<5.0
# Constrain multidict to avoid typing issues
# https://github.com/home-assistant/core/pull/67046
@@ -160,10 +155,7 @@ backoff>=2.0
# Required to avoid breaking (#101042).
# v2 has breaking changes (#99218).
-pydantic==1.10.19
-
-# Required for Python 3.12.4 compatibility (#119223).
-mashumaro>=3.13.1
+pydantic==1.10.17
# Breaks asyncio
# https://github.com/pubnub/python/issues/130
@@ -179,7 +171,7 @@ pyOpenSSL>=24.0.0
# protobuf must be in package constraints for the wheel
# builder to build binary wheels
-protobuf==5.28.3
+protobuf==4.25.1
# faust-cchardet: Ensure we have a version we can build wheels
# 2.1.18 is the first version that works with our wheel builder
@@ -193,6 +185,9 @@ websockets>=11.0.1
# pysnmplib is no longer maintained and does not work with newer
# python
pysnmplib==1000000000.0.0
+# pysnmp is no longer maintained and does not work with newer
+# python
+pysnmp==1000000000.0.0
# The get-mac package has been replaced with getmac. Installing get-mac alongside getmac
# breaks getmac due to them both sharing the same python package name inside 'getmac'.
@@ -201,21 +196,24 @@ get-mac==1000000000.0.0
# We want to skip the binary wheels for the 'charset-normalizer' packages.
# They are built with mypyc, but cause issues with our wheel builder.
# In order to do so, we need to constrain the version.
-charset-normalizer==3.4.0
+charset-normalizer==3.2.0
# dacite: Ensure we have a version that is able to handle type unions for
-# NAM, Brother, and GIOS.
+# Roborock, NAM, Brother, and GIOS.
dacite>=1.7.0
-# chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x
-chacha20poly1305-reuseable>=0.13.0
+# Musl wheels for pandas 2.2.0 cannot be built for any architecture.
+pandas==2.1.4
+
+# chacha20poly1305-reuseable==0.12.0 is incompatible with cryptography==42.0.x
+chacha20poly1305-reuseable>=0.12.1
# pycountry<23.12.11 imports setuptools at run time
# https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39
pycountry>=23.12.11
-# scapy==2.6.0 causes CI failures due to a race condition
-scapy>=2.6.1
+# scapy<2.5.0 will not work with python3.12
+scapy>=2.5.0
# tuf isn't updated to deal with breaking changes in securesystemslib==1.0.
# Only tuf>=4 includes a constraint to <1.0.
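Editor's note: the `1000000000.0.0` pins in CONSTRAINT_BASE above (pycrypto, pysnmplib, pysnmp, get-mac, ...) work because no such release exists on PyPI, so any install that is resolved against homeassistant/package_constraints.txt (for example via pip's real `-c`/`--constraint` flag) refuses those packages outright. The following is a minimal illustrative sketch of that effect, not code from this PR; it assumes the third-party `packaging` library is installed, and the candidate version strings are made-up examples.

from packaging.specifiers import SpecifierSet

# The impossible pin used in CONSTRAINT_BASE for abandoned packages.
blocked = SpecifierSet("==1000000000.0.0")

# No published release can satisfy this specifier, so a resolver that
# honours the constraints file can never pick the package.
for candidate in ("1.4.1", "2.6.1", "0.9.0"):  # hypothetical versions
    print(candidate, candidate in blocked)  # prints False for each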
@@ -224,10 +222,6 @@ tuf>=4.0.0 # https://github.com/jd/tenacity/issues/471 tenacity!=8.4.0 - -# 5.0.0 breaks Timeout as a context manager -# TypeError: 'Timeout' object does not support the context manager protocol -async-timeout==4.0.3 """ GENERATED_MESSAGE = ( @@ -278,7 +272,8 @@ def explore_module(package: str, explore_children: bool) -> list[str]: def core_requirements() -> list[str]: """Gather core requirements out of pyproject.toml.""" - data = tomllib.loads(Path("pyproject.toml").read_text()) + with open("pyproject.toml", "rb") as fp: + data = tomllib.load(fp) dependencies: list[str] = data["project"]["dependencies"] return dependencies @@ -291,9 +286,7 @@ def gather_recursive_requirements( seen = set() seen.add(domain) - integration = Integration( - Path(f"homeassistant/components/{domain}"), _get_hassfest_config() - ) + integration = Integration(Path(f"homeassistant/components/{domain}")) integration.load_manifest() reqs = {x for x in integration.requirements if x not in CONSTRAINT_BASE} for dep_domain in integration.dependencies: @@ -332,10 +325,6 @@ def process_action_requirement(req: str, action: str) -> str: return req if normalized_package_name in EXCLUDED_REQUIREMENTS_ALL: return f"# {req}" - if markers := OVERRIDDEN_REQUIREMENTS_ACTIONS[action]["markers"].get( - normalized_package_name, None - ): - return f"{req};{markers}" return req @@ -363,8 +352,7 @@ def gather_requirements_from_manifests( errors: list[str], reqs: dict[str, list[str]] ) -> None: """Gather all of the requirements from manifests.""" - config = _get_hassfest_config() - integrations = Integration.load_dir(config.core_integrations_path, config) + integrations = Integration.load_dir(Path("homeassistant/components")) for domain in sorted(integrations): integration = integrations[domain] @@ -553,7 +541,7 @@ def diff_file(filename: str, content: str) -> list[str]: def main(validate: bool, ci: bool) -> int: """Run the script.""" - if not Path("requirements_all.txt").is_file(): + if not os.path.isfile("requirements_all.txt"): print("Run this from HA root dir") return 1 @@ -612,17 +600,6 @@ def main(validate: bool, ci: bool) -> int: return 0 -def _get_hassfest_config() -> Config: - """Get hassfest config.""" - return Config( - root=Path().absolute(), - specific_integrations=None, - action="validate", - requirements=True, - core_integrations_path=Path("homeassistant/components"), - ) - - if __name__ == "__main__": _VAL = sys.argv[-1] == "validate" _CI = sys.argv[-1] == "ci" diff --git a/script/hassfest/__main__.py b/script/hassfest/__main__.py index f0b9ad25dd0..bcb19a14c37 100644 --- a/script/hassfest/__main__.py +++ b/script/hassfest/__main__.py @@ -4,7 +4,7 @@ from __future__ import annotations import argparse from operator import attrgetter -from pathlib import Path +import pathlib import sys from time import monotonic @@ -14,6 +14,7 @@ from . 
import ( codeowners, config_flow, config_schema, + coverage, dependencies, dhcp, docker, @@ -52,6 +53,7 @@ INTEGRATION_PLUGINS = [ config_flow, # This needs to run last, after translations are processed ] HASS_PLUGINS = [ + coverage, docker, mypy_config, metadata, @@ -63,9 +65,9 @@ ALL_PLUGIN_NAMES = [ ] -def valid_integration_path(integration_path: Path | str) -> Path: +def valid_integration_path(integration_path: pathlib.Path | str) -> pathlib.Path: """Test if it's a valid integration.""" - path = Path(integration_path) + path = pathlib.Path(integration_path) if not path.is_dir(): raise argparse.ArgumentTypeError(f"{integration_path} is not a directory.") @@ -107,12 +109,6 @@ def get_config() -> Config: default=ALL_PLUGIN_NAMES, help="Comma-separate list of plugins to run. Valid plugin names: %(default)s", ) - parser.add_argument( - "--core-integrations-path", - type=Path, - default=Path("homeassistant/components"), - help="Path to core integrations", - ) parsed = parser.parse_args() if parsed.action is None: @@ -123,16 +119,18 @@ def get_config() -> Config: "Generate is not allowed when limiting to specific integrations" ) - if not parsed.integration_path and not Path("requirements_all.txt").is_file(): + if ( + not parsed.integration_path + and not pathlib.Path("requirements_all.txt").is_file() + ): raise RuntimeError("Run from Home Assistant root") return Config( - root=Path().absolute(), + root=pathlib.Path(".").absolute(), specific_integrations=parsed.integration_path, action=parsed.action, requirements=parsed.requirements, plugins=set(parsed.plugins), - core_integrations_path=parsed.core_integrations_path, ) @@ -150,12 +148,12 @@ def main() -> int: integrations = {} for int_path in config.specific_integrations: - integration = Integration(int_path, config) + integration = Integration(int_path) integration.load_manifest() integrations[integration.domain] = integration else: - integrations = Integration.load_dir(config.core_integrations_path, config) + integrations = Integration.load_dir(pathlib.Path("homeassistant/components")) plugins += HASS_PLUGINS for plugin in plugins: diff --git a/script/hassfest/bluetooth.py b/script/hassfest/bluetooth.py index 94f25588632..49480d1ed02 100644 --- a/script/hassfest/bluetooth.py +++ b/script/hassfest/bluetooth.py @@ -34,15 +34,19 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - if bluetooth_path.read_text() != content: - config.add_error( - "bluetooth", - "File bluetooth.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + with open(str(bluetooth_path)) as fp: + current = fp.read() + if current != content: + config.add_error( + "bluetooth", + "File bluetooth.py is not up to date. 
Run python3 -m script.hassfest", + fixable=True, + ) + return def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate bluetooth file.""" bluetooth_path = config.root / "homeassistant/generated/bluetooth.py" - bluetooth_path.write_text(f"{config.cache['bluetooth']}") + with open(str(bluetooth_path), "w") as fp: + fp.write(f"{config.cache['bluetooth']}") diff --git a/script/hassfest/brand.py b/script/hassfest/brand.py index 6139e12393e..fe47d31067a 100644 --- a/script/hassfest/brand.py +++ b/script/hassfest/brand.py @@ -18,8 +18,6 @@ BRAND_SCHEMA = vol.Schema( } ) -BRAND_EXCEPTIONS = ["u_tec"] - def _validate_brand( brand: Brand, integrations: dict[str, Integration], config: Config @@ -40,14 +38,10 @@ def _validate_brand( f"Domain '{brand.domain}' does not match file name {brand.path.name}", ) - if ( - len(brand.integrations) < 2 - and not brand.iot_standards - and brand.domain not in BRAND_EXCEPTIONS - ): + if not brand.integrations and not brand.iot_standards: config.add_error( "brand", - f"{brand.path.name}: At least two integrations or " + f"{brand.path.name}: At least one of integrations or " "iot_standards must be non-empty", ) diff --git a/script/hassfest/codeowners.py b/script/hassfest/codeowners.py index 73ea8d02520..04150836dd5 100644 --- a/script/hassfest/codeowners.py +++ b/script/hassfest/codeowners.py @@ -98,15 +98,18 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - if codeowners_path.read_text() != content + "\n": - config.add_error( - "codeowners", - "File CODEOWNERS is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + with open(str(codeowners_path)) as fp: + if fp.read().strip() != content: + config.add_error( + "codeowners", + "File CODEOWNERS is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) + return def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate CODEOWNERS.""" codeowners_path = config.root / "CODEOWNERS" - codeowners_path.write_text(f"{config.cache['codeowners']}\n") + with open(str(codeowners_path), "w") as fp: + fp.write(f"{config.cache['codeowners']}\n") diff --git a/script/hassfest/config_flow.py b/script/hassfest/config_flow.py index 83d406a0036..382e77bde74 100644 --- a/script/hassfest/config_flow.py +++ b/script/hassfest/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import json +import pathlib from typing import Any from .brand import validate as validate_brands @@ -215,31 +216,36 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - brands = Brand.load_dir(config.root / "homeassistant/brands", config) + brands = Brand.load_dir(pathlib.Path(config.root / "homeassistant/brands"), config) validate_brands(brands, integrations, config) - if config_flow_path.read_text() != content: - config.add_error( - "config_flow", - "File config_flows.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + with open(str(config_flow_path)) as fp: + if fp.read() != content: + config.add_error( + "config_flow", + "File config_flows.py is not up to date. " + "Run python3 -m script.hassfest", + fixable=True, + ) config.cache["integrations"] = content = _generate_integrations( brands, integrations, config ) - if integrations_path.read_text() != content + "\n": - config.add_error( - "config_flow", - "File integrations.json is not up to date. 
" - "Run python3 -m script.hassfest", - fixable=True, - ) + with open(str(integrations_path)) as fp: + if fp.read() != content + "\n": + config.add_error( + "config_flow", + "File integrations.json is not up to date. " + "Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate config flow file.""" config_flow_path = config.root / "homeassistant/generated/config_flows.py" integrations_path = config.root / "homeassistant/generated/integrations.json" - config_flow_path.write_text(f"{config.cache['config_flow']}") - integrations_path.write_text(f"{config.cache['integrations']}\n") + with open(str(config_flow_path), "w") as fp: + fp.write(f"{config.cache['config_flow']}") + with open(str(integrations_path), "w") as fp: + fp.write(f"{config.cache['integrations']}\n") diff --git a/script/hassfest/config_schema.py b/script/hassfest/config_schema.py index 6b863ab9ecd..141b087472b 100644 --- a/script/hassfest/config_schema.py +++ b/script/hassfest/config_schema.py @@ -4,15 +4,15 @@ from __future__ import annotations import ast -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN +from homeassistant.core import DOMAIN as HA_DOMAIN from .model import Config, Integration CONFIG_SCHEMA_IGNORE = { # Configuration under the homeassistant key is a special case, it's handled by - # core_config.async_process_ha_core_config already during bootstrapping, not by + # conf_util.async_process_ha_core_config already during bootstrapping, not by # a schema in the homeassistant integration. - HOMEASSISTANT_DOMAIN, + HA_DOMAIN, } @@ -21,7 +21,7 @@ def _has_assignment(module: ast.Module, name: str) -> bool: for item in module.body: if type(item) not in (ast.Assign, ast.AnnAssign, ast.AugAssign): continue - if type(item) is ast.Assign: + if type(item) == ast.Assign: for target in item.targets: if getattr(target, "id", None) == name: return True @@ -35,7 +35,7 @@ def _has_function( module: ast.Module, _type: ast.AsyncFunctionDef | ast.FunctionDef, name: str ) -> bool: """Test if the module defines a function.""" - return any(type(item) is _type and item.name == name for item in module.body) + return any(type(item) == _type and item.name == name for item in module.body) def _has_import(module: ast.Module, name: str) -> bool: diff --git a/script/hassfest/coverage.py b/script/hassfest/coverage.py new file mode 100644 index 00000000000..388f2a1c761 --- /dev/null +++ b/script/hassfest/coverage.py @@ -0,0 +1,181 @@ +"""Validate coverage files.""" + +from __future__ import annotations + +from pathlib import Path + +from .model import Config, Integration + +DONT_IGNORE = ( + "config_flow.py", + "device_action.py", + "device_condition.py", + "device_trigger.py", + "diagnostics.py", + "group.py", + "intent.py", + "logbook.py", + "media_source.py", + "recorder.py", + "scene.py", +) +FORCE_COVERAGE = ("gold", "platinum") + +CORE_PREFIX = """# Sorted by hassfest. 
+# +# To sort, run python3 -m script.hassfest -p coverage + +[run] +source = homeassistant +omit = +""" +COMPONENTS_PREFIX = ( + " # omit pieces of code that rely on external devices being present\n" +) +SUFFIX = """[report] +# Regexes for lines to exclude from consideration +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain about missing debug-only code: + def __repr__ + + # Don't complain if tests don't hit defensive assertion code: + raise AssertionError + raise NotImplementedError + + # TYPE_CHECKING and @overload blocks are never executed during pytest run + if TYPE_CHECKING: + @overload +""" + + +def validate(integrations: dict[str, Integration], config: Config) -> None: + """Validate coverage.""" + coverage_path = config.root / ".coveragerc" + + not_found: list[str] = [] + unsorted: list[str] = [] + checking = False + + previous_line = "" + with coverage_path.open("rt") as fp: + for line in fp: + line = line.strip() + + if line == COMPONENTS_PREFIX.strip(): + previous_line = "" + continue + + if not line or line.startswith("#"): + continue + + if not checking: + if line == "omit =": + checking = True + continue + + # Finished + if line == "[report]": + break + + path = Path(line) + + # Discard wildcard + path_exists = path + while "*" in path_exists.name: + path_exists = path_exists.parent + + if not path_exists.exists(): + not_found.append(line) + continue + + if line < previous_line: + unsorted.append(line) + previous_line = line + + if not line.startswith("homeassistant/components/"): + continue + + # Ignore sub-directories + if len(path.parts) > 4: + continue + + integration_path = path.parent + + integration = integrations[integration_path.name] + + if integration.quality_scale in FORCE_COVERAGE: + integration.add_error( + "coverage", + f"has quality scale {integration.quality_scale} and " + "should not be present in .coveragerc file", + ) + continue + + if (last_part := path.parts[-1]) in {"*", "const.py"} and Path( + f"tests/components/{integration.domain}/__init__.py" + ).exists(): + integration.add_error( + "coverage", + f"has tests and should not use {last_part} in .coveragerc file", + ) + continue + + for check in DONT_IGNORE: + if path.parts[-1] not in {"*", check}: + continue + + if (integration_path / check).exists(): + integration.add_error( + "coverage", + f"{check} must not be ignored by the .coveragerc file", + ) + + if unsorted: + config.add_error( + "coverage", + "Paths are unsorted in .coveragerc file. " + "Run python3 -m script.hassfest\n - " + f"{'\n - '.join(unsorted)}", + fixable=True, + ) + + if not_found: + raise RuntimeError( + f".coveragerc references files that don't exist: {', '.join(not_found)}." 
+ ) + + +def generate(integrations: dict[str, Integration], config: Config) -> None: + """Sort coverage.""" + coverage_path = config.root / ".coveragerc" + core = [] + components = [] + section = "header" + + with coverage_path.open("rt") as fp: + for line in fp: + if line == "[report]\n": + break + + if section != "core" and line == "omit =\n": + section = "core" + elif section != "components" and line == COMPONENTS_PREFIX: + section = "components" + elif section == "core" and line != "\n": + core.append(line) + elif section == "components" and line != "\n": + components.append(line) + + assert core, "core should be a non-empty list" + assert components, "components should be a non-empty list" + content = ( + f"{CORE_PREFIX}{"".join(sorted(core))}\n" + f"{COMPONENTS_PREFIX}{"".join(sorted(components))}\n" + f"\n{SUFFIX}" + ) + + with coverage_path.open("w") as fp: + fp.write(content) diff --git a/script/hassfest/dependencies.py b/script/hassfest/dependencies.py index 0c7f4f11a8c..66796d4dd0d 100644 --- a/script/hassfest/dependencies.py +++ b/script/hassfest/dependencies.py @@ -44,15 +44,6 @@ class ImportCollector(ast.NodeVisitor): assert self._cur_fil_dir self.referenced[self._cur_fil_dir].add(reference_domain) - def visit_If(self, node: ast.If) -> None: - """Visit If node.""" - if isinstance(node.test, ast.Name) and node.test.id == "TYPE_CHECKING": - # Ignore TYPE_CHECKING block - return - - # Have it visit other kids - self.generic_visit(node) - def visit_ImportFrom(self, node: ast.ImportFrom) -> None: """Visit ImportFrom node.""" if node.module is None: @@ -121,10 +112,10 @@ ALLOWED_USED_COMPONENTS = { "alert", "automation", "conversation", - "default_config", "device_automation", "frontend", "group", + "hassio", "homeassistant", "input_boolean", "input_button", diff --git a/script/hassfest/dhcp.py b/script/hassfest/dhcp.py index 8a8f344f6cb..d1fd0474430 100644 --- a/script/hassfest/dhcp.py +++ b/script/hassfest/dhcp.py @@ -32,15 +32,19 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - if dhcp_path.read_text() != content: - config.add_error( - "dhcp", - "File dhcp.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + with open(str(dhcp_path)) as fp: + current = fp.read() + if current != content: + config.add_error( + "dhcp", + "File dhcp.py is not up to date. 
Run python3 -m script.hassfest", + fixable=True, + ) + return def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate dhcp file.""" dhcp_path = config.root / "homeassistant/generated/dhcp.py" - dhcp_path.write_text(f"{config.cache['dhcp']}") + with open(str(dhcp_path), "w") as fp: + fp.write(f"{config.cache['dhcp']}") diff --git a/script/hassfest/docker.py b/script/hassfest/docker.py index 57d86bc4def..e38a238be7d 100644 --- a/script/hassfest/docker.py +++ b/script/hassfest/docker.py @@ -1,13 +1,7 @@ """Generate and validate the dockerfile.""" -from dataclasses import dataclass -from pathlib import Path - from homeassistant import core -from homeassistant.components.go2rtc.const import RECOMMENDED_VERSION as GO2RTC_VERSION -from homeassistant.const import Platform from homeassistant.util import executor, thread -from script.gen_requirements_all import gather_recursive_requirements from .model import Config, Integration from .requirements import PACKAGE_REGEX, PIP_VERSION_RANGE_SEPARATOR @@ -21,13 +15,12 @@ FROM ${{BUILD_FROM}} # Synchronize with homeassistant/core.py:async_stop ENV \ S6_SERVICES_GRACETIME={timeout} \ - UV_SYSTEM_PYTHON=true \ - UV_NO_CACHE=true + UV_SYSTEM_PYTHON=true ARG QEMU_CPU # Install uv -RUN pip3 install uv=={uv} +RUN pip3 install uv=={uv_version} WORKDIR /usr/src @@ -44,9 +37,15 @@ RUN \ if ls homeassistant/home_assistant_*.whl 1> /dev/null 2>&1; then \ uv pip install homeassistant/home_assistant_*.whl; \ fi \ - && uv pip install \ - --no-build \ - -r homeassistant/requirements_all.txt + && if [ "${{BUILD_ARCH}}" = "i386" ]; then \ + linux32 uv pip install \ + --no-build \ + -r homeassistant/requirements_all.txt; \ + else \ + uv pip install \ + --no-build \ + -r homeassistant/requirements_all.txt; \ + fi ## Setup Home Assistant Core COPY . homeassistant/ @@ -59,122 +58,33 @@ RUN \ # Home Assistant S6-Overlay COPY rootfs / -# Needs to be redefined inside the FROM statement to be set for RUN commands -ARG BUILD_ARCH -# Get go2rtc binary -RUN \ - case "${{BUILD_ARCH}}" in \ - "aarch64") go2rtc_suffix='arm64' ;; \ - "armhf") go2rtc_suffix='armv6' ;; \ - "armv7") go2rtc_suffix='arm' ;; \ - *) go2rtc_suffix=${{BUILD_ARCH}} ;; \ - esac \ - && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v{go2rtc}/go2rtc_linux_${{go2rtc_suffix}} --output /bin/go2rtc \ - && chmod +x /bin/go2rtc \ - # Verify go2rtc can be executed - && go2rtc --version - WORKDIR /config """ -_HASSFEST_TEMPLATE = r"""# Automatically generated by hassfest. -# -# To update, run python3 -m script.hassfest -p docker -FROM python:3.13-alpine -ENV \ - UV_SYSTEM_PYTHON=true \ - UV_EXTRA_INDEX_URL="https://wheels.home-assistant.io/musllinux-index/" - -SHELL ["/bin/sh", "-o", "pipefail", "-c"] -ENTRYPOINT ["/usr/src/homeassistant/script/hassfest/docker/entrypoint.sh"] -WORKDIR "/github/workspace" - -COPY . 
/usr/src/homeassistant - -# Uv is only needed during build -RUN --mount=from=ghcr.io/astral-sh/uv:{uv},source=/uv,target=/bin/uv \ - # Required for PyTurboJPEG - apk add --no-cache libturbojpeg \ - && uv pip install \ - --no-build \ - --no-cache \ - -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ - -r /usr/src/homeassistant/requirements.txt \ - stdlib-list==0.10.0 pipdeptree=={pipdeptree} tqdm=={tqdm} ruff=={ruff} \ - {required_components_packages} - -LABEL "name"="hassfest" -LABEL "maintainer"="Home Assistant " - -LABEL "com.github.actions.name"="hassfest" -LABEL "com.github.actions.description"="Run hassfest to validate standalone integration repositories" -LABEL "com.github.actions.icon"="terminal" -LABEL "com.github.actions.color"="gray-dark" -""" - - -def _get_package_versions(file: Path, packages: set[str]) -> dict[str, str]: - package_versions: dict[str, str] = {} - with file.open(encoding="UTF-8") as fp: +def _get_uv_version() -> str: + with open("requirements_test.txt") as fp: for _, line in enumerate(fp): - if package_versions.keys() == packages: - return package_versions - if match := PACKAGE_REGEX.match(line): pkg, sep, version = match.groups() - if pkg not in packages: + if pkg != "uv": continue if sep != "==" or not version: raise RuntimeError( - f'Requirement {pkg} need to be pinned "{pkg}==".' + 'Requirement uv need to be pinned "uv==".' ) for part in version.split(";", 1)[0].split(","): version_part = PIP_VERSION_RANGE_SEPARATOR.match(part) if version_part: - package_versions[pkg] = version_part.group(2) - break + return version_part.group(2) - if package_versions.keys() == packages: - return package_versions - - raise RuntimeError("At least one package was not found in the requirements file.") + raise RuntimeError("Invalid uv requirement in requirements_test.txt") -@dataclass -class File: - """File.""" - - content: str - path: Path - - -def _generate_hassfest_dockerimage( - config: Config, timeout: int, package_versions: dict[str, str] -) -> File: - packages = set() - already_checked_domains = set() - for platform in Platform: - packages.update( - gather_recursive_requirements(platform.value, already_checked_domains) - ) - # Add go2rtc requirements as this file needs the go2rtc integration - packages.update(gather_recursive_requirements("go2rtc", already_checked_domains)) - - return File( - _HASSFEST_TEMPLATE.format( - timeout=timeout, - required_components_packages=" ".join(sorted(packages)), - **package_versions, - ), - config.root / "script/hassfest/docker/Dockerfile", - ) - - -def _generate_files(config: Config) -> list[File]: +def _generate_dockerfile() -> str: timeout = ( core.STOPPING_STAGE_SHUTDOWN_TIMEOUT + core.STOP_STAGE_SHUTDOWN_TIMEOUT @@ -183,44 +93,27 @@ def _generate_files(config: Config) -> list[File]: + executor.EXECUTOR_SHUTDOWN_TIMEOUT + thread.THREADING_SHUTDOWN_TIMEOUT + 10 - ) * 1000 - - package_versions = _get_package_versions(Path("requirements.txt"), {"uv"}) - package_versions |= _get_package_versions( - Path("requirements_test.txt"), {"pipdeptree", "tqdm"} ) - package_versions |= _get_package_versions( - Path("requirements_test_pre_commit.txt"), {"ruff"} + return DOCKERFILE_TEMPLATE.format( + timeout=timeout * 1000, uv_version=_get_uv_version() ) - return [ - File( - DOCKERFILE_TEMPLATE.format( - timeout=timeout, - **package_versions, - go2rtc=GO2RTC_VERSION, - ), - config.root / "Dockerfile", - ), - _generate_hassfest_dockerimage(config, timeout, package_versions), - ] - def validate(integrations: dict[str, Integration], 
config: Config) -> None: """Validate dockerfile.""" - docker_files = _generate_files(config) - config.cache["docker"] = docker_files + dockerfile_content = _generate_dockerfile() + config.cache["dockerfile"] = dockerfile_content - for file in docker_files: - if file.content != file.path.read_text(): - config.add_error( - "docker", - f"File {file.path} is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + dockerfile_path = config.root / "Dockerfile" + if dockerfile_path.read_text() != dockerfile_content: + config.add_error( + "docker", + "File Dockerfile is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate dockerfile.""" - for file in _generate_files(config): - file.path.write_text(file.content) + dockerfile_path = config.root / "Dockerfile" + dockerfile_path.write_text(config.cache["dockerfile"]) diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile deleted file mode 100644 index 0fa0a1a89fa..00000000000 --- a/script/hassfest/docker/Dockerfile +++ /dev/null @@ -1,34 +0,0 @@ -# Automatically generated by hassfest. -# -# To update, run python3 -m script.hassfest -p docker -FROM python:3.13-alpine - -ENV \ - UV_SYSTEM_PYTHON=true \ - UV_EXTRA_INDEX_URL="https://wheels.home-assistant.io/musllinux-index/" - -SHELL ["/bin/sh", "-o", "pipefail", "-c"] -ENTRYPOINT ["/usr/src/homeassistant/script/hassfest/docker/entrypoint.sh"] -WORKDIR "/github/workspace" - -COPY . /usr/src/homeassistant - -# Uv is only needed during build -RUN --mount=from=ghcr.io/astral-sh/uv:0.5.0,source=/uv,target=/bin/uv \ - # Required for PyTurboJPEG - apk add --no-cache libturbojpeg \ - && uv pip install \ - --no-build \ - --no-cache \ - -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ - -r /usr/src/homeassistant/requirements.txt \ - stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.7.3 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.1 home-assistant-intents==2024.11.13 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 - -LABEL "name"="hassfest" -LABEL "maintainer"="Home Assistant " - -LABEL "com.github.actions.name"="hassfest" -LABEL "com.github.actions.description"="Run hassfest to validate standalone integration repositories" -LABEL "com.github.actions.icon"="terminal" -LABEL "com.github.actions.color"="gray-dark" diff --git a/script/hassfest/docker/Dockerfile.dockerignore b/script/hassfest/docker/Dockerfile.dockerignore deleted file mode 100644 index c109421fce1..00000000000 --- a/script/hassfest/docker/Dockerfile.dockerignore +++ /dev/null @@ -1,11 +0,0 @@ -# Ignore everything except the specified files -* - -!homeassistant/ -!requirements.txt -!script/ -script/hassfest/docker/ -!script/hassfest/docker/entrypoint.sh - -# Temporary files -**/__pycache__ \ No newline at end of file diff --git a/script/hassfest/docker/entrypoint.sh b/script/hassfest/docker/entrypoint.sh deleted file mode 100755 index 7b75eb186d2..00000000000 --- a/script/hassfest/docker/entrypoint.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/sh - -integrations="" -integration_path="" - -# Enable recursive globbing using find -for manifest in $(find . -name "manifest.json"); do - manifest_path=$(realpath "${manifest}") - integrations="$integrations --integration-path ${manifest_path%/*}" -done - -if [ -z "$integrations" ]; then - echo "Error: No integrations found!" 
- exit 1 -fi - -cd /usr/src/homeassistant || exit 1 -exec python3 -m script.hassfest --action validate $integrations "$@" diff --git a/script/hassfest/icons.py b/script/hassfest/icons.py index f6bcd865c23..087d395afeb 100644 --- a/script/hassfest/icons.py +++ b/script/hassfest/icons.py @@ -9,7 +9,6 @@ import voluptuous as vol from voluptuous.humanize import humanize_error import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.icon import convert_shorthand_service_icon from .model import Config, Integration from .translations import translation_key_validator @@ -52,7 +51,7 @@ DATA_ENTRY_ICONS_SCHEMA = vol.Schema( { "step": { str: { - "sections": { + "section": { str: icon_value_validator, } } @@ -61,38 +60,7 @@ DATA_ENTRY_ICONS_SCHEMA = vol.Schema( ) -CORE_SERVICE_ICONS_SCHEMA = cv.schema_with_slug_keys( - vol.Schema( - { - vol.Optional("service"): icon_value_validator, - vol.Optional("sections"): cv.schema_with_slug_keys( - icon_value_validator, slug_validator=translation_key_validator - ), - } - ), - slug_validator=translation_key_validator, -) - - -CUSTOM_INTEGRATION_SERVICE_ICONS_SCHEMA = cv.schema_with_slug_keys( - vol.All( - convert_shorthand_service_icon, - vol.Schema( - { - vol.Optional("service"): icon_value_validator, - vol.Optional("sections"): cv.schema_with_slug_keys( - icon_value_validator, slug_validator=translation_key_validator - ), - } - ), - ), - slug_validator=translation_key_validator, -) - - -def icon_schema( - core_integration: bool, integration_type: str, no_entity_platform: bool -) -> vol.Schema: +def icon_schema(integration_type: str, no_entity_platform: bool) -> vol.Schema: """Create an icon schema.""" state_validator = cv.schema_with_slug_keys( @@ -123,9 +91,7 @@ def icon_schema( {str: {"fix_flow": DATA_ENTRY_ICONS_SCHEMA}} ), vol.Optional("options"): DATA_ENTRY_ICONS_SCHEMA, - vol.Optional("services"): CORE_SERVICE_ICONS_SCHEMA - if core_integration - else CUSTOM_INTEGRATION_SERVICE_ICONS_SCHEMA, + vol.Optional("services"): state_validator, } ) @@ -180,9 +146,7 @@ def validate_icon_file(config: Config, integration: Integration) -> None: no_entity_platform = integration.domain in ("notify", "image_processing") - schema = icon_schema( - integration.core, integration.integration_type, no_entity_platform - ) + schema = icon_schema(integration.integration_type, no_entity_platform) try: schema(icons) diff --git a/script/hassfest/manifest.py b/script/hassfest/manifest.py index 4013c8a6c19..8ff0750250f 100644 --- a/script/hassfest/manifest.py +++ b/script/hassfest/manifest.py @@ -88,10 +88,12 @@ NO_IOT_CLASS = [ "logbook", "logger", "lovelace", + "map", "media_source", "my", "onboarding", "panel_custom", + "panel_iframe", "plant", "profiler", "proxy", @@ -115,7 +117,13 @@ NO_IOT_CLASS = [ # https://github.com/home-assistant/developers.home-assistant/pull/1512 NO_DIAGNOSTICS = [ "dlna_dms", + "gdacs", + "geonetnz_quakes", "hyperion", + # Modbus is excluded because it doesn't have to have a config flow + # according to ADR-0010, since it's a protocol integration. This + # means that it can't implement diagnostics. 
+ "modbus", "nightscout", "pvpc_hourly_pricing", "risco", @@ -268,6 +276,7 @@ INTEGRATION_MANIFEST_SCHEMA = vol.Schema( ) ], vol.Required("documentation"): vol.All(vol.Url(), documentation_url), + vol.Optional("issue_tracker"): vol.Url(), vol.Optional("quality_scale"): vol.In(SUPPORTED_QUALITY_SCALES), vol.Optional("requirements"): [str], vol.Optional("dependencies"): [str], @@ -303,7 +312,6 @@ def manifest_schema(value: dict[str, Any]) -> vol.Schema: CUSTOM_INTEGRATION_MANIFEST_SCHEMA = INTEGRATION_MANIFEST_SCHEMA.extend( { vol.Optional("version"): vol.All(str, verify_version), - vol.Optional("issue_tracker"): vol.Url(), vol.Optional("import_executor"): bool, } ) diff --git a/script/hassfest/metadata.py b/script/hassfest/metadata.py index 0768e875016..bd3ac4514e7 100644 --- a/script/hassfest/metadata.py +++ b/script/hassfest/metadata.py @@ -10,7 +10,8 @@ from .model import Config, Integration def validate(integrations: dict[str, Integration], config: Config) -> None: """Validate project metadata keys.""" metadata_path = config.root / "pyproject.toml" - data = tomllib.loads(metadata_path.read_text()) + with open(metadata_path, "rb") as fp: + data = tomllib.load(fp) try: if data["project"]["version"] != __version__: diff --git a/script/hassfest/model.py b/script/hassfest/model.py index 63e9b025ed4..736fb6874be 100644 --- a/script/hassfest/model.py +++ b/script/hassfest/model.py @@ -29,7 +29,6 @@ class Config: root: pathlib.Path action: Literal["validate", "generate"] requirements: bool - core_integrations_path: pathlib.Path errors: list[Error] = field(default_factory=list) cache: dict[str, Any] = field(default_factory=dict) plugins: set[str] = field(default_factory=set) @@ -106,7 +105,7 @@ class Integration: """Represent an integration in our validator.""" @classmethod - def load_dir(cls, path: pathlib.Path, config: Config) -> dict[str, Integration]: + def load_dir(cls, path: pathlib.Path) -> dict[str, Integration]: """Load all integrations in a directory.""" assert path.is_dir() integrations: dict[str, Integration] = {} @@ -124,14 +123,13 @@ class Integration: ) continue - integration = cls(fil, config) + integration = cls(fil) integration.load_manifest() integrations[integration.domain] = integration return integrations path: pathlib.Path - _config: Config _manifest: dict[str, Any] | None = None manifest_path: pathlib.Path | None = None errors: list[Error] = field(default_factory=list) @@ -152,9 +150,7 @@ class Integration: @property def core(self) -> bool: """Core integration.""" - return self.path.as_posix().startswith( - self._config.core_integrations_path.as_posix() - ) + return self.path.as_posix().startswith("homeassistant/components") @property def disabled(self) -> str | None: diff --git a/script/hassfest/mqtt.py b/script/hassfest/mqtt.py index 54ee65aaa35..b2112d9bb6a 100644 --- a/script/hassfest/mqtt.py +++ b/script/hassfest/mqtt.py @@ -33,15 +33,17 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - if mqtt_path.read_text() != content: - config.add_error( - "mqtt", - "File mqtt.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + with open(str(mqtt_path)) as fp: + if fp.read() != content: + config.add_error( + "mqtt", + "File mqtt.py is not up to date. 
Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate MQTT file.""" mqtt_path = config.root / "homeassistant/generated/mqtt.py" - mqtt_path.write_text(f"{config.cache['mqtt']}") + with open(str(mqtt_path), "w") as fp: + fp.write(f"{config.cache['mqtt']}") diff --git a/script/hassfest/mypy_config.py b/script/hassfest/mypy_config.py index 25fe875e437..56734257f78 100644 --- a/script/hassfest/mypy_config.py +++ b/script/hassfest/mypy_config.py @@ -36,14 +36,15 @@ GENERAL_SETTINGS: Final[dict[str, str]] = { "plugins": "pydantic.mypy", "show_error_codes": "true", "follow_imports": "normal", - # "enable_incomplete_feature": ", ".join( # noqa: FLY002 - # [] - # ), + "enable_incomplete_feature": ",".join( # noqa: FLY002 + [ + "NewGenericSyntax", + ] + ), # Enable some checks globally. "local_partial_types": "true", "strict_equality": "true", "no_implicit_optional": "true", - "report_deprecated_as_error": "true", "warn_incomplete_stub": "true", "warn_redundant_casts": "true", "warn_unused_configs": "true", diff --git a/script/hassfest/requirements.py b/script/hassfest/requirements.py index 998593d20ec..d35d96121c5 100644 --- a/script/hassfest/requirements.py +++ b/script/hassfest/requirements.py @@ -28,6 +28,12 @@ PACKAGE_REGEX = re.compile( PIP_REGEX = re.compile(r"^(--.+\s)?([-_\.\w\d]+.*(?:==|>=|<=|~=|!=|<|>|===)?.*$)") PIP_VERSION_RANGE_SEPARATOR = re.compile(r"^(==|>=|<=|~=|!=|<|>|===)?(.*)$") +IGNORE_STANDARD_LIBRARY_VIOLATIONS = { + # Integrations which have standard library requirements. + "slide", + "suez_water", +} + def validate(integrations: dict[str, Integration], config: Config) -> None: """Handle requirements for integrations.""" @@ -78,19 +84,18 @@ def validate_requirements_format(integration: Integration) -> bool: if not version: continue - if integration.core: - for part in version.split(";", 1)[0].split(","): - version_part = PIP_VERSION_RANGE_SEPARATOR.match(part) - if ( - version_part - and AwesomeVersion(version_part.group(2)).strategy - == AwesomeVersionStrategy.UNKNOWN - ): - integration.add_error( - "requirements", - f"Unable to parse package version ({version}) for {pkg}.", - ) - continue + for part in version.split(";", 1)[0].split(","): + version_part = PIP_VERSION_RANGE_SEPARATOR.match(part) + if ( + version_part + and AwesomeVersion(version_part.group(2)).strategy + == AwesomeVersionStrategy.UNKNOWN + ): + integration.add_error( + "requirements", + f"Unable to parse package version ({version}) for {pkg}.", + ) + continue return len(integration.errors) == start_errors @@ -138,7 +143,10 @@ def validate_requirements(integration: Integration) -> None: if req in sys.stdlib_module_names: standard_library_violations.add(req) - if standard_library_violations: + if ( + standard_library_violations + and integration.domain not in IGNORE_STANDARD_LIBRARY_VIOLATIONS + ): integration.add_error( "requirements", ( @@ -146,6 +154,18 @@ def validate_requirements(integration: Integration) -> None: "are not compatible with the Python standard library" ), ) + elif ( + not standard_library_violations + and integration.domain in IGNORE_STANDARD_LIBRARY_VIOLATIONS + ): + integration.add_error( + "requirements", + ( + f"Integration {integration.domain} no longer has requirements which are" + " incompatible with the Python standard library, remove it from " + "IGNORE_STANDARD_LIBRARY_VIOLATIONS" + ), + ) @cache diff --git a/script/hassfest/services.py b/script/hassfest/services.py index 
8c9ab5c0c0b..92fca14d373 100644 --- a/script/hassfest/services.py +++ b/script/hassfest/services.py @@ -75,14 +75,6 @@ CUSTOM_INTEGRATION_FIELD_SCHEMA = CORE_INTEGRATION_FIELD_SCHEMA.extend( } ) -CUSTOM_INTEGRATION_SECTION_SCHEMA = vol.Schema( - { - vol.Optional("collapsed"): bool, - vol.Required("fields"): vol.Schema({str: CUSTOM_INTEGRATION_FIELD_SCHEMA}), - } -) - - CORE_INTEGRATION_SERVICE_SCHEMA = vol.Any( vol.Schema( { @@ -113,17 +105,7 @@ CUSTOM_INTEGRATION_SERVICE_SCHEMA = vol.Any( vol.Optional("target"): vol.Any( selector.TargetSelector.CONFIG_SCHEMA, None ), - vol.Optional("fields"): vol.All( - vol.Schema( - { - str: vol.Any( - CUSTOM_INTEGRATION_FIELD_SCHEMA, - CUSTOM_INTEGRATION_SECTION_SCHEMA, - ) - } - ), - unique_field_validator, - ), + vol.Optional("fields"): vol.Schema({str: CUSTOM_INTEGRATION_FIELD_SCHEMA}), } ), None, diff --git a/script/hassfest/ssdp.py b/script/hassfest/ssdp.py index 989b614e43d..0a61284eb46 100644 --- a/script/hassfest/ssdp.py +++ b/script/hassfest/ssdp.py @@ -33,15 +33,17 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - if ssdp_path.read_text() != content: - config.add_error( - "ssdp", - "File ssdp.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + with open(str(ssdp_path)) as fp: + if fp.read() != content: + config.add_error( + "ssdp", + "File ssdp.py is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate ssdp file.""" ssdp_path = config.root / "homeassistant/generated/ssdp.py" - ssdp_path.write_text(f"{config.cache['ssdp']}") + with open(str(ssdp_path), "w") as fp: + fp.write(f"{config.cache['ssdp']}") diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py index 2c3b9b4d99b..c39c070eba2 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -41,7 +41,6 @@ ALLOW_NAME_TRANSLATION = { "local_todo", "nmap_tracker", "rpi_power", - "swiss_public_transport", "waze_travel_time", "zodiac", } @@ -131,13 +130,11 @@ def translation_value_validator(value: Any) -> str: - prevents strings with single quoted placeholders - prevents combined translations """ - string_value = cv.string_with_no_html(value) - string_value = string_no_single_quoted_placeholders(string_value) - if RE_COMBINED_REFERENCE.search(string_value): + value = cv.string_with_no_html(value) + value = string_no_single_quoted_placeholders(value) + if RE_COMBINED_REFERENCE.search(value): raise vol.Invalid("the string should not contain combined translations") - if string_value != string_value.strip(): - raise vol.Invalid("the string should not contain leading or trailing spaces") - return string_value + return str(value) def string_no_single_quoted_placeholders(value: str) -> str: @@ -169,7 +166,7 @@ def gen_data_entry_schema( vol.Optional("data_description"): {str: translation_value_validator}, vol.Optional("menu_options"): {str: translation_value_validator}, vol.Optional("submit"): translation_value_validator, - vol.Optional("sections"): { + vol.Optional("section"): { str: { vol.Optional("data"): {str: translation_value_validator}, vol.Optional("description"): translation_value_validator, diff --git a/script/hassfest/usb.py b/script/hassfest/usb.py index c34f4fd1b62..84cafc973ad 100644 --- a/script/hassfest/usb.py +++ b/script/hassfest/usb.py @@ -35,15 +35,19 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if 
config.specific_integrations: return - if usb_path.read_text() != content: - config.add_error( - "usb", - "File usb.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + with open(str(usb_path)) as fp: + current = fp.read() + if current != content: + config.add_error( + "usb", + "File usb.py is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) + return def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate usb file.""" usb_path = config.root / "homeassistant/generated/usb.py" - usb_path.write_text(f"{config.cache['usb']}") + with open(str(usb_path), "w") as fp: + fp.write(f"{config.cache['usb']}") diff --git a/script/hassfest/zeroconf.py b/script/hassfest/zeroconf.py index 48fcc0a4589..63f10fcf294 100644 --- a/script/hassfest/zeroconf.py +++ b/script/hassfest/zeroconf.py @@ -90,15 +90,19 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - if zeroconf_path.read_text() != content: - config.add_error( - "zeroconf", - "File zeroconf.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + with open(str(zeroconf_path)) as fp: + current = fp.read() + if current != content: + config.add_error( + "zeroconf", + "File zeroconf.py is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) + return def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate zeroconf file.""" zeroconf_path = config.root / "homeassistant/generated/zeroconf.py" - zeroconf_path.write_text(f"{config.cache['zeroconf']}") + with open(str(zeroconf_path), "w") as fp: + fp.write(f"{config.cache['zeroconf']}") diff --git a/script/inspect_schemas.py b/script/inspect_schemas.py index 0f888d14af2..a8ffe0afb60 100755 --- a/script/inspect_schemas.py +++ b/script/inspect_schemas.py @@ -2,7 +2,7 @@ """Inspect all component SCHEMAS.""" import importlib -from pathlib import Path +import os import pkgutil from homeassistant.config import _identify_config_schema @@ -20,7 +20,7 @@ def explore_module(package): def main(): """Run the script.""" - if not Path("requirements_all.txt").is_file(): + if not os.path.isfile("requirements_all.txt"): print("Run this from HA root dir") return @@ -57,9 +57,7 @@ def main(): ) for key in sorted(msg): - print(f"\n{key}") - for val in msg[key]: - print(f" - {val}") + print("\n{}\n - {}".format(key, "\n - ".join(msg[key]))) if __name__ == "__main__": diff --git a/script/install_integration_requirements.py b/script/install_integration_requirements.py index 91c9f6a8ed0..ab91ea71557 100644 --- a/script/install_integration_requirements.py +++ b/script/install_integration_requirements.py @@ -45,7 +45,6 @@ def main() -> int | None: cmd, check=True, ) - return None if __name__ == "__main__": diff --git a/script/json_schemas/manifest_schema.json b/script/json_schemas/manifest_schema.json deleted file mode 100644 index 40f08fd2c85..00000000000 --- a/script/json_schemas/manifest_schema.json +++ /dev/null @@ -1,391 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Home Assistant integration manifest", - "description": "The manifest for a Home Assistant integration", - "type": "object", - "if": { - "properties": { "integration_type": { "const": "virtual" } }, - "required": ["integration_type"] - }, - "then": { - "oneOf": [ - { - "properties": { - "domain": { - "description": "The domain identifier of the 
integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#domain", - "examples": ["mobile_app"], - "type": "string", - "pattern": "[0-9a-z_]+" - }, - "name": { - "description": "The friendly name of the integration.", - "type": "string" - }, - "integration_type": { - "description": "The integration type.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#integration-type", - "const": "virtual" - }, - "iot_standards": { - "description": "The IoT standards which supports devices or services of this virtual integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#iot-standards", - "type": "array", - "minItems": 1, - "items": { - "type": "string", - "enum": ["homekit", "zigbee", "zwave"] - } - } - }, - "additionalProperties": false, - "required": ["domain", "name", "integration_type", "iot_standards"] - }, - { - "properties": { - "domain": { - "description": "The domain identifier of the integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#domain", - "examples": ["mobile_app"], - "type": "string", - "pattern": "[0-9a-z_]+" - }, - "name": { - "description": "The friendly name of the integration.", - "type": "string" - }, - "integration_type": { - "description": "The integration type.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#integration-type", - "const": "virtual" - }, - "supported_by": { - "description": "The integration which supports devices or services of this virtual integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#supported-by", - "type": "string" - } - }, - "additionalProperties": false, - "required": ["domain", "name", "integration_type", "supported_by"] - } - ] - }, - "else": { - "properties": { - "domain": { - "description": "The domain identifier of the integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#domain", - "examples": ["mobile_app"], - "type": "string", - "pattern": "[0-9a-z_]+" - }, - "name": { - "description": "The friendly name of the integration.", - "type": "string" - }, - "integration_type": { - "description": "The integration type.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#integration-type", - "type": "string", - "default": "hub", - "enum": [ - "device", - "entity", - "hardware", - "helper", - "hub", - "service", - "system" - ] - }, - "config_flow": { - "description": "Whether the integration is configurable from the UI.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#config-flow", - "type": "boolean" - }, - "mqtt": { - "description": "A list of topics to subscribe for the discovery of devices via MQTT.\nThis requires to specify \"mqtt\" in either the \"dependencies\" or \"after_dependencies\".\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#mqtt", - "type": "array", - "items": { - "type": "string" - }, - "uniqueItems": true - }, - "zeroconf": { - "description": "A list containing service domains to search for devices to discover via Zeroconf. Items can either be strings, which discovers all devices in the specific service domain, and/or objects which include filters. 
(useful for generic service domains like _http._tcp.local.)\nA device is discovered if it matches one of the items, but inside the individual item all properties have to be matched.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#zeroconf", - "type": "array", - "minItems": 1, - "items": { - "anyOf": [ - { - "type": "string", - "pattern": "^.*\\.local\\.$", - "description": "Service domain to search for devices." - }, - { - "type": "object", - "properties": { - "type": { - "description": "The service domain to search for devices.", - "examples": ["_http._tcp.local."], - "type": "string", - "pattern": "^.*\\.local\\.$" - }, - "name": { - "description": "The name or name pattern of the devices to filter.", - "type": "string" - }, - "properties": { - "description": "The properties of the Zeroconf advertisement to filter.", - "type": "object", - "additionalProperties": { "type": "string" } - } - }, - "required": ["type"], - "additionalProperties": false - } - ] - }, - "uniqueItems": true - }, - "ssdp": { - "description": "A list of matchers to find devices discoverable via SSDP/UPnP. In order to be discovered, the device has to match all properties of any of the matchers.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#ssdp", - "type": "array", - "minItems": 1, - "items": { - "description": "A matcher for the SSDP discovery.", - "type": "object", - "properties": { - "st": { - "type": "string" - }, - "deviceType": { - "type": "string" - }, - "manufacturer": { - "type": "string" - }, - "modelDescription": { - "type": "string" - } - }, - "additionalProperties": { "type": "string" } - } - }, - "bluetooth": { - "description": "A list of matchers to find devices discoverable via Bluetooth. In order to be discovered, the device has to match all properties of any of the matchers.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#bluetooth", - "type": "array", - "minItems": 1, - "items": { - "description": "A matcher for the bluetooth discovery", - "type": "object", - "properties": { - "connectable": { - "description": "Whether the device needs to be connected to or it works with just advertisement data.", - "type": "boolean" - }, - "local_name": { - "description": "The name or a name pattern of the device to match.", - "type": "string", - "pattern": "^([^*]+|[^*]{3,}[*].*)$" - }, - "service_uuid": { - "description": "The 128-bit service data UUID to match.", - "type": "string", - "pattern": "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" - }, - "service_data_uuid": { - "description": "The 16-bit service data UUID to match, converted into the corresponding 128-bit UUID by replacing the 3rd and 4th byte of `00000000-0000-1000-8000-00805f9b34fb` with the 16-bit UUID.", - "examples": ["0000fd3d-0000-1000-8000-00805f9b34fb"], - "type": "string", - "pattern": "0000[0-9a-f]{4}-0000-1000-8000-00805f9b34fb" - }, - "manufacturer_id": { - "description": "The Manufacturer ID to match.", - "type": "integer" - }, - "manufacturer_data_start": { - "description": "The start bytes of the manufacturer data to match.", - "type": "array", - "minItems": 1, - "items": { - "type": "integer", - "minimum": 0, - "maximum": 255 - } - } - }, - "additionalProperties": false - }, - "uniqueItems": true - }, - "homekit": { - "description": "A list of model names to find devices which are discoverable via HomeKit. 
A device is discovered if the model name of the device starts with any of the specified model names.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#homekit", - "type": "object", - "properties": { - "models": { - "description": "The model names to search for.", - "type": "array", - "items": { - "type": "string" - }, - "uniqueItems": true - } - }, - "required": ["models"], - "additionalProperties": false - }, - "dhcp": { - "description": "A list of matchers to find devices discoverable via DHCP. In order to be discovered, the device has to match all properties of any of the matchers.\nYou can specify an item with \"registered_devices\" set to true to check for devices with MAC addresses specified in the device registry.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#dhcp", - "type": "array", - "items": { - "anyOf": [ - { - "type": "object", - "properties": { - "registered_devices": { - "description": "Whether the MAC addresses of devices in the device registry should be used for discovery, useful if the discovery is used to update the IP address of already registered devices.", - "const": true - } - }, - "additionalProperties": false - }, - { - "type": "object", - "properties": { - "hostname": { - "description": "The hostname or hostname pattern to match.", - "type": "string" - }, - "macaddress": { - "description": "The MAC address or MAC address pattern to match.", - "type": "string", - "maxLength": 12 - } - }, - "additionalProperties": false - } - ] - }, - "uniqueItems": true - }, - "usb": { - "description": "A list of matchers to find devices discoverable via USB. In order to be discovered, the device has to match all properties of any of the matchers.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#usb", - "type": "array", - "uniqueItems": true, - "items": { - "type": "object", - "additionalProperties": false, - "properties": { - "vid": { - "description": "The vendor ID to match.", - "type": "string", - "pattern": "[0-9A-F]{4}" - }, - "pid": { - "description": "The product ID to match.", - "type": "string", - "pattern": "[0-9A-F]{4}" - }, - "description": { - "description": "The USB device description to match.", - "type": "string" - }, - "manufacturer": { - "description": "The manufacturer to match.", - "type": "string" - }, - "serial_number": { - "description": "The serial number to match.", - "type": "string" - }, - "known_devices": { - "type": "array", - "items": { - "type": "string" - } - } - } - } - }, - "documentation": { - "description": "The website containing the documentation for the integration. It has to be in the format \"https://www.home-assistant.io/integrations/[domain]\"\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#documentation", - "type": "string", - "pattern": "^https://www.home-assistant.io/integrations/[0-9a-z_]+$", - "format": "uri" - }, - "quality_scale": { - "description": "The quality scale of the integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#integration-quality-scale", - "type": "string", - "enum": ["internal", "silver", "gold", "platinum"] - }, - "requirements": { - "description": "The PyPI package requirements for the integration. 
The package has to be pinned to a specific version.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#requirements", - "type": "array", - "items": { - "type": "string", - "pattern": ".+==.+" - }, - "uniqueItems": true - }, - "dependencies": { - "description": "A list of integrations which need to be loaded before this integration can be set up.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#dependencies", - "type": "array", - "items": { - "type": "string" - }, - "minItems": 1, - "uniqueItems": true - }, - "after_dependencies": { - "description": "A list of integrations which need to be loaded before this integration is set up when it is configured. The integration will still be set up when the \"after_dependencies\" are not configured.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#after-dependencies", - "type": "array", - "items": { - "type": "string" - }, - "minItems": 1, - "uniqueItems": true - }, - "codeowners": { - "description": "A list of GitHub usernames or GitHub team names of the integration owners.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#code-owners", - "type": "array", - "minItems": 0, - "items": { - "type": "string", - "pattern": "^@.+$" - }, - "uniqueItems": true - }, - "loggers": { - "description": "A list of logger names used by the requirements.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#loggers", - "type": "array", - "minItems": 1, - "items": { - "type": "string" - }, - "uniqueItems": true - }, - "disabled": { - "description": "The reason for the integration being disabled.", - "type": "string" - }, - "iot_class": { - "description": "The IoT class of the integration, describing how the integration connects to the device or service.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#iot-class", - "type": "string", - "enum": [ - "assumed_state", - "cloud_polling", - "cloud_push", - "local_polling", - "local_push", - "calculated" - ] - }, - "single_config_entry": { - "description": "Whether the integration only supports a single config entry.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#single-config-entry-only", - "const": true - } - }, - "additionalProperties": false, - "required": ["domain", "name", "codeowners", "documentation"], - "dependencies": { - "mqtt": { - "anyOf": [ - { "required": ["dependencies"] }, - { "required": ["after_dependencies"] } - ] - } - } - } -} diff --git a/script/licenses.py b/script/licenses.py deleted file mode 100644 index 464a2fc456b..00000000000 --- a/script/licenses.py +++ /dev/null @@ -1,413 +0,0 @@ -"""Tool to check the licenses.""" - -from __future__ import annotations - -from argparse import ArgumentParser, Namespace -from collections.abc import Sequence -from dataclasses import dataclass -from importlib import metadata -import json -from pathlib import Path -import sys -from typing import TypedDict, cast - -from awesomeversion import AwesomeVersion -from license_expression import ( - AND, - OR, - ExpressionError, - LicenseExpression, - LicenseSymbol, - get_spdx_licensing, -) - -licensing = get_spdx_licensing() - - -class PackageMetadata(TypedDict): - """Package metadata.""" - - name: str - version: str - license_expression: str | None - license_metadata: str | None - license_classifier: list[str] - - -@dataclass -class PackageDefinition: - """Package definition.""" - - license: str - license_expression: str | None - license_metadata: str | None 
- license_classifier: list[str] - name: str - version: AwesomeVersion - - @classmethod - def from_dict(cls, data: PackageMetadata) -> PackageDefinition: - """Create a package definition from PackageMetadata.""" - if not (license_str := "; ".join(data["license_classifier"])): - license_str = data["license_metadata"] or "UNKNOWN" - return cls( - license=license_str, - license_expression=data["license_expression"], - license_metadata=data["license_metadata"], - license_classifier=data["license_classifier"], - name=data["name"], - version=AwesomeVersion(data["version"]), - ) - - -# Incomplete list of OSI approved SPDX identifiers -# Add more as needed, see https://spdx.org/licenses/ -OSI_APPROVED_LICENSES_SPDX = { - "0BSD", - "AFL-2.1", - "AGPL-3.0-only", - "AGPL-3.0-or-later", - "Apache-2.0", - "BSD-1-Clause", - "BSD-2-Clause", - "BSD-3-Clause", - "EPL-1.0", - "EPL-2.0", - "GPL-2.0-only", - "GPL-2.0-or-later", - "GPL-3.0-only", - "GPL-3.0-or-later", - "HPND", - "ISC", - "LGPL-2.1-only", - "LGPL-2.1-or-later", - "LGPL-3.0-only", - "LGPL-3.0-or-later", - "MIT", - "MIT-CMU", - "MPL-1.1", - "MPL-2.0", - "PSF-2.0", - "Unlicense", - "Zlib", - "ZPL-2.1", -} - -OSI_APPROVED_LICENSES = { - "Academic Free License (AFL)", - "Apache Software License", - "Apple Public Source License", - "Artistic License", - "Attribution Assurance License", - "BSD License", - "Boost Software License 1.0 (BSL-1.0)", - "CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)", - "Common Development and Distribution License 1.0 (CDDL-1.0)", - "Common Public License", - "Eclipse Public License 1.0 (EPL-1.0)", - "Eclipse Public License 2.0 (EPL-2.0)", - "Educational Community License, Version 2.0 (ECL-2.0)", - "Eiffel Forum License", - "European Union Public Licence 1.0 (EUPL 1.0)", - "European Union Public Licence 1.1 (EUPL 1.1)", - "European Union Public Licence 1.2 (EUPL 1.2)", - "GNU Affero General Public License v3", - "GNU Affero General Public License v3 or later (AGPLv3+)", - "GNU Free Documentation License (FDL)", - "GNU General Public License (GPL)", - "GNU General Public License v2 (GPLv2)", - "GNU General Public License v2 or later (GPLv2+)", - "GNU General Public License v3 (GPLv3)", - "GNU General Public License v3 or later (GPLv3+)", - "GNU Lesser General Public License v2 (LGPLv2)", - "GNU Lesser General Public License v2 or later (LGPLv2+)", - "GNU Lesser General Public License v3 (LGPLv3)", - "GNU Lesser General Public License v3 or later (LGPLv3+)", - "GNU Library or Lesser General Public License (LGPL)", - "Historical Permission Notice and Disclaimer (HPND)", - "IBM Public License", - "ISC License (ISCL)", - "Intel Open Source License", - "Jabber Open Source License", - "MIT License", - "MIT No Attribution License (MIT-0)", - "MITRE Collaborative Virtual Workspace License (CVW)", - "MirOS License (MirOS)", - "Motosoto License", - "Mozilla Public License 1.0 (MPL)", - "Mozilla Public License 1.1 (MPL 1.1)", - "Mozilla Public License 2.0 (MPL 2.0)", - "Mulan Permissive Software License v2 (MulanPSL-2.0)", - "NASA Open Source Agreement v1.3 (NASA-1.3)", - "Nethack General Public License", - "Nokia Open Source License", - "Open Group Test Suite License", - "Open Software License 3.0 (OSL-3.0)", - "PostgreSQL License", - "Python License (CNRI Python License)", - "Python Software Foundation License", - "Qt Public License (QPL)", - "Ricoh Source Code Public License", - "SIL Open Font License 1.1 (OFL-1.1)", - "Sleepycat License", - "Sun Industry Standards Source License (SISSL)", - "Sun Public License", - 
"The Unlicense (Unlicense)", - "Universal Permissive License (UPL)", - "University of Illinois/NCSA Open Source License", - "Vovida Software License 1.0", - "W3C License", - "X.Net License", - "Zero-Clause BSD (0BSD)", - "Zope Public License", - "zlib/libpng License", - # End license classifier - "Apache License", - "MIT", - "MPL2", - "Apache 2", - "LGPL v3", - "BSD", - "GNU-3.0", - "GPLv3", - "Eclipse Public License v2.0", - "ISC", - "GNU General Public License v3", - "GPLv2", -} - -EXCEPTIONS = { - "PyMicroBot", # https://github.com/spycle/pyMicroBot/pull/3 - "PySwitchmate", # https://github.com/Danielhiversen/pySwitchmate/pull/16 - "PyXiaomiGateway", # https://github.com/Danielhiversen/PyXiaomiGateway/pull/201 - "aioecowitt", # https://github.com/home-assistant-libs/aioecowitt/pull/180 - "chacha20poly1305", # LGPL - "commentjson", # https://github.com/vaidik/commentjson/pull/55 - "crownstone-cloud", # https://github.com/crownstone/crownstone-lib-python-cloud/pull/5 - "crownstone-core", # https://github.com/crownstone/crownstone-lib-python-core/pull/6 - "crownstone-sse", # https://github.com/crownstone/crownstone-lib-python-sse/pull/2 - "crownstone-uart", # https://github.com/crownstone/crownstone-lib-python-uart/pull/12 - "eliqonline", # https://github.com/molobrakos/eliqonline/pull/17 - "enocean", # https://github.com/kipe/enocean/pull/142 - "imutils", # https://github.com/PyImageSearch/imutils/pull/292 - "iso4217", # Public domain - "kiwiki_client", # https://github.com/c7h/kiwiki_client/pull/6 - "ld2410-ble", # https://github.com/930913/ld2410-ble/pull/7 - "maxcube-api", # https://github.com/uebelack/python-maxcube-api/pull/48 - "neurio", # https://github.com/jordanh/neurio-python/pull/13 - "nsw-fuel-api-client", # https://github.com/nickw444/nsw-fuel-api-client/pull/14 - "pigpio", # https://github.com/joan2937/pigpio/pull/608 - "pymitv", # MIT - "pybbox", # https://github.com/HydrelioxGitHub/pybbox/pull/5 - "pyeconet", # https://github.com/w1ll1am23/pyeconet/pull/41 - "pysabnzbd", # https://github.com/jeradM/pysabnzbd/pull/6 - "pyvera", # https://github.com/maximvelichko/pyvera/pull/164 - "repoze.lru", - "sharp_aquos_rc", # https://github.com/jmoore987/sharp_aquos_rc/pull/14 - "tapsaff", # https://github.com/bazwilliams/python-taps-aff/pull/5 -} - -TODO = { - "aiocache": AwesomeVersion( - "0.12.3" - ), # https://github.com/aio-libs/aiocache/blob/master/LICENSE all rights reserved? 
-} - -EXCEPTIONS_AND_TODOS = EXCEPTIONS.union(TODO) - - -def check_licenses(args: CheckArgs) -> int: - """Check licenses are OSI approved.""" - exit_code = 0 - raw_licenses = json.loads(Path(args.path).read_text()) - license_status = { - pkg.name: (pkg, check_license_status(pkg)) - for data in raw_licenses - if (pkg := PackageDefinition.from_dict(data)) - } - - for name, version in TODO.items(): - pkg, status = license_status.get(name, (None, None)) - if pkg is None or not (version < pkg.version): - continue - assert status is not None - - if status is True: - print( - "Approved license detected for " - f"{pkg.name}@{pkg.version}: {get_license_str(pkg)}\n" - "Please remove the package from the TODO list.\n" - ) - else: - print( - "We could not detect an OSI-approved license for " - f"{pkg.name}@{pkg.version}: {get_license_str(pkg)}\n" - "Please update the package version on the TODO list.\n" - ) - exit_code = 1 - - for pkg, status in license_status.values(): - if status is False and pkg.name not in EXCEPTIONS_AND_TODOS: - print( - "We could not detect an OSI-approved license for " - f"{pkg.name}@{pkg.version}: {get_license_str(pkg)}\n" - ) - exit_code = 1 - if status is True and pkg.name in EXCEPTIONS: - print( - "Approved license detected for " - f"{pkg.name}@{pkg.version}: {get_license_str(pkg)}\n" - "Please remove the package from the EXCEPTIONS list.\n" - ) - exit_code = 1 - - for name in EXCEPTIONS_AND_TODOS.difference(license_status): - print( - f"Package {name} is tracked, but not used. " - "Please remove it from the licenses.py file.\n" - ) - exit_code = 1 - - return exit_code - - -def check_license_status(package: PackageDefinition) -> bool: - """Check if package licenses is OSI approved.""" - if package.license_expression: - # Prefer 'License-Expression' if it exists - return check_license_expression(package.license_expression) or False - - if ( - package.license_metadata - and (check := check_license_expression(package.license_metadata)) is not None - ): - # Check license metadata if it's a valid SPDX license expression - return check - - for approved_license in OSI_APPROVED_LICENSES: - if approved_license in package.license: - return True - return False - - -def check_license_expression(license_str: str) -> bool | None: - """Check if license expression is a valid and approved SPDX license string.""" - if license_str == "UNKNOWN" or "\n" in license_str: - # Ignore common errors for license metadata values - return None - - try: - expr = licensing.parse(license_str, validate=True) - except ExpressionError: - return None - return check_spdx_license(expr) - - -def check_spdx_license(expr: LicenseExpression) -> bool: - """Check a SPDX license expression.""" - if isinstance(expr, LicenseSymbol): - return expr.key in OSI_APPROVED_LICENSES_SPDX - if isinstance(expr, OR): - return any(check_spdx_license(arg) for arg in expr.args) - if isinstance(expr, AND): - return all(check_spdx_license(arg) for arg in expr.args) - return False - - -def get_license_str(package: PackageDefinition) -> str: - """Return license string.""" - return ( - f"{package.license_expression} -- {package.license_metadata} " - f"-- {package.license_classifier}" - ) - - -def extract_licenses(args: ExtractArgs) -> int: - """Extract license data for installed packages.""" - licenses = sorted( - [get_package_metadata(dist) for dist in list(metadata.distributions())], - key=lambda dist: dist["name"], - ) - Path(args.output_file).write_text(json.dumps(licenses, indent=2)) - return 0 - - -def get_package_metadata(dist: 
metadata.Distribution) -> PackageMetadata: - """Get package metadata for distribution.""" - return { - "name": dist.name, - "version": dist.version, - "license_expression": dist.metadata.get("License-Expression"), - "license_metadata": dist.metadata.get("License"), - "license_classifier": extract_license_classifier( - dist.metadata.get_all("Classifier") - ), - } - - -def extract_license_classifier(classifiers: list[str] | None) -> list[str]: - """Extract license from list of classifiers. - - E.g. 'License :: OSI Approved :: MIT License' -> 'MIT License'. - Filter out bare 'License :: OSI Approved'. - """ - return [ - license_classifier - for classifier in classifiers or () - if classifier.startswith("License") - and (license_classifier := classifier.rpartition(" :: ")[2]) - and license_classifier != "OSI Approved" - ] - - -class ExtractArgs(Namespace): - """Extract arguments.""" - - output_file: str - - -class CheckArgs(Namespace): - """Check arguments.""" - - path: str - - -def main(argv: Sequence[str] | None = None) -> int: - """Run the main script.""" - parser = ArgumentParser() - subparsers = parser.add_subparsers(title="Subcommands", required=True) - - parser_extract = subparsers.add_parser("extract") - parser_extract.set_defaults(action="extract") - parser_extract.add_argument( - "--output-file", - default="licenses.json", - help="Path to store the licenses file", - ) - - parser_check = subparsers.add_parser("check") - parser_check.set_defaults(action="check") - parser_check.add_argument( - "path", - nargs="?", - metavar="PATH", - default="licenses.json", - help="Path to json licenses file", - ) - - argv = argv or sys.argv[1:] - args = parser.parse_args(argv) - - if args.action == "extract": - args = cast(ExtractArgs, args) - return extract_licenses(args) - if args.action == "check": - args = cast(CheckArgs, args) - if (exit_code := check_licenses(args)) == 0: - print("All licenses are approved!") - return exit_code - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/script/lint_and_test.py b/script/lint_and_test.py index fb350c113b9..e23870364b6 100755 --- a/script/lint_and_test.py +++ b/script/lint_and_test.py @@ -9,7 +9,6 @@ from collections import namedtuple from contextlib import suppress import itertools import os -from pathlib import Path import re import shlex import sys @@ -21,7 +20,7 @@ except ImportError: RE_ASCII = re.compile(r"\033\[[^m]*m") -Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"]) # noqa: PYI024 +Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"]) PASS = "green" FAIL = "bold_red" @@ -64,7 +63,7 @@ async def async_exec(*args, display=False): """Execute, return code & log.""" argsp = [] for arg in args: - if Path(arg).is_file(): + if os.path.isfile(arg): argsp.append(f"\\\n {shlex.quote(arg)}") else: argsp.append(shlex.quote(arg)) @@ -133,7 +132,7 @@ async def ruff(files): async def lint(files): """Perform lint.""" - files = [file for file in files if Path(file).is_file()] + files = [file for file in files if os.path.isfile(file)] res = sorted( itertools.chain( *await asyncio.gather( @@ -165,7 +164,7 @@ async def lint(files): async def main(): """Run the main loop.""" # Ensure we are in the homeassistant root - os.chdir(Path(__file__).parent.parent) + os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) files = await git() if not files: @@ -195,7 +194,7 @@ async def main(): gen_req = True # requirements script for components # Find test files... 
if fname.startswith("tests/"): - if "/test_" in fname and Path(fname).is_file(): + if "/test_" in fname and os.path.isfile(fname): # All test helpers should be excluded test_files.add(fname) else: @@ -208,7 +207,7 @@ async def main(): else: parts[-1] = f"test_{parts[-1]}" fname = "/".join(parts) - if Path(fname).is_file(): + if os.path.isfile(fname): test_files.add(fname) if gen_req: diff --git a/script/scaffold/templates/config_flow/tests/conftest.py b/script/scaffold/templates/config_flow/tests/conftest.py index 12faacd40df..fc217636705 100644 --- a/script/scaffold/templates/config_flow/tests/conftest.py +++ b/script/scaffold/templates/config_flow/tests/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the NEW_NAME tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/script/scaffold/templates/config_flow_helper/tests/conftest.py b/script/scaffold/templates/config_flow_helper/tests/conftest.py index 12faacd40df..fc217636705 100644 --- a/script/scaffold/templates/config_flow_helper/tests/conftest.py +++ b/script/scaffold/templates/config_flow_helper/tests/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the NEW_NAME tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py b/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py index 8e7854835d8..809902fa0dd 100644 --- a/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py +++ b/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py @@ -59,7 +59,7 @@ def get_suggested(schema, key): return None return k.description["suggested_value"] # Wanted key absent from schema - raise KeyError(f"Key `{key}` is missing from schema") + raise Exception @pytest.mark.parametrize("platform", ["sensor"]) diff --git a/script/scaffold/templates/config_flow_oauth2/integration/api.py b/script/scaffold/templates/config_flow_oauth2/integration/api.py index 9516dd99122..3f4aa3cfb82 100644 --- a/script/scaffold/templates/config_flow_oauth2/integration/api.py +++ b/script/scaffold/templates/config_flow_oauth2/integration/api.py @@ -49,6 +49,7 @@ class AsyncConfigEntryAuth(my_pypi_package.AbstractAuth): async def async_get_access_token(self) -> str: """Return a valid access token.""" - await self._oauth_session.async_ensure_token_valid() + if not self._oauth_session.valid_token: + await self._oauth_session.async_ensure_token_valid() return self._oauth_session.token["access_token"] diff --git a/script/scaffold/templates/device_trigger/tests/test_device_trigger.py b/script/scaffold/templates/device_trigger/tests/test_device_trigger.py index 1693049ae4c..7e4f88261bc 100644 --- a/script/scaffold/templates/device_trigger/tests/test_device_trigger.py +++ b/script/scaffold/templates/device_trigger/tests/test_device_trigger.py @@ -109,16 +109,14 @@ async def test_if_fires_on_state_change( hass.states.async_set("NEW_DOMAIN.entity", STATE_ON) await hass.async_block_till_done() assert len(service_calls) == 1 - assert ( - service_calls[0].data["some"] - == "turn_on - device - NEW_DOMAIN.entity - off - on - None - 0" - ) + assert service_calls[0].data[ + "some" + ] == "turn_on - device - {} - off - on - None - 0".format("NEW_DOMAIN.entity") # Fake that the entity is turning off. 
hass.states.async_set("NEW_DOMAIN.entity", STATE_OFF) await hass.async_block_till_done() assert len(service_calls) == 2 - assert ( - service_calls[1].data["some"] - == "turn_off - device - NEW_DOMAIN.entity - on - off - None - 0" - ) + assert service_calls[1].data[ + "some" + ] == "turn_off - device - {} - on - off - None - 0".format("NEW_DOMAIN.entity") diff --git a/script/split_tests.py b/script/split_tests.py index c64de46a068..8da03bd749b 100755 --- a/script/split_tests.py +++ b/script/split_tests.py @@ -49,27 +49,16 @@ class BucketHolder: test_folder.get_all_flatten(), reverse=True, key=lambda x: x.total_tests ) for tests in sorted_tests: + print(f"{tests.total_tests:>{digits}} tests in {tests.path}") if tests.added_to_bucket: # Already added to bucket continue - print(f"{tests.total_tests:>{digits}} tests in {tests.path}") smallest_bucket = min(self._buckets, key=lambda x: x.total_tests) - is_file = isinstance(tests, TestFile) if ( smallest_bucket.total_tests + tests.total_tests < self._tests_per_bucket - ) or is_file: + ) or isinstance(tests, TestFile): smallest_bucket.add(tests) - # Ensure all files from the same folder are in the same bucket - # to ensure that syrupy correctly identifies unused snapshots - if is_file: - for other_test in tests.parent.children.values(): - if other_test is tests or isinstance(other_test, TestFolder): - continue - print( - f"{other_test.total_tests:>{digits}} tests in {other_test.path} (same bucket)" - ) - smallest_bucket.add(other_test) # verify that all tests are added to a bucket if not test_folder.added_to_bucket: @@ -77,7 +66,7 @@ class BucketHolder: def create_ouput_file(self) -> None: """Create output file.""" - with Path("pytest_buckets.txt").open("w") as file: + with open("pytest_buckets.txt", "w") as file: for idx, bucket in enumerate(self._buckets): print(f"Bucket {idx+1} has {bucket.total_tests} tests") file.write(bucket.get_paths_line()) @@ -90,7 +79,6 @@ class TestFile: total_tests: int path: Path added_to_bucket: bool = field(default=False, init=False) - parent: TestFolder | None = field(default=None, init=False) def add_to_bucket(self) -> None: """Add test file to bucket.""" @@ -137,7 +125,6 @@ class TestFolder: def add_test_file(self, file: TestFile) -> None: """Add test file to folder.""" path = file.path - file.parent = self relative_path = path.relative_to(self.path) if not relative_path.parts: raise ValueError("Path is not a child of this folder") diff --git a/script/translations/download.py b/script/translations/download.py index 756de46fb61..8f7327c07ec 100755 --- a/script/translations/download.py +++ b/script/translations/download.py @@ -4,7 +4,8 @@ from __future__ import annotations import json -from pathlib import Path +import os +import pathlib import re import subprocess @@ -13,7 +14,7 @@ from .error import ExitApp from .util import get_lokalise_token, load_json_from_path FILENAME_FORMAT = re.compile(r"strings\.(?P\w+)\.json") -DOWNLOAD_DIR = Path("build/translations-download").absolute() +DOWNLOAD_DIR = pathlib.Path("build/translations-download").absolute() def run_download_docker(): @@ -55,32 +56,35 @@ def run_download_docker(): raise ExitApp("Failed to download translations") -def save_json(filename: Path, data: list | dict) -> None: - """Save JSON data to a file.""" - filename.write_text(json.dumps(data, sort_keys=True, indent=4), encoding="utf-8") +def save_json(filename: str, data: list | dict): + """Save JSON data to a file. + + Returns True on success. 
+ """ + data = json.dumps(data, sort_keys=True, indent=4) + with open(filename, "w", encoding="utf-8") as fdesc: + fdesc.write(data) + return True + return False -def get_component_path(lang, component) -> Path | None: +def get_component_path(lang, component): """Get the component translation path.""" - if (Path("homeassistant") / "components" / component).is_dir(): - return ( - Path("homeassistant") - / "components" - / component - / "translations" - / f"{lang}.json" + if os.path.isdir(os.path.join("homeassistant", "components", component)): + return os.path.join( + "homeassistant", "components", component, "translations", f"{lang}.json" ) return None -def get_platform_path(lang, component, platform) -> Path: +def get_platform_path(lang, component, platform): """Get the platform translation path.""" - return ( - Path("homeassistant") - / "components" - / component - / "translations" - / f"{platform}.{lang}.json" + return os.path.join( + "homeassistant", + "components", + component, + "translations", + f"{platform}.{lang}.json", ) @@ -103,7 +107,7 @@ def save_language_translations(lang, translations): f"Skipping {lang} for {component}, as the integration doesn't seem to exist." ) continue - path.parent.mkdir(parents=True, exist_ok=True) + os.makedirs(os.path.dirname(path), exist_ok=True) save_json(path, base_translations) if "platform" not in component_translations: @@ -113,7 +117,7 @@ def save_language_translations(lang, translations): "platform" ].items(): path = get_platform_path(lang, component, platform) - path.parent.mkdir(parents=True, exist_ok=True) + os.makedirs(os.path.dirname(path), exist_ok=True) save_json(path, platform_translations) diff --git a/script/version_bump.py b/script/version_bump.py index ff94c01a5a2..fb4fe2f7868 100755 --- a/script/version_bump.py +++ b/script/version_bump.py @@ -2,7 +2,6 @@ """Helper script to bump the current version.""" import argparse -from pathlib import Path import re import subprocess @@ -111,7 +110,8 @@ def bump_version( def write_version(version): """Update Home Assistant constant file with new version.""" - content = Path("homeassistant/const.py").read_text() + with open("homeassistant/const.py") as fil: + content = fil.read() major, minor, patch = str(version).split(".", 2) @@ -125,21 +125,25 @@ def write_version(version): "PATCH_VERSION: Final = .*\n", f'PATCH_VERSION: Final = "{patch}"\n', content ) - Path("homeassistant/const.py").write_text(content) + with open("homeassistant/const.py", "w") as fil: + fil.write(content) def write_version_metadata(version: Version) -> None: """Update pyproject.toml file with new version.""" - content = Path("pyproject.toml").read_text(encoding="utf8") + with open("pyproject.toml", encoding="utf8") as fp: + content = fp.read() content = re.sub(r"(version\W+=\W).+\n", f'\\g<1>"{version}"\n', content, count=1) - Path("pyproject.toml").write_text(content, encoding="utf8") + with open("pyproject.toml", "w", encoding="utf8") as fp: + fp.write(content) def write_ci_workflow(version: Version) -> None: """Update ci workflow with new version.""" - content = Path(".github/workflows/ci.yaml").read_text() + with open(".github/workflows/ci.yaml") as fp: + content = fp.read() short_version = ".".join(str(version).split(".", maxsplit=2)[:2]) content = re.sub( @@ -149,7 +153,8 @@ def write_ci_workflow(version: Version) -> None: count=1, ) - Path(".github/workflows/ci.yaml").write_text(content) + with open(".github/workflows/ci.yaml", "w") as fp: + fp.write(content) def main() -> None: diff --git 
a/tests/auth/mfa_modules/test_insecure_example.py b/tests/auth/mfa_modules/test_insecure_example.py index 8caca780ecb..f7f8a327059 100644 --- a/tests/auth/mfa_modules/test_insecure_example.py +++ b/tests/auth/mfa_modules/test_insecure_example.py @@ -121,7 +121,7 @@ async def test_login(hass: HomeAssistant) -> None: ) assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == "mfa" - assert result["data_schema"].schema.get("pin") is str + assert result["data_schema"].schema.get("pin") == str result = await hass.auth.login_flow.async_configure( result["flow_id"], {"pin": "invalid-code"} diff --git a/tests/auth/mfa_modules/test_notify.py b/tests/auth/mfa_modules/test_notify.py index 8047ba2fef3..23b8811dbf9 100644 --- a/tests/auth/mfa_modules/test_notify.py +++ b/tests/auth/mfa_modules/test_notify.py @@ -155,7 +155,7 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: ) assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == "mfa" - assert result["data_schema"].schema.get("code") is str + assert result["data_schema"].schema.get("code") == str # wait service call finished await hass.async_block_till_done() @@ -165,7 +165,8 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: assert notify_call.domain == "notify" assert notify_call.service == "test-notify" message = notify_call.data["message"] - assert MOCK_CODE in message + message.hass = hass + assert MOCK_CODE in message.async_render() with patch("pyotp.HOTP.verify", return_value=False): result = await hass.auth.login_flow.async_configure( @@ -213,7 +214,7 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: ) assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == "mfa" - assert result["data_schema"].schema.get("code") is str + assert result["data_schema"].schema.get("code") == str # wait service call finished await hass.async_block_till_done() @@ -223,7 +224,8 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: assert notify_call.domain == "notify" assert notify_call.service == "test-notify" message = notify_call.data["message"] - assert MOCK_CODE in message + message.hass = hass + assert MOCK_CODE in message.async_render() with patch("pyotp.HOTP.verify", return_value=True): result = await hass.auth.login_flow.async_configure( @@ -262,7 +264,8 @@ async def test_setup_user_notify_service(hass: HomeAssistant) -> None: assert notify_call.domain == "notify" assert notify_call.service == "test1" message = notify_call.data["message"] - assert MOCK_CODE in message + message.hass = hass + assert MOCK_CODE in message.async_render() with patch("pyotp.HOTP.at", return_value=MOCK_CODE_2): step = await flow.async_step_setup({"code": "invalid"}) @@ -278,7 +281,8 @@ async def test_setup_user_notify_service(hass: HomeAssistant) -> None: assert notify_call.domain == "notify" assert notify_call.service == "test1" message = notify_call.data["message"] - assert MOCK_CODE_2 in message + message.hass = hass + assert MOCK_CODE_2 in message.async_render() with patch("pyotp.HOTP.verify", return_value=True): step = await flow.async_step_setup({"code": MOCK_CODE_2}) diff --git a/tests/auth/mfa_modules/test_totp.py b/tests/auth/mfa_modules/test_totp.py index fadc3214712..961db3f44ca 100644 --- a/tests/auth/mfa_modules/test_totp.py +++ b/tests/auth/mfa_modules/test_totp.py @@ -114,7 +114,7 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: ) assert result["type"] == 
data_entry_flow.FlowResultType.FORM assert result["step_id"] == "mfa" - assert result["data_schema"].schema.get("code") is str + assert result["data_schema"].schema.get("code") == str with patch("pyotp.TOTP.verify", return_value=False): result = await hass.auth.login_flow.async_configure( diff --git a/tests/auth/providers/test_command_line.py b/tests/auth/providers/test_command_line.py index 2ce49730e5f..016ce767bad 100644 --- a/tests/auth/providers/test_command_line.py +++ b/tests/auth/providers/test_command_line.py @@ -10,11 +10,10 @@ from homeassistant import data_entry_flow from homeassistant.auth import AuthManager, auth_store, models as auth_models from homeassistant.auth.providers import command_line from homeassistant.const import CONF_TYPE -from homeassistant.core import HomeAssistant @pytest.fixture -async def store(hass: HomeAssistant) -> auth_store.AuthStore: +async def store(hass): """Mock store.""" store = auth_store.AuthStore(hass) await store.async_load() @@ -22,9 +21,7 @@ async def store(hass: HomeAssistant) -> auth_store.AuthStore: @pytest.fixture -def provider( - hass: HomeAssistant, store: auth_store.AuthStore -) -> command_line.CommandLineAuthProvider: +def provider(hass, store): """Mock provider.""" return command_line.CommandLineAuthProvider( hass, @@ -41,18 +38,12 @@ def provider( @pytest.fixture -def manager( - hass: HomeAssistant, - store: auth_store.AuthStore, - provider: command_line.CommandLineAuthProvider, -) -> AuthManager: +def manager(hass, store, provider): """Mock manager.""" return AuthManager(hass, store, {(provider.type, provider.id): provider}, {}) -async def test_create_new_credential( - manager: AuthManager, provider: command_line.CommandLineAuthProvider -) -> None: +async def test_create_new_credential(manager, provider) -> None: """Test that we create a new credential.""" credentials = await provider.async_get_or_create_credentials( {"username": "good-user", "password": "good-pass"} @@ -66,9 +57,7 @@ async def test_create_new_credential( assert not user.local_only -async def test_match_existing_credentials( - provider: command_line.CommandLineAuthProvider, -) -> None: +async def test_match_existing_credentials(store, provider) -> None: """See if we match existing users.""" existing = auth_models.Credentials( id=uuid.uuid4(), @@ -84,26 +73,24 @@ async def test_match_existing_credentials( assert credentials is existing -async def test_invalid_username(provider: command_line.CommandLineAuthProvider) -> None: +async def test_invalid_username(provider) -> None: """Test we raise if incorrect user specified.""" with pytest.raises(command_line.InvalidAuthError): await provider.async_validate_login("bad-user", "good-pass") -async def test_invalid_password(provider: command_line.CommandLineAuthProvider) -> None: +async def test_invalid_password(provider) -> None: """Test we raise if incorrect password specified.""" with pytest.raises(command_line.InvalidAuthError): await provider.async_validate_login("good-user", "bad-pass") -async def test_good_auth(provider: command_line.CommandLineAuthProvider) -> None: +async def test_good_auth(provider) -> None: """Test nothing is raised with good credentials.""" await provider.async_validate_login("good-user", "good-pass") -async def test_good_auth_with_meta( - manager: AuthManager, provider: command_line.CommandLineAuthProvider -) -> None: +async def test_good_auth_with_meta(manager, provider) -> None: """Test metadata is added upon successful authentication.""" provider.config[command_line.CONF_ARGS] = 
["--with-meta"] provider.config[command_line.CONF_META] = True @@ -123,9 +110,7 @@ async def test_good_auth_with_meta( assert user.local_only -async def test_utf_8_username_password( - provider: command_line.CommandLineAuthProvider, -) -> None: +async def test_utf_8_username_password(provider) -> None: """Test that we create a new credential.""" credentials = await provider.async_get_or_create_credentials( {"username": "ßßß", "password": "äöü"} @@ -133,9 +118,7 @@ async def test_utf_8_username_password( assert credentials.is_new is True -async def test_login_flow_validates( - provider: command_line.CommandLineAuthProvider, -) -> None: +async def test_login_flow_validates(provider) -> None: """Test login flow.""" flow = await provider.async_login_flow({}) result = await flow.async_step_init() @@ -154,7 +137,7 @@ async def test_login_flow_validates( assert result["data"]["username"] == "good-user" -async def test_strip_username(provider: command_line.CommandLineAuthProvider) -> None: +async def test_strip_username(provider) -> None: """Test authentication works with username with whitespace around.""" flow = await provider.async_login_flow({}) result = await flow.async_step_init( diff --git a/tests/auth/providers/test_insecure_example.py b/tests/auth/providers/test_insecure_example.py index 7c28028753c..f0043231c04 100644 --- a/tests/auth/providers/test_insecure_example.py +++ b/tests/auth/providers/test_insecure_example.py @@ -7,11 +7,10 @@ import pytest from homeassistant.auth import AuthManager, auth_store, models as auth_models from homeassistant.auth.providers import insecure_example -from homeassistant.core import HomeAssistant @pytest.fixture -async def store(hass: HomeAssistant) -> auth_store.AuthStore: +async def store(hass): """Mock store.""" store = auth_store.AuthStore(hass) await store.async_load() @@ -19,9 +18,7 @@ async def store(hass: HomeAssistant) -> auth_store.AuthStore: @pytest.fixture -def provider( - hass: HomeAssistant, store: auth_store.AuthStore -) -> insecure_example.ExampleAuthProvider: +def provider(hass, store): """Mock provider.""" return insecure_example.ExampleAuthProvider( hass, @@ -41,18 +38,12 @@ def provider( @pytest.fixture -def manager( - hass: HomeAssistant, - store: auth_store.AuthStore, - provider: insecure_example.ExampleAuthProvider, -) -> AuthManager: +def manager(hass, store, provider): """Mock manager.""" return AuthManager(hass, store, {(provider.type, provider.id): provider}, {}) -async def test_create_new_credential( - manager: AuthManager, provider: insecure_example.ExampleAuthProvider -) -> None: +async def test_create_new_credential(manager, provider) -> None: """Test that we create a new credential.""" credentials = await provider.async_get_or_create_credentials( {"username": "user-test", "password": "password-test"} @@ -64,9 +55,7 @@ async def test_create_new_credential( assert user.is_active -async def test_match_existing_credentials( - provider: insecure_example.ExampleAuthProvider, -) -> None: +async def test_match_existing_credentials(store, provider) -> None: """See if we match existing users.""" existing = auth_models.Credentials( id=uuid.uuid4(), @@ -82,21 +71,19 @@ async def test_match_existing_credentials( assert credentials is existing -async def test_verify_username(provider: insecure_example.ExampleAuthProvider) -> None: +async def test_verify_username(provider) -> None: """Test we raise if incorrect user specified.""" with pytest.raises(insecure_example.InvalidAuthError): await 
provider.async_validate_login("non-existing-user", "password-test") -async def test_verify_password(provider: insecure_example.ExampleAuthProvider) -> None: +async def test_verify_password(provider) -> None: """Test we raise if incorrect user specified.""" with pytest.raises(insecure_example.InvalidAuthError): await provider.async_validate_login("user-test", "incorrect-password") -async def test_utf_8_username_password( - provider: insecure_example.ExampleAuthProvider, -) -> None: +async def test_utf_8_username_password(provider) -> None: """Test that we create a new credential.""" credentials = await provider.async_get_or_create_credentials( {"username": "🎉", "password": "😎"} diff --git a/tests/auth/providers/test_trusted_networks.py b/tests/auth/providers/test_trusted_networks.py index e738e8f0911..2f84a256f2d 100644 --- a/tests/auth/providers/test_trusted_networks.py +++ b/tests/auth/providers/test_trusted_networks.py @@ -17,7 +17,7 @@ from homeassistant.setup import async_setup_component @pytest.fixture -async def store(hass: HomeAssistant) -> auth_store.AuthStore: +async def store(hass): """Mock store.""" store = auth_store.AuthStore(hass) await store.async_load() @@ -25,9 +25,7 @@ async def store(hass: HomeAssistant) -> auth_store.AuthStore: @pytest.fixture -def provider( - hass: HomeAssistant, store: auth_store.AuthStore -) -> tn_auth.TrustedNetworksAuthProvider: +def provider(hass, store): """Mock provider.""" return tn_auth.TrustedNetworksAuthProvider( hass, @@ -47,9 +45,7 @@ def provider( @pytest.fixture -def provider_with_user( - hass: HomeAssistant, store: auth_store.AuthStore -) -> tn_auth.TrustedNetworksAuthProvider: +def provider_with_user(hass, store): """Mock provider with trusted users config.""" return tn_auth.TrustedNetworksAuthProvider( hass, @@ -75,9 +71,7 @@ def provider_with_user( @pytest.fixture -def provider_bypass_login( - hass: HomeAssistant, store: auth_store.AuthStore -) -> tn_auth.TrustedNetworksAuthProvider: +def provider_bypass_login(hass, store): """Mock provider with allow_bypass_login config.""" return tn_auth.TrustedNetworksAuthProvider( hass, @@ -98,21 +92,13 @@ def provider_bypass_login( @pytest.fixture -def manager( - hass: HomeAssistant, - store: auth_store.AuthStore, - provider: tn_auth.TrustedNetworksAuthProvider, -) -> auth.AuthManager: +def manager(hass, store, provider): """Mock manager.""" return auth.AuthManager(hass, store, {(provider.type, provider.id): provider}, {}) @pytest.fixture -def manager_with_user( - hass: HomeAssistant, - store: auth_store.AuthStore, - provider_with_user: tn_auth.TrustedNetworksAuthProvider, -) -> auth.AuthManager: +def manager_with_user(hass, store, provider_with_user): """Mock manager with trusted user.""" return auth.AuthManager( hass, @@ -123,11 +109,7 @@ def manager_with_user( @pytest.fixture -def manager_bypass_login( - hass: HomeAssistant, - store: auth_store.AuthStore, - provider_bypass_login: tn_auth.TrustedNetworksAuthProvider, -) -> auth.AuthManager: +def manager_bypass_login(hass, store, provider_bypass_login): """Mock manager with allow bypass login.""" return auth.AuthManager( hass, @@ -137,7 +119,7 @@ def manager_bypass_login( ) -async def test_config_schema() -> None: +async def test_config_schema(): """Test CONFIG_SCHEMA.""" # Valid configuration tn_auth.CONFIG_SCHEMA( @@ -163,9 +145,7 @@ async def test_config_schema() -> None: ) -async def test_trusted_networks_credentials( - manager: auth.AuthManager, provider: tn_auth.TrustedNetworksAuthProvider -) -> None: +async def 
test_trusted_networks_credentials(manager, provider) -> None: """Test trusted_networks credentials related functions.""" owner = await manager.async_create_user("test-owner") tn_owner_cred = await provider.async_get_or_create_credentials({"user": owner.id}) @@ -182,24 +162,22 @@ async def test_trusted_networks_credentials( await provider.async_get_or_create_credentials({"user": "invalid-user"}) -async def test_validate_access(provider: tn_auth.TrustedNetworksAuthProvider) -> None: +async def test_validate_access(provider) -> None: """Test validate access from trusted networks.""" provider.async_validate_access(ip_address("192.168.0.1")) provider.async_validate_access(ip_address("192.168.128.10")) provider.async_validate_access(ip_address("::1")) provider.async_validate_access(ip_address("fd01:db8::ff00:42:8329")) - with pytest.raises(auth.InvalidAuthError): + with pytest.raises(tn_auth.InvalidAuthError): provider.async_validate_access(ip_address("192.168.0.2")) - with pytest.raises(auth.InvalidAuthError): + with pytest.raises(tn_auth.InvalidAuthError): provider.async_validate_access(ip_address("127.0.0.1")) - with pytest.raises(auth.InvalidAuthError): + with pytest.raises(tn_auth.InvalidAuthError): provider.async_validate_access(ip_address("2001:db8::ff00:42:8329")) -async def test_validate_access_proxy( - hass: HomeAssistant, provider: tn_auth.TrustedNetworksAuthProvider -) -> None: +async def test_validate_access_proxy(hass: HomeAssistant, provider) -> None: """Test validate access from trusted networks are blocked from proxy.""" await async_setup_component( @@ -214,17 +192,15 @@ async def test_validate_access_proxy( ) provider.async_validate_access(ip_address("192.168.128.2")) provider.async_validate_access(ip_address("fd00::2")) - with pytest.raises(auth.InvalidAuthError): + with pytest.raises(tn_auth.InvalidAuthError): provider.async_validate_access(ip_address("192.168.128.0")) - with pytest.raises(auth.InvalidAuthError): + with pytest.raises(tn_auth.InvalidAuthError): provider.async_validate_access(ip_address("192.168.128.1")) - with pytest.raises(auth.InvalidAuthError): + with pytest.raises(tn_auth.InvalidAuthError): provider.async_validate_access(ip_address("fd00::1")) -async def test_validate_access_cloud( - hass: HomeAssistant, provider: tn_auth.TrustedNetworksAuthProvider -) -> None: +async def test_validate_access_cloud(hass: HomeAssistant, provider) -> None: """Test validate access from trusted networks are blocked from cloud.""" await async_setup_component( hass, @@ -241,25 +217,21 @@ async def test_validate_access_cloud( provider.async_validate_access(ip_address("192.168.128.2")) remote.is_cloud_request.set(True) - with pytest.raises(auth.InvalidAuthError): + with pytest.raises(tn_auth.InvalidAuthError): provider.async_validate_access(ip_address("192.168.128.2")) -async def test_validate_refresh_token( - provider: tn_auth.TrustedNetworksAuthProvider, -) -> None: +async def test_validate_refresh_token(provider) -> None: """Verify re-validation of refresh token.""" with patch.object(provider, "async_validate_access") as mock: - with pytest.raises(auth.InvalidAuthError): + with pytest.raises(tn_auth.InvalidAuthError): provider.async_validate_refresh_token(Mock(), None) provider.async_validate_refresh_token(Mock(), "127.0.0.1") mock.assert_called_once_with(ip_address("127.0.0.1")) -async def test_login_flow( - manager: auth.AuthManager, provider: tn_auth.TrustedNetworksAuthProvider -) -> None: +async def test_login_flow(manager, provider) -> None: """Test login flow.""" owner = 
await manager.async_create_user("test-owner") user = await manager.async_create_user("test-user") @@ -286,10 +258,7 @@ async def test_login_flow( assert step["data"]["user"] == user.id -async def test_trusted_users_login( - manager_with_user: auth.AuthManager, - provider_with_user: tn_auth.TrustedNetworksAuthProvider, -) -> None: +async def test_trusted_users_login(manager_with_user, provider_with_user) -> None: """Test available user list changed per different IP.""" owner = await manager_with_user.async_create_user("test-owner") sys_user = await manager_with_user.async_create_system_user( @@ -369,10 +338,7 @@ async def test_trusted_users_login( assert schema({"user": sys_user.id}) -async def test_trusted_group_login( - manager_with_user: auth.AuthManager, - provider_with_user: tn_auth.TrustedNetworksAuthProvider, -) -> None: +async def test_trusted_group_login(manager_with_user, provider_with_user) -> None: """Test config trusted_user with group_id.""" owner = await manager_with_user.async_create_user("test-owner") # create a user in user group @@ -425,10 +391,7 @@ async def test_trusted_group_login( assert schema({"user": user.id}) -async def test_bypass_login_flow( - manager_bypass_login: auth.AuthManager, - provider_bypass_login: tn_auth.TrustedNetworksAuthProvider, -) -> None: +async def test_bypass_login_flow(manager_bypass_login, provider_bypass_login) -> None: """Test login flow can be bypass if only one user available.""" owner = await manager_bypass_login.async_create_user("test-owner") diff --git a/tests/common.py b/tests/common.py index 8bd45e4d7f8..52ea4861c81 100644 --- a/tests/common.py +++ b/tests/common.py @@ -3,17 +3,8 @@ from __future__ import annotations import asyncio -from collections.abc import ( - AsyncGenerator, - Callable, - Coroutine, - Generator, - Iterable, - Iterator, - Mapping, - Sequence, -) -from contextlib import asynccontextmanager, contextmanager, suppress +from collections.abc import Callable, Coroutine, Mapping, Sequence +from contextlib import asynccontextmanager, contextmanager from datetime import UTC, datetime, timedelta from enum import Enum import functools as ft @@ -23,6 +14,7 @@ import json import logging import os import pathlib +import threading import time from types import FrameType, ModuleType from typing import Any, Literal, NoReturn @@ -31,7 +23,7 @@ from unittest.mock import AsyncMock, Mock, patch from aiohttp.test_utils import unused_port as get_test_instance_port # noqa: F401 import pytest from syrupy import SnapshotAssertion -from typing_extensions import TypeVar +from typing_extensions import AsyncGenerator, Generator import voluptuous as vol from homeassistant import auth, bootstrap, config_entries, loader @@ -46,8 +38,8 @@ from homeassistant.components import device_automation, persistent_notification from homeassistant.components.device_automation import ( # noqa: F401 _async_get_device_automation_capabilities as async_get_device_automation_capabilities, ) -from homeassistant.config import IntegrationConfigInfo, async_process_component_config -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config import async_process_component_config +from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import ( DEVICE_DEFAULT_NAME, EVENT_HOMEASSISTANT_CLOSE, @@ -90,13 +82,8 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.json import JSONEncoder, 
_orjson_default_encoder, json_dumps from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util.async_ import ( - _SHUTDOWN_RUN_CALLBACK_THREADSAFE, - get_scheduled_timer_handles, - run_callback_threadsafe, -) +from homeassistant.util.async_ import run_callback_threadsafe import homeassistant.util.dt as dt_util -from homeassistant.util.event_type import EventType from homeassistant.util.json import ( JsonArrayType, JsonObjectType, @@ -114,8 +101,6 @@ from .testing_config.custom_components.test_constant_deprecation import ( import_deprecated_constant, ) -_DataT = TypeVar("_DataT", bound=Mapping[str, Any], default=dict[str, Any]) - _LOGGER = logging.getLogger(__name__) INSTANCES = [] CLIENT_ID = "https://example.com/app" @@ -173,6 +158,48 @@ def get_test_config_dir(*add_path): return os.path.join(os.path.dirname(__file__), "testing_config", *add_path) +@contextmanager +def get_test_home_assistant() -> Generator[HomeAssistant]: + """Return a Home Assistant object pointing at test config directory.""" + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + context_manager = async_test_home_assistant(loop) + hass = loop.run_until_complete(context_manager.__aenter__()) + + loop_stop_event = threading.Event() + + def run_loop() -> None: + """Run event loop.""" + + loop._thread_ident = threading.get_ident() + hass.loop_thread_id = loop._thread_ident + loop.run_forever() + loop_stop_event.set() + + orig_stop = hass.stop + hass._stopped = Mock(set=loop.stop) + + def start_hass(*mocks: Any) -> None: + """Start hass.""" + asyncio.run_coroutine_threadsafe(hass.async_start(), loop).result() + + def stop_hass() -> None: + """Stop hass.""" + orig_stop() + loop_stop_event.wait() + + hass.start = start_hass + hass.stop = stop_hass + + threading.Thread(name="LoopThread", target=run_loop, daemon=False).start() + + try: + yield hass + finally: + loop.run_until_complete(context_manager.__aexit__(None, None, None)) + loop.close() + + class StoreWithoutWriteLoad[_T: (Mapping[str, Any] | Sequence[Any])](storage.Store[_T]): """Fake store that does not write or load. 
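A minimal usage sketch for the thread-backed get_test_home_assistant() helper defined above, assuming the start/stop wrappers behave as shown in this hunk; the entity id and state values are illustrative only and are not part of this diff:

    # Sketch only: drive the background-loop instance from a synchronous test.
    from tests.common import get_test_home_assistant


    def test_sync_state_roundtrip() -> None:
        """Set and read back a state without writing an async test."""
        with get_test_home_assistant() as hass:
            hass.start()  # runs hass.async_start() on the loop thread
            # states.set/get are the thread-safe sync wrappers around the loop
            hass.states.set("light.kitchen", "on")
            state = hass.states.get("light.kitchen")
            assert state is not None
            assert state.state == "on"
            hass.stop()  # stops the loop and waits for the loop thread to exit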
Used for testing.""" @@ -195,7 +222,6 @@ async def async_test_home_assistant( event_loop: asyncio.AbstractEventLoop | None = None, load_registries: bool = True, config_dir: str | None = None, - initial_state: CoreState = CoreState.running, ) -> AsyncGenerator[HomeAssistant]: """Return a Home Assistant object pointing at test config dir.""" hass = HomeAssistant(config_dir or get_test_config_dir()) @@ -323,7 +349,7 @@ async def async_test_home_assistant( await rs.async_load(hass) hass.data[bootstrap.DATA_REGISTRIES_LOADED] = None - hass.set_state(initial_state) + hass.set_state(CoreState.running) @callback def clear_instance(event): @@ -338,9 +364,6 @@ async def async_test_home_assistant( finally: # Restore timezone, it is set when creating the hass object dt_util.set_default_time_zone(orig_tz) - # Remove loop shutdown indicator to not interfere with additional hass objects - with suppress(AttributeError): - delattr(hass.loop, _SHUTDOWN_RUN_CALLBACK_THREADSAFE) def async_mock_service( @@ -384,16 +407,14 @@ mock_service = threadsafe_callback_factory(async_mock_service) @callback -def async_mock_intent(hass: HomeAssistant, intent_typ: str) -> list[intent.Intent]: +def async_mock_intent(hass, intent_typ): """Set up a fake intent handler.""" - intents: list[intent.Intent] = [] + intents = [] class MockIntentHandler(intent.IntentHandler): intent_type = intent_typ - async def async_handle( - self, intent_obj: intent.Intent - ) -> intent.IntentResponse: + async def async_handle(self, intent_obj): """Handle the intent.""" intents.append(intent_obj) return intent_obj.create_response() @@ -419,7 +440,7 @@ def async_fire_mqtt_message( from paho.mqtt.client import MQTTMessage # pylint: disable-next=import-outside-toplevel - from homeassistant.components.mqtt import MqttData + from homeassistant.components.mqtt.models import MqttData if isinstance(payload, str): payload = payload.encode("utf-8") @@ -492,7 +513,7 @@ def _async_fire_time_changed( hass: HomeAssistant, utc_datetime: datetime | None, fire_all: bool ) -> None: timestamp = dt_util.utc_to_timestamp(utc_datetime) - for task in list(get_scheduled_timer_handles(hass.loop)): + for task in list(hass.loop._scheduled): if not isinstance(task, asyncio.TimerHandle): continue if task.cancelled(): @@ -990,7 +1011,6 @@ class MockConfigEntry(config_entries.ConfigEntry): *, data=None, disabled_by=None, - discovery_keys=None, domain="test", entry_id=None, minor_version=1, @@ -1005,11 +1025,9 @@ class MockConfigEntry(config_entries.ConfigEntry): version=1, ) -> None: """Initialize a mock config entry.""" - discovery_keys = discovery_keys or {} kwargs = { "data": data or {}, "disabled_by": disabled_by, - "discovery_keys": discovery_keys, "domain": domain, "entry_id": entry_id or ulid_util.ulid_now(), "minor_version": minor_version, @@ -1057,60 +1075,6 @@ class MockConfigEntry(config_entries.ConfigEntry): """ self._async_set_state(hass, state, reason) - async def start_reauth_flow( - self, - hass: HomeAssistant, - context: dict[str, Any] | None = None, - data: dict[str, Any] | None = None, - ) -> ConfigFlowResult: - """Start a reauthentication flow.""" - if self.entry_id not in hass.config_entries._entries: - raise ValueError("Config entry must be added to hass to start reauth flow") - return await start_reauth_flow(hass, self, context, data) - - async def start_reconfigure_flow( - self, - hass: HomeAssistant, - *, - show_advanced_options: bool = False, - ) -> ConfigFlowResult: - """Start a reconfiguration flow.""" - if self.entry_id not in 
hass.config_entries._entries: - raise ValueError( - "Config entry must be added to hass to start reconfiguration flow" - ) - return await hass.config_entries.flow.async_init( - self.domain, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": self.entry_id, - "show_advanced_options": show_advanced_options, - }, - ) - - -async def start_reauth_flow( - hass: HomeAssistant, - entry: ConfigEntry, - context: dict[str, Any] | None = None, - data: dict[str, Any] | None = None, -) -> ConfigFlowResult: - """Start a reauthentication flow for a config entry. - - This helper method should be aligned with `ConfigEntry._async_init_reauth`. - """ - return await hass.config_entries.flow.async_init( - entry.domain, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "title_placeholders": {"name": entry.title}, - "unique_id": entry.unique_id, - } - | (context or {}), - data=entry.data | (data or {}), - ) - def patch_yaml_files(files_dict, endswith=True): """Patch load_yaml with a dictionary of yaml files.""" @@ -1163,12 +1127,7 @@ def assert_setup_component(count, domain=None): """ config = {} - async def mock_psc( - hass: HomeAssistant, - config_input: ConfigType, - integration: loader.Integration, - component: loader.ComponentProtocol | None = None, - ) -> IntegrationConfigInfo: + async def mock_psc(hass, config_input, integration, component=None): """Mock the prepare_setup_component to capture config.""" domain_input = integration.domain integration_config_info = await async_process_component_config( @@ -1469,7 +1428,7 @@ async def get_system_health_info(hass: HomeAssistant, domain: str) -> dict[str, @contextmanager -def mock_config_flow(domain: str, config_flow: type[ConfigFlow]) -> Iterator[None]: +def mock_config_flow(domain: str, config_flow: type[ConfigFlow]) -> None: """Mock a config flow handler.""" original_handler = config_entries.HANDLERS.get(domain) config_entries.HANDLERS[domain] = config_flow @@ -1537,14 +1496,12 @@ def mock_platform( module_cache[platform_path] = module or Mock() -def async_capture_events( - hass: HomeAssistant, event_name: EventType[_DataT] | str -) -> list[Event[_DataT]]: +def async_capture_events(hass: HomeAssistant, event_name: str) -> list[Event]: """Create a helper that captures events.""" - events: list[Event[_DataT]] = [] + events = [] @callback - def capture_events(event: Event[_DataT]) -> None: + def capture_events(event: Event) -> None: events.append(event) hass.bus.async_listen(event_name, capture_events) @@ -1553,14 +1510,14 @@ def async_capture_events( @callback -def async_mock_signal[*_Ts]( - hass: HomeAssistant, signal: SignalType[*_Ts] | str -) -> list[tuple[*_Ts]]: +def async_mock_signal( + hass: HomeAssistant, signal: SignalType[Any] | str +) -> list[tuple[Any]]: """Catch all dispatches to a signal.""" - calls: list[tuple[*_Ts]] = [] + calls = [] @callback - def mock_signal_handler(*args: *_Ts) -> None: + def mock_signal_handler(*args: Any) -> None: """Mock service call.""" calls.append(args) @@ -1760,7 +1717,7 @@ def extract_stack_to_frame(extract_stack: list[Mock]) -> FrameType: def setup_test_component_platform( hass: HomeAssistant, domain: str, - entities: Iterable[Entity], + entities: Sequence[Entity], from_config_entry: bool = False, built_in: bool = True, ) -> MockPlatform: diff --git a/tests/components/abode/conftest.py b/tests/components/abode/conftest.py index 097eb568d4a..21b236540d0 100644 --- a/tests/components/abode/conftest.py +++ b/tests/components/abode/conftest.py @@ -1,11 +1,11 @@ 
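The async_capture_events helper whose signature changes above is typically used as in the following sketch; the event name and payload are illustrative and not taken from this diff:

    # Sketch only: capture events fired on the bus during a test.
    from homeassistant.core import HomeAssistant

    from tests.common import async_capture_events


    async def test_event_is_fired(hass: HomeAssistant) -> None:
        """Assert that exactly one test_event was fired with the expected data."""
        events = async_capture_events(hass, "test_event")

        hass.bus.async_fire("test_event", {"answer": 42})
        await hass.async_block_till_done()

        assert len(events) == 1
        assert events[0].data == {"answer": 42}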
"""Configuration for Abode tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch from jaraco.abode.helpers import urls as URL import pytest from requests_mock import Mocker +from typing_extensions import Generator from tests.common import load_fixture from tests.components.light.conftest import mock_light_profiles # noqa: F401 diff --git a/tests/components/abode/test_alarm_control_panel.py b/tests/components/abode/test_alarm_control_panel.py index 025afa74b80..51e0ee46838 100644 --- a/tests/components/abode/test_alarm_control_panel.py +++ b/tests/components/abode/test_alarm_control_panel.py @@ -3,10 +3,7 @@ from unittest.mock import PropertyMock, patch from homeassistant.components.abode import ATTR_DEVICE_ID -from homeassistant.components.alarm_control_panel import ( - DOMAIN as ALARM_DOMAIN, - AlarmControlPanelState, -) +from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, @@ -14,6 +11,9 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_DISARMED, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -39,7 +39,7 @@ async def test_attributes(hass: HomeAssistant) -> None: await setup_platform(hass, ALARM_DOMAIN) state = hass.states.get(DEVICE_ID) - assert state.state == AlarmControlPanelState.DISARMED + assert state.state == STATE_ALARM_DISARMED assert state.attributes.get(ATTR_DEVICE_ID) == "area_1" assert not state.attributes.get("battery_backup") assert not state.attributes.get("cellular_backup") @@ -75,7 +75,7 @@ async def test_set_alarm_away(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(DEVICE_ID) - assert state.state == AlarmControlPanelState.ARMED_AWAY + assert state.state == STATE_ALARM_ARMED_AWAY async def test_set_alarm_home(hass: HomeAssistant) -> None: @@ -105,7 +105,7 @@ async def test_set_alarm_home(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(DEVICE_ID) - assert state.state == AlarmControlPanelState.ARMED_HOME + assert state.state == STATE_ALARM_ARMED_HOME async def test_set_alarm_standby(hass: HomeAssistant) -> None: @@ -134,7 +134,7 @@ async def test_set_alarm_standby(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(DEVICE_ID) - assert state.state == AlarmControlPanelState.DISARMED + assert state.state == STATE_ALARM_DISARMED async def test_state_unknown(hass: HomeAssistant) -> None: diff --git a/tests/components/abode/test_camera.py b/tests/components/abode/test_camera.py index 1fcf250935e..5cf3263876b 100644 --- a/tests/components/abode/test_camera.py +++ b/tests/components/abode/test_camera.py @@ -3,8 +3,8 @@ from unittest.mock import patch from homeassistant.components.abode.const import DOMAIN as ABODE_DOMAIN -from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN, CameraState -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, STATE_IDLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -26,7 +26,7 @@ async def test_attributes(hass: HomeAssistant) -> None: await setup_platform(hass, CAMERA_DOMAIN) state = hass.states.get("camera.test_cam") - assert state.state 
== CameraState.IDLE + assert state.state == STATE_IDLE async def test_capture_image(hass: HomeAssistant) -> None: diff --git a/tests/components/abode/test_config_flow.py b/tests/components/abode/test_config_flow.py index 2abed387566..265a77560f7 100644 --- a/tests/components/abode/test_config_flow.py +++ b/tests/components/abode/test_config_flow.py @@ -10,8 +10,9 @@ from jaraco.abode.helpers.errors import MFA_CODE_REQUIRED import pytest from requests.exceptions import ConnectTimeout +from homeassistant.components.abode import config_flow from homeassistant.components.abode.const import CONF_POLLING, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -21,110 +22,114 @@ from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") +async def test_show_form(hass: HomeAssistant) -> None: + """Test that the form is served with no input.""" + flow = config_flow.AbodeFlowHandler() + flow.hass = hass + + result = await flow.async_step_user(user_input=None) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + async def test_one_config_allowed(hass: HomeAssistant) -> None: """Test that only one Abode configuration is allowed.""" + flow = config_flow.AbodeFlowHandler() + flow.hass = hass + MockConfigEntry( domain=DOMAIN, data={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, ).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) + step_user_result = await flow.async_step_user() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" + assert step_user_result["type"] is FlowResultType.ABORT + assert step_user_result["reason"] == "single_instance_allowed" -async def test_user_flow(hass: HomeAssistant) -> None: - """Test user flow, with various errors.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" +async def test_invalid_credentials(hass: HomeAssistant) -> None: + """Test that invalid credentials throws an error.""" + conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} + + flow = config_flow.AbodeFlowHandler() + flow.hass = hass - # Test that invalid credentials throws an error. with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=AbodeAuthenticationException( (HTTPStatus.BAD_REQUEST, "auth error") ), ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "invalid_auth"} + result = await flow.async_step_user(user_input=conf) + assert result["errors"] == {"base": "invalid_auth"} + + +async def test_connection_auth_error(hass: HomeAssistant) -> None: + """Test other than invalid credentials throws an error.""" + conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} + + flow = config_flow.AbodeFlowHandler() + flow.hass = hass - # Test other than invalid credentials throws an error. 
with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=AbodeAuthenticationException( (HTTPStatus.INTERNAL_SERVER_ERROR, "connection error") ), ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "cannot_connect"} + result = await flow.async_step_user(user_input=conf) + assert result["errors"] == {"base": "cannot_connect"} + + +async def test_connection_error(hass: HomeAssistant) -> None: + """Test login throws an error if connection times out.""" + conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} + + flow = config_flow.AbodeFlowHandler() + flow.hass = hass - # Test login throws an error if connection times out. with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=ConnectTimeout, ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "cannot_connect"} + result = await flow.async_step_user(user_input=conf) + assert result["errors"] == {"base": "cannot_connect"} - # Test success - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) + +async def test_step_user(hass: HomeAssistant) -> None: + """Test that the user step works.""" + conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} with patch("homeassistant.components.abode.config_flow.Abode"): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=conf ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "user@email.com" - assert result["data"] == { - CONF_USERNAME: "user@email.com", - CONF_PASSWORD: "password", - CONF_POLLING: False, - } + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "user@email.com" + assert result["data"] == { + CONF_USERNAME: "user@email.com", + CONF_PASSWORD: "password", + CONF_POLLING: False, + } async def test_step_mfa(hass: HomeAssistant) -> None: """Test that the MFA step works.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) + conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=AbodeAuthenticationException(MFA_CODE_REQUIRED), ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=conf ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "mfa" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mfa" with patch( "homeassistant.components.abode.config_flow.Abode", @@ -136,51 +141,49 @@ async def test_step_mfa(hass: HomeAssistant) -> None: result["flow_id"], user_input={"mfa_code": "123456"} ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "mfa" - assert 
result["errors"] == {"base": "invalid_mfa_code"} + assert result["errors"] == {"base": "invalid_mfa_code"} with patch("homeassistant.components.abode.config_flow.Abode"): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"mfa_code": "123456"} ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "user@email.com" - assert result["data"] == { - CONF_USERNAME: "user@email.com", - CONF_PASSWORD: "password", - CONF_POLLING: False, - } + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "user@email.com" + assert result["data"] == { + CONF_USERNAME: "user@email.com", + CONF_PASSWORD: "password", + CONF_POLLING: False, + } async def test_step_reauth(hass: HomeAssistant) -> None: """Test the reauth flow.""" - entry = MockConfigEntry( + conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} + + MockConfigEntry( domain=DOMAIN, unique_id="user@email.com", - data={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, - ) - entry.add_to_hass(hass) + data=conf, + ).add_to_hass(hass) - result = await entry.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - with ( - patch("homeassistant.components.abode.config_flow.Abode"), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "user@email.com", - CONF_PASSWORD: "new_password", - }, + with patch("homeassistant.components.abode.config_flow.Abode"): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH}, + data=conf, ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" - assert len(hass.config_entries.async_entries()) == 1 - assert entry.data[CONF_PASSWORD] == "new_password" + with patch("homeassistant.config_entries.ConfigEntries.async_reload"): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=conf, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + assert len(hass.config_entries.async_entries()) == 1 diff --git a/tests/components/abode/test_cover.py b/tests/components/abode/test_cover.py index 4a49648516d..cdbec0ddf68 100644 --- a/tests/components/abode/test_cover.py +++ b/tests/components/abode/test_cover.py @@ -3,12 +3,13 @@ from unittest.mock import patch from homeassistant.components.abode import ATTR_DEVICE_ID -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, CoverState +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, + STATE_CLOSED, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -33,7 +34,7 @@ async def test_attributes(hass: HomeAssistant) -> None: await setup_platform(hass, COVER_DOMAIN) state = hass.states.get(DEVICE_ID) - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes.get(ATTR_DEVICE_ID) == "ZW:00000007" assert not state.attributes.get("battery_low") assert not state.attributes.get("no_response") diff --git a/tests/components/abode/test_lock.py b/tests/components/abode/test_lock.py index fe203d0b0f4..6be1aef22ca 100644 --- a/tests/components/abode/test_lock.py 
+++ b/tests/components/abode/test_lock.py @@ -3,12 +3,13 @@ from unittest.mock import patch from homeassistant.components.abode import ATTR_DEVICE_ID -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, SERVICE_LOCK, SERVICE_UNLOCK, + STATE_LOCKED, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -33,7 +34,7 @@ async def test_attributes(hass: HomeAssistant) -> None: await setup_platform(hass, LOCK_DOMAIN) state = hass.states.get(DEVICE_ID) - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKED assert state.attributes.get(ATTR_DEVICE_ID) == "ZW:00000004" assert not state.attributes.get("battery_low") assert not state.attributes.get("no_response") diff --git a/tests/components/acaia/__init__.py b/tests/components/acaia/__init__.py deleted file mode 100644 index f4eaa39e615..00000000000 --- a/tests/components/acaia/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -"""Common test tools for the acaia integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Set up the acaia integration for testing.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/acaia/conftest.py b/tests/components/acaia/conftest.py deleted file mode 100644 index 1dc6ff31051..00000000000 --- a/tests/components/acaia/conftest.py +++ /dev/null @@ -1,80 +0,0 @@ -"""Common fixtures for the acaia tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch - -from aioacaia.acaiascale import AcaiaDeviceState -from aioacaia.const import UnitMass as AcaiaUnitOfMass -import pytest - -from homeassistant.components.acaia.const import CONF_IS_NEW_STYLE_SCALE, DOMAIN -from homeassistant.const import CONF_ADDRESS -from homeassistant.core import HomeAssistant - -from . 
import setup_integration - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.acaia.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_verify() -> Generator[AsyncMock]: - """Override is_new_scale check.""" - with patch( - "homeassistant.components.acaia.config_flow.is_new_scale", return_value=True - ) as mock_verify: - yield mock_verify - - -@pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: - """Return the default mocked config entry.""" - return MockConfigEntry( - title="LUNAR-DDEEFF", - domain=DOMAIN, - version=1, - data={ - CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", - CONF_IS_NEW_STYLE_SCALE: True, - }, - unique_id="aa:bb:cc:dd:ee:ff", - ) - - -@pytest.fixture -async def init_integration( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_scale: MagicMock -) -> None: - """Set up the acaia integration for testing.""" - await setup_integration(hass, mock_config_entry) - - -@pytest.fixture -def mock_scale() -> Generator[MagicMock]: - """Return a mocked acaia scale client.""" - with ( - patch( - "homeassistant.components.acaia.coordinator.AcaiaScale", - autospec=True, - ) as scale_mock, - ): - scale = scale_mock.return_value - scale.connected = True - scale.mac = "aa:bb:cc:dd:ee:ff" - scale.model = "Lunar" - scale.timer_running = True - scale.heartbeat_task = None - scale.process_queue_task = None - scale.device_state = AcaiaDeviceState( - battery_level=42, units=AcaiaUnitOfMass.GRAMS - ) - scale.weight = 123.45 - yield scale diff --git a/tests/components/acaia/snapshots/test_button.ambr b/tests/components/acaia/snapshots/test_button.ambr deleted file mode 100644 index cd91ca1a17a..00000000000 --- a/tests/components/acaia/snapshots/test_button.ambr +++ /dev/null @@ -1,139 +0,0 @@ -# serializer version: 1 -# name: test_buttons[button.lunar_ddeeff_reset_timer-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.lunar_ddeeff_reset_timer', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Reset timer', - 'platform': 'acaia', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reset_timer', - 'unique_id': 'aa:bb:cc:dd:ee:ff_reset_timer', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[button.lunar_ddeeff_reset_timer-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'LUNAR-DDEEFF Reset timer', - }), - 'context': , - 'entity_id': 'button.lunar_ddeeff_reset_timer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[button.lunar_ddeeff_start_stop_timer-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.lunar_ddeeff_start_stop_timer', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, 
- 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Start/stop timer', - 'platform': 'acaia', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'start_stop', - 'unique_id': 'aa:bb:cc:dd:ee:ff_start_stop', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[button.lunar_ddeeff_start_stop_timer-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'LUNAR-DDEEFF Start/stop timer', - }), - 'context': , - 'entity_id': 'button.lunar_ddeeff_start_stop_timer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[button.lunar_ddeeff_tare-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.lunar_ddeeff_tare', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tare', - 'platform': 'acaia', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tare', - 'unique_id': 'aa:bb:cc:dd:ee:ff_tare', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[button.lunar_ddeeff_tare-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'LUNAR-DDEEFF Tare', - }), - 'context': , - 'entity_id': 'button.lunar_ddeeff_tare', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/acaia/snapshots/test_init.ambr b/tests/components/acaia/snapshots/test_init.ambr deleted file mode 100644 index 1cc3d8dbbc0..00000000000 --- a/tests/components/acaia/snapshots/test_init.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: test_device - DeviceRegistryEntrySnapshot({ - 'area_id': 'kitchen', - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'acaia', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Acaia', - 'model': 'Lunar', - 'model_id': None, - 'name': 'LUNAR-DDEEFF', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': 'Kitchen', - 'sw_version': None, - 'via_device_id': None, - }) -# --- diff --git a/tests/components/acaia/test_button.py b/tests/components/acaia/test_button.py deleted file mode 100644 index f68f85e253d..00000000000 --- a/tests/components/acaia/test_button.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Tests for the acaia buttons.""" - -from datetime import timedelta -from unittest.mock import MagicMock, patch - -from freezegun.api import FrozenDateTimeFactory -from syrupy import SnapshotAssertion - -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_UNAVAILABLE, - STATE_UNKNOWN, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - -BUTTONS = ( - "tare", - "reset_timer", - "start_stop_timer", -) - - -async def test_buttons( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - mock_scale: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the acaia buttons.""" - - with patch("homeassistant.components.acaia.PLATFORMS", [Platform.BUTTON]): - await setup_integration(hass, mock_config_entry) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_button_presses( - hass: HomeAssistant, - mock_scale: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the acaia button presses.""" - - await setup_integration(hass, mock_config_entry) - - for button in BUTTONS: - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - { - ATTR_ENTITY_ID: f"button.lunar_ddeeff_{button}", - }, - blocking=True, - ) - - function = getattr(mock_scale, button) - function.assert_called_once() - - -async def test_buttons_unavailable_on_disconnected_scale( - hass: HomeAssistant, - mock_scale: MagicMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test the acaia buttons are unavailable when the scale is disconnected.""" - - await setup_integration(hass, mock_config_entry) - - for button in BUTTONS: - state = hass.states.get(f"button.lunar_ddeeff_{button}") - assert state - assert state.state == STATE_UNKNOWN - - mock_scale.connected = False - freezer.tick(timedelta(minutes=10)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - for button in BUTTONS: - state = hass.states.get(f"button.lunar_ddeeff_{button}") - assert state - assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/acaia/test_config_flow.py b/tests/components/acaia/test_config_flow.py deleted file mode 100644 index 2bf4b1dbe8a..00000000000 --- a/tests/components/acaia/test_config_flow.py +++ /dev/null @@ -1,242 +0,0 @@ -"""Test the acaia config flow.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice -import pytest - -from homeassistant.components.acaia.const import CONF_IS_NEW_STYLE_SCALE, DOMAIN -from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER -from homeassistant.const import CONF_ADDRESS -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.service_info.bluetooth import BluetoothServiceInfo - -from tests.common import MockConfigEntry - -service_info = BluetoothServiceInfo( - name="LUNAR-DDEEFF", - address="aa:bb:cc:dd:ee:ff", - rssi=-63, - manufacturer_data={}, - service_data={}, - service_uuids=[], - source="local", -) - - -@pytest.fixture -def mock_discovered_service_info() -> Generator[AsyncMock]: - """Override getting Bluetooth service info.""" - with patch( - "homeassistant.components.acaia.config_flow.async_discovered_service_info", - return_value=[service_info], - ) as mock_discovered_service_info: - yield mock_discovered_service_info - - -async def test_form( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_verify: AsyncMock, - mock_discovered_service_info: AsyncMock, -) -> None: - """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - 
assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - user_input = { - CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", - } - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=user_input, - ) - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "LUNAR-DDEEFF" - assert result2["data"] == { - **user_input, - CONF_IS_NEW_STYLE_SCALE: True, - } - - -async def test_bluetooth_discovery( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_verify: AsyncMock, -) -> None: - """Test we can discover a device.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "bluetooth_confirm" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={}, - ) - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == service_info.name - assert result2["data"] == { - CONF_ADDRESS: service_info.address, - CONF_IS_NEW_STYLE_SCALE: True, - } - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (AcaiaDeviceNotFound("Error"), "device_not_found"), - (AcaiaError, "unknown"), - (AcaiaUnknownDevice, "unsupported_device"), - ], -) -async def test_bluetooth_discovery_errors( - hass: HomeAssistant, - mock_verify: AsyncMock, - exception: Exception, - error: str, -) -> None: - """Test abortions of Bluetooth discovery.""" - mock_verify.side_effect = exception - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == error - - -async def test_already_configured( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_verify: AsyncMock, - mock_discovered_service_info: AsyncMock, -) -> None: - """Ensure we can't add the same device twice.""" - - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "already_configured" - - -async def test_already_configured_bluetooth_discovery( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, -) -> None: - """Ensure configure device is not discovered again.""" - - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (AcaiaDeviceNotFound("Error"), "device_not_found"), - (AcaiaError, "unknown"), - ], -) -async def test_recoverable_config_flow_errors( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_verify: AsyncMock, - mock_discovered_service_info: AsyncMock, - exception: Exception, - error: str, -) -> None: - """Test recoverable errors.""" - mock_verify.side_effect = exception - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - - result2 = await 
hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", - }, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": error} - - # recover - mock_verify.side_effect = None - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - { - CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", - }, - ) - assert result3["type"] is FlowResultType.CREATE_ENTRY - - -async def test_unsupported_device( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_verify: AsyncMock, - mock_discovered_service_info: AsyncMock, -) -> None: - """Test flow aborts on unsupported device.""" - mock_verify.side_effect = AcaiaUnknownDevice - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", - }, - ) - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "unsupported_device" - - -async def test_no_bluetooth_devices( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_discovered_service_info: AsyncMock, -) -> None: - """Test flow aborts on unsupported device.""" - mock_discovered_service_info.return_value = [] - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "no_devices_found" diff --git a/tests/components/acaia/test_init.py b/tests/components/acaia/test_init.py deleted file mode 100644 index 8ad988d3b9b..00000000000 --- a/tests/components/acaia/test_init.py +++ /dev/null @@ -1,65 +0,0 @@ -"""Test init of acaia integration.""" - -from datetime import timedelta -from unittest.mock import MagicMock - -from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.acaia.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from tests.common import MockConfigEntry, async_fire_time_changed - -pytestmark = pytest.mark.usefixtures("init_integration") - - -async def test_load_unload_config_entry( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, -) -> None: - """Test loading and unloading the integration.""" - - assert mock_config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED - - -@pytest.mark.parametrize( - "exception", [AcaiaError, AcaiaDeviceNotFound("Boom"), TimeoutError] -) -async def test_update_exception_leads_to_active_disconnect( - hass: HomeAssistant, - mock_scale: MagicMock, - freezer: FrozenDateTimeFactory, - exception: Exception, -) -> None: - """Test scale gets disconnected on exception.""" - - mock_scale.connect.side_effect = exception - mock_scale.connected = False - - freezer.tick(timedelta(minutes=10)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - mock_scale.device_disconnected_handler.assert_called_once() - - -async def test_device( - mock_scale: MagicMock, - device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Snapshot the device from 
registry.""" - - device = device_registry.async_get_device({(DOMAIN, mock_scale.mac)}) - assert device - assert device == snapshot diff --git a/tests/components/accuweather/conftest.py b/tests/components/accuweather/conftest.py index 737fd3f84b6..3b0006068ea 100644 --- a/tests/components/accuweather/conftest.py +++ b/tests/components/accuweather/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the AccuWeather tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.accuweather.const import DOMAIN diff --git a/tests/components/accuweather/snapshots/test_sensor.ambr b/tests/components/accuweather/snapshots/test_sensor.ambr index 3468d638bc0..5e28be5a72b 100644 --- a/tests/components/accuweather/snapshots/test_sensor.ambr +++ b/tests/components/accuweather/snapshots/test_sensor.ambr @@ -1969,58 +1969,6 @@ 'state': '9.2', }) # --- -# name: test_sensor[sensor.home_humidity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_humidity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Humidity', - 'platform': 'accuweather', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'humidity', - 'unique_id': '0123456-relativehumidity', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[sensor.home_humidity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by AccuWeather', - 'device_class': 'humidity', - 'friendly_name': 'Home Humidity', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.home_humidity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '67', - }) -# --- # name: test_sensor[sensor.home_mold_pollen_day_0-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2319,61 +2267,6 @@ 'state': '0.0', }) # --- -# name: test_sensor[sensor.home_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Pressure', - 'platform': 'accuweather', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'pressure', - 'unique_id': '0123456-pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.home_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by AccuWeather', - 'device_class': 'pressure', - 'friendly_name': 'Home Pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.home_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1012.0', - }) -# --- # name: 
test_sensor[sensor.home_pressure_tendency-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -4252,58 +4145,6 @@ 'state': '276.1', }) # --- -# name: test_sensor[sensor.home_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'accuweather', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'temperature', - 'unique_id': '0123456-temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.home_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by AccuWeather', - 'device_class': 'temperature', - 'friendly_name': 'Home Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.home_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '22.6', - }) -# --- # name: test_sensor[sensor.home_thunderstorm_probability_day_0-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/accuweather/snapshots/test_weather.ambr b/tests/components/accuweather/snapshots/test_weather.ambr index cbe1891d216..49bf4008884 100644 --- a/tests/components/accuweather/snapshots/test_weather.ambr +++ b/tests/components/accuweather/snapshots/test_weather.ambr @@ -1,4 +1,85 @@ # serializer version: 1 +# name: test_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 58, + 'condition': 'lightning-rainy', + 'datetime': '2020-07-26T05:00:00+00:00', + 'humidity': 60, + 'precipitation': 2.5, + 'precipitation_probability': 60, + 'temperature': 29.5, + 'templow': 15.4, + 'uv_index': 5, + 'wind_bearing': 166, + 'wind_gust_speed': 29.6, + 'wind_speed': 13.0, + }), + dict({ + 'apparent_temperature': 28.9, + 'cloud_coverage': 52, + 'condition': 'partlycloudy', + 'datetime': '2020-07-27T05:00:00+00:00', + 'humidity': 58, + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 26.2, + 'templow': 15.9, + 'uv_index': 7, + 'wind_bearing': 297, + 'wind_gust_speed': 14.8, + 'wind_speed': 9.3, + }), + dict({ + 'apparent_temperature': 31.6, + 'cloud_coverage': 65, + 'condition': 'partlycloudy', + 'datetime': '2020-07-28T05:00:00+00:00', + 'humidity': 52, + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 31.7, + 'templow': 16.8, + 'uv_index': 7, + 'wind_bearing': 198, + 'wind_gust_speed': 24.1, + 'wind_speed': 16.7, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 45, + 'condition': 'partlycloudy', + 'datetime': '2020-07-29T05:00:00+00:00', + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 9, + 'temperature': 24.0, + 'templow': 11.7, + 'uv_index': 6, + 'wind_bearing': 293, + 'wind_gust_speed': 24.1, + 'wind_speed': 13.0, + }), + dict({ + 'apparent_temperature': 22.2, + 'cloud_coverage': 50, + 'condition': 'partlycloudy', + 'datetime': '2020-07-30T05:00:00+00:00', + 'humidity': 55, + 'precipitation': 0.0, + 'precipitation_probability': 1, + 'temperature': 21.4, + 'templow': 12.2, + 
'uv_index': 7, + 'wind_bearing': 280, + 'wind_gust_speed': 27.8, + 'wind_speed': 18.5, + }), + ]), + }) +# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.home': dict({ diff --git a/tests/components/accuweather/test_init.py b/tests/components/accuweather/test_init.py index f88cde88e7e..340676905d6 100644 --- a/tests/components/accuweather/test_init.py +++ b/tests/components/accuweather/test_init.py @@ -10,7 +10,7 @@ from homeassistant.components.accuweather.const import ( UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION, ) -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -107,7 +107,7 @@ async def test_remove_ozone_sensors( ) -> None: """Test remove ozone sensors from registry.""" entity_registry.async_get_or_create( - SENSOR_DOMAIN, + SENSOR_PLATFORM, DOMAIN, "0123456-ozone-0", suggested_object_id="home_ozone_0d", diff --git a/tests/components/accuweather/test_sensor.py b/tests/components/accuweather/test_sensor.py index 37ebe260f39..41c1c0d930a 100644 --- a/tests/components/accuweather/test_sensor.py +++ b/tests/components/accuweather/test_sensor.py @@ -148,7 +148,6 @@ async def test_manual_update_entity( assert mock_accuweather_client.async_get_current_conditions.call_count == 2 -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_imperial_units( hass: HomeAssistant, mock_accuweather_client: AsyncMock ) -> None: diff --git a/tests/components/acmeda/conftest.py b/tests/components/acmeda/conftest.py deleted file mode 100644 index 2c980351c09..00000000000 --- a/tests/components/acmeda/conftest.py +++ /dev/null @@ -1,20 +0,0 @@ -"""Define fixtures available for all Acmeda tests.""" - -import pytest - -from homeassistant.components.acmeda.const import DOMAIN -from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: - """Return the default mocked config entry.""" - mock_config_entry = MockConfigEntry( - domain=DOMAIN, - data={CONF_HOST: "127.0.0.1"}, - ) - mock_config_entry.add_to_hass(hass) - return mock_config_entry diff --git a/tests/components/acmeda/test_cover.py b/tests/components/acmeda/test_cover.py deleted file mode 100644 index 0d908ecc915..00000000000 --- a/tests/components/acmeda/test_cover.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Define tests for the Acmeda config flow.""" - -from homeassistant.components.acmeda.const import DOMAIN -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry - - -async def test_cover_id_migration( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test migrating unique id.""" - mock_config_entry.add_to_hass(hass) - entity_registry.async_get_or_create( - COVER_DOMAIN, DOMAIN, 1234567890123, config_entry=mock_config_entry - ) - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.async_block_till_done() - entities = er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id - ) - assert len(entities) == 1 - assert 
entities[0].unique_id == "1234567890123" diff --git a/tests/components/acmeda/test_sensor.py b/tests/components/acmeda/test_sensor.py deleted file mode 100644 index 3d7090ce7dd..00000000000 --- a/tests/components/acmeda/test_sensor.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Define tests for the Acmeda config flow.""" - -from homeassistant.components.acmeda.const import DOMAIN -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry - - -async def test_sensor_id_migration( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test migrating unique id.""" - mock_config_entry.add_to_hass(hass) - entity_registry.async_get_or_create( - SENSOR_DOMAIN, DOMAIN, 1234567890123, config_entry=mock_config_entry - ) - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - entities = er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id - ) - assert len(entities) == 1 - assert entities[0].unique_id == "1234567890123" diff --git a/tests/components/adguard/test_config_flow.py b/tests/components/adguard/test_config_flow.py index 6644a4ca20f..d493962611f 100644 --- a/tests/components/adguard/test_config_flow.py +++ b/tests/components/adguard/test_config_flow.py @@ -4,6 +4,7 @@ import aiohttp from homeassistant import config_entries from homeassistant.components.adguard.const import DOMAIN +from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_HOST, @@ -16,7 +17,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker diff --git a/tests/components/advantage_air/__init__.py b/tests/components/advantage_air/__init__.py index 5587c668c7e..05d98e957bb 100644 --- a/tests/components/advantage_air/__init__.py +++ b/tests/components/advantage_air/__init__.py @@ -4,7 +4,6 @@ from unittest.mock import AsyncMock, patch from homeassistant.components.advantage_air.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_json_object_fixture @@ -44,7 +43,7 @@ def patch_update(return_value=True, side_effect=None): ) -async def add_mock_config(hass: HomeAssistant) -> MockConfigEntry: +async def add_mock_config(hass): """Create a fake Advantage Air Config Entry.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/advantage_air/test_binary_sensor.py b/tests/components/advantage_air/test_binary_sensor.py index d0088d96ba5..13bbadb38f9 100644 --- a/tests/components/advantage_air/test_binary_sensor.py +++ b/tests/components/advantage_air/test_binary_sensor.py @@ -1,8 +1,10 @@ """Test the Advantage Air Binary Sensor Platform.""" from datetime import timedelta -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock +from homeassistant.components.advantage_air import ADVANTAGE_AIR_SYNC_INTERVAL +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import STATE_OFF, STATE_ON from 
homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -68,14 +70,22 @@ async def test_binary_sensor_async_setup_entry( assert not hass.states.get(entity_id) mock_get.reset_mock() + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + await hass.async_block_till_done() - with patch("homeassistant.config_entries.RELOAD_AFTER_UPDATE_DELAY", 1): - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) - await hass.async_block_till_done() + async_fire_time_changed( + hass, + dt_util.utcnow() + timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL + 1), + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get.mock_calls) == 1 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 1 + async_fire_time_changed( + hass, + dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get.mock_calls) == 2 state = hass.states.get(entity_id) assert state @@ -91,14 +101,22 @@ async def test_binary_sensor_async_setup_entry( assert not hass.states.get(entity_id) mock_get.reset_mock() + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + await hass.async_block_till_done() - with patch("homeassistant.config_entries.RELOAD_AFTER_UPDATE_DELAY", 1): - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) - await hass.async_block_till_done() + async_fire_time_changed( + hass, + dt_util.utcnow() + timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL + 1), + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get.mock_calls) == 1 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 1 + async_fire_time_changed( + hass, + dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get.mock_calls) == 2 state = hass.states.get(entity_id) assert state diff --git a/tests/components/advantage_air/test_cover.py b/tests/components/advantage_air/test_cover.py index a9a3cc70c18..4752601d9ad 100644 --- a/tests/components/advantage_air/test_cover.py +++ b/tests/components/advantage_air/test_cover.py @@ -9,9 +9,8 @@ from homeassistant.components.cover import ( SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, CoverDeviceClass, - CoverState, ) -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.const import ATTR_ENTITY_ID, STATE_OPEN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -32,7 +31,7 @@ async def test_ac_cover( entity_id = "cover.myauto_zone_y" state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes.get("device_class") == CoverDeviceClass.DAMPER assert state.attributes.get("current_position") == 100 @@ -121,7 +120,7 @@ async def test_things_cover( thing_id = "200" state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes.get("device_class") == CoverDeviceClass.BLIND entry = entity_registry.async_get(entity_id) diff --git a/tests/components/advantage_air/test_sensor.py 
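The two advantage_air hunks above replace patching RELOAD_AFTER_UPDATE_DELAY down to one second with explicit time jumps, first past the integration's sync interval and then past the real reload delay. A minimal sketch of that pattern, assuming the same constants the hunks import (the helper name and the entity id argument are placeholders):

from datetime import timedelta

from homeassistant.components.advantage_air import ADVANTAGE_AIR_SYNC_INTERVAL
from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.util import dt as dt_util

from tests.common import async_fire_time_changed


async def reenable_and_refresh(
    hass: HomeAssistant, entity_registry: er.EntityRegistry, entity_id: str
) -> None:
    """Re-enable a disabled entity and drive the delayed reload by moving time."""
    # Clearing disabled_by only schedules a config-entry reload; nothing
    # happens until time advances.
    entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None)
    await hass.async_block_till_done()

    # First jump: past the coordinator's own poll interval, so only the
    # regular data update runs.
    async_fire_time_changed(
        hass, dt_util.utcnow() + timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL + 1)
    )
    await hass.async_block_till_done(wait_background_tasks=True)

    # Second jump: past RELOAD_AFTER_UPDATE_DELAY, which reloads the entry,
    # sets up the re-enabled entity, and produces the second API call the
    # tests above assert on.
    async_fire_time_changed(
        hass, dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1)
    )
    await hass.async_block_till_done(wait_background_tasks=True)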
b/tests/components/advantage_air/test_sensor.py index 3ea368a59fb..06243921a64 100644 --- a/tests/components/advantage_air/test_sensor.py +++ b/tests/components/advantage_air/test_sensor.py @@ -1,13 +1,15 @@ """Test the Advantage Air Sensor Platform.""" from datetime import timedelta -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock +from homeassistant.components.advantage_air import ADVANTAGE_AIR_SYNC_INTERVAL from homeassistant.components.advantage_air.const import DOMAIN as ADVANTAGE_AIR_DOMAIN from homeassistant.components.advantage_air.sensor import ( ADVANTAGE_AIR_SERVICE_SET_TIME_TO, ADVANTAGE_AIR_SET_COUNTDOWN_VALUE, ) +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -122,15 +124,23 @@ async def test_sensor_platform_disabled_entity( assert not hass.states.get(entity_id) + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + await hass.async_block_till_done(wait_background_tasks=True) mock_get.reset_mock() - with patch("homeassistant.config_entries.RELOAD_AFTER_UPDATE_DELAY", 1): - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) - await hass.async_block_till_done(wait_background_tasks=True) + async_fire_time_changed( + hass, + dt_util.utcnow() + timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL + 1), + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get.mock_calls) == 1 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 1 + async_fire_time_changed( + hass, + dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get.mock_calls) == 2 state = hass.states.get(entity_id) assert state diff --git a/tests/components/aemet/conftest.py b/tests/components/aemet/conftest.py index 38f4793541c..aa4f537c7fb 100644 --- a/tests/components/aemet/conftest.py +++ b/tests/components/aemet/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for aemet.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/aemet/snapshots/test_diagnostics.ambr b/tests/components/aemet/snapshots/test_diagnostics.ambr index 54546507dfa..8d4132cad84 100644 --- a/tests/components/aemet/snapshots/test_diagnostics.ambr +++ b/tests/components/aemet/snapshots/test_diagnostics.ambr @@ -11,8 +11,6 @@ 'name': 'AEMET', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'aemet', 'entry_id': '7442b231f139e813fc1939281123f220', 'minor_version': 1, diff --git a/tests/components/aemet/snapshots/test_weather.ambr b/tests/components/aemet/snapshots/test_weather.ambr index 58c854dcda9..f19f95a6e80 100644 --- a/tests/components/aemet/snapshots/test_weather.ambr +++ b/tests/components/aemet/snapshots/test_weather.ambr @@ -1,4 +1,494 @@ # serializer version: 1 +# name: test_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-08T23:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 2.0, + 'templow': -1.0, + 'wind_bearing': 90.0, + 'wind_speed': 0.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': 
'2021-01-09T23:00:00+00:00', + 'precipitation_probability': 30, + 'temperature': 4.0, + 'templow': -4.0, + 'wind_bearing': 45.0, + 'wind_speed': 20.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T23:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 3.0, + 'templow': -7.0, + 'wind_bearing': 0.0, + 'wind_speed': 5.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-11T23:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': -1.0, + 'templow': -13.0, + 'wind_bearing': None, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-01-12T23:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 6.0, + 'templow': -11.0, + 'wind_bearing': None, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-13T23:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 6.0, + 'templow': -7.0, + 'wind_bearing': None, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-14T23:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 5.0, + 'templow': -4.0, + 'wind_bearing': None, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T12:00:00+00:00', + 'precipitation': 2.7, + 'precipitation_probability': 100, + 'temperature': 0.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 22.0, + 'wind_speed': 15.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T13:00:00+00:00', + 'precipitation': 0.6, + 'precipitation_probability': 100, + 'temperature': 0.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 14.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T14:00:00+00:00', + 'precipitation': 0.8, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 20.0, + 'wind_speed': 10.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T15:00:00+00:00', + 'precipitation': 1.4, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 14.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T16:00:00+00:00', + 'precipitation': 1.2, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 13.0, + 'wind_speed': 9.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T17:00:00+00:00', + 'precipitation': 0.4, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 13.0, + 'wind_speed': 7.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T18:00:00+00:00', + 'precipitation': 0.3, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T19:00:00+00:00', + 'precipitation': 0.1, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-09T20:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 8.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-09T21:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 9.0, + 
'wind_speed': 6.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-09T22:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 11.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-09T23:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'fog', + 'datetime': '2021-01-10T00:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 10.0, + 'wind_speed': 5.0, + }), + dict({ + 'condition': 'fog', + 'datetime': '2021-01-10T01:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 0.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 11.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'fog', + 'datetime': '2021-01-10T02:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 0.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 9.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T03:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T04:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': -1.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 11.0, + 'wind_speed': 5.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': -1.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 13.0, + 'wind_speed': 9.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T06:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': -2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 18.0, + 'wind_speed': 13.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T07:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T08:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 31.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T09:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 32.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': 2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 32.0, + 'wind_speed': 22.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T12:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 
32.0, + 'wind_speed': 20.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T13:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 19.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T14:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 4.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 28.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T15:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T16:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T17:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T18:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T19:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T20:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T21:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 27.0, + 'wind_speed': 19.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T22:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T23:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 19.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-11T00:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 27.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T01:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 22.0, + 'wind_speed': 12.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T02:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 17.0, + 'wind_speed': 10.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T03:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -3.0, + 'wind_bearing': 
45.0, + 'wind_gust_speed': 15.0, + 'wind_speed': 11.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T04:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -4.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 15.0, + 'wind_speed': 10.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T05:00:00+00:00', + 'precipitation_probability': None, + 'temperature': -4.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 15.0, + 'wind_speed': 10.0, + }), + ]), + }) +# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.aemet': dict({ diff --git a/tests/components/aemet/test_diagnostics.py b/tests/components/aemet/test_diagnostics.py index 6d007dd0465..0d94995a85b 100644 --- a/tests/components/aemet/test_diagnostics.py +++ b/tests/components/aemet/test_diagnostics.py @@ -4,7 +4,6 @@ from unittest.mock import patch import pytest from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.components.aemet.const import DOMAIN from homeassistant.core import HomeAssistant @@ -31,4 +30,4 @@ async def test_config_entry_diagnostics( return_value={}, ): result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot diff --git a/tests/components/aftership/conftest.py b/tests/components/aftership/conftest.py index d66ae267bfe..1704b099cc2 100644 --- a/tests/components/aftership/conftest.py +++ b/tests/components/aftership/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the AfterShip tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/agent_dvr/conftest.py b/tests/components/agent_dvr/conftest.py index 0ce1c008a23..a62e1738850 100644 --- a/tests/components/agent_dvr/conftest.py +++ b/tests/components/agent_dvr/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for Agent DVR.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/airgradient/conftest.py b/tests/components/airgradient/conftest.py index 395c5cd96a4..7ca1198ce5f 100644 --- a/tests/components/airgradient/conftest.py +++ b/tests/components/airgradient/conftest.py @@ -1,15 +1,16 @@ """AirGradient tests configuration.""" -from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import patch from airgradient import Config, Measures import pytest +from typing_extensions import Generator from homeassistant.components.airgradient.const import DOMAIN from homeassistant.const import CONF_HOST from tests.common import MockConfigEntry, load_fixture +from tests.components.smhi.common import AsyncMock @pytest.fixture @@ -43,7 +44,6 @@ def mock_airgradient_client() -> Generator[AsyncMock]: client.get_config.return_value = Config.from_json( load_fixture("get_config_local.json", DOMAIN) ) - client.get_latest_firmware_version.return_value = "3.1.4" yield client diff --git a/tests/components/airgradient/snapshots/test_diagnostics.ambr b/tests/components/airgradient/snapshots/test_diagnostics.ambr deleted file mode 100644 index a96dfb95382..00000000000 --- a/tests/components/airgradient/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,42 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics_polling_instance - dict({ - 
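Several diagnostics hunks in this diff (aemet above, airly, airnow, airvisual and airvisual_pro further down) toggle between a bare snapshot comparison and one that filters volatile fields. The filtered form that the removed lines used looks roughly like this; the config_entry fixture is assumed to come from the integration's conftest:

from syrupy import SnapshotAssertion
from syrupy.filters import props

from homeassistant.core import HomeAssistant

from tests.components.diagnostics import get_diagnostics_for_config_entry
from tests.typing import ClientSessionGenerator


async def test_entry_diagnostics(
    hass: HomeAssistant,
    hass_client: ClientSessionGenerator,
    config_entry,  # assumed fixture: a set-up MockConfigEntry
    snapshot: SnapshotAssertion,
) -> None:
    """Compare the diagnostics payload against the stored .ambr snapshot."""
    result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry)
    # created_at/modified_at change on every run, so they are excluded to keep
    # the snapshot stable.
    assert result == snapshot(exclude=props("created_at", "modified_at"))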
'config': dict({ - 'co2_automatic_baseline_calibration_days': 8, - 'configuration_control': 'local', - 'country': 'DE', - 'display_brightness': 0, - 'led_bar_brightness': 100, - 'led_bar_mode': 'co2', - 'nox_learning_offset': 12, - 'pm_standard': 'ugm3', - 'post_data_to_airgradient': True, - 'temperature_unit': 'c', - 'tvoc_learning_offset': 12, - }), - 'measures': dict({ - 'ambient_temperature': 22.17, - 'boot_time': 28, - 'compensated_ambient_temperature': 22.17, - 'compensated_pm02': None, - 'compensated_relative_humidity': 47.0, - 'firmware_version': '3.1.1', - 'model': 'I-9PSL', - 'nitrogen_index': 1, - 'pm003_count': 270, - 'pm01': 22, - 'pm02': 34, - 'pm10': 41, - 'raw_ambient_temperature': 27.96, - 'raw_nitrogen': 16931, - 'raw_pm02': 34, - 'raw_relative_humidity': 48.0, - 'raw_total_volatile_organic_component': 31792, - 'rco2': 778, - 'relative_humidity': 47.0, - 'serial_number': '84fce612f5b8', - 'signal_strength': -52, - 'total_volatile_organic_component_index': 99, - }), - }) -# --- diff --git a/tests/components/airgradient/snapshots/test_init.ambr b/tests/components/airgradient/snapshots/test_init.ambr index 72cb12535f1..4462a996a49 100644 --- a/tests/components/airgradient/snapshots/test_init.ambr +++ b/tests/components/airgradient/snapshots/test_init.ambr @@ -20,8 +20,7 @@ 'labels': set({ }), 'manufacturer': 'AirGradient', - 'model': 'AirGradient ONE', - 'model_id': 'I-9PSL', + 'model': 'I-9PSL', 'name': 'Airgradient', 'name_by_user': None, 'primary_config_entry': , @@ -52,12 +51,11 @@ 'labels': set({ }), 'manufacturer': 'AirGradient', - 'model': 'AirGradient Open Air', - 'model_id': 'O-1PPT', + 'model': 'O-1PPT', 'name': 'Airgradient', 'name_by_user': None, 'primary_config_entry': , - 'serial_number': '84fce612f5b8', + 'serial_number': '84fce60bec38', 'suggested_area': None, 'sw_version': '3.1.1', 'via_device_id': None, diff --git a/tests/components/airgradient/snapshots/test_sensor.ambr b/tests/components/airgradient/snapshots/test_sensor.ambr index 941369ff266..ff83fdcc111 100644 --- a/tests/components/airgradient/snapshots/test_sensor.ambr +++ b/tests/components/airgradient/snapshots/test_sensor.ambr @@ -305,7 +305,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '47.0', + 'state': '48.0', }) # --- # name: test_all_entities[indoor][sensor.airgradient_led_bar_brightness-entry] @@ -912,7 +912,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '22.17', + 'state': '27.96', }) # --- # name: test_all_entities[indoor][sensor.airgradient_voc_index-entry] diff --git a/tests/components/airgradient/snapshots/test_update.ambr b/tests/components/airgradient/snapshots/test_update.ambr deleted file mode 100644 index 1f944bb528b..00000000000 --- a/tests/components/airgradient/snapshots/test_update.ambr +++ /dev/null @@ -1,60 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[update.airgradient_firmware-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'update', - 'entity_category': , - 'entity_id': 'update.airgradient_firmware', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Firmware', - 'platform': 'airgradient', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 
'84fce612f5b8-update', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[update.airgradient_firmware-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'auto_update': False, - 'device_class': 'firmware', - 'display_precision': 0, - 'entity_picture': 'https://brands.home-assistant.io/_/airgradient/icon.png', - 'friendly_name': 'Airgradient Firmware', - 'in_progress': False, - 'installed_version': '3.1.1', - 'latest_version': '3.1.4', - 'release_summary': None, - 'release_url': None, - 'skipped_version': None, - 'supported_features': , - 'title': None, - 'update_percentage': None, - }), - 'context': , - 'entity_id': 'update.airgradient_firmware', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/airgradient/test_button.py b/tests/components/airgradient/test_button.py index 83de2c2f048..7901c3a067b 100644 --- a/tests/components/airgradient/test_button.py +++ b/tests/components/airgradient/test_button.py @@ -7,7 +7,7 @@ from airgradient import Config from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion -from homeassistant.components.airgradient.const import DOMAIN +from homeassistant.components.airgradient import DOMAIN from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant diff --git a/tests/components/airgradient/test_config_flow.py b/tests/components/airgradient/test_config_flow.py index 73dbd17a213..217d2ac0e8c 100644 --- a/tests/components/airgradient/test_config_flow.py +++ b/tests/components/airgradient/test_config_flow.py @@ -3,13 +3,10 @@ from ipaddress import ip_address from unittest.mock import AsyncMock -from airgradient import ( - AirGradientConnectionError, - AirGradientParseError, - ConfigurationControl, -) +from airgradient import AirGradientConnectionError, ConfigurationControl +from mashumaro import MissingField -from homeassistant.components.airgradient.const import DOMAIN +from homeassistant.components.airgradient import DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_HOST @@ -144,7 +141,9 @@ async def test_flow_old_firmware_version( mock_setup_entry: AsyncMock, ) -> None: """Test flow with old firmware version.""" - mock_airgradient_client.get_current_measures.side_effect = AirGradientParseError + mock_airgradient_client.get_current_measures.side_effect = MissingField( + "", object, object + ) result = await hass.config_entries.flow.async_init( DOMAIN, @@ -253,32 +252,3 @@ async def test_zeroconf_flow_abort_old_firmware(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "invalid_version" - - -async def test_user_flow_works_discovery( - hass: HomeAssistant, - mock_new_airgradient_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test user flow can continue after discovery happened.""" - await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZEROCONF_DISCOVERY, - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert len(hass.config_entries.flow.async_progress(DOMAIN)) == 2 - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - 
result["flow_id"], - {CONF_HOST: "10.0.0.131"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - # Verify the discovery flow was aborted - assert not hass.config_entries.flow.async_progress(DOMAIN) diff --git a/tests/components/airgradient/test_diagnostics.py b/tests/components/airgradient/test_diagnostics.py deleted file mode 100644 index 34a9bb7aab2..00000000000 --- a/tests/components/airgradient/test_diagnostics.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Tests for the diagnostics data provided by the AirGradient integration.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_diagnostics_polling_instance( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_airgradient_client: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test diagnostics.""" - await setup_integration(hass, mock_config_entry) - - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) - == snapshot - ) diff --git a/tests/components/airgradient/test_init.py b/tests/components/airgradient/test_init.py index a121940f2bc..408e6f5f3ba 100644 --- a/tests/components/airgradient/test_init.py +++ b/tests/components/airgradient/test_init.py @@ -1,18 +1,16 @@ """Tests for the AirGradient integration.""" -from datetime import timedelta from unittest.mock import AsyncMock -from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion -from homeassistant.components.airgradient.const import DOMAIN +from homeassistant.components.airgradient import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from . 
import setup_integration -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry async def test_device_info( @@ -29,28 +27,3 @@ async def test_device_info( ) assert device_entry is not None assert device_entry == snapshot - - -async def test_new_firmware_version( - hass: HomeAssistant, - mock_airgradient_client: AsyncMock, - mock_config_entry: MockConfigEntry, - device_registry: dr.DeviceRegistry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test device registry integration.""" - await setup_integration(hass, mock_config_entry) - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} - ) - assert device_entry is not None - assert device_entry.sw_version == "3.1.1" - mock_airgradient_client.get_current_measures.return_value.firmware_version = "3.1.2" - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} - ) - assert device_entry is not None - assert device_entry.sw_version == "3.1.2" diff --git a/tests/components/airgradient/test_number.py b/tests/components/airgradient/test_number.py index 7aabda8f81c..0803c0d437f 100644 --- a/tests/components/airgradient/test_number.py +++ b/tests/components/airgradient/test_number.py @@ -7,7 +7,7 @@ from airgradient import Config from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion -from homeassistant.components.airgradient.const import DOMAIN +from homeassistant.components.airgradient import DOMAIN from homeassistant.components.number import ( ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, diff --git a/tests/components/airgradient/test_select.py b/tests/components/airgradient/test_select.py index de4a7beaaa7..61679a15c07 100644 --- a/tests/components/airgradient/test_select.py +++ b/tests/components/airgradient/test_select.py @@ -8,7 +8,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.airgradient.const import DOMAIN +from homeassistant.components.airgradient import DOMAIN from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, diff --git a/tests/components/airgradient/test_sensor.py b/tests/components/airgradient/test_sensor.py index e3fed70839a..c2e53ef4de2 100644 --- a/tests/components/airgradient/test_sensor.py +++ b/tests/components/airgradient/test_sensor.py @@ -8,7 +8,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.airgradient.const import DOMAIN +from homeassistant.components.airgradient import DOMAIN from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er diff --git a/tests/components/airgradient/test_switch.py b/tests/components/airgradient/test_switch.py index a0cbdd17d75..20a1cb7470b 100644 --- a/tests/components/airgradient/test_switch.py +++ b/tests/components/airgradient/test_switch.py @@ -7,7 +7,7 @@ from airgradient import Config from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion -from homeassistant.components.airgradient.const import DOMAIN +from homeassistant.components.airgradient import DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, 
diff --git a/tests/components/airgradient/test_update.py b/tests/components/airgradient/test_update.py deleted file mode 100644 index 020a9a82a71..00000000000 --- a/tests/components/airgradient/test_update.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Tests for the AirGradient update platform.""" - -from datetime import timedelta -from unittest.mock import AsyncMock, patch - -from freezegun.api import FrozenDateTimeFactory -from syrupy import SnapshotAssertion - -from homeassistant.const import STATE_OFF, STATE_ON, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_airgradient_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.airgradient.PLATFORMS", [Platform.UPDATE]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_update_mechanism( - hass: HomeAssistant, - mock_airgradient_client: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test update entity.""" - await setup_integration(hass, mock_config_entry) - - state = hass.states.get("update.airgradient_firmware") - assert state.state == STATE_ON - assert state.attributes["installed_version"] == "3.1.1" - assert state.attributes["latest_version"] == "3.1.4" - mock_airgradient_client.get_latest_firmware_version.assert_called_once() - mock_airgradient_client.get_latest_firmware_version.reset_mock() - - mock_airgradient_client.get_current_measures.return_value.firmware_version = "3.1.4" - - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get("update.airgradient_firmware") - assert state.state == STATE_OFF - assert state.attributes["installed_version"] == "3.1.4" - assert state.attributes["latest_version"] == "3.1.4" - - mock_airgradient_client.get_latest_firmware_version.return_value = "3.1.5" - - freezer.tick(timedelta(minutes=59)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - mock_airgradient_client.get_latest_firmware_version.assert_called_once() - state = hass.states.get("update.airgradient_firmware") - assert state.state == STATE_ON - assert state.attributes["installed_version"] == "3.1.4" - assert state.attributes["latest_version"] == "3.1.5" diff --git a/tests/components/airly/snapshots/test_diagnostics.ambr b/tests/components/airly/snapshots/test_diagnostics.ambr index ec501b2fd7e..c22e96a2082 100644 --- a/tests/components/airly/snapshots/test_diagnostics.ambr +++ b/tests/components/airly/snapshots/test_diagnostics.ambr @@ -9,8 +9,6 @@ 'name': 'Home', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'airly', 'entry_id': '3bd2acb0e4f0476d40865546d0d91921', 'minor_version': 1, diff --git a/tests/components/airly/test_diagnostics.py b/tests/components/airly/test_diagnostics.py index 9a61bf5abee..7364824e594 100644 --- a/tests/components/airly/test_diagnostics.py +++ b/tests/components/airly/test_diagnostics.py @@ -1,7 +1,6 @@ """Test Airly diagnostics.""" from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant 
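The airgradient update tests removed just below drive the polling coordinator with freezegun's freezer fixture plus async_fire_time_changed. The core of that pattern, reduced to a sketch (advance_one_poll is an illustrative helper; the mocked attribute and the one-minute step mirror the removed test):

from datetime import timedelta
from unittest.mock import AsyncMock

from freezegun.api import FrozenDateTimeFactory

from homeassistant.core import HomeAssistant

from tests.common import async_fire_time_changed


async def advance_one_poll(
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    mock_client: AsyncMock,
    new_version: str,
) -> None:
    """Pretend the device reports a new firmware version on the next poll."""
    mock_client.get_current_measures.return_value.firmware_version = new_version

    # Move frozen time forward, then fire the time-changed event so the
    # coordinator's scheduled refresh actually runs inside the event loop.
    freezer.tick(timedelta(minutes=1))
    async_fire_time_changed(hass)
    await hass.async_block_till_done()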
@@ -23,4 +22,4 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot diff --git a/tests/components/airnow/conftest.py b/tests/components/airnow/conftest.py index 84adf12806d..676595250f1 100644 --- a/tests/components/airnow/conftest.py +++ b/tests/components/airnow/conftest.py @@ -1,12 +1,12 @@ """Define fixtures for AirNow tests.""" -from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator -from homeassistant.components.airnow.const import DOMAIN +from homeassistant.components.airnow import DOMAIN from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS from homeassistant.core import HomeAssistant from homeassistant.util.json import JsonArrayType diff --git a/tests/components/airnow/snapshots/test_diagnostics.ambr b/tests/components/airnow/snapshots/test_diagnostics.ambr index 3dd4788dc61..c2004d759a9 100644 --- a/tests/components/airnow/snapshots/test_diagnostics.ambr +++ b/tests/components/airnow/snapshots/test_diagnostics.ambr @@ -8,7 +8,7 @@ 'DateObserved': '2020-12-20', 'HourObserved': 15, 'Latitude': '**REDACTED**', - 'LocalTimeZone': 'PST', + 'LocalTimeZoneInfo': 'PST', 'Longitude': '**REDACTED**', 'O3': 0.048, 'PM10': 12, @@ -24,8 +24,6 @@ 'longitude': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'airnow', 'entry_id': '3bd2acb0e4f0476d40865546d0d91921', 'minor_version': 1, diff --git a/tests/components/airnow/test_diagnostics.py b/tests/components/airnow/test_diagnostics.py index eb79dabe51a..7329398e789 100644 --- a/tests/components/airnow/test_diagnostics.py +++ b/tests/components/airnow/test_diagnostics.py @@ -4,7 +4,6 @@ from unittest.mock import patch import pytest from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -28,6 +27,7 @@ async def test_entry_diagnostics( return_value="PST", ): assert await hass.config_entries.async_setup(config_entry.entry_id) - assert await get_diagnostics_for_config_entry( - hass, hass_client, config_entry - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + == snapshot + ) diff --git a/tests/components/airq/conftest.py b/tests/components/airq/conftest.py index a132153a76f..5df032c0308 100644 --- a/tests/components/airq/conftest.py +++ b/tests/components/airq/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for air-Q.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/airthings_ble/__init__.py b/tests/components/airthings_ble/__init__.py index add21b1067f..45521903a08 100644 --- a/tests/components/airthings_ble/__init__.py +++ b/tests/components/airthings_ble/__init__.py @@ -13,7 +13,6 @@ from airthings_ble import ( from homeassistant.components.airthings_ble.const import DOMAIN from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceRegistry from tests.common import MockConfigEntry, MockEntity @@ -49,7 +48,7 @@ def 
patch_airthings_ble(return_value=AirthingsDevice, side_effect=None): def patch_airthings_device_update(): """Patch airthings-ble device.""" return patch( - "homeassistant.components.airthings_ble.coordinator.AirthingsBluetoothDeviceData.update_device", + "homeassistant.components.airthings_ble.AirthingsBluetoothDeviceData.update_device", return_value=WAVE_DEVICE_INFO, ) @@ -226,7 +225,7 @@ VOC_V3 = MockEntity( ) -def create_entry(hass: HomeAssistant) -> MockConfigEntry: +def create_entry(hass): """Create a config entry.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/airtouch5/__init__.py b/tests/components/airtouch5/__init__.py index 567be6af774..2b76786e7e5 100644 --- a/tests/components/airtouch5/__init__.py +++ b/tests/components/airtouch5/__init__.py @@ -1,13 +1 @@ """Tests for the Airtouch 5 integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/airtouch5/conftest.py b/tests/components/airtouch5/conftest.py index fab26e3f6cc..d6d55689f17 100644 --- a/tests/components/airtouch5/conftest.py +++ b/tests/components/airtouch5/conftest.py @@ -1,23 +1,9 @@ """Common fixtures for the Airtouch 5 tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch -from airtouch5py.data_packet_factory import DataPacketFactory -from airtouch5py.packets.ac_ability import AcAbility -from airtouch5py.packets.ac_status import AcFanSpeed, AcMode, AcPowerState, AcStatus -from airtouch5py.packets.zone_name import ZoneName -from airtouch5py.packets.zone_status import ( - ControlMethod, - ZonePowerState, - ZoneStatusZone, -) import pytest - -from homeassistant.components.airtouch5.const import DOMAIN -from homeassistant.const import CONF_HOST - -from tests.common import MockConfigEntry +from typing_extensions import Generator @pytest.fixture @@ -27,107 +13,3 @@ def mock_setup_entry() -> Generator[AsyncMock]: "homeassistant.components.airtouch5.async_setup_entry", return_value=True ) as mock_setup_entry: yield mock_setup_entry - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock the config entry.""" - return MockConfigEntry( - domain=DOMAIN, - unique_id="1.1.1.1", - data={ - CONF_HOST: "1.1.1.1", - }, - ) - - -@pytest.fixture -def mock_airtouch5_client() -> Generator[AsyncMock]: - """Mock an Airtouch5 client.""" - - with ( - patch( - "homeassistant.components.airtouch5.Airtouch5SimpleClient", - autospec=True, - ) as mock_client, - patch( - "homeassistant.components.airtouch5.config_flow.Airtouch5SimpleClient", - new=mock_client, - ), - ): - client = mock_client.return_value - - # Default values for the tests using this mock : - client.data_packet_factory = DataPacketFactory() - client.ac = [ - AcAbility( - ac_number=1, - ac_name="AC 1", - start_zone_number=1, - zone_count=2, - supports_mode_cool=True, - supports_mode_fan=True, - supports_mode_dry=True, - supports_mode_heat=True, - supports_mode_auto=True, - supports_fan_speed_intelligent_auto=True, - supports_fan_speed_turbo=True, - supports_fan_speed_powerful=True, - supports_fan_speed_high=True, - supports_fan_speed_medium=True, - supports_fan_speed_low=True, - supports_fan_speed_quiet=True, - supports_fan_speed_auto=True, - 
min_cool_set_point=15, - max_cool_set_point=25, - min_heat_set_point=20, - max_heat_set_point=30, - ) - ] - client.latest_ac_status = { - 1: AcStatus( - ac_power_state=AcPowerState.ON, - ac_number=1, - ac_mode=AcMode.AUTO, - ac_fan_speed=AcFanSpeed.AUTO, - ac_setpoint=24, - turbo_active=False, - bypass_active=False, - spill_active=False, - timer_set=False, - temperature=24, - error_code=0, - ) - } - - client.zones = [ZoneName(1, "Zone 1"), ZoneName(2, "Zone 2")] - client.latest_zone_status = { - 1: ZoneStatusZone( - zone_power_state=ZonePowerState.ON, - zone_number=1, - control_method=ControlMethod.PERCENTAGE_CONTROL, - open_percentage=0.9, - set_point=24, - has_sensor=False, - temperature=24, - spill_active=False, - is_low_battery=False, - ), - 2: ZoneStatusZone( - zone_power_state=ZonePowerState.ON, - zone_number=1, - control_method=ControlMethod.TEMPERATURE_CONTROL, - open_percentage=1, - set_point=24, - has_sensor=True, - temperature=24, - spill_active=False, - is_low_battery=False, - ), - } - - client.connection_state_callbacks = [] - client.zone_status_callbacks = [] - client.ac_status_callbacks = [] - - yield client diff --git a/tests/components/airtouch5/snapshots/test_cover.ambr b/tests/components/airtouch5/snapshots/test_cover.ambr deleted file mode 100644 index a8e57f69527..00000000000 --- a/tests/components/airtouch5/snapshots/test_cover.ambr +++ /dev/null @@ -1,99 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[cover.zone_1_damper-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.zone_1_damper', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Damper', - 'platform': 'airtouch5', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'damper', - 'unique_id': 'zone_1_open_percentage', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[cover.zone_1_damper-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_position': 90, - 'device_class': 'damper', - 'friendly_name': 'Zone 1 Damper', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.zone_1_damper', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_all_entities[cover.zone_2_damper-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.zone_2_damper', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Damper', - 'platform': 'airtouch5', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'damper', - 'unique_id': 'zone_2_open_percentage', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[cover.zone_2_damper-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_position': 100, - 'device_class': 'damper', - 'friendly_name': 'Zone 2 Damper', - 'supported_features': , - }), - 'context': , - 'entity_id': 
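The cover .ambr entries being deleted here pair one EntityRegistryEntrySnapshot with one StateSnapshot per entity, keyed <entity_id>-entry / <entity_id>-state; that is what snapshot_platform emits when a test loads a single platform. A sketch mirroring the removed airtouch5 test, with the removed setup_integration helper inlined:

from unittest.mock import AsyncMock, patch

from syrupy import SnapshotAssertion

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from tests.common import MockConfigEntry, snapshot_platform


async def test_all_entities(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    mock_airtouch5_client: AsyncMock,
    mock_config_entry: MockConfigEntry,
    entity_registry: er.EntityRegistry,
) -> None:
    """Load only the cover platform and snapshot every registry entry and state."""
    with patch("homeassistant.components.airtouch5.PLATFORMS", [Platform.COVER]):
        mock_config_entry.add_to_hass(hass)
        await hass.config_entries.async_setup(mock_config_entry.entry_id)
        await hass.async_block_till_done()

    await snapshot_platform(
        hass, entity_registry, snapshot, mock_config_entry.entry_id
    )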
'cover.zone_2_damper', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- diff --git a/tests/components/airtouch5/test_cover.py b/tests/components/airtouch5/test_cover.py deleted file mode 100644 index 57a344e8018..00000000000 --- a/tests/components/airtouch5/test_cover.py +++ /dev/null @@ -1,143 +0,0 @@ -"""Tests for the Airtouch5 cover platform.""" - -from collections.abc import Callable -from unittest.mock import AsyncMock, patch - -from airtouch5py.packets.zone_status import ( - ControlMethod, - ZonePowerState, - ZoneStatusZone, -) -from syrupy import SnapshotAssertion - -from homeassistant.components.cover import ( - ATTR_CURRENT_POSITION, - ATTR_POSITION, - DOMAIN as COVER_DOMAIN, - SERVICE_CLOSE_COVER, - SERVICE_OPEN_COVER, - SERVICE_SET_COVER_POSITION, - CoverState, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - -COVER_ENTITY_ID = "cover.zone_1_damper" - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_airtouch5_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - - with patch("homeassistant.components.airtouch5.PLATFORMS", [Platform.COVER]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_cover_actions( - hass: HomeAssistant, - mock_airtouch5_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the actions of the Airtouch5 covers.""" - - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_ENTITY_ID}, - blocking=True, - ) - mock_airtouch5_client.send_packet.assert_called_once() - mock_airtouch5_client.reset_mock() - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: COVER_ENTITY_ID}, - blocking=True, - ) - mock_airtouch5_client.send_packet.assert_called_once() - mock_airtouch5_client.reset_mock() - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: COVER_ENTITY_ID, ATTR_POSITION: 50}, - blocking=True, - ) - mock_airtouch5_client.send_packet.assert_called_once() - mock_airtouch5_client.reset_mock() - - -async def test_cover_callbacks( - hass: HomeAssistant, - mock_airtouch5_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the callbacks of the Airtouch5 covers.""" - - await setup_integration(hass, mock_config_entry) - - # We find the callback method on the mock client - zone_status_callback: Callable[[dict[int, ZoneStatusZone]], None] = ( - mock_airtouch5_client.zone_status_callbacks[2] - ) - - # Define a method to simply call it - async def _call_zone_status_callback(open_percentage: int) -> None: - zsz = ZoneStatusZone( - zone_power_state=ZonePowerState.ON, - zone_number=1, - control_method=ControlMethod.PERCENTAGE_CONTROL, - open_percentage=open_percentage, - set_point=None, - has_sensor=False, - temperature=None, - spill_active=False, - is_low_battery=False, - ) - zone_status_callback({1: zsz}) - await hass.async_block_till_done() - - # And call it to effectively launch the callback as the server would do - - # Partly open - await 
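The deleted airtouch5 conftest above exposes plain lists on the mocked client (connection_state_callbacks, zone_status_callbacks, ac_status_callbacks); the integration's entities append their listeners to these lists during setup, which is why the removed cover test indexes into zone_status_callbacks to grab the damper's listener. Simulating a push from the unit is then just a call to that stored callback. A sketch with placeholder values (the index depends on how many listeners were registered before the cover platform; the removed test used 2):

from airtouch5py.packets.zone_status import (
    ControlMethod,
    ZonePowerState,
    ZoneStatusZone,
)


def push_zone_update(mock_airtouch5_client, open_percentage: float) -> None:
    """Invoke the integration's registered zone-status listener directly."""
    zone_status_callback = mock_airtouch5_client.zone_status_callbacks[2]
    zone_status_callback(
        {
            1: ZoneStatusZone(
                zone_power_state=ZonePowerState.ON,
                zone_number=1,
                control_method=ControlMethod.PERCENTAGE_CONTROL,
                # 0.0 to 1.0 maps to a cover position of 0 to 100.
                open_percentage=open_percentage,
                set_point=None,
                has_sensor=False,
                temperature=None,
                spill_active=False,
                is_low_battery=False,
            )
        }
    )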
_call_zone_status_callback(0.7) - state = hass.states.get(COVER_ENTITY_ID) - assert state - assert state.state == CoverState.OPEN - assert state.attributes.get(ATTR_CURRENT_POSITION) == 70 - - # Fully open - await _call_zone_status_callback(1) - state = hass.states.get(COVER_ENTITY_ID) - assert state - assert state.state == CoverState.OPEN - assert state.attributes.get(ATTR_CURRENT_POSITION) == 100 - - # Fully closed - await _call_zone_status_callback(0.0) - state = hass.states.get(COVER_ENTITY_ID) - assert state - assert state.state == CoverState.CLOSED - assert state.attributes.get(ATTR_CURRENT_POSITION) == 0 - - # Partly reopened - await _call_zone_status_callback(0.3) - state = hass.states.get(COVER_ENTITY_ID) - assert state - assert state.state == CoverState.OPEN - assert state.attributes.get(ATTR_CURRENT_POSITION) == 30 diff --git a/tests/components/airvisual/conftest.py b/tests/components/airvisual/conftest.py index cc49b60e0d8..a82dc0ab78c 100644 --- a/tests/components/airvisual/conftest.py +++ b/tests/components/airvisual/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for AirVisual.""" -from collections.abc import AsyncGenerator, Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest +from typing_extensions import AsyncGenerator, Generator from homeassistant.components.airvisual import ( CONF_CITY, diff --git a/tests/components/airvisual/snapshots/test_diagnostics.ambr b/tests/components/airvisual/snapshots/test_diagnostics.ambr index 606d6082351..cb9d25b8790 100644 --- a/tests/components/airvisual/snapshots/test_diagnostics.ambr +++ b/tests/components/airvisual/snapshots/test_diagnostics.ambr @@ -36,8 +36,6 @@ 'longitude': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'airvisual', 'entry_id': '3bd2acb0e4f0476d40865546d0d91921', 'minor_version': 1, diff --git a/tests/components/airvisual/test_config_flow.py b/tests/components/airvisual/test_config_flow.py index 632bdb72eb4..b9643b17c07 100644 --- a/tests/components/airvisual/test_config_flow.py +++ b/tests/components/airvisual/test_config_flow.py @@ -18,7 +18,7 @@ from homeassistant.components.airvisual import ( INTEGRATION_TYPE_GEOGRAPHY_COORDS, INTEGRATION_TYPE_GEOGRAPHY_NAME, ) -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SHOW_ON_MAP from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -33,8 +33,6 @@ from .conftest import ( TEST_STATE, ) -from tests.common import MockConfigEntry - pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -148,10 +146,16 @@ async def test_options_flow( async def test_step_reauth( - hass: HomeAssistant, config_entry: MockConfigEntry, setup_config_entry + hass: HomeAssistant, config_entry, setup_config_entry ) -> None: """Test that the reauth step works.""" - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_REAUTH}, data=config_entry.data + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/airvisual/test_diagnostics.py b/tests/components/airvisual/test_diagnostics.py index 0253f102c59..072e4559705 
100644 --- a/tests/components/airvisual/test_diagnostics.py +++ b/tests/components/airvisual/test_diagnostics.py @@ -1,7 +1,6 @@ """Test AirVisual diagnostics.""" from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -17,6 +16,7 @@ async def test_entry_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert await get_diagnostics_for_config_entry( - hass, hass_client, config_entry - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + == snapshot + ) diff --git a/tests/components/airvisual/test_init.py b/tests/components/airvisual/test_init.py index 19dab3de210..7fa9f4ca779 100644 --- a/tests/components/airvisual/test_init.py +++ b/tests/components/airvisual/test_init.py @@ -11,9 +11,7 @@ from homeassistant.components.airvisual import ( INTEGRATION_TYPE_GEOGRAPHY_NAME, INTEGRATION_TYPE_NODE_PRO, ) - -# pylint: disable-next=hass-component-root-import -from homeassistant.components.airvisual_pro.const import DOMAIN as AIRVISUAL_PRO_DOMAIN +from homeassistant.components.airvisual_pro import DOMAIN as AIRVISUAL_PRO_DOMAIN from homeassistant.const import ( CONF_API_KEY, CONF_COUNTRY, diff --git a/tests/components/airvisual_pro/conftest.py b/tests/components/airvisual_pro/conftest.py index 4acf9188889..d25e9821d91 100644 --- a/tests/components/airvisual_pro/conftest.py +++ b/tests/components/airvisual_pro/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for AirVisual Pro.""" -from collections.abc import AsyncGenerator, Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest +from typing_extensions import AsyncGenerator, Generator from homeassistant.components.airvisual_pro.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD diff --git a/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr b/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr index cb1d3a7aee7..be709621e31 100644 --- a/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr +++ b/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr @@ -91,8 +91,6 @@ 'password': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'airvisual_pro', 'entry_id': '6a2b3770e53c28dc1eeb2515e906b0ce', 'minor_version': 1, diff --git a/tests/components/airvisual_pro/test_config_flow.py b/tests/components/airvisual_pro/test_config_flow.py index 9298b8cf528..803a335f52c 100644 --- a/tests/components/airvisual_pro/test_config_flow.py +++ b/tests/components/airvisual_pro/test_config_flow.py @@ -10,13 +10,11 @@ from pyairvisual.node import ( import pytest from homeassistant.components.airvisual_pro.const import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from tests.common import MockConfigEntry - pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -100,14 +98,22 @@ async def test_step_import(hass: HomeAssistant, config, setup_airvisual_pro) -> async def test_reauth( hass: HomeAssistant, config, - config_entry: MockConfigEntry, + config_entry, connect_errors, connect_mock, pro, setup_airvisual_pro, ) -> None: """Test re-auth (including 
errors).""" - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + "unique_id": config_entry.unique_id, + }, + data=config, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/airvisual_pro/test_diagnostics.py b/tests/components/airvisual_pro/test_diagnostics.py index 372b62eaf38..dd87d00be30 100644 --- a/tests/components/airvisual_pro/test_diagnostics.py +++ b/tests/components/airvisual_pro/test_diagnostics.py @@ -1,7 +1,6 @@ """Test AirVisual Pro diagnostics.""" from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -17,6 +16,7 @@ async def test_entry_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert await get_diagnostics_for_config_entry( - hass, hass_client, config_entry - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + == snapshot + ) diff --git a/tests/components/airzone/snapshots/test_diagnostics.ambr b/tests/components/airzone/snapshots/test_diagnostics.ambr index fb4f6530b1e..adf0176765c 100644 --- a/tests/components/airzone/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone/snapshots/test_diagnostics.ambr @@ -220,45 +220,6 @@ }), ]), }), - dict({ - 'data': list([ - dict({ - 'air_demand': 0, - 'coldStage': 0, - 'coldStages': 0, - 'coolmaxtemp': 30, - 'coolmintemp': 15, - 'coolsetpoint': 20, - 'errors': list([ - ]), - 'floor_demand': 0, - 'heatStage': 0, - 'heatStages': 0, - 'heatmaxtemp': 30, - 'heatmintemp': 15, - 'heatsetpoint': 20, - 'humidity': 0, - 'maxTemp': 30, - 'minTemp': 15, - 'mode': 6, - 'modes': list([ - 1, - 2, - 3, - 4, - 5, - 6, - ]), - 'name': 'Aux Heat', - 'on': 1, - 'roomTemp': 22, - 'setpoint': 20, - 'systemID': 4, - 'units': 0, - 'zoneID': 1, - }), - ]), - }), ]), }), 'version': dict({ @@ -277,8 +238,6 @@ 'port': 3000, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'airzone', 'entry_id': '6e7a0798c1734ba81d26ced0e690eaec', 'minor_version': 1, @@ -308,8 +267,12 @@ 'temp-set': 45, 'temp-unit': 0, }), - 'num-systems': 4, - 'num-zones': 8, + 'new-systems': list([ + ]), + 'new-zones': list([ + ]), + 'num-systems': 3, + 'num-zones': 7, 'systems': dict({ '1': dict({ 'available': True, @@ -359,23 +322,6 @@ ]), 'problems': False, }), - '4': dict({ - 'available': True, - 'full-name': 'Airzone [4] System', - 'id': 4, - 'master-system-zone': '4:1', - 'master-zone': 1, - 'mode': 6, - 'modes': list([ - 1, - 2, - 3, - 4, - 5, - 6, - ]), - 'problems': False, - }), }), 'version': '1.62', 'webserver': dict({ @@ -739,46 +685,6 @@ 'temp-step': 1.0, 'temp-unit': 1, }), - '4:1': dict({ - 'absolute-temp-max': 30.0, - 'absolute-temp-min': 15.0, - 'action': 5, - 'air-demand': False, - 'available': True, - 'cold-stage': 0, - 'cool-temp-max': 30.0, - 'cool-temp-min': 15.0, - 'cool-temp-set': 20.0, - 'demand': False, - 'double-set-point': False, - 'floor-demand': False, - 'full-name': 'Airzone [4:1] Aux Heat', - 'heat-stage': 0, - 'heat-temp-max': 30.0, - 'heat-temp-min': 15.0, - 'heat-temp-set': 20.0, - 'id': 1, - 'master': True, - 'mode': 6, - 'modes': list([ - 1, - 2, - 3, - 4, - 5, - 6, - ]), - 'name': 'Aux Heat', - 'on': True, - 'problems': False, - 'system': 4, - 'temp': 22.0, - 'temp-max': 30.0, - 'temp-min': 15.0, - 'temp-set': 
20.0, - 'temp-step': 0.5, - 'temp-unit': 0, - }), }), }), }) diff --git a/tests/components/airzone/test_climate.py b/tests/components/airzone/test_climate.py index 12a73a6a268..fa972bd3899 100644 --- a/tests/components/airzone/test_climate.py +++ b/tests/components/airzone/test_climate.py @@ -225,23 +225,6 @@ async def test_airzone_create_climates(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 25.0 assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 22.8 - state = hass.states.get("climate.aux_heat") - assert state.state == HVACMode.HEAT - assert state.attributes.get(ATTR_CURRENT_HUMIDITY) is None - assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 22 - assert state.attributes.get(ATTR_HVAC_ACTION) == HVACAction.IDLE - assert state.attributes.get(ATTR_HVAC_MODES) == [ - HVACMode.OFF, - HVACMode.COOL, - HVACMode.HEAT, - HVACMode.FAN_ONLY, - HVACMode.DRY, - ] - assert state.attributes.get(ATTR_MAX_TEMP) == 30 - assert state.attributes.get(ATTR_MIN_TEMP) == 15 - assert state.attributes.get(ATTR_TARGET_TEMP_STEP) == API_TEMPERATURE_STEP - assert state.attributes.get(ATTR_TEMPERATURE) == 20.0 - HVAC_MOCK_CHANGED = copy.deepcopy(HVAC_MOCK) HVAC_MOCK_CHANGED[API_SYSTEMS][0][API_DATA][0][API_MAX_TEMP] = 25 HVAC_MOCK_CHANGED[API_SYSTEMS][0][API_DATA][0][API_MIN_TEMP] = 10 @@ -265,7 +248,7 @@ async def test_airzone_create_climates(hass: HomeAssistant) -> None: ), ): async_fire_time_changed(hass, utcnow() + SCAN_INTERVAL) - await hass.async_block_till_done(wait_background_tasks=True) + await hass.async_block_till_done() state = hass.states.get("climate.salon") assert state.attributes.get(ATTR_MAX_TEMP) == 25 diff --git a/tests/components/airzone/test_coordinator.py b/tests/components/airzone/test_coordinator.py index 583758a6bee..06c77bebb81 100644 --- a/tests/components/airzone/test_coordinator.py +++ b/tests/components/airzone/test_coordinator.py @@ -8,7 +8,6 @@ from aioairzone.exceptions import ( InvalidMethod, SystemOutOfRange, ) -from freezegun.api import FrozenDateTimeFactory from homeassistant.components.airzone.const import DOMAIN from homeassistant.components.airzone.coordinator import SCAN_INTERVAL @@ -16,7 +15,7 @@ from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.util.dt import utcnow -from .util import CONFIG, HVAC_MOCK, HVAC_MOCK_NEW_ZONES, HVAC_VERSION_MOCK +from .util import CONFIG, HVAC_MOCK, HVAC_VERSION_MOCK from tests.common import MockConfigEntry, async_fire_time_changed @@ -65,62 +64,3 @@ async def test_coordinator_client_connector_error(hass: HomeAssistant) -> None: state = hass.states.get("sensor.despacho_temperature") assert state.state == STATE_UNAVAILABLE - - -async def test_coordinator_new_devices( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, -) -> None: - """Test new devices on coordinator update.""" - - config_entry = MockConfigEntry( - data=CONFIG, - domain=DOMAIN, - unique_id="airzone_unique_id", - ) - config_entry.add_to_hass(hass) - - with ( - patch( - "homeassistant.components.airzone.AirzoneLocalApi.get_dhw", - side_effect=HotWaterNotAvailable, - ), - patch( - "homeassistant.components.airzone.AirzoneLocalApi.get_hvac", - return_value=HVAC_MOCK_NEW_ZONES, - ) as mock_hvac, - patch( - "homeassistant.components.airzone.AirzoneLocalApi.get_hvac_systems", - side_effect=SystemOutOfRange, - ), - patch( - "homeassistant.components.airzone.AirzoneLocalApi.get_version", - return_value=HVAC_VERSION_MOCK, - ), - patch( - 
"homeassistant.components.airzone.AirzoneLocalApi.get_webserver", - side_effect=InvalidMethod, - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - mock_hvac.assert_called_once() - mock_hvac.reset_mock() - - state = hass.states.get("sensor.salon_temperature") - assert state.state == "19.6" - - state = hass.states.get("sensor.dorm_ppal_temperature") - assert state is None - - mock_hvac.return_value = HVAC_MOCK - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - mock_hvac.assert_called_once() - - state = hass.states.get("sensor.salon_temperature") - assert state.state == "19.6" - - state = hass.states.get("sensor.dorm_ppal_temperature") - assert state.state == "21.1" diff --git a/tests/components/airzone/test_diagnostics.py b/tests/components/airzone/test_diagnostics.py index bca75bca778..6a03b9f1985 100644 --- a/tests/components/airzone/test_diagnostics.py +++ b/tests/components/airzone/test_diagnostics.py @@ -4,7 +4,6 @@ from unittest.mock import patch from aioairzone.const import RAW_HVAC, RAW_VERSION, RAW_WEBSERVER from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.components.airzone.const import DOMAIN from homeassistant.core import HomeAssistant @@ -38,4 +37,4 @@ async def test_config_entry_diagnostics( }, ): result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot diff --git a/tests/components/airzone/test_select.py b/tests/components/airzone/test_select.py index 343c033728a..01617eab175 100644 --- a/tests/components/airzone/test_select.py +++ b/tests/components/airzone/test_select.py @@ -2,19 +2,17 @@ from unittest.mock import patch -from aioairzone.common import OperationMode from aioairzone.const import ( API_COLD_ANGLE, API_DATA, API_HEAT_ANGLE, - API_MODE, API_SLEEP, API_SYSTEM_ID, API_ZONE_ID, ) import pytest -from homeassistant.components.select import ATTR_OPTIONS, DOMAIN as SELECT_DOMAIN +from homeassistant.components.select import DOMAIN as SELECT_DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, SERVICE_SELECT_OPTION from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -33,9 +31,6 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.despacho_heat_angle") assert state.state == "90deg" - state = hass.states.get("select.despacho_mode") - assert state is None - state = hass.states.get("select.despacho_sleep") assert state.state == "off" @@ -45,9 +40,6 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.dorm_1_heat_angle") assert state.state == "90deg" - state = hass.states.get("select.dorm_1_mode") - assert state is None - state = hass.states.get("select.dorm_1_sleep") assert state.state == "off" @@ -57,9 +49,6 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.dorm_2_heat_angle") assert state.state == "90deg" - state = hass.states.get("select.dorm_2_mode") - assert state is None - state = hass.states.get("select.dorm_2_sleep") assert state.state == "off" @@ -69,9 +58,6 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.dorm_ppal_heat_angle") assert state.state == "50deg" - state = hass.states.get("select.dorm_ppal_mode") - assert state is 
None - state = hass.states.get("select.dorm_ppal_sleep") assert state.state == "30m" @@ -81,16 +67,6 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.salon_heat_angle") assert state.state == "90deg" - state = hass.states.get("select.salon_mode") - assert state.state == "heat" - assert state.attributes.get(ATTR_OPTIONS) == [ - "cool", - "dry", - "fan", - "heat", - "stop", - ] - state = hass.states.get("select.salon_sleep") assert state.state == "off" @@ -139,50 +115,6 @@ async def test_airzone_select_sleep(hass: HomeAssistant) -> None: assert state.state == "30m" -async def test_airzone_select_mode(hass: HomeAssistant) -> None: - """Test select HVAC mode.""" - - await async_init_integration(hass) - - put_hvac_mode = { - API_DATA: [ - { - API_SYSTEM_ID: 1, - API_ZONE_ID: 1, - API_MODE: OperationMode.COOLING, - } - ] - } - - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: "select.salon_mode", - ATTR_OPTION: "Invalid", - }, - blocking=True, - ) - - with patch( - "homeassistant.components.airzone.AirzoneLocalApi.put_hvac", - return_value=put_hvac_mode, - ): - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: "select.salon_mode", - ATTR_OPTION: "cool", - }, - blocking=True, - ) - - state = hass.states.get("select.salon_mode") - assert state.state == "cool" - - async def test_airzone_select_grille_angle(hass: HomeAssistant) -> None: """Test select sleep.""" diff --git a/tests/components/airzone/test_sensor.py b/tests/components/airzone/test_sensor.py index 352994d6313..3d75599d2d2 100644 --- a/tests/components/airzone/test_sensor.py +++ b/tests/components/airzone/test_sensor.py @@ -113,7 +113,7 @@ async def test_airzone_sensors_availability(hass: HomeAssistant) -> None: ), ): async_fire_time_changed(hass, utcnow() + SCAN_INTERVAL) - await hass.async_block_till_done(wait_background_tasks=True) + await hass.async_block_till_done() state = hass.states.get("sensor.dorm_ppal_temperature") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/airzone/test_switch.py b/tests/components/airzone/test_switch.py deleted file mode 100644 index f761b53ed4c..00000000000 --- a/tests/components/airzone/test_switch.py +++ /dev/null @@ -1,102 +0,0 @@ -"""The switch tests for the Airzone platform.""" - -from unittest.mock import patch - -from aioairzone.const import API_DATA, API_ON, API_SYSTEM_ID, API_ZONE_ID - -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_OFF, - STATE_ON, -) -from homeassistant.core import HomeAssistant - -from .util import async_init_integration - - -async def test_airzone_create_switches(hass: HomeAssistant) -> None: - """Test creation of switches.""" - - await async_init_integration(hass) - - state = hass.states.get("switch.despacho") - assert state.state == STATE_OFF - - state = hass.states.get("switch.dorm_1") - assert state.state == STATE_ON - - state = hass.states.get("switch.dorm_2") - assert state.state == STATE_OFF - - state = hass.states.get("switch.dorm_ppal") - assert state.state == STATE_ON - - state = hass.states.get("switch.salon") - assert state.state == STATE_OFF - - -async def test_airzone_switch_off(hass: HomeAssistant) -> None: - """Test switch off.""" - - await async_init_integration(hass) - - put_hvac_off = { - API_DATA: [ - { - API_SYSTEM_ID: 1, - 
API_ZONE_ID: 3, - API_ON: False, - } - ] - } - - with patch( - "homeassistant.components.airzone.AirzoneLocalApi.put_hvac", - return_value=put_hvac_off, - ): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: "switch.dorm_1", - }, - blocking=True, - ) - - state = hass.states.get("switch.dorm_1") - assert state.state == STATE_OFF - - -async def test_airzone_switch_on(hass: HomeAssistant) -> None: - """Test switch on.""" - - await async_init_integration(hass) - - put_hvac_on = { - API_DATA: [ - { - API_SYSTEM_ID: 1, - API_ZONE_ID: 5, - API_ON: True, - } - ] - } - - with patch( - "homeassistant.components.airzone.AirzoneLocalApi.put_hvac", - return_value=put_hvac_on, - ): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: "switch.dorm_2", - }, - blocking=True, - ) - - state = hass.states.get("switch.dorm_2") - assert state.state == STATE_ON diff --git a/tests/components/airzone/util.py b/tests/components/airzone/util.py index 278663b7a97..6e3e0eccc8f 100644 --- a/tests/components/airzone/util.py +++ b/tests/components/airzone/util.py @@ -1,6 +1,5 @@ """Tests for the Airzone integration.""" -from copy import deepcopy from unittest.mock import patch from aioairzone.const import ( @@ -272,47 +271,6 @@ HVAC_MOCK = { }, ] }, - { - API_DATA: [ - { - API_SYSTEM_ID: 4, - API_ZONE_ID: 1, - API_NAME: "Aux Heat", - API_ON: 1, - API_COOL_SET_POINT: 20, - API_COOL_MAX_TEMP: 30, - API_COOL_MIN_TEMP: 15, - API_HEAT_SET_POINT: 20, - API_HEAT_MAX_TEMP: 30, - API_HEAT_MIN_TEMP: 15, - API_MAX_TEMP: 30, - API_MIN_TEMP: 15, - API_SET_POINT: 20, - API_ROOM_TEMP: 22, - API_MODES: [1, 2, 3, 4, 5, 6], - API_MODE: 6, - API_COLD_STAGES: 0, - API_COLD_STAGE: 0, - API_HEAT_STAGES: 0, - API_HEAT_STAGE: 0, - API_HUMIDITY: 0, - API_UNITS: 0, - API_ERRORS: [], - API_AIR_DEMAND: 0, - API_FLOOR_DEMAND: 0, - }, - ] - }, - ] -} - -HVAC_MOCK_NEW_ZONES = { - API_SYSTEMS: [ - { - API_DATA: [ - deepcopy(HVAC_MOCK[API_SYSTEMS][0][API_DATA][0]), - ] - } ] } diff --git a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr index c6ad36916bf..31065d68a47 100644 --- a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr @@ -91,8 +91,6 @@ 'username': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'airzone_cloud', 'entry_id': 'd186e31edb46d64d14b9b2f11f1ebd9f', 'minor_version': 1, @@ -116,7 +114,6 @@ 'installation': 'installation1', 'is-connected': True, 'mode': 3, - 'model': 'Aidoo', 'modes': list([ 1, 2, @@ -136,7 +133,6 @@ }), 'temperature': 21.0, 'temperature-setpoint': 22.0, - 'temperature-setpoint-auto-air': 22.0, 'temperature-setpoint-cool-air': 22.0, 'temperature-setpoint-hot-air': 22.0, 'temperature-setpoint-max': 30.0, @@ -157,13 +153,9 @@ 'available': True, 'double-set-point': True, 'id': 'aidoo_pro', - 'indoor-exchanger-temperature': 26.0, - 'indoor-return-temperature': 26.0, - 'indoor-work-temperature': 25.0, 'installation': 'installation1', 'is-connected': True, 'mode': 2, - 'model': 'Aidoo Pro', 'modes': list([ 1, 2, @@ -172,12 +164,6 @@ 5, ]), 'name': 'Bron Pro', - 'outdoor-condenser-pressure': 150.0, - 'outdoor-discharge-temperature': 121.0, - 'outdoor-electric-current': 3.0, - 'outdoor-evaporator-pressure': 20.0, - 'outdoor-exchanger-temperature': -25.0, - 'outdoor-temperature': 29.0, 'power': True, 'problems': False, 'speed': 3, @@ -192,7 +178,6 @@ }), 
'temperature': 20.0, 'temperature-setpoint': 22.0, - 'temperature-setpoint-auto-air': 22.0, 'temperature-setpoint-cool-air': 22.0, 'temperature-setpoint-hot-air': 18.0, 'temperature-setpoint-max': 30.0, @@ -299,7 +284,6 @@ 'dhw1': dict({ 'active': False, 'available': True, - 'double-set-point': False, 'id': 'dhw1', 'installation': 'installation1', 'is-connected': True, @@ -361,7 +345,6 @@ 'temperature-setpoint-max': 30.0, 'temperature-setpoint-min': 15.0, 'temperature-step': 0.5, - 'user-access': 'admin', 'web-servers': list([ 'webserver1', 'webserver2', @@ -382,18 +365,15 @@ 'aq-present': True, 'aq-status': 'good', 'available': True, - 'double-set-point': False, 'errors': list([ dict({ '_id': 'error-id', }), ]), - 'firmware': '3.35', 'id': 'system1', 'installation': 'installation1', 'is-connected': True, 'mode': 2, - 'model': 'c6', 'modes': list([ 2, 3, @@ -411,12 +391,10 @@ 'webserver1': dict({ 'available': True, 'connection-date': '2023-05-07T12:55:51.000Z', - 'cpu-usage': 32, 'disconnection-date': '2023-01-01T22:26:55.376Z', 'firmware': '3.44', 'id': 'webserver1', 'installation': 'installation1', - 'memory-free': 42616, 'name': 'WebServer 11:22:33:44:55:66', 'type': 'ws_az', 'wifi-channel': 36, @@ -516,8 +494,6 @@ 'temperature-setpoint-stop-air': 24.0, 'temperature-setpoint-vent-air': 24.0, 'temperature-step': 0.5, - 'thermostat-fw': '3.52', - 'thermostat-model': 'blueface', 'web-server': 'webserver1', 'ws-connected': True, 'zone': 1, @@ -581,11 +557,6 @@ 'temperature-setpoint-stop-air': 24.0, 'temperature-setpoint-vent-air': 24.0, 'temperature-step': 0.5, - 'thermostat-battery': 54, - 'thermostat-battery-low': False, - 'thermostat-coverage': 76, - 'thermostat-fw': '3.33', - 'thermostat-model': 'thinkradio', 'web-server': 'webserver1', 'ws-connected': True, 'zone': 2, diff --git a/tests/components/airzone_cloud/test_binary_sensor.py b/tests/components/airzone_cloud/test_binary_sensor.py index bb2d0f78060..8e065821057 100644 --- a/tests/components/airzone_cloud/test_binary_sensor.py +++ b/tests/components/airzone_cloud/test_binary_sensor.py @@ -47,9 +47,6 @@ async def test_airzone_create_binary_sensors(hass: HomeAssistant) -> None: state = hass.states.get("binary_sensor.dormitorio_air_quality_active") assert state.state == STATE_OFF - state = hass.states.get("binary_sensor.dormitorio_battery") - assert state.state == STATE_OFF - state = hass.states.get("binary_sensor.dormitorio_floor_demand") assert state.state == STATE_OFF diff --git a/tests/components/airzone_cloud/test_climate.py b/tests/components/airzone_cloud/test_climate.py index 2b587680a57..37c5ff8e1af 100644 --- a/tests/components/airzone_cloud/test_climate.py +++ b/tests/components/airzone_cloud/test_climate.py @@ -97,7 +97,8 @@ async def test_airzone_create_climates(hass: HomeAssistant) -> None: assert state.attributes[ATTR_MAX_TEMP] == 30 assert state.attributes[ATTR_MIN_TEMP] == 15 assert state.attributes[ATTR_TARGET_TEMP_STEP] == API_DEFAULT_TEMP_STEP - assert state.attributes.get(ATTR_TEMPERATURE) == 22.0 + assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 22.0 + assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 18.0 # Groups state = hass.states.get("climate.group") @@ -588,7 +589,6 @@ async def test_airzone_climate_set_temp(hass: HomeAssistant) -> None: SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: "climate.bron_pro", - ATTR_HVAC_MODE: HVACMode.HEAT_COOL, ATTR_TARGET_TEMP_HIGH: 25.0, ATTR_TARGET_TEMP_LOW: 20.0, }, @@ -596,7 +596,7 @@ async def test_airzone_climate_set_temp(hass: HomeAssistant) -> None: ) state = 
hass.states.get("climate.bron_pro") - assert state.state == HVACMode.HEAT_COOL + assert state.state == HVACMode.HEAT assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 25.0 assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 20.0 diff --git a/tests/components/airzone_cloud/test_config_flow.py b/tests/components/airzone_cloud/test_config_flow.py index 04e253eb494..86a70ced51a 100644 --- a/tests/components/airzone_cloud/test_config_flow.py +++ b/tests/components/airzone_cloud/test_config_flow.py @@ -15,7 +15,6 @@ from .util import ( GET_INSTALLATION_MOCK, GET_INSTALLATIONS_MOCK, WS_ID, - mock_get_device_config, mock_get_device_status, mock_get_webserver, ) @@ -29,10 +28,6 @@ async def test_form(hass: HomeAssistant) -> None: "homeassistant.components.airzone_cloud.async_setup_entry", return_value=True, ) as mock_setup_entry, - patch( - "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_config", - side_effect=mock_get_device_config, - ), patch( "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_status", side_effect=mock_get_device_status, @@ -104,10 +99,6 @@ async def test_installations_list_error(hass: HomeAssistant) -> None: "homeassistant.components.airzone_cloud.async_setup_entry", return_value=True, ), - patch( - "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_config", - side_effect=mock_get_device_config, - ), patch( "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_status", side_effect=mock_get_device_status, diff --git a/tests/components/airzone_cloud/test_coordinator.py b/tests/components/airzone_cloud/test_coordinator.py index e2b80e66672..b4b7afd6086 100644 --- a/tests/components/airzone_cloud/test_coordinator.py +++ b/tests/components/airzone_cloud/test_coordinator.py @@ -14,7 +14,6 @@ from .util import ( CONFIG, GET_INSTALLATION_MOCK, GET_INSTALLATIONS_MOCK, - mock_get_device_config, mock_get_device_status, mock_get_webserver, ) @@ -33,10 +32,6 @@ async def test_coordinator_client_connector_error(hass: HomeAssistant) -> None: config_entry.add_to_hass(hass) with ( - patch( - "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_config", - side_effect=mock_get_device_config, - ) as mock_device_config, patch( "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_status", side_effect=mock_get_device_status, @@ -61,13 +56,11 @@ async def test_coordinator_client_connector_error(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - mock_device_config.assert_called() mock_device_status.assert_called() mock_installation.assert_awaited_once() mock_installations.assert_called_once() mock_webserver.assert_called() - mock_device_config.reset_mock() mock_device_status.reset_mock() mock_installation.reset_mock() mock_installations.reset_mock() diff --git a/tests/components/airzone_cloud/test_diagnostics.py b/tests/components/airzone_cloud/test_diagnostics.py index d3e23fc7f4b..254dba16b09 100644 --- a/tests/components/airzone_cloud/test_diagnostics.py +++ b/tests/components/airzone_cloud/test_diagnostics.py @@ -15,7 +15,6 @@ from aioairzone_cloud.const import ( RAW_WEBSERVERS, ) from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.components.airzone_cloud.const import DOMAIN from homeassistant.const import CONF_ID @@ -112,4 +111,4 @@ async def test_config_entry_diagnostics( return_value=RAW_DATA_MOCK, ): result = await get_diagnostics_for_config_entry(hass, 
hass_client, config_entry) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot diff --git a/tests/components/airzone_cloud/test_init.py b/tests/components/airzone_cloud/test_init.py index 6cab0be6e7c..b5b4bcebaa8 100644 --- a/tests/components/airzone_cloud/test_init.py +++ b/tests/components/airzone_cloud/test_init.py @@ -2,8 +2,6 @@ from unittest.mock import patch -from aioairzone_cloud.exceptions import AirzoneTimeout - from homeassistant.components.airzone_cloud.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -52,20 +50,3 @@ async def test_unload_entry(hass: HomeAssistant) -> None: await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.NOT_LOADED - - -async def test_init_api_timeout(hass: HomeAssistant) -> None: - """Test API timeouts when loading the Airzone Cloud integration.""" - - with patch( - "homeassistant.components.airzone_cloud.AirzoneCloudApi.login", - side_effect=AirzoneTimeout, - ): - config_entry = MockConfigEntry( - data=CONFIG, - domain=DOMAIN, - unique_id="airzone_cloud_unique_id", - ) - config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(config_entry.entry_id) is False diff --git a/tests/components/airzone_cloud/test_select.py b/tests/components/airzone_cloud/test_select.py index d0993365083..5a6b6104468 100644 --- a/tests/components/airzone_cloud/test_select.py +++ b/tests/components/airzone_cloud/test_select.py @@ -4,7 +4,7 @@ from unittest.mock import patch import pytest -from homeassistant.components.select import ATTR_OPTIONS, DOMAIN as SELECT_DOMAIN +from homeassistant.components.select import DOMAIN as SELECT_DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, SERVICE_SELECT_OPTION from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -22,21 +22,9 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.dormitorio_air_quality_mode") assert state.state == "auto" - state = hass.states.get("select.dormitorio_mode") - assert state is None - state = hass.states.get("select.salon_air_quality_mode") assert state.state == "auto" - state = hass.states.get("select.salon_mode") - assert state.state == "cool" - assert state.attributes.get(ATTR_OPTIONS) == [ - "cool", - "dry", - "fan", - "heat", - ] - async def test_airzone_select_air_quality_mode(hass: HomeAssistant) -> None: """Test select Air Quality mode.""" @@ -70,37 +58,3 @@ async def test_airzone_select_air_quality_mode(hass: HomeAssistant) -> None: state = hass.states.get("select.dormitorio_air_quality_mode") assert state.state == "off" - - -async def test_airzone_select_mode(hass: HomeAssistant) -> None: - """Test select HVAC mode.""" - - await async_init_integration(hass) - - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: "select.salon_mode", - ATTR_OPTION: "Invalid", - }, - blocking=True, - ) - - with patch( - "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_patch_device", - return_value=None, - ): - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: "select.salon_mode", - ATTR_OPTION: "heat", - }, - blocking=True, - ) - - state = hass.states.get("select.salon_mode") - assert state.state == "heat" diff --git 
a/tests/components/airzone_cloud/test_sensor.py b/tests/components/airzone_cloud/test_sensor.py index 672e10adedb..31fe52f3302 100644 --- a/tests/components/airzone_cloud/test_sensor.py +++ b/tests/components/airzone_cloud/test_sensor.py @@ -20,39 +20,9 @@ async def test_airzone_create_sensors(hass: HomeAssistant) -> None: state = hass.states.get("sensor.bron_pro_temperature") assert state.state == "20.0" - state = hass.states.get("sensor.bron_pro_indoor_exchanger_temperature") - assert state.state == "26.0" - - state = hass.states.get("sensor.bron_pro_indoor_return_temperature") - assert state.state == "26.0" - - state = hass.states.get("sensor.bron_pro_indoor_working_temperature") - assert state.state == "25.0" - - state = hass.states.get("sensor.bron_pro_outdoor_condenser_pressure") - assert state.state == "150.0" - - state = hass.states.get("sensor.bron_pro_outdoor_discharge_temperature") - assert state.state == "121.0" - - state = hass.states.get("sensor.bron_pro_outdoor_electric_current") - assert state.state == "3.0" - - state = hass.states.get("sensor.bron_pro_outdoor_evaporator_pressure") - assert state.state == "20.0" - - state = hass.states.get("sensor.bron_pro_outdoor_exchanger_temperature") - assert state.state == "-25.0" - - state = hass.states.get("sensor.bron_pro_outdoor_temperature") - assert state.state == "29.0" - # WebServers - state = hass.states.get("sensor.webserver_11_22_33_44_55_66_cpu_usage") - assert state.state == "32" - - state = hass.states.get("sensor.webserver_11_22_33_44_55_66_free_memory") - assert state.state == "42616" + state = hass.states.get("sensor.webserver_11_22_33_44_55_66_signal_strength") + assert state.state == "-56" state = hass.states.get("sensor.webserver_11_22_33_44_55_67_signal_strength") assert state.state == "-77" @@ -61,9 +31,6 @@ async def test_airzone_create_sensors(hass: HomeAssistant) -> None: state = hass.states.get("sensor.dormitorio_air_quality_index") assert state.state == "1" - state = hass.states.get("sensor.dormitorio_battery") - assert state.state == "54" - state = hass.states.get("sensor.dormitorio_pm1") assert state.state == "3" @@ -73,9 +40,6 @@ async def test_airzone_create_sensors(hass: HomeAssistant) -> None: state = hass.states.get("sensor.dormitorio_pm10") assert state.state == "3" - state = hass.states.get("sensor.dormitorio_signal_percentage") - assert state.state == "76" - state = hass.states.get("sensor.dormitorio_temperature") assert state.state == "25.0" diff --git a/tests/components/airzone_cloud/test_switch.py b/tests/components/airzone_cloud/test_switch.py deleted file mode 100644 index 5ee65f11fa8..00000000000 --- a/tests/components/airzone_cloud/test_switch.py +++ /dev/null @@ -1,71 +0,0 @@ -"""The switch tests for the Airzone Cloud platform.""" - -from unittest.mock import patch - -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_OFF, - STATE_ON, -) -from homeassistant.core import HomeAssistant - -from .util import async_init_integration - - -async def test_airzone_create_switches(hass: HomeAssistant) -> None: - """Test creation of switches.""" - - await async_init_integration(hass) - - state = hass.states.get("switch.dormitorio") - assert state.state == STATE_OFF - - state = hass.states.get("switch.salon") - assert state.state == STATE_ON - - -async def test_airzone_switch_off(hass: HomeAssistant) -> None: - """Test switch off.""" - - await async_init_integration(hass) - - with patch( - 
"homeassistant.components.airzone_cloud.AirzoneCloudApi.api_patch_device", - return_value=None, - ): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: "switch.salon", - }, - blocking=True, - ) - - state = hass.states.get("switch.salon") - assert state.state == STATE_OFF - - -async def test_airzone_switch_on(hass: HomeAssistant) -> None: - """Test switch on.""" - - await async_init_integration(hass) - - with patch( - "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_patch_device", - return_value=None, - ): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: "switch.dormitorio", - }, - blocking=True, - ) - - state = hass.states.get("switch.dormitorio") - assert state.state == STATE_ON diff --git a/tests/components/airzone_cloud/util.py b/tests/components/airzone_cloud/util.py index 52b0ae0bec3..6e7dad707f1 100644 --- a/tests/components/airzone_cloud/util.py +++ b/tests/components/airzone_cloud/util.py @@ -3,9 +3,8 @@ from typing import Any from unittest.mock import patch -from aioairzone_cloud.common import OperationMode, UserAccessType +from aioairzone_cloud.common import OperationMode from aioairzone_cloud.const import ( - API_ACCESS_TYPE, API_ACTIVE, API_AIR_ACTIVE, API_AQ_ACTIVE, @@ -24,21 +23,12 @@ from aioairzone_cloud.const import ( API_CELSIUS, API_CONFIG, API_CONNECTION_DATE, - API_CONSUMPTION_UE, - API_CPU_WS, API_DEVICE_ID, API_DEVICES, - API_DISCH_COMP_TEMP_UE, API_DISCONNECTION_DATE, API_DOUBLE_SET_POINT, API_ERRORS, - API_EXCH_HEAT_TEMP_IU, - API_EXCH_HEAT_TEMP_UE, - API_EXT_TEMP, API_FAH, - API_FREE, - API_FREE_MEM, - API_GENERAL, API_GROUP_ID, API_GROUPS, API_HUMIDITY, @@ -51,13 +41,9 @@ from aioairzone_cloud.const import ( API_MODE_AVAIL, API_NAME, API_OLD_ID, - API_PC_UE, - API_PE_UE, API_POWER, API_POWERFUL_MODE, API_RAD_ACTIVE, - API_RADIO_BATTERY_PERCENT, - API_RADIO_COVERAGE_PERCENT, API_RANGE_MAX_AIR, API_RANGE_MIN_AIR, API_RANGE_SP_MAX_ACS, @@ -76,7 +62,6 @@ from aioairzone_cloud.const import ( API_RANGE_SP_MIN_HOT_AIR, API_RANGE_SP_MIN_STOP_AIR, API_RANGE_SP_MIN_VENT_AIR, - API_RETURN_TEMP, API_SETPOINT, API_SP_AIR_AUTO, API_SP_AIR_COOL, @@ -94,15 +79,10 @@ from aioairzone_cloud.const import ( API_STAT_SSID, API_STATUS, API_STEP, - API_SYSTEM_FW, API_SYSTEM_NUMBER, - API_SYSTEM_TYPE, API_TANK_TEMP, - API_THERMOSTAT_FW, - API_THERMOSTAT_TYPE, API_TYPE, API_WARNINGS, - API_WORK_TEMP, API_WS_CONNECTED, API_WS_FW, API_WS_ID, @@ -204,7 +184,6 @@ GET_INSTALLATIONS_MOCK = { { API_INSTALLATION_ID: CONFIG[CONF_ID], API_NAME: "House", - API_ACCESS_TYPE: UserAccessType.ADMIN, API_WS_IDS: [ WS_ID, WS_ID_AIDOO, @@ -223,12 +202,6 @@ GET_WEBSERVER_MOCK = { API_STAT_AP_MAC: "00:00:00:00:00:00", }, API_STATUS: { - API_CPU_WS: { - API_GENERAL: 32, - }, - API_FREE_MEM: { - API_FREE: 42616, - }, API_IS_CONNECTED: True, API_STAT_QUALITY: 4, API_STAT_RSSI: -56, @@ -272,42 +245,6 @@ GET_WEBSERVER_MOCK_AIDOO_PRO = { } -def mock_get_device_config(device: Device) -> dict[str, Any]: - """Mock API device config.""" - - if device.get_id() == "aidoo_pro": - return { - API_CONSUMPTION_UE: 3, - API_DISCH_COMP_TEMP_UE: {API_CELSIUS: 121, API_FAH: -250}, - API_EXCH_HEAT_TEMP_IU: {API_CELSIUS: 26, API_FAH: 79}, - API_EXCH_HEAT_TEMP_UE: {API_CELSIUS: -25, API_FAH: -13}, - API_EXT_TEMP: {API_CELSIUS: 29, API_FAH: 84}, - API_PC_UE: 0.15, - API_PE_UE: 0.02, - API_RETURN_TEMP: {API_CELSIUS: 26, API_FAH: 79}, - API_WORK_TEMP: {API_CELSIUS: 25, API_FAH: 77}, - } - if device.get_id() == "system1": - return { - 
API_SYSTEM_FW: "3.35", - API_SYSTEM_TYPE: "c6", - } - if device.get_id() == "zone1": - return { - API_THERMOSTAT_FW: "3.52", - API_THERMOSTAT_TYPE: "blueface", - } - if device.get_id() == "zone2": - return { - API_THERMOSTAT_FW: "3.33", - API_THERMOSTAT_TYPE: "thinkradio", - API_RADIO_BATTERY_PERCENT: 54, - API_RADIO_COVERAGE_PERCENT: 76, - } - - return {} - - def mock_get_device_status(device: Device) -> dict[str, Any]: """Mock API device status.""" @@ -533,10 +470,6 @@ async def async_init_integration( config_entry.add_to_hass(hass) with ( - patch( - "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_config", - side_effect=mock_get_device_config, - ), patch( "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_status", side_effect=mock_get_device_status, diff --git a/tests/components/aladdin_connect/conftest.py b/tests/components/aladdin_connect/conftest.py new file mode 100644 index 00000000000..2c158998f49 --- /dev/null +++ b/tests/components/aladdin_connect/conftest.py @@ -0,0 +1,29 @@ +"""Test fixtures for the Aladdin Connect Garage Door integration.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from typing_extensions import Generator + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.aladdin_connect.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return an Aladdin Connect config entry.""" + return MockConfigEntry( + domain="aladdin_connect", + data={}, + title="test@test.com", + unique_id="aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee", + version=2, + ) diff --git a/tests/components/aladdin_connect/test_config_flow.py b/tests/components/aladdin_connect/test_config_flow.py new file mode 100644 index 00000000000..7154c53b9f6 --- /dev/null +++ b/tests/components/aladdin_connect/test_config_flow.py @@ -0,0 +1,230 @@ +"""Test the Aladdin Connect Garage Door config flow.""" + +# from unittest.mock import AsyncMock +# +# import pytest +# +# from homeassistant.components.aladdin_connect.const import ( +# DOMAIN, +# OAUTH2_AUTHORIZE, +# OAUTH2_TOKEN, +# ) +# from homeassistant.components.application_credentials import ( +# ClientCredential, +# async_import_client_credential, +# ) +# from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, ConfigFlowResult +# from homeassistant.core import HomeAssistant +# from homeassistant.data_entry_flow import FlowResultType +# from homeassistant.helpers import config_entry_oauth2_flow +# from homeassistant.setup import async_setup_component +# +# from tests.common import MockConfigEntry +# from tests.test_util.aiohttp import AiohttpClientMocker +# from tests.typing import ClientSessionGenerator +# +# CLIENT_ID = "1234" +# CLIENT_SECRET = "5678" +# +# EXAMPLE_TOKEN = ( +# "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJhYWFhYWFhYS1iYmJiLWNjY2MtZGRk" +# "ZC1lZWVlZWVlZWVlZWUiLCJuYW1lIjoiSm9obiBEb2UiLCJpYXQiOjE1MTYyMzkwMjIsInVzZXJuYW" +# "1lIjoidGVzdEB0ZXN0LmNvbSJ9.CTU1YItIrUl8nSM3koJxlFJr5CjLghgc9gS6h45D8dE" +# ) +# +# +# @pytest.fixture +# async def setup_credentials(hass: HomeAssistant) -> None: +# """Fixture to setup credentials.""" +# assert await async_setup_component(hass, "application_credentials", {}) +# await async_import_client_credential( +# hass, +# DOMAIN, +# ClientCredential(CLIENT_ID, CLIENT_SECRET), +# ) +# +# +# async def 
_oauth_actions( +# hass: HomeAssistant, +# result: ConfigFlowResult, +# hass_client_no_auth: ClientSessionGenerator, +# aioclient_mock: AiohttpClientMocker, +# ) -> None: +# state = config_entry_oauth2_flow._encode_jwt( +# hass, +# { +# "flow_id": result["flow_id"], +# "redirect_uri": "https://example.com/auth/external/callback", +# }, +# ) +# +# assert result["url"] == ( +# f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" +# "&redirect_uri=https://example.com/auth/external/callback" +# f"&state={state}" +# ) +# +# client = await hass_client_no_auth() +# resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") +# assert resp.status == 200 +# assert resp.headers["content-type"] == "text/html; charset=utf-8" +# +# aioclient_mock.post( +# OAUTH2_TOKEN, +# json={ +# "refresh_token": "mock-refresh-token", +# "access_token": EXAMPLE_TOKEN, +# "type": "Bearer", +# "expires_in": 60, +# }, +# ) +# +# +# @pytest.mark.skip(reason="Integration disabled") +# @pytest.mark.usefixtures("current_request_with_host") +# async def test_full_flow( +# hass: HomeAssistant, +# hass_client_no_auth: ClientSessionGenerator, +# aioclient_mock: AiohttpClientMocker, +# setup_credentials: None, +# mock_setup_entry: AsyncMock, +# ) -> None: +# """Check full flow.""" +# result = await hass.config_entries.flow.async_init( +# DOMAIN, context={"source": SOURCE_USER} +# ) +# await _oauth_actions(hass, result, hass_client_no_auth, aioclient_mock) +# +# result = await hass.config_entries.flow.async_configure(result["flow_id"]) +# assert result["type"] is FlowResultType.CREATE_ENTRY +# assert result["title"] == "test@test.com" +# assert result["data"]["token"]["access_token"] == EXAMPLE_TOKEN +# assert result["data"]["token"]["refresh_token"] == "mock-refresh-token" +# assert result["result"].unique_id == "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee" +# +# assert len(hass.config_entries.async_entries(DOMAIN)) == 1 +# assert len(mock_setup_entry.mock_calls) == 1 +# +# +# @pytest.mark.skip(reason="Integration disabled") +# @pytest.mark.usefixtures("current_request_with_host") +# async def test_duplicate_entry( +# hass: HomeAssistant, +# hass_client_no_auth: ClientSessionGenerator, +# aioclient_mock: AiohttpClientMocker, +# setup_credentials: None, +# mock_config_entry: MockConfigEntry, +# ) -> None: +# """Test we abort with duplicate entry.""" +# mock_config_entry.add_to_hass(hass) +# result = await hass.config_entries.flow.async_init( +# DOMAIN, context={"source": SOURCE_USER} +# ) +# await _oauth_actions(hass, result, hass_client_no_auth, aioclient_mock) +# +# result = await hass.config_entries.flow.async_configure(result["flow_id"]) +# assert result["type"] is FlowResultType.ABORT +# assert result["reason"] == "already_configured" +# +# +# @pytest.mark.skip(reason="Integration disabled") +# @pytest.mark.usefixtures("current_request_with_host") +# async def test_reauth( +# hass: HomeAssistant, +# hass_client_no_auth: ClientSessionGenerator, +# aioclient_mock: AiohttpClientMocker, +# setup_credentials: None, +# mock_config_entry: MockConfigEntry, +# mock_setup_entry: AsyncMock, +# ) -> None: +# """Test reauthentication.""" +# mock_config_entry.add_to_hass(hass) +# result = await hass.config_entries.flow.async_init( +# DOMAIN, +# context={ +# "source": SOURCE_REAUTH, +# "entry_id": mock_config_entry.entry_id, +# }, +# data=mock_config_entry.data, +# ) +# assert result["type"] is FlowResultType.FORM +# assert result["step_id"] == "reauth_confirm" +# result = await 
hass.config_entries.flow.async_configure(result["flow_id"], {}) +# await _oauth_actions(hass, result, hass_client_no_auth, aioclient_mock) +# +# result = await hass.config_entries.flow.async_configure(result["flow_id"]) +# assert result["type"] is FlowResultType.ABORT +# assert result["reason"] == "reauth_successful" +# +# +# @pytest.mark.skip(reason="Integration disabled") +# @pytest.mark.usefixtures("current_request_with_host") +# async def test_reauth_wrong_account( +# hass: HomeAssistant, +# hass_client_no_auth: ClientSessionGenerator, +# aioclient_mock: AiohttpClientMocker, +# setup_credentials: None, +# mock_setup_entry: AsyncMock, +# ) -> None: +# """Test reauthentication with wrong account.""" +# config_entry = MockConfigEntry( +# domain=DOMAIN, +# data={}, +# title="test@test.com", +# unique_id="aaaaaaaa-bbbb-ffff-dddd-eeeeeeeeeeee", +# version=2, +# ) +# config_entry.add_to_hass(hass) +# result = await hass.config_entries.flow.async_init( +# DOMAIN, +# context={ +# "source": SOURCE_REAUTH, +# "entry_id": config_entry.entry_id, +# }, +# data=config_entry.data, +# ) +# assert result["type"] is FlowResultType.FORM +# assert result["step_id"] == "reauth_confirm" +# result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) +# await _oauth_actions(hass, result, hass_client_no_auth, aioclient_mock) +# +# result = await hass.config_entries.flow.async_configure(result["flow_id"]) +# assert result["type"] is FlowResultType.ABORT +# assert result["reason"] == "wrong_account" +# +# +# @pytest.mark.skip(reason="Integration disabled") +# @pytest.mark.usefixtures("current_request_with_host") +# async def test_reauth_old_account( +# hass: HomeAssistant, +# hass_client_no_auth: ClientSessionGenerator, +# aioclient_mock: AiohttpClientMocker, +# setup_credentials: None, +# mock_setup_entry: AsyncMock, +# ) -> None: +# """Test reauthentication with old account.""" +# config_entry = MockConfigEntry( +# domain=DOMAIN, +# data={}, +# title="test@test.com", +# unique_id="test@test.com", +# version=2, +# ) +# config_entry.add_to_hass(hass) +# result = await hass.config_entries.flow.async_init( +# DOMAIN, +# context={ +# "source": SOURCE_REAUTH, +# "entry_id": config_entry.entry_id, +# }, +# data=config_entry.data, +# ) +# assert result["type"] is FlowResultType.FORM +# assert result["step_id"] == "reauth_confirm" +# result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) +# await _oauth_actions(hass, result, hass_client_no_auth, aioclient_mock) +# +# result = await hass.config_entries.flow.async_configure(result["flow_id"]) +# assert result["type"] is FlowResultType.ABORT +# assert result["reason"] == "reauth_successful" +# assert config_entry.unique_id == "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee" diff --git a/tests/components/aladdin_connect/test_init.py b/tests/components/aladdin_connect/test_init.py deleted file mode 100644 index b01af287b7b..00000000000 --- a/tests/components/aladdin_connect/test_init.py +++ /dev/null @@ -1,50 +0,0 @@ -"""Tests for the Aladdin Connect integration.""" - -from homeassistant.components.aladdin_connect import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir - -from tests.common import MockConfigEntry - - -async def test_aladdin_connect_repair_issue( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test the Aladdin Connect configuration entry loading/unloading handles the repair.""" - 
config_entry_1 = MockConfigEntry( - title="Example 1", - domain=DOMAIN, - ) - config_entry_1.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry_1.entry_id) - await hass.async_block_till_done() - assert config_entry_1.state is ConfigEntryState.LOADED - - # Add a second one - config_entry_2 = MockConfigEntry( - title="Example 2", - domain=DOMAIN, - ) - config_entry_2.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry_2.entry_id) - await hass.async_block_till_done() - - assert config_entry_2.state is ConfigEntryState.LOADED - assert issue_registry.async_get_issue(DOMAIN, DOMAIN) - - # Remove the first one - await hass.config_entries.async_remove(config_entry_1.entry_id) - await hass.async_block_till_done() - - assert config_entry_1.state is ConfigEntryState.NOT_LOADED - assert config_entry_2.state is ConfigEntryState.LOADED - assert issue_registry.async_get_issue(DOMAIN, DOMAIN) - - # Remove the second one - await hass.config_entries.async_remove(config_entry_2.entry_id) - await hass.async_block_till_done() - - assert config_entry_1.state is ConfigEntryState.NOT_LOADED - assert config_entry_2.state is ConfigEntryState.NOT_LOADED - assert issue_registry.async_get_issue(DOMAIN, DOMAIN) is None diff --git a/tests/components/alarm_control_panel/common.py b/tests/components/alarm_control_panel/common.py index 8a631eeff36..9ec419d8cf0 100644 --- a/tests/components/alarm_control_panel/common.py +++ b/tests/components/alarm_control_panel/common.py @@ -8,7 +8,6 @@ from homeassistant.components.alarm_control_panel import ( DOMAIN, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, - AlarmControlPanelState, ) from homeassistant.const import ( ATTR_CODE, @@ -21,15 +20,18 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_VACATION, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMED_VACATION, + STATE_ALARM_DISARMED, + STATE_ALARM_TRIGGERED, ) -from homeassistant.core import HomeAssistant from tests.common import MockEntity -async def async_alarm_disarm( - hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_alarm_disarm(hass, code=None, entity_id=ENTITY_MATCH_ALL): """Send the alarm the command for disarm.""" data = {} if code: @@ -40,9 +42,7 @@ async def async_alarm_disarm( await hass.services.async_call(DOMAIN, SERVICE_ALARM_DISARM, data, blocking=True) -async def async_alarm_arm_home( - hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_alarm_arm_home(hass, code=None, entity_id=ENTITY_MATCH_ALL): """Send the alarm the command for disarm.""" data = {} if code: @@ -53,9 +53,7 @@ async def async_alarm_arm_home( await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_HOME, data, blocking=True) -async def async_alarm_arm_away( - hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_alarm_arm_away(hass, code=None, entity_id=ENTITY_MATCH_ALL): """Send the alarm the command for disarm.""" data = {} if code: @@ -66,9 +64,7 @@ async def async_alarm_arm_away( await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_AWAY, data, blocking=True) -async def async_alarm_arm_night( - hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_alarm_arm_night(hass, code=None, entity_id=ENTITY_MATCH_ALL): """Send the alarm the command for disarm.""" data = {} if code: 
@@ -79,9 +75,7 @@ async def async_alarm_arm_night( await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_NIGHT, data, blocking=True) -async def async_alarm_arm_vacation( - hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_alarm_arm_vacation(hass, code=None, entity_id=ENTITY_MATCH_ALL): """Send the alarm the command for vacation mode.""" data = {} if code: @@ -94,9 +88,7 @@ async def async_alarm_arm_vacation( ) -async def async_alarm_trigger( - hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_alarm_trigger(hass, code=None, entity_id=ENTITY_MATCH_ALL): """Send the alarm the command for disarm.""" data = {} if code: @@ -107,9 +99,7 @@ async def async_alarm_trigger( await hass.services.async_call(DOMAIN, SERVICE_ALARM_TRIGGER, data, blocking=True) -async def async_alarm_arm_custom_bypass( - hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_alarm_arm_custom_bypass(hass, code=None, entity_id=ENTITY_MATCH_ALL): """Send the alarm the command for disarm.""" data = {} if code: @@ -140,31 +130,31 @@ class MockAlarm(MockEntity, AlarmControlPanelEntity): def alarm_arm_away(self, code=None): """Send arm away command.""" - self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY + self._attr_state = STATE_ALARM_ARMED_AWAY self.schedule_update_ha_state() def alarm_arm_home(self, code=None): """Send arm home command.""" - self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME + self._attr_state = STATE_ALARM_ARMED_HOME self.schedule_update_ha_state() def alarm_arm_night(self, code=None): """Send arm night command.""" - self._attr_alarm_state = AlarmControlPanelState.ARMED_NIGHT + self._attr_state = STATE_ALARM_ARMED_NIGHT self.schedule_update_ha_state() def alarm_arm_vacation(self, code=None): """Send arm night command.""" - self._attr_alarm_state = AlarmControlPanelState.ARMED_VACATION + self._attr_state = STATE_ALARM_ARMED_VACATION self.schedule_update_ha_state() def alarm_disarm(self, code=None): """Send disarm command.""" if code == "1234": - self._attr_alarm_state = AlarmControlPanelState.DISARMED + self._attr_state = STATE_ALARM_DISARMED self.schedule_update_ha_state() def alarm_trigger(self, code=None): """Send alarm trigger command.""" - self._attr_alarm_state = AlarmControlPanelState.TRIGGERED + self._attr_state = STATE_ALARM_TRIGGERED self.schedule_update_ha_state() diff --git a/tests/components/alarm_control_panel/conftest.py b/tests/components/alarm_control_panel/conftest.py index 3e82b935493..620b74dd80e 100644 --- a/tests/components/alarm_control_panel/conftest.py +++ b/tests/components/alarm_control_panel/conftest.py @@ -1,9 +1,9 @@ """Fixturs for Alarm Control Panel tests.""" -from collections.abc import Generator from unittest.mock import MagicMock import pytest +from typing_extensions import Generator from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, @@ -129,7 +129,7 @@ async def code_arm_required() -> bool: @pytest.fixture(name="supported_features") -async def alarm_control_panel_supported_features() -> AlarmControlPanelEntityFeature: +async def lock_supported_features() -> AlarmControlPanelEntityFeature: """Return the supported features for the test alarm control panel entity.""" return ( AlarmControlPanelEntityFeature.ARM_AWAY @@ -142,7 +142,7 @@ async def alarm_control_panel_supported_features() -> AlarmControlPanelEntityFea 
 @pytest.fixture(name="mock_alarm_control_panel_entity")
-async def setup_alarm_control_panel_platform_test_entity(
+async def setup_lock_platform_test_entity(
     hass: HomeAssistant,
     entity_registry: er.EntityRegistry,
     code_format: CodeFormat | None,
diff --git a/tests/components/alarm_control_panel/test_device_action.py b/tests/components/alarm_control_panel/test_device_action.py
index a7335017691..9c5aaffd733 100644
--- a/tests/components/alarm_control_panel/test_device_action.py
+++ b/tests/components/alarm_control_panel/test_device_action.py
@@ -7,10 +7,19 @@ from homeassistant.components import automation
 from homeassistant.components.alarm_control_panel import (
     DOMAIN,
     AlarmControlPanelEntityFeature,
-    AlarmControlPanelState,
 )
 from homeassistant.components.device_automation import DeviceAutomationType
-from homeassistant.const import CONF_PLATFORM, STATE_UNKNOWN, EntityCategory
+from homeassistant.const import (
+    CONF_PLATFORM,
+    STATE_ALARM_ARMED_AWAY,
+    STATE_ALARM_ARMED_HOME,
+    STATE_ALARM_ARMED_NIGHT,
+    STATE_ALARM_ARMED_VACATION,
+    STATE_ALARM_DISARMED,
+    STATE_ALARM_TRIGGERED,
+    STATE_UNKNOWN,
+    EntityCategory,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dr, entity_registry as er
 from homeassistant.setup import async_setup_component
@@ -532,44 +541,27 @@ async def test_action(
     hass.bus.async_fire("test_event_arm_away")
     await hass.async_block_till_done()
-    assert (
-        hass.states.get(entity_entry.entity_id).state
-        == AlarmControlPanelState.ARMED_AWAY
-    )
+    assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_AWAY

     hass.bus.async_fire("test_event_arm_home")
     await hass.async_block_till_done()
-    assert (
-        hass.states.get(entity_entry.entity_id).state
-        == AlarmControlPanelState.ARMED_HOME
-    )
+    assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_HOME

     hass.bus.async_fire("test_event_arm_vacation")
     await hass.async_block_till_done()
-    assert (
-        hass.states.get(entity_entry.entity_id).state
-        == AlarmControlPanelState.ARMED_VACATION
-    )
+    assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_VACATION

     hass.bus.async_fire("test_event_arm_night")
     await hass.async_block_till_done()
-    assert (
-        hass.states.get(entity_entry.entity_id).state
-        == AlarmControlPanelState.ARMED_NIGHT
-    )
+    assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_NIGHT

     hass.bus.async_fire("test_event_disarm")
     await hass.async_block_till_done()
-    assert (
-        hass.states.get(entity_entry.entity_id).state == AlarmControlPanelState.DISARMED
-    )
+    assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_DISARMED

     hass.bus.async_fire("test_event_trigger")
     await hass.async_block_till_done()
-    assert (
-        hass.states.get(entity_entry.entity_id).state
-        == AlarmControlPanelState.TRIGGERED
-    )
+    assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_TRIGGERED


 async def test_action_legacy(
@@ -623,7 +615,4 @@ async def test_action_legacy(
     hass.bus.async_fire("test_event_arm_away")
     await hass.async_block_till_done()
-    assert (
-        hass.states.get(entity_entry.entity_id).state
-        == AlarmControlPanelState.ARMED_AWAY
-    )
+    assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_AWAY
diff --git a/tests/components/alarm_control_panel/test_device_condition.py b/tests/components/alarm_control_panel/test_device_condition.py
index 37cbc466e6d..da1d77f50a3 100644
--- a/tests/components/alarm_control_panel/test_device_condition.py
+++ b/tests/components/alarm_control_panel/test_device_condition.py
@@ -7,10 +7,18 @@ from homeassistant.components import automation
 from homeassistant.components.alarm_control_panel import (
     DOMAIN,
     AlarmControlPanelEntityFeature,
-    AlarmControlPanelState,
 )
 from homeassistant.components.device_automation import DeviceAutomationType
-from homeassistant.const import EntityCategory
+from homeassistant.const import (
+    STATE_ALARM_ARMED_AWAY,
+    STATE_ALARM_ARMED_CUSTOM_BYPASS,
+    STATE_ALARM_ARMED_HOME,
+    STATE_ALARM_ARMED_NIGHT,
+    STATE_ALARM_ARMED_VACATION,
+    STATE_ALARM_DISARMED,
+    STATE_ALARM_TRIGGERED,
+    EntityCategory,
+)
 from homeassistant.core import HomeAssistant, ServiceCall
 from homeassistant.helpers import device_registry as dr, entity_registry as er
 from homeassistant.setup import async_setup_component
@@ -346,7 +354,7 @@ async def test_if_state(
             ]
         },
     )
-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED)
     hass.bus.async_fire("test_event1")
     hass.bus.async_fire("test_event2")
     hass.bus.async_fire("test_event3")
@@ -358,7 +366,7 @@
     assert len(service_calls) == 1
     assert service_calls[0].data["some"] == "is_triggered - event - test_event1"

-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.DISARMED)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_DISARMED)
     hass.bus.async_fire("test_event1")
     hass.bus.async_fire("test_event2")
     hass.bus.async_fire("test_event3")
@@ -370,7 +378,7 @@
     assert len(service_calls) == 2
     assert service_calls[1].data["some"] == "is_disarmed - event - test_event2"

-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_HOME)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_HOME)
     hass.bus.async_fire("test_event1")
     hass.bus.async_fire("test_event2")
     hass.bus.async_fire("test_event3")
@@ -382,7 +390,7 @@
     assert len(service_calls) == 3
     assert service_calls[2].data["some"] == "is_armed_home - event - test_event3"

-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_AWAY)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_AWAY)
     hass.bus.async_fire("test_event1")
     hass.bus.async_fire("test_event2")
     hass.bus.async_fire("test_event3")
@@ -394,7 +402,7 @@
     assert len(service_calls) == 4
     assert service_calls[3].data["some"] == "is_armed_away - event - test_event4"

-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_NIGHT)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_NIGHT)
     hass.bus.async_fire("test_event1")
     hass.bus.async_fire("test_event2")
     hass.bus.async_fire("test_event3")
@@ -406,7 +414,7 @@
     assert len(service_calls) == 5
     assert service_calls[4].data["some"] == "is_armed_night - event - test_event5"

-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_VACATION)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_VACATION)
     hass.bus.async_fire("test_event1")
     hass.bus.async_fire("test_event2")
     hass.bus.async_fire("test_event3")
@@ -418,7 +426,7 @@
     assert len(service_calls) == 6
     assert service_calls[5].data["some"] == "is_armed_vacation - event - test_event6"

-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_CUSTOM_BYPASS)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_CUSTOM_BYPASS)
     hass.bus.async_fire("test_event1")
     hass.bus.async_fire("test_event2")
     hass.bus.async_fire("test_event3")
@@ -480,7 +488,7 @@ async def test_if_state_legacy(
             ]
         },
     )
-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED)
     hass.bus.async_fire("test_event1")
     await hass.async_block_till_done()
     assert len(service_calls) == 1
diff --git a/tests/components/alarm_control_panel/test_device_trigger.py b/tests/components/alarm_control_panel/test_device_trigger.py
index 17a301ccdf1..46eba314dc1 100644
--- a/tests/components/alarm_control_panel/test_device_trigger.py
+++ b/tests/components/alarm_control_panel/test_device_trigger.py
@@ -9,10 +9,18 @@ from homeassistant.components import automation
 from homeassistant.components.alarm_control_panel import (
     DOMAIN,
     AlarmControlPanelEntityFeature,
-    AlarmControlPanelState,
 )
 from homeassistant.components.device_automation import DeviceAutomationType
-from homeassistant.const import EntityCategory
+from homeassistant.const import (
+    STATE_ALARM_ARMED_AWAY,
+    STATE_ALARM_ARMED_HOME,
+    STATE_ALARM_ARMED_NIGHT,
+    STATE_ALARM_ARMED_VACATION,
+    STATE_ALARM_DISARMED,
+    STATE_ALARM_PENDING,
+    STATE_ALARM_TRIGGERED,
+    EntityCategory,
+)
 from homeassistant.core import HomeAssistant, ServiceCall
 from homeassistant.helpers import device_registry as dr, entity_registry as er
 from homeassistant.setup import async_setup_component
@@ -248,7 +256,7 @@ async def test_if_fires_on_state_change(
         DOMAIN, "test", "5678", device_id=device_entry.id
     )

-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.PENDING)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_PENDING)

     assert await async_setup_component(
         hass,
@@ -392,7 +400,7 @@ async def test_if_fires_on_state_change(
     )

     # Fake that the entity is triggered.
-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED)
     await hass.async_block_till_done()
     assert len(service_calls) == 1
     assert (
@@ -401,7 +409,7 @@
     )

     # Fake that the entity is disarmed.
-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.DISARMED)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_DISARMED)
     await hass.async_block_till_done()
     assert len(service_calls) == 2
     assert (
@@ -410,7 +418,7 @@
     )

     # Fake that the entity is armed home.
-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_HOME)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_HOME)
     await hass.async_block_till_done()
     assert len(service_calls) == 3
     assert (
@@ -419,7 +427,7 @@
     )

     # Fake that the entity is armed away.
-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_AWAY)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_AWAY)
     await hass.async_block_till_done()
     assert len(service_calls) == 4
     assert (
@@ -428,7 +436,7 @@
     )

     # Fake that the entity is armed night.
-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_NIGHT)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_NIGHT)
     await hass.async_block_till_done()
     assert len(service_calls) == 5
     assert (
@@ -437,7 +445,7 @@
     )

     # Fake that the entity is armed vacation.
-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_VACATION)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_VACATION)
     await hass.async_block_till_done()
     assert len(service_calls) == 6
     assert (
@@ -463,7 +471,7 @@ async def test_if_fires_on_state_change_with_for(
         DOMAIN, "test", "5678", device_id=device_entry.id
     )

-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.DISARMED)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_DISARMED)

     assert await async_setup_component(
         hass,
@@ -498,7 +506,7 @@ async def test_if_fires_on_state_change_with_for(
     await hass.async_block_till_done()
     assert len(service_calls) == 0

-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED)
     await hass.async_block_till_done()
     assert len(service_calls) == 0
     async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10))
@@ -528,7 +536,7 @@ async def test_if_fires_on_state_change_legacy(
         DOMAIN, "test", "5678", device_id=device_entry.id
     )

-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.DISARMED)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_DISARMED)

     assert await async_setup_component(
         hass,
@@ -562,7 +570,7 @@ async def test_if_fires_on_state_change_legacy(
     await hass.async_block_till_done()
     assert len(service_calls) == 0

-    hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED)
+    hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED)
     await hass.async_block_till_done()
     assert len(service_calls) == 1
     assert (
diff --git a/tests/components/alarm_control_panel/test_init.py b/tests/components/alarm_control_panel/test_init.py
index 89a2a2a2b1a..06724978ce3 100644
--- a/tests/components/alarm_control_panel/test_init.py
+++ b/tests/components/alarm_control_panel/test_init.py
@@ -2,17 +2,14 @@

 from types import ModuleType
 from typing import Any
-from unittest.mock import patch

 import pytest

 from homeassistant.components import alarm_control_panel
-from homeassistant.components.alarm_control_panel import (
-    DOMAIN as ALARM_CONTROL_PANEL_DOMAIN,
+from homeassistant.components.alarm_control_panel.const import (
     AlarmControlPanelEntityFeature,
     CodeFormat,
 )
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     ATTR_CODE,
     SERVICE_ALARM_ARM_AWAY,
@@ -26,20 +23,11 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ServiceValidationError
 from homeassistant.helpers import entity_registry as er
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import UNDEFINED, UndefinedType

-from .conftest import TEST_DOMAIN, MockAlarmControlPanel
+from .conftest import MockAlarmControlPanel

-from tests.common import (
-    MockConfigEntry,
-    MockModule,
-    MockPlatform,
-    help_test_all,
-    import_and_test_deprecated_constant_enum,
-    mock_integration,
-    mock_platform,
-)
+from tests.common import help_test_all, import_and_test_deprecated_constant_enum


 async def help_test_async_alarm_control_panel_service(
@@ -295,290 +283,3 @@ async def test_alarm_control_panel_with_default_code(
         hass, mock_alarm_control_panel_entity.entity_id, SERVICE_ALARM_DISARM
     )
     mock_alarm_control_panel_entity.calls_disarm.assert_called_with("1234")
-
-
-async def test_alarm_control_panel_not_log_deprecated_state_warning(
-    hass: HomeAssistant,
-    mock_alarm_control_panel_entity: MockAlarmControlPanel,
-    caplog: pytest.LogCaptureFixture,
-) -> None:
-    """Test correctly using alarm_state doesn't log issue or raise repair."""
-    state = hass.states.get(mock_alarm_control_panel_entity.entity_id)
-    assert state is not None
-    assert "Entities should implement the 'alarm_state' property and" not in caplog.text
-
-
-async def test_alarm_control_panel_log_deprecated_state_warning_using_state_prop(
-    hass: HomeAssistant,
-    code_format: CodeFormat | None,
-    supported_features: AlarmControlPanelEntityFeature,
-    code_arm_required: bool,
-    caplog: pytest.LogCaptureFixture,
-) -> None:
-    """Test incorrectly using state property does log issue and raise repair."""
-
-    async def async_setup_entry_init(
-        hass: HomeAssistant, config_entry: ConfigEntry
-    ) -> bool:
-        """Set up test config entry."""
-        await hass.config_entries.async_forward_entry_setups(
-            config_entry, [ALARM_CONTROL_PANEL_DOMAIN]
-        )
-        return True
-
-    mock_integration(
-        hass,
-        MockModule(
-            TEST_DOMAIN,
-            async_setup_entry=async_setup_entry_init,
-        ),
-    )
-
-    class MockLegacyAlarmControlPanel(MockAlarmControlPanel):
-        """Mocked alarm control entity."""
-
-        def __init__(
-            self,
-            supported_features: AlarmControlPanelEntityFeature = AlarmControlPanelEntityFeature(
-                0
-            ),
-            code_format: CodeFormat | None = None,
-            code_arm_required: bool = True,
-        ) -> None:
-            """Initialize the alarm control."""
-            super().__init__(supported_features, code_format, code_arm_required)
-
-        @property
-        def state(self) -> str:
-            """Return the state of the entity."""
-            return "disarmed"
-
-    entity = MockLegacyAlarmControlPanel(
-        supported_features=supported_features,
-        code_format=code_format,
-        code_arm_required=code_arm_required,
-    )
-
-    async def async_setup_entry_platform(
-        hass: HomeAssistant,
-        config_entry: ConfigEntry,
-        async_add_entities: AddEntitiesCallback,
-    ) -> None:
-        """Set up test alarm control panel platform via config entry."""
-        async_add_entities([entity])
-
-    mock_platform(
-        hass,
-        f"{TEST_DOMAIN}.{ALARM_CONTROL_PANEL_DOMAIN}",
-        MockPlatform(async_setup_entry=async_setup_entry_platform),
-    )
-
-    with patch.object(
-        MockLegacyAlarmControlPanel,
-        "__module__",
-        "tests.custom_components.test.alarm_control_panel",
-    ):
-        config_entry = MockConfigEntry(domain=TEST_DOMAIN)
-        config_entry.add_to_hass(hass)
-        assert await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
-
-    state = hass.states.get(entity.entity_id)
-    assert state is not None
-
-    assert "Entities should implement the 'alarm_state' property and" in caplog.text
-
-
-async def test_alarm_control_panel_log_deprecated_state_warning_using_attr_state_attr(
-    hass: HomeAssistant,
-    code_format: CodeFormat | None,
-    supported_features: AlarmControlPanelEntityFeature,
-    code_arm_required: bool,
-    caplog: pytest.LogCaptureFixture,
-) -> None:
-    """Test incorrectly using _attr_state attribute does log issue and raise repair."""
-
-    async def async_setup_entry_init(
-        hass: HomeAssistant, config_entry: ConfigEntry
-    ) -> bool:
-        """Set up test config entry."""
-        await hass.config_entries.async_forward_entry_setups(
-            config_entry, [ALARM_CONTROL_PANEL_DOMAIN]
-        )
-        return True
-
-    mock_integration(
-        hass,
-        MockModule(
-            TEST_DOMAIN,
-            async_setup_entry=async_setup_entry_init,
-        ),
-    )
-
-    class MockLegacyAlarmControlPanel(MockAlarmControlPanel):
-        """Mocked alarm control entity."""
-
-        def __init__(
-            self,
-            supported_features: AlarmControlPanelEntityFeature = AlarmControlPanelEntityFeature(
-                0
-            ),
-            code_format: CodeFormat | None = None,
-            code_arm_required: bool = True,
-        ) -> None:
-            """Initialize the alarm control."""
-            super().__init__(supported_features, code_format, code_arm_required)
-
-        def alarm_disarm(self, code: str | None = None) -> None:
-            """Mock alarm disarm calls."""
-            self._attr_state = "disarmed"
-
-    entity = MockLegacyAlarmControlPanel(
-        supported_features=supported_features,
-        code_format=code_format,
-        code_arm_required=code_arm_required,
-    )
-
-    async def async_setup_entry_platform(
-        hass: HomeAssistant,
-        config_entry: ConfigEntry,
-        async_add_entities: AddEntitiesCallback,
-    ) -> None:
-        """Set up test alarm control panel platform via config entry."""
-        async_add_entities([entity])
-
-    mock_platform(
-        hass,
-        f"{TEST_DOMAIN}.{ALARM_CONTROL_PANEL_DOMAIN}",
-        MockPlatform(async_setup_entry=async_setup_entry_platform),
-    )
-
-    with patch.object(
-        MockLegacyAlarmControlPanel,
-        "__module__",
-        "tests.custom_components.test.alarm_control_panel",
-    ):
-        config_entry = MockConfigEntry(domain=TEST_DOMAIN)
-        config_entry.add_to_hass(hass)
-        assert await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
-
-    state = hass.states.get(entity.entity_id)
-    assert state is not None
-
-    assert "Entities should implement the 'alarm_state' property and" not in caplog.text
-
-    with patch.object(
-        MockLegacyAlarmControlPanel,
-        "__module__",
-        "tests.custom_components.test.alarm_control_panel",
-    ):
-        await help_test_async_alarm_control_panel_service(
-            hass, entity.entity_id, SERVICE_ALARM_DISARM
-        )
-
-    assert "Entities should implement the 'alarm_state' property and" in caplog.text
-    caplog.clear()
-    with patch.object(
-        MockLegacyAlarmControlPanel,
-        "__module__",
-        "tests.custom_components.test.alarm_control_panel",
-    ):
-        await help_test_async_alarm_control_panel_service(
-            hass, entity.entity_id, SERVICE_ALARM_DISARM
-        )
-    # Test we only log once
-    assert "Entities should implement the 'alarm_state' property and" not in caplog.text
-
-
-async def test_alarm_control_panel_deprecated_state_does_not_break_state(
-    hass: HomeAssistant,
-    code_format: CodeFormat | None,
-    supported_features: AlarmControlPanelEntityFeature,
-    code_arm_required: bool,
-    caplog: pytest.LogCaptureFixture,
-) -> None:
-    """Test using _attr_state attribute does not break state."""
-
-    async def async_setup_entry_init(
-        hass: HomeAssistant, config_entry: ConfigEntry
-    ) -> bool:
-        """Set up test config entry."""
-        await hass.config_entries.async_forward_entry_setups(
-            config_entry, [ALARM_CONTROL_PANEL_DOMAIN]
-        )
-        return True
-
-    mock_integration(
-        hass,
-        MockModule(
-            TEST_DOMAIN,
-            async_setup_entry=async_setup_entry_init,
-        ),
-    )
-
-    class MockLegacyAlarmControlPanel(MockAlarmControlPanel):
-        """Mocked alarm control entity."""
-
-        def __init__(
-            self,
-            supported_features: AlarmControlPanelEntityFeature = AlarmControlPanelEntityFeature(
-                0
-            ),
-            code_format: CodeFormat | None = None,
-            code_arm_required: bool = True,
-        ) -> None:
-            """Initialize the alarm control."""
-            self._attr_state = "armed_away"
-            super().__init__(supported_features, code_format, code_arm_required)
-
-        def alarm_disarm(self, code: str | None = None) -> None:
-            """Mock alarm disarm calls."""
-            self._attr_state = "disarmed"
-
-    entity = MockLegacyAlarmControlPanel(
-        supported_features=supported_features,
-        code_format=code_format,
-        code_arm_required=code_arm_required,
-    )
-
-    async def async_setup_entry_platform(
-        hass: HomeAssistant,
-        config_entry: ConfigEntry,
-        async_add_entities: AddEntitiesCallback,
-    ) -> None:
-        """Set up test alarm control panel platform via config entry."""
-        async_add_entities([entity])
-
-    mock_platform(
-        hass,
-        f"{TEST_DOMAIN}.{ALARM_CONTROL_PANEL_DOMAIN}",
-        MockPlatform(async_setup_entry=async_setup_entry_platform),
-    )
-
-    with patch.object(
-        MockLegacyAlarmControlPanel,
-        "__module__",
-        "tests.custom_components.test.alarm_control_panel",
-    ):
-        config_entry = MockConfigEntry(domain=TEST_DOMAIN)
-        config_entry.add_to_hass(hass)
-        assert await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
-
-    state = hass.states.get(entity.entity_id)
-    assert state is not None
-    assert state.state == "armed_away"
-
-    with patch.object(
-        MockLegacyAlarmControlPanel,
-        "__module__",
-        "tests.custom_components.test.alarm_control_panel",
-    ):
-        await help_test_async_alarm_control_panel_service(
-            hass, entity.entity_id, SERVICE_ALARM_DISARM
-        )
-
-    state = hass.states.get(entity.entity_id)
-    assert state is not None
-    assert state.state == "disarmed"
diff --git a/tests/components/alarm_control_panel/test_reproduce_state.py b/tests/components/alarm_control_panel/test_reproduce_state.py
index fcb4fdee36e..c7984b0793e 100644
--- a/tests/components/alarm_control_panel/test_reproduce_state.py
+++ b/tests/components/alarm_control_panel/test_reproduce_state.py
@@ -2,7 +2,6 @@

 import pytest

-from homeassistant.components.alarm_control_panel import AlarmControlPanelState
 from homeassistant.const import (
     SERVICE_ALARM_ARM_AWAY,
     SERVICE_ALARM_ARM_CUSTOM_BYPASS,
@@ -11,6 +10,13 @@ from homeassistant.const import (
     SERVICE_ALARM_ARM_VACATION,
     SERVICE_ALARM_DISARM,
     SERVICE_ALARM_TRIGGER,
+    STATE_ALARM_ARMED_AWAY,
+    STATE_ALARM_ARMED_CUSTOM_BYPASS,
+    STATE_ALARM_ARMED_HOME,
+    STATE_ALARM_ARMED_NIGHT,
+    STATE_ALARM_ARMED_VACATION,
+    STATE_ALARM_DISARMED,
+    STATE_ALARM_TRIGGERED,
 )
 from homeassistant.core import HomeAssistant, State
 from homeassistant.helpers.state import async_reproduce_state
@@ -23,37 +29,27 @@ async def test_reproducing_states(
 ) -> None:
     """Test reproducing Alarm control panel states."""
     hass.states.async_set(
-        "alarm_control_panel.entity_armed_away",
-        AlarmControlPanelState.ARMED_AWAY,
-        {},
+        "alarm_control_panel.entity_armed_away", STATE_ALARM_ARMED_AWAY, {}
     )
     hass.states.async_set(
         "alarm_control_panel.entity_armed_custom_bypass",
-        AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
+        STATE_ALARM_ARMED_CUSTOM_BYPASS,
         {},
     )
     hass.states.async_set(
-        "alarm_control_panel.entity_armed_home",
-        AlarmControlPanelState.ARMED_HOME,
-        {},
+        "alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_HOME, {}
     )
     hass.states.async_set(
-        "alarm_control_panel.entity_armed_night",
-        AlarmControlPanelState.ARMED_NIGHT,
-        {},
+        "alarm_control_panel.entity_armed_night", STATE_ALARM_ARMED_NIGHT, {}
     )
     hass.states.async_set(
-        "alarm_control_panel.entity_armed_vacation",
-        AlarmControlPanelState.ARMED_VACATION,
-        {},
+        "alarm_control_panel.entity_armed_vacation", STATE_ALARM_ARMED_VACATION, {}
     )
     hass.states.async_set(
-        "alarm_control_panel.entity_disarmed", AlarmControlPanelState.DISARMED, {}
+        "alarm_control_panel.entity_disarmed", STATE_ALARM_DISARMED, {}
     )
     hass.states.async_set(
-        "alarm_control_panel.entity_triggered",
-        AlarmControlPanelState.TRIGGERED,
-        {},
+        "alarm_control_panel.entity_triggered", STATE_ALARM_TRIGGERED, {}
     )

     arm_away_calls = async_mock_service(
@@ -80,34 +76,18 @@
     await async_reproduce_state(
         hass,
         [
-            State(
-                "alarm_control_panel.entity_armed_away",
-                AlarmControlPanelState.ARMED_AWAY,
-            ),
+            State("alarm_control_panel.entity_armed_away", STATE_ALARM_ARMED_AWAY),
             State(
                 "alarm_control_panel.entity_armed_custom_bypass",
-                AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
+                STATE_ALARM_ARMED_CUSTOM_BYPASS,
             ),
+            State("alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_HOME),
+            State("alarm_control_panel.entity_armed_night", STATE_ALARM_ARMED_NIGHT),
             State(
-                "alarm_control_panel.entity_armed_home",
-                AlarmControlPanelState.ARMED_HOME,
-            ),
-            State(
-                "alarm_control_panel.entity_armed_night",
-                AlarmControlPanelState.ARMED_NIGHT,
-            ),
-            State(
-                "alarm_control_panel.entity_armed_vacation",
-                AlarmControlPanelState.ARMED_VACATION,
-            ),
-            State(
-                "alarm_control_panel.entity_disarmed",
-                AlarmControlPanelState.DISARMED,
-            ),
-            State(
-                "alarm_control_panel.entity_triggered",
-                AlarmControlPanelState.TRIGGERED,
+                "alarm_control_panel.entity_armed_vacation", STATE_ALARM_ARMED_VACATION
             ),
+            State("alarm_control_panel.entity_disarmed", STATE_ALARM_DISARMED),
+            State("alarm_control_panel.entity_triggered", STATE_ALARM_TRIGGERED),
         ],
     )
@@ -137,34 +117,17 @@
     await async_reproduce_state(
         hass,
         [
+            State("alarm_control_panel.entity_armed_away", STATE_ALARM_TRIGGERED),
             State(
-                "alarm_control_panel.entity_armed_away",
-                AlarmControlPanelState.TRIGGERED,
+                "alarm_control_panel.entity_armed_custom_bypass", STATE_ALARM_ARMED_AWAY
             ),
             State(
-                "alarm_control_panel.entity_armed_custom_bypass",
-                AlarmControlPanelState.ARMED_AWAY,
-            ),
-            State(
-                "alarm_control_panel.entity_armed_home",
-                AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
-            ),
-            State(
-                "alarm_control_panel.entity_armed_night",
-                AlarmControlPanelState.ARMED_HOME,
-            ),
-            State(
-                "alarm_control_panel.entity_armed_vacation",
-                AlarmControlPanelState.ARMED_NIGHT,
-            ),
-            State(
-                "alarm_control_panel.entity_disarmed",
-                AlarmControlPanelState.ARMED_VACATION,
-            ),
-            State(
-                "alarm_control_panel.entity_triggered",
-                AlarmControlPanelState.DISARMED,
+                "alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_CUSTOM_BYPASS
             ),
+            State("alarm_control_panel.entity_armed_night", STATE_ALARM_ARMED_HOME),
+            State("alarm_control_panel.entity_armed_vacation", STATE_ALARM_ARMED_NIGHT),
+            State("alarm_control_panel.entity_disarmed", STATE_ALARM_ARMED_VACATION),
+            State("alarm_control_panel.entity_triggered", STATE_ALARM_DISARMED),
             # Should not raise
             State("alarm_control_panel.non_existing", "on"),
         ],
diff --git a/tests/components/alert/test_init.py b/tests/components/alert/test_init.py
index 263fb69c883..31236c84f34 100644
--- a/tests/components/alert/test_init.py
+++ b/tests/components/alert/test_init.py
@@ -337,7 +337,7 @@ async def test_skipfirst(hass: HomeAssistant, mock_notifier: list[ServiceCall])
 async def test_done_message_state_tracker_reset_on_cancel(hass: HomeAssistant) -> None:
     """Test that the done message is reset when canceled."""
-    entity = alert.AlertEntity(hass, *TEST_NOACK)
+    entity = alert.Alert(hass, *TEST_NOACK)
     entity._cancel = lambda *args: None
     assert entity._send_done_message is False
     entity._send_done_message = True
diff --git a/tests/components/alexa/test_auth.py b/tests/components/alexa/test_auth.py
index b3aa645bfcb..8d4308ba792 100644
--- a/tests/components/alexa/test_auth.py
+++ b/tests/components/alexa/test_auth.py
@@ -10,14 +10,14 @@
 from tests.test_util.aiohttp import AiohttpClientMocker


 async def run_auth_get_access_token(
-    hass: HomeAssistant,
-    aioclient_mock: AiohttpClientMocker,
-    expires_in: int,
-    client_id: str,
-    client_secret: str,
-    accept_grant_code: str,
-    refresh_token: str,
-) -> None:
+    hass,
+    aioclient_mock,
+    expires_in,
+    client_id,
+    client_secret,
+    accept_grant_code,
+    refresh_token,
+):
     """Do auth and request a new token for tests."""
     aioclient_mock.post(
         TEST_TOKEN_URL,
diff --git a/tests/components/alexa/test_capabilities.py b/tests/components/alexa/test_capabilities.py
index a41c2f47b2d..15a4bd6d9a1 100644
--- a/tests/components/alexa/test_capabilities.py
+++ b/tests/components/alexa/test_capabilities.py
@@ -5,14 +5,13 @@ from unittest.mock import patch

 import pytest

-from homeassistant.components.alarm_control_panel import AlarmControlPanelState
 from homeassistant.components.alexa import smart_home
 from homeassistant.components.climate import (
     ATTR_CURRENT_TEMPERATURE,
     ClimateEntityFeature,
     HVACMode,
 )
-from homeassistant.components.lock import LockState
+from homeassistant.components.lock import STATE_JAMMED, STATE_LOCKING, STATE_UNLOCKING
 from homeassistant.components.media_player import MediaPlayerEntityFeature
 from homeassistant.components.valve import ValveEntityFeature
 from homeassistant.components.water_heater import (
@@ -24,9 +23,16 @@ from homeassistant.components.water_heater import (
 )
 from homeassistant.const import (
     ATTR_UNIT_OF_MEASUREMENT,
+    STATE_ALARM_ARMED_AWAY,
+    STATE_ALARM_ARMED_CUSTOM_BYPASS,
+    STATE_ALARM_ARMED_HOME,
+    STATE_ALARM_ARMED_NIGHT,
+    STATE_ALARM_DISARMED,
+    STATE_LOCKED,
     STATE_OFF,
     STATE_UNAVAILABLE,
     STATE_UNKNOWN,
+    STATE_UNLOCKED,
     UnitOfTemperature,
 )
 from homeassistant.core import HomeAssistant
@@ -42,42 +48,6 @@ from .test_common import (

 from tests.common import async_mock_service


-@pytest.mark.parametrize(
-    (
-        "current_activity",
-        "activity_list",
-    ),
-    [
-        ("TV", ["TV", "MUSIC", "DVD"]),
-        ("TV", ["TV"]),
-    ],
-)
-async def test_discovery_remote(
-    hass: HomeAssistant, current_activity: str, activity_list: list[str]
-) -> None:
-    """Test discory for a remote entity."""
-    request = get_new_request("Alexa.Discovery", "Discover")
-    # setup test device
-    hass.states.async_set(
-        "remote.test",
-        "off",
-        {
-            "current_activity": current_activity,
-            "activity_list": activity_list,
-            "supported_features": 4,
-        },
-    )
-    msg = await smart_home.async_handle_message(hass, get_default_config(hass), request)
-    assert "event" in msg
-    msg = msg["event"]
-    assert len(msg["payload"]["endpoints"]) == 1
-    endpoint = msg["payload"]["endpoints"][0]
-    assert endpoint["endpointId"] == "remote#test"
-    interfaces = {capability["interface"] for capability in endpoint["capabilities"]}
-    assert "Alexa.PowerController" in interfaces
-    assert "Alexa.ModeController" in interfaces
-
-
 @pytest.mark.parametrize("adjust", ["-5", "5", "-80"])
 async def test_api_adjust_brightness(hass: HomeAssistant, adjust: str) -> None:
     """Test api adjust brightness process."""
@@ -229,6 +199,7 @@ async def test_api_increase_color_temp(
         ("media_player", "GAME CONSOLE", ["tv", "game console", 10000], 1),
         ("media_player", "SATELLITE TV", ["satellite-tv", "game console", None], 0),
         ("media_player", "SATELLITE TV", ["satellite_tv", "game console"], 0),
+        ("media_player", "BAD DEVICE", ["satellite_tv", "game console"], None),
     ],
 )
 async def test_api_select_input(
@@ -249,6 +220,18 @@ async def test_api_select_input(
         },
     )

+    # test where no source matches
+    if idx is None:
+        await assert_request_fails(
+            "Alexa.InputController",
+            "SelectInput",
+            "media_player#test",
+            "media_player.select_source",
+            hass,
+            payload={"input": payload},
+        )
+        return
+
     call, _ = await assert_request_calls_service(
         "Alexa.InputController",
         "SelectInput",
@@ -260,137 +243,13 @@ async def test_api_select_input(
     assert call.data["source"] == source_list[idx]


-@pytest.mark.parametrize(
-    ("source_list"),
-    [(["satellite_tv", "game console"]), ([])],
-)
-async def test_api_select_input_fails(
-    hass: HomeAssistant,
-    source_list: list[Any],
-) -> None:
-    """Test api set input process fails."""
-    hass.states.async_set(
-        "media_player.test",
-        "off",
-        {
-            "friendly_name": "Test media player",
-            "source": "unknown",
-            "source_list": source_list,
-        },
-    )
-    await assert_request_fails(
-        "Alexa.InputController",
-        "SelectInput",
-        "media_player#test",
-        "media_player.select_source",
-        hass,
-        payload={"input": "BAD DEVICE"},
-    )
-
-
-@pytest.mark.parametrize(
-    ("activity", "activity_list", "target_activity_index"),
-    [
-        ("TV", ["TV", "MUSIC", "DVD"], 0),
-        ("MUSIC", ["TV", "MUSIC", "DVD", 1000], 1),
-        ("DVD", ["TV", "MUSIC", "DVD", None], 2),
-        ("TV", ["TV"], 0),
-    ],
-)
-async def test_api_select_activity(
-    hass: HomeAssistant,
-    activity: str,
-    activity_list: list[str],
-    target_activity_index: int | None,
-) -> None:
-    """Test api set activity process."""
-    hass.states.async_set(
-        "remote.test",
-        "off",
-        {
-            "current_activity": activity,
-            "activity_list": activity_list,
-        },
-    )
-    call, _ = await assert_request_calls_service(
-        "Alexa.ModeController",
-        "SetMode",
-        "remote#test",
-        "remote.turn_on",
-        hass,
-        payload={"mode": f"activity.{activity}"},
-        instance="remote.activity",
-    )
-    assert call.data["activity"] == activity_list[target_activity_index]
-
-
-@pytest.mark.parametrize(("activity_list"), [(["TV", "MUSIC", "DVD"]), ([])])
-async def test_api_select_activity_fails(
-    hass: HomeAssistant, activity_list: list[str]
-) -> None:
-    """Test api set activity process fails."""
-    hass.states.async_set(
-        "remote.test",
-        "off",
-        {
-            "current_activity": None,
-            "activity_list": activity_list,
-        },
-    )
-    await assert_request_fails(
-        "Alexa.ModeController",
-        "SetMode",
-        "remote#test",
-        "remote.turn_on",
-        hass,
-        payload={"mode": "activity.BAD"},
-        instance="remote.activity",
-    )
-
-
-@pytest.mark.parametrize(
-    (
-        "current_state",
-        "target_name",
-        "target_service",
-    ),
-    [
-        ("on", "TurnOff", "turn_off"),
-        ("off", "TurnOn", "turn_on"),
-    ],
-)
-async def test_api_remote_set_power_state(
-    hass: HomeAssistant,
-    current_state: str,
-    target_name: str,
-    target_service: str,
-) -> None:
-    """Test api remote set power state process."""
-    hass.states.async_set(
-        "remote.test",
-        current_state,
-        {
-            "current_activity": ["TV", "MUSIC", "DVD"],
-            "activity_list": "TV",
-        },
-    )
-
-    _, msg = await assert_request_calls_service(
-        "Alexa.PowerController",
-        target_name,
-        "remote#test",
-        f"remote.{target_service}",
-        hass,
-    )
-
-
 async def test_report_lock_state(hass: HomeAssistant) -> None:
     """Test LockController implements lockState property."""
-    hass.states.async_set("lock.locked", LockState.LOCKED, {})
-    hass.states.async_set("lock.unlocked", LockState.UNLOCKED, {})
-    hass.states.async_set("lock.unlocking", LockState.UNLOCKING, {})
-    hass.states.async_set("lock.locking", LockState.LOCKING, {})
-    hass.states.async_set("lock.jammed", LockState.JAMMED, {})
+    hass.states.async_set("lock.locked", STATE_LOCKED, {})
+    hass.states.async_set("lock.unlocked", STATE_UNLOCKED, {})
+    hass.states.async_set("lock.unlocking", STATE_UNLOCKING, {})
+    hass.states.async_set("lock.locking", STATE_LOCKING, {})
+    hass.states.async_set("lock.jammed", STATE_JAMMED, {})
     hass.states.async_set("lock.unknown", STATE_UNKNOWN, {})

     properties = await reported_properties(hass, "lock.locked")
@@ -760,77 +619,6 @@ async def test_report_fan_direction(hass: HomeAssistant) -> None:
     properties.assert_equal("Alexa.ModeController", "mode", "direction.forward")


-async def test_report_remote_power(hass: HomeAssistant) -> None:
-    """Test ModeController reports remote power state correctly."""
-    hass.states.async_set(
-        "remote.off",
-        "off",
-        {"current_activity": "TV", "activity_list": ["TV", "MUSIC", "DVD"]},
-    )
-    hass.states.async_set(
-        "remote.on",
-        "on",
-        {"current_activity": "TV", "activity_list": ["TV", "MUSIC", "DVD"]},
-    )
-
-    properties = await reported_properties(hass, "remote#off")
-    properties.assert_equal("Alexa.PowerController", "powerState", "OFF")
-
-    properties = await reported_properties(hass, "remote#on")
-    properties.assert_equal("Alexa.PowerController", "powerState", "ON")
-
-
-async def test_report_remote_activity(hass: HomeAssistant) -> None:
-    """Test ModeController reports remote activity correctly."""
-    hass.states.async_set(
-        "remote.unknown",
-        "on",
-        {
-            "current_activity": "UNKNOWN",
-            "supported_features": 4,
-        },
-    )
-    hass.states.async_set(
-        "remote.tv",
-        "on",
-        {
-            "current_activity": "TV",
-            "activity_list": ["TV", "MUSIC", "DVD"],
-            "supported_features": 4,
-        },
-    )
-    hass.states.async_set(
-        "remote.music",
-        "on",
-        {
-            "current_activity": "MUSIC",
-            "activity_list": ["TV", "MUSIC", "DVD"],
-            "supported_features": 4,
-        },
-    )
-    hass.states.async_set(
-        "remote.dvd",
-        "on",
-        {
-            "current_activity": "DVD",
-            "activity_list": ["TV", "MUSIC", "DVD"],
-            "supported_features": 4,
-        },
-    )
-
-    properties = await reported_properties(hass, "remote#unknown")
-    properties.assert_not_has_property("Alexa.ModeController", "mode")
-
-    properties = await reported_properties(hass, "remote#tv")
-    properties.assert_equal("Alexa.ModeController", "mode", "activity.TV")
-
-    properties = await reported_properties(hass, "remote#music")
-    properties.assert_equal("Alexa.ModeController", "mode", "activity.MUSIC")
-
-    properties = await reported_properties(hass, "remote#dvd")
-    properties.assert_equal("Alexa.ModeController", "mode", "activity.DVD")
-
-
 async def test_report_cover_range_value(hass: HomeAssistant) -> None:
     """Test RangeController reports cover position correctly."""
     hass.states.async_set(
@@ -1347,23 +1135,15 @@ async def test_temperature_sensor_water_heater(hass: HomeAssistant) -> None:

 async def test_report_alarm_control_panel_state(hass: HomeAssistant) -> None:
     """Test SecurityPanelController implements armState property."""
+    hass.states.async_set("alarm_control_panel.armed_away", STATE_ALARM_ARMED_AWAY, {})
     hass.states.async_set(
-        "alarm_control_panel.armed_away", AlarmControlPanelState.ARMED_AWAY, {}
+        "alarm_control_panel.armed_custom_bypass", STATE_ALARM_ARMED_CUSTOM_BYPASS, {}
     )
+    hass.states.async_set("alarm_control_panel.armed_home", STATE_ALARM_ARMED_HOME, {})
     hass.states.async_set(
-        "alarm_control_panel.armed_custom_bypass",
-        AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
-        {},
-    )
-    hass.states.async_set(
-        "alarm_control_panel.armed_home", AlarmControlPanelState.ARMED_HOME, {}
-    )
-    hass.states.async_set(
-        "alarm_control_panel.armed_night", AlarmControlPanelState.ARMED_NIGHT, {}
-    )
-    hass.states.async_set(
-        "alarm_control_panel.disarmed", AlarmControlPanelState.DISARMED, {}
+        "alarm_control_panel.armed_night", STATE_ALARM_ARMED_NIGHT, {}
     )
+    hass.states.async_set("alarm_control_panel.disarmed", STATE_ALARM_DISARMED, {})

     properties = await reported_properties(hass, "alarm_control_panel.armed_away")
     properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_AWAY")
diff --git a/tests/components/alexa/test_common.py b/tests/components/alexa/test_common.py
index e78f2cba40f..9fdcc1c89c1 100644
--- a/tests/components/alexa/test_common.py
+++ b/tests/components/alexa/test_common.py
@@ -1,8 +1,5 @@
 """Test helpers for the Alexa integration."""

-from __future__ import annotations
-
-from typing import Any
 from unittest.mock import Mock
 from uuid import uuid4

@@ -10,7 +7,7 @@ import pytest

 from homeassistant.components.alexa import config, smart_home
 from homeassistant.components.alexa.const import CONF_ENDPOINT, CONF_FILTER, CONF_LOCALE
-from homeassistant.core import Context, HomeAssistant, ServiceCall, callback
+from homeassistant.core import Context, callback
 from homeassistant.helpers import entityfilter

 from tests.common import async_mock_service
@@ -31,7 +28,7 @@ class MockConfig(smart_home.AlexaConfig):
         "camera.test": {"display_categories": "CAMERA"},
     }

-    def __init__(self, hass: HomeAssistant) -> None:
+    def __init__(self, hass):
         """Mock Alexa config."""
         super().__init__(
             hass,
@@ -65,7 +62,7 @@ class MockConfig(smart_home.AlexaConfig):
         """Accept a grant."""


-def get_default_config(hass: HomeAssistant) -> MockConfig:
+def get_default_config(hass):
     """Return a MockConfig instance."""
     return MockConfig(hass)

@@ -96,15 +93,15 @@ def get_new_request(namespace, name, endpoint=None):


 async def assert_request_calls_service(
-    namespace: str,
-    name: str,
-    endpoint: str,
-    service: str,
-    hass: HomeAssistant,
+    namespace,
+    name,
+    endpoint,
+    service,
+    hass,
     response_type="Response",
-    payload: dict[str, Any] | None = None,
-    instance: str | None = None,
-) -> tuple[ServiceCall, dict[str, Any]]:
+    payload=None,
+    instance=None,
+):
     """Assert an API request calls a hass service."""
     context = Context()
     request = get_new_request(namespace, name, endpoint)
@@ -132,14 +129,8 @@ async def assert_request_calls_service(


 async def assert_request_fails(
-    namespace: str,
-    name: str,
-    endpoint: str,
-    service_not_called: str,
-    hass: HomeAssistant,
-    payload: dict[str, Any] | None = None,
-    instance: str | None = None,
-) -> None:
+    namespace, name, endpoint, service_not_called, hass, payload=None, instance=None
+):
     """Assert an API request returns an ErrorResponse."""
     request = get_new_request(namespace, name, endpoint)
     if payload:
@@ -161,12 +152,8 @@ async def assert_request_fails(


 async def assert_power_controller_works(
-    endpoint: str,
-    on_service: str,
-    off_service: str,
-    hass: HomeAssistant,
-    timestamp: str,
-) -> None:
+    endpoint, on_service, off_service, hass, timestamp
+):
     """Assert PowerController API requests work."""
     _, response = await assert_request_calls_service(
         "Alexa.PowerController", "TurnOn", endpoint, on_service, hass
@@ -182,12 +169,8 @@ async def assert_power_controller_works(


 async def assert_scene_controller_works(
-    endpoint: str,
-    activate_service: str,
-    deactivate_service: str,
-    hass: HomeAssistant,
-    timestamp: str,
-) -> None:
+    endpoint, activate_service, deactivate_service, hass, timestamp
+):
     """Assert SceneController API requests work."""
     _, response = await assert_request_calls_service(
         "Alexa.SceneController",
@@ -213,9 +196,7 @@
     assert response["event"]["payload"]["timestamp"] == timestamp


-async def reported_properties(
-    hass: HomeAssistant, endpoint: str, return_full_response: bool = False
-) -> ReportedProperties:
+async def reported_properties(hass, endpoint, return_full_response=False):
     """Use ReportState to get properties and return them.

     The result is a ReportedProperties instance, which has methods to make
@@ -232,7 +213,7 @@ async def reported_properties(
 class ReportedProperties:
     """Class to help assert reported properties."""

-    def __init__(self, properties) -> None:
+    def __init__(self, properties):
         """Initialize class."""
         self.properties = properties
diff --git a/tests/components/alexa/test_smart_home.py b/tests/components/alexa/test_smart_home.py
index 68010a6a711..d502dce7d01 100644
--- a/tests/components/alexa/test_smart_home.py
+++ b/tests/components/alexa/test_smart_home.py
@@ -12,6 +12,7 @@ from homeassistant.components.cover import CoverDeviceClass, CoverEntityFeature
 from homeassistant.components.media_player import MediaPlayerEntityFeature
 from homeassistant.components.vacuum import VacuumEntityFeature
 from homeassistant.components.valve import SERVICE_STOP_VALVE, ValveEntityFeature
+from homeassistant.config import async_process_ha_core_config
 from homeassistant.const import (
     SERVICE_CLOSE_VALVE,
     SERVICE_OPEN_VALVE,
@@ -19,7 +20,6 @@ from homeassistant.const import (
     UnitOfTemperature,
 )
 from homeassistant.core import Context, Event, HomeAssistant
-from homeassistant.core_config import async_process_ha_core_config
 from homeassistant.helpers import entityfilter
 from homeassistant.setup import async_setup_component
 from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM
@@ -120,9 +120,7 @@ async def test_wrong_version(hass: HomeAssistant) -> None:
         await smart_home.async_handle_message(hass, get_default_config(hass), msg)


-async def discovery_test(
-    device, hass: HomeAssistant, expected_endpoints: int = 1
-) -> dict[str, Any] | list[dict[str, Any]] | None:
+async def discovery_test(device, hass, expected_endpoints=1):
     """Test alexa discovery request."""
     request = get_new_request("Alexa.Discovery", "Discover")

@@ -1981,7 +1979,7 @@ async def test_cover_position(
             "friendly_name": "Test cover range",
             "device_class": "blind",
             "supported_features": supported_features,
-            "current_position": position,
+            "position": position,
         },
     )
     appliance = await discovery_test(device, hass)
@@ -2298,7 +2296,7 @@ async def test_cover_position_range(
             "friendly_name": "Test cover range",
             "device_class": "blind",
             "supported_features": 7,
-            "current_position": 30,
+            "position": 30,
         },
     )
     appliance = await discovery_test(device, hass)
@@ -2603,15 +2601,8 @@ async def test_stop_valve(


 async def assert_percentage_changes(
-    hass: HomeAssistant,
-    adjustments,
-    namespace,
-    name,
-    endpoint,
-    parameter,
-    service,
-    changed_parameter,
-) -> None:
+    hass, adjustments, namespace, name, endpoint, parameter, service, changed_parameter
+):
     """Assert an API request making percentage changes works.

     AdjustPercentage, AdjustBrightness, etc. are examples of such requests.
@@ -2625,15 +2616,8 @@ async def assert_percentage_changes(


 async def assert_range_changes(
-    hass: HomeAssistant,
-    adjustments: list[tuple[int | str, int, bool]],
-    namespace: str,
-    name: str,
-    endpoint: str,
-    service: str,
-    changed_parameter: str | None,
-    instance: str,
-) -> None:
+    hass, adjustments, namespace, name, endpoint, service, changed_parameter, instance
+):
     """Assert an API request making range changes works.

     AdjustRangeValue are examples of such requests.
@@ -3999,108 +3983,6 @@ async def test_alarm_control_panel_code_arm_required(hass: HomeAssistant) -> Non
     await discovery_test(device, hass, expected_endpoints=0)


-async def test_alarm_control_panel_disarm_required(hass: HomeAssistant) -> None:
-    """Test alarm_control_panel disarm required."""
-    device = (
-        "alarm_control_panel.test_4",
-        "armed_away",
-        {
-            "friendly_name": "Test Alarm Control Panel 4",
-            "code_arm_required": False,
-            "code_format": "FORMAT_NUMBER",
-            "code": "1234",
-            "supported_features": 3,
-        },
-    )
-    appliance = await discovery_test(device, hass)
-
-    assert appliance["endpointId"] == "alarm_control_panel#test_4"
-    assert appliance["displayCategories"][0] == "SECURITY_PANEL"
-    assert appliance["friendlyName"] == "Test Alarm Control Panel 4"
-    assert_endpoint_capabilities(
-        appliance, "Alexa.SecurityPanelController", "Alexa.EndpointHealth", "Alexa"
-    )
-
-    properties = await reported_properties(hass, "alarm_control_panel#test_4")
-    properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_AWAY")
-
-    msg = await assert_request_fails(
-        "Alexa.SecurityPanelController",
-        "Arm",
-        "alarm_control_panel#test_4",
-        "alarm_control_panel.alarm_arm_home",
-        hass,
-        payload={"armState": "ARMED_STAY"},
-    )
-    assert msg["event"]["payload"]["type"] == "AUTHORIZATION_REQUIRED"
-    assert (
-        msg["event"]["payload"]["message"]
-        == "You must disarm the system before you can set the requested arm state."
-    )
-
-    _, msg = await assert_request_calls_service(
-        "Alexa.SecurityPanelController",
-        "Arm",
-        "alarm_control_panel#test_4",
-        "alarm_control_panel.alarm_arm_away",
-        hass,
-        response_type="Arm.Response",
-        payload={"armState": "ARMED_AWAY"},
-    )
-    properties = ReportedProperties(msg["context"]["properties"])
-    properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_AWAY")
-
-
-async def test_alarm_control_panel_change_arm_type(hass: HomeAssistant) -> None:
-    """Test alarm_control_panel change arm type."""
-    device = (
-        "alarm_control_panel.test_5",
-        "armed_home",
-        {
-            "friendly_name": "Test Alarm Control Panel 5",
-            "code_arm_required": False,
-            "code_format": "FORMAT_NUMBER",
-            "code": "1234",
-            "supported_features": 3,
-        },
-    )
-    appliance = await discovery_test(device, hass)
-
-    assert appliance["endpointId"] == "alarm_control_panel#test_5"
-    assert appliance["displayCategories"][0] == "SECURITY_PANEL"
-    assert appliance["friendlyName"] == "Test Alarm Control Panel 5"
-    assert_endpoint_capabilities(
-        appliance, "Alexa.SecurityPanelController", "Alexa.EndpointHealth", "Alexa"
-    )
-
-    properties = await reported_properties(hass, "alarm_control_panel#test_5")
-    properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_STAY")
-
-    _, msg = await assert_request_calls_service(
-        "Alexa.SecurityPanelController",
-        "Arm",
-        "alarm_control_panel#test_5",
-        "alarm_control_panel.alarm_arm_home",
-        hass,
-        response_type="Arm.Response",
-        payload={"armState": "ARMED_STAY"},
-    )
-    properties = ReportedProperties(msg["context"]["properties"])
-    properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_STAY")
-
-    _, msg = await assert_request_calls_service(
-        "Alexa.SecurityPanelController",
-        "Arm",
-        "alarm_control_panel#test_5",
-        "alarm_control_panel.alarm_arm_away",
-        hass,
-        response_type="Arm.Response",
-        payload={"armState": "ARMED_AWAY"},
-    )
-    properties = ReportedProperties(msg["context"]["properties"])
-    properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_AWAY")
-
-
 async def test_range_unsupported_domain(hass: HomeAssistant) -> None:
     """Test rangeController with unsupported domain."""
     device = ("switch.test", "on", {"friendly_name": "Test switch"})
@@ -4776,7 +4658,7 @@ async def test_cover_semantics_position_and_tilt(hass: HomeAssistant) -> None:
             "friendly_name": "Test cover semantics",
             "device_class": "blind",
             "supported_features": 255,
-            "current_position": 30,
+            "position": 30,
             "tilt_position": 30,
         },
     )
diff --git a/tests/components/alexa/test_smart_home_http.py b/tests/components/alexa/test_smart_home_http.py
index 20d9b30dda5..1c30c72e72c 100644
--- a/tests/components/alexa/test_smart_home_http.py
+++ b/tests/components/alexa/test_smart_home_http.py
@@ -5,7 +5,6 @@ import json
 import logging
 from typing import Any

-from aiohttp import ClientResponse
 import pytest

 from homeassistant.components.alexa import DOMAIN, smart_home
@@ -18,9 +17,7 @@ from .test_common import get_new_request
 from tests.typing import ClientSessionGenerator


-async def do_http_discovery(
-    config: dict[str, Any], hass: HomeAssistant, hass_client: ClientSessionGenerator
-) -> ClientResponse:
+async def do_http_discovery(config, hass, hass_client):
     """Submit a request to the Smart Home HTTP API."""
     await async_setup_component(hass, DOMAIN, config)
     http_client = await hass_client()
diff --git a/tests/components/amberelectric/conftest.py b/tests/components/amberelectric/conftest.py
index ce4073db71b..9de865fae6c 100644
--- a/tests/components/amberelectric/conftest.py
+++ b/tests/components/amberelectric/conftest.py
@@ -1,9 +1,9 @@
 """Provide common Amber fixtures."""

-from collections.abc import Generator
 from unittest.mock import AsyncMock, patch

 import pytest
+from typing_extensions import Generator


 @pytest.fixture
diff --git a/tests/components/amberelectric/test_binary_sensor.py b/tests/components/amberelectric/test_binary_sensor.py
index 2c1ee22b644..1e5eb572e07 100644
--- a/tests/components/amberelectric/test_binary_sensor.py
+++ b/tests/components/amberelectric/test_binary_sensor.py
@@ -8,7 +8,6 @@ from unittest.mock import Mock, patch
 from amberelectric.model.channel import ChannelType
 from amberelectric.model.current_interval import CurrentInterval
 from amberelectric.model.interval import SpikeStatus
-from amberelectric.model.tariff_information import TariffInformation
 from dateutil import parser
 import pytest

@@ -112,7 +111,7 @@ async def setup_spike(hass: HomeAssistant) -> AsyncGenerator[Mock]:
 @pytest.mark.usefixtures("setup_no_spike")
 def test_no_spike_sensor(hass: HomeAssistant) -> None:
     """Testing the creation of the Amber renewables sensor."""
-    assert len(hass.states.async_all()) == 6
+    assert len(hass.states.async_all()) == 5
     sensor = hass.states.get("binary_sensor.mock_title_price_spike")
     assert sensor
     assert sensor.state == "off"
@@ -123,7 +122,7 @@ def test_no_spike_sensor(hass: HomeAssistant) -> None:
 @pytest.mark.usefixtures("setup_potential_spike")
 def test_potential_spike_sensor(hass: HomeAssistant) -> None:
     """Testing the creation of the Amber renewables sensor."""
-    assert len(hass.states.async_all()) == 6
+    assert len(hass.states.async_all()) == 5
     sensor = hass.states.get("binary_sensor.mock_title_price_spike")
     assert sensor
     assert sensor.state == "off"
@@ -134,85 +133,9 @@ def test_potential_spike_sensor(hass: HomeAssistant) -> None:
 @pytest.mark.usefixtures("setup_spike")
 def test_spike_sensor(hass: HomeAssistant) -> None:
     """Testing the creation of the Amber renewables sensor."""
-    assert len(hass.states.async_all()) == 6
+    assert len(hass.states.async_all()) == 5
     sensor = hass.states.get("binary_sensor.mock_title_price_spike")
     assert sensor
     assert sensor.state == "on"
     assert sensor.attributes["icon"] == "mdi:power-plug-off"
     assert sensor.attributes["spike_status"] == "spike"
-
-
-@pytest.fixture
-async def setup_inactive_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mock]:
-    """Set up general channel."""
-    MockConfigEntry(
-        domain="amberelectric",
-        data={
-            CONF_SITE_NAME: "mock_title",
-            CONF_API_TOKEN: MOCK_API_TOKEN,
-            CONF_SITE_ID: GENERAL_ONLY_SITE_ID,
-        },
-    ).add_to_hass(hass)
-
-    instance = Mock()
-    with patch(
-        "amberelectric.api.AmberApi.create",
-        return_value=instance,
-    ) as mock_update:
-        general_channel: list[CurrentInterval] = [
-            generate_current_interval(
-                ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00")
-            ),
-        ]
-        general_channel[0].tariff_information = TariffInformation(demandWindow=False)
-        instance.get_current_price = Mock(return_value=general_channel)
-        assert await async_setup_component(hass, DOMAIN, {})
-        await hass.async_block_till_done()
-        yield mock_update.return_value
-
-
-@pytest.fixture
-async def setup_active_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mock]:
-    """Set up general channel."""
-    MockConfigEntry(
-        domain="amberelectric",
-        data={
-            CONF_SITE_NAME: "mock_title",
-            CONF_API_TOKEN: MOCK_API_TOKEN,
-            CONF_SITE_ID: GENERAL_ONLY_SITE_ID,
-        },
-    ).add_to_hass(hass)
-
-    instance = Mock()
-    with patch(
-        "amberelectric.api.AmberApi.create",
-        return_value=instance,
-    ) as mock_update:
-        general_channel: list[CurrentInterval] = [
-            generate_current_interval(
-                ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00")
-            ),
-        ]
-        general_channel[0].tariff_information = TariffInformation(demandWindow=True)
-        instance.get_current_price = Mock(return_value=general_channel)
-        assert await async_setup_component(hass, DOMAIN, {})
-        await hass.async_block_till_done()
-        yield mock_update.return_value
-
-
-@pytest.mark.usefixtures("setup_inactive_demand_window")
-def test_inactive_demand_window_sensor(hass: HomeAssistant) -> None:
-    """Testing the creation of the Amber demand_window sensor."""
-    assert len(hass.states.async_all()) == 6
-    sensor = hass.states.get("binary_sensor.mock_title_demand_window")
-    assert sensor
-    assert sensor.state == "off"
-
-
-@pytest.mark.usefixtures("setup_active_demand_window")
-def test_active_demand_window_sensor(hass: HomeAssistant) -> None:
-    """Testing the creation of the Amber demand_window sensor."""
-    assert len(hass.states.async_all()) == 6
-    sensor = hass.states.get("binary_sensor.mock_title_demand_window")
-    assert sensor
-    assert sensor.state == "on"
diff --git a/tests/components/amberelectric/test_sensor.py b/tests/components/amberelectric/test_sensor.py
index 3a5626d14d5..3c0910f0afc 100644
--- a/tests/components/amberelectric/test_sensor.py
+++ b/tests/components/amberelectric/test_sensor.py
@@ -105,7 +105,7 @@ async def setup_general_and_feed_in(hass: HomeAssistant) -> AsyncGenerator[Mock]

 async def test_general_price_sensor(hass: HomeAssistant, setup_general: Mock) -> None:
     """Test the General Price sensor."""
-    assert len(hass.states.async_all()) == 6
+    assert len(hass.states.async_all()) == 5
     price = hass.states.get("sensor.mock_title_general_price")
     assert price
     assert price.state == "0.08"
@@ -143,7 +143,7 @@ async def test_general_price_sensor(hass: HomeAssistant, setup_general: Mock) ->

 @pytest.mark.usefixtures("setup_general_and_controlled_load")
 async def test_general_and_controlled_load_price_sensor(hass: HomeAssistant) -> None:
     """Test the Controlled Price sensor."""
-    assert len(hass.states.async_all()) == 9
+    assert len(hass.states.async_all()) == 8
     price = hass.states.get("sensor.mock_title_controlled_load_price")
     assert price
     assert price.state == "0.08"
@@ -165,7 +165,7 @@ async def test_general_and_controlled_load_price_sensor(hass: HomeAssistant) ->
 @pytest.mark.usefixtures("setup_general_and_feed_in")
 async def test_general_and_feed_in_price_sensor(hass: HomeAssistant) -> None:
     """Test the Feed In sensor."""
-    assert len(hass.states.async_all()) == 9
+    assert len(hass.states.async_all()) == 8
     price = hass.states.get("sensor.mock_title_feed_in_price")
     assert price
     assert price.state == "-0.08"
@@ -188,7 +188,7 @@ async def test_general_forecast_sensor(
     hass: HomeAssistant, setup_general: Mock
 ) -> None:
     """Test the General Forecast sensor."""
-    assert len(hass.states.async_all()) == 6
+    assert len(hass.states.async_all()) == 5
     price = hass.states.get("sensor.mock_title_general_forecast")
     assert price
     assert price.state == "0.09"
@@ -230,7 +230,7 @@ async def test_general_forecast_sensor(
 @pytest.mark.usefixtures("setup_general_and_controlled_load")
 async def test_controlled_load_forecast_sensor(hass: HomeAssistant) -> None:
     """Test the Controlled Load Forecast sensor."""
-    assert len(hass.states.async_all()) == 9
+    assert len(hass.states.async_all()) == 8
     price = hass.states.get("sensor.mock_title_controlled_load_forecast")
     assert price
     assert price.state == "0.09"
@@ -254,7 +254,7 @@ async def test_controlled_load_forecast_sensor(hass: HomeAssistant) -> None:
 @pytest.mark.usefixtures("setup_general_and_feed_in")
 async def test_feed_in_forecast_sensor(hass: HomeAssistant) -> None:
     """Test the Feed In Forecast sensor."""
-    assert len(hass.states.async_all()) == 9
+    assert len(hass.states.async_all()) == 8
     price = hass.states.get("sensor.mock_title_feed_in_forecast")
     assert price
     assert price.state == "-0.09"
@@ -278,7 +278,7 @@ async def test_feed_in_forecast_sensor(hass: HomeAssistant) -> None:
 @pytest.mark.usefixtures("setup_general")
 def test_renewable_sensor(hass: HomeAssistant) -> None:
     """Testing the creation of the Amber renewables sensor."""
-    assert len(hass.states.async_all()) == 6
+    assert len(hass.states.async_all()) == 5
     sensor = hass.states.get("sensor.mock_title_renewables")
     assert sensor
     assert sensor.state == "51"
@@ -287,7 +287,7 @@ def test_renewable_sensor(hass: HomeAssistant) -> None:
 @pytest.mark.usefixtures("setup_general")
 def test_general_price_descriptor_descriptor_sensor(hass: HomeAssistant) -> None:
     """Test the General Price Descriptor sensor."""
-    assert len(hass.states.async_all()) == 6
+    assert len(hass.states.async_all()) == 5
     price = hass.states.get("sensor.mock_title_general_price_descriptor")
     assert price
     assert price.state == "extremely_low"
@@ -298,7 +298,7 @@ def test_general_and_controlled_load_price_descriptor_sensor(
     hass: HomeAssistant,
 ) -> None:
     """Test the Controlled Price Descriptor sensor."""
-    assert len(hass.states.async_all()) == 9
+    assert len(hass.states.async_all()) == 8
     price = hass.states.get("sensor.mock_title_controlled_load_price_descriptor")
     assert price
     assert price.state == "extremely_low"
@@ -307,7 +307,7 @@ def test_general_and_controlled_load_price_descriptor_sensor(
 @pytest.mark.usefixtures("setup_general_and_feed_in")
 def test_general_and_feed_in_price_descriptor_sensor(hass: HomeAssistant) -> None:
     """Test the Feed In Price Descriptor sensor."""
-    assert len(hass.states.async_all()) == 9
+    assert len(hass.states.async_all()) == 8
     price = hass.states.get("sensor.mock_title_feed_in_price_descriptor")
     assert price
     assert price.state == "extremely_low"
diff --git a/tests/components/ambient_network/conftest.py b/tests/components/ambient_network/conftest.py
index e728d46aaf6..2900f8ae5fe 100644
--- a/tests/components/ambient_network/conftest.py
+++ b/tests/components/ambient_network/conftest.py
@@ -1,13 +1,13 @@
 """Common fixtures for the Ambient Weather Network integration tests."""

-from collections.abc import Generator
 from typing import Any
 from unittest.mock import AsyncMock, Mock, patch

 from aioambient import OpenAPI
 import pytest
+from typing_extensions import Generator

-from homeassistant.components.ambient_network.const import DOMAIN
+from homeassistant.components import ambient_network
 from homeassistant.core import HomeAssistant

 from tests.common import (
@@ -69,7 +69,7 @@ async def mock_aioambient(open_api: OpenAPI):
 def config_entry_fixture(request: pytest.FixtureRequest) -> MockConfigEntry:
     """Mock config entry."""
     return MockConfigEntry(
-        domain=DOMAIN,
+        domain=ambient_network.DOMAIN,
         title=f"Station {request.param[0]}",
         data={"mac": request.param},
     )
diff --git a/tests/components/ambient_station/conftest.py b/tests/components/ambient_station/conftest.py
index 160c05ad996..e4f067108a5 100644
--- a/tests/components/ambient_station/conftest.py
+++ b/tests/components/ambient_station/conftest.py
@@ -1,10 +1,10 @@
 """Define test fixtures for Ambient PWS."""

-from collections.abc import Generator
 from typing import Any
 from unittest.mock import AsyncMock, Mock, patch

 import pytest
+from typing_extensions import Generator

 from homeassistant.components.ambient_station.const import CONF_APP_KEY, DOMAIN
 from homeassistant.const import CONF_API_KEY
diff --git a/tests/components/ambient_station/snapshots/test_diagnostics.ambr b/tests/components/ambient_station/snapshots/test_diagnostics.ambr
index 2f90b09d39f..b4aede7948c 100644
--- a/tests/components/ambient_station/snapshots/test_diagnostics.ambr
+++ b/tests/components/ambient_station/snapshots/test_diagnostics.ambr
@@ -7,8 +7,6 @@
       'app_key': '**REDACTED**',
     }),
     'disabled_by': None,
-    'discovery_keys': dict({
-    }),
     'domain': 'ambient_station',
     'entry_id': '382cf7643f016fd48b3fe52163fe8877',
     'minor_version': 1,
diff --git a/tests/components/ambient_station/test_config_flow.py b/tests/components/ambient_station/test_config_flow.py
index e4c8efabc20..19ae9828c22 100644
--- a/tests/components/ambient_station/test_config_flow.py
+++ b/tests/components/ambient_station/test_config_flow.py
@@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch
 from aioambient.errors import AmbientError
 import pytest

-from homeassistant.components.ambient_station.const import CONF_APP_KEY, DOMAIN
+from homeassistant.components.ambient_station import CONF_APP_KEY, DOMAIN
 from homeassistant.config_entries import SOURCE_USER
 from homeassistant.const import CONF_API_KEY
 from homeassistant.core import HomeAssistant
diff --git a/tests/components/ambient_station/test_diagnostics.py b/tests/components/ambient_station/test_diagnostics.py
index 82db72eb9ca..05161ba32cd 100644
--- a/tests/components/ambient_station/test_diagnostics.py
+++ b/tests/components/ambient_station/test_diagnostics.py
@@ -1,7 +1,6 @@
 """Test Ambient PWS diagnostics."""

 from syrupy import SnapshotAssertion
-from syrupy.filters import props

 from homeassistant.components.ambient_station import AmbientStationConfigEntry
 from homeassistant.core import HomeAssistant
@@ -21,6 +20,7 @@ async def test_entry_diagnostics(
     """Test config entry diagnostics."""
     ambient = config_entry.runtime_data
     ambient.stations = data_station
-    assert await get_diagnostics_for_config_entry(
-        hass, hass_client, config_entry
-    ) == snapshot(exclude=props("created_at", "modified_at"))
+    assert (
+        await get_diagnostics_for_config_entry(hass, hass_client, config_entry)
+        == snapshot
+    )
diff --git a/tests/components/analytics/test_analytics.py b/tests/components/analytics/test_analytics.py
index ba7e46bdde7..60882cda874 100644
--- a/tests/components/analytics/test_analytics.py
+++ b/tests/components/analytics/test_analytics.py
@@ -19,7 +19,8 @@ from homeassistant.components.analytics.const import (
     ATTR_STATISTICS,
     ATTR_USAGE,
 )
-from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState
+from homeassistant.components.recorder import Recorder
+from homeassistant.config_entries import ConfigEntryState
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.loader import IntegrationNotFound
@@ -35,7 +36,7 @@ MOCK_VERSION_NIGHTLY = "1970.1.0.dev19700101"

 @pytest.fixture(autouse=True)
-def uuid_mock() -> Generator[None]:
+def uuid_mock() -> Generator[Any, Any, None]:
     """Mock the UUID."""
     with patch("uuid.UUID.hex", new_callable=PropertyMock) as hex_mock:
         hex_mock.return_value = MOCK_UUID
@@ -43,7 +44,7 @@ def uuid_mock() -> Generator[None]:

 @pytest.fixture(autouse=True)
-def ha_version_mock() -> Generator[None]:
+def ha_version_mock() -> Generator[Any, Any, None]:
     """Mock the core version."""
     with patch(
         "homeassistant.components.analytics.analytics.HA_VERSION",
@@ -53,7 +54,7 @@ def ha_version_mock() -> Generator[None]:

 @pytest.fixture
-def installation_type_mock() -> Generator[None]:
+def installation_type_mock() -> Generator[Any, Any, None]:
     """Mock the async_get_system_info."""
     with patch(
         "homeassistant.components.analytics.analytics.async_get_system_info",
@@ -67,7 +68,6 @@ def _last_call_payload(aioclient: AiohttpClientMocker) -> dict[str, Any]:
     return aioclient.mock_calls[-1][2]


-@pytest.mark.usefixtures("supervisor_client")
 async def test_no_send(
     hass: HomeAssistant,
     caplog: pytest.LogCaptureFixture,
@@ -76,7 +76,7 @@ async def test_no_send(
     """Test send when no preferences are defined."""
     analytics = Analytics(hass)
     with patch(
-        "homeassistant.components.analytics.analytics.is_hassio",
+        "homeassistant.components.hassio.is_hassio",
         side_effect=Mock(return_value=False),
     ):
         assert not analytics.preferences[ATTR_BASE]
@@ -97,7 +97,7 @@ async def test_load_with_supervisor_diagnostics(hass: HomeAssistant) -> None:
             side_effect=Mock(return_value={"diagnostics": True}),
         ),
         patch(
-            "homeassistant.components.analytics.analytics.is_hassio",
+            "homeassistant.components.hassio.is_hassio",
             side_effect=Mock(return_value=True),
         ),
     ):
@@ -118,7 +118,7 @@ async def test_load_with_supervisor_without_diagnostics(hass: HomeAssistant) ->
             side_effect=Mock(return_value={"diagnostics": False}),
         ),
         patch(
-            "homeassistant.components.analytics.analytics.is_hassio",
+            "homeassistant.components.hassio.is_hassio",
             side_effect=Mock(return_value=True),
         ),
     ):
@@ -127,7 +127,6 @@ async def test_load_with_supervisor_without_diagnostics(hass: HomeAssistant) ->
     assert not analytics.preferences[ATTR_DIAGNOSTICS]


-@pytest.mark.usefixtures("supervisor_client")
 async def test_failed_to_send(
     hass: HomeAssistant,
     caplog: pytest.LogCaptureFixture,
@@ -146,7 +145,6 @@ async def test_failed_to_send(
     )


-@pytest.mark.usefixtures("supervisor_client")
 async def test_failed_to_send_raises(
     hass: HomeAssistant,
     caplog: pytest.LogCaptureFixture,
@@ -162,11 +160,11 @@
     assert "Error sending analytics" in caplog.text


-@pytest.mark.usefixtures("installation_type_mock", "supervisor_client")
 async def test_send_base(
     hass: HomeAssistant,
     caplog: pytest.LogCaptureFixture,
     aioclient_mock: AiohttpClientMocker,
+    installation_type_mock: Generator[Any, Any, None],
     snapshot: SnapshotAssertion,
 ) -> None:
     """Test send base preferences are defined."""
@@ -185,7 +183,6 @@ async def test_send_base(
     assert snapshot == submitted_data


-@pytest.mark.usefixtures("supervisor_client")
 async def test_send_base_with_supervisor(
     hass: HomeAssistant,
     caplog: pytest.LogCaptureFixture,
@@ -219,12 +216,8 @@ async def test_send_base_with_supervisor(
             side_effect=Mock(return_value={}),
         ),
         patch(
-            "homeassistant.components.analytics.analytics.is_hassio",
+            "homeassistant.components.hassio.is_hassio",
             side_effect=Mock(return_value=True),
-        ) as is_hassio_mock,
-        patch(
-            "homeassistant.helpers.system_info.is_hassio",
-            new=is_hassio_mock,
         ),
     ):
         await analytics.load()
@@ -238,11 +231,11 @@ async def test_send_base_with_supervisor(
     assert snapshot == submitted_data


-@pytest.mark.usefixtures("installation_type_mock", "supervisor_client")
 async def test_send_usage(
     hass: HomeAssistant,
     caplog: pytest.LogCaptureFixture,
     aioclient_mock: AiohttpClientMocker,
+    installation_type_mock: Generator[Any, Any, None],
     snapshot: SnapshotAssertion,
 ) -> None:
     """Test send usage preferences are defined."""
@@ -279,7 +272,6 @@ async def test_send_usage_with_supervisor(
     caplog: pytest.LogCaptureFixture,
     aioclient_mock: AiohttpClientMocker,
     snapshot: SnapshotAssertion,
-    supervisor_client: AsyncMock,
 ) -> None:
     """Test send usage with supervisor preferences are defined."""
     aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
@@ -290,9 +282,6 @@ async def test_send_usage_with_supervisor(
     assert analytics.preferences[ATTR_USAGE]
     hass.config.components.add("default_config")

-    supervisor_client.addons.addon_info.return_value = Mock(
-        slug="test_addon", protected=True, version="1", auto_update=False
-    )
     with (
         patch(
             "homeassistant.components.hassio.get_supervisor_info",
@@ -318,12 +307,19 @@ async def test_send_usage_with_supervisor(
             side_effect=Mock(return_value={}),
         ),
         patch(
-            "homeassistant.components.analytics.analytics.is_hassio",
-            side_effect=Mock(return_value=True),
-        ) as is_hassio_mock,
+            "homeassistant.components.hassio.async_get_addon_info",
+            side_effect=AsyncMock(
+                return_value={
+                    "slug": "test_addon",
+                    "protected": True,
+                    "version": "1",
+                    "auto_update": False,
+                }
+            ),
+        ),
         patch(
-            "homeassistant.helpers.system_info.is_hassio",
-            new=is_hassio_mock,
+            "homeassistant.components.hassio.is_hassio",
+            side_effect=Mock(return_value=True),
         ),
     ):
         await analytics.send_analytics()
@@ -335,11 +331,11 @@ async def test_send_usage_with_supervisor(
     assert snapshot == submitted_data


-@pytest.mark.usefixtures("installation_type_mock", "supervisor_client")
 async def test_send_statistics(
     hass: HomeAssistant,
     caplog: pytest.LogCaptureFixture,
     aioclient_mock: AiohttpClientMocker,
+    installation_type_mock: Generator[Any, Any, None],
     snapshot: SnapshotAssertion,
 ) -> None:
     """Test send statistics preferences are defined."""
@@ -363,10 +359,9 @@
     assert snapshot == submitted_data


-@pytest.mark.usefixtures("mock_hass_config", "supervisor_client")
+@pytest.mark.usefixtures("mock_hass_config") async def test_send_statistics_one_integration_fails( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test send statistics preferences are defined.""" aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200) @@ -387,13 +382,12 @@ async def test_send_statistics_one_integration_fails( assert post_call[2]["integration_count"] == 0 -@pytest.mark.usefixtures( - "installation_type_mock", "mock_hass_config", "supervisor_client" -) +@pytest.mark.usefixtures("mock_hass_config") async def test_send_statistics_disabled_integration( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, + installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send statistics with disabled integration.""" @@ -426,13 +420,12 @@ async def test_send_statistics_disabled_integration( assert snapshot == submitted_data -@pytest.mark.usefixtures( - "installation_type_mock", "mock_hass_config", "supervisor_client" -) +@pytest.mark.usefixtures("mock_hass_config") async def test_send_statistics_ignored_integration( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, + installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send statistics with ignored integration.""" @@ -471,10 +464,9 @@ async def test_send_statistics_ignored_integration( assert snapshot == submitted_data -@pytest.mark.usefixtures("mock_hass_config", "supervisor_client") +@pytest.mark.usefixtures("mock_hass_config") async def test_send_statistics_async_get_integration_unknown_exception( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test send statistics preferences are defined.""" aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200) @@ -500,7 +492,6 @@ async def test_send_statistics_with_supervisor( caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, snapshot: SnapshotAssertion, - supervisor_client: AsyncMock, ) -> None: """Test send statistics preferences are defined.""" aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200) @@ -509,9 +500,6 @@ async def test_send_statistics_with_supervisor( assert analytics.preferences[ATTR_BASE] assert analytics.preferences[ATTR_STATISTICS] - supervisor_client.addons.addon_info.return_value = Mock( - slug="test_addon", protected=True, version="1", auto_update=False - ) with ( patch( "homeassistant.components.hassio.get_supervisor_info", @@ -537,12 +525,19 @@ async def test_send_statistics_with_supervisor( side_effect=Mock(return_value={}), ), patch( - "homeassistant.components.analytics.analytics.is_hassio", - side_effect=Mock(return_value=True), - ) as is_hassio_mock, + "homeassistant.components.hassio.async_get_addon_info", + side_effect=AsyncMock( + return_value={ + "slug": "test_addon", + "protected": True, + "version": "1", + "auto_update": False, + } + ), + ), patch( - "homeassistant.helpers.system_info.is_hassio", - new=is_hassio_mock, + "homeassistant.components.hassio.is_hassio", + side_effect=Mock(return_value=True), ), ): await analytics.send_analytics() @@ -554,7 +549,6 @@ async def test_send_statistics_with_supervisor( assert snapshot == submitted_data -@pytest.mark.usefixtures("supervisor_client") async def test_reusing_uuid( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -572,13 +566,12 @@ async def 
test_reusing_uuid( assert analytics.uuid == "NOT_MOCK_UUID" -@pytest.mark.usefixtures( - "enable_custom_integrations", "installation_type_mock", "supervisor_client" -) +@pytest.mark.usefixtures("enable_custom_integrations") async def test_custom_integrations( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, + installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test sending custom integrations.""" @@ -601,10 +594,8 @@ async def test_custom_integrations( assert snapshot == submitted_data -@pytest.mark.usefixtures("supervisor_client") async def test_dev_url( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test sending payload to dev url.""" aioclient_mock.post(ANALYTICS_ENDPOINT_URL_DEV, status=200) @@ -620,7 +611,6 @@ async def test_dev_url( assert str(payload[1]) == ANALYTICS_ENDPOINT_URL_DEV -@pytest.mark.usefixtures("supervisor_client") async def test_dev_url_error( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -644,10 +634,8 @@ async def test_dev_url_error( ) in caplog.text -@pytest.mark.usefixtures("supervisor_client") async def test_nightly_endpoint( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test sending payload to production url when running nightly.""" aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200) @@ -663,13 +651,12 @@ async def test_nightly_endpoint( assert str(payload[1]) == ANALYTICS_ENDPOINT_URL -@pytest.mark.usefixtures( - "installation_type_mock", "mock_hass_config", "supervisor_client" -) +@pytest.mark.usefixtures("mock_hass_config") async def test_send_with_no_energy( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, + installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send base preferences are defined.""" @@ -701,13 +688,12 @@ async def test_send_with_no_energy( assert snapshot == submitted_data -@pytest.mark.usefixtures( - "recorder_mock", "installation_type_mock", "mock_hass_config", "supervisor_client" -) +@pytest.mark.usefixtures("recorder_mock", "mock_hass_config") async def test_send_with_no_energy_config( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, + installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send base preferences are defined.""" @@ -734,13 +720,12 @@ async def test_send_with_no_energy_config( ) -@pytest.mark.usefixtures( - "recorder_mock", "installation_type_mock", "mock_hass_config", "supervisor_client" -) +@pytest.mark.usefixtures("recorder_mock", "mock_hass_config") async def test_send_with_energy_config( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, + installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send base preferences are defined.""" @@ -767,13 +752,12 @@ async def test_send_with_energy_config( ) -@pytest.mark.usefixtures( - "installation_type_mock", "mock_hass_config", "supervisor_client" -) +@pytest.mark.usefixtures("mock_hass_config") async def test_send_usage_with_certificate( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, + installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send usage 
preferences with certificate.""" @@ -795,11 +779,12 @@ async def test_send_usage_with_certificate( assert snapshot == submitted_data -@pytest.mark.usefixtures("recorder_mock", "installation_type_mock", "supervisor_client") async def test_send_with_recorder( + recorder_mock: Recorder, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, + installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test recorder information.""" @@ -826,7 +811,6 @@ async def test_send_with_recorder( ) -@pytest.mark.usefixtures("supervisor_client") async def test_send_with_problems_loading_yaml( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -846,7 +830,7 @@ async def test_send_with_problems_loading_yaml( assert len(aioclient_mock.mock_calls) == 0 -@pytest.mark.usefixtures("mock_hass_config", "supervisor_client") +@pytest.mark.usefixtures("mock_hass_config") async def test_timeout_while_sending( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -865,11 +849,11 @@ async def test_timeout_while_sending( assert "Timeout sending analytics" in caplog.text -@pytest.mark.usefixtures("installation_type_mock", "supervisor_client") async def test_not_check_config_entries_if_yaml( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, + installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test skip config entry check if defined in yaml.""" @@ -888,7 +872,7 @@ async def test_not_check_config_entries_if_yaml( domain="ignored_integration", state=ConfigEntryState.LOADED, source="ignore", - disabled_by=ConfigEntryDisabler.USER, + disabled_by="user", ) mock_config_entry.add_to_hass(hass) diff --git a/tests/components/analytics/test_init.py b/tests/components/analytics/test_init.py index 66000fc5936..cf8d4838415 100644 --- a/tests/components/analytics/test_init.py +++ b/tests/components/analytics/test_init.py @@ -2,8 +2,6 @@ from unittest.mock import patch -import pytest - from homeassistant.components.analytics.const import ANALYTICS_ENDPOINT_URL, DOMAIN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -22,7 +20,6 @@ async def test_setup(hass: HomeAssistant) -> None: assert DOMAIN in hass.data -@pytest.mark.usefixtures("supervisor_client") async def test_websocket( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, diff --git a/tests/components/analytics_insights/conftest.py b/tests/components/analytics_insights/conftest.py index a9c152b8ab9..75d47c41f4e 100644 --- a/tests/components/analytics_insights/conftest.py +++ b/tests/components/analytics_insights/conftest.py @@ -1,14 +1,13 @@ """Common fixtures for the Homeassistant Analytics tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from python_homeassistant_analytics import CurrentAnalytics -from python_homeassistant_analytics.models import Addon, CustomIntegration, Integration +from python_homeassistant_analytics.models import CustomIntegration, Integration +from typing_extensions import Generator from homeassistant.components.analytics_insights.const import ( - CONF_TRACKED_ADDONS, CONF_TRACKED_CUSTOM_INTEGRATIONS, CONF_TRACKED_INTEGRATIONS, DOMAIN, @@ -44,10 +43,6 @@ def mock_analytics_client() -> Generator[AsyncMock]: client.get_current_analytics.return_value = CurrentAnalytics.from_json( load_fixture("analytics_insights/current_data.json") ) - addons = 
load_json_object_fixture("analytics_insights/addons.json") - client.get_addons.return_value = { - key: Addon.from_dict(value) for key, value in addons.items() - } integrations = load_json_object_fixture("analytics_insights/integrations.json") client.get_integrations.return_value = { key: Integration.from_dict(value) for key, value in integrations.items() @@ -70,7 +65,6 @@ def mock_config_entry() -> MockConfigEntry: title="Homeassistant Analytics", data={}, options={ - CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube", "spotify", "myq"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, diff --git a/tests/components/analytics_insights/fixtures/addons.json b/tests/components/analytics_insights/fixtures/addons.json deleted file mode 100644 index cb7ae42c86b..00000000000 --- a/tests/components/analytics_insights/fixtures/addons.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "core_samba": { - "total": 76357, - "versions": { - "12.3.2": 65875, - "12.2.0": 1313, - "12.3.1": 5018, - "12.1.0": 211, - "10.0.0": 1139, - "9.4.0": 4, - "12.3.0": 704, - "9.3.1": 36, - "10.0.2": 1290, - "9.5.1": 379, - "9.6.1": 66, - "10.0.1": 200, - "9.3.0": 20, - "9.2.0": 9, - "9.5.0": 13, - "12.0.0": 39, - "9.7.0": 20, - "11.0.0": 13, - "3.0": 1, - "9.6.0": 2, - "8.1": 2, - "9.0": 3 - }, - "protected": 76345, - "auto_update": 32732 - } -} diff --git a/tests/components/analytics_insights/snapshots/test_sensor.ambr b/tests/components/analytics_insights/snapshots/test_sensor.ambr index 6e11b344b0b..d7eeed7955c 100644 --- a/tests/components/analytics_insights/snapshots/test_sensor.ambr +++ b/tests/components/analytics_insights/snapshots/test_sensor.ambr @@ -1,54 +1,4 @@ # serializer version: 1 -# name: test_all_entities[sensor.homeassistant_analytics_core_samba-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.homeassistant_analytics_core_samba', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'core_samba', - 'platform': 'analytics_insights', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'addons', - 'unique_id': 'addon_core_samba_active_installations', - 'unit_of_measurement': 'active installations', - }) -# --- -# name: test_all_entities[sensor.homeassistant_analytics_core_samba-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Homeassistant Analytics core_samba', - 'state_class': , - 'unit_of_measurement': 'active installations', - }), - 'context': , - 'entity_id': 'sensor.homeassistant_analytics_core_samba', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '76357', - }) -# --- # name: test_all_entities[sensor.homeassistant_analytics_hacs_custom-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -199,106 +149,6 @@ 'state': '24388', }) # --- -# name: test_all_entities[sensor.homeassistant_analytics_total_active_installations-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 
'sensor.homeassistant_analytics_total_active_installations', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Total active installations', - 'platform': 'analytics_insights', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_active_installations', - 'unique_id': 'total_active_installations', - 'unit_of_measurement': 'active installations', - }) -# --- -# name: test_all_entities[sensor.homeassistant_analytics_total_active_installations-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Homeassistant Analytics Total active installations', - 'state_class': , - 'unit_of_measurement': 'active installations', - }), - 'context': , - 'entity_id': 'sensor.homeassistant_analytics_total_active_installations', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '310400', - }) -# --- -# name: test_all_entities[sensor.homeassistant_analytics_total_reported_integrations-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.homeassistant_analytics_total_reported_integrations', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Total reported integrations', - 'platform': 'analytics_insights', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_reports_integrations', - 'unique_id': 'total_reports_integrations', - 'unit_of_measurement': 'active installations', - }) -# --- -# name: test_all_entities[sensor.homeassistant_analytics_total_reported_integrations-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Homeassistant Analytics Total reported integrations', - 'state_class': , - 'unit_of_measurement': 'active installations', - }), - 'context': , - 'entity_id': 'sensor.homeassistant_analytics_total_reported_integrations', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '249256', - }) -# --- # name: test_all_entities[sensor.homeassistant_analytics_youtube-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/analytics_insights/test_config_flow.py b/tests/components/analytics_insights/test_config_flow.py index 747f24930a4..0c9d4c074f8 100644 --- a/tests/components/analytics_insights/test_config_flow.py +++ b/tests/components/analytics_insights/test_config_flow.py @@ -7,7 +7,6 @@ import pytest from python_homeassistant_analytics import HomeassistantAnalyticsConnectionError from homeassistant.components.analytics_insights.const import ( - CONF_TRACKED_ADDONS, CONF_TRACKED_CUSTOM_INTEGRATIONS, CONF_TRACKED_INTEGRATIONS, DOMAIN, @@ -26,12 +25,10 @@ from tests.common import MockConfigEntry [ ( { - CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, { - CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -41,7 +38,6 @@ from tests.common import MockConfigEntry CONF_TRACKED_INTEGRATIONS: ["youtube"], }, { - 
CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -51,7 +47,6 @@ from tests.common import MockConfigEntry CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, { - CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: [], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -88,7 +83,6 @@ async def test_form( "user_input", [ { - CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: [], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -119,7 +113,6 @@ async def test_submitting_empty_form( result = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -130,7 +123,6 @@ async def test_submitting_empty_form( assert result["title"] == "Home Assistant Analytics Insights" assert result["data"] == {} assert result["options"] == { - CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], } @@ -169,7 +161,6 @@ async def test_form_already_configured( domain=DOMAIN, data={}, options={ - CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: ["youtube", "spotify"], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -188,32 +179,19 @@ async def test_form_already_configured( [ ( { - CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, { - CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, ), - ( - { - CONF_TRACKED_ADDONS: ["core_samba"], - }, - { - CONF_TRACKED_ADDONS: ["core_samba"], - CONF_TRACKED_INTEGRATIONS: [], - CONF_TRACKED_CUSTOM_INTEGRATIONS: [], - }, - ), ( { CONF_TRACKED_INTEGRATIONS: ["youtube"], }, { - CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -223,7 +201,6 @@ async def test_form_already_configured( CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, { - CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: [], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -260,7 +237,6 @@ async def test_options_flow( "user_input", [ { - CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: [], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -291,7 +267,6 @@ async def test_submitting_empty_options_flow( result = await hass.config_entries.options.async_configure( result["flow_id"], { - CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube", "hue"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -300,7 +275,6 @@ async def test_submitting_empty_options_flow( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { - CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube", "hue"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], } diff --git a/tests/components/analytics_insights/test_sensor.py b/tests/components/analytics_insights/test_sensor.py index bf82e0c2d65..3ede971c8f8 100644 --- a/tests/components/analytics_insights/test_sensor.py +++ b/tests/components/analytics_insights/test_sensor.py @@ -4,7 +4,6 @@ from datetime import timedelta from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory -import pytest from python_homeassistant_analytics import ( HomeassistantAnalyticsConnectionError, HomeassistantAnalyticsNotModifiedError, @@ -20,7 +19,6 @@ from . 
import setup_integration from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_all_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, diff --git a/tests/components/android_ip_webcam/test_init.py b/tests/components/android_ip_webcam/test_init.py index 58108cef53b..70ecdc9271e 100644 --- a/tests/components/android_ip_webcam/test_init.py +++ b/tests/components/android_ip_webcam/test_init.py @@ -79,3 +79,4 @@ async def test_unload_entry(hass: HomeAssistant, aioclient_mock_fixture) -> None await hass.async_block_till_done() assert entry.state is ConfigEntryState.NOT_LOADED + assert entry.entry_id not in hass.data[DOMAIN] diff --git a/tests/components/androidtv/common.py b/tests/components/androidtv/common.py index 133f6b1470b..23e048e4d52 100644 --- a/tests/components/androidtv/common.py +++ b/tests/components/androidtv/common.py @@ -100,12 +100,7 @@ CONFIG_FIRETV_DEFAULT = CONFIG_FIRETV_PYTHON_ADB def setup_mock_entry( - config: dict[str, Any], - entity_domain: str, - *, - options=None, - version=1, - minor_version=2, + config: dict[str, Any], entity_domain: str ) -> tuple[str, str, MockConfigEntry]: """Prepare mock entry for entities tests.""" patch_key = config[ADB_PATCH_KEY] @@ -114,9 +109,6 @@ def setup_mock_entry( domain=DOMAIN, data=config[DOMAIN], unique_id="a1:b1:c1:d1:e1:f1", - options=options, - version=version, - minor_version=minor_version, ) return patch_key, entity_id, config_entry diff --git a/tests/components/androidtv/conftest.py b/tests/components/androidtv/conftest.py index a075ed66079..befb9db7a8c 100644 --- a/tests/components/androidtv/conftest.py +++ b/tests/components/androidtv/conftest.py @@ -1,9 +1,9 @@ """Fixtures for the Android TV integration tests.""" -from collections.abc import Generator from unittest.mock import Mock, patch import pytest +from typing_extensions import Generator from . 
import patchers diff --git a/tests/components/androidtv/patchers.py b/tests/components/androidtv/patchers.py index 500b9e75cb3..90a13523ebe 100644 --- a/tests/components/androidtv/patchers.py +++ b/tests/components/androidtv/patchers.py @@ -1,6 +1,5 @@ """Define patches used for androidtv tests.""" -from typing import Any from unittest.mock import patch from androidtv.adb_manager.adb_manager_async import DeviceAsync @@ -26,7 +25,7 @@ PROPS_DEV_MAC = "ether ab:cd:ef:gh:ij:kl brd" class AdbDeviceTcpAsyncFake: """A fake of the `adb_shell.adb_device_async.AdbDeviceTcpAsync` class.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs) -> None: """Initialize a fake `adb_shell.adb_device_async.AdbDeviceTcpAsync` instance.""" self.available = False @@ -38,7 +37,7 @@ class AdbDeviceTcpAsyncFake: """Try to connect to a device.""" raise NotImplementedError - async def shell(self, cmd, *args, **kwargs) -> bytes | str | None: + async def shell(self, cmd, *args, **kwargs): """Send an ADB shell command.""" return None diff --git a/tests/components/androidtv/test_config_flow.py b/tests/components/androidtv/test_config_flow.py index cb1015e4198..e2b5207c590 100644 --- a/tests/components/androidtv/test_config_flow.py +++ b/tests/components/androidtv/test_config_flow.py @@ -22,7 +22,7 @@ from homeassistant.components.androidtv.const import ( CONF_APPS, CONF_EXCLUDE_UNNAMED_APPS, CONF_GET_SOURCES, - CONF_SCREENCAP_INTERVAL, + CONF_SCREENCAP, CONF_STATE_DETECTION_RULES, CONF_TURN_OFF_COMMAND, CONF_TURN_ON_COMMAND, @@ -73,7 +73,7 @@ CONNECT_METHOD = ( class MockConfigDevice: """Mock class to emulate Android device.""" - def __init__(self, eth_mac=ETH_MAC, wifi_mac=None) -> None: + def __init__(self, eth_mac=ETH_MAC, wifi_mac=None): """Initialize a fake device to test config flow.""" self.available = True self.device_properties = {PROP_ETHMAC: eth_mac, PROP_WIFIMAC: wifi_mac} @@ -501,7 +501,7 @@ async def test_options_flow(hass: HomeAssistant) -> None: user_input={ CONF_GET_SOURCES: True, CONF_EXCLUDE_UNNAMED_APPS: True, - CONF_SCREENCAP_INTERVAL: 1, + CONF_SCREENCAP: True, CONF_TURN_OFF_COMMAND: "off", CONF_TURN_ON_COMMAND: "on", }, @@ -515,6 +515,6 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert config_entry.options[CONF_GET_SOURCES] is True assert config_entry.options[CONF_EXCLUDE_UNNAMED_APPS] is True - assert config_entry.options[CONF_SCREENCAP_INTERVAL] == 1 + assert config_entry.options[CONF_SCREENCAP] is True assert config_entry.options[CONF_TURN_OFF_COMMAND] == "off" assert config_entry.options[CONF_TURN_ON_COMMAND] == "on" diff --git a/tests/components/androidtv/test_diagnostics.py b/tests/components/androidtv/test_diagnostics.py index 40dba53bd9b..7d1801514af 100644 --- a/tests/components/androidtv/test_diagnostics.py +++ b/tests/components/androidtv/test_diagnostics.py @@ -1,6 +1,6 @@ """Tests for the diagnostics data provided by the AndroidTV integration.""" -from homeassistant.components.androidtv.diagnostics import TO_REDACT +from homeassistant.components.asuswrt.diagnostics import TO_REDACT from homeassistant.components.diagnostics import async_redact_data from homeassistant.components.media_player import DOMAIN as MP_DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -36,4 +36,4 @@ async def test_diagnostics( hass, hass_client, mock_config_entry ) - assert result["entry"] == entry_dict | {"discovery_keys": {}} + assert result["entry"] == entry_dict diff --git a/tests/components/androidtv/test_init.py 
b/tests/components/androidtv/test_init.py deleted file mode 100644 index 8ff7df1668b..00000000000 --- a/tests/components/androidtv/test_init.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Tests for AndroidTV integration initialization.""" - -from homeassistant.components.androidtv.const import ( - CONF_SCREENCAP, - CONF_SCREENCAP_INTERVAL, -) -from homeassistant.components.media_player import DOMAIN as MP_DOMAIN -from homeassistant.core import HomeAssistant - -from . import patchers -from .common import CONFIG_ANDROID_DEFAULT, SHELL_RESPONSE_OFF, setup_mock_entry - - -async def test_migrate_version( - hass: HomeAssistant, -) -> None: - """Test migration to new version.""" - patch_key, _, mock_config_entry = setup_mock_entry( - CONFIG_ANDROID_DEFAULT, - MP_DOMAIN, - options={CONF_SCREENCAP: False}, - minor_version=1, - ) - mock_config_entry.add_to_hass(hass) - - with ( - patchers.patch_connect(True)[patch_key], - patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key], - ): - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.options[CONF_SCREENCAP_INTERVAL] == 0 - assert mock_config_entry.minor_version == 2 diff --git a/tests/components/androidtv/test_media_player.py b/tests/components/androidtv/test_media_player.py index 5a8d88dd9f6..ef0d0c63b06 100644 --- a/tests/components/androidtv/test_media_player.py +++ b/tests/components/androidtv/test_media_player.py @@ -13,7 +13,7 @@ import pytest from homeassistant.components.androidtv.const import ( CONF_APPS, CONF_EXCLUDE_UNNAMED_APPS, - CONF_SCREENCAP_INTERVAL, + CONF_SCREENCAP, CONF_STATE_DETECTION_RULES, CONF_TURN_OFF_COMMAND, CONF_TURN_ON_COMMAND, @@ -801,9 +801,6 @@ async def test_get_image_http( """ patch_key, entity_id, config_entry = _setup(CONFIG_ANDROID_DEFAULT) config_entry.add_to_hass(hass) - hass.config_entries.async_update_entry( - config_entry, options={CONF_SCREENCAP_INTERVAL: 2} - ) with ( patchers.patch_connect(True)[patch_key], @@ -831,27 +828,21 @@ async def test_get_image_http( content = await resp.read() assert content == b"image" - next_update = utcnow() + timedelta(minutes=1) + next_update = utcnow() + timedelta(seconds=30) with ( patchers.patch_shell("11")[patch_key], patchers.PATCH_SCREENCAP as patch_screen_cap, - patch( - "homeassistant.components.androidtv.media_player.utcnow", - return_value=next_update, - ), + patch("homeassistant.util.utcnow", return_value=next_update), ): async_fire_time_changed(hass, next_update, True) await hass.async_block_till_done() patch_screen_cap.assert_not_called() - next_update = utcnow() + timedelta(minutes=2) + next_update = utcnow() + timedelta(seconds=60) with ( patchers.patch_shell("11")[patch_key], patchers.PATCH_SCREENCAP as patch_screen_cap, - patch( - "homeassistant.components.androidtv.media_player.utcnow", - return_value=next_update, - ), + patch("homeassistant.util.utcnow", return_value=next_update), ): async_fire_time_changed(hass, next_update, True) await hass.async_block_till_done() @@ -863,9 +854,6 @@ async def test_get_image_http_fail(hass: HomeAssistant) -> None: patch_key, entity_id, config_entry = _setup(CONFIG_ANDROID_DEFAULT) config_entry.add_to_hass(hass) - hass.config_entries.async_update_entry( - config_entry, options={CONF_SCREENCAP_INTERVAL: 2} - ) with ( patchers.patch_connect(True)[patch_key], @@ -897,7 +885,7 @@ async def test_get_image_disabled(hass: HomeAssistant) -> None: patch_key, entity_id, config_entry = _setup(CONFIG_ANDROID_DEFAULT) config_entry.add_to_hass(hass) 
hass.config_entries.async_update_entry( - config_entry, options={CONF_SCREENCAP_INTERVAL: 0} + config_entry, options={CONF_SCREENCAP: False} ) with ( @@ -1145,7 +1133,7 @@ async def test_options_reload(hass: HomeAssistant) -> None: with patchers.PATCH_SETUP_ENTRY as setup_entry_call: # change an option that not require integration reload hass.config_entries.async_update_entry( - config_entry, options={CONF_EXCLUDE_UNNAMED_APPS: True} + config_entry, options={CONF_SCREENCAP: False} ) await hass.async_block_till_done() diff --git a/tests/components/androidtv_remote/conftest.py b/tests/components/androidtv_remote/conftest.py index 05e40991ff9..aa5583927d1 100644 --- a/tests/components/androidtv_remote/conftest.py +++ b/tests/components/androidtv_remote/conftest.py @@ -1,9 +1,10 @@ """Fixtures for the Android TV Remote integration tests.""" -from collections.abc import Callable, Generator +from collections.abc import Callable from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.androidtv_remote.const import DOMAIN from homeassistant.config_entries import ConfigEntryState diff --git a/tests/components/androidtv_remote/test_config_flow.py b/tests/components/androidtv_remote/test_config_flow.py index 02e15bca415..93c9067d1c8 100644 --- a/tests/components/androidtv_remote/test_config_flow.py +++ b/tests/components/androidtv_remote/test_config_flow.py @@ -757,59 +757,6 @@ async def test_zeroconf_flow_abort_if_mac_is_missing( assert result["reason"] == "cannot_connect" -async def test_zeroconf_flow_already_configured_zeroconf_has_multiple_invalid_ip_addresses( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_unload_entry: AsyncMock, - mock_api: MagicMock, -) -> None: - """Test we abort the zeroconf flow if already configured and zeroconf has invalid ip addresses.""" - host = "1.2.3.4" - name = "My Android TV" - mac = "1A:2B:3C:4D:5E:6F" - unique_id = "1a:2b:3c:4d:5e:6f" - name_existing = name - host_existing = host - - mock_config_entry = MockConfigEntry( - title=name, - domain=DOMAIN, - data={ - "host": host_existing, - "name": name_existing, - "mac": mac, - }, - unique_id=unique_id, - state=ConfigEntryState.LOADED, - ) - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, - data=zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("1.2.3.5"), - ip_addresses=[ip_address("1.2.3.5"), ip_address(host)], - port=6466, - hostname=host, - type="mock_type", - name=name + "._androidtvremote2._tcp.local.", - properties={"bt": mac}, - ), - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - await hass.async_block_till_done() - assert hass.config_entries.async_entries(DOMAIN)[0].data == { - "host": host, - "name": name, - "mac": mac, - } - assert len(mock_unload_entry.mock_calls) == 0 - assert len(mock_setup_entry.mock_calls) == 0 - - async def test_reauth_flow_success( hass: HomeAssistant, mock_setup_entry: AsyncMock, diff --git a/tests/components/androidtv_remote/test_media_player.py b/tests/components/androidtv_remote/test_media_player.py index e292a5b273f..ad7c049e32f 100644 --- a/tests/components/androidtv_remote/test_media_player.py +++ b/tests/components/androidtv_remote/test_media_player.py @@ -20,11 +20,10 @@ async def test_media_player_receives_push_updates( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_api: MagicMock ) -> None: 
"""Test the Android TV Remote media player receives push updates and state is updated.""" + mock_config_entry.options = { + "apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}} + } mock_config_entry.add_to_hass(hass) - hass.config_entries.async_update_entry( - mock_config_entry, - options={"apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}}}, - ) await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -323,7 +322,7 @@ async def test_browse_media( mock_api: MagicMock, ) -> None: """Test the Android TV Remote media player browse media.""" - new_options = { + mock_config_entry.options = { "apps": { "com.google.android.youtube.tv": { "app_name": "YouTube", @@ -333,7 +332,6 @@ async def test_browse_media( } } mock_config_entry.add_to_hass(hass) - hass.config_entries.async_update_entry(mock_config_entry, options=new_options) await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -347,7 +345,7 @@ async def test_browse_media( ) response = await client.receive_json() assert response["success"] - assert response["result"] == { + assert { "title": "Applications", "media_class": "directory", "media_content_type": "apps", @@ -379,7 +377,7 @@ async def test_browse_media( "thumbnail": "", }, ], - } + } == response["result"] async def test_media_player_connection_closed( diff --git a/tests/components/androidtv_remote/test_remote.py b/tests/components/androidtv_remote/test_remote.py index b3c3ce1c283..7ca63685747 100644 --- a/tests/components/androidtv_remote/test_remote.py +++ b/tests/components/androidtv_remote/test_remote.py @@ -19,9 +19,10 @@ async def test_remote_receives_push_updates( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_api: MagicMock ) -> None: """Test the Android TV Remote receives push updates and state is updated.""" - new_options = {"apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}}} + mock_config_entry.options = { + "apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}} + } mock_config_entry.add_to_hass(hass) - hass.config_entries.async_update_entry(mock_config_entry, options=new_options) await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -52,9 +53,10 @@ async def test_remote_toggles( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_api: MagicMock ) -> None: """Test the Android TV Remote toggles.""" - new_options = {"apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}}} + mock_config_entry.options = { + "apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}} + } mock_config_entry.add_to_hass(hass) - hass.config_entries.async_update_entry(mock_config_entry, options=new_options) await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.LOADED diff --git a/tests/components/anova/__init__.py b/tests/components/anova/__init__.py index 903a1180980..887f5b3b05b 100644 --- a/tests/components/anova/__init__.py +++ b/tests/components/anova/__init__.py @@ -36,7 +36,6 @@ def create_entry(hass: HomeAssistant, device_id: str = DEVICE_UNIQUE_ID) -> Conf }, unique_id="sample@gmail.com", version=1, - minor_version=2, ) entry.add_to_hass(hass) return entry diff --git a/tests/components/anova/test_config_flow.py b/tests/components/anova/test_config_flow.py index 3b2afaa49c0..0f93b869296 100644 --- 
a/tests/components/anova/test_config_flow.py +++ b/tests/components/anova/test_config_flow.py @@ -6,7 +6,7 @@ from anova_wifi import AnovaApi, InvalidLogin from homeassistant import config_entries from homeassistant.components.anova.const import DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_DEVICES, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -27,6 +27,7 @@ async def test_flow_user(hass: HomeAssistant, anova_api: AnovaApi) -> None: assert result["data"] == { CONF_USERNAME: "sample@gmail.com", CONF_PASSWORD: "sample", + CONF_DEVICES: [], } diff --git a/tests/components/anova/test_init.py b/tests/components/anova/test_init.py index 2e3e2920abc..5fc63fcaf93 100644 --- a/tests/components/anova/test_init.py +++ b/tests/components/anova/test_init.py @@ -1,18 +1,13 @@ """Test init for Anova.""" -from unittest.mock import patch - from anova_wifi import AnovaApi -from homeassistant.components.anova.const import DOMAIN +from homeassistant.components.anova import DOMAIN from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_DEVICES, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from . import async_init_integration, create_entry -from tests.common import MockConfigEntry - async def test_async_setup_entry(hass: HomeAssistant, anova_api: AnovaApi) -> None: """Test a successful setup entry.""" @@ -60,34 +55,3 @@ async def test_websocket_failure( """Test that we successfully handle a websocket failure on setup.""" entry = await async_init_integration(hass) assert entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_migration_removing_devices_in_config_entry( - hass: HomeAssistant, anova_api: AnovaApi -) -> None: - """Test a successful setup entry.""" - entry = MockConfigEntry( - domain=DOMAIN, - title="Anova", - data={ - CONF_USERNAME: "sample@gmail.com", - CONF_PASSWORD: "sample", - CONF_DEVICES: [], - }, - unique_id="sample@gmail.com", - version=1, - minor_version=1, - ) - entry.add_to_hass(hass) - - with patch("homeassistant.components.anova.AnovaApi.authenticate"): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("sensor.anova_precision_cooker_mode") - assert state is not None - assert state.state == "idle" - - assert entry.version == 1 - assert entry.minor_version == 2 - assert CONF_DEVICES not in entry.data diff --git a/tests/components/anthropic/__init__.py b/tests/components/anthropic/__init__.py deleted file mode 100644 index 99d7a5785a8..00000000000 --- a/tests/components/anthropic/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Anthropic integration.""" diff --git a/tests/components/anthropic/conftest.py b/tests/components/anthropic/conftest.py deleted file mode 100644 index ce6b98c480c..00000000000 --- a/tests/components/anthropic/conftest.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Tests helpers.""" - -from collections.abc import AsyncGenerator -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.const import CONF_LLM_HASS_API -from homeassistant.core import HomeAssistant -from homeassistant.helpers import llm -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: - """Mock a config entry.""" - entry = MockConfigEntry( - 
title="Claude", - domain="anthropic", - data={ - "api_key": "bla", - }, - ) - entry.add_to_hass(hass) - return entry - - -@pytest.fixture -def mock_config_entry_with_assist( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> MockConfigEntry: - """Mock a config entry with assist.""" - hass.config_entries.async_update_entry( - mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} - ) - return mock_config_entry - - -@pytest.fixture -async def mock_init_component( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> AsyncGenerator[None]: - """Initialize integration.""" - with patch( - "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock - ): - assert await async_setup_component(hass, "anthropic", {}) - await hass.async_block_till_done() - yield - - -@pytest.fixture(autouse=True) -async def setup_ha(hass: HomeAssistant) -> None: - """Set up Home Assistant.""" - assert await async_setup_component(hass, "homeassistant", {}) diff --git a/tests/components/anthropic/snapshots/test_conversation.ambr b/tests/components/anthropic/snapshots/test_conversation.ambr deleted file mode 100644 index e4dd7cd00bb..00000000000 --- a/tests/components/anthropic/snapshots/test_conversation.ambr +++ /dev/null @@ -1,34 +0,0 @@ -# serializer version: 1 -# name: test_unknown_hass_api - dict({ - 'conversation_id': None, - 'response': IntentResponse( - card=dict({ - }), - error_code=, - failed_results=list([ - ]), - intent=None, - intent_targets=list([ - ]), - language='en', - matched_states=list([ - ]), - reprompt=dict({ - }), - response_type=, - speech=dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Error preparing LLM API: API non-existing not found', - }), - }), - speech_slots=dict({ - }), - success_results=list([ - ]), - unmatched_states=list([ - ]), - ), - }) -# --- diff --git a/tests/components/anthropic/test_config_flow.py b/tests/components/anthropic/test_config_flow.py deleted file mode 100644 index a5a025b00d0..00000000000 --- a/tests/components/anthropic/test_config_flow.py +++ /dev/null @@ -1,239 +0,0 @@ -"""Test the Anthropic config flow.""" - -from unittest.mock import AsyncMock, patch - -from anthropic import ( - APIConnectionError, - APIResponseValidationError, - APITimeoutError, - AuthenticationError, - BadRequestError, - InternalServerError, -) -from httpx import URL, Request, Response -import pytest - -from homeassistant import config_entries -from homeassistant.components.anthropic.config_flow import RECOMMENDED_OPTIONS -from homeassistant.components.anthropic.const import ( - CONF_CHAT_MODEL, - CONF_MAX_TOKENS, - CONF_PROMPT, - CONF_RECOMMENDED, - CONF_TEMPERATURE, - DOMAIN, - RECOMMENDED_CHAT_MODEL, - RECOMMENDED_MAX_TOKENS, -) -from homeassistant.const import CONF_LLM_HASS_API -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_form(hass: HomeAssistant) -> None: - """Test we get the form.""" - # Pretend we already set up a config entry. 
- hass.config.components.add("anthropic") - MockConfigEntry( - domain=DOMAIN, - state=config_entries.ConfigEntryState.LOADED, - ).add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] is None - - with ( - patch( - "homeassistant.components.anthropic.config_flow.anthropic.resources.messages.AsyncMessages.create", - new_callable=AsyncMock, - ), - patch( - "homeassistant.components.anthropic.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "api_key": "bla", - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["data"] == { - "api_key": "bla", - } - assert result2["options"] == RECOMMENDED_OPTIONS - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_options( - hass: HomeAssistant, mock_config_entry, mock_init_component -) -> None: - """Test the options form.""" - options_flow = await hass.config_entries.options.async_init( - mock_config_entry.entry_id - ) - options = await hass.config_entries.options.async_configure( - options_flow["flow_id"], - { - "prompt": "Speak like a pirate", - "max_tokens": 200, - }, - ) - await hass.async_block_till_done() - assert options["type"] is FlowResultType.CREATE_ENTRY - assert options["data"]["prompt"] == "Speak like a pirate" - assert options["data"]["max_tokens"] == 200 - assert options["data"][CONF_CHAT_MODEL] == RECOMMENDED_CHAT_MODEL - - -@pytest.mark.parametrize( - ("side_effect", "error"), - [ - (APIConnectionError(request=None), "cannot_connect"), - (APITimeoutError(request=None), "timeout_connect"), - ( - BadRequestError( - message="Your credit balance is too low to access the Claude API. 
Please go to Plans & Billing to upgrade or purchase credits.", - response=Response( - status_code=400, - request=Request(method="POST", url=URL()), - ), - body={"type": "error", "error": {"type": "invalid_request_error"}}, - ), - "unknown", - ), - ( - AuthenticationError( - message="invalid x-api-key", - response=Response( - status_code=401, - request=Request(method="POST", url=URL()), - ), - body={"type": "error", "error": {"type": "authentication_error"}}, - ), - "authentication_error", - ), - ( - InternalServerError( - message=None, - response=Response( - status_code=500, - request=Request(method="POST", url=URL()), - ), - body=None, - ), - "unknown", - ), - ( - APIResponseValidationError( - response=Response( - status_code=200, - request=Request(method="POST", url=URL()), - ), - body=None, - ), - "unknown", - ), - ], -) -async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> None: - """Test we handle invalid auth.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch( - "homeassistant.components.anthropic.config_flow.anthropic.resources.messages.AsyncMessages.create", - new_callable=AsyncMock, - side_effect=side_effect, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "api_key": "bla", - }, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": error} - - -@pytest.mark.parametrize( - ("current_options", "new_options", "expected_options"), - [ - ( - { - CONF_RECOMMENDED: True, - CONF_LLM_HASS_API: "none", - CONF_PROMPT: "bla", - }, - { - CONF_RECOMMENDED: False, - CONF_PROMPT: "Speak like a pirate", - CONF_TEMPERATURE: 0.3, - }, - { - CONF_RECOMMENDED: False, - CONF_PROMPT: "Speak like a pirate", - CONF_TEMPERATURE: 0.3, - CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, - CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, - }, - ), - ( - { - CONF_RECOMMENDED: False, - CONF_PROMPT: "Speak like a pirate", - CONF_TEMPERATURE: 0.3, - CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, - CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, - }, - { - CONF_RECOMMENDED: True, - CONF_LLM_HASS_API: "assist", - CONF_PROMPT: "", - }, - { - CONF_RECOMMENDED: True, - CONF_LLM_HASS_API: "assist", - CONF_PROMPT: "", - }, - ), - ], -) -async def test_options_switching( - hass: HomeAssistant, - mock_config_entry, - mock_init_component, - current_options, - new_options, - expected_options, -) -> None: - """Test the options form.""" - hass.config_entries.async_update_entry(mock_config_entry, options=current_options) - options_flow = await hass.config_entries.options.async_init( - mock_config_entry.entry_id - ) - if current_options.get(CONF_RECOMMENDED) != new_options.get(CONF_RECOMMENDED): - options_flow = await hass.config_entries.options.async_configure( - options_flow["flow_id"], - { - **current_options, - CONF_RECOMMENDED: new_options[CONF_RECOMMENDED], - }, - ) - options = await hass.config_entries.options.async_configure( - options_flow["flow_id"], - new_options, - ) - await hass.async_block_till_done() - assert options["type"] is FlowResultType.CREATE_ENTRY - assert options["data"] == expected_options diff --git a/tests/components/anthropic/test_conversation.py b/tests/components/anthropic/test_conversation.py deleted file mode 100644 index 65ede877281..00000000000 --- a/tests/components/anthropic/test_conversation.py +++ /dev/null @@ -1,487 +0,0 @@ -"""Tests for the Anthropic integration.""" - -from unittest.mock import AsyncMock, Mock, patch - -from 
anthropic import RateLimitError -from anthropic.types import Message, TextBlock, ToolUseBlock, Usage -from freezegun import freeze_time -from httpx import URL, Request, Response -from syrupy.assertion import SnapshotAssertion -import voluptuous as vol - -from homeassistant.components import conversation -from homeassistant.components.conversation import trace -from homeassistant.const import CONF_LLM_HASS_API -from homeassistant.core import Context, HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import intent, llm -from homeassistant.setup import async_setup_component -from homeassistant.util import ulid - -from tests.common import MockConfigEntry - - -async def test_entity( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, -) -> None: - """Test entity properties.""" - state = hass.states.get("conversation.claude") - assert state - assert state.attributes["supported_features"] == 0 - - hass.config_entries.async_update_entry( - mock_config_entry, - options={ - **mock_config_entry.options, - CONF_LLM_HASS_API: "assist", - }, - ) - with patch( - "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock - ): - await hass.config_entries.async_reload(mock_config_entry.entry_id) - - state = hass.states.get("conversation.claude") - assert state - assert ( - state.attributes["supported_features"] - == conversation.ConversationEntityFeature.CONTROL - ) - - -async def test_error_handling( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component -) -> None: - """Test that the default prompt works.""" - with patch( - "anthropic.resources.messages.AsyncMessages.create", - new_callable=AsyncMock, - side_effect=RateLimitError( - message=None, - response=Response( - status_code=429, request=Request(method="POST", url=URL()) - ), - body=None, - ), - ): - result = await conversation.async_converse( - hass, "hello", None, Context(), agent_id="conversation.claude" - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR, result - assert result.response.error_code == "unknown", result - - -async def test_template_error( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Test that template error handling works.""" - hass.config_entries.async_update_entry( - mock_config_entry, - options={ - "prompt": "talk like a {% if True %}smarthome{% else %}pirate please.", - }, - ) - with patch( - "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock - ): - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - result = await conversation.async_converse( - hass, "hello", None, Context(), agent_id="conversation.claude" - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR, result - assert result.response.error_code == "unknown", result - - -async def test_template_variables( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Test that template variables work.""" - context = Context(user_id="12345") - mock_user = Mock() - mock_user.id = "12345" - mock_user.name = "Test User" - - hass.config_entries.async_update_entry( - mock_config_entry, - options={ - "prompt": ( - "The user name is {{ user_name }}. " - "The user id is {{ llm_context.context.user_id }}." 
- ), - }, - ) - with ( - patch( - "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock - ) as mock_create, - patch("homeassistant.auth.AuthManager.async_get_user", return_value=mock_user), - ): - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - result = await conversation.async_converse( - hass, "hello", None, context, agent_id="conversation.claude" - ) - - assert ( - result.response.response_type == intent.IntentResponseType.ACTION_DONE - ), result - assert "The user name is Test User." in mock_create.mock_calls[1][2]["system"] - assert "The user id is 12345." in mock_create.mock_calls[1][2]["system"] - - -async def test_conversation_agent( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, -) -> None: - """Test Anthropic Agent.""" - agent = conversation.agent_manager.async_get_agent(hass, "conversation.claude") - assert agent.supported_languages == "*" - - -@patch("homeassistant.components.anthropic.conversation.llm.AssistAPI._async_get_tools") -async def test_function_call( - mock_get_tools, - hass: HomeAssistant, - mock_config_entry_with_assist: MockConfigEntry, - mock_init_component, -) -> None: - """Test function call from the assistant.""" - agent_id = "conversation.claude" - context = Context() - - mock_tool = AsyncMock() - mock_tool.name = "test_tool" - mock_tool.description = "Test function" - mock_tool.parameters = vol.Schema( - {vol.Optional("param1", description="Test parameters"): str} - ) - mock_tool.async_call.return_value = "Test response" - - mock_get_tools.return_value = [mock_tool] - - def completion_result(*args, messages, **kwargs): - for message in messages: - for content in message["content"]: - if not isinstance(content, str) and content["type"] == "tool_use": - return Message( - type="message", - id="msg_1234567890ABCDEFGHIJKLMN", - content=[ - TextBlock( - type="text", - text="I have successfully called the function", - ) - ], - model="claude-3-5-sonnet-20240620", - role="assistant", - stop_reason="end_turn", - stop_sequence=None, - usage=Usage(input_tokens=8, output_tokens=12), - ) - - return Message( - type="message", - id="msg_1234567890ABCDEFGHIJKLMN", - content=[ - TextBlock(type="text", text="Certainly, calling it now!"), - ToolUseBlock( - type="tool_use", - id="toolu_0123456789AbCdEfGhIjKlM", - name="test_tool", - input={"param1": "test_value"}, - ), - ], - model="claude-3-5-sonnet-20240620", - role="assistant", - stop_reason="tool_use", - stop_sequence=None, - usage=Usage(input_tokens=8, output_tokens=12), - ) - - with ( - patch( - "anthropic.resources.messages.AsyncMessages.create", - new_callable=AsyncMock, - side_effect=completion_result, - ) as mock_create, - freeze_time("2024-06-03 23:00:00"), - ): - result = await conversation.async_converse( - hass, - "Please call the test function", - None, - context, - agent_id=agent_id, - ) - - assert "Today's date is 2024-06-03." 
in mock_create.mock_calls[1][2]["system"] - - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert mock_create.mock_calls[1][2]["messages"][2] == { - "role": "user", - "content": [ - { - "content": '"Test response"', - "tool_use_id": "toolu_0123456789AbCdEfGhIjKlM", - "type": "tool_result", - } - ], - } - mock_tool.async_call.assert_awaited_once_with( - hass, - llm.ToolInput( - tool_name="test_tool", - tool_args={"param1": "test_value"}, - ), - llm.LLMContext( - platform="anthropic", - context=context, - user_prompt="Please call the test function", - language="en", - assistant="conversation", - device_id=None, - ), - ) - - # Test Conversation tracing - traces = trace.async_get_traces() - assert traces - last_trace = traces[-1].as_dict() - trace_events = last_trace.get("events", []) - assert [event["event_type"] for event in trace_events] == [ - trace.ConversationTraceEventType.ASYNC_PROCESS, - trace.ConversationTraceEventType.AGENT_DETAIL, - trace.ConversationTraceEventType.TOOL_CALL, - ] - # AGENT_DETAIL event contains the raw prompt passed to the model - detail_event = trace_events[1] - assert "Answer in plain text" in detail_event["data"]["system"] - assert "Today's date is 2024-06-03." in trace_events[1]["data"]["system"] - - # Call it again, make sure we have updated prompt - with ( - patch( - "anthropic.resources.messages.AsyncMessages.create", - new_callable=AsyncMock, - side_effect=completion_result, - ) as mock_create, - freeze_time("2024-06-04 23:00:00"), - ): - result = await conversation.async_converse( - hass, - "Please call the test function", - None, - context, - agent_id=agent_id, - ) - - assert "Today's date is 2024-06-04." in mock_create.mock_calls[1][2]["system"] - # Test old assert message not updated - assert "Today's date is 2024-06-03." 
in trace_events[1]["data"]["system"] - - -@patch("homeassistant.components.anthropic.conversation.llm.AssistAPI._async_get_tools") -async def test_function_exception( - mock_get_tools, - hass: HomeAssistant, - mock_config_entry_with_assist: MockConfigEntry, - mock_init_component, -) -> None: - """Test function call with exception.""" - agent_id = "conversation.claude" - context = Context() - - mock_tool = AsyncMock() - mock_tool.name = "test_tool" - mock_tool.description = "Test function" - mock_tool.parameters = vol.Schema( - {vol.Optional("param1", description="Test parameters"): str} - ) - mock_tool.async_call.side_effect = HomeAssistantError("Test tool exception") - - mock_get_tools.return_value = [mock_tool] - - def completion_result(*args, messages, **kwargs): - for message in messages: - for content in message["content"]: - if not isinstance(content, str) and content["type"] == "tool_use": - return Message( - type="message", - id="msg_1234567890ABCDEFGHIJKLMN", - content=[ - TextBlock( - type="text", - text="There was an error calling the function", - ) - ], - model="claude-3-5-sonnet-20240620", - role="assistant", - stop_reason="end_turn", - stop_sequence=None, - usage=Usage(input_tokens=8, output_tokens=12), - ) - - return Message( - type="message", - id="msg_1234567890ABCDEFGHIJKLMN", - content=[ - TextBlock(type="text", text="Certainly, calling it now!"), - ToolUseBlock( - type="tool_use", - id="toolu_0123456789AbCdEfGhIjKlM", - name="test_tool", - input={"param1": "test_value"}, - ), - ], - model="claude-3-5-sonnet-20240620", - role="assistant", - stop_reason="tool_use", - stop_sequence=None, - usage=Usage(input_tokens=8, output_tokens=12), - ) - - with patch( - "anthropic.resources.messages.AsyncMessages.create", - new_callable=AsyncMock, - side_effect=completion_result, - ) as mock_create: - result = await conversation.async_converse( - hass, - "Please call the test function", - None, - context, - agent_id=agent_id, - ) - - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert mock_create.mock_calls[1][2]["messages"][2] == { - "role": "user", - "content": [ - { - "content": '{"error": "HomeAssistantError", "error_text": "Test tool exception"}', - "tool_use_id": "toolu_0123456789AbCdEfGhIjKlM", - "type": "tool_result", - } - ], - } - mock_tool.async_call.assert_awaited_once_with( - hass, - llm.ToolInput( - tool_name="test_tool", - tool_args={"param1": "test_value"}, - ), - llm.LLMContext( - platform="anthropic", - context=context, - user_prompt="Please call the test function", - language="en", - assistant="conversation", - device_id=None, - ), - ) - - -async def test_assist_api_tools_conversion( - hass: HomeAssistant, - mock_config_entry_with_assist: MockConfigEntry, - mock_init_component, -) -> None: - """Test that we are able to convert actual tools from Assist API.""" - for component in ( - "intent", - "todo", - "light", - "shopping_list", - "humidifier", - "climate", - "media_player", - "vacuum", - "cover", - "weather", - ): - assert await async_setup_component(hass, component, {}) - - agent_id = "conversation.claude" - with patch( - "anthropic.resources.messages.AsyncMessages.create", - new_callable=AsyncMock, - return_value=Message( - type="message", - id="msg_1234567890ABCDEFGHIJKLMN", - content=[TextBlock(type="text", text="Hello, how can I help you?")], - model="claude-3-5-sonnet-20240620", - role="assistant", - stop_reason="end_turn", - stop_sequence=None, - usage=Usage(input_tokens=8, output_tokens=12), - ), - ) as mock_create: - 
await conversation.async_converse( - hass, "hello", None, Context(), agent_id=agent_id - ) - - tools = mock_create.mock_calls[0][2]["tools"] - assert tools - - -async def test_unknown_hass_api( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - mock_init_component, -) -> None: - """Test when we reference an API that no longer exists.""" - hass.config_entries.async_update_entry( - mock_config_entry, - options={ - **mock_config_entry.options, - CONF_LLM_HASS_API: "non-existing", - }, - ) - - result = await conversation.async_converse( - hass, "hello", None, Context(), agent_id="conversation.claude" - ) - - assert result == snapshot - - -@patch("anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock) -async def test_conversation_id( - mock_create, - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, -) -> None: - """Test conversation ID is honored.""" - result = await conversation.async_converse( - hass, "hello", None, None, agent_id="conversation.claude" - ) - - conversation_id = result.conversation_id - - result = await conversation.async_converse( - hass, "hello", conversation_id, None, agent_id="conversation.claude" - ) - - assert result.conversation_id == conversation_id - - unknown_id = ulid.ulid() - - result = await conversation.async_converse( - hass, "hello", unknown_id, None, agent_id="conversation.claude" - ) - - assert result.conversation_id != unknown_id - - result = await conversation.async_converse( - hass, "hello", "koala", None, agent_id="conversation.claude" - ) - - assert result.conversation_id == "koala" diff --git a/tests/components/anthropic/test_init.py b/tests/components/anthropic/test_init.py deleted file mode 100644 index ee87bb708d0..00000000000 --- a/tests/components/anthropic/test_init.py +++ /dev/null @@ -1,64 +0,0 @@ -"""Tests for the Anthropic integration.""" - -from unittest.mock import AsyncMock, patch - -from anthropic import ( - APIConnectionError, - APITimeoutError, - AuthenticationError, - BadRequestError, -) -from httpx import URL, Request, Response -import pytest - -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry - - -@pytest.mark.parametrize( - ("side_effect", "error"), - [ - (APIConnectionError(request=None), "Connection error"), - (APITimeoutError(request=None), "Request timed out"), - ( - BadRequestError( - message="Your credit balance is too low to access the Claude API. 
Please go to Plans & Billing to upgrade or purchase credits.", - response=Response( - status_code=400, - request=Request(method="POST", url=URL()), - ), - body={"type": "error", "error": {"type": "invalid_request_error"}}, - ), - "anthropic integration not ready yet: Your credit balance is too low to access the Claude API", - ), - ( - AuthenticationError( - message="invalid x-api-key", - response=Response( - status_code=401, - request=Request(method="POST", url=URL()), - ), - body={"type": "error", "error": {"type": "authentication_error"}}, - ), - "Invalid API key", - ), - ], -) -async def test_init_error( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - caplog: pytest.LogCaptureFixture, - side_effect, - error, -) -> None: - """Test initialization errors.""" - with patch( - "anthropic.resources.messages.AsyncMessages.create", - new_callable=AsyncMock, - side_effect=side_effect, - ): - assert await async_setup_component(hass, "anthropic", {}) - await hass.async_block_till_done() - assert error in caplog.text diff --git a/tests/components/aosmith/conftest.py b/tests/components/aosmith/conftest.py index 31e36332a89..d67ae1ea627 100644 --- a/tests/components/aosmith/conftest.py +++ b/tests/components/aosmith/conftest.py @@ -1,6 +1,5 @@ """Common fixtures for the A. O. Smith tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from py_aosmith import AOSmithAPIClient @@ -10,10 +9,12 @@ from py_aosmith.models import ( DeviceType, EnergyUseData, EnergyUseHistoryEntry, + HotWaterStatus, OperationMode, SupportedOperationModeInfo, ) import pytest +from typing_extensions import Generator from homeassistant.components.aosmith.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD @@ -92,7 +93,7 @@ def build_device_fixture( temperature_setpoint_pending=setpoint_pending, temperature_setpoint_previous=130, temperature_setpoint_maximum=130, - hot_water_status=90, + hot_water_status=HotWaterStatus.LOW, ), ) diff --git a/tests/components/aosmith/fixtures/get_all_device_info.json b/tests/components/aosmith/fixtures/get_all_device_info.json index 27bd5b24a16..4d19a80a3ad 100644 --- a/tests/components/aosmith/fixtures/get_all_device_info.json +++ b/tests/components/aosmith/fixtures/get_all_device_info.json @@ -103,7 +103,7 @@ } ], "firmwareVersion": "2.14", - "hotWaterStatus": 10, + "hotWaterStatus": "HIGH", "isAdvancedLoadUpMore": false, "isCtaUcmPresent": false, "isDemandResponsePaused": false, diff --git a/tests/components/aosmith/snapshots/test_device.ambr b/tests/components/aosmith/snapshots/test_device.ambr index dec33a92fe2..d563090ce9d 100644 --- a/tests/components/aosmith/snapshots/test_device.ambr +++ b/tests/components/aosmith/snapshots/test_device.ambr @@ -21,7 +21,6 @@ }), 'manufacturer': 'A. O. 
Smith', 'model': 'HPTS-50 200 202172000', - 'model_id': None, 'name': 'My water heater', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/aosmith/snapshots/test_diagnostics.ambr b/tests/components/aosmith/snapshots/test_diagnostics.ambr index e2cf6c6b24b..8704cdaa214 100644 --- a/tests/components/aosmith/snapshots/test_diagnostics.ambr +++ b/tests/components/aosmith/snapshots/test_diagnostics.ambr @@ -43,7 +43,7 @@ 'error': '', 'firmwareVersion': '2.14', 'heaterSsid': '**REDACTED**', - 'hotWaterStatus': 10, + 'hotWaterStatus': 'HIGH', 'isAdvancedLoadUpMore': False, 'isCtaUcmPresent': False, 'isDemandResponsePaused': False, diff --git a/tests/components/aosmith/snapshots/test_sensor.ambr b/tests/components/aosmith/snapshots/test_sensor.ambr index 563b52f6df7..7aae9713037 100644 --- a/tests/components/aosmith/snapshots/test_sensor.ambr +++ b/tests/components/aosmith/snapshots/test_sensor.ambr @@ -58,7 +58,13 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'options': list([ + 'low', + 'medium', + 'high', + ]), + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -75,7 +81,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Hot water availability', 'platform': 'aosmith', @@ -83,20 +89,25 @@ 'supported_features': 0, 'translation_key': 'hot_water_availability', 'unique_id': 'hot_water_availability_junctionId', - 'unit_of_measurement': '%', + 'unit_of_measurement': None, }) # --- # name: test_state[sensor.my_water_heater_hot_water_availability-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'enum', 'friendly_name': 'My water heater Hot water availability', - 'unit_of_measurement': '%', + 'options': list([ + 'low', + 'medium', + 'high', + ]), }), 'context': , 'entity_id': 'sensor.my_water_heater_hot_water_availability', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '90', + 'state': 'low', }) # --- diff --git a/tests/components/aosmith/test_sensor.py b/tests/components/aosmith/test_sensor.py index 1dc632b5e84..a77e4e4576d 100644 --- a/tests/components/aosmith/test_sensor.py +++ b/tests/components/aosmith/test_sensor.py @@ -1,10 +1,10 @@ """Tests for the sensor platform of the A. O. Smith integration.""" -from collections.abc import AsyncGenerator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import AsyncGenerator from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -14,7 +14,7 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.fixture(autouse=True) -async def platforms() -> AsyncGenerator[None]: +async def platforms() -> AsyncGenerator[list[str]]: """Return the platforms to be loaded for this test.""" with patch("homeassistant.components.aosmith.PLATFORMS", [Platform.SENSOR]): yield diff --git a/tests/components/aosmith/test_water_heater.py b/tests/components/aosmith/test_water_heater.py index 69ad8004fc2..ab4a4a33bca 100644 --- a/tests/components/aosmith/test_water_heater.py +++ b/tests/components/aosmith/test_water_heater.py @@ -1,11 +1,11 @@ """Tests for the water heater platform of the A. O. 
Smith integration.""" -from collections.abc import AsyncGenerator from unittest.mock import MagicMock, patch from py_aosmith.models import OperationMode import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import AsyncGenerator from homeassistant.components.water_heater import ( ATTR_AWAY_MODE, @@ -29,7 +29,7 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.fixture(autouse=True) -async def platforms() -> AsyncGenerator[None]: +async def platforms() -> AsyncGenerator[list[str]]: """Return the platforms to be loaded for this test.""" with patch("homeassistant.components.aosmith.PLATFORMS", [Platform.WATER_HEATER]): yield diff --git a/tests/components/apache_kafka/test_init.py b/tests/components/apache_kafka/test_init.py index cffe08ffd4a..2b702046054 100644 --- a/tests/components/apache_kafka/test_init.py +++ b/tests/components/apache_kafka/test_init.py @@ -3,9 +3,8 @@ from __future__ import annotations from asyncio import AbstractEventLoop -from collections.abc import Callable, Generator +from collections.abc import Callable from dataclasses import dataclass -from typing import Any from unittest.mock import patch import pytest @@ -42,7 +41,7 @@ class MockKafkaClient: @pytest.fixture(name="mock_client") -def mock_client_fixture() -> Generator[MockKafkaClient]: +def mock_client_fixture(): """Mock the apache kafka client.""" with ( patch(f"{PRODUCER_PATH}.start") as start, @@ -90,7 +89,7 @@ async def test_full_config(hass: HomeAssistant, mock_client: MockKafkaClient) -> mock_client.start.assert_called_once() -async def _setup(hass: HomeAssistant, filter_config: dict[str, Any]) -> None: +async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {apache_kafka.DOMAIN: {"filter": filter_config}} config[apache_kafka.DOMAIN].update(MIN_CONFIG) @@ -99,9 +98,7 @@ async def _setup(hass: HomeAssistant, filter_config: dict[str, Any]) -> None: await hass.async_block_till_done() -async def _run_filter_tests( - hass: HomeAssistant, tests: list[FilterTest], mock_client: MockKafkaClient -) -> None: +async def _run_filter_tests(hass, tests, mock_client): """Run a series of filter tests on apache kafka.""" for test in tests: hass.states.async_set(test.id, STATE_ON) diff --git a/tests/components/apcupsd/__init__.py b/tests/components/apcupsd/__init__.py index eb8cd594ad7..b75f3eab3af 100644 --- a/tests/components/apcupsd/__init__.py +++ b/tests/components/apcupsd/__init__.py @@ -4,7 +4,7 @@ from collections import OrderedDict from typing import Final from unittest.mock import patch -from homeassistant.components.apcupsd.const import DOMAIN +from homeassistant.components.apcupsd import DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant diff --git a/tests/components/apcupsd/test_binary_sensor.py b/tests/components/apcupsd/test_binary_sensor.py index 02351109603..7616a960b21 100644 --- a/tests/components/apcupsd/test_binary_sensor.py +++ b/tests/components/apcupsd/test_binary_sensor.py @@ -1,7 +1,5 @@ """Test binary sensors of APCUPSd integration.""" -import pytest - from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.util import slugify @@ -33,22 +31,3 @@ async def test_no_binary_sensor(hass: HomeAssistant) -> None: device_slug = slugify(MOCK_STATUS["UPSNAME"]) state = hass.states.get(f"binary_sensor.{device_slug}_online_status") assert state is None - - 
-@pytest.mark.parametrize( - ("override", "expected"), - [ - ("0x008", "on"), - ("0x02040010 Status Flag", "off"), - ], -) -async def test_statflag(hass: HomeAssistant, override: str, expected: str) -> None: - """Test binary sensor for different STATFLAG values.""" - status = MOCK_STATUS.copy() - status["STATFLAG"] = override - await async_init_integration(hass, status=status) - - device_slug = slugify(MOCK_STATUS["UPSNAME"]) - assert ( - hass.states.get(f"binary_sensor.{device_slug}_online_status").state == expected - ) diff --git a/tests/components/apcupsd/test_config_flow.py b/tests/components/apcupsd/test_config_flow.py index 88594260579..2888771eb01 100644 --- a/tests/components/apcupsd/test_config_flow.py +++ b/tests/components/apcupsd/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest -from homeassistant.components.apcupsd.const import DOMAIN +from homeassistant.components.apcupsd import DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PORT, CONF_SOURCE from homeassistant.core import HomeAssistant diff --git a/tests/components/apcupsd/test_sensor.py b/tests/components/apcupsd/test_sensor.py index 0fe7f12ad27..0c7d174a5e8 100644 --- a/tests/components/apcupsd/test_sensor.py +++ b/tests/components/apcupsd/test_sensor.py @@ -15,7 +15,6 @@ from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, STATE_UNAVAILABLE, - STATE_UNKNOWN, UnitOfElectricPotential, UnitOfPower, UnitOfTime, @@ -26,7 +25,7 @@ from homeassistant.setup import async_setup_component from homeassistant.util import slugify from homeassistant.util.dt import utcnow -from . import MOCK_MINIMAL_STATUS, MOCK_STATUS, async_init_integration +from . import MOCK_STATUS, async_init_integration from tests.common import async_fire_time_changed @@ -238,34 +237,3 @@ async def test_multiple_manual_update_entity(hass: HomeAssistant) -> None: blocking=True, ) assert mock_request_status.call_count == 1 - - -async def test_sensor_unknown(hass: HomeAssistant) -> None: - """Test if our integration can properly certain sensors as unknown when it becomes so.""" - await async_init_integration(hass, status=MOCK_MINIMAL_STATUS) - - assert hass.states.get("sensor.mode").state == MOCK_MINIMAL_STATUS["UPSMODE"] - # Last self test sensor should be added even if our status does not report it initially (it is - # a sensor that appears only after a periodical or manual self test is performed). - assert hass.states.get("sensor.last_self_test") is not None - assert hass.states.get("sensor.last_self_test").state == STATE_UNKNOWN - - # Simulate an event (a self test) such that "LASTSTEST" field is being reported, the state of - # the sensor should be properly updated with the corresponding value. - with patch("aioapcaccess.request_status") as mock_request_status: - mock_request_status.return_value = MOCK_MINIMAL_STATUS | { - "LASTSTEST": "1970-01-01 00:00:00 0000" - } - future = utcnow() + timedelta(minutes=2) - async_fire_time_changed(hass, future) - await hass.async_block_till_done() - assert hass.states.get("sensor.last_self_test").state == "1970-01-01 00:00:00 0000" - - # Simulate another event (e.g., daemon restart) such that "LASTSTEST" is no longer reported. - with patch("aioapcaccess.request_status") as mock_request_status: - mock_request_status.return_value = MOCK_MINIMAL_STATUS - future = utcnow() + timedelta(minutes=2) - async_fire_time_changed(hass, future) - await hass.async_block_till_done() - # The state should become unknown again. 
- assert hass.states.get("sensor.last_self_test").state == STATE_UNKNOWN diff --git a/tests/components/api/test_init.py b/tests/components/api/test_init.py index abce262fd12..a1453315dbf 100644 --- a/tests/components/api/test_init.py +++ b/tests/components/api/test_init.py @@ -3,7 +3,6 @@ import asyncio from http import HTTPStatus import json -from typing import Any from unittest.mock import patch from aiohttp import ServerDisconnectedError, web @@ -356,67 +355,6 @@ async def test_api_call_service_with_data( assert state["attributes"] == {"data": 1} -SERVICE_DICT = {"changed_states": [], "service_response": {"foo": "bar"}} -RESP_REQUIRED = { - "message": ( - "Service call requires responses but caller did not ask for " - "responses. Add ?return_response to query parameters." - ) -} -RESP_UNSUPPORTED = { - "message": "Service does not support responses. Remove return_response from request." -} - - -@pytest.mark.parametrize( - ( - "supports_response", - "requested_response", - "expected_number_of_service_calls", - "expected_status", - "expected_response", - ), - [ - (ha.SupportsResponse.ONLY, True, 1, HTTPStatus.OK, SERVICE_DICT), - (ha.SupportsResponse.ONLY, False, 0, HTTPStatus.BAD_REQUEST, RESP_REQUIRED), - (ha.SupportsResponse.OPTIONAL, True, 1, HTTPStatus.OK, SERVICE_DICT), - (ha.SupportsResponse.OPTIONAL, False, 1, HTTPStatus.OK, []), - (ha.SupportsResponse.NONE, True, 0, HTTPStatus.BAD_REQUEST, RESP_UNSUPPORTED), - (ha.SupportsResponse.NONE, False, 1, HTTPStatus.OK, []), - ], -) -async def test_api_call_service_returns_response_requested_response( - hass: HomeAssistant, - mock_api_client: TestClient, - supports_response: ha.SupportsResponse, - requested_response: bool, - expected_number_of_service_calls: int, - expected_status: int, - expected_response: Any, -) -> None: - """Test if the API allows us to call a service.""" - test_value = [] - - @ha.callback - def listener(service_call): - """Record that our service got called.""" - test_value.append(1) - return {"foo": "bar"} - - hass.services.async_register( - "test_domain", "test_service", listener, supports_response=supports_response - ) - - resp = await mock_api_client.post( - "/api/services/test_domain/test_service" - + ("?return_response" if requested_response else "") - ) - assert resp.status == expected_status - await hass.async_block_till_done() - assert len(test_value) == expected_number_of_service_calls - assert await resp.json() == expected_response - - async def test_api_call_service_client_closed( hass: HomeAssistant, mock_api_client: TestClient ) -> None: @@ -832,43 +770,4 @@ async def test_api_core_state(hass: HomeAssistant, mock_api_client: TestClient) resp = await mock_api_client.get("/api/core/state") assert resp.status == HTTPStatus.OK json = await resp.json() - assert json == { - "state": "RUNNING", - "recorder_state": {"migration_in_progress": False, "migration_is_live": False}, - } - - -@pytest.mark.parametrize( - ("migration_in_progress", "migration_is_live"), - [ - (False, False), - (False, True), - (True, False), - (True, True), - ], -) -async def test_api_core_state_recorder_migrating( - hass: HomeAssistant, - mock_api_client: TestClient, - migration_in_progress: bool, - migration_is_live: bool, -) -> None: - """Test getting core status.""" - with ( - patch( - "homeassistant.helpers.recorder.async_migration_in_progress", - return_value=migration_in_progress, - ), - patch( - "homeassistant.helpers.recorder.async_migration_is_live", - return_value=migration_is_live, - ), - ): - resp = await 
mock_api_client.get("/api/core/state") - assert resp.status == HTTPStatus.OK - json = await resp.json() - expected_recorder_state = { - "migration_in_progress": migration_in_progress, - "migration_is_live": migration_is_live, - } - assert json == {"state": "RUNNING", "recorder_state": expected_recorder_state} + assert json["state"] == "RUNNING" diff --git a/tests/components/apple_tv/common.py b/tests/components/apple_tv/common.py index 8a81536c792..ddb8c1348d9 100644 --- a/tests/components/apple_tv/common.py +++ b/tests/components/apple_tv/common.py @@ -1,7 +1,5 @@ """Test code shared between test files.""" -from typing import Any - from pyatv import conf, const, interface from pyatv.const import Protocol @@ -9,7 +7,7 @@ from pyatv.const import Protocol class MockPairingHandler(interface.PairingHandler): """Mock for PairingHandler in pyatv.""" - def __init__(self, *args: Any) -> None: + def __init__(self, *args): """Initialize a new MockPairingHandler.""" super().__init__(*args) self.pin_code = None diff --git a/tests/components/apple_tv/conftest.py b/tests/components/apple_tv/conftest.py index 78982a8d51c..36061924db5 100644 --- a/tests/components/apple_tv/conftest.py +++ b/tests/components/apple_tv/conftest.py @@ -1,12 +1,12 @@ """Fixtures for component.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pyatv import conf from pyatv.const import PairingRequirement, Protocol from pyatv.support import http import pytest +from typing_extensions import Generator from .common import MockPairingHandler, airplay_service, create_conf, mrp_service diff --git a/tests/components/apple_tv/test_config_flow.py b/tests/components/apple_tv/test_config_flow.py index 4567bd32582..b8f49e7c8f5 100644 --- a/tests/components/apple_tv/test_config_flow.py +++ b/tests/components/apple_tv/test_config_flow.py @@ -1,12 +1,12 @@ """Test config flow.""" -from collections.abc import Generator from ipaddress import IPv4Address, ip_address from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch from pyatv import exceptions from pyatv.const import PairingRequirement, Protocol import pytest +from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components import zeroconf @@ -16,7 +16,6 @@ from homeassistant.components.apple_tv.const import ( CONF_START_OFF, DOMAIN, ) -from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -1190,17 +1189,18 @@ async def test_reconfigure_update_credentials(hass: HomeAssistant) -> None: ) config_entry.add_to_hass(hass) - result = await config_entry.start_reauth_flow(hass, data={"name": "apple tv"}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": "reauth"}, + data={"identifier": "mrpid", "name": "apple tv"}, + ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {}, ) assert result2["type"] is FlowResultType.FORM - assert result2["description_placeholders"] == { - CONF_NAME: "Mock Title", - "protocol": "MRP", - } + assert result2["description_placeholders"] == {"protocol": "MRP"} result3 = await hass.config_entries.flow.async_configure( result["flow_id"], {"pin": 1111} diff --git a/tests/components/application_credentials/test_init.py b/tests/components/application_credentials/test_init.py index b72d9653c2d..c427b1d07e0 100644 --- a/tests/components/application_credentials/test_init.py +++ 
b/tests/components/application_credentials/test_init.py @@ -2,12 +2,13 @@ from __future__ import annotations -from collections.abc import Callable, Generator +from collections.abc import Callable import logging from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest +from typing_extensions import Generator from homeassistant import config_entries, data_entry_flow from homeassistant.components.application_credentials import ( @@ -124,12 +125,7 @@ def config_flow_handler( class OAuthFixture: """Fixture to facilitate testing an OAuth flow.""" - def __init__( - self, - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - ) -> None: + def __init__(self, hass, hass_client, aioclient_mock): """Initialize OAuthFixture.""" self.hass = hass self.hass_client = hass_client @@ -189,7 +185,7 @@ async def oauth_fixture( class Client: """Test client with helper methods for application credentials websocket.""" - def __init__(self, client) -> None: + def __init__(self, client): """Initialize Client.""" self.client = client self.id = 0 @@ -423,10 +419,6 @@ async def test_import_named_credential( ] -@pytest.mark.parametrize( - "ignore_translations", - ["component.fake_integration.config.abort.missing_credentials"], -) async def test_config_flow_no_credentials(hass: HomeAssistant) -> None: """Test config flow base case with no credentials registered.""" result = await hass.config_entries.flow.async_init( @@ -436,10 +428,6 @@ async def test_config_flow_no_credentials(hass: HomeAssistant) -> None: assert result.get("reason") == "missing_credentials" -@pytest.mark.parametrize( - "ignore_translations", - ["component.fake_integration.config.abort.missing_credentials"], -) async def test_config_flow_other_domain( hass: HomeAssistant, ws_client: ClientFixture, @@ -567,10 +555,6 @@ async def test_config_flow_multiple_entries( ) -@pytest.mark.parametrize( - "ignore_translations", - ["component.fake_integration.config.abort.missing_credentials"], -) async def test_config_flow_create_delete_credential( hass: HomeAssistant, ws_client: ClientFixture, @@ -616,10 +600,6 @@ async def test_config_flow_with_config_credential( assert result["data"].get("auth_implementation") == TEST_DOMAIN -@pytest.mark.parametrize( - "ignore_translations", - ["component.fake_integration.config.abort.missing_configuration"], -) @pytest.mark.parametrize("mock_application_credentials_integration", [None]) async def test_import_without_setup(hass: HomeAssistant, config_credential) -> None: """Test import of credentials without setting up the integration.""" @@ -635,10 +615,6 @@ async def test_import_without_setup(hass: HomeAssistant, config_credential) -> N assert result.get("reason") == "missing_configuration" -@pytest.mark.parametrize( - "ignore_translations", - ["component.fake_integration.config.abort.missing_configuration"], -) @pytest.mark.parametrize("mock_application_credentials_integration", [None]) async def test_websocket_without_platform( hass: HomeAssistant, ws_client: ClientFixture diff --git a/tests/components/apprise/test_notify.py b/tests/components/apprise/test_notify.py index d73fa72d6c7..7d37d7a5d99 100644 --- a/tests/components/apprise/test_notify.py +++ b/tests/components/apprise/test_notify.py @@ -1,27 +1,14 @@ """The tests for the apprise notification platform.""" -import logging from pathlib import Path from unittest.mock import MagicMock, patch -import pytest - from homeassistant.core import HomeAssistant from homeassistant.setup import 
async_setup_component BASE_COMPONENT = "notify" -@pytest.fixture(autouse=True) -def reset_log_level(): - """Set and reset log level after each test case.""" - logger = logging.getLogger("apprise") - orig_level = logger.level - logger.setLevel(logging.DEBUG) - yield - logger.setLevel(orig_level) - - async def test_apprise_config_load_fail01(hass: HomeAssistant) -> None: """Test apprise configuration failures 1.""" diff --git a/tests/components/aprilaire/test_config_flow.py b/tests/components/aprilaire/test_config_flow.py index e4b7c167256..c9cba2b3fd6 100644 --- a/tests/components/aprilaire/test_config_flow.py +++ b/tests/components/aprilaire/test_config_flow.py @@ -104,7 +104,7 @@ async def test_config_flow_data(client: AprilaireClient, hass: HomeAssistant) -> abort_if_unique_id_configured_mock.assert_called_once() create_entry_mock.assert_called_once_with( - title="AprilAire", + title="Aprilaire", data={ "host": "localhost", "port": 7000, diff --git a/tests/components/aprs/test_device_tracker.py b/tests/components/aprs/test_device_tracker.py index 4142195b0b9..4cdff41598f 100644 --- a/tests/components/aprs/test_device_tracker.py +++ b/tests/components/aprs/test_device_tracker.py @@ -1,11 +1,11 @@ """Test APRS device tracker.""" -from collections.abc import Generator from unittest.mock import MagicMock, Mock, patch import aprslib from aprslib import IS import pytest +from typing_extensions import Generator from homeassistant.components.aprs import device_tracker from homeassistant.core import HomeAssistant diff --git a/tests/components/apsystems/conftest.py b/tests/components/apsystems/conftest.py index 0feccf21578..cd04346c070 100644 --- a/tests/components/apsystems/conftest.py +++ b/tests/components/apsystems/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the APsystems Local API tests.""" -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, patch -from APsystemsEZ1 import ReturnAlarmInfo, ReturnDeviceInfo, ReturnOutputData +from APsystemsEZ1 import ReturnDeviceInfo, ReturnOutputData import pytest +from typing_extensions import Generator from homeassistant.components.apsystems.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS @@ -23,7 +23,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_apsystems() -> Generator[MagicMock]: +def mock_apsystems() -> Generator[AsyncMock, None, None]: """Mock APSystems lib.""" with ( patch( @@ -52,13 +52,6 @@ def mock_apsystems() -> Generator[MagicMock]: e2=6.0, te2=7.0, ) - mock_api.get_alarm_info.return_value = ReturnAlarmInfo( - offgrid=False, - shortcircuit_1=True, - shortcircuit_2=False, - operating=False, - ) - mock_api.get_device_power_status.return_value = True yield mock_api diff --git a/tests/components/apsystems/snapshots/test_binary_sensor.ambr b/tests/components/apsystems/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 0875c88976b..00000000000 --- a/tests/components/apsystems/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,189 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[binary_sensor.mock_title_dc_1_short_circuit_error_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_title_dc_1_short_circuit_error_status', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'DC 1 short circuit error status', - 'platform': 'apsystems', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dc_1_short_circuit_error_status', - 'unique_id': 'MY_SERIAL_NUMBER_dc_1_short_circuit_error_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_dc_1_short_circuit_error_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Mock Title DC 1 short circuit error status', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_dc_1_short_circuit_error_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_dc_2_short_circuit_error_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_title_dc_2_short_circuit_error_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'DC 2 short circuit error status', - 'platform': 'apsystems', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dc_2_short_circuit_error_status', - 'unique_id': 'MY_SERIAL_NUMBER_dc_2_short_circuit_error_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_dc_2_short_circuit_error_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Mock Title DC 2 short circuit error status', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_dc_2_short_circuit_error_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_off_grid_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_title_off_grid_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off grid status', - 'platform': 'apsystems', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'off_grid_status', - 'unique_id': 'MY_SERIAL_NUMBER_off_grid_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_off_grid_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Mock Title Off grid status', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_off_grid_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_output_fault_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 
'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_title_output_fault_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Output fault status', - 'platform': 'apsystems', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'output_fault_status', - 'unique_id': 'MY_SERIAL_NUMBER_output_fault_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_output_fault_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Mock Title Output fault status', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_output_fault_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/apsystems/snapshots/test_switch.ambr b/tests/components/apsystems/snapshots/test_switch.ambr deleted file mode 100644 index 6daa9fd6e14..00000000000 --- a/tests/components/apsystems/snapshots/test_switch.ambr +++ /dev/null @@ -1,48 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[switch.mock_title_inverter_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.mock_title_inverter_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Inverter status', - 'platform': 'apsystems', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'inverter_status', - 'unique_id': 'MY_SERIAL_NUMBER_inverter_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[switch.mock_title_inverter_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Mock Title Inverter status', - }), - 'context': , - 'entity_id': 'switch.mock_title_inverter_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/apsystems/test_binary_sensor.py b/tests/components/apsystems/test_binary_sensor.py deleted file mode 100644 index 0c6fbffc93c..00000000000 --- a/tests/components/apsystems/test_binary_sensor.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Test the APSystem binary sensor module.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_apsystems: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch( - "homeassistant.components.apsystems.PLATFORMS", - [Platform.BINARY_SENSOR], - ): - await setup_integration(hass, mock_config_entry) - await snapshot_platform( - hass, entity_registry, snapshot, mock_config_entry.entry_id - ) diff --git a/tests/components/apsystems/test_config_flow.py b/tests/components/apsystems/test_config_flow.py index 3d78524a529..e3fcdf67dcc 100644 --- a/tests/components/apsystems/test_config_flow.py +++ b/tests/components/apsystems/test_config_flow.py @@ -4,7 +4,7 @@ from unittest.mock import AsyncMock from homeassistant.components.apsystems.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT +from homeassistant.const import CONF_IP_ADDRESS from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -27,24 +27,6 @@ async def test_form_create_success( assert result["data"].get(CONF_IP_ADDRESS) == "127.0.0.1" -async def test_form_create_success_custom_port( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_apsystems: AsyncMock -) -> None: - """Test we handle creating with custom port with success.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={ - CONF_IP_ADDRESS: "127.0.0.1", - CONF_PORT: 8042, - }, - ) - assert result["result"].unique_id == "MY_SERIAL_NUMBER" - assert result.get("type") is FlowResultType.CREATE_ENTRY - assert result["data"].get(CONF_IP_ADDRESS) == "127.0.0.1" - assert result["data"].get(CONF_PORT) == 8042 - - async def test_form_cannot_connect_and_recover( hass: HomeAssistant, mock_apsystems: AsyncMock, mock_setup_entry: AsyncMock ) -> None: @@ -75,33 +57,6 @@ async def test_form_cannot_connect_and_recover( assert result2["data"].get(CONF_IP_ADDRESS) == "127.0.0.1" -async def test_form_cannot_connect_and_recover_custom_port( - hass: HomeAssistant, mock_apsystems: AsyncMock, mock_setup_entry: AsyncMock -) -> None: - """Test we handle cannot connect error but recovering with custom port.""" - - mock_apsystems.get_device_info.side_effect = TimeoutError - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={CONF_IP_ADDRESS: "127.0.0.2", CONF_PORT: 8042}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - mock_apsystems.get_device_info.side_effect = None - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_IP_ADDRESS: "127.0.0.1", CONF_PORT: 8042}, - ) - assert result2["result"].unique_id == "MY_SERIAL_NUMBER" - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2["data"].get(CONF_IP_ADDRESS) == "127.0.0.1" - assert result2["data"].get(CONF_PORT) == 8042 - - async def test_form_unique_id_already_configured( hass: HomeAssistant, mock_setup_entry: AsyncMock, diff --git a/tests/components/apsystems/test_switch.py b/tests/components/apsystems/test_switch.py deleted file mode 100644 index afd889fe958..00000000000 --- a/tests/components/apsystems/test_switch.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Test the APSystem switch module.""" - -from unittest.mock import AsyncMock, 
patch - -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_apsystems: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch( - "homeassistant.components.apsystems.PLATFORMS", - [Platform.SWITCH], - ): - await setup_integration(hass, mock_config_entry) - await snapshot_platform( - hass, entity_registry, snapshot, mock_config_entry.entry_id - ) diff --git a/tests/components/aquacell/__init__.py b/tests/components/aquacell/__init__.py index 9190172145a..c54bc539496 100644 --- a/tests/components/aquacell/__init__.py +++ b/tests/components/aquacell/__init__.py @@ -1,9 +1,6 @@ """Tests for the Aquacell integration.""" -from aioaquacell import Brand - from homeassistant.components.aquacell.const import ( - CONF_BRAND, CONF_REFRESH_TOKEN, CONF_REFRESH_TOKEN_CREATION_TIME, ) @@ -17,20 +14,11 @@ TEST_CONFIG_ENTRY = { CONF_PASSWORD: "test-password", CONF_REFRESH_TOKEN: "refresh-token", CONF_REFRESH_TOKEN_CREATION_TIME: 0, - CONF_BRAND: Brand.AQUACELL, -} - -TEST_CONFIG_ENTRY_WITHOUT_BRAND = { - CONF_EMAIL: "test@test.com", - CONF_PASSWORD: "test-password", - CONF_REFRESH_TOKEN: "refresh-token", - CONF_REFRESH_TOKEN_CREATION_TIME: 0, } TEST_USER_INPUT = { CONF_EMAIL: "test@test.com", CONF_PASSWORD: "test-password", - CONF_BRAND: "aquacell", } DSN = "DSN" diff --git a/tests/components/aquacell/conftest.py b/tests/components/aquacell/conftest.py index 443f7da77ce..db27f51dc03 100644 --- a/tests/components/aquacell/conftest.py +++ b/tests/components/aquacell/conftest.py @@ -2,7 +2,7 @@ from collections.abc import Generator from datetime import datetime -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, patch from aioaquacell import AquacellApi, Softener import pytest @@ -13,13 +13,13 @@ from homeassistant.components.aquacell.const import ( ) from homeassistant.const import CONF_EMAIL -from . import TEST_CONFIG_ENTRY, TEST_CONFIG_ENTRY_WITHOUT_BRAND +from . 
import TEST_CONFIG_ENTRY from tests.common import MockConfigEntry, load_json_array_fixture @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: +def mock_setup_entry() -> Generator[AsyncMock, None, None]: """Override async_setup_entry.""" with patch( "homeassistant.components.aquacell.async_setup_entry", return_value=True @@ -28,7 +28,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_aquacell_api() -> Generator[MagicMock]: +def mock_aquacell_api() -> Generator[AsyncMock, None, None]: """Build a fixture for the Aquacell API that authenticates successfully and returns a single softener.""" with ( patch( @@ -76,17 +76,3 @@ def mock_config_entry() -> MockConfigEntry: CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), }, ) - - -@pytest.fixture -def mock_config_entry_without_brand() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title="Aquacell", - unique_id=TEST_CONFIG_ENTRY[CONF_EMAIL], - data={ - **TEST_CONFIG_ENTRY_WITHOUT_BRAND, - CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), - }, - ) diff --git a/tests/components/aquacell/test_config_flow.py b/tests/components/aquacell/test_config_flow.py index f677b3f8348..b6bcb82293c 100644 --- a/tests/components/aquacell/test_config_flow.py +++ b/tests/components/aquacell/test_config_flow.py @@ -5,11 +5,7 @@ from unittest.mock import AsyncMock from aioaquacell import ApiException, AuthenticationFailed import pytest -from homeassistant.components.aquacell.const import ( - CONF_BRAND, - CONF_REFRESH_TOKEN, - DOMAIN, -) +from homeassistant.components.aquacell.const import CONF_REFRESH_TOKEN, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant @@ -55,9 +51,7 @@ async def test_full_flow( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" assert result["errors"] == {} result2 = await hass.config_entries.flow.async_configure( @@ -71,7 +65,6 @@ async def test_full_flow( assert result2["data"][CONF_EMAIL] == TEST_CONFIG_ENTRY[CONF_EMAIL] assert result2["data"][CONF_PASSWORD] == TEST_CONFIG_ENTRY[CONF_PASSWORD] assert result2["data"][CONF_REFRESH_TOKEN] == TEST_CONFIG_ENTRY[CONF_REFRESH_TOKEN] - assert result2["data"][CONF_BRAND] == TEST_CONFIG_ENTRY[CONF_BRAND] assert len(mock_setup_entry.mock_calls) == 1 @@ -79,7 +72,6 @@ async def test_full_flow( ("exception", "error"), [ (ApiException, "cannot_connect"), - (TimeoutError, "cannot_connect"), (AuthenticationFailed, "invalid_auth"), (Exception, "unknown"), ], @@ -117,5 +109,4 @@ async def test_form_exceptions( assert result3["data"][CONF_EMAIL] == TEST_CONFIG_ENTRY[CONF_EMAIL] assert result3["data"][CONF_PASSWORD] == TEST_CONFIG_ENTRY[CONF_PASSWORD] assert result3["data"][CONF_REFRESH_TOKEN] == TEST_CONFIG_ENTRY[CONF_REFRESH_TOKEN] - assert result3["data"][CONF_BRAND] == TEST_CONFIG_ENTRY[CONF_BRAND] assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/aquacell/test_init.py b/tests/components/aquacell/test_init.py index 580d87f4d9a..a70d077e180 100644 --- a/tests/components/aquacell/test_init.py +++ b/tests/components/aquacell/test_init.py @@ -38,17 +38,6 @@ async def test_load_unload_entry( assert entry.state is ConfigEntryState.NOT_LOADED -async def test_load_withoutbrand( - hass: HomeAssistant, - mock_aquacell_api: AsyncMock, - 
mock_config_entry_without_brand: MockConfigEntry, -) -> None: - """Test load entry without brand.""" - await setup_integration(hass, mock_config_entry_without_brand) - - assert mock_config_entry_without_brand.state is ConfigEntryState.LOADED - - async def test_coordinator_update_valid_refresh_token( hass: HomeAssistant, mock_aquacell_api: AsyncMock, diff --git a/tests/components/aranet/__init__.py b/tests/components/aranet/__init__.py index 711c605fd28..18bebfb44a4 100644 --- a/tests/components/aranet/__init__.py +++ b/tests/components/aranet/__init__.py @@ -82,11 +82,3 @@ VALID_ARANET_RADIATION_DATA_SERVICE_INFO = fake_service_info( 1794: b"\x02!&\x04\x01\x00`-\x00\x00\x08\x98\x05\x00n\x00\x00d\x00,\x01\xfd\x00\xc7" }, ) - -VALID_ARANET_RADON_DATA_SERVICE_INFO = fake_service_info( - "AranetRn+ 12345", - "0000fce0-0000-1000-8000-00805f9b34fb", - { - 1794: b"\x03!\x04\x06\x01\x00\x00\x00\x07\x00\xfe\x01\xc9'\xce\x01\x00d\x01X\x02\xf6\x01\x08" - }, -) diff --git a/tests/components/aranet/test_sensor.py b/tests/components/aranet/test_sensor.py index 7bd00af4837..c932a92c1e8 100644 --- a/tests/components/aranet/test_sensor.py +++ b/tests/components/aranet/test_sensor.py @@ -11,7 +11,6 @@ from . import ( DISABLED_INTEGRATIONS_SERVICE_INFO, VALID_ARANET2_DATA_SERVICE_INFO, VALID_ARANET_RADIATION_DATA_SERVICE_INFO, - VALID_ARANET_RADON_DATA_SERVICE_INFO, VALID_DATA_SERVICE_INFO, ) @@ -189,71 +188,6 @@ async def test_sensors_aranet4(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensors_aranetrn(hass: HomeAssistant) -> None: - """Test setting up creates the sensors for Aranet Radon device.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="aa:bb:cc:dd:ee:ff", - ) - entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert len(hass.states.async_all("sensor")) == 0 - inject_bluetooth_service_info(hass, VALID_ARANET_RADON_DATA_SERVICE_INFO) - await hass.async_block_till_done() - assert len(hass.states.async_all("sensor")) == 6 - - batt_sensor = hass.states.get("sensor.aranetrn_12345_battery") - batt_sensor_attrs = batt_sensor.attributes - assert batt_sensor.state == "100" - assert batt_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Battery" - assert batt_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" - assert batt_sensor_attrs[ATTR_STATE_CLASS] == "measurement" - - co2_sensor = hass.states.get("sensor.aranetrn_12345_radon_concentration") - co2_sensor_attrs = co2_sensor.attributes - assert co2_sensor.state == "7" - assert co2_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Radon Concentration" - assert co2_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "Bq/m³" - assert co2_sensor_attrs[ATTR_STATE_CLASS] == "measurement" - - humid_sensor = hass.states.get("sensor.aranetrn_12345_humidity") - humid_sensor_attrs = humid_sensor.attributes - assert humid_sensor.state == "46.2" - assert humid_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Humidity" - assert humid_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" - assert humid_sensor_attrs[ATTR_STATE_CLASS] == "measurement" - - temp_sensor = hass.states.get("sensor.aranetrn_12345_temperature") - temp_sensor_attrs = temp_sensor.attributes - assert temp_sensor.state == "25.5" - assert temp_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Temperature" - assert temp_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "°C" - assert temp_sensor_attrs[ATTR_STATE_CLASS] == 
"measurement" - - press_sensor = hass.states.get("sensor.aranetrn_12345_pressure") - press_sensor_attrs = press_sensor.attributes - assert press_sensor.state == "1018.5" - assert press_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Pressure" - assert press_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "hPa" - assert press_sensor_attrs[ATTR_STATE_CLASS] == "measurement" - - interval_sensor = hass.states.get("sensor.aranetrn_12345_update_interval") - interval_sensor_attrs = interval_sensor.attributes - assert interval_sensor.state == "600" - assert ( - interval_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Update Interval" - ) - assert interval_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "s" - assert interval_sensor_attrs[ATTR_STATE_CLASS] == "measurement" - - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_smart_home_integration_disabled(hass: HomeAssistant) -> None: """Test disabling smart home integration marks entities as unavailable.""" diff --git a/tests/components/arcam_fmj/conftest.py b/tests/components/arcam_fmj/conftest.py index ca4af1b00a3..66850933cc7 100644 --- a/tests/components/arcam_fmj/conftest.py +++ b/tests/components/arcam_fmj/conftest.py @@ -1,11 +1,11 @@ """Tests for the arcam_fmj component.""" -from collections.abc import AsyncGenerator from unittest.mock import Mock, patch from arcam.fmj.client import Client from arcam.fmj.state import State import pytest +from typing_extensions import AsyncGenerator from homeassistant.components.arcam_fmj.const import DEFAULT_NAME from homeassistant.components.arcam_fmj.media_player import ArcamFmj @@ -99,7 +99,6 @@ async def player_setup_fixture( return state_1 if zone == 2: return state_2 - raise ValueError(f"Unknown player zone: {zone}") await async_setup_component(hass, "homeassistant", {}) diff --git a/tests/components/arcam_fmj/test_config_flow.py b/tests/components/arcam_fmj/test_config_flow.py index 60c68c5e102..26e93354900 100644 --- a/tests/components/arcam_fmj/test_config_flow.py +++ b/tests/components/arcam_fmj/test_config_flow.py @@ -1,14 +1,15 @@ """Tests for the Arcam FMJ config flow module.""" -from collections.abc import Generator from dataclasses import replace from unittest.mock import AsyncMock, MagicMock, patch from arcam.fmj.client import ConnectionFailed import pytest +from typing_extensions import Generator from homeassistant.components import ssdp -from homeassistant.components.arcam_fmj.const import DOMAIN +from homeassistant.components.arcam_fmj.config_flow import get_entry_client +from homeassistant.components.arcam_fmj.const import DOMAIN, DOMAIN_DATA_ENTRIES from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PORT, CONF_SOURCE from homeassistant.core import HomeAssistant @@ -214,3 +215,12 @@ async def test_user_wrong( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == f"Arcam FMJ ({MOCK_HOST})" assert result["result"].unique_id is None + + +async def test_get_entry_client(hass: HomeAssistant) -> None: + """Test helper for configuration.""" + entry = MockConfigEntry( + domain=DOMAIN, data=MOCK_CONFIG_ENTRY, title=MOCK_NAME, unique_id=MOCK_UUID + ) + hass.data[DOMAIN_DATA_ENTRIES] = {entry.entry_id: "dummy"} + assert get_entry_client(hass, entry) == "dummy" diff --git a/tests/components/arve/conftest.py b/tests/components/arve/conftest.py index 8fc35e37000..40a5f98291b 100644 --- 
a/tests/components/arve/conftest.py +++ b/tests/components/arve/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Arve tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from asyncarve import ArveCustomer, ArveDevices, ArveSensPro, ArveSensProData import pytest +from typing_extensions import Generator from homeassistant.components.arve.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/aseko_pool_live/conftest.py b/tests/components/aseko_pool_live/conftest.py deleted file mode 100644 index f3bbddb2cab..00000000000 --- a/tests/components/aseko_pool_live/conftest.py +++ /dev/null @@ -1,20 +0,0 @@ -"""Aseko Pool Live conftest.""" - -from datetime import datetime - -from aioaseko import User -import pytest - - -@pytest.fixture -def user() -> User: - """Aseko User fixture.""" - return User( - user_id="a_user_id", - created_at=datetime.now(), - updated_at=datetime.now(), - name="John", - surname="Doe", - language="any_language", - is_active=True, - ) diff --git a/tests/components/aseko_pool_live/test_config_flow.py b/tests/components/aseko_pool_live/test_config_flow.py index b307f00abbe..4307e527cee 100644 --- a/tests/components/aseko_pool_live/test_config_flow.py +++ b/tests/components/aseko_pool_live/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from aioaseko import AsekoAPIError, AsekoInvalidCredentials, User +from aioaseko import AccountInfo, APIUnavailable, InvalidAuthCredentials import pytest from homeassistant import config_entries @@ -23,7 +23,7 @@ async def test_async_step_user_form(hass: HomeAssistant) -> None: assert result["errors"] == {} -async def test_async_step_user_success(hass: HomeAssistant, user: User) -> None: +async def test_async_step_user_success(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -31,8 +31,8 @@ async def test_async_step_user_success(hass: HomeAssistant, user: User) -> None: with ( patch( - "homeassistant.components.aseko_pool_live.config_flow.Aseko.login", - return_value=user, + "homeassistant.components.aseko_pool_live.config_flow.WebAccount.login", + return_value=AccountInfo("aseko@example.com", "a_user_id", "any_language"), ), patch( "homeassistant.components.aseko_pool_live.async_setup_entry", @@ -60,13 +60,13 @@ async def test_async_step_user_success(hass: HomeAssistant, user: User) -> None: @pytest.mark.parametrize( ("error_web", "reason"), [ - (AsekoAPIError, "cannot_connect"), - (AsekoInvalidCredentials, "invalid_auth"), + (APIUnavailable, "cannot_connect"), + (InvalidAuthCredentials, "invalid_auth"), (Exception, "unknown"), ], ) async def test_async_step_user_exception( - hass: HomeAssistant, user: User, error_web: Exception, reason: str + hass: HomeAssistant, error_web: Exception, reason: str ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -74,8 +74,8 @@ async def test_async_step_user_exception( ) with patch( - "homeassistant.components.aseko_pool_live.config_flow.Aseko.login", - return_value=user, + "homeassistant.components.aseko_pool_live.config_flow.WebAccount.login", + return_value=AccountInfo("aseko@example.com", "a_user_id", "any_language"), side_effect=error_web, ): result2 = await hass.config_entries.flow.async_configure( @@ -93,13 +93,13 @@ async def test_async_step_user_exception( @pytest.mark.parametrize( ("error_web", "reason"), [ - (AsekoAPIError, 
"cannot_connect"), - (AsekoInvalidCredentials, "invalid_auth"), + (APIUnavailable, "cannot_connect"), + (InvalidAuthCredentials, "invalid_auth"), (Exception, "unknown"), ], ) async def test_get_account_info_exceptions( - hass: HomeAssistant, user: User, error_web: Exception, reason: str + hass: HomeAssistant, error_web: Exception, reason: str ) -> None: """Test we handle config flow exceptions.""" result = await hass.config_entries.flow.async_init( @@ -107,8 +107,8 @@ async def test_get_account_info_exceptions( ) with patch( - "homeassistant.components.aseko_pool_live.config_flow.Aseko.login", - return_value=user, + "homeassistant.components.aseko_pool_live.config_flow.WebAccount.login", + return_value=AccountInfo("aseko@example.com", "a_user_id", "any_language"), side_effect=error_web, ): result2 = await hass.config_entries.flow.async_configure( @@ -123,102 +123,52 @@ async def test_get_account_info_exceptions( assert result2["errors"] == {"base": reason} -async def test_async_step_reauth_success(hass: HomeAssistant, user: User) -> None: +async def test_async_step_reauth_success(hass: HomeAssistant) -> None: """Test successful reauthentication.""" mock_entry = MockConfigEntry( domain=DOMAIN, - unique_id="a_user_id", - data={CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "passw0rd"}, - version=2, + unique_id="UID", + data={CONF_EMAIL: "aseko@example.com"}, ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} - with ( - patch( - "homeassistant.components.aseko_pool_live.config_flow.Aseko.login", - return_value=user, - ), - patch( - "homeassistant.components.aseko_pool_live.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): + with patch( + "homeassistant.components.aseko_pool_live.config_flow.WebAccount.login", + return_value=AccountInfo("aseko@example.com", "a_user_id", "any_language"), + ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "new_password"}, + {CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "passw0rd"}, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" assert len(mock_setup_entry.mock_calls) == 1 - assert mock_entry.unique_id == "a_user_id" - assert dict(mock_entry.data) == { - CONF_EMAIL: "aseko@example.com", - CONF_PASSWORD: "new_password", - } - - -async def test_async_step_reauth_mismatch(hass: HomeAssistant, user: User) -> None: - """Test mismatch reauthentication.""" - - mock_entry = MockConfigEntry( - domain=DOMAIN, - unique_id="UID", - data={CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "passw0rd"}, - version=2, - ) - mock_entry.add_to_hass(hass) - - result = await mock_entry.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {} - - with ( - patch( - "homeassistant.components.aseko_pool_live.config_flow.Aseko.login", - return_value=user, - ), - patch( - "homeassistant.components.aseko_pool_live.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_EMAIL: 
"aseko@example.com", CONF_PASSWORD: "new_password"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "unique_id_mismatch" - assert len(mock_setup_entry.mock_calls) == 0 - assert mock_entry.unique_id == "UID" - assert dict(mock_entry.data) == { - CONF_EMAIL: "aseko@example.com", - CONF_PASSWORD: "passw0rd", - } @pytest.mark.parametrize( ("error_web", "reason"), [ - (AsekoAPIError, "cannot_connect"), - (AsekoInvalidCredentials, "invalid_auth"), + (APIUnavailable, "cannot_connect"), + (InvalidAuthCredentials, "invalid_auth"), (Exception, "unknown"), ], ) async def test_async_step_reauth_exception( - hass: HomeAssistant, user: User, error_web: Exception, reason: str + hass: HomeAssistant, error_web: Exception, reason: str ) -> None: """Test we get the form.""" @@ -229,11 +179,17 @@ async def test_async_step_reauth_exception( ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_entry.entry_id, + }, + ) with patch( - "homeassistant.components.aseko_pool_live.config_flow.Aseko.login", - return_value=user, + "homeassistant.components.aseko_pool_live.config_flow.WebAccount.login", + return_value=AccountInfo("aseko@example.com", "a_user_id", "any_language"), side_effect=error_web, ): result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/assist_pipeline/conftest.py b/tests/components/assist_pipeline/conftest.py index 0f6872edbfe..f19e70a8ec1 100644 --- a/tests/components/assist_pipeline/conftest.py +++ b/tests/components/assist_pipeline/conftest.py @@ -2,28 +2,23 @@ from __future__ import annotations -from collections.abc import AsyncIterable, Generator +from collections.abc import AsyncIterable from pathlib import Path from typing import Any from unittest.mock import AsyncMock import pytest +from typing_extensions import Generator from homeassistant.components import stt, tts, wake_word from homeassistant.components.assist_pipeline import DOMAIN, select as assist_select -from homeassistant.components.assist_pipeline.const import ( - BYTES_PER_CHUNK, - SAMPLE_CHANNELS, - SAMPLE_RATE, - SAMPLE_WIDTH, -) from homeassistant.components.assist_pipeline.pipeline import ( PipelineData, PipelineStorageCollection, ) from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.setup import async_setup_component @@ -36,17 +31,115 @@ from tests.common import ( mock_integration, mock_platform, ) -from tests.components.stt.common import MockSTTProvider, MockSTTProviderEntity -from tests.components.tts.common import MockTTSProvider _TRANSCRIPT = "test transcript" -BYTES_ONE_SECOND = SAMPLE_RATE * SAMPLE_WIDTH * SAMPLE_CHANNELS - @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: """Mock the TTS cache dir with empty dir.""" + return mock_tts_cache_dir + + +class BaseProvider: + """Mock STT provider.""" + + _supported_languages = ["en-US"] + + def __init__(self, text: str) -> None: + """Init test provider.""" + self.text = text + self.received: 
list[bytes] = [] + + @property + def supported_languages(self) -> list[str]: + """Return a list of supported languages.""" + return self._supported_languages + + @property + def supported_formats(self) -> list[stt.AudioFormats]: + """Return a list of supported formats.""" + return [stt.AudioFormats.WAV] + + @property + def supported_codecs(self) -> list[stt.AudioCodecs]: + """Return a list of supported codecs.""" + return [stt.AudioCodecs.PCM] + + @property + def supported_bit_rates(self) -> list[stt.AudioBitRates]: + """Return a list of supported bitrates.""" + return [stt.AudioBitRates.BITRATE_16] + + @property + def supported_sample_rates(self) -> list[stt.AudioSampleRates]: + """Return a list of supported samplerates.""" + return [stt.AudioSampleRates.SAMPLERATE_16000] + + @property + def supported_channels(self) -> list[stt.AudioChannels]: + """Return a list of supported channels.""" + return [stt.AudioChannels.CHANNEL_MONO] + + async def async_process_audio_stream( + self, metadata: stt.SpeechMetadata, stream: AsyncIterable[bytes] + ) -> stt.SpeechResult: + """Process an audio stream.""" + async for data in stream: + if not data: + break + self.received.append(data) + return stt.SpeechResult(self.text, stt.SpeechResultState.SUCCESS) + + +class MockSttProvider(BaseProvider, stt.Provider): + """Mock provider.""" + + +class MockSttProviderEntity(BaseProvider, stt.SpeechToTextEntity): + """Mock provider entity.""" + + _attr_name = "Mock STT" + + +class MockTTSProvider(tts.Provider): + """Mock TTS provider.""" + + name = "Test" + _supported_languages = ["en-US"] + _supported_voices = { + "en-US": [ + tts.Voice("james_earl_jones", "James Earl Jones"), + tts.Voice("fran_drescher", "Fran Drescher"), + ] + } + _supported_options = ["voice", "age", tts.ATTR_AUDIO_OUTPUT] + + @property + def default_language(self) -> str: + """Return the default language.""" + return "en" + + @property + def supported_languages(self) -> list[str]: + """Return list of supported languages.""" + return self._supported_languages + + @callback + def async_get_supported_voices(self, language: str) -> list[tts.Voice] | None: + """Return a list of supported voices for a language.""" + return self._supported_voices.get(language) + + @property + def supported_options(self) -> list[str]: + """Return list of supported options like voice, emotions.""" + return self._supported_options + + def get_tts_audio( + self, message: str, language: str, options: dict[str, Any] + ) -> tts.TtsAudioType: + """Load TTS data.""" + return ("mp3", b"") class MockTTSPlatform(MockPlatform): @@ -54,7 +147,7 @@ class MockTTSPlatform(MockPlatform): PLATFORM_SCHEMA = tts.PLATFORM_SCHEMA - def __init__(self, *, async_get_engine, **kwargs: Any) -> None: + def __init__(self, *, async_get_engine, **kwargs): """Initialize the tts platform.""" super().__init__(**kwargs) self.async_get_engine = async_get_engine @@ -63,29 +156,25 @@ class MockTTSPlatform(MockPlatform): @pytest.fixture async def mock_tts_provider() -> MockTTSProvider: """Mock TTS provider.""" - provider = MockTTSProvider("en") - provider._supported_languages = ["en-US"] - return provider + return MockTTSProvider() @pytest.fixture -async def mock_stt_provider() -> MockSTTProvider: +async def mock_stt_provider() -> MockSttProvider: """Mock STT provider.""" - return MockSTTProvider(supported_languages=["en-US"], text=_TRANSCRIPT) + return MockSttProvider(_TRANSCRIPT) @pytest.fixture -def mock_stt_provider_entity() -> MockSTTProviderEntity: +def mock_stt_provider_entity() -> 
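
A short usage sketch for the mock STT provider defined above: it records every non-empty chunk it is fed (the metadata argument is accepted but ignored) and returns the canned transcript. Stream contents and the transcript value here are illustrative.

import asyncio

from homeassistant.components import stt

from tests.components.assist_pipeline.conftest import MockSttProvider


async def _demo() -> None:
    provider = MockSttProvider("test transcript")

    async def _audio():
        yield b"part1"
        yield b"part2"
        yield b""  # empty chunk ends the stream

    metadata = stt.SpeechMetadata(
        language="en-US",
        format=stt.AudioFormats.WAV,
        codec=stt.AudioCodecs.PCM,
        bit_rate=stt.AudioBitRates.BITRATE_16,
        sample_rate=stt.AudioSampleRates.SAMPLERATE_16000,
        channel=stt.AudioChannels.CHANNEL_MONO,
    )
    result = await provider.async_process_audio_stream(metadata, _audio())

    assert provider.received == [b"part1", b"part2"]
    assert result.text == "test transcript"


asyncio.run(_demo())
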
MockSttProviderEntity: """Test provider entity fixture.""" - entity = MockSTTProviderEntity(supported_languages=["en-US"], text=_TRANSCRIPT) - entity._attr_name = "Mock STT" - return entity + return MockSttProviderEntity(_TRANSCRIPT) class MockSttPlatform(MockPlatform): """Provide a fake STT platform.""" - def __init__(self, *, async_get_engine, **kwargs: Any) -> None: + def __init__(self, *, async_get_engine, **kwargs): """Initialize the stt platform.""" super().__init__(**kwargs) self.async_get_engine = async_get_engine @@ -195,8 +284,8 @@ def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: @pytest.fixture async def init_supporting_components( hass: HomeAssistant, - mock_stt_provider: MockSTTProvider, - mock_stt_provider_entity: MockSTTProviderEntity, + mock_stt_provider: MockSttProvider, + mock_stt_provider_entity: MockSttProviderEntity, mock_tts_provider: MockTTSProvider, mock_wake_word_provider_entity: MockWakeWordEntity, mock_wake_word_provider_entity2: MockWakeWordEntity2, @@ -374,8 +463,3 @@ def pipeline_data(hass: HomeAssistant, init_components) -> PipelineData: def pipeline_storage(pipeline_data) -> PipelineStorageCollection: """Return pipeline storage collection.""" return pipeline_data.pipeline_store - - -def make_10ms_chunk(header: bytes) -> bytes: - """Return 10ms of zeros with the given header.""" - return header + bytes(BYTES_PER_CHUNK - len(header)) diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr index e14bbac1839..8124ed4ab85 100644 --- a/tests/components/assist_pipeline/snapshots/test_init.ambr +++ b/tests/components/assist_pipeline/snapshots/test_init.ambr @@ -10,7 +10,7 @@ }), dict({ 'data': dict({ - 'engine': 'stt.mock_stt', + 'engine': 'test', 'metadata': dict({ 'bit_rate': , 'channel': , @@ -75,7 +75,7 @@ dict({ 'data': dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=james_earl_jones", 'mime_type': 'audio/mpeg', 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', }), @@ -164,7 +164,7 @@ dict({ 'data': dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22Arnold+Schwarzenegger%22%7D", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=Arnold+Schwarzenegger", 'mime_type': 'audio/mpeg', 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_2657c1a8ee_test.mp3', }), @@ -253,7 +253,7 @@ dict({ 'data': dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22Arnold+Schwarzenegger%22%7D", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=Arnold+Schwarzenegger", 'mime_type': 'audio/mpeg', 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_2657c1a8ee_test.mp3', }), @@ -301,7 +301,7 @@ }), dict({ 'data': dict({ - 'engine': 'stt.mock_stt', + 'engine': 'test', 'metadata': dict({ 'bit_rate': , 'channel': , @@ -366,7 +366,7 @@ dict({ 'data': dict({ 'tts_output': dict({ - 'media_id': 
"media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=james_earl_jones", 'mime_type': 'audio/mpeg', 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', }), diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index b806c6faf23..2c506215c68 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -11,7 +11,7 @@ # --- # name: test_audio_pipeline.1 dict({ - 'engine': 'stt.mock_stt', + 'engine': 'test', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -71,7 +71,7 @@ # name: test_audio_pipeline.6 dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=james_earl_jones", 'mime_type': 'audio/mpeg', 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', }), @@ -92,7 +92,7 @@ # --- # name: test_audio_pipeline_debug.1 dict({ - 'engine': 'stt.mock_stt', + 'engine': 'test', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -152,7 +152,7 @@ # name: test_audio_pipeline_debug.6 dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=james_earl_jones", 'mime_type': 'audio/mpeg', 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', }), @@ -185,7 +185,7 @@ # --- # name: test_audio_pipeline_with_enhancements.1 dict({ - 'engine': 'stt.mock_stt', + 'engine': 'test', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -245,7 +245,7 @@ # name: test_audio_pipeline_with_enhancements.6 dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=james_earl_jones", 'mime_type': 'audio/mpeg', 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', }), @@ -288,7 +288,7 @@ # --- # name: test_audio_pipeline_with_wake_word_no_timeout.3 dict({ - 'engine': 'stt.mock_stt', + 'engine': 'test', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -348,7 +348,7 @@ # name: test_audio_pipeline_with_wake_word_no_timeout.8 dict({ 'tts_output': dict({ - 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", + 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&voice=james_earl_jones", 'mime_type': 'audio/mpeg', 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', }), @@ -401,7 +401,7 @@ # --- # name: test_device_capture.1 dict({ - 'engine': 'stt.mock_stt', + 'engine': 'test', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -427,7 +427,7 @@ # --- # name: test_device_capture_override.1 
dict({ - 'engine': 'stt.mock_stt', + 'engine': 'test', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -440,7 +440,7 @@ # --- # name: test_device_capture_override.2 dict({ - 'audio': 'Y2h1bmsxAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=', + 'audio': 'Y2h1bmsx', 'channels': 1, 'rate': 16000, 'type': 'audio', @@ -475,7 +475,7 @@ # --- # name: test_device_capture_queue_full.1 dict({ - 'engine': 'stt.mock_stt', + 'engine': 'test', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -649,7 +649,7 @@ # --- # name: test_stt_stream_failed.1 dict({ - 'engine': 'stt.mock_stt', + 'engine': 'test', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -663,7 +663,7 @@ # name: test_stt_stream_failed.2 None # --- -# name: test_text_only_pipeline[extra_msg0] +# name: test_text_only_pipeline dict({ 'language': 'en', 'pipeline': , @@ -673,7 +673,7 @@ }), }) # --- -# name: test_text_only_pipeline[extra_msg0].1 +# name: test_text_only_pipeline.1 dict({ 'conversation_id': 'mock-conversation-id', 'device_id': 'mock-device-id', @@ -682,7 +682,7 @@ 'language': 'en', }) # --- -# name: test_text_only_pipeline[extra_msg0].2 +# name: test_text_only_pipeline.2 dict({ 'intent_output': dict({ 'conversation_id': None, @@ -697,58 +697,14 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any area called Are', + 'speech': 'Sorry, I am not aware of any area called are', }), }), }), }), }) # --- -# name: test_text_only_pipeline[extra_msg0].3 - None -# --- -# name: test_text_only_pipeline[extra_msg1] - dict({ - 'language': 'en', - 'pipeline': , - 'runner_data': dict({ - 'stt_binary_handler_id': None, - 'timeout': 300, - }), - }) -# --- -# name: test_text_only_pipeline[extra_msg1].1 - dict({ - 'conversation_id': 'mock-conversation-id', - 'device_id': 'mock-device-id', - 'engine': 'conversation.home_assistant', - 'intent_input': 'Are the lights on?', - 'language': 'en', - }) -# --- -# name: test_text_only_pipeline[extra_msg1].2 - dict({ - 'intent_output': dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any area called Are', - }), - }), - }), - }), - }) -# --- -# name: test_text_only_pipeline[extra_msg1].3 +# name: test_text_only_pipeline.3 None # --- # name: test_text_pipeline_timeout diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index c4696573bad..f9b91af3bf1 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -13,7 +13,6 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import assist_pipeline, media_source, stt, tts from homeassistant.components.assist_pipeline.const import ( - BYTES_PER_CHUNK, CONF_DEBUG_RECORDING_DIR, DOMAIN, ) @@ -21,16 +20,16 @@ from homeassistant.core import Context, HomeAssistant from homeassistant.setup import async_setup_component from .conftest import ( - BYTES_ONE_SECOND, - MockSTTProvider, - 
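
The differing 'audio' values in the test_device_capture_override.2 snapshot come straight from base64: the longer string is b"chunk1" padded with zeros to a full 10 ms chunk, the shorter one is the raw payload. A quick check, assuming the usual 16 kHz, 16-bit, mono chunking (320 bytes per 10 ms).

import base64

BYTES_PER_CHUNK = 160 * 2 * 1  # 160 samples * 2 bytes * 1 channel = 10 ms at 16 kHz

padded_chunk = b"chunk1" + bytes(BYTES_PER_CHUNK - len(b"chunk1"))

assert base64.b64encode(b"chunk1").decode() == "Y2h1bmsx"
assert base64.b64encode(padded_chunk).decode().startswith("Y2h1bmsxAAAA")
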
MockSTTProviderEntity, + MockSttProvider, + MockSttProviderEntity, MockTTSProvider, MockWakeWordEntity, - make_10ms_chunk, ) from tests.typing import ClientSessionGenerator, WebSocketGenerator +BYTES_ONE_SECOND = 16000 * 2 + def process_events(events: list[assist_pipeline.PipelineEvent]) -> list[dict]: """Process events to remove dynamic values.""" @@ -47,7 +46,7 @@ def process_events(events: list[assist_pipeline.PipelineEvent]) -> list[dict]: async def test_pipeline_from_audio_stream_auto( hass: HomeAssistant, - mock_stt_provider_entity: MockSTTProviderEntity, + mock_stt_provider: MockSttProvider, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -59,8 +58,8 @@ async def test_pipeline_from_audio_stream_auto( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield make_10ms_chunk(b"part1") - yield make_10ms_chunk(b"part2") + yield b"part1" + yield b"part2" yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -76,19 +75,19 @@ async def test_pipeline_from_audio_stream_auto( channel=stt.AudioChannels.CHANNEL_MONO, ), stt_stream=audio_data(), - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + audio_settings=assist_pipeline.AudioSettings( + is_vad_enabled=False, is_chunking_enabled=False + ), ) assert process_events(events) == snapshot - assert len(mock_stt_provider_entity.received) == 2 - assert mock_stt_provider_entity.received[0].startswith(b"part1") - assert mock_stt_provider_entity.received[1].startswith(b"part2") + assert mock_stt_provider.received == [b"part1", b"part2"] async def test_pipeline_from_audio_stream_legacy( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider: MockSTTProvider, + mock_stt_provider: MockSttProvider, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -101,8 +100,8 @@ async def test_pipeline_from_audio_stream_legacy( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield make_10ms_chunk(b"part1") - yield make_10ms_chunk(b"part2") + yield b"part1" + yield b"part2" yield b"" # Create a pipeline using an stt entity @@ -141,19 +140,19 @@ async def test_pipeline_from_audio_stream_legacy( ), stt_stream=audio_data(), pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + audio_settings=assist_pipeline.AudioSettings( + is_vad_enabled=False, is_chunking_enabled=False + ), ) assert process_events(events) == snapshot - assert len(mock_stt_provider.received) == 2 - assert mock_stt_provider.received[0].startswith(b"part1") - assert mock_stt_provider.received[1].startswith(b"part2") + assert mock_stt_provider.received == [b"part1", b"part2"] async def test_pipeline_from_audio_stream_entity( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider_entity: MockSTTProviderEntity, + mock_stt_provider_entity: MockSttProviderEntity, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -166,8 +165,8 @@ async def test_pipeline_from_audio_stream_entity( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield make_10ms_chunk(b"part1") - yield make_10ms_chunk(b"part2") + yield b"part1" + yield b"part2" yield b"" # Create a pipeline using an stt entity @@ -206,19 +205,19 @@ async def test_pipeline_from_audio_stream_entity( ), stt_stream=audio_data(), pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + audio_settings=assist_pipeline.AudioSettings( + is_vad_enabled=False, is_chunking_enabled=False + ), ) assert 
process_events(events) == snapshot - assert len(mock_stt_provider_entity.received) == 2 - assert mock_stt_provider_entity.received[0].startswith(b"part1") - assert mock_stt_provider_entity.received[1].startswith(b"part2") + assert mock_stt_provider_entity.received == [b"part1", b"part2"] async def test_pipeline_from_audio_stream_no_stt( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider: MockSTTProvider, + mock_stt_provider: MockSttProvider, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -231,8 +230,8 @@ async def test_pipeline_from_audio_stream_no_stt( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield make_10ms_chunk(b"part1") - yield make_10ms_chunk(b"part2") + yield b"part1" + yield b"part2" yield b"" # Create a pipeline without stt support @@ -272,7 +271,9 @@ async def test_pipeline_from_audio_stream_no_stt( ), stt_stream=audio_data(), pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + audio_settings=assist_pipeline.AudioSettings( + is_vad_enabled=False, is_chunking_enabled=False + ), ) assert not events @@ -281,7 +282,7 @@ async def test_pipeline_from_audio_stream_no_stt( async def test_pipeline_from_audio_stream_unknown_pipeline( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider: MockSTTProvider, + mock_stt_provider: MockSttProvider, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -292,8 +293,8 @@ async def test_pipeline_from_audio_stream_unknown_pipeline( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield make_10ms_chunk(b"part1") - yield make_10ms_chunk(b"part2") + yield b"part1" + yield b"part2" yield b"" # Try to use the created pipeline @@ -319,7 +320,7 @@ async def test_pipeline_from_audio_stream_unknown_pipeline( async def test_pipeline_from_audio_stream_wake_word( hass: HomeAssistant, - mock_stt_provider_entity: MockSTTProviderEntity, + mock_stt_provider: MockSttProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_components, snapshot: SnapshotAssertion, @@ -334,25 +335,24 @@ async def test_pipeline_from_audio_stream_wake_word( # [0, 2, ...] wake_chunk_2 = bytes(it.islice(it.cycle(range(0, 256, 2)), BYTES_ONE_SECOND)) - samples_per_chunk = 160 # 10ms @ 16Khz - bytes_per_chunk = samples_per_chunk * 2 # 16-bit + bytes_per_chunk = int(0.01 * BYTES_ONE_SECOND) async def audio_data(): - # 1 second in chunks + # 1 second in 10 ms chunks i = 0 while i < len(wake_chunk_1): yield wake_chunk_1[i : i + bytes_per_chunk] i += bytes_per_chunk - # 1 second in chunks + # 1 second in 30 ms chunks i = 0 while i < len(wake_chunk_2): yield wake_chunk_2[i : i + bytes_per_chunk] i += bytes_per_chunk - for header in (b"wake word!", b"part1", b"part2"): - yield make_10ms_chunk(header) - + yield b"wake word!" + yield b"part1" + yield b"part2" yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -372,7 +372,9 @@ async def test_pipeline_from_audio_stream_wake_word( wake_word_settings=assist_pipeline.WakeWordSettings( audio_seconds_to_buffer=1.5 ), - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + audio_settings=assist_pipeline.AudioSettings( + is_vad_enabled=False, is_chunking_enabled=False + ), ) assert process_events(events) == snapshot @@ -381,21 +383,19 @@ async def test_pipeline_from_audio_stream_wake_word( # 2. queued audio (from mock wake word entity) # 3. part1 # 4. 
part2 - assert len(mock_stt_provider_entity.received) > 3 + assert len(mock_stt_provider.received) > 3 first_chunk = bytes( - [c_byte for c in mock_stt_provider_entity.received[:-3] for c_byte in c] + [c_byte for c in mock_stt_provider.received[:-3] for c_byte in c] ) assert first_chunk == wake_chunk_1[len(wake_chunk_1) // 2 :] + wake_chunk_2 - assert mock_stt_provider_entity.received[-3] == b"queued audio" - assert mock_stt_provider_entity.received[-2].startswith(b"part1") - assert mock_stt_provider_entity.received[-1].startswith(b"part2") + assert mock_stt_provider.received[-3:] == [b"queued audio", b"part1", b"part2"] async def test_pipeline_save_audio( hass: HomeAssistant, - mock_stt_provider: MockSTTProvider, + mock_stt_provider: MockSttProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -413,11 +413,13 @@ async def test_pipeline_save_audio( pipeline = assist_pipeline.async_get_pipeline(hass) events: list[assist_pipeline.PipelineEvent] = [] + # Pad out to an even number of bytes since these "samples" will be saved + # as 16-bit values. async def audio_data(): - yield make_10ms_chunk(b"wake word") + yield b"wake word_" # queued audio - yield make_10ms_chunk(b"part1") - yield make_10ms_chunk(b"part2") + yield b"part1_" + yield b"part2_" yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -436,7 +438,9 @@ async def test_pipeline_save_audio( pipeline_id=pipeline.id, start_stage=assist_pipeline.PipelineStage.WAKE_WORD, end_stage=assist_pipeline.PipelineStage.STT, - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + audio_settings=assist_pipeline.AudioSettings( + is_vad_enabled=False, is_chunking_enabled=False + ), ) pipeline_dirs = list(temp_dir.iterdir()) @@ -460,21 +464,17 @@ async def test_pipeline_save_audio( # Verify wake file with wave.open(str(wake_file), "rb") as wake_wav: wake_data = wake_wav.readframes(wake_wav.getnframes()) - assert wake_data.startswith(b"wake word") + assert wake_data == b"wake word_" # Verify stt file with wave.open(str(stt_file), "rb") as stt_wav: stt_data = stt_wav.readframes(stt_wav.getnframes()) - assert stt_data.startswith(b"queued audio") - stt_data = stt_data[len(b"queued audio") :] - assert stt_data.startswith(b"part1") - stt_data = stt_data[BYTES_PER_CHUNK:] - assert stt_data.startswith(b"part2") + assert stt_data == b"queued audiopart1_part2_" async def test_pipeline_saved_audio_with_device_id( hass: HomeAssistant, - mock_stt_provider: MockSTTProvider, + mock_stt_provider: MockSttProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -529,7 +529,7 @@ async def test_pipeline_saved_audio_with_device_id( async def test_pipeline_saved_audio_write_error( hass: HomeAssistant, - mock_stt_provider: MockSTTProvider, + mock_stt_provider: MockSttProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -578,7 +578,7 @@ async def test_pipeline_saved_audio_write_error( async def test_pipeline_saved_audio_empty_queue( hass: HomeAssistant, - mock_stt_provider: MockSTTProvider, + mock_stt_provider: MockSttProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -641,7 +641,7 @@ async def test_pipeline_saved_audio_empty_queue( async def test_wake_word_detection_aborted( hass: HomeAssistant, - mock_stt_provider: MockSTTProvider, + mock_stt_provider: MockSttProvider, 
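
The buffering assertion above is pure arithmetic: with audio_seconds_to_buffer=1.5 and two one-second chunks streamed before detection, the STT stage should receive the last half of the first chunk plus all of the second. A small check of that bookkeeping, assuming 16 kHz, 16-bit mono audio.

BYTES_ONE_SECOND = 16000 * 2  # 16 kHz * 2 bytes per sample, mono

buffered_bytes = int(1.5 * BYTES_ONE_SECOND)           # what the wake word stage keeps
kept_from_chunk_1 = buffered_bytes - BYTES_ONE_SECOND  # wake_chunk_2 is kept in full

assert kept_from_chunk_1 == BYTES_ONE_SECOND // 2      # i.e. wake_chunk_1[len(wake_chunk_1) // 2 :]
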
mock_wake_word_provider_entity: MockWakeWordEntity, init_components, pipeline_data: assist_pipeline.pipeline.PipelineData, @@ -652,10 +652,10 @@ async def test_wake_word_detection_aborted( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield make_10ms_chunk(b"silence!") - yield make_10ms_chunk(b"wake word!") - yield make_10ms_chunk(b"part1") - yield make_10ms_chunk(b"part2") + yield b"silence!" + yield b"wake word!" + yield b"part1" + yield b"part2" yield b"" pipeline_store = pipeline_data.pipeline_store @@ -685,7 +685,9 @@ async def test_wake_word_detection_aborted( wake_word_settings=assist_pipeline.WakeWordSettings( audio_seconds_to_buffer=1.5 ), - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + audio_settings=assist_pipeline.AudioSettings( + is_vad_enabled=False, is_chunking_enabled=False + ), ), ) await pipeline_input.validate() @@ -788,12 +790,13 @@ async def test_tts_audio_output( assert len(extra_options) == 0, extra_options -async def test_tts_wav_preferred_format( +async def test_tts_supports_preferred_format( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts_provider: MockTTSProvider, init_components, pipeline_data: assist_pipeline.pipeline.PipelineData, + snapshot: SnapshotAssertion, ) -> None: """Test that preferred format options are given to the TTS system if supported.""" client = await hass_client() @@ -828,7 +831,6 @@ async def test_tts_wav_preferred_format( tts.ATTR_PREFERRED_FORMAT, tts.ATTR_PREFERRED_SAMPLE_RATE, tts.ATTR_PREFERRED_SAMPLE_CHANNELS, - tts.ATTR_PREFERRED_SAMPLE_BYTES, ] ) @@ -850,80 +852,6 @@ async def test_tts_wav_preferred_format( options = mock_get_tts_audio.call_args_list[0].kwargs["options"] # We should have received preferred format options in get_tts_audio - assert options.get(tts.ATTR_PREFERRED_FORMAT) == "wav" - assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_RATE)) == 16000 - assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_CHANNELS)) == 1 - assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_BYTES)) == 2 - - -async def test_tts_dict_preferred_format( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_tts_provider: MockTTSProvider, - init_components, - pipeline_data: assist_pipeline.pipeline.PipelineData, -) -> None: - """Test that preferred format options are given to the TTS system if supported.""" - client = await hass_client() - assert await async_setup_component(hass, media_source.DOMAIN, {}) - - events: list[assist_pipeline.PipelineEvent] = [] - - pipeline_store = pipeline_data.pipeline_store - pipeline_id = pipeline_store.async_get_preferred_item() - pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) - - pipeline_input = assist_pipeline.pipeline.PipelineInput( - tts_input="This is a test.", - conversation_id=None, - device_id=None, - run=assist_pipeline.pipeline.PipelineRun( - hass, - context=Context(), - pipeline=pipeline, - start_stage=assist_pipeline.PipelineStage.TTS, - end_stage=assist_pipeline.PipelineStage.TTS, - event_callback=events.append, - tts_audio_output={ - tts.ATTR_PREFERRED_FORMAT: "flac", - tts.ATTR_PREFERRED_SAMPLE_RATE: 48000, - tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 2, - tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, - }, - ), - ) - await pipeline_input.validate() - - # Make the TTS provider support preferred format options - supported_options = list(mock_tts_provider.supported_options or []) - supported_options.extend( - [ - tts.ATTR_PREFERRED_FORMAT, - tts.ATTR_PREFERRED_SAMPLE_RATE, - tts.ATTR_PREFERRED_SAMPLE_CHANNELS, - 
tts.ATTR_PREFERRED_SAMPLE_BYTES, - ] - ) - - with ( - patch.object(mock_tts_provider, "_supported_options", supported_options), - patch.object(mock_tts_provider, "get_tts_audio") as mock_get_tts_audio, - ): - await pipeline_input.execute() - - for event in events: - if event.type == assist_pipeline.PipelineEventType.TTS_END: - # We must fetch the media URL to trigger the TTS - assert event.data - media_id = event.data["tts_output"]["media_id"] - resolved = await media_source.async_resolve_media(hass, media_id, None) - await client.get(resolved.url) - - assert mock_get_tts_audio.called - options = mock_get_tts_audio.call_args_list[0].kwargs["options"] - - # We should have received preferred format options in get_tts_audio - assert options.get(tts.ATTR_PREFERRED_FORMAT) == "flac" - assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_RATE)) == 48000 - assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_CHANNELS)) == 2 - assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_BYTES)) == 2 + assert tts.ATTR_PREFERRED_FORMAT in options + assert tts.ATTR_PREFERRED_SAMPLE_RATE in options + assert tts.ATTR_PREFERRED_SAMPLE_CHANNELS in options diff --git a/tests/components/assist_pipeline/test_pipeline.py b/tests/components/assist_pipeline/test_pipeline.py index 50d0fc9bed8..3e1e99412d8 100644 --- a/tests/components/assist_pipeline/test_pipeline.py +++ b/tests/components/assist_pipeline/test_pipeline.py @@ -1,10 +1,10 @@ """Websocket tests for Voice Assistant integration.""" -from collections.abc import AsyncGenerator from typing import Any from unittest.mock import ANY, patch import pytest +from typing_extensions import AsyncGenerator from homeassistant.components import conversation from homeassistant.components.assist_pipeline.const import DOMAIN @@ -26,7 +26,7 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from . 
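
Both versions of the preferred-format test end by checking that these option keys reach get_tts_audio. A sketch of how a provider that advertises them might read them; the helper name and the fallback defaults are assumptions, not anything defined by the TTS integration.

from typing import Any

from homeassistant.components import tts


def _resolve_output_format(options: dict[str, Any]) -> tuple[str, int, int]:
    """Return the (container, sample_rate, channels) requested by the pipeline, if any."""
    return (
        options.get(tts.ATTR_PREFERRED_FORMAT, "mp3"),
        int(options.get(tts.ATTR_PREFERRED_SAMPLE_RATE, 16000)),
        int(options.get(tts.ATTR_PREFERRED_SAMPLE_CHANNELS, 1)),
    )
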
import MANY_LANGUAGES -from .conftest import MockSTTProviderEntity, MockTTSProvider +from .conftest import MockSttProvider, MockTTSProvider from tests.common import flush_store @@ -398,7 +398,7 @@ async def test_default_pipeline_no_stt_tts( @pytest.mark.usefixtures("init_supporting_components") async def test_default_pipeline( hass: HomeAssistant, - mock_stt_provider_entity: MockSTTProviderEntity, + mock_stt_provider: MockSttProvider, mock_tts_provider: MockTTSProvider, ha_language: str, ha_country: str | None, @@ -412,7 +412,7 @@ async def test_default_pipeline( hass.config.language = ha_language with ( - patch.object(mock_stt_provider_entity, "_supported_languages", MANY_LANGUAGES), + patch.object(mock_stt_provider, "_supported_languages", MANY_LANGUAGES), patch.object(mock_tts_provider, "_supported_languages", MANY_LANGUAGES), ): assert await async_setup_component(hass, "assist_pipeline", {}) @@ -429,7 +429,7 @@ async def test_default_pipeline( id=pipeline.id, language=pipeline_language, name="Home Assistant", - stt_engine="stt.mock_stt", + stt_engine="test", stt_language=stt_language, tts_engine="test", tts_language=tts_language, @@ -441,10 +441,10 @@ async def test_default_pipeline( @pytest.mark.usefixtures("init_supporting_components") async def test_default_pipeline_unsupported_stt_language( - hass: HomeAssistant, mock_stt_provider_entity: MockSTTProviderEntity + hass: HomeAssistant, mock_stt_provider: MockSttProvider ) -> None: """Test async_get_pipeline.""" - with patch.object(mock_stt_provider_entity, "_supported_languages", ["smurfish"]): + with patch.object(mock_stt_provider, "_supported_languages", ["smurfish"]): assert await async_setup_component(hass, "assist_pipeline", {}) pipeline_data: PipelineData = hass.data[DOMAIN] @@ -489,7 +489,7 @@ async def test_default_pipeline_unsupported_tts_language( id=pipeline.id, language="en", name="Home Assistant", - stt_engine="stt.mock_stt", + stt_engine="test", stt_language="en-US", tts_engine=None, tts_language=None, diff --git a/tests/components/assist_pipeline/test_repair_flows.py b/tests/components/assist_pipeline/test_repair_flows.py deleted file mode 100644 index 4c8a242b20c..00000000000 --- a/tests/components/assist_pipeline/test_repair_flows.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Test repair flows.""" - -import pytest - -from homeassistant.components.assist_pipeline.repair_flows import ( - AssistInProgressDeprecatedRepairFlow, -) - - -@pytest.mark.parametrize( - "data", [None, {}, {"entity_id": "blah", "entity_uuid": "12345"}] -) -def test_assist_in_progress_deprecated_flow_requires_data(data: dict | None) -> None: - """Test AssistInProgressDeprecatedRepairFlow requires data.""" - - with pytest.raises(ValueError): - AssistInProgressDeprecatedRepairFlow(data) diff --git a/tests/components/assist_pipeline/test_vad.py b/tests/components/assist_pipeline/test_vad.py index bd07601cd5d..139ae915263 100644 --- a/tests/components/assist_pipeline/test_vad.py +++ b/tests/components/assist_pipeline/test_vad.py @@ -1,9 +1,11 @@ """Tests for voice command segmenter.""" import itertools as it +from unittest.mock import patch from homeassistant.components.assist_pipeline.vad import ( AudioBuffer, + VoiceActivityDetector, VoiceCommandSegmenter, chunk_samples, ) @@ -16,66 +18,85 @@ def test_silence() -> None: segmenter = VoiceCommandSegmenter() # True return value indicates voice command has not finished - assert segmenter.process(_ONE_SECOND * 3, 0.0) - assert not segmenter.in_command + assert segmenter.process(_ONE_SECOND * 3, False) def 
test_speech() -> None: """Test that silence + speech + silence triggers a voice command.""" + def is_speech(chunk): + """Anything non-zero is speech.""" + return sum(chunk) > 0 + segmenter = VoiceCommandSegmenter() # silence - assert segmenter.process(_ONE_SECOND, 0.0) + assert segmenter.process(_ONE_SECOND, False) # "speech" - assert segmenter.process(_ONE_SECOND, 1.0) - assert segmenter.in_command + assert segmenter.process(_ONE_SECOND, True) # silence # False return value indicates voice command is finished - assert not segmenter.process(_ONE_SECOND, 0.0) - assert not segmenter.in_command + assert not segmenter.process(_ONE_SECOND, False) def test_audio_buffer() -> None: """Test audio buffer wrapping.""" - samples_per_chunk = 160 # 10 ms - bytes_per_chunk = samples_per_chunk * 2 - leftover_buffer = AudioBuffer(bytes_per_chunk) + class DisabledVad(VoiceActivityDetector): + def is_speech(self, chunk): + return False - # Partially fill audio buffer - half_chunk = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk // 2)) - chunks = list(chunk_samples(half_chunk, bytes_per_chunk, leftover_buffer)) + @property + def samples_per_chunk(self): + return 160 # 10 ms - assert not chunks - assert leftover_buffer.bytes() == half_chunk + vad = DisabledVad() + bytes_per_chunk = vad.samples_per_chunk * 2 + vad_buffer = AudioBuffer(bytes_per_chunk) + segmenter = VoiceCommandSegmenter() - # Fill and wrap with 1/4 chunk left over - three_quarters_chunk = bytes( - it.islice(it.cycle(range(256)), int(0.75 * bytes_per_chunk)) - ) - chunks = list(chunk_samples(three_quarters_chunk, bytes_per_chunk, leftover_buffer)) + with patch.object(vad, "is_speech", return_value=False) as mock_process: + # Partially fill audio buffer + half_chunk = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk // 2)) + segmenter.process_with_vad(half_chunk, vad, vad_buffer) - assert len(chunks) == 1 - assert ( - leftover_buffer.bytes() - == three_quarters_chunk[len(three_quarters_chunk) - (bytes_per_chunk // 4) :] - ) - assert chunks[0] == half_chunk + three_quarters_chunk[: bytes_per_chunk // 2] + assert not mock_process.called + assert vad_buffer is not None + assert vad_buffer.bytes() == half_chunk - # Run 2 chunks through - leftover_buffer.clear() - assert len(leftover_buffer) == 0 + # Fill and wrap with 1/4 chunk left over + three_quarters_chunk = bytes( + it.islice(it.cycle(range(256)), int(0.75 * bytes_per_chunk)) + ) + segmenter.process_with_vad(three_quarters_chunk, vad, vad_buffer) - two_chunks = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk * 2)) - chunks = list(chunk_samples(two_chunks, bytes_per_chunk, leftover_buffer)) + assert mock_process.call_count == 1 + assert ( + vad_buffer.bytes() + == three_quarters_chunk[ + len(three_quarters_chunk) - (bytes_per_chunk // 4) : + ] + ) + assert ( + mock_process.call_args[0][0] + == half_chunk + three_quarters_chunk[: bytes_per_chunk // 2] + ) - assert len(chunks) == 2 - assert len(leftover_buffer) == 0 - assert chunks[0] == two_chunks[:bytes_per_chunk] - assert chunks[1] == two_chunks[bytes_per_chunk:] + # Run 2 chunks through + segmenter.reset() + vad_buffer.clear() + assert len(vad_buffer) == 0 + + mock_process.reset_mock() + two_chunks = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk * 2)) + segmenter.process_with_vad(two_chunks, vad, vad_buffer) + + assert mock_process.call_count == 2 + assert len(vad_buffer) == 0 + assert mock_process.call_args_list[0][0][0] == two_chunks[:bytes_per_chunk] + assert mock_process.call_args_list[1][0][0] == 
two_chunks[bytes_per_chunk:] def test_partial_chunk() -> None: @@ -106,147 +127,41 @@ def test_chunk_samples_leftover() -> None: assert leftover_chunk_buffer.bytes() == bytes([5, 6]) -def test_silence_seconds() -> None: - """Test end of voice command silence seconds.""" +def test_vad_no_chunking() -> None: + """Test VAD that doesn't require chunking.""" - segmenter = VoiceCommandSegmenter(silence_seconds=1.0) + class VadNoChunk(VoiceActivityDetector): + def is_speech(self, chunk: bytes) -> bool: + return sum(chunk) > 0 - # silence - assert segmenter.process(_ONE_SECOND, 0.0) - assert not segmenter.in_command - - # "speech" - assert segmenter.process(_ONE_SECOND, 1.0) - assert segmenter.in_command - - # not enough silence to end - assert segmenter.process(_ONE_SECOND * 0.5, 0.0) - assert segmenter.in_command - - # exactly enough silence now - assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) - assert not segmenter.in_command - - -def test_silence_reset() -> None: - """Test that speech resets end of voice command detection.""" - - segmenter = VoiceCommandSegmenter(silence_seconds=1.0, reset_seconds=0.5) - - # silence - assert segmenter.process(_ONE_SECOND, 0.0) - assert not segmenter.in_command - - # "speech" - assert segmenter.process(_ONE_SECOND, 1.0) - assert segmenter.in_command - - # not enough silence to end - assert segmenter.process(_ONE_SECOND * 0.5, 0.0) - assert segmenter.in_command - - # speech should reset silence detection - assert segmenter.process(_ONE_SECOND * 0.5, 1.0) - assert segmenter.in_command - - # not enough silence to end - assert segmenter.process(_ONE_SECOND * 0.5, 0.0) - assert segmenter.in_command - - # exactly enough silence now - assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) - assert not segmenter.in_command - - -def test_speech_reset() -> None: - """Test that silence resets start of voice command detection.""" + @property + def samples_per_chunk(self) -> int | None: + return None + vad = VadNoChunk() segmenter = VoiceCommandSegmenter( - silence_seconds=1.0, reset_seconds=0.5, speech_seconds=1.0 + speech_seconds=1.0, silence_seconds=1.0, reset_seconds=0.5 ) + silence = bytes([0] * 16000) + speech = bytes([255] * (16000 // 2)) - # silence - assert segmenter.process(_ONE_SECOND, 0.0) - assert not segmenter.in_command + # Test with differently-sized chunks + assert vad.is_speech(speech) + assert not vad.is_speech(silence) - # not enough speech to start voice command - assert segmenter.process(_ONE_SECOND * 0.5, 1.0) - assert not segmenter.in_command - - # silence should reset speech detection - assert segmenter.process(_ONE_SECOND, 0.0) - assert not segmenter.in_command - - # not enough speech to start voice command - assert segmenter.process(_ONE_SECOND * 0.5, 1.0) - assert not segmenter.in_command - - # exactly enough speech now - assert segmenter.process(_ONE_SECOND * 0.5, 1.0) - assert segmenter.in_command - - -def test_timeout() -> None: - """Test that voice command detection times out.""" - - segmenter = VoiceCommandSegmenter(timeout_seconds=1.0) - - # not enough to time out - assert not segmenter.timed_out - assert segmenter.process(_ONE_SECOND * 0.5, 0.0) - assert not segmenter.timed_out - - # enough to time out - assert not segmenter.process(_ONE_SECOND * 0.5, 1.0) - assert segmenter.timed_out - - # flag resets with more audio - assert segmenter.process(_ONE_SECOND * 0.5, 1.0) - assert not segmenter.timed_out - - assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) - assert segmenter.timed_out - - -def test_command_seconds() -> None: - """Test 
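
Both the old and new versions of test_audio_buffer rely on the same chunk_samples() contract: complete fixed-size chunks are yielded and any remainder stays in the AudioBuffer for the next call. A compact worked example with assumed byte counts.

from homeassistant.components.assist_pipeline.vad import AudioBuffer, chunk_samples

bytes_per_chunk = 320  # 10 ms of 16-bit mono audio at 16 kHz
leftover = AudioBuffer(bytes_per_chunk)

chunks = list(chunk_samples(bytes(500), bytes_per_chunk, leftover))
assert len(chunks) == 1      # one complete 320-byte chunk is yielded
assert len(leftover) == 180  # 500 - 320 bytes are carried over

chunks = list(chunk_samples(bytes(140), bytes_per_chunk, leftover))
assert len(chunks) == 1      # 180 leftover + 140 new bytes = exactly one more chunk
assert len(leftover) == 0
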
minimum number of seconds for voice command.""" - - segmenter = VoiceCommandSegmenter( - command_seconds=3, speech_seconds=1, silence_seconds=1, reset_seconds=1 - ) - - assert segmenter.process(_ONE_SECOND, 1.0) - - # Silence counts towards total command length - assert segmenter.process(_ONE_SECOND * 0.5, 0.0) - - # Enough to finish command now - assert segmenter.process(_ONE_SECOND, 1.0) - assert segmenter.process(_ONE_SECOND * 0.5, 0.0) - - # Silence to finish - assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) - - -def test_speech_thresholds() -> None: - """Test before/in command speech thresholds.""" - - segmenter = VoiceCommandSegmenter( - before_command_speech_threshold=0.2, - in_command_speech_threshold=0.5, - command_seconds=2, - speech_seconds=1, - silence_seconds=1, - ) - - # Not high enough probability to trigger command - assert segmenter.process(_ONE_SECOND, 0.1) - assert not segmenter.in_command - - # Triggers command - assert segmenter.process(_ONE_SECOND, 0.3) - assert segmenter.in_command - - # Now that same probability is considered silence. - # Finishes command. - assert not segmenter.process(_ONE_SECOND, 0.3) + # Simulate voice command + assert segmenter.process_with_vad(silence, vad, None) + # begin + assert segmenter.process_with_vad(speech, vad, None) + assert segmenter.process_with_vad(speech, vad, None) + assert segmenter.process_with_vad(speech, vad, None) + # reset with silence + assert segmenter.process_with_vad(silence, vad, None) + # resume + assert segmenter.process_with_vad(speech, vad, None) + assert segmenter.process_with_vad(speech, vad, None) + assert segmenter.process_with_vad(speech, vad, None) + assert segmenter.process_with_vad(speech, vad, None) + # end + assert segmenter.process_with_vad(silence, vad, None) + assert not segmenter.process_with_vad(silence, vad, None) diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py index e339ee74fbb..e08dd9685ea 100644 --- a/tests/components/assist_pipeline/test_websocket.py +++ b/tests/components/assist_pipeline/test_websocket.py @@ -5,15 +5,9 @@ import base64 from typing import Any from unittest.mock import ANY, patch -import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.assist_pipeline.const import ( - DOMAIN, - SAMPLE_CHANNELS, - SAMPLE_RATE, - SAMPLE_WIDTH, -) +from homeassistant.components.assist_pipeline.const import DOMAIN from homeassistant.components.assist_pipeline.pipeline import ( DeviceAudioQueue, Pipeline, @@ -23,31 +17,17 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr -from .conftest import ( - BYTES_ONE_SECOND, - BYTES_PER_CHUNK, - MockWakeWordEntity, - MockWakeWordEntity2, - make_10ms_chunk, -) +from .conftest import MockWakeWordEntity, MockWakeWordEntity2 from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator -@pytest.mark.parametrize( - "extra_msg", - [ - {}, - {"pipeline": "conversation.home_assistant"}, - ], -) async def test_text_only_pipeline( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, init_components, snapshot: SnapshotAssertion, - extra_msg: dict[str, Any], ) -> None: """Test events from a pipeline run with text input (no STT/TTS).""" events = [] @@ -62,7 +42,6 @@ async def test_text_only_pipeline( "conversation_id": "mock-conversation-id", "device_id": "mock-device-id", } - | extra_msg ) # result @@ -216,7 +195,7 @@ async def 
test_audio_pipeline_with_wake_word_timeout( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": SAMPLE_RATE, + "sample_rate": 16000, "timeout": 1, }, } @@ -240,7 +219,7 @@ async def test_audio_pipeline_with_wake_word_timeout( events.append(msg["event"]) # 2 seconds of silence - await client.send_bytes(bytes([1]) + bytes(2 * BYTES_ONE_SECOND)) + await client.send_bytes(bytes([1]) + bytes(16000 * 2 * 2)) # Time out error msg = await client.receive_json() @@ -270,7 +249,12 @@ async def test_audio_pipeline_with_wake_word_no_timeout( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": SAMPLE_RATE, "timeout": 0, "no_vad": True}, + "input": { + "sample_rate": 16000, + "timeout": 0, + "no_vad": True, + "no_chunking": True, + }, } ) @@ -293,10 +277,9 @@ async def test_audio_pipeline_with_wake_word_no_timeout( events.append(msg["event"]) # "audio" - await client.send_bytes(bytes([handler_id]) + make_10ms_chunk(b"wake word")) + await client.send_bytes(bytes([handler_id]) + b"wake word") - async with asyncio.timeout(1): - msg = await client.receive_json() + msg = await client.receive_json() assert msg["event"]["type"] == "wake_word-end" assert msg["event"]["data"] == snapshot events.append(msg["event"]) @@ -377,7 +360,7 @@ async def test_audio_pipeline_no_wake_word_engine( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": SAMPLE_RATE, + "sample_rate": 16000, }, } ) @@ -414,7 +397,7 @@ async def test_audio_pipeline_no_wake_word_entity( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": SAMPLE_RATE, + "sample_rate": 16000, }, } ) @@ -682,7 +665,7 @@ async def test_stt_provider_missing( ) -> None: """Test events from a pipeline run with a non-existent STT provider.""" with patch( - "homeassistant.components.stt.async_get_speech_to_text_entity", + "homeassistant.components.stt.async_get_provider", return_value=None, ): client = await hass_ws_client(hass) @@ -708,11 +691,11 @@ async def test_stt_provider_bad_metadata( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, init_components, - mock_stt_provider_entity, + mock_stt_provider, snapshot: SnapshotAssertion, ) -> None: """Test events from a pipeline run with wrong metadata.""" - with patch.object(mock_stt_provider_entity, "check_metadata", return_value=False): + with patch.object(mock_stt_provider, "check_metadata", return_value=False): client = await hass_ws_client(hass) await client.send_json_auto_id( @@ -743,7 +726,7 @@ async def test_stt_stream_failed( client = await hass_ws_client(hass) with patch( - "tests.components.assist_pipeline.conftest.MockSTTProviderEntity.async_process_audio_stream", + "tests.components.assist_pipeline.conftest.MockSttProvider.async_process_audio_stream", side_effect=RuntimeError, ): await client.send_json_auto_id( @@ -1188,32 +1171,7 @@ async def test_get_pipeline( "id": ANY, "language": "en", "name": "Home Assistant", - "stt_engine": "stt.mock_stt", - "stt_language": "en-US", - "tts_engine": "test", - "tts_language": "en-US", - "tts_voice": "james_earl_jones", - "wake_word_entity": None, - "wake_word_id": None, - } - - # Get conversation agent as pipeline - await client.send_json_auto_id( - { - "type": "assist_pipeline/pipeline/get", - "pipeline_id": "conversation.home_assistant", - } - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == { - "conversation_engine": "conversation.home_assistant", - "conversation_language": "en", - "id": ANY, - "language": 
"en", - "name": "Home Assistant", - # It found these defaults - "stt_engine": "stt.mock_stt", + "stt_engine": "test", "stt_language": "en-US", "tts_engine": "test", "tts_language": "en-US", @@ -1297,7 +1255,7 @@ async def test_list_pipelines( "id": ANY, "language": "en", "name": "Home Assistant", - "stt_engine": "stt.mock_stt", + "stt_engine": "test", "stt_language": "en-US", "tts_engine": "test", "tts_language": "en-US", @@ -1783,7 +1741,7 @@ async def test_audio_pipeline_with_enhancements( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": SAMPLE_RATE, + "sample_rate": 16000, # Enhancements "noise_suppression_level": 2, "auto_gain_dbfs": 15, @@ -1813,7 +1771,7 @@ async def test_audio_pipeline_with_enhancements( # One second of silence. # This will pass through the audio enhancement pipeline, but we don't test # the actual output. - await client.send_bytes(bytes([handler_id]) + bytes(BYTES_ONE_SECOND)) + await client.send_bytes(bytes([handler_id]) + bytes(16000 * 2)) # End of audio stream (handler id + empty payload) await client.send_bytes(bytes([handler_id])) @@ -1883,7 +1841,11 @@ async def test_wake_word_cooldown_same_id( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, + "input": { + "sample_rate": 16000, + "no_vad": True, + "no_chunking": True, + }, } ) @@ -1892,7 +1854,11 @@ async def test_wake_word_cooldown_same_id( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, + "input": { + "sample_rate": 16000, + "no_vad": True, + "no_chunking": True, + }, } ) @@ -1926,8 +1892,8 @@ async def test_wake_word_cooldown_same_id( assert msg["event"]["data"] == snapshot # Wake both up at the same time - await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) - await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) + await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") + await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") # Get response events error_data: dict[str, Any] | None = None @@ -1966,7 +1932,11 @@ async def test_wake_word_cooldown_different_ids( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, + "input": { + "sample_rate": 16000, + "no_vad": True, + "no_chunking": True, + }, } ) @@ -1975,7 +1945,11 @@ async def test_wake_word_cooldown_different_ids( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, + "input": { + "sample_rate": 16000, + "no_vad": True, + "no_chunking": True, + }, } ) @@ -2009,8 +1983,8 @@ async def test_wake_word_cooldown_different_ids( assert msg["event"]["data"] == snapshot # Wake both up at the same time, but they will have different wake word ids - await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) - await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) + await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") + await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") # Get response events msg = await client_1.receive_json() @@ -2085,7 +2059,11 @@ async def test_wake_word_cooldown_different_entities( "pipeline": pipeline_id_1, "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, + "input": { + "sample_rate": 16000, + 
"no_vad": True, + "no_chunking": True, + }, } ) @@ -2096,7 +2074,11 @@ async def test_wake_word_cooldown_different_entities( "pipeline": pipeline_id_2, "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, + "input": { + "sample_rate": 16000, + "no_vad": True, + "no_chunking": True, + }, } ) @@ -2131,8 +2113,8 @@ async def test_wake_word_cooldown_different_entities( # Wake both up at the same time. # They will have the same wake word id, but different entities. - await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) - await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) + await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") + await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") # Get response events error_data: dict[str, Any] | None = None @@ -2170,11 +2152,7 @@ async def test_device_capture( identifiers={("demo", "satellite-1234")}, ) - audio_chunks = [ - make_10ms_chunk(b"chunk1"), - make_10ms_chunk(b"chunk2"), - make_10ms_chunk(b"chunk3"), - ] + audio_chunks = [b"chunk1", b"chunk2", b"chunk3"] # Start capture client_capture = await hass_ws_client(hass) @@ -2197,7 +2175,11 @@ async def test_device_capture( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, + "input": { + "sample_rate": 16000, + "no_vad": True, + "no_chunking": True, + }, "device_id": satellite_device.id, } ) @@ -2248,9 +2230,9 @@ async def test_device_capture( # Verify audio chunks for i, audio_chunk in enumerate(audio_chunks): assert events[i]["type"] == "audio" - assert events[i]["rate"] == SAMPLE_RATE - assert events[i]["width"] == SAMPLE_WIDTH - assert events[i]["channels"] == SAMPLE_CHANNELS + assert events[i]["rate"] == 16000 + assert events[i]["width"] == 2 + assert events[i]["channels"] == 1 # Audio is base64 encoded assert events[i]["audio"] == base64.b64encode(audio_chunk).decode("ascii") @@ -2275,11 +2257,7 @@ async def test_device_capture_override( identifiers={("demo", "satellite-1234")}, ) - audio_chunks = [ - make_10ms_chunk(b"chunk1"), - make_10ms_chunk(b"chunk2"), - make_10ms_chunk(b"chunk3"), - ] + audio_chunks = [b"chunk1", b"chunk2", b"chunk3"] # Start first capture client_capture_1 = await hass_ws_client(hass) @@ -2302,7 +2280,11 @@ async def test_device_capture_override( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, + "input": { + "sample_rate": 16000, + "no_vad": True, + "no_chunking": True, + }, "device_id": satellite_device.id, } ) @@ -2385,9 +2367,9 @@ async def test_device_capture_override( # Verify all but first audio chunk for i, audio_chunk in enumerate(audio_chunks[1:]): assert events[i]["type"] == "audio" - assert events[i]["rate"] == SAMPLE_RATE - assert events[i]["width"] == SAMPLE_WIDTH - assert events[i]["channels"] == SAMPLE_CHANNELS + assert events[i]["rate"] == 16000 + assert events[i]["width"] == 2 + assert events[i]["channels"] == 1 # Audio is base64 encoded assert events[i]["audio"] == base64.b64encode(audio_chunk).decode("ascii") @@ -2447,7 +2429,11 @@ async def test_device_capture_queue_full( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, + "input": { + "sample_rate": 16000, + "no_vad": True, + "no_chunking": True, + }, "device_id": satellite_device.id, } ) @@ -2468,8 +2454,8 @@ async def 
test_device_capture_queue_full( assert msg["event"]["type"] == "stt-start" assert msg["event"]["data"] == snapshot - # Single chunk will "overflow" the queue - await client_pipeline.send_bytes(bytes([handler_id]) + bytes(BYTES_PER_CHUNK)) + # Single sample will "overflow" the queue + await client_pipeline.send_bytes(bytes([handler_id, 0, 0])) # End of audio stream await client_pipeline.send_bytes(bytes([handler_id])) @@ -2577,7 +2563,7 @@ async def test_stt_cooldown_same_id( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": SAMPLE_RATE, + "sample_rate": 16000, "wake_word_phrase": "ok_nabu", }, } @@ -2589,7 +2575,7 @@ async def test_stt_cooldown_same_id( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": SAMPLE_RATE, + "sample_rate": 16000, "wake_word_phrase": "ok_nabu", }, } @@ -2648,7 +2634,7 @@ async def test_stt_cooldown_different_ids( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": SAMPLE_RATE, + "sample_rate": 16000, "wake_word_phrase": "ok_nabu", }, } @@ -2660,7 +2646,7 @@ async def test_stt_cooldown_different_ids( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": SAMPLE_RATE, + "sample_rate": 16000, "wake_word_phrase": "hey_jarvis", }, } diff --git a/tests/components/assist_satellite/__init__.py b/tests/components/assist_satellite/__init__.py deleted file mode 100644 index 7e06ea3a4b9..00000000000 --- a/tests/components/assist_satellite/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -"""Tests for Assist Satellite.""" - -ENTITY_ID = "assist_satellite.test_entity" diff --git a/tests/components/assist_satellite/conftest.py b/tests/components/assist_satellite/conftest.py deleted file mode 100644 index 9e9bfd959e6..00000000000 --- a/tests/components/assist_satellite/conftest.py +++ /dev/null @@ -1,135 +0,0 @@ -"""Test helpers for Assist Satellite.""" - -import pathlib -from unittest.mock import Mock - -import pytest - -from homeassistant.components.assist_pipeline import PipelineEvent -from homeassistant.components.assist_satellite import ( - DOMAIN as AS_DOMAIN, - AssistSatelliteAnnouncement, - AssistSatelliteConfiguration, - AssistSatelliteEntity, - AssistSatelliteEntityFeature, - AssistSatelliteWakeWord, -) -from homeassistant.config_entries import ConfigEntry, ConfigFlow -from homeassistant.core import HomeAssistant, callback -from homeassistant.setup import async_setup_component - -from tests.common import ( - MockConfigEntry, - MockModule, - mock_config_flow, - mock_integration, - mock_platform, - setup_test_component_platform, -) - -TEST_DOMAIN = "test" - - -@pytest.fixture(autouse=True) -def mock_tts(mock_tts_cache_dir: pathlib.Path) -> None: - """Mock TTS cache dir fixture.""" - - -class MockAssistSatellite(AssistSatelliteEntity): - """Mock Assist Satellite Entity.""" - - _attr_name = "Test Entity" - _attr_supported_features = AssistSatelliteEntityFeature.ANNOUNCE - - def __init__(self) -> None: - """Initialize the mock entity.""" - self.events = [] - self.announcements: list[AssistSatelliteAnnouncement] = [] - self.config = AssistSatelliteConfiguration( - available_wake_words=[ - AssistSatelliteWakeWord( - id="1234", wake_word="okay nabu", trained_languages=["en"] - ), - AssistSatelliteWakeWord( - id="5678", - wake_word="hey jarvis", - trained_languages=["en"], - ), - ], - active_wake_words=["1234"], - max_active_wake_words=1, - ) - - def on_pipeline_event(self, event: PipelineEvent) -> None: - """Handle pipeline events.""" - self.events.append(event) - - async def async_announce(self, announcement: 
AssistSatelliteAnnouncement) -> None: - """Announce media on a device.""" - self.announcements.append(announcement) - - @callback - def async_get_configuration(self) -> AssistSatelliteConfiguration: - """Get the current satellite configuration.""" - return self.config - - async def async_set_configuration( - self, config: AssistSatelliteConfiguration - ) -> None: - """Set the current satellite configuration.""" - self.config = config - - -@pytest.fixture -def entity() -> MockAssistSatellite: - """Mock Assist Satellite Entity.""" - return MockAssistSatellite() - - -@pytest.fixture -def config_entry(hass: HomeAssistant) -> ConfigEntry: - """Mock config entry.""" - entry = MockConfigEntry(domain=TEST_DOMAIN) - entry.add_to_hass(hass) - return entry - - -@pytest.fixture -async def init_components( - hass: HomeAssistant, - config_entry: ConfigEntry, - entity: MockAssistSatellite, -) -> None: - """Initialize components.""" - assert await async_setup_component(hass, "homeassistant", {}) - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [AS_DOMAIN]) - return True - - async def async_unload_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Unload test config entry.""" - await hass.config_entries.async_forward_entry_unload(config_entry, AS_DOMAIN) - return True - - mock_integration( - hass, - MockModule( - TEST_DOMAIN, - async_setup_entry=async_setup_entry_init, - async_unload_entry=async_unload_entry_init, - ), - ) - setup_test_component_platform(hass, AS_DOMAIN, [entity], from_config_entry=True) - mock_platform(hass, f"{TEST_DOMAIN}.config_flow", Mock()) - - with mock_config_flow(TEST_DOMAIN, ConfigFlow): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry diff --git a/tests/components/assist_satellite/test_entity.py b/tests/components/assist_satellite/test_entity.py deleted file mode 100644 index 884ba36782c..00000000000 --- a/tests/components/assist_satellite/test_entity.py +++ /dev/null @@ -1,466 +0,0 @@ -"""Test the Assist Satellite entity.""" - -import asyncio -from unittest.mock import patch - -import pytest - -from homeassistant.components import stt -from homeassistant.components.assist_pipeline import ( - OPTION_PREFERRED, - AudioSettings, - Pipeline, - PipelineEvent, - PipelineEventType, - PipelineStage, - async_get_pipeline, - async_update_pipeline, - vad, -) -from homeassistant.components.assist_satellite import ( - AssistSatelliteAnnouncement, - SatelliteBusyError, -) -from homeassistant.components.assist_satellite.entity import AssistSatelliteState -from homeassistant.components.media_source import PlayMedia -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import Context, HomeAssistant - -from . 
import ENTITY_ID -from .conftest import MockAssistSatellite - - -async def test_entity_state( - hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite -) -> None: - """Test entity state represent events.""" - - state = hass.states.get(ENTITY_ID) - assert state is not None - assert state.state == AssistSatelliteState.IDLE - - context = Context() - audio_stream = object() - - entity.async_set_context(context) - - with patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream" - ) as mock_start_pipeline: - await entity.async_accept_pipeline_from_satellite(audio_stream) - - assert mock_start_pipeline.called - kwargs = mock_start_pipeline.call_args[1] - assert kwargs["context"] is context - assert kwargs["event_callback"] == entity._internal_on_pipeline_event - assert kwargs["stt_metadata"] == stt.SpeechMetadata( - language="", - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ) - assert kwargs["stt_stream"] is audio_stream - assert kwargs["pipeline_id"] is None - assert kwargs["device_id"] is None - assert kwargs["tts_audio_output"] is None - assert kwargs["wake_word_phrase"] is None - assert kwargs["audio_settings"] == AudioSettings( - silence_seconds=vad.VadSensitivity.to_seconds(vad.VadSensitivity.DEFAULT) - ) - assert kwargs["start_stage"] == PipelineStage.STT - assert kwargs["end_stage"] == PipelineStage.TTS - - for event_type, event_data, expected_state in ( - (PipelineEventType.RUN_START, {}, AssistSatelliteState.IDLE), - (PipelineEventType.RUN_END, {}, AssistSatelliteState.IDLE), - ( - PipelineEventType.WAKE_WORD_START, - {}, - AssistSatelliteState.IDLE, - ), - (PipelineEventType.WAKE_WORD_END, {}, AssistSatelliteState.IDLE), - (PipelineEventType.STT_START, {}, AssistSatelliteState.LISTENING), - (PipelineEventType.STT_VAD_START, {}, AssistSatelliteState.LISTENING), - (PipelineEventType.STT_VAD_END, {}, AssistSatelliteState.LISTENING), - (PipelineEventType.STT_END, {}, AssistSatelliteState.LISTENING), - (PipelineEventType.INTENT_START, {}, AssistSatelliteState.PROCESSING), - ( - PipelineEventType.INTENT_END, - { - "intent_output": { - "conversation_id": "mock-conversation-id", - } - }, - AssistSatelliteState.PROCESSING, - ), - (PipelineEventType.TTS_START, {}, AssistSatelliteState.RESPONDING), - (PipelineEventType.TTS_END, {}, AssistSatelliteState.RESPONDING), - (PipelineEventType.ERROR, {}, AssistSatelliteState.RESPONDING), - ): - kwargs["event_callback"](PipelineEvent(event_type, event_data)) - state = hass.states.get(ENTITY_ID) - assert state.state == expected_state, event_type - - entity.tts_response_finished() - state = hass.states.get(ENTITY_ID) - assert state.state == AssistSatelliteState.IDLE - - -async def test_new_pipeline_cancels_pipeline( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, -) -> None: - """Test that a new pipeline run cancels any running pipeline.""" - pipeline1_started = asyncio.Event() - pipeline1_finished = asyncio.Event() - pipeline1_cancelled = asyncio.Event() - pipeline2_finished = asyncio.Event() - - async def async_pipeline_from_audio_stream(*args, **kwargs): - if not pipeline1_started.is_set(): - # First pipeline run - pipeline1_started.set() - - # Wait for pipeline to be cancelled - try: - await pipeline1_finished.wait() - except asyncio.CancelledError: - pipeline1_cancelled.set() - raise - else: - # Second 
pipeline run - pipeline2_finished.set() - - with ( - patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - ): - hass.async_create_task( - entity.async_accept_pipeline_from_satellite( - object(), # type: ignore[arg-type] - ) - ) - - async with asyncio.timeout(1): - await pipeline1_started.wait() - - # Start a second pipeline - await entity.async_accept_pipeline_from_satellite( - object(), # type: ignore[arg-type] - ) - await pipeline1_cancelled.wait() - await pipeline2_finished.wait() - - -@pytest.mark.parametrize( - ("service_data", "expected_params"), - [ - ( - {"message": "Hello"}, - AssistSatelliteAnnouncement( - "Hello", "https://www.home-assistant.io/resolved.mp3", "tts" - ), - ), - ( - { - "message": "Hello", - "media_id": "media-source://bla", - }, - AssistSatelliteAnnouncement( - "Hello", "https://www.home-assistant.io/resolved.mp3", "media_id" - ), - ), - ( - {"media_id": "http://example.com/bla.mp3"}, - AssistSatelliteAnnouncement("", "http://example.com/bla.mp3", "url"), - ), - ], -) -async def test_announce( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - service_data: dict, - expected_params: tuple[str, str], -) -> None: - """Test announcing on a device.""" - await async_update_pipeline( - hass, - async_get_pipeline(hass), - tts_engine="tts.mock_entity", - tts_language="en", - tts_voice="test-voice", - ) - - entity._attr_tts_options = {"test-option": "test-value"} - - original_announce = entity.async_announce - announce_started = asyncio.Event() - - async def async_announce(announcement): - # Verify state change - assert entity.state == AssistSatelliteState.RESPONDING - await original_announce(announcement) - announce_started.set() - - def tts_generate_media_source_id( - hass: HomeAssistant, - message: str, - engine: str | None = None, - language: str | None = None, - options: dict | None = None, - cache: bool | None = None, - ): - # Check that TTS options are passed here - assert options == {"test-option": "test-value", "voice": "test-voice"} - return "media-source://bla" - - with ( - patch( - "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", - new=tts_generate_media_source_id, - ), - patch( - "homeassistant.components.media_source.async_resolve_media", - return_value=PlayMedia( - url="https://www.home-assistant.io/resolved.mp3", - mime_type="audio/mp3", - ), - ), - patch.object(entity, "async_announce", new=async_announce), - ): - await hass.services.async_call( - "assist_satellite", - "announce", - service_data, - target={"entity_id": "assist_satellite.test_entity"}, - blocking=True, - ) - assert entity.state == AssistSatelliteState.IDLE - - assert entity.announcements[0] == expected_params - - -async def test_announce_busy( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, -) -> None: - """Test that announcing while an announcement is in progress raises an error.""" - media_id = "https://www.home-assistant.io/resolved.mp3" - announce_started = asyncio.Event() - got_error = asyncio.Event() - - async def async_announce(announcement): - announce_started.set() - - # Block so we can do another announcement - await got_error.wait() - - with patch.object(entity, "async_announce", new=async_announce): - announce_task = asyncio.create_task( - entity.async_internal_announce(media_id=media_id) - ) - async with asyncio.timeout(1): - await announce_started.wait() - - # Try to do a second 
announcement - with pytest.raises(SatelliteBusyError): - await entity.async_internal_announce(media_id=media_id) - - # Avoid lingering task - got_error.set() - await announce_task - - -async def test_announce_cancels_pipeline( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, -) -> None: - """Test that announcements cancel any running pipeline.""" - media_id = "https://www.home-assistant.io/resolved.mp3" - pipeline_started = asyncio.Event() - pipeline_finished = asyncio.Event() - pipeline_cancelled = asyncio.Event() - - async def async_pipeline_from_audio_stream(*args, **kwargs): - pipeline_started.set() - - # Wait for pipeline to be cancelled - try: - await pipeline_finished.wait() - except asyncio.CancelledError: - pipeline_cancelled.set() - raise - - with ( - patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - patch.object(entity, "async_announce") as mock_async_announce, - ): - hass.async_create_task( - entity.async_accept_pipeline_from_satellite( - object(), # type: ignore[arg-type] - ) - ) - - async with asyncio.timeout(1): - await pipeline_started.wait() - await entity.async_internal_announce(None, media_id) - await pipeline_cancelled.wait() - - mock_async_announce.assert_called_once() - - -async def test_context_refresh( - hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite -) -> None: - """Test that the context will be automatically refreshed.""" - audio_stream = object() - - # Remove context - entity._context = None - - with patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream" - ): - await entity.async_accept_pipeline_from_satellite(audio_stream) - - # Context should have been refreshed - assert entity._context is not None - - -async def test_pipeline_entity( - hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite -) -> None: - """Test getting pipeline from an entity.""" - audio_stream = object() - pipeline = Pipeline( - conversation_engine="test", - conversation_language="en", - language="en", - name="test-pipeline", - stt_engine=None, - stt_language=None, - tts_engine=None, - tts_language=None, - tts_voice=None, - wake_word_entity=None, - wake_word_id=None, - ) - - pipeline_entity_id = "select.pipeline" - hass.states.async_set(pipeline_entity_id, pipeline.name) - entity._attr_pipeline_entity_id = pipeline_entity_id - - done = asyncio.Event() - - async def async_pipeline_from_audio_stream(*args, pipeline_id: str, **kwargs): - assert pipeline_id == pipeline.id - done.set() - - with ( - patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - patch( - "homeassistant.components.assist_satellite.entity.async_get_pipelines", - return_value=[pipeline], - ), - ): - async with asyncio.timeout(1): - await entity.async_accept_pipeline_from_satellite(audio_stream) - await done.wait() - - -async def test_pipeline_entity_preferred( - hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite -) -> None: - """Test getting pipeline from an entity with a preferred state.""" - audio_stream = object() - - pipeline_entity_id = "select.pipeline" - hass.states.async_set(pipeline_entity_id, OPTION_PREFERRED) - entity._attr_pipeline_entity_id = pipeline_entity_id - - done = asyncio.Event() - - async def async_pipeline_from_audio_stream(*args, pipeline_id: str, **kwargs): - # Preferred 
pipeline - assert pipeline_id is None - done.set() - - with ( - patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - ): - async with asyncio.timeout(1): - await entity.async_accept_pipeline_from_satellite(audio_stream) - await done.wait() - - -async def test_vad_sensitivity_entity( - hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite -) -> None: - """Test getting vad sensitivity from an entity.""" - audio_stream = object() - - vad_sensitivity_entity_id = "select.vad_sensitivity" - hass.states.async_set(vad_sensitivity_entity_id, vad.VadSensitivity.AGGRESSIVE) - entity._attr_vad_sensitivity_entity_id = vad_sensitivity_entity_id - - done = asyncio.Event() - - async def async_pipeline_from_audio_stream( - *args, audio_settings: AudioSettings, **kwargs - ): - # Verify vad sensitivity - assert audio_settings.silence_seconds == vad.VadSensitivity.to_seconds( - vad.VadSensitivity.AGGRESSIVE - ) - done.set() - - with patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ): - async with asyncio.timeout(1): - await entity.async_accept_pipeline_from_satellite(audio_stream) - await done.wait() - - -async def test_pipeline_entity_not_found( - hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite -) -> None: - """Test that setting the pipeline entity id to a non-existent entity raises an error.""" - audio_stream = object() - - # Set to an entity that doesn't exist - entity._attr_pipeline_entity_id = "select.pipeline" - - with pytest.raises(RuntimeError): - await entity.async_accept_pipeline_from_satellite(audio_stream) - - -async def test_vad_sensitivity_entity_not_found( - hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite -) -> None: - """Test that setting the vad sensitivity entity id to a non-existent entity raises an error.""" - audio_stream = object() - - # Set to an entity that doesn't exist - entity._attr_vad_sensitivity_entity_id = "select.vad_sensitivity" - - with pytest.raises(RuntimeError): - await entity.async_accept_pipeline_from_satellite(audio_stream) diff --git a/tests/components/assist_satellite/test_websocket_api.py b/tests/components/assist_satellite/test_websocket_api.py deleted file mode 100644 index 257961a5b32..00000000000 --- a/tests/components/assist_satellite/test_websocket_api.py +++ /dev/null @@ -1,518 +0,0 @@ -"""Test WebSocket API.""" - -import asyncio -from http import HTTPStatus -from unittest.mock import patch - -from freezegun.api import FrozenDateTimeFactory -import pytest - -from homeassistant.components.assist_pipeline import PipelineStage -from homeassistant.components.assist_satellite.websocket_api import ( - CONNECTION_TEST_TIMEOUT, -) -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant - -from . 
import ENTITY_ID -from .conftest import MockAssistSatellite - -from tests.common import MockUser -from tests.typing import ClientSessionGenerator, WebSocketGenerator - - -async def test_intercept_wake_word( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test intercepting a wake word.""" - ws_client = await hass_ws_client(hass) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/intercept_wake_word", - "entity_id": ENTITY_ID, - } - ) - msg = await ws_client.receive_json() - assert msg["success"] - assert msg["result"] is None - subscription_id = msg["id"] - - await entity.async_accept_pipeline_from_satellite( - object(), # type: ignore[arg-type] - start_stage=PipelineStage.STT, - wake_word_phrase="ok, nabu", - ) - - async with asyncio.timeout(1): - msg = await ws_client.receive_json() - - assert msg["id"] == subscription_id - assert msg["type"] == "event" - assert msg["event"] == {"wake_word_phrase": "ok, nabu"} - - -async def test_intercept_wake_word_requires_on_device_wake_word( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test intercepting a wake word fails if detection happens in HA.""" - ws_client = await hass_ws_client(hass) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/intercept_wake_word", - "entity_id": ENTITY_ID, - } - ) - - async with asyncio.timeout(1): - msg = await ws_client.receive_json() - - assert msg["success"] - assert msg["result"] is None - - await entity.async_accept_pipeline_from_satellite( - object(), # type: ignore[arg-type] - # Emulate wake word processing in Home Assistant - start_stage=PipelineStage.WAKE_WORD, - ) - - async with asyncio.timeout(1): - msg = await ws_client.receive_json() - - assert not msg["success"] - assert msg["error"] == { - "code": "home_assistant_error", - "message": "Only on-device wake words currently supported", - } - - -async def test_intercept_wake_word_requires_wake_word_phrase( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test intercepting a wake word fails if detection happens in HA.""" - ws_client = await hass_ws_client(hass) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/intercept_wake_word", - "entity_id": ENTITY_ID, - } - ) - - async with asyncio.timeout(1): - msg = await ws_client.receive_json() - - assert msg["success"] - assert msg["result"] is None - - await entity.async_accept_pipeline_from_satellite( - object(), # type: ignore[arg-type] - start_stage=PipelineStage.STT, - # We are not passing wake word phrase - ) - - async with asyncio.timeout(1): - msg = await ws_client.receive_json() - - assert not msg["success"] - assert msg["error"] == { - "code": "home_assistant_error", - "message": "No wake word phrase provided", - } - - -async def test_intercept_wake_word_require_admin( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, - hass_admin_user: MockUser, -) -> None: - """Test intercepting a wake word requires admin access.""" - # Remove admin permission and verify we're not allowed - hass_admin_user.groups = [] - ws_client = await hass_ws_client(hass) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/intercept_wake_word", - "entity_id": ENTITY_ID, - } - ) - - async with 
asyncio.timeout(1): - msg = await ws_client.receive_json() - - assert not msg["success"] - assert msg["error"] == { - "code": "unauthorized", - "message": "Unauthorized", - } - - -async def test_intercept_wake_word_invalid_satellite( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test intercepting a wake word requires admin access.""" - ws_client = await hass_ws_client(hass) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/intercept_wake_word", - "entity_id": "assist_satellite.invalid", - } - ) - async with asyncio.timeout(1): - msg = await ws_client.receive_json() - - assert not msg["success"] - assert msg["error"] == { - "code": "not_found", - "message": "Entity not found", - } - - -async def test_intercept_wake_word_twice( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test intercepting a wake word twice cancels the previous request.""" - ws_client = await hass_ws_client(hass) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/intercept_wake_word", - "entity_id": ENTITY_ID, - } - ) - - async with asyncio.timeout(1): - msg = await ws_client.receive_json() - - assert msg["success"] - assert msg["result"] is None - - task = hass.async_create_task(ws_client.receive_json()) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/intercept_wake_word", - "entity_id": ENTITY_ID, - } - ) - - # Should get an error from previous subscription - async with asyncio.timeout(1): - msg = await task - - assert not msg["success"] - assert msg["error"] == { - "code": "home_assistant_error", - "message": "Wake word interception already in progress", - } - - # Response to second subscription - async with asyncio.timeout(1): - msg = await ws_client.receive_json() - - assert msg["success"] - assert msg["result"] is None - - -async def test_intercept_wake_word_unsubscribe( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test that closing the websocket connection stops interception.""" - ws_client = await hass_ws_client(hass) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/intercept_wake_word", - "entity_id": ENTITY_ID, - } - ) - - # Wait for interception to start - for _ in range(3): - await asyncio.sleep(0) - - async def receive_json(): - with pytest.raises(TypeError): - # Raises TypeError when connection is closed - await ws_client.receive_json() - - task = hass.async_create_task(receive_json()) - - # Close connection - await ws_client.close() - await task - - with ( - patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", - ) as mock_pipeline_from_audio_stream, - ): - # Start a pipeline with a wake word - await entity.async_accept_pipeline_from_satellite( - object(), - wake_word_phrase="ok, nabu", # type: ignore[arg-type] - ) - - # Wake word should not be intercepted - mock_pipeline_from_audio_stream.assert_called_once() - - -async def test_get_configuration( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test getting satellite configuration.""" - ws_client = await hass_ws_client(hass) - - with ( - patch.object(entity, "_attr_pipeline_entity_id", "select.test_pipeline"), - patch.object(entity, 
"_attr_vad_sensitivity_entity_id", "select.test_vad"), - ): - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/get_configuration", - "entity_id": ENTITY_ID, - } - ) - msg = await ws_client.receive_json() - assert msg["success"] - assert msg["result"] == { - "active_wake_words": ["1234"], - "available_wake_words": [ - {"id": "1234", "trained_languages": ["en"], "wake_word": "okay nabu"}, - {"id": "5678", "trained_languages": ["en"], "wake_word": "hey jarvis"}, - ], - "max_active_wake_words": 1, - "pipeline_entity_id": "select.test_pipeline", - "vad_entity_id": "select.test_vad", - } - - -async def test_set_wake_words( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test setting active wake words.""" - ws_client = await hass_ws_client(hass) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/set_wake_words", - "entity_id": ENTITY_ID, - "wake_word_ids": ["5678"], - } - ) - msg = await ws_client.receive_json() - assert msg["success"] - - # Verify change - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/get_configuration", - "entity_id": ENTITY_ID, - } - ) - msg = await ws_client.receive_json() - assert msg["success"] - assert msg["result"].get("active_wake_words") == ["5678"] - - -async def test_set_wake_words_exceed_maximum( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test setting too many active wake words.""" - ws_client = await hass_ws_client(hass) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/set_wake_words", - "entity_id": ENTITY_ID, - "wake_word_ids": ["1234", "5678"], # max of 1 - } - ) - msg = await ws_client.receive_json() - assert not msg["success"] - assert msg["error"] == { - "code": "not_supported", - "message": "Maximum number of active wake words is 1", - } - - -async def test_set_wake_words_bad_id( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test setting active wake words with a bad id.""" - ws_client = await hass_ws_client(hass) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/set_wake_words", - "entity_id": ENTITY_ID, - "wake_word_ids": ["abcd"], # not an available id - } - ) - msg = await ws_client.receive_json() - assert not msg["success"] - assert msg["error"] == { - "code": "not_supported", - "message": "Wake word id is not supported: abcd", - } - - -async def test_connection_test( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, - hass_client: ClientSessionGenerator, -) -> None: - """Test connection test.""" - ws_client = await hass_ws_client(hass) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/test_connection", - "entity_id": ENTITY_ID, - } - ) - - for _ in range(3): - await asyncio.sleep(0) - - assert len(entity.announcements) == 1 - assert entity.announcements[0].message == "" - announcement_media_id = entity.announcements[0].media_id - hass_url = "http://10.10.10.10:8123" - assert announcement_media_id.startswith( - f"{hass_url}/api/assist_satellite/connection_test/" - ) - - # Fake satellite fetches the URL - client = await hass_client() - resp = await client.get(announcement_media_id[len(hass_url) :]) - assert resp.status == HTTPStatus.OK - - response = await ws_client.receive_json() - 
assert response["success"] - assert response["result"] == {"status": "success"} - - -async def test_connection_test_timeout( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, - hass_client: ClientSessionGenerator, - freezer: FrozenDateTimeFactory, -) -> None: - """Test connection test timeout.""" - ws_client = await hass_ws_client(hass) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/test_connection", - "entity_id": ENTITY_ID, - } - ) - - for _ in range(3): - await asyncio.sleep(0) - - assert len(entity.announcements) == 1 - assert entity.announcements[0].message == "" - announcement_media_id = entity.announcements[0].media_id - hass_url = "http://10.10.10.10:8123" - assert announcement_media_id.startswith( - f"{hass_url}/api/assist_satellite/connection_test/" - ) - - freezer.tick(CONNECTION_TEST_TIMEOUT + 1) - - # Timeout - response = await ws_client.receive_json() - assert response["success"] - assert response["result"] == {"status": "timeout"} - - -async def test_connection_test_invalid_satellite( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test connection test with unknown entity id.""" - ws_client = await hass_ws_client(hass) - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/test_connection", - "entity_id": "assist_satellite.invalid", - } - ) - response = await ws_client.receive_json() - - assert not response["success"] - assert response["error"] == { - "code": "not_found", - "message": "Entity not found", - } - - -async def test_connection_test_timeout_announcement_unsupported( - hass: HomeAssistant, - init_components: ConfigEntry, - entity: MockAssistSatellite, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test connection test entity which does not support announce.""" - ws_client = await hass_ws_client(hass) - - # Disable announce support - entity.supported_features = 0 - - await ws_client.send_json_auto_id( - { - "type": "assist_satellite/test_connection", - "entity_id": ENTITY_ID, - } - ) - response = await ws_client.receive_json() - - assert not response["success"] - assert response["error"] == { - "code": "not_supported", - "message": "Entity does not support announce", - } diff --git a/tests/components/asterisk_mbox/__init__.py b/tests/components/asterisk_mbox/__init__.py new file mode 100644 index 00000000000..79e3675ad07 --- /dev/null +++ b/tests/components/asterisk_mbox/__init__.py @@ -0,0 +1 @@ +"""Tests for the asterisk component.""" diff --git a/tests/components/asterisk_mbox/const.py b/tests/components/asterisk_mbox/const.py new file mode 100644 index 00000000000..945c6b28d30 --- /dev/null +++ b/tests/components/asterisk_mbox/const.py @@ -0,0 +1,12 @@ +"""Asterisk tests constants.""" + +from homeassistant.components.asterisk_mbox import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT + +CONFIG = { + DOMAIN: { + CONF_HOST: "localhost", + CONF_PASSWORD: "password", + CONF_PORT: 1234, + } +} diff --git a/tests/components/asterisk_mbox/test_init.py b/tests/components/asterisk_mbox/test_init.py new file mode 100644 index 00000000000..4800ada0ec4 --- /dev/null +++ b/tests/components/asterisk_mbox/test_init.py @@ -0,0 +1,36 @@ +"""Test mailbox.""" + +from unittest.mock import Mock, patch + +import pytest +from typing_extensions import Generator + +from homeassistant.components.asterisk_mbox import DOMAIN +from homeassistant.core 
import HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.setup import async_setup_component + +from .const import CONFIG + + +@pytest.fixture +def client() -> Generator[Mock]: + """Mock client.""" + with patch( + "homeassistant.components.asterisk_mbox.asteriskClient", autospec=True + ) as client: + yield client + + +async def test_repair_issue_is_created( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + client: Mock, +) -> None: + """Test repair issue is created.""" + assert await async_setup_component(hass, DOMAIN, CONFIG) + await hass.async_block_till_done() + assert ( + DOMAIN, + "deprecated_integration", + ) in issue_registry.issues diff --git a/tests/components/asuswrt/conftest.py b/tests/components/asuswrt/conftest.py index f850a26b997..7710e26707c 100644 --- a/tests/components/asuswrt/conftest.py +++ b/tests/components/asuswrt/conftest.py @@ -16,30 +16,12 @@ ASUSWRT_LEGACY_LIB = f"{ASUSWRT_BASE}.bridge.AsusWrtLegacy" MOCK_BYTES_TOTAL = 60000000000, 50000000000 MOCK_BYTES_TOTAL_HTTP = dict(enumerate(MOCK_BYTES_TOTAL)) -MOCK_CPU_USAGE = { - "cpu1_usage": 0.1, - "cpu2_usage": 0.2, - "cpu3_usage": 0.3, - "cpu4_usage": 0.4, - "cpu5_usage": 0.5, - "cpu6_usage": 0.6, - "cpu7_usage": 0.7, - "cpu8_usage": 0.8, - "cpu_total_usage": 0.9, -} MOCK_CURRENT_TRANSFER_RATES = 20000000, 10000000 MOCK_CURRENT_TRANSFER_RATES_HTTP = dict(enumerate(MOCK_CURRENT_TRANSFER_RATES)) MOCK_LOAD_AVG_HTTP = {"load_avg_1": 1.1, "load_avg_5": 1.2, "load_avg_15": 1.3} MOCK_LOAD_AVG = list(MOCK_LOAD_AVG_HTTP.values()) -MOCK_MEMORY_USAGE = { - "mem_usage_perc": 52.4, - "mem_total": 1048576, - "mem_free": 393216, - "mem_used": 655360, -} MOCK_TEMPERATURES = {"2.4GHz": 40.2, "5.0GHz": 0, "CPU": 71.2} MOCK_TEMPERATURES_HTTP = {**MOCK_TEMPERATURES, "5.0GHz_2": 40.3, "6.0GHz": 40.4} -MOCK_UPTIME = {"last_boot": "2024-08-02T00:47:00+00:00", "uptime": 1625927} @pytest.fixture(name="patch_setup_entry") @@ -139,11 +121,6 @@ def mock_controller_connect_http(mock_devices_http): service_mock.return_value.async_get_temperatures.return_value = { k: v for k, v in MOCK_TEMPERATURES_HTTP.items() if k != "5.0GHz" } - service_mock.return_value.async_get_cpu_usage.return_value = MOCK_CPU_USAGE - service_mock.return_value.async_get_memory_usage.return_value = ( - MOCK_MEMORY_USAGE - ) - service_mock.return_value.async_get_uptime.return_value = MOCK_UPTIME yield service_mock @@ -156,22 +133,13 @@ def mock_controller_connect_http_sens_fail(connect_http): connect_http.return_value.async_get_traffic_rates.side_effect = AsusWrtError connect_http.return_value.async_get_loadavg.side_effect = AsusWrtError connect_http.return_value.async_get_temperatures.side_effect = AsusWrtError - connect_http.return_value.async_get_cpu_usage.side_effect = AsusWrtError - connect_http.return_value.async_get_memory_usage.side_effect = AsusWrtError - connect_http.return_value.async_get_uptime.side_effect = AsusWrtError @pytest.fixture(name="connect_http_sens_detect") def mock_controller_connect_http_sens_detect(): """Mock a successful sensor detection using http library.""" - with ( - patch( - f"{ASUSWRT_BASE}.bridge.AsusWrtHttpBridge._get_available_temperature_sensors", - return_value=[*MOCK_TEMPERATURES_HTTP], - ) as mock_sens_temp_detect, - patch( - f"{ASUSWRT_BASE}.bridge.AsusWrtHttpBridge._get_available_cpu_sensors", - return_value=[*MOCK_CPU_USAGE], - ) as mock_sens_cpu_detect, - ): - yield mock_sens_temp_detect, mock_sens_cpu_detect + with patch( + 
f"{ASUSWRT_BASE}.bridge.AsusWrtHttpBridge._get_available_temperature_sensors", + return_value=[*MOCK_TEMPERATURES_HTTP], + ) as mock_sens_detect: + yield mock_sens_detect diff --git a/tests/components/asuswrt/test_diagnostics.py b/tests/components/asuswrt/test_diagnostics.py index 1acaf686567..207f3ba25f0 100644 --- a/tests/components/asuswrt/test_diagnostics.py +++ b/tests/components/asuswrt/test_diagnostics.py @@ -38,4 +38,4 @@ async def test_diagnostics( hass, hass_client, mock_config_entry ) - assert result["entry"] == entry_dict | {"discovery_keys": {}} + assert result["entry"] == entry_dict diff --git a/tests/components/asuswrt/test_sensor.py b/tests/components/asuswrt/test_sensor.py index 0036c40a6f2..3de830f3f34 100644 --- a/tests/components/asuswrt/test_sensor.py +++ b/tests/components/asuswrt/test_sensor.py @@ -2,7 +2,6 @@ from datetime import timedelta -from freezegun.api import FrozenDateTimeFactory from pyasuswrt.exceptions import AsusWrtError, AsusWrtNotAvailableInfoError import pytest @@ -11,13 +10,10 @@ from homeassistant.components.asuswrt.const import ( CONF_INTERFACE, DOMAIN, SENSORS_BYTES, - SENSORS_CPU, SENSORS_LOAD_AVG, - SENSORS_MEMORY, SENSORS_RATES, SENSORS_TEMPERATURES, SENSORS_TEMPERATURES_LEGACY, - SENSORS_UPTIME, ) from homeassistant.components.device_tracker import CONF_CONSIDER_HOME from homeassistant.config_entries import ConfigEntryState @@ -30,6 +26,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import slugify +from homeassistant.util.dt import utcnow from .common import ( CONFIG_DATA_HTTP, @@ -45,14 +42,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed SENSORS_DEFAULT = [*SENSORS_BYTES, *SENSORS_RATES] SENSORS_ALL_LEGACY = [*SENSORS_DEFAULT, *SENSORS_LOAD_AVG, *SENSORS_TEMPERATURES_LEGACY] -SENSORS_ALL_HTTP = [ - *SENSORS_DEFAULT, - *SENSORS_CPU, - *SENSORS_LOAD_AVG, - *SENSORS_MEMORY, - *SENSORS_TEMPERATURES, - *SENSORS_UPTIME, -] +SENSORS_ALL_HTTP = [*SENSORS_DEFAULT, *SENSORS_LOAD_AVG, *SENSORS_TEMPERATURES] @pytest.fixture(name="create_device_registry_devices") @@ -105,7 +95,6 @@ def _setup_entry(hass: HomeAssistant, config, sensors, unique_id=None): async def _test_sensors( hass: HomeAssistant, - freezer: FrozenDateTimeFactory, mock_devices, config, entry_unique_id, @@ -136,8 +125,7 @@ async def _test_sensors( # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - freezer.tick(timedelta(seconds=30)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) await hass.async_block_till_done() assert hass.states.get(f"{device_tracker.DOMAIN}.test").state == STATE_HOME @@ -151,8 +139,7 @@ async def _test_sensors( # remove first tracked device mock_devices.pop(MOCK_MACS[0]) - freezer.tick(timedelta(seconds=30)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) await hass.async_block_till_done() # consider home option set, all devices still home but only 1 device connected @@ -173,8 +160,7 @@ async def _test_sensors( config_entry, options={CONF_CONSIDER_HOME: 0} ) await hass.async_block_till_done() - freezer.tick(timedelta(seconds=30)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) await hass.async_block_till_done() # consider home option set to 0, device "test" not home @@ -190,16 +176,13 
@@ async def _test_sensors( ) async def test_sensors_legacy( hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_devices_legacy, - entry_unique_id, connect_legacy, + mock_devices_legacy, create_device_registry_devices, + entry_unique_id, ) -> None: """Test creating AsusWRT default sensors and tracker with legacy protocol.""" - await _test_sensors( - hass, freezer, mock_devices_legacy, CONFIG_DATA_TELNET, entry_unique_id - ) + await _test_sensors(hass, mock_devices_legacy, CONFIG_DATA_TELNET, entry_unique_id) @pytest.mark.parametrize( @@ -208,21 +191,16 @@ async def test_sensors_legacy( ) async def test_sensors_http( hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_devices_http, - entry_unique_id, connect_http, + mock_devices_http, create_device_registry_devices, + entry_unique_id, ) -> None: """Test creating AsusWRT default sensors and tracker with http protocol.""" - await _test_sensors( - hass, freezer, mock_devices_http, CONFIG_DATA_HTTP, entry_unique_id - ) + await _test_sensors(hass, mock_devices_http, CONFIG_DATA_HTTP, entry_unique_id) -async def _test_loadavg_sensors( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, config -) -> None: +async def _test_loadavg_sensors(hass: HomeAssistant, config) -> None: """Test creating an AsusWRT load average sensors.""" config_entry, sensor_prefix = _setup_entry(hass, config, SENSORS_LOAD_AVG) config_entry.add_to_hass(hass) @@ -230,8 +208,7 @@ async def _test_loadavg_sensors( # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - freezer.tick(timedelta(seconds=30)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) await hass.async_block_till_done() # assert temperature sensor available @@ -240,22 +217,18 @@ async def _test_loadavg_sensors( assert hass.states.get(f"{sensor_prefix}_sensor_load_avg15").state == "1.3" -async def test_loadavg_sensors_legacy( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_legacy -) -> None: +async def test_loadavg_sensors_legacy(hass: HomeAssistant, connect_legacy) -> None: """Test creating an AsusWRT load average sensors.""" - await _test_loadavg_sensors(hass, freezer, CONFIG_DATA_TELNET) + await _test_loadavg_sensors(hass, CONFIG_DATA_TELNET) -async def test_loadavg_sensors_http( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http -) -> None: +async def test_loadavg_sensors_http(hass: HomeAssistant, connect_http) -> None: """Test creating an AsusWRT load average sensors.""" - await _test_loadavg_sensors(hass, freezer, CONFIG_DATA_HTTP) + await _test_loadavg_sensors(hass, CONFIG_DATA_HTTP) async def test_loadavg_sensors_unaivalable_http( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http + hass: HomeAssistant, connect_http ) -> None: """Test load average sensors no available using http.""" config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_LOAD_AVG) @@ -268,8 +241,7 @@ async def test_loadavg_sensors_unaivalable_http( # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - freezer.tick(timedelta(seconds=30)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) await hass.async_block_till_done() # assert load average sensors not available @@ -299,9 +271,7 @@ async def test_temperature_sensors_http_fail( assert not hass.states.get(f"{sensor_prefix}_6_0ghz") -async def 
_test_temperature_sensors(
-    hass: HomeAssistant, freezer: FrozenDateTimeFactory, config, sensors
-) -> str:
+async def _test_temperature_sensors(hass: HomeAssistant, config, sensors) -> str:
     """Test creating AsusWRT temperature sensors."""
     config_entry, sensor_prefix = _setup_entry(hass, config, sensors)
     config_entry.add_to_hass(hass)
@@ -309,19 +279,16 @@ async def _test_temperature_sensors(
     # initial devices setup
     assert await hass.config_entries.async_setup(config_entry.entry_id)
     await hass.async_block_till_done()
-    freezer.tick(timedelta(seconds=30))
-    async_fire_time_changed(hass)
+    async_fire_time_changed(hass, utcnow() + timedelta(seconds=30))
     await hass.async_block_till_done()

     return sensor_prefix


-async def test_temperature_sensors_legacy(
-    hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_legacy
-) -> None:
+async def test_temperature_sensors_legacy(hass: HomeAssistant, connect_legacy) -> None:
     """Test creating AsusWRT temperature sensors."""
     sensor_prefix = await _test_temperature_sensors(
-        hass, freezer, CONFIG_DATA_TELNET, SENSORS_TEMPERATURES_LEGACY
+        hass, CONFIG_DATA_TELNET, SENSORS_TEMPERATURES_LEGACY
     )
     # assert temperature sensor available
     assert hass.states.get(f"{sensor_prefix}_2_4ghz").state == "40.2"
@@ -329,12 +296,10 @@ async def test_temperature_sensors_legacy(
     assert not hass.states.get(f"{sensor_prefix}_5_0ghz")


-async def test_temperature_sensors_http(
-    hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http
-) -> None:
+async def test_temperature_sensors_http(hass: HomeAssistant, connect_http) -> None:
     """Test creating AsusWRT temperature sensors."""
     sensor_prefix = await _test_temperature_sensors(
-        hass, freezer, CONFIG_DATA_HTTP, SENSORS_TEMPERATURES
+        hass, CONFIG_DATA_HTTP, SENSORS_TEMPERATURES
     )
     # assert temperature sensor available
     assert hass.states.get(f"{sensor_prefix}_2_4ghz").state == "40.2"
@@ -344,97 +309,6 @@ async def test_temperature_sensors_http(
     assert not hass.states.get(f"{sensor_prefix}_5_0ghz")


-async def test_cpu_sensors_http_fail(
-    hass: HomeAssistant, connect_http_sens_fail
-) -> None:
-    """Test fail creating AsusWRT cpu sensors."""
-    config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_CPU)
-    config_entry.add_to_hass(hass)
-
-    # initial devices setup
-    assert await hass.config_entries.async_setup(config_entry.entry_id)
-    await hass.async_block_till_done()
-
-    # assert cpu availability exception is handled correctly
-    assert not hass.states.get(f"{sensor_prefix}_cpu1_usage")
-    assert not hass.states.get(f"{sensor_prefix}_cpu2_usage")
-    assert not hass.states.get(f"{sensor_prefix}_cpu3_usage")
-    assert not hass.states.get(f"{sensor_prefix}_cpu4_usage")
-    assert not hass.states.get(f"{sensor_prefix}_cpu5_usage")
-    assert not hass.states.get(f"{sensor_prefix}_cpu6_usage")
-    assert not hass.states.get(f"{sensor_prefix}_cpu7_usage")
-    assert not hass.states.get(f"{sensor_prefix}_cpu8_usage")
-    assert not hass.states.get(f"{sensor_prefix}_cpu_total_usage")
-
-
-async def test_cpu_sensors_http(
-    hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http
-) -> None:
-    """Test creating AsusWRT cpu sensors."""
-    config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_CPU)
-    config_entry.add_to_hass(hass)
-
-    # initial devices setup
-    assert await hass.config_entries.async_setup(config_entry.entry_id)
-    await hass.async_block_till_done()
-    freezer.tick(timedelta(seconds=30))
-    async_fire_time_changed(hass)
-    await hass.async_block_till_done()
-
-    # assert cpu sensors available
-    assert hass.states.get(f"{sensor_prefix}_cpu1_usage").state == "0.1"
-    assert hass.states.get(f"{sensor_prefix}_cpu2_usage").state == "0.2"
-    assert hass.states.get(f"{sensor_prefix}_cpu3_usage").state == "0.3"
-    assert hass.states.get(f"{sensor_prefix}_cpu4_usage").state == "0.4"
-    assert hass.states.get(f"{sensor_prefix}_cpu5_usage").state == "0.5"
-    assert hass.states.get(f"{sensor_prefix}_cpu6_usage").state == "0.6"
-    assert hass.states.get(f"{sensor_prefix}_cpu7_usage").state == "0.7"
-    assert hass.states.get(f"{sensor_prefix}_cpu8_usage").state == "0.8"
-    assert hass.states.get(f"{sensor_prefix}_cpu_total_usage").state == "0.9"
-
-
-async def test_memory_sensors_http(
-    hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http
-) -> None:
-    """Test creating AsusWRT memory sensors."""
-    config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_MEMORY)
-    config_entry.add_to_hass(hass)
-
-    # initial devices setup
-    assert await hass.config_entries.async_setup(config_entry.entry_id)
-    await hass.async_block_till_done()
-    freezer.tick(timedelta(seconds=30))
-    async_fire_time_changed(hass)
-    await hass.async_block_till_done()
-
-    # assert memory sensors available
-    assert hass.states.get(f"{sensor_prefix}_mem_usage_perc").state == "52.4"
-    assert hass.states.get(f"{sensor_prefix}_mem_free").state == "384.0"
-    assert hass.states.get(f"{sensor_prefix}_mem_used").state == "640.0"
-
-
-async def test_uptime_sensors_http(
-    hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http
-) -> None:
-    """Test creating AsusWRT uptime sensors."""
-    config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_UPTIME)
-    config_entry.add_to_hass(hass)
-
-    # initial devices setup
-    assert await hass.config_entries.async_setup(config_entry.entry_id)
-    await hass.async_block_till_done()
-    freezer.tick(timedelta(seconds=30))
-    async_fire_time_changed(hass)
-    await hass.async_block_till_done()
-
-    # assert uptime sensors available
-    assert (
-        hass.states.get(f"{sensor_prefix}_sensor_last_boot").state
-        == "2024-08-02T00:47:00+00:00"
-    )
-    assert hass.states.get(f"{sensor_prefix}_sensor_uptime").state == "1625927"
-
-
 @pytest.mark.parametrize(
     "side_effect",
     [OSError, None],
@@ -485,9 +359,7 @@ async def test_connect_fail_http(
     assert config_entry.state is ConfigEntryState.SETUP_RETRY


-async def _test_sensors_polling_fails(
-    hass: HomeAssistant, freezer: FrozenDateTimeFactory, config, sensors
-) -> None:
+async def _test_sensors_polling_fails(hass: HomeAssistant, config, sensors) -> None:
     """Test AsusWRT sensors are unavailable when polling fails."""
     config_entry, sensor_prefix = _setup_entry(hass, config, sensors)
     config_entry.add_to_hass(hass)
@@ -495,8 +367,7 @@ async def _test_sensors_polling_fails(
     # initial devices setup
     assert await hass.config_entries.async_setup(config_entry.entry_id)
     await hass.async_block_till_done()
-    freezer.tick(timedelta(seconds=30))
-    async_fire_time_changed(hass)
+    async_fire_time_changed(hass, utcnow() + timedelta(seconds=30))
     await hass.async_block_till_done()

     for sensor_name in sensors:
@@ -509,28 +380,22 @@ async def _test_sensors_polling_fails(

 async def test_sensors_polling_fails_legacy(
     hass: HomeAssistant,
-    freezer: FrozenDateTimeFactory,
     connect_legacy_sens_fail,
 ) -> None:
     """Test AsusWRT sensors are unavailable when polling fails."""
-    await _test_sensors_polling_fails(
-        hass, freezer, CONFIG_DATA_TELNET, SENSORS_ALL_LEGACY
-    )
+    await _test_sensors_polling_fails(hass, CONFIG_DATA_TELNET, SENSORS_ALL_LEGACY)


 async def test_sensors_polling_fails_http(
     hass: HomeAssistant,
-    freezer: FrozenDateTimeFactory,
     connect_http_sens_fail,
     connect_http_sens_detect,
 ) -> None:
     """Test AsusWRT sensors are unavailable when polling fails."""
-    await _test_sensors_polling_fails(hass, freezer, CONFIG_DATA_HTTP, SENSORS_ALL_HTTP)
+    await _test_sensors_polling_fails(hass, CONFIG_DATA_HTTP, SENSORS_ALL_HTTP)


-async def test_options_reload(
-    hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_legacy
-) -> None:
+async def test_options_reload(hass: HomeAssistant, connect_legacy) -> None:
     """Test AsusWRT integration is reloaded when changing an option that requires it."""
     config_entry = MockConfigEntry(
         domain=DOMAIN,
@@ -543,8 +408,7 @@ async def test_options_reload(
     await hass.async_block_till_done()
     assert connect_legacy.return_value.connection.async_connect.call_count == 1

-    freezer.tick(timedelta(seconds=30))
-    async_fire_time_changed(hass)
+    async_fire_time_changed(hass, utcnow() + timedelta(seconds=30))
     await hass.async_block_till_done()

     # change an option that requires integration reload
@@ -587,10 +451,7 @@ async def test_unique_id_migration(


 async def test_decorator_errors(
-    hass: HomeAssistant,
-    freezer: FrozenDateTimeFactory,
-    connect_legacy,
-    mock_available_temps,
+    hass: HomeAssistant, connect_legacy, mock_available_temps
 ) -> None:
     """Test AsusWRT sensors are unavailable on decorator type check error."""
     sensors = [*SENSORS_BYTES, *SENSORS_TEMPERATURES_LEGACY]
@@ -604,8 +465,7 @@ async def test_decorator_errors(
     # initial devices setup
     assert await hass.config_entries.async_setup(config_entry.entry_id)
     await hass.async_block_till_done()
-    freezer.tick(timedelta(seconds=30))
-    async_fire_time_changed(hass)
+    async_fire_time_changed(hass, utcnow() + timedelta(seconds=30))
     await hass.async_block_till_done()

     for sensor_name in sensors:
diff --git a/tests/components/atag/__init__.py b/tests/components/atag/__init__.py
index a240cc47c7f..adea1e07be7 100644
--- a/tests/components/atag/__init__.py
+++ b/tests/components/atag/__init__.py
@@ -1,8 +1,6 @@
 """Tests for the Atag integration."""

-from pyatag import AtagException
-
-from homeassistant.components.atag import DOMAIN
+from homeassistant.components.atag import DOMAIN, AtagException
 from homeassistant.const import CONF_HOST, CONF_PORT
 from homeassistant.core import HomeAssistant
diff --git a/tests/components/atag/conftest.py b/tests/components/atag/conftest.py
index 63476c4846d..83ba3e37aad 100644
--- a/tests/components/atag/conftest.py
+++ b/tests/components/atag/conftest.py
@@ -1,10 +1,10 @@
 """Provide common Atag fixtures."""

 import asyncio
-from collections.abc import Generator
 from unittest.mock import AsyncMock, patch

 import pytest
+from typing_extensions import Generator


 @pytest.fixture
diff --git a/tests/components/atag/test_climate.py b/tests/components/atag/test_climate.py
index b4f2a0f3f0f..bc78ee58216 100644
--- a/tests/components/atag/test_climate.py
+++ b/tests/components/atag/test_climate.py
@@ -2,8 +2,7 @@

 from unittest.mock import PropertyMock, patch

-from homeassistant.components.atag import DOMAIN
-from homeassistant.components.atag.climate import PRESET_MAP
+from homeassistant.components.atag.climate import DOMAIN, PRESET_MAP
 from homeassistant.components.climate import (
     ATTR_HVAC_ACTION,
     ATTR_HVAC_MODE,
@@ -105,10 +104,10 @@ async def test_update_failed(
     entry = await init_integration(hass, aioclient_mock)
     await async_setup_component(hass, HA_DOMAIN, {})
     assert hass.states.get(CLIMATE_ID).state ==
HVACMode.HEAT - coordinator = entry.runtime_data + coordinator = hass.data[DOMAIN][entry.entry_id] with patch("pyatag.AtagOne.update", side_effect=TimeoutError) as updater: await coordinator.async_refresh() await hass.async_block_till_done() updater.assert_called_once() assert not coordinator.last_update_success - assert coordinator.atag.id == UID + assert coordinator.data.id == UID diff --git a/tests/components/atag/test_init.py b/tests/components/atag/test_init.py index 7c65150fbf6..59f38ae7bfe 100644 --- a/tests/components/atag/test_init.py +++ b/tests/components/atag/test_init.py @@ -1,5 +1,6 @@ """Tests for the ATAG integration.""" +from homeassistant.components.atag import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -22,7 +23,7 @@ async def test_unload_config_entry( ) -> None: """Test the ATAG configuration entry unloading.""" entry = await init_integration(hass, aioclient_mock) - assert entry.runtime_data + assert hass.data[DOMAIN] await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() - assert not hasattr(entry, "runtime_data") + assert not hass.data.get(DOMAIN) diff --git a/tests/components/august/conftest.py b/tests/components/august/conftest.py index 78cb2cdad89..052cde7d2a2 100644 --- a/tests/components/august/conftest.py +++ b/tests/components/august/conftest.py @@ -3,7 +3,6 @@ from unittest.mock import patch import pytest -from yalexs.manager.ratelimit import _RateLimitChecker @pytest.fixture(name="mock_discovery", autouse=True) @@ -13,10 +12,3 @@ def mock_discovery_fixture(): "homeassistant.components.august.data.discovery_flow.async_create_flow" ) as mock_discovery: yield mock_discovery - - -@pytest.fixture(name="disable_ratelimit_checks", autouse=True) -def disable_ratelimit_checks_fixture(): - """Disable rate limit checks.""" - with patch.object(_RateLimitChecker, "register_wakeup"): - yield diff --git a/tests/components/august/fixtures/get_lock.low_keypad_battery.json b/tests/components/august/fixtures/get_lock.low_keypad_battery.json index 43b5513a527..08bdfaa76ed 100644 --- a/tests/components/august/fixtures/get_lock.low_keypad_battery.json +++ b/tests/components/august/fixtures/get_lock.low_keypad_battery.json @@ -36,7 +36,7 @@ "currentFirmwareVersion": "2.27.0", "battery": {}, "batteryLevel": "Low", - "batteryRaw": 128 + "batteryRaw": 170 }, "OfflineKeys": { "created": [], diff --git a/tests/components/august/mocks.py b/tests/components/august/mocks.py index 43cc4957445..62c01d38d0c 100644 --- a/tests/components/august/mocks.py +++ b/tests/components/august/mocks.py @@ -18,7 +18,6 @@ from yalexs.activity import ( ACTIVITY_ACTIONS_LOCK_OPERATION, SOURCE_LOCK_OPERATE, SOURCE_LOG, - Activity, BridgeOperationActivity, DoorbellDingActivity, DoorbellMotionActivity, @@ -26,7 +25,7 @@ from yalexs.activity import ( DoorOperationActivity, LockOperationActivity, ) -from yalexs.authenticator_common import AuthenticationState +from yalexs.authenticator import AuthenticationState from yalexs.const import Brand from yalexs.doorbell import Doorbell, DoorbellDetail from yalexs.lock import Lock, LockDetail @@ -59,15 +58,11 @@ def _mock_authenticator(auth_state): return authenticator -def _timetoken(): - return str(time.time_ns())[:-2] - - @patch("yalexs.manager.gateway.ApiAsync") @patch("yalexs.manager.gateway.AuthenticatorAsync.async_authenticate") async def _mock_setup_august( - hass: HomeAssistant, api_instance, pubnub_mock, authenticate_mock, api_mock, brand -) -> 
MockConfigEntry: + hass, api_instance, pubnub_mock, authenticate_mock, api_mock, brand +): """Set up august integration.""" authenticate_mock.side_effect = MagicMock( return_value=_mock_august_authentication( @@ -82,7 +77,10 @@ async def _mock_setup_august( ) entry.add_to_hass(hass) with ( - patch.object(pubnub_mock, "run"), + patch( + "yalexs.manager.data.async_create_pubnub", + return_value=AsyncMock(), + ), patch("yalexs.manager.data.AugustPubNub", return_value=pubnub_mock), ): assert await hass.config_entries.async_setup(entry.entry_id) @@ -105,13 +103,13 @@ async def _create_august_with_devices( async def _create_august_api_with_devices( - hass: HomeAssistant, - devices: Iterable[LockDetail | DoorbellDetail], - api_call_side_effects: dict[str, Any] | None = None, - activities: list[Any] | None = None, - pubnub: AugustPubNub | None = None, - brand: Brand = Brand.AUGUST, -) -> tuple[MockConfigEntry, MagicMock]: + hass, + devices, + api_call_side_effects=None, + activities=None, + pubnub=None, + brand=Brand.AUGUST, +): if api_call_side_effects is None: api_call_side_effects = {} if pubnub is None: @@ -213,10 +211,7 @@ async def _create_august_api_with_devices( async def _mock_setup_august_with_api_side_effects( - hass: HomeAssistant, - api_call_side_effects: dict[str, Any], - pubnub: AugustPubNub, - brand: Brand = Brand.AUGUST, + hass, api_call_side_effects, pubnub, brand=Brand.AUGUST ): api_instance = MagicMock(name="Api", brand=brand) @@ -336,21 +331,19 @@ def _mock_august_lock_data(lockid="mocklockid1", houseid="mockhouseid1"): } -async def _mock_operative_august_lock_detail(hass: HomeAssistant) -> LockDetail: +async def _mock_operative_august_lock_detail(hass): return await _mock_lock_from_fixture(hass, "get_lock.online.json") -async def _mock_lock_with_offline_key(hass: HomeAssistant) -> LockDetail: +async def _mock_lock_with_offline_key(hass): return await _mock_lock_from_fixture(hass, "get_lock.online_with_keys.json") -async def _mock_inoperative_august_lock_detail(hass: HomeAssistant) -> LockDetail: +async def _mock_inoperative_august_lock_detail(hass): return await _mock_lock_from_fixture(hass, "get_lock.offline.json") -async def _mock_activities_from_fixture( - hass: HomeAssistant, path: str -) -> list[Activity]: +async def _mock_activities_from_fixture(hass, path): json_dict = await _load_json_fixture(hass, path) activities = [] for activity_json in json_dict: @@ -361,32 +354,32 @@ async def _mock_activities_from_fixture( return activities -async def _mock_lock_from_fixture(hass: HomeAssistant, path: str) -> LockDetail: +async def _mock_lock_from_fixture(hass, path): json_dict = await _load_json_fixture(hass, path) return LockDetail(json_dict) -async def _mock_doorbell_from_fixture(hass: HomeAssistant, path: str) -> DoorbellDetail: +async def _mock_doorbell_from_fixture(hass, path): json_dict = await _load_json_fixture(hass, path) return DoorbellDetail(json_dict) -async def _load_json_fixture(hass: HomeAssistant, path: str) -> Any: +async def _load_json_fixture(hass, path): fixture = await hass.async_add_executor_job( load_fixture, os.path.join("august", path) ) return json.loads(fixture) -async def _mock_doorsense_enabled_august_lock_detail(hass: HomeAssistant) -> LockDetail: +async def _mock_doorsense_enabled_august_lock_detail(hass): return await _mock_lock_from_fixture(hass, "get_lock.online_with_doorsense.json") -async def _mock_doorsense_missing_august_lock_detail(hass: HomeAssistant) -> LockDetail: +async def _mock_doorsense_missing_august_lock_detail(hass): return 
await _mock_lock_from_fixture(hass, "get_lock.online_missing_doorsense.json") -async def _mock_lock_with_unlatch(hass: HomeAssistant) -> LockDetail: +async def _mock_lock_with_unlatch(hass): return await _mock_lock_from_fixture(hass, "get_lock.online_with_unlatch.json") @@ -414,7 +407,7 @@ def _mock_door_operation_activity(lock, action, offset): ) -def _activity_from_dict(activity_dict: dict[str, Any]) -> Activity | None: +def _activity_from_dict(activity_dict): action = activity_dict.get("action") activity_dict["dateTime"] = time.time() * 1000 diff --git a/tests/components/august/snapshots/test_binary_sensor.ambr b/tests/components/august/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 6e95b0ce552..00000000000 --- a/tests/components/august/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: test_doorbell_device_registry - DeviceRegistryEntrySnapshot({ - 'area_id': 'tmt100_name', - 'config_entries': , - 'configuration_url': 'https://account.august.com', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'august', - 'tmt100', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'August Home Inc.', - 'model': 'hydra1', - 'model_id': None, - 'name': 'tmt100 Name', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': 'tmt100 Name', - 'sw_version': '3.1.0-HYDRC75+201909251139', - 'via_device_id': None, - }) -# --- diff --git a/tests/components/august/snapshots/test_lock.ambr b/tests/components/august/snapshots/test_lock.ambr deleted file mode 100644 index 6aad3a140ca..00000000000 --- a/tests/components/august/snapshots/test_lock.ambr +++ /dev/null @@ -1,37 +0,0 @@ -# serializer version: 1 -# name: test_lock_device_registry - DeviceRegistryEntrySnapshot({ - 'area_id': 'online_with_doorsense_name', - 'config_entries': , - 'configuration_url': 'https://account.august.com', - 'connections': set({ - tuple( - 'bluetooth', - '12:22', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'august', - 'online_with_doorsense', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'August Home Inc.', - 'model': 'AUG-MD01', - 'model_id': None, - 'name': 'online_with_doorsense Name', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': 'online_with_doorsense Name', - 'sw_version': 'undefined-4.3.0-1.8.14', - 'via_device_id': None, - }) -# --- diff --git a/tests/components/august/test_binary_sensor.py b/tests/components/august/test_binary_sensor.py index 4ae300ae56b..377a5bf2897 100644 --- a/tests/components/august/test_binary_sensor.py +++ b/tests/components/august/test_binary_sensor.py @@ -1,10 +1,9 @@ """The binary_sensor tests for the august platform.""" import datetime -from unittest.mock import Mock +import time +from unittest.mock import Mock, patch -from freezegun.api import FrozenDateTimeFactory -from syrupy import SnapshotAssertion from yalexs.pubnub_async import AugustPubNub from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN @@ -26,32 +25,43 @@ from .mocks import ( _mock_doorbell_from_fixture, _mock_doorsense_enabled_august_lock_detail, _mock_lock_from_fixture, - _timetoken, ) from tests.common import async_fire_time_changed +def _timetoken(): + return str(time.time_ns())[:-2] + + async def test_doorsense(hass: HomeAssistant) -> None: """Test 
creation of a lock with doorsense and bridge.""" lock_one = await _mock_lock_from_fixture( hass, "get_lock.online_with_doorsense.json" ) await _create_august_with_devices(hass, [lock_one]) - states = hass.states - assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_ON data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + await hass.async_block_till_done() - assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_ON await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + await hass.async_block_till_done() - assert ( - states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" ) + assert binary_sensor_online_with_doorsense_name.state == STATE_OFF async def test_lock_bridge_offline(hass: HomeAssistant) -> None: @@ -63,82 +73,113 @@ async def test_lock_bridge_offline(hass: HomeAssistant) -> None: hass, "get_activity.bridge_offline.json" ) await _create_august_with_devices(hass, [lock_one], activities=activities) - states = hass.states - assert ( - states.get("binary_sensor.online_with_doorsense_name_door").state - == STATE_UNAVAILABLE + + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" ) + assert binary_sensor_online_with_doorsense_name.state == STATE_UNAVAILABLE async def test_create_doorbell(hass: HomeAssistant) -> None: """Test creation of a doorbell.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") await _create_august_with_devices(hass, [doorbell_one]) - states = hass.states - assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF - assert ( - states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF + binary_sensor_k98gidt45gul_name_motion = hass.states.get( + "binary_sensor.k98gidt45gul_name_motion" ) - assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON - assert ( - states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF + binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( + "binary_sensor.k98gidt45gul_name_image_capture" ) - assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF - assert ( - states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF + assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF + binary_sensor_k98gidt45gul_name_online = hass.states.get( + "binary_sensor.k98gidt45gul_name_connectivity" ) + assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON + binary_sensor_k98gidt45gul_name_ding = hass.states.get( + "binary_sensor.k98gidt45gul_name_occupancy" + ) + assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF + binary_sensor_k98gidt45gul_name_motion = hass.states.get( + "binary_sensor.k98gidt45gul_name_motion" + ) + assert 
binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF + binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( + "binary_sensor.k98gidt45gul_name_image_capture" + ) + assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF async def test_create_doorbell_offline(hass: HomeAssistant) -> None: """Test creation of a doorbell that is offline.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") await _create_august_with_devices(hass, [doorbell_one]) - states = hass.states - assert states.get("binary_sensor.tmt100_name_motion").state == STATE_UNAVAILABLE - assert states.get("binary_sensor.tmt100_name_connectivity").state == STATE_OFF - assert ( - states.get("binary_sensor.tmt100_name_doorbell_ding").state == STATE_UNAVAILABLE + binary_sensor_tmt100_name_motion = hass.states.get( + "binary_sensor.tmt100_name_motion" ) + assert binary_sensor_tmt100_name_motion.state == STATE_UNAVAILABLE + binary_sensor_tmt100_name_online = hass.states.get( + "binary_sensor.tmt100_name_connectivity" + ) + assert binary_sensor_tmt100_name_online.state == STATE_OFF + binary_sensor_tmt100_name_ding = hass.states.get( + "binary_sensor.tmt100_name_occupancy" + ) + assert binary_sensor_tmt100_name_ding.state == STATE_UNAVAILABLE -async def test_create_doorbell_with_motion( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: +async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: """Test creation of a doorbell.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") activities = await _mock_activities_from_fixture( hass, "get_activity.doorbell_motion.json" ) await _create_august_with_devices(hass, [doorbell_one], activities=activities) - states = hass.states - assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON - assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON - assert ( - states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + binary_sensor_k98gidt45gul_name_motion = hass.states.get( + "binary_sensor.k98gidt45gul_name_motion" ) - freezer.tick(40) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON + binary_sensor_k98gidt45gul_name_online = hass.states.get( + "binary_sensor.k98gidt45gul_name_connectivity" + ) + assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON + binary_sensor_k98gidt45gul_name_ding = hass.states.get( + "binary_sensor.k98gidt45gul_name_occupancy" + ) + assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.august.binary_sensor._native_datetime", + return_value=native_time, + ): + async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + binary_sensor_k98gidt45gul_name_motion = hass.states.get( + "binary_sensor.k98gidt45gul_name_motion" + ) + assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF -async def test_doorbell_update_via_pubnub( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: +async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: """Test creation of a doorbell that can be updated via pubnub.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") 
pubnub = AugustPubNub() await _create_august_with_devices(hass, [doorbell_one], pubnub=pubnub) assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" - states = hass.states - assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF - assert ( - states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + + binary_sensor_k98gidt45gul_name_motion = hass.states.get( + "binary_sensor.k98gidt45gul_name_motion" ) + assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF + binary_sensor_k98gidt45gul_name_ding = hass.states.get( + "binary_sensor.k98gidt45gul_name_occupancy" + ) + assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF pubnub.message( pubnub, @@ -161,7 +202,10 @@ async def test_doorbell_update_via_pubnub( await hass.async_block_till_done() - assert states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_ON + binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( + "binary_sensor.k98gidt45gul_name_image_capture" + ) + assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_ON pubnub.message( pubnub, @@ -195,19 +239,29 @@ async def test_doorbell_update_via_pubnub( await hass.async_block_till_done() - assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON - - assert ( - states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + binary_sensor_k98gidt45gul_name_motion = hass.states.get( + "binary_sensor.k98gidt45gul_name_motion" ) + assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON - freezer.tick(40) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert ( - states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF + binary_sensor_k98gidt45gul_name_ding = hass.states.get( + "binary_sensor.k98gidt45gul_name_occupancy" ) + assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF + + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.august.binary_sensor._native_datetime", + return_value=native_time, + ): + async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + + binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( + "binary_sensor.k98gidt45gul_name_image_capture" + ) + assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF pubnub.message( pubnub, @@ -221,25 +275,37 @@ async def test_doorbell_update_via_pubnub( ) await hass.async_block_till_done() - assert states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_ON - freezer.tick(40) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert ( - states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + binary_sensor_k98gidt45gul_name_ding = hass.states.get( + "binary_sensor.k98gidt45gul_name_occupancy" ) + assert binary_sensor_k98gidt45gul_name_ding.state == STATE_ON + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.august.binary_sensor._native_datetime", + return_value=native_time, + ): + async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + + binary_sensor_k98gidt45gul_name_ding = hass.states.get( + "binary_sensor.k98gidt45gul_name_occupancy" + ) + assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF async def 
test_doorbell_device_registry(
-    hass: HomeAssistant, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion
+    hass: HomeAssistant, device_registry: dr.DeviceRegistry
 ) -> None:
     """Test creation of a lock with doorsense and bridge ends up in the registry."""
     doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json")
     await _create_august_with_devices(hass, [doorbell_one])

     reg_device = device_registry.async_get_device(identifiers={("august", "tmt100")})
-    assert reg_device == snapshot
+    assert reg_device.model == "hydra1"
+    assert reg_device.name == "tmt100 Name"
+    assert reg_device.manufacturer == "August Home Inc."
+    assert reg_device.sw_version == "3.1.0-HYDRC75+201909251139"


 async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None:
@@ -252,9 +318,11 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None:
     config_entry = await _create_august_with_devices(
         hass, [lock_one], activities=activities, pubnub=pubnub
     )
-    states = hass.states

-    assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON
+    binary_sensor_online_with_doorsense_name = hass.states.get(
+        "binary_sensor.online_with_doorsense_name_door"
+    )
+    assert binary_sensor_online_with_doorsense_name.state == STATE_ON

     pubnub.message(
         pubnub,
@@ -266,9 +334,10 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None:
     )
     await hass.async_block_till_done()

-    assert (
-        states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF
+    binary_sensor_online_with_doorsense_name = hass.states.get(
+        "binary_sensor.online_with_doorsense_name_door"
     )
+    assert binary_sensor_online_with_doorsense_name.state == STATE_OFF

     pubnub.message(
         pubnub,
@@ -279,22 +348,33 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None:
         ),
     )
     await hass.async_block_till_done()
-    assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON
+    binary_sensor_online_with_doorsense_name = hass.states.get(
+        "binary_sensor.online_with_doorsense_name_door"
+    )
+    assert binary_sensor_online_with_doorsense_name.state == STATE_ON

     async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30))
     await hass.async_block_till_done()

-    assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON
+    binary_sensor_online_with_doorsense_name = hass.states.get(
+        "binary_sensor.online_with_doorsense_name_door"
+    )
+    assert binary_sensor_online_with_doorsense_name.state == STATE_ON

     pubnub.connected = True
     async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30))
     await hass.async_block_till_done()

-    assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON
+    binary_sensor_online_with_doorsense_name = hass.states.get(
+        "binary_sensor.online_with_doorsense_name_door"
+    )
+    assert binary_sensor_online_with_doorsense_name.state == STATE_ON

     # Ensure pubnub status is always preserved
     async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2))
     await hass.async_block_till_done()
-
-    assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON
+    binary_sensor_online_with_doorsense_name = hass.states.get(
+        "binary_sensor.online_with_doorsense_name_door"
+    )
+    assert binary_sensor_online_with_doorsense_name.state == STATE_ON

     pubnub.message(
         pubnub,
@@ -305,11 +385,17 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None:
         ),
     )
     await hass.async_block_till_done()
-    assert
states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_ON async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) await hass.async_block_till_done() - assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_ON await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() @@ -320,10 +406,7 @@ async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") await _create_august_with_devices(hass, [lock_one]) - states = hass.states - assert ( - states.get( - "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding" - ).state - == STATE_OFF + ding_sensor = hass.states.get( + "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_occupancy" ) + assert ding_sensor.state == STATE_OFF diff --git a/tests/components/august/test_button.py b/tests/components/august/test_button.py index 948b59b2286..8ae2bc8a70d 100644 --- a/tests/components/august/test_button.py +++ b/tests/components/august/test_button.py @@ -20,4 +20,5 @@ async def test_wake_lock(hass: HomeAssistant) -> None: await hass.services.async_call( BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id}, blocking=True ) + await hass.async_block_till_done() api_instance.async_status_async.assert_called_once() diff --git a/tests/components/august/test_camera.py b/tests/components/august/test_camera.py index 287620cc872..539a26cc30f 100644 --- a/tests/components/august/test_camera.py +++ b/tests/components/august/test_camera.py @@ -6,7 +6,7 @@ from unittest.mock import patch from yalexs.const import Brand from yalexs.doorbell import ContentTokenExpired -from homeassistant.components.camera import CameraState +from homeassistant.const import STATE_IDLE from homeassistant.core import HomeAssistant from .mocks import _create_august_with_devices, _mock_doorbell_from_fixture @@ -25,10 +25,14 @@ async def test_create_doorbell( ): await _create_august_with_devices(hass, [doorbell_one], brand=Brand.AUGUST) - camera_state = hass.states.get("camera.k98gidt45gul_name_camera") - assert camera_state.state == CameraState.IDLE + camera_k98gidt45gul_name_camera = hass.states.get( + "camera.k98gidt45gul_name_camera" + ) + assert camera_k98gidt45gul_name_camera.state == STATE_IDLE - url = camera_state.attributes["entity_picture"] + url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ + "entity_picture" + ] client = await hass_client_no_auth() resp = await client.get(url) diff --git a/tests/components/august/test_config_flow.py b/tests/components/august/test_config_flow.py index b3138342b8c..aec08864c65 100644 --- a/tests/components/august/test_config_flow.py +++ b/tests/components/august/test_config_flow.py @@ -2,9 +2,10 @@ from unittest.mock import patch -from yalexs.authenticator_common import ValidationResult +from yalexs.authenticator import ValidationResult from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation +from homeassistant import config_entries from homeassistant.components.august.const import ( CONF_ACCESS_TOKEN_CACHE_FILE, CONF_BRAND, @@ -13,7 
+14,6 @@ from homeassistant.components.august.const import ( DOMAIN, VERIFICATION_CODE_KEY, ) -from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -25,7 +25,7 @@ async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -66,7 +66,7 @@ async def test_form(hass: HomeAssistant) -> None: async def test_form_invalid_auth(hass: HomeAssistant) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( @@ -90,7 +90,7 @@ async def test_form_invalid_auth(hass: HomeAssistant) -> None: async def test_user_unexpected_exception(hass: HomeAssistant) -> None: """Test we handle an unexpected exception.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( @@ -115,7 +115,7 @@ async def test_user_unexpected_exception(hass: HomeAssistant) -> None: async def test_form_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( @@ -138,7 +138,7 @@ async def test_form_cannot_connect(hass: HomeAssistant) -> None: async def test_form_needs_validate(hass: HomeAssistant) -> None: """Test we present validation when we need to validate.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) with ( @@ -248,7 +248,9 @@ async def test_form_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -292,7 +294,9 @@ async def test_form_reauth_with_2fa(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -367,7 +371,7 @@ async def test_switching_brands(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -385,7 +389,7 @@ async def test_switching_brands(hass: HomeAssistant) -> None: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_BRAND: "yale_access", + CONF_BRAND: "yale_home", CONF_LOGIN_METHOD: "email", CONF_USERNAME: "my@email.tld", CONF_PASSWORD: "test-password", @@ -396,4 +400,4 @@ async def test_switching_brands(hass: HomeAssistant) -> None: assert result2["type"] is 
FlowResultType.ABORT assert result2["reason"] == "reauth_successful" assert len(mock_setup_entry.mock_calls) == 1 - assert entry.data[CONF_BRAND] == "yale_access" + assert entry.data[CONF_BRAND] == "yale_home" diff --git a/tests/components/august/test_event.py b/tests/components/august/test_event.py deleted file mode 100644 index 0bb482c5b89..00000000000 --- a/tests/components/august/test_event.py +++ /dev/null @@ -1,170 +0,0 @@ -"""The event tests for the august.""" - -from unittest.mock import Mock - -from freezegun.api import FrozenDateTimeFactory -from yalexs.pubnub_async import AugustPubNub - -from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN -from homeassistant.core import HomeAssistant - -from .mocks import ( - _create_august_with_devices, - _mock_activities_from_fixture, - _mock_doorbell_from_fixture, - _mock_lock_from_fixture, - _timetoken, -) - -from tests.common import async_fire_time_changed - - -async def test_create_doorbell(hass: HomeAssistant) -> None: - """Test creation of a doorbell.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - await _create_august_with_devices(hass, [doorbell_one]) - - motion_state = hass.states.get("event.k98gidt45gul_name_motion") - assert motion_state is not None - assert motion_state.state == STATE_UNKNOWN - doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") - assert doorbell_state is not None - assert doorbell_state.state == STATE_UNKNOWN - - -async def test_create_doorbell_offline(hass: HomeAssistant) -> None: - """Test creation of a doorbell that is offline.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") - await _create_august_with_devices(hass, [doorbell_one]) - motion_state = hass.states.get("event.tmt100_name_motion") - assert motion_state is not None - assert motion_state.state == STATE_UNAVAILABLE - doorbell_state = hass.states.get("event.tmt100_name_doorbell") - assert doorbell_state is not None - assert doorbell_state.state == STATE_UNAVAILABLE - - -async def test_create_doorbell_with_motion( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: - """Test creation of a doorbell.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - activities = await _mock_activities_from_fixture( - hass, "get_activity.doorbell_motion.json" - ) - await _create_august_with_devices(hass, [doorbell_one], activities=activities) - - motion_state = hass.states.get("event.k98gidt45gul_name_motion") - assert motion_state is not None - assert motion_state.state != STATE_UNKNOWN - isotime = motion_state.state - doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") - assert doorbell_state is not None - assert doorbell_state.state == STATE_UNKNOWN - - freezer.tick(40) - async_fire_time_changed(hass) - await hass.async_block_till_done() - motion_state = hass.states.get("event.k98gidt45gul_name_motion") - assert motion_state.state == isotime - - -async def test_doorbell_update_via_pubnub( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: - """Test creation of a doorbell that can be updated via pubnub.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - pubnub = AugustPubNub() - - await _create_august_with_devices(hass, [doorbell_one], pubnub=pubnub) - assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" - - motion_state = hass.states.get("event.k98gidt45gul_name_motion") - assert motion_state is not None - assert motion_state.state == 
STATE_UNKNOWN - doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") - assert doorbell_state is not None - assert doorbell_state.state == STATE_UNKNOWN - - pubnub.message( - pubnub, - Mock( - channel=doorbell_one.pubsub_channel, - timetoken=_timetoken(), - message={ - "status": "doorbell_motion_detected", - "data": { - "event": "doorbell_motion_detected", - "image": { - "height": 640, - "width": 480, - "format": "jpg", - "created_at": "2021-03-16T02:36:26.886Z", - "bytes": 14061, - "secure_url": ( - "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg" - ), - "url": "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg", - "etag": "09e839331c4ea59eef28081f2caa0e90", - }, - "doorbellName": "Front Door", - "callID": None, - "origin": "mars-api", - "mutableContent": True, - }, - }, - ), - ) - - await hass.async_block_till_done() - - motion_state = hass.states.get("event.k98gidt45gul_name_motion") - assert motion_state is not None - assert motion_state.state != STATE_UNKNOWN - isotime = motion_state.state - - freezer.tick(40) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - motion_state = hass.states.get("event.k98gidt45gul_name_motion") - assert motion_state is not None - assert motion_state.state != STATE_UNKNOWN - - pubnub.message( - pubnub, - Mock( - channel=doorbell_one.pubsub_channel, - timetoken=_timetoken(), - message={ - "status": "buttonpush", - }, - ), - ) - await hass.async_block_till_done() - - doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") - assert doorbell_state is not None - assert doorbell_state.state != STATE_UNKNOWN - isotime = motion_state.state - - freezer.tick(40) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") - assert doorbell_state is not None - assert doorbell_state.state != STATE_UNKNOWN - assert motion_state.state == isotime - - -async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: - """Test creation of a lock with a doorbell.""" - lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") - await _create_august_with_devices(hass, [lock_one]) - - doorbell_state = hass.states.get( - "event.a6697750d607098bae8d6baa11ef8063_name_doorbell" - ) - assert doorbell_state is not None - assert doorbell_state.state == STATE_UNKNOWN diff --git a/tests/components/august/test_gateway.py b/tests/components/august/test_gateway.py index 1603aeb3ecb..e605fd74f0a 100644 --- a/tests/components/august/test_gateway.py +++ b/tests/components/august/test_gateway.py @@ -22,14 +22,14 @@ async def test_refresh_access_token(hass: HomeAssistant) -> None: @patch("yalexs.manager.gateway.AuthenticatorAsync.should_refresh") @patch("yalexs.manager.gateway.AuthenticatorAsync.async_refresh_access_token") async def _patched_refresh_access_token( - hass: HomeAssistant, - new_token: str, - new_token_expire_time: int, + hass, + new_token, + new_token_expire_time, refresh_access_token_mock, should_refresh_mock, authenticate_mock, async_get_operable_locks_mock, -) -> None: +): authenticate_mock.side_effect = MagicMock( return_value=_mock_august_authentication( "original_token", 1234, AuthenticationState.AUTHENTICATED @@ -50,5 +50,5 @@ async def _patched_refresh_access_token( ) await august_gateway.async_refresh_access_token_if_needed() refresh_access_token_mock.assert_called() - assert await august_gateway.async_get_access_token() == new_token + assert august_gateway.access_token == new_token assert 
august_gateway.authentication.access_token_expires == new_token_expire_time diff --git a/tests/components/august/test_init.py b/tests/components/august/test_init.py index 3343e85d60a..8261e32d668 100644 --- a/tests/components/august/test_init.py +++ b/tests/components/august/test_init.py @@ -5,26 +5,22 @@ from unittest.mock import Mock, patch from aiohttp import ClientResponseError import pytest from yalexs.authenticator_common import AuthenticationState -from yalexs.const import Brand from yalexs.exceptions import AugustApiAIOHTTPError from homeassistant.components.august.const import DOMAIN -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, + STATE_LOCKED, STATE_ON, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import ( - device_registry as dr, - entity_registry as er, - issue_registry as ir, -) +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from .mocks import ( @@ -126,16 +122,16 @@ async def test_unlock_throws_august_api_http_error(hass: HomeAssistant) -> None: "unlock_return_activities": _unlock_return_activities_side_effect }, ) + last_err = None data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - - with pytest.raises( - HomeAssistantError, - match=( - "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" - " consumable" - ), - ): + try: await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + except HomeAssistantError as err: + last_err = err + assert str(last_err) == ( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ) async def test_lock_throws_august_api_http_error(hass: HomeAssistant) -> None: @@ -156,15 +152,16 @@ async def test_lock_throws_august_api_http_error(hass: HomeAssistant) -> None: "lock_return_activities": _lock_return_activities_side_effect }, ) + last_err = None data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - with pytest.raises( - HomeAssistantError, - match=( - "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" - " consumable" - ), - ): + try: await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + except HomeAssistantError as err: + last_err = err + assert str(last_err) == ( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ) async def test_open_throws_hass_service_not_supported_error( @@ -191,7 +188,7 @@ async def test_inoperative_locks_are_filtered_out(hass: HomeAssistant) -> None: lock_a6697750d607098bae8d6baa11ef8063_name = hass.states.get( "lock.a6697750d607098bae8d6baa11ef8063_name" ) - assert lock_a6697750d607098bae8d6baa11ef8063_name.state == LockState.LOCKED + assert lock_a6697750d607098bae8d6baa11ef8063_name.state == STATE_LOCKED async def test_lock_has_doorsense(hass: HomeAssistant) -> None: @@ -374,7 +371,6 @@ async def test_load_unload(hass: HomeAssistant) -> None: await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.NOT_LOADED async def test_load_triggers_ble_discovery( @@ -424,24 +420,3 @@ async def 
test_device_remove_devices(
     )
     response = await client.remove_device(dead_device_entry.id, config_entry.entry_id)
     assert response["success"]
-
-
-async def test_brand_migration_issue(hass: HomeAssistant) -> None:
-    """Test creating and removing the brand migration issue."""
-    august_operative_lock = await _mock_operative_august_lock_detail(hass)
-    config_entry = await _create_august_with_devices(
-        hass, [august_operative_lock], brand=Brand.YALE_HOME
-    )
-
-    assert config_entry.state is ConfigEntryState.LOADED
-
-    issue_reg = ir.async_get(hass)
-    issue_entry = issue_reg.async_get_issue(DOMAIN, "yale_brand_migration")
-    assert issue_entry
-    assert issue_entry.severity == ir.IssueSeverity.CRITICAL
-    assert issue_entry.translation_placeholders == {
-        "migrate_url": "https://my.home-assistant.io/redirect/config_flow_start?domain=yale"
-    }
-
-    await hass.config_entries.async_remove(config_entry.entry_id)
-    assert not issue_reg.async_get_issue(DOMAIN, "yale_brand_migration")
diff --git a/tests/components/august/test_lock.py b/tests/components/august/test_lock.py
index 1b8c98e299c..8bb71826d24 100644
--- a/tests/components/august/test_lock.py
+++ b/tests/components/august/test_lock.py
@@ -6,18 +6,24 @@ from unittest.mock import Mock

 from aiohttp import ClientResponseError
 from freezegun.api import FrozenDateTimeFactory
 import pytest
-from syrupy import SnapshotAssertion
 from yalexs.manager.activity import INITIAL_LOCK_RESYNC_TIME
 from yalexs.pubnub_async import AugustPubNub

-from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState
+from homeassistant.components.lock import (
+    DOMAIN as LOCK_DOMAIN,
+    STATE_JAMMED,
+    STATE_LOCKING,
+    STATE_UNLOCKING,
+)
 from homeassistant.const import (
     ATTR_ENTITY_ID,
     SERVICE_LOCK,
     SERVICE_OPEN,
     SERVICE_UNLOCK,
+    STATE_LOCKED,
     STATE_UNAVAILABLE,
     STATE_UNKNOWN,
+    STATE_UNLOCKED,
 )
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
@@ -37,7 +43,7 @@ from tests.common import async_fire_time_changed


 async def test_lock_device_registry(
-    hass: HomeAssistant, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion
+    hass: HomeAssistant, device_registry: dr.DeviceRegistry
 ) -> None:
     """Test creation of a lock with doorsense and bridge ends up in the registry."""
     lock_one = await _mock_doorsense_enabled_august_lock_detail(hass)
@@ -46,7 +52,10 @@ async def test_lock_device_registry(
     reg_device = device_registry.async_get_device(
         identifiers={("august", "online_with_doorsense")}
     )
-    assert reg_device == snapshot
+    assert reg_device.model == "AUG-MD01"
+    assert reg_device.sw_version == "undefined-4.3.0-1.8.14"
+    assert reg_device.name == "online_with_doorsense Name"
+    assert reg_device.manufacturer == "August Home Inc."
async def test_lock_changed_by(hass: HomeAssistant) -> None: @@ -56,10 +65,14 @@ async def test_lock_changed_by(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_state = hass.states.get("lock.online_with_doorsense_name") + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.LOCKED - assert lock_state.attributes["changed_by"] == "Your favorite elven princess" + assert lock_online_with_doorsense_name.state == STATE_LOCKED + + assert ( + lock_online_with_doorsense_name.attributes.get("changed_by") + == "Your favorite elven princess" + ) async def test_state_locking(hass: HomeAssistant) -> None: @@ -69,7 +82,9 @@ async def test_state_locking(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.locking.json") await _create_august_with_devices(hass, [lock_one], activities=activities) - assert hass.states.get("lock.online_with_doorsense_name").state == LockState.LOCKING + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_LOCKING async def test_state_unlocking(hass: HomeAssistant) -> None: @@ -81,9 +96,9 @@ async def test_state_unlocking(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one], activities=activities) - assert ( - hass.states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING - ) + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_UNLOCKING async def test_state_jammed(hass: HomeAssistant) -> None: @@ -93,7 +108,9 @@ async def test_state_jammed(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.jammed.json") await _create_august_with_devices(hass, [lock_one], activities=activities) - assert hass.states.get("lock.online_with_doorsense_name").state == LockState.JAMMED + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_JAMMED async def test_one_lock_operation( @@ -102,27 +119,35 @@ async def test_one_lock_operation( """Test creation of a lock with doorsense and bridge.""" lock_one = await _mock_doorsense_enabled_august_lock_detail(hass) await _create_august_with_devices(hass, [lock_one]) - states = hass.states - lock_state = states.get("lock.online_with_doorsense_name") + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.LOCKED + assert lock_online_with_doorsense_name.state == STATE_LOCKED - assert lock_state.attributes["battery_level"] == 92 - assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 + assert ( + lock_online_with_doorsense_name.attributes.get("friendly_name") + == "online_with_doorsense Name" + ) data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + await hass.async_block_till_done() - lock_state = states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.UNLOCKED + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert 
lock_online_with_doorsense_name.state == STATE_UNLOCKED - assert lock_state.attributes["battery_level"] == 92 - assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 + assert ( + lock_online_with_doorsense_name.attributes.get("friendly_name") + == "online_with_doorsense Name" + ) await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + await hass.async_block_till_done() - assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKED + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_LOCKED # No activity means it will be unavailable until the activity feed has data lock_operator_sensor = entity_registry.async_get( @@ -130,7 +155,8 @@ async def test_one_lock_operation( ) assert lock_operator_sensor assert ( - states.get("sensor.online_with_doorsense_name_operator").state == STATE_UNKNOWN + hass.states.get("sensor.online_with_doorsense_name_operator").state + == STATE_UNKNOWN ) @@ -140,13 +166,14 @@ async def test_open_lock_operation(hass: HomeAssistant) -> None: await _create_august_with_devices(hass, [lock_with_unlatch]) lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == LockState.LOCKED + assert lock_online_with_unlatch_name.state == STATE_LOCKED data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) + await hass.async_block_till_done() lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == LockState.UNLOCKED + assert lock_online_with_unlatch_name.state == STATE_UNLOCKED async def test_open_lock_operation_pubnub_connected( @@ -162,10 +189,12 @@ async def test_open_lock_operation_pubnub_connected( await _create_august_with_devices(hass, [lock_with_unlatch], pubnub=pubnub) pubnub.connected = True - assert hass.states.get("lock.online_with_unlatch_name").state == LockState.LOCKED + lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") + assert lock_online_with_unlatch_name.state == STATE_LOCKED data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) + await hass.async_block_till_done() pubnub.message( pubnub, @@ -180,7 +209,8 @@ async def test_open_lock_operation_pubnub_connected( await hass.async_block_till_done() await hass.async_block_till_done() - assert hass.states.get("lock.online_with_unlatch_name").state == LockState.UNLOCKED + lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") + assert lock_online_with_unlatch_name.state == STATE_UNLOCKED await hass.async_block_till_done() @@ -197,15 +227,19 @@ async def test_one_lock_operation_pubnub_connected( await _create_august_with_devices(hass, [lock_one], pubnub=pubnub) pubnub.connected = True - lock_state = hass.states.get("lock.online_with_doorsense_name") + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.LOCKED + assert lock_online_with_doorsense_name.state == STATE_LOCKED - assert lock_state.attributes["battery_level"] == 92 - assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + assert 
lock_online_with_doorsense_name.attributes.get("battery_level") == 92 + assert ( + lock_online_with_doorsense_name.attributes.get("friendly_name") + == "online_with_doorsense Name" + ) data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + await hass.async_block_till_done() pubnub.message( pubnub, @@ -220,13 +254,17 @@ async def test_one_lock_operation_pubnub_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.UNLOCKED + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_UNLOCKED - assert lock_state.attributes["battery_level"] == 92 - assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 + assert ( + lock_online_with_doorsense_name.attributes.get("friendly_name") + == "online_with_doorsense Name" + ) await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + await hass.async_block_till_done() pubnub.message( pubnub, @@ -241,8 +279,8 @@ async def test_one_lock_operation_pubnub_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.LOCKED + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_LOCKED # No activity means it will be unavailable until the activity feed has data lock_operator_sensor = entity_registry.async_get( @@ -268,8 +306,8 @@ async def test_one_lock_operation_pubnub_connected( ) await hass.async_block_till_done() - lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.UNLOCKED + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_UNLOCKED async def test_lock_jammed(hass: HomeAssistant) -> None: @@ -287,18 +325,22 @@ async def test_lock_jammed(hass: HomeAssistant) -> None: }, ) - lock_state = hass.states.get("lock.online_with_doorsense_name") + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.LOCKED + assert lock_online_with_doorsense_name.state == STATE_LOCKED - assert lock_state.attributes["battery_level"] == 92 - assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 + assert ( + lock_online_with_doorsense_name.attributes.get("friendly_name") + == "online_with_doorsense Name" + ) data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + await hass.async_block_till_done() - lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.JAMMED + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_JAMMED async def test_lock_throws_exception_on_unknown_status_code( @@ -318,12 +360,15 @@ async def test_lock_throws_exception_on_unknown_status_code( }, ) - lock_state = hass.states.get("lock.online_with_doorsense_name") 
+ lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.LOCKED + assert lock_online_with_doorsense_name.state == STATE_LOCKED - assert lock_state.attributes["battery_level"] == 92 - assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 + assert ( + lock_online_with_doorsense_name.attributes.get("friendly_name") + == "online_with_doorsense Name" + ) data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} with pytest.raises(ClientResponseError): @@ -338,7 +383,9 @@ async def test_one_lock_unknown_state(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one]) - assert hass.states.get("lock.brokenid_name").state == STATE_UNKNOWN + lock_brokenid_name = hass.states.get("lock.brokenid_name") + + assert lock_brokenid_name.state == STATE_UNKNOWN async def test_lock_bridge_offline(hass: HomeAssistant) -> None: @@ -350,7 +397,9 @@ async def test_lock_bridge_offline(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one], activities=activities) - assert hass.states.get("lock.online_with_doorsense_name").state == STATE_UNAVAILABLE + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_UNAVAILABLE async def test_lock_bridge_online(hass: HomeAssistant) -> None: @@ -362,13 +411,14 @@ async def test_lock_bridge_online(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one], activities=activities) - assert hass.states.get("lock.online_with_doorsense_name").state == LockState.LOCKED + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_LOCKED async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: """Test creation of a lock with doorsense and bridge.""" lock_one = await _mock_doorsense_enabled_august_lock_detail(hass) - states = hass.states assert lock_one.pubsub_channel == "pubsub" pubnub = AugustPubNub() @@ -378,7 +428,9 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: ) pubnub.connected = True - assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKED + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_LOCKED pubnub.message( pubnub, @@ -394,7 +446,8 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_UNLOCKING pubnub.message( pubnub, @@ -410,21 +463,25 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_LOCKING async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - assert hass.states.get("lock.online_with_doorsense_name").state == LockState.LOCKING + 
lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_LOCKING pubnub.connected = True async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_LOCKING # Ensure pubnub status is always preserved async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) await hass.async_block_till_done() - assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_LOCKING pubnub.message( pubnub, @@ -439,11 +496,13 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_UNLOCKING async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) await hass.async_block_till_done() - assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_UNLOCKING await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/august/test_sensor.py b/tests/components/august/test_sensor.py index 2d72d287ce3..0227ee64ef1 100644 --- a/tests/components/august/test_sensor.py +++ b/tests/components/august/test_sensor.py @@ -28,9 +28,13 @@ async def test_create_doorbell(hass: HomeAssistant) -> None: doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") await _create_august_with_devices(hass, [doorbell_one]) - battery_state = hass.states.get("sensor.k98gidt45gul_name_battery") - assert battery_state.state == "96" - assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + sensor_k98gidt45gul_name_battery = hass.states.get( + "sensor.k98gidt45gul_name_battery" + ) + assert sensor_k98gidt45gul_name_battery.state == "96" + assert ( + sensor_k98gidt45gul_name_battery.attributes["unit_of_measurement"] == PERCENTAGE + ) async def test_create_doorbell_offline( @@ -40,9 +44,9 @@ async def test_create_doorbell_offline( doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") await _create_august_with_devices(hass, [doorbell_one]) - battery_state = hass.states.get("sensor.tmt100_name_battery") - assert battery_state.state == "81" - assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + sensor_tmt100_name_battery = hass.states.get("sensor.tmt100_name_battery") + assert sensor_tmt100_name_battery.state == "81" + assert sensor_tmt100_name_battery.attributes["unit_of_measurement"] == PERCENTAGE entry = entity_registry.async_get("sensor.tmt100_name_battery") assert entry @@ -56,7 +60,8 @@ async def test_create_doorbell_hardwired(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [doorbell_one]) - assert hass.states.get("sensor.tmt100_name_battery") is None + 
sensor_tmt100_name_battery = hass.states.get("sensor.tmt100_name_battery") + assert sensor_tmt100_name_battery is None async def test_create_lock_with_linked_keypad( @@ -66,21 +71,25 @@ async def test_create_lock_with_linked_keypad( lock_one = await _mock_lock_from_fixture(hass, "get_lock.doorsense_init.json") await _create_august_with_devices(hass, [lock_one]) - battery_state = hass.states.get( + sensor_a6697750d607098bae8d6baa11ef8063_name_battery = hass.states.get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) - assert battery_state.state == "88" - assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE - + assert sensor_a6697750d607098bae8d6baa11ef8063_name_battery.state == "88" + assert ( + sensor_a6697750d607098bae8d6baa11ef8063_name_battery.attributes[ + "unit_of_measurement" + ] + == PERCENTAGE + ) entry = entity_registry.async_get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) assert entry assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" - keypad_battery_state = hass.states.get("sensor.front_door_lock_keypad_battery") - assert keypad_battery_state.state == "62" - assert keypad_battery_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + state = hass.states.get("sensor.front_door_lock_keypad_battery") + assert state.state == "60" + assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") assert entry assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" @@ -92,32 +101,42 @@ async def test_create_lock_with_low_battery_linked_keypad( """Test creation of a lock with a linked keypad that both have a battery.""" lock_one = await _mock_lock_from_fixture(hass, "get_lock.low_keypad_battery.json") await _create_august_with_devices(hass, [lock_one]) - states = hass.states - battery_state = states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_battery") - assert battery_state.state == "88" - assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + sensor_a6697750d607098bae8d6baa11ef8063_name_battery = hass.states.get( + "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" + ) + assert sensor_a6697750d607098bae8d6baa11ef8063_name_battery.state == "88" + assert ( + sensor_a6697750d607098bae8d6baa11ef8063_name_battery.attributes[ + "unit_of_measurement" + ] + == PERCENTAGE + ) entry = entity_registry.async_get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) assert entry assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" - keypad_battery_state = states.get("sensor.front_door_lock_keypad_battery") - assert keypad_battery_state.state == "10" - assert keypad_battery_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + state = hass.states.get("sensor.front_door_lock_keypad_battery") + assert state.state == "10" + assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") assert entry assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" # No activity means it will be unavailable until someone unlocks/locks it - operator_entry = entity_registry.async_get( + lock_operator_sensor = entity_registry.async_get( "sensor.a6697750d607098bae8d6baa11ef8063_name_operator" ) - assert operator_entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_lock_operator" - - operator_state = states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_operator") - assert operator_state.state == 
STATE_UNKNOWN + assert ( + lock_operator_sensor.unique_id + == "A6697750D607098BAE8D6BAA11EF8063_lock_operator" + ) + assert ( + hass.states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_operator").state + == STATE_UNKNOWN + ) async def test_lock_operator_bluetooth( diff --git a/tests/components/aurora/conftest.py b/tests/components/aurora/conftest.py index 462203193f2..916f0925c4a 100644 --- a/tests/components/aurora/conftest.py +++ b/tests/components/aurora/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Aurora tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.aurora.const import CONF_THRESHOLD, DOMAIN from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE diff --git a/tests/components/aussie_broadband/test_config_flow.py b/tests/components/aussie_broadband/test_config_flow.py index 76e96c5cc02..6ee674ab0f4 100644 --- a/tests/components/aussie_broadband/test_config_flow.py +++ b/tests/components/aussie_broadband/test_config_flow.py @@ -13,8 +13,6 @@ from homeassistant.data_entry_flow import FlowResultType from .common import FAKE_DATA, FAKE_SERVICES -from tests.common import MockConfigEntry - TEST_USERNAME = FAKE_DATA[CONF_USERNAME] TEST_PASSWORD = FAKE_DATA[CONF_PASSWORD] @@ -165,15 +163,41 @@ async def test_form_network_issue(hass: HomeAssistant) -> None: async def test_reauth(hass: HomeAssistant) -> None: """Test reauth flow.""" - mock_entry = MockConfigEntry( - domain=DOMAIN, - data=FAKE_DATA, - unique_id=FAKE_DATA[CONF_USERNAME], + + # Test reauth but the entry doesn't exist + result1 = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=FAKE_DATA ) - mock_entry.add_to_hass(hass) + + with ( + patch("aussiebb.asyncio.AussieBB.__init__", return_value=None), + patch("aussiebb.asyncio.AussieBB.login", return_value=True), + patch( + "aussiebb.asyncio.AussieBB.get_services", return_value=[FAKE_SERVICES[0]] + ), + patch( + "homeassistant.components.aussie_broadband.async_setup_entry", + return_value=True, + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result1["flow_id"], + { + CONF_PASSWORD: TEST_PASSWORD, + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == TEST_USERNAME + assert result2["data"] == FAKE_DATA # Test failed reauth - result5 = await mock_entry.start_reauth_flow(hass) + result5 = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + data=FAKE_DATA, + ) assert result5["step_id"] == "reauth_confirm" with ( diff --git a/tests/components/autarco/__init__.py b/tests/components/autarco/__init__.py deleted file mode 100644 index 208e5999fc7..00000000000 --- a/tests/components/autarco/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -"""Tests for the Autarco integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the integration.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/autarco/conftest.py b/tests/components/autarco/conftest.py deleted file mode 100644 index b35ea993600..00000000000 --- a/tests/components/autarco/conftest.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Common fixtures for 
the Autarco tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -from autarco import AccountSite, Battery, Inverter, Solar -import pytest - -from homeassistant.components.autarco.const import DOMAIN -from homeassistant.const import CONF_EMAIL, CONF_PASSWORD - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.autarco.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_autarco_client() -> Generator[AsyncMock]: - """Mock a Autarco client.""" - with ( - patch( - "homeassistant.components.autarco.Autarco", - autospec=True, - ) as mock_client, - patch( - "homeassistant.components.autarco.config_flow.Autarco", - new=mock_client, - ), - ): - client = mock_client.return_value - client.get_account.return_value = [ - AccountSite( - site_id=1, - public_key="key-public", - system_name="test-system", - retailer="test-retailer", - health="OK", - ) - ] - client.get_solar.return_value = Solar( - power_production=200, - energy_production_today=4, - energy_production_month=58, - energy_production_total=10379, - ) - client.get_inverters.return_value = { - "test-serial-1": Inverter( - serial_number="test-serial-1", - out_ac_power=200, - out_ac_energy_total=10379, - grid_turned_off=False, - health="OK", - ), - "test-serial-2": Inverter( - serial_number="test-serial-2", - out_ac_power=500, - out_ac_energy_total=10379, - grid_turned_off=False, - health="OK", - ), - } - client.get_battery.return_value = Battery( - flow_now=777, - net_charged_now=777, - state_of_charge=56, - discharged_today=2, - discharged_month=25, - discharged_total=696, - charged_today=1, - charged_month=26, - charged_total=748, - ) - yield client - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title="Autarco", - data={ - CONF_EMAIL: "test@autarco.com", - CONF_PASSWORD: "test-password", - }, - ) diff --git a/tests/components/autarco/snapshots/test_diagnostics.ambr b/tests/components/autarco/snapshots/test_diagnostics.ambr deleted file mode 100644 index 876e6d6b727..00000000000 --- a/tests/components/autarco/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,45 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'sites_data': list([ - dict({ - 'battery': dict({ - 'charged_month': 26, - 'charged_today': 1, - 'charged_total': 748, - 'discharged_month': 25, - 'discharged_today': 2, - 'discharged_total': 696, - 'flow_now': 777, - 'net_charged_now': 777, - 'state_of_charge': 56, - }), - 'health': 'OK', - 'id': 1, - 'inverters': list([ - dict({ - 'grid_turned_off': False, - 'health': 'OK', - 'out_ac_energy_total': 10379, - 'out_ac_power': 200, - 'serial_number': 'test-serial-1', - }), - dict({ - 'grid_turned_off': False, - 'health': 'OK', - 'out_ac_energy_total': 10379, - 'out_ac_power': 500, - 'serial_number': 'test-serial-2', - }), - ]), - 'name': 'test-system', - 'solar': dict({ - 'energy_production_month': 58, - 'energy_production_today': 4, - 'energy_production_total': 10379, - 'power_production': 200, - }), - }), - ]), - }) -# --- diff --git a/tests/components/autarco/snapshots/test_sensor.ambr b/tests/components/autarco/snapshots/test_sensor.ambr deleted file mode 100644 index dbbd8e9b47d..00000000000 --- a/tests/components/autarco/snapshots/test_sensor.ambr +++ /dev/null @@ -1,817 
+0,0 @@ -# serializer version: 1 -# name: test_all_sensors[sensor.battery_charged_month-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.battery_charged_month', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charged month', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charged_month', - 'unique_id': '1_battery_charged_month', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.battery_charged_month-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Battery Charged month', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.battery_charged_month', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '26', - }) -# --- -# name: test_all_sensors[sensor.battery_charged_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.battery_charged_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charged today', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charged_today', - 'unique_id': '1_battery_charged_today', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.battery_charged_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Battery Charged today', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.battery_charged_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_all_sensors[sensor.battery_charged_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.battery_charged_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charged total', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charged_total', - 'unique_id': '1_battery_charged_total', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.battery_charged_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Battery Charged total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.battery_charged_total', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': '748', - }) -# --- -# name: test_all_sensors[sensor.battery_discharged_month-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.battery_discharged_month', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Discharged month', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'discharged_month', - 'unique_id': '1_battery_discharged_month', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.battery_discharged_month-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Battery Discharged month', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.battery_discharged_month', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '25', - }) -# --- -# name: test_all_sensors[sensor.battery_discharged_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.battery_discharged_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Discharged today', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'discharged_today', - 'unique_id': '1_battery_discharged_today', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.battery_discharged_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Battery Discharged today', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.battery_discharged_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_all_sensors[sensor.battery_discharged_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.battery_discharged_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Discharged total', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'discharged_total', - 'unique_id': '1_battery_discharged_total', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.battery_discharged_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Battery Discharged total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 
'entity_id': 'sensor.battery_discharged_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '696', - }) -# --- -# name: test_all_sensors[sensor.battery_flow_now-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.battery_flow_now', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Flow now', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'flow_now', - 'unique_id': '1_battery_flow_now', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.battery_flow_now-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Battery Flow now', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.battery_flow_now', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '777', - }) -# --- -# name: test_all_sensors[sensor.battery_state_of_charge-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.battery_state_of_charge', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'State of charge', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'state_of_charge', - 'unique_id': '1_battery_state_of_charge', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_sensors[sensor.battery_state_of_charge-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Battery State of charge', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.battery_state_of_charge', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '56', - }) -# --- -# name: test_all_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy AC output total', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'out_ac_energy_total', - 'unique_id': 'test-serial-1_out_ac_energy_total', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 
'friendly_name': 'Inverter test-serial-1 Energy AC output total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10379', - }) -# --- -# name: test_all_sensors[sensor.inverter_test_serial_1_power_ac_output-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power AC output', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'out_ac_power', - 'unique_id': 'test-serial-1_out_ac_power', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.inverter_test_serial_1_power_ac_output-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Inverter test-serial-1 Power AC output', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '200', - }) -# --- -# name: test_all_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy AC output total', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'out_ac_energy_total', - 'unique_id': 'test-serial-2_out_ac_energy_total', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Inverter test-serial-2 Energy AC output total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10379', - }) -# --- -# name: test_all_sensors[sensor.inverter_test_serial_2_power_ac_output-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power AC output', - 
'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'out_ac_power', - 'unique_id': 'test-serial-2_out_ac_power', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.inverter_test_serial_2_power_ac_output-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Inverter test-serial-2 Power AC output', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '500', - }) -# --- -# name: test_all_sensors[sensor.solar_energy_production_month-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solar_energy_production_month', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy production month', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'energy_production_month', - 'unique_id': '1_solar_energy_production_month', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.solar_energy_production_month-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Solar Energy production month', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solar_energy_production_month', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '58', - }) -# --- -# name: test_all_sensors[sensor.solar_energy_production_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solar_energy_production_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy production today', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'energy_production_today', - 'unique_id': '1_solar_energy_production_today', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.solar_energy_production_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Solar Energy production today', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solar_energy_production_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4', - }) -# --- -# name: test_all_sensors[sensor.solar_energy_production_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solar_energy_production_total', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy production total', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'energy_production_total', - 'unique_id': '1_solar_energy_production_total', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.solar_energy_production_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Solar Energy production total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solar_energy_production_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10379', - }) -# --- -# name: test_all_sensors[sensor.solar_power_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solar_power_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power production', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_production', - 'unique_id': '1_solar_power_production', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensors[sensor.solar_power_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Solar Power production', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solar_power_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '200', - }) -# --- diff --git a/tests/components/autarco/test_config_flow.py b/tests/components/autarco/test_config_flow.py deleted file mode 100644 index 621ad7f55c8..00000000000 --- a/tests/components/autarco/test_config_flow.py +++ /dev/null @@ -1,101 +0,0 @@ -"""Test the Autarco config flow.""" - -from unittest.mock import AsyncMock - -from autarco import AutarcoAuthenticationError, AutarcoConnectionError -import pytest - -from homeassistant.components.autarco.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_EMAIL, CONF_PASSWORD -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_full_user_flow( - hass: HomeAssistant, - mock_autarco_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test the full user configuration flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - assert not result.get("errors") - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, - ) - - assert result.get("type") is FlowResultType.CREATE_ENTRY - assert result.get("title") == "test@autarco.com" - assert result.get("data") == { - CONF_EMAIL: 
"test@autarco.com", - CONF_PASSWORD: "test-password", - } - assert len(mock_autarco_client.get_account.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_duplicate_entry( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_autarco_client: AsyncMock, -) -> None: - """Test abort when setting up duplicate entry.""" - mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - assert result.get("type") is FlowResultType.FORM - assert not result.get("errors") - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, - ) - - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (AutarcoConnectionError, "cannot_connect"), - (AutarcoAuthenticationError, "invalid_auth"), - ], -) -async def test_exceptions( - hass: HomeAssistant, - mock_autarco_client: AsyncMock, - mock_setup_entry: AsyncMock, - exception: Exception, - error: str, -) -> None: - """Test exceptions.""" - mock_autarco_client.get_account.side_effect = exception - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, - ) - assert result.get("type") is FlowResultType.FORM - assert result.get("errors") == {"base": error} - - mock_autarco_client.get_account.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, - ) - assert result.get("type") is FlowResultType.CREATE_ENTRY diff --git a/tests/components/autarco/test_diagnostics.py b/tests/components/autarco/test_diagnostics.py deleted file mode 100644 index 1d12a2c1894..00000000000 --- a/tests/components/autarco/test_diagnostics.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Test Autarco diagnostics.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_autarco_client: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test config entry diagnostics.""" - await setup_integration(hass, mock_config_entry) - - result = await get_diagnostics_for_config_entry( - hass, hass_client, mock_config_entry - ) - - assert result == snapshot diff --git a/tests/components/autarco/test_init.py b/tests/components/autarco/test_init.py deleted file mode 100644 index 81c5f947251..00000000000 --- a/tests/components/autarco/test_init.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Test the Autarco init module.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from . 
import setup_integration - -from tests.common import MockConfigEntry - - -async def test_load_unload_entry( - hass: HomeAssistant, - mock_autarco_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test load and unload entry.""" - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_remove(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/autarco/test_sensor.py b/tests/components/autarco/test_sensor.py deleted file mode 100644 index c7e65baba70..00000000000 --- a/tests/components/autarco/test_sensor.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Test the sensor provided by the Autarco integration.""" - -from datetime import timedelta -from unittest.mock import AsyncMock, MagicMock, patch - -from autarco import AutarcoConnectionError -from freezegun.api import FrozenDateTimeFactory -from syrupy import SnapshotAssertion - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - - -async def test_all_sensors( - hass: HomeAssistant, - mock_autarco_client: MagicMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Autarco sensors.""" - with patch("homeassistant.components.autarco.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_update_failed( - hass: HomeAssistant, - mock_autarco_client: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test entities become unavailable after failed update.""" - await setup_integration(hass, mock_config_entry) - assert mock_config_entry.state is ConfigEntryState.LOADED - - assert ( - hass.states.get("sensor.inverter_test_serial_1_energy_ac_output_total").state - is not None - ) - - mock_autarco_client.get_solar.side_effect = AutarcoConnectionError - freezer.tick(timedelta(minutes=5)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert ( - hass.states.get("sensor.inverter_test_serial_1_energy_ac_output_total").state - == STATE_UNAVAILABLE - ) diff --git a/tests/components/auth/test_init.py b/tests/components/auth/test_init.py index 718bb369b53..d0ca4699e0e 100644 --- a/tests/components/auth/test_init.py +++ b/tests/components/auth/test_init.py @@ -13,7 +13,6 @@ from homeassistant.auth.models import ( TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN, TOKEN_TYPE_NORMAL, Credentials, - RefreshToken, ) from homeassistant.components import auth from homeassistant.core import HomeAssistant @@ -38,7 +37,7 @@ def mock_credential(): ) -async def async_setup_user_refresh_token(hass: HomeAssistant) -> RefreshToken: +async def async_setup_user_refresh_token(hass): """Create a testing user with a connected credential.""" user = await hass.auth.async_create_user("Test User") @@ -599,8 +598,8 @@ async def test_ws_delete_all_refresh_tokens( hass_admin_credential: Credentials, hass_ws_client: WebSocketGenerator, hass_access_token: str, - delete_token_type: dict[str, str], - 
delete_current_token: dict[str, bool], + delete_token_type: dict[str:str], + delete_current_token: dict[str:bool], expected_remaining_normal_tokens: int, expected_remaining_long_lived_tokens: int, ) -> None: diff --git a/tests/components/auth/test_init_link_user.py b/tests/components/auth/test_init_link_user.py index a8f04c2720d..d1a5fa51af2 100644 --- a/tests/components/auth/test_init_link_user.py +++ b/tests/components/auth/test_init_link_user.py @@ -1,7 +1,6 @@ """Tests for the link user flow.""" from http import HTTPStatus -from typing import Any from unittest.mock import patch from homeassistant.core import HomeAssistant @@ -12,9 +11,7 @@ from tests.common import CLIENT_ID, CLIENT_REDIRECT_URI from tests.typing import ClientSessionGenerator -async def async_get_code( - hass: HomeAssistant, aiohttp_client: ClientSessionGenerator -) -> dict[str, Any]: +async def async_get_code(hass, aiohttp_client): """Return authorization code for link user tests.""" config = [ { diff --git a/tests/components/automation/test_blueprint.py b/tests/components/automation/test_blueprint.py index 1095c625fb2..ee3fa631d00 100644 --- a/tests/components/automation/test_blueprint.py +++ b/tests/components/automation/test_blueprint.py @@ -1,10 +1,8 @@ """Test built-in blueprints.""" import asyncio -from collections.abc import Iterator import contextlib from datetime import timedelta -from os import PathLike import pathlib from typing import Any from unittest.mock import patch @@ -25,9 +23,7 @@ BUILTIN_BLUEPRINT_FOLDER = pathlib.Path(automation.__file__).parent / "blueprint @contextlib.contextmanager -def patch_blueprint( - blueprint_path: str, data_path: str | PathLike[str] -) -> Iterator[None]: +def patch_blueprint(blueprint_path: str, data_path): """Patch blueprint loading from a different source.""" orig_load = models.DomainBlueprints._load_blueprint @@ -38,10 +34,7 @@ def patch_blueprint( return orig_load(self, path) return models.Blueprint( - yaml.load_yaml(data_path), - expected_domain=self.domain, - path=path, - schema=automation.config.AUTOMATION_BLUEPRINT_SCHEMA, + yaml.load_yaml(data_path), expected_domain=self.domain, path=path ) with patch( diff --git a/tests/components/automation/test_init.py b/tests/components/automation/test_init.py index 2bdc0f7516b..0c300540644 100644 --- a/tests/components/automation/test_init.py +++ b/tests/components/automation/test_init.py @@ -88,7 +88,7 @@ async def test_service_data_not_a_dict( { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation", "data": 100}, + "action": {"service": "test.automation", "data": 100}, } }, ) @@ -111,7 +111,7 @@ async def test_service_data_single_template( automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "action": "test.automation", + "service": "test.automation", "data": "{{ { 'foo': 'bar' } }}", }, } @@ -136,7 +136,7 @@ async def test_service_specify_data( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "action": "test.automation", + "service": "test.automation", "data_template": { "some": ( "{{ trigger.platform }} - {{ trigger.event.event_type }}" @@ -170,7 +170,7 @@ async def test_service_specify_entity_id( { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation", "entity_id": "hello.world"}, + "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -178,7 +178,7 @@ async def 
test_service_specify_entity_id( hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 - assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] + assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_service_specify_entity_id_list( @@ -192,7 +192,7 @@ async def test_service_specify_entity_id_list( automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "action": "test.automation", + "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } @@ -202,7 +202,7 @@ async def test_service_specify_entity_id_list( hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 - assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world", "hello.world2"] + assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) async def test_two_triggers(hass: HomeAssistant, calls: list[ServiceCall]) -> None: @@ -216,7 +216,7 @@ async def test_two_triggers(hass: HomeAssistant, calls: list[ServiceCall]) -> No {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], - "action": {"action": "test.automation"}, + "action": {"service": "test.automation"}, } }, ) @@ -240,12 +240,12 @@ async def test_trigger_service_ignoring_condition( automation.DOMAIN: { "alias": "test", "trigger": [{"platform": "event", "event_type": "test_event"}], - "conditions": { + "condition": { "condition": "numeric_state", "entity_id": "non.existing", "above": "1", }, - "action": {"action": "test.automation"}, + "action": {"service": "test.automation"}, } }, ) @@ -292,8 +292,8 @@ async def test_two_conditions_with_and( automation.DOMAIN, { automation.DOMAIN: { - "triggers": [{"platform": "event", "event_type": "test_event"}], - "conditions": [ + "trigger": [{"platform": "event", "event_type": "test_event"}], + "condition": [ {"condition": "state", "entity_id": entity_id, "state": "100"}, { "condition": "numeric_state", @@ -301,7 +301,7 @@ async def test_two_conditions_with_and( "below": 150, }, ], - "actions": {"action": "test.automation"}, + "action": {"service": "test.automation"}, } }, ) @@ -331,9 +331,9 @@ async def test_shorthand_conditions_template( automation.DOMAIN, { automation.DOMAIN: { - "triggers": [{"platform": "event", "event_type": "test_event"}], - "conditions": "{{ is_state('test.entity', 'hello') }}", - "actions": {"action": "test.automation"}, + "trigger": [{"platform": "event", "event_type": "test_event"}], + "condition": "{{ is_state('test.entity', 'hello') }}", + "action": {"service": "test.automation"}, } }, ) @@ -360,11 +360,11 @@ async def test_automation_list_setting( automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation"}, + "action": {"service": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, - "action": {"action": "test.automation"}, + "action": {"service": "test.automation"}, }, ] }, @@ -390,8 +390,8 @@ async def test_automation_calling_two_actions( automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ - {"action": "test.automation", "data": {"position": 0}}, - {"action": "test.automation", "data": {"position": 1}}, + {"service": "test.automation", "data": {"position": 0}}, + {"service": "test.automation", "data": {"position": 1}}, ], } }, @@ -420,7 +420,7 @@ async def test_shared_context(hass: HomeAssistant, calls: list[ServiceCall]) -> { "alias": 
"bye", "trigger": {"platform": "event", "event_type": "test_event2"}, - "action": {"action": "test.automation"}, + "action": {"service": "test.automation"}, }, ] }, @@ -486,7 +486,7 @@ async def test_services(hass: HomeAssistant, calls: list[ServiceCall]) -> None: automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation"}, + "action": {"service": "test.automation"}, } }, ) @@ -569,7 +569,7 @@ async def test_reload_config_service( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "action": "test.automation", + "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -597,7 +597,7 @@ async def test_reload_config_service( "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { - "action": "test.automation", + "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -650,7 +650,7 @@ async def test_reload_config_when_invalid_config( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "action": "test.automation", + "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -690,7 +690,7 @@ async def test_reload_config_handles_load_fails( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "action": "test.automation", + "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -735,7 +735,7 @@ async def test_automation_stops( "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"action": "test.automation"}, + {"service": "test.automation"}, ], } } @@ -807,11 +807,11 @@ async def test_reload_unchanged_does_not_stop( config = { automation.DOMAIN: { "alias": "hello", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": [ + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"action": "test.automation"}, + {"service": "test.automation"}, ], } } @@ -854,11 +854,11 @@ async def test_reload_single_unchanged_does_not_stop( automation.DOMAIN: { "id": "sun", "alias": "hello", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": [ + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"action": "test.automation"}, + {"service": "test.automation"}, ], } } @@ -905,7 +905,7 @@ async def test_reload_single_add_automation( "id": "sun", "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"action": "test.automation"}], + "action": [{"service": "test.automation"}], } } assert await async_setup_component(hass, automation.DOMAIN, config1) @@ -942,25 +942,25 @@ async def test_reload_single_parallel_calls( "id": "sun", "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_sun"}, - "action": [{"action": "test.automation"}], + "action": [{"service": "test.automation"}], }, { "id": "moon", "alias": "goodbye", "trigger": {"platform": "event", "event_type": "test_event_moon"}, - "action": [{"action": "test.automation"}], + "action": [{"service": "test.automation"}], }, { "id": "mars", "alias": "goodbye", "trigger": {"platform": "event", 
"event_type": "test_event_mars"}, - "action": [{"action": "test.automation"}], + "action": [{"service": "test.automation"}], }, { "id": "venus", "alias": "goodbye", "trigger": {"platform": "event", "event_type": "test_event_venus"}, - "action": [{"action": "test.automation"}], + "action": [{"service": "test.automation"}], }, ] } @@ -1055,7 +1055,7 @@ async def test_reload_single_remove_automation( "id": "sun", "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"action": "test.automation"}], + "action": [{"service": "test.automation"}], } } config2 = {automation.DOMAIN: {}} @@ -1092,13 +1092,13 @@ async def test_reload_moved_automation_without_alias( config = { automation.DOMAIN: [ { - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": [{"action": "test.automation"}], + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": [{"service": "test.automation"}], }, { "alias": "automation_with_alias", - "triggers": {"platform": "event", "event_type": "test_event2"}, - "actions": [{"action": "test.automation"}], + "trigger": {"platform": "event", "event_type": "test_event2"}, + "action": [{"service": "test.automation"}], }, ] } @@ -1148,18 +1148,18 @@ async def test_reload_identical_automations_without_id( automation.DOMAIN: [ { "alias": "dolly", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": [{"action": "test.automation"}], + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": [{"service": "test.automation"}], }, { "alias": "dolly", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": [{"action": "test.automation"}], + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": [{"service": "test.automation"}], }, { "alias": "dolly", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": [{"action": "test.automation"}], + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": [{"service": "test.automation"}], }, ] } @@ -1245,13 +1245,13 @@ async def test_reload_identical_automations_without_id( "automation_config", [ { - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": [{"action": "test.automation"}], + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": [{"service": "test.automation"}], }, # An automation using templates { - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": [{"action": "{{ 'test.automation' }}"}], + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": [{"service": "{{ 'test.automation' }}"}], }, # An automation using blueprint { @@ -1277,14 +1277,14 @@ async def test_reload_identical_automations_without_id( }, { "id": "sun", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": [{"action": "test.automation"}], + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": [{"service": "test.automation"}], }, # An automation using templates { "id": "sun", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": [{"action": "{{ 'test.automation' }}"}], + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": [{"service": "{{ 'test.automation' }}"}], }, # An automation using blueprint { @@ -1380,8 +1380,8 @@ async def test_reload_automation_when_blueprint_changes( # Reload the automations without any change, but with updated blueprint blueprint_path = 
automation.async_get_blueprints(hass).blueprint_folder blueprint_config = yaml.load_yaml(blueprint_path / "test_event_service.yaml") - blueprint_config["actions"] = [blueprint_config["actions"]] - blueprint_config["actions"].append(blueprint_config["actions"][-1]) + blueprint_config["action"] = [blueprint_config["action"]] + blueprint_config["action"].append(blueprint_config["action"][-1]) with ( patch( @@ -1424,12 +1424,12 @@ async def test_automation_restore_state(hass: HomeAssistant) -> None: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, - "action": {"action": "test.automation"}, + "action": {"service": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, - "action": {"action": "test.automation"}, + "action": {"service": "test.automation"}, }, ] } @@ -1474,7 +1474,7 @@ async def test_initial_value_off(hass: HomeAssistant) -> None: "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation", "entity_id": "hello.world"}, + "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1499,7 +1499,7 @@ async def test_initial_value_on(hass: HomeAssistant) -> None: "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "action": "test.automation", + "service": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } @@ -1528,7 +1528,7 @@ async def test_initial_value_off_but_restore_on(hass: HomeAssistant) -> None: "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation", "entity_id": "hello.world"}, + "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1553,7 +1553,7 @@ async def test_initial_value_on_but_restore_off(hass: HomeAssistant) -> None: "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation", "entity_id": "hello.world"}, + "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1576,7 +1576,7 @@ async def test_no_initial_value_and_restore_off(hass: HomeAssistant) -> None: automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation", "entity_id": "hello.world"}, + "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1600,7 +1600,7 @@ async def test_automation_is_on_if_no_initial_state_or_restore( automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation", "entity_id": "hello.world"}, + "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1623,7 +1623,7 @@ async def test_automation_not_trigger_on_bootstrap(hass: HomeAssistant) -> None: automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation", "entity_id": "hello.world"}, + "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1641,7 +1641,7 @@ async def test_automation_not_trigger_on_bootstrap(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 1 - assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] + assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) @pytest.mark.parametrize( @@ 
-1650,13 +1650,13 @@ async def test_automation_not_trigger_on_bootstrap(hass: HomeAssistant) -> None: ( {}, "could not be validated", - "required key not provided @ data['actions']", + "required key not provided @ data['action']", "validation_failed_schema", ), ( { - "triggers": {"platform": "automation"}, - "actions": [], + "trigger": {"platform": "automation"}, + "action": [], }, "failed to setup triggers", "Integration 'automation' does not provide trigger support.", @@ -1664,14 +1664,14 @@ async def test_automation_not_trigger_on_bootstrap(hass: HomeAssistant) -> None: ), ( { - "triggers": {"platform": "event", "event_type": "test_event"}, - "conditions": { + "trigger": {"platform": "event", "event_type": "test_event"}, + "condition": { "condition": "state", # The UUID will fail being resolved to en entity_id "entity_id": "abcdabcdabcdabcdabcdabcdabcdabcd", "state": "blah", }, - "actions": [], + "action": [], }, "failed to setup conditions", "Unknown entity registry entry abcdabcdabcdabcdabcdabcdabcdabcd.", @@ -1679,8 +1679,8 @@ async def test_automation_not_trigger_on_bootstrap(hass: HomeAssistant) -> None: ), ( { - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": { + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": { "condition": "state", # The UUID will fail being resolved to en entity_id "entity_id": "abcdabcdabcdabcdabcdabcdabcdabcd", @@ -1712,9 +1712,9 @@ async def test_automation_bad_config_validation( {"alias": "bad_automation", **broken_config}, { "alias": "good_automation", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": { - "action": "test.automation", + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": { + "service": "test.automation", "entity_id": "hello.world", }, }, @@ -1756,7 +1756,7 @@ async def test_automation_bad_config_validation( "alias": "bad_automation", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { - "action": "test.automation", + "service": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -1785,7 +1785,7 @@ async def test_automation_with_error_in_script( automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation", "entity_id": "hello.world"}, + "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1811,7 +1811,7 @@ async def test_automation_with_error_in_script_2( automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": None, "entity_id": "hello.world"}, + "action": {"service": None, "entity_id": "hello.world"}, } }, ) @@ -1842,19 +1842,19 @@ async def test_automation_restore_last_triggered_with_initial_state( "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation"}, + "action": {"service": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation"}, + "action": {"service": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"action": "test.automation"}, + "action": {"service": "test.automation"}, }, ] } @@ -1970,54 +1970,54 @@ async def test_extraction_functions( DOMAIN: [ { "alias": "test1", - "triggers": [ - {"trigger": "state", 
"entity_id": "sensor.trigger_state"}, + "trigger": [ + {"platform": "state", "entity_id": "sensor.trigger_state"}, { - "trigger": "numeric_state", + "platform": "numeric_state", "entity_id": "sensor.trigger_numeric_state", "above": 10, }, { - "trigger": "calendar", + "platform": "calendar", "entity_id": "calendar.trigger_calendar", "event": "start", }, { - "trigger": "event", + "platform": "event", "event_type": "state_changed", "event_data": {"entity_id": "sensor.trigger_event"}, }, # entity_id is a list of strings (not supported) { - "trigger": "event", + "platform": "event", "event_type": "state_changed", "event_data": {"entity_id": ["sensor.trigger_event2"]}, }, # entity_id is not a valid entity ID { - "trigger": "event", + "platform": "event", "event_type": "state_changed", "event_data": {"entity_id": "abc"}, }, # entity_id is not a string { - "trigger": "event", + "platform": "event", "event_type": "state_changed", "event_data": {"entity_id": 123}, }, ], - "conditions": { + "condition": { "condition": "state", "entity_id": "light.condition_state", "state": "on", }, - "actions": [ + "action": [ { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { @@ -2027,67 +2027,67 @@ async def test_extraction_functions( "type": "turn_on", }, { - "action": "test.test", + "service": "test.test", "target": {"area_id": "area-in-both"}, }, { - "action": "test.test", + "service": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "action": "test.test", + "service": "test.test", "target": {"label_id": "label-in-both"}, }, ], }, { "alias": "test2", - "triggers": [ + "trigger": [ { - "trigger": "device", + "platform": "device", "domain": "light", "type": "turned_on", "entity_id": "light.trigger_2", "device_id": trigger_device_2.id, }, { - "trigger": "tag", + "platform": "tag", "tag_id": "1234", "device_id": "device-trigger-tag1", }, { - "trigger": "tag", + "platform": "tag", "tag_id": "1234", "device_id": ["device-trigger-tag2", "device-trigger-tag3"], }, { - "trigger": "event", + "platform": "event", "event_type": "esphome.button_pressed", "event_data": {"device_id": "device-trigger-event"}, }, # device_id is a list of strings (not supported) { - "trigger": "event", + "platform": "event", "event_type": "esphome.button_pressed", "event_data": {"device_id": ["device-trigger-event2"]}, }, # device_id is not a string { - "trigger": "event", + "platform": "event", "event_type": "esphome.button_pressed", "event_data": {"device_id": 123}, }, ], - "conditions": { + "condition": { "condition": "device", "device_id": condition_device.id, "domain": "light", "type": "is_on", "entity_id": "light.bla", }, - "actions": [ + "action": [ { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -2112,35 +2112,35 @@ async def test_extraction_functions( }, { "alias": "test3", - "triggers": [ + "trigger": [ { - "trigger": "event", + "platform": "event", "event_type": "esphome.button_pressed", "event_data": {"area_id": "area-trigger-event"}, }, # area_id is a list of strings (not supported) { - "trigger": "event", + "platform": "event", "event_type": "esphome.button_pressed", "event_data": {"area_id": ["area-trigger-event2"]}, }, # area_id is not a string { - "trigger": "event", + "platform": "event", "event_type": "esphome.button_pressed", "event_data": {"area_id": 123}, }, ], - "conditions": { + "condition": { "condition": 
"device", "device_id": condition_device.id, "domain": "light", "type": "is_on", "entity_id": "light.bla", }, - "actions": [ + "action": [ { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -2150,27 +2150,27 @@ async def test_extraction_functions( }, {"scene": "scene.hello"}, { - "action": "test.test", + "service": "test.test", "target": {"area_id": "area-in-both"}, }, { - "action": "test.test", + "service": "test.test", "target": {"area_id": "area-in-last"}, }, { - "action": "test.test", + "service": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "action": "test.test", + "service": "test.test", "target": {"floor_id": "floor-in-last"}, }, { - "action": "test.test", + "service": "test.test", "target": {"label_id": "label-in-both"}, }, { - "action": "test.test", + "service": "test.test", "target": {"label_id": "label-in-last"}, }, ], @@ -2287,9 +2287,9 @@ async def test_automation_variables( "event_type": "{{ trigger.event.event_type }}", "this_variables": "{{this.entity_id}}", }, - "triggers": {"trigger": "event", "event_type": "test_event"}, - "actions": { - "action": "test.automation", + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": { + "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", @@ -2302,22 +2302,22 @@ async def test_automation_variables( "variables": { "test_var": "defined_in_config", }, - "trigger": {"trigger": "event", "event_type": "test_event_2"}, - "conditions": { + "trigger": {"platform": "event", "event_type": "test_event_2"}, + "condition": { "condition": "template", "value_template": "{{ trigger.event.data.pass_condition }}", }, - "actions": { - "action": "test.automation", + "action": { + "service": "test.automation", }, }, { "variables": { "test_var": "{{ trigger.event.data.break + 1 }}", }, - "triggers": {"trigger": "event", "event_type": "test_event_3"}, - "actions": { - "action": "test.automation", + "trigger": {"platform": "event", "event_type": "test_event_3"}, + "action": { + "service": "test.automation", }, }, ] @@ -2371,9 +2371,9 @@ async def test_automation_trigger_variables( "trigger_variables": { "test_var": "defined_in_config", }, - "trigger": {"trigger": "event", "event_type": "test_event"}, + "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "action": "test.automation", + "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", @@ -2389,9 +2389,9 @@ async def test_automation_trigger_variables( "test_var": "defined_in_config", "this_trigger_variables": "{{this.entity_id}}", }, - "trigger": {"trigger": "event", "event_type": "test_event_2"}, + "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { - "action": "test.automation", + "service": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", @@ -2436,9 +2436,9 @@ async def test_automation_bad_trigger_variables( "trigger_variables": { "test_var": "{{ states('foo.bar') }}", }, - "trigger": {"trigger": "event", "event_type": "test_event"}, + "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "action": "test.automation", + "service": "test.automation", }, }, ] @@ -2463,9 +2463,9 @@ async def test_automation_this_var_always( { automation.DOMAIN: [ { - "trigger": {"trigger": "event", "event_type": "test_event"}, + "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "action": "test.automation", + 
"service": "test.automation", "data": { "this_template": "{{this.entity_id}}", }, @@ -2517,107 +2517,6 @@ async def test_blueprint_automation( ] -async def test_blueprint_automation_legacy_schema( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: - """Test blueprint automation where the blueprint is using legacy schema.""" - assert await async_setup_component( - hass, - "automation", - { - "automation": { - "use_blueprint": { - "path": "test_event_service_legacy_schema.yaml", - "input": { - "trigger_event": "blueprint_event", - "service_to_call": "test.automation", - "a_number": 5, - }, - } - } - }, - ) - hass.bus.async_fire("blueprint_event") - await hass.async_block_till_done() - assert len(calls) == 1 - assert automation.entities_in_automation(hass, "automation.automation_0") == [ - "light.kitchen" - ] - assert ( - automation.blueprint_in_automation(hass, "automation.automation_0") - == "test_event_service_legacy_schema.yaml" - ) - assert automation.automations_with_blueprint( - hass, "test_event_service_legacy_schema.yaml" - ) == ["automation.automation_0"] - - -@pytest.mark.parametrize( - ("blueprint", "override"), - [ - # Override a blueprint with modern schema with legacy schema - ( - "test_event_service.yaml", - {"trigger": {"platform": "event", "event_type": "override"}}, - ), - # Override a blueprint with modern schema with modern schema - ( - "test_event_service.yaml", - {"triggers": {"platform": "event", "event_type": "override"}}, - ), - # Override a blueprint with legacy schema with legacy schema - ( - "test_event_service_legacy_schema.yaml", - {"trigger": {"platform": "event", "event_type": "override"}}, - ), - # Override a blueprint with legacy schema with modern schema - ( - "test_event_service_legacy_schema.yaml", - {"triggers": {"platform": "event", "event_type": "override"}}, - ), - ], -) -async def test_blueprint_automation_override( - hass: HomeAssistant, calls: list[ServiceCall], blueprint: str, override: dict -) -> None: - """Test blueprint automation where the automation config overrides the blueprint.""" - assert await async_setup_component( - hass, - "automation", - { - "automation": { - "use_blueprint": { - "path": blueprint, - "input": { - "trigger_event": "blueprint_event", - "service_to_call": "test.automation", - "a_number": 5, - }, - }, - } - | override - }, - ) - - hass.bus.async_fire("blueprint_event") - await hass.async_block_till_done() - assert len(calls) == 0 - - hass.bus.async_fire("override") - await hass.async_block_till_done() - assert len(calls) == 1 - - assert automation.entities_in_automation(hass, "automation.automation_0") == [ - "light.kitchen" - ] - assert ( - automation.blueprint_in_automation(hass, "automation.automation_0") == blueprint - ) - assert automation.automations_with_blueprint(hass, blueprint) == [ - "automation.automation_0" - ] - - @pytest.mark.parametrize( ("blueprint_inputs", "problem", "details"), [ @@ -2643,7 +2542,7 @@ async def test_blueprint_automation_override( "Blueprint 'Call service based on event' generated invalid automation", ( "value should be a string for dictionary value @" - " data['actions'][0]['action']" + " data['action'][0]['service']" ), ), ], @@ -2739,9 +2638,9 @@ async def test_trigger_service(hass: HomeAssistant, calls: list[ServiceCall]) -> { automation.DOMAIN: { "alias": "hello", - "trigger": {"trigger": "event", "event_type": "test_event"}, + "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "action": "test.automation", + "service": "test.automation", 
"data_template": {"trigger": "{{ trigger }}"}, }, } @@ -2771,23 +2670,23 @@ async def test_trigger_condition_implicit_id( { automation.DOMAIN: { "trigger": [ - {"trigger": "event", "event_type": "test_event1"}, - {"trigger": "event", "event_type": "test_event2"}, - {"trigger": "event", "event_type": "test_event3"}, + {"platform": "event", "event_type": "test_event1"}, + {"platform": "event", "event_type": "test_event2"}, + {"platform": "event", "event_type": "test_event3"}, ], "action": { "choose": [ { "conditions": {"condition": "trigger", "id": [0, "2"]}, "sequence": { - "action": "test.automation", + "service": "test.automation", "data": {"param": "one"}, }, }, { "conditions": {"condition": "trigger", "id": "1"}, "sequence": { - "action": "test.automation", + "service": "test.automation", "data": {"param": "two"}, }, }, @@ -2823,22 +2722,22 @@ async def test_trigger_condition_explicit_id( { automation.DOMAIN: { "trigger": [ - {"trigger": "event", "event_type": "test_event1", "id": "one"}, - {"trigger": "event", "event_type": "test_event2", "id": "two"}, + {"platform": "event", "event_type": "test_event1", "id": "one"}, + {"platform": "event", "event_type": "test_event2", "id": "two"}, ], "action": { "choose": [ { "conditions": {"condition": "trigger", "id": "one"}, "sequence": { - "action": "test.automation", + "service": "test.automation", "data": {"param": "one"}, }, }, { "conditions": {"condition": "trigger", "id": "two"}, "sequence": { - "action": "test.automation", + "service": "test.automation", "data": {"param": "two"}, }, }, @@ -2923,8 +2822,8 @@ async def test_recursive_automation_starting_script( f" {automation_runs} }}}}" ) }, - {"action": "script.script1"}, - {"action": "test.script_done"}, + {"service": "script.script1"}, + {"service": "test.script_done"}, ], }, } @@ -2938,12 +2837,12 @@ async def test_recursive_automation_starting_script( automation.DOMAIN: { "mode": automation_mode, "trigger": [ - {"trigger": "event", "event_type": "trigger_automation"}, + {"platform": "event", "event_type": "trigger_automation"}, ], "action": [ - {"action": "test.automation_started"}, + {"service": "test.automation_started"}, {"delay": 0.001}, - {"action": "script.script1"}, + {"service": "script.script1"}, ], } }, @@ -3020,11 +2919,11 @@ async def test_recursive_automation( automation.DOMAIN: { "mode": automation_mode, "trigger": [ - {"trigger": "event", "event_type": "trigger_automation"}, + {"platform": "event", "event_type": "trigger_automation"}, ], "action": [ {"event": "trigger_automation"}, - {"action": "test.automation_done"}, + {"service": "test.automation_done"}, ], } }, @@ -3082,11 +2981,11 @@ async def test_recursive_automation_restart_mode( automation.DOMAIN: { "mode": SCRIPT_MODE_RESTART, "trigger": [ - {"trigger": "event", "event_type": "trigger_automation"}, + {"platform": "event", "event_type": "trigger_automation"}, ], "action": [ {"event": "trigger_automation"}, - {"action": "test.automation_done"}, + {"service": "test.automation_done"}, ], } }, @@ -3121,8 +3020,8 @@ async def test_websocket_config( """Test config command.""" config = { "alias": "hello", - "triggers": {"trigger": "event", "event_type": "test_event"}, - "actions": {"action": "test.automation", "data": 100}, + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": {"service": "test.automation", "data": 100}, } assert await async_setup_component( hass, automation.DOMAIN, {automation.DOMAIN: config} @@ -3191,12 +3090,12 @@ async def test_automation_turns_off_other_automation(hass: 
HomeAssistant) -> Non automation.DOMAIN: [ { "trigger": { - "trigger": "state", + "platform": "state", "entity_id": "binary_sensor.presence", "from": "on", }, "action": { - "action": "automation.turn_off", + "service": "automation.turn_off", "target": { "entity_id": "automation.automation_1", }, @@ -3209,7 +3108,7 @@ async def test_automation_turns_off_other_automation(hass: HomeAssistant) -> Non }, { "trigger": { - "trigger": "state", + "platform": "state", "entity_id": "binary_sensor.presence", "from": "on", "for": { @@ -3219,7 +3118,7 @@ async def test_automation_turns_off_other_automation(hass: HomeAssistant) -> Non }, }, "action": { - "action": "persistent_notification.create", + "service": "persistent_notification.create", "metadata": {}, "data": { "message": "Test race", @@ -3286,7 +3185,7 @@ async def test_two_automations_call_restart_script_same_time( "fire_toggle": { "sequence": [ { - "action": "input_boolean.toggle", + "service": "input_boolean.toggle", "target": {"entity_id": "input_boolean.test_1"}, } ] @@ -3302,24 +3201,24 @@ async def test_two_automations_call_restart_script_same_time( automation.DOMAIN: [ { "trigger": { - "trigger": "state", + "platform": "state", "entity_id": "binary_sensor.presence", "to": "on", }, "action": { - "action": "script.fire_toggle", + "service": "script.fire_toggle", }, "id": "automation_0", "mode": "single", }, { "trigger": { - "trigger": "state", + "platform": "state", "entity_id": "binary_sensor.presence", "to": "on", }, "action": { - "action": "script.fire_toggle", + "service": "script.fire_toggle", }, "id": "automation_1", "mode": "single", @@ -3330,7 +3229,6 @@ async def test_two_automations_call_restart_script_same_time( hass.states.async_set("binary_sensor.presence", "on") await hass.async_block_till_done() - await hass.async_block_till_done() assert len(events) == 2 cancel() @@ -3360,7 +3258,7 @@ async def test_two_automation_call_restart_script_right_after_each_other( automation.DOMAIN: [ { "trigger": { - "trigger": "state", + "platform": "state", "entity_id": ["input_boolean.test_1", "input_boolean.test_1"], "from": "off", "to": "on", @@ -3402,120 +3300,3 @@ async def test_two_automation_call_restart_script_right_after_each_other( hass.states.async_set("input_boolean.test_2", "on") await hass.async_block_till_done() assert len(events) == 1 - - -async def test_action_backward_compatibility( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: - """Test we can still use old-style automations. 
- - - Services action using the `service` key instead of `action` - - Singular `trigger` instead of `triggers` - - Singular `condition` instead of `conditions` - - Singular `action` instead of `actions` - """ - assert await async_setup_component( - hass, - automation.DOMAIN, - { - automation.DOMAIN: { - "trigger": {"trigger": "event", "event_type": "test_event"}, - "condition": { - "condition": "template", - "value_template": "{{ True }}", - }, - "action": { - "service": "test.automation", - "entity_id": "hello.world", - "data": {"event": "{{ trigger.event.event_type }}"}, - }, - } - }, - ) - - hass.bus.async_fire("test_event") - await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] - assert calls[0].data.get("event") == "test_event" - - -@pytest.mark.parametrize( - ("config", "message"), - [ - ( - { - "trigger": {"platform": "event", "event_type": "test_event"}, - "triggers": {"platform": "event", "event_type": "test_event2"}, - "actions": [], - }, - "Cannot specify both 'trigger' and 'triggers'. Please use 'triggers' only.", - ), - ( - { - "trigger": {"platform": "event", "event_type": "test_event"}, - "condition": {"condition": "template", "value_template": "{{ True }}"}, - "conditions": {"condition": "template", "value_template": "{{ True }}"}, - }, - "Cannot specify both 'condition' and 'conditions'. Please use 'conditions' only.", - ), - ( - { - "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, - "actions": {"service": "test.automation", "entity_id": "hello.world"}, - }, - "Cannot specify both 'action' and 'actions'. Please use 'actions' only.", - ), - ( - { - "trigger": { - "platform": "event", - "trigger": "event", - "event_type": "test_event2", - }, - "action": [], - }, - "Cannot specify both 'platform' and 'trigger'. 
Please use 'trigger' only.", - ), - ], -) -async def test_invalid_configuration( - hass: HomeAssistant, - config: dict[str, Any], - message: str, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test for invalid automation configurations.""" - assert await async_setup_component( - hass, - automation.DOMAIN, - {automation.DOMAIN: config}, - ) - await hass.async_block_till_done() - assert message in caplog.text - - -@pytest.mark.parametrize( - ("trigger_key"), - ["trigger", "platform"], -) -async def test_valid_configuration( - hass: HomeAssistant, - trigger_key: str, -) -> None: - """Test for valid automation configurations.""" - assert await async_setup_component( - hass, - automation.DOMAIN, - { - automation.DOMAIN: { - "triggers": { - trigger_key: "event", - "event_type": "test_event2", - }, - "action": [], - } - }, - ) - await hass.async_block_till_done() diff --git a/tests/components/automation/test_recorder.py b/tests/components/automation/test_recorder.py index c1defdd0339..fc45e6aee5b 100644 --- a/tests/components/automation/test_recorder.py +++ b/tests/components/automation/test_recorder.py @@ -39,8 +39,8 @@ async def test_exclude_attributes( automation.DOMAIN, { automation.DOMAIN: { - "trigger": {"trigger": "event", "event_type": "test_event"}, - "actions": {"action": "test.automation", "entity_id": "hello.world"}, + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": {"service": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -48,7 +48,7 @@ async def test_exclude_attributes( hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 - assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] + assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) await async_wait_recording_done(hass) states = await hass.async_add_executor_job( diff --git a/tests/components/awair/__init__.py b/tests/components/awair/__init__.py index 0c0fd0eb522..f93866263a2 100644 --- a/tests/components/awair/__init__.py +++ b/tests/components/awair/__init__.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from homeassistant.components.awair.const import DOMAIN +from homeassistant.components.awair import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant diff --git a/tests/components/awair/test_config_flow.py b/tests/components/awair/test_config_flow.py index b27f20e83f3..ab9f5faa425 100644 --- a/tests/components/awair/test_config_flow.py +++ b/tests/components/awair/test_config_flow.py @@ -7,7 +7,7 @@ from aiohttp.client_exceptions import ClientConnectorError from python_awair.exceptions import AuthError, AwairError from homeassistant.components.awair.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -136,7 +136,11 @@ async def test_reauth(hass: HomeAssistant, user, cloud_devices) -> None: ) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "unique_id": CLOUD_UNIQUE_ID}, + data={**CLOUD_CONFIG, CONF_ACCESS_TOKEN: "blah"}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -144,32 +148,27 
@@ async def test_reauth(hass: HomeAssistant, user, cloud_devices) -> None: with patch("python_awair.AwairClient.query", side_effect=AuthError()): result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={CONF_ACCESS_TOKEN: "bad"}, + user_input=CLOUD_CONFIG, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {CONF_ACCESS_TOKEN: "invalid_access_token"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {CONF_ACCESS_TOKEN: "invalid_access_token"} with ( patch( "python_awair.AwairClient.query", side_effect=[user, cloud_devices], ), - patch( - "homeassistant.components.awair.async_setup_entry", return_value=True - ) as mock_setup_entry, + patch("homeassistant.components.awair.async_setup_entry", return_value=True), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={CONF_ACCESS_TOKEN: "good"}, + user_input=CLOUD_CONFIG, ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - mock_setup_entry.assert_called_once() - assert dict(mock_config.data) == {CONF_ACCESS_TOKEN: "good"} + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" async def test_reauth_error(hass: HomeAssistant) -> None: @@ -181,7 +180,11 @@ async def test_reauth_error(hass: HomeAssistant) -> None: ) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "unique_id": CLOUD_UNIQUE_ID}, + data={**CLOUD_CONFIG, CONF_ACCESS_TOKEN: "blah"}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -400,6 +403,10 @@ async def test_zeroconf_discovery_update_configuration( return_value=True, ) as mock_setup_entry, patch("python_awair.AwairClient.query", side_effect=[local_devices]), + patch( + "homeassistant.components.awair.async_setup_entry", + return_value=True, + ), ): result = await hass.config_entries.flow.async_init( DOMAIN, diff --git a/tests/components/awair/test_sensor.py b/tests/components/awair/test_sensor.py index 8c9cd6e3a24..8af1fdd9c7c 100644 --- a/tests/components/awair/test_sensor.py +++ b/tests/components/awair/test_sensor.py @@ -29,7 +29,7 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from . 
import setup_awair @@ -48,24 +48,16 @@ SENSOR_TYPES_MAP = { def assert_expected_properties( hass: HomeAssistant, - entity_registry: er.RegistryEntry, - name: str, - unique_id: str, - state_value: str, + registry: er.RegistryEntry, + name, + unique_id, + state_value, attributes: dict, - model="Awair", - model_id="awair", ): """Assert expected properties from a dict.""" - entity_entry = entity_registry.async_get(name) - assert entity_entry.unique_id == unique_id - - device_registry = dr.async_get(hass) - device_entry = device_registry.async_get(entity_entry.device_id) - assert device_entry is not None - assert device_entry.model == model - assert device_entry.model_id == model_id + entry = registry.async_get(name) + assert entry.unique_id == unique_id state = hass.states.get(name) assert state assert state.state == state_value @@ -209,10 +201,7 @@ async def test_awair_gen2_sensors( async def test_local_awair_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - local_devices, - local_data, + hass: HomeAssistant, entity_registry: er.EntityRegistry, local_devices, local_data ) -> None: """Test expected sensors on a local Awair.""" @@ -226,8 +215,6 @@ async def test_local_awair_sensors( f"{local_devices['device_uuid']}_{SENSOR_TYPES_MAP[API_SCORE].unique_id_tag}", "94", {}, - model="Awair Element", - model_id="awair-element", ) diff --git a/tests/components/aws/test_init.py b/tests/components/aws/test_init.py index 820b08e51b4..9589ad6c037 100644 --- a/tests/components/aws/test_init.py +++ b/tests/components/aws/test_init.py @@ -1,7 +1,6 @@ """Tests for the aws component config and setup.""" import json -from typing import Any from unittest.mock import AsyncMock, MagicMock, call, patch as async_patch from homeassistant.core import HomeAssistant @@ -11,7 +10,7 @@ from homeassistant.setup import async_setup_component class MockAioSession: """Mock AioSession.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Init a mock session.""" self.get_user = AsyncMock() self.invoke = AsyncMock() diff --git a/tests/components/axis/conftest.py b/tests/components/axis/conftest.py index c3377c15955..b306e25c434 100644 --- a/tests/components/axis/conftest.py +++ b/tests/components/axis/conftest.py @@ -2,17 +2,19 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine, Generator +from collections.abc import Callable from copy import deepcopy from types import MappingProxyType -from typing import Any, Protocol +from typing import Any from unittest.mock import AsyncMock, patch from axis.rtsp import Signal, State import pytest import respx +from typing_extensions import Generator from homeassistant.components.axis.const import DOMAIN as AXIS_DOMAIN +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_MODEL, @@ -46,33 +48,9 @@ from .const import ( from tests.common import MockConfigEntry -type ConfigEntryFactoryType = Callable[[], Coroutine[Any, Any, MockConfigEntry]] -type RtspStateType = Callable[[bool], None] - -class RtspEventMock(Protocol): - """Fixture to allow mocking received RTSP events.""" - - def __call__( - self, - topic: str, - data_type: str, - data_value: str, - operation: str = "Initialized", - source_name: str = "", - source_idx: str = "", - ) -> None: - """Send RTSP event.""" - - -class _RtspClientMock(Protocol): - async def __call__( - self, data: dict[str, Any] | None = None, state: str = "" - ) -> None: ... 
- - -@pytest.fixture(name="mock_setup_entry") -def fixture_setup_entry() -> Generator[AsyncMock]: +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.axis.async_setup_entry", return_value=True @@ -84,13 +62,14 @@ def fixture_setup_entry() -> Generator[AsyncMock]: @pytest.fixture(name="config_entry") -def fixture_config_entry( +def config_entry_fixture( + hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any], config_entry_options: MappingProxyType[str, Any], config_entry_version: int, -) -> MockConfigEntry: +) -> ConfigEntry: """Define a config entry fixture.""" - return MockConfigEntry( + config_entry = MockConfigEntry( domain=AXIS_DOMAIN, entry_id="676abe5b73621446e6550a2e86ffe3dd", unique_id=FORMATTED_MAC, @@ -98,16 +77,18 @@ def fixture_config_entry( options=config_entry_options, version=config_entry_version, ) + config_entry.add_to_hass(hass) + return config_entry @pytest.fixture(name="config_entry_version") -def fixture_config_entry_version() -> int: +def config_entry_version_fixture() -> int: """Define a config entry version fixture.""" return 3 @pytest.fixture(name="config_entry_data") -def fixture_config_entry_data() -> MappingProxyType[str, Any]: +def config_entry_data_fixture() -> MappingProxyType[str, Any]: """Define a config entry data fixture.""" return { CONF_HOST: DEFAULT_HOST, @@ -120,7 +101,7 @@ def fixture_config_entry_data() -> MappingProxyType[str, Any]: @pytest.fixture(name="config_entry_options") -def fixture_config_entry_options() -> MappingProxyType[str, Any]: +def config_entry_options_fixture() -> MappingProxyType[str, Any]: """Define a config entry options fixture.""" return {} @@ -128,15 +109,8 @@ def fixture_config_entry_options() -> MappingProxyType[str, Any]: # Axis API fixtures -@pytest.fixture(autouse=True) -def reset_mock_requests() -> Generator[None]: - """Reset respx mock routes after the test.""" - yield - respx.mock.clear() - - -@pytest.fixture(name="mock_requests") -def fixture_request( +@pytest.fixture(name="mock_vapix_requests") +def default_request_fixture( respx_mock: respx.MockRouter, port_management_payload: dict[str, Any], param_properties_payload: str, @@ -241,7 +215,7 @@ def api_discovery_items() -> dict[str, Any]: @pytest.fixture(autouse=True) -def fixture_api_discovery(api_discovery_items: dict[str, Any]) -> None: +def api_discovery_fixture(api_discovery_items: dict[str, Any]) -> None: """Apidiscovery mock response.""" data = deepcopy(API_DISCOVERY_RESPONSE) if api_discovery_items: @@ -250,66 +224,64 @@ def fixture_api_discovery(api_discovery_items: dict[str, Any]) -> None: @pytest.fixture(name="port_management_payload") -def fixture_io_port_management_data() -> dict[str, Any]: +def io_port_management_data_fixture() -> dict[str, Any]: """Property parameter data.""" return PORT_MANAGEMENT_RESPONSE @pytest.fixture(name="param_properties_payload") -def fixture_param_properties_data() -> str: +def param_properties_data_fixture() -> str: """Property parameter data.""" return PROPERTIES_RESPONSE @pytest.fixture(name="param_ports_payload") -def fixture_param_ports_data() -> str: +def param_ports_data_fixture() -> str: """Property parameter data.""" return PORTS_RESPONSE @pytest.fixture(name="mqtt_status_code") -def fixture_mqtt_status_code() -> int: +def mqtt_status_code_fixture() -> int: """Property parameter data.""" return 200 -@pytest.fixture(name="mock_default_requests") -def fixture_default_requests(mock_requests: Callable[[str], None]) -> None: 
+@pytest.fixture(name="setup_default_vapix_requests") +def default_vapix_requests_fixture(mock_vapix_requests: Callable[[str], None]) -> None: """Mock default Vapix requests responses.""" - mock_requests(DEFAULT_HOST) + mock_vapix_requests(DEFAULT_HOST) -@pytest.fixture(name="config_entry_factory") -async def fixture_config_entry_factory( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_requests: Callable[[str], None], -) -> ConfigEntryFactoryType: +@pytest.fixture(name="prepare_config_entry") +async def prep_config_entry_fixture( + hass: HomeAssistant, config_entry: ConfigEntry, setup_default_vapix_requests: None +) -> Callable[[], ConfigEntry]: """Fixture factory to set up Axis network device.""" - async def __mock_setup_config_entry() -> MockConfigEntry: - config_entry.add_to_hass(hass) - mock_requests(config_entry.data[CONF_HOST]) - await hass.config_entries.async_setup(config_entry.entry_id) + async def __mock_setup_config_entry() -> ConfigEntry: + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() return config_entry return __mock_setup_config_entry -@pytest.fixture(name="config_entry_setup") -async def fixture_config_entry_setup( - config_entry_factory: ConfigEntryFactoryType, -) -> MockConfigEntry: +@pytest.fixture(name="setup_config_entry") +async def setup_config_entry_fixture( + hass: HomeAssistant, config_entry: ConfigEntry, setup_default_vapix_requests: None +) -> ConfigEntry: """Define a fixture to set up Axis network device.""" - return await config_entry_factory() + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + return config_entry # RTSP fixtures -@pytest.fixture(autouse=True, name="_mock_rtsp_client") -def fixture_axis_rtsp_client() -> Generator[_RtspClientMock]: +@pytest.fixture(autouse=True) +def mock_axis_rtspclient() -> Generator[Callable[[dict | None, str], None]]: """No real RTSP communication allowed.""" with patch("axis.stream_manager.RTSPClient") as rtsp_client_mock: rtsp_client_mock.return_value.session.state = State.STOPPED @@ -326,7 +298,7 @@ def fixture_axis_rtsp_client() -> Generator[_RtspClientMock]: rtsp_client_mock.return_value.stop = stop_stream - def make_rtsp_call(data: dict[str, Any] | None = None, state: str = "") -> None: + def make_rtsp_call(data: dict | None = None, state: str = "") -> None: """Generate a RTSP call.""" axis_streammanager_session_callback = rtsp_client_mock.call_args[0][4] @@ -341,8 +313,10 @@ def fixture_axis_rtsp_client() -> Generator[_RtspClientMock]: yield make_rtsp_call -@pytest.fixture(autouse=True, name="mock_rtsp_event") -def fixture_rtsp_event(_mock_rtsp_client: _RtspClientMock) -> RtspEventMock: +@pytest.fixture(autouse=True) +def mock_rtsp_event( + mock_axis_rtspclient: Callable[[dict | None, str], None], +) -> Callable[[str, str, str, str, str, str], None]: """Fixture to allow mocking received RTSP events.""" def send_event( @@ -387,18 +361,20 @@ def fixture_rtsp_event(_mock_rtsp_client: _RtspClientMock) -> RtspEventMock: """ - _mock_rtsp_client(data=event.encode("utf-8")) + mock_axis_rtspclient(data=event.encode("utf-8")) return send_event -@pytest.fixture(autouse=True, name="mock_rtsp_signal_state") -def fixture_rtsp_signal_state(_mock_rtsp_client: _RtspClientMock) -> RtspStateType: +@pytest.fixture(autouse=True) +def mock_rtsp_signal_state( + mock_axis_rtspclient: Callable[[dict | None, str], None], +) -> Callable[[bool], None]: """Fixture to allow mocking RTSP state signalling.""" def 
send_signal(connected: bool) -> None: """Signal state change of RTSP connection.""" signal = Signal.PLAYING if connected else Signal.FAILED - _mock_rtsp_client(state=signal) + mock_axis_rtspclient(state=signal) return send_signal diff --git a/tests/components/axis/const.py b/tests/components/axis/const.py index 2efb464efd7..16b9d17f99e 100644 --- a/tests/components/axis/const.py +++ b/tests/components/axis/const.py @@ -4,8 +4,8 @@ from axis.models.api import CONTEXT MAC = "00408C123456" FORMATTED_MAC = "00:40:8c:12:34:56" -MODEL = "A1234" -NAME = "home" +MODEL = "model" +NAME = "name" DEFAULT_HOST = "1.2.3.4" diff --git a/tests/components/axis/snapshots/test_binary_sensor.ambr b/tests/components/axis/snapshots/test_binary_sensor.ambr deleted file mode 100644 index ab860489d55..00000000000 --- a/tests/components/axis/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,518 +0,0 @@ -# serializer version: 1 -# name: test_binary_sensors[event0][binary_sensor.home_daynight_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_daynight_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'DayNight 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:VideoSource/tnsaxis:DayNightVision-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event0][binary_sensor.home_daynight_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'light', - 'friendly_name': 'home DayNight 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_daynight_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event10][binary_sensor.home_object_analytics_device1scenario8-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_object_analytics_device1scenario8', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Object Analytics Device1Scenario8', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario8-Device1Scenario8', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event10][binary_sensor.home_object_analytics_device1scenario8-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Object Analytics Device1Scenario8', - }), - 'context': , - 'entity_id': 'binary_sensor.home_object_analytics_device1scenario8', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event1][binary_sensor.home_sound_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': 
set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_sound_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sound 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:AudioSource/tnsaxis:TriggerLevel-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event1][binary_sensor.home_sound_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'sound', - 'friendly_name': 'home Sound 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_sound_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event2][binary_sensor.home_pir_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_pir_sensor', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PIR sensor', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:IO/Port-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event2][binary_sensor.home_pir_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'home PIR sensor', - }), - 'context': , - 'entity_id': 'binary_sensor.home_pir_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event3][binary_sensor.home_pir_0-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_pir_0', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PIR 0', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:Sensor/PIR-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event3][binary_sensor.home_pir_0-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home PIR 0', - }), - 'context': , - 'entity_id': 'binary_sensor.home_pir_0', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event4][binary_sensor.home_fence_guard_profile_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, 
- 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_fence_guard_profile_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Fence Guard Profile 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/FenceGuard/Camera1Profile1-Camera1Profile1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event4][binary_sensor.home_fence_guard_profile_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Fence Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_fence_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event5][binary_sensor.home_motion_guard_profile_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_motion_guard_profile_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Motion Guard Profile 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/MotionGuard/Camera1Profile1-Camera1Profile1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event5][binary_sensor.home_motion_guard_profile_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Motion Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_motion_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event6][binary_sensor.home_loitering_guard_profile_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_loitering_guard_profile_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Loitering Guard Profile 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/LoiteringGuard/Camera1Profile1-Camera1Profile1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event6][binary_sensor.home_loitering_guard_profile_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Loitering Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_loitering_guard_profile_1', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event7][binary_sensor.home_vmd4_profile_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_vmd4_profile_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VMD4 Profile 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile1-Camera1Profile1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event7][binary_sensor.home_vmd4_profile_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home VMD4 Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_vmd4_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event8][binary_sensor.home_object_analytics_scenario_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_object_analytics_scenario_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Object Analytics Scenario 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario1-Device1Scenario1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event8][binary_sensor.home_object_analytics_scenario_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Object Analytics Scenario 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_object_analytics_scenario_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event9][binary_sensor.home_vmd4_camera1profile9-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_vmd4_camera1profile9', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VMD4 Camera1Profile9', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile9-Camera1Profile9', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_sensors[event9][binary_sensor.home_vmd4_camera1profile9-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home VMD4 Camera1Profile9', - }), - 'context': , - 'entity_id': 'binary_sensor.home_vmd4_camera1profile9', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/axis/snapshots/test_camera.ambr b/tests/components/axis/snapshots/test_camera.ambr deleted file mode 100644 index 564ff96b3d8..00000000000 --- a/tests/components/axis/snapshots/test_camera.ambr +++ /dev/null @@ -1,101 +0,0 @@ -# serializer version: 1 -# name: test_camera[config_entry_options0-][camera.home-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'camera', - 'entity_category': None, - 'entity_id': 'camera.home', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-camera', - 'unit_of_measurement': None, - }) -# --- -# name: test_camera[config_entry_options0-][camera.home-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'access_token': '1', - 'entity_picture': '/api/camera_proxy/camera.home?token=1', - 'friendly_name': 'home', - 'frontend_stream_type': , - 'supported_features': , - }), - 'context': , - 'entity_id': 'camera.home', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'idle', - }) -# --- -# name: test_camera[config_entry_options1-streamprofile=profile_1][camera.home-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'camera', - 'entity_category': None, - 'entity_id': 'camera.home', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-camera', - 'unit_of_measurement': None, - }) -# --- -# name: test_camera[config_entry_options1-streamprofile=profile_1][camera.home-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'access_token': '1', - 'entity_picture': '/api/camera_proxy/camera.home?token=1', - 'friendly_name': 'home', - 'frontend_stream_type': , - 'supported_features': , - }), - 'context': , - 'entity_id': 'camera.home', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'idle', - }) -# --- diff --git a/tests/components/axis/snapshots/test_diagnostics.ambr b/tests/components/axis/snapshots/test_diagnostics.ambr index ebd0061f416..8ea316d00cf 100644 --- a/tests/components/axis/snapshots/test_diagnostics.ambr +++ b/tests/components/axis/snapshots/test_diagnostics.ambr @@ -30,15 +30,13 @@ 'config': dict({ 'data': dict({ 'host': '1.2.3.4', - 'model': 'A1234', - 'name': 'home', + 'model': 'model', + 'name': 'name', 'password': '**REDACTED**', 'port': 80, 'username': '**REDACTED**', }), 'disabled_by': None, - 
'discovery_keys': dict({ - }), 'domain': 'axis', 'entry_id': '676abe5b73621446e6550a2e86ffe3dd', 'minor_version': 1, diff --git a/tests/components/axis/snapshots/test_hub.ambr b/tests/components/axis/snapshots/test_hub.ambr deleted file mode 100644 index 16579287f09..00000000000 --- a/tests/components/axis/snapshots/test_hub.ambr +++ /dev/null @@ -1,73 +0,0 @@ -# serializer version: 1 -# name: test_device_registry_entry[api_discovery_items0] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://1.2.3.4:80', - 'connections': set({ - tuple( - 'mac', - '00:40:8c:12:34:56', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'axis', - '00:40:8c:12:34:56', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Axis Communications AB', - 'model': 'A1234 Network Camera', - 'model_id': None, - 'name': 'home', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': '00:40:8c:12:34:56', - 'suggested_area': None, - 'sw_version': '9.10.1', - 'via_device_id': None, - }) -# --- -# name: test_device_registry_entry[api_discovery_items1] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://1.2.3.4:80', - 'connections': set({ - tuple( - 'mac', - '00:40:8c:12:34:56', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'axis', - '00:40:8c:12:34:56', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Axis Communications AB', - 'model': 'A1234 Network Camera', - 'model_id': None, - 'name': 'home', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': '00:40:8c:12:34:56', - 'suggested_area': None, - 'sw_version': '9.80.1', - 'via_device_id': None, - }) -# --- diff --git a/tests/components/axis/snapshots/test_light.ambr b/tests/components/axis/snapshots/test_light.ambr deleted file mode 100644 index b37da39fe27..00000000000 --- a/tests/components/axis/snapshots/test_light.ambr +++ /dev/null @@ -1,57 +0,0 @@ -# serializer version: 1 -# name: test_lights[api_discovery_items0][light.home_ir_light_0-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.home_ir_light_0', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IR Light 0', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:Light/Status-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[api_discovery_items0][light.home_ir_light_0-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 170, - 'color_mode': , - 'friendly_name': 'home IR Light 0', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.home_ir_light_0', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/axis/snapshots/test_switch.ambr b/tests/components/axis/snapshots/test_switch.ambr deleted file mode 
100644 index dc4c75371cf..00000000000 --- a/tests/components/axis/snapshots/test_switch.ambr +++ /dev/null @@ -1,189 +0,0 @@ -# serializer version: 1 -# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_doorbell-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.home_doorbell', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Doorbell', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_doorbell-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'home Doorbell', - }), - 'context': , - 'entity_id': 'switch.home_doorbell', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_relay_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.home_relay_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Relay 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_relay_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'home Relay 1', - }), - 'context': , - 'entity_id': 'switch.home_relay_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_doorbell-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.home_doorbell', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Doorbell', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_doorbell-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'home Doorbell', - }), - 'context': , - 'entity_id': 'switch.home_doorbell', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_relay_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.home_relay_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Relay 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_relay_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'home Relay 1', - }), - 'context': , - 'entity_id': 'switch.home_relay_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/axis/test_binary_sensor.py b/tests/components/axis/test_binary_sensor.py index 766a51463a4..99a530724e3 100644 --- a/tests/components/axis/test_binary_sensor.py +++ b/tests/components/axis/test_binary_sensor.py @@ -1,22 +1,22 @@ """Axis binary sensor platform tests.""" -from unittest.mock import patch +from collections.abc import Callable +from typing import Any import pytest -from syrupy import SnapshotAssertion -from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN -from homeassistant.const import Platform +from homeassistant.components.binary_sensor import ( + DOMAIN as BINARY_SENSOR_DOMAIN, + BinarySensorDeviceClass, +) +from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from .conftest import ConfigEntryFactoryType, RtspEventMock - -from tests.common import snapshot_platform +from .const import NAME @pytest.mark.parametrize( - "event", + ("event", "entity"), [ ( { @@ -25,7 +25,13 @@ from tests.common import snapshot_platform "source_idx": "1", "data_type": "DayNight", 
"data_value": "1", - } + }, + { + "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_daynight_1", + "state": STATE_ON, + "name": f"{NAME} DayNight 1", + "device_class": BinarySensorDeviceClass.LIGHT, + }, ), ( { @@ -34,7 +40,13 @@ from tests.common import snapshot_platform "source_idx": "1", "data_type": "Sound", "data_value": "0", - } + }, + { + "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_sound_1", + "state": STATE_OFF, + "name": f"{NAME} Sound 1", + "device_class": BinarySensorDeviceClass.SOUND, + }, ), ( { @@ -44,7 +56,13 @@ from tests.common import snapshot_platform "operation": "Initialized", "source_name": "port", "source_idx": "0", - } + }, + { + "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_pir_sensor", + "state": STATE_OFF, + "name": f"{NAME} PIR sensor", + "device_class": BinarySensorDeviceClass.CONNECTIVITY, + }, ), ( { @@ -53,42 +71,78 @@ from tests.common import snapshot_platform "data_value": "0", "source_name": "sensor", "source_idx": "0", - } + }, + { + "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_pir_0", + "state": STATE_OFF, + "name": f"{NAME} PIR 0", + "device_class": BinarySensorDeviceClass.MOTION, + }, ), ( { "topic": "tnsaxis:CameraApplicationPlatform/FenceGuard/Camera1Profile1", "data_type": "active", "data_value": "1", - } + }, + { + "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_fence_guard_profile_1", + "state": STATE_ON, + "name": f"{NAME} Fence Guard Profile 1", + "device_class": BinarySensorDeviceClass.MOTION, + }, ), ( { "topic": "tnsaxis:CameraApplicationPlatform/MotionGuard/Camera1Profile1", "data_type": "active", "data_value": "1", - } + }, + { + "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_motion_guard_profile_1", + "state": STATE_ON, + "name": f"{NAME} Motion Guard Profile 1", + "device_class": BinarySensorDeviceClass.MOTION, + }, ), ( { "topic": "tnsaxis:CameraApplicationPlatform/LoiteringGuard/Camera1Profile1", "data_type": "active", "data_value": "1", - } + }, + { + "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_loitering_guard_profile_1", + "state": STATE_ON, + "name": f"{NAME} Loitering Guard Profile 1", + "device_class": BinarySensorDeviceClass.MOTION, + }, ), ( { "topic": "tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile1", "data_type": "active", "data_value": "1", - } + }, + { + "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_vmd4_profile_1", + "state": STATE_ON, + "name": f"{NAME} VMD4 Profile 1", + "device_class": BinarySensorDeviceClass.MOTION, + }, ), ( { "topic": "tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario1", "data_type": "active", "data_value": "1", - } + }, + { + "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_object_analytics_scenario_1", + "state": STATE_ON, + "name": f"{NAME} Object Analytics Scenario 1", + "device_class": BinarySensorDeviceClass.MOTION, + }, ), # Events with names generated from event ID and topic ( @@ -96,34 +150,50 @@ from tests.common import snapshot_platform "topic": "tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile9", "data_type": "active", "data_value": "1", - } + }, + { + "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_vmd4_camera1profile9", + "state": STATE_ON, + "name": f"{NAME} VMD4 Camera1Profile9", + "device_class": BinarySensorDeviceClass.MOTION, + }, ), ( { "topic": "tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario8", "data_type": "active", "data_value": "1", - } + }, + { + "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_object_analytics_device1scenario8", + "state": STATE_ON, + "name": f"{NAME} Object Analytics Device1Scenario8", + "device_class": BinarySensorDeviceClass.MOTION, + }, ), ], ) +@pytest.mark.usefixtures("setup_config_entry") async def 
test_binary_sensors( hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - config_entry_factory: ConfigEntryFactoryType, - mock_rtsp_event: RtspEventMock, + mock_rtsp_event: Callable[[str, str, str, str, str, str], None], event: dict[str, str], + entity: dict[str, Any], ) -> None: """Test that sensors are loaded properly.""" - with patch("homeassistant.components.axis.PLATFORMS", [Platform.BINARY_SENSOR]): - config_entry = await config_entry_factory() mock_rtsp_event(**event) - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_entity_ids(BINARY_SENSOR_DOMAIN)) == 1 + + state = hass.states.get(entity["id"]) + assert state.state == entity["state"] + assert state.name == entity["name"] + assert state.attributes["device_class"] == entity["device_class"] @pytest.mark.parametrize( - "event", + ("event"), [ # Event with unsupported topic { @@ -155,12 +225,13 @@ async def test_binary_sensors( }, ], ) -@pytest.mark.usefixtures("config_entry_setup") +@pytest.mark.usefixtures("setup_config_entry") async def test_unsupported_events( hass: HomeAssistant, - mock_rtsp_event: RtspEventMock, + mock_rtsp_event: Callable[[str, str, str, str, str, str], None], event: dict[str, str], ) -> None: """Validate nothing breaks with unsupported events.""" mock_rtsp_event(**event) + await hass.async_block_till_done() assert len(hass.states.async_entity_ids(BINARY_SENSOR_DOMAIN)) == 0 diff --git a/tests/components/axis/test_camera.py b/tests/components/axis/test_camera.py index 6cc4bbd7c2f..7d26cc7a3bc 100644 --- a/tests/components/axis/test_camera.py +++ b/tests/components/axis/test_camera.py @@ -1,31 +1,77 @@ """Axis camera platform tests.""" -from unittest.mock import patch +from collections.abc import Callable import pytest -from syrupy import SnapshotAssertion from homeassistant.components import camera -from homeassistant.components.axis.const import CONF_STREAM_PROFILE +from homeassistant.components.axis.const import ( + CONF_STREAM_PROFILE, + DOMAIN as AXIS_DOMAIN, +) from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN -from homeassistant.const import Platform +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_IDLE from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component -from .conftest import ConfigEntryFactoryType from .const import MAC, NAME -from tests.common import snapshot_platform + +async def test_platform_manually_configured(hass: HomeAssistant) -> None: + """Test that nothing happens when platform is manually configured.""" + assert ( + await async_setup_component( + hass, CAMERA_DOMAIN, {CAMERA_DOMAIN: {"platform": AXIS_DOMAIN}} + ) + is True + ) + + assert AXIS_DOMAIN not in hass.data -@pytest.fixture(autouse=True) -def mock_getrandbits(): - """Mock camera access token which normally is randomized.""" - with patch( - "homeassistant.components.camera.SystemRandom.getrandbits", - return_value=1, - ): - yield +@pytest.mark.usefixtures("setup_config_entry") +async def test_camera(hass: HomeAssistant) -> None: + """Test that Axis camera platform is loaded properly.""" + assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 1 + + entity_id = f"{CAMERA_DOMAIN}.{NAME}" + + cam = hass.states.get(entity_id) + assert cam.state == STATE_IDLE + assert cam.name == NAME + + camera_entity = 
camera._get_camera_from_entity_id(hass, entity_id) + assert camera_entity.image_source == "http://1.2.3.4:80/axis-cgi/jpg/image.cgi" + assert camera_entity.mjpeg_source == "http://1.2.3.4:80/axis-cgi/mjpg/video.cgi" + assert ( + await camera_entity.stream_source() + == "rtsp://root:pass@1.2.3.4/axis-media/media.amp?videocodec=h264" + ) + + +@pytest.mark.parametrize("config_entry_options", [{CONF_STREAM_PROFILE: "profile_1"}]) +@pytest.mark.usefixtures("setup_config_entry") +async def test_camera_with_stream_profile(hass: HomeAssistant) -> None: + """Test that Axis camera entity is using the correct path with stream profile.""" + assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 1 + + entity_id = f"{CAMERA_DOMAIN}.{NAME}" + + cam = hass.states.get(entity_id) + assert cam.state == STATE_IDLE + assert cam.name == NAME + + camera_entity = camera._get_camera_from_entity_id(hass, entity_id) + assert camera_entity.image_source == "http://1.2.3.4:80/axis-cgi/jpg/image.cgi" + assert ( + camera_entity.mjpeg_source + == "http://1.2.3.4:80/axis-cgi/mjpg/video.cgi?streamprofile=profile_1" + ) + assert ( + await camera_entity.stream_source() + == "rtsp://root:pass@1.2.3.4/axis-media/media.amp?videocodec=h264&streamprofile=profile_1" + ) PROPERTY_DATA = f"""root.Properties.API.HTTP.Version=3 @@ -36,44 +82,13 @@ root.Properties.Firmware.BuildDate=Feb 15 2019 09:42 root.Properties.Firmware.BuildNumber=26 root.Properties.Firmware.Version=9.10.1 root.Properties.System.SerialNumber={MAC} -""" # No image format data to signal camera support - - -@pytest.mark.parametrize( - ("config_entry_options", "stream_profile"), - [ - ({}, ""), - ({CONF_STREAM_PROFILE: "profile_1"}, "streamprofile=profile_1"), - ], -) -async def test_camera( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - snapshot: SnapshotAssertion, - stream_profile: str, -) -> None: - """Test that Axis camera platform is loaded properly.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CAMERA]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - entity_id = f"{CAMERA_DOMAIN}.{NAME}" - camera_entity = camera.helper.get_camera_from_entity_id(hass, entity_id) - assert camera_entity.image_source == "http://1.2.3.4:80/axis-cgi/jpg/image.cgi" - assert ( - camera_entity.mjpeg_source == "http://1.2.3.4:80/axis-cgi/mjpg/video.cgi" - f"{"" if not stream_profile else f"?{stream_profile}"}" - ) - assert ( - await camera_entity.stream_source() - == "rtsp://root:pass@1.2.3.4/axis-media/media.amp?videocodec=h264" - f"{"" if not stream_profile else f"&{stream_profile}"}" - ) +""" @pytest.mark.parametrize("param_properties_payload", [PROPERTY_DATA]) -@pytest.mark.usefixtures("config_entry_setup") -async def test_camera_disabled(hass: HomeAssistant) -> None: +async def test_camera_disabled( + hass: HomeAssistant, prepare_config_entry: Callable[[], ConfigEntry] +) -> None: """Test that Axis camera platform is loaded properly but does not create camera entity.""" + await prepare_config_entry() assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 0 diff --git a/tests/components/axis/test_config_flow.py b/tests/components/axis/test_config_flow.py index 52dd9c2f8ad..055c74cc9a5 100644 --- a/tests/components/axis/test_config_flow.py +++ b/tests/components/axis/test_config_flow.py @@ -2,7 +2,7 @@ from collections.abc import Callable from ipaddress import ip_address -from unittest.mock import patch 
+from unittest.mock import AsyncMock, patch import pytest @@ -17,9 +17,13 @@ from homeassistant.components.axis.const import ( ) from homeassistant.config_entries import ( SOURCE_DHCP, + SOURCE_IGNORE, + SOURCE_REAUTH, + SOURCE_RECONFIGURE, SOURCE_SSDP, SOURCE_USER, SOURCE_ZEROCONF, + ConfigEntry, ) from homeassistant.const import ( CONF_HOST, @@ -41,9 +45,21 @@ from tests.common import MockConfigEntry DHCP_FORMATTED_MAC = dr.format_mac(MAC).replace(":", "") -@pytest.mark.usefixtures("mock_default_requests") +@pytest.fixture(name="mock_config_entry") +async def mock_config_entry_fixture( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_setup_entry: AsyncMock +) -> MockConfigEntry: + """Mock config entry and setup entry.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + return config_entry + + +@pytest.mark.usefixtures("setup_default_vapix_requests", "mock_setup_entry") async def test_flow_manual_configuration(hass: HomeAssistant) -> None: """Test that config flow works.""" + MockConfigEntry(domain=AXIS_DOMAIN, source=SOURCE_IGNORE).add_to_hass(hass) + result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, context={"source": SOURCE_USER} ) @@ -75,13 +91,13 @@ async def test_flow_manual_configuration(hass: HomeAssistant) -> None: } -async def test_manual_configuration_duplicate_fails( +async def test_manual_configuration_update_configuration( hass: HomeAssistant, - config_entry_setup: MockConfigEntry, - mock_requests: Callable[[str], None], + mock_config_entry: MockConfigEntry, + mock_vapix_requests: Callable[[str], None], ) -> None: """Test that config flow fails on already configured device.""" - assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" + assert mock_config_entry.data[CONF_HOST] == "1.2.3.4" result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, context={"source": SOURCE_USER} @@ -90,7 +106,7 @@ async def test_manual_configuration_duplicate_fails( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - mock_requests("2.3.4.5") + mock_vapix_requests("2.3.4.5") result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -105,19 +121,10 @@ async def test_manual_configuration_duplicate_fails( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" + assert mock_config_entry.data[CONF_HOST] == "2.3.4.5" -@pytest.mark.parametrize( - ("exc", "error"), - [ - (config_flow.AuthenticationRequired, "invalid_auth"), - (config_flow.CannotConnect, "cannot_connect"), - ], -) -async def test_flow_fails_on_api( - hass: HomeAssistant, exc: Exception, error: str -) -> None: +async def test_flow_fails_faulty_credentials(hass: HomeAssistant) -> None: """Test that config flow fails on faulty credentials.""" result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, context={"source": SOURCE_USER} @@ -128,7 +135,7 @@ async def test_flow_fails_on_api( with patch( "homeassistant.components.axis.config_flow.get_axis_api", - side_effect=exc, + side_effect=config_flow.AuthenticationRequired, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -141,10 +148,37 @@ async def test_flow_fails_on_api( }, ) - assert result["errors"] == {"base": error} + assert result["errors"] == {"base": "invalid_auth"} -@pytest.mark.usefixtures("mock_default_requests") +async def test_flow_fails_cannot_connect(hass: HomeAssistant) -> None: + """Test 
that config flow fails on cannot connect.""" + result = await hass.config_entries.flow.async_init( + AXIS_DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + with patch( + "homeassistant.components.axis.config_flow.get_axis_api", + side_effect=config_flow.CannotConnect, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PROTOCOL: "http", + CONF_HOST: "1.2.3.4", + CONF_USERNAME: "user", + CONF_PASSWORD: "pass", + CONF_PORT: 80, + }, + ) + + assert result["errors"] == {"base": "cannot_connect"} + + +@pytest.mark.usefixtures("setup_default_vapix_requests", "mock_setup_entry") async def test_flow_create_entry_multiple_existing_entries_of_same_model( hass: HomeAssistant, ) -> None: @@ -195,19 +229,24 @@ async def test_flow_create_entry_multiple_existing_entries_of_same_model( async def test_reauth_flow_update_configuration( hass: HomeAssistant, - config_entry_setup: MockConfigEntry, - mock_requests: Callable[[str], None], + mock_config_entry: MockConfigEntry, + mock_vapix_requests: Callable[[str], None], ) -> None: """Test that config flow fails on already configured device.""" - assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" - assert config_entry_setup.data[CONF_USERNAME] == "root" - assert config_entry_setup.data[CONF_PASSWORD] == "pass" + assert mock_config_entry.data[CONF_HOST] == "1.2.3.4" + assert mock_config_entry.data[CONF_USERNAME] == "root" + assert mock_config_entry.data[CONF_PASSWORD] == "pass" + + result = await hass.config_entries.flow.async_init( + AXIS_DOMAIN, + context={"source": SOURCE_REAUTH}, + data=mock_config_entry.data, + ) - result = await config_entry_setup.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - mock_requests("2.3.4.5") + mock_vapix_requests("2.3.4.5") result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -221,30 +260,36 @@ async def test_reauth_flow_update_configuration( await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - assert config_entry_setup.data[CONF_PROTOCOL] == "https" - assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" - assert config_entry_setup.data[CONF_PORT] == 443 - assert config_entry_setup.data[CONF_USERNAME] == "user2" - assert config_entry_setup.data[CONF_PASSWORD] == "pass2" + assert result["reason"] == "already_configured" + assert mock_config_entry.data[CONF_PROTOCOL] == "https" + assert mock_config_entry.data[CONF_HOST] == "2.3.4.5" + assert mock_config_entry.data[CONF_PORT] == 443 + assert mock_config_entry.data[CONF_USERNAME] == "user2" + assert mock_config_entry.data[CONF_PASSWORD] == "pass2" async def test_reconfiguration_flow_update_configuration( hass: HomeAssistant, - config_entry_setup: MockConfigEntry, - mock_requests: Callable[[str], None], + mock_config_entry: MockConfigEntry, + mock_vapix_requests: Callable[[str], None], ) -> None: """Test that config flow reconfiguration updates configured device.""" - assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" - assert config_entry_setup.data[CONF_USERNAME] == "root" - assert config_entry_setup.data[CONF_PASSWORD] == "pass" + assert mock_config_entry.data[CONF_HOST] == "1.2.3.4" + assert mock_config_entry.data[CONF_USERNAME] == "root" + assert mock_config_entry.data[CONF_PASSWORD] == "pass" - result = await config_entry_setup.start_reconfigure_flow(hass) + 
result = await hass.config_entries.flow.async_init( + AXIS_DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": mock_config_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - mock_requests("2.3.4.5") + mock_vapix_requests("2.3.4.5") result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -255,12 +300,12 @@ async def test_reconfiguration_flow_update_configuration( await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - assert config_entry_setup.data[CONF_PROTOCOL] == "http" - assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" - assert config_entry_setup.data[CONF_PORT] == 80 - assert config_entry_setup.data[CONF_USERNAME] == "user" - assert config_entry_setup.data[CONF_PASSWORD] == "pass" + assert result["reason"] == "already_configured" + assert mock_config_entry.data[CONF_PROTOCOL] == "http" + assert mock_config_entry.data[CONF_HOST] == "2.3.4.5" + assert mock_config_entry.data[CONF_PORT] == 80 + assert mock_config_entry.data[CONF_USERNAME] == "user" + assert mock_config_entry.data[CONF_PASSWORD] == "pass" @pytest.mark.parametrize( @@ -327,7 +372,7 @@ async def test_reconfiguration_flow_update_configuration( ), ], ) -@pytest.mark.usefixtures("mock_default_requests") +@pytest.mark.usefixtures("setup_default_vapix_requests", "mock_setup_entry") async def test_discovery_flow( hass: HomeAssistant, source: str, @@ -410,12 +455,12 @@ async def test_discovery_flow( ) async def test_discovered_device_already_configured( hass: HomeAssistant, - config_entry_setup: MockConfigEntry, + mock_config_entry: MockConfigEntry, source: str, discovery_info: BaseServiceInfo, ) -> None: """Test that discovery doesn't setup already configured devices.""" - assert config_entry_setup.data[CONF_HOST] == DEFAULT_HOST + assert mock_config_entry.data[CONF_HOST] == DEFAULT_HOST result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, data=discovery_info, context={"source": source} @@ -423,7 +468,7 @@ async def test_discovered_device_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry_setup.data[CONF_HOST] == DEFAULT_HOST + assert mock_config_entry.data[CONF_HOST] == DEFAULT_HOST @pytest.mark.parametrize( @@ -468,14 +513,14 @@ async def test_discovered_device_already_configured( ) async def test_discovery_flow_updated_configuration( hass: HomeAssistant, - config_entry_setup: MockConfigEntry, - mock_requests: Callable[[str], None], + mock_config_entry: MockConfigEntry, + mock_vapix_requests: Callable[[str], None], source: str, discovery_info: BaseServiceInfo, expected_port: int, ) -> None: """Test that discovery flow update configuration with new parameters.""" - assert config_entry_setup.data == { + assert mock_config_entry.data == { CONF_HOST: DEFAULT_HOST, CONF_PORT: 80, CONF_USERNAME: "root", @@ -484,7 +529,7 @@ async def test_discovery_flow_updated_configuration( CONF_NAME: NAME, } - mock_requests("2.3.4.5") + mock_vapix_requests("2.3.4.5") result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, data=discovery_info, context={"source": source} ) @@ -492,7 +537,7 @@ async def test_discovery_flow_updated_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry_setup.data == { + assert mock_config_entry.data == { CONF_HOST: "2.3.4.5", CONF_PORT: expected_port, 
CONF_USERNAME: "root", @@ -601,13 +646,13 @@ async def test_discovery_flow_ignore_link_local_address( async def test_option_flow( - hass: HomeAssistant, config_entry_setup: MockConfigEntry + hass: HomeAssistant, setup_config_entry: ConfigEntry ) -> None: """Test config flow options.""" - assert CONF_STREAM_PROFILE not in config_entry_setup.options - assert CONF_VIDEO_SOURCE not in config_entry_setup.options + assert CONF_STREAM_PROFILE not in setup_config_entry.options + assert CONF_VIDEO_SOURCE not in setup_config_entry.options - result = await hass.config_entries.options.async_init(config_entry_setup.entry_id) + result = await hass.config_entries.options.async_init(setup_config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_stream" @@ -631,5 +676,5 @@ async def test_option_flow( CONF_STREAM_PROFILE: "profile_1", CONF_VIDEO_SOURCE: 1, } - assert config_entry_setup.options[CONF_STREAM_PROFILE] == "profile_1" - assert config_entry_setup.options[CONF_VIDEO_SOURCE] == 1 + assert setup_config_entry.options[CONF_STREAM_PROFILE] == "profile_1" + assert setup_config_entry.options[CONF_VIDEO_SOURCE] == 1 diff --git a/tests/components/axis/test_diagnostics.py b/tests/components/axis/test_diagnostics.py index e96ba88c2cd..c3e1faf4277 100644 --- a/tests/components/axis/test_diagnostics.py +++ b/tests/components/axis/test_diagnostics.py @@ -2,13 +2,12 @@ import pytest from syrupy import SnapshotAssertion -from syrupy.filters import props +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from .const import API_DISCOVERY_BASIC_DEVICE_INFO -from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -17,10 +16,11 @@ from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - config_entry_setup: MockConfigEntry, + setup_config_entry: ConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert await get_diagnostics_for_config_entry( - hass, hass_client, config_entry_setup - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, setup_config_entry) + == snapshot + ) diff --git a/tests/components/axis/test_hub.py b/tests/components/axis/test_hub.py index 74cdb0164cd..fb0a28bb262 100644 --- a/tests/components/axis/test_hub.py +++ b/tests/components/axis/test_hub.py @@ -5,21 +5,27 @@ from ipaddress import ip_address from types import MappingProxyType from typing import Any from unittest import mock -from unittest.mock import ANY, Mock, call, patch +from unittest.mock import ANY, AsyncMock, Mock, call, patch import axis as axislib import pytest -from syrupy import SnapshotAssertion +from typing_extensions import Generator from homeassistant.components import axis, zeroconf from homeassistant.components.axis.const import DOMAIN as AXIS_DOMAIN from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN -from homeassistant.config_entries import SOURCE_ZEROCONF, ConfigEntryState -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE +from homeassistant.config_entries import SOURCE_ZEROCONF, ConfigEntry +from homeassistant.const import ( + CONF_HOST, + CONF_MODEL, + CONF_NAME, + STATE_OFF, + STATE_ON, + STATE_UNAVAILABLE, +) from homeassistant.core import HomeAssistant from 
homeassistant.helpers import device_registry as dr -from .conftest import RtspEventMock, RtspStateType from .const import ( API_DISCOVERY_BASIC_DEVICE_INFO, API_DISCOVERY_MQTT, @@ -28,27 +34,62 @@ from .const import ( NAME, ) -from tests.common import MockConfigEntry, async_fire_mqtt_message +from tests.common import async_fire_mqtt_message from tests.typing import MqttMockHAClient -@pytest.mark.parametrize( - "api_discovery_items", [({}), (API_DISCOVERY_BASIC_DEVICE_INFO)] -) -async def test_device_registry_entry( - config_entry_setup: MockConfigEntry, +@pytest.fixture(name="forward_entry_setups") +def hass_mock_forward_entry_setup(hass: HomeAssistant) -> Generator[AsyncMock]: + """Mock async_forward_entry_setups.""" + with patch.object( + hass.config_entries, "async_forward_entry_setups" + ) as forward_mock: + yield forward_mock + + +async def test_device_setup( + forward_entry_setups: AsyncMock, + config_entry_data: MappingProxyType[str, Any], + setup_config_entry: ConfigEntry, device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, ) -> None: """Successful setup.""" + hub = setup_config_entry.runtime_data + + assert hub.api.vapix.firmware_version == "9.10.1" + assert hub.api.vapix.product_number == "M1065-LW" + assert hub.api.vapix.product_type == "Network Camera" + assert hub.api.vapix.serial_number == "00408C123456" + + assert len(forward_entry_setups.mock_calls) == 1 + platforms = set(forward_entry_setups.mock_calls[0][1][1]) + assert platforms == {"binary_sensor", "camera", "light", "switch"} + + assert hub.config.host == config_entry_data[CONF_HOST] + assert hub.config.model == config_entry_data[CONF_MODEL] + assert hub.config.name == config_entry_data[CONF_NAME] + assert hub.unique_id == FORMATTED_MAC + device_entry = device_registry.async_get_device( - identifiers={(AXIS_DOMAIN, config_entry_setup.unique_id)} + identifiers={(AXIS_DOMAIN, hub.unique_id)} ) - assert device_entry == snapshot + + assert device_entry.configuration_url == hub.api.config.url + + +@pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_BASIC_DEVICE_INFO]) +async def test_device_info(setup_config_entry: ConfigEntry) -> None: + """Verify other path of device information works.""" + hub = setup_config_entry.runtime_data + + assert hub.api.vapix.firmware_version == "9.80.1" + assert hub.api.vapix.product_number == "M1065-LW" + assert hub.api.vapix.product_type == "Network Camera" + assert hub.api.vapix.serial_number == "00408C123456" @pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_MQTT]) -@pytest.mark.usefixtures("config_entry_setup") +@pytest.mark.usefixtures("setup_config_entry") async def test_device_support_mqtt( hass: HomeAssistant, mqtt_mock: MqttMockHAClient ) -> None: @@ -74,7 +115,7 @@ async def test_device_support_mqtt( @pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_MQTT]) @pytest.mark.parametrize("mqtt_status_code", [401]) -@pytest.mark.usefixtures("config_entry_setup") +@pytest.mark.usefixtures("setup_config_entry") async def test_device_support_mqtt_low_privilege(mqtt_mock: MqttMockHAClient) -> None: """Successful setup.""" mqtt_call = call(f"{MAC}/#", mock.ANY, 0, "utf-8") @@ -83,14 +124,14 @@ async def test_device_support_mqtt_low_privilege(mqtt_mock: MqttMockHAClient) -> async def test_update_address( hass: HomeAssistant, - config_entry_setup: MockConfigEntry, - mock_requests: Callable[[str], None], + setup_config_entry: ConfigEntry, + mock_vapix_requests: Callable[[str], None], ) -> None: """Test update address works.""" - hub = 
config_entry_setup.runtime_data + hub = setup_config_entry.runtime_data assert hub.api.config.host == "1.2.3.4" - mock_requests("2.3.4.5") + mock_vapix_requests("2.3.4.5") await hass.config_entries.flow.async_init( AXIS_DOMAIN, data=zeroconf.ZeroconfServiceInfo( @@ -109,11 +150,11 @@ async def test_update_address( assert hub.api.config.host == "2.3.4.5" -@pytest.mark.usefixtures("config_entry_setup") +@pytest.mark.usefixtures("setup_config_entry") async def test_device_unavailable( hass: HomeAssistant, - mock_rtsp_event: RtspEventMock, - mock_rtsp_signal_state: RtspStateType, + mock_rtsp_event: Callable[[str, str, str, str, str, str], None], + mock_rtsp_signal_state: Callable[[bool], None], ) -> None: """Successful setup.""" # Provide an entity that can be used to verify connection state on @@ -146,12 +187,22 @@ async def test_device_unavailable( assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.{NAME}_sound_1").state == STATE_OFF -@pytest.mark.usefixtures("mock_default_requests") +@pytest.mark.usefixtures("setup_default_vapix_requests") +async def test_device_not_accessible( + hass: HomeAssistant, config_entry: ConfigEntry +) -> None: + """Failed setup schedules a retry of setup.""" + with patch.object(axis, "get_axis_api", side_effect=axis.errors.CannotConnect): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert hass.data[AXIS_DOMAIN] == {} + + +@pytest.mark.usefixtures("setup_default_vapix_requests") async def test_device_trigger_reauth_flow( - hass: HomeAssistant, config_entry: MockConfigEntry + hass: HomeAssistant, config_entry: ConfigEntry ) -> None: """Failed authentication trigger a reauthentication flow.""" - config_entry.add_to_hass(hass) with ( patch.object( axis, "get_axis_api", side_effect=axis.errors.AuthenticationRequired @@ -161,7 +212,18 @@ async def test_device_trigger_reauth_flow( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() mock_flow_init.assert_called_once() - assert config_entry.state == ConfigEntryState.SETUP_ERROR + assert hass.data[AXIS_DOMAIN] == {} + + +@pytest.mark.usefixtures("setup_default_vapix_requests") +async def test_device_unknown_error( + hass: HomeAssistant, config_entry: ConfigEntry +) -> None: + """Unknown errors are handled.""" + with patch.object(axis, "get_axis_api", side_effect=Exception): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert hass.data[AXIS_DOMAIN] == {} async def test_shutdown(config_entry_data: MappingProxyType[str, Any]) -> None: @@ -179,31 +241,36 @@ async def test_shutdown(config_entry_data: MappingProxyType[str, Any]) -> None: assert len(axis_device.api.stream.stop.mock_calls) == 1 -@pytest.mark.parametrize( - ("side_effect", "state"), - [ - # Device unauthorized yields authentication required error - (axislib.Unauthorized, ConfigEntryState.SETUP_ERROR), - # Device unavailable yields cannot connect error - (TimeoutError, ConfigEntryState.SETUP_RETRY), - (axislib.RequestError, ConfigEntryState.SETUP_RETRY), - # Device yield unknown error - (axislib.AxisException, ConfigEntryState.SETUP_ERROR), - ], -) -@pytest.mark.usefixtures("mock_default_requests") -async def test_get_axis_api_errors( - hass: HomeAssistant, - config_entry: MockConfigEntry, - side_effect: Exception, - state: ConfigEntryState, +async def test_get_device_fails( + hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any] ) -> None: - """Failed setup schedules a retry of setup.""" - 
config_entry.add_to_hass(hass) - with patch( - "homeassistant.components.axis.hub.api.axis.interfaces.vapix.Vapix.initialize", - side_effect=side_effect, + """Device unauthorized yields authentication required error.""" + with ( + patch( + "axis.interfaces.vapix.Vapix.initialize", side_effect=axislib.Unauthorized + ), + pytest.raises(axis.errors.AuthenticationRequired), ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state == state + await axis.hub.get_axis_api(hass, config_entry_data) + + +async def test_get_device_device_unavailable( + hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any] +) -> None: + """Device unavailable yields cannot connect error.""" + with ( + patch("axis.interfaces.vapix.Vapix.request", side_effect=axislib.RequestError), + pytest.raises(axis.errors.CannotConnect), + ): + await axis.hub.get_axis_api(hass, config_entry_data) + + +async def test_get_device_unknown_error( + hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any] +) -> None: + """Device yield unknown error.""" + with ( + patch("axis.interfaces.vapix.Vapix.request", side_effect=axislib.AxisException), + pytest.raises(axis.errors.AuthenticationRequired), + ): + await axis.hub.get_axis_api(hass, config_entry_data) diff --git a/tests/components/axis/test_init.py b/tests/components/axis/test_init.py index 89737325440..e4dc7cd1eef 100644 --- a/tests/components/axis/test_init.py +++ b/tests/components/axis/test_init.py @@ -5,23 +5,19 @@ from unittest.mock import AsyncMock, Mock, patch import pytest from homeassistant.components import axis -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry - -async def test_setup_entry(config_entry_setup: MockConfigEntry) -> None: +async def test_setup_entry(setup_config_entry: ConfigEntry) -> None: """Test successful setup of entry.""" - assert config_entry_setup.state is ConfigEntryState.LOADED + assert setup_config_entry.state is ConfigEntryState.LOADED async def test_setup_entry_fails( - hass: HomeAssistant, config_entry: MockConfigEntry + hass: HomeAssistant, config_entry: ConfigEntry ) -> None: """Test successful setup of entry.""" - config_entry.add_to_hass(hass) - mock_device = Mock() mock_device.async_setup = AsyncMock(return_value=False) @@ -34,21 +30,18 @@ async def test_setup_entry_fails( async def test_unload_entry( - hass: HomeAssistant, config_entry_setup: MockConfigEntry + hass: HomeAssistant, setup_config_entry: ConfigEntry ) -> None: """Test successful unload of entry.""" - assert config_entry_setup.state is ConfigEntryState.LOADED + assert setup_config_entry.state is ConfigEntryState.LOADED - assert await hass.config_entries.async_unload(config_entry_setup.entry_id) - assert config_entry_setup.state is ConfigEntryState.NOT_LOADED + assert await hass.config_entries.async_unload(setup_config_entry.entry_id) + assert setup_config_entry.state is ConfigEntryState.NOT_LOADED @pytest.mark.parametrize("config_entry_version", [1]) -async def test_migrate_entry( - hass: HomeAssistant, config_entry: MockConfigEntry -) -> None: +async def test_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> None: """Test successful migration of entry data.""" - config_entry.add_to_hass(hass) assert config_entry.version == 1 mock_device = Mock() diff --git a/tests/components/axis/test_light.py 
b/tests/components/axis/test_light.py index c33af5ec3a4..a5ae66afee0 100644 --- a/tests/components/axis/test_light.py +++ b/tests/components/axis/test_light.py @@ -1,12 +1,12 @@ """Axis light platform tests.""" +from collections.abc import Callable from typing import Any from unittest.mock import patch from axis.models.api import CONTEXT import pytest import respx -from syrupy import SnapshotAssertion from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN from homeassistant.const import ( @@ -14,16 +14,12 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, - Platform, + STATE_ON, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from .conftest import ConfigEntryFactoryType, RtspEventMock from .const import DEFAULT_HOST, NAME -from tests.common import snapshot_platform - API_DISCOVERY_LIGHT_CONTROL = { "id": "light-control", "version": "1.1", @@ -73,10 +69,10 @@ def light_control_fixture(light_control_items: list[dict[str, Any]]) -> None: @pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_LIGHT_CONTROL]) @pytest.mark.parametrize("light_control_items", [[]]) -@pytest.mark.usefixtures("config_entry_setup") +@pytest.mark.usefixtures("setup_config_entry") async def test_no_light_entity_without_light_control_representation( hass: HomeAssistant, - mock_rtsp_event: RtspEventMock, + mock_rtsp_event: Callable[[str, str, str, str, str, str], None], ) -> None: """Verify no lights entities get created without light control representation.""" mock_rtsp_event( @@ -92,12 +88,10 @@ async def test_no_light_entity_without_light_control_representation( @pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_LIGHT_CONTROL]) +@pytest.mark.usefixtures("setup_config_entry") async def test_lights( hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - mock_rtsp_event: RtspEventMock, - snapshot: SnapshotAssertion, + mock_rtsp_event: Callable[[str, str, str, str, str, str], None], ) -> None: """Test that lights are loaded properly.""" # Add light @@ -134,9 +128,6 @@ async def test_lights( }, ) - with patch("homeassistant.components.axis.PLATFORMS", [Platform.LIGHT]): - config_entry = await config_entry_factory() - mock_rtsp_event( topic="tns1:Device/tnsaxis:Light/Status", data_type="state", @@ -145,10 +136,15 @@ async def test_lights( source_idx="0", ) await hass.async_block_till_done() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + assert len(hass.states.async_entity_ids(LIGHT_DOMAIN)) == 1 entity_id = f"{LIGHT_DOMAIN}.{NAME}_ir_light_0" + light_0 = hass.states.get(entity_id) + assert light_0.state == STATE_ON + assert light_0.name == f"{NAME} IR Light 0" + # Turn on, set brightness, light already on with ( patch("axis.interfaces.vapix.LightHandler.activate_light") as mock_activate, diff --git a/tests/components/axis/test_switch.py b/tests/components/axis/test_switch.py index 964cfdae64c..479830783b1 100644 --- a/tests/components/axis/test_switch.py +++ b/tests/components/axis/test_switch.py @@ -1,27 +1,23 @@ """Axis switch platform tests.""" +from collections.abc import Callable from unittest.mock import patch from axis.models.api import CONTEXT import pytest -from syrupy import SnapshotAssertion from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, + STATE_OFF, STATE_ON, - Platform, ) from 
homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from .conftest import ConfigEntryFactoryType, RtspEventMock from .const import API_DISCOVERY_PORT_MANAGEMENT, NAME -from tests.common import snapshot_platform - PORT_DATA = """root.IOPort.I0.Configurable=yes root.IOPort.I0.Direction=output root.IOPort.I0.Output.Name=Doorbell @@ -32,6 +28,61 @@ root.IOPort.I1.Output.Name= root.IOPort.I1.Output.Active=open """ + +@pytest.mark.parametrize("param_ports_payload", [PORT_DATA]) +@pytest.mark.usefixtures("setup_config_entry") +async def test_switches_with_port_cgi( + hass: HomeAssistant, + mock_rtsp_event: Callable[[str, str, str, str, str, str], None], +) -> None: + """Test that switches are loaded properly using port.cgi.""" + mock_rtsp_event( + topic="tns1:Device/Trigger/Relay", + data_type="LogicalState", + data_value="inactive", + source_name="RelayToken", + source_idx="0", + ) + mock_rtsp_event( + topic="tns1:Device/Trigger/Relay", + data_type="LogicalState", + data_value="active", + source_name="RelayToken", + source_idx="1", + ) + await hass.async_block_till_done() + + assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 + + relay_1 = hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1") + assert relay_1.state == STATE_ON + assert relay_1.name == f"{NAME} Relay 1" + + entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell" + + relay_0 = hass.states.get(entity_id) + assert relay_0.state == STATE_OFF + assert relay_0.name == f"{NAME} Doorbell" + + with patch("axis.interfaces.vapix.Ports.close") as mock_turn_on: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_turn_on.assert_called_once_with("0") + + with patch("axis.interfaces.vapix.Ports.open") as mock_turn_off: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_turn_off.assert_called_once_with("0") + + PORT_MANAGEMENT_RESPONSE = { "apiVersion": "1.0", "method": "getPorts", @@ -62,70 +113,14 @@ PORT_MANAGEMENT_RESPONSE = { } -@pytest.mark.parametrize("param_ports_payload", [PORT_DATA]) -async def test_switches_with_port_cgi( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - mock_rtsp_event: RtspEventMock, - snapshot: SnapshotAssertion, -) -> None: - """Test that switches are loaded properly using port.cgi.""" - with patch("homeassistant.components.axis.PLATFORMS", [Platform.SWITCH]): - config_entry = await config_entry_factory() - - mock_rtsp_event( - topic="tns1:Device/Trigger/Relay", - data_type="LogicalState", - data_value="inactive", - source_name="RelayToken", - source_idx="0", - ) - mock_rtsp_event( - topic="tns1:Device/Trigger/Relay", - data_type="LogicalState", - data_value="active", - source_name="RelayToken", - source_idx="1", - ) - await hass.async_block_till_done() - - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell" - - with patch("axis.interfaces.vapix.Ports.close") as mock_turn_on: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mock_turn_on.assert_called_once_with("0") - - with patch("axis.interfaces.vapix.Ports.open") as mock_turn_off: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mock_turn_off.assert_called_once_with("0") - - 
@pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_PORT_MANAGEMENT]) @pytest.mark.parametrize("port_management_payload", [PORT_MANAGEMENT_RESPONSE]) +@pytest.mark.usefixtures("setup_config_entry") async def test_switches_with_port_management( hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - mock_rtsp_event: RtspEventMock, - snapshot: SnapshotAssertion, + mock_rtsp_event: Callable[[str, str, str, str, str, str], None], ) -> None: """Test that switches are loaded properly using port management.""" - with patch("homeassistant.components.axis.PLATFORMS", [Platform.SWITCH]): - config_entry = await config_entry_factory() - mock_rtsp_event( topic="tns1:Device/Trigger/Relay", data_type="LogicalState", @@ -142,10 +137,31 @@ async def test_switches_with_port_management( ) await hass.async_block_till_done() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 + + relay_1 = hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1") + assert relay_1.state == STATE_ON + assert relay_1.name == f"{NAME} Relay 1" entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell" + relay_0 = hass.states.get(entity_id) + assert relay_0.state == STATE_OFF + assert relay_0.name == f"{NAME} Doorbell" + + # State update + + mock_rtsp_event( + topic="tns1:Device/Trigger/Relay", + data_type="LogicalState", + data_value="active", + source_name="RelayToken", + source_idx="0", + ) + await hass.async_block_till_done() + + assert hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1").state == STATE_ON + with patch("axis.interfaces.vapix.IoPortManagement.close") as mock_turn_on: await hass.services.async_call( SWITCH_DOMAIN, @@ -163,16 +179,3 @@ async def test_switches_with_port_management( blocking=True, ) mock_turn_off.assert_called_once_with("0") - - # State update - - mock_rtsp_event( - topic="tns1:Device/Trigger/Relay", - data_type="LogicalState", - data_value="active", - source_name="RelayToken", - source_idx="0", - ) - await hass.async_block_till_done() - - assert hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1").state == STATE_ON diff --git a/tests/components/azure_data_explorer/conftest.py b/tests/components/azure_data_explorer/conftest.py index f8915a12ce1..4168021b333 100644 --- a/tests/components/azure_data_explorer/conftest.py +++ b/tests/components/azure_data_explorer/conftest.py @@ -1,12 +1,12 @@ """Test fixtures for Azure Data Explorer.""" -from collections.abc import Generator from datetime import timedelta import logging from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.azure_data_explorer.const import ( CONF_FILTER, diff --git a/tests/components/azure_data_explorer/test_init.py b/tests/components/azure_data_explorer/test_init.py index 10633154efd..4d339728d09 100644 --- a/tests/components/azure_data_explorer/test_init.py +++ b/tests/components/azure_data_explorer/test_init.py @@ -9,10 +9,14 @@ from azure.kusto.ingest import StreamDescriptor import pytest from homeassistant.components import azure_data_explorer -from homeassistant.components.azure_data_explorer.const import CONF_SEND_INTERVAL +from homeassistant.components.azure_data_explorer.const import ( + CONF_SEND_INTERVAL, + DOMAIN, +) from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_ON from homeassistant.core import HomeAssistant +from homeassistant.setup 
import async_setup_component from homeassistant.util.dt import utcnow from . import FilterTest @@ -95,6 +99,27 @@ async def test_put_event_on_queue_with_queueing_client( assert type(mock_queued_ingest.call_args.args[0]) is StreamDescriptor +async def test_import(hass: HomeAssistant) -> None: + """Test the popping of the filter and further import of the config.""" + config = { + DOMAIN: { + "filter": { + "include_domains": ["light"], + "include_entity_globs": ["sensor.included_*"], + "include_entities": ["binary_sensor.included"], + "exclude_domains": ["light"], + "exclude_entity_globs": ["sensor.excluded_*"], + "exclude_entities": ["binary_sensor.excluded"], + }, + } + } + + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done() + + assert "filter" in hass.data[DOMAIN] + + async def test_unload_entry( hass: HomeAssistant, entry_managed: MockConfigEntry, @@ -214,6 +239,7 @@ async def test_filter( ) await hass.async_block_till_done() assert mock_managed_streaming.called == test.expect_called + assert "filter" in hass.data[DOMAIN] @pytest.mark.parametrize( diff --git a/tests/components/azure_devops/__init__.py b/tests/components/azure_devops/__init__.py index 6414fe0257c..d636a6fda6d 100644 --- a/tests/components/azure_devops/__init__.py +++ b/tests/components/azure_devops/__init__.py @@ -1,12 +1,9 @@ """Tests for the Azure DevOps integration.""" -from datetime import datetime from typing import Final -from aioazuredevops.models.build import Build, BuildDefinition +from aioazuredevops.models.builds import Build, BuildDefinition from aioazuredevops.models.core import Project -from aioazuredevops.models.work_item import WorkItem, WorkItemFields -from aioazuredevops.models.work_item_type import Category, Icon, State, WorkItemType from homeassistant.components.azure_devops.const import CONF_ORG, CONF_PAT, CONF_PROJECT from homeassistant.core import HomeAssistant @@ -80,55 +77,6 @@ DEVOPS_BUILD_MISSING_PROJECT_DEFINITION = Build( build_id=9876, ) -DEVOPS_WORK_ITEM_TYPES = [ - WorkItemType( - name="Bug", - reference_name="System.Bug", - description="Bug", - color="ff0000", - icon=Icon(id="1234", url="https://example.com/icon.png"), - is_disabled=False, - xml_form="", - fields=[], - field_instances=[], - transitions={}, - states=[ - State(name="New", color="ff0000", category=Category.PROPOSED), - State(name="Active", color="ff0000", category=Category.IN_PROGRESS), - State(name="Resolved", color="ff0000", category=Category.RESOLVED), - State(name="Closed", color="ff0000", category=Category.COMPLETED), - ], - url="", - ) -] - -DEVOPS_WORK_ITEM_IDS = [1] - -DEVOPS_WORK_ITEMS = [ - WorkItem( - id=1, - rev=1, - fields=WorkItemFields( - area_path="", - team_project="", - iteration_path="", - work_item_type="Bug", - state="New", - reason="New", - assigned_to=None, - created_date=datetime(2021, 1, 1), - created_by=None, - changed_date=datetime(2021, 1, 1), - changed_by=None, - comment_count=0, - title="Test", - microsoft_vsts_common_state_change_date=datetime(2021, 1, 1), - microsoft_vsts_common_priority=1, - ), - url="https://example.com", - ) -] - async def setup_integration( hass: HomeAssistant, diff --git a/tests/components/azure_devops/conftest.py b/tests/components/azure_devops/conftest.py index 54c730f9523..c65adaa4da5 100644 --- a/tests/components/azure_devops/conftest.py +++ b/tests/components/azure_devops/conftest.py @@ -7,16 +7,7 @@ import pytest from homeassistant.components.azure_devops.const import DOMAIN -from . 
import ( - DEVOPS_BUILD, - DEVOPS_PROJECT, - DEVOPS_WORK_ITEM_IDS, - DEVOPS_WORK_ITEM_TYPES, - DEVOPS_WORK_ITEMS, - FIXTURE_USER_INPUT, - PAT, - UNIQUE_ID, -) +from . import DEVOPS_BUILD, DEVOPS_PROJECT, FIXTURE_USER_INPUT, PAT, UNIQUE_ID from tests.common import MockConfigEntry @@ -42,9 +33,8 @@ async def mock_devops_client() -> AsyncGenerator[MagicMock]: devops_client.get_project.return_value = DEVOPS_PROJECT devops_client.get_builds.return_value = [DEVOPS_BUILD] devops_client.get_build.return_value = DEVOPS_BUILD - devops_client.get_work_item_types.return_value = DEVOPS_WORK_ITEM_TYPES - devops_client.get_work_item_ids.return_value = DEVOPS_WORK_ITEM_IDS - devops_client.get_work_items.return_value = DEVOPS_WORK_ITEMS + devops_client.get_work_item_ids.return_value = None + devops_client.get_work_items.return_value = None yield devops_client diff --git a/tests/components/azure_devops/snapshots/test_sensor.ambr b/tests/components/azure_devops/snapshots/test_sensor.ambr index aa8d1d9e7e0..0ce82cae1e8 100644 --- a/tests/components/azure_devops/snapshots/test_sensor.ambr +++ b/tests/components/azure_devops/snapshots/test_sensor.ambr @@ -1,4 +1,467 @@ # serializer version: 1 +# name: test_sensors[sensor.testproject_ci_build_finish_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.testproject_ci_build_finish_time', + 'has_entity_name': True, + 'hidden_by': , + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'CI build finish time', + 'platform': 'azure_devops', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'finish_time', + 'unique_id': 'testorg_1234_9876_finish_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_finish_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'date', + 'friendly_name': 'testproject CI build finish time', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_finish_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T00:00:00+00:00', + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_id-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.testproject_ci_build_id', + 'has_entity_name': True, + 'hidden_by': , + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CI build id', + 'platform': 'azure_devops', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'build_id', + 'unique_id': 'testorg_1234_9876_build_id', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_id-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build id', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_id', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5678', + }) +# --- +# name: 
test_sensors[sensor.testproject_ci_build_queue_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.testproject_ci_build_queue_time', + 'has_entity_name': True, + 'hidden_by': , + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'CI build queue time', + 'platform': 'azure_devops', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'queue_time', + 'unique_id': 'testorg_1234_9876_queue_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_queue_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'date', + 'friendly_name': 'testproject CI build queue time', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_queue_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T00:00:00+00:00', + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_reason-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.testproject_ci_build_reason', + 'has_entity_name': True, + 'hidden_by': , + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CI build reason', + 'platform': 'azure_devops', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reason', + 'unique_id': 'testorg_1234_9876_reason', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_reason-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build reason', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_reason', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'manual', + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_result-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.testproject_ci_build_result', + 'has_entity_name': True, + 'hidden_by': , + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CI build result', + 'platform': 'azure_devops', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'result', + 'unique_id': 'testorg_1234_9876_result', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_result-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build result', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_result', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'succeeded', + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_source_branch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), 
+ 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.testproject_ci_build_source_branch', + 'has_entity_name': True, + 'hidden_by': , + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CI build source branch', + 'platform': 'azure_devops', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'source_branch', + 'unique_id': 'testorg_1234_9876_source_branch', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_source_branch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build source branch', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_source_branch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'main', + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_source_version-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.testproject_ci_build_source_version', + 'has_entity_name': True, + 'hidden_by': , + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CI build source version', + 'platform': 'azure_devops', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'source_version', + 'unique_id': 'testorg_1234_9876_source_version', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_source_version-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build source version', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_source_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '123', + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_start_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.testproject_ci_build_start_time', + 'has_entity_name': True, + 'hidden_by': , + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'CI build start time', + 'platform': 'azure_devops', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'start_time', + 'unique_id': 'testorg_1234_9876_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_start_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'date', + 'friendly_name': 'testproject CI build start time', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_start_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T00:00:00+00:00', + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': 
None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.testproject_ci_build_status', + 'has_entity_name': True, + 'hidden_by': , + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CI build status', + 'platform': 'azure_devops', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status', + 'unique_id': 'testorg_1234_9876_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build status', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'completed', + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_url-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.testproject_ci_build_url', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CI build url', + 'platform': 'azure_devops', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'url', + 'unique_id': 'testorg_1234_9876_url', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.testproject_ci_build_url-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build url', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_url', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_sensors[sensor.testproject_ci_latest_build-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -428,6 +891,52 @@ 'state': '2021-01-01T00:00:00+00:00', }) # --- +# name: test_sensors[sensor.testproject_ci_latest_build_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.testproject_ci_latest_build_status', + 'has_entity_name': True, + 'hidden_by': , + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CI latest build status', + 'platform': 'azure_devops', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status', + 'unique_id': 'testorg_1234_9876_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.testproject_ci_latest_build_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI latest build status', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_latest_build_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'completed', + }) +# --- # name: test_sensors[sensor.testproject_ci_latest_build_url-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ 
-474,6 +983,243 @@ 'state': 'unknown', }) # --- +# name: test_sensors[sensor.testproject_test_build_build_id-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.testproject_test_build_build_id', + 'has_entity_name': True, + 'hidden_by': , + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Test Build build id', + 'platform': 'azure_devops', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'build_id', + 'unique_id': 'testorg_1234_9876_build_id', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.testproject_test_build_build_id-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject Test Build build id', + }), + 'context': , + 'entity_id': 'sensor.testproject_test_build_build_id', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5678', + }) +# --- +# name: test_sensors[sensor.testproject_test_build_latest_build-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.testproject_test_build_latest_build', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Test Build latest build', + 'platform': 'azure_devops', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'latest_build', + 'unique_id': 'testorg_1234_9876_latest_build', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.testproject_test_build_latest_build-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'definition_id': 9876, + 'definition_name': 'Test Build', + 'finish_time': '2021-01-01T00:00:00Z', + 'friendly_name': 'testproject Test Build latest build', + 'id': 5678, + 'queue_time': '2021-01-01T00:00:00Z', + 'reason': 'manual', + 'result': 'succeeded', + 'source_branch': 'main', + 'source_version': '123', + 'start_time': '2021-01-01T00:00:00Z', + 'status': 'completed', + 'url': None, + }), + 'context': , + 'entity_id': 'sensor.testproject_test_build_latest_build', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensors_missing_data[sensor.testproject_ci_build_finish_time-state-missing-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'date', + 'friendly_name': 'testproject CI build finish time', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_finish_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors_missing_data[sensor.testproject_ci_build_id-state-missing-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build id', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_id', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6789', + }) +# --- +# name: test_sensors_missing_data[sensor.testproject_ci_build_queue_time-state-missing-data] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'date', + 'friendly_name': 'testproject CI build queue time', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_queue_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors_missing_data[sensor.testproject_ci_build_reason-state-missing-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build reason', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_reason', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors_missing_data[sensor.testproject_ci_build_result-state-missing-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build result', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_result', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors_missing_data[sensor.testproject_ci_build_source_branch-state-missing-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build source branch', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_source_branch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors_missing_data[sensor.testproject_ci_build_source_version-state-missing-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build source version', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_source_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors_missing_data[sensor.testproject_ci_build_start_time-state-missing-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'date', + 'friendly_name': 'testproject CI build start time', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_start_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors_missing_data[sensor.testproject_ci_build_status-state-missing-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build status', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors_missing_data[sensor.testproject_ci_build_url-state-missing-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI build url', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_build_url', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_sensors_missing_data[sensor.testproject_ci_latest_build-state-missing-data] StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -606,6 +1352,19 @@ 'state': 'unknown', }) # --- +# name: test_sensors_missing_data[sensor.testproject_ci_latest_build_status-state-missing-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'testproject CI latest build status', + }), + 'context': , + 'entity_id': 'sensor.testproject_ci_latest_build_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_sensors_missing_data[sensor.testproject_ci_latest_build_url-state-missing-data] StateSnapshot({ 
'attributes': ReadOnlyDict({ diff --git a/tests/components/azure_devops/test_config_flow.py b/tests/components/azure_devops/test_config_flow.py index 64c771a7adc..45dc10802b9 100644 --- a/tests/components/azure_devops/test_config_flow.py +++ b/tests/components/azure_devops/test_config_flow.py @@ -53,17 +53,20 @@ async def test_authorization_error( async def test_reauth_authorization_error( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, mock_devops_client: AsyncMock, ) -> None: """Test we show user form on Azure DevOps authorization error.""" - mock_config_entry.add_to_hass(hass) mock_devops_client.authorize.return_value = False mock_devops_client.authorized = False - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + data=FIXTURE_USER_INPUT, + ) + assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" + assert result["step_id"] == "reauth" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -72,7 +75,7 @@ async def test_reauth_authorization_error( await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth_confirm" + assert result2["step_id"] == "reauth" assert result2["errors"] == {"base": "invalid_auth"} @@ -105,18 +108,20 @@ async def test_connection_error( async def test_reauth_connection_error( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, mock_devops_client: AsyncMock, ) -> None: """Test we show user form on Azure DevOps connection error.""" - mock_config_entry.add_to_hass(hass) mock_devops_client.authorize.side_effect = aiohttp.ClientError mock_devops_client.authorized = False - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + data=FIXTURE_USER_INPUT, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" + assert result["step_id"] == "reauth" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -125,7 +130,7 @@ async def test_reauth_connection_error( await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth_confirm" + assert result2["step_id"] == "reauth" assert result2["errors"] == {"base": "cannot_connect"} @@ -169,10 +174,14 @@ async def test_reauth_project_error( mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + data=FIXTURE_USER_INPUT, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" + assert result["step_id"] == "reauth" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -181,7 +190,7 @@ async def test_reauth_project_error( await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth_confirm" + assert result2["step_id"] == "reauth" assert result2["errors"] == {"base": "project_error"} @@ -196,10 +205,15 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + 
data=FIXTURE_USER_INPUT, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" + assert result["step_id"] == "reauth" + assert result["errors"] == {"base": "invalid_auth"} mock_devops_client.authorize.return_value = True mock_devops_client.authorized = True diff --git a/tests/components/azure_devops/test_init.py b/tests/components/azure_devops/test_init.py index dd512cb12e0..a7655042f25 100644 --- a/tests/components/azure_devops/test_init.py +++ b/tests/components/azure_devops/test_init.py @@ -91,48 +91,3 @@ async def test_no_builds( assert mock_devops_client.get_builds.call_count == 1 assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_no_work_item_types( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_devops_client: MagicMock, -) -> None: - """Test a failed update entry.""" - mock_devops_client.get_work_item_types.return_value = None - - await setup_integration(hass, mock_config_entry) - - assert mock_devops_client.get_work_item_types.call_count == 1 - - assert mock_config_entry.state is ConfigEntryState.LOADED - - -async def test_no_work_item_ids( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_devops_client: MagicMock, -) -> None: - """Test a failed update entry.""" - mock_devops_client.get_work_item_ids.return_value = None - - await setup_integration(hass, mock_config_entry) - - assert mock_devops_client.get_work_item_ids.call_count == 1 - - assert mock_config_entry.state is ConfigEntryState.LOADED - - -async def test_no_work_items( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_devops_client: MagicMock, -) -> None: - """Test a failed update entry.""" - mock_devops_client.get_work_items.return_value = None - - await setup_integration(hass, mock_config_entry) - - assert mock_devops_client.get_work_items.call_count == 1 - - assert mock_config_entry.state is ConfigEntryState.LOADED diff --git a/tests/components/azure_event_hub/conftest.py b/tests/components/azure_event_hub/conftest.py index b814a845c86..a34f2e646f2 100644 --- a/tests/components/azure_event_hub/conftest.py +++ b/tests/components/azure_event_hub/conftest.py @@ -1,6 +1,5 @@ """Test fixtures for AEH.""" -from collections.abc import AsyncGenerator, Generator from dataclasses import dataclass from datetime import timedelta import logging @@ -9,6 +8,7 @@ from unittest.mock import AsyncMock, MagicMock, patch from azure.eventhub.aio import EventHubProducerClient import pytest +from typing_extensions import AsyncGenerator, Generator from homeassistant.components.azure_event_hub.const import ( CONF_FILTER, diff --git a/tests/components/backup/conftest.py b/tests/components/backup/conftest.py deleted file mode 100644 index 631c774e63c..00000000000 --- a/tests/components/backup/conftest.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Test fixtures for the Backup integration.""" - -from __future__ import annotations - -from collections.abc import Generator -from pathlib import Path -from unittest.mock import MagicMock, Mock, patch - -import pytest - -from homeassistant.core import HomeAssistant - - -@pytest.fixture(name="mocked_json_bytes") -def mocked_json_bytes_fixture() -> Generator[Mock]: - """Mock json_bytes.""" - with patch( - "homeassistant.components.backup.manager.json_bytes", - return_value=b"{}", # Empty JSON - ) as mocked_json_bytes: - yield mocked_json_bytes - - -@pytest.fixture(name="mocked_tarfile") -def mocked_tarfile_fixture() -> Generator[Mock]: - """Mock tarfile.""" - with patch( - 
"homeassistant.components.backup.manager.SecureTarFile" - ) as mocked_tarfile: - yield mocked_tarfile - - -@pytest.fixture(name="mock_backup_generation") -def mock_backup_generation_fixture( - hass: HomeAssistant, mocked_json_bytes: Mock, mocked_tarfile: Mock -) -> Generator[None]: - """Mock backup generator.""" - - def _mock_iterdir(path: Path) -> list[Path]: - if not path.name.endswith("testing_config"): - return [] - return [ - Path("test.txt"), - Path(".DS_Store"), - Path(".storage"), - ] - - with ( - patch("pathlib.Path.iterdir", _mock_iterdir), - patch("pathlib.Path.stat", MagicMock(st_size=123)), - patch("pathlib.Path.is_file", lambda x: x.name != ".storage"), - patch( - "pathlib.Path.is_dir", - lambda x: x.name == ".storage", - ), - patch( - "pathlib.Path.exists", - lambda x: x != Path(hass.config.path("backups")), - ), - patch( - "pathlib.Path.is_symlink", - lambda _: False, - ), - patch( - "pathlib.Path.mkdir", - MagicMock(), - ), - patch( - "homeassistant.components.backup.manager.HAVERSION", - "2025.1.0", - ), - ): - yield diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 42eb524e529..a1d83f5cd75 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -147,54 +147,6 @@ 'type': 'result', }) # --- -# name: test_details[with_hassio-with_backup_content] - dict({ - 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', - }), - 'id': 1, - 'success': False, - 'type': 'result', - }) -# --- -# name: test_details[with_hassio-without_backup_content] - dict({ - 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', - }), - 'id': 1, - 'success': False, - 'type': 'result', - }) -# --- -# name: test_details[without_hassio-with_backup_content] - dict({ - 'id': 1, - 'result': dict({ - 'backup': dict({ - 'date': '1970-01-01T00:00:00.000Z', - 'name': 'Test', - 'path': 'abc123.tar', - 'size': 0.0, - 'slug': 'abc123', - }), - }), - 'success': True, - 'type': 'result', - }) -# --- -# name: test_details[without_hassio-without_backup_content] - dict({ - 'id': 1, - 'result': dict({ - 'backup': None, - }), - 'success': True, - 'type': 'result', - }) -# --- # name: test_generate[with_hassio] dict({ 'error': dict({ @@ -210,23 +162,16 @@ dict({ 'id': 1, 'result': dict({ - 'slug': '27f5c632', + 'date': '1970-01-01T00:00:00.000Z', + 'name': 'Test', + 'path': 'abc123.tar', + 'size': 0.0, + 'slug': 'abc123', }), 'success': True, 'type': 'result', }) # --- -# name: test_generate[without_hassio].1 - dict({ - 'event': dict({ - 'done': True, - 'stage': None, - 'success': True, - }), - 'id': 1, - 'type': 'event', - }) -# --- # name: test_info[with_hassio] dict({ 'error': dict({ @@ -276,22 +221,3 @@ 'type': 'result', }) # --- -# name: test_restore[with_hassio] - dict({ - 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', - }), - 'id': 1, - 'success': False, - 'type': 'result', - }) -# --- -# name: test_restore[without_hassio] - dict({ - 'id': 1, - 'result': None, - 'success': True, - 'type': 'result', - }) -# --- diff --git a/tests/components/backup/test_http.py b/tests/components/backup/test_http.py index 76b1f76b55b..baf1798534a 100644 --- a/tests/components/backup/test_http.py +++ b/tests/components/backup/test_http.py @@ -1,11 +1,8 @@ """Tests for the Backup integration.""" -import asyncio -from io import StringIO from unittest.mock import patch from aiohttp import web -import pytest from 
homeassistant.core import HomeAssistant @@ -26,7 +23,7 @@ async def test_downloading_backup( with ( patch( - "homeassistant.components.backup.manager.BackupManager.async_get_backup", + "homeassistant.components.backup.http.BackupManager.get_backup", return_value=TEST_BACKUP, ), patch("pathlib.Path.exists", return_value=True), @@ -52,12 +49,12 @@ async def test_downloading_backup_not_found( assert resp.status == 404 -async def test_downloading_as_non_admin( +async def test_non_admin( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_admin_user: MockUser, ) -> None: - """Test downloading a backup file when you are not an admin.""" + """Test downloading a backup file that does not exist.""" hass_admin_user.groups = [] await setup_backup_integration(hass) @@ -65,53 +62,3 @@ async def test_downloading_as_non_admin( resp = await client.get("/api/backup/download/abc123") assert resp.status == 401 - - -async def test_uploading_a_backup_file( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, -) -> None: - """Test uploading a backup file.""" - await setup_backup_integration(hass) - - client = await hass_client() - - with patch( - "homeassistant.components.backup.manager.BackupManager.async_receive_backup", - ) as async_receive_backup_mock: - resp = await client.post( - "/api/backup/upload", - data={"file": StringIO("test")}, - ) - assert resp.status == 201 - assert async_receive_backup_mock.called - - -@pytest.mark.parametrize( - ("error", "message"), - [ - (OSError("Boom!"), "Can't write backup file Boom!"), - (asyncio.CancelledError("Boom!"), ""), - ], -) -async def test_error_handling_uploading_a_backup_file( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - error: Exception, - message: str, -) -> None: - """Test error handling when uploading a backup file.""" - await setup_backup_integration(hass) - - client = await hass_client() - - with patch( - "homeassistant.components.backup.manager.BackupManager.async_receive_backup", - side_effect=error, - ): - resp = await client.post( - "/api/backup/upload", - data={"file": StringIO("test")}, - ) - assert resp.status == 500 - assert await resp.text() == message diff --git a/tests/components/backup/test_init.py b/tests/components/backup/test_init.py index e064939d618..9fdfa978f94 100644 --- a/tests/components/backup/test_init.py +++ b/tests/components/backup/test_init.py @@ -33,7 +33,7 @@ async def test_create_service( await setup_backup_integration(hass) with patch( - "homeassistant.components.backup.manager.BackupManager.async_create_backup", + "homeassistant.components.backup.websocket.BackupManager.generate_backup", ) as generate_backup: await hass.services.async_call( DOMAIN, diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 9d24964aedf..41749298819 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -2,18 +2,13 @@ from __future__ import annotations -import asyncio -from unittest.mock import AsyncMock, MagicMock, Mock, mock_open, patch +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, Mock, patch -import aiohttp -from multidict import CIMultiDict, CIMultiDictProxy import pytest from homeassistant.components.backup import BackupManager -from homeassistant.components.backup.manager import ( - BackupPlatformProtocol, - BackupProgress, -) +from homeassistant.components.backup.manager import BackupPlatformProtocol from homeassistant.core import HomeAssistant from homeassistant.exceptions 
import HomeAssistantError from homeassistant.setup import async_setup_component @@ -23,30 +18,59 @@ from .common import TEST_BACKUP from tests.common import MockPlatform, mock_platform -async def _mock_backup_generation( - manager: BackupManager, mocked_json_bytes: Mock, mocked_tarfile: Mock -) -> None: +async def _mock_backup_generation(manager: BackupManager): """Mock backup generator.""" - progress: list[BackupProgress] = [] + def _mock_iterdir(path: Path) -> list[Path]: + if not path.name.endswith("testing_config"): + return [] + return [ + Path("test.txt"), + Path(".DS_Store"), + Path(".storage"), + ] - def on_progress(_progress: BackupProgress) -> None: - """Mock progress callback.""" - progress.append(_progress) + with ( + patch( + "homeassistant.components.backup.manager.SecureTarFile" + ) as mocked_tarfile, + patch("pathlib.Path.iterdir", _mock_iterdir), + patch("pathlib.Path.stat", MagicMock(st_size=123)), + patch("pathlib.Path.is_file", lambda x: x.name != ".storage"), + patch( + "pathlib.Path.is_dir", + lambda x: x.name == ".storage", + ), + patch( + "pathlib.Path.exists", + lambda x: x != manager.backup_dir, + ), + patch( + "pathlib.Path.is_symlink", + lambda _: False, + ), + patch( + "pathlib.Path.mkdir", + MagicMock(), + ), + patch( + "homeassistant.components.backup.manager.json_bytes", + return_value=b"{}", # Empty JSON + ) as mocked_json_bytes, + patch( + "homeassistant.components.backup.manager.HAVERSION", + "2025.1.0", + ), + ): + await manager.generate_backup() - assert manager.backup_task is None - await manager.async_create_backup(on_progress=on_progress) - assert manager.backup_task is not None - assert progress == [] - - await manager.backup_task - assert progress == [BackupProgress(done=True, stage=None, success=True)] - - assert mocked_json_bytes.call_count == 1 - backup_json_dict = mocked_json_bytes.call_args[0][0] - assert isinstance(backup_json_dict, dict) - assert backup_json_dict["homeassistant"] == {"version": "2025.1.0"} - assert manager.backup_dir.as_posix() in str(mocked_tarfile.call_args_list[0][0][0]) + assert mocked_json_bytes.call_count == 1 + backup_json_dict = mocked_json_bytes.call_args[0][0] + assert isinstance(backup_json_dict, dict) + assert backup_json_dict["homeassistant"] == {"version": "2025.1.0"} + assert manager.backup_dir.as_posix() in str( + mocked_tarfile.call_args_list[0][0][0] + ) async def _setup_mock_domain( @@ -84,7 +108,7 @@ async def test_load_backups(hass: HomeAssistant) -> None: ), ): await manager.load_backups() - backups = await manager.async_get_backups() + backups = await manager.get_backups() assert backups == {TEST_BACKUP.slug: TEST_BACKUP} @@ -99,7 +123,7 @@ async def test_load_backups_with_exception( patch("tarfile.open", side_effect=OSError("Test exception")), ): await manager.load_backups() - backups = await manager.async_get_backups() + backups = await manager.get_backups() assert f"Unable to read backup {TEST_BACKUP.path}: Test exception" in caplog.text assert backups == {} @@ -114,7 +138,7 @@ async def test_removing_backup( manager.loaded_backups = True with patch("pathlib.Path.exists", return_value=True): - await manager.async_remove_backup(slug=TEST_BACKUP.slug) + await manager.remove_backup(TEST_BACKUP.slug) assert "Removed backup located at" in caplog.text @@ -125,7 +149,7 @@ async def test_removing_non_existing_backup( """Test removing not existing backup.""" manager = BackupManager(hass) - await manager.async_remove_backup(slug="non_existing") + await manager.remove_backup("non_existing") assert "Removed 
backup located at" not in caplog.text @@ -139,7 +163,7 @@ async def test_getting_backup_that_does_not_exist( manager.loaded_backups = True with patch("pathlib.Path.exists", return_value=False): - backup = await manager.async_get_backup(slug=TEST_BACKUP.slug) + backup = await manager.get_backup(TEST_BACKUP.slug) assert backup is None assert ( @@ -148,28 +172,23 @@ async def test_getting_backup_that_does_not_exist( ) in caplog.text -async def test_async_create_backup_when_backing_up(hass: HomeAssistant) -> None: +async def test_generate_backup_when_backing_up(hass: HomeAssistant) -> None: """Test generate backup.""" - event = asyncio.Event() manager = BackupManager(hass) - manager.backup_task = hass.async_create_task(event.wait()) + manager.backing_up = True with pytest.raises(HomeAssistantError, match="Backup already in progress"): - await manager.async_create_backup(on_progress=None) - event.set() + await manager.generate_backup() -@pytest.mark.usefixtures("mock_backup_generation") -async def test_async_create_backup( +async def test_generate_backup( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - mocked_json_bytes: Mock, - mocked_tarfile: Mock, ) -> None: """Test generate backup.""" manager = BackupManager(hass) manager.loaded_backups = True - await _mock_backup_generation(manager, mocked_json_bytes, mocked_tarfile) + await _mock_backup_generation(manager) assert "Generated new backup with slug " in caplog.text assert "Creating backup directory" in caplog.text @@ -226,9 +245,7 @@ async def test_not_loading_bad_platforms( ) -async def test_exception_plaform_pre( - hass: HomeAssistant, mocked_json_bytes: Mock, mocked_tarfile: Mock -) -> None: +async def test_exception_plaform_pre(hass: HomeAssistant) -> None: """Test exception in pre step.""" manager = BackupManager(hass) manager.loaded_backups = True @@ -245,12 +262,10 @@ async def test_exception_plaform_pre( ) with pytest.raises(HomeAssistantError): - await _mock_backup_generation(manager, mocked_json_bytes, mocked_tarfile) + await _mock_backup_generation(manager) -async def test_exception_plaform_post( - hass: HomeAssistant, mocked_json_bytes: Mock, mocked_tarfile: Mock -) -> None: +async def test_exception_plaform_post(hass: HomeAssistant) -> None: """Test exception in post step.""" manager = BackupManager(hass) manager.loaded_backups = True @@ -267,10 +282,10 @@ async def test_exception_plaform_post( ) with pytest.raises(HomeAssistantError): - await _mock_backup_generation(manager, mocked_json_bytes, mocked_tarfile) + await _mock_backup_generation(manager) -async def test_loading_platforms_when_running_async_pre_backup_actions( +async def test_loading_platforms_when_running_pre_backup_actions( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, ) -> None: @@ -287,7 +302,7 @@ async def test_loading_platforms_when_running_async_pre_backup_actions( async_post_backup=AsyncMock(), ), ) - await manager.async_pre_backup_actions() + await manager.pre_backup_actions() assert manager.loaded_platforms assert len(manager.platforms) == 1 @@ -295,7 +310,7 @@ async def test_loading_platforms_when_running_async_pre_backup_actions( assert "Loaded 1 platforms" in caplog.text -async def test_loading_platforms_when_running_async_post_backup_actions( +async def test_loading_platforms_when_running_post_backup_actions( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, ) -> None: @@ -312,71 +327,9 @@ async def test_loading_platforms_when_running_async_post_backup_actions( async_post_backup=AsyncMock(), ), ) - await 
manager.async_post_backup_actions() + await manager.post_backup_actions() assert manager.loaded_platforms assert len(manager.platforms) == 1 assert "Loaded 1 platforms" in caplog.text - - -async def test_async_receive_backup( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test receiving a backup file.""" - manager = BackupManager(hass) - - size = 2 * 2**16 - protocol = Mock(_reading_paused=False) - stream = aiohttp.StreamReader(protocol, 2**16) - stream.feed_data(b"0" * size + b"\r\n--:--") - stream.feed_eof() - - open_mock = mock_open() - - with patch("pathlib.Path.open", open_mock), patch("shutil.move") as mover_mock: - await manager.async_receive_backup( - contents=aiohttp.BodyPartReader( - b"--:", - CIMultiDictProxy( - CIMultiDict( - { - aiohttp.hdrs.CONTENT_DISPOSITION: "attachment; filename=abc123.tar" - } - ) - ), - stream, - ) - ) - assert open_mock.call_count == 1 - assert mover_mock.call_count == 1 - assert mover_mock.mock_calls[0].args[1].name == "abc123.tar" - - -async def test_async_trigger_restore( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test trigger restore.""" - manager = BackupManager(hass) - manager.loaded_backups = True - manager.backups = {TEST_BACKUP.slug: TEST_BACKUP} - - with ( - patch("pathlib.Path.exists", return_value=True), - patch("pathlib.Path.write_text") as mocked_write_text, - patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call, - ): - await manager.async_restore_backup(TEST_BACKUP.slug) - assert mocked_write_text.call_args[0][0] == '{"path": "abc123.tar"}' - assert mocked_service_call.called - - -async def test_async_trigger_restore_missing_backup(hass: HomeAssistant) -> None: - """Test trigger restore.""" - manager = BackupManager(hass) - manager.loaded_backups = True - - with pytest.raises(HomeAssistantError, match="Backup abc123 not found"): - await manager.async_restore_backup(TEST_BACKUP.slug) diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 3e031f172ae..e11278202e0 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -2,11 +2,9 @@ from unittest.mock import patch -from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.backup.manager import Backup from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -47,48 +45,13 @@ async def test_info( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.manager.BackupManager.async_get_backups", + "homeassistant.components.backup.websocket.BackupManager.get_backups", return_value={TEST_BACKUP.slug: TEST_BACKUP}, ): await client.send_json_auto_id({"type": "backup/info"}) assert snapshot == await client.receive_json() -@pytest.mark.parametrize( - "backup_content", - [ - pytest.param(TEST_BACKUP, id="with_backup_content"), - pytest.param(None, id="without_backup_content"), - ], -) -@pytest.mark.parametrize( - "with_hassio", - [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), - ], -) -async def test_details( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, - with_hassio: bool, - backup_content: Backup | None, -) -> None: - """Test getting backup info.""" - await setup_backup_integration(hass, with_hassio=with_hassio) - - client = await hass_ws_client(hass) - await hass.async_block_till_done() - - 
with patch( - "homeassistant.components.backup.manager.BackupManager.async_get_backup", - return_value=backup_content, - ): - await client.send_json_auto_id({"type": "backup/details", "slug": "abc123"}) - assert await client.receive_json() == snapshot - - @pytest.mark.parametrize( "with_hassio", [ @@ -109,40 +72,12 @@ async def test_remove( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.manager.BackupManager.async_remove_backup", + "homeassistant.components.backup.websocket.BackupManager.remove_backup", ): await client.send_json_auto_id({"type": "backup/remove", "slug": "abc123"}) assert snapshot == await client.receive_json() -@pytest.mark.parametrize( - ("with_hassio", "number_of_messages"), - [ - pytest.param(True, 1, id="with_hassio"), - pytest.param(False, 2, id="without_hassio"), - ], -) -@pytest.mark.usefixtures("mock_backup_generation") -async def test_generate( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - freezer: FrozenDateTimeFactory, - snapshot: SnapshotAssertion, - with_hassio: bool, - number_of_messages: int, -) -> None: - """Test generating a backup.""" - await setup_backup_integration(hass, with_hassio=with_hassio) - - client = await hass_ws_client(hass) - freezer.move_to("2024-11-13 12:01:00+01:00") - await hass.async_block_till_done() - - await client.send_json_auto_id({"type": "backup/generate"}) - for _ in range(number_of_messages): - assert snapshot == await client.receive_json() - - @pytest.mark.parametrize( "with_hassio", [ @@ -150,23 +85,24 @@ async def test_generate( pytest.param(False, id="without_hassio"), ], ) -async def test_restore( +async def test_generate( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, snapshot: SnapshotAssertion, with_hassio: bool, ) -> None: - """Test calling the restore command.""" + """Test generating a backup.""" await setup_backup_integration(hass, with_hassio=with_hassio) client = await hass_ws_client(hass) await hass.async_block_till_done() with patch( - "homeassistant.components.backup.manager.BackupManager.async_restore_backup", + "homeassistant.components.backup.websocket.BackupManager.generate_backup", + return_value=TEST_BACKUP, ): - await client.send_json_auto_id({"type": "backup/restore", "slug": "abc123"}) - assert await client.receive_json() == snapshot + await client.send_json_auto_id({"type": "backup/generate"}) + assert snapshot == await client.receive_json() @pytest.mark.parametrize( @@ -196,7 +132,7 @@ async def test_backup_end( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.manager.BackupManager.async_post_backup_actions", + "homeassistant.components.backup.websocket.BackupManager.post_backup_actions", ): await client.send_json_auto_id({"type": "backup/end"}) assert snapshot == await client.receive_json() @@ -229,7 +165,7 @@ async def test_backup_start( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.manager.BackupManager.async_pre_backup_actions", + "homeassistant.components.backup.websocket.BackupManager.pre_backup_actions", ): await client.send_json_auto_id({"type": "backup/start"}) assert snapshot == await client.receive_json() @@ -257,7 +193,7 @@ async def test_backup_end_excepion( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.manager.BackupManager.async_post_backup_actions", + "homeassistant.components.backup.websocket.BackupManager.post_backup_actions", side_effect=exception, ): await client.send_json_auto_id({"type": "backup/end"}) @@ -286,7 
+222,7 @@ async def test_backup_start_excepion( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.manager.BackupManager.async_pre_backup_actions", + "homeassistant.components.backup.websocket.BackupManager.pre_backup_actions", side_effect=exception, ): await client.send_json_auto_id({"type": "backup/start"}) diff --git a/tests/components/baf/__init__.py b/tests/components/baf/__init__.py index a047029f9a0..f1074a87cee 100644 --- a/tests/components/baf/__init__.py +++ b/tests/components/baf/__init__.py @@ -12,7 +12,7 @@ class MockBAFDevice(Device): """A simple mock for a BAF Device.""" # pylint: disable-next=super-init-not-called - def __init__(self, async_wait_available_side_effect=None) -> None: + def __init__(self, async_wait_available_side_effect=None): """Init simple mock.""" self._async_wait_available_side_effect = async_wait_available_side_effect diff --git a/tests/components/balboa/__init__.py b/tests/components/balboa/__init__.py index 2cb100e3642..a27293e955f 100644 --- a/tests/components/balboa/__init__.py +++ b/tests/components/balboa/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations from unittest.mock import MagicMock -from homeassistant.components.balboa.const import CONF_SYNC_TIME, DOMAIN +from homeassistant.components.balboa import CONF_SYNC_TIME, DOMAIN from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant, State diff --git a/tests/components/balboa/conftest.py b/tests/components/balboa/conftest.py index 0bb8b2cd468..fbdc2f8a759 100644 --- a/tests/components/balboa/conftest.py +++ b/tests/components/balboa/conftest.py @@ -2,11 +2,12 @@ from __future__ import annotations -from collections.abc import Callable, Generator +from collections.abc import Callable from unittest.mock import AsyncMock, MagicMock, patch from pybalboa.enums import HeatMode, LowHighRange import pytest +from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/balboa/snapshots/test_fan.ambr b/tests/components/balboa/snapshots/test_fan.ambr index 8d35ab6de7c..2b87a961906 100644 --- a/tests/components/balboa/snapshots/test_fan.ambr +++ b/tests/components/balboa/snapshots/test_fan.ambr @@ -28,7 +28,7 @@ 'original_name': 'Pump 1', 'platform': 'balboa', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': 'pump', 'unique_id': 'FakeSpa-Pump 1-c0ffee', 'unit_of_measurement': None, @@ -42,7 +42,7 @@ 'percentage_step': 50.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.fakespa_pump_1', diff --git a/tests/components/balboa/test_climate.py b/tests/components/balboa/test_climate.py index 850184a7d71..c877f2858cd 100644 --- a/tests/components/balboa/test_climate.py +++ b/tests/components/balboa/test_climate.py @@ -85,8 +85,6 @@ async def test_spa_temperature( hass: HomeAssistant, client: MagicMock, integration: MockConfigEntry ) -> None: """Test spa temperature settings.""" - client.temperature_minimum = 110 - client.temperature_maximum = 250 # flip the spa into F # set temp to a valid number state = await _patch_spa_settemp(hass, client, 0, 100) diff --git a/tests/components/bang_olufsen/conftest.py b/tests/components/bang_olufsen/conftest.py index cbde856ff89..1fbcbe0fe69 100644 --- a/tests/components/bang_olufsen/conftest.py +++ b/tests/components/bang_olufsen/conftest.py @@ -3,59 +3,24 @@ from collections.abc import Generator from unittest.mock import 
AsyncMock, Mock, patch -from mozart_api.models import ( - Action, - BeolinkPeer, - BeolinkSelf, - ContentItem, - ListeningMode, - ListeningModeFeatures, - ListeningModeRef, - ListeningModeTrigger, - PlaybackContentMetadata, - PlaybackProgress, - PlaybackState, - PlayQueueSettings, - ProductState, - RemoteMenuItem, - RenderingState, - SoftwareUpdateState, - SoftwareUpdateStatus, - Source, - SourceArray, - SourceTypeEnum, - VolumeState, -) +from mozart_api.models import BeolinkPeer import pytest from homeassistant.components.bang_olufsen.const import DOMAIN -from homeassistant.core import HomeAssistant from .const import ( TEST_DATA_CREATE_ENTRY, - TEST_DATA_CREATE_ENTRY_2, TEST_FRIENDLY_NAME, - TEST_FRIENDLY_NAME_3, - TEST_FRIENDLY_NAME_4, - TEST_HOST_3, - TEST_HOST_4, TEST_JID_1, - TEST_JID_3, - TEST_JID_4, TEST_NAME, - TEST_NAME_2, TEST_SERIAL_NUMBER, - TEST_SERIAL_NUMBER_2, - TEST_SOUND_MODE, - TEST_SOUND_MODE_2, - TEST_SOUND_MODE_NAME, ) from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry() -> MockConfigEntry: +def mock_config_entry(): """Mock config entry.""" return MockConfigEntry( domain=DOMAIN, @@ -65,32 +30,10 @@ def mock_config_entry() -> MockConfigEntry: ) -@pytest.fixture -def mock_config_entry_2() -> MockConfigEntry: - """Mock config entry.""" - return MockConfigEntry( - domain=DOMAIN, - unique_id=TEST_SERIAL_NUMBER_2, - data=TEST_DATA_CREATE_ENTRY_2, - title=TEST_NAME_2, - ) - - -@pytest.fixture -async def mock_media_player( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_mozart_client: AsyncMock, -) -> None: - """Mock media_player entity.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - @pytest.fixture def mock_mozart_client() -> Generator[AsyncMock]: """Mock MozartClient.""" + with ( patch( "homeassistant.components.bang_olufsen.MozartClient", autospec=True @@ -104,263 +47,21 @@ def mock_mozart_client() -> Generator[AsyncMock]: # REST API client methods client.get_beolink_self = AsyncMock() - client.get_beolink_self.return_value = BeolinkSelf( + client.get_beolink_self.return_value = BeolinkPeer( friendly_name=TEST_FRIENDLY_NAME, jid=TEST_JID_1 ) - client.get_softwareupdate_status = AsyncMock() - client.get_softwareupdate_status.return_value = SoftwareUpdateStatus( - software_version="1.0.0", state=SoftwareUpdateState() - ) - client.get_product_state = AsyncMock() - client.get_product_state.return_value = ProductState( - volume=VolumeState(), - playback=PlaybackState( - metadata=PlaybackContentMetadata(), - progress=PlaybackProgress(), - source=Source(), - state=RenderingState(value="started"), - ), - ) - client.get_available_sources = AsyncMock() - client.get_available_sources.return_value = SourceArray( - items=[ - # Is not playable, so should not be user selectable - Source( - name="AirPlay", - id="airPlay", - is_enabled=True, - is_multiroom_available=False, - ), - # The only available beolink source - Source( - name="Tidal", - id="tidal", - is_enabled=True, - is_multiroom_available=True, - is_playable=True, - ), - Source( - name="Line-In", - id="lineIn", - is_enabled=True, - is_multiroom_available=False, - is_playable=True, - ), - # Is disabled and not playable, so should not be user selectable - Source( - name="Powerlink", - id="pl", - is_enabled=False, - ), - ] - ) - client.get_remote_menu = AsyncMock() - client.get_remote_menu.return_value = { - # Music category, so shouldn't be included in video sources - "b355888b-2cde-5f94-8592-d47b71d52a27": 
RemoteMenuItem( - action_list=[ - Action( - button_name=None, - content_id="netRadio://6629967157728971", - deezer_user_id=None, - gain_db=None, - listening_mode_id=None, - preset_key=None, - queue_item=None, - queue_settings=None, - radio_station_id=None, - source=None, - speaker_group_id=None, - stand_position=None, - stop_duration=None, - tone_name=None, - type="triggerContent", - volume_level=None, - ) - ], - scene_list=None, - disabled=None, - dynamic_list=None, - first_child_menu_item_id=None, - label="Yle Radio Suomi Helsinki", - next_sibling_menu_item_id="0b4552f8-7ac6-5046-9d44-5410a815b8d6", - parent_menu_item_id="eee0c2d0-2b3a-4899-a708-658475c38926", - available=None, - content=ContentItem( - categories=["music"], - content_uri="netRadio://6629967157728971", - label="Yle Radio Suomi Helsinki", - source=SourceTypeEnum(value="netRadio"), - ), - fixed=True, - id="b355888b-2cde-5f94-8592-d47b71d52a27", - ), - # Has "hdmi" as category, so should be included in video sources - "b6591565-80f4-4356-bcd9-c92ca247f0a9": RemoteMenuItem( - action_list=[ - Action( - button_name=None, - content_id="tv://hdmi_1", - deezer_user_id=None, - gain_db=None, - listening_mode_id=None, - preset_key=None, - queue_item=None, - queue_settings=None, - radio_station_id=None, - source=None, - speaker_group_id=None, - stand_position=None, - stop_duration=None, - tone_name=None, - type="triggerContent", - volume_level=None, - ) - ], - scene_list=None, - disabled=False, - dynamic_list="none", - first_child_menu_item_id=None, - label="HDMI A", - next_sibling_menu_item_id="0ba98974-7b1f-40dc-bc48-fbacbb0f1793", - parent_menu_item_id="b66c835b-6b98-4400-8f84-6348043792c7", - available=True, - content=ContentItem( - categories=["hdmi"], - content_uri="tv://hdmi_1", - label="HDMI A", - source=SourceTypeEnum(value="tv"), - ), - fixed=False, - id="b6591565-80f4-4356-bcd9-c92ca247f0a9", - ), - # The parent remote menu item. 
Has the TV label and should therefore not be included in video sources - "b66c835b-6b98-4400-8f84-6348043792c7": RemoteMenuItem( - action_list=[], - scene_list=None, - disabled=False, - dynamic_list="none", - first_child_menu_item_id="b6591565-80f4-4356-bcd9-c92ca247f0a9", - label="TV", - next_sibling_menu_item_id="0c4547fe-d3cc-4348-a425-473595b8c9fb", - parent_menu_item_id=None, - available=True, - content=None, - fixed=True, - id="b66c835b-6b98-4400-8f84-6348043792c7", - ), - # Has an empty content, so should not be included - "64c9da45-3682-44a4-8030-09ed3ef44160": RemoteMenuItem( - action_list=[], - scene_list=None, - disabled=False, - dynamic_list="none", - first_child_menu_item_id=None, - label="ListeningPosition", - next_sibling_menu_item_id=None, - parent_menu_item_id="0c4547fe-d3cc-4348-a425-473595b8c9fb", - available=True, - content=None, - fixed=True, - id="64c9da45-3682-44a4-8030-09ed3ef44160", - ), - } - client.get_beolink_peers = AsyncMock() - client.get_beolink_peers.return_value = [ - BeolinkPeer( - friendly_name=TEST_FRIENDLY_NAME_3, - jid=TEST_JID_3, - ip_address=TEST_HOST_3, - ), - BeolinkPeer( - friendly_name=TEST_FRIENDLY_NAME_4, - jid=TEST_JID_4, - ip_address=TEST_HOST_4, - ), - ] - client.get_beolink_listeners = AsyncMock() - client.get_beolink_listeners.return_value = [ - BeolinkPeer( - friendly_name=TEST_FRIENDLY_NAME_3, - jid=TEST_JID_3, - ip_address=TEST_HOST_3, - ), - BeolinkPeer( - friendly_name=TEST_FRIENDLY_NAME_4, - jid=TEST_JID_4, - ip_address=TEST_HOST_4, - ), - ] - - client.get_listening_mode_set = AsyncMock() - client.get_listening_mode_set.return_value = [ - ListeningMode( - id=TEST_SOUND_MODE, - name=TEST_SOUND_MODE_NAME, - features=ListeningModeFeatures(), - triggers=[ListeningModeTrigger()], - ), - ListeningMode( - id=TEST_SOUND_MODE_2, - name=TEST_SOUND_MODE_NAME, - features=ListeningModeFeatures(), - triggers=[ListeningModeTrigger()], - ), - ListeningMode( - id=345, - name=f"{TEST_SOUND_MODE_NAME} 2", - features=ListeningModeFeatures(), - triggers=[ListeningModeTrigger()], - ), - ] - client.get_active_listening_mode = AsyncMock() - client.get_active_listening_mode.return_value = ListeningModeRef( - href="", - id=123, - ) - client.get_settings_queue = AsyncMock() - client.get_settings_queue.return_value = PlayQueueSettings( - repeat="none", - shuffle=False, - ) - - client.post_standby = AsyncMock() - client.set_current_volume_level = AsyncMock() - client.set_volume_mute = AsyncMock() - client.post_playback_command = AsyncMock() - client.seek_to_position = AsyncMock() - client.post_clear_queue = AsyncMock() - client.post_overlay_play = AsyncMock() - client.post_uri_source = AsyncMock() - client.run_provided_scene = AsyncMock() - client.activate_preset = AsyncMock() - client.start_deezer_flow = AsyncMock() - client.add_to_queue = AsyncMock() - client.post_remote_trigger = AsyncMock() - client.set_active_source = AsyncMock() - client.post_beolink_expand = AsyncMock() - client.join_beolink_peer = AsyncMock() - client.post_beolink_unexpand = AsyncMock() - client.post_beolink_leave = AsyncMock() - client.post_beolink_allstandby = AsyncMock() - client.join_latest_beolink_experience = AsyncMock() - client.activate_listening_mode = AsyncMock() - client.set_settings_queue = AsyncMock() # Non-REST API client methods client.check_device_connection = AsyncMock() client.close_api_client = AsyncMock() - - # WebSocket listener client.connect_notifications = AsyncMock() client.disconnect_notifications = Mock() - client.websocket_connected = False yield client 
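# --- Editorial sketch (not part of the patch): how the trimmed conftest fixtures
# above are typically consumed together. The test name below is hypothetical; the
# real consumers appear later in this diff (e.g. test_init.py::test_setup_entry).
# The final assertion is an illustrative check that assumes setup performs exactly
# one device-connection check, as the comment in test_init.py suggests.
from unittest.mock import AsyncMock

from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


async def test_entry_setup_sketch(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_mozart_client: AsyncMock,
) -> None:
    """Illustrative only: set up the config entry against the mocked MozartClient."""
    # Register the mocked entry and let the integration set it up.
    mock_config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(mock_config_entry.entry_id)
    await hass.async_block_till_done()

    # The mocked client should have been asked to verify the device connection.
    mock_mozart_client.check_device_connection.assert_called_once()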
@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: +def mock_setup_entry(): """Mock successful setup entry.""" with patch( "homeassistant.components.bang_olufsen.async_setup_entry", return_value=True diff --git a/tests/components/bang_olufsen/const.py b/tests/components/bang_olufsen/const.py index 6602a898eb6..187f93108a1 100644 --- a/tests/components/bang_olufsen/const.py +++ b/tests/components/bang_olufsen/const.py @@ -1,27 +1,6 @@ """Constants used for testing the bang_olufsen integration.""" from ipaddress import IPv4Address, IPv6Address -from unittest.mock import Mock - -from mozart_api.exceptions import ApiException -from mozart_api.models import ( - Action, - ListeningModeRef, - OverlayPlayRequest, - OverlayPlayRequestTextToSpeechTextToSpeech, - PlaybackContentMetadata, - PlaybackError, - PlaybackProgress, - PlayQueueItem, - PlayQueueItemType, - RenderingState, - SceneProperties, - Source, - UserFlow, - VolumeLevel, - VolumeMute, - VolumeState, -) from homeassistant.components.bang_olufsen.const import ( ATTR_FRIENDLY_NAME, @@ -29,7 +8,6 @@ from homeassistant.components.bang_olufsen.const import ( ATTR_SERIAL_NUMBER, ATTR_TYPE_NUMBER, CONF_BEOLINK_JID, - BangOlufsenSource, ) from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_NAME @@ -41,29 +19,12 @@ TEST_MODEL_BALANCE = "Beosound Balance" TEST_MODEL_THEATRE = "Beosound Theatre" TEST_MODEL_LEVEL = "Beosound Level" TEST_SERIAL_NUMBER = "11111111" -TEST_SERIAL_NUMBER_2 = "22222222" TEST_NAME = f"{TEST_MODEL_BALANCE}-{TEST_SERIAL_NUMBER}" -TEST_NAME_2 = f"{TEST_MODEL_BALANCE}-{TEST_SERIAL_NUMBER_2}" TEST_FRIENDLY_NAME = "Living room Balance" TEST_TYPE_NUMBER = "1111" TEST_ITEM_NUMBER = "1111111" TEST_JID_1 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.{TEST_SERIAL_NUMBER}@products.bang-olufsen.com" -TEST_MEDIA_PLAYER_ENTITY_ID = "media_player.beosound_balance_11111111" -TEST_FRIENDLY_NAME_2 = "Laundry room Balance" -TEST_JID_2 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.22222222@products.bang-olufsen.com" -TEST_MEDIA_PLAYER_ENTITY_ID_2 = "media_player.beosound_balance_22222222" -TEST_HOST_2 = "192.168.0.2" - -TEST_FRIENDLY_NAME_3 = "Lego room Balance" -TEST_JID_3 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.33333333@products.bang-olufsen.com" -TEST_MEDIA_PLAYER_ENTITY_ID_3 = "media_player.beosound_balance_33333333" -TEST_HOST_3 = "192.168.0.3" - -TEST_FRIENDLY_NAME_4 = "Lounge room Balance" -TEST_JID_4 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.44444444@products.bang-olufsen.com" -TEST_MEDIA_PLAYER_ENTITY_ID_4 = "media_player.beosound_balance_44444444" -TEST_HOST_4 = "192.168.0.4" TEST_HOSTNAME_ZEROCONF = TEST_NAME.replace(" ", "-") + ".local." TEST_TYPE_ZEROCONF = "_bangolufsen._tcp.local." 
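# --- Editorial sketch (not part of the patch): the derived constants above follow
# a fixed pattern; this self-contained snippet just makes the derivation explicit.
# The resulting JID can be checked against the JIDs that appear in the (deleted)
# snapshot file further down in this diff.
TEST_MODEL_BALANCE = "Beosound Balance"
TEST_SERIAL_NUMBER = "11111111"
TEST_TYPE_NUMBER = "1111"
TEST_ITEM_NUMBER = "1111111"

# Device name, Beolink JID and zeroconf hostname are all derived from the above.
TEST_NAME = f"{TEST_MODEL_BALANCE}-{TEST_SERIAL_NUMBER}"
TEST_JID_1 = (
    f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.{TEST_SERIAL_NUMBER}"
    "@products.bang-olufsen.com"
)
TEST_HOSTNAME_ZEROCONF = TEST_NAME.replace(" ", "-") + ".local."

assert TEST_NAME == "Beosound Balance-11111111"
assert TEST_JID_1 == "1111.1111111.11111111@products.bang-olufsen.com"
assert TEST_HOSTNAME_ZEROCONF == "Beosound-Balance-11111111.local."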
@@ -79,12 +40,6 @@ TEST_DATA_CREATE_ENTRY = { CONF_BEOLINK_JID: TEST_JID_1, CONF_NAME: TEST_NAME, } -TEST_DATA_CREATE_ENTRY_2 = { - CONF_HOST: TEST_HOST, - CONF_MODEL: TEST_MODEL_BALANCE, - CONF_BEOLINK_JID: TEST_JID_2, - CONF_NAME: TEST_NAME_2, -} TEST_DATA_ZEROCONF = ZeroconfServiceInfo( ip_address=IPv4Address(TEST_HOST), @@ -125,95 +80,3 @@ TEST_DATA_ZEROCONF_IPV6 = ZeroconfServiceInfo( ATTR_ITEM_NUMBER: TEST_ITEM_NUMBER, }, ) - -TEST_SOURCE = Source( - name="Tidal", id="tidal", is_seekable=True, is_enabled=True, is_playable=True -) -TEST_AUDIO_SOURCES = [TEST_SOURCE.name, BangOlufsenSource.LINE_IN.name] -TEST_VIDEO_SOURCES = ["HDMI A"] -TEST_SOURCES = TEST_AUDIO_SOURCES + TEST_VIDEO_SOURCES -TEST_FALLBACK_SOURCES = [ - "Audio Streamer", - "Bluetooth", - "Spotify Connect", - "Line-In", - "Optical", - "B&O Radio", - "Deezer", - "Tidal Connect", -] -TEST_PLAYBACK_METADATA = PlaybackContentMetadata( - album_name="Test album", - artist_name="Test artist", - organization="Test organization", - title="Test title", - total_duration_seconds=123, - track=1, -) -TEST_PLAYBACK_ERROR = PlaybackError(error="Test error") -TEST_PLAYBACK_PROGRESS = PlaybackProgress(progress=123) -TEST_PLAYBACK_STATE_PAUSED = RenderingState(value="paused") -TEST_PLAYBACK_STATE_PLAYING = RenderingState(value="started") -TEST_VOLUME = VolumeState(level=VolumeLevel(level=40)) -TEST_VOLUME_HOME_ASSISTANT_FORMAT = 0.4 -TEST_PLAYBACK_STATE_TURN_OFF = RenderingState(value="stopped") -TEST_VOLUME_MUTED = VolumeState( - muted=VolumeMute(muted=True), level=VolumeLevel(level=40) -) -TEST_VOLUME_MUTED_HOME_ASSISTANT_FORMAT = True -TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT = 10.0 -TEST_SEEK_POSITION = 10000 -TEST_OVERLAY_INVALID_OFFSET_VOLUME_TTS = OverlayPlayRequest( - text_to_speech=OverlayPlayRequestTextToSpeechTextToSpeech( - lang="da-dk", text="Dette er en test" - ) -) -TEST_OVERLAY_OFFSET_VOLUME_TTS = OverlayPlayRequest( - text_to_speech=OverlayPlayRequestTextToSpeechTextToSpeech( - lang="en-us", text="This is a test" - ), - volume_absolute=60, -) -TEST_RADIO_STATION = SceneProperties( - action_list=[ - Action( - type="radio", - radio_station_id="1234567890123456", - ) - ] -) -TEST_DEEZER_FLOW = UserFlow(user_id="123") -TEST_DEEZER_PLAYLIST = PlayQueueItem( - provider=PlayQueueItemType(value="deezer"), - start_now_from_position=123, - type="playlist", - uri="playlist:1234567890", -) -TEST_DEEZER_TRACK = PlayQueueItem( - provider=PlayQueueItemType(value="deezer"), - start_now_from_position=0, - type="track", - uri="1234567890", -) - -# codespell can't see the escaped ', so it thinks the word is misspelled -TEST_DEEZER_INVALID_FLOW = ApiException( - status=400, - reason="Bad Request", - http_resp=Mock( - status=400, - reason="Bad Request", - data='{"message": "Couldn\'t start user flow for me"}', # codespell:ignore - ), -) -TEST_SOUND_MODE = 123 -TEST_SOUND_MODE_2 = 234 -TEST_SOUND_MODE_NAME = "Test Listening Mode" -TEST_ACTIVE_SOUND_MODE_NAME = f"{TEST_SOUND_MODE_NAME} ({TEST_SOUND_MODE})" -TEST_ACTIVE_SOUND_MODE_NAME_2 = f"{TEST_SOUND_MODE_NAME} ({TEST_SOUND_MODE_2})" -TEST_LISTENING_MODE_REF = ListeningModeRef(href="", id=TEST_SOUND_MODE_2) -TEST_SOUND_MODES = [ - TEST_ACTIVE_SOUND_MODE_NAME, - TEST_ACTIVE_SOUND_MODE_NAME_2, - f"{TEST_SOUND_MODE_NAME} 2 (345)", -] diff --git a/tests/components/bang_olufsen/snapshots/test_media_player.ambr b/tests/components/bang_olufsen/snapshots/test_media_player.ambr deleted file mode 100644 index ea96e286821..00000000000 --- a/tests/components/bang_olufsen/snapshots/test_media_player.ambr 
+++ /dev/null @@ -1,874 +0,0 @@ -# serializer version: 1 -# name: test_async_beolink_allstandby - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_beolink_expand[all_discovered-True-None-log_messages0-2] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_beolink_expand[all_discovered-True-expand_side_effect1-log_messages1-2] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': 
'1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_beolink_expand[beolink_jids-parameter_value2-None-log_messages2-1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_beolink_expand[beolink_jids-parameter_value3-expand_side_effect3-log_messages3-1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': 
list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_beolink_join - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_beolink_unexpand - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players[group_members0-1-0] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ 
- 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players[group_members0-1-0].1 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_22222222', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_22222222', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players[group_members1-0-1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 
'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players[group_members1-0-1].1 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_22222222', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_22222222', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players_invalid[source0-group_members0-expected_result0-invalid_source] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'media_position': 0, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Line-In', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players_invalid[source0-group_members0-expected_result0-invalid_source].1 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 
'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_22222222', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_22222222', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players_invalid[source1-group_members1-expected_result1-invalid_grouping_entity] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players_invalid[source1-group_members1-expected_result1-invalid_grouping_entity].1 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_22222222', - 
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_22222222', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_unjoin_player - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_update_beolink_listener - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'leader': dict({ - 'Laundry room Balance': '1111.1111111.22222222@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_22222222', - 'media_player.beosound_balance_11111111', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_update_beolink_listener.1 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': 
'1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_22222222', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_22222222', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- diff --git a/tests/components/bang_olufsen/test_config_flow.py b/tests/components/bang_olufsen/test_config_flow.py index 5d5f34a79e6..ad513905f16 100644 --- a/tests/components/bang_olufsen/test_config_flow.py +++ b/tests/components/bang_olufsen/test_config_flow.py @@ -1,6 +1,6 @@ """Test the bang_olufsen config_flow.""" -from unittest.mock import AsyncMock, Mock +from unittest.mock import Mock from aiohttp.client_exceptions import ClientConnectorError from mozart_api.exceptions import ApiException @@ -25,7 +25,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") async def test_config_flow_timeout_error( - hass: HomeAssistant, mock_mozart_client: AsyncMock + hass: HomeAssistant, mock_mozart_client ) -> None: """Test we handle timeout_error.""" mock_mozart_client.get_beolink_self.side_effect = TimeoutError() @@ -42,7 +42,7 @@ async def test_config_flow_timeout_error( async def test_config_flow_client_connector_error( - hass: HomeAssistant, mock_mozart_client: AsyncMock + hass: HomeAssistant, mock_mozart_client ) -> None: """Test we handle client_connector_error.""" mock_mozart_client.get_beolink_self.side_effect = ClientConnectorError( @@ -73,7 +73,7 @@ async def test_config_flow_invalid_ip(hass: HomeAssistant) -> None: async def test_config_flow_api_exception( - hass: HomeAssistant, mock_mozart_client: AsyncMock + hass: HomeAssistant, mock_mozart_client ) -> None: """Test we handle api_exception.""" mock_mozart_client.get_beolink_self.side_effect = ApiException() @@ -89,7 +89,7 @@ async def test_config_flow_api_exception( assert mock_mozart_client.get_beolink_self.call_count == 1 -async def test_config_flow(hass: HomeAssistant, mock_mozart_client: AsyncMock) -> None: +async def test_config_flow(hass: HomeAssistant, mock_mozart_client) -> None: """Test config flow.""" result_init = await hass.config_entries.flow.async_init( @@ -112,9 +112,7 @@ async def test_config_flow(hass: HomeAssistant, mock_mozart_client: AsyncMock) - assert mock_mozart_client.get_beolink_self.call_count == 1 -async def test_config_flow_zeroconf( - hass: HomeAssistant, mock_mozart_client: AsyncMock -) -> None: +async def test_config_flow_zeroconf(hass: HomeAssistant, mock_mozart_client) -> None: """Test zeroconf discovery.""" result_zeroconf = await hass.config_entries.flow.async_init( @@ -134,7 +132,7 @@ async def test_config_flow_zeroconf( assert result_confirm["type"] 
is FlowResultType.CREATE_ENTRY assert result_confirm["data"] == TEST_DATA_CREATE_ENTRY - assert mock_mozart_client.get_beolink_self.call_count == 1 + assert mock_mozart_client.get_beolink_self.call_count == 0 async def test_config_flow_zeroconf_not_mozart_device(hass: HomeAssistant) -> None: @@ -161,21 +159,3 @@ async def test_config_flow_zeroconf_ipv6(hass: HomeAssistant) -> None: assert result_user["type"] is FlowResultType.ABORT assert result_user["reason"] == "ipv6_address" - - -async def test_config_flow_zeroconf_invalid_ip( - hass: HomeAssistant, mock_mozart_client: AsyncMock -) -> None: - """Test zeroconf discovery with invalid IP address.""" - mock_mozart_client.get_beolink_self.side_effect = ClientConnectorError( - Mock(), Mock() - ) - - result_user = await hass.config_entries.flow.async_init( - handler=DOMAIN, - context={CONF_SOURCE: SOURCE_ZEROCONF}, - data=TEST_DATA_ZEROCONF, - ) - - assert result_user["type"] is FlowResultType.ABORT - assert result_user["reason"] == "invalid_address" diff --git a/tests/components/bang_olufsen/test_init.py b/tests/components/bang_olufsen/test_init.py index c8e4c05f9ab..11742b846ae 100644 --- a/tests/components/bang_olufsen/test_init.py +++ b/tests/components/bang_olufsen/test_init.py @@ -1,7 +1,5 @@ """Test the bang_olufsen __init__.""" -from unittest.mock import AsyncMock - from aiohttp.client_exceptions import ServerTimeoutError from homeassistant.components.bang_olufsen import DOMAIN @@ -9,16 +7,14 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceRegistry -from .const import TEST_FRIENDLY_NAME, TEST_MODEL_BALANCE, TEST_SERIAL_NUMBER - -from tests.common import MockConfigEntry +from .const import TEST_MODEL_BALANCE, TEST_NAME, TEST_SERIAL_NUMBER async def test_setup_entry( hass: HomeAssistant, + mock_config_entry, + mock_mozart_client, device_registry: DeviceRegistry, - mock_config_entry: MockConfigEntry, - mock_mozart_client: AsyncMock, ) -> None: """Test async_setup_entry.""" @@ -35,8 +31,7 @@ async def test_setup_entry( identifiers={(DOMAIN, TEST_SERIAL_NUMBER)} ) assert device is not None - # Is usually TEST_NAME, but is updated to the device's friendly name by _update_name_and_beolink - assert device.name == TEST_FRIENDLY_NAME + assert device.name == TEST_NAME assert device.model == TEST_MODEL_BALANCE # Ensure that the connection has been checked WebSocket connection has been initialized @@ -46,9 +41,7 @@ async def test_setup_entry( async def test_setup_entry_failed( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_mozart_client: AsyncMock, + hass: HomeAssistant, mock_config_entry, mock_mozart_client ) -> None: """Test failed async_setup_entry.""" @@ -73,9 +66,7 @@ async def test_setup_entry_failed( async def test_unload_entry( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_mozart_client: AsyncMock, + hass: HomeAssistant, mock_config_entry, mock_mozart_client ) -> None: """Test unload_entry.""" @@ -86,7 +77,6 @@ async def test_unload_entry( await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state == ConfigEntryState.LOADED - assert hasattr(mock_config_entry, "runtime_data") # Unload entry await hass.config_entries.async_unload(mock_config_entry.entry_id) @@ -96,5 +86,5 @@ async def test_unload_entry( assert mock_mozart_client.close_api_client.call_count == 1 # Ensure that the entry is not loaded and has been removed from hass - assert not 
hasattr(mock_config_entry, "runtime_data") + assert mock_config_entry.entry_id not in hass.data[DOMAIN] assert mock_config_entry.state == ConfigEntryState.NOT_LOADED diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py deleted file mode 100644 index aa35b0265dc..00000000000 --- a/tests/components/bang_olufsen/test_media_player.py +++ /dev/null @@ -1,1780 +0,0 @@ -"""Test the Bang & Olufsen media_player entity.""" - -from contextlib import AbstractContextManager, nullcontext as does_not_raise -import logging -from unittest.mock import AsyncMock, patch - -from mozart_api.exceptions import NotFoundException -from mozart_api.models import ( - BeolinkLeader, - BeolinkSelf, - PlaybackContentMetadata, - PlayQueueSettings, - RenderingState, - Source, - SourceArray, - WebsocketNotificationTag, -) -import pytest -from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props - -from homeassistant.components.bang_olufsen.const import ( - BANG_OLUFSEN_REPEAT_FROM_HA, - BANG_OLUFSEN_STATES, - DOMAIN, - BangOlufsenSource, -) -from homeassistant.components.media_player import ( - ATTR_GROUP_MEMBERS, - ATTR_INPUT_SOURCE, - ATTR_INPUT_SOURCE_LIST, - ATTR_MEDIA_ALBUM_ARTIST, - ATTR_MEDIA_ALBUM_NAME, - ATTR_MEDIA_ANNOUNCE, - ATTR_MEDIA_CHANNEL, - ATTR_MEDIA_CONTENT_ID, - ATTR_MEDIA_CONTENT_TYPE, - ATTR_MEDIA_DURATION, - ATTR_MEDIA_EXTRA, - ATTR_MEDIA_POSITION, - ATTR_MEDIA_POSITION_UPDATED_AT, - ATTR_MEDIA_REPEAT, - ATTR_MEDIA_SEEK_POSITION, - ATTR_MEDIA_SHUFFLE, - ATTR_MEDIA_TITLE, - ATTR_MEDIA_TRACK, - ATTR_MEDIA_VOLUME_LEVEL, - ATTR_MEDIA_VOLUME_MUTED, - ATTR_SOUND_MODE, - ATTR_SOUND_MODE_LIST, - DOMAIN as MEDIA_PLAYER_DOMAIN, - SERVICE_CLEAR_PLAYLIST, - SERVICE_JOIN, - SERVICE_MEDIA_NEXT_TRACK, - SERVICE_MEDIA_PLAY_PAUSE, - SERVICE_MEDIA_PREVIOUS_TRACK, - SERVICE_MEDIA_SEEK, - SERVICE_MEDIA_STOP, - SERVICE_PLAY_MEDIA, - SERVICE_REPEAT_SET, - SERVICE_SELECT_SOUND_MODE, - SERVICE_SELECT_SOURCE, - SERVICE_SHUFFLE_SET, - SERVICE_TURN_OFF, - SERVICE_UNJOIN, - SERVICE_VOLUME_MUTE, - SERVICE_VOLUME_SET, - MediaPlayerState, - MediaType, - RepeatMode, -) -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers.device_registry import DeviceRegistry -from homeassistant.setup import async_setup_component - -from .const import ( - TEST_ACTIVE_SOUND_MODE_NAME, - TEST_ACTIVE_SOUND_MODE_NAME_2, - TEST_AUDIO_SOURCES, - TEST_DEEZER_FLOW, - TEST_DEEZER_INVALID_FLOW, - TEST_DEEZER_PLAYLIST, - TEST_DEEZER_TRACK, - TEST_FALLBACK_SOURCES, - TEST_FRIENDLY_NAME_2, - TEST_JID_1, - TEST_JID_2, - TEST_JID_3, - TEST_JID_4, - TEST_LISTENING_MODE_REF, - TEST_MEDIA_PLAYER_ENTITY_ID, - TEST_MEDIA_PLAYER_ENTITY_ID_2, - TEST_MEDIA_PLAYER_ENTITY_ID_3, - TEST_OVERLAY_INVALID_OFFSET_VOLUME_TTS, - TEST_OVERLAY_OFFSET_VOLUME_TTS, - TEST_PLAYBACK_ERROR, - TEST_PLAYBACK_METADATA, - TEST_PLAYBACK_PROGRESS, - TEST_PLAYBACK_STATE_PAUSED, - TEST_PLAYBACK_STATE_PLAYING, - TEST_PLAYBACK_STATE_TURN_OFF, - TEST_RADIO_STATION, - TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, - TEST_SOUND_MODE_2, - TEST_SOUND_MODES, - TEST_SOURCE, - TEST_SOURCES, - TEST_VIDEO_SOURCES, - TEST_VOLUME, - TEST_VOLUME_HOME_ASSISTANT_FORMAT, - TEST_VOLUME_MUTED, - TEST_VOLUME_MUTED_HOME_ASSISTANT_FORMAT, -) - -from tests.common import MockConfigEntry -from tests.typing import WebSocketGenerator - - -async def test_initialization( - hass: HomeAssistant, - 
caplog: pytest.LogCaptureFixture, - mock_config_entry: MockConfigEntry, - mock_mozart_client: AsyncMock, -) -> None: - """Test the integration is initialized properly in _initialize, async_added_to_hass and __init__.""" - - caplog.set_level(logging.DEBUG) - - # Setup entity - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - # Ensure that the logger has been called with the debug message - assert "Connected to: Beosound Balance 11111111 running SW 1.0.0" in caplog.text - - # Check state (The initial state in this test does not contain all that much. - # States are tested using simulated WebSocket events.) - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_INPUT_SOURCE_LIST] == TEST_SOURCES - assert states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] - assert states.attributes[ATTR_SOUND_MODE_LIST] == TEST_SOUND_MODES - - # Check API calls - mock_mozart_client.get_softwareupdate_status.assert_called_once() - mock_mozart_client.get_product_state.assert_called_once() - mock_mozart_client.get_available_sources.assert_called_once() - mock_mozart_client.get_remote_menu.assert_called_once() - mock_mozart_client.get_listening_mode_set.assert_called_once() - mock_mozart_client.get_active_listening_mode.assert_called_once() - mock_mozart_client.get_beolink_self.assert_called_once() - mock_mozart_client.get_beolink_peers.assert_called_once() - mock_mozart_client.get_beolink_listeners.assert_called_once() - - -async def test_async_update_sources_audio_only( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_mozart_client: AsyncMock, -) -> None: - """Test sources are correctly handled in _async_update_sources.""" - mock_mozart_client.get_remote_menu.return_value = {} - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_INPUT_SOURCE_LIST] == TEST_AUDIO_SOURCES - - -async def test_async_update_sources_outdated_api( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test fallback sources are correctly handled in _async_update_sources.""" - mock_mozart_client.get_available_sources.side_effect = ValueError() - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert ( - states.attributes[ATTR_INPUT_SOURCE_LIST] - == TEST_FALLBACK_SOURCES + TEST_VIDEO_SOURCES - ) - - -async def test_async_update_sources_remote( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test _async_update_sources is called when there are new video sources.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - notification_callback = mock_mozart_client.get_notification_notifications.call_args[ - 0 - ][0] - - # This is not an ideal check, but I couldn't get anything else to work - assert mock_mozart_client.get_available_sources.call_count == 1 - assert mock_mozart_client.get_remote_menu.call_count == 1 - - # Send the remote menu Websocket event - notification_callback(WebsocketNotificationTag(value="remoteMenuChanged")) - - assert mock_mozart_client.get_available_sources.call_count == 2 - assert mock_mozart_client.get_remote_menu.call_count == 2 - - -async def 
test_async_update_sources_availability( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test that the playback_source WebSocket event updates available playback sources.""" - # Remove video sources to simplify test - mock_mozart_client.get_remote_menu.return_value = {} - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - playback_source_callback = ( - mock_mozart_client.get_playback_source_notifications.call_args[0][0] - ) - - assert mock_mozart_client.get_available_sources.call_count == 1 - - # Add a source that is available and playable - mock_mozart_client.get_available_sources.return_value = SourceArray( - items=[TEST_SOURCE] - ) - - # Send playback_source. The source is not actually used, so its attributes don't matter - playback_source_callback(Source()) - - assert mock_mozart_client.get_available_sources.call_count == 2 - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_INPUT_SOURCE_LIST] == [TEST_SOURCE.name] - - -async def test_async_update_playback_metadata( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test _async_update_playback_metadata.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - playback_metadata_callback = ( - mock_mozart_client.get_playback_metadata_notifications.call_args[0][0] - ) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert ATTR_MEDIA_DURATION not in states.attributes - assert ATTR_MEDIA_TITLE not in states.attributes - assert ATTR_MEDIA_ALBUM_NAME not in states.attributes - assert ATTR_MEDIA_ALBUM_ARTIST not in states.attributes - assert ATTR_MEDIA_TRACK not in states.attributes - assert ATTR_MEDIA_CHANNEL not in states.attributes - - # Send the WebSocket event dispatch - playback_metadata_callback(TEST_PLAYBACK_METADATA) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert ( - states.attributes[ATTR_MEDIA_DURATION] - == TEST_PLAYBACK_METADATA.total_duration_seconds - ) - assert states.attributes[ATTR_MEDIA_TITLE] == TEST_PLAYBACK_METADATA.title - assert states.attributes[ATTR_MEDIA_ALBUM_NAME] == TEST_PLAYBACK_METADATA.album_name - assert ( - states.attributes[ATTR_MEDIA_ALBUM_ARTIST] == TEST_PLAYBACK_METADATA.artist_name - ) - assert states.attributes[ATTR_MEDIA_TRACK] == TEST_PLAYBACK_METADATA.track - assert states.attributes[ATTR_MEDIA_CHANNEL] == TEST_PLAYBACK_METADATA.organization - - -async def test_async_update_playback_error( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test _async_update_playback_error.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - playback_error_callback = ( - mock_mozart_client.get_playback_error_notifications.call_args[0][0] - ) - - # The async_dispatcher_send function seems to swallow exceptions, making pytest.raises unusable - playback_error_callback(TEST_PLAYBACK_ERROR) - - assert ( - "Exception in _async_update_playback_error when dispatching '11111111_playback_error': (PlaybackError(error='Test error', item=None),)" - in caplog.text - ) - - -async def test_async_update_playback_progress( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test 
_async_update_playback_progress.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - playback_progress_callback = ( - mock_mozart_client.get_playback_progress_notifications.call_args[0][0] - ) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert ATTR_MEDIA_POSITION not in states.attributes - old_updated_at = states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] - assert old_updated_at - - playback_progress_callback(TEST_PLAYBACK_PROGRESS) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_MEDIA_POSITION] == TEST_PLAYBACK_PROGRESS.progress - new_updated_at = states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] - assert new_updated_at - assert old_updated_at != new_updated_at - - -async def test_async_update_playback_state( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test _async_update_playback_state.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - playback_state_callback = ( - mock_mozart_client.get_playback_state_notifications.call_args[0][0] - ) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.state == MediaPlayerState.PLAYING - - playback_state_callback(TEST_PLAYBACK_STATE_PAUSED) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.state == TEST_PLAYBACK_STATE_PAUSED.value - - -@pytest.mark.parametrize( - ("source", "content_type", "progress", "metadata"), - [ - # Normal source, music mediatype expected - ( - TEST_SOURCE, - MediaType.MUSIC, - TEST_PLAYBACK_PROGRESS.progress, - PlaybackContentMetadata(), - ), - # URI source, url media type expected - ( - BangOlufsenSource.URI_STREAMER, - MediaType.URL, - TEST_PLAYBACK_PROGRESS.progress, - PlaybackContentMetadata(), - ), - # Line-In source,media type expected, progress 0 expected - ( - BangOlufsenSource.LINE_IN, - MediaType.MUSIC, - 0, - PlaybackContentMetadata(), - ), - ], -) -async def test_async_update_source_change( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, - source: Source, - content_type: MediaType, - progress: int, - metadata: PlaybackContentMetadata, -) -> None: - """Test _async_update_source_change.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - playback_progress_callback = ( - mock_mozart_client.get_playback_progress_notifications.call_args[0][0] - ) - playback_metadata_callback = ( - mock_mozart_client.get_playback_metadata_notifications.call_args[0][0] - ) - source_change_callback = ( - mock_mozart_client.get_source_change_notifications.call_args[0][0] - ) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert ATTR_INPUT_SOURCE not in states.attributes - assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - - # Simulate progress attribute being available - playback_progress_callback(TEST_PLAYBACK_PROGRESS) - - # Simulate metadata - playback_metadata_callback(metadata) - source_change_callback(source) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_INPUT_SOURCE] == source.name - assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == content_type - assert states.attributes[ATTR_MEDIA_POSITION] == progress - - -async def test_async_turn_off( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - 
mock_config_entry: MockConfigEntry, -) -> None: - """Test async_turn_off.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - playback_state_callback = ( - mock_mozart_client.get_playback_state_notifications.call_args[0][0] - ) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, - blocking=True, - ) - - playback_state_callback(TEST_PLAYBACK_STATE_TURN_OFF) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert TEST_PLAYBACK_STATE_TURN_OFF.value - assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_TURN_OFF.value] - - # Check API call - mock_mozart_client.post_standby.assert_called_once() - - -async def test_async_set_volume_level( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_set_volume_level and _async_update_volume by proxy.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - volume_callback = mock_mozart_client.get_volume_notifications.call_args[0][0] - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert ATTR_MEDIA_VOLUME_LEVEL not in states.attributes - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_SET, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_VOLUME_LEVEL: TEST_VOLUME_HOME_ASSISTANT_FORMAT, - }, - blocking=True, - ) - - # The service call will trigger a WebSocket notification - volume_callback(TEST_VOLUME) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert ( - states.attributes[ATTR_MEDIA_VOLUME_LEVEL] == TEST_VOLUME_HOME_ASSISTANT_FORMAT - ) - - mock_mozart_client.set_current_volume_level.assert_called_once_with( - volume_level=TEST_VOLUME.level - ) - - -async def test_async_update_beolink_line_in( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test _async_update_beolink with line-in and no active Beolink session.""" - # Ensure no listeners - mock_mozart_client.get_beolink_listeners.return_value = [] - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - source_change_callback = ( - mock_mozart_client.get_source_change_notifications.call_args[0][0] - ) - beolink_callback = mock_mozart_client.get_notification_notifications.call_args[0][0] - - # Set source - source_change_callback(BangOlufsenSource.LINE_IN) - beolink_callback(WebsocketNotificationTag(value="beolinkListeners")) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes["group_members"] == [] - - # Called once during _initialize and once during _async_update_beolink - assert mock_mozart_client.get_beolink_listeners.call_count == 2 - assert mock_mozart_client.get_beolink_peers.call_count == 2 - - -async def test_async_update_beolink_listener( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, - mock_config_entry_2: MockConfigEntry, -) -> None: - """Test _async_update_beolink as a listener.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - playback_metadata_callback = ( - mock_mozart_client.get_playback_metadata_notifications.call_args[0][0] - ) - - # Add another entity - mock_config_entry_2.add_to_hass(hass) - await 
hass.config_entries.async_setup(mock_config_entry_2.entry_id) - - # Runs _async_update_beolink - playback_metadata_callback( - PlaybackContentMetadata( - remote_leader=BeolinkLeader( - friendly_name=TEST_FRIENDLY_NAME_2, jid=TEST_JID_2 - ) - ) - ) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes["group_members"] == [ - TEST_MEDIA_PLAYER_ENTITY_ID_2, - TEST_MEDIA_PLAYER_ENTITY_ID, - ] - - # Called once for each entity during _initialize - assert mock_mozart_client.get_beolink_listeners.call_count == 2 - # Called once for each entity during _initialize and - # once more during _async_update_beolink for the entity that has the callback associated with it. - assert mock_mozart_client.get_beolink_peers.call_count == 3 - - # Main entity - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - # Secondary entity - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - -async def test_async_update_name_and_beolink( - hass: HomeAssistant, - device_registry: DeviceRegistry, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test _async_update_name_and_beolink.""" - # Change response to ensure device name is changed - mock_mozart_client.get_beolink_self.return_value = BeolinkSelf( - friendly_name=TEST_FRIENDLY_NAME_2, jid=TEST_JID_1 - ) - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - configuration_callback = ( - mock_mozart_client.get_notification_notifications.call_args[0][0] - ) - # Trigger callback - configuration_callback(WebsocketNotificationTag(value="configuration")) - - await hass.async_block_till_done() - - assert mock_mozart_client.get_beolink_self.call_count == 2 - assert mock_mozart_client.get_beolink_peers.call_count == 2 - assert mock_mozart_client.get_beolink_listeners.call_count == 2 - - # Check that device name has been changed - assert mock_config_entry.unique_id - assert ( - device := device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} - ) - ) - assert device.name == TEST_FRIENDLY_NAME_2 - - -async def test_async_mute_volume( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_mute_volume.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - volume_callback = mock_mozart_client.get_volume_notifications.call_args[0][0] - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert ATTR_MEDIA_VOLUME_MUTED not in states.attributes - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_MUTE, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_VOLUME_MUTED: TEST_VOLUME_HOME_ASSISTANT_FORMAT, - }, - blocking=True, - ) - - # The service call will trigger a WebSocket notification - volume_callback(TEST_VOLUME_MUTED) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert ( - states.attributes[ATTR_MEDIA_VOLUME_MUTED] - == TEST_VOLUME_MUTED_HOME_ASSISTANT_FORMAT - ) - - mock_mozart_client.set_volume_mute.assert_called_once_with( - volume_mute=TEST_VOLUME_MUTED.muted - ) - - -@pytest.mark.parametrize( - ("initial_state", "command"), - [ - # Current state is playing, "pause" command expected - (TEST_PLAYBACK_STATE_PLAYING, "pause"), - # Current 
state is paused, "play" command expected - (TEST_PLAYBACK_STATE_PAUSED, "play"), - ], -) -async def test_async_media_play_pause( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, - initial_state: RenderingState, - command: str, -) -> None: - """Test async_media_play_pause.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - playback_state_callback = ( - mock_mozart_client.get_playback_state_notifications.call_args[0][0] - ) - - # Set the initial state - playback_state_callback(initial_state) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert initial_state.value - assert states.state == BANG_OLUFSEN_STATES[initial_state.value] - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_PLAY_PAUSE, - {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, - blocking=True, - ) - - mock_mozart_client.post_playback_command.assert_called_once_with(command=command) - - -async def test_async_media_stop( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_media_stop.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - playback_state_callback = ( - mock_mozart_client.get_playback_state_notifications.call_args[0][0] - ) - - # Set the state to playing - playback_state_callback(TEST_PLAYBACK_STATE_PLAYING) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert TEST_PLAYBACK_STATE_PLAYING.value - assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_PLAYING.value] - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_STOP, - {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, - blocking=True, - ) - - # Check API call - mock_mozart_client.post_playback_command.assert_called_once_with(command="stop") - - -async def test_async_media_next_track( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_media_next_track.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_NEXT_TRACK, - {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, - blocking=True, - ) - - mock_mozart_client.post_playback_command.assert_called_once_with(command="skip") - - -@pytest.mark.parametrize( - ("source", "expected_result", "seek_called_times"), - [ - # Seekable source, seek expected - (TEST_SOURCE, does_not_raise(), 1), - # Non seekable source, seek shouldn't work - (BangOlufsenSource.LINE_IN, pytest.raises(HomeAssistantError), 0), - # Malformed source, seek shouldn't work - (Source(), pytest.raises(HomeAssistantError), 0), - ], -) -async def test_async_media_seek( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, - source: Source, - expected_result: AbstractContextManager, - seek_called_times: int, -) -> None: - """Test async_media_seek.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - source_change_callback = ( - mock_mozart_client.get_source_change_notifications.call_args[0][0] - ) - - # Set the source - source_change_callback(source) - - # Check results - with expected_result: - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_SEEK, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - 
ATTR_MEDIA_SEEK_POSITION: TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, - }, - blocking=True, - ) - - assert mock_mozart_client.seek_to_position.call_count == seek_called_times - - -async def test_async_media_previous_track( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_media_previous_track.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_PREVIOUS_TRACK, - {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, - blocking=True, - ) - - mock_mozart_client.post_playback_command.assert_called_once_with(command="prev") - - -async def test_async_clear_playlist( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_clear_playlist.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_CLEAR_PLAYLIST, - {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, - blocking=True, - ) - - mock_mozart_client.post_clear_queue.assert_called_once() - - -@pytest.mark.parametrize( - ("source", "expected_result", "audio_source_call", "video_source_call"), - [ - # Invalid source - ("Test source", pytest.raises(ServiceValidationError), 0, 0), - # Valid audio source - (TEST_SOURCE.name, does_not_raise(), 1, 0), - # Valid video source - (TEST_VIDEO_SOURCES[0], does_not_raise(), 0, 1), - ], -) -async def test_async_select_source( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, - source: str, - expected_result: AbstractContextManager, - audio_source_call: int, - video_source_call: int, -) -> None: - """Test async_select_source with an invalid source.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - with expected_result: - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_SELECT_SOURCE, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_INPUT_SOURCE: source, - }, - blocking=True, - ) - - assert mock_mozart_client.set_active_source.call_count == audio_source_call - assert mock_mozart_client.post_remote_trigger.call_count == video_source_call - - -async def test_async_select_sound_mode( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_select_sound_mode.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_SOUND_MODE] == TEST_ACTIVE_SOUND_MODE_NAME - - active_listening_mode_callback = ( - mock_mozart_client.get_active_listening_mode_notifications.call_args[0][0] - ) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_SELECT_SOUND_MODE, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_SOUND_MODE: TEST_ACTIVE_SOUND_MODE_NAME_2, - }, - blocking=True, - ) - - active_listening_mode_callback(TEST_LISTENING_MODE_REF) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_SOUND_MODE] == TEST_ACTIVE_SOUND_MODE_NAME_2 - - mock_mozart_client.activate_listening_mode.assert_called_once_with( - id=TEST_SOUND_MODE_2 - ) - - -async def test_async_select_sound_mode_invalid( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - 
mock_config_entry: MockConfigEntry, -) -> None: - """Test async_select_sound_mode with an invalid sound_mode.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - with pytest.raises(ServiceValidationError) as exc_info: - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_SELECT_SOUND_MODE, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_SOUND_MODE: "invalid_sound_mode", - }, - blocking=True, - ) - - assert exc_info.value.translation_domain == DOMAIN - assert exc_info.value.translation_key == "invalid_sound_mode" - assert exc_info.errisinstance(ServiceValidationError) - - -async def test_async_play_media_invalid_type( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_play_media only accepts valid media types.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - with pytest.raises(ServiceValidationError) as exc_info: - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "test", - ATTR_MEDIA_CONTENT_TYPE: "invalid type", - }, - blocking=True, - ) - - assert exc_info.value.translation_domain == DOMAIN - assert exc_info.value.translation_key == "invalid_media_type" - assert exc_info.errisinstance(HomeAssistantError) - - -async def test_async_play_media_url( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_play_media URL.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - # Setup media source - await async_setup_component(hass, "media_source", {"media_source": {}}) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", - ATTR_MEDIA_CONTENT_TYPE: "audio/mpeg", - }, - blocking=True, - ) - - mock_mozart_client.post_uri_source.assert_called_once() - - -async def test_async_play_media_overlay_absolute_volume_uri( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_play_media overlay with Home Assistant local URI and absolute volume.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await async_setup_component(hass, "media_source", {"media_source": {}}) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", - ATTR_MEDIA_CONTENT_TYPE: "music", - ATTR_MEDIA_ANNOUNCE: True, - ATTR_MEDIA_EXTRA: {"overlay_absolute_volume": 60}, - }, - blocking=True, - ) - - mock_mozart_client.post_overlay_play.assert_called_once() - - # Check that the API call was as expected - args, _ = mock_mozart_client.post_overlay_play.call_args - assert args[0].volume_absolute == 60 - assert "/local/doorbell.mp3" in args[0].uri.location - - -async def test_async_play_media_overlay_invalid_offset_volume_tts( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_play_media with Home Assistant invalid offset volume and B&O tts.""" - - 
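# The tests in this module recover the WebSocket handlers that the entity registers
# on the mocked Mozart client by reading Mock.call_args, e.g.
# mock_mozart_client.get_playback_progress_notifications.call_args[0][0]. A minimal
# standalone sketch of that capture pattern, using a bare Mock and an illustrative
# handler rather than the integration's real fixtures:

from unittest.mock import Mock

mozart_client = Mock()

def progress_handler(progress) -> None:
    """Stand-in for the entity's playback progress handler."""

# What the integration would do during setup: register its handler.
mozart_client.get_playback_progress_notifications(progress_handler)

# What the tests then do: pull the registered handler back out of the mock and
# call it directly to simulate a WebSocket notification.
captured = mozart_client.get_playback_progress_notifications.call_args[0][0]
assert captured is progress_handler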
mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "Dette er en test", - ATTR_MEDIA_CONTENT_TYPE: "overlay_tts", - ATTR_MEDIA_ANNOUNCE: True, - ATTR_MEDIA_EXTRA: { - "overlay_offset_volume": 20, - "overlay_tts_language": "da-dk", - }, - }, - blocking=True, - ) - assert "Error setting volume" in caplog.text - - mock_mozart_client.post_overlay_play.assert_called_once_with( - TEST_OVERLAY_INVALID_OFFSET_VOLUME_TTS - ) - - -async def test_async_play_media_overlay_offset_volume_tts( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_play_media with Home Assistant invalid offset volume and B&O tts.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - volume_callback = mock_mozart_client.get_volume_notifications.call_args[0][0] - - # Set the volume to enable offset - volume_callback(TEST_VOLUME) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "This is a test", - ATTR_MEDIA_CONTENT_TYPE: "overlay_tts", - ATTR_MEDIA_ANNOUNCE: True, - ATTR_MEDIA_EXTRA: {"overlay_offset_volume": 20}, - }, - blocking=True, - ) - - mock_mozart_client.post_overlay_play.assert_called_once_with( - TEST_OVERLAY_OFFSET_VOLUME_TTS - ) - - -async def test_async_play_media_tts( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_play_media with Home Assistant tts.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await async_setup_component(hass, "media_source", {"media_source": {}}) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", - ATTR_MEDIA_CONTENT_TYPE: "provider", - }, - blocking=True, - ) - - mock_mozart_client.post_overlay_play.assert_called_once() - - -async def test_async_play_media_radio( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_play_media with B&O radio.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "1234567890123456", - ATTR_MEDIA_CONTENT_TYPE: "radio", - }, - blocking=True, - ) - - mock_mozart_client.run_provided_scene.assert_called_once_with( - scene_properties=TEST_RADIO_STATION - ) - - -async def test_async_play_media_favourite( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_play_media with B&O favourite.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "1", - ATTR_MEDIA_CONTENT_TYPE: "favourite", - }, - blocking=True, - ) - - 
mock_mozart_client.activate_preset.assert_called_once_with(id=int("1")) - - -async def test_async_play_media_deezer_flow( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_play_media with Deezer flow.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - # Send a service call - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "flow", - ATTR_MEDIA_CONTENT_TYPE: "deezer", - ATTR_MEDIA_EXTRA: {"id": "123"}, - }, - blocking=True, - ) - - mock_mozart_client.start_deezer_flow.assert_called_once_with( - user_flow=TEST_DEEZER_FLOW - ) - - -async def test_async_play_media_deezer_playlist( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_play_media with Deezer playlist.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "playlist:1234567890", - ATTR_MEDIA_CONTENT_TYPE: "deezer", - ATTR_MEDIA_EXTRA: {"start_from": 123}, - }, - blocking=True, - ) - - mock_mozart_client.add_to_queue.assert_called_once_with( - play_queue_item=TEST_DEEZER_PLAYLIST - ) - - -async def test_async_play_media_deezer_track( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_play_media with Deezer track.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "1234567890", - ATTR_MEDIA_CONTENT_TYPE: "deezer", - }, - blocking=True, - ) - - mock_mozart_client.add_to_queue.assert_called_once_with( - play_queue_item=TEST_DEEZER_TRACK - ) - - -async def test_async_play_media_invalid_deezer( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_play_media with an invalid/no Deezer login.""" - - mock_mozart_client.start_deezer_flow.side_effect = TEST_DEEZER_INVALID_FLOW - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - with pytest.raises(HomeAssistantError) as exc_info: - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "flow", - ATTR_MEDIA_CONTENT_TYPE: "deezer", - }, - blocking=True, - ) - - assert exc_info.value.translation_domain == DOMAIN - assert exc_info.value.translation_key == "play_media_error" - assert exc_info.errisinstance(HomeAssistantError) - - mock_mozart_client.start_deezer_flow.assert_called_once() - - -async def test_async_play_media_url_m3u( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_play_media URL with the m3u extension.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await async_setup_component(hass, "media_source", {"media_source": {}}) - - with ( - pytest.raises(HomeAssistantError) as exc_info, - patch( - 
"homeassistant.components.bang_olufsen.media_player.async_process_play_media_url", - return_value="https://test.com/test.m3u", - ), - ): - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", - ATTR_MEDIA_CONTENT_TYPE: "audio/mpeg", - }, - blocking=True, - ) - - # Check exception - assert exc_info.value.translation_domain == DOMAIN - assert exc_info.value.translation_key == "m3u_invalid_format" - assert exc_info.errisinstance(HomeAssistantError) - - mock_mozart_client.post_uri_source.assert_not_called() - - -@pytest.mark.parametrize( - ("child", "present"), - [ - # Audio source expected - ( - { - "title": "test.mp3", - "media_class": "music", - "media_content_type": "audio/mpeg", - "media_content_id": "media-source://media_source/local/test.mp3", - "can_play": True, - "can_expand": False, - "thumbnail": None, - "children_media_class": None, - }, - True, - ), - # Video source not expected - ( - { - "title": "test.mp4", - "media_class": "video", - "media_content_type": "video/mp4", - "media_content_id": ("media-source://media_source/local/test.mp4"), - "can_play": True, - "can_expand": False, - "thumbnail": None, - "children_media_class": None, - }, - False, - ), - ], -) -async def test_async_browse_media( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, - child: dict[str, str | bool | None], - present: bool, -) -> None: - """Test async_browse_media with audio and video source.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await async_setup_component(hass, "media_source", {"media_source": {}}) - - client = await hass_ws_client() - await client.send_json_auto_id( - { - "type": "media_player/browse_media", - "entity_id": TEST_MEDIA_PLAYER_ENTITY_ID, - } - ) - response = await client.receive_json() - assert response["success"] - - assert (child in response["result"]["children"]) is present - - -@pytest.mark.parametrize( - ("group_members", "expand_count", "join_count"), - [ - # Valid member - ([TEST_MEDIA_PLAYER_ENTITY_ID_2], 1, 0), - # Touch to join - ([], 0, 1), - ], -) -async def test_async_join_players( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, - mock_config_entry_2: MockConfigEntry, - group_members: list[str], - expand_count: int, - join_count: int, -) -> None: - """Test async_join_players.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - source_change_callback = ( - mock_mozart_client.get_source_change_notifications.call_args[0][0] - ) - - # Add another entity - mock_config_entry_2.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry_2.entry_id) - - # Set the source to a beolink expandable source - source_change_callback(TEST_SOURCE) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_JOIN, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_GROUP_MEMBERS: group_members, - }, - blocking=True, - ) - - assert mock_mozart_client.post_beolink_expand.call_count == expand_count - assert mock_mozart_client.join_latest_beolink_experience.call_count == join_count - - # Main entity - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == 
snapshot(exclude=props("media_position_updated_at")) - - # Secondary entity - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - -@pytest.mark.parametrize( - ("source", "group_members", "expected_result", "error_type"), - [ - # Invalid source - ( - BangOlufsenSource.LINE_IN, - [TEST_MEDIA_PLAYER_ENTITY_ID_2], - pytest.raises(ServiceValidationError), - "invalid_source", - ), - # Invalid media_player entity - ( - TEST_SOURCE, - [TEST_MEDIA_PLAYER_ENTITY_ID_3], - pytest.raises(ServiceValidationError), - "invalid_grouping_entity", - ), - ], -) -async def test_async_join_players_invalid( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, - mock_config_entry_2: MockConfigEntry, - source: Source, - group_members: list[str], - expected_result: AbstractContextManager, - error_type: str, -) -> None: - """Test async_join_players with an invalid media_player entity.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - source_change_callback = ( - mock_mozart_client.get_source_change_notifications.call_args[0][0] - ) - - mock_config_entry_2.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry_2.entry_id) - - source_change_callback(source) - - with expected_result as exc_info: - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_JOIN, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_GROUP_MEMBERS: group_members, - }, - blocking=True, - ) - - assert exc_info.value.translation_domain == DOMAIN - assert exc_info.value.translation_key == error_type - assert exc_info.errisinstance(HomeAssistantError) - - assert mock_mozart_client.post_beolink_expand.call_count == 0 - assert mock_mozart_client.join_latest_beolink_experience.call_count == 0 - - # Main entity - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - # Secondary entity - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - -async def test_async_unjoin_player( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_unjoin_player.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_UNJOIN, - {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, - blocking=True, - ) - - mock_mozart_client.post_beolink_leave.assert_called_once() - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - -async def test_async_beolink_join( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_beolink_join with defined JID.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - DOMAIN, - "beolink_join", - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - "beolink_jid": TEST_JID_2, - }, - blocking=True, - ) - - mock_mozart_client.join_beolink_peer.assert_called_once_with(jid=TEST_JID_2) - - assert (states := 
hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - -@pytest.mark.parametrize( - ( - "parameter", - "parameter_value", - "expand_side_effect", - "log_messages", - "peers_call_count", - ), - [ - # All discovered - # Valid peers - ("all_discovered", True, None, [], 2), - # Invalid peers - ( - "all_discovered", - True, - NotFoundException(), - [f"Unable to expand to {TEST_JID_3}", f"Unable to expand to {TEST_JID_4}"], - 2, - ), - # Beolink JIDs - # Valid peer - ("beolink_jids", [TEST_JID_3, TEST_JID_4], None, [], 1), - # Invalid peer - ( - "beolink_jids", - [TEST_JID_3, TEST_JID_4], - NotFoundException(), - [ - f"Unable to expand to {TEST_JID_3}. Is the device available on the network?", - f"Unable to expand to {TEST_JID_4}. Is the device available on the network?", - ], - 1, - ), - ], -) -async def test_async_beolink_expand( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - snapshot: SnapshotAssertion, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, - parameter: str, - parameter_value: bool | list[str], - expand_side_effect: NotFoundException | None, - log_messages: list[str], - peers_call_count: int, -) -> None: - """Test async_beolink_expand.""" - mock_mozart_client.post_beolink_expand.side_effect = expand_side_effect - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - source_change_callback = ( - mock_mozart_client.get_source_change_notifications.call_args[0][0] - ) - - # Set the source to a beolink expandable source - source_change_callback(TEST_SOURCE) - - await hass.services.async_call( - DOMAIN, - "beolink_expand", - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - parameter: parameter_value, - }, - blocking=True, - ) - - # Check log messages - for log_message in log_messages: - assert log_message in caplog.text - - # Called once during _initialize and once during async_beolink_expand for all_discovered - assert mock_mozart_client.get_beolink_peers.call_count == peers_call_count - - assert mock_mozart_client.post_beolink_expand.call_count == len( - await mock_mozart_client.get_beolink_peers() - ) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - -async def test_async_beolink_unexpand( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test test_async_beolink_unexpand.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - DOMAIN, - "beolink_unexpand", - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - "beolink_jids": [TEST_JID_3, TEST_JID_4], - }, - blocking=True, - ) - - assert mock_mozart_client.post_beolink_unexpand.call_count == 2 - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - -async def test_async_beolink_allstandby( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_beolink_allstandby.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - DOMAIN, - "beolink_allstandby", - {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, - blocking=True, - ) - - 
mock_mozart_client.post_beolink_allstandby.assert_called_once() - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - -@pytest.mark.parametrize( - ("repeat"), - [ - # Repeat all - (RepeatMode.ALL), - # Repeat track - (RepeatMode.ONE), - # Repeat none - (RepeatMode.OFF), - ], -) -async def test_async_set_repeat( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, - repeat: RepeatMode, -) -> None: - """Test async_set_repeat.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert ATTR_MEDIA_REPEAT not in states.attributes - - # Set the return value of the repeat endpoint to match service call - mock_mozart_client.get_settings_queue.return_value = PlayQueueSettings( - repeat=BANG_OLUFSEN_REPEAT_FROM_HA[repeat] - ) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_REPEAT_SET, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_REPEAT: repeat, - }, - blocking=True, - ) - mock_mozart_client.set_settings_queue.assert_called_once_with( - play_queue_settings=PlayQueueSettings( - repeat=BANG_OLUFSEN_REPEAT_FROM_HA[repeat] - ) - ) - - # Test the BANG_OLUFSEN_REPEAT_TO_HA dict by checking property value - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_MEDIA_REPEAT] == repeat - - -@pytest.mark.parametrize( - ("shuffle"), - [ - # Shuffle on - (True), - # Shuffle off - (False), - ], -) -async def test_async_set_shuffle( - hass: HomeAssistant, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, - shuffle: bool, -) -> None: - """Test async_set_shuffle.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert ATTR_MEDIA_SHUFFLE not in states.attributes - - # Set the return value of the shuffle endpoint to match service call - mock_mozart_client.get_settings_queue.return_value = PlayQueueSettings( - shuffle=shuffle - ) - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_SHUFFLE_SET, - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_SHUFFLE: shuffle, - }, - blocking=True, - ) - mock_mozart_client.set_settings_queue.assert_called_once_with( - play_queue_settings=PlayQueueSettings(shuffle=shuffle) - ) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_MEDIA_SHUFFLE] == shuffle diff --git a/tests/components/bang_olufsen/test_websocket.py b/tests/components/bang_olufsen/test_websocket.py deleted file mode 100644 index b17859a4f4e..00000000000 --- a/tests/components/bang_olufsen/test_websocket.py +++ /dev/null @@ -1,169 +0,0 @@ -"""Test the Bang & Olufsen WebSocket listener.""" - -import logging -from unittest.mock import AsyncMock, Mock - -from mozart_api.models import SoftwareUpdateState -import pytest - -from homeassistant.components.bang_olufsen.const import ( - BANG_OLUFSEN_WEBSOCKET_EVENT, - CONNECTION_STATUS, - DOMAIN, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceRegistry -from homeassistant.helpers.dispatcher import async_dispatcher_connect - -from .const import TEST_NAME - -from tests.common import MockConfigEntry - - -async def test_connection( - hass: HomeAssistant, - caplog: 
pytest.LogCaptureFixture, - mock_config_entry: MockConfigEntry, - mock_mozart_client: AsyncMock, -) -> None: - """Test on_connection and on_connection_lost logs and calls correctly.""" - - mock_mozart_client.websocket_connected = True - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - connection_callback = mock_mozart_client.get_on_connection.call_args[0][0] - - caplog.set_level(logging.DEBUG) - - mock_connection_callback = Mock() - - async_dispatcher_connect( - hass, - f"{mock_config_entry.unique_id}_{CONNECTION_STATUS}", - mock_connection_callback, - ) - - # Call the WebSocket connection status method - connection_callback() - await hass.async_block_till_done() - - mock_connection_callback.assert_called_once_with(True) - assert f"Connected to the {TEST_NAME} notification channel" in caplog.text - - -async def test_connection_lost( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mock_config_entry: MockConfigEntry, - mock_mozart_client: AsyncMock, -) -> None: - """Test on_connection_lost logs and calls correctly.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - connection_lost_callback = mock_mozart_client.get_on_connection_lost.call_args[0][0] - - mock_connection_lost_callback = Mock() - - async_dispatcher_connect( - hass, - f"{mock_config_entry.unique_id}_{CONNECTION_STATUS}", - mock_connection_lost_callback, - ) - - connection_lost_callback() - await hass.async_block_till_done() - - mock_connection_lost_callback.assert_called_once_with(False) - assert f"Lost connection to the {TEST_NAME}" in caplog.text - - -async def test_on_software_update_state( - hass: HomeAssistant, - device_registry: DeviceRegistry, - mock_config_entry: MockConfigEntry, - mock_mozart_client: AsyncMock, -) -> None: - """Test software version is updated through on_software_update_state.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - software_update_state_callback = ( - mock_mozart_client.get_software_update_state_notifications.call_args[0][0] - ) - - # Trigger the notification - await software_update_state_callback(SoftwareUpdateState()) - - await hass.async_block_till_done() - - assert mock_config_entry.unique_id - assert ( - device := device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} - ) - ) - assert device.sw_version == "1.0.0" - - -async def test_on_all_notifications_raw( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - device_registry: DeviceRegistry, - mock_config_entry: MockConfigEntry, - mock_mozart_client: AsyncMock, -) -> None: - """Test on_all_notifications_raw logs and fires as expected.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - all_notifications_raw_callback = ( - mock_mozart_client.get_all_notifications_raw.call_args[0][0] - ) - - raw_notification = { - "eventData": { - "default": {"level": 40}, - "level": {"level": 40}, - "maximum": {"level": 100}, - "muted": {"muted": False}, - }, - "eventType": "WebSocketEventVolume", - } - raw_notification_full = raw_notification - - # Get device ID for the modified notification that is sent as an event and in the log - assert mock_config_entry.unique_id - assert ( - device := device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} - ) - ) - raw_notification_full.update( - { - "device_id": device.id, - 
"serial_number": mock_config_entry.unique_id, - } - ) - - caplog.set_level(logging.DEBUG) - - mock_event_callback = Mock() - - # Listen to BANG_OLUFSEN_WEBSOCKET_EVENT events - hass.bus.async_listen(BANG_OLUFSEN_WEBSOCKET_EVENT, mock_event_callback) - - # Trigger the notification - all_notifications_raw_callback(raw_notification) - await hass.async_block_till_done() - - assert str(raw_notification_full) in caplog.text - - mocked_call = mock_event_callback.call_args[0][0].as_dict() - assert mocked_call["event_type"] == BANG_OLUFSEN_WEBSOCKET_EVENT - assert mocked_call["data"] == raw_notification_full diff --git a/tests/components/bayesian/test_binary_sensor.py b/tests/components/bayesian/test_binary_sensor.py index a8723ae5d30..e4f646572cb 100644 --- a/tests/components/bayesian/test_binary_sensor.py +++ b/tests/components/bayesian/test_binary_sensor.py @@ -1,7 +1,6 @@ """The test for the bayesian sensor platform.""" import json -from logging import WARNING from unittest.mock import patch import pytest @@ -21,14 +20,16 @@ from homeassistant.const import ( STATE_UNKNOWN, ) from homeassistant.core import Context, HomeAssistant, callback -from homeassistant.helpers import issue_registry as ir +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.event import async_track_state_change_event from homeassistant.setup import async_setup_component from tests.common import get_fixture_path -async def test_load_values_when_added_to_hass(hass: HomeAssistant) -> None: +async def test_load_values_when_added_to_hass( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test that sensor initializes with observations of relevant entities.""" config = { @@ -57,6 +58,11 @@ async def test_load_values_when_added_to_hass(hass: HomeAssistant) -> None: assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + assert ( + entity_registry.entities["binary_sensor.test_binary"].unique_id + == "bayesian-3b4c9563-5e84-4167-8fe7-8f507e796d72" + ) + state = hass.states.get("binary_sensor.test_binary") assert state.attributes.get("device_class") == "connectivity" assert state.attributes.get("observations")[0]["prob_given_true"] == 0.8 @@ -325,75 +331,6 @@ async def test_sensor_value_template(hass: HomeAssistant) -> None: assert state.state == "off" -async def test_mixed_states(hass: HomeAssistant) -> None: - """Test sensor on probability threshold limits.""" - config = { - "binary_sensor": { - "name": "should_HVAC", - "platform": "bayesian", - "observations": [ - { - "platform": "template", - "value_template": "{{states('sensor.guest_sensor') != 'off'}}", - "prob_given_true": 0.3, - "prob_given_false": 0.15, - }, - { - "platform": "state", - "entity_id": "sensor.anyone_home", - "to_state": "on", - "prob_given_true": 0.6, - "prob_given_false": 0.05, - }, - { - "platform": "numeric_state", - "entity_id": "sensor.temperature", - "below": 24, - "above": 19, - "prob_given_true": 0.1, - "prob_given_false": 0.6, - }, - ], - "prior": 0.3, - "probability_threshold": 0.5, - } - } - assert await async_setup_component(hass, "binary_sensor", config) - await hass.async_block_till_done() - - hass.states.async_set("sensor.guest_sensor", "UNKNOWN") - hass.states.async_set("sensor.anyone_home", "on") - hass.states.async_set("sensor.temperature", 15) - - await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.should_HVAC") - - assert set(state.attributes.get("occurred_observation_entities")) == { - 
"sensor.anyone_home", - "sensor.temperature", - } - template_obs = { - "platform": "template", - "value_template": "{{states('sensor.guest_sensor') != 'off'}}", - "prob_given_true": 0.3, - "prob_given_false": 0.15, - "observed": True, - } - assert template_obs in state.attributes.get("observations") - - assert abs(0.95857988 - state.attributes.get("probability")) < 0.01 - # A = binary_sensor.should_HVAC being TRUE, P(A) being the prior - # B = value_template evaluating to TRUE - # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). - # Calculated where P(A) = 0.3, P(B|A) = 0.3 , P(B|notA) = 0.15 = 0.46153846 - # Step 2, prior is now 0.46153846, B now refers to sensor.anyone_home=='on' - # P(A) = 0.46153846, P(B|A) = 0.6 , P(B|notA) = 0.05, result = 0.91139240 - # Step 3, prior is now 0.91139240, B now refers to sensor.temperature in range [19,24] - # However since the temp is 15 we take the inverse probability for this negative observation - # P(A) = 0.91139240, P(B|A) = (1-0.1) , P(B|notA) = (1-0.6), result = 0.95857988 - - async def test_threshold(hass: HomeAssistant, issue_registry: ir.IssueRegistry) -> None: """Test sensor on probability threshold limits.""" config = { @@ -430,7 +367,7 @@ async def test_threshold(hass: HomeAssistant, issue_registry: ir.IssueRegistry) async def test_multiple_observations(hass: HomeAssistant) -> None: """Test sensor with multiple observations of same entity. - these entries should be labelled as 'state' and negative observations ignored - as the outcome is not known to be binary. + these entries should be labelled as 'multi_state' and negative observations ignored - as the outcome is not known to be binary. Before the merge of #67631 this practice was a common work-around for bayesian's ignoring of negative observations, this also preserves that function """ @@ -499,203 +436,83 @@ async def test_multiple_observations(hass: HomeAssistant) -> None: # Calculated using bayes theorum where P(A) = 0.2, P(B|A) = 0.2, P(B|notA) = 0.6 assert state.state == "off" - assert state.attributes.get("observations")[0]["platform"] == "state" - assert state.attributes.get("observations")[1]["platform"] == "state" + assert state.attributes.get("observations")[0]["platform"] == "multi_state" + assert state.attributes.get("observations")[1]["platform"] == "multi_state" -async def test_multiple_numeric_observations( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test sensor on numeric state platform observations with more than one range. - - This tests an example where the probability of it being a 'nice day' varies over - a series of temperatures. Since this is a multi-state, all the non-observed ranges - should be ignored and only the range including the observed value should update - the prior. When a value lands on above or below (15 is tested) it is included if it - equals `below`, and ignored if it equals `above`. 
- """ +async def test_multiple_numeric_observations(hass: HomeAssistant) -> None: + """Test sensor with multiple numeric observations of same entity.""" config = { "binary_sensor": { "platform": "bayesian", - "name": "nice_day", + "name": "Test_Binary", "observations": [ { "platform": "numeric_state", - "entity_id": "sensor.test_temp", - "below": 0, - "prob_given_true": 0.05, - "prob_given_false": 0.2, - }, - { - "platform": "numeric_state", - "entity_id": "sensor.test_temp", + "entity_id": "sensor.test_monitored", "below": 10, "above": 0, - "prob_given_true": 0.1, - "prob_given_false": 0.25, + "prob_given_true": 0.4, + "prob_given_false": 0.0001, }, { "platform": "numeric_state", - "entity_id": "sensor.test_temp", - "below": 15, - "above": 10, - "prob_given_true": 0.2, - "prob_given_false": 0.35, - }, - { - "platform": "numeric_state", - "entity_id": "sensor.test_temp", - "below": 25, - "above": 15, - "prob_given_true": 0.5, - "prob_given_false": 0.15, - }, - { - "platform": "numeric_state", - "entity_id": "sensor.test_temp", - "above": 25, - "prob_given_true": 0.15, - "prob_given_false": 0.05, + "entity_id": "sensor.test_monitored", + "below": 100, + "above": 30, + "prob_given_true": 0.6, + "prob_given_false": 0.0001, }, ], - "prior": 0.3, + "prior": 0.1, } } + assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() - hass.states.async_set("sensor.test_temp", -5) + hass.states.async_set("sensor.test_monitored", STATE_UNKNOWN) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.nice_day") + state = hass.states.get("binary_sensor.test_binary") for attrs in state.attributes.values(): json.dumps(attrs) - assert state.attributes.get("occurred_observation_entities") == ["sensor.test_temp"] + assert state.attributes.get("occurred_observation_entities") == [] assert state.attributes.get("probability") == 0.1 - # No observations made so probability should be the prior - assert state.attributes.get("occurred_observation_entities") == ["sensor.test_temp"] - assert abs(state.attributes.get("probability") - 0.09677) < 0.01 - # A = binary_sensor.nice_day being TRUE - # B = sensor.test_temp in the range (, 0] - # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). - # Where P(B|A) is prob_given_true and P(B|~A) is prob_given_false - # Calculated using P(A) = 0.3, P(B|A) = 0.05, P(B|~A) = 0.2 -> 0.09677 - # Because >1 range is defined for sensor.test_temp we should not infer anything from the - # ranges not observed - assert state.state == "off" - - hass.states.async_set("sensor.test_temp", 5) - await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.nice_day") - - assert state.attributes.get("occurred_observation_entities") == ["sensor.test_temp"] - assert abs(state.attributes.get("probability") - 0.14634146) < 0.01 - # A = binary_sensor.nice_day being TRUE - # B = sensor.test_temp in the range (0, 10] - # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). 
- # Where P(B|A) is prob_given_true and P(B|~A) is prob_given_false - # Calculated using P(A) = 0.3, P(B|A) = 0.1, P(B|~A) = 0.25 -> 0.14634146 - # Because >1 range is defined for sensor.test_temp we should not infer anything from the - # ranges not observed assert state.state == "off" - hass.states.async_set("sensor.test_temp", 12) + hass.states.async_set("sensor.test_monitored", 20) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.nice_day") - assert abs(state.attributes.get("probability") - 0.19672131) < 0.01 - # A = binary_sensor.nice_day being TRUE - # B = sensor.test_temp in the range (10, 15] - # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). - # Where P(B|A) is prob_given_true and P(B|~A) is prob_given_false - # Calculated using P(A) = 0.3, P(B|A) = 0.2, P(B|~A) = 0.35 -> 0.19672131 - # Because >1 range is defined for sensor.test_temp we should not infer anything from the - # ranges not observed + state = hass.states.get("binary_sensor.test_binary") + + assert state.attributes.get("occurred_observation_entities") == [ + "sensor.test_monitored" + ] + assert round(abs(0.026 - state.attributes.get("probability")), 7) < 0.01 + # Step 1 Calculated where P(A) = 0.1, P(~B|A) = 0.6 (negative obs), P(~B|notA) = 0.9999 -> 0.0625 + # Step 2 P(A) = 0.0625, P(B|A) = 0.4 (negative obs), P(B|notA) = 0.9999 -> 0.26 assert state.state == "off" - hass.states.async_set("sensor.test_temp", 22) + hass.states.async_set("sensor.test_monitored", 35) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.nice_day") - assert abs(state.attributes.get("probability") - 0.58823529) < 0.01 - # A = binary_sensor.nice_day being TRUE - # B = sensor.test_temp in the range (15, 25] - # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). - # Where P(B|A) is prob_given_true and P(B|~A) is prob_given_false - # Calculated using P(A) = 0.3, P(B|A) = 0.5, P(B|~A) = 0.15 -> 0.58823529 - # Because >1 range is defined for sensor.test_temp we should not infer anything from the - # ranges not observed + state = hass.states.get("binary_sensor.test_binary") + assert state.attributes.get("occurred_observation_entities") == [ + "sensor.test_monitored" + ] + assert abs(1 - state.attributes.get("probability")) < 0.01 + # Step 1 Calculated where P(A) = 0.1, P(~B|A) = 0.6 (negative obs), P(~B|notA) = 0.9999 -> 0.0625 + # Step 2 P(A) = 0.0625, P(B|A) = 0.6, P(B|notA) = 0.0001 -> 0.9975 assert state.state == "on" - - hass.states.async_set("sensor.test_temp", 30) - await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.nice_day") - assert abs(state.attributes.get("probability") - 0.562500) < 0.01 - # A = binary_sensor.nice_day being TRUE - # B = sensor.test_temp in the range (25, ] - # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). 
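# The probability arithmetic in these comments applies the same single-step Bayesian
# update each time. A minimal standalone sketch of that calculation (illustrative
# only, not the integration's implementation):

def bayesian_update(prior: float, prob_given_true: float, prob_given_false: float) -> float:
    """Return P(A|B) from P(A), P(B|A) and P(B|~A) via Bayes' theorem."""
    numerator = prob_given_true * prior
    return numerator / (numerator + prob_given_false * (1 - prior))

# With the prior of 0.3 and the (25, ] range's prob_given_true 0.15 and
# prob_given_false 0.05, this reproduces the 0.562500 figure used in these comments.
assert abs(bayesian_update(0.3, 0.15, 0.05) - 0.5625) < 1e-9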
- # Where P(B|A) is prob_given_true and P(B|~A) is prob_given_false - # Calculated using P(A) = 0.3, P(B|A) = 0.15, P(B|~A) = 0.05 -> 0.562500 - # Because >1 range is defined for sensor.test_temp we should not infer anything from the - # ranges not observed - - assert state.state == "on" - - # Edge cases - # if on a threshold only one observation should be included and not both - hass.states.async_set("sensor.test_temp", 15) - await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.nice_day") - - assert state.attributes.get("occurred_observation_entities") == ["sensor.test_temp"] - - assert abs(state.attributes.get("probability") - 0.19672131) < 0.01 - # Where there are multi numeric ranges when on the threshold, use below - # A = binary_sensor.nice_day being TRUE - # B = sensor.test_temp in the range (10, 15] - # Bayes theorum is P(A|B) = P(B|A) * P(A) / ( P(B|A)*P(A) + P(B|~A)*P(~A) ). - # Where P(B|A) is prob_given_true and P(B|~A) is prob_given_false - # Calculated using P(A) = 0.3, P(B|A) = 0.2, P(B|~A) = 0.35 -> 0.19672131 - # Because >1 range is defined for sensor.test_temp we should not infer anything from the - # ranges not observed - - assert state.state == "off" - - assert len(issue_registry.issues) == 0 assert state.attributes.get("observations")[0]["platform"] == "numeric_state" - - hass.states.async_set("sensor.test_temp", "badstate") - await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.nice_day") - - assert state.attributes.get("occurred_observation_entities") == [] - assert state.state == "off" - - hass.states.async_set("sensor.test_temp", STATE_UNAVAILABLE) - await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.nice_day") - - assert state.attributes.get("occurred_observation_entities") == [] - assert state.state == "off" - - hass.states.async_set("sensor.test_temp", STATE_UNKNOWN) - await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.nice_day") - - assert state.attributes.get("occurred_observation_entities") == [] - assert state.state == "off" + assert state.attributes.get("observations")[1]["platform"] == "numeric_state" async def test_mirrored_observations( @@ -834,127 +651,6 @@ async def test_missing_prob_given_false( ) -async def test_bad_multi_numeric( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test whether missing prob_given_false are detected and appropriate issues are created.""" - - config = { - "binary_sensor": { - "platform": "bayesian", - "name": "bins_out", - "observations": [ - { - "platform": "numeric_state", - "entity_id": "sensor.signal_strength", - "above": 10, - "prob_given_true": 0.01, - "prob_given_false": 0.3, - }, - { - "platform": "numeric_state", - "entity_id": "sensor.signal_strength", - "above": 5, - "below": 10, - "prob_given_true": 0.02, - "prob_given_false": 0.5, - }, - { - "platform": "numeric_state", - "entity_id": "sensor.signal_strength", - "above": 0, - "below": 6, # overlaps - "prob_given_true": 0.07, - "prob_given_false": 0.1, - }, - { - "platform": "numeric_state", - "entity_id": "sensor.signal_strength", - "above": -10, - "below": 0, - "prob_given_true": 0.3, - "prob_given_false": 0.07, - }, - { - "platform": "numeric_state", - "entity_id": "sensor.signal_strength", - "below": -10, - "prob_given_true": 0.6, - "prob_given_false": 0.03, - }, - ], - "prior": 0.2, - } - } - caplog.clear() - caplog.set_level(WARNING) - - assert await async_setup_component(hass, 
"binary_sensor", config) - - assert "entities must not overlap" in caplog.text - - -async def test_inverted_numeric( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test whether missing prob_given_false are detected and appropriate logs are created.""" - - config = { - "binary_sensor": { - "platform": "bayesian", - "name": "goldilocks_zone", - "observations": [ - { - "platform": "numeric_state", - "entity_id": "sensor.temp", - "above": 23, - "below": 20, - "prob_given_true": 0.9, - "prob_given_false": 0.2, - }, - ], - "prior": 0.4, - } - } - - assert await async_setup_component(hass, "binary_sensor", config) - assert ( - "bayesian numeric state 'above' (23.0) must be less than 'below' (20.0)" - in caplog.text - ) - - -async def test_no_value_numeric( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test whether missing prob_given_false are detected and appropriate logs are created.""" - - config = { - "binary_sensor": { - "platform": "bayesian", - "name": "goldilocks_zone", - "observations": [ - { - "platform": "numeric_state", - "entity_id": "sensor.temp", - "prob_given_true": 0.9, - "prob_given_false": 0.2, - }, - ], - "prior": 0.4, - } - } - - assert await async_setup_component(hass, "binary_sensor", config) - assert "at least one of 'above' or 'below' must be specified" in caplog.text - - async def test_probability_updates(hass: HomeAssistant) -> None: """Test probability update function.""" prob_given_true = [0.3, 0.6, 0.8] @@ -1022,18 +718,17 @@ async def test_observed_entities(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get("binary_sensor.test_binary") - assert state.attributes.get("occurred_observation_entities") == [ - "sensor.test_monitored" - ] + assert ["sensor.test_monitored"] == state.attributes.get( + "occurred_observation_entities" + ) hass.states.async_set("sensor.test_monitored1", "on") await hass.async_block_till_done() state = hass.states.get("binary_sensor.test_binary") - assert sorted(state.attributes.get("occurred_observation_entities")) == [ - "sensor.test_monitored", - "sensor.test_monitored1", - ] + assert ["sensor.test_monitored", "sensor.test_monitored1"] == sorted( + state.attributes.get("occurred_observation_entities") + ) async def test_state_attributes_are_serializable(hass: HomeAssistant) -> None: @@ -1090,10 +785,9 @@ async def test_state_attributes_are_serializable(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get("binary_sensor.test_binary") - assert sorted(state.attributes.get("occurred_observation_entities")) == [ - "sensor.test_monitored", - "sensor.test_monitored1", - ] + assert ["sensor.test_monitored", "sensor.test_monitored1"] == sorted( + state.attributes.get("occurred_observation_entities") + ) for attrs in state.attributes.values(): json.dumps(attrs) diff --git a/tests/components/binary_sensor/test_device_condition.py b/tests/components/binary_sensor/test_device_condition.py index 8a0132ff2af..c2bd29fad36 100644 --- a/tests/components/binary_sensor/test_device_condition.py +++ b/tests/components/binary_sensor/test_device_condition.py @@ -22,6 +22,7 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, setup_test_component_platform, ) @@ -31,6 +32,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the 
blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -232,7 +239,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for turn_on and turn_off conditions.""" @@ -301,26 +308,26 @@ async def test_if_state( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_on event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "is_off event - test_event2" + assert len(calls) == 2 + assert calls[1].data["some"] == "is_off event - test_event2" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for turn_on and turn_off conditions.""" @@ -368,19 +375,19 @@ async def test_if_state_legacy( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_on event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_on event - test_event1" async def test_if_fires_on_for_condition( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for firing if condition is on with delay.""" @@ -432,26 +439,26 @@ async def test_if_fires_on_for_condition( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 # Time travel 10 secs into the future time_freeze.move_to(point2) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 # Time travel 20 secs into the future time_freeze.move_to(point3) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_off event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_off event - 
test_event1" diff --git a/tests/components/binary_sensor/test_device_trigger.py b/tests/components/binary_sensor/test_device_trigger.py index 78e382f77bf..f91a336061d 100644 --- a/tests/components/binary_sensor/test_device_trigger.py +++ b/tests/components/binary_sensor/test_device_trigger.py @@ -22,6 +22,7 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, setup_test_component_platform, ) @@ -31,6 +32,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -233,7 +240,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for on and off triggers firing.""" @@ -306,22 +313,21 @@ async def test_if_fires_on_state_change( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"not_bat_low device - {entry.entity_id} - on - off - None" ) hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[1].data["some"] - == f"bat_low device - {entry.entity_id} - off - on - None" + calls[1].data["some"] == f"bat_low device - {entry.entity_id} - off - on - None" ) @@ -329,7 +335,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for triggers firing with delay.""" @@ -382,17 +388,17 @@ async def test_if_fires_on_state_change_with_for( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await hass.async_block_till_done() assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) @@ -401,7 +407,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for triggers firing.""" @@ -453,12 +459,12 @@ async def test_if_fires_on_state_change_legacy( ) await hass.async_block_till_done() assert 
hass.states.get(entry.entity_id).state == STATE_ON - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - None" ) diff --git a/tests/components/binary_sensor/test_init.py b/tests/components/binary_sensor/test_init.py index ea0ad05a0db..8f14063e011 100644 --- a/tests/components/binary_sensor/test_init.py +++ b/tests/components/binary_sensor/test_init.py @@ -1,9 +1,9 @@ """The tests for the Binary sensor component.""" -from collections.abc import Generator from unittest import mock import pytest +from typing_extensions import Generator from homeassistant.components import binary_sensor from homeassistant.config_entries import ConfigEntry, ConfigFlow diff --git a/tests/components/blackbird/test_media_player.py b/tests/components/blackbird/test_media_player.py index db92dddcc77..ec5a37f72ad 100644 --- a/tests/components/blackbird/test_media_player.py +++ b/tests/components/blackbird/test_media_player.py @@ -35,7 +35,7 @@ class AttrDict(dict): class MockBlackbird: """Mock for pyblackbird object.""" - def __init__(self) -> None: + def __init__(self): """Init mock object.""" self.zones = defaultdict(lambda: AttrDict(power=True, av=1)) diff --git a/tests/components/blebox/conftest.py b/tests/components/blebox/conftest.py index fb35bae43a1..89229575a0b 100644 --- a/tests/components/blebox/conftest.py +++ b/tests/components/blebox/conftest.py @@ -9,7 +9,6 @@ import pytest from homeassistant.components.blebox.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT -from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry @@ -78,9 +77,7 @@ def feature_fixture(request: pytest.FixtureRequest) -> Any: return request.getfixturevalue(request.param) -async def async_setup_entities( - hass: HomeAssistant, entity_ids: list[str] -) -> list[er.RegistryEntry]: +async def async_setup_entities(hass, entity_ids): """Return configured entries with the given entity ids.""" config_entry = mock_config() @@ -93,7 +90,7 @@ async def async_setup_entities( return [entity_registry.async_get(entity_id) for entity_id in entity_ids] -async def async_setup_entity(hass: HomeAssistant, entity_id: str) -> er.RegistryEntry: +async def async_setup_entity(hass, entity_id): """Return a configured entry with the given entity_id.""" return (await async_setup_entities(hass, [entity_id]))[0] diff --git a/tests/components/blebox/test_climate.py b/tests/components/blebox/test_climate.py index e402a3d5fbd..8ba0c3f630e 100644 --- a/tests/components/blebox/test_climate.py +++ b/tests/components/blebox/test_climate.py @@ -21,7 +21,6 @@ from homeassistant.components.climate import ( ) from homeassistant.const import ( ATTR_DEVICE_CLASS, - ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, STATE_UNKNOWN, @@ -153,7 +152,6 @@ async def test_on_when_below_desired(saunabox, hass: HomeAssistant) -> None: feature_mock.desired = 64.8 feature_mock.current = 25.7 - feature_mock.mode = 1 feature_mock.async_on = AsyncMock(side_effect=turn_on) await hass.services.async_call( "climate", @@ -188,13 +186,12 @@ async def test_on_when_above_desired(saunabox, hass: HomeAssistant) -> None: feature_mock.desired = 23.4 feature_mock.current = 28.7 - feature_mock.mode = 1 feature_mock.async_on = 
AsyncMock(side_effect=turn_on) await hass.services.async_call( "climate", SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_HVAC_MODE: HVACMode.HEAT}, + {"entity_id": entity_id, ATTR_HVAC_MODE: HVACMode.HEAT}, blocking=True, ) feature_mock.async_off.assert_not_called() diff --git a/tests/components/blebox/test_cover.py b/tests/components/blebox/test_cover.py index 2d9125b2206..1596de134c0 100644 --- a/tests/components/blebox/test_cover.py +++ b/tests/components/blebox/test_cover.py @@ -11,17 +11,18 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, CoverDeviceClass, CoverEntityFeature, - CoverState, ) from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_SUPPORTED_FEATURES, SERVICE_CLOSE_COVER, - SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, - SERVICE_OPEN_COVER_TILT, SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, @@ -209,7 +210,7 @@ async def test_open(feature, hass: HomeAssistant) -> None: feature_mock.async_open = AsyncMock(side_effect=open_gate) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert hass.states.get(entity_id).state == STATE_CLOSED feature_mock.async_update = AsyncMock() await hass.services.async_call( @@ -218,7 +219,7 @@ async def test_open(feature, hass: HomeAssistant) -> None: {"entity_id": entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == CoverState.OPENING + assert hass.states.get(entity_id).state == STATE_OPENING @pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"]) @@ -237,13 +238,13 @@ async def test_close(feature, hass: HomeAssistant) -> None: feature_mock.async_close = AsyncMock(side_effect=close) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == CoverState.OPEN + assert hass.states.get(entity_id).state == STATE_OPEN feature_mock.async_update = AsyncMock() await hass.services.async_call( "cover", SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True ) - assert hass.states.get(entity_id).state == CoverState.CLOSING + assert hass.states.get(entity_id).state == STATE_CLOSING def opening_to_stop_feature_mock(feature_mock): @@ -267,13 +268,13 @@ async def test_stop(feature, hass: HomeAssistant) -> None: opening_to_stop_feature_mock(feature_mock) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == CoverState.OPENING + assert hass.states.get(entity_id).state == STATE_OPENING feature_mock.async_update = AsyncMock() await hass.services.async_call( "cover", SERVICE_STOP_COVER, {"entity_id": entity_id}, blocking=True ) - assert hass.states.get(entity_id).state == CoverState.OPEN + assert hass.states.get(entity_id).state == STATE_OPEN @pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"]) @@ -292,7 +293,7 @@ async def test_update(feature, hass: HomeAssistant) -> None: state = hass.states.get(entity_id) assert state.attributes[ATTR_CURRENT_POSITION] == 71 # 100 - 29 - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN @pytest.mark.parametrize( @@ -315,7 +316,7 @@ async def test_set_position(feature, hass: HomeAssistant) -> None: feature_mock.async_set_position = AsyncMock(side_effect=set_position) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert hass.states.get(entity_id).state == STATE_CLOSED 
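Aside on the position arithmetic exercised above: the "# 100 - 29" comment reflects that these tests feed the mocked blebox feature a percent-closed value while Home Assistant's ATTR_CURRENT_POSITION reports percent open. An illustrative helper showing that mapping (an assumption for clarity, not the integration's actual code):

def device_to_ha_position(device_position: int) -> int:
    # The mocked feature reports how far the cover is closed (0-100);
    # Home Assistant's current cover position is how far it is open.
    return 100 - device_position

assert device_to_ha_position(29) == 71  # matches the ATTR_CURRENT_POSITION assertion above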
feature_mock.async_update = AsyncMock() await hass.services.async_call( @@ -324,7 +325,7 @@ async def test_set_position(feature, hass: HomeAssistant) -> None: {"entity_id": entity_id, ATTR_POSITION: 1}, blocking=True, ) # almost closed - assert hass.states.get(entity_id).state == CoverState.OPENING + assert hass.states.get(entity_id).state == STATE_OPENING async def test_unknown_position(shutterbox, hass: HomeAssistant) -> None: @@ -341,7 +342,7 @@ async def test_unknown_position(shutterbox, hass: HomeAssistant) -> None: await async_setup_entity(hass, entity_id) state = hass.states.get(entity_id) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert ATTR_CURRENT_POSITION not in state.attributes @@ -399,7 +400,7 @@ async def test_opening_state(feature, hass: HomeAssistant) -> None: feature_mock.async_update = AsyncMock(side_effect=initial_update) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == CoverState.OPENING + assert hass.states.get(entity_id).state == STATE_OPENING @pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"]) @@ -413,7 +414,7 @@ async def test_closing_state(feature, hass: HomeAssistant) -> None: feature_mock.async_update = AsyncMock(side_effect=initial_update) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == CoverState.CLOSING + assert hass.states.get(entity_id).state == STATE_CLOSING @pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"]) @@ -427,7 +428,7 @@ async def test_closed_state(feature, hass: HomeAssistant) -> None: feature_mock.async_update = AsyncMock(side_effect=initial_update) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert hass.states.get(entity_id).state == STATE_CLOSED async def test_tilt_position(shutterbox, hass: HomeAssistant) -> None: @@ -462,7 +463,7 @@ async def test_set_tilt_position(shutterbox, hass: HomeAssistant) -> None: feature_mock.async_set_tilt_position = AsyncMock(side_effect=set_tilt) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert hass.states.get(entity_id).state == STATE_CLOSED feature_mock.async_update = AsyncMock() await hass.services.async_call( @@ -471,58 +472,4 @@ async def test_set_tilt_position(shutterbox, hass: HomeAssistant) -> None: {"entity_id": entity_id, ATTR_TILT_POSITION: 80}, blocking=True, ) - assert hass.states.get(entity_id).state == CoverState.OPENING - - -async def test_open_tilt(shutterbox, hass: HomeAssistant) -> None: - """Test closing tilt.""" - feature_mock, entity_id = shutterbox - - def initial_update(): - feature_mock.tilt_current = 100 - - def set_tilt_position(tilt_position): - assert tilt_position == 0 - feature_mock.tilt_current = tilt_position - - feature_mock.async_update = AsyncMock(side_effect=initial_update) - feature_mock.async_set_tilt_position = AsyncMock(side_effect=set_tilt_position) - - await async_setup_entity(hass, entity_id) - feature_mock.async_update = AsyncMock() - - await hass.services.async_call( - "cover", - SERVICE_OPEN_COVER_TILT, - {"entity_id": entity_id}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 # inverted - - -async def test_close_tilt(shutterbox, hass: HomeAssistant) -> None: - """Test closing tilt.""" - feature_mock, entity_id = shutterbox - - def initial_update(): - feature_mock.tilt_current = 0 - - def 
set_tilt_position(tilt_position): - assert tilt_position == 100 - feature_mock.tilt_current = tilt_position - - feature_mock.async_update = AsyncMock(side_effect=initial_update) - feature_mock.async_set_tilt_position = AsyncMock(side_effect=set_tilt_position) - - await async_setup_entity(hass, entity_id) - feature_mock.async_update = AsyncMock() - - await hass.services.async_call( - "cover", - SERVICE_CLOSE_COVER_TILT, - {"entity_id": entity_id}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 # inverted + assert hass.states.get(entity_id).state == STATE_OPENING diff --git a/tests/components/blebox/test_init.py b/tests/components/blebox/test_init.py index 0cb5139336c..f406df51bd4 100644 --- a/tests/components/blebox/test_init.py +++ b/tests/components/blebox/test_init.py @@ -5,6 +5,7 @@ import logging import blebox_uniapi import pytest +from homeassistant.components.blebox.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -56,10 +57,10 @@ async def test_unload_config_entry(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hasattr(entry, "runtime_data") + assert hass.data[DOMAIN] await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() - assert not hasattr(entry, "runtime_data") + assert not hass.data.get(DOMAIN) assert entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/blink/snapshots/test_diagnostics.ambr b/tests/components/blink/snapshots/test_diagnostics.ambr index edc2879a66b..44554dad1e3 100644 --- a/tests/components/blink/snapshots/test_diagnostics.ambr +++ b/tests/components/blink/snapshots/test_diagnostics.ambr @@ -38,8 +38,6 @@ 'username': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'blink', 'minor_version': 1, 'options': dict({ diff --git a/tests/components/blink/test_config_flow.py b/tests/components/blink/test_config_flow.py index c89ab65ea1d..9c3193ec7d6 100644 --- a/tests/components/blink/test_config_flow.py +++ b/tests/components/blink/test_config_flow.py @@ -10,8 +10,6 @@ from homeassistant.components.blink import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from tests.common import MockConfigEntry - async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" @@ -294,11 +292,10 @@ async def test_form_unknown_error(hass: HomeAssistant) -> None: async def test_reauth_shows_user_step(hass: HomeAssistant) -> None: """Test reauth shows the user form.""" - mock_entry = MockConfigEntry( - domain=DOMAIN, + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, data={"username": "blink@example.com", "password": "invalid_password"}, ) - mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/blink/test_diagnostics.py b/tests/components/blink/test_diagnostics.py index d527633d4c9..3b120d23038 100644 --- a/tests/components/blink/test_diagnostics.py +++ b/tests/components/blink/test_diagnostics.py @@ -31,4 +31,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) + assert result == 
snapshot(exclude=props("entry_id")) diff --git a/tests/components/blink/test_init.py b/tests/components/blink/test_init.py index 6d4a93e58ab..3cd2cd51ebd 100644 --- a/tests/components/blink/test_init.py +++ b/tests/components/blink/test_init.py @@ -66,17 +66,18 @@ async def test_setup_not_ready_authkey_required( assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR -async def test_unload_entry( +async def test_unload_entry_multiple( hass: HomeAssistant, mock_blink_api: MagicMock, mock_blink_auth_api: MagicMock, mock_config_entry: MockConfigEntry, ) -> None: - """Test unload doesn't un-register services.""" + """Test being able to unload one of 2 entries.""" mock_config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() + hass.data[DOMAIN]["dummy"] = {1: 2} assert mock_config_entry.state is ConfigEntryState.LOADED assert await hass.config_entries.async_unload(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/blue_current/test_config_flow.py b/tests/components/blue_current/test_config_flow.py index a9dea70431f..33346990425 100644 --- a/tests/components/blue_current/test_config_flow.py +++ b/tests/components/blue_current/test_config_flow.py @@ -129,11 +129,6 @@ async def test_reauth( expected_api_token: str, ) -> None: """Test reauth flow.""" - config_entry.add_to_hass(hass) - result = await config_entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - with ( patch( "homeassistant.components.blue_current.config_flow.Client.validate_api_token", @@ -151,6 +146,20 @@ async def test_reauth( lambda self, on_data, on_open: hass.loop.create_future(), ), ): + config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + "unique_id": config_entry.unique_id, + }, + data={"api_token": "123"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"api_token": "1234567890"}, diff --git a/tests/components/blueprint/common.py b/tests/components/blueprint/common.py index 037aa38f6cb..dd59b6df082 100644 --- a/tests/components/blueprint/common.py +++ b/tests/components/blueprint/common.py @@ -1,8 +1,9 @@ """Blueprints test helpers.""" -from collections.abc import Generator from unittest.mock import patch +from typing_extensions import Generator + def stub_blueprint_populate_fixture_helper() -> Generator[None]: """Stub copying the blueprints to the config folder.""" diff --git a/tests/components/blueprint/test_default_blueprints.py b/tests/components/blueprint/test_default_blueprints.py index f69126a7f25..9bd60a7cb6b 100644 --- a/tests/components/blueprint/test_default_blueprints.py +++ b/tests/components/blueprint/test_default_blueprints.py @@ -6,7 +6,7 @@ import pathlib import pytest -from homeassistant.components.blueprint import BLUEPRINT_SCHEMA, models +from homeassistant.components.blueprint import models from homeassistant.components.blueprint.const import BLUEPRINT_FOLDER from homeassistant.util import yaml @@ -26,4 +26,4 @@ def test_default_blueprints(domain: str) -> None: LOGGER.info("Processing %s", fil) assert fil.name.endswith(".yaml") data = yaml.load_yaml(fil) - models.Blueprint(data, expected_domain=domain, schema=BLUEPRINT_SCHEMA) 
+ models.Blueprint(data, expected_domain=domain) diff --git a/tests/components/blueprint/test_importer.py b/tests/components/blueprint/test_importer.py index 94036d208ab..f135bbf23b8 100644 --- a/tests/components/blueprint/test_importer.py +++ b/tests/components/blueprint/test_importer.py @@ -192,28 +192,9 @@ async def test_fetch_blueprint_from_website_url( assert imported_blueprint.blueprint.metadata["source_url"] == url -async def test_fetch_blueprint_from_generic_url( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test fetching blueprint from url.""" - aioclient_mock.get( - "https://example.org/path/someblueprint.yaml", - text=Path( - hass.config.path("blueprints/automation/test_event_service.yaml") - ).read_text(encoding="utf8"), - ) +async def test_fetch_blueprint_from_unsupported_url(hass: HomeAssistant) -> None: + """Test fetching blueprint from an unsupported URL.""" + url = "https://example.com/unsupported.yaml" - url = "https://example.org/path/someblueprint.yaml" - imported_blueprint = await importer.fetch_blueprint_from_url(hass, url) - assert isinstance(imported_blueprint, importer.ImportedBlueprint) - assert imported_blueprint.blueprint.domain == "automation" - assert imported_blueprint.suggested_filename == "example.org/someblueprint" - assert imported_blueprint.blueprint.metadata["source_url"] == url - - -def test_generic_importer_last() -> None: - """Test that generic importer is always the last one.""" - assert ( - importer.FETCH_FUNCTIONS.count(importer.fetch_blueprint_from_generic_url) == 1 - ) - assert importer.FETCH_FUNCTIONS[-1] == importer.fetch_blueprint_from_generic_url + with pytest.raises(HomeAssistantError, match=r"^Unsupported URL$"): + await importer.fetch_blueprint_from_url(hass, url) diff --git a/tests/components/blueprint/test_models.py b/tests/components/blueprint/test_models.py index 0ce8c1f397a..45e35474e4c 100644 --- a/tests/components/blueprint/test_models.py +++ b/tests/components/blueprint/test_models.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch import pytest -from homeassistant.components.blueprint import BLUEPRINT_SCHEMA, errors, models +from homeassistant.components.blueprint import errors, models from homeassistant.core import HomeAssistant from homeassistant.util.yaml import Input @@ -22,8 +22,7 @@ def blueprint_1() -> models.Blueprint: "input": {"test-input": {"name": "Name", "description": "Description"}}, }, "example": Input("test-input"), - }, - schema=BLUEPRINT_SCHEMA, + } ) @@ -58,32 +57,26 @@ def blueprint_2(request: pytest.FixtureRequest) -> models.Blueprint: } }, } - return models.Blueprint(blueprint, schema=BLUEPRINT_SCHEMA) + return models.Blueprint(blueprint) @pytest.fixture def domain_bps(hass: HomeAssistant) -> models.DomainBlueprints: """Domain blueprints fixture.""" return models.DomainBlueprints( - hass, - "automation", - logging.getLogger(__name__), - None, - AsyncMock(), - BLUEPRINT_SCHEMA, + hass, "automation", logging.getLogger(__name__), None, AsyncMock() ) def test_blueprint_model_init() -> None: """Test constructor validation.""" with pytest.raises(errors.InvalidBlueprint): - models.Blueprint({}, schema=BLUEPRINT_SCHEMA) + models.Blueprint({}) with pytest.raises(errors.InvalidBlueprint): models.Blueprint( {"blueprint": {"name": "Hello", "domain": "automation"}}, expected_domain="not-automation", - schema=BLUEPRINT_SCHEMA, ) with pytest.raises(errors.InvalidBlueprint): @@ -95,8 +88,7 @@ def test_blueprint_model_init() -> None: "input": {"something": None}, }, "trigger": 
{"platform": Input("non-existing")}, - }, - schema=BLUEPRINT_SCHEMA, + } ) @@ -123,8 +115,7 @@ def test_blueprint_update_metadata() -> None: "name": "Hello", "domain": "automation", }, - }, - schema=BLUEPRINT_SCHEMA, + } ) bp.update_metadata(source_url="http://bla.com") @@ -140,8 +131,7 @@ def test_blueprint_validate() -> None: "name": "Hello", "domain": "automation", }, - }, - schema=BLUEPRINT_SCHEMA, + } ).validate() is None ) @@ -153,8 +143,7 @@ def test_blueprint_validate() -> None: "domain": "automation", "homeassistant": {"min_version": "100000.0.0"}, }, - }, - schema=BLUEPRINT_SCHEMA, + } ).validate() == ["Requires at least Home Assistant 100000.0.0"] diff --git a/tests/components/blueprint/test_websocket_api.py b/tests/components/blueprint/test_websocket_api.py index 921088d8ac6..13615803569 100644 --- a/tests/components/blueprint/test_websocket_api.py +++ b/tests/components/blueprint/test_websocket_api.py @@ -64,17 +64,6 @@ async def test_list_blueprints( "name": "Call service based on event", }, }, - "test_event_service_legacy_schema.yaml": { - "metadata": { - "domain": "automation", - "input": { - "service_to_call": None, - "trigger_event": {"selector": {"text": {}}}, - "a_number": {"selector": {"number": {"mode": "box", "step": 1.0}}}, - }, - "name": "Call service based on event", - }, - }, "in_folder/in_folder_blueprint.yaml": { "metadata": { "domain": "automation", @@ -223,16 +212,16 @@ async def test_save_blueprint( " input:\n trigger_event:\n selector:\n text: {}\n " " service_to_call:\n a_number:\n selector:\n number:\n " " mode: box\n step: 1.0\n source_url:" - " https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/motion_light.yaml\ntriggers:\n" - " trigger: event\n event_type: !input 'trigger_event'\nactions:\n " + " https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/motion_light.yaml\ntrigger:\n" + " platform: event\n event_type: !input 'trigger_event'\naction:\n " " service: !input 'service_to_call'\n entity_id: light.kitchen\n" # c dumper will not quote the value after !input "blueprint:\n name: Call service based on event\n domain: automation\n " " input:\n trigger_event:\n selector:\n text: {}\n " " service_to_call:\n a_number:\n selector:\n number:\n " " mode: box\n step: 1.0\n source_url:" - " https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/motion_light.yaml\ntriggers:\n" - " trigger: event\n event_type: !input trigger_event\nactions:\n service:" + " https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/motion_light.yaml\ntrigger:\n" + " platform: event\n event_type: !input trigger_event\naction:\n service:" " !input service_to_call\n entity_id: light.kitchen\n" ) # Make sure ita parsable and does not raise @@ -494,13 +483,13 @@ async def test_substituting_blueprint_inputs( assert msg["success"] assert msg["result"]["substituted_config"] == { - "actions": { + "action": { "entity_id": "light.kitchen", "service": "test.automation", }, - "triggers": { + "trigger": { "event_type": "test_event", - "trigger": "event", + "platform": "event", }, } diff --git a/tests/components/bluesound/__init__.py b/tests/components/bluesound/__init__.py deleted file mode 100644 index f8a3701422e..00000000000 --- a/tests/components/bluesound/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Bluesound integration.""" diff --git a/tests/components/bluesound/conftest.py b/tests/components/bluesound/conftest.py deleted file mode 100644 index b4ee61dee57..00000000000 
--- a/tests/components/bluesound/conftest.py +++ /dev/null @@ -1,215 +0,0 @@ -"""Common fixtures for the Bluesound tests.""" - -from collections.abc import AsyncGenerator, Generator -from dataclasses import dataclass -import ipaddress -from typing import Any -from unittest.mock import AsyncMock, patch - -from pyblu import Input, Player, Preset, Status, SyncStatus -import pytest - -from homeassistant.components.bluesound.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_PORT -from homeassistant.core import HomeAssistant - -from .utils import LongPollingMock - -from tests.common import MockConfigEntry - - -@dataclass -class PlayerMockData: - """Container for player mock data.""" - - host: str - player: AsyncMock - status_long_polling_mock: LongPollingMock[Status] - sync_status_long_polling_mock: LongPollingMock[SyncStatus] - - @staticmethod - async def generate(host: str) -> "PlayerMockData": - """Generate player mock data.""" - host_ip = ipaddress.ip_address(host) - assert host_ip.version == 4 - mac_parts = [0xFF, 0xFF, *host_ip.packed] - mac = ":".join(f"{x:02X}" for x in mac_parts) - - player_name = f"player-name{host.replace('.', '')}" - - player = await AsyncMock(spec=Player)() - player.__aenter__.return_value = player - - status_long_polling_mock = LongPollingMock( - Status( - etag="etag", - input_id=None, - service=None, - state="play", - shuffle=False, - album="album", - artist="artist", - name="song", - image=None, - volume=10, - volume_db=22.3, - mute=False, - mute_volume=None, - mute_volume_db=None, - seconds=2, - total_seconds=123.1, - can_seek=False, - sleep=0, - group_name=None, - group_volume=None, - indexing=False, - stream_url=None, - ) - ) - - sync_status_long_polling_mock = LongPollingMock( - SyncStatus( - etag="etag", - id=f"{host}:11000", - mac=mac, - name=player_name, - image="invalid_url", - initialized=True, - brand="brand", - model="model", - model_name="model-name", - volume_db=0.5, - volume=50, - group=None, - master=None, - slaves=None, - zone=None, - zone_master=None, - zone_slave=None, - mute_volume_db=None, - mute_volume=None, - ) - ) - - player.status.side_effect = status_long_polling_mock.side_effect() - player.sync_status.side_effect = sync_status_long_polling_mock.side_effect() - - player.inputs = AsyncMock( - return_value=[ - Input("1", "input1", "image1", "url1"), - Input("2", "input2", "image2", "url2"), - ] - ) - player.presets = AsyncMock( - return_value=[ - Preset("preset1", "1", "url1", "image1", None), - Preset("preset2", "2", "url2", "image2", None), - ] - ) - - return PlayerMockData( - host, player, status_long_polling_mock, sync_status_long_polling_mock - ) - - -@dataclass -class PlayerMocks: - """Container for mocks.""" - - player_data: PlayerMockData - player_data_secondary: PlayerMockData - player_data_for_already_configured: PlayerMockData - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.bluesound.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def config_entry() -> MockConfigEntry: - """Return a mocked config entry.""" - return MockConfigEntry( - domain=DOMAIN, - data={ - CONF_HOST: "1.1.1.1", - CONF_PORT: 11000, - }, - unique_id="ff:ff:01:01:01:01-11000", - ) - - -@pytest.fixture -def config_entry_secondary() -> MockConfigEntry: - """Return a mocked config entry.""" - return MockConfigEntry( - domain=DOMAIN, - data={ - CONF_HOST: "2.2.2.2", - CONF_PORT: 11000, - 
}, - unique_id="ff:ff:02:02:02:02-11000", - ) - - -@pytest.fixture -async def setup_config_entry( - hass: HomeAssistant, config_entry: MockConfigEntry, player_mocks: PlayerMocks -) -> None: - """Set up the platform.""" - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - -@pytest.fixture -async def setup_config_entry_secondary( - hass: HomeAssistant, - config_entry_secondary: MockConfigEntry, - player_mocks: PlayerMocks, -) -> None: - """Set up the platform.""" - config_entry_secondary.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry_secondary.entry_id) - await hass.async_block_till_done() - - -@pytest.fixture -async def player_mocks() -> AsyncGenerator[PlayerMocks]: - """Mock the player.""" - player_mocks = PlayerMocks( - player_data=await PlayerMockData.generate("1.1.1.1"), - player_data_secondary=await PlayerMockData.generate("2.2.2.2"), - player_data_for_already_configured=await PlayerMockData.generate("1.1.1.2"), - ) - - # to simulate a player that is already configured - player_mocks.player_data_for_already_configured.sync_status_long_polling_mock.get().mac = player_mocks.player_data.sync_status_long_polling_mock.get().mac - - def select_player(*args: Any, **kwargs: Any) -> AsyncMock: - match args[0]: - case "1.1.1.1": - return player_mocks.player_data.player - case "2.2.2.2": - return player_mocks.player_data_secondary.player - case "1.1.1.2": - return player_mocks.player_data_for_already_configured.player - case _: - raise ValueError("Invalid player") - - with ( - patch( - "homeassistant.components.bluesound.Player", autospec=True - ) as mock_player, - patch( - "homeassistant.components.bluesound.config_flow.Player", - new=mock_player, - ), - ): - mock_player.side_effect = select_player - - yield player_mocks diff --git a/tests/components/bluesound/snapshots/test_media_player.ambr b/tests/components/bluesound/snapshots/test_media_player.ambr deleted file mode 100644 index 3e644d3038a..00000000000 --- a/tests/components/bluesound/snapshots/test_media_player.ambr +++ /dev/null @@ -1,31 +0,0 @@ -# serializer version: 1 -# name: test_attributes_set - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'player-name1111', - 'is_volume_muted': False, - 'master': False, - 'media_album_name': 'album', - 'media_artist': 'artist', - 'media_content_type': , - 'media_duration': 123, - 'media_position': 2, - 'media_title': 'song', - 'shuffle': False, - 'source_list': list([ - 'input1', - 'input2', - 'preset1', - 'preset2', - ]), - 'supported_features': , - 'volume_level': 0.1, - }), - 'context': , - 'entity_id': 'media_player.player_name1111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- diff --git a/tests/components/bluesound/test_config_flow.py b/tests/components/bluesound/test_config_flow.py deleted file mode 100644 index 63744cdf0ff..00000000000 --- a/tests/components/bluesound/test_config_flow.py +++ /dev/null @@ -1,262 +0,0 @@ -"""Test the Bluesound config flow.""" - -from unittest.mock import AsyncMock - -from pyblu.errors import PlayerUnreachableError - -from homeassistant.components.bluesound.const import DOMAIN -from homeassistant.components.zeroconf import ZeroconfServiceInfo -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER, SOURCE_ZEROCONF -from homeassistant.const import CONF_HOST, CONF_PORT -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import 
FlowResultType - -from .conftest import PlayerMocks - -from tests.common import MockConfigEntry - - -async def test_user_flow_success( - hass: HomeAssistant, mock_setup_entry: AsyncMock, player_mocks: PlayerMocks -) -> None: - """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.1.1.1", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "player-name1111" - assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} - assert result["result"].unique_id == "ff:ff:01:01:01:01-11000" - - mock_setup_entry.assert_called_once() - - -async def test_user_flow_cannot_connect( - hass: HomeAssistant, - player_mocks: PlayerMocks, - mock_setup_entry: AsyncMock, -) -> None: - """Test we handle cannot connect error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - player_mocks.player_data.sync_status_long_polling_mock.set_error( - PlayerUnreachableError("Player not reachable") - ) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.1.1.1", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - assert result["step_id"] == "user" - - player_mocks.player_data.sync_status_long_polling_mock.set_error(None) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.1.1.1", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "player-name1111" - assert result["data"] == { - CONF_HOST: "1.1.1.1", - CONF_PORT: 11000, - } - - mock_setup_entry.assert_called_once() - - -async def test_user_flow_aleady_configured( - hass: HomeAssistant, - player_mocks: PlayerMocks, - config_entry: MockConfigEntry, -) -> None: - """Test we handle already configured.""" - config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.1.1.2", - CONF_PORT: 11000, - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - assert config_entry.data[CONF_HOST] == "1.1.1.2" - - player_mocks.player_data_for_already_configured.player.sync_status.assert_called_once() - - -async def test_import_flow_success( - hass: HomeAssistant, mock_setup_entry: AsyncMock, player_mocks: PlayerMocks -) -> None: - """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={CONF_HOST: "1.1.1.1", CONF_PORT: 11000}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "player-name1111" - assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} - assert result["result"].unique_id == "ff:ff:01:01:01:01-11000" - - mock_setup_entry.assert_called_once() - player_mocks.player_data.player.sync_status.assert_called_once() - - -async def test_import_flow_cannot_connect( - hass: HomeAssistant, player_mocks: PlayerMocks -) -> None: - """Test we handle cannot connect error.""" - player_mocks.player_data.player.sync_status.side_effect = 
PlayerUnreachableError( - "Player not reachable" - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={CONF_HOST: "1.1.1.1", CONF_PORT: 11000}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "cannot_connect" - - player_mocks.player_data.player.sync_status.assert_called_once() - - -async def test_import_flow_already_configured( - hass: HomeAssistant, - player_mocks: PlayerMocks, - config_entry: MockConfigEntry, -) -> None: - """Test we handle already configured.""" - config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={CONF_HOST: "1.1.1.2", CONF_PORT: 11000}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - player_mocks.player_data_for_already_configured.player.sync_status.assert_called_once() - - -async def test_zeroconf_flow_success( - hass: HomeAssistant, mock_setup_entry: AsyncMock, player_mocks: PlayerMocks -) -> None: - """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZeroconfServiceInfo( - ip_address="1.1.1.1", - ip_addresses=["1.1.1.1"], - port=11000, - hostname="player-name1111", - type="_musc._tcp.local.", - name="player-name._musc._tcp.local.", - properties={}, - ), - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - - mock_setup_entry.assert_not_called() - player_mocks.player_data.player.sync_status.assert_called_once() - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "player-name1111" - assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} - assert result["result"].unique_id == "ff:ff:01:01:01:01-11000" - - mock_setup_entry.assert_called_once() - - -async def test_zeroconf_flow_cannot_connect( - hass: HomeAssistant, player_mocks: PlayerMocks -) -> None: - """Test we handle cannot connect error.""" - player_mocks.player_data.player.sync_status.side_effect = PlayerUnreachableError( - "Player not reachable" - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZeroconfServiceInfo( - ip_address="1.1.1.1", - ip_addresses=["1.1.1.1"], - port=11000, - hostname="player-name1111", - type="_musc._tcp.local.", - name="player-name._musc._tcp.local.", - properties={}, - ), - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "cannot_connect" - - player_mocks.player_data.player.sync_status.assert_called_once() - - -async def test_zeroconf_flow_already_configured( - hass: HomeAssistant, - player_mocks: PlayerMocks, - config_entry: MockConfigEntry, -) -> None: - """Test we handle already configured and update the host.""" - config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZeroconfServiceInfo( - ip_address="1.1.1.2", - ip_addresses=["1.1.1.2"], - port=11000, - hostname="player-name1112", - type="_musc._tcp.local.", - name="player-name._musc._tcp.local.", - properties={}, - ), - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - assert config_entry.data[CONF_HOST] == "1.1.1.2" - - 
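Aside: the unique IDs asserted in these flows tie back to the removed conftest above, which fabricates a MAC address from the mocked host IP. A standalone sketch of that derivation and of the lower-cased, port-suffixed unique_id the assertions expect (the lower-casing step is an assumption about how the flow normalizes the MAC; mock_mac_for_host is a helper for this sketch only):

import ipaddress

def mock_mac_for_host(host: str) -> str:
    # Same scheme as the removed conftest: two 0xFF bytes plus the packed IPv4 address.
    packed = ipaddress.ip_address(host).packed
    return ":".join(f"{x:02X}" for x in (0xFF, 0xFF, *packed))

assert mock_mac_for_host("1.1.1.1") == "FF:FF:01:01:01:01"
# The config entries and flow results use the lower-cased MAC plus the port:
assert f"{mock_mac_for_host('1.1.1.1').lower()}-11000" == "ff:ff:01:01:01:01-11000"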
player_mocks.player_data_for_already_configured.player.sync_status.assert_called_once() diff --git a/tests/components/bluesound/test_init.py b/tests/components/bluesound/test_init.py deleted file mode 100644 index 4178c27acad..00000000000 --- a/tests/components/bluesound/test_init.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Test bluesound integration.""" - -from pyblu.errors import PlayerUnreachableError - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from .conftest import PlayerMocks - -from tests.common import MockConfigEntry - - -async def test_setup_entry( - hass: HomeAssistant, setup_config_entry: None, config_entry: MockConfigEntry -) -> None: - """Test a successful setup entry.""" - assert hass.states.get("media_player.player_name1111").state == "playing" - assert config_entry.state is ConfigEntryState.LOADED - - assert await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - assert hass.states.get("media_player.player_name1111").state == "unavailable" - assert config_entry.state is ConfigEntryState.NOT_LOADED - - -async def test_unload_entry_while_player_is_offline( - hass: HomeAssistant, - setup_config_entry: None, - config_entry: MockConfigEntry, - player_mocks: PlayerMocks, -) -> None: - """Test entries can be unloaded correctly while the player is offline.""" - player_mocks.player_data.player.status.side_effect = PlayerUnreachableError( - "Player not reachable" - ) - player_mocks.player_data.status_long_polling_mock.trigger() - - # give the long polling loop a chance to update the state; this could be any async call - await hass.async_block_till_done() - - assert await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - assert hass.states.get("media_player.player_name1111").state == "unavailable" - assert config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/bluesound/test_media_player.py b/tests/components/bluesound/test_media_player.py deleted file mode 100644 index 0bf615de3da..00000000000 --- a/tests/components/bluesound/test_media_player.py +++ /dev/null @@ -1,375 +0,0 @@ -"""Tests for the Bluesound Media Player platform.""" - -import dataclasses -from unittest.mock import call - -from pyblu import PairedPlayer -from pyblu.errors import PlayerUnreachableError -import pytest -from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props - -from homeassistant.components.bluesound import DOMAIN as BLUESOUND_DOMAIN -from homeassistant.components.bluesound.const import ATTR_MASTER -from homeassistant.components.bluesound.services import ( - SERVICE_CLEAR_TIMER, - SERVICE_JOIN, - SERVICE_SET_TIMER, -) -from homeassistant.components.media_player import ( - ATTR_MEDIA_VOLUME_LEVEL, - DOMAIN as MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_NEXT_TRACK, - SERVICE_MEDIA_PAUSE, - SERVICE_MEDIA_PLAY, - SERVICE_MEDIA_PREVIOUS_TRACK, - SERVICE_VOLUME_DOWN, - SERVICE_VOLUME_MUTE, - SERVICE_VOLUME_SET, - SERVICE_VOLUME_UP, - MediaPlayerState, -) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError - -from .conftest import PlayerMocks - - -@pytest.mark.parametrize( - ("service", "method"), - [ - (SERVICE_MEDIA_PAUSE, "pause"), - (SERVICE_MEDIA_PLAY, "play"), - (SERVICE_MEDIA_NEXT_TRACK, "skip"), - (SERVICE_MEDIA_PREVIOUS_TRACK, "back"), - ], -) -async def test_simple_actions( - hass: 
HomeAssistant, - setup_config_entry: None, - player_mocks: PlayerMocks, - service: str, - method: str, -) -> None: - """Test the media player simple actions.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - service, - {ATTR_ENTITY_ID: "media_player.player_name1111"}, - blocking=True, - ) - - getattr(player_mocks.player_data.player, method).assert_called_once_with() - - -async def test_volume_set( - hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks -) -> None: - """Test the media player volume set.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_SET, - {ATTR_ENTITY_ID: "media_player.player_name1111", ATTR_MEDIA_VOLUME_LEVEL: 0.5}, - blocking=True, - ) - - player_mocks.player_data.player.volume.assert_called_once_with(level=50) - - -async def test_volume_mute( - hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks -) -> None: - """Test the media player volume mute.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_MUTE, - {ATTR_ENTITY_ID: "media_player.player_name1111", "is_volume_muted": True}, - blocking=True, - ) - - player_mocks.player_data.player.volume.assert_called_once_with(mute=True) - - -async def test_volume_up( - hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks -) -> None: - """Test the media player volume up.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_UP, - {ATTR_ENTITY_ID: "media_player.player_name1111"}, - blocking=True, - ) - - player_mocks.player_data.player.volume.assert_called_once_with(level=11) - - -async def test_volume_down( - hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks -) -> None: - """Test the media player volume down.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_DOWN, - {ATTR_ENTITY_ID: "media_player.player_name1111"}, - blocking=True, - ) - - player_mocks.player_data.player.volume.assert_called_once_with(level=9) - - -async def test_attributes_set( - hass: HomeAssistant, - setup_config_entry: None, - player_mocks: PlayerMocks, - snapshot: SnapshotAssertion, -) -> None: - """Test the media player attributes set.""" - state = hass.states.get("media_player.player_name1111") - assert state == snapshot(exclude=props("media_position_updated_at")) - - -async def test_stop_maps_to_idle( - hass: HomeAssistant, - setup_config_entry: None, - player_mocks: PlayerMocks, -) -> None: - """Test the media player stop maps to idle.""" - player_mocks.player_data.status_long_polling_mock.set( - dataclasses.replace( - player_mocks.player_data.status_long_polling_mock.get(), state="stop" - ) - ) - - # give the long polling loop a chance to update the state; this could be any async call - await hass.async_block_till_done() - - assert ( - hass.states.get("media_player.player_name1111").state == MediaPlayerState.IDLE - ) - - -async def test_status_updated( - hass: HomeAssistant, - setup_config_entry: None, - player_mocks: PlayerMocks, -) -> None: - """Test the media player status updated.""" - pre_state = hass.states.get("media_player.player_name1111") - assert pre_state.state == "playing" - assert pre_state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.1 - - status = player_mocks.player_data.status_long_polling_mock.get() - status = dataclasses.replace(status, state="pause", volume=50, etag="changed") - player_mocks.player_data.status_long_polling_mock.set(status) - - # give the long polling loop a chance to update the state; this could be any async call - await 
hass.async_block_till_done() - - post_state = hass.states.get("media_player.player_name1111") - - assert post_state.state == MediaPlayerState.PAUSED - assert post_state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.5 - - -async def test_unavailable_when_offline( - hass: HomeAssistant, - setup_config_entry: None, - player_mocks: PlayerMocks, -) -> None: - """Test that the media player goes unavailable when the player is unreachable.""" - pre_state = hass.states.get("media_player.player_name1111") - assert pre_state.state == "playing" - - player_mocks.player_data.status_long_polling_mock.set_error( - PlayerUnreachableError("Player not reachable") - ) - player_mocks.player_data.status_long_polling_mock.trigger() - - # give the long polling loop a chance to update the state; this could be any async call - await hass.async_block_till_done() - - post_state = hass.states.get("media_player.player_name1111") - - assert post_state.state == STATE_UNAVAILABLE - - -async def test_set_sleep_timer( - hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks -) -> None: - """Test the set sleep timer action.""" - await hass.services.async_call( - BLUESOUND_DOMAIN, - SERVICE_SET_TIMER, - {ATTR_ENTITY_ID: "media_player.player_name1111"}, - blocking=True, - ) - - player_mocks.player_data.player.sleep_timer.assert_called_once() - - -async def test_clear_sleep_timer( - hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks -) -> None: - """Test the clear sleep timer action.""" - - player_mocks.player_data.player.sleep_timer.side_effect = [15, 30, 45, 60, 90, 0] - - await hass.services.async_call( - BLUESOUND_DOMAIN, - SERVICE_CLEAR_TIMER, - {ATTR_ENTITY_ID: "media_player.player_name1111"}, - blocking=True, - ) - - player_mocks.player_data.player.sleep_timer.assert_has_calls([call()] * 6) - - -async def test_join_cannot_join_to_self( - hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks -) -> None: - """Test that joining to self is not allowed.""" - with pytest.raises(ServiceValidationError, match="Cannot join player to itself"): - await hass.services.async_call( - BLUESOUND_DOMAIN, - SERVICE_JOIN, - { - ATTR_ENTITY_ID: "media_player.player_name1111", - ATTR_MASTER: "media_player.player_name1111", - }, - blocking=True, - ) - - -async def test_join( - hass: HomeAssistant, - setup_config_entry: None, - setup_config_entry_secondary: None, - player_mocks: PlayerMocks, -) -> None: - """Test the join action.""" - await hass.services.async_call( - BLUESOUND_DOMAIN, - SERVICE_JOIN, - { - ATTR_ENTITY_ID: "media_player.player_name1111", - ATTR_MASTER: "media_player.player_name2222", - }, - blocking=True, - ) - - player_mocks.player_data_secondary.player.add_slave.assert_called_once_with( - "1.1.1.1", 11000 - ) - - -async def test_unjoin( - hass: HomeAssistant, - setup_config_entry: None, - setup_config_entry_secondary: None, - player_mocks: PlayerMocks, -) -> None: - """Test the unjoin action.""" - updated_sync_status = dataclasses.replace( - player_mocks.player_data.sync_status_long_polling_mock.get(), - master=PairedPlayer("2.2.2.2", 11000), - ) - player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) - - # give the long polling loop a chance to update the state; this could be any async call - await hass.async_block_till_done() - - await hass.services.async_call( - BLUESOUND_DOMAIN, - "unjoin", - {ATTR_ENTITY_ID: "media_player.player_name1111"}, - blocking=True, - ) - - 
player_mocks.player_data_secondary.player.remove_slave.assert_called_once_with( - "1.1.1.1", 11000 - ) - - -async def test_attr_master( - hass: HomeAssistant, - setup_config_entry: None, - player_mocks: PlayerMocks, -) -> None: - """Test the media player master.""" - attr_master = hass.states.get("media_player.player_name1111").attributes[ - ATTR_MASTER - ] - assert attr_master is False - - updated_sync_status = dataclasses.replace( - player_mocks.player_data.sync_status_long_polling_mock.get(), - slaves=[PairedPlayer("2.2.2.2", 11000)], - ) - player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) - - # give the long polling loop a chance to update the state; this could be any async call - await hass.async_block_till_done() - - attr_master = hass.states.get("media_player.player_name1111").attributes[ - ATTR_MASTER - ] - - assert attr_master is True - - -async def test_attr_bluesound_group( - hass: HomeAssistant, - setup_config_entry: None, - setup_config_entry_secondary: None, - player_mocks: PlayerMocks, -) -> None: - """Test the media player grouping.""" - attr_bluesound_group = hass.states.get( - "media_player.player_name1111" - ).attributes.get("bluesound_group") - assert attr_bluesound_group is None - - updated_status = dataclasses.replace( - player_mocks.player_data.status_long_polling_mock.get(), - group_name="player-name1111+player-name2222", - ) - player_mocks.player_data.status_long_polling_mock.set(updated_status) - - # give the long polling loop a chance to update the state; this could be any async call - await hass.async_block_till_done() - - attr_bluesound_group = hass.states.get( - "media_player.player_name1111" - ).attributes.get("bluesound_group") - - assert attr_bluesound_group == ["player-name1111", "player-name2222"] - - -async def test_volume_up_from_6_to_7( - hass: HomeAssistant, - setup_config_entry: None, - player_mocks: PlayerMocks, -) -> None: - """Test the media player volume up from 6 to 7. - - This fails if if rounding is not done correctly. See https://github.com/home-assistant/core/issues/129956 for more details. 
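Aside: a minimal illustration of the rounding concern this volume test guards against, assuming (as the assertions above suggest) that the entity converts its 0-1 volume_level back to whole percent and steps by one. volume_up_percent is a hypothetical helper for the sketch, not the media player's code:

def volume_up_percent(volume_level: float) -> int:
    # round() is the safe conversion: a plain int() truncates and can step
    # down instead when volume_level * 100 lands just below the integer.
    return round(volume_level * 100) + 1

assert volume_up_percent(6 / 100) == 7  # the 6 -> 7 case from the test above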
- """ - player_mocks.player_data.status_long_polling_mock.set( - dataclasses.replace( - player_mocks.player_data.status_long_polling_mock.get(), volume=6 - ) - ) - - # give the long polling loop a chance to update the state; this could be any async call - await hass.async_block_till_done() - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_UP, - {ATTR_ENTITY_ID: "media_player.player_name1111"}, - blocking=True, - ) - - player_mocks.player_data.player.volume.assert_called_once_with(level=7) diff --git a/tests/components/bluesound/utils.py b/tests/components/bluesound/utils.py deleted file mode 100644 index 112d077d7f5..00000000000 --- a/tests/components/bluesound/utils.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Utils for bluesound tests.""" - -import asyncio -from typing import Protocol - - -class Etag(Protocol): - """Etag protocol.""" - - etag: str - - -class LongPollingMock[T: Etag]: - """Mock long polling methods(status, sync_status).""" - - def __init__(self, value: T) -> None: - """Store value and allows to wait for changes.""" - self._value = value - self._error: Exception | None = None - self._event = asyncio.Event() - self._event.set() - - def trigger(self): - """Trigger the event without changing the value.""" - self._event.set() - - def set(self, value: T): - """Set the value and notify all waiting.""" - self._value = value - self._event.set() - - def set_error(self, error: Exception | None): - """Set the error and notify all waiting.""" - self._error = error - self._event.set() - - def get(self) -> T: - """Get the value without waiting.""" - return self._value - - async def wait(self) -> T: - """Wait for the value or error to change.""" - await self._event.wait() - self._event.clear() - - return self._value - - def side_effect(self): - """Return the side_effect for mocking.""" - last_etag = None - - async def mock(*args, **kwargs) -> T: - nonlocal last_etag - if self._error is not None: - raise self._error - - etag = kwargs.get("etag") - if etag is None or etag != last_etag: - last_etag = self.get().etag - return self.get() - - value = await self.wait() - last_etag = value.etag - - if self._error is not None: - raise self._error - - return value - - return mock diff --git a/tests/components/bluetooth/__init__.py b/tests/components/bluetooth/__init__.py index 8794d808718..eae867b96d5 100644 --- a/tests/components/bluetooth/__init__.py +++ b/tests/components/bluetooth/__init__.py @@ -271,7 +271,7 @@ async def _async_setup_with_adapter( class MockBleakClient(BleakClient): """Mock bleak client.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Mock init.""" super().__init__(*args, **kwargs) self._device_path = "/dev/test" diff --git a/tests/components/bluetooth/conftest.py b/tests/components/bluetooth/conftest.py index 93a1c59cba1..4373ec3f915 100644 --- a/tests/components/bluetooth/conftest.py +++ b/tests/components/bluetooth/conftest.py @@ -1,12 +1,12 @@ """Tests for the bluetooth component.""" -from collections.abc import Generator from unittest.mock import patch from bleak_retry_connector import bleak_manager from dbus_fast.aio import message_bus import habluetooth.util as habluetooth_utils import pytest +from typing_extensions import Generator @pytest.fixture(name="disable_bluez_manager_socket", autouse=True, scope="package") diff --git a/tests/components/bluetooth/test_init.py b/tests/components/bluetooth/test_init.py index ba8792a79a3..bd38c9cfbae 100644 --- a/tests/components/bluetooth/test_init.py +++ 
b/tests/components/bluetooth/test_init.py @@ -3,7 +3,6 @@ import asyncio from datetime import timedelta import time -from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch from bleak import BleakError @@ -101,7 +100,7 @@ async def test_setup_and_stop_passive( init_kwargs = None class MockPassiveBleakScanner: - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Init the scanner.""" nonlocal init_kwargs init_kwargs = kwargs @@ -152,7 +151,7 @@ async def test_setup_and_stop_old_bluez( init_kwargs = None class MockBleakScanner: - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Init the scanner.""" nonlocal init_kwargs init_kwargs = kwargs @@ -2872,7 +2871,7 @@ async def test_default_address_config_entries_removed_linux( assert not hass.config_entries.async_entries(bluetooth.DOMAIN) -@pytest.mark.usefixtures("one_adapter") +@pytest.mark.usefixtures("enable_bluetooth", "one_adapter") async def test_can_unsetup_bluetooth_single_adapter_linux( hass: HomeAssistant, mock_bleak_scanner_start: MagicMock ) -> None: @@ -2890,17 +2889,12 @@ async def test_can_unsetup_bluetooth_single_adapter_linux( await hass.async_block_till_done() -@pytest.mark.usefixtures("two_adapters") +@pytest.mark.usefixtures("enable_bluetooth", "two_adapters") async def test_can_unsetup_bluetooth_multiple_adapters( hass: HomeAssistant, mock_bleak_scanner_start: MagicMock, ) -> None: """Test we can setup and unsetup bluetooth with multiple adapters.""" - # Setup bluetooth first since otherwise loading the first - # config entry will load the second one as well - await async_setup_component(hass, bluetooth.DOMAIN, {}) - await hass.async_block_till_done() - entry1 = MockConfigEntry( domain=bluetooth.DOMAIN, data={}, unique_id="00:00:00:00:00:01" ) diff --git a/tests/components/bluetooth/test_manager.py b/tests/components/bluetooth/test_manager.py index 0454df9a4a7..4bff7cbe94d 100644 --- a/tests/components/bluetooth/test_manager.py +++ b/tests/components/bluetooth/test_manager.py @@ -1,6 +1,5 @@ """Tests for the Bluetooth integration manager.""" -from collections.abc import Generator from datetime import timedelta import time from typing import Any @@ -12,8 +11,8 @@ from bluetooth_adapters import AdvertisementHistory # pylint: disable-next=no-name-in-module from habluetooth.advertisement_tracker import TRACKER_BUFFERING_WOBBLE_SECONDS import pytest +from typing_extensions import Generator -from homeassistant import config_entries from homeassistant.components import bluetooth from homeassistant.components.bluetooth import ( FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS, @@ -37,7 +36,6 @@ from homeassistant.components.bluetooth.const import ( UNAVAILABLE_TRACK_SECONDS, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.discovery_flow import DiscoveryKey from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from homeassistant.util.json import json_loads @@ -54,13 +52,7 @@ from . 
import ( patch_bluetooth_time, ) -from tests.common import ( - MockConfigEntry, - MockModule, - async_fire_time_changed, - load_fixture, - mock_integration, -) +from tests.common import async_fire_time_changed, load_fixture @pytest.fixture @@ -1010,12 +1002,6 @@ async def test_goes_unavailable_dismisses_discovery_and_makes_discoverable( assert len(mock_config_flow.mock_calls) == 1 assert mock_config_flow.mock_calls[0][1][0] == "switchbot" - assert mock_config_flow.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey( - domain="bluetooth", key="44:44:33:11:23:45", version=1 - ), - "source": "bluetooth", - } assert async_ble_device_from_address(hass, "44:44:33:11:23:45", False) is not None assert async_scanner_count(hass, connectable=False) == 1 @@ -1089,12 +1075,6 @@ async def test_goes_unavailable_dismisses_discovery_and_makes_discoverable( ) assert len(mock_config_flow.mock_calls) == 1 assert mock_config_flow.mock_calls[0][1][0] == "switchbot" - assert mock_config_flow.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey( - domain="bluetooth", key="44:44:33:11:23:45", version=1 - ), - "source": "bluetooth", - } cancel_unavailable() @@ -1288,375 +1268,3 @@ async def test_set_fallback_interval_big(hass: HomeAssistant) -> None: # We should forget fallback interval after it expires assert async_get_fallback_availability_interval(hass, "44:44:33:11:23:12") is None - - -@pytest.mark.usefixtures("mock_bluetooth_adapters") -@pytest.mark.parametrize( - ( - "entry_domain", - "entry_discovery_keys", - ), - [ - # Matching discovery key - ( - "switchbot", - { - "bluetooth": ( - DiscoveryKey( - domain="bluetooth", key="44:44:33:11:23:45", version=1 - ), - ) - }, - ), - # Matching discovery key - ( - "switchbot", - { - "bluetooth": ( - DiscoveryKey( - domain="bluetooth", key="44:44:33:11:23:45", version=1 - ), - ), - "other": (DiscoveryKey(domain="other", key="blah", version=1),), - }, - ), - # Matching discovery key, other domain - # Note: Rediscovery is not currently restricted to the domain of the removed - # entry. Such a check can be added if needed. 
- ( - "comp", - { - "bluetooth": ( - DiscoveryKey( - domain="bluetooth", key="44:44:33:11:23:45", version=1 - ), - ) - }, - ), - ], -) -@pytest.mark.parametrize( - "entry_source", - [ - config_entries.SOURCE_BLUETOOTH, - config_entries.SOURCE_IGNORE, - config_entries.SOURCE_USER, - ], -) -async def test_bluetooth_rediscover( - hass: HomeAssistant, - entry_domain: str, - entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], - entry_source: str, -) -> None: - """Test we reinitiate flows when an ignored config entry is removed.""" - mock_bt = [ - { - "domain": "switchbot", - "service_data_uuid": "050a021a-0000-1000-8000-00805f9b34fb", - "connectable": False, - }, - ] - with patch( - "homeassistant.components.bluetooth.async_get_bluetooth", return_value=mock_bt - ): - assert await async_setup_component(hass, bluetooth.DOMAIN, {}) - await hass.async_block_till_done() - - assert async_scanner_count(hass, connectable=False) == 0 - switchbot_device_non_connectable = generate_ble_device( - "44:44:33:11:23:45", - "wohand", - {}, - rssi=-100, - ) - switchbot_device_adv = generate_advertisement_data( - local_name="wohand", - service_uuids=["050a021a-0000-1000-8000-00805f9b34fb"], - service_data={"050a021a-0000-1000-8000-00805f9b34fb": b"\n\xff"}, - manufacturer_data={1: b"\x01"}, - rssi=-100, - ) - callbacks = [] - - def _fake_subscriber( - service_info: BluetoothServiceInfo, - change: BluetoothChange, - ) -> None: - """Fake subscriber for the BleakScanner.""" - callbacks.append((service_info, change)) - - cancel = bluetooth.async_register_callback( - hass, - _fake_subscriber, - {"address": "44:44:33:11:23:45", "connectable": False}, - BluetoothScanningMode.ACTIVE, - ) - - class FakeScanner(BaseHaRemoteScanner): - def inject_advertisement( - self, device: BLEDevice, advertisement_data: AdvertisementData - ) -> None: - """Inject an advertisement.""" - self._async_on_advertisement( - device.address, - advertisement_data.rssi, - device.name, - advertisement_data.service_uuids, - advertisement_data.service_data, - advertisement_data.manufacturer_data, - advertisement_data.tx_power, - {"scanner_specific_data": "test"}, - MONOTONIC_TIME(), - ) - - def clear_all_devices(self) -> None: - """Clear all devices.""" - self._discovered_device_advertisement_datas.clear() - self._discovered_device_timestamps.clear() - self._previous_service_info.clear() - - connector = ( - HaBluetoothConnector(MockBleakClient, "mock_bleak_client", lambda: False), - ) - non_connectable_scanner = FakeScanner( - "connectable", - "connectable", - connector, - False, - ) - unsetup_connectable_scanner = non_connectable_scanner.async_setup() - cancel_connectable_scanner = _get_manager().async_register_scanner( - non_connectable_scanner - ) - with patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: - non_connectable_scanner.inject_advertisement( - switchbot_device_non_connectable, switchbot_device_adv - ) - await hass.async_block_till_done() - - expected_context = { - "discovery_key": DiscoveryKey( - domain="bluetooth", key="44:44:33:11:23:45", version=1 - ), - "source": "bluetooth", - } - assert len(mock_config_flow.mock_calls) == 1 - assert mock_config_flow.mock_calls[0][1][0] == "switchbot" - assert mock_config_flow.mock_calls[0][2]["context"] == expected_context - - hass.config.components.add(entry_domain) - mock_integration(hass, MockModule(entry_domain)) - - entry = MockConfigEntry( - domain=entry_domain, - discovery_keys=entry_discovery_keys, - unique_id="mock-unique-id", - 
state=config_entries.ConfigEntryState.LOADED, - source=entry_source, - ) - entry.add_to_hass(hass) - - assert ( - async_ble_device_from_address(hass, "44:44:33:11:23:45", False) is not None - ) - assert async_scanner_count(hass, connectable=False) == 1 - assert len(callbacks) == 1 - - assert ( - "44:44:33:11:23:45" - in non_connectable_scanner.discovered_devices_and_advertisement_data - ) - - await hass.config_entries.async_remove(entry.entry_id) - await hass.async_block_till_done() - - assert ( - async_ble_device_from_address(hass, "44:44:33:11:23:45", False) is not None - ) - assert async_scanner_count(hass, connectable=False) == 1 - assert len(callbacks) == 1 - - assert len(mock_config_flow.mock_calls) == 2 - assert mock_config_flow.mock_calls[1][1][0] == "switchbot" - assert mock_config_flow.mock_calls[1][2]["context"] == expected_context - - cancel() - unsetup_connectable_scanner() - cancel_connectable_scanner() - - -@pytest.mark.usefixtures("mock_bluetooth_adapters") -@pytest.mark.parametrize( - ( - "entry_domain", - "entry_discovery_keys", - "entry_source", - "entry_unique_id", - ), - [ - # Discovery key from other domain - ( - "switchbot", - { - "zeroconf": ( - DiscoveryKey(domain="zeroconf", key="44:44:33:11:23:45", version=1), - ) - }, - config_entries.SOURCE_IGNORE, - "mock-unique-id", - ), - # Discovery key from the future - ( - "switchbot", - { - "bluetooth": ( - DiscoveryKey( - domain="bluetooth", key="44:44:33:11:23:45", version=2 - ), - ) - }, - config_entries.SOURCE_IGNORE, - "mock-unique-id", - ), - ], -) -async def test_bluetooth_rediscover_no_match( - hass: HomeAssistant, - entry_domain: str, - entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], - entry_source: str, - entry_unique_id: str, -) -> None: - """Test we don't reinitiate flows when a non matching config entry is removed.""" - mock_bt = [ - { - "domain": "switchbot", - "service_data_uuid": "050a021a-0000-1000-8000-00805f9b34fb", - "connectable": False, - }, - ] - with patch( - "homeassistant.components.bluetooth.async_get_bluetooth", return_value=mock_bt - ): - assert await async_setup_component(hass, bluetooth.DOMAIN, {}) - await hass.async_block_till_done() - - assert async_scanner_count(hass, connectable=False) == 0 - switchbot_device_non_connectable = generate_ble_device( - "44:44:33:11:23:45", - "wohand", - {}, - rssi=-100, - ) - switchbot_device_adv = generate_advertisement_data( - local_name="wohand", - service_uuids=["050a021a-0000-1000-8000-00805f9b34fb"], - service_data={"050a021a-0000-1000-8000-00805f9b34fb": b"\n\xff"}, - manufacturer_data={1: b"\x01"}, - rssi=-100, - ) - callbacks = [] - - def _fake_subscriber( - service_info: BluetoothServiceInfo, - change: BluetoothChange, - ) -> None: - """Fake subscriber for the BleakScanner.""" - callbacks.append((service_info, change)) - - cancel = bluetooth.async_register_callback( - hass, - _fake_subscriber, - {"address": "44:44:33:11:23:45", "connectable": False}, - BluetoothScanningMode.ACTIVE, - ) - - class FakeScanner(BaseHaRemoteScanner): - def inject_advertisement( - self, device: BLEDevice, advertisement_data: AdvertisementData - ) -> None: - """Inject an advertisement.""" - self._async_on_advertisement( - device.address, - advertisement_data.rssi, - device.name, - advertisement_data.service_uuids, - advertisement_data.service_data, - advertisement_data.manufacturer_data, - advertisement_data.tx_power, - {"scanner_specific_data": "test"}, - MONOTONIC_TIME(), - ) - - def clear_all_devices(self) -> None: - """Clear all devices.""" - 
self._discovered_device_advertisement_datas.clear() - self._discovered_device_timestamps.clear() - self._previous_service_info.clear() - - connector = ( - HaBluetoothConnector(MockBleakClient, "mock_bleak_client", lambda: False), - ) - non_connectable_scanner = FakeScanner( - "connectable", - "connectable", - connector, - False, - ) - unsetup_connectable_scanner = non_connectable_scanner.async_setup() - cancel_connectable_scanner = _get_manager().async_register_scanner( - non_connectable_scanner - ) - with patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: - non_connectable_scanner.inject_advertisement( - switchbot_device_non_connectable, switchbot_device_adv - ) - await hass.async_block_till_done() - - expected_context = { - "discovery_key": DiscoveryKey( - domain="bluetooth", key="44:44:33:11:23:45", version=1 - ), - "source": "bluetooth", - } - assert len(mock_config_flow.mock_calls) == 1 - assert mock_config_flow.mock_calls[0][1][0] == "switchbot" - assert mock_config_flow.mock_calls[0][2]["context"] == expected_context - - hass.config.components.add(entry_domain) - mock_integration(hass, MockModule(entry_domain)) - - entry = MockConfigEntry( - domain=entry_domain, - discovery_keys=entry_discovery_keys, - unique_id=entry_unique_id, - state=config_entries.ConfigEntryState.LOADED, - source=entry_source, - ) - entry.add_to_hass(hass) - - assert ( - async_ble_device_from_address(hass, "44:44:33:11:23:45", False) is not None - ) - assert async_scanner_count(hass, connectable=False) == 1 - assert len(callbacks) == 1 - - assert ( - "44:44:33:11:23:45" - in non_connectable_scanner.discovered_devices_and_advertisement_data - ) - - await hass.config_entries.async_remove(entry.entry_id) - await hass.async_block_till_done() - - assert ( - async_ble_device_from_address(hass, "44:44:33:11:23:45", False) is not None - ) - assert async_scanner_count(hass, connectable=False) == 1 - assert len(callbacks) == 1 - assert len(mock_config_flow.mock_calls) == 1 - - cancel() - unsetup_connectable_scanner() - cancel_connectable_scanner() diff --git a/tests/components/bluetooth/test_passive_update_processor.py b/tests/components/bluetooth/test_passive_update_processor.py index d7a7a8ba08c..8e1163c0bdb 100644 --- a/tests/components/bluetooth/test_passive_update_processor.py +++ b/tests/components/bluetooth/test_passive_update_processor.py @@ -583,7 +583,8 @@ async def test_exception_from_update_method( nonlocal run_count run_count += 1 if run_count == 2: - raise Exception("Test exception") # noqa: TRY002 + # pylint: disable-next=broad-exception-raised + raise Exception("Test exception") return GENERIC_PASSIVE_BLUETOOTH_DATA_UPDATE coordinator = PassiveBluetoothProcessorCoordinator( @@ -1417,7 +1418,8 @@ async def test_exception_from_coordinator_update_method( nonlocal run_count run_count += 1 if run_count == 2: - raise Exception("Test exception") # noqa: TRY002 + # pylint: disable-next=broad-exception-raised + raise Exception("Test exception") return {"test": "data"} @callback @@ -1651,12 +1653,12 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart( unregister_binary_sensor_processor() unregister_sensor_processor() - async with async_test_home_assistant() as test_hass: - await async_setup_component(test_hass, DOMAIN, {DOMAIN: {}}) + async with async_test_home_assistant() as hass: + await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) current_entry.set(entry) coordinator = PassiveBluetoothProcessorCoordinator( - test_hass, + hass, _LOGGER, "aa:bb:cc:dd:ee:ff", 
BluetoothScanningMode.ACTIVE, @@ -1704,7 +1706,7 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart( ] sensor_entity_one: PassiveBluetoothProcessorEntity = sensor_entities[0] - sensor_entity_one.hass = test_hass + sensor_entity_one.hass = hass assert sensor_entity_one.available is False # service data not injected assert sensor_entity_one.unique_id == "aa:bb:cc:dd:ee:ff-pressure" assert sensor_entity_one.device_info == { @@ -1721,7 +1723,7 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart( binary_sensor_entity_one: PassiveBluetoothProcessorEntity = ( binary_sensor_entities[0] ) - binary_sensor_entity_one.hass = test_hass + binary_sensor_entity_one.hass = hass assert binary_sensor_entity_one.available is False # service data not injected assert binary_sensor_entity_one.unique_id == "aa:bb:cc:dd:ee:ff-motion" assert binary_sensor_entity_one.device_info == { @@ -1737,7 +1739,7 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart( cancel_coordinator() unregister_binary_sensor_processor() unregister_sensor_processor() - await test_hass.async_stop() + await hass.async_stop() NAMING_PASSIVE_BLUETOOTH_DATA_UPDATE = PassiveBluetoothDataUpdate( diff --git a/tests/components/bluetooth/test_scanner.py b/tests/components/bluetooth/test_scanner.py index 6acb86476e7..dc25f29111c 100644 --- a/tests/components/bluetooth/test_scanner.py +++ b/tests/components/bluetooth/test_scanner.py @@ -3,7 +3,6 @@ import asyncio from datetime import timedelta import time -from typing import Any from unittest.mock import ANY, MagicMock, patch from bleak import BleakError @@ -212,7 +211,7 @@ async def test_recovery_from_dbus_restart(hass: HomeAssistant) -> None: mock_discovered = [] class MockBleakScanner: - def __init__(self, detection_callback, *args: Any, **kwargs: Any) -> None: + def __init__(self, detection_callback, *args, **kwargs): nonlocal _callback _callback = detection_callback @@ -632,7 +631,7 @@ async def test_setup_and_stop_macos( init_kwargs = None class MockBleakScanner: - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Init the scanner.""" nonlocal init_kwargs init_kwargs = kwargs diff --git a/tests/components/bluetooth/test_wrappers.py b/tests/components/bluetooth/test_wrappers.py index c5908776882..0c5645b3f71 100644 --- a/tests/components/bluetooth/test_wrappers.py +++ b/tests/components/bluetooth/test_wrappers.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import Iterator from contextlib import contextmanager from unittest.mock import patch @@ -22,13 +21,13 @@ from homeassistant.components.bluetooth import ( HaBluetoothConnector, HomeAssistantBluetoothManager, ) -from homeassistant.core import CALLBACK_TYPE, HomeAssistant +from homeassistant.core import HomeAssistant from . 
import _get_manager, generate_advertisement_data, generate_ble_device @contextmanager -def mock_shutdown(manager: HomeAssistantBluetoothManager) -> Iterator[None]: +def mock_shutdown(manager: HomeAssistantBluetoothManager) -> None: """Mock shutdown of the HomeAssistantBluetoothManager.""" manager.shutdown = True yield @@ -164,11 +163,7 @@ def mock_platform_client_that_raises_on_connect_fixture(): yield -def _generate_scanners_with_fake_devices( - hass: HomeAssistant, -) -> tuple[ - dict[str, tuple[BLEDevice, AdvertisementData]], CALLBACK_TYPE, CALLBACK_TYPE -]: +def _generate_scanners_with_fake_devices(hass): """Generate scanners with fake devices.""" manager = _get_manager() hci0_device_advs = {} diff --git a/tests/components/bluetooth_le_tracker/test_device_tracker.py b/tests/components/bluetooth_le_tracker/test_device_tracker.py index da90980640b..f183f987cde 100644 --- a/tests/components/bluetooth_le_tracker/test_device_tracker.py +++ b/tests/components/bluetooth_le_tracker/test_device_tracker.py @@ -1,7 +1,6 @@ """Test Bluetooth LE device tracker.""" from datetime import timedelta -from typing import Any from unittest.mock import patch from bleak import BleakError @@ -18,7 +17,7 @@ from homeassistant.components.device_tracker import ( CONF_CONSIDER_HOME, CONF_SCAN_INTERVAL, CONF_TRACK_NEW, - DOMAIN as DEVICE_TRACKER_DOMAIN, + DOMAIN, ) from homeassistant.const import CONF_PLATFORM from homeassistant.core import HomeAssistant @@ -32,7 +31,7 @@ from tests.components.bluetooth import generate_advertisement_data, generate_ble class MockBleakClient: """Mock BleakClient.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Mock BleakClient.""" async def __aenter__(self, *args, **kwargs): @@ -73,7 +72,7 @@ async def test_do_not_see_device_if_time_not_updated(hass: HomeAssistant) -> Non address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" + entity_id = f"{DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -101,9 +100,7 @@ async def test_do_not_see_device_if_time_not_updated(hass: HomeAssistant) -> Non CONF_TRACK_NEW: True, CONF_CONSIDER_HOME: timedelta(minutes=10), } - result = await async_setup_component( - hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} - ) + result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) await hass.async_block_till_done() assert result @@ -138,7 +135,7 @@ async def test_see_device_if_time_updated(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" + entity_id = f"{DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -166,9 +163,7 @@ async def test_see_device_if_time_updated(hass: HomeAssistant) -> None: CONF_TRACK_NEW: True, CONF_CONSIDER_HOME: timedelta(minutes=10), } - result = await async_setup_component( - hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} - ) + result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) assert result # Tick until device seen enough times for to be registered for tracking @@ -219,7 +214,7 @@ async def test_preserve_new_tracked_device_name(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" + entity_id = f"{DOMAIN}.{slugify(name)}" with patch( 
"homeassistant.components.bluetooth.async_discovered_service_info" @@ -246,9 +241,7 @@ async def test_preserve_new_tracked_device_name(hass: HomeAssistant) -> None: CONF_SCAN_INTERVAL: timedelta(minutes=1), CONF_TRACK_NEW: True, } - assert await async_setup_component( - hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} - ) + assert await async_setup_component(hass, DOMAIN, {DOMAIN: config}) await hass.async_block_till_done() # Seen once here; return without name when seen subsequent times @@ -288,7 +281,7 @@ async def test_tracking_battery_times_out(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" + entity_id = f"{DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -317,9 +310,7 @@ async def test_tracking_battery_times_out(hass: HomeAssistant) -> None: CONF_TRACK_BATTERY_INTERVAL: timedelta(minutes=2), CONF_TRACK_NEW: True, } - result = await async_setup_component( - hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} - ) + result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) await hass.async_block_till_done() assert result @@ -356,7 +347,7 @@ async def test_tracking_battery_fails(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" + entity_id = f"{DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -385,9 +376,7 @@ async def test_tracking_battery_fails(hass: HomeAssistant) -> None: CONF_TRACK_BATTERY_INTERVAL: timedelta(minutes=2), CONF_TRACK_NEW: True, } - result = await async_setup_component( - hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} - ) + result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) assert result # Tick until device seen enough times for to be registered for tracking @@ -423,7 +412,7 @@ async def test_tracking_battery_successful(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" + entity_id = f"{DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -452,9 +441,7 @@ async def test_tracking_battery_successful(hass: HomeAssistant) -> None: CONF_TRACK_BATTERY_INTERVAL: timedelta(minutes=2), CONF_TRACK_NEW: True, } - result = await async_setup_component( - hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} - ) + result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) await hass.async_block_till_done() assert result diff --git a/tests/components/bmw_connected_drive/__init__.py b/tests/components/bmw_connected_drive/__init__.py index 4d280a1d0e5..c11d5ef0021 100644 --- a/tests/components/bmw_connected_drive/__init__.py +++ b/tests/components/bmw_connected_drive/__init__.py @@ -1,10 +1,6 @@ """Tests for the for the BMW Connected Drive integration.""" -from bimmer_connected.const import ( - REMOTE_SERVICE_V4_BASE_URL, - VEHICLE_CHARGING_BASE_URL, - VEHICLE_POI_URL, -) +from bimmer_connected.const import REMOTE_SERVICE_BASE_URL, VEHICLE_CHARGING_BASE_URL import respx from homeassistant import config_entries @@ -40,7 +36,7 @@ FIXTURE_CONFIG_ENTRY = { }, "options": {CONF_READ_ONLY: False}, "source": config_entries.SOURCE_USER, - "unique_id": f"{FIXTURE_USER_INPUT[CONF_REGION]}-{FIXTURE_USER_INPUT[CONF_USERNAME]}", + "unique_id": 
f"{FIXTURE_USER_INPUT[CONF_REGION]}-{FIXTURE_USER_INPUT[CONF_REGION]}", } @@ -71,11 +67,10 @@ def check_remote_service_call( first_remote_service_call: respx.models.Call = next( c for c in router.calls - if c.request.url.path.startswith(REMOTE_SERVICE_V4_BASE_URL) + if c.request.url.path.startswith(REMOTE_SERVICE_BASE_URL) or c.request.url.path.startswith( VEHICLE_CHARGING_BASE_URL.replace("/{vin}", "") ) - or c.request.url.path.endswith(VEHICLE_POI_URL.rsplit("/", maxsplit=1)[-1]) ) assert ( first_remote_service_call.request.url.path.endswith(remote_service) is True @@ -92,10 +87,6 @@ def check_remote_service_call( == remote_service_params ) - # Send POI doesn't return a status response, so we can't check it - if remote_service == "send-to-car": - return - # Now check final result last_event_status_call = next( c for c in reversed(router.calls) if c.request.url.path.endswith("eventStatus") diff --git a/tests/components/bmw_connected_drive/conftest.py b/tests/components/bmw_connected_drive/conftest.py index 7581b8c6f76..f69763dae77 100644 --- a/tests/components/bmw_connected_drive/conftest.py +++ b/tests/components/bmw_connected_drive/conftest.py @@ -1,12 +1,11 @@ """Fixtures for BMW tests.""" -from collections.abc import Generator - from bimmer_connected.tests import ALL_CHARGING_SETTINGS, ALL_PROFILES, ALL_STATES from bimmer_connected.tests.common import MyBMWMockRouter from bimmer_connected.vehicle import remote_services import pytest import respx +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/bmw_connected_drive/snapshots/test_binary_sensor.ambr b/tests/components/bmw_connected_drive/snapshots/test_binary_sensor.ambr index c0462279e59..610e194c0e5 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_binary_sensor.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_binary_sensor.ambr @@ -35,6 +35,7 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'battery_charging', 'friendly_name': 'i3 (+ REX) Charging status', }), @@ -82,8 +83,11 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_check_control_messages-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'i3 (+ REX)', 'device_class': 'problem', 'friendly_name': 'i3 (+ REX) Check control messages', + 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_check_control_messages', @@ -129,14 +133,17 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_condition_based_services-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'brake_fluid': 'OK', 'brake_fluid_date': '2022-10-01', + 'car': 'i3 (+ REX)', 'device_class': 'problem', 'friendly_name': 'i3 (+ REX) Condition based services', 'vehicle_check': 'OK', 'vehicle_check_date': '2023-05-01', 'vehicle_tuv': 'OK', 'vehicle_tuv_date': '2023-05-01', + 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_condition_based_services', @@ -182,6 +189,7 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_connection_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'plug', 'friendly_name': 'i3 (+ REX) Connection status', }), @@ -229,9 +237,12 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_door_lock_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 
'Data provided by MyBMW', + 'car': 'i3 (+ REX)', 'device_class': 'lock', 'door_lock_state': 'UNLOCKED', 'friendly_name': 'i3 (+ REX) Door lock state', + 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_door_lock_state', @@ -277,6 +288,8 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_lids-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'i3 (+ REX)', 'device_class': 'opening', 'friendly_name': 'i3 (+ REX) Lids', 'hood': 'CLOSED', @@ -286,6 +299,7 @@ 'rightRear': 'CLOSED', 'sunRoof': 'CLOSED', 'trunk': 'CLOSED', + 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_lids', @@ -331,6 +345,7 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_pre_entry_climatization-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Pre entry climatization', }), 'context': , @@ -377,10 +392,13 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_windows-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'i3 (+ REX)', 'device_class': 'opening', 'friendly_name': 'i3 (+ REX) Windows', 'leftFront': 'CLOSED', 'rightFront': 'CLOSED', + 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_windows', @@ -426,6 +444,7 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'battery_charging', 'friendly_name': 'i4 eDrive40 Charging status', }), @@ -473,9 +492,12 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_check_control_messages-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'i4 eDrive40', 'device_class': 'problem', 'friendly_name': 'i4 eDrive40 Check control messages', 'tire_pressure': 'LOW', + 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_check_control_messages', @@ -521,9 +543,11 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_condition_based_services-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'brake_fluid': 'OK', 'brake_fluid_date': '2024-12-01', 'brake_fluid_distance': '50000 km', + 'car': 'i4 eDrive40', 'device_class': 'problem', 'friendly_name': 'i4 eDrive40 Condition based services', 'tire_wear_front': 'OK', @@ -534,6 +558,7 @@ 'vehicle_tuv': 'OK', 'vehicle_tuv_date': '2024-12-01', 'vehicle_tuv_distance': '50000 km', + 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_condition_based_services', @@ -579,6 +604,7 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_connection_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'plug', 'friendly_name': 'i4 eDrive40 Connection status', }), @@ -626,9 +652,12 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_door_lock_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'i4 eDrive40', 'device_class': 'lock', 'door_lock_state': 'LOCKED', 'friendly_name': 'i4 eDrive40 Door lock state', + 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_door_lock_state', @@ -674,6 +703,8 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_lids-state] StateSnapshot({ 'attributes': 
ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'i4 eDrive40', 'device_class': 'opening', 'friendly_name': 'i4 eDrive40 Lids', 'hood': 'CLOSED', @@ -682,6 +713,7 @@ 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', 'trunk': 'CLOSED', + 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_lids', @@ -727,6 +759,7 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_pre_entry_climatization-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Pre entry climatization', }), 'context': , @@ -773,6 +806,8 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_windows-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'i4 eDrive40', 'device_class': 'opening', 'friendly_name': 'i4 eDrive40 Windows', 'leftFront': 'CLOSED', @@ -780,6 +815,7 @@ 'rear': 'CLOSED', 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', + 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_windows', @@ -825,6 +861,7 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'battery_charging', 'friendly_name': 'iX xDrive50 Charging status', }), @@ -872,9 +909,12 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_check_control_messages-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'iX xDrive50', 'device_class': 'problem', 'friendly_name': 'iX xDrive50 Check control messages', 'tire_pressure': 'LOW', + 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_check_control_messages', @@ -920,9 +960,11 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_condition_based_services-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'brake_fluid': 'OK', 'brake_fluid_date': '2024-12-01', 'brake_fluid_distance': '50000 km', + 'car': 'iX xDrive50', 'device_class': 'problem', 'friendly_name': 'iX xDrive50 Condition based services', 'tire_wear_front': 'OK', @@ -933,6 +975,7 @@ 'vehicle_tuv': 'OK', 'vehicle_tuv_date': '2024-12-01', 'vehicle_tuv_distance': '50000 km', + 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_condition_based_services', @@ -978,6 +1021,7 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_connection_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'plug', 'friendly_name': 'iX xDrive50 Connection status', }), @@ -1025,9 +1069,12 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_door_lock_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'iX xDrive50', 'device_class': 'lock', 'door_lock_state': 'LOCKED', 'friendly_name': 'iX xDrive50 Door lock state', + 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_door_lock_state', @@ -1073,6 +1120,8 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_lids-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'iX xDrive50', 'device_class': 'opening', 'friendly_name': 'iX xDrive50 Lids', 'hood': 'CLOSED', @@ -1082,6 +1131,7 @@ 'rightRear': 'CLOSED', 'sunRoof': 'CLOSED', 'trunk': 'CLOSED', + 'vin': 'WBA00000000DEMO01', }), 
'context': , 'entity_id': 'binary_sensor.ix_xdrive50_lids', @@ -1127,6 +1177,7 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_pre_entry_climatization-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Pre entry climatization', }), 'context': , @@ -1173,6 +1224,8 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_windows-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'iX xDrive50', 'device_class': 'opening', 'friendly_name': 'iX xDrive50 Windows', 'leftFront': 'CLOSED', @@ -1180,6 +1233,7 @@ 'rear': 'CLOSED', 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', + 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_windows', @@ -1225,10 +1279,13 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_check_control_messages-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'M340i xDrive', 'device_class': 'problem', 'engine_oil': 'LOW', 'friendly_name': 'M340i xDrive Check control messages', 'tire_pressure': 'LOW', + 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_check_control_messages', @@ -1274,9 +1331,11 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_condition_based_services-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'brake_fluid': 'OK', 'brake_fluid_date': '2024-12-01', 'brake_fluid_distance': '50000 km', + 'car': 'M340i xDrive', 'device_class': 'problem', 'friendly_name': 'M340i xDrive Condition based services', 'oil': 'OK', @@ -1290,6 +1349,7 @@ 'vehicle_tuv': 'OK', 'vehicle_tuv_date': '2024-12-01', 'vehicle_tuv_distance': '50000 km', + 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_condition_based_services', @@ -1335,9 +1395,12 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_door_lock_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'M340i xDrive', 'device_class': 'lock', 'door_lock_state': 'LOCKED', 'friendly_name': 'M340i xDrive Door lock state', + 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_door_lock_state', @@ -1383,6 +1446,8 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_lids-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'M340i xDrive', 'device_class': 'opening', 'friendly_name': 'M340i xDrive Lids', 'hood': 'CLOSED', @@ -1391,6 +1456,7 @@ 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', 'trunk': 'CLOSED', + 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_lids', @@ -1436,6 +1502,8 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_windows-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'M340i xDrive', 'device_class': 'opening', 'friendly_name': 'M340i xDrive Windows', 'leftFront': 'CLOSED', @@ -1443,6 +1511,7 @@ 'rear': 'CLOSED', 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', + 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_windows', diff --git a/tests/components/bmw_connected_drive/snapshots/test_button.ambr b/tests/components/bmw_connected_drive/snapshots/test_button.ambr index f38441125ce..cd3f94c7e5e 100644 --- 
a/tests/components/bmw_connected_drive/snapshots/test_button.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_button.ambr @@ -35,6 +35,7 @@ # name: test_entity_state_attrs[button.i3_rex_activate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Activate air conditioning', }), 'context': , @@ -81,6 +82,7 @@ # name: test_entity_state_attrs[button.i3_rex_find_vehicle-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Find vehicle', }), 'context': , @@ -127,6 +129,7 @@ # name: test_entity_state_attrs[button.i3_rex_flash_lights-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Flash lights', }), 'context': , @@ -173,6 +176,7 @@ # name: test_entity_state_attrs[button.i3_rex_sound_horn-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Sound horn', }), 'context': , @@ -219,6 +223,7 @@ # name: test_entity_state_attrs[button.i4_edrive40_activate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Activate air conditioning', }), 'context': , @@ -265,6 +270,7 @@ # name: test_entity_state_attrs[button.i4_edrive40_deactivate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Deactivate air conditioning', }), 'context': , @@ -311,6 +317,7 @@ # name: test_entity_state_attrs[button.i4_edrive40_find_vehicle-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Find vehicle', }), 'context': , @@ -357,6 +364,7 @@ # name: test_entity_state_attrs[button.i4_edrive40_flash_lights-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Flash lights', }), 'context': , @@ -403,6 +411,7 @@ # name: test_entity_state_attrs[button.i4_edrive40_sound_horn-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Sound horn', }), 'context': , @@ -449,6 +458,7 @@ # name: test_entity_state_attrs[button.ix_xdrive50_activate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Activate air conditioning', }), 'context': , @@ -495,6 +505,7 @@ # name: test_entity_state_attrs[button.ix_xdrive50_deactivate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Deactivate air conditioning', }), 'context': , @@ -541,6 +552,7 @@ # name: test_entity_state_attrs[button.ix_xdrive50_find_vehicle-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Find vehicle', }), 'context': , @@ -587,6 +599,7 @@ # name: test_entity_state_attrs[button.ix_xdrive50_flash_lights-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Flash lights', }), 'context': , @@ -633,6 +646,7 @@ # name: test_entity_state_attrs[button.ix_xdrive50_sound_horn-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 
'friendly_name': 'iX xDrive50 Sound horn', }), 'context': , @@ -679,6 +693,7 @@ # name: test_entity_state_attrs[button.m340i_xdrive_activate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Activate air conditioning', }), 'context': , @@ -725,6 +740,7 @@ # name: test_entity_state_attrs[button.m340i_xdrive_deactivate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Deactivate air conditioning', }), 'context': , @@ -771,6 +787,7 @@ # name: test_entity_state_attrs[button.m340i_xdrive_find_vehicle-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Find vehicle', }), 'context': , @@ -817,6 +834,7 @@ # name: test_entity_state_attrs[button.m340i_xdrive_flash_lights-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Flash lights', }), 'context': , @@ -863,6 +881,7 @@ # name: test_entity_state_attrs[button.m340i_xdrive_sound_horn-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Sound horn', }), 'context': , diff --git a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr index 81ef1220069..477cd24376d 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr @@ -232,19 +232,16 @@ }), 'capabilities': dict({ 'a4aType': 'BLUETOOTH', - 'alarmSystem': True, + 'checkSustainabilityDPP': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_2_UWB', - 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', - 'vehicleSoftwareUpgradeRequired': True, }), 'horn': True, 'inCarCamera': True, - 'inCarCameraDwa': True, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -255,38 +252,27 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, - 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, - 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isPlugAndChargeSupported': False, - 'isRemoteEngineStartEnabled': False, - 'isRemoteEngineStartSupported': True, + 'isRemoteEngineStartSupported': False, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, - 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, - 'locationBasedCommerceFeatures': dict({ - 'fueling': False, - 'parking': False, - 'reservations': False, - }), 'lock': True, 'remote360': True, 
'remoteChargingCommands': dict({ @@ -301,45 +287,11 @@ 'NOT_SUPPORTED', ]), }), - 'remoteServices': dict({ - 'doorLock': dict({ - 'id': 'doorLock', - 'state': 'ACTIVATED', - }), - 'doorUnlock': dict({ - 'id': 'doorUnlock', - 'state': 'ACTIVATED', - }), - 'hornBlow': dict({ - 'id': 'hornBlow', - 'state': 'ACTIVATED', - }), - 'inCarCamera': dict({ - 'id': 'inCarCamera', - 'state': 'ACTIVATED', - }), - 'inCarCameraDwa': dict({ - 'id': 'inCarCameraDwa', - 'state': 'ACTIVATED', - }), - 'lightFlash': dict({ - 'id': 'lightFlash', - 'state': 'ACTIVATED', - }), - 'remote360': dict({ - 'id': 'remote360', - 'state': 'ACTIVATED', - }), - 'surroundViewRecorder': dict({ - 'id': 'surroundViewRecorder', - 'state': 'ACTIVATED', - }), - }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': True, + 'speechThirdPartyAlexa': False, 'speechThirdPartyAlexaSDK': False, 'surroundViewRecorder': True, 'unlock': True, @@ -618,7 +570,6 @@ 'roofState': 'CLOSED', 'roofStateType': 'SUN_ROOF', }), - 'securityOverviewMode': 'ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -709,18 +660,6 @@ }), }), }), - 'vehicleSoftwareVersion': dict({ - 'iStep': dict({ - 'iStep': 0, - 'month': 0, - 'seriesCluster': '', - 'year': 0, - }), - 'puStep': dict({ - 'month': 0, - 'year': 0, - }), - }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -1147,19 +1086,15 @@ }), 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'alarmSystem': False, + 'checkSustainabilityDPP': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', - 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', - 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, - 'inCarCamera': False, - 'inCarCameraDwa': False, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -1170,80 +1105,37 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, - 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, - 'isCustomerEsimSupported': False, + 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, - 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isPlugAndChargeSupported': False, - 'isRemoteEngineStartEnabled': False, - 'isRemoteEngineStartSupported': True, + 'isRemoteEngineStartSupported': False, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, - 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, - 'locationBasedCommerceFeatures': dict({ - 'fueling': False, - 'parking': False, - 'reservations': False, - }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), - 'remoteServices': dict({ - 'doorLock': dict({ - 'id': 'doorLock', - 'state': 'ACTIVATED', - }), - 'doorUnlock': dict({ - 'id': 'doorUnlock', - 'state': 
'ACTIVATED', - }), - 'hornBlow': dict({ - 'id': 'hornBlow', - 'state': 'ACTIVATED', - }), - 'inCarCamera': dict({ - 'id': 'inCarCamera', - }), - 'inCarCameraDwa': dict({ - 'id': 'inCarCameraDwa', - }), - 'lightFlash': dict({ - 'id': 'lightFlash', - 'state': 'ACTIVATED', - }), - 'remote360': dict({ - 'id': 'remote360', - 'state': 'ACTIVATED', - }), - 'surroundViewRecorder': dict({ - 'id': 'surroundViewRecorder', - }), - }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': True, + 'speechThirdPartyAlexa': False, 'speechThirdPartyAlexaSDK': False, - 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -1516,7 +1408,6 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), - 'securityOverviewMode': 'NOT_ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -1607,18 +1498,6 @@ }), }), }), - 'vehicleSoftwareVersion': dict({ - 'iStep': dict({ - 'iStep': 0, - 'month': 0, - 'seriesCluster': '', - 'year': 0, - }), - 'puStep': dict({ - 'month': 0, - 'year': 0, - }), - }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -1961,20 +1840,16 @@ }), 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'alarmSystem': False, + 'checkSustainabilityDPP': False, 'climateFunction': 'VENTILATION', 'climateNow': True, 'climateTimerTrigger': 'DEPARTURE_TIMER', 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', - 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', - 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, - 'inCarCamera': False, - 'inCarCameraDwa': False, 'isBmwChargingSupported': False, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': False, @@ -1992,73 +1867,31 @@ 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': False, 'isEvGoChargingSupported': False, - 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isPlugAndChargeSupported': False, - 'isRemoteEngineStartEnabled': False, - 'isRemoteEngineStartSupported': True, + 'isRemoteEngineStartSupported': False, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, - 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': False, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isThirdPartyAppStoreSupported': False, - 'isWifiHotspotServiceSupported': False, + 'isWifiHotspotServiceSupported': True, 'lastStateCallState': 'ACTIVATED', 'lights': True, - 'locationBasedCommerceFeatures': dict({ - 'fueling': False, - 'parking': False, - 'reservations': False, - }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), - 'remoteServices': dict({ - 'doorLock': dict({ - 'id': 'doorLock', - 'state': 'ACTIVATED', - }), - 'doorUnlock': dict({ - 'id': 'doorUnlock', - 'state': 'ACTIVATED', - }), - 'hornBlow': dict({ - 'id': 'hornBlow', - 'state': 'ACTIVATED', - }), - 'inCarCamera': dict({ - 'id': 'inCarCamera', - }), - 'inCarCameraDwa': dict({ - 'id': 'inCarCameraDwa', - }), - 'lightFlash': dict({ - 'id': 'lightFlash', - 'state': 'ACTIVATED', - }), - 'remote360': dict({ - 'id': 'remote360', - 'state': 'ACTIVATED', - }), - 'surroundViewRecorder': dict({ - 'id': 'surroundViewRecorder', - }), - }), 'remoteSoftwareUpgrade': True, 'sendPoi': 
True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': True, + 'speechThirdPartyAlexa': False, 'speechThirdPartyAlexaSDK': False, - 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -2194,7 +2027,6 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), - 'securityOverviewMode': None, 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -2281,18 +2113,6 @@ }), }), }), - 'vehicleSoftwareVersion': dict({ - 'iStep': dict({ - 'iStep': 0, - 'month': 0, - 'seriesCluster': '', - 'year': 0, - }), - 'puStep': dict({ - 'month': 0, - 'year': 0, - }), - }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -3122,6 +2942,226 @@ }), ]), 'fingerprint': list([ + dict({ + 'content': dict({ + 'capabilities': dict({ + 'climateFunction': 'AIR_CONDITIONING', + 'climateNow': True, + 'climateTimerTrigger': 'DEPARTURE_TIMER', + 'horn': True, + 'isBmwChargingSupported': True, + 'isCarSharingSupported': False, + 'isChargeNowForBusinessSupported': False, + 'isChargingHistorySupported': True, + 'isChargingHospitalityEnabled': False, + 'isChargingLoudnessEnabled': False, + 'isChargingPlanSupported': True, + 'isChargingPowerLimitEnabled': False, + 'isChargingSettingsEnabled': False, + 'isChargingTargetSocEnabled': False, + 'isClimateTimerSupported': True, + 'isCustomerEsimSupported': False, + 'isDCSContractManagementSupported': True, + 'isDataPrivacyEnabled': False, + 'isEasyChargeEnabled': False, + 'isEvGoChargingSupported': False, + 'isMiniChargingSupported': False, + 'isNonLscFeatureEnabled': False, + 'isRemoteEngineStartSupported': False, + 'isRemoteHistoryDeletionSupported': False, + 'isRemoteHistorySupported': True, + 'isRemoteParkingSupported': False, + 'isRemoteServicesActivationRequired': False, + 'isRemoteServicesBookingRequired': False, + 'isScanAndChargeSupported': False, + 'isSustainabilitySupported': False, + 'isWifiHotspotServiceSupported': False, + 'lastStateCallState': 'ACTIVATED', + 'lights': True, + 'lock': True, + 'remoteChargingCommands': dict({ + }), + 'sendPoi': True, + 'specialThemeSupport': list([ + ]), + 'unlock': True, + 'vehicleFinder': False, + 'vehicleStateSource': 'LAST_STATE_CALL', + }), + 'state': dict({ + 'chargingProfile': dict({ + 'chargingControlType': 'WEEKLY_PLANNER', + 'chargingMode': 'DELAYED_CHARGING', + 'chargingPreference': 'CHARGING_WINDOW', + 'chargingSettings': dict({ + 'hospitality': 'NO_ACTION', + 'idcc': 'NO_ACTION', + 'targetSoc': 100, + }), + 'climatisationOn': False, + 'departureTimes': list([ + dict({ + 'action': 'DEACTIVATE', + 'id': 1, + 'timeStamp': dict({ + 'hour': 7, + 'minute': 35, + }), + 'timerWeekDays': list([ + 'MONDAY', + 'TUESDAY', + 'WEDNESDAY', + 'THURSDAY', + 'FRIDAY', + ]), + }), + dict({ + 'action': 'DEACTIVATE', + 'id': 2, + 'timeStamp': dict({ + 'hour': 18, + 'minute': 0, + }), + 'timerWeekDays': list([ + 'MONDAY', + 'TUESDAY', + 'WEDNESDAY', + 'THURSDAY', + 'FRIDAY', + 'SATURDAY', + 'SUNDAY', + ]), + }), + dict({ + 'action': 'DEACTIVATE', + 'id': 3, + 'timeStamp': dict({ + 'hour': 7, + 'minute': 0, + }), + 'timerWeekDays': list([ + ]), + }), + dict({ + 'action': 'DEACTIVATE', + 'id': 4, + 'timerWeekDays': list([ + ]), + }), + ]), + 'reductionOfChargeCurrent': dict({ + 'end': dict({ + 'hour': 1, + 'minute': 30, + }), + 'start': dict({ + 'hour': 18, + 'minute': 1, + }), + }), + }), + 'checkControlMessages': list([ + ]), + 'climateTimers': list([ + dict({ + 'departureTime': dict({ + 'hour': 6, + 'minute': 40, + }), + 'isWeeklyTimer': True, + 'timerAction': 
'ACTIVATE', + 'timerWeekDays': list([ + 'THURSDAY', + 'SUNDAY', + ]), + }), + dict({ + 'departureTime': dict({ + 'hour': 12, + 'minute': 50, + }), + 'isWeeklyTimer': False, + 'timerAction': 'ACTIVATE', + 'timerWeekDays': list([ + 'MONDAY', + ]), + }), + dict({ + 'departureTime': dict({ + 'hour': 18, + 'minute': 59, + }), + 'isWeeklyTimer': True, + 'timerAction': 'DEACTIVATE', + 'timerWeekDays': list([ + 'WEDNESDAY', + ]), + }), + ]), + 'combustionFuelLevel': dict({ + 'range': 105, + 'remainingFuelLiters': 6, + }), + 'currentMileage': 137009, + 'doorsState': dict({ + 'combinedSecurityState': 'UNLOCKED', + 'combinedState': 'CLOSED', + 'hood': 'CLOSED', + 'leftFront': 'CLOSED', + 'leftRear': 'CLOSED', + 'rightFront': 'CLOSED', + 'rightRear': 'CLOSED', + 'trunk': 'CLOSED', + }), + 'driverPreferences': dict({ + 'lscPrivacyMode': 'OFF', + }), + 'electricChargingState': dict({ + 'chargingConnectionType': 'CONDUCTIVE', + 'chargingLevelPercent': 82, + 'chargingStatus': 'WAITING_FOR_CHARGING', + 'chargingTarget': 100, + 'isChargerConnected': True, + 'range': 174, + }), + 'isLeftSteering': True, + 'isLscSupported': True, + 'lastFetched': '2022-06-22T14:24:23.982Z', + 'lastUpdatedAt': '2022-06-22T13:58:52Z', + 'range': 174, + 'requiredServices': list([ + dict({ + 'dateTime': '2022-10-01T00:00:00.000Z', + 'description': 'Next service due by the specified date.', + 'status': 'OK', + 'type': 'BRAKE_FLUID', + }), + dict({ + 'dateTime': '2023-05-01T00:00:00.000Z', + 'description': 'Next vehicle check due after the specified distance or date.', + 'status': 'OK', + 'type': 'VEHICLE_CHECK', + }), + dict({ + 'dateTime': '2023-05-01T00:00:00.000Z', + 'description': 'Next state inspection due by the specified date.', + 'status': 'OK', + 'type': 'VEHICLE_TUV', + }), + ]), + 'roofState': dict({ + 'roofState': 'CLOSED', + 'roofStateType': 'SUN_ROOF', + }), + 'windowsState': dict({ + 'combinedState': 'CLOSED', + 'leftFront': 'CLOSED', + 'rightFront': 'CLOSED', + }), + }), + }), + 'filename': 'bmw-eadrax-vcs_v4_vehicles_state_WBY0FINGERPRINT04.json', + }), dict({ 'content': dict({ 'chargeAndClimateSettings': dict({ @@ -3195,31 +3235,20 @@ }), 'filename': 'mini-eadrax-vcs_v5_vehicle-list.json', }), - dict({ - 'content': dict({ - 'gcid': 'ceb64158-d2ca-47e9-9ee6-cbffb881434e', - 'mappingInfos': list([ - ]), - }), - 'filename': 'toyota-eadrax-vcs_v5_vehicle-list.json', - }), dict({ 'content': dict({ 'capabilities': dict({ 'a4aType': 'BLUETOOTH', - 'alarmSystem': True, + 'checkSustainabilityDPP': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_2_UWB', - 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', - 'vehicleSoftwareUpgradeRequired': True, }), 'horn': True, 'inCarCamera': True, - 'inCarCameraDwa': True, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -3230,38 +3259,27 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, - 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, - 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isPlugAndChargeSupported': False, - 'isRemoteEngineStartEnabled': 
False, - 'isRemoteEngineStartSupported': True, + 'isRemoteEngineStartSupported': False, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, - 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, - 'locationBasedCommerceFeatures': dict({ - 'fueling': False, - 'parking': False, - 'reservations': False, - }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ @@ -3276,45 +3294,11 @@ 'NOT_SUPPORTED', ]), }), - 'remoteServices': dict({ - 'doorLock': dict({ - 'id': 'doorLock', - 'state': 'ACTIVATED', - }), - 'doorUnlock': dict({ - 'id': 'doorUnlock', - 'state': 'ACTIVATED', - }), - 'hornBlow': dict({ - 'id': 'hornBlow', - 'state': 'ACTIVATED', - }), - 'inCarCamera': dict({ - 'id': 'inCarCamera', - 'state': 'ACTIVATED', - }), - 'inCarCameraDwa': dict({ - 'id': 'inCarCameraDwa', - 'state': 'ACTIVATED', - }), - 'lightFlash': dict({ - 'id': 'lightFlash', - 'state': 'ACTIVATED', - }), - 'remote360': dict({ - 'id': 'remote360', - 'state': 'ACTIVATED', - }), - 'surroundViewRecorder': dict({ - 'id': 'surroundViewRecorder', - 'state': 'ACTIVATED', - }), - }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': True, + 'speechThirdPartyAlexa': False, 'speechThirdPartyAlexaSDK': False, 'surroundViewRecorder': True, 'unlock': True, @@ -3492,7 +3476,6 @@ 'roofState': 'CLOSED', 'roofStateType': 'SUN_ROOF', }), - 'securityOverviewMode': 'ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -3583,18 +3566,6 @@ }), }), }), - 'vehicleSoftwareVersion': dict({ - 'iStep': dict({ - 'iStep': 0, - 'month': 0, - 'seriesCluster': '', - 'year': 0, - }), - 'puStep': dict({ - 'month': 0, - 'year': 0, - }), - }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -3714,19 +3685,15 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'alarmSystem': False, + 'checkSustainabilityDPP': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', - 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', - 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, - 'inCarCamera': False, - 'inCarCameraDwa': False, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -3737,80 +3704,37 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, - 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, - 'isCustomerEsimSupported': False, + 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, - 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isPlugAndChargeSupported': False, - 'isRemoteEngineStartEnabled': False, - 'isRemoteEngineStartSupported': True, + 'isRemoteEngineStartSupported': False, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, - 
'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, - 'locationBasedCommerceFeatures': dict({ - 'fueling': False, - 'parking': False, - 'reservations': False, - }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), - 'remoteServices': dict({ - 'doorLock': dict({ - 'id': 'doorLock', - 'state': 'ACTIVATED', - }), - 'doorUnlock': dict({ - 'id': 'doorUnlock', - 'state': 'ACTIVATED', - }), - 'hornBlow': dict({ - 'id': 'hornBlow', - 'state': 'ACTIVATED', - }), - 'inCarCamera': dict({ - 'id': 'inCarCamera', - }), - 'inCarCameraDwa': dict({ - 'id': 'inCarCameraDwa', - }), - 'lightFlash': dict({ - 'id': 'lightFlash', - 'state': 'ACTIVATED', - }), - 'remote360': dict({ - 'id': 'remote360', - 'state': 'ACTIVATED', - }), - 'surroundViewRecorder': dict({ - 'id': 'surroundViewRecorder', - }), - }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': True, + 'speechThirdPartyAlexa': False, 'speechThirdPartyAlexaSDK': False, - 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -3982,7 +3906,6 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), - 'securityOverviewMode': 'NOT_ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -4073,18 +3996,6 @@ }), }), }), - 'vehicleSoftwareVersion': dict({ - 'iStep': dict({ - 'iStep': 0, - 'month': 0, - 'seriesCluster': '', - 'year': 0, - }), - 'puStep': dict({ - 'month': 0, - 'year': 0, - }), - }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -4204,20 +4115,16 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'alarmSystem': False, + 'checkSustainabilityDPP': False, 'climateFunction': 'VENTILATION', 'climateNow': True, 'climateTimerTrigger': 'DEPARTURE_TIMER', 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', - 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', - 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, - 'inCarCamera': False, - 'inCarCameraDwa': False, 'isBmwChargingSupported': False, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': False, @@ -4235,73 +4142,31 @@ 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': False, 'isEvGoChargingSupported': False, - 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isPlugAndChargeSupported': False, - 'isRemoteEngineStartEnabled': False, - 'isRemoteEngineStartSupported': True, + 'isRemoteEngineStartSupported': False, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, - 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': False, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isThirdPartyAppStoreSupported': False, - 'isWifiHotspotServiceSupported': False, + 'isWifiHotspotServiceSupported': True, 'lastStateCallState': 'ACTIVATED', 'lights': True, - 'locationBasedCommerceFeatures': dict({ - 
'fueling': False, - 'parking': False, - 'reservations': False, - }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), - 'remoteServices': dict({ - 'doorLock': dict({ - 'id': 'doorLock', - 'state': 'ACTIVATED', - }), - 'doorUnlock': dict({ - 'id': 'doorUnlock', - 'state': 'ACTIVATED', - }), - 'hornBlow': dict({ - 'id': 'hornBlow', - 'state': 'ACTIVATED', - }), - 'inCarCamera': dict({ - 'id': 'inCarCamera', - }), - 'inCarCameraDwa': dict({ - 'id': 'inCarCameraDwa', - }), - 'lightFlash': dict({ - 'id': 'lightFlash', - 'state': 'ACTIVATED', - }), - 'remote360': dict({ - 'id': 'remote360', - 'state': 'ACTIVATED', - }), - 'surroundViewRecorder': dict({ - 'id': 'surroundViewRecorder', - }), - }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': True, + 'speechThirdPartyAlexa': False, 'speechThirdPartyAlexaSDK': False, - 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -4435,7 +4300,6 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), - 'securityOverviewMode': None, 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -4522,18 +4386,6 @@ }), }), }), - 'vehicleSoftwareVersion': dict({ - 'iStep': dict({ - 'iStep': 0, - 'month': 0, - 'seriesCluster': '', - 'year': 0, - }), - 'puStep': dict({ - 'month': 0, - 'year': 0, - }), - }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -5491,6 +5343,226 @@ 'vin': '**REDACTED**', }), 'fingerprint': list([ + dict({ + 'content': dict({ + 'capabilities': dict({ + 'climateFunction': 'AIR_CONDITIONING', + 'climateNow': True, + 'climateTimerTrigger': 'DEPARTURE_TIMER', + 'horn': True, + 'isBmwChargingSupported': True, + 'isCarSharingSupported': False, + 'isChargeNowForBusinessSupported': False, + 'isChargingHistorySupported': True, + 'isChargingHospitalityEnabled': False, + 'isChargingLoudnessEnabled': False, + 'isChargingPlanSupported': True, + 'isChargingPowerLimitEnabled': False, + 'isChargingSettingsEnabled': False, + 'isChargingTargetSocEnabled': False, + 'isClimateTimerSupported': True, + 'isCustomerEsimSupported': False, + 'isDCSContractManagementSupported': True, + 'isDataPrivacyEnabled': False, + 'isEasyChargeEnabled': False, + 'isEvGoChargingSupported': False, + 'isMiniChargingSupported': False, + 'isNonLscFeatureEnabled': False, + 'isRemoteEngineStartSupported': False, + 'isRemoteHistoryDeletionSupported': False, + 'isRemoteHistorySupported': True, + 'isRemoteParkingSupported': False, + 'isRemoteServicesActivationRequired': False, + 'isRemoteServicesBookingRequired': False, + 'isScanAndChargeSupported': False, + 'isSustainabilitySupported': False, + 'isWifiHotspotServiceSupported': False, + 'lastStateCallState': 'ACTIVATED', + 'lights': True, + 'lock': True, + 'remoteChargingCommands': dict({ + }), + 'sendPoi': True, + 'specialThemeSupport': list([ + ]), + 'unlock': True, + 'vehicleFinder': False, + 'vehicleStateSource': 'LAST_STATE_CALL', + }), + 'state': dict({ + 'chargingProfile': dict({ + 'chargingControlType': 'WEEKLY_PLANNER', + 'chargingMode': 'DELAYED_CHARGING', + 'chargingPreference': 'CHARGING_WINDOW', + 'chargingSettings': dict({ + 'hospitality': 'NO_ACTION', + 'idcc': 'NO_ACTION', + 'targetSoc': 100, + }), + 'climatisationOn': False, + 'departureTimes': list([ + dict({ + 'action': 'DEACTIVATE', + 'id': 1, + 'timeStamp': dict({ + 'hour': 7, + 'minute': 35, + }), + 'timerWeekDays': list([ + 'MONDAY', + 'TUESDAY', + 'WEDNESDAY', + 'THURSDAY', + 'FRIDAY', + ]), + }), + dict({ + 
'action': 'DEACTIVATE', + 'id': 2, + 'timeStamp': dict({ + 'hour': 18, + 'minute': 0, + }), + 'timerWeekDays': list([ + 'MONDAY', + 'TUESDAY', + 'WEDNESDAY', + 'THURSDAY', + 'FRIDAY', + 'SATURDAY', + 'SUNDAY', + ]), + }), + dict({ + 'action': 'DEACTIVATE', + 'id': 3, + 'timeStamp': dict({ + 'hour': 7, + 'minute': 0, + }), + 'timerWeekDays': list([ + ]), + }), + dict({ + 'action': 'DEACTIVATE', + 'id': 4, + 'timerWeekDays': list([ + ]), + }), + ]), + 'reductionOfChargeCurrent': dict({ + 'end': dict({ + 'hour': 1, + 'minute': 30, + }), + 'start': dict({ + 'hour': 18, + 'minute': 1, + }), + }), + }), + 'checkControlMessages': list([ + ]), + 'climateTimers': list([ + dict({ + 'departureTime': dict({ + 'hour': 6, + 'minute': 40, + }), + 'isWeeklyTimer': True, + 'timerAction': 'ACTIVATE', + 'timerWeekDays': list([ + 'THURSDAY', + 'SUNDAY', + ]), + }), + dict({ + 'departureTime': dict({ + 'hour': 12, + 'minute': 50, + }), + 'isWeeklyTimer': False, + 'timerAction': 'ACTIVATE', + 'timerWeekDays': list([ + 'MONDAY', + ]), + }), + dict({ + 'departureTime': dict({ + 'hour': 18, + 'minute': 59, + }), + 'isWeeklyTimer': True, + 'timerAction': 'DEACTIVATE', + 'timerWeekDays': list([ + 'WEDNESDAY', + ]), + }), + ]), + 'combustionFuelLevel': dict({ + 'range': 105, + 'remainingFuelLiters': 6, + }), + 'currentMileage': 137009, + 'doorsState': dict({ + 'combinedSecurityState': 'UNLOCKED', + 'combinedState': 'CLOSED', + 'hood': 'CLOSED', + 'leftFront': 'CLOSED', + 'leftRear': 'CLOSED', + 'rightFront': 'CLOSED', + 'rightRear': 'CLOSED', + 'trunk': 'CLOSED', + }), + 'driverPreferences': dict({ + 'lscPrivacyMode': 'OFF', + }), + 'electricChargingState': dict({ + 'chargingConnectionType': 'CONDUCTIVE', + 'chargingLevelPercent': 82, + 'chargingStatus': 'WAITING_FOR_CHARGING', + 'chargingTarget': 100, + 'isChargerConnected': True, + 'range': 174, + }), + 'isLeftSteering': True, + 'isLscSupported': True, + 'lastFetched': '2022-06-22T14:24:23.982Z', + 'lastUpdatedAt': '2022-06-22T13:58:52Z', + 'range': 174, + 'requiredServices': list([ + dict({ + 'dateTime': '2022-10-01T00:00:00.000Z', + 'description': 'Next service due by the specified date.', + 'status': 'OK', + 'type': 'BRAKE_FLUID', + }), + dict({ + 'dateTime': '2023-05-01T00:00:00.000Z', + 'description': 'Next vehicle check due after the specified distance or date.', + 'status': 'OK', + 'type': 'VEHICLE_CHECK', + }), + dict({ + 'dateTime': '2023-05-01T00:00:00.000Z', + 'description': 'Next state inspection due by the specified date.', + 'status': 'OK', + 'type': 'VEHICLE_TUV', + }), + ]), + 'roofState': dict({ + 'roofState': 'CLOSED', + 'roofStateType': 'SUN_ROOF', + }), + 'windowsState': dict({ + 'combinedState': 'CLOSED', + 'leftFront': 'CLOSED', + 'rightFront': 'CLOSED', + }), + }), + }), + 'filename': 'bmw-eadrax-vcs_v4_vehicles_state_WBY0FINGERPRINT04.json', + }), dict({ 'content': dict({ 'chargeAndClimateSettings': dict({ @@ -5564,31 +5636,20 @@ }), 'filename': 'mini-eadrax-vcs_v5_vehicle-list.json', }), - dict({ - 'content': dict({ - 'gcid': 'ceb64158-d2ca-47e9-9ee6-cbffb881434e', - 'mappingInfos': list([ - ]), - }), - 'filename': 'toyota-eadrax-vcs_v5_vehicle-list.json', - }), dict({ 'content': dict({ 'capabilities': dict({ 'a4aType': 'BLUETOOTH', - 'alarmSystem': True, + 'checkSustainabilityDPP': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_2_UWB', - 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', - 'vehicleSoftwareUpgradeRequired': 
True, }), 'horn': True, 'inCarCamera': True, - 'inCarCameraDwa': True, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -5599,38 +5660,27 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, - 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, - 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isPlugAndChargeSupported': False, - 'isRemoteEngineStartEnabled': False, - 'isRemoteEngineStartSupported': True, + 'isRemoteEngineStartSupported': False, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, - 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, - 'locationBasedCommerceFeatures': dict({ - 'fueling': False, - 'parking': False, - 'reservations': False, - }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ @@ -5645,45 +5695,11 @@ 'NOT_SUPPORTED', ]), }), - 'remoteServices': dict({ - 'doorLock': dict({ - 'id': 'doorLock', - 'state': 'ACTIVATED', - }), - 'doorUnlock': dict({ - 'id': 'doorUnlock', - 'state': 'ACTIVATED', - }), - 'hornBlow': dict({ - 'id': 'hornBlow', - 'state': 'ACTIVATED', - }), - 'inCarCamera': dict({ - 'id': 'inCarCamera', - 'state': 'ACTIVATED', - }), - 'inCarCameraDwa': dict({ - 'id': 'inCarCameraDwa', - 'state': 'ACTIVATED', - }), - 'lightFlash': dict({ - 'id': 'lightFlash', - 'state': 'ACTIVATED', - }), - 'remote360': dict({ - 'id': 'remote360', - 'state': 'ACTIVATED', - }), - 'surroundViewRecorder': dict({ - 'id': 'surroundViewRecorder', - 'state': 'ACTIVATED', - }), - }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': True, + 'speechThirdPartyAlexa': False, 'speechThirdPartyAlexaSDK': False, 'surroundViewRecorder': True, 'unlock': True, @@ -5861,7 +5877,6 @@ 'roofState': 'CLOSED', 'roofStateType': 'SUN_ROOF', }), - 'securityOverviewMode': 'ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -5952,18 +5967,6 @@ }), }), }), - 'vehicleSoftwareVersion': dict({ - 'iStep': dict({ - 'iStep': 0, - 'month': 0, - 'seriesCluster': '', - 'year': 0, - }), - 'puStep': dict({ - 'month': 0, - 'year': 0, - }), - }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -6083,19 +6086,15 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'alarmSystem': False, + 'checkSustainabilityDPP': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', - 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', - 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, - 'inCarCamera': False, - 'inCarCameraDwa': False, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -6106,80 +6105,37 @@ 
'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, - 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, - 'isCustomerEsimSupported': False, + 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, - 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isPlugAndChargeSupported': False, - 'isRemoteEngineStartEnabled': False, - 'isRemoteEngineStartSupported': True, + 'isRemoteEngineStartSupported': False, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, - 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, - 'locationBasedCommerceFeatures': dict({ - 'fueling': False, - 'parking': False, - 'reservations': False, - }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), - 'remoteServices': dict({ - 'doorLock': dict({ - 'id': 'doorLock', - 'state': 'ACTIVATED', - }), - 'doorUnlock': dict({ - 'id': 'doorUnlock', - 'state': 'ACTIVATED', - }), - 'hornBlow': dict({ - 'id': 'hornBlow', - 'state': 'ACTIVATED', - }), - 'inCarCamera': dict({ - 'id': 'inCarCamera', - }), - 'inCarCameraDwa': dict({ - 'id': 'inCarCameraDwa', - }), - 'lightFlash': dict({ - 'id': 'lightFlash', - 'state': 'ACTIVATED', - }), - 'remote360': dict({ - 'id': 'remote360', - 'state': 'ACTIVATED', - }), - 'surroundViewRecorder': dict({ - 'id': 'surroundViewRecorder', - }), - }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': True, + 'speechThirdPartyAlexa': False, 'speechThirdPartyAlexaSDK': False, - 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -6351,7 +6307,6 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), - 'securityOverviewMode': 'NOT_ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -6442,18 +6397,6 @@ }), }), }), - 'vehicleSoftwareVersion': dict({ - 'iStep': dict({ - 'iStep': 0, - 'month': 0, - 'seriesCluster': '', - 'year': 0, - }), - 'puStep': dict({ - 'month': 0, - 'year': 0, - }), - }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -6573,20 +6516,16 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'alarmSystem': False, + 'checkSustainabilityDPP': False, 'climateFunction': 'VENTILATION', 'climateNow': True, 'climateTimerTrigger': 'DEPARTURE_TIMER', 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', - 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', - 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, - 'inCarCamera': False, - 'inCarCameraDwa': False, 'isBmwChargingSupported': False, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': False, @@ -6604,73 +6543,31 @@ 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': False, 'isEvGoChargingSupported': False, - 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 
'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isPlugAndChargeSupported': False, - 'isRemoteEngineStartEnabled': False, - 'isRemoteEngineStartSupported': True, + 'isRemoteEngineStartSupported': False, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, - 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': False, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isThirdPartyAppStoreSupported': False, - 'isWifiHotspotServiceSupported': False, + 'isWifiHotspotServiceSupported': True, 'lastStateCallState': 'ACTIVATED', 'lights': True, - 'locationBasedCommerceFeatures': dict({ - 'fueling': False, - 'parking': False, - 'reservations': False, - }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), - 'remoteServices': dict({ - 'doorLock': dict({ - 'id': 'doorLock', - 'state': 'ACTIVATED', - }), - 'doorUnlock': dict({ - 'id': 'doorUnlock', - 'state': 'ACTIVATED', - }), - 'hornBlow': dict({ - 'id': 'hornBlow', - 'state': 'ACTIVATED', - }), - 'inCarCamera': dict({ - 'id': 'inCarCamera', - }), - 'inCarCameraDwa': dict({ - 'id': 'inCarCameraDwa', - }), - 'lightFlash': dict({ - 'id': 'lightFlash', - 'state': 'ACTIVATED', - }), - 'remote360': dict({ - 'id': 'remote360', - 'state': 'ACTIVATED', - }), - 'surroundViewRecorder': dict({ - 'id': 'surroundViewRecorder', - }), - }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': True, + 'speechThirdPartyAlexa': False, 'speechThirdPartyAlexaSDK': False, - 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -6804,7 +6701,6 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), - 'securityOverviewMode': None, 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -6891,18 +6787,6 @@ }), }), }), - 'vehicleSoftwareVersion': dict({ - 'iStep': dict({ - 'iStep': 0, - 'month': 0, - 'seriesCluster': '', - 'year': 0, - }), - 'puStep': dict({ - 'month': 0, - 'year': 0, - }), - }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -7214,6 +7098,226 @@ dict({ 'data': None, 'fingerprint': list([ + dict({ + 'content': dict({ + 'capabilities': dict({ + 'climateFunction': 'AIR_CONDITIONING', + 'climateNow': True, + 'climateTimerTrigger': 'DEPARTURE_TIMER', + 'horn': True, + 'isBmwChargingSupported': True, + 'isCarSharingSupported': False, + 'isChargeNowForBusinessSupported': False, + 'isChargingHistorySupported': True, + 'isChargingHospitalityEnabled': False, + 'isChargingLoudnessEnabled': False, + 'isChargingPlanSupported': True, + 'isChargingPowerLimitEnabled': False, + 'isChargingSettingsEnabled': False, + 'isChargingTargetSocEnabled': False, + 'isClimateTimerSupported': True, + 'isCustomerEsimSupported': False, + 'isDCSContractManagementSupported': True, + 'isDataPrivacyEnabled': False, + 'isEasyChargeEnabled': False, + 'isEvGoChargingSupported': False, + 'isMiniChargingSupported': False, + 'isNonLscFeatureEnabled': False, + 'isRemoteEngineStartSupported': False, + 'isRemoteHistoryDeletionSupported': False, + 'isRemoteHistorySupported': True, + 'isRemoteParkingSupported': False, + 'isRemoteServicesActivationRequired': False, + 'isRemoteServicesBookingRequired': False, + 'isScanAndChargeSupported': False, + 'isSustainabilitySupported': False, + 'isWifiHotspotServiceSupported': False, + 
'lastStateCallState': 'ACTIVATED', + 'lights': True, + 'lock': True, + 'remoteChargingCommands': dict({ + }), + 'sendPoi': True, + 'specialThemeSupport': list([ + ]), + 'unlock': True, + 'vehicleFinder': False, + 'vehicleStateSource': 'LAST_STATE_CALL', + }), + 'state': dict({ + 'chargingProfile': dict({ + 'chargingControlType': 'WEEKLY_PLANNER', + 'chargingMode': 'DELAYED_CHARGING', + 'chargingPreference': 'CHARGING_WINDOW', + 'chargingSettings': dict({ + 'hospitality': 'NO_ACTION', + 'idcc': 'NO_ACTION', + 'targetSoc': 100, + }), + 'climatisationOn': False, + 'departureTimes': list([ + dict({ + 'action': 'DEACTIVATE', + 'id': 1, + 'timeStamp': dict({ + 'hour': 7, + 'minute': 35, + }), + 'timerWeekDays': list([ + 'MONDAY', + 'TUESDAY', + 'WEDNESDAY', + 'THURSDAY', + 'FRIDAY', + ]), + }), + dict({ + 'action': 'DEACTIVATE', + 'id': 2, + 'timeStamp': dict({ + 'hour': 18, + 'minute': 0, + }), + 'timerWeekDays': list([ + 'MONDAY', + 'TUESDAY', + 'WEDNESDAY', + 'THURSDAY', + 'FRIDAY', + 'SATURDAY', + 'SUNDAY', + ]), + }), + dict({ + 'action': 'DEACTIVATE', + 'id': 3, + 'timeStamp': dict({ + 'hour': 7, + 'minute': 0, + }), + 'timerWeekDays': list([ + ]), + }), + dict({ + 'action': 'DEACTIVATE', + 'id': 4, + 'timerWeekDays': list([ + ]), + }), + ]), + 'reductionOfChargeCurrent': dict({ + 'end': dict({ + 'hour': 1, + 'minute': 30, + }), + 'start': dict({ + 'hour': 18, + 'minute': 1, + }), + }), + }), + 'checkControlMessages': list([ + ]), + 'climateTimers': list([ + dict({ + 'departureTime': dict({ + 'hour': 6, + 'minute': 40, + }), + 'isWeeklyTimer': True, + 'timerAction': 'ACTIVATE', + 'timerWeekDays': list([ + 'THURSDAY', + 'SUNDAY', + ]), + }), + dict({ + 'departureTime': dict({ + 'hour': 12, + 'minute': 50, + }), + 'isWeeklyTimer': False, + 'timerAction': 'ACTIVATE', + 'timerWeekDays': list([ + 'MONDAY', + ]), + }), + dict({ + 'departureTime': dict({ + 'hour': 18, + 'minute': 59, + }), + 'isWeeklyTimer': True, + 'timerAction': 'DEACTIVATE', + 'timerWeekDays': list([ + 'WEDNESDAY', + ]), + }), + ]), + 'combustionFuelLevel': dict({ + 'range': 105, + 'remainingFuelLiters': 6, + }), + 'currentMileage': 137009, + 'doorsState': dict({ + 'combinedSecurityState': 'UNLOCKED', + 'combinedState': 'CLOSED', + 'hood': 'CLOSED', + 'leftFront': 'CLOSED', + 'leftRear': 'CLOSED', + 'rightFront': 'CLOSED', + 'rightRear': 'CLOSED', + 'trunk': 'CLOSED', + }), + 'driverPreferences': dict({ + 'lscPrivacyMode': 'OFF', + }), + 'electricChargingState': dict({ + 'chargingConnectionType': 'CONDUCTIVE', + 'chargingLevelPercent': 82, + 'chargingStatus': 'WAITING_FOR_CHARGING', + 'chargingTarget': 100, + 'isChargerConnected': True, + 'range': 174, + }), + 'isLeftSteering': True, + 'isLscSupported': True, + 'lastFetched': '2022-06-22T14:24:23.982Z', + 'lastUpdatedAt': '2022-06-22T13:58:52Z', + 'range': 174, + 'requiredServices': list([ + dict({ + 'dateTime': '2022-10-01T00:00:00.000Z', + 'description': 'Next service due by the specified date.', + 'status': 'OK', + 'type': 'BRAKE_FLUID', + }), + dict({ + 'dateTime': '2023-05-01T00:00:00.000Z', + 'description': 'Next vehicle check due after the specified distance or date.', + 'status': 'OK', + 'type': 'VEHICLE_CHECK', + }), + dict({ + 'dateTime': '2023-05-01T00:00:00.000Z', + 'description': 'Next state inspection due by the specified date.', + 'status': 'OK', + 'type': 'VEHICLE_TUV', + }), + ]), + 'roofState': dict({ + 'roofState': 'CLOSED', + 'roofStateType': 'SUN_ROOF', + }), + 'windowsState': dict({ + 'combinedState': 'CLOSED', + 'leftFront': 'CLOSED', + 'rightFront': 
'CLOSED', + }), + }), + }), + 'filename': 'bmw-eadrax-vcs_v4_vehicles_state_WBY0FINGERPRINT04.json', + }), dict({ 'content': dict({ 'chargeAndClimateSettings': dict({ @@ -7287,31 +7391,20 @@ }), 'filename': 'mini-eadrax-vcs_v5_vehicle-list.json', }), - dict({ - 'content': dict({ - 'gcid': 'ceb64158-d2ca-47e9-9ee6-cbffb881434e', - 'mappingInfos': list([ - ]), - }), - 'filename': 'toyota-eadrax-vcs_v5_vehicle-list.json', - }), dict({ 'content': dict({ 'capabilities': dict({ 'a4aType': 'BLUETOOTH', - 'alarmSystem': True, + 'checkSustainabilityDPP': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_2_UWB', - 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', - 'vehicleSoftwareUpgradeRequired': True, }), 'horn': True, 'inCarCamera': True, - 'inCarCameraDwa': True, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -7322,38 +7415,27 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, - 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, - 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isPlugAndChargeSupported': False, - 'isRemoteEngineStartEnabled': False, - 'isRemoteEngineStartSupported': True, + 'isRemoteEngineStartSupported': False, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, - 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, - 'locationBasedCommerceFeatures': dict({ - 'fueling': False, - 'parking': False, - 'reservations': False, - }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ @@ -7368,45 +7450,11 @@ 'NOT_SUPPORTED', ]), }), - 'remoteServices': dict({ - 'doorLock': dict({ - 'id': 'doorLock', - 'state': 'ACTIVATED', - }), - 'doorUnlock': dict({ - 'id': 'doorUnlock', - 'state': 'ACTIVATED', - }), - 'hornBlow': dict({ - 'id': 'hornBlow', - 'state': 'ACTIVATED', - }), - 'inCarCamera': dict({ - 'id': 'inCarCamera', - 'state': 'ACTIVATED', - }), - 'inCarCameraDwa': dict({ - 'id': 'inCarCameraDwa', - 'state': 'ACTIVATED', - }), - 'lightFlash': dict({ - 'id': 'lightFlash', - 'state': 'ACTIVATED', - }), - 'remote360': dict({ - 'id': 'remote360', - 'state': 'ACTIVATED', - }), - 'surroundViewRecorder': dict({ - 'id': 'surroundViewRecorder', - 'state': 'ACTIVATED', - }), - }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': True, + 'speechThirdPartyAlexa': False, 'speechThirdPartyAlexaSDK': False, 'surroundViewRecorder': True, 'unlock': True, @@ -7584,7 +7632,6 @@ 'roofState': 'CLOSED', 'roofStateType': 'SUN_ROOF', }), - 'securityOverviewMode': 'ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -7675,18 +7722,6 @@ }), }), }), - 'vehicleSoftwareVersion': dict({ - 'iStep': dict({ - 'iStep': 0, 
- 'month': 0, - 'seriesCluster': '', - 'year': 0, - }), - 'puStep': dict({ - 'month': 0, - 'year': 0, - }), - }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -7806,19 +7841,15 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'alarmSystem': False, + 'checkSustainabilityDPP': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', - 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', - 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, - 'inCarCamera': False, - 'inCarCameraDwa': False, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -7829,80 +7860,37 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, - 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, - 'isCustomerEsimSupported': False, + 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, - 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isPlugAndChargeSupported': False, - 'isRemoteEngineStartEnabled': False, - 'isRemoteEngineStartSupported': True, + 'isRemoteEngineStartSupported': False, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, - 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, - 'locationBasedCommerceFeatures': dict({ - 'fueling': False, - 'parking': False, - 'reservations': False, - }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), - 'remoteServices': dict({ - 'doorLock': dict({ - 'id': 'doorLock', - 'state': 'ACTIVATED', - }), - 'doorUnlock': dict({ - 'id': 'doorUnlock', - 'state': 'ACTIVATED', - }), - 'hornBlow': dict({ - 'id': 'hornBlow', - 'state': 'ACTIVATED', - }), - 'inCarCamera': dict({ - 'id': 'inCarCamera', - }), - 'inCarCameraDwa': dict({ - 'id': 'inCarCameraDwa', - }), - 'lightFlash': dict({ - 'id': 'lightFlash', - 'state': 'ACTIVATED', - }), - 'remote360': dict({ - 'id': 'remote360', - 'state': 'ACTIVATED', - }), - 'surroundViewRecorder': dict({ - 'id': 'surroundViewRecorder', - }), - }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': True, + 'speechThirdPartyAlexa': False, 'speechThirdPartyAlexaSDK': False, - 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -8074,7 +8062,6 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), - 'securityOverviewMode': 'NOT_ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -8165,18 +8152,6 @@ }), }), }), - 'vehicleSoftwareVersion': dict({ - 'iStep': dict({ - 'iStep': 0, - 'month': 0, - 'seriesCluster': '', - 'year': 0, - }), - 'puStep': dict({ - 'month': 0, - 'year': 0, - }), - }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -8296,20 +8271,16 @@ 'content': dict({ 'capabilities': 
dict({ 'a4aType': 'NOT_SUPPORTED', - 'alarmSystem': False, + 'checkSustainabilityDPP': False, 'climateFunction': 'VENTILATION', 'climateNow': True, 'climateTimerTrigger': 'DEPARTURE_TIMER', 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', - 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', - 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, - 'inCarCamera': False, - 'inCarCameraDwa': False, 'isBmwChargingSupported': False, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': False, @@ -8327,73 +8298,31 @@ 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': False, 'isEvGoChargingSupported': False, - 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isPlugAndChargeSupported': False, - 'isRemoteEngineStartEnabled': False, - 'isRemoteEngineStartSupported': True, + 'isRemoteEngineStartSupported': False, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, - 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': False, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isThirdPartyAppStoreSupported': False, - 'isWifiHotspotServiceSupported': False, + 'isWifiHotspotServiceSupported': True, 'lastStateCallState': 'ACTIVATED', 'lights': True, - 'locationBasedCommerceFeatures': dict({ - 'fueling': False, - 'parking': False, - 'reservations': False, - }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), - 'remoteServices': dict({ - 'doorLock': dict({ - 'id': 'doorLock', - 'state': 'ACTIVATED', - }), - 'doorUnlock': dict({ - 'id': 'doorUnlock', - 'state': 'ACTIVATED', - }), - 'hornBlow': dict({ - 'id': 'hornBlow', - 'state': 'ACTIVATED', - }), - 'inCarCamera': dict({ - 'id': 'inCarCamera', - }), - 'inCarCameraDwa': dict({ - 'id': 'inCarCameraDwa', - }), - 'lightFlash': dict({ - 'id': 'lightFlash', - 'state': 'ACTIVATED', - }), - 'remote360': dict({ - 'id': 'remote360', - 'state': 'ACTIVATED', - }), - 'surroundViewRecorder': dict({ - 'id': 'surroundViewRecorder', - }), - }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': True, + 'speechThirdPartyAlexa': False, 'speechThirdPartyAlexaSDK': False, - 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -8527,7 +8456,6 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), - 'securityOverviewMode': None, 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -8614,18 +8542,6 @@ }), }), }), - 'vehicleSoftwareVersion': dict({ - 'iStep': dict({ - 'iStep': 0, - 'month': 0, - 'seriesCluster': '', - 'year': 0, - }), - 'puStep': dict({ - 'month': 0, - 'year': 0, - }), - }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', diff --git a/tests/components/bmw_connected_drive/snapshots/test_lock.ambr b/tests/components/bmw_connected_drive/snapshots/test_lock.ambr index 395c6e56dda..17e6b118011 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_lock.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_lock.ambr @@ -35,9 +35,12 @@ # name: test_entity_state_attrs[lock.i3_rex_lock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'i3 (+ REX)', 'door_lock_state': 
'UNLOCKED', 'friendly_name': 'i3 (+ REX) Lock', 'supported_features': , + 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'lock.i3_rex_lock', @@ -83,9 +86,12 @@ # name: test_entity_state_attrs[lock.i4_edrive40_lock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'i4 eDrive40', 'door_lock_state': 'LOCKED', 'friendly_name': 'i4 eDrive40 Lock', 'supported_features': , + 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'lock.i4_edrive40_lock', @@ -131,9 +137,12 @@ # name: test_entity_state_attrs[lock.ix_xdrive50_lock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'iX xDrive50', 'door_lock_state': 'LOCKED', 'friendly_name': 'iX xDrive50 Lock', 'supported_features': , + 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'lock.ix_xdrive50_lock', @@ -179,9 +188,12 @@ # name: test_entity_state_attrs[lock.m340i_xdrive_lock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'car': 'M340i xDrive', 'door_lock_state': 'LOCKED', 'friendly_name': 'M340i xDrive Lock', 'supported_features': , + 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'lock.m340i_xdrive_lock', diff --git a/tests/components/bmw_connected_drive/snapshots/test_number.ambr b/tests/components/bmw_connected_drive/snapshots/test_number.ambr index 71dbc46b454..f24ea43d8e8 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_number.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_number.ambr @@ -40,6 +40,7 @@ # name: test_entity_state_attrs[number.i4_edrive40_target_soc-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'i4 eDrive40 Target SoC', 'max': 100.0, @@ -96,6 +97,7 @@ # name: test_entity_state_attrs[number.ix_xdrive50_target_soc-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'iX xDrive50 Target SoC', 'max': 100.0, diff --git a/tests/components/bmw_connected_drive/snapshots/test_select.ambr b/tests/components/bmw_connected_drive/snapshots/test_select.ambr index b827dfe478a..34a8817c8db 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_select.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_select.ambr @@ -8,7 +8,6 @@ 'options': list([ 'immediate_charging', 'delayed_charging', - 'no_action', ]), }), 'config_entry_id': , @@ -41,11 +40,11 @@ # name: test_entity_state_attrs[select.i3_rex_charging_mode-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Charging Mode', 'options': list([ 'immediate_charging', 'delayed_charging', - 'no_action', ]), }), 'context': , @@ -108,6 +107,7 @@ # name: test_entity_state_attrs[select.i4_edrive40_ac_charging_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 AC Charging Limit', 'options': list([ '6', @@ -143,7 +143,6 @@ 'options': list([ 'immediate_charging', 'delayed_charging', - 'no_action', ]), }), 'config_entry_id': , @@ -176,11 +175,11 @@ # name: test_entity_state_attrs[select.i4_edrive40_charging_mode-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Charging Mode', 'options': list([ 'immediate_charging', 'delayed_charging', - 'no_action', ]), }), 
'context': , @@ -243,6 +242,7 @@ # name: test_entity_state_attrs[select.ix_xdrive50_ac_charging_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 AC Charging Limit', 'options': list([ '6', @@ -278,7 +278,6 @@ 'options': list([ 'immediate_charging', 'delayed_charging', - 'no_action', ]), }), 'config_entry_id': , @@ -311,11 +310,11 @@ # name: test_entity_state_attrs[select.ix_xdrive50_charging_mode-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Charging Mode', 'options': list([ 'immediate_charging', 'delayed_charging', - 'no_action', ]), }), 'context': , diff --git a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr index 624b2c6007f..6ba87c029ee 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr @@ -31,13 +31,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'ac_current_limit', - 'unique_id': 'WBY00000000REXI01-charging_profile.ac_current_limit', + 'unique_id': 'WBY00000000REXI01-ac_current_limit', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_ac_current_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'current', 'friendly_name': 'i3 (+ REX) AC current limit', 'unit_of_measurement': , @@ -79,13 +80,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_end_time', - 'unique_id': 'WBY00000000REXI01-fuel_and_battery.charging_end_time', + 'unique_id': 'WBY00000000REXI01-charging_end_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i3_rex_charging_end_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'i3 (+ REX) Charging end time', }), @@ -126,13 +128,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_start_time', - 'unique_id': 'WBY00000000REXI01-fuel_and_battery.charging_start_time', + 'unique_id': 'WBY00000000REXI01-charging_start_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i3_rex_charging_start_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'i3 (+ REX) Charging start time', }), @@ -188,13 +191,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_status', - 'unique_id': 'WBY00000000REXI01-fuel_and_battery.charging_status', + 'unique_id': 'WBY00000000REXI01-charging_status', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i3_rex_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'i3 (+ REX) Charging status', 'options': list([ @@ -245,20 +249,22 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charging target', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_target', - 'unique_id': 'WBY00000000REXI01-fuel_and_battery.charging_target', + 'unique_id': 
'WBY00000000REXI01-charging_target', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i3_rex_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'device_class': 'battery', 'friendly_name': 'i3 (+ REX) Charging target', 'unit_of_measurement': '%', }), @@ -311,6 +317,7 @@ # name: test_entity_state_attrs[sensor.i3_rex_mileage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i3 (+ REX) Mileage', 'state_class': , @@ -358,13 +365,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_battery_percent', - 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_battery_percent', + 'unique_id': 'WBY00000000REXI01-remaining_battery_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_battery_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'i3 (+ REX) Remaining battery percent', 'state_class': , @@ -405,21 +413,22 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, 'original_name': 'Remaining fuel', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_fuel', - 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_fuel', + 'unique_id': 'WBY00000000REXI01-remaining_fuel', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'volume_storage', + 'attribution': 'Data provided by MyBMW', + 'device_class': 'volume', 'friendly_name': 'i3 (+ REX) Remaining fuel', 'state_class': , 'unit_of_measurement': , @@ -466,13 +475,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_fuel_percent', - 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_fuel_percent', + 'unique_id': 'WBY00000000REXI01-remaining_fuel_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_fuel_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Remaining fuel percent', 'state_class': , 'unit_of_measurement': '%', @@ -519,13 +529,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_electric', - 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_range_electric', + 'unique_id': 'WBY00000000REXI01-remaining_range_electric', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_range_electric-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i3 (+ REX) Remaining range electric', 'state_class': , @@ -573,13 +584,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_fuel', - 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_range_fuel', + 'unique_id': 'WBY00000000REXI01-remaining_range_fuel', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_range_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i3 (+ REX) 
Remaining range fuel', 'state_class': , @@ -627,13 +639,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_total', - 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_range_total', + 'unique_id': 'WBY00000000REXI01-remaining_range_total', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_range_total-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i3 (+ REX) Remaining range total', 'state_class': , @@ -679,13 +692,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'ac_current_limit', - 'unique_id': 'WBA00000000DEMO02-charging_profile.ac_current_limit', + 'unique_id': 'WBA00000000DEMO02-ac_current_limit', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_ac_current_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'current', 'friendly_name': 'i4 eDrive40 AC current limit', 'unit_of_measurement': , @@ -727,13 +741,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_end_time', - 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.charging_end_time', + 'unique_id': 'WBA00000000DEMO02-charging_end_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_charging_end_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'i4 eDrive40 Charging end time', }), @@ -774,13 +789,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_start_time', - 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.charging_start_time', + 'unique_id': 'WBA00000000DEMO02-charging_start_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_charging_start_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'i4 eDrive40 Charging start time', }), @@ -836,13 +852,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_status', - 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.charging_status', + 'unique_id': 'WBA00000000DEMO02-charging_status', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'i4 eDrive40 Charging status', 'options': list([ @@ -893,20 +910,22 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charging target', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_target', - 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.charging_target', + 'unique_id': 'WBA00000000DEMO02-charging_target', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'device_class': 'battery', 'friendly_name': 'i4 eDrive40 Charging target', 'unit_of_measurement': '%', }), @@ -927,7 +946,6 @@ 'options': list([ 'cooling', 'heating', - 'ventilation', 'inactive', 'standby', ]), @@ 
-955,19 +973,19 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_status', - 'unique_id': 'WBA00000000DEMO02-climate.activity', + 'unique_id': 'WBA00000000DEMO02-activity', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_climate_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'i4 eDrive40 Climate status', 'options': list([ 'cooling', 'heating', - 'ventilation', 'inactive', 'standby', ]), @@ -980,234 +998,6 @@ 'state': 'heating', }) # --- -# name: test_entity_state_attrs[sensor.i4_edrive40_front_left_target_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.i4_edrive40_front_left_target_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front left target pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'front_left_target_pressure', - 'unique_id': 'WBA00000000DEMO02-tires.front_left.target_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_front_left_target_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'i4 eDrive40 Front left target pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.i4_edrive40_front_left_target_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.69', - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_front_left_tire_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.i4_edrive40_front_left_tire_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front left tire pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'front_left_current_pressure', - 'unique_id': 'WBA00000000DEMO02-tires.front_left.current_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_front_left_tire_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'i4 eDrive40 Front left tire pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.i4_edrive40_front_left_tire_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 
'state': '2.41', - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_front_right_target_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.i4_edrive40_front_right_target_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front right target pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'front_right_target_pressure', - 'unique_id': 'WBA00000000DEMO02-tires.front_right.target_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_front_right_target_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'i4 eDrive40 Front right target pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.i4_edrive40_front_right_target_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.69', - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_front_right_tire_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.i4_edrive40_front_right_tire_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front right tire pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'front_right_current_pressure', - 'unique_id': 'WBA00000000DEMO02-tires.front_right.current_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_front_right_tire_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'i4 eDrive40 Front right tire pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.i4_edrive40_front_right_tire_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.55', - }) -# --- # name: test_entity_state_attrs[sensor.i4_edrive40_mileage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1249,6 +1039,7 @@ # name: test_entity_state_attrs[sensor.i4_edrive40_mileage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i4 eDrive40 Mileage', 'state_class': , @@ -1262,234 +1053,6 @@ 'state': '1121', }) # --- -# name: test_entity_state_attrs[sensor.i4_edrive40_rear_left_target_pressure-entry] - EntityRegistryEntrySnapshot({ - 
'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.i4_edrive40_rear_left_target_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear left target pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rear_left_target_pressure', - 'unique_id': 'WBA00000000DEMO02-tires.rear_left.target_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_rear_left_target_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'i4 eDrive40 Rear left target pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.i4_edrive40_rear_left_target_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.03', - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_rear_left_tire_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.i4_edrive40_rear_left_tire_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear left tire pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rear_left_current_pressure', - 'unique_id': 'WBA00000000DEMO02-tires.rear_left.current_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_rear_left_tire_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'i4 eDrive40 Rear left tire pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.i4_edrive40_rear_left_tire_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.24', - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_rear_right_target_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.i4_edrive40_rear_right_target_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 
'original_icon': None, - 'original_name': 'Rear right target pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rear_right_target_pressure', - 'unique_id': 'WBA00000000DEMO02-tires.rear_right.target_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_rear_right_target_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'i4 eDrive40 Rear right target pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.i4_edrive40_rear_right_target_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.03', - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_rear_right_tire_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.i4_edrive40_rear_right_tire_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear right tire pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rear_right_current_pressure', - 'unique_id': 'WBA00000000DEMO02-tires.rear_right.current_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.i4_edrive40_rear_right_tire_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'i4 eDrive40 Rear right tire pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.i4_edrive40_rear_right_tire_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.31', - }) -# --- # name: test_entity_state_attrs[sensor.i4_edrive40_remaining_battery_percent-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1524,13 +1087,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_battery_percent', - 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.remaining_battery_percent', + 'unique_id': 'WBA00000000DEMO02-remaining_battery_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_remaining_battery_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'i4 eDrive40 Remaining battery percent', 'state_class': , @@ -1578,13 +1142,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_electric', - 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.remaining_range_electric', + 'unique_id': 'WBA00000000DEMO02-remaining_range_electric', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_remaining_range_electric-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i4 eDrive40 Remaining range electric', 'state_class': , @@ -1632,13 +1197,14 @@ 
'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_total', - 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.remaining_range_total', + 'unique_id': 'WBA00000000DEMO02-remaining_range_total', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_remaining_range_total-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i4 eDrive40 Remaining range total', 'state_class': , @@ -1684,13 +1250,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'ac_current_limit', - 'unique_id': 'WBA00000000DEMO01-charging_profile.ac_current_limit', + 'unique_id': 'WBA00000000DEMO01-ac_current_limit', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_ac_current_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'current', 'friendly_name': 'iX xDrive50 AC current limit', 'unit_of_measurement': , @@ -1732,13 +1299,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_end_time', - 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.charging_end_time', + 'unique_id': 'WBA00000000DEMO01-charging_end_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_end_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'iX xDrive50 Charging end time', }), @@ -1779,13 +1347,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_start_time', - 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.charging_start_time', + 'unique_id': 'WBA00000000DEMO01-charging_start_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_start_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'iX xDrive50 Charging start time', }), @@ -1841,13 +1410,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_status', - 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.charging_status', + 'unique_id': 'WBA00000000DEMO01-charging_status', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'iX xDrive50 Charging status', 'options': list([ @@ -1898,20 +1468,22 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charging target', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_target', - 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.charging_target', + 'unique_id': 'WBA00000000DEMO01-charging_target', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', + 'device_class': 'battery', 'friendly_name': 'iX xDrive50 Charging target', 'unit_of_measurement': '%', }), @@ -1932,7 +1504,6 @@ 'options': list([ 'cooling', 'heating', - 'ventilation', 'inactive', 'standby', ]), @@ -1960,19 +1531,19 @@ 'previous_unique_id': 
None, 'supported_features': 0, 'translation_key': 'climate_status', - 'unique_id': 'WBA00000000DEMO01-climate.activity', + 'unique_id': 'WBA00000000DEMO01-activity', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_climate_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'iX xDrive50 Climate status', 'options': list([ 'cooling', 'heating', - 'ventilation', 'inactive', 'standby', ]), @@ -1985,234 +1556,6 @@ 'state': 'inactive', }) # --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_front_left_target_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ix_xdrive50_front_left_target_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front left target pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'front_left_target_pressure', - 'unique_id': 'WBA00000000DEMO01-tires.front_left.target_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_front_left_target_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'iX xDrive50 Front left target pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.ix_xdrive50_front_left_target_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.41', - }) -# --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_front_left_tire_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ix_xdrive50_front_left_tire_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front left tire pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'front_left_current_pressure', - 'unique_id': 'WBA00000000DEMO01-tires.front_left.current_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_front_left_tire_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'iX xDrive50 Front left tire pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.ix_xdrive50_front_left_tire_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.41', - }) -# --- -# name: 
test_entity_state_attrs[sensor.ix_xdrive50_front_right_target_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ix_xdrive50_front_right_target_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front right target pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'front_right_target_pressure', - 'unique_id': 'WBA00000000DEMO01-tires.front_right.target_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_front_right_target_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'iX xDrive50 Front right target pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.ix_xdrive50_front_right_target_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.41', - }) -# --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_front_right_tire_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ix_xdrive50_front_right_tire_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front right tire pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'front_right_current_pressure', - 'unique_id': 'WBA00000000DEMO01-tires.front_right.current_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_front_right_tire_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'iX xDrive50 Front right tire pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.ix_xdrive50_front_right_tire_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.41', - }) -# --- # name: test_entity_state_attrs[sensor.ix_xdrive50_mileage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2254,6 +1597,7 @@ # name: test_entity_state_attrs[sensor.ix_xdrive50_mileage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'iX xDrive50 Mileage', 'state_class': , @@ -2267,234 +1611,6 @@ 'state': '1121', }) # --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_left_target_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': 
None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ix_xdrive50_rear_left_target_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear left target pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rear_left_target_pressure', - 'unique_id': 'WBA00000000DEMO01-tires.rear_left.target_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_left_target_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'iX xDrive50 Rear left target pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.ix_xdrive50_rear_left_target_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.69', - }) -# --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_left_tire_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ix_xdrive50_rear_left_tire_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear left tire pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rear_left_current_pressure', - 'unique_id': 'WBA00000000DEMO01-tires.rear_left.current_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_left_tire_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'iX xDrive50 Rear left tire pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.ix_xdrive50_rear_left_tire_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.61', - }) -# --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_right_target_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ix_xdrive50_rear_right_target_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 
'Rear right target pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rear_right_target_pressure', - 'unique_id': 'WBA00000000DEMO01-tires.rear_right.target_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_right_target_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'iX xDrive50 Rear right target pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.ix_xdrive50_rear_right_target_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.69', - }) -# --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_right_tire_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ix_xdrive50_rear_right_tire_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear right tire pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rear_right_current_pressure', - 'unique_id': 'WBA00000000DEMO01-tires.rear_right.current_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_right_tire_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'iX xDrive50 Rear right tire pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.ix_xdrive50_rear_right_tire_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.69', - }) -# --- # name: test_entity_state_attrs[sensor.ix_xdrive50_remaining_battery_percent-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2529,13 +1645,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_battery_percent', - 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.remaining_battery_percent', + 'unique_id': 'WBA00000000DEMO01-remaining_battery_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_remaining_battery_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'iX xDrive50 Remaining battery percent', 'state_class': , @@ -2583,13 +1700,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_electric', - 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.remaining_range_electric', + 'unique_id': 'WBA00000000DEMO01-remaining_range_electric', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_remaining_range_electric-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'iX xDrive50 Remaining range electric', 'state_class': , @@ -2637,13 +1755,14 @@ 'previous_unique_id': None, 
'supported_features': 0, 'translation_key': 'remaining_range_total', - 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.remaining_range_total', + 'unique_id': 'WBA00000000DEMO01-remaining_range_total', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_remaining_range_total-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'iX xDrive50 Remaining range total', 'state_class': , @@ -2666,7 +1785,6 @@ 'options': list([ 'cooling', 'heating', - 'ventilation', 'inactive', 'standby', ]), @@ -2694,19 +1812,19 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_status', - 'unique_id': 'WBA00000000DEMO03-climate.activity', + 'unique_id': 'WBA00000000DEMO03-activity', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_climate_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'M340i xDrive Climate status', 'options': list([ 'cooling', 'heating', - 'ventilation', 'inactive', 'standby', ]), @@ -2719,234 +1837,6 @@ 'state': 'inactive', }) # --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_front_left_target_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.m340i_xdrive_front_left_target_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front left target pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'front_left_target_pressure', - 'unique_id': 'WBA00000000DEMO03-tires.front_left.target_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_front_left_target_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'M340i xDrive Front left target pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.m340i_xdrive_front_left_target_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_front_left_tire_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.m340i_xdrive_front_left_tire_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front left tire pressure', - 'platform': 'bmw_connected_drive', - 
'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'front_left_current_pressure', - 'unique_id': 'WBA00000000DEMO03-tires.front_left.current_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_front_left_tire_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'M340i xDrive Front left tire pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.m340i_xdrive_front_left_tire_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.41', - }) -# --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_front_right_target_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.m340i_xdrive_front_right_target_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front right target pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'front_right_target_pressure', - 'unique_id': 'WBA00000000DEMO03-tires.front_right.target_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_front_right_target_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'M340i xDrive Front right target pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.m340i_xdrive_front_right_target_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_front_right_tire_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.m340i_xdrive_front_right_tire_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front right tire pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'front_right_current_pressure', - 'unique_id': 'WBA00000000DEMO03-tires.front_right.current_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_front_right_tire_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'M340i xDrive Front right tire pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 
'sensor.m340i_xdrive_front_right_tire_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.55', - }) -# --- # name: test_entity_state_attrs[sensor.m340i_xdrive_mileage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2988,6 +1878,7 @@ # name: test_entity_state_attrs[sensor.m340i_xdrive_mileage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'M340i xDrive Mileage', 'state_class': , @@ -3001,234 +1892,6 @@ 'state': '1121', }) # --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_left_target_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.m340i_xdrive_rear_left_target_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear left target pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rear_left_target_pressure', - 'unique_id': 'WBA00000000DEMO03-tires.rear_left.target_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_left_target_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'M340i xDrive Rear left target pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.m340i_xdrive_rear_left_target_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_left_tire_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.m340i_xdrive_rear_left_tire_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear left tire pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rear_left_current_pressure', - 'unique_id': 'WBA00000000DEMO03-tires.rear_left.current_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_left_tire_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'M340i xDrive Rear left tire pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.m340i_xdrive_rear_left_tire_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.24', - }) -# --- -# name: 
test_entity_state_attrs[sensor.m340i_xdrive_rear_right_target_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.m340i_xdrive_rear_right_target_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear right target pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rear_right_target_pressure', - 'unique_id': 'WBA00000000DEMO03-tires.rear_right.target_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_right_target_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'M340i xDrive Rear right target pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.m340i_xdrive_rear_right_target_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_right_tire_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.m340i_xdrive_rear_right_tire_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear right tire pressure', - 'platform': 'bmw_connected_drive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rear_right_current_pressure', - 'unique_id': 'WBA00000000DEMO03-tires.rear_right.current_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_right_tire_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'M340i xDrive Rear right tire pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.m340i_xdrive_rear_right_tire_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.31', - }) -# --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_fuel-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3256,21 +1919,22 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, 'original_name': 'Remaining fuel', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_fuel', - 'unique_id': 'WBA00000000DEMO03-fuel_and_battery.remaining_fuel', + 'unique_id': 'WBA00000000DEMO03-remaining_fuel', 'unit_of_measurement': , }) # --- # 
name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'volume_storage', + 'attribution': 'Data provided by MyBMW', + 'device_class': 'volume', 'friendly_name': 'M340i xDrive Remaining fuel', 'state_class': , 'unit_of_measurement': , @@ -3317,13 +1981,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_fuel_percent', - 'unique_id': 'WBA00000000DEMO03-fuel_and_battery.remaining_fuel_percent', + 'unique_id': 'WBA00000000DEMO03-remaining_fuel_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_fuel_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Remaining fuel percent', 'state_class': , 'unit_of_measurement': '%', @@ -3370,13 +2035,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_fuel', - 'unique_id': 'WBA00000000DEMO03-fuel_and_battery.remaining_range_fuel', + 'unique_id': 'WBA00000000DEMO03-remaining_range_fuel', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_range_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'M340i xDrive Remaining range fuel', 'state_class': , @@ -3424,13 +2090,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_total', - 'unique_id': 'WBA00000000DEMO03-fuel_and_battery.remaining_range_total', + 'unique_id': 'WBA00000000DEMO03-remaining_range_total', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_range_total-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'M340i xDrive Remaining range total', 'state_class': , diff --git a/tests/components/bmw_connected_drive/snapshots/test_switch.ambr b/tests/components/bmw_connected_drive/snapshots/test_switch.ambr index 5b60a32c3be..5a87a6ddd84 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_switch.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_switch.ambr @@ -35,6 +35,7 @@ # name: test_entity_state_attrs[switch.i4_edrive40_climate-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Climate', }), 'context': , @@ -81,6 +82,7 @@ # name: test_entity_state_attrs[switch.ix_xdrive50_charging-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Charging', }), 'context': , @@ -127,6 +129,7 @@ # name: test_entity_state_attrs[switch.ix_xdrive50_climate-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Climate', }), 'context': , @@ -173,6 +176,7 @@ # name: test_entity_state_attrs[switch.m340i_xdrive_climate-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Climate', }), 'context': , diff --git a/tests/components/bmw_connected_drive/test_button.py b/tests/components/bmw_connected_drive/test_button.py index 88c7990cde9..99cabc900fa 100644 --- a/tests/components/bmw_connected_drive/test_button.py +++ b/tests/components/bmw_connected_drive/test_button.py @@ -165,7 +165,7 @@ async def 
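# [Editorial aside - illustrative sketch, not part of the patch above.]
# The sensor/switch snapshot hunks show entity unique_ids moving from a dotted
# attribute path ('<VIN>-fuel_and_battery.remaining_battery_percent') to a flat
# key ('<VIN>-remaining_battery_percent'). The helper below is a hypothetical
# illustration of that mapping; only the id strings are taken from the
# snapshots, the function and table are not from the integration itself.

OLD_TO_NEW_KEY = {
    "fuel_and_battery.remaining_battery_percent": "remaining_battery_percent",
    "fuel_and_battery.remaining_range_total": "remaining_range_total",
    "charging_profile.ac_current_limit": "ac_current_limit",
    "climate.activity": "activity",
}


def map_unique_id(old_unique_id: str) -> str | None:
    """Return the flat unique_id for a dotted one, or None if nothing changes."""
    vin, _, old_key = old_unique_id.partition("-")
    new_key = OLD_TO_NEW_KEY.get(old_key)
    return f"{vin}-{new_key}" if new_key else None


assert map_unique_id("WBA00000000DEMO01-climate.activity") == "WBA00000000DEMO01-activity"
assert map_unique_id("WBA00000000DEMO01-mileage") is None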
test_service_call_success_state_change( ( "button.i4_edrive40_find_vehicle", "device_tracker.i4_edrive40", - {"latitude": 12.345, "longitude": 34.5678, "direction": 121}, + {"latitude": 123.456, "longitude": 34.5678, "direction": 121}, {"latitude": 48.177334, "longitude": 11.556274, "direction": 180}, ), ], diff --git a/tests/components/bmw_connected_drive/test_config_flow.py b/tests/components/bmw_connected_drive/test_config_flow.py index f57f1a304ac..3c7f452a011 100644 --- a/tests/components/bmw_connected_drive/test_config_flow.py +++ b/tests/components/bmw_connected_drive/test_config_flow.py @@ -4,13 +4,8 @@ from copy import deepcopy from unittest.mock import patch from bimmer_connected.api.authentication import MyBMWAuthentication -from bimmer_connected.models import ( - MyBMWAPIError, - MyBMWAuthError, - MyBMWCaptchaMissingError, -) +from bimmer_connected.models import MyBMWAPIError, MyBMWAuthError from httpx import RequestError -import pytest from homeassistant import config_entries from homeassistant.components.bmw_connected_drive.config_flow import DOMAIN @@ -18,7 +13,7 @@ from homeassistant.components.bmw_connected_drive.const import ( CONF_READ_ONLY, CONF_REFRESH_TOKEN, ) -from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -164,7 +159,7 @@ async def test_options_flow_implementation(hass: HomeAssistant) -> None: CONF_READ_ONLY: True, } - assert len(mock_setup_entry.mock_calls) == 2 + assert len(mock_setup_entry.mock_calls) == 1 async def test_reauth(hass: HomeAssistant) -> None: @@ -193,19 +188,19 @@ async def test_reauth(hass: HomeAssistant) -> None: assert config_entry.data == config_entry_with_wrong_password["data"] - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": config_entry.unique_id, + "entry_id": config_entry.entry_id, + }, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} - suggested_values = { - key: key.description.get("suggested_value") - for key in result["data_schema"].schema - } - assert suggested_values[CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME] - assert suggested_values[CONF_PASSWORD] == wrong_password - assert suggested_values[CONF_REGION] == FIXTURE_USER_INPUT[CONF_REGION] - result2 = await hass.config_entries.flow.async_configure( result["flow_id"], FIXTURE_USER_INPUT ) @@ -215,132 +210,4 @@ async def test_reauth(hass: HomeAssistant) -> None: assert result2["reason"] == "reauth_successful" assert config_entry.data == FIXTURE_COMPLETE_ENTRY - assert len(mock_setup_entry.mock_calls) == 2 - - -async def test_reauth_unique_id_abort(hass: HomeAssistant) -> None: - """Test aborting the reauth form if unique_id changes.""" - with patch( - "bimmer_connected.api.authentication.MyBMWAuthentication.login", - side_effect=login_sideeffect, - autospec=True, - ): - wrong_password = "wrong" - - config_entry_with_wrong_password = deepcopy(FIXTURE_CONFIG_ENTRY) - config_entry_with_wrong_password["data"][CONF_PASSWORD] = wrong_password - - config_entry = MockConfigEntry(**config_entry_with_wrong_password) - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.data == 
config_entry_with_wrong_password["data"] - - result = await config_entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {**FIXTURE_USER_INPUT, CONF_REGION: "north_america"} - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "account_mismatch" - assert config_entry.data == config_entry_with_wrong_password["data"] - - -async def test_reconfigure(hass: HomeAssistant) -> None: - """Test the reconfiguration form.""" - with patch( - "bimmer_connected.api.authentication.MyBMWAuthentication.login", - side_effect=login_sideeffect, - autospec=True, - ): - config_entry = MockConfigEntry(**FIXTURE_CONFIG_ENTRY) - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - result = await config_entry.start_reconfigure_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - suggested_values = { - key: key.description.get("suggested_value") - for key in result["data_schema"].schema - } - assert suggested_values[CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME] - assert suggested_values[CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD] - assert suggested_values[CONF_REGION] == FIXTURE_USER_INPUT[CONF_REGION] - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], FIXTURE_USER_INPUT - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" - assert config_entry.data == FIXTURE_COMPLETE_ENTRY - - -async def test_reconfigure_unique_id_abort(hass: HomeAssistant) -> None: - """Test aborting the reconfiguration form if unique_id changes.""" - with patch( - "bimmer_connected.api.authentication.MyBMWAuthentication.login", - side_effect=login_sideeffect, - autospec=True, - ): - config_entry = MockConfigEntry(**FIXTURE_CONFIG_ENTRY) - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - result = await config_entry.start_reconfigure_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {**FIXTURE_USER_INPUT, CONF_USERNAME: "somebody@email.com"}, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "account_mismatch" - assert config_entry.data == FIXTURE_COMPLETE_ENTRY - - -@pytest.mark.usefixtures("bmw_fixture") -async def test_captcha_flow_not_set(hass: HomeAssistant) -> None: - """Test the external flow with captcha failing once and succeeding the second time.""" - - TEST_REGION = "north_america" - - # Start flow and open form - # Start flow and open form - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - # Add login data - with patch( - "bimmer_connected.api.authentication.MyBMWAuthentication._login_row_na", - side_effect=MyBMWCaptchaMissingError( - "Missing hCaptcha token for North America login" - ), - ): - result = await 
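# [Editorial aside - illustrative sketch, not part of the patch above.]
# The removed reauth/reconfigure tests read form defaults back via
# `key.description.get("suggested_value")` over `result["data_schema"].schema`.
# Below is a small, framework-free sketch of the voluptuous mechanism that code
# relies on; the field names and values are placeholders, not the integration's
# real fixtures.

import voluptuous as vol

LOGIN_SCHEMA = vol.Schema(
    {
        vol.Required("username", description={"suggested_value": "user@example.com"}): str,
        vol.Required("password", description={"suggested_value": "hunter2"}): str,
    }
)

# Each key in .schema is a voluptuous Marker; its description dict carries the
# suggested value that a form would pre-fill.
suggested_values = {
    str(key): key.description.get("suggested_value") for key in LOGIN_SCHEMA.schema
}
assert suggested_values["username"] == "user@example.com"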
hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={**FIXTURE_USER_INPUT, CONF_REGION: TEST_REGION}, - ) - assert result["errors"]["base"] == "missing_captcha" + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/bmw_connected_drive/test_coordinator.py b/tests/components/bmw_connected_drive/test_coordinator.py index 774a85eb6da..5b3f99a9414 100644 --- a/tests/components/bmw_connected_drive/test_coordinator.py +++ b/tests/components/bmw_connected_drive/test_coordinator.py @@ -1,20 +1,14 @@ """Test BMW coordinator.""" -from copy import deepcopy from datetime import timedelta from unittest.mock import patch -from bimmer_connected.models import ( - MyBMWAPIError, - MyBMWAuthError, - MyBMWCaptchaMissingError, -) +from bimmer_connected.models import MyBMWAPIError, MyBMWAuthError from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.bmw_connected_drive import DOMAIN as BMW_DOMAIN -from homeassistant.const import CONF_REGION -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.update_coordinator import UpdateFailed @@ -33,7 +27,10 @@ async def test_update_success(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert config_entry.runtime_data.coordinator.last_update_success is True + assert ( + hass.data[config_entry.domain][config_entry.entry_id].last_update_success + is True + ) @pytest.mark.usefixtures("bmw_fixture") @@ -48,7 +45,7 @@ async def test_update_failed( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - coordinator = config_entry.runtime_data.coordinator + coordinator = hass.data[config_entry.domain][config_entry.entry_id] assert coordinator.last_update_success is True @@ -77,7 +74,7 @@ async def test_update_reauth( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - coordinator = config_entry.runtime_data.coordinator + coordinator = hass.data[config_entry.domain][config_entry.entry_id] assert coordinator.last_update_success is True @@ -124,42 +121,6 @@ async def test_init_reauth( await hass.async_block_till_done() reauth_issue = issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, - f"config_entry_reauth_{BMW_DOMAIN}_{config_entry.entry_id}", + HA_DOMAIN, f"config_entry_reauth_{BMW_DOMAIN}_{config_entry.entry_id}" ) assert reauth_issue.active is True - - -@pytest.mark.usefixtures("bmw_fixture") -async def test_captcha_reauth( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, -) -> None: - """Test the reauth form.""" - TEST_REGION = "north_america" - - config_entry_fixure = deepcopy(FIXTURE_CONFIG_ENTRY) - config_entry_fixure["data"][CONF_REGION] = TEST_REGION - config_entry = MockConfigEntry(**config_entry_fixure) - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - coordinator = config_entry.runtime_data.coordinator - - assert coordinator.last_update_success is True - - freezer.tick(timedelta(minutes=10, seconds=1)) - with patch( - "bimmer_connected.account.MyBMWAccount.get_vehicles", - side_effect=MyBMWCaptchaMissingError( - "Missing hCaptcha token for North America login" - ), - ): - 
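# [Editorial aside - illustrative sketch, not part of the patch above.]
# The test_coordinator.py hunks switch the tests back from the
# `config_entry.runtime_data.coordinator` attribute to the classic
# `hass.data[config_entry.domain][config_entry.entry_id]` lookup. The snippet
# below is a framework-free illustration of that dict layout; FakeHass and the
# entry id are stand-ins, not Home Assistant objects.

from dataclasses import dataclass, field
from typing import Any


@dataclass
class FakeHass:
    """Just enough of the hass object to show the data registry shape."""

    data: dict[str, dict[str, Any]] = field(default_factory=dict)


def store_coordinator(hass: FakeHass, domain: str, entry_id: str, coordinator: Any) -> None:
    """Index the coordinator by integration domain, then by config entry id."""
    hass.data.setdefault(domain, {})[entry_id] = coordinator


def get_coordinator(hass: FakeHass, domain: str, entry_id: str) -> Any:
    """Mirror the lookup the reverted tests use: hass.data[domain][entry_id]."""
    return hass.data[domain][entry_id]


fake_hass = FakeHass()
store_coordinator(fake_hass, "bmw_connected_drive", "mock-entry-id", coordinator := object())
assert get_coordinator(fake_hass, "bmw_connected_drive", "mock-entry-id") is coordinator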
async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert coordinator.last_update_success is False - assert isinstance(coordinator.last_exception, ConfigEntryAuthFailed) is True - assert coordinator.last_exception.translation_key == "missing_captcha" diff --git a/tests/components/bmw_connected_drive/test_init.py b/tests/components/bmw_connected_drive/test_init.py index e523b2b3d02..5cd6362d6fa 100644 --- a/tests/components/bmw_connected_drive/test_init.py +++ b/tests/components/bmw_connected_drive/test_init.py @@ -85,7 +85,7 @@ async def test_migrate_options_from_data(hass: HomeAssistant) -> None: "disabled_by": None, }, f"{VIN}-charging_level_hv", - f"{VIN}-fuel_and_battery.remaining_battery_percent", + f"{VIN}-remaining_battery_percent", ), ( { @@ -96,18 +96,7 @@ async def test_migrate_options_from_data(hass: HomeAssistant) -> None: "disabled_by": None, }, f"{VIN}-remaining_range_total", - f"{VIN}-fuel_and_battery.remaining_range_total", - ), - ( - { - "domain": SENSOR_DOMAIN, - "platform": BMW_DOMAIN, - "unique_id": f"{VIN}-mileage", - "suggested_object_id": f"{VEHICLE_NAME} mileage", - "disabled_by": None, - }, - f"{VIN}-mileage", - f"{VIN}-mileage", + f"{VIN}-remaining_range_total", ), ], ) @@ -154,7 +143,7 @@ async def test_migrate_unique_ids( "disabled_by": None, }, f"{VIN}-charging_level_hv", - f"{VIN}-fuel_and_battery.remaining_battery_percent", + f"{VIN}-remaining_battery_percent", ), ], ) @@ -174,8 +163,8 @@ async def test_dont_migrate_unique_ids( existing_entity = entity_registry.async_get_or_create( SENSOR_DOMAIN, BMW_DOMAIN, - unique_id=f"{VIN}-fuel_and_battery.remaining_battery_percent", - suggested_object_id=f"{VEHICLE_NAME} fuel_and_battery.remaining_battery_percent", + unique_id=f"{VIN}-remaining_battery_percent", + suggested_object_id=f"{VEHICLE_NAME} remaining_battery_percent", config_entry=mock_config_entry, ) diff --git a/tests/components/bmw_connected_drive/test_notify.py b/tests/components/bmw_connected_drive/test_notify.py deleted file mode 100644 index 4113f618be0..00000000000 --- a/tests/components/bmw_connected_drive/test_notify.py +++ /dev/null @@ -1,151 +0,0 @@ -"""Test BMW numbers.""" - -from unittest.mock import AsyncMock - -from bimmer_connected.models import MyBMWAPIError, MyBMWRemoteServiceError -from bimmer_connected.tests.common import POI_DATA -from bimmer_connected.vehicle.remote_services import RemoteServices -import pytest -import respx - -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError - -from . 
import check_remote_service_call, setup_mocked_integration - - -async def test_legacy_notify_service_simple( - hass: HomeAssistant, - bmw_fixture: respx.Router, -) -> None: - """Test successful sending of POIs.""" - - # Setup component - assert await setup_mocked_integration(hass) - - # Minimal required data - await hass.services.async_call( - "notify", - "bmw_connected_drive_ix_xdrive50", - { - "message": POI_DATA.get("name"), - "data": { - "latitude": POI_DATA.get("lat"), - "longitude": POI_DATA.get("lon"), - }, - }, - blocking=True, - ) - check_remote_service_call(bmw_fixture, "send-to-car") - - bmw_fixture.reset() - - # Full data - await hass.services.async_call( - "notify", - "bmw_connected_drive_ix_xdrive50", - { - "message": POI_DATA.get("name"), - "data": { - "latitude": POI_DATA.get("lat"), - "longitude": POI_DATA.get("lon"), - "street": POI_DATA.get("street"), - "city": POI_DATA.get("city"), - "postal_code": POI_DATA.get("postal_code"), - "country": POI_DATA.get("country"), - }, - }, - blocking=True, - ) - check_remote_service_call(bmw_fixture, "send-to-car") - - -@pytest.mark.usefixtures("bmw_fixture") -@pytest.mark.parametrize( - ("data", "exc_translation"), - [ - ( - { - "latitude": POI_DATA.get("lat"), - }, - "Invalid data for point of interest: required key not provided @ data['longitude']", - ), - ( - { - "latitude": POI_DATA.get("lat"), - "longitude": "text", - }, - "Invalid data for point of interest: invalid longitude for dictionary value @ data['longitude']", - ), - ( - { - "latitude": POI_DATA.get("lat"), - "longitude": 9999, - }, - "Invalid data for point of interest: invalid longitude for dictionary value @ data['longitude']", - ), - ], -) -async def test_service_call_invalid_input( - hass: HomeAssistant, - data: dict, - exc_translation: str, -) -> None: - """Test invalid inputs.""" - - # Setup component - assert await setup_mocked_integration(hass) - - with pytest.raises(ServiceValidationError) as exc: - await hass.services.async_call( - "notify", - "bmw_connected_drive_ix_xdrive50", - { - "message": POI_DATA.get("name"), - "data": data, - }, - blocking=True, - ) - assert str(exc.value) == exc_translation - - -@pytest.mark.usefixtures("bmw_fixture") -@pytest.mark.parametrize( - ("raised", "expected"), - [ - (MyBMWRemoteServiceError, HomeAssistantError), - (MyBMWAPIError, HomeAssistantError), - ], -) -async def test_service_call_fail( - hass: HomeAssistant, - raised: Exception, - expected: Exception, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test exception handling.""" - - # Setup component - assert await setup_mocked_integration(hass) - - # Setup exception - monkeypatch.setattr( - RemoteServices, - "trigger_remote_service", - AsyncMock(side_effect=raised), - ) - - # Test - with pytest.raises(expected): - await hass.services.async_call( - "notify", - "bmw_connected_drive_ix_xdrive50", - { - "message": POI_DATA.get("name"), - "data": { - "latitude": POI_DATA.get("lat"), - "longitude": POI_DATA.get("lon"), - }, - }, - blocking=True, - ) diff --git a/tests/components/bond/test_button.py b/tests/components/bond/test_button.py index c14bba0d01f..8c8f38db72b 100644 --- a/tests/components/bond/test_button.py +++ b/tests/components/bond/test_button.py @@ -57,15 +57,6 @@ def light(name: str): } -def motorized_shade(name: str): - """Create a motorized shade with a given name.""" - return { - "name": name, - "type": DeviceType.MOTORIZED_SHADES, - "actions": [Action.OPEN, Action.OPEN_NEXT, Action.CLOSE, Action.CLOSE_NEXT], - } - - async def test_entity_registry( hass: 
HomeAssistant, entity_registry: er.EntityRegistry, @@ -189,38 +180,3 @@ async def test_press_button(hass: HomeAssistant) -> None: mock_action.assert_called_once_with( "test-device-id", Action(Action.START_DECREASING_BRIGHTNESS) ) - - -async def test_motorized_shade_actions(hass: HomeAssistant) -> None: - """Tests motorized shade open next and close next actions.""" - await setup_platform( - hass, - BUTTON_DOMAIN, - motorized_shade("name-1"), - bond_device_id="test-device-id", - ) - - assert hass.states.get("button.name_1_open_next") - assert hass.states.get("button.name_1_close_next") - - with patch_bond_action() as mock_action, patch_bond_device_state(): - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: "button.name_1_open_next"}, - blocking=True, - ) - await hass.async_block_till_done() - - mock_action.assert_called_once_with("test-device-id", Action(Action.OPEN_NEXT)) - - with patch_bond_action() as mock_action, patch_bond_device_state(): - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: "button.name_1_close_next"}, - blocking=True, - ) - await hass.async_block_till_done() - - mock_action.assert_called_once_with("test-device-id", Action(Action.CLOSE_NEXT)) diff --git a/tests/components/bond/test_cover.py b/tests/components/bond/test_cover.py index 4dc8256be48..e438a830eb5 100644 --- a/tests/components/bond/test_cover.py +++ b/tests/components/bond/test_cover.py @@ -8,7 +8,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN as COVER_DOMAIN, - CoverState, + STATE_CLOSED, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -19,6 +19,7 @@ from homeassistant.const import ( SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, + STATE_OPEN, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -223,7 +224,7 @@ async def test_tilt_and_open(hass: HomeAssistant) -> None: await hass.async_block_till_done() mock_open.assert_called_once_with("test-device-id", Action.tilt_open()) - assert hass.states.get("cover.name_1").state == CoverState.CLOSED + assert hass.states.get("cover.name_1").state == STATE_CLOSED async def test_update_reports_open_cover(hass: HomeAssistant) -> None: @@ -279,7 +280,7 @@ async def test_set_position_cover(hass: HomeAssistant) -> None: mock_hold.assert_called_once_with("test-device-id", Action.set_position(0)) entity_state = hass.states.get("cover.name_1") - assert entity_state.state == CoverState.OPEN + assert entity_state.state == STATE_OPEN assert entity_state.attributes[ATTR_CURRENT_POSITION] == 100 with ( @@ -297,7 +298,7 @@ async def test_set_position_cover(hass: HomeAssistant) -> None: mock_hold.assert_called_once_with("test-device-id", Action.set_position(100)) entity_state = hass.states.get("cover.name_1") - assert entity_state.state == CoverState.CLOSED + assert entity_state.state == STATE_CLOSED assert entity_state.attributes[ATTR_CURRENT_POSITION] == 0 with ( @@ -315,5 +316,5 @@ async def test_set_position_cover(hass: HomeAssistant) -> None: mock_hold.assert_called_once_with("test-device-id", Action.set_position(40)) entity_state = hass.states.get("cover.name_1") - assert entity_state.state == CoverState.OPEN + assert entity_state.state == STATE_OPEN assert entity_state.attributes[ATTR_CURRENT_POSITION] == 60 diff --git a/tests/components/bosch_shc/test_config_flow.py b/tests/components/bosch_shc/test_config_flow.py index 63f7169b026..2c43ec0a370 100644 --- a/tests/components/bosch_shc/test_config_flow.py +++ 
b/tests/components/bosch_shc/test_config_flow.py @@ -99,8 +99,8 @@ async def test_form_user(hass: HomeAssistant) -> None: assert result3["title"] == "shc012345" assert result3["data"] == { "host": "1.1.1.1", - "ssl_certificate": hass.config.path(DOMAIN, "test-mac", CONF_SHC_CERT), - "ssl_key": hass.config.path(DOMAIN, "test-mac", CONF_SHC_KEY), + "ssl_certificate": hass.config.path(DOMAIN, CONF_SHC_CERT), + "ssl_key": hass.config.path(DOMAIN, CONF_SHC_KEY), "token": "abc:123", "hostname": "123", } @@ -549,8 +549,8 @@ async def test_zeroconf(hass: HomeAssistant) -> None: assert result3["title"] == "shc012345" assert result3["data"] == { "host": "1.1.1.1", - "ssl_certificate": hass.config.path(DOMAIN, "test-mac", CONF_SHC_CERT), - "ssl_key": hass.config.path(DOMAIN, "test-mac", CONF_SHC_KEY), + "ssl_certificate": hass.config.path(DOMAIN, CONF_SHC_CERT), + "ssl_key": hass.config.path(DOMAIN, CONF_SHC_KEY), "token": "abc:123", "hostname": "123", } @@ -646,7 +646,11 @@ async def test_reauth(hass: HomeAssistant) -> None: title="shc012345", ) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + data=mock_config.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -708,7 +712,6 @@ async def test_reauth(hass: HomeAssistant) -> None: async def test_tls_assets_writer(hass: HomeAssistant) -> None: """Test we write tls assets to correct location.""" - unique_id = "test-mac" assets = { "token": "abc:123", "cert": b"content_cert", @@ -720,163 +723,14 @@ async def test_tls_assets_writer(hass: HomeAssistant) -> None: "homeassistant.components.bosch_shc.config_flow.open", mock_open() ) as mocked_file, ): - write_tls_asset(hass, unique_id, CONF_SHC_CERT, assets["cert"]) + write_tls_asset(hass, CONF_SHC_CERT, assets["cert"]) mocked_file.assert_called_with( - hass.config.path(DOMAIN, unique_id, CONF_SHC_CERT), "w", encoding="utf8" + hass.config.path(DOMAIN, CONF_SHC_CERT), "w", encoding="utf8" ) mocked_file().write.assert_called_with("content_cert") - write_tls_asset(hass, unique_id, CONF_SHC_KEY, assets["key"]) + write_tls_asset(hass, CONF_SHC_KEY, assets["key"]) mocked_file.assert_called_with( - hass.config.path(DOMAIN, unique_id, CONF_SHC_KEY), "w", encoding="utf8" + hass.config.path(DOMAIN, CONF_SHC_KEY), "w", encoding="utf8" ) mocked_file().write.assert_called_with("content_key") - - -@pytest.mark.usefixtures("mock_zeroconf") -async def test_register_multiple_controllers(hass: HomeAssistant) -> None: - """Test register multiple controllers. - - Each registered controller must get its own key/certificate pair, - which must not get overwritten when a new controller is added. 
- """ - - controller_1 = { - "hostname": "shc111111", - "mac": "test-mac1", - "host": "1.1.1.1", - "register": { - "token": "abc:shc111111", - "cert": b"content_cert1", - "key": b"content_key1", - }, - } - controller_2 = { - "hostname": "shc222222", - "mac": "test-mac2", - "host": "2.2.2.2", - "register": { - "token": "abc:shc222222", - "cert": b"content_cert2", - "key": b"content_key2", - }, - } - - # Set up controller 1 - ctrl_1_result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with ( - patch( - "boschshcpy.session.SHCSession.mdns_info", - return_value=SHCInformation, - ), - patch( - "boschshcpy.information.SHCInformation.name", - new_callable=PropertyMock, - return_value=controller_1["hostname"], - ), - patch( - "boschshcpy.information.SHCInformation.unique_id", - new_callable=PropertyMock, - return_value=controller_1["mac"], - ), - ): - ctrl_1_result2 = await hass.config_entries.flow.async_configure( - ctrl_1_result["flow_id"], - {"host": controller_1["host"]}, - ) - - with ( - patch( - "boschshcpy.register_client.SHCRegisterClient.register", - return_value=controller_1["register"], - ), - patch("os.mkdir"), - patch("homeassistant.components.bosch_shc.config_flow.open"), - patch("boschshcpy.session.SHCSession.authenticate"), - patch( - "homeassistant.components.bosch_shc.async_setup_entry", - return_value=True, - ), - ): - ctrl_1_result3 = await hass.config_entries.flow.async_configure( - ctrl_1_result2["flow_id"], - {"password": "test"}, - ) - await hass.async_block_till_done() - - assert ctrl_1_result3["type"] is FlowResultType.CREATE_ENTRY - assert ctrl_1_result3["title"] == "shc111111" - assert ctrl_1_result3["context"]["unique_id"] == controller_1["mac"] - assert ctrl_1_result3["data"] == { - "host": "1.1.1.1", - "ssl_certificate": hass.config.path(DOMAIN, controller_1["mac"], CONF_SHC_CERT), - "ssl_key": hass.config.path(DOMAIN, controller_1["mac"], CONF_SHC_KEY), - "token": "abc:shc111111", - "hostname": "shc111111", - } - - # Set up controller 2 - ctrl_2_result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with ( - patch( - "boschshcpy.session.SHCSession.mdns_info", - return_value=SHCInformation, - ), - patch( - "boschshcpy.information.SHCInformation.name", - new_callable=PropertyMock, - return_value=controller_2["hostname"], - ), - patch( - "boschshcpy.information.SHCInformation.unique_id", - new_callable=PropertyMock, - return_value=controller_2["mac"], - ), - ): - ctrl_2_result2 = await hass.config_entries.flow.async_configure( - ctrl_2_result["flow_id"], - {"host": controller_2["host"]}, - ) - - with ( - patch( - "boschshcpy.register_client.SHCRegisterClient.register", - return_value=controller_2["register"], - ), - patch("os.mkdir"), - patch("homeassistant.components.bosch_shc.config_flow.open"), - patch("boschshcpy.session.SHCSession.authenticate"), - patch( - "homeassistant.components.bosch_shc.async_setup_entry", - return_value=True, - ), - ): - ctrl_2_result3 = await hass.config_entries.flow.async_configure( - ctrl_2_result2["flow_id"], - {"password": "test"}, - ) - await hass.async_block_till_done() - - assert ctrl_2_result3["type"] is FlowResultType.CREATE_ENTRY - assert ctrl_2_result3["title"] == "shc222222" - assert ctrl_2_result3["context"]["unique_id"] == controller_2["mac"] - assert ctrl_2_result3["data"] == { - "host": "2.2.2.2", - "ssl_certificate": hass.config.path(DOMAIN, controller_2["mac"], CONF_SHC_CERT), - "ssl_key": 
hass.config.path(DOMAIN, controller_2["mac"], CONF_SHC_KEY), - "token": "abc:shc222222", - "hostname": "shc222222", - } - - # Check that each controller has its own key/certificate pair - assert ( - ctrl_1_result3["data"]["ssl_certificate"] - != ctrl_2_result3["data"]["ssl_certificate"] - ) - assert ctrl_1_result3["data"]["ssl_key"] != ctrl_2_result3["data"]["ssl_key"] diff --git a/tests/components/braviatv/conftest.py b/tests/components/braviatv/conftest.py index b25e8ddf067..186f4e12337 100644 --- a/tests/components/braviatv/conftest.py +++ b/tests/components/braviatv/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for Bravia TV.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/braviatv/snapshots/test_diagnostics.ambr b/tests/components/braviatv/snapshots/test_diagnostics.ambr index cd29c647df7..2fd515b24e5 100644 --- a/tests/components/braviatv/snapshots/test_diagnostics.ambr +++ b/tests/components/braviatv/snapshots/test_diagnostics.ambr @@ -9,8 +9,6 @@ 'use_psk': True, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'braviatv', 'entry_id': '3bd2acb0e4f0476d40865546d0d91921', 'minor_version': 1, diff --git a/tests/components/braviatv/test_config_flow.py b/tests/components/braviatv/test_config_flow.py index 7a4f93f7f16..6fc02dbd36f 100644 --- a/tests/components/braviatv/test_config_flow.py +++ b/tests/components/braviatv/test_config_flow.py @@ -17,7 +17,7 @@ from homeassistant.components.braviatv.const import ( DOMAIN, NICKNAME_PREFIX, ) -from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_CLIENT_ID, CONF_HOST, CONF_MAC, CONF_PIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -405,9 +405,6 @@ async def test_reauth_successful(hass: HomeAssistant, use_psk, new_pin) -> None: title="TV-Model", ) config_entry.add_to_hass(hass) - result = await config_entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "authorize" with ( patch("pybravia.BraviaClient.connect"), @@ -424,6 +421,15 @@ async def test_reauth_successful(hass: HomeAssistant, use_psk, new_pin) -> None: return_value={}, ), ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": config_entry.entry_id}, + data=config_entry.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "authorize" + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_USE_PSK: use_psk} ) diff --git a/tests/components/braviatv/test_diagnostics.py b/tests/components/braviatv/test_diagnostics.py index a7bd1631788..13f6c92fb76 100644 --- a/tests/components/braviatv/test_diagnostics.py +++ b/tests/components/braviatv/test_diagnostics.py @@ -3,7 +3,6 @@ from unittest.mock import patch from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.components.braviatv.const import CONF_USE_PSK, DOMAIN from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PIN @@ -72,4 +71,4 @@ async def test_entry_diagnostics( assert await async_setup_component(hass, DOMAIN, {}) result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot(exclude=props("created_at", "modified_at")) + 
assert result == snapshot diff --git a/tests/components/bring/conftest.py b/tests/components/bring/conftest.py index 62aa38d4e92..25330c10ba4 100644 --- a/tests/components/bring/conftest.py +++ b/tests/components/bring/conftest.py @@ -1,17 +1,16 @@ """Common fixtures for the Bring! tests.""" -from collections.abc import Generator from typing import cast from unittest.mock import AsyncMock, patch -import uuid from bring_api.types import BringAuthResponse import pytest +from typing_extensions import Generator from homeassistant.components.bring import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD -from tests.common import MockConfigEntry, load_json_object_fixture +from tests.common import MockConfigEntry EMAIL = "test-email" PASSWORD = "test-password" @@ -44,26 +43,10 @@ def mock_bring_client() -> Generator[AsyncMock]: client = mock_client.return_value client.uuid = UUID client.login.return_value = cast(BringAuthResponse, {"name": "Bring"}) - client.load_lists.return_value = load_json_object_fixture("lists.json", DOMAIN) - client.get_list.return_value = load_json_object_fixture("items.json", DOMAIN) - client.get_all_user_settings.return_value = load_json_object_fixture( - "usersettings.json", DOMAIN - ) + client.load_lists.return_value = {"lists": []} yield client -@pytest.fixture -def mock_uuid() -> Generator[AsyncMock]: - """Mock uuid.""" - - with patch( - "homeassistant.components.bring.todo.uuid.uuid4", - autospec=True, - ) as mock_client: - mock_client.return_value = uuid.UUID("b669ad23-606a-4652-b302-995d34b1cb1c") - yield mock_client - - @pytest.fixture(name="bring_config_entry") def mock_bring_config_entry() -> MockConfigEntry: """Mock bring configuration entry.""" diff --git a/tests/components/bring/fixtures/items.json b/tests/components/bring/fixtures/items.json deleted file mode 100644 index e0b9006167b..00000000000 --- a/tests/components/bring/fixtures/items.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "uuid": "77a151f8-77c4-47a3-8295-c750a0e69d4f", - "status": "REGISTERED", - "purchase": [ - { - "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", - "itemId": "Paprika", - "specification": "Rot", - "attributes": [ - { - "type": "PURCHASE_CONDITIONS", - "content": { - "urgent": true, - "convenient": true, - "discounted": true - } - } - ] - }, - { - "uuid": "72d370ab-d8ca-4e41-b956-91df94795b4e", - "itemId": "Pouletbrüstli", - "specification": "Bio", - "attributes": [ - { - "type": "PURCHASE_CONDITIONS", - "content": { - "urgent": true, - "convenient": true, - "discounted": true - } - } - ] - } - ], - "recently": [ - { - "uuid": "fc8db30a-647e-4e6c-9d71-3b85d6a2d954", - "itemId": "Ananas", - "specification": "", - "attributes": [] - } - ] -} diff --git a/tests/components/bring/fixtures/items_invitation.json b/tests/components/bring/fixtures/items_invitation.json deleted file mode 100644 index 82ef623e439..00000000000 --- a/tests/components/bring/fixtures/items_invitation.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "uuid": "77a151f8-77c4-47a3-8295-c750a0e69d4f", - "status": "INVITATION", - "purchase": [ - { - "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", - "itemId": "Paprika", - "specification": "Rot", - "attributes": [ - { - "type": "PURCHASE_CONDITIONS", - "content": { - "urgent": true, - "convenient": true, - "discounted": true - } - } - ] - }, - { - "uuid": "72d370ab-d8ca-4e41-b956-91df94795b4e", - "itemId": "Pouletbrüstli", - "specification": "Bio", - "attributes": [ - { - "type": "PURCHASE_CONDITIONS", - "content": { - "urgent": true, - "convenient": true, - 
"discounted": true - } - } - ] - } - ], - "recently": [ - { - "uuid": "fc8db30a-647e-4e6c-9d71-3b85d6a2d954", - "itemId": "Ananas", - "specification": "", - "attributes": [] - } - ] -} diff --git a/tests/components/bring/fixtures/items_shared.json b/tests/components/bring/fixtures/items_shared.json deleted file mode 100644 index 9ac999729d3..00000000000 --- a/tests/components/bring/fixtures/items_shared.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "uuid": "77a151f8-77c4-47a3-8295-c750a0e69d4f", - "status": "SHARED", - "purchase": [ - { - "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", - "itemId": "Paprika", - "specification": "Rot", - "attributes": [ - { - "type": "PURCHASE_CONDITIONS", - "content": { - "urgent": true, - "convenient": true, - "discounted": true - } - } - ] - }, - { - "uuid": "72d370ab-d8ca-4e41-b956-91df94795b4e", - "itemId": "Pouletbrüstli", - "specification": "Bio", - "attributes": [ - { - "type": "PURCHASE_CONDITIONS", - "content": { - "urgent": true, - "convenient": true, - "discounted": true - } - } - ] - } - ], - "recently": [ - { - "uuid": "fc8db30a-647e-4e6c-9d71-3b85d6a2d954", - "itemId": "Ananas", - "specification": "", - "attributes": [] - } - ] -} diff --git a/tests/components/bring/fixtures/lists.json b/tests/components/bring/fixtures/lists.json deleted file mode 100644 index 5891d94f7de..00000000000 --- a/tests/components/bring/fixtures/lists.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "lists": [ - { - "listUuid": "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", - "name": "Einkauf", - "theme": "ch.publisheria.bring.theme.home" - }, - { - "listUuid": "b4776778-7f6c-496e-951b-92a35d3db0dd", - "name": "Baumarkt", - "theme": "ch.publisheria.bring.theme.home" - } - ] -} diff --git a/tests/components/bring/fixtures/usersettings.json b/tests/components/bring/fixtures/usersettings.json deleted file mode 100644 index 6c93cdc7d83..00000000000 --- a/tests/components/bring/fixtures/usersettings.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "userlistsettings": [ - { - "listUuid": "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", - "usersettings": [ - { - "key": "listSectionOrder", - "value": "[\"Früchte & Gemüse\",\"Brot & Gebäck\",\"Milch & Käse\",\"Fleisch & Fisch\",\"Zutaten & Gewürze\",\"Fertig- & Tiefkühlprodukte\",\"Getreideprodukte\",\"Snacks & Süsswaren\",\"Getränke & Tabak\",\"Haushalt & Gesundheit\",\"Pflege & Gesundheit\",\"Tierbedarf\",\"Baumarkt & Garten\",\"Eigene Artikel\"]" - }, - { - "key": "listArticleLanguage", - "value": "de-DE" - } - ] - }, - { - "listUuid": "b4776778-7f6c-496e-951b-92a35d3db0dd", - "usersettings": [ - { - "key": "listSectionOrder", - "value": "[\"Früchte & Gemüse\",\"Brot & Gebäck\",\"Milch & Käse\",\"Fleisch & Fisch\",\"Zutaten & Gewürze\",\"Fertig- & Tiefkühlprodukte\",\"Getreideprodukte\",\"Snacks & Süsswaren\",\"Getränke & Tabak\",\"Haushalt & Gesundheit\",\"Pflege & Gesundheit\",\"Tierbedarf\",\"Baumarkt & Garten\",\"Eigene Artikel\"]" - }, - { - "key": "listArticleLanguage", - "value": "en-US" - } - ] - } - ], - "usersettings": [ - { - "key": "autoPush", - "value": "ON" - }, - { - "key": "premiumHideOffersBadge", - "value": "ON" - }, - { - "key": "premiumHideSponsoredCategories", - "value": "ON" - }, - { - "key": "premiumHideInspirationsBadge", - "value": "ON" - }, - { - "key": "onboardClient", - "value": "android" - }, - { - "key": "premiumHideOffersOnMain", - "value": "ON" - }, - { - "key": "defaultListUUID", - "value": "e542eef6-dba7-4c31-a52c-29e6ab9d83a5" - } - ] -} diff --git a/tests/components/bring/snapshots/test_diagnostics.ambr 
b/tests/components/bring/snapshots/test_diagnostics.ambr deleted file mode 100644 index 6d830a12133..00000000000 --- a/tests/components/bring/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,101 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics - dict({ - 'b4776778-7f6c-496e-951b-92a35d3db0dd': dict({ - 'listUuid': 'b4776778-7f6c-496e-951b-92a35d3db0dd', - 'name': 'Baumarkt', - 'purchase': list([ - dict({ - 'attributes': list([ - dict({ - 'content': dict({ - 'convenient': True, - 'discounted': True, - 'urgent': True, - }), - 'type': 'PURCHASE_CONDITIONS', - }), - ]), - 'itemId': 'Paprika', - 'specification': 'Rot', - 'uuid': 'b5d0790b-5f32-4d5c-91da-e29066f167de', - }), - dict({ - 'attributes': list([ - dict({ - 'content': dict({ - 'convenient': True, - 'discounted': True, - 'urgent': True, - }), - 'type': 'PURCHASE_CONDITIONS', - }), - ]), - 'itemId': 'Pouletbrüstli', - 'specification': 'Bio', - 'uuid': '72d370ab-d8ca-4e41-b956-91df94795b4e', - }), - ]), - 'recently': list([ - dict({ - 'attributes': list([ - ]), - 'itemId': 'Ananas', - 'specification': '', - 'uuid': 'fc8db30a-647e-4e6c-9d71-3b85d6a2d954', - }), - ]), - 'status': 'REGISTERED', - 'theme': 'ch.publisheria.bring.theme.home', - 'uuid': '77a151f8-77c4-47a3-8295-c750a0e69d4f', - }), - 'e542eef6-dba7-4c31-a52c-29e6ab9d83a5': dict({ - 'listUuid': 'e542eef6-dba7-4c31-a52c-29e6ab9d83a5', - 'name': 'Einkauf', - 'purchase': list([ - dict({ - 'attributes': list([ - dict({ - 'content': dict({ - 'convenient': True, - 'discounted': True, - 'urgent': True, - }), - 'type': 'PURCHASE_CONDITIONS', - }), - ]), - 'itemId': 'Paprika', - 'specification': 'Rot', - 'uuid': 'b5d0790b-5f32-4d5c-91da-e29066f167de', - }), - dict({ - 'attributes': list([ - dict({ - 'content': dict({ - 'convenient': True, - 'discounted': True, - 'urgent': True, - }), - 'type': 'PURCHASE_CONDITIONS', - }), - ]), - 'itemId': 'Pouletbrüstli', - 'specification': 'Bio', - 'uuid': '72d370ab-d8ca-4e41-b956-91df94795b4e', - }), - ]), - 'recently': list([ - dict({ - 'attributes': list([ - ]), - 'itemId': 'Ananas', - 'specification': '', - 'uuid': 'fc8db30a-647e-4e6c-9d71-3b85d6a2d954', - }), - ]), - 'status': 'REGISTERED', - 'theme': 'ch.publisheria.bring.theme.home', - 'uuid': '77a151f8-77c4-47a3-8295-c750a0e69d4f', - }), - }) -# --- diff --git a/tests/components/bring/snapshots/test_sensor.ambr b/tests/components/bring/snapshots/test_sensor.ambr deleted file mode 100644 index 97e1d1b4bd9..00000000000 --- a/tests/components/bring/snapshots/test_sensor.ambr +++ /dev/null @@ -1,583 +0,0 @@ -# serializer version: 1 -# name: test_setup[sensor.baumarkt_discount_only-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.baumarkt_discount_only', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Discount only', - 'platform': 'bring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd_discounted', - 'unit_of_measurement': 'items', - }) -# --- -# name: test_setup[sensor.baumarkt_discount_only-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Baumarkt Discount 
only', - 'unit_of_measurement': 'items', - }), - 'context': , - 'entity_id': 'sensor.baumarkt_discount_only', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_setup[sensor.baumarkt_list_access-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'registered', - 'shared', - 'invitation', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.baumarkt_list_access', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'List access', - 'platform': 'bring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd_list_access', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup[sensor.baumarkt_list_access-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Baumarkt List access', - 'options': list([ - 'registered', - 'shared', - 'invitation', - ]), - }), - 'context': , - 'entity_id': 'sensor.baumarkt_list_access', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'registered', - }) -# --- -# name: test_setup[sensor.baumarkt_on_occasion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.baumarkt_on_occasion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On occasion', - 'platform': 'bring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd_convenient', - 'unit_of_measurement': 'items', - }) -# --- -# name: test_setup[sensor.baumarkt_on_occasion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Baumarkt On occasion', - 'unit_of_measurement': 'items', - }), - 'context': , - 'entity_id': 'sensor.baumarkt_on_occasion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_setup[sensor.baumarkt_region_language-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'de-at', - 'de-ch', - 'de-de', - 'en-au', - 'en-ca', - 'en-gb', - 'en-us', - 'es-es', - 'fr-ch', - 'fr-fr', - 'hu-hu', - 'it-ch', - 'it-it', - 'nb-no', - 'nl-nl', - 'pl-pl', - 'pt-br', - 'ru-ru', - 'sv-se', - 'tr-tr', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.baumarkt_region_language', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Region & language', - 'platform': 'bring', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd_list_language', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup[sensor.baumarkt_region_language-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Baumarkt Region & language', - 'options': list([ - 'de-at', - 'de-ch', - 'de-de', - 'en-au', - 'en-ca', - 'en-gb', - 'en-us', - 'es-es', - 'fr-ch', - 'fr-fr', - 'hu-hu', - 'it-ch', - 'it-it', - 'nb-no', - 'nl-nl', - 'pl-pl', - 'pt-br', - 'ru-ru', - 'sv-se', - 'tr-tr', - ]), - }), - 'context': , - 'entity_id': 'sensor.baumarkt_region_language', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'en-us', - }) -# --- -# name: test_setup[sensor.baumarkt_urgent-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.baumarkt_urgent', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Urgent', - 'platform': 'bring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd_urgent', - 'unit_of_measurement': 'items', - }) -# --- -# name: test_setup[sensor.baumarkt_urgent-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Baumarkt Urgent', - 'unit_of_measurement': 'items', - }), - 'context': , - 'entity_id': 'sensor.baumarkt_urgent', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_setup[sensor.einkauf_discount_only-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.einkauf_discount_only', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Discount only', - 'platform': 'bring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5_discounted', - 'unit_of_measurement': 'items', - }) -# --- -# name: test_setup[sensor.einkauf_discount_only-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Einkauf Discount only', - 'unit_of_measurement': 'items', - }), - 'context': , - 'entity_id': 'sensor.einkauf_discount_only', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_setup[sensor.einkauf_list_access-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'registered', - 'shared', - 'invitation', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.einkauf_list_access', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 
'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'List access', - 'platform': 'bring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5_list_access', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup[sensor.einkauf_list_access-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Einkauf List access', - 'options': list([ - 'registered', - 'shared', - 'invitation', - ]), - }), - 'context': , - 'entity_id': 'sensor.einkauf_list_access', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'registered', - }) -# --- -# name: test_setup[sensor.einkauf_on_occasion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.einkauf_on_occasion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On occasion', - 'platform': 'bring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5_convenient', - 'unit_of_measurement': 'items', - }) -# --- -# name: test_setup[sensor.einkauf_on_occasion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Einkauf On occasion', - 'unit_of_measurement': 'items', - }), - 'context': , - 'entity_id': 'sensor.einkauf_on_occasion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_setup[sensor.einkauf_region_language-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'de-at', - 'de-ch', - 'de-de', - 'en-au', - 'en-ca', - 'en-gb', - 'en-us', - 'es-es', - 'fr-ch', - 'fr-fr', - 'hu-hu', - 'it-ch', - 'it-it', - 'nb-no', - 'nl-nl', - 'pl-pl', - 'pt-br', - 'ru-ru', - 'sv-se', - 'tr-tr', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.einkauf_region_language', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Region & language', - 'platform': 'bring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5_list_language', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup[sensor.einkauf_region_language-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Einkauf Region & language', - 'options': list([ - 'de-at', - 'de-ch', - 'de-de', - 'en-au', - 'en-ca', - 'en-gb', - 'en-us', - 'es-es', - 'fr-ch', - 'fr-fr', - 'hu-hu', - 'it-ch', - 'it-it', - 'nb-no', - 'nl-nl', - 'pl-pl', - 'pt-br', - 'ru-ru', - 'sv-se', - 'tr-tr', - ]), - }), - 'context': , - 'entity_id': 'sensor.einkauf_region_language', - 
'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'de-de', - }) -# --- -# name: test_setup[sensor.einkauf_urgent-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.einkauf_urgent', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Urgent', - 'platform': 'bring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5_urgent', - 'unit_of_measurement': 'items', - }) -# --- -# name: test_setup[sensor.einkauf_urgent-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Einkauf Urgent', - 'unit_of_measurement': 'items', - }), - 'context': , - 'entity_id': 'sensor.einkauf_urgent', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- diff --git a/tests/components/bring/snapshots/test_todo.ambr b/tests/components/bring/snapshots/test_todo.ambr deleted file mode 100644 index 6a7104727a1..00000000000 --- a/tests/components/bring/snapshots/test_todo.ambr +++ /dev/null @@ -1,95 +0,0 @@ -# serializer version: 1 -# name: test_todo[todo.baumarkt-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'todo', - 'entity_category': None, - 'entity_id': 'todo.baumarkt', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'bring', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'shopping_list', - 'unique_id': '00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd', - 'unit_of_measurement': None, - }) -# --- -# name: test_todo[todo.baumarkt-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Baumarkt', - 'supported_features': , - }), - 'context': , - 'entity_id': 'todo.baumarkt', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_todo[todo.einkauf-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'todo', - 'entity_category': None, - 'entity_id': 'todo.einkauf', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'bring', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'shopping_list', - 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5', - 'unit_of_measurement': None, - }) -# --- -# name: test_todo[todo.einkauf-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Einkauf', - 'supported_features': , - }), - 'context': , - 'entity_id': 'todo.einkauf', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- diff --git a/tests/components/bring/test_config_flow.py b/tests/components/bring/test_config_flow.py index 8d215a5d3ee..d307e0ccbbe 100644 --- a/tests/components/bring/test_config_flow.py +++ b/tests/components/bring/test_config_flow.py @@ -10,7 +10,7 @@ from bring_api.exceptions import ( import pytest from homeassistant.components.bring.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -123,7 +123,15 @@ async def test_flow_reauth( bring_config_entry.add_to_hass(hass) - result = await bring_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": bring_config_entry.entry_id, + "unique_id": bring_config_entry.unique_id, + }, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -163,7 +171,15 @@ async def test_flow_reauth_error_and_recover( bring_config_entry.add_to_hass(hass) - result = await bring_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": bring_config_entry.entry_id, + "unique_id": bring_config_entry.unique_id, + }, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/bring/test_diagnostics.py b/tests/components/bring/test_diagnostics.py deleted file mode 100644 index a86de5a0d2d..00000000000 --- a/tests/components/bring/test_diagnostics.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Test for diagnostics platform of the Bring! 
integration.""" - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -@pytest.mark.usefixtures("mock_bring_client") -async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - bring_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test diagnostics.""" - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, bring_config_entry) - == snapshot - ) diff --git a/tests/components/bring/test_init.py b/tests/components/bring/test_init.py index 5ee66999ea4..f1b1f78e775 100644 --- a/tests/components/bring/test_init.py +++ b/tests/components/bring/test_init.py @@ -28,9 +28,9 @@ async def setup_integration( await hass.async_block_till_done() -@pytest.mark.usefixtures("mock_bring_client") async def test_load_unload( hass: HomeAssistant, + mock_bring_client: AsyncMock, bring_config_entry: MockConfigEntry, ) -> None: """Test loading and unloading of the config entry.""" @@ -58,7 +58,7 @@ async def test_init_failure( mock_bring_client: AsyncMock, status: ConfigEntryState, exception: Exception, - bring_config_entry: MockConfigEntry, + bring_config_entry: MockConfigEntry | None, ) -> None: """Test an initialization error on integration load.""" mock_bring_client.login.side_effect = exception @@ -79,7 +79,7 @@ async def test_init_exceptions( mock_bring_client: AsyncMock, exception: Exception, expected: Exception, - bring_config_entry: MockConfigEntry, + bring_config_entry: MockConfigEntry | None, ) -> None: """Test an initialization error on integration load.""" bring_config_entry.add_to_hass(hass) @@ -87,49 +87,3 @@ async def test_init_exceptions( with pytest.raises(expected): await async_setup_entry(hass, bring_config_entry) - - -@pytest.mark.parametrize("exception", [BringRequestException, BringParseException]) -@pytest.mark.parametrize( - "bring_method", - [ - "load_lists", - "get_list", - "get_all_user_settings", - ], -) -async def test_config_entry_not_ready( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, - exception: Exception, - bring_method: str, -) -> None: - """Test config entry not ready.""" - getattr(mock_bring_client, bring_method).side_effect = exception - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.SETUP_RETRY - - -@pytest.mark.parametrize( - "exception", [None, BringAuthException, BringRequestException, BringParseException] -) -async def test_config_entry_not_ready_auth_error( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, - exception: Exception | None, -) -> None: - """Test config entry not ready from authentication error.""" - - mock_bring_client.load_lists.side_effect = BringAuthException - mock_bring_client.retrieve_new_access_token.side_effect = exception - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git 
a/tests/components/bring/test_notification.py b/tests/components/bring/test_notification.py deleted file mode 100644 index b1fa28335ad..00000000000 --- a/tests/components/bring/test_notification.py +++ /dev/null @@ -1,106 +0,0 @@ -"""Test todo entity notification action of the Bring! integration.""" - -import re -from unittest.mock import AsyncMock - -from bring_api import BringNotificationType, BringRequestException -import pytest - -from homeassistant.components.bring.const import ( - ATTR_ITEM_NAME, - ATTR_NOTIFICATION_TYPE, - DOMAIN, - SERVICE_PUSH_NOTIFICATION, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from tests.common import MockConfigEntry - - -async def test_send_notification( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, -) -> None: - """Test send bring push notification.""" - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - - await hass.services.async_call( - DOMAIN, - SERVICE_PUSH_NOTIFICATION, - service_data={ - ATTR_NOTIFICATION_TYPE: "GOING_SHOPPING", - }, - target={ATTR_ENTITY_ID: "todo.einkauf"}, - blocking=True, - ) - - mock_bring_client.notify.assert_called_once_with( - "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", - BringNotificationType.GOING_SHOPPING, - None, - ) - - -async def test_send_notification_exception( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, -) -> None: - """Test send bring push notification with exception.""" - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - mock_bring_client.notify.side_effect = BringRequestException - with pytest.raises( - HomeAssistantError, - match="Failed to send push notification for bring due to a connection error, try again later", - ): - await hass.services.async_call( - DOMAIN, - SERVICE_PUSH_NOTIFICATION, - service_data={ - ATTR_NOTIFICATION_TYPE: "GOING_SHOPPING", - }, - target={ATTR_ENTITY_ID: "todo.einkauf"}, - blocking=True, - ) - - -async def test_send_notification_service_validation_error( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, -) -> None: - """Test send bring push notification.""" - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - mock_bring_client.notify.side_effect = ValueError - with pytest.raises( - HomeAssistantError, - match=re.escape( - "Failed to perform action bring.send_message. 'URGENT_MESSAGE' requires a value @ data['item']. Got None" - ), - ): - await hass.services.async_call( - DOMAIN, - SERVICE_PUSH_NOTIFICATION, - service_data={ATTR_NOTIFICATION_TYPE: "URGENT_MESSAGE", ATTR_ITEM_NAME: ""}, - target={ATTR_ENTITY_ID: "todo.einkauf"}, - blocking=True, - ) diff --git a/tests/components/bring/test_sensor.py b/tests/components/bring/test_sensor.py deleted file mode 100644 index 974818ccedf..00000000000 --- a/tests/components/bring/test_sensor.py +++ /dev/null @@ -1,76 +0,0 @@ -"""Test for sensor platform of the Bring! 
integration.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.bring.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform - - -@pytest.fixture(autouse=True) -def sensor_only() -> Generator[None]: - """Enable only the sensor platform.""" - with patch( - "homeassistant.components.bring.PLATFORMS", - [Platform.SENSOR], - ): - yield - - -@pytest.mark.usefixtures("mock_bring_client") -async def test_setup( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Snapshot test states of sensor platform.""" - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - - await snapshot_platform( - hass, entity_registry, snapshot, bring_config_entry.entry_id - ) - - -@pytest.mark.parametrize( - ("fixture", "entity_state"), - [ - ("items_invitation", "invitation"), - ("items_shared", "shared"), - ("items", "registered"), - ], -) -async def test_list_access_states( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, - fixture: str, - entity_state: str, -) -> None: - """Snapshot test states of list access sensor.""" - - mock_bring_client.get_list.return_value = load_json_object_fixture( - f"{fixture}.json", DOMAIN - ) - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - - assert (state := hass.states.get("sensor.einkauf_list_access")) - assert state.state == entity_state diff --git a/tests/components/bring/test_todo.py b/tests/components/bring/test_todo.py deleted file mode 100644 index 9cc4ae3d888..00000000000 --- a/tests/components/bring/test_todo.py +++ /dev/null @@ -1,313 +0,0 @@ -"""Test for todo platform of the Bring! 
integration.""" - -from collections.abc import Generator -import re -from unittest.mock import AsyncMock, patch - -from bring_api import BringItemOperation, BringRequestException -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.todo import ( - ATTR_DESCRIPTION, - ATTR_ITEM, - ATTR_RENAME, - DOMAIN as TODO_DOMAIN, - TodoServices, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.fixture(autouse=True) -def todo_only() -> Generator[None]: - """Enable only the todo platform.""" - with patch( - "homeassistant.components.bring.PLATFORMS", - [Platform.TODO], - ): - yield - - -@pytest.mark.usefixtures("mock_bring_client") -async def test_todo( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Snapshot test states of todo platform.""" - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - - await snapshot_platform( - hass, entity_registry, snapshot, bring_config_entry.entry_id - ) - - -@pytest.mark.usefixtures("mock_uuid") -async def test_add_item( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, -) -> None: - """Test add item to list.""" - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.ADD_ITEM, - service_data={ATTR_ITEM: "Äpfel", ATTR_DESCRIPTION: "rot"}, - target={ATTR_ENTITY_ID: "todo.einkauf"}, - blocking=True, - ) - - mock_bring_client.save_item.assert_called_once_with( - "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", - "Äpfel", - "rot", - "b669ad23-606a-4652-b302-995d34b1cb1c", - ) - - -async def test_add_item_exception( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, -) -> None: - """Test add item to list with exception.""" - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - - mock_bring_client.save_item.side_effect = BringRequestException - with pytest.raises( - HomeAssistantError, match="Failed to save item Äpfel to Bring! 
list" - ): - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.ADD_ITEM, - service_data={ATTR_ITEM: "Äpfel", ATTR_DESCRIPTION: "rot"}, - target={ATTR_ENTITY_ID: "todo.einkauf"}, - blocking=True, - ) - - -@pytest.mark.usefixtures("mock_uuid") -async def test_update_item( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, -) -> None: - """Test update item.""" - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - service_data={ - ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de", - ATTR_RENAME: "Paprika", - ATTR_DESCRIPTION: "Rot", - }, - target={ATTR_ENTITY_ID: "todo.einkauf"}, - blocking=True, - ) - - mock_bring_client.batch_update_list.assert_called_once_with( - "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", - { - "itemId": "Paprika", - "spec": "Rot", - "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", - }, - BringItemOperation.ADD, - ) - - -async def test_update_item_exception( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, -) -> None: - """Test update item with exception.""" - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - - mock_bring_client.batch_update_list.side_effect = BringRequestException - with pytest.raises( - HomeAssistantError, match="Failed to update item Paprika to Bring! list" - ): - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - service_data={ - ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de", - ATTR_RENAME: "Paprika", - ATTR_DESCRIPTION: "Rot", - }, - target={ATTR_ENTITY_ID: "todo.einkauf"}, - blocking=True, - ) - - -@pytest.mark.usefixtures("mock_uuid") -async def test_rename_item( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, -) -> None: - """Test rename item.""" - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - service_data={ - ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de", - ATTR_RENAME: "Gurke", - ATTR_DESCRIPTION: "", - }, - target={ATTR_ENTITY_ID: "todo.einkauf"}, - blocking=True, - ) - - mock_bring_client.batch_update_list.assert_called_once_with( - "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", - [ - { - "itemId": "Paprika", - "spec": "", - "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", - "operation": BringItemOperation.REMOVE, - }, - { - "itemId": "Gurke", - "spec": "", - "uuid": "b669ad23-606a-4652-b302-995d34b1cb1c", - "operation": BringItemOperation.ADD, - }, - ], - ) - - -async def test_rename_item_exception( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, -) -> None: - """Test rename item with exception.""" - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - - mock_bring_client.batch_update_list.side_effect = BringRequestException - with pytest.raises( - 
HomeAssistantError, match="Failed to rename item Gurke to Bring! list" - ): - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - service_data={ - ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de", - ATTR_RENAME: "Gurke", - ATTR_DESCRIPTION: "", - }, - target={ATTR_ENTITY_ID: "todo.einkauf"}, - blocking=True, - ) - - -@pytest.mark.usefixtures("mock_uuid") -async def test_delete_items( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, -) -> None: - """Test delete item.""" - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - service_data={ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de"}, - target={ATTR_ENTITY_ID: "todo.einkauf"}, - blocking=True, - ) - - mock_bring_client.batch_update_list.assert_called_once_with( - "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", - [ - { - "itemId": "b5d0790b-5f32-4d5c-91da-e29066f167de", - "spec": "", - "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", - }, - ], - BringItemOperation.REMOVE, - ) - - -async def test_delete_items_exception( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, -) -> None: - """Test delete item.""" - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - mock_bring_client.batch_update_list.side_effect = BringRequestException - with pytest.raises( - HomeAssistantError, - match=re.escape("Failed to delete 1 item(s) from Bring! list"), - ): - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - service_data={ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de"}, - target={ATTR_ENTITY_ID: "todo.einkauf"}, - blocking=True, - ) diff --git a/tests/components/bring/test_util.py b/tests/components/bring/test_util.py deleted file mode 100644 index 0d9ed0c5345..00000000000 --- a/tests/components/bring/test_util.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Test for utility functions of the Bring! 
integration.""" - -from typing import cast - -from bring_api import BringUserSettingsResponse -import pytest - -from homeassistant.components.bring import DOMAIN -from homeassistant.components.bring.coordinator import BringData -from homeassistant.components.bring.util import list_language, sum_attributes - -from tests.common import load_json_object_fixture - - -@pytest.mark.parametrize( - ("list_uuid", "expected"), - [ - ("e542eef6-dba7-4c31-a52c-29e6ab9d83a5", "de-DE"), - ("b4776778-7f6c-496e-951b-92a35d3db0dd", "en-US"), - ("00000000-0000-0000-0000-00000000", None), - ], -) -def test_list_language(list_uuid: str, expected: str | None) -> None: - """Test function list_language.""" - - result = list_language( - list_uuid, - cast( - BringUserSettingsResponse, - load_json_object_fixture("usersettings.json", DOMAIN), - ), - ) - - assert result == expected - - -@pytest.mark.parametrize( - ("attribute", "expected"), - [ - ("urgent", 2), - ("convenient", 2), - ("discounted", 2), - ], -) -def test_sum_attributes(attribute: str, expected: int) -> None: - """Test function sum_attributes.""" - - result = sum_attributes( - cast( - BringData, - load_json_object_fixture("items.json", DOMAIN), - ), - attribute, - ) - - assert result == expected diff --git a/tests/components/broadlink/__init__.py b/tests/components/broadlink/__init__.py index 6185e9bdefc..c9245fb16fa 100644 --- a/tests/components/broadlink/__init__.py +++ b/tests/components/broadlink/__init__.py @@ -4,7 +4,6 @@ from dataclasses import dataclass from unittest.mock import MagicMock, patch from homeassistant.components.broadlink.const import DOMAIN -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -90,16 +89,6 @@ BROADLINK_DEVICES = { 57, 5, ), - "Guest room": ( - "192.168.0.66", - "34ea34b61d2e", - "HY02/HY03", - "Hysen", - "HYS", - 0x4EAD, - 10024, - 5, - ), } @@ -116,34 +105,20 @@ class BroadlinkDevice: """Representation of a Broadlink device.""" def __init__( - self, - name: str, - host: str, - mac: str, - model: str, - manufacturer: str, - type_: str, - devtype: int, - fwversion: int, - timeout: int, - ) -> None: + self, name, host, mac, model, manufacturer, type_, devtype, fwversion, timeout + ): """Initialize the device.""" - self.name = name - self.host = host - self.mac = mac - self.model = model - self.manufacturer = manufacturer - self.type = type_ - self.devtype = devtype - self.timeout = timeout - self.fwversion = fwversion + self.name: str = name + self.host: str = host + self.mac: str = mac + self.model: str = model + self.manufacturer: str = manufacturer + self.type: str = type_ + self.devtype: int = devtype + self.timeout: int = timeout + self.fwversion: int = fwversion - async def setup_entry( - self, - hass: HomeAssistant, - mock_api: MagicMock | None = None, - mock_entry: MockConfigEntry | None = None, - ) -> MockSetup: + async def setup_entry(self, hass, mock_api=None, mock_entry=None): """Set up the device.""" mock_api = mock_api or self.get_mock_api() mock_entry = mock_entry or self.get_mock_entry() @@ -193,31 +168,6 @@ class BroadlinkDevice: } -class BroadlinkMP1BG1Device(BroadlinkDevice): - """Mock device for MP1 and BG1 with special mocking of api return values.""" - - def get_mock_api(self): - """Return a mock device (API) with support for check_power calls.""" - mock_api = super().get_mock_api() - mock_api.check_power.return_value = {"s1": 0, "s2": 0, "s3": 0, "s4": 0} - return mock_api - - -class BroadlinkSP4BDevice(BroadlinkDevice): - """Mock device for SP4b with special 
mocking of api return values.""" - - def get_mock_api(self): - """Return a mock device (API) with support for get_state calls.""" - mock_api = super().get_mock_api() - mock_api.get_state.return_value = {"pwr": 0} - return mock_api - - def get_device(name): """Get a device by name.""" - dev_type = BROADLINK_DEVICES[name][5] - if dev_type in {0x4EB5}: - return BroadlinkMP1BG1Device(name, *BROADLINK_DEVICES[name]) - if dev_type in {0x5115}: - return BroadlinkSP4BDevice(name, *BROADLINK_DEVICES[name]) return BroadlinkDevice(name, *BROADLINK_DEVICES[name]) diff --git a/tests/components/broadlink/test_climate.py b/tests/components/broadlink/test_climate.py deleted file mode 100644 index 6b39d1895b1..00000000000 --- a/tests/components/broadlink/test_climate.py +++ /dev/null @@ -1,180 +0,0 @@ -"""Tests for Broadlink climate.""" - -from typing import Any - -import pytest - -from homeassistant.components.broadlink.climate import SensorMode -from homeassistant.components.broadlink.const import DOMAIN -from homeassistant.components.climate import ( - ATTR_TEMPERATURE, - DOMAIN as CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - HVACAction, - HVACMode, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.entity_component import async_update_entity - -from . import get_device - - -@pytest.mark.parametrize( - ( - "api_return_value", - "expected_state", - "expected_current_temperature", - "expected_temperature", - "expected_hvac_action", - ), - [ - ( - { - "sensor": SensorMode.INNER_SENSOR_CONTROL.value, - "power": 1, - "auto_mode": 0, - "active": 1, - "room_temp": 22, - "thermostat_temp": 23, - "external_temp": 30, - }, - HVACMode.HEAT, - 22, - 23, - HVACAction.HEATING, - ), - ( - { - "sensor": SensorMode.OUTER_SENSOR_CONTROL.value, - "power": 1, - "auto_mode": 1, - "active": 0, - "room_temp": 22, - "thermostat_temp": 23, - "external_temp": 30, - }, - HVACMode.AUTO, - 30, - 23, - HVACAction.IDLE, - ), - ( - { - "sensor": SensorMode.INNER_SENSOR_CONTROL.value, - "power": 0, - "auto_mode": 0, - "active": 0, - "room_temp": 22, - "thermostat_temp": 23, - "external_temp": 30, - }, - HVACMode.OFF, - 22, - 23, - HVACAction.OFF, - ), - ], -) -async def test_climate( - api_return_value: dict[str, Any], - expected_state: HVACMode, - expected_current_temperature: int, - expected_temperature: int, - expected_hvac_action: HVACAction, - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test Broadlink climate.""" - - device = get_device("Guest room") - mock_setup = await device.setup_entry(hass) - - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_setup.entry.unique_id)} - ) - entries = er.async_entries_for_device(entity_registry, device_entry.id) - climates = [entry for entry in entries if entry.domain == Platform.CLIMATE] - assert len(climates) == 1 - - climate = climates[0] - - mock_setup.api.get_full_status.return_value = api_return_value - - await async_update_entity(hass, climate.entity_id) - assert mock_setup.api.get_full_status.call_count == 2 - state = hass.states.get(climate.entity_id) - assert state.state == expected_state - assert state.attributes["current_temperature"] == expected_current_temperature - assert state.attributes["temperature"] == expected_temperature - assert state.attributes["hvac_action"] == 
expected_hvac_action - - -async def test_climate_set_temperature_turn_off_turn_on( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test Broadlink climate.""" - - device = get_device("Guest room") - mock_setup = await device.setup_entry(hass) - - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_setup.entry.unique_id)} - ) - entries = er.async_entries_for_device(entity_registry, device_entry.id) - climates = [entry for entry in entries if entry.domain == Platform.CLIMATE] - assert len(climates) == 1 - - climate = climates[0] - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: climate.entity_id, - ATTR_TEMPERATURE: "24", - }, - blocking=True, - ) - state = hass.states.get(climate.entity_id) - - assert mock_setup.api.set_temp.call_count == 1 - assert mock_setup.api.set_power.call_count == 0 - assert mock_setup.api.set_mode.call_count == 0 - assert state.attributes["temperature"] == 24 - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: climate.entity_id, - }, - blocking=True, - ) - state = hass.states.get(climate.entity_id) - - assert mock_setup.api.set_temp.call_count == 1 - assert mock_setup.api.set_power.call_count == 1 - assert mock_setup.api.set_mode.call_count == 0 - assert state.state == HVACMode.OFF - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: climate.entity_id, - }, - blocking=True, - ) - state = hass.states.get(climate.entity_id) - - assert mock_setup.api.set_temp.call_count == 1 - assert mock_setup.api.set_power.call_count == 2 - assert mock_setup.api.set_mode.call_count == 1 - assert state.state == HVACMode.HEAT diff --git a/tests/components/broadlink/test_config_flow.py b/tests/components/broadlink/test_config_flow.py index f31cb380631..2def8c0b3b9 100644 --- a/tests/components/broadlink/test_config_flow.py +++ b/tests/components/broadlink/test_config_flow.py @@ -734,9 +734,13 @@ async def test_flow_reauth_works(hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) mock_api = device.get_mock_api() mock_api.auth.side_effect = blke.AuthenticationError() + data = {"name": device.name, **device.get_entry_data()} with patch(DEVICE_FACTORY, return_value=mock_api): - result = await mock_entry.start_reauth_flow(hass, data={"name": device.name}) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=data + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reset" @@ -766,8 +770,12 @@ async def test_flow_reauth_invalid_host(hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) mock_api = device.get_mock_api() mock_api.auth.side_effect = blke.AuthenticationError() + data = {"name": device.name, **device.get_entry_data()} + with patch(DEVICE_FACTORY, return_value=mock_api): - result = await mock_entry.start_reauth_flow(hass, data={"name": device.name}) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=data + ) device.mac = get_device("Office").mac mock_api = device.get_mock_api() @@ -796,9 +804,12 @@ async def test_flow_reauth_valid_host(hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) mock_api = device.get_mock_api() mock_api.auth.side_effect = blke.AuthenticationError() + data = {"name": device.name, **device.get_entry_data()} with patch(DEVICE_FACTORY, return_value=mock_api): 
- result = await mock_entry.start_reauth_flow(hass, data={"name": device.name}) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=data + ) device.host = "192.168.1.128" mock_api = device.get_mock_api() diff --git a/tests/components/broadlink/test_select.py b/tests/components/broadlink/test_select.py deleted file mode 100644 index 42715c9a5ab..00000000000 --- a/tests/components/broadlink/test_select.py +++ /dev/null @@ -1,67 +0,0 @@ -"""Tests for Broadlink select.""" - -from homeassistant.components.broadlink.const import DOMAIN -from homeassistant.components.select import ( - ATTR_OPTION, - DOMAIN as SELECT_DOMAIN, - SERVICE_SELECT_OPTION, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.entity_component import async_update_entity - -from . import get_device - - -async def test_select( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test Broadlink select.""" - await hass.config.async_set_time_zone("UTC") - - device = get_device("Guest room") - mock_setup = await device.setup_entry(hass) - - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_setup.entry.unique_id)} - ) - entries = er.async_entries_for_device(entity_registry, device_entry.id) - selects = [entry for entry in entries if entry.domain == Platform.SELECT] - assert len(selects) == 1 - - select = selects[0] - - mock_setup.api.get_full_status.return_value = { - "dayofweek": 3, - "hour": 2, - "min": 3, - "sec": 4, - } - await async_update_entity(hass, select.entity_id) - assert mock_setup.api.get_full_status.call_count == 2 - state = hass.states.get(select.entity_id) - assert state.state == "wednesday" - - # set value - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: select.entity_id, - ATTR_OPTION: "tuesday", - }, - blocking=True, - ) - state = hass.states.get(select.entity_id) - assert state.state == "tuesday" - assert mock_setup.api.set_time.call_count == 1 - call_args = mock_setup.api.set_time.call_args.kwargs - assert call_args == { - "hour": 2, - "minute": 3, - "second": 4, - "day": 2, - } diff --git a/tests/components/broadlink/test_time.py b/tests/components/broadlink/test_time.py deleted file mode 100644 index 819954158bb..00000000000 --- a/tests/components/broadlink/test_time.py +++ /dev/null @@ -1,67 +0,0 @@ -"""Tests for Broadlink time.""" - -from homeassistant.components.broadlink.const import DOMAIN -from homeassistant.components.time import ( - ATTR_TIME, - DOMAIN as TIME_DOMAIN, - SERVICE_SET_VALUE, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.entity_component import async_update_entity - -from . 
import get_device - - -async def test_time( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test Broadlink time.""" - await hass.config.async_set_time_zone("UTC") - - device = get_device("Guest room") - mock_setup = await device.setup_entry(hass) - - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_setup.entry.unique_id)} - ) - entries = er.async_entries_for_device(entity_registry, device_entry.id) - times = [entry for entry in entries if entry.domain == Platform.TIME] - assert len(times) == 1 - - time = times[0] - - mock_setup.api.get_full_status.return_value = { - "dayofweek": 3, - "hour": 2, - "min": 3, - "sec": 4, - } - await async_update_entity(hass, time.entity_id) - assert mock_setup.api.get_full_status.call_count == 2 - state = hass.states.get(time.entity_id) - assert state.state == "02:03:04+00:00" - - # set value - await hass.services.async_call( - TIME_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: time.entity_id, - ATTR_TIME: "03:04:05", - }, - blocking=True, - ) - state = hass.states.get(time.entity_id) - assert state.state == "03:04:05" - assert mock_setup.api.set_time.call_count == 1 - call_args = mock_setup.api.set_time.call_args.kwargs - assert call_args == { - "hour": 3, - "minute": 4, - "second": 5, - "day": 3, - } diff --git a/tests/components/brother/conftest.py b/tests/components/brother/conftest.py index de22158da00..5fadca5314d 100644 --- a/tests/components/brother/conftest.py +++ b/tests/components/brother/conftest.py @@ -1,11 +1,11 @@ """Test fixtures for brother.""" -from collections.abc import Generator from datetime import UTC, datetime -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, patch from brother import BrotherSensors import pytest +from typing_extensions import Generator from homeassistant.components.brother.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_TYPE @@ -87,7 +87,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_unload_entry() -> Generator[AsyncMock]: +def mock_unload_entry() -> Generator[AsyncMock, None, None]: """Override async_unload_entry.""" with patch( "homeassistant.components.brother.async_unload_entry", return_value=True @@ -96,7 +96,7 @@ def mock_unload_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_brother_client() -> Generator[MagicMock]: +def mock_brother_client() -> Generator[AsyncMock, None, None]: """Mock Brother client.""" with ( patch("homeassistant.components.brother.Brother", autospec=True) as mock_client, diff --git a/tests/components/brother/test_config_flow.py b/tests/components/brother/test_config_flow.py index 929e2f083e9..ac7af4cc912 100644 --- a/tests/components/brother/test_config_flow.py +++ b/tests/components/brother/test_config_flow.py @@ -8,7 +8,11 @@ import pytest from homeassistant.components import zeroconf from homeassistant.components.brother.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + SOURCE_USER, + SOURCE_ZEROCONF, +) from homeassistant.const import CONF_HOST, CONF_TYPE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -258,10 +262,17 @@ async def test_reconfigure_successful( """Test starting a reconfigure flow.""" await init_integration(hass, mock_config_entry) - result = await mock_config_entry.start_reconfigure_flow(hass) + result = 
await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -294,10 +305,17 @@ async def test_reconfigure_not_successful( """Test starting a reconfigure flow but no connection found.""" await init_integration(hass, mock_config_entry) - result = await mock_config_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" mock_brother_client.async_update.side_effect = exc @@ -307,7 +325,7 @@ async def test_reconfigure_not_successful( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" assert result["errors"] == {"base": base_error} mock_brother_client.async_update.side_effect = None @@ -333,10 +351,17 @@ async def test_reconfigure_invalid_hostname( """Test starting a reconfigure flow but no connection found.""" await init_integration(hass, mock_config_entry) - result = await mock_config_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -344,7 +369,7 @@ async def test_reconfigure_invalid_hostname( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" assert result["errors"] == {CONF_HOST: "wrong_host"} @@ -356,10 +381,17 @@ async def test_reconfigure_not_the_same_device( """Test starting the reconfiguration process, but with a different printer.""" await init_integration(hass, mock_config_entry) - result = await mock_config_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" mock_brother_client.serial = "9876543210" @@ -369,5 +401,5 @@ async def test_reconfigure_not_the_same_device( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" assert result["errors"] == {"base": "another_device"} diff --git a/tests/components/brottsplatskartan/conftest.py b/tests/components/brottsplatskartan/conftest.py index 1d0cf236ed9..c10093f18b9 100644 --- a/tests/components/brottsplatskartan/conftest.py +++ b/tests/components/brottsplatskartan/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for Brottplatskartan.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from 
typing_extensions import Generator @pytest.fixture diff --git a/tests/components/brunt/conftest.py b/tests/components/brunt/conftest.py index 1b60db682c3..bfbca238446 100644 --- a/tests/components/brunt/conftest.py +++ b/tests/components/brunt/conftest.py @@ -1,9 +1,9 @@ """Configuration for brunt tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/brunt/test_config_flow.py b/tests/components/brunt/test_config_flow.py index 7a805a9ee52..2796882a3c1 100644 --- a/tests/components/brunt/test_config_flow.py +++ b/tests/components/brunt/test_config_flow.py @@ -110,7 +110,15 @@ async def test_reauth( unique_id="test-username", ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=None, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" with patch( diff --git a/tests/components/bryant_evolution/__init__.py b/tests/components/bryant_evolution/__init__.py deleted file mode 100644 index 22fa2950253..00000000000 --- a/tests/components/bryant_evolution/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Bryant Evolution integration.""" diff --git a/tests/components/bryant_evolution/conftest.py b/tests/components/bryant_evolution/conftest.py deleted file mode 100644 index fb12d7ebf29..00000000000 --- a/tests/components/bryant_evolution/conftest.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Common fixtures for the Bryant Evolution tests.""" - -from collections.abc import Generator, Mapping -from unittest.mock import AsyncMock, patch - -from evolutionhttp import BryantEvolutionLocalClient -import pytest - -from homeassistant.components.bryant_evolution.const import CONF_SYSTEM_ZONE, DOMAIN -from homeassistant.const import CONF_FILENAME -from homeassistant.core import HomeAssistant -from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.bryant_evolution.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -DEFAULT_SYSTEM_ZONES = ((1, 1), (1, 2), (2, 3)) -""" -A tuple of (system, zone) pairs representing the default system and zone configurations -for the Bryant Evolution integration. 
-""" - - -@pytest.fixture(autouse=True) -def mock_evolution_client_factory() -> Generator[AsyncMock]: - """Mock an Evolution client.""" - with patch( - "evolutionhttp.BryantEvolutionLocalClient.get_client", - austospec=True, - ) as mock_get_client: - clients: Mapping[tuple[int, int], AsyncMock] = {} - for system, zone in DEFAULT_SYSTEM_ZONES: - clients[(system, zone)] = AsyncMock(spec=BryantEvolutionLocalClient) - client = clients[system, zone] - client.read_zone_name.return_value = f"System {system} Zone {zone}" - client.read_current_temperature.return_value = 75 - client.read_hvac_mode.return_value = ("COOL", False) - client.read_fan_mode.return_value = "AUTO" - client.read_cooling_setpoint.return_value = 72 - mock_get_client.side_effect = lambda system, zone, tty: clients[ - (system, zone) - ] - yield mock_get_client - - -@pytest.fixture -async def mock_evolution_entry( - hass: HomeAssistant, - mock_evolution_client_factory: AsyncMock, -) -> MockConfigEntry: - """Configure and return a Bryant evolution integration.""" - hass.config.units = US_CUSTOMARY_SYSTEM - entry = MockConfigEntry( - entry_id="01J3XJZSTEF6G5V0QJX6HBC94T", # For determinism in snapshot tests - domain=DOMAIN, - data={CONF_FILENAME: "/dev/ttyUSB0", CONF_SYSTEM_ZONE: [(1, 1)]}, - ) - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - return entry diff --git a/tests/components/bryant_evolution/snapshots/test_climate.ambr b/tests/components/bryant_evolution/snapshots/test_climate.ambr deleted file mode 100644 index 4f6c1f2bbc4..00000000000 --- a/tests/components/bryant_evolution/snapshots/test_climate.ambr +++ /dev/null @@ -1,83 +0,0 @@ -# serializer version: 1 -# name: test_setup_integration_success[climate.system_1_zone_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'fan_modes': list([ - 'auto', - 'low', - 'med', - 'high', - ]), - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 95, - 'min_temp': 45, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.system_1_zone_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'bryant_evolution', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01J3XJZSTEF6G5V0QJX6HBC94T-S1-Z1', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_integration_success[climate.system_1_zone_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 75, - 'fan_mode': 'auto', - 'fan_modes': list([ - 'auto', - 'low', - 'med', - 'high', - ]), - 'friendly_name': 'System 1 Zone 1', - 'hvac_action': , - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 95, - 'min_temp': 45, - 'supported_features': , - 'target_temp_high': None, - 'target_temp_low': None, - 'temperature': 72, - }), - 'context': , - 'entity_id': 'climate.system_1_zone_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'cool', - }) -# --- diff --git a/tests/components/bryant_evolution/test_climate.py b/tests/components/bryant_evolution/test_climate.py deleted file mode 100644 index 0b527e02a10..00000000000 --- a/tests/components/bryant_evolution/test_climate.py +++ /dev/null @@ -1,259 +0,0 @@ 
-"""Test the BryantEvolutionClient type.""" - -from collections.abc import Generator -from datetime import timedelta -import logging -from unittest.mock import AsyncMock - -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.bryant_evolution.climate import SCAN_INTERVAL -from homeassistant.components.climate import ( - ATTR_FAN_MODE, - ATTR_HVAC_ACTION, - ATTR_HVAC_MODE, - ATTR_TEMPERATURE, - DOMAIN as CLIMATE_DOMAIN, - SERVICE_SET_FAN_MODE, - SERVICE_SET_HVAC_MODE, - SERVICE_SET_TEMPERATURE, - HVACAction, -) -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - -_LOGGER = logging.getLogger(__name__) - - -async def trigger_polling(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> None: - """Trigger a polling event.""" - freezer.tick(SCAN_INTERVAL + timedelta(seconds=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - -async def test_setup_integration_success( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_evolution_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test that an instance can be constructed.""" - await snapshot_platform( - hass, entity_registry, snapshot, mock_evolution_entry.entry_id - ) - - -async def test_set_temperature_mode_cool( - hass: HomeAssistant, - mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock], - freezer: FrozenDateTimeFactory, -) -> None: - """Test setting the temperature in cool mode.""" - # Start with known initial conditions - client = await mock_evolution_client_factory(1, 1, "/dev/unused") - client.read_hvac_mode.return_value = ("COOL", False) - client.read_cooling_setpoint.return_value = 75 - await trigger_polling(hass, freezer) - state = hass.states.get("climate.system_1_zone_1") - assert state.attributes["temperature"] == 75, state.attributes - - # Make the call, modifting the mock client to throw an exception on - # read to ensure that the update is visible iff we call - # async_update_ha_state. - data = {ATTR_TEMPERATURE: 70} - data[ATTR_ENTITY_ID] = "climate.system_1_zone_1" - client.read_cooling_setpoint.side_effect = Exception("fake failure") - await hass.services.async_call( - CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, data, blocking=True - ) - - # Verify effect. - client.set_cooling_setpoint.assert_called_once_with(70) - state = hass.states.get("climate.system_1_zone_1") - assert state.attributes["temperature"] == 70 - - -async def test_set_temperature_mode_heat( - hass: HomeAssistant, - mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock], - freezer: FrozenDateTimeFactory, -) -> None: - """Test setting the temperature in heat mode.""" - - # Start with known initial conditions - client = await mock_evolution_client_factory(1, 1, "/dev/unused") - client.read_hvac_mode.return_value = ("HEAT", False) - client.read_heating_setpoint.return_value = 60 - await trigger_polling(hass, freezer) - - # Make the call, modifting the mock client to throw an exception on - # read to ensure that the update is visible iff we call - # async_update_ha_state. 
- data = {"temperature": 65} - data[ATTR_ENTITY_ID] = "climate.system_1_zone_1" - client.read_heating_setpoint.side_effect = Exception("fake failure") - await hass.services.async_call( - CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, data, blocking=True - ) - # Verify effect. - state = hass.states.get("climate.system_1_zone_1") - assert state.attributes["temperature"] == 65, state.attributes - - -async def test_set_temperature_mode_heat_cool( - hass: HomeAssistant, - mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock], - freezer: FrozenDateTimeFactory, -) -> None: - """Test setting the temperature in heat_cool mode.""" - - # Enter heat_cool with known setpoints - mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") - mock_client.read_hvac_mode.return_value = ("AUTO", False) - mock_client.read_cooling_setpoint.return_value = 90 - mock_client.read_heating_setpoint.return_value = 40 - await trigger_polling(hass, freezer) - state = hass.states.get("climate.system_1_zone_1") - assert state.state == "heat_cool" - assert state.attributes["target_temp_low"] == 40 - assert state.attributes["target_temp_high"] == 90 - - # Make the call, modifting the mock client to throw an exception on - # read to ensure that the update is visible iff we call - # async_update_ha_state. - mock_client.read_heating_setpoint.side_effect = Exception("fake failure") - mock_client.read_cooling_setpoint.side_effect = Exception("fake failure") - data = {"target_temp_low": 70, "target_temp_high": 80} - data[ATTR_ENTITY_ID] = "climate.system_1_zone_1" - await hass.services.async_call( - CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, data, blocking=True - ) - state = hass.states.get("climate.system_1_zone_1") - assert state.attributes["target_temp_low"] == 70, state.attributes - assert state.attributes["target_temp_high"] == 80, state.attributes - mock_client.set_cooling_setpoint.assert_called_once_with(80) - mock_client.set_heating_setpoint.assert_called_once_with(70) - - -async def test_set_fan_mode( - hass: HomeAssistant, - mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock], -) -> None: - """Test that setting fan mode works.""" - mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") - fan_modes = ["auto", "low", "med", "high"] - for mode in fan_modes: - # Make the call, modifting the mock client to throw an exception on - # read to ensure that the update is visible iff we call - # async_update_ha_state. - mock_client.read_fan_mode.side_effect = Exception("fake failure") - data = {ATTR_FAN_MODE: mode} - data[ATTR_ENTITY_ID] = "climate.system_1_zone_1" - await hass.services.async_call( - CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, data, blocking=True - ) - assert ( - hass.states.get("climate.system_1_zone_1").attributes[ATTR_FAN_MODE] == mode - ) - mock_client.set_fan_mode.assert_called_with(mode) - - -@pytest.mark.parametrize( - ("hvac_mode", "evolution_mode"), - [("heat_cool", "auto"), ("heat", "heat"), ("cool", "cool"), ("off", "off")], -) -async def test_set_hvac_mode( - hass: HomeAssistant, - mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock], - hvac_mode, - evolution_mode, -) -> None: - """Test that setting HVAC mode works.""" - mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") - - # Make the call, modifting the mock client to throw an exception on - # read to ensure that the update is visible iff we call - # async_update_ha_state. 
- data = {ATTR_HVAC_MODE: hvac_mode} - data[ATTR_ENTITY_ID] = "climate.system_1_zone_1" - mock_client.read_hvac_mode.side_effect = Exception("fake failure") - await hass.services.async_call( - CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, data, blocking=True - ) - await hass.async_block_till_done() - assert hass.states.get("climate.system_1_zone_1").state == evolution_mode - mock_client.set_hvac_mode.assert_called_with(evolution_mode) - - -@pytest.mark.parametrize( - ("curr_temp", "expected_action"), - [(62, HVACAction.HEATING), (70, HVACAction.OFF), (80, HVACAction.COOLING)], -) -async def test_read_hvac_action_heat_cool( - hass: HomeAssistant, - mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock], - freezer: FrozenDateTimeFactory, - curr_temp: int, - expected_action: HVACAction, -) -> None: - """Test that we can read the current HVAC action in heat_cool mode.""" - htsp = 68 - clsp = 72 - - mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") - mock_client.read_heating_setpoint.return_value = htsp - mock_client.read_cooling_setpoint.return_value = clsp - is_active = curr_temp < htsp or curr_temp > clsp - mock_client.read_hvac_mode.return_value = ("auto", is_active) - mock_client.read_current_temperature.return_value = curr_temp - await trigger_polling(hass, freezer) - state = hass.states.get("climate.system_1_zone_1") - assert state.attributes[ATTR_HVAC_ACTION] == expected_action - - -@pytest.mark.parametrize( - ("mode", "active", "expected_action"), - [ - ("heat", True, "heating"), - ("heat", False, "off"), - ("cool", True, "cooling"), - ("cool", False, "off"), - ("off", False, "off"), - ], -) -async def test_read_hvac_action( - hass: HomeAssistant, - mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock], - freezer: FrozenDateTimeFactory, - mode: str, - active: bool, - expected_action: str, -) -> None: - """Test that we can read the current HVAC action.""" - # Initial state should be no action. - assert ( - hass.states.get("climate.system_1_zone_1").attributes[ATTR_HVAC_ACTION] - == HVACAction.OFF - ) - # Perturb the system and verify we see an action. 
- mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") - mock_client.read_heating_setpoint.return_value = 75 # Needed if mode == heat - mock_client.read_hvac_mode.return_value = (mode, active) - await trigger_polling(hass, freezer) - assert ( - hass.states.get("climate.system_1_zone_1").attributes[ATTR_HVAC_ACTION] - == expected_action - ) diff --git a/tests/components/bryant_evolution/test_config_flow.py b/tests/components/bryant_evolution/test_config_flow.py deleted file mode 100644 index 54fc7bfbfcc..00000000000 --- a/tests/components/bryant_evolution/test_config_flow.py +++ /dev/null @@ -1,164 +0,0 @@ -"""Test the Bryant Evolution config flow.""" - -from unittest.mock import DEFAULT, AsyncMock, patch - -from evolutionhttp import BryantEvolutionLocalClient, ZoneInfo - -from homeassistant import config_entries -from homeassistant.components.bryant_evolution.const import CONF_SYSTEM_ZONE, DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_FILENAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_form_success(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: - """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with ( - patch.object( - BryantEvolutionLocalClient, - "enumerate_zones", - return_value=DEFAULT, - ) as mock_call, - ): - mock_call.side_effect = lambda system_id, filename: { - 1: [ZoneInfo(1, 1, "S1Z1"), ZoneInfo(1, 2, "S1Z2")], - 2: [ZoneInfo(2, 3, "S2Z2"), ZoneInfo(2, 4, "S2Z3")], - }.get(system_id, []) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_FILENAME: "test_form_success", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY, result - assert result["title"] == "SAM at test_form_success" - assert result["data"] == { - CONF_FILENAME: "test_form_success", - CONF_SYSTEM_ZONE: [(1, 1), (1, 2), (2, 3), (2, 4)], - } - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_cannot_connect( - hass: HomeAssistant, - mock_evolution_client_factory: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test we handle cannot connect error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - with ( - patch.object( - BryantEvolutionLocalClient, - "enumerate_zones", - return_value=DEFAULT, - ) as mock_call, - ): - mock_call.return_value = [] - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_FILENAME: "test_form_cannot_connect", - }, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - with ( - patch.object( - BryantEvolutionLocalClient, - "enumerate_zones", - return_value=DEFAULT, - ) as mock_call, - ): - mock_call.side_effect = lambda system_id, filename: { - 1: [ZoneInfo(1, 1, "S1Z1"), ZoneInfo(1, 2, "S1Z2")], - 2: [ZoneInfo(2, 3, "S2Z3"), ZoneInfo(2, 4, "S2Z4")], - }.get(system_id, []) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_FILENAME: "some-serial", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "SAM at some-serial" - assert result["data"] == { - CONF_FILENAME: "some-serial", - 
CONF_SYSTEM_ZONE: [(1, 1), (1, 2), (2, 3), (2, 4)], - } - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_cannot_connect_bad_file( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_evolution_client_factory: AsyncMock, -) -> None: - """Test we handle cannot connect error from a missing file.""" - mock_evolution_client_factory.side_effect = FileNotFoundError("test error") - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - # This file does not exist. - CONF_FILENAME: "test_form_cannot_connect_bad_file", - }, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - -async def test_reconfigure( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_evolution_entry: MockConfigEntry, -) -> None: - """Test that reconfigure discovers additional systems and zones.""" - - # Reconfigure with additional systems and zones. - result = await mock_evolution_entry.start_reconfigure_flow(hass) - with ( - patch.object( - BryantEvolutionLocalClient, - "enumerate_zones", - return_value=DEFAULT, - ) as mock_call, - ): - mock_call.side_effect = lambda system_id, filename: { - 1: [ZoneInfo(1, 1, "S1Z1")], - 2: [ZoneInfo(2, 3, "S2Z3"), ZoneInfo(2, 4, "S2Z4"), ZoneInfo(2, 5, "S2Z5")], - }.get(system_id, []) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_FILENAME: "test_reconfigure", - }, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT, result - assert result["reason"] == "reconfigure_successful" - config_entry = hass.config_entries.async_entries()[0] - assert config_entry.data[CONF_SYSTEM_ZONE] == [ - (1, 1), - (2, 3), - (2, 4), - (2, 5), - ] diff --git a/tests/components/bryant_evolution/test_init.py b/tests/components/bryant_evolution/test_init.py deleted file mode 100644 index 72734f7e117..00000000000 --- a/tests/components/bryant_evolution/test_init.py +++ /dev/null @@ -1,112 +0,0 @@ -"""Test setup for the bryant_evolution integration.""" - -import logging -from unittest.mock import AsyncMock - -from evolutionhttp import BryantEvolutionLocalClient -from freezegun.api import FrozenDateTimeFactory - -from homeassistant.components.bryant_evolution.const import CONF_SYSTEM_ZONE, DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_FILENAME -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr -from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM - -from .conftest import DEFAULT_SYSTEM_ZONES -from .test_climate import trigger_polling - -from tests.common import MockConfigEntry - -_LOGGER = logging.getLogger(__name__) - - -async def test_setup_integration_prevented_by_unavailable_client( - hass: HomeAssistant, mock_evolution_client_factory: AsyncMock -) -> None: - """Test that setup throws ConfigEntryNotReady when the client is unavailable.""" - mock_evolution_client_factory.side_effect = FileNotFoundError("test error") - mock_evolution_entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_FILENAME: "test_setup_integration_prevented_by_unavailable_client", - CONF_SYSTEM_ZONE: [(1, 1)], - }, - ) - mock_evolution_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_evolution_entry.entry_id) - await hass.async_block_till_done() - assert mock_evolution_entry.state is 
ConfigEntryState.SETUP_RETRY - - -async def test_setup_integration_client_returns_none( - hass: HomeAssistant, mock_evolution_client_factory: AsyncMock -) -> None: - """Test that an unavailable client causes ConfigEntryNotReady.""" - mock_client = AsyncMock(spec=BryantEvolutionLocalClient) - mock_evolution_client_factory.side_effect = None - mock_evolution_client_factory.return_value = mock_client - mock_client.read_fan_mode.return_value = None - mock_client.read_current_temperature.return_value = None - mock_client.read_hvac_mode.return_value = None - mock_client.read_cooling_setpoint.return_value = None - mock_client.read_zone_name.return_value = None - mock_evolution_entry = MockConfigEntry( - domain=DOMAIN, - data={CONF_FILENAME: "/dev/ttyUSB0", CONF_SYSTEM_ZONE: [(1, 1)]}, - ) - mock_evolution_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_evolution_entry.entry_id) - await hass.async_block_till_done() - assert mock_evolution_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_setup_multiple_systems_zones( - hass: HomeAssistant, - mock_evolution_client_factory: AsyncMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test that a device with multiple systems and zones works.""" - hass.config.units = US_CUSTOMARY_SYSTEM - mock_evolution_entry = MockConfigEntry( - domain=DOMAIN, - data={CONF_FILENAME: "/dev/ttyUSB0", CONF_SYSTEM_ZONE: DEFAULT_SYSTEM_ZONES}, - ) - mock_evolution_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_evolution_entry.entry_id) - await hass.async_block_till_done() - - # Set the temperature of each zone to its zone number so that we can - # ensure we've created the right client for each zone. - for sz, client in mock_evolution_entry.runtime_data.items(): - client.read_current_temperature.return_value = sz[1] - await trigger_polling(hass, freezer) - - # Check that each system and zone has the expected temperature value to - # verify that the initial setup flow worked as expected. - for sz in DEFAULT_SYSTEM_ZONES: - system = sz[0] - zone = sz[1] - state = hass.states.get(f"climate.system_{system}_zone_{zone}") - assert state, hass.states.async_all() - assert state.attributes["current_temperature"] == zone - - # Check that the created devices are wired to each other as expected. 
- device_registry = dr.async_get(hass) - - def find_device(name): - return next(filter(lambda x: x.name == name, device_registry.devices.values())) - - sam = find_device("System Access Module") - s1 = find_device("System 1") - s2 = find_device("System 2") - s1z1 = find_device("System 1 Zone 1") - s1z2 = find_device("System 1 Zone 2") - s2z3 = find_device("System 2 Zone 3") - - assert sam.via_device_id is None - assert s1.via_device_id == sam.id - assert s2.via_device_id == sam.id - assert s1z1.via_device_id == s1.id - assert s1z2.via_device_id == s1.id - assert s2z3.via_device_id == s2.id diff --git a/tests/components/bsblan/__init__.py b/tests/components/bsblan/__init__.py index 3892fcaaaca..d233fa068ea 100644 --- a/tests/components/bsblan/__init__.py +++ b/tests/components/bsblan/__init__.py @@ -1,18 +1 @@ """Tests for the bsblan integration.""" - -from unittest.mock import patch - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_with_selected_platforms( - hass: HomeAssistant, config_entry: MockConfigEntry, platforms: list[Platform] -) -> None: - """Set up the BSBLAN integration with the selected platforms.""" - config_entry.add_to_hass(hass) - with patch("homeassistant.components.bsblan.PLATFORMS", platforms): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/bsblan/conftest.py b/tests/components/bsblan/conftest.py index e46cdd75f2d..224e0e0b157 100644 --- a/tests/components/bsblan/conftest.py +++ b/tests/components/bsblan/conftest.py @@ -1,10 +1,10 @@ """Fixtures for BSBLAN integration tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch -from bsblan import Device, Info, Sensor, State, StaticState +from bsblan import Device, Info, State import pytest +from typing_extensions import Generator from homeassistant.components.bsblan.const import CONF_PASSKEY, DOMAIN from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME @@ -42,23 +42,17 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture def mock_bsblan() -> Generator[MagicMock]: """Return a mocked BSBLAN client.""" + with ( patch("homeassistant.components.bsblan.BSBLAN", autospec=True) as bsblan_mock, patch("homeassistant.components.bsblan.config_flow.BSBLAN", new=bsblan_mock), ): bsblan = bsblan_mock.return_value - bsblan.info.return_value = Info.from_json(load_fixture("info.json", DOMAIN)) - bsblan.device.return_value = Device.from_json( + bsblan.info.return_value = Info.parse_raw(load_fixture("info.json", DOMAIN)) + bsblan.device.return_value = Device.parse_raw( load_fixture("device.json", DOMAIN) ) - bsblan.state.return_value = State.from_json(load_fixture("state.json", DOMAIN)) - bsblan.static_values.return_value = StaticState.from_json( - load_fixture("static.json", DOMAIN) - ) - bsblan.sensor.return_value = Sensor.from_json( - load_fixture("sensor.json", DOMAIN) - ) - + bsblan.state.return_value = State.parse_raw(load_fixture("state.json", DOMAIN)) yield bsblan diff --git a/tests/components/bsblan/fixtures/sensor.json b/tests/components/bsblan/fixtures/sensor.json deleted file mode 100644 index 3448e7e98d8..00000000000 --- a/tests/components/bsblan/fixtures/sensor.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "outside_temperature": { - "name": "Outside temp sensor local", - "error": 0, - "value": "6.1", - "desc": "", - "dataType": 0, - "readonly": 0, - "unit": 
"°C" - }, - "current_temperature": { - "name": "Room temp 1 actual value", - "error": 0, - "value": "18.6", - "desc": "", - "dataType": 0, - "readonly": 1, - "unit": "°C" - } -} diff --git a/tests/components/bsblan/fixtures/state.json b/tests/components/bsblan/fixtures/state.json index 8c458e173d4..51d4cf2e136 100644 --- a/tests/components/bsblan/fixtures/state.json +++ b/tests/components/bsblan/fixtures/state.json @@ -97,14 +97,5 @@ "dataType": 1, "readonly": 1, "unit": "" - }, - "room1_temp_setpoint_boost": { - "name": "Room 1 Temp Setpoint Boost", - "error": 0, - "value": "22.5", - "desc": "Boost", - "dataType": 1, - "readonly": 1, - "unit": "°C" } } diff --git a/tests/components/bsblan/fixtures/static.json b/tests/components/bsblan/fixtures/static.json deleted file mode 100644 index 8c7abc3397b..00000000000 --- a/tests/components/bsblan/fixtures/static.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "min_temp": { - "name": "Room temp frost protection setpoint", - "error": 0, - "value": "8.0", - "desc": "", - "dataType": 0, - "readonly": 0, - "unit": "°C" - }, - "max_temp": { - "name": "Summer/winter changeover temp heat circuit 1", - "error": 0, - "value": "20.0", - "desc": "", - "dataType": 0, - "readonly": 0, - "unit": "°C" - } -} diff --git a/tests/components/bsblan/fixtures/static_F.json b/tests/components/bsblan/fixtures/static_F.json deleted file mode 100644 index a61e870f6e5..00000000000 --- a/tests/components/bsblan/fixtures/static_F.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "min_temp": { - "name": "Room temp frost protection setpoint", - "error": 0, - "value": "8.0", - "desc": "", - "dataType": 0, - "readonly": 0, - "unit": "°F" - }, - "max_temp": { - "name": "Summer/winter changeover temp heat circuit 1", - "error": 0, - "value": "20.0", - "desc": "", - "dataType": 0, - "readonly": 0, - "unit": "°F" - } -} diff --git a/tests/components/bsblan/snapshots/test_climate.ambr b/tests/components/bsblan/snapshots/test_climate.ambr deleted file mode 100644 index 4eb70fe2658..00000000000 --- a/tests/components/bsblan/snapshots/test_climate.ambr +++ /dev/null @@ -1,220 +0,0 @@ -# serializer version: 1 -# name: test_celsius_fahrenheit[static.json][climate.bsb_lan-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 20.0, - 'min_temp': 8.0, - 'preset_modes': list([ - 'eco', - 'none', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.bsb_lan', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'bsblan', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:80:41:19:69:90-climate', - 'unit_of_measurement': None, - }) -# --- -# name: test_celsius_fahrenheit[static.json][climate.bsb_lan-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 18.6, - 'friendly_name': 'BSB-LAN', - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 20.0, - 'min_temp': 8.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'eco', - 'none', - ]), - 'supported_features': , - 'temperature': 18.5, - }), - 'context': , - 'entity_id': 'climate.bsb_lan', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# 
--- -# name: test_celsius_fahrenheit[static_F.json][climate.bsb_lan-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': -6.7, - 'min_temp': -13.3, - 'preset_modes': list([ - 'eco', - 'none', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.bsb_lan', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'bsblan', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:80:41:19:69:90-climate', - 'unit_of_measurement': None, - }) -# --- -# name: test_celsius_fahrenheit[static_F.json][climate.bsb_lan-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': -7.4, - 'friendly_name': 'BSB-LAN', - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': -6.7, - 'min_temp': -13.3, - 'preset_mode': 'none', - 'preset_modes': list([ - 'eco', - 'none', - ]), - 'supported_features': , - 'temperature': -7.5, - }), - 'context': , - 'entity_id': 'climate.bsb_lan', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_climate_entity_properties[climate.bsb_lan-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 20.0, - 'min_temp': 8.0, - 'preset_modes': list([ - 'eco', - 'none', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.bsb_lan', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'bsblan', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:80:41:19:69:90-climate', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate_entity_properties[climate.bsb_lan-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 18.6, - 'friendly_name': 'BSB-LAN', - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 20.0, - 'min_temp': 8.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'eco', - 'none', - ]), - 'supported_features': , - 'temperature': 18.5, - }), - 'context': , - 'entity_id': 'climate.bsb_lan', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- diff --git a/tests/components/bsblan/snapshots/test_diagnostics.ambr b/tests/components/bsblan/snapshots/test_diagnostics.ambr index 9fabd373205..b172d26c249 100644 --- a/tests/components/bsblan/snapshots/test_diagnostics.ambr +++ b/tests/components/bsblan/snapshots/test_diagnostics.ambr @@ -1,111 +1,6 @@ # serializer version: 1 # name: test_diagnostics dict({ - 'coordinator_data': dict({ - 'sensor': dict({ - 'current_temperature': dict({ - 'data_type': 0, - 'desc': '', - 'error': 0, - 'name': 'Room temp 1 actual value', - 'precision': None, - 'readonly': 1, - 'readwrite': 0, - 'unit': '°C', - 'value': 18.6, - }), - 'outside_temperature': dict({ - 'data_type': 0, - 'desc': '', - 'error': 0, - 
'name': 'Outside temp sensor local', - 'precision': None, - 'readonly': 0, - 'readwrite': 0, - 'unit': '°C', - 'value': 6.1, - }), - }), - 'state': dict({ - 'current_temperature': dict({ - 'data_type': 0, - 'desc': '', - 'error': 0, - 'name': 'Room temp 1 actual value', - 'precision': None, - 'readonly': 1, - 'readwrite': 0, - 'unit': '°C', - 'value': 18.6, - }), - 'hvac_action': dict({ - 'data_type': 1, - 'desc': 'Raumtemp’begrenzung', - 'error': 0, - 'name': 'Status heating circuit 1', - 'precision': None, - 'readonly': 1, - 'readwrite': 0, - 'unit': '', - 'value': 122, - }), - 'hvac_mode': dict({ - 'data_type': 1, - 'desc': 'Komfort', - 'error': 0, - 'name': 'Operating mode', - 'precision': None, - 'readonly': 0, - 'readwrite': 0, - 'unit': '', - 'value': 'heat', - }), - 'hvac_mode2': dict({ - 'data_type': 1, - 'desc': 'Reduziert', - 'error': 0, - 'name': 'Operating mode', - 'precision': None, - 'readonly': 0, - 'readwrite': 0, - 'unit': '', - 'value': 2, - }), - 'room1_temp_setpoint_boost': dict({ - 'data_type': 1, - 'desc': 'Boost', - 'error': 0, - 'name': 'Room 1 Temp Setpoint Boost', - 'precision': None, - 'readonly': 1, - 'readwrite': 0, - 'unit': '°C', - 'value': '22.5', - }), - 'room1_thermostat_mode': dict({ - 'data_type': 1, - 'desc': 'Kein Bedarf', - 'error': 0, - 'name': 'Raumthermostat 1', - 'precision': None, - 'readonly': 1, - 'readwrite': 0, - 'unit': '', - 'value': 0, - }), - 'target_temperature': dict({ - 'data_type': 0, - 'desc': '', - 'error': 0, - 'name': 'Room temperature Comfort setpoint', - 'precision': None, - 'readonly': 0, - 'readwrite': 0, - 'unit': '°C', - 'value': 18.5, - }), - }), - }), 'device': dict({ 'MAC': '00:80:41:19:69:90', 'name': 'BSB-LAN', @@ -116,59 +11,67 @@ 'controller_family': dict({ 'data_type': 0, 'desc': '', - 'error': 0, 'name': 'Device family', - 'precision': None, - 'readonly': 0, - 'readwrite': 0, 'unit': '', - 'value': 211, + 'value': '211', }), 'controller_variant': dict({ 'data_type': 0, 'desc': '', - 'error': 0, 'name': 'Device variant', - 'precision': None, - 'readonly': 0, - 'readwrite': 0, 'unit': '', - 'value': 127, + 'value': '127', }), 'device_identification': dict({ 'data_type': 7, 'desc': '', - 'error': 0, 'name': 'Gerte-Identifikation', - 'precision': None, - 'readonly': 0, - 'readwrite': 0, 'unit': '', 'value': 'RVS21.831F/127', }), }), - 'static': dict({ - 'max_temp': dict({ + 'state': dict({ + 'current_temperature': dict({ 'data_type': 0, 'desc': '', - 'error': 0, - 'name': 'Summer/winter changeover temp heat circuit 1', - 'precision': None, - 'readonly': 0, - 'readwrite': 0, + 'name': 'Room temp 1 actual value', 'unit': '°C', - 'value': 20.0, + 'value': '18.6', }), - 'min_temp': dict({ + 'hvac_action': dict({ + 'data_type': 1, + 'desc': 'Raumtemp’begrenzung', + 'name': 'Status heating circuit 1', + 'unit': '', + 'value': '122', + }), + 'hvac_mode': dict({ + 'data_type': 1, + 'desc': 'Komfort', + 'name': 'Operating mode', + 'unit': '', + 'value': 'heat', + }), + 'hvac_mode2': dict({ + 'data_type': 1, + 'desc': 'Reduziert', + 'name': 'Operating mode', + 'unit': '', + 'value': '2', + }), + 'room1_thermostat_mode': dict({ + 'data_type': 1, + 'desc': 'Kein Bedarf', + 'name': 'Raumthermostat 1', + 'unit': '', + 'value': '0', + }), + 'target_temperature': dict({ 'data_type': 0, 'desc': '', - 'error': 0, - 'name': 'Room temp frost protection setpoint', - 'precision': None, - 'readonly': 0, - 'readwrite': 0, + 'name': 'Room temperature Comfort setpoint', 'unit': '°C', - 'value': 8.0, + 'value': '18.5', }), }), }) diff --git 
a/tests/components/bsblan/snapshots/test_sensor.ambr b/tests/components/bsblan/snapshots/test_sensor.ambr deleted file mode 100644 index 0146dd23b3d..00000000000 --- a/tests/components/bsblan/snapshots/test_sensor.ambr +++ /dev/null @@ -1,103 +0,0 @@ -# serializer version: 1 -# name: test_sensor_entity_properties[sensor.bsb_lan_current_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.bsb_lan_current_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Current Temperature', - 'platform': 'bsblan', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_temperature', - 'unique_id': '00:80:41:19:69:90-current_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_entity_properties[sensor.bsb_lan_current_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'BSB-LAN Current Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.bsb_lan_current_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '18.6', - }) -# --- -# name: test_sensor_entity_properties[sensor.bsb_lan_outside_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.bsb_lan_outside_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outside Temperature', - 'platform': 'bsblan', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'outside_temperature', - 'unique_id': '00:80:41:19:69:90-outside_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_entity_properties[sensor.bsb_lan_outside_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'BSB-LAN Outside Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.bsb_lan_outside_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '6.1', - }) -# --- diff --git a/tests/components/bsblan/test_climate.py b/tests/components/bsblan/test_climate.py deleted file mode 100644 index c519c3043da..00000000000 --- a/tests/components/bsblan/test_climate.py +++ /dev/null @@ -1,307 +0,0 @@ -"""Tests for the BSB-Lan climate platform.""" - -from datetime import timedelta -from unittest.mock import AsyncMock, MagicMock - -from bsblan import BSBLANError, StaticState -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.bsblan.const import DOMAIN -from homeassistant.components.climate import ( - ATTR_HVAC_MODE, - ATTR_PRESET_MODE, - DOMAIN as CLIMATE_DOMAIN, - PRESET_ECO, - PRESET_NONE, - 
SERVICE_SET_HVAC_MODE, - SERVICE_SET_PRESET_MODE, - SERVICE_SET_TEMPERATURE, - HVACMode, -) -from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -import homeassistant.helpers.entity_registry as er - -from . import setup_with_selected_platforms - -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_object_fixture, - snapshot_platform, -) - -ENTITY_ID = "climate.bsb_lan" - - -@pytest.mark.parametrize( - ("static_file"), - [ - ("static.json"), - ("static_F.json"), - ], -) -async def test_celsius_fahrenheit( - hass: HomeAssistant, - mock_bsblan: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - static_file: str, -) -> None: - """Test Celsius and Fahrenheit temperature units.""" - - static_data = load_json_object_fixture(static_file, DOMAIN) - - mock_bsblan.static_values.return_value = StaticState.from_dict(static_data) - - await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_climate_entity_properties( - hass: HomeAssistant, - mock_bsblan: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test the climate entity properties.""" - await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - # Test when current_temperature is "---" - mock_current_temp = MagicMock() - mock_current_temp.value = "---" - mock_bsblan.state.return_value.current_temperature = mock_current_temp - - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID) - assert state.attributes["current_temperature"] is None - - # Test target_temperature - mock_target_temp = MagicMock() - mock_target_temp.value = "23.5" - mock_bsblan.state.return_value.target_temperature = mock_target_temp - - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID) - assert state.attributes["temperature"] == 23.5 - - # Test hvac_mode - mock_hvac_mode = MagicMock() - mock_hvac_mode.value = HVACMode.AUTO - mock_bsblan.state.return_value.hvac_mode = mock_hvac_mode - - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID) - assert state.state == HVACMode.AUTO - - # Test preset_mode - mock_hvac_mode.value = PRESET_ECO - - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID) - assert state.attributes["preset_mode"] == PRESET_ECO - - -@pytest.mark.parametrize( - "mode", - [HVACMode.HEAT, HVACMode.AUTO, HVACMode.OFF], -) -async def test_async_set_hvac_mode( - hass: HomeAssistant, - mock_bsblan: AsyncMock, - mock_config_entry: MockConfigEntry, - mode: HVACMode, -) -> None: - """Test setting HVAC mode via service call.""" - await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) - - # Call the service to set HVAC mode - await hass.services.async_call( - domain=CLIMATE_DOMAIN, - service=SERVICE_SET_HVAC_MODE, 
- service_data={ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: mode}, - blocking=True, - ) - - # Assert that the thermostat method was called - mock_bsblan.thermostat.assert_called_once_with(hvac_mode=mode) - mock_bsblan.thermostat.reset_mock() - - -@pytest.mark.parametrize( - ("hvac_mode", "preset_mode"), - [ - (HVACMode.AUTO, PRESET_ECO), - (HVACMode.AUTO, PRESET_NONE), - ], -) -async def test_async_set_preset_mode_succes( - hass: HomeAssistant, - mock_bsblan: AsyncMock, - mock_config_entry: MockConfigEntry, - hvac_mode: HVACMode, - preset_mode: str, -) -> None: - """Test setting preset mode via service call.""" - await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) - - # patch hvac_mode - mock_hvac_mode = MagicMock() - mock_hvac_mode.value = hvac_mode - mock_bsblan.state.return_value.hvac_mode = mock_hvac_mode - - # Attempt to set the preset mode - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset_mode}, - blocking=True, - ) - await hass.async_block_till_done() - - -@pytest.mark.parametrize( - ("hvac_mode", "preset_mode"), - [ - ( - HVACMode.HEAT, - PRESET_ECO, - ) - ], -) -async def test_async_set_preset_mode_error( - hass: HomeAssistant, - mock_bsblan: AsyncMock, - mock_config_entry: MockConfigEntry, - hvac_mode: HVACMode, - preset_mode: str, -) -> None: - """Test setting preset mode via service call.""" - await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) - - # patch hvac_mode - mock_hvac_mode = MagicMock() - mock_hvac_mode.value = hvac_mode - mock_bsblan.state.return_value.hvac_mode = mock_hvac_mode - - # Attempt to set the preset mode - error_message = "Preset mode can only be set when HVAC mode is set to 'auto'" - with pytest.raises(HomeAssistantError, match=error_message): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset_mode}, - blocking=True, - ) - - -@pytest.mark.parametrize( - ("target_temp"), - [ - (8.0), # Min temperature - (15.0), # Mid-range temperature - (20.0), # Max temperature - ], -) -async def test_async_set_temperature( - hass: HomeAssistant, - mock_bsblan: AsyncMock, - mock_config_entry: MockConfigEntry, - target_temp: float, -) -> None: - """Test setting temperature via service call.""" - await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) - - await hass.services.async_call( - domain=CLIMATE_DOMAIN, - service=SERVICE_SET_TEMPERATURE, - service_data={ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: target_temp}, - blocking=True, - ) - # Assert that the thermostat method was called with the correct temperature - mock_bsblan.thermostat.assert_called_once_with(target_temperature=target_temp) - - -async def test_async_set_data( - hass: HomeAssistant, - mock_bsblan: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test setting data via service calls.""" - await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) - - # Test setting temperature - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 19}, - blocking=True, - ) - mock_bsblan.thermostat.assert_called_once_with(target_temperature=19) - mock_bsblan.thermostat.reset_mock() - - # Test setting HVAC mode - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, - blocking=True, - ) - 
mock_bsblan.thermostat.assert_called_once_with(hvac_mode=HVACMode.HEAT) - mock_bsblan.thermostat.reset_mock() - - # Patch HVAC mode to AUTO - mock_hvac_mode = MagicMock() - mock_hvac_mode.value = HVACMode.AUTO - mock_bsblan.state.return_value.hvac_mode = mock_hvac_mode - - # Test setting preset mode to ECO - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_ECO}, - blocking=True, - ) - mock_bsblan.thermostat.assert_called_once_with(hvac_mode=PRESET_ECO) - mock_bsblan.thermostat.reset_mock() - - # Test setting preset mode to NONE - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_NONE}, - blocking=True, - ) - mock_bsblan.thermostat.assert_called_once() - mock_bsblan.thermostat.reset_mock() - - # Test error handling - mock_bsblan.thermostat.side_effect = BSBLANError("Test error") - error_message = "An error occurred while updating the BSBLAN device" - with pytest.raises(HomeAssistantError, match=error_message): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 20}, - blocking=True, - ) diff --git a/tests/components/bsblan/test_diagnostics.py b/tests/components/bsblan/test_diagnostics.py index aea53f8a1a2..316296df78a 100644 --- a/tests/components/bsblan/test_diagnostics.py +++ b/tests/components/bsblan/test_diagnostics.py @@ -1,7 +1,5 @@ """Tests for the diagnostics data provided by the BSBLan integration.""" -from unittest.mock import AsyncMock - from syrupy import SnapshotAssertion from homeassistant.core import HomeAssistant @@ -13,14 +11,13 @@ from tests.typing import ClientSessionGenerator async def test_diagnostics( hass: HomeAssistant, - mock_bsblan: AsyncMock, hass_client: ClientSessionGenerator, init_integration: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test diagnostics.""" - diagnostics_data = await get_diagnostics_for_config_entry( - hass, hass_client, init_integration + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, init_integration) + == snapshot ) - assert diagnostics_data == snapshot diff --git a/tests/components/bsblan/test_sensor.py b/tests/components/bsblan/test_sensor.py deleted file mode 100644 index dc22574168d..00000000000 --- a/tests/components/bsblan/test_sensor.py +++ /dev/null @@ -1,66 +0,0 @@ -"""Tests for the BSB-Lan sensor platform.""" - -from datetime import timedelta -from unittest.mock import AsyncMock, MagicMock - -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.const import STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -import homeassistant.helpers.entity_registry as er - -from . 
import setup_with_selected_platforms - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - -ENTITY_CURRENT_TEMP = "sensor.bsb_lan_current_temperature" -ENTITY_OUTSIDE_TEMP = "sensor.bsb_lan_outside_temperature" - - -async def test_sensor_entity_properties( - hass: HomeAssistant, - mock_bsblan: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test the sensor entity properties.""" - await setup_with_selected_platforms(hass, mock_config_entry, [Platform.SENSOR]) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.parametrize( - ("value", "expected_state"), - [ - (18.6, "18.6"), - (None, STATE_UNKNOWN), - ("---", STATE_UNKNOWN), - ], -) -async def test_current_temperature_scenarios( - hass: HomeAssistant, - mock_bsblan: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - value, - expected_state, -) -> None: - """Test various scenarios for current temperature sensor.""" - await setup_with_selected_platforms(hass, mock_config_entry, [Platform.SENSOR]) - - # Set up the mock value - mock_current_temp = MagicMock() - mock_current_temp.value = value - mock_bsblan.sensor.return_value.current_temperature = mock_current_temp - - # Trigger an update - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # Check the state - state = hass.states.get(ENTITY_CURRENT_TEMP) - assert state.state == expected_state diff --git a/tests/components/bthome/test_config_flow.py b/tests/components/bthome/test_config_flow.py index faf2f1c9ef5..acf490d341e 100644 --- a/tests/components/bthome/test_config_flow.py +++ b/tests/components/bthome/test_config_flow.py @@ -563,7 +563,16 @@ async def test_async_step_reauth_abort_early(hass: HomeAssistant) -> None: device = DeviceData() - result = await entry.start_reauth_flow(hass, data={"device": device}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "title_placeholders": {"name": entry.title}, + "unique_id": entry.unique_id, + }, + data=entry.data | {"device": device}, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" diff --git a/tests/components/bthome/test_device_trigger.py b/tests/components/bthome/test_device_trigger.py index c4c900ef6e1..459654826f9 100644 --- a/tests/components/bthome/test_device_trigger.py +++ b/tests/components/bthome/test_device_trigger.py @@ -1,19 +1,10 @@ """Test BTHome BLE events.""" -import pytest - from homeassistant.components import automation from homeassistant.components.bluetooth import DOMAIN as BLUETOOTH_DOMAIN from homeassistant.components.bthome.const import CONF_SUBTYPE, DOMAIN from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import ( - CONF_DEVICE_ID, - CONF_DOMAIN, - CONF_PLATFORM, - CONF_TYPE, - STATE_ON, - STATE_UNAVAILABLE, -) +from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_PLATFORM, CONF_TYPE from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -130,117 +121,6 @@ async def test_get_triggers_button( await hass.async_block_till_done() -async def test_get_triggers_multiple_buttons( - hass: 
HomeAssistant, device_registry: dr.DeviceRegistry -) -> None: - """Test that we get the expected triggers for multiple buttons device.""" - mac = "A4:C1:38:8D:18:B2" - entry = await _async_setup_bthome_device(hass, mac) - events = async_capture_events(hass, "bthome_ble_event") - - # Emit button_1 long press and button_2 press events - # so it creates the device in the registry - inject_bluetooth_service_info_bleak( - hass, - make_bthome_v2_adv(mac, b"\x40\x3a\x04\x3a\x01"), - ) - - # wait for the event - await hass.async_block_till_done() - assert len(events) == 2 - - device = device_registry.async_get_device(identifiers={get_device_id(mac)}) - assert device - expected_trigger1 = { - CONF_PLATFORM: "device", - CONF_DOMAIN: DOMAIN, - CONF_DEVICE_ID: device.id, - CONF_TYPE: "button_1", - CONF_SUBTYPE: "long_press", - "metadata": {}, - } - expected_trigger2 = { - CONF_PLATFORM: "device", - CONF_DOMAIN: DOMAIN, - CONF_DEVICE_ID: device.id, - CONF_TYPE: "button_2", - CONF_SUBTYPE: "press", - "metadata": {}, - } - triggers = await async_get_device_automations( - hass, DeviceAutomationType.TRIGGER, device.id - ) - assert expected_trigger1 in triggers - assert expected_trigger2 in triggers - - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - -@pytest.mark.parametrize( - ("event_class", "event_type", "expected"), - [ - ("button_1", "long_press", STATE_ON), - ("button_2", "press", STATE_ON), - ("button_3", "long_press", STATE_UNAVAILABLE), - ("button", "long_press", STATE_UNAVAILABLE), - ("button_1", "invalid_press", STATE_UNAVAILABLE), - ], -) -async def test_validate_trigger_config( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - event_class: str, - event_type: str, - expected: str, -) -> None: - """Test unsupported trigger does not return a trigger config.""" - mac = "A4:C1:38:8D:18:B2" - entry = await _async_setup_bthome_device(hass, mac) - - # Emit button_1 long press and button_2 press events - # so it creates the device in the registry - inject_bluetooth_service_info_bleak( - hass, - make_bthome_v2_adv(mac, b"\x40\x3a\x04\x3a\x01"), - ) - - # wait for the event - await hass.async_block_till_done() - - device = device_registry.async_get_device(identifiers={get_device_id(mac)}) - - assert await async_setup_component( - hass, - automation.DOMAIN, - { - automation.DOMAIN: [ - { - "trigger": { - CONF_PLATFORM: "device", - CONF_DOMAIN: DOMAIN, - CONF_DEVICE_ID: device.id, - CONF_TYPE: event_class, - CONF_SUBTYPE: event_type, - }, - "action": { - "service": "test.automation", - "data_template": {"some": "test_trigger_button_long_press"}, - }, - }, - ] - }, - ) - await hass.async_block_till_done() - - automations = hass.states.async_entity_ids(automation.DOMAIN) - assert len(automations) == 1 - assert hass.states.get(automations[0]).state == expected - - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - async def test_get_triggers_dimmer( hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: @@ -355,7 +235,7 @@ async def test_if_fires_on_motion_detected( make_bthome_v2_adv(mac, b"\x40\x3a\x03"), ) - # wait for the event + # # wait for the event await hass.async_block_till_done() device = device_registry.async_get_device(identifiers={get_device_id(mac)}) diff --git a/tests/components/buienradar/conftest.py b/tests/components/buienradar/conftest.py index 7872b50d4a9..7c9027c7715 100644 --- a/tests/components/buienradar/conftest.py +++ 
b/tests/components/buienradar/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for buienradar2.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/buienradar/test_camera.py b/tests/components/buienradar/test_camera.py index f1518a1a0ea..9ef986b094c 100644 --- a/tests/components/buienradar/test_camera.py +++ b/tests/components/buienradar/test_camera.py @@ -8,7 +8,6 @@ from http import HTTPStatus from aiohttp.client_exceptions import ClientResponseError from homeassistant.components.buienradar.const import CONF_DELTA, DOMAIN -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_COUNTRY_CODE, CONF_LATITUDE, CONF_LONGITUDE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -32,7 +31,7 @@ def radar_map_url(country_code: str = "NL") -> str: return f"https://api.buienradar.nl/image/1.0/RadarMap{country_code}?w=700&h=700" -async def _setup_config_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def _setup_config_entry(hass, entry): entity_registry = er.async_get(hass) entity_registry.async_get_or_create( domain="camera", diff --git a/tests/components/button/test_device_trigger.py b/tests/components/button/test_device_trigger.py index f5ade86e1a0..dee8045a71f 100644 --- a/tests/components/button/test_device_trigger.py +++ b/tests/components/button/test_device_trigger.py @@ -13,7 +13,17 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") async def test_get_triggers( @@ -99,7 +109,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -148,9 +158,9 @@ async def test_if_fires_on_state_change( # Test triggering device trigger with a to state hass.states.async_set(entry.entity_id, "2021-01-01T23:59:59+00:00") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"to - device - {entry.entity_id} - unknown - 2021-01-01T23:59:59+00:00 - None - 0" ) @@ -159,7 +169,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -208,8 +218,8 @@ async def test_if_fires_on_state_change_legacy( # Test triggering device trigger with a to state hass.states.async_set(entry.entity_id, "2021-01-01T23:59:59+00:00") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == 
f"to - device - {entry.entity_id} - unknown - 2021-01-01T23:59:59+00:00 - None - 0" ) diff --git a/tests/components/button/test_init.py b/tests/components/button/test_init.py index 7df5308e096..583c625e1b2 100644 --- a/tests/components/button/test_init.py +++ b/tests/components/button/test_init.py @@ -1,11 +1,11 @@ """The tests for the Button component.""" -from collections.abc import Generator from datetime import timedelta from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory import pytest +from typing_extensions import Generator from homeassistant.components.button import ( DOMAIN, diff --git a/tests/components/caldav/test_config_flow.py b/tests/components/caldav/test_config_flow.py index bf22fb0bd9c..7c47ea14607 100644 --- a/tests/components/caldav/test_config_flow.py +++ b/tests/components/caldav/test_config_flow.py @@ -1,11 +1,11 @@ """Test the CalDAV config flow.""" -from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch from caldav.lib.error import AuthorizationError, DAVError import pytest import requests +from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.caldav.const import DOMAIN @@ -106,7 +106,13 @@ async def test_reauth_success( config_entry.add_to_hass(hass) - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -141,7 +147,13 @@ async def test_reauth_failure( config_entry.add_to_hass(hass) - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/caldav/test_todo.py b/tests/components/caldav/test_todo.py index 69a49e0fcbe..66f6e975453 100644 --- a/tests/components/caldav/test_todo.py +++ b/tests/components/caldav/test_todo.py @@ -8,17 +8,8 @@ from caldav.lib.error import DAVError, NotFoundError from caldav.objects import Todo import pytest -from homeassistant.components.todo import ( - ATTR_DESCRIPTION, - ATTR_DUE_DATE, - ATTR_DUE_DATETIME, - ATTR_ITEM, - ATTR_RENAME, - ATTR_STATUS, - DOMAIN as TODO_DOMAIN, - TodoServices, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.components.todo import DOMAIN as TODO_DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -235,12 +226,12 @@ async def test_supported_components( RESULT_ITEM, ), ( - {ATTR_DUE_DATE: "2023-11-18"}, + {"due_date": "2023-11-18"}, {"status": "NEEDS-ACTION", "summary": "Cheese", "due": date(2023, 11, 18)}, {**RESULT_ITEM, "due": "2023-11-18"}, ), ( - {ATTR_DUE_DATETIME: "2023-11-18T08:30:00-06:00"}, + {"due_datetime": "2023-11-18T08:30:00-06:00"}, { "status": "NEEDS-ACTION", "summary": "Cheese", @@ -249,7 +240,7 @@ async def test_supported_components( {**RESULT_ITEM, "due": "2023-11-18T08:30:00-06:00"}, ), ( - {ATTR_DESCRIPTION: "Make sure to get Swiss"}, + {"description": "Make sure to get Swiss"}, { "status": "NEEDS-ACTION", "summary": "Cheese", @@ -287,9 +278,9 @@ async def test_add_item( await hass.services.async_call( TODO_DOMAIN, 
- TodoServices.ADD_ITEM, - {ATTR_ITEM: "Cheese", **item_data}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "add_item", + {"item": "Cheese", **item_data}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -315,9 +306,9 @@ async def test_add_item_failure( with pytest.raises(HomeAssistantError, match="CalDAV save error"): await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "Cheese"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "add_item", + {"item": "Cheese"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -326,7 +317,7 @@ async def test_add_item_failure( ("update_data", "expected_ics", "expected_state", "expected_item"), [ ( - {ATTR_RENAME: "Swiss Cheese"}, + {"rename": "Swiss Cheese"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -343,7 +334,7 @@ async def test_add_item_failure( }, ), ( - {ATTR_STATUS: "needs_action"}, + {"status": "needs_action"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -360,7 +351,7 @@ async def test_add_item_failure( }, ), ( - {ATTR_STATUS: "completed"}, + {"status": "completed"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -377,7 +368,7 @@ async def test_add_item_failure( }, ), ( - {ATTR_RENAME: "Swiss Cheese", ATTR_STATUS: "needs_action"}, + {"rename": "Swiss Cheese", "status": "needs_action"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -394,7 +385,7 @@ async def test_add_item_failure( }, ), ( - {ATTR_DUE_DATE: "2023-11-18"}, + {"due_date": "2023-11-18"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20231118", @@ -411,7 +402,7 @@ async def test_add_item_failure( }, ), ( - {ATTR_DUE_DATETIME: "2023-11-18T08:30:00-06:00"}, + {"due_datetime": "2023-11-18T08:30:00-06:00"}, [ "DESCRIPTION:Any kind will do", "DUE;TZID=America/Regina:20231118T083000", @@ -428,7 +419,7 @@ async def test_add_item_failure( }, ), ( - {ATTR_DUE_DATETIME: None}, + {"due_datetime": None}, [ "DESCRIPTION:Any kind will do", "STATUS:NEEDS-ACTION", @@ -443,7 +434,7 @@ async def test_add_item_failure( }, ), ( - {ATTR_DESCRIPTION: "Make sure to get Swiss"}, + {"description": "Make sure to get Swiss"}, [ "DESCRIPTION:Make sure to get Swiss", "DUE;VALUE=DATE:20171126", @@ -460,7 +451,7 @@ async def test_add_item_failure( }, ), ( - {ATTR_DESCRIPTION: None}, + {"description": None}, ["DUE;VALUE=DATE:20171126", "STATUS:NEEDS-ACTION", "SUMMARY:Cheese"], "1", { @@ -510,12 +501,12 @@ async def test_update_item( await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, + "update_item", { - ATTR_ITEM: "Cheese", + "item": "Cheese", **update_data, }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -529,9 +520,9 @@ async def test_update_item( result = await hass.services.async_call( TODO_DOMAIN, - TodoServices.GET_ITEMS, + "get_items", {}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, return_response=True, ) @@ -557,12 +548,12 @@ async def test_update_item_failure( with pytest.raises(HomeAssistantError, match="CalDAV save error"): await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, + "update_item", { - ATTR_ITEM: "Cheese", - ATTR_STATUS: "completed", + "item": "Cheese", + "status": "completed", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -591,12 +582,12 @@ async def test_update_item_lookup_failure( with pytest.raises(HomeAssistantError, match=match): await hass.services.async_call( TODO_DOMAIN, - 
TodoServices.UPDATE_ITEM, + "update_item", { - ATTR_ITEM: "Cheese", - ATTR_STATUS: "completed", + "item": "Cheese", + "status": "completed", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -644,9 +635,9 @@ async def test_remove_item( await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: uids_to_delete}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "remove_item", + {"item": uids_to_delete}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -677,9 +668,9 @@ async def test_remove_item_lookup_failure( with pytest.raises(HomeAssistantError, match=match): await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: "Cheese"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "remove_item", + {"item": "Cheese"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -706,9 +697,9 @@ async def test_remove_item_failure( with pytest.raises(HomeAssistantError, match="CalDAV delete error"): await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: "Cheese"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "remove_item", + {"item": "Cheese"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -734,9 +725,9 @@ async def test_remove_item_not_found( with pytest.raises(HomeAssistantError, match="Could not find"): await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: "Cheese"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "remove_item", + {"item": "Cheese"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -788,12 +779,12 @@ async def test_subscribe( ] await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, + "update_item", { - ATTR_ITEM: "Cheese", - ATTR_RENAME: "Milk", + "item": "Cheese", + "rename": "Milk", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) diff --git a/tests/components/calendar/conftest.py b/tests/components/calendar/conftest.py index 3e18f595764..83ecaca97d3 100644 --- a/tests/components/calendar/conftest.py +++ b/tests/components/calendar/conftest.py @@ -1,12 +1,12 @@ """Test fixtures for calendar sensor platforms.""" -from collections.abc import Generator import datetime import secrets from typing import Any from unittest.mock import AsyncMock import pytest +from typing_extensions import Generator from homeassistant.components.calendar import DOMAIN, CalendarEntity, CalendarEvent from homeassistant.config_entries import ConfigEntry, ConfigFlow diff --git a/tests/components/calendar/snapshots/test_init.ambr b/tests/components/calendar/snapshots/test_init.ambr index 1b2bb9f0196..fe23c5dbac9 100644 --- a/tests/components/calendar/snapshots/test_init.ambr +++ b/tests/components/calendar/snapshots/test_init.ambr @@ -7,6 +7,12 @@ }), }) # --- +# name: test_list_events_service_duration[frozen_time-calendar.calendar_1-00:15:00-list_events] + dict({ + 'events': list([ + ]), + }) +# --- # name: test_list_events_service_duration[frozen_time-calendar.calendar_1-01:00:00-get_events] dict({ 'calendar.calendar_1': dict({ @@ -22,6 +28,19 @@ }), }) # --- +# name: test_list_events_service_duration[frozen_time-calendar.calendar_1-01:00:00-list_events] + dict({ + 'events': list([ + dict({ + 'description': 'Future Description', + 'end': '2023-10-19T09:20:05-06:00', + 'location': 'Future Location', + 'start': '2023-10-19T08:20:05-06:00', + 'summary': 'Future Event', + }), + ]), + }) +# --- # name: 
test_list_events_service_duration[frozen_time-calendar.calendar_2-00:15:00-get_events] dict({ 'calendar.calendar_2': dict({ @@ -35,3 +54,14 @@ }), }) # --- +# name: test_list_events_service_duration[frozen_time-calendar.calendar_2-00:15:00-list_events] + dict({ + 'events': list([ + dict({ + 'end': '2023-10-19T08:20:05-06:00', + 'start': '2023-10-19T07:20:05-06:00', + 'summary': 'Current Event', + }), + ]), + }) +# --- diff --git a/tests/components/calendar/test_init.py b/tests/components/calendar/test_init.py index 4ad5e11b8e4..116ca70f15e 100644 --- a/tests/components/calendar/test_init.py +++ b/tests/components/calendar/test_init.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import Generator from datetime import timedelta from http import HTTPStatus from typing import Any @@ -10,6 +9,7 @@ from typing import Any from freezegun import freeze_time import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator import voluptuous as vol from homeassistant.components.calendar import DOMAIN, SERVICE_GET_EVENTS @@ -23,7 +23,7 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator @pytest.fixture(name="frozen_time") -def mock_frozen_time() -> str | None: +def mock_frozen_time() -> None: """Fixture to set a frozen time used in tests. This is needed so that it can run before other fixtures. @@ -32,7 +32,7 @@ def mock_frozen_time() -> str | None: @pytest.fixture(autouse=True) -def mock_set_frozen_time(frozen_time: str | None) -> Generator[None]: +def mock_set_frozen_time(frozen_time: Any) -> Generator[None]: """Fixture to freeze time that also can work for other fixtures.""" if not frozen_time: yield @@ -44,9 +44,9 @@ def mock_set_frozen_time(frozen_time: str | None) -> Generator[None]: @pytest.fixture(name="setup_platform", autouse=True) async def mock_setup_platform( hass: HomeAssistant, - set_time_zone: None, - frozen_time: str | None, - mock_setup_integration: None, + set_time_zone: Any, + frozen_time: Any, + mock_setup_integration: Any, config_entry: MockConfigEntry, ) -> None: """Fixture to setup platforms used in the test and fixtures are set up in the right order.""" diff --git a/tests/components/calendar/test_recorder.py b/tests/components/calendar/test_recorder.py index c7511b8b2b0..aeddebc226c 100644 --- a/tests/components/calendar/test_recorder.py +++ b/tests/components/calendar/test_recorder.py @@ -1,6 +1,7 @@ """The tests for calendar recorder.""" from datetime import timedelta +from typing import Any import pytest @@ -18,7 +19,7 @@ from tests.components.recorder.common import async_wait_recording_done async def mock_setup_dependencies( recorder_mock: Recorder, hass: HomeAssistant, - set_time_zone: None, + set_time_zone: Any, mock_setup_integration: None, config_entry: MockConfigEntry, ) -> None: diff --git a/tests/components/calendar/test_trigger.py b/tests/components/calendar/test_trigger.py index dfe4622e82e..3b415d46e63 100644 --- a/tests/components/calendar/test_trigger.py +++ b/tests/components/calendar/test_trigger.py @@ -9,7 +9,7 @@ forward exercising the triggers. 
from __future__ import annotations -from collections.abc import AsyncIterator, Callable, Generator +from collections.abc import AsyncIterator, Callable from contextlib import asynccontextmanager import datetime import logging @@ -19,6 +19,7 @@ import zoneinfo from freezegun.api import FrozenDateTimeFactory import pytest +from typing_extensions import Generator from homeassistant.components import automation, calendar from homeassistant.components.calendar.trigger import EVENT_END, EVENT_START @@ -84,7 +85,9 @@ class FakeSchedule: @pytest.fixture -def fake_schedule(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> FakeSchedule: +def fake_schedule( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> Generator[FakeSchedule]: """Fixture that tests can use to make fake events.""" # Setup start time for all tests @@ -102,7 +105,7 @@ def mock_test_entity(test_entities: list[MockCalendarEntity]) -> MockCalendarEnt @pytest.fixture(name="setup_platform", autouse=True) async def mock_setup_platform( hass: HomeAssistant, - mock_setup_integration: None, + mock_setup_integration: Any, config_entry: MockConfigEntry, ) -> None: """Fixture to setup platforms used in the test.""" diff --git a/tests/components/cambridge_audio/__init__.py b/tests/components/cambridge_audio/__init__.py deleted file mode 100644 index f6b5f48d39d..00000000000 --- a/tests/components/cambridge_audio/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for the Cambridge Audio integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/cambridge_audio/conftest.py b/tests/components/cambridge_audio/conftest.py deleted file mode 100644 index 33a9ded70e3..00000000000 --- a/tests/components/cambridge_audio/conftest.py +++ /dev/null @@ -1,85 +0,0 @@ -"""Cambridge Audio tests configuration.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, Mock, patch - -from aiostreammagic.models import ( - AudioOutput, - Display, - Info, - NowPlaying, - PlayState, - PresetList, - Source, - State, - Update, -) -import pytest - -from homeassistant.components.cambridge_audio.const import DOMAIN -from homeassistant.const import CONF_HOST - -from tests.common import MockConfigEntry, load_fixture, load_json_array_fixture - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.cambridge_audio.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_stream_magic_client() -> Generator[AsyncMock]: - """Mock an Cambridge Audio client.""" - with ( - patch( - "homeassistant.components.cambridge_audio.StreamMagicClient", - autospec=True, - ) as mock_client, - patch( - "homeassistant.components.cambridge_audio.config_flow.StreamMagicClient", - new=mock_client, - ), - ): - client = mock_client.return_value - client.host = "192.168.20.218" - client.info = Info.from_json(load_fixture("get_info.json", DOMAIN)) - client.sources = [ - Source.from_dict(x) - for x in load_json_array_fixture("get_sources.json", DOMAIN) - ] - client.state = State.from_json(load_fixture("get_state.json", DOMAIN)) - client.play_state = 
PlayState.from_json( - load_fixture("get_play_state.json", DOMAIN) - ) - client.now_playing = NowPlaying.from_json( - load_fixture("get_now_playing.json", DOMAIN) - ) - client.display = Display.from_json(load_fixture("get_display.json", DOMAIN)) - client.update = Update.from_json(load_fixture("get_update.json", DOMAIN)) - client.preset_list = PresetList.from_json( - load_fixture("get_presets_list.json", DOMAIN) - ) - client.audio_output = AudioOutput.from_json( - load_fixture("get_audio_output.json", DOMAIN) - ) - client.is_connected = Mock(return_value=True) - client.position_last_updated = client.play_state.position - client.unregister_state_update_callbacks.return_value = True - - yield client - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title="Cambridge Audio CXNv2", - data={CONF_HOST: "192.168.20.218"}, - unique_id="0020c2d8", - ) diff --git a/tests/components/cambridge_audio/const.py b/tests/components/cambridge_audio/const.py deleted file mode 100644 index 36057c79bb3..00000000000 --- a/tests/components/cambridge_audio/const.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Constants for Cambridge Audio integration tests.""" - -from homeassistant.components.media_player import DOMAIN as MP_DOMAIN - -DEVICE_NAME = "cambridge_audio_cxnv2" -ENTITY_ID = f"{MP_DOMAIN}.{DEVICE_NAME}" diff --git a/tests/components/cambridge_audio/fixtures/get_audio_output.json b/tests/components/cambridge_audio/fixtures/get_audio_output.json deleted file mode 100644 index e38ae037307..00000000000 --- a/tests/components/cambridge_audio/fixtures/get_audio_output.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "outputs": [ - { - "id": "speaker_a", - "name": "Speaker A" - }, - { - "id": "speaker_b", - "name": "Speaker B" - }, - { - "id": "headphones", - "name": "Headphones" - } - ] -} diff --git a/tests/components/cambridge_audio/fixtures/get_display.json b/tests/components/cambridge_audio/fixtures/get_display.json deleted file mode 100644 index 73cbf5a60b3..00000000000 --- a/tests/components/cambridge_audio/fixtures/get_display.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "brightness": "bright" -} diff --git a/tests/components/cambridge_audio/fixtures/get_info.json b/tests/components/cambridge_audio/fixtures/get_info.json deleted file mode 100644 index ee88995412e..00000000000 --- a/tests/components/cambridge_audio/fixtures/get_info.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "Cambridge Audio CXNv2", - "timezone": "America/Chicago", - "locale": "en_GB", - "usage_reports": true, - "setup": true, - "sources_setup": true, - "versions": [ - { - "component": "cast", - "version": "1.52.272222" - }, - { - "component": "MCU", - "version": "3.1+0.5+36" - }, - { - "component": "service-pack", - "version": "v022-a-151+a" - }, - { - "component": "application", - "version": "1.0+gitAUTOINC+a94a3e2ad8" - } - ], - "udn": "02680b5c-1320-4d54-9f7c-3cfe915ad4c3", - "hcv": 3764, - "model": "CXNv2", - "unit_id": "0020c2d8", - "max_http_body_size": 65536, - "api": "1.8" -} diff --git a/tests/components/cambridge_audio/fixtures/get_now_playing.json b/tests/components/cambridge_audio/fixtures/get_now_playing.json deleted file mode 100644 index 8dcc781be9b..00000000000 --- a/tests/components/cambridge_audio/fixtures/get_now_playing.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "state": "PLAYING", - "source": { - "id": "AIRPLAY", - "name": "AirPlay" - }, - "allow_apd": false, - "listening_on": "Listening on Cambridge Audio CXNv2 - AirPlay", - "display": { - 
"line1": "Holiday", - "line2": "Green Day", - "line3": "Greatest Hits: God's Favorite Band", - "format": "44.1kHz/16bit ALAC", - "mqa": "none", - "playback_source": "iPhone", - "class": "stream.service.airplay", - "art_file": "/tmp/current/AlbumArtFile-811-363", - "art_url": "http://192.168.20.218:80/album-art-2d89?id=1:246", - "progress": { - "position": 216, - "duration": 232 - } - }, - "controls": ["play_pause", "track_next", "track_previous"] -} diff --git a/tests/components/cambridge_audio/fixtures/get_play_state.json b/tests/components/cambridge_audio/fixtures/get_play_state.json deleted file mode 100644 index cd727ee58a7..00000000000 --- a/tests/components/cambridge_audio/fixtures/get_play_state.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "state": "play", - "position": 179, - "presettable": false, - "mode_repeat": "off", - "mode_shuffle": "off", - "metadata": { - "class": "md.track", - "source": "AIRPLAY", - "name": "AirPlay", - "duration": 232, - "album": "Greatest Hits: God's Favorite Band", - "artist": "Green Day", - "title": "Holiday", - "art_url": "http://192.168.20.218:80/album-art-2d89?id=1:246", - "mqa": "none", - "codec": "ALAC", - "lossless": true, - "sample_rate": 44100, - "bit_depth": 16 - } -} diff --git a/tests/components/cambridge_audio/fixtures/get_presets_list.json b/tests/components/cambridge_audio/fixtures/get_presets_list.json deleted file mode 100644 index 87d49e9fd30..00000000000 --- a/tests/components/cambridge_audio/fixtures/get_presets_list.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "start": 1, - "end": 99, - "max_presets": 99, - "presettable": true, - "presets": [ - { - "id": 1, - "name": "Chicago House Radio", - "type": "Radio", - "class": "stream.radio", - "state": "OK", - "is_playing": false, - "art_url": "https://static.airable.io/43/68/432868.png", - "airable_radio_id": 5317566146608442 - }, - { - "id": 2, - "name": "Spotify: Good & Evil", - "type": "Spotify", - "class": "stream.service.spotify", - "state": "OK", - "is_playing": true, - "art_url": "https://i.scdn.co/image/ab67616d0000b27325a5a1ed28871e8e53e62d59" - }, - { - "id": 3, - "name": "Unknown Preset Type", - "type": "Unknown", - "class": "stream.unknown", - "state": "OK" - } - ] -} diff --git a/tests/components/cambridge_audio/fixtures/get_sources.json b/tests/components/cambridge_audio/fixtures/get_sources.json deleted file mode 100644 index 185f65e5ff6..00000000000 --- a/tests/components/cambridge_audio/fixtures/get_sources.json +++ /dev/null @@ -1,113 +0,0 @@ -[ - { - "id": "IR", - "name": "Internet Radio", - "default_name": "Internet Radio", - "class": "stream.radio", - "nameable": false, - "ui_selectable": false, - "description": "Internet Radio", - "description_locale": "Internet Radio", - "preferred_order": 9 - }, - { - "id": "USB_AUDIO", - "name": "USB Audio", - "default_name": "USB Audio", - "class": "digital.usb", - "nameable": true, - "ui_selectable": true, - "description": "USB Audio", - "description_locale": "USB Audio", - "preferred_order": 1 - }, - { - "id": "SPDIF_COAX", - "name": "D2", - "default_name": "D2", - "class": "digital.coax", - "nameable": true, - "ui_selectable": false, - "description": "Digital Co-axial", - "description_locale": "Digital Co-axial", - "preferred_order": 3 - }, - { - "id": "SPDIF_TOSLINK", - "name": "D1", - "default_name": "D1", - "class": "digital.toslink", - "nameable": true, - "ui_selectable": false, - "description": "Digital Optical", - "description_locale": "Digital Optical", - "preferred_order": 2 - }, - { - "id": "MEDIA_PLAYER", - "name": "Media 
Library", - "default_name": "Media Library", - "class": "stream.media", - "nameable": false, - "ui_selectable": true, - "description": "Media Player", - "description_locale": "Media Player", - "preferred_order": 10 - }, - { - "id": "AIRPLAY", - "name": "AirPlay", - "default_name": "AirPlay", - "class": "stream.service.airplay", - "nameable": false, - "ui_selectable": true, - "description": "AirPlay", - "description_locale": "AirPlay", - "preferred_order": 11 - }, - { - "id": "SPOTIFY", - "name": "Spotify", - "default_name": "Spotify", - "class": "stream.service.spotify", - "nameable": false, - "ui_selectable": true, - "description": "Spotify", - "description_locale": "Spotify", - "preferred_order": 6, - "normalisation": "off" - }, - { - "id": "CAST", - "name": "Chromecast built-in", - "default_name": "Chromecast built-in", - "class": "stream.service.cast", - "nameable": false, - "ui_selectable": true, - "description": "Chromecast built-in", - "description_locale": "Chromecast built-in", - "preferred_order": 8 - }, - { - "id": "ROON", - "name": "Roon Ready", - "default_name": "Roon Ready", - "class": "stream.service.roon", - "nameable": false, - "ui_selectable": false, - "description": "Roon Ready", - "description_locale": "Roon Ready", - "preferred_order": 5 - }, - { - "id": "TIDAL", - "name": "TIDAL Connect", - "default_name": "TIDAL Connect", - "class": "stream.service.tidal", - "nameable": false, - "ui_selectable": false, - "description": "TIDAL", - "description_locale": "TIDAL", - "preferred_order": 7 - } -] diff --git a/tests/components/cambridge_audio/fixtures/get_state.json b/tests/components/cambridge_audio/fixtures/get_state.json deleted file mode 100644 index 1acf0df4f6a..00000000000 --- a/tests/components/cambridge_audio/fixtures/get_state.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "source": "AIRPLAY", - "power": true, - "pre_amp_mode": false, - "pre_amp_state": "disabled_user", - "cbus": "off" -} diff --git a/tests/components/cambridge_audio/fixtures/get_update.json b/tests/components/cambridge_audio/fixtures/get_update.json deleted file mode 100644 index a6fec6265c0..00000000000 --- a/tests/components/cambridge_audio/fixtures/get_update.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "early_update": false, - "update_available": false, - "updating": false -} diff --git a/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr b/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr deleted file mode 100644 index 1ba9c4093f6..00000000000 --- a/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,196 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'display': dict({ - 'brightness': 'bright', - }), - 'info': dict({ - 'api_version': '1.8', - 'locale': 'en_GB', - 'model': 'CXNv2', - 'name': 'Cambridge Audio CXNv2', - 'timezone': 'America/Chicago', - 'udn': '02680b5c-1320-4d54-9f7c-3cfe915ad4c3', - 'unit_id': '0020c2d8', - }), - 'now_playing': dict({ - 'controls': list([ - 'play_pause', - 'track_next', - 'track_previous', - ]), - }), - 'play_state': dict({ - 'metadata': dict({ - 'album': "Greatest Hits: God's Favorite Band", - 'art_url': 'http://192.168.20.218:80/album-art-2d89?id=1:246', - 'artist': 'Green Day', - 'bitrate': None, - 'class_name': 'md.track', - 'codec': 'ALAC', - 'duration': 232, - 'encoding': None, - 'lossless': True, - 'mqa': 'none', - 'name': 'AirPlay', - 'radio_id': None, - 'sample_format': None, - 'sample_rate': 44100, - 'signal': None, - 'source': 'AIRPLAY', - 'station': None, - 'title': 
'Holiday', - }), - 'mode_repeat': 'off', - 'mode_shuffle': 'off', - 'position': 179, - 'presettable': False, - 'state': 'play', - }), - 'presets_list': dict({ - 'end': 99, - 'max_presets': 99, - 'presets': list([ - dict({ - 'airable_radio_id': 5317566146608442, - 'art_url': 'https://static.airable.io/43/68/432868.png', - 'is_playing': False, - 'name': 'Chicago House Radio', - 'preset_class': 'stream.radio', - 'preset_id': 1, - 'state': 'OK', - 'type': 'Radio', - }), - dict({ - 'airable_radio_id': None, - 'art_url': 'https://i.scdn.co/image/ab67616d0000b27325a5a1ed28871e8e53e62d59', - 'is_playing': True, - 'name': 'Spotify: Good & Evil', - 'preset_class': 'stream.service.spotify', - 'preset_id': 2, - 'state': 'OK', - 'type': 'Spotify', - }), - dict({ - 'airable_radio_id': None, - 'art_url': None, - 'is_playing': False, - 'name': 'Unknown Preset Type', - 'preset_class': 'stream.unknown', - 'preset_id': 3, - 'state': 'OK', - 'type': 'Unknown', - }), - ]), - 'presettable': True, - 'start': 1, - }), - 'sources': list([ - dict({ - 'default_name': 'Internet Radio', - 'description': 'Internet Radio', - 'description_locale': 'Internet Radio', - 'id': 'IR', - 'name': 'Internet Radio', - 'nameable': False, - 'preferred_order': 9, - 'ui_selectable': False, - }), - dict({ - 'default_name': 'USB Audio', - 'description': 'USB Audio', - 'description_locale': 'USB Audio', - 'id': 'USB_AUDIO', - 'name': 'USB Audio', - 'nameable': True, - 'preferred_order': 1, - 'ui_selectable': True, - }), - dict({ - 'default_name': 'D2', - 'description': 'Digital Co-axial', - 'description_locale': 'Digital Co-axial', - 'id': 'SPDIF_COAX', - 'name': 'D2', - 'nameable': True, - 'preferred_order': 3, - 'ui_selectable': False, - }), - dict({ - 'default_name': 'D1', - 'description': 'Digital Optical', - 'description_locale': 'Digital Optical', - 'id': 'SPDIF_TOSLINK', - 'name': 'D1', - 'nameable': True, - 'preferred_order': 2, - 'ui_selectable': False, - }), - dict({ - 'default_name': 'Media Library', - 'description': 'Media Player', - 'description_locale': 'Media Player', - 'id': 'MEDIA_PLAYER', - 'name': 'Media Library', - 'nameable': False, - 'preferred_order': 10, - 'ui_selectable': True, - }), - dict({ - 'default_name': 'AirPlay', - 'description': 'AirPlay', - 'description_locale': 'AirPlay', - 'id': 'AIRPLAY', - 'name': 'AirPlay', - 'nameable': False, - 'preferred_order': 11, - 'ui_selectable': True, - }), - dict({ - 'default_name': 'Spotify', - 'description': 'Spotify', - 'description_locale': 'Spotify', - 'id': 'SPOTIFY', - 'name': 'Spotify', - 'nameable': False, - 'preferred_order': 6, - 'ui_selectable': True, - }), - dict({ - 'default_name': 'Chromecast built-in', - 'description': 'Chromecast built-in', - 'description_locale': 'Chromecast built-in', - 'id': 'CAST', - 'name': 'Chromecast built-in', - 'nameable': False, - 'preferred_order': 8, - 'ui_selectable': True, - }), - dict({ - 'default_name': 'Roon Ready', - 'description': 'Roon Ready', - 'description_locale': 'Roon Ready', - 'id': 'ROON', - 'name': 'Roon Ready', - 'nameable': False, - 'preferred_order': 5, - 'ui_selectable': False, - }), - dict({ - 'default_name': 'TIDAL Connect', - 'description': 'TIDAL', - 'description_locale': 'TIDAL', - 'id': 'TIDAL', - 'name': 'TIDAL Connect', - 'nameable': False, - 'preferred_order': 7, - 'ui_selectable': False, - }), - ]), - 'update': dict({ - 'early_update': False, - 'update_available': False, - 'updating': False, - }), - }) -# --- diff --git a/tests/components/cambridge_audio/snapshots/test_init.ambr 
b/tests/components/cambridge_audio/snapshots/test_init.ambr deleted file mode 100644 index 64182ee2188..00000000000 --- a/tests/components/cambridge_audio/snapshots/test_init.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: test_device_info - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://192.168.20.218', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'cambridge_audio', - '0020c2d8', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Cambridge Audio', - 'model': 'CXNv2', - 'model_id': None, - 'name': 'Cambridge Audio CXNv2', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': '0020c2d8', - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- \ No newline at end of file diff --git a/tests/components/cambridge_audio/snapshots/test_select.ambr b/tests/components/cambridge_audio/snapshots/test_select.ambr deleted file mode 100644 index b40c8a8d5c4..00000000000 --- a/tests/components/cambridge_audio/snapshots/test_select.ambr +++ /dev/null @@ -1,115 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[select.cambridge_audio_cxnv2_audio_output-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Speaker A', - 'Speaker B', - 'Headphones', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.cambridge_audio_cxnv2_audio_output', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Audio output', - 'platform': 'cambridge_audio', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'audio_output', - 'unique_id': '0020c2d8-audio_output', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[select.cambridge_audio_cxnv2_audio_output-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Cambridge Audio CXNv2 Audio output', - 'options': list([ - 'Speaker A', - 'Speaker B', - 'Headphones', - ]), - }), - 'context': , - 'entity_id': 'select.cambridge_audio_cxnv2_audio_output', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_all_entities[select.cambridge_audio_cxnv2_display_brightness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'bright', - 'dim', - 'off', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.cambridge_audio_cxnv2_display_brightness', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Display brightness', - 'platform': 'cambridge_audio', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'display_brightness', - 'unique_id': '0020c2d8-display_brightness', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[select.cambridge_audio_cxnv2_display_brightness-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Cambridge Audio CXNv2 Display brightness', - 'options': list([ - 'bright', - 'dim', - 'off', - ]), - }), - 'context': , - 'entity_id': 'select.cambridge_audio_cxnv2_display_brightness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'bright', - }) -# --- diff --git a/tests/components/cambridge_audio/snapshots/test_switch.ambr b/tests/components/cambridge_audio/snapshots/test_switch.ambr deleted file mode 100644 index 9bfcd7c6da7..00000000000 --- a/tests/components/cambridge_audio/snapshots/test_switch.ambr +++ /dev/null @@ -1,93 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[switch.cambridge_audio_cxnv2_early_update-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.cambridge_audio_cxnv2_early_update', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Early update', - 'platform': 'cambridge_audio', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'early_update', - 'unique_id': '0020c2d8-early_update', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[switch.cambridge_audio_cxnv2_early_update-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Cambridge Audio CXNv2 Early update', - }), - 'context': , - 'entity_id': 'switch.cambridge_audio_cxnv2_early_update', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[switch.cambridge_audio_cxnv2_pre_amp-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.cambridge_audio_cxnv2_pre_amp', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Pre-Amp', - 'platform': 'cambridge_audio', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'pre_amp', - 'unique_id': '0020c2d8-pre_amp', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[switch.cambridge_audio_cxnv2_pre_amp-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Cambridge Audio CXNv2 Pre-Amp', - }), - 'context': , - 'entity_id': 'switch.cambridge_audio_cxnv2_pre_amp', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/cambridge_audio/test_config_flow.py b/tests/components/cambridge_audio/test_config_flow.py deleted file mode 100644 index 9a2d077b8f8..00000000000 --- a/tests/components/cambridge_audio/test_config_flow.py +++ /dev/null @@ -1,194 +0,0 @@ -"""Tests for the Cambridge Audio config flow.""" - -from ipaddress import ip_address -from unittest.mock import AsyncMock - -from aiostreammagic import StreamMagicError - -from homeassistant.components.cambridge_audio.const import DOMAIN -from homeassistant.components.zeroconf import ZeroconfServiceInfo -from 
homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF -from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - -ZEROCONF_DISCOVERY = ZeroconfServiceInfo( - ip_address=ip_address("192.168.20.218"), - ip_addresses=[ip_address("192.168.20.218")], - hostname="cambridge_CXNv2.local.", - name="cambridge_CXNv2._stream-magic._tcp.local.", - port=80, - type="_stream-magic._tcp.local.", - properties={ - "serial": "0020c2d8", - "hcv": "3764", - "software": "v022-a-151+a", - "model": "CXNv2", - "udn": "02680b5c-1320-4d54-9f7c-3cfe915ad4c3", - }, -) - - -async def test_full_flow( - hass: HomeAssistant, - mock_stream_magic_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test full flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "192.168.20.218"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Cambridge Audio CXNv2" - assert result["data"] == { - CONF_HOST: "192.168.20.218", - } - assert result["result"].unique_id == "0020c2d8" - - -async def test_flow_errors( - hass: HomeAssistant, - mock_stream_magic_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test flow errors.""" - mock_stream_magic_client.connect.side_effect = StreamMagicError - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "192.168.20.218"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - mock_stream_magic_client.connect.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "192.168.20.218"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_duplicate( - hass: HomeAssistant, - mock_stream_magic_client: AsyncMock, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test duplicate flow.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "192.168.20.218"}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_zeroconf_flow( - hass: HomeAssistant, - mock_stream_magic_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test zeroconf flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZEROCONF_DISCOVERY, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "discovery_confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Cambridge Audio CXNv2" - assert result["data"] == { - CONF_HOST: 
"192.168.20.218", - } - assert result["result"].unique_id == "0020c2d8" - - -async def test_zeroconf_flow_errors( - hass: HomeAssistant, - mock_stream_magic_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test zeroconf flow.""" - mock_stream_magic_client.connect.side_effect = StreamMagicError - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZEROCONF_DISCOVERY, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "cannot_connect" - - mock_stream_magic_client.connect.side_effect = None - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZEROCONF_DISCOVERY, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "discovery_confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Cambridge Audio CXNv2" - assert result["data"] == { - CONF_HOST: "192.168.20.218", - } - assert result["result"].unique_id == "0020c2d8" - - -async def test_zeroconf_duplicate( - hass: HomeAssistant, - mock_stream_magic_client: AsyncMock, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test duplicate flow.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZEROCONF_DISCOVERY, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/cambridge_audio/test_diagnostics.py b/tests/components/cambridge_audio/test_diagnostics.py deleted file mode 100644 index 9c1a09c6318..00000000000 --- a/tests/components/cambridge_audio/test_diagnostics.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Tests for the diagnostics data provided by the Cambridge Audio integration.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_entry_diagnostics( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_stream_magic_client: AsyncMock, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test config entry diagnostics.""" - await setup_integration(hass, mock_config_entry) - - result = await get_diagnostics_for_config_entry( - hass, hass_client, mock_config_entry - ) - assert result == snapshot diff --git a/tests/components/cambridge_audio/test_init.py b/tests/components/cambridge_audio/test_init.py deleted file mode 100644 index 4a8c1b668e2..00000000000 --- a/tests/components/cambridge_audio/test_init.py +++ /dev/null @@ -1,45 +0,0 @@ -"""Tests for the Cambridge Audio integration.""" - -from unittest.mock import AsyncMock - -from aiostreammagic import StreamMagicError -from syrupy import SnapshotAssertion - -from homeassistant.components.cambridge_audio.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from . 
import setup_integration - -from tests.common import MockConfigEntry - - -async def test_config_entry_not_ready( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_stream_magic_client: AsyncMock, -) -> None: - """Test the Cambridge Audio configuration entry not ready.""" - mock_stream_magic_client.connect = AsyncMock(side_effect=StreamMagicError()) - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - mock_stream_magic_client.connect = AsyncMock(return_value=True) - - -async def test_device_info( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_stream_magic_client: AsyncMock, - mock_config_entry: MockConfigEntry, - device_registry: dr.DeviceRegistry, -) -> None: - """Test device registry integration.""" - await setup_integration(hass, mock_config_entry) - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} - ) - assert device_entry is not None - assert device_entry == snapshot diff --git a/tests/components/cambridge_audio/test_media_player.py b/tests/components/cambridge_audio/test_media_player.py deleted file mode 100644 index b857e61c235..00000000000 --- a/tests/components/cambridge_audio/test_media_player.py +++ /dev/null @@ -1,498 +0,0 @@ -"""Tests for the Cambridge Audio integration.""" - -from unittest.mock import AsyncMock - -from aiostreammagic import ( - RepeatMode as CambridgeRepeatMode, - ShuffleMode, - TransportControl, -) -from aiostreammagic.models import CallbackType -import pytest - -from homeassistant.components.media_player import ( - ATTR_MEDIA_CONTENT_ID, - ATTR_MEDIA_CONTENT_TYPE, - ATTR_MEDIA_REPEAT, - ATTR_MEDIA_SEEK_POSITION, - ATTR_MEDIA_SHUFFLE, - ATTR_MEDIA_VOLUME_LEVEL, - DOMAIN as MP_DOMAIN, - SERVICE_PLAY_MEDIA, - MediaPlayerEntityFeature, - RepeatMode, -) -from homeassistant.const import ( - ATTR_ENTITY_ID, - ATTR_SUPPORTED_FEATURES, - SERVICE_MEDIA_NEXT_TRACK, - SERVICE_MEDIA_PAUSE, - SERVICE_MEDIA_PLAY, - SERVICE_MEDIA_PREVIOUS_TRACK, - SERVICE_MEDIA_SEEK, - SERVICE_MEDIA_STOP, - SERVICE_REPEAT_SET, - SERVICE_SHUFFLE_SET, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - SERVICE_VOLUME_DOWN, - SERVICE_VOLUME_SET, - SERVICE_VOLUME_UP, - STATE_BUFFERING, - STATE_IDLE, - STATE_OFF, - STATE_ON, - STATE_PAUSED, - STATE_PLAYING, - STATE_STANDBY, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError - -from . 
import setup_integration -from .const import ENTITY_ID - -from tests.common import MockConfigEntry - - -async def mock_state_update(client: AsyncMock) -> None: - """Trigger a callback in the media player.""" - for callback in client.register_state_update_callbacks.call_args_list: - await callback[0][0](client, CallbackType.STATE) - - -async def test_entity_supported_features( - hass: HomeAssistant, - mock_stream_magic_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test entity attributes.""" - await setup_integration(hass, mock_config_entry) - await mock_state_update(mock_stream_magic_client) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID) - attrs = state.attributes - - # Ensure volume isn't available when pre-amp is disabled - assert not mock_stream_magic_client.state.pre_amp_mode - assert ( - MediaPlayerEntityFeature.VOLUME_SET - | MediaPlayerEntityFeature.VOLUME_STEP - | MediaPlayerEntityFeature.VOLUME_MUTE - not in attrs[ATTR_SUPPORTED_FEATURES] - ) - - # Check for basic media controls - assert { - TransportControl.PLAY_PAUSE, - TransportControl.TRACK_NEXT, - TransportControl.TRACK_PREVIOUS, - }.issubset(mock_stream_magic_client.now_playing.controls) - assert ( - MediaPlayerEntityFeature.PLAY - | MediaPlayerEntityFeature.PAUSE - | MediaPlayerEntityFeature.NEXT_TRACK - | MediaPlayerEntityFeature.PREVIOUS_TRACK - in attrs[ATTR_SUPPORTED_FEATURES] - ) - assert ( - MediaPlayerEntityFeature.SHUFFLE_SET - | MediaPlayerEntityFeature.REPEAT_SET - | MediaPlayerEntityFeature.SEEK - not in attrs[ATTR_SUPPORTED_FEATURES] - ) - - mock_stream_magic_client.now_playing.controls = [ - TransportControl.TOGGLE_REPEAT, - TransportControl.TOGGLE_SHUFFLE, - TransportControl.SEEK, - ] - await mock_state_update(mock_stream_magic_client) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID) - attrs = state.attributes - - assert ( - MediaPlayerEntityFeature.SHUFFLE_SET - | MediaPlayerEntityFeature.REPEAT_SET - | MediaPlayerEntityFeature.SEEK - in attrs[ATTR_SUPPORTED_FEATURES] - ) - - mock_stream_magic_client.state.pre_amp_mode = True - await mock_state_update(mock_stream_magic_client) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID) - attrs = state.attributes - assert ( - MediaPlayerEntityFeature.VOLUME_SET - | MediaPlayerEntityFeature.VOLUME_STEP - | MediaPlayerEntityFeature.VOLUME_MUTE - in attrs[ATTR_SUPPORTED_FEATURES] - ) - - -@pytest.mark.parametrize( - ("power_state", "play_state", "media_player_state"), - [ - (True, "NETWORK", STATE_STANDBY), - (False, "NETWORK", STATE_STANDBY), - (False, "play", STATE_OFF), - (True, "play", STATE_PLAYING), - (True, "pause", STATE_PAUSED), - (True, "connecting", STATE_BUFFERING), - (True, "stop", STATE_IDLE), - (True, "ready", STATE_IDLE), - (True, "other", STATE_ON), - ], -) -async def test_entity_state( - hass: HomeAssistant, - mock_stream_magic_client: AsyncMock, - mock_config_entry: MockConfigEntry, - power_state: bool, - play_state: str, - media_player_state: str, -) -> None: - """Test media player state.""" - await setup_integration(hass, mock_config_entry) - mock_stream_magic_client.state.power = power_state - mock_stream_magic_client.play_state.state = play_state - await mock_state_update(mock_stream_magic_client) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID) - assert state.state == media_player_state - - -async def test_media_play_pause_stop( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_stream_magic_client: 
AsyncMock, -) -> None: - """Test media next previous track service.""" - await setup_integration(hass, mock_config_entry) - - data = {ATTR_ENTITY_ID: ENTITY_ID} - - # Test for play/pause command when separate play and pause controls are unavailable - await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PAUSE, data, True) - mock_stream_magic_client.play_pause.assert_called_once() - - await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PLAY, data, True) - assert mock_stream_magic_client.play_pause.call_count == 2 - - # Test for separate play and pause controls - mock_stream_magic_client.now_playing.controls = [ - TransportControl.PLAY, - TransportControl.PAUSE, - TransportControl.STOP, - ] - await mock_state_update(mock_stream_magic_client) - await hass.async_block_till_done() - - await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PAUSE, data, True) - mock_stream_magic_client.pause.assert_called_once() - - await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PLAY, data, True) - mock_stream_magic_client.play.assert_called_once() - - await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_STOP, data, True) - mock_stream_magic_client.stop.assert_called_once() - - -async def test_media_next_previous_track( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_stream_magic_client: AsyncMock, -) -> None: - """Test media next previous track service.""" - await setup_integration(hass, mock_config_entry) - - data = {ATTR_ENTITY_ID: ENTITY_ID} - - await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_NEXT_TRACK, data, True) - - mock_stream_magic_client.next_track.assert_called_once() - - await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK, data, True) - - mock_stream_magic_client.previous_track.assert_called_once() - - -async def test_shuffle_repeat_set( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_stream_magic_client: AsyncMock, -) -> None: - """Test shuffle and repeat set service.""" - await setup_integration(hass, mock_config_entry) - - mock_stream_magic_client.now_playing.controls = [ - TransportControl.TOGGLE_SHUFFLE, - TransportControl.TOGGLE_REPEAT, - ] - - # Test shuffle - await hass.services.async_call( - MP_DOMAIN, - SERVICE_SHUFFLE_SET, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_SHUFFLE: False}, - ) - - mock_stream_magic_client.set_shuffle.assert_called_with(ShuffleMode.OFF) - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_SHUFFLE_SET, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_SHUFFLE: True}, - ) - - mock_stream_magic_client.set_shuffle.assert_called_with(ShuffleMode.ALL) - - # Test repeat - await hass.services.async_call( - MP_DOMAIN, - SERVICE_REPEAT_SET, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_REPEAT: RepeatMode.OFF}, - ) - - mock_stream_magic_client.set_repeat.assert_called_with(CambridgeRepeatMode.OFF) - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_REPEAT_SET, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_REPEAT: RepeatMode.ALL}, - ) - - mock_stream_magic_client.set_repeat.assert_called_with(CambridgeRepeatMode.ALL) - - -async def test_shuffle_repeat_get( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_stream_magic_client: AsyncMock, -) -> None: - """Test shuffle and repeat get service.""" - await setup_integration(hass, mock_config_entry) - - mock_stream_magic_client.play_state.mode_shuffle = None - - state = hass.states.get(ENTITY_ID) - assert state.attributes[ATTR_MEDIA_SHUFFLE] is False - - mock_stream_magic_client.play_state.mode_shuffle = ShuffleMode.ALL - - await 
mock_state_update(mock_stream_magic_client) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID) - assert state.attributes[ATTR_MEDIA_SHUFFLE] is True - - mock_stream_magic_client.play_state.mode_repeat = CambridgeRepeatMode.ALL - - await mock_state_update(mock_stream_magic_client) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID) - assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.ALL - - -async def test_power_service( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_stream_magic_client: AsyncMock, -) -> None: - """Test power service.""" - await setup_integration(hass, mock_config_entry) - - data = {ATTR_ENTITY_ID: ENTITY_ID} - - await hass.services.async_call(MP_DOMAIN, SERVICE_TURN_ON, data, True) - - mock_stream_magic_client.power_on.assert_called_once() - - await hass.services.async_call(MP_DOMAIN, SERVICE_TURN_OFF, data, True) - - mock_stream_magic_client.power_off.assert_called_once() - - -async def test_media_seek( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_stream_magic_client: AsyncMock, -) -> None: - """Test media seek service.""" - await setup_integration(hass, mock_config_entry) - - mock_stream_magic_client.now_playing.controls = [ - TransportControl.SEEK, - ] - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_MEDIA_SEEK, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_SEEK_POSITION: 100}, - ) - - mock_stream_magic_client.media_seek.assert_called_once_with(100) - - -async def test_media_volume( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_stream_magic_client: AsyncMock, -) -> None: - """Test volume service.""" - await setup_integration(hass, mock_config_entry) - - mock_stream_magic_client.state.pre_amp_mode = True - - # Test volume up - await hass.services.async_call( - MP_DOMAIN, - SERVICE_VOLUME_UP, - {ATTR_ENTITY_ID: ENTITY_ID}, - ) - - mock_stream_magic_client.volume_up.assert_called_once() - - # Test volume down - await hass.services.async_call( - MP_DOMAIN, - SERVICE_VOLUME_DOWN, - {ATTR_ENTITY_ID: ENTITY_ID}, - ) - - mock_stream_magic_client.volume_down.assert_called_once() - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_VOLUME_SET, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_VOLUME_LEVEL: 0.30}, - ) - - mock_stream_magic_client.set_volume.assert_called_once_with(30) - - -async def test_play_media_preset_item_id( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_stream_magic_client: AsyncMock, -) -> None: - """Test playing media with a preset item id.""" - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_MEDIA_CONTENT_TYPE: "preset", - ATTR_MEDIA_CONTENT_ID: "1", - }, - blocking=True, - ) - assert mock_stream_magic_client.recall_preset.call_count == 1 - assert mock_stream_magic_client.recall_preset.call_args_list[0].args[0] == 1 - - with pytest.raises(ServiceValidationError, match="Missing preset for media_id: 10"): - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_MEDIA_CONTENT_TYPE: "preset", - ATTR_MEDIA_CONTENT_ID: "10", - }, - blocking=True, - ) - - with pytest.raises( - ServiceValidationError, match="Preset must be an integer, got: UNKNOWN_PRESET" - ): - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_MEDIA_CONTENT_TYPE: "preset", - ATTR_MEDIA_CONTENT_ID: "UNKNOWN_PRESET", - }, - 
blocking=True, - ) - - -async def test_play_media_airable_radio_id( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_stream_magic_client: AsyncMock, -) -> None: - """Test playing media with an airable radio id.""" - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_MEDIA_CONTENT_TYPE: "airable", - ATTR_MEDIA_CONTENT_ID: "12345678", - }, - blocking=True, - ) - assert mock_stream_magic_client.play_radio_airable.call_count == 1 - call_args = mock_stream_magic_client.play_radio_airable.call_args_list[0].args - assert call_args[0] == "Radio" - assert call_args[1] == 12345678 - - -async def test_play_media_internet_radio( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_stream_magic_client: AsyncMock, -) -> None: - """Test playing media with a url.""" - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_MEDIA_CONTENT_TYPE: "internet_radio", - ATTR_MEDIA_CONTENT_ID: "https://example.com", - }, - blocking=True, - ) - assert mock_stream_magic_client.play_radio_url.call_count == 1 - call_args = mock_stream_magic_client.play_radio_url.call_args_list[0].args - assert call_args[0] == "Radio" - assert call_args[1] == "https://example.com" - - -async def test_play_media_unknown_type( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_stream_magic_client: AsyncMock, -) -> None: - """Test playing media with an unsupported content type.""" - await setup_integration(hass, mock_config_entry) - - with pytest.raises( - HomeAssistantError, - match="Unsupported media type for Cambridge Audio device: unsupported_content_type", - ): - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_MEDIA_CONTENT_TYPE: "unsupported_content_type", - ATTR_MEDIA_CONTENT_ID: "1", - }, - blocking=True, - ) diff --git a/tests/components/cambridge_audio/test_select.py b/tests/components/cambridge_audio/test_select.py deleted file mode 100644 index 473c4027163..00000000000 --- a/tests/components/cambridge_audio/test_select.py +++ /dev/null @@ -1,64 +0,0 @@ -"""Tests for the Cambridge Audio select platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.select import ( - DOMAIN as SELECT_DOMAIN, - SERVICE_SELECT_OPTION, -) -from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_stream_magic_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.cambridge_audio.PLATFORMS", [Platform.SELECT]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_setting_value( - hass: HomeAssistant, - mock_stream_magic_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test setting value.""" - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: "select.cambridge_audio_cxnv2_display_brightness", - ATTR_OPTION: "dim", - }, - blocking=True, - ) - mock_stream_magic_client.set_display_brightness.assert_called_once_with("dim") - - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: "select.cambridge_audio_cxnv2_audio_output", - ATTR_OPTION: "Speaker A", - }, - blocking=True, - ) - mock_stream_magic_client.set_audio_output.assert_called_once_with("speaker_a") diff --git a/tests/components/cambridge_audio/test_switch.py b/tests/components/cambridge_audio/test_switch.py deleted file mode 100644 index 3192f198d1f..00000000000 --- a/tests/components/cambridge_audio/test_switch.py +++ /dev/null @@ -1,60 +0,0 @@ -"""Tests for the Cambridge Audio switch platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_ON -from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_stream_magic_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.cambridge_audio.PLATFORMS", [Platform.SWITCH]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_setting_value( - hass: HomeAssistant, - mock_stream_magic_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test setting value.""" - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: "switch.cambridge_audio_cxnv2_early_update", - }, - blocking=True, - ) - mock_stream_magic_client.set_early_update.assert_called_once_with(True) - mock_stream_magic_client.set_early_update.reset_mock() - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: "switch.cambridge_audio_cxnv2_early_update", - }, - blocking=True, - ) - mock_stream_magic_client.set_early_update.assert_called_once_with(False) diff --git a/tests/components/camera/common.py b/tests/components/camera/common.py index 569756c2640..9cacf85d907 100644 --- a/tests/components/camera/common.py +++ b/tests/components/camera/common.py @@ -6,19 +6,8 @@ components. Instead call the service directly. from unittest.mock import Mock -from webrtc_models import RTCIceCandidate - -from homeassistant.components.camera import ( - Camera, - CameraWebRTCProvider, - WebRTCAnswer, - WebRTCSendMessage, -) -from homeassistant.core import callback - EMPTY_8_6_JPEG = b"empty_8_6" WEBRTC_ANSWER = "a=sendonly" -STREAM_SOURCE = "rtsp://127.0.0.1/stream" def mock_turbo_jpeg( @@ -33,43 +22,3 @@ def mock_turbo_jpeg( mocked_turbo_jpeg.scale_with_quality.return_value = EMPTY_8_6_JPEG mocked_turbo_jpeg.encode.return_value = EMPTY_8_6_JPEG return mocked_turbo_jpeg - - -class SomeTestProvider(CameraWebRTCProvider): - """Test provider.""" - - def __init__(self) -> None: - """Initialize the provider.""" - self._is_supported = True - - @property - def domain(self) -> str: - """Return the integration domain of the provider.""" - return "some_test" - - @callback - def async_is_supported(self, stream_source: str) -> bool: - """Determine if the provider supports the stream source.""" - return self._is_supported - - async def async_handle_async_webrtc_offer( - self, - camera: Camera, - offer_sdp: str, - session_id: str, - send_message: WebRTCSendMessage, - ) -> None: - """Handle the WebRTC offer and return the answer via the provided callback. - - Return value determines if the offer was handled successfully. 
- """ - send_message(WebRTCAnswer(answer="answer")) - - async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate - ) -> None: - """Handle the WebRTC candidate.""" - - @callback - def async_close_session(self, session_id: str) -> None: - """Close the session.""" diff --git a/tests/components/camera/conftest.py b/tests/components/camera/conftest.py index f0c418711c7..524b56c2303 100644 --- a/tests/components/camera/conftest.py +++ b/tests/components/camera/conftest.py @@ -1,30 +1,18 @@ """Test helpers for camera.""" -from collections.abc import AsyncGenerator, Generator -from unittest.mock import AsyncMock, Mock, PropertyMock, patch +from unittest.mock import PropertyMock, patch import pytest -from webrtc_models import RTCIceCandidate +from typing_extensions import AsyncGenerator, Generator from homeassistant.components import camera from homeassistant.components.camera.const import StreamType -from homeassistant.components.camera.webrtc import WebRTCAnswer, WebRTCSendMessage -from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.setup import async_setup_component -from .common import STREAM_SOURCE, WEBRTC_ANSWER, SomeTestProvider - -from tests.common import ( - MockConfigEntry, - MockModule, - mock_config_flow, - mock_integration, - mock_platform, - setup_test_component_platform, -) +from .common import WEBRTC_ANSWER @pytest.fixture(autouse=True) @@ -68,37 +56,23 @@ def mock_camera_hls_fixture(mock_camera: None) -> Generator[None]: yield -@pytest.fixture -async def mock_camera_webrtc_frontendtype_only( - hass: HomeAssistant, -) -> AsyncGenerator[None]: +@pytest.fixture(name="mock_camera_web_rtc") +async def mock_camera_web_rtc_fixture(hass: HomeAssistant) -> AsyncGenerator[None]: """Initialize a demo camera platform with WebRTC.""" assert await async_setup_component( hass, "camera", {camera.DOMAIN: {"platform": "demo"}} ) await hass.async_block_till_done() - with patch( - "homeassistant.components.camera.Camera.frontend_stream_type", - new_callable=PropertyMock(return_value=StreamType.WEB_RTC), - ): - yield - - -@pytest.fixture -async def mock_camera_webrtc( - mock_camera_webrtc_frontendtype_only: None, -) -> AsyncGenerator[None]: - """Initialize a demo camera platform with WebRTC.""" - - async def async_handle_async_webrtc_offer( - offer_sdp: str, session_id: str, send_message: WebRTCSendMessage - ) -> None: - send_message(WebRTCAnswer(WEBRTC_ANSWER)) - - with patch( - "homeassistant.components.camera.Camera.async_handle_async_webrtc_offer", - side_effect=async_handle_async_webrtc_offer, + with ( + patch( + "homeassistant.components.camera.Camera.frontend_stream_type", + new_callable=PropertyMock(return_value=StreamType.WEB_RTC), + ), + patch( + "homeassistant.components.camera.Camera.async_handle_web_rtc_offer", + return_value=WEBRTC_ANSWER, + ), ): yield @@ -137,116 +111,3 @@ def mock_camera_with_no_name_fixture(mock_camera_with_device: None) -> Generator new_callable=PropertyMock(return_value=None), ): yield - - -@pytest.fixture(name="mock_stream") -async def mock_stream_fixture(hass: HomeAssistant) -> None: - """Initialize a demo camera platform with streaming.""" - assert await async_setup_component(hass, "stream", {"stream": {}}) - - -@pytest.fixture(name="mock_stream_source") -def mock_stream_source_fixture() -> Generator[AsyncMock]: - """Fixture to create an RTSP stream 
source.""" - with patch( - "homeassistant.components.camera.Camera.stream_source", - return_value=STREAM_SOURCE, - ) as mock_stream_source: - yield mock_stream_source - - -@pytest.fixture -async def mock_test_webrtc_cameras(hass: HomeAssistant) -> None: - """Initialize test WebRTC cameras with native RTC support.""" - - # Cannot use the fixture mock_camera_web_rtc as it's mocking Camera.async_handle_web_rtc_offer - # and native support is checked by verify the function "async_handle_web_rtc_offer" was - # overwritten(implemented) or not - class BaseCamera(camera.Camera): - """Base Camera.""" - - _attr_supported_features: camera.CameraEntityFeature = ( - camera.CameraEntityFeature.STREAM - ) - _attr_frontend_stream_type: camera.StreamType = camera.StreamType.WEB_RTC - - async def stream_source(self) -> str | None: - return STREAM_SOURCE - - class SyncCamera(BaseCamera): - """Mock Camera with native sync WebRTC support.""" - - _attr_name = "Sync" - - async def async_handle_web_rtc_offer(self, offer_sdp: str) -> str | None: - return WEBRTC_ANSWER - - class AsyncCamera(BaseCamera): - """Mock Camera with native async WebRTC support.""" - - _attr_name = "Async" - - async def async_handle_async_webrtc_offer( - self, offer_sdp: str, session_id: str, send_message: WebRTCSendMessage - ) -> None: - send_message(WebRTCAnswer(WEBRTC_ANSWER)) - - async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate - ) -> None: - """Handle a WebRTC candidate.""" - # Do nothing - - domain = "test" - - entry = MockConfigEntry(domain=domain) - entry.add_to_hass(hass) - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups( - config_entry, [camera.DOMAIN] - ) - return True - - async def async_unload_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Unload test config entry.""" - await hass.config_entries.async_forward_entry_unload( - config_entry, camera.DOMAIN - ) - return True - - mock_integration( - hass, - MockModule( - domain, - async_setup_entry=async_setup_entry_init, - async_unload_entry=async_unload_entry_init, - ), - ) - setup_test_component_platform( - hass, camera.DOMAIN, [SyncCamera(), AsyncCamera()], from_config_entry=True - ) - mock_platform(hass, f"{domain}.config_flow", Mock()) - - with mock_config_flow(domain, ConfigFlow): - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - -@pytest.fixture -async def register_test_provider( - hass: HomeAssistant, -) -> AsyncGenerator[SomeTestProvider]: - """Add WebRTC test provider.""" - await async_setup_component(hass, "camera", {}) - - provider = SomeTestProvider() - unsub = camera.async_register_webrtc_provider(hass, provider) - await hass.async_block_till_done() - yield provider - unsub() diff --git a/tests/components/camera/snapshots/test_init.ambr b/tests/components/camera/snapshots/test_init.ambr deleted file mode 100644 index eae1c481cc0..00000000000 --- a/tests/components/camera/snapshots/test_init.ambr +++ /dev/null @@ -1,127 +0,0 @@ -# serializer version: 1 -# name: test_record_service[/test/recording_{{ entity_id }}.mpg-/test/recording_.mpg-expected_issues1] - IssueRegistryItemSnapshot({ - 'active': True, - 'breaks_in_ha_version': '2025.6.0', - 'created': , - 'data': None, - 'dismissed_version': None, - 'domain': 'camera', - 'is_fixable': True, - 'is_persistent': False, - 'issue_domain': None, - 'issue_id': 
'deprecated_filename_template_camera.demo_camera_record', - 'learn_more_url': None, - 'severity': , - 'translation_key': 'deprecated_filename_template', - 'translation_placeholders': dict({ - 'entity_id': 'camera.demo_camera', - 'service': 'camera.record', - }), - }) -# --- -# name: test_record_service[/test/recording_{{ entity_id.entity_id }}.mpg-/test/recording_camera.demo_camera.mpg-expected_issues3] - IssueRegistryItemSnapshot({ - 'active': True, - 'breaks_in_ha_version': '2025.6.0', - 'created': , - 'data': None, - 'dismissed_version': None, - 'domain': 'camera', - 'is_fixable': True, - 'is_persistent': False, - 'issue_domain': None, - 'issue_id': 'deprecated_filename_template_camera.demo_camera_record', - 'learn_more_url': None, - 'severity': , - 'translation_key': 'deprecated_filename_template', - 'translation_placeholders': dict({ - 'entity_id': 'camera.demo_camera', - 'service': 'camera.record', - }), - }) -# --- -# name: test_record_service[/test/recording_{{ entity_id.name }}.mpg-/test/recording_Demo camera.mpg-expected_issues2] - IssueRegistryItemSnapshot({ - 'active': True, - 'breaks_in_ha_version': '2025.6.0', - 'created': , - 'data': None, - 'dismissed_version': None, - 'domain': 'camera', - 'is_fixable': True, - 'is_persistent': False, - 'issue_domain': None, - 'issue_id': 'deprecated_filename_template_camera.demo_camera_record', - 'learn_more_url': None, - 'severity': , - 'translation_key': 'deprecated_filename_template', - 'translation_placeholders': dict({ - 'entity_id': 'camera.demo_camera', - 'service': 'camera.record', - }), - }) -# --- -# name: test_snapshot_service[/test/snapshot_{{ entity_id }}.jpg-/test/snapshot_.jpg-expected_issues1] - IssueRegistryItemSnapshot({ - 'active': True, - 'breaks_in_ha_version': '2025.6.0', - 'created': , - 'data': None, - 'dismissed_version': None, - 'domain': 'camera', - 'is_fixable': True, - 'is_persistent': False, - 'issue_domain': None, - 'issue_id': 'deprecated_filename_template_camera.demo_camera_snapshot', - 'learn_more_url': None, - 'severity': , - 'translation_key': 'deprecated_filename_template', - 'translation_placeholders': dict({ - 'entity_id': 'camera.demo_camera', - 'service': 'camera.snapshot', - }), - }) -# --- -# name: test_snapshot_service[/test/snapshot_{{ entity_id.entity_id }}.jpg-/test/snapshot_camera.demo_camera.jpg-expected_issues3] - IssueRegistryItemSnapshot({ - 'active': True, - 'breaks_in_ha_version': '2025.6.0', - 'created': , - 'data': None, - 'dismissed_version': None, - 'domain': 'camera', - 'is_fixable': True, - 'is_persistent': False, - 'issue_domain': None, - 'issue_id': 'deprecated_filename_template_camera.demo_camera_snapshot', - 'learn_more_url': None, - 'severity': , - 'translation_key': 'deprecated_filename_template', - 'translation_placeholders': dict({ - 'entity_id': 'camera.demo_camera', - 'service': 'camera.snapshot', - }), - }) -# --- -# name: test_snapshot_service[/test/snapshot_{{ entity_id.name }}.jpg-/test/snapshot_Demo camera.jpg-expected_issues2] - IssueRegistryItemSnapshot({ - 'active': True, - 'breaks_in_ha_version': '2025.6.0', - 'created': , - 'data': None, - 'dismissed_version': None, - 'domain': 'camera', - 'is_fixable': True, - 'is_persistent': False, - 'issue_domain': None, - 'issue_id': 'deprecated_filename_template_camera.demo_camera_snapshot', - 'learn_more_url': None, - 'severity': , - 'translation_key': 'deprecated_filename_template', - 'translation_placeholders': dict({ - 'entity_id': 'camera.demo_camera', - 'service': 'camera.snapshot', - }), - }) -# --- diff --git 
a/tests/components/camera/test_init.py b/tests/components/camera/test_init.py index 32024694b7e..7da6cd91a7a 100644 --- a/tests/components/camera/test_init.py +++ b/tests/components/camera/test_init.py @@ -3,41 +3,31 @@ from http import HTTPStatus import io from types import ModuleType -from unittest.mock import ANY, AsyncMock, Mock, PropertyMock, mock_open, patch +from unittest.mock import AsyncMock, Mock, PropertyMock, mock_open, patch import pytest -from syrupy.assertion import SnapshotAssertion -from webrtc_models import RTCIceCandidate +from typing_extensions import Generator from homeassistant.components import camera -from homeassistant.components.camera import ( - Camera, - CameraWebRTCProvider, - WebRTCAnswer, - WebRTCSendMessage, - async_register_webrtc_provider, -) from homeassistant.components.camera.const import ( DOMAIN, PREF_ORIENTATION, PREF_PRELOAD_STREAM, - StreamType, ) -from homeassistant.components.camera.helper import get_camera_from_entity_id from homeassistant.components.websocket_api import TYPE_RESULT +from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ENTITY_ID, EVENT_HOMEASSISTANT_STARTED, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant, callback -from homeassistant.core_config import async_process_ha_core_config +from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util -from .common import EMPTY_8_6_JPEG, STREAM_SOURCE, mock_turbo_jpeg +from .common import EMPTY_8_6_JPEG, WEBRTC_ANSWER, mock_turbo_jpeg from tests.common import ( async_fire_time_changed, @@ -46,6 +36,18 @@ from tests.common import ( ) from tests.typing import ClientSessionGenerator, WebSocketGenerator +STREAM_SOURCE = "rtsp://127.0.0.1/stream" +HLS_STREAM_SOURCE = "http://127.0.0.1/example.m3u" +WEBRTC_OFFER = "v=0\r\n" + + +@pytest.fixture(name="mock_stream") +def mock_stream_fixture(hass: HomeAssistant) -> None: + """Initialize a demo camera platform with streaming.""" + assert hass.loop.run_until_complete( + async_setup_component(hass, "stream", {"stream": {}}) + ) + @pytest.fixture(name="image_mock_url") async def image_mock_url_fixture(hass: HomeAssistant) -> None: @@ -56,6 +58,44 @@ async def image_mock_url_fixture(hass: HomeAssistant) -> None: await hass.async_block_till_done() +@pytest.fixture(name="mock_stream_source") +def mock_stream_source_fixture() -> Generator[AsyncMock]: + """Fixture to create an RTSP stream source.""" + with patch( + "homeassistant.components.camera.Camera.stream_source", + return_value=STREAM_SOURCE, + ) as mock_stream_source: + yield mock_stream_source + + +@pytest.fixture(name="mock_hls_stream_source") +async def mock_hls_stream_source_fixture() -> Generator[AsyncMock]: + """Fixture to create an HLS stream source.""" + with patch( + "homeassistant.components.camera.Camera.stream_source", + return_value=HLS_STREAM_SOURCE, + ) as mock_hls_stream_source: + yield mock_hls_stream_source + + +async def provide_web_rtc_answer(stream_source: str, offer: str, stream_id: str) -> str: + """Simulate an rtsp to webrtc provider.""" + assert stream_source == STREAM_SOURCE + assert offer == WEBRTC_OFFER + return WEBRTC_ANSWER + + +@pytest.fixture(name="mock_rtsp_to_web_rtc") +def mock_rtsp_to_web_rtc_fixture(hass: HomeAssistant) -> 
Generator[Mock]: + """Fixture that registers a mock rtsp to web_rtc provider.""" + mock_provider = Mock(side_effect=provide_web_rtc_answer) + unsub = camera.async_register_rtsp_to_web_rtc_provider( + hass, "mock_domain", mock_provider + ) + yield mock_provider + unsub() + + @pytest.mark.usefixtures("image_mock_url") async def test_get_image_from_camera(hass: HomeAssistant) -> None: """Grab an image from camera entity.""" @@ -205,38 +245,7 @@ async def test_get_image_fails(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("mock_camera") -@pytest.mark.parametrize( - ("filename_template", "expected_filename", "expected_issues"), - [ - ( - "/test/snapshot.jpg", - "/test/snapshot.jpg", - [], - ), - ( - "/test/snapshot_{{ entity_id }}.jpg", - "/test/snapshot_.jpg", - ["deprecated_filename_template_camera.demo_camera_snapshot"], - ), - ( - "/test/snapshot_{{ entity_id.name }}.jpg", - "/test/snapshot_Demo camera.jpg", - ["deprecated_filename_template_camera.demo_camera_snapshot"], - ), - ( - "/test/snapshot_{{ entity_id.entity_id }}.jpg", - "/test/snapshot_camera.demo_camera.jpg", - ["deprecated_filename_template_camera.demo_camera_snapshot"], - ), - ], -) -async def test_snapshot_service( - hass: HomeAssistant, - filename_template: str, - expected_filename: str, - expected_issues: list, - snapshot: SnapshotAssertion, -) -> None: +async def test_snapshot_service(hass: HomeAssistant) -> None: """Test snapshot service.""" mopen = mock_open() @@ -252,25 +261,16 @@ async def test_snapshot_service( camera.SERVICE_SNAPSHOT, { ATTR_ENTITY_ID: "camera.demo_camera", - camera.ATTR_FILENAME: filename_template, + camera.ATTR_FILENAME: "/test/snapshot.jpg", }, blocking=True, ) - mopen.assert_called_once_with(expected_filename, "wb") - mock_write = mopen().write assert len(mock_write.mock_calls) == 1 assert mock_write.mock_calls[0][1][0] == b"Test" - issue_registry = ir.async_get(hass) - assert len(issue_registry.issues) == 1 + len(expected_issues) - for expected_issue in expected_issues: - issue = issue_registry.async_get_issue(DOMAIN, expected_issue) - assert issue is not None - assert issue == snapshot - @pytest.mark.usefixtures("mock_camera") async def test_snapshot_service_not_allowed_path(hass: HomeAssistant) -> None: @@ -282,10 +282,7 @@ async def test_snapshot_service_not_allowed_path(hass: HomeAssistant) -> None: patch( "homeassistant.components.camera.os.makedirs", ), - pytest.raises( - HomeAssistantError, - match="Cannot write `/test/snapshot.jpg`, no access to path", - ), + pytest.raises(HomeAssistantError, match="/test/snapshot.jpg"), ): await hass.services.async_call( camera.DOMAIN, @@ -298,28 +295,6 @@ async def test_snapshot_service_not_allowed_path(hass: HomeAssistant) -> None: ) -@pytest.mark.usefixtures("mock_camera") -async def test_snapshot_service_os_error( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test snapshot service with os error.""" - with ( - patch.object(hass.config, "is_allowed_path", return_value=True), - patch("homeassistant.components.camera.os.makedirs", side_effect=OSError), - ): - await hass.services.async_call( - camera.DOMAIN, - camera.SERVICE_SNAPSHOT, - { - ATTR_ENTITY_ID: "camera.demo_camera", - camera.ATTR_FILENAME: "/test/snapshot.jpg", - }, - blocking=True, - ) - - assert "Can't write image to file:" in caplog.text - - @pytest.mark.usefixtures("mock_camera", "mock_stream") async def test_websocket_stream_no_source( hass: HomeAssistant, hass_ws_client: WebSocketGenerator @@ -601,34 +576,7 @@ async def 
test_record_service_invalid_path(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("mock_camera", "mock_stream") -@pytest.mark.parametrize( - ("filename_template", "expected_filename", "expected_issues"), - [ - ("/test/recording.mpg", "/test/recording.mpg", []), - ( - "/test/recording_{{ entity_id }}.mpg", - "/test/recording_.mpg", - ["deprecated_filename_template_camera.demo_camera_record"], - ), - ( - "/test/recording_{{ entity_id.name }}.mpg", - "/test/recording_Demo camera.mpg", - ["deprecated_filename_template_camera.demo_camera_record"], - ), - ( - "/test/recording_{{ entity_id.entity_id }}.mpg", - "/test/recording_camera.demo_camera.mpg", - ["deprecated_filename_template_camera.demo_camera_record"], - ), - ], -) -async def test_record_service( - hass: HomeAssistant, - filename_template: str, - expected_filename: str, - expected_issues: list, - snapshot: SnapshotAssertion, -) -> None: +async def test_record_service(hass: HomeAssistant) -> None: """Test record service.""" with ( patch( @@ -644,24 +592,12 @@ async def test_record_service( await hass.services.async_call( camera.DOMAIN, camera.SERVICE_RECORD, - { - ATTR_ENTITY_ID: "camera.demo_camera", - camera.ATTR_FILENAME: filename_template, - }, + {ATTR_ENTITY_ID: "camera.demo_camera", camera.CONF_FILENAME: "/my/path"}, blocking=True, ) # So long as we call stream.record, the rest should be covered # by those tests. - mock_record.assert_called_once_with( - ANY, expected_filename, duration=30, lookback=0 - ) - - issue_registry = ir.async_get(hass) - assert len(issue_registry.issues) == 1 + len(expected_issues) - for expected_issue in expected_issues: - issue = issue_registry.async_get_issue(DOMAIN, expected_issue) - assert issue is not None - assert issue == snapshot + assert mock_record.called @pytest.mark.usefixtures("mock_camera") @@ -683,12 +619,154 @@ async def test_camera_proxy_stream(hass_client: ClientSessionGenerator) -> None: assert response.status == HTTPStatus.BAD_GATEWAY +@pytest.mark.usefixtures("mock_camera_web_rtc") +async def test_websocket_web_rtc_offer( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test initiating a WebRTC stream with offer and answer.""" + client = await hass_ws_client(hass) + await client.send_json( + { + "id": 9, + "type": "camera/web_rtc_offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["id"] == 9 + assert response["type"] == TYPE_RESULT + assert response["success"] + assert response["result"]["answer"] == WEBRTC_ANSWER + + +@pytest.mark.usefixtures("mock_camera_web_rtc") +async def test_websocket_web_rtc_offer_invalid_entity( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test WebRTC with a camera entity that does not exist.""" + client = await hass_ws_client(hass) + await client.send_json( + { + "id": 9, + "type": "camera/web_rtc_offer", + "entity_id": "camera.does_not_exist", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["id"] == 9 + assert response["type"] == TYPE_RESULT + assert not response["success"] + + +@pytest.mark.usefixtures("mock_camera_web_rtc") +async def test_websocket_web_rtc_offer_missing_offer( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test WebRTC stream with missing required fields.""" + client = await hass_ws_client(hass) + await client.send_json( + { + "id": 9, + "type": "camera/web_rtc_offer", + "entity_id": "camera.demo_camera", + } + ) + 
response = await client.receive_json() + + assert response["id"] == 9 + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"]["code"] == "invalid_format" + + +@pytest.mark.usefixtures("mock_camera_web_rtc") +async def test_websocket_web_rtc_offer_failure( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test WebRTC stream that fails handling the offer.""" + client = await hass_ws_client(hass) + + with patch( + "homeassistant.components.camera.Camera.async_handle_web_rtc_offer", + side_effect=HomeAssistantError("offer failed"), + ): + await client.send_json( + { + "id": 9, + "type": "camera/web_rtc_offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["id"] == 9 + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"]["code"] == "web_rtc_offer_failed" + assert response["error"]["message"] == "offer failed" + + +@pytest.mark.usefixtures("mock_camera_web_rtc") +async def test_websocket_web_rtc_offer_timeout( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test WebRTC stream with timeout handling the offer.""" + client = await hass_ws_client(hass) + + with patch( + "homeassistant.components.camera.Camera.async_handle_web_rtc_offer", + side_effect=TimeoutError(), + ): + await client.send_json( + { + "id": 9, + "type": "camera/web_rtc_offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["id"] == 9 + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"]["code"] == "web_rtc_offer_failed" + assert response["error"]["message"] == "Timeout handling WebRTC offer" + + +@pytest.mark.usefixtures("mock_camera") +async def test_websocket_web_rtc_offer_invalid_stream_type( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test WebRTC initiating for a camera with a different stream_type.""" + client = await hass_ws_client(hass) + await client.send_json( + { + "id": 9, + "type": "camera/web_rtc_offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["id"] == 9 + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"]["code"] == "web_rtc_offer_failed" + + @pytest.mark.usefixtures("mock_camera") async def test_state_streaming(hass: HomeAssistant) -> None: """Camera state.""" demo_camera = hass.states.get("camera.demo_camera") assert demo_camera is not None - assert demo_camera.state == camera.CameraState.STREAMING + assert demo_camera.state == camera.STATE_STREAMING @pytest.mark.usefixtures("mock_camera", "mock_stream") @@ -741,7 +819,145 @@ async def test_stream_unavailable( demo_camera = hass.states.get("camera.demo_camera") assert demo_camera is not None - assert demo_camera.state == camera.CameraState.STREAMING + assert demo_camera.state == camera.STATE_STREAMING + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_rtsp_to_web_rtc_offer( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_rtsp_to_web_rtc: Mock, +) -> None: + """Test creating a web_rtc offer from an rstp provider.""" + client = await hass_ws_client(hass) + await client.send_json( + { + "id": 9, + "type": "camera/web_rtc_offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) 
+ response = await client.receive_json() + + assert response.get("id") == 9 + assert response.get("type") == TYPE_RESULT + assert response.get("success") + assert "result" in response + assert response["result"] == {"answer": WEBRTC_ANSWER} + + assert mock_rtsp_to_web_rtc.called + + +@pytest.mark.usefixtures( + "mock_camera", + "mock_hls_stream_source", # Not an RTSP stream source + "mock_rtsp_to_web_rtc", +) +async def test_unsupported_rtsp_to_web_rtc_stream_type( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test rtsp-to-webrtc is not registered for non-RTSP streams.""" + client = await hass_ws_client(hass) + await client.send_json( + { + "id": 10, + "type": "camera/web_rtc_offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response.get("id") == 10 + assert response.get("type") == TYPE_RESULT + assert "success" in response + assert not response["success"] + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_rtsp_to_web_rtc_provider_unregistered( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test creating a web_rtc offer from an rstp provider.""" + mock_provider = Mock(side_effect=provide_web_rtc_answer) + unsub = camera.async_register_rtsp_to_web_rtc_provider( + hass, "mock_domain", mock_provider + ) + + client = await hass_ws_client(hass) + + # Registered provider can handle the WebRTC offer + await client.send_json( + { + "id": 11, + "type": "camera/web_rtc_offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response["id"] == 11 + assert response["type"] == TYPE_RESULT + assert response["success"] + assert response["result"]["answer"] == WEBRTC_ANSWER + + assert mock_provider.called + mock_provider.reset_mock() + + # Unregister provider, then verify the WebRTC offer cannot be handled + unsub() + await client.send_json( + { + "id": 12, + "type": "camera/web_rtc_offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response.get("id") == 12 + assert response.get("type") == TYPE_RESULT + assert "success" in response + assert not response["success"] + + assert not mock_provider.called + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_rtsp_to_web_rtc_offer_not_accepted( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test a provider that can't satisfy the rtsp to webrtc offer.""" + + async def provide_none(stream_source: str, offer: str) -> str: + """Simulate a provider that can't accept the offer.""" + return None + + mock_provider = Mock(side_effect=provide_none) + unsub = camera.async_register_rtsp_to_web_rtc_provider( + hass, "mock_domain", mock_provider + ) + client = await hass_ws_client(hass) + + # Registered provider can handle the WebRTC offer + await client.send_json( + { + "id": 11, + "type": "camera/web_rtc_offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response["id"] == 11 + assert response.get("type") == TYPE_RESULT + assert "success" in response + assert not response["success"] + + assert mock_provider.called + + unsub() @pytest.mark.usefixtures("mock_camera") @@ -827,23 +1043,6 @@ def test_deprecated_stream_type_constants( ) -@pytest.mark.parametrize( - "enum", - list(camera.const.CameraState), -) -@pytest.mark.parametrize( - 
"module", - [camera], -) -def test_deprecated_state_constants( - caplog: pytest.LogCaptureFixture, - enum: camera.const.StreamType, - module: ModuleType, -) -> None: - """Test deprecated stream type constants.""" - import_and_test_deprecated_constant_enum(caplog, module, enum, "STATE_", "2025.10") - - @pytest.mark.parametrize( "entity_feature", list(camera.CameraEntityFeature), @@ -895,162 +1094,3 @@ async def test_entity_picture_url_changes_on_token_update(hass: HomeAssistant) - new_entity_picture = camera_state.attributes["entity_picture"] assert new_entity_picture != original_picture assert "token=" in new_entity_picture - - -async def _test_capabilities( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - entity_id: str, - expected_stream_types: set[StreamType], - expected_stream_types_with_webrtc_provider: set[StreamType], -) -> None: - """Test camera capabilities.""" - await async_setup_component(hass, "camera", {}) - await hass.async_block_till_done() - - async def test(expected_types: set[StreamType]) -> None: - camera_obj = get_camera_from_entity_id(hass, entity_id) - capabilities = camera_obj.camera_capabilities - assert capabilities == camera.CameraCapabilities(expected_types) - - # Request capabilities through WebSocket - client = await hass_ws_client(hass) - await client.send_json_auto_id( - {"type": "camera/capabilities", "entity_id": entity_id} - ) - msg = await client.receive_json() - - # Assert WebSocket response - assert msg["type"] == TYPE_RESULT - assert msg["success"] - assert msg["result"] == {"frontend_stream_types": ANY} - assert sorted(msg["result"]["frontend_stream_types"]) == sorted(expected_types) - - await test(expected_stream_types) - - # Test with WebRTC provider - - class SomeTestProvider(CameraWebRTCProvider): - """Test provider.""" - - @property - def domain(self) -> str: - """Return domain.""" - return "test" - - @callback - def async_is_supported(self, stream_source: str) -> bool: - """Determine if the provider supports the stream source.""" - return True - - async def async_handle_async_webrtc_offer( - self, - camera: Camera, - offer_sdp: str, - session_id: str, - send_message: WebRTCSendMessage, - ) -> None: - """Handle the WebRTC offer and return the answer via the provided callback.""" - send_message(WebRTCAnswer("answer")) - - async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate - ) -> None: - """Handle the WebRTC candidate.""" - - provider = SomeTestProvider() - async_register_webrtc_provider(hass, provider) - await hass.async_block_till_done() - await test(expected_stream_types_with_webrtc_provider) - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_camera_capabilities_hls( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test HLS camera capabilities.""" - await _test_capabilities( - hass, - hass_ws_client, - "camera.demo_camera", - {StreamType.HLS}, - {StreamType.HLS, StreamType.WEB_RTC}, - ) - - -@pytest.mark.usefixtures("mock_test_webrtc_cameras") -async def test_camera_capabilities_webrtc( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test WebRTC camera capabilities.""" - - await _test_capabilities( - hass, hass_ws_client, "camera.sync", {StreamType.WEB_RTC}, {StreamType.WEB_RTC} - ) - - -@pytest.mark.parametrize( - ("entity_id", "expect_native_async_webrtc"), - [("camera.sync", False), ("camera.async", True)], -) -@pytest.mark.usefixtures("mock_test_webrtc_cameras", "register_test_provider") -async def 
test_webrtc_provider_not_added_for_native_webrtc( - hass: HomeAssistant, entity_id: str, expect_native_async_webrtc: bool -) -> None: - """Test that a WebRTC provider is not added to a camera when the camera has native WebRTC support.""" - camera_obj = get_camera_from_entity_id(hass, entity_id) - assert camera_obj - assert camera_obj._webrtc_provider is None - assert camera_obj._supports_native_sync_webrtc is not expect_native_async_webrtc - assert camera_obj._supports_native_async_webrtc is expect_native_async_webrtc - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_camera_capabilities_changing_non_native_support( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test WebRTC camera capabilities.""" - cam = get_camera_from_entity_id(hass, "camera.demo_camera") - assert ( - cam.supported_features - == camera.CameraEntityFeature.ON_OFF | camera.CameraEntityFeature.STREAM - ) - - await _test_capabilities( - hass, - hass_ws_client, - cam.entity_id, - {StreamType.HLS}, - {StreamType.HLS, StreamType.WEB_RTC}, - ) - - cam._attr_supported_features = camera.CameraEntityFeature(0) - cam.async_write_ha_state() - await hass.async_block_till_done() - - await _test_capabilities(hass, hass_ws_client, cam.entity_id, set(), set()) - - -@pytest.mark.usefixtures("mock_test_webrtc_cameras") -@pytest.mark.parametrize(("entity_id"), ["camera.sync", "camera.async"]) -async def test_camera_capabilities_changing_native_support( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - entity_id: str, -) -> None: - """Test WebRTC camera capabilities.""" - cam = get_camera_from_entity_id(hass, entity_id) - assert cam.supported_features == camera.CameraEntityFeature.STREAM - - await _test_capabilities( - hass, hass_ws_client, cam.entity_id, {StreamType.WEB_RTC}, {StreamType.WEB_RTC} - ) - - cam._attr_supported_features = camera.CameraEntityFeature(0) - cam.async_write_ha_state() - await hass.async_block_till_done() - - await _test_capabilities(hass, hass_ws_client, cam.entity_id, set(), set()) diff --git a/tests/components/camera/test_media_source.py b/tests/components/camera/test_media_source.py index 85f876d4e81..0780ecc2a9c 100644 --- a/tests/components/camera/test_media_source.py +++ b/tests/components/camera/test_media_source.py @@ -65,8 +65,8 @@ async def test_browsing_mjpeg(hass: HomeAssistant) -> None: assert item.children[0].title == "Demo camera without stream" -@pytest.mark.usefixtures("mock_camera_webrtc") -async def test_browsing_webrtc(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_camera_web_rtc") +async def test_browsing_web_rtc(hass: HomeAssistant) -> None: """Test browsing WebRTC camera media source.""" # 3 cameras: # one only supports WebRTC (no stream source) diff --git a/tests/components/camera/test_significant_change.py b/tests/components/camera/test_significant_change.py index b89b1c26747..a2a7ef20e71 100644 --- a/tests/components/camera/test_significant_change.py +++ b/tests/components/camera/test_significant_change.py @@ -1,6 +1,6 @@ """Test the Camera significant change platform.""" -from homeassistant.components.camera import CameraState +from homeassistant.components.camera import STATE_IDLE, STATE_RECORDING from homeassistant.components.camera.significant_change import ( async_check_significant_change, ) @@ -10,11 +10,11 @@ async def test_significant_change() -> None: """Detect Camera significant changes.""" attrs = {} assert not async_check_significant_change( - None, CameraState.IDLE, attrs, 
CameraState.IDLE, attrs + None, STATE_IDLE, attrs, STATE_IDLE, attrs ) assert not async_check_significant_change( - None, CameraState.IDLE, attrs, CameraState.IDLE, {"dummy": "dummy"} + None, STATE_IDLE, attrs, STATE_IDLE, {"dummy": "dummy"} ) assert async_check_significant_change( - None, CameraState.IDLE, attrs, CameraState.RECORDING, attrs + None, STATE_IDLE, attrs, STATE_RECORDING, attrs ) diff --git a/tests/components/camera/test_webrtc.py b/tests/components/camera/test_webrtc.py deleted file mode 100644 index 29fb9d61c4e..00000000000 --- a/tests/components/camera/test_webrtc.py +++ /dev/null @@ -1,1217 +0,0 @@ -"""Test camera WebRTC.""" - -from collections.abc import AsyncGenerator, Generator -import logging -from typing import Any -from unittest.mock import AsyncMock, Mock, patch - -import pytest -from webrtc_models import RTCIceCandidate, RTCIceServer - -from homeassistant.components.camera import ( - DATA_ICE_SERVERS, - DOMAIN as CAMERA_DOMAIN, - Camera, - CameraEntityFeature, - CameraWebRTCProvider, - StreamType, - WebRTCAnswer, - WebRTCCandidate, - WebRTCError, - WebRTCMessage, - WebRTCSendMessage, - async_get_supported_legacy_provider, - async_register_ice_servers, - async_register_rtsp_to_web_rtc_provider, - async_register_webrtc_provider, - get_camera_from_entity_id, -) -from homeassistant.components.websocket_api import TYPE_RESULT -from homeassistant.config_entries import ConfigEntry, ConfigFlow -from homeassistant.core import HomeAssistant, callback -from homeassistant.core_config import async_process_ha_core_config -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from .common import STREAM_SOURCE, WEBRTC_ANSWER, SomeTestProvider - -from tests.common import ( - MockConfigEntry, - MockModule, - mock_config_flow, - mock_integration, - mock_platform, - setup_test_component_platform, -) -from tests.typing import WebSocketGenerator - -WEBRTC_OFFER = "v=0\r\n" -HLS_STREAM_SOURCE = "http://127.0.0.1/example.m3u" -TEST_INTEGRATION_DOMAIN = "test" - - -class Go2RTCProvider(SomeTestProvider): - """go2rtc provider.""" - - @property - def domain(self) -> str: - """Return the integration domain of the provider.""" - return "go2rtc" - - -class MockCamera(Camera): - """Mock Camera Entity.""" - - _attr_name = "Test" - _attr_supported_features: CameraEntityFeature = CameraEntityFeature.STREAM - _attr_frontend_stream_type: StreamType = StreamType.WEB_RTC - - def __init__(self) -> None: - """Initialize the mock entity.""" - super().__init__() - self._sync_answer: str | None | Exception = WEBRTC_ANSWER - - def set_sync_answer(self, value: str | None | Exception) -> None: - """Set sync offer answer.""" - self._sync_answer = value - - async def async_handle_web_rtc_offer(self, offer_sdp: str) -> str | None: - """Handle the WebRTC offer and return the answer.""" - if isinstance(self._sync_answer, Exception): - raise self._sync_answer - return self._sync_answer - - async def stream_source(self) -> str | None: - """Return the source of the stream. - - This is used by cameras with CameraEntityFeature.STREAM - and StreamType.HLS. 
- """ - return "rtsp://stream" - - -@pytest.fixture -async def init_test_integration( - hass: HomeAssistant, -) -> MockCamera: - """Initialize components.""" - - entry = MockConfigEntry(domain=TEST_INTEGRATION_DOMAIN) - entry.add_to_hass(hass) - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups( - config_entry, [CAMERA_DOMAIN] - ) - return True - - async def async_unload_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Unload test config entry.""" - await hass.config_entries.async_forward_entry_unload( - config_entry, CAMERA_DOMAIN - ) - return True - - mock_integration( - hass, - MockModule( - TEST_INTEGRATION_DOMAIN, - async_setup_entry=async_setup_entry_init, - async_unload_entry=async_unload_entry_init, - ), - ) - test_camera = MockCamera() - setup_test_component_platform( - hass, CAMERA_DOMAIN, [test_camera], from_config_entry=True - ) - mock_platform(hass, f"{TEST_INTEGRATION_DOMAIN}.config_flow", Mock()) - - with mock_config_flow(TEST_INTEGRATION_DOMAIN, ConfigFlow): - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - return test_camera - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_async_register_webrtc_provider( - hass: HomeAssistant, -) -> None: - """Test registering a WebRTC provider.""" - camera = get_camera_from_entity_id(hass, "camera.demo_camera") - assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} - - provider = SomeTestProvider() - unregister = async_register_webrtc_provider(hass, provider) - await hass.async_block_till_done() - - assert camera.camera_capabilities.frontend_stream_types == { - StreamType.HLS, - StreamType.WEB_RTC, - } - - # Mark stream as unsupported - provider._is_supported = False - # Manually refresh the provider - await camera.async_refresh_providers() - - assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} - - # Mark stream as supported - provider._is_supported = True - # Manually refresh the provider - await camera.async_refresh_providers() - assert camera.camera_capabilities.frontend_stream_types == { - StreamType.HLS, - StreamType.WEB_RTC, - } - - unregister() - await hass.async_block_till_done() - - assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_async_register_webrtc_provider_twice( - hass: HomeAssistant, - register_test_provider: SomeTestProvider, -) -> None: - """Test registering a WebRTC provider twice should raise.""" - with pytest.raises(ValueError, match="Provider already registered"): - async_register_webrtc_provider(hass, register_test_provider) - - -async def test_async_register_webrtc_provider_camera_not_loaded( - hass: HomeAssistant, -) -> None: - """Test registering a WebRTC provider when camera is not loaded.""" - with pytest.raises(ValueError, match="Unexpected state, camera not loaded"): - async_register_webrtc_provider(hass, SomeTestProvider()) - - -@pytest.mark.usefixtures("mock_test_webrtc_cameras") -async def test_async_register_ice_server( - hass: HomeAssistant, -) -> None: - """Test registering an ICE server.""" - # Clear any existing ICE servers - hass.data[DATA_ICE_SERVERS].clear() - - called = 0 - - @callback - def get_ice_servers() -> list[RTCIceServer]: - nonlocal called - called += 1 - return [ - 
RTCIceServer(urls="stun:example.com"), - RTCIceServer(urls="turn:example.com"), - ] - - unregister = async_register_ice_servers(hass, get_ice_servers) - assert not called - - camera = get_camera_from_entity_id(hass, "camera.async") - config = camera.async_get_webrtc_client_configuration() - - assert config.configuration.ice_servers == [ - RTCIceServer(urls="stun:example.com"), - RTCIceServer(urls="turn:example.com"), - ] - assert called == 1 - - # register another ICE server - called_2 = 0 - - @callback - def get_ice_servers_2() -> list[RTCIceServer]: - nonlocal called_2 - called_2 += 1 - return [ - RTCIceServer( - urls=["stun:example2.com", "turn:example2.com"], - username="user", - credential="pass", - ) - ] - - unregister_2 = async_register_ice_servers(hass, get_ice_servers_2) - - config = camera.async_get_webrtc_client_configuration() - assert config.configuration.ice_servers == [ - RTCIceServer(urls="stun:example.com"), - RTCIceServer(urls="turn:example.com"), - RTCIceServer( - urls=["stun:example2.com", "turn:example2.com"], - username="user", - credential="pass", - ), - ] - assert called == 2 - assert called_2 == 1 - - # unregister the first ICE server - - unregister() - - config = camera.async_get_webrtc_client_configuration() - assert config.configuration.ice_servers == [ - RTCIceServer( - urls=["stun:example2.com", "turn:example2.com"], - username="user", - credential="pass", - ), - ] - assert called == 2 - assert called_2 == 2 - - # unregister the second ICE server - unregister_2() - - config = camera.async_get_webrtc_client_configuration() - assert config.configuration.ice_servers == [] - - -@pytest.mark.usefixtures("mock_test_webrtc_cameras") -async def test_ws_get_client_config( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test get WebRTC client config.""" - await async_setup_component(hass, "camera", {}) - - client = await hass_ws_client(hass) - await client.send_json_auto_id( - {"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"} - ) - msg = await client.receive_json() - - # Assert WebSocket response - assert msg["type"] == TYPE_RESULT - assert msg["success"] - assert msg["result"] == { - "configuration": { - "iceServers": [ - { - "urls": [ - "stun:stun.home-assistant.io:80", - "stun:stun.home-assistant.io:3478", - ] - }, - ], - }, - "getCandidatesUpfront": False, - } - - @callback - def get_ice_server() -> list[RTCIceServer]: - return [ - RTCIceServer( - urls=["stun:example2.com", "turn:example2.com"], - username="user", - credential="pass", - ) - ] - - async_register_ice_servers(hass, get_ice_server) - - await client.send_json_auto_id( - {"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"} - ) - msg = await client.receive_json() - - # Assert WebSocket response - assert msg["type"] == TYPE_RESULT - assert msg["success"] - assert msg["result"] == { - "configuration": { - "iceServers": [ - { - "urls": [ - "stun:stun.home-assistant.io:80", - "stun:stun.home-assistant.io:3478", - ] - }, - { - "urls": ["stun:example2.com", "turn:example2.com"], - "username": "user", - "credential": "pass", - }, - ], - }, - "getCandidatesUpfront": False, - } - - -@pytest.mark.usefixtures("mock_test_webrtc_cameras") -async def test_ws_get_client_config_sync_offer( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test get WebRTC client config, when camera is supporting sync offer.""" - await async_setup_component(hass, "camera", {}) - await hass.async_block_till_done() - - client = await 
hass_ws_client(hass) - await client.send_json_auto_id( - {"type": "camera/webrtc/get_client_config", "entity_id": "camera.sync"} - ) - msg = await client.receive_json() - - # Assert WebSocket response - assert msg["type"] == TYPE_RESULT - assert msg["success"] - assert msg["result"] == { - "configuration": {}, - "getCandidatesUpfront": True, - } - - -@pytest.mark.usefixtures("mock_test_webrtc_cameras") -async def test_ws_get_client_config_custom_config( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test get WebRTC client config.""" - await async_process_ha_core_config( - hass, - {"webrtc": {"ice_servers": [{"url": "stun:custom_stun_server:3478"}]}}, - ) - - await async_setup_component(hass, "camera", {}) - - client = await hass_ws_client(hass) - await client.send_json_auto_id( - {"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"} - ) - msg = await client.receive_json() - - # Assert WebSocket response - assert msg["type"] == TYPE_RESULT - assert msg["success"] - assert msg["result"] == { - "configuration": {"iceServers": [{"urls": ["stun:custom_stun_server:3478"]}]}, - "getCandidatesUpfront": False, - } - - -@pytest.mark.usefixtures("mock_camera_hls") -async def test_ws_get_client_config_no_rtc_camera( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test get WebRTC client config.""" - await async_setup_component(hass, "camera", {}) - - client = await hass_ws_client(hass) - await client.send_json_auto_id( - {"type": "camera/webrtc/get_client_config", "entity_id": "camera.demo_camera"} - ) - msg = await client.receive_json() - - # Assert WebSocket response - assert msg["type"] == TYPE_RESULT - assert not msg["success"] - assert msg["error"] == { - "code": "webrtc_get_client_config_failed", - "message": "Camera does not support WebRTC, frontend_stream_type=hls", - } - - -async def provide_webrtc_answer(stream_source: str, offer: str, stream_id: str) -> str: - """Simulate an rtsp to webrtc provider.""" - assert stream_source == STREAM_SOURCE - assert offer == WEBRTC_OFFER - return WEBRTC_ANSWER - - -@pytest.fixture(name="mock_rtsp_to_webrtc") -def mock_rtsp_to_webrtc_fixture(hass: HomeAssistant) -> Generator[Mock]: - """Fixture that registers a mock rtsp to webrtc provider.""" - mock_provider = Mock(side_effect=provide_webrtc_answer) - unsub = async_register_rtsp_to_web_rtc_provider(hass, "mock_domain", mock_provider) - yield mock_provider - unsub() - - -@pytest.mark.usefixtures("mock_test_webrtc_cameras") -async def test_websocket_webrtc_offer( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test initiating a WebRTC stream with offer and answer.""" - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.async", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - - # Answer - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "answer", - "answer": WEBRTC_ANSWER, - } - - # Unsubscribe/Close session - await client.send_json_auto_id( - { - "type": "unsubscribe_events", - "subscription": subscription_id, 
- } - ) - msg = await client.receive_json() - assert msg["success"] - - -@pytest.mark.parametrize( - ("message", "expected_frontend_message"), - [ - ( - WebRTCCandidate(RTCIceCandidate("candidate")), - {"type": "candidate", "candidate": "candidate"}, - ), - ( - WebRTCError("webrtc_offer_failed", "error"), - {"type": "error", "code": "webrtc_offer_failed", "message": "error"}, - ), - (WebRTCAnswer("answer"), {"type": "answer", "answer": "answer"}), - ], - ids=["candidate", "error", "answer"], -) -@pytest.mark.usefixtures("mock_stream_source", "mock_camera") -async def test_websocket_webrtc_offer_webrtc_provider( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - register_test_provider: SomeTestProvider, - message: WebRTCMessage, - expected_frontend_message: dict[str, Any], -) -> None: - """Test initiating a WebRTC stream with a webrtc provider.""" - client = await hass_ws_client(hass) - with ( - patch.object( - register_test_provider, "async_handle_async_webrtc_offer", autospec=True - ) as mock_async_handle_async_webrtc_offer, - patch.object( - register_test_provider, "async_close_session", autospec=True - ) as mock_async_close_session, - ): - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - mock_async_handle_async_webrtc_offer.assert_called_once() - assert mock_async_handle_async_webrtc_offer.call_args[0][1] == WEBRTC_OFFER - send_message: WebRTCSendMessage = ( - mock_async_handle_async_webrtc_offer.call_args[0][3] - ) - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - session_id = response["event"]["session_id"] - - send_message(message) - - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == expected_frontend_message - - # Unsubscribe/Close session - await client.send_json_auto_id( - { - "type": "unsubscribe_events", - "subscription": subscription_id, - } - ) - msg = await client.receive_json() - assert msg["success"] - mock_async_close_session.assert_called_once_with(session_id) - - -async def test_websocket_webrtc_offer_invalid_entity( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test WebRTC with a camera entity that does not exist.""" - await async_setup_component(hass, "camera", {}) - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.does_not_exist", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["type"] == TYPE_RESULT - assert not response["success"] - assert response["error"] == { - "code": "home_assistant_error", - "message": "Camera not found", - } - - -@pytest.mark.usefixtures("mock_test_webrtc_cameras") -async def test_websocket_webrtc_offer_missing_offer( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test WebRTC stream with missing required fields.""" - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.demo_camera", - } - ) - response = await client.receive_json() - - assert response["type"] == TYPE_RESULT - assert not response["success"] - 
assert response["error"]["code"] == "invalid_format" - - -@pytest.mark.parametrize( - ("error", "expected_message"), - [ - (ValueError("value error"), "value error"), - (HomeAssistantError("offer failed"), "offer failed"), - (TimeoutError(), "Timeout handling WebRTC offer"), - ], -) -async def test_websocket_webrtc_offer_failure( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - init_test_integration: MockCamera, - error: Exception, - expected_message: str, -) -> None: - """Test WebRTC stream that fails handling the offer.""" - client = await hass_ws_client(hass) - init_test_integration.set_sync_answer(error) - - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.test", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - - # Error - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "error", - "code": "webrtc_offer_failed", - "message": expected_message, - } - - -async def test_websocket_webrtc_offer_sync( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - init_test_integration: MockCamera, -) -> None: - """Test sync WebRTC stream offer.""" - client = await hass_ws_client(hass) - init_test_integration.set_sync_answer(WEBRTC_ANSWER) - - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.test", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - - # Answer - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == {"type": "answer", "answer": WEBRTC_ANSWER} - - -async def test_websocket_webrtc_offer_sync_no_answer( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - caplog: pytest.LogCaptureFixture, - init_test_integration: MockCamera, -) -> None: - """Test sync WebRTC stream offer with no answer.""" - client = await hass_ws_client(hass) - init_test_integration.set_sync_answer(None) - - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.test", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - - # Answer - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "error", - "code": "webrtc_offer_failed", - "message": "No answer on WebRTC offer", - } - assert ( - "homeassistant.components.camera", - logging.ERROR, - "Error handling WebRTC offer: No answer", - ) in caplog.record_tuples - - 
-@pytest.mark.usefixtures("mock_camera") -async def test_websocket_webrtc_offer_invalid_stream_type( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test WebRTC initiating for a camera with a different stream_type.""" - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["type"] == TYPE_RESULT - assert not response["success"] - assert response["error"] == { - "code": "webrtc_offer_failed", - "message": "Camera does not support WebRTC, frontend_stream_type=hls", - } - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_rtsp_to_webrtc_offer( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - mock_rtsp_to_webrtc: Mock, -) -> None: - """Test creating a webrtc offer from an rstp provider.""" - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - - # Answer - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "answer", - "answer": WEBRTC_ANSWER, - } - - assert mock_rtsp_to_webrtc.called - - -@pytest.fixture(name="mock_hls_stream_source") -async def mock_hls_stream_source_fixture() -> AsyncGenerator[AsyncMock]: - """Fixture to create an HLS stream source.""" - with patch( - "homeassistant.components.camera.Camera.stream_source", - return_value=HLS_STREAM_SOURCE, - ) as mock_hls_stream_source: - yield mock_hls_stream_source - - -@pytest.mark.usefixtures( - "mock_camera", - "mock_hls_stream_source", # Not an RTSP stream source - "mock_camera_webrtc_frontendtype_only", -) -async def test_unsupported_rtsp_to_webrtc_stream_type( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test rtsp-to-webrtc is not registered for non-RTSP streams.""" - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - - # Answer - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "error", - "code": "webrtc_offer_failed", - "message": "Camera does not support WebRTC", - } - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_rtsp_to_webrtc_provider_unregistered( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test creating a webrtc offer from an rstp provider.""" - mock_provider = Mock(side_effect=provide_webrtc_answer) - unsub = async_register_rtsp_to_web_rtc_provider(hass, 
"mock_domain", mock_provider) - - client = await hass_ws_client(hass) - - # Registered provider can handle the WebRTC offer - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - - # Answer - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "answer", - "answer": WEBRTC_ANSWER, - } - - assert mock_provider.called - mock_provider.reset_mock() - - # Unregister provider, then verify the WebRTC offer cannot be handled - unsub() - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - assert response.get("type") == TYPE_RESULT - assert not response["success"] - assert response["error"] == { - "code": "webrtc_offer_failed", - "message": "Camera does not support WebRTC, frontend_stream_type=hls", - } - - assert not mock_provider.called - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_rtsp_to_webrtc_offer_not_accepted( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test a provider that can't satisfy the rtsp to webrtc offer.""" - - async def provide_none( - stream_source: str, offer: str, stream_id: str - ) -> str | None: - """Simulate a provider that can't accept the offer.""" - return None - - mock_provider = Mock(side_effect=provide_none) - unsub = async_register_rtsp_to_web_rtc_provider(hass, "mock_domain", mock_provider) - client = await hass_ws_client(hass) - - # Registered provider can handle the WebRTC offer - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - - # Answer - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "error", - "code": "webrtc_offer_failed", - "message": "Camera does not support WebRTC", - } - - assert mock_provider.called - - unsub() - - -@pytest.mark.usefixtures("mock_test_webrtc_cameras") -async def test_ws_webrtc_candidate( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test ws webrtc candidate command.""" - client = await hass_ws_client(hass) - session_id = "session_id" - candidate = "candidate" - with patch.object( - get_camera_from_entity_id(hass, "camera.async"), "async_on_webrtc_candidate" - ) as mock_on_webrtc_candidate: - await client.send_json_auto_id( - { - "type": "camera/webrtc/candidate", - "entity_id": "camera.async", - "session_id": session_id, - "candidate": candidate, - } - ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - 
mock_on_webrtc_candidate.assert_called_once_with( - session_id, RTCIceCandidate(candidate) - ) - - -@pytest.mark.usefixtures("mock_test_webrtc_cameras") -async def test_ws_webrtc_candidate_not_supported( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test ws webrtc candidate command is raising if not supported.""" - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "camera/webrtc/candidate", - "entity_id": "camera.sync", - "session_id": "session_id", - "candidate": "candidate", - } - ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert not response["success"] - assert response["error"] == { - "code": "home_assistant_error", - "message": "Cannot handle WebRTC candidate", - } - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_ws_webrtc_candidate_webrtc_provider( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - register_test_provider: SomeTestProvider, -) -> None: - """Test ws webrtc candidate command with WebRTC provider.""" - with patch.object( - register_test_provider, "async_on_webrtc_candidate" - ) as mock_on_webrtc_candidate: - client = await hass_ws_client(hass) - session_id = "session_id" - candidate = "candidate" - await client.send_json_auto_id( - { - "type": "camera/webrtc/candidate", - "entity_id": "camera.demo_camera", - "session_id": session_id, - "candidate": candidate, - } - ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - mock_on_webrtc_candidate.assert_called_once_with( - session_id, RTCIceCandidate(candidate) - ) - - -async def test_ws_webrtc_candidate_invalid_entity( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test ws WebRTC candidate command with a camera entity that does not exist.""" - await async_setup_component(hass, "camera", {}) - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "camera/webrtc/candidate", - "entity_id": "camera.does_not_exist", - "session_id": "session_id", - "candidate": "candidate", - } - ) - response = await client.receive_json() - - assert response["type"] == TYPE_RESULT - assert not response["success"] - assert response["error"] == { - "code": "home_assistant_error", - "message": "Camera not found", - } - - -@pytest.mark.usefixtures("mock_test_webrtc_cameras") -async def test_ws_webrtc_canidate_missing_candidate( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test ws WebRTC candidate command with missing required fields.""" - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "camera/webrtc/candidate", - "entity_id": "camera.async", - "session_id": "session_id", - } - ) - response = await client.receive_json() - - assert response["type"] == TYPE_RESULT - assert not response["success"] - assert response["error"]["code"] == "invalid_format" - - -@pytest.mark.usefixtures("mock_camera") -async def test_ws_webrtc_candidate_invalid_stream_type( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test ws WebRTC candidate command for a camera with a different stream_type.""" - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "camera/webrtc/candidate", - "entity_id": "camera.demo_camera", - "session_id": "session_id", - "candidate": "candidate", - } - ) - response = await client.receive_json() - - assert response["type"] == TYPE_RESULT - assert not 
response["success"] - assert response["error"] == { - "code": "webrtc_candidate_failed", - "message": "Camera does not support WebRTC, frontend_stream_type=hls", - } - - -async def test_webrtc_provider_optional_interface(hass: HomeAssistant) -> None: - """Test optional interface for WebRTC provider.""" - - class OnlyRequiredInterfaceProvider(CameraWebRTCProvider): - """Test provider.""" - - @property - def domain(self) -> str: - """Return the domain of the provider.""" - return "test" - - @callback - def async_is_supported(self, stream_source: str) -> bool: - """Determine if the provider supports the stream source.""" - return True - - async def async_handle_async_webrtc_offer( - self, - camera: Camera, - offer_sdp: str, - session_id: str, - send_message: WebRTCSendMessage, - ) -> None: - """Handle the WebRTC offer and return the answer via the provided callback. - - Return value determines if the offer was handled successfully. - """ - send_message(WebRTCAnswer(answer="answer")) - - async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate - ) -> None: - """Handle the WebRTC candidate.""" - - provider = OnlyRequiredInterfaceProvider() - # Call all interface methods - assert provider.async_is_supported("stream_source") is True - await provider.async_handle_async_webrtc_offer( - Mock(), "offer_sdp", "session_id", Mock() - ) - await provider.async_on_webrtc_candidate("session_id", RTCIceCandidate("candidate")) - provider.async_close_session("session_id") - - -@pytest.mark.usefixtures("mock_camera") -async def test_repair_issue_legacy_provider( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test repair issue created for legacy provider.""" - # Ensure no issue if no provider is registered - assert not issue_registry.async_get_issue( - "camera", "legacy_webrtc_provider_mock_domain" - ) - - # Register a legacy provider - legacy_provider = Mock(side_effect=provide_webrtc_answer) - unsub_legacy_provider = async_register_rtsp_to_web_rtc_provider( - hass, "mock_domain", legacy_provider - ) - await hass.async_block_till_done() - - # Ensure no issue if only legacy provider is registered - assert not issue_registry.async_get_issue( - "camera", "legacy_webrtc_provider_mock_domain" - ) - - provider = Go2RTCProvider() - unsub_go2rtc_provider = async_register_webrtc_provider(hass, provider) - await hass.async_block_till_done() - - # Ensure issue when legacy and builtin provider are registered - issue = issue_registry.async_get_issue( - "camera", "legacy_webrtc_provider_mock_domain" - ) - assert issue - assert issue.is_fixable is False - assert issue.is_persistent is False - assert issue.issue_domain == "mock_domain" - assert issue.learn_more_url == "https://www.home-assistant.io/integrations/go2rtc/" - assert issue.severity == ir.IssueSeverity.WARNING - assert issue.issue_id == "legacy_webrtc_provider_mock_domain" - assert issue.translation_key == "legacy_webrtc_provider" - assert issue.translation_placeholders == { - "legacy_integration": "mock_domain", - "builtin_integration": "go2rtc", - } - - unsub_legacy_provider() - unsub_go2rtc_provider() - - -@pytest.mark.usefixtures("mock_camera", "register_test_provider", "mock_rtsp_to_webrtc") -async def test_no_repair_issue_without_new_provider( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test repair issue not created if no go2rtc provider exists.""" - assert not issue_registry.async_get_issue( - "camera", "legacy_webrtc_provider_mock_domain" - ) - - 
-@pytest.mark.usefixtures("mock_camera", "mock_rtsp_to_webrtc") -async def test_registering_same_legacy_provider( - hass: HomeAssistant, -) -> None: - """Test registering the same legacy provider twice.""" - legacy_provider = Mock(side_effect=provide_webrtc_answer) - with pytest.raises(ValueError, match="Provider already registered"): - async_register_rtsp_to_web_rtc_provider(hass, "mock_domain", legacy_provider) - - -@pytest.mark.usefixtures("mock_hls_stream_source", "mock_camera", "mock_rtsp_to_webrtc") -async def test_get_not_supported_legacy_provider(hass: HomeAssistant) -> None: - """Test getting a not supported legacy provider.""" - camera = get_camera_from_entity_id(hass, "camera.demo_camera") - assert await async_get_supported_legacy_provider(hass, camera) is None diff --git a/tests/components/canary/conftest.py b/tests/components/canary/conftest.py index 07a3ce89495..583986fd483 100644 --- a/tests/components/canary/conftest.py +++ b/tests/components/canary/conftest.py @@ -1,10 +1,10 @@ """Define fixtures available for all tests.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch from canary.api import Api import pytest +from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/canary/test_alarm_control_panel.py b/tests/components/canary/test_alarm_control_panel.py index a194621b0d9..83e801d67c4 100644 --- a/tests/components/canary/test_alarm_control_panel.py +++ b/tests/components/canary/test_alarm_control_panel.py @@ -4,16 +4,17 @@ from unittest.mock import PropertyMock, patch from canary.const import LOCATION_MODE_AWAY, LOCATION_MODE_HOME, LOCATION_MODE_NIGHT -from homeassistant.components.alarm_control_panel import ( - DOMAIN as ALARM_DOMAIN, - AlarmControlPanelState, -) +from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN from homeassistant.components.canary import DOMAIN from homeassistant.const import ( SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_DISARMED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -66,7 +67,7 @@ async def test_alarm_control_panel( state = hass.states.get(entity_id) assert state - assert state.state == AlarmControlPanelState.DISARMED + assert state.state == STATE_ALARM_DISARMED assert state.attributes["private"] type(mocked_location).is_private = PropertyMock(return_value=False) @@ -81,7 +82,7 @@ async def test_alarm_control_panel( state = hass.states.get(entity_id) assert state - assert state.state == AlarmControlPanelState.ARMED_HOME + assert state.state == STATE_ALARM_ARMED_HOME # test armed away type(mocked_location).mode = PropertyMock( @@ -93,7 +94,7 @@ async def test_alarm_control_panel( state = hass.states.get(entity_id) assert state - assert state.state == AlarmControlPanelState.ARMED_AWAY + assert state.state == STATE_ALARM_ARMED_AWAY # test armed night type(mocked_location).mode = PropertyMock( @@ -105,7 +106,7 @@ async def test_alarm_control_panel( state = hass.states.get(entity_id) assert state - assert state.state == AlarmControlPanelState.ARMED_NIGHT + assert state.state == STATE_ALARM_ARMED_NIGHT async def test_alarm_control_panel_services(hass: HomeAssistant, canary) -> None: diff --git a/tests/components/cast/test_config_flow.py b/tests/components/cast/test_config_flow.py index 2dcf007c6d4..2c0c36d6632 100644 --- a/tests/components/cast/test_config_flow.py +++ 
b/tests/components/cast/test_config_flow.py @@ -148,7 +148,6 @@ def get_suggested(schema, key): if k.description is None or "suggested_value" not in k.description: return None return k.description["suggested_value"] - return None @pytest.mark.parametrize( @@ -250,7 +249,7 @@ async def test_option_flow(hass: HomeAssistant, parameter_data) -> None: user_input=user_input_dict, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == {} + assert result["data"] is None for other_param in advanced_parameters: if other_param == parameter: continue @@ -264,7 +263,7 @@ async def test_option_flow(hass: HomeAssistant, parameter_data) -> None: user_input={"known_hosts": ""}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == {} + assert result["data"] is None expected_data = {**orig_data, "known_hosts": []} if parameter in advanced_parameters: expected_data[parameter] = updated diff --git a/tests/components/cast/test_home_assistant_cast.py b/tests/components/cast/test_home_assistant_cast.py index 2fc348fd008..c9e311bb024 100644 --- a/tests/components/cast/test_home_assistant_cast.py +++ b/tests/components/cast/test_home_assistant_cast.py @@ -5,8 +5,8 @@ from unittest.mock import patch import pytest from homeassistant.components.cast import DOMAIN, home_assistant_cast +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry, async_mock_signal diff --git a/tests/components/cast/test_media_player.py b/tests/components/cast/test_media_player.py index b2ce60e9393..1d99adb4723 100644 --- a/tests/components/cast/test_media_player.py +++ b/tests/components/cast/test_media_player.py @@ -3,9 +3,7 @@ from __future__ import annotations import asyncio -from collections.abc import Callable import json -from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch from uuid import UUID @@ -27,13 +25,13 @@ from homeassistant.components.media_player import ( MediaClass, MediaPlayerEntityFeature, ) +from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ENTITY_ID, CAST_APP_ID_HOMEASSISTANT_LOVELACE, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er, network from homeassistant.helpers.dispatcher import ( @@ -114,9 +112,7 @@ def get_fake_zconf(host="192.168.178.42", port=8009): return zconf -async def async_setup_cast( - hass: HomeAssistant, config: dict[str, Any] | None = None -) -> MagicMock: +async def async_setup_cast(hass, config=None): """Set up the cast platform.""" if config is None: config = {} @@ -132,20 +128,7 @@ async def async_setup_cast( return add_entities -async def async_setup_cast_internal_discovery( - hass: HomeAssistant, config: dict[str, Any] | None = None -) -> tuple[ - Callable[ - [ - pychromecast.discovery.HostServiceInfo - | pychromecast.discovery.MDNSServiceInfo, - ChromecastInfo, - ], - None, - ], - Callable[[str, ChromecastInfo], None], - MagicMock, -]: +async def async_setup_cast_internal_discovery(hass, config=None): """Set up the cast platform and the discovery.""" browser = MagicMock(devices={}, zc={}) diff --git 
a/tests/components/ccm15/conftest.py b/tests/components/ccm15/conftest.py index e393b2679b6..d6cc66d77dc 100644 --- a/tests/components/ccm15/conftest.py +++ b/tests/components/ccm15/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Midea ccm15 AC Controller tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch from ccm15 import CCM15DeviceState, CCM15SlaveDevice import pytest +from typing_extensions import Generator @pytest.fixture @@ -17,7 +17,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def ccm15_device() -> Generator[None]: +def ccm15_device() -> Generator[AsyncMock]: """Mock ccm15 device.""" ccm15_devices = { 0: CCM15SlaveDevice(bytes.fromhex("000000b0b8001b")), @@ -32,7 +32,7 @@ def ccm15_device() -> Generator[None]: @pytest.fixture -def network_failure_ccm15_device() -> Generator[None]: +def network_failure_ccm15_device() -> Generator[AsyncMock]: """Mock empty set of ccm15 device.""" device_state = CCM15DeviceState(devices={}) with patch( diff --git a/tests/components/ccm15/test_climate.py b/tests/components/ccm15/test_climate.py index 785cb17c6a9..329caafd11c 100644 --- a/tests/components/ccm15/test_climate.py +++ b/tests/components/ccm15/test_climate.py @@ -1,11 +1,10 @@ """Unit test for CCM15 coordinator component.""" from datetime import timedelta -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from ccm15 import CCM15DeviceState from freezegun.api import FrozenDateTimeFactory -import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.ccm15.const import DOMAIN @@ -28,11 +27,11 @@ from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry, async_fire_time_changed -@pytest.mark.usefixtures("ccm15_device") async def test_climate_state( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, + ccm15_device: AsyncMock, freezer: FrozenDateTimeFactory, ) -> None: """Test the coordinator.""" diff --git a/tests/components/ccm15/test_diagnostics.py b/tests/components/ccm15/test_diagnostics.py index f6f0d75c4e3..a433591d86e 100644 --- a/tests/components/ccm15/test_diagnostics.py +++ b/tests/components/ccm15/test_diagnostics.py @@ -1,6 +1,7 @@ """Test CCM15 diagnostics.""" -import pytest +from unittest.mock import AsyncMock + from syrupy import SnapshotAssertion from homeassistant.components.ccm15.const import DOMAIN @@ -12,10 +13,10 @@ from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator -@pytest.mark.usefixtures("ccm15_device") async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, + ccm15_device: AsyncMock, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" diff --git a/tests/components/ccm15/test_init.py b/tests/components/ccm15/test_init.py index 0fb75920ad3..3069b61f10f 100644 --- a/tests/components/ccm15/test_init.py +++ b/tests/components/ccm15/test_init.py @@ -1,6 +1,6 @@ """Tests for the ccm15 component.""" -import pytest +from unittest.mock import AsyncMock from homeassistant.components.ccm15.const import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -10,8 +10,7 @@ from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry -@pytest.mark.usefixtures("ccm15_device") -async def test_load_unload(hass: HomeAssistant) -> None: +async def test_load_unload(hass: HomeAssistant, ccm15_device: AsyncMock) -> None: 
"""Test options flow.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/cert_expiry/conftest.py b/tests/components/cert_expiry/conftest.py index 4932e9e1869..2a86c669970 100644 --- a/tests/components/cert_expiry/conftest.py +++ b/tests/components/cert_expiry/conftest.py @@ -1,9 +1,9 @@ """Configuration for cert_expiry tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/chacon_dio/__init__.py b/tests/components/chacon_dio/__init__.py deleted file mode 100644 index 2a340097eb2..00000000000 --- a/tests/components/chacon_dio/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for the Chacon Dio integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/chacon_dio/conftest.py b/tests/components/chacon_dio/conftest.py deleted file mode 100644 index 186bc468bee..00000000000 --- a/tests/components/chacon_dio/conftest.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Common fixtures for the chacon_dio tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.chacon_dio.const import DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME - -from tests.common import MockConfigEntry - -MOCK_COVER_DEVICE = { - "L4HActuator_idmock1": { - "id": "L4HActuator_idmock1", - "name": "Shutter mock 1", - "type": "SHUTTER", - "model": "CERSwd-3B_1.0.6", - "connected": True, - "openlevel": 75, - "movement": "stop", - } -} - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.chacon_dio.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock the config entry.""" - return MockConfigEntry( - domain=DOMAIN, - unique_id="test_entry_unique_id", - data={ - CONF_USERNAME: "dummylogin", - CONF_PASSWORD: "dummypass", - }, - ) - - -@pytest.fixture -def mock_dio_chacon_client() -> Generator[AsyncMock]: - """Mock a Dio Chacon client.""" - - with ( - patch( - "homeassistant.components.chacon_dio.DIOChaconAPIClient", - autospec=True, - ) as mock_client, - patch( - "homeassistant.components.chacon_dio.config_flow.DIOChaconAPIClient", - new=mock_client, - ), - ): - client = mock_client.return_value - - # Default values for the tests using this mock : - client.get_user_id.return_value = "dummy-user-id" - client.search_all_devices.return_value = MOCK_COVER_DEVICE - - client.switch_switch.return_value = {} - - client.move_shutter_direction.return_value = {} - client.disconnect.return_value = {} - - yield client diff --git a/tests/components/chacon_dio/snapshots/test_cover.ambr b/tests/components/chacon_dio/snapshots/test_cover.ambr deleted file mode 100644 index b2febe20070..00000000000 --- a/tests/components/chacon_dio/snapshots/test_cover.ambr +++ /dev/null @@ -1,50 +0,0 @@ -# serializer version: 1 -# name: test_entities[cover.shutter_mock_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.shutter_mock_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'chacon_dio', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'L4HActuator_idmock1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[cover.shutter_mock_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_position': 75, - 'device_class': 'shutter', - 'friendly_name': 'Shutter mock 1', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.shutter_mock_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- diff --git a/tests/components/chacon_dio/snapshots/test_switch.ambr b/tests/components/chacon_dio/snapshots/test_switch.ambr deleted file mode 100644 index 7a65dad5445..00000000000 --- a/tests/components/chacon_dio/snapshots/test_switch.ambr +++ /dev/null @@ -1,48 +0,0 @@ -# serializer version: 1 -# name: test_entities[switch.switch_mock_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.switch_mock_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'chacon_dio', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'L4HActuator_idmock1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[switch.switch_mock_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Switch mock 1', - }), - 'context': , - 'entity_id': 'switch.switch_mock_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/chacon_dio/test_config_flow.py b/tests/components/chacon_dio/test_config_flow.py deleted file mode 100644 index d72b5a7dec3..00000000000 --- a/tests/components/chacon_dio/test_config_flow.py +++ /dev/null @@ -1,122 +0,0 @@ -"""Test the chacon_dio config flow.""" - -from unittest.mock import AsyncMock - -from dio_chacon_wifi_api.exceptions import DIOChaconAPIError, DIOChaconInvalidAuthError -import pytest - -from homeassistant.components.chacon_dio.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_full_flow( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_dio_chacon_client: AsyncMock -) -> None: - """Test the full flow.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert not result["errors"] - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": 
SOURCE_USER}, - data={ - CONF_USERNAME: "dummylogin", - CONF_PASSWORD: "dummypass", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Chacon DiO dummylogin" - assert result["result"].unique_id == "dummy-user-id" - assert result["data"] == { - CONF_USERNAME: "dummylogin", - CONF_PASSWORD: "dummypass", - } - - -@pytest.mark.parametrize( - ("exception", "expected"), - [ - (Exception("Bad request Boy :) --"), {"base": "unknown"}), - (DIOChaconInvalidAuthError, {"base": "invalid_auth"}), - (DIOChaconAPIError, {"base": "cannot_connect"}), - ], -) -async def test_errors( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_dio_chacon_client: AsyncMock, - exception: Exception, - expected: dict[str, str], -) -> None: - """Test we handle any error.""" - mock_dio_chacon_client.get_user_id.side_effect = exception - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={ - CONF_USERNAME: "nada", - CONF_PASSWORD: "nadap", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == expected - - # Test of recover in normal state after correction of the 1st error - mock_dio_chacon_client.get_user_id.side_effect = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "dummylogin", - CONF_PASSWORD: "dummypass", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Chacon DiO dummylogin" - assert result["result"].unique_id == "dummy-user-id" - assert result["data"] == { - CONF_USERNAME: "dummylogin", - CONF_PASSWORD: "dummypass", - } - - -async def test_duplicate_entry( - hass: HomeAssistant, - mock_dio_chacon_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test abort when setting up duplicate entry.""" - - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert not result["errors"] - - mock_dio_chacon_client.get_user_id.return_value = "test_entry_unique_id" - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "dummylogin", - CONF_PASSWORD: "dummypass", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/chacon_dio/test_cover.py b/tests/components/chacon_dio/test_cover.py deleted file mode 100644 index 9e9f403ed0b..00000000000 --- a/tests/components/chacon_dio/test_cover.py +++ /dev/null @@ -1,189 +0,0 @@ -"""Test the Chacon Dio cover.""" - -from collections.abc import Callable -from unittest.mock import AsyncMock - -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.cover import ( - ATTR_CURRENT_POSITION, - ATTR_POSITION, - DOMAIN as COVER_DOMAIN, - SERVICE_CLOSE_COVER, - SERVICE_OPEN_COVER, - SERVICE_SET_COVER_POSITION, - SERVICE_STOP_COVER, - CoverState, -) -from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.setup import async_setup_component - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - -COVER_ENTITY_ID = "cover.shutter_mock_1" - - -async def test_entities( - hass: HomeAssistant, - mock_dio_chacon_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the creation and values of the Chacon Dio covers.""" - - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_update( - hass: HomeAssistant, - mock_dio_chacon_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the creation and values of the Chacon Dio covers.""" - - await setup_integration(hass, mock_config_entry) - - mock_dio_chacon_client.get_status_details.return_value = { - "L4HActuator_idmock1": { - "id": "L4HActuator_idmock1", - "connected": True, - "openlevel": 51, - "movement": "stop", - } - } - - await async_setup_component(hass, HOMEASSISTANT_DOMAIN, {}) - await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: COVER_ENTITY_ID}, - blocking=True, - ) - - state = hass.states.get(COVER_ENTITY_ID) - assert state - assert state.attributes.get(ATTR_CURRENT_POSITION) == 51 - assert state.state == CoverState.OPEN - - -async def test_cover_actions( - hass: HomeAssistant, - mock_dio_chacon_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the creation and values of the Chacon Dio covers.""" - - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_ENTITY_ID}, - blocking=True, - ) - await hass.async_block_till_done() - state = hass.states.get(COVER_ENTITY_ID) - assert state.state == CoverState.CLOSING - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: COVER_ENTITY_ID}, - blocking=True, - ) - await hass.async_block_till_done() - state = hass.states.get(COVER_ENTITY_ID) - assert state.state == CoverState.OPEN - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: COVER_ENTITY_ID}, - blocking=True, - ) - await hass.async_block_till_done() - state = hass.states.get(COVER_ENTITY_ID) - assert state.state == CoverState.OPENING - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_SET_COVER_POSITION, - {ATTR_POSITION: 25, ATTR_ENTITY_ID: COVER_ENTITY_ID}, - blocking=True, - ) - await hass.async_block_till_done() - state = hass.states.get(COVER_ENTITY_ID) - assert state.state == CoverState.OPENING - - -async def test_cover_callbacks( - hass: HomeAssistant, - mock_dio_chacon_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test the callbacks on the Chacon Dio covers.""" - - await setup_integration(hass, mock_config_entry) - - # Server side callback tests - # We find the callback method on the mock client - callback_device_state_function: Callable = ( - mock_dio_chacon_client.set_callback_device_state_by_device.call_args[0][1] - ) - - # Define a method to simply call it - async def _callback_device_state_function(open_level: int, movement: str) -> None: - callback_device_state_function( - { - "id": "L4HActuator_idmock1", - "connected": True, - "openlevel": open_level, - "movement": movement, - } - ) - await hass.async_block_till_done() - - # And 
call it to effectively launch the callback as the server would do - await _callback_device_state_function(79, "stop") - state = hass.states.get(COVER_ENTITY_ID) - assert state - assert state.attributes.get(ATTR_CURRENT_POSITION) == 79 - assert state.state == CoverState.OPEN - - await _callback_device_state_function(90, "up") - state = hass.states.get(COVER_ENTITY_ID) - assert state - assert state.attributes.get(ATTR_CURRENT_POSITION) == 90 - assert state.state == CoverState.OPENING - - await _callback_device_state_function(60, "down") - state = hass.states.get(COVER_ENTITY_ID) - assert state - assert state.attributes.get(ATTR_CURRENT_POSITION) == 60 - assert state.state == CoverState.CLOSING - - -async def test_no_cover_found( - hass: HomeAssistant, - mock_dio_chacon_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test the cover absence.""" - - mock_dio_chacon_client.search_all_devices.return_value = None - - await setup_integration(hass, mock_config_entry) - - assert not hass.states.get(COVER_ENTITY_ID) diff --git a/tests/components/chacon_dio/test_init.py b/tests/components/chacon_dio/test_init.py deleted file mode 100644 index 78f1a85c71a..00000000000 --- a/tests/components/chacon_dio/test_init.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Test the Dio Chacon Cover init.""" - -from unittest.mock import AsyncMock - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import EVENT_HOMEASSISTANT_STOP -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry - - -async def test_cover_unload_entry( - hass: HomeAssistant, - mock_dio_chacon_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the creation and values of the Dio Chacon covers.""" - - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED - mock_dio_chacon_client.disconnect.assert_called() - - -async def test_cover_shutdown_event( - hass: HomeAssistant, - mock_dio_chacon_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the creation and values of the Dio Chacon covers.""" - - await setup_integration(hass, mock_config_entry) - - hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) - await hass.async_block_till_done() - mock_dio_chacon_client.disconnect.assert_called() diff --git a/tests/components/chacon_dio/test_switch.py b/tests/components/chacon_dio/test_switch.py deleted file mode 100644 index a5ad0d0ea13..00000000000 --- a/tests/components/chacon_dio/test_switch.py +++ /dev/null @@ -1,132 +0,0 @@ -"""Test the Chacon Dio switch.""" - -from collections.abc import Callable -from unittest.mock import AsyncMock - -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.switch import ( - DOMAIN as SWITCH_DOMAIN, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, -) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - -SWITCH_ENTITY_ID = "switch.switch_mock_1" - -MOCK_SWITCH_DEVICE = { - "L4HActuator_idmock1": { - "id": "L4HActuator_idmock1", - "name": "Switch mock 1", - "type": "SWITCH_LIGHT", - "model": "CERNwd-3B_1.0.6", - "connected": True, - "is_on": True, - } -} - - -async def test_entities( - hass: HomeAssistant, - mock_dio_chacon_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the creation and values of the Chacon Dio switches.""" - - mock_dio_chacon_client.search_all_devices.return_value = MOCK_SWITCH_DEVICE - - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_switch_actions( - hass: HomeAssistant, - mock_dio_chacon_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the actions on the Chacon Dio switch.""" - - mock_dio_chacon_client.search_all_devices.return_value = MOCK_SWITCH_DEVICE - - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, - blocking=True, - ) - state = hass.states.get(SWITCH_ENTITY_ID) - assert state.state == STATE_ON - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, - blocking=True, - ) - state = hass.states.get(SWITCH_ENTITY_ID) - # turn off does not change directly the state, it is made by a server side callback. - assert state.state == STATE_ON - - -async def test_switch_callbacks( - hass: HomeAssistant, - mock_dio_chacon_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test the callbacks on the Chacon Dio switches.""" - - mock_dio_chacon_client.search_all_devices.return_value = MOCK_SWITCH_DEVICE - - await setup_integration(hass, mock_config_entry) - - # Server side callback tests - # We find the callback method on the mock client - callback_device_state_function: Callable = ( - mock_dio_chacon_client.set_callback_device_state_by_device.call_args[0][1] - ) - - # Define a method to simply call it - async def _callback_device_state_function(is_on: bool) -> None: - callback_device_state_function( - { - "id": "L4HActuator_idmock1", - "connected": True, - "is_on": is_on, - } - ) - await hass.async_block_till_done() - - # And call it to effectively launch the callback as the server would do - await _callback_device_state_function(False) - state = hass.states.get(SWITCH_ENTITY_ID) - assert state - assert state.state == STATE_OFF - - -async def test_no_switch_found( - hass: HomeAssistant, - mock_dio_chacon_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test the switch absence.""" - - mock_dio_chacon_client.search_all_devices.return_value = None - - await setup_integration(hass, mock_config_entry) - - assert not hass.states.async_entity_ids(SWITCH_DOMAIN) diff --git a/tests/components/clicksend_tts/test_notify.py b/tests/components/clicksend_tts/test_notify.py index 892d7541354..e73f0576d9e 100644 --- a/tests/components/clicksend_tts/test_notify.py +++ b/tests/components/clicksend_tts/test_notify.py @@ -46,7 +46,7 @@ def mock_clicksend_tts_notify(): yield ns -async def setup_notify(hass: HomeAssistant) 
-> None: +async def setup_notify(hass): """Test setup.""" with assert_setup_component(1, notify.DOMAIN) as config: assert await async_setup_component(hass, notify.DOMAIN, CONFIG) diff --git a/tests/components/climate/common.py b/tests/components/climate/common.py index d6aedd23671..c890d3a7bb5 100644 --- a/tests/components/climate/common.py +++ b/tests/components/climate/common.py @@ -23,7 +23,6 @@ from homeassistant.components.climate import ( SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, ) -from homeassistant.components.climate.const import HVACMode from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, @@ -31,13 +30,10 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass -async def async_set_preset_mode( - hass: HomeAssistant, preset_mode: str, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_set_preset_mode(hass, preset_mode, entity_id=ENTITY_MATCH_ALL): """Set new preset mode.""" data = {ATTR_PRESET_MODE: preset_mode} @@ -48,9 +44,7 @@ async def async_set_preset_mode( @bind_hass -def set_preset_mode( - hass: HomeAssistant, preset_mode: str, entity_id: str = ENTITY_MATCH_ALL -) -> None: +def set_preset_mode(hass, preset_mode, entity_id=ENTITY_MATCH_ALL): """Set new preset mode.""" data = {ATTR_PRESET_MODE: preset_mode} @@ -60,9 +54,7 @@ def set_preset_mode( hass.services.call(DOMAIN, SERVICE_SET_PRESET_MODE, data) -async def async_set_aux_heat( - hass: HomeAssistant, aux_heat: bool, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_set_aux_heat(hass, aux_heat, entity_id=ENTITY_MATCH_ALL): """Turn all or specified climate devices auxiliary heater on.""" data = {ATTR_AUX_HEAT: aux_heat} @@ -73,9 +65,7 @@ async def async_set_aux_heat( @bind_hass -def set_aux_heat( - hass: HomeAssistant, aux_heat: bool, entity_id: str = ENTITY_MATCH_ALL -) -> None: +def set_aux_heat(hass, aux_heat, entity_id=ENTITY_MATCH_ALL): """Turn all or specified climate devices auxiliary heater on.""" data = {ATTR_AUX_HEAT: aux_heat} @@ -86,13 +76,13 @@ def set_aux_heat( async def async_set_temperature( - hass: HomeAssistant, - temperature: float | None = None, - entity_id: str = ENTITY_MATCH_ALL, - target_temp_high: float | None = None, - target_temp_low: float | None = None, - hvac_mode: HVACMode | None = None, -) -> None: + hass, + temperature=None, + entity_id=ENTITY_MATCH_ALL, + target_temp_high=None, + target_temp_low=None, + hvac_mode=None, +): """Set new target temperature.""" kwargs = { key: value @@ -113,13 +103,13 @@ async def async_set_temperature( @bind_hass def set_temperature( - hass: HomeAssistant, - temperature: float | None = None, - entity_id: str = ENTITY_MATCH_ALL, - target_temp_high: float | None = None, - target_temp_low: float | None = None, - hvac_mode: HVACMode | None = None, -) -> None: + hass, + temperature=None, + entity_id=ENTITY_MATCH_ALL, + target_temp_high=None, + target_temp_low=None, + hvac_mode=None, +): """Set new target temperature.""" kwargs = { key: value @@ -136,9 +126,7 @@ def set_temperature( hass.services.call(DOMAIN, SERVICE_SET_TEMPERATURE, kwargs) -async def async_set_humidity( - hass: HomeAssistant, humidity: int, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_set_humidity(hass, humidity, entity_id=ENTITY_MATCH_ALL): """Set new target humidity.""" data = {ATTR_HUMIDITY: humidity} @@ -149,9 +137,7 @@ async def async_set_humidity( @bind_hass -def set_humidity( - hass: HomeAssistant, humidity: int, 
entity_id: str = ENTITY_MATCH_ALL -) -> None: +def set_humidity(hass, humidity, entity_id=ENTITY_MATCH_ALL): """Set new target humidity.""" data = {ATTR_HUMIDITY: humidity} @@ -161,9 +147,7 @@ def set_humidity( hass.services.call(DOMAIN, SERVICE_SET_HUMIDITY, data) -async def async_set_fan_mode( - hass: HomeAssistant, fan: str, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_set_fan_mode(hass, fan, entity_id=ENTITY_MATCH_ALL): """Set all or specified climate devices fan mode on.""" data = {ATTR_FAN_MODE: fan} @@ -174,9 +158,7 @@ async def async_set_fan_mode( @bind_hass -def set_fan_mode( - hass: HomeAssistant, fan: str, entity_id: str = ENTITY_MATCH_ALL -) -> None: +def set_fan_mode(hass, fan, entity_id=ENTITY_MATCH_ALL): """Set all or specified climate devices fan mode on.""" data = {ATTR_FAN_MODE: fan} @@ -186,9 +168,7 @@ def set_fan_mode( hass.services.call(DOMAIN, SERVICE_SET_FAN_MODE, data) -async def async_set_hvac_mode( - hass: HomeAssistant, hvac_mode: HVACMode, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_set_hvac_mode(hass, hvac_mode, entity_id=ENTITY_MATCH_ALL): """Set new target operation mode.""" data = {ATTR_HVAC_MODE: hvac_mode} @@ -199,9 +179,7 @@ async def async_set_hvac_mode( @bind_hass -def set_operation_mode( - hass: HomeAssistant, hvac_mode: HVACMode, entity_id: str = ENTITY_MATCH_ALL -) -> None: +def set_operation_mode(hass, hvac_mode, entity_id=ENTITY_MATCH_ALL): """Set new target operation mode.""" data = {ATTR_HVAC_MODE: hvac_mode} @@ -211,9 +189,7 @@ def set_operation_mode( hass.services.call(DOMAIN, SERVICE_SET_HVAC_MODE, data) -async def async_set_swing_mode( - hass: HomeAssistant, swing_mode: str, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_set_swing_mode(hass, swing_mode, entity_id=ENTITY_MATCH_ALL): """Set new target swing mode.""" data = {ATTR_SWING_MODE: swing_mode} @@ -224,9 +200,7 @@ async def async_set_swing_mode( @bind_hass -def set_swing_mode( - hass: HomeAssistant, swing_mode: str, entity_id: str = ENTITY_MATCH_ALL -) -> None: +def set_swing_mode(hass, swing_mode, entity_id=ENTITY_MATCH_ALL): """Set new target swing mode.""" data = {ATTR_SWING_MODE: swing_mode} @@ -236,7 +210,7 @@ def set_swing_mode( hass.services.call(DOMAIN, SERVICE_SET_SWING_MODE, data) -async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): """Turn on device.""" data = {} @@ -246,9 +220,7 @@ async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) -async def async_turn_off( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL): """Turn off device.""" data = {} diff --git a/tests/components/climate/conftest.py b/tests/components/climate/conftest.py index 4ade8606e77..a3a6af6e8a3 100644 --- a/tests/components/climate/conftest.py +++ b/tests/components/climate/conftest.py @@ -1,21 +1,12 @@ """Fixtures for Climate platform tests.""" -from collections.abc import Generator - import pytest +from typing_extensions import Generator -from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN -from homeassistant.config_entries import ConfigEntry, ConfigFlow -from homeassistant.const import Platform +from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant -from tests.common import ( - MockConfigEntry, - MockModule, - 
mock_config_flow, - mock_integration, - mock_platform, -) +from tests.common import mock_config_flow, mock_platform class MockFlow(ConfigFlow): @@ -29,41 +20,3 @@ def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: with mock_config_flow("test", MockFlow): yield - - -@pytest.fixture -def register_test_integration( - hass: HomeAssistant, config_flow_fixture: None -) -> Generator: - """Provide a mocked integration for tests.""" - - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - - async def help_async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups( - config_entry, [CLIMATE_DOMAIN] - ) - return True - - async def help_async_unload_entry( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Unload test config emntry.""" - return await hass.config_entries.async_unload_platforms( - config_entry, [Platform.CLIMATE] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=help_async_setup_entry_init, - async_unload_entry=help_async_unload_entry, - ), - ) - - return config_entry diff --git a/tests/components/climate/test_device_condition.py b/tests/components/climate/test_device_condition.py index 16595f57c6f..0961bd3dc73 100644 --- a/tests/components/climate/test_device_condition.py +++ b/tests/components/climate/test_device_condition.py @@ -17,7 +17,11 @@ from homeassistant.helpers import ( from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -25,6 +29,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.mark.parametrize( ("set_state", "features_reg", "features_state", "expected_condition_types"), [ @@ -141,7 +151,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -210,7 +220,7 @@ async def test_if_state( # Should not fire, entity doesn't exist yet hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set( entry.entity_id, @@ -222,8 +232,8 @@ async def test_if_state( hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_hvac_mode - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_hvac_mode - event - test_event1" hass.states.async_set( entry.entity_id, @@ -236,13 +246,13 @@ async def test_if_state( # Should not fire hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert 
service_calls[1].data["some"] == "is_preset_mode - event - test_event2" + assert len(calls) == 2 + assert calls[1].data["some"] == "is_preset_mode - event - test_event2" hass.states.async_set( entry.entity_id, @@ -255,14 +265,14 @@ async def test_if_state( # Should not fire hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -313,8 +323,8 @@ async def test_if_state_legacy( hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_hvac_mode - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_hvac_mode - event - test_event1" @pytest.mark.parametrize( diff --git a/tests/components/climate/test_device_trigger.py b/tests/components/climate/test_device_trigger.py index a492d9805b5..e8e5b577bf4 100644 --- a/tests/components/climate/test_device_trigger.py +++ b/tests/components/climate/test_device_trigger.py @@ -23,7 +23,11 @@ from homeassistant.helpers import ( from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -31,6 +35,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -141,7 +151,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -226,8 +236,8 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "hvac_mode_changed" + assert len(calls) == 1 + assert calls[0].data["some"] == "hvac_mode_changed" # Fake that the temperature is changing hass.states.async_set( @@ -240,8 +250,8 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "current_temperature_changed" + assert len(calls) == 2 + assert calls[1].data["some"] == "current_temperature_changed" # Fake that the humidity is changing hass.states.async_set( @@ -254,15 +264,15 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[2].data["some"] == "current_humidity_changed" + assert len(calls) == 3 + assert calls[2].data["some"] == "current_humidity_changed" async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: 
dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -319,8 +329,8 @@ async def test_if_fires_on_state_change_legacy( }, ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "hvac_mode_changed" + assert len(calls) == 1 + assert calls[0].data["some"] == "hvac_mode_changed" async def test_get_trigger_capabilities_hvac_mode(hass: HomeAssistant) -> None: diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index aa162e0b683..a459b991203 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -4,7 +4,6 @@ from __future__ import annotations from enum import Enum from types import ModuleType -from typing import Any from unittest.mock import MagicMock, Mock, patch import pytest @@ -18,31 +17,17 @@ from homeassistant.components.climate import ( HVACMode, ) from homeassistant.components.climate.const import ( - ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, - ATTR_HUMIDITY, - ATTR_MAX_TEMP, - ATTR_MIN_TEMP, ATTR_PRESET_MODE, ATTR_SWING_MODE, - ATTR_TARGET_TEMP_HIGH, - ATTR_TARGET_TEMP_LOW, SERVICE_SET_FAN_MODE, - SERVICE_SET_HUMIDITY, - SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, ClimateEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_TEMPERATURE, - PRECISION_WHOLE, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - UnitOfTemperature, -) +from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import issue_registry as ir @@ -59,7 +44,6 @@ from tests.common import ( import_and_test_deprecated_constant_enum, mock_integration, mock_platform, - setup_test_component_platform, ) @@ -112,9 +96,6 @@ class MockClimateEntity(MockEntity, ClimateEntity): _attr_swing_mode = "auto" _attr_swing_modes = ["auto", "off"] _attr_temperature_unit = UnitOfTemperature.CELSIUS - _attr_target_temperature = 20 - _attr_target_temperature_high = 25 - _attr_target_temperature_low = 15 @property def hvac_mode(self) -> HVACMode: @@ -144,18 +125,6 @@ class MockClimateEntity(MockEntity, ClimateEntity): """Set swing mode.""" self._attr_swing_mode = swing_mode - def set_hvac_mode(self, hvac_mode: HVACMode) -> None: - """Set new target hvac mode.""" - self._attr_hvac_mode = hvac_mode - - def set_temperature(self, **kwargs: Any) -> None: - """Set new target temperature.""" - if ATTR_TEMPERATURE in kwargs: - self._attr_target_temperature = kwargs[ATTR_TEMPERATURE] - if ATTR_TARGET_TEMP_HIGH in kwargs: - self._attr_target_temperature_high = kwargs[ATTR_TARGET_TEMP_HIGH] - self._attr_target_temperature_low = kwargs[ATTR_TARGET_TEMP_LOW] - class MockClimateEntityTestMethods(MockClimateEntity): """Mock Climate device.""" @@ -189,7 +158,7 @@ async def test_sync_turn_off(hass: HomeAssistant) -> None: assert climate.turn_off.called -def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: +def _create_tuples(enum: Enum, constant_prefix: str) -> list[tuple[Enum, str]]: return [ (enum_field, constant_prefix) for enum_field in enum @@ -255,87 +224,46 @@ def test_deprecated_current_constants( ) -async def test_temperature_features_is_valid( - 
hass: HomeAssistant, - register_test_integration: MockConfigEntry, - caplog: pytest.LogCaptureFixture, +async def test_preset_mode_validation( + hass: HomeAssistant, config_flow_fixture: None ) -> None: - """Test correct features for setting temperature.""" + """Test mode validation for fan, swing and preset.""" - class MockClimateTempEntity(MockClimateEntity): - @property - def supported_features(self) -> int: - """Return supported features.""" - return ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True - class MockClimateTempRangeEntity(MockClimateEntity): - @property - def supported_features(self) -> int: - """Return supported features.""" - return ClimateEntityFeature.TARGET_TEMPERATURE + async def async_setup_entry_climate_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test climate platform via config entry.""" + async_add_entities([MockClimateEntity(name="test", entity_id="climate.test")]) - climate_temp_entity = MockClimateTempEntity( - name="test", entity_id="climate.test_temp" - ) - climate_temp_range_entity = MockClimateTempRangeEntity( - name="test", entity_id="climate.test_range" - ) - - setup_test_component_platform( + mock_integration( hass, - DOMAIN, - entities=[climate_temp_entity, climate_temp_range_entity], - from_config_entry=True, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - with pytest.raises( - ServiceValidationError, - match="Set temperature action was used with the target temperature parameter but the entity does not support it", - ): - await hass.services.async_call( - DOMAIN, - SERVICE_SET_TEMPERATURE, - { - "entity_id": "climate.test_temp", - "temperature": 20, - }, - blocking=True, - ) - - with pytest.raises( - ServiceValidationError, - match="Set temperature action was used with the target temperature low/high parameter but the entity does not support it", - ): - await hass.services.async_call( - DOMAIN, - SERVICE_SET_TEMPERATURE, - { - "entity_id": "climate.test_range", - "target_temp_low": 20, - "target_temp_high": 25, - }, - blocking=True, - ) - - -async def test_mode_validation( - hass: HomeAssistant, - register_test_integration: MockConfigEntry, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test mode validation for hvac_mode, fan, swing and preset.""" - climate_entity = MockClimateEntity(name="test", entity_id="climate.test") - - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True + mock_platform( + hass, + "test.climate", + MockPlatform(async_setup_entry=async_setup_entry_climate_platform), ) - await hass.config_entries.async_setup(register_test_integration.entry_id) + + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") - assert state.state == "heat" assert state.attributes.get(ATTR_PRESET_MODE) == "home" assert state.attributes.get(ATTR_FAN_MODE) == "auto" assert state.attributes.get(ATTR_SWING_MODE) == "auto" @@ -372,23 +300,6 @@ async def test_mode_validation( assert 
state.attributes.get(ATTR_FAN_MODE) == "off" assert state.attributes.get(ATTR_SWING_MODE) == "off" - await hass.services.async_call( - DOMAIN, - SERVICE_SET_HVAC_MODE, - { - "entity_id": "climate.test", - "hvac_mode": "auto", - }, - blocking=True, - ) - - assert ( - "MockClimateEntity sets the hvac_mode auto which is not valid " - "for this entity with modes: off, heat. This will stop working " - "in 2025.4 and raise an error instead. " - "Please" in caplog.text - ) - with pytest.raises( ServiceValidationError, match="Preset mode invalid is not valid. Valid preset modes are: home, away", @@ -479,9 +390,7 @@ def test_deprecated_supported_features_ints( async def test_warning_not_implemented_turn_on_off_feature( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None ) -> None: """Test adding feature flag and warn if missing when methods are set.""" @@ -498,15 +407,43 @@ async def test_warning_not_implemented_turn_on_off_feature( """Turn off.""" called.append("turn_off") - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_climate_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test climate platform via config entry.""" + async_add_entities( + [MockClimateEntityTest(name="test", entity_id="climate.test")] + ) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.climate", + MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + ) with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -550,9 +487,7 @@ async def test_warning_not_implemented_turn_on_off_feature( async def test_implicit_warning_not_implemented_turn_on_off_feature( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None ) -> None: """Test adding feature flag and warn if missing when methods are not set. 
@@ -580,15 +515,43 @@ async def test_implicit_warning_not_implemented_turn_on_off_feature( """ return [HVACMode.OFF, HVACMode.HEAT] - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_climate_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test climate platform via config entry.""" + async_add_entities( + [MockClimateEntityTest(name="test", entity_id="climate.test")] + ) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.climate", + MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + ) with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -604,9 +567,7 @@ async def test_implicit_warning_not_implemented_turn_on_off_feature( async def test_no_warning_implemented_turn_on_off_feature( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None ) -> None: """Test no warning when feature flags are set.""" @@ -621,15 +582,43 @@ async def test_no_warning_implemented_turn_on_off_feature( | ClimateEntityFeature.TURN_ON ) - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_climate_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test climate platform via config entry.""" + async_add_entities( + [MockClimateEntityTest(name="test", entity_id="climate.test")] + ) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.climate", + MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + ) with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -650,9 +639,7 @@ async def test_no_warning_implemented_turn_on_off_feature( async def test_no_warning_integration_has_migrated( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - 
register_test_integration: MockConfigEntry, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None ) -> None: """Test no warning when integration migrated using `_enable_turn_on_off_backwards_compatibility`.""" @@ -666,15 +653,43 @@ async def test_no_warning_integration_has_migrated( | ClimateEntityFeature.SWING_MODE ) - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_climate_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test climate platform via config entry.""" + async_add_entities( + [MockClimateEntityTest(name="test", entity_id="climate.test")] + ) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.climate", + MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + ) with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -694,42 +709,6 @@ async def test_no_warning_integration_has_migrated( ) -async def test_no_warning_integration_implement_feature_flags( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, -) -> None: - """Test no warning when integration uses the correct feature flags.""" - - class MockClimateEntityTest(MockClimateEntity): - """Mock Climate device.""" - - _attr_supported_features = ( - ClimateEntityFeature.FAN_MODE - | ClimateEntityFeature.PRESET_MODE - | ClimateEntityFeature.SWING_MODE - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TURN_ON - ) - - climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") - - with patch.object( - MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" - ): - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state is not None - - assert "does not set ClimateEntityFeature" not in caplog.text - assert "implements HVACMode(s):" not in caplog.text - - async def test_turn_on_off_toggle(hass: HomeAssistant) -> None: """Test turn_on/turn_off/toggle methods.""" @@ -929,7 +908,7 @@ async def test_issue_aux_property_deprecated( assert ( "test::MockClimateEntityWithAux implements the `is_aux_heat` property or uses " "the auxiliary heater methods in a subclass of ClimateEntity which is deprecated " - f"and will be unsupported from Home Assistant 2025.4. Please {report}" + f"and will be unsupported from Home Assistant 2024.10. 
Please {report}" ) in caplog.text # Assert we only log warning once @@ -969,7 +948,7 @@ async def test_issue_aux_property_deprecated( async def test_no_issue_aux_property_deprecated_for_core( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, + config_flow_fixture: None, manifest_extra: dict[str, str], translation_key: str, translation_placeholders_extra: dict[str, str], @@ -1008,10 +987,39 @@ async def test_no_issue_aux_property_deprecated_for_core( entity_id="climate.testing", ) - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_climate_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test weather platform via config entry.""" + async_add_entities([climate_entity]) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + partial_manifest=manifest_extra, + ), + built_in=False, ) - await hass.config_entries.async_setup(register_test_integration.entry_id) + mock_platform( + hass, + "test.climate", + MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + ) + + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert climate_entity.state == HVACMode.HEAT @@ -1029,7 +1037,7 @@ async def test_no_issue_aux_property_deprecated_for_core( async def test_no_issue_no_aux_property( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - register_test_integration: MockConfigEntry, + config_flow_fixture: None, issue_registry: ir.IssueRegistry, ) -> None: """Test the issue is raised on deprecated auxiliary heater attributes.""" @@ -1039,10 +1047,38 @@ async def test_no_issue_no_aux_property( entity_id="climate.testing", ) - setup_test_component_platform( - hass, DOMAIN, entities=[climate_entity], from_config_entry=True + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_climate_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test weather platform via config entry.""" + async_add_entities([climate_entity]) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, ) - assert await hass.config_entries.async_setup(register_test_integration.entry_id) + mock_platform( + hass, + "test.climate", + MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + ) + + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert climate_entity.state == HVACMode.HEAT @@ -1054,230 +1090,3 @@ async def test_no_issue_no_aux_property( "the auxiliary heater methods in a subclass of ClimateEntity which is deprecated " "and will be unsupported from Home Assistant 2024.10." 
) not in caplog.text - - -async def test_humidity_validation( - hass: HomeAssistant, - register_test_integration: MockConfigEntry, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test validation for humidity.""" - - class MockClimateEntityHumidity(MockClimateEntity): - """Mock climate class with mocked aux heater.""" - - _attr_supported_features = ClimateEntityFeature.TARGET_HUMIDITY - _attr_target_humidity = 50 - _attr_min_humidity = 50 - _attr_max_humidity = 60 - - def set_humidity(self, humidity: int) -> None: - """Set new target humidity.""" - self._attr_target_humidity = humidity - - test_climate = MockClimateEntityHumidity( - name="Test", - unique_id="unique_climate_test", - ) - - setup_test_component_platform( - hass, DOMAIN, entities=[test_climate], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state.attributes.get(ATTR_HUMIDITY) == 50 - - with pytest.raises( - ServiceValidationError, - match="Provided humidity 1 is not valid. Accepted range is 50 to 60", - ) as exc: - await hass.services.async_call( - DOMAIN, - SERVICE_SET_HUMIDITY, - { - "entity_id": "climate.test", - ATTR_HUMIDITY: "1", - }, - blocking=True, - ) - - assert exc.value.translation_key == "humidity_out_of_range" - assert "Check valid humidity 1 in range 50 - 60" in caplog.text - - with pytest.raises( - ServiceValidationError, - match="Provided humidity 70 is not valid. Accepted range is 50 to 60", - ) as exc: - await hass.services.async_call( - DOMAIN, - SERVICE_SET_HUMIDITY, - { - "entity_id": "climate.test", - ATTR_HUMIDITY: "70", - }, - blocking=True, - ) - - -async def test_temperature_validation( - hass: HomeAssistant, register_test_integration: MockConfigEntry -) -> None: - """Test validation for temperatures.""" - - class MockClimateEntityTemp(MockClimateEntity): - """Mock climate class with mocked aux heater.""" - - _attr_supported_features = ( - ClimateEntityFeature.FAN_MODE - | ClimateEntityFeature.PRESET_MODE - | ClimateEntityFeature.SWING_MODE - | ClimateEntityFeature.TARGET_TEMPERATURE - | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE - ) - _attr_target_temperature = 15 - _attr_target_temperature_high = 18 - _attr_target_temperature_low = 10 - _attr_target_temperature_step = PRECISION_WHOLE - - def set_temperature(self, **kwargs: Any) -> None: - """Set new target temperature.""" - if ATTR_TEMPERATURE in kwargs: - self._attr_target_temperature = kwargs[ATTR_TEMPERATURE] - if ATTR_TARGET_TEMP_HIGH in kwargs: - self._attr_target_temperature_high = kwargs[ATTR_TARGET_TEMP_HIGH] - self._attr_target_temperature_low = kwargs[ATTR_TARGET_TEMP_LOW] - - test_climate = MockClimateEntityTemp( - name="Test", - unique_id="unique_climate_test", - ) - - setup_test_component_platform( - hass, DOMAIN, entities=[test_climate], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) is None - assert state.attributes.get(ATTR_MIN_TEMP) == 7 - assert state.attributes.get(ATTR_MAX_TEMP) == 35 - - with pytest.raises( - ServiceValidationError, - match="Provided temperature 40.0 is not valid. 
Accepted range is 7 to 35", - ) as exc: - await hass.services.async_call( - DOMAIN, - SERVICE_SET_TEMPERATURE, - { - "entity_id": "climate.test", - ATTR_TEMPERATURE: "40", - }, - blocking=True, - ) - assert ( - str(exc.value) - == "Provided temperature 40.0 is not valid. Accepted range is 7 to 35" - ) - assert exc.value.translation_key == "temp_out_of_range" - - with pytest.raises( - ServiceValidationError, - match="Provided temperature 0.0 is not valid. Accepted range is 7 to 35", - ) as exc: - await hass.services.async_call( - DOMAIN, - SERVICE_SET_TEMPERATURE, - { - "entity_id": "climate.test", - ATTR_TARGET_TEMP_HIGH: "25", - ATTR_TARGET_TEMP_LOW: "0", - }, - blocking=True, - ) - assert ( - str(exc.value) - == "Provided temperature 0.0 is not valid. Accepted range is 7 to 35" - ) - assert exc.value.translation_key == "temp_out_of_range" - - await hass.services.async_call( - DOMAIN, - SERVICE_SET_TEMPERATURE, - { - "entity_id": "climate.test", - ATTR_TARGET_TEMP_HIGH: "25", - ATTR_TARGET_TEMP_LOW: "10", - }, - blocking=True, - ) - - state = hass.states.get("climate.test") - assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 10 - assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 25 - - -async def test_target_temp_high_higher_than_low( - hass: HomeAssistant, register_test_integration: MockConfigEntry -) -> None: - """Test that target high is higher than target low.""" - - class MockClimateEntityTemp(MockClimateEntity): - """Mock climate class with mocked aux heater.""" - - _attr_supported_features = ( - ClimateEntityFeature.TARGET_TEMPERATURE - | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE - ) - _attr_current_temperature = 15 - _attr_target_temperature = 15 - _attr_target_temperature_high = 18 - _attr_target_temperature_low = 10 - _attr_target_temperature_step = PRECISION_WHOLE - - def set_temperature(self, **kwargs: Any) -> None: - """Set new target temperature.""" - if ATTR_TEMPERATURE in kwargs: - self._attr_target_temperature = kwargs[ATTR_TEMPERATURE] - if ATTR_TARGET_TEMP_HIGH in kwargs: - self._attr_target_temperature_high = kwargs[ATTR_TARGET_TEMP_HIGH] - self._attr_target_temperature_low = kwargs[ATTR_TARGET_TEMP_LOW] - - test_climate = MockClimateEntityTemp( - name="Test", - unique_id="unique_climate_test", - ) - - setup_test_component_platform( - hass, DOMAIN, entities=[test_climate], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("climate.test") - assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 15 - assert state.attributes.get(ATTR_MIN_TEMP) == 7 - assert state.attributes.get(ATTR_MAX_TEMP) == 35 - - with pytest.raises( - ServiceValidationError, - match="Target temperature low can not be higher than Target temperature high", - ) as exc: - await hass.services.async_call( - DOMAIN, - SERVICE_SET_TEMPERATURE, - { - "entity_id": "climate.test", - ATTR_TARGET_TEMP_HIGH: "15", - ATTR_TARGET_TEMP_LOW: "20", - }, - blocking=True, - ) - assert ( - str(exc.value) - == "Target temperature low can not be higher than Target temperature high" - ) - assert exc.value.translation_key == "low_temp_higher_than_high_temp" diff --git a/tests/components/climate/test_intent.py b/tests/components/climate/test_intent.py index d17f3a1747d..ab1e3629ef8 100644 --- a/tests/components/climate/test_intent.py +++ b/tests/components/climate/test_intent.py @@ -1,8 +1,7 @@ """Test climate intents.""" -from collections.abc import Generator - import pytest +from 
typing_extensions import Generator from homeassistant.components import conversation from homeassistant.components.climate import ( @@ -371,7 +370,7 @@ async def test_not_exposed( {"name": {"value": climate_1.name}}, assistant=conversation.DOMAIN, ) - assert err.value.result.no_match_reason == intent.MatchFailedReason.ASSISTANT + assert err.value.result.no_match_reason == intent.MatchFailedReason.NAME # Expose first, hide second async_expose_entity(hass, conversation.DOMAIN, climate_1.entity_id, True) diff --git a/tests/components/climate/test_reproduce_state.py b/tests/components/climate/test_reproduce_state.py index 0632ebcc9e4..636ab326a2b 100644 --- a/tests/components/climate/test_reproduce_state.py +++ b/tests/components/climate/test_reproduce_state.py @@ -3,6 +3,7 @@ import pytest from homeassistant.components.climate import ( + ATTR_AUX_HEAT, ATTR_FAN_MODE, ATTR_HUMIDITY, ATTR_PRESET_MODE, @@ -10,6 +11,7 @@ from homeassistant.components.climate import ( ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, DOMAIN, + SERVICE_SET_AUX_HEAT, SERVICE_SET_FAN_MODE, SERVICE_SET_HUMIDITY, SERVICE_SET_HVAC_MODE, @@ -94,6 +96,7 @@ async def test_state_with_context(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("service", "attribute"), [ + (SERVICE_SET_AUX_HEAT, ATTR_AUX_HEAT), (SERVICE_SET_PRESET_MODE, ATTR_PRESET_MODE), (SERVICE_SET_SWING_MODE, ATTR_SWING_MODE), (SERVICE_SET_FAN_MODE, ATTR_FAN_MODE), diff --git a/tests/components/cloud/conftest.py b/tests/components/cloud/conftest.py index 7002f7c39ec..c7d0702ea88 100644 --- a/tests/components/cloud/conftest.py +++ b/tests/components/cloud/conftest.py @@ -1,21 +1,21 @@ """Fixtures for cloud tests.""" -from collections.abc import AsyncGenerator, Callable, Coroutine, Generator +from collections.abc import Callable, Coroutine from pathlib import Path from typing import Any -from unittest.mock import DEFAULT, AsyncMock, MagicMock, PropertyMock, patch +from unittest.mock import DEFAULT, MagicMock, PropertyMock, patch from hass_nabucasa import Cloud from hass_nabucasa.auth import CognitoAuth from hass_nabucasa.cloudhooks import Cloudhooks from hass_nabucasa.const import DEFAULT_SERVERS, DEFAULT_VALUES, STATE_CONNECTED from hass_nabucasa.google_report_state import GoogleReportState -from hass_nabucasa.ice_servers import IceServers from hass_nabucasa.iot import CloudIoT from hass_nabucasa.remote import RemoteUI from hass_nabucasa.voice import Voice import jwt import pytest +from typing_extensions import AsyncGenerator, Generator from homeassistant.components.cloud.client import CloudClient from homeassistant.components.cloud.const import DATA_CLOUD @@ -69,12 +69,6 @@ async def cloud_fixture() -> AsyncGenerator[MagicMock]: ) mock_cloud.voice = MagicMock(spec=Voice) mock_cloud.started = None - mock_cloud.ice_servers = MagicMock( - spec=IceServers, - async_register_ice_servers_listener=AsyncMock( - return_value=lambda: "mock-unregister" - ), - ) def set_up_mock_cloud( cloud_client: CloudClient, mode: str, **kwargs: Any @@ -194,8 +188,9 @@ def set_cloud_prefs_fixture( @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: """Mock the TTS cache dir with empty dir.""" + return mock_tts_cache_dir @pytest.fixture(autouse=True) diff --git a/tests/components/cloud/test_account_link.py b/tests/components/cloud/test_account_link.py index cd81a7cf691..acaff7db76c 100644 --- a/tests/components/cloud/test_account_link.py +++ 
b/tests/components/cloud/test_account_link.py @@ -1,12 +1,12 @@ """Test account link services.""" import asyncio -from collections.abc import Generator import logging from time import time from unittest.mock import AsyncMock, Mock, patch import pytest +from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.cloud import account_link diff --git a/tests/components/cloud/test_binary_sensor.py b/tests/components/cloud/test_binary_sensor.py index 8a4a1a0e9aa..789947f3c7d 100644 --- a/tests/components/cloud/test_binary_sensor.py +++ b/tests/components/cloud/test_binary_sensor.py @@ -1,10 +1,10 @@ """Tests for the cloud binary sensor.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch from hass_nabucasa.const import DISPATCH_REMOTE_CONNECT, DISPATCH_REMOTE_DISCONNECT import pytest +from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_registry import EntityRegistry diff --git a/tests/components/cloud/test_client.py b/tests/components/cloud/test_client.py index 43eccc5ef9c..005efd990fb 100644 --- a/tests/components/cloud/test_client.py +++ b/tests/components/cloud/test_client.py @@ -1,6 +1,5 @@ """Test the cloud.iot module.""" -from collections.abc import Callable, Coroutine from datetime import timedelta from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, PropertyMock, patch @@ -184,59 +183,6 @@ async def test_handler_google_actions_disabled( assert resp["payload"] == response_payload -async def test_handler_ice_servers( - hass: HomeAssistant, - cloud: MagicMock, - set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]], -) -> None: - """Test handler ICE servers.""" - assert await async_setup_component(hass, "cloud", {"cloud": {}}) - await hass.async_block_till_done() - # make sure that preferences will not be reset - await cloud.client.prefs.async_set_username(cloud.username) - await set_cloud_prefs( - { - "alexa_enabled": False, - "google_enabled": False, - } - ) - - await cloud.login("test-user", "test-pass") - await cloud.client.cloud_connected() - - assert cloud.client._cloud_ice_servers_listener is not None - assert cloud.client._cloud_ice_servers_listener() == "mock-unregister" - - -async def test_handler_ice_servers_disabled( - hass: HomeAssistant, - cloud: MagicMock, - set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]], -) -> None: - """Test handler ICE servers when user has disabled it.""" - assert await async_setup_component(hass, "cloud", {"cloud": {}}) - await hass.async_block_till_done() - # make sure that preferences will not be reset - await cloud.client.prefs.async_set_username(cloud.username) - await set_cloud_prefs( - { - "alexa_enabled": False, - "google_enabled": False, - } - ) - - await cloud.login("test-user", "test-pass") - await cloud.client.cloud_connected() - - await set_cloud_prefs( - { - "cloud_ice_servers_enabled": False, - } - ) - - assert cloud.client._cloud_ice_servers_listener is None - - async def test_webhook_msg( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -262,9 +208,7 @@ async def test_webhook_msg( received = [] - async def handler( - hass: HomeAssistant, webhook_id: str, request: web.Request - ) -> web.Response: + async def handler(hass, webhook_id, request): """Handle a webhook.""" received.append(request) return web.json_response({"from": "handler"}) @@ -529,16 +473,13 @@ async def test_logged_out( await 
cloud.client.cloud_connected() await hass.async_block_till_done() - assert cloud.client._cloud_ice_servers_listener is not None - # Simulate logged out await cloud.logout() await hass.async_block_till_done() - # Check we clean up Alexa, Google and ICE servers + # Check we clean up Alexa and Google assert cloud.client._alexa_config is None assert cloud.client._google_config is None - assert cloud.client._cloud_ice_servers_listener is None google_config_mock.async_deinitialize.assert_called_once_with() alexa_config_mock.async_deinitialize.assert_called_once_with() diff --git a/tests/components/cloud/test_http_api.py b/tests/components/cloud/test_http_api.py index 216fc77db48..5ee9af88681 100644 --- a/tests/components/cloud/test_http_api.py +++ b/tests/components/cloud/test_http_api.py @@ -14,8 +14,6 @@ from hass_nabucasa.voice import TTS_VOICES import pytest from homeassistant.components.alexa import errors as alexa_errors - -# pylint: disable-next=hass-component-root-import from homeassistant.components.alexa.entities import LightCapabilities from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY from homeassistant.components.cloud.const import DEFAULT_EXPOSED_DOMAINS, DOMAIN @@ -784,7 +782,6 @@ async def test_websocket_status( "google_report_state": True, "remote_allow_remote_enable": True, "remote_enabled": False, - "cloud_ice_servers_enabled": True, "tts_default_voice": ["en-US", "JennyNeural"], }, "alexa_entities": { @@ -904,7 +901,6 @@ async def test_websocket_update_preferences( assert cloud.client.prefs.alexa_enabled assert cloud.client.prefs.google_secure_devices_pin is None assert cloud.client.prefs.remote_allow_remote_enable is True - assert cloud.client.prefs.cloud_ice_servers_enabled is True client = await hass_ws_client(hass) @@ -916,7 +912,6 @@ async def test_websocket_update_preferences( "google_secure_devices_pin": "1234", "tts_default_voice": ["en-GB", "RyanNeural"], "remote_allow_remote_enable": False, - "cloud_ice_servers_enabled": False, } ) response = await client.receive_json() @@ -926,7 +921,6 @@ async def test_websocket_update_preferences( assert not cloud.client.prefs.alexa_enabled assert cloud.client.prefs.google_secure_devices_pin == "1234" assert cloud.client.prefs.remote_allow_remote_enable is False - assert cloud.client.prefs.cloud_ice_servers_enabled is False assert cloud.client.prefs.tts_default_voice == ("en-GB", "RyanNeural") diff --git a/tests/components/cloud/test_stt.py b/tests/components/cloud/test_stt.py index 02acda1450e..df9e62380f8 100644 --- a/tests/components/cloud/test_stt.py +++ b/tests/components/cloud/test_stt.py @@ -1,6 +1,5 @@ """Test the speech-to-text platform for the cloud integration.""" -from collections.abc import AsyncGenerator from copy import deepcopy from http import HTTPStatus from typing import Any @@ -8,6 +7,7 @@ from unittest.mock import AsyncMock, MagicMock, patch from hass_nabucasa.voice import STTResponse, VoiceError import pytest +from typing_extensions import AsyncGenerator from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY from homeassistant.components.cloud.const import DOMAIN diff --git a/tests/components/cloud/test_system_health.py b/tests/components/cloud/test_system_health.py index 6293f44067d..60b23e47fec 100644 --- a/tests/components/cloud/test_system_health.py +++ b/tests/components/cloud/test_system_health.py @@ -50,12 +50,7 @@ async def test_cloud_system_health( await cloud.client.async_system_message({"region": "xx-earth-616"}) await set_cloud_prefs( - { - 
"alexa_enabled": True, - "google_enabled": False, - "remote_enabled": True, - "cloud_ice_servers_enabled": True, - } + {"alexa_enabled": True, "google_enabled": False, "remote_enabled": True} ) info = await get_system_health_info(hass, "cloud") @@ -75,7 +70,6 @@ async def test_cloud_system_health( "remote_server": "us-west-1", "alexa_enabled": True, "google_enabled": False, - "cloud_ice_servers_enabled": True, "can_reach_cert_server": "ok", "can_reach_cloud_auth": {"type": "failed", "error": "unreachable"}, "can_reach_cloud": "ok", diff --git a/tests/components/cloud/test_tts.py b/tests/components/cloud/test_tts.py index 499981c643d..bf45b6b2895 100644 --- a/tests/components/cloud/test_tts.py +++ b/tests/components/cloud/test_tts.py @@ -1,6 +1,6 @@ """Tests for cloud tts.""" -from collections.abc import AsyncGenerator, Callable, Coroutine +from collections.abc import Callable, Coroutine from copy import deepcopy from http import HTTPStatus from typing import Any @@ -8,6 +8,7 @@ from unittest.mock import AsyncMock, MagicMock, patch from hass_nabucasa.voice import TTS_VOICES, VoiceError, VoiceTokenError import pytest +from typing_extensions import AsyncGenerator import voluptuous as vol from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY @@ -23,11 +24,11 @@ from homeassistant.components.tts import ( ATTR_MEDIA_PLAYER_ENTITY_ID, ATTR_MESSAGE, DOMAIN as TTS_DOMAIN, - get_engine_instance, ) +from homeassistant.components.tts.helper import get_engine_instance +from homeassistant.config import async_process_ha_core_config from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.setup import async_setup_component diff --git a/tests/components/cloudflare/conftest.py b/tests/components/cloudflare/conftest.py index 977126f39a3..6c41e9fd179 100644 --- a/tests/components/cloudflare/conftest.py +++ b/tests/components/cloudflare/conftest.py @@ -1,9 +1,9 @@ """Define fixtures available for all tests.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from . 
import get_mock_client diff --git a/tests/components/cloudflare/test_config_flow.py b/tests/components/cloudflare/test_config_flow.py index f34a423833c..1278113c0c7 100644 --- a/tests/components/cloudflare/test_config_flow.py +++ b/tests/components/cloudflare/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock import pycfdns from homeassistant.components.cloudflare.const import CONF_RECORDS, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_API_TOKEN, CONF_SOURCE, CONF_ZONE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -151,7 +151,15 @@ async def test_reauth_flow(hass: HomeAssistant, cfupdate_flow: MagicMock) -> Non entry = MockConfigEntry(domain=DOMAIN, data=ENTRY_CONFIG) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/co2signal/conftest.py b/tests/components/co2signal/conftest.py index 680465c2537..04ab6db7464 100644 --- a/tests/components/co2signal/conftest.py +++ b/tests/components/co2signal/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Electricity maps integration tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator -from homeassistant.components.co2signal.const import DOMAIN +from homeassistant.components.co2signal import DOMAIN from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/co2signal/snapshots/test_diagnostics.ambr b/tests/components/co2signal/snapshots/test_diagnostics.ambr index 9218e7343ec..645e0bd87e9 100644 --- a/tests/components/co2signal/snapshots/test_diagnostics.ambr +++ b/tests/components/co2signal/snapshots/test_diagnostics.ambr @@ -7,8 +7,6 @@ 'location': '', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'co2signal', 'entry_id': '904a74160aa6f335526706bee85dfb83', 'minor_version': 1, diff --git a/tests/components/co2signal/test_config_flow.py b/tests/components/co2signal/test_config_flow.py index f8f94d44126..7397b6e2355 100644 --- a/tests/components/co2signal/test_config_flow.py +++ b/tests/components/co2signal/test_config_flow.py @@ -11,8 +11,7 @@ from aioelectricitymaps import ( import pytest from homeassistant import config_entries -from homeassistant.components.co2signal import config_flow -from homeassistant.components.co2signal.const import DOMAIN +from homeassistant.components.co2signal import DOMAIN, config_flow from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -44,7 +43,7 @@ async def test_form_home(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Electricity Maps" + assert result2["title"] == "CO2 Signal" assert result2["data"] == { "api_key": "api_key", } @@ -185,7 +184,7 @@ async def test_form_error_handling( await hass.async_block_till_done() assert result["type"] is 
FlowResultType.CREATE_ENTRY - assert result["title"] == "Electricity Maps" + assert result["title"] == "CO2 Signal" assert result["data"] == { "api_key": "api_key", } @@ -199,10 +198,17 @@ async def test_reauth( """Test reauth flow.""" config_entry.add_to_hass(hass) - init_result = await config_entry.start_reauth_flow(hass) + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=None, + ) assert init_result["type"] is FlowResultType.FORM - assert init_result["step_id"] == "reauth_confirm" + assert init_result["step_id"] == "reauth" with patch( "homeassistant.components.co2signal.async_setup_entry", diff --git a/tests/components/co2signal/test_diagnostics.py b/tests/components/co2signal/test_diagnostics.py index 3d5e1a0580b..edc0007952b 100644 --- a/tests/components/co2signal/test_diagnostics.py +++ b/tests/components/co2signal/test_diagnostics.py @@ -2,7 +2,6 @@ import pytest from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -21,4 +20,4 @@ async def test_entry_diagnostics( """Test config entry diagnostics.""" result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot diff --git a/tests/components/co2signal/test_sensor.py b/tests/components/co2signal/test_sensor.py index fddda17f3ed..e9f46e483d1 100644 --- a/tests/components/co2signal/test_sensor.py +++ b/tests/components/co2signal/test_sensor.py @@ -109,4 +109,4 @@ async def test_sensor_reauth_triggered( assert (flows := hass.config_entries.flow.async_progress()) assert len(flows) == 1 - assert flows[0]["step_id"] == "reauth_confirm" + assert flows[0]["step_id"] == "reauth" diff --git a/tests/components/coinbase/common.py b/tests/components/coinbase/common.py index 0a2475ac218..3421c4ce838 100644 --- a/tests/components/coinbase/common.py +++ b/tests/components/coinbase/common.py @@ -5,15 +5,13 @@ from homeassistant.components.coinbase.const import ( CONF_EXCHANGE_RATES, DOMAIN, ) -from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION -from homeassistant.core import HomeAssistant +from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN from .const import ( GOOD_CURRENCY_2, GOOD_EXCHANGE_RATE, GOOD_EXCHANGE_RATE_2, MOCK_ACCOUNTS_RESPONSE, - MOCK_ACCOUNTS_RESPONSE_V3, ) from tests.common import MockConfigEntry @@ -22,7 +20,7 @@ from tests.common import MockConfigEntry class MockPagination: """Mock pagination result.""" - def __init__(self, value=None) -> None: + def __init__(self, value=None): """Load simple pagination for tests.""" self.next_starting_after = value @@ -30,7 +28,7 @@ class MockPagination: class MockGetAccounts: """Mock accounts with pagination.""" - def __init__(self, starting_after=0) -> None: + def __init__(self, starting_after=0): """Init mocked object, forced to return two at a time.""" if (target_end := starting_after + 2) >= ( max_end := len(MOCK_ACCOUNTS_RESPONSE) @@ -56,33 +54,6 @@ def mocked_get_accounts(_, **kwargs): return MockGetAccounts(**kwargs) -class MockGetAccountsV3: - """Mock accounts with pagination.""" - - def __init__(self, cursor="") -> None: - """Init mocked object, forced to return two at a time.""" - ids = [account["uuid"] for account in MOCK_ACCOUNTS_RESPONSE_V3] - start = ids.index(cursor) if cursor else 0 - - has_next = (target_end := start + 2) < 
len(MOCK_ACCOUNTS_RESPONSE_V3) - end = target_end if has_next else -1 - next_cursor = ids[end] if has_next else ids[-1] - self.accounts = { - "accounts": MOCK_ACCOUNTS_RESPONSE_V3[start:end], - "has_next": has_next, - "cursor": next_cursor, - } - - def __getitem__(self, item): - """Handle subscript request.""" - return self.accounts[item] - - -def mocked_get_accounts_v3(_, **kwargs): - """Return simplified accounts using mock.""" - return MockGetAccountsV3(**kwargs) - - def mock_get_current_user(): """Return a simplified mock user.""" return { @@ -103,24 +74,7 @@ def mock_get_exchange_rates(): } -def mock_get_portfolios(): - """Return a mocked list of Coinbase portfolios.""" - return { - "portfolios": [ - { - "name": "Default", - "uuid": "123456", - "type": "DEFAULT", - } - ] - } - - -async def init_mock_coinbase( - hass: HomeAssistant, - currencies: list[str] | None = None, - rates: list[str] | None = None, -) -> MockConfigEntry: +async def init_mock_coinbase(hass, currencies=None, rates=None): """Init Coinbase integration for testing.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -139,32 +93,3 @@ async def init_mock_coinbase( await hass.async_block_till_done() return config_entry - - -async def init_mock_coinbase_v3( - hass: HomeAssistant, - currencies: list[str] | None = None, - rates: list[str] | None = None, -) -> MockConfigEntry: - """Init Coinbase integration for testing.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - entry_id="080272b77a4f80c41b94d7cdc86fd826", - unique_id=None, - title="Test User v3", - data={ - CONF_API_KEY: "organizations/123456", - CONF_API_TOKEN: "AbCDeF", - CONF_API_VERSION: "v3", - }, - options={ - CONF_CURRENCIES: currencies or [], - CONF_EXCHANGE_RATES: rates or [], - }, - ) - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry diff --git a/tests/components/coinbase/const.py b/tests/components/coinbase/const.py index 5fbba11eb2d..dcd14555ca3 100644 --- a/tests/components/coinbase/const.py +++ b/tests/components/coinbase/const.py @@ -31,31 +31,3 @@ MOCK_ACCOUNTS_RESPONSE = [ "type": "fiat", }, ] - -MOCK_ACCOUNTS_RESPONSE_V3 = [ - { - "uuid": "123456789", - "name": "BTC Wallet", - "currency": GOOD_CURRENCY, - "available_balance": {"value": "0.00001", "currency": GOOD_CURRENCY}, - "type": "ACCOUNT_TYPE_CRYPTO", - "hold": {"value": "0", "currency": GOOD_CURRENCY}, - }, - { - "uuid": "abcdefg", - "name": "BTC Vault", - "currency": GOOD_CURRENCY, - "available_balance": {"value": "100.00", "currency": GOOD_CURRENCY}, - "type": "ACCOUNT_TYPE_VAULT", - "hold": {"value": "0", "currency": GOOD_CURRENCY}, - }, - { - "uuid": "987654321", - "name": "USD Wallet", - "currency": GOOD_CURRENCY_2, - "available_balance": {"value": "9.90", "currency": GOOD_CURRENCY_2}, - "type": "ACCOUNT_TYPE_FIAT", - "ready": True, - "hold": {"value": "0", "currency": GOOD_CURRENCY_2}, - }, -] diff --git a/tests/components/coinbase/snapshots/test_diagnostics.ambr b/tests/components/coinbase/snapshots/test_diagnostics.ambr index 51bd946f140..9079a7682c8 100644 --- a/tests/components/coinbase/snapshots/test_diagnostics.ambr +++ b/tests/components/coinbase/snapshots/test_diagnostics.ambr @@ -3,25 +3,40 @@ dict({ 'accounts': list([ dict({ - 'amount': '**REDACTED**', - 'currency': 'BTC', + 'balance': dict({ + 'amount': '**REDACTED**', + 'currency': 'BTC', + }), + 'currency': dict({ + 'code': 'BTC', + }), 'id': '**REDACTED**', - 'is_vault': False, 'name': 'BTC Wallet', + 'type': 
'wallet', }), dict({ - 'amount': '**REDACTED**', - 'currency': 'BTC', + 'balance': dict({ + 'amount': '**REDACTED**', + 'currency': 'BTC', + }), + 'currency': dict({ + 'code': 'BTC', + }), 'id': '**REDACTED**', - 'is_vault': True, 'name': 'BTC Vault', + 'type': 'vault', }), dict({ - 'amount': '**REDACTED**', - 'currency': 'USD', + 'balance': dict({ + 'amount': '**REDACTED**', + 'currency': 'USD', + }), + 'currency': dict({ + 'code': 'USD', + }), 'id': '**REDACTED**', - 'is_vault': False, 'name': 'USD Wallet', + 'type': 'fiat', }), ]), 'entry': dict({ @@ -30,8 +45,6 @@ 'api_token': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'coinbase', 'entry_id': '080272b77a4f80c41b94d7cdc86fd826', 'minor_version': 1, diff --git a/tests/components/coinbase/test_config_flow.py b/tests/components/coinbase/test_config_flow.py index aa2c6208e0f..f213392bb1e 100644 --- a/tests/components/coinbase/test_config_flow.py +++ b/tests/components/coinbase/test_config_flow.py @@ -14,18 +14,15 @@ from homeassistant.components.coinbase.const import ( CONF_EXCHANGE_RATES, DOMAIN, ) -from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION +from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .common import ( init_mock_coinbase, - init_mock_coinbase_v3, mock_get_current_user, mock_get_exchange_rates, - mock_get_portfolios, mocked_get_accounts, - mocked_get_accounts_v3, ) from .const import BAD_CURRENCY, BAD_EXCHANGE_RATE, GOOD_CURRENCY, GOOD_EXCHANGE_RATE @@ -56,17 +53,16 @@ async def test_form(hass: HomeAssistant) -> None: ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_API_KEY: "123456", CONF_API_TOKEN: "AbCDeF"}, + { + CONF_API_KEY: "123456", + CONF_API_TOKEN: "AbCDeF", + }, ) await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Test User" - assert result2["data"] == { - CONF_API_KEY: "123456", - CONF_API_TOKEN: "AbCDeF", - CONF_API_VERSION: "v2", - } + assert result2["data"] == {CONF_API_KEY: "123456", CONF_API_TOKEN: "AbCDeF"} assert len(mock_setup_entry.mock_calls) == 1 @@ -318,77 +314,3 @@ async def test_option_catch_all_exception(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "unknown"} - - -async def test_form_v3(hass: HomeAssistant) -> None: - """Test we get the form.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with ( - patch("coinbase.rest.RESTClient.get_accounts", new=mocked_get_accounts_v3), - patch( - "coinbase.rest.RESTClient.get_portfolios", - return_value=mock_get_portfolios(), - ), - patch( - "coinbase.rest.RESTBase.get", - return_value={"data": mock_get_exchange_rates()}, - ), - patch( - "homeassistant.components.coinbase.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_API_KEY: "organizations/123456", CONF_API_TOKEN: "AbCDeF"}, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Default" - assert result2["data"] == { - CONF_API_KEY: "organizations/123456", - CONF_API_TOKEN: "AbCDeF", - CONF_API_VERSION: "v3", - } - assert 
len(mock_setup_entry.mock_calls) == 1 - - -async def test_option_form_v3(hass: HomeAssistant) -> None: - """Test we handle a good wallet currency option.""" - - with ( - patch("coinbase.rest.RESTClient.get_accounts", new=mocked_get_accounts_v3), - patch( - "coinbase.rest.RESTClient.get_portfolios", - return_value=mock_get_portfolios(), - ), - patch( - "coinbase.rest.RESTBase.get", - return_value={"data": mock_get_exchange_rates()}, - ), - patch( - "homeassistant.components.coinbase.update_listener" - ) as mock_update_listener, - ): - config_entry = await init_mock_coinbase_v3(hass) - await hass.async_block_till_done() - result = await hass.config_entries.options.async_init(config_entry.entry_id) - await hass.async_block_till_done() - result2 = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - CONF_CURRENCIES: [GOOD_CURRENCY], - CONF_EXCHANGE_RATES: [GOOD_EXCHANGE_RATE], - CONF_EXCHANGE_PRECISION: 5, - }, - ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - await hass.async_block_till_done() - assert len(mock_update_listener.mock_calls) == 1 diff --git a/tests/components/coinbase/test_diagnostics.py b/tests/components/coinbase/test_diagnostics.py index 0e06c172c37..e30bdef30b8 100644 --- a/tests/components/coinbase/test_diagnostics.py +++ b/tests/components/coinbase/test_diagnostics.py @@ -3,7 +3,6 @@ from unittest.mock import patch from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -41,4 +40,4 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot diff --git a/tests/components/comelit/const.py b/tests/components/comelit/const.py index 92fdfebfa1d..998c12c09b7 100644 --- a/tests/components/comelit/const.py +++ b/tests/components/comelit/const.py @@ -1,19 +1,6 @@ """Common stuff for Comelit SimpleHome tests.""" -from aiocomelit import ComelitVedoAreaObject, ComelitVedoZoneObject -from aiocomelit.api import ComelitSerialBridgeObject -from aiocomelit.const import ( - CLIMATE, - COVER, - IRRIGATION, - LIGHT, - OTHER, - SCENARIO, - VEDO, - WATT, - AlarmAreaState, - AlarmZoneState, -) +from aiocomelit.const import VEDO from homeassistant.components.comelit.const import DOMAIN from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PIN, CONF_PORT, CONF_TYPE @@ -40,67 +27,3 @@ MOCK_USER_BRIDGE_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][0] MOCK_USER_VEDO_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][1] FAKE_PIN = 5678 - -BRIDGE_DEVICE_QUERY = { - CLIMATE: {}, - COVER: { - 0: ComelitSerialBridgeObject( - index=0, - name="Cover0", - status=0, - human_status="closed", - type="cover", - val=0, - protected=0, - zone="Open space", - power=0.0, - power_unit=WATT, - ) - }, - LIGHT: { - 0: ComelitSerialBridgeObject( - index=0, - name="Light0", - status=0, - human_status="off", - type="light", - val=0, - protected=0, - zone="Bathroom", - power=0.0, - power_unit=WATT, - ) - }, - OTHER: {}, - IRRIGATION: {}, - SCENARIO: {}, -} - -VEDO_DEVICE_QUERY = { - "aree": { - 0: ComelitVedoAreaObject( - index=0, - name="Area0", - p1=True, - p2=False, - ready=False, - armed=False, - alarm=False, - alarm_memory=False, - sabotage=False, - anomaly=False, - in_time=False, - out_time=False, - human_status=AlarmAreaState.UNKNOWN, - ) - }, - "zone": { - 0: ComelitVedoZoneObject( - index=0, - name="Zone0", - status_api="0x000", - status=0, - 
human_status=AlarmZoneState.REST, - ) - }, -} diff --git a/tests/components/comelit/snapshots/test_diagnostics.ambr b/tests/components/comelit/snapshots/test_diagnostics.ambr deleted file mode 100644 index 58ce74035f9..00000000000 --- a/tests/components/comelit/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,144 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics_bridge - dict({ - 'device_info': dict({ - 'devices': list([ - dict({ - 'clima': list([ - ]), - }), - dict({ - 'shutter': list([ - dict({ - '0': dict({ - 'human_status': 'closed', - 'name': 'Cover0', - 'power': 0.0, - 'power_unit': 'W', - 'protected': 0, - 'status': 0, - 'val': 0, - 'zone': 'Open space', - }), - }), - ]), - }), - dict({ - 'light': list([ - dict({ - '0': dict({ - 'human_status': 'off', - 'name': 'Light0', - 'power': 0.0, - 'power_unit': 'W', - 'protected': 0, - 'status': 0, - 'val': 0, - 'zone': 'Bathroom', - }), - }), - ]), - }), - dict({ - 'other': list([ - ]), - }), - dict({ - 'irrigation': list([ - ]), - }), - dict({ - 'scenario': list([ - ]), - }), - ]), - 'last_exception': 'None', - 'last_update success': True, - }), - 'entry': dict({ - 'data': dict({ - 'host': 'fake_host', - 'pin': '**REDACTED**', - 'port': 80, - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'comelit', - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': None, - 'version': 1, - }), - 'type': 'Serial bridge', - }) -# --- -# name: test_entry_diagnostics_vedo - dict({ - 'device_info': dict({ - 'devices': list([ - dict({ - 'aree': list([ - dict({ - '0': dict({ - 'alarm': False, - 'alarm_memory': False, - 'anomaly': False, - 'armed': False, - 'human_status': 'unknown', - 'in_time': False, - 'name': 'Area0', - 'out_time': False, - 'p1': True, - 'p2': False, - 'ready': False, - 'sabotage': False, - }), - }), - ]), - }), - dict({ - 'zone': list([ - dict({ - '0': dict({ - 'human_status': 'rest', - 'name': 'Zone0', - 'status': 0, - 'status_api': '0x000', - }), - }), - ]), - }), - ]), - 'last_exception': 'None', - 'last_update success': True, - }), - 'entry': dict({ - 'data': dict({ - 'host': 'fake_vedo_host', - 'pin': '**REDACTED**', - 'port': 8080, - 'type': 'Vedo system', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'comelit', - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': None, - 'version': 1, - }), - 'type': 'Vedo system', - }) -# --- diff --git a/tests/components/comelit/test_config_flow.py b/tests/components/comelit/test_config_flow.py index eeaea0e41e9..333bf09bd20 100644 --- a/tests/components/comelit/test_config_flow.py +++ b/tests/components/comelit/test_config_flow.py @@ -7,7 +7,7 @@ from aiocomelit import CannotAuthenticate, CannotConnect import pytest from homeassistant.components.comelit.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -100,9 +100,6 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) mock_config.add_to_hass(hass) - result = await 
mock_config.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" with ( patch( @@ -116,6 +113,15 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: ): mock_request_get.return_value.status_code = 200 + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -141,9 +147,6 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" with ( patch("aiocomelit.api.ComeliteSerialBridgeApi.login", side_effect=side_effect), @@ -152,6 +155,15 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> ), patch("homeassistant.components.comelit.async_setup_entry"), ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ diff --git a/tests/components/comelit/test_diagnostics.py b/tests/components/comelit/test_diagnostics.py deleted file mode 100644 index 39d75af1152..00000000000 --- a/tests/components/comelit/test_diagnostics.py +++ /dev/null @@ -1,81 +0,0 @@ -"""Tests for Comelit Simplehome diagnostics platform.""" - -from __future__ import annotations - -from unittest.mock import patch - -from syrupy import SnapshotAssertion -from syrupy.filters import props - -from homeassistant.components.comelit.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from .const import ( - BRIDGE_DEVICE_QUERY, - MOCK_USER_BRIDGE_DATA, - MOCK_USER_VEDO_DATA, - VEDO_DEVICE_QUERY, -) - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_entry_diagnostics_bridge( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test Bridge config entry diagnostics.""" - entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) - entry.add_to_hass(hass) - - with ( - patch("aiocomelit.api.ComeliteSerialBridgeApi.login"), - patch( - "aiocomelit.api.ComeliteSerialBridgeApi.get_all_devices", - return_value=BRIDGE_DEVICE_QUERY, - ), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert entry.state == ConfigEntryState.LOADED - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( - exclude=props( - "entry_id", - "created_at", - "modified_at", - ) - ) - - -async def test_entry_diagnostics_vedo( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test Vedo System config entry diagnostics.""" - entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_VEDO_DATA) - 
entry.add_to_hass(hass) - - with ( - patch("aiocomelit.api.ComelitVedoApi.login"), - patch( - "aiocomelit.api.ComelitVedoApi.get_all_areas_and_zones", - return_value=VEDO_DEVICE_QUERY, - ), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert entry.state == ConfigEntryState.LOADED - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( - exclude=props( - "entry_id", - "created_at", - "modified_at", - ) - ) diff --git a/tests/components/comfoconnect/test_sensor.py b/tests/components/comfoconnect/test_sensor.py index 5cae566379a..91e7e1f0e25 100644 --- a/tests/components/comfoconnect/test_sensor.py +++ b/tests/components/comfoconnect/test_sensor.py @@ -1,11 +1,11 @@ """Tests for the comfoconnect sensor platform.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.sensor import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -14,7 +14,7 @@ from tests.common import assert_setup_component COMPONENT = "comfoconnect" VALID_CONFIG = { COMPONENT: {"host": "1.2.3.4"}, - SENSOR_DOMAIN: { + DOMAIN: { "platform": COMPONENT, "resources": [ "current_humidity", @@ -51,8 +51,8 @@ async def setup_sensor( mock_comfoconnect_command: MagicMock, ) -> None: """Set up demo sensor component.""" - with assert_setup_component(1, SENSOR_DOMAIN): - await async_setup_component(hass, SENSOR_DOMAIN, VALID_CONFIG) + with assert_setup_component(1, DOMAIN): + await async_setup_component(hass, DOMAIN, VALID_CONFIG) await hass.async_block_till_done() diff --git a/tests/components/command_line/test_binary_sensor.py b/tests/components/command_line/test_binary_sensor.py index 5d1cd845e27..fd726ab77a4 100644 --- a/tests/components/command_line/test_binary_sensor.py +++ b/tests/components/command_line/test_binary_sensor.py @@ -56,24 +56,6 @@ async def test_setup_integration_yaml( assert entity_state.name == "Test" -async def test_setup_platform_yaml(hass: HomeAssistant) -> None: - """Test setting up the platform with platform yaml.""" - await setup.async_setup_component( - hass, - "binary_sensor", - { - "binary_sensor": { - "platform": "command_line", - "command": "echo 1", - "payload_on": "1", - "payload_off": "0", - } - }, - ) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - @pytest.mark.parametrize( "get_config", [ diff --git a/tests/components/command_line/test_cover.py b/tests/components/command_line/test_cover.py index da9d86ba8a5..7ed48909d79 100644 --- a/tests/components/command_line/test_cover.py +++ b/tests/components/command_line/test_cover.py @@ -14,11 +14,7 @@ import pytest from homeassistant import setup from homeassistant.components.command_line import DOMAIN from homeassistant.components.command_line.cover import CommandCover -from homeassistant.components.cover import ( - DOMAIN as COVER_DOMAIN, - SCAN_INTERVAL, - CoverState, -) +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, SCAN_INTERVAL from homeassistant.components.homeassistant import ( DOMAIN as HA_DOMAIN, SERVICE_UPDATE_ENTITY, @@ -28,6 +24,7 @@ from homeassistant.const import ( SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_STOP_COVER, + STATE_OPEN, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -39,24 +36,6 @@ from . 
import mock_asyncio_subprocess_run from tests.common import async_fire_time_changed -async def test_setup_platform_yaml(hass: HomeAssistant) -> None: - """Test setting up the platform with platform yaml.""" - await setup.async_setup_component( - hass, - "cover", - { - "cover": { - "platform": "command_line", - "command": "echo 1", - "payload_on": "1", - "payload_off": "0", - } - }, - ) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - async def test_no_poll_when_cover_has_no_command_state(hass: HomeAssistant) -> None: """Test that the cover does not polls when there's no state command.""" @@ -392,7 +371,7 @@ async def test_availability( entity_state = hass.states.get("cover.test") assert entity_state - assert entity_state.state == CoverState.OPEN + assert entity_state.state == STATE_OPEN hass.states.async_set("sensor.input1", "off") await hass.async_block_till_done() diff --git a/tests/components/command_line/test_notify.py b/tests/components/command_line/test_notify.py index 6898b44f062..98bfb856bb8 100644 --- a/tests/components/command_line/test_notify.py +++ b/tests/components/command_line/test_notify.py @@ -3,7 +3,6 @@ from __future__ import annotations import os -from pathlib import Path import subprocess import tempfile from unittest.mock import patch @@ -16,24 +15,6 @@ from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN from homeassistant.core import HomeAssistant -async def test_setup_platform_yaml(hass: HomeAssistant) -> None: - """Test setting up the platform with platform yaml.""" - await setup.async_setup_component( - hass, - "notify", - { - "notify": { - "platform": "command_line", - "command": "echo 1", - "payload_on": "1", - "payload_off": "0", - } - }, - ) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - @pytest.mark.parametrize( "get_config", [ @@ -97,7 +78,9 @@ async def test_command_line_output(hass: HomeAssistant) -> None: await hass.services.async_call( NOTIFY_DOMAIN, "test3", {"message": message}, blocking=True ) - assert message == await hass.async_add_executor_job(Path(filename).read_text) + with open(filename, encoding="UTF-8") as handle: + # the echo command adds a line break + assert message == handle.read() @pytest.mark.parametrize( diff --git a/tests/components/command_line/test_sensor.py b/tests/components/command_line/test_sensor.py index f7879b334cd..26f97e37543 100644 --- a/tests/components/command_line/test_sensor.py +++ b/tests/components/command_line/test_sensor.py @@ -27,24 +27,6 @@ from . 
import mock_asyncio_subprocess_run from tests.common import async_fire_time_changed -async def test_setup_platform_yaml(hass: HomeAssistant) -> None: - """Test setting up the platform with platform yaml.""" - await setup.async_setup_component( - hass, - "sensor", - { - "sensor": { - "platform": "command_line", - "command": "echo 1", - "payload_on": "1", - "payload_off": "0", - } - }, - ) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - @pytest.mark.parametrize( "get_config", [ @@ -485,46 +467,6 @@ async def test_update_with_unnecessary_json_attrs( assert "key_three" not in entity_state.attributes -@pytest.mark.parametrize( - "get_config", - [ - { - "command_line": [ - { - "sensor": { - "name": "Test", - "command": 'echo \ - {\ - \\"top_level\\": {\ - \\"second_level\\": {\ - \\"key\\": \\"some_json_value\\",\ - \\"another_key\\": \\"another_json_value\\",\ - \\"key_three\\": \\"value_three\\"\ - }\ - }\ - }', - "json_attributes": ["key", "another_key", "key_three"], - "json_attributes_path": "$.top_level.second_level", - } - } - ] - } - ], -) -async def test_update_with_json_attrs_with_json_attrs_path( - hass: HomeAssistant, load_yaml_integration: None -) -> None: - """Test using json_attributes_path to select a different part of the json object as root.""" - - entity_state = hass.states.get("sensor.test") - assert entity_state - assert entity_state.attributes["key"] == "some_json_value" - assert entity_state.attributes["another_key"] == "another_json_value" - assert entity_state.attributes["key_three"] == "value_three" - assert "top_level" not in entity_state.attributes - assert "second_level" not in entity_state.attributes - - @pytest.mark.parametrize( "get_config", [ diff --git a/tests/components/command_line/test_switch.py b/tests/components/command_line/test_switch.py index 549e729892c..c464ded34fb 100644 --- a/tests/components/command_line/test_switch.py +++ b/tests/components/command_line/test_switch.py @@ -37,24 +37,6 @@ from . 
import mock_asyncio_subprocess_run from tests.common import async_fire_time_changed -async def test_setup_platform_yaml(hass: HomeAssistant) -> None: - """Test setting up the platform with platform yaml.""" - await setup.async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "command_line", - "command": "echo 1", - "payload_on": "1", - "payload_off": "0", - } - }, - ) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - async def test_state_integration_yaml(hass: HomeAssistant) -> None: """Test with none state.""" with tempfile.TemporaryDirectory() as tempdirname: diff --git a/tests/components/config/conftest.py b/tests/components/config/conftest.py index 55393a219b1..c401ac19fa9 100644 --- a/tests/components/config/conftest.py +++ b/tests/components/config/conftest.py @@ -1,6 +1,5 @@ """Test fixtures for the config integration.""" -from collections.abc import Generator from contextlib import contextmanager from copy import deepcopy import json @@ -10,6 +9,7 @@ from typing import Any from unittest.mock import patch import pytest +from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/config/test_area_registry.py b/tests/components/config/test_area_registry.py index 03a8272e586..fb59725fd29 100644 --- a/tests/components/config/test_area_registry.py +++ b/tests/components/config/test_area_registry.py @@ -1,15 +1,11 @@ """Test area_registry API.""" -from datetime import datetime - -from freezegun.api import FrozenDateTimeFactory import pytest from pytest_unordered import unordered from homeassistant.components.config import area_registry from homeassistant.core import HomeAssistant from homeassistant.helpers import area_registry as ar -from homeassistant.util.dt import utcnow from tests.common import ANY from tests.typing import MockHAClientWebSocket, WebSocketGenerator @@ -25,17 +21,10 @@ async def client_fixture( async def test_list_areas( - client: MockHAClientWebSocket, - area_registry: ar.AreaRegistry, - freezer: FrozenDateTimeFactory, + client: MockHAClientWebSocket, area_registry: ar.AreaRegistry ) -> None: """Test list entries.""" - created_area1 = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") - freezer.move_to(created_area1) area1 = area_registry.async_create("mock 1") - - created_area2 = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") - freezer.move_to(created_area2) area2 = area_registry.async_create( "mock 2", aliases={"alias_1", "alias_2"}, @@ -57,8 +46,6 @@ async def test_list_areas( "labels": [], "name": "mock 1", "picture": None, - "created_at": created_area1.timestamp(), - "modified_at": created_area1.timestamp(), }, { "aliases": unordered(["alias_1", "alias_2"]), @@ -68,16 +55,12 @@ async def test_list_areas( "labels": unordered(["label_1", "label_2"]), "name": "mock 2", "picture": "/image/example.png", - "created_at": created_area2.timestamp(), - "modified_at": created_area2.timestamp(), }, ] async def test_create_area( - client: MockHAClientWebSocket, - area_registry: ar.AreaRegistry, - freezer: FrozenDateTimeFactory, + client: MockHAClientWebSocket, area_registry: ar.AreaRegistry ) -> None: """Test create entry.""" # Create area with only mandatory parameters @@ -95,8 +78,6 @@ async def test_create_area( "labels": [], "name": "mock", "picture": None, - "created_at": utcnow().timestamp(), - "modified_at": utcnow().timestamp(), } assert len(area_registry.areas) == 1 @@ -123,8 +104,6 @@ async def test_create_area( "labels": 
unordered(["label_1", "label_2"]), "name": "mock 2", "picture": "/image/example.png", - "created_at": utcnow().timestamp(), - "modified_at": utcnow().timestamp(), } assert len(area_registry.areas) == 2 @@ -182,16 +161,10 @@ async def test_delete_non_existing_area( async def test_update_area( - client: MockHAClientWebSocket, - area_registry: ar.AreaRegistry, - freezer: FrozenDateTimeFactory, + client: MockHAClientWebSocket, area_registry: ar.AreaRegistry ) -> None: """Test update entry.""" - created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") - freezer.move_to(created_at) area = area_registry.async_create("mock 1") - modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") - freezer.move_to(modified_at) await client.send_json_auto_id( { @@ -216,14 +189,9 @@ async def test_update_area( "labels": unordered(["label_1", "label_2"]), "name": "mock 2", "picture": "/image/example.png", - "created_at": created_at.timestamp(), - "modified_at": modified_at.timestamp(), } assert len(area_registry.areas) == 1 - modified_at = datetime.fromisoformat("2024-07-16T13:50:00.900075+00:00") - freezer.move_to(modified_at) - await client.send_json_auto_id( { "aliases": ["alias_1", "alias_1"], @@ -246,8 +214,6 @@ async def test_update_area( "labels": [], "name": "mock 2", "picture": None, - "created_at": created_at.timestamp(), - "modified_at": modified_at.timestamp(), } assert len(area_registry.areas) == 1 diff --git a/tests/components/config/test_auth_provider_homeassistant.py b/tests/components/config/test_auth_provider_homeassistant.py index 6b580013968..ffee88f91ec 100644 --- a/tests/components/config/test_auth_provider_homeassistant.py +++ b/tests/components/config/test_auth_provider_homeassistant.py @@ -183,13 +183,7 @@ async def test_create_auth_duplicate_username( result = await client.receive_json() assert not result["success"], result - assert result["error"] == { - "code": "home_assistant_error", - "message": "username_already_exists", - "translation_key": "username_already_exists", - "translation_placeholders": {"username": "test-user"}, - "translation_domain": "auth", - } + assert result["error"]["code"] == "username_exists" async def test_delete_removes_just_auth( @@ -288,13 +282,7 @@ async def test_delete_unknown_auth( result = await client.receive_json() assert not result["success"], result - assert result["error"] == { - "code": "home_assistant_error", - "message": "user_not_found", - "translation_key": "user_not_found", - "translation_placeholders": None, - "translation_domain": "auth", - } + assert result["error"]["code"] == "auth_not_found" async def test_change_password( diff --git a/tests/components/config/test_automation.py b/tests/components/config/test_automation.py index 40a9c85a8d3..f907732109d 100644 --- a/tests/components/config/test_automation.py +++ b/tests/components/config/test_automation.py @@ -7,12 +7,12 @@ from unittest.mock import patch import pytest +from homeassistant.bootstrap import async_setup_component from homeassistant.components import config from homeassistant.components.config import automation from homeassistant.const import STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.setup import async_setup_component from homeassistant.util import yaml from tests.typing import ClientSessionGenerator @@ -78,7 +78,7 @@ async def test_update_automation_config( resp = await client.post( "/api/config/automation/config/moon", - data=json.dumps({"triggers": [], 
"actions": [], "conditions": []}), + data=json.dumps({"trigger": [], "action": [], "condition": []}), ) await hass.async_block_till_done() assert sorted(hass.states.async_entity_ids("automation")) == [ @@ -91,13 +91,8 @@ async def test_update_automation_config( assert result == {"result": "ok"} new_data = hass_config_store["automations.yaml"] - assert list(new_data[1]) == ["id", "triggers", "conditions", "actions"] - assert new_data[1] == { - "id": "moon", - "triggers": [], - "conditions": [], - "actions": [], - } + assert list(new_data[1]) == ["id", "trigger", "condition", "action"] + assert new_data[1] == {"id": "moon", "trigger": [], "condition": [], "action": []} @pytest.mark.parametrize("automation_config", [{}]) @@ -106,18 +101,18 @@ async def test_update_automation_config( [ ( {"action": []}, - "required key not provided @ data['triggers']", + "required key not provided @ data['trigger']", ), ( { - "trigger": {"trigger": "automation"}, + "trigger": {"platform": "automation"}, "action": [], }, "Integration 'automation' does not provide trigger support", ), ( { - "trigger": {"trigger": "event", "event_type": "test_event"}, + "trigger": {"platform": "event", "event_type": "test_event"}, "condition": { "condition": "state", # The UUID will fail being resolved to en entity_id @@ -130,7 +125,7 @@ async def test_update_automation_config( ), ( { - "trigger": {"trigger": "event", "event_type": "test_event"}, + "trigger": {"platform": "event", "event_type": "test_event"}, "action": { "condition": "state", # The UUID will fail being resolved to en entity_id @@ -259,7 +254,7 @@ async def test_update_remove_key_automation_config( resp = await client.post( "/api/config/automation/config/moon", - data=json.dumps({"triggers": [], "actions": [], "conditions": []}), + data=json.dumps({"trigger": [], "action": [], "condition": []}), ) await hass.async_block_till_done() assert sorted(hass.states.async_entity_ids("automation")) == [ @@ -272,13 +267,8 @@ async def test_update_remove_key_automation_config( assert result == {"result": "ok"} new_data = hass_config_store["automations.yaml"] - assert list(new_data[1]) == ["id", "triggers", "conditions", "actions"] - assert new_data[1] == { - "id": "moon", - "triggers": [], - "conditions": [], - "actions": [], - } + assert list(new_data[1]) == ["id", "trigger", "condition", "action"] + assert new_data[1] == {"id": "moon", "trigger": [], "condition": [], "action": []} @pytest.mark.parametrize("automation_config", [{}]) @@ -307,7 +297,7 @@ async def test_bad_formatted_automations( resp = await client.post( "/api/config/automation/config/moon", - data=json.dumps({"triggers": [], "actions": [], "conditions": []}), + data=json.dumps({"trigger": [], "action": [], "condition": []}), ) await hass.async_block_till_done() assert sorted(hass.states.async_entity_ids("automation")) == [ @@ -322,12 +312,7 @@ async def test_bad_formatted_automations( # Verify ID added new_data = hass_config_store["automations.yaml"] assert "id" in new_data[0] - assert new_data[1] == { - "id": "moon", - "triggers": [], - "conditions": [], - "actions": [], - } + assert new_data[1] == {"id": "moon", "trigger": [], "condition": [], "action": []} @pytest.mark.parametrize( @@ -336,12 +321,12 @@ async def test_bad_formatted_automations( [ { "id": "sun", - "trigger": {"trigger": "event", "event_type": "test_event"}, + "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, { "id": "moon", - "trigger": {"trigger": "event", "event_type": 
"test_event"}, + "trigger": {"platform": "event", "event_type": "test_event"}, "action": {"service": "test.automation"}, }, ], diff --git a/tests/components/config/test_category_registry.py b/tests/components/config/test_category_registry.py index d4fe6a0c9b9..b4d171535b6 100644 --- a/tests/components/config/test_category_registry.py +++ b/tests/components/config/test_category_registry.py @@ -1,14 +1,10 @@ """Test category registry API.""" -from datetime import datetime - -from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.config import category_registry from homeassistant.core import HomeAssistant from homeassistant.helpers import category_registry as cr -from homeassistant.util.dt import utcnow from tests.common import ANY from tests.typing import MockHAClientWebSocket, WebSocketGenerator @@ -23,7 +19,6 @@ async def client_fixture( return await hass_ws_client(hass) -@pytest.mark.usefixtures("freezer") async def test_list_categories( client: MockHAClientWebSocket, category_registry: cr.CategoryRegistry, @@ -58,15 +53,11 @@ async def test_list_categories( assert len(msg["result"]) == 2 assert msg["result"][0] == { "category_id": category1.category_id, - "created_at": utcnow().timestamp(), - "modified_at": utcnow().timestamp(), "name": "Energy saving", "icon": "mdi:leaf", } assert msg["result"][1] == { "category_id": category2.category_id, - "created_at": utcnow().timestamp(), - "modified_at": utcnow().timestamp(), "name": "Something else", "icon": "mdi:home", } @@ -80,8 +71,6 @@ async def test_list_categories( assert len(msg["result"]) == 1 assert msg["result"][0] == { "category_id": category3.category_id, - "created_at": utcnow().timestamp(), - "modified_at": utcnow().timestamp(), "name": "Grocery stores", "icon": "mdi:store", } @@ -90,11 +79,8 @@ async def test_list_categories( async def test_create_category( client: MockHAClientWebSocket, category_registry: cr.CategoryRegistry, - freezer: FrozenDateTimeFactory, ) -> None: """Test create entry.""" - created1 = datetime(2024, 2, 14, 12, 0, 0) - freezer.move_to(created1) await client.send_json_auto_id( { "type": "config/category_registry/create", @@ -112,14 +98,9 @@ async def test_create_category( assert msg["result"] == { "icon": "mdi:leaf", "category_id": ANY, - "created_at": created1.timestamp(), - "modified_at": created1.timestamp(), "name": "Energy saving", } - created2 = datetime(2024, 3, 14, 12, 0, 0) - freezer.move_to(created2) - await client.send_json_auto_id( { "scope": "automation", @@ -136,14 +117,9 @@ async def test_create_category( assert msg["result"] == { "icon": None, "category_id": ANY, - "created_at": created2.timestamp(), - "modified_at": created2.timestamp(), "name": "Something else", } - created3 = datetime(2024, 4, 14, 12, 0, 0) - freezer.move_to(created3) - # Test adding the same one again in a different scope await client.send_json_auto_id( { @@ -163,8 +139,6 @@ async def test_create_category( assert msg["result"] == { "icon": "mdi:leaf", "category_id": ANY, - "created_at": created3.timestamp(), - "modified_at": created3.timestamp(), "name": "Energy saving", } @@ -275,11 +249,8 @@ async def test_delete_non_existing_category( async def test_update_category( client: MockHAClientWebSocket, category_registry: cr.CategoryRegistry, - freezer: FrozenDateTimeFactory, ) -> None: """Test update entry.""" - created = datetime(2024, 2, 14, 12, 0, 0) - freezer.move_to(created) category = category_registry.async_create( scope="automation", name="Energy saving", @@ -287,9 +258,6 @@ async 
def test_update_category( assert len(category_registry.categories) == 1 assert len(category_registry.categories["automation"]) == 1 - modified = datetime(2024, 3, 14, 12, 0, 0) - freezer.move_to(modified) - await client.send_json_auto_id( { "scope": "automation", @@ -307,14 +275,9 @@ async def test_update_category( assert msg["result"] == { "icon": "mdi:left", "category_id": category.category_id, - "created_at": created.timestamp(), - "modified_at": modified.timestamp(), "name": "ENERGY SAVING", } - modified = datetime(2024, 4, 14, 12, 0, 0) - freezer.move_to(modified) - await client.send_json_auto_id( { "scope": "automation", @@ -332,8 +295,6 @@ async def test_update_category( assert msg["result"] == { "icon": None, "category_id": category.category_id, - "created_at": created.timestamp(), - "modified_at": modified.timestamp(), "name": "Energy saving", } diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index b96aa9ae006..e023a60f215 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -1,13 +1,12 @@ """Test config entries API.""" from collections import OrderedDict -from collections.abc import Generator from http import HTTPStatus from unittest.mock import ANY, AsyncMock, patch from aiohttp.test_utils import TestClient -from freezegun.api import FrozenDateTimeFactory import pytest +from typing_extensions import Generator import voluptuous as vol from homeassistant import config_entries as core_ce, data_entry_flow, loader @@ -17,10 +16,8 @@ from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS from homeassistant.core import HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_flow, config_validation as cv -from homeassistant.helpers.discovery_flow import DiscoveryKey from homeassistant.loader import IntegrationNotFound from homeassistant.setup import async_setup_component -from homeassistant.util.dt import utcnow from tests.common import ( MockConfigEntry, @@ -72,7 +69,6 @@ def mock_flow() -> Generator[None]: yield -@pytest.mark.usefixtures("freezer") @pytest.mark.usefixtures("clear_handlers", "mock_flow") async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: """Test get entries.""" @@ -128,15 +124,12 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: data = await resp.json() for entry in data: entry.pop("entry_id") - timestamp = utcnow().timestamp() assert data == [ { - "created_at": timestamp, "disabled_by": None, "domain": "comp1", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -149,12 +142,10 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "title": "Test 1", }, { - "created_at": timestamp, "disabled_by": None, "domain": "comp2", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", @@ -167,12 +158,10 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "title": "Test 2", }, { - "created_at": timestamp, "disabled_by": core_ce.ConfigEntryDisabler.USER, "domain": "comp3", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - 
"modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -185,12 +174,10 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "title": "Test 3", }, { - "created_at": timestamp, "disabled_by": None, "domain": "comp4", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -203,12 +190,10 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "title": "Test 4", }, { - "created_at": timestamp, "disabled_by": None, "domain": "comp5", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -393,10 +378,6 @@ async def test_available_flows( ############################ -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.config.error.Should be unique."], -) async def test_initialize_flow(hass: HomeAssistant, client: TestClient) -> None: """Test we can initialize a flow.""" mock_platform(hass, "test.config_flow", None) @@ -504,10 +485,6 @@ async def test_initialize_flow_unauth( assert resp.status == HTTPStatus.UNAUTHORIZED -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.config.abort.bla"], -) async def test_abort(hass: HomeAssistant, client: TestClient) -> None: """Test a flow that aborts.""" mock_platform(hass, "test.config_flow", None) @@ -532,7 +509,7 @@ async def test_abort(hass: HomeAssistant, client: TestClient) -> None: } -@pytest.mark.usefixtures("enable_custom_integrations", "freezer") +@pytest.mark.usefixtures("enable_custom_integrations") async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: """Test a flow that creates an account.""" mock_platform(hass, "test.config_flow", None) @@ -559,7 +536,6 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: entries = hass.config_entries.async_entries("test") assert len(entries) == 1 - timestamp = utcnow().timestamp() data = await resp.json() data.pop("flow_id") assert data == { @@ -568,13 +544,11 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: "type": "create_entry", "version": 1, "result": { - "created_at": timestamp, "disabled_by": None, "domain": "test", "entry_id": entries[0].entry_id, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -593,7 +567,7 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: } -@pytest.mark.usefixtures("enable_custom_integrations", "freezer") +@pytest.mark.usefixtures("enable_custom_integrations") async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: """Test we can finish a two step flow.""" mock_integration( @@ -642,7 +616,6 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: entries = hass.config_entries.async_entries("test") assert len(entries) == 1 - timestamp = utcnow().timestamp() data = await resp.json() data.pop("flow_id") assert data == { @@ -651,13 +624,11 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: "title": "user-title", "version": 1, "result": { - "created_at": timestamp, "disabled_by": None, "domain": "test", "entry_id": 
entries[0].entry_id, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -776,10 +747,6 @@ async def test_get_progress_index_unauth( assert response["error"]["code"] == "unauthorized" -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.config.error.Should be unique."], -) async def test_get_progress_flow(hass: HomeAssistant, client: TestClient) -> None: """Test we can query the API for same result as we get from init a flow.""" mock_platform(hass, "test.config_flow", None) @@ -804,7 +771,9 @@ async def test_get_progress_flow(hass: HomeAssistant, client: TestClient) -> Non assert resp.status == HTTPStatus.OK data = await resp.json() - resp2 = await client.get(f"/api/config/config_entries/flow/{data['flow_id']}") + resp2 = await client.get( + "/api/config/config_entries/flow/{}".format(data["flow_id"]) + ) assert resp2.status == HTTPStatus.OK data2 = await resp2.json() @@ -812,10 +781,6 @@ async def test_get_progress_flow(hass: HomeAssistant, client: TestClient) -> Non assert data == data2 -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.config.error.Should be unique."], -) async def test_get_progress_flow_unauth( hass: HomeAssistant, client: TestClient, hass_admin_user: MockUser ) -> None: @@ -844,7 +809,9 @@ async def test_get_progress_flow_unauth( hass_admin_user.groups = [] - resp2 = await client.get(f"/api/config/config_entries/flow/{data['flow_id']}") + resp2 = await client.get( + "/api/config/config_entries/flow/{}".format(data["flow_id"]) + ) assert resp2.status == HTTPStatus.UNAUTHORIZED @@ -1092,7 +1059,6 @@ async def test_options_flow_with_invalid_data( assert data == {"errors": {"choices": "invalid is not a valid option"}} -@pytest.mark.usefixtures("freezer") async def test_get_single( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -1114,16 +1080,13 @@ async def test_get_single( ) response = await ws_client.receive_json() - timestamp = utcnow().timestamp() assert response["success"] assert response["result"]["config_entry"] == { - "created_at": timestamp, "disabled_by": None, "domain": "test", "entry_id": entry.entry_id, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1334,27 +1297,8 @@ async def test_disable_entry_nonexisting( assert response["error"]["code"] == "not_found" -@pytest.mark.parametrize( - ( - "flow_context", - "entry_discovery_keys", - ), - [ - ( - {}, - {}, - ), - ( - {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, - {"test": (DiscoveryKey(domain="test", key="blah", version=1),)}, - ), - ], -) async def test_ignore_flow( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - flow_context: dict, - entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test we can ignore a flow.""" assert await async_setup_component(hass, "config", {}) @@ -1377,7 +1321,7 @@ async def test_ignore_flow( with patch.dict(HANDLERS, {"test": TestFlow}): result = await hass.config_entries.flow.async_init( - "test", context={"source": core_ce.SOURCE_USER} | flow_context + "test", context={"source": core_ce.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM @@ -1399,8 +1343,6 @@ async def test_ignore_flow( assert 
entry.source == "ignore" assert entry.unique_id == "mock-unique-id" assert entry.title == "Test Integration" - assert entry.data == {} - assert entry.discovery_keys == entry_discovery_keys async def test_ignore_flow_nonexisting( @@ -1424,7 +1366,7 @@ async def test_ignore_flow_nonexisting( assert response["error"]["code"] == "not_found" -@pytest.mark.usefixtures("clear_handlers", "freezer") +@pytest.mark.usefixtures("clear_handlers") async def test_get_matching_entries_ws( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -1478,16 +1420,13 @@ async def test_get_matching_entries_ws( await ws_client.send_json_auto_id({"type": "config_entries/get"}) response = await ws_client.receive_json() - timestamp = utcnow().timestamp() assert response["result"] == [ { - "created_at": timestamp, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1500,13 +1439,11 @@ async def test_get_matching_entries_ws( "title": "Test 1", }, { - "created_at": timestamp, "disabled_by": None, "domain": "comp2", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", @@ -1519,13 +1456,11 @@ async def test_get_matching_entries_ws( "title": "Test 2", }, { - "created_at": timestamp, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1538,13 +1473,11 @@ async def test_get_matching_entries_ws( "title": "Test 3", }, { - "created_at": timestamp, "disabled_by": None, "domain": "comp4", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1557,13 +1490,11 @@ async def test_get_matching_entries_ws( "title": "Test 4", }, { - "created_at": timestamp, "disabled_by": None, "domain": "comp5", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1587,13 +1518,11 @@ async def test_get_matching_entries_ws( response = await ws_client.receive_json() assert response["result"] == [ { - "created_at": timestamp, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1616,13 +1545,11 @@ async def test_get_matching_entries_ws( response = await ws_client.receive_json() assert response["result"] == [ { - "created_at": timestamp, "disabled_by": None, "domain": "comp4", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1635,13 +1562,11 @@ async def test_get_matching_entries_ws( "title": "Test 4", }, { - "created_at": timestamp, "disabled_by": None, "domain": "comp5", 
"entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1664,13 +1589,11 @@ async def test_get_matching_entries_ws( response = await ws_client.receive_json() assert response["result"] == [ { - "created_at": timestamp, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1683,13 +1606,11 @@ async def test_get_matching_entries_ws( "title": "Test 1", }, { - "created_at": timestamp, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1718,13 +1639,11 @@ async def test_get_matching_entries_ws( assert response["result"] == [ { - "created_at": timestamp, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1737,13 +1656,11 @@ async def test_get_matching_entries_ws( "title": "Test 1", }, { - "created_at": timestamp, "disabled_by": None, "domain": "comp2", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", @@ -1756,13 +1673,11 @@ async def test_get_matching_entries_ws( "title": "Test 2", }, { - "created_at": timestamp, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1775,13 +1690,11 @@ async def test_get_matching_entries_ws( "title": "Test 3", }, { - "created_at": timestamp, "disabled_by": None, "domain": "comp4", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1794,13 +1707,11 @@ async def test_get_matching_entries_ws( "title": "Test 4", }, { - "created_at": timestamp, "disabled_by": None, "domain": "comp5", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1848,9 +1759,7 @@ async def test_get_matching_entries_ws( @pytest.mark.usefixtures("clear_handlers") async def test_subscribe_entries_ws( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - freezer: FrozenDateTimeFactory, + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test subscribe entries with the websocket api.""" assert await async_setup_component(hass, "config", {}) @@ -1896,18 +1805,15 @@ async def test_subscribe_entries_ws( assert response["type"] == "result" response = await ws_client.receive_json() assert response["id"] == 5 - created = utcnow().timestamp() assert response["event"] == [ { "type": None, "entry": { - "created_at": 
created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1923,13 +1829,11 @@ async def test_subscribe_entries_ws( { "type": None, "entry": { - "created_at": created, "disabled_by": None, "domain": "comp2", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", @@ -1945,13 +1849,11 @@ async def test_subscribe_entries_ws( { "type": None, "entry": { - "created_at": created, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1965,21 +1867,17 @@ async def test_subscribe_entries_ws( }, }, ] - freezer.tick() - modified = utcnow().timestamp() assert hass.config_entries.async_update_entry(entry, title="changed") response = await ws_client.receive_json() assert response["id"] == 5 assert response["event"] == [ { "entry": { - "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1994,21 +1892,17 @@ async def test_subscribe_entries_ws( "type": "updated", } ] - freezer.tick() - modified = utcnow().timestamp() await hass.config_entries.async_remove(entry.entry_id) response = await ws_client.receive_json() assert response["id"] == 5 assert response["event"] == [ { "entry": { - "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2023,20 +1917,17 @@ async def test_subscribe_entries_ws( "type": "removed", } ] - freezer.tick() await hass.config_entries.async_add(entry) response = await ws_client.receive_json() assert response["id"] == 5 assert response["event"] == [ { "entry": { - "created_at": entry.created_at.timestamp(), "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": entry.modified_at.timestamp(), "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2055,12 +1946,9 @@ async def test_subscribe_entries_ws( @pytest.mark.usefixtures("clear_handlers") async def test_subscribe_entries_ws_filtered( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - freezer: FrozenDateTimeFactory, + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test subscribe entries with the websocket api with a type filter.""" - created = utcnow().timestamp() assert await async_setup_component(hass, "config", {}) mock_integration(hass, MockModule("comp1")) mock_integration( @@ -2120,13 +2008,11 @@ async def test_subscribe_entries_ws_filtered( { "type": None, "entry": { - "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": 
created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2142,13 +2028,11 @@ async def test_subscribe_entries_ws_filtered( { "type": None, "entry": { - "created_at": created, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2162,8 +2046,6 @@ async def test_subscribe_entries_ws_filtered( }, }, ] - freezer.tick() - modified = utcnow().timestamp() assert hass.config_entries.async_update_entry(entry, title="changed") assert hass.config_entries.async_update_entry(entry3, title="changed too") assert hass.config_entries.async_update_entry(entry4, title="changed but ignored") @@ -2172,13 +2054,11 @@ async def test_subscribe_entries_ws_filtered( assert response["event"] == [ { "entry": { - "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2198,13 +2078,11 @@ async def test_subscribe_entries_ws_filtered( assert response["event"] == [ { "entry": { - "created_at": created, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2219,8 +2097,6 @@ async def test_subscribe_entries_ws_filtered( "type": "updated", } ] - freezer.tick() - modified = utcnow().timestamp() await hass.config_entries.async_remove(entry.entry_id) await hass.config_entries.async_remove(entry2.entry_id) response = await ws_client.receive_json() @@ -2228,13 +2104,11 @@ async def test_subscribe_entries_ws_filtered( assert response["event"] == [ { "entry": { - "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2249,20 +2123,17 @@ async def test_subscribe_entries_ws_filtered( "type": "removed", } ] - freezer.tick() await hass.config_entries.async_add(entry) response = await ws_client.receive_json() assert response["id"] == 5 assert response["event"] == [ { "entry": { - "created_at": entry.created_at.timestamp(), "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, - "modified_at": entry.modified_at.timestamp(), "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2367,15 +2238,8 @@ async def test_flow_with_multiple_schema_errors_base( } -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.config.abort.reconfigure_successful"], -) -@pytest.mark.usefixtures("enable_custom_integrations", "freezer") -async def test_supports_reconfigure( - hass: HomeAssistant, - client: TestClient, -) -> None: +@pytest.mark.usefixtures("enable_custom_integrations") +async def test_supports_reconfigure(hass: HomeAssistant, client: TestClient) -> None: """Test a flow that support reconfigure step.""" mock_platform(hass, "test.config_flow", None) @@ -2383,9 +2247,6 @@ async def test_supports_reconfigure( hass, MockModule("test", 
async_setup_entry=AsyncMock(return_value=True)) ) - entry = MockConfigEntry(domain="test", title="Test", entry_id="1") - entry.add_to_hass(hass) - class TestFlow(core_ce.ConfigFlow): VERSION = 1 @@ -2399,10 +2260,8 @@ async def test_supports_reconfigure( return self.async_show_form( step_id="reconfigure", data_schema=vol.Schema({}) ) - return self.async_update_reload_and_abort( - self._get_reconfigure_entry(), - title="Test Entry", - data={"secret": "account_token"}, + return self.async_create_entry( + title="Test Entry", data={"secret": "account_token"} ) with patch.dict(HANDLERS, {"test": TestFlow}): @@ -2441,9 +2300,30 @@ async def test_supports_reconfigure( data.pop("flow_id") assert data == { "handler": "test", - "reason": "reconfigure_successful", - "type": "abort", + "title": "Test Entry", + "type": "create_entry", + "version": 1, + "result": { + "disabled_by": None, + "domain": "test", + "entry_id": entries[0].entry_id, + "error_reason_translation_key": None, + "error_reason_translation_placeholders": None, + "pref_disable_new_entities": False, + "pref_disable_polling": False, + "reason": None, + "source": core_ce.SOURCE_RECONFIGURE, + "state": core_ce.ConfigEntryState.LOADED.value, + "supports_options": False, + "supports_reconfigure": True, + "supports_remove_device": False, + "supports_unload": False, + "title": "Test Entry", + }, + "description": None, "description_placeholders": None, + "options": {}, + "minor_version": 1, } diff --git a/tests/components/config/test_core.py b/tests/components/config/test_core.py index 4550f2e08e5..7d02063b2b9 100644 --- a/tests/components/config/test_core.py +++ b/tests/components/config/test_core.py @@ -5,11 +5,11 @@ from unittest.mock import Mock, patch import pytest +from homeassistant.bootstrap import async_setup_component from homeassistant.components import config from homeassistant.components.config import core from homeassistant.components.websocket_api import TYPE_RESULT from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util, location from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM diff --git a/tests/components/config/test_device_registry.py b/tests/components/config/test_device_registry.py index c840ce2bed2..0717bb6046d 100644 --- a/tests/components/config/test_device_registry.py +++ b/tests/components/config/test_device_registry.py @@ -1,17 +1,12 @@ """Test device_registry API.""" -from datetime import datetime - -from freezegun.api import FrozenDateTimeFactory import pytest from pytest_unordered import unordered from homeassistant.components.config import device_registry -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, MockModule, mock_integration from tests.typing import MockHAClientWebSocket, WebSocketGenerator @@ -31,7 +26,6 @@ async def client_fixture( return await hass_ws_client(hass) -@pytest.mark.usefixtures("freezer") async def test_list_devices( hass: HomeAssistant, client: MockHAClientWebSocket, @@ -67,7 +61,6 @@ async def test_list_devices( "config_entries": [entry.entry_id], "configuration_url": None, "connections": [["ethernet", "12:34:56:78:90:AB:CD:EF"]], - "created_at": utcnow().timestamp(), "disabled_by": None, "entry_type": None, "hw_version": None, @@ -75,8 +68,6 @@ 
async def test_list_devices( "labels": [], "manufacturer": "manufacturer", "model": "model", - "model_id": None, - "modified_at": utcnow().timestamp(), "name_by_user": None, "name": None, "primary_config_entry": entry.entry_id, @@ -89,7 +80,6 @@ async def test_list_devices( "config_entries": [entry.entry_id], "configuration_url": None, "connections": [], - "created_at": utcnow().timestamp(), "disabled_by": None, "entry_type": dr.DeviceEntryType.SERVICE, "hw_version": None, @@ -97,8 +87,6 @@ async def test_list_devices( "labels": [], "manufacturer": "manufacturer", "model": "model", - "model_id": None, - "modified_at": utcnow().timestamp(), "name_by_user": None, "name": None, "primary_config_entry": entry.entry_id, @@ -123,7 +111,6 @@ async def test_list_devices( "config_entries": [entry.entry_id], "configuration_url": None, "connections": [["ethernet", "12:34:56:78:90:AB:CD:EF"]], - "created_at": utcnow().timestamp(), "disabled_by": None, "entry_type": None, "hw_version": None, @@ -132,8 +119,6 @@ async def test_list_devices( "labels": [], "manufacturer": "manufacturer", "model": "model", - "model_id": None, - "modified_at": utcnow().timestamp(), "name_by_user": None, "name": None, "primary_config_entry": entry.entry_id, @@ -163,15 +148,12 @@ async def test_update_device( hass: HomeAssistant, client: MockHAClientWebSocket, device_registry: dr.DeviceRegistry, - freezer: FrozenDateTimeFactory, payload_key: str, payload_value: str | dr.DeviceEntryDisabler | None, ) -> None: """Test update entry.""" entry = MockConfigEntry(title=None) entry.add_to_hass(hass) - created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") - freezer.move_to(created_at) device = device_registry.async_get_or_create( config_entry_id=entry.entry_id, connections={("ethernet", "12:34:56:78:90:AB:CD:EF")}, @@ -182,9 +164,6 @@ async def test_update_device( assert not getattr(device, payload_key) - modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") - freezer.move_to(modified_at) - await client.send_json_auto_id( { "type": "config/device_registry/update", @@ -204,12 +183,6 @@ async def test_update_device( assert msg["result"][payload_key] == payload_value assert getattr(device, payload_key) == payload_value - for key, value in ( - ("created_at", created_at), - ("modified_at", modified_at if payload_value is not None else created_at), - ): - assert msg["result"][key] == value.timestamp() - assert getattr(device, key) == value assert isinstance(device.disabled_by, (dr.DeviceEntryDisabler, type(None))) @@ -218,13 +191,10 @@ async def test_update_device_labels( hass: HomeAssistant, client: MockHAClientWebSocket, device_registry: dr.DeviceRegistry, - freezer: FrozenDateTimeFactory, ) -> None: """Test update entry labels.""" entry = MockConfigEntry(title=None) entry.add_to_hass(hass) - created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") - freezer.move_to(created_at) device = device_registry.async_get_or_create( config_entry_id=entry.entry_id, connections={("ethernet", "12:34:56:78:90:AB:CD:EF")}, @@ -234,8 +204,6 @@ async def test_update_device_labels( ) assert not device.labels - modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") - freezer.move_to(modified_at) await client.send_json_auto_id( { @@ -256,12 +224,6 @@ async def test_update_device_labels( assert msg["result"]["labels"] == unordered(["label1", "label2"]) assert device.labels == {"label1", "label2"} - for key, value in ( - ("created_at", created_at), - ("modified_at", modified_at), - ): - 
assert msg["result"][key] == value.timestamp() - assert getattr(device, key) == value async def test_remove_config_entry_from_device( @@ -275,9 +237,7 @@ async def test_remove_config_entry_from_device( can_remove = False - async def async_remove_config_entry_device( - hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry - ) -> bool: + async def async_remove_config_entry_device(hass, config_entry, device_entry): return can_remove mock_integration( @@ -359,9 +319,7 @@ async def test_remove_config_entry_from_device_fails( assert await async_setup_component(hass, "config", {}) ws_client = await hass_ws_client(hass) - async def async_remove_config_entry_device( - hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry - ) -> bool: + async def async_remove_config_entry_device(hass, config_entry, device_entry): return True mock_integration( @@ -465,93 +423,3 @@ async def test_remove_config_entry_from_device_fails( assert not response["success"] assert response["error"]["code"] == "home_assistant_error" assert response["error"]["message"] == "Integration not found" - - -async def test_remove_config_entry_from_device_if_integration_remove( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - device_registry: dr.DeviceRegistry, -) -> None: - """Test removing config entry from device doesn't lead to an error when the integration removes the entry.""" - assert await async_setup_component(hass, "config", {}) - ws_client = await hass_ws_client(hass) - - can_remove = False - - async def async_remove_config_entry_device( - hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry - ) -> bool: - if can_remove: - device_registry.async_update_device( - device_entry.id, remove_config_entry_id=config_entry.entry_id - ) - return can_remove - - mock_integration( - hass, - MockModule( - "comp1", async_remove_config_entry_device=async_remove_config_entry_device - ), - ) - mock_integration( - hass, - MockModule( - "comp2", async_remove_config_entry_device=async_remove_config_entry_device - ), - ) - - entry_1 = MockConfigEntry( - domain="comp1", - title="Test 1", - source="bla", - ) - entry_1.supports_remove_device = True - entry_1.add_to_hass(hass) - - entry_2 = MockConfigEntry( - domain="comp1", - title="Test 1", - source="bla", - ) - entry_2.supports_remove_device = True - entry_2.add_to_hass(hass) - - device_registry.async_get_or_create( - config_entry_id=entry_1.entry_id, - connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, - ) - device_entry = device_registry.async_get_or_create( - config_entry_id=entry_2.entry_id, - connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, - ) - assert device_entry.config_entries == {entry_1.entry_id, entry_2.entry_id} - - # Try removing a config entry from the device, it should fail because - # async_remove_config_entry_device returns False - response = await ws_client.remove_device(device_entry.id, entry_1.entry_id) - - assert not response["success"] - assert response["error"]["code"] == "home_assistant_error" - - # Make async_remove_config_entry_device return True - can_remove = True - - # Remove the 1st config entry - response = await ws_client.remove_device(device_entry.id, entry_1.entry_id) - - assert response["success"] - assert response["result"]["config_entries"] == [entry_2.entry_id] - - # Check that the config entry was removed from the device - assert device_registry.async_get(device_entry.id).config_entries == { - entry_2.entry_id - } - - # Remove the 2nd config entry - 
response = await ws_client.remove_device(device_entry.id, entry_2.entry_id) - - assert response["success"] - assert response["result"] is None - - # This was the last config entry, the device is removed - assert not device_registry.async_get(device_entry.id) diff --git a/tests/components/config/test_entity_registry.py b/tests/components/config/test_entity_registry.py index bfbd69ec9bd..813ec654abb 100644 --- a/tests/components/config/test_entity_registry.py +++ b/tests/components/config/test_entity_registry.py @@ -1,8 +1,5 @@ """Test entity_registry API.""" -from datetime import datetime - -from freezegun.api import FrozenDateTimeFactory import pytest from pytest_unordered import unordered @@ -16,7 +13,6 @@ from homeassistant.helpers.entity_registry import ( RegistryEntryDisabler, RegistryEntryHider, ) -from homeassistant.util.dt import utcnow from tests.common import ( ANY, @@ -37,7 +33,6 @@ async def client( return await hass_ws_client(hass) -@pytest.mark.usefixtures("freezer") async def test_list_entities( hass: HomeAssistant, client: MockHAClientWebSocket ) -> None: @@ -67,7 +62,6 @@ async def test_list_entities( "area_id": None, "categories": {}, "config_entry_id": None, - "created_at": utcnow().timestamp(), "device_id": None, "disabled_by": None, "entity_category": None, @@ -77,7 +71,6 @@ async def test_list_entities( "icon": None, "id": ANY, "labels": [], - "modified_at": utcnow().timestamp(), "name": "Hello World", "options": {}, "original_name": None, @@ -89,7 +82,6 @@ async def test_list_entities( "area_id": None, "categories": {}, "config_entry_id": None, - "created_at": utcnow().timestamp(), "device_id": None, "disabled_by": None, "entity_category": None, @@ -99,7 +91,6 @@ async def test_list_entities( "icon": None, "id": ANY, "labels": [], - "modified_at": utcnow().timestamp(), "name": None, "options": {}, "original_name": None, @@ -138,7 +129,6 @@ async def test_list_entities( "area_id": None, "categories": {}, "config_entry_id": None, - "created_at": utcnow().timestamp(), "device_id": None, "disabled_by": None, "entity_category": None, @@ -148,7 +138,6 @@ async def test_list_entities( "icon": None, "id": ANY, "labels": [], - "modified_at": utcnow().timestamp(), "name": "Hello World", "options": {}, "original_name": None, @@ -245,7 +234,6 @@ async def test_list_entities_for_display( "ec": 1, "ei": "test_domain.test", "en": "Hello World", - "hn": True, "ic": "mdi:icon", "lb": [], "pl": "test_platform", @@ -255,7 +243,7 @@ async def test_list_entities_for_display( "ai": "area52", "di": "device123", "ei": "test_domain.nameless", - "hn": True, + "en": None, "lb": [], "pl": "test_platform", }, @@ -263,8 +251,6 @@ async def test_list_entities_for_display( "ai": "area52", "di": "device123", "ei": "test_domain.renamed", - "en": "User name", - "hn": True, "lb": [], "pl": "test_platform", }, @@ -329,7 +315,6 @@ async def test_list_entities_for_display( "ai": "area52", "di": "device123", "ei": "test_domain.test", - "hn": True, "lb": [], "en": "Hello World", "pl": "test_platform", @@ -340,8 +325,6 @@ async def test_list_entities_for_display( async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> None: """Test get entry.""" - name_created_at = datetime(1994, 2, 14, 12, 0, 0) - no_name_created_at = datetime(2024, 2, 14, 12, 0, 1) mock_registry( hass, { @@ -350,15 +333,11 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> unique_id="1234", platform="test_platform", name="Hello World", - created_at=name_created_at, - 
modified_at=name_created_at, ), "test_domain.no_name": RegistryEntry( entity_id="test_domain.no_name", unique_id="6789", platform="test_platform", - created_at=no_name_created_at, - modified_at=no_name_created_at, ), }, ) @@ -374,7 +353,6 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> "capabilities": None, "categories": {}, "config_entry_id": None, - "created_at": name_created_at.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -385,7 +363,6 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> "icon": None, "id": ANY, "labels": [], - "modified_at": name_created_at.timestamp(), "name": "Hello World", "options": {}, "original_device_class": None, @@ -410,7 +387,6 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> "capabilities": None, "categories": {}, "config_entry_id": None, - "created_at": no_name_created_at.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -421,7 +397,6 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> "icon": None, "id": ANY, "labels": [], - "modified_at": no_name_created_at.timestamp(), "name": None, "options": {}, "original_device_class": None, @@ -435,8 +410,6 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) -> None: """Test get entry.""" - name_created_at = datetime(1994, 2, 14, 12, 0, 0) - no_name_created_at = datetime(2024, 2, 14, 12, 0, 1) mock_registry( hass, { @@ -445,15 +418,11 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) unique_id="1234", platform="test_platform", name="Hello World", - created_at=name_created_at, - modified_at=name_created_at, ), "test_domain.no_name": RegistryEntry( entity_id="test_domain.no_name", unique_id="6789", platform="test_platform", - created_at=no_name_created_at, - modified_at=no_name_created_at, ), }, ) @@ -477,7 +446,6 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) "capabilities": None, "categories": {}, "config_entry_id": None, - "created_at": name_created_at.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -488,7 +456,6 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) "icon": None, "id": ANY, "labels": [], - "modified_at": name_created_at.timestamp(), "name": "Hello World", "options": {}, "original_device_class": None, @@ -504,7 +471,6 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) "capabilities": None, "categories": {}, "config_entry_id": None, - "created_at": no_name_created_at.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -515,7 +481,6 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) "icon": None, "id": ANY, "labels": [], - "modified_at": no_name_created_at.timestamp(), "name": None, "options": {}, "original_device_class": None, @@ -530,11 +495,9 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) async def test_update_entity( - hass: HomeAssistant, client: MockHAClientWebSocket, freezer: FrozenDateTimeFactory + hass: HomeAssistant, client: MockHAClientWebSocket ) -> None: """Test updating entity.""" - created = datetime.fromisoformat("2024-02-14T12:00:00.900075+00:00") - freezer.move_to(created) registry = mock_registry( hass, { @@ -557,9 +520,6 @@ async 
def test_update_entity( assert state.name == "before update" assert state.attributes[ATTR_ICON] == "icon:before update" - modified = datetime.fromisoformat("2024-07-17T13:30:00.900075+00:00") - freezer.move_to(modified) - # Update area, categories, device_class, hidden_by, icon, labels & name await client.send_json_auto_id( { @@ -584,7 +544,6 @@ async def test_update_entity( "area_id": "mock-area-id", "capabilities": None, "categories": {"scope1": "id", "scope2": "id"}, - "created_at": created.timestamp(), "config_entry_id": None, "device_class": "custom_device_class", "device_id": None, @@ -596,7 +555,6 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), - "modified_at": modified.timestamp(), "name": "after update", "options": {}, "original_device_class": None, @@ -612,9 +570,6 @@ async def test_update_entity( assert state.name == "after update" assert state.attributes[ATTR_ICON] == "icon:after update" - modified = datetime.fromisoformat("2024-07-20T00:00:00.900075+00:00") - freezer.move_to(modified) - # Update hidden_by to illegal value await client.send_json_auto_id( { @@ -642,13 +597,9 @@ async def test_update_entity( assert msg["success"] assert hass.states.get("test_domain.world") is None - entry = registry.entities["test_domain.world"] - assert entry.disabled_by is RegistryEntryDisabler.USER - assert entry.created_at == created - assert entry.modified_at == modified - - modified = datetime.fromisoformat("2024-07-21T00:00:00.900075+00:00") - freezer.move_to(modified) + assert ( + registry.entities["test_domain.world"].disabled_by is RegistryEntryDisabler.USER + ) # Update disabled_by to None await client.send_json_auto_id( @@ -668,7 +619,6 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope2": "id"}, "config_entry_id": None, - "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -679,7 +629,6 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), - "modified_at": modified.timestamp(), "name": "after update", "options": {}, "original_device_class": None, @@ -692,9 +641,6 @@ async def test_update_entity( "require_restart": True, } - modified = datetime.fromisoformat("2024-07-22T00:00:00.900075+00:00") - freezer.move_to(modified) - # Update entity option await client.send_json_auto_id( { @@ -714,7 +660,6 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope2": "id"}, "config_entry_id": None, - "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -725,7 +670,6 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), - "modified_at": modified.timestamp(), "name": "after update", "options": {"sensor": {"unit_of_measurement": "beard_second"}}, "original_device_class": None, @@ -737,9 +681,6 @@ async def test_update_entity( }, } - modified = datetime.fromisoformat("2024-07-23T00:00:00.900075+00:00") - freezer.move_to(modified) - # Add a category to the entity await client.send_json_auto_id( { @@ -759,7 +700,6 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope2": "id", "scope3": "id"}, "config_entry_id": None, - "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -770,7 +710,6 @@ async def 
test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), - "modified_at": modified.timestamp(), "name": "after update", "options": {"sensor": {"unit_of_measurement": "beard_second"}}, "original_device_class": None, @@ -782,9 +721,6 @@ async def test_update_entity( }, } - modified = datetime.fromisoformat("2024-07-24T00:00:00.900075+00:00") - freezer.move_to(modified) - # Move the entity to a different category await client.send_json_auto_id( { @@ -804,7 +740,6 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope2": "id", "scope3": "other_id"}, "config_entry_id": None, - "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -815,7 +750,6 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), - "modified_at": modified.timestamp(), "name": "after update", "options": {"sensor": {"unit_of_measurement": "beard_second"}}, "original_device_class": None, @@ -827,9 +761,6 @@ async def test_update_entity( }, } - modified = datetime.fromisoformat("2024-07-23T10:00:00.900075+00:00") - freezer.move_to(modified) - # Move the entity to a different category await client.send_json_auto_id( { @@ -849,7 +780,6 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope3": "other_id"}, "config_entry_id": None, - "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -860,7 +790,6 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), - "modified_at": modified.timestamp(), "name": "after update", "options": {"sensor": {"unit_of_measurement": "beard_second"}}, "original_device_class": None, @@ -874,11 +803,9 @@ async def test_update_entity( async def test_update_entity_require_restart( - hass: HomeAssistant, client: MockHAClientWebSocket, freezer: FrozenDateTimeFactory + hass: HomeAssistant, client: MockHAClientWebSocket ) -> None: """Test updating entity.""" - created = datetime.fromisoformat("2024-02-14T12:00:00+00:00") - freezer.move_to(created) entity_id = "test_domain.test_platform_1234" config_entry = MockConfigEntry(domain="test_platform") config_entry.add_to_hass(hass) @@ -890,9 +817,6 @@ async def test_update_entity_require_restart( state = hass.states.get(entity_id) assert state is not None - modified = datetime.fromisoformat("2024-07-20T13:30:00+00:00") - freezer.move_to(modified) - # UPDATE DISABLED_BY TO NONE await client.send_json_auto_id( { @@ -911,7 +835,6 @@ async def test_update_entity_require_restart( "capabilities": None, "categories": {}, "config_entry_id": config_entry.entry_id, - "created_at": created.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -922,7 +845,6 @@ async def test_update_entity_require_restart( "icon": None, "id": ANY, "labels": [], - "modified_at": created.timestamp(), "name": None, "options": {}, "original_device_class": None, @@ -987,11 +909,9 @@ async def test_enable_entity_disabled_device( async def test_update_entity_no_changes( - hass: HomeAssistant, client: MockHAClientWebSocket, freezer: FrozenDateTimeFactory + hass: HomeAssistant, client: MockHAClientWebSocket ) -> None: """Test update entity with no changes.""" - created = datetime.fromisoformat("2024-02-14T12:00:00.900075+00:00") - freezer.move_to(created) mock_registry( hass, { @@ -1012,9 +932,6 @@ async def 
test_update_entity_no_changes( assert state is not None assert state.name == "name of entity" - modified = datetime.fromisoformat("2024-07-20T13:30:00.900075+00:00") - freezer.move_to(modified) - await client.send_json_auto_id( { "type": "config/entity_registry/update", @@ -1032,7 +949,6 @@ async def test_update_entity_no_changes( "capabilities": None, "categories": {}, "config_entry_id": None, - "created_at": created.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -1043,7 +959,6 @@ async def test_update_entity_no_changes( "icon": None, "id": ANY, "labels": [], - "modified_at": created.timestamp(), "name": "name of entity", "options": {}, "original_device_class": None, @@ -1087,11 +1002,9 @@ async def test_update_nonexisting_entity(client: MockHAClientWebSocket) -> None: async def test_update_entity_id( - hass: HomeAssistant, client: MockHAClientWebSocket, freezer: FrozenDateTimeFactory + hass: HomeAssistant, client: MockHAClientWebSocket ) -> None: """Test update entity id.""" - created = datetime.fromisoformat("2024-02-14T12:00:00.900075+00:00") - freezer.move_to(created) mock_registry( hass, { @@ -1109,9 +1022,6 @@ async def test_update_entity_id( assert hass.states.get("test_domain.world") is not None - modified = datetime.fromisoformat("2024-07-20T13:30:00.900075+00:00") - freezer.move_to(modified) - await client.send_json_auto_id( { "type": "config/entity_registry/update", @@ -1129,7 +1039,6 @@ async def test_update_entity_id( "capabilities": None, "categories": {}, "config_entry_id": None, - "created_at": created.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -1140,7 +1049,6 @@ async def test_update_entity_id( "icon": None, "id": ANY, "labels": [], - "modified_at": modified.timestamp(), "name": None, "options": {}, "original_device_class": None, diff --git a/tests/components/config/test_floor_registry.py b/tests/components/config/test_floor_registry.py index da6e550b1f6..b4e3907bc4d 100644 --- a/tests/components/config/test_floor_registry.py +++ b/tests/components/config/test_floor_registry.py @@ -1,15 +1,11 @@ """Test floor registry API.""" -from datetime import datetime - -from freezegun.api import FrozenDateTimeFactory import pytest from pytest_unordered import unordered from homeassistant.components.config import floor_registry from homeassistant.core import HomeAssistant from homeassistant.helpers import floor_registry as fr -from homeassistant.util.dt import utcnow from tests.typing import MockHAClientWebSocket, WebSocketGenerator @@ -26,15 +22,9 @@ async def client_fixture( async def test_list_floors( client: MockHAClientWebSocket, floor_registry: fr.FloorRegistry, - freezer: FrozenDateTimeFactory, ) -> None: """Test list entries.""" - created_1 = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") - freezer.move_to(created_1) floor_registry.async_create("First floor") - - created_2 = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") - freezer.move_to(created_2) floor_registry.async_create( name="Second floor", aliases={"top floor", "attic"}, @@ -44,12 +34,6 @@ async def test_list_floors( assert len(floor_registry.floors) == 2 - # update first floor to change modified_at - floor_registry.async_update( - "first_floor", - name="First floor...", - ) - await client.send_json_auto_id({"type": "config/floor_registry/list"}) msg = await client.receive_json() @@ -57,25 +41,20 @@ async def test_list_floors( assert len(msg["result"]) == len(floor_registry.floors) assert msg["result"][0] == { "aliases": [], - 
"created_at": created_1.timestamp(), "icon": None, "floor_id": "first_floor", - "modified_at": created_2.timestamp(), - "name": "First floor...", + "name": "First floor", "level": None, } assert msg["result"][1] == { "aliases": unordered(["top floor", "attic"]), - "created_at": created_2.timestamp(), "icon": "mdi:home-floor-2", "floor_id": "second_floor", - "modified_at": created_2.timestamp(), "name": "Second floor", "level": 2, } -@pytest.mark.usefixtures("freezer") async def test_create_floor( client: MockHAClientWebSocket, floor_registry: fr.FloorRegistry, @@ -90,10 +69,8 @@ async def test_create_floor( assert len(floor_registry.floors) == 1 assert msg["result"] == { "aliases": [], - "created_at": utcnow().timestamp(), "icon": None, "floor_id": "first_floor", - "modified_at": utcnow().timestamp(), "name": "First floor", "level": None, } @@ -113,10 +90,8 @@ async def test_create_floor( assert len(floor_registry.floors) == 2 assert msg["result"] == { "aliases": unordered(["top floor", "attic"]), - "created_at": utcnow().timestamp(), "icon": "mdi:home-floor-2", "floor_id": "second_floor", - "modified_at": utcnow().timestamp(), "name": "Second floor", "level": 2, } @@ -188,15 +163,10 @@ async def test_delete_non_existing_floor( async def test_update_floor( client: MockHAClientWebSocket, floor_registry: fr.FloorRegistry, - freezer: FrozenDateTimeFactory, ) -> None: """Test update entry.""" - created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") - freezer.move_to(created_at) floor = floor_registry.async_create("First floor") assert len(floor_registry.floors) == 1 - modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") - freezer.move_to(modified_at) await client.send_json_auto_id( { @@ -214,16 +184,12 @@ async def test_update_floor( assert len(floor_registry.floors) == 1 assert msg["result"] == { "aliases": unordered(["top floor", "attic"]), - "created_at": created_at.timestamp(), "icon": "mdi:home-floor-2", "floor_id": floor.floor_id, - "modified_at": modified_at.timestamp(), "name": "Second floor", "level": 2, } - modified_at = datetime.fromisoformat("2024-07-16T13:50:00.900075+00:00") - freezer.move_to(modified_at) await client.send_json_auto_id( { "floor_id": floor.floor_id, @@ -240,10 +206,8 @@ async def test_update_floor( assert len(floor_registry.floors) == 1 assert msg["result"] == { "aliases": [], - "created_at": created_at.timestamp(), "icon": None, "floor_id": floor.floor_id, - "modified_at": modified_at.timestamp(), "name": "First floor", "level": None, } diff --git a/tests/components/config/test_label_registry.py b/tests/components/config/test_label_registry.py index 3eff759132f..040b3bfe28a 100644 --- a/tests/components/config/test_label_registry.py +++ b/tests/components/config/test_label_registry.py @@ -1,8 +1,5 @@ """Test label registry API.""" -from datetime import datetime - -from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.config import label_registry @@ -24,15 +21,9 @@ async def client_fixture( async def test_list_labels( client: MockHAClientWebSocket, label_registry: lr.LabelRegistry, - freezer: FrozenDateTimeFactory, ) -> None: """Test list entries.""" - created_1 = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") - freezer.move_to(created_1) label_registry.async_create("mock 1") - - created_2 = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") - freezer.move_to(created_2) label_registry.async_create( name="mock 2", color="#00FF00", @@ -42,12 +33,6 @@ async def 
test_list_labels( assert len(label_registry.labels) == 2 - # update mock 1 to change modified_at - label_registry.async_update( - "mock_1", - name="Mock 1...", - ) - await client.send_json_auto_id({"type": "config/label_registry/list"}) msg = await client.receive_json() @@ -55,20 +40,16 @@ async def test_list_labels( assert len(msg["result"]) == len(label_registry.labels) assert msg["result"][0] == { "color": None, - "created_at": created_1.timestamp(), "description": None, "icon": None, "label_id": "mock_1", - "modified_at": created_2.timestamp(), - "name": "Mock 1...", + "name": "mock 1", } assert msg["result"][1] == { "color": "#00FF00", - "created_at": created_2.timestamp(), "description": "This is the second label", "icon": "mdi:two", "label_id": "mock_2", - "modified_at": created_2.timestamp(), "name": "mock 2", } @@ -76,11 +57,8 @@ async def test_list_labels( async def test_create_label( client: MockHAClientWebSocket, label_registry: lr.LabelRegistry, - freezer: FrozenDateTimeFactory, ) -> None: """Test create entry.""" - created_1 = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") - freezer.move_to(created_1) await client.send_json_auto_id( { "name": "MOCK", @@ -93,16 +71,12 @@ async def test_create_label( assert len(label_registry.labels) == 1 assert msg["result"] == { "color": None, - "created_at": created_1.timestamp(), "description": None, "icon": None, "label_id": "mock", "name": "MOCK", - "modified_at": created_1.timestamp(), } - created_2 = datetime.fromisoformat("2024-07-17T13:30:00.900075+00:00") - freezer.move_to(created_2) await client.send_json_auto_id( { "id": 2, @@ -119,16 +93,12 @@ async def test_create_label( assert len(label_registry.labels) == 2 assert msg["result"] == { "color": "#00FF00", - "created_at": created_2.timestamp(), "description": "This is the second label", "icon": "mdi:two", "label_id": "mockery", - "modified_at": created_2.timestamp(), "name": "MOCKERY", } - created_3 = datetime.fromisoformat("2024-07-18T13:30:00.900075+00:00") - freezer.move_to(created_3) await client.send_json_auto_id( { "name": "MAGIC", @@ -144,11 +114,9 @@ async def test_create_label( assert len(label_registry.labels) == 3 assert msg["result"] == { "color": "indigo", - "created_at": created_3.timestamp(), "description": "This is the third label", "icon": "mdi:three", "label_id": "magic", - "modified_at": created_3.timestamp(), "name": "MAGIC", } @@ -214,17 +182,11 @@ async def test_delete_non_existing_label( async def test_update_label( client: MockHAClientWebSocket, label_registry: lr.LabelRegistry, - freezer: FrozenDateTimeFactory, ) -> None: """Test update entry.""" - created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") - freezer.move_to(created_at) label = label_registry.async_create("mock") assert len(label_registry.labels) == 1 - modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") - freezer.move_to(modified_at) - await client.send_json_auto_id( { "label_id": label.label_id, @@ -241,17 +203,12 @@ async def test_update_label( assert len(label_registry.labels) == 1 assert msg["result"] == { "color": "#00FF00", - "created_at": created_at.timestamp(), "description": "This is a label description", "icon": "mdi:test", "label_id": "mock", - "modified_at": modified_at.timestamp(), "name": "UPDATED", } - modified_at = datetime.fromisoformat("2024-07-16T13:50:00.900075+00:00") - freezer.move_to(modified_at) - await client.send_json_auto_id( { "label_id": label.label_id, @@ -268,17 +225,12 @@ async def test_update_label( assert 
len(label_registry.labels) == 1 assert msg["result"] == { "color": None, - "created_at": created_at.timestamp(), "description": None, "icon": None, "label_id": "mock", - "modified_at": modified_at.timestamp(), "name": "UPDATED AGAIN", } - modified_at = datetime.fromisoformat("2024-07-16T13:55:00.900075+00:00") - freezer.move_to(modified_at) - await client.send_json_auto_id( { "label_id": label.label_id, @@ -295,11 +247,9 @@ async def test_update_label( assert len(label_registry.labels) == 1 assert msg["result"] == { "color": "primary", - "created_at": created_at.timestamp(), "description": None, "icon": None, "label_id": "mock", - "modified_at": modified_at.timestamp(), "name": "UPDATED YET AGAIN", } diff --git a/tests/components/config/test_scene.py b/tests/components/config/test_scene.py index c4c207f33f9..22bcfa345a2 100644 --- a/tests/components/config/test_scene.py +++ b/tests/components/config/test_scene.py @@ -7,11 +7,11 @@ from unittest.mock import ANY, patch import pytest +from homeassistant.bootstrap import async_setup_component from homeassistant.components import config from homeassistant.components.config import scene from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.setup import async_setup_component from tests.typing import ClientSessionGenerator diff --git a/tests/components/config/test_script.py b/tests/components/config/test_script.py index 88245eb567f..4771576ed6e 100644 --- a/tests/components/config/test_script.py +++ b/tests/components/config/test_script.py @@ -7,12 +7,12 @@ from unittest.mock import patch import pytest +from homeassistant.bootstrap import async_setup_component from homeassistant.components import config from homeassistant.components.config import script from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.setup import async_setup_component from homeassistant.util import yaml from tests.typing import ClientSessionGenerator diff --git a/tests/components/config/test_view.py b/tests/components/config/test_view.py deleted file mode 100644 index 0bea9240a89..00000000000 --- a/tests/components/config/test_view.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Test config HTTP views.""" - -from collections.abc import Callable -from contextlib import AbstractContextManager, nullcontext as does_not_raise - -import pytest - -from homeassistant.components.config import view -from homeassistant.core import HomeAssistant - - -async def _mock_validator(hass: HomeAssistant, key: str, data: dict) -> dict: - """Mock data validator.""" - return data - - -@pytest.mark.parametrize( - ("data_schema", "data_validator", "expected_result"), - [ - (None, None, pytest.raises(ValueError)), - (None, _mock_validator, does_not_raise()), - (lambda x: x, None, does_not_raise()), - (lambda x: x, _mock_validator, pytest.raises(ValueError)), - ], -) -async def test_view_requires_data_schema_or_validator( - hass: HomeAssistant, - data_schema: Callable | None, - data_validator: Callable | None, - expected_result: AbstractContextManager, -) -> None: - """Test the view base class requires a schema or validator.""" - with expected_result: - view.BaseEditConfigView( - "test", - "test", - "test", - lambda x: "", - data_schema=data_schema, - data_validator=data_validator, - ) diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 363d39a2e63..42746525a0d 100644 --- 
a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -2,41 +2,18 @@ from __future__ import annotations -from collections.abc import Callable, Generator -from importlib.util import find_spec +from collections.abc import Callable from pathlib import Path -import string from typing import TYPE_CHECKING, Any -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import MagicMock, patch -from aiohasupervisor.models import ( - Discovery, - Repository, - ResolutionInfo, - StoreAddon, - StoreInfo, -) import pytest +from typing_extensions import Generator -from homeassistant.config_entries import ( - DISCOVERY_SOURCES, - ConfigEntriesFlowManager, - FlowResult, - OptionsFlowManager, -) from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import ( - FlowContext, - FlowHandler, - FlowManager, - FlowResultType, -) -from homeassistant.helpers.translation import async_get_translations if TYPE_CHECKING: - from homeassistant.components.hassio import AddonManager - from .conversation import MockAgent from .device_tracker.common import MockScanner from .light.common import MockLight @@ -44,9 +21,9 @@ if TYPE_CHECKING: from .switch.common import MockSwitch -@pytest.fixture(scope="session", autouse=find_spec("zeroconf") is not None) +@pytest.fixture(scope="session", autouse=True) def patch_zeroconf_multiple_catcher() -> Generator[None]: - """If installed, patch zeroconf wrapper that detects if multiple instances are used.""" + """Patch zeroconf wrapper that detects if multiple instances are used.""" with patch( "homeassistant.components.zeroconf.install_multiple_zeroconf_catcher", side_effect=lambda zc: None, @@ -147,9 +124,9 @@ def mock_conversation_agent_fixture(hass: HomeAssistant) -> MockAgent: return mock_conversation_agent_fixture_helper(hass) -@pytest.fixture(scope="session", autouse=find_spec("ffmpeg") is not None) +@pytest.fixture(scope="session", autouse=True) def prevent_ffmpeg_subprocess() -> Generator[None]: - """If installed, prevent ffmpeg from creating a subprocess.""" + """Prevent ffmpeg from creating a subprocess.""" with patch( "homeassistant.components.ffmpeg.FFVersion.get_version", return_value="6.0" ): @@ -203,512 +180,3 @@ def mock_legacy_device_tracker_setup() -> Callable[[HomeAssistant, MockScanner], from .device_tracker.common import mock_legacy_device_tracker_setup return mock_legacy_device_tracker_setup - - -@pytest.fixture(name="addon_manager") -def addon_manager_fixture( - hass: HomeAssistant, supervisor_client: AsyncMock -) -> AddonManager: - """Return an AddonManager instance.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_addon_manager - - return mock_addon_manager(hass) - - -@pytest.fixture(name="discovery_info") -def discovery_info_fixture() -> list[Discovery]: - """Return the discovery info from the supervisor.""" - return [] - - -@pytest.fixture(name="discovery_info_side_effect") -def discovery_info_side_effect_fixture() -> Any | None: - """Return the discovery info from the supervisor.""" - return None - - -@pytest.fixture(name="get_addon_discovery_info") -def get_addon_discovery_info_fixture( - supervisor_client: AsyncMock, - discovery_info: list[Discovery], - discovery_info_side_effect: Any | None, -) -> AsyncMock: - """Mock get add-on discovery info.""" - supervisor_client.discovery.list.return_value = discovery_info - supervisor_client.discovery.list.side_effect = discovery_info_side_effect - return 
supervisor_client.discovery.list - - -@pytest.fixture(name="get_discovery_message_side_effect") -def get_discovery_message_side_effect_fixture() -> Any | None: - """Side effect for getting a discovery message by uuid.""" - return None - - -@pytest.fixture(name="get_discovery_message") -def get_discovery_message_fixture( - supervisor_client: AsyncMock, get_discovery_message_side_effect: Any | None -) -> AsyncMock: - """Mock getting a discovery message by uuid.""" - supervisor_client.discovery.get.side_effect = get_discovery_message_side_effect - return supervisor_client.discovery.get - - -@pytest.fixture(name="addon_store_info_side_effect") -def addon_store_info_side_effect_fixture() -> Any | None: - """Return the add-on store info side effect.""" - return None - - -@pytest.fixture(name="addon_store_info") -def addon_store_info_fixture( - supervisor_client: AsyncMock, - addon_store_info_side_effect: Any | None, -) -> AsyncMock: - """Mock Supervisor add-on store info.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_addon_store_info - - return mock_addon_store_info(supervisor_client, addon_store_info_side_effect) - - -@pytest.fixture(name="addon_info_side_effect") -def addon_info_side_effect_fixture() -> Any | None: - """Return the add-on info side effect.""" - return None - - -@pytest.fixture(name="addon_info") -def addon_info_fixture( - supervisor_client: AsyncMock, addon_info_side_effect: Any | None -) -> AsyncMock: - """Mock Supervisor add-on info.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_addon_info - - return mock_addon_info(supervisor_client, addon_info_side_effect) - - -@pytest.fixture(name="addon_not_installed") -def addon_not_installed_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on not installed.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_addon_not_installed - - return mock_addon_not_installed(addon_store_info, addon_info) - - -@pytest.fixture(name="addon_installed") -def addon_installed_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on already installed but not running.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_addon_installed - - return mock_addon_installed(addon_store_info, addon_info) - - -@pytest.fixture(name="addon_running") -def addon_running_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on already running.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_addon_running - - return mock_addon_running(addon_store_info, addon_info) - - -@pytest.fixture(name="install_addon_side_effect") -def install_addon_side_effect_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> Any | None: - """Return the install add-on side effect.""" - - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_install_addon_side_effect - - return mock_install_addon_side_effect(addon_store_info, addon_info) - - -@pytest.fixture(name="install_addon") -def install_addon_fixture( - supervisor_client: AsyncMock, - install_addon_side_effect: Any | None, -) -> AsyncMock: - """Mock install add-on.""" - supervisor_client.store.install_addon.side_effect = install_addon_side_effect - return supervisor_client.store.install_addon - - -@pytest.fixture(name="start_addon_side_effect") -def start_addon_side_effect_fixture( - 
addon_store_info: AsyncMock, addon_info: AsyncMock -) -> Any | None: - """Return the start add-on options side effect.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_start_addon_side_effect - - return mock_start_addon_side_effect(addon_store_info, addon_info) - - -@pytest.fixture(name="start_addon") -def start_addon_fixture( - supervisor_client: AsyncMock, start_addon_side_effect: Any | None -) -> AsyncMock: - """Mock start add-on.""" - supervisor_client.addons.start_addon.side_effect = start_addon_side_effect - return supervisor_client.addons.start_addon - - -@pytest.fixture(name="restart_addon_side_effect") -def restart_addon_side_effect_fixture() -> Any | None: - """Return the restart add-on options side effect.""" - return None - - -@pytest.fixture(name="restart_addon") -def restart_addon_fixture( - supervisor_client: AsyncMock, - restart_addon_side_effect: Any | None, -) -> AsyncMock: - """Mock restart add-on.""" - supervisor_client.addons.restart_addon.side_effect = restart_addon_side_effect - return supervisor_client.addons.restart_addon - - -@pytest.fixture(name="stop_addon") -def stop_addon_fixture(supervisor_client: AsyncMock) -> AsyncMock: - """Mock stop add-on.""" - return supervisor_client.addons.stop_addon - - -@pytest.fixture(name="addon_options") -def addon_options_fixture(addon_info: AsyncMock) -> dict[str, Any]: - """Mock add-on options.""" - return addon_info.return_value.options - - -@pytest.fixture(name="set_addon_options_side_effect") -def set_addon_options_side_effect_fixture( - addon_options: dict[str, Any], -) -> Any | None: - """Return the set add-on options side effect.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_set_addon_options_side_effect - - return mock_set_addon_options_side_effect(addon_options) - - -@pytest.fixture(name="set_addon_options") -def set_addon_options_fixture( - supervisor_client: AsyncMock, - set_addon_options_side_effect: Any | None, -) -> AsyncMock: - """Mock set add-on options.""" - supervisor_client.addons.set_addon_options.side_effect = ( - set_addon_options_side_effect - ) - return supervisor_client.addons.set_addon_options - - -@pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture(supervisor_client: AsyncMock) -> AsyncMock: - """Mock uninstall add-on.""" - return supervisor_client.addons.uninstall_addon - - -@pytest.fixture(name="create_backup") -def create_backup_fixture() -> Generator[AsyncMock]: - """Mock create backup.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_create_backup - - yield from mock_create_backup() - - -@pytest.fixture(name="update_addon") -def update_addon_fixture(supervisor_client: AsyncMock) -> AsyncMock: - """Mock update add-on.""" - return supervisor_client.store.update_addon - - -@pytest.fixture(name="store_addons") -def store_addons_fixture() -> list[StoreAddon]: - """Mock store addons list.""" - return [] - - -@pytest.fixture(name="store_repositories") -def store_repositories_fixture() -> list[Repository]: - """Mock store repositories list.""" - return [] - - -@pytest.fixture(name="store_info") -def store_info_fixture( - supervisor_client: AsyncMock, - store_addons: list[StoreAddon], - store_repositories: list[Repository], -) -> AsyncMock: - """Mock store info.""" - supervisor_client.store.info.return_value = StoreInfo( - addons=store_addons, repositories=store_repositories - ) - return supervisor_client.store.info - - -@pytest.fixture(name="addon_stats") -def 
addon_stats_fixture(supervisor_client: AsyncMock) -> AsyncMock: - """Mock addon stats info.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_addon_stats - - return mock_addon_stats(supervisor_client) - - -@pytest.fixture(name="addon_changelog") -def addon_changelog_fixture(supervisor_client: AsyncMock) -> AsyncMock: - """Mock addon changelog.""" - supervisor_client.store.addon_changelog.return_value = "" - return supervisor_client.store.addon_changelog - - -@pytest.fixture(name="supervisor_is_connected") -def supervisor_is_connected_fixture(supervisor_client: AsyncMock) -> AsyncMock: - """Mock supervisor is connected.""" - supervisor_client.supervisor.ping.return_value = None - return supervisor_client.supervisor.ping - - -@pytest.fixture(name="resolution_info") -def resolution_info_fixture(supervisor_client: AsyncMock) -> AsyncMock: - """Mock resolution info from supervisor.""" - supervisor_client.resolution.info.return_value = ResolutionInfo( - suggestions=[], - unsupported=[], - unhealthy=[], - issues=[], - checks=[], - ) - return supervisor_client.resolution.info - - -@pytest.fixture(name="resolution_suggestions_for_issue") -def resolution_suggestions_for_issue_fixture(supervisor_client: AsyncMock) -> AsyncMock: - """Mock suggestions by issue from supervisor resolution.""" - supervisor_client.resolution.suggestions_for_issue.return_value = [] - return supervisor_client.resolution.suggestions_for_issue - - -@pytest.fixture(name="supervisor_client") -def supervisor_client() -> Generator[AsyncMock]: - """Mock the supervisor client.""" - supervisor_client = AsyncMock() - supervisor_client.addons = AsyncMock() - supervisor_client.discovery = AsyncMock() - supervisor_client.homeassistant = AsyncMock() - supervisor_client.os = AsyncMock() - supervisor_client.resolution = AsyncMock() - supervisor_client.supervisor = AsyncMock() - with ( - patch( - "homeassistant.components.hassio.get_supervisor_client", - return_value=supervisor_client, - ), - patch( - "homeassistant.components.hassio.handler.get_supervisor_client", - return_value=supervisor_client, - ), - patch( - "homeassistant.components.hassio.addon_manager.get_supervisor_client", - return_value=supervisor_client, - ), - patch( - "homeassistant.components.hassio.discovery.get_supervisor_client", - return_value=supervisor_client, - ), - patch( - "homeassistant.components.hassio.coordinator.get_supervisor_client", - return_value=supervisor_client, - ), - patch( - "homeassistant.components.hassio.issues.get_supervisor_client", - return_value=supervisor_client, - ), - patch( - "homeassistant.components.hassio.repairs.get_supervisor_client", - return_value=supervisor_client, - ), - ): - yield supervisor_client - - -def _validate_translation_placeholders( - full_key: str, - translation: str, - description_placeholders: dict[str, str] | None, -) -> str | None: - """Raise if translation exists with missing placeholders.""" - tuples = list(string.Formatter().parse(translation)) - for _, placeholder, _, _ in tuples: - if placeholder is None: - continue - if ( - description_placeholders is None - or placeholder not in description_placeholders - ): - ignore_translations[full_key] = ( - f"Description not found for placeholder `{placeholder}` in {full_key}" - ) - - -async def _validate_translation( - hass: HomeAssistant, - ignore_translations: dict[str, StoreInfo], - category: str, - component: str, - key: str, - description_placeholders: dict[str, str] | None, - *, - translation_required: bool = True, -) -> None: - 
"""Raise if translation doesn't exist.""" - full_key = f"component.{component}.{category}.{key}" - translations = await async_get_translations(hass, "en", category, [component]) - if (translation := translations.get(full_key)) is not None: - _validate_translation_placeholders( - full_key, translation, description_placeholders - ) - return - - if not translation_required: - return - - if full_key in ignore_translations: - ignore_translations[full_key] = "used" - return - - ignore_translations[full_key] = ( - f"Translation not found for {component}: `{category}.{key}`. " - f"Please add to homeassistant/components/{component}/strings.json" - ) - - -@pytest.fixture -def ignore_translations() -> str | list[str]: - """Ignore specific translations. - - Override or parametrize this fixture with a fixture that returns, - a list of translation that should be ignored. - """ - return [] - - -async def _check_config_flow_result_translations( - manager: FlowManager, - flow: FlowHandler, - result: FlowResult[FlowContext, str], - ignore_translations: dict[str, str], -) -> None: - if isinstance(manager, ConfigEntriesFlowManager): - category = "config" - integration = flow.handler - elif isinstance(manager, OptionsFlowManager): - category = "options" - integration = flow.hass.config_entries.async_get_entry(flow.handler).domain - else: - return - - # Check if this flow has been seen before - # Gets set to False on first run, and to True on subsequent runs - setattr(flow, "__flow_seen_before", hasattr(flow, "__flow_seen_before")) - - if result["type"] is FlowResultType.FORM: - if step_id := result.get("step_id"): - # neither title nor description are required - # - title defaults to integration name - # - description is optional - for header in ("title", "description"): - await _validate_translation( - flow.hass, - ignore_translations, - category, - integration, - f"step.{step_id}.{header}", - result["description_placeholders"], - translation_required=False, - ) - if errors := result.get("errors"): - for error in errors.values(): - await _validate_translation( - flow.hass, - ignore_translations, - category, - integration, - f"error.{error}", - result["description_placeholders"], - ) - return - - if result["type"] is FlowResultType.ABORT: - # We don't need translations for a discovery flow which immediately - # aborts, since such flows won't be seen by users - if not flow.__flow_seen_before and flow.source in DISCOVERY_SOURCES: - return - await _validate_translation( - flow.hass, - ignore_translations, - category, - integration, - f"abort.{result["reason"]}", - result["description_placeholders"], - ) - - -@pytest.fixture(autouse=True) -def check_translations(ignore_translations: str | list[str]) -> Generator[None]: - """Check that translation requirements are met. 
- - Current checks: - - data entry flow results (ConfigFlow/OptionsFlow) - """ - if not isinstance(ignore_translations, list): - ignore_translations = [ignore_translations] - - _ignore_translations = {k: "unused" for k in ignore_translations} - - # Keep reference to original functions - _original_flow_manager_async_handle_step = FlowManager._async_handle_step - - # Prepare override functions - async def _flow_manager_async_handle_step( - self: FlowManager, flow: FlowHandler, *args - ) -> FlowResult: - result = await _original_flow_manager_async_handle_step(self, flow, *args) - await _check_config_flow_result_translations( - self, flow, result, _ignore_translations - ) - return result - - # Use override functions - with patch( - "homeassistant.data_entry_flow.FlowManager._async_handle_step", - _flow_manager_async_handle_step, - ): - yield - - # Run final checks - unused_ignore = [k for k, v in _ignore_translations.items() if v == "unused"] - if unused_ignore: - pytest.fail( - f"Unused ignore translations: {', '.join(unused_ignore)}. " - "Please remove them from the ignore_translations fixture." - ) - for description in _ignore_translations.values(): - if description not in {"used", "unused"}: - pytest.fail(description) diff --git a/tests/components/conversation/snapshots/test_default_agent.ambr b/tests/components/conversation/snapshots/test_default_agent.ambr deleted file mode 100644 index b1f2ea0db75..00000000000 --- a/tests/components/conversation/snapshots/test_default_agent.ambr +++ /dev/null @@ -1,496 +0,0 @@ -# serializer version: 1 -# name: test_custom_sentences - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - ]), - 'targets': list([ - ]), - }), - 'language': 'en-us', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'You ordered a stout', - }), - }), - }), - }) -# --- -# name: test_custom_sentences.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - ]), - 'targets': list([ - ]), - }), - 'language': 'en-us', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'You ordered a lager', - }), - }), - }), - }) -# --- -# name: test_custom_sentences_config - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Stealth mode engaged', - }), - }), - }), - }) -# --- -# name: test_intent_alias_added_removed - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_intent_alias_added_removed.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 
'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_intent_alias_added_removed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called late added alias', - }), - }), - }), - }) -# --- -# name: test_intent_conversion_not_expose_new - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, kitchen light is not exposed', - }), - }), - }), - }) -# --- -# name: test_intent_conversion_not_expose_new.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_added_removed - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_added_removed.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.late', - 'name': 'friendly light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_added_removed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.late', - 'name': 'friendly light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_added_removed.3 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called late added light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_exposed - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 
'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_fail_if_unexposed - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, kitchen light is not exposed', - }), - }), - }), - }) -# --- -# name: test_intent_entity_remove_custom_name - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_remove_custom_name.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_remove_custom_name.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called renamed light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_renamed - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_renamed.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'renamed light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr deleted file mode 100644 index d9d859113f8..00000000000 --- a/tests/components/conversation/snapshots/test_http.ambr +++ /dev/null @@ -1,711 +0,0 @@ -# serializer version: 1 -# name: test_get_agent_list - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'af', - 'ar', - 'bg', - 'bn', - 'ca', - 'cs', - 'da', - 'de', - 'de-CH', - 'el', - 'en', - 'es', - 'et', - 'eu', - 'fa', - 'fi', - 'fr', - 'gl', - 'gu', - 'he', - 'hi', - 'hr', - 'hu', - 'id', - 'is', - 'it', - 'ka', - 'kn', - 'ko', - 'lb', - 'lt', - 'lv', - 'ml', - 'mn', - 'ms', - 'nb', - 'nl', - 'pl', - 'pt', - 'pt-br', - 'ro', - 'ru', - 'sk', - 'sl', - 'sr', - 
'sv', - 'sw', - 'te', - 'th', - 'tr', - 'uk', - 'ur', - 'vi', - 'zh-cn', - 'zh-hk', - 'zh-tw', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - 'smurfish', - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.1 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - 'smurfish', - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.2 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'en', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.3 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'en', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.4 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'de', - 'de-CH', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.5 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'de-CH', - 'de', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_http_api_handle_failure - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'failed_to_handle', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'An unexpected error occurred', - }), - }), - }), - }) -# --- -# name: test_http_api_no_match - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_http_api_unexpected_failure - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'unknown', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'An unexpected error occurred', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent[None] - dict({ - 'conversation_id': None, - 'response': dict({ - 
'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent[conversation.home_assistant] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent[homeassistant] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_ws_api[payload0] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload1] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'test-language', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload2] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload3] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload4] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'test-language', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload5] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_hass_agent_debug - dict({ - 'results': list([ 
- dict({ - 'details': dict({ - 'name': dict({ - 'name': 'name', - 'text': 'my cool light', - 'value': 'my cool light', - }), - }), - 'intent': dict({ - 'name': 'HassTurnOn', - }), - 'match': True, - 'sentence_template': ' on ( | [in ])', - 'slots': dict({ - 'name': 'my cool light', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.kitchen': dict({ - 'matched': True, - }), - }), - 'unmatched_slots': dict({ - }), - }), - dict({ - 'details': dict({ - 'name': dict({ - 'name': 'name', - 'text': 'my cool light', - 'value': 'my cool light', - }), - }), - 'intent': dict({ - 'name': 'HassTurnOff', - }), - 'match': True, - 'sentence_template': '[] ( | [in ]) [to] off', - 'slots': dict({ - 'name': 'my cool light', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.kitchen': dict({ - 'matched': True, - }), - }), - 'unmatched_slots': dict({ - }), - }), - dict({ - 'details': dict({ - 'area': dict({ - 'name': 'area', - 'text': 'kitchen', - 'value': 'kitchen', - }), - 'domain': dict({ - 'name': 'domain', - 'text': '', - 'value': 'light', - }), - }), - 'intent': dict({ - 'name': 'HassTurnOn', - }), - 'match': True, - 'sentence_template': ' on [all] in ', - 'slots': dict({ - 'area': 'kitchen', - 'domain': 'light', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.kitchen': dict({ - 'matched': True, - }), - }), - 'unmatched_slots': dict({ - }), - }), - dict({ - 'details': dict({ - 'area': dict({ - 'name': 'area', - 'text': 'kitchen', - 'value': 'kitchen', - }), - 'domain': dict({ - 'name': 'domain', - 'text': 'lights', - 'value': 'light', - }), - 'state': dict({ - 'name': 'state', - 'text': 'on', - 'value': 'on', - }), - }), - 'intent': dict({ - 'name': 'HassGetState', - }), - 'match': True, - 'sentence_template': '[tell me] how many {on_off_domains:domain} (is|are) {on_off_states:state} [in ]', - 'slots': dict({ - 'area': 'kitchen', - 'domain': 'lights', - 'state': 'on', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.kitchen': dict({ - 'matched': False, - }), - }), - 'unmatched_slots': dict({ - }), - }), - None, - ]), - }) -# --- -# name: test_ws_hass_agent_debug_custom_sentence - dict({ - 'results': list([ - dict({ - 'details': dict({ - 'beer_style': dict({ - 'name': 'beer_style', - 'text': 'lager', - 'value': 'lager', - }), - }), - 'file': 'en/beer.yaml', - 'intent': dict({ - 'name': 'OrderBeer', - }), - 'match': True, - 'sentence_template': "I'd like to order a {beer_style} [please]", - 'slots': dict({ - 'beer_style': 'lager', - }), - 'source': 'custom', - 'targets': dict({ - }), - 'unmatched_slots': dict({ - }), - }), - ]), - }) -# --- -# name: test_ws_hass_agent_debug_null_result - dict({ - 'results': list([ - None, - ]), - }) -# --- -# name: test_ws_hass_agent_debug_out_of_range - dict({ - 'results': list([ - dict({ - 'details': dict({ - 'brightness': dict({ - 'name': 'brightness', - 'text': '100', - 'value': 100, - }), - 'name': dict({ - 'name': 'name', - 'text': 'test light', - 'value': 'test light', - }), - }), - 'intent': dict({ - 'name': 'HassLightSet', - }), - 'match': True, - 'sentence_template': '[] brightness [to] ', - 'slots': dict({ - 'brightness': '100', - 'name': 'test light', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.demo_1234': dict({ - 'matched': True, - }), - }), - 'unmatched_slots': dict({ - }), - }), - ]), - }) -# --- -# name: test_ws_hass_agent_debug_out_of_range.1 - dict({ - 'results': list([ - dict({ - 'details': dict({ - 'name': dict({ - 'name': 'name', - 'text': 'test light', - 'value': 'test light', - }), - }), - 'intent': 
dict({ - 'name': 'HassLightSet', - }), - 'match': False, - 'sentence_template': '[] brightness [to] ', - 'slots': dict({ - 'name': 'test light', - }), - 'source': 'builtin', - 'targets': dict({ - }), - 'unmatched_slots': dict({ - 'brightness': 1001, - }), - }), - ]), - }) -# --- -# name: test_ws_hass_agent_debug_sentence_trigger - dict({ - 'results': list([ - dict({ - 'match': True, - 'sentence_template': 'hello[ world]', - 'source': 'trigger', - }), - ]), - }) -# --- diff --git a/tests/components/conversation/snapshots/test_init.ambr b/tests/components/conversation/snapshots/test_init.ambr index 0327be064d4..6264e61863f 100644 --- a/tests/components/conversation/snapshots/test_init.ambr +++ b/tests/components/conversation/snapshots/test_init.ambr @@ -24,6 +24,81 @@ }), }) # --- +# name: test_custom_sentences + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + ]), + 'targets': list([ + ]), + }), + 'language': 'en-us', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'You ordered a stout', + }), + }), + }), + }) +# --- +# name: test_custom_sentences.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + ]), + 'targets': list([ + ]), + }), + 'language': 'en-us', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'You ordered a lager', + }), + }), + }), + }) +# --- +# name: test_custom_sentences_config + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Stealth mode engaged', + }), + }), + }), + }) +# --- # name: test_get_agent_info dict({ 'id': 'conversation.home_assistant', @@ -42,6 +117,918 @@ 'name': 'Home Assistant', }) # --- +# name: test_get_agent_list + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + 'af', + 'ar', + 'bg', + 'bn', + 'ca', + 'cs', + 'da', + 'de', + 'de-CH', + 'el', + 'en', + 'es', + 'et', + 'eu', + 'fa', + 'fi', + 'fr', + 'fr-CA', + 'gl', + 'gu', + 'he', + 'hi', + 'hr', + 'hu', + 'id', + 'is', + 'it', + 'ka', + 'kn', + 'ko', + 'lb', + 'lt', + 'lv', + 'ml', + 'mn', + 'ms', + 'nb', + 'nl', + 'pl', + 'pt', + 'pt-br', + 'ro', + 'ru', + 'sk', + 'sl', + 'sr', + 'sv', + 'sw', + 'te', + 'tr', + 'uk', + 'ur', + 'vi', + 'zh-cn', + 'zh-hk', + 'zh-tw', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + 'smurfish', + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.1 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + 'smurfish', + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.2 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': 
list([ + 'en', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.3 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + 'en', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.4 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + 'de', + 'de-CH', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.5 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + 'de-CH', + 'de', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_http_api_handle_failure + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'failed_to_handle', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'An unexpected error occurred', + }), + }), + }), + }) +# --- +# name: test_http_api_no_match + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_http_api_unexpected_failure + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'unknown', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'An unexpected error occurred', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent[None] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent[conversation.home_assistant] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: 
test_http_processing_intent[homeassistant] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_alias_added_removed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_alias_added_removed.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_alias_added_removed.2 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called late added alias', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_conversion_not_expose_new + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called kitchen light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_conversion_not_expose_new.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_added_removed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_added_removed.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.late', + 'name': 
'friendly light', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_added_removed.2 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.late', + 'name': 'friendly light', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_added_removed.3 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called late added light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_exposed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_exposed.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_exposed.2 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called kitchen light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_exposed.3 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called my cool light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_exposed.4 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_exposed.5 + dict({ + 'conversation_id': None, + 
'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_renamed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_renamed.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'renamed light', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_renamed.2 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called kitchen light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_renamed.3 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_entity_renamed.4 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called renamed light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent_target_ha_agent + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- # name: test_turn_on_intent[None-turn kitchen on-None] dict({ 'conversation_id': None, @@ -402,3 +1389,361 @@ }), }) # --- +# name: test_ws_api[payload0] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': 
dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload1] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'test-language', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload2] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload3] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload4] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'test-language', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload5] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_hass_agent_debug + dict({ + 'results': list([ + dict({ + 'details': dict({ + 'name': dict({ + 'name': 'name', + 'text': 'my cool light', + 'value': 'my cool light', + }), + }), + 'intent': dict({ + 'name': 'HassTurnOn', + }), + 'match': True, + 'sentence_template': ' on ( | [in ])', + 'slots': dict({ + 'name': 'my cool light', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.kitchen': dict({ + 'matched': True, + }), + }), + 'unmatched_slots': dict({ + }), + }), + dict({ + 'details': dict({ + 'name': dict({ + 'name': 'name', + 'text': 'my cool light', + 'value': 'my cool light', + }), + }), + 'intent': dict({ + 'name': 'HassTurnOff', + }), + 'match': True, + 'sentence_template': '[] ( | [in ]) [to] off', + 'slots': dict({ + 'name': 'my cool light', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.kitchen': dict({ + 'matched': True, + }), + }), + 'unmatched_slots': dict({ + }), + }), + dict({ + 'details': dict({ + 'area': dict({ + 'name': 'area', + 'text': 'kitchen', + 'value': 'kitchen', + }), + 'domain': dict({ + 'name': 'domain', + 'text': '', + 'value': 'light', + }), + }), + 'intent': dict({ + 'name': 'HassTurnOn', + }), + 'match': True, + 'sentence_template': ' on [all] in ', + 'slots': dict({ + 'area': 'kitchen', + 'domain': 'light', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.kitchen': dict({ + 'matched': True, + }), + }), + 'unmatched_slots': dict({ + }), + }), + dict({ + 'details': dict({ + 'area': dict({ + 'name': 'area', + 'text': 'kitchen', + 'value': 'kitchen', + }), + 'domain': dict({ + 'name': 'domain', + 'text': 'lights', + 'value': 'light', + }), + 'state': dict({ + 'name': 'state', + 
'text': 'on', + 'value': 'on', + }), + }), + 'intent': dict({ + 'name': 'HassGetState', + }), + 'match': True, + 'sentence_template': '[tell me] how many {on_off_domains:domain} (is|are) {on_off_states:state} [in ]', + 'slots': dict({ + 'area': 'kitchen', + 'domain': 'lights', + 'state': 'on', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.kitchen': dict({ + 'matched': False, + }), + }), + 'unmatched_slots': dict({ + }), + }), + None, + ]), + }) +# --- +# name: test_ws_hass_agent_debug_custom_sentence + dict({ + 'results': list([ + dict({ + 'details': dict({ + 'beer_style': dict({ + 'name': 'beer_style', + 'text': 'lager', + 'value': 'lager', + }), + }), + 'file': 'en/beer.yaml', + 'intent': dict({ + 'name': 'OrderBeer', + }), + 'match': True, + 'sentence_template': "I'd like to order a {beer_style} [please]", + 'slots': dict({ + 'beer_style': 'lager', + }), + 'source': 'custom', + 'targets': dict({ + }), + 'unmatched_slots': dict({ + }), + }), + ]), + }) +# --- +# name: test_ws_hass_agent_debug_null_result + dict({ + 'results': list([ + None, + ]), + }) +# --- +# name: test_ws_hass_agent_debug_out_of_range + dict({ + 'results': list([ + dict({ + 'details': dict({ + 'brightness': dict({ + 'name': 'brightness', + 'text': '100%', + 'value': 100, + }), + 'name': dict({ + 'name': 'name', + 'text': 'test light', + 'value': 'test light', + }), + }), + 'intent': dict({ + 'name': 'HassLightSet', + }), + 'match': True, + 'sentence_template': '[] brightness [to] ', + 'slots': dict({ + 'brightness': '100%', + 'name': 'test light', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.demo_1234': dict({ + 'matched': True, + }), + }), + 'unmatched_slots': dict({ + }), + }), + ]), + }) +# --- +# name: test_ws_hass_agent_debug_out_of_range.1 + dict({ + 'results': list([ + dict({ + 'details': dict({ + 'name': dict({ + 'name': 'name', + 'text': 'test light', + 'value': 'test light', + }), + }), + 'intent': dict({ + 'name': 'HassLightSet', + }), + 'match': False, + 'sentence_template': '[] brightness [to] ', + 'slots': dict({ + 'name': 'test light', + }), + 'source': 'builtin', + 'targets': dict({ + }), + 'unmatched_slots': dict({ + 'brightness': 1001, + }), + }), + ]), + }) +# --- +# name: test_ws_hass_agent_debug_sentence_trigger + dict({ + 'results': list([ + dict({ + 'match': True, + 'sentence_template': 'hello[ world]', + 'source': 'trigger', + }), + ]), + }) +# --- diff --git a/tests/components/conversation/test_agent_manager.py b/tests/components/conversation/test_agent_manager.py deleted file mode 100644 index 47b58a522a8..00000000000 --- a/tests/components/conversation/test_agent_manager.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Test agent manager.""" - -from unittest.mock import patch - -from homeassistant.components.conversation import ConversationResult, async_converse -from homeassistant.core import Context, HomeAssistant -from homeassistant.helpers.intent import IntentResponse - - -async def test_async_converse(hass: HomeAssistant, init_components) -> None: - """Test the async_converse method.""" - context = Context() - with patch( - "homeassistant.components.conversation.default_agent.DefaultAgent.async_process", - return_value=ConversationResult(response=IntentResponse(language="test lang")), - ) as mock_process: - await async_converse( - hass, - text="test command", - conversation_id="test id", - context=context, - language="test lang", - agent_id="conversation.home_assistant", - device_id="test device id", - ) - - assert mock_process.called - conversation_input = 
mock_process.call_args[0][0] - assert conversation_input.text == "test command" - assert conversation_input.conversation_id == "test id" - assert conversation_input.context is context - assert conversation_input.language == "test lang" - assert conversation_input.agent_id == "conversation.home_assistant" - assert conversation_input.device_id == "test device id" diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 3c6b463670a..f8a021475d5 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -1,22 +1,14 @@ """Test for the default agent.""" from collections import defaultdict -import os -import tempfile from typing import Any from unittest.mock import AsyncMock, patch from hassil.recognize import Intent, IntentData, MatchEntity, RecognizeResult import pytest -from syrupy import SnapshotAssertion -import yaml from homeassistant.components import conversation, cover, media_player from homeassistant.components.conversation import default_agent -from homeassistant.components.conversation.const import DATA_DEFAULT_ENTITY -from homeassistant.components.conversation.default_agent import METADATA_CUSTOM_SENTENCE -from homeassistant.components.conversation.models import ConversationInput -from homeassistant.components.cover import SERVICE_OPEN_COVER from homeassistant.components.homeassistant.exposed_entities import ( async_get_assistant_settings, ) @@ -25,52 +17,21 @@ from homeassistant.components.intent import ( TimerInfo, async_register_timer_handler, ) -from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - ATTR_FRIENDLY_NAME, - STATE_CLOSED, - STATE_ON, - STATE_UNKNOWN, - EntityCategory, -) -from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, - Context, - HomeAssistant, - callback, -) +from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, STATE_CLOSED +from homeassistant.core import DOMAIN as HASS_DOMAIN, Context, HomeAssistant, callback from homeassistant.helpers import ( area_registry as ar, device_registry as dr, + entity, entity_registry as er, floor_registry as fr, intent, ) from homeassistant.setup import async_setup_component -from . import expose_entity, expose_new +from . 
import expose_entity -from tests.common import ( - MockConfigEntry, - MockUser, - async_mock_service, - setup_test_component_platform, -) -from tests.components.light.common import MockLight - - -class OrderBeerIntentHandler(intent.IntentHandler): - """Handle OrderBeer intent.""" - - intent_type = "OrderBeer" - - async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: - """Return speech response.""" - beer_style = intent_obj.slots["beer_style"]["value"] - response = intent_obj.create_response() - response.async_set_speech(f"You ordered a {beer_style}") - return response +from tests.common import MockConfigEntry, async_mock_service @pytest.fixture @@ -86,8 +47,8 @@ async def init_components(hass: HomeAssistant) -> None: [ {"hidden_by": er.RegistryEntryHider.USER}, {"hidden_by": er.RegistryEntryHider.INTEGRATION}, - {"entity_category": EntityCategory.CONFIG}, - {"entity_category": EntityCategory.DIAGNOSTIC}, + {"entity_category": entity.EntityCategory.CONFIG}, + {"entity_category": entity.EntityCategory.DIAGNOSTIC}, ], ) @pytest.mark.usefixtures("init_components") @@ -100,7 +61,7 @@ async def test_hidden_entities_skipped( "light", "demo", "1234", suggested_object_id="Test light", **er_kwargs ) hass.states.async_set("light.test_light", "off") - calls = async_mock_service(hass, HOMEASSISTANT_DOMAIN, "turn_on") + calls = async_mock_service(hass, HASS_DOMAIN, "turn_on") result = await conversation.async_converse( hass, "turn on test light", None, Context(), None ) @@ -205,21 +166,13 @@ async def test_exposed_areas( @pytest.mark.usefixtures("init_components") async def test_conversation_agent(hass: HomeAssistant) -> None: """Test DefaultAgent.""" - agent = hass.data[DATA_DEFAULT_ENTITY] + agent = default_agent.async_get_default_agent(hass) with patch( "homeassistant.components.conversation.default_agent.get_languages", return_value=["dwarvish", "elvish", "entish"], ): assert agent.supported_languages == ["dwarvish", "elvish", "entish"] - state = hass.states.get(agent.entity_id) - assert state - assert state.state == STATE_UNKNOWN - assert ( - state.attributes["supported_features"] - == conversation.ConversationEntityFeature.CONTROL - ) - async def test_expose_flag_automatically_set( hass: HomeAssistant, @@ -310,79 +263,13 @@ async def test_unexposed_entities_skipped( assert result.response.matched_states[0].entity_id == exposed_light.entity_id -@pytest.mark.usefixtures("init_components") -async def test_duplicated_names_resolved_with_device_area( - hass: HomeAssistant, - area_registry: ar.AreaRegistry, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test entities deduplication with device ID context.""" - area_kitchen = area_registry.async_get_or_create("kitchen_id") - area_bedroom = area_registry.async_get_or_create("bedroom_id") - - kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") - bedroom_light = entity_registry.async_get_or_create("light", "demo", "5678") - - # Same name and alias - for light in (kitchen_light, bedroom_light): - light = entity_registry.async_update_entity( - light.entity_id, - name="top light", - aliases={"overhead light"}, - ) - hass.states.async_set( - light.entity_id, - "off", - attributes={ATTR_FRIENDLY_NAME: light.name}, - ) - # Different areas - kitchen_light = entity_registry.async_update_entity( - kitchen_light.entity_id, - area_id=area_kitchen.id, - ) - bedroom_light = entity_registry.async_update_entity( - bedroom_light.entity_id, - area_id=area_bedroom.id, - ) - - # 
Pipeline device in bedroom area - entry = MockConfigEntry() - entry.add_to_hass(hass) - assist_device = device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - connections=set(), - identifiers={("demo", "id-1234")}, - ) - assist_device = device_registry.async_update_device( - assist_device.id, - area_id=area_bedroom.id, - ) - - # Check name and alias - for name in ("top light", "overhead light"): - # Only one light should be turned on - calls = async_mock_service(hass, "light", "turn_on") - result = await conversation.async_converse( - hass, f"turn on {name}", None, Context(), device_id=assist_device.id - ) - - assert len(calls) == 1 - assert calls[0].data["entity_id"][0] == bedroom_light.entity_id - - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert result.response.intent is not None - assert result.response.intent.slots.get("name", {}).get("value") == name - assert result.response.intent.slots.get("name", {}).get("text") == name - - @pytest.mark.usefixtures("init_components") async def test_trigger_sentences(hass: HomeAssistant) -> None: """Test registering/unregistering/matching a few trigger sentences.""" trigger_sentences = ["It's party time", "It is time to party"] trigger_response = "Cowabunga!" - agent = hass.data[DATA_DEFAULT_ENTITY] + agent = default_agent.async_get_default_agent(hass) assert isinstance(agent, default_agent.DefaultAgent) callback = AsyncMock(return_value=trigger_response) @@ -418,44 +305,6 @@ async def test_trigger_sentences(hass: HomeAssistant) -> None: assert len(callback.mock_calls) == 0 -@pytest.mark.parametrize( - ("language", "expected"), - [("en", "English done"), ("de", "German done"), ("not_translated", "Done")], -) -@pytest.mark.usefixtures("init_components") -async def test_trigger_sentence_response_translation( - hass: HomeAssistant, language: str, expected: str -) -> None: - """Test translation of default response 'done'.""" - hass.config.language = language - - agent = hass.data[DATA_DEFAULT_ENTITY] - assert isinstance(agent, default_agent.DefaultAgent) - - translations = { - "en": {"component.conversation.conversation.agent.done": "English done"}, - "de": {"component.conversation.conversation.agent.done": "German done"}, - "not_translated": {}, - } - - with patch( - "homeassistant.components.conversation.default_agent.translation.async_get_translations", - return_value=translations.get(language), - ): - unregister = agent.register_trigger( - ["test sentence"], AsyncMock(return_value=None) - ) - result = await conversation.async_converse( - hass, "test sentence", None, Context() - ) - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert result.response.speech == { - "plain": {"speech": expected, "extra_data": None} - } - - unregister() - - @pytest.mark.usefixtures("init_components", "sl_setup") async def test_shopping_list_add_item(hass: HomeAssistant) -> None: """Test adding an item to the shopping list through the default agent.""" @@ -469,7 +318,7 @@ async def test_shopping_list_add_item(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("init_components") -async def test_nevermind_intent(hass: HomeAssistant) -> None: +async def test_nevermind_item(hass: HomeAssistant) -> None: """Test HassNevermind intent through the default agent.""" result = await conversation.async_converse(hass, "nevermind", None, Context()) assert result.response.intent is not None @@ -479,17 +328,6 @@ async def test_nevermind_intent(hass: HomeAssistant) -> None: assert not 
result.response.speech -@pytest.mark.usefixtures("init_components") -async def test_respond_intent(hass: HomeAssistant) -> None: - """Test HassRespond intent through the default agent.""" - result = await conversation.async_converse(hass, "hello", None, Context()) - assert result.response.intent is not None - assert result.response.intent.intent_type == intent.INTENT_RESPOND - - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert result.response.speech["plain"]["speech"] == "Hello from Home Assistant." - - @pytest.mark.usefixtures("init_components") async def test_device_area_context( hass: HomeAssistant, @@ -631,7 +469,7 @@ async def test_device_area_context( @pytest.mark.usefixtures("init_components") async def test_error_no_device(hass: HomeAssistant) -> None: - """Test error message when device/entity doesn't exist.""" + """Test error message when device/entity is missing.""" result = await conversation.async_converse( hass, "turn on missing entity", None, Context(), None ) @@ -644,27 +482,9 @@ async def test_error_no_device(hass: HomeAssistant) -> None: ) -@pytest.mark.usefixtures("init_components") -async def test_error_no_device_exposed(hass: HomeAssistant) -> None: - """Test error message when device/entity exists but is not exposed.""" - hass.states.async_set("light.kitchen_light", "off") - expose_entity(hass, "light.kitchen_light", False) - - result = await conversation.async_converse( - hass, "turn on kitchen light", None, Context(), None - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS - assert ( - result.response.speech["plain"]["speech"] - == "Sorry, kitchen light is not exposed" - ) - - @pytest.mark.usefixtures("init_components") async def test_error_no_area(hass: HomeAssistant) -> None: - """Test error message when area doesn't exist.""" + """Test error message when area is missing.""" result = await conversation.async_converse( hass, "turn on the lights in missing area", None, Context(), None ) @@ -679,7 +499,7 @@ async def test_error_no_area(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("init_components") async def test_error_no_floor(hass: HomeAssistant) -> None: - """Test error message when floor doesn't exist.""" + """Test error message when floor is missing.""" result = await conversation.async_converse( hass, "turn on all the lights on missing floor", None, Context(), None ) @@ -696,7 +516,7 @@ async def test_error_no_floor(hass: HomeAssistant) -> None: async def test_error_no_device_in_area( hass: HomeAssistant, area_registry: ar.AreaRegistry ) -> None: - """Test error message when area exists but is does not contain a device/entity.""" + """Test error message when area is missing a device/entity.""" area_kitchen = area_registry.async_get_or_create("kitchen_id") area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") result = await conversation.async_converse( @@ -711,119 +531,6 @@ async def test_error_no_device_in_area( ) -@pytest.mark.usefixtures("init_components") -async def test_error_no_device_on_floor( - hass: HomeAssistant, - floor_registry: fr.FloorRegistry, -) -> None: - """Test error message when floor exists but is does not contain a device/entity.""" - floor_registry.async_create("ground") - result = await conversation.async_converse( - hass, "turn on missing entity on ground floor", None, Context(), None - ) - - assert result.response.response_type == 
intent.IntentResponseType.ERROR - assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS - assert ( - result.response.speech["plain"]["speech"] - == "Sorry, I am not aware of any device called missing entity on ground floor" - ) - - -@pytest.mark.usefixtures("init_components") -async def test_error_no_device_on_floor_exposed( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - area_registry: ar.AreaRegistry, - floor_registry: fr.FloorRegistry, -) -> None: - """Test error message when a device/entity exists on a floor but isn't exposed.""" - floor_ground = floor_registry.async_create("ground") - - area_kitchen = area_registry.async_get_or_create("kitchen_id") - area_kitchen = area_registry.async_update( - area_kitchen.id, name="kitchen", floor_id=floor_ground.floor_id - ) - - kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") - kitchen_light = entity_registry.async_update_entity( - kitchen_light.entity_id, - name="test light", - area_id=area_kitchen.id, - ) - hass.states.async_set( - kitchen_light.entity_id, - "off", - attributes={ATTR_FRIENDLY_NAME: kitchen_light.name}, - ) - expose_entity(hass, kitchen_light.entity_id, False) - await hass.async_block_till_done() - - # We don't have a sentence for turning on devices by floor - name = MatchEntity(name="name", value=kitchen_light.name, text=kitchen_light.name) - floor = MatchEntity(name="floor", value=floor_ground.name, text=floor_ground.name) - recognize_result = RecognizeResult( - intent=Intent("HassTurnOn"), - intent_data=IntentData([]), - entities={"name": name, "floor": floor}, - entities_list=[name, floor], - ) - - with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - return_value=recognize_result, - ): - result = await conversation.async_converse( - hass, "turn on test light on the ground floor", None, Context(), None - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert ( - result.response.error_code - == intent.IntentResponseErrorCode.NO_VALID_TARGETS - ) - assert ( - result.response.speech["plain"]["speech"] - == "Sorry, test light in the ground floor is not exposed" - ) - - -@pytest.mark.usefixtures("init_components") -async def test_error_no_device_in_area_exposed( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - area_registry: ar.AreaRegistry, -) -> None: - """Test error message when a device/entity exists in an area but isn't exposed.""" - area_kitchen = area_registry.async_get_or_create("kitchen_id") - area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") - - kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") - kitchen_light = entity_registry.async_update_entity( - kitchen_light.entity_id, - name="test light", - area_id=area_kitchen.id, - ) - hass.states.async_set( - kitchen_light.entity_id, - "off", - attributes={ATTR_FRIENDLY_NAME: kitchen_light.name}, - ) - expose_entity(hass, kitchen_light.entity_id, False) - await hass.async_block_till_done() - - result = await conversation.async_converse( - hass, "turn on test light in the kitchen", None, Context(), None - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS - assert ( - result.response.speech["plain"]["speech"] - == "Sorry, test light in the kitchen area is not exposed" - ) - - @pytest.mark.usefixtures("init_components") async def test_error_no_domain(hass: 
HomeAssistant) -> None: """Test error message when no devices/entities exist for a domain.""" @@ -838,8 +545,8 @@ async def test_error_no_domain(hass: HomeAssistant) -> None: ) with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - return_value=recognize_result, + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[recognize_result], ): result = await conversation.async_converse( hass, "turn on the fans", None, Context(), None @@ -856,38 +563,6 @@ async def test_error_no_domain(hass: HomeAssistant) -> None: ) -@pytest.mark.usefixtures("init_components") -async def test_error_no_domain_exposed(hass: HomeAssistant) -> None: - """Test error message when devices/entities exist for a domain but are not exposed.""" - hass.states.async_set("fan.test_fan", "off") - expose_entity(hass, "fan.test_fan", False) - await hass.async_block_till_done() - - # We don't have a sentence for turning on all fans - fan_domain = MatchEntity(name="domain", value="fan", text="fans") - recognize_result = RecognizeResult( - intent=Intent("HassTurnOn"), - intent_data=IntentData([]), - entities={"domain": fan_domain}, - entities_list=[fan_domain], - ) - - with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - return_value=recognize_result, - ): - result = await conversation.async_converse( - hass, "turn on the fans", None, Context(), None - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert ( - result.response.error_code - == intent.IntentResponseErrorCode.NO_VALID_TARGETS - ) - assert result.response.speech["plain"]["speech"] == "Sorry, no fan is exposed" - - @pytest.mark.usefixtures("init_components") async def test_error_no_domain_in_area( hass: HomeAssistant, area_registry: ar.AreaRegistry @@ -908,43 +583,7 @@ async def test_error_no_domain_in_area( @pytest.mark.usefixtures("init_components") -async def test_error_no_domain_in_area_exposed( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - area_registry: ar.AreaRegistry, -) -> None: - """Test error message when devices/entities for a domain exist in an area but are not exposed.""" - area_kitchen = area_registry.async_get_or_create("kitchen_id") - area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") - - kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") - kitchen_light = entity_registry.async_update_entity( - kitchen_light.entity_id, - name="test light", - area_id=area_kitchen.id, - ) - hass.states.async_set( - kitchen_light.entity_id, - "off", - attributes={ATTR_FRIENDLY_NAME: kitchen_light.name}, - ) - expose_entity(hass, kitchen_light.entity_id, False) - await hass.async_block_till_done() - - result = await conversation.async_converse( - hass, "turn on the lights in the kitchen", None, Context(), None - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS - assert ( - result.response.speech["plain"]["speech"] - == "Sorry, no light in the kitchen area is exposed" - ) - - -@pytest.mark.usefixtures("init_components") -async def test_error_no_domain_on_floor( +async def test_error_no_domain_in_floor( hass: HomeAssistant, area_registry: ar.AreaRegistry, floor_registry: fr.FloorRegistry, @@ -985,45 +624,6 @@ async def test_error_no_domain_on_floor( ) -@pytest.mark.usefixtures("init_components") -async def test_error_no_domain_on_floor_exposed( - hass: HomeAssistant, - 
entity_registry: er.EntityRegistry, - area_registry: ar.AreaRegistry, - floor_registry: fr.FloorRegistry, -) -> None: - """Test error message when devices/entities for a domain exist on a floor but are not exposed.""" - floor_ground = floor_registry.async_create("ground") - area_kitchen = area_registry.async_get_or_create("kitchen_id") - area_kitchen = area_registry.async_update( - area_kitchen.id, name="kitchen", floor_id=floor_ground.floor_id - ) - kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") - kitchen_light = entity_registry.async_update_entity( - kitchen_light.entity_id, - name="test light", - area_id=area_kitchen.id, - ) - hass.states.async_set( - kitchen_light.entity_id, - "off", - attributes={ATTR_FRIENDLY_NAME: kitchen_light.name}, - ) - expose_entity(hass, kitchen_light.entity_id, False) - await hass.async_block_till_done() - - result = await conversation.async_converse( - hass, "turn on all lights on the ground floor", None, Context(), None - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS - assert ( - result.response.speech["plain"]["speech"] - == "Sorry, no light in the ground floor is exposed" - ) - - @pytest.mark.usefixtures("init_components") async def test_error_no_device_class(hass: HomeAssistant) -> None: """Test error message when no entities of a device class exist.""" @@ -1047,8 +647,8 @@ async def test_error_no_device_class(hass: HomeAssistant) -> None: ) with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - return_value=recognize_result, + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[recognize_result], ): result = await conversation.async_converse( hass, "open the windows", None, Context(), None @@ -1065,54 +665,6 @@ async def test_error_no_device_class(hass: HomeAssistant) -> None: ) -@pytest.mark.usefixtures("init_components") -async def test_error_no_device_class_exposed(hass: HomeAssistant) -> None: - """Test error message when entities of a device class exist but aren't exposed.""" - # Create a cover entity that is not a window. - # This ensures that the filtering below won't exit early because there are - # no entities in the cover domain. 
- hass.states.async_set( - "cover.garage_door", - STATE_CLOSED, - attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.GARAGE}, - ) - - # Create a window an ensure it's not exposed - hass.states.async_set( - "cover.test_window", - STATE_CLOSED, - attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.WINDOW}, - ) - expose_entity(hass, "cover.test_window", False) - - # We don't have a sentence for opening all windows - cover_domain = MatchEntity(name="domain", value="cover", text="cover") - window_class = MatchEntity(name="device_class", value="window", text="windows") - recognize_result = RecognizeResult( - intent=Intent("HassTurnOn"), - intent_data=IntentData([]), - entities={"domain": cover_domain, "device_class": window_class}, - entities_list=[cover_domain, window_class], - ) - - with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - return_value=recognize_result, - ): - result = await conversation.async_converse( - hass, "open all the windows", None, Context(), None - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert ( - result.response.error_code - == intent.IntentResponseErrorCode.NO_VALID_TARGETS - ) - assert ( - result.response.speech["plain"]["speech"] == "Sorry, no window is exposed" - ) - - @pytest.mark.usefixtures("init_components") async def test_error_no_device_class_in_area( hass: HomeAssistant, area_registry: ar.AreaRegistry @@ -1132,105 +684,12 @@ async def test_error_no_device_class_in_area( ) -@pytest.mark.usefixtures("init_components") -async def test_error_no_device_class_in_area_exposed( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - area_registry: ar.AreaRegistry, -) -> None: - """Test error message when entities of a device class exist in an area but are not exposed.""" - area_bedroom = area_registry.async_get_or_create("bedroom_id") - area_bedroom = area_registry.async_update(area_bedroom.id, name="bedroom") - bedroom_window = entity_registry.async_get_or_create("cover", "demo", "1234") - bedroom_window = entity_registry.async_update_entity( - bedroom_window.entity_id, - name="test cover", - area_id=area_bedroom.id, - ) - hass.states.async_set( - bedroom_window.entity_id, - "off", - attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.WINDOW}, - ) - expose_entity(hass, bedroom_window.entity_id, False) - await hass.async_block_till_done() - - result = await conversation.async_converse( - hass, "open bedroom windows", None, Context(), None - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS - assert ( - result.response.speech["plain"]["speech"] - == "Sorry, no window in the bedroom area is exposed" - ) - - -@pytest.mark.usefixtures("init_components") -async def test_error_no_device_class_on_floor_exposed( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - area_registry: ar.AreaRegistry, - floor_registry: fr.FloorRegistry, -) -> None: - """Test error message when entities of a device class exist in on a floor but are not exposed.""" - floor_ground = floor_registry.async_create("ground") - - area_bedroom = area_registry.async_get_or_create("bedroom_id") - area_bedroom = area_registry.async_update( - area_bedroom.id, name="bedroom", floor_id=floor_ground.floor_id - ) - bedroom_window = entity_registry.async_get_or_create("cover", "demo", "1234") - bedroom_window = entity_registry.async_update_entity( - bedroom_window.entity_id, - name="test cover", - 
area_id=area_bedroom.id, - ) - hass.states.async_set( - bedroom_window.entity_id, - "off", - attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.WINDOW}, - ) - expose_entity(hass, bedroom_window.entity_id, False) - await hass.async_block_till_done() - - # We don't have a sentence for opening all windows on a floor - cover_domain = MatchEntity(name="domain", value="cover", text="cover") - window_class = MatchEntity(name="device_class", value="window", text="windows") - floor = MatchEntity(name="floor", value=floor_ground.name, text=floor_ground.name) - recognize_result = RecognizeResult( - intent=Intent("HassTurnOn"), - intent_data=IntentData([]), - entities={"domain": cover_domain, "device_class": window_class, "floor": floor}, - entities_list=[cover_domain, window_class, floor], - ) - - with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - return_value=recognize_result, - ): - result = await conversation.async_converse( - hass, "open ground floor windows", None, Context(), None - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert ( - result.response.error_code - == intent.IntentResponseErrorCode.NO_VALID_TARGETS - ) - assert ( - result.response.speech["plain"]["speech"] - == "Sorry, no window in the ground floor is exposed" - ) - - @pytest.mark.usefixtures("init_components") async def test_error_no_intent(hass: HomeAssistant) -> None: """Test response with an intent match failure.""" with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - return_value=None, + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[], ): result = await conversation.async_converse( hass, "do something", None, Context(), None @@ -1299,48 +758,12 @@ async def test_error_duplicate_names( @pytest.mark.usefixtures("init_components") -async def test_duplicate_names_but_one_is_exposed( - hass: HomeAssistant, entity_registry: er.EntityRegistry -) -> None: - """Test when multiple devices have the same name (or alias), but only one of them is exposed.""" - kitchen_light_1 = entity_registry.async_get_or_create("light", "demo", "1234") - kitchen_light_2 = entity_registry.async_get_or_create("light", "demo", "5678") - - # Same name and alias - for light in (kitchen_light_1, kitchen_light_2): - light = entity_registry.async_update_entity( - light.entity_id, - name="kitchen light", - aliases={"overhead light"}, - ) - hass.states.async_set( - light.entity_id, - "off", - attributes={ATTR_FRIENDLY_NAME: light.name}, - ) - - # Only expose one - expose_entity(hass, kitchen_light_1.entity_id, True) - expose_entity(hass, kitchen_light_2.entity_id, False) - - # Check name and alias - async_mock_service(hass, "light", "turn_on") - for name in ("kitchen light", "overhead light"): - # command - result = await conversation.async_converse( - hass, f"turn on {name}", None, Context(), None - ) - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert result.response.matched_states[0].entity_id == kitchen_light_1.entity_id - - -@pytest.mark.usefixtures("init_components") -async def test_error_duplicate_names_same_area( +async def test_error_duplicate_names_in_area( hass: HomeAssistant, area_registry: ar.AreaRegistry, entity_registry: er.EntityRegistry, ) -> None: - """Test error message when multiple devices have the same name (or alias) in the same area.""" + """Test error message when multiple devices have the same name (or alias).""" area_kitchen = 
area_registry.async_get_or_create("kitchen_id") area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") @@ -1392,127 +815,6 @@ async def test_error_duplicate_names_same_area( ) -@pytest.mark.usefixtures("init_components") -async def test_duplicate_names_same_area_but_one_is_exposed( - hass: HomeAssistant, - area_registry: ar.AreaRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test when multiple devices have the same name (or alias) in the same area but only one is exposed.""" - area_kitchen = area_registry.async_get_or_create("kitchen_id") - area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") - - kitchen_light_1 = entity_registry.async_get_or_create("light", "demo", "1234") - kitchen_light_2 = entity_registry.async_get_or_create("light", "demo", "5678") - - # Same name and alias - for light in (kitchen_light_1, kitchen_light_2): - light = entity_registry.async_update_entity( - light.entity_id, - name="kitchen light", - area_id=area_kitchen.id, - aliases={"overhead light"}, - ) - hass.states.async_set( - light.entity_id, - "off", - attributes={ATTR_FRIENDLY_NAME: light.name}, - ) - - # Only expose one - expose_entity(hass, kitchen_light_1.entity_id, True) - expose_entity(hass, kitchen_light_2.entity_id, False) - - # Check name and alias - async_mock_service(hass, "light", "turn_on") - for name in ("kitchen light", "overhead light"): - # command - result = await conversation.async_converse( - hass, f"turn on {name} in {area_kitchen.name}", None, Context(), None - ) - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert result.response.matched_states[0].entity_id == kitchen_light_1.entity_id - - -@pytest.mark.usefixtures("init_components") -async def test_duplicate_names_different_areas( - hass: HomeAssistant, - area_registry: ar.AreaRegistry, - entity_registry: er.EntityRegistry, - device_registry: dr.DeviceRegistry, -) -> None: - """Test preferred area when multiple devices have the same name (or alias) in different areas.""" - area_kitchen = area_registry.async_get_or_create("kitchen_id") - area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") - - area_bedroom = area_registry.async_get_or_create("bedroom_id") - area_bedroom = area_registry.async_update(area_bedroom.id, name="bedroom") - - kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") - kitchen_light = entity_registry.async_update_entity( - kitchen_light.entity_id, area_id=area_kitchen.id - ) - bedroom_light = entity_registry.async_get_or_create("light", "demo", "5678") - bedroom_light = entity_registry.async_update_entity( - bedroom_light.entity_id, area_id=area_bedroom.id - ) - - # Same name and alias - for light in (kitchen_light, bedroom_light): - light = entity_registry.async_update_entity( - light.entity_id, - name="test light", - aliases={"overhead light"}, - ) - hass.states.async_set( - light.entity_id, - "off", - attributes={ATTR_FRIENDLY_NAME: light.name}, - ) - - # Add a satellite in the kitchen and bedroom - kitchen_entry = MockConfigEntry() - kitchen_entry.add_to_hass(hass) - device_kitchen = device_registry.async_get_or_create( - config_entry_id=kitchen_entry.entry_id, - connections=set(), - identifiers={("demo", "device-kitchen")}, - ) - device_registry.async_update_device(device_kitchen.id, area_id=area_kitchen.id) - - bedroom_entry = MockConfigEntry() - bedroom_entry.add_to_hass(hass) - device_bedroom = device_registry.async_get_or_create( - config_entry_id=bedroom_entry.entry_id, 
- connections=set(), - identifiers={("demo", "device-bedroom")}, - ) - device_registry.async_update_device(device_bedroom.id, area_id=area_bedroom.id) - - # Check name and alias - async_mock_service(hass, "light", "turn_on") - for name in ("test light", "overhead light"): - # Should fail without a preferred area - result = await conversation.async_converse( - hass, f"turn on {name}", None, Context(), None - ) - assert result.response.response_type == intent.IntentResponseType.ERROR - - # Target kitchen light by using kitchen device - result = await conversation.async_converse( - hass, f"turn on {name}", None, Context(), None, device_id=device_kitchen.id - ) - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert result.response.matched_states[0].entity_id == kitchen_light.entity_id - - # Target bedroom light by using bedroom device - result = await conversation.async_converse( - hass, f"turn on {name}", None, Context(), None, device_id=device_bedroom.id - ) - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert result.response.matched_states[0].entity_id == bedroom_light.entity_id - - @pytest.mark.usefixtures("init_components") async def test_error_wrong_state(hass: HomeAssistant) -> None: """Test error message when no entities are in the correct state.""" @@ -2048,788 +1350,3 @@ async def test_name_wildcard_lower_priority(hass: HomeAssistant) -> None: assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert not beer_handler.triggered assert food_handler.triggered - - -async def test_intent_entity_added_removed( - hass: HomeAssistant, - init_components, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API with entities added later. - - We want to ensure that adding an entity later busts the cache - so that the new entity is available as well as any aliases. 
- """ - context = Context() - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) - await hass.async_block_till_done() - hass.states.async_set("light.kitchen", "off") - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - result = await conversation.async_converse( - hass, "turn on my cool light", None, context - ) - - assert len(calls) == 1 - data = result.as_dict() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Add an entity - entity_registry.async_get_or_create( - "light", "demo", "5678", suggested_object_id="late" - ) - hass.states.async_set("light.late", "off", {"friendly_name": "friendly light"}) - - result = await conversation.async_converse( - hass, "turn on friendly light", None, context - ) - data = result.as_dict() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Now add an alias - entity_registry.async_update_entity("light.late", aliases={"late added light"}) - - result = await conversation.async_converse( - hass, "turn on late added light", None, context - ) - - data = result.as_dict() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Now delete the entity - hass.states.async_remove("light.late") - - result = await conversation.async_converse( - hass, "turn on late added light", None, context - ) - data = result.as_dict() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - -async def test_intent_alias_added_removed( - hass: HomeAssistant, - init_components, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API with aliases added later. - - We want to ensure that adding an alias later busts the cache - so that the new alias is available. - """ - context = Context() - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - hass.states.async_set("light.kitchen", "off", {"friendly_name": "kitchen light"}) - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - result = await conversation.async_converse( - hass, "turn on kitchen light", None, context - ) - assert len(calls) == 1 - data = result.as_dict() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Add an alias - entity_registry.async_update_entity("light.kitchen", aliases={"late added alias"}) - - result = await conversation.async_converse( - hass, "turn on late added alias", None, context - ) - - data = result.as_dict() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Now remove the alieas - entity_registry.async_update_entity("light.kitchen", aliases={}) - - result = await conversation.async_converse( - hass, "turn on late added alias", None, context - ) - - data = result.as_dict() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - -async def test_intent_entity_renamed( - hass: HomeAssistant, - init_components, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API with entities renamed later. - - We want to ensure that renaming an entity later busts the cache - so that the new name is used. 
- """ - context = Context() - entity = MockLight("kitchen light", STATE_ON) - entity._attr_unique_id = "1234" - entity.entity_id = "light.kitchen" - setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) - - assert await async_setup_component( - hass, - LIGHT_DOMAIN, - {LIGHT_DOMAIN: [{"platform": "test"}]}, - ) - await hass.async_block_till_done() - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - result = await conversation.async_converse( - hass, "turn on kitchen light", None, context - ) - - assert len(calls) == 1 - data = result.as_dict() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Rename the entity - entity_registry.async_update_entity("light.kitchen", name="renamed light") - await hass.async_block_till_done() - - result = await conversation.async_converse( - hass, "turn on renamed light", None, context - ) - - data = result.as_dict() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - -async def test_intent_entity_remove_custom_name( - hass: HomeAssistant, - init_components, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test that removing a custom name allows targeting the entity by its auto-generated name again.""" - context = Context() - entity = MockLight("kitchen light", STATE_ON) - entity._attr_unique_id = "1234" - entity.entity_id = "light.kitchen" - setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) - - assert await async_setup_component( - hass, - LIGHT_DOMAIN, - {LIGHT_DOMAIN: [{"platform": "test"}]}, - ) - await hass.async_block_till_done() - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - - # Should fail with auto-generated name - entity_registry.async_update_entity("light.kitchen", name="renamed light") - result = await conversation.async_converse( - hass, "turn on kitchen light", None, context - ) - - data = result.as_dict() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - # Now clear the custom name - entity_registry.async_update_entity("light.kitchen", name=None) - await hass.async_block_till_done() - - result = await conversation.async_converse( - hass, "turn on kitchen light", None, context - ) - - data = result.as_dict() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - assert len(calls) == 1 - - result = await conversation.async_converse( - hass, "turn on renamed light", None, context - ) - - data = result.as_dict() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - -async def test_intent_entity_fail_if_unexposed( - hass: HomeAssistant, - init_components, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test that an entity is not usable if unexposed.""" - context = Context() - entity = MockLight("kitchen light", STATE_ON) - entity._attr_unique_id = "1234" - entity.entity_id = "light.kitchen" - setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) - - assert await async_setup_component( - hass, - LIGHT_DOMAIN, - {LIGHT_DOMAIN: [{"platform": "test"}]}, - ) - await hass.async_block_till_done() - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - - # Unexpose the entity - expose_entity(hass, "light.kitchen", False) - await hass.async_block_till_done(wait_background_tasks=True) - - result = await conversation.async_converse( - hass, "turn on kitchen light", None, context - ) - - data = result.as_dict() - assert data == snapshot - assert 
data["response"]["response_type"] == "error" - assert len(calls) == 0 - - -async def test_intent_entity_exposed( - hass: HomeAssistant, - init_components, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API with manual expose. - - We want to ensure that manually exposing an entity later busts the cache - so that the new setting is used. - """ - context = Context() - entity = MockLight("kitchen light", STATE_ON) - entity._attr_unique_id = "1234" - entity.entity_id = "light.kitchen" - setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) - - assert await async_setup_component( - hass, - LIGHT_DOMAIN, - {LIGHT_DOMAIN: [{"platform": "test"}]}, - ) - await hass.async_block_till_done() - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - - # Unexpose, then expose the entity - expose_entity(hass, "light.kitchen", False) - await hass.async_block_till_done() - expose_entity(hass, "light.kitchen", True) - await hass.async_block_till_done() - - result = await conversation.async_converse( - hass, "turn on kitchen light", None, context - ) - - data = result.as_dict() - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - assert len(calls) == 1 - - -async def test_intent_conversion_not_expose_new( - hass: HomeAssistant, - init_components, - hass_admin_user: MockUser, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API when not exposing new entities.""" - # Disable exposing new entities to the default agent - expose_new(hass, False) - - context = Context() - entity = MockLight("kitchen light", STATE_ON) - entity._attr_unique_id = "1234" - entity.entity_id = "light.kitchen" - setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) - - assert await async_setup_component( - hass, - LIGHT_DOMAIN, - {LIGHT_DOMAIN: [{"platform": "test"}]}, - ) - await hass.async_block_till_done() - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - - result = await conversation.async_converse( - hass, "turn on kitchen light", None, context - ) - - data = result.as_dict() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - # Expose the entity - expose_entity(hass, "light.kitchen", True) - await hass.async_block_till_done() - - result = await conversation.async_converse( - hass, "turn on kitchen light", None, context - ) - - assert len(calls) == 1 - data = result.as_dict() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - -async def test_custom_sentences( - hass: HomeAssistant, - init_components, - snapshot: SnapshotAssertion, -) -> None: - """Test custom sentences with a custom intent.""" - # Expecting testing_config/custom_sentences/en/beer.yaml - intent.async_register(hass, OrderBeerIntentHandler()) - - # Don't use "en" to test loading custom sentences with language variants. 
- language = "en-us" - - # Invoke intent via HTTP API - for beer_style in ("stout", "lager"): - result = await conversation.async_converse( - hass, - f"I'd like to order a {beer_style}, please", - None, - Context(), - language=language, - ) - - data = result.as_dict() - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - assert ( - data["response"]["speech"]["plain"]["speech"] - == f"You ordered a {beer_style}" - ) - - -async def test_custom_sentences_config( - hass: HomeAssistant, - snapshot: SnapshotAssertion, -) -> None: - """Test custom sentences with a custom intent in config.""" - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component( - hass, - "conversation", - {"conversation": {"intents": {"StealthMode": ["engage stealth mode"]}}}, - ) - assert await async_setup_component(hass, "intent", {}) - assert await async_setup_component( - hass, - "intent_script", - { - "intent_script": { - "StealthMode": {"speech": {"text": "Stealth mode engaged"}} - } - }, - ) - - # Invoke intent via HTTP API - result = await conversation.async_converse( - hass, "engage stealth mode", None, Context(), None - ) - - data = result.as_dict() - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - assert data["response"]["speech"]["plain"]["speech"] == "Stealth mode engaged" - - -async def test_language_region(hass: HomeAssistant, init_components) -> None: - """Test regional languages.""" - hass.states.async_set("light.kitchen", "off") - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - - # Add fake region - language = f"{hass.config.language}-YZ" - await hass.services.async_call( - "conversation", - "process", - { - conversation.ATTR_TEXT: "turn on the kitchen", - conversation.ATTR_LANGUAGE: language, - }, - ) - await hass.async_block_till_done() - - assert len(calls) == 1 - call = calls[0] - assert call.domain == LIGHT_DOMAIN - assert call.service == "turn_on" - assert call.data == {"entity_id": ["light.kitchen"]} - - -async def test_non_default_response(hass: HomeAssistant, init_components) -> None: - """Test intent response that is not the default.""" - hass.states.async_set("cover.front_door", "closed") - calls = async_mock_service(hass, "cover", SERVICE_OPEN_COVER) - - agent = hass.data[DATA_DEFAULT_ENTITY] - assert isinstance(agent, default_agent.DefaultAgent) - - result = await agent.async_process( - ConversationInput( - text="open the front door", - context=Context(), - conversation_id=None, - device_id=None, - language=hass.config.language, - agent_id=None, - ) - ) - assert len(calls) == 1 - assert result.response.speech["plain"]["speech"] == "Opened" - - -async def test_turn_on_area( - hass: HomeAssistant, - init_components, - area_registry: ar.AreaRegistry, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test turning on an area.""" - entry = MockConfigEntry(domain="test") - entry.add_to_hass(hass) - - device = device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, - ) - - kitchen_area = area_registry.async_create("kitchen") - device_registry.async_update_device(device.id, area_id=kitchen_area.id) - - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="stove" - ) - entity_registry.async_update_entity( - "light.stove", aliases={"my stove light"}, area_id=kitchen_area.id - ) - hass.states.async_set("light.stove", "off") - - 
calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - - await hass.services.async_call( - "conversation", - "process", - {conversation.ATTR_TEXT: "turn on lights in the kitchen"}, - ) - await hass.async_block_till_done() - - assert len(calls) == 1 - call = calls[0] - assert call.domain == LIGHT_DOMAIN - assert call.service == "turn_on" - assert call.data == {"entity_id": ["light.stove"]} - - basement_area = area_registry.async_create("basement") - device_registry.async_update_device(device.id, area_id=basement_area.id) - entity_registry.async_update_entity("light.stove", area_id=basement_area.id) - calls.clear() - - # Test that the area is updated - await hass.services.async_call( - "conversation", - "process", - {conversation.ATTR_TEXT: "turn on lights in the kitchen"}, - ) - await hass.async_block_till_done() - - assert len(calls) == 0 - - # Test the new area works - await hass.services.async_call( - "conversation", - "process", - {conversation.ATTR_TEXT: "turn on lights in the basement"}, - ) - await hass.async_block_till_done() - - assert len(calls) == 1 - call = calls[0] - assert call.domain == LIGHT_DOMAIN - assert call.service == "turn_on" - assert call.data == {"entity_id": ["light.stove"]} - - -async def test_light_area_same_name( - hass: HomeAssistant, - init_components, - area_registry: ar.AreaRegistry, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test turning on a light with the same name as an area.""" - entry = MockConfigEntry(domain="test") - entry.add_to_hass(hass) - - device = device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, - ) - - kitchen_area = area_registry.async_create("kitchen") - device_registry.async_update_device(device.id, area_id=kitchen_area.id) - - kitchen_light = entity_registry.async_get_or_create( - "light", "demo", "1234", original_name="light in the kitchen" - ) - entity_registry.async_update_entity( - kitchen_light.entity_id, area_id=kitchen_area.id - ) - hass.states.async_set( - kitchen_light.entity_id, - "off", - attributes={ATTR_FRIENDLY_NAME: "light in the kitchen"}, - ) - - ceiling_light = entity_registry.async_get_or_create( - "light", "demo", "5678", original_name="ceiling light" - ) - entity_registry.async_update_entity( - ceiling_light.entity_id, area_id=kitchen_area.id - ) - hass.states.async_set( - ceiling_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "ceiling light"} - ) - - bathroom_light = entity_registry.async_get_or_create( - "light", "demo", "9012", original_name="light" - ) - hass.states.async_set( - bathroom_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "light"} - ) - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - - await hass.services.async_call( - "conversation", - "process", - {conversation.ATTR_TEXT: "turn on light in the kitchen"}, - ) - await hass.async_block_till_done() - - # Should only turn on one light instead of all lights in the kitchen - assert len(calls) == 1 - call = calls[0] - assert call.domain == LIGHT_DOMAIN - assert call.service == "turn_on" - assert call.data == {"entity_id": [kitchen_light.entity_id]} - - -async def test_custom_sentences_priority( - hass: HomeAssistant, - hass_admin_user: MockUser, - snapshot: SnapshotAssertion, -) -> None: - """Test that user intents from custom_sentences have priority over builtin intents/sentences.""" - with tempfile.NamedTemporaryFile( - mode="w+", - encoding="utf-8", - suffix=".yaml", - 
dir=os.path.join(hass.config.config_dir, "custom_sentences", "en"), - ) as custom_sentences_file: - # Add a custom sentence that would match a builtin sentence. - # Custom sentences have priority. - yaml.dump( - { - "language": "en", - "intents": { - "CustomIntent": {"data": [{"sentences": ["turn on the lamp"]}]} - }, - }, - custom_sentences_file, - ) - custom_sentences_file.flush() - custom_sentences_file.seek(0) - - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) - assert await async_setup_component(hass, "light", {}) - assert await async_setup_component(hass, "intent", {}) - assert await async_setup_component( - hass, - "intent_script", - { - "intent_script": { - "CustomIntent": {"speech": {"text": "custom response"}} - } - }, - ) - - # Ensure that a "lamp" exists so that we can verify the custom intent - # overrides the builtin sentence. - hass.states.async_set("light.lamp", "off") - - result = await conversation.async_converse( - hass, - "turn on the lamp", - None, - Context(), - language=hass.config.language, - ) - - data = result.as_dict() - assert data["response"]["response_type"] == "action_done" - assert data["response"]["speech"]["plain"]["speech"] == "custom response" - - -async def test_config_sentences_priority( - hass: HomeAssistant, - hass_admin_user: MockUser, - snapshot: SnapshotAssertion, -) -> None: - """Test that user intents from configuration.yaml have priority over builtin intents/sentences. - - Also test that they follow proper selection logic. - """ - # Add a custom sentence that would match a builtin sentence. - # Custom sentences have priority. - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "intent", {}) - assert await async_setup_component( - hass, - "conversation", - { - "conversation": { - "intents": { - "CustomIntent": ["turn on "], - "WorseCustomIntent": ["turn on the lamp"], - "FakeCustomIntent": ["turn on "], - } - } - }, - ) - - # Fake intent not being custom - intents = ( - await conversation.async_get_agent(hass).async_get_or_load_intents( - hass.config.language - ) - ).intents.intents - intents["FakeCustomIntent"].data[0].metadata[METADATA_CUSTOM_SENTENCE] = False - - assert await async_setup_component(hass, "light", {}) - assert await async_setup_component( - hass, - "intent_script", - { - "intent_script": { - "CustomIntent": {"speech": {"text": "custom response"}}, - "WorseCustomIntent": {"speech": {"text": "worse custom response"}}, - "FakeCustomIntent": {"speech": {"text": "fake custom response"}}, - } - }, - ) - - # Ensure that a "lamp" exists so that we can verify the custom intent - # overrides the builtin sentence. 
- hass.states.async_set("light.lamp", "off") - - result = await conversation.async_converse( - hass, - "turn on the lamp", - None, - Context(), - language=hass.config.language, - ) - data = result.as_dict() - assert data["response"]["response_type"] == "action_done" - assert data["response"]["speech"]["plain"]["speech"] == "custom response" - - -async def test_query_same_name_different_areas( - hass: HomeAssistant, - init_components, - area_registry: ar.AreaRegistry, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test asking a question about entities with the same name in different areas.""" - entry = MockConfigEntry(domain="test") - entry.add_to_hass(hass) - - kitchen_device = device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, - ) - - kitchen_area = area_registry.async_create("kitchen") - device_registry.async_update_device(kitchen_device.id, area_id=kitchen_area.id) - - kitchen_light = entity_registry.async_get_or_create( - "light", - "demo", - "1234", - ) - entity_registry.async_update_entity( - kitchen_light.entity_id, area_id=kitchen_area.id - ) - hass.states.async_set( - kitchen_light.entity_id, - "on", - attributes={ATTR_FRIENDLY_NAME: "overhead light"}, - ) - - bedroom_area = area_registry.async_create("bedroom") - bedroom_light = entity_registry.async_get_or_create( - "light", - "demo", - "5678", - ) - entity_registry.async_update_entity( - bedroom_light.entity_id, area_id=bedroom_area.id - ) - hass.states.async_set( - bedroom_light.entity_id, - "off", - attributes={ATTR_FRIENDLY_NAME: "overhead light"}, - ) - - # Should fail without a preferred area (duplicate name) - result = await conversation.async_converse( - hass, "is the overhead light on?", None, Context(), None - ) - assert result.response.response_type == intent.IntentResponseType.ERROR - - # Succeeds using area from device (kitchen) - result = await conversation.async_converse( - hass, - "is the overhead light on?", - None, - Context(), - None, - device_id=kitchen_device.id, - ) - assert result.response.response_type == intent.IntentResponseType.QUERY_ANSWER - assert len(result.response.matched_states) == 1 - assert result.response.matched_states[0].entity_id == kitchen_light.entity_id diff --git a/tests/components/conversation/test_default_agent_intents.py b/tests/components/conversation/test_default_agent_intents.py index 7bae9c43f70..b1c4a6d51af 100644 --- a/tests/components/conversation/test_default_agent_intents.py +++ b/tests/components/conversation/test_default_agent_intents.py @@ -1,9 +1,7 @@ """Test intents for the default agent.""" -from datetime import datetime from unittest.mock import patch -from freezegun import freeze_time import pytest from homeassistant.components import ( @@ -123,34 +121,6 @@ async def test_cover_set_position( assert call.data == {"entity_id": entity_id, cover.ATTR_POSITION: 50} -async def test_cover_device_class( - hass: HomeAssistant, - init_components, -) -> None: - """Test the open position for covers by device class.""" - await cover_intent.async_setup_intents(hass) - - entity_id = f"{cover.DOMAIN}.front" - hass.states.async_set( - entity_id, STATE_CLOSED, attributes={"device_class": "garage"} - ) - async_expose_entity(hass, conversation.DOMAIN, entity_id, True) - - # Open service - calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) - result = await conversation.async_converse( - hass, "open the garage door", None, Context(), 
None - ) - await hass.async_block_till_done() - - response = result.response - assert response.response_type == intent.IntentResponseType.ACTION_DONE - assert response.speech["plain"]["speech"] == "Opened the garage" - assert len(calls) == 1 - call = calls[0] - assert call.data == {"entity_id": entity_id} - - async def test_valve_intents( hass: HomeAssistant, init_components, @@ -443,28 +413,3 @@ async def test_todo_add_item_fr( assert mock_handle.call_args.args intent_obj = mock_handle.call_args.args[0] assert intent_obj.slots.get("item", {}).get("value", "").strip() == "farine" - - -@freeze_time(datetime(year=2013, month=9, day=17, hour=1, minute=2)) -async def test_date_time( - hass: HomeAssistant, - init_components, -) -> None: - """Test the date and time intents.""" - result = await conversation.async_converse( - hass, "what is the date", None, Context(), None - ) - await hass.async_block_till_done() - - response = result.response - assert response.response_type == intent.IntentResponseType.ACTION_DONE - assert response.speech["plain"]["speech"] == "September 17th, 2013" - - result = await conversation.async_converse( - hass, "what time is it", None, Context(), None - ) - await hass.async_block_till_done() - - response = result.response - assert response.response_type == intent.IntentResponseType.ACTION_DONE - assert response.speech["plain"]["speech"] == "1:02 AM" diff --git a/tests/components/conversation/test_http.py b/tests/components/conversation/test_http.py deleted file mode 100644 index 5b6f7072a2d..00000000000 --- a/tests/components/conversation/test_http.py +++ /dev/null @@ -1,525 +0,0 @@ -"""The tests for the HTTP API of the Conversation component.""" - -from http import HTTPStatus -from typing import Any -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.conversation import default_agent -from homeassistant.components.conversation.const import DATA_DEFAULT_ENTITY -from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.const import ATTR_FRIENDLY_NAME -from homeassistant.core import HomeAssistant -from homeassistant.helpers import area_registry as ar, entity_registry as er, intent -from homeassistant.setup import async_setup_component - -from . 
import MockAgent - -from tests.common import async_mock_service -from tests.typing import ClientSessionGenerator, WebSocketGenerator - -AGENT_ID_OPTIONS = [ - None, - # Old value of conversation.HOME_ASSISTANT_AGENT, - "homeassistant", - # Current value of conversation.HOME_ASSISTANT_AGENT, - "conversation.home_assistant", -] - - -class OrderBeerIntentHandler(intent.IntentHandler): - """Handle OrderBeer intent.""" - - intent_type = "OrderBeer" - - async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: - """Return speech response.""" - beer_style = intent_obj.slots["beer_style"]["value"] - response = intent_obj.create_response() - response.async_set_speech(f"You ordered a {beer_style}") - return response - - -@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) -async def test_http_processing_intent( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - agent_id, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API.""" - # Add an alias - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) - hass.states.async_set("light.kitchen", "off") - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - data: dict[str, Any] = {"text": "turn on my cool light"} - if agent_id: - data["agent_id"] = agent_id - resp = await client.post("/api/conversation/process", json=data) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - - -async def test_http_api_no_match( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test the HTTP conversation API with an intent match failure.""" - client = await hass_client() - - # Shouldn't match any intents - resp = await client.post("/api/conversation/process", json={"text": "do something"}) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "error" - assert data["response"]["data"]["code"] == "no_intent_match" - - -async def test_http_api_handle_failure( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test the HTTP conversation API with an error during handling.""" - client = await hass_client() - - hass.states.async_set("light.kitchen", "off") - - # Raise an error during intent handling - def async_handle_error(*args, **kwargs): - raise intent.IntentHandleError - - with patch("homeassistant.helpers.intent.async_handle", new=async_handle_error): - resp = await client.post( - "/api/conversation/process", json={"text": "turn on the kitchen"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "error" - assert data["response"]["data"]["code"] == "failed_to_handle" - - -async def test_http_api_unexpected_failure( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test the HTTP conversation API with an unexpected error during handling.""" - client = await hass_client() - - hass.states.async_set("light.kitchen", "off") - - # Raise an "unexpected" error during intent handling - def async_handle_error(*args, 
**kwargs): - raise intent.IntentUnexpectedError - - with patch("homeassistant.helpers.intent.async_handle", new=async_handle_error): - resp = await client.post( - "/api/conversation/process", json={"text": "turn on the kitchen"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "error" - assert data["response"]["data"]["code"] == "unknown" - - -async def test_http_api_wrong_data( - hass: HomeAssistant, init_components, hass_client: ClientSessionGenerator -) -> None: - """Test the HTTP conversation API.""" - client = await hass_client() - - resp = await client.post("/api/conversation/process", json={"text": 123}) - assert resp.status == HTTPStatus.BAD_REQUEST - - resp = await client.post("/api/conversation/process", json={}) - assert resp.status == HTTPStatus.BAD_REQUEST - - -@pytest.mark.parametrize( - "payload", - [ - { - "text": "Test Text", - }, - { - "text": "Test Text", - "language": "test-language", - }, - { - "text": "Test Text", - "conversation_id": "test-conv-id", - }, - { - "text": "Test Text", - "conversation_id": None, - }, - { - "text": "Test Text", - "conversation_id": "test-conv-id", - "language": "test-language", - }, - { - "text": "Test Text", - "agent_id": "homeassistant", - }, - ], -) -async def test_ws_api( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - payload, - snapshot: SnapshotAssertion, -) -> None: - """Test the Websocket conversation API.""" - client = await hass_ws_client(hass) - - await client.send_json_auto_id({"type": "conversation/process", **payload}) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - assert msg["result"]["response"]["data"]["code"] == "no_intent_match" - - -@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) -async def test_ws_prepare( - hass: HomeAssistant, init_components, hass_ws_client: WebSocketGenerator, agent_id -) -> None: - """Test the Websocket prepare conversation API.""" - agent = hass.data[DATA_DEFAULT_ENTITY] - assert isinstance(agent, default_agent.DefaultAgent) - - # No intents should be loaded yet - assert not agent._lang_intents.get(hass.config.language) - - client = await hass_ws_client(hass) - - msg = {"type": "conversation/prepare"} - if agent_id is not None: - msg["agent_id"] = agent_id - await client.send_json_auto_id(msg) - - msg = await client.receive_json() - - assert msg["success"] - - # Intents should now be load - assert agent._lang_intents.get(hass.config.language) - - -async def test_get_agent_list( - hass: HomeAssistant, - init_components, - mock_conversation_agent: MockAgent, - mock_agent_support_all: MockAgent, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test getting agent info.""" - client = await hass_ws_client(hass) - - await client.send_json_auto_id({"type": "conversation/agent/list"}) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "smurfish"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "en"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await 
client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "en-UK"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "de"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "de", "country": "ch"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - -async def test_ws_hass_agent_debug( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - area_registry: ar.AreaRegistry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test homeassistant agent debug websocket command.""" - client = await hass_ws_client(hass) - - kitchen_area = area_registry.async_create("kitchen") - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - entity_registry.async_update_entity( - "light.kitchen", - aliases={"my cool light"}, - area_id=kitchen_area.id, - ) - await hass.async_block_till_done() - hass.states.async_set("light.kitchen", "off") - - on_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - off_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") - - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "turn on my cool light", - "turn my cool light off", - "turn on all lights in the kitchen", - "how many lights are on in the kitchen?", - "this will not match anything", # None in results - ], - } - ) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - # Last sentence should be a failed match - assert msg["result"]["results"][-1] is None - - # Light state should not have been changed - assert len(on_calls) == 0 - assert len(off_calls) == 0 - - -async def test_ws_hass_agent_debug_null_result( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test homeassistant agent debug websocket command with a null result.""" - client = await hass_ws_client(hass) - - async def async_recognize(self, user_input, *args, **kwargs): - if user_input.text == "bad sentence": - return None - - return await self.async_recognize(user_input, *args, **kwargs) - - with patch( - "homeassistant.components.conversation.default_agent.DefaultAgent.async_recognize", - async_recognize, - ): - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "bad sentence", - ], - } - ) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - assert msg["result"]["results"] == [None] - - -async def test_ws_hass_agent_debug_out_of_range( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Test homeassistant agent debug websocket command with an out of range entity.""" - test_light = entity_registry.async_get_or_create("light", "demo", "1234") - hass.states.async_set( - test_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "test light"} - ) - - client = await hass_ws_client(hass) - - # Brightness is in range (0-100) 
- await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "set test light brightness to 100%", - ], - } - ) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - results = msg["result"]["results"] - assert len(results) == 1 - assert results[0]["match"] - - # Brightness is out of range - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "set test light brightness to 1001%", - ], - } - ) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - results = msg["result"]["results"] - assert len(results) == 1 - assert not results[0]["match"] - - # Name matched, but brightness didn't - assert results[0]["slots"] == {"name": "test light"} - assert results[0]["unmatched_slots"] == {"brightness": 1001} - - -async def test_ws_hass_agent_debug_custom_sentence( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Test homeassistant agent debug websocket command with a custom sentence.""" - # Expecting testing_config/custom_sentences/en/beer.yaml - intent.async_register(hass, OrderBeerIntentHandler()) - - client = await hass_ws_client(hass) - - # Brightness is in range (0-100) - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "I'd like to order a lager, please.", - ], - } - ) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - debug_results = msg["result"].get("results", []) - assert len(debug_results) == 1 - assert debug_results[0].get("match") - assert debug_results[0].get("source") == "custom" - assert debug_results[0].get("file") == "en/beer.yaml" - - -async def test_ws_hass_agent_debug_sentence_trigger( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test homeassistant agent debug websocket command with a sentence trigger.""" - calls = async_mock_service(hass, "test", "automation") - assert await async_setup_component( - hass, - "automation", - { - "automation": { - "trigger": { - "platform": "conversation", - "command": ["hello", "hello[ world]"], - }, - "action": { - "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, - }, - } - }, - ) - - client = await hass_ws_client(hass) - - # Use trigger sentence - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": ["hello world"], - } - ) - await hass.async_block_till_done() - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - debug_results = msg["result"].get("results", []) - assert len(debug_results) == 1 - assert debug_results[0].get("match") - assert debug_results[0].get("source") == "trigger" - assert debug_results[0].get("sentence_template") == "hello[ world]" - - # Trigger should not have been executed - assert len(calls) == 0 diff --git a/tests/components/conversation/test_init.py b/tests/components/conversation/test_init.py index e92b1ab538f..dc940dba81b 100644 --- a/tests/components/conversation/test_init.py +++ b/tests/components/conversation/test_init.py @@ -1,25 +1,42 @@ """The tests for the Conversation component.""" from http import HTTPStatus +import os +import tempfile +from typing import Any from unittest.mock import 
patch import pytest from syrupy.assertion import SnapshotAssertion import voluptuous as vol +import yaml from homeassistant.components import conversation from homeassistant.components.conversation import default_agent -from homeassistant.components.conversation.const import DATA_DEFAULT_ENTITY +from homeassistant.components.conversation.models import ConversationInput +from homeassistant.components.cover import SERVICE_OPEN_COVER from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.core import HomeAssistant +from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_ON +from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import intent +from homeassistant.helpers import ( + area_registry as ar, + device_registry as dr, + entity_registry as er, + intent, +) from homeassistant.setup import async_setup_component -from . import MockAgent +from . import MockAgent, expose_entity, expose_new -from tests.common import MockUser, async_mock_service -from tests.typing import ClientSessionGenerator +from tests.common import ( + MockConfigEntry, + MockUser, + async_mock_service, + setup_test_component_platform, +) +from tests.components.light.common import MockLight +from tests.typing import ClientSessionGenerator, WebSocketGenerator AGENT_ID_OPTIONS = [ None, @@ -30,6 +47,460 @@ AGENT_ID_OPTIONS = [ ] +class OrderBeerIntentHandler(intent.IntentHandler): + """Handle OrderBeer intent.""" + + intent_type = "OrderBeer" + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + """Return speech response.""" + beer_style = intent_obj.slots["beer_style"]["value"] + response = intent_obj.create_response() + response.async_set_speech(f"You ordered a {beer_style}") + return response + + +@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) +async def test_http_processing_intent( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + hass_admin_user: MockUser, + agent_id, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API.""" + # Add an alias + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) + hass.states.async_set("light.kitchen", "off") + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + client = await hass_client() + data: dict[str, Any] = {"text": "turn on my cool light"} + if agent_id: + data["agent_id"] = agent_id + resp = await client.post("/api/conversation/process", json=data) + + assert resp.status == HTTPStatus.OK + assert len(calls) == 1 + data = await resp.json() + + assert data == snapshot + + +async def test_http_processing_intent_target_ha_agent( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + hass_admin_user: MockUser, + mock_conversation_agent: MockAgent, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent can be processed via HTTP API with picking agent.""" + # Add an alias + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) + hass.states.async_set("light.kitchen", "off") + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + client = await hass_client() + resp = await client.post( 
+ "/api/conversation/process", + json={"text": "turn on my cool light", "agent_id": "homeassistant"}, + ) + + assert resp.status == HTTPStatus.OK + assert len(calls) == 1 + data = await resp.json() + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + +async def test_http_processing_intent_entity_added_removed( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + hass_admin_user: MockUser, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API with entities added later. + + We want to ensure that adding an entity later busts the cache + so that the new entity is available as well as any aliases. + """ + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) + hass.states.async_set("light.kitchen", "off") + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on my cool light"} + ) + + assert resp.status == HTTPStatus.OK + assert len(calls) == 1 + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Add an entity + entity_registry.async_get_or_create( + "light", "demo", "5678", suggested_object_id="late" + ) + hass.states.async_set("light.late", "off", {"friendly_name": "friendly light"}) + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on friendly light"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Now add an alias + entity_registry.async_update_entity("light.late", aliases={"late added light"}) + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on late added light"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Now delete the entity + hass.states.async_remove("light.late") + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on late added light"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + +async def test_http_processing_intent_alias_added_removed( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + hass_admin_user: MockUser, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API with aliases added later. + + We want to ensure that adding an alias later busts the cache + so that the new alias is available. 
+ """ + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + hass.states.async_set("light.kitchen", "off", {"friendly_name": "kitchen light"}) + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on kitchen light"} + ) + + assert resp.status == HTTPStatus.OK + assert len(calls) == 1 + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Add an alias + entity_registry.async_update_entity("light.kitchen", aliases={"late added alias"}) + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on late added alias"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Now remove the alieas + entity_registry.async_update_entity("light.kitchen", aliases={}) + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on late added alias"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + +async def test_http_processing_intent_entity_renamed( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + hass_admin_user: MockUser, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API with entities renamed later. + + We want to ensure that renaming an entity later busts the cache + so that the new name is used. + """ + entity = MockLight("kitchen light", STATE_ON) + entity._attr_unique_id = "1234" + entity.entity_id = "light.kitchen" + setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) + + assert await async_setup_component( + hass, + LIGHT_DOMAIN, + {LIGHT_DOMAIN: [{"platform": "test"}]}, + ) + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on kitchen light"} + ) + + assert resp.status == HTTPStatus.OK + assert len(calls) == 1 + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Rename the entity + entity_registry.async_update_entity("light.kitchen", name="renamed light") + await hass.async_block_till_done() + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on renamed light"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on kitchen light"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + # Now clear the custom name + entity_registry.async_update_entity("light.kitchen", name=None) + await hass.async_block_till_done() + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on kitchen light"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == 
"action_done" + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on renamed light"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + +async def test_http_processing_intent_entity_exposed( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + hass_admin_user: MockUser, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API with manual expose. + + We want to ensure that manually exposing an entity later busts the cache + so that the new setting is used. + """ + entity = MockLight("kitchen light", STATE_ON) + entity._attr_unique_id = "1234" + entity.entity_id = "light.kitchen" + setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) + + assert await async_setup_component( + hass, + LIGHT_DOMAIN, + {LIGHT_DOMAIN: [{"platform": "test"}]}, + ) + await hass.async_block_till_done() + entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on kitchen light"} + ) + + assert resp.status == HTTPStatus.OK + assert len(calls) == 1 + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on my cool light"} + ) + + assert resp.status == HTTPStatus.OK + assert len(calls) == 1 + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Unexpose the entity + expose_entity(hass, "light.kitchen", False) + await hass.async_block_till_done() + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on kitchen light"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on my cool light"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + # Now expose the entity + expose_entity(hass, "light.kitchen", True) + await hass.async_block_till_done() + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on kitchen light"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", json={"text": "turn on my cool light"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + +async def test_http_processing_intent_conversion_not_expose_new( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + hass_admin_user: MockUser, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API when 
not exposing new entities.""" + # Disable exposing new entities to the default agent + expose_new(hass, False) + + entity = MockLight("kitchen light", STATE_ON) + entity._attr_unique_id = "1234" + entity.entity_id = "light.kitchen" + setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) + + assert await async_setup_component( + hass, + LIGHT_DOMAIN, + {LIGHT_DOMAIN: [{"platform": "test"}]}, + ) + await hass.async_block_till_done() + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + client = await hass_client() + + resp = await client.post( + "/api/conversation/process", json={"text": "turn on kitchen light"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + # Expose the entity + expose_entity(hass, "light.kitchen", True) + await hass.async_block_till_done() + + resp = await client.post( + "/api/conversation/process", json={"text": "turn on kitchen light"} + ) + + assert resp.status == HTTPStatus.OK + assert len(calls) == 1 + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + @pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) @pytest.mark.parametrize("sentence", ["turn on kitchen", "turn kitchen on"]) @pytest.mark.parametrize("conversation_id", ["my_new_conversation", None]) @@ -102,7 +573,95 @@ async def test_turn_off_intent(hass: HomeAssistant, init_components, sentence) - assert call.data == {"entity_id": ["light.kitchen"]} -@pytest.mark.usefixtures("init_components") +async def test_http_api_no_match( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the HTTP conversation API with an intent match failure.""" + client = await hass_client() + + # Shouldn't match any intents + resp = await client.post("/api/conversation/process", json={"text": "do something"}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "error" + assert data["response"]["data"]["code"] == "no_intent_match" + + +async def test_http_api_handle_failure( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the HTTP conversation API with an error during handling.""" + client = await hass_client() + + hass.states.async_set("light.kitchen", "off") + + # Raise an error during intent handling + def async_handle_error(*args, **kwargs): + raise intent.IntentHandleError + + with patch("homeassistant.helpers.intent.async_handle", new=async_handle_error): + resp = await client.post( + "/api/conversation/process", json={"text": "turn on the kitchen"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "error" + assert data["response"]["data"]["code"] == "failed_to_handle" + + +async def test_http_api_unexpected_failure( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the HTTP conversation API with an unexpected error during handling.""" + client = await hass_client() + + hass.states.async_set("light.kitchen", "off") + + # Raise an "unexpected" error during intent handling + def async_handle_error(*args, **kwargs): + raise intent.IntentUnexpectedError + + with patch("homeassistant.helpers.intent.async_handle", 
new=async_handle_error): + resp = await client.post( + "/api/conversation/process", json={"text": "turn on the kitchen"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "error" + assert data["response"]["data"]["code"] == "unknown" + + +async def test_http_api_wrong_data( + hass: HomeAssistant, init_components, hass_client: ClientSessionGenerator +) -> None: + """Test the HTTP conversation API.""" + client = await hass_client() + + resp = await client.post("/api/conversation/process", json={"text": 123}) + assert resp.status == HTTPStatus.BAD_REQUEST + + resp = await client.post("/api/conversation/process", json={}) + assert resp.status == HTTPStatus.BAD_REQUEST + + async def test_custom_agent( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -111,6 +670,10 @@ async def test_custom_agent( snapshot: SnapshotAssertion, ) -> None: """Test a custom conversation agent.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "conversation", {}) + assert await async_setup_component(hass, "intent", {}) + client = await hass_client() data = { @@ -139,12 +702,165 @@ async def test_custom_agent( ) -async def test_prepare_reload(hass: HomeAssistant, init_components) -> None: +@pytest.mark.parametrize( + "payload", + [ + { + "text": "Test Text", + }, + { + "text": "Test Text", + "language": "test-language", + }, + { + "text": "Test Text", + "conversation_id": "test-conv-id", + }, + { + "text": "Test Text", + "conversation_id": None, + }, + { + "text": "Test Text", + "conversation_id": "test-conv-id", + "language": "test-language", + }, + { + "text": "Test Text", + "agent_id": "homeassistant", + }, + ], +) +async def test_ws_api( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + payload, + snapshot: SnapshotAssertion, +) -> None: + """Test the Websocket conversation API.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "conversation", {}) + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "conversation/process", **payload}) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + assert msg["result"]["response"]["data"]["code"] == "no_intent_match" + + +@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) +async def test_ws_prepare( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, agent_id +) -> None: + """Test the Websocket prepare conversation API.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "conversation", {}) + agent = default_agent.async_get_default_agent(hass) + assert isinstance(agent, default_agent.DefaultAgent) + + # No intents should be loaded yet + assert not agent._lang_intents.get(hass.config.language) + + client = await hass_ws_client(hass) + + msg = {"type": "conversation/prepare"} + if agent_id is not None: + msg["agent_id"] = agent_id + await client.send_json_auto_id(msg) + + msg = await client.receive_json() + + assert msg["success"] + + # Intents should now be loaded + assert agent._lang_intents.get(hass.config.language) + + +async def test_custom_sentences( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_admin_user: MockUser, + snapshot: SnapshotAssertion, +) -> None: + """Test custom sentences with a custom intent.""" + assert await async_setup_component(hass, "homeassistant", {})
+ assert await async_setup_component(hass, "conversation", {}) + assert await async_setup_component(hass, "intent", {}) + + # Expecting testing_config/custom_sentences/en/beer.yaml + intent.async_register(hass, OrderBeerIntentHandler()) + + # Don't use "en" to test loading custom sentences with language variants. + language = "en-us" + + # Invoke intent via HTTP API + client = await hass_client() + for beer_style in ("stout", "lager"): + resp = await client.post( + "/api/conversation/process", + json={ + "text": f"I'd like to order a {beer_style}, please", + "language": language, + }, + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + assert ( + data["response"]["speech"]["plain"]["speech"] + == f"You ordered a {beer_style}" + ) + + +async def test_custom_sentences_config( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_admin_user: MockUser, + snapshot: SnapshotAssertion, +) -> None: + """Test custom sentences with a custom intent in config.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component( + hass, + "conversation", + {"conversation": {"intents": {"StealthMode": ["engage stealth mode"]}}}, + ) + assert await async_setup_component(hass, "intent", {}) + assert await async_setup_component( + hass, + "intent_script", + { + "intent_script": { + "StealthMode": {"speech": {"text": "Stealth mode engaged"}} + } + }, + ) + + # Invoke intent via HTTP API + client = await hass_client() + resp = await client.post( + "/api/conversation/process", + json={"text": "engage stealth mode"}, + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + assert data["response"]["speech"]["plain"]["speech"] == "Stealth mode engaged" + + +async def test_prepare_reload(hass: HomeAssistant) -> None: """Test calling the reload service.""" language = hass.config.language + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "conversation", {}) # Load intents - agent = hass.data[DATA_DEFAULT_ENTITY] + agent = default_agent.async_get_default_agent(hass) assert isinstance(agent, default_agent.DefaultAgent) await agent.async_prepare(language) @@ -172,12 +888,186 @@ async def test_prepare_fail(hass: HomeAssistant) -> None: assert await async_setup_component(hass, "conversation", {}) # Load intents - agent = hass.data[DATA_DEFAULT_ENTITY] + agent = default_agent.async_get_default_agent(hass) assert isinstance(agent, default_agent.DefaultAgent) await agent.async_prepare("not-a-language") # Confirm no intents were loaded - assert agent._lang_intents.get("not-a-language") is default_agent.ERROR_SENTINEL + assert not agent._lang_intents.get("not-a-language") + + +async def test_language_region(hass: HomeAssistant, init_components) -> None: + """Test regional languages.""" + hass.states.async_set("light.kitchen", "off") + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + # Add fake region + language = f"{hass.config.language}-YZ" + await hass.services.async_call( + "conversation", + "process", + { + conversation.ATTR_TEXT: "turn on the kitchen", + conversation.ATTR_LANGUAGE: language, + }, + ) + await hass.async_block_till_done() + + assert len(calls) == 1 + call = calls[0] + assert call.domain == LIGHT_DOMAIN + assert call.service == "turn_on" + assert call.data == {"entity_id":
["light.kitchen"]} + + +async def test_non_default_response(hass: HomeAssistant, init_components) -> None: + """Test intent response that is not the default.""" + hass.states.async_set("cover.front_door", "closed") + calls = async_mock_service(hass, "cover", SERVICE_OPEN_COVER) + + agent = default_agent.async_get_default_agent(hass) + assert isinstance(agent, default_agent.DefaultAgent) + + result = await agent.async_process( + ConversationInput( + text="open the front door", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + agent_id=None, + ) + ) + assert len(calls) == 1 + assert result.response.speech["plain"]["speech"] == "Opened" + + +async def test_turn_on_area( + hass: HomeAssistant, + init_components, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test turning on an area.""" + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + + device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + + kitchen_area = area_registry.async_create("kitchen") + device_registry.async_update_device(device.id, area_id=kitchen_area.id) + + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="stove" + ) + entity_registry.async_update_entity( + "light.stove", aliases={"my stove light"}, area_id=kitchen_area.id + ) + hass.states.async_set("light.stove", "off") + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + await hass.services.async_call( + "conversation", + "process", + {conversation.ATTR_TEXT: "turn on lights in the kitchen"}, + ) + await hass.async_block_till_done() + + assert len(calls) == 1 + call = calls[0] + assert call.domain == LIGHT_DOMAIN + assert call.service == "turn_on" + assert call.data == {"entity_id": ["light.stove"]} + + basement_area = area_registry.async_create("basement") + device_registry.async_update_device(device.id, area_id=basement_area.id) + entity_registry.async_update_entity("light.stove", area_id=basement_area.id) + calls.clear() + + # Test that the area is updated + await hass.services.async_call( + "conversation", + "process", + {conversation.ATTR_TEXT: "turn on lights in the kitchen"}, + ) + await hass.async_block_till_done() + + assert len(calls) == 0 + + # Test the new area works + await hass.services.async_call( + "conversation", + "process", + {conversation.ATTR_TEXT: "turn on lights in the basement"}, + ) + await hass.async_block_till_done() + + assert len(calls) == 1 + call = calls[0] + assert call.domain == LIGHT_DOMAIN + assert call.service == "turn_on" + assert call.data == {"entity_id": ["light.stove"]} + + +async def test_light_area_same_name( + hass: HomeAssistant, + init_components, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test turning on a light with the same name as an area.""" + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + + device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + + kitchen_area = area_registry.async_create("kitchen") + device_registry.async_update_device(device.id, area_id=kitchen_area.id) + + kitchen_light = entity_registry.async_get_or_create( + "light", "demo", "1234", original_name="kitchen light" + ) + entity_registry.async_update_entity( + 
kitchen_light.entity_id, area_id=kitchen_area.id + ) + hass.states.async_set( + kitchen_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "kitchen light"} + ) + + ceiling_light = entity_registry.async_get_or_create( + "light", "demo", "5678", original_name="ceiling light" + ) + entity_registry.async_update_entity( + ceiling_light.entity_id, area_id=kitchen_area.id + ) + hass.states.async_set( + ceiling_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "ceiling light"} + ) + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + await hass.services.async_call( + "conversation", + "process", + {conversation.ATTR_TEXT: "turn on kitchen light"}, + ) + await hass.async_block_till_done() + + # Should only turn on one light instead of all lights in the kitchen + assert len(calls) == 1 + call = calls[0] + assert call.domain == LIGHT_DOMAIN + assert call.service == "turn_on" + assert call.data == {"entity_id": [kitchen_light.entity_id]} async def test_agent_id_validator_invalid_agent( @@ -191,6 +1081,64 @@ async def test_agent_id_validator_invalid_agent( conversation.agent_id_validator("conversation.home_assistant") +async def test_get_agent_list( + hass: HomeAssistant, + init_components, + mock_conversation_agent: MockAgent, + mock_agent_support_all: MockAgent, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test getting agent info.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "conversation/agent/list"}) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "smurfish"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "en"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "en-UK"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "de"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "de", "country": "ch"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + async def test_get_agent_info( hass: HomeAssistant, init_components, @@ -216,16 +1164,331 @@ async def test_get_agent_info( assert agent_info == snapshot -@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) -async def test_prepare_agent( +async def test_ws_hass_agent_debug( hass: HomeAssistant, init_components, - agent_id: str, + hass_ws_client: WebSocketGenerator, + area_registry: ar.AreaRegistry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: - """Test prepare agent.""" - with patch( - "homeassistant.components.conversation.default_agent.DefaultAgent.async_prepare" - ) as mock_prepare: - await conversation.async_prepare_agent(hass, agent_id, "en") + """Test homeassistant agent debug websocket command.""" + 
client = await hass_ws_client(hass) - assert len(mock_prepare.mock_calls) == 1 + kitchen_area = area_registry.async_create("kitchen") + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + entity_registry.async_update_entity( + "light.kitchen", + aliases={"my cool light"}, + area_id=kitchen_area.id, + ) + hass.states.async_set("light.kitchen", "off") + + on_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + off_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") + + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "turn on my cool light", + "turn my cool light off", + "turn on all lights in the kitchen", + "how many lights are on in the kitchen?", + "this will not match anything", # None in results + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + # Last sentence should be a failed match + assert msg["result"]["results"][-1] is None + + # Light state should not have been changed + assert len(on_calls) == 0 + assert len(off_calls) == 0 + + +async def test_ws_hass_agent_debug_null_result( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test homeassistant agent debug websocket command with a null result.""" + client = await hass_ws_client(hass) + + async def async_recognize(self, user_input, *args, **kwargs): + if user_input.text == "bad sentence": + return None + + return await self.async_recognize(user_input, *args, **kwargs) + + with patch( + "homeassistant.components.conversation.default_agent.DefaultAgent.async_recognize", + async_recognize, + ): + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "bad sentence", + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + assert msg["result"]["results"] == [None] + + +async def test_ws_hass_agent_debug_out_of_range( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test homeassistant agent debug websocket command with an out of range entity.""" + test_light = entity_registry.async_get_or_create("light", "demo", "1234") + hass.states.async_set( + test_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "test light"} + ) + + client = await hass_ws_client(hass) + + # Brightness is in range (0-100) + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "set test light brightness to 100%", + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + results = msg["result"]["results"] + assert len(results) == 1 + assert results[0]["match"] + + # Brightness is out of range + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "set test light brightness to 1001%", + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + results = msg["result"]["results"] + assert len(results) == 1 + assert not results[0]["match"] + + # Name matched, but brightness didn't + assert results[0]["slots"] == {"name": "test light"} + assert results[0]["unmatched_slots"] == {"brightness": 1001} + + +async def test_ws_hass_agent_debug_custom_sentence( + hass: 
HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test homeassistant agent debug websocket command with a custom sentence.""" + # Expecting testing_config/custom_sentences/en/beer.yaml + intent.async_register(hass, OrderBeerIntentHandler()) + + client = await hass_ws_client(hass) + + # Brightness is in range (0-100) + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "I'd like to order a lager, please.", + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + debug_results = msg["result"].get("results", []) + assert len(debug_results) == 1 + assert debug_results[0].get("match") + assert debug_results[0].get("source") == "custom" + assert debug_results[0].get("file") == "en/beer.yaml" + + +async def test_ws_hass_agent_debug_sentence_trigger( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test homeassistant agent debug websocket command with a sentence trigger.""" + calls = async_mock_service(hass, "test", "automation") + assert await async_setup_component( + hass, + "automation", + { + "automation": { + "trigger": { + "platform": "conversation", + "command": ["hello", "hello[ world]"], + }, + "action": { + "service": "test.automation", + "data_template": {"data": "{{ trigger }}"}, + }, + } + }, + ) + + client = await hass_ws_client(hass) + + # Use trigger sentence + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": ["hello world"], + } + ) + await hass.async_block_till_done() + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + debug_results = msg["result"].get("results", []) + assert len(debug_results) == 1 + assert debug_results[0].get("match") + assert debug_results[0].get("source") == "trigger" + assert debug_results[0].get("sentence_template") == "hello[ world]" + + # Trigger should not have been executed + assert len(calls) == 0 + + +async def test_custom_sentences_priority( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_admin_user: MockUser, + snapshot: SnapshotAssertion, +) -> None: + """Test that user intents from custom_sentences have priority over builtin intents/sentences.""" + with tempfile.NamedTemporaryFile( + mode="w+", + encoding="utf-8", + suffix=".yaml", + dir=os.path.join(hass.config.config_dir, "custom_sentences", "en"), + ) as custom_sentences_file: + # Add a custom sentence that would match a builtin sentence. + # Custom sentences have priority. + yaml.dump( + { + "language": "en", + "intents": { + "CustomIntent": {"data": [{"sentences": ["turn on the lamp"]}]} + }, + }, + custom_sentences_file, + ) + custom_sentences_file.flush() + custom_sentences_file.seek(0) + + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "conversation", {}) + assert await async_setup_component(hass, "light", {}) + assert await async_setup_component(hass, "intent", {}) + assert await async_setup_component( + hass, + "intent_script", + { + "intent_script": { + "CustomIntent": {"speech": {"text": "custom response"}} + } + }, + ) + + # Ensure that a "lamp" exists so that we can verify the custom intent + # overrides the builtin sentence. 
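# Illustrative sketch (not part of the patch above): the priority test builds its
# custom sentence file on the fly with tempfile and yaml.dump. For reference, a
# minimal standalone helper that writes the same structure to the conventional
# location (<config_dir>/custom_sentences/<language>/<name>.yaml) might look like
# the code below. The helper name and the file name are illustrative only; they are
# not defined by the test or by Home Assistant.
from pathlib import Path

import yaml


def write_custom_sentences_file(config_dir: str, language: str = "en") -> Path:
    """Write a custom_sentences file mapping a sentence to CustomIntent."""
    target_dir = Path(config_dir) / "custom_sentences" / language
    target_dir.mkdir(parents=True, exist_ok=True)
    target = target_dir / "custom_intent.yaml"  # arbitrary file name
    target.write_text(
        yaml.dump(
            {
                "language": language,
                "intents": {
                    "CustomIntent": {"data": [{"sentences": ["turn on the lamp"]}]}
                },
            }
        ),
        encoding="utf-8",
    )
    return target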
+ hass.states.async_set("light.lamp", "off") + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", + json={ + "text": "turn on the lamp", + "language": hass.config.language, + }, + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data["response"]["response_type"] == "action_done" + assert data["response"]["speech"]["plain"]["speech"] == "custom response" + + +async def test_config_sentences_priority( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_admin_user: MockUser, + snapshot: SnapshotAssertion, +) -> None: + """Test that user intents from configuration.yaml have priority over builtin intents/sentences.""" + # Add a custom sentence that would match a builtin sentence. + # Custom sentences have priority. + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "intent", {}) + assert await async_setup_component( + hass, + "conversation", + {"conversation": {"intents": {"CustomIntent": ["turn on the lamp"]}}}, + ) + assert await async_setup_component(hass, "light", {}) + assert await async_setup_component( + hass, + "intent_script", + {"intent_script": {"CustomIntent": {"speech": {"text": "custom response"}}}}, + ) + + # Ensure that a "lamp" exists so that we can verify the custom intent + # overrides the builtin sentence. + hass.states.async_set("light.lamp", "off") + + client = await hass_client() + resp = await client.post( + "/api/conversation/process", + json={ + "text": "turn on the lamp", + "language": hass.config.language, + }, + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data["response"]["response_type"] == "action_done" + assert data["response"]["speech"]["plain"]["speech"] == "custom response" diff --git a/tests/components/conversation/test_trace.py b/tests/components/conversation/test_trace.py index 7c00b9a80b2..c586eb8865d 100644 --- a/tests/components/conversation/test_trace.py +++ b/tests/components/conversation/test_trace.py @@ -33,7 +33,7 @@ async def test_converation_trace( assert traces last_trace = traces[-1].as_dict() assert last_trace.get("events") - assert len(last_trace.get("events")) == 2 + assert len(last_trace.get("events")) == 1 trace_event = last_trace["events"][0] assert ( trace_event.get("event_type") == trace.ConversationTraceEventType.ASYNC_PROCESS @@ -50,16 +50,6 @@ async def test_converation_trace( == "Added apples" ) - trace_event = last_trace["events"][1] - assert trace_event.get("event_type") == trace.ConversationTraceEventType.TOOL_CALL - assert trace_event.get("data") == { - "intent_name": "HassListAddItem", - "slots": { - "name": "Shopping List", - "item": "apples", - }, - } - async def test_converation_trace_error( hass: HomeAssistant, diff --git a/tests/components/conversation/test_trigger.py b/tests/components/conversation/test_trigger.py index 903bc405cf0..c5d4382e917 100644 --- a/tests/components/conversation/test_trigger.py +++ b/tests/components/conversation/test_trigger.py @@ -6,15 +6,21 @@ import pytest import voluptuous as vol from homeassistant.components.conversation import default_agent -from homeassistant.components.conversation.const import DATA_DEFAULT_ENTITY from homeassistant.components.conversation.models import ConversationInput from homeassistant.core import Context, HomeAssistant, ServiceCall from homeassistant.helpers import trigger from homeassistant.setup import async_setup_component +from tests.common import async_mock_service from tests.typing import 
WebSocketGenerator +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.fixture(autouse=True) async def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" @@ -23,7 +29,7 @@ async def setup_comp(hass: HomeAssistant) -> None: async def test_if_fires_on_event( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None ) -> None: """Test the firing of events.""" assert await async_setup_component( @@ -56,10 +62,8 @@ async def test_if_fires_on_event( assert service_response["response"]["speech"]["plain"]["speech"] == "Done" await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].domain == "test" - assert service_calls[1].service == "automation" - assert service_calls[1].data["data"] == { + assert len(calls) == 1 + assert calls[0].data["data"] == { "alias": None, "id": "0", "idx": "0", @@ -71,7 +75,7 @@ async def test_if_fires_on_event( } -async def test_response(hass: HomeAssistant) -> None: +async def test_response(hass: HomeAssistant, setup_comp) -> None: """Test the conversation response action.""" response = "I'm sorry, Dave. I'm afraid I can't do that" assert await async_setup_component( @@ -102,7 +106,7 @@ async def test_response(hass: HomeAssistant) -> None: assert service_response["response"]["speech"]["plain"]["speech"] == response -async def test_empty_response(hass: HomeAssistant) -> None: +async def test_empty_response(hass: HomeAssistant, setup_comp) -> None: """Test the conversation response action with an empty response.""" assert await async_setup_component( hass, @@ -133,7 +137,7 @@ async def test_empty_response(hass: HomeAssistant) -> None: async def test_response_same_sentence( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None ) -> None: """Test the conversation response action with multiple triggers using the same sentence.""" assert await async_setup_component( @@ -182,10 +186,8 @@ async def test_response_same_sentence( assert service_response["response"]["speech"]["plain"]["speech"] == "response 1" # Service should still have been called - assert len(service_calls) == 2 - assert service_calls[1].domain == "test" - assert service_calls[1].service == "automation" - assert service_calls[1].data["data"] == { + assert len(calls) == 1 + assert calls[0].data["data"] == { "alias": None, "id": "trigger1", "idx": "0", @@ -199,6 +201,8 @@ async def test_response_same_sentence( async def test_response_same_sentence_with_error( hass: HomeAssistant, + calls: list[ServiceCall], + setup_comp: None, caplog: pytest.LogCaptureFixture, ) -> None: """Test the conversation response action with multiple triggers using the same sentence and an error.""" @@ -249,7 +253,7 @@ async def test_response_same_sentence_with_error( async def test_subscribe_trigger_does_not_interfere_with_responses( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, setup_comp, hass_ws_client: WebSocketGenerator ) -> None: """Test that subscribing to a trigger from the websocket API does not interfere with responses.""" websocket_client = await hass_ws_client() @@ -306,7 +310,7 @@ async def test_subscribe_trigger_does_not_interfere_with_responses( async def test_same_trigger_multiple_sentences( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: 
list[ServiceCall], setup_comp: None ) -> None: """Test matching of multiple sentences from the same trigger.""" assert await async_setup_component( @@ -337,10 +341,8 @@ async def test_same_trigger_multiple_sentences( # Only triggers once await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].domain == "test" - assert service_calls[1].service == "automation" - assert service_calls[1].data["data"] == { + assert len(calls) == 1 + assert calls[0].data["data"] == { "alias": None, "id": "0", "idx": "0", @@ -353,7 +355,7 @@ async def test_same_trigger_multiple_sentences( async def test_same_sentence_multiple_triggers( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None ) -> None: """Test use of the same sentence in multiple triggers.""" assert await async_setup_component( @@ -401,12 +403,11 @@ async def test_same_sentence_multiple_triggers( ) await hass.async_block_till_done() - assert len(service_calls) == 3 + assert len(calls) == 2 # The calls may come in any order call_datas: set[tuple[str, str, str]] = set() - service_calls.pop(0) # First call is the call to conversation.process - for call in service_calls: + for call in calls: call_data = call.data["data"] call_datas.add((call_data["id"], call_data["platform"], call_data["sentence"])) @@ -473,7 +474,9 @@ async def test_fails_on_no_sentences(hass: HomeAssistant) -> None: ) -async def test_wildcards(hass: HomeAssistant, service_calls: list[ServiceCall]) -> None: +async def test_wildcards( + hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None +) -> None: """Test wildcards in trigger sentences.""" assert await async_setup_component( hass, @@ -504,10 +507,8 @@ async def test_wildcards(hass: HomeAssistant, service_calls: list[ServiceCall]) ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].domain == "test" - assert service_calls[1].service == "automation" - assert service_calls[1].data["data"] == { + assert len(calls) == 1 + assert calls[0].data["data"] == { "alias": None, "id": "0", "idx": "0", @@ -535,6 +536,8 @@ async def test_wildcards(hass: HomeAssistant, service_calls: list[ServiceCall]) async def test_trigger_with_device_id(hass: HomeAssistant) -> None: """Test that a trigger receives a device_id.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "conversation", {}) assert await async_setup_component( hass, "automation", @@ -551,7 +554,7 @@ async def test_trigger_with_device_id(hass: HomeAssistant) -> None: }, ) - agent = hass.data[DATA_DEFAULT_ENTITY] + agent = default_agent.async_get_default_agent(hass) assert isinstance(agent, default_agent.DefaultAgent) result = await agent.async_process( diff --git a/tests/components/coolmaster/conftest.py b/tests/components/coolmaster/conftest.py index 27a801288b0..15670af4bc8 100644 --- a/tests/components/coolmaster/conftest.py +++ b/tests/components/coolmaster/conftest.py @@ -18,7 +18,7 @@ DEFAULT_INFO: dict[str, str] = { "version": "1", } -TEST_UNITS: dict[str, dict[str, Any]] = { +TEST_UNITS: dict[dict[str, Any]] = { "L1.100": { "is_on": False, "thermostat": 20, diff --git a/tests/components/counter/common.py b/tests/components/counter/common.py index e5d9316cd22..b5156c1a432 100644 --- a/tests/components/counter/common.py +++ b/tests/components/counter/common.py @@ -11,13 +11,13 @@ from homeassistant.components.counter import ( SERVICE_RESET, ) from homeassistant.const 
import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import callback from homeassistant.loader import bind_hass @callback @bind_hass -def async_increment(hass: HomeAssistant, entity_id: str) -> None: +def async_increment(hass, entity_id): """Increment a counter.""" hass.async_create_task( hass.services.async_call(DOMAIN, SERVICE_INCREMENT, {ATTR_ENTITY_ID: entity_id}) @@ -26,7 +26,7 @@ def async_increment(hass: HomeAssistant, entity_id: str) -> None: @callback @bind_hass -def async_decrement(hass: HomeAssistant, entity_id: str) -> None: +def async_decrement(hass, entity_id): """Decrement a counter.""" hass.async_create_task( hass.services.async_call(DOMAIN, SERVICE_DECREMENT, {ATTR_ENTITY_ID: entity_id}) @@ -35,7 +35,7 @@ def async_decrement(hass: HomeAssistant, entity_id: str) -> None: @callback @bind_hass -def async_reset(hass: HomeAssistant, entity_id: str) -> None: +def async_reset(hass, entity_id): """Reset a counter.""" hass.async_create_task( hass.services.async_call(DOMAIN, SERVICE_RESET, {ATTR_ENTITY_ID: entity_id}) diff --git a/tests/components/cover/common.py b/tests/components/cover/common.py index b4a0cdb06d4..d9f67e73f17 100644 --- a/tests/components/cover/common.py +++ b/tests/components/cover/common.py @@ -2,7 +2,8 @@ from typing import Any -from homeassistant.components.cover import CoverEntity, CoverEntityFeature, CoverState +from homeassistant.components.cover import CoverEntity, CoverEntityFeature +from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING from tests.common import MockEntity @@ -25,7 +26,7 @@ class MockCover(MockEntity, CoverEntity): @property def is_closed(self): """Return if the cover is closed or not.""" - if "state" in self._values and self._values["state"] == CoverState.CLOSED: + if "state" in self._values and self._values["state"] == STATE_CLOSED: return True return self.current_cover_position == 0 @@ -34,7 +35,7 @@ class MockCover(MockEntity, CoverEntity): def is_opening(self): """Return if the cover is opening or not.""" if "state" in self._values: - return self._values["state"] == CoverState.OPENING + return self._values["state"] == STATE_OPENING return False @@ -42,28 +43,28 @@ class MockCover(MockEntity, CoverEntity): def is_closing(self): """Return if the cover is closing or not.""" if "state" in self._values: - return self._values["state"] == CoverState.CLOSING + return self._values["state"] == STATE_CLOSING return False def open_cover(self, **kwargs) -> None: """Open cover.""" if self._reports_opening_closing: - self._values["state"] = CoverState.OPENING + self._values["state"] = STATE_OPENING else: - self._values["state"] = CoverState.OPEN + self._values["state"] = STATE_OPEN def close_cover(self, **kwargs) -> None: """Close cover.""" if self._reports_opening_closing: - self._values["state"] = CoverState.CLOSING + self._values["state"] = STATE_CLOSING else: - self._values["state"] = CoverState.CLOSED + self._values["state"] = STATE_CLOSED def stop_cover(self, **kwargs) -> None: """Stop cover.""" assert CoverEntityFeature.STOP in self.supported_features - self._values["state"] = CoverState.CLOSED if self.is_closed else CoverState.OPEN + self._values["state"] = STATE_CLOSED if self.is_closed else STATE_OPEN @property def current_cover_position(self): diff --git a/tests/components/cover/test_device_condition.py b/tests/components/cover/test_device_condition.py index aa5f150172c..545bdd6587e 100644 --- a/tests/components/cover/test_device_condition.py +++ 
b/tests/components/cover/test_device_condition.py @@ -4,9 +4,17 @@ import pytest from pytest_unordered import unordered from homeassistant.components import automation -from homeassistant.components.cover import DOMAIN, CoverEntityFeature, CoverState +from homeassistant.components.cover import DOMAIN, CoverEntityFeature from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import CONF_PLATFORM, STATE_UNAVAILABLE, EntityCategory +from homeassistant.const import ( + CONF_PLATFORM, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, + STATE_UNAVAILABLE, + EntityCategory, +) from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider @@ -18,6 +26,7 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, setup_test_component_platform, ) @@ -27,6 +36,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.mark.parametrize( ("set_state", "features_reg", "features_state", "expected_condition_types"), [ @@ -344,7 +359,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -357,7 +372,7 @@ async def test_if_state( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, CoverState.OPEN) + hass.states.async_set(entry.entity_id, STATE_OPEN) assert await async_setup_component( hass, @@ -458,36 +473,36 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_open - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_open - event - test_event1" - hass.states.async_set(entry.entity_id, CoverState.CLOSED) + hass.states.async_set(entry.entity_id, STATE_CLOSED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "is_closed - event - test_event2" + assert len(calls) == 2 + assert calls[1].data["some"] == "is_closed - event - test_event2" - hass.states.async_set(entry.entity_id, CoverState.OPENING) + hass.states.async_set(entry.entity_id, STATE_OPENING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[2].data["some"] == "is_opening - event - test_event3" + assert len(calls) == 3 + assert calls[2].data["some"] == "is_opening - event - test_event3" - hass.states.async_set(entry.entity_id, CoverState.CLOSING) + hass.states.async_set(entry.entity_id, STATE_CLOSING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(service_calls) == 4 - assert service_calls[3].data["some"] == "is_closing - event - test_event4" 
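# Illustrative note (not part of the patch above): the strings asserted in this test,
# e.g. "is_open - event - test_event1", are rendered by the automation action's
# data_template from the trigger variables. A sketch of the template shape, assuming
# the usual pattern of label, trigger platform and firing event type (not the verbatim
# test config), is:
CONDITION_TEMPLATE_SHAPE = (
    "is_open - {{ trigger.platform }} - {{ trigger.event.event_type }}"
)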
+ assert len(calls) == 4 + assert calls[3].data["some"] == "is_closing - event - test_event4" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -500,7 +515,7 @@ async def test_if_state_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, CoverState.OPEN) + hass.states.async_set(entry.entity_id, STATE_OPEN) assert await async_setup_component( hass, @@ -535,15 +550,15 @@ async def test_if_state_legacy( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_open - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_open - event - test_event1" async def test_if_position( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, mock_cover_entities: list[MockCover], ) -> None: @@ -661,13 +676,13 @@ async def test_if_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[0].data["some"] == "is_pos_gt_45 - event - test_event1" - assert service_calls[1].data["some"] == "is_pos_lt_90 - event - test_event2" - assert service_calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" + assert len(calls) == 3 + assert calls[0].data["some"] == "is_pos_gt_45 - event - test_event1" + assert calls[1].data["some"] == "is_pos_lt_90 - event - test_event2" + assert calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" hass.states.async_set( - ent.entity_id, CoverState.CLOSED, attributes={"current_position": 45} + ent.entity_id, STATE_CLOSED, attributes={"current_position": 45} ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() @@ -675,25 +690,25 @@ async def test_if_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(service_calls) == 5 - assert service_calls[3].data["some"] == "is_pos_not_gt_45 - event - test_event1" - assert service_calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" + assert len(calls) == 5 + assert calls[3].data["some"] == "is_pos_not_gt_45 - event - test_event1" + assert calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" hass.states.async_set( - ent.entity_id, CoverState.CLOSED, attributes={"current_position": 90} + ent.entity_id, STATE_CLOSED, attributes={"current_position": 90} ) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(service_calls) == 6 - assert service_calls[5].data["some"] == "is_pos_gt_45 - event - test_event1" + assert len(calls) == 6 + assert calls[5].data["some"] == "is_pos_gt_45 - event - test_event1" hass.states.async_set(ent.entity_id, STATE_UNAVAILABLE, attributes={}) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 7 - assert service_calls[6].data["some"] == "is_pos_not_gt_45 - event - test_event1" + assert len(calls) == 7 + assert calls[6].data["some"] == "is_pos_not_gt_45 
- event - test_event1" for record in caplog.records: assert record.levelname in ("DEBUG", "INFO") @@ -703,7 +718,7 @@ async def test_if_tilt_position( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, mock_cover_entities: list[MockCover], ) -> None: @@ -821,13 +836,13 @@ async def test_if_tilt_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[0].data["some"] == "is_pos_gt_45 - event - test_event1" - assert service_calls[1].data["some"] == "is_pos_lt_90 - event - test_event2" - assert service_calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" + assert len(calls) == 3 + assert calls[0].data["some"] == "is_pos_gt_45 - event - test_event1" + assert calls[1].data["some"] == "is_pos_lt_90 - event - test_event2" + assert calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" hass.states.async_set( - ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 45} + ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 45} ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() @@ -835,12 +850,12 @@ async def test_if_tilt_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(service_calls) == 5 - assert service_calls[3].data["some"] == "is_pos_not_gt_45 - event - test_event1" - assert service_calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" + assert len(calls) == 5 + assert calls[3].data["some"] == "is_pos_not_gt_45 - event - test_event1" + assert calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" hass.states.async_set( - ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 90} + ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 90} ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() @@ -848,14 +863,14 @@ async def test_if_tilt_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(service_calls) == 6 - assert service_calls[5].data["some"] == "is_pos_gt_45 - event - test_event1" + assert len(calls) == 6 + assert calls[5].data["some"] == "is_pos_gt_45 - event - test_event1" hass.states.async_set(ent.entity_id, STATE_UNAVAILABLE, attributes={}) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 7 - assert service_calls[6].data["some"] == "is_pos_not_gt_45 - event - test_event1" + assert len(calls) == 7 + assert calls[6].data["some"] == "is_pos_not_gt_45 - event - test_event1" for record in caplog.records: assert record.levelname in ("DEBUG", "INFO") diff --git a/tests/components/cover/test_device_trigger.py b/tests/components/cover/test_device_trigger.py index e6021d22326..419eea05f9f 100644 --- a/tests/components/cover/test_device_trigger.py +++ b/tests/components/cover/test_device_trigger.py @@ -6,9 +6,16 @@ import pytest from pytest_unordered import unordered from homeassistant.components import automation -from homeassistant.components.cover import DOMAIN, CoverEntityFeature, CoverState +from homeassistant.components.cover import DOMAIN, CoverEntityFeature from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import CONF_PLATFORM, EntityCategory +from 
homeassistant.const import ( + CONF_PLATFORM, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, + EntityCategory, +) from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider @@ -22,6 +29,7 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, setup_test_component_platform, ) @@ -31,6 +39,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.mark.parametrize( ("set_state", "features_reg", "features_state", "expected_trigger_types"), [ @@ -367,7 +381,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for state triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -380,7 +394,7 @@ async def test_if_fires_on_state_change( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, CoverState.CLOSED) + hass.states.async_set(entry.entity_id, STATE_CLOSED) assert await async_setup_component( hass, @@ -480,38 +494,38 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is opened. - hass.states.async_set(entry.entity_id, CoverState.OPEN) + hass.states.async_set(entry.entity_id, STATE_OPEN) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"opened - device - {entry.entity_id} - closed - open - None" ) # Fake that the entity is closed. - hass.states.async_set(entry.entity_id, CoverState.CLOSED) + hass.states.async_set(entry.entity_id, STATE_CLOSED) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[1].data["some"] + calls[1].data["some"] == f"closed - device - {entry.entity_id} - open - closed - None" ) # Fake that the entity is opening. - hass.states.async_set(entry.entity_id, CoverState.OPENING) + hass.states.async_set(entry.entity_id, STATE_OPENING) await hass.async_block_till_done() - assert len(service_calls) == 3 + assert len(calls) == 3 assert ( - service_calls[2].data["some"] + calls[2].data["some"] == f"opening - device - {entry.entity_id} - closed - opening - None" ) # Fake that the entity is closing. 
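# Illustrative note (not part of the patch above): the strings asserted in this test,
# e.g. "opened - device - <entity_id> - closed - open - None", follow the pattern
# label - platform - entity_id - from_state - to_state - for, i.e. a data_template
# over the device trigger variables roughly like this sketch (not the verbatim test
# config):
TRIGGER_TEMPLATE_SHAPE = (
    "opened - {{ trigger.platform }} - {{ trigger.entity_id }} - "
    "{{ trigger.from_state.state }} - {{ trigger.to_state.state }} - {{ trigger.for }}"
)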
- hass.states.async_set(entry.entity_id, CoverState.CLOSING) + hass.states.async_set(entry.entity_id, STATE_CLOSING) await hass.async_block_till_done() - assert len(service_calls) == 4 + assert len(calls) == 4 assert ( - service_calls[3].data["some"] + calls[3].data["some"] == f"closing - device - {entry.entity_id} - opening - closing - None" ) @@ -520,7 +534,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for state triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -533,7 +547,7 @@ async def test_if_fires_on_state_change_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, CoverState.CLOSED) + hass.states.async_set(entry.entity_id, STATE_CLOSED) assert await async_setup_component( hass, @@ -567,11 +581,11 @@ async def test_if_fires_on_state_change_legacy( ) # Fake that the entity is opened. - hass.states.async_set(entry.entity_id, CoverState.OPEN) + hass.states.async_set(entry.entity_id, STATE_OPEN) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"opened - device - {entry.entity_id} - closed - open - None" ) @@ -580,7 +594,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -593,7 +607,7 @@ async def test_if_fires_on_state_change_with_for( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, CoverState.CLOSED) + hass.states.async_set(entry.entity_id, STATE_CLOSED) assert await async_setup_component( hass, @@ -626,17 +640,17 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 - hass.states.async_set(entry.entity_id, CoverState.OPEN) + hass.states.async_set(entry.entity_id, STATE_OPEN) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await hass.async_block_till_done() assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - closed - open - 0:00:05" ) @@ -646,7 +660,7 @@ async def test_if_fires_on_position( device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, mock_cover_entities: list[MockCover], - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for position triggers.""" setup_test_component_platform(hass, DOMAIN, mock_cover_entities) @@ -747,23 +761,17 @@ async def test_if_fires_on_position( ] }, ) + hass.states.async_set(ent.entity_id, STATE_OPEN, attributes={"current_position": 1}) hass.states.async_set( - ent.entity_id, CoverState.OPEN, attributes={"current_position": 1} + ent.entity_id, STATE_CLOSED, attributes={"current_position": 95} ) hass.states.async_set( - ent.entity_id, CoverState.CLOSED, attributes={"current_position": 95} - ) - hass.states.async_set( - ent.entity_id, CoverState.OPEN, 
attributes={"current_position": 50} + ent.entity_id, STATE_OPEN, attributes={"current_position": 50} ) await hass.async_block_till_done() - assert len(service_calls) == 3 + assert len(calls) == 3 assert sorted( - [ - service_calls[0].data["some"], - service_calls[1].data["some"], - service_calls[2].data["some"], - ] + [calls[0].data["some"], calls[1].data["some"], calls[2].data["some"]] ) == sorted( [ ( @@ -776,26 +784,26 @@ async def test_if_fires_on_position( ) hass.states.async_set( - ent.entity_id, CoverState.CLOSED, attributes={"current_position": 95} + ent.entity_id, STATE_CLOSED, attributes={"current_position": 95} ) await hass.async_block_till_done() hass.states.async_set( - ent.entity_id, CoverState.CLOSED, attributes={"current_position": 45} + ent.entity_id, STATE_CLOSED, attributes={"current_position": 45} ) await hass.async_block_till_done() - assert len(service_calls) == 4 + assert len(calls) == 4 assert ( - service_calls[3].data["some"] + calls[3].data["some"] == f"is_pos_lt_90 - device - {entry.entity_id} - closed - closed - None" ) hass.states.async_set( - ent.entity_id, CoverState.CLOSED, attributes={"current_position": 90} + ent.entity_id, STATE_CLOSED, attributes={"current_position": 90} ) await hass.async_block_till_done() - assert len(service_calls) == 5 + assert len(calls) == 5 assert ( - service_calls[4].data["some"] + calls[4].data["some"] == f"is_pos_gt_45 - device - {entry.entity_id} - closed - closed - None" ) @@ -804,7 +812,7 @@ async def test_if_fires_on_tilt_position( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_cover_entities: list[MockCover], ) -> None: """Test for tilt position triggers.""" @@ -907,22 +915,18 @@ async def test_if_fires_on_tilt_position( }, ) hass.states.async_set( - ent.entity_id, CoverState.OPEN, attributes={"current_tilt_position": 1} + ent.entity_id, STATE_OPEN, attributes={"current_tilt_position": 1} ) hass.states.async_set( - ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 95} + ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 95} ) hass.states.async_set( - ent.entity_id, CoverState.OPEN, attributes={"current_tilt_position": 50} + ent.entity_id, STATE_OPEN, attributes={"current_tilt_position": 50} ) await hass.async_block_till_done() - assert len(service_calls) == 3 + assert len(calls) == 3 assert sorted( - [ - service_calls[0].data["some"], - service_calls[1].data["some"], - service_calls[2].data["some"], - ] + [calls[0].data["some"], calls[1].data["some"], calls[2].data["some"]] ) == sorted( [ ( @@ -935,25 +939,25 @@ async def test_if_fires_on_tilt_position( ) hass.states.async_set( - ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 95} + ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 95} ) await hass.async_block_till_done() hass.states.async_set( - ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 45} + ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 45} ) await hass.async_block_till_done() - assert len(service_calls) == 4 + assert len(calls) == 4 assert ( - service_calls[3].data["some"] + calls[3].data["some"] == f"is_pos_lt_90 - device - {entry.entity_id} - closed - closed - None" ) hass.states.async_set( - ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 90} + ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 90} ) await hass.async_block_till_done() - assert 
len(service_calls) == 5 + assert len(calls) == 5 assert ( - service_calls[4].data["some"] + calls[4].data["some"] == f"is_pos_gt_45 - device - {entry.entity_id} - closed - closed - None" ) diff --git a/tests/components/cover/test_init.py b/tests/components/cover/test_init.py index 6b80dd1ab9a..7da6c6efe21 100644 --- a/tests/components/cover/test_init.py +++ b/tests/components/cover/test_init.py @@ -5,10 +5,16 @@ from enum import Enum import pytest from homeassistant.components import cover -from homeassistant.components.cover import CoverState -from homeassistant.const import ATTR_ENTITY_ID, CONF_PLATFORM, SERVICE_TOGGLE -from homeassistant.core import HomeAssistant, ServiceResponse -from homeassistant.helpers.entity import Entity +from homeassistant.const import ( + ATTR_ENTITY_ID, + CONF_PLATFORM, + SERVICE_TOGGLE, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, +) +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from .common import MockCover @@ -99,23 +105,21 @@ async def test_services( assert is_closing(hass, ent6) # Without STOP but still reports opening/closing has a 4th possible toggle state - set_state(ent6, CoverState.CLOSED) + set_state(ent6, STATE_CLOSED) await call_service(hass, SERVICE_TOGGLE, ent6) assert is_opening(hass, ent6) # After the unusual state transition: closing -> fully open, toggle should close - set_state(ent5, CoverState.OPEN) + set_state(ent5, STATE_OPEN) await call_service(hass, SERVICE_TOGGLE, ent5) # Start closing assert is_closing(hass, ent5) - set_state( - ent5, CoverState.OPEN - ) # Unusual state transition from closing -> fully open + set_state(ent5, STATE_OPEN) # Unusual state transition from closing -> fully open set_cover_position(ent5, 100) await call_service(hass, SERVICE_TOGGLE, ent5) # Should close, not open assert is_closing(hass, ent5) -def call_service(hass: HomeAssistant, service: str, ent: Entity) -> ServiceResponse: +def call_service(hass, service, ent): """Call any service on entity.""" return hass.services.async_call( cover.DOMAIN, service, {ATTR_ENTITY_ID: ent.entity_id}, blocking=True @@ -132,27 +136,27 @@ def set_state(ent, state) -> None: ent._values["state"] = state -def is_open(hass: HomeAssistant, ent: Entity) -> bool: +def is_open(hass, ent): """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, CoverState.OPEN) + return hass.states.is_state(ent.entity_id, STATE_OPEN) -def is_opening(hass: HomeAssistant, ent: Entity) -> bool: +def is_opening(hass, ent): """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, CoverState.OPENING) + return hass.states.is_state(ent.entity_id, STATE_OPENING) -def is_closed(hass: HomeAssistant, ent: Entity) -> bool: +def is_closed(hass, ent): """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, CoverState.CLOSED) + return hass.states.is_state(ent.entity_id, STATE_CLOSED) -def is_closing(hass: HomeAssistant, ent: Entity) -> bool: +def is_closing(hass, ent): """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, CoverState.CLOSING) + return hass.states.is_state(ent.entity_id, STATE_CLOSING) -def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: +def _create_tuples(enum: Enum, constant_prefix: str) -> list[tuple[Enum, str]]: return [(enum_field, constant_prefix) for enum_field in enum] diff --git 
a/tests/components/cover/test_intent.py b/tests/components/cover/test_intent.py index 383a55e2a72..8ee621596db 100644 --- a/tests/components/cover/test_intent.py +++ b/tests/components/cover/test_intent.py @@ -1,18 +1,14 @@ """The tests for the cover platform.""" -from typing import Any - -import pytest - from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, - CoverState, intent as cover_intent, ) +from homeassistant.const import STATE_CLOSED, STATE_OPEN from homeassistant.core import HomeAssistant from homeassistant.helpers import intent from homeassistant.setup import async_setup_component @@ -20,26 +16,15 @@ from homeassistant.setup import async_setup_component from tests.common import async_mock_service -@pytest.mark.parametrize( - ("slots"), - [ - ({"name": {"value": "garage door"}}), - ({"device_class": {"value": "garage"}}), - ], -) -async def test_open_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> None: +async def test_open_cover_intent(hass: HomeAssistant) -> None: """Test HassOpenCover intent.""" await cover_intent.async_setup_intents(hass) - hass.states.async_set( - f"{DOMAIN}.garage_door", - CoverState.CLOSED, - attributes={"device_class": "garage"}, - ) + hass.states.async_set(f"{DOMAIN}.garage_door", STATE_CLOSED) calls = async_mock_service(hass, DOMAIN, SERVICE_OPEN_COVER) response = await intent.async_handle( - hass, "test", cover_intent.INTENT_OPEN_COVER, slots + hass, "test", cover_intent.INTENT_OPEN_COVER, {"name": {"value": "garage door"}} ) await hass.async_block_till_done() @@ -51,27 +36,18 @@ async def test_open_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> assert call.data == {"entity_id": f"{DOMAIN}.garage_door"} -@pytest.mark.parametrize( - ("slots"), - [ - ({"name": {"value": "garage door"}}), - ({"device_class": {"value": "garage"}}), - ], -) -async def test_close_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> None: +async def test_close_cover_intent(hass: HomeAssistant) -> None: """Test HassCloseCover intent.""" await cover_intent.async_setup_intents(hass) - hass.states.async_set( - f"{DOMAIN}.garage_door", CoverState.OPEN, attributes={"device_class": "garage"} - ) + hass.states.async_set(f"{DOMAIN}.garage_door", STATE_OPEN) calls = async_mock_service(hass, DOMAIN, SERVICE_CLOSE_COVER) response = await intent.async_handle( hass, "test", cover_intent.INTENT_CLOSE_COVER, - slots, + {"name": {"value": "garage door"}}, ) await hass.async_block_till_done() @@ -83,22 +59,13 @@ async def test_close_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> assert call.data == {"entity_id": f"{DOMAIN}.garage_door"} -@pytest.mark.parametrize( - ("slots"), - [ - ({"name": {"value": "test cover"}, "position": {"value": 50}}), - ({"device_class": {"value": "shade"}, "position": {"value": 50}}), - ], -) -async def test_set_cover_position(hass: HomeAssistant, slots: dict[str, Any]) -> None: +async def test_set_cover_position(hass: HomeAssistant) -> None: """Test HassSetPosition intent for covers.""" assert await async_setup_component(hass, "intent", {}) entity_id = f"{DOMAIN}.test_cover" hass.states.async_set( - entity_id, - CoverState.CLOSED, - attributes={ATTR_CURRENT_POSITION: 0, "device_class": "shade"}, + entity_id, STATE_CLOSED, attributes={ATTR_CURRENT_POSITION: 0} ) calls = async_mock_service(hass, DOMAIN, SERVICE_SET_COVER_POSITION) @@ -106,7 +73,7 @@ async def test_set_cover_position(hass: HomeAssistant, slots: dict[str, Any]) -> 
hass, "test", intent.INTENT_SET_POSITION, - slots, + {"name": {"value": "test cover"}, "position": {"value": 50}}, ) await hass.async_block_till_done() diff --git a/tests/components/cover/test_reproduce_state.py b/tests/components/cover/test_reproduce_state.py index 4aad27011fa..f5dd01745d3 100644 --- a/tests/components/cover/test_reproduce_state.py +++ b/tests/components/cover/test_reproduce_state.py @@ -7,7 +7,6 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - CoverState, ) from homeassistant.const import ( SERVICE_CLOSE_COVER, @@ -16,6 +15,8 @@ from homeassistant.const import ( SERVICE_OPEN_COVER_TILT, SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, + STATE_CLOSED, + STATE_OPEN, ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers.state import async_reproduce_state @@ -27,32 +28,32 @@ async def test_reproducing_states( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test reproducing Cover states.""" - hass.states.async_set("cover.entity_close", CoverState.CLOSED, {}) + hass.states.async_set("cover.entity_close", STATE_CLOSED, {}) hass.states.async_set( "cover.entity_close_attr", - CoverState.CLOSED, + STATE_CLOSED, {ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0}, ) hass.states.async_set( - "cover.entity_close_tilt", CoverState.CLOSED, {ATTR_CURRENT_TILT_POSITION: 50} + "cover.entity_close_tilt", STATE_CLOSED, {ATTR_CURRENT_TILT_POSITION: 50} ) - hass.states.async_set("cover.entity_open", CoverState.OPEN, {}) + hass.states.async_set("cover.entity_open", STATE_OPEN, {}) hass.states.async_set( - "cover.entity_slightly_open", CoverState.OPEN, {ATTR_CURRENT_POSITION: 50} + "cover.entity_slightly_open", STATE_OPEN, {ATTR_CURRENT_POSITION: 50} ) hass.states.async_set( "cover.entity_open_attr", - CoverState.OPEN, + STATE_OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 0}, ) hass.states.async_set( "cover.entity_open_tilt", - CoverState.OPEN, + STATE_OPEN, {ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50}, ) hass.states.async_set( "cover.entity_entirely_open", - CoverState.OPEN, + STATE_OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 100}, ) @@ -69,36 +70,34 @@ async def test_reproducing_states( await async_reproduce_state( hass, [ - State("cover.entity_close", CoverState.CLOSED), + State("cover.entity_close", STATE_CLOSED), State( "cover.entity_close_attr", - CoverState.CLOSED, + STATE_CLOSED, {ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0}, ), State( "cover.entity_close_tilt", - CoverState.CLOSED, + STATE_CLOSED, {ATTR_CURRENT_TILT_POSITION: 50}, ), - State("cover.entity_open", CoverState.OPEN), + State("cover.entity_open", STATE_OPEN), State( - "cover.entity_slightly_open", - CoverState.OPEN, - {ATTR_CURRENT_POSITION: 50}, + "cover.entity_slightly_open", STATE_OPEN, {ATTR_CURRENT_POSITION: 50} ), State( "cover.entity_open_attr", - CoverState.OPEN, + STATE_OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 0}, ), State( "cover.entity_open_tilt", - CoverState.OPEN, + STATE_OPEN, {ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50}, ), State( "cover.entity_entirely_open", - CoverState.OPEN, + STATE_OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 100}, ), ], @@ -126,28 +125,26 @@ async def test_reproducing_states( await async_reproduce_state( hass, [ - State("cover.entity_close", CoverState.OPEN), + State("cover.entity_close", STATE_OPEN), State( "cover.entity_close_attr", - 
CoverState.OPEN, + STATE_OPEN, {ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50}, ), State( "cover.entity_close_tilt", - CoverState.CLOSED, + STATE_CLOSED, {ATTR_CURRENT_TILT_POSITION: 100}, ), - State("cover.entity_open", CoverState.CLOSED), - State("cover.entity_slightly_open", CoverState.OPEN, {}), - State("cover.entity_open_attr", CoverState.CLOSED, {}), + State("cover.entity_open", STATE_CLOSED), + State("cover.entity_slightly_open", STATE_OPEN, {}), + State("cover.entity_open_attr", STATE_CLOSED, {}), State( - "cover.entity_open_tilt", - CoverState.OPEN, - {ATTR_CURRENT_TILT_POSITION: 0}, + "cover.entity_open_tilt", STATE_OPEN, {ATTR_CURRENT_TILT_POSITION: 0} ), State( "cover.entity_entirely_open", - CoverState.CLOSED, + STATE_CLOSED, {ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0}, ), # Should not raise diff --git a/tests/components/cpuspeed/conftest.py b/tests/components/cpuspeed/conftest.py index d9079079ba2..e3ea1432659 100644 --- a/tests/components/cpuspeed/conftest.py +++ b/tests/components/cpuspeed/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.cpuspeed.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/crownstone/test_config_flow.py b/tests/components/crownstone/test_config_flow.py index a38a04cb2ad..be9086e02da 100644 --- a/tests/components/crownstone/test_config_flow.py +++ b/tests/components/crownstone/test_config_flow.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from crownstone_cloud.cloud_models.spheres import Spheres @@ -12,6 +11,7 @@ from crownstone_cloud.exceptions import ( ) import pytest from serial.tools.list_ports_common import ListPortInfo +from typing_extensions import Generator from homeassistant.components import usb from homeassistant.components.crownstone.const import ( @@ -258,7 +258,7 @@ async def test_unknown_error( result = await start_config_flow(hass, cloud) assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "unknown"} + assert result["errors"] == {"base": "unknown_error"} assert crownstone_setup.call_count == 0 diff --git a/tests/components/daikin/test_config_flow.py b/tests/components/daikin/test_config_flow.py index 5c432e111dd..6d957384d4d 100644 --- a/tests/components/daikin/test_config_flow.py +++ b/tests/components/daikin/test_config_flow.py @@ -28,11 +28,9 @@ def mock_daikin(): """Mock the init function in pydaikin.""" return Appliance - with patch( - "homeassistant.components.daikin.config_flow.DaikinFactory" - ) as Appliance: + with patch("homeassistant.components.daikin.config_flow.Appliance") as Appliance: type(Appliance).mac = PropertyMock(return_value="AABBCCDDEEFF") - Appliance.side_effect = mock_daikin_factory + Appliance.factory.side_effect = mock_daikin_factory yield Appliance @@ -92,7 +90,7 @@ async def test_abort_if_already_setup(hass: HomeAssistant, mock_daikin) -> None: ) async def test_device_abort(hass: HomeAssistant, mock_daikin, s_effect, reason) -> None: """Test device abort.""" - mock_daikin.side_effect = s_effect + mock_daikin.factory.side_effect = s_effect result = await hass.config_entries.flow.async_init( "daikin", diff --git a/tests/components/daikin/test_init.py b/tests/components/daikin/test_init.py index 2380d5ad798..d7d754dacd2 
100644 --- a/tests/components/daikin/test_init.py +++ b/tests/components/daikin/test_init.py @@ -7,10 +7,10 @@ from aiohttp import ClientConnectionError from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.daikin import update_unique_id +from homeassistant.components.daikin import DaikinApi, update_unique_id from homeassistant.components.daikin.const import DOMAIN, KEY_MAC from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_HOST, STATE_UNAVAILABLE +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -27,8 +27,8 @@ def mock_daikin(): """Mock the init function in pydaikin.""" return Appliance - with patch("homeassistant.components.daikin.DaikinFactory") as Appliance: - Appliance.side_effect = mock_daikin_factory + with patch("homeassistant.components.daikin.Appliance") as Appliance: + Appliance.factory.side_effect = mock_daikin_factory type(Appliance).update_status = AsyncMock() type(Appliance).device_ip = PropertyMock(return_value=HOST) type(Appliance).inside_temperature = PropertyMock(return_value=22) @@ -183,15 +183,18 @@ async def test_client_update_connection_error( await hass.config_entries.async_setup(config_entry.entry_id) - assert hass.states.get("climate.daikinap00000").state != STATE_UNAVAILABLE + api: DaikinApi = hass.data[DOMAIN][config_entry.entry_id] + + assert api.available is True type(mock_daikin).update_status.side_effect = ClientConnectionError - freezer.tick(timedelta(seconds=60)) + freezer.tick(timedelta(seconds=90)) async_fire_time_changed(hass) + await hass.async_block_till_done() - assert hass.states.get("climate.daikinap00000").state == STATE_UNAVAILABLE + assert api.available is False assert mock_daikin.update_status.call_count == 2 @@ -205,7 +208,7 @@ async def test_client_connection_error(hass: HomeAssistant, mock_daikin) -> None ) config_entry.add_to_hass(hass) - mock_daikin.side_effect = ClientConnectionError + mock_daikin.factory.side_effect = ClientConnectionError await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -221,7 +224,7 @@ async def test_timeout_error(hass: HomeAssistant, mock_daikin) -> None: ) config_entry.add_to_hass(hass) - mock_daikin.side_effect = TimeoutError + mock_daikin.factory.side_effect = TimeoutError await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/datadog/test_init.py b/tests/components/datadog/test_init.py index 3b7bea3c926..36c1d951078 100644 --- a/tests/components/datadog/test_init.py +++ b/tests/components/datadog/test_init.py @@ -79,7 +79,7 @@ async def test_logbook_entry(hass: HomeAssistant) -> None: assert mock_statsd.event.call_count == 1 assert mock_statsd.event.call_args == mock.call( title="Home Assistant", - text=f"%%% \n **{event['name']}** {event['message']} \n %%%", + text="%%% \n **{}** {} \n %%%".format(event["name"], event["message"]), tags=["entity:sensor.foo.bar", "domain:automation"], ) diff --git a/tests/components/deako/__init__.py b/tests/components/deako/__init__.py deleted file mode 100644 index 248a389f2e6..00000000000 --- a/tests/components/deako/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Deako integration.""" diff --git a/tests/components/deako/conftest.py b/tests/components/deako/conftest.py deleted file mode 100644 index 659634b8784..00000000000 --- 
a/tests/components/deako/conftest.py +++ /dev/null @@ -1,45 +0,0 @@ -"""deako session fixtures.""" - -from collections.abc import Generator -from unittest.mock import MagicMock, patch - -import pytest - -from homeassistant.components.deako.const import DOMAIN - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Return the default mocked config entry.""" - return MockConfigEntry( - domain=DOMAIN, - ) - - -@pytest.fixture(autouse=True) -def pydeako_deako_mock() -> Generator[MagicMock]: - """Mock pydeako deako client.""" - with patch("homeassistant.components.deako.Deako", autospec=True) as mock: - yield mock - - -@pytest.fixture(autouse=True) -def pydeako_discoverer_mock(mock_async_zeroconf: MagicMock) -> Generator[MagicMock]: - """Mock pydeako discovery client.""" - with ( - patch("homeassistant.components.deako.DeakoDiscoverer", autospec=True) as mock, - patch("homeassistant.components.deako.config_flow.DeakoDiscoverer", new=mock), - ): - yield mock - - -@pytest.fixture -def mock_deako_setup() -> Generator[MagicMock]: - """Mock async_setup_entry for config flow tests.""" - with patch( - "homeassistant.components.deako.async_setup_entry", - return_value=True, - ) as mock_setup: - yield mock_setup diff --git a/tests/components/deako/snapshots/test_light.ambr b/tests/components/deako/snapshots/test_light.ambr deleted file mode 100644 index 7bc170654e1..00000000000 --- a/tests/components/deako/snapshots/test_light.ambr +++ /dev/null @@ -1,168 +0,0 @@ -# serializer version: 1 -# name: test_dimmable_light_props[light.kitchen-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.kitchen', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'deako', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'uuid', - 'unit_of_measurement': None, - }) -# --- -# name: test_dimmable_light_props[light.kitchen-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 127, - 'color_mode': , - 'friendly_name': 'kitchen', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.kitchen', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_light_initial_props[light.kitchen-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.kitchen', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'deako', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'uuid', - 'unit_of_measurement': None, - }) -# --- -# name: test_light_initial_props[light.kitchen-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'color_mode': None, - 'friendly_name': 'kitchen', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.kitchen', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_light_setup_with_device[light.some_device-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.some_device', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'deako', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'some_device', - 'unit_of_measurement': None, - }) -# --- -# name: test_light_setup_with_device[light.some_device-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 1, - 'color_mode': , - 'friendly_name': 'some device', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.some_device', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/deako/test_config_flow.py b/tests/components/deako/test_config_flow.py deleted file mode 100644 index 21b10eaaa36..00000000000 --- a/tests/components/deako/test_config_flow.py +++ /dev/null @@ -1,80 +0,0 @@ -"""Tests for the deako component config flow.""" - -from unittest.mock import MagicMock - -from pydeako.discover import DevicesNotFoundException - -from homeassistant.components.deako.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_found( - hass: HomeAssistant, - pydeako_discoverer_mock: MagicMock, - mock_deako_setup: MagicMock, -) -> None: - """Test finding a Deako device.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - # Confirmation form - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - pydeako_discoverer_mock.return_value.get_address.assert_called_once() - - mock_deako_setup.assert_called_once() - - -async def test_not_found( - hass: HomeAssistant, - pydeako_discoverer_mock: MagicMock, - mock_deako_setup: MagicMock, -) -> None: - """Test not finding any Deako devices.""" - pydeako_discoverer_mock.return_value.get_address.side_effect = ( - DevicesNotFoundException() - ) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - # Confirmation form - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "no_devices_found" - pydeako_discoverer_mock.return_value.get_address.assert_called_once() - - 
mock_deako_setup.assert_not_called() - - -async def test_already_configured( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_deako_setup: MagicMock, -) -> None: - """Test flow aborts when already configured.""" - - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" - - mock_deako_setup.assert_not_called() diff --git a/tests/components/deako/test_init.py b/tests/components/deako/test_init.py deleted file mode 100644 index b4c0e8bb1f7..00000000000 --- a/tests/components/deako/test_init.py +++ /dev/null @@ -1,87 +0,0 @@ -"""Tests for the deako component init.""" - -from unittest.mock import MagicMock - -from pydeako.deako import DeviceListTimeout, FindDevicesTimeout - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def test_deako_async_setup_entry( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - pydeako_deako_mock: MagicMock, - pydeako_discoverer_mock: MagicMock, -) -> None: - """Test successful setup entry.""" - pydeako_deako_mock.return_value.get_devices.return_value = { - "id1": {}, - "id2": {}, - } - - mock_config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - pydeako_deako_mock.assert_called_once_with( - pydeako_discoverer_mock.return_value.get_address - ) - pydeako_deako_mock.return_value.connect.assert_called_once() - pydeako_deako_mock.return_value.find_devices.assert_called_once() - pydeako_deako_mock.return_value.get_devices.assert_called() - - assert mock_config_entry.runtime_data == pydeako_deako_mock.return_value - - -async def test_deako_async_setup_entry_device_list_timeout( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - pydeako_deako_mock: MagicMock, - pydeako_discoverer_mock: MagicMock, -) -> None: - """Test async_setup_entry raises ConfigEntryNotReady when pydeako raises DeviceListTimeout.""" - - mock_config_entry.add_to_hass(hass) - - pydeako_deako_mock.return_value.find_devices.side_effect = DeviceListTimeout() - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - pydeako_deako_mock.assert_called_once_with( - pydeako_discoverer_mock.return_value.get_address - ) - pydeako_deako_mock.return_value.connect.assert_called_once() - pydeako_deako_mock.return_value.find_devices.assert_called_once() - pydeako_deako_mock.return_value.disconnect.assert_called_once() - - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_deako_async_setup_entry_find_devices_timeout( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - pydeako_deako_mock: MagicMock, - pydeako_discoverer_mock: MagicMock, -) -> None: - """Test async_setup_entry raises ConfigEntryNotReady when pydeako raises FindDevicesTimeout.""" - - mock_config_entry.add_to_hass(hass) - - pydeako_deako_mock.return_value.find_devices.side_effect = FindDevicesTimeout() - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - pydeako_deako_mock.assert_called_once_with( - pydeako_discoverer_mock.return_value.get_address - ) - pydeako_deako_mock.return_value.connect.assert_called_once() - pydeako_deako_mock.return_value.find_devices.assert_called_once() - 
pydeako_deako_mock.return_value.disconnect.assert_called_once() - - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/deako/test_light.py b/tests/components/deako/test_light.py deleted file mode 100644 index b969c7f71cb..00000000000 --- a/tests/components/deako/test_light.py +++ /dev/null @@ -1,192 +0,0 @@ -"""Tests for the light module.""" - -from unittest.mock import MagicMock - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN -from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_light_setup_with_device( - hass: HomeAssistant, - pydeako_deako_mock: MagicMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test light platform setup with device returned.""" - mock_config_entry.add_to_hass(hass) - - pydeako_deako_mock.return_value.get_devices.return_value = { - "some_device": {}, - } - pydeako_deako_mock.return_value.get_name.return_value = "some device" - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_light_initial_props( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - pydeako_deako_mock: MagicMock, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test on/off light is setup with accurate initial properties.""" - mock_config_entry.add_to_hass(hass) - - pydeako_deako_mock.return_value.get_devices.return_value = { - "uuid": { - "name": "kitchen", - } - } - pydeako_deako_mock.return_value.get_name.return_value = "kitchen" - pydeako_deako_mock.return_value.get_state.return_value = { - "power": False, - } - pydeako_deako_mock.return_value.is_dimmable.return_value = False - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_dimmable_light_props( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - pydeako_deako_mock: MagicMock, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test dimmable on/off light is setup with accurate initial properties.""" - mock_config_entry.add_to_hass(hass) - - pydeako_deako_mock.return_value.get_devices.return_value = { - "uuid": { - "name": "kitchen", - } - } - pydeako_deako_mock.return_value.get_name.return_value = "kitchen" - pydeako_deako_mock.return_value.get_state.return_value = { - "power": True, - "dim": 50, - } - pydeako_deako_mock.return_value.is_dimmable.return_value = True - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_light_power_change_on( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - pydeako_deako_mock: MagicMock, -) -> None: - """Test turing on a deako device.""" - mock_config_entry.add_to_hass(hass) - - pydeako_deako_mock.return_value.get_devices.return_value = { - "uuid": { - "name": "kitchen", - } - } - 
pydeako_deako_mock.return_value.get_name.return_value = "kitchen" - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.kitchen"}, - blocking=True, - ) - - pydeako_deako_mock.return_value.control_device.assert_called_once_with( - "uuid", True, None - ) - - -async def test_light_power_change_off( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - pydeako_deako_mock: MagicMock, -) -> None: - """Test turing off a deako device.""" - mock_config_entry.add_to_hass(hass) - - pydeako_deako_mock.return_value.get_devices.return_value = { - "uuid": { - "name": "kitchen", - } - } - pydeako_deako_mock.return_value.get_name.return_value = "kitchen" - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.kitchen"}, - blocking=True, - ) - - pydeako_deako_mock.return_value.control_device.assert_called_once_with( - "uuid", False, None - ) - - -@pytest.mark.parametrize( - ("dim_input", "expected_dim_value"), - [ - (3, 1), - (255, 100), - (127, 50), - ], -) -async def test_light_brightness_change( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - pydeako_deako_mock: MagicMock, - dim_input: int, - expected_dim_value: int, -) -> None: - """Test turing on a deako device.""" - mock_config_entry.add_to_hass(hass) - - pydeako_deako_mock.return_value.get_devices.return_value = { - "uuid": { - "name": "kitchen", - } - } - pydeako_deako_mock.return_value.get_name.return_value = "kitchen" - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: "light.kitchen", - ATTR_BRIGHTNESS: dim_input, - }, - blocking=True, - ) - - pydeako_deako_mock.return_value.control_device.assert_called_once_with( - "uuid", True, expected_dim_value - ) diff --git a/tests/components/deconz/conftest.py b/tests/components/deconz/conftest.py index fd3003b96ef..d0f0f11c99b 100644 --- a/tests/components/deconz/conftest.py +++ b/tests/components/deconz/conftest.py @@ -2,304 +2,30 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine, Generator -from types import MappingProxyType -from typing import Any, Protocol from unittest.mock import patch from pydeconz.websocket import Signal import pytest -from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT, CONTENT_TYPE_JSON -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry from tests.components.light.conftest import mock_light_profiles # noqa: F401 -from tests.test_util.aiohttp import AiohttpClientMocker - -type ConfigEntryFactoryType = Callable[ - [MockConfigEntry], Coroutine[Any, Any, MockConfigEntry] -] -type WebsocketDataType = Callable[[dict[str, Any]], Coroutine[Any, Any, None]] -type WebsocketStateType = Callable[[str], Coroutine[Any, Any, None]] -class _WebsocketMock(Protocol): - async def __call__( - self, data: dict[str, Any] | None = None, state: str = "" - ) -> None: ... 
- - -# Config entry fixtures - -API_KEY = "1234567890ABCDEF" -BRIDGE_ID = "01234E56789A" -HOST = "1.2.3.4" -PORT = 80 - - -@pytest.fixture(name="config_entry") -def fixture_config_entry( - config_entry_data: MappingProxyType[str, Any], - config_entry_options: MappingProxyType[str, Any], - config_entry_source: str, -) -> MockConfigEntry: - """Define a config entry fixture.""" - return MockConfigEntry( - domain=DECONZ_DOMAIN, - entry_id="1", - unique_id=BRIDGE_ID, - data=config_entry_data, - options=config_entry_options, - source=config_entry_source, - ) - - -@pytest.fixture(name="config_entry_data") -def fixture_config_entry_data() -> MappingProxyType[str, Any]: - """Define a config entry data fixture.""" - return { - CONF_API_KEY: API_KEY, - CONF_HOST: HOST, - CONF_PORT: PORT, - } - - -@pytest.fixture(name="config_entry_options") -def fixture_config_entry_options() -> MappingProxyType[str, Any]: - """Define a config entry options fixture.""" - return {} - - -@pytest.fixture(name="config_entry_source") -def fixture_config_entry_source() -> str: - """Define a config entry source fixture.""" - return SOURCE_USER - - -# Request mocks - - -@pytest.fixture(name="mock_put_request") -def fixture_put_request( - aioclient_mock: AiohttpClientMocker, config_entry_data: MappingProxyType[str, Any] -) -> Callable[[str, str], AiohttpClientMocker]: - """Mock a deCONZ put request.""" - _host = config_entry_data[CONF_HOST] - _port = config_entry_data[CONF_PORT] - _api_key = config_entry_data[CONF_API_KEY] - - def __mock_requests(path: str, host: str = "") -> AiohttpClientMocker: - url = f"http://{host or _host}:{_port}/api/{_api_key}{path}" - aioclient_mock.put(url, json={}, headers={"content-type": CONTENT_TYPE_JSON}) - return aioclient_mock - - return __mock_requests - - -@pytest.fixture(name="mock_requests") -def fixture_get_request( - aioclient_mock: AiohttpClientMocker, - config_entry_data: MappingProxyType[str, Any], - config_payload: dict[str, Any], - alarm_system_payload: dict[str, Any], - group_payload: dict[str, Any], - light_payload: dict[str, Any], - sensor_payload: dict[str, Any], - deconz_payload: dict[str, Any], -) -> Callable[[str], None]: - """Mock default deCONZ requests responses.""" - _host = config_entry_data[CONF_HOST] - _port = config_entry_data[CONF_PORT] - _api_key = config_entry_data[CONF_API_KEY] - - data = deconz_payload - data.setdefault("alarmsystems", alarm_system_payload) - data.setdefault("config", config_payload) - data.setdefault("groups", group_payload) - if "state" in light_payload: - light_payload = {"0": light_payload} - data.setdefault("lights", light_payload) - if "state" in sensor_payload or "config" in sensor_payload: - sensor_payload = {"0": sensor_payload} - data.setdefault("sensors", sensor_payload) - - def __mock_requests(host: str = "") -> None: - url = f"http://{host or _host}:{_port}/api/{_api_key}" - aioclient_mock.get( - url, - json=deconz_payload | {"config": config_payload}, - headers={ - "content-type": CONTENT_TYPE_JSON, - }, - ) - - return __mock_requests - - -# Request payload fixtures - - -@pytest.fixture(name="deconz_payload") -def fixture_data() -> dict[str, Any]: - """Combine multiple payloads with one fixture.""" - return {} - - -@pytest.fixture(name="alarm_system_payload") -def fixture_alarm_system_data() -> dict[str, Any]: - """Alarm system data.""" - return {} - - -@pytest.fixture(name="config_payload") -def fixture_config_data() -> dict[str, Any]: - """Config data.""" - return { - "bridgeid": BRIDGE_ID, - "ipaddress": HOST, - "mac": 
"00:11:22:33:44:55", - "modelid": "deCONZ", - "name": "deCONZ mock gateway", - "sw_version": "2.05.69", - "uuid": "1234", - "websocketport": 1234, - } - - -@pytest.fixture(name="group_payload") -def fixture_group_data() -> dict[str, Any]: - """Group data.""" - return {} - - -@pytest.fixture(name="light_payload") -def fixture_light_data() -> dict[str, Any]: - """Light data. - - Should be - - one light data payload {"state": ...} - - multiple lights {"1": ..., "2": ...} - """ - return {} - - -@pytest.fixture(name="sensor_payload") -def fixture_sensor_data() -> dict[str, Any]: - """Sensor data. - - Should be - - one sensor data payload {"config": ..., "state": ...} ("0") - - multiple sensors {"1": ..., "2": ...} - """ - return {} - - -@pytest.fixture(name="config_entry_factory") -async def fixture_config_entry_factory( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_requests: Callable[[str], None], -) -> ConfigEntryFactoryType: - """Fixture factory that can set up UniFi network integration.""" - - async def __mock_setup_config_entry( - entry: MockConfigEntry = config_entry, - ) -> MockConfigEntry: - entry.add_to_hass(hass) - mock_requests(entry.data[CONF_HOST]) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - return entry - - return __mock_setup_config_entry - - -@pytest.fixture(name="config_entry_setup") -async def fixture_config_entry_setup( - config_entry_factory: ConfigEntryFactoryType, -) -> MockConfigEntry: - """Fixture providing a set up instance of deCONZ integration.""" - return await config_entry_factory() - - -# Websocket fixtures - - -@pytest.fixture(autouse=True, name="_mock_websocket") -def fixture_websocket() -> Generator[_WebsocketMock]: +@pytest.fixture(autouse=True) +def mock_deconz_websocket(): """No real websocket allowed.""" with patch("pydeconz.gateway.WSClient") as mock: - async def make_websocket_call( - data: dict[str, Any] | None = None, state: str = "" - ) -> None: + async def make_websocket_call(data: dict | None = None, state: str = ""): """Generate a websocket call.""" pydeconz_gateway_session_handler = mock.call_args[0][3] - signal: Signal if data: mock.return_value.data = data - signal = Signal.DATA + await pydeconz_gateway_session_handler(signal=Signal.DATA) elif state: mock.return_value.state = state - signal = Signal.CONNECTION_STATE - await pydeconz_gateway_session_handler(signal) + await pydeconz_gateway_session_handler(signal=Signal.CONNECTION_STATE) + else: + raise NotImplementedError yield make_websocket_call - - -@pytest.fixture(name="mock_websocket_data") -def fixture_websocket_data(_mock_websocket: _WebsocketMock) -> WebsocketDataType: - """Fixture to send websocket data.""" - - async def change_websocket_data(data: dict[str, Any]) -> None: - """Provide new data on the websocket.""" - if "t" not in data: - data["t"] = "event" - if "e" not in data: - data["e"] = "changed" - if "id" not in data: - data["id"] = "0" - await _mock_websocket(data=data) - - return change_websocket_data - - -@pytest.fixture(name="light_ws_data") -def fixture_light_websocket_data( - mock_websocket_data: WebsocketDataType, -) -> WebsocketDataType: - """Fixture to send light data over websocket.""" - - async def send_light_data(data: dict[str, Any]) -> None: - """Send light data on the websocket.""" - await mock_websocket_data({"r": "lights"} | data) - - return send_light_data - - -@pytest.fixture(name="sensor_ws_data") -def fixture_sensor_websocket_data( - mock_websocket_data: WebsocketDataType, -) -> 
WebsocketDataType: - """Fixture to send sensor data over websocket.""" - - async def send_sensor_data(data: dict[str, Any]) -> None: - """Send sensor data on the websocket.""" - await mock_websocket_data({"r": "sensors"} | data) - - return send_sensor_data - - -@pytest.fixture(name="mock_websocket_state") -def fixture_websocket_state(_mock_websocket: _WebsocketMock) -> WebsocketStateType: - """Fixture to set websocket state.""" - - async def change_websocket_state(state: str) -> None: - """Simulate a change to the websocket connection state.""" - await _mock_websocket(state=state) - - return change_websocket_state diff --git a/tests/components/deconz/snapshots/test_alarm_control_panel.ambr b/tests/components/deconz/snapshots/test_alarm_control_panel.ambr deleted file mode 100644 index 86b97a62dfe..00000000000 --- a/tests/components/deconz/snapshots/test_alarm_control_panel.ambr +++ /dev/null @@ -1,51 +0,0 @@ -# serializer version: 1 -# name: test_alarm_control_panel[sensor_payload0-alarm_system_payload0][alarm_control_panel.keypad-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'alarm_control_panel', - 'entity_category': None, - 'entity_id': 'alarm_control_panel.keypad', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Keypad', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_alarm_control_panel[sensor_payload0-alarm_system_payload0][alarm_control_panel.keypad-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'changed_by': None, - 'code_arm_required': True, - 'code_format': , - 'friendly_name': 'Keypad', - 'supported_features': , - }), - 'context': , - 'entity_id': 'alarm_control_panel.keypad', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/deconz/snapshots/test_binary_sensor.ambr b/tests/components/deconz/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 584575c23af..00000000000 --- a/tests/components/deconz/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,1014 +0,0 @@ -# serializer version: 1 -# name: test_binary_sensors[sensor_payload0-expected0-config_entry_options0][binary_sensor.alarm_10-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.alarm_10', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Alarm 10', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:b5:d1:80-01-0500-alarm', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload0-expected0-config_entry_options0][binary_sensor.alarm_10-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'safety', - 'friendly_name': 
'Alarm 10', - 'on': True, - 'temperature': 26.0, - }), - 'context': , - 'entity_id': 'binary_sensor.alarm_10', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.cave_co', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Cave CO', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:a5:21:24-01-0101-carbon_monoxide', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'carbon_monoxide', - 'friendly_name': 'Cave CO', - 'on': True, - }), - 'context': , - 'entity_id': 'binary_sensor.cave_co', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co_low_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.cave_co_low_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Cave CO Low Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:a5:21:24-01-0101-low_battery', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co_low_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Cave CO Low Battery', - }), - 'context': , - 'entity_id': 'binary_sensor.cave_co_low_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co_tampered-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.cave_co_tampered', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Cave CO Tampered', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:a5:21:24-01-0101-tampered', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co_tampered-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'tamper', - 'friendly_name': 'Cave CO Tampered', - }), - 'context': , - 'entity_id': 'binary_sensor.cave_co_tampered', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.presence_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Presence sensor', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-presence', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'dark': False, - 'device_class': 'motion', - 'friendly_name': 'Presence sensor', - 'on': True, - 'temperature': 0.1, - }), - 'context': , - 'entity_id': 'binary_sensor.presence_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor_low_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.presence_sensor_low_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Presence sensor Low Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-low_battery', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor_low_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Presence sensor Low Battery', - }), - 'context': , - 'entity_id': 'binary_sensor.presence_sensor_low_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor_tampered-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.presence_sensor_tampered', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': , - 'original_icon': None, - 'original_name': 'Presence sensor Tampered', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-tampered', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor_tampered-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'tamper', - 'friendly_name': 'Presence sensor Tampered', - }), - 'context': , - 'entity_id': 'binary_sensor.presence_sensor_tampered', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload2-expected2-config_entry_options0][binary_sensor.sensor_kitchen_smoke-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.sensor_kitchen_smoke', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'sensor_kitchen_smoke', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:01:d9:3e:7c-01-0500-fire', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload2-expected2-config_entry_options0][binary_sensor.sensor_kitchen_smoke-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'smoke', - 'friendly_name': 'sensor_kitchen_smoke', - 'on': True, - }), - 'context': , - 'entity_id': 'binary_sensor.sensor_kitchen_smoke', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload2-expected2-config_entry_options0][binary_sensor.sensor_kitchen_smoke_test_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.sensor_kitchen_smoke_test_mode', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'sensor_kitchen_smoke Test Mode', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:01:d9:3e:7c-01-0500-in_test_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload2-expected2-config_entry_options0][binary_sensor.sensor_kitchen_smoke_test_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'smoke', - 'friendly_name': 'sensor_kitchen_smoke Test Mode', - }), - 'context': , - 'entity_id': 'binary_sensor.sensor_kitchen_smoke_test_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload3-expected3-config_entry_options0][binary_sensor.sensor_kitchen_smoke-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 
'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.sensor_kitchen_smoke', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'sensor_kitchen_smoke', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:01:d9:3e:7c-01-0500-fire', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload3-expected3-config_entry_options0][binary_sensor.sensor_kitchen_smoke-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'smoke', - 'friendly_name': 'sensor_kitchen_smoke', - 'on': True, - }), - 'context': , - 'entity_id': 'binary_sensor.sensor_kitchen_smoke', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload3-expected3-config_entry_options0][binary_sensor.sensor_kitchen_smoke_test_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.sensor_kitchen_smoke_test_mode', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'sensor_kitchen_smoke Test Mode', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:01:d9:3e:7c-01-0500-in_test_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload3-expected3-config_entry_options0][binary_sensor.sensor_kitchen_smoke_test_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'smoke', - 'friendly_name': 'sensor_kitchen_smoke Test Mode', - }), - 'context': , - 'entity_id': 'binary_sensor.sensor_kitchen_smoke_test_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload4-expected4-config_entry_options0][binary_sensor.kitchen_switch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.kitchen_switch', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Kitchen Switch', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'kitchen-switch-flag', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload4-expected4-config_entry_options0][binary_sensor.kitchen_switch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Kitchen Switch', - 'on': True, - }), - 'context': , - 'entity_id': 'binary_sensor.kitchen_switch', - 'last_changed': , - 'last_reported': , - 'last_updated': 
, - 'state': 'on', - }) -# --- -# name: test_binary_sensors[sensor_payload5-expected5-config_entry_options0][binary_sensor.back_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.back_door', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Back Door', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:2b:96:b4-01-0006-open', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload5-expected5-config_entry_options0][binary_sensor.back_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'opening', - 'friendly_name': 'Back Door', - 'on': True, - 'temperature': 33.0, - }), - 'context': , - 'entity_id': 'binary_sensor.back_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload6-expected6-config_entry_options0][binary_sensor.motion_sensor_4-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.motion_sensor_4', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Motion sensor 4', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:17:88:01:03:28:8c:9b-02-0406-presence', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload6-expected6-config_entry_options0][binary_sensor.motion_sensor_4-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'dark': False, - 'device_class': 'motion', - 'friendly_name': 'Motion sensor 4', - 'on': True, - }), - 'context': , - 'entity_id': 'binary_sensor.motion_sensor_4', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.water2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'water2', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:2f:07:db-01-0500-water', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'moisture', - 
'friendly_name': 'water2', - 'on': True, - 'temperature': 25.0, - }), - 'context': , - 'entity_id': 'binary_sensor.water2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2_low_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.water2_low_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'water2 Low Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:2f:07:db-01-0500-low_battery', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2_low_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'water2 Low Battery', - }), - 'context': , - 'entity_id': 'binary_sensor.water2_low_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2_tampered-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.water2_tampered', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'water2 Tampered', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:2f:07:db-01-0500-tampered', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2_tampered-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'tamper', - 'friendly_name': 'water2 Tampered', - }), - 'context': , - 'entity_id': 'binary_sensor.water2_tampered', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload8-expected8-config_entry_options0][binary_sensor.vibration_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.vibration_1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Vibration 1', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:a5:21:24-01-0101-vibration', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_sensors[sensor_payload8-expected8-config_entry_options0][binary_sensor.vibration_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'vibration', - 'friendly_name': 'Vibration 1', - 'on': True, - 'orientation': list([ - 10, - 1059, - 0, - ]), - 'temperature': 32.0, - 'tiltangle': 83, - 'vibrationstrength': 114, - }), - 'context': , - 'entity_id': 'binary_sensor.vibration_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.presence_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Presence sensor', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-presence', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'dark': False, - 'device_class': 'motion', - 'friendly_name': 'Presence sensor', - 'on': True, - 'temperature': 0.1, - }), - 'context': , - 'entity_id': 'binary_sensor.presence_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor_low_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.presence_sensor_low_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Presence sensor Low Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-low_battery', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor_low_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Presence sensor Low Battery', - }), - 'context': , - 'entity_id': 'binary_sensor.presence_sensor_low_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor_tampered-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.presence_sensor_tampered', - 'has_entity_name': False, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Presence sensor Tampered', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-tampered', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor_tampered-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'tamper', - 'friendly_name': 'Presence sensor Tampered', - }), - 'context': , - 'entity_id': 'binary_sensor.presence_sensor_tampered', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/deconz/snapshots/test_button.ambr b/tests/components/deconz/snapshots/test_button.ambr deleted file mode 100644 index 1ef5248ebc3..00000000000 --- a/tests/components/deconz/snapshots/test_button.ambr +++ /dev/null @@ -1,95 +0,0 @@ -# serializer version: 1 -# name: test_button[deconz_payload0-expected0][button.light_group_scene_store_current_scene-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.light_group_scene_store_current_scene', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:inbox-arrow-down', - 'original_name': 'Scene Store Current Scene', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01234E56789A/groups/1/scenes/1-store', - 'unit_of_measurement': None, - }) -# --- -# name: test_button[deconz_payload0-expected0][button.light_group_scene_store_current_scene-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Light group Scene Store Current Scene', - 'icon': 'mdi:inbox-arrow-down', - }), - 'context': , - 'entity_id': 'button.light_group_scene_store_current_scene', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_button[deconz_payload1-expected1][button.aqara_fp1_reset_presence-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.aqara_fp1_reset_presence', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Aqara FP1 Reset Presence', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-reset_presence', - 'unit_of_measurement': None, - }) -# --- -# name: test_button[deconz_payload1-expected1][button.aqara_fp1_reset_presence-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'Aqara FP1 Reset Presence', - }), - 'context': , - 'entity_id': 'button.aqara_fp1_reset_presence', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/deconz/snapshots/test_climate.ambr b/tests/components/deconz/snapshots/test_climate.ambr deleted file mode 100644 index 4e33e11534e..00000000000 --- a/tests/components/deconz/snapshots/test_climate.ambr +++ /dev/null @@ -1,545 +0,0 @@ -# serializer version: 1 -# name: test_climate_device_with_cooling_support[sensor_payload0][climate.zen_01-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'fan_modes': list([ - 'smart', - 'auto', - 'high', - 'medium', - 'low', - 'on', - 'off', - ]), - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.zen_01', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Zen-01', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:24:46:00:00:11:6f:56-01-0201', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate_device_with_cooling_support[sensor_payload0][climate.zen_01-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 23.2, - 'fan_mode': 'off', - 'fan_modes': list([ - 'smart', - 'auto', - 'high', - 'medium', - 'low', - 'on', - 'off', - ]), - 'friendly_name': 'Zen-01', - 'hvac_action': , - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - 'offset': 0, - 'supported_features': , - 'temperature': 22.2, - }), - 'context': , - 'entity_id': 'climate.zen_01', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_climate_device_with_fan_support[sensor_payload0][climate.zen_01-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'fan_modes': list([ - 'smart', - 'auto', - 'high', - 'medium', - 'low', - 'on', - 'off', - ]), - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.zen_01', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Zen-01', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:24:46:00:00:11:6f:56-01-0201', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate_device_with_fan_support[sensor_payload0][climate.zen_01-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 23.2, - 'fan_mode': 'auto', - 'fan_modes': list([ - 'smart', - 'auto', - 'high', - 'medium', - 'low', - 'on', - 'off', - ]), - 'friendly_name': 'Zen-01', - 'hvac_action': , - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - 'offset': 0, - 'supported_features': , - 'temperature': 22.2, - }), - 'context': , - 'entity_id': 'climate.zen_01', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# 
name: test_climate_device_with_preset[sensor_payload0][climate.zen_01-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'fan_modes': list([ - 'smart', - 'auto', - 'high', - 'medium', - 'low', - 'on', - 'off', - ]), - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - 'preset_modes': list([ - 'auto', - 'boost', - 'comfort', - 'complex', - 'eco', - 'holiday', - 'manual', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.zen_01', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Zen-01', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:24:46:00:00:11:6f:56-01-0201', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate_device_with_preset[sensor_payload0][climate.zen_01-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 23.2, - 'fan_mode': 'off', - 'fan_modes': list([ - 'smart', - 'auto', - 'high', - 'medium', - 'low', - 'on', - 'off', - ]), - 'friendly_name': 'Zen-01', - 'hvac_action': , - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - 'offset': 0, - 'preset_mode': 'auto', - 'preset_modes': list([ - 'auto', - 'boost', - 'comfort', - 'complex', - 'eco', - 'holiday', - 'manual', - ]), - 'supported_features': , - 'temperature': 22.2, - }), - 'context': , - 'entity_id': 'climate.zen_01', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_climate_device_without_cooling_support[sensor_payload0][climate.thermostat-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.thermostat', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Thermostat', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate_device_without_cooling_support[sensor_payload0][climate.thermostat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 22.6, - 'friendly_name': 'Thermostat', - 'hvac_action': , - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - 'offset': 10, - 'supported_features': , - 'temperature': 22.0, - 'valve': 30, - }), - 'context': , - 'entity_id': 'climate.thermostat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'auto', - }) -# --- -# name: test_clip_climate_device[config_entry_options0-sensor_payload0][climate.clip_thermostat-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - }), - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.clip_thermostat', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CLIP thermostat', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:02-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_clip_climate_device[config_entry_options0-sensor_payload0][climate.clip_thermostat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 22.6, - 'friendly_name': 'CLIP thermostat', - 'hvac_action': , - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - 'supported_features': , - 'temperature': None, - 'valve': 30, - }), - 'context': , - 'entity_id': 'climate.clip_thermostat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_clip_climate_device[config_entry_options0-sensor_payload0][climate.thermostat-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.thermostat', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Thermostat', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_clip_climate_device[config_entry_options0-sensor_payload0][climate.thermostat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 22.6, - 'friendly_name': 'Thermostat', - 'hvac_action': , - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - 'offset': 10, - 'supported_features': , - 'temperature': 22.0, - 'valve': 30, - }), - 'context': , - 'entity_id': 'climate.thermostat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'auto', - }) -# --- -# name: test_simple_climate_device[sensor_payload0][climate.thermostat-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.thermostat', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'thermostat', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '14:b4:57:ff:fe:d5:4e:77-01-0201', - 'unit_of_measurement': None, - }) -# --- -# name: test_simple_climate_device[sensor_payload0][climate.thermostat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 
21.0, - 'friendly_name': 'thermostat', - 'hvac_action': , - 'hvac_modes': list([ - , - , - ]), - 'locked': True, - 'max_temp': 35, - 'min_temp': 7, - 'offset': 0, - 'supported_features': , - 'temperature': 21.0, - 'valve': 24, - }), - 'context': , - 'entity_id': 'climate.thermostat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- diff --git a/tests/components/deconz/snapshots/test_cover.ambr b/tests/components/deconz/snapshots/test_cover.ambr deleted file mode 100644 index 5c50923453c..00000000000 --- a/tests/components/deconz/snapshots/test_cover.ambr +++ /dev/null @@ -1,150 +0,0 @@ -# serializer version: 1 -# name: test_cover[light_payload0][cover.window_covering_device-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.window_covering_device', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Window covering device', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover[light_payload0][cover.window_covering_device-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_position': 0, - 'device_class': 'shade', - 'friendly_name': 'Window covering device', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.window_covering_device', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- -# name: test_level_controllable_output_cover[light_payload0][cover.vent-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.vent', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Vent', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:22:a3:00:00:00:00:00-01', - 'unit_of_measurement': None, - }) -# --- -# name: test_level_controllable_output_cover[light_payload0][cover.vent-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_position': 5, - 'current_tilt_position': 97, - 'device_class': 'damper', - 'friendly_name': 'Vent', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.vent', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_tilt_cover[light_payload0][cover.covering_device-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.covering_device', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 
'original_icon': None, - 'original_name': 'Covering device', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:24:46:00:00:12:34:56-01', - 'unit_of_measurement': None, - }) -# --- -# name: test_tilt_cover[light_payload0][cover.covering_device-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_position': 100, - 'current_tilt_position': 100, - 'device_class': 'shade', - 'friendly_name': 'Covering device', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.covering_device', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- diff --git a/tests/components/deconz/snapshots/test_diagnostics.ambr b/tests/components/deconz/snapshots/test_diagnostics.ambr index 1ca674a4fbe..911f2e134f2 100644 --- a/tests/components/deconz/snapshots/test_diagnostics.ambr +++ b/tests/components/deconz/snapshots/test_diagnostics.ambr @@ -10,8 +10,6 @@ 'port': 80, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'deconz', 'entry_id': '1', 'minor_version': 1, diff --git a/tests/components/deconz/snapshots/test_fan.ambr b/tests/components/deconz/snapshots/test_fan.ambr deleted file mode 100644 index 8b7dbba64e4..00000000000 --- a/tests/components/deconz/snapshots/test_fan.ambr +++ /dev/null @@ -1,54 +0,0 @@ -# serializer version: 1 -# name: test_fans[light_payload0][fan.ceiling_fan-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'preset_modes': None, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'fan', - 'entity_category': None, - 'entity_id': 'fan.ceiling_fan', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Ceiling fan', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:22:a3:00:00:27:8b:81-01', - 'unit_of_measurement': None, - }) -# --- -# name: test_fans[light_payload0][fan.ceiling_fan-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Ceiling fan', - 'percentage': 100, - 'percentage_step': 1.0, - 'preset_mode': None, - 'preset_modes': None, - 'supported_features': , - }), - 'context': , - 'entity_id': 'fan.ceiling_fan', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/deconz/snapshots/test_hub.ambr b/tests/components/deconz/snapshots/test_hub.ambr deleted file mode 100644 index f3aa9a5e65d..00000000000 --- a/tests/components/deconz/snapshots/test_hub.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: test_device_registry_entry - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://1.2.3.4:80', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'deconz', - '01234E56789A', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Dresden Elektronik', - 'model': 'deCONZ', - 'model_id': None, - 'name': 'deCONZ mock gateway', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- diff --git 
a/tests/components/deconz/snapshots/test_light.ambr b/tests/components/deconz/snapshots/test_light.ambr deleted file mode 100644 index a3ec7caac60..00000000000 --- a/tests/components/deconz/snapshots/test_light.ambr +++ /dev/null @@ -1,1496 +0,0 @@ -# serializer version: 1 -# name: test_groups[input0-light_payload0][light.dimmable_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.dimmable_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dimmable light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:02-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input0-light_payload0][light.dimmable_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 255, - 'color_mode': , - 'friendly_name': 'Dimmable light', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.dimmable_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input0-light_payload0][light.group-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.group', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01234E56789A-/groups/0', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input0-light_payload0][light.group-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'all_on': False, - 'brightness': 255, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Group', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': True, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.group', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input0-light_payload0][light.rgb_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'supported_color_modes': list([ - , - ]), - }), 
- 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.rgb_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'RGB light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input0-light_payload0][light.rgb_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 50, - 'color_mode': , - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'RGB light', - 'hs_color': tuple( - 52.0, - 100.0, - ), - 'is_deconz_group': False, - 'rgb_color': tuple( - 255, - 221, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.5, - 0.5, - ), - }), - 'context': , - 'entity_id': 'light.rgb_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input0-light_payload0][light.tunable_white_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.tunable_white_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tunable white light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input0-light_payload0][light.tunable_white_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'friendly_name': 'Tunable white light', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.tunable_white_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input1-light_payload0][light.dimmable_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.dimmable_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dimmable light', 
- 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:02-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input1-light_payload0][light.dimmable_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 255, - 'color_mode': , - 'friendly_name': 'Dimmable light', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.dimmable_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input1-light_payload0][light.group-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.group', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01234E56789A-/groups/0', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input1-light_payload0][light.group-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'all_on': False, - 'brightness': 50, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Group', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': True, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.group', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input1-light_payload0][light.rgb_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.rgb_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'RGB light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input1-light_payload0][light.rgb_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 50, - 'color_mode': , - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'RGB light', - 'hs_color': tuple( - 52.0, - 100.0, - ), - 'is_deconz_group': 
False, - 'rgb_color': tuple( - 255, - 221, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.5, - 0.5, - ), - }), - 'context': , - 'entity_id': 'light.rgb_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input1-light_payload0][light.tunable_white_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.tunable_white_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tunable white light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input1-light_payload0][light.tunable_white_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'friendly_name': 'Tunable white light', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.tunable_white_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input2-light_payload0][light.dimmable_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.dimmable_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dimmable light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:02-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input2-light_payload0][light.dimmable_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 255, - 'color_mode': , - 'friendly_name': 'Dimmable light', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.dimmable_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input2-light_payload0][light.group-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 
'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.group', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01234E56789A-/groups/0', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input2-light_payload0][light.group-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'all_on': False, - 'brightness': 50, - 'color_mode': , - 'color_temp': None, - 'color_temp_kelvin': None, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Group', - 'hs_color': tuple( - 52.0, - 100.0, - ), - 'is_deconz_group': True, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 221, - 0, - ), - 'supported_color_modes': list([ - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.5, - 0.5, - ), - }), - 'context': , - 'entity_id': 'light.group', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input2-light_payload0][light.rgb_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.rgb_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'RGB light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input2-light_payload0][light.rgb_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 50, - 'color_mode': , - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'RGB light', - 'hs_color': tuple( - 52.0, - 100.0, - ), - 'is_deconz_group': False, - 'rgb_color': tuple( - 255, - 221, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.5, - 0.5, - ), - }), - 'context': , - 'entity_id': 'light.rgb_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input2-light_payload0][light.tunable_white_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.tunable_white_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 
'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tunable white light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input2-light_payload0][light.tunable_white_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'friendly_name': 'Tunable white light', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.tunable_white_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_lights[light_payload0][light.hue_go-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.hue_go', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hue Go', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:01:23:45:67-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload0][light.hue_go-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 254, - 'color_mode': , - 'color_temp': 375, - 'color_temp_kelvin': 2666, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Hue Go', - 'hs_color': tuple( - 28.47, - 66.821, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 165, - 84, - ), - 'supported_color_modes': list([ - , - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.53, - 0.388, - ), - }), - 'context': , - 'entity_id': 'light.hue_go', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_lights[light_payload1][light.hue_ensis-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 7142, - 'max_mireds': 650, - 'min_color_temp_kelvin': 1538, - 'min_mireds': 140, - 'supported_color_modes': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.hue_ensis', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hue Ensis', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:01:23:45:67-01', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload1][light.hue_ensis-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 254, - 'color_mode': , - 'color_temp': None, - 'color_temp_kelvin': None, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Hue Ensis', - 'hs_color': tuple( - 29.691, - 38.039, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 7142, - 'max_mireds': 650, - 'min_color_temp_kelvin': 1538, - 'min_mireds': 140, - 'rgb_color': tuple( - 255, - 206, - 158, - ), - 'supported_color_modes': list([ - , - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.427, - 0.373, - ), - }), - 'context': , - 'entity_id': 'light.hue_ensis', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_lights[light_payload2][light.lidl_xmas_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'carnival', - 'collide', - 'fading', - 'fireworks', - 'flag', - 'glow', - 'rainbow', - 'snake', - 'snow', - 'sparkles', - 'steady', - 'strobe', - 'twinkle', - 'updown', - 'vintage', - 'waves', - ]), - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.lidl_xmas_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'LIDL xmas light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '58:8e:81:ff:fe:db:7b:be-01', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload2][light.lidl_xmas_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 25, - 'color_mode': , - 'effect': None, - 'effect_list': list([ - 'carnival', - 'collide', - 'fading', - 'fireworks', - 'flag', - 'glow', - 'rainbow', - 'snake', - 'snow', - 'sparkles', - 'steady', - 'strobe', - 'twinkle', - 'updown', - 'vintage', - 'waves', - ]), - 'friendly_name': 'LIDL xmas light', - 'hs_color': tuple( - 294.938, - 55.294, - ), - 'is_deconz_group': False, - 'rgb_color': tuple( - 243, - 113, - 255, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.357, - 0.188, - ), - }), - 'context': , - 'entity_id': 'light.lidl_xmas_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_lights[light_payload3][light.hue_white_ambiance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6535, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.hue_white_ambiance', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hue White Ambiance', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:01:23:45:67-02', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload3][light.hue_white_ambiance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 254, - 'color_mode': , - 'color_temp': 396, - 'color_temp_kelvin': 2525, - 'friendly_name': 'Hue White Ambiance', - 'hs_color': tuple( - 28.809, - 71.624, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 160, - 72, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.544, - 0.389, - ), - }), - 'context': , - 'entity_id': 'light.hue_white_ambiance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_lights[light_payload4][light.hue_filament-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.hue_filament', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hue Filament', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:01:23:45:67-03', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload4][light.hue_filament-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 254, - 'color_mode': , - 'friendly_name': 'Hue Filament', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.hue_filament', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_lights[light_payload5][light.simple_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.simple_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Simple Light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:01:23:45:67-01', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload5][light.simple_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'color_mode': , - 'friendly_name': 'Simple Light', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.simple_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- 
-# name: test_lights[light_payload6][light.gradient_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - , - , - , - , - , - , - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.gradient_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Gradient light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:0b:0c:0d:0e-0f', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload6][light.gradient_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 184, - 'color_mode': , - 'color_temp': None, - 'color_temp_kelvin': None, - 'effect': None, - 'effect_list': list([ - , - , - , - , - , - , - ]), - 'friendly_name': 'Gradient light', - 'hs_color': tuple( - 98.095, - 74.118, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 135, - 255, - 66, - ), - 'supported_color_modes': list([ - , - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.2727, - 0.6226, - ), - }), - 'context': , - 'entity_id': 'light.gradient_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/deconz/snapshots/test_number.ambr b/tests/components/deconz/snapshots/test_number.ambr deleted file mode 100644 index 26e044e1d31..00000000000 --- a/tests/components/deconz/snapshots/test_number.ambr +++ /dev/null @@ -1,111 +0,0 @@ -# serializer version: 1 -# name: test_number_entities[sensor_payload0-expected0][number.presence_sensor_delay-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65535, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.presence_sensor_delay', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Presence sensor Delay', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-delay', - 'unit_of_measurement': None, - }) -# --- -# name: test_number_entities[sensor_payload0-expected0][number.presence_sensor_delay-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Presence sensor Delay', - 'max': 65535, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.presence_sensor_delay', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_number_entities[sensor_payload1-expected1][number.presence_sensor_duration-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': dict({ - 'max': 65535, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.presence_sensor_duration', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Presence sensor Duration', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-duration', - 'unit_of_measurement': None, - }) -# --- -# name: test_number_entities[sensor_payload1-expected1][number.presence_sensor_duration-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Presence sensor Duration', - 'max': 65535, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.presence_sensor_duration', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- diff --git a/tests/components/deconz/snapshots/test_scene.ambr b/tests/components/deconz/snapshots/test_scene.ambr deleted file mode 100644 index 85a5ab92c5c..00000000000 --- a/tests/components/deconz/snapshots/test_scene.ambr +++ /dev/null @@ -1,47 +0,0 @@ -# serializer version: 1 -# name: test_scenes[group_payload0-expected0][scene.light_group_scene-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'scene', - 'entity_category': None, - 'entity_id': 'scene.light_group_scene', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Scene', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01234E56789A/groups/1/scenes/1', - 'unit_of_measurement': None, - }) -# --- -# name: test_scenes[group_payload0-expected0][scene.light_group_scene-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Light group Scene', - }), - 'context': , - 'entity_id': 'scene.light_group_scene', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/deconz/snapshots/test_select.ambr b/tests/components/deconz/snapshots/test_select.ambr deleted file mode 100644 index 997eab0901f..00000000000 --- a/tests/components/deconz/snapshots/test_select.ambr +++ /dev/null @@ -1,573 +0,0 @@ -# serializer version: 1 -# name: test_select[sensor_payload0-expected0][select.aqara_fp1_device_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'leftright', - 'undirected', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.aqara_fp1_device_mode', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Aqara FP1 Device Mode', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': None, - 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-device_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[sensor_payload0-expected0][select.aqara_fp1_device_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Aqara FP1 Device Mode', - 'options': list([ - 'leftright', - 'undirected', - ]), - }), - 'context': , - 'entity_id': 'select.aqara_fp1_device_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'undirected', - }) -# --- -# name: test_select[sensor_payload0-expected0][select.aqara_fp1_sensitivity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'High', - 'Medium', - 'Low', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.aqara_fp1_sensitivity', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Aqara FP1 Sensitivity', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-sensitivity', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[sensor_payload0-expected0][select.aqara_fp1_sensitivity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Aqara FP1 Sensitivity', - 'options': list([ - 'High', - 'Medium', - 'Low', - ]), - }), - 'context': , - 'entity_id': 'select.aqara_fp1_sensitivity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'High', - }) -# --- -# name: test_select[sensor_payload0-expected0][select.aqara_fp1_trigger_distance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'far', - 'medium', - 'near', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.aqara_fp1_trigger_distance', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Aqara FP1 Trigger Distance', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-trigger_distance', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[sensor_payload0-expected0][select.aqara_fp1_trigger_distance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Aqara FP1 Trigger Distance', - 'options': list([ - 'far', - 'medium', - 'near', - ]), - }), - 'context': , - 'entity_id': 'select.aqara_fp1_trigger_distance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'medium', - }) -# --- -# name: test_select[sensor_payload1-expected1][select.aqara_fp1_device_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'leftright', - 'undirected', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.aqara_fp1_device_mode', - 
'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Aqara FP1 Device Mode', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-device_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[sensor_payload1-expected1][select.aqara_fp1_device_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Aqara FP1 Device Mode', - 'options': list([ - 'leftright', - 'undirected', - ]), - }), - 'context': , - 'entity_id': 'select.aqara_fp1_device_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'undirected', - }) -# --- -# name: test_select[sensor_payload1-expected1][select.aqara_fp1_sensitivity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'High', - 'Medium', - 'Low', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.aqara_fp1_sensitivity', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Aqara FP1 Sensitivity', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-sensitivity', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[sensor_payload1-expected1][select.aqara_fp1_sensitivity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Aqara FP1 Sensitivity', - 'options': list([ - 'High', - 'Medium', - 'Low', - ]), - }), - 'context': , - 'entity_id': 'select.aqara_fp1_sensitivity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'High', - }) -# --- -# name: test_select[sensor_payload1-expected1][select.aqara_fp1_trigger_distance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'far', - 'medium', - 'near', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.aqara_fp1_trigger_distance', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Aqara FP1 Trigger Distance', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-trigger_distance', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[sensor_payload1-expected1][select.aqara_fp1_trigger_distance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Aqara FP1 Trigger Distance', - 'options': list([ - 'far', - 'medium', - 'near', - ]), - }), - 'context': , - 'entity_id': 'select.aqara_fp1_trigger_distance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'medium', - }) -# --- -# name: test_select[sensor_payload2-expected2][select.aqara_fp1_device_mode-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'leftright', - 'undirected', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.aqara_fp1_device_mode', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Aqara FP1 Device Mode', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-device_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[sensor_payload2-expected2][select.aqara_fp1_device_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Aqara FP1 Device Mode', - 'options': list([ - 'leftright', - 'undirected', - ]), - }), - 'context': , - 'entity_id': 'select.aqara_fp1_device_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'undirected', - }) -# --- -# name: test_select[sensor_payload2-expected2][select.aqara_fp1_sensitivity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'High', - 'Medium', - 'Low', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.aqara_fp1_sensitivity', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Aqara FP1 Sensitivity', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-sensitivity', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[sensor_payload2-expected2][select.aqara_fp1_sensitivity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Aqara FP1 Sensitivity', - 'options': list([ - 'High', - 'Medium', - 'Low', - ]), - }), - 'context': , - 'entity_id': 'select.aqara_fp1_sensitivity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'High', - }) -# --- -# name: test_select[sensor_payload2-expected2][select.aqara_fp1_trigger_distance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'far', - 'medium', - 'near', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.aqara_fp1_trigger_distance', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Aqara FP1 Trigger Distance', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-trigger_distance', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[sensor_payload2-expected2][select.aqara_fp1_trigger_distance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Aqara FP1 Trigger 
Distance', - 'options': list([ - 'far', - 'medium', - 'near', - ]), - }), - 'context': , - 'entity_id': 'select.aqara_fp1_trigger_distance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'medium', - }) -# --- -# name: test_select[sensor_payload3-expected3][select.ikea_starkvind_fan_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'auto', - 'speed_1', - 'speed_2', - 'speed_3', - 'speed_4', - 'speed_5', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.ikea_starkvind_fan_mode', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IKEA Starkvind Fan Mode', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '0c:43:14:ff:fe:6c:20:12-01-fc7d-fan_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[sensor_payload3-expected3][select.ikea_starkvind_fan_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'IKEA Starkvind Fan Mode', - 'options': list([ - 'off', - 'auto', - 'speed_1', - 'speed_2', - 'speed_3', - 'speed_4', - 'speed_5', - ]), - }), - 'context': , - 'entity_id': 'select.ikea_starkvind_fan_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'speed_1', - }) -# --- diff --git a/tests/components/deconz/snapshots/test_sensor.ambr b/tests/components/deconz/snapshots/test_sensor.ambr deleted file mode 100644 index 0b76366b5d1..00000000000 --- a/tests/components/deconz/snapshots/test_sensor.ambr +++ /dev/null @@ -1,2255 +0,0 @@ -# serializer version: 1 -# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.clip_flur-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.clip_flur', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CLIP Flur', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '/sensors/3-status', - 'unit_of_measurement': None, - }) -# --- -# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.clip_flur-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'CLIP Flur', - 'on': True, - }), - 'context': , - 'entity_id': 'sensor.clip_flur', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.clip_light_level_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.clip_light_level_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - 
}), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'CLIP light level sensor', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-00-light_level', - 'unit_of_measurement': 'lx', - }) -# --- -# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.clip_light_level_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'illuminance', - 'friendly_name': 'CLIP light level sensor', - 'state_class': , - 'unit_of_measurement': 'lx', - }), - 'context': , - 'entity_id': 'sensor.clip_light_level_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '999.8', - }) -# --- -# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.light_level_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.light_level_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Light level sensor', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-light_level', - 'unit_of_measurement': 'lx', - }) -# --- -# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.light_level_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'dark': False, - 'device_class': 'illuminance', - 'friendly_name': 'Light level sensor', - 'on': True, - 'state_class': , - 'temperature': 0.1, - 'unit_of_measurement': 'lx', - }), - 'context': , - 'entity_id': 'sensor.light_level_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '999.8', - }) -# --- -# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.light_level_sensor_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.light_level_sensor_temperature', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Light level sensor Temperature', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-internal_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.light_level_sensor_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Light level sensor Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.light_level_sensor_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.1', - }) -# --- -# name: 
test_sensors[config_entry_options0-sensor_payload0-expected0][sensor.bosch_air_quality_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.bosch_air_quality_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'BOSCH Air quality sensor', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:12:4b:00:14:4d:00:07-02-fdef-air_quality', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload0-expected0][sensor.bosch_air_quality_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'BOSCH Air quality sensor', - }), - 'context': , - 'entity_id': 'sensor.bosch_air_quality_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'poor', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload0-expected0][sensor.bosch_air_quality_sensor_ppb-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.bosch_air_quality_sensor_ppb', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'BOSCH Air quality sensor PPB', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:12:4b:00:14:4d:00:07-02-fdef-air_quality_ppb', - 'unit_of_measurement': 'ppb', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload0-expected0][sensor.bosch_air_quality_sensor_ppb-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'BOSCH Air quality sensor PPB', - 'state_class': , - 'unit_of_measurement': 'ppb', - }), - 'context': , - 'entity_id': 'sensor.bosch_air_quality_sensor_ppb', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '809', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload1-expected1][sensor.bosch_air_quality_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.bosch_air_quality_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'BOSCH Air quality sensor', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:12:4b:00:14:4d:00:07-02-fdef-air_quality', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload1-expected1][sensor.bosch_air_quality_sensor-state] - StateSnapshot({ - 'attributes': 
ReadOnlyDict({ - 'friendly_name': 'BOSCH Air quality sensor', - }), - 'context': , - 'entity_id': 'sensor.bosch_air_quality_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'poor', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload1-expected1][sensor.bosch_air_quality_sensor_ppb-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.bosch_air_quality_sensor_ppb', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'BOSCH Air quality sensor PPB', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:12:4b:00:14:4d:00:07-02-fdef-air_quality_ppb', - 'unit_of_measurement': 'ppb', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload1-expected1][sensor.bosch_air_quality_sensor_ppb-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'BOSCH Air quality sensor PPB', - 'state_class': , - 'unit_of_measurement': 'ppb', - }), - 'context': , - 'entity_id': 'sensor.bosch_air_quality_sensor_ppb', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '809', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload10-expected10][sensor.fsm_state_motion_stair-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.fsm_state_motion_stair', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'FSM_STATE Motion stair', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'fsm-state-1520195376277-status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload10-expected10][sensor.fsm_state_motion_stair-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'FSM_STATE Motion stair', - 'on': True, - }), - 'context': , - 'entity_id': 'sensor.fsm_state_motion_stair', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload11-expected11][sensor.mi_temperature_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mi_temperature_1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Mi temperature 1', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': None, - 'unique_id': '00:15:8d:00:02:45:dc:53-01-0405-humidity', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload11-expected11][sensor.mi_temperature_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'humidity', - 'friendly_name': 'Mi temperature 1', - 'on': True, - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.mi_temperature_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '35.55', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload11-expected11][sensor.mi_temperature_1_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mi_temperature_1_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Mi temperature 1 Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:45:dc:53-01-0405-battery', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload11-expected11][sensor.mi_temperature_1_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Mi temperature 1 Battery', - 'on': True, - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.mi_temperature_1_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.soil_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Soil Sensor', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'a4:c1:38:fe:86:8f:07:a3-01-0408-moisture', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'moisture', - 'friendly_name': 'Soil Sensor', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.soil_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '72.13', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 
'entity_category': , - 'entity_id': 'sensor.soil_sensor_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Soil Sensor Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'a4:c1:38:fe:86:8f:07:a3-01-0408-battery', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Soil Sensor Battery', - 'on': True, - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.soil_sensor_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload13-expected13][sensor.motion_sensor_4-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.motion_sensor_4', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Motion sensor 4', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:17:88:01:03:28:8c:9b-02-0400-light_level', - 'unit_of_measurement': 'lx', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload13-expected13][sensor.motion_sensor_4-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'dark': True, - 'daylight': False, - 'device_class': 'illuminance', - 'friendly_name': 'Motion sensor 4', - 'on': True, - 'state_class': , - 'unit_of_measurement': 'lx', - }), - 'context': , - 'entity_id': 'sensor.motion_sensor_4', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5.0', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload13-expected13][sensor.motion_sensor_4_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.motion_sensor_4_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Motion sensor 4 Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:17:88:01:03:28:8c:9b-02-0400-battery', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload13-expected13][sensor.motion_sensor_4_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'dark': True, - 'daylight': False, - 'device_class': 'battery', - 'friendly_name': 'Motion sensor 4 Battery', - 'on': True, - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 
'sensor.motion_sensor_4_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload14-expected14][sensor.starkvind_airpurifier_pm25-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.starkvind_airpurifier_pm25', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'STARKVIND AirPurifier PM25', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-042a-particulate_matter_pm2_5', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload14-expected14][sensor.starkvind_airpurifier_pm25-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pm25', - 'friendly_name': 'STARKVIND AirPurifier PM25', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.starkvind_airpurifier_pm25', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload15-expected15][sensor.power_16-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.power_16', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power 16', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:0d:6f:00:0b:7a:64:29-01-0b04-power', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload15-expected15][sensor.power_16-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current': 34, - 'device_class': 'power', - 'friendly_name': 'Power 16', - 'on': True, - 'state_class': , - 'unit_of_measurement': , - 'voltage': 231, - }), - 'context': , - 'entity_id': 'sensor.power_16', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '64', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload16-expected16][sensor.mi_temperature_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mi_temperature_1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Mi temperature 1', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:45:dc:53-01-0403-pressure', - 
'unit_of_measurement': , - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload16-expected16][sensor.mi_temperature_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'Mi temperature 1', - 'on': True, - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mi_temperature_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1010', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload16-expected16][sensor.mi_temperature_1_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mi_temperature_1_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Mi temperature 1 Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:45:dc:53-01-0403-battery', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload16-expected16][sensor.mi_temperature_1_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Mi temperature 1 Battery', - 'on': True, - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.mi_temperature_1_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload17-expected17][sensor.mi_temperature_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mi_temperature_1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Mi temperature 1', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:45:dc:53-01-0402-temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload17-expected17][sensor.mi_temperature_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Mi temperature 1', - 'on': True, - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mi_temperature_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '21.82', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload17-expected17][sensor.mi_temperature_1_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 
'sensor.mi_temperature_1_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Mi temperature 1 Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:45:dc:53-01-0402-battery', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload17-expected17][sensor.mi_temperature_1_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Mi temperature 1 Battery', - 'on': True, - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.mi_temperature_1_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload18-expected18][sensor.etrv_sejour-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.etrv_sejour', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'eTRV Séjour', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'cc:cc:cc:ff:fe:38:4d:b3-01-000a-last_set', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload18-expected18][sensor.etrv_sejour-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'eTRV Séjour', - }), - 'context': , - 'entity_id': 'sensor.etrv_sejour', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2020-11-19T08:07:08+00:00', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload18-expected18][sensor.etrv_sejour_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.etrv_sejour_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'eTRV Séjour Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'cc:cc:cc:ff:fe:38:4d:b3-01-000a-battery', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload18-expected18][sensor.etrv_sejour_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'eTRV Séjour Battery', - 'on': True, - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.etrv_sejour_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_battery-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.alarm_10_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Alarm 10 Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:b5:d1:80-01-0500-battery', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Alarm 10 Battery', - 'on': True, - 'state_class': , - 'temperature': 26.0, - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.alarm_10_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.alarm_10_temperature', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Alarm 10 Temperature', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:b5:d1:80-01-0500-internal_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Alarm 10 Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.alarm_10_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '26.0', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_ch2o-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.airquality_1_ch2o', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'AirQuality 1 CH2O', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_formaldehyde', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_ch2o-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 
'volatile_organic_compounds', - 'friendly_name': 'AirQuality 1 CH2O', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.airquality_1_ch2o', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_co2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.airquality_1_co2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'AirQuality 1 CO2', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_co2', - 'unit_of_measurement': 'ppm', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_co2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'carbon_dioxide', - 'friendly_name': 'AirQuality 1 CO2', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'context': , - 'entity_id': 'sensor.airquality_1_co2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '359', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_pm25-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.airquality_1_pm25', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'AirQuality 1 PM25', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_pm2_5', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_pm25-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pm25', - 'friendly_name': 'AirQuality 1 PM25', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.airquality_1_pm25', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_ppb-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.airquality_1_ppb', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'AirQuality 1 PPB', - 'platform': 'deconz', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_ppb', - 'unit_of_measurement': 'ppb', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_ppb-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'AirQuality 1 PPB', - 'state_class': , - 'unit_of_measurement': 'ppb', - }), - 'context': , - 'entity_id': 'sensor.airquality_1_ppb', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload20-expected20][sensor.dimmer_switch_3_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.dimmer_switch_3_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Dimmer switch 3 Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:17:88:01:02:0e:32:a3-02-fc00-battery', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload20-expected20][sensor.dimmer_switch_3_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'event_id': 'dimmer_switch_3', - 'friendly_name': 'Dimmer switch 3 Battery', - 'on': True, - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.dimmer_switch_3_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '90', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload21-expected21][sensor.ikea_starkvind_filter_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.ikea_starkvind_filter_time', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'IKEA Starkvind Filter time', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '0c:43:14:ff:fe:6c:20:12-01-fc7d-air_purifier_filter_run_time', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload21-expected21][sensor.ikea_starkvind_filter_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'IKEA Starkvind Filter time', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.ikea_starkvind_filter_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.849594907407407', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ch2o-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.airquality_1_ch2o', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'AirQuality 1 CH2O', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_formaldehyde', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ch2o-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'volatile_organic_compounds', - 'friendly_name': 'AirQuality 1 CH2O', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.airquality_1_ch2o', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_co2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.airquality_1_co2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'AirQuality 1 CO2', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_co2', - 'unit_of_measurement': 'ppm', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_co2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'carbon_dioxide', - 'friendly_name': 'AirQuality 1 CO2', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'context': , - 'entity_id': 'sensor.airquality_1_co2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '359', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_pm25-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.airquality_1_pm25', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'AirQuality 1 PM25', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_pm2_5', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_pm25-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pm25', - 'friendly_name': 'AirQuality 1 PM25', - 'state_class': , - 'unit_of_measurement': 
'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.airquality_1_pm25', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ppb-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.airquality_1_ppb', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'AirQuality 1 PPB', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_ppb', - 'unit_of_measurement': 'ppb', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ppb-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'AirQuality 1 PPB', - 'state_class': , - 'unit_of_measurement': 'ppb', - }), - 'context': , - 'entity_id': 'sensor.airquality_1_ppb', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_ch2o-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.airquality_1_ch2o', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'AirQuality 1 CH2O', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_formaldehyde', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_ch2o-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'volatile_organic_compounds', - 'friendly_name': 'AirQuality 1 CH2O', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.airquality_1_ch2o', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_co2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.airquality_1_co2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'AirQuality 1 CO2', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_co2', - 
'unit_of_measurement': 'ppm', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_co2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'carbon_dioxide', - 'friendly_name': 'AirQuality 1 CO2', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'context': , - 'entity_id': 'sensor.airquality_1_co2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '359', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_pm25-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.airquality_1_pm25', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'AirQuality 1 PM25', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_pm2_5', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_pm25-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pm25', - 'friendly_name': 'AirQuality 1 PM25', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.airquality_1_pm25', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_ppb-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.airquality_1_ppb', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'AirQuality 1 PPB', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_ppb', - 'unit_of_measurement': 'ppb', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_ppb-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'AirQuality 1 PPB', - 'state_class': , - 'unit_of_measurement': 'ppb', - }), - 'context': , - 'entity_id': 'sensor.airquality_1_ppb', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload5-expected5][sensor.fyrtur_block_out_roller_blind_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fyrtur_block_out_roller_blind_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ 
- }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'FYRTUR block-out roller blind Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:0d:6f:ff:fe:01:23:45-01-0001-battery', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload5-expected5][sensor.fyrtur_block_out_roller_blind_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'FYRTUR block-out roller blind Battery', - 'on': True, - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fyrtur_block_out_roller_blind_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload6-expected6][sensor.carbondioxide_35-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.carbondioxide_35', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'CarbonDioxide 35', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-040d-carbon_dioxide', - 'unit_of_measurement': 'ppb', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload6-expected6][sensor.carbondioxide_35-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'carbon_dioxide', - 'friendly_name': 'CarbonDioxide 35', - 'state_class': , - 'unit_of_measurement': 'ppb', - }), - 'context': , - 'entity_id': 'sensor.carbondioxide_35', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '370', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload7-expected7][sensor.consumption_15-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.consumption_15', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption 15', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:0d:6f:00:0b:7a:64:29-01-0702-consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload7-expected7][sensor.consumption_15-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Consumption 15', - 'on': True, - 'power': 123, - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.consumption_15', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.342', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload8-expected8][sensor.daylight-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.daylight', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:white-balance-sunny', - 'original_name': 'Daylight', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01:23:4E:FF:FF:56:78:9A-01-daylight_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload8-expected8][sensor.daylight-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'daylight': True, - 'friendly_name': 'Daylight', - 'icon': 'mdi:white-balance-sunny', - 'on': True, - }), - 'context': , - 'entity_id': 'sensor.daylight', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'solar_noon', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload9-expected9][sensor.formaldehyde_34-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.formaldehyde_34', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Formaldehyde 34', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-042b-formaldehyde', - 'unit_of_measurement': 'ppb', - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload9-expected9][sensor.formaldehyde_34-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'volatile_organic_compounds', - 'friendly_name': 'Formaldehyde 34', - 'state_class': , - 'unit_of_measurement': 'ppb', - }), - 'context': , - 'entity_id': 'sensor.formaldehyde_34', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- diff --git a/tests/components/deconz/test_alarm_control_panel.py b/tests/components/deconz/test_alarm_control_panel.py index dbe75584df7..c855076de2f 100644 --- a/tests/components/deconz/test_alarm_control_panel.py +++ b/tests/components/deconz/test_alarm_control_panel.py @@ -1,15 +1,11 @@ """deCONZ alarm control panel platform tests.""" -from collections.abc import Callable from unittest.mock import patch from pydeconz.models.sensor.ancillary_control import AncillaryControlPanel -import pytest -from syrupy import SnapshotAssertion from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, - AlarmControlPanelState, ) from homeassistant.const import ( ATTR_CODE, @@ -18,21 +14,41 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, - Platform, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMED, + STATE_ALARM_PENDING, + STATE_ALARM_TRIGGERED, + STATE_UNAVAILABLE, + STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers 
import entity_registry as er -from .conftest import ConfigEntryFactoryType, WebsocketDataType +from .test_gateway import ( + DECONZ_WEB_REQUEST, + mock_deconz_put_request, + setup_deconz_integration, +) -from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.mark.parametrize( - "alarm_system_payload", - [ - { +async def test_no_sensors( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that no sensors in deconz results in no climate entities.""" + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 + + +async def test_alarm_control_panel( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket +) -> None: + """Test successful creation of alarm control panel entities.""" + data = { + "alarmsystems": { "0": { "name": "default", "config": { @@ -59,95 +75,230 @@ from tests.test_util.aiohttp import AiohttpClientMocker }, }, } - } - ], -) -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "battery": 95, - "enrolled": 1, - "on": True, - "pending": [], - "reachable": True, - }, - "ep": 1, - "etag": "5aaa1c6bae8501f59929539c6e8f44d6", - "lastseen": "2021-07-25T18:07Z", - "manufacturername": "lk", - "modelid": "ZB-KeypadGeneric-D0002", - "name": "Keypad", - "state": { - "action": "armed_stay", - "lastupdated": "2021-07-25T18:02:51.172", - "lowbattery": False, - "panel": "none", - "seconds_remaining": 55, - "tampered": False, - }, - "swversion": "3.13", - "type": "ZHAAncillaryControl", - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - ], -) -async def test_alarm_control_panel( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - config_entry_factory: ConfigEntryFactoryType, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - sensor_ws_data: WebsocketDataType, - snapshot: SnapshotAssertion, -) -> None: - """Test successful creation of alarm control panel entities.""" - with patch( - "homeassistant.components.deconz.PLATFORMS", [Platform.ALARM_CONTROL_PANEL] - ): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + }, + "sensors": { + "0": { + "config": { + "battery": 95, + "enrolled": 1, + "on": True, + "pending": [], + "reachable": True, + }, + "ep": 1, + "etag": "5aaa1c6bae8501f59929539c6e8f44d6", + "lastseen": "2021-07-25T18:07Z", + "manufacturername": "lk", + "modelid": "ZB-KeypadGeneric-D0002", + "name": "Keypad", + "state": { + "action": "armed_stay", + "lastupdated": "2021-07-25T18:02:51.172", + "lowbattery": False, + "panel": "none", + "seconds_remaining": 55, + "tampered": False, + }, + "swversion": "3.13", + "type": "ZHAAncillaryControl", + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + }, + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) - for action, state in ( - # Event signals alarm control panel armed state - (AncillaryControlPanel.ARMED_AWAY, AlarmControlPanelState.ARMED_AWAY), - (AncillaryControlPanel.ARMED_NIGHT, AlarmControlPanelState.ARMED_NIGHT), - (AncillaryControlPanel.ARMED_STAY, AlarmControlPanelState.ARMED_HOME), - (AncillaryControlPanel.DISARMED, AlarmControlPanelState.DISARMED), - # Event signals alarm control panel arming state - (AncillaryControlPanel.ARMING_AWAY, AlarmControlPanelState.ARMING), - (AncillaryControlPanel.ARMING_NIGHT, AlarmControlPanelState.ARMING), - 
(AncillaryControlPanel.ARMING_STAY, AlarmControlPanelState.ARMING), - # Event signals alarm control panel pending state - (AncillaryControlPanel.ENTRY_DELAY, AlarmControlPanelState.PENDING), - (AncillaryControlPanel.EXIT_DELAY, AlarmControlPanelState.PENDING), - # Event signals alarm control panel triggered state - (AncillaryControlPanel.IN_ALARM, AlarmControlPanelState.TRIGGERED), - # Event signals alarm control panel unknown state keeps previous state - (AncillaryControlPanel.NOT_READY, AlarmControlPanelState.TRIGGERED), + assert len(hass.states.async_all()) == 4 + assert hass.states.get("alarm_control_panel.keypad").state == STATE_UNKNOWN + + # Event signals alarm control panel armed away + + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "state": {"panel": AncillaryControlPanel.ARMED_AWAY}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + + assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_ARMED_AWAY + + # Event signals alarm control panel armed night + + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "state": {"panel": AncillaryControlPanel.ARMED_NIGHT}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + + assert ( + hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_ARMED_NIGHT + ) + + # Event signals alarm control panel armed home + + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "state": {"panel": AncillaryControlPanel.ARMED_STAY}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + + assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_ARMED_HOME + + # Event signals alarm control panel disarmed + + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "state": {"panel": AncillaryControlPanel.DISARMED}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + + assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_DISARMED + + # Event signals alarm control panel arming + + for arming_event in ( + AncillaryControlPanel.ARMING_AWAY, + AncillaryControlPanel.ARMING_NIGHT, + AncillaryControlPanel.ARMING_STAY, ): - await sensor_ws_data({"state": {"panel": action}}) - assert hass.states.get("alarm_control_panel.keypad").state == state + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "state": {"panel": arming_event}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + + assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_ARMING + + # Event signals alarm control panel pending + + for pending_event in ( + AncillaryControlPanel.ENTRY_DELAY, + AncillaryControlPanel.EXIT_DELAY, + ): + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "state": {"panel": pending_event}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + + assert ( + hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_PENDING + ) + + # Event signals alarm control panel triggered + + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "state": {"panel": AncillaryControlPanel.IN_ALARM}, + } + await 
mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + + assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_TRIGGERED + + # Event signals alarm control panel unknown state keeps previous state + + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "state": {"panel": AncillaryControlPanel.NOT_READY}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + + assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_TRIGGERED # Verify service calls - for path, service, code in ( - # Service set alarm to away mode - ("arm_away", SERVICE_ALARM_ARM_AWAY, "1234"), - # Service set alarm to home mode - ("arm_stay", SERVICE_ALARM_ARM_HOME, "2345"), - # Service set alarm to night mode - ("arm_night", SERVICE_ALARM_ARM_NIGHT, "3456"), - # Service set alarm to disarmed - ("disarm", SERVICE_ALARM_DISARM, "4567"), - ): - aioclient_mock.mock_calls.clear() - aioclient_mock = mock_put_request(f"/alarmsystems/0/{path}") - await hass.services.async_call( - ALARM_CONTROL_PANEL_DOMAIN, - service, - {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: code}, - blocking=True, - ) - assert aioclient_mock.mock_calls[0][2] == {"code0": code} + # Service set alarm to away mode + + mock_deconz_put_request( + aioclient_mock, config_entry.data, "/alarmsystems/0/arm_away" + ) + + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_AWAY, + {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: "1234"}, + blocking=True, + ) + assert aioclient_mock.mock_calls[1][2] == {"code0": "1234"} + + # Service set alarm to home mode + + mock_deconz_put_request( + aioclient_mock, config_entry.data, "/alarmsystems/0/arm_stay" + ) + + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_HOME, + {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: "2345"}, + blocking=True, + ) + assert aioclient_mock.mock_calls[2][2] == {"code0": "2345"} + + # Service set alarm to night mode + + mock_deconz_put_request( + aioclient_mock, config_entry.data, "/alarmsystems/0/arm_night" + ) + + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_NIGHT, + {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: "3456"}, + blocking=True, + ) + assert aioclient_mock.mock_calls[3][2] == {"code0": "3456"} + + # Service set alarm to disarmed + + mock_deconz_put_request(aioclient_mock, config_entry.data, "/alarmsystems/0/disarm") + + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: "4567"}, + blocking=True, + ) + assert aioclient_mock.mock_calls[4][2] == {"code0": "4567"} + + await hass.config_entries.async_unload(config_entry.entry_id) + + states = hass.states.async_all() + assert len(states) == 4 + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_binary_sensor.py b/tests/components/deconz/test_binary_sensor.py index 59d31afb9fc..6ab5f2f5477 100644 --- a/tests/components/deconz/test_binary_sensor.py +++ b/tests/components/deconz/test_binary_sensor.py @@ -1,12 +1,10 @@ """deCONZ binary sensor platform tests.""" -from collections.abc import Callable -from typing import Any from unittest.mock import patch import pytest 
-from syrupy import SnapshotAssertion +from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.components.deconz.const import ( CONF_ALLOW_CLIP_SENSOR, CONF_ALLOW_NEW_DEVICES, @@ -14,13 +12,32 @@ from homeassistant.components.deconz.const import ( DOMAIN as DECONZ_DOMAIN, ) from homeassistant.components.deconz.services import SERVICE_DEVICE_REFRESH -from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + STATE_OFF, + STATE_ON, + STATE_UNAVAILABLE, + EntityCategory, +) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er -from .conftest import ConfigEntryFactoryType, WebsocketDataType +from .test_gateway import ( + DECONZ_WEB_REQUEST, + mock_deconz_request, + setup_deconz_integration, +) + +from tests.test_util.aiohttp import AiohttpClientMocker + + +async def test_no_binary_sensors( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that no sensors in deconz results in no sensor entities.""" + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 -from tests.common import MockConfigEntry, snapshot_platform TEST_DATA = [ ( # Alarm binary sensor @@ -47,7 +64,19 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:b5:d1:80-01-0500", }, { + "entity_count": 3, + "device_count": 3, "entity_id": "binary_sensor.alarm_10", + "unique_id": "00:15:8d:00:02:b5:d1:80-01-0500-alarm", + "state": STATE_OFF, + "entity_category": None, + "device_class": BinarySensorDeviceClass.SAFETY, + "attributes": { + "on": True, + "temperature": 26.0, + "device_class": "safety", + "friendly_name": "Alarm 10", + }, "websocket_event": {"alarm": True}, "next_state": STATE_ON, }, @@ -76,7 +105,18 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:a5:21:24-01-0101", }, { + "entity_count": 4, + "device_count": 3, "entity_id": "binary_sensor.cave_co", + "unique_id": "00:15:8d:00:02:a5:21:24-01-0101-carbon_monoxide", + "state": STATE_OFF, + "entity_category": None, + "device_class": BinarySensorDeviceClass.CO, + "attributes": { + "on": True, + "device_class": "carbon_monoxide", + "friendly_name": "Cave CO", + }, "websocket_event": {"carbonmonoxide": True}, "next_state": STATE_ON, }, @@ -100,7 +140,18 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:01:d9:3e:7c-01-0500", }, { + "entity_count": 2, + "device_count": 3, "entity_id": "binary_sensor.sensor_kitchen_smoke", + "unique_id": "00:15:8d:00:01:d9:3e:7c-01-0500-fire", + "state": STATE_OFF, + "entity_category": None, + "device_class": BinarySensorDeviceClass.SMOKE, + "attributes": { + "on": True, + "device_class": "smoke", + "friendly_name": "sensor_kitchen_smoke", + }, "websocket_event": {"fire": True}, "next_state": STATE_ON, }, @@ -125,7 +176,17 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:01:d9:3e:7c-01-0500", }, { + "entity_count": 2, + "device_count": 3, "entity_id": "binary_sensor.sensor_kitchen_smoke_test_mode", + "unique_id": "00:15:8d:00:01:d9:3e:7c-01-0500-in_test_mode", + "state": STATE_OFF, + "entity_category": EntityCategory.DIAGNOSTIC, + "device_class": BinarySensorDeviceClass.SMOKE, + "attributes": { + "device_class": "smoke", + "friendly_name": "sensor_kitchen_smoke Test Mode", + }, "websocket_event": {"test": True}, "next_state": STATE_ON, }, @@ -147,7 +208,17 @@ TEST_DATA = [ "uniqueid": "kitchen-switch", }, { + "entity_count": 1, + "device_count": 2, "entity_id": 
"binary_sensor.kitchen_switch", + "unique_id": "kitchen-switch-flag", + "state": STATE_ON, + "entity_category": None, + "device_class": None, + "attributes": { + "on": True, + "friendly_name": "Kitchen Switch", + }, "websocket_event": {"flag": False}, "next_state": STATE_OFF, }, @@ -174,7 +245,19 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:2b:96:b4-01-0006", }, { + "entity_count": 3, + "device_count": 3, "entity_id": "binary_sensor.back_door", + "unique_id": "00:15:8d:00:02:2b:96:b4-01-0006-open", + "state": STATE_OFF, + "entity_category": None, + "device_class": BinarySensorDeviceClass.OPENING, + "attributes": { + "on": True, + "temperature": 33.0, + "device_class": "opening", + "friendly_name": "Back Door", + }, "websocket_event": {"open": True}, "next_state": STATE_ON, }, @@ -208,7 +291,19 @@ TEST_DATA = [ "uniqueid": "00:17:88:01:03:28:8c:9b-02-0406", }, { + "entity_count": 3, + "device_count": 3, "entity_id": "binary_sensor.motion_sensor_4", + "unique_id": "00:17:88:01:03:28:8c:9b-02-0406-presence", + "state": STATE_OFF, + "entity_category": None, + "device_class": BinarySensorDeviceClass.MOTION, + "attributes": { + "on": True, + "dark": False, + "device_class": "motion", + "friendly_name": "Motion sensor 4", + }, "websocket_event": {"presence": True}, "next_state": STATE_ON, }, @@ -237,7 +332,19 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:2f:07:db-01-0500", }, { + "entity_count": 5, + "device_count": 3, "entity_id": "binary_sensor.water2", + "unique_id": "00:15:8d:00:02:2f:07:db-01-0500-water", + "state": STATE_OFF, + "entity_category": None, + "device_class": BinarySensorDeviceClass.MOISTURE, + "attributes": { + "on": True, + "temperature": 25.0, + "device_class": "moisture", + "friendly_name": "water2", + }, "websocket_event": {"water": True}, "next_state": STATE_ON, }, @@ -270,7 +377,22 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:a5:21:24-01-0101", }, { + "entity_count": 3, + "device_count": 3, "entity_id": "binary_sensor.vibration_1", + "unique_id": "00:15:8d:00:02:a5:21:24-01-0101-vibration", + "state": STATE_ON, + "entity_category": None, + "device_class": BinarySensorDeviceClass.VIBRATION, + "attributes": { + "on": True, + "temperature": 32.0, + "orientation": [10, 1059, 0], + "tiltangle": 83, + "vibrationstrength": 114, + "device_class": "vibration", + "friendly_name": "Vibration 1", + }, "websocket_event": {"vibration": False}, "next_state": STATE_OFF, }, @@ -293,7 +415,17 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:00-00", }, { + "entity_count": 4, + "device_count": 3, "entity_id": "binary_sensor.presence_sensor_tampered", + "unique_id": "00:00:00:00:00:00:00:00-00-tampered", + "state": STATE_OFF, + "entity_category": EntityCategory.DIAGNOSTIC, + "device_class": BinarySensorDeviceClass.TAMPER, + "attributes": { + "device_class": "tamper", + "friendly_name": "Presence sensor Tampered", + }, "websocket_event": {"tampered": True}, "next_state": STATE_ON, }, @@ -316,7 +448,17 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:00-00", }, { + "entity_count": 4, + "device_count": 3, "entity_id": "binary_sensor.presence_sensor_low_battery", + "unique_id": "00:00:00:00:00:00:00:00-00-low_battery", + "state": STATE_OFF, + "entity_category": EntityCategory.DIAGNOSTIC, + "device_class": BinarySensorDeviceClass.BATTERY, + "attributes": { + "device_class": "battery", + "friendly_name": "Presence sensor Low Battery", + }, "websocket_event": {"lowbattery": True}, "next_state": STATE_ON, }, @@ -324,50 +466,99 @@ TEST_DATA = [ ] -@pytest.mark.parametrize("config_entry_options", 
[{CONF_ALLOW_CLIP_SENSOR: True}]) -@pytest.mark.parametrize(("sensor_payload", "expected"), TEST_DATA) +@pytest.mark.parametrize(("sensor_data", "expected"), TEST_DATA) async def test_binary_sensors( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - sensor_ws_data: WebsocketDataType, - expected: dict[str, Any], - snapshot: SnapshotAssertion, + aioclient_mock: AiohttpClientMocker, + mock_deconz_websocket, + sensor_data, + expected, ) -> None: """Test successful creation of binary sensor entities.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.BINARY_SENSOR]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + with patch.dict(DECONZ_WEB_REQUEST, {"sensors": {"1": sensor_data}}): + config_entry = await setup_deconz_integration( + hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: True} + ) + + assert len(hass.states.async_all()) == expected["entity_count"] + + # Verify state data + + sensor = hass.states.get(expected["entity_id"]) + assert sensor.state == expected["state"] + assert sensor.attributes.get(ATTR_DEVICE_CLASS) == expected["device_class"] + assert sensor.attributes == expected["attributes"] + + # Verify entity registry data + + ent_reg_entry = entity_registry.async_get(expected["entity_id"]) + assert ent_reg_entry.entity_category is expected["entity_category"] + assert ent_reg_entry.unique_id == expected["unique_id"] + + # Verify device registry data + + assert ( + len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + == expected["device_count"] + ) # Change state - await sensor_ws_data({"state": expected["websocket_event"]}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": expected["websocket_event"], + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() assert hass.states.get(expected["entity_id"]).state == expected["next_state"] + # Unload entry -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "name": "CLIP presence sensor", - "type": "CLIPPresence", - "state": {"presence": False}, - "config": {}, - "uniqueid": "00:00:00:00:00:00:00:02-00", - } - ], -) -@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: False}]) -@pytest.mark.usefixtures("config_entry_setup") -async def test_not_allow_clip_sensor(hass: HomeAssistant) -> None: - """Test that CLIP sensors are not allowed.""" + await hass.config_entries.async_unload(config_entry.entry_id) + assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE + + # Remove entry + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() assert len(hass.states.async_all()) == 0 -@pytest.mark.parametrize( - "sensor_payload", - [ - { +async def test_not_allow_clip_sensor( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that CLIP sensors are not allowed.""" + data = { + "sensors": { + "1": { + "name": "CLIP presence sensor", + "type": "CLIPPresence", + "state": {"presence": False}, + "config": {}, + "uniqueid": "00:00:00:00:00:00:00:02-00", + }, + } + } + + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration( + hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: False} + ) + + assert len(hass.states.async_all()) == 0 + + +async def test_allow_clip_sensor( + hass: HomeAssistant, 
aioclient_mock: AiohttpClientMocker +) -> None: + """Test that CLIP sensors can be allowed.""" + data = { + "sensors": { "1": { "name": "Presence sensor", "type": "ZHAPresence", @@ -394,13 +585,12 @@ async def test_not_allow_clip_sensor(hass: HomeAssistant) -> None: "uniqueid": "/sensors/3", }, } - ], -) -@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) -async def test_allow_clip_sensor( - hass: HomeAssistant, config_entry_setup: MockConfigEntry -) -> None: - """Test that CLIP sensors can be allowed.""" + } + + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration( + hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: True} + ) assert len(hass.states.async_all()) == 3 assert hass.states.get("binary_sensor.presence_sensor").state == STATE_OFF @@ -410,7 +600,7 @@ async def test_allow_clip_sensor( # Disallow clip sensors hass.config_entries.async_update_entry( - config_entry_setup, options={CONF_ALLOW_CLIP_SENSOR: False} + config_entry, options={CONF_ALLOW_CLIP_SENSOR: False} ) await hass.async_block_till_done() @@ -421,7 +611,7 @@ async def test_allow_clip_sensor( # Allow clip sensors hass.config_entries.async_update_entry( - config_entry_setup, options={CONF_ALLOW_CLIP_SENSOR: True} + config_entry, options={CONF_ALLOW_CLIP_SENSOR: True} ) await hass.async_block_till_done() @@ -430,16 +620,15 @@ async def test_allow_clip_sensor( assert hass.states.get("binary_sensor.clip_flag_boot_time").state == STATE_ON -@pytest.mark.usefixtures("config_entry_setup") async def test_add_new_binary_sensor( - hass: HomeAssistant, - sensor_ws_data: WebsocketDataType, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test that adding a new binary sensor works.""" - assert len(hass.states.async_all()) == 0 - event_added_sensor = { + "t": "event", "e": "added", + "r": "sensors", + "id": "1", "sensor": { "id": "Presence sensor id", "name": "Presence sensor", @@ -449,21 +638,22 @@ async def test_add_new_binary_sensor( "uniqueid": "00:00:00:00:00:00:00:00-00", }, } - await sensor_ws_data(event_added_sensor) + + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 + + await mock_deconz_websocket(data=event_added_sensor) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 1 assert hass.states.get("binary_sensor.presence_sensor").state == STATE_OFF -@pytest.mark.parametrize( - "config_entry_options", [{CONF_MASTER_GATEWAY: True, CONF_ALLOW_NEW_DEVICES: False}] -) async def test_add_new_binary_sensor_ignored_load_entities_on_service_call( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_setup: MockConfigEntry, - deconz_payload: dict[str, Any], - mock_requests: Callable[[str], None], - sensor_ws_data: WebsocketDataType, + aioclient_mock: AiohttpClientMocker, + mock_deconz_websocket, ) -> None: """Test that adding a new binary sensor is not allowed.""" sensor = { @@ -473,24 +663,36 @@ async def test_add_new_binary_sensor_ignored_load_entities_on_service_call( "config": {"on": True, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:00-00", } + event_added_sensor = { + "t": "event", + "e": "added", + "r": "sensors", + "id": "1", + "sensor": sensor, + } + + config_entry = await setup_deconz_integration( + hass, + aioclient_mock, + options={CONF_MASTER_GATEWAY: True, CONF_ALLOW_NEW_DEVICES: False}, + ) assert len(hass.states.async_all()) == 0 - await sensor_ws_data({"e": "added", "sensor": sensor}) + await 
mock_deconz_websocket(data=event_added_sensor) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 assert not hass.states.get("binary_sensor.presence_sensor") assert ( - len( - er.async_entries_for_config_entry( - entity_registry, config_entry_setup.entry_id - ) - ) + len(er.async_entries_for_config_entry(entity_registry, config_entry.entry_id)) == 0 ) - deconz_payload["sensors"]["0"] = sensor - mock_requests() + aioclient_mock.clear_requests() + data = {"config": {}, "groups": {}, "lights": {}, "sensors": {"1": sensor}} + mock_deconz_request(aioclient_mock, config_entry.data, data) await hass.services.async_call(DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH) await hass.async_block_till_done() @@ -499,16 +701,11 @@ async def test_add_new_binary_sensor_ignored_load_entities_on_service_call( assert hass.states.get("binary_sensor.presence_sensor") -@pytest.mark.parametrize( - "config_entry_options", [{CONF_MASTER_GATEWAY: True, CONF_ALLOW_NEW_DEVICES: False}] -) async def test_add_new_binary_sensor_ignored_load_entities_on_options_change( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_setup: MockConfigEntry, - deconz_payload: dict[str, Any], - mock_requests: Callable[[str], None], - sensor_ws_data: WebsocketDataType, + aioclient_mock: AiohttpClientMocker, + mock_deconz_websocket, ) -> None: """Test that adding a new binary sensor is not allowed.""" sensor = { @@ -518,27 +715,39 @@ async def test_add_new_binary_sensor_ignored_load_entities_on_options_change( "config": {"on": True, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:00-00", } + event_added_sensor = { + "t": "event", + "e": "added", + "r": "sensors", + "id": "1", + "sensor": sensor, + } + + config_entry = await setup_deconz_integration( + hass, + aioclient_mock, + options={CONF_MASTER_GATEWAY: True, CONF_ALLOW_NEW_DEVICES: False}, + ) assert len(hass.states.async_all()) == 0 - await sensor_ws_data({"e": "added", "sensor": sensor}) + await mock_deconz_websocket(data=event_added_sensor) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 assert not hass.states.get("binary_sensor.presence_sensor") assert ( - len( - er.async_entries_for_config_entry( - entity_registry, config_entry_setup.entry_id - ) - ) + len(er.async_entries_for_config_entry(entity_registry, config_entry.entry_id)) == 0 ) - deconz_payload["sensors"]["0"] = sensor - mock_requests() + aioclient_mock.clear_requests() + data = {"config": {}, "groups": {}, "lights": {}, "sensors": {"1": sensor}} + mock_deconz_request(aioclient_mock, config_entry.data, data) hass.config_entries.async_update_entry( - config_entry_setup, options={CONF_ALLOW_NEW_DEVICES: True} + config_entry, options={CONF_ALLOW_NEW_DEVICES: True} ) await hass.async_block_till_done() diff --git a/tests/components/deconz/test_button.py b/tests/components/deconz/test_button.py index c649dba5b00..4d85270ddca 100644 --- a/tests/components/deconz/test_button.py +++ b/tests/components/deconz/test_button.py @@ -1,22 +1,31 @@ """deCONZ button platform tests.""" -from collections.abc import Callable -from typing import Any from unittest.mock import patch import pytest -from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from 
homeassistant.helpers import device_registry as dr, entity_registry as er -from .conftest import ConfigEntryFactoryType +from .test_gateway import ( + DECONZ_WEB_REQUEST, + mock_deconz_put_request, + setup_deconz_integration, +) -from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker + +async def test_no_binary_sensors( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that no sensors in deconz results in no sensor entities.""" + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 + + TEST_DATA = [ ( # Store scene button { @@ -33,7 +42,15 @@ TEST_DATA = [ } }, { + "entity_count": 2, + "device_count": 3, "entity_id": "button.light_group_scene_store_current_scene", + "unique_id": "01234E56789A/groups/1/scenes/1-store", + "entity_category": EntityCategory.CONFIG, + "attributes": { + "icon": "mdi:inbox-arrow-down", + "friendly_name": "Light group Scene Store Current Scene", + }, "request": "/groups/1/scenes/1/store", "request_data": {}, }, @@ -67,7 +84,15 @@ TEST_DATA = [ } }, { + "entity_count": 5, + "device_count": 3, "entity_id": "button.aqara_fp1_reset_presence", + "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406-reset_presence", + "entity_category": EntityCategory.CONFIG, + "attributes": { + "device_class": "restart", + "friendly_name": "Aqara FP1 Reset Presence", + }, "request": "/sensors/1/config", "request_data": {"resetpresence": True}, }, @@ -75,24 +100,42 @@ TEST_DATA = [ ] -@pytest.mark.parametrize(("deconz_payload", "expected"), TEST_DATA) +@pytest.mark.parametrize(("raw_data", "expected"), TEST_DATA) async def test_button( hass: HomeAssistant, entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_factory: ConfigEntryFactoryType, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - expected: dict[str, Any], - snapshot: SnapshotAssertion, + raw_data, + expected, ) -> None: """Test successful creation of button entities.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.BUTTON]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + with patch.dict(DECONZ_WEB_REQUEST, raw_data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == expected["entity_count"] + + # Verify state data + + button = hass.states.get(expected["entity_id"]) + assert button.attributes == expected["attributes"] + + # Verify entity registry data + + ent_reg_entry = entity_registry.async_get(expected["entity_id"]) + assert ent_reg_entry.entity_category is expected["entity_category"] + assert ent_reg_entry.unique_id == expected["unique_id"] + + # Verify device registry data + + assert ( + len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + == expected["device_count"] + ) # Verify button press - aioclient_mock = mock_put_request(expected["request"]) + mock_deconz_put_request(aioclient_mock, config_entry.data, expected["request"]) await hass.services.async_call( BUTTON_DOMAIN, @@ -101,3 +144,14 @@ async def test_button( blocking=True, ) assert aioclient_mock.mock_calls[1][2] == expected["request_data"] + + # Unload entry + + await hass.config_entries.async_unload(config_entry.entry_id) + assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE + + # Remove entry + + await 
hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_climate.py b/tests/components/deconz/test_climate.py index e1000f0b4d6..0e51f31cec4 100644 --- a/tests/components/deconz/test_climate.py +++ b/tests/components/deconz/test_climate.py @@ -1,10 +1,8 @@ """deCONZ climate platform tests.""" -from collections.abc import Callable from unittest.mock import patch import pytest -from syrupy import SnapshotAssertion from homeassistant.components.climate import ( ATTR_FAN_MODE, @@ -13,10 +11,15 @@ from homeassistant.components.climate import ( ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, DOMAIN as CLIMATE_DOMAIN, + FAN_AUTO, + FAN_HIGH, + FAN_LOW, + FAN_MEDIUM, FAN_OFF, FAN_ON, PRESET_BOOST, PRESET_COMFORT, + PRESET_ECO, SERVICE_SET_FAN_MODE, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, @@ -27,74 +30,106 @@ from homeassistant.components.climate import ( from homeassistant.components.deconz.climate import ( DECONZ_FAN_SMART, DECONZ_PRESET_AUTO, + DECONZ_PRESET_COMPLEX, + DECONZ_PRESET_HOLIDAY, DECONZ_PRESET_MANUAL, ) from homeassistant.components.deconz.const import CONF_ALLOW_CLIP_SENSOR -from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, STATE_OFF, Platform +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_TEMPERATURE, + STATE_OFF, + STATE_UNAVAILABLE, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er -from .conftest import ConfigEntryFactoryType, WebsocketDataType +from .test_gateway import ( + DECONZ_WEB_REQUEST, + mock_deconz_put_request, + setup_deconz_integration, +) -from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "battery": 59, - "displayflipped": None, - "heatsetpoint": 2100, - "locked": True, - "mountingmode": None, - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "6130553ac247174809bae47144ee23f8", - "lastseen": "2020-11-29T19:31Z", - "manufacturername": "Danfoss", - "modelid": "eTRV0100", - "name": "thermostat", - "state": { - "errorcode": None, - "lastupdated": "2020-11-29T19:28:40.665", - "mountingmodeactive": False, - "on": True, - "temperature": 2102, - "valve": 24, - "windowopen": "Closed", - }, - "swversion": "01.02.0008 01.02", - "type": "ZHAThermostat", - "uniqueid": "14:b4:57:ff:fe:d5:4e:77-01-0201", - } - ], -) +async def test_no_sensors( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that no sensors in deconz results in no climate entities.""" + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 + + async def test_simple_climate_device( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - sensor_ws_data: WebsocketDataType, - snapshot: SnapshotAssertion, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test successful creation of climate entities. This is a simple water heater that only supports setting temperature and on and off. 
""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + data = { + "sensors": { + "0": { + "config": { + "battery": 59, + "displayflipped": None, + "heatsetpoint": 2100, + "locked": True, + "mountingmode": None, + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "6130553ac247174809bae47144ee23f8", + "lastseen": "2020-11-29T19:31Z", + "manufacturername": "Danfoss", + "modelid": "eTRV0100", + "name": "thermostat", + "state": { + "errorcode": None, + "lastupdated": "2020-11-29T19:28:40.665", + "mountingmodeactive": False, + "on": True, + "temperature": 2102, + "valve": 24, + "windowopen": "Closed", + }, + "swversion": "01.02.0008 01.02", + "type": "ZHAThermostat", + "uniqueid": "14:b4:57:ff:fe:d5:4e:77-01-0201", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == 2 + climate_thermostat = hass.states.get("climate.thermostat") + assert climate_thermostat.state == HVACMode.HEAT + assert climate_thermostat.attributes["hvac_modes"] == [ + HVACMode.HEAT, + HVACMode.OFF, + ] + assert climate_thermostat.attributes["current_temperature"] == 21.0 + assert climate_thermostat.attributes["temperature"] == 21.0 + assert climate_thermostat.attributes["locked"] is True + assert hass.states.get("sensor.thermostat_battery").state == "59" + assert climate_thermostat.attributes["hvac_action"] == HVACAction.HEATING # Event signals thermostat configured off - await sensor_ws_data({"state": {"on": False}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "state": {"on": False}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert hass.states.get("climate.thermostat").state == STATE_OFF assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -103,7 +138,16 @@ async def test_simple_climate_device( # Event signals thermostat state on - await sensor_ws_data({"state": {"on": True}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "state": {"on": True}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert hass.states.get("climate.thermostat").state == HVACMode.HEAT assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -112,7 +156,7 @@ async def test_simple_climate_device( # Verify service calls - aioclient_mock = mock_put_request("/sensors/0/config") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") # Service turn on thermostat @@ -145,40 +189,61 @@ async def test_simple_climate_device( ) -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "name": "Thermostat", - "type": "ZHAThermostat", - "state": {"on": True, "temperature": 2260, "valve": 30}, - "config": { - "battery": 100, - "heatsetpoint": 2200, - "mode": "auto", - "offset": 10, - "reachable": True, - }, - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - ], -) async def test_climate_device_without_cooling_support( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - sensor_ws_data: WebsocketDataType, - snapshot: SnapshotAssertion, + hass: HomeAssistant, aioclient_mock: 
AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test successful creation of sensor entities.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + data = { + "sensors": { + "1": { + "name": "Thermostat", + "type": "ZHAThermostat", + "state": {"on": True, "temperature": 2260, "valve": 30}, + "config": { + "battery": 100, + "heatsetpoint": 2200, + "mode": "auto", + "offset": 10, + "reachable": True, + }, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == 2 + climate_thermostat = hass.states.get("climate.thermostat") + assert climate_thermostat.state == HVACMode.AUTO + assert climate_thermostat.attributes["hvac_modes"] == [ + HVACMode.HEAT, + HVACMode.OFF, + HVACMode.AUTO, + ] + assert climate_thermostat.attributes["current_temperature"] == 22.6 + assert climate_thermostat.attributes["temperature"] == 22.0 + assert hass.states.get("sensor.thermostat") is None + assert hass.states.get("sensor.thermostat_battery").state == "100" + assert hass.states.get("climate.presence_sensor") is None + assert hass.states.get("climate.clip_thermostat") is None + assert ( + hass.states.get("climate.thermostat").attributes["hvac_action"] + == HVACAction.HEATING + ) # Event signals thermostat configured off - await sensor_ws_data({"config": {"mode": "off"}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "config": {"mode": "off"}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert hass.states.get("climate.thermostat").state == STATE_OFF assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -187,7 +252,17 @@ async def test_climate_device_without_cooling_support( # Event signals thermostat state on - await sensor_ws_data({"config": {"mode": "other"}, "state": {"on": True}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "config": {"mode": "other"}, + "state": {"on": True}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert hass.states.get("climate.thermostat").state == HVACMode.HEAT assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -196,7 +271,16 @@ async def test_climate_device_without_cooling_support( # Event signals thermostat state off - await sensor_ws_data({"state": {"on": False}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": {"on": False}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert hass.states.get("climate.thermostat").state == STATE_OFF assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -205,7 +289,7 @@ async def test_climate_device_without_cooling_support( # Verify service calls - aioclient_mock = mock_put_request("/sensors/0/config") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/1/config") # Service set HVAC mode to auto @@ -259,7 +343,7 @@ async def test_climate_device_without_cooling_support( # Service set temperature without providing temperature attribute - with pytest.raises(ServiceValidationError): + with pytest.raises(ValueError): await 
hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, @@ -271,53 +355,83 @@ async def test_climate_device_without_cooling_support( blocking=True, ) + await hass.config_entries.async_unload(config_entry.entry_id) + + states = hass.states.async_all() + assert len(states) == 2 + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "battery": 25, - "coolsetpoint": 1111, - "fanmode": None, - "heatsetpoint": 2222, - "mode": "heat", - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "074549903686a77a12ef0f06c499b1ef", - "lastseen": "2020-11-27T13:45Z", - "manufacturername": "Zen Within", - "modelid": "Zen-01", - "name": "Zen-01", - "state": { - "lastupdated": "2020-11-27T13:42:40.863", - "on": False, - "temperature": 2320, - }, - "type": "ZHAThermostat", - "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", - } - ], -) async def test_climate_device_with_cooling_support( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - sensor_ws_data: WebsocketDataType, - snapshot: SnapshotAssertion, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test successful creation of sensor entities.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + data = { + "sensors": { + "0": { + "config": { + "battery": 25, + "coolsetpoint": 1111, + "fanmode": None, + "heatsetpoint": 2222, + "mode": "heat", + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "074549903686a77a12ef0f06c499b1ef", + "lastseen": "2020-11-27T13:45Z", + "manufacturername": "Zen Within", + "modelid": "Zen-01", + "name": "Zen-01", + "state": { + "lastupdated": "2020-11-27T13:42:40.863", + "on": False, + "temperature": 2320, + }, + "type": "ZHAThermostat", + "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == 2 + climate_thermostat = hass.states.get("climate.zen_01") + assert climate_thermostat.state == HVACMode.HEAT + assert climate_thermostat.attributes["hvac_modes"] == [ + HVACMode.HEAT, + HVACMode.OFF, + HVACMode.AUTO, + HVACMode.COOL, + ] + assert climate_thermostat.attributes["current_temperature"] == 23.2 + assert climate_thermostat.attributes["temperature"] == 22.2 + assert hass.states.get("sensor.zen_01_battery").state == "25" + assert ( + hass.states.get("climate.zen_01").attributes["hvac_action"] == HVACAction.IDLE + ) # Event signals thermostat mode cool - await sensor_ws_data({"config": {"mode": "cool"}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "config": {"mode": "cool"}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + await hass.async_block_till_done() + assert hass.states.get("climate.zen_01").state == HVACMode.COOL assert hass.states.get("climate.zen_01").attributes["temperature"] == 11.1 assert ( @@ -326,7 +440,16 @@ async def test_climate_device_with_cooling_support( # 
Event signals thermostat state on - await sensor_ws_data({"state": {"on": True}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "state": {"on": True}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert hass.states.get("climate.zen_01").state == HVACMode.COOL assert ( hass.states.get("climate.zen_01").attributes["hvac_action"] @@ -335,7 +458,7 @@ async def test_climate_device_with_cooling_support( # Verify service calls - aioclient_mock = mock_put_request("/sensors/0/config") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") # Service set temperature to 20 @@ -348,52 +471,71 @@ async def test_climate_device_with_cooling_support( assert aioclient_mock.mock_calls[1][2] == {"coolsetpoint": 2000.0} -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "battery": 25, - "coolsetpoint": None, - "fanmode": "auto", - "heatsetpoint": 2222, - "mode": "heat", - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "074549903686a77a12ef0f06c499b1ef", - "lastseen": "2020-11-27T13:45Z", - "manufacturername": "Zen Within", - "modelid": "Zen-01", - "name": "Zen-01", - "state": { - "lastupdated": "2020-11-27T13:42:40.863", - "on": False, - "temperature": 2320, - }, - "type": "ZHAThermostat", - "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", - } - ], -) async def test_climate_device_with_fan_support( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - sensor_ws_data: WebsocketDataType, - snapshot: SnapshotAssertion, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test successful creation of sensor entities.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + data = { + "sensors": { + "0": { + "config": { + "battery": 25, + "coolsetpoint": None, + "fanmode": "auto", + "heatsetpoint": 2222, + "mode": "heat", + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "074549903686a77a12ef0f06c499b1ef", + "lastseen": "2020-11-27T13:45Z", + "manufacturername": "Zen Within", + "modelid": "Zen-01", + "name": "Zen-01", + "state": { + "lastupdated": "2020-11-27T13:42:40.863", + "on": False, + "temperature": 2320, + }, + "type": "ZHAThermostat", + "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == 2 + climate_thermostat = hass.states.get("climate.zen_01") + assert climate_thermostat.state == HVACMode.HEAT + assert climate_thermostat.attributes["fan_mode"] == FAN_AUTO + assert climate_thermostat.attributes["fan_modes"] == [ + DECONZ_FAN_SMART, + FAN_AUTO, + FAN_HIGH, + FAN_MEDIUM, + FAN_LOW, + FAN_ON, + FAN_OFF, + ] + assert ( + hass.states.get("climate.zen_01").attributes["hvac_action"] == HVACAction.IDLE + ) # Event signals fan mode defaults to off - await sensor_ws_data({"config": {"fanmode": "unsupported"}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "config": {"fanmode": "unsupported"}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert 
hass.states.get("climate.zen_01").attributes["fan_mode"] == FAN_OFF assert ( hass.states.get("climate.zen_01").attributes["hvac_action"] == HVACAction.IDLE @@ -401,7 +543,17 @@ async def test_climate_device_with_fan_support( # Event signals unsupported fan mode - await sensor_ws_data({"config": {"fanmode": "unsupported"}, "state": {"on": True}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "config": {"fanmode": "unsupported"}, + "state": {"on": True}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert hass.states.get("climate.zen_01").attributes["fan_mode"] == FAN_ON assert ( hass.states.get("climate.zen_01").attributes["hvac_action"] @@ -410,7 +562,16 @@ async def test_climate_device_with_fan_support( # Event signals unsupported fan mode - await sensor_ws_data({"config": {"fanmode": "unsupported"}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "config": {"fanmode": "unsupported"}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert hass.states.get("climate.zen_01").attributes["fan_mode"] == FAN_ON assert ( hass.states.get("climate.zen_01").attributes["hvac_action"] @@ -419,7 +580,7 @@ async def test_climate_device_with_fan_support( # Verify service calls - aioclient_mock = mock_put_request("/sensors/0/config") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") # Service set fan mode to off @@ -452,53 +613,75 @@ async def test_climate_device_with_fan_support( ) -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "battery": 25, - "coolsetpoint": None, - "fanmode": None, - "heatsetpoint": 2222, - "mode": "heat", - "preset": "auto", - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "074549903686a77a12ef0f06c499b1ef", - "lastseen": "2020-11-27T13:45Z", - "manufacturername": "Zen Within", - "modelid": "Zen-01", - "name": "Zen-01", - "state": { - "lastupdated": "2020-11-27T13:42:40.863", - "on": False, - "temperature": 2320, - }, - "type": "ZHAThermostat", - "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", - } - ], -) async def test_climate_device_with_preset( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - sensor_ws_data: WebsocketDataType, - config_entry_factory: ConfigEntryFactoryType, - snapshot: SnapshotAssertion, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test successful creation of sensor entities.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + data = { + "sensors": { + "0": { + "config": { + "battery": 25, + "coolsetpoint": None, + "fanmode": None, + "heatsetpoint": 2222, + "mode": "heat", + "preset": "auto", + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "074549903686a77a12ef0f06c499b1ef", + "lastseen": "2020-11-27T13:45Z", + "manufacturername": "Zen Within", + "modelid": "Zen-01", + "name": "Zen-01", + "state": { + "lastupdated": "2020-11-27T13:42:40.863", + "on": False, + "temperature": 2320, + }, + "type": "ZHAThermostat", + "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, 
aioclient_mock) + + assert len(hass.states.async_all()) == 2 + + climate_zen_01 = hass.states.get("climate.zen_01") + assert climate_zen_01.state == HVACMode.HEAT + assert climate_zen_01.attributes["current_temperature"] == 23.2 + assert climate_zen_01.attributes["temperature"] == 22.2 + assert climate_zen_01.attributes["preset_mode"] == DECONZ_PRESET_AUTO + assert climate_zen_01.attributes["preset_modes"] == [ + DECONZ_PRESET_AUTO, + PRESET_BOOST, + PRESET_COMFORT, + DECONZ_PRESET_COMPLEX, + PRESET_ECO, + DECONZ_PRESET_HOLIDAY, + DECONZ_PRESET_MANUAL, + ] + assert ( + hass.states.get("climate.zen_01").attributes["hvac_action"] == HVACAction.IDLE + ) # Event signals deCONZ preset - await sensor_ws_data({"config": {"preset": "manual"}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "config": {"preset": "manual"}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert ( hass.states.get("climate.zen_01").attributes["preset_mode"] == DECONZ_PRESET_MANUAL @@ -506,12 +689,21 @@ async def test_climate_device_with_preset( # Event signals unknown preset - await sensor_ws_data({"config": {"preset": "unsupported"}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "config": {"preset": "unsupported"}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert hass.states.get("climate.zen_01").attributes["preset_mode"] is None # Verify service calls - aioclient_mock = mock_put_request("/sensors/0/config") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") # Service set preset to HASS preset @@ -544,10 +736,12 @@ async def test_climate_device_with_preset( ) -@pytest.mark.parametrize( - "sensor_payload", - [ - { +async def test_clip_climate_device( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test successful creation of sensor entities.""" + data = { + "sensors": { "1": { "name": "Thermostat", "type": "ZHAThermostat", @@ -569,19 +763,18 @@ async def test_climate_device_with_preset( "uniqueid": "00:00:00:00:00:00:00:02-00", }, } - ], -) -@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) -async def test_clip_climate_device( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - snapshot: SnapshotAssertion, -) -> None: - """Test successful creation of sensor entities.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration( + hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: True} + ) + + assert len(hass.states.async_all()) == 3 + assert hass.states.get("climate.clip_thermostat").state == HVACMode.HEAT + assert ( + hass.states.get("climate.clip_thermostat").attributes["hvac_action"] + == HVACAction.HEATING + ) # Disallow clip sensors @@ -590,7 +783,7 @@ async def test_clip_climate_device( ) await hass.async_block_till_done() - assert len(hass.states.async_all()) == 1 + assert len(hass.states.async_all()) == 2 assert not hass.states.get("climate.clip_thermostat") # Allow clip sensors @@ -600,7 +793,7 @@ async def test_clip_climate_device( ) await hass.async_block_till_done() - assert len(hass.states.async_all()) == 2 + 
assert len(hass.states.async_all()) == 3 assert hass.states.get("climate.clip_thermostat").state == HVACMode.HEAT assert ( hass.states.get("climate.clip_thermostat").attributes["hvac_action"] @@ -608,37 +801,46 @@ async def test_clip_climate_device( ) -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "name": "Thermostat", - "type": "ZHAThermostat", - "state": {"on": True, "temperature": 2260, "valve": 30}, - "config": { - "battery": 100, - "heatsetpoint": 2200, - "mode": "auto", - "offset": 10, - "reachable": True, - }, - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") async def test_verify_state_update( - hass: HomeAssistant, - sensor_ws_data: WebsocketDataType, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test that state update properly.""" + data = { + "sensors": { + "1": { + "name": "Thermostat", + "type": "ZHAThermostat", + "state": {"on": True, "temperature": 2260, "valve": 30}, + "config": { + "battery": 100, + "heatsetpoint": 2200, + "mode": "auto", + "offset": 10, + "reachable": True, + }, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + assert hass.states.get("climate.thermostat").state == HVACMode.AUTO assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] == HVACAction.HEATING ) - await sensor_ws_data({"state": {"on": False}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": {"on": False}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert hass.states.get("climate.thermostat").state == HVACMode.AUTO assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -646,14 +848,15 @@ async def test_verify_state_update( ) -@pytest.mark.usefixtures("config_entry_setup") async def test_add_new_climate_device( - hass: HomeAssistant, - sensor_ws_data: WebsocketDataType, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test that adding a new climate device works.""" event_added_sensor = { + "t": "event", "e": "added", + "r": "sensors", + "id": "1", "sensor": { "id": "Thermostat id", "name": "Thermostat", @@ -670,9 +873,11 @@ async def test_add_new_climate_device( }, } + await setup_deconz_integration(hass, aioclient_mock) assert len(hass.states.async_all()) == 0 - await sensor_ws_data(event_added_sensor) + await mock_deconz_websocket(data=event_added_sensor) + await hass.async_block_till_done() assert len(hass.states.async_all()) == 2 assert hass.states.get("climate.thermostat").state == HVACMode.AUTO @@ -683,115 +888,141 @@ async def test_add_new_climate_device( ) -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "name": "CLIP thermostat sensor", - "type": "CLIPThermostat", - "state": {}, - "config": {}, - "uniqueid": "00:00:00:00:00:00:00:00-00", - }, - ], -) -@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: False}]) -@pytest.mark.usefixtures("config_entry_setup") -async def test_not_allow_clip_thermostat(hass: HomeAssistant) -> None: +async def test_not_allow_clip_thermostat( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: """Test that CLIP thermostats are not allowed.""" + data = { + "sensors": { + "1": { + "name": "CLIP thermostat sensor", + "type": "CLIPThermostat", + "state": {}, + "config": {}, + "uniqueid": 
"00:00:00:00:00:00:00:00-00", + }, + } + } + + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration( + hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: False} + ) + assert len(hass.states.async_all()) == 0 -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "battery": 25, - "heatsetpoint": 2222, - "mode": None, - "preset": "auto", - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "074549903686a77a12ef0f06c499b1ef", - "lastseen": "2020-11-27T13:45Z", - "manufacturername": "Zen Within", - "modelid": "Zen-01", - "name": "Zen-01", - "state": {"lastupdated": "none", "on": None, "temperature": 2290}, - "type": "ZHAThermostat", - "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_no_mode_no_state(hass: HomeAssistant) -> None: +async def test_no_mode_no_state( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket +) -> None: """Test that a climate device without mode and state works.""" + data = { + "sensors": { + "0": { + "config": { + "battery": 25, + "heatsetpoint": 2222, + "mode": None, + "preset": "auto", + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "074549903686a77a12ef0f06c499b1ef", + "lastseen": "2020-11-27T13:45Z", + "manufacturername": "Zen Within", + "modelid": "Zen-01", + "name": "Zen-01", + "state": {"lastupdated": "none", "on": None, "temperature": 2290}, + "type": "ZHAThermostat", + "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 2 climate_thermostat = hass.states.get("climate.zen_01") + assert climate_thermostat.state is STATE_OFF assert climate_thermostat.attributes["preset_mode"] is DECONZ_PRESET_AUTO assert climate_thermostat.attributes["hvac_action"] is HVACAction.IDLE + # Verify service calls + mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") + -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "battery": 58, - "heatsetpoint": 2200, - "locked": False, - "mode": "heat", - "offset": -200, - "on": True, - "preset": "manual", - "reachable": True, - "schedule": {}, - "schedule_on": False, - "setvalve": False, - "windowopen_set": False, - }, - "ep": 1, - "etag": "404c15db68c318ebe7832ce5aa3d1e30", - "lastannounced": "2022-08-31T03:00:59Z", - "lastseen": "2022-09-19T11:58Z", - "manufacturername": "_TZE200_b6wax7g0", - "modelid": "TS0601", - "name": "Thermostat", - "state": { - "lastupdated": "2022-09-19T11:58:24.204", - "lowbattery": False, - "on": False, - "temperature": 2200, - "valve": 0, - }, - "type": "ZHAThermostat", - "uniqueid": "84:fd:27:ff:fe:8a:eb:89-01-0201", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") async def test_boost_mode( - hass: HomeAssistant, - sensor_ws_data: WebsocketDataType, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test that a climate device with boost mode and different state works.""" + data = { + "sensors": { + "0": { + "config": { + "battery": 58, + "heatsetpoint": 2200, + "locked": False, + "mode": "heat", + "offset": -200, + "on": True, + "preset": "manual", + "reachable": True, + "schedule": {}, + "schedule_on": False, + "setvalve": False, + "windowopen_set": False, + }, + "ep": 1, + "etag": "404c15db68c318ebe7832ce5aa3d1e30", + "lastannounced": 
"2022-08-31T03:00:59Z", + "lastseen": "2022-09-19T11:58Z", + "manufacturername": "_TZE200_b6wax7g0", + "modelid": "TS0601", + "name": "Thermostat", + "state": { + "lastupdated": "2022-09-19T11:58:24.204", + "lowbattery": False, + "on": False, + "temperature": 2200, + "valve": 0, + }, + "type": "ZHAThermostat", + "uniqueid": "84:fd:27:ff:fe:8a:eb:89-01-0201", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 3 climate_thermostat = hass.states.get("climate.thermostat") + assert climate_thermostat.state == HVACMode.HEAT + assert climate_thermostat.attributes["preset_mode"] is DECONZ_PRESET_MANUAL assert climate_thermostat.attributes["hvac_action"] is HVACAction.IDLE # Event signals thermostat preset boost and valve 100 (real data) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + "config": {"preset": "boost"}, + "state": {"valve": 100}, + } - await sensor_ws_data({"config": {"preset": "boost"}, "state": {"valve": 100}}) + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() climate_thermostat = hass.states.get("climate.thermostat") assert climate_thermostat.attributes["preset_mode"] is PRESET_BOOST assert climate_thermostat.attributes["hvac_action"] is HVACAction.HEATING + + # Verify service calls + mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") diff --git a/tests/components/deconz/test_config_flow.py b/tests/components/deconz/test_config_flow.py index ce13bbfa5d4..6da940e0918 100644 --- a/tests/components/deconz/test_config_flow.py +++ b/tests/components/deconz/test_config_flow.py @@ -20,16 +20,20 @@ from homeassistant.components.deconz.const import ( DOMAIN as DECONZ_DOMAIN, HASSIO_CONFIGURATION_URL, ) +from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.ssdp import ATTR_UPNP_MANUFACTURER_URL, ATTR_UPNP_SERIAL -from homeassistant.config_entries import SOURCE_HASSIO, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import ( + SOURCE_HASSIO, + SOURCE_REAUTH, + SOURCE_SSDP, + SOURCE_USER, +) from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT, CONTENT_TYPE_JSON from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.service_info.hassio import HassioServiceInfo -from .conftest import API_KEY, BRIDGE_ID +from .test_gateway import API_KEY, BRIDGEID, setup_deconz_integration -from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker BAD_BRIDGEID = "0000000000000000" @@ -43,7 +47,7 @@ async def test_flow_discovered_bridges( aioclient_mock.get( pydeconz.utils.URL_DISCOVER, json=[ - {"id": BRIDGE_ID, "internalipaddress": "1.2.3.4", "internalport": 80}, + {"id": BRIDGEID, "internalipaddress": "1.2.3.4", "internalport": 80}, {"id": "1234E567890A", "internalipaddress": "5.6.7.8", "internalport": 80}, ], headers={"content-type": CONTENT_TYPE_JSON}, @@ -74,7 +78,7 @@ async def test_flow_discovered_bridges( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == BRIDGE_ID + assert result["title"] == BRIDGEID assert result["data"] == { CONF_HOST: "1.2.3.4", CONF_PORT: 80, @@ -88,7 +92,7 @@ async def test_flow_manual_configuration_decision( """Test that config flow for one discovered bridge works.""" aioclient_mock.get( pydeconz.utils.URL_DISCOVER, - json=[{"id": BRIDGE_ID, 
"internalipaddress": "1.2.3.4", "internalport": 80}], + json=[{"id": BRIDGEID, "internalipaddress": "1.2.3.4", "internalport": 80}], headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -119,7 +123,7 @@ async def test_flow_manual_configuration_decision( aioclient_mock.get( f"http://1.2.3.4:80/api/{API_KEY}/config", - json={"bridgeid": BRIDGE_ID}, + json={"bridgeid": BRIDGEID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -128,7 +132,7 @@ async def test_flow_manual_configuration_decision( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == BRIDGE_ID + assert result["title"] == BRIDGEID assert result["data"] == { CONF_HOST: "1.2.3.4", CONF_PORT: 80, @@ -170,7 +174,7 @@ async def test_flow_manual_configuration( aioclient_mock.get( f"http://1.2.3.4:80/api/{API_KEY}/config", - json={"bridgeid": BRIDGE_ID}, + json={"bridgeid": BRIDGEID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -179,7 +183,7 @@ async def test_flow_manual_configuration( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == BRIDGE_ID + assert result["title"] == BRIDGEID assert result["data"] == { CONF_HOST: "1.2.3.4", CONF_PORT: 80, @@ -218,11 +222,11 @@ async def test_manual_configuration_after_discovery_ResponseError( async def test_manual_configuration_update_configuration( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that manual configuration can update existing config entry.""" + config_entry = await setup_deconz_integration(hass, aioclient_mock) + aioclient_mock.get( pydeconz.utils.URL_DISCOVER, json=[], @@ -252,7 +256,7 @@ async def test_manual_configuration_update_configuration( aioclient_mock.get( f"http://2.3.4.5:80/api/{API_KEY}/config", - json={"bridgeid": BRIDGE_ID}, + json={"bridgeid": BRIDGEID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -262,14 +266,15 @@ async def test_manual_configuration_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" + assert config_entry.data[CONF_HOST] == "2.3.4.5" -@pytest.mark.usefixtures("config_entry_setup") async def test_manual_configuration_dont_update_configuration( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that _create_entry work and that bridgeid can be requested.""" + await setup_deconz_integration(hass, aioclient_mock) + aioclient_mock.get( pydeconz.utils.URL_DISCOVER, json=[], @@ -299,7 +304,7 @@ async def test_manual_configuration_dont_update_configuration( aioclient_mock.get( f"http://1.2.3.4:80/api/{API_KEY}/config", - json={"bridgeid": BRIDGE_ID}, + json={"bridgeid": BRIDGEID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -362,15 +367,12 @@ async def test_manual_configuration_timeout_get_bridge( ], ) async def test_link_step_fails( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - raised_error: Exception, - error_string: str, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, raised_error, error_string ) -> None: """Test config flow should abort if no API key was possible to retrieve.""" aioclient_mock.get( pydeconz.utils.URL_DISCOVER, - json=[{"id": BRIDGE_ID, "internalipaddress": "1.2.3.4", "internalport": 80}], + json=[{"id": BRIDGEID, "internalipaddress": "1.2.3.4", "internalport": 80}], headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -397,12 +399,17 @@ async def 
test_link_step_fails( async def test_reauth_flow_update_configuration( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Verify reauth flow can update gateway API key.""" - result = await config_entry_setup.start_reauth_flow(hass) + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + result = await hass.config_entries.flow.async_init( + DECONZ_DOMAIN, + data=config_entry.data, + context={"source": SOURCE_REAUTH}, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "link" @@ -416,7 +423,7 @@ async def test_reauth_flow_update_configuration( aioclient_mock.get( f"http://1.2.3.4:80/api/{new_api_key}/config", - json={"bridgeid": BRIDGE_ID}, + json={"bridgeid": BRIDGEID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -426,7 +433,7 @@ async def test_reauth_flow_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry_setup.data[CONF_API_KEY] == new_api_key + assert config_entry.data[CONF_API_KEY] == new_api_key async def test_flow_ssdp_discovery( @@ -441,7 +448,7 @@ async def test_flow_ssdp_discovery( ssdp_location="http://1.2.3.4:80/", upnp={ ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGE_ID, + ATTR_UPNP_SERIAL: BRIDGEID, }, ), context={"source": SOURCE_SSDP}, @@ -465,7 +472,7 @@ async def test_flow_ssdp_discovery( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == BRIDGE_ID + assert result["title"] == BRIDGEID assert result["data"] == { CONF_HOST: "1.2.3.4", CONF_PORT: 80, @@ -474,9 +481,11 @@ async def test_flow_ssdp_discovery( async def test_ssdp_discovery_update_configuration( - hass: HomeAssistant, config_entry_setup: MockConfigEntry + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test if a discovered bridge is configured but updates with new attributes.""" + config_entry = await setup_deconz_integration(hass, aioclient_mock) + with patch( "homeassistant.components.deconz.async_setup_entry", return_value=True, @@ -489,7 +498,7 @@ async def test_ssdp_discovery_update_configuration( ssdp_location="http://2.3.4.5:80/", upnp={ ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGE_ID, + ATTR_UPNP_SERIAL: BRIDGEID, }, ), context={"source": SOURCE_SSDP}, @@ -498,14 +507,15 @@ async def test_ssdp_discovery_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" + assert config_entry.data[CONF_HOST] == "2.3.4.5" assert len(mock_setup_entry.mock_calls) == 1 async def test_ssdp_discovery_dont_update_configuration( - hass: HomeAssistant, config_entry_setup: MockConfigEntry + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test if a discovered bridge has already been configured.""" + config_entry = await setup_deconz_integration(hass, aioclient_mock) result = await hass.config_entries.flow.async_init( DECONZ_DOMAIN, @@ -515,7 +525,7 @@ async def test_ssdp_discovery_dont_update_configuration( ssdp_location="http://1.2.3.4:80/", upnp={ ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGE_ID, + ATTR_UPNP_SERIAL: BRIDGEID, }, ), context={"source": SOURCE_SSDP}, @@ -523,14 +533,17 @@ async def test_ssdp_discovery_dont_update_configuration( assert result["type"] is 
FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" + assert config_entry.data[CONF_HOST] == "1.2.3.4" -@pytest.mark.parametrize("config_entry_source", [SOURCE_HASSIO]) async def test_ssdp_discovery_dont_update_existing_hassio_configuration( - hass: HomeAssistant, config_entry_setup: MockConfigEntry + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test to ensure the SSDP discovery does not update an Hass.io entry.""" + config_entry = await setup_deconz_integration( + hass, aioclient_mock, source=SOURCE_HASSIO + ) + result = await hass.config_entries.flow.async_init( DECONZ_DOMAIN, data=ssdp.SsdpServiceInfo( @@ -539,7 +552,7 @@ async def test_ssdp_discovery_dont_update_existing_hassio_configuration( ssdp_location="http://1.2.3.4:80/", upnp={ ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGE_ID, + ATTR_UPNP_SERIAL: BRIDGEID, }, ), context={"source": SOURCE_SSDP}, @@ -547,7 +560,7 @@ async def test_ssdp_discovery_dont_update_existing_hassio_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" + assert config_entry.data[CONF_HOST] == "1.2.3.4" async def test_flow_hassio_discovery(hass: HomeAssistant) -> None: @@ -559,7 +572,7 @@ async def test_flow_hassio_discovery(hass: HomeAssistant) -> None: "addon": "Mock Addon", CONF_HOST: "mock-deconz", CONF_PORT: 80, - CONF_SERIAL: BRIDGE_ID, + CONF_SERIAL: BRIDGEID, CONF_API_KEY: API_KEY, }, name="Mock Addon", @@ -597,10 +610,11 @@ async def test_flow_hassio_discovery(hass: HomeAssistant) -> None: async def test_hassio_discovery_update_configuration( - hass: HomeAssistant, - config_entry_setup: MockConfigEntry, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test we can update an existing config entry.""" + config_entry = await setup_deconz_integration(hass, aioclient_mock) + with patch( "homeassistant.components.deconz.async_setup_entry", return_value=True, @@ -612,7 +626,7 @@ async def test_hassio_discovery_update_configuration( CONF_HOST: "2.3.4.5", CONF_PORT: 8080, CONF_API_KEY: "updated", - CONF_SERIAL: BRIDGE_ID, + CONF_SERIAL: BRIDGEID, }, name="Mock Addon", slug="deconz", @@ -624,15 +638,18 @@ async def test_hassio_discovery_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" - assert config_entry_setup.data[CONF_PORT] == 8080 - assert config_entry_setup.data[CONF_API_KEY] == "updated" + assert config_entry.data[CONF_HOST] == "2.3.4.5" + assert config_entry.data[CONF_PORT] == 8080 + assert config_entry.data[CONF_API_KEY] == "updated" assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.usefixtures("config_entry_setup") -async def test_hassio_discovery_dont_update_configuration(hass: HomeAssistant) -> None: +async def test_hassio_discovery_dont_update_configuration( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: """Test we can update an existing config entry.""" + await setup_deconz_integration(hass, aioclient_mock) + result = await hass.config_entries.flow.async_init( DECONZ_DOMAIN, data=HassioServiceInfo( @@ -640,7 +657,7 @@ async def test_hassio_discovery_dont_update_configuration(hass: HomeAssistant) - CONF_HOST: "1.2.3.4", CONF_PORT: 80, CONF_API_KEY: API_KEY, - CONF_SERIAL: BRIDGE_ID, + CONF_SERIAL: BRIDGEID, }, name="Mock Addon", 
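# Here the discovered host, port, API key and serial match the existing entry
# exactly, so the flow is expected to abort as already configured without
# changing the entry data (the unchanged assertions are outside this hunk).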
slug="deconz", @@ -654,10 +671,12 @@ async def test_hassio_discovery_dont_update_configuration(hass: HomeAssistant) - async def test_option_flow( - hass: HomeAssistant, config_entry_setup: MockConfigEntry + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test config flow options.""" - result = await hass.config_entries.options.async_init(config_entry_setup.entry_id) + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "deconz_devices" diff --git a/tests/components/deconz/test_cover.py b/tests/components/deconz/test_cover.py index 47f8083798e..69452c3285e 100644 --- a/tests/components/deconz/test_cover.py +++ b/tests/components/deconz/test_cover.py @@ -1,13 +1,10 @@ """deCONZ cover platform tests.""" -from collections.abc import Callable from unittest.mock import patch -import pytest -from syrupy import SnapshotAssertion - from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, + ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, DOMAIN as COVER_DOMAIN, @@ -19,61 +16,81 @@ from homeassistant.components.cover import ( SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, - CoverState, ) -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_CLOSED, + STATE_OPEN, + STATE_UNAVAILABLE, +) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from .conftest import ConfigEntryFactoryType, WebsocketDataType +from .test_gateway import ( + DECONZ_WEB_REQUEST, + mock_deconz_put_request, + setup_deconz_integration, +) -from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.mark.parametrize( - "light_payload", - [ - { - "0": { +async def test_no_covers( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that no cover entities are created.""" + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 + + +async def test_cover( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket +) -> None: + """Test that all supported cover entities are created.""" + data = { + "lights": { + "1": { "name": "Window covering device", "type": "Window covering device", "state": {"lift": 100, "open": False, "reachable": True}, "modelid": "lumi.curtain", "uniqueid": "00:00:00:00:00:00:00:01-00", }, - "1": { + "2": { "name": "Unsupported cover", "type": "Not a cover", "state": {"reachable": True}, "uniqueid": "00:00:00:00:00:00:00:02-00", }, } - ], -) -async def test_cover( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - light_ws_data: WebsocketDataType, - snapshot: SnapshotAssertion, -) -> None: - """Test that all supported cover entities are created.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.COVER]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == 2 + cover = hass.states.get("cover.window_covering_device") + assert cover.state == 
STATE_CLOSED + assert cover.attributes[ATTR_CURRENT_POSITION] == 0 + assert not hass.states.get("cover.unsupported_cover") # Event signals cover is open - await light_ws_data({"state": {"lift": 0, "open": True}}) + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"lift": 0, "open": True}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + cover = hass.states.get("cover.window_covering_device") - assert cover.state == CoverState.OPEN + assert cover.state == STATE_OPEN assert cover.attributes[ATTR_CURRENT_POSITION] == 100 # Verify service calls for cover - aioclient_mock = mock_put_request("/lights/0/state") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") # Service open cover @@ -115,46 +132,56 @@ async def test_cover( ) assert aioclient_mock.mock_calls[4][2] == {"stop": True} + await hass.config_entries.async_unload(config_entry.entry_id) + + states = hass.states.async_all() + assert len(states) == 2 + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + -@pytest.mark.parametrize( - "light_payload", - [ - { - "etag": "87269755b9b3a046485fdae8d96b252c", - "lastannounced": None, - "lastseen": "2020-08-01T16:22:05Z", - "manufacturername": "AXIS", - "modelid": "Gear", - "name": "Covering device", - "state": { - "bri": 0, - "lift": 0, - "on": False, - "open": True, - "reachable": True, - "tilt": 0, - }, - "swversion": "100-5.3.5.1122", - "type": "Window covering device", - "uniqueid": "00:24:46:00:00:12:34:56-01", - } - ], -) async def test_tilt_cover( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - snapshot: SnapshotAssertion, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that tilting a cover works.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.COVER]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + data = { + "lights": { + "0": { + "etag": "87269755b9b3a046485fdae8d96b252c", + "lastannounced": None, + "lastseen": "2020-08-01T16:22:05Z", + "manufacturername": "AXIS", + "modelid": "Gear", + "name": "Covering device", + "state": { + "bri": 0, + "lift": 0, + "on": False, + "open": True, + "reachable": True, + "tilt": 0, + }, + "swversion": "100-5.3.5.1122", + "type": "Window covering device", + "uniqueid": "00:24:46:00:00:12:34:56-01", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == 1 + covering_device = hass.states.get("cover.covering_device") + assert covering_device.state == STATE_OPEN + assert covering_device.attributes[ATTR_CURRENT_TILT_POSITION] == 100 # Verify service calls for tilting cover - aioclient_mock = mock_put_request("/lights/0/state") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") # Service set tilt cover @@ -197,45 +224,44 @@ async def test_tilt_cover( assert aioclient_mock.mock_calls[4][2] == {"stop": True} -@pytest.mark.parametrize( - "light_payload", - [ - { - "etag": "4cefc909134c8e99086b55273c2bde67", - "hascolor": False, - "lastannounced": "2022-08-08T12:06:18Z", - "lastseen": 
"2022-08-14T14:22Z", - "manufacturername": "Keen Home Inc", - "modelid": "SV01-410-MP-1.0", - "name": "Vent", - "state": { - "alert": "none", - "bri": 242, - "on": False, - "reachable": True, - "sat": 10, - }, - "swversion": "0x00000012", - "type": "Level controllable output", - "uniqueid": "00:22:a3:00:00:00:00:00-01", - } - ], -) async def test_level_controllable_output_cover( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - snapshot: SnapshotAssertion, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that tilting a cover works.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.COVER]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + data = { + "lights": { + "0": { + "etag": "4cefc909134c8e99086b55273c2bde67", + "hascolor": False, + "lastannounced": "2022-08-08T12:06:18Z", + "lastseen": "2022-08-14T14:22Z", + "manufacturername": "Keen Home Inc", + "modelid": "SV01-410-MP-1.0", + "name": "Vent", + "state": { + "alert": "none", + "bri": 242, + "on": False, + "reachable": True, + "sat": 10, + }, + "swversion": "0x00000012", + "type": "Level controllable output", + "uniqueid": "00:22:a3:00:00:00:00:00-01", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == 1 + covering_device = hass.states.get("cover.vent") + assert covering_device.state == STATE_OPEN + assert covering_device.attributes[ATTR_CURRENT_TILT_POSITION] == 97 # Verify service calls for tilting cover - aioclient_mock = mock_put_request("/lights/0/state") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") # Service open cover diff --git a/tests/components/deconz/test_deconz_event.py b/tests/components/deconz/test_deconz_event.py index 8bf7bb146d1..1193f348e38 100644 --- a/tests/components/deconz/test_deconz_event.py +++ b/tests/components/deconz/test_deconz_event.py @@ -1,11 +1,12 @@ """Test deCONZ remote events.""" +from unittest.mock import patch + from pydeconz.models.sensor.ancillary_control import ( AncillaryControlAction, AncillaryControlPanel, ) from pydeconz.models.sensor.presence import PresenceStatePresenceEvent -import pytest from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN from homeassistant.components.deconz.deconz_event import ( @@ -17,19 +18,31 @@ from homeassistant.components.deconz.deconz_event import ( CONF_DECONZ_RELATIVE_ROTARY_EVENT, RELATIVE_ROTARY_DECONZ_TO_EVENT, ) -from homeassistant.const import CONF_DEVICE_ID, CONF_EVENT, CONF_ID, CONF_UNIQUE_ID +from homeassistant.const import ( + CONF_DEVICE_ID, + CONF_EVENT, + CONF_ID, + CONF_UNIQUE_ID, + STATE_UNAVAILABLE, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from .conftest import WebsocketDataType +from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration -from tests.common import MockConfigEntry, async_capture_events +from tests.common import async_capture_events +from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.mark.parametrize( - "sensor_payload", - [ - { +async def test_deconz_events( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + aioclient_mock: AiohttpClientMocker, + mock_deconz_websocket, +) -> None: + """Test successful 
creation of deconz events.""" + data = { + "sensors": { "1": { "name": "Switch 1", "type": "ZHASwitch", @@ -66,23 +79,14 @@ from tests.common import MockConfigEntry, async_capture_events "uniqueid": "00:00:00:00:00:00:00:05-00", }, } - ], -) -async def test_deconz_events( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - config_entry_setup: MockConfigEntry, - sensor_ws_data: WebsocketDataType, -) -> None: - """Test successful creation of deconz events.""" + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 3 # 5 switches + 2 additional devices for deconz service and host assert ( - len( - dr.async_entries_for_config_entry( - device_registry, config_entry_setup.entry_id - ) - ) + len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) == 7 ) assert hass.states.get("sensor.switch_2_battery").state == "100" @@ -91,7 +95,15 @@ async def test_deconz_events( captured_events = async_capture_events(hass, CONF_DECONZ_EVENT) - await sensor_ws_data({"id": "1", "state": {"buttonevent": 2000}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": {"buttonevent": 2000}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -105,7 +117,15 @@ async def test_deconz_events( "device_id": device.id, } - await sensor_ws_data({"id": "3", "state": {"buttonevent": 2000}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "3", + "state": {"buttonevent": 2000}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:03")} @@ -120,7 +140,15 @@ async def test_deconz_events( "device_id": device.id, } - await sensor_ws_data({"id": "4", "state": {"gesture": 0}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "4", + "state": {"gesture": 0}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:04")} @@ -136,10 +164,14 @@ async def test_deconz_events( } event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", "id": "5", "state": {"buttonevent": 6002, "angle": 110, "xy": [0.5982, 0.3897]}, } - await sensor_ws_data(event_changed_sensor) + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:05")} @@ -157,14 +189,39 @@ async def test_deconz_events( # Unsupported event - await sensor_ws_data({"id": "1", "name": "other name"}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "name": "other name", + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert len(captured_events) == 4 + await hass.config_entries.async_unload(config_entry.entry_id) -@pytest.mark.parametrize( - "alarm_system_payload", - [ - { + states = hass.states.async_all() + assert len(hass.states.async_all()) == 3 + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) 
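# --- Illustrative sketch, not part of the patch -----------------------------
# This unload/remove teardown recurs across the reverted tests: unloading the
# entry leaves every entity in STATE_UNAVAILABLE, and removing it drops them
# all. A hypothetical helper (names are assumptions) capturing that pattern:
async def _assert_unload_then_remove(hass, config_entry, expected_states: int) -> None:
    """Unload then remove a deCONZ config entry and assert the state lifecycle."""
    await hass.config_entries.async_unload(config_entry.entry_id)
    states = hass.states.async_all()
    assert len(states) == expected_states
    for state in states:
        assert state.state == STATE_UNAVAILABLE
    await hass.config_entries.async_remove(config_entry.entry_id)
    await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 0
# -----------------------------------------------------------------------------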
+ await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + +async def test_deconz_alarm_events( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + aioclient_mock: AiohttpClientMocker, + mock_deconz_websocket, +) -> None: + """Test successful creation of deconz alarm events.""" + data = { + "alarmsystems": { "0": { "name": "default", "config": { @@ -191,55 +248,43 @@ async def test_deconz_events( }, }, } - } - ], -) -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "battery": 95, - "enrolled": 1, - "on": True, - "pending": [], - "reachable": True, - }, - "ep": 1, - "etag": "5aaa1c6bae8501f59929539c6e8f44d6", - "lastseen": "2021-07-25T18:07Z", - "manufacturername": "lk", - "modelid": "ZB-KeypadGeneric-D0002", - "name": "Keypad", - "state": { - "action": "invalid_code", - "lastupdated": "2021-07-25T18:02:51.172", - "lowbattery": False, - "panel": "exit_delay", - "seconds_remaining": 55, - "tampered": False, - }, - "swversion": "3.13", - "type": "ZHAAncillaryControl", - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - ], -) -async def test_deconz_alarm_events( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - config_entry_setup: MockConfigEntry, - sensor_ws_data: WebsocketDataType, -) -> None: - """Test successful creation of deconz alarm events.""" + }, + "sensors": { + "1": { + "config": { + "battery": 95, + "enrolled": 1, + "on": True, + "pending": [], + "reachable": True, + }, + "ep": 1, + "etag": "5aaa1c6bae8501f59929539c6e8f44d6", + "lastseen": "2021-07-25T18:07Z", + "manufacturername": "lk", + "modelid": "ZB-KeypadGeneric-D0002", + "name": "Keypad", + "state": { + "action": "invalid_code", + "lastupdated": "2021-07-25T18:02:51.172", + "lowbattery": False, + "panel": "exit_delay", + "seconds_remaining": 55, + "tampered": False, + }, + "swversion": "3.13", + "type": "ZHAAncillaryControl", + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + }, + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 4 # 1 alarm control device + 2 additional devices for deconz service and host assert ( - len( - dr.async_entries_for_config_entry( - device_registry, config_entry_setup.entry_id - ) - ) + len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) == 3 ) @@ -247,7 +292,15 @@ async def test_deconz_alarm_events( # Emergency event - await sensor_ws_data({"state": {"action": AncillaryControlAction.EMERGENCY}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": {"action": AncillaryControlAction.EMERGENCY}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -263,7 +316,15 @@ async def test_deconz_alarm_events( # Fire event - await sensor_ws_data({"state": {"action": AncillaryControlAction.FIRE}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": {"action": AncillaryControlAction.FIRE}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -279,7 +340,15 @@ async def test_deconz_alarm_events( # Invalid code event - await sensor_ws_data({"state": {"action": AncillaryControlAction.INVALID_CODE}}) + event_changed_sensor = { 
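# Routed by t/e/r/id to keypad sensor "1"; of the AncillaryControl actions only
# emergency, fire, invalid_code and panic produce a captured
# CONF_DECONZ_ALARM_EVENT (armed_away and panel-only changes are ignored
# further down).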
+ "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": {"action": AncillaryControlAction.INVALID_CODE}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -295,7 +364,15 @@ async def test_deconz_alarm_events( # Panic event - await sensor_ws_data({"state": {"action": AncillaryControlAction.PANIC}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": {"action": AncillaryControlAction.PANIC}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -311,57 +388,84 @@ async def test_deconz_alarm_events( # Only care for changes to specific action events - await sensor_ws_data({"state": {"action": AncillaryControlAction.ARMED_AWAY}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": {"action": AncillaryControlAction.ARMED_AWAY}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert len(captured_events) == 4 # Only care for action events - await sensor_ws_data({"state": {"panel": AncillaryControlPanel.ARMED_AWAY}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": {"panel": AncillaryControlPanel.ARMED_AWAY}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert len(captured_events) == 4 + await hass.config_entries.async_unload(config_entry.entry_id) + + states = hass.states.async_all() + assert len(hass.states.async_all()) == 4 + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "devicemode": "undirected", - "on": True, - "reachable": True, - "sensitivity": 3, - "triggerdistance": "medium", - }, - "etag": "13ff209f9401b317987d42506dd4cd79", - "lastannounced": None, - "lastseen": "2022-06-28T23:13Z", - "manufacturername": "aqara", - "modelid": "lumi.motion.ac01", - "name": "Aqara FP1", - "state": { - "lastupdated": "2022-06-28T23:13:38.577", - "presence": True, - "presenceevent": "leave", - }, - "swversion": "20210121", - "type": "ZHAPresence", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", - } - ], -) async def test_deconz_presence_events( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - config_entry_setup: MockConfigEntry, - sensor_ws_data: WebsocketDataType, + aioclient_mock: AiohttpClientMocker, + mock_deconz_websocket, ) -> None: """Test successful creation of deconz presence events.""" + data = { + "sensors": { + "1": { + "config": { + "devicemode": "undirected", + "on": True, + "reachable": True, + "sensitivity": 3, + "triggerdistance": "medium", + }, + "etag": "13ff209f9401b317987d42506dd4cd79", + "lastannounced": None, + "lastseen": "2022-06-28T23:13Z", + "manufacturername": "aqara", + "modelid": "lumi.motion.ac01", + "name": "Aqara FP1", + "state": { + "lastupdated": "2022-06-28T23:13:38.577", + "presence": True, + "presenceevent": "leave", + }, + "swversion": "20210121", + "type": "ZHAPresence", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", + } + } + } + with 
patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 5 assert ( - len( - dr.async_entries_for_config_entry( - device_registry, config_entry_setup.entry_id - ) - ) + len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) == 3 ) @@ -381,7 +485,15 @@ async def test_deconz_presence_events( PresenceStatePresenceEvent.LEFT_LEAVE, PresenceStatePresenceEvent.RIGHT_LEAVE, ): - await sensor_ws_data({"state": {"presenceevent": presence_event}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": {"presenceevent": presence_event}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() assert len(captured_events) == 1 assert captured_events[0].data == { @@ -394,51 +506,69 @@ async def test_deconz_presence_events( # Unsupported presence event - await sensor_ws_data({"state": {"presenceevent": PresenceStatePresenceEvent.NINE}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": {"presenceevent": PresenceStatePresenceEvent.NINE}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert len(captured_events) == 0 + await hass.config_entries.async_unload(config_entry.entry_id) + + states = hass.states.async_all() + assert len(hass.states.async_all()) == 5 + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "battery": 100, - "on": True, - "reachable": True, - }, - "etag": "463728970bdb7d04048fc4373654f45a", - "lastannounced": "2022-07-03T13:57:59Z", - "lastseen": "2022-07-03T14:02Z", - "manufacturername": "Signify Netherlands B.V.", - "modelid": "RDM002", - "name": "RDM002 44", - "state": { - "expectedeventduration": 400, - "expectedrotation": 75, - "lastupdated": "2022-07-03T11:37:49.586", - "rotaryevent": 2, - }, - "swversion": "2.59.19", - "type": "ZHARelativeRotary", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-14-fc00", - } - ], -) async def test_deconz_relative_rotary_events( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - config_entry_setup: MockConfigEntry, - sensor_ws_data: WebsocketDataType, + aioclient_mock: AiohttpClientMocker, + mock_deconz_websocket, ) -> None: """Test successful creation of deconz relative rotary events.""" + data = { + "sensors": { + "1": { + "config": { + "battery": 100, + "on": True, + "reachable": True, + }, + "etag": "463728970bdb7d04048fc4373654f45a", + "lastannounced": "2022-07-03T13:57:59Z", + "lastseen": "2022-07-03T14:02Z", + "manufacturername": "Signify Netherlands B.V.", + "modelid": "RDM002", + "name": "RDM002 44", + "state": { + "expectedeventduration": 400, + "expectedrotation": 75, + "lastupdated": "2022-07-03T11:37:49.586", + "rotaryevent": 2, + }, + "swversion": "2.59.19", + "type": "ZHARelativeRotary", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-14-fc00", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 1 assert ( - len( - dr.async_entries_for_config_entry( - device_registry, config_entry_setup.entry_id - ) - ) + len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) == 3 ) @@ 
-450,13 +580,18 @@ async def test_deconz_relative_rotary_events( for rotary_event, duration, rotation in ((1, 100, 50), (2, 200, -50)): event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", "state": { "rotaryevent": rotary_event, "expectedeventduration": duration, "expectedrotation": rotation, - } + }, } - await sensor_ws_data(event_changed_sensor) + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() assert len(captured_events) == 1 assert captured_events[0].data == { @@ -471,14 +606,38 @@ async def test_deconz_relative_rotary_events( # Unsupported relative rotary event - await sensor_ws_data({"name": "123"}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "name": "123", + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert len(captured_events) == 0 + await hass.config_entries.async_unload(config_entry.entry_id) -@pytest.mark.parametrize( - "sensor_payload", - [ - { + states = hass.states.async_all() + assert len(hass.states.async_all()) == 1 + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + +async def test_deconz_events_bad_unique_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Verify no devices are created if unique id is bad or missing.""" + data = { + "sensors": { "1": { "name": "Switch 1 no unique id", "type": "ZHASwitch", @@ -493,20 +652,12 @@ async def test_deconz_relative_rotary_events( "uniqueid": "00:00-00", }, } - ], -) -async def test_deconz_events_bad_unique_id( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - config_entry_setup: MockConfigEntry, -) -> None: - """Verify no devices are created if unique id is bad or missing.""" + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 1 assert ( - len( - dr.async_entries_for_config_entry( - device_registry, config_entry_setup.entry_id - ) - ) + len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) == 2 ) diff --git a/tests/components/deconz/test_device_trigger.py b/tests/components/deconz/test_device_trigger.py index 1502cc4081d..54b735ba021 100644 --- a/tests/components/deconz/test_device_trigger.py +++ b/tests/components/deconz/test_device_trigger.py @@ -1,14 +1,12 @@ """deCONZ device automation tests.""" -from unittest.mock import Mock +from unittest.mock import Mock, patch import pytest from pytest_unordered import unordered from homeassistant.components.automation import DOMAIN as AUTOMATION_DOMAIN from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN - -# pylint: disable-next=hass-component-root-import from homeassistant.components.binary_sensor.device_trigger import ( CONF_BAT_LOW, CONF_NOT_BAT_LOW, @@ -34,9 +32,10 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.trigger import async_initialize_triggers from homeassistant.setup import async_setup_component -from .conftest import WebsocketDataType +from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import 
async_get_device_automations, async_mock_service +from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -44,37 +43,45 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "alert": "none", - "battery": 60, - "group": "10", - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "1b355c0b6d2af28febd7ca9165881952", - "manufacturername": "IKEA of Sweden", - "mode": 1, - "modelid": "TRADFRI on/off switch", - "name": "TRÅDFRI on/off switch ", - "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, - "swversion": "1.4.018", - "type": "ZHASwitch", - "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") +@pytest.fixture +def automation_calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track automation calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + aioclient_mock: AiohttpClientMocker, ) -> None: """Test triggers work.""" + data = { + "sensors": { + "1": { + "config": { + "alert": "none", + "battery": 60, + "group": "10", + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "1b355c0b6d2af28febd7ca9165881952", + "manufacturername": "IKEA of Sweden", + "mode": 1, + "modelid": "TRADFRI on/off switch", + "name": "TRÅDFRI on/off switch ", + "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, + "swversion": "1.4.018", + "type": "ZHASwitch", + "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")} ) @@ -148,44 +155,46 @@ async def test_get_triggers( assert triggers == unordered(expected_triggers) -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "battery": 95, - "enrolled": 1, - "on": True, - "pending": [], - "reachable": True, - }, - "ep": 1, - "etag": "5aaa1c6bae8501f59929539c6e8f44d6", - "lastseen": "2021-07-25T18:07Z", - "manufacturername": "lk", - "modelid": "ZB-KeypadGeneric-D0002", - "name": "Keypad", - "state": { - "action": "armed_stay", - "lastupdated": "2021-07-25T18:02:51.172", - "lowbattery": False, - "panel": "exit_delay", - "seconds_remaining": 55, - "tampered": False, - }, - "swversion": "3.13", - "type": "ZHAAncillaryControl", - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") async def test_get_triggers_for_alarm_event( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + aioclient_mock: AiohttpClientMocker, ) -> None: """Test triggers work.""" + data = { + "sensors": { + "1": { + "config": { + "battery": 95, + "enrolled": 1, + "on": True, + "pending": [], + "reachable": True, + }, + "ep": 1, + "etag": "5aaa1c6bae8501f59929539c6e8f44d6", + "lastseen": "2021-07-25T18:07Z", + "manufacturername": "lk", + "modelid": "ZB-KeypadGeneric-D0002", + "name": "Keypad", + "state": { + "action": "armed_stay", + "lastupdated": "2021-07-25T18:02:51.172", + "lowbattery": False, + "panel": "exit_delay", + "seconds_remaining": 55, + "tampered": False, + }, + "swversion": "3.13", + "type": "ZHAAncillaryControl", + 
"uniqueid": "00:00:00:00:00:00:00:00-00", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:00")} ) @@ -243,34 +252,37 @@ async def test_get_triggers_for_alarm_event( assert triggers == unordered(expected_triggers) -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "alert": "none", - "group": "10", - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "1b355c0b6d2af28febd7ca9165881952", - "manufacturername": "IKEA of Sweden", - "mode": 1, - "modelid": "Unsupported model", - "name": "TRÅDFRI on/off switch ", - "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, - "swversion": "1.4.018", - "type": "ZHASwitch", - "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") async def test_get_triggers_manage_unsupported_remotes( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + device_registry: dr.DeviceRegistry, ) -> None: """Verify no triggers for an unsupported remote.""" + data = { + "sensors": { + "1": { + "config": { + "alert": "none", + "group": "10", + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "1b355c0b6d2af28febd7ca9165881952", + "manufacturername": "IKEA of Sweden", + "mode": 1, + "modelid": "Unsupported model", + "name": "TRÅDFRI on/off switch ", + "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, + "swversion": "1.4.018", + "type": "ZHASwitch", + "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")} ) @@ -284,38 +296,41 @@ async def test_get_triggers_manage_unsupported_remotes( assert triggers == unordered(expected_triggers) -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "alert": "none", - "battery": 60, - "group": "10", - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "1b355c0b6d2af28febd7ca9165881952", - "manufacturername": "IKEA of Sweden", - "mode": 1, - "modelid": "TRADFRI on/off switch", - "name": "TRÅDFRI on/off switch ", - "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, - "swversion": "1.4.018", - "type": "ZHASwitch", - "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") async def test_functional_device_trigger( hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + mock_deconz_websocket, + automation_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], - sensor_ws_data: WebsocketDataType, ) -> None: """Test proper matching and attachment of device trigger automation.""" + + data = { + "sensors": { + "1": { + "config": { + "alert": "none", + "battery": 60, + "group": "10", + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "1b355c0b6d2af28febd7ca9165881952", + "manufacturername": "IKEA of Sweden", + "mode": 1, + "modelid": "TRADFRI on/off switch", + "name": "TRÅDFRI on/off switch ", + "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, + "swversion": "1.4.018", + "type": "ZHASwitch", + "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, 
aioclient_mock) + device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")} ) @@ -344,16 +359,27 @@ async def test_functional_device_trigger( assert len(hass.states.async_entity_ids(AUTOMATION_DOMAIN)) == 1 - await sensor_ws_data({"state": {"buttonevent": 1002}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": {"buttonevent": 1002}, + } + await mock_deconz_websocket(data=event_changed_sensor) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test_trigger_button_press" + + assert len(automation_calls) == 1 + assert automation_calls[0].data["some"] == "test_trigger_button_press" @pytest.mark.skip(reason="Temporarily disabled until automation validation is improved") -@pytest.mark.usefixtures("config_entry_setup") -async def test_validate_trigger_unknown_device(hass: HomeAssistant) -> None: +async def test_validate_trigger_unknown_device( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: """Test unknown device does not return a trigger config.""" + await setup_deconz_integration(hass, aioclient_mock) + assert await async_setup_component( hass, AUTOMATION_DOMAIN, @@ -382,12 +408,14 @@ async def test_validate_trigger_unknown_device(hass: HomeAssistant) -> None: async def test_validate_trigger_unsupported_device( hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, - config_entry_setup: MockConfigEntry, ) -> None: """Test unsupported device doesn't return a trigger config.""" + config_entry = await setup_deconz_integration(hass, aioclient_mock) + device = device_registry.async_get_or_create( - config_entry_id=config_entry_setup.entry_id, + config_entry_id=config_entry.entry_id, identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")}, model="unsupported", ) @@ -422,12 +450,14 @@ async def test_validate_trigger_unsupported_device( async def test_validate_trigger_unsupported_trigger( hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, - config_entry_setup: MockConfigEntry, ) -> None: """Test unsupported trigger does not return a trigger config.""" + config_entry = await setup_deconz_integration(hass, aioclient_mock) + device = device_registry.async_get_or_create( - config_entry_id=config_entry_setup.entry_id, + config_entry_id=config_entry.entry_id, identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")}, model="TRADFRI on/off switch", ) @@ -464,12 +494,14 @@ async def test_validate_trigger_unsupported_trigger( async def test_attach_trigger_no_matching_event( hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, - config_entry_setup: MockConfigEntry, ) -> None: """Test no matching event for device doesn't return a trigger config.""" + config_entry = await setup_deconz_integration(hass, aioclient_mock) + device = device_registry.async_get_or_create( - config_entry_id=config_entry_setup.entry_id, + config_entry_id=config_entry.entry_id, identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")}, name="Tradfri switch", model="TRADFRI on/off switch", diff --git a/tests/components/deconz/test_diagnostics.py b/tests/components/deconz/test_diagnostics.py index 2abc6d83995..bfbc27b206d 100644 --- a/tests/components/deconz/test_diagnostics.py +++ b/tests/components/deconz/test_diagnostics.py @@ -2,28 +2,30 @@ from pydeconz.websocket import State from syrupy import SnapshotAssertion -from syrupy.filters 
import props from homeassistant.core import HomeAssistant -from .conftest import WebsocketStateType +from .test_gateway import setup_deconz_integration -from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - config_entry_setup: MockConfigEntry, - mock_websocket_state: WebsocketStateType, + aioclient_mock: AiohttpClientMocker, + mock_deconz_websocket, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - await mock_websocket_state(State.RUNNING) + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + await mock_deconz_websocket(state=State.RUNNING) await hass.async_block_till_done() - assert await get_diagnostics_for_config_entry( - hass, hass_client, config_entry_setup - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + == snapshot + ) diff --git a/tests/components/deconz/test_fan.py b/tests/components/deconz/test_fan.py index 21809a138c6..5da0398c3e6 100644 --- a/tests/components/deconz/test_fan.py +++ b/tests/components/deconz/test_fan.py @@ -1,10 +1,9 @@ """deCONZ fan platform tests.""" -from collections.abc import Callable from unittest.mock import patch import pytest -from syrupy import SnapshotAssertion +from voluptuous.error import MultipleInvalid from homeassistant.components.fan import ( ATTR_PERCENTAGE, @@ -13,67 +12,129 @@ from homeassistant.components.fan import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from .conftest import ConfigEntryFactoryType, WebsocketDataType +from .test_gateway import ( + DECONZ_WEB_REQUEST, + mock_deconz_put_request, + setup_deconz_integration, +) -from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.mark.parametrize( - "light_payload", - [ - { - "etag": "432f3de28965052961a99e3c5494daf4", - "hascolor": False, - "manufacturername": "King Of Fans, Inc.", - "modelid": "HDC52EastwindFan", - "name": "Ceiling fan", - "state": { - "alert": "none", - "bri": 254, - "on": False, - "reachable": True, - "speed": 4, - }, - "swversion": "0000000F", - "type": "Fan", - "uniqueid": "00:22:a3:00:00:27:8b:81-01", - } - ], -) +async def test_no_fans( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that no fan entities are created.""" + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 + + async def test_fans( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - aioclient_mock: AiohttpClientMocker, - config_entry_factory: ConfigEntryFactoryType, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - light_ws_data: WebsocketDataType, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test that all supported fan entities are created.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.FAN]): - config_entry = await config_entry_factory() + data = { + "lights": { + "1": { + 
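# Fixture for the HDC52EastwindFan: deCONZ speed 4 surfaces as 100 %, and the
# websocket events further down walk speeds 1-4 (25/50/75/100 %) with speed 0
# turning the fan off.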
"etag": "432f3de28965052961a99e3c5494daf4", + "hascolor": False, + "manufacturername": "King Of Fans, Inc.", + "modelid": "HDC52EastwindFan", + "name": "Ceiling fan", + "state": { + "alert": "none", + "bri": 254, + "on": False, + "reachable": True, + "speed": 4, + }, + "swversion": "0000000F", + "type": "Fan", + "uniqueid": "00:22:a3:00:00:27:8b:81-01", + } + } + } - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == 2 # Light and fan + assert hass.states.get("fan.ceiling_fan").state == STATE_ON + assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 100 # Test states - for speed, percent in (1, 25), (2, 50), (3, 75), (4, 100): - await light_ws_data({"state": {"speed": speed}}) - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == percent + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"speed": 1}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + + assert hass.states.get("fan.ceiling_fan").state == STATE_ON + assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 25 + + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"speed": 2}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + + assert hass.states.get("fan.ceiling_fan").state == STATE_ON + assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 50 + + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"speed": 3}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + + assert hass.states.get("fan.ceiling_fan").state == STATE_ON + assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 75 + + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"speed": 4}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + + assert hass.states.get("fan.ceiling_fan").state == STATE_ON + assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 100 + + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"speed": 0}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() - await light_ws_data({"state": {"speed": 0}}) assert hass.states.get("fan.ceiling_fan").state == STATE_OFF assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 0 # Test service calls - aioclient_mock = mock_put_request("/lights/0/state") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") # Service turn on fan using saved default_on_speed @@ -105,20 +166,323 @@ async def test_fans( ) assert aioclient_mock.mock_calls[3][2] == {"speed": 1} - # Service set fan percentage + # Service set fan percentage to 20% - for percent, speed in (20, 1), (40, 2), (60, 3), (80, 4), (0, 0): - aioclient_mock.mock_calls.clear() - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: percent}, - blocking=True, - ) - assert aioclient_mock.mock_calls[0][2] == {"speed": 
speed} + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_SET_PERCENTAGE, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 20}, + blocking=True, + ) + assert aioclient_mock.mock_calls[4][2] == {"speed": 1} + + # Service set fan percentage to 40% + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_SET_PERCENTAGE, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 40}, + blocking=True, + ) + assert aioclient_mock.mock_calls[5][2] == {"speed": 2} + + # Service set fan percentage to 60% + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_SET_PERCENTAGE, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 60}, + blocking=True, + ) + assert aioclient_mock.mock_calls[6][2] == {"speed": 3} + + # Service set fan percentage to 80% + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_SET_PERCENTAGE, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 80}, + blocking=True, + ) + assert aioclient_mock.mock_calls[7][2] == {"speed": 4} + + # Service set fan percentage to 0% does not equal off + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_SET_PERCENTAGE, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 0}, + blocking=True, + ) + assert aioclient_mock.mock_calls[8][2] == {"speed": 0} # Events with an unsupported speed does not get converted - await light_ws_data({"state": {"speed": 5}}) + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"speed": 5}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + assert hass.states.get("fan.ceiling_fan").state == STATE_ON assert not hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] + + await hass.config_entries.async_unload(config_entry.entry_id) + + states = hass.states.async_all() + assert len(states) == 2 + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + +async def test_fans_legacy_speed_modes( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket +) -> None: + """Test that all supported fan entities are created. + + Legacy fan support. 
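The five SERVICE_SET_PERCENTAGE blocks above spell out the percentage-to-speed mapping one case at a time (deCONZ fans expose discrete speed steps 1-4, with 0 meaning off). The removed fixture-based test expressed the same checks as a loop; an equivalent loop over the restored helpers would look like the sketch below, assuming the PUT mock already registered in test_fans via mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") and the names imported at the top of this module:

    for percentage, expected_speed in ((20, 1), (40, 2), (60, 3), (80, 4), (0, 0)):
        # Clear previously recorded calls so index 0 is the PUT triggered below.
        aioclient_mock.mock_calls.clear()
        await hass.services.async_call(
            FAN_DOMAIN,
            SERVICE_SET_PERCENTAGE,
            {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: percentage},
            blocking=True,
        )
        # The fan entity maps the requested percentage onto a deCONZ speed step.
        assert aioclient_mock.mock_calls[0][2] == {"speed": expected_speed}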
+ """ + data = { + "lights": { + "1": { + "etag": "432f3de28965052961a99e3c5494daf4", + "hascolor": False, + "manufacturername": "King Of Fans, Inc.", + "modelid": "HDC52EastwindFan", + "name": "Ceiling fan", + "state": { + "alert": "none", + "bri": 254, + "on": False, + "reachable": True, + "speed": 4, + }, + "swversion": "0000000F", + "type": "Fan", + "uniqueid": "00:22:a3:00:00:27:8b:81-01", + } + } + } + + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == 2 # Light and fan + assert hass.states.get("fan.ceiling_fan").state == STATE_ON + + # Test states + + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"speed": 1}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + + assert hass.states.get("fan.ceiling_fan").state == STATE_ON + assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 25 + + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"speed": 2}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + + assert hass.states.get("fan.ceiling_fan").state == STATE_ON + assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 50 + + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"speed": 3}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + + assert hass.states.get("fan.ceiling_fan").state == STATE_ON + assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 75 + + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"speed": 4}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + + assert hass.states.get("fan.ceiling_fan").state == STATE_ON + assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 100 + + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"speed": 0}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + + assert hass.states.get("fan.ceiling_fan").state == STATE_OFF + assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 0 + + # Test service calls + + mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") + + # Service turn on fan using saved default_on_speed + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "fan.ceiling_fan"}, + blocking=True, + ) + assert aioclient_mock.mock_calls[1][2] == {"speed": 4} + + # Service turn on fan with speed_off + # async_turn_on_compat use speed_to_percentage which will return 0 + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 0}, + blocking=True, + ) + assert aioclient_mock.mock_calls[2][2] == {"speed": 0} + + # Service turn on fan with bad speed + # async_turn_on_compat use speed_to_percentage which will convert to SPEED_MEDIUM -> 2 + + with pytest.raises(MultipleInvalid): + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: "bad"}, + blocking=True, + ) + + # Service turn on fan to low speed + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_TURN_ON, + 
{ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 25}, + blocking=True, + ) + assert aioclient_mock.mock_calls[3][2] == {"speed": 1} + + # Service turn on fan to medium speed + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 50}, + blocking=True, + ) + assert aioclient_mock.mock_calls[4][2] == {"speed": 2} + + # Service turn on fan to high speed + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 100}, + blocking=True, + ) + assert aioclient_mock.mock_calls[5][2] == {"speed": 4} + + # Service set fan speed to low + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_SET_PERCENTAGE, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 25}, + blocking=True, + ) + assert aioclient_mock.mock_calls[6][2] == {"speed": 1} + + # Service set fan speed to medium + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_SET_PERCENTAGE, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 50}, + blocking=True, + ) + assert aioclient_mock.mock_calls[7][2] == {"speed": 2} + + # Service set fan speed to high + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_SET_PERCENTAGE, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 100}, + blocking=True, + ) + assert aioclient_mock.mock_calls[8][2] == {"speed": 4} + + # Service set fan speed to off + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_SET_PERCENTAGE, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 0}, + blocking=True, + ) + assert aioclient_mock.mock_calls[9][2] == {"speed": 0} + + # Service set fan speed to unsupported value + + with pytest.raises(MultipleInvalid): + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_SET_PERCENTAGE, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: "bad value"}, + blocking=True, + ) + + # Events with an unsupported speed gets converted to default speed "medium" + + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"speed": 3}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + + assert hass.states.get("fan.ceiling_fan").state == STATE_ON + assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 75 + + await hass.config_entries.async_unload(config_entry.entry_id) + + states = hass.states.async_all() + assert len(states) == 2 + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_gateway.py b/tests/components/deconz/test_gateway.py new file mode 100644 index 00000000000..b00a5cc1f05 --- /dev/null +++ b/tests/components/deconz/test_gateway.py @@ -0,0 +1,319 @@ +"""Test deCONZ gateway.""" + +from copy import deepcopy +from typing import Any +from unittest.mock import patch + +import pydeconz +from pydeconz.websocket import State +import pytest + +from homeassistant.components import ssdp +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, +) +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN +from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN +from 
homeassistant.components.deconz.config_flow import DECONZ_MANUFACTURERURL +from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN +from homeassistant.components.deconz.errors import AuthenticationRequired, CannotConnect +from homeassistant.components.deconz.hub import DeconzHub, get_deconz_api +from homeassistant.components.fan import DOMAIN as FAN_DOMAIN +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN +from homeassistant.components.scene import DOMAIN as SCENE_DOMAIN +from homeassistant.components.select import DOMAIN as SELECT_DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.siren import DOMAIN as SIREN_DOMAIN +from homeassistant.components.ssdp import ( + ATTR_UPNP_MANUFACTURER_URL, + ATTR_UPNP_SERIAL, + ATTR_UPNP_UDN, +) +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.config_entries import SOURCE_HASSIO, SOURCE_SSDP, SOURCE_USER +from homeassistant.const import ( + CONF_API_KEY, + CONF_HOST, + CONF_PORT, + CONTENT_TYPE_JSON, + STATE_OFF, + STATE_UNAVAILABLE, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.typing import UNDEFINED, UndefinedType + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker + +API_KEY = "1234567890ABCDEF" +BRIDGEID = "01234E56789A" +HOST = "1.2.3.4" +PORT = 80 + +DEFAULT_URL = f"http://{HOST}:{PORT}/api/{API_KEY}" + +ENTRY_CONFIG = {CONF_API_KEY: API_KEY, CONF_HOST: HOST, CONF_PORT: PORT} + +ENTRY_OPTIONS = {} + +DECONZ_CONFIG = { + "bridgeid": BRIDGEID, + "ipaddress": HOST, + "mac": "00:11:22:33:44:55", + "modelid": "deCONZ", + "name": "deCONZ mock gateway", + "sw_version": "2.05.69", + "uuid": "1234", + "websocketport": 1234, +} + +DECONZ_WEB_REQUEST = { + "config": DECONZ_CONFIG, + "groups": {}, + "lights": {}, + "sensors": {}, +} + + +def mock_deconz_request(aioclient_mock, config, data): + """Mock a deCONZ get request.""" + host = config[CONF_HOST] + port = config[CONF_PORT] + api_key = config[CONF_API_KEY] + + aioclient_mock.get( + f"http://{host}:{port}/api/{api_key}", + json=deepcopy(data), + headers={"content-type": CONTENT_TYPE_JSON}, + ) + + +def mock_deconz_put_request(aioclient_mock, config, path): + """Mock a deCONZ put request.""" + host = config[CONF_HOST] + port = config[CONF_PORT] + api_key = config[CONF_API_KEY] + + aioclient_mock.put( + f"http://{host}:{port}/api/{api_key}{path}", + json={}, + headers={"content-type": CONTENT_TYPE_JSON}, + ) + + +async def setup_deconz_integration( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker | None = None, + *, + options: dict[str, Any] | UndefinedType = UNDEFINED, + entry_id="1", + unique_id=BRIDGEID, + source=SOURCE_USER, +): + """Create the deCONZ gateway.""" + config_entry = MockConfigEntry( + domain=DECONZ_DOMAIN, + source=source, + data=deepcopy(ENTRY_CONFIG), + options=deepcopy(ENTRY_OPTIONS if options is UNDEFINED else options), + entry_id=entry_id, + unique_id=unique_id, + ) + config_entry.add_to_hass(hass) + + if aioclient_mock: + mock_deconz_request(aioclient_mock, ENTRY_CONFIG, DECONZ_WEB_REQUEST) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry + + +async def test_gateway_setup( + hass: HomeAssistant, + 
aioclient_mock: AiohttpClientMocker, + device_registry: dr.DeviceRegistry, +) -> None: + """Successful setup.""" + # Patching async_forward_entry_setup* is not advisable, and should be refactored + # in the future. + with patch( + "homeassistant.config_entries.ConfigEntries.async_forward_entry_setups", + return_value=True, + ) as forward_entry_setup: + config_entry = await setup_deconz_integration(hass, aioclient_mock) + gateway = DeconzHub.get_hub(hass, config_entry) + assert gateway.bridgeid == BRIDGEID + assert gateway.master is True + assert gateway.config.allow_clip_sensor is False + assert gateway.config.allow_deconz_groups is True + assert gateway.config.allow_new_devices is True + + assert len(gateway.deconz_ids) == 0 + assert len(hass.states.async_all()) == 0 + + assert forward_entry_setup.mock_calls[0][1] == ( + config_entry, + [ + ALARM_CONTROL_PANEL_DOMAIN, + BINARY_SENSOR_DOMAIN, + BUTTON_DOMAIN, + CLIMATE_DOMAIN, + COVER_DOMAIN, + FAN_DOMAIN, + LIGHT_DOMAIN, + LOCK_DOMAIN, + NUMBER_DOMAIN, + SCENE_DOMAIN, + SELECT_DOMAIN, + SENSOR_DOMAIN, + SIREN_DOMAIN, + SWITCH_DOMAIN, + ], + ) + + gateway_entry = device_registry.async_get_device( + identifiers={(DECONZ_DOMAIN, gateway.bridgeid)} + ) + + assert gateway_entry.configuration_url == f"http://{HOST}:{PORT}" + assert gateway_entry.entry_type is dr.DeviceEntryType.SERVICE + + +async def test_gateway_device_configuration_url_when_addon( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + device_registry: dr.DeviceRegistry, +) -> None: + """Successful setup.""" + # Patching async_forward_entry_setup* is not advisable, and should be refactored + # in the future. + with patch( + "homeassistant.config_entries.ConfigEntries.async_forward_entry_setups", + return_value=True, + ): + config_entry = await setup_deconz_integration( + hass, aioclient_mock, source=SOURCE_HASSIO + ) + gateway = DeconzHub.get_hub(hass, config_entry) + + gateway_entry = device_registry.async_get_device( + identifiers={(DECONZ_DOMAIN, gateway.bridgeid)} + ) + + assert ( + gateway_entry.configuration_url == "homeassistant://hassio/ingress/core_deconz" + ) + + +async def test_connection_status_signalling( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket +) -> None: + """Make sure that connection status triggers a dispatcher send.""" + data = { + "sensors": { + "1": { + "name": "presence", + "type": "ZHAPresence", + "state": {"presence": False}, + "config": {"on": True, "reachable": True}, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + + assert hass.states.get("binary_sensor.presence").state == STATE_OFF + + await mock_deconz_websocket(state=State.RETRYING) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.presence").state == STATE_UNAVAILABLE + + await mock_deconz_websocket(state=State.RUNNING) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.presence").state == STATE_OFF + + +async def test_update_address( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Make sure that connection status triggers a dispatcher send.""" + config_entry = await setup_deconz_integration(hass, aioclient_mock) + gateway = DeconzHub.get_hub(hass, config_entry) + assert gateway.api.host == "1.2.3.4" + + with patch( + "homeassistant.components.deconz.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + await 
hass.config_entries.flow.async_init( + DECONZ_DOMAIN, + data=ssdp.SsdpServiceInfo( + ssdp_st="mock_st", + ssdp_usn="mock_usn", + ssdp_location="http://2.3.4.5:80/", + upnp={ + ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, + ATTR_UPNP_SERIAL: BRIDGEID, + ATTR_UPNP_UDN: "uuid:456DEF", + }, + ), + context={"source": SOURCE_SSDP}, + ) + await hass.async_block_till_done() + + assert gateway.api.host == "2.3.4.5" + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_reset_after_successful_setup( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Make sure that connection status triggers a dispatcher send.""" + config_entry = await setup_deconz_integration(hass, aioclient_mock) + gateway = DeconzHub.get_hub(hass, config_entry) + + result = await gateway.async_reset() + await hass.async_block_till_done() + + assert result is True + + +async def test_get_deconz_api(hass: HomeAssistant) -> None: + """Successful call.""" + config_entry = MockConfigEntry(domain=DECONZ_DOMAIN, data=ENTRY_CONFIG) + with patch("pydeconz.DeconzSession.refresh_state", return_value=True): + assert await get_deconz_api(hass, config_entry) + + +@pytest.mark.parametrize( + ("side_effect", "raised_exception"), + [ + (TimeoutError, CannotConnect), + (pydeconz.RequestError, CannotConnect), + (pydeconz.ResponseError, CannotConnect), + (pydeconz.Unauthorized, AuthenticationRequired), + ], +) +async def test_get_deconz_api_fails( + hass: HomeAssistant, side_effect, raised_exception +) -> None: + """Failed call.""" + config_entry = MockConfigEntry(domain=DECONZ_DOMAIN, data=ENTRY_CONFIG) + with ( + patch( + "pydeconz.DeconzSession.refresh_state", + side_effect=side_effect, + ), + pytest.raises(raised_exception), + ): + assert await get_deconz_api(hass, config_entry) diff --git a/tests/components/deconz/test_hub.py b/tests/components/deconz/test_hub.py deleted file mode 100644 index 43c51179337..00000000000 --- a/tests/components/deconz/test_hub.py +++ /dev/null @@ -1,100 +0,0 @@ -"""Test deCONZ gateway.""" - -from unittest.mock import patch - -from pydeconz.websocket import State -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components import ssdp -from homeassistant.components.deconz.config_flow import DECONZ_MANUFACTURERURL -from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN -from homeassistant.components.ssdp import ( - ATTR_UPNP_MANUFACTURER_URL, - ATTR_UPNP_SERIAL, - ATTR_UPNP_UDN, -) -from homeassistant.config_entries import SOURCE_SSDP -from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from .conftest import BRIDGE_ID - -from tests.common import MockConfigEntry - - -async def test_device_registry_entry( - config_entry_setup: MockConfigEntry, - device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Successful setup.""" - device_entry = device_registry.async_get_device( - identifiers={(DECONZ_DOMAIN, config_entry_setup.unique_id)} - ) - assert device_entry == snapshot - - -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "name": "presence", - "type": "ZHAPresence", - "state": {"presence": False}, - "config": {"on": True, "reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_connection_status_signalling( - hass: HomeAssistant, mock_websocket_state -) -> None: - """Make sure that connection status 
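setup_deconz_integration, DECONZ_WEB_REQUEST and the request mocks above are the building blocks every restored test module shares: seed the mocked REST API, create the config entry, then assert against the resulting states. A minimal usage sketch; the test name and light payload are illustrative only and not part of the diff:

    from unittest.mock import patch

    from homeassistant.const import STATE_ON
    from homeassistant.core import HomeAssistant

    from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration

    from tests.test_util.aiohttp import AiohttpClientMocker


    async def test_setup_with_one_light(
        hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
    ) -> None:
        """Illustrative only: set up the mocked gateway with a single light."""
        data = {
            "lights": {
                "1": {
                    "name": "Example light",
                    "state": {"on": True, "bri": 254, "reachable": True},
                    "type": "Dimmable light",
                    "uniqueid": "00:00:00:00:00:00:00:10-00",
                }
            }
        }
        # patch.dict temporarily merges the payload into DECONZ_WEB_REQUEST,
        # which is what the mocked GET in setup_deconz_integration returns.
        with patch.dict(DECONZ_WEB_REQUEST, data):
            await setup_deconz_integration(hass, aioclient_mock)

        assert len(hass.states.async_all()) == 1
        assert hass.states.get("light.example_light").state == STATE_ON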
triggers a dispatcher send.""" - assert hass.states.get("binary_sensor.presence").state == STATE_OFF - - await mock_websocket_state(State.RETRYING) - await hass.async_block_till_done() - - assert hass.states.get("binary_sensor.presence").state == STATE_UNAVAILABLE - - await mock_websocket_state(State.RUNNING) - await hass.async_block_till_done() - - assert hass.states.get("binary_sensor.presence").state == STATE_OFF - - -async def test_update_address( - hass: HomeAssistant, config_entry_setup: MockConfigEntry -) -> None: - """Make sure that connection status triggers a dispatcher send.""" - assert config_entry_setup.data["host"] == "1.2.3.4" - - with ( - patch( - "homeassistant.components.deconz.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - patch("pydeconz.gateway.WSClient") as ws_mock, - ): - await hass.config_entries.flow.async_init( - DECONZ_DOMAIN, - data=ssdp.SsdpServiceInfo( - ssdp_st="mock_st", - ssdp_usn="mock_usn", - ssdp_location="http://2.3.4.5:80/", - upnp={ - ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGE_ID, - ATTR_UPNP_UDN: "uuid:456DEF", - }, - ), - context={"source": SOURCE_SSDP}, - ) - await hass.async_block_till_done() - - assert ws_mock.call_args[0][1] == "2.3.4.5" - assert config_entry_setup.data["host"] == "2.3.4.5" - assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/deconz/test_init.py b/tests/components/deconz/test_init.py index 390d8b9b353..d08bd039184 100644 --- a/tests/components/deconz/test_init.py +++ b/tests/components/deconz/test_init.py @@ -3,59 +3,64 @@ import asyncio from unittest.mock import patch -import pydeconz -import pytest - -from homeassistant.components.deconz.const import ( - CONF_MASTER_GATEWAY, - DOMAIN as DECONZ_DOMAIN, +from homeassistant.components.deconz import ( + DeconzHub, + async_setup_entry, + async_unload_entry, ) -from homeassistant.components.deconz.errors import AuthenticationRequired -from homeassistant.config_entries import ConfigEntryState +from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN +from homeassistant.components.deconz.errors import AuthenticationRequired, CannotConnect from homeassistant.core import HomeAssistant -from .conftest import ConfigEntryFactoryType +from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration -from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker + +ENTRY1_HOST = "1.2.3.4" +ENTRY1_PORT = 80 +ENTRY1_API_KEY = "1234567890ABCDEF" +ENTRY1_BRIDGEID = "12345ABC" +ENTRY1_UUID = "456DEF" + +ENTRY2_HOST = "2.3.4.5" +ENTRY2_PORT = 80 +ENTRY2_API_KEY = "1234567890ABCDEF" +ENTRY2_BRIDGEID = "23456DEF" +ENTRY2_UUID = "789ACE" -async def test_setup_entry(config_entry_setup: MockConfigEntry) -> None: - """Test successful setup of entry.""" - assert config_entry_setup.state is ConfigEntryState.LOADED - assert config_entry_setup.options[CONF_MASTER_GATEWAY] is True - - -@pytest.mark.parametrize( - ("side_effect", "state"), - [ - # Failed authentication trigger a reauthentication flow - (pydeconz.Unauthorized, ConfigEntryState.SETUP_ERROR), - # Connection fails - (TimeoutError, ConfigEntryState.SETUP_RETRY), - (pydeconz.RequestError, ConfigEntryState.SETUP_RETRY), - (pydeconz.ResponseError, ConfigEntryState.SETUP_RETRY), - ], -) -async def test_get_deconz_api_fails( - hass: HomeAssistant, - config_entry: MockConfigEntry, - side_effect: Exception, - state: ConfigEntryState, -) -> None: - """Failed setup.""" - config_entry.add_to_hass(hass) - with patch( - 
"homeassistant.components.deconz.hub.api.DeconzSession.refresh_state", - side_effect=side_effect, +async def setup_entry(hass, entry): + """Test that setup entry works.""" + with ( + patch.object(DeconzHub, "async_setup", return_value=True), + patch.object(DeconzHub, "async_update_device_registry", return_value=True), ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is state + assert await async_setup_entry(hass, entry) is True -async def test_setup_entry_fails_trigger_reauth_flow( - hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType +async def test_setup_entry_successful( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: + """Test setup entry is successful.""" + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert hass.data[DECONZ_DOMAIN] + assert config_entry.entry_id in hass.data[DECONZ_DOMAIN] + assert hass.data[DECONZ_DOMAIN][config_entry.entry_id].master + + +async def test_setup_entry_fails_config_entry_not_ready(hass: HomeAssistant) -> None: + """Failed authentication trigger a reauthentication flow.""" + with patch( + "homeassistant.components.deconz.get_deconz_api", + side_effect=CannotConnect, + ): + await setup_deconz_integration(hass) + + assert hass.data[DECONZ_DOMAIN] == {} + + +async def test_setup_entry_fails_trigger_reauth_flow(hass: HomeAssistant) -> None: """Failed authentication trigger a reauthentication flow.""" with ( patch( @@ -64,83 +69,89 @@ async def test_setup_entry_fails_trigger_reauth_flow( ), patch.object(hass.config_entries.flow, "async_init") as mock_flow_init, ): - config_entry = await config_entry_factory() + await setup_deconz_integration(hass) mock_flow_init.assert_called_once() - assert config_entry.state is ConfigEntryState.SETUP_ERROR + + assert hass.data[DECONZ_DOMAIN] == {} async def test_setup_entry_multiple_gateways( - hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup entry is successful with multiple gateways.""" - config_entry = await config_entry_factory() + config_entry = await setup_deconz_integration(hass, aioclient_mock) + aioclient_mock.clear_requests() - entry2 = MockConfigEntry( - domain=DECONZ_DOMAIN, - entry_id="2", - unique_id="01234E56789B", - data=config_entry.data | {"host": "2.3.4.5"}, - ) - config_entry2 = await config_entry_factory(entry2) + data = {"config": {"bridgeid": "01234E56789B"}} + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry2 = await setup_deconz_integration( + hass, + aioclient_mock, + entry_id="2", + unique_id="01234E56789B", + ) - assert config_entry.state is ConfigEntryState.LOADED - assert config_entry2.state is ConfigEntryState.LOADED - assert config_entry.options[CONF_MASTER_GATEWAY] is True - assert config_entry2.options[CONF_MASTER_GATEWAY] is False + assert len(hass.data[DECONZ_DOMAIN]) == 2 + assert hass.data[DECONZ_DOMAIN][config_entry.entry_id].master + assert not hass.data[DECONZ_DOMAIN][config_entry2.entry_id].master async def test_unload_entry( - hass: HomeAssistant, config_entry_setup: MockConfigEntry + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test being able to unload an entry.""" - assert config_entry_setup.state is ConfigEntryState.LOADED - assert await hass.config_entries.async_unload(config_entry_setup.entry_id) - assert config_entry_setup.state is ConfigEntryState.NOT_LOADED + config_entry = await 
setup_deconz_integration(hass, aioclient_mock) + assert hass.data[DECONZ_DOMAIN] + + assert await async_unload_entry(hass, config_entry) + assert not hass.data[DECONZ_DOMAIN] async def test_unload_entry_multiple_gateways( - hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test being able to unload an entry and master gateway gets moved.""" - config_entry = await config_entry_factory() + config_entry = await setup_deconz_integration(hass, aioclient_mock) + aioclient_mock.clear_requests() - entry2 = MockConfigEntry( - domain=DECONZ_DOMAIN, - entry_id="2", - unique_id="01234E56789B", - data=config_entry.data | {"host": "2.3.4.5"}, - ) - config_entry2 = await config_entry_factory(entry2) + data = {"config": {"bridgeid": "01234E56789B"}} + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry2 = await setup_deconz_integration( + hass, + aioclient_mock, + entry_id="2", + unique_id="01234E56789B", + ) - assert config_entry.state is ConfigEntryState.LOADED - assert config_entry2.state is ConfigEntryState.LOADED + assert len(hass.data[DECONZ_DOMAIN]) == 2 - assert await hass.config_entries.async_unload(config_entry.entry_id) - assert config_entry.state is ConfigEntryState.NOT_LOADED - assert config_entry2.options[CONF_MASTER_GATEWAY] is True + assert await async_unload_entry(hass, config_entry) + + assert len(hass.data[DECONZ_DOMAIN]) == 1 + assert hass.data[DECONZ_DOMAIN][config_entry2.entry_id].master async def test_unload_entry_multiple_gateways_parallel( - hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test race condition when unloading multiple config entries in parallel.""" - config_entry = await config_entry_factory() + config_entry = await setup_deconz_integration(hass, aioclient_mock) + aioclient_mock.clear_requests() - entry2 = MockConfigEntry( - domain=DECONZ_DOMAIN, - entry_id="2", - unique_id="01234E56789B", - data=config_entry.data | {"host": "2.3.4.5"}, - ) - config_entry2 = await config_entry_factory(entry2) + data = {"config": {"bridgeid": "01234E56789B"}} + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry2 = await setup_deconz_integration( + hass, + aioclient_mock, + entry_id="2", + unique_id="01234E56789B", + ) - assert config_entry.state is ConfigEntryState.LOADED - assert config_entry2.state is ConfigEntryState.LOADED + assert len(hass.data[DECONZ_DOMAIN]) == 2 await asyncio.gather( hass.config_entries.async_unload(config_entry.entry_id), hass.config_entries.async_unload(config_entry2.entry_id), ) - assert config_entry.state is ConfigEntryState.NOT_LOADED - assert config_entry2.state is ConfigEntryState.NOT_LOADED + assert len(hass.data[DECONZ_DOMAIN]) == 0 diff --git a/tests/components/deconz/test_light.py b/tests/components/deconz/test_light.py index 8ce83d87b69..d964361df57 100644 --- a/tests/components/deconz/test_light.py +++ b/tests/components/deconz/test_light.py @@ -1,20 +1,22 @@ """deCONZ light platform tests.""" -from collections.abc import Callable -from typing import Any from unittest.mock import patch import pytest -from syrupy import SnapshotAssertion -from homeassistant.components.deconz.const import CONF_ALLOW_DECONZ_GROUPS +from homeassistant.components.deconz.const import ATTR_ON, CONF_ALLOW_DECONZ_GROUPS +from homeassistant.components.deconz.light import DECONZ_GROUP from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_TEMP, 
ATTR_EFFECT, + ATTR_EFFECT_LIST, ATTR_FLASH, ATTR_HS_COLOR, + ATTR_MAX_MIREDS, + ATTR_MIN_MIREDS, + ATTR_RGB_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, ATTR_XY_COLOR, @@ -32,19 +34,29 @@ from homeassistant.const import ( ATTR_SUPPORTED_FEATURES, STATE_OFF, STATE_ON, - Platform, + STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from .conftest import ConfigEntryFactoryType, WebsocketDataType +from .test_gateway import ( + DECONZ_WEB_REQUEST, + mock_deconz_put_request, + setup_deconz_integration, +) -from tests.common import MockConfigEntry, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker +async def test_no_lights_or_groups( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that no lights or groups entities are created.""" + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 + + @pytest.mark.parametrize( - "light_payload", + ("input", "expected"), [ ( # RGB light in color temp color mode { @@ -73,7 +85,28 @@ from tests.test_util.aiohttp import AiohttpClientMocker "swversion": "5.127.1.26420", "type": "Extended color light", "uniqueid": "00:17:88:01:01:23:45:67-00", - } + }, + { + "entity_id": "light.hue_go", + "state": STATE_ON, + "attributes": { + ATTR_BRIGHTNESS: 254, + ATTR_COLOR_TEMP: 375, + ATTR_EFFECT_LIST: [EFFECT_COLORLOOP], + ATTR_SUPPORTED_COLOR_MODES: [ + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.XY, + ], + ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, + ATTR_MIN_MIREDS: 153, + ATTR_MAX_MIREDS: 500, + ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION + | LightEntityFeature.FLASH + | LightEntityFeature.EFFECT, + DECONZ_GROUP: False, + }, + }, ), ( # RGB light in XY color mode { @@ -102,7 +135,30 @@ from tests.test_util.aiohttp import AiohttpClientMocker "swversion": "1.65.9_hB3217DF4", "type": "Extended color light", "uniqueid": "00:17:88:01:01:23:45:67-01", - } + }, + { + "entity_id": "light.hue_ensis", + "state": STATE_ON, + "attributes": { + ATTR_MIN_MIREDS: 140, + ATTR_MAX_MIREDS: 650, + ATTR_EFFECT_LIST: [EFFECT_COLORLOOP], + ATTR_SUPPORTED_COLOR_MODES: [ + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.XY, + ], + ATTR_COLOR_MODE: ColorMode.XY, + ATTR_BRIGHTNESS: 254, + ATTR_HS_COLOR: (29.691, 38.039), + ATTR_RGB_COLOR: (255, 206, 158), + ATTR_XY_COLOR: (0.427, 0.373), + DECONZ_GROUP: False, + ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION + | LightEntityFeature.FLASH + | LightEntityFeature.EFFECT, + }, + }, ), ( # RGB light with only HS color mode { @@ -125,7 +181,41 @@ from tests.test_util.aiohttp import AiohttpClientMocker "swversion": None, "type": "Color dimmable light", "uniqueid": "58:8e:81:ff:fe:db:7b:be-01", - } + }, + { + "entity_id": "light.lidl_xmas_light", + "state": STATE_ON, + "attributes": { + ATTR_EFFECT_LIST: [ + "carnival", + "collide", + "fading", + "fireworks", + "flag", + "glow", + "rainbow", + "snake", + "snow", + "sparkles", + "steady", + "strobe", + "twinkle", + "updown", + "vintage", + "waves", + ], + ATTR_SUPPORTED_COLOR_MODES: [ColorMode.HS], + ATTR_COLOR_MODE: ColorMode.HS, + ATTR_BRIGHTNESS: 25, + ATTR_HS_COLOR: (294.938, 55.294), + ATTR_RGB_COLOR: (243, 113, 255), + ATTR_XY_COLOR: (0.357, 0.188), + DECONZ_GROUP: False, + ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION + | LightEntityFeature.FLASH + | LightEntityFeature.EFFECT, + }, + }, ), ( # Tunable white light in CT color mode { @@ -150,7 +240,22 @@ from tests.test_util.aiohttp import 
AiohttpClientMocker "swversion": "1.46.13_r26312", "type": "Color temperature light", "uniqueid": "00:17:88:01:01:23:45:67-02", - } + }, + { + "entity_id": "light.hue_white_ambiance", + "state": STATE_ON, + "attributes": { + ATTR_MIN_MIREDS: 153, + ATTR_MAX_MIREDS: 454, + ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP], + ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, + ATTR_BRIGHTNESS: 254, + ATTR_COLOR_TEMP: 396, + DECONZ_GROUP: False, + ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION + | LightEntityFeature.FLASH, + }, + }, ), ( # Dimmable light { @@ -165,7 +270,19 @@ from tests.test_util.aiohttp import AiohttpClientMocker "swversion": "1.55.8_r28815", "type": "Dimmable light", "uniqueid": "00:17:88:01:01:23:45:67-03", - } + }, + { + "entity_id": "light.hue_filament", + "state": STATE_ON, + "attributes": { + ATTR_SUPPORTED_COLOR_MODES: [ColorMode.BRIGHTNESS], + ATTR_COLOR_MODE: ColorMode.BRIGHTNESS, + ATTR_BRIGHTNESS: 254, + DECONZ_GROUP: False, + ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION + | LightEntityFeature.FLASH, + }, + }, ), ( # On/Off light { @@ -180,7 +297,17 @@ from tests.test_util.aiohttp import AiohttpClientMocker "swversion": "2.0", "type": "Simple light", "uniqueid": "00:15:8d:00:01:23:45:67-01", - } + }, + { + "entity_id": "light.simple_light", + "state": STATE_ON, + "attributes": { + ATTR_SUPPORTED_COLOR_MODES: [ColorMode.ONOFF], + ATTR_COLOR_MODE: ColorMode.ONOFF, + DECONZ_GROUP: False, + ATTR_SUPPORTED_FEATURES: 0, + }, + }, ), ( # Gradient light { @@ -279,63 +406,98 @@ from tests.test_util.aiohttp import AiohttpClientMocker "swversion": "1.104.2", "type": "Extended color light", "uniqueid": "00:17:88:01:0b:0c:0d:0e-0f", - } + }, + { + "entity_id": "light.gradient_light", + "state": STATE_ON, + "attributes": { + ATTR_SUPPORTED_COLOR_MODES: [ + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.XY, + ], + ATTR_COLOR_MODE: ColorMode.XY, + }, + }, ), ], ) async def test_lights( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - snapshot: SnapshotAssertion, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, input, expected ) -> None: """Test that different light entities are created with expected values.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.LIGHT]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + data = {"lights": {"0": input}} + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == 1 + + light = hass.states.get(expected["entity_id"]) + assert light.state == expected["state"] + for attribute, expected_value in expected["attributes"].items(): + assert light.attributes[attribute] == expected_value + + await hass.config_entries.async_unload(config_entry.entry_id) + + states = hass.states.async_all() + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 -@pytest.mark.parametrize( - "light_payload", - [ - { - "colorcapabilities": 31, - "ctmax": 500, - "ctmin": 153, - "etag": "055485a82553e654f156d41c9301b7cf", - "hascolor": True, - "lastannounced": None, - "lastseen": "2021-06-10T20:25Z", - "manufacturername": "Philips", - "modelid": "LLC020", - "name": "Hue Go", - "state": { - "alert": "none", - "bri": 254, - "colormode": "ct", - 
"ct": 375, - "effect": "none", - "hue": 8348, - "on": True, - "reachable": True, - "sat": 147, - "xy": [0.462, 0.4111], - }, - "swversion": "5.127.1.26420", - "type": "Extended color light", - "uniqueid": "00:17:88:01:01:23:45:67-00", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") async def test_light_state_change( - hass: HomeAssistant, - light_ws_data: WebsocketDataType, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Verify light can change state on websocket event.""" + data = { + "lights": { + "0": { + "colorcapabilities": 31, + "ctmax": 500, + "ctmin": 153, + "etag": "055485a82553e654f156d41c9301b7cf", + "hascolor": True, + "lastannounced": None, + "lastseen": "2021-06-10T20:25Z", + "manufacturername": "Philips", + "modelid": "LLC020", + "name": "Hue Go", + "state": { + "alert": "none", + "bri": 254, + "colormode": "ct", + "ct": 375, + "effect": "none", + "hue": 8348, + "on": True, + "reachable": True, + "sat": 147, + "xy": [0.462, 0.4111], + }, + "swversion": "5.127.1.26420", + "type": "Extended color light", + "uniqueid": "00:17:88:01:01:23:45:67-00", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + assert hass.states.get("light.hue_go").state == STATE_ON - await light_ws_data({"state": {"on": False}}) + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "0", + "state": {"on": False}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + assert hass.states.get("light.hue_go").state == STATE_OFF @@ -413,7 +575,7 @@ async def test_light_state_change( ATTR_ENTITY_ID: "light.hue_go", ATTR_XY_COLOR: (0.411, 0.351), ATTR_FLASH: FLASH_LONG, - ATTR_EFFECT: "none", + ATTR_EFFECT: "None", }, }, { @@ -477,45 +639,44 @@ async def test_light_state_change( ], ) async def test_light_service_calls( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - config_entry_factory: ConfigEntryFactoryType, - light_payload: dict[str, Any], - mock_put_request: Callable[[str, str], AiohttpClientMocker], - input: dict[str, Any], - expected: dict[str, Any], + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, input, expected ) -> None: """Verify light can change state on websocket event.""" - light_payload[0] = { - "colorcapabilities": 31, - "ctmax": 500, - "ctmin": 153, - "etag": "055485a82553e654f156d41c9301b7cf", - "hascolor": True, - "lastannounced": None, - "lastseen": "2021-06-10T20:25Z", - "manufacturername": "Philips", - "modelid": "LLC020", - "name": "Hue Go", - "state": { - "alert": "none", - "bri": 254, - "colormode": "ct", - "ct": 375, - "effect": "none", - "hue": 8348, - "on": input["light_on"], - "reachable": True, - "sat": 147, - "xy": [0.462, 0.4111], - }, - "swversion": "5.127.1.26420", - "type": "Extended color light", - "uniqueid": "00:17:88:01:01:23:45:67-00", + data = { + "lights": { + "0": { + "colorcapabilities": 31, + "ctmax": 500, + "ctmin": 153, + "etag": "055485a82553e654f156d41c9301b7cf", + "hascolor": True, + "lastannounced": None, + "lastseen": "2021-06-10T20:25Z", + "manufacturername": "Philips", + "modelid": "LLC020", + "name": "Hue Go", + "state": { + "alert": "none", + "bri": 254, + "colormode": "ct", + "ct": 375, + "effect": "none", + "hue": 8348, + "on": input["light_on"], + "reachable": True, + "sat": 147, + "xy": [0.462, 0.4111], + }, + "swversion": "5.127.1.26420", + "type": "Extended color light", + "uniqueid": "00:17:88:01:01:23:45:67-00", + } + } } - 
await config_entry_factory() + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) - aioclient_mock = mock_put_request("/lights/0/state") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") await hass.services.async_call( LIGHT_DOMAIN, @@ -529,41 +690,41 @@ async def test_light_service_calls( assert len(aioclient_mock.mock_calls) == 1 # not called -@pytest.mark.parametrize( - "light_payload", - [ - { - "colorcapabilities": 0, - "ctmax": 65535, - "ctmin": 0, - "etag": "9dd510cd474791481f189d2a68a3c7f1", - "hascolor": True, - "lastannounced": "2020-12-17T17:44:38Z", - "lastseen": "2021-01-11T18:36Z", - "manufacturername": "IKEA of Sweden", - "modelid": "TRADFRI bulb E27 WS opal 1000lm", - "name": "IKEA light", - "state": { - "alert": "none", - "bri": 156, - "colormode": "ct", - "ct": 250, - "on": True, - "reachable": True, - }, - "swversion": "2.0.022", - "type": "Color temperature light", - "uniqueid": "ec:1b:bd:ff:fe:ee:ed:dd-01", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") async def test_ikea_default_transition_time( - hass: HomeAssistant, - mock_put_request: Callable[[str, str], AiohttpClientMocker], + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Verify that service calls to IKEA lights always extend with transition tinme 0 if absent.""" - aioclient_mock = mock_put_request("/lights/0/state") + data = { + "lights": { + "0": { + "colorcapabilities": 0, + "ctmax": 65535, + "ctmin": 0, + "etag": "9dd510cd474791481f189d2a68a3c7f1", + "hascolor": True, + "lastannounced": "2020-12-17T17:44:38Z", + "lastseen": "2021-01-11T18:36Z", + "manufacturername": "IKEA of Sweden", + "modelid": "TRADFRI bulb E27 WS opal 1000lm", + "name": "IKEA light", + "state": { + "alert": "none", + "bri": 156, + "colormode": "ct", + "ct": 250, + "on": True, + "reachable": True, + }, + "swversion": "2.0.022", + "type": "Color temperature light", + "uniqueid": "ec:1b:bd:ff:fe:ee:ed:dd-01", + }, + }, + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") await hass.services.async_call( LIGHT_DOMAIN, @@ -597,39 +758,40 @@ async def test_ikea_default_transition_time( } -@pytest.mark.parametrize( - "light_payload", - [ - { - "etag": "87a89542bf9b9d0aa8134919056844f8", - "hascolor": True, - "lastannounced": None, - "lastseen": "2020-12-05T22:57Z", - "manufacturername": "_TZE200_s8gkrkxk", - "modelid": "TS0601", - "name": "LIDL xmas light", - "state": { - "bri": 25, - "colormode": "hs", - "effect": "none", - "hue": 53691, - "on": True, - "reachable": True, - "sat": 141, - }, - "swversion": None, - "type": "Color dimmable light", - "uniqueid": "58:8e:81:ff:fe:db:7b:be-01", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") async def test_lidl_christmas_light( - hass: HomeAssistant, - mock_put_request: Callable[[str, str], AiohttpClientMocker], + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that lights or groups entities are created.""" - aioclient_mock = mock_put_request("/lights/0/state") + data = { + "lights": { + "0": { + "etag": "87a89542bf9b9d0aa8134919056844f8", + "hascolor": True, + "lastannounced": None, + "lastseen": "2020-12-05T22:57Z", + "manufacturername": "_TZE200_s8gkrkxk", + "modelid": "TS0601", + "name": "LIDL xmas light", + "state": { + "bri": 25, + "colormode": "hs", + "effect": "none", + "hue": 
53691, + "on": True, + "reachable": True, + "sat": 141, + }, + "swversion": None, + "type": "Color dimmable light", + "uniqueid": "58:8e:81:ff:fe:db:7b:be-01", + } + } + } + + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") await hass.services.async_call( LIGHT_DOMAIN, @@ -641,37 +803,135 @@ async def test_lidl_christmas_light( blocking=True, ) assert aioclient_mock.mock_calls[1][2] == {"on": True, "hue": 3640, "sat": 76} + assert hass.states.get("light.lidl_xmas_light") -@pytest.mark.parametrize( - "light_payload", - [ - { - "etag": "26839cb118f5bf7ba1f2108256644010", - "hascolor": False, - "lastannounced": None, - "lastseen": "2020-11-22T11:27Z", - "manufacturername": "dresden elektronik", - "modelid": "ConBee II", - "name": "Configuration tool 1", - "state": {"reachable": True}, - "swversion": "0x264a0700", - "type": "Configuration tool", - "uniqueid": "00:21:2e:ff:ff:05:a7:a3-01", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_configuration_tool(hass: HomeAssistant) -> None: +async def test_configuration_tool( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: """Verify that configuration tool is not created.""" + data = { + "lights": { + "0": { + "etag": "26839cb118f5bf7ba1f2108256644010", + "hascolor": False, + "lastannounced": None, + "lastseen": "2020-11-22T11:27Z", + "manufacturername": "dresden elektronik", + "modelid": "ConBee II", + "name": "Configuration tool 1", + "state": {"reachable": True}, + "swversion": "0x264a0700", + "type": "Configuration tool", + "uniqueid": "00:21:2e:ff:ff:05:a7:a3-01", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 @pytest.mark.parametrize( - "light_payload", + ("input", "expected"), [ - { + ( + { + "lights": ["1", "2", "3"], + }, + { + "entity_id": "light.group", + "state": ATTR_ON, + "attributes": { + ATTR_MIN_MIREDS: 153, + ATTR_MAX_MIREDS: 500, + ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP, ColorMode.XY], + ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, + ATTR_BRIGHTNESS: 255, + ATTR_EFFECT_LIST: [EFFECT_COLORLOOP], + "all_on": False, + DECONZ_GROUP: True, + ATTR_SUPPORTED_FEATURES: 44, + }, + }, + ), + ( + { + "lights": ["3", "1", "2"], + }, + { + "entity_id": "light.group", + "state": ATTR_ON, + "attributes": { + ATTR_MIN_MIREDS: 153, + ATTR_MAX_MIREDS: 500, + ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP, ColorMode.XY], + ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, + ATTR_BRIGHTNESS: 50, + ATTR_EFFECT_LIST: [EFFECT_COLORLOOP], + "all_on": False, + DECONZ_GROUP: True, + ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION + | LightEntityFeature.FLASH + | LightEntityFeature.EFFECT, + }, + }, + ), + ( + { + "lights": ["2", "3", "1"], + }, + { + "entity_id": "light.group", + "state": ATTR_ON, + "attributes": { + ATTR_MIN_MIREDS: 153, + ATTR_MAX_MIREDS: 500, + ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP, ColorMode.XY], + ATTR_COLOR_MODE: ColorMode.XY, + ATTR_HS_COLOR: (52.0, 100.0), + ATTR_RGB_COLOR: (255, 221, 0), + ATTR_XY_COLOR: (0.5, 0.5), + "all_on": False, + DECONZ_GROUP: True, + ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION + | LightEntityFeature.FLASH + | LightEntityFeature.EFFECT, + }, + }, + ), + ], +) +async def test_groups( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, input, expected +) -> None: + """Test 
that different group entities are created with expected values.""" + data = { + "groups": { + "0": { + "id": "Light group id", + "name": "Group", + "type": "LightGroup", + "state": {"all_on": False, "any_on": True}, + "action": { + "alert": "none", + "bri": 127, + "colormode": "hs", + "ct": 0, + "effect": "none", + "hue": 0, + "on": True, + "sat": 127, + "scene": None, + "xy": [0, 0], + }, + "scenes": [], + "lights": input["lights"], + }, + }, + "lights": { "1": { "name": "RGB light", "state": { @@ -704,95 +964,29 @@ async def test_configuration_tool(hass: HomeAssistant) -> None: "state": {"bri": 255, "on": True, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:02-00", }, - } - ], -) -@pytest.mark.parametrize( - "input", - [ - ({"lights": ["1", "2", "3"]}), - ({"lights": ["3", "1", "2"]}), - ({"lights": ["2", "3", "1"]}), - ], -) -async def test_groups( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - group_payload: dict[str, Any], - input: dict[str, list[str]], - snapshot: SnapshotAssertion, -) -> None: - """Test that different group entities are created with expected values.""" - group_payload |= { - "0": { - "id": "Light group id", - "name": "Group", - "type": "LightGroup", - "state": {"all_on": False, "any_on": True}, - "action": { - "alert": "none", - "bri": 127, - "colormode": "hs", - "ct": 0, - "effect": "none", - "hue": 0, - "on": True, - "sat": 127, - "scene": None, - "xy": [0, 0], - }, - "scenes": [], - "lights": input["lights"], }, } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.LIGHT]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + assert len(hass.states.async_all()) == 4 + + group = hass.states.get(expected["entity_id"]) + assert group.state == expected["state"] + for attribute, expected_value in expected["attributes"].items(): + assert group.attributes[attribute] == expected_value + + await hass.config_entries.async_unload(config_entry.entry_id) + + states = hass.states.async_all() + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 -@pytest.mark.parametrize( - "light_payload", - [ - { - "1": { - "name": "RGB light", - "state": { - "bri": 255, - "colormode": "xy", - "effect": "colorloop", - "hue": 53691, - "on": True, - "reachable": True, - "sat": 141, - "xy": (0.5, 0.5), - }, - "type": "Extended color light", - "uniqueid": "00:00:00:00:00:00:00:00-00", - }, - "2": { - "ctmax": 454, - "ctmin": 155, - "name": "Tunable white light", - "state": { - "on": True, - "colormode": "ct", - "ct": 2500, - "reachable": True, - }, - "type": "Tunable white light", - "uniqueid": "00:00:00:00:00:00:00:01-00", - }, - "3": { - "name": "Dimmable light", - "type": "Dimmable light", - "state": {"bri": 254, "on": True, "reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:02-00", - }, - } - ], -) @pytest.mark.parametrize( ("input", "expected"), [ @@ -851,28 +1045,62 @@ async def test_groups( ], ) async def test_group_service_calls( - hass: HomeAssistant, - config_entry_factory: ConfigEntryFactoryType, - group_payload: dict[str, Any], - mock_put_request: Callable[[str, str], AiohttpClientMocker], - input: dict[str, Any], - expected: dict[str, Any], + hass: 
HomeAssistant, aioclient_mock: AiohttpClientMocker, input, expected ) -> None: """Verify expected group web request from different service calls.""" - group_payload |= { - "0": { - "id": "Light group id", - "name": "Group", - "type": "LightGroup", - "state": {"all_on": False, "any_on": input["group_on"]}, - "action": {}, - "scenes": [], - "lights": input["lights"], + data = { + "groups": { + "0": { + "id": "Light group id", + "name": "Group", + "type": "LightGroup", + "state": {"all_on": False, "any_on": input["group_on"]}, + "action": {}, + "scenes": [], + "lights": input["lights"], + }, + }, + "lights": { + "1": { + "name": "RGB light", + "state": { + "bri": 255, + "colormode": "xy", + "effect": "colorloop", + "hue": 53691, + "on": True, + "reachable": True, + "sat": 141, + "xy": (0.5, 0.5), + }, + "type": "Extended color light", + "uniqueid": "00:00:00:00:00:00:00:00-00", + }, + "2": { + "ctmax": 454, + "ctmin": 155, + "name": "Tunable white light", + "state": { + "on": True, + "colormode": "ct", + "ct": 2500, + "reachable": True, + }, + "type": "Tunable white light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + }, + "3": { + "name": "Dimmable light", + "type": "Dimmable light", + "state": {"bri": 254, "on": True, "reachable": True}, + "uniqueid": "00:00:00:00:00:00:00:02-00", + }, }, } - await config_entry_factory() + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) - aioclient_mock = mock_put_request("/groups/0/action") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/groups/0/action") await hass.services.async_call( LIGHT_DOMAIN, @@ -886,10 +1114,12 @@ async def test_group_service_calls( assert len(aioclient_mock.mock_calls) == 1 # not called -@pytest.mark.parametrize( - "group_payload", - [ - { +async def test_empty_group( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Verify that a group without a list of lights is not created.""" + data = { + "groups": { "0": { "id": "Empty group id", "name": "Empty group", @@ -899,20 +1129,21 @@ async def test_group_service_calls( "scenes": [], "lights": [], }, - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_empty_group(hass: HomeAssistant) -> None: - """Verify that a group without a list of lights is not created.""" + }, + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 assert not hass.states.get("light.empty_group") -@pytest.mark.parametrize( - "group_payload", - [ - { +async def test_disable_light_groups( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test disallowing light groups work.""" + data = { + "groups": { "1": { "id": "Light group id", "name": "Light group", @@ -920,7 +1151,7 @@ async def test_empty_group(hass: HomeAssistant) -> None: "state": {"all_on": False, "any_on": True}, "action": {}, "scenes": [], - "lights": ["0"], + "lights": ["1"], }, "2": { "id": "Empty group id", @@ -931,35 +1162,32 @@ async def test_empty_group(hass: HomeAssistant) -> None: "scenes": [], "lights": [], }, - } - ], -) -@pytest.mark.parametrize( - "light_payload", - [ - { - "ctmax": 454, - "ctmin": 155, - "name": "Tunable white light", - "state": {"on": True, "colormode": "ct", "ct": 2500, "reachable": True}, - "type": "Tunable white light", - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - ], -) -@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_DECONZ_GROUPS: False}]) -async def 
test_disable_light_groups( - hass: HomeAssistant, - config_entry_setup: MockConfigEntry, -) -> None: - """Test disallowing light groups work.""" + }, + "lights": { + "1": { + "ctmax": 454, + "ctmin": 155, + "name": "Tunable white light", + "state": {"on": True, "colormode": "ct", "ct": 2500, "reachable": True}, + "type": "Tunable white light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + }, + }, + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration( + hass, + aioclient_mock, + options={CONF_ALLOW_DECONZ_GROUPS: False}, + ) + assert len(hass.states.async_all()) == 1 assert hass.states.get("light.tunable_white_light") assert not hass.states.get("light.light_group") assert not hass.states.get("light.empty_group") hass.config_entries.async_update_entry( - config_entry_setup, options={CONF_ALLOW_DECONZ_GROUPS: True} + config_entry, options={CONF_ALLOW_DECONZ_GROUPS: True} ) await hass.async_block_till_done() @@ -967,7 +1195,7 @@ async def test_disable_light_groups( assert hass.states.get("light.light_group") hass.config_entries.async_update_entry( - config_entry_setup, options={CONF_ALLOW_DECONZ_GROUPS: False} + config_entry, options={CONF_ALLOW_DECONZ_GROUPS: False} ) await hass.async_block_till_done() @@ -975,10 +1203,16 @@ async def test_disable_light_groups( assert not hass.states.get("light.light_group") -@pytest.mark.parametrize( - "group_payload", - [ - { +async def test_non_color_light_reports_color( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket +) -> None: + """Verify hs_color does not crash when a group gets updated with a bad color value. + + After calling a scene color temp light of certain manufacturers + report color temp in color space. + """ + data = { + "groups": { "0": { "action": { "alert": "none", @@ -1000,13 +1234,8 @@ async def test_disable_light_groups( "state": {"all_on": False, "any_on": True}, "type": "LightGroup", } - } - ], -) -@pytest.mark.parametrize( - "light_payload", - [ - { + }, + "lights": { "0": { "ctmax": 500, "ctmin": 153, @@ -1056,19 +1285,11 @@ async def test_disable_light_groups( "type": "Color temperature light", "uniqueid": "ec:1b:bd:ff:fe:ee:ed:dd-01", }, - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_non_color_light_reports_color( - hass: HomeAssistant, - light_ws_data: WebsocketDataType, -) -> None: - """Verify hs_color does not crash when a group gets updated with a bad color value. + }, + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) - After calling a scene color temp light of certain manufacturers - report color temp in color space. 
- """ assert len(hass.states.async_all()) == 3 assert hass.states.get("light.group").attributes[ATTR_SUPPORTED_COLOR_MODES] == [ ColorMode.COLOR_TEMP, @@ -1084,7 +1305,9 @@ async def test_non_color_light_reports_color( # Updating a scene will return a faulty color value # for a non-color light causing an exception in hs_color event_changed_light = { + "e": "changed", "id": "1", + "r": "lights", "state": { "alert": None, "bri": 216, @@ -1093,19 +1316,24 @@ async def test_non_color_light_reports_color( "on": True, "reachable": True, }, + "t": "event", "uniqueid": "ec:1b:bd:ff:fe:ee:ed:dd-01", } - await light_ws_data(event_changed_light) + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + group = hass.states.get("light.group") assert group.attributes[ATTR_COLOR_MODE] == ColorMode.XY assert group.attributes[ATTR_HS_COLOR] == (40.571, 41.176) assert group.attributes.get(ATTR_COLOR_TEMP) is None -@pytest.mark.parametrize( - "group_payload", - [ - { +async def test_verify_group_supported_features( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that group supported features reflect what included lights support.""" + data = { + "groups": { "1": { "id": "Group1", "name": "Group", @@ -1115,13 +1343,8 @@ async def test_non_color_light_reports_color( "scenes": [], "lights": ["1", "2", "3"], }, - } - ], -) -@pytest.mark.parametrize( - "light_payload", - [ - { + }, + "lights": { "1": { "name": "Dimmable light", "state": {"on": True, "bri": 255, "reachable": True}, @@ -1149,12 +1372,11 @@ async def test_non_color_light_reports_color( "type": "Tunable white light", "uniqueid": "00:00:00:00:00:00:00:03-00", }, - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_verify_group_supported_features(hass: HomeAssistant) -> None: - """Test that group supported features reflect what included lights support.""" + }, + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 4 group_state = hass.states.get("light.group") @@ -1168,10 +1390,12 @@ async def test_verify_group_supported_features(hass: HomeAssistant) -> None: ) -@pytest.mark.parametrize( - "group_payload", - [ - { +async def test_verify_group_color_mode_fallback( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket +) -> None: + """Test that group supported features reflect what included lights support.""" + data = { + "groups": { "43": { "action": { "alert": "none", @@ -1188,7 +1412,7 @@ async def test_verify_group_supported_features(hass: HomeAssistant) -> None: "devicemembership": [], "etag": "4548e982c4cfff942f7af80958abb2a0", "id": "43", - "lights": ["0"], + "lights": ["13"], "name": "Opbergruimte", "scenes": [ { @@ -1219,68 +1443,62 @@ async def test_verify_group_supported_features(hass: HomeAssistant) -> None: "state": {"all_on": False, "any_on": False}, "type": "LightGroup", }, - } - ], -) -@pytest.mark.parametrize( - "light_payload", - [ - { - "capabilities": { - "alerts": [ - "none", - "select", - "lselect", - "blink", - "breathe", - "okay", - "channelchange", - "finish", - "stop", - ], - "bri": {"min_dim_level": 5}, + }, + "lights": { + "13": { + "capabilities": { + "alerts": [ + "none", + "select", + "lselect", + "blink", + "breathe", + "okay", + "channelchange", + "finish", + "stop", + ], + "bri": {"min_dim_level": 5}, + }, + "config": { + "bri": {"execute_if_off": True, "startup": "previous"}, + "groups": ["43"], + "on": 
{"startup": "previous"}, + }, + "etag": "ca0ed7763eca37f5e6b24f6d46f8a518", + "hascolor": False, + "lastannounced": None, + "lastseen": "2024-03-02T20:08Z", + "manufacturername": "Signify Netherlands B.V.", + "modelid": "LWA001", + "name": "Opbergruimte Lamp Plafond", + "productid": "Philips-LWA001-1-A19DLv5", + "productname": "Hue white lamp", + "state": { + "alert": "none", + "bri": 76, + "effect": "none", + "on": False, + "reachable": True, + }, + "swconfigid": "87169548", + "swversion": "1.104.2", + "type": "Dimmable light", + "uniqueid": "00:17:88:01:08:11:22:33-01", }, - "config": { - "bri": {"execute_if_off": True, "startup": "previous"}, - "groups": ["43"], - "on": {"startup": "previous"}, - }, - "etag": "ca0ed7763eca37f5e6b24f6d46f8a518", - "hascolor": False, - "lastannounced": None, - "lastseen": "2024-03-02T20:08Z", - "manufacturername": "Signify Netherlands B.V.", - "modelid": "LWA001", - "name": "Opbergruimte Lamp Plafond", - "productid": "Philips-LWA001-1-A19DLv5", - "productname": "Hue white lamp", - "state": { - "alert": "none", - "bri": 76, - "effect": "none", - "on": False, - "reachable": True, - }, - "swconfigid": "87169548", - "swversion": "1.104.2", - "type": "Dimmable light", - "uniqueid": "00:17:88:01:08:11:22:33-01", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_verify_group_color_mode_fallback( - hass: HomeAssistant, - mock_websocket_data: WebsocketDataType, -) -> None: - """Test that group supported features reflect what included lights support.""" + }, + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + group_state = hass.states.get("light.opbergruimte") assert group_state.state == STATE_OFF assert group_state.attributes[ATTR_COLOR_MODE] is None - await mock_websocket_data( - { - "id": "0", + await mock_deconz_websocket( + data={ + "e": "changed", + "id": "13", "r": "lights", "state": { "alert": "none", @@ -1289,14 +1507,17 @@ async def test_verify_group_color_mode_fallback( "on": True, "reachable": True, }, + "t": "event", "uniqueid": "00:17:88:01:08:11:22:33-01", } ) - await mock_websocket_data( - { + await mock_deconz_websocket( + data={ + "e": "changed", "id": "43", "r": "groups", "state": {"all_on": True, "any_on": True}, + "t": "event", } ) group_state = hass.states.get("light.opbergruimte") diff --git a/tests/components/deconz/test_lock.py b/tests/components/deconz/test_lock.py index 70a7bd732bb..03d14802083 100644 --- a/tests/components/deconz/test_lock.py +++ b/tests/components/deconz/test_lock.py @@ -1,57 +1,79 @@ """deCONZ lock platform tests.""" -from collections.abc import Callable - -import pytest +from unittest.mock import patch from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, - LockState, ) -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_LOCKED, + STATE_UNAVAILABLE, + STATE_UNLOCKED, +) from homeassistant.core import HomeAssistant -from .conftest import WebsocketDataType +from .test_gateway import ( + DECONZ_WEB_REQUEST, + mock_deconz_put_request, + setup_deconz_integration, +) from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.mark.parametrize( - "light_payload", - [ - { - "etag": "5c2ec06cde4bd654aef3a555fcd8ad12", - "hascolor": False, - "lastannounced": None, - "lastseen": "2020-08-22T15:29:03Z", - "manufacturername": "Danalock", - "modelid": "V3-BTZB", - "name": "Door lock", - "state": {"alert": "none", "on": False, "reachable": True}, - 
"swversion": "19042019", - "type": "Door Lock", - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") +async def test_no_locks( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that no lock entities are created.""" + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 + + async def test_lock_from_light( - hass: HomeAssistant, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - light_ws_data: WebsocketDataType, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test that all supported lock entities based on lights are created.""" - assert len(hass.states.async_all()) == 1 - assert hass.states.get("lock.door_lock").state == LockState.UNLOCKED + data = { + "lights": { + "1": { + "etag": "5c2ec06cde4bd654aef3a555fcd8ad12", + "hascolor": False, + "lastannounced": None, + "lastseen": "2020-08-22T15:29:03Z", + "manufacturername": "Danalock", + "modelid": "V3-BTZB", + "name": "Door lock", + "state": {"alert": "none", "on": False, "reachable": True}, + "swversion": "19042019", + "type": "Door Lock", + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) - await light_ws_data({"state": {"on": True}}) - assert hass.states.get("lock.door_lock").state == LockState.LOCKED + assert len(hass.states.async_all()) == 1 + assert hass.states.get("lock.door_lock").state == STATE_UNLOCKED + + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"on": True}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + + assert hass.states.get("lock.door_lock").state == STATE_LOCKED # Verify service calls - aioclient_mock = mock_put_request("/lights/0/state") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") # Service lock door @@ -73,49 +95,68 @@ async def test_lock_from_light( ) assert aioclient_mock.mock_calls[2][2] == {"on": False} + await hass.config_entries.async_unload(config_entry.entry_id) + + states = hass.states.async_all() + assert len(states) == 1 + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "battery": 100, - "lock": False, - "on": True, - "reachable": True, - }, - "ep": 11, - "etag": "a43862f76b7fa48b0fbb9107df123b0e", - "lastseen": "2021-03-06T22:25Z", - "manufacturername": "Onesti Products AS", - "modelid": "easyCodeTouch_v1", - "name": "Door lock", - "state": { - "lastupdated": "2021-03-06T21:25:45.624", - "lockstate": "unlocked", - }, - "swversion": "20201211", - "type": "ZHADoorLock", - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") async def test_lock_from_sensor( - hass: HomeAssistant, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - sensor_ws_data: WebsocketDataType, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test that all supported lock entities based on sensors are created.""" - assert len(hass.states.async_all()) == 2 - assert hass.states.get("lock.door_lock").state == LockState.UNLOCKED + data = { + "sensors": { + "1": { + 
"config": { + "battery": 100, + "lock": False, + "on": True, + "reachable": True, + }, + "ep": 11, + "etag": "a43862f76b7fa48b0fbb9107df123b0e", + "lastseen": "2021-03-06T22:25Z", + "manufacturername": "Onesti Products AS", + "modelid": "easyCodeTouch_v1", + "name": "Door lock", + "state": { + "lastupdated": "2021-03-06T21:25:45.624", + "lockstate": "unlocked", + }, + "swversion": "20201211", + "type": "ZHADoorLock", + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) - await sensor_ws_data({"state": {"lockstate": "locked"}}) - assert hass.states.get("lock.door_lock").state == LockState.LOCKED + assert len(hass.states.async_all()) == 2 + assert hass.states.get("lock.door_lock").state == STATE_UNLOCKED + + event_changed_light = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "state": {"lockstate": "locked"}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + + assert hass.states.get("lock.door_lock").state == STATE_LOCKED # Verify service calls - aioclient_mock = mock_put_request("/sensors/0/config") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/1/config") # Service lock door @@ -136,3 +177,14 @@ async def test_lock_from_sensor( blocking=True, ) assert aioclient_mock.mock_calls[2][2] == {"lock": False} + + await hass.config_entries.async_unload(config_entry.entry_id) + + states = hass.states.async_all() + assert len(states) == 2 + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_logbook.py b/tests/components/deconz/test_logbook.py index 57cf8748762..5940d2e8e34 100644 --- a/tests/components/deconz/test_logbook.py +++ b/tests/components/deconz/test_logbook.py @@ -1,8 +1,6 @@ """The tests for deCONZ logbook.""" -from typing import Any - -import pytest +from unittest.mock import patch from homeassistant.components.deconz.const import CONF_GESTURE, DOMAIN as DECONZ_DOMAIN from homeassistant.components.deconz.deconz_event import ( @@ -16,53 +14,58 @@ from homeassistant.const import ( CONF_EVENT, CONF_ID, CONF_UNIQUE_ID, + STATE_ALARM_ARMED_AWAY, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component from homeassistant.util import slugify +from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration + from tests.components.logbook.common import MockRow, mock_humanify +from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "armed": "disarmed", - "enrolled": 0, - "on": True, - "panel": "disarmed", - "pending": [], - "reachable": True, - }, - "ep": 1, - "etag": "3c4008d74035dfaa1f0bb30d24468b12", - "lastseen": "2021-04-02T13:07Z", - "manufacturername": "Universal Electronics Inc", - "modelid": "URC4450BC0-X-R", - "name": "Keypad", - "state": { - "action": "armed_away,1111,55", - "lastupdated": "2021-04-02T13:08:18.937", - "lowbattery": False, - "tampered": True, - }, - "type": "ZHAAncillaryControl", - "uniqueid": "00:0d:6f:00:13:4f:61:39-01-0501", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") async def test_humanifying_deconz_alarm_event( hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, 
device_registry: dr.DeviceRegistry, - sensor_payload: dict[str, Any], ) -> None: - """Test humanifying deCONZ alarm event.""" - keypad_event_id = slugify(sensor_payload["name"]) - keypad_serial = serial_from_unique_id(sensor_payload["uniqueid"]) + """Test humanifying deCONZ event.""" + data = { + "sensors": { + "1": { + "config": { + "armed": "disarmed", + "enrolled": 0, + "on": True, + "panel": "disarmed", + "pending": [], + "reachable": True, + }, + "ep": 1, + "etag": "3c4008d74035dfaa1f0bb30d24468b12", + "lastseen": "2021-04-02T13:07Z", + "manufacturername": "Universal Electronics Inc", + "modelid": "URC4450BC0-X-R", + "name": "Keypad", + "state": { + "action": "armed_away,1111,55", + "lastupdated": "2021-04-02T13:08:18.937", + "lowbattery": False, + "tampered": True, + }, + "type": "ZHAAncillaryControl", + "uniqueid": "00:0d:6f:00:13:4f:61:39-01-0501", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + + keypad_event_id = slugify(data["sensors"]["1"]["name"]) + keypad_serial = serial_from_unique_id(data["sensors"]["1"]["uniqueid"]) keypad_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, keypad_serial)} ) @@ -82,7 +85,7 @@ async def test_humanifying_deconz_alarm_event( { CONF_CODE: 1234, CONF_DEVICE_ID: keypad_entry.id, - CONF_EVENT: "armed_away", + CONF_EVENT: STATE_ALARM_ARMED_AWAY, CONF_ID: keypad_event_id, CONF_UNIQUE_ID: keypad_serial, }, @@ -93,7 +96,7 @@ async def test_humanifying_deconz_alarm_event( { CONF_CODE: 1234, CONF_DEVICE_ID: "ff99ff99ff99ff99ff99ff99ff99ff99", - CONF_EVENT: "armed_away", + CONF_EVENT: STATE_ALARM_ARMED_AWAY, CONF_ID: removed_device_event_id, CONF_UNIQUE_ID: removed_device_serial, }, @@ -110,10 +113,14 @@ async def test_humanifying_deconz_alarm_event( assert events[1]["message"] == "fired event 'armed_away'" -@pytest.mark.parametrize( - "sensor_payload", - [ - { +async def test_humanifying_deconz_event( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + device_registry: dr.DeviceRegistry, +) -> None: + """Test humanifying deCONZ event.""" + data = { + "sensors": { "1": { "name": "Switch 1", "type": "ZHASwitch", @@ -145,35 +152,30 @@ async def test_humanifying_deconz_alarm_event( "uniqueid": "00:00:00:00:00:00:00:04-00", }, } - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_humanifying_deconz_event( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - sensor_payload: dict[str, Any], -) -> None: - """Test humanifying deCONZ event.""" - switch_event_id = slugify(sensor_payload["1"]["name"]) - switch_serial = serial_from_unique_id(sensor_payload["1"]["uniqueid"]) + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + + switch_event_id = slugify(data["sensors"]["1"]["name"]) + switch_serial = serial_from_unique_id(data["sensors"]["1"]["uniqueid"]) switch_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, switch_serial)} ) - hue_remote_event_id = slugify(sensor_payload["2"]["name"]) - hue_remote_serial = serial_from_unique_id(sensor_payload["2"]["uniqueid"]) + hue_remote_event_id = slugify(data["sensors"]["2"]["name"]) + hue_remote_serial = serial_from_unique_id(data["sensors"]["2"]["uniqueid"]) hue_remote_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, hue_remote_serial)} ) - xiaomi_cube_event_id = slugify(sensor_payload["3"]["name"]) - xiaomi_cube_serial = serial_from_unique_id(sensor_payload["3"]["uniqueid"]) + xiaomi_cube_event_id = 
slugify(data["sensors"]["3"]["name"]) + xiaomi_cube_serial = serial_from_unique_id(data["sensors"]["3"]["uniqueid"]) xiaomi_cube_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, xiaomi_cube_serial)} ) - faulty_event_id = slugify(sensor_payload["4"]["name"]) - faulty_serial = serial_from_unique_id(sensor_payload["4"]["uniqueid"]) + faulty_event_id = slugify(data["sensors"]["4"]["name"]) + faulty_serial = serial_from_unique_id(data["sensors"]["4"]["uniqueid"]) faulty_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, faulty_serial)} ) diff --git a/tests/components/deconz/test_number.py b/tests/components/deconz/test_number.py index 962c2c0a89b..655ae2f42e2 100644 --- a/tests/components/deconz/test_number.py +++ b/tests/components/deconz/test_number.py @@ -1,27 +1,36 @@ """deCONZ number platform tests.""" -from collections.abc import Callable -from typing import Any from unittest.mock import patch import pytest -from syrupy import SnapshotAssertion from homeassistant.components.number import ( ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, ) -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er -from .conftest import ConfigEntryFactoryType, WebsocketDataType +from .test_gateway import ( + DECONZ_WEB_REQUEST, + mock_deconz_put_request, + setup_deconz_integration, +) -from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker + +async def test_no_number_entities( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that no sensors in deconz results in no number entities.""" + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 + + TEST_DATA = [ ( # Presence sensor - delay configuration { @@ -37,7 +46,19 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:00-00", }, { + "entity_count": 3, + "device_count": 3, "entity_id": "number.presence_sensor_delay", + "unique_id": "00:00:00:00:00:00:00:00-00-delay", + "state": "0", + "entity_category": EntityCategory.CONFIG, + "attributes": { + "min": 0, + "max": 65535, + "step": 1, + "mode": "auto", + "friendly_name": "Presence sensor Delay", + }, "websocket_event": {"config": {"delay": 10}}, "next_state": "10", "supported_service_value": 111, @@ -61,7 +82,19 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:00-00", }, { + "entity_count": 3, + "device_count": 3, "entity_id": "number.presence_sensor_duration", + "unique_id": "00:00:00:00:00:00:00:00-00-duration", + "state": "0", + "entity_category": EntityCategory.CONFIG, + "attributes": { + "min": 0, + "max": 65535, + "step": 1, + "mode": "auto", + "friendly_name": "Presence sensor Duration", + }, "websocket_event": {"config": {"duration": 10}}, "next_state": "10", "supported_service_value": 111, @@ -74,29 +107,57 @@ TEST_DATA = [ ] -@pytest.mark.parametrize(("sensor_payload", "expected"), TEST_DATA) +@pytest.mark.parametrize(("sensor_data", "expected"), TEST_DATA) async def test_number_entities( hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - sensor_ws_data: WebsocketDataType, - 
mock_put_request: Callable[[str, str], AiohttpClientMocker], - expected: dict[str, Any], - snapshot: SnapshotAssertion, + mock_deconz_websocket, + sensor_data, + expected, ) -> None: """Test successful creation of number entities.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.NUMBER]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + with patch.dict(DECONZ_WEB_REQUEST, {"sensors": {"0": sensor_data}}): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == expected["entity_count"] + + # Verify state data + + entity = hass.states.get(expected["entity_id"]) + assert entity.state == expected["state"] + assert entity.attributes == expected["attributes"] + + # Verify entity registry data + + ent_reg_entry = entity_registry.async_get(expected["entity_id"]) + assert ent_reg_entry.entity_category is expected["entity_category"] + assert ent_reg_entry.unique_id == expected["unique_id"] + + # Verify device registry data + + assert ( + len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + == expected["device_count"] + ) # Change state - await sensor_ws_data(expected["websocket_event"]) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "0", + } | expected["websocket_event"] + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() assert hass.states.get(expected["entity_id"]).state == expected["next_state"] # Verify service calls - aioclient_mock = mock_put_request("/sensors/0/config") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") # Service set supported value @@ -136,3 +197,14 @@ async def test_number_entities( }, blocking=True, ) + + # Unload entry + + await hass.config_entries.async_unload(config_entry.entry_id) + assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE + + # Remove entry + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_scene.py b/tests/components/deconz/test_scene.py index c1240b6881c..2bace605db5 100644 --- a/tests/components/deconz/test_scene.py +++ b/tests/components/deconz/test_scene.py @@ -1,60 +1,97 @@ """deCONZ scene platform tests.""" -from collections.abc import Callable -from typing import Any from unittest.mock import patch import pytest -from syrupy import SnapshotAssertion from homeassistant.components.scene import DOMAIN as SCENE_DOMAIN, SERVICE_TURN_ON -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er -from .conftest import ConfigEntryFactoryType, WebsocketDataType +from .test_gateway import ( + DECONZ_WEB_REQUEST, + mock_deconz_put_request, + setup_deconz_integration, +) -from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker + +async def test_no_scenes( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that scenes can be loaded without scenes being available.""" + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 + + TEST_DATA = [ ( # Scene { - 
"1": { - "id": "Light group id", - "name": "Light group", - "type": "LightGroup", - "state": {"all_on": False, "any_on": True}, - "action": {}, - "scenes": [{"id": "1", "name": "Scene"}], - "lights": [], + "groups": { + "1": { + "id": "Light group id", + "name": "Light group", + "type": "LightGroup", + "state": {"all_on": False, "any_on": True}, + "action": {}, + "scenes": [{"id": "1", "name": "Scene"}], + "lights": [], + } } }, { + "entity_count": 2, + "device_count": 3, "entity_id": "scene.light_group_scene", + "unique_id": "01234E56789A/groups/1/scenes/1", + "entity_category": None, + "attributes": { + "friendly_name": "Light group Scene", + }, "request": "/groups/1/scenes/1/recall", }, ), ] -@pytest.mark.parametrize(("group_payload", "expected"), TEST_DATA) +@pytest.mark.parametrize(("raw_data", "expected"), TEST_DATA) async def test_scenes( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - expected: dict[str, Any], - snapshot: SnapshotAssertion, + aioclient_mock: AiohttpClientMocker, + raw_data, + expected, ) -> None: """Test successful creation of scene entities.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.SCENE]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + with patch.dict(DECONZ_WEB_REQUEST, raw_data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == expected["entity_count"] + + # Verify state data + + scene = hass.states.get(expected["entity_id"]) + assert scene.attributes == expected["attributes"] + + # Verify entity registry data + + ent_reg_entry = entity_registry.async_get(expected["entity_id"]) + assert ent_reg_entry.entity_category is expected["entity_category"] + assert ent_reg_entry.unique_id == expected["unique_id"] + + # Verify device registry data + + assert ( + len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + == expected["device_count"] + ) # Verify button press - aioclient_mock = mock_put_request(expected["request"]) + mock_deconz_put_request(aioclient_mock, config_entry.data, expected["request"]) await hass.services.async_call( SCENE_DOMAIN, @@ -64,11 +101,24 @@ async def test_scenes( ) assert aioclient_mock.mock_calls[1][2] == {} + # Unload entry -@pytest.mark.parametrize( - "group_payload", - [ - { + await hass.config_entries.async_unload(config_entry.entry_id) + assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE + + # Remove entry + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + +async def test_only_new_scenes_are_created( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket +) -> None: + """Test that scenes works.""" + data = { + "groups": { "1": { "id": "Light group id", "name": "Light group", @@ -79,20 +129,20 @@ async def test_scenes( "lights": [], } } - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_only_new_scenes_are_created( - hass: HomeAssistant, - mock_websocket_data: WebsocketDataType, -) -> None: - """Test that scenes works.""" + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 2 event_changed_group = { + "t": "event", + "e": 
"changed", "r": "groups", "id": "1", "scenes": [{"id": "1", "name": "Scene"}], } - await mock_websocket_data(event_changed_group) + await mock_deconz_websocket(data=event_changed_group) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 2 diff --git a/tests/components/deconz/test_select.py b/tests/components/deconz/test_select.py index c677853841c..fb8f41293a2 100644 --- a/tests/components/deconz/test_select.py +++ b/tests/components/deconz/test_select.py @@ -1,180 +1,208 @@ """deCONZ select platform tests.""" -from collections.abc import Callable -from typing import Any from unittest.mock import patch -from pydeconz.models.sensor.air_purifier import AirPurifierFanMode from pydeconz.models.sensor.presence import ( PresenceConfigDeviceMode, PresenceConfigTriggerDistance, ) import pytest -from syrupy import SnapshotAssertion from homeassistant.components.select import ( ATTR_OPTION, DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er -from .conftest import ConfigEntryFactoryType +from .test_gateway import ( + DECONZ_WEB_REQUEST, + mock_deconz_put_request, + setup_deconz_integration, +) -from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker + +async def test_no_select_entities( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that no sensors in deconz results in no sensor entities.""" + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 + + TEST_DATA = [ ( # Presence Device Mode { - "config": { - "devicemode": "undirected", - "on": True, - "reachable": True, - "sensitivity": 3, - "triggerdistance": "medium", - }, - "etag": "13ff209f9401b317987d42506dd4cd79", - "lastannounced": None, - "lastseen": "2022-06-28T23:13Z", - "manufacturername": "aqara", - "modelid": "lumi.motion.ac01", - "name": "Aqara FP1", - "state": { - "lastupdated": "2022-06-28T23:13:38.577", - "presence": True, - "presenceevent": "leave", - }, - "swversion": "20210121", - "type": "ZHAPresence", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", + "sensors": { + "1": { + "config": { + "devicemode": "undirected", + "on": True, + "reachable": True, + "sensitivity": 3, + "triggerdistance": "medium", + }, + "etag": "13ff209f9401b317987d42506dd4cd79", + "lastannounced": None, + "lastseen": "2022-06-28T23:13Z", + "manufacturername": "aqara", + "modelid": "lumi.motion.ac01", + "name": "Aqara FP1", + "state": { + "lastupdated": "2022-06-28T23:13:38.577", + "presence": True, + "presenceevent": "leave", + }, + "swversion": "20210121", + "type": "ZHAPresence", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", + } + } }, { + "entity_count": 5, + "device_count": 3, "entity_id": "select.aqara_fp1_device_mode", + "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406-device_mode", + "entity_category": EntityCategory.CONFIG, + "attributes": { + "friendly_name": "Aqara FP1 Device Mode", + "options": ["leftright", "undirected"], + }, "option": PresenceConfigDeviceMode.LEFT_AND_RIGHT.value, - "request": "/sensors/0/config", + "request": "/sensors/1/config", "request_data": {"devicemode": "leftright"}, }, ), ( # Presence Sensitivity { - "config": { - "devicemode": "undirected", - "on": True, - 
"reachable": True, - "sensitivity": 3, - "triggerdistance": "medium", - }, - "etag": "13ff209f9401b317987d42506dd4cd79", - "lastannounced": None, - "lastseen": "2022-06-28T23:13Z", - "manufacturername": "aqara", - "modelid": "lumi.motion.ac01", - "name": "Aqara FP1", - "state": { - "lastupdated": "2022-06-28T23:13:38.577", - "presence": True, - "presenceevent": "leave", - }, - "swversion": "20210121", - "type": "ZHAPresence", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", + "sensors": { + "1": { + "config": { + "devicemode": "undirected", + "on": True, + "reachable": True, + "sensitivity": 3, + "triggerdistance": "medium", + }, + "etag": "13ff209f9401b317987d42506dd4cd79", + "lastannounced": None, + "lastseen": "2022-06-28T23:13Z", + "manufacturername": "aqara", + "modelid": "lumi.motion.ac01", + "name": "Aqara FP1", + "state": { + "lastupdated": "2022-06-28T23:13:38.577", + "presence": True, + "presenceevent": "leave", + }, + "swversion": "20210121", + "type": "ZHAPresence", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", + } + } }, { + "entity_count": 5, + "device_count": 3, "entity_id": "select.aqara_fp1_sensitivity", + "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406-sensitivity", + "entity_category": EntityCategory.CONFIG, + "attributes": { + "friendly_name": "Aqara FP1 Sensitivity", + "options": ["High", "Medium", "Low"], + }, "option": "Medium", - "request": "/sensors/0/config", + "request": "/sensors/1/config", "request_data": {"sensitivity": 2}, }, ), ( # Presence Trigger Distance { - "config": { - "devicemode": "undirected", - "on": True, - "reachable": True, - "sensitivity": 3, - "triggerdistance": "medium", - }, - "etag": "13ff209f9401b317987d42506dd4cd79", - "lastannounced": None, - "lastseen": "2022-06-28T23:13Z", - "manufacturername": "aqara", - "modelid": "lumi.motion.ac01", - "name": "Aqara FP1", - "state": { - "lastupdated": "2022-06-28T23:13:38.577", - "presence": True, - "presenceevent": "leave", - }, - "swversion": "20210121", - "type": "ZHAPresence", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", + "sensors": { + "1": { + "config": { + "devicemode": "undirected", + "on": True, + "reachable": True, + "sensitivity": 3, + "triggerdistance": "medium", + }, + "etag": "13ff209f9401b317987d42506dd4cd79", + "lastannounced": None, + "lastseen": "2022-06-28T23:13Z", + "manufacturername": "aqara", + "modelid": "lumi.motion.ac01", + "name": "Aqara FP1", + "state": { + "lastupdated": "2022-06-28T23:13:38.577", + "presence": True, + "presenceevent": "leave", + }, + "swversion": "20210121", + "type": "ZHAPresence", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", + } + } }, { + "entity_count": 5, + "device_count": 3, "entity_id": "select.aqara_fp1_trigger_distance", + "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406-trigger_distance", + "entity_category": EntityCategory.CONFIG, + "attributes": { + "friendly_name": "Aqara FP1 Trigger Distance", + "options": ["far", "medium", "near"], + }, "option": PresenceConfigTriggerDistance.FAR.value, - "request": "/sensors/0/config", + "request": "/sensors/1/config", "request_data": {"triggerdistance": "far"}, }, ), - ( # Air Purifier Fan Mode - { - "config": { - "filterlifetime": 259200, - "ledindication": True, - "locked": False, - "mode": "speed_1", - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "de26d19d9e91b2db3ded6ee7ab6b6a4b", - "lastannounced": None, - "lastseen": "2024-08-07T18:27Z", - "manufacturername": "IKEA of Sweden", - "modelid": "STARKVIND Air purifier", - "name": "IKEA Starkvind", - "productid": "E2007", - "state": 
{ - "deviceruntime": 73405, - "filterruntime": 73405, - "lastupdated": "2024-08-07T18:27:52.543", - "replacefilter": False, - "speed": 20, - }, - "swversion": "1.1.001", - "type": "ZHAAirPurifier", - "uniqueid": "0c:43:14:ff:fe:6c:20:12-01-fc7d", - }, - { - "entity_id": "select.ikea_starkvind_fan_mode", - "option": AirPurifierFanMode.AUTO.value, - "request": "/sensors/0/config", - "request_data": {"mode": "auto"}, - }, - ), ] -@pytest.mark.parametrize(("sensor_payload", "expected"), TEST_DATA) +@pytest.mark.parametrize(("raw_data", "expected"), TEST_DATA) async def test_select( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - expected: dict[str, Any], - snapshot: SnapshotAssertion, + aioclient_mock: AiohttpClientMocker, + raw_data, + expected, ) -> None: """Test successful creation of button entities.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.SELECT]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + with patch.dict(DECONZ_WEB_REQUEST, raw_data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == expected["entity_count"] + + # Verify state data + + button = hass.states.get(expected["entity_id"]) + assert button.attributes == expected["attributes"] + + # Verify entity registry data + + ent_reg_entry = entity_registry.async_get(expected["entity_id"]) + assert ent_reg_entry.entity_category is expected["entity_category"] + assert ent_reg_entry.unique_id == expected["unique_id"] + + # Verify device registry data + + assert ( + len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + == expected["device_count"] + ) # Verify selecting option - aioclient_mock = mock_put_request(expected["request"]) + + mock_deconz_put_request(aioclient_mock, config_entry.data, expected["request"]) await hass.services.async_call( SELECT_DOMAIN, @@ -186,3 +214,14 @@ async def test_select( blocking=True, ) assert aioclient_mock.mock_calls[1][2] == expected["request_data"] + + # Unload entry + + await hass.config_entries.async_unload(config_entry.entry_id) + assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE + + # Remove entry + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_sensor.py b/tests/components/deconz/test_sensor.py index 958cb3b793a..1e1ca6efe7c 100644 --- a/tests/components/deconz/test_sensor.py +++ b/tests/components/deconz/test_sensor.py @@ -1,23 +1,42 @@ """deCONZ sensor platform tests.""" from datetime import timedelta -from typing import Any from unittest.mock import patch import pytest -from syrupy import SnapshotAssertion from homeassistant.components.deconz.const import CONF_ALLOW_CLIP_SENSOR -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.sensor import ( + DOMAIN as SENSOR_DOMAIN, + SensorDeviceClass, + SensorStateClass, +) from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY -from homeassistant.const import Platform +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + CONCENTRATION_PARTS_PER_BILLION, + CONCENTRATION_PARTS_PER_MILLION, + STATE_UNAVAILABLE, + EntityCategory, +) 
from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import dt as dt_util -from .conftest import ConfigEntryFactoryType, WebsocketDataType +from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration + +from tests.common import async_fire_time_changed +from tests.test_util.aiohttp import AiohttpClientMocker + + +async def test_no_sensors( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that no sensors in deconz results in no sensor entities.""" + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 -from tests.common import async_fire_time_changed, snapshot_platform TEST_DATA = [ ( # Air quality sensor @@ -42,7 +61,17 @@ TEST_DATA = [ "uniqueid": "00:12:4b:00:14:4d:00:07-02-fdef", }, { + "entity_count": 2, + "device_count": 3, "entity_id": "sensor.bosch_air_quality_sensor", + "unique_id": "00:12:4b:00:14:4d:00:07-02-fdef-air_quality", + "state": "poor", + "entity_category": None, + "device_class": None, + "state_class": None, + "attributes": { + "friendly_name": "BOSCH Air quality sensor", + }, "websocket_event": {"state": {"airquality": "excellent"}}, "next_state": "excellent", }, @@ -69,7 +98,19 @@ TEST_DATA = [ "uniqueid": "00:12:4b:00:14:4d:00:07-02-fdef", }, { + "entity_count": 2, + "device_count": 3, "entity_id": "sensor.bosch_air_quality_sensor_ppb", + "unique_id": "00:12:4b:00:14:4d:00:07-02-fdef-air_quality_ppb", + "state": "809", + "entity_category": None, + "device_class": None, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "friendly_name": "BOSCH Air quality sensor PPB", + "state_class": "measurement", + "unit_of_measurement": CONCENTRATION_PARTS_PER_BILLION, + }, "websocket_event": {"state": {"airqualityppb": 1000}}, "next_state": "1000", }, @@ -96,7 +137,20 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:01-02-0113", }, { + "entity_count": 4, + "device_count": 3, "entity_id": "sensor.airquality_1_co2", + "unique_id": "00:00:00:00:00:00:00:01-02-0113-air_quality_co2", + "state": "359", + "entity_category": None, + "device_class": SensorDeviceClass.CO2, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "friendly_name": "AirQuality 1 CO2", + "device_class": SensorDeviceClass.CO2, + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": CONCENTRATION_PARTS_PER_MILLION, + }, "websocket_event": {"state": {"airquality_co2_density": 332}}, "next_state": "332", }, @@ -123,7 +177,20 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:01-02-0113", }, { + "entity_count": 4, + "device_count": 3, "entity_id": "sensor.airquality_1_ch2o", + "unique_id": "00:00:00:00:00:00:00:01-02-0113-air_quality_formaldehyde", + "state": "4", + "entity_category": None, + "device_class": SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "friendly_name": "AirQuality 1 CH2O", + "device_class": SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + }, "websocket_event": {"state": {"airquality_formaldehyde_density": 5}}, "next_state": "5", }, @@ -150,7 +217,20 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:01-02-0113", }, { + "entity_count": 4, + "device_count": 3, "entity_id": "sensor.airquality_1_pm25", + "unique_id": 
"00:00:00:00:00:00:00:01-02-0113-air_quality_pm2_5", + "state": "8", + "entity_category": None, + "device_class": SensorDeviceClass.PM25, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "friendly_name": "AirQuality 1 PM25", + "device_class": SensorDeviceClass.PM25, + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + }, "websocket_event": {"state": {"pm2_5": 11}}, "next_state": "11", }, @@ -176,7 +256,21 @@ TEST_DATA = [ "uniqueid": "00:0d:6f:ff:fe:01:23:45-01-0001", }, { + "entity_count": 1, + "device_count": 3, "entity_id": "sensor.fyrtur_block_out_roller_blind_battery", + "unique_id": "00:0d:6f:ff:fe:01:23:45-01-0001-battery", + "state": "100", + "entity_category": EntityCategory.DIAGNOSTIC, + "device_class": SensorDeviceClass.BATTERY, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "state_class": "measurement", + "on": True, + "unit_of_measurement": "%", + "device_class": "battery", + "friendly_name": "FYRTUR block-out roller blind Battery", + }, "websocket_event": {"state": {"battery": 50}}, "next_state": "50", }, @@ -206,7 +300,20 @@ TEST_DATA = [ "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-040d", }, { + "entity_count": 1, + "device_count": 3, "entity_id": "sensor.carbondioxide_35", + "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-040d-carbon_dioxide", + "state": "370", + "entity_category": None, + "device_class": SensorDeviceClass.CO2, + "state_class": CONCENTRATION_PARTS_PER_BILLION, + "attributes": { + "device_class": "carbon_dioxide", + "friendly_name": "CarbonDioxide 35", + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": CONCENTRATION_PARTS_PER_BILLION, + }, "websocket_event": {"state": {"measured_value": 500}}, "next_state": "500", }, @@ -228,7 +335,22 @@ TEST_DATA = [ "uniqueid": "00:0d:6f:00:0b:7a:64:29-01-0702", }, { + "entity_count": 1, + "device_count": 3, "entity_id": "sensor.consumption_15", + "unique_id": "00:0d:6f:00:0b:7a:64:29-01-0702-consumption", + "state": "11.342", + "entity_category": None, + "device_class": SensorDeviceClass.ENERGY, + "state_class": SensorStateClass.TOTAL_INCREASING, + "attributes": { + "state_class": "total_increasing", + "on": True, + "power": 123, + "unit_of_measurement": "kWh", + "device_class": "energy", + "friendly_name": "Consumption 15", + }, "websocket_event": {"state": {"consumption": 10000}}, "next_state": "10.0", }, @@ -256,7 +378,21 @@ TEST_DATA = [ }, { "enable_entity": True, + "entity_count": 1, + "device_count": 3, "entity_id": "sensor.daylight", + "unique_id": "01:23:4E:FF:FF:56:78:9A-01-daylight_status", + "old-unique_id": "01:23:4E:FF:FF:56:78:9A-01", + "state": "solar_noon", + "entity_category": None, + "device_class": None, + "state_class": None, + "attributes": { + "on": True, + "daylight": True, + "icon": "mdi:white-balance-sunny", + "friendly_name": "Daylight", + }, "websocket_event": {"state": {"status": 210}}, "next_state": "dusk", }, @@ -286,7 +422,20 @@ TEST_DATA = [ "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-042b", }, { + "entity_count": 1, + "device_count": 3, "entity_id": "sensor.formaldehyde_34", + "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-042b-formaldehyde", + "state": "1", + "entity_category": None, + "device_class": SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "device_class": "volatile_organic_compounds", + "friendly_name": "Formaldehyde 34", + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": 
CONCENTRATION_PARTS_PER_BILLION, + }, "websocket_event": {"state": {"measured_value": 2}}, "next_state": "2", }, @@ -310,7 +459,18 @@ TEST_DATA = [ "uniqueid": "fsm-state-1520195376277", }, { + "entity_count": 1, + "device_count": 2, "entity_id": "sensor.fsm_state_motion_stair", + "unique_id": "fsm-state-1520195376277-status", + "state": "0", + "entity_category": None, + "device_class": None, + "state_class": None, + "attributes": { + "on": True, + "friendly_name": "FSM_STATE Motion stair", + }, "websocket_event": {"state": {"status": 1}}, "next_state": "1", }, @@ -337,7 +497,24 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:45:dc:53-01-0405", }, { + "entity_count": 2, + "device_count": 3, "entity_id": "sensor.mi_temperature_1", + "unique_id": "00:15:8d:00:02:45:dc:53-01-0405-humidity", + "state": "35.55", + "entity_category": None, + "device_class": SensorDeviceClass.HUMIDITY, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "state_class": "measurement", + "on": True, + "unit_of_measurement": "%", + "device_class": "humidity", + "friendly_name": "Mi temperature 1", + }, + "options": { + "suggested_display_precision": 1, + }, "websocket_event": {"state": {"humidity": 1000}}, "next_state": "10.0", }, @@ -361,7 +538,20 @@ TEST_DATA = [ "uniqueid": "a4:c1:38:fe:86:8f:07:a3-01-0408", }, { + "entity_count": 3, + "device_count": 3, "entity_id": "sensor.soil_sensor", + "unique_id": "a4:c1:38:fe:86:8f:07:a3-01-0408-moisture", + "state": "72.13", + "entity_category": None, + "device_class": SensorDeviceClass.MOISTURE, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "state_class": "measurement", + "unit_of_measurement": "%", + "device_class": "moisture", + "friendly_name": "Soil Sensor", + }, "websocket_event": {"state": {"moisture": 6923}}, "next_state": "69.23", }, @@ -396,7 +586,23 @@ TEST_DATA = [ "uniqueid": "00:17:88:01:03:28:8c:9b-02-0400", }, { + "entity_count": 2, + "device_count": 3, "entity_id": "sensor.motion_sensor_4", + "unique_id": "00:17:88:01:03:28:8c:9b-02-0400-light_level", + "state": "5.0", + "entity_category": None, + "device_class": SensorDeviceClass.ILLUMINANCE, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "on": True, + "dark": True, + "daylight": False, + "unit_of_measurement": "lx", + "device_class": "illuminance", + "friendly_name": "Motion sensor 4", + "state_class": "measurement", + }, "websocket_event": {"state": {"lightlevel": 1000}}, "next_state": "1.3", }, @@ -432,7 +638,20 @@ TEST_DATA = [ "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-042a", }, { + "entity_count": 1, + "device_count": 3, "entity_id": "sensor.starkvind_airpurifier_pm25", + "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-042a-particulate_matter_pm2_5", + "state": "1", + "entity_category": None, + "device_class": SensorDeviceClass.PM25, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "friendly_name": "STARKVIND AirPurifier PM25", + "device_class": SensorDeviceClass.PM25, + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + }, "websocket_event": {"state": {"measured_value": 2}}, "next_state": "2", }, @@ -458,7 +677,23 @@ TEST_DATA = [ "uniqueid": "00:0d:6f:00:0b:7a:64:29-01-0b04", }, { + "entity_count": 1, + "device_count": 3, "entity_id": "sensor.power_16", + "unique_id": "00:0d:6f:00:0b:7a:64:29-01-0b04-power", + "state": "64", + "entity_category": None, + "device_class": SensorDeviceClass.POWER, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "state_class": 
"measurement", + "on": True, + "current": 34, + "voltage": 231, + "unit_of_measurement": "W", + "device_class": "power", + "friendly_name": "Power 16", + }, "websocket_event": {"state": {"power": 1000}}, "next_state": "1000", }, @@ -484,7 +719,21 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:45:dc:53-01-0403", }, { + "entity_count": 2, + "device_count": 3, "entity_id": "sensor.mi_temperature_1", + "unique_id": "00:15:8d:00:02:45:dc:53-01-0403-pressure", + "state": "1010", + "entity_category": None, + "device_class": SensorDeviceClass.PRESSURE, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "state_class": "measurement", + "on": True, + "unit_of_measurement": "hPa", + "device_class": "pressure", + "friendly_name": "Mi temperature 1", + }, "websocket_event": {"state": {"pressure": 500}}, "next_state": "500", }, @@ -511,7 +760,24 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:45:dc:53-01-0402", }, { + "entity_count": 2, + "device_count": 3, "entity_id": "sensor.mi_temperature_1", + "unique_id": "00:15:8d:00:02:45:dc:53-01-0402-temperature", + "state": "21.82", + "entity_category": None, + "device_class": SensorDeviceClass.TEMPERATURE, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "state_class": "measurement", + "on": True, + "unit_of_measurement": "°C", + "device_class": "temperature", + "friendly_name": "Mi temperature 1", + }, + "options": { + "suggested_display_precision": 1, + }, "websocket_event": {"state": {"temperature": 1800}}, "next_state": "18.0", }, @@ -540,7 +806,17 @@ TEST_DATA = [ "uniqueid": "cc:cc:cc:ff:fe:38:4d:b3-01-000a", }, { + "entity_count": 2, + "device_count": 3, "entity_id": "sensor.etrv_sejour", + "unique_id": "cc:cc:cc:ff:fe:38:4d:b3-01-000a-last_set", + "state": "2020-11-19T08:07:08+00:00", + "entity_category": None, + "device_class": SensorDeviceClass.TIMESTAMP, + "attributes": { + "device_class": "timestamp", + "friendly_name": "eTRV Séjour", + }, "websocket_event": {"state": {"lastset": "2020-12-14T10:12:14Z"}}, "next_state": "2020-12-14T10:12:14+00:00", }, @@ -569,7 +845,20 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:b5:d1:80-01-0500", }, { + "entity_count": 3, + "device_count": 3, "entity_id": "sensor.alarm_10_temperature", + "unique_id": "00:15:8d:00:02:b5:d1:80-01-0500-internal_temperature", + "state": "26.0", + "entity_category": None, + "device_class": SensorDeviceClass.TEMPERATURE, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "state_class": "measurement", + "unit_of_measurement": "°C", + "device_class": "temperature", + "friendly_name": "Alarm 10 Temperature", + }, "websocket_event": {"state": {"temperature": 1800}}, "next_state": "26.0", }, @@ -597,62 +886,45 @@ TEST_DATA = [ "uniqueid": "00:17:88:01:02:0e:32:a3-02-fc00", }, { + "entity_count": 1, + "device_count": 3, "entity_id": "sensor.dimmer_switch_3_battery", + "unique_id": "00:17:88:01:02:0e:32:a3-02-fc00-battery", + "state": "90", + "entity_category": EntityCategory.DIAGNOSTIC, + "device_class": SensorDeviceClass.BATTERY, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "state_class": "measurement", + "on": True, + "event_id": "dimmer_switch_3", + "unit_of_measurement": "%", + "device_class": "battery", + "friendly_name": "Dimmer switch 3 Battery", + }, "websocket_event": {"config": {"battery": 80}}, "next_state": "80", }, ), - ( # Air purifier filter time sensor - { - "config": { - "filterlifetime": 259200, - "ledindication": True, - "locked": False, - "mode": "speed_1", - "on": True, - "reachable": True, - }, - "ep": 
1, - "etag": "de26d19d9e91b2db3ded6ee7ab6b6a4b", - "lastannounced": None, - "lastseen": "2024-08-07T18:27Z", - "manufacturername": "IKEA of Sweden", - "modelid": "STARKVIND Air purifier", - "name": "IKEA Starkvind", - "productid": "E2007", - "state": { - "deviceruntime": 73405, - "filterruntime": 73405, - "lastupdated": "2024-08-07T18:27:52.543", - "replacefilter": False, - "speed": 20, - }, - "swversion": "1.1.001", - "type": "ZHAAirPurifier", - "uniqueid": "0c:43:14:ff:fe:6c:20:12-01-fc7d", - }, - { - "entity_id": "sensor.ikea_starkvind_filter_time", - "websocket_event": {"state": {"filterruntime": 100000}}, - "next_state": "1.15740740740741", - }, - ), ] -@pytest.mark.parametrize(("sensor_payload", "expected"), TEST_DATA) -@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) +@pytest.mark.parametrize(("sensor_data", "expected"), TEST_DATA) async def test_sensors( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - sensor_ws_data: WebsocketDataType, - expected: dict[str, Any], - snapshot: SnapshotAssertion, + aioclient_mock: AiohttpClientMocker, + mock_deconz_websocket, + sensor_data, + expected, ) -> None: """Test successful creation of sensor entities.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.SENSOR]): - config_entry = await config_entry_factory() + + with patch.dict(DECONZ_WEB_REQUEST, {"sensors": {"1": sensor_data}}): + config_entry = await setup_deconz_integration( + hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: True} + ) # Enable in entity registry if expected.get("enable_entity"): @@ -667,37 +939,79 @@ async def test_sensors( ) await hass.async_block_till_done() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + assert len(hass.states.async_all()) == expected["entity_count"] + + # Verify entity state + sensor = hass.states.get(expected["entity_id"]) + assert sensor.state == expected["state"] + assert sensor.attributes.get(ATTR_DEVICE_CLASS) == expected["device_class"] + assert sensor.attributes == expected["attributes"] + + # Verify entity registry + assert ( + entity_registry.async_get(expected["entity_id"]).entity_category + is expected["entity_category"] + ) + ent_reg_entry = entity_registry.async_get(expected["entity_id"]) + assert ent_reg_entry.entity_category is expected["entity_category"] + assert ent_reg_entry.unique_id == expected["unique_id"] + + # Verify device registry + assert ( + len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + == expected["device_count"] + ) # Change state - await sensor_ws_data(expected["websocket_event"]) + event_changed_sensor = {"t": "event", "e": "changed", "r": "sensors", "id": "1"} + event_changed_sensor |= expected["websocket_event"] + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() assert hass.states.get(expected["entity_id"]).state == expected["next_state"] + # Unload entry -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "name": "CLIP temperature sensor", - "type": "CLIPTemperature", - "state": {"temperature": 2600}, - "config": {}, - "uniqueid": "00:00:00:00:00:00:00:02-00", - }, - ], -) -@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: False}]) -@pytest.mark.usefixtures("config_entry_setup") -async def test_not_allow_clip_sensor(hass: HomeAssistant) -> None: - """Test that CLIP sensors are not allowed.""" + await 
hass.config_entries.async_unload(config_entry.entry_id) + assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE + + # Remove entry + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() assert len(hass.states.async_all()) == 0 -@pytest.mark.parametrize( - "sensor_payload", - [ - { +async def test_not_allow_clip_sensor( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that CLIP sensors are not allowed.""" + data = { + "sensors": { + "1": { + "name": "CLIP temperature sensor", + "type": "CLIPTemperature", + "state": {"temperature": 2600}, + "config": {}, + "uniqueid": "00:00:00:00:00:00:00:02-00", + }, + } + } + + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration( + hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: False} + ) + + assert len(hass.states.async_all()) == 0 + + +async def test_allow_clip_sensors( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that CLIP sensors can be allowed.""" + data = { + "sensors": { "1": { "name": "Light level sensor", "type": "ZHALightLevel", @@ -725,19 +1039,17 @@ async def test_not_allow_clip_sensor(hass: HomeAssistant) -> None: "uniqueid": "/sensors/3", }, } - ], -) -@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) -async def test_allow_clip_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - snapshot: SnapshotAssertion, -) -> None: - """Test that CLIP sensors can be allowed.""" - with patch("homeassistant.components.deconz.PLATFORMS", [Platform.SENSOR]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration( + hass, + aioclient_mock, + options={CONF_ALLOW_CLIP_SENSOR: True}, + ) + + assert len(hass.states.async_all()) == 4 + assert hass.states.get("sensor.clip_light_level_sensor").state == "999.8" + assert hass.states.get("sensor.clip_flur").state == "0" # Disallow clip sensors @@ -762,14 +1074,15 @@ async def test_allow_clip_sensors( assert hass.states.get("sensor.clip_flur").state == "0" -@pytest.mark.usefixtures("config_entry_setup") async def test_add_new_sensor( - hass: HomeAssistant, - sensor_ws_data: WebsocketDataType, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test that adding a new sensor works.""" event_added_sensor = { + "t": "event", "e": "added", + "r": "sensors", + "id": "1", "sensor": { "id": "Light sensor id", "name": "Light level sensor", @@ -780,9 +1093,13 @@ async def test_add_new_sensor( }, } + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 - await sensor_ws_data(event_added_sensor) + await mock_deconz_websocket(data=event_added_sensor) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 2 assert hass.states.get("sensor.light_level_sensor").state == "999.8" @@ -798,58 +1115,71 @@ BAD_SENSOR_DATA = [ @pytest.mark.parametrize(("sensor_type", "sensor_property"), BAD_SENSOR_DATA) async def test_dont_add_sensor_if_state_is_none( hass: HomeAssistant, - config_entry_factory: ConfigEntryFactoryType, - sensor_payload: dict[str, Any], - sensor_type: str, - sensor_property: str, + aioclient_mock: AiohttpClientMocker, + sensor_type, + sensor_property, ) -> None: """Test sensor with scaled 
data is not created if state is None.""" - sensor_payload["0"] = { - "name": "Sensor 1", - "type": sensor_type, - "state": {sensor_property: None}, - "config": {}, - "uniqueid": "00:00:00:00:00:00:00:00-00", + data = { + "sensors": { + "1": { + "name": "Sensor 1", + "type": sensor_type, + "state": {sensor_property: None}, + "config": {}, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + } } - await config_entry_factory() + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) assert len(hass.states.async_all()) == 0 -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "config": { - "on": True, - "reachable": True, - }, - "ep": 2, - "etag": "c2d2e42396f7c78e11e46c66e2ec0200", - "lastseen": "2020-11-20T22:48Z", - "manufacturername": "BOSCH", - "modelid": "AIR", - "name": "BOSCH Air quality sensor", - "state": { - "airquality": "poor", - "lastupdated": "2020-11-20T22:48:00.209", - }, - "swversion": "20200402", - "type": "ZHAAirQuality", - "uniqueid": "00:00:00:00:00:00:00:00-02-fdef", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_air_quality_sensor_without_ppb(hass: HomeAssistant) -> None: +async def test_air_quality_sensor_without_ppb( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: """Test sensor with scaled data is not created if state is None.""" + data = { + "sensors": { + "1": { + "config": { + "on": True, + "reachable": True, + }, + "ep": 2, + "etag": "c2d2e42396f7c78e11e46c66e2ec0200", + "lastseen": "2020-11-20T22:48Z", + "manufacturername": "BOSCH", + "modelid": "AIR", + "name": "BOSCH Air quality sensor", + "state": { + "airquality": "poor", + "lastupdated": "2020-11-20T22:48:00.209", + }, + "swversion": "20200402", + "type": "ZHAAirQuality", + "uniqueid": "00:00:00:00:00:00:00:00-02-fdef", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 1 -@pytest.mark.parametrize( - "sensor_payload", - [ - { +async def test_add_battery_later( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket +) -> None: + """Test that a battery sensor can be created later on. + + Without an initial battery state a battery sensor + can be created once a value is reported. + """ + data = { + "sensors": { "1": { "name": "Switch 1", "type": "ZHASwitch", @@ -865,175 +1195,190 @@ async def test_air_quality_sensor_without_ppb(hass: HomeAssistant) -> None: "uniqueid": "00:00:00:00:00:00:00:00-00-0001", }, } - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_add_battery_later( - hass: HomeAssistant, - sensor_ws_data: WebsocketDataType, -) -> None: - """Test that a battery sensor can be created later on. + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) - Without an initial battery state a battery sensor - can be created once a value is reported. 
- """ assert len(hass.states.async_all()) == 0 - await sensor_ws_data({"id": "2", "config": {"battery": 50}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "2", + "config": {"battery": 50}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 - await sensor_ws_data({"id": "1", "config": {"battery": 50}}) + event_changed_sensor = { + "t": "event", + "e": "changed", + "r": "sensors", + "id": "1", + "config": {"battery": 50}, + } + await mock_deconz_websocket(data=event_changed_sensor) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 1 + assert hass.states.get("sensor.switch_1_battery").state == "50" @pytest.mark.parametrize("model_id", ["0x8030", "0x8031", "0x8034", "0x8035"]) async def test_special_danfoss_battery_creation( - hass: HomeAssistant, - config_entry_factory: ConfigEntryFactoryType, - sensor_payload: dict[str, Any], - model_id: str, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, model_id ) -> None: """Test the special Danfoss battery creation works. Normally there should only be one battery sensor per device from deCONZ. With specific Danfoss devices each endpoint can report its own battery state. """ - sensor_payload |= { - "1": { - "config": { - "battery": 70, - "heatsetpoint": 2300, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, + data = { + "sensors": { + "1": { + "config": { + "battery": 70, + "heatsetpoint": 2300, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, + }, + "ep": 1, + "etag": "982d9acc38bee5b251e24a9be26558e4", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": { + "lastupdated": "2021-02-15T12:23:07.994", + "on": False, + "temperature": 2307, + }, + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-01-0201", }, - "ep": 1, - "etag": "982d9acc38bee5b251e24a9be26558e4", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": { - "lastupdated": "2021-02-15T12:23:07.994", - "on": False, - "temperature": 2307, + "2": { + "config": { + "battery": 86, + "heatsetpoint": 2300, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, + }, + "ep": 2, + "etag": "62f12749f9f51c950086aff37dd02b61", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": { + "lastupdated": "2021-02-15T12:23:22.399", + "on": False, + "temperature": 2316, + }, + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-02-0201", }, - "swversion": "YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-01-0201", - }, - "2": { - "config": { - "battery": 86, - "heatsetpoint": 2300, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, + "3": { + "config": { + "battery": 86, + "heatsetpoint": 2350, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, + }, + "ep": 3, + "etag": "f50061174bb7f18a3d95789bab8b646d", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": { + "lastupdated": "2021-02-15T12:23:25.466", + "on": False, + "temperature": 2337, + }, + "swversion": 
"YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-03-0201", }, - "ep": 2, - "etag": "62f12749f9f51c950086aff37dd02b61", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": { - "lastupdated": "2021-02-15T12:23:22.399", - "on": False, - "temperature": 2316, + "4": { + "config": { + "battery": 85, + "heatsetpoint": 2300, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, + }, + "ep": 4, + "etag": "eea97adf8ce1b971b8b6a3a31793f96b", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": { + "lastupdated": "2021-02-15T12:23:41.939", + "on": False, + "temperature": 2333, + }, + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-04-0201", }, - "swversion": "YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-02-0201", - }, - "3": { - "config": { - "battery": 86, - "heatsetpoint": 2350, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, + "5": { + "config": { + "battery": 83, + "heatsetpoint": 2300, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, + }, + "ep": 5, + "etag": "1f7cd1a5d66dc27ac5eb44b8c47362fb", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": {"lastupdated": "none", "on": False, "temperature": 2325}, + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-05-0201", }, - "ep": 3, - "etag": "f50061174bb7f18a3d95789bab8b646d", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": { - "lastupdated": "2021-02-15T12:23:25.466", - "on": False, - "temperature": 2337, - }, - "swversion": "YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-03-0201", - }, - "4": { - "config": { - "battery": 85, - "heatsetpoint": 2300, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, - }, - "ep": 4, - "etag": "eea97adf8ce1b971b8b6a3a31793f96b", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": { - "lastupdated": "2021-02-15T12:23:41.939", - "on": False, - "temperature": 2333, - }, - "swversion": "YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-04-0201", - }, - "5": { - "config": { - "battery": 83, - "heatsetpoint": 2300, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, - }, - "ep": 5, - "etag": "1f7cd1a5d66dc27ac5eb44b8c47362fb", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": {"lastupdated": "none", "on": False, "temperature": 2325}, - "swversion": "YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-05-0201", - }, + } } - - await config_entry_factory() + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) assert len(hass.states.async_all()) == 10 assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 5 -@pytest.mark.parametrize( - "sensor_payload", - [{"type": "not supported", "name": "name", "state": {}, "config": {}}], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_unsupported_sensor(hass: HomeAssistant) -> 
None:
+async def test_unsupported_sensor(
+    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
+) -> None:
     """Test that unsupported sensors don't break anything."""
+    data = {
+        "sensors": {
+            "0": {"type": "not supported", "name": "name", "state": {}, "config": {}}
+        }
+    }
+    with patch.dict(DECONZ_WEB_REQUEST, data):
+        await setup_deconz_integration(hass, aioclient_mock)
+
     assert len(hass.states.async_all()) == 0
diff --git a/tests/components/deconz/test_services.py b/tests/components/deconz/test_services.py
index 9a30564385c..de061fc4e8c 100644
--- a/tests/components/deconz/test_services.py
+++ b/tests/components/deconz/test_services.py
@@ -1,7 +1,6 @@
 """deCONZ service tests."""
-from collections.abc import Callable
-from typing import Any
+from unittest.mock import patch
 import pytest
 import voluptuous as vol
@@ -24,25 +23,31 @@ from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dr, entity_registry as er
-from .test_hub import BRIDGE_ID
+from .test_gateway import (
+    BRIDGEID,
+    DECONZ_WEB_REQUEST,
+    mock_deconz_put_request,
+    mock_deconz_request,
+    setup_deconz_integration,
+)
-from tests.common import MockConfigEntry, async_capture_events
+from tests.common import async_capture_events
 from tests.test_util.aiohttp import AiohttpClientMocker
-@pytest.mark.usefixtures("config_entry_setup")
 async def test_configure_service_with_field(
-    hass: HomeAssistant,
-    mock_put_request: Callable[[str, str], AiohttpClientMocker],
+    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
 ) -> None:
     """Test that service invokes pydeconz with the correct path and data."""
+    config_entry = await setup_deconz_integration(hass, aioclient_mock)
+
     data = {
         SERVICE_FIELD: "/lights/2",
-        CONF_BRIDGE_ID: BRIDGE_ID,
+        CONF_BRIDGE_ID: BRIDGEID,
         SERVICE_DATA: {"on": True, "attr1": 10, "attr2": 20},
     }
-    aioclient_mock = mock_put_request("/lights/2")
+    mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/2")
     await hass.services.async_call(
         DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data, blocking=True
@@ -50,28 +55,29 @@ async def test_configure_service_with_field(
     assert aioclient_mock.mock_calls[1][2] == {"on": True, "attr1": 10, "attr2": 20}
-@pytest.mark.parametrize(
-    "light_payload",
-    [
-        {
-            "name": "Test",
-            "state": {"reachable": True},
-            "type": "Light",
-            "uniqueid": "00:00:00:00:00:00:00:01-00",
-        }
-    ],
-)
-@pytest.mark.usefixtures("config_entry_setup")
 async def test_configure_service_with_entity(
-    hass: HomeAssistant,
-    mock_put_request: Callable[[str, str], AiohttpClientMocker],
+    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
 ) -> None:
     """Test that service invokes pydeconz with the correct path and data."""
+    data = {
+        "lights": {
+            "1": {
+                "name": "Test",
+                "state": {"reachable": True},
+                "type": "Light",
+                "uniqueid": "00:00:00:00:00:00:00:01-00",
+            }
+        }
+    }
+    with patch.dict(DECONZ_WEB_REQUEST, data):
+        config_entry = await setup_deconz_integration(hass, aioclient_mock)
+
     data = {
         SERVICE_ENTITY: "light.test",
         SERVICE_DATA: {"on": True, "attr1": 10, "attr2": 20},
     }
-    aioclient_mock = mock_put_request("/lights/0")
+
+    mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1")
     await hass.services.async_call(
         DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data, blocking=True
@@ -79,29 +85,30 @@ async def test_configure_service_with_entity(
     assert aioclient_mock.mock_calls[1][2] == {"on": True, "attr1": 10, "attr2":
20} -@pytest.mark.parametrize( - "light_payload", - [ - { - "name": "Test", - "state": {"reachable": True}, - "type": "Light", - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") async def test_configure_service_with_entity_and_field( - hass: HomeAssistant, - mock_put_request: Callable[[str, str], AiohttpClientMocker], + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that service invokes pydeconz with the correct path and data.""" + data = { + "lights": { + "1": { + "name": "Test", + "state": {"reachable": True}, + "type": "Light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + data = { SERVICE_ENTITY: "light.test", SERVICE_FIELD: "/state", SERVICE_DATA: {"on": True, "attr1": 10, "attr2": 20}, } - aioclient_mock = mock_put_request("/lights/0/state") + + mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") await hass.services.async_call( DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data, blocking=True @@ -109,11 +116,11 @@ async def test_configure_service_with_entity_and_field( assert aioclient_mock.mock_calls[1][2] == {"on": True, "attr1": 10, "attr2": 20} -@pytest.mark.usefixtures("config_entry_setup") async def test_configure_service_with_faulty_bridgeid( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that service fails on a bad bridge id.""" + await setup_deconz_integration(hass, aioclient_mock) aioclient_mock.clear_requests() data = { @@ -130,9 +137,12 @@ async def test_configure_service_with_faulty_bridgeid( assert len(aioclient_mock.mock_calls) == 0 -@pytest.mark.usefixtures("config_entry_setup") -async def test_configure_service_with_faulty_field(hass: HomeAssistant) -> None: +async def test_configure_service_with_faulty_field( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: """Test that service fails on a bad field.""" + await setup_deconz_integration(hass, aioclient_mock) + data = {SERVICE_FIELD: "light/2", SERVICE_DATA: {}} with pytest.raises(vol.Invalid): @@ -141,11 +151,11 @@ async def test_configure_service_with_faulty_field(hass: HomeAssistant) -> None: ) -@pytest.mark.usefixtures("config_entry_setup") async def test_configure_service_with_faulty_entity( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that service on a non existing entity.""" + await setup_deconz_integration(hass, aioclient_mock) aioclient_mock.clear_requests() data = { @@ -161,12 +171,13 @@ async def test_configure_service_with_faulty_entity( assert len(aioclient_mock.mock_calls) == 0 -@pytest.mark.parametrize("config_entry_options", [{CONF_MASTER_GATEWAY: False}]) -@pytest.mark.usefixtures("config_entry_setup") async def test_calling_service_with_no_master_gateway_fails( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that service call fails when no master gateway exist.""" + await setup_deconz_integration( + hass, aioclient_mock, options={CONF_MASTER_GATEWAY: False} + ) aioclient_mock.clear_requests() data = { @@ -182,19 +193,18 @@ async def test_calling_service_with_no_master_gateway_fails( assert len(aioclient_mock.mock_calls) == 0 -@pytest.mark.usefixtures("config_entry_setup") async def test_service_refresh_devices( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - deconz_payload: dict[str, Any], - mock_requests: Callable[[], None], + hass: 
HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that service can refresh devices.""" + config_entry = await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 aioclient_mock.clear_requests() - deconz_payload |= { + data = { + "config": {}, "groups": { "1": { "id": "Group 1 id", @@ -224,43 +234,43 @@ async def test_service_refresh_devices( } }, } - mock_requests() + + mock_deconz_request(aioclient_mock, config_entry.data, data) await hass.services.async_call( - DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH, service_data={CONF_BRIDGE_ID: BRIDGE_ID} + DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH, service_data={CONF_BRIDGE_ID: BRIDGEID} ) await hass.async_block_till_done() assert len(hass.states.async_all()) == 5 -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "name": "Switch 1", - "type": "ZHASwitch", - "state": {"buttonevent": 1000}, - "config": {"battery": 100}, - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") async def test_service_refresh_devices_trigger_no_state_update( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - deconz_payload: dict[str, Any], - mock_requests, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Verify that gateway.ignore_state_updates are honored.""" + data = { + "sensors": { + "1": { + "name": "Switch 1", + "type": "ZHASwitch", + "state": {"buttonevent": 1000}, + "config": {"battery": 100}, + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 1 captured_events = async_capture_events(hass, CONF_DECONZ_EVENT) aioclient_mock.clear_requests() - deconz_payload |= { + data = { + "config": {}, "groups": { "1": { "id": "Group 1 id", @@ -281,7 +291,7 @@ async def test_service_refresh_devices_trigger_no_state_update( } }, "sensors": { - "0": { + "1": { "name": "Switch 1", "type": "ZHASwitch", "state": {"buttonevent": 1000}, @@ -290,10 +300,11 @@ async def test_service_refresh_devices_trigger_no_state_update( } }, } - mock_requests() + + mock_deconz_request(aioclient_mock, config_entry.data, data) await hass.services.async_call( - DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH, service_data={CONF_BRIDGE_ID: BRIDGE_ID} + DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH, service_data={CONF_BRIDGE_ID: BRIDGEID} ) await hass.async_block_till_done() @@ -301,38 +312,37 @@ async def test_service_refresh_devices_trigger_no_state_update( assert len(captured_events) == 0 -@pytest.mark.parametrize( - "light_payload", - [ - { - "name": "Light 0 name", - "state": {"reachable": True}, - "type": "Light", - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - ], -) -@pytest.mark.parametrize( - "sensor_payload", - [ - { - "name": "Switch 1", - "type": "ZHASwitch", - "state": {"buttonevent": 1000, "gesture": 1}, - "config": {"battery": 100}, - "uniqueid": "00:00:00:00:00:00:00:03-00", - } - ], -) async def test_remove_orphaned_entries_service( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - config_entry_setup: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, ) -> None: """Test service works and also don't remove more than expected.""" + data = { + "lights": { + "1": { + "name": "Light 1 name", + "state": {"reachable": True}, + "type": "Light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + }, + "sensors": { + "1": { + "name": "Switch 1", + "type": "ZHASwitch", + 
"state": {"buttonevent": 1000, "gesture": 1}, + "config": {"battery": 100}, + "uniqueid": "00:00:00:00:00:00:00:03-00", + }, + }, + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + device = device_registry.async_get_or_create( - config_entry_id=config_entry_setup.entry_id, + config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "123")}, ) @@ -341,7 +351,7 @@ async def test_remove_orphaned_entries_service( [ entry for entry in device_registry.devices.values() - if config_entry_setup.entry_id in entry.config_entries + if config_entry.entry_id in entry.config_entries ] ) == 5 # Host, gateway, light, switch and orphan @@ -352,23 +362,19 @@ async def test_remove_orphaned_entries_service( DECONZ_DOMAIN, "12345", suggested_object_id="Orphaned sensor", - config_entry=config_entry_setup, + config_entry=config_entry, device_id=device.id, ) assert ( - len( - er.async_entries_for_config_entry( - entity_registry, config_entry_setup.entry_id - ) - ) + len(er.async_entries_for_config_entry(entity_registry, config_entry.entry_id)) == 3 # Light, switch battery and orphan ) await hass.services.async_call( DECONZ_DOMAIN, SERVICE_REMOVE_ORPHANED_ENTRIES, - service_data={CONF_BRIDGE_ID: BRIDGE_ID}, + service_data={CONF_BRIDGE_ID: BRIDGEID}, ) await hass.async_block_till_done() @@ -377,17 +383,13 @@ async def test_remove_orphaned_entries_service( [ entry for entry in device_registry.devices.values() - if config_entry_setup.entry_id in entry.config_entries + if config_entry.entry_id in entry.config_entries ] ) == 4 # Host, gateway, light and switch ) assert ( - len( - er.async_entries_for_config_entry( - entity_registry, config_entry_setup.entry_id - ) - ) + len(er.async_entries_for_config_entry(entity_registry, config_entry.entry_id)) == 2 # Light and switch battery ) diff --git a/tests/components/deconz/test_siren.py b/tests/components/deconz/test_siren.py index 5c80feef38c..62ed1b732b8 100644 --- a/tests/components/deconz/test_siren.py +++ b/tests/components/deconz/test_siren.py @@ -1,8 +1,6 @@ """deCONZ switch platform tests.""" -from collections.abc import Callable - -import pytest +from unittest.mock import patch from homeassistant.components.siren import ATTR_DURATION, DOMAIN as SIREN_DOMAIN from homeassistant.const import ( @@ -11,41 +9,61 @@ from homeassistant.const import ( SERVICE_TURN_ON, STATE_OFF, STATE_ON, + STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant -from .conftest import WebsocketDataType +from .test_gateway import ( + DECONZ_WEB_REQUEST, + mock_deconz_put_request, + setup_deconz_integration, +) from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.mark.parametrize( - "light_payload", - [ - { - "name": "Warning device", - "type": "Warning device", - "state": {"alert": "lselect", "reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - ], -) -@pytest.mark.usefixtures("config_entry_setup") async def test_sirens( - hass: HomeAssistant, - light_ws_data: WebsocketDataType, - mock_put_request: Callable[[str, str], AiohttpClientMocker], + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket ) -> None: """Test that siren entities are created.""" - assert len(hass.states.async_all()) == 1 - assert hass.states.get("siren.warning_device").state == STATE_ON + data = { + "lights": { + "1": { + "name": "Warning device", + "type": "Warning device", + "state": {"alert": "lselect", "reachable": True}, + "uniqueid": "00:00:00:00:00:00:00:00-00", 
+ }, + "2": { + "name": "Unsupported siren", + "type": "Not a siren", + "state": {"reachable": True}, + "uniqueid": "00:00:00:00:00:00:00:01-00", + }, + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + + assert len(hass.states.async_all()) == 2 + assert hass.states.get("siren.warning_device").state == STATE_ON + assert not hass.states.get("siren.unsupported_siren") + + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"alert": None}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() - await light_ws_data({"state": {"alert": None}}) assert hass.states.get("siren.warning_device").state == STATE_OFF # Verify service calls - aioclient_mock = mock_put_request("/lights/0/state") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") # Service turn on siren @@ -76,3 +94,14 @@ async def test_sirens( blocking=True, ) assert aioclient_mock.mock_calls[3][2] == {"alert": "lselect", "ontime": 100} + + await hass.config_entries.async_unload(config_entry.entry_id) + + states = hass.states.async_all() + assert len(states) == 2 + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_switch.py b/tests/components/deconz/test_switch.py index ed82b0c2ac3..9ef2382a2e2 100644 --- a/tests/components/deconz/test_switch.py +++ b/tests/components/deconz/test_switch.py @@ -1,8 +1,6 @@ """deCONZ switch platform tests.""" -from collections.abc import Callable - -import pytest +from unittest.mock import patch from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN @@ -11,65 +9,83 @@ from homeassistant.components.switch import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import ConfigEntryFactoryType, WebsocketDataType +from .test_gateway import ( + DECONZ_WEB_REQUEST, + mock_deconz_put_request, + setup_deconz_integration, +) from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.mark.parametrize( - "light_payload", - [ - { - "0": { +async def test_no_switches( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that no switch entities are created.""" + await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 0 + + +async def test_power_plugs( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket +) -> None: + """Test that all supported switch entities are created.""" + data = { + "lights": { + "1": { "name": "On off switch", "type": "On/Off plug-in unit", "state": {"on": True, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:00-00", }, - "1": { + "2": { "name": "Smart plug", "type": "Smart plug", "state": {"on": False, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:01-00", }, - "2": { + "3": { "name": "Unsupported switch", "type": "Not a switch", "state": {"reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:02-00", + "uniqueid": "00:00:00:00:00:00:00:03-00", }, - "3": { + 
"4": { "name": "On off relay", "state": {"on": True, "reachable": True}, "type": "On/Off light", - "uniqueid": "00:00:00:00:00:00:00:03-00", + "uniqueid": "00:00:00:00:00:00:00:04-00", }, } - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_power_plugs( - hass: HomeAssistant, - mock_put_request: Callable[[str, str], AiohttpClientMocker], - light_ws_data: WebsocketDataType, -) -> None: - """Test that all supported switch entities are created.""" + } + with patch.dict(DECONZ_WEB_REQUEST, data): + config_entry = await setup_deconz_integration(hass, aioclient_mock) + assert len(hass.states.async_all()) == 4 assert hass.states.get("switch.on_off_switch").state == STATE_ON assert hass.states.get("switch.smart_plug").state == STATE_OFF assert hass.states.get("switch.on_off_relay").state == STATE_ON assert hass.states.get("switch.unsupported_switch") is None - await light_ws_data({"state": {"on": False}}) + event_changed_light = { + "t": "event", + "e": "changed", + "r": "lights", + "id": "1", + "state": {"on": False}, + } + await mock_deconz_websocket(data=event_changed_light) + await hass.async_block_till_done() + assert hass.states.get("switch.on_off_switch").state == STATE_OFF # Verify service calls - aioclient_mock = mock_put_request("/lights/0/state") + mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") # Service turn on power plug @@ -91,29 +107,44 @@ async def test_power_plugs( ) assert aioclient_mock.mock_calls[2][2] == {"on": False} + await hass.config_entries.async_unload(config_entry.entry_id) + + states = hass.states.async_all() + assert len(states) == 4 + for state in states: + assert state.state == STATE_UNAVAILABLE + + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + -@pytest.mark.parametrize( - "light_payload", - [ - { - "name": "On Off output device", - "type": "On/Off output", - "state": {"on": True, "reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - ], -) async def test_remove_legacy_on_off_output_as_light( hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, ) -> None: """Test that switch platform cleans up legacy light entities.""" - assert entity_registry.async_get_or_create( - LIGHT_DOMAIN, DECONZ_DOMAIN, "00:00:00:00:00:00:00:00-00" + unique_id = "00:00:00:00:00:00:00:00-00" + + switch_light_entity = entity_registry.async_get_or_create( + LIGHT_DOMAIN, DECONZ_DOMAIN, unique_id ) - await config_entry_factory() + assert switch_light_entity + + data = { + "lights": { + "1": { + "name": "On Off output device", + "type": "On/Off output", + "state": {"on": True, "reachable": True}, + "uniqueid": unique_id, + }, + } + } + with patch.dict(DECONZ_WEB_REQUEST, data): + await setup_deconz_integration(hass, aioclient_mock) assert not entity_registry.async_get("light.on_off_output_device") assert entity_registry.async_get("switch.on_off_output_device") diff --git a/tests/components/deluge/__init__.py b/tests/components/deluge/__init__.py index c9027f0c11f..4efbe04cf52 100644 --- a/tests/components/deluge/__init__.py +++ b/tests/components/deluge/__init__.py @@ -14,10 +14,3 @@ CONF_DATA = { CONF_PORT: DEFAULT_RPC_PORT, CONF_WEB_PORT: DEFAULT_WEB_PORT, } - -GET_TORRENT_STATUS_RESPONSE = { - "upload_rate": 3462.0, - "download_rate": 98.5, - "dht_upload_rate": 7818.0, - "dht_download_rate": 2658.0, -} diff --git 
a/tests/components/deluge/test_config_flow.py b/tests/components/deluge/test_config_flow.py index c336fc81cc6..37229d4a72e 100644 --- a/tests/components/deluge/test_config_flow.py +++ b/tests/components/deluge/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from homeassistant.components.deluge.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -113,7 +113,16 @@ async def test_flow_reauth(hass: HomeAssistant, api) -> None: entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + CONF_SOURCE: SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=CONF_DATA, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/deluge/test_sensor.py b/tests/components/deluge/test_sensor.py deleted file mode 100644 index 7ff6dda0b94..00000000000 --- a/tests/components/deluge/test_sensor.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Test Deluge sensor.py methods.""" - -from homeassistant.components.deluge.const import DelugeSensorType -from homeassistant.components.deluge.sensor import get_state - -from . import GET_TORRENT_STATUS_RESPONSE - - -def test_get_state() -> None: - """Tests get_state() with different keys.""" - - download_result = get_state( - GET_TORRENT_STATUS_RESPONSE, DelugeSensorType.DOWNLOAD_SPEED_SENSOR - ) - assert download_result == 0.1 # round(98.5 / 1024, 2) - - upload_result = get_state( - GET_TORRENT_STATUS_RESPONSE, DelugeSensorType.UPLOAD_SPEED_SENSOR - ) - assert upload_result == 3.4 # round(3462.0 / 1024, 1) - - protocol_upload_result = get_state( - GET_TORRENT_STATUS_RESPONSE, - DelugeSensorType.PROTOCOL_TRAFFIC_UPLOAD_SPEED_SENSOR, - ) - assert protocol_upload_result == 7.6 # round(7818.0 / 1024, 1) - - protocol_download_result = get_state( - GET_TORRENT_STATUS_RESPONSE, - DelugeSensorType.PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR, - ) - assert protocol_download_result == 2.6 # round(2658.0/1024, 1) diff --git a/tests/components/demo/test_button.py b/tests/components/demo/test_button.py index 702ee3aa3e0..6049de12570 100644 --- a/tests/components/demo/test_button.py +++ b/tests/components/demo/test_button.py @@ -5,7 +5,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.button import DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -27,9 +27,7 @@ async def button_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_button(hass: HomeAssistant, button_only) -> None: """Initialize setup demo button entity.""" - assert await async_setup_component( - hass, BUTTON_DOMAIN, {"button": {"platform": "demo"}} - ) + assert await async_setup_component(hass, DOMAIN, {"button": {"platform": "demo"}}) await hass.async_block_till_done() @@ -49,7 +47,7 @@ async def test_press(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> Non now = dt_util.parse_datetime("2021-01-09 12:00:00+00:00") freezer.move_to(now) await 
hass.services.async_call( - BUTTON_DOMAIN, + DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ENTITY_PUSH}, blocking=True, diff --git a/tests/components/demo/test_camera.py b/tests/components/demo/test_camera.py index c8d8e1ef2e4..756609ed094 100644 --- a/tests/components/demo/test_camera.py +++ b/tests/components/demo/test_camera.py @@ -1,9 +1,9 @@ """The tests for local file camera component.""" -from collections.abc import Generator from unittest.mock import patch import pytest +from typing_extensions import Generator from homeassistant.components.camera import ( DOMAIN as CAMERA_DOMAIN, @@ -11,7 +11,8 @@ from homeassistant.components.camera import ( SERVICE_ENABLE_MOTION, SERVICE_TURN_OFF, SERVICE_TURN_ON, - CameraState, + STATE_IDLE, + STATE_STREAMING, async_get_image, ) from homeassistant.components.demo import DOMAIN @@ -45,7 +46,7 @@ async def demo_camera(hass: HomeAssistant, camera_only: None) -> None: async def test_init_state_is_streaming(hass: HomeAssistant) -> None: """Demo camera initialize as streaming.""" state = hass.states.get(ENTITY_CAMERA) - assert state.state == CameraState.STREAMING + assert state.state == STATE_STREAMING with patch( "homeassistant.components.demo.camera.Path.read_bytes", return_value=b"ON" @@ -58,21 +59,21 @@ async def test_init_state_is_streaming(hass: HomeAssistant) -> None: async def test_turn_on_state_back_to_streaming(hass: HomeAssistant) -> None: """After turn on state back to streaming.""" state = hass.states.get(ENTITY_CAMERA) - assert state.state == CameraState.STREAMING + assert state.state == STATE_STREAMING await hass.services.async_call( CAMERA_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_CAMERA}, blocking=True ) state = hass.states.get(ENTITY_CAMERA) - assert state.state == CameraState.IDLE + assert state.state == STATE_IDLE await hass.services.async_call( CAMERA_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_CAMERA}, blocking=True ) state = hass.states.get(ENTITY_CAMERA) - assert state.state == CameraState.STREAMING + assert state.state == STATE_STREAMING async def test_turn_off_image(hass: HomeAssistant) -> None: @@ -89,7 +90,7 @@ async def test_turn_off_image(hass: HomeAssistant) -> None: async def test_turn_off_invalid_camera(hass: HomeAssistant) -> None: """Turn off non-exist camera should quietly fail.""" state = hass.states.get(ENTITY_CAMERA) - assert state.state == CameraState.STREAMING + assert state.state == STATE_STREAMING await hass.services.async_call( CAMERA_DOMAIN, @@ -99,7 +100,7 @@ async def test_turn_off_invalid_camera(hass: HomeAssistant) -> None: ) state = hass.states.get(ENTITY_CAMERA) - assert state.state == CameraState.STREAMING + assert state.state == STATE_STREAMING async def test_motion_detection(hass: HomeAssistant) -> None: diff --git a/tests/components/demo/test_climate.py b/tests/components/demo/test_climate.py index 42152645ecb..682b85f0845 100644 --- a/tests/components/demo/test_climate.py +++ b/tests/components/demo/test_climate.py @@ -1,9 +1,9 @@ """The tests for the demo climate component.""" -from collections.abc import Generator from unittest.mock import patch import pytest +from typing_extensions import Generator import voluptuous as vol from homeassistant.components.climate import ( @@ -22,7 +22,7 @@ from homeassistant.components.climate import ( ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, - DOMAIN as CLIMATE_DOMAIN, + DOMAIN, PRESET_AWAY, PRESET_ECO, SERVICE_SET_FAN_MODE, @@ -64,9 +64,7 @@ def climate_only() -> Generator[None]: async def setup_demo_climate(hass: HomeAssistant, 
climate_only: None) -> None: """Initialize setup demo climate.""" hass.config.units = METRIC_SYSTEM - assert await async_setup_component( - hass, CLIMATE_DOMAIN, {"climate": {"platform": "demo"}} - ) + assert await async_setup_component(hass, DOMAIN, {"climate": {"platform": "demo"}}) await hass.async_block_till_done() @@ -106,7 +104,7 @@ async def test_set_only_target_temp_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_TEMPERATURE: None}, blocking=True, @@ -122,7 +120,7 @@ async def test_set_only_target_temp(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_TEMPERATURE) == 21 await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_TEMPERATURE: 30}, blocking=True, @@ -138,7 +136,7 @@ async def test_set_only_target_temp_with_convert(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_TEMPERATURE) == 20 await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_HEATPUMP, ATTR_TEMPERATURE: 21}, blocking=True, @@ -156,7 +154,7 @@ async def test_set_target_temp_range(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 24.0 await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_ECOBEE, @@ -181,7 +179,7 @@ async def test_set_target_temp_range_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_ECOBEE, @@ -204,7 +202,7 @@ async def test_set_temp_with_hvac_mode(hass: HomeAssistant) -> None: assert state.state == HVACMode.COOL await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_CLIMATE, @@ -226,7 +224,7 @@ async def test_set_target_humidity_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HUMIDITY: None}, blocking=True, @@ -242,7 +240,7 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_HUMIDITY) == 67.4 await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HUMIDITY: 64}, blocking=True, @@ -259,7 +257,7 @@ async def test_set_fan_mode_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_FAN_MODE: None}, blocking=True, @@ -275,7 +273,7 @@ async def test_set_fan_mode(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_FAN_MODE) == "on_high" await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_FAN_MODE: "on_low"}, blocking=True, @@ -292,7 +290,7 @@ async def test_set_swing_mode_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_SWING_MODE: None}, blocking=True, @@ -308,7 +306,7 @@ async def test_set_swing(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_SWING_MODE) == "off" await hass.services.async_call( - CLIMATE_DOMAIN, + 
DOMAIN,
         SERVICE_SET_SWING_MODE,
         {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_SWING_MODE: "auto"},
         blocking=True,
@@ -329,7 +327,7 @@ async def test_set_hvac_bad_attr_and_state(hass: HomeAssistant) -> None:
     with pytest.raises(vol.Invalid):
         await hass.services.async_call(
-            CLIMATE_DOMAIN,
+            DOMAIN,
             SERVICE_SET_HVAC_MODE,
             {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: None},
             blocking=True,
@@ -346,7 +344,7 @@ async def test_set_hvac(hass: HomeAssistant) -> None:
     assert state.state == HVACMode.COOL
     await hass.services.async_call(
-        CLIMATE_DOMAIN,
+        DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: HVACMode.HEAT},
         blocking=True,
@@ -359,7 +357,7 @@ async def test_set_hvac(hass: HomeAssistant) -> None:
 async def test_set_hold_mode_away(hass: HomeAssistant) -> None:
     """Test setting the hold mode away."""
     await hass.services.async_call(
-        CLIMATE_DOMAIN,
+        DOMAIN,
         SERVICE_SET_PRESET_MODE,
         {ATTR_ENTITY_ID: ENTITY_ECOBEE, ATTR_PRESET_MODE: PRESET_AWAY},
         blocking=True,
@@ -372,7 +370,7 @@ async def test_set_hold_mode_away(hass: HomeAssistant) -> None:
 async def test_set_hold_mode_eco(hass: HomeAssistant) -> None:
     """Test setting the hold mode eco."""
     await hass.services.async_call(
-        CLIMATE_DOMAIN,
+        DOMAIN,
         SERVICE_SET_PRESET_MODE,
         {ATTR_ENTITY_ID: ENTITY_ECOBEE, ATTR_PRESET_MODE: PRESET_ECO},
         blocking=True,
@@ -385,7 +383,7 @@ async def test_set_hold_mode_eco(hass: HomeAssistant) -> None:
 async def test_turn_on(hass: HomeAssistant) -> None:
     """Test turn on device."""
     await hass.services.async_call(
-        CLIMATE_DOMAIN,
+        DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: HVACMode.OFF},
         blocking=True,
@@ -395,7 +393,7 @@ async def test_turn_on(hass: HomeAssistant) -> None:
     assert state.state == HVACMode.OFF
     await hass.services.async_call(
-        CLIMATE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_CLIMATE}, blocking=True
+        DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_CLIMATE}, blocking=True
     )
     state = hass.states.get(ENTITY_CLIMATE)
     assert state.state == HVACMode.HEAT
@@ -404,7 +402,7 @@ async def test_turn_on(hass: HomeAssistant) -> None:
 async def test_turn_off(hass: HomeAssistant) -> None:
     """Test turn off device."""
     await hass.services.async_call(
-        CLIMATE_DOMAIN,
+        DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: HVACMode.HEAT},
         blocking=True,
@@ -414,10 +412,7 @@ async def test_turn_off(hass: HomeAssistant) -> None:
     assert state.state == HVACMode.HEAT
     await hass.services.async_call(
-        CLIMATE_DOMAIN,
-        SERVICE_TURN_OFF,
-        {ATTR_ENTITY_ID: ENTITY_CLIMATE},
-        blocking=True,
+        DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_CLIMATE}, blocking=True
     )
     state = hass.states.get(ENTITY_CLIMATE)
     assert state.state == HVACMode.OFF
diff --git a/tests/components/demo/test_cover.py b/tests/components/demo/test_cover.py
index 97cad5bbe14..7ee408d3bfc 100644
--- a/tests/components/demo/test_cover.py
+++ b/tests/components/demo/test_cover.py
@@ -1,18 +1,17 @@
 """The tests for the Demo cover platform."""
-from collections.abc import Generator
 from datetime import timedelta
 from unittest.mock import patch
 import pytest
+from typing_extensions import Generator
 from homeassistant.components.cover import (
     ATTR_CURRENT_POSITION,
     ATTR_CURRENT_TILT_POSITION,
     ATTR_POSITION,
     ATTR_TILT_POSITION,
-    DOMAIN as COVER_DOMAIN,
-    CoverState,
+    DOMAIN,
 )
 from homeassistant.const import (
     ATTR_ENTITY_ID,
@@ -27,6 +26,10 @@ from homeassistant.const import (
     SERVICE_STOP_COVER_TILT,
     SERVICE_TOGGLE,
     SERVICE_TOGGLE_COVER_TILT,
+    STATE_CLOSED,
+
STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, Platform, ) from homeassistant.core import HomeAssistant @@ -52,8 +55,8 @@ def cover_only() -> Generator[None]: @pytest.fixture(autouse=True) async def setup_comp(hass: HomeAssistant, cover_only: None) -> None: """Set up demo cover component.""" - with assert_setup_component(1, COVER_DOMAIN): - await async_setup_component(hass, COVER_DOMAIN, CONFIG) + with assert_setup_component(1, DOMAIN): + await async_setup_component(hass, DOMAIN, CONFIG) await hass.async_block_till_done() @@ -72,41 +75,41 @@ async def test_supported_features(hass: HomeAssistant) -> None: async def test_close_cover(hass: HomeAssistant) -> None: """Test closing the cover.""" state = hass.states.get(ENTITY_COVER) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( - COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) state = hass.states.get(ENTITY_COVER) - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 async def test_open_cover(hass: HomeAssistant) -> None: """Test opening the cover.""" state = hass.states.get(ENTITY_COVER) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) state = hass.states.get(ENTITY_COVER) - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 @@ -114,7 +117,7 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: """Test toggling the cover.""" # Start open await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) @@ -122,11 +125,11 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes["current_position"] == 100 # Toggle closed await hass.services.async_call( - COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -134,11 +137,11 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert 
state.attributes[ATTR_CURRENT_POSITION] == 0 # Toggle open await hass.services.async_call( - COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -146,7 +149,7 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 @@ -155,7 +158,7 @@ async def test_set_cover_position(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_POSITION: 10}, blocking=True, @@ -174,13 +177,13 @@ async def test_stop_cover(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() await hass.services.async_call( - COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) async_fire_time_changed(hass, future) await hass.async_block_till_done() @@ -193,10 +196,7 @@ async def test_close_cover_tilt(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER_TILT, - {ATTR_ENTITY_ID: ENTITY_COVER}, - blocking=True, + DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) @@ -212,10 +212,7 @@ async def test_open_cover_tilt(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER_TILT, - {ATTR_ENTITY_ID: ENTITY_COVER}, - blocking=True, + DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) @@ -230,10 +227,7 @@ async def test_toggle_cover_tilt(hass: HomeAssistant) -> None: """Test toggling the cover tilt.""" # Start open await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER_TILT, - {ATTR_ENTITY_ID: ENTITY_COVER}, - blocking=True, + DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) @@ -244,10 +238,7 @@ async def test_toggle_cover_tilt(hass: HomeAssistant) -> None: assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 # Toggle closed await hass.services.async_call( - COVER_DOMAIN, - SERVICE_TOGGLE_COVER_TILT, - {ATTR_ENTITY_ID: ENTITY_COVER}, - blocking=True, + DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -258,10 +249,7 @@ async def test_toggle_cover_tilt(hass: HomeAssistant) -> None: assert 
state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 # Toggle Open await hass.services.async_call( - COVER_DOMAIN, - SERVICE_TOGGLE_COVER_TILT, - {ATTR_ENTITY_ID: ENTITY_COVER}, - blocking=True, + DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -277,7 +265,7 @@ async def test_set_cover_tilt_position(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_TILT_POSITION: 90}, blocking=True, @@ -296,19 +284,13 @@ async def test_stop_cover_tilt(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER_TILT, - {ATTR_ENTITY_ID: ENTITY_COVER}, - blocking=True, + DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() await hass.services.async_call( - COVER_DOMAIN, - SERVICE_STOP_COVER_TILT, - {ATTR_ENTITY_ID: ENTITY_COVER}, - blocking=True, + DOMAIN, SERVICE_STOP_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) async_fire_time_changed(hass, future) await hass.async_block_till_done() diff --git a/tests/components/demo/test_date.py b/tests/components/demo/test_date.py index 228be936599..5e0fc2c29cd 100644 --- a/tests/components/demo/test_date.py +++ b/tests/components/demo/test_date.py @@ -4,11 +4,7 @@ from unittest.mock import patch import pytest -from homeassistant.components.date import ( - ATTR_DATE, - DOMAIN as DATE_DOMAIN, - SERVICE_SET_VALUE, -) +from homeassistant.components.date import ATTR_DATE, DOMAIN, SERVICE_SET_VALUE from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -29,9 +25,7 @@ async def date_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_date(hass: HomeAssistant, date_only) -> None: """Initialize setup demo date.""" - assert await async_setup_component( - hass, DATE_DOMAIN, {"date": {"platform": "demo"}} - ) + assert await async_setup_component(hass, DOMAIN, {"date": {"platform": "demo"}}) await hass.async_block_till_done() @@ -44,7 +38,7 @@ def test_setup_params(hass: HomeAssistant) -> None: async def test_set_datetime(hass: HomeAssistant) -> None: """Test set datetime service.""" await hass.services.async_call( - DATE_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: ENTITY_DATE, ATTR_DATE: "2021-02-03"}, blocking=True, diff --git a/tests/components/demo/test_datetime.py b/tests/components/demo/test_datetime.py index 82cd5044068..bd4adafd695 100644 --- a/tests/components/demo/test_datetime.py +++ b/tests/components/demo/test_datetime.py @@ -4,11 +4,7 @@ from unittest.mock import patch import pytest -from homeassistant.components.datetime import ( - ATTR_DATETIME, - DOMAIN as DATETIME_DOMAIN, - SERVICE_SET_VALUE, -) +from homeassistant.components.datetime import ATTR_DATETIME, DOMAIN, SERVICE_SET_VALUE from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -29,9 +25,7 @@ async def datetime_only() -> None: @pytest.fixture(autouse=True) async def 
setup_demo_datetime(hass: HomeAssistant, datetime_only) -> None: """Initialize setup demo datetime.""" - assert await async_setup_component( - hass, DATETIME_DOMAIN, {"datetime": {"platform": "demo"}} - ) + assert await async_setup_component(hass, DOMAIN, {"datetime": {"platform": "demo"}}) await hass.async_block_till_done() @@ -45,7 +39,7 @@ async def test_set_datetime(hass: HomeAssistant) -> None: """Test set datetime service.""" await hass.config.async_set_time_zone("UTC") await hass.services.async_call( - DATETIME_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: ENTITY_DATETIME, ATTR_DATETIME: "2021-02-03 01:02:03"}, blocking=True, diff --git a/tests/components/demo/test_humidifier.py b/tests/components/demo/test_humidifier.py index 93bd2b13743..0f0fcaf43fd 100644 --- a/tests/components/demo/test_humidifier.py +++ b/tests/components/demo/test_humidifier.py @@ -11,7 +11,7 @@ from homeassistant.components.humidifier import ( ATTR_HUMIDITY, ATTR_MAX_HUMIDITY, ATTR_MIN_HUMIDITY, - DOMAIN as HUMIDITY_DOMAIN, + DOMAIN, MODE_AWAY, SERVICE_SET_HUMIDITY, SERVICE_SET_MODE, @@ -48,7 +48,7 @@ async def humidifier_only() -> None: async def setup_demo_humidifier(hass: HomeAssistant, humidifier_only: None): """Initialize setup demo humidifier.""" assert await async_setup_component( - hass, HUMIDITY_DOMAIN, {"humidifier": {"platform": "demo"}} + hass, DOMAIN, {"humidifier": {"platform": "demo"}} ) await hass.async_block_till_done() @@ -76,7 +76,7 @@ async def test_set_target_humidity_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - HUMIDITY_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_HUMIDITY: None, ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True, @@ -93,7 +93,7 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_HUMIDITY) == 54.2 await hass.services.async_call( - HUMIDITY_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_HUMIDITY: 64, ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True, @@ -107,7 +107,7 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: async def test_set_hold_mode_away(hass: HomeAssistant) -> None: """Test setting the hold mode away.""" await hass.services.async_call( - HUMIDITY_DOMAIN, + DOMAIN, SERVICE_SET_MODE, {ATTR_MODE: MODE_AWAY, ATTR_ENTITY_ID: ENTITY_HYGROSTAT}, blocking=True, @@ -121,7 +121,7 @@ async def test_set_hold_mode_away(hass: HomeAssistant) -> None: async def test_set_hold_mode_eco(hass: HomeAssistant) -> None: """Test setting the hold mode eco.""" await hass.services.async_call( - HUMIDITY_DOMAIN, + DOMAIN, SERVICE_SET_MODE, {ATTR_MODE: "eco", ATTR_ENTITY_ID: ENTITY_HYGROSTAT}, blocking=True, @@ -135,20 +135,14 @@ async def test_set_hold_mode_eco(hass: HomeAssistant) -> None: async def test_turn_on(hass: HomeAssistant) -> None: """Test turn on device.""" await hass.services.async_call( - HUMIDITY_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, - blocking=True, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_OFF assert state.attributes.get(ATTR_ACTION) == "off" await hass.services.async_call( - HUMIDITY_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, - blocking=True, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_ON @@ -158,20 +152,14 @@ async def test_turn_on(hass: HomeAssistant) -> 
None: async def test_turn_off(hass: HomeAssistant) -> None: """Test turn off device.""" await hass.services.async_call( - HUMIDITY_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, - blocking=True, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_ON assert state.attributes.get(ATTR_ACTION) == "drying" await hass.services.async_call( - HUMIDITY_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, - blocking=True, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_OFF @@ -181,28 +169,19 @@ async def test_turn_off(hass: HomeAssistant) -> None: async def test_toggle(hass: HomeAssistant) -> None: """Test toggle device.""" await hass.services.async_call( - HUMIDITY_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, - blocking=True, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_ON await hass.services.async_call( - HUMIDITY_DOMAIN, - SERVICE_TOGGLE, - {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, - blocking=True, + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_OFF await hass.services.async_call( - HUMIDITY_DOMAIN, - SERVICE_TOGGLE, - {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, - blocking=True, + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_ON diff --git a/tests/components/demo/test_init.py b/tests/components/demo/test_init.py index 0af15455949..498a03600cb 100644 --- a/tests/components/demo/test_init.py +++ b/tests/components/demo/test_init.py @@ -1,10 +1,10 @@ """The tests for the Demo component.""" -from collections.abc import Generator import json from unittest.mock import patch import pytest +from typing_extensions import Generator from homeassistant.components.demo import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/demo/test_light.py b/tests/components/demo/test_light.py index e3b1efc7eec..5c2c478b0bf 100644 --- a/tests/components/demo/test_light.py +++ b/tests/components/demo/test_light.py @@ -1,9 +1,9 @@ """The tests for the demo light component.""" -from collections.abc import Generator from unittest.mock import patch import pytest +from typing_extensions import Generator from homeassistant.components.demo import DOMAIN from homeassistant.components.light import ( diff --git a/tests/components/demo/test_lock.py b/tests/components/demo/test_lock.py index 1fc4209d300..853b9197ab7 100644 --- a/tests/components/demo/test_lock.py +++ b/tests/components/demo/test_lock.py @@ -10,9 +10,19 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - LockState, + STATE_JAMMED, + STATE_LOCKED, + STATE_LOCKING, + STATE_UNLOCKED, + STATE_UNLOCKING, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + EVENT_STATE_CHANGED, + STATE_OPEN, + STATE_OPENING, + Platform, ) -from homeassistant.const import ATTR_ENTITY_ID, EVENT_STATE_CHANGED, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -47,7 +57,7 @@ async def setup_comp(hass: HomeAssistant, lock_only: None): async def test_locking(hass: HomeAssistant) -> None: 
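Several demo test modules in this diff also swap the origin of the Generator annotation used by their fixtures. For a simple `-> Generator[None]` annotation either import works; a hypothetical fixture illustrating the shape:

# Sketch of the Generator annotation these fixtures use. The "-" side imports
# the standard-library name, the "+" side the typing_extensions backport; for
# the one-parameter annotation below either import is accepted at runtime.
from collections.abc import Generator  # "-" side
# from typing_extensions import Generator  # "+" side

import pytest


@pytest.fixture
def demo_platform_only() -> Generator[None]:
    """Hypothetical fixture; the real ones patch the demo integration here."""
    yield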
"""Test the locking of a lock.""" state = hass.states.get(KITCHEN) - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED await hass.async_block_till_done() state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) @@ -57,17 +67,17 @@ async def test_locking(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == KITCHEN - assert state_changes[0].data["new_state"].state == LockState.LOCKING + assert state_changes[0].data["new_state"].state == STATE_LOCKING assert state_changes[1].data["entity_id"] == KITCHEN - assert state_changes[1].data["new_state"].state == LockState.LOCKED + assert state_changes[1].data["new_state"].state == STATE_LOCKED @patch.object(demo_lock, "LOCK_UNLOCK_DELAY", 0) async def test_unlocking(hass: HomeAssistant) -> None: """Test the unlocking of a lock.""" state = hass.states.get(FRONT) - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKED await hass.async_block_till_done() state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) @@ -77,17 +87,17 @@ async def test_unlocking(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == FRONT - assert state_changes[0].data["new_state"].state == LockState.UNLOCKING + assert state_changes[0].data["new_state"].state == STATE_UNLOCKING assert state_changes[1].data["entity_id"] == FRONT - assert state_changes[1].data["new_state"].state == LockState.UNLOCKED + assert state_changes[1].data["new_state"].state == STATE_UNLOCKED @patch.object(demo_lock, "LOCK_UNLOCK_DELAY", 0) async def test_opening(hass: HomeAssistant) -> None: """Test the opening of a lock.""" state = hass.states.get(OPENABLE_LOCK) - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKED await hass.async_block_till_done() state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) @@ -97,17 +107,17 @@ async def test_opening(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == OPENABLE_LOCK - assert state_changes[0].data["new_state"].state == LockState.OPENING + assert state_changes[0].data["new_state"].state == STATE_OPENING assert state_changes[1].data["entity_id"] == OPENABLE_LOCK - assert state_changes[1].data["new_state"].state == LockState.OPEN + assert state_changes[1].data["new_state"].state == STATE_OPEN @patch.object(demo_lock, "LOCK_UNLOCK_DELAY", 0) async def test_jammed_when_locking(hass: HomeAssistant) -> None: """Test the locking of a lock jams.""" state = hass.states.get(POORLY_INSTALLED) - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED await hass.async_block_till_done() state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) @@ -117,10 +127,10 @@ async def test_jammed_when_locking(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == POORLY_INSTALLED - assert state_changes[0].data["new_state"].state == LockState.LOCKING + assert state_changes[0].data["new_state"].state == STATE_LOCKING assert state_changes[1].data["entity_id"] == POORLY_INSTALLED - assert state_changes[1].data["new_state"].state == LockState.JAMMED + assert state_changes[1].data["new_state"].state == STATE_JAMMED async def test_opening_mocked(hass: HomeAssistant) -> None: diff --git a/tests/components/demo/test_media_player.py b/tests/components/demo/test_media_player.py index 7487a4c13e3..a6669fa705c 100644 --- 
a/tests/components/demo/test_media_player.py +++ b/tests/components/demo/test_media_player.py @@ -497,7 +497,7 @@ async def test_media_image_proxy( class MockResponse: """Test response.""" - def __init__(self) -> None: + def __init__(self): """Test response init.""" self.status = 200 self.headers = {"Content-Type": "sometype"} diff --git a/tests/components/demo/test_notify.py b/tests/components/demo/test_notify.py index 98b3de8448a..4ebbfbdac04 100644 --- a/tests/components/demo/test_notify.py +++ b/tests/components/demo/test_notify.py @@ -1,9 +1,9 @@ """The tests for the notify demo platform.""" -from collections.abc import Generator from unittest.mock import patch import pytest +from typing_extensions import Generator from homeassistant.components import notify from homeassistant.components.demo import DOMAIN @@ -81,6 +81,6 @@ async def test_calling_notify_from_script_loaded_from_yaml( await hass.services.async_call("script", "test") await hass.async_block_till_done() assert len(events) == 1 - assert events[0].data == { + assert { "message": "Test 123 4", - } + } == events[0].data diff --git a/tests/components/demo/test_number.py b/tests/components/demo/test_number.py index 4b7cbe4864f..37763b6e289 100644 --- a/tests/components/demo/test_number.py +++ b/tests/components/demo/test_number.py @@ -1,9 +1,9 @@ """The tests for the demo number component.""" -from collections.abc import Generator from unittest.mock import patch import pytest +from typing_extensions import Generator import voluptuous as vol from homeassistant.components.number import ( @@ -11,7 +11,7 @@ from homeassistant.components.number import ( ATTR_MIN, ATTR_STEP, ATTR_VALUE, - DOMAIN as NUMBER_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, NumberMode, ) @@ -39,9 +39,7 @@ def number_only() -> Generator[None]: @pytest.fixture(autouse=True) async def setup_demo_number(hass: HomeAssistant, number_only: None) -> None: """Initialize setup demo Number entity.""" - assert await async_setup_component( - hass, NUMBER_DOMAIN, {"number": {"platform": "demo"}} - ) + assert await async_setup_component(hass, DOMAIN, {"number": {"platform": "demo"}}) await hass.async_block_till_done() @@ -85,7 +83,7 @@ async def test_set_value_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - NUMBER_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: None, ATTR_ENTITY_ID: ENTITY_VOLUME}, blocking=True, @@ -103,7 +101,7 @@ async def test_set_value_bad_range(hass: HomeAssistant) -> None: with pytest.raises(ServiceValidationError): await hass.services.async_call( - NUMBER_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: 1024, ATTR_ENTITY_ID: ENTITY_VOLUME}, blocking=True, @@ -120,7 +118,7 @@ async def test_set_set_value(hass: HomeAssistant) -> None: assert state.state == "42.0" await hass.services.async_call( - NUMBER_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: 23, ATTR_ENTITY_ID: ENTITY_VOLUME}, blocking=True, diff --git a/tests/components/demo/test_select.py b/tests/components/demo/test_select.py index a78f8552ec7..f9805f44866 100644 --- a/tests/components/demo/test_select.py +++ b/tests/components/demo/test_select.py @@ -7,7 +7,7 @@ import pytest from homeassistant.components.select import ( ATTR_OPTION, ATTR_OPTIONS, - DOMAIN as SELECT_DOMAIN, + DOMAIN, SERVICE_SELECT_OPTION, ) from homeassistant.const import ATTR_ENTITY_ID, Platform @@ -31,9 +31,7 @@ async def select_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_select(hass: HomeAssistant, select_only) -> None: """Initialize setup 
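The demo number tests above exercise two distinct failure paths: a value that fails schema validation up front and a value that is only rejected later by range checking. A hedged sketch of that pattern, assuming the demo number platform is set up by a fixture; the entity id is a placeholder.

# Sketch of the error-path pattern from the demo number tests above: invalid
# service data should raise before any state changes. Assumes the HA test
# harness and a demo number entity with a 0-100 range, as in the hunk.
import pytest
import voluptuous as vol

from homeassistant.components.number import ATTR_VALUE, DOMAIN, SERVICE_SET_VALUE
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError

ENTITY_VOLUME = "number.volume"  # placeholder entity id


async def test_set_value_errors_sketch(hass: HomeAssistant) -> None:
    """A None value fails schema validation; an out-of-range one fails later."""
    with pytest.raises(vol.Invalid):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_SET_VALUE,
            {ATTR_VALUE: None, ATTR_ENTITY_ID: ENTITY_VOLUME},
            blocking=True,
        )
    with pytest.raises(ServiceValidationError):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_SET_VALUE,
            {ATTR_VALUE: 1024, ATTR_ENTITY_ID: ENTITY_VOLUME},
            blocking=True,
        )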
demo select entity.""" - assert await async_setup_component( - hass, SELECT_DOMAIN, {"select": {"platform": "demo"}} - ) + assert await async_setup_component(hass, DOMAIN, {"select": {"platform": "demo"}}) await hass.async_block_till_done() @@ -57,7 +55,7 @@ async def test_select_option_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(ServiceValidationError): await hass.services.async_call( - SELECT_DOMAIN, + DOMAIN, SERVICE_SELECT_OPTION, {ATTR_OPTION: "slow_speed", ATTR_ENTITY_ID: ENTITY_SPEED}, blocking=True, @@ -76,7 +74,7 @@ async def test_select_option(hass: HomeAssistant) -> None: assert state.state == "ridiculous_speed" await hass.services.async_call( - SELECT_DOMAIN, + DOMAIN, SERVICE_SELECT_OPTION, {ATTR_OPTION: "light_speed", ATTR_ENTITY_ID: ENTITY_SPEED}, blocking=True, diff --git a/tests/components/demo/test_siren.py b/tests/components/demo/test_siren.py index c537e73508d..e21cd96efc9 100644 --- a/tests/components/demo/test_siren.py +++ b/tests/components/demo/test_siren.py @@ -8,7 +8,7 @@ from homeassistant.components.siren import ( ATTR_AVAILABLE_TONES, ATTR_TONE, ATTR_VOLUME_LEVEL, - DOMAIN as SIREN_DOMAIN, + DOMAIN, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -39,9 +39,7 @@ async def siren_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_siren(hass: HomeAssistant, siren_only: None): """Initialize setup demo siren.""" - assert await async_setup_component( - hass, SIREN_DOMAIN, {"siren": {"platform": "demo"}} - ) + assert await async_setup_component(hass, DOMAIN, {"siren": {"platform": "demo"}}) await hass.async_block_till_done() @@ -61,13 +59,13 @@ def test_all_setup_params(hass: HomeAssistant) -> None: async def test_turn_on(hass: HomeAssistant) -> None: """Test turn on device.""" await hass.services.async_call( - SIREN_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_OFF await hass.services.async_call( - SIREN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_ON @@ -75,7 +73,7 @@ async def test_turn_on(hass: HomeAssistant) -> None: # Test that an invalid tone will raise a ValueError with pytest.raises(ValueError): await hass.services.async_call( - SIREN_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN_WITH_ALL_FEATURES, ATTR_TONE: "invalid_tone"}, blocking=True, @@ -85,13 +83,13 @@ async def test_turn_on(hass: HomeAssistant) -> None: async def test_turn_off(hass: HomeAssistant) -> None: """Test turn off device.""" await hass.services.async_call( - SIREN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_ON await hass.services.async_call( - SIREN_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_OFF @@ -100,19 +98,19 @@ async def test_turn_off(hass: HomeAssistant) -> None: async def test_toggle(hass: HomeAssistant) -> None: """Test toggle device.""" await hass.services.async_call( - SIREN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + DOMAIN, 
SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_ON await hass.services.async_call( - SIREN_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_OFF await hass.services.async_call( - SIREN_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_ON @@ -124,7 +122,7 @@ async def test_turn_on_strip_attributes(hass: HomeAssistant) -> None: "homeassistant.components.demo.siren.DemoSiren.async_turn_on" ) as svc_call: await hass.services.async_call( - SIREN_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN, ATTR_VOLUME_LEVEL: 1}, blocking=True, diff --git a/tests/components/demo/test_switch.py b/tests/components/demo/test_switch.py index 57384526dc0..8b78171fd17 100644 --- a/tests/components/demo/test_switch.py +++ b/tests/components/demo/test_switch.py @@ -1,9 +1,9 @@ """The tests for the demo switch component.""" -from collections.abc import Generator from unittest.mock import patch import pytest +from typing_extensions import Generator from homeassistant.components.demo import DOMAIN from homeassistant.components.switch import ( diff --git a/tests/components/demo/test_text.py b/tests/components/demo/test_text.py index b3291012167..3588330c75c 100644 --- a/tests/components/demo/test_text.py +++ b/tests/components/demo/test_text.py @@ -1,16 +1,16 @@ """The tests for the demo text component.""" -from collections.abc import Generator from unittest.mock import patch import pytest +from typing_extensions import Generator from homeassistant.components.text import ( ATTR_MAX, ATTR_MIN, ATTR_PATTERN, ATTR_VALUE, - DOMAIN as TEXT_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, ) from homeassistant.const import ( @@ -38,9 +38,7 @@ def text_only() -> Generator[None]: @pytest.fixture(autouse=True) async def setup_demo_text(hass: HomeAssistant, text_only: None) -> None: """Initialize setup demo text.""" - assert await async_setup_component( - hass, TEXT_DOMAIN, {"text": {"platform": "demo"}} - ) + assert await async_setup_component(hass, DOMAIN, {"text": {"platform": "demo"}}) await hass.async_block_till_done() @@ -57,7 +55,7 @@ def test_setup_params(hass: HomeAssistant) -> None: async def test_set_value(hass: HomeAssistant) -> None: """Test set value service.""" await hass.services.async_call( - TEXT_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: ENTITY_TEXT, ATTR_VALUE: "new"}, blocking=True, diff --git a/tests/components/demo/test_time.py b/tests/components/demo/test_time.py index 6997e8392ed..8ef093a38f3 100644 --- a/tests/components/demo/test_time.py +++ b/tests/components/demo/test_time.py @@ -4,11 +4,7 @@ from unittest.mock import patch import pytest -from homeassistant.components.time import ( - ATTR_TIME, - DOMAIN as TIME_DOMAIN, - SERVICE_SET_VALUE, -) +from homeassistant.components.time import ATTR_TIME, DOMAIN, SERVICE_SET_VALUE from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -29,9 +25,7 @@ async def time_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_datetime(hass: HomeAssistant, time_only) -> None: """Initialize setup demo time.""" - assert await async_setup_component( 
- hass, TIME_DOMAIN, {"time": {"platform": "demo"}} - ) + assert await async_setup_component(hass, DOMAIN, {"time": {"platform": "demo"}}) await hass.async_block_till_done() @@ -44,7 +38,7 @@ def test_setup_params(hass: HomeAssistant) -> None: async def test_set_value(hass: HomeAssistant) -> None: """Test set value service.""" await hass.services.async_call( - TIME_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: ENTITY_TIME, ATTR_TIME: "01:02:03"}, blocking=True, diff --git a/tests/components/demo/test_update.py b/tests/components/demo/test_update.py index 93a9f272aeb..0a8886a085d 100644 --- a/tests/components/demo/test_update.py +++ b/tests/components/demo/test_update.py @@ -11,8 +11,7 @@ from homeassistant.components.update import ( ATTR_RELEASE_SUMMARY, ATTR_RELEASE_URL, ATTR_TITLE, - ATTR_UPDATE_PERCENTAGE, - DOMAIN as UPDATE_DOMAIN, + DOMAIN, SERVICE_INSTALL, UpdateDeviceClass, ) @@ -42,9 +41,7 @@ async def update_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_update(hass: HomeAssistant, update_only) -> None: """Initialize setup demo update entity.""" - assert await async_setup_component( - hass, UPDATE_DOMAIN, {"update": {"platform": "demo"}} - ) + assert await async_setup_component(hass, DOMAIN, {"update": {"platform": "demo"}}) await hass.async_block_till_done() @@ -126,73 +123,55 @@ def test_setup_params(hass: HomeAssistant) -> None: ) -@pytest.mark.parametrize( - ("entity_id", "steps"), - [ - ("update.demo_update_with_progress", 10), - ("update.demo_update_with_decimal_progress", 1000), - ], -) -async def test_update_with_progress( - hass: HomeAssistant, entity_id: str, steps: int -) -> None: +async def test_update_with_progress(hass: HomeAssistant) -> None: """Test update with progress.""" - state = hass.states.get(entity_id) + state = hass.states.get("update.demo_update_with_progress") assert state assert state.state == STATE_ON assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None events = [] async_track_state_change_event( hass, - entity_id, + "update.demo_update_with_progress", # pylint: disable-next=unnecessary-lambda callback(lambda event: events.append(event)), ) with patch("homeassistant.components.demo.update.FAKE_INSTALL_SLEEP_TIME", new=0): await hass.services.async_call( - UPDATE_DOMAIN, + DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: entity_id}, + {ATTR_ENTITY_ID: "update.demo_update_with_progress"}, blocking=True, ) - assert len(events) == steps + 1 - for i, event in enumerate(events[:steps]): - new_state = event.data["new_state"] - assert new_state.state == STATE_ON - assert new_state.attributes[ATTR_UPDATE_PERCENTAGE] == pytest.approx( - 100 / steps * i - ) - new_state = events[steps].data["new_state"] - assert new_state.attributes[ATTR_IN_PROGRESS] is False - assert new_state.attributes[ATTR_UPDATE_PERCENTAGE] is None - assert new_state.state == STATE_OFF + assert len(events) == 10 + assert events[0].data["new_state"].state == STATE_ON + assert events[0].data["new_state"].attributes[ATTR_IN_PROGRESS] == 10 + assert events[1].data["new_state"].attributes[ATTR_IN_PROGRESS] == 20 + assert events[2].data["new_state"].attributes[ATTR_IN_PROGRESS] == 30 + assert events[3].data["new_state"].attributes[ATTR_IN_PROGRESS] == 40 + assert events[4].data["new_state"].attributes[ATTR_IN_PROGRESS] == 50 + assert events[5].data["new_state"].attributes[ATTR_IN_PROGRESS] == 60 + assert events[6].data["new_state"].attributes[ATTR_IN_PROGRESS] == 70 + assert 
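The update hunk replaces a parametrized progress test with a single one that tracks state-change events while the demo install runs. Below is a sketch of the "+" side's tracking pattern; the entity id and the patched sleep constant are taken from the hunk itself, and the harness fixtures are assumed.

# Sketch of the progress-tracking pattern from the update hunk above: collect
# state-change events while the demo install runs with its sleep patched away,
# then inspect ATTR_IN_PROGRESS step by step.
from unittest.mock import patch

from homeassistant.components.update import ATTR_IN_PROGRESS, DOMAIN, SERVICE_INSTALL
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.event import async_track_state_change_event

ENTITY_ID = "update.demo_update_with_progress"  # taken from the hunk above


async def test_update_progress_sketch(hass: HomeAssistant) -> None:
    """Install the demo update and watch the progress attribute climb."""
    events = []
    async_track_state_change_event(
        hass, ENTITY_ID, callback(lambda event: events.append(event))
    )

    with patch("homeassistant.components.demo.update.FAKE_INSTALL_SLEEP_TIME", new=0):
        await hass.services.async_call(
            DOMAIN, SERVICE_INSTALL, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True
        )

    # 10 %, 20 %, ... 90 %, then a final state with progress cleared.
    assert len(events) == 10
    assert [e.data["new_state"].attributes[ATTR_IN_PROGRESS] for e in events[:-1]] == [
        10, 20, 30, 40, 50, 60, 70, 80, 90
    ]
    assert events[-1].data["new_state"].attributes[ATTR_IN_PROGRESS] is False
    assert events[-1].data["new_state"].state == STATE_OFF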
events[7].data["new_state"].attributes[ATTR_IN_PROGRESS] == 80 + assert events[8].data["new_state"].attributes[ATTR_IN_PROGRESS] == 90 + assert events[9].data["new_state"].attributes[ATTR_IN_PROGRESS] is False + assert events[9].data["new_state"].state == STATE_OFF -@pytest.mark.parametrize( - ("entity_id", "steps"), - [ - ("update.demo_update_with_progress", 10), - ("update.demo_update_with_decimal_progress", 1000), - ], -) -async def test_update_with_progress_raising( - hass: HomeAssistant, entity_id: str, steps: int -) -> None: +async def test_update_with_progress_raising(hass: HomeAssistant) -> None: """Test update with progress failing to install.""" - state = hass.states.get(entity_id) + state = hass.states.get("update.demo_update_with_progress") assert state assert state.state == STATE_ON assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None events = [] async_track_state_change_event( hass, - entity_id, + "update.demo_update_with_progress", # pylint: disable-next=unnecessary-lambda callback(lambda event: events.append(event)), ) @@ -205,21 +184,19 @@ async def test_update_with_progress_raising( pytest.raises(RuntimeError), ): await hass.services.async_call( - UPDATE_DOMAIN, + DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: entity_id}, + {ATTR_ENTITY_ID: "update.demo_update_with_progress"}, blocking=True, ) await hass.async_block_till_done() assert fake_sleep.call_count == 5 - assert len(events) == 6 - for i, event in enumerate(events[:5]): - new_state = event.data["new_state"] - assert new_state.state == STATE_ON - assert new_state.attributes[ATTR_UPDATE_PERCENTAGE] == pytest.approx( - 100 / steps * i - ) - assert events[5].data["new_state"].attributes[ATTR_IN_PROGRESS] is False - assert events[5].data["new_state"].attributes[ATTR_UPDATE_PERCENTAGE] is None - assert events[5].data["new_state"].state == STATE_ON + assert len(events) == 5 + assert events[0].data["new_state"].state == STATE_ON + assert events[0].data["new_state"].attributes[ATTR_IN_PROGRESS] == 10 + assert events[1].data["new_state"].attributes[ATTR_IN_PROGRESS] == 20 + assert events[2].data["new_state"].attributes[ATTR_IN_PROGRESS] == 30 + assert events[3].data["new_state"].attributes[ATTR_IN_PROGRESS] == 40 + assert events[4].data["new_state"].attributes[ATTR_IN_PROGRESS] is False + assert events[4].data["new_state"].state == STATE_ON diff --git a/tests/components/demo/test_vacuum.py b/tests/components/demo/test_vacuum.py index a4e4d6f0e1f..a3b982ab70e 100644 --- a/tests/components/demo/test_vacuum.py +++ b/tests/components/demo/test_vacuum.py @@ -19,7 +19,7 @@ from homeassistant.components.vacuum import ( ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, - DOMAIN as VACUUM_DOMAIN, + DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, @@ -42,11 +42,11 @@ from homeassistant.util import dt as dt_util from tests.common import async_fire_time_changed, async_mock_service from tests.components.vacuum import common -ENTITY_VACUUM_BASIC = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_BASIC}".lower() -ENTITY_VACUUM_COMPLETE = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() -ENTITY_VACUUM_MINIMAL = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() -ENTITY_VACUUM_MOST = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_MOST}".lower() -ENTITY_VACUUM_NONE = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_NONE}".lower() +ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() +ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() +ENTITY_VACUUM_MINIMAL = 
f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() +ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() +ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() @pytest.fixture @@ -62,9 +62,7 @@ async def vacuum_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass: HomeAssistant, vacuum_only: None): """Initialize setup demo vacuum.""" - assert await async_setup_component( - hass, VACUUM_DOMAIN, {VACUUM_DOMAIN: {CONF_PLATFORM: "demo"}} - ) + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) await hass.async_block_till_done() @@ -191,7 +189,7 @@ async def test_unsupported_methods(hass: HomeAssistant) -> None: async def test_services(hass: HomeAssistant) -> None: """Test vacuum services.""" # Test send_command - send_command_calls = async_mock_service(hass, VACUUM_DOMAIN, SERVICE_SEND_COMMAND) + send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( @@ -200,20 +198,20 @@ async def test_services(hass: HomeAssistant) -> None: assert len(send_command_calls) == 1 call = send_command_calls[-1] - assert call.domain == VACUUM_DOMAIN + assert call.domain == DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed - set_fan_speed_calls = async_mock_service(hass, VACUUM_DOMAIN, SERVICE_SET_FAN_SPEED) + set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], ENTITY_VACUUM_COMPLETE) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] - assert call.domain == VACUUM_DOMAIN + assert call.domain == DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] diff --git a/tests/components/denonavr/test_media_player.py b/tests/components/denonavr/test_media_player.py index 6550b31b1f9..c294c449518 100644 --- a/tests/components/denonavr/test_media_player.py +++ b/tests/components/denonavr/test_media_player.py @@ -60,7 +60,7 @@ def client_fixture(): yield mock_client_class.return_value -async def setup_denonavr(hass: HomeAssistant) -> None: +async def setup_denonavr(hass): """Initialize media_player for tests.""" entry_data = { CONF_HOST: TEST_HOST, diff --git a/tests/components/derivative/test_init.py b/tests/components/derivative/test_init.py index 32802080e39..0081ab97580 100644 --- a/tests/components/derivative/test_init.py +++ b/tests/components/derivative/test_init.py @@ -42,7 +42,7 @@ async def test_setup_and_remove_config_entry( # Check the platform is setup correctly state = hass.states.get(derivative_entity_id) - assert state.state == "0.0" + assert state.state == "0" assert "unit_of_measurement" not in state.attributes assert state.attributes["source"] == "sensor.input" diff --git a/tests/components/derivative/test_sensor.py b/tests/components/derivative/test_sensor.py index 4a4d8519b25..df050c58f10 100644 --- a/tests/components/derivative/test_sensor.py +++ b/tests/components/derivative/test_sensor.py @@ -3,14 +3,12 @@ from datetime import timedelta from math import sin import random -from typing import Any from freezegun import freeze_time from homeassistant.components.derivative.const import DOMAIN -from homeassistant.components.sensor import ATTR_STATE_CLASS, SensorStateClass from homeassistant.const 
import UnitOfPower, UnitOfTime -from homeassistant.core import HomeAssistant, State +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -51,9 +49,7 @@ async def test_state(hass: HomeAssistant) -> None: assert state.attributes.get("unit_of_measurement") == "kW" -async def _setup_sensor( - hass: HomeAssistant, config: dict[str, Any] -) -> tuple[dict[str, Any], str]: +async def _setup_sensor(hass, config): default_config = { "platform": "derivative", "name": "power", @@ -71,13 +67,7 @@ async def _setup_sensor( return config, entity_id -async def setup_tests( - hass: HomeAssistant, - config: dict[str, Any], - times: list[int], - values: list[float], - expected_state: float, -) -> State: +async def setup_tests(hass, config, times, values, expected_state): """Test derivative sensor state.""" config, entity_id = await _setup_sensor(hass, config) @@ -355,41 +345,6 @@ async def test_suffix(hass: HomeAssistant) -> None: assert round(float(state.state), config["sensor"]["round"]) == 0.0 -async def test_total_increasing_reset(hass: HomeAssistant) -> None: - """Test derivative sensor state with total_increasing sensor input where it should ignore the reset value.""" - times = [0, 20, 30, 35, 40, 50, 60] - values = [0, 10, 30, 40, 0, 10, 40] - expected_times = [0, 20, 30, 35, 50, 60] - expected_values = ["0.00", "0.50", "2.00", "2.00", "1.00", "3.00"] - - config, entity_id = await _setup_sensor(hass, {"unit_time": UnitOfTime.SECONDS}) - - base_time = dt_util.utcnow() - actual_times = [] - actual_values = [] - with freeze_time(base_time) as freezer: - for time, value in zip(times, values, strict=False): - current_time = base_time + timedelta(seconds=time) - freezer.move_to(current_time) - hass.states.async_set( - entity_id, - value, - {ATTR_STATE_CLASS: SensorStateClass.TOTAL_INCREASING}, - force_update=True, - ) - await hass.async_block_till_done() - - state = hass.states.get("sensor.power") - assert state is not None - - if state.last_reported == current_time: - actual_times.append(time) - actual_values.append(state.state) - - assert actual_times == expected_times - assert actual_values == expected_values - - async def test_device_id( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/device_automation/test_init.py b/tests/components/device_automation/test_init.py index 94625746b05..b270d2ddd7a 100644 --- a/tests/components/device_automation/test_init.py +++ b/tests/components/device_automation/test_init.py @@ -23,11 +23,17 @@ from homeassistant.loader import IntegrationNotFound from homeassistant.requirements import RequirementsNotFound from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, MockModule, mock_integration, mock_platform +from tests.common import ( + MockConfigEntry, + MockModule, + async_mock_service, + mock_integration, + mock_platform, +) from tests.typing import WebSocketGenerator -@attr.s(frozen=True, slots=True) +@attr.s(frozen=True) class MockDeviceEntry(dr.DeviceEntry): """Device Registry Entry with fixed UUID.""" @@ -720,17 +726,12 @@ async def test_async_get_device_automations_all_devices_action_exception_throw( assert "KeyError" in caplog.text -@pytest.mark.parametrize( - "trigger_key", - ["trigger", "platform"], -) async def test_websocket_get_trigger_capabilities( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, 
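The removed derivative test drove the source sensor through a controlled time series with freezegun before reading the derivative back. The time-series feeding itself is a reusable pattern, sketched here under the assumption of the standard test harness; the helper name and sample values are illustrative only.

# Sketch of the freezegun-driven time series from the removed total_increasing
# test: move frozen time forward and push source states so the derivative
# sensor sees a controlled series. Entity id and samples are placeholders.
from datetime import timedelta

from freezegun import freeze_time

from homeassistant.core import HomeAssistant
import homeassistant.util.dt as dt_util


async def feed_time_series(hass: HomeAssistant, entity_id: str) -> None:
    """Push (seconds, value) samples at controlled wall-clock times."""
    samples = [(0, 0), (20, 10), (30, 30)]
    base_time = dt_util.utcnow()
    with freeze_time(base_time) as freezer:
        for seconds, value in samples:
            freezer.move_to(base_time + timedelta(seconds=seconds))
            hass.states.async_set(entity_id, value, force_update=True)
            await hass.async_block_till_done()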
device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, fake_integration, - trigger_key: str, ) -> None: """Test we get the expected trigger capabilities through websocket.""" await async_setup_component(hass, "device_automation", {}) @@ -772,12 +773,11 @@ async def test_websocket_get_trigger_capabilities( assert msg["id"] == 1 assert msg["type"] == TYPE_RESULT assert msg["success"] - triggers: dict = msg["result"] + triggers = msg["result"] msg_id = 2 assert len(triggers) == 3 # toggled, turned_on, turned_off for trigger in triggers: - trigger[trigger_key] = trigger.pop("platform") await client.send_json( { "id": msg_id, @@ -1313,7 +1313,7 @@ async def test_automation_with_bad_action( }, ) - assert expected_error.format(path="['actions'][0]") in caplog.text + assert expected_error.format(path="['action'][0]") in caplog.text @patch("homeassistant.helpers.device_registry.DeviceEntry", MockDeviceEntry) @@ -1347,7 +1347,7 @@ async def test_automation_with_bad_condition_action( }, ) - assert expected_error.format(path="['actions'][0]") in caplog.text + assert expected_error.format(path="['action'][0]") in caplog.text @patch("homeassistant.helpers.device_registry.DeviceEntry", MockDeviceEntry) @@ -1381,12 +1381,18 @@ async def test_automation_with_bad_condition( }, ) - assert expected_error.format(path="['conditions'][0]") in caplog.text + assert expected_error.format(path="['condition'][0]") in caplog.text + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") async def test_automation_with_sub_condition( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: @@ -1486,29 +1492,29 @@ async def test_automation_with_sub_condition( await hass.async_block_till_done() assert hass.states.get(entity_entry1.entity_id).state == STATE_ON assert hass.states.get(entity_entry2.entity_id).state == STATE_OFF - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "or event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "or event - test_event1" hass.states.async_set(entity_entry1.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.states.async_set(entity_entry2.entity_id, STATE_ON) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "or event - test_event1" + assert len(calls) == 2 + assert calls[1].data["some"] == "or event - test_event1" hass.states.async_set(entity_entry1.entity_id, STATE_ON) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 4 - assert [service_calls[2].data["some"], service_calls[3].data["some"]] == unordered( + assert len(calls) == 4 + assert [calls[2].data["some"], calls[3].data["some"]] == unordered( ["or event - test_event1", "and event - test_event1"] ) @@ -1547,7 +1553,7 @@ async def test_automation_with_bad_sub_condition( }, ) - path = "['conditions'][0]['conditions'][0]" + path = "['condition'][0]['conditions'][0]" assert expected_error.format(path=path) in caplog.text diff --git 
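Several automation-related test modules in this diff add a local `calls` fixture instead of relying on a shared `service_calls` fixture. It simply registers a mocked `test.automation` service and hands the captured call list to the test, as sketched below (taken almost verbatim from the "+" side); the tests then fire events on the bus and assert on `len(calls)` and the call data.

# Sketch of the local `calls` fixture added on the "+" side: a mocked
# test.automation service whose captured ServiceCall list the tests inspect.
import pytest

from homeassistant.core import HomeAssistant, ServiceCall

from tests.common import async_mock_service


@pytest.fixture
def calls(hass: HomeAssistant) -> list[ServiceCall]:
    """Track calls to a mock service."""
    return async_mock_service(hass, "test", "automation")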
a/tests/components/device_automation/test_toggle_entity.py b/tests/components/device_automation/test_toggle_entity.py index be4d3bd4c9e..f15730d9525 100644 --- a/tests/components/device_automation/test_toggle_entity.py +++ b/tests/components/device_automation/test_toggle_entity.py @@ -11,7 +11,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, async_mock_service @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -19,11 +19,17 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing. @@ -115,20 +121,20 @@ async def test_if_fires_on_state_change( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { + assert len(calls) == 2 + assert {calls[0].data["some"], calls[1].data["some"]} == { f"turn_off device - {entry.entity_id} - on - off - None", f"turn_on_or_off device - {entry.entity_id} - on - off - None", } hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(service_calls) == 4 - assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { + assert len(calls) == 4 + assert {calls[2].data["some"], calls[3].data["some"]} == { f"turn_on device - {entry.entity_id} - off - on - None", f"turn_on_or_off device - {entry.entity_id} - off - on - None", } @@ -139,7 +145,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], trigger: str, ) -> None: """Test for triggers firing with delay.""" @@ -187,16 +193,16 @@ async def test_if_fires_on_state_change_with_for( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await hass.async_block_till_done() assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/device_sun_light_trigger/test_init.py b/tests/components/device_sun_light_trigger/test_init.py index 24996482916..65afd5743f5 100644 --- a/tests/components/device_sun_light_trigger/test_init.py +++ 
b/tests/components/device_sun_light_trigger/test_init.py @@ -13,7 +13,7 @@ from homeassistant.components import ( group, light, ) -from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN +from homeassistant.components.device_tracker import DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, CONF_PLATFORM, @@ -77,10 +77,11 @@ async def scanner( ) await hass.async_block_till_done() + return scanner + -@pytest.mark.usefixtures("scanner") async def test_lights_on_when_sun_sets( - hass: HomeAssistant, freezer: FrozenDateTimeFactory + hass: HomeAssistant, freezer: FrozenDateTimeFactory, scanner ) -> None: """Test lights go on when there is someone home and the sun sets.""" test_time = datetime(2017, 4, 5, 1, 2, 3, tzinfo=dt_util.UTC) @@ -135,9 +136,8 @@ async def test_lights_turn_off_when_everyone_leaves(hass: HomeAssistant) -> None ) -@pytest.mark.usefixtures("scanner") async def test_lights_turn_on_when_coming_home_after_sun_set( - hass: HomeAssistant, freezer: FrozenDateTimeFactory + hass: HomeAssistant, freezer: FrozenDateTimeFactory, scanner ) -> None: """Test lights turn on when coming home after sun set.""" test_time = datetime(2017, 4, 5, 3, 2, 3, tzinfo=dt_util.UTC) @@ -150,21 +150,21 @@ async def test_lights_turn_on_when_coming_home_after_sun_set( hass, device_sun_light_trigger.DOMAIN, {device_sun_light_trigger.DOMAIN: {}} ) - hass.states.async_set(f"{DEVICE_TRACKER_DOMAIN}.device_2", STATE_UNKNOWN) + hass.states.async_set(f"{DOMAIN}.device_2", STATE_UNKNOWN) await hass.async_block_till_done() assert all( hass.states.get(ent_id).state == STATE_OFF for ent_id in hass.states.async_entity_ids("light") ) - hass.states.async_set(f"{DEVICE_TRACKER_DOMAIN}.device_2", STATE_NOT_HOME) + hass.states.async_set(f"{DOMAIN}.device_2", STATE_NOT_HOME) await hass.async_block_till_done() assert all( hass.states.get(ent_id).state == STATE_OFF for ent_id in hass.states.async_entity_ids("light") ) - hass.states.async_set(f"{DEVICE_TRACKER_DOMAIN}.device_2", STATE_HOME) + hass.states.async_set(f"{DOMAIN}.device_2", STATE_HOME) await hass.async_block_till_done() assert all( hass.states.get(ent_id).state == light.STATE_ON @@ -172,16 +172,12 @@ async def test_lights_turn_on_when_coming_home_after_sun_set( ) -@pytest.mark.usefixtures("scanner") async def test_lights_turn_on_when_coming_home_after_sun_set_person( - hass: HomeAssistant, freezer: FrozenDateTimeFactory + hass: HomeAssistant, freezer: FrozenDateTimeFactory, scanner ) -> None: """Test lights turn on when coming home after sun set.""" - # Ensure all setup tasks are done (avoid flaky tests) - await hass.async_block_till_done(wait_background_tasks=True) - - device_1 = f"{DEVICE_TRACKER_DOMAIN}.device_1" - device_2 = f"{DEVICE_TRACKER_DOMAIN}.device_2" + device_1 = f"{DOMAIN}.device_1" + device_2 = f"{DOMAIN}.device_2" test_time = datetime(2017, 4, 5, 3, 2, 3, tzinfo=dt_util.UTC) freezer.move_to(test_time) diff --git a/tests/components/device_tracker/common.py b/tests/components/device_tracker/common.py index 4842a91ce42..d30db984a66 100644 --- a/tests/components/device_tracker/common.py +++ b/tests/components/device_tracker/common.py @@ -61,7 +61,7 @@ def async_see( class MockScannerEntity(ScannerEntity): """Test implementation of a ScannerEntity.""" - def __init__(self) -> None: + def __init__(self): """Init.""" self.connected = False self._hostname = "test.hostname.org" @@ -69,7 +69,7 @@ class MockScannerEntity(ScannerEntity): self._mac_address = "ad:de:ef:be:ed:fe" @property - def source_type(self) -> SourceType: + 
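The device_sun_light_trigger hunk switches between two ways of pulling in the scanner fixture: as a test parameter whose value may be used, or via `pytest.mark.usefixtures` when only its side effect matters. A small, hypothetical illustration of the two styles:

# Hypothetical illustration of the two fixture styles toggled above; the real
# scanner fixture patches a demo device scanner rather than returning object().
import pytest


@pytest.fixture
def scanner():
    """Stand-in for the patched device scanner fixture."""
    return object()


def test_with_fixture_argument(scanner) -> None:
    """'+' side style: the fixture value is injected, even if barely used."""
    assert scanner is not None


@pytest.mark.usefixtures("scanner")
def test_with_usefixtures() -> None:
    """'-' side style: only the fixture's side effect is needed."""
    assert True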
def source_type(self): """Return the source type, eg gps or router, of the device.""" return SourceType.ROUTER @@ -110,7 +110,7 @@ class MockScannerEntity(ScannerEntity): class MockScanner(DeviceScanner): """Mock device scanner.""" - def __init__(self) -> None: + def __init__(self): """Initialize the MockScanner.""" self.devices_home = [] diff --git a/tests/components/device_tracker/test_config_entry.py b/tests/components/device_tracker/test_config_entry.py index bc721803450..45b94012051 100644 --- a/tests/components/device_tracker/test_config_entry.py +++ b/tests/components/device_tracker/test_config_entry.py @@ -1,9 +1,9 @@ """Test Device Tracker config entry things.""" -from collections.abc import Generator from typing import Any import pytest +from typing_extensions import Generator from homeassistant.components.device_tracker import ( ATTR_HOST_NAME, @@ -162,7 +162,7 @@ class MockTrackerEntity(TrackerEntity): return self._battery_level @property - def source_type(self) -> SourceType: + def source_type(self) -> SourceType | str: """Return the source type, eg gps or router, of the device.""" return SourceType.GPS @@ -249,7 +249,7 @@ class MockScannerEntity(ScannerEntity): return False @property - def source_type(self) -> SourceType: + def source_type(self) -> SourceType | str: """Return the source type, eg gps or router, of the device.""" return SourceType.ROUTER @@ -505,7 +505,8 @@ async def test_scanner_entity_state( def test_tracker_entity() -> None: """Test coverage for base TrackerEntity class.""" entity = TrackerEntity() - assert entity.source_type is SourceType.GPS + with pytest.raises(NotImplementedError): + assert entity.source_type is None assert entity.latitude is None assert entity.longitude is None assert entity.location_name is None @@ -538,7 +539,8 @@ def test_tracker_entity() -> None: def test_scanner_entity() -> None: """Test coverage for base ScannerEntity entity class.""" entity = ScannerEntity() - assert entity.source_type is SourceType.ROUTER + with pytest.raises(NotImplementedError): + assert entity.source_type is None with pytest.raises(NotImplementedError): assert entity.is_connected is None with pytest.raises(NotImplementedError): diff --git a/tests/components/device_tracker/test_device_condition.py b/tests/components/device_tracker/test_device_condition.py index aff020d61a8..6ea4ed7a372 100644 --- a/tests/components/device_tracker/test_device_condition.py +++ b/tests/components/device_tracker/test_device_condition.py @@ -12,7 +12,11 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -20,6 +24,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -104,7 +114,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: 
list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -174,22 +184,22 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_home - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_home - event - test_event1" hass.states.async_set(entry.entity_id, "school") hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "is_not_home - event - test_event2" + assert len(calls) == 2 + assert calls[1].data["some"] == "is_not_home - event - test_event2" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -237,5 +247,5 @@ async def test_if_state_legacy( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_home - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_home - event - test_event1" diff --git a/tests/components/device_tracker/test_device_trigger.py b/tests/components/device_tracker/test_device_trigger.py index ebff89e1a15..4236e316424 100644 --- a/tests/components/device_tracker/test_device_trigger.py +++ b/tests/components/device_tracker/test_device_trigger.py @@ -17,7 +17,11 @@ from homeassistant.helpers import ( from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -32,6 +36,12 @@ HOME_LATITUDE = 32.880837 HOME_LONGITUDE = -117.237561 +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.fixture(autouse=True) def setup_zone(hass: HomeAssistant) -> None: """Create test zone.""" @@ -135,7 +145,7 @@ async def test_if_fires_on_zone_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for enter and leave triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -218,9 +228,9 @@ async def test_if_fires_on_zone_change( {"latitude": HOME_LATITUDE, "longitude": HOME_LONGITUDE}, ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"enter - device - {entry.entity_id} - -117.235 - -117.238" ) @@ -231,9 +241,9 @@ async def test_if_fires_on_zone_change( {"latitude": AWAY_LATITUDE, "longitude": AWAY_LONGITUDE}, ) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[1].data["some"] + calls[1].data["some"] == f"leave - device - {entry.entity_id} - -117.238 - 
-117.235" ) @@ -242,7 +252,7 @@ async def test_if_fires_on_zone_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for enter and leave triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -301,9 +311,9 @@ async def test_if_fires_on_zone_change_legacy( {"latitude": HOME_LATITUDE, "longitude": HOME_LONGITUDE}, ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"enter - device - {entry.entity_id} - -117.235 - -117.238" ) diff --git a/tests/components/device_tracker/test_init.py b/tests/components/device_tracker/test_init.py index 362258b035a..cedf2a2f0bc 100644 --- a/tests/components/device_tracker/test_init.py +++ b/tests/components/device_tracker/test_init.py @@ -1,6 +1,5 @@ """The tests for the device tracker component.""" -from collections.abc import Generator from datetime import datetime, timedelta import json import logging @@ -9,6 +8,7 @@ from types import ModuleType from unittest.mock import call, patch import pytest +from typing_extensions import Generator from homeassistant.components import device_tracker, zone from homeassistant.components.device_tracker import SourceType, const, legacy diff --git a/tests/components/devolo_home_control/conftest.py b/tests/components/devolo_home_control/conftest.py index 55e072d075c..04752da5925 100644 --- a/tests/components/devolo_home_control/conftest.py +++ b/tests/components/devolo_home_control/conftest.py @@ -1,9 +1,9 @@ """Fixtures for tests.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/devolo_home_control/mocks.py b/tests/components/devolo_home_control/mocks.py index 33c0a230e90..02823871e0f 100644 --- a/tests/components/devolo_home_control/mocks.py +++ b/tests/components/devolo_home_control/mocks.py @@ -117,7 +117,6 @@ class DeviceMock(Zwave): self.uid = "Test" self.device_model_uid = "Test" self.device_type = "Test" - self.identifier = "MT01234" self.settings_property = {"general_device_settings": SettingsMock()} self.href = "https://www.mydevolo.com" diff --git a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr index 6a7ef1fc6d3..8c069de8f62 100644 --- a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr +++ b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr @@ -38,8 +38,6 @@ 'username': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'devolo_home_control', 'entry_id': '123456', 'minor_version': 1, diff --git a/tests/components/devolo_home_control/test_binary_sensor.py b/tests/components/devolo_home_control/test_binary_sensor.py index fd28ce2fdf6..e809c94c129 100644 --- a/tests/components/devolo_home_control/test_binary_sensor.py +++ b/tests/components/devolo_home_control/test_binary_sensor.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.binary_sensor import DOMAIN from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from 
homeassistant.helpers import entity_registry as er @@ -34,28 +34,24 @@ async def test_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door") + state = hass.states.get(f"{DOMAIN}.test_door") assert state == snapshot - assert entity_registry.async_get(f"{BINARY_SENSOR_DOMAIN}.test_door") == snapshot + assert entity_registry.async_get(f"{DOMAIN}.test_door") == snapshot - state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_overload") + state = hass.states.get(f"{DOMAIN}.test_overload") assert state == snapshot - assert ( - entity_registry.async_get(f"{BINARY_SENSOR_DOMAIN}.test_overload") == snapshot - ) + assert entity_registry.async_get(f"{DOMAIN}.test_overload") == snapshot # Emulate websocket message: sensor turned on test_gateway.publisher.dispatch("Test", ("Test", True)) await hass.async_block_till_done() - assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door").state == STATE_ON + assert hass.states.get(f"{DOMAIN}.test_door").state == STATE_ON # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert ( - hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door").state == STATE_UNAVAILABLE - ) + assert hass.states.get(f"{DOMAIN}.test_door").state == STATE_UNAVAILABLE @pytest.mark.usefixtures("mock_zeroconf") @@ -73,30 +69,25 @@ async def test_remote_control( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_button_1") + state = hass.states.get(f"{DOMAIN}.test_button_1") assert state == snapshot - assert ( - entity_registry.async_get(f"{BINARY_SENSOR_DOMAIN}.test_button_1") == snapshot - ) + assert entity_registry.async_get(f"{DOMAIN}.test_button_1") == snapshot # Emulate websocket message: button pressed test_gateway.publisher.dispatch("Test", ("Test", 1)) await hass.async_block_till_done() - assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_button_1").state == STATE_ON + assert hass.states.get(f"{DOMAIN}.test_button_1").state == STATE_ON # Emulate websocket message: button released test_gateway.publisher.dispatch("Test", ("Test", 0)) await hass.async_block_till_done() - assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_button_1").state == STATE_OFF + assert hass.states.get(f"{DOMAIN}.test_button_1").state == STATE_OFF # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert ( - hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_button_1").state - == STATE_UNAVAILABLE - ) + assert hass.states.get(f"{DOMAIN}.test_button_1").state == STATE_UNAVAILABLE @pytest.mark.usefixtures("mock_zeroconf") @@ -110,7 +101,7 @@ async def test_disabled(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door") is None + assert hass.states.get(f"{DOMAIN}.test_door") is None @pytest.mark.usefixtures("mock_zeroconf") @@ -125,7 +116,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door") + state = hass.states.get(f"{DOMAIN}.test_door") assert 
state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_climate.py b/tests/components/devolo_home_control/test_climate.py index 3aedda90e02..953ff835b89 100644 --- a/tests/components/devolo_home_control/test_climate.py +++ b/tests/components/devolo_home_control/test_climate.py @@ -6,7 +6,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components.climate import ( ATTR_HVAC_MODE, - DOMAIN as CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, HVACMode, ) @@ -32,14 +32,14 @@ async def test_climate( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{CLIMATE_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{CLIMATE_DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot # Emulate websocket message: temperature changed test_gateway.publisher.dispatch("Test", ("Test", 21.0)) await hass.async_block_till_done() - state = hass.states.get(f"{CLIMATE_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state.state == HVACMode.HEAT assert state.attributes[ATTR_TEMPERATURE] == 21.0 @@ -48,10 +48,10 @@ async def test_climate( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" ) as set_value: await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, { - ATTR_ENTITY_ID: f"{CLIMATE_DOMAIN}.test", + ATTR_ENTITY_ID: f"{DOMAIN}.test", ATTR_HVAC_MODE: HVACMode.HEAT, ATTR_TEMPERATURE: 20.0, }, @@ -63,7 +63,7 @@ async def test_climate( test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{CLIMATE_DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE async def test_remove_from_hass(hass: HomeAssistant) -> None: @@ -77,7 +77,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{CLIMATE_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_config_flow.py b/tests/components/devolo_home_control/test_config_flow.py index 7c9bfdeff63..48f9bf31f4f 100644 --- a/tests/components/devolo_home_control/test_config_flow.py +++ b/tests/components/devolo_home_control/test_config_flow.py @@ -164,17 +164,21 @@ async def test_zeroconf_wrong_device(hass: HomeAssistant) -> None: async def test_form_reauth(hass: HomeAssistant) -> None: """Test that the reauth confirmation form is served.""" - mock_config = MockConfigEntry( - domain=DOMAIN, - unique_id="123456", + mock_config = MockConfigEntry(domain=DOMAIN, unique_id="123456", data={}) + mock_config.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config.entry_id, + }, data={ "username": "test-username", "password": "test-password", "mydevolo_url": "https://test_mydevolo_url.test", }, ) - mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) + assert result["step_id"] == "reauth_confirm" assert 
result["type"] is FlowResultType.FORM @@ -201,17 +205,20 @@ async def test_form_reauth(hass: HomeAssistant) -> None: @pytest.mark.parametrize("credentials_valid", [False]) async def test_form_invalid_credentials_reauth(hass: HomeAssistant) -> None: """Test if we get the error message on invalid credentials.""" - mock_config = MockConfigEntry( - domain=DOMAIN, - unique_id="123456", + mock_config = MockConfigEntry(domain=DOMAIN, unique_id="123456", data={}) + mock_config.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config.entry_id, + }, data={ "username": "test-username", "password": "test-password", "mydevolo_url": "https://test_mydevolo_url.test", }, ) - mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -223,17 +230,20 @@ async def test_form_invalid_credentials_reauth(hass: HomeAssistant) -> None: async def test_form_uuid_change_reauth(hass: HomeAssistant) -> None: """Test that the reauth confirmation form is served.""" - mock_config = MockConfigEntry( - domain=DOMAIN, - unique_id="123456", + mock_config = MockConfigEntry(domain=DOMAIN, unique_id="123456", data={}) + mock_config.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config.entry_id, + }, data={ "username": "test-username", "password": "test-password", "mydevolo_url": "https://test_mydevolo_url.test", }, ) - mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM diff --git a/tests/components/devolo_home_control/test_cover.py b/tests/components/devolo_home_control/test_cover.py index 7d4b081c87e..c21dabadb1a 100644 --- a/tests/components/devolo_home_control/test_cover.py +++ b/tests/components/devolo_home_control/test_cover.py @@ -4,17 +4,13 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from homeassistant.components.cover import ( - ATTR_CURRENT_POSITION, - ATTR_POSITION, - DOMAIN as COVER_DOMAIN, - CoverState, -) +from homeassistant.components.cover import ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, + STATE_CLOSED, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -38,15 +34,15 @@ async def test_cover( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{COVER_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{COVER_DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot # Emulate websocket message: position changed test_gateway.publisher.dispatch("Test", ("devolo.Blinds", 0.0)) await hass.async_block_till_done() - state = hass.states.get(f"{COVER_DOMAIN}.test") - assert state.state == CoverState.CLOSED + state = hass.states.get(f"{DOMAIN}.test") + assert state.state == STATE_CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0.0 # Test setting position @@ -54,27 +50,27 @@ async def test_cover( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" ) as set_value: await hass.services.async_call( - 
COVER_DOMAIN, + DOMAIN, SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: f"{COVER_DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(100) set_value.reset_mock() await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: f"{COVER_DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(0) set_value.reset_mock() await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: f"{COVER_DOMAIN}.test", ATTR_POSITION: 50}, + {ATTR_ENTITY_ID: f"{DOMAIN}.test", ATTR_POSITION: 50}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(50) @@ -83,7 +79,7 @@ async def test_cover( test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{COVER_DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE async def test_remove_from_hass(hass: HomeAssistant) -> None: @@ -97,7 +93,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{COVER_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_diagnostics.py b/tests/components/devolo_home_control/test_diagnostics.py index dfadc4d1c4b..f52a9d49017 100644 --- a/tests/components/devolo_home_control/test_diagnostics.py +++ b/tests/components/devolo_home_control/test_diagnostics.py @@ -5,7 +5,6 @@ from __future__ import annotations from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -36,4 +35,4 @@ async def test_entry_diagnostics( assert entry.state is ConfigEntryState.LOADED result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot diff --git a/tests/components/devolo_home_control/test_light.py b/tests/components/devolo_home_control/test_light.py index 46c3fbc98f3..f72136ee287 100644 --- a/tests/components/devolo_home_control/test_light.py +++ b/tests/components/devolo_home_control/test_light.py @@ -4,7 +4,7 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN +from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -33,18 +33,18 @@ async def test_light_without_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{LIGHT_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{LIGHT_DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot # Emulate websocket message: 
brightness changed test_gateway.publisher.dispatch("Test", ("devolo.Dimmer:Test", 0.0)) await hass.async_block_till_done() - state = hass.states.get(f"{LIGHT_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state.state == STATE_OFF test_gateway.publisher.dispatch("Test", ("devolo.Dimmer:Test", 100.0)) await hass.async_block_till_done() - state = hass.states.get(f"{LIGHT_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 255 @@ -53,27 +53,27 @@ async def test_light_without_binary_sensor( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" ) as set_value: await hass.services.async_call( - LIGHT_DOMAIN, + DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(100) set_value.reset_mock() await hass.services.async_call( - LIGHT_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(0) set_value.reset_mock() await hass.services.async_call( - LIGHT_DOMAIN, + DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test", ATTR_BRIGHTNESS: 50}, + {ATTR_ENTITY_ID: f"{DOMAIN}.test", ATTR_BRIGHTNESS: 50}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(round(50 / 255 * 100)) @@ -82,7 +82,7 @@ async def test_light_without_binary_sensor( test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{LIGHT_DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE async def test_light_with_binary_sensor( @@ -101,18 +101,18 @@ async def test_light_with_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{LIGHT_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{LIGHT_DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot # Emulate websocket message: brightness changed test_gateway.publisher.dispatch("Test", ("devolo.Dimmer:Test", 0.0)) await hass.async_block_till_done() - state = hass.states.get(f"{LIGHT_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state.state == STATE_OFF test_gateway.publisher.dispatch("Test", ("devolo.Dimmer:Test", 100.0)) await hass.async_block_till_done() - state = hass.states.get(f"{LIGHT_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 255 @@ -121,18 +121,18 @@ async def test_light_with_binary_sensor( "devolo_home_control_api.properties.binary_switch_property.BinarySwitchProperty.set" ) as set_value: await hass.services.async_call( - LIGHT_DOMAIN, + DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(True) set_value.reset_mock() await hass.services.async_call( - LIGHT_DOMAIN, + 
DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(False) @@ -149,7 +149,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{LIGHT_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_sensor.py b/tests/components/devolo_home_control/test_sensor.py index 08b53dae865..62023982e81 100644 --- a/tests/components/devolo_home_control/test_sensor.py +++ b/tests/components/devolo_home_control/test_sensor.py @@ -4,7 +4,7 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.sensor import DOMAIN from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -26,9 +26,9 @@ async def test_temperature_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{SENSOR_DOMAIN}.test_temperature") + state = hass.states.get(f"{DOMAIN}.test_temperature") assert state == snapshot - assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_temperature") == snapshot + assert entity_registry.async_get(f"{DOMAIN}.test_temperature") == snapshot async def test_battery_sensor( @@ -45,14 +45,14 @@ async def test_battery_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{SENSOR_DOMAIN}.test_battery_level") + state = hass.states.get(f"{DOMAIN}.test_battery_level") assert state == snapshot - assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_battery_level") == snapshot + assert entity_registry.async_get(f"{DOMAIN}.test_battery_level") == snapshot # Emulate websocket message: value changed test_gateway.publisher.dispatch("Test", ("Test", 10, "battery_level")) await hass.async_block_till_done() - assert hass.states.get(f"{SENSOR_DOMAIN}.test_battery_level").state == "10" + assert hass.states.get(f"{DOMAIN}.test_battery_level").state == "10" async def test_consumption_sensor( @@ -68,36 +68,29 @@ async def test_consumption_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{SENSOR_DOMAIN}.test_current_consumption") + state = hass.states.get(f"{DOMAIN}.test_current_consumption") assert state == snapshot - assert ( - entity_registry.async_get(f"{SENSOR_DOMAIN}.test_current_consumption") - == snapshot - ) + assert entity_registry.async_get(f"{DOMAIN}.test_current_consumption") == snapshot - state = hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption") + state = hass.states.get(f"{DOMAIN}.test_total_consumption") assert state == snapshot - assert ( - entity_registry.async_get(f"{SENSOR_DOMAIN}.test_total_consumption") == snapshot - ) + assert entity_registry.async_get(f"{DOMAIN}.test_total_consumption") == snapshot # Emulate websocket message: value changed test_gateway.devices["Test"].consumption_property["devolo.Meter:Test"].total = 50.0 test_gateway.publisher.dispatch("Test", ("devolo.Meter:Test", 50.0)) await 
hass.async_block_till_done() - assert hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption").state == "50.0" + assert hass.states.get(f"{DOMAIN}.test_total_consumption").state == "50.0" # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() assert ( - hass.states.get(f"{SENSOR_DOMAIN}.test_current_consumption").state - == STATE_UNAVAILABLE + hass.states.get(f"{DOMAIN}.test_current_consumption").state == STATE_UNAVAILABLE ) assert ( - hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption").state - == STATE_UNAVAILABLE + hass.states.get(f"{DOMAIN}.test_total_consumption").state == STATE_UNAVAILABLE ) @@ -112,7 +105,7 @@ async def test_voltage_sensor(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{SENSOR_DOMAIN}.test_voltage") + state = hass.states.get(f"{DOMAIN}.test_voltage") assert state is None @@ -130,16 +123,14 @@ async def test_sensor_change(hass: HomeAssistant) -> None: # Emulate websocket message: value changed test_gateway.publisher.dispatch("Test", ("devolo.MultiLevelSensor:Test", 50.0)) await hass.async_block_till_done() - state = hass.states.get(f"{SENSOR_DOMAIN}.test_temperature") + state = hass.states.get(f"{DOMAIN}.test_temperature") assert state.state == "50.0" # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert ( - hass.states.get(f"{SENSOR_DOMAIN}.test_temperature").state == STATE_UNAVAILABLE - ) + assert hass.states.get(f"{DOMAIN}.test_temperature").state == STATE_UNAVAILABLE async def test_remove_from_hass(hass: HomeAssistant) -> None: @@ -153,7 +144,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{SENSOR_DOMAIN}.test_temperature") + state = hass.states.get(f"{DOMAIN}.test_temperature") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_siren.py b/tests/components/devolo_home_control/test_siren.py index 71f4dfdd34d..be662418967 100644 --- a/tests/components/devolo_home_control/test_siren.py +++ b/tests/components/devolo_home_control/test_siren.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.siren import DOMAIN as SIREN_DOMAIN +from homeassistant.components.siren import DOMAIN from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -29,20 +29,20 @@ async def test_siren( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{SIREN_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{SIREN_DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot # Emulate websocket message: sensor turned on test_gateway.publisher.dispatch("Test", ("devolo.SirenMultiLevelSwitch:Test", 1)) await hass.async_block_till_done() - assert hass.states.get(f"{SIREN_DOMAIN}.test").state == 
STATE_ON + assert hass.states.get(f"{DOMAIN}.test").state == STATE_ON # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{SIREN_DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE @pytest.mark.usefixtures("mock_zeroconf") @@ -60,9 +60,9 @@ async def test_siren_switching( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{SIREN_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{SIREN_DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot with patch( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" @@ -70,7 +70,7 @@ async def test_siren_switching( await hass.services.async_call( "siren", "turn_on", - {"entity_id": f"{SIREN_DOMAIN}.test"}, + {"entity_id": f"{DOMAIN}.test"}, blocking=True, ) # The real device state is changed by a websocket message @@ -86,7 +86,7 @@ async def test_siren_switching( await hass.services.async_call( "siren", "turn_off", - {"entity_id": f"{SIREN_DOMAIN}.test"}, + {"entity_id": f"{DOMAIN}.test"}, blocking=True, ) # The real device state is changed by a websocket message @@ -94,7 +94,7 @@ async def test_siren_switching( "Test", ("devolo.SirenMultiLevelSwitch:Test", 0) ) await hass.async_block_till_done() - assert hass.states.get(f"{SIREN_DOMAIN}.test").state == STATE_OFF + assert hass.states.get(f"{DOMAIN}.test").state == STATE_OFF property_set.assert_called_once_with(0) @@ -113,9 +113,9 @@ async def test_siren_change_default_tone( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{SIREN_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{SIREN_DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot with patch( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" @@ -124,7 +124,7 @@ async def test_siren_change_default_tone( await hass.services.async_call( "siren", "turn_on", - {"entity_id": f"{SIREN_DOMAIN}.test"}, + {"entity_id": f"{DOMAIN}.test"}, blocking=True, ) property_set.assert_called_once_with(2) @@ -142,7 +142,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{SIREN_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_switch.py b/tests/components/devolo_home_control/test_switch.py index 46adaf8c8b0..86f93bfddf6 100644 --- a/tests/components/devolo_home_control/test_switch.py +++ b/tests/components/devolo_home_control/test_switch.py @@ -4,7 +4,7 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.components.switch import DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -32,9 +32,9 @@ async def test_switch( await hass.config_entries.async_setup(entry.entry_id) 
await hass.async_block_till_done() - state = hass.states.get(f"{SWITCH_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{SWITCH_DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot # Emulate websocket message: switched on test_gateway.devices["Test"].binary_switch_property[ @@ -42,24 +42,24 @@ async def test_switch( ].state = True test_gateway.publisher.dispatch("Test", ("devolo.BinarySwitch:Test", True)) await hass.async_block_till_done() - assert hass.states.get(f"{SWITCH_DOMAIN}.test").state == STATE_ON + assert hass.states.get(f"{DOMAIN}.test").state == STATE_ON with patch( "devolo_home_control_api.properties.binary_switch_property.BinarySwitchProperty.set" ) as set_value: await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: f"{SWITCH_DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(state=True) set_value.reset_mock() await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: f"{SWITCH_DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(state=False) @@ -68,7 +68,7 @@ async def test_switch( test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{SWITCH_DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE async def test_remove_from_hass(hass: HomeAssistant) -> None: @@ -82,7 +82,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{SWITCH_DOMAIN}.test") + state = hass.states.get(f"{DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_network/const.py b/tests/components/devolo_home_network/const.py index 7b0551b1daf..9d8faab9b13 100644 --- a/tests/components/devolo_home_network/const.py +++ b/tests/components/devolo_home_network/const.py @@ -171,5 +171,3 @@ PLCNET_ATTACHED = LogicalNetwork( }, ], ) - -UPTIME = 100 diff --git a/tests/components/devolo_home_network/mock.py b/tests/components/devolo_home_network/mock.py index 82bf3e5ad76..4b999667e53 100644 --- a/tests/components/devolo_home_network/mock.py +++ b/tests/components/devolo_home_network/mock.py @@ -19,7 +19,6 @@ from .const import ( IP, NEIGHBOR_ACCESS_POINTS, PLCNET, - UPTIME, ) @@ -50,7 +49,7 @@ class MockDevice(Device): self, session_instance: httpx.AsyncClient | None = None ) -> None: """Give a mocked device the needed properties.""" - self.mac = DISCOVERY_INFO.properties["PlcMacAddress"] if self.plcnet else None + self.mac = DISCOVERY_INFO.properties["PlcMacAddress"] self.mt_number = DISCOVERY_INFO.properties["MT"] self.product = DISCOVERY_INFO.properties["Product"] self.serial_number = DISCOVERY_INFO.properties["SN"] @@ -65,7 +64,6 @@ class MockDevice(Device): ) self.device.async_get_led_setting = AsyncMock(return_value=False) self.device.async_restart = AsyncMock(return_value=True) - self.device.async_uptime = AsyncMock(return_value=UPTIME) self.device.async_start_wps 
= AsyncMock(return_value=True) self.device.async_get_wifi_connected_station = AsyncMock( return_value=CONNECTED_STATIONS diff --git a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr index 3da8c76c2b4..317aaac0116 100644 --- a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr +++ b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr @@ -22,8 +22,6 @@ 'password': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'devolo_home_network', 'entry_id': '123456', 'minor_version': 1, diff --git a/tests/components/devolo_home_network/snapshots/test_init.ambr b/tests/components/devolo_home_network/snapshots/test_init.ambr index 297c9a25183..8c265400643 100644 --- a/tests/components/devolo_home_network/snapshots/test_init.ambr +++ b/tests/components/devolo_home_network/snapshots/test_init.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_setup_entry[mock_device] +# name: test_setup_entry DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -25,39 +25,6 @@ }), 'manufacturer': 'devolo', 'model': 'dLAN pro 1200+ WiFi ac', - 'model_id': '2730', - 'name': 'Mock Title', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': '1234567890', - 'suggested_area': None, - 'sw_version': '5.6.1', - 'via_device_id': None, - }) -# --- -# name: test_setup_entry[mock_repeater_device] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://192.0.2.1', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'devolo_home_network', - '1234567890', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'devolo', - 'model': 'dLAN pro 1200+ WiFi ac', - 'model_id': '2730', 'name': 'Mock Title', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/devolo_home_network/snapshots/test_sensor.ambr b/tests/components/devolo_home_network/snapshots/test_sensor.ambr index 2e6730cdb21..d985ac35495 100644 --- a/tests/components/devolo_home_network/snapshots/test_sensor.ambr +++ b/tests/components/devolo_home_network/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_sensor[connected_plc_devices-async_get_network_overview-interval2-1] +# name: test_sensor[connected_plc_devices-async_get_network_overview-interval2] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Mock Title Connected PLC devices', @@ -12,7 +12,7 @@ 'state': '1', }) # --- -# name: test_sensor[connected_plc_devices-async_get_network_overview-interval2-1].1 +# name: test_sensor[connected_plc_devices-async_get_network_overview-interval2].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -45,7 +45,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[connected_wi_fi_clients-async_get_wifi_connected_station-interval0-1] +# name: test_sensor[connected_wi_fi_clients-async_get_wifi_connected_station-interval0] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Mock Title Connected Wi-Fi clients', @@ -59,7 +59,7 @@ 'state': '1', }) # --- -# name: test_sensor[connected_wi_fi_clients-async_get_wifi_connected_station-interval0-1].1 +# name: test_sensor[connected_wi_fi_clients-async_get_wifi_connected_station-interval0].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -94,54 +94,7 @@ 'unit_of_measurement': None, }) # --- -# name: 
test_sensor[last_restart_of_the_device-async_uptime-interval3-2023-01-13T11:58:50+00:00] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Last restart of the device', - }), - 'context': , - 'entity_id': 'sensor.mock_title_last_restart_of_the_device', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2023-01-13T11:58:20+00:00', - }) -# --- -# name: test_sensor[last_restart_of_the_device-async_uptime-interval3-2023-01-13T11:58:50+00:00].1 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_last_restart_of_the_device', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last restart of the device', - 'platform': 'devolo_home_network', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_restart', - 'unique_id': '1234567890_last_restart', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[neighboring_wi_fi_networks-async_get_wifi_neighbor_access_points-interval1-1] +# name: test_sensor[neighboring_wi_fi_networks-async_get_wifi_neighbor_access_points-interval1] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Mock Title Neighboring Wi-Fi networks', @@ -154,7 +107,7 @@ 'state': '1', }) # --- -# name: test_sensor[neighboring_wi_fi_networks-async_get_wifi_neighbor_access_points-interval1-1].1 +# name: test_sensor[neighboring_wi_fi_networks-async_get_wifi_neighbor_access_points-interval1].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), diff --git a/tests/components/devolo_home_network/snapshots/test_update.ambr b/tests/components/devolo_home_network/snapshots/test_update.ambr index 8a1065f9a60..83ca84c82e8 100644 --- a/tests/components/devolo_home_network/snapshots/test_update.ambr +++ b/tests/components/devolo_home_network/snapshots/test_update.ambr @@ -4,7 +4,6 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', - 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/devolo_home_network/icon.png', 'friendly_name': 'Mock Title Firmware', 'in_progress': False, @@ -15,7 +14,6 @@ 'skipped_version': None, 'supported_features': , 'title': None, - 'update_percentage': None, }), 'context': , 'entity_id': 'update.mock_title_firmware', diff --git a/tests/components/devolo_home_network/test_binary_sensor.py b/tests/components/devolo_home_network/test_binary_sensor.py index 8197ec1a1e5..3e4bf8471c1 100644 --- a/tests/components/devolo_home_network/test_binary_sensor.py +++ b/tests/components/devolo_home_network/test_binary_sensor.py @@ -7,7 +7,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.binary_sensor import DOMAIN from homeassistant.components.devolo_home_network.const import ( CONNECTED_TO_ROUTER, LONG_UPDATE_INTERVAL, @@ -31,10 +31,7 @@ async def test_binary_sensor_setup(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert ( - 
hass.states.get(f"{BINARY_SENSOR_DOMAIN}.{device_name}_{CONNECTED_TO_ROUTER}") - is None - ) + assert hass.states.get(f"{DOMAIN}.{device_name}_{CONNECTED_TO_ROUTER}") is None await hass.config_entries.async_unload(entry.entry_id) @@ -50,7 +47,7 @@ async def test_update_attached_to_router( """Test state change of a attached_to_router binary sensor device.""" entry = configure_integration(hass) device_name = entry.title.replace(" ", "_").lower() - state_key = f"{BINARY_SENSOR_DOMAIN}.{device_name}_{CONNECTED_TO_ROUTER}" + state_key = f"{DOMAIN}.{device_name}_{CONNECTED_TO_ROUTER}" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_network/test_config_flow.py b/tests/components/devolo_home_network/test_config_flow.py index 5234d0f073e..5aa2bfa274e 100644 --- a/tests/components/devolo_home_network/test_config_flow.py +++ b/tests/components/devolo_home_network/test_config_flow.py @@ -179,7 +179,18 @@ async def test_form_reauth(hass: HomeAssistant) -> None: entry = configure_integration(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "title_placeholders": { + CONF_NAME: DISCOVERY_INFO.hostname.split(".")[0], + }, + }, + data=entry.data, + ) + assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM diff --git a/tests/components/devolo_home_network/test_diagnostics.py b/tests/components/devolo_home_network/test_diagnostics.py index 05d3c594677..a3580cac954 100644 --- a/tests/components/devolo_home_network/test_diagnostics.py +++ b/tests/components/devolo_home_network/test_diagnostics.py @@ -4,7 +4,6 @@ from __future__ import annotations import pytest from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -29,4 +28,4 @@ async def test_entry_diagnostics( assert entry.state is ConfigEntryState.LOADED result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot diff --git a/tests/components/devolo_home_network/test_image.py b/tests/components/devolo_home_network/test_image.py index f13db4fce9d..80efc4fcc09 100644 --- a/tests/components/devolo_home_network/test_image.py +++ b/tests/components/devolo_home_network/test_image.py @@ -9,7 +9,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.devolo_home_network.const import SHORT_UPDATE_INTERVAL -from homeassistant.components.image import DOMAIN as IMAGE_DOMAIN +from homeassistant.components.image import DOMAIN from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -32,9 +32,7 @@ async def test_image_setup(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert ( - hass.states.get( - f"{IMAGE_DOMAIN}.{device_name}_guest_wi_fi_credentials_as_qr_code" - ) + hass.states.get(f"{DOMAIN}.{device_name}_guest_wi_fi_credentials_as_qr_code") is not None ) @@ -53,7 +51,7 @@ async def test_guest_wifi_qr( """Test showing a QR code of the guest wifi credentials.""" entry = configure_integration(hass) device_name = 
entry.title.replace(" ", "_").lower() - state_key = f"{IMAGE_DOMAIN}.{device_name}_guest_wi_fi_credentials_as_qr_code" + state_key = f"{DOMAIN}.{device_name}_guest_wi_fi_credentials_as_qr_code" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_network/test_init.py b/tests/components/devolo_home_network/test_init.py index 71823eabe82..1b8903c568e 100644 --- a/tests/components/devolo_home_network/test_init.py +++ b/tests/components/devolo_home_network/test_init.py @@ -27,16 +27,13 @@ from .mock import MockDevice from tests.common import MockConfigEntry -@pytest.mark.parametrize("device", ["mock_device", "mock_repeater_device"]) async def test_setup_entry( hass: HomeAssistant, - device: str, + mock_device: MockDevice, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion, - request: pytest.FixtureRequest, ) -> None: """Test setup entry.""" - mock_device: MockDevice = request.getfixturevalue(device) entry = configure_integration(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_network/test_sensor.py b/tests/components/devolo_home_network/test_sensor.py index cf0207a2800..efcbaa803df 100644 --- a/tests/components/devolo_home_network/test_sensor.py +++ b/tests/components/devolo_home_network/test_sensor.py @@ -3,18 +3,16 @@ from datetime import timedelta from unittest.mock import AsyncMock -from devolo_plc_api.exceptions.device import DevicePasswordProtected, DeviceUnavailable +from devolo_plc_api.exceptions.device import DeviceUnavailable from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.devolo_home_network.const import ( - DOMAIN, LONG_UPDATE_INTERVAL, SHORT_UPDATE_INTERVAL, ) -from homeassistant.components.sensor import DOMAIN as PLATFORM -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.components.sensor import DOMAIN from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -35,74 +33,59 @@ async def test_sensor_setup(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert ( - hass.states.get(f"{PLATFORM}.{device_name}_connected_wi_fi_clients") is not None - ) - assert hass.states.get(f"{PLATFORM}.{device_name}_connected_plc_devices") is None - assert ( - hass.states.get(f"{PLATFORM}.{device_name}_neighboring_wi_fi_networks") is None + hass.states.get(f"{DOMAIN}.{device_name}_connected_wi_fi_clients") is not None ) + assert hass.states.get(f"{DOMAIN}.{device_name}_connected_plc_devices") is None + assert hass.states.get(f"{DOMAIN}.{device_name}_neighboring_wi_fi_networks") is None assert ( hass.states.get( - f"{PLATFORM}.{device_name}_plc_downlink_phy_rate_{PLCNET.devices[1].user_device_name}" + f"{DOMAIN}.{device_name}_plc_downlink_phy_rate_{PLCNET.devices[1].user_device_name}" ) is not None ) assert ( hass.states.get( - f"{PLATFORM}.{device_name}_plc_uplink_phy_rate_{PLCNET.devices[1].user_device_name}" + f"{DOMAIN}.{device_name}_plc_uplink_phy_rate_{PLCNET.devices[1].user_device_name}" ) is not None ) assert ( hass.states.get( - f"{PLATFORM}.{device_name}_plc_downlink_phyrate_{PLCNET.devices[2].user_device_name}" + f"{DOMAIN}.{device_name}_plc_downlink_phyrate_{PLCNET.devices[2].user_device_name}" ) is None ) assert ( hass.states.get( - 
f"{PLATFORM}.{device_name}_plc_uplink_phyrate_{PLCNET.devices[2].user_device_name}" + f"{DOMAIN}.{device_name}_plc_uplink_phyrate_{PLCNET.devices[2].user_device_name}" ) is None ) - assert ( - hass.states.get(f"{PLATFORM}.{device_name}_last_restart_of_the_device") is None - ) await hass.config_entries.async_unload(entry.entry_id) @pytest.mark.parametrize( - ("name", "get_method", "interval", "expected_state"), + ("name", "get_method", "interval"), [ ( "connected_wi_fi_clients", "async_get_wifi_connected_station", SHORT_UPDATE_INTERVAL, - "1", ), ( "neighboring_wi_fi_networks", "async_get_wifi_neighbor_access_points", LONG_UPDATE_INTERVAL, - "1", ), ( "connected_plc_devices", "async_get_network_overview", LONG_UPDATE_INTERVAL, - "1", - ), - ( - "last_restart_of_the_device", - "async_uptime", - SHORT_UPDATE_INTERVAL, - "2023-01-13T11:58:50+00:00", ), ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.freeze_time("2023-01-13 12:00:00+00:00") async def test_sensor( hass: HomeAssistant, mock_device: MockDevice, @@ -112,12 +95,11 @@ async def test_sensor( name: str, get_method: str, interval: timedelta, - expected_state: str, ) -> None: """Test state change of a sensor device.""" entry = configure_integration(hass) device_name = entry.title.replace(" ", "_").lower() - state_key = f"{PLATFORM}.{device_name}_{name}" + state_key = f"{DOMAIN}.{device_name}_{name}" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -143,7 +125,7 @@ async def test_sensor( state = hass.states.get(state_key) assert state is not None - assert state.state == expected_state + assert state.state == "1" await hass.config_entries.async_unload(entry.entry_id) @@ -158,8 +140,8 @@ async def test_update_plc_phyrates( """Test state change of plc_downlink_phyrate and plc_uplink_phyrate sensor devices.""" entry = configure_integration(hass) device_name = entry.title.replace(" ", "_").lower() - state_key_downlink = f"{PLATFORM}.{device_name}_plc_downlink_phy_rate_{PLCNET.devices[1].user_device_name}" - state_key_uplink = f"{PLATFORM}.{device_name}_plc_uplink_phy_rate_{PLCNET.devices[1].user_device_name}" + state_key_downlink = f"{DOMAIN}.{device_name}_plc_downlink_phy_rate_{PLCNET.devices[1].user_device_name}" + state_key_uplink = f"{DOMAIN}.{device_name}_plc_uplink_phy_rate_{PLCNET.devices[1].user_device_name}" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -199,28 +181,3 @@ async def test_update_plc_phyrates( assert state.state == str(PLCNET.data_rates[0].tx_rate) await hass.config_entries.async_unload(entry.entry_id) - - -async def test_update_last_update_auth_failed( - hass: HomeAssistant, mock_device: MockDevice -) -> None: - """Test getting the last update state with wrong password triggers the reauth flow.""" - entry = configure_integration(hass) - mock_device.device.async_uptime.side_effect = DevicePasswordProtected - - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.SETUP_ERROR - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - - flow = flows[0] - assert flow["step_id"] == "reauth_confirm" - assert flow["handler"] == DOMAIN - - assert "context" in flow - assert flow["context"]["source"] == SOURCE_REAUTH - assert flow["context"]["entry_id"] == entry.entry_id - - await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/dhcp/test_init.py b/tests/components/dhcp/test_init.py 
index 6852f4369cc..7c652c8ea3e 100644 --- a/tests/components/dhcp/test_init.py +++ b/tests/components/dhcp/test_init.py @@ -8,7 +8,10 @@ from unittest.mock import patch import aiodhcpwatcher import pytest -from scapy import interfaces +from scapy import ( + arch, # noqa: F401 + interfaces, +) from scapy.error import Scapy_Exception from scapy.layers.dhcp import DHCP from scapy.layers.l2 import Ether @@ -32,17 +35,11 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant import homeassistant.helpers.device_registry as dr -from homeassistant.helpers.discovery_flow import DiscoveryKey from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import ( - MockConfigEntry, - MockModule, - async_fire_time_changed, - mock_integration, -) +from tests.common import MockConfigEntry, async_fire_time_changed # connect b8:b7:f1:6d:b5:33 192.168.210.56 RAW_DHCP_REQUEST = ( @@ -141,15 +138,11 @@ RAW_DHCP_REQUEST_WITHOUT_HOSTNAME = ( async def _async_get_handle_dhcp_packet( - hass: HomeAssistant, - integration_matchers: dhcp.DhcpMatchers, - address_data: dict | None = None, + hass: HomeAssistant, integration_matchers: dhcp.DhcpMatchers ) -> Callable[[Any], Awaitable[None]]: - if address_data is None: - address_data = {} dhcp_watcher = dhcp.DHCPWatcher( hass, - address_data, + {}, integration_matchers, ) with patch("aiodhcpwatcher.async_start"): @@ -184,8 +177,7 @@ async def test_dhcp_match_hostname_and_macaddress(hass: HomeAssistant) -> None: assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -213,8 +205,7 @@ async def test_dhcp_renewal_match_hostname_and_macaddress(hass: HomeAssistant) - assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="50147903852c", version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.1.120", @@ -263,8 +254,7 @@ async def test_registered_devices( assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="50147903852c", version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.1.120", @@ -290,8 +280,7 @@ async def test_dhcp_match_hostname(hass: HomeAssistant) -> None: assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -317,8 +306,7 @@ async def test_dhcp_match_macaddress(hass: HomeAssistant) -> None: assert len(mock_init.mock_calls) == 1 assert 
mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -347,8 +335,7 @@ async def test_dhcp_multiple_match_only_one_flow(hass: HomeAssistant) -> None: assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -374,8 +361,7 @@ async def test_dhcp_match_macaddress_without_hostname(hass: HomeAssistant) -> No assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="606bbd59e4b4", version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.107.151", @@ -701,8 +687,7 @@ async def test_device_tracker_hostname_and_macaddress_exists_before_start( assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -739,8 +724,7 @@ async def test_device_tracker_registered(hass: HomeAssistant) -> None: assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -819,8 +803,7 @@ async def test_device_tracker_hostname_and_macaddress_after_start( assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -1029,8 +1012,7 @@ async def test_aiodiscover_finds_new_hosts(hass: HomeAssistant) -> None: assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -1092,8 +1074,7 @@ async def test_aiodiscover_does_not_call_again_on_shorter_hostname( assert len(mock_init.mock_calls) == 2 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", 
version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -1102,8 +1083,7 @@ async def test_aiodiscover_does_not_call_again_on_shorter_hostname( ) assert mock_init.mock_calls[1][1][0] == "mock-domain" assert mock_init.mock_calls[1][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[1][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", @@ -1160,196 +1140,10 @@ async def test_aiodiscover_finds_new_hosts_after_interval(hass: HomeAssistant) - assert len(mock_init.mock_calls) == 1 assert mock_init.mock_calls[0][1][0] == "mock-domain" assert mock_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), - "source": config_entries.SOURCE_DHCP, + "source": config_entries.SOURCE_DHCP } assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( ip="192.168.210.56", hostname="connect", macaddress="b8b7f16db533", ) - - -@pytest.mark.parametrize( - ( - "entry_domain", - "entry_discovery_keys", - ), - [ - # Matching discovery key - ( - "mock-domain", - {"dhcp": (DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1),)}, - ), - # Matching discovery key - ( - "mock-domain", - { - "dhcp": (DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1),), - "other": (DiscoveryKey(domain="other", key="blah", version=1),), - }, - ), - # Matching discovery key, other domain - # Note: Rediscovery is not currently restricted to the domain of the removed - # entry. Such a check can be added if needed. - ( - "comp", - {"dhcp": (DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1),)}, - ), - ], -) -@pytest.mark.parametrize( - "entry_source", - [ - config_entries.SOURCE_DHCP, - config_entries.SOURCE_IGNORE, - config_entries.SOURCE_USER, - ], -) -async def test_dhcp_rediscover( - hass: HomeAssistant, - entry_domain: str, - entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], - entry_source: str, -) -> None: - """Test we reinitiate flows when an ignored config entry is removed.""" - - entry = MockConfigEntry( - domain=entry_domain, - discovery_keys=entry_discovery_keys, - unique_id="mock-unique-id", - state=config_entries.ConfigEntryState.LOADED, - source=entry_source, - ) - entry.add_to_hass(hass) - - address_data = {} - integration_matchers = dhcp.async_index_integration_matchers( - [{"domain": "mock-domain", "hostname": "connect", "macaddress": "B8B7F1*"}] - ) - packet = Ether(RAW_DHCP_REQUEST) - - async_handle_dhcp_packet = await _async_get_handle_dhcp_packet( - hass, integration_matchers, address_data - ) - rediscovery_watcher = dhcp.RediscoveryWatcher( - hass, address_data, integration_matchers - ) - rediscovery_watcher.async_start() - with patch.object(hass.config_entries.flow, "async_init") as mock_init: - await async_handle_dhcp_packet(packet) - # Ensure no change is ignored - await async_handle_dhcp_packet(packet) - - # Assert the cached MAC address is hexstring without : - assert address_data == { - "b8b7f16db533": {"hostname": "connect", "ip": "192.168.210.56"} - } - - expected_context = { - "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), - "source": config_entries.SOURCE_DHCP, - } - assert len(mock_init.mock_calls) == 1 - assert mock_init.mock_calls[0][1][0] == "mock-domain" - assert 
mock_init.mock_calls[0][2]["context"] == expected_context - assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( - ip="192.168.210.56", - hostname="connect", - macaddress="b8b7f16db533", - ) - - with patch.object(hass.config_entries.flow, "async_init") as mock_init: - await hass.config_entries.async_remove(entry.entry_id) - await hass.async_block_till_done() - - assert len(mock_init.mock_calls) == 1 - assert mock_init.mock_calls[0][1][0] == "mock-domain" - assert mock_init.mock_calls[0][2]["context"] == expected_context - - -@pytest.mark.usefixtures("mock_async_zeroconf") -@pytest.mark.parametrize( - ( - "entry_domain", - "entry_discovery_keys", - "entry_source", - "entry_unique_id", - ), - [ - # Discovery key from other domain - ( - "mock-domain", - { - "bluetooth": ( - DiscoveryKey(domain="bluetooth", key="b8b7f16db533", version=1), - ) - }, - config_entries.SOURCE_IGNORE, - "mock-unique-id", - ), - # Discovery key from the future - ( - "mock-domain", - {"dhcp": (DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=2),)}, - config_entries.SOURCE_IGNORE, - "mock-unique-id", - ), - ], -) -async def test_dhcp_rediscover_no_match( - hass: HomeAssistant, - entry_domain: str, - entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], - entry_source: str, - entry_unique_id: str, -) -> None: - """Test we don't reinitiate flows when a non matching config entry is removed.""" - - mock_integration(hass, MockModule(entry_domain)) - - entry = MockConfigEntry( - domain=entry_domain, - discovery_keys=entry_discovery_keys, - unique_id=entry_unique_id, - state=config_entries.ConfigEntryState.LOADED, - source=entry_source, - ) - entry.add_to_hass(hass) - - address_data = {} - integration_matchers = dhcp.async_index_integration_matchers( - [{"domain": "mock-domain", "hostname": "connect", "macaddress": "B8B7F1*"}] - ) - packet = Ether(RAW_DHCP_REQUEST) - - async_handle_dhcp_packet = await _async_get_handle_dhcp_packet( - hass, integration_matchers, address_data - ) - rediscovery_watcher = dhcp.RediscoveryWatcher( - hass, address_data, integration_matchers - ) - rediscovery_watcher.async_start() - with patch.object(hass.config_entries.flow, "async_init") as mock_init: - await async_handle_dhcp_packet(packet) - # Ensure no change is ignored - await async_handle_dhcp_packet(packet) - - expected_context = { - "discovery_key": DiscoveryKey(domain="dhcp", key="b8b7f16db533", version=1), - "source": config_entries.SOURCE_DHCP, - } - assert len(mock_init.mock_calls) == 1 - assert mock_init.mock_calls[0][1][0] == "mock-domain" - assert mock_init.mock_calls[0][2]["context"] == expected_context - assert mock_init.mock_calls[0][2]["data"] == dhcp.DhcpServiceInfo( - ip="192.168.210.56", - hostname="connect", - macaddress="b8b7f16db533", - ) - - with patch.object(hass.config_entries.flow, "async_init") as mock_init: - await hass.config_entries.async_remove(entry.entry_id) - await hass.async_block_till_done() - - assert len(mock_init.mock_calls) == 0 diff --git a/tests/components/diagnostics/test_init.py b/tests/components/diagnostics/test_init.py index ffed7e21f60..7f583395387 100644 --- a/tests/components/diagnostics/test_init.py +++ b/tests/components/diagnostics/test_init.py @@ -174,7 +174,6 @@ async def test_download_diagnostics( "dependencies": [], "domain": "fake_integration", "is_built_in": True, - "overwrites_built_in": False, "name": "fake_integration", "requirements": [], }, @@ -261,7 +260,6 @@ async def test_download_diagnostics( "dependencies": [], "domain": "fake_integration", 
"is_built_in": True, - "overwrites_built_in": False, "name": "fake_integration", "requirements": [], }, diff --git a/tests/components/dialogflow/test_init.py b/tests/components/dialogflow/test_init.py index 8144bef7c1c..4c36a6887aa 100644 --- a/tests/components/dialogflow/test_init.py +++ b/tests/components/dialogflow/test_init.py @@ -8,8 +8,8 @@ import pytest from homeassistant import config_entries from homeassistant.components import dialogflow, intent_script +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, ServiceCall, callback -from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component diff --git a/tests/components/directv/test_media_player.py b/tests/components/directv/test_media_player.py index 37762a22fe2..33eb35ed268 100644 --- a/tests/components/directv/test_media_player.py +++ b/tests/components/directv/test_media_player.py @@ -215,7 +215,7 @@ async def test_check_attributes( assert state.attributes.get(ATTR_MEDIA_POSITION_UPDATED_AT) assert state.attributes.get(ATTR_MEDIA_TITLE) == "Snow Bride" assert state.attributes.get(ATTR_MEDIA_SERIES_TITLE) is None - assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "HALLHD (312)" + assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "{} ({})".format("HALLHD", "312") assert state.attributes.get(ATTR_INPUT_SOURCE) == "312" assert not state.attributes.get(ATTR_MEDIA_CURRENTLY_RECORDING) assert state.attributes.get(ATTR_MEDIA_RATING) == "TV-G" @@ -234,7 +234,7 @@ async def test_check_attributes( assert state.attributes.get(ATTR_MEDIA_POSITION_UPDATED_AT) assert state.attributes.get(ATTR_MEDIA_TITLE) == "Tyler's Ultimate" assert state.attributes.get(ATTR_MEDIA_SERIES_TITLE) == "Spaghetti and Clam Sauce" - assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "FOODHD (231)" + assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "{} ({})".format("FOODHD", "231") assert state.attributes.get(ATTR_INPUT_SOURCE) == "231" assert not state.attributes.get(ATTR_MEDIA_CURRENTLY_RECORDING) assert state.attributes.get(ATTR_MEDIA_RATING) == "No Rating" @@ -255,7 +255,7 @@ async def test_check_attributes( assert state.attributes.get(ATTR_MEDIA_ARTIST) == "Gerald Albright" assert state.attributes.get(ATTR_MEDIA_ALBUM_NAME) == "Slam Dunk (2014)" assert state.attributes.get(ATTR_MEDIA_SERIES_TITLE) is None - assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "MCSJ (851)" + assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "{} ({})".format("MCSJ", "851") assert state.attributes.get(ATTR_INPUT_SOURCE) == "851" assert not state.attributes.get(ATTR_MEDIA_CURRENTLY_RECORDING) assert state.attributes.get(ATTR_MEDIA_RATING) == "TV-PG" diff --git a/tests/components/discord/__init__.py b/tests/components/discord/__init__.py index 1d81388d1e3..bf7c188b7b5 100644 --- a/tests/components/discord/__init__.py +++ b/tests/components/discord/__init__.py @@ -5,6 +5,7 @@ from unittest.mock import AsyncMock, Mock, patch import nextcord from homeassistant.components.discord.const import DOMAIN +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_TOKEN, CONF_NAME from homeassistant.core import HomeAssistant @@ -21,7 +22,7 @@ CONF_DATA = { } -def create_entry(hass: HomeAssistant) -> MockConfigEntry: +def create_entry(hass: HomeAssistant) -> ConfigEntry: """Add config entry in Home Assistant.""" entry = MockConfigEntry( domain=DOMAIN, diff --git 
a/tests/components/discord/test_config_flow.py b/tests/components/discord/test_config_flow.py index e9a1344c555..9b37179e86d 100644 --- a/tests/components/discord/test_config_flow.py +++ b/tests/components/discord/test_config_flow.py @@ -4,7 +4,7 @@ import nextcord from homeassistant import config_entries from homeassistant.components.discord.const import DOMAIN -from homeassistant.const import CONF_API_TOKEN +from homeassistant.const import CONF_API_TOKEN, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -123,7 +123,16 @@ async def test_flow_user_unknown_error(hass: HomeAssistant) -> None: async def test_flow_reauth(hass: HomeAssistant) -> None: """Test a reauth flow.""" entry = create_entry(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + CONF_SOURCE: config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=entry.data, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/discovergy/conftest.py b/tests/components/discovergy/conftest.py index 4f65099c1b4..056f763c3e2 100644 --- a/tests/components/discovergy/conftest.py +++ b/tests/components/discovergy/conftest.py @@ -1,10 +1,10 @@ """Fixtures for Discovergy integration tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch from pydiscovergy.models import Reading import pytest +from typing_extensions import Generator from homeassistant.components.discovergy.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/discovergy/test_config_flow.py b/tests/components/discovergy/test_config_flow.py index 470ef65fccd..2464ba3846f 100644 --- a/tests/components/discovergy/test_config_flow.py +++ b/tests/components/discovergy/test_config_flow.py @@ -6,7 +6,7 @@ from pydiscovergy.error import DiscovergyClientError, HTTPError, InvalidLogin import pytest from homeassistant.components.discovergy.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -49,9 +49,15 @@ async def test_reauth( ) -> None: """Test reauth flow.""" config_entry.add_to_hass(hass) - init_result = await config_entry.start_reauth_flow(hass) + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "unique_id": config_entry.unique_id}, + data=None, + ) + assert init_result["type"] is FlowResultType.FORM - assert init_result["step_id"] == "reauth_confirm" + assert init_result["step_id"] == "reauth" with patch( "homeassistant.components.discovergy.async_setup_entry", diff --git a/tests/components/dlink/conftest.py b/tests/components/dlink/conftest.py index c56b93c4d3d..4bbf99000a9 100644 --- a/tests/components/dlink/conftest.py +++ b/tests/components/dlink/conftest.py @@ -1,10 +1,11 @@ """Configure pytest for D-Link tests.""" -from collections.abc import Awaitable, Callable, Generator +from collections.abc import Awaitable, Callable from copy import deepcopy from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components import dhcp from homeassistant.components.dlink.const 
import CONF_USE_LEGACY_PROTOCOL, DOMAIN diff --git a/tests/components/dlna_dmr/conftest.py b/tests/components/dlna_dmr/conftest.py index 21cb2bc0daf..f470fbabc6f 100644 --- a/tests/components/dlna_dmr/conftest.py +++ b/tests/components/dlna_dmr/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Generator +from collections.abc import Iterable from socket import AddressFamily # pylint: disable=no-name-in-module from unittest.mock import Mock, create_autospec, patch, seal @@ -32,7 +32,7 @@ NEW_DEVICE_LOCATION = "http://198.51.100.7" + "/dmr_description.xml" @pytest.fixture -def domain_data_mock(hass: HomeAssistant) -> Mock: +def domain_data_mock(hass: HomeAssistant) -> Iterable[Mock]: """Mock the global data used by this component. This includes network clients and library object factories. Mocking it @@ -114,7 +114,7 @@ def config_entry_mock_no_mac() -> MockConfigEntry: @pytest.fixture -def dmr_device_mock(domain_data_mock: Mock) -> Generator[Mock]: +def dmr_device_mock(domain_data_mock: Mock) -> Iterable[Mock]: """Mock the async_upnp_client DMR device, initially connected.""" with patch( "homeassistant.components.dlna_dmr.media_player.DmrDevice", autospec=True @@ -135,7 +135,7 @@ def dmr_device_mock(domain_data_mock: Mock) -> Generator[Mock]: @pytest.fixture(autouse=True) -def ssdp_scanner_mock() -> Generator[Mock]: +def ssdp_scanner_mock() -> Iterable[Mock]: """Mock the SSDP Scanner.""" with patch("homeassistant.components.ssdp.Scanner", autospec=True) as mock_scanner: reg_callback = mock_scanner.return_value.async_register_callback @@ -144,14 +144,14 @@ def ssdp_scanner_mock() -> Generator[Mock]: @pytest.fixture(autouse=True) -def ssdp_server_mock() -> Generator[None]: +def ssdp_server_mock() -> Iterable[Mock]: """Mock the SSDP Server.""" with patch("homeassistant.components.ssdp.Server", autospec=True): yield @pytest.fixture(autouse=True) -def async_get_local_ip_mock() -> Generator[Mock]: +def async_get_local_ip_mock() -> Iterable[Mock]: """Mock the async_get_local_ip utility function to prevent network access.""" with patch( "homeassistant.components.dlna_dmr.media_player.async_get_local_ip", diff --git a/tests/components/dlna_dmr/test_config_flow.py b/tests/components/dlna_dmr/test_config_flow.py index cb32001e1e5..a91cd4744d9 100644 --- a/tests/components/dlna_dmr/test_config_flow.py +++ b/tests/components/dlna_dmr/test_config_flow.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Generator +from collections.abc import Iterable import dataclasses import logging from unittest.mock import Mock, patch @@ -89,7 +89,7 @@ MOCK_DISCOVERY = ssdp.SsdpServiceInfo( @pytest.fixture(autouse=True) -def mock_get_mac_address() -> Generator[Mock]: +def mock_get_mac_address() -> Iterable[Mock]: """Mock the get_mac_address function to prevent network access and assist tests.""" with patch( "homeassistant.components.dlna_dmr.config_flow.get_mac_address", autospec=True @@ -99,7 +99,7 @@ def mock_get_mac_address() -> Generator[Mock]: @pytest.fixture(autouse=True) -def mock_setup_entry() -> Generator[Mock]: +def mock_setup_entry() -> Iterable[Mock]: """Mock async_setup_entry.""" with patch( "homeassistant.components.dlna_dmr.async_setup_entry", return_value=True @@ -671,6 +671,83 @@ async def test_ignore_flow_no_ssdp( } +async def test_unignore_flow(hass: HomeAssistant, ssdp_scanner_mock: Mock) -> None: + """Test a config flow started by unignoring a device.""" + # Create ignored entry (with no extra info from SSDP) + 
ssdp_scanner_mock.async_get_discovery_info_by_udn_st.return_value = None + result = await hass.config_entries.flow.async_init( + DLNA_DOMAIN, + context={"source": config_entries.SOURCE_IGNORE}, + data={"unique_id": MOCK_DEVICE_UDN, "title": MOCK_DEVICE_NAME}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MOCK_DEVICE_NAME + + # Device was found via SSDP, matching the 2nd device type tried + ssdp_scanner_mock.async_get_discovery_info_by_udn_st.side_effect = [ + None, + MOCK_DISCOVERY, + None, + None, + None, + ] + + # Unignore it and expect config flow to start + result = await hass.config_entries.flow.async_init( + DLNA_DOMAIN, + context={"source": config_entries.SOURCE_UNIGNORE}, + data={"unique_id": MOCK_DEVICE_UDN}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MOCK_DEVICE_NAME + assert result["data"] == { + CONF_URL: MOCK_DEVICE_LOCATION, + CONF_DEVICE_ID: MOCK_DEVICE_UDN, + CONF_TYPE: MOCK_DEVICE_TYPE, + CONF_MAC: MOCK_MAC_ADDRESS, + } + assert result["options"] == {} + + +async def test_unignore_flow_offline( + hass: HomeAssistant, ssdp_scanner_mock: Mock +) -> None: + """Test a config flow started by unignoring a device, but the device is offline.""" + # Create ignored entry (with no extra info from SSDP) + ssdp_scanner_mock.async_get_discovery_info_by_udn_st.return_value = None + result = await hass.config_entries.flow.async_init( + DLNA_DOMAIN, + context={"source": config_entries.SOURCE_IGNORE}, + data={"unique_id": MOCK_DEVICE_UDN, "title": MOCK_DEVICE_NAME}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MOCK_DEVICE_NAME + + # Device is not in the SSDP discoveries (perhaps HA restarted between ignore and unignore) + ssdp_scanner_mock.async_get_discovery_info_by_udn_st.return_value = None + + # Unignore it and expect config flow to start then abort + result = await hass.config_entries.flow.async_init( + DLNA_DOMAIN, + context={"source": config_entries.SOURCE_UNIGNORE}, + data={"unique_id": MOCK_DEVICE_UDN}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "discovery_error" + + async def test_get_mac_address_ipv4( hass: HomeAssistant, mock_get_mac_address: Mock ) -> None: diff --git a/tests/components/dlna_dmr/test_data.py b/tests/components/dlna_dmr/test_data.py index e67a559f934..57652747ffd 100644 --- a/tests/components/dlna_dmr/test_data.py +++ b/tests/components/dlna_dmr/test_data.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Generator +from collections.abc import Iterable from unittest.mock import ANY, Mock, patch from async_upnp_client.aiohttp import AiohttpNotifyServer @@ -16,7 +16,7 @@ from homeassistant.core import Event, HomeAssistant @pytest.fixture -def aiohttp_notify_servers_mock() -> Generator[Mock]: +def aiohttp_notify_servers_mock() -> Iterable[Mock]: """Construct mock AiohttpNotifyServer on demand, eliminating network use. This fixture provides a list of the constructed servers. 
diff --git a/tests/components/dlna_dmr/test_media_player.py b/tests/components/dlna_dmr/test_media_player.py index 3d8f9da8ed9..d202994f988 100644 --- a/tests/components/dlna_dmr/test_media_player.py +++ b/tests/components/dlna_dmr/test_media_player.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from collections.abc import AsyncGenerator, Mapping +from collections.abc import AsyncIterable, Mapping from dataclasses import dataclass from datetime import timedelta from typing import Any @@ -95,7 +95,7 @@ async def mock_entity_id( config_entry_mock: MockConfigEntry, ssdp_scanner_mock: Mock, dmr_device_mock: Mock, -) -> AsyncGenerator[str]: +) -> AsyncIterable[str]: """Fixture to set up a mock DlnaDmrEntity in a connected state. Yields the entity ID. Cleans up the entity after the test is complete. @@ -145,7 +145,7 @@ async def mock_disconnected_entity_id( config_entry_mock: MockConfigEntry, ssdp_scanner_mock: Mock, dmr_device_mock: Mock, -) -> AsyncGenerator[str]: +) -> AsyncIterable[str]: """Fixture to set up a mock DlnaDmrEntity in a disconnected state. Yields the entity ID. Cleans up the entity after the test is complete. diff --git a/tests/components/dlna_dms/conftest.py b/tests/components/dlna_dms/conftest.py index eb10babf527..ed05dfa4c76 100644 --- a/tests/components/dlna_dms/conftest.py +++ b/tests/components/dlna_dms/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import AsyncGenerator, Generator +from collections.abc import AsyncIterable, Iterable from typing import Final, cast from unittest.mock import AsyncMock, MagicMock, Mock, create_autospec, patch, seal @@ -44,7 +44,7 @@ async def setup_media_source(hass: HomeAssistant) -> None: @pytest.fixture -def upnp_factory_mock() -> Generator[Mock]: +def upnp_factory_mock() -> Iterable[Mock]: """Mock the UpnpFactory class to construct DMS-style UPnP devices.""" with patch( "homeassistant.components.dlna_dms.dms.UpnpFactory", @@ -82,7 +82,7 @@ def upnp_factory_mock() -> Generator[Mock]: @pytest.fixture(autouse=True, scope="module") -def aiohttp_session_requester_mock() -> Generator[Mock]: +def aiohttp_session_requester_mock() -> Iterable[Mock]: """Mock the AiohttpSessionRequester to prevent network use.""" with patch( "homeassistant.components.dlna_dms.dms.AiohttpSessionRequester", autospec=True @@ -109,7 +109,7 @@ def config_entry_mock() -> MockConfigEntry: @pytest.fixture -def dms_device_mock(upnp_factory_mock: Mock) -> Generator[Mock]: +def dms_device_mock(upnp_factory_mock: Mock) -> Iterable[Mock]: """Mock the async_upnp_client DMS device, initially connected.""" with patch( "homeassistant.components.dlna_dms.dms.DmsDevice", autospec=True @@ -130,7 +130,7 @@ def dms_device_mock(upnp_factory_mock: Mock) -> Generator[Mock]: @pytest.fixture(autouse=True) -def ssdp_scanner_mock() -> Generator[Mock]: +def ssdp_scanner_mock() -> Iterable[Mock]: """Mock the SSDP Scanner.""" with patch("homeassistant.components.ssdp.Scanner", autospec=True) as mock_scanner: reg_callback = mock_scanner.return_value.async_register_callback @@ -139,7 +139,7 @@ def ssdp_scanner_mock() -> Generator[Mock]: @pytest.fixture(autouse=True) -def ssdp_server_mock() -> Generator[None]: +def ssdp_server_mock() -> Iterable[Mock]: """Mock the SSDP Server.""" with patch("homeassistant.components.ssdp.Server", autospec=True): yield @@ -151,7 +151,7 @@ async def device_source_mock( config_entry_mock: MockConfigEntry, ssdp_scanner_mock: Mock, dms_device_mock: Mock, -) -> AsyncGenerator[None]: +) -> AsyncIterable[None]: 
"""Fixture to set up a DmsDeviceSource in a connected state and cleanup at completion.""" config_entry_mock.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry_mock.entry_id) diff --git a/tests/components/dlna_dms/test_config_flow.py b/tests/components/dlna_dms/test_config_flow.py index 14da36a0381..b61b4a42c49 100644 --- a/tests/components/dlna_dms/test_config_flow.py +++ b/tests/components/dlna_dms/test_config_flow.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Generator +from collections.abc import Iterable import dataclasses import logging from typing import Final @@ -68,7 +68,7 @@ MOCK_DISCOVERY: Final = ssdp.SsdpServiceInfo( @pytest.fixture(autouse=True) -def mock_setup_entry() -> Generator[Mock]: +def mock_setup_entry() -> Iterable[Mock]: """Avoid setting up the entire integration.""" with patch( "homeassistant.components.dlna_dms.async_setup_entry", diff --git a/tests/components/dlna_dms/test_device_availability.py b/tests/components/dlna_dms/test_device_availability.py index 1be68f91733..c1ad3c91a7b 100644 --- a/tests/components/dlna_dms/test_device_availability.py +++ b/tests/components/dlna_dms/test_device_availability.py @@ -15,8 +15,8 @@ import pytest from homeassistant.components import media_source, ssdp from homeassistant.components.dlna_dms.const import DOMAIN from homeassistant.components.dlna_dms.dms import get_domain_data -from homeassistant.components.media_player import BrowseError -from homeassistant.components.media_source import Unresolvable +from homeassistant.components.media_player.errors import BrowseError +from homeassistant.components.media_source.error import Unresolvable from homeassistant.core import HomeAssistant from .conftest import ( diff --git a/tests/components/dlna_dms/test_dms_device_source.py b/tests/components/dlna_dms/test_dms_device_source.py index 7907d40c415..23d9e6927ae 100644 --- a/tests/components/dlna_dms/test_dms_device_source.py +++ b/tests/components/dlna_dms/test_dms_device_source.py @@ -13,8 +13,9 @@ import pytest from homeassistant.components import media_source, ssdp from homeassistant.components.dlna_dms.const import DLNA_SORT_CRITERIA, DOMAIN from homeassistant.components.dlna_dms.dms import DidlPlayMedia -from homeassistant.components.media_player import BrowseError -from homeassistant.components.media_source import BrowseMediaSource, Unresolvable +from homeassistant.components.media_player.errors import BrowseError +from homeassistant.components.media_source.error import Unresolvable +from homeassistant.components.media_source.models import BrowseMediaSource from homeassistant.core import HomeAssistant from .conftest import ( diff --git a/tests/components/dlna_dms/test_media_source.py b/tests/components/dlna_dms/test_media_source.py index ad290826075..641232e356a 100644 --- a/tests/components/dlna_dms/test_media_source.py +++ b/tests/components/dlna_dms/test_media_source.py @@ -13,11 +13,11 @@ from homeassistant.components.dlna_dms.media_source import ( DmsMediaSource, async_get_media_source, ) -from homeassistant.components.media_player import BrowseError -from homeassistant.components.media_source import ( +from homeassistant.components.media_player.errors import BrowseError +from homeassistant.components.media_source.error import Unresolvable +from homeassistant.components.media_source.models import ( BrowseMediaSource, MediaSourceItem, - Unresolvable, ) from homeassistant.const import CONF_DEVICE_ID, CONF_URL from homeassistant.core import HomeAssistant diff --git 
a/tests/components/dnsip/test_config_flow.py b/tests/components/dnsip/test_config_flow.py index 9d92cb3554c..99dc5781d16 100644 --- a/tests/components/dnsip/test_config_flow.py +++ b/tests/components/dnsip/test_config_flow.py @@ -278,15 +278,11 @@ async def test_options_flow_empty_return(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" - with patch( - "homeassistant.components.dnsip.config_flow.aiodns.DNSResolver", - return_value=RetrieveDNS(), - ): - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={}, - ) - await hass.async_block_till_done() + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={}, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { diff --git a/tests/components/doorbird/__init__.py b/tests/components/doorbird/__init__.py index 2d517dfcefe..57bf4c04e39 100644 --- a/tests/components/doorbird/__init__.py +++ b/tests/components/doorbird/__init__.py @@ -1,85 +1 @@ """Tests for the DoorBird integration.""" - -from typing import Any -from unittest.mock import AsyncMock, MagicMock, Mock - -import aiohttp -from doorbirdpy import DoorBird, DoorBirdScheduleEntry - -from homeassistant import config_entries -from homeassistant.components.doorbird.const import API_URL -from homeassistant.const import ( - CONF_HOST, - CONF_NAME, - CONF_PASSWORD, - CONF_TOKEN, - CONF_USERNAME, -) - -VALID_CONFIG = { - CONF_HOST: "1.2.3.4", - CONF_USERNAME: "friend", - CONF_PASSWORD: "password", - CONF_NAME: "mydoorbird", -} - - -def _get_aiohttp_client_error(status: int) -> aiohttp.ClientResponseError: - """Return a mock aiohttp client response error.""" - return aiohttp.ClientResponseError( - request_info=Mock(), - history=Mock(), - status=status, - ) - - -def mock_unauthorized_exception() -> aiohttp.ClientResponseError: - """Return a mock unauthorized exception.""" - return _get_aiohttp_client_error(401) - - -def mock_not_found_exception() -> aiohttp.ClientResponseError: - """Return a mock not found exception.""" - return _get_aiohttp_client_error(404) - - -def get_mock_doorbird_api( - info: dict[str, Any] | None = None, - info_side_effect: Exception | None = None, - schedule: list[DoorBirdScheduleEntry] | None = None, - schedule_side_effect: Exception | None = None, - favorites: dict[str, dict[str, Any]] | None = None, - favorites_side_effect: Exception | None = None, - change_schedule: tuple[bool, int] | None = None, -) -> DoorBird: - """Return a mock DoorBirdAPI object with return values.""" - doorbirdapi_mock = MagicMock(spec_set=DoorBird) - api_mock_type = type(doorbirdapi_mock) - api_mock_type.info = AsyncMock(side_effect=info_side_effect, return_value=info) - api_mock_type.favorites = AsyncMock( - side_effect=favorites_side_effect, return_value=favorites - ) - api_mock_type.change_favorite = AsyncMock(return_value=True) - api_mock_type.change_schedule = AsyncMock( - return_value=change_schedule or (True, 200) - ) - api_mock_type.schedule = AsyncMock( - return_value=schedule, side_effect=schedule_side_effect - ) - api_mock_type.energize_relay = AsyncMock(return_value=True) - api_mock_type.turn_light_on = AsyncMock(return_value=True) - api_mock_type.delete_favorite = AsyncMock(return_value=True) - api_mock_type.get_image = AsyncMock(return_value=b"image") - api_mock_type.doorbell_state = AsyncMock(side_effect=mock_unauthorized_exception()) - return doorbirdapi_mock - - -async def 
mock_webhook_call( - config_entry: config_entries.ConfigEntry, - aiohttp_client: aiohttp.ClientSession, - event: str, -) -> None: - """Mock the webhook call.""" - token = config_entry.data.get(CONF_TOKEN, config_entry.entry_id) - response = await aiohttp_client.get(f"{API_URL}/{event}?token={token}") - response.raise_for_status() diff --git a/tests/components/doorbird/conftest.py b/tests/components/doorbird/conftest.py deleted file mode 100644 index 0da69a98303..00000000000 --- a/tests/components/doorbird/conftest.py +++ /dev/null @@ -1,133 +0,0 @@ -"""Test configuration for DoorBird tests.""" - -from collections.abc import Callable, Coroutine, Generator -from contextlib import contextmanager -from dataclasses import dataclass -from typing import Any -from unittest.mock import MagicMock, patch - -from doorbirdpy import DoorBird, DoorBirdScheduleEntry -import pytest - -from homeassistant.components.doorbird.const import ( - CONF_EVENTS, - DEFAULT_DOORBELL_EVENT, - DEFAULT_MOTION_EVENT, - DOMAIN, -) -from homeassistant.core import HomeAssistant - -from . import VALID_CONFIG, get_mock_doorbird_api - -from tests.common import MockConfigEntry, load_json_value_fixture - -type DoorbirdMockerType = Callable[[], Coroutine[Any, Any, MockDoorbirdEntry]] - - -@dataclass -class MockDoorbirdEntry: - """Mock DoorBird config entry.""" - - entry: MockConfigEntry - api: MagicMock - - -@pytest.fixture(scope="package") -def doorbird_info() -> dict[str, Any]: - """Return a loaded DoorBird info fixture.""" - return load_json_value_fixture("info.json", "doorbird")["BHA"]["VERSION"][0] - - -@pytest.fixture(scope="package") -def doorbird_schedule() -> list[DoorBirdScheduleEntry]: - """Return a loaded DoorBird schedule fixture.""" - return DoorBirdScheduleEntry.parse_all( - load_json_value_fixture("schedule.json", "doorbird") - ) - - -@pytest.fixture(scope="package") -def doorbird_schedule_wrong_param() -> list[DoorBirdScheduleEntry]: - """Return a loaded DoorBird schedule fixture with an incorrect param.""" - return DoorBirdScheduleEntry.parse_all( - load_json_value_fixture("schedule_wrong_param.json", "doorbird") - ) - - -@pytest.fixture(scope="package") -def doorbird_favorites() -> dict[str, dict[str, Any]]: - """Return a loaded DoorBird favorites fixture.""" - return load_json_value_fixture("favorites.json", "doorbird") - - -@pytest.fixture -def doorbird_api( - doorbird_info: dict[str, Any], doorbird_schedule: dict[str, Any] -) -> Generator[DoorBird]: - """Mock the DoorBirdAPI.""" - api = get_mock_doorbird_api(info=doorbird_info, schedule=doorbird_schedule) - with patch_doorbird_api_entry_points(api): - yield api - - -@contextmanager -def patch_doorbird_api_entry_points(api: MagicMock) -> Generator[DoorBird]: - """Mock the DoorBirdAPI.""" - with ( - patch( - "homeassistant.components.doorbird.DoorBird", - return_value=api, - ), - patch( - "homeassistant.components.doorbird.config_flow.DoorBird", - return_value=api, - ), - ): - yield api - - -@pytest.fixture -async def doorbird_mocker( - hass: HomeAssistant, - doorbird_info: dict[str, Any], - doorbird_schedule: dict[str, Any], - doorbird_favorites: dict[str, dict[str, Any]], -) -> DoorbirdMockerType: - """Create a MockDoorbirdEntry.""" - - async def _async_mock( - entry: MockConfigEntry | None = None, - api: DoorBird | None = None, - change_schedule: tuple[bool, int] | None = None, - info: dict[str, Any] | None = None, - info_side_effect: Exception | None = None, - schedule: list[DoorBirdScheduleEntry] | None = None, - schedule_side_effect: Exception | None = 
None, - favorites: dict[str, dict[str, Any]] | None = None, - favorites_side_effect: Exception | None = None, - options: dict[str, Any] | None = None, - ) -> MockDoorbirdEntry: - """Create a MockDoorbirdEntry from defaults or specific values.""" - entry = entry or MockConfigEntry( - domain=DOMAIN, - unique_id="1CCAE3AAAAAA", - data=VALID_CONFIG, - options=options - or {CONF_EVENTS: [DEFAULT_DOORBELL_EVENT, DEFAULT_MOTION_EVENT]}, - ) - api = api or get_mock_doorbird_api( - info=info or doorbird_info, - info_side_effect=info_side_effect, - schedule=schedule or doorbird_schedule, - schedule_side_effect=schedule_side_effect, - favorites=favorites or doorbird_favorites, - favorites_side_effect=favorites_side_effect, - change_schedule=change_schedule, - ) - entry.add_to_hass(hass) - with patch_doorbird_api_entry_points(api): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - return MockDoorbirdEntry(entry=entry, api=api) - - return _async_mock diff --git a/tests/components/doorbird/fixtures/favorites.json b/tests/components/doorbird/fixtures/favorites.json deleted file mode 100644 index 50dddb850a5..00000000000 --- a/tests/components/doorbird/fixtures/favorites.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "http": { - "0": { - "title": "Home Assistant (mydoorbird_doorbell)", - "value": "http://127.0.0.1:8123/api/doorbird/mydoorbird_doorbell?token=01J2F4B97Y7P1SARXEJ6W07EKD" - }, - "1": { - "title": "Home Assistant (mydoorbird_motion)", - "value": "http://127.0.0.1:8123/api/doorbird/mydoorbird_motion?token=01J2F4B97Y7P1SARXEJ6W07EKD" - }, - "2": { - "title": "externally added event", - "value": "http://127.0.0.1/" - } - } -} diff --git a/tests/components/doorbird/fixtures/info.json b/tests/components/doorbird/fixtures/info.json deleted file mode 100644 index 46fb8fbac86..00000000000 --- a/tests/components/doorbird/fixtures/info.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "BHA": { - "RETURNCODE": "1", - "VERSION": [ - { - "FIRMWARE": "000125", - "BUILD_NUMBER": "15870439", - "WIFI_MAC_ADDR": "1234ABCD", - "RELAYS": [ - "1", - "2", - "ghchdi@1", - "ghchdi@2", - "ghchdi@3", - "ghdwkh@1", - "ghdwkh@2", - "ghdwkh@3" - ], - "DEVICE-TYPE": "DoorBird D2101V" - } - ] - } -} diff --git a/tests/components/doorbird/fixtures/schedule.json b/tests/components/doorbird/fixtures/schedule.json deleted file mode 100644 index c300180777c..00000000000 --- a/tests/components/doorbird/fixtures/schedule.json +++ /dev/null @@ -1,67 +0,0 @@ -[ - { - "input": "doorbell", - "param": "1", - "output": [ - { - "event": "notify", - "param": "", - "schedule": { - "weekdays": [ - { - "to": "107999", - "from": "108000" - } - ] - } - }, - { - "event": "http", - "param": "0", - "schedule": { - "weekdays": [ - { - "to": "107999", - "from": "108000" - } - ] - } - } - ] - }, - { - "input": "motion", - "param": "", - "output": [ - { - "event": "notify", - "param": "", - "schedule": { - "weekdays": [ - { - "to": "107999", - "from": "108000" - } - ] - } - }, - { - "event": "http", - "param": "5", - "schedule": { - "weekdays": [ - { - "to": "107999", - "from": "108000" - } - ] - } - } - ] - }, - { - "input": "relay", - "param": "1", - "output": [] - } -] diff --git a/tests/components/doorbird/fixtures/schedule_wrong_param.json b/tests/components/doorbird/fixtures/schedule_wrong_param.json deleted file mode 100644 index 724f19b1774..00000000000 --- a/tests/components/doorbird/fixtures/schedule_wrong_param.json +++ /dev/null @@ -1,67 +0,0 @@ -[ - { - "input": "doorbell", - "param": "99", - "output": [ - { 
- "event": "notify", - "param": "", - "schedule": { - "weekdays": [ - { - "to": "107999", - "from": "108000" - } - ] - } - }, - { - "event": "http", - "param": "0", - "schedule": { - "weekdays": [ - { - "to": "107999", - "from": "108000" - } - ] - } - } - ] - }, - { - "input": "motion", - "param": "", - "output": [ - { - "event": "notify", - "param": "", - "schedule": { - "weekdays": [ - { - "to": "107999", - "from": "108000" - } - ] - } - }, - { - "event": "http", - "param": "5", - "schedule": { - "weekdays": [ - { - "to": "107999", - "from": "108000" - } - ] - } - } - ] - }, - { - "input": "relay", - "param": "1", - "output": [] - } -] diff --git a/tests/components/doorbird/test_button.py b/tests/components/doorbird/test_button.py deleted file mode 100644 index abb490e9180..00000000000 --- a/tests/components/doorbird/test_button.py +++ /dev/null @@ -1,52 +0,0 @@ -"""Test DoorBird buttons.""" - -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN -from homeassistant.core import HomeAssistant - -from .conftest import DoorbirdMockerType - - -async def test_relay_button( - hass: HomeAssistant, - doorbird_mocker: DoorbirdMockerType, -) -> None: - """Test pressing a relay button.""" - doorbird_entry = await doorbird_mocker() - relay_1_entity_id = "button.mydoorbird_relay_1" - assert hass.states.get(relay_1_entity_id).state == STATE_UNKNOWN - await hass.services.async_call( - BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: relay_1_entity_id}, blocking=True - ) - assert hass.states.get(relay_1_entity_id).state != STATE_UNKNOWN - assert doorbird_entry.api.energize_relay.call_count == 1 - - -async def test_ir_button( - hass: HomeAssistant, - doorbird_mocker: DoorbirdMockerType, -) -> None: - """Test pressing the IR button.""" - doorbird_entry = await doorbird_mocker() - ir_entity_id = "button.mydoorbird_ir" - assert hass.states.get(ir_entity_id).state == STATE_UNKNOWN - await hass.services.async_call( - BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ir_entity_id}, blocking=True - ) - assert hass.states.get(ir_entity_id).state != STATE_UNKNOWN - assert doorbird_entry.api.turn_light_on.call_count == 1 - - -async def test_reset_favorites_button( - hass: HomeAssistant, - doorbird_mocker: DoorbirdMockerType, -) -> None: - """Test pressing the reset favorites button.""" - doorbird_entry = await doorbird_mocker() - reset_entity_id = "button.mydoorbird_reset_favorites" - assert hass.states.get(reset_entity_id).state == STATE_UNKNOWN - await hass.services.async_call( - BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: reset_entity_id}, blocking=True - ) - assert hass.states.get(reset_entity_id).state != STATE_UNKNOWN - assert doorbird_entry.api.delete_favorite.call_count == 3 diff --git a/tests/components/doorbird/test_camera.py b/tests/components/doorbird/test_camera.py deleted file mode 100644 index a310bcb88cc..00000000000 --- a/tests/components/doorbird/test_camera.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Test DoorBird cameras.""" - -from freezegun.api import FrozenDateTimeFactory -import pytest - -from homeassistant.components.camera import ( - CameraState, - async_get_image, - async_get_stream_source, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from . 
import mock_not_found_exception -from .conftest import DoorbirdMockerType - - -async def test_doorbird_cameras( - hass: HomeAssistant, - doorbird_mocker: DoorbirdMockerType, - freezer: FrozenDateTimeFactory, -) -> None: - """Test the doorbird cameras.""" - doorbird_entry = await doorbird_mocker() - live_camera_entity_id = "camera.mydoorbird_live" - assert hass.states.get(live_camera_entity_id).state == CameraState.IDLE - last_motion_camera_entity_id = "camera.mydoorbird_last_motion" - assert hass.states.get(last_motion_camera_entity_id).state == CameraState.IDLE - last_ring_camera_entity_id = "camera.mydoorbird_last_ring" - assert hass.states.get(last_ring_camera_entity_id).state == CameraState.IDLE - assert await async_get_stream_source(hass, live_camera_entity_id) is not None - api = doorbird_entry.api - api.get_image.side_effect = mock_not_found_exception() - with pytest.raises(HomeAssistantError): - await async_get_image(hass, live_camera_entity_id) - api.get_image.side_effect = TimeoutError() - with pytest.raises(HomeAssistantError): - await async_get_image(hass, live_camera_entity_id) - api.get_image.side_effect = None - assert (await async_get_image(hass, live_camera_entity_id)).content == b"image" - api.get_image.return_value = b"notyet" - # Ensure rate limit works - assert (await async_get_image(hass, live_camera_entity_id)).content == b"image" - - freezer.tick(60) - assert (await async_get_image(hass, live_camera_entity_id)).content == b"notyet" diff --git a/tests/components/doorbird/test_config_flow.py b/tests/components/doorbird/test_config_flow.py index 3abdd2b87a3..cd4ddccda87 100644 --- a/tests/components/doorbird/test_config_flow.py +++ b/tests/components/doorbird/test_config_flow.py @@ -1,35 +1,47 @@ """Test the DoorBird config flow.""" from ipaddress import ip_address -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import MagicMock, Mock, patch -import aiohttp -from doorbirdpy import DoorBird import pytest +import requests from homeassistant import config_entries from homeassistant.components import zeroconf -from homeassistant.components.doorbird.const import ( - CONF_EVENTS, - DEFAULT_DOORBELL_EVENT, - DEFAULT_MOTION_EVENT, - DOMAIN, -) +from homeassistant.components.doorbird.const import CONF_EVENTS, DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . 
import ( - VALID_CONFIG, - get_mock_doorbird_api, - mock_not_found_exception, - mock_unauthorized_exception, -) - from tests.common import MockConfigEntry +VALID_CONFIG = { + CONF_HOST: "1.2.3.4", + CONF_USERNAME: "friend", + CONF_PASSWORD: "password", + CONF_NAME: "mydoorbird", +} -async def test_user_form(hass: HomeAssistant, doorbird_api: DoorBird) -> None: + +def _get_mock_doorbirdapi_return_values(ready=None, info=None): + doorbirdapi_mock = MagicMock() + type(doorbirdapi_mock).ready = MagicMock(return_value=ready) + type(doorbirdapi_mock).info = MagicMock(return_value=info) + type(doorbirdapi_mock).doorbell_state = MagicMock( + side_effect=requests.exceptions.HTTPError(response=Mock(status_code=401)) + ) + return doorbirdapi_mock + + +def _get_mock_doorbirdapi_side_effects(ready=None, info=None): + doorbirdapi_mock = MagicMock() + type(doorbirdapi_mock).ready = MagicMock(side_effect=ready) + type(doorbirdapi_mock).info = MagicMock(side_effect=info) + + return doorbirdapi_mock + + +async def test_user_form(hass: HomeAssistant) -> None: """Test we get the user form.""" result = await hass.config_entries.flow.async_init( @@ -38,7 +50,14 @@ async def test_user_form(hass: HomeAssistant, doorbird_api: DoorBird) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} + doorbirdapi = _get_mock_doorbirdapi_return_values( + ready=[True], info={"WIFI_MAC_ADDR": "macaddr"} + ) with ( + patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=doorbirdapi, + ), patch( "homeassistant.components.doorbird.async_setup", return_value=True ) as mock_setup, @@ -61,9 +80,6 @@ async def test_user_form(hass: HomeAssistant, doorbird_api: DoorBird) -> None: "password": "password", "username": "friend", } - assert result2["options"] == { - CONF_EVENTS: [DEFAULT_DOORBELL_EVENT, DEFAULT_MOTION_EVENT] - } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -156,30 +172,39 @@ async def test_form_zeroconf_non_ipv4_ignored(hass: HomeAssistant) -> None: assert result["reason"] == "not_ipv4_address" -async def test_form_zeroconf_correct_oui( - hass: HomeAssistant, doorbird_api: DoorBird -) -> None: +async def test_form_zeroconf_correct_oui(hass: HomeAssistant) -> None: """Test we can setup from zeroconf with the correct OUI source.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, - data=zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("192.168.1.5"), - ip_addresses=[ip_address("192.168.1.5")], - hostname="mock_hostname", - name="Doorstation - abc123._axis-video._tcp.local.", - port=None, - properties={"macaddress": "1CCAE3DOORBIRD"}, - type="mock_type", - ), + doorbirdapi = _get_mock_doorbirdapi_return_values( + ready=[True], info={"WIFI_MAC_ADDR": "macaddr"} ) - await hass.async_block_till_done() + + with patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=doorbirdapi, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.5"), + ip_addresses=[ip_address("192.168.1.5")], + hostname="mock_hostname", + name="Doorstation - abc123._axis-video._tcp.local.", + port=None, + properties={"macaddress": "1CCAE3DOORBIRD"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} 
with ( + patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=doorbirdapi, + ), patch("homeassistant.components.logbook.async_setup", return_value=True), patch( "homeassistant.components.doorbird.async_setup", return_value=True @@ -209,19 +234,19 @@ async def test_form_zeroconf_correct_oui( @pytest.mark.parametrize( "doorbell_state_side_effect", [ - aiohttp.ClientResponseError(request_info=Mock(), history=Mock(), status=404), + requests.exceptions.HTTPError(response=Mock(status_code=404)), OSError, None, ], ) async def test_form_zeroconf_correct_oui_wrong_device( - hass: HomeAssistant, - doorbird_api: DoorBird, - doorbell_state_side_effect: Exception | None, + hass: HomeAssistant, doorbell_state_side_effect ) -> None: """Test we can setup from zeroconf with the correct OUI source but not a doorstation.""" - doorbirdapi = get_mock_doorbird_api(info={"WIFI_MAC_ADDR": "macaddr"}) - type(doorbirdapi).doorbell_state = AsyncMock(side_effect=doorbell_state_side_effect) + doorbirdapi = _get_mock_doorbirdapi_return_values( + ready=[True], info={"WIFI_MAC_ADDR": "macaddr"} + ) + type(doorbirdapi).doorbell_state = MagicMock(side_effect=doorbell_state_side_effect) with patch( "homeassistant.components.doorbird.config_flow.DoorBird", @@ -251,7 +276,7 @@ async def test_form_user_cannot_connect(hass: HomeAssistant) -> None: DOMAIN, context={"source": config_entries.SOURCE_USER} ) - doorbirdapi = get_mock_doorbird_api(info_side_effect=OSError) + doorbirdapi = _get_mock_doorbirdapi_side_effects(ready=OSError) with patch( "homeassistant.components.doorbird.config_flow.DoorBird", return_value=doorbirdapi, @@ -271,8 +296,8 @@ async def test_form_user_invalid_auth(hass: HomeAssistant) -> None: DOMAIN, context={"source": config_entries.SOURCE_USER} ) - mock_error = mock_unauthorized_exception() - doorbirdapi = get_mock_doorbird_api(info_side_effect=mock_error) + mock_error = requests.exceptions.HTTPError(response=Mock(status_code=401)) + doorbirdapi = _get_mock_doorbirdapi_side_effects(ready=mock_error) with patch( "homeassistant.components.doorbird.config_flow.DoorBird", return_value=doorbirdapi, @@ -286,100 +311,6 @@ async def test_form_user_invalid_auth(hass: HomeAssistant) -> None: assert result2["errors"] == {"base": "invalid_auth"} -async def test_form_user_doorbird_not_found( - doorbird_api: DoorBird, hass: HomeAssistant -) -> None: - """Test handling unable to connect to the device.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - mock_error = mock_not_found_exception() - doorbirdapi = get_mock_doorbird_api(info_side_effect=mock_error) - with patch( - "homeassistant.components.doorbird.config_flow.DoorBird", - return_value=doorbirdapi, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - VALID_CONFIG, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} - - with ( - patch( - "homeassistant.components.doorbird.async_setup", return_value=True - ) as mock_setup, - patch( - "homeassistant.components.doorbird.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], VALID_CONFIG - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "1.2.3.4" - assert result3["data"] == { - "host": "1.2.3.4", - "name": "mydoorbird", - "password": "password", - 
"username": "friend", - } - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_user_doorbird_unknown_exception( - doorbird_api: DoorBird, hass: HomeAssistant -) -> None: - """Test handling unable an unknown exception.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - doorbirdapi = get_mock_doorbird_api(info_side_effect=ValueError) - with patch( - "homeassistant.components.doorbird.config_flow.DoorBird", - return_value=doorbirdapi, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - VALID_CONFIG, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "unknown"} - - with ( - patch( - "homeassistant.components.doorbird.async_setup", return_value=True - ) as mock_setup, - patch( - "homeassistant.components.doorbird.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], VALID_CONFIG - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "1.2.3.4" - assert result3["data"] == { - "host": "1.2.3.4", - "name": "mydoorbird", - "password": "password", - "username": "friend", - } - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - async def test_options_flow(hass: HomeAssistant) -> None: """Test config flow options.""" @@ -405,67 +336,3 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert config_entry.options == {CONF_EVENTS: ["eventa", "eventc", "eventq"]} - - -async def test_reauth(hass: HomeAssistant) -> None: - """Test reauth flow.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_HOST: "1.1.1.1", - CONF_NAME: "DoorBird", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - config_entry.add_to_hass(hass) - config_entry.async_start_reauth(hass) - await hass.async_block_till_done() - flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) - assert len(flows) == 1 - flow = flows[0] - - mock_error = mock_unauthorized_exception() - doorbirdapi = get_mock_doorbird_api(info_side_effect=mock_error) - with patch( - "homeassistant.components.doorbird.config_flow.DoorBird", - return_value=doorbirdapi, - ): - result2 = await hass.config_entries.flow.async_configure( - flow["flow_id"], - { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "invalid_auth"} - - doorbirdapi = get_mock_doorbird_api(info={"WIFI_MAC_ADDR": "macaddr"}) - with ( - patch( - "homeassistant.components.doorbird.config_flow.DoorBird", - return_value=doorbirdapi, - ), - patch( - "homeassistant.components.doorbird.async_setup", return_value=True - ) as mock_setup, - patch( - "homeassistant.components.doorbird.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result2 = await hass.config_entries.flow.async_configure( - flow["flow_id"], - { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reauth_successful" - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_setup.mock_calls) == 1 diff --git 
a/tests/components/doorbird/test_device.py b/tests/components/doorbird/test_device.py deleted file mode 100644 index cf3beae5e68..00000000000 --- a/tests/components/doorbird/test_device.py +++ /dev/null @@ -1,59 +0,0 @@ -"""Test DoorBird device.""" - -from copy import deepcopy -from http import HTTPStatus - -from doorbirdpy import DoorBirdScheduleEntry -import pytest - -from homeassistant.components.doorbird.const import CONF_EVENTS -from homeassistant.core import HomeAssistant - -from .conftest import DoorbirdMockerType - - -async def test_no_configured_events( - hass: HomeAssistant, - doorbird_mocker: DoorbirdMockerType, -) -> None: - """Test a doorbird with no events configured.""" - await doorbird_mocker(options={CONF_EVENTS: []}) - assert not hass.states.async_all("event") - - -async def test_change_schedule_success( - doorbird_mocker: DoorbirdMockerType, - doorbird_schedule_wrong_param: list[DoorBirdScheduleEntry], - caplog: pytest.LogCaptureFixture, -) -> None: - """Test a doorbird when change_schedule fails.""" - schedule_copy = deepcopy(doorbird_schedule_wrong_param) - mock_doorbird = await doorbird_mocker(schedule=schedule_copy) - assert "Unable to update schedule entry mydoorbird" not in caplog.text - assert mock_doorbird.api.change_schedule.call_count == 1 - new_schedule: list[DoorBirdScheduleEntry] = ( - mock_doorbird.api.change_schedule.call_args[0] - ) - # Ensure the attempt to update the schedule to fix the incorrect - # param is made - assert new_schedule[-1].output[-1].param == "1" - - -async def test_change_schedule_fails( - doorbird_mocker: DoorbirdMockerType, - doorbird_schedule_wrong_param: list[DoorBirdScheduleEntry], - caplog: pytest.LogCaptureFixture, -) -> None: - """Test a doorbird when change_schedule fails.""" - schedule_copy = deepcopy(doorbird_schedule_wrong_param) - mock_doorbird = await doorbird_mocker( - schedule=schedule_copy, change_schedule=(False, HTTPStatus.UNAUTHORIZED) - ) - assert "Unable to update schedule entry mydoorbird" in caplog.text - assert mock_doorbird.api.change_schedule.call_count == 1 - new_schedule: list[DoorBirdScheduleEntry] = ( - mock_doorbird.api.change_schedule.call_args[0] - ) - # Ensure the attempt to update the schedule to fix the incorrect - # param is made - assert new_schedule[-1].output[-1].param == "1" diff --git a/tests/components/doorbird/test_event.py b/tests/components/doorbird/test_event.py deleted file mode 100644 index 11e0f3a306d..00000000000 --- a/tests/components/doorbird/test_event.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Test DoorBird events.""" - -from homeassistant.const import STATE_UNKNOWN -from homeassistant.core import HomeAssistant - -from . 
import mock_webhook_call -from .conftest import DoorbirdMockerType - -from tests.typing import ClientSessionGenerator - - -async def test_doorbell_ring_event( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - doorbird_mocker: DoorbirdMockerType, -) -> None: - """Test a doorbell ring event.""" - doorbird_entry = await doorbird_mocker() - relay_1_entity_id = "event.mydoorbird_doorbell" - assert hass.states.get(relay_1_entity_id).state == STATE_UNKNOWN - client = await hass_client() - await mock_webhook_call(doorbird_entry.entry, client, "mydoorbird_doorbell") - assert hass.states.get(relay_1_entity_id).state != STATE_UNKNOWN - - -async def test_motion_event( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - doorbird_mocker: DoorbirdMockerType, -) -> None: - """Test a doorbell motion event.""" - doorbird_entry = await doorbird_mocker() - relay_1_entity_id = "event.mydoorbird_motion" - assert hass.states.get(relay_1_entity_id).state == STATE_UNKNOWN - client = await hass_client() - await mock_webhook_call(doorbird_entry.entry, client, "mydoorbird_motion") - assert hass.states.get(relay_1_entity_id).state != STATE_UNKNOWN diff --git a/tests/components/doorbird/test_init.py b/tests/components/doorbird/test_init.py deleted file mode 100644 index 31266c4acf0..00000000000 --- a/tests/components/doorbird/test_init.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Test DoorBird init.""" - -import pytest - -from homeassistant.components.doorbird.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from . import mock_not_found_exception, mock_unauthorized_exception -from .conftest import DoorbirdMockerType - - -async def test_basic_setup( - doorbird_mocker: DoorbirdMockerType, -) -> None: - """Test basic setup.""" - doorbird_entry = await doorbird_mocker() - entry = doorbird_entry.entry - assert entry.state is ConfigEntryState.LOADED - - -async def test_auth_fails( - hass: HomeAssistant, - doorbird_mocker: DoorbirdMockerType, -) -> None: - """Test basic setup with an auth failure.""" - doorbird_entry = await doorbird_mocker( - info_side_effect=mock_unauthorized_exception() - ) - entry = doorbird_entry.entry - assert entry.state is ConfigEntryState.SETUP_ERROR - flows = hass.config_entries.flow.async_progress(DOMAIN) - assert len(flows) == 1 - assert flows[0]["step_id"] == "reauth_confirm" - - -@pytest.mark.parametrize( - "side_effect", - [OSError, mock_not_found_exception()], -) -async def test_http_info_request_fails( - doorbird_mocker: DoorbirdMockerType, side_effect: Exception -) -> None: - """Test basic setup with an http failure.""" - doorbird_entry = await doorbird_mocker(info_side_effect=side_effect) - assert doorbird_entry.entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_http_favorites_request_fails( - doorbird_mocker: DoorbirdMockerType, -) -> None: - """Test basic setup with an http failure.""" - doorbird_entry = await doorbird_mocker( - favorites_side_effect=mock_not_found_exception() - ) - assert doorbird_entry.entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_http_schedule_api_missing( - doorbird_mocker: DoorbirdMockerType, -) -> None: - """Test missing the schedule API is non-fatal as not all models support it.""" - doorbird_entry = await doorbird_mocker( - schedule_side_effect=mock_not_found_exception() - ) - assert doorbird_entry.entry.state is ConfigEntryState.LOADED - - -async def test_events_changed( - hass: HomeAssistant, - doorbird_mocker: DoorbirdMockerType, -) 
-> None: - """Test basic setup.""" - doorbird_entry = await doorbird_mocker() - entry = doorbird_entry.entry - assert entry.state is ConfigEntryState.LOADED - api = doorbird_entry.api - api.favorites.reset_mock() - api.change_favorite.reset_mock() - api.schedule.reset_mock() - - hass.config_entries.async_update_entry(entry, options={"events": ["xyz"]}) - await hass.async_block_till_done() - assert len(api.favorites.mock_calls) == 2 - assert len(api.schedule.mock_calls) == 1 - - assert len(api.change_favorite.mock_calls) == 1 - favorite_type, title, url = api.change_favorite.mock_calls[0][1] - assert favorite_type == "http" - assert title == "Home Assistant (mydoorbird_xyz)" - assert url == ( - f"http://10.10.10.10:8123/api/doorbird/mydoorbird_xyz?token={entry.entry_id}" - ) diff --git a/tests/components/doorbird/test_repairs.py b/tests/components/doorbird/test_repairs.py deleted file mode 100644 index 34e6de7516e..00000000000 --- a/tests/components/doorbird/test_repairs.py +++ /dev/null @@ -1,51 +0,0 @@ -"""Test repairs for doorbird.""" - -from __future__ import annotations - -from homeassistant.components.doorbird.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from . import mock_not_found_exception -from .conftest import DoorbirdMockerType - -from tests.components.repairs import ( - async_process_repairs_platforms, - process_repair_fix_flow, - start_repair_fix_flow, -) -from tests.typing import ClientSessionGenerator - - -async def test_change_schedule_fails( - hass: HomeAssistant, - doorbird_mocker: DoorbirdMockerType, - hass_client: ClientSessionGenerator, -) -> None: - """Test a doorbird when change_schedule fails.""" - assert await async_setup_component(hass, "repairs", {}) - doorbird_entry = await doorbird_mocker( - favorites_side_effect=mock_not_found_exception() - ) - assert doorbird_entry.entry.state is ConfigEntryState.SETUP_RETRY - issue_reg = ir.async_get(hass) - assert len(issue_reg.issues) == 1 - issue = list(issue_reg.issues.values())[0] - issue_id = issue.issue_id - assert issue.domain == DOMAIN - - await async_process_repairs_platforms(hass) - client = await hass_client() - - data = await start_repair_fix_flow(client, DOMAIN, issue_id) - - flow_id = data["flow_id"] - placeholders = data["description_placeholders"] - assert "404" in placeholders["error"] - assert data["step_id"] == "confirm" - - data = await process_repair_fix_flow(client, flow_id) - - assert data["type"] == "create_entry" diff --git a/tests/components/doorbird/test_view.py b/tests/components/doorbird/test_view.py deleted file mode 100644 index 9d2b53714b6..00000000000 --- a/tests/components/doorbird/test_view.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Test DoorBird view.""" - -from http import HTTPStatus - -from homeassistant.components.doorbird.const import API_URL - -from .conftest import DoorbirdMockerType - -from tests.typing import ClientSessionGenerator - - -async def test_non_webhook_with_wrong_token( - hass_client: ClientSessionGenerator, - doorbird_mocker: DoorbirdMockerType, -) -> None: - """Test calling the webhook with the wrong token.""" - await doorbird_mocker() - client = await hass_client() - - response = await client.get(f"{API_URL}/doorbell?token=wrong") - assert response.status == HTTPStatus.UNAUTHORIZED diff --git a/tests/components/dormakaba_dkey/test_config_flow.py 
b/tests/components/dormakaba_dkey/test_config_flow.py index 8d8140d609a..499e5844949 100644 --- a/tests/components/dormakaba_dkey/test_config_flow.py +++ b/tests/components/dormakaba_dkey/test_config_flow.py @@ -310,7 +310,11 @@ async def test_reauth(hass: HomeAssistant) -> None: data={"address": DKEY_DISCOVERY_INFO.address}, ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/downloader/test_config_flow.py b/tests/components/downloader/test_config_flow.py index 6bd740afab8..132b83dffdf 100644 --- a/tests/components/downloader/test_config_flow.py +++ b/tests/components/downloader/test_config_flow.py @@ -4,8 +4,9 @@ from unittest.mock import patch import pytest +from homeassistant import config_entries from homeassistant.components.downloader.const import CONF_DOWNLOAD_DIR, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -53,7 +54,7 @@ async def test_user_form(hass: HomeAssistant) -> None: assert result["data"] == {"download_dir": "download_dir"} -@pytest.mark.parametrize("source", [SOURCE_USER]) +@pytest.mark.parametrize("source", [SOURCE_USER, SOURCE_IMPORT]) async def test_single_instance_allowed( hass: HomeAssistant, source: str, @@ -68,3 +69,40 @@ async def test_single_instance_allowed( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" + + +async def test_import_flow_success(hass: HomeAssistant) -> None: + """Test import flow.""" + with ( + patch( + "homeassistant.components.downloader.async_setup_entry", return_value=True + ), + patch( + "os.path.isdir", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data=CONFIG, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Downloader" + assert result["data"] == CONFIG + + +async def test_import_flow_directory_not_found(hass: HomeAssistant) -> None: + """Test import flow.""" + with patch("os.path.isdir", return_value=False): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={ + CONF_DOWNLOAD_DIR: "download_dir", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "directory_does_not_exist" diff --git a/tests/components/downloader/test_init.py b/tests/components/downloader/test_init.py index 70dfd227019..5832c0402b4 100644 --- a/tests/components/downloader/test_init.py +++ b/tests/components/downloader/test_init.py @@ -8,7 +8,9 @@ from homeassistant.components.downloader import ( SERVICE_DOWNLOAD_FILE, ) from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -27,3 +29,83 @@ async def test_initialization(hass: 
HomeAssistant) -> None:
     assert hass.services.has_service(DOMAIN, SERVICE_DOWNLOAD_FILE)
 
     assert config_entry.state is ConfigEntryState.LOADED
+
+
+async def test_import(hass: HomeAssistant, issue_registry: ir.IssueRegistry) -> None:
+    """Test the import of the downloader component."""
+    with patch("os.path.isdir", return_value=True):
+        assert await async_setup_component(
+            hass,
+            DOMAIN,
+            {
+                DOMAIN: {
+                    CONF_DOWNLOAD_DIR: "/test_dir",
+                },
+            },
+        )
+        await hass.async_block_till_done()
+
+    assert len(hass.config_entries.async_entries(DOMAIN)) == 1
+    config_entry = hass.config_entries.async_entries(DOMAIN)[0]
+    assert config_entry.data == {CONF_DOWNLOAD_DIR: "/test_dir"}
+    assert config_entry.state is ConfigEntryState.LOADED
+    assert hass.services.has_service(DOMAIN, SERVICE_DOWNLOAD_FILE)
+    assert len(issue_registry.issues) == 1
+    issue = issue_registry.async_get_issue(
+        issue_id="deprecated_yaml_downloader", domain=HOMEASSISTANT_DOMAIN
+    )
+    assert issue
+
+
+async def test_import_directory_missing(
+    hass: HomeAssistant, issue_registry: ir.IssueRegistry
+) -> None:
+    """Test the import of the downloader component."""
+    with patch("os.path.isdir", return_value=False):
+        assert await async_setup_component(
+            hass,
+            DOMAIN,
+            {
+                DOMAIN: {
+                    CONF_DOWNLOAD_DIR: "/test_dir",
+                },
+            },
+        )
+        await hass.async_block_till_done()
+
+    assert len(hass.config_entries.async_entries(DOMAIN)) == 0
+    assert len(issue_registry.issues) == 1
+    issue = issue_registry.async_get_issue(
+        issue_id="deprecated_yaml_downloader", domain=DOMAIN
+    )
+    assert issue
+
+
+async def test_import_already_exists(
+    hass: HomeAssistant, issue_registry: ir.IssueRegistry
+) -> None:
+    """Test the import of the downloader component."""
+    config_entry = MockConfigEntry(
+        domain=DOMAIN,
+        data={
+            CONF_DOWNLOAD_DIR: "/test_dir",
+        },
+    )
+    config_entry.add_to_hass(hass)
+    with patch("os.path.isdir", return_value=True):
+        assert await async_setup_component(
+            hass,
+            DOMAIN,
+            {
+                DOMAIN: {
+                    CONF_DOWNLOAD_DIR: "/test_dir",
+                },
+            },
+        )
+        await hass.async_block_till_done()
+
+    assert len(issue_registry.issues) == 1
+    issue = issue_registry.async_get_issue(
+        issue_id="deprecated_yaml_downloader", domain=HOMEASSISTANT_DOMAIN
+    )
+    assert issue
diff --git a/tests/components/dremel_3d_printer/conftest.py b/tests/components/dremel_3d_printer/conftest.py
index cc70537db3d..6490b844dc0 100644
--- a/tests/components/dremel_3d_printer/conftest.py
+++ b/tests/components/dremel_3d_printer/conftest.py
@@ -34,7 +34,7 @@ def connection() -> None:
     """Mock Dremel 3D Printer connection."""
     with requests_mock.Mocker() as mock:
         mock.post(
-            f"http://{HOST}/command",
+            f"http://{HOST}:80/command",
             response_list=[
                 {"text": load_fixture("dremel_3d_printer/command_1.json")},
                 {"text": load_fixture("dremel_3d_printer/command_2.json")},
diff --git a/tests/components/drop_connect/common.py b/tests/components/drop_connect/common.py
index 9eb76f57dad..bdba79bbd95 100644
--- a/tests/components/drop_connect/common.py
+++ b/tests/components/drop_connect/common.py
@@ -34,10 +34,6 @@ TEST_DATA_SALT_TOPIC = "drop_connect/DROP-1_C0FFEE/8"
 TEST_DATA_SALT = '{"salt":1}'
 TEST_DATA_SALT_RESET = '{"salt":0}'
 
-TEST_DATA_ALERT_TOPIC = "drop_connect/DROP-1_C0FFEE/81"
-TEST_DATA_ALERT = '{"battery":100,"sens":1,"pwrOff":0,"temp":68.2}'
-TEST_DATA_ALERT_RESET = '{"battery":0,"sens":0,"pwrOff":1,"temp":0}'
-
 TEST_DATA_LEAK_TOPIC = "drop_connect/DROP-1_C0FFEE/20"
 TEST_DATA_LEAK = '{"battery":100,"leak":1,"temp":68.2}'
 TEST_DATA_LEAK_RESET =
'{"battery":0,"leak":0,"temp":0}' @@ -113,25 +109,6 @@ def config_entry_salt() -> ConfigEntry: ) -def config_entry_alert() -> ConfigEntry: - """Config entry version 1 fixture.""" - return MockConfigEntry( - domain=DOMAIN, - unique_id="DROP-1_C0FFEE_81", - data={ - CONF_COMMAND_TOPIC: "drop_connect/DROP-1_C0FFEE/81/cmd", - CONF_DATA_TOPIC: "drop_connect/DROP-1_C0FFEE/81/#", - CONF_DEVICE_DESC: "Alert", - CONF_DEVICE_ID: 81, - CONF_DEVICE_NAME: "Alert", - CONF_DEVICE_TYPE: "alrt", - CONF_HUB_ID: "DROP-1_C0FFEE", - CONF_DEVICE_OWNER_ID: "DROP-1_C0FFEE_255", - }, - version=1, - ) - - def config_entry_leak() -> ConfigEntry: """Config entry version 1 fixture.""" return MockConfigEntry( diff --git a/tests/components/drop_connect/snapshots/test_binary_sensor.ambr b/tests/components/drop_connect/snapshots/test_binary_sensor.ambr index 9b0cc201573..c42cdb8cde1 100644 --- a/tests/components/drop_connect/snapshots/test_binary_sensor.ambr +++ b/tests/components/drop_connect/snapshots/test_binary_sensor.ambr @@ -1,98 +1,4 @@ # serializer version: 1 -# name: test_sensors[alert][binary_sensor.alert_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.alert_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'drop_connect', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'DROP-1_C0FFEE_81_power', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[alert][binary_sensor.alert_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Alert Power', - }), - 'context': , - 'entity_id': 'binary_sensor.alert_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensors[alert][binary_sensor.alert_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.alert_sensor', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sensor', - 'platform': 'drop_connect', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'alert_sensor', - 'unique_id': 'DROP-1_C0FFEE_81_alert_sensor', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[alert][binary_sensor.alert_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Alert Sensor', - }), - 'context': , - 'entity_id': 'binary_sensor.alert_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_sensors[hub][binary_sensor.hub_drop_1_c0ffee_leak_detected-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/drop_connect/snapshots/test_sensor.ambr b/tests/components/drop_connect/snapshots/test_sensor.ambr index a5c91dbe3e4..54e3259e455 
100644 --- a/tests/components/drop_connect/snapshots/test_sensor.ambr +++ b/tests/components/drop_connect/snapshots/test_sensor.ambr @@ -1,68 +1,4 @@ # serializer version: 1 -# name: test_sensors[alert][sensor.alert_battery-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Alert Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.alert_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[alert][sensor.alert_battery-reset] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Alert Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.alert_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[alert][sensor.alert_temperature-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Alert Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.alert_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20.1111111111111', - }) -# --- -# name: test_sensors[alert][sensor.alert_temperature-reset] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Alert Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.alert_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '-17.7777777777778', - }) -# --- # name: test_sensors[filter][sensor.filter_battery-data] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/drop_connect/test_binary_sensor.py b/tests/components/drop_connect/test_binary_sensor.py index ab89e05d809..895921291ef 100644 --- a/tests/components/drop_connect/test_binary_sensor.py +++ b/tests/components/drop_connect/test_binary_sensor.py @@ -10,9 +10,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from .common import ( - TEST_DATA_ALERT, - TEST_DATA_ALERT_RESET, - TEST_DATA_ALERT_TOPIC, TEST_DATA_HUB, TEST_DATA_HUB_RESET, TEST_DATA_HUB_TOPIC, @@ -31,7 +28,6 @@ from .common import ( TEST_DATA_SOFTENER, TEST_DATA_SOFTENER_RESET, TEST_DATA_SOFTENER_TOPIC, - config_entry_alert, config_entry_hub, config_entry_leak, config_entry_protection_valve, @@ -48,12 +44,6 @@ from tests.typing import MqttMockHAClient ("config_entry", "topic", "reset", "data"), [ (config_entry_hub(), TEST_DATA_HUB_TOPIC, TEST_DATA_HUB_RESET, TEST_DATA_HUB), - ( - config_entry_alert(), - TEST_DATA_ALERT_TOPIC, - TEST_DATA_ALERT_RESET, - TEST_DATA_ALERT, - ), ( config_entry_leak(), TEST_DATA_LEAK_TOPIC, @@ -87,7 +77,6 @@ from tests.typing import MqttMockHAClient ], ids=[ "hub", - "alert", "leak", "softener", "protection_valve", diff --git a/tests/components/drop_connect/test_sensor.py b/tests/components/drop_connect/test_sensor.py index c33f0aefe37..4873d1edbd1 100644 --- a/tests/components/drop_connect/test_sensor.py +++ b/tests/components/drop_connect/test_sensor.py @@ -11,9 +11,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from .common import ( - TEST_DATA_ALERT, - TEST_DATA_ALERT_RESET, - TEST_DATA_ALERT_TOPIC, TEST_DATA_FILTER, TEST_DATA_FILTER_RESET, TEST_DATA_FILTER_TOPIC, @@ -35,7 +32,6 @@ from 
.common import ( TEST_DATA_SOFTENER, TEST_DATA_SOFTENER_RESET, TEST_DATA_SOFTENER_TOPIC, - config_entry_alert, config_entry_filter, config_entry_hub, config_entry_leak, @@ -51,7 +47,7 @@ from tests.typing import MqttMockHAClient @pytest.fixture(autouse=True) -def only_sensor_platform() -> Generator[None]: +def only_sensor_platform() -> Generator[[], None]: """Only setup the DROP sensor platform.""" with patch("homeassistant.components.drop_connect.PLATFORMS", [Platform.SENSOR]): yield @@ -61,12 +57,6 @@ def only_sensor_platform() -> Generator[None]: ("config_entry", "topic", "reset", "data"), [ (config_entry_hub(), TEST_DATA_HUB_TOPIC, TEST_DATA_HUB_RESET, TEST_DATA_HUB), - ( - config_entry_alert(), - TEST_DATA_ALERT_TOPIC, - TEST_DATA_ALERT_RESET, - TEST_DATA_ALERT, - ), ( config_entry_leak(), TEST_DATA_LEAK_TOPIC, @@ -106,7 +96,6 @@ def only_sensor_platform() -> Generator[None]: ], ids=[ "hub", - "alert", "leak", "softener", "filter", diff --git a/tests/components/dsmr/conftest.py b/tests/components/dsmr/conftest.py index 2301b9dfc80..2257b8414a6 100644 --- a/tests/components/dsmr/conftest.py +++ b/tests/components/dsmr/conftest.py @@ -1,7 +1,6 @@ """Common test tools.""" import asyncio -from collections.abc import Generator from unittest.mock import MagicMock, patch from dsmr_parser.clients.protocol import DSMRProtocol @@ -16,6 +15,7 @@ from dsmr_parser.obis_references import ( ) from dsmr_parser.objects import CosemObject import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/dsmr/snapshots/test_diagnostics.ambr b/tests/components/dsmr/snapshots/test_diagnostics.ambr deleted file mode 100644 index ec2dc274efa..00000000000 --- a/tests/components/dsmr/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,29 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics - dict({ - 'data': dict({ - 'CURRENT_ELECTRICITY_USAGE': dict({ - 'unit': 'W', - 'value': 0.0, - }), - 'ELECTRICITY_ACTIVE_TARIFF': dict({ - 'unit': '', - 'value': '0001', - }), - 'GAS_METER_READING': dict({ - 'datetime': '2019-03-03T19:43:33+00:00', - 'unit': 'm³', - 'value': 745.695, - }), - }), - 'entry': dict({ - 'data': dict({ - 'dsmr_version': '2.2', - 'port': '/dev/ttyUSB0', - 'serial_id': '1234', - 'serial_id_gas': '5678', - }), - 'unique_id': '/dev/ttyUSB0', - }), - }) -# --- diff --git a/tests/components/dsmr/test_config_flow.py b/tests/components/dsmr/test_config_flow.py index 91adf38eacf..3b4dc533993 100644 --- a/tests/components/dsmr/test_config_flow.py +++ b/tests/components/dsmr/test_config_flow.py @@ -10,8 +10,7 @@ import serial import serial.tools.list_ports from homeassistant import config_entries -from homeassistant.components.dsmr import config_flow -from homeassistant.components.dsmr.const import DOMAIN +from homeassistant.components.dsmr import DOMAIN, config_flow from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType diff --git a/tests/components/dsmr/test_diagnostics.py b/tests/components/dsmr/test_diagnostics.py deleted file mode 100644 index 8fc996f6e34..00000000000 --- a/tests/components/dsmr/test_diagnostics.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Test DSMR diagnostics.""" - -import datetime -from decimal import Decimal -from unittest.mock import MagicMock - -from dsmr_parser.obis_references import ( - CURRENT_ELECTRICITY_USAGE, - ELECTRICITY_ACTIVE_TARIFF, - GAS_METER_READING, -) -from dsmr_parser.objects import CosemObject, MBusObject, Telegram -from syrupy.assertion import SnapshotAssertion - -from 
homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], - snapshot: SnapshotAssertion, -) -> None: - """Test diagnostics.""" - (connection_factory, transport, protocol) = dsmr_connection_fixture - - entry_data = { - "port": "/dev/ttyUSB0", - "dsmr_version": "2.2", - "serial_id": "1234", - "serial_id_gas": "5678", - } - entry_options = { - "time_between_update": 0, - } - - telegram = Telegram() - telegram.add( - CURRENT_ELECTRICITY_USAGE, - CosemObject( - (0, 0), - [{"value": Decimal("0.0"), "unit": "W"}], - ), - "CURRENT_ELECTRICITY_USAGE", - ) - telegram.add( - ELECTRICITY_ACTIVE_TARIFF, - CosemObject((0, 0), [{"value": "0001", "unit": ""}]), - "ELECTRICITY_ACTIVE_TARIFF", - ) - telegram.add( - GAS_METER_READING, - MBusObject( - (0, 0), - [ - {"value": datetime.datetime.fromtimestamp(1551642213)}, - {"value": Decimal(745.695), "unit": "m³"}, - ], - ), - "GAS_METER_READING", - ) - - mock_entry = MockConfigEntry( - domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options - ) - - mock_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() - - telegram_callback = connection_factory.call_args_list[0][0][2] - - # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser - telegram_callback(telegram) - - result = await get_diagnostics_for_config_entry(hass, hass_client, mock_entry) - assert result == snapshot diff --git a/tests/components/dsmr/test_mbus_migration.py b/tests/components/dsmr/test_mbus_migration.py index 7c7d182aa97..18f5e850ecd 100644 --- a/tests/components/dsmr/test_mbus_migration.py +++ b/tests/components/dsmr/test_mbus_migration.py @@ -5,11 +5,11 @@ from decimal import Decimal from unittest.mock import MagicMock from dsmr_parser.obis_references import ( - MBUS_DEVICE_TYPE, - MBUS_EQUIPMENT_IDENTIFIER, - MBUS_METER_READING, + BELGIUM_MBUS1_DEVICE_TYPE, + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS1_METER_READING2, ) -from dsmr_parser.objects import CosemObject, MBusObject, Telegram +from dsmr_parser.objects import CosemObject, MBusObject from homeassistant.components.dsmr.const import DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN @@ -65,31 +65,22 @@ async def test_migrate_gas_to_mbus( assert entity.unique_id == old_unique_id await hass.async_block_till_done() - telegram = Telegram() - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 1), [{"value": "003", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 1), + telegram = { + BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 1), + BELGIUM_MBUS1_METER_READING2: MBusObject( + BELGIUM_MBUS1_METER_READING2, [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - "MBUS_METER_READING", - ) + } assert await hass.config_entries.async_setup(mock_entry.entry_id) await 
hass.async_block_till_done() @@ -119,201 +110,6 @@ async def test_migrate_gas_to_mbus( ) -async def test_migrate_hourly_gas_to_mbus( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device_registry: dr.DeviceRegistry, - dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], -) -> None: - """Test migration of unique_id.""" - (connection_factory, transport, protocol) = dsmr_connection_fixture - - mock_entry = MockConfigEntry( - domain=DOMAIN, - unique_id="/dev/ttyUSB0", - data={ - "port": "/dev/ttyUSB0", - "dsmr_version": "5", - "serial_id": "1234", - "serial_id_gas": "4730303738353635363037343639323231", - }, - options={ - "time_between_update": 0, - }, - ) - - mock_entry.add_to_hass(hass) - - old_unique_id = "4730303738353635363037343639323231_hourly_gas_meter_reading" - - device = device_registry.async_get_or_create( - config_entry_id=mock_entry.entry_id, - identifiers={(DOMAIN, mock_entry.entry_id)}, - name="Gas Meter", - ) - await hass.async_block_till_done() - - entity: er.RegistryEntry = entity_registry.async_get_or_create( - suggested_object_id="gas_meter_reading", - disabled_by=None, - domain=SENSOR_DOMAIN, - platform=DOMAIN, - device_id=device.id, - unique_id=old_unique_id, - config_entry=mock_entry, - ) - assert entity.unique_id == old_unique_id - await hass.async_block_till_done() - - telegram = Telegram() - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 1), [{"value": "003", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 1), - [{"value": "4730303738353635363037343639323231", "unit": ""}], - ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 1), - [ - {"value": datetime.datetime.fromtimestamp(1722749707)}, - {"value": Decimal(778.963), "unit": "m3"}, - ], - ), - "MBUS_METER_READING", - ) - - assert await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() - - telegram_callback = connection_factory.call_args_list[0][0][2] - - # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser - telegram_callback(telegram) - - # after receiving telegram entities need to have the chance to be created - await hass.async_block_till_done() - - dev_entities = er.async_entries_for_device( - entity_registry, device.id, include_disabled_entities=True - ) - assert not dev_entities - - assert ( - entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id) - is None - ) - assert ( - entity_registry.async_get_entity_id( - SENSOR_DOMAIN, DOMAIN, "4730303738353635363037343639323231" - ) - == "sensor.gas_meter_reading" - ) - - -async def test_migrate_gas_with_devid_to_mbus( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device_registry: dr.DeviceRegistry, - dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], -) -> None: - """Test migration of unique_id.""" - (connection_factory, transport, protocol) = dsmr_connection_fixture - - mock_entry = MockConfigEntry( - domain=DOMAIN, - unique_id="/dev/ttyUSB0", - data={ - "port": "/dev/ttyUSB0", - "dsmr_version": "5B", - "serial_id": "1234", - "serial_id_gas": "37464C4F32313139303333373331", - }, - options={ - "time_between_update": 0, - }, - ) - - mock_entry.add_to_hass(hass) - - old_unique_id = "37464C4F32313139303333373331_belgium_5min_gas_meter_reading" - - device = device_registry.async_get_or_create( - config_entry_id=mock_entry.entry_id, - identifiers={(DOMAIN, "37464C4F32313139303333373331")}, - name="Gas Meter", - ) - await 
hass.async_block_till_done() - - entity: er.RegistryEntry = entity_registry.async_get_or_create( - suggested_object_id="gas_meter_reading", - disabled_by=None, - domain=SENSOR_DOMAIN, - platform=DOMAIN, - device_id=device.id, - unique_id=old_unique_id, - config_entry=mock_entry, - ) - assert entity.unique_id == old_unique_id - await hass.async_block_till_done() - - telegram = Telegram() - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 1), [{"value": "003", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 1), - [{"value": "37464C4F32313139303333373331", "unit": ""}], - ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 1), - [ - {"value": datetime.datetime.fromtimestamp(1551642213)}, - {"value": Decimal(745.695), "unit": "m3"}, - ], - ), - "MBUS_METER_READING", - ) - - assert await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() - - telegram_callback = connection_factory.call_args_list[0][0][2] - - # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser - telegram_callback(telegram) - - # after receiving telegram entities need to have the chance to be created - await hass.async_block_till_done() - - assert ( - entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id) - is None - ) - assert ( - entity_registry.async_get_entity_id( - SENSOR_DOMAIN, DOMAIN, "37464C4F32313139303333373331" - ) - == "sensor.gas_meter_reading" - ) - - async def test_migrate_gas_to_mbus_exists( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -377,31 +173,22 @@ async def test_migrate_gas_to_mbus_exists( ) await hass.async_block_till_done() - telegram = Telegram() - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 0), [{"value": "003", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 1), + telegram = { + BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 1), + BELGIUM_MBUS1_METER_READING2: MBusObject( + BELGIUM_MBUS1_METER_READING2, [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - "MBUS_METER_READING", - ) + } assert await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/dsmr/test_sensor.py b/tests/components/dsmr/test_sensor.py index 4a2951f4ed8..435594d4eef 100644 --- a/tests/components/dsmr/test_sensor.py +++ b/tests/components/dsmr/test_sensor.py @@ -11,24 +11,35 @@ from decimal import Decimal from itertools import chain, repeat from unittest.mock import DEFAULT, MagicMock -from dsmr_parser import obis_references from dsmr_parser.obis_references import ( BELGIUM_CURRENT_AVERAGE_DEMAND, BELGIUM_MAXIMUM_DEMAND_MONTH, + BELGIUM_MBUS1_DEVICE_TYPE, + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS1_METER_READING1, + BELGIUM_MBUS1_METER_READING2, + BELGIUM_MBUS2_DEVICE_TYPE, + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS2_METER_READING1, + BELGIUM_MBUS2_METER_READING2, + BELGIUM_MBUS3_DEVICE_TYPE, + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS3_METER_READING1, + BELGIUM_MBUS3_METER_READING2, + BELGIUM_MBUS4_DEVICE_TYPE, + 
BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS4_METER_READING1, + BELGIUM_MBUS4_METER_READING2, CURRENT_ELECTRICITY_USAGE, ELECTRICITY_ACTIVE_TARIFF, ELECTRICITY_EXPORTED_TOTAL, ELECTRICITY_IMPORTED_TOTAL, GAS_METER_READING, HOURLY_GAS_METER_READING, - MBUS_DEVICE_TYPE, - MBUS_EQUIPMENT_IDENTIFIER, - MBUS_METER_READING, ) -from dsmr_parser.objects import CosemObject, MBusObject, Telegram +from dsmr_parser.objects import CosemObject, MBusObject import pytest -from homeassistant.components.dsmr.sensor import SENSORS, SENSORS_MBUS_DEVICE_TYPE from homeassistant.components.sensor import ( ATTR_OPTIONS, ATTR_STATE_CLASS, @@ -69,31 +80,22 @@ async def test_default_setup( "time_between_update": 0, } - telegram = Telegram() - telegram.add( - CURRENT_ELECTRICITY_USAGE, - CosemObject( - (0, 0), + telegram = { + CURRENT_ELECTRICITY_USAGE: CosemObject( + CURRENT_ELECTRICITY_USAGE, [{"value": Decimal("0.0"), "unit": UnitOfPower.WATT}], ), - "CURRENT_ELECTRICITY_USAGE", - ) - telegram.add( - ELECTRICITY_ACTIVE_TARIFF, - CosemObject((0, 0), [{"value": "0001", "unit": ""}]), - "ELECTRICITY_ACTIVE_TARIFF", - ) - telegram.add( - GAS_METER_READING, - MBusObject( - (0, 0), + ELECTRICITY_ACTIVE_TARIFF: CosemObject( + ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] + ), + GAS_METER_READING: MBusObject( + GAS_METER_READING, [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": UnitOfVolume.CUBIC_METERS}, ], ), - "GAS_METER_READING", - ) + } mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -132,31 +134,22 @@ async def test_default_setup( ) assert power_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "W" - telegram = Telegram() - telegram.add( - CURRENT_ELECTRICITY_USAGE, - CosemObject( - (0, 0), + telegram = { + CURRENT_ELECTRICITY_USAGE: CosemObject( + CURRENT_ELECTRICITY_USAGE, [{"value": Decimal("35.0"), "unit": UnitOfPower.WATT}], ), - "CURRENT_ELECTRICITY_USAGE", - ) - telegram.add( - ELECTRICITY_ACTIVE_TARIFF, - CosemObject((0, 0), [{"value": "0001", "unit": ""}]), - "ELECTRICITY_ACTIVE_TARIFF", - ) - telegram.add( - GAS_METER_READING, - MBusObject( - (0, 0), + ELECTRICITY_ACTIVE_TARIFF: CosemObject( + ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] + ), + GAS_METER_READING: MBusObject( + GAS_METER_READING, [ {"value": datetime.datetime.fromtimestamp(1551642214)}, {"value": Decimal(745.701), "unit": UnitOfVolume.CUBIC_METERS}, ], ), - "GAS_METER_READING", - ) + } # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser telegram_callback(telegram) @@ -216,20 +209,15 @@ async def test_setup_only_energy( "time_between_update": 0, } - telegram = Telegram() - telegram.add( - CURRENT_ELECTRICITY_USAGE, - CosemObject( - (0, 0), + telegram = { + CURRENT_ELECTRICITY_USAGE: CosemObject( + CURRENT_ELECTRICITY_USAGE, [{"value": Decimal("35.0"), "unit": UnitOfPower.WATT}], ), - "CURRENT_ELECTRICITY_USAGE", - ) - telegram.add( - ELECTRICITY_ACTIVE_TARIFF, - CosemObject((0, 0), [{"value": "0001", "unit": ""}]), - "ELECTRICITY_ACTIVE_TARIFF", - ) + ELECTRICITY_ACTIVE_TARIFF: CosemObject( + ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] + ), + } mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -272,23 +260,18 @@ async def test_v4_meter( "time_between_update": 0, } - telegram = Telegram() - telegram.add( - HOURLY_GAS_METER_READING, - MBusObject( - (0, 0), + telegram = { + HOURLY_GAS_METER_READING: 
MBusObject( + HOURLY_GAS_METER_READING, [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - "HOURLY_GAS_METER_READING", - ) - telegram.add( - ELECTRICITY_ACTIVE_TARIFF, - CosemObject((0, 0), [{"value": "0001", "unit": ""}]), - "ELECTRICITY_ACTIVE_TARIFF", - ) + ELECTRICITY_ACTIVE_TARIFF: CosemObject( + ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] + ), + } mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -360,23 +343,18 @@ async def test_v5_meter( "time_between_update": 0, } - telegram = Telegram() - telegram.add( - HOURLY_GAS_METER_READING, - MBusObject( - (0, 0), + telegram = { + HOURLY_GAS_METER_READING: MBusObject( + HOURLY_GAS_METER_READING, [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": value, "unit": "m3"}, ], ), - "HOURLY_GAS_METER_READING", - ) - telegram.add( - ELECTRICITY_ACTIVE_TARIFF, - CosemObject((0, 0), [{"value": "0001", "unit": ""}]), - "ELECTRICITY_ACTIVE_TARIFF", - ) + ELECTRICITY_ACTIVE_TARIFF: CosemObject( + ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] + ), + } mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -433,34 +411,23 @@ async def test_luxembourg_meter( "time_between_update": 0, } - telegram = Telegram() - telegram.add( - HOURLY_GAS_METER_READING, - MBusObject( - (0, 0), + telegram = { + HOURLY_GAS_METER_READING: MBusObject( + HOURLY_GAS_METER_READING, [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - "HOURLY_GAS_METER_READING", - ) - telegram.add( - ELECTRICITY_IMPORTED_TOTAL, - CosemObject( - (0, 0), + ELECTRICITY_IMPORTED_TOTAL: CosemObject( + ELECTRICITY_IMPORTED_TOTAL, [{"value": Decimal(123.456), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - "ELECTRICITY_IMPORTED_TOTAL", - ) - telegram.add( - ELECTRICITY_EXPORTED_TOTAL, - CosemObject( - (0, 0), + ELECTRICITY_EXPORTED_TOTAL: CosemObject( + ELECTRICITY_EXPORTED_TOTAL, [{"value": Decimal(654.321), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - "ELECTRICITY_EXPORTED_TOTAL", - ) + } mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -528,127 +495,78 @@ async def test_belgian_meter( "time_between_update": 0, } - telegram = Telegram() - telegram.add( - BELGIUM_CURRENT_AVERAGE_DEMAND, - CosemObject( - (0, 0), + telegram = { + BELGIUM_CURRENT_AVERAGE_DEMAND: CosemObject( + BELGIUM_CURRENT_AVERAGE_DEMAND, [{"value": Decimal(1.75), "unit": "kW"}], ), - "BELGIUM_CURRENT_AVERAGE_DEMAND", - ) - telegram.add( - BELGIUM_MAXIMUM_DEMAND_MONTH, - MBusObject( - (0, 0), + BELGIUM_MAXIMUM_DEMAND_MONTH: MBusObject( + BELGIUM_MAXIMUM_DEMAND_MONTH, [ {"value": datetime.datetime.fromtimestamp(1551642218)}, {"value": Decimal(4.11), "unit": "kW"}, ], ), - "BELGIUM_MAXIMUM_DEMAND_MONTH", - ) - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 1), [{"value": "003", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 1), + BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 1), + BELGIUM_MBUS1_METER_READING2: MBusObject( + 
BELGIUM_MBUS1_METER_READING2, [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - "MBUS_METER_READING", - ) - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 2), [{"value": "007", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 2), + BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "007", "unit": ""}] + ), + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 2), + BELGIUM_MBUS2_METER_READING1: MBusObject( + BELGIUM_MBUS2_METER_READING1, [ {"value": datetime.datetime.fromtimestamp(1551642214)}, {"value": Decimal(678.695), "unit": "m3"}, ], ), - "MBUS_METER_READING", - ) - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 3), [{"value": "003", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 3), + BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 3), + BELGIUM_MBUS3_METER_READING2: MBusObject( + BELGIUM_MBUS3_METER_READING2, [ {"value": datetime.datetime.fromtimestamp(1551642215)}, {"value": Decimal(12.12), "unit": "m3"}, ], ), - "MBUS_METER_READING", - ) - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 4), [{"value": "007", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 4), + BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "007", "unit": ""}] + ), + BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, [{"value": "37464C4F32313139303333373334", "unit": ""}], ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 4), + BELGIUM_MBUS4_METER_READING1: MBusObject( + BELGIUM_MBUS4_METER_READING1, [ {"value": datetime.datetime.fromtimestamp(1551642216)}, {"value": Decimal(13.13), "unit": "m3"}, ], ), - "MBUS_METER_READING", - ) - telegram.add( - ELECTRICITY_ACTIVE_TARIFF, - CosemObject((0, 0), [{"value": "0001", "unit": ""}]), - "ELECTRICITY_ACTIVE_TARIFF", - ) + ELECTRICITY_ACTIVE_TARIFF: CosemObject( + ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] + ), + } mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -762,103 +680,64 @@ async def test_belgian_meter_alt( "time_between_update": 0, } - telegram = Telegram() - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 1), [{"value": "007", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 1), + telegram = { + BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "007", "unit": ""}] + ), + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 1), + BELGIUM_MBUS1_METER_READING1: MBusObject( + BELGIUM_MBUS1_METER_READING1, [ {"value": 
datetime.datetime.fromtimestamp(1551642215)}, {"value": Decimal(123.456), "unit": "m3"}, ], ), - "MBUS_METER_READING", - ) - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 2), [{"value": "003", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 2), + BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 2), + BELGIUM_MBUS2_METER_READING2: MBusObject( + BELGIUM_MBUS2_METER_READING2, [ {"value": datetime.datetime.fromtimestamp(1551642216)}, {"value": Decimal(678.901), "unit": "m3"}, ], ), - "MBUS_METER_READING", - ) - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 3), [{"value": "007", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 3), + BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "007", "unit": ""}] + ), + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 3), + BELGIUM_MBUS3_METER_READING1: MBusObject( + BELGIUM_MBUS3_METER_READING1, [ {"value": datetime.datetime.fromtimestamp(1551642217)}, {"value": Decimal(12.12), "unit": "m3"}, ], ), - "MBUS_METER_READING", - ) - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 4), [{"value": "003", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 4), + BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, [{"value": "37464C4F32313139303333373334", "unit": ""}], ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 4), + BELGIUM_MBUS4_METER_READING2: MBusObject( + BELGIUM_MBUS4_METER_READING2, [ {"value": datetime.datetime.fromtimestamp(1551642218)}, {"value": Decimal(13.13), "unit": "m3"}, ], ), - "MBUS_METER_READING", - ) + } mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -950,78 +829,49 @@ async def test_belgian_meter_mbus( "time_between_update": 0, } - telegram = Telegram() - telegram.add( - ELECTRICITY_ACTIVE_TARIFF, - CosemObject((0, 0), [{"value": "0003", "unit": ""}]), - "ELECTRICITY_ACTIVE_TARIFF", - ) - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 1), [{"value": "006", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 1), + telegram = { + ELECTRICITY_ACTIVE_TARIFF: CosemObject( + ELECTRICITY_ACTIVE_TARIFF, [{"value": "0003", "unit": ""}] + ), + BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "006", "unit": ""}] + ), + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 2), [{"value": "003", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 2), + BELGIUM_MBUS2_DEVICE_TYPE: 
CosemObject( + BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 3), [{"value": "007", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_EQUIPMENT_IDENTIFIER, - CosemObject( - (0, 3), + BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "007", "unit": ""}] + ), + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - "MBUS_EQUIPMENT_IDENTIFIER", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 3), + BELGIUM_MBUS3_METER_READING2: MBusObject( + BELGIUM_MBUS3_METER_READING2, [ {"value": datetime.datetime.fromtimestamp(1551642217)}, {"value": Decimal(12.12), "unit": "m3"}, ], ), - "MBUS_METER_READING", - ) - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 4), [{"value": "007", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - MBUS_METER_READING, - MBusObject( - (0, 4), + BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "007", "unit": ""}] + ), + BELGIUM_MBUS4_METER_READING1: MBusObject( + BELGIUM_MBUS4_METER_READING1, [ {"value": datetime.datetime.fromtimestamp(1551642218)}, {"value": Decimal(13.13), "unit": "m3"}, ], ), - "MBUS_METER_READING", - ) + } mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -1044,32 +894,20 @@ async def test_belgian_meter_mbus( active_tariff = hass.states.get("sensor.electricity_meter_active_tariff") assert active_tariff.state == "unknown" - # check if gas consumption mbus1 is parsed correctly + # check if gas consumption mbus2 is parsed correctly gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption") assert gas_consumption is None - # check if gas consumption mbus2 is parsed correctly + # check if water usage mbus3 is parsed correctly + water_consumption = hass.states.get("sensor.water_meter_water_consumption_2") + assert water_consumption is None + + # check if gas consumption mbus4 is parsed correctly gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption_2") assert gas_consumption is None - # check if water usage mbus3 is parsed correctly - water_consumption = hass.states.get("sensor.water_meter_water_consumption") - assert water_consumption - assert water_consumption.state == "12.12" - assert ( - water_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER - ) - assert ( - water_consumption.attributes.get(ATTR_STATE_CLASS) - == SensorStateClass.TOTAL_INCREASING - ) - assert ( - water_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == UnitOfVolume.CUBIC_METERS - ) - # check if gas consumption mbus4 is parsed correctly - water_consumption = hass.states.get("sensor.water_meter_water_consumption_2") + water_consumption = hass.states.get("sensor.water_meter_water_consumption") assert water_consumption.state == "13.13" assert ( water_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER @@ -1100,12 +938,11 @@ async def test_belgian_meter_low( "time_between_update": 0, } - telegram = Telegram() - telegram.add( - ELECTRICITY_ACTIVE_TARIFF, - CosemObject((0, 0), [{"value": "0002", "unit": ""}]), - "ELECTRICITY_ACTIVE_TARIFF", - ) + telegram = { + ELECTRICITY_ACTIVE_TARIFF: CosemObject( + 
ELECTRICITY_ACTIVE_TARIFF, [{"value": "0002", "unit": ""}] + ) + } mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -1149,23 +986,16 @@ async def test_swedish_meter( "time_between_update": 0, } - telegram = Telegram() - telegram.add( - ELECTRICITY_IMPORTED_TOTAL, - CosemObject( - (0, 0), + telegram = { + ELECTRICITY_IMPORTED_TOTAL: CosemObject( + ELECTRICITY_IMPORTED_TOTAL, [{"value": Decimal(123.456), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - "ELECTRICITY_IMPORTED_TOTAL", - ) - telegram.add( - ELECTRICITY_EXPORTED_TOTAL, - CosemObject( - (0, 0), + ELECTRICITY_EXPORTED_TOTAL: CosemObject( + ELECTRICITY_EXPORTED_TOTAL, [{"value": Decimal(654.321), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - "ELECTRICITY_EXPORTED_TOTAL", - ) + } mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -1224,23 +1054,16 @@ async def test_easymeter( "time_between_update": 0, } - telegram = Telegram() - telegram.add( - ELECTRICITY_IMPORTED_TOTAL, - CosemObject( - (0, 0), + telegram = { + ELECTRICITY_IMPORTED_TOTAL: CosemObject( + ELECTRICITY_IMPORTED_TOTAL, [{"value": Decimal(54184.6316), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - "ELECTRICITY_IMPORTED_TOTAL", - ) - telegram.add( - ELECTRICITY_EXPORTED_TOTAL, - CosemObject( - (0, 0), + ELECTRICITY_EXPORTED_TOTAL: CosemObject( + ELECTRICITY_EXPORTED_TOTAL, [{"value": Decimal(19981.1069), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - "ELECTRICITY_EXPORTED_TOTAL", - ) + } mock_entry = MockConfigEntry( domain="dsmr", @@ -1399,20 +1222,15 @@ async def test_reconnect( "time_between_update": 0, } - telegram = Telegram() - telegram.add( - CURRENT_ELECTRICITY_USAGE, - CosemObject( - (0, 0), + telegram = { + CURRENT_ELECTRICITY_USAGE: CosemObject( + CURRENT_ELECTRICITY_USAGE, [{"value": Decimal("35.0"), "unit": UnitOfPower.WATT}], ), - "CURRENT_ELECTRICITY_USAGE", - ) - telegram.add( - ELECTRICITY_ACTIVE_TARIFF, - CosemObject((0, 0), [{"value": "0001", "unit": ""}]), - "ELECTRICITY_ACTIVE_TARIFF", - ) + ELECTRICITY_ACTIVE_TARIFF: CosemObject( + ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] + ), + } # mock waiting coroutine while connection lasts closed = asyncio.Event() @@ -1482,18 +1300,15 @@ async def test_gas_meter_providing_energy_reading( "time_between_update": 0, } - telegram = Telegram() - telegram.add( - GAS_METER_READING, - MBusObject( - (0, 0), + telegram = { + GAS_METER_READING: MBusObject( + GAS_METER_READING, [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(123.456), "unit": UnitOfEnergy.GIGA_JOULE}, ], ), - "GAS_METER_READING", - ) + } mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -1519,81 +1334,3 @@ async def test_gas_meter_providing_energy_reading( gas_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.GIGA_JOULE ) - - -async def test_heat_meter_mbus( - hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] -) -> None: - """Test if heat meter reading is correctly parsed.""" - (connection_factory, transport, protocol) = dsmr_connection_fixture - - entry_data = { - "port": "/dev/ttyUSB0", - "dsmr_version": "5", - "serial_id": "1234", - "serial_id_gas": None, - } - entry_options = { - "time_between_update": 0, - } - - telegram = Telegram() - telegram.add( - MBUS_DEVICE_TYPE, - CosemObject((0, 1), [{"value": "004", "unit": ""}]), - "MBUS_DEVICE_TYPE", - ) - telegram.add( - 
MBUS_METER_READING, - MBusObject( - (0, 1), - [ - {"value": datetime.datetime.fromtimestamp(1551642213)}, - {"value": Decimal(745.695), "unit": "GJ"}, - ], - ), - "MBUS_METER_READING", - ) - - mock_entry = MockConfigEntry( - domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options - ) - - hass.loop.set_debug(True) - mock_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() - - telegram_callback = connection_factory.call_args_list[0][0][2] - - # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser - telegram_callback(telegram) - - # after receiving telegram entities need to have the chance to be created - await hass.async_block_till_done() - - # check if gas consumption is parsed correctly - heat_consumption = hass.states.get("sensor.heat_meter_energy") - assert heat_consumption.state == "745.695" - assert ( - heat_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY - ) - assert ( - heat_consumption.attributes.get("unit_of_measurement") - == UnitOfEnergy.GIGA_JOULE - ) - assert ( - heat_consumption.attributes.get(ATTR_STATE_CLASS) - == SensorStateClass.TOTAL_INCREASING - ) - - -def test_all_obis_references_exists() -> None: - """Verify that all attributes exist by name in database.""" - for sensor in SENSORS: - assert hasattr(obis_references, sensor.obis_reference) - - for sensors in SENSORS_MBUS_DEVICE_TYPE.values(): - for sensor in sensors: - assert hasattr(obis_references, sensor.obis_reference) diff --git a/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr b/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr index d407fe2dc5b..c6bc616ffd3 100644 --- a/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr +++ b/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr @@ -7,8 +7,6 @@ 'data': dict({ }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'dsmr_reader', 'entry_id': 'TEST_ENTRY_ID', 'minor_version': 1, diff --git a/tests/components/dsmr_reader/test_diagnostics.py b/tests/components/dsmr_reader/test_diagnostics.py index 793fe1362b0..553efd0b38b 100644 --- a/tests/components/dsmr_reader/test_diagnostics.py +++ b/tests/components/dsmr_reader/test_diagnostics.py @@ -3,7 +3,6 @@ from unittest.mock import patch from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.components.dsmr_reader.const import DOMAIN from homeassistant.core import HomeAssistant @@ -37,4 +36,4 @@ async def test_get_config_entry_diagnostics( diagnostics = await get_diagnostics_for_config_entry( hass, hass_client, config_entry ) - assert diagnostics == snapshot(exclude=props("created_at", "modified_at")) + assert diagnostics == snapshot diff --git a/tests/components/dte_energy_bridge/test_sensor.py b/tests/components/dte_energy_bridge/test_sensor.py index 41d340fae48..244bec4e270 100644 --- a/tests/components/dte_energy_bridge/test_sensor.py +++ b/tests/components/dte_energy_bridge/test_sensor.py @@ -20,7 +20,7 @@ async def test_setup_correct_reading(hass: HomeAssistant) -> None: """Test DTE Energy bridge returns a correct value.""" with requests_mock.Mocker() as mock_req: mock_req.get( - f"http://{DTE_ENERGY_BRIDGE_CONFIG['ip']}/instantaneousdemand", + "http://{}/instantaneousdemand".format(DTE_ENERGY_BRIDGE_CONFIG["ip"]), text=".411 kW", ) assert await async_setup_component( @@ -34,7 +34,7 @@ async def test_setup_incorrect_units_reading(hass: HomeAssistant) -> None: """Test DTE Energy bridge 
handles a value with incorrect units.""" with requests_mock.Mocker() as mock_req: mock_req.get( - f"http://{DTE_ENERGY_BRIDGE_CONFIG['ip']}/instantaneousdemand", + "http://{}/instantaneousdemand".format(DTE_ENERGY_BRIDGE_CONFIG["ip"]), text="411 kW", ) assert await async_setup_component( @@ -48,7 +48,7 @@ async def test_setup_bad_format_reading(hass: HomeAssistant) -> None: """Test DTE Energy bridge handles an invalid value.""" with requests_mock.Mocker() as mock_req: mock_req.get( - f"http://{DTE_ENERGY_BRIDGE_CONFIG['ip']}/instantaneousdemand", + "http://{}/instantaneousdemand".format(DTE_ENERGY_BRIDGE_CONFIG["ip"]), text="411", ) assert await async_setup_component( diff --git a/tests/components/duckdns/test_init.py b/tests/components/duckdns/test_init.py index 313cc91aa18..c06add7156a 100644 --- a/tests/components/duckdns/test_init.py +++ b/tests/components/duckdns/test_init.py @@ -8,6 +8,7 @@ import pytest from homeassistant.components import duckdns from homeassistant.components.duckdns import async_track_time_interval_backoff from homeassistant.core import HomeAssistant +from homeassistant.loader import bind_hass from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow @@ -20,7 +21,8 @@ _LOGGER = logging.getLogger(__name__) INTERVAL = duckdns.INTERVAL -async def async_set_txt(hass: HomeAssistant, txt: str | None) -> None: +@bind_hass +async def async_set_txt(hass, txt): """Set the txt record. Pass in None to remove it. This is a legacy helper method. Do not use it for new tests. diff --git a/tests/components/duke_energy/__init__.py b/tests/components/duke_energy/__init__.py deleted file mode 100644 index 2750d9d806e..00000000000 --- a/tests/components/duke_energy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Duke Energy integration.""" diff --git a/tests/components/duke_energy/conftest.py b/tests/components/duke_energy/conftest.py deleted file mode 100644 index ed4182f450f..00000000000 --- a/tests/components/duke_energy/conftest.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Common fixtures for the Duke Energy tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.duke_energy.const import DOMAIN -from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util - -from tests.common import MockConfigEntry -from tests.typing import RecorderInstanceGenerator - - -@pytest.fixture -async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, -) -> None: - """Set up recorder.""" - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.duke_energy.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> Generator[AsyncMock]: - """Return the default mocked config entry.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_EMAIL: "test@example.com", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - config_entry.add_to_hass(hass) - return config_entry - - -@pytest.fixture -def mock_api() -> Generator[AsyncMock]: - """Mock a successful Duke Energy API.""" - with ( - patch( - "homeassistant.components.duke_energy.config_flow.DukeEnergy", - autospec=True, - ) as mock_api, - patch( - 
"homeassistant.components.duke_energy.coordinator.DukeEnergy", - new=mock_api, - ), - ): - api = mock_api.return_value - api.authenticate.return_value = { - "email": "TEST@EXAMPLE.COM", - "cdp_internal_user_id": "test-username", - } - api.get_meters.return_value = {} - yield api - - -@pytest.fixture -def mock_api_with_meters(mock_api: AsyncMock) -> AsyncMock: - """Mock a successful Duke Energy API with meters.""" - mock_api.get_meters.return_value = { - "123": { - "serialNum": "123", - "serviceType": "ELECTRIC", - "agreementActiveDate": "2000-01-01", - }, - } - mock_api.get_energy_usage.return_value = { - "data": { - dt_util.now(): { - "energy": 1.3, - "temperature": 70, - } - }, - "missing": [], - } - return mock_api diff --git a/tests/components/duke_energy/test_config_flow.py b/tests/components/duke_energy/test_config_flow.py deleted file mode 100644 index 652267c9aac..00000000000 --- a/tests/components/duke_energy/test_config_flow.py +++ /dev/null @@ -1,118 +0,0 @@ -"""Test the Duke Energy config flow.""" - -from unittest.mock import AsyncMock, Mock - -from aiohttp import ClientError, ClientResponseError -import pytest - -from homeassistant import config_entries -from homeassistant.components.duke_energy.const import DOMAIN -from homeassistant.components.recorder import Recorder -from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - - -async def test_user( - hass: HomeAssistant, - recorder_mock: Recorder, - mock_api: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test user config.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - - # test with all provided - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"}, - ) - assert result.get("type") is FlowResultType.CREATE_ENTRY - assert result.get("title") == "test@example.com" - - data = result.get("data") - assert data - assert data[CONF_USERNAME] == "test-username" - assert data[CONF_PASSWORD] == "test-password" - assert data[CONF_EMAIL] == "test@example.com" - - -async def test_abort_if_already_setup( - hass: HomeAssistant, - recorder_mock: Recorder, - mock_api: AsyncMock, - mock_config_entry: AsyncMock, -) -> None: - """Test we abort if the email is already setup.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_USER}, - data={ - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - assert result - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" - - -async def test_abort_if_already_setup_alternate_username( - hass: HomeAssistant, - recorder_mock: Recorder, - mock_api: AsyncMock, - mock_config_entry: AsyncMock, -) -> None: - """Test we abort if the email is already setup.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_USER}, - data={ - CONF_USERNAME: "test@example.com", - CONF_PASSWORD: "test-password", - }, - ) - assert result - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" - - -@pytest.mark.parametrize( - ("side_effect", "expected_error"), - [ - (ClientResponseError(None, None, status=404), 
"invalid_auth"), - (ClientResponseError(None, None, status=500), "cannot_connect"), - (TimeoutError(), "cannot_connect"), - (ClientError(), "cannot_connect"), - (Exception(), "unknown"), - ], -) -async def test_api_errors( - hass: HomeAssistant, - recorder_mock: Recorder, - mock_api: Mock, - side_effect, - expected_error, -) -> None: - """Test the failure scenarios.""" - mock_api.authenticate.side_effect = side_effect - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_USER}, - data={CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"}, - ) - assert result.get("type") is FlowResultType.FORM - assert result.get("errors") == {"base": expected_error} - - mock_api.authenticate.side_effect = None - - # test with all provided - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"}, - ) - assert result.get("type") is FlowResultType.CREATE_ENTRY diff --git a/tests/components/duke_energy/test_coordinator.py b/tests/components/duke_energy/test_coordinator.py deleted file mode 100644 index 77ac9e8c2bf..00000000000 --- a/tests/components/duke_energy/test_coordinator.py +++ /dev/null @@ -1,44 +0,0 @@ -"""Tests for the SolarEdge coordinator services.""" - -from datetime import timedelta -from unittest.mock import Mock, patch - -from freezegun.api import FrozenDateTimeFactory - -from homeassistant.components.recorder import Recorder -from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util - -from tests.common import MockConfigEntry, async_fire_time_changed - - -async def test_update( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_api_with_meters: Mock, - freezer: FrozenDateTimeFactory, - recorder_mock: Recorder, -) -> None: - """Test Coordinator.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert mock_api_with_meters.get_meters.call_count == 1 - # 3 years of data - assert mock_api_with_meters.get_energy_usage.call_count == 37 - - with patch( - "homeassistant.components.duke_energy.coordinator.get_last_statistics", - return_value={ - "duke_energy:electric_123_energy_consumption": [ - {"start": dt_util.now().timestamp()} - ] - }, - ): - freezer.tick(timedelta(hours=12)) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - assert mock_api_with_meters.get_meters.call_count == 2 - # Now have stats, so only one call - assert mock_api_with_meters.get_energy_usage.call_count == 38 diff --git a/tests/components/duotecno/conftest.py b/tests/components/duotecno/conftest.py index 1bdd26bab9c..1b6ba8f65e5 100644 --- a/tests/components/duotecno/conftest.py +++ b/tests/components/duotecno/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the duotecno tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/dwd_weather_warnings/conftest.py b/tests/components/dwd_weather_warnings/conftest.py index 50c0fe51024..40c8bf3cfa0 100644 --- a/tests/components/dwd_weather_warnings/conftest.py +++ b/tests/components/dwd_weather_warnings/conftest.py @@ -1,9 +1,9 @@ """Configuration for Deutscher Wetterdienst (DWD) Weather Warnings tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest +from typing_extensions 
import Generator from homeassistant.components.dwd_weather_warnings.const import ( ADVANCE_WARNING_SENSOR, diff --git a/tests/components/dynalite/common.py b/tests/components/dynalite/common.py index 640b6b3e24f..91458b0aaff 100644 --- a/tests/components/dynalite/common.py +++ b/tests/components/dynalite/common.py @@ -2,11 +2,8 @@ from unittest.mock import AsyncMock, Mock, call, patch -from dynalite_devices_lib.dynalitebase import DynaliteBaseDevice - from homeassistant.components import dynalite from homeassistant.const import ATTR_SERVICE -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -24,14 +21,14 @@ def create_mock_device(platform, spec): return device -async def get_entry_id_from_hass(hass: HomeAssistant) -> str: +async def get_entry_id_from_hass(hass): """Get the config entry id from hass.""" conf_entries = hass.config_entries.async_entries(dynalite.DOMAIN) assert len(conf_entries) == 1 return conf_entries[0].entry_id -async def create_entity_from_device(hass: HomeAssistant, device: DynaliteBaseDevice): +async def create_entity_from_device(hass, device): """Set up the component and platform and create a light based on the device provided.""" host = "1.2.3.4" entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}) @@ -48,7 +45,7 @@ async def create_entity_from_device(hass: HomeAssistant, device: DynaliteBaseDev return mock_dyn_dev.mock_calls[1][2]["update_device_func"] -async def run_service_tests(hass: HomeAssistant, device, platform, services): +async def run_service_tests(hass, device, platform, services): """Run a series of service calls and check that the entity and device behave correctly.""" for cur_item in services: service = cur_item[ATTR_SERVICE] diff --git a/tests/components/dynalite/test_cover.py b/tests/components/dynalite/test_cover.py index ac8dd7b676d..c43d349d184 100644 --- a/tests/components/dynalite/test_cover.py +++ b/tests/components/dynalite/test_cover.py @@ -1,10 +1,8 @@ """Test Dynalite cover.""" -from collections.abc import Callable from unittest.mock import Mock from dynalite_devices_lib.cover import DynaliteTimeCoverWithTiltDevice -from dynalite_devices_lib.dynalitebase import DynaliteBaseDevice import pytest from homeassistant.components.cover import ( @@ -13,9 +11,15 @@ from homeassistant.components.cover import ( ATTR_POSITION, ATTR_TILT_POSITION, CoverDeviceClass, - CoverState, ) -from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + ATTR_FRIENDLY_NAME, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, +) from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError @@ -32,7 +36,7 @@ from tests.common import mock_restore_cache @pytest.fixture -def mock_device() -> Mock: +def mock_device(): """Mock a Dynalite device.""" mock_dev = create_mock_device("cover", DynaliteTimeCoverWithTiltDevice) mock_dev.device_class = CoverDeviceClass.BLIND.value @@ -50,7 +54,7 @@ def mock_device() -> Mock: return mock_dev -async def test_cover_setup(hass: HomeAssistant, mock_device: Mock) -> None: +async def test_cover_setup(hass: HomeAssistant, mock_device) -> None: """Test a successful setup.""" await create_entity_from_device(hass, mock_device) entity_state = hass.states.get("cover.name") @@ -89,7 +93,7 @@ async def test_cover_setup(hass: HomeAssistant, mock_device: Mock) -> None: ) -async def test_cover_without_tilt(hass: HomeAssistant, mock_device: Mock) -> None: +async 
def test_cover_without_tilt(hass: HomeAssistant, mock_device) -> None: """Test a cover with no tilt.""" mock_device.has_tilt = False await create_entity_from_device(hass, mock_device) @@ -102,14 +106,8 @@ async def test_cover_without_tilt(hass: HomeAssistant, mock_device: Mock) -> Non async def check_cover_position( - hass: HomeAssistant, - update_func: Callable[[DynaliteBaseDevice | None], None], - device: Mock, - closing: bool, - opening: bool, - closed: bool, - expected: str, -) -> None: + hass, update_func, device, closing, opening, closed, expected +): """Check that a given position behaves correctly.""" device.is_closing = closing device.is_opening = opening @@ -120,44 +118,42 @@ async def check_cover_position( assert entity_state.state == expected -async def test_cover_positions(hass: HomeAssistant, mock_device: Mock) -> None: +async def test_cover_positions(hass: HomeAssistant, mock_device) -> None: """Test that the state updates in the various positions.""" update_func = await create_entity_from_device(hass, mock_device) await check_cover_position( - hass, update_func, mock_device, True, False, False, CoverState.CLOSING + hass, update_func, mock_device, True, False, False, STATE_CLOSING ) await check_cover_position( - hass, update_func, mock_device, False, True, False, CoverState.OPENING + hass, update_func, mock_device, False, True, False, STATE_OPENING ) await check_cover_position( - hass, update_func, mock_device, False, False, True, CoverState.CLOSED + hass, update_func, mock_device, False, False, True, STATE_CLOSED ) await check_cover_position( - hass, update_func, mock_device, False, False, False, CoverState.OPEN + hass, update_func, mock_device, False, False, False, STATE_OPEN ) -async def test_cover_restore_state(hass: HomeAssistant, mock_device: Mock) -> None: +async def test_cover_restore_state(hass: HomeAssistant, mock_device) -> None: """Test restore from cache.""" mock_restore_cache( hass, - [State("cover.name", CoverState.OPEN, attributes={ATTR_CURRENT_POSITION: 77})], + [State("cover.name", STATE_OPEN, attributes={ATTR_CURRENT_POSITION: 77})], ) await create_entity_from_device(hass, mock_device) mock_device.init_level.assert_called_once_with(77) entity_state = hass.states.get("cover.name") - assert entity_state.state == CoverState.OPEN + assert entity_state.state == STATE_OPEN -async def test_cover_restore_state_bad_cache( - hass: HomeAssistant, mock_device: Mock -) -> None: +async def test_cover_restore_state_bad_cache(hass: HomeAssistant, mock_device) -> None: """Test restore from a cache without the attribute.""" mock_restore_cache( hass, - [State("cover.name", CoverState.OPEN, attributes={"bla bla": 77})], + [State("cover.name", STATE_OPEN, attributes={"bla bla": 77})], ) await create_entity_from_device(hass, mock_device) mock_device.init_level.assert_not_called() entity_state = hass.states.get("cover.name") - assert entity_state.state == CoverState.CLOSED + assert entity_state.state == STATE_CLOSED diff --git a/tests/components/eafm/test_sensor.py b/tests/components/eafm/test_sensor.py index add604167b9..986e1153cac 100644 --- a/tests/components/eafm/test_sensor.py +++ b/tests/components/eafm/test_sensor.py @@ -1,9 +1,6 @@ """Tests for polling measures.""" -from collections.abc import Callable, Coroutine import datetime -from typing import Any -from unittest.mock import AsyncMock import aiohttp import pytest @@ -26,9 +23,7 @@ CONNECTION_EXCEPTIONS = [ ] -async def async_setup_test_fixture( - hass: HomeAssistant, mock_get_station: AsyncMock, initial_value: 
dict[str, Any] -) -> tuple[MockConfigEntry, Callable[[Any], Coroutine[Any, Any, None]]]: +async def async_setup_test_fixture(hass, mock_get_station, initial_value): """Create a dummy config entry for testing polling.""" mock_get_station.return_value = initial_value diff --git a/tests/components/easyenergy/conftest.py b/tests/components/easyenergy/conftest.py index ffe0e36f3d2..96d356b8906 100644 --- a/tests/components/easyenergy/conftest.py +++ b/tests/components/easyenergy/conftest.py @@ -1,11 +1,11 @@ """Fixtures for easyEnergy integration tests.""" -from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch from easyenergy import Electricity, Gas import pytest +from typing_extensions import Generator from homeassistant.components.easyenergy.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/ecobee/common.py b/tests/components/ecobee/common.py index 69d576ce2b5..423b0eee320 100644 --- a/tests/components/ecobee/common.py +++ b/tests/components/ecobee/common.py @@ -5,13 +5,14 @@ from unittest.mock import patch from homeassistant.components.ecobee.const import CONF_REFRESH_TOKEN, DOMAIN from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry async def setup_platform( hass: HomeAssistant, - platforms: str | list[str], + platform: str, ) -> MockConfigEntry: """Set up the ecobee platform.""" mock_entry = MockConfigEntry( @@ -24,9 +25,8 @@ async def setup_platform( ) mock_entry.add_to_hass(hass) - platforms = [platforms] if isinstance(platforms, str) else platforms - - with patch("homeassistant.components.ecobee.PLATFORMS", platforms): - await hass.config_entries.async_setup(mock_entry.entry_id) + with patch("homeassistant.components.ecobee.const.PLATFORMS", [platform]): + assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() + return mock_entry diff --git a/tests/components/ecobee/conftest.py b/tests/components/ecobee/conftest.py index 01f249bea15..d9583e15986 100644 --- a/tests/components/ecobee/conftest.py +++ b/tests/components/ecobee/conftest.py @@ -1,10 +1,10 @@ """Fixtures for tests.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from requests_mock import Mocker +from typing_extensions import Generator from homeassistant.components.ecobee import ECOBEE_API_KEY, ECOBEE_REFRESH_TOKEN diff --git a/tests/components/ecobee/fixtures/ecobee-data.json b/tests/components/ecobee/fixtures/ecobee-data.json index e0e82d68863..b2f336e064d 100644 --- a/tests/components/ecobee/fixtures/ecobee-data.json +++ b/tests/components/ecobee/fixtures/ecobee-data.json @@ -1,7 +1,7 @@ { "thermostatList": [ { - "identifier": "8675309", + "identifier": 8675309, "name": "ecobee", "modelNumber": "athenaSmart", "utcTime": "2022-01-01 10:00:00", @@ -11,32 +11,13 @@ }, "program": { "climates": [ - { - "name": "Home", - "climateRef": "home", - "sensors": [ - { - "name": "ecobee" - } - ] - }, { "name": "Climate1", - "climateRef": "c1", - "sensors": [ - { - "name": "ecobee" - } - ] + "climateRef": "c1" }, { "name": "Climate2", - "climateRef": "c2", - "sensors": [ - { - "name": "ecobee" - } - ] + "climateRef": "c2" } ], "currentClimateRef": "c1" @@ -81,24 +62,6 @@ } ], "remoteSensors": [ - { - "id": "ei:0", - "name": "ecobee", - "type": "thermostat", - "inUse": true, - "capability": [ - { - "id": "1", - "type": "temperature", 
- "value": "782" - }, - { - "id": "2", - "type": "humidity", - "value": "54" - } - ] - }, { "id": "rs:100", "name": "Remote Sensor 1", @@ -160,7 +123,6 @@ "hasHumidifier": true, "humidifierMode": "manual", "hasHeatPump": true, - "compressorProtectionMinTemp": 100, "humidity": "30" }, "equipmentStatus": "fan", @@ -195,25 +157,6 @@ "value": "false" } ] - }, - { - "id": "rs:101", - "name": "Remote Sensor 2", - "type": "ecobee3_remote_sensor", - "code": "VTRK", - "inUse": false, - "capability": [ - { - "id": "1", - "type": "temperature", - "value": "782" - }, - { - "id": "2", - "type": "occupancy", - "value": "false" - } - ] } ] }, diff --git a/tests/components/ecobee/test_climate.py b/tests/components/ecobee/test_climate.py index 403ac4a01ad..ae53132fe46 100644 --- a/tests/components/ecobee/test_climate.py +++ b/tests/components/ecobee/test_climate.py @@ -1,29 +1,26 @@ """The test for the Ecobee thermostat module.""" +import copy from http import HTTPStatus from unittest import mock +from unittest.mock import MagicMock -from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant import const +from homeassistant.components import climate from homeassistant.components.climate import ClimateEntityFeature from homeassistant.components.ecobee.climate import ( - ATTR_PRESET_MODE, - ATTR_SENSOR_LIST, + ECOBEE_AUX_HEAT_ONLY, PRESET_AWAY_INDEFINITELY, Thermostat, ) -from homeassistant.components.ecobee.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, STATE_OFF from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import device_registry as dr +from . import GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP from .common import setup_platform -from tests.common import MockConfigEntry, async_fire_time_changed - ENTITY_ID = "climate.ecobee" @@ -36,18 +33,9 @@ def ecobee_fixture(): "identifier": "abc", "program": { "climates": [ - { - "name": "Climate1", - "climateRef": "c1", - "sensors": [{"name": "Ecobee"}], - }, - { - "name": "Climate2", - "climateRef": "c2", - "sensors": [{"name": "Ecobee"}], - }, - {"name": "Away", "climateRef": "away", "sensors": [{"name": "Ecobee"}]}, - {"name": "Home", "climateRef": "home", "sensors": [{"name": "Ecobee"}]}, + {"name": "Climate1", "climateRef": "c1"}, + {"name": "Climate2", "climateRef": "c2"}, + {"name": "Away", "climateRef": "away"}, ], "currentClimateRef": "c1", }, @@ -80,19 +68,8 @@ def ecobee_fixture(): "endTime": "10:00:00", } ], - "remoteSensors": [ - { - "id": "ei:0", - "name": "Ecobee", - }, - { - "id": "rs2:100", - "name": "Remote Sensor 1", - }, - ], } mock_ecobee = mock.Mock() - mock_ecobee.get = mock.Mock(side_effect=vals.get) mock_ecobee.__getitem__ = mock.Mock(side_effect=vals.__getitem__) mock_ecobee.__setitem__ = mock.Mock(side_effect=vals.__setitem__) return mock_ecobee @@ -107,10 +84,10 @@ def data_fixture(ecobee_fixture): @pytest.fixture(name="thermostat") -def thermostat_fixture(data, hass: HomeAssistant): +def thermostat_fixture(data): """Set up ecobee thermostat object.""" thermostat = data.ecobee.get_thermostat(1) - return Thermostat(data, 1, thermostat, hass) + return Thermostat(data, 1, thermostat) async def test_name(thermostat) -> None: @@ -134,6 +111,25 @@ async def test_aux_heat_not_supported_by_default(hass: HomeAssistant) -> None: ) +async def test_aux_heat_supported_with_heat_pump(hass: HomeAssistant) -> None: + """Aux Heat should be supported if thermostat has heatpump.""" + mock_get_thermostat = 
mock.Mock() + mock_get_thermostat.return_value = GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP + with mock.patch("pyecobee.Ecobee.get_thermostat", mock_get_thermostat): + await setup_platform(hass, const.Platform.CLIMATE) + state = hass.states.get(ENTITY_ID) + assert ( + state.attributes.get(ATTR_SUPPORTED_FEATURES) + == ClimateEntityFeature.PRESET_MODE + | ClimateEntityFeature.FAN_MODE + | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.AUX_HEAT + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TURN_ON + ) + + async def test_current_temperature(ecobee_fixture, thermostat) -> None: """Test current temperature.""" assert thermostat.current_temperature == 30 @@ -199,7 +195,7 @@ async def test_hvac_mode(ecobee_fixture, thermostat) -> None: async def test_hvac_modes(thermostat) -> None: """Test operation list property.""" - assert thermostat.hvac_modes == ["heat_cool", "heat", "cool", "off"] + assert ["heat_cool", "heat", "cool", "off"] == thermostat.hvac_modes async def test_hvac_mode2(ecobee_fixture, thermostat) -> None: @@ -212,63 +208,74 @@ async def test_hvac_mode2(ecobee_fixture, thermostat) -> None: async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: """Test device state attributes property.""" ecobee_fixture["equipmentStatus"] = "heatPump2" - assert thermostat.extra_state_attributes == { + assert { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "heatPump2", - "available_sensors": [], - "active_sensors": [], - } + } == thermostat.extra_state_attributes ecobee_fixture["equipmentStatus"] = "auxHeat2" - assert thermostat.extra_state_attributes == { + assert { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "auxHeat2", - "available_sensors": [], - "active_sensors": [], - } + } == thermostat.extra_state_attributes ecobee_fixture["equipmentStatus"] = "compCool1" - assert thermostat.extra_state_attributes == { + assert { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "compCool1", - "available_sensors": [], - "active_sensors": [], - } + } == thermostat.extra_state_attributes ecobee_fixture["equipmentStatus"] = "" - assert thermostat.extra_state_attributes == { + assert { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "", - "available_sensors": [], - "active_sensors": [], - } + } == thermostat.extra_state_attributes ecobee_fixture["equipmentStatus"] = "Unknown" - assert thermostat.extra_state_attributes == { + assert { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "Unknown", - "available_sensors": [], - "active_sensors": [], - } + } == thermostat.extra_state_attributes ecobee_fixture["program"]["currentClimateRef"] = "c2" - assert thermostat.extra_state_attributes == { + assert { "fan": "off", "climate_mode": "Climate2", "fan_min_on_time": 10, "equipment_running": "Unknown", - "available_sensors": [], - "active_sensors": [], - } + } == thermostat.extra_state_attributes + + +async def test_is_aux_heat_on(hass: HomeAssistant) -> None: + """Test aux heat property is only enabled for auxHeatOnly.""" + mock_get_thermostat = mock.Mock() + mock_get_thermostat.return_value = copy.deepcopy( + GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP + ) + mock_get_thermostat.return_value["settings"]["hvacMode"] = "auxHeatOnly" + with mock.patch("pyecobee.Ecobee.get_thermostat", mock_get_thermostat): + await setup_platform(hass, 
const.Platform.CLIMATE) + state = hass.states.get(ENTITY_ID) + assert state.attributes[climate.ATTR_AUX_HEAT] == "on" + + +async def test_is_aux_heat_off(hass: HomeAssistant) -> None: + """Test aux heat property is only enabled for auxHeatOnly.""" + mock_get_thermostat = mock.Mock() + mock_get_thermostat.return_value = GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP + with mock.patch("pyecobee.Ecobee.get_thermostat", mock_get_thermostat): + await setup_platform(hass, const.Platform.CLIMATE) + state = hass.states.get(ENTITY_ID) + assert state.attributes[climate.ATTR_AUX_HEAT] == "off" async def test_set_temperature(ecobee_fixture, thermostat, data) -> None: @@ -393,6 +400,36 @@ async def test_set_fan_mode_auto(thermostat, data) -> None: ) +async def test_turn_aux_heat_on(hass: HomeAssistant, mock_ecobee: MagicMock) -> None: + """Test when aux heat is set on. This must change the HVAC mode.""" + mock_ecobee.get_thermostat.return_value = GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP + mock_ecobee.thermostats = [GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP] + await setup_platform(hass, const.Platform.CLIMATE) + await hass.services.async_call( + climate.DOMAIN, + climate.SERVICE_SET_AUX_HEAT, + {ATTR_ENTITY_ID: ENTITY_ID, climate.ATTR_AUX_HEAT: True}, + blocking=True, + ) + assert mock_ecobee.set_hvac_mode.call_count == 1 + assert mock_ecobee.set_hvac_mode.call_args == mock.call(0, ECOBEE_AUX_HEAT_ONLY) + + +async def test_turn_aux_heat_off(hass: HomeAssistant, mock_ecobee: MagicMock) -> None: + """Test when aux heat is tuned off. Must change HVAC mode back to last used.""" + mock_ecobee.get_thermostat.return_value = GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP + mock_ecobee.thermostats = [GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP] + await setup_platform(hass, const.Platform.CLIMATE) + await hass.services.async_call( + climate.DOMAIN, + climate.SERVICE_SET_AUX_HEAT, + {ATTR_ENTITY_ID: ENTITY_ID, climate.ATTR_AUX_HEAT: False}, + blocking=True, + ) + assert mock_ecobee.set_hvac_mode.call_count == 1 + assert mock_ecobee.set_hvac_mode.call_args == mock.call(0, "auto") + + async def test_preset_indefinite_away(ecobee_fixture, thermostat) -> None: """Test indefinite away showing correctly, and not as temporary away.""" ecobee_fixture["program"]["currentClimateRef"] = "away" @@ -418,203 +455,3 @@ async def test_set_preset_mode(ecobee_fixture, thermostat, data) -> None: data.ecobee.set_climate_hold.assert_has_calls( [mock.call(1, "away", "indefinite", thermostat.hold_hours())] ) - - -async def test_remote_sensors(hass: HomeAssistant) -> None: - """Test remote sensors.""" - await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) - platform = hass.data[const.Platform.CLIMATE].entities - for entity in platform: - if entity.entity_id == "climate.ecobee": - thermostat = entity - break - - assert thermostat is not None - remote_sensors = thermostat.remote_sensors - - assert sorted(remote_sensors) == sorted(["ecobee", "Remote Sensor 1"]) - - -async def test_remote_sensor_devices( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: - """Test remote sensor devices.""" - await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) - freezer.tick(100) - async_fire_time_changed(hass) - state = hass.states.get(ENTITY_ID) - device_registry = dr.async_get(hass) - for device in device_registry.devices.values(): - if device.name == "Remote Sensor 1": - remote_sensor_1_id = device.id - if device.name == "ecobee": - ecobee_id = device.id - assert sorted(state.attributes.get("available_sensors")) == sorted( - 
[f"Remote Sensor 1 ({remote_sensor_1_id})", f"ecobee ({ecobee_id})"] - ) - - -async def test_active_sensors_in_preset_mode(hass: HomeAssistant) -> None: - """Test active sensors in preset mode property.""" - await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) - platform = hass.data[const.Platform.CLIMATE].entities - for entity in platform: - if entity.entity_id == "climate.ecobee": - thermostat = entity - break - - assert thermostat is not None - remote_sensors = thermostat.active_sensors_in_preset_mode - - assert sorted(remote_sensors) == sorted(["ecobee"]) - - -async def test_active_sensor_devices_in_preset_mode(hass: HomeAssistant) -> None: - """Test active sensor devices in preset mode.""" - await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) - state = hass.states.get(ENTITY_ID) - - assert state.attributes.get("active_sensors") == ["ecobee"] - - -async def test_remote_sensor_ids_names(hass: HomeAssistant) -> None: - """Test getting ids and names_by_user for thermostat.""" - await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) - platform = hass.data[const.Platform.CLIMATE].entities - for entity in platform: - if entity.entity_id == "climate.ecobee": - thermostat = entity - break - - assert thermostat is not None - - remote_sensor_ids_names = thermostat.remote_sensor_ids_names - for id_name in remote_sensor_ids_names: - assert id_name.get("id") is not None - - name_by_user_list = [item["name_by_user"] for item in remote_sensor_ids_names] - assert sorted(name_by_user_list) == sorted(["Remote Sensor 1", "ecobee"]) - - -async def test_set_sensors_used_in_climate(hass: HomeAssistant) -> None: - """Test set sensors used in climate.""" - # Get device_id of remote sensor from the device registry. - await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) - device_registry = dr.async_get(hass) - for device in device_registry.devices.values(): - if device.name == "Remote Sensor 1": - remote_sensor_1_id = device.id - if device.name == "ecobee": - ecobee_id = device.id - if device.name == "Remote Sensor 2": - remote_sensor_2_id = device.id - - entry = MockConfigEntry(domain="test") - entry.add_to_hass(hass) - device_from_other_integration = device_registry.async_get_or_create( - config_entry_id=entry.entry_id, identifiers={("test", "unique")} - ) - - # Test that the function call works in its entirety. - with mock.patch("pyecobee.Ecobee.update_climate_sensors") as mock_sensors: - await hass.services.async_call( - DOMAIN, - "set_sensors_used_in_climate", - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_PRESET_MODE: "Climate1", - ATTR_SENSOR_LIST: [remote_sensor_1_id], - }, - blocking=True, - ) - await hass.async_block_till_done() - mock_sensors.assert_called_once_with(0, "Climate1", sensor_ids=["rs:100"]) - - # Update sensors without preset mode. - with mock.patch("pyecobee.Ecobee.update_climate_sensors") as mock_sensors: - await hass.services.async_call( - DOMAIN, - "set_sensors_used_in_climate", - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_SENSOR_LIST: [remote_sensor_1_id], - }, - blocking=True, - ) - await hass.async_block_till_done() - # `temp` is the preset running because of a hold. - mock_sensors.assert_called_once_with(0, "temp", sensor_ids=["rs:100"]) - - # Check that sensors are not updated when the sent sensors are the currently set sensors. 
- with mock.patch("pyecobee.Ecobee.update_climate_sensors") as mock_sensors: - await hass.services.async_call( - DOMAIN, - "set_sensors_used_in_climate", - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_PRESET_MODE: "Climate1", - ATTR_SENSOR_LIST: [ecobee_id], - }, - blocking=True, - ) - mock_sensors.assert_not_called() - - # Error raised because invalid climate name. - with pytest.raises(ServiceValidationError) as execinfo: - await hass.services.async_call( - DOMAIN, - "set_sensors_used_in_climate", - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_PRESET_MODE: "InvalidClimate", - ATTR_SENSOR_LIST: [remote_sensor_1_id], - }, - blocking=True, - ) - assert execinfo.value.translation_domain == "ecobee" - assert execinfo.value.translation_key == "invalid_preset" - - ## Error raised because invalid sensor. - with pytest.raises(ServiceValidationError) as execinfo: - await hass.services.async_call( - DOMAIN, - "set_sensors_used_in_climate", - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_PRESET_MODE: "Climate1", - ATTR_SENSOR_LIST: ["abcd"], - }, - blocking=True, - ) - assert execinfo.value.translation_domain == "ecobee" - assert execinfo.value.translation_key == "invalid_sensor" - - ## Error raised because sensor not available on device. - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - DOMAIN, - "set_sensors_used_in_climate", - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_PRESET_MODE: "Climate1", - ATTR_SENSOR_LIST: [remote_sensor_2_id], - }, - blocking=True, - ) - - with pytest.raises(ServiceValidationError) as execinfo: - await hass.services.async_call( - DOMAIN, - "set_sensors_used_in_climate", - { - ATTR_ENTITY_ID: ENTITY_ID, - ATTR_PRESET_MODE: "Climate1", - ATTR_SENSOR_LIST: [ - remote_sensor_1_id, - device_from_other_integration.id, - ], - }, - blocking=True, - ) - assert execinfo.value.translation_domain == "ecobee" - assert execinfo.value.translation_key == "sensor_lookup_failed" diff --git a/tests/components/ecobee/test_config_flow.py b/tests/components/ecobee/test_config_flow.py index 5c919ffab5c..20d3dabb1ea 100644 --- a/tests/components/ecobee/test_config_flow.py +++ b/tests/components/ecobee/test_config_flow.py @@ -11,7 +11,6 @@ from homeassistant.components.ecobee.const import ( DATA_ECOBEE_CONFIG, DOMAIN, ) -from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -21,11 +20,12 @@ from tests.common import MockConfigEntry async def test_abort_if_already_setup(hass: HomeAssistant) -> None: """Test we abort if ecobee is already setup.""" + flow = config_flow.EcobeeFlowHandler() + flow.hass = hass + MockConfigEntry(domain=DOMAIN).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) + result = await flow.async_step_user() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" diff --git a/tests/components/ecobee/test_notify.py b/tests/components/ecobee/test_notify.py index ca5e40dbdb1..c66f04c752a 100644 --- a/tests/components/ecobee/test_notify.py +++ b/tests/components/ecobee/test_notify.py @@ -2,11 +2,13 @@ from unittest.mock import MagicMock +from homeassistant.components.ecobee import DOMAIN from homeassistant.components.notify import ( DOMAIN as NOTIFY_DOMAIN, SERVICE_SEND_MESSAGE, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir from .common import setup_platform @@ -32,3 
+34,24 @@ async def test_notify_entity_service( ) await hass.async_block_till_done() mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") + + +async def test_legacy_notify_service( + hass: HomeAssistant, + mock_ecobee: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test the legacy notify service.""" + await setup_platform(hass, NOTIFY_DOMAIN) + + assert hass.services.has_service(NOTIFY_DOMAIN, DOMAIN) + await hass.services.async_call( + NOTIFY_DOMAIN, + DOMAIN, + service_data={"message": "It is too cold!", "target": THERMOSTAT_ID}, + blocking=True, + ) + await hass.async_block_till_done() + mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") + mock_ecobee.send_message.reset_mock() + assert len(issue_registry.issues) == 1 diff --git a/tests/components/ecobee/test_number.py b/tests/components/ecobee/test_number.py index be65b6dbb30..da5c8135a05 100644 --- a/tests/components/ecobee/test_number.py +++ b/tests/components/ecobee/test_number.py @@ -2,48 +2,40 @@ from unittest.mock import patch -from homeassistant.components.number import ( - ATTR_VALUE, - DOMAIN as NUMBER_DOMAIN, - SERVICE_SET_VALUE, -) +from homeassistant.components.number import ATTR_VALUE, DOMAIN, SERVICE_SET_VALUE from homeassistant.const import ATTR_ENTITY_ID, UnitOfTime from homeassistant.core import HomeAssistant from .common import setup_platform -VENTILATOR_MIN_HOME_ID = "number.ecobee_ventilator_minimum_time_home" -VENTILATOR_MIN_AWAY_ID = "number.ecobee_ventilator_minimum_time_away" +VENTILATOR_MIN_HOME_ID = "number.ecobee_ventilator_min_time_home" +VENTILATOR_MIN_AWAY_ID = "number.ecobee_ventilator_min_time_away" THERMOSTAT_ID = 0 async def test_ventilator_min_on_home_attributes(hass: HomeAssistant) -> None: """Test the ventilator number on home attributes are correct.""" - await setup_platform(hass, NUMBER_DOMAIN) + await setup_platform(hass, DOMAIN) state = hass.states.get(VENTILATOR_MIN_HOME_ID) assert state.state == "20" assert state.attributes.get("min") == 0 assert state.attributes.get("max") == 60 assert state.attributes.get("step") == 5 - assert ( - state.attributes.get("friendly_name") == "ecobee Ventilator minimum time home" - ) + assert state.attributes.get("friendly_name") == "ecobee Ventilator min time home" assert state.attributes.get("unit_of_measurement") == UnitOfTime.MINUTES async def test_ventilator_min_on_away_attributes(hass: HomeAssistant) -> None: """Test the ventilator number on away attributes are correct.""" - await setup_platform(hass, NUMBER_DOMAIN) + await setup_platform(hass, DOMAIN) state = hass.states.get(VENTILATOR_MIN_AWAY_ID) assert state.state == "10" assert state.attributes.get("min") == 0 assert state.attributes.get("max") == 60 assert state.attributes.get("step") == 5 - assert ( - state.attributes.get("friendly_name") == "ecobee Ventilator minimum time away" - ) + assert state.attributes.get("friendly_name") == "ecobee Ventilator min time away" assert state.attributes.get("unit_of_measurement") == UnitOfTime.MINUTES @@ -53,10 +45,10 @@ async def test_set_min_time_home(hass: HomeAssistant) -> None: with patch( "homeassistant.components.ecobee.Ecobee.set_ventilator_min_on_time_home" ) as mock_set_min_home_time: - await setup_platform(hass, NUMBER_DOMAIN) + await setup_platform(hass, DOMAIN) await hass.services.async_call( - NUMBER_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: VENTILATOR_MIN_HOME_ID, ATTR_VALUE: target_value}, blocking=True, @@ -71,52 +63,13 @@ async def test_set_min_time_away(hass: 
HomeAssistant) -> None: with patch( "homeassistant.components.ecobee.Ecobee.set_ventilator_min_on_time_away" ) as mock_set_min_away_time: - await setup_platform(hass, NUMBER_DOMAIN) + await setup_platform(hass, DOMAIN) await hass.services.async_call( - NUMBER_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: VENTILATOR_MIN_AWAY_ID, ATTR_VALUE: target_value}, blocking=True, ) await hass.async_block_till_done() mock_set_min_away_time.assert_called_once_with(THERMOSTAT_ID, target_value) - - -COMPRESSOR_MIN_TEMP_ID = "number.ecobee2_compressor_minimum_temperature" - - -async def test_compressor_protection_min_temp_attributes(hass: HomeAssistant) -> None: - """Test the compressor min temp value is correct. - - Ecobee runs in Fahrenheit; the test rig runs in Celsius. Conversions are necessary. - """ - await setup_platform(hass, NUMBER_DOMAIN) - - state = hass.states.get(COMPRESSOR_MIN_TEMP_ID) - assert state.state == "-12.2" - assert ( - state.attributes.get("friendly_name") - == "ecobee2 Compressor minimum temperature" - ) - - -async def test_set_compressor_protection_min_temp(hass: HomeAssistant) -> None: - """Test the number can set minimum compressor operating temp. - - Ecobee runs in Fahrenheit; the test rig runs in Celsius. Conversions are necessary - """ - target_value = 0 - with patch( - "homeassistant.components.ecobee.Ecobee.set_aux_cutover_threshold" - ) as mock_set_compressor_min_temp: - await setup_platform(hass, NUMBER_DOMAIN) - - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: COMPRESSOR_MIN_TEMP_ID, ATTR_VALUE: target_value}, - blocking=True, - ) - await hass.async_block_till_done() - mock_set_compressor_min_temp.assert_called_once_with(1, 32) diff --git a/tests/components/ecobee/test_repairs.py b/tests/components/ecobee/test_repairs.py new file mode 100644 index 00000000000..1473f8eb3a1 --- /dev/null +++ b/tests/components/ecobee/test_repairs.py @@ -0,0 +1,114 @@ +"""Test repairs for Ecobee integration.""" + +from http import HTTPStatus +from unittest.mock import MagicMock + +from homeassistant.components.climate import ( + ATTR_AUX_HEAT, + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_AUX_HEAT, +) +from homeassistant.components.ecobee import DOMAIN +from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN +from homeassistant.components.repairs.issue_handler import ( + async_process_repairs_platforms, +) +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir + +from .common import setup_platform + +from tests.typing import ClientSessionGenerator + +THERMOSTAT_ID = 0 + + +async def test_ecobee_notify_repair_flow( + hass: HomeAssistant, + mock_ecobee: MagicMock, + hass_client: ClientSessionGenerator, + issue_registry: ir.IssueRegistry, +) -> None: + """Test the ecobee notify service repair flow is triggered.""" + await setup_platform(hass, NOTIFY_DOMAIN) + await async_process_repairs_platforms(hass) + + http_client = await hass_client() + + # Simulate legacy service being used + assert hass.services.has_service(NOTIFY_DOMAIN, DOMAIN) + await hass.services.async_call( + NOTIFY_DOMAIN, + DOMAIN, + service_data={"message": "It is too cold!", "target": THERMOSTAT_ID}, + blocking=True, + ) + await hass.async_block_till_done() + mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") + 
mock_ecobee.send_message.reset_mock() + + # Assert the issue is present + assert issue_registry.async_get_issue( + domain="notify", + issue_id=f"migrate_notify_{DOMAIN}_{DOMAIN}", + ) + assert len(issue_registry.issues) == 1 + + url = RepairsFlowIndexView.url + resp = await http_client.post( + url, json={"handler": "notify", "issue_id": f"migrate_notify_{DOMAIN}_{DOMAIN}"} + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data["step_id"] == "confirm" + + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await http_client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data["type"] == "create_entry" + # Test confirm step in repair flow + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue( + domain="notify", + issue_id=f"migrate_notify_{DOMAIN}_{DOMAIN}", + ) + assert len(issue_registry.issues) == 0 + + +async def test_ecobee_aux_heat_repair_flow( + hass: HomeAssistant, + mock_ecobee: MagicMock, + hass_client: ClientSessionGenerator, + issue_registry: ir.IssueRegistry, +) -> None: + """Test the ecobee aux_heat service repair flow is triggered.""" + await setup_platform(hass, CLIMATE_DOMAIN) + await async_process_repairs_platforms(hass) + + ENTITY_ID = "climate.ecobee2" + + # Simulate legacy service being used + assert hass.services.has_service(CLIMATE_DOMAIN, SERVICE_SET_AUX_HEAT) + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_AUX_HEAT, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_AUX_HEAT: True}, + blocking=True, + ) + + # Assert the issue is present + assert issue_registry.async_get_issue( + domain="ecobee", + issue_id="migrate_aux_heat", + ) + assert len(issue_registry.issues) == 1 diff --git a/tests/components/ecobee/test_switch.py b/tests/components/ecobee/test_switch.py index b3c4c4f8296..05cea5a5e9d 100644 --- a/tests/components/ecobee/test_switch.py +++ b/tests/components/ecobee/test_switch.py @@ -8,11 +8,7 @@ from unittest.mock import patch import pytest from homeassistant.components.ecobee.switch import DATE_FORMAT -from homeassistant.components.switch import ( - DOMAIN as SWITCH_DOMAIN, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, -) +from homeassistant.components.switch import DOMAIN, SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -33,7 +29,7 @@ def data_fixture(): async def test_ventilator_20min_attributes(hass: HomeAssistant) -> None: """Test the ventilator switch on home attributes are correct.""" - await setup_platform(hass, SWITCH_DOMAIN) + await setup_platform(hass, DOMAIN) state = hass.states.get(VENTILATOR_20MIN_ID) assert state.state == "off" @@ -46,7 +42,7 @@ async def test_ventilator_20min_when_on(hass: HomeAssistant, data) -> None: datetime.now() + timedelta(days=1) ).strftime(DATE_FORMAT) with mock.patch("pyecobee.Ecobee.get_thermostat", data): - await setup_platform(hass, SWITCH_DOMAIN) + await setup_platform(hass, DOMAIN) state = hass.states.get(VENTILATOR_20MIN_ID) assert state.state == "on" @@ -61,7 +57,7 @@ async def test_ventilator_20min_when_off(hass: HomeAssistant, data) -> None: datetime.now() - timedelta(days=1) ).strftime(DATE_FORMAT) with mock.patch("pyecobee.Ecobee.get_thermostat", data): - await setup_platform(hass, SWITCH_DOMAIN) + await setup_platform(hass, DOMAIN) state = hass.states.get(VENTILATOR_20MIN_ID) assert state.state == "off" @@ -74,7 +70,7 @@ async def 
test_ventilator_20min_when_empty(hass: HomeAssistant, data) -> None: data.return_value["settings"]["ventilatorOffDateTime"] = "" with mock.patch("pyecobee.Ecobee.get_thermostat", data): - await setup_platform(hass, SWITCH_DOMAIN) + await setup_platform(hass, DOMAIN) state = hass.states.get(VENTILATOR_20MIN_ID) assert state.state == "off" @@ -88,10 +84,10 @@ async def test_turn_on_20min_ventilator(hass: HomeAssistant) -> None: with patch( "homeassistant.components.ecobee.Ecobee.set_ventilator_timer" ) as mock_set_20min_ventilator: - await setup_platform(hass, SWITCH_DOMAIN) + await setup_platform(hass, DOMAIN) await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: VENTILATOR_20MIN_ID}, blocking=True, @@ -106,10 +102,10 @@ async def test_turn_off_20min_ventilator(hass: HomeAssistant) -> None: with patch( "homeassistant.components.ecobee.Ecobee.set_ventilator_timer" ) as mock_set_20min_ventilator: - await setup_platform(hass, SWITCH_DOMAIN) + await setup_platform(hass, DOMAIN) await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: VENTILATOR_20MIN_ID}, blocking=True, @@ -118,16 +114,16 @@ async def test_turn_off_20min_ventilator(hass: HomeAssistant) -> None: mock_set_20min_ventilator.assert_called_once_with(THERMOSTAT_ID, False) -DEVICE_ID = "switch.ecobee2_auxiliary_heat_only" +DEVICE_ID = "switch.ecobee2_aux_heat_only" async def test_aux_heat_only_turn_on(hass: HomeAssistant) -> None: """Test the switch can be turned on.""" with patch("pyecobee.Ecobee.set_hvac_mode") as mock_turn_on: - await setup_platform(hass, SWITCH_DOMAIN) + await setup_platform(hass, DOMAIN) await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True, @@ -138,10 +134,10 @@ async def test_aux_heat_only_turn_on(hass: HomeAssistant) -> None: async def test_aux_heat_only_turn_off(hass: HomeAssistant) -> None: """Test the switch can be turned off.""" with patch("pyecobee.Ecobee.set_hvac_mode") as mock_turn_off: - await setup_platform(hass, SWITCH_DOMAIN) + await setup_platform(hass, DOMAIN) await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True, diff --git a/tests/components/ecoforest/conftest.py b/tests/components/ecoforest/conftest.py index 85bfff08bdf..3eb13e58aee 100644 --- a/tests/components/ecoforest/conftest.py +++ b/tests/components/ecoforest/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Ecoforest tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch from pyecoforest.models.device import Alarm, Device, OperationMode, State import pytest +from typing_extensions import Generator from homeassistant.components.ecoforest import DOMAIN from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME diff --git a/tests/components/ecovacs/conftest.py b/tests/components/ecovacs/conftest.py index 22039d6c0bc..8d0033a6bc9 100644 --- a/tests/components/ecovacs/conftest.py +++ b/tests/components/ecovacs/conftest.py @@ -1,6 +1,5 @@ """Common fixtures for the Ecovacs tests.""" -from collections.abc import AsyncGenerator, Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch @@ -10,7 +9,7 @@ from deebot_client.device import Device from deebot_client.exceptions import ApiError from deebot_client.models import Credentials import pytest -from sucks import EventEmitter +from typing_extensions import AsyncGenerator, Generator from 
homeassistant.components.ecovacs import PLATFORMS from homeassistant.components.ecovacs.const import DOMAIN @@ -118,27 +117,6 @@ def mock_mqtt_client(mock_authenticator: Mock) -> Generator[Mock]: yield client -@pytest.fixture -def mock_vacbot(device_fixture: str) -> Generator[Mock]: - """Mock the legacy VacBot.""" - with patch( - "homeassistant.components.ecovacs.controller.VacBot", - autospec=True, - ) as mock: - vacbot = mock.return_value - vacbot.vacuum = load_json_object_fixture( - f"devices/{device_fixture}/device.json", DOMAIN - ) - vacbot.statusEvents = EventEmitter() - vacbot.batteryEvents = EventEmitter() - vacbot.lifespanEvents = EventEmitter() - vacbot.errorEvents = EventEmitter() - vacbot.battery_status = None - vacbot.fan_speed = None - vacbot.components = {} - yield vacbot - - @pytest.fixture def mock_device_execute() -> Generator[AsyncMock]: """Mock the device execute function.""" @@ -176,7 +154,7 @@ async def init_integration( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) + await hass.async_block_till_done() yield mock_config_entry diff --git a/tests/components/ecovacs/fixtures/devices/123/device.json b/tests/components/ecovacs/fixtures/devices/123/device.json deleted file mode 100644 index 07bdf01b156..00000000000 --- a/tests/components/ecovacs/fixtures/devices/123/device.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "did": "E1234567890000000003", - "name": "E1234567890000000003", - "class": "123", - "resource": "atom", - "company": "eco-legacy", - "deviceName": "DEEBOT Slim2 Series", - "icon": "https://portal-ww.ecouser.net/api/pim/file/get/5d2c150dba13eb00013feaae", - "ota": false, - "UILogicId": "ECO_INTL_123", - "materialNo": "110-1639-0102", - "pid": "5cae9b201285190001685977", - "product_category": "DEEBOT", - "model": "Slim2", - "updateInfo": { - "needUpdate": false, - "changeLog": "" - }, - "nick": null, - "homeSort": 9999, - "status": 2, - "otaUpgrade": {} -} diff --git a/tests/components/ecovacs/snapshots/test_button.ambr b/tests/components/ecovacs/snapshots/test_button.ambr index efae8896962..d250a60a35f 100644 --- a/tests/components/ecovacs/snapshots/test_button.ambr +++ b/tests/components/ecovacs/snapshots/test_button.ambr @@ -229,7 +229,7 @@ 'state': '2024-01-01T00:00:00+00:00', }) # --- -# name: test_buttons[yna5x1][button.ozmo_950_reset_side_brush_lifespan:entity-registry] +# name: test_buttons[yna5x1][button.ozmo_950_reset_side_brushes_lifespan:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -241,7 +241,7 @@ 'disabled_by': None, 'domain': 'button', 'entity_category': , - 'entity_id': 'button.ozmo_950_reset_side_brush_lifespan', + 'entity_id': 'button.ozmo_950_reset_side_brushes_lifespan', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -253,7 +253,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Reset side brush lifespan', + 'original_name': 'Reset side brushes lifespan', 'platform': 'ecovacs', 'previous_unique_id': None, 'supported_features': 0, @@ -262,13 +262,13 @@ 'unit_of_measurement': None, }) # --- -# name: test_buttons[yna5x1][button.ozmo_950_reset_side_brush_lifespan:state] +# name: test_buttons[yna5x1][button.ozmo_950_reset_side_brushes_lifespan:state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Ozmo 950 Reset side brush lifespan', + 'friendly_name': 'Ozmo 950 Reset side brushes lifespan', }), 'context': , - 'entity_id': 
'button.ozmo_950_reset_side_brush_lifespan', + 'entity_id': 'button.ozmo_950_reset_side_brushes_lifespan', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/ecovacs/snapshots/test_diagnostics.ambr b/tests/components/ecovacs/snapshots/test_diagnostics.ambr index 38c8a9a5ab9..a4291f9fe25 100644 --- a/tests/components/ecovacs/snapshots/test_diagnostics.ambr +++ b/tests/components/ecovacs/snapshots/test_diagnostics.ambr @@ -8,8 +8,6 @@ 'username': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'ecovacs', 'minor_version': 1, 'options': dict({ @@ -61,8 +59,6 @@ 'username': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'ecovacs', 'minor_version': 1, 'options': dict({ diff --git a/tests/components/ecovacs/snapshots/test_init.ambr b/tests/components/ecovacs/snapshots/test_init.ambr index 9113445cc31..3ce872e7898 100644 --- a/tests/components/ecovacs/snapshots/test_init.ambr +++ b/tests/components/ecovacs/snapshots/test_init.ambr @@ -21,7 +21,6 @@ }), 'manufacturer': 'Ecovacs', 'model': 'DEEBOT OZMO 950 Series', - 'model_id': 'yna5xi', 'name': 'Ozmo 950', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/ecovacs/snapshots/test_number.ambr b/tests/components/ecovacs/snapshots/test_number.ambr index c80132784e1..da8406491b4 100644 --- a/tests/components/ecovacs/snapshots/test_number.ambr +++ b/tests/components/ecovacs/snapshots/test_number.ambr @@ -1,115 +1,4 @@ # serializer version: 1 -# name: test_number_entities[5xu9h3][number.goat_g1_cut_direction:entity-registry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 180, - 'min': 0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.goat_g1_cut_direction', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cut direction', - 'platform': 'ecovacs', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cut_direction', - 'unique_id': '8516fbb1-17f1-4194-0000000_cut_direction', - 'unit_of_measurement': '°', - }) -# --- -# name: test_number_entities[5xu9h3][number.goat_g1_cut_direction:state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Goat G1 Cut direction', - 'max': 180, - 'min': 0, - 'mode': , - 'step': 1.0, - 'unit_of_measurement': '°', - }), - 'context': , - 'entity_id': 'number.goat_g1_cut_direction', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '45', - }) -# --- -# name: test_number_entities[5xu9h3][number.goat_g1_volume:entity-registry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.goat_g1_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ecovacs', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'volume', - 'unique_id': '8516fbb1-17f1-4194-0000000_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_number_entities[5xu9h3][number.goat_g1_volume:state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Goat G1 Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1.0, - }), - 'context': , - 'entity_id': 'number.goat_g1_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3', - }) -# --- # name: test_number_entities[yna5x1][number.ozmo_950_volume:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ecovacs/snapshots/test_sensor.ambr b/tests/components/ecovacs/snapshots/test_sensor.ambr index 659edfde2cf..e2cee3d410f 100644 --- a/tests/components/ecovacs/snapshots/test_sensor.ambr +++ b/tests/components/ecovacs/snapshots/test_sensor.ambr @@ -1,152 +1,4 @@ # serializer version: 1 -# name: test_legacy_sensors[123][sensor.e1234567890000000003_filter_lifespan:entity-registry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.e1234567890000000003_filter_lifespan', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Filter lifespan', - 'platform': 'ecovacs', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifespan_filter', - 'unique_id': 'E1234567890000000003_lifespan_filter', - 'unit_of_measurement': '%', - }) -# --- -# name: test_legacy_sensors[123][sensor.e1234567890000000003_filter_lifespan:state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'E1234567890000000003 Filter lifespan', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.e1234567890000000003_filter_lifespan', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40', - }) -# --- -# name: test_legacy_sensors[123][sensor.e1234567890000000003_main_brush_lifespan:entity-registry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.e1234567890000000003_main_brush_lifespan', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Main brush lifespan', - 'platform': 'ecovacs', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifespan_main_brush', - 'unique_id': 'E1234567890000000003_lifespan_main_brush', - 'unit_of_measurement': '%', - }) -# --- -# name: test_legacy_sensors[123][sensor.e1234567890000000003_main_brush_lifespan:state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'E1234567890000000003 Main brush lifespan', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.e1234567890000000003_main_brush_lifespan', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '80', - }) -# --- -# name: test_legacy_sensors[123][sensor.e1234567890000000003_side_brush_lifespan:entity-registry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.e1234567890000000003_side_brush_lifespan', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Side brush lifespan', - 'platform': 'ecovacs', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifespan_side_brush', - 'unique_id': 'E1234567890000000003_lifespan_side_brush', - 'unit_of_measurement': '%', - }) -# --- -# name: test_legacy_sensors[123][sensor.e1234567890000000003_side_brush_lifespan:state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'E1234567890000000003 Side brush lifespan', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.e1234567890000000003_side_brush_lifespan', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '60', - }) -# --- -# name: test_legacy_sensors[123][states] - list([ - 'sensor.e1234567890000000003_main_brush_lifespan', - 'sensor.e1234567890000000003_side_brush_lifespan', - 'sensor.e1234567890000000003_filter_lifespan', - ]) -# --- # name: test_sensors[5xu9h3][sensor.goat_g1_area_cleaned:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1058,7 +910,7 @@ 'state': '80', }) # --- -# name: test_sensors[yna5x1][sensor.ozmo_950_side_brush_lifespan:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_side_brushes_lifespan:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1070,7 +922,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.ozmo_950_side_brush_lifespan', + 'entity_id': 'sensor.ozmo_950_side_brushes_lifespan', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1082,7 +934,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Side brush lifespan', + 'original_name': 'Side brushes lifespan', 'platform': 'ecovacs', 'previous_unique_id': None, 'supported_features': 0, @@ -1091,14 +943,14 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[yna5x1][sensor.ozmo_950_side_brush_lifespan:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_side_brushes_lifespan:state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Ozmo 950 Side brush lifespan', + 'friendly_name': 'Ozmo 950 Side brushes lifespan', 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.ozmo_950_side_brush_lifespan', + 'entity_id': 'sensor.ozmo_950_side_brushes_lifespan', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/ecovacs/test_button.py b/tests/components/ecovacs/test_button.py index 4b3068f6cda..08d53f3e93d 100644 --- a/tests/components/ecovacs/test_button.py +++ b/tests/components/ecovacs/test_button.py @@ -42,7 +42,7 @@ def platforms() -> Platform | list[Platform]: ResetLifeSpan(LifeSpan.FILTER), ), ( - "button.ozmo_950_reset_side_brush_lifespan", + "button.ozmo_950_reset_side_brushes_lifespan", ResetLifeSpan(LifeSpan.SIDE_BRUSH), ), ], @@ -107,7 +107,7 @@ async def test_buttons( [ "button.ozmo_950_reset_main_brush_lifespan", "button.ozmo_950_reset_filter_lifespan", - "button.ozmo_950_reset_side_brush_lifespan", + "button.ozmo_950_reset_side_brushes_lifespan", ], ), ( diff --git 
a/tests/components/ecovacs/test_config_flow.py b/tests/components/ecovacs/test_config_flow.py index 5bf1144db0b..0a161f88baa 100644 --- a/tests/components/ecovacs/test_config_flow.py +++ b/tests/components/ecovacs/test_config_flow.py @@ -11,23 +11,28 @@ from deebot_client.mqtt_client import create_mqtt_config import pytest from homeassistant.components.ecovacs.const import ( + CONF_CONTINENT, CONF_OVERRIDE_MQTT_URL, CONF_OVERRIDE_REST_URL, CONF_VERIFY_MQTT_CERTIFICATE, DOMAIN, InstanceMode, ) -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_MODE, CONF_USERNAME -from homeassistant.core import HomeAssistant +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import CONF_COUNTRY, CONF_MODE, CONF_USERNAME +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import issue_registry as ir from .const import ( + IMPORT_DATA, VALID_ENTRY_DATA_CLOUD, VALID_ENTRY_DATA_SELF_HOSTED, VALID_ENTRY_DATA_SELF_HOSTED_WITH_VALIDATE_CERT, ) +from tests.common import MockConfigEntry + _USER_STEP_SELF_HOSTED = {CONF_MODE: InstanceMode.SELF_HOSTED} _TEST_FN_AUTH_ARG = "user_input_auth" @@ -298,3 +303,116 @@ async def test_user_flow_self_hosted_error( mock_setup_entry.assert_called() mock_authenticator_authenticate.assert_called() mock_mqtt_client.verify_config.assert_called() + + +async def test_import_flow( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + mock_setup_entry: AsyncMock, + mock_authenticator_authenticate: AsyncMock, + mock_mqtt_client: Mock, +) -> None: + """Test importing yaml config.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=IMPORT_DATA.copy(), + ) + mock_authenticator_authenticate.assert_called() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == VALID_ENTRY_DATA_CLOUD[CONF_USERNAME] + assert result["data"] == VALID_ENTRY_DATA_CLOUD + assert (HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}") in issue_registry.issues + mock_setup_entry.assert_called() + mock_mqtt_client.verify_config.assert_called() + + +async def test_import_flow_already_configured( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test importing yaml config where entry already configured.""" + entry = MockConfigEntry(domain=DOMAIN, data=VALID_ENTRY_DATA_CLOUD) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=IMPORT_DATA.copy(), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert (HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}") in issue_registry.issues + + +@pytest.mark.parametrize("show_advanced_options", [True, False]) +@pytest.mark.parametrize( + ("side_effect", "reason"), + [ + (ClientError, "cannot_connect"), + (InvalidAuthenticationError, "invalid_auth"), + (Exception, "unknown"), + ], +) +async def test_import_flow_error( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + mock_authenticator_authenticate: AsyncMock, + mock_mqtt_client: Mock, + side_effect: Exception, + reason: str, + show_advanced_options: bool, +) -> None: + """Test handling invalid connection.""" + mock_authenticator_authenticate.side_effect = side_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_IMPORT, + 
"show_advanced_options": show_advanced_options, + }, + data=IMPORT_DATA.copy(), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason + assert ( + DOMAIN, + f"deprecated_yaml_import_issue_{reason}", + ) in issue_registry.issues + mock_authenticator_authenticate.assert_called() + + +@pytest.mark.parametrize("show_advanced_options", [True, False]) +@pytest.mark.parametrize( + ("reason", "user_input"), + [ + ("invalid_country_length", IMPORT_DATA | {CONF_COUNTRY: "too_long"}), + ("invalid_country_length", IMPORT_DATA | {CONF_COUNTRY: "a"}), # too short + ("invalid_continent_length", IMPORT_DATA | {CONF_CONTINENT: "too_long"}), + ("invalid_continent_length", IMPORT_DATA | {CONF_CONTINENT: "a"}), # too short + ("continent_not_match", IMPORT_DATA | {CONF_CONTINENT: "AA"}), + ], +) +async def test_import_flow_invalid_data( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + reason: str, + user_input: dict[str, Any], + show_advanced_options: bool, +) -> None: + """Test handling invalid connection.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_IMPORT, + "show_advanced_options": show_advanced_options, + }, + data=user_input, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason + assert ( + DOMAIN, + f"deprecated_yaml_import_issue_{reason}", + ) in issue_registry.issues diff --git a/tests/components/ecovacs/test_diagnostics.py b/tests/components/ecovacs/test_diagnostics.py index 6e4dcd5f677..b025db43cc0 100644 --- a/tests/components/ecovacs/test_diagnostics.py +++ b/tests/components/ecovacs/test_diagnostics.py @@ -28,4 +28,4 @@ async def test_diagnostics( """Test diagnostics.""" assert await get_diagnostics_for_config_entry( hass, hass_client, init_integration - ) == snapshot(exclude=props("entry_id", "created_at", "modified_at")) + ) == snapshot(exclude=props("entry_id")) diff --git a/tests/components/ecovacs/test_init.py b/tests/components/ecovacs/test_init.py index 2185ae4c9eb..27d00a2d023 100644 --- a/tests/components/ecovacs/test_init.py +++ b/tests/components/ecovacs/test_init.py @@ -1,6 +1,7 @@ """Test init of ecovacs.""" -from unittest.mock import Mock, patch +from typing import Any +from unittest.mock import AsyncMock, Mock, patch from deebot_client.exceptions import DeebotError, InvalidAuthenticationError import pytest @@ -11,6 +12,9 @@ from homeassistant.components.ecovacs.controller import EcovacsController from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from homeassistant.setup import async_setup_component + +from .const import IMPORT_DATA from tests.common import MockConfigEntry @@ -84,6 +88,32 @@ async def test_invalid_auth( assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR +@pytest.mark.parametrize( + ("config", "config_entries_expected"), + [ + ({}, 0), + ({DOMAIN: IMPORT_DATA.copy()}, 1), + ], + ids=["no_config", "import_config"], +) +async def test_async_setup_import( + hass: HomeAssistant, + config: dict[str, Any], + config_entries_expected: int, + mock_setup_entry: AsyncMock, + mock_authenticator_authenticate: AsyncMock, + mock_mqtt_client: Mock, +) -> None: + """Test async_setup config import.""" + assert len(hass.config_entries.async_entries(DOMAIN)) == 0 + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done() + assert len(hass.config_entries.async_entries(DOMAIN)) == 
config_entries_expected + assert mock_setup_entry.call_count == config_entries_expected + assert mock_authenticator_authenticate.call_count == config_entries_expected + assert mock_mqtt_client.verify_config.call_count == config_entries_expected + + async def test_devices_in_dr( device_registry: dr.DeviceRegistry, controller: EcovacsController, @@ -99,15 +129,12 @@ async def test_devices_in_dr( assert device_entry == snapshot(name=device.device_info["did"]) -@pytest.mark.usefixtures( - "entity_registry_enabled_by_default", "mock_vacbot", "init_integration" -) +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration") @pytest.mark.parametrize( ("device_fixture", "entities"), [ ("yna5x1", 26), - ("5xu9h3", 25), - ("123", 1), + ("5xu9h3", 24), ], ) async def test_all_entities_loaded( diff --git a/tests/components/ecovacs/test_number.py b/tests/components/ecovacs/test_number.py index a735863d40a..d444d6510a8 100644 --- a/tests/components/ecovacs/test_number.py +++ b/tests/components/ecovacs/test_number.py @@ -3,8 +3,8 @@ from dataclasses import dataclass from deebot_client.command import Command -from deebot_client.commands.json import SetCutDirection, SetVolume -from deebot_client.events import CutDirectionEvent, Event, VolumeEvent +from deebot_client.commands.json import SetVolume +from deebot_client.events import Event, VolumeEvent import pytest from syrupy import SnapshotAssertion @@ -53,23 +53,8 @@ class NumberTestCase: ), ], ), - ( - "5xu9h3", - [ - NumberTestCase( - "number.goat_g1_volume", VolumeEvent(3, 11), "3", 7, SetVolume(7) - ), - NumberTestCase( - "number.goat_g1_cut_direction", - CutDirectionEvent(45), - "45", - 97, - SetCutDirection(97), - ), - ], - ), ], - ids=["yna5x1", "5xu9h3"], + ids=["yna5x1"], ) async def test_number_entities( hass: HomeAssistant, @@ -122,12 +107,8 @@ async def test_number_entities( "yna5x1", ["number.ozmo_950_volume"], ), - ( - "5xu9h3", - ["number.goat_g1_cut_direction", "number.goat_g1_volume"], - ), ], - ids=["yna5x1", "5xu9h3"], + ids=["yna5x1"], ) async def test_disabled_by_default_number_entities( hass: HomeAssistant, entity_registry: er.EntityRegistry, entity_ids: list[str] @@ -144,7 +125,6 @@ async def test_disabled_by_default_number_entities( @pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.parametrize(("device_fixture"), ["yna5x1"]) async def test_volume_maximum( hass: HomeAssistant, controller: EcovacsController, diff --git a/tests/components/ecovacs/test_sensor.py b/tests/components/ecovacs/test_sensor.py index 53c57999776..005d10bffbd 100644 --- a/tests/components/ecovacs/test_sensor.py +++ b/tests/components/ecovacs/test_sensor.py @@ -1,7 +1,5 @@ """Tests for Ecovacs sensors.""" -from unittest.mock import Mock - from deebot_client.event_bus import EventBus from deebot_client.events import ( BatteryEvent, @@ -66,7 +64,7 @@ async def notify_events(hass: HomeAssistant, event_bus: EventBus): "sensor.ozmo_950_wi_fi_ssid", "sensor.ozmo_950_main_brush_lifespan", "sensor.ozmo_950_filter_lifespan", - "sensor.ozmo_950_side_brush_lifespan", + "sensor.ozmo_950_side_brushes_lifespan", "sensor.ozmo_950_error", ], ), @@ -154,34 +152,3 @@ async def test_disabled_by_default_sensors( ), f"Entity registry entry for {entity_id} is missing" assert entry.disabled assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION - - -@pytest.mark.usefixtures( - "entity_registry_enabled_by_default", "mock_vacbot", "init_integration" -) -@pytest.mark.parametrize(("device_fixture"), ["123"]) -async def 
test_legacy_sensors( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - mock_vacbot: Mock, -) -> None: - """Test that sensor entity snapshots match.""" - mock_vacbot.components = {"main_brush": 0.8, "side_brush": 0.6, "filter": 0.4} - mock_vacbot.lifespanEvents.notify("dummy_data") - await hass.async_block_till_done(wait_background_tasks=True) - - states = hass.states.async_entity_ids() - assert snapshot(name="states") == states - - for entity_id in hass.states.async_entity_ids(): - assert (state := hass.states.get(entity_id)), f"State of {entity_id} is missing" - assert snapshot(name=f"{entity_id}:state") == state - - assert (entity_entry := entity_registry.async_get(state.entity_id)) - assert snapshot(name=f"{entity_id}:entity-registry") == entity_entry - - assert entity_entry.device_id - assert (device_entry := device_registry.async_get(entity_entry.device_id)) - assert device_entry.identifiers == {(DOMAIN, "E1234567890000000003")} diff --git a/tests/components/ecovacs/test_services.py b/tests/components/ecovacs/test_services.py index 6fd10cde6d9..973c63782ec 100644 --- a/tests/components/ecovacs/test_services.py +++ b/tests/components/ecovacs/test_services.py @@ -16,7 +16,9 @@ pytestmark = [pytest.mark.usefixtures("init_integration")] @pytest.fixture -def mock_device_execute_response(data: dict[str, Any]) -> Generator[dict[str, Any]]: +def mock_device_execute_response( + data: dict[str, Any], +) -> Generator[dict[str, Any], None, None]: """Mock the device execute function response.""" response = { @@ -71,7 +73,7 @@ def mock_device_execute_response(data: dict[str, Any]) -> Generator[dict[str, An ) async def test_get_positions_service( hass: HomeAssistant, - mock_device_execute_response: dict[str, Any], + mock_device_execute_response: dict[str], entity_id: str, ) -> None: """Test that get_positions service response snapshots match.""" diff --git a/tests/components/edl21/conftest.py b/tests/components/edl21/conftest.py index 1b14e3366d8..b6af4ea9cef 100644 --- a/tests/components/edl21/conftest.py +++ b/tests/components/edl21/conftest.py @@ -1,9 +1,9 @@ """Define test fixtures for EDL21.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/efergy/__init__.py b/tests/components/efergy/__init__.py index 36efa77cf45..d763aaa2fb6 100644 --- a/tests/components/efergy/__init__.py +++ b/tests/components/efergy/__init__.py @@ -4,7 +4,7 @@ from unittest.mock import AsyncMock, patch from pyefergy import exceptions -from homeassistant.components.efergy.const import DOMAIN +from homeassistant.components.efergy import DOMAIN from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/efergy/test_config_flow.py b/tests/components/efergy/test_config_flow.py index 8b77bbdc7ab..9a66c42bc9a 100644 --- a/tests/components/efergy/test_config_flow.py +++ b/tests/components/efergy/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch from pyefergy import exceptions from homeassistant.components.efergy.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE from homeassistant.core import HomeAssistant from 
homeassistant.data_entry_flow import FlowResultType @@ -76,11 +76,20 @@ async def test_flow_user_unknown(hass: HomeAssistant) -> None: async def test_flow_reauth(hass: HomeAssistant) -> None: """Test reauth step.""" entry = create_entry(hass) - result = await entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - with _patch_efergy(), _patch_setup(): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + CONF_SOURCE: SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=CONF_DATA, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + new_conf = {CONF_API_KEY: "1234567890"} result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/electric_kiwi/conftest.py b/tests/components/electric_kiwi/conftest.py index 010efcb7b5f..c9f9c7e04f0 100644 --- a/tests/components/electric_kiwi/conftest.py +++ b/tests/components/electric_kiwi/conftest.py @@ -2,12 +2,13 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable, Generator +from collections.abc import Awaitable, Callable from time import time from unittest.mock import AsyncMock, patch from electrickiwi_api.model import AccountBalance, Hop, HopIntervals import pytest +from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git a/tests/components/electric_kiwi/test_config_flow.py b/tests/components/electric_kiwi/test_config_flow.py index 681320972b5..bf248aafb13 100644 --- a/tests/components/electric_kiwi/test_config_flow.py +++ b/tests/components/electric_kiwi/test_config_flow.py @@ -18,6 +18,7 @@ from homeassistant.components.electric_kiwi.const import ( OAUTH2_TOKEN, SCOPE_VALUES, ) +from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -159,12 +160,16 @@ async def test_reauthentication( setup_credentials: None, ) -> None: """Test Electric Kiwi reauthentication.""" - config_entry.add_to_hass(hass) - result = await config_entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_REAUTH, "entry_id": DOMAIN} + ) + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert "flow_id" in flows[0] + + result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) state = config_entry_oauth2_flow._encode_jwt( hass, @@ -190,7 +195,6 @@ async def test_reauthentication( ) await hass.config_entries.flow.async_configure(result["flow_id"]) - await hass.async_block_till_done() assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/elevenlabs/__init__.py b/tests/components/elevenlabs/__init__.py deleted file mode 100644 index 261286f04f7..00000000000 --- a/tests/components/elevenlabs/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the ElevenLabs integration.""" diff --git a/tests/components/elevenlabs/conftest.py b/tests/components/elevenlabs/conftest.py deleted file mode 100644 index c4d9a87b5ad..00000000000 --- 
a/tests/components/elevenlabs/conftest.py +++ /dev/null @@ -1,65 +0,0 @@ -"""Common fixtures for the ElevenLabs text-to-speech tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -from elevenlabs.core import ApiError -from elevenlabs.types import GetVoicesResponse -import pytest - -from homeassistant.components.elevenlabs.const import CONF_MODEL, CONF_VOICE -from homeassistant.const import CONF_API_KEY - -from .const import MOCK_MODELS, MOCK_VOICES - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.elevenlabs.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_async_client() -> Generator[AsyncMock]: - """Override async ElevenLabs client.""" - client_mock = AsyncMock() - client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) - client_mock.models.get_all.return_value = MOCK_MODELS - with patch( - "elevenlabs.client.AsyncElevenLabs", return_value=client_mock - ) as mock_async_client: - yield mock_async_client - - -@pytest.fixture -def mock_async_client_fail() -> Generator[AsyncMock]: - """Override async ElevenLabs client.""" - with patch( - "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", - return_value=AsyncMock(), - ) as mock_async_client: - mock_async_client.side_effect = ApiError - yield mock_async_client - - -@pytest.fixture -def mock_entry() -> MockConfigEntry: - """Mock a config entry.""" - entry = MockConfigEntry( - domain="elevenlabs", - data={ - CONF_API_KEY: "api_key", - }, - options={CONF_MODEL: "model1", CONF_VOICE: "voice1"}, - ) - entry.models = { - "model1": "model1", - } - - entry.voices = {"voice1": "voice1"} - return entry diff --git a/tests/components/elevenlabs/const.py b/tests/components/elevenlabs/const.py deleted file mode 100644 index e16e1fd1334..00000000000 --- a/tests/components/elevenlabs/const.py +++ /dev/null @@ -1,52 +0,0 @@ -"""Constants for the Testing of the ElevenLabs text-to-speech integration.""" - -from elevenlabs.types import LanguageResponse, Model, Voice - -from homeassistant.components.elevenlabs.const import DEFAULT_MODEL - -MOCK_VOICES = [ - Voice( - voice_id="voice1", - name="Voice 1", - ), - Voice( - voice_id="voice2", - name="Voice 2", - ), -] - -MOCK_MODELS = [ - Model( - model_id="model1", - name="Model 1", - can_do_text_to_speech=True, - languages=[ - LanguageResponse(language_id="en", name="English"), - LanguageResponse(language_id="de", name="German"), - LanguageResponse(language_id="es", name="Spanish"), - LanguageResponse(language_id="ja", name="Japanese"), - ], - ), - Model( - model_id="model2", - name="Model 2", - can_do_text_to_speech=True, - languages=[ - LanguageResponse(language_id="en", name="English"), - LanguageResponse(language_id="de", name="German"), - LanguageResponse(language_id="es", name="Spanish"), - LanguageResponse(language_id="ja", name="Japanese"), - ], - ), - Model( - model_id=DEFAULT_MODEL, - name=DEFAULT_MODEL, - can_do_text_to_speech=True, - languages=[ - LanguageResponse(language_id="en", name="English"), - LanguageResponse(language_id="de", name="German"), - LanguageResponse(language_id="es", name="Spanish"), - LanguageResponse(language_id="ja", name="Japanese"), - ], - ), -] diff --git a/tests/components/elevenlabs/test_config_flow.py b/tests/components/elevenlabs/test_config_flow.py deleted file mode 100644 index 
971fa75939a..00000000000 --- a/tests/components/elevenlabs/test_config_flow.py +++ /dev/null @@ -1,151 +0,0 @@ -"""Test the ElevenLabs text-to-speech config flow.""" - -from unittest.mock import AsyncMock - -from homeassistant.components.elevenlabs.const import ( - CONF_CONFIGURE_VOICE, - CONF_MODEL, - CONF_OPTIMIZE_LATENCY, - CONF_SIMILARITY, - CONF_STABILITY, - CONF_STYLE, - CONF_USE_SPEAKER_BOOST, - CONF_VOICE, - DEFAULT_MODEL, - DEFAULT_OPTIMIZE_LATENCY, - DEFAULT_SIMILARITY, - DEFAULT_STABILITY, - DEFAULT_STYLE, - DEFAULT_USE_SPEAKER_BOOST, - DOMAIN, -) -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_API_KEY -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_user_step( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_async_client: AsyncMock, -) -> None: - """Test user step create entry result.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert not result["errors"] - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_API_KEY: "api_key", - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "ElevenLabs" - assert result["data"] == { - "api_key": "api_key", - } - assert result["options"] == {CONF_MODEL: DEFAULT_MODEL, CONF_VOICE: "voice1"} - - mock_setup_entry.assert_called_once() - - -async def test_invalid_api_key( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_async_client_fail: AsyncMock -) -> None: - """Test user step with invalid api key.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert not result["errors"] - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_API_KEY: "api_key", - }, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] - - mock_setup_entry.assert_not_called() - - -async def test_options_flow_init( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_async_client: AsyncMock, - mock_entry: MockConfigEntry, -) -> None: - """Test options flow init.""" - mock_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init(mock_entry.entry_id) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={CONF_MODEL: "model1", CONF_VOICE: "voice1"}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert mock_entry.options == { - CONF_MODEL: "model1", - CONF_VOICE: "voice1", - } - - mock_setup_entry.assert_called_once() - - -async def test_options_flow_voice_settings_default( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_async_client: AsyncMock, - mock_entry: MockConfigEntry, -) -> None: - """Test options flow voice settings.""" - mock_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init(mock_entry.entry_id) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result = await 
hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - CONF_MODEL: "model1", - CONF_VOICE: "voice1", - CONF_CONFIGURE_VOICE: True, - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "voice_settings" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert mock_entry.options == { - CONF_MODEL: "model1", - CONF_VOICE: "voice1", - CONF_OPTIMIZE_LATENCY: DEFAULT_OPTIMIZE_LATENCY, - CONF_SIMILARITY: DEFAULT_SIMILARITY, - CONF_STABILITY: DEFAULT_STABILITY, - CONF_STYLE: DEFAULT_STYLE, - CONF_USE_SPEAKER_BOOST: DEFAULT_USE_SPEAKER_BOOST, - } diff --git a/tests/components/elevenlabs/test_tts.py b/tests/components/elevenlabs/test_tts.py deleted file mode 100644 index 7151aab10f2..00000000000 --- a/tests/components/elevenlabs/test_tts.py +++ /dev/null @@ -1,450 +0,0 @@ -"""Tests for the ElevenLabs TTS entity.""" - -from __future__ import annotations - -from http import HTTPStatus -from pathlib import Path -from typing import Any -from unittest.mock import AsyncMock, MagicMock, patch - -from elevenlabs.core import ApiError -from elevenlabs.types import GetVoicesResponse, VoiceSettings -import pytest - -from homeassistant.components import tts -from homeassistant.components.elevenlabs.const import ( - CONF_MODEL, - CONF_OPTIMIZE_LATENCY, - CONF_SIMILARITY, - CONF_STABILITY, - CONF_STYLE, - CONF_USE_SPEAKER_BOOST, - CONF_VOICE, - DEFAULT_OPTIMIZE_LATENCY, - DEFAULT_SIMILARITY, - DEFAULT_STABILITY, - DEFAULT_STYLE, - DEFAULT_USE_SPEAKER_BOOST, - DOMAIN, -) -from homeassistant.components.media_player import ( - ATTR_MEDIA_CONTENT_ID, - DOMAIN as DOMAIN_MP, - SERVICE_PLAY_MEDIA, -) -from homeassistant.const import ATTR_ENTITY_ID, CONF_API_KEY -from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.core_config import async_process_ha_core_config - -from .const import MOCK_MODELS, MOCK_VOICES - -from tests.common import MockConfigEntry, async_mock_service -from tests.components.tts.common import retrieve_media -from tests.typing import ClientSessionGenerator - - -@pytest.fixture(autouse=True) -def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: - """Mock writing tags.""" - - -@pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: - """Mock the TTS cache dir with empty dir.""" - - -@pytest.fixture -async def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Mock media player calls.""" - return async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) - - -@pytest.fixture(autouse=True) -async def setup_internal_url(hass: HomeAssistant) -> None: - """Set up internal url.""" - await async_process_ha_core_config( - hass, {"internal_url": "http://example.local:8123"} - ) - - -@pytest.fixture -def mock_similarity(): - """Mock similarity.""" - return DEFAULT_SIMILARITY / 2 - - -@pytest.fixture -def mock_latency(): - """Mock latency.""" - return (DEFAULT_OPTIMIZE_LATENCY + 1) % 5 # 0, 1, 2, 3, 4 - - -@pytest.fixture(name="setup") -async def setup_fixture( - hass: HomeAssistant, - config_data: dict[str, Any], - config_options: dict[str, Any], - config_options_voice: dict[str, Any], - request: pytest.FixtureRequest, - mock_async_client: AsyncMock, -) -> AsyncMock: - """Set up the test environment.""" - if request.param == "mock_config_entry_setup": - await mock_config_entry_setup(hass, config_data, config_options) - elif request.param == 
"mock_config_entry_setup_voice": - await mock_config_entry_setup(hass, config_data, config_options_voice) - else: - raise RuntimeError("Invalid setup fixture") - - await hass.async_block_till_done() - return mock_async_client - - -@pytest.fixture(name="config_data") -def config_data_fixture() -> dict[str, Any]: - """Return config data.""" - return {} - - -@pytest.fixture(name="config_options") -def config_options_fixture() -> dict[str, Any]: - """Return config options.""" - return {} - - -@pytest.fixture(name="config_options_voice") -def config_options_voice_fixture(mock_similarity, mock_latency) -> dict[str, Any]: - """Return config options.""" - return { - CONF_OPTIMIZE_LATENCY: mock_latency, - CONF_SIMILARITY: mock_similarity, - CONF_STABILITY: DEFAULT_STABILITY, - CONF_STYLE: DEFAULT_STYLE, - CONF_USE_SPEAKER_BOOST: DEFAULT_USE_SPEAKER_BOOST, - } - - -async def mock_config_entry_setup( - hass: HomeAssistant, config_data: dict[str, Any], config_options: dict[str, Any] -) -> None: - """Mock config entry setup.""" - default_config_data = { - CONF_API_KEY: "api_key", - } - default_config_options = { - CONF_VOICE: "voice1", - CONF_MODEL: "model1", - } - config_entry = MockConfigEntry( - domain=DOMAIN, - data=default_config_data | config_data, - options=default_config_options | config_options, - ) - config_entry.add_to_hass(hass) - client_mock = AsyncMock() - client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) - client_mock.models.get_all.return_value = MOCK_MODELS - with patch( - "homeassistant.components.elevenlabs.AsyncElevenLabs", return_value=client_mock - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - - -@pytest.mark.parametrize( - "config_data", - [ - {}, - {tts.CONF_LANG: "de"}, - {tts.CONF_LANG: "en"}, - {tts.CONF_LANG: "ja"}, - {tts.CONF_LANG: "es"}, - ], -) -@pytest.mark.parametrize( - ("setup", "tts_service", "service_data"), - [ - ( - "mock_config_entry_setup", - "speak", - { - ATTR_ENTITY_ID: "tts.mock_title", - tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", - tts.ATTR_MESSAGE: "There is a person at the front door.", - tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice2"}, - }, - ), - ], - indirect=["setup"], -) -async def test_tts_service_speak( - setup: AsyncMock, - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - calls: list[ServiceCall], - tts_service: str, - service_data: dict[str, Any], -) -> None: - """Test tts service.""" - tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) - tts_entity._client.generate.reset_mock() - assert tts_entity._voice_settings == VoiceSettings( - stability=DEFAULT_STABILITY, - similarity_boost=DEFAULT_SIMILARITY, - style=DEFAULT_STYLE, - use_speaker_boost=DEFAULT_USE_SPEAKER_BOOST, - ) - - await hass.services.async_call( - tts.DOMAIN, - tts_service, - service_data, - blocking=True, - ) - - assert len(calls) == 1 - assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - == HTTPStatus.OK - ) - - tts_entity._client.generate.assert_called_once_with( - text="There is a person at the front door.", - voice="voice2", - model="model1", - voice_settings=tts_entity._voice_settings, - optimize_streaming_latency=tts_entity._latency, - ) - - -@pytest.mark.parametrize( - ("setup", "tts_service", "service_data"), - [ - ( - "mock_config_entry_setup", - "speak", - { - ATTR_ENTITY_ID: "tts.mock_title", - tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", - tts.ATTR_MESSAGE: "There is a person at the front door.", - 
tts.ATTR_LANGUAGE: "de", - tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice1"}, - }, - ), - ( - "mock_config_entry_setup", - "speak", - { - ATTR_ENTITY_ID: "tts.mock_title", - tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", - tts.ATTR_MESSAGE: "There is a person at the front door.", - tts.ATTR_LANGUAGE: "es", - tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice1"}, - }, - ), - ], - indirect=["setup"], -) -async def test_tts_service_speak_lang_config( - setup: AsyncMock, - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - calls: list[ServiceCall], - tts_service: str, - service_data: dict[str, Any], -) -> None: - """Test service call say with other langcodes in the config.""" - tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) - tts_entity._client.generate.reset_mock() - - await hass.services.async_call( - tts.DOMAIN, - tts_service, - service_data, - blocking=True, - ) - - assert len(calls) == 1 - assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - == HTTPStatus.OK - ) - - tts_entity._client.generate.assert_called_once_with( - text="There is a person at the front door.", - voice="voice1", - model="model1", - voice_settings=tts_entity._voice_settings, - optimize_streaming_latency=tts_entity._latency, - ) - - -@pytest.mark.parametrize( - ("setup", "tts_service", "service_data"), - [ - ( - "mock_config_entry_setup", - "speak", - { - ATTR_ENTITY_ID: "tts.mock_title", - tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", - tts.ATTR_MESSAGE: "There is a person at the front door.", - tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice1"}, - }, - ), - ], - indirect=["setup"], -) -async def test_tts_service_speak_error( - setup: AsyncMock, - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - calls: list[ServiceCall], - tts_service: str, - service_data: dict[str, Any], -) -> None: - """Test service call say with http response 400.""" - tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) - tts_entity._client.generate.reset_mock() - tts_entity._client.generate.side_effect = ApiError - - await hass.services.async_call( - tts.DOMAIN, - tts_service, - service_data, - blocking=True, - ) - - assert len(calls) == 1 - assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - == HTTPStatus.NOT_FOUND - ) - - tts_entity._client.generate.assert_called_once_with( - text="There is a person at the front door.", - voice="voice1", - model="model1", - voice_settings=tts_entity._voice_settings, - optimize_streaming_latency=tts_entity._latency, - ) - - -@pytest.mark.parametrize( - "config_data", - [ - {}, - {tts.CONF_LANG: "de"}, - {tts.CONF_LANG: "en"}, - {tts.CONF_LANG: "ja"}, - {tts.CONF_LANG: "es"}, - ], -) -@pytest.mark.parametrize( - ("setup", "tts_service", "service_data"), - [ - ( - "mock_config_entry_setup_voice", - "speak", - { - ATTR_ENTITY_ID: "tts.mock_title", - tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", - tts.ATTR_MESSAGE: "There is a person at the front door.", - tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice2"}, - }, - ), - ], - indirect=["setup"], -) -async def test_tts_service_speak_voice_settings( - setup: AsyncMock, - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - calls: list[ServiceCall], - tts_service: str, - service_data: dict[str, Any], - mock_similarity: float, - mock_latency: int, -) -> None: - """Test tts service.""" - tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) - tts_entity._client.generate.reset_mock() 
- assert tts_entity._voice_settings == VoiceSettings( - stability=DEFAULT_STABILITY, - similarity_boost=mock_similarity, - style=DEFAULT_STYLE, - use_speaker_boost=DEFAULT_USE_SPEAKER_BOOST, - ) - assert tts_entity._latency == mock_latency - - await hass.services.async_call( - tts.DOMAIN, - tts_service, - service_data, - blocking=True, - ) - - assert len(calls) == 1 - assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - == HTTPStatus.OK - ) - - tts_entity._client.generate.assert_called_once_with( - text="There is a person at the front door.", - voice="voice2", - model="model1", - voice_settings=tts_entity._voice_settings, - optimize_streaming_latency=tts_entity._latency, - ) - - -@pytest.mark.parametrize( - ("setup", "tts_service", "service_data"), - [ - ( - "mock_config_entry_setup", - "speak", - { - ATTR_ENTITY_ID: "tts.mock_title", - tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", - tts.ATTR_MESSAGE: "There is a person at the front door.", - tts.ATTR_OPTIONS: {}, - }, - ), - ], - indirect=["setup"], -) -async def test_tts_service_speak_without_options( - setup: AsyncMock, - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - calls: list[ServiceCall], - tts_service: str, - service_data: dict[str, Any], -) -> None: - """Test service call say with http response 200.""" - tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) - tts_entity._client.generate.reset_mock() - - await hass.services.async_call( - tts.DOMAIN, - tts_service, - service_data, - blocking=True, - ) - - assert len(calls) == 1 - assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - == HTTPStatus.OK - ) - - tts_entity._client.generate.assert_called_once_with( - text="There is a person at the front door.", - voice="voice1", - optimize_streaming_latency=0, - voice_settings=VoiceSettings( - stability=0.5, similarity_boost=0.75, style=0.0, use_speaker_boost=True - ), - model="model1", - ) diff --git a/tests/components/elgato/conftest.py b/tests/components/elgato/conftest.py index 73b09421576..aaaed0dc8da 100644 --- a/tests/components/elgato/conftest.py +++ b/tests/components/elgato/conftest.py @@ -1,10 +1,10 @@ """Fixtures for Elgato integration tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from elgato import BatteryInfo, ElgatoNoBatteryError, Info, Settings, State import pytest +from typing_extensions import Generator from homeassistant.components.elgato.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PORT diff --git a/tests/components/elgato/snapshots/test_button.ambr b/tests/components/elgato/snapshots/test_button.ambr index dcf9d1c87d0..77555c85a06 100644 --- a/tests/components/elgato/snapshots/test_button.ambr +++ b/tests/components/elgato/snapshots/test_button.ambr @@ -72,7 +72,6 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', - 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -155,7 +154,6 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', - 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/elgato/snapshots/test_config_flow.ambr b/tests/components/elgato/snapshots/test_config_flow.ambr index d5d005cff9c..39202d383fa 100644 --- a/tests/components/elgato/snapshots/test_config_flow.ambr +++ b/tests/components/elgato/snapshots/test_config_flow.ambr @@ -24,8 +24,6 @@ 'port': 9123, }), 
'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'elgato', 'entry_id': , 'minor_version': 1, @@ -69,8 +67,6 @@ 'port': 9123, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'elgato', 'entry_id': , 'minor_version': 1, @@ -113,8 +109,6 @@ 'port': 9123, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'elgato', 'entry_id': , 'minor_version': 1, diff --git a/tests/components/elgato/snapshots/test_light.ambr b/tests/components/elgato/snapshots/test_light.ambr index c3ab076ded2..8e2962fc698 100644 --- a/tests/components/elgato/snapshots/test_light.ambr +++ b/tests/components/elgato/snapshots/test_light.ambr @@ -104,7 +104,6 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light', - 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -221,7 +220,6 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Light Strip', - 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -338,7 +336,6 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Light Strip', - 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/elgato/snapshots/test_sensor.ambr b/tests/components/elgato/snapshots/test_sensor.ambr index be0ec0a56c5..c2bcde7a66b 100644 --- a/tests/components/elgato/snapshots/test_sensor.ambr +++ b/tests/components/elgato/snapshots/test_sensor.ambr @@ -79,7 +79,6 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', - 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -172,7 +171,6 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', - 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -265,7 +263,6 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', - 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -355,7 +352,6 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', - 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -448,7 +444,6 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', - 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/elgato/snapshots/test_switch.ambr b/tests/components/elgato/snapshots/test_switch.ambr index ba95160d28a..12857a71cb3 100644 --- a/tests/components/elgato/snapshots/test_switch.ambr +++ b/tests/components/elgato/snapshots/test_switch.ambr @@ -71,7 +71,6 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', - 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -153,7 +152,6 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', - 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/elmax/conftest.py b/tests/components/elmax/conftest.py index f92fc2f1827..552aa138f1b 100644 --- a/tests/components/elmax/conftest.py +++ b/tests/components/elmax/conftest.py @@ -1,6 +1,5 @@ """Configuration for Elmax tests.""" -from collections.abc import Generator import json from unittest.mock import AsyncMock, patch @@ -13,6 +12,7 @@ from elmax_api.constants import ( from httpx import Response import pytest import respx +from typing_extensions import Generator from . 
import ( MOCK_DIRECT_HOST, diff --git a/tests/components/elmax/snapshots/test_alarm_control_panel.ambr b/tests/components/elmax/snapshots/test_alarm_control_panel.ambr index f175fc707bb..f09ba6752c5 100644 --- a/tests/components/elmax/snapshots/test_alarm_control_panel.ambr +++ b/tests/components/elmax/snapshots/test_alarm_control_panel.ambr @@ -46,7 +46,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'disarmed', + 'state': 'unknown', }) # --- # name: test_alarm_control_panels[alarm_control_panel.direct_panel_https_1_1_1_1_443_api_v2_area_2-entry] @@ -96,7 +96,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'disarmed', + 'state': 'unknown', }) # --- # name: test_alarm_control_panels[alarm_control_panel.direct_panel_https_1_1_1_1_443_api_v2_area_3-entry] @@ -146,6 +146,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'disarmed', + 'state': 'unknown', }) # --- diff --git a/tests/components/elmax/test_alarm_control_panel.py b/tests/components/elmax/test_alarm_control_panel.py index 76dc8845662..6e4f09710fc 100644 --- a/tests/components/elmax/test_alarm_control_panel.py +++ b/tests/components/elmax/test_alarm_control_panel.py @@ -1,11 +1,9 @@ """Tests for the Elmax alarm control panels.""" -from datetime import timedelta from unittest.mock import patch from syrupy import SnapshotAssertion -from homeassistant.components.elmax import POLLING_SECONDS from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -14,8 +12,6 @@ from . import init_integration from tests.common import snapshot_platform -WAIT = timedelta(seconds=POLLING_SECONDS) - async def test_alarm_control_panels( hass: HomeAssistant, diff --git a/tests/components/elmax/test_config_flow.py b/tests/components/elmax/test_config_flow.py index 7a4d9755fa5..85e14dd0a3f 100644 --- a/tests/components/elmax/test_config_flow.py +++ b/tests/components/elmax/test_config_flow.py @@ -21,6 +21,7 @@ from homeassistant.components.elmax.const import ( CONF_ELMAX_USERNAME, DOMAIN, ) +from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -543,7 +544,20 @@ async def test_show_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data={ + CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, + CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, + CONF_ELMAX_USERNAME: MOCK_USERNAME, + CONF_ELMAX_PASSWORD: MOCK_PASSWORD, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -563,11 +577,24 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth - reauth_result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.elmax.async_setup_entry", return_value=True, ): + reauth_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data={ + CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, + CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, + CONF_ELMAX_USERNAME: MOCK_USERNAME, + CONF_ELMAX_PASSWORD: MOCK_PASSWORD, + }, + ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { @@ -597,11 
+624,24 @@ async def test_reauth_panel_disappeared(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth - reauth_result = await entry.start_reauth_flow(hass) with patch( "elmax_api.http.Elmax.list_control_panels", return_value=[], ): + reauth_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data={ + CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, + CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, + CONF_ELMAX_USERNAME: MOCK_USERNAME, + CONF_ELMAX_PASSWORD: MOCK_PASSWORD, + }, + ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { @@ -630,11 +670,24 @@ async def test_reauth_invalid_pin(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth - reauth_result = await entry.start_reauth_flow(hass) with patch( "elmax_api.http.Elmax.get_panel_status", side_effect=ElmaxBadPinError(), ): + reauth_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data={ + CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, + CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, + CONF_ELMAX_USERNAME: MOCK_USERNAME, + CONF_ELMAX_PASSWORD: MOCK_PASSWORD, + }, + ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { @@ -663,11 +716,24 @@ async def test_reauth_bad_login(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth - reauth_result = await entry.start_reauth_flow(hass) with patch( "elmax_api.http.Elmax.login", side_effect=ElmaxBadLoginError(), ): + reauth_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data={ + CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, + CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, + CONF_ELMAX_USERNAME: MOCK_USERNAME, + CONF_ELMAX_PASSWORD: MOCK_PASSWORD, + }, + ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { diff --git a/tests/components/elvia/conftest.py b/tests/components/elvia/conftest.py index 13955db49d5..0708e5c698a 100644 --- a/tests/components/elvia/conftest.py +++ b/tests/components/elvia/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Elvia tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/emoncms/__init__.py b/tests/components/emoncms/__init__.py deleted file mode 100644 index 59dc4fa08e1..00000000000 --- a/tests/components/emoncms/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -"""Tests for the emoncms component.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Set up the integration.""" - entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/emoncms/conftest.py b/tests/components/emoncms/conftest.py deleted file mode 100644 index 4bd1d68217a..00000000000 --- a/tests/components/emoncms/conftest.py +++ /dev/null @@ -1,162 +0,0 @@ -"""Fixtures for emoncms integration tests.""" - -from collections.abc import AsyncGenerator, Generator -import copy -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.emoncms.const import 
CONF_ONLY_INCLUDE_FEEDID, DOMAIN -from homeassistant.const import ( - CONF_API_KEY, - CONF_ID, - CONF_PLATFORM, - CONF_URL, - CONF_VALUE_TEMPLATE, -) -from homeassistant.helpers.typing import ConfigType - -from tests.common import MockConfigEntry - -UNITS = ["kWh", "Wh", "W", "V", "A", "VA", "°C", "°F", "K", "Hz", "hPa", ""] - - -def get_feed( - number: int, unit: str = "W", value: int = 18.04, timestamp: int = 1665509570 -): - """Generate feed details.""" - return { - "id": str(number), - "userid": "1", - "name": f"parameter {number}", - "tag": "tag", - "size": "35809224", - "unit": unit, - "time": timestamp, - "value": value, - } - - -FEEDS = [get_feed(i + 1, unit=unit) for i, unit in enumerate(UNITS)] - - -EMONCMS_FAILURE = {"success": False, "message": "failure"} - -FLOW_RESULT = { - CONF_API_KEY: "my_api_key", - CONF_ONLY_INCLUDE_FEEDID: [str(i + 1) for i in range(len(UNITS))], - CONF_URL: "http://1.1.1.1", -} - -SENSOR_NAME = "emoncms@1.1.1.1" - -YAML_BASE = { - CONF_PLATFORM: "emoncms", - CONF_API_KEY: "my_api_key", - CONF_ID: 1, - CONF_URL: "http://1.1.1.1", -} - -YAML = { - **YAML_BASE, - CONF_ONLY_INCLUDE_FEEDID: [1], -} - - -@pytest.fixture -def emoncms_yaml_config() -> ConfigType: - """Mock emoncms yaml configuration.""" - return {"sensor": YAML} - - -@pytest.fixture -def emoncms_yaml_config_with_template() -> ConfigType: - """Mock emoncms yaml conf with template parameter.""" - return {"sensor": {**YAML, CONF_VALUE_TEMPLATE: "{{ value | float + 1500 }}"}} - - -@pytest.fixture -def emoncms_yaml_config_no_include_only_feed_id() -> ConfigType: - """Mock emoncms yaml configuration without include_only_feed_id parameter.""" - return {"sensor": YAML_BASE} - - -@pytest.fixture -def config_entry() -> MockConfigEntry: - """Mock emoncms config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title=SENSOR_NAME, - data=FLOW_RESULT, - ) - - -FLOW_RESULT_SECOND_URL = copy.deepcopy(FLOW_RESULT) -FLOW_RESULT_SECOND_URL[CONF_URL] = "http://1.1.1.2" - - -@pytest.fixture -def config_entry_unique_id() -> MockConfigEntry: - """Mock emoncms config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title=SENSOR_NAME, - data=FLOW_RESULT_SECOND_URL, - unique_id="123-53535292", - ) - - -FLOW_RESULT_NO_FEED = copy.deepcopy(FLOW_RESULT) -FLOW_RESULT_NO_FEED[CONF_ONLY_INCLUDE_FEEDID] = None - - -@pytest.fixture -def config_no_feed() -> MockConfigEntry: - """Mock emoncms config entry with no feed selected.""" - return MockConfigEntry( - domain=DOMAIN, - title=SENSOR_NAME, - data=FLOW_RESULT_NO_FEED, - ) - - -FLOW_RESULT_SINGLE_FEED = copy.deepcopy(FLOW_RESULT) -FLOW_RESULT_SINGLE_FEED[CONF_ONLY_INCLUDE_FEEDID] = ["1"] - - -@pytest.fixture -def config_single_feed() -> MockConfigEntry: - """Mock emoncms config entry with a single feed exposed.""" - return MockConfigEntry( - domain=DOMAIN, - title=SENSOR_NAME, - data=FLOW_RESULT_SINGLE_FEED, - entry_id="XXXXXXXX", - ) - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.emoncms.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -async def emoncms_client() -> AsyncGenerator[AsyncMock]: - """Mock pyemoncms success response.""" - with ( - patch( - "homeassistant.components.emoncms.EmoncmsClient", autospec=True - ) as mock_client, - patch( - "homeassistant.components.emoncms.config_flow.EmoncmsClient", - new=mock_client, - ), - ): - client = mock_client.return_value - client.async_request.return_value = 
{"success": True, "message": FEEDS} - client.async_get_uuid.return_value = "123-53535292" - yield client diff --git a/tests/components/emoncms/snapshots/test_sensor.ambr b/tests/components/emoncms/snapshots/test_sensor.ambr deleted file mode 100644 index f6a2745fb1a..00000000000 --- a/tests/components/emoncms/snapshots/test_sensor.ambr +++ /dev/null @@ -1,59 +0,0 @@ -# serializer version: 1 -# name: test_coordinator_update[sensor.emoncms_1_1_1_1_parameter_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.emoncms_1_1_1_1_parameter_1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'emoncms@1.1.1.1 parameter 1', - 'platform': 'emoncms', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '123-53535292-1', - 'unit_of_measurement': , - }) -# --- -# name: test_coordinator_update[sensor.emoncms_1_1_1_1_parameter_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'FeedId': '1', - 'FeedName': 'parameter 1', - 'LastUpdated': 1665509570, - 'LastUpdatedStr': '2022-10-11T10:32:50-07:00', - 'Size': '35809224', - 'Tag': 'tag', - 'UserId': '1', - 'device_class': 'temperature', - 'friendly_name': 'emoncms@1.1.1.1 parameter 1', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.emoncms_1_1_1_1_parameter_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '18.04', - }) -# --- diff --git a/tests/components/emoncms/test_config_flow.py b/tests/components/emoncms/test_config_flow.py deleted file mode 100644 index 1914f23fb0b..00000000000 --- a/tests/components/emoncms/test_config_flow.py +++ /dev/null @@ -1,161 +0,0 @@ -"""Test emoncms config flow.""" - -from unittest.mock import AsyncMock - -from homeassistant.components.emoncms.const import CONF_ONLY_INCLUDE_FEEDID, DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import CONF_API_KEY, CONF_URL -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from . 
import setup_integration -from .conftest import EMONCMS_FAILURE, FLOW_RESULT_SINGLE_FEED, SENSOR_NAME, YAML - -from tests.common import MockConfigEntry - - -async def test_flow_import_include_feeds( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - emoncms_client: AsyncMock, -) -> None: - """YAML import with included feed - success test.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=YAML, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == SENSOR_NAME - assert result["data"] == FLOW_RESULT_SINGLE_FEED - - -async def test_flow_import_failure( - hass: HomeAssistant, - emoncms_client: AsyncMock, -) -> None: - """YAML import - failure test.""" - emoncms_client.async_request.return_value = EMONCMS_FAILURE - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=YAML, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "api_error" - - -async def test_flow_import_already_configured( - hass: HomeAssistant, - config_entry: MockConfigEntry, - emoncms_client: AsyncMock, -) -> None: - """Test we abort import data set when entry is already configured.""" - config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=YAML, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -USER_INPUT = { - CONF_URL: "http://1.1.1.1", - CONF_API_KEY: "my_api_key", -} - - -async def test_user_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - emoncms_client: AsyncMock, -) -> None: - """Test we get the user form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - USER_INPUT, - ) - - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_ONLY_INCLUDE_FEEDID: ["1"]}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == SENSOR_NAME - assert result["data"] == {**USER_INPUT, CONF_ONLY_INCLUDE_FEEDID: ["1"]} - assert len(mock_setup_entry.mock_calls) == 1 - - -CONFIG_ENTRY = { - CONF_API_KEY: "my_api_key", - CONF_ONLY_INCLUDE_FEEDID: ["1"], - CONF_URL: "http://1.1.1.1", -} - - -async def test_options_flow( - hass: HomeAssistant, - emoncms_client: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Options flow - success test.""" - await setup_integration(hass, config_entry) - assert config_entry.options == {} - result = await hass.config_entries.options.async_init(config_entry.entry_id) - await hass.async_block_till_done() - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - CONF_ONLY_INCLUDE_FEEDID: ["1"], - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert config_entry.options == { - CONF_ONLY_INCLUDE_FEEDID: ["1"], - } - - -async def test_options_flow_failure( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - emoncms_client: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Options flow - test failure.""" - emoncms_client.async_request.return_value = EMONCMS_FAILURE - await setup_integration(hass, config_entry) - result = await 
hass.config_entries.options.async_init(config_entry.entry_id) - await hass.async_block_till_done() - assert result["errors"]["base"] == "api_error" - assert result["description_placeholders"]["details"] == "failure" - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - -async def test_unique_id_exists( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - emoncms_client: AsyncMock, - config_entry_unique_id: MockConfigEntry, -) -> None: - """Test when entry with same unique id already exists.""" - config_entry_unique_id.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], USER_INPUT - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/emoncms/test_init.py b/tests/components/emoncms/test_init.py deleted file mode 100644 index abe1a020034..00000000000 --- a/tests/components/emoncms/test_init.py +++ /dev/null @@ -1,89 +0,0 @@ -"""Test Emoncms component setup process.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock - -from homeassistant.components.emoncms.const import DOMAIN, FEED_ID, FEED_NAME -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er, issue_registry as ir - -from . import setup_integration -from .conftest import EMONCMS_FAILURE, FEEDS - -from tests.common import MockConfigEntry - - -async def test_load_unload_entry( - hass: HomeAssistant, - config_entry: MockConfigEntry, - emoncms_client: AsyncMock, -) -> None: - """Test load and unload entry.""" - await setup_integration(hass, config_entry) - - assert config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.NOT_LOADED - - -async def test_failure( - hass: HomeAssistant, - config_entry: MockConfigEntry, - emoncms_client: AsyncMock, -) -> None: - """Test load failure.""" - emoncms_client.async_request.return_value = EMONCMS_FAILURE - config_entry.add_to_hass(hass) - assert not await hass.config_entries.async_setup(config_entry.entry_id) - - -async def test_migrate_uuid( - hass: HomeAssistant, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - emoncms_client: AsyncMock, -) -> None: - """Test migration from home assistant uuid to emoncms uuid.""" - config_entry.add_to_hass(hass) - assert config_entry.unique_id is None - for _, feed in enumerate(FEEDS): - entity_registry.async_get_or_create( - Platform.SENSOR, - DOMAIN, - f"{config_entry.entry_id}-{feed[FEED_ID]}", - config_entry=config_entry, - suggested_object_id=f"{DOMAIN}_{feed[FEED_NAME]}", - ) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - emoncms_uuid = emoncms_client.async_get_uuid.return_value - assert config_entry.unique_id == emoncms_uuid - entity_entries = er.async_entries_for_config_entry( - entity_registry, config_entry.entry_id - ) - - for nb, feed in enumerate(FEEDS): - assert entity_entries[nb].unique_id == f"{emoncms_uuid}-{feed[FEED_ID]}" - assert ( - entity_entries[nb].previous_unique_id - == f"{config_entry.entry_id}-{feed[FEED_ID]}" - ) - - -async def test_no_uuid( - hass: HomeAssistant, - config_entry: 
MockConfigEntry, - issue_registry: ir.IssueRegistry, - emoncms_client: AsyncMock, -) -> None: - """Test an issue is created when the emoncms server does not ship an uuid.""" - emoncms_client.async_get_uuid.return_value = None - await setup_integration(hass, config_entry) - - assert issue_registry.async_get_issue(domain=DOMAIN, issue_id="migrate database") diff --git a/tests/components/emoncms/test_sensor.py b/tests/components/emoncms/test_sensor.py deleted file mode 100644 index a7bc8059287..00000000000 --- a/tests/components/emoncms/test_sensor.py +++ /dev/null @@ -1,151 +0,0 @@ -"""Test emoncms sensor.""" - -from unittest.mock import AsyncMock - -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.emoncms.const import DOMAIN -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.helpers import entity_registry as er, issue_registry as ir -from homeassistant.helpers.typing import ConfigType -from homeassistant.setup import async_setup_component - -from . import setup_integration -from .conftest import EMONCMS_FAILURE, get_feed - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - - -async def test_deprecated_yaml( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - emoncms_yaml_config: ConfigType, - emoncms_client: AsyncMock, -) -> None: - """Test an issue is created when we import from yaml config.""" - - await async_setup_component(hass, SENSOR_DOMAIN, emoncms_yaml_config) - await hass.async_block_till_done() - - assert issue_registry.async_get_issue( - domain=HOMEASSISTANT_DOMAIN, issue_id=f"deprecated_yaml_{DOMAIN}" - ) - - -async def test_yaml_with_template( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - emoncms_yaml_config_with_template: ConfigType, - emoncms_client: AsyncMock, -) -> None: - """Test an issue is created when we import a yaml config with a value_template parameter.""" - - await async_setup_component(hass, SENSOR_DOMAIN, emoncms_yaml_config_with_template) - await hass.async_block_till_done() - - assert issue_registry.async_get_issue( - domain=DOMAIN, issue_id=f"remove_value_template_{DOMAIN}" - ) - - -async def test_yaml_no_include_only_feed_id( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - emoncms_yaml_config_no_include_only_feed_id: ConfigType, - emoncms_client: AsyncMock, -) -> None: - """Test an issue is created when we import a yaml config without a include_only_feed_id parameter.""" - - await async_setup_component( - hass, SENSOR_DOMAIN, emoncms_yaml_config_no_include_only_feed_id - ) - await hass.async_block_till_done() - - assert issue_registry.async_get_issue( - domain=DOMAIN, issue_id=f"missing_include_only_feed_id_{DOMAIN}" - ) - - -async def test_no_feed_selected( - hass: HomeAssistant, - config_no_feed: MockConfigEntry, - entity_registry: er.EntityRegistry, - emoncms_client: AsyncMock, -) -> None: - """Test with no feed selected.""" - await setup_integration(hass, config_no_feed) - - assert config_no_feed.state is ConfigEntryState.LOADED - entity_entries = er.async_entries_for_config_entry( - entity_registry, config_no_feed.entry_id - ) - assert entity_entries == [] - - -async def test_no_feed_broadcast( - hass: HomeAssistant, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - emoncms_client: AsyncMock, 
-) -> None: - """Test with no feed broadcasted.""" - emoncms_client.async_request.return_value = {"success": True, "message": []} - await setup_integration(hass, config_entry) - - assert config_entry.state is ConfigEntryState.LOADED - entity_entries = er.async_entries_for_config_entry( - entity_registry, config_entry.entry_id - ) - assert entity_entries == [] - - -async def test_coordinator_update( - hass: HomeAssistant, - config_single_feed: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - emoncms_client: AsyncMock, - caplog: pytest.LogCaptureFixture, - freezer: FrozenDateTimeFactory, -) -> None: - """Test coordinator update.""" - emoncms_client.async_request.return_value = { - "success": True, - "message": [get_feed(1, unit="°C")], - } - await setup_integration(hass, config_single_feed) - - await snapshot_platform( - hass, entity_registry, snapshot, config_single_feed.entry_id - ) - - async def skip_time() -> None: - freezer.tick(60) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - emoncms_client.async_request.return_value = { - "success": True, - "message": [get_feed(1, unit="°C", value=24.04, timestamp=1665509670)], - } - - await skip_time() - - entity_entries = er.async_entries_for_config_entry( - entity_registry, config_single_feed.entry_id - ) - - for entity_entry in entity_entries: - state = hass.states.get(entity_entry.entity_id) - assert state.attributes["LastUpdated"] == 1665509670 - assert state.state == "24.04" - - emoncms_client.async_request.return_value = EMONCMS_FAILURE - - await skip_time() - - assert f"Error fetching {DOMAIN}_coordinator data" in caplog.text diff --git a/tests/components/emulated_hue/test_hue_api.py b/tests/components/emulated_hue/test_hue_api.py index a445f8bae0d..40f9f7bce14 100644 --- a/tests/components/emulated_hue/test_hue_api.py +++ b/tests/components/emulated_hue/test_hue_api.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio -from collections.abc import Generator from datetime import timedelta from http import HTTPStatus from ipaddress import ip_address @@ -13,6 +12,7 @@ from unittest.mock import AsyncMock, _patch, patch from aiohttp.hdrs import CONTENT_TYPE from aiohttp.test_utils import TestClient import pytest +from typing_extensions import Generator from homeassistant import const, setup from homeassistant.components import ( @@ -1248,7 +1248,9 @@ async def test_proper_put_state_request(hue_client: TestClient) -> None: """Test the request to set the state.""" # Test proper on value parsing result = await hue_client.put( - f"/api/username/lights/{ENTITY_NUMBERS_BY_ID['light.ceiling_lights']}/state", + "/api/username/lights/{}/state".format( + ENTITY_NUMBERS_BY_ID["light.ceiling_lights"] + ), data=json.dumps({HUE_API_STATE_ON: 1234}), ) @@ -1256,7 +1258,9 @@ async def test_proper_put_state_request(hue_client: TestClient) -> None: # Test proper brightness value parsing result = await hue_client.put( - f"/api/username/lights/{ENTITY_NUMBERS_BY_ID['light.ceiling_lights']}/state", + "/api/username/lights/{}/state".format( + ENTITY_NUMBERS_BY_ID["light.ceiling_lights"] + ), data=json.dumps({HUE_API_STATE_ON: True, HUE_API_STATE_BRI: "Hello world!"}), ) diff --git a/tests/components/emulated_hue/test_upnp.py b/tests/components/emulated_hue/test_upnp.py index b16fda536c6..3522f7e8047 100644 --- a/tests/components/emulated_hue/test_upnp.py +++ b/tests/components/emulated_hue/test_upnp.py @@ -1,7 +1,6 @@ """The tests for the emulated Hue 
component.""" from asyncio import AbstractEventLoop -from collections.abc import Generator from http import HTTPStatus import json import unittest @@ -11,6 +10,7 @@ from aiohttp import web from aiohttp.test_utils import TestClient import defusedxml.ElementTree as ET import pytest +from typing_extensions import Generator from homeassistant import setup from homeassistant.components import emulated_hue diff --git a/tests/components/energenie_power_sockets/conftest.py b/tests/components/energenie_power_sockets/conftest.py index c142e436fd3..64eb8bbd2a8 100644 --- a/tests/components/energenie_power_sockets/conftest.py +++ b/tests/components/energenie_power_sockets/conftest.py @@ -1,11 +1,11 @@ """Configure tests for Energenie-Power-Sockets.""" -from collections.abc import Generator from typing import Final from unittest.mock import MagicMock, patch from pyegps.fakes.powerstrip import FakePowerStrip import pytest +from typing_extensions import Generator from homeassistant.components.energenie_power_sockets.const import ( CONF_DEVICE_API_ID, diff --git a/tests/components/energy/test_sensor.py b/tests/components/energy/test_sensor.py index a27451b853d..0439ac2c028 100644 --- a/tests/components/energy/test_sensor.py +++ b/tests/components/energy/test_sensor.py @@ -1,6 +1,5 @@ """Test the Energy sensors.""" -from collections.abc import Callable, Coroutine import copy from datetime import timedelta from typing import Any @@ -38,12 +37,10 @@ TEST_TIME_ADVANCE_INTERVAL = timedelta(milliseconds=10) @pytest.fixture -async def setup_integration( - recorder_mock: Recorder, -) -> Callable[[HomeAssistant], Coroutine[Any, Any, None]]: +async def setup_integration(recorder_mock: Recorder): """Set up the integration.""" - async def setup_integration(hass: HomeAssistant) -> None: + async def setup_integration(hass): assert await async_setup_component(hass, "energy", {}) await hass.async_block_till_done() diff --git a/tests/components/energyzero/conftest.py b/tests/components/energyzero/conftest.py index d42283c0d4b..49f6c18b09e 100644 --- a/tests/components/energyzero/conftest.py +++ b/tests/components/energyzero/conftest.py @@ -1,11 +1,11 @@ """Fixtures for EnergyZero integration tests.""" -from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch from energyzero import Electricity, Gas import pytest +from typing_extensions import Generator from homeassistant.components.energyzero.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/energyzero/snapshots/test_config_flow.ambr b/tests/components/energyzero/snapshots/test_config_flow.ambr index 72e504c97c8..9b4b3bfc635 100644 --- a/tests/components/energyzero/snapshots/test_config_flow.ambr +++ b/tests/components/energyzero/snapshots/test_config_flow.ambr @@ -18,8 +18,6 @@ 'data': dict({ }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'energyzero', 'entry_id': , 'minor_version': 1, diff --git a/tests/components/energyzero/snapshots/test_sensor.ambr b/tests/components/energyzero/snapshots/test_sensor.ambr index 3a66f25fd32..da52526192e 100644 --- a/tests/components/energyzero/snapshots/test_sensor.ambr +++ b/tests/components/energyzero/snapshots/test_sensor.ambr @@ -62,7 +62,6 @@ }), 'manufacturer': 'EnergyZero', 'model': None, - 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, 'primary_config_entry': , @@ -138,7 +137,6 @@ }), 'manufacturer': 'EnergyZero', 'model': None, - 'model_id': None, 'name': 'Energy market price', 'name_by_user': 
None, 'primary_config_entry': , @@ -211,7 +209,6 @@ }), 'manufacturer': 'EnergyZero', 'model': None, - 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, 'primary_config_entry': , @@ -284,7 +281,6 @@ }), 'manufacturer': 'EnergyZero', 'model': None, - 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, 'primary_config_entry': , @@ -357,7 +353,6 @@ }), 'manufacturer': 'EnergyZero', 'model': None, - 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, 'primary_config_entry': , @@ -433,7 +428,6 @@ }), 'manufacturer': 'EnergyZero', 'model': None, - 'model_id': None, 'name': 'Gas market price', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/enigma2/conftest.py b/tests/components/enigma2/conftest.py index a53d1494e9a..f879fb327d7 100644 --- a/tests/components/enigma2/conftest.py +++ b/tests/components/enigma2/conftest.py @@ -1,9 +1,8 @@ """Test the Enigma2 config flow.""" -from openwebif.api import OpenWebIfServiceEvent, OpenWebIfStatus - from homeassistant.components.enigma2.const import ( CONF_DEEP_STANDBY, + CONF_MAC_ADDRESS, CONF_SOURCE_BOUQUET, CONF_USE_CHANNEL_ICON, DEFAULT_DEEP_STANDBY, @@ -13,6 +12,7 @@ from homeassistant.components.enigma2.const import ( ) from homeassistant.const import ( CONF_HOST, + CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_SSL, @@ -38,6 +38,21 @@ TEST_FULL = { CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL, } +TEST_IMPORT_FULL = { + CONF_HOST: "1.1.1.1", + CONF_PORT: DEFAULT_PORT, + CONF_SSL: DEFAULT_SSL, + CONF_USERNAME: "root", + CONF_PASSWORD: "password", + CONF_NAME: "My Player", + CONF_DEEP_STANDBY: DEFAULT_DEEP_STANDBY, + CONF_SOURCE_BOUQUET: "Favourites", + CONF_MAC_ADDRESS: MAC_ADDRESS, + CONF_USE_CHANNEL_ICON: False, +} + +TEST_IMPORT_REQUIRED = {CONF_HOST: "1.1.1.1"} + EXPECTED_OPTIONS = { CONF_DEEP_STANDBY: DEFAULT_DEEP_STANDBY, CONF_SOURCE_BOUQUET: "Favourites", @@ -51,11 +66,7 @@ class MockDevice: mac_address: str | None = "12:34:56:78:90:ab" _base = "http://1.1.1.1" - def __init__(self) -> None: - """Initialize the mock Enigma2 device.""" - self.status = OpenWebIfStatus(currservice=OpenWebIfServiceEvent()) - - async def _call_api(self, url: str) -> dict | None: + async def _call_api(self, url: str) -> dict: if url.endswith("/api/about"): return { "info": { @@ -63,14 +74,11 @@ class MockDevice: { "mac": self.mac_address, } - ], - "model": "Mock Enigma2", - "brand": "Enigma2", + ] } } - return None - def get_version(self) -> str | None: + def get_version(self): """Return the version.""" return None @@ -89,8 +97,5 @@ class MockDevice: ] } - async def update(self) -> None: - """Mock update.""" - async def close(self): """Mock close.""" diff --git a/tests/components/enigma2/test_config_flow.py b/tests/components/enigma2/test_config_flow.py index 8d32da42baf..74721ce0993 100644 --- a/tests/components/enigma2/test_config_flow.py +++ b/tests/components/enigma2/test_config_flow.py @@ -10,10 +10,18 @@ import pytest from homeassistant import config_entries from homeassistant.components.enigma2.const import DOMAIN from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import issue_registry as ir -from .conftest import TEST_FULL, TEST_REQUIRED, MockDevice +from .conftest import ( + EXPECTED_OPTIONS, + TEST_FULL, + TEST_IMPORT_FULL, + TEST_IMPORT_REQUIRED, + TEST_REQUIRED, + MockDevice, +) from 
tests.common import MockConfigEntry @@ -79,6 +87,87 @@ async def test_form_user_errors( assert result["errors"] == {"base": error_type} +@pytest.mark.parametrize( + ("test_config", "expected_data", "expected_options"), + [ + (TEST_IMPORT_FULL, TEST_FULL, EXPECTED_OPTIONS), + (TEST_IMPORT_REQUIRED, TEST_REQUIRED, {}), + ], +) +async def test_form_import( + hass: HomeAssistant, + test_config: dict[str, Any], + expected_data: dict[str, Any], + expected_options: dict[str, Any], + issue_registry: ir.IssueRegistry, +) -> None: + """Test we get the form with import source.""" + with ( + patch( + "homeassistant.components.enigma2.config_flow.OpenWebIfDevice.__new__", + return_value=MockDevice(), + ), + patch( + "homeassistant.components.enigma2.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data=test_config, + ) + await hass.async_block_till_done() + + issue = issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" + ) + + assert issue + assert issue.issue_domain == DOMAIN + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == test_config[CONF_HOST] + assert result["data"] == expected_data + assert result["options"] == expected_options + + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "error_type"), + [ + (InvalidAuthError, "invalid_auth"), + (ClientError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_form_import_errors( + hass: HomeAssistant, + exception: Exception, + error_type: str, + issue_registry: ir.IssueRegistry, +) -> None: + """Test we handle errors on import.""" + with patch( + "homeassistant.components.enigma2.config_flow.OpenWebIfDevice.__new__", + side_effect=exception, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data=TEST_IMPORT_FULL, + ) + + issue = issue_registry.async_get_issue( + DOMAIN, f"deprecated_yaml_{DOMAIN}_import_issue_{error_type}" + ) + + assert issue + assert issue.issue_domain == DOMAIN + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == error_type + + async def test_options_flow(hass: HomeAssistant, user_flow: str) -> None: """Test the form options.""" diff --git a/tests/components/enigma2/test_init.py b/tests/components/enigma2/test_init.py index ab19c2ce51a..93a130eef54 100644 --- a/tests/components/enigma2/test_init.py +++ b/tests/components/enigma2/test_init.py @@ -15,7 +15,7 @@ async def test_unload_entry(hass: HomeAssistant) -> None: """Test successful unload of entry.""" with ( patch( - "homeassistant.components.enigma2.coordinator.OpenWebIfDevice.__new__", + "homeassistant.components.enigma2.OpenWebIfDevice.__new__", return_value=MockDevice(), ), patch( diff --git a/tests/components/enphase_envoy/__init__.py b/tests/components/enphase_envoy/__init__.py index f69ab8e44f2..6c6293ab76b 100644 --- a/tests/components/enphase_envoy/__init__.py +++ b/tests/components/enphase_envoy/__init__.py @@ -1,13 +1 @@ """Tests for the Enphase Envoy integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await 
hass.async_block_till_done() diff --git a/tests/components/enphase_envoy/conftest.py b/tests/components/enphase_envoy/conftest.py index 541b6f96e19..647084c21ff 100644 --- a/tests/components/enphase_envoy/conftest.py +++ b/tests/components/enphase_envoy/conftest.py @@ -1,56 +1,50 @@ """Define test fixtures for Enphase Envoy.""" -from collections.abc import AsyncGenerator, Generator -from typing import Any from unittest.mock import AsyncMock, Mock, patch import jwt from pyenphase import ( + Envoy, EnvoyData, - EnvoyEncharge, - EnvoyEnchargeAggregate, - EnvoyEnchargePower, - EnvoyEnpower, EnvoyInverter, EnvoySystemConsumption, EnvoySystemProduction, EnvoyTokenAuth, ) -from pyenphase.const import SupportedFeatures -from pyenphase.models.dry_contacts import EnvoyDryContactSettings, EnvoyDryContactStatus -from pyenphase.models.meters import EnvoyMeterData -from pyenphase.models.tariff import EnvoyStorageSettings, EnvoyTariff +from pyenphase.const import PhaseNames, SupportedFeatures +from pyenphase.models.meters import ( + CtMeterStatus, + CtState, + CtStatusFlags, + CtType, + EnvoyMeterData, + EnvoyPhaseMode, +) import pytest +from typing_extensions import AsyncGenerator from homeassistant.components.enphase_envoy import DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, load_json_object_fixture - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.enphase_envoy.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry +from tests.common import MockConfigEntry @pytest.fixture(name="config_entry") def config_entry_fixture( - hass: HomeAssistant, config: dict[str, str] + hass: HomeAssistant, config: dict[str, str], serial_number: str ) -> MockConfigEntry: """Define a config entry fixture.""" - return MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, entry_id="45a36e55aaddb2007c5f6602e0c38e72", - title="Envoy 1234", - unique_id="1234", + title=f"Envoy {serial_number}" if serial_number else "Envoy", + unique_id=serial_number, data=config, ) + entry.add_to_hass(hass) + return entry @pytest.fixture(name="config") @@ -64,190 +58,344 @@ def config_fixture() -> dict[str, str]: } -@pytest.fixture -async def mock_envoy( - request: pytest.FixtureRequest, -) -> AsyncGenerator[AsyncMock]: +@pytest.fixture(name="mock_envoy") +def mock_envoy_fixture( + serial_number: str, + mock_authenticate: AsyncMock, + mock_setup: AsyncMock, + mock_auth: EnvoyTokenAuth, +) -> Mock: """Define a mocked Envoy fixture.""" - new_token = jwt.encode( - payload={"name": "envoy", "exp": 2007837780}, - key="secret", - algorithm="HS256", + mock_envoy = Mock(spec=Envoy) + mock_envoy.serial_number = serial_number + mock_envoy.firmware = "7.1.2" + mock_envoy.part_number = "123456789" + mock_envoy.envoy_model = "Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT" + mock_envoy.authenticate = mock_authenticate + mock_envoy.setup = mock_setup + mock_envoy.auth = mock_auth + mock_envoy.supported_features = SupportedFeatures( + SupportedFeatures.INVERTERS + | SupportedFeatures.PRODUCTION + | SupportedFeatures.PRODUCTION + | SupportedFeatures.METERING + | SupportedFeatures.THREEPHASE + | SupportedFeatures.CTMETERS ) + mock_envoy.phase_mode = EnvoyPhaseMode.THREE + mock_envoy.phase_count = 3 + 
mock_envoy.active_phase_count = 3 + mock_envoy.ct_meter_count = 3 + mock_envoy.consumption_meter_type = CtType.NET_CONSUMPTION + mock_envoy.production_meter_type = CtType.PRODUCTION + mock_envoy.storage_meter_type = CtType.STORAGE + mock_envoy.data = EnvoyData( + system_consumption=EnvoySystemConsumption( + watt_hours_last_7_days=1234, + watt_hours_lifetime=1234, + watt_hours_today=1234, + watts_now=1234, + ), + system_production=EnvoySystemProduction( + watt_hours_last_7_days=1234, + watt_hours_lifetime=1234, + watt_hours_today=1234, + watts_now=1234, + ), + system_consumption_phases={ + PhaseNames.PHASE_1: EnvoySystemConsumption( + watt_hours_last_7_days=1321, + watt_hours_lifetime=1322, + watt_hours_today=1323, + watts_now=1324, + ), + PhaseNames.PHASE_2: EnvoySystemConsumption( + watt_hours_last_7_days=2321, + watt_hours_lifetime=2322, + watt_hours_today=2323, + watts_now=2324, + ), + PhaseNames.PHASE_3: EnvoySystemConsumption( + watt_hours_last_7_days=3321, + watt_hours_lifetime=3322, + watt_hours_today=3323, + watts_now=3324, + ), + }, + system_production_phases={ + PhaseNames.PHASE_1: EnvoySystemProduction( + watt_hours_last_7_days=1231, + watt_hours_lifetime=1232, + watt_hours_today=1233, + watts_now=1234, + ), + PhaseNames.PHASE_2: EnvoySystemProduction( + watt_hours_last_7_days=2231, + watt_hours_lifetime=2232, + watt_hours_today=2233, + watts_now=2234, + ), + PhaseNames.PHASE_3: EnvoySystemProduction( + watt_hours_last_7_days=3231, + watt_hours_lifetime=3232, + watt_hours_today=3233, + watts_now=3234, + ), + }, + ctmeter_production=EnvoyMeterData( + eid="100000010", + timestamp=1708006110, + energy_delivered=11234, + energy_received=12345, + active_power=100, + power_factor=0.11, + voltage=111, + current=0.2, + frequency=50.1, + state=CtState.ENABLED, + measurement_type=CtType.PRODUCTION, + metering_status=CtMeterStatus.NORMAL, + status_flags=[ + CtStatusFlags.PODUCTION_IMBALANCE, + CtStatusFlags.POWER_ON_UNUSED_PHASE, + ], + ), + ctmeter_consumption=EnvoyMeterData( + eid="100000020", + timestamp=1708006120, + energy_delivered=21234, + energy_received=22345, + active_power=101, + power_factor=0.21, + voltage=112, + current=0.3, + frequency=50.2, + state=CtState.ENABLED, + measurement_type=CtType.NET_CONSUMPTION, + metering_status=CtMeterStatus.NORMAL, + status_flags=[], + ), + ctmeter_storage=EnvoyMeterData( + eid="100000030", + timestamp=1708006120, + energy_delivered=31234, + energy_received=32345, + active_power=103, + power_factor=0.23, + voltage=113, + current=0.4, + frequency=50.3, + state=CtState.ENABLED, + measurement_type=CtType.STORAGE, + metering_status=CtMeterStatus.NORMAL, + status_flags=[], + ), + ctmeter_production_phases={ + PhaseNames.PHASE_1: EnvoyMeterData( + eid="100000011", + timestamp=1708006111, + energy_delivered=112341, + energy_received=123451, + active_power=20, + power_factor=0.12, + voltage=111, + current=0.2, + frequency=50.1, + state=CtState.ENABLED, + measurement_type=CtType.PRODUCTION, + metering_status=CtMeterStatus.NORMAL, + status_flags=[CtStatusFlags.PODUCTION_IMBALANCE], + ), + PhaseNames.PHASE_2: EnvoyMeterData( + eid="100000012", + timestamp=1708006112, + energy_delivered=112342, + energy_received=123452, + active_power=30, + power_factor=0.13, + voltage=111, + current=0.2, + frequency=50.1, + state=CtState.ENABLED, + measurement_type=CtType.PRODUCTION, + metering_status=CtMeterStatus.NORMAL, + status_flags=[CtStatusFlags.POWER_ON_UNUSED_PHASE], + ), + PhaseNames.PHASE_3: EnvoyMeterData( + eid="100000013", + timestamp=1708006113, + 
energy_delivered=112343, + energy_received=123453, + active_power=50, + power_factor=0.14, + voltage=111, + current=0.2, + frequency=50.1, + state=CtState.ENABLED, + measurement_type=CtType.PRODUCTION, + metering_status=CtMeterStatus.NORMAL, + status_flags=[], + ), + }, + ctmeter_consumption_phases={ + PhaseNames.PHASE_1: EnvoyMeterData( + eid="100000021", + timestamp=1708006121, + energy_delivered=212341, + energy_received=223451, + active_power=21, + power_factor=0.22, + voltage=112, + current=0.3, + frequency=50.2, + state=CtState.ENABLED, + measurement_type=CtType.NET_CONSUMPTION, + metering_status=CtMeterStatus.NORMAL, + status_flags=[], + ), + PhaseNames.PHASE_2: EnvoyMeterData( + eid="100000022", + timestamp=1708006122, + energy_delivered=212342, + energy_received=223452, + active_power=31, + power_factor=0.23, + voltage=112, + current=0.3, + frequency=50.2, + state=CtState.ENABLED, + measurement_type=CtType.NET_CONSUMPTION, + metering_status=CtMeterStatus.NORMAL, + status_flags=[], + ), + PhaseNames.PHASE_3: EnvoyMeterData( + eid="100000023", + timestamp=1708006123, + energy_delivered=212343, + energy_received=223453, + active_power=51, + power_factor=0.24, + voltage=112, + current=0.3, + frequency=50.2, + state=CtState.ENABLED, + measurement_type=CtType.NET_CONSUMPTION, + metering_status=CtMeterStatus.NORMAL, + status_flags=[], + ), + }, + ctmeter_storage_phases={ + PhaseNames.PHASE_1: EnvoyMeterData( + eid="100000031", + timestamp=1708006121, + energy_delivered=312341, + energy_received=323451, + active_power=22, + power_factor=0.32, + voltage=113, + current=0.4, + frequency=50.3, + state=CtState.ENABLED, + measurement_type=CtType.STORAGE, + metering_status=CtMeterStatus.NORMAL, + status_flags=[], + ), + PhaseNames.PHASE_2: EnvoyMeterData( + eid="100000032", + timestamp=1708006122, + energy_delivered=312342, + energy_received=323452, + active_power=33, + power_factor=0.23, + voltage=112, + current=0.3, + frequency=50.2, + state=CtState.ENABLED, + measurement_type=CtType.STORAGE, + metering_status=CtMeterStatus.NORMAL, + status_flags=[], + ), + PhaseNames.PHASE_3: EnvoyMeterData( + eid="100000033", + timestamp=1708006123, + energy_delivered=312343, + energy_received=323453, + active_power=53, + power_factor=0.24, + voltage=112, + current=0.3, + frequency=50.2, + state=CtState.ENABLED, + measurement_type=CtType.STORAGE, + metering_status=CtMeterStatus.NORMAL, + status_flags=[], + ), + }, + inverters={ + "1": EnvoyInverter( + serial_number="1", + last_report_date=1, + last_report_watts=1, + max_report_watts=1, + ) + }, + raw={"varies_by": "firmware_version"}, + ) + mock_envoy.update = AsyncMock(return_value=mock_envoy.data) + + response = Mock() + response.status_code = 200 + response.text = "Testing request \nreplies." 
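+    # This canned response object is what the mocked Envoy.request below returns.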
+ response.headers = {"Hello": "World"} + mock_envoy.request = AsyncMock(return_value=response) + + return mock_envoy + + +@pytest.fixture(name="setup_enphase_envoy") +async def setup_enphase_envoy_fixture( + hass: HomeAssistant, + config: dict[str, str], + mock_envoy: Mock, +) -> AsyncGenerator[None]: + """Define a fixture to set up Enphase Envoy.""" with ( patch( "homeassistant.components.enphase_envoy.config_flow.Envoy", - autospec=True, - ) as mock_client, + return_value=mock_envoy, + ), patch( "homeassistant.components.enphase_envoy.Envoy", - new=mock_client, - ), - patch( - "pyenphase.auth.EnvoyTokenAuth._obtain_token", - return_value=new_token, + return_value=mock_envoy, ), ): - mock_envoy = mock_client.return_value - # Add the fixtures specified - token = jwt.encode( - payload={"name": "envoy", "exp": 1907837780}, - key="secret", - algorithm="HS256", - ) - mock_envoy.auth = EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial="1234") - mock_envoy.serial_number = "1234" - mock = Mock() - mock.status_code = 200 - mock.text = "Testing request \nreplies." - mock.headers = {"Hello": "World"} - mock_envoy.request.return_value = mock - - # determine fixture file name, default envoy if no request passed - fixture_name = "envoy" - if hasattr(request, "param"): - fixture_name = request.param - - # Load envoy model from fixture - load_envoy_fixture(mock_envoy, fixture_name) - mock_envoy.update.return_value = mock_envoy.data - - yield mock_envoy + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done() + yield -def load_envoy_fixture(mock_envoy: AsyncMock, fixture_name: str) -> None: - """Load envoy model from fixture.""" +@pytest.fixture(name="mock_authenticate") +def mock_authenticate() -> AsyncMock: + """Define a mocked Envoy.authenticate fixture.""" + return AsyncMock() - json_fixture: dict[str, Any] = load_json_object_fixture( - f"{fixture_name}.json", DOMAIN + +@pytest.fixture(name="mock_auth") +def mock_auth(serial_number: str) -> EnvoyTokenAuth: + """Define a mocked EnvoyAuth fixture.""" + token = jwt.encode( + payload={"name": "envoy", "exp": 1907837780}, key="secret", algorithm="HS256" ) - - mock_envoy.firmware = json_fixture["firmware"] - mock_envoy.part_number = json_fixture["part_number"] - mock_envoy.envoy_model = json_fixture["envoy_model"] - mock_envoy.supported_features = SupportedFeatures( - json_fixture["supported_features"] - ) - mock_envoy.phase_mode = json_fixture["phase_mode"] - mock_envoy.phase_count = json_fixture["phase_count"] - mock_envoy.active_phase_count = json_fixture["active_phase_count"] - mock_envoy.ct_meter_count = json_fixture["ct_meter_count"] - mock_envoy.consumption_meter_type = json_fixture["consumption_meter_type"] - mock_envoy.production_meter_type = json_fixture["production_meter_type"] - mock_envoy.storage_meter_type = json_fixture["storage_meter_type"] - - mock_envoy.data = EnvoyData() - _load_json_2_production_data(mock_envoy.data, json_fixture) - _load_json_2_meter_data(mock_envoy.data, json_fixture) - _load_json_2_inverter_data(mock_envoy.data, json_fixture) - _load_json_2_encharge_enpower_data(mock_envoy.data, json_fixture) - _load_json_2_raw_data(mock_envoy.data, json_fixture) + return EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial=serial_number) -def _load_json_2_production_data( - mocked_data: EnvoyData, json_fixture: dict[str, Any] -) -> None: - """Fill envoy production data from fixture.""" - if item := json_fixture["data"].get("system_consumption"): - mocked_data.system_consumption = 
EnvoySystemConsumption(**item) - if item := json_fixture["data"].get("system_net_consumption"): - mocked_data.system_net_consumption = EnvoySystemConsumption(**item) - if item := json_fixture["data"].get("system_production"): - mocked_data.system_production = EnvoySystemProduction(**item) - if item := json_fixture["data"].get("system_consumption_phases"): - mocked_data.system_consumption_phases = {} - for sub_item, item_data in item.items(): - mocked_data.system_consumption_phases[sub_item] = EnvoySystemConsumption( - **item_data - ) - if item := json_fixture["data"].get("system_net_consumption_phases"): - mocked_data.system_net_consumption_phases = {} - for sub_item, item_data in item.items(): - mocked_data.system_net_consumption_phases[sub_item] = ( - EnvoySystemConsumption(**item_data) - ) - if item := json_fixture["data"].get("system_production_phases"): - mocked_data.system_production_phases = {} - for sub_item, item_data in item.items(): - mocked_data.system_production_phases[sub_item] = EnvoySystemProduction( - **item_data - ) +@pytest.fixture(name="mock_setup") +def mock_setup() -> AsyncMock: + """Define a mocked Envoy.setup fixture.""" + return AsyncMock() -def _load_json_2_meter_data( - mocked_data: EnvoyData, json_fixture: dict[str, Any] -) -> None: - """Fill envoy meter data from fixture.""" - if item := json_fixture["data"].get("ctmeter_production"): - mocked_data.ctmeter_production = EnvoyMeterData(**item) - if item := json_fixture["data"].get("ctmeter_consumption"): - mocked_data.ctmeter_consumption = EnvoyMeterData(**item) - if item := json_fixture["data"].get("ctmeter_storage"): - mocked_data.ctmeter_storage = EnvoyMeterData(**item) - if item := json_fixture["data"].get("ctmeter_production_phases"): - mocked_data.ctmeter_production_phases = {} - for sub_item, item_data in item.items(): - mocked_data.ctmeter_production_phases[sub_item] = EnvoyMeterData( - **item_data - ) - if item := json_fixture["data"].get("ctmeter_consumption_phases"): - mocked_data.ctmeter_consumption_phases = {} - for sub_item, item_data in item.items(): - mocked_data.ctmeter_consumption_phases[sub_item] = EnvoyMeterData( - **item_data - ) - if item := json_fixture["data"].get("ctmeter_storage_phases"): - mocked_data.ctmeter_storage_phases = {} - for sub_item, item_data in item.items(): - mocked_data.ctmeter_storage_phases[sub_item] = EnvoyMeterData(**item_data) - - -def _load_json_2_inverter_data( - mocked_data: EnvoyData, json_fixture: dict[str, Any] -) -> None: - """Fill envoy inverter data from fixture.""" - if item := json_fixture["data"].get("inverters"): - mocked_data.inverters = {} - for sub_item, item_data in item.items(): - mocked_data.inverters[sub_item] = EnvoyInverter(**item_data) - - -def _load_json_2_encharge_enpower_data( - mocked_data: EnvoyData, json_fixture: dict[str, Any] -) -> None: - """Fill envoy encharge/enpower data from fixture.""" - if item := json_fixture["data"].get("encharge_inventory"): - mocked_data.encharge_inventory = {} - for sub_item, item_data in item.items(): - mocked_data.encharge_inventory[sub_item] = EnvoyEncharge(**item_data) - if item := json_fixture["data"].get("enpower"): - mocked_data.enpower = EnvoyEnpower(**item) - if item := json_fixture["data"].get("encharge_aggregate"): - mocked_data.encharge_aggregate = EnvoyEnchargeAggregate(**item) - if item := json_fixture["data"].get("encharge_power"): - mocked_data.encharge_power = {} - for sub_item, item_data in item.items(): - mocked_data.encharge_power[sub_item] = EnvoyEnchargePower(**item_data) - if item := 
json_fixture["data"].get("tariff"): - mocked_data.tariff = EnvoyTariff(**item) - mocked_data.tariff.storage_settings = EnvoyStorageSettings( - **item["storage_settings"] - ) - if item := json_fixture["data"].get("dry_contact_status"): - mocked_data.dry_contact_status = {} - for sub_item, item_data in item.items(): - mocked_data.dry_contact_status[sub_item] = EnvoyDryContactStatus( - **item_data - ) - if item := json_fixture["data"].get("dry_contact_settings"): - mocked_data.dry_contact_settings = {} - for sub_item, item_data in item.items(): - mocked_data.dry_contact_settings[sub_item] = EnvoyDryContactSettings( - **item_data - ) - - -def _load_json_2_raw_data(mocked_data: EnvoyData, json_fixture: dict[str, Any]) -> None: - """Fill envoy raw data from fixture.""" - if item := json_fixture["data"].get("raw"): - mocked_data.raw = item +@pytest.fixture(name="serial_number") +def serial_number_fixture() -> str: + """Define a serial number fixture.""" + return "1234" diff --git a/tests/components/enphase_envoy/fixtures/envoy.json b/tests/components/enphase_envoy/fixtures/envoy.json deleted file mode 100644 index 3431dba6766..00000000000 --- a/tests/components/enphase_envoy/fixtures/envoy.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "serial_number": "1234", - "firmware": "7.6.175", - "part_number": "123456789", - "envoy_model": "Envoy", - "supported_features": 65, - "phase_mode": null, - "phase_count": 1, - "active_phase_count": 0, - "ct_meter_count": 0, - "consumption_meter_type": null, - "production_meter_type": null, - "storage_meter_type": null, - "data": { - "encharge_inventory": null, - "encharge_power": null, - "encharge_aggregate": null, - "enpower": null, - "system_consumption": null, - "system_net_consumption": null, - "system_production": { - "watt_hours_lifetime": 1234, - "watt_hours_last_7_days": 1234, - "watt_hours_today": 1234, - "watts_now": 1234 - }, - "system_consumption_phases": null, - "system_net_consumption_phases": null, - "system_production_phases": null, - "ctmeter_production": null, - "ctmeter_consumption": null, - "ctmeter_storage": null, - "ctmeter_production_phases": null, - "ctmeter_consumption_phases": null, - "ctmeter_storage_phases": null, - "dry_contact_status": {}, - "dry_contact_settings": {}, - "inverters": { - "1": { - "serial_number": "1", - "last_report_date": 1, - "last_report_watts": 1, - "max_report_watts": 1 - } - }, - "tariff": null, - "raw": { - "varies_by": "firmware_version" - } - } -} diff --git a/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json b/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json deleted file mode 100644 index 05a6f265dfb..00000000000 --- a/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json +++ /dev/null @@ -1,137 +0,0 @@ -{ - "serial_number": "1234", - "firmware": "7.6.175", - "part_number": "123456789", - "envoy_model": "Envoy, phases: 1, phase mode: three, net-consumption CT, production CT", - "supported_features": 1231, - "phase_mode": "three", - "phase_count": 1, - "active_phase_count": 0, - "ct_meter_count": 2, - "consumption_meter_type": "net-consumption", - "production_meter_type": "production", - "storage_meter_type": null, - "data": { - "encharge_inventory": null, - "encharge_power": null, - "encharge_aggregate": null, - "enpower": null, - "system_consumption": { - "watt_hours_lifetime": 1234, - "watt_hours_last_7_days": 1234, - "watt_hours_today": 1234, - "watts_now": 1234 - }, - "system_net_consumption": { - "watt_hours_lifetime": 4321, - "watt_hours_last_7_days": -1, - 
"watt_hours_today": -1, - "watts_now": 2341 - }, - "system_production": { - "watt_hours_lifetime": 1234, - "watt_hours_last_7_days": 1234, - "watt_hours_today": 1234, - "watts_now": 1234 - }, - "system_consumption_phases": null, - "system_net_consumption_phases": null, - "system_production_phases": null, - "ctmeter_production": { - "eid": "100000010", - "timestamp": 1708006110, - "energy_delivered": 11234, - "energy_received": 12345, - "active_power": 100, - "power_factor": 0.11, - "voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": ["production-imbalance", "power-on-unused-phase"] - }, - "ctmeter_consumption": { - "eid": "100000020", - "timestamp": 1708006120, - "energy_delivered": 21234, - "energy_received": 22345, - "active_power": 101, - "power_factor": 0.21, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "net-consumption", - "metering_status": "normal", - "status_flags": [] - }, - "ctmeter_storage": null, - "ctmeter_production_phases": null, - "ctmeter_consumption_phases": null, - "ctmeter_storage_phases": null, - "dry_contact_status": {}, - "dry_contact_settings": {}, - "inverters": { - "1": { - "serial_number": "1", - "last_report_date": 1, - "last_report_watts": 1, - "max_report_watts": 1 - } - }, - "tariff": { - "currency": { - "code": "EUR" - }, - "logger": "mylogger", - "date": "1695744220", - "storage_settings": { - "mode": "self-consumption", - "operation_mode_sub_type": "", - "reserved_soc": 15.0, - "very_low_soc": 5, - "charge_from_grid": true, - "date": "1695598084" - }, - "single_rate": { - "rate": 0.0, - "sell": 0.0 - }, - "seasons": [ - { - "id": "season_1", - "start": "1/1", - "days": [ - { - "id": "all_days", - "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", - "must_charge_start": 444, - "must_charge_duration": 35, - "must_charge_mode": "CG", - "enable_discharge_to_grid": true, - "periods": [ - { - "id": "period_1", - "start": 480, - "rate": 0.1898 - }, - { - "id": "filler", - "start": 1320, - "rate": 0.1034 - } - ] - } - ], - "tiers": [] - } - ], - "seasons_sell": [] - }, - "raw": { - "varies_by": "firmware_version" - } - } -} diff --git a/tests/components/enphase_envoy/fixtures/envoy_eu_batt.json b/tests/components/enphase_envoy/fixtures/envoy_eu_batt.json deleted file mode 100644 index 8118630200f..00000000000 --- a/tests/components/enphase_envoy/fixtures/envoy_eu_batt.json +++ /dev/null @@ -1,262 +0,0 @@ -{ - "serial_number": "1234", - "firmware": "7.6.358", - "part_number": "800-00654-r08", - "envoy_model": "Envoy, phases: 3, phase mode: three, net-consumption CT, production CT", - "supported_features": 1759, - "phase_mode": "three", - "phase_count": 3, - "active_phase_count": 0, - "ct_meter_count": 2, - "consumption_meter_type": "net-consumption", - "production_meter_type": "production", - "storage_meter_type": null, - "data": { - "encharge_inventory": { - "123456": { - "admin_state": 6, - "admin_state_str": "ENCHG_STATE_READY", - "bmu_firmware_version": "2.1.16", - "comm_level_2_4_ghz": 4, - "comm_level_sub_ghz": 4, - "communicating": true, - "dc_switch_off": false, - "encharge_capacity": 3500, - "encharge_revision": 2, - "firmware_loaded_date": 1714736645, - "firmware_version": "2.6.6618_rel/22.11", - "installed_date": 1714736645, - "last_report_date": 1714804173, - "led_status": 17, - "max_cell_temp": 16, - "operating": true, - "part_number": "830-01760-r46", - "percent_full": 4, - "serial_number": 
"122327081322", - "temperature": 16, - "temperature_unit": "C", - "zigbee_dongle_fw_version": "100F" - } - }, - "encharge_power": { - "123456": { - "apparent_power_mva": 0, - "real_power_mw": 0, - "soc": 4 - } - }, - "encharge_aggregate": { - "available_energy": 140, - "backup_reserve": 0, - "state_of_charge": 4, - "reserve_state_of_charge": 0, - "configured_reserve_state_of_charge": 0, - "max_available_capacity": 3500 - }, - "enpower": null, - "system_consumption": { - "watt_hours_lifetime": 1234, - "watt_hours_last_7_days": 1234, - "watt_hours_today": 1234, - "watts_now": 1234 - }, - "system_production": { - "watt_hours_lifetime": 1234, - "watt_hours_last_7_days": 1234, - "watt_hours_today": 1234, - "watts_now": 1234 - }, - "system_consumption_phases": null, - "system_production_phases": null, - "system_net_consumption": { - "watt_hours_lifetime": 4321, - "watt_hours_last_7_days": -1, - "watt_hours_today": -1, - "watts_now": 2341 - }, - "system_net_consumption_phases": null, - "ctmeter_production": { - "eid": "100000010", - "timestamp": 1708006110, - "energy_delivered": 11234, - "energy_received": 12345, - "active_power": 100, - "power_factor": 0.11, - "voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": ["production-imbalance", "power-on-unused-phase"] - }, - "ctmeter_consumption": { - "eid": "100000020", - "timestamp": 1708006120, - "energy_delivered": 21234, - "energy_received": 22345, - "active_power": 101, - "power_factor": 0.21, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "net-consumption", - "metering_status": "normal", - "status_flags": [] - }, - "ctmeter_storage": null, - "ctmeter_production_phases": { - "L1": { - "eid": "100000011", - "timestamp": 1708006111, - "energy_delivered": 112341, - "energy_received": 123451, - "active_power": 20, - "power_factor": 0.12, - "voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": ["production-imbalance"] - }, - "L2": { - "eid": "100000012", - "timestamp": 1708006112, - "energy_delivered": 112342, - "energy_received": 123452, - "active_power": 30, - "power_factor": 0.13, - "voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": ["power-on-unused-phase"] - }, - "L3": { - "eid": "100000013", - "timestamp": 1708006113, - "energy_delivered": 112343, - "energy_received": 123453, - "active_power": 50, - "power_factor": 0.14, - "voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": [] - } - }, - "ctmeter_consumption_phases": { - "L1": { - "eid": "100000021", - "timestamp": 1708006121, - "energy_delivered": 212341, - "energy_received": 223451, - "active_power": 21, - "power_factor": 0.22, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "net-consumption", - "metering_status": "normal", - "status_flags": [] - }, - "L2": { - "eid": "100000022", - "timestamp": 1708006122, - "energy_delivered": 212342, - "energy_received": 223452, - "active_power": 31, - "power_factor": 0.23, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "net-consumption", - "metering_status": "normal", 
- "status_flags": [] - }, - "L3": { - "eid": "100000023", - "timestamp": 1708006123, - "energy_delivered": 212343, - "energy_received": 223453, - "active_power": 51, - "power_factor": 0.24, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "net-consumption", - "metering_status": "normal", - "status_flags": [] - } - }, - "ctmeter_storage_phases": null, - "dry_contact_status": {}, - "dry_contact_settings": {}, - "inverters": { - "1": { - "serial_number": "1", - "last_report_date": 1, - "last_report_watts": 1, - "max_report_watts": 1 - } - }, - "tariff": { - "currency": { - "code": "EUR" - }, - "logger": "mylogger", - "date": "1714749724", - "storage_settings": { - "mode": "self-consumption", - "operation_mode_sub_type": "", - "reserved_soc": 0.0, - "very_low_soc": 5, - "charge_from_grid": true, - "date": "1714749724" - }, - "single_rate": { - "rate": 0.0, - "sell": 0.0 - }, - "seasons": [ - { - "id": "all_year_long", - "start": "1/1", - "days": [ - { - "id": "all_days", - "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", - "must_charge_start": 0, - "must_charge_duration": 0, - "must_charge_mode": "CP", - "enable_discharge_to_grid": false, - "periods": [ - { - "id": "period_1", - "start": 0, - "rate": 0.0 - } - ] - } - ], - "tiers": [] - } - ], - "seasons_sell": [] - }, - "raw": { - "varies_by": "firmware_version" - } - } -} diff --git a/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json b/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json deleted file mode 100644 index 7affc1bea0d..00000000000 --- a/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json +++ /dev/null @@ -1,471 +0,0 @@ -{ - "serial_number": "1234", - "firmware": "7.1.2", - "part_number": "123456789", - "envoy_model": "Envoy, phases: 3, phase mode: split, net-consumption CT, production CT, storage CT", - "supported_features": 1659, - "phase_mode": "three", - "phase_count": 3, - "active_phase_count": 3, - "ct_meter_count": 2, - "consumption_meter_type": "net-consumption", - "production_meter_type": "production", - "storage_meter_type": "storage", - "data": { - "encharge_inventory": { - "123456": { - "admin_state": 6, - "admin_state_str": "ENCHG_STATE_READY", - "bmu_firmware_version": "2.1.34", - "comm_level_2_4_ghz": 4, - "comm_level_sub_ghz": 4, - "communicating": true, - "dc_switch_off": false, - "encharge_capacity": 3500, - "encharge_revision": 2, - "firmware_loaded_date": 1695330323, - "firmware_version": "2.6.5973_rel/22.11", - "installed_date": 1695330323, - "last_report_date": 1695769447, - "led_status": 17, - "max_cell_temp": 30, - "operating": true, - "part_number": "830-01760-r37", - "percent_full": 15, - "serial_number": "123456", - "temperature": 29, - "temperature_unit": "C", - "zigbee_dongle_fw_version": "100F" - } - }, - "encharge_power": { - "123456": { - "apparent_power_mva": 0, - "real_power_mw": 0, - "soc": 15 - } - }, - "encharge_aggregate": { - "available_energy": 525, - "backup_reserve": 526, - "state_of_charge": 15, - "reserve_state_of_charge": 15, - "configured_reserve_state_of_charge": 15, - "max_available_capacity": 3500 - }, - "enpower": { - "grid_mode": "multimode-ongrid", - "admin_state": 24, - "admin_state_str": "ENPWR_STATE_OPER_CLOSED", - "comm_level_2_4_ghz": 5, - "comm_level_sub_ghz": 5, - "communicating": true, - "firmware_loaded_date": 1695330323, - "firmware_version": "1.2.2064_release/20.34", - "installed_date": 1695330323, - "last_report_date": 1695769447, - "mains_admin_state": "closed", - 
"mains_oper_state": "closed", - "operating": true, - "part_number": "830-01760-r37", - "serial_number": "654321", - "temperature": 79, - "temperature_unit": "F", - "zigbee_dongle_fw_version": "1009" - }, - "system_consumption": { - "watt_hours_lifetime": 1234, - "watt_hours_last_7_days": 1234, - "watt_hours_today": 1234, - "watts_now": 1234 - }, - "system_net_consumption": { - "watt_hours_lifetime": 4321, - "watt_hours_last_7_days": -1, - "watt_hours_today": -1, - "watts_now": 2341 - }, - "system_production": { - "watt_hours_lifetime": 1234, - "watt_hours_last_7_days": 1234, - "watt_hours_today": 1234, - "watts_now": 1234 - }, - "system_consumption_phases": { - "L1": { - "watt_hours_lifetime": 1322, - "watt_hours_last_7_days": 1321, - "watt_hours_today": 1323, - "watts_now": 1324 - }, - "L2": { - "watt_hours_lifetime": 2322, - "watt_hours_last_7_days": 2321, - "watt_hours_today": 2323, - "watts_now": 2324 - }, - "L3": { - "watt_hours_lifetime": 3322, - "watt_hours_last_7_days": 3321, - "watt_hours_today": 3323, - "watts_now": 3324 - } - }, - "system_net_consumption_phases": { - "L1": { - "watt_hours_lifetime": 1321, - "watt_hours_last_7_days": -1, - "watt_hours_today": -1, - "watts_now": 12341 - }, - "L2": { - "watt_hours_lifetime": 2321, - "watt_hours_last_7_days": -1, - "watt_hours_today": -1, - "watts_now": 22341 - }, - "L3": { - "watt_hours_lifetime": 3321, - "watt_hours_last_7_days": -1, - "watt_hours_today": -1, - "watts_now": 32341 - } - }, - "system_production_phases": { - "L1": { - "watt_hours_lifetime": 1232, - "watt_hours_last_7_days": 1231, - "watt_hours_today": 1233, - "watts_now": 1234 - }, - "L2": { - "watt_hours_lifetime": 2232, - "watt_hours_last_7_days": 2231, - "watt_hours_today": 2233, - "watts_now": 2234 - }, - "L3": { - "watt_hours_lifetime": 3232, - "watt_hours_last_7_days": 3231, - "watt_hours_today": 3233, - "watts_now": 3234 - } - }, - "ctmeter_production": { - "eid": "100000010", - "timestamp": 1708006110, - "energy_delivered": 11234, - "energy_received": 12345, - "active_power": 100, - "power_factor": 0.11, - "voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": ["production-imbalance", "power-on-unused-phase"] - }, - "ctmeter_consumption": { - "eid": "100000020", - "timestamp": 1708006120, - "energy_delivered": 21234, - "energy_received": 22345, - "active_power": 101, - "power_factor": 0.21, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "net-consumption", - "metering_status": "normal", - "status_flags": [] - }, - "ctmeter_storage": { - "eid": "100000030", - "timestamp": 1708006120, - "energy_delivered": 31234, - "energy_received": 32345, - "active_power": 103, - "power_factor": 0.23, - "voltage": 113, - "current": 0.4, - "frequency": 50.3, - "state": "enabled", - "measurement_type": "storage", - "metering_status": "normal", - "status_flags": [] - }, - "ctmeter_production_phases": { - "L1": { - "eid": "100000011", - "timestamp": 1708006111, - "energy_delivered": 112341, - "energy_received": 123451, - "active_power": 20, - "power_factor": 0.12, - "voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": ["production-imbalance"] - }, - "L2": { - "eid": "100000012", - "timestamp": 1708006112, - "energy_delivered": 112342, - "energy_received": 123452, - "active_power": 30, - "power_factor": 0.13, - 
"voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": ["power-on-unused-phase"] - }, - "L3": { - "eid": "100000013", - "timestamp": 1708006113, - "energy_delivered": 112343, - "energy_received": 123453, - "active_power": 50, - "power_factor": 0.14, - "voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": [] - } - }, - "ctmeter_consumption_phases": { - "L1": { - "eid": "100000021", - "timestamp": 1708006121, - "energy_delivered": 212341, - "energy_received": 223451, - "active_power": 21, - "power_factor": 0.22, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "net-consumption", - "metering_status": "normal", - "status_flags": [] - }, - "L2": { - "eid": "100000022", - "timestamp": 1708006122, - "energy_delivered": 212342, - "energy_received": 223452, - "active_power": 31, - "power_factor": 0.23, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "net-consumption", - "metering_status": "normal", - "status_flags": [] - }, - "L3": { - "eid": "100000023", - "timestamp": 1708006123, - "energy_delivered": 212343, - "energy_received": 223453, - "active_power": 51, - "power_factor": 0.24, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "net-consumption", - "metering_status": "normal", - "status_flags": [] - } - }, - "ctmeter_storage_phases": { - "L1": { - "eid": "100000031", - "timestamp": 1708006121, - "energy_delivered": 312341, - "energy_received": 323451, - "active_power": 22, - "power_factor": 0.32, - "voltage": 113, - "current": 0.4, - "frequency": 50.3, - "state": "enabled", - "measurement_type": "storage", - "metering_status": "normal", - "status_flags": [] - }, - "L2": { - "eid": "100000032", - "timestamp": 1708006122, - "energy_delivered": 312342, - "energy_received": 323452, - "active_power": 33, - "power_factor": 0.23, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "storage", - "metering_status": "normal", - "status_flags": [] - }, - "L3": { - "eid": "100000033", - "timestamp": 1708006123, - "energy_delivered": 312343, - "energy_received": 323453, - "active_power": 53, - "power_factor": 0.24, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "storage", - "metering_status": "normal", - "status_flags": [] - } - }, - "dry_contact_status": { - "NC1": { - "id": "NC1", - "status": "open" - }, - "NC2": { - "id": "NC2", - "status": "closed" - }, - "NC3": { - "id": "NC3", - "status": "open" - } - }, - "dry_contact_settings": { - "NC1": { - "id": "NC1", - "black_start": 5.0, - "essential_end_time": 32400.0, - "essential_start_time": 57600.0, - "generator_action": "shed", - "grid_action": "shed", - "load_name": "NC1 Fixture", - "manual_override": true, - "micro_grid_action": "shed", - "mode": "manual", - "override": true, - "priority": 1.0, - "pv_serial_nb": [], - "soc_high": 70.0, - "soc_low": 25.0, - "type": "LOAD" - }, - "NC2": { - "id": "NC2", - "black_start": 5.0, - "essential_end_time": 57600.0, - "essential_start_time": 32400.0, - "generator_action": "shed", - "grid_action": "apply", - "load_name": "NC2 Fixture", - "manual_override": true, - "micro_grid_action": "shed", - "mode": "manual", - "override": true, - "priority": 
2.0, - "pv_serial_nb": [], - "soc_high": 70.0, - "soc_low": 30.0, - "type": "LOAD" - }, - "NC3": { - "id": "NC3", - "black_start": 5.0, - "essential_end_time": 57600.0, - "essential_start_time": 32400.0, - "generator_action": "apply", - "grid_action": "shed", - "load_name": "NC3 Fixture", - "manual_override": true, - "micro_grid_action": "apply", - "mode": "manual", - "override": true, - "priority": 3.0, - "pv_serial_nb": [], - "soc_high": 70.0, - "soc_low": 30.0, - "type": "NONE" - } - }, - "inverters": { - "1": { - "serial_number": "1", - "last_report_date": 1, - "last_report_watts": 1, - "max_report_watts": 1 - } - }, - "tariff": { - "currency": { - "code": "EUR" - }, - "logger": "mylogger", - "date": "1695744220", - "storage_settings": { - "mode": "self-consumption", - "operation_mode_sub_type": "", - "reserved_soc": 15.0, - "very_low_soc": 5, - "charge_from_grid": true, - "date": "1695598084" - }, - "single_rate": { - "rate": 0.0, - "sell": 0.0 - }, - "seasons": [ - { - "id": "season_1", - "start": "1/1", - "days": [ - { - "id": "all_days", - "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", - "must_charge_start": 444, - "must_charge_duration": 35, - "must_charge_mode": "CG", - "enable_discharge_to_grid": true, - "periods": [ - { - "id": "period_1", - "start": 480, - "rate": 0.1898 - }, - { - "id": "filler", - "start": 1320, - "rate": 0.1034 - } - ] - } - ], - "tiers": [] - } - ], - "seasons_sell": [] - }, - "raw": { - "varies_by": "firmware_version" - } - } -} diff --git a/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json b/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json deleted file mode 100644 index ff975b690ed..00000000000 --- a/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json +++ /dev/null @@ -1,286 +0,0 @@ -{ - "serial_number": "1234", - "firmware": "7.6.175", - "part_number": "123456789", - "envoy_model": "Envoy, phases: 3, phase mode: three, net-consumption CT, production CT", - "supported_features": 1743, - "phase_mode": "three", - "phase_count": 3, - "active_phase_count": 3, - "ct_meter_count": 2, - "consumption_meter_type": "net-consumption", - "production_meter_type": "production", - "storage_meter_type": null, - "data": { - "encharge_inventory": null, - "encharge_power": null, - "encharge_aggregate": null, - "enpower": null, - "system_consumption": { - "watt_hours_lifetime": 1234, - "watt_hours_last_7_days": 1234, - "watt_hours_today": 1234, - "watts_now": 1234 - }, - "system_net_consumption": { - "watt_hours_lifetime": 4321, - "watt_hours_last_7_days": -1, - "watt_hours_today": -1, - "watts_now": 2341 - }, - "system_production": { - "watt_hours_lifetime": 1234, - "watt_hours_last_7_days": 1234, - "watt_hours_today": 1234, - "watts_now": 1234 - }, - "system_consumption_phases": { - "L1": { - "watt_hours_lifetime": 1322, - "watt_hours_last_7_days": 1321, - "watt_hours_today": 1323, - "watts_now": 1324 - }, - "L2": { - "watt_hours_lifetime": 2322, - "watt_hours_last_7_days": 2321, - "watt_hours_today": 2323, - "watts_now": 2324 - }, - "L3": { - "watt_hours_lifetime": 3322, - "watt_hours_last_7_days": 3321, - "watt_hours_today": 3323, - "watts_now": 3324 - } - }, - "system_net_consumption_phases": { - "L1": { - "watt_hours_lifetime": 1321, - "watt_hours_last_7_days": -1, - "watt_hours_today": -1, - "watts_now": 12341 - }, - "L2": { - "watt_hours_lifetime": 2321, - "watt_hours_last_7_days": -1, - "watt_hours_today": -1, - "watts_now": 22341 - }, - "L3": { - "watt_hours_lifetime": 3321, - "watt_hours_last_7_days": -1, - 
"watt_hours_today": -1, - "watts_now": 32341 - } - }, - "system_production_phases": { - "L1": { - "watt_hours_lifetime": 1232, - "watt_hours_last_7_days": 1231, - "watt_hours_today": 1233, - "watts_now": 1234 - }, - "L2": { - "watt_hours_lifetime": 2232, - "watt_hours_last_7_days": 2231, - "watt_hours_today": 2233, - "watts_now": 2234 - }, - "L3": { - "watt_hours_lifetime": 3232, - "watt_hours_last_7_days": 3231, - "watt_hours_today": 3233, - "watts_now": 3234 - } - }, - "ctmeter_production": { - "eid": "100000010", - "timestamp": 1708006110, - "energy_delivered": 11234, - "energy_received": 12345, - "active_power": 100, - "power_factor": 0.11, - "voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": ["production-imbalance", "power-on-unused-phase"] - }, - "ctmeter_consumption": { - "eid": "100000020", - "timestamp": 1708006120, - "energy_delivered": 21234, - "energy_received": 22345, - "active_power": 101, - "power_factor": 0.21, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "net-consumption", - "metering_status": "normal", - "status_flags": [] - }, - "ctmeter_storage": null, - "ctmeter_production_phases": { - "L1": { - "eid": "100000011", - "timestamp": 1708006111, - "energy_delivered": 112341, - "energy_received": 123451, - "active_power": 20, - "power_factor": 0.12, - "voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": ["production-imbalance"] - }, - "L2": { - "eid": "100000012", - "timestamp": 1708006112, - "energy_delivered": 112342, - "energy_received": 123452, - "active_power": 30, - "power_factor": 0.13, - "voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": ["power-on-unused-phase"] - }, - "L3": { - "eid": "100000013", - "timestamp": 1708006113, - "energy_delivered": 112343, - "energy_received": 123453, - "active_power": 50, - "power_factor": 0.14, - "voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": [] - } - }, - "ctmeter_consumption_phases": { - "L1": { - "eid": "100000021", - "timestamp": 1708006121, - "energy_delivered": 212341, - "energy_received": 223451, - "active_power": 21, - "power_factor": 0.22, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "net-consumption", - "metering_status": "normal", - "status_flags": [] - }, - "L2": { - "eid": "100000022", - "timestamp": 1708006122, - "energy_delivered": 212342, - "energy_received": 223452, - "active_power": 31, - "power_factor": 0.23, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "net-consumption", - "metering_status": "normal", - "status_flags": [] - }, - "L3": { - "eid": "100000023", - "timestamp": 1708006123, - "energy_delivered": 212343, - "energy_received": 223453, - "active_power": 51, - "power_factor": 0.24, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "net-consumption", - "metering_status": "normal", - "status_flags": [] - } - }, - "ctmeter_storage_phases": null, - "dry_contact_status": {}, - "dry_contact_settings": {}, - "inverters": { - "1": { - "serial_number": "1", - 
"last_report_date": 1, - "last_report_watts": 1, - "max_report_watts": 1 - } - }, - "tariff": { - "currency": { - "code": "EUR" - }, - "logger": "mylogger", - "date": "1695744220", - "storage_settings": { - "mode": "self-consumption", - "operation_mode_sub_type": "", - "reserved_soc": 15.0, - "very_low_soc": 5, - "charge_from_grid": true, - "date": "1695598084" - }, - "single_rate": { - "rate": 0.0, - "sell": 0.0 - }, - "seasons": [ - { - "id": "season_1", - "start": "1/1", - "days": [ - { - "id": "all_days", - "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", - "must_charge_start": 444, - "must_charge_duration": 35, - "must_charge_mode": "CG", - "enable_discharge_to_grid": true, - "periods": [ - { - "id": "period_1", - "start": 480, - "rate": 0.1898 - }, - { - "id": "filler", - "start": 1320, - "rate": 0.1034 - } - ] - } - ], - "tiers": [] - } - ], - "seasons_sell": [] - }, - "raw": { - "varies_by": "firmware_version" - } - } -} diff --git a/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json b/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json deleted file mode 100644 index 62df69c6d88..00000000000 --- a/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json +++ /dev/null @@ -1,132 +0,0 @@ -{ - "serial_number": "1234", - "firmware": "7.6.175", - "part_number": "123456789", - "envoy_model": "Envoy, phases: 1, phase mode: three, total-consumption CT, production CT", - "supported_features": 1217, - "phase_mode": "three", - "phase_count": 1, - "active_phase_count": 0, - "ct_meter_count": 2, - "consumption_meter_type": "total-consumption", - "production_meter_type": "production", - "storage_meter_type": null, - "data": { - "encharge_inventory": null, - "encharge_power": null, - "encharge_aggregate": null, - "enpower": null, - "system_consumption": null, - "system_net_consumption": { - "watt_hours_lifetime": 4321, - "watt_hours_last_7_days": -1, - "watt_hours_today": -1, - "watts_now": 2341 - }, - "system_production": { - "watt_hours_lifetime": 1234, - "watt_hours_last_7_days": 1234, - "watt_hours_today": 1234, - "watts_now": 1234 - }, - "system_consumption_phases": null, - "system_net_consumption_phases": null, - "system_production_phases": null, - "ctmeter_production": { - "eid": "100000010", - "timestamp": 1708006110, - "energy_delivered": 11234, - "energy_received": 12345, - "active_power": 100, - "power_factor": 0.11, - "voltage": 111, - "current": 0.2, - "frequency": 50.1, - "state": "enabled", - "measurement_type": "production", - "metering_status": "normal", - "status_flags": ["production-imbalance", "power-on-unused-phase"] - }, - "ctmeter_consumption": { - "eid": "100000020", - "timestamp": 1708006120, - "energy_delivered": 21234, - "energy_received": 22345, - "active_power": 101, - "power_factor": 0.21, - "voltage": 112, - "current": 0.3, - "frequency": 50.2, - "state": "enabled", - "measurement_type": "total-consumption", - "metering_status": "normal", - "status_flags": [] - }, - "ctmeter_storage": null, - "ctmeter_production_phases": null, - "ctmeter_consumption_phases": null, - "ctmeter_storage_phases": null, - "dry_contact_status": {}, - "dry_contact_settings": {}, - "inverters": { - "1": { - "serial_number": "1", - "last_report_date": 1, - "last_report_watts": 1, - "max_report_watts": 1 - } - }, - "tariff": { - "currency": { - "code": "EUR" - }, - "logger": "mylogger", - "date": "1695744220", - "storage_settings": { - "mode": "self-consumption", - "operation_mode_sub_type": "", - "reserved_soc": 15.0, - "very_low_soc": 5, - "charge_from_grid": 
true, - "date": "1695598084" - }, - "single_rate": { - "rate": 0.0, - "sell": 0.0 - }, - "seasons": [ - { - "id": "season_1", - "start": "1/1", - "days": [ - { - "id": "all_days", - "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", - "must_charge_start": 444, - "must_charge_duration": 35, - "must_charge_mode": "CG", - "enable_discharge_to_grid": true, - "periods": [ - { - "id": "period_1", - "start": 480, - "rate": 0.1898 - }, - { - "id": "filler", - "start": 1320, - "rate": 0.1034 - } - ] - } - ], - "tiers": [] - } - ], - "seasons_sell": [] - }, - "raw": { - "varies_by": "firmware_version" - } - } -} diff --git a/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr deleted file mode 100644 index f936a9db76e..00000000000 --- a/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,281 +0,0 @@ -# serializer version: 1 -# name: test_binary_sensor[envoy_eu_batt][binary_sensor.encharge_123456_communicating-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.encharge_123456_communicating', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Communicating', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'communicating', - 'unique_id': '123456_communicating', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[envoy_eu_batt][binary_sensor.encharge_123456_communicating-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Encharge 123456 Communicating', - }), - 'context': , - 'entity_id': 'binary_sensor.encharge_123456_communicating', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[envoy_eu_batt][binary_sensor.encharge_123456_dc_switch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.encharge_123456_dc_switch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DC switch', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dc_switch', - 'unique_id': '123456_dc_switch', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[envoy_eu_batt][binary_sensor.encharge_123456_dc_switch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Encharge 123456 DC switch', - }), - 'context': , - 'entity_id': 'binary_sensor.encharge_123456_dc_switch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.encharge_123456_communicating-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 
'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.encharge_123456_communicating', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Communicating', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'communicating', - 'unique_id': '123456_communicating', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.encharge_123456_communicating-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Encharge 123456 Communicating', - }), - 'context': , - 'entity_id': 'binary_sensor.encharge_123456_communicating', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.encharge_123456_dc_switch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.encharge_123456_dc_switch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DC switch', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dc_switch', - 'unique_id': '123456_dc_switch', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.encharge_123456_dc_switch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Encharge 123456 DC switch', - }), - 'context': , - 'entity_id': 'binary_sensor.encharge_123456_dc_switch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.enpower_654321_communicating-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.enpower_654321_communicating', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Communicating', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'communicating', - 'unique_id': '654321_communicating', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.enpower_654321_communicating-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Enpower 654321 Communicating', - }), - 'context': , - 'entity_id': 'binary_sensor.enpower_654321_communicating', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_binary_sensor[envoy_metered_batt_relay][binary_sensor.enpower_654321_grid_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.enpower_654321_grid_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:transmission-tower', - 'original_name': 'Grid status', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'grid_status', - 'unique_id': '654321_mains_oper_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.enpower_654321_grid_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Enpower 654321 Grid status', - 'icon': 'mdi:transmission-tower', - }), - 'context': , - 'entity_id': 'binary_sensor.enpower_654321_grid_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/enphase_envoy/snapshots/test_config_flow.ambr b/tests/components/enphase_envoy/snapshots/test_config_flow.ambr new file mode 100644 index 00000000000..b83d4e811f8 --- /dev/null +++ b/tests/components/enphase_envoy/snapshots/test_config_flow.ambr @@ -0,0 +1,10 @@ +# serializer version: 1 +# name: test_platforms + list([ + , + , + , + , + , + ]) +# --- diff --git a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr index 76835098f27..acaee292237 100644 --- a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr +++ b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr @@ -10,8 +10,6 @@ 'username': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'enphase_envoy', 'entry_id': '45a36e55aaddb2007c5f6602e0c38e72', 'minor_version': 1, @@ -47,14 +45,13 @@ 'labels': list([ ]), 'manufacturer': 'Enphase', - 'model': 'Envoy', - 'model_id': None, + 'model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT', 'name': 'Envoy <>', 'name_by_user': None, 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', 'serial_number': '<>', 'suggested_area': None, - 'sw_version': '7.6.175', + 'sw_version': '7.1.2', }), 'entities': list([ dict({ @@ -262,6 +259,3493 @@ 'state': '0.00<>', }), }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption', + 
'unique_id': '<>_consumption', + 'unit_of_measurement': 'kW', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'power', + 'friendly_name': 'Envoy <> Current power consumption', + 'icon': 'mdi:flash', + 'state_class': 'measurement', + 'unit_of_measurement': 'kW', + }), + 'entity_id': 'sensor.envoy_<>_current_power_consumption', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption', + 'unique_id': '<>_daily_consumption', + 'unit_of_measurement': 'kWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Energy consumption today', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'kWh', + }), + 'entity_id': 'sensor.envoy_<>_energy_consumption_today', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption', + 'unique_id': '<>_seven_days_consumption', + 'unit_of_measurement': 'kWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Energy consumption last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': 'kWh', + }), + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 
'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption', + 'unique_id': '<>_lifetime_consumption', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime energy consumption', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', + 'state': '0.00<>', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '<>_production_l1', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_today_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '<>_daily_production_l1', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '<>_seven_days_production_l1', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 
'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '<>_lifetime_production_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '<>_production_l2', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '<>_daily_production_l2', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l2', + 'platform': 
'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '<>_seven_days_production_l2', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '<>_lifetime_production_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '<>_production_l3', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '<>_daily_production_l3', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 
'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '<>_seven_days_production_l3', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '<>_lifetime_production_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '<>_consumption_l1', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '<>_daily_consumption_l1', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 
'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '<>_seven_days_consumption_l1', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '<>_lifetime_consumption_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '<>_consumption_l2', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': 
'<>_daily_consumption_l2', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '<>_seven_days_consumption_l2', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '<>_lifetime_consumption_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '<>_consumption_l3', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + 
}), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '<>_daily_consumption_l3', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '<>_seven_days_consumption_l3', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '<>_lifetime_consumption_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption', + 'unique_id': '<>_lifetime_net_consumption', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime net energy consumption', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 
'sensor.envoy_<>_lifetime_net_energy_consumption', + 'state': '0.02<>', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production', + 'unique_id': '<>_lifetime_net_production', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime net energy production', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', + 'state': '0.022345', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption', + 'unique_id': '<>_net_consumption', + 'unit_of_measurement': 'kW', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'power', + 'friendly_name': 'Envoy <> Current net power consumption', + 'icon': 'mdi:flash', + 'state_class': 'measurement', + 'unit_of_measurement': 'kW', + }), + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', + 'state': '0.101', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'frequency', + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency', + 'unique_id': '<>_frequency', + 'unit_of_measurement': 'Hz', + }), + 'state': None, 
+ }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage', + 'unique_id': '<>_voltage', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status', + 'unique_id': '<>_net_consumption_ct_metering_status', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags', + 'unique_id': '<>_net_consumption_ct_status_flags', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '<>_lifetime_net_consumption_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '<>_lifetime_net_production_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '<>_net_consumption_l1', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'frequency', + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '<>_frequency_l1', + 'unit_of_measurement': 'Hz', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 
'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '<>_voltage_l1', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '<>_net_consumption_ct_metering_status_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '<>_net_consumption_ct_status_flags_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '<>_lifetime_net_consumption_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '<>_lifetime_net_production_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '<>_net_consumption_l2', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'frequency', + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '<>_frequency_l2', + 'unit_of_measurement': 'Hz', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '<>_voltage_l2', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 
'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '<>_net_consumption_ct_metering_status_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '<>_net_consumption_ct_status_flags_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '<>_lifetime_net_consumption_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, 
+ 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '<>_lifetime_net_production_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '<>_net_consumption_l3', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'frequency', + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '<>_frequency_l3', + 'unit_of_measurement': 'Hz', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '<>_voltage_l3', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 
'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '<>_net_consumption_ct_metering_status_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '<>_net_consumption_ct_status_flags_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '<>_production_ct_metering_status', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '<>_production_ct_status_flags', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': 
list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '<>_production_ct_metering_status_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '<>_production_ct_status_flags_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '<>_production_ct_metering_status_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '<>_production_ct_status_flags_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_<>_metering_status_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '<>_production_ct_metering_status_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '<>_production_ct_status_flags_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged', + 'unique_id': '<>_lifetime_battery_discharged', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime battery energy discharged', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', + 'state': '0.03<>', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime 
battery energy charged', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged', + 'unique_id': '<>_lifetime_battery_charged', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime battery energy charged', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged', + 'state': '0.032345', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_battery_discharge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge', + 'unique_id': '<>_battery_discharge', + 'unit_of_measurement': 'kW', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'power', + 'friendly_name': 'Envoy <> Current battery discharge', + 'icon': 'mdi:flash', + 'state_class': 'measurement', + 'unit_of_measurement': 'kW', + }), + 'entity_id': 'sensor.envoy_<>_current_battery_discharge', + 'state': '0.103', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage', + 'unique_id': '<>_storage_voltage', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'storage_ct_metering_status', + 'unique_id': '<>_storage_ct_metering_status', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags', + 'unique_id': '<>_storage_ct_status_flags', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '<>_lifetime_battery_discharged_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '<>_lifetime_battery_charged_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 
'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '<>_battery_discharge_l1', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '<>_storage_voltage_l1', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '<>_storage_ct_metering_status_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '<>_storage_ct_status_flags_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_<>_lifetime_battery_energy_discharged_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '<>_lifetime_battery_discharged_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '<>_lifetime_battery_charged_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '<>_battery_discharge_l2', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '<>_storage_voltage_l2', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ 
+ 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '<>_storage_ct_metering_status_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '<>_storage_ct_status_flags_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '<>_lifetime_battery_discharged_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l3', + 'platform': 'enphase_envoy', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '<>_lifetime_battery_charged_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '<>_battery_discharge_l3', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '<>_storage_voltage_l3', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '<>_storage_ct_metering_status_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '<>_storage_ct_status_flags_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), ]), }), dict({ @@ -287,7 +3771,6 @@ ]), 'manufacturer': 'Enphase', 'model': 'Inverter', - 'model_id': None, 'name': 'Inverter 1', 'name_by_user': None, 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', @@ -380,12 +3863,60 @@ }), ]), 'envoy_model_data': dict({ - 'ctmeter_consumption': None, - 'ctmeter_consumption_phases': None, - 'ctmeter_production': None, - 'ctmeter_production_phases': None, - 'ctmeter_storage': None, - 'ctmeter_storage_phases': None, + 'ctmeter_consumption': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000020', timestamp=1708006120, energy_delivered=21234, energy_received=22345, active_power=101, power_factor=0.21, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'ctmeter_consumption_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000021', timestamp=1708006121, energy_delivered=212341, energy_received=223451, active_power=21, power_factor=0.22, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L2': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000022', timestamp=1708006122, energy_delivered=212342, energy_received=223452, active_power=31, power_factor=0.23, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L3': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000023', timestamp=1708006123, energy_delivered=212343, energy_received=223453, active_power=51, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + }), + 'ctmeter_production': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000010', timestamp=1708006110, energy_delivered=11234, energy_received=12345, active_power=100, power_factor=0.11, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[, ])", + }), + 'ctmeter_production_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000011', timestamp=1708006111, energy_delivered=112341, energy_received=123451, active_power=20, power_factor=0.12, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L2': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000012', timestamp=1708006112, energy_delivered=112342, energy_received=123452, active_power=30, power_factor=0.13, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L3': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000013', timestamp=1708006113, energy_delivered=112343, energy_received=123453, active_power=50, power_factor=0.14, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", + }), + }), + 'ctmeter_storage': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000030', timestamp=1708006120, energy_delivered=31234, energy_received=32345, active_power=103, power_factor=0.23, voltage=113, current=0.4, frequency=50.3, state=, 
measurement_type=, metering_status=, status_flags=[])", + }), + 'ctmeter_storage_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000031', timestamp=1708006121, energy_delivered=312341, energy_received=323451, active_power=22, power_factor=0.32, voltage=113, current=0.4, frequency=50.3, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L2': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000032', timestamp=1708006122, energy_delivered=312342, energy_received=323452, active_power=33, power_factor=0.23, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L3': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000033', timestamp=1708006123, energy_delivered=312343, energy_received=323453, active_power=53, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + }), 'dry_contact_settings': dict({ }), 'dry_contact_status': dict({ @@ -400,29 +3931,61 @@ 'repr': "EnvoyInverter(serial_number='1', last_report_date=1, last_report_watts=1, max_report_watts=1)", }), }), - 'system_consumption': None, - 'system_consumption_phases': None, + 'system_consumption': dict({ + '__type': "", + 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', + }), + 'system_consumption_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1322, watt_hours_last_7_days=1321, watt_hours_today=1323, watts_now=1324)', + }), + 'L2': dict({ + '__type': "", + 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=2322, watt_hours_last_7_days=2321, watt_hours_today=2323, watts_now=2324)', + }), + 'L3': dict({ + '__type': "", + 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=3322, watt_hours_last_7_days=3321, watt_hours_today=3323, watts_now=3324)', + }), + }), 'system_production': dict({ '__type': "", 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', }), - 'system_production_phases': None, + 'system_production_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1232, watt_hours_last_7_days=1231, watt_hours_today=1233, watts_now=1234)', + }), + 'L2': dict({ + '__type': "", + 'repr': 'EnvoySystemProduction(watt_hours_lifetime=2232, watt_hours_last_7_days=2231, watt_hours_today=2233, watts_now=2234)', + }), + 'L3': dict({ + '__type': "", + 'repr': 'EnvoySystemProduction(watt_hours_lifetime=3232, watt_hours_last_7_days=3231, watt_hours_today=3233, watts_now=3234)', + }), + }), 'tariff': None, }), 'envoy_properties': dict({ - 'active_phasecount': 0, - 'ct_consumption_meter': None, - 'ct_count': 0, - 'ct_production_meter': None, - 'ct_storage_meter': None, - 'envoy_firmware': '7.6.175', - 'envoy_model': 'Envoy', + 'active_phasecount': 3, + 'ct_consumption_meter': 'net-consumption', + 'ct_count': 3, + 'ct_production_meter': 'production', + 'ct_storage_meter': 'storage', + 'envoy_firmware': '7.1.2', + 'envoy_model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT', 'part_number': '123456789', - 'phase_count': 1, - 'phase_mode': None, + 'phase_count': 3, + 'phase_mode': 'three', 'supported_features': list([ 'INVERTERS', + 'METERING', 'PRODUCTION', + 'THREEPHASE', + 'CTMETERS', ]), }), 'fixtures': dict({ @@ -443,8 +4006,6 @@ 'username': '**REDACTED**', }), 
'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'enphase_envoy', 'entry_id': '45a36e55aaddb2007c5f6602e0c38e72', 'minor_version': 1, @@ -481,14 +4042,13 @@ 'labels': list([ ]), 'manufacturer': 'Enphase', - 'model': 'Envoy', - 'model_id': None, + 'model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT', 'name': 'Envoy <>', 'name_by_user': None, 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', 'serial_number': '<>', 'suggested_area': None, - 'sw_version': '7.6.175', + 'sw_version': '7.1.2', }), 'entities': list([ dict({ @@ -696,6 +4256,3493 @@ 'state': '0.00<>', }), }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption', + 'unique_id': '<>_consumption', + 'unit_of_measurement': 'kW', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'power', + 'friendly_name': 'Envoy <> Current power consumption', + 'icon': 'mdi:flash', + 'state_class': 'measurement', + 'unit_of_measurement': 'kW', + }), + 'entity_id': 'sensor.envoy_<>_current_power_consumption', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption', + 'unique_id': '<>_daily_consumption', + 'unit_of_measurement': 'kWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Energy consumption today', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'kWh', + }), + 'entity_id': 'sensor.envoy_<>_energy_consumption_today', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption', + 'unique_id': '<>_seven_days_consumption', + 'unit_of_measurement': 'kWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Energy consumption last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': 'kWh', + }), + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption', + 'unique_id': '<>_lifetime_consumption', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime energy consumption', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', + 'state': '0.00<>', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '<>_production_l1', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_today_l1', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '<>_daily_production_l1', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '<>_seven_days_production_l1', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '<>_lifetime_production_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '<>_production_l2', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + 
}), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '<>_daily_production_l2', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '<>_seven_days_production_l2', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '<>_lifetime_production_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'current_power_production_phase', + 'unique_id': '<>_production_l3', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '<>_daily_production_l3', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '<>_seven_days_production_l3', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '<>_lifetime_production_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 
'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '<>_consumption_l1', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '<>_daily_consumption_l1', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '<>_seven_days_consumption_l1', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '<>_lifetime_consumption_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 
'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '<>_consumption_l2', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '<>_daily_consumption_l2', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '<>_seven_days_consumption_l2', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '<>_lifetime_consumption_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': 
None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '<>_consumption_l3', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '<>_daily_consumption_l3', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '<>_seven_days_consumption_l3', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 
'original_name': 'Lifetime energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '<>_lifetime_consumption_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption', + 'unique_id': '<>_lifetime_net_consumption', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime net energy consumption', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption', + 'state': '0.02<>', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production', + 'unique_id': '<>_lifetime_net_production', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime net energy production', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', + 'state': '0.022345', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 
'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption', + 'unique_id': '<>_net_consumption', + 'unit_of_measurement': 'kW', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'power', + 'friendly_name': 'Envoy <> Current net power consumption', + 'icon': 'mdi:flash', + 'state_class': 'measurement', + 'unit_of_measurement': 'kW', + }), + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', + 'state': '0.101', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'frequency', + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency', + 'unique_id': '<>_frequency', + 'unit_of_measurement': 'Hz', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage', + 'unique_id': '<>_voltage', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status', + 'unique_id': '<>_net_consumption_ct_metering_status', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 
'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags', + 'unique_id': '<>_net_consumption_ct_status_flags', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '<>_lifetime_net_consumption_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '<>_lifetime_net_production_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'net_consumption_phase', + 'unique_id': '<>_net_consumption_l1', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'frequency', + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '<>_frequency_l1', + 'unit_of_measurement': 'Hz', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '<>_voltage_l1', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '<>_net_consumption_ct_metering_status_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l1', + 'platform': 
'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '<>_net_consumption_ct_status_flags_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '<>_lifetime_net_consumption_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '<>_lifetime_net_production_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '<>_net_consumption_l2', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_<>_frequency_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'frequency', + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '<>_frequency_l2', + 'unit_of_measurement': 'Hz', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '<>_voltage_l2', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '<>_net_consumption_ct_metering_status_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '<>_net_consumption_ct_status_flags_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', 
+ 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '<>_lifetime_net_consumption_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '<>_lifetime_net_production_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '<>_net_consumption_l3', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'frequency', + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '<>_frequency_l3', + 'unit_of_measurement': 'Hz', + }), + 
'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '<>_voltage_l3', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '<>_net_consumption_ct_metering_status_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '<>_net_consumption_ct_status_flags_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, 
+ 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '<>_production_ct_metering_status', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '<>_production_ct_status_flags', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '<>_production_ct_metering_status_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '<>_production_ct_status_flags_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering 
status production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '<>_production_ct_metering_status_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '<>_production_ct_status_flags_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '<>_production_ct_metering_status_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '<>_production_ct_status_flags_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged', + 'unique_id': '<>_lifetime_battery_discharged', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime battery energy discharged', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', + 'state': '0.03<>', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged', + 'unique_id': '<>_lifetime_battery_charged', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime battery energy charged', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged', + 'state': '0.032345', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_battery_discharge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge', + 'unique_id': '<>_battery_discharge', + 'unit_of_measurement': 'kW', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'power', + 'friendly_name': 'Envoy <> Current battery discharge', + 'icon': 'mdi:flash', + 'state_class': 'measurement', + 'unit_of_measurement': 'kW', + }), + 'entity_id': 'sensor.envoy_<>_current_battery_discharge', + 'state': '0.103', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 
'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage', + 'unique_id': '<>_storage_voltage', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status', + 'unique_id': '<>_storage_ct_metering_status', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags', + 'unique_id': '<>_storage_ct_status_flags', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '<>_lifetime_battery_discharged_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, 
+ }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '<>_lifetime_battery_charged_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '<>_battery_discharge_l1', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '<>_storage_voltage_l1', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status 
storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '<>_storage_ct_metering_status_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '<>_storage_ct_status_flags_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '<>_lifetime_battery_discharged_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '<>_lifetime_battery_charged_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l2', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '<>_battery_discharge_l2', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '<>_storage_voltage_l2', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '<>_storage_ct_metering_status_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '<>_storage_ct_status_flags_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 
'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '<>_lifetime_battery_discharged_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '<>_lifetime_battery_charged_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '<>_battery_discharge_l3', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '<>_storage_voltage_l3', + 
'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '<>_storage_ct_metering_status_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '<>_storage_ct_status_flags_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), ]), }), dict({ @@ -721,7 +7768,6 @@ ]), 'manufacturer': 'Enphase', 'model': 'Inverter', - 'model_id': None, 'name': 'Inverter 1', 'name_by_user': None, 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', @@ -814,12 +7860,60 @@ }), ]), 'envoy_model_data': dict({ - 'ctmeter_consumption': None, - 'ctmeter_consumption_phases': None, - 'ctmeter_production': None, - 'ctmeter_production_phases': None, - 'ctmeter_storage': None, - 'ctmeter_storage_phases': None, + 'ctmeter_consumption': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000020', timestamp=1708006120, energy_delivered=21234, energy_received=22345, active_power=101, power_factor=0.21, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'ctmeter_consumption_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000021', timestamp=1708006121, energy_delivered=212341, energy_received=223451, active_power=21, power_factor=0.22, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L2': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000022', timestamp=1708006122, energy_delivered=212342, energy_received=223452, active_power=31, power_factor=0.23, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L3': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000023', timestamp=1708006123, energy_delivered=212343, energy_received=223453, active_power=51, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", 
+ }), + }), + 'ctmeter_production': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000010', timestamp=1708006110, energy_delivered=11234, energy_received=12345, active_power=100, power_factor=0.11, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[, ])", + }), + 'ctmeter_production_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000011', timestamp=1708006111, energy_delivered=112341, energy_received=123451, active_power=20, power_factor=0.12, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L2': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000012', timestamp=1708006112, energy_delivered=112342, energy_received=123452, active_power=30, power_factor=0.13, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L3': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000013', timestamp=1708006113, energy_delivered=112343, energy_received=123453, active_power=50, power_factor=0.14, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", + }), + }), + 'ctmeter_storage': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000030', timestamp=1708006120, energy_delivered=31234, energy_received=32345, active_power=103, power_factor=0.23, voltage=113, current=0.4, frequency=50.3, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'ctmeter_storage_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000031', timestamp=1708006121, energy_delivered=312341, energy_received=323451, active_power=22, power_factor=0.32, voltage=113, current=0.4, frequency=50.3, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L2': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000032', timestamp=1708006122, energy_delivered=312342, energy_received=323452, active_power=33, power_factor=0.23, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L3': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000033', timestamp=1708006123, energy_delivered=312343, energy_received=323453, active_power=53, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + }), 'dry_contact_settings': dict({ }), 'dry_contact_status': dict({ @@ -834,29 +7928,61 @@ 'repr': "EnvoyInverter(serial_number='1', last_report_date=1, last_report_watts=1, max_report_watts=1)", }), }), - 'system_consumption': None, - 'system_consumption_phases': None, + 'system_consumption': dict({ + '__type': "", + 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', + }), + 'system_consumption_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1322, watt_hours_last_7_days=1321, watt_hours_today=1323, watts_now=1324)', + }), + 'L2': dict({ + '__type': "", + 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=2322, watt_hours_last_7_days=2321, watt_hours_today=2323, watts_now=2324)', + }), + 'L3': dict({ + '__type': "", + 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=3322, watt_hours_last_7_days=3321, watt_hours_today=3323, watts_now=3324)', + }), + }), 'system_production': dict({ '__type': "", 'repr': 
'EnvoySystemProduction(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', }), - 'system_production_phases': None, + 'system_production_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1232, watt_hours_last_7_days=1231, watt_hours_today=1233, watts_now=1234)', + }), + 'L2': dict({ + '__type': "", + 'repr': 'EnvoySystemProduction(watt_hours_lifetime=2232, watt_hours_last_7_days=2231, watt_hours_today=2233, watts_now=2234)', + }), + 'L3': dict({ + '__type': "", + 'repr': 'EnvoySystemProduction(watt_hours_lifetime=3232, watt_hours_last_7_days=3231, watt_hours_today=3233, watts_now=3234)', + }), + }), 'tariff': None, }), 'envoy_properties': dict({ - 'active_phasecount': 0, - 'ct_consumption_meter': None, - 'ct_count': 0, - 'ct_production_meter': None, - 'ct_storage_meter': None, - 'envoy_firmware': '7.6.175', - 'envoy_model': 'Envoy', + 'active_phasecount': 3, + 'ct_consumption_meter': 'net-consumption', + 'ct_count': 3, + 'ct_production_meter': 'production', + 'ct_storage_meter': 'storage', + 'envoy_firmware': '7.1.2', + 'envoy_model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT', 'part_number': '123456789', - 'phase_count': 1, - 'phase_mode': None, + 'phase_count': 3, + 'phase_mode': 'three', 'supported_features': list([ 'INVERTERS', + 'METERING', 'PRODUCTION', + 'THREEPHASE', + 'CTMETERS', ]), }), 'fixtures': dict({ @@ -917,8 +8043,6 @@ 'username': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'enphase_envoy', 'entry_id': '45a36e55aaddb2007c5f6602e0c38e72', 'minor_version': 1, @@ -955,14 +8079,13 @@ 'labels': list([ ]), 'manufacturer': 'Enphase', - 'model': 'Envoy', - 'model_id': None, + 'model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT', 'name': 'Envoy <>', 'name_by_user': None, 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', 'serial_number': '<>', 'suggested_area': None, - 'sw_version': '7.6.175', + 'sw_version': '7.1.2', }), 'entities': list([ dict({ @@ -1170,6 +8293,3493 @@ 'state': '0.00<>', }), }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption', + 'unique_id': '<>_consumption', + 'unit_of_measurement': 'kW', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'power', + 'friendly_name': 'Envoy <> Current power consumption', + 'icon': 'mdi:flash', + 'state_class': 'measurement', + 'unit_of_measurement': 'kW', + }), + 'entity_id': 'sensor.envoy_<>_current_power_consumption', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption', + 'unique_id': '<>_daily_consumption', + 'unit_of_measurement': 'kWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Energy consumption today', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'kWh', + }), + 'entity_id': 'sensor.envoy_<>_energy_consumption_today', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption', + 'unique_id': '<>_seven_days_consumption', + 'unit_of_measurement': 'kWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Energy consumption last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': 'kWh', + }), + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption', + 'unique_id': '<>_lifetime_consumption', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime energy consumption', + 
'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', + 'state': '0.00<>', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '<>_production_l1', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_today_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '<>_daily_production_l1', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '<>_seven_days_production_l1', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': 
dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '<>_lifetime_production_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '<>_production_l2', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '<>_daily_production_l2', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '<>_seven_days_production_l2', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 
'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '<>_lifetime_production_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '<>_production_l3', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '<>_daily_production_l3', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '<>_seven_days_production_l3', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + 
}), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '<>_lifetime_production_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '<>_consumption_l1', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '<>_daily_consumption_l1', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy 
consumption last seven days l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '<>_seven_days_consumption_l1', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '<>_lifetime_consumption_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '<>_consumption_l2', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '<>_daily_consumption_l2', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l2', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '<>_seven_days_consumption_l2', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '<>_lifetime_consumption_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '<>_consumption_l3', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '<>_daily_consumption_l3', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': 
dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '<>_seven_days_consumption_l3', + 'unit_of_measurement': 'kWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '<>_lifetime_consumption_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption', + 'unique_id': '<>_lifetime_net_consumption', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime net energy consumption', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption', + 'state': '0.02<>', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production', + 'unique_id': '<>_lifetime_net_production', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime net energy production', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', + 'state': '0.022345', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption', + 'unique_id': '<>_net_consumption', + 'unit_of_measurement': 'kW', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'power', + 'friendly_name': 'Envoy <> Current net power consumption', + 'icon': 'mdi:flash', + 'state_class': 'measurement', + 'unit_of_measurement': 'kW', + }), + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', + 'state': '0.101', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'frequency', + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency', + 'unique_id': '<>_frequency', + 'unit_of_measurement': 'Hz', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 
'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage', + 'unique_id': '<>_voltage', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status', + 'unique_id': '<>_net_consumption_ct_metering_status', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags', + 'unique_id': '<>_net_consumption_ct_status_flags', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '<>_lifetime_net_consumption_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '<>_lifetime_net_production_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '<>_net_consumption_l1', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'frequency', + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '<>_frequency_l1', + 'unit_of_measurement': 'Hz', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '<>_voltage_l1', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 
'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '<>_net_consumption_ct_metering_status_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '<>_net_consumption_ct_status_flags_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '<>_lifetime_net_consumption_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, 
+ 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '<>_lifetime_net_production_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '<>_net_consumption_l2', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'frequency', + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '<>_frequency_l2', + 'unit_of_measurement': 'Hz', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '<>_voltage_l2', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 
'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '<>_net_consumption_ct_metering_status_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '<>_net_consumption_ct_status_flags_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '<>_lifetime_net_consumption_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '<>_lifetime_net_production_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_<>_current_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '<>_net_consumption_l3', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'frequency', + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '<>_frequency_l3', + 'unit_of_measurement': 'Hz', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '<>_voltage_l3', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '<>_net_consumption_ct_metering_status_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 
'45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '<>_net_consumption_ct_status_flags_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '<>_production_ct_metering_status', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '<>_production_ct_status_flags', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '<>_production_ct_metering_status_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 
'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '<>_production_ct_status_flags_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '<>_production_ct_metering_status_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '<>_production_ct_status_flags_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '<>_production_ct_metering_status_l3', + 
'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '<>_production_ct_status_flags_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged', + 'unique_id': '<>_lifetime_battery_discharged', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime battery energy discharged', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', + 'state': '0.03<>', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged', + 'unique_id': '<>_lifetime_battery_charged', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime battery energy charged', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 
'sensor.envoy_<>_lifetime_battery_energy_charged', + 'state': '0.032345', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_battery_discharge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge', + 'unique_id': '<>_battery_discharge', + 'unit_of_measurement': 'kW', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'power', + 'friendly_name': 'Envoy <> Current battery discharge', + 'icon': 'mdi:flash', + 'state_class': 'measurement', + 'unit_of_measurement': 'kW', + }), + 'entity_id': 'sensor.envoy_<>_current_battery_discharge', + 'state': '0.103', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage', + 'unique_id': '<>_storage_voltage', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status', + 'unique_id': '<>_storage_ct_metering_status', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_<>_meter_status_flags_active_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags', + 'unique_id': '<>_storage_ct_status_flags', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '<>_lifetime_battery_discharged_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '<>_lifetime_battery_charged_l1', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '<>_battery_discharge_l1', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': 
None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '<>_storage_voltage_l1', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '<>_storage_ct_metering_status_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '<>_storage_ct_status_flags_l1', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'lifetime_battery_discharged_phase', + 'unique_id': '<>_lifetime_battery_discharged_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '<>_lifetime_battery_charged_l2', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '<>_battery_discharge_l2', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '<>_storage_voltage_l2', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '<>_storage_ct_metering_status_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '<>_storage_ct_status_flags_l2', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '<>_lifetime_battery_discharged_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '<>_lifetime_battery_charged_l3', + 'unit_of_measurement': 'MWh', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 
'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '<>_battery_discharge_l3', + 'unit_of_measurement': 'kW', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'V', + }), + }), + 'original_device_class': 'voltage', + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '<>_storage_voltage_l3', + 'unit_of_measurement': 'V', + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'normal', + 'not-metering', + 'check-wiring', + ]), + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'enum', + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '<>_storage_ct_metering_status_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '<>_storage_ct_status_flags_l3', + 'unit_of_measurement': None, + }), + 'state': None, + }), ]), }), dict({ @@ -1195,7 +11805,6 @@ ]), 'manufacturer': 'Enphase', 'model': 'Inverter', - 'model_id': None, 'name': 'Inverter 1', 
'name_by_user': None, 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', @@ -1288,12 +11897,60 @@ }), ]), 'envoy_model_data': dict({ - 'ctmeter_consumption': None, - 'ctmeter_consumption_phases': None, - 'ctmeter_production': None, - 'ctmeter_production_phases': None, - 'ctmeter_storage': None, - 'ctmeter_storage_phases': None, + 'ctmeter_consumption': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000020', timestamp=1708006120, energy_delivered=21234, energy_received=22345, active_power=101, power_factor=0.21, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'ctmeter_consumption_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000021', timestamp=1708006121, energy_delivered=212341, energy_received=223451, active_power=21, power_factor=0.22, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L2': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000022', timestamp=1708006122, energy_delivered=212342, energy_received=223452, active_power=31, power_factor=0.23, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L3': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000023', timestamp=1708006123, energy_delivered=212343, energy_received=223453, active_power=51, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + }), + 'ctmeter_production': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000010', timestamp=1708006110, energy_delivered=11234, energy_received=12345, active_power=100, power_factor=0.11, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[, ])", + }), + 'ctmeter_production_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000011', timestamp=1708006111, energy_delivered=112341, energy_received=123451, active_power=20, power_factor=0.12, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L2': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000012', timestamp=1708006112, energy_delivered=112342, energy_received=123452, active_power=30, power_factor=0.13, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L3': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000013', timestamp=1708006113, energy_delivered=112343, energy_received=123453, active_power=50, power_factor=0.14, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", + }), + }), + 'ctmeter_storage': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000030', timestamp=1708006120, energy_delivered=31234, energy_received=32345, active_power=103, power_factor=0.23, voltage=113, current=0.4, frequency=50.3, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'ctmeter_storage_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000031', timestamp=1708006121, energy_delivered=312341, energy_received=323451, active_power=22, power_factor=0.32, voltage=113, current=0.4, frequency=50.3, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L2': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000032', timestamp=1708006122, 
energy_delivered=312342, energy_received=323452, active_power=33, power_factor=0.23, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + 'L3': dict({ + '__type': "", + 'repr': "EnvoyMeterData(eid='100000033', timestamp=1708006123, energy_delivered=312343, energy_received=323453, active_power=53, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", + }), + }), 'dry_contact_settings': dict({ }), 'dry_contact_status': dict({ @@ -1308,29 +11965,61 @@ 'repr': "EnvoyInverter(serial_number='1', last_report_date=1, last_report_watts=1, max_report_watts=1)", }), }), - 'system_consumption': None, - 'system_consumption_phases': None, + 'system_consumption': dict({ + '__type': "", + 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', + }), + 'system_consumption_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1322, watt_hours_last_7_days=1321, watt_hours_today=1323, watts_now=1324)', + }), + 'L2': dict({ + '__type': "", + 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=2322, watt_hours_last_7_days=2321, watt_hours_today=2323, watts_now=2324)', + }), + 'L3': dict({ + '__type': "", + 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=3322, watt_hours_last_7_days=3321, watt_hours_today=3323, watts_now=3324)', + }), + }), 'system_production': dict({ '__type': "", 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', }), - 'system_production_phases': None, + 'system_production_phases': dict({ + 'L1': dict({ + '__type': "", + 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1232, watt_hours_last_7_days=1231, watt_hours_today=1233, watts_now=1234)', + }), + 'L2': dict({ + '__type': "", + 'repr': 'EnvoySystemProduction(watt_hours_lifetime=2232, watt_hours_last_7_days=2231, watt_hours_today=2233, watts_now=2234)', + }), + 'L3': dict({ + '__type': "", + 'repr': 'EnvoySystemProduction(watt_hours_lifetime=3232, watt_hours_last_7_days=3231, watt_hours_today=3233, watts_now=3234)', + }), + }), 'tariff': None, }), 'envoy_properties': dict({ - 'active_phasecount': 0, - 'ct_consumption_meter': None, - 'ct_count': 0, - 'ct_production_meter': None, - 'ct_storage_meter': None, - 'envoy_firmware': '7.6.175', - 'envoy_model': 'Envoy', + 'active_phasecount': 3, + 'ct_consumption_meter': 'net-consumption', + 'ct_count': 3, + 'ct_production_meter': 'production', + 'ct_storage_meter': 'storage', + 'envoy_firmware': '7.1.2', + 'envoy_model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT', 'part_number': '123456789', - 'phase_count': 1, - 'phase_mode': None, + 'phase_count': 3, + 'phase_mode': 'three', 'supported_features': list([ 'INVERTERS', + 'METERING', 'PRODUCTION', + 'THREEPHASE', + 'CTMETERS', ]), }), 'fixtures': dict({ diff --git a/tests/components/enphase_envoy/snapshots/test_number.ambr b/tests/components/enphase_envoy/snapshots/test_number.ambr deleted file mode 100644 index b7e799c9ac8..00000000000 --- a/tests/components/enphase_envoy/snapshots/test_number.ambr +++ /dev/null @@ -1,451 +0,0 @@ -# serializer version: 1 -# name: test_number[envoy_eu_batt][number.envoy_1234_reserve_battery_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 
'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.envoy_1234_reserve_battery_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Reserve battery level', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reserve_soc', - 'unique_id': '1234_reserve_soc', - 'unit_of_measurement': '%', - }) -# --- -# name: test_number[envoy_eu_batt][number.envoy_1234_reserve_battery_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Envoy 1234 Reserve battery level', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'number.envoy_1234_reserve_battery_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.enpower_654321_reserve_battery_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.enpower_654321_reserve_battery_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Reserve battery level', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reserve_soc', - 'unique_id': '654321_reserve_soc', - 'unit_of_measurement': '%', - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.enpower_654321_reserve_battery_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Enpower 654321 Reserve battery level', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'number.enpower_654321_reserve_battery_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15.0', - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.nc1_fixture_cutoff_battery_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.nc1_fixture_cutoff_battery_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Cutoff battery level', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cutoff_battery_level', - 'unique_id': '654321_relay_NC1_soc_low', - 'unit_of_measurement': None, - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.nc1_fixture_cutoff_battery_level-state] - StateSnapshot({ - 'attributes': 
ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'NC1 Fixture Cutoff battery level', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'context': , - 'entity_id': 'number.nc1_fixture_cutoff_battery_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '25.0', - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.nc1_fixture_restore_battery_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.nc1_fixture_restore_battery_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restore battery level', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'restore_battery_level', - 'unique_id': '654321_relay_NC1_soc_high', - 'unit_of_measurement': None, - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.nc1_fixture_restore_battery_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'NC1 Fixture Restore battery level', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'context': , - 'entity_id': 'number.nc1_fixture_restore_battery_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '70.0', - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.nc2_fixture_cutoff_battery_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.nc2_fixture_cutoff_battery_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Cutoff battery level', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cutoff_battery_level', - 'unique_id': '654321_relay_NC2_soc_low', - 'unit_of_measurement': None, - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.nc2_fixture_cutoff_battery_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'NC2 Fixture Cutoff battery level', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'context': , - 'entity_id': 'number.nc2_fixture_cutoff_battery_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '30.0', - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.nc2_fixture_restore_battery_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.nc2_fixture_restore_battery_level', - 'has_entity_name': True, - 'hidden_by': None, - 
'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restore battery level', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'restore_battery_level', - 'unique_id': '654321_relay_NC2_soc_high', - 'unit_of_measurement': None, - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.nc2_fixture_restore_battery_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'NC2 Fixture Restore battery level', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'context': , - 'entity_id': 'number.nc2_fixture_restore_battery_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '70.0', - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.nc3_fixture_cutoff_battery_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.nc3_fixture_cutoff_battery_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Cutoff battery level', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cutoff_battery_level', - 'unique_id': '654321_relay_NC3_soc_low', - 'unit_of_measurement': None, - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.nc3_fixture_cutoff_battery_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'NC3 Fixture Cutoff battery level', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'context': , - 'entity_id': 'number.nc3_fixture_cutoff_battery_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '30.0', - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.nc3_fixture_restore_battery_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.nc3_fixture_restore_battery_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restore battery level', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'restore_battery_level', - 'unique_id': '654321_relay_NC3_soc_high', - 'unit_of_measurement': None, - }) -# --- -# name: test_number[envoy_metered_batt_relay][number.nc3_fixture_restore_battery_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'NC3 Fixture Restore battery level', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'context': , - 'entity_id': 'number.nc3_fixture_restore_battery_level', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': '70.0', - }) -# --- diff --git a/tests/components/enphase_envoy/snapshots/test_select.ambr b/tests/components/enphase_envoy/snapshots/test_select.ambr deleted file mode 100644 index f091879d9fc..00000000000 --- a/tests/components/enphase_envoy/snapshots/test_select.ambr +++ /dev/null @@ -1,811 +0,0 @@ -# serializer version: 1 -# name: test_select[envoy_eu_batt][select.envoy_1234_storage_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'backup', - 'self_consumption', - 'savings', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.envoy_1234_storage_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Storage mode', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_mode', - 'unique_id': '1234_storage_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[envoy_eu_batt][select.envoy_1234_storage_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Storage mode', - 'options': list([ - 'backup', - 'self_consumption', - 'savings', - ]), - }), - 'context': , - 'entity_id': 'select.envoy_1234_storage_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'self_consumption', - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.enpower_654321_storage_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'backup', - 'self_consumption', - 'savings', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.enpower_654321_storage_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Storage mode', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_mode', - 'unique_id': '654321_storage_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.enpower_654321_storage_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Enpower 654321 Storage mode', - 'options': list([ - 'backup', - 'self_consumption', - 'savings', - ]), - }), - 'context': , - 'entity_id': 'select.enpower_654321_storage_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'self_consumption', - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_generator_action-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.nc1_fixture_generator_action', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Generator action', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relay_generator_action', - 'unique_id': '654321_relay_NC1_generator_action', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_generator_action-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC1 Fixture Generator action', - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'context': , - 'entity_id': 'select.nc1_fixture_generator_action', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'not_powered', - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_grid_action-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.nc1_fixture_grid_action', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Grid action', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relay_grid_action', - 'unique_id': '654321_relay_NC1_grid_action', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_grid_action-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC1 Fixture Grid action', - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'context': , - 'entity_id': 'select.nc1_fixture_grid_action', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'not_powered', - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_microgrid_action-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.nc1_fixture_microgrid_action', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Microgrid action', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relay_microgrid_action', - 'unique_id': '654321_relay_NC1_microgrid_action', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_microgrid_action-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC1 Fixture Microgrid action', - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'context': , - 'entity_id': 'select.nc1_fixture_microgrid_action', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'not_powered', - }) -# --- -# name: 
test_select[envoy_metered_batt_relay][select.nc1_fixture_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'standard', - 'battery', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.nc1_fixture_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Mode', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relay_mode', - 'unique_id': '654321_relay_NC1_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC1 Fixture Mode', - 'options': list([ - 'standard', - 'battery', - ]), - }), - 'context': , - 'entity_id': 'select.nc1_fixture_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'standard', - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_generator_action-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.nc2_fixture_generator_action', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Generator action', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relay_generator_action', - 'unique_id': '654321_relay_NC2_generator_action', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_generator_action-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC2 Fixture Generator action', - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'context': , - 'entity_id': 'select.nc2_fixture_generator_action', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'not_powered', - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_grid_action-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.nc2_fixture_grid_action', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Grid action', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relay_grid_action', - 'unique_id': '654321_relay_NC2_grid_action', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_select[envoy_metered_batt_relay][select.nc2_fixture_grid_action-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC2 Fixture Grid action', - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'context': , - 'entity_id': 'select.nc2_fixture_grid_action', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'powered', - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_microgrid_action-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.nc2_fixture_microgrid_action', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Microgrid action', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relay_microgrid_action', - 'unique_id': '654321_relay_NC2_microgrid_action', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_microgrid_action-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC2 Fixture Microgrid action', - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'context': , - 'entity_id': 'select.nc2_fixture_microgrid_action', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'not_powered', - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'standard', - 'battery', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.nc2_fixture_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Mode', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relay_mode', - 'unique_id': '654321_relay_NC2_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC2 Fixture Mode', - 'options': list([ - 'standard', - 'battery', - ]), - }), - 'context': , - 'entity_id': 'select.nc2_fixture_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'standard', - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_generator_action-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.nc3_fixture_generator_action', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 
'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Generator action', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relay_generator_action', - 'unique_id': '654321_relay_NC3_generator_action', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_generator_action-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC3 Fixture Generator action', - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'context': , - 'entity_id': 'select.nc3_fixture_generator_action', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'powered', - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_grid_action-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.nc3_fixture_grid_action', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Grid action', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relay_grid_action', - 'unique_id': '654321_relay_NC3_grid_action', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_grid_action-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC3 Fixture Grid action', - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'context': , - 'entity_id': 'select.nc3_fixture_grid_action', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'not_powered', - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_microgrid_action-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.nc3_fixture_microgrid_action', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Microgrid action', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relay_microgrid_action', - 'unique_id': '654321_relay_NC3_microgrid_action', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_microgrid_action-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC3 Fixture Microgrid action', - 'options': list([ - 'powered', - 'not_powered', - 'schedule', - 'none', - ]), - }), - 'context': , - 'entity_id': 'select.nc3_fixture_microgrid_action', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'powered', - }) -# --- 
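[Editor's note] The `test_number.ambr` and `test_select.ambr` files in this diff are deleted wholesale; their per-entity `-entry`/`-state` blocks are the kind produced by asserting each registry entry and state against a named syrupy snapshot. The following is a minimal sketch of that pattern, assuming Home Assistant's standard `hass`, `entity_registry`, and `snapshot` test fixtures; the `setup_integration` fixture name is illustrative and not taken from this PR.

```python
# Sketch only: how per-entity "-entry"/"-state" snapshot pairs like the ones
# deleted above are typically generated with syrupy in Home Assistant tests.
# The setup fixture name is hypothetical; the registry/syrupy APIs are real.
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from syrupy.assertion import SnapshotAssertion


async def test_select(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    snapshot: SnapshotAssertion,
    setup_integration,  # hypothetical fixture that sets up the config entry
) -> None:
    """Snapshot every select entity's registry entry and state."""
    entries = [
        entry
        for entry in entity_registry.entities.values()
        if entry.domain == "select"
    ]
    for entry in entries:
        # Named sub-snapshots become the "# name: test_select[...][...-entry]"
        # and "[...-state]" blocks stored in the .ambr file.
        assert entry == snapshot(name=f"{entry.entity_id}-entry")
        assert hass.states.get(entry.entity_id) == snapshot(
            name=f"{entry.entity_id}-state"
        )
```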
-# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'standard', - 'battery', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.nc3_fixture_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Mode', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relay_mode', - 'unique_id': '654321_relay_NC3_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC3 Fixture Mode', - 'options': list([ - 'standard', - 'battery', - ]), - }), - 'context': , - 'entity_id': 'select.nc3_fixture_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'standard', - }) -# --- diff --git a/tests/components/enphase_envoy/snapshots/test_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_sensor.ambr index c43325a639d..e403886b096 100644 --- a/tests/components/enphase_envoy/snapshots/test_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_sensor.ambr @@ -1,7100 +1,3351 @@ # serializer version: 1 -# name: test_sensor[envoy][sensor.envoy_1234_current_power_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, +# name: test_sensor + list([ + EntityRegistryEntrySnapshot({ + 'aliases': set({ }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , + 'area_id': None, + 'capabilities': dict({ + 'state_class': , }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production', - 'unique_id': '1234_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy][sensor.envoy_1234_current_power_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power production', - 'icon': 'mdi:flash', - 'state_class': , + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production', + 'platform': 
'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '1234_production', 'unit_of_measurement': , }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy][sensor.envoy_1234_energy_production_last_seven_days-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, + EntityRegistryEntrySnapshot({ + 'aliases': set({ }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , + 'area_id': None, + 'capabilities': dict({ + 'state_class': , }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production', - 'unique_id': '1234_seven_days_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy][sensor.envoy_1234_energy_production_last_seven_days-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production last seven days', - 'icon': 'mdi:flash', + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '1234_daily_production', 'unit_of_measurement': , }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy][sensor.envoy_1234_energy_production_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, + EntityRegistryEntrySnapshot({ + 'aliases': set({ }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, 
+ 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production', - 'unique_id': '1234_daily_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy][sensor.envoy_1234_energy_production_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production today', - 'icon': 'mdi:flash', - 'state_class': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '1234_seven_days_production', 'unit_of_measurement': , }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy][sensor.envoy_1234_lifetime_energy_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, + EntityRegistryEntrySnapshot({ + 'aliases': set({ }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , + 'area_id': None, + 'capabilities': dict({ + 'state_class': , }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production', - 'unique_id': '1234_lifetime_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy][sensor.envoy_1234_lifetime_energy_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy production', - 'icon': 'mdi:flash', - 'state_class': , + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '1234_lifetime_production', 'unit_of_measurement': , 
}), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.001234', - }) -# --- -# name: test_sensor[envoy][sensor.inverter_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption', + 'unique_id': '1234_consumption', + 'unit_of_measurement': , }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption', + 'unique_id': '1234_daily_consumption', + 'unit_of_measurement': , }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption', + 'unique_id': '1234_seven_days_consumption', + 'unit_of_measurement': , }), - 'name': None, - 'options': dict({ + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , 
+ 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption', + 'unique_id': '1234_lifetime_consumption', + 'unit_of_measurement': , }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': None, - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy][sensor.inverter_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', - 'state_class': , + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '1234_production_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '1234_daily_production_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l1', + 'platform': 
'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '1234_seven_days_production_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '1234_lifetime_production_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '1234_production_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '1234_daily_production_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', 
+ 'unique_id': '1234_seven_days_production_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '1234_lifetime_production_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '1234_production_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '1234_daily_production_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '1234_seven_days_production_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': 
set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '1234_lifetime_production_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '1234_consumption_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '1234_daily_consumption_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '1234_seven_days_consumption_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '1234_lifetime_consumption_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '1234_consumption_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '1234_daily_consumption_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '1234_seven_days_consumption_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '1234_lifetime_consumption_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '1234_consumption_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '1234_daily_consumption_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '1234_seven_days_consumption_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '1234_lifetime_consumption_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption', + 'unique_id': '1234_lifetime_net_consumption', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production', + 'unique_id': '1234_lifetime_net_production', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption', + 'unique_id': '1234_net_consumption', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_frequency_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency', + 'unique_id': '1234_frequency', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage', + 'unique_id': '1234_voltage', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status', + 'unique_id': '1234_net_consumption_ct_metering_status', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags', + 'unique_id': '1234_net_consumption_ct_status_flags', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 
'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l1', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l1', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l2', + 
'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l2', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l2', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l3', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l3', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '1234_production_ct_metering_status', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '1234_production_ct_status_flags', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l1', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l1', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 
'unique_id': '1234_production_ct_metering_status_l2', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l2', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l3', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l3', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged', + 'unique_id': '1234_lifetime_battery_discharged', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged', + 'unique_id': '1234_lifetime_battery_charged', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_battery_discharge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge', + 'unique_id': '1234_battery_discharge', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage', + 'unique_id': '1234_storage_voltage', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status', + 'unique_id': '1234_storage_ct_metering_status', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags', + 'unique_id': '1234_storage_ct_status_flags', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '1234_lifetime_battery_discharged_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '1234_lifetime_battery_charged_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '1234_battery_discharge_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '1234_storage_voltage_l1', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '1234_storage_ct_metering_status_l1', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '1234_storage_ct_status_flags_l1', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '1234_lifetime_battery_discharged_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': 
, + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '1234_lifetime_battery_charged_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '1234_battery_discharge_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '1234_storage_voltage_l2', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '1234_storage_ct_metering_status_l2', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter 
status flags active storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '1234_storage_ct_status_flags_l2', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '1234_lifetime_battery_discharged_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '1234_lifetime_battery_charged_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '1234_battery_discharge_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l3', + 'platform': 
'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '1234_storage_voltage_l3', + 'unit_of_measurement': , + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '1234_storage_ct_metering_status_l3', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '1234_storage_ct_status_flags_l3', + 'unit_of_measurement': None, + }), + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', 'unit_of_measurement': , }), - 'context': , - 'entity_id': 'sensor.inverter_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensor[envoy][sensor.inverter_1_last_reported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1_last_reported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Last reported', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_reported', - 'unique_id': '1_last_reported', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy][sensor.inverter_1_last_reported-state] - StateSnapshot({ - 'attributes': 
ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Inverter 1 Last reported', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.inverter_1_last_reported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1970-01-01T00:00:01+00:00', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_balanced_net_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'balanced net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balanced_net_consumption', - 'unique_id': '1234_balanced_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_balanced_net_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.341', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_net_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption', - 'unique_id': '1234_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_net_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.101', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption', - 'unique_id': '1234_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_power_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production', - 'unique_id': '1234_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_power_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power production', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_consumption_last_seven_days-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': 
None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption', - 'unique_id': '1234_seven_days_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_consumption_last_seven_days-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption last seven days', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_consumption_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption', - 'unique_id': '1234_daily_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_consumption_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption today', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_production_last_seven_days-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production', - 'unique_id': '1234_seven_days_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_production_last_seven_days-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production last seven days', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_production_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production', - 'unique_id': '1234_daily_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_production_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production today', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_frequency_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency', - 'unique_id': '1234_frequency', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_frequency_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency net consumption CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.2', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_frequency_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 
'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_frequency', - 'unique_id': '1234_production_ct_frequency', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_frequency_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency production CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.1', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime balanced net energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_balanced_net_consumption', - 'unique_id': '1234_lifetime_balanced_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4.321', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption', - 'unique_id': '1234_lifetime_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.001234', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production', - 'unique_id': '1234_lifetime_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy production', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.001234', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_net_energy_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption', - 'unique_id': '1234_lifetime_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_net_energy_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.021234', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_net_energy_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 
'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production', - 'unique_id': '1234_lifetime_net_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_net_energy_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy production', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.022345', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags', - 'unique_id': '1234_net_consumption_ct_status_flags', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags', - 'unique_id': 
'1234_production_ct_status_flags', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_meter_status_flags_active_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active production CT', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_metering_status_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status', - 'unique_id': '1234_net_consumption_ct_metering_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_metering_status_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status net consumption CT', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_metering_status_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status', - 'unique_id': '1234_production_ct_metering_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_metering_status_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status production CT', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_net_consumption_ct_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 
'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Net consumption CT current', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_current', - 'unique_id': '1234_net_ct_current', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_net_consumption_ct_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Net consumption CT current', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_powerfactor', - 'unique_id': '1234_net_ct_powerfactor', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.21', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'powerfactor production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'production_ct_powerfactor', - 'unique_id': '1234_production_ct_powerfactor', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.11', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_production_ct_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_production_ct_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current', - 'unique_id': '1234_production_ct_current', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_production_ct_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_production_ct_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage', - 'unique_id': '1234_voltage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage net consumption CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: 
test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage', - 'unique_id': '1234_production_ct_voltage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.inverter_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': None, - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.inverter_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.inverter_1_last_reported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1_last_reported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Last reported', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_reported', - 'unique_id': '1_last_reported', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_1p_metered][sensor.inverter_1_last_reported-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 
'timestamp', - 'friendly_name': 'Inverter 1 Last reported', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.inverter_1_last_reported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1970-01-01T00:00:01+00:00', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_apparent_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.encharge_123456_apparent_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Apparent power', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '123456_apparent_power_mva', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_apparent_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'apparent_power', - 'friendly_name': 'Encharge 123456 Apparent power', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.encharge_123456_apparent_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.encharge_123456_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '123456_soc', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Encharge 123456 Battery', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.encharge_123456_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_last_reported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.encharge_123456_last_reported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last reported', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_reported', - 'unique_id': '123456_last_reported', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_last_reported-state] - StateSnapshot({ - 
'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Encharge 123456 Last reported', - }), - 'context': , - 'entity_id': 'sensor.encharge_123456_last_reported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-05-04T06:29:33+00:00', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.encharge_123456_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '123456_real_power_mw', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Encharge 123456 Power', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.encharge_123456_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.encharge_123456_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '123456_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.encharge_123456_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Encharge 123456 Temperature', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.encharge_123456_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '16', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_available_battery_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_available_battery_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Available battery energy', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'available_energy', - 'unique_id': '1234_available_energy', - 'unit_of_measurement': , - }) -# --- -# name: 
test_sensor[envoy_eu_batt][sensor.envoy_1234_available_battery_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Available battery energy', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_available_battery_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '140', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_balanced_net_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'balanced net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balanced_net_consumption', - 'unique_id': '1234_balanced_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_balanced_net_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.341', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Battery', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1234_battery_level', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Envoy 1234 Battery', - 'icon': 'mdi:flash', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_battery_capacity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_battery_capacity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': 
, - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Battery capacity', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'max_capacity', - 'unique_id': '1234_max_capacity', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_battery_capacity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Battery capacity', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_battery_capacity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3500', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption', - 'unique_id': '1234_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.101', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption l1', - 'icon': 
'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.021', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.031', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_net_power_consumption_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.051', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption', - 'unique_id': '1234_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_power_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production', - 'unique_id': '1234_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_current_power_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power production', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_consumption_last_seven_days-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'seven_days_consumption', - 'unique_id': '1234_seven_days_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_consumption_last_seven_days-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption last seven days', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_consumption_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption', - 'unique_id': '1234_daily_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_consumption_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption today', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_production_last_seven_days-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production', - 'unique_id': '1234_seven_days_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_production_last_seven_days-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production last seven days', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: 
test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_production_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production', - 'unique_id': '1234_daily_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_energy_production_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production today', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency', - 'unique_id': '1234_frequency', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency net consumption CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.2', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 
'original_name': 'Frequency net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '1234_frequency_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency net consumption CT l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.2', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '1234_frequency_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency net consumption CT l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.2', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '1234_frequency_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_net_consumption_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency net consumption CT l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': '50.2', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_frequency', - 'unique_id': '1234_production_ct_frequency', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency production CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.1', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_frequency_phase', - 'unique_id': '1234_production_ct_frequency_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency production CT l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.1', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 
'original_icon': 'mdi:flash', - 'original_name': 'Frequency production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_frequency_phase', - 'unique_id': '1234_production_ct_frequency_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency production CT l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.1', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_frequency_phase', - 'unique_id': '1234_production_ct_frequency_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_frequency_production_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency production CT l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.1', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime balanced net energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_balanced_net_consumption', - 'unique_id': '1234_lifetime_balanced_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', - 'icon': 
'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4.321', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_energy_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption', - 'unique_id': '1234_lifetime_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_energy_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.001234', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_energy_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production', - 'unique_id': '1234_lifetime_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_energy_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy production', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.001234', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 
'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption', - 'unique_id': '1234_lifetime_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.021234', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '1234_lifetime_net_consumption_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.212341', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption 
l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '1234_lifetime_net_consumption_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.212342', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '1234_lifetime_net_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.212343', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production', - 'unique_id': '1234_lifetime_net_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy production', - 
'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.022345', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '1234_lifetime_net_production_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy production l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.223451', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '1234_lifetime_net_production_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy production l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.223452', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), 
- 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '1234_lifetime_net_production_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_lifetime_net_energy_production_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy production l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.223453', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags', - 'unique_id': '1234_net_consumption_ct_status_flags', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': 
'1234_net_consumption_ct_status_flags_l1', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l1', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '1234_net_consumption_ct_status_flags_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l2', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '1234_net_consumption_ct_status_flags_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l3', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 
'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags', - 'unique_id': '1234_production_ct_status_flags', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active production CT', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '1234_production_ct_status_flags_l1', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active production CT l1', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '1234_production_ct_status_flags_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active production CT l2', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '1234_production_ct_status_flags_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active production CT l3', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status', - 'unique_id': '1234_net_consumption_ct_metering_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status net consumption CT', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.envoy_1234_metering_status_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '1234_net_consumption_ct_metering_status_l1', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status net consumption CT l1', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '1234_net_consumption_ct_metering_status_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status net consumption CT l2', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '1234_net_consumption_ct_metering_status_l3', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status net consumption CT l3', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status', - 'unique_id': '1234_production_ct_metering_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status production CT', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l1', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status production CT l1', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 
'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status production CT l2', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_metering_status_production_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status production CT l3', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Net consumption CT current', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_current', - 'unique_id': 
'1234_net_ct_current', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Net consumption CT current', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Net consumption CT current l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_current_phase', - 'unique_id': '1234_net_ct_current_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Net consumption CT current l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Net consumption CT current l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_current_phase', - 'unique_id': '1234_net_ct_current_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Net consumption CT current l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: 
test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Net consumption CT current l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_current_phase', - 'unique_id': '1234_net_ct_current_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_net_consumption_ct_current_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Net consumption CT current l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_powerfactor', - 'unique_id': '1234_net_ct_powerfactor', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.21', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': 
, - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_powerfactor_phase', - 'unique_id': '1234_net_ct_powerfactor_l1', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.22', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_powerfactor_phase', - 'unique_id': '1234_net_ct_powerfactor_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.23', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_powerfactor_phase', - 'unique_id': '1234_net_ct_powerfactor_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 
'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.24', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'powerfactor production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_powerfactor', - 'unique_id': '1234_production_ct_powerfactor', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.11', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_powerfactor_phase', - 'unique_id': '1234_production_ct_powerfactor_l1', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.12', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 
'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_powerfactor_phase', - 'unique_id': '1234_production_ct_powerfactor_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.13', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_powerfactor_phase', - 'unique_id': '1234_production_ct_powerfactor_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.14', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_production_ct_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current', - 'unique_id': '1234_production_ct_current', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current', - 'icon': 'mdi:flash', - 'state_class': , - 
'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_production_ct_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current_phase', - 'unique_id': '1234_production_ct_current_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current_phase', - 'unique_id': '1234_production_ct_current_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.envoy_1234_production_ct_current_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current_phase', - 'unique_id': '1234_production_ct_current_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_production_ct_current_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_reserve_battery_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Reserve battery energy', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reserve_energy', - 'unique_id': '1234_reserve_energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_reserve_battery_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Reserve battery energy', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_reserve_battery_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_reserve_battery_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Reserve battery level', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reserve_soc', - 'unique_id': '1234_reserve_soc', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_reserve_battery_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Envoy 1234 Reserve battery level', - 'icon': 'mdi:flash', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_reserve_battery_level', - 
'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage', - 'unique_id': '1234_voltage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage net consumption CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage net consumption CT l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_net_consumption_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage', - 'unique_id': '1234_production_ct_voltage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct-state] - StateSnapshot({ 
- 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage_phase', - 'unique_id': '1234_production_ct_voltage_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage_phase', - 'unique_id': '1234_production_ct_voltage_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage_phase', - 'unique_id': '1234_production_ct_voltage_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_voltage_production_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.inverter_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': None, - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.inverter_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.inverter_1_last_reported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1_last_reported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Last reported', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_reported', - 'unique_id': '1_last_reported', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_eu_batt][sensor.inverter_1_last_reported-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Inverter 1 Last reported', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.inverter_1_last_reported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 
'1970-01-01T00:00:01+00:00', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_apparent_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.encharge_123456_apparent_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Apparent power', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '123456_apparent_power_mva', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_apparent_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'apparent_power', - 'friendly_name': 'Encharge 123456 Apparent power', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.encharge_123456_apparent_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.encharge_123456_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '123456_soc', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Encharge 123456 Battery', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.encharge_123456_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_last_reported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.encharge_123456_last_reported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last reported', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_reported', - 'unique_id': '123456_last_reported', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_last_reported-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Encharge 123456 Last reported', - }), - 'context': , - 'entity_id': 
'sensor.encharge_123456_last_reported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2023-09-26T23:04:07+00:00', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.encharge_123456_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '123456_real_power_mw', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Encharge 123456 Power', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.encharge_123456_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.encharge_123456_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '123456_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Encharge 123456 Temperature', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.encharge_123456_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '29', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.enpower_654321_last_reported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.enpower_654321_last_reported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last reported', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_reported', - 'unique_id': '654321_last_reported', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.enpower_654321_last_reported-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Enpower 654321 
Last reported', - }), - 'context': , - 'entity_id': 'sensor.enpower_654321_last_reported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2023-09-26T23:04:07+00:00', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.enpower_654321_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.enpower_654321_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '654321_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.enpower_654321_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Enpower 654321 Temperature', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.enpower_654321_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '26', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_available_battery_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_available_battery_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Available battery energy', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'available_energy', - 'unique_id': '1234_available_energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_available_battery_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Available battery energy', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_available_battery_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '525', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'balanced net power consumption', - 'platform': 
'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balanced_net_consumption', - 'unique_id': '1234_balanced_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.341', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'balanced net power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balanced_net_consumption_phase', - 'unique_id': '1234_balanced_net_consumption_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '12.341', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'balanced net power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balanced_net_consumption_phase', - 'unique_id': '1234_balanced_net_consumption_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net 
power consumption l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '22.341', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'balanced net power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balanced_net_consumption_phase', - 'unique_id': '1234_balanced_net_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '32.341', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Battery', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1234_battery_level', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Envoy 1234 Battery', - 'icon': 'mdi:flash', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery_capacity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_battery_capacity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Battery capacity', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'max_capacity', - 'unique_id': '1234_max_capacity', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery_capacity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Battery capacity', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_battery_capacity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3500', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_battery_discharge', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge', - 'unique_id': '1234_battery_discharge', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge-state] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 'unit_of_measurement': None, + }), + ]) +# --- +# name: test_sensor[sensor.envoy_1234_current_battery_discharge-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', @@ -7111,222 +3362,16 @@ 'state': '0.103', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current 
battery discharge l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '1234_battery_discharge_l1', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_current_battery_discharge_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current battery discharge l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.022', - }) +# name: test_sensor[sensor.envoy_1234_current_battery_discharge_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '1234_battery_discharge_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_current_battery_discharge_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current battery discharge l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.033', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '1234_battery_discharge_l3', - 'unit_of_measurement': , - }) -# --- -# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current battery discharge l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.053', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption', - 'unique_id': '1234_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption-state] +# name: test_sensor[sensor.envoy_1234_current_net_power_consumption-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', @@ -7343,222 +3388,16 @@ 'state': '0.101', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l1', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_current_net_power_consumption_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.021', - }) +# name: test_sensor[sensor.envoy_1234_current_net_power_consumption_l2-state] + None # --- -# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_current_net_power_consumption_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.031', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.051', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.envoy_1234_current_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption', - 'unique_id': '1234_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption-state] +# name: test_sensor[sensor.envoy_1234_current_power_consumption-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', @@ -7575,222 +3414,16 @@ 'state': '1.234', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '1234_consumption_l1', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_current_power_consumption_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power consumption l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.324', - }) +# name: test_sensor[sensor.envoy_1234_current_power_consumption_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'current_power_consumption_phase', - 'unique_id': '1234_consumption_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_current_power_consumption_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power consumption l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.324', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '1234_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power consumption l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.324', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production', - 'unique_id': '1234_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production-state] +# name: test_sensor[sensor.envoy_1234_current_power_production-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', @@ -7807,220 +3440,16 @@ 'state': '1.234', }) # --- -# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '1234_production_l1', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_current_power_production_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power production l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_production_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) +# name: test_sensor[sensor.envoy_1234_current_power_production_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '1234_production_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_current_power_production_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power production l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_production_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.234', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '1234_production_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power production l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_production_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.234', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption', - 'unique_id': '1234_seven_days_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days-state] +# name: test_sensor[sensor.envoy_1234_energy_consumption_last_seven_days-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -8036,213 +3465,16 @@ 'state': '1.234', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '1234_seven_days_consumption_l1', 
- 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_energy_consumption_last_seven_days_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption last seven days l1', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.321', - }) +# name: test_sensor[sensor.envoy_1234_energy_consumption_last_seven_days_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '1234_seven_days_consumption_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_energy_consumption_last_seven_days_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption last seven days l2', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.321', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '1234_seven_days_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption last seven days l3', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.321', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption', - 'unique_id': '1234_daily_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today-state] +# name: test_sensor[sensor.envoy_1234_energy_consumption_today-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -8259,220 +3491,16 @@ 'state': '1.234', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '1234_daily_consumption_l1', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_energy_consumption_today_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption today l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.323', - }) +# name: test_sensor[sensor.envoy_1234_energy_consumption_today_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': 
, - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '1234_daily_consumption_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_energy_consumption_today_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption today l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.323', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '1234_daily_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption today l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.323', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 
'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production', - 'unique_id': '1234_seven_days_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days-state] +# name: test_sensor[sensor.envoy_1234_energy_production_last_seven_days-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -8488,213 +3516,16 @@ 'state': '1.234', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '1234_seven_days_production_l1', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_energy_production_last_seven_days_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production last seven days l1', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.231', - }) +# name: test_sensor[sensor.envoy_1234_energy_production_last_seven_days_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '1234_seven_days_production_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_energy_production_last_seven_days_l3-state] + None # --- -# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production last seven days l2', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.231', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '1234_seven_days_production_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production last seven days l3', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.231', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production', - 'unique_id': '1234_daily_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today-state] +# name: test_sensor[sensor.envoy_1234_energy_production_today-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -8711,1114 +3542,28 @@ 'state': '1.234', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , 
- 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '1234_daily_production_l1', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_energy_production_today_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production today l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_today_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.233', - }) +# name: test_sensor[sensor.envoy_1234_energy_production_today_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '1234_daily_production_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_energy_production_today_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production today l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_today_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.233', - }) +# name: test_sensor[sensor.envoy_1234_frequency_net_consumption_ct-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 
'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '1234_daily_production_l3', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_frequency_net_consumption_ct_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production today l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_today_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.233', - }) +# name: test_sensor[sensor.envoy_1234_frequency_net_consumption_ct_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency', - 'unique_id': '1234_frequency', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_frequency_net_consumption_ct_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency net consumption CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.2', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'net_ct_frequency_phase', - 'unique_id': '1234_frequency_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency net consumption CT l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.2', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '1234_frequency_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency net consumption CT l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.2', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '1234_frequency_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency net consumption CT l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.2', - }) -# --- -# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_frequency', - 'unique_id': '1234_production_ct_frequency', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency production CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.1', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_frequency_phase', - 'unique_id': '1234_production_ct_frequency_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency production CT l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.1', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 
'original_name': 'Frequency production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_frequency_phase', - 'unique_id': '1234_production_ct_frequency_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency production CT l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.1', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_frequency_phase', - 'unique_id': '1234_production_ct_frequency_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency production CT l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.1', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_frequency', - 'unique_id': '1234_storage_ct_frequency', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency storage CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_storage_ct', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': '50.3', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_frequency_phase', - 'unique_id': '1234_storage_ct_frequency_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency storage CT l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.3', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_frequency_phase', - 'unique_id': '1234_storage_ct_frequency_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency storage CT l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.2', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 
'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_frequency_phase', - 'unique_id': '1234_storage_ct_frequency_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency storage CT l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.2', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime balanced net energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_balanced_net_consumption', - 'unique_id': '1234_lifetime_balanced_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4.321', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime balanced net energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_balanced_net_consumption_phase', - 'unique_id': '1234_lifetime_balanced_net_consumption_l1', - 'unit_of_measurement': , - }) -# --- -# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.321', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime balanced net energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_balanced_net_consumption_phase', - 'unique_id': '1234_lifetime_balanced_net_consumption_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.321', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime balanced net energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_balanced_net_consumption_phase', - 'unique_id': '1234_lifetime_balanced_net_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l3', - 'icon': 
'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.321', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged', - 'unique_id': '1234_lifetime_battery_charged', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged-state] +# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_charged-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -9835,222 +3580,16 @@ 'state': '0.032345', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '1234_lifetime_battery_charged_l1', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_charged_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime battery energy charged l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.323451', - }) +# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_charged_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': 
dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '1234_lifetime_battery_charged_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_charged_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime battery energy charged l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.323452', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '1234_lifetime_battery_charged_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime battery energy charged l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.323453', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': 
set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged', - 'unique_id': '1234_lifetime_battery_discharged', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged-state] +# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_discharged-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -10067,222 +3606,16 @@ 'state': '0.031234', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '1234_lifetime_battery_discharged_l1', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_discharged_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.312341', - }) +# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_discharged_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '1234_lifetime_battery_discharged_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_discharged_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.312342', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '1234_lifetime_battery_discharged_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.312343', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption', - 'unique_id': '1234_lifetime_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption-state] +# name: test_sensor[sensor.envoy_1234_lifetime_energy_consumption-state] 
StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -10299,222 +3632,16 @@ 'state': '0.001234', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '1234_lifetime_consumption_l1', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_lifetime_energy_consumption_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy consumption l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.001322', - }) +# name: test_sensor[sensor.envoy_1234_lifetime_energy_consumption_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '1234_lifetime_consumption_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_lifetime_energy_consumption_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy consumption l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.002322', - }) -# --- -# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '1234_lifetime_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy consumption l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.003322', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production', - 'unique_id': '1234_lifetime_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production-state] +# name: test_sensor[sensor.envoy_1234_lifetime_energy_production-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -10531,222 +3658,16 @@ 'state': '0.001234', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 
'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '1234_lifetime_production_l1', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_lifetime_energy_production_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy production l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.001232', - }) +# name: test_sensor[sensor.envoy_1234_lifetime_energy_production_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '1234_lifetime_production_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_lifetime_energy_production_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy production l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.002232', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'lifetime_production_phase', - 'unique_id': '1234_lifetime_production_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy production l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.003232', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption', - 'unique_id': '1234_lifetime_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption-state] +# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_consumption-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -10763,222 +3684,16 @@ 'state': '0.021234', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '1234_lifetime_net_consumption_l1', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_consumption_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': '0.212341', - }) +# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_consumption_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '1234_lifetime_net_consumption_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_consumption_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.212342', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '1234_lifetime_net_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.212343', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 
'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production', - 'unique_id': '1234_lifetime_net_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production-state] +# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_production-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -10995,3626 +3710,112 @@ 'state': '0.022345', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '1234_lifetime_net_production_l1', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_production_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy production l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.223451', - }) +# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_production_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), 
- 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '1234_lifetime_net_production_l2', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_production_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy production l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.223452', - }) +# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '1234_lifetime_net_production_l3', - 'unit_of_measurement': , - }) +# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy production l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.223453', - }) +# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags 
active net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags', - 'unique_id': '1234_net_consumption_ct_status_flags', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) +# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_production_ct-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '1234_net_consumption_ct_status_flags_l1', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_production_ct_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l1', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) +# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_production_ct_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '1234_net_consumption_ct_status_flags_l2', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_production_ct_l3-state] 
+ None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l2', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) +# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_storage_ct-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '1234_net_consumption_ct_status_flags_l3', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_storage_ct_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l3', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) +# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_storage_ct_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags', - 'unique_id': '1234_production_ct_status_flags', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_storage_ct_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active production CT', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', - 
'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) +# name: test_sensor[sensor.envoy_1234_metering_status_net_consumption_ct-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '1234_production_ct_status_flags_l1', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_metering_status_net_consumption_ct_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active production CT l1', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) +# name: test_sensor[sensor.envoy_1234_metering_status_net_consumption_ct_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '1234_production_ct_status_flags_l2', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_metering_status_net_consumption_ct_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active production CT l2', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) +# name: test_sensor[sensor.envoy_1234_metering_status_production_ct-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '1234_production_ct_status_flags_l3', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_metering_status_production_ct_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active production CT l3', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) +# name: test_sensor[sensor.envoy_1234_metering_status_production_ct_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags', - 'unique_id': '1234_storage_ct_status_flags', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_metering_status_production_ct_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active storage CT', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) +# name: test_sensor[sensor.envoy_1234_metering_status_storage_ct-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l1', - 'platform': 'enphase_envoy', - 
'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '1234_storage_ct_status_flags_l1', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_metering_status_storage_ct_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active storage CT l1', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) +# name: test_sensor[sensor.envoy_1234_metering_status_storage_ct_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '1234_storage_ct_status_flags_l2', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_metering_status_storage_ct_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active storage CT l2', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) +# name: test_sensor[sensor.envoy_1234_voltage_net_consumption_ct-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '1234_storage_ct_status_flags_l3', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_voltage_net_consumption_ct_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status 
flags active storage CT l3', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) +# name: test_sensor[sensor.envoy_1234_voltage_net_consumption_ct_l2-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status', - 'unique_id': '1234_net_consumption_ct_metering_status', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_voltage_net_consumption_ct_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status net consumption CT', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) +# name: test_sensor[sensor.envoy_1234_voltage_storage_ct-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '1234_net_consumption_ct_metering_status_l1', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_voltage_storage_ct_l1-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status net consumption CT l1', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) +# name: test_sensor[sensor.envoy_1234_voltage_storage_ct_l2-state] + None # --- -# 
name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '1234_net_consumption_ct_metering_status_l2', - 'unit_of_measurement': None, - }) +# name: test_sensor[sensor.envoy_1234_voltage_storage_ct_l3-state] + None # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status net consumption CT l2', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '1234_net_consumption_ct_metering_status_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status net consumption CT l3', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 
'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status', - 'unique_id': '1234_production_ct_metering_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status production CT', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l1', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status production CT l1', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status production CT l2', - 
'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status production CT l3', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status', - 'unique_id': '1234_storage_ct_metering_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status storage CT', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l1', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '1234_storage_ct_metering_status_l1', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status storage CT l1', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '1234_storage_ct_metering_status_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status storage CT l2', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '1234_storage_ct_metering_status_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 
Metering status storage CT l3', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Net consumption CT current', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_current', - 'unique_id': '1234_net_ct_current', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Net consumption CT current', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Net consumption CT current l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_current_phase', - 'unique_id': '1234_net_ct_current_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Net consumption CT current l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': 
None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Net consumption CT current l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_current_phase', - 'unique_id': '1234_net_ct_current_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Net consumption CT current l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Net consumption CT current l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_current_phase', - 'unique_id': '1234_net_ct_current_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Net consumption CT current l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor net consumption CT', - 'platform': 'enphase_envoy', - 
'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_powerfactor', - 'unique_id': '1234_net_ct_powerfactor', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.21', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_powerfactor_phase', - 'unique_id': '1234_net_ct_powerfactor_l1', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.22', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_powerfactor_phase', - 'unique_id': '1234_net_ct_powerfactor_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 
'state': '0.23', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_powerfactor_phase', - 'unique_id': '1234_net_ct_powerfactor_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.24', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'powerfactor production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_powerfactor', - 'unique_id': '1234_production_ct_powerfactor', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.11', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 
'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_powerfactor_phase', - 'unique_id': '1234_production_ct_powerfactor_l1', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.12', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_powerfactor_phase', - 'unique_id': '1234_production_ct_powerfactor_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.13', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_powerfactor_phase', - 'unique_id': '1234_production_ct_powerfactor_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 
'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.14', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_powerfactor', - 'unique_id': '1234_storage_ct_powerfactor', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.23', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_powerfactor_phase', - 'unique_id': '1234_storage_ct_powerfactor_l1', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT l1', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.32', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 
'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_powerfactor_phase', - 'unique_id': '1234_storage_ct_powerfactor_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT l2', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.23', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_powerfactor_phase', - 'unique_id': '1234_storage_ct_powerfactor_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT l3', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.24', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_production_ct_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current', - 'unique_id': '1234_production_ct_current', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current', - 'icon': 'mdi:flash', - 'state_class': , - 
'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_production_ct_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current_phase', - 'unique_id': '1234_production_ct_current_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current_phase', - 'unique_id': '1234_production_ct_current_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': 
None, - 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current_phase', - 'unique_id': '1234_production_ct_current_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Reserve battery energy', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reserve_energy', - 'unique_id': '1234_reserve_energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Reserve battery energy', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '526', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_reserve_battery_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Reserve battery level', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reserve_soc', - 'unique_id': '1234_reserve_soc', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Envoy 1234 Reserve battery level', - 'icon': 'mdi:flash', - 'unit_of_measurement': '%', - 
}), - 'context': , - 'entity_id': 'sensor.envoy_1234_reserve_battery_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_storage_ct_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Storage CT current', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_current', - 'unique_id': '1234_storage_ct_current', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Storage CT current', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_storage_ct_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.4', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_storage_ct_current_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Storage CT current l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_current_phase', - 'unique_id': '1234_storage_ct_current_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Storage CT current l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_storage_ct_current_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.4', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_storage_ct_current_l2', - 'has_entity_name': True, - 'hidden_by': 
None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Storage CT current l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_current_phase', - 'unique_id': '1234_storage_ct_current_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Storage CT current l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_storage_ct_current_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_storage_ct_current_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Storage CT current l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_current_phase', - 'unique_id': '1234_storage_ct_current_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Storage CT current l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_storage_ct_current_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage', - 'unique_id': '1234_voltage', - 'unit_of_measurement': , - }) -# --- -# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage net consumption CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage net consumption CT l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage', - 'unique_id': '1234_production_ct_voltage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 
'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage_phase', - 'unique_id': '1234_production_ct_voltage_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage_phase', - 'unique_id': '1234_production_ct_voltage_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage_phase', - 'unique_id': '1234_production_ct_voltage_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l3-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage', - 'unique_id': '1234_storage_voltage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage storage CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '113', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '1234_storage_voltage_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage storage CT l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '113', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '1234_storage_voltage_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage storage CT l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '1234_storage_voltage_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage storage CT l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.inverter_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': None, - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.inverter_1-state] +# name: 
test_sensor[sensor.inverter_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', @@ -14631,6567 +3832,6 @@ 'state': '1', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.inverter_1_last_reported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1_last_reported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Last reported', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_reported', - 'unique_id': '1_last_reported', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_metered_batt_relay][sensor.inverter_1_last_reported-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Inverter 1 Last reported', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.inverter_1_last_reported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1970-01-01T00:00:01+00:00', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'balanced net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balanced_net_consumption', - 'unique_id': '1234_balanced_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.341', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 
'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'balanced net power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balanced_net_consumption_phase', - 'unique_id': '1234_balanced_net_consumption_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '12.341', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'balanced net power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balanced_net_consumption_phase', - 'unique_id': '1234_balanced_net_consumption_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '22.341', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'balanced net power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balanced_net_consumption_phase', - 'unique_id': '1234_balanced_net_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '32.341', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption', - 'unique_id': '1234_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.101', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.021', - }) -# --- -# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.031', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.051', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': 
, - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption', - 'unique_id': '1234_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '1234_consumption_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power consumption l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.324', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '1234_consumption_l2', - 'unit_of_measurement': , - }) -# --- -# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power consumption l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.324', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '1234_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power consumption l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.324', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production', - 'unique_id': '1234_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power production', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '1234_production_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power production l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_production_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '1234_production_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power production l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_production_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.234', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 
'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '1234_production_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power production l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_production_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.234', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption', - 'unique_id': '1234_seven_days_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption last seven days', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '1234_seven_days_consumption_l1', - 'unit_of_measurement': , - }) -# --- -# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption last seven days l1', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.321', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '1234_seven_days_consumption_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption last seven days l2', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.321', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '1234_seven_days_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption last seven days l3', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.321', - }) -# 
--- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption', - 'unique_id': '1234_daily_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption today', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '1234_daily_consumption_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption today l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.323', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': 
dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '1234_daily_consumption_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption today l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.323', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '1234_daily_consumption_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption today l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.323', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_last_seven_days-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production', - 'unique_id': '1234_seven_days_production', - 'unit_of_measurement': , - }) -# --- -# name: 
test_sensor[envoy_nobatt_metered_3p]: EntityRegistryEntrySnapshot (-entry) and StateSnapshot (-state) blocks for the enphase_envoy sensors listed below.
#  Each sensor has a summary snapshot plus per-phase l1/l2/l3 snapshots (phase variants use the matching *_phase translation keys).
#  All entries use icon mdi:flash; energy and frequency sensors carry the corresponding device class, the metering status sensors are enums.
#  Recorded states are given as the summary value, then l1 / l2 / l3:
#   sensor.envoy_1234_energy_production_last_seven_days (seven_days_production): 1.234, then 1.231 / 2.231 / 3.231
#   sensor.envoy_1234_energy_production_today (daily_production): 1.234, then 1.233 / 2.233 / 3.233
#   sensor.envoy_1234_frequency_net_consumption_ct (net_ct_frequency): 50.2, then 50.2 / 50.2 / 50.2
#   sensor.envoy_1234_frequency_production_ct (production_ct_frequency): 50.1, then 50.1 / 50.1 / 50.1
#   sensor.envoy_1234_lifetime_balanced_net_energy_consumption (lifetime_balanced_net_consumption): 4.321, then 1.321 / 2.321 / 3.321
#   sensor.envoy_1234_lifetime_energy_consumption (lifetime_consumption): 0.001234, then 0.001322 / 0.002322 / 0.003322
#   sensor.envoy_1234_lifetime_energy_production (lifetime_production): 0.001234, then 0.001232 / 0.002232 / 0.003232
#   sensor.envoy_1234_lifetime_net_energy_consumption (lifetime_net_consumption): 0.021234, then 0.212341 / 0.212342 / 0.212343
#   sensor.envoy_1234_lifetime_net_energy_production (lifetime_net_production): 0.022345, then 0.223451 / 0.223452 / 0.223453
#   sensor.envoy_1234_meter_status_flags_active_net_consumption_ct (net_ct_status_flags): 0, then 0 / 0 / 0
#   sensor.envoy_1234_meter_status_flags_active_production_ct (production_ct_status_flags): 2, then 1 / 1 / 0
#   sensor.envoy_1234_metering_status_net_consumption_ct (net_ct_metering_status): normal, then normal / normal / normal
#   sensor.envoy_1234_metering_status_production_ct (production_ct_metering_status): normal, then normal for l1; the l2 and l3 entries continue on the lines that follow
'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status production CT l2', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status production CT l3', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Net consumption CT current', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_current', - 'unique_id': '1234_net_ct_current', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 
'friendly_name': 'Envoy 1234 Net consumption CT current', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Net consumption CT current l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_current_phase', - 'unique_id': '1234_net_ct_current_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Net consumption CT current l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Net consumption CT current l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_current_phase', - 'unique_id': '1234_net_ct_current_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Net consumption CT current l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - 
}), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Net consumption CT current l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_current_phase', - 'unique_id': '1234_net_ct_current_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Net consumption CT current l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.3', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_powerfactor', - 'unique_id': '1234_net_ct_powerfactor', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.21', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'net_ct_powerfactor_phase', - 'unique_id': '1234_net_ct_powerfactor_l1', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.22', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_powerfactor_phase', - 'unique_id': '1234_net_ct_powerfactor_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.23', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_powerfactor_phase', - 'unique_id': '1234_net_ct_powerfactor_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.24', - }) 
-# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'powerfactor production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_powerfactor', - 'unique_id': '1234_production_ct_powerfactor', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.11', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_powerfactor_phase', - 'unique_id': '1234_production_ct_powerfactor_l1', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.12', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 
'mdi:flash', - 'original_name': 'Powerfactor production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_powerfactor_phase', - 'unique_id': '1234_production_ct_powerfactor_l2', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.13', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Powerfactor production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_powerfactor_phase', - 'unique_id': '1234_production_ct_powerfactor_l3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.14', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_production_ct_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current', - 'unique_id': '1234_production_ct_current', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 
'sensor.envoy_1234_production_ct_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current_phase', - 'unique_id': '1234_production_ct_current_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current_phase', - 'unique_id': '1234_production_ct_current_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current_phase', - 'unique_id': '1234_production_ct_current_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage', - 'unique_id': '1234_voltage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage net consumption CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l1', - 
'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage net consumption CT l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '112', - }) -# --- -# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage', - 'unique_id': '1234_production_ct_voltage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage_phase', - 'unique_id': '1234_production_ct_voltage_l1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT l1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 
'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage_phase', - 'unique_id': '1234_production_ct_voltage_l2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT l2', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage_phase', - 'unique_id': '1234_production_ct_voltage_l3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT l3', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': None, - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_1', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1_last_reported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1_last_reported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Last reported', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_reported', - 'unique_id': '1_last_reported', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1_last_reported-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Inverter 1 Last reported', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.inverter_1_last_reported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1970-01-01T00:00:01+00:00', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_balanced_net_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'balanced net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balanced_net_consumption', - 'unique_id': '1234_balanced_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_balanced_net_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.341', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_current_power_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 
'mdi:flash', - 'original_name': 'Current power production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production', - 'unique_id': '1234_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_current_power_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power production', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_energy_production_last_seven_days-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production', - 'unique_id': '1234_seven_days_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_energy_production_last_seven_days-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production last seven days', - 'icon': 'mdi:flash', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_energy_production_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production', - 'unique_id': '1234_daily_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_energy_production_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy production today', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 
'context': , - 'entity_id': 'sensor.envoy_1234_energy_production_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_frequency_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_frequency', - 'unique_id': '1234_production_ct_frequency', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_frequency_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'frequency', - 'friendly_name': 'Envoy 1234 Frequency production CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_frequency_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.1', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime balanced net energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_balanced_net_consumption', - 'unique_id': '1234_lifetime_balanced_net_consumption', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4.321', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_energy_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 
'entity_id': 'sensor.envoy_1234_lifetime_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production', - 'unique_id': '1234_lifetime_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_energy_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy production', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.001234', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags', - 'unique_id': '1234_production_ct_status_flags', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_meter_status_flags_active_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Meter status flags active production CT', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_metering_status_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status', - 'unique_id': '1234_production_ct_metering_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_metering_status_production_ct-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Envoy 1234 Metering status production CT', - 'icon': 'mdi:flash', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'normal', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_powerfactor_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'powerfactor production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_powerfactor', - 'unique_id': '1234_production_ct_powerfactor', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_powerfactor_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', - 'icon': 'mdi:flash', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.11', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_production_ct_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_production_ct_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Production CT current', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_current', - 'unique_id': '1234_production_ct_current', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_production_ct_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Envoy 1234 Production CT current', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_production_ct_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.2', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_voltage_production_ct-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_voltage', - 'unique_id': '1234_production_ct_voltage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_voltage_production_ct-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Envoy 1234 Voltage production CT', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_voltage_production_ct', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '111', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': None, - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1_last_reported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1_last_reported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Last reported', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_reported', - 'unique_id': '1_last_reported', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1_last_reported-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Inverter 1 Last reported', - 'icon': 'mdi:flash', - }), - 'context': , - 'entity_id': 'sensor.inverter_1_last_reported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 
'1970-01-01T00:00:01+00:00', - }) +# name: test_sensor[sensor.inverter_1_last_reported-state] + None # --- diff --git a/tests/components/enphase_envoy/snapshots/test_switch.ambr b/tests/components/enphase_envoy/snapshots/test_switch.ambr deleted file mode 100644 index 46123c03cec..00000000000 --- a/tests/components/enphase_envoy/snapshots/test_switch.ambr +++ /dev/null @@ -1,277 +0,0 @@ -# serializer version: 1 -# name: test_switch[envoy_eu_batt][switch.envoy_1234_charge_from_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.envoy_1234_charge_from_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Charge from grid', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_from_grid', - 'unique_id': '1234_charge_from_grid', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[envoy_eu_batt][switch.envoy_1234_charge_from_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Envoy 1234 Charge from grid', - }), - 'context': , - 'entity_id': 'switch.envoy_1234_charge_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[envoy_metered_batt_relay][switch.enpower_654321_charge_from_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.enpower_654321_charge_from_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Charge from grid', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_from_grid', - 'unique_id': '654321_charge_from_grid', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[envoy_metered_batt_relay][switch.enpower_654321_charge_from_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Enpower 654321 Charge from grid', - }), - 'context': , - 'entity_id': 'switch.enpower_654321_charge_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[envoy_metered_batt_relay][switch.enpower_654321_grid_enabled-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.enpower_654321_grid_enabled', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Grid enabled', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'grid_enabled', - 'unique_id': '654321_mains_admin_state', - 
'unit_of_measurement': None, - }) -# --- -# name: test_switch[envoy_metered_batt_relay][switch.enpower_654321_grid_enabled-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Enpower 654321 Grid enabled', - }), - 'context': , - 'entity_id': 'switch.enpower_654321_grid_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[envoy_metered_batt_relay][switch.nc1_fixture-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.nc1_fixture', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '654321_relay_NC1_relay_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[envoy_metered_batt_relay][switch.nc1_fixture-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC1 Fixture', - }), - 'context': , - 'entity_id': 'switch.nc1_fixture', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch[envoy_metered_batt_relay][switch.nc2_fixture-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.nc2_fixture', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '654321_relay_NC2_relay_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[envoy_metered_batt_relay][switch.nc2_fixture-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC2 Fixture', - }), - 'context': , - 'entity_id': 'switch.nc2_fixture', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[envoy_metered_batt_relay][switch.nc3_fixture-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.nc3_fixture', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '654321_relay_NC3_relay_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[envoy_metered_batt_relay][switch.nc3_fixture-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'NC3 Fixture', - }), - 'context': , - 'entity_id': 'switch.nc3_fixture', - 
'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/enphase_envoy/test_binary_sensor.py b/tests/components/enphase_envoy/test_binary_sensor.py deleted file mode 100644 index bb4a5c5a191..00000000000 --- a/tests/components/enphase_envoy/test_binary_sensor.py +++ /dev/null @@ -1,91 +0,0 @@ -"""Test Enphase Envoy binary sensors.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.enphase_envoy.const import Platform -from homeassistant.const import STATE_ON -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.parametrize( - ("mock_envoy"), - ["envoy_eu_batt", "envoy_metered_batt_relay"], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_binary_sensor( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test binary sensor platform entities against snapshot.""" - with patch( - "homeassistant.components.enphase_envoy.PLATFORMS", [Platform.BINARY_SENSOR] - ): - await setup_integration(hass, config_entry) - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy", - "envoy_1p_metered", - "envoy_nobatt_metered_3p", - "envoy_tot_cons_metered", - ], - indirect=["mock_envoy"], -) -async def test_no_binary_sensor( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test switch platform entities are not created.""" - with patch( - "homeassistant.components.enphase_envoy.PLATFORMS", [Platform.BINARY_SENSOR] - ): - await setup_integration(hass, config_entry) - assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) - - -@pytest.mark.parametrize( - ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] -) -async def test_binary_sensor_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test binary sensor entities values and names.""" - with patch( - "homeassistant.components.enphase_envoy.PLATFORMS", [Platform.BINARY_SENSOR] - ): - await setup_integration(hass, config_entry) - - sn = mock_envoy.data.enpower.serial_number - entity_base = f"{Platform.BINARY_SENSOR}.enpower" - - assert (entity_state := hass.states.get(f"{entity_base}_{sn}_communicating")) - assert entity_state.state == STATE_ON - assert (entity_state := hass.states.get(f"{entity_base}_{sn}_grid_status")) - assert entity_state.state == STATE_ON - - entity_base = f"{Platform.BINARY_SENSOR}.encharge" - - for sn in mock_envoy.data.encharge_inventory: - assert (entity_state := hass.states.get(f"{entity_base}_{sn}_communicating")) - assert entity_state.state == STATE_ON - assert (entity_state := hass.states.get(f"{entity_base}_{sn}_dc_switch")) - assert entity_state.state == STATE_ON diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index 44e2e680d5f..b60b03e5df9 100644 --- a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -6,141 +6,178 @@ from unittest.mock import 
AsyncMock from pyenphase import EnvoyAuthenticationError, EnvoyError import pytest +from syrupy.assertion import SnapshotAssertion +from homeassistant import config_entries from homeassistant.components import zeroconf from homeassistant.components.enphase_envoy.const import ( DOMAIN, OPTION_DIAGNOSTICS_INCLUDE_FIXTURES, OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE, - OPTION_DISABLE_KEEP_ALIVE, - OPTION_DISABLE_KEEP_ALIVE_DEFAULT_VALUE, + PLATFORMS, ) -from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . import setup_integration - from tests.common import MockConfigEntry _LOGGER = logging.getLogger(__name__) -async def test_form( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, -) -> None: +async def test_form(hass: HomeAssistant, config, setup_enphase_envoy) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", }, ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Envoy 1234" - assert result["data"] == { - CONF_HOST: "1.1.1.1", - CONF_NAME: "Envoy 1234", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "Envoy 1234" + assert result2["data"] == { + "host": "1.1.1.1", + "name": "Envoy 1234", + "username": "test-username", + "password": "test-password", } +@pytest.mark.parametrize("serial_number", [None]) async def test_user_no_serial_number( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config, setup_enphase_envoy ) -> None: """Test user setup without a serial number.""" - mock_envoy.serial_number = None result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", }, ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Envoy" - assert result["data"] == { - CONF_HOST: "1.1.1.1", - CONF_NAME: "Envoy", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "Envoy" + assert result2["data"] == { + "host": "1.1.1.1", + "name": "Envoy", + "username": "test-username", + "password": "test-password", } -async def test_form_invalid_auth( - hass: HomeAssistant, - mock_setup_entry: 
AsyncMock, - mock_envoy: AsyncMock, +@pytest.mark.parametrize("serial_number", [None]) +async def test_user_fetching_serial_fails( + hass: HomeAssistant, setup_enphase_envoy ) -> None: - """Test we handle invalid auth.""" - mock_envoy.authenticate.side_effect = EnvoyAuthenticationError( - "fail authentication" - ) + """Test user setup when fetching the serial number fails.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", + }, + ) + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "Envoy" + assert result2["data"] == { + "host": "1.1.1.1", + "name": "Envoy", + "username": "test-username", + "password": "test-password", + } + + +@pytest.mark.parametrize( + "mock_authenticate", + [ + AsyncMock(side_effect=EnvoyAuthenticationError("test")), + ], +) +async def test_form_invalid_auth(hass: HomeAssistant, setup_enphase_envoy) -> None: + """Test we handle invalid auth.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", }, ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "invalid_auth"} @pytest.mark.parametrize( - ("exception", "error"), - [ - (EnvoyError, "cannot_connect"), - (ValueError, "unknown"), - ], + "mock_setup", + [AsyncMock(side_effect=EnvoyError)], ) -async def test_form_cannot_connect( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, - exception: Exception, - error: str, -) -> None: +async def test_form_cannot_connect(hass: HomeAssistant, setup_enphase_envoy) -> None: """Test we handle cannot connect error.""" - mock_envoy.setup.side_effect = exception result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) - result = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", }, ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error} + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} + + +@pytest.mark.parametrize( + "mock_setup", + [AsyncMock(side_effect=ValueError)], +) +async def test_form_unknown_error(hass: HomeAssistant, setup_enphase_envoy) -> None: + """Test we handle unknown error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", + }, + ) + await
hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "unknown"} def _get_schema_default(schema, key_name): @@ -152,14 +189,12 @@ def _get_schema_default(schema, key_name): async def test_zeroconf_pre_token_firmware( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, setup_enphase_envoy ) -> None: """Test we can setup from zeroconf.""" result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": SOURCE_ZEROCONF}, + context={"source": config_entries.SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -173,38 +208,35 @@ async def test_zeroconf_pre_token_firmware( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - assert ( - _get_schema_default(result["data_schema"].schema, CONF_USERNAME) == "installer" - ) + assert _get_schema_default(result["data_schema"].schema, "username") == "installer" - result = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", }, ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Envoy 1234" - assert result["result"].unique_id == "1234" - assert result["data"] == { - CONF_HOST: "1.1.1.1", - CONF_NAME: "Envoy 1234", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "Envoy 1234" + assert result2["result"].unique_id == "1234" + assert result2["data"] == { + "host": "1.1.1.1", + "name": "Envoy 1234", + "username": "test-username", + "password": "test-password", } async def test_zeroconf_token_firmware( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, setup_enphase_envoy ) -> None: """Test we can setup from zeroconf.""" result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": SOURCE_ZEROCONF}, + context={"source": config_entries.SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -217,101 +249,102 @@ async def test_zeroconf_token_firmware( ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - assert _get_schema_default(result["data_schema"].schema, CONF_USERNAME) == "" + assert _get_schema_default(result["data_schema"].schema, "username") == "" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", }, ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Envoy 1234" assert result2["result"].unique_id == "1234" assert result2["data"] == { - CONF_HOST: "1.1.1.1", - CONF_NAME: "Envoy 1234", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + "host": "1.1.1.1", + "name": "Envoy 1234", + "username": "test-username", + "password": "test-password", } +@pytest.mark.parametrize( + "mock_authenticate", + [ + AsyncMock( + side_effect=[ + None, + 
EnvoyAuthenticationError("fail authentication"), + None, + ] + ), + ], +) async def test_form_host_already_exists( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config_entry, setup_enphase_envoy ) -> None: """Test changing credentials for existing host.""" - config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} # existing config - assert config_entry.data[CONF_HOST] == "1.1.1.1" - assert config_entry.data[CONF_USERNAME] == "test-username" - assert config_entry.data[CONF_PASSWORD] == "test-password" - - mock_envoy.authenticate.side_effect = EnvoyAuthenticationError( - "fail authentication" - ) + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "test-password" # mock failing authentication on first try - result = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.2", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "wrong-password", + "host": "1.1.1.2", + "username": "test-username", + "password": "wrong-password", }, ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "invalid_auth"} - - mock_envoy.authenticate.side_effect = None + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "invalid_auth"} # still original config after failure - assert config_entry.data[CONF_HOST] == "1.1.1.1" - assert config_entry.data[CONF_USERNAME] == "test-username" - assert config_entry.data[CONF_PASSWORD] == "test-password" + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "test-password" # mock successful authentication and update of credentials - result = await hass.config_entries.flow.async_configure( + result3 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.2", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "changed-password", + "host": "1.1.1.2", + "username": "test-username", + "password": "changed-password", }, ) await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "reauth_successful" # updated config with new ip and changed pw - assert config_entry.data[CONF_HOST] == "1.1.1.2" - assert config_entry.data[CONF_USERNAME] == "test-username" - assert config_entry.data[CONF_PASSWORD] == "changed-password" + assert config_entry.data["host"] == "1.1.1.2" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "changed-password" async def test_zeroconf_serial_already_exists( hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + config_entry, + setup_enphase_envoy, caplog: pytest.LogCaptureFixture, ) -> None: """Test serial number already exists from zeroconf.""" _LOGGER.setLevel(logging.DEBUG) - await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": SOURCE_ZEROCONF}, + context={"source": 
config_entries.SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("4.4.4.4"), ip_addresses=[ip_address("4.4.4.4")], @@ -322,24 +355,21 @@ async def test_zeroconf_serial_already_exists( type="mock_type", ), ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == "4.4.4.4" + assert config_entry.data["host"] == "4.4.4.4" assert "Zeroconf ip 4 processing 4.4.4.4, current hosts: {'1.1.1.1'}" in caplog.text async def test_zeroconf_serial_already_exists_ignores_ipv6( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config_entry, setup_enphase_envoy ) -> None: """Test serial number already exists from zeroconf but the discovery is ipv6.""" - await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": SOURCE_ZEROCONF}, + context={"source": config_entries.SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("fd00::b27c:63bb:cc85:4ea0"), ip_addresses=[ip_address("fd00::b27c:63bb:cc85:4ea0")], @@ -354,21 +384,17 @@ async def test_zeroconf_serial_already_exists_ignores_ipv6( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "not_ipv4_address" - assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data["host"] == "1.1.1.1" +@pytest.mark.parametrize("serial_number", [None]) async def test_zeroconf_host_already_exists( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config_entry, setup_enphase_envoy ) -> None: """Test hosts already exists from zeroconf.""" - mock_envoy.serial_number = None - await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": SOURCE_ZEROCONF}, + context={"source": config_entries.SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -379,6 +405,7 @@ async def test_zeroconf_host_already_exists( type="mock_type", ), ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" @@ -387,21 +414,17 @@ async def test_zeroconf_host_already_exists( async def test_zero_conf_while_form( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config_entry, setup_enphase_envoy ) -> None: """Test zeroconf while form is active.""" - await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": SOURCE_ZEROCONF}, + context={"source": config_entries.SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -412,29 +435,26 @@ async def test_zero_conf_while_form( type="mock_type", ), ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data["host"] == "1.1.1.1" assert config_entry.unique_id == "1234" assert 
config_entry.title == "Envoy 1234" async def test_zero_conf_second_envoy_while_form( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config_entry, setup_enphase_envoy ) -> None: """Test zeroconf while form is active.""" - await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM result2 = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": SOURCE_ZEROCONF}, + context={"source": config_entries.SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("4.4.4.4"), ip_addresses=[ip_address("4.4.4.4")], @@ -445,51 +465,50 @@ async def test_zero_conf_second_envoy_while_form( type="mock_type", ), ) - assert result["type"] is FlowResultType.FORM - assert config_entry.data[CONF_HOST] == "1.1.1.1" + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM + assert config_entry.data["host"] == "1.1.1.1" assert config_entry.unique_id == "1234" assert config_entry.title == "Envoy 1234" - result2 = await hass.config_entries.flow.async_configure( + result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], { - CONF_HOST: "4.4.4.4", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + "host": "4.4.4.4", + "username": "test-username", + "password": "test-password", }, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Envoy 4321" - assert result2["result"].unique_id == "4321" + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Envoy 4321" + assert result3["result"].unique_id == "4321" result4 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", }, ) + await hass.async_block_till_done() assert result4["type"] is FlowResultType.ABORT async def test_zero_conf_malformed_serial_property( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config_entry, setup_enphase_envoy ) -> None: """Test malformed zeroconf properties.""" - await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM with pytest.raises(KeyError) as ex: await hass.config_entries.flow.async_init( DOMAIN, - context={"source": SOURCE_ZEROCONF}, + context={"source": config_entries.SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -502,33 +521,30 @@ async def test_zero_conf_malformed_serial_property( ) assert "serialnum" in str(ex.value) - result = await hass.config_entries.flow.async_configure( + result3 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", }, ) - assert result["type"] is FlowResultType.ABORT + await hass.async_block_till_done() + assert 
result3["type"] is FlowResultType.ABORT async def test_zero_conf_malformed_serial( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config_entry, setup_enphase_envoy ) -> None: """Test malformed zeroconf properties.""" - await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - result = await hass.config_entries.flow.async_init( + result2 = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": SOURCE_ZEROCONF}, + context={"source": config_entries.SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -539,36 +555,34 @@ async def test_zero_conf_malformed_serial( type="mock_type", ), ) - assert result["type"] is FlowResultType.FORM + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM - result = await hass.config_entries.flow.async_configure( - result["flow_id"], + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", }, ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Envoy 12%4" + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Envoy 12%4" async def test_zero_conf_malformed_fw_property( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config_entry, setup_enphase_envoy ) -> None: """Test malformed zeroconf property.""" - await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": SOURCE_ZEROCONF}, + context={"source": config_entries.SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -579,26 +593,25 @@ async def test_zero_conf_malformed_fw_property( type="mock_type", ), ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data["host"] == "1.1.1.1" assert config_entry.unique_id == "1234" assert config_entry.title == "Envoy 1234" async def test_zero_conf_old_blank_entry( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, setup_enphase_envoy ) -> None: """Test re-using old blank entry.""" entry = MockConfigEntry( domain=DOMAIN, data={ - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "", - CONF_PASSWORD: "", - CONF_NAME: "unknown", + "host": "1.1.1.1", + "username": "", + "password": "", + "name": "unknown", }, unique_id=None, title="Envoy", @@ -606,7 +619,7 @@ async def test_zero_conf_old_blank_entry( entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": SOURCE_ZEROCONF}, + context={"source": config_entries.SOURCE_ZEROCONF}, 
data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1"), ip_address("1.1.1.2")], @@ -617,64 +630,41 @@ async def test_zero_conf_old_blank_entry( type="mock_type", ), ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert entry.data[CONF_HOST] == "1.1.1.1" + assert entry.data["host"] == "1.1.1.1" assert entry.unique_id == "1234" assert entry.title == "Envoy 1234" -async def test_reauth( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, -) -> None: +async def test_reauth(hass: HomeAssistant, config_entry, setup_enphase_envoy) -> None: """Test we reauth auth.""" - await setup_integration(hass, config_entry) - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": config_entry.unique_id, + "entry_id": config_entry.entry_id, + }, + ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + "username": "test-username", + "password": "test-password", }, ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "reauth_successful" async def test_options_default( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config_entry, setup_enphase_envoy ) -> None: """Test we can configure options.""" - await setup_integration(hass, config_entry) - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert config_entry.options == { - OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE, - OPTION_DISABLE_KEEP_ALIVE: OPTION_DISABLE_KEEP_ALIVE_DEFAULT_VALUE, - } - - -async def test_options_set( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, -) -> None: - """Test we can configure options.""" - await setup_integration(hass, config_entry) result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" @@ -682,98 +672,117 @@ async def test_options_set( result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ - OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: True, - OPTION_DISABLE_KEEP_ALIVE: True, + OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert config_entry.options == { - OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: True, - OPTION_DISABLE_KEEP_ALIVE: True, + OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE } +async def test_options_set( + hass: HomeAssistant, config_entry, setup_enphase_envoy +) -> None: + """Test we can configure options.""" + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await 
hass.config_entries.options.async_configure( + result["flow_id"], user_input={OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: True} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert config_entry.options == {OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: True} + + async def test_reconfigure( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config_entry, setup_enphase_envoy ) -> None: """Test we can reconfiger the entry.""" - await setup_integration(hass, config_entry) - result = await config_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": config_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" assert result["errors"] == {} # original entry - assert config_entry.data[CONF_HOST] == "1.1.1.1" - assert config_entry.data[CONF_USERNAME] == "test-username" - assert config_entry.data[CONF_PASSWORD] == "test-password" + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "test-password" - result = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.2", - CONF_USERNAME: "test-username2", - CONF_PASSWORD: "test-password2", + "host": "1.1.1.2", + "username": "test-username2", + "password": "test-password2", }, ) await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" # changed entry - assert config_entry.data[CONF_HOST] == "1.1.1.2" - assert config_entry.data[CONF_USERNAME] == "test-username2" - assert config_entry.data[CONF_PASSWORD] == "test-password2" + assert config_entry.data["host"] == "1.1.1.2" + assert config_entry.data["username"] == "test-username2" + assert config_entry.data["password"] == "test-password2" async def test_reconfigure_nochange( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config_entry, setup_enphase_envoy ) -> None: """Test we get the reconfigure form and apply nochange.""" - await setup_integration(hass, config_entry) - result = await config_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": config_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" assert result["errors"] == {} # original entry - assert config_entry.data[CONF_HOST] == "1.1.1.1" - assert config_entry.data[CONF_USERNAME] == "test-username" - assert config_entry.data[CONF_PASSWORD] == "test-password" + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "test-password" - result = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", }, ) await hass.async_block_till_done() - 
assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" # unchanged original entry - assert config_entry.data[CONF_HOST] == "1.1.1.1" - assert config_entry.data[CONF_USERNAME] == "test-username" - assert config_entry.data[CONF_PASSWORD] == "test-password" + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "test-password" async def test_reconfigure_otherenvoy( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config_entry, setup_enphase_envoy, mock_envoy ) -> None: """Test entering ip of other envoy and prevent changing it based on serial.""" - await setup_integration(hass, config_entry) - result = await config_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": config_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" assert result["errors"] == {} @@ -781,93 +790,152 @@ async def test_reconfigure_otherenvoy( # let mock return different serial from first time, sim it's other one on changed ip mock_envoy.serial_number = "45678" - result = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.2", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "new-password", + "host": "1.1.1.2", + "username": "test-username", + "password": "new-password", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "unexpected_envoy"} + + # entry should still be original entry + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "test-password" + + # set serial back to original to finish the flow + mock_envoy.serial_number = "1234" + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + { + "host": "1.1.1.1", + "username": "test-username", + "password": "new-password", }, ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "unique_id_mismatch" + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "reconfigure_successful" - # entry should still be original entry - assert config_entry.data[CONF_HOST] == "1.1.1.1" - assert config_entry.data[CONF_USERNAME] == "test-username" - assert config_entry.data[CONF_PASSWORD] == "test-password" + # updated original entry + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "new-password" @pytest.mark.parametrize( - ("exception", "error"), + "mock_authenticate", [ - (EnvoyAuthenticationError("fail authentication"), "invalid_auth"), - (EnvoyError, "cannot_connect"), - (Exception, "unknown"), + AsyncMock( + side_effect=[ + None, + EnvoyAuthenticationError("fail authentication"), + EnvoyError("cannot_connect"), + Exception("Unexpected exception"), + None, + ] + ), ], ) async def test_reconfigure_auth_failure( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, - exception: Exception,
- error: str, + hass: HomeAssistant, config_entry, setup_enphase_envoy ) -> None: """Test changing credentials for existing host with auth failure.""" - await setup_integration(hass, config_entry) - - result = await config_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": config_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} # existing config - assert config_entry.data[CONF_HOST] == "1.1.1.1" - assert config_entry.data[CONF_USERNAME] == "test-username" - assert config_entry.data[CONF_PASSWORD] == "test-password" - - mock_envoy.authenticate.side_effect = exception + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "test-password" # mock failing authentication on first try - result = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.2", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "wrong-password", + "host": "1.1.1.2", + "username": "test-username", + "password": "wrong-password", }, ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error} + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "invalid_auth"} - mock_envoy.authenticate.side_effect = None - # mock successful authentication and update of credentials - result = await hass.config_entries.flow.async_configure( + # still original config after failure + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "test-password" + + # mock connection error on second try + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.2", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "changed-password", + "host": "1.1.1.2", + "username": "new-username", + "password": "wrong-password", }, ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} + + # still original config after failure + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "test-password" + + # mock unknown error on third try + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": "1.1.1.2", + "username": "other-username", + "password": "test-password", + }, + ) + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "unknown"} + + # still original config after failure + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "test-password" + + # mock successful authentication and update of credentials + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": "1.1.1.2", + "username": "test-username", + "password": "changed-password", + }, + ) + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "reconfigure_successful" # updated config with new ip and changed pw - assert
config_entry.data[CONF_HOST] == "1.1.1.2" - assert config_entry.data[CONF_USERNAME] == "test-username" - assert config_entry.data[CONF_PASSWORD] == "changed-password" + assert config_entry.data["host"] == "1.1.1.2" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "changed-password" async def test_reconfigure_change_ip_to_existing( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, + hass: HomeAssistant, config_entry, setup_enphase_envoy ) -> None: """Test reconfiguration to existing entry with same ip does not harm existing one.""" - await setup_integration(hass, config_entry) other_entry = MockConfigEntry( domain=DOMAIN, entry_id="65432155aaddb2007c5f6602e0c38e72", @@ -883,37 +951,49 @@ async def test_reconfigure_change_ip_to_existing( other_entry.add_to_hass(hass) # original other entry - assert other_entry.data[CONF_HOST] == "1.1.1.2" - assert other_entry.data[CONF_USERNAME] == "other-username" - assert other_entry.data[CONF_PASSWORD] == "other-password" + assert other_entry.data["host"] == "1.1.1.2" + assert other_entry.data["username"] == "other-username" + assert other_entry.data["password"] == "other-password" - result = await config_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": config_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" assert result["errors"] == {} # original entry - assert config_entry.data[CONF_HOST] == "1.1.1.1" - assert config_entry.data[CONF_USERNAME] == "test-username" - assert config_entry.data[CONF_PASSWORD] == "test-password" + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "test-password" - result = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.2", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password2", + "host": "1.1.1.2", + "username": "test-username", + "password": "test-password2", }, ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" # updated entry - assert config_entry.data[CONF_HOST] == "1.1.1.2" - assert config_entry.data[CONF_USERNAME] == "test-username" - assert config_entry.data[CONF_PASSWORD] == "test-password2" + assert config_entry.data["host"] == "1.1.1.2" + assert config_entry.data["username"] == "test-username" + assert config_entry.data["password"] == "test-password2" # unchanged other entry - assert other_entry.data[CONF_HOST] == "1.1.1.2" - assert other_entry.data[CONF_USERNAME] == "other-username" - assert other_entry.data[CONF_PASSWORD] == "other-password" + assert other_entry.data["host"] == "1.1.1.2" + assert other_entry.data["username"] == "other-username" + assert other_entry.data["password"] == "other-password" + + +async def test_platforms(snapshot: SnapshotAssertion) -> None: + """Test if platform list changed and requires more tests.""" + assert snapshot == PLATFORMS diff --git a/tests/components/enphase_envoy/test_diagnostics.py b/tests/components/enphase_envoy/test_diagnostics.py index 186ee5c46f3..9ee6b7905e7 100644 --- 
a/tests/components/enphase_envoy/test_diagnostics.py +++ b/tests/components/enphase_envoy/test_diagnostics.py @@ -1,6 +1,6 @@ """Test Enphase Envoy diagnostics.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from pyenphase.exceptions import EnvoyError import pytest @@ -10,9 +10,9 @@ from homeassistant.components.enphase_envoy.const import ( DOMAIN, OPTION_DIAGNOSTICS_INCLUDE_FIXTURES, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant - -from . import setup_integration +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry @@ -26,8 +26,6 @@ TO_EXCLUDE = { "last_updated", "last_changed", "last_reported", - "created_at", - "modified_at", } @@ -38,55 +36,85 @@ def limit_diagnostic_attrs(prop, path) -> bool: async def test_entry_diagnostics( hass: HomeAssistant, - config_entry: MockConfigEntry, + config_entry: ConfigEntry, hass_client: ClientSessionGenerator, - mock_envoy: AsyncMock, + setup_enphase_envoy, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - await setup_integration(hass, config_entry) assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry ) == snapshot(exclude=limit_diagnostic_attrs) @pytest.fixture(name="config_entry_options") -def config_entry_options_fixture(hass: HomeAssistant, config: dict[str, str]): +def config_entry_options_fixture(hass: HomeAssistant, config, serial_number): """Define a config entry fixture.""" - return MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, entry_id="45a36e55aaddb2007c5f6602e0c38e72", - title="Envoy 1234", - unique_id="1234", + title=f"Envoy {serial_number}" if serial_number else "Envoy", + unique_id=serial_number, data=config, options={OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: True}, ) + entry.add_to_hass(hass) + return entry async def test_entry_diagnostics_with_fixtures( hass: HomeAssistant, hass_client: ClientSessionGenerator, - config_entry_options: MockConfigEntry, - mock_envoy: AsyncMock, + config_entry_options: ConfigEntry, + setup_enphase_envoy, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - await setup_integration(hass, config_entry_options) assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry_options ) == snapshot(exclude=limit_diagnostic_attrs) +@pytest.fixture(name="setup_enphase_envoy_options_error") +async def setup_enphase_envoy_options_error_fixture( + hass: HomeAssistant, + config, + mock_envoy_options_error, +): + """Define a fixture to set up Enphase Envoy with a request error.""" + with ( + patch( + "homeassistant.components.enphase_envoy.config_flow.Envoy", + return_value=mock_envoy_options_error, + ), + patch( + "homeassistant.components.enphase_envoy.Envoy", + return_value=mock_envoy_options_error, + ), + ): + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done() + yield + + +@pytest.fixture(name="mock_envoy_options_error") +def mock_envoy_options_fixture( + mock_envoy, +): + """Mock envoy with error in request.""" + mock_envoy_options = mock_envoy + mock_envoy_options.request.side_effect = AsyncMock(side_effect=EnvoyError("Test")) + return mock_envoy_options + + async def test_entry_diagnostics_with_fixtures_with_error( hass: HomeAssistant, hass_client: ClientSessionGenerator, - config_entry_options: MockConfigEntry, + config_entry_options: ConfigEntry, + 
setup_enphase_envoy_options_error, snapshot: SnapshotAssertion, - mock_envoy: AsyncMock, ) -> None: """Test config entry diagnostics.""" - await setup_integration(hass, config_entry_options) - mock_envoy.request.side_effect = EnvoyError("Test") assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry_options ) == snapshot(exclude=limit_diagnostic_attrs) diff --git a/tests/components/enphase_envoy/test_init.py b/tests/components/enphase_envoy/test_init.py deleted file mode 100644 index 2b35aaff5e9..00000000000 --- a/tests/components/enphase_envoy/test_init.py +++ /dev/null @@ -1,362 +0,0 @@ -"""Test Enphase Envoy runtime.""" - -from unittest.mock import AsyncMock, patch - -from freezegun.api import FrozenDateTimeFactory -from jwt import encode -from pyenphase import EnvoyAuthenticationError, EnvoyError, EnvoyTokenAuth -from pyenphase.auth import EnvoyLegacyAuth -import pytest -import respx - -from homeassistant.components.enphase_envoy import DOMAIN -from homeassistant.components.enphase_envoy.const import ( - OPTION_DIAGNOSTICS_INCLUDE_FIXTURES, - OPTION_DISABLE_KEEP_ALIVE, - Platform, -) -from homeassistant.components.enphase_envoy.coordinator import SCAN_INTERVAL -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ( - CONF_HOST, - CONF_NAME, - CONF_PASSWORD, - CONF_TOKEN, - CONF_USERNAME, - STATE_UNAVAILABLE, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.setup import async_setup_component - -from . import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed -from tests.typing import WebSocketGenerator - - -async def test_with_pre_v7_firmware( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test enphase_envoy coordinator with pre V7 firmware.""" - mock_envoy.firmware = "5.1.1" - mock_envoy.auth = EnvoyLegacyAuth( - "127.0.0.1", username="test-username", password="test-password" - ) - await setup_integration(hass, config_entry) - - assert config_entry.state is ConfigEntryState.LOADED - - assert (entity_state := hass.states.get("sensor.inverter_1")) - assert entity_state.state == "1" - - -@pytest.mark.freeze_time("2024-07-23 00:00:00+00:00") -async def test_token_in_config_file( - hass: HomeAssistant, - mock_envoy: AsyncMock, -) -> None: - """Test coordinator with token provided from config.""" - token = encode( - payload={"name": "envoy", "exp": 1907837780}, - key="secret", - algorithm="HS256", - ) - entry = MockConfigEntry( - domain=DOMAIN, - entry_id="45a36e55aaddb2007c5f6602e0c38e72", - title="Envoy 1234", - unique_id="1234", - data={ - CONF_HOST: "1.1.1.1", - CONF_NAME: "Envoy 1234", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_TOKEN: token, - }, - ) - mock_envoy.auth = EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial="1234") - await setup_integration(hass, entry) - await hass.async_block_till_done(wait_background_tasks=True) - assert entry.state is ConfigEntryState.LOADED - - assert (entity_state := hass.states.get("sensor.inverter_1")) - assert entity_state.state == "1" - - -@respx.mock -@pytest.mark.freeze_time("2024-07-23 00:00:00+00:00") -async def test_expired_token_in_config( - hass: HomeAssistant, - mock_envoy: AsyncMock, -) -> None: - """Test coordinator with expired token provided from config.""" - current_token = encode( - # some time in 2021 - payload={"name": "envoy", "exp": 1627314600}, - 
key="secret", - algorithm="HS256", - ) - - # mock envoy with expired token in config - entry = MockConfigEntry( - domain=DOMAIN, - entry_id="45a36e55aaddb2007c5f6602e0c38e72", - title="Envoy 1234", - unique_id="1234", - data={ - CONF_HOST: "1.1.1.1", - CONF_NAME: "Envoy 1234", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_TOKEN: current_token, - }, - ) - # Make sure to mock pyenphase.auth.EnvoyTokenAuth._obtain_token - # when specifying username and password in EnvoyTokenauth - mock_envoy.auth = EnvoyTokenAuth( - "127.0.0.1", - token=current_token, - envoy_serial="1234", - cloud_username="test_username", - cloud_password="test_password", - ) - await setup_integration(hass, entry) - await hass.async_block_till_done(wait_background_tasks=True) - assert entry.state is ConfigEntryState.LOADED - - assert (entity_state := hass.states.get("sensor.inverter_1")) - assert entity_state.state == "1" - - -async def test_coordinator_update_error( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test coordinator update error handling.""" - await setup_integration(hass, config_entry) - - assert (entity_state := hass.states.get("sensor.inverter_1")) - original_state = entity_state - - # force HA to detect changed data by changing raw - mock_envoy.data.raw = {"I": "am changed 1"} - mock_envoy.update.side_effect = EnvoyError - - # Move time to next update - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - assert (entity_state := hass.states.get("sensor.inverter_1")) - assert entity_state.state == STATE_UNAVAILABLE - - mock_envoy.reset_mock(return_value=True, side_effect=True) - - mock_envoy.data.raw = {"I": "am changed 2"} - - # Move time to next update - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - assert (entity_state := hass.states.get("sensor.inverter_1")) - assert entity_state.state == original_state.state - - -async def test_coordinator_update_authentication_error( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test enphase_envoy coordinator update authentication error handling.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - # force HA to detect changed data by changing raw - mock_envoy.data.raw = {"I": "am changed 1"} - mock_envoy.update.side_effect = EnvoyAuthenticationError("This must fail") - - # Move time to next update - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - assert (entity_state := hass.states.get("sensor.inverter_1")) - assert entity_state.state == STATE_UNAVAILABLE - - -@pytest.mark.freeze_time("2024-07-23 00:00:00+00:00") -async def test_coordinator_token_refresh_error( - hass: HomeAssistant, - mock_envoy: AsyncMock, -) -> None: - """Test coordinator with expired token and failure to refresh.""" - token = encode( - # some time in 2021 - payload={"name": "envoy", "exp": 1627314600}, - key="secret", - algorithm="HS256", - ) - entry = MockConfigEntry( - domain=DOMAIN, - entry_id="45a36e55aaddb2007c5f6602e0c38e72", - title="Envoy 1234", - unique_id="1234", - data={ - CONF_HOST: "1.1.1.1", - CONF_NAME: "Envoy 1234", - CONF_USERNAME: "test-username", - CONF_PASSWORD: 
"test-password", - CONF_TOKEN: token, - }, - ) - # override fresh token in conftest mock_envoy.auth - mock_envoy.auth = EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial="1234") - # force token refresh to fail. - with patch( - "pyenphase.auth.EnvoyTokenAuth._obtain_token", - side_effect=EnvoyError, - ): - await setup_integration(hass, entry) - - await hass.async_block_till_done(wait_background_tasks=True) - assert entry.state is ConfigEntryState.LOADED - - assert (entity_state := hass.states.get("sensor.inverter_1")) - assert entity_state.state == "1" - - -async def test_config_no_unique_id( - hass: HomeAssistant, - mock_envoy: AsyncMock, -) -> None: - """Test enphase_envoy init if config entry has no unique id.""" - entry = MockConfigEntry( - domain=DOMAIN, - entry_id="45a36e55aaddb2007c5f6602e0c38e72", - title="Envoy 1234", - unique_id=None, - data={ - CONF_HOST: "1.1.1.1", - CONF_NAME: "Envoy 1234", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - await setup_integration(hass, entry) - assert entry.state is ConfigEntryState.LOADED - assert entry.unique_id == mock_envoy.serial_number - - -async def test_config_different_unique_id( - hass: HomeAssistant, - mock_envoy: AsyncMock, -) -> None: - """Test enphase_envoy init if config entry has different unique id.""" - entry = MockConfigEntry( - domain=DOMAIN, - entry_id="45a36e55aaddb2007c5f6602e0c38e72", - title="Envoy 1234", - unique_id=4321, - data={ - CONF_HOST: "1.1.1.1", - CONF_NAME: "Envoy 1234", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - await setup_integration(hass, entry) - assert entry.state is ConfigEntryState.SETUP_RETRY - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - ], - indirect=["mock_envoy"], -) -async def test_remove_config_entry_device( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - hass_ws_client: WebSocketGenerator, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test removing enphase_envoy config entry device.""" - assert await async_setup_component(hass, "config", {}) - await setup_integration(hass, config_entry) - assert config_entry.state is ConfigEntryState.LOADED - - # use client to send remove_device command - hass_client = await hass_ws_client(hass) - - # add device that will pass remove test - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, "delete_this_device")}, - ) - response = await hass_client.remove_device(device_entry.id, config_entry.entry_id) - assert response["success"] - - # inverters are not allowed to be removed - entity = entity_registry.entities["sensor.inverter_1"] - device_entry = device_registry.async_get(entity.device_id) - response = await hass_client.remove_device(device_entry.id, config_entry.entry_id) - assert not response["success"] - - # envoy itself is not allowed to be removed - entity = entity_registry.entities["sensor.envoy_1234_current_power_production"] - device_entry = device_registry.async_get(entity.device_id) - response = await hass_client.remove_device(device_entry.id, config_entry.entry_id) - assert not response["success"] - - # encharge can not be removed - entity = entity_registry.entities["sensor.encharge_123456_power"] - device_entry = device_registry.async_get(entity.device_id) - response = await hass_client.remove_device(device_entry.id, config_entry.entry_id) - assert not response["success"] - - # enpower can not be 
removed - entity = entity_registry.entities["sensor.enpower_654321_temperature"] - device_entry = device_registry.async_get(entity.device_id) - response = await hass_client.remove_device(device_entry.id, config_entry.entry_id) - assert not response["success"] - - # relays can be removed - entity = entity_registry.entities["switch.nc1_fixture"] - device_entry = device_registry.async_get(entity.device_id) - response = await hass_client.remove_device(device_entry.id, config_entry.entry_id) - assert response["success"] - - -async def test_option_change_reload( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_envoy: AsyncMock, -) -> None: - """Test options change will reload entity.""" - await setup_integration(hass, config_entry) - await hass.async_block_till_done(wait_background_tasks=True) - assert config_entry.state is ConfigEntryState.LOADED - - # option change will take care of COV of init::async_reload_entry - hass.config_entries.async_update_entry( - config_entry, - options={ - OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: False, - OPTION_DISABLE_KEEP_ALIVE: True, - }, - ) - await hass.async_block_till_done() - assert config_entry.options == { - OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: False, - OPTION_DISABLE_KEEP_ALIVE: True, - } diff --git a/tests/components/enphase_envoy/test_number.py b/tests/components/enphase_envoy/test_number.py deleted file mode 100644 index dbf711cacaa..00000000000 --- a/tests/components/enphase_envoy/test_number.py +++ /dev/null @@ -1,160 +0,0 @@ -"""Test Enphase Envoy number sensors.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.enphase_envoy.const import Platform -from homeassistant.components.number import ( - ATTR_VALUE, - DOMAIN as NUMBER_DOMAIN, - SERVICE_SET_VALUE, -) -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.parametrize( - ("mock_envoy"), - ["envoy_metered_batt_relay", "envoy_eu_batt"], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_number( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test number platform entities against snapshot.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.NUMBER]): - await setup_integration(hass, config_entry) - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy", - "envoy_1p_metered", - "envoy_nobatt_metered_3p", - "envoy_tot_cons_metered", - ], - indirect=["mock_envoy"], -) -async def test_no_number( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test number platform entities are not created.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.NUMBER]): - await setup_integration(hass, config_entry) - assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) - - -@pytest.mark.parametrize( - ("mock_envoy", "use_serial"), - [ - ("envoy_metered_batt_relay", "enpower_654321"), - ("envoy_eu_batt", "envoy_1234"), - ], - indirect=["mock_envoy"], -) -async def test_number_operation_storage( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - use_serial: bool, -) -> None: - """Test enphase_envoy number storage entities operation.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.NUMBER]): - await setup_integration(hass, config_entry) - - test_entity = f"{Platform.NUMBER}.{use_serial}_reserve_battery_level" - - assert (entity_state := hass.states.get(test_entity)) - assert mock_envoy.data.tariff.storage_settings.reserved_soc == float( - entity_state.state - ) - test_value = 30.0 - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: test_entity, - ATTR_VALUE: test_value, - }, - blocking=True, - ) - - mock_envoy.set_reserve_soc.assert_awaited_once_with(test_value) - - -@pytest.mark.parametrize( - ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] -) -async def test_number_operation_relays( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test enphase_envoy number relay entities operation.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.NUMBER]): - await setup_integration(hass, config_entry) - - entity_base = f"{Platform.NUMBER}." 
- - for counter, (contact_id, dry_contact) in enumerate( - mock_envoy.data.dry_contact_settings.items() - ): - name = dry_contact.load_name.lower().replace(" ", "_") - test_entity = f"{entity_base}{name}_cutoff_battery_level" - assert (entity_state := hass.states.get(test_entity)) - assert mock_envoy.data.dry_contact_settings[contact_id].soc_low == float( - entity_state.state - ) - test_value = 10.0 + counter - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: test_entity, - ATTR_VALUE: test_value, - }, - blocking=True, - ) - - mock_envoy.update_dry_contact.assert_awaited_once_with( - {"id": contact_id, "soc_low": test_value} - ) - mock_envoy.update_dry_contact.reset_mock() - - test_entity = f"{entity_base}{name}_restore_battery_level" - assert (entity_state := hass.states.get(test_entity)) - assert mock_envoy.data.dry_contact_settings[contact_id].soc_high == float( - entity_state.state - ) - test_value = 80.0 - counter - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: test_entity, - ATTR_VALUE: test_value, - }, - blocking=True, - ) - - mock_envoy.update_dry_contact.assert_awaited_once_with( - {"id": contact_id, "soc_high": test_value} - ) - mock_envoy.update_dry_contact.reset_mock() diff --git a/tests/components/enphase_envoy/test_select.py b/tests/components/enphase_envoy/test_select.py deleted file mode 100644 index 071dbcb2fe2..00000000000 --- a/tests/components/enphase_envoy/test_select.py +++ /dev/null @@ -1,228 +0,0 @@ -"""Test Enphase Envoy select.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.enphase_envoy.const import Platform -from homeassistant.components.enphase_envoy.select import ( - ACTION_OPTIONS, - MODE_OPTIONS, - RELAY_ACTION_MAP, - RELAY_MODE_MAP, - REVERSE_RELAY_ACTION_MAP, - REVERSE_RELAY_MODE_MAP, - REVERSE_STORAGE_MODE_MAP, - STORAGE_MODE_MAP, - STORAGE_MODE_OPTIONS, -) -from homeassistant.components.select import ATTR_OPTION, DOMAIN as SELECT_DOMAIN -from homeassistant.const import ATTR_ENTITY_ID, SERVICE_SELECT_OPTION -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.parametrize( - ("mock_envoy"), - ["envoy_metered_batt_relay", "envoy_eu_batt"], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_select( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test select platform entities against snapshot.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): - await setup_integration(hass, config_entry) - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy", - "envoy_1p_metered", - "envoy_nobatt_metered_3p", - "envoy_tot_cons_metered", - ], - indirect=["mock_envoy"], -) -async def test_no_select( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test select platform entities against snapshot.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): - await setup_integration(hass, config_entry) - assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) - - -@pytest.mark.parametrize( - ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] -) -async def test_select_relay_actions( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test select platform entities dry contact relay actions.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): - await setup_integration(hass, config_entry) - - entity_base = f"{Platform.SELECT}." 
- - for contact_id, dry_contact in mock_envoy.data.dry_contact_settings.items(): - name = dry_contact.load_name.lower().replace(" ", "_") - for target in ( - ("generator_action", dry_contact.generator_action, "generator_action"), - ("microgrid_action", dry_contact.micro_grid_action, "micro_grid_action"), - ("grid_action", dry_contact.grid_action, "grid_action"), - ): - test_entity = f"{entity_base}{name}_{target[0]}" - assert (entity_state := hass.states.get(test_entity)) - assert RELAY_ACTION_MAP[target[1]] == (current_state := entity_state.state) - # set all relay modes except current mode - for action in [action for action in ACTION_OPTIONS if not current_state]: - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: test_entity, - ATTR_OPTION: action, - }, - blocking=True, - ) - mock_envoy.update_dry_contact.assert_called_once_with( - {"id": contact_id, target[2]: REVERSE_RELAY_ACTION_MAP[action]} - ) - mock_envoy.update_dry_contact.reset_mock() - # and finally back to original - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: test_entity, - ATTR_OPTION: current_state, - }, - blocking=True, - ) - mock_envoy.update_dry_contact.assert_called_once_with( - {"id": contact_id, target[2]: REVERSE_RELAY_ACTION_MAP[current_state]} - ) - mock_envoy.update_dry_contact.reset_mock() - - -@pytest.mark.parametrize( - ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] -) -async def test_select_relay_modes( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test select platform dry contact relay mode changes.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): - await setup_integration(hass, config_entry) - - entity_base = f"{Platform.SELECT}." 
- - for contact_id, dry_contact in mock_envoy.data.dry_contact_settings.items(): - name = dry_contact.load_name.lower().replace(" ", "_") - test_entity = f"{entity_base}{name}_mode" - assert (entity_state := hass.states.get(test_entity)) - assert RELAY_MODE_MAP[dry_contact.mode] == (current_state := entity_state.state) - for mode in [mode for mode in MODE_OPTIONS if not current_state]: - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: test_entity, - ATTR_OPTION: mode, - }, - blocking=True, - ) - mock_envoy.update_dry_contact.assert_called_once_with( - {"id": contact_id, "mode": REVERSE_RELAY_MODE_MAP[mode]} - ) - mock_envoy.update_dry_contact.reset_mock() - - # and finally current mode again - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: test_entity, - ATTR_OPTION: current_state, - }, - blocking=True, - ) - mock_envoy.update_dry_contact.assert_called_once_with( - {"id": contact_id, "mode": REVERSE_RELAY_MODE_MAP[current_state]} - ) - mock_envoy.update_dry_contact.reset_mock() - - -@pytest.mark.parametrize( - ("mock_envoy", "use_serial"), - [ - ("envoy_metered_batt_relay", "enpower_654321"), - ("envoy_eu_batt", "envoy_1234"), - ], - indirect=["mock_envoy"], -) -async def test_select_storage_modes( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - use_serial: str, -) -> None: - """Test select platform entities storage mode changes.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): - await setup_integration(hass, config_entry) - - test_entity = f"{Platform.SELECT}.{use_serial}_storage_mode" - - assert (entity_state := hass.states.get(test_entity)) - assert STORAGE_MODE_MAP[mock_envoy.data.tariff.storage_settings.mode] == ( - current_state := entity_state.state - ) - - for mode in [mode for mode in STORAGE_MODE_OPTIONS if not current_state]: - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: test_entity, - ATTR_OPTION: mode, - }, - blocking=True, - ) - mock_envoy.set_storage_mode.assert_called_once_with( - REVERSE_STORAGE_MODE_MAP[mode] - ) - mock_envoy.set_storage_mode.reset_mock() - - # and finally with original mode - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: test_entity, - ATTR_OPTION: current_state, - }, - blocking=True, - ) - mock_envoy.set_storage_mode.assert_called_once_with( - REVERSE_STORAGE_MODE_MAP[current_state] - ) diff --git a/tests/components/enphase_envoy/test_sensor.py b/tests/components/enphase_envoy/test_sensor.py index 784dfe54073..bfb6fdb2826 100644 --- a/tests/components/enphase_envoy/test_sensor.py +++ b/tests/components/enphase_envoy/test_sensor.py @@ -1,1038 +1,61 @@ """Test Enphase Envoy sensors.""" -from itertools import chain -import logging -from unittest.mock import AsyncMock, patch +from unittest.mock import Mock, patch -from freezegun.api import FrozenDateTimeFactory -from pyenphase.const import PHASENAMES import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import AsyncGenerator +from homeassistant.components.enphase_envoy import DOMAIN from homeassistant.components.enphase_envoy.const import Platform -from homeassistant.components.enphase_envoy.coordinator import SCAN_INTERVAL -from homeassistant.const import STATE_UNKNOWN, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from 
homeassistant.util import dt as dt_util -from homeassistant.util.unit_conversion import TemperatureConverter +from homeassistant.setup import async_setup_component -from . import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +from tests.common import MockConfigEntry + + +@pytest.fixture(name="setup_enphase_envoy_sensor") +async def setup_enphase_envoy_sensor_fixture( + hass: HomeAssistant, config: dict[str, str], mock_envoy: Mock +) -> AsyncGenerator[None]: + """Define a fixture to set up Enphase Envoy with sensor platform only.""" + with ( + patch( + "homeassistant.components.enphase_envoy.config_flow.Envoy", + return_value=mock_envoy, + ), + patch( + "homeassistant.components.enphase_envoy.Envoy", + return_value=mock_envoy, + ), + patch( + "homeassistant.components.enphase_envoy.PLATFORMS", + [Platform.SENSOR], + ), + ): + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done() + yield -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy", - "envoy_1p_metered", - "envoy_eu_batt", - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - "envoy_tot_cons_metered", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor( hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry: MockConfigEntry, snapshot: SnapshotAssertion, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, + setup_enphase_envoy_sensor: None, ) -> None: - """Test sensor platform entities against snapshot.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -PRODUCTION_NAMES: tuple[str, ...] = ( - "current_power_production", - "energy_production_today", - "energy_production_last_seven_days", - "lifetime_energy_production", -) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy", - "envoy_1p_metered", - "envoy_eu_batt", - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - "envoy_tot_cons_metered", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_production_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test production entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - data = mock_envoy.data.system_production - PRODUCTION_TARGETS: tuple[float, ...] 
= ( - data.watts_now / 1000.0, - data.watt_hours_today / 1000.0, - data.watt_hours_last_7_days / 1000.0, - data.watt_hours_lifetime / 1000000.0, + """Test enphase_envoy sensor entities.""" + # compare registered entities against snapshot of prior run + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id ) + assert entity_entries + assert entity_entries == snapshot - for name, target in list(zip(PRODUCTION_NAMES, PRODUCTION_TARGETS, strict=False)): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert float(entity_state.state) == target - - -PRODUCTION_PHASE_NAMES: list[str] = [ - f"{name}_{phase.lower()}" for phase in PHASENAMES for name in PRODUCTION_NAMES -] - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_production_phase_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test production phase entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - PRODUCTION_PHASE_TARGET = chain( - *[ - ( - phase_data.watts_now / 1000.0, - phase_data.watt_hours_today / 1000.0, - phase_data.watt_hours_last_7_days / 1000.0, - phase_data.watt_hours_lifetime / 1000000.0, - ) - for phase_data in mock_envoy.data.system_production_phases.values() - ] - ) - - for name, target in list( - zip(PRODUCTION_PHASE_NAMES, PRODUCTION_PHASE_TARGET, strict=False) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert float(entity_state.state) == target - - -CONSUMPTION_NAMES: tuple[str, ...] = ( - "current_power_consumption", - "energy_consumption_today", - "energy_consumption_last_seven_days", - "lifetime_energy_consumption", -) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_1p_metered", - "envoy_eu_batt", - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_consumption_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test consumption entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - data = mock_envoy.data.system_consumption - CONSUMPTION_TARGETS = ( - data.watts_now / 1000.0, - data.watt_hours_today / 1000.0, - data.watt_hours_last_7_days / 1000.0, - data.watt_hours_lifetime / 1000000.0, - ) - - for name, target in list(zip(CONSUMPTION_NAMES, CONSUMPTION_TARGETS, strict=False)): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert float(entity_state.state) == target - - -NET_CONSUMPTION_NAMES: tuple[str, ...] 
= ( - "balanced_net_power_consumption", - "lifetime_balanced_net_energy_consumption", -) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_1p_metered", - "envoy_eu_batt", - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - "envoy_tot_cons_metered", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_net_consumption_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test net consumption entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - data = mock_envoy.data.system_net_consumption - NET_CONSUMPTION_TARGETS = ( - data.watts_now / 1000.0, - data.watt_hours_lifetime / 1000.0, - ) - for name, target in list( - zip(NET_CONSUMPTION_NAMES, NET_CONSUMPTION_TARGETS, strict=False) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert float(entity_state.state) == target - - -CONSUMPTION_PHASE_NAMES: list[str] = [ - f"{name}_{phase.lower()}" for phase in PHASENAMES for name in CONSUMPTION_NAMES -] - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_consumption_phase_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test consumption phase entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - CONSUMPTION_PHASE_TARGET = chain( - *[ - ( - phase_data.watts_now / 1000.0, - phase_data.watt_hours_today / 1000.0, - phase_data.watt_hours_last_7_days / 1000.0, - phase_data.watt_hours_lifetime / 1000000.0, - ) - for phase_data in mock_envoy.data.system_consumption_phases.values() - ] - ) - - for name, target in list( - zip(CONSUMPTION_PHASE_NAMES, CONSUMPTION_PHASE_TARGET, strict=False) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert float(entity_state.state) == target - - -NET_CONSUMPTION_PHASE_NAMES: list[str] = [ - f"{name}_{phase.lower()}" for phase in PHASENAMES for name in NET_CONSUMPTION_NAMES -] - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_net_consumption_phase_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test consumption phase entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - NET_CONSUMPTION_PHASE_TARGET = chain( - *[ - ( - phase_data.watts_now / 1000.0, - phase_data.watt_hours_lifetime / 1000.0, - ) - for phase_data in mock_envoy.data.system_net_consumption_phases.values() - ] - ) - for name, target in list( - zip(NET_CONSUMPTION_PHASE_NAMES, NET_CONSUMPTION_PHASE_TARGET, strict=False) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert 
float(entity_state.state) == target - - -CT_PRODUCTION_NAMES_INT = ("meter_status_flags_active_production_ct",) -CT_PRODUCTION_NAMES_STR = ("metering_status_production_ct",) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_production_ct_data( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_envoy: AsyncMock, -) -> None: - """Test production CT phase entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - data = mock_envoy.data.ctmeter_production - - CT_PRODUCTION_TARGETS_INT = (len(data.status_flags),) - for name, target in list( - zip(CT_PRODUCTION_NAMES_INT, CT_PRODUCTION_TARGETS_INT, strict=False) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert float(entity_state.state) == target - - CT_PRODUCTION_TARGETS_STR = (data.metering_status,) - for name, target in list( - zip(CT_PRODUCTION_NAMES_STR, CT_PRODUCTION_TARGETS_STR, strict=False) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert entity_state.state == target - - -CT_PRODUCTION_NAMES_FLOAT_PHASE = [ - f"{name}_{phase.lower()}" - for phase in PHASENAMES - for name in CT_PRODUCTION_NAMES_INT -] - -CT_PRODUCTION_NAMES_STR_PHASE = [ - f"{name}_{phase.lower()}" - for phase in PHASENAMES - for name in CT_PRODUCTION_NAMES_STR -] - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_production_ct_phase_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test production ct phase entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - CT_PRODUCTION_NAMES_FLOAT_TARGET = [ - len(phase_data.status_flags) - for phase_data in mock_envoy.data.ctmeter_production_phases.values() - ] - - for name, target in list( - zip( - CT_PRODUCTION_NAMES_FLOAT_PHASE, - CT_PRODUCTION_NAMES_FLOAT_TARGET, - strict=False, + # Test if all entities still have same state + for entity_entry in entity_entries: + assert hass.states.get(entity_entry.entity_id) == snapshot( + name=f"{entity_entry.entity_id}-state" ) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert float(entity_state.state) == target - - CT_PRODUCTION_NAMES_STR_TARGET = [ - phase_data.metering_status - for phase_data in mock_envoy.data.ctmeter_production_phases.values() - ] - - for name, target in list( - zip( - CT_PRODUCTION_NAMES_STR_PHASE, - CT_PRODUCTION_NAMES_STR_TARGET, - strict=False, - ) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert entity_state.state == target - - -CT_CONSUMPTION_NAMES_FLOAT: tuple[str, ...] = ( - "lifetime_net_energy_consumption", - "lifetime_net_energy_production", - "current_net_power_consumption", - "frequency_net_consumption_ct", - "voltage_net_consumption_ct", - "meter_status_flags_active_net_consumption_ct", -) - -CT_CONSUMPTION_NAMES_STR: tuple[str, ...] 
= ("metering_status_net_consumption_ct",) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_consumption_ct_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test consumption CT phase entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - data = mock_envoy.data.ctmeter_consumption - - CT_CONSUMPTION_TARGETS_FLOAT = ( - data.energy_delivered / 1000000.0, - data.energy_received / 1000000.0, - data.active_power / 1000.0, - data.frequency, - data.voltage, - len(data.status_flags), - ) - for name, target in list( - zip(CT_CONSUMPTION_NAMES_FLOAT, CT_CONSUMPTION_TARGETS_FLOAT, strict=False) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert float(entity_state.state) == target - - CT_CONSUMPTION_TARGETS_STR = (data.metering_status,) - for name, target in list( - zip(CT_CONSUMPTION_NAMES_STR, CT_CONSUMPTION_TARGETS_STR, strict=False) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert entity_state.state == target - - -CT_CONSUMPTION_NAMES_FLOAT_PHASE = [ - f"{name}_{phase.lower()}" - for phase in PHASENAMES - for name in CT_CONSUMPTION_NAMES_FLOAT -] - -CT_CONSUMPTION_NAMES_STR_PHASE = [ - f"{name}_{phase.lower()}" - for phase in PHASENAMES - for name in CT_CONSUMPTION_NAMES_STR -] - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_consumption_ct_phase_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test consumption ct phase entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - CT_CONSUMPTION_NAMES_FLOAT_PHASE_TARGET = chain( - *[ - ( - phase_data.energy_delivered / 1000000.0, - phase_data.energy_received / 1000000.0, - phase_data.active_power / 1000.0, - phase_data.frequency, - phase_data.voltage, - len(phase_data.status_flags), - ) - for phase_data in mock_envoy.data.ctmeter_consumption_phases.values() - ] - ) - - for name, target in list( - zip( - CT_CONSUMPTION_NAMES_FLOAT_PHASE, - CT_CONSUMPTION_NAMES_FLOAT_PHASE_TARGET, - strict=False, - ) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert float(entity_state.state) == target - - CT_CONSUMPTION_NAMES_STR_PHASE_TARGET = [ - phase_data.metering_status - for phase_data in mock_envoy.data.ctmeter_consumption_phases.values() - ] - - for name, target in list( - zip( - CT_CONSUMPTION_NAMES_STR_PHASE, - CT_CONSUMPTION_NAMES_STR_PHASE_TARGET, - strict=False, - ) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert entity_state.state == target - - -CT_STORAGE_NAMES_FLOAT = ( - "lifetime_battery_energy_discharged", - "lifetime_battery_energy_charged", - "current_battery_discharge", - "voltage_storage_ct", - "meter_status_flags_active_storage_ct", -) -CT_STORAGE_NAMES_STR = 
("metering_status_storage_ct",) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_storage_ct_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test storage phase entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - data = mock_envoy.data.ctmeter_storage - - CT_STORAGE_TARGETS_FLOAT = ( - data.energy_delivered / 1000000.0, - data.energy_received / 1000000.0, - data.active_power / 1000.0, - data.voltage, - len(data.status_flags), - ) - for name, target in list( - zip(CT_STORAGE_NAMES_FLOAT, CT_STORAGE_TARGETS_FLOAT, strict=False) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert float(entity_state.state) == target - - CT_STORAGE_TARGETS_STR = (data.metering_status,) - for name, target in list( - zip(CT_STORAGE_NAMES_STR, CT_STORAGE_TARGETS_STR, strict=False) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert entity_state.state == target - - -CT_STORAGE_NAMES_FLOAT_PHASE = [ - f"{name}_{phase.lower()}" - for phase in PHASENAMES - for name in (CT_STORAGE_NAMES_FLOAT) -] - -CT_STORAGE_NAMES_STR_PHASE = [ - f"{name}_{phase.lower()}" for phase in PHASENAMES for name in (CT_STORAGE_NAMES_STR) -] - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_storage_ct_phase_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test storage ct phase entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - CT_STORAGE_NAMES_FLOAT_PHASE_TARGET = chain( - *[ - ( - phase_data.energy_delivered / 1000000.0, - phase_data.energy_received / 1000000.0, - phase_data.active_power / 1000.0, - phase_data.voltage, - len(phase_data.status_flags), - ) - for phase_data in mock_envoy.data.ctmeter_storage_phases.values() - ] - ) - - for name, target in list( - zip( - CT_STORAGE_NAMES_FLOAT_PHASE, - CT_STORAGE_NAMES_FLOAT_PHASE_TARGET, - strict=False, - ) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert float(entity_state.state) == target - - CT_STORAGE_NAMES_STR_PHASE_TARGET = [ - phase_data.metering_status - for phase_data in mock_envoy.data.ctmeter_storage_phases.values() - ] - - for name, target in list( - zip( - CT_STORAGE_NAMES_STR_PHASE, - CT_STORAGE_NAMES_STR_PHASE_TARGET, - strict=False, - ) - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) - assert entity_state.state == target - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - ], - indirect=["mock_envoy"], -) -async def test_sensor_all_phase_entities_disabled_by_integration( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all phase entities are disabled by integration.""" - with 
patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - assert all( - f"{ENTITY_BASE}_{entity}" - in (integration_disabled_entities(entity_registry, config_entry)) - for entity in ( - PRODUCTION_PHASE_NAMES - + CONSUMPTION_PHASE_NAMES - + CT_PRODUCTION_NAMES_FLOAT_PHASE - + CT_PRODUCTION_NAMES_STR_PHASE - + CT_CONSUMPTION_NAMES_FLOAT_PHASE - + CT_CONSUMPTION_NAMES_STR_PHASE - ) - ) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - ], - indirect=["mock_envoy"], -) -async def test_sensor_storage_phase_disabled_by_integration( - hass: HomeAssistant, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_envoy: AsyncMock, -) -> None: - """Test all storage CT phase entities are disabled by integration.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" - - assert all( - f"{ENTITY_BASE}_{entity}" - in integration_disabled_entities(entity_registry, config_entry) - for entity in (CT_STORAGE_NAMES_FLOAT_PHASE + CT_STORAGE_NAMES_STR_PHASE) - ) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy", - "envoy_1p_metered", - "envoy_eu_batt", - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - "envoy_tot_cons_metered", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_inverter_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test enphase_envoy inverter entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - entity_base = f"{Platform.SENSOR}.inverter" - - for sn, inverter in mock_envoy.data.inverters.items(): - assert (entity_state := hass.states.get(f"{entity_base}_{sn}")) - assert float(entity_state.state) == (inverter.last_report_watts) - assert (last_reported := hass.states.get(f"{entity_base}_{sn}_last_reported")) - assert dt_util.parse_datetime( - last_reported.state - ) == dt_util.utc_from_timestamp(inverter.last_report_date) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy", - "envoy_1p_metered", - "envoy_eu_batt", - "envoy_metered_batt_relay", - "envoy_nobatt_metered_3p", - "envoy_tot_cons_metered", - ], - indirect=["mock_envoy"], -) -async def test_sensor_inverter_disabled_by_integration( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test enphase_envoy inverter disabled by integration entities.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - INVERTER_BASE = f"{Platform.SENSOR}.inverter" - - assert all( - f"{INVERTER_BASE}_{sn}_last_reported" - in integration_disabled_entities(entity_registry, config_entry) - for sn in mock_envoy.data.inverters - ) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - ], - indirect=["mock_envoy"], -) -async def test_sensor_encharge_aggregate_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test enphase_envoy encharge aggregate entities values.""" - with 
patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.serial_number - ENTITY_BASE = f"{Platform.SENSOR}.envoy_{sn}" - - data = mock_envoy.data.encharge_aggregate - - for target in ( - ("battery", data.state_of_charge), - ("reserve_battery_level", data.reserve_state_of_charge), - ("available_battery_energy", data.available_energy), - ("reserve_battery_energy", data.backup_reserve), - ("battery_capacity", data.max_available_capacity), - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{target[0]}")) - assert float(entity_state.state) == target[1] - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - ], - indirect=["mock_envoy"], -) -async def test_sensor_encharge_enpower_data( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test enphase_envoy encharge enpower entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.data.enpower.serial_number - ENTITY_BASE = f"{Platform.SENSOR}.enpower" - - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_temperature")) - assert ( - round( - TemperatureConverter.convert( - float(entity_state.state), - hass.config.units.temperature_unit, - UnitOfTemperature.FAHRENHEIT - if mock_envoy.data.enpower.temperature_unit == "F" - else UnitOfTemperature.CELSIUS, - ) - ) - == mock_envoy.data.enpower.temperature - ) - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_last_reported")) - assert dt_util.parse_datetime(entity_state.state) == dt_util.utc_from_timestamp( - mock_envoy.data.enpower.last_report_date - ) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - ], - indirect=["mock_envoy"], -) -async def test_sensor_encharge_power_data( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_envoy: AsyncMock, -) -> None: - """Test enphase_envoy encharge_power entities values.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - ENTITY_BASE = f"{Platform.SENSOR}.encharge" - - ENCHARGE_POWER_NAMES = ( - "battery", - "apparent_power", - "power", - ) - - ENCHARGE_POWER_TARGETS = [ - ( - sn, - ( - encharge_power.soc, - encharge_power.apparent_power_mva / 1000.0, - encharge_power.real_power_mw / 1000.0, - ), - ) - for sn, encharge_power in mock_envoy.data.encharge_power.items() - ] - - for sn, sn_target in ENCHARGE_POWER_TARGETS: - for name, target in list(zip(ENCHARGE_POWER_NAMES, sn_target, strict=False)): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_{name}")) - assert float(entity_state.state) == target - - for sn, encharge_inventory in mock_envoy.data.encharge_inventory.items(): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_temperature")) - assert ( - round( - TemperatureConverter.convert( - float(entity_state.state), - hass.config.units.temperature_unit, - UnitOfTemperature.FAHRENHEIT - if encharge_inventory.temperature_unit == "F" - else UnitOfTemperature.CELSIUS, - ) - ) - == encharge_inventory.temperature - ) - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_last_reported")) - assert dt_util.parse_datetime(entity_state.state) == dt_util.utc_from_timestamp( - encharge_inventory.last_report_date - ) - - -def integration_disabled_entities( - entity_registry: er.EntityRegistry, 
config_entry: MockConfigEntry -) -> list[str]: - """Return list of entity ids marked as disabled by integration.""" - return [ - entity_entry.entity_id - for entity_entry in er.async_entries_for_config_entry( - entity_registry, config_entry.entry_id - ) - if entity_entry.disabled_by == er.RegistryEntryDisabler.INTEGRATION - ] - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_missing_data( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_envoy: AsyncMock, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test enphase_envoy sensor platform midding data handling.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - ENTITY_BASE = f"{Platform.SENSOR}.envoy_{mock_envoy.serial_number}" - - # force missing data to test 'if == none' code sections - mock_envoy.data.system_production_phases["L2"] = None - mock_envoy.data.system_consumption_phases["L2"] = None - mock_envoy.data.system_net_consumption_phases["L2"] = None - mock_envoy.data.ctmeter_production = None - mock_envoy.data.ctmeter_consumption = None - mock_envoy.data.ctmeter_storage = None - mock_envoy.data.ctmeter_production_phases = None - mock_envoy.data.ctmeter_consumption_phases = None - mock_envoy.data.ctmeter_storage_phases = None - - # use different inverter serial to test 'expected inverter missing' code - mock_envoy.data.inverters["2"] = mock_envoy.data.inverters.pop("1") - - # force HA to detect changed data by changing raw - mock_envoy.data.raw = {"I": "am changed"} - - # MOve time to next update - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - # all these should now be in unknown state - for entity in ( - "lifetime_energy_production_l2", - "lifetime_energy_consumption_l2", - "metering_status_production_ct", - "metering_status_net_consumption_ct", - "metering_status_storage_ct", - "metering_status_production_ct_l2", - "metering_status_net_consumption_ct_l2", - "metering_status_storage_ct_l2", - ): - assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{entity}")) - assert entity_state.state == STATE_UNKNOWN - - # test the original inverter is now unknown - assert (entity_state := hass.states.get("sensor.inverter_1")) - assert entity_state.state == STATE_UNKNOWN - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy_metered_batt_relay", - ], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_fw_update( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_envoy: AsyncMock, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test enphase_envoy sensor update over fw update.""" - logging.getLogger("homeassistant.components.enphase_envoy").setLevel(logging.DEBUG) - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, config_entry) - - # force HA to detect changed data by changing raw - mock_envoy.firmware = "0.0.0" - - # Move time to next update - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - assert "firmware changed from: " in caplog.text - assert "to: 0.0.0, reloading enphase 
envoy integration" in caplog.text diff --git a/tests/components/enphase_envoy/test_switch.py b/tests/components/enphase_envoy/test_switch.py deleted file mode 100644 index f30cba4d201..00000000000 --- a/tests/components/enphase_envoy/test_switch.py +++ /dev/null @@ -1,234 +0,0 @@ -"""Test Enphase Envoy switch platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.enphase_envoy.const import Platform -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TOGGLE, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_OFF, - STATE_ON, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.parametrize( - ("mock_envoy"), - ["envoy_metered_batt_relay", "envoy_eu_batt"], - indirect=["mock_envoy"], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_switch( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test switch platform entities against snapshot.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): - await setup_integration(hass, config_entry) - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -@pytest.mark.parametrize( - ("mock_envoy"), - [ - "envoy", - "envoy_1p_metered", - "envoy_nobatt_metered_3p", - "envoy_tot_cons_metered", - ], - indirect=["mock_envoy"], -) -async def test_no_switch( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test switch platform entities are not created.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): - await setup_integration(hass, config_entry) - assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) - - -@pytest.mark.parametrize( - ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] -) -async def test_switch_grid_operation( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, -) -> None: - """Test switch platform operation for grid switches.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): - await setup_integration(hass, config_entry) - - sn = mock_envoy.data.enpower.serial_number - test_entity = f"{Platform.SWITCH}.enpower_{sn}_grid_enabled" - - # validate envoy value is reflected in entity - assert (entity_state := hass.states.get(test_entity)) - assert entity_state.state == STATE_ON - - # test grid status switch operation - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: test_entity}, - blocking=True, - ) - mock_envoy.go_off_grid.assert_awaited_once_with() - mock_envoy.go_off_grid.reset_mock() - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: test_entity}, - blocking=True, - ) - mock_envoy.go_on_grid.assert_awaited_once_with() - mock_envoy.go_on_grid.reset_mock() - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TOGGLE, - {ATTR_ENTITY_ID: test_entity}, - blocking=True, - ) - mock_envoy.go_off_grid.assert_awaited_once_with() - mock_envoy.go_off_grid.reset_mock() - - 
-@pytest.mark.parametrize( - ("mock_envoy", "use_serial"), - [ - ("envoy_metered_batt_relay", "enpower_654321"), - ("envoy_eu_batt", "envoy_1234"), - ], - indirect=["mock_envoy"], -) -async def test_switch_charge_from_grid_operation( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - use_serial: str, -) -> None: - """Test switch platform operation for charge from grid switches.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): - await setup_integration(hass, config_entry) - - test_entity = f"{Platform.SWITCH}.{use_serial}_charge_from_grid" - - # validate envoy value is reflected in entity - assert (entity_state := hass.states.get(test_entity)) - assert entity_state.state == STATE_ON - - # test grid status switch operation - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: test_entity}, - blocking=True, - ) - mock_envoy.disable_charge_from_grid.assert_awaited_once_with() - mock_envoy.disable_charge_from_grid.reset_mock() - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: test_entity}, - blocking=True, - ) - mock_envoy.enable_charge_from_grid.assert_awaited_once_with() - mock_envoy.enable_charge_from_grid.reset_mock() - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TOGGLE, - {ATTR_ENTITY_ID: test_entity}, - blocking=True, - ) - mock_envoy.disable_charge_from_grid.assert_awaited_once_with() - mock_envoy.disable_charge_from_grid.reset_mock() - - -@pytest.mark.parametrize( - ("mock_envoy", "entity_states"), - [ - ( - "envoy_metered_batt_relay", - { - "NC1": (STATE_OFF, 0, 1), - "NC2": (STATE_ON, 1, 0), - "NC3": (STATE_OFF, 0, 1), - }, - ) - ], - indirect=["mock_envoy"], -) -async def test_switch_relay_operation( - hass: HomeAssistant, - mock_envoy: AsyncMock, - config_entry: MockConfigEntry, - entity_states: dict[str, tuple[str, int, int]], -) -> None: - """Test enphase_envoy switch relay entities operation.""" - with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): - await setup_integration(hass, config_entry) - - entity_base = f"{Platform.SWITCH}." 
- - for contact_id, dry_contact in mock_envoy.data.dry_contact_settings.items(): - name = dry_contact.load_name.lower().replace(" ", "_") - test_entity = f"{entity_base}{name}" - assert (entity_state := hass.states.get(test_entity)) - assert entity_state.state == entity_states[contact_id][0] - open_count = entity_states[contact_id][1] - close_count = entity_states[contact_id][2] - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: test_entity}, - blocking=True, - ) - - mock_envoy.open_dry_contact.assert_awaited_once_with(contact_id) - mock_envoy.close_dry_contact.assert_not_awaited() - mock_envoy.open_dry_contact.reset_mock() - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: test_entity}, - blocking=True, - ) - - mock_envoy.close_dry_contact.assert_awaited_once_with(contact_id) - mock_envoy.open_dry_contact.assert_not_awaited() - mock_envoy.close_dry_contact.reset_mock() - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TOGGLE, - {ATTR_ENTITY_ID: test_entity}, - blocking=True, - ) - - assert mock_envoy.open_dry_contact.await_count == open_count - assert mock_envoy.close_dry_contact.await_count == close_count - mock_envoy.open_dry_contact.reset_mock() - mock_envoy.close_dry_contact.reset_mock() diff --git a/tests/components/environment_canada/__init__.py b/tests/components/environment_canada/__init__.py index 92c28e09b74..65b0ed16207 100644 --- a/tests/components/environment_canada/__init__.py +++ b/tests/components/environment_canada/__init__.py @@ -1,67 +1 @@ """Tests for the Environment Canada integration.""" - -from datetime import UTC, datetime -from unittest.mock import AsyncMock, MagicMock, patch - -from homeassistant.components.environment_canada.const import CONF_STATION, DOMAIN -from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - -FIXTURE_USER_INPUT = { - CONF_LATITUDE: 55.55, - CONF_LONGITUDE: 42.42, - CONF_STATION: "XX/1234567", - CONF_LANGUAGE: "Gibberish", -} - - -async def init_integration(hass: HomeAssistant, ec_data) -> MockConfigEntry: - """Set up the Environment Canada integration in Home Assistant.""" - - def mock_ec(): - ec_mock = MagicMock() - ec_mock.station_id = FIXTURE_USER_INPUT[CONF_STATION] - ec_mock.lat = FIXTURE_USER_INPUT[CONF_LATITUDE] - ec_mock.lon = FIXTURE_USER_INPUT[CONF_LONGITUDE] - ec_mock.language = FIXTURE_USER_INPUT[CONF_LANGUAGE] - ec_mock.update = AsyncMock() - return ec_mock - - config_entry = MockConfigEntry(domain=DOMAIN, data=FIXTURE_USER_INPUT, title="Home") - config_entry.add_to_hass(hass) - - weather_mock = mock_ec() - ec_data["metadata"]["timestamp"] = datetime(2022, 10, 4, tzinfo=UTC) - weather_mock.conditions = ec_data["conditions"] - weather_mock.alerts = ec_data["alerts"] - weather_mock.daily_forecasts = ec_data["daily_forecasts"] - weather_mock.metadata = ec_data["metadata"] - - radar_mock = mock_ec() - radar_mock.image = b"GIF..." 
- radar_mock.timestamp = datetime(2022, 10, 4, tzinfo=UTC) - - with ( - patch( - "homeassistant.components.environment_canada.ECWeather", - return_value=weather_mock, - ), - patch( - "homeassistant.components.environment_canada.ECAirQuality", - return_value=mock_ec(), - ), - patch( - "homeassistant.components.environment_canada.ECRadar", - return_value=radar_mock, - ), - patch( - "homeassistant.components.environment_canada.config_flow.ECWeather", - return_value=weather_mock, - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry diff --git a/tests/components/environment_canada/conftest.py b/tests/components/environment_canada/conftest.py deleted file mode 100644 index 69cec187d11..00000000000 --- a/tests/components/environment_canada/conftest.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Common fixture for Environment Canada tests.""" - -import contextlib -from datetime import datetime -import json - -import pytest - -from tests.common import load_fixture - - -@pytest.fixture -def ec_data(): - """Load Environment Canada data.""" - - def date_hook(weather): - """Convert timestamp string to datetime.""" - - if t := weather.get("timestamp"): - with contextlib.suppress(ValueError): - weather["timestamp"] = datetime.fromisoformat(t) - return weather - - return json.loads( - load_fixture("environment_canada/current_conditions_data.json"), - object_hook=date_hook, - ) diff --git a/tests/components/environment_canada/fixtures/current_conditions_data.json b/tests/components/environment_canada/fixtures/current_conditions_data.json index ceb00028f95..f3a18869940 100644 --- a/tests/components/environment_canada/fixtures/current_conditions_data.json +++ b/tests/components/environment_canada/fixtures/current_conditions_data.json @@ -135,8 +135,7 @@ "icon_code": "30", "temperature": -1, "temperature_class": "low", - "precip_probability": 0, - "timestamp": "2022-10-03 15:00:00+00:00" + "precip_probability": 0 }, { "period": "Tuesday", @@ -144,8 +143,7 @@ "icon_code": "00", "temperature": 18, "temperature_class": "high", - "precip_probability": 0, - "timestamp": "2022-10-04 15:00:00+00:00" + "precip_probability": 0 }, { "period": "Tuesday night", @@ -153,8 +151,7 @@ "icon_code": "30", "temperature": 3, "temperature_class": "low", - "precip_probability": 0, - "timestamp": "2022-10-04 15:00:00+00:00" + "precip_probability": 0 }, { "period": "Wednesday", @@ -162,8 +159,7 @@ "icon_code": "00", "temperature": 20, "temperature_class": "high", - "precip_probability": 0, - "timestamp": "2022-10-05 15:00:00+00:00" + "precip_probability": 0 }, { "period": "Wednesday night", @@ -171,8 +167,7 @@ "icon_code": "30", "temperature": 9, "temperature_class": "low", - "precip_probability": 0, - "timestamp": "2022-10-05 15:00:00+00:00" + "precip_probability": 0 }, { "period": "Thursday", @@ -180,8 +175,7 @@ "icon_code": "02", "temperature": 20, "temperature_class": "high", - "precip_probability": 0, - "timestamp": "2022-10-06 15:00:00+00:00" + "precip_probability": 0 }, { "period": "Thursday night", @@ -189,8 +183,7 @@ "icon_code": "12", "temperature": 7, "temperature_class": "low", - "precip_probability": 0, - "timestamp": "2022-10-06 15:00:00+00:00" + "precip_probability": 0 }, { "period": "Friday", @@ -198,8 +191,7 @@ "icon_code": "12", "temperature": 13, "temperature_class": "high", - "precip_probability": 40, - "timestamp": "2022-10-07 15:00:00+00:00" + "precip_probability": 40 }, { "period": "Friday night", @@ -207,8 +199,7 @@ "icon_code": "32", 
"temperature": 1, "temperature_class": "low", - "precip_probability": 0, - "timestamp": "2022-10-07 15:00:00+00:00" + "precip_probability": 0 }, { "period": "Saturday", @@ -216,8 +207,7 @@ "icon_code": "02", "temperature": 10, "temperature_class": "high", - "precip_probability": 0, - "timestamp": "2022-10-08 15:00:00+00:00" + "precip_probability": 0 }, { "period": "Saturday night", @@ -225,8 +215,7 @@ "icon_code": "32", "temperature": 3, "temperature_class": "low", - "precip_probability": 0, - "timestamp": "2022-10-08 15:00:00+00:00" + "precip_probability": 0 }, { "period": "Sunday", @@ -234,8 +223,7 @@ "icon_code": "02", "temperature": 12, "temperature_class": "high", - "precip_probability": 0, - "timestamp": "2022-10-09 15:00:00+00:00" + "precip_probability": 0 } ], "metadata": { diff --git a/tests/components/environment_canada/snapshots/test_weather.ambr b/tests/components/environment_canada/snapshots/test_weather.ambr deleted file mode 100644 index cfa0ad912a4..00000000000 --- a/tests/components/environment_canada/snapshots/test_weather.ambr +++ /dev/null @@ -1,94 +0,0 @@ -# serializer version: 1 -# name: test_forecast_daily - dict({ - 'weather.home_forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2022-10-04T15:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 18.0, - 'templow': 3.0, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2022-10-05T15:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 20.0, - 'templow': 9.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2022-10-06T15:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 20.0, - 'templow': 7.0, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2022-10-07T15:00:00+00:00', - 'precipitation_probability': 40, - 'temperature': 13.0, - 'templow': 1.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2022-10-08T15:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 10.0, - 'templow': 3.0, - }), - ]), - }), - }) -# --- -# name: test_forecast_daily_with_some_previous_days_data - dict({ - 'weather.home_forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 'clear-night', - 'datetime': '2022-10-03T15:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': None, - 'templow': -1.0, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2022-10-04T15:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 18.0, - 'templow': 3.0, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2022-10-05T15:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 20.0, - 'templow': 9.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2022-10-06T15:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 20.0, - 'templow': 7.0, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2022-10-07T15:00:00+00:00', - 'precipitation_probability': 40, - 'temperature': 13.0, - 'templow': 1.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2022-10-08T15:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 10.0, - 'templow': 3.0, - }), - ]), - }), - }) -# --- diff --git a/tests/components/environment_canada/test_config_flow.py b/tests/components/environment_canada/test_config_flow.py index d61966e8da1..f2c35ab4295 100644 --- a/tests/components/environment_canada/test_config_flow.py +++ b/tests/components/environment_canada/test_config_flow.py @@ -1,7 +1,7 @@ """Test the Environment Canada (EC) config flow.""" from unittest.mock import AsyncMock, MagicMock, 
Mock, patch -import xml.etree.ElementTree as ET +import xml.etree.ElementTree as et import aiohttp import pytest @@ -94,7 +94,7 @@ async def test_create_same_entry_twice(hass: HomeAssistant) -> None: (aiohttp.ClientResponseError(Mock(), (), status=404), "bad_station_id"), (aiohttp.ClientResponseError(Mock(), (), status=400), "error_response"), (aiohttp.ClientConnectionError, "cannot_connect"), - (ET.ParseError, "bad_station_id"), + (et.ParseError, "bad_station_id"), (ValueError, "unknown"), ], ) diff --git a/tests/components/environment_canada/test_diagnostics.py b/tests/components/environment_canada/test_diagnostics.py index 79b72961124..8f800111d39 100644 --- a/tests/components/environment_canada/test_diagnostics.py +++ b/tests/components/environment_canada/test_diagnostics.py @@ -1,17 +1,16 @@ """Test Environment Canada diagnostics.""" +from datetime import UTC, datetime import json -from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch from syrupy import SnapshotAssertion -from homeassistant.components.environment_canada.const import CONF_STATION +from homeassistant.components.environment_canada.const import CONF_STATION, DOMAIN from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE from homeassistant.core import HomeAssistant -from . import init_integration - -from tests.common import load_fixture +from tests.common import MockConfigEntry, load_fixture from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -23,19 +22,68 @@ FIXTURE_USER_INPUT = { } -async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, - ec_data: dict[str, Any], -) -> None: - """Test config entry diagnostics.""" +async def init_integration(hass: HomeAssistant) -> MockConfigEntry: + """Set up the Environment Canada integration in Home Assistant.""" + + def mock_ec(): + ec_mock = MagicMock() + ec_mock.station_id = FIXTURE_USER_INPUT[CONF_STATION] + ec_mock.lat = FIXTURE_USER_INPUT[CONF_LATITUDE] + ec_mock.lon = FIXTURE_USER_INPUT[CONF_LONGITUDE] + ec_mock.language = FIXTURE_USER_INPUT[CONF_LANGUAGE] + ec_mock.update = AsyncMock() + return ec_mock + + config_entry = MockConfigEntry(domain=DOMAIN, data=FIXTURE_USER_INPUT) + config_entry.add_to_hass(hass) ec_data = json.loads( load_fixture("environment_canada/current_conditions_data.json") ) - config_entry = await init_integration(hass, ec_data) + weather_mock = mock_ec() + ec_data["metadata"]["timestamp"] = datetime(2022, 10, 4, tzinfo=UTC) + weather_mock.conditions = ec_data["conditions"] + weather_mock.alerts = ec_data["alerts"] + weather_mock.daily_forecasts = ec_data["daily_forecasts"] + weather_mock.metadata = ec_data["metadata"] + + radar_mock = mock_ec() + radar_mock.image = b"GIF..." 
+ radar_mock.timestamp = datetime(2022, 10, 4, tzinfo=UTC) + + with ( + patch( + "homeassistant.components.environment_canada.ECWeather", + return_value=weather_mock, + ), + patch( + "homeassistant.components.environment_canada.ECAirQuality", + return_value=mock_ec(), + ), + patch( + "homeassistant.components.environment_canada.ECRadar", + return_value=radar_mock, + ), + patch( + "homeassistant.components.environment_canada.config_flow.ECWeather", + return_value=weather_mock, + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + + config_entry = await init_integration(hass) diagnostics = await get_diagnostics_for_config_entry( hass, hass_client, config_entry ) diff --git a/tests/components/environment_canada/test_weather.py b/tests/components/environment_canada/test_weather.py deleted file mode 100644 index 8e22f68462f..00000000000 --- a/tests/components/environment_canada/test_weather.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Test weather.""" - -import copy -from typing import Any - -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.weather import ( - DOMAIN as WEATHER_DOMAIN, - SERVICE_GET_FORECASTS, -) -from homeassistant.core import HomeAssistant - -from . import init_integration - - -async def test_forecast_daily( - hass: HomeAssistant, snapshot: SnapshotAssertion, ec_data: dict[str, Any] -) -> None: - """Test basic forecast.""" - - # First entry in test data is a half day; we don't want that for this test - local_ec_data = copy.deepcopy(ec_data) - del local_ec_data["daily_forecasts"][0] - - await init_integration(hass, local_ec_data) - - response = await hass.services.async_call( - WEATHER_DOMAIN, - SERVICE_GET_FORECASTS, - { - "entity_id": "weather.home_forecast", - "type": "daily", - }, - blocking=True, - return_response=True, - ) - assert response == snapshot - - -async def test_forecast_daily_with_some_previous_days_data( - hass: HomeAssistant, snapshot: SnapshotAssertion, ec_data: dict[str, Any] -) -> None: - """Test forecast with half day at start.""" - - await init_integration(hass, ec_data) - - response = await hass.services.async_call( - WEATHER_DOMAIN, - SERVICE_GET_FORECASTS, - { - "entity_id": "weather.home_forecast", - "type": "daily", - }, - blocking=True, - return_response=True, - ) - assert response == snapshot diff --git a/tests/components/epson/test_config_flow.py b/tests/components/epson/test_config_flow.py index f727185362c..d485a4bfdef 100644 --- a/tests/components/epson/test_config_flow.py +++ b/tests/components/epson/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch from epson_projector.const import PWR_OFF_STATE from homeassistant import config_entries -from homeassistant.components.epson.const import CONF_CONNECTION_TYPE, DOMAIN, HTTP +from homeassistant.components.epson.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -33,10 +33,6 @@ async def test_form(hass: HomeAssistant) -> None: patch( "homeassistant.components.epson.async_setup_entry", return_value=True, - ), - patch( - "homeassistant.components.epson.Projector.close", - return_value=True, ) as mock_setup_entry, ): result2 = await 
hass.config_entries.flow.async_configure( @@ -47,7 +43,7 @@ async def test_form(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "test-epson" - assert result2["data"] == {CONF_CONNECTION_TYPE: HTTP, CONF_HOST: "1.1.1.1"} + assert result2["data"] == {CONF_HOST: "1.1.1.1"} assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/epson/test_init.py b/tests/components/epson/test_init.py deleted file mode 100644 index 964f9e915ab..00000000000 --- a/tests/components/epson/test_init.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Test the epson init.""" - -from unittest.mock import patch - -from homeassistant.components.epson.const import CONF_CONNECTION_TYPE, DOMAIN -from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def test_migrate_entry(hass: HomeAssistant) -> None: - """Test successful migration of entry data from version 1 to 1.2.""" - - mock_entry = MockConfigEntry( - domain=DOMAIN, - title="Epson", - version=1, - minor_version=1, - data={CONF_HOST: "1.1.1.1"}, - entry_id="1cb78c095906279574a0442a1f0003ef", - ) - assert mock_entry.version == 1 - - mock_entry.add_to_hass(hass) - - # Create entity entry to migrate to new unique ID - with patch("homeassistant.components.epson.Projector.get_power"): - await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() - - # Check that it now has connection_type - assert mock_entry - assert mock_entry.version == 1 - assert mock_entry.minor_version == 2 - assert mock_entry.data.get(CONF_CONNECTION_TYPE) == "http" - assert mock_entry.data.get(CONF_HOST) == "1.1.1.1" diff --git a/tests/components/epson/test_media_player.py b/tests/components/epson/test_media_player.py index 188fdd5b700..e529746dcd0 100644 --- a/tests/components/epson/test_media_player.py +++ b/tests/components/epson/test_media_player.py @@ -5,7 +5,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory -from homeassistant.components.epson.const import CONF_CONNECTION_TYPE, DOMAIN, HTTP +from homeassistant.components.epson.const import DOMAIN from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -22,7 +22,7 @@ async def test_set_unique_id( entry = MockConfigEntry( domain=DOMAIN, title="Epson", - data={CONF_CONNECTION_TYPE: HTTP, CONF_HOST: "1.1.1.1"}, + data={CONF_HOST: "1.1.1.1"}, entry_id="1cb78c095906279574a0442a1f0003ef", ) entry.add_to_hass(hass) diff --git a/tests/components/esphome/conftest.py b/tests/components/esphome/conftest.py index 2b7c127efd3..8a069d257d8 100644 --- a/tests/components/esphome/conftest.py +++ b/tests/components/esphome/conftest.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from asyncio import Event -from collections.abc import AsyncGenerator, Awaitable, Callable, Coroutine +from collections.abc import Awaitable, Callable, Coroutine from pathlib import Path from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch @@ -19,11 +19,12 @@ from aioesphomeapi import ( HomeassistantServiceCall, ReconnectLogic, UserService, - VoiceAssistantAnnounceFinished, VoiceAssistantAudioSettings, + VoiceAssistantEventType, VoiceAssistantFeature, ) import pytest +from typing_extensions import AsyncGenerator from zeroconf import Zeroconf from homeassistant.components.esphome import dashboard @@ -34,6 +35,11 @@ from 
homeassistant.components.esphome.const import ( DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS, DOMAIN, ) +from homeassistant.components.esphome.entry_data import RuntimeEntryData +from homeassistant.components.esphome.voice_assistant import ( + VoiceAssistantAPIPipeline, + VoiceAssistantUDPPipeline, +) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -200,13 +206,12 @@ class MockESPHomeDevice: self.home_assistant_state_subscription_callback: Callable[ [str, str | None], None ] - self.home_assistant_state_request_callback: Callable[[str, str | None], None] self.voice_assistant_handle_start_callback: Callable[ [str, int, VoiceAssistantAudioSettings, str | None], Coroutine[Any, Any, int | None], ] self.voice_assistant_handle_stop_callback: Callable[ - [bool], Coroutine[Any, Any, None] + [], Coroutine[Any, Any, None] ] self.voice_assistant_handle_audio_callback: ( Callable[ @@ -215,13 +220,6 @@ class MockESPHomeDevice: ] | None ) - self.voice_assistant_handle_announcement_finished_callback: ( - Callable[ - [VoiceAssistantAnnounceFinished], - Coroutine[Any, Any, None], - ] - | None - ) self.device_info = device_info def set_state_callback(self, state_callback: Callable[[EntityState], None]) -> None: @@ -271,11 +269,9 @@ class MockESPHomeDevice: def set_home_assistant_state_subscription_callback( self, on_state_sub: Callable[[str, str | None], None], - on_state_request: Callable[[str, str | None], None], ) -> None: """Set the state call callback.""" self.home_assistant_state_subscription_callback = on_state_sub - self.home_assistant_state_request_callback = on_state_request def mock_home_assistant_state_subscription( self, entity_id: str, attribute: str | None @@ -283,19 +279,13 @@ class MockESPHomeDevice: """Mock a state subscription.""" self.home_assistant_state_subscription_callback(entity_id, attribute) - def mock_home_assistant_state_request( - self, entity_id: str, attribute: str | None - ) -> None: - """Mock a state request.""" - self.home_assistant_state_request_callback(entity_id, attribute) - def set_subscribe_voice_assistant_callbacks( self, handle_start: Callable[ [str, int, VoiceAssistantAudioSettings, str | None], Coroutine[Any, Any, int | None], ], - handle_stop: Callable[[bool], Coroutine[Any, Any, None]], + handle_stop: Callable[[], Coroutine[Any, Any, None]], handle_audio: ( Callable[ [bytes], @@ -303,21 +293,11 @@ class MockESPHomeDevice: ] | None ) = None, - handle_announcement_finished: ( - Callable[ - [VoiceAssistantAnnounceFinished], - Coroutine[Any, Any, None], - ] - | None - ) = None, ) -> None: """Set the voice assistant subscription callbacks.""" self.voice_assistant_handle_start_callback = handle_start self.voice_assistant_handle_stop_callback = handle_stop self.voice_assistant_handle_audio_callback = handle_audio - self.voice_assistant_handle_announcement_finished_callback = ( - handle_announcement_finished - ) async def mock_voice_assistant_handle_start( self, @@ -331,22 +311,15 @@ class MockESPHomeDevice: conversation_id, flags, settings, wake_word_phrase ) - async def mock_voice_assistant_handle_stop(self, abort: bool) -> None: + async def mock_voice_assistant_handle_stop(self) -> None: """Mock voice assistant handle stop.""" - await self.voice_assistant_handle_stop_callback(abort) + await self.voice_assistant_handle_stop_callback() async def mock_voice_assistant_handle_audio(self, audio: bytes) -> None: """Mock voice assistant handle audio.""" 
assert self.voice_assistant_handle_audio_callback is not None await self.voice_assistant_handle_audio_callback(audio) - async def mock_voice_assistant_handle_announcement_finished( - self, finished: VoiceAssistantAnnounceFinished - ) -> None: - """Mock voice assistant handle announcement finished.""" - assert self.voice_assistant_handle_announcement_finished_callback is not None - await self.voice_assistant_handle_announcement_finished_callback(finished) - async def _mock_generic_device_entry( hass: HomeAssistant, @@ -406,12 +379,9 @@ async def _mock_generic_device_entry( def _subscribe_home_assistant_states( on_state_sub: Callable[[str, str | None], None], - on_state_request: Callable[[str, str | None], None], ) -> None: """Subscribe to home assistant states.""" - mock_device.set_home_assistant_state_subscription_callback( - on_state_sub, on_state_request - ) + mock_device.set_home_assistant_state_subscription_callback(on_state_sub) def _subscribe_voice_assistant( *, @@ -419,7 +389,7 @@ async def _mock_generic_device_entry( [str, int, VoiceAssistantAudioSettings, str | None], Coroutine[Any, Any, int | None], ], - handle_stop: Callable[[bool], Coroutine[Any, Any, None]], + handle_stop: Callable[[], Coroutine[Any, Any, None]], handle_audio: ( Callable[ [bytes], @@ -427,17 +397,10 @@ async def _mock_generic_device_entry( ] | None ) = None, - handle_announcement_finished: ( - Callable[ - [VoiceAssistantAnnounceFinished], - Coroutine[Any, Any, None], - ] - | None - ) = None, ) -> Callable[[], None]: """Subscribe to voice assistant.""" mock_device.set_subscribe_voice_assistant_callbacks( - handle_start, handle_stop, handle_audio, handle_announcement_finished + handle_start, handle_stop, handle_audio ) def unsub(): @@ -459,7 +422,7 @@ async def _mock_generic_device_entry( class MockReconnectLogic(BaseMockReconnectLogic): """Mock ReconnectLogic.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Init the mock.""" super().__init__(*args, **kwargs) mock_device.set_on_disconnect(kwargs["on_disconnect"]) @@ -651,3 +614,57 @@ async def mock_esphome_device( ) return _mock_device + + +@pytest.fixture +def mock_voice_assistant_api_pipeline() -> VoiceAssistantAPIPipeline: + """Return the API Pipeline factory.""" + mock_pipeline = Mock(spec=VoiceAssistantAPIPipeline) + + def mock_constructor( + hass: HomeAssistant, + entry_data: RuntimeEntryData, + handle_event: Callable[[VoiceAssistantEventType, dict[str, str] | None], None], + handle_finished: Callable[[], None], + api_client: APIClient, + ): + """Fake the constructor.""" + mock_pipeline.hass = hass + mock_pipeline.entry_data = entry_data + mock_pipeline.handle_event = handle_event + mock_pipeline.handle_finished = handle_finished + mock_pipeline.api_client = api_client + return mock_pipeline + + mock_pipeline.side_effect = mock_constructor + with patch( + "homeassistant.components.esphome.voice_assistant.VoiceAssistantAPIPipeline", + new=mock_pipeline, + ): + yield mock_pipeline + + +@pytest.fixture +def mock_voice_assistant_udp_pipeline() -> VoiceAssistantUDPPipeline: + """Return the API Pipeline factory.""" + mock_pipeline = Mock(spec=VoiceAssistantUDPPipeline) + + def mock_constructor( + hass: HomeAssistant, + entry_data: RuntimeEntryData, + handle_event: Callable[[VoiceAssistantEventType, dict[str, str] | None], None], + handle_finished: Callable[[], None], + ): + """Fake the constructor.""" + mock_pipeline.hass = hass + mock_pipeline.entry_data = entry_data + mock_pipeline.handle_event = 
handle_event + mock_pipeline.handle_finished = handle_finished + return mock_pipeline + + mock_pipeline.side_effect = mock_constructor + with patch( + "homeassistant.components.esphome.voice_assistant.VoiceAssistantUDPPipeline", + new=mock_pipeline, + ): + yield mock_pipeline diff --git a/tests/components/esphome/snapshots/test_diagnostics.ambr b/tests/components/esphome/snapshots/test_diagnostics.ambr index 4f7ea679b20..0d2f0e60b82 100644 --- a/tests/components/esphome/snapshots/test_diagnostics.ambr +++ b/tests/components/esphome/snapshots/test_diagnostics.ambr @@ -10,8 +10,6 @@ 'port': 6053, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'esphome', 'entry_id': '08d821dc059cf4f645cb024d32c8e708', 'minor_version': 1, diff --git a/tests/components/esphome/test_alarm_control_panel.py b/tests/components/esphome/test_alarm_control_panel.py index a3bfc72f3e2..af717ac1b49 100644 --- a/tests/components/esphome/test_alarm_control_panel.py +++ b/tests/components/esphome/test_alarm_control_panel.py @@ -4,9 +4,9 @@ from unittest.mock import call from aioesphomeapi import ( AlarmControlPanelCommand, - AlarmControlPanelEntityState as ESPHomeAlarmEntityState, + AlarmControlPanelEntityState, AlarmControlPanelInfo, - AlarmControlPanelState as ESPHomeAlarmState, + AlarmControlPanelState, APIClient, ) @@ -20,10 +20,9 @@ from homeassistant.components.alarm_control_panel import ( SERVICE_ALARM_ARM_VACATION, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, - AlarmControlPanelState, ) from homeassistant.components.esphome.alarm_control_panel import EspHomeACPFeatures -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.const import ATTR_ENTITY_ID, STATE_ALARM_ARMED_AWAY, STATE_UNKNOWN from homeassistant.core import HomeAssistant @@ -49,7 +48,9 @@ async def test_generic_alarm_control_panel_requires_code( requires_code_to_arm=True, ) ] - states = [ESPHomeAlarmEntityState(key=1, state=ESPHomeAlarmState.ARMED_AWAY)] + states = [ + AlarmControlPanelEntityState(key=1, state=AlarmControlPanelState.ARMED_AWAY) + ] user_service = [] await mock_generic_device_entry( mock_client=mock_client, @@ -59,7 +60,7 @@ async def test_generic_alarm_control_panel_requires_code( ) state = hass.states.get("alarm_control_panel.test_myalarm_control_panel") assert state is not None - assert state.state == AlarmControlPanelState.ARMED_AWAY + assert state.state == STATE_ALARM_ARMED_AWAY await hass.services.async_call( ALARM_CONTROL_PANEL_DOMAIN, @@ -182,7 +183,9 @@ async def test_generic_alarm_control_panel_no_code( requires_code_to_arm=False, ) ] - states = [ESPHomeAlarmEntityState(key=1, state=ESPHomeAlarmState.ARMED_AWAY)] + states = [ + AlarmControlPanelEntityState(key=1, state=AlarmControlPanelState.ARMED_AWAY) + ] user_service = [] await mock_generic_device_entry( mock_client=mock_client, @@ -192,7 +195,7 @@ async def test_generic_alarm_control_panel_no_code( ) state = hass.states.get("alarm_control_panel.test_myalarm_control_panel") assert state is not None - assert state.state == AlarmControlPanelState.ARMED_AWAY + assert state.state == STATE_ALARM_ARMED_AWAY await hass.services.async_call( ALARM_CONTROL_PANEL_DOMAIN, diff --git a/tests/components/esphome/test_assist_satellite.py b/tests/components/esphome/test_assist_satellite.py deleted file mode 100644 index e8344e50161..00000000000 --- a/tests/components/esphome/test_assist_satellite.py +++ /dev/null @@ -1,1475 +0,0 @@ -"""Test ESPHome voice assistant server.""" - -import asyncio -from collections.abc import Awaitable, Callable 
-from dataclasses import replace -import io -import socket -from unittest.mock import ANY, Mock, patch -import wave - -from aioesphomeapi import ( - APIClient, - EntityInfo, - EntityState, - MediaPlayerFormatPurpose, - MediaPlayerInfo, - MediaPlayerSupportedFormat, - UserService, - VoiceAssistantAnnounceFinished, - VoiceAssistantAudioSettings, - VoiceAssistantCommandFlag, - VoiceAssistantEventType, - VoiceAssistantFeature, - VoiceAssistantTimerEventType, -) -import pytest - -from homeassistant.components import assist_satellite, tts -from homeassistant.components.assist_pipeline import PipelineEvent, PipelineEventType -from homeassistant.components.assist_satellite import ( - AssistSatelliteConfiguration, - AssistSatelliteEntity, - AssistSatelliteEntityFeature, - AssistSatelliteWakeWord, -) - -# pylint: disable-next=hass-component-root-import -from homeassistant.components.assist_satellite.entity import AssistSatelliteState -from homeassistant.components.esphome import DOMAIN -from homeassistant.components.esphome.assist_satellite import ( - EsphomeAssistSatellite, - VoiceAssistantUDPServer, -) -from homeassistant.components.media_source import PlayMedia -from homeassistant.const import STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er, intent as intent_helper -import homeassistant.helpers.device_registry as dr -from homeassistant.helpers.entity_component import EntityComponent - -from .conftest import MockESPHomeDevice - - -def get_satellite_entity( - hass: HomeAssistant, mac_address: str -) -> EsphomeAssistSatellite | None: - """Get the satellite entity for a device.""" - ent_reg = er.async_get(hass) - satellite_entity_id = ent_reg.async_get_entity_id( - Platform.ASSIST_SATELLITE, DOMAIN, f"{mac_address}-assist_satellite" - ) - if satellite_entity_id is None: - return None - assert satellite_entity_id.endswith("_assist_satellite") - - component: EntityComponent[AssistSatelliteEntity] = hass.data[ - assist_satellite.DOMAIN - ] - if (entity := component.get_entity(satellite_entity_id)) is not None: - assert isinstance(entity, EsphomeAssistSatellite) - return entity - - return None - - -@pytest.fixture -def mock_wav() -> bytes: - """Return test WAV audio.""" - with io.BytesIO() as wav_io: - with wave.open(wav_io, "wb") as wav_file: - wav_file.setframerate(16000) - wav_file.setsampwidth(2) - wav_file.setnchannels(1) - wav_file.writeframes(b"test-wav") - - return wav_io.getvalue() - - -async def test_no_satellite_without_voice_assistant( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test that an assist satellite entity is not created if a voice assistant is not present.""" - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={}, - ) - await hass.async_block_till_done() - - # No satellite entity should be created - assert get_satellite_entity(hass, mock_device.device_info.mac_address) is None - - -async def test_pipeline_api_audio( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], - mock_wav: bytes, -) -> None: - """Test a complete pipeline run with API audio (over the TCP 
connection).""" - conversation_id = "test-conversation-id" - media_url = "http://test.url" - media_id = "test-media-id" - - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.SPEAKER - | VoiceAssistantFeature.API_AUDIO - }, - ) - await hass.async_block_till_done() - dev = device_registry.async_get_device( - connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} - ) - - satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) - assert satellite is not None - - # Block TTS streaming until we're ready. - # This makes it easier to verify the order of pipeline events. - stream_tts_audio_ready = asyncio.Event() - original_stream_tts_audio = satellite._stream_tts_audio - - async def _stream_tts_audio(*args, **kwargs): - await stream_tts_audio_ready.wait() - await original_stream_tts_audio(*args, **kwargs) - - async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): - assert device_id == dev.id - - stt_stream = kwargs["stt_stream"] - - chunks = [chunk async for chunk in stt_stream] - - # Verify test API audio - assert chunks == [b"test-mic"] - - event_callback = kwargs["event_callback"] - - # Test unknown event type - event_callback( - PipelineEvent( - type="unknown-event", - data={}, - ) - ) - - mock_client.send_voice_assistant_event.assert_not_called() - - # Test error event - event_callback( - PipelineEvent( - type=PipelineEventType.ERROR, - data={"code": "test-error-code", "message": "test-error-message"}, - ) - ) - - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_ERROR, - {"code": "test-error-code", "message": "test-error-message"}, - ) - - # Wake word - assert satellite.state == AssistSatelliteState.IDLE - - event_callback( - PipelineEvent( - type=PipelineEventType.WAKE_WORD_START, - data={ - "entity_id": "test-wake-word-entity-id", - "metadata": {}, - "timeout": 0, - }, - ) - ) - - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_START, - {}, - ) - - # Test no wake word detected - event_callback( - PipelineEvent( - type=PipelineEventType.WAKE_WORD_END, data={"wake_word_output": {}} - ) - ) - - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_ERROR, - {"code": "no_wake_word", "message": "No wake word detected"}, - ) - - # Correct wake word detection - event_callback( - PipelineEvent( - type=PipelineEventType.WAKE_WORD_END, - data={"wake_word_output": {"wake_word_phrase": "test-wake-word"}}, - ) - ) - - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END, - {}, - ) - - # STT - event_callback( - PipelineEvent( - type=PipelineEventType.STT_START, - data={"engine": "test-stt-engine", "metadata": {}}, - ) - ) - - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_STT_START, - {}, - ) - assert satellite.state == AssistSatelliteState.LISTENING - - event_callback( - PipelineEvent( - type=PipelineEventType.STT_END, - data={"stt_output": {"text": "test-stt-text"}}, - ) - ) - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_STT_END, - 
{"text": "test-stt-text"}, - ) - - # Intent - event_callback( - PipelineEvent( - type=PipelineEventType.INTENT_START, - data={ - "engine": "test-intent-engine", - "language": hass.config.language, - "intent_input": "test-intent-text", - "conversation_id": conversation_id, - "device_id": device_id, - }, - ) - ) - - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_START, - {}, - ) - assert satellite.state == AssistSatelliteState.PROCESSING - - event_callback( - PipelineEvent( - type=PipelineEventType.INTENT_END, - data={"intent_output": {"conversation_id": conversation_id}}, - ) - ) - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END, - {"conversation_id": conversation_id}, - ) - - # TTS - event_callback( - PipelineEvent( - type=PipelineEventType.TTS_START, - data={ - "engine": "test-stt-engine", - "language": hass.config.language, - "voice": "test-voice", - "tts_input": "test-tts-text", - }, - ) - ) - - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START, - {"text": "test-tts-text"}, - ) - assert satellite.state == AssistSatelliteState.RESPONDING - - # Should return mock_wav audio - event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={"tts_output": {"url": media_url, "media_id": media_id}}, - ) - ) - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END, - {"url": media_url}, - ) - - event_callback(PipelineEvent(type=PipelineEventType.RUN_END)) - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_RUN_END, - {}, - ) - - # Allow TTS streaming to proceed - stream_tts_audio_ready.set() - - pipeline_finished = asyncio.Event() - original_handle_pipeline_finished = satellite.handle_pipeline_finished - - def handle_pipeline_finished(): - original_handle_pipeline_finished() - pipeline_finished.set() - - async def async_get_media_source_audio( - hass: HomeAssistant, - media_source_id: str, - ) -> tuple[str, bytes]: - return ("wav", mock_wav) - - tts_finished = asyncio.Event() - original_tts_response_finished = satellite.tts_response_finished - - def tts_response_finished(): - original_tts_response_finished() - tts_finished.set() - - with ( - patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - patch( - "homeassistant.components.tts.async_get_media_source_audio", - new=async_get_media_source_audio, - ), - patch.object(satellite, "handle_pipeline_finished", handle_pipeline_finished), - patch.object(satellite, "_stream_tts_audio", _stream_tts_audio), - patch.object(satellite, "tts_response_finished", tts_response_finished), - ): - # Should be cleared at pipeline start - satellite._audio_queue.put_nowait(b"leftover-data") - - # Should be cancelled at pipeline start - mock_tts_streaming_task = Mock() - satellite._tts_streaming_task = mock_tts_streaming_task - - async with asyncio.timeout(1): - await satellite.handle_pipeline_start( - conversation_id=conversation_id, - flags=VoiceAssistantCommandFlag.USE_WAKE_WORD, - audio_settings=VoiceAssistantAudioSettings(), - wake_word_phrase="", - ) - mock_tts_streaming_task.cancel.assert_called_once() - await satellite.handle_audio(b"test-mic") - await satellite.handle_pipeline_stop(abort=False) - 
await pipeline_finished.wait() - - await tts_finished.wait() - - # Verify TTS streaming events. - # These are definitely the last two events because we blocked TTS streaming - # until after RUN_END above. - assert mock_client.send_voice_assistant_event.call_args_list[-2].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, - {}, - ) - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_END, - {}, - ) - - # Verify TTS WAV audio chunk came through - mock_client.send_voice_assistant_audio.assert_called_once_with(b"test-wav") - - -@pytest.mark.usefixtures("socket_enabled") -async def test_pipeline_udp_audio( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], - mock_wav: bytes, -) -> None: - """Test a complete pipeline run with legacy UDP audio. - - This test is not as comprehensive as test_pipeline_api_audio since we're - mainly focused on the UDP server. - """ - conversation_id = "test-conversation-id" - media_url = "http://test.url" - media_id = "test-media-id" - - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.SPEAKER - }, - ) - await hass.async_block_till_done() - - satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) - assert satellite is not None - - mic_audio_event = asyncio.Event() - - async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): - stt_stream = kwargs["stt_stream"] - - chunks = [] - async for chunk in stt_stream: - chunks.append(chunk) - mic_audio_event.set() - - # Verify test UDP audio - assert chunks == [b"test-mic"] - - event_callback = kwargs["event_callback"] - - # STT - event_callback( - PipelineEvent( - type=PipelineEventType.STT_START, - data={"engine": "test-stt-engine", "metadata": {}}, - ) - ) - - event_callback( - PipelineEvent( - type=PipelineEventType.STT_END, - data={"stt_output": {"text": "test-stt-text"}}, - ) - ) - - # Intent - event_callback( - PipelineEvent( - type=PipelineEventType.INTENT_START, - data={ - "engine": "test-intent-engine", - "language": hass.config.language, - "intent_input": "test-intent-text", - "conversation_id": conversation_id, - "device_id": device_id, - }, - ) - ) - - event_callback( - PipelineEvent( - type=PipelineEventType.INTENT_END, - data={"intent_output": {"conversation_id": conversation_id}}, - ) - ) - - # TTS - event_callback( - PipelineEvent( - type=PipelineEventType.TTS_START, - data={ - "engine": "test-stt-engine", - "language": hass.config.language, - "voice": "test-voice", - "tts_input": "test-tts-text", - }, - ) - ) - - # Should return mock_wav audio - event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={"tts_output": {"url": media_url, "media_id": media_id}}, - ) - ) - - event_callback(PipelineEvent(type=PipelineEventType.RUN_END)) - - pipeline_finished = asyncio.Event() - original_handle_pipeline_finished = satellite.handle_pipeline_finished - - def handle_pipeline_finished(): - original_handle_pipeline_finished() - pipeline_finished.set() - - async def async_get_media_source_audio( - hass: HomeAssistant, - media_source_id: str, - ) -> tuple[str, bytes]: - return ("wav", mock_wav) - - tts_finished = asyncio.Event() - 
original_tts_response_finished = satellite.tts_response_finished - - def tts_response_finished(): - original_tts_response_finished() - tts_finished.set() - - class TestProtocol(asyncio.DatagramProtocol): - def __init__(self) -> None: - self.transport = None - self.data_received: list[bytes] = [] - - def connection_made(self, transport): - self.transport = transport - - def datagram_received(self, data: bytes, addr): - self.data_received.append(data) - - with ( - patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - patch( - "homeassistant.components.tts.async_get_media_source_audio", - new=async_get_media_source_audio, - ), - patch.object(satellite, "handle_pipeline_finished", handle_pipeline_finished), - patch.object(satellite, "tts_response_finished", tts_response_finished), - ): - async with asyncio.timeout(1): - port = await satellite.handle_pipeline_start( - conversation_id=conversation_id, - flags=VoiceAssistantCommandFlag(0), # stt - audio_settings=VoiceAssistantAudioSettings(), - wake_word_phrase="", - ) - assert (port is not None) and (port > 0) - - ( - transport, - protocol, - ) = await asyncio.get_running_loop().create_datagram_endpoint( - TestProtocol, remote_addr=("127.0.0.1", port) - ) - assert isinstance(protocol, TestProtocol) - - # Send audio over UDP - transport.sendto(b"test-mic") - - # Wait for audio chunk to be delivered - await mic_audio_event.wait() - - await satellite.handle_pipeline_stop(abort=False) - await pipeline_finished.wait() - - await tts_finished.wait() - - # Verify TTS audio (from UDP) - assert protocol.data_received == [b"test-wav"] - - # Check that UDP server was stopped - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - sock.setblocking(False) - sock.bind(("", port)) # will fail if UDP server is still running - sock.close() - - -async def test_udp_errors() -> None: - """Test UDP protocol error conditions.""" - audio_queue: asyncio.Queue[bytes | None] = asyncio.Queue() - protocol = VoiceAssistantUDPServer(audio_queue) - - protocol.datagram_received(b"test", ("", 0)) - assert audio_queue.qsize() == 1 - assert (await audio_queue.get()) == b"test" - - # None will stop the pipeline - protocol.error_received(RuntimeError()) - assert audio_queue.qsize() == 1 - assert (await audio_queue.get()) is None - - # No transport - assert protocol.transport is None - protocol.send_audio_bytes(b"test") - - # No remote address - protocol.transport = Mock() - protocol.remote_addr = None - protocol.send_audio_bytes(b"test") - protocol.transport.sendto.assert_not_called() - - -async def test_pipeline_media_player( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], - mock_wav: bytes, -) -> None: - """Test a complete pipeline run with the TTS response sent to a media player instead of a speaker. - - This test is not as comprehensive as test_pipeline_api_audio since we're - mainly focused on tts_response_finished getting automatically called. 
- """ - conversation_id = "test-conversation-id" - media_url = "http://test.url" - media_id = "test-media-id" - - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.API_AUDIO - }, - ) - await hass.async_block_till_done() - - satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) - assert satellite is not None - - async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): - stt_stream = kwargs["stt_stream"] - - async for _chunk in stt_stream: - break - - event_callback = kwargs["event_callback"] - - # STT - event_callback( - PipelineEvent( - type=PipelineEventType.STT_START, - data={"engine": "test-stt-engine", "metadata": {}}, - ) - ) - - event_callback( - PipelineEvent( - type=PipelineEventType.STT_END, - data={"stt_output": {"text": "test-stt-text"}}, - ) - ) - - # Intent - event_callback( - PipelineEvent( - type=PipelineEventType.INTENT_START, - data={ - "engine": "test-intent-engine", - "language": hass.config.language, - "intent_input": "test-intent-text", - "conversation_id": conversation_id, - "device_id": device_id, - }, - ) - ) - - event_callback( - PipelineEvent( - type=PipelineEventType.INTENT_END, - data={"intent_output": {"conversation_id": conversation_id}}, - ) - ) - - # TTS - event_callback( - PipelineEvent( - type=PipelineEventType.TTS_START, - data={ - "engine": "test-stt-engine", - "language": hass.config.language, - "voice": "test-voice", - "tts_input": "test-tts-text", - }, - ) - ) - - # Should return mock_wav audio - event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={"tts_output": {"url": media_url, "media_id": media_id}}, - ) - ) - - event_callback(PipelineEvent(type=PipelineEventType.RUN_END)) - - pipeline_finished = asyncio.Event() - original_handle_pipeline_finished = satellite.handle_pipeline_finished - - def handle_pipeline_finished(): - original_handle_pipeline_finished() - pipeline_finished.set() - - async def async_get_media_source_audio( - hass: HomeAssistant, - media_source_id: str, - ) -> tuple[str, bytes]: - return ("wav", mock_wav) - - tts_finished = asyncio.Event() - original_tts_response_finished = satellite.tts_response_finished - - def tts_response_finished(): - original_tts_response_finished() - tts_finished.set() - - with ( - patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - patch( - "homeassistant.components.tts.async_get_media_source_audio", - new=async_get_media_source_audio, - ), - patch.object(satellite, "handle_pipeline_finished", handle_pipeline_finished), - patch.object(satellite, "tts_response_finished", tts_response_finished), - ): - async with asyncio.timeout(1): - await satellite.handle_pipeline_start( - conversation_id=conversation_id, - flags=VoiceAssistantCommandFlag(0), # stt - audio_settings=VoiceAssistantAudioSettings(), - wake_word_phrase="", - ) - - await satellite.handle_pipeline_stop(abort=False) - await pipeline_finished.wait() - - assert satellite.state == AssistSatelliteState.RESPONDING - - # Will trigger tts_response_finished - await mock_device.mock_voice_assistant_handle_announcement_finished( - VoiceAssistantAnnounceFinished(success=True) - ) - await tts_finished.wait() - - assert satellite.state == AssistSatelliteState.IDLE - - -async def test_timer_events( - hass: 
HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test that injecting timer events results in the correct api client calls.""" - - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.TIMERS - }, - ) - await hass.async_block_till_done() - dev = device_registry.async_get_device( - connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} - ) - - total_seconds = (1 * 60 * 60) + (2 * 60) + 3 - await intent_helper.async_handle( - hass, - "test", - intent_helper.INTENT_START_TIMER, - { - "name": {"value": "test timer"}, - "hours": {"value": 1}, - "minutes": {"value": 2}, - "seconds": {"value": 3}, - }, - device_id=dev.id, - ) - - mock_client.send_voice_assistant_timer_event.assert_called_with( - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_STARTED, - ANY, - "test timer", - total_seconds, - total_seconds, - True, - ) - - # Increase timer beyond original time and check total_seconds has increased - mock_client.send_voice_assistant_timer_event.reset_mock() - - total_seconds += 5 * 60 - await intent_helper.async_handle( - hass, - "test", - intent_helper.INTENT_INCREASE_TIMER, - { - "name": {"value": "test timer"}, - "minutes": {"value": 5}, - }, - device_id=dev.id, - ) - - mock_client.send_voice_assistant_timer_event.assert_called_with( - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_UPDATED, - ANY, - "test timer", - total_seconds, - ANY, - True, - ) - - -async def test_unknown_timer_event( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test that unknown (new) timer event types do not result in api calls.""" - - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.TIMERS - }, - ) - await hass.async_block_till_done() - assert mock_device.entry.unique_id is not None - dev = device_registry.async_get_device( - connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} - ) - assert dev is not None - - with patch( - "homeassistant.components.esphome.assist_satellite._TIMER_EVENT_TYPES.from_hass", - side_effect=KeyError, - ): - await intent_helper.async_handle( - hass, - "test", - intent_helper.INTENT_START_TIMER, - { - "name": {"value": "test timer"}, - "hours": {"value": 1}, - "minutes": {"value": 2}, - "seconds": {"value": 3}, - }, - device_id=dev.id, - ) - - mock_client.send_voice_assistant_timer_event.assert_not_called() - - -async def test_streaming_tts_errors( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], - mock_wav: bytes, -) -> None: - """Test error conditions for _stream_tts_audio function.""" - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - 
"voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - }, - ) - await hass.async_block_till_done() - - satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) - assert satellite is not None - - # Should not stream if not running - satellite._is_running = False - await satellite._stream_tts_audio("test-media-id") - mock_client.send_voice_assistant_audio.assert_not_called() - satellite._is_running = True - - # Should only stream WAV - async def get_mp3( - hass: HomeAssistant, - media_source_id: str, - ) -> tuple[str, bytes]: - return ("mp3", b"") - - with patch( - "homeassistant.components.tts.async_get_media_source_audio", new=get_mp3 - ): - await satellite._stream_tts_audio("test-media-id") - mock_client.send_voice_assistant_audio.assert_not_called() - - # Needs to be the correct sample rate, etc. - async def get_bad_wav( - hass: HomeAssistant, - media_source_id: str, - ) -> tuple[str, bytes]: - with io.BytesIO() as wav_io: - with wave.open(wav_io, "wb") as wav_file: - wav_file.setframerate(48000) - wav_file.setsampwidth(2) - wav_file.setnchannels(1) - wav_file.writeframes(b"test-wav") - - return ("wav", wav_io.getvalue()) - - with patch( - "homeassistant.components.tts.async_get_media_source_audio", new=get_bad_wav - ): - await satellite._stream_tts_audio("test-media-id") - mock_client.send_voice_assistant_audio.assert_not_called() - - # Check that TTS_STREAM_* events still get sent after cancel - media_fetched = asyncio.Event() - - async def get_slow_wav( - hass: HomeAssistant, - media_source_id: str, - ) -> tuple[str, bytes]: - media_fetched.set() - await asyncio.sleep(1) - return ("wav", mock_wav) - - mock_client.send_voice_assistant_event.reset_mock() - with patch( - "homeassistant.components.tts.async_get_media_source_audio", new=get_slow_wav - ): - task = asyncio.create_task(satellite._stream_tts_audio("test-media-id")) - async with asyncio.timeout(1): - # Wait for media to be fetched - await media_fetched.wait() - - # Cancel task - task.cancel() - await task - - # No audio should have gone out - mock_client.send_voice_assistant_audio.assert_not_called() - assert len(mock_client.send_voice_assistant_event.call_args_list) == 2 - - # The TTS_STREAM_* events should have gone out - assert mock_client.send_voice_assistant_event.call_args_list[-2].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, - {}, - ) - assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_END, - {}, - ) - - -async def test_tts_format_from_media_player( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test that the text-to-speech format is pulled from the first media player.""" - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[ - MediaPlayerInfo( - object_id="mymedia_player", - key=1, - name="my media_player", - unique_id="my_media_player", - supports_pause=True, - supported_formats=[ - MediaPlayerSupportedFormat( - format="flac", - sample_rate=48000, - num_channels=2, - purpose=MediaPlayerFormatPurpose.DEFAULT, - sample_bytes=2, - ), - # This is the format that should be used for tts - MediaPlayerSupportedFormat( - format="mp3", - sample_rate=22050, - num_channels=1, - purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT, - sample_bytes=2, - ), - ], - ) - ], - user_service=[], - 
states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - }, - ) - await hass.async_block_till_done() - - satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) - assert satellite is not None - - with patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", - ) as mock_pipeline_from_audio_stream: - await satellite.handle_pipeline_start( - conversation_id="", - flags=0, - audio_settings=VoiceAssistantAudioSettings(), - wake_word_phrase=None, - ) - - mock_pipeline_from_audio_stream.assert_called_once() - kwargs = mock_pipeline_from_audio_stream.call_args_list[0].kwargs - - # Should be ANNOUNCEMENT format from media player - assert kwargs.get("tts_audio_output") == { - tts.ATTR_PREFERRED_FORMAT: "mp3", - tts.ATTR_PREFERRED_SAMPLE_RATE: 22050, - tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 1, - tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, - } - - -async def test_tts_minimal_format_from_media_player( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test text-to-speech format when media player only specifies the codec.""" - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[ - MediaPlayerInfo( - object_id="mymedia_player", - key=1, - name="my media_player", - unique_id="my_media_player", - supports_pause=True, - supported_formats=[ - MediaPlayerSupportedFormat( - format="flac", - sample_rate=48000, - num_channels=2, - purpose=MediaPlayerFormatPurpose.DEFAULT, - sample_bytes=2, - ), - # This is the format that should be used for tts - MediaPlayerSupportedFormat( - format="mp3", - sample_rate=0, # source rate - num_channels=0, # source channels - purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT, - sample_bytes=0, # source width - ), - ], - ) - ], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - }, - ) - await hass.async_block_till_done() - - satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) - assert satellite is not None - - with patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", - ) as mock_pipeline_from_audio_stream: - await satellite.handle_pipeline_start( - conversation_id="", - flags=0, - audio_settings=VoiceAssistantAudioSettings(), - wake_word_phrase=None, - ) - - mock_pipeline_from_audio_stream.assert_called_once() - kwargs = mock_pipeline_from_audio_stream.call_args_list[0].kwargs - - # Should be ANNOUNCEMENT format from media player - assert kwargs.get("tts_audio_output") == { - tts.ATTR_PREFERRED_FORMAT: "mp3", - } - - -async def test_announce_supported_features( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test that the announce supported feature is set by flags.""" - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - }, - ) - await hass.async_block_till_done() - - satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) - assert satellite is not None - - assert not (satellite.supported_features & 
AssistSatelliteEntityFeature.ANNOUNCE) - - -async def test_announce_message( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test announcement with message.""" - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.SPEAKER - | VoiceAssistantFeature.API_AUDIO - | VoiceAssistantFeature.ANNOUNCE - }, - ) - await hass.async_block_till_done() - - satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) - assert satellite is not None - - done = asyncio.Event() - - async def send_voice_assistant_announcement_await_response( - media_id: str, timeout: float, text: str - ): - assert satellite.state == AssistSatelliteState.RESPONDING - assert media_id == "https://www.home-assistant.io/resolved.mp3" - assert text == "test-text" - - done.set() - - with ( - patch( - "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", - return_value="media-source://bla", - ), - patch( - "homeassistant.components.media_source.async_resolve_media", - return_value=PlayMedia( - url="https://www.home-assistant.io/resolved.mp3", - mime_type="audio/mp3", - ), - ), - patch.object( - mock_client, - "send_voice_assistant_announcement_await_response", - new=send_voice_assistant_announcement_await_response, - ), - ): - async with asyncio.timeout(1): - await hass.services.async_call( - assist_satellite.DOMAIN, - "announce", - {"entity_id": satellite.entity_id, "message": "test-text"}, - blocking=True, - ) - await done.wait() - assert satellite.state == AssistSatelliteState.IDLE - - -async def test_announce_media_id( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], - device_registry: dr.DeviceRegistry, -) -> None: - """Test announcement with media id.""" - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[ - MediaPlayerInfo( - object_id="mymedia_player", - key=1, - name="my media_player", - unique_id="my_media_player", - supports_pause=True, - supported_formats=[ - MediaPlayerSupportedFormat( - format="flac", - sample_rate=48000, - num_channels=2, - purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT, - sample_bytes=2, - ), - ], - ) - ], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.SPEAKER - | VoiceAssistantFeature.API_AUDIO - | VoiceAssistantFeature.ANNOUNCE - }, - ) - await hass.async_block_till_done() - - dev = device_registry.async_get_device( - connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} - ) - - satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) - assert satellite is not None - - done = asyncio.Event() - - async def send_voice_assistant_announcement_await_response( - media_id: str, timeout: float, text: str - ): - assert satellite.state == AssistSatelliteState.RESPONDING - assert media_id == "https://www.home-assistant.io/proxied.flac" - - done.set() - - with ( - patch.object( - mock_client, - "send_voice_assistant_announcement_await_response", - 
new=send_voice_assistant_announcement_await_response, - ), - patch( - "homeassistant.components.esphome.assist_satellite.async_create_proxy_url", - return_value="https://www.home-assistant.io/proxied.flac", - ) as mock_async_create_proxy_url, - ): - async with asyncio.timeout(1): - await hass.services.async_call( - assist_satellite.DOMAIN, - "announce", - { - "entity_id": satellite.entity_id, - "media_id": "https://www.home-assistant.io/resolved.mp3", - }, - blocking=True, - ) - await done.wait() - assert satellite.state == AssistSatelliteState.IDLE - - mock_async_create_proxy_url.assert_called_once_with( - hass, - dev.id, - "https://www.home-assistant.io/resolved.mp3", - media_format="flac", - rate=48000, - channels=2, - width=2, - ) - - -async def test_satellite_unloaded_on_disconnect( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test that the assist satellite platform is unloaded on disconnect.""" - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - }, - ) - await hass.async_block_till_done() - - satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) - assert satellite is not None - - state = hass.states.get(satellite.entity_id) - assert state is not None - assert state.state != STATE_UNAVAILABLE - - # Device will be unavailable after disconnect - await mock_device.mock_disconnect(True) - - state = hass.states.get(satellite.entity_id) - assert state is not None - assert state.state == STATE_UNAVAILABLE - - -async def test_pipeline_abort( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test aborting a pipeline (no further processing).""" - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.API_AUDIO - }, - ) - await hass.async_block_till_done() - - satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) - assert satellite is not None - - chunks = [] - chunk_received = asyncio.Event() - pipeline_aborted = asyncio.Event() - - async def async_pipeline_from_audio_stream(*args, **kwargs): - stt_stream = kwargs["stt_stream"] - - try: - async for chunk in stt_stream: - chunks.append(chunk) - chunk_received.set() - except asyncio.CancelledError: - # Aborting cancels the pipeline task - pipeline_aborted.set() - raise - - pipeline_finished = asyncio.Event() - original_handle_pipeline_finished = satellite.handle_pipeline_finished - - def handle_pipeline_finished(): - original_handle_pipeline_finished() - pipeline_finished.set() - - with ( - patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - patch.object(satellite, "handle_pipeline_finished", handle_pipeline_finished), - ): - async with asyncio.timeout(1): - await satellite.handle_pipeline_start( - conversation_id="", - flags=VoiceAssistantCommandFlag(0), # stt - audio_settings=VoiceAssistantAudioSettings(), - wake_word_phrase="", - ) 
- - await satellite.handle_audio(b"before-abort") - await chunk_received.wait() - - # Abort the pipeline, no further processing - await satellite.handle_pipeline_stop(abort=True) - await pipeline_aborted.wait() - - # This chunk should not make it into the STT stream - await satellite.handle_audio(b"after-abort") - await pipeline_finished.wait() - - # Only first chunk - assert chunks == [b"before-abort"] - - -async def test_get_set_configuration( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test getting and setting the satellite configuration.""" - expected_config = AssistSatelliteConfiguration( - available_wake_words=[ - AssistSatelliteWakeWord("1234", "okay nabu", ["en"]), - AssistSatelliteWakeWord("5678", "hey jarvis", ["en"]), - ], - active_wake_words=["1234"], - max_active_wake_words=1, - ) - mock_client.get_voice_assistant_configuration.return_value = expected_config - - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.ANNOUNCE - }, - ) - await hass.async_block_till_done() - - satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) - assert satellite is not None - - # HA should have been updated - actual_config = satellite.async_get_configuration() - assert actual_config == expected_config - - updated_config = replace(actual_config, active_wake_words=["5678"]) - mock_client.get_voice_assistant_configuration.return_value = updated_config - - # Change active wake words - await satellite.async_set_configuration(updated_config) - - # Set config method should be called - mock_client.set_voice_assistant_configuration.assert_called_once_with( - active_wake_words=["5678"] - ) - - # Device should have been updated - assert satellite.async_get_configuration() == updated_config diff --git a/tests/components/esphome/test_binary_sensor.py b/tests/components/esphome/test_binary_sensor.py index 25d8b60f574..3da8a54ff34 100644 --- a/tests/components/esphome/test_binary_sensor.py +++ b/tests/components/esphome/test_binary_sensor.py @@ -1,7 +1,6 @@ """Test ESPHome binary sensors.""" from collections.abc import Awaitable, Callable -from http import HTTPStatus from aioesphomeapi import ( APIClient, @@ -13,20 +12,15 @@ from aioesphomeapi import ( ) import pytest -from homeassistant.components.esphome import DOMAIN, DomainData -from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN +from homeassistant.components.esphome import DomainData from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er, issue_registry as ir -from homeassistant.setup import async_setup_component from .conftest import MockESPHomeDevice from tests.common import MockConfigEntry -from tests.typing import ClientSessionGenerator -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_assist_in_progress( hass: HomeAssistant, mock_voice_assistant_v1_entry, @@ -50,131 +44,6 @@ async def test_assist_in_progress( assert state.state == "off" -async def test_assist_in_progress_disabled_by_default( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - issue_registry: ir.IssueRegistry, - 
mock_voice_assistant_v1_entry, -) -> None: - """Test assist in progress binary sensor is added disabled.""" - - assert not hass.states.get("binary_sensor.test_assist_in_progress") - entity_entry = entity_registry.async_get("binary_sensor.test_assist_in_progress") - assert entity_entry - assert entity_entry.disabled - assert entity_entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION - - # Test no issue for disabled entity - assert len(issue_registry.issues) == 0 - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_assist_in_progress_issue( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - issue_registry: ir.IssueRegistry, - mock_voice_assistant_v1_entry, -) -> None: - """Test assist in progress binary sensor.""" - - state = hass.states.get("binary_sensor.test_assist_in_progress") - assert state is not None - - entity_entry = entity_registry.async_get("binary_sensor.test_assist_in_progress") - issue = issue_registry.async_get_issue( - DOMAIN, f"assist_in_progress_deprecated_{entity_entry.id}" - ) - assert issue is not None - - # Test issue goes away after disabling the entity - entity_registry.async_update_entity( - "binary_sensor.test_assist_in_progress", - disabled_by=er.RegistryEntryDisabler.USER, - ) - await hass.async_block_till_done() - issue = issue_registry.async_get_issue( - DOMAIN, f"assist_in_progress_deprecated_{entity_entry.id}" - ) - assert issue is None - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_assist_in_progress_repair_flow( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - entity_registry: er.EntityRegistry, - issue_registry: ir.IssueRegistry, - mock_voice_assistant_v1_entry, -) -> None: - """Test assist in progress binary sensor deprecation issue flow.""" - - state = hass.states.get("binary_sensor.test_assist_in_progress") - assert state is not None - - entity_entry = entity_registry.async_get("binary_sensor.test_assist_in_progress") - assert entity_entry.disabled_by is None - issue = issue_registry.async_get_issue( - DOMAIN, f"assist_in_progress_deprecated_{entity_entry.id}" - ) - assert issue is not None - assert issue.data == { - "entity_id": "binary_sensor.test_assist_in_progress", - "entity_uuid": entity_entry.id, - "integration_name": "ESPHome", - } - assert issue.translation_key == "assist_in_progress_deprecated" - assert issue.translation_placeholders == {"integration_name": "ESPHome"} - - assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) - await hass.async_block_till_done() - await hass.async_start() - - client = await hass_client() - - resp = await client.post( - "/api/repairs/issues/fix", - json={"handler": DOMAIN, "issue_id": issue.issue_id}, - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data == { - "data_schema": [], - "description_placeholders": { - "assist_satellite_domain": "assist_satellite", - "entity_id": "binary_sensor.test_assist_in_progress", - "integration_name": "ESPHome", - }, - "errors": None, - "flow_id": flow_id, - "handler": DOMAIN, - "last_step": None, - "preview": None, - "step_id": "confirm_disable_entity", - "type": "form", - } - - resp = await client.post(f"/api/repairs/issues/fix/{flow_id}") - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data == { - "description": None, - "description_placeholders": None, - "flow_id": flow_id, - "handler": DOMAIN, - "type": "create_entry", - } - - # 
Test the entity is disabled - entity_entry = entity_registry.async_get("binary_sensor.test_assist_in_progress") - assert entity_entry.disabled_by is er.RegistryEntryDisabler.USER - - @pytest.mark.parametrize( "binary_state", [(True, STATE_ON), (False, STATE_OFF), (None, STATE_UNKNOWN)] ) diff --git a/tests/components/esphome/test_camera.py b/tests/components/esphome/test_camera.py index 87b86b039fd..c6a61cd18e8 100644 --- a/tests/components/esphome/test_camera.py +++ b/tests/components/esphome/test_camera.py @@ -5,13 +5,13 @@ from collections.abc import Awaitable, Callable from aioesphomeapi import ( APIClient, CameraInfo, - CameraState as ESPHomeCameraState, + CameraState, EntityInfo, EntityState, UserService, ) -from homeassistant.components.camera import CameraState +from homeassistant.components.camera import STATE_IDLE from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -55,10 +55,10 @@ async def test_camera_single_image( ) state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == CameraState.IDLE + assert state.state == STATE_IDLE def _mock_camera_image(): - mock_device.set_state(ESPHomeCameraState(key=1, data=SMALLEST_VALID_JPEG_BYTES)) + mock_device.set_state(CameraState(key=1, data=SMALLEST_VALID_JPEG_BYTES)) mock_client.request_single_image = _mock_camera_image @@ -67,7 +67,7 @@ async def test_camera_single_image( await hass.async_block_till_done() state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == CameraState.IDLE + assert state.state == STATE_IDLE assert resp.status == 200 assert resp.content_type == "image/jpeg" @@ -103,7 +103,7 @@ async def test_camera_single_image_unavailable_before_requested( ) state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == CameraState.IDLE + assert state.state == STATE_IDLE await mock_device.mock_disconnect(False) client = await hass_client() @@ -144,7 +144,7 @@ async def test_camera_single_image_unavailable_during_request( ) state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == CameraState.IDLE + assert state.state == STATE_IDLE def _mock_camera_image(): hass.async_create_task(mock_device.mock_disconnect(False)) @@ -189,7 +189,7 @@ async def test_camera_stream( ) state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == CameraState.IDLE + assert state.state == STATE_IDLE remaining_responses = 3 def _mock_camera_image(): @@ -197,7 +197,7 @@ async def test_camera_stream( if remaining_responses == 0: return remaining_responses -= 1 - mock_device.set_state(ESPHomeCameraState(key=1, data=SMALLEST_VALID_JPEG_BYTES)) + mock_device.set_state(CameraState(key=1, data=SMALLEST_VALID_JPEG_BYTES)) mock_client.request_image_stream = _mock_camera_image mock_client.request_single_image = _mock_camera_image @@ -207,7 +207,7 @@ async def test_camera_stream( await hass.async_block_till_done() state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == CameraState.IDLE + assert state.state == STATE_IDLE assert resp.status == 200 assert resp.content_type == "multipart/x-mixed-replace" @@ -249,7 +249,7 @@ async def test_camera_stream_unavailable( ) state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == CameraState.IDLE + assert state.state == STATE_IDLE await mock_device.mock_disconnect(False) @@ -289,7 +289,7 @@ async def 
test_camera_stream_with_disconnection( ) state = hass.states.get("camera.test_mycamera") assert state is not None - assert state.state == CameraState.IDLE + assert state.state == STATE_IDLE remaining_responses = 3 def _mock_camera_image(): @@ -299,7 +299,7 @@ async def test_camera_stream_with_disconnection( if remaining_responses == 2: hass.async_create_task(mock_device.mock_disconnect(False)) remaining_responses -= 1 - mock_device.set_state(ESPHomeCameraState(key=1, data=SMALLEST_VALID_JPEG_BYTES)) + mock_device.set_state(CameraState(key=1, data=SMALLEST_VALID_JPEG_BYTES)) mock_client.request_image_stream = _mock_camera_image mock_client.request_single_image = _mock_camera_image diff --git a/tests/components/esphome/test_climate.py b/tests/components/esphome/test_climate.py index 189b86fc5fd..4ec7fee6447 100644 --- a/tests/components/esphome/test_climate.py +++ b/tests/components/esphome/test_climate.py @@ -13,7 +13,6 @@ from aioesphomeapi import ( ClimateState, ClimateSwingMode, ) -import pytest from syrupy import SnapshotAssertion from homeassistant.components.climate import ( @@ -42,7 +41,6 @@ from homeassistant.components.climate import ( ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError async def test_climate_entity( @@ -56,6 +54,7 @@ async def test_climate_entity( name="my climate", unique_id="my_climate", supports_current_temperature=True, + supports_two_point_target_temperature=True, supports_action=True, visual_min_temperature=10.0, visual_max_temperature=30.0, @@ -135,13 +134,14 @@ async def test_climate_entity_with_step_and_two_point( assert state is not None assert state.state == HVACMode.COOL - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_TEMPERATURE: 25}, - blocking=True, - ) + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_TEMPERATURE: 25}, + blocking=True, + ) + mock_client.climate_command.assert_has_calls([call(key=1, target_temperature=25.0)]) + mock_client.climate_command.reset_mock() await hass.services.async_call( CLIMATE_DOMAIN, @@ -213,34 +213,38 @@ async def test_climate_entity_with_step_and_target_temp( assert state is not None assert state.state == HVACMode.COOL + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_TEMPERATURE: 25}, + blocking=True, + ) + mock_client.climate_command.assert_has_calls([call(key=1, target_temperature=25.0)]) + mock_client.climate_command.reset_mock() + await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_HVAC_MODE: HVACMode.AUTO, - ATTR_TEMPERATURE: 25, + ATTR_TARGET_TEMP_LOW: 20, + ATTR_TARGET_TEMP_HIGH: 30, }, blocking=True, ) mock_client.climate_command.assert_has_calls( - [call(key=1, mode=ClimateMode.AUTO, target_temperature=25.0)] + [ + call( + key=1, + mode=ClimateMode.AUTO, + target_temperature_low=20.0, + target_temperature_high=30.0, + ) + ] ) mock_client.climate_command.reset_mock() - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: "climate.test_myclimate", - ATTR_HVAC_MODE: HVACMode.AUTO, - ATTR_TARGET_TEMP_LOW: 20, - ATTR_TARGET_TEMP_HIGH: 30, - }, - blocking=True, - ) - await 
hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, diff --git a/tests/components/esphome/test_config_flow.py b/tests/components/esphome/test_config_flow.py index 0a389969c78..68af6665380 100644 --- a/tests/components/esphome/test_config_flow.py +++ b/tests/components/esphome/test_config_flow.py @@ -27,10 +27,10 @@ from homeassistant.components.esphome.const import ( DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS, DOMAIN, ) +from homeassistant.components.hassio import HassioServiceInfo from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.service_info.mqtt import MqttServiceInfo from . import VALID_NOISE_PSK @@ -798,7 +798,14 @@ async def test_reauth_initiation(hass: HomeAssistant, mock_client) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + "esphome", + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -814,7 +821,14 @@ async def test_reauth_confirm_valid( ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + "esphome", + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + ) mock_client.device_info.return_value = DeviceInfo(uses_password=False, name="test") result = await hass.config_entries.flow.async_configure( @@ -861,7 +875,14 @@ async def test_reauth_fixed_via_dashboard( "homeassistant.components.esphome.coordinator.ESPHomeDashboardAPI.get_encryption_key", return_value=VALID_NOISE_PSK, ) as mock_get_encryption_key: - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + "esphome", + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + ) assert result["type"] is FlowResultType.ABORT, result assert result["reason"] == "reauth_successful" @@ -875,7 +896,7 @@ async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( hass: HomeAssistant, mock_client, mock_dashboard: dict[str, Any], - mock_config_entry: MockConfigEntry, + mock_config_entry, mock_setup_entry: None, ) -> None: """Test reauth fixed automatically via dashboard with password removed.""" @@ -897,7 +918,14 @@ async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( "homeassistant.components.esphome.coordinator.ESPHomeDashboardAPI.get_encryption_key", return_value=VALID_NOISE_PSK, ) as mock_get_encryption_key: - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + "esphome", + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + "unique_id": mock_config_entry.unique_id, + }, + ) assert result["type"] is FlowResultType.ABORT, result assert result["reason"] == "reauth_successful" @@ -910,14 +938,21 @@ async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( async def test_reauth_fixed_via_remove_password( hass: HomeAssistant, mock_client, - mock_config_entry: MockConfigEntry, + mock_config_entry, mock_dashboard: dict[str, Any], mock_setup_entry: None, ) 
-> None: """Test reauth fixed automatically by seeing password removed.""" mock_client.device_info.return_value = DeviceInfo(uses_password=False, name="test") - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + "esphome", + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + "unique_id": mock_config_entry.unique_id, + }, + ) assert result["type"] is FlowResultType.ABORT, result assert result["reason"] == "reauth_successful" @@ -946,7 +981,14 @@ async def test_reauth_fixed_via_dashboard_at_confirm( mock_client.device_info.return_value = DeviceInfo(uses_password=False, name="test") - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + "esphome", + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + ) assert result["type"] is FlowResultType.FORM, result assert result["step_id"] == "reauth_confirm" @@ -985,7 +1027,14 @@ async def test_reauth_confirm_invalid( ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + "esphome", + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + ) mock_client.device_info.side_effect = InvalidEncryptionKeyAPIError result = await hass.config_entries.flow.async_configure( @@ -1021,7 +1070,14 @@ async def test_reauth_confirm_invalid_with_unique_id( ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + "esphome", + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + ) mock_client.device_info.side_effect = InvalidEncryptionKeyAPIError result = await hass.config_entries.flow.async_configure( @@ -1400,14 +1456,6 @@ async def test_discovery_mqtt_no_mac( await mqtt_discovery_test_abort(hass, "{}", "mqtt_missing_mac") -@pytest.mark.usefixtures("mock_zeroconf") -async def test_discovery_mqtt_empty_payload( - hass: HomeAssistant, mock_client, mock_setup_entry: None -) -> None: - """Test discovery aborted if MQTT payload is empty.""" - await mqtt_discovery_test_abort(hass, "", "mqtt_missing_payload") - - @pytest.mark.usefixtures("mock_zeroconf") async def test_discovery_mqtt_no_api( hass: HomeAssistant, mock_client, mock_setup_entry: None diff --git a/tests/components/esphome/test_cover.py b/tests/components/esphome/test_cover.py index 4cfe91c6dea..b190d287198 100644 --- a/tests/components/esphome/test_cover.py +++ b/tests/components/esphome/test_cover.py @@ -7,7 +7,7 @@ from aioesphomeapi import ( APIClient, CoverInfo, CoverOperation, - CoverState as ESPHomeCoverState, + CoverState, EntityInfo, EntityState, UserService, @@ -26,7 +26,10 @@ from homeassistant.components.cover import ( SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, - CoverState, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -55,7 +58,7 @@ async def test_cover_entity( ) ] states = [ - ESPHomeCoverState( + CoverState( key=1, position=0.5, tilt=0.5, @@ -71,7 +74,7 @@ async def test_cover_entity( ) state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING assert 
state.attributes[ATTR_CURRENT_POSITION] == 50 assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -139,30 +142,28 @@ async def test_cover_entity( mock_client.cover_command.reset_mock() mock_device.set_state( - ESPHomeCoverState(key=1, position=0.0, current_operation=CoverOperation.IDLE) + CoverState(key=1, position=0.0, current_operation=CoverOperation.IDLE) ) await hass.async_block_till_done() state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED mock_device.set_state( - ESPHomeCoverState( - key=1, position=0.5, current_operation=CoverOperation.IS_CLOSING - ) + CoverState(key=1, position=0.5, current_operation=CoverOperation.IS_CLOSING) ) await hass.async_block_till_done() state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING mock_device.set_state( - ESPHomeCoverState(key=1, position=1.0, current_operation=CoverOperation.IDLE) + CoverState(key=1, position=1.0, current_operation=CoverOperation.IDLE) ) await hass.async_block_till_done() state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN async def test_cover_entity_without_position( @@ -186,7 +187,7 @@ async def test_cover_entity_without_position( ) ] states = [ - ESPHomeCoverState( + CoverState( key=1, position=0.5, tilt=0.5, @@ -202,6 +203,6 @@ async def test_cover_entity_without_position( ) state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING assert ATTR_CURRENT_TILT_POSITION not in state.attributes assert ATTR_CURRENT_POSITION not in state.attributes diff --git a/tests/components/esphome/test_dashboard.py b/tests/components/esphome/test_dashboard.py index 1641804e458..da805eb2eee 100644 --- a/tests/components/esphome/test_dashboard.py +++ b/tests/components/esphome/test_dashboard.py @@ -6,7 +6,7 @@ from unittest.mock import patch from aioesphomeapi import DeviceInfo, InvalidAuthAPIError from homeassistant.components.esphome import CONF_NOISE_PSK, coordinator, dashboard -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -150,7 +150,7 @@ async def test_new_info_reload_config_entries( async def test_new_dashboard_fix_reauth( - hass: HomeAssistant, mock_client, mock_config_entry: MockConfigEntry, mock_dashboard + hass: HomeAssistant, mock_client, mock_config_entry, mock_dashboard ) -> None: """Test config entries waiting for reauth are triggered.""" mock_client.device_info.side_effect = ( @@ -162,7 +162,14 @@ async def test_new_dashboard_fix_reauth( "homeassistant.components.esphome.coordinator.ESPHomeDashboardAPI.get_encryption_key", return_value=VALID_NOISE_PSK, ) as mock_get_encryption_key: - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + "esphome", + context={ + "source": SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + "unique_id": mock_config_entry.unique_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert len(mock_get_encryption_key.mock_calls) == 0 diff --git a/tests/components/esphome/test_diagnostics.py 
b/tests/components/esphome/test_diagnostics.py index 832e7d6572f..03689a5699e 100644 --- a/tests/components/esphome/test_diagnostics.py +++ b/tests/components/esphome/test_diagnostics.py @@ -5,7 +5,6 @@ from unittest.mock import ANY import pytest from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.components import bluetooth from homeassistant.core import HomeAssistant @@ -28,7 +27,7 @@ async def test_diagnostics( """Test diagnostics for config entry.""" result = await get_diagnostics_for_config_entry(hass, hass_client, init_integration) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot async def test_diagnostics_with_bluetooth( @@ -62,7 +61,6 @@ async def test_diagnostics_with_bluetooth( }, }, "config": { - "created_at": ANY, "data": { "device_name": "test", "host": "test.local", @@ -70,11 +68,9 @@ async def test_diagnostics_with_bluetooth( "port": 6053, }, "disabled_by": None, - "discovery_keys": {}, "domain": "esphome", "entry_id": ANY, "minor_version": 1, - "modified_at": ANY, "options": {"allow_service_calls": False}, "pref_disable_new_entities": False, "pref_disable_polling": False, diff --git a/tests/components/esphome/test_ffmpeg_proxy.py b/tests/components/esphome/test_ffmpeg_proxy.py deleted file mode 100644 index 295d8d2fda9..00000000000 --- a/tests/components/esphome/test_ffmpeg_proxy.py +++ /dev/null @@ -1,334 +0,0 @@ -"""Tests for ffmpeg proxy view.""" - -from collections.abc import Generator -from http import HTTPStatus -import io -import os -import tempfile -from unittest.mock import patch -from urllib.request import pathname2url -import wave - -from aiohttp import client_exceptions -import mutagen -import pytest - -from homeassistant.components import esphome -from homeassistant.components.esphome.ffmpeg_proxy import async_create_proxy_url -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -from tests.typing import ClientSessionGenerator - - -@pytest.fixture(name="wav_file_length") -def wav_file_length_fixture() -> int: - """Wanted length of temporary wave file.""" - return 1 - - -@pytest.fixture(name="wav_file") -def wav_file_fixture(wav_file_length: int) -> Generator[str]: - """Create a temporary file and fill it with 1s of silence.""" - with tempfile.NamedTemporaryFile(mode="wb+", suffix=".wav") as temp_file: - _write_silence(temp_file.name, wav_file_length) - yield temp_file.name - - -def _write_silence(filename: str, length: int) -> None: - """Write silence to a file.""" - with wave.open(filename, "wb") as wav_file: - wav_file.setframerate(16000) - wav_file.setsampwidth(2) - wav_file.setnchannels(1) - wav_file.writeframes(bytes(16000 * 2 * length)) # length s - - -async def test_async_create_proxy_url(hass: HomeAssistant) -> None: - """Test that async_create_proxy_url returns the correct format.""" - assert await async_setup_component(hass, "esphome", {}) - - device_id = "test-device" - convert_id = "test-id" - media_format = "flac" - media_url = "http://127.0.0.1/test.mp3" - proxy_url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.{media_format}" - - with patch( - "homeassistant.components.esphome.ffmpeg_proxy.secrets.token_urlsafe", - return_value=convert_id, - ): - assert ( - async_create_proxy_url(hass, device_id, media_url, media_format) - == proxy_url - ) - - -async def test_proxy_view( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - wav_file: str, -) -> None: - """Test proxy HTTP view for converting 
audio.""" - device_id = "1234" - - await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) - client = await hass_client() - - wav_url = pathname2url(wav_file) - convert_id = "test-id" - url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.mp3" - - # Should fail because we haven't allowed the URL yet - req = await client.get(url) - assert req.status == HTTPStatus.NOT_FOUND - - # Allow the URL - with patch( - "homeassistant.components.esphome.ffmpeg_proxy.secrets.token_urlsafe", - return_value=convert_id, - ): - assert ( - async_create_proxy_url( - hass, device_id, wav_url, media_format="mp3", rate=22050, channels=2 - ) - == url - ) - - # Requesting the wrong media format should fail - wrong_url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.flac" - req = await client.get(wrong_url) - assert req.status == HTTPStatus.BAD_REQUEST - - # Correct URL - req = await client.get(url) - assert req.status == HTTPStatus.OK - - mp3_data = await req.content.read() - - # Verify conversion - with io.BytesIO(mp3_data) as mp3_io: - mp3_file = mutagen.File(mp3_io) - assert mp3_file.info.sample_rate == 22050 - assert mp3_file.info.channels == 2 - - # About a second, but not exact - assert round(mp3_file.info.length, 0) == 1 - - -async def test_ffmpeg_file_doesnt_exist( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, -) -> None: - """Test ffmpeg conversion with a file that doesn't exist.""" - device_id = "1234" - - await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) - client = await hass_client() - - # Try to convert a file that doesn't exist - url = async_create_proxy_url(hass, device_id, "missing-file", media_format="mp3") - req = await client.get(url) - - # The HTTP status is OK because the ffmpeg process started, but no data is - # returned. - assert req.status == HTTPStatus.OK - mp3_data = await req.content.read() - assert not mp3_data - - -async def test_lingering_process( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - wav_file: str, -) -> None: - """Test that a new request stops the old ffmpeg process.""" - device_id = "1234" - - await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) - client = await hass_client() - - wav_url = pathname2url(wav_file) - url1 = async_create_proxy_url( - hass, - device_id, - wav_url, - media_format="wav", - rate=22050, - channels=2, - width=2, - ) - - # First request will start ffmpeg - req1 = await client.get(url1) - assert req1.status == HTTPStatus.OK - - # Only read part of the data - await req1.content.readexactly(100) - - # Allow another URL - url2 = async_create_proxy_url( - hass, - device_id, - wav_url, - media_format="wav", - rate=22050, - channels=2, - width=2, - ) - - req2 = await client.get(url2) - assert req2.status == HTTPStatus.OK - - wav_data = await req2.content.read() - - # All of the data should be there because this is a new ffmpeg process - with io.BytesIO(wav_data) as wav_io, wave.open(wav_io, "rb") as received_wav_file: - # We can't use getnframes() here because the WAV header will be incorrect. - # WAV encoders usually go back and update the WAV header after all of - # the frames are written, but ffmpeg can't do that because we're - # streaming the data. - # So instead, we just read and count frames until we run out. 
- num_frames = 0 - while chunk := received_wav_file.readframes(1024): - num_frames += len(chunk) // (2 * 2) # 2 channels, 16-bit samples - - assert num_frames == 22050 # 1s - - -@pytest.mark.parametrize("wav_file_length", [10]) -async def test_request_same_url_multiple_times( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - wav_file: str, -) -> None: - """Test that the ffmpeg process is restarted if the same URL is requested multiple times.""" - device_id = "1234" - - await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) - client = await hass_client() - - wav_url = pathname2url(wav_file) - url = async_create_proxy_url( - hass, - device_id, - wav_url, - media_format="wav", - rate=22050, - channels=2, - width=2, - ) - - # First request will start ffmpeg - req1 = await client.get(url) - assert req1.status == HTTPStatus.OK - - # Only read part of the data - await req1.content.readexactly(100) - - # Second request should restart ffmpeg - req2 = await client.get(url) - assert req2.status == HTTPStatus.OK - - wav_data = await req2.content.read() - - # All of the data should be there because this is a new ffmpeg process - with io.BytesIO(wav_data) as wav_io, wave.open(wav_io, "rb") as received_wav_file: - num_frames = 0 - while chunk := received_wav_file.readframes(1024): - num_frames += len(chunk) // (2 * 2) # 2 channels, 16-bit samples - - assert num_frames == 22050 * 10 # 10s - - -async def test_max_conversions_per_device( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, -) -> None: - """Test that each device has a maximum number of conversions (currently 2).""" - max_conversions = 2 - device_ids = ["1234", "5678"] - - await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) - client = await hass_client() - - with tempfile.TemporaryDirectory() as temp_dir: - wav_paths = [ - os.path.join(temp_dir, f"{i}.wav") for i in range(max_conversions + 1) - ] - for wav_path in wav_paths: - _write_silence(wav_path, 10) - - wav_urls = [pathname2url(p) for p in wav_paths] - - # Each device will have max + 1 conversions - device_urls = { - device_id: [ - async_create_proxy_url( - hass, - device_id, - wav_url, - media_format="wav", - rate=22050, - channels=2, - width=2, - ) - for wav_url in wav_urls - ] - for device_id in device_ids - } - - for urls in device_urls.values(): - # First URL should fail because it was overwritten by the others - req = await client.get(urls[0]) - assert req.status == HTTPStatus.BAD_REQUEST - - # All other URLs should succeed - for url in urls[1:]: - req = await client.get(url) - assert req.status == HTTPStatus.OK - - -async def test_abort_on_shutdown( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, -) -> None: - """Test we abort on Home Assistant shutdown.""" - device_id = "1234" - - await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) - client = await hass_client() - - with tempfile.NamedTemporaryFile(mode="wb+", suffix=".wav") as temp_file: - with wave.open(temp_file.name, "wb") as wav_file: - wav_file.setframerate(16000) - wav_file.setsampwidth(2) - wav_file.setnchannels(1) - wav_file.writeframes(bytes(16000 * 2)) # 1s - - wav_url = pathname2url(temp_file.name) - convert_id = "test-id" - url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.mp3" - - wav_url = pathname2url(temp_file.name) - url = async_create_proxy_url( - hass, - device_id, - wav_url, - media_format="wav", - rate=22050, - channels=2, - width=2, - ) - - # Get URL and start reading - req = await client.get(url) - 
assert req.status == HTTPStatus.OK - initial_mp3_data = await req.content.read(4) - assert initial_mp3_data == b"RIFF" - - # Shut down Home Assistant - await hass.async_stop() - - with pytest.raises(client_exceptions.ClientPayloadError): - await req.content.read() diff --git a/tests/components/esphome/test_lock.py b/tests/components/esphome/test_lock.py index ae54b16d6e2..82c24b59a2c 100644 --- a/tests/components/esphome/test_lock.py +++ b/tests/components/esphome/test_lock.py @@ -2,20 +2,16 @@ from unittest.mock import call -from aioesphomeapi import ( - APIClient, - LockCommand, - LockEntityState, - LockInfo, - LockState as ESPHomeLockState, -) +from aioesphomeapi import APIClient, LockCommand, LockEntityState, LockInfo, LockState from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - LockState, + STATE_LOCKED, + STATE_LOCKING, + STATE_UNLOCKING, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -35,7 +31,7 @@ async def test_lock_entity_no_open( requires_code=False, ) ] - states = [LockEntityState(key=1, state=ESPHomeLockState.UNLOCKING)] + states = [LockEntityState(key=1, state=LockState.UNLOCKING)] user_service = [] await mock_generic_device_entry( mock_client=mock_client, @@ -45,7 +41,7 @@ async def test_lock_entity_no_open( ) state = hass.states.get("lock.test_mylock") assert state is not None - assert state.state == LockState.UNLOCKING + assert state.state == STATE_UNLOCKING await hass.services.async_call( LOCK_DOMAIN, @@ -69,7 +65,7 @@ async def test_lock_entity_start_locked( unique_id="my_lock", ) ] - states = [LockEntityState(key=1, state=ESPHomeLockState.LOCKED)] + states = [LockEntityState(key=1, state=LockState.LOCKED)] user_service = [] await mock_generic_device_entry( mock_client=mock_client, @@ -79,7 +75,7 @@ async def test_lock_entity_start_locked( ) state = hass.states.get("lock.test_mylock") assert state is not None - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKED async def test_lock_entity_supports_open( @@ -96,7 +92,7 @@ async def test_lock_entity_supports_open( requires_code=True, ) ] - states = [LockEntityState(key=1, state=ESPHomeLockState.LOCKING)] + states = [LockEntityState(key=1, state=LockState.LOCKING)] user_service = [] await mock_generic_device_entry( mock_client=mock_client, @@ -106,7 +102,7 @@ async def test_lock_entity_supports_open( ) state = hass.states.get("lock.test_mylock") assert state is not None - assert state.state == LockState.LOCKING + assert state.state == STATE_LOCKING await hass.services.async_call( LOCK_DOMAIN, diff --git a/tests/components/esphome/test_manager.py b/tests/components/esphome/test_manager.py index 4b322c8744e..01f267581f4 100644 --- a/tests/components/esphome/test_manager.py +++ b/tests/components/esphome/test_manager.py @@ -2,7 +2,7 @@ import asyncio from collections.abc import Awaitable, Callable -from unittest.mock import AsyncMock, call +from unittest.mock import AsyncMock, call, patch from aioesphomeapi import ( APIClient, @@ -17,6 +17,7 @@ from aioesphomeapi import ( UserService, UserServiceArg, UserServiceArgType, + VoiceAssistantFeature, ) import pytest @@ -28,6 +29,10 @@ from homeassistant.components.esphome.const import ( DOMAIN, STABLE_BLE_VERSION_STR, ) +from homeassistant.components.esphome.voice_assistant import ( + VoiceAssistantAPIPipeline, + VoiceAssistantUDPPipeline, +) from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -39,7 +44,7 @@ from 
homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, issue_registry as ir from homeassistant.setup import async_setup_component -from .conftest import MockESPHomeDevice +from .conftest import _ONE_SECOND, MockESPHomeDevice from tests.common import MockConfigEntry, async_capture_events, async_mock_service @@ -716,34 +721,6 @@ async def test_state_subscription( assert mock_client.send_home_assistant_state.mock_calls == [] -async def test_state_request( - mock_client: APIClient, - hass: HomeAssistant, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test ESPHome requests state change.""" - device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - ) - await hass.async_block_till_done() - hass.states.async_set("binary_sensor.test", "on", {"bool": True, "float": 3.0}) - device.mock_home_assistant_state_request("binary_sensor.test", None) - await hass.async_block_till_done() - assert mock_client.send_home_assistant_state.mock_calls == [ - call("binary_sensor.test", None, "on") - ] - mock_client.send_home_assistant_state.reset_mock() - hass.states.async_set("binary_sensor.test", "off", {"bool": False, "float": 5.0}) - await hass.async_block_till_done() - assert mock_client.send_home_assistant_state.mock_calls == [] - - async def test_debug_logging( mock_client: APIClient, hass: HomeAssistant, @@ -1047,7 +1024,7 @@ async def test_esphome_device_with_project( ) assert dev.manufacturer == "mfr" assert dev.model == "model" - assert dev.sw_version == "2.2.2 (ESPHome 1.0.0)" + assert dev.hw_version == "2.2.2" async def test_esphome_device_with_manufacturer( @@ -1209,3 +1186,102 @@ async def test_entry_missing_unique_id( await mock_esphome_device(mock_client=mock_client, mock_storage=True) await hass.async_block_till_done() assert entry.unique_id == "11:22:33:44:55:aa" + + +async def test_manager_voice_assistant_handlers_api( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + caplog: pytest.LogCaptureFixture, + mock_voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test the handlers are correctly executed in manager.py.""" + + device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.API_AUDIO + }, + ) + + await hass.async_block_till_done() + + with ( + patch( + "homeassistant.components.esphome.manager.VoiceAssistantAPIPipeline", + new=mock_voice_assistant_api_pipeline, + ), + ): + port: int | None = await device.mock_voice_assistant_handle_start( + "", 0, None, None + ) + + assert port == 0 + + port: int | None = await device.mock_voice_assistant_handle_start( + "", 0, None, None + ) + + assert "Voice assistant UDP server was not stopped" in caplog.text + + await device.mock_voice_assistant_handle_audio(bytes(_ONE_SECOND)) + + mock_voice_assistant_api_pipeline.receive_audio_bytes.assert_called_with( + bytes(_ONE_SECOND) + ) + + mock_voice_assistant_api_pipeline.receive_audio_bytes.reset_mock() + + await device.mock_voice_assistant_handle_stop() + mock_voice_assistant_api_pipeline.handle_finished() + + await 
device.mock_voice_assistant_handle_audio(bytes(_ONE_SECOND)) + + mock_voice_assistant_api_pipeline.receive_audio_bytes.assert_not_called() + + +async def test_manager_voice_assistant_handlers_udp( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + mock_voice_assistant_udp_pipeline: VoiceAssistantUDPPipeline, +) -> None: + """Test the handlers are correctly executed in manager.py.""" + + device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + }, + ) + + await hass.async_block_till_done() + + with ( + patch( + "homeassistant.components.esphome.manager.VoiceAssistantUDPPipeline", + new=mock_voice_assistant_udp_pipeline, + ), + ): + await device.mock_voice_assistant_handle_start("", 0, None, None) + + mock_voice_assistant_udp_pipeline.run_pipeline.assert_called() + + await device.mock_voice_assistant_handle_stop() + mock_voice_assistant_udp_pipeline.handle_finished() + + mock_voice_assistant_udp_pipeline.stop.assert_called() + mock_voice_assistant_udp_pipeline.close.assert_called() diff --git a/tests/components/esphome/test_media_player.py b/tests/components/esphome/test_media_player.py index 799666fc66e..3879129ccb6 100644 --- a/tests/components/esphome/test_media_player.py +++ b/tests/components/esphome/test_media_player.py @@ -1,19 +1,13 @@ """Test ESPHome media_players.""" -from collections.abc import Awaitable, Callable from unittest.mock import AsyncMock, Mock, call, patch from aioesphomeapi import ( APIClient, - EntityInfo, - EntityState, MediaPlayerCommand, MediaPlayerEntityState, - MediaPlayerFormatPurpose, MediaPlayerInfo, MediaPlayerState, - MediaPlayerSupportedFormat, - UserService, ) import pytest @@ -37,11 +31,8 @@ from homeassistant.components.media_player import ( ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant -import homeassistant.helpers.device_registry as dr from homeassistant.setup import async_setup_component -from .conftest import MockESPHomeDevice - from tests.common import mock_platform from tests.typing import WebSocketGenerator @@ -64,7 +55,7 @@ async def test_media_player_entity( key=1, volume=50, muted=True, state=MediaPlayerState.PAUSED ) ] - user_service: list[UserService] = [] + user_service = [] await mock_generic_device_entry( mock_client=mock_client, entity_info=entity_info, @@ -209,7 +200,7 @@ async def test_media_player_entity_with_source( key=1, volume=50, muted=True, state=MediaPlayerState.PLAYING ) ] - user_service: list[UserService] = [] + user_service = [] await mock_generic_device_entry( mock_client=mock_client, entity_info=entity_info, @@ -286,131 +277,3 @@ async def test_media_player_entity_with_source( mock_client.media_player_command.assert_has_calls( [call(1, media_url="media-source://tts?message=hello", announcement=True)] ) - - -async def test_media_player_proxy( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test a media_player entity with a proxy URL.""" - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[ - MediaPlayerInfo( - 
object_id="mymedia_player", - key=1, - name="my media_player", - unique_id="my_media_player", - supports_pause=True, - supported_formats=[ - MediaPlayerSupportedFormat( - format="flac", - sample_rate=0, # source rate - num_channels=0, # source channels - purpose=MediaPlayerFormatPurpose.DEFAULT, - sample_bytes=0, # source width - ), - MediaPlayerSupportedFormat( - format="wav", - sample_rate=16000, - num_channels=1, - purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT, - sample_bytes=2, - ), - MediaPlayerSupportedFormat( - format="mp3", - sample_rate=48000, - num_channels=2, - purpose=MediaPlayerFormatPurpose.DEFAULT, - ), - ], - ) - ], - user_service=[], - states=[ - MediaPlayerEntityState( - key=1, volume=50, muted=False, state=MediaPlayerState.PAUSED - ) - ], - ) - await hass.async_block_till_done() - dev = device_registry.async_get_device( - connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} - ) - assert dev is not None - state = hass.states.get("media_player.test_mymedia_player") - assert state is not None - assert state.state == "paused" - - media_url = "http://127.0.0.1/test.mp3" - proxy_url = f"/api/esphome/ffmpeg_proxy/{dev.id}/test-id.flac" - - with ( - patch( - "homeassistant.components.esphome.media_player.async_create_proxy_url", - return_value=proxy_url, - ) as mock_async_create_proxy_url, - ): - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.test_mymedia_player", - ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, - ATTR_MEDIA_CONTENT_ID: media_url, - }, - blocking=True, - ) - - # Should be the default format - mock_async_create_proxy_url.assert_called_once() - device_id = mock_async_create_proxy_url.call_args[0][1] - mock_async_create_proxy_url.assert_called_once_with( - hass, - device_id, - media_url, - media_format="flac", - rate=None, - channels=None, - width=None, - ) - - media_args = mock_client.media_player_command.call_args.kwargs - assert not media_args["announcement"] - - # Reset - mock_async_create_proxy_url.reset_mock() - - # Set announcement flag - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.test_mymedia_player", - ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, - ATTR_MEDIA_CONTENT_ID: media_url, - ATTR_MEDIA_ANNOUNCE: True, - }, - blocking=True, - ) - - # Should be the announcement format - mock_async_create_proxy_url.assert_called_once() - device_id = mock_async_create_proxy_url.call_args[0][1] - mock_async_create_proxy_url.assert_called_once_with( - hass, - device_id, - media_url, - media_format="wav", - rate=16000, - channels=1, - width=2, - ) - - media_args = mock_client.media_player_command.call_args.kwargs - assert media_args["announcement"] diff --git a/tests/components/esphome/test_repairs.py b/tests/components/esphome/test_repairs.py deleted file mode 100644 index c365e65cbe1..00000000000 --- a/tests/components/esphome/test_repairs.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Test ESPHome repairs.""" - -import pytest - -from homeassistant.components.esphome import repairs -from homeassistant.core import HomeAssistant - - -async def test_create_fix_flow_raises_on_unknown_issue_id(hass: HomeAssistant) -> None: - """Test reate_fix_flow raises on unknown issue_id.""" - - with pytest.raises(ValueError): - await repairs.async_create_fix_flow(hass, "no_such_issue", None) diff --git a/tests/components/esphome/test_select.py b/tests/components/esphome/test_select.py index fbe30afd042..a433b1b0ab0 100644 --- 
a/tests/components/esphome/test_select.py +++ b/tests/components/esphome/test_select.py @@ -19,7 +19,7 @@ async def test_pipeline_selector( ) -> None: """Test assist pipeline selector.""" - state = hass.states.get("select.test_assistant") + state = hass.states.get("select.test_assist_pipeline") assert state is not None assert state.state == "preferred" diff --git a/tests/components/esphome/test_sensor.py b/tests/components/esphome/test_sensor.py index 76f71b53167..bebfaaa69d4 100644 --- a/tests/components/esphome/test_sensor.py +++ b/tests/components/esphome/test_sensor.py @@ -28,10 +28,10 @@ from homeassistant.const import ( ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, - EntityCategory, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.entity import EntityCategory from .conftest import MockESPHomeDevice diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index 7593ab21838..992a6ad2ba9 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -8,7 +8,6 @@ from aioesphomeapi import ( APIClient, EntityInfo, EntityState, - UpdateCommand, UpdateInfo, UpdateState, UserService, @@ -16,10 +15,6 @@ from aioesphomeapi import ( import pytest from homeassistant.components.esphome.dashboard import async_get_dashboard -from homeassistant.components.homeassistant import ( - DOMAIN as HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, -) from homeassistant.components.update import ( DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, @@ -39,11 +34,6 @@ from homeassistant.exceptions import HomeAssistantError from .conftest import MockESPHomeDevice -@pytest.fixture(autouse=True) -def enable_entity(entity_registry_enabled_by_default: None) -> None: - """Enable update entity.""" - - @pytest.fixture def stub_reconnect(): """Stub reconnect.""" @@ -531,14 +521,4 @@ async def test_generic_device_update_entity_has_update( state = hass.states.get("update.test_myupdate") assert state is not None assert state.state == STATE_ON - assert state.attributes["in_progress"] is True - assert state.attributes["update_percentage"] == 50 - - await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: "update.test_myupdate"}, - blocking=True, - ) - - mock_client.update_command.assert_called_with(key=1, command=UpdateCommand.CHECK) + assert state.attributes["in_progress"] == 50 diff --git a/tests/components/esphome/test_valve.py b/tests/components/esphome/test_valve.py index 7a7e22b1713..5ba7bcbe187 100644 --- a/tests/components/esphome/test_valve.py +++ b/tests/components/esphome/test_valve.py @@ -10,7 +10,7 @@ from aioesphomeapi import ( UserService, ValveInfo, ValveOperation, - ValveState as ESPHomeValveState, + ValveState, ) from homeassistant.components.valve import ( @@ -21,7 +21,10 @@ from homeassistant.components.valve import ( SERVICE_OPEN_VALVE, SERVICE_SET_VALVE_POSITION, SERVICE_STOP_VALVE, - ValveState, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -49,7 +52,7 @@ async def test_valve_entity( ) ] states = [ - ESPHomeValveState( + ValveState( key=1, position=0.5, current_operation=ValveOperation.IS_OPENING, @@ -64,7 +67,7 @@ async def test_valve_entity( ) state = hass.states.get("valve.test_myvalve") assert state is not None - assert state.state == ValveState.OPENING + assert state.state == STATE_OPENING 
assert state.attributes[ATTR_CURRENT_POSITION] == 50 await hass.services.async_call( @@ -104,30 +107,28 @@ async def test_valve_entity( mock_client.valve_command.reset_mock() mock_device.set_state( - ESPHomeValveState(key=1, position=0.0, current_operation=ValveOperation.IDLE) + ValveState(key=1, position=0.0, current_operation=ValveOperation.IDLE) ) await hass.async_block_till_done() state = hass.states.get("valve.test_myvalve") assert state is not None - assert state.state == ValveState.CLOSED + assert state.state == STATE_CLOSED mock_device.set_state( - ESPHomeValveState( - key=1, position=0.5, current_operation=ValveOperation.IS_CLOSING - ) + ValveState(key=1, position=0.5, current_operation=ValveOperation.IS_CLOSING) ) await hass.async_block_till_done() state = hass.states.get("valve.test_myvalve") assert state is not None - assert state.state == ValveState.CLOSING + assert state.state == STATE_CLOSING mock_device.set_state( - ESPHomeValveState(key=1, position=1.0, current_operation=ValveOperation.IDLE) + ValveState(key=1, position=1.0, current_operation=ValveOperation.IDLE) ) await hass.async_block_till_done() state = hass.states.get("valve.test_myvalve") assert state is not None - assert state.state == ValveState.OPEN + assert state.state == STATE_OPEN async def test_valve_entity_without_position( @@ -150,7 +151,7 @@ async def test_valve_entity_without_position( ) ] states = [ - ESPHomeValveState( + ValveState( key=1, position=0.5, current_operation=ValveOperation.IS_OPENING, @@ -165,7 +166,7 @@ async def test_valve_entity_without_position( ) state = hass.states.get("valve.test_myvalve") assert state is not None - assert state.state == ValveState.OPENING + assert state.state == STATE_OPENING assert ATTR_CURRENT_POSITION not in state.attributes await hass.services.async_call( @@ -187,9 +188,9 @@ async def test_valve_entity_without_position( mock_client.valve_command.reset_mock() mock_device.set_state( - ESPHomeValveState(key=1, position=0.0, current_operation=ValveOperation.IDLE) + ValveState(key=1, position=0.0, current_operation=ValveOperation.IDLE) ) await hass.async_block_till_done() state = hass.states.get("valve.test_myvalve") assert state is not None - assert state.state == ValveState.CLOSED + assert state.state == STATE_CLOSED diff --git a/tests/components/esphome/test_voice_assistant.py b/tests/components/esphome/test_voice_assistant.py new file mode 100644 index 00000000000..eafc0243dc6 --- /dev/null +++ b/tests/components/esphome/test_voice_assistant.py @@ -0,0 +1,964 @@ +"""Test ESPHome voice assistant server.""" + +import asyncio +from collections.abc import Awaitable, Callable +import io +import socket +from unittest.mock import ANY, Mock, patch +import wave + +from aioesphomeapi import ( + APIClient, + EntityInfo, + EntityState, + UserService, + VoiceAssistantEventType, + VoiceAssistantFeature, + VoiceAssistantTimerEventType, +) +import pytest + +from homeassistant.components.assist_pipeline import ( + PipelineEvent, + PipelineEventType, + PipelineStage, +) +from homeassistant.components.assist_pipeline.error import ( + PipelineNotFound, + WakeWordDetectionAborted, + WakeWordDetectionError, +) +from homeassistant.components.esphome import DomainData +from homeassistant.components.esphome.voice_assistant import ( + VoiceAssistantAPIPipeline, + VoiceAssistantUDPPipeline, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import intent as intent_helper +import homeassistant.helpers.device_registry as dr + +from .conftest import _ONE_SECOND, 
MockESPHomeDevice + +_TEST_INPUT_TEXT = "This is an input test" +_TEST_OUTPUT_TEXT = "This is an output test" +_TEST_OUTPUT_URL = "output.mp3" +_TEST_MEDIA_ID = "12345" + + +@pytest.fixture +def voice_assistant_udp_pipeline( + hass: HomeAssistant, +) -> VoiceAssistantUDPPipeline: + """Return the UDP pipeline factory.""" + + def _voice_assistant_udp_server(entry): + entry_data = DomainData.get(hass).get_entry_data(entry) + + server: VoiceAssistantUDPPipeline = None + + def handle_finished(): + nonlocal server + assert server is not None + server.close() + + server = VoiceAssistantUDPPipeline(hass, entry_data, Mock(), handle_finished) + return server # noqa: RET504 + + return _voice_assistant_udp_server + + +@pytest.fixture +def voice_assistant_api_pipeline( + hass: HomeAssistant, + mock_client, + mock_voice_assistant_api_entry, +) -> VoiceAssistantAPIPipeline: + """Return the API Pipeline factory.""" + entry_data = DomainData.get(hass).get_entry_data(mock_voice_assistant_api_entry) + return VoiceAssistantAPIPipeline(hass, entry_data, Mock(), Mock(), mock_client) + + +@pytest.fixture +def voice_assistant_udp_pipeline_v1( + voice_assistant_udp_pipeline, + mock_voice_assistant_v1_entry, +) -> VoiceAssistantUDPPipeline: + """Return the UDP pipeline.""" + return voice_assistant_udp_pipeline(entry=mock_voice_assistant_v1_entry) + + +@pytest.fixture +def voice_assistant_udp_pipeline_v2( + voice_assistant_udp_pipeline, + mock_voice_assistant_v2_entry, +) -> VoiceAssistantUDPPipeline: + """Return the UDP pipeline.""" + return voice_assistant_udp_pipeline(entry=mock_voice_assistant_v2_entry) + + +@pytest.fixture +def mock_wav() -> bytes: + """Return one second of empty WAV audio.""" + with io.BytesIO() as wav_io: + with wave.open(wav_io, "wb") as wav_file: + wav_file.setframerate(16000) + wav_file.setsampwidth(2) + wav_file.setnchannels(1) + wav_file.writeframes(bytes(_ONE_SECOND)) + + return wav_io.getvalue() + + +async def test_pipeline_events( + hass: HomeAssistant, + voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, +) -> None: + """Test that the pipeline function is called.""" + + async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): + assert device_id == "mock-device-id" + + event_callback = kwargs["event_callback"] + + event_callback( + PipelineEvent( + type=PipelineEventType.WAKE_WORD_END, + data={"wake_word_output": {}}, + ) + ) + + # Fake events + event_callback( + PipelineEvent( + type=PipelineEventType.STT_START, + data={}, + ) + ) + + event_callback( + PipelineEvent( + type=PipelineEventType.STT_END, + data={"stt_output": {"text": _TEST_INPUT_TEXT}}, + ) + ) + + event_callback( + PipelineEvent( + type=PipelineEventType.TTS_START, + data={"tts_input": _TEST_OUTPUT_TEXT}, + ) + ) + + event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={"tts_output": {"url": _TEST_OUTPUT_URL}}, + ) + ) + + def handle_event( + event_type: VoiceAssistantEventType, data: dict[str, str] | None + ) -> None: + if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_END: + assert data is not None + assert data["text"] == _TEST_INPUT_TEXT + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START: + assert data is not None + assert data["text"] == _TEST_OUTPUT_TEXT + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END: + assert data is not None + assert data["url"] == _TEST_OUTPUT_URL + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END: + assert data is None + + voice_assistant_udp_pipeline_v1.handle_event = 
handle_event + + with patch( + "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ): + voice_assistant_udp_pipeline_v1.transport = Mock() + + await voice_assistant_udp_pipeline_v1.run_pipeline( + device_id="mock-device-id", conversation_id=None + ) + + +@pytest.mark.usefixtures("socket_enabled") +async def test_udp_server( + unused_udp_port_factory: Callable[[], int], + voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, +) -> None: + """Test the UDP server runs and queues incoming data.""" + port_to_use = unused_udp_port_factory() + + with patch( + "homeassistant.components.esphome.voice_assistant.UDP_PORT", new=port_to_use + ): + port = await voice_assistant_udp_pipeline_v1.start_server() + assert port == port_to_use + + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + + assert voice_assistant_udp_pipeline_v1.queue.qsize() == 0 + sock.sendto(b"test", ("127.0.0.1", port)) + + # Give the socket some time to send/receive the data + async with asyncio.timeout(1): + while voice_assistant_udp_pipeline_v1.queue.qsize() == 0: + await asyncio.sleep(0.1) + + assert voice_assistant_udp_pipeline_v1.queue.qsize() == 1 + + voice_assistant_udp_pipeline_v1.stop() + voice_assistant_udp_pipeline_v1.close() + + assert voice_assistant_udp_pipeline_v1.transport.is_closing() + + +async def test_udp_server_queue( + hass: HomeAssistant, + voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, +) -> None: + """Test the UDP server queues incoming data.""" + + voice_assistant_udp_pipeline_v1.started = True + + assert voice_assistant_udp_pipeline_v1.queue.qsize() == 0 + + voice_assistant_udp_pipeline_v1.datagram_received(bytes(1024), ("localhost", 0)) + assert voice_assistant_udp_pipeline_v1.queue.qsize() == 1 + + voice_assistant_udp_pipeline_v1.datagram_received(bytes(1024), ("localhost", 0)) + assert voice_assistant_udp_pipeline_v1.queue.qsize() == 2 + + async for data in voice_assistant_udp_pipeline_v1._iterate_packets(): + assert data == bytes(1024) + break + assert voice_assistant_udp_pipeline_v1.queue.qsize() == 1 # One message removed + + voice_assistant_udp_pipeline_v1.stop() + assert ( + voice_assistant_udp_pipeline_v1.queue.qsize() == 2 + ) # An empty message added by stop + + voice_assistant_udp_pipeline_v1.datagram_received(bytes(1024), ("localhost", 0)) + assert ( + voice_assistant_udp_pipeline_v1.queue.qsize() == 2 + ) # No new messages added after stop + + voice_assistant_udp_pipeline_v1.close() + + # Stopping the UDP server should cause _iterate_packets to break out + # immediately without yielding any data. 
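# Aside: a minimal, self-contained sketch of the stop-sentinel queue pattern that the
# assertions just below appear to exercise -- stop() enqueues one empty chunk so the
# consumer wakes up and terminates, and anything received afterwards is dropped.
# The class and method names here are illustrative only, not the integration's
# actual implementation.
import asyncio
from collections.abc import AsyncIterator


class _AudioBufferSketch:
    """Queue audio chunks; stop() adds an empty sentinel and blocks further input."""

    def __init__(self) -> None:
        self.queue: asyncio.Queue[bytes] = asyncio.Queue()
        self.started = True

    def receive(self, data: bytes) -> None:
        # Ignore data once stopped, mirroring "No new messages added after stop".
        if self.started:
            self.queue.put_nowait(data)

    def stop(self) -> None:
        # The empty chunk is the sentinel ("An empty message added by stop").
        self.started = False
        self.queue.put_nowait(b"")

    async def iterate(self) -> AsyncIterator[bytes]:
        # Yield chunks until the falsy sentinel arrives, then stop immediately.
        while chunk := await self.queue.get():
            yield chunk


async def _demo() -> None:
    buf = _AudioBufferSketch()
    buf.receive(bytes(1024))
    buf.stop()
    buf.receive(bytes(1024))  # dropped: already stopped
    assert [len(c) async for c in buf.iterate()] == [1024]


if __name__ == "__main__":
    asyncio.run(_demo())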
+ has_data = False + async for _data in voice_assistant_udp_pipeline_v1._iterate_packets(): + has_data = True + + assert not has_data, "Server was stopped" + + +async def test_api_pipeline_queue( + hass: HomeAssistant, + voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test the API pipeline queues incoming data.""" + + voice_assistant_api_pipeline.started = True + + assert voice_assistant_api_pipeline.queue.qsize() == 0 + + voice_assistant_api_pipeline.receive_audio_bytes(bytes(1024)) + assert voice_assistant_api_pipeline.queue.qsize() == 1 + + voice_assistant_api_pipeline.receive_audio_bytes(bytes(1024)) + assert voice_assistant_api_pipeline.queue.qsize() == 2 + + async for data in voice_assistant_api_pipeline._iterate_packets(): + assert data == bytes(1024) + break + assert voice_assistant_api_pipeline.queue.qsize() == 1 # One message removed + + voice_assistant_api_pipeline.stop() + assert ( + voice_assistant_api_pipeline.queue.qsize() == 2 + ) # An empty message added by stop + + voice_assistant_api_pipeline.receive_audio_bytes(bytes(1024)) + assert ( + voice_assistant_api_pipeline.queue.qsize() == 2 + ) # No new messages added after stop + + # Stopping the API Pipeline should cause _iterate_packets to break out + # immediately without yielding any data. + has_data = False + async for _data in voice_assistant_api_pipeline._iterate_packets(): + has_data = True + + assert not has_data, "Pipeline was stopped" + + +async def test_error_calls_handle_finished( + hass: HomeAssistant, + voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, +) -> None: + """Test that the handle_finished callback is called when an error occurs.""" + voice_assistant_udp_pipeline_v1.handle_finished = Mock() + + voice_assistant_udp_pipeline_v1.error_received(Exception()) + + voice_assistant_udp_pipeline_v1.handle_finished.assert_called() + + +@pytest.mark.usefixtures("socket_enabled") +async def test_udp_server_multiple( + unused_udp_port_factory: Callable[[], int], + voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, +) -> None: + """Test that the UDP server raises an error if started twice.""" + with patch( + "homeassistant.components.esphome.voice_assistant.UDP_PORT", + new=unused_udp_port_factory(), + ): + await voice_assistant_udp_pipeline_v1.start_server() + + with ( + patch( + "homeassistant.components.esphome.voice_assistant.UDP_PORT", + new=unused_udp_port_factory(), + ), + pytest.raises(RuntimeError), + ): + await voice_assistant_udp_pipeline_v1.start_server() + + +@pytest.mark.usefixtures("socket_enabled") +async def test_udp_server_after_stopped( + unused_udp_port_factory: Callable[[], int], + voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, +) -> None: + """Test that the UDP server raises an error if started after stopped.""" + voice_assistant_udp_pipeline_v1.close() + with ( + patch( + "homeassistant.components.esphome.voice_assistant.UDP_PORT", + new=unused_udp_port_factory(), + ), + pytest.raises(RuntimeError), + ): + await voice_assistant_udp_pipeline_v1.start_server() + + +async def test_events_converted_correctly( + hass: HomeAssistant, + voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test the pipeline events produce the correct data to send to the device.""" + + with patch( + "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts", + ): + voice_assistant_api_pipeline._event_callback( + PipelineEvent( + type=PipelineEventType.STT_START, + data={}, + ) + ) + + 
voice_assistant_api_pipeline.handle_event.assert_called_with( + VoiceAssistantEventType.VOICE_ASSISTANT_STT_START, None + ) + + voice_assistant_api_pipeline._event_callback( + PipelineEvent( + type=PipelineEventType.STT_END, + data={"stt_output": {"text": "text"}}, + ) + ) + + voice_assistant_api_pipeline.handle_event.assert_called_with( + VoiceAssistantEventType.VOICE_ASSISTANT_STT_END, {"text": "text"} + ) + + voice_assistant_api_pipeline._event_callback( + PipelineEvent( + type=PipelineEventType.INTENT_START, + data={}, + ) + ) + + voice_assistant_api_pipeline.handle_event.assert_called_with( + VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_START, None + ) + + voice_assistant_api_pipeline._event_callback( + PipelineEvent( + type=PipelineEventType.INTENT_END, + data={ + "intent_output": { + "conversation_id": "conversation-id", + } + }, + ) + ) + + voice_assistant_api_pipeline.handle_event.assert_called_with( + VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END, + {"conversation_id": "conversation-id"}, + ) + + voice_assistant_api_pipeline._event_callback( + PipelineEvent( + type=PipelineEventType.TTS_START, + data={"tts_input": "text"}, + ) + ) + + voice_assistant_api_pipeline.handle_event.assert_called_with( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START, {"text": "text"} + ) + + voice_assistant_api_pipeline._event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={"tts_output": {"url": "url", "media_id": "media-id"}}, + ) + ) + + voice_assistant_api_pipeline.handle_event.assert_called_with( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END, {"url": "url"} + ) + + +async def test_unknown_event_type( + hass: HomeAssistant, + voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test the API pipeline does not call handle_event for unknown events.""" + voice_assistant_api_pipeline._event_callback( + PipelineEvent( + type="unknown-event", + data={}, + ) + ) + + assert not voice_assistant_api_pipeline.handle_event.called + + +async def test_error_event_type( + hass: HomeAssistant, + voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test the API pipeline calls event handler with error.""" + voice_assistant_api_pipeline._event_callback( + PipelineEvent( + type=PipelineEventType.ERROR, + data={"code": "code", "message": "message"}, + ) + ) + + voice_assistant_api_pipeline.handle_event.assert_called_with( + VoiceAssistantEventType.VOICE_ASSISTANT_ERROR, + {"code": "code", "message": "message"}, + ) + + +async def test_send_tts_not_called( + hass: HomeAssistant, + voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, +) -> None: + """Test the UDP server with a v1 device does not call _send_tts.""" + with patch( + "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts" + ) as mock_send_tts: + voice_assistant_udp_pipeline_v1._event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={ + "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} + }, + ) + ) + + mock_send_tts.assert_not_called() + + +async def test_send_tts_called_udp( + hass: HomeAssistant, + voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, +) -> None: + """Test the UDP server with a v2 device calls _send_tts.""" + with patch( + "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts" + ) as mock_send_tts: + voice_assistant_udp_pipeline_v2._event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={ + "tts_output": {"media_id": _TEST_MEDIA_ID, "url": 
_TEST_OUTPUT_URL} + }, + ) + ) + + mock_send_tts.assert_called_with(_TEST_MEDIA_ID) + + +async def test_send_tts_called_api( + hass: HomeAssistant, + voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test the API pipeline calls _send_tts.""" + with patch( + "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts" + ) as mock_send_tts: + voice_assistant_api_pipeline._event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={ + "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} + }, + ) + ) + + mock_send_tts.assert_called_with(_TEST_MEDIA_ID) + + +async def test_send_tts_not_called_when_empty( + hass: HomeAssistant, + voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, + voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, + voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test the pipelines do not call _send_tts when the output is empty.""" + with patch( + "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts" + ) as mock_send_tts: + voice_assistant_udp_pipeline_v1._event_callback( + PipelineEvent(type=PipelineEventType.TTS_END, data={"tts_output": {}}) + ) + + mock_send_tts.assert_not_called() + + voice_assistant_udp_pipeline_v2._event_callback( + PipelineEvent(type=PipelineEventType.TTS_END, data={"tts_output": {}}) + ) + + mock_send_tts.assert_not_called() + + voice_assistant_api_pipeline._event_callback( + PipelineEvent(type=PipelineEventType.TTS_END, data={"tts_output": {}}) + ) + + mock_send_tts.assert_not_called() + + +async def test_send_tts_udp( + hass: HomeAssistant, + voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, + mock_wav: bytes, +) -> None: + """Test the UDP server calls sendto to transmit audio data to device.""" + with patch( + "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", + return_value=("wav", mock_wav), + ): + voice_assistant_udp_pipeline_v2.started = True + voice_assistant_udp_pipeline_v2.transport = Mock(spec=asyncio.DatagramTransport) + with patch.object( + voice_assistant_udp_pipeline_v2.transport, "is_closing", return_value=False + ): + voice_assistant_udp_pipeline_v2._event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={ + "tts_output": { + "media_id": _TEST_MEDIA_ID, + "url": _TEST_OUTPUT_URL, + } + }, + ) + ) + + await voice_assistant_udp_pipeline_v2._tts_done.wait() + + voice_assistant_udp_pipeline_v2.transport.sendto.assert_called() + + +async def test_send_tts_api( + hass: HomeAssistant, + mock_client: APIClient, + voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, + mock_wav: bytes, +) -> None: + """Test the API pipeline calls cli.send_voice_assistant_audio to transmit audio data to device.""" + with patch( + "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", + return_value=("wav", mock_wav), + ): + voice_assistant_api_pipeline.started = True + + voice_assistant_api_pipeline._event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={ + "tts_output": { + "media_id": _TEST_MEDIA_ID, + "url": _TEST_OUTPUT_URL, + } + }, + ) + ) + + await voice_assistant_api_pipeline._tts_done.wait() + + mock_client.send_voice_assistant_audio.assert_called() + + +async def test_send_tts_wrong_sample_rate( + hass: HomeAssistant, + voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test that only 16000Hz audio will be streamed.""" + with io.BytesIO() as wav_io: + with 
wave.open(wav_io, "wb") as wav_file: + wav_file.setframerate(22050) + wav_file.setsampwidth(2) + wav_file.setnchannels(1) + wav_file.writeframes(bytes(_ONE_SECOND)) + + wav_bytes = wav_io.getvalue() + with patch( + "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", + return_value=("wav", wav_bytes), + ): + voice_assistant_api_pipeline.started = True + voice_assistant_api_pipeline.transport = Mock(spec=asyncio.DatagramTransport) + + voice_assistant_api_pipeline._event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={ + "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} + }, + ) + ) + + assert voice_assistant_api_pipeline._tts_task is not None + with pytest.raises(ValueError): + await voice_assistant_api_pipeline._tts_task + + +async def test_send_tts_wrong_format( + hass: HomeAssistant, + voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test that only WAV audio will be streamed.""" + with ( + patch( + "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", + return_value=("raw", bytes(1024)), + ), + ): + voice_assistant_api_pipeline.started = True + voice_assistant_api_pipeline.transport = Mock(spec=asyncio.DatagramTransport) + + voice_assistant_api_pipeline._event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={ + "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} + }, + ) + ) + + assert voice_assistant_api_pipeline._tts_task is not None + with pytest.raises(ValueError): + await voice_assistant_api_pipeline._tts_task + + +async def test_send_tts_not_started( + hass: HomeAssistant, + voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, + mock_wav: bytes, +) -> None: + """Test the UDP server does not call sendto when not started.""" + with patch( + "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", + return_value=("wav", mock_wav), + ): + voice_assistant_udp_pipeline_v2.started = False + voice_assistant_udp_pipeline_v2.transport = Mock(spec=asyncio.DatagramTransport) + + voice_assistant_udp_pipeline_v2._event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={ + "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} + }, + ) + ) + + await voice_assistant_udp_pipeline_v2._tts_done.wait() + + voice_assistant_udp_pipeline_v2.transport.sendto.assert_not_called() + + +async def test_send_tts_transport_none( + hass: HomeAssistant, + voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, + mock_wav: bytes, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the UDP server does not call sendto when transport is None.""" + with patch( + "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", + return_value=("wav", mock_wav), + ): + voice_assistant_udp_pipeline_v2.started = True + voice_assistant_udp_pipeline_v2.transport = None + + voice_assistant_udp_pipeline_v2._event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={ + "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} + }, + ) + ) + await voice_assistant_udp_pipeline_v2._tts_done.wait() + + assert "No transport to send audio to" in caplog.text + + +async def test_wake_word( + hass: HomeAssistant, + voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test that the pipeline is set to start with Wake word.""" + + async def async_pipeline_from_audio_stream(*args, start_stage, **kwargs): + assert start_stage == 
PipelineStage.WAKE_WORD + + with ( + patch( + "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + patch("asyncio.Event.wait"), # TTS wait event + ): + await voice_assistant_api_pipeline.run_pipeline( + device_id="mock-device-id", + conversation_id=None, + flags=2, + ) + + +async def test_wake_word_exception( + hass: HomeAssistant, + voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test that the pipeline is set to start with Wake word.""" + + async def async_pipeline_from_audio_stream(*args, **kwargs): + raise WakeWordDetectionError("pipeline-not-found", "Pipeline not found") + + with patch( + "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ): + + def handle_event( + event_type: VoiceAssistantEventType, data: dict[str, str] | None + ) -> None: + if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: + assert data is not None + assert data["code"] == "pipeline-not-found" + assert data["message"] == "Pipeline not found" + + voice_assistant_api_pipeline.handle_event = handle_event + + await voice_assistant_api_pipeline.run_pipeline( + device_id="mock-device-id", + conversation_id=None, + flags=2, + ) + + +async def test_wake_word_abort_exception( + hass: HomeAssistant, + voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test that the pipeline is set to start with Wake word.""" + + async def async_pipeline_from_audio_stream(*args, **kwargs): + raise WakeWordDetectionAborted + + with ( + patch( + "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + patch.object(voice_assistant_api_pipeline, "handle_event") as mock_handle_event, + ): + await voice_assistant_api_pipeline.run_pipeline( + device_id="mock-device-id", + conversation_id=None, + flags=2, + ) + + mock_handle_event.assert_not_called() + + +async def test_timer_events( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that injecting timer events results in the correct api client calls.""" + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.TIMERS + }, + ) + await hass.async_block_till_done() + dev = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} + ) + + total_seconds = (1 * 60 * 60) + (2 * 60) + 3 + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_START_TIMER, + { + "name": {"value": "test timer"}, + "hours": {"value": 1}, + "minutes": {"value": 2}, + "seconds": {"value": 3}, + }, + device_id=dev.id, + ) + + mock_client.send_voice_assistant_timer_event.assert_called_with( + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_STARTED, + ANY, + "test timer", + total_seconds, + total_seconds, + True, + ) + + # Increase timer beyond original time and check total_seconds has increased + mock_client.send_voice_assistant_timer_event.reset_mock() + + total_seconds += 5 * 60 + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_INCREASE_TIMER, + { + 
"name": {"value": "test timer"}, + "minutes": {"value": 5}, + }, + device_id=dev.id, + ) + + mock_client.send_voice_assistant_timer_event.assert_called_with( + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_UPDATED, + ANY, + "test timer", + total_seconds, + ANY, + True, + ) + + +async def test_unknown_timer_event( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that unknown (new) timer event types do not result in api calls.""" + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.TIMERS + }, + ) + await hass.async_block_till_done() + dev = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} + ) + + with patch( + "homeassistant.components.esphome.voice_assistant._TIMER_EVENT_TYPES.from_hass", + side_effect=KeyError, + ): + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_START_TIMER, + { + "name": {"value": "test timer"}, + "hours": {"value": 1}, + "minutes": {"value": 2}, + "seconds": {"value": 3}, + }, + device_id=dev.id, + ) + + mock_client.send_voice_assistant_timer_event.assert_not_called() + + +async def test_invalid_pipeline_id( + hass: HomeAssistant, + voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test that the pipeline is set to start with Wake word.""" + + invalid_pipeline_id = "invalid-pipeline-id" + + async def async_pipeline_from_audio_stream(*args, **kwargs): + raise PipelineNotFound( + "pipeline_not_found", f"Pipeline {invalid_pipeline_id} not found" + ) + + with patch( + "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ): + + def handle_event( + event_type: VoiceAssistantEventType, data: dict[str, str] | None + ) -> None: + if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: + assert data is not None + assert data["code"] == "pipeline_not_found" + assert data["message"] == f"Pipeline {invalid_pipeline_id} not found" + + voice_assistant_api_pipeline.handle_event = handle_event + + await voice_assistant_api_pipeline.run_pipeline( + device_id="mock-device-id", + conversation_id=None, + flags=2, + ) diff --git a/tests/components/event/test_init.py b/tests/components/event/test_init.py index c6828c2c290..981a7744beb 100644 --- a/tests/components/event/test_init.py +++ b/tests/components/event/test_init.py @@ -1,10 +1,10 @@ """The tests for the event integration.""" -from collections.abc import Generator from typing import Any from freezegun import freeze_time import pytest +from typing_extensions import Generator from homeassistant.components.event import ( ATTR_EVENT_TYPE, diff --git a/tests/components/evil_genius_labs/conftest.py b/tests/components/evil_genius_labs/conftest.py index fc0725607e2..3941917e130 100644 --- a/tests/components/evil_genius_labs/conftest.py +++ b/tests/components/evil_genius_labs/conftest.py @@ -1,44 +1,36 @@ """Test helpers for Evil Genius Labs.""" -from collections.abc import AsyncGenerator -from typing import Any +import json from unittest.mock import patch import pytest -from homeassistant.const import Platform -from homeassistant.core import 
HomeAssistant from homeassistant.setup import async_setup_component -from homeassistant.util.json import JsonObjectType -from tests.common import ( - MockConfigEntry, - load_json_array_fixture, - load_json_object_fixture, -) +from tests.common import MockConfigEntry, load_fixture @pytest.fixture(scope="package") -def all_fixture() -> dict[str, Any]: +def all_fixture(): """Fixture data.""" - data = load_json_array_fixture("data.json", "evil_genius_labs") + data = json.loads(load_fixture("data.json", "evil_genius_labs")) return {item["name"]: item for item in data} @pytest.fixture(scope="package") -def info_fixture() -> JsonObjectType: +def info_fixture(): """Fixture info.""" - return load_json_object_fixture("info.json", "evil_genius_labs") + return json.loads(load_fixture("info.json", "evil_genius_labs")) @pytest.fixture(scope="package") -def product_fixture() -> dict[str, str]: +def product_fixture(): """Fixture info.""" return {"productName": "Fibonacci256"} @pytest.fixture -def config_entry(hass: HomeAssistant) -> MockConfigEntry: +def config_entry(hass): """Evil genius labs config entry.""" entry = MockConfigEntry(domain="evil_genius_labs", data={"host": "192.168.1.113"}) entry.add_to_hass(hass) @@ -47,13 +39,8 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture async def setup_evil_genius_labs( - hass: HomeAssistant, - config_entry: MockConfigEntry, - all_fixture: dict[str, Any], - info_fixture: JsonObjectType, - product_fixture: dict[str, str], - platforms: list[Platform], -) -> AsyncGenerator[None]: + hass, config_entry, all_fixture, info_fixture, product_fixture, platforms +): """Test up Evil Genius Labs instance.""" with ( patch( diff --git a/tests/components/evohome/__init__.py b/tests/components/evohome/__init__.py deleted file mode 100644 index 588e0f61746..00000000000 --- a/tests/components/evohome/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""The tests for the evohome integration.""" diff --git a/tests/components/evohome/conftest.py b/tests/components/evohome/conftest.py deleted file mode 100644 index 6daab3f32bb..00000000000 --- a/tests/components/evohome/conftest.py +++ /dev/null @@ -1,208 +0,0 @@ -"""Fixtures and helpers for the evohome tests.""" - -from __future__ import annotations - -from collections.abc import AsyncGenerator, Callable -from datetime import datetime, timedelta, timezone -from http import HTTPMethod -from typing import Any -from unittest.mock import MagicMock, patch - -from aiohttp import ClientSession -from evohomeasync2 import EvohomeClient -from evohomeasync2.broker import Broker -from evohomeasync2.controlsystem import ControlSystem -from evohomeasync2.zone import Zone -import pytest - -from homeassistant.components.evohome import CONF_PASSWORD, CONF_USERNAME, DOMAIN -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component -from homeassistant.util import dt as dt_util, slugify -from homeassistant.util.json import JsonArrayType, JsonObjectType - -from .const import ACCESS_TOKEN, REFRESH_TOKEN, USERNAME - -from tests.common import load_json_array_fixture, load_json_object_fixture - - -def user_account_config_fixture(install: str) -> JsonObjectType: - """Load JSON for the config of a user's account.""" - try: - return load_json_object_fixture(f"{install}/user_account.json", DOMAIN) - except FileNotFoundError: - return load_json_object_fixture("default/user_account.json", DOMAIN) - - -def user_locations_config_fixture(install: str) -> 
JsonArrayType: - """Load JSON for the config of a user's installation (a list of locations).""" - return load_json_array_fixture(f"{install}/user_locations.json", DOMAIN) - - -def location_status_fixture(install: str, loc_id: str | None = None) -> JsonObjectType: - """Load JSON for the status of a specific location.""" - if loc_id is None: - _install = load_json_array_fixture(f"{install}/user_locations.json", DOMAIN) - loc_id = _install[0]["locationInfo"]["locationId"] # type: ignore[assignment, call-overload, index] - return load_json_object_fixture(f"{install}/status_{loc_id}.json", DOMAIN) - - -def dhw_schedule_fixture(install: str) -> JsonObjectType: - """Load JSON for the schedule of a domesticHotWater zone.""" - try: - return load_json_object_fixture(f"{install}/schedule_dhw.json", DOMAIN) - except FileNotFoundError: - return load_json_object_fixture("default/schedule_dhw.json", DOMAIN) - - -def zone_schedule_fixture(install: str) -> JsonObjectType: - """Load JSON for the schedule of a temperatureZone zone.""" - try: - return load_json_object_fixture(f"{install}/schedule_zone.json", DOMAIN) - except FileNotFoundError: - return load_json_object_fixture("default/schedule_zone.json", DOMAIN) - - -def mock_get_factory(install: str) -> Callable: - """Return a get method for a specified installation.""" - - async def mock_get( - self: Broker, url: str, **kwargs: Any - ) -> JsonArrayType | JsonObjectType: - """Return the JSON for a HTTP get of a given URL.""" - - # a proxy for the behaviour of the real web API - if self.refresh_token is None: - self.refresh_token = f"new_{REFRESH_TOKEN}" - - if ( - self.access_token_expires is None - or self.access_token_expires < datetime.now() - ): - self.access_token = f"new_{ACCESS_TOKEN}" - self.access_token_expires = datetime.now() + timedelta(minutes=30) - - # assume a valid GET, and return the JSON for that web API - if url == "userAccount": # userAccount - return user_account_config_fixture(install) - - if url.startswith("location"): - if "installationInfo" in url: # location/installationInfo?userId={id} - return user_locations_config_fixture(install) - if "location" in url: # location/{id}/status - return location_status_fixture(install) - - elif "schedule" in url: - if url.startswith("domesticHotWater"): # domesticHotWater/{id}/schedule - return dhw_schedule_fixture(install) - if url.startswith("temperatureZone"): # temperatureZone/{id}/schedule - return zone_schedule_fixture(install) - - pytest.fail(f"Unexpected request: {HTTPMethod.GET} {url}") - - return mock_get - - -@pytest.fixture -def config() -> dict[str, str]: - "Return a default/minimal configuration." - return { - CONF_USERNAME: USERNAME, - CONF_PASSWORD: "password", - } - - -async def setup_evohome( - hass: HomeAssistant, - config: dict[str, str], - install: str = "default", -) -> AsyncGenerator[MagicMock]: - """Set up the evohome integration and return its client. - - The class is mocked here to check the client was instantiated with the correct args. 
- """ - - # set the time zone as for the active evohome location - loc_idx: int = config.get("location_idx", 0) # type: ignore[assignment] - - try: - locn = user_locations_config_fixture(install)[loc_idx] - except IndexError: - if loc_idx == 0: - raise - locn = user_locations_config_fixture(install)[0] - - utc_offset: int = locn["locationInfo"]["timeZone"]["currentOffsetMinutes"] # type: ignore[assignment, call-overload, index] - dt_util.set_default_time_zone(timezone(timedelta(minutes=utc_offset))) - - with ( - patch("homeassistant.components.evohome.evo.EvohomeClient") as mock_client, - patch("homeassistant.components.evohome.ev1.EvohomeClient", return_value=None), - patch("evohomeasync2.broker.Broker.get", mock_get_factory(install)), - ): - evo: EvohomeClient | None = None - - def evohome_client(*args, **kwargs) -> EvohomeClient: - nonlocal evo - evo = EvohomeClient(*args, **kwargs) - return evo - - mock_client.side_effect = evohome_client - - assert await async_setup_component(hass, DOMAIN, {DOMAIN: config}) - await hass.async_block_till_done() - - mock_client.assert_called_once() - - assert mock_client.call_args.args[0] == config[CONF_USERNAME] - assert mock_client.call_args.args[1] == config[CONF_PASSWORD] - - assert isinstance(mock_client.call_args.kwargs["session"], ClientSession) - - assert evo and evo.account_info is not None - - mock_client.return_value = evo - yield mock_client - - -@pytest.fixture -async def evohome( - hass: HomeAssistant, - config: dict[str, str], - install: str, -) -> AsyncGenerator[MagicMock]: - """Return the mocked evohome client for this install fixture.""" - - async for mock_client in setup_evohome(hass, config, install=install): - yield mock_client - - -@pytest.fixture -async def ctl_id( - hass: HomeAssistant, - config: dict[str, str], - install: MagicMock, -) -> AsyncGenerator[str]: - """Return the entity_id of the evohome integration's controller.""" - - async for mock_client in setup_evohome(hass, config, install=install): - evo: EvohomeClient = mock_client.return_value - ctl: ControlSystem = evo._get_single_tcs() - - yield f"{Platform.CLIMATE}.{slugify(ctl.location.name)}" - - -@pytest.fixture -async def zone_id( - hass: HomeAssistant, - config: dict[str, str], - install: MagicMock, -) -> AsyncGenerator[str]: - """Return the entity_id of the evohome integration's first zone.""" - - async for mock_client in setup_evohome(hass, config, install=install): - evo: EvohomeClient = mock_client.return_value - zone: Zone = list(evo._get_single_tcs().zones.values())[0] - - yield f"{Platform.CLIMATE}.{slugify(zone.name)}" diff --git a/tests/components/evohome/const.py b/tests/components/evohome/const.py deleted file mode 100644 index c3dc92c3fbc..00000000000 --- a/tests/components/evohome/const.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Constants for the evohome tests.""" - -from __future__ import annotations - -from typing import Final - -ACCESS_TOKEN: Final = "at_1dc7z657UKzbhKA..." -REFRESH_TOKEN: Final = "rf_jg68ZCKYdxEI3fF..." -SESSION_ID: Final = "F7181186..." 
-USERNAME: Final = "test_user@gmail.com" - -# The h-numbers refer to issues in HA's core repo -TEST_INSTALLS: Final = ( - "minimal", # evohome: single zone, no DHW - "default", # evohome: multi-zone, with DHW - "h032585", # VisionProWifi: no preset modes for TCS, zoneId=systemId - "h099625", # RoundThermostat - "sys_004", # RoundModulation -) -# "botched", # as default: but with activeFaults, ghost zones & unknown types - -TEST_INSTALLS_WITH_DHW: Final = ("default",) diff --git a/tests/components/evohome/fixtures/botched/status_2738909.json b/tests/components/evohome/fixtures/botched/status_2738909.json deleted file mode 100644 index 6d555ba4e3e..00000000000 --- a/tests/components/evohome/fixtures/botched/status_2738909.json +++ /dev/null @@ -1,125 +0,0 @@ -{ - "locationId": "2738909", - "gateways": [ - { - "gatewayId": "2499896", - "temperatureControlSystems": [ - { - "systemId": "3432522", - "zones": [ - { - "zoneId": "3432521", - "name": "Dead Zone", - "temperatureStatus": { "isAvailable": false }, - "setpointStatus": { - "targetHeatTemperature": 17.0, - "setpointMode": "FollowSchedule" - }, - "activeFaults": [] - }, - { - "zoneId": "3432576", - "name": "Main Room", - "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, - "setpointStatus": { - "targetHeatTemperature": 17.0, - "setpointMode": "PermanentOverride" - }, - "activeFaults": [ - { - "faultType": "TempZoneActuatorCommunicationLost", - "since": "2022-03-02T15:56:01" - } - ] - }, - { - "zoneId": "3432577", - "name": "Front Room", - "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, - "setpointStatus": { - "targetHeatTemperature": 21.0, - "setpointMode": "TemporaryOverride", - "until": "2022-03-07T19:00:00Z" - }, - "activeFaults": [ - { - "faultType": "TempZoneActuatorLowBattery", - "since": "2022-03-02T04:50:20" - } - ] - }, - { - "zoneId": "3432578", - "temperatureStatus": { "temperature": 20.0, "isAvailable": true }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 17.0, - "setpointMode": "FollowSchedule" - }, - "name": "Kitchen" - }, - { - "zoneId": "3432579", - "temperatureStatus": { "temperature": 20.0, "isAvailable": true }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 16.0, - "setpointMode": "FollowSchedule" - }, - "name": "Bathroom Dn" - }, - { - "zoneId": "3432580", - "temperatureStatus": { "temperature": 21.0, "isAvailable": true }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 16.0, - "setpointMode": "FollowSchedule" - }, - "name": "Main Bedroom" - }, - { - "zoneId": "3449703", - "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 17.0, - "setpointMode": "FollowSchedule" - }, - "name": "Kids Room" - }, - { - "zoneId": "3449740", - "temperatureStatus": { "temperature": 21.5, "isAvailable": true }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 16.5, - "setpointMode": "FollowSchedule" - }, - "name": "" - }, - { - "zoneId": "3450733", - "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 14.0, - "setpointMode": "PermanentOverride" - }, - "name": "Spare Room" - } - ], - "dhw": { - "dhwId": "3933910", - "temperatureStatus": { "temperature": 23.0, "isAvailable": true }, - "stateStatus": { "state": "Off", "mode": "PermanentOverride" }, - "activeFaults": [] - }, - "activeFaults": [], - "systemModeStatus": { "mode": "AutoWithEco", 
"isPermanent": true } - } - ], - "activeFaults": [] - } - ] -} diff --git a/tests/components/evohome/fixtures/botched/user_locations.json b/tests/components/evohome/fixtures/botched/user_locations.json deleted file mode 100644 index f2f4091a2dc..00000000000 --- a/tests/components/evohome/fixtures/botched/user_locations.json +++ /dev/null @@ -1,346 +0,0 @@ -[ - { - "locationInfo": { - "locationId": "2738909", - "name": "My Home", - "streetAddress": "1 Main Street", - "city": "London", - "country": "UnitedKingdom", - "postcode": "E1 1AA", - "locationType": "Residential", - "useDaylightSaveSwitching": true, - "timeZone": { - "timeZoneId": "GMTStandardTime", - "displayName": "(UTC+00:00) Dublin, Edinburgh, Lisbon, London", - "offsetMinutes": 0, - "currentOffsetMinutes": 60, - "supportsDaylightSaving": true - }, - "locationOwner": { - "userId": "2263181", - "username": "user_2263181@gmail.com", - "firstname": "John", - "lastname": "Smith" - } - }, - "gateways": [ - { - "gatewayInfo": { - "gatewayId": "2499896", - "mac": "00D02DEE0000", - "crc": "1234", - "isWiFi": false - }, - "temperatureControlSystems": [ - { - "systemId": "3432522", - "modelType": "EvoTouch", - "zones": [ - { - "zoneId": "3432521", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Dead Zone", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3432576", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Main Room", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3432577", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Front Room", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3432578", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": 
"Kitchen", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3432579", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Bathroom Dn", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3432580", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Main Bedroom", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3449703", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Kids Room", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3449740", - "modelType": "Unknown", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "", - "zoneType": "Unknown" - }, - { - "zoneId": "3450733", - "modelType": "xxx", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Spare Room", - "zoneType": "xxx" - } - ], - "dhw": { - "dhwId": "3933910", - "dhwStateCapabilitiesResponse": { - "allowedStates": ["On", "Off"], - "allowedModes": [ - "FollowSchedule", - "PermanentOverride", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilitiesResponse": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00" - } - }, - "allowedSystemModes": [ - { - "systemMode": "HeatingOff", - "canBePermanent": true, - "canBeTemporary": false - }, - { - 
"systemMode": "Auto", - "canBePermanent": true, - "canBeTemporary": false - }, - { - "systemMode": "AutoWithReset", - "canBePermanent": true, - "canBeTemporary": false - }, - { - "systemMode": "AutoWithEco", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "1.00:00:00", - "timingResolution": "01:00:00", - "timingMode": "Duration" - }, - { - "systemMode": "Away", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "99.00:00:00", - "timingResolution": "1.00:00:00", - "timingMode": "Period" - }, - { - "systemMode": "DayOff", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "99.00:00:00", - "timingResolution": "1.00:00:00", - "timingMode": "Period" - }, - { - "systemMode": "Custom", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "99.00:00:00", - "timingResolution": "1.00:00:00", - "timingMode": "Period" - } - ] - } - ] - } - ] - } -] diff --git a/tests/components/evohome/fixtures/default/schedule_dhw.json b/tests/components/evohome/fixtures/default/schedule_dhw.json deleted file mode 100644 index da9a225fb82..00000000000 --- a/tests/components/evohome/fixtures/default/schedule_dhw.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "dailySchedules": [ - { - "dayOfWeek": "Monday", - "switchpoints": [ - { "dhwState": "On", "timeOfDay": "06:30:00" }, - { "dhwState": "Off", "timeOfDay": "08:30:00" }, - { "dhwState": "On", "timeOfDay": "12:00:00" }, - { "dhwState": "Off", "timeOfDay": "13:00:00" }, - { "dhwState": "On", "timeOfDay": "16:30:00" }, - { "dhwState": "Off", "timeOfDay": "22:30:00" } - ] - }, - { - "dayOfWeek": "Tuesday", - "switchpoints": [ - { "dhwState": "On", "timeOfDay": "06:30:00" }, - { "dhwState": "Off", "timeOfDay": "08:30:00" }, - { "dhwState": "On", "timeOfDay": "12:00:00" }, - { "dhwState": "Off", "timeOfDay": "13:00:00" }, - { "dhwState": "On", "timeOfDay": "16:30:00" }, - { "dhwState": "Off", "timeOfDay": "22:30:00" } - ] - }, - { - "dayOfWeek": "Wednesday", - "switchpoints": [ - { "dhwState": "On", "timeOfDay": "06:30:00" }, - { "dhwState": "Off", "timeOfDay": "08:30:00" }, - { "dhwState": "On", "timeOfDay": "12:00:00" }, - { "dhwState": "Off", "timeOfDay": "13:00:00" }, - { "dhwState": "On", "timeOfDay": "16:30:00" }, - { "dhwState": "Off", "timeOfDay": "22:30:00" } - ] - }, - { - "dayOfWeek": "Thursday", - "switchpoints": [ - { "dhwState": "On", "timeOfDay": "06:30:00" }, - { "dhwState": "Off", "timeOfDay": "08:30:00" }, - { "dhwState": "On", "timeOfDay": "12:00:00" }, - { "dhwState": "Off", "timeOfDay": "13:00:00" }, - { "dhwState": "On", "timeOfDay": "16:30:00" }, - { "dhwState": "Off", "timeOfDay": "22:30:00" } - ] - }, - { - "dayOfWeek": "Friday", - "switchpoints": [ - { "dhwState": "On", "timeOfDay": "06:30:00" }, - { "dhwState": "Off", "timeOfDay": "08:30:00" }, - { "dhwState": "On", "timeOfDay": "12:00:00" }, - { "dhwState": "Off", "timeOfDay": "13:00:00" }, - { "dhwState": "On", "timeOfDay": "16:30:00" }, - { "dhwState": "Off", "timeOfDay": "22:30:00" } - ] - }, - { - "dayOfWeek": "Saturday", - "switchpoints": [ - { "dhwState": "On", "timeOfDay": "06:30:00" }, - { "dhwState": "Off", "timeOfDay": "09:30:00" }, - { "dhwState": "On", "timeOfDay": "12:00:00" }, - { "dhwState": "Off", "timeOfDay": "13:00:00" }, - { "dhwState": "On", "timeOfDay": "16:30:00" }, - { "dhwState": "Off", "timeOfDay": "23:00:00" } - ] - }, - { - "dayOfWeek": "Sunday", - "switchpoints": [ - { "dhwState": "On", "timeOfDay": "06:30:00" }, - { "dhwState": "Off", "timeOfDay": "09:30:00" }, - { "dhwState": "On", "timeOfDay": 
"12:00:00" }, - { "dhwState": "Off", "timeOfDay": "13:00:00" }, - { "dhwState": "On", "timeOfDay": "16:30:00" }, - { "dhwState": "Off", "timeOfDay": "23:00:00" } - ] - } - ] -} diff --git a/tests/components/evohome/fixtures/default/schedule_zone.json b/tests/components/evohome/fixtures/default/schedule_zone.json deleted file mode 100644 index 5030d92ff3d..00000000000 --- a/tests/components/evohome/fixtures/default/schedule_zone.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "dailySchedules": [ - { - "dayOfWeek": "Monday", - "switchpoints": [ - { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, - { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, - { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, - { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } - ] - }, - { - "dayOfWeek": "Tuesday", - "switchpoints": [ - { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, - { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, - { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, - { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } - ] - }, - { - "dayOfWeek": "Wednesday", - "switchpoints": [ - { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, - { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, - { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, - { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } - ] - }, - { - "dayOfWeek": "Thursday", - "switchpoints": [ - { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, - { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, - { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, - { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } - ] - }, - { - "dayOfWeek": "Friday", - "switchpoints": [ - { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, - { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, - { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, - { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } - ] - }, - { - "dayOfWeek": "Saturday", - "switchpoints": [ - { "heatSetpoint": 18.5, "timeOfDay": "07:00:00" }, - { "heatSetpoint": 16.0, "timeOfDay": "08:30:00" }, - { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, - { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } - ] - }, - { - "dayOfWeek": "Sunday", - "switchpoints": [ - { "heatSetpoint": 18.5, "timeOfDay": "07:00:00" }, - { "heatSetpoint": 16.0, "timeOfDay": "08:30:00" }, - { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, - { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } - ] - } - ] -} diff --git a/tests/components/evohome/fixtures/default/status_2738909.json b/tests/components/evohome/fixtures/default/status_2738909.json deleted file mode 100644 index 48754595d0f..00000000000 --- a/tests/components/evohome/fixtures/default/status_2738909.json +++ /dev/null @@ -1,105 +0,0 @@ -{ - "locationId": "2738909", - "gateways": [ - { - "gatewayId": "2499896", - "temperatureControlSystems": [ - { - "systemId": "3432522", - "zones": [ - { - "zoneId": "3432521", - "name": "Dead Zone", - "temperatureStatus": { "isAvailable": false }, - "setpointStatus": { - "targetHeatTemperature": 17.0, - "setpointMode": "FollowSchedule" - }, - "activeFaults": [] - }, - { - "zoneId": "3432576", - "name": "Main Room", - "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, - "setpointStatus": { - "targetHeatTemperature": 17.0, - "setpointMode": "PermanentOverride" - }, - "activeFaults": [] - }, - { - "zoneId": "3432577", - "name": "Front Room", - "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, - "setpointStatus": { - "targetHeatTemperature": 21.0, - "setpointMode": "TemporaryOverride", - "until": "2022-03-07T19:00:00Z" - }, - 
"activeFaults": [] - }, - { - "zoneId": "3432578", - "temperatureStatus": { "temperature": 20.0, "isAvailable": true }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 17.0, - "setpointMode": "FollowSchedule" - }, - "name": "Kitchen" - }, - { - "zoneId": "3432579", - "temperatureStatus": { "temperature": 20.0, "isAvailable": true }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 16.0, - "setpointMode": "FollowSchedule" - }, - "name": "Bathroom Dn" - }, - { - "zoneId": "3432580", - "temperatureStatus": { "temperature": 21.0, "isAvailable": true }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 16.0, - "setpointMode": "FollowSchedule" - }, - "name": "Main Bedroom" - }, - { - "zoneId": "3449703", - "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 17.0, - "setpointMode": "FollowSchedule" - }, - "name": "Kids Room" - }, - { - "zoneId": "3450733", - "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 14.0, - "setpointMode": "PermanentOverride" - }, - "name": "Spare Room" - } - ], - "dhw": { - "dhwId": "3933910", - "temperatureStatus": { "temperature": 23.0, "isAvailable": true }, - "stateStatus": { "state": "Off", "mode": "PermanentOverride" }, - "activeFaults": [] - }, - "activeFaults": [], - "systemModeStatus": { "mode": "AutoWithEco", "isPermanent": true } - } - ], - "activeFaults": [] - } - ] -} diff --git a/tests/components/evohome/fixtures/default/user_account.json b/tests/components/evohome/fixtures/default/user_account.json deleted file mode 100644 index 99a96a7961e..00000000000 --- a/tests/components/evohome/fixtures/default/user_account.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "userId": "2263181", - "username": "user_2263181@gmail.com", - "firstname": "John", - "lastname": "Smith", - "streetAddress": "1 Main Street", - "city": "London", - "postcode": "E1 1AA", - "country": "UnitedKingdom", - "language": "enGB" -} diff --git a/tests/components/evohome/fixtures/default/user_locations.json b/tests/components/evohome/fixtures/default/user_locations.json deleted file mode 100644 index 90cd4366b75..00000000000 --- a/tests/components/evohome/fixtures/default/user_locations.json +++ /dev/null @@ -1,320 +0,0 @@ -[ - { - "locationInfo": { - "locationId": "2738909", - "name": "My Home", - "streetAddress": "1 Main Street", - "city": "London", - "country": "UnitedKingdom", - "postcode": "E1 1AA", - "locationType": "Residential", - "useDaylightSaveSwitching": true, - "timeZone": { - "timeZoneId": "GMTStandardTime", - "displayName": "(UTC+00:00) Dublin, Edinburgh, Lisbon, London", - "offsetMinutes": 0, - "currentOffsetMinutes": 60, - "supportsDaylightSaving": true - }, - "locationOwner": { - "userId": "2263181", - "username": "user_2263181@gmail.com", - "firstname": "John", - "lastname": "Smith" - } - }, - "gateways": [ - { - "gatewayInfo": { - "gatewayId": "2499896", - "mac": "00D02DEE0000", - "crc": "1234", - "isWiFi": false - }, - "temperatureControlSystems": [ - { - "systemId": "3432522", - "modelType": "EvoTouch", - "zones": [ - { - "zoneId": "3432521", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": 
"1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Dead Zone", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3432576", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Main Room", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3432577", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Front Room", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3432578", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Kitchen", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3432579", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Bathroom Dn", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3432580", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Main Bedroom", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3449703", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - 
"FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Kids Room", - "zoneType": "RadiatorZone" - }, - { - "zoneId": "3450733", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Spare Room", - "zoneType": "RadiatorZone" - } - ], - "dhw": { - "dhwId": "3933910", - "dhwStateCapabilitiesResponse": { - "allowedStates": ["On", "Off"], - "allowedModes": [ - "FollowSchedule", - "PermanentOverride", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilitiesResponse": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00" - } - }, - "allowedSystemModes": [ - { - "systemMode": "HeatingOff", - "canBePermanent": true, - "canBeTemporary": false - }, - { - "systemMode": "Auto", - "canBePermanent": true, - "canBeTemporary": false - }, - { - "systemMode": "AutoWithReset", - "canBePermanent": true, - "canBeTemporary": false - }, - { - "systemMode": "AutoWithEco", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "1.00:00:00", - "timingResolution": "01:00:00", - "timingMode": "Duration" - }, - { - "systemMode": "Away", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "99.00:00:00", - "timingResolution": "1.00:00:00", - "timingMode": "Period" - }, - { - "systemMode": "DayOff", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "99.00:00:00", - "timingResolution": "1.00:00:00", - "timingMode": "Period" - }, - { - "systemMode": "Custom", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "99.00:00:00", - "timingResolution": "1.00:00:00", - "timingMode": "Period" - } - ] - } - ] - } - ] - } -] diff --git a/tests/components/evohome/fixtures/h032585/status_111111.json b/tests/components/evohome/fixtures/h032585/status_111111.json deleted file mode 100644 index 0ea535c2461..00000000000 --- a/tests/components/evohome/fixtures/h032585/status_111111.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "locationId": "111111", - "gateways": [ - { - "gatewayId": "222222", - "temperatureControlSystems": [ - { - "systemId": "416856", - "zones": [ - { - "zoneId": "416856", - "temperatureStatus": { - "temperature": 21.5, - "isAvailable": true - }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 21.5, - "setpointMode": "FollowSchedule" - }, - "name": "THERMOSTAT" - } - ], - "activeFaults": [], - "systemModeStatus": { "mode": "Heat", "isPermanent": true } - } - ], - "activeFaults": [] - } - ] -} diff --git a/tests/components/evohome/fixtures/h032585/temperatures.json b/tests/components/evohome/fixtures/h032585/temperatures.json deleted file mode 100644 index a2015c94f46..00000000000 --- a/tests/components/evohome/fixtures/h032585/temperatures.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "416856": 21.5 -} diff --git 
a/tests/components/evohome/fixtures/h032585/user_locations.json b/tests/components/evohome/fixtures/h032585/user_locations.json deleted file mode 100644 index b4ea2e5c420..00000000000 --- a/tests/components/evohome/fixtures/h032585/user_locations.json +++ /dev/null @@ -1,79 +0,0 @@ -[ - { - "locationInfo": { - "locationId": "111111", - "name": "My Home", - "timeZone": { - "timeZoneId": "GMTStandardTime", - "displayName": "(UTC+00:00) Dublin, Edinburgh, Lisbon, London", - "offsetMinutes": 0, - "currentOffsetMinutes": 60, - "supportsDaylightSaving": true - } - }, - "gateways": [ - { - "gatewayInfo": { - "gatewayId": "222222", - "mac": "00D02DEE0000", - "crc": "1234", - "isWiFi": false - }, - "temperatureControlSystems": [ - { - "systemId": "416856", - "modelType": "VisionProWifiRetail", - "zones": [ - { - "zoneId": "416856", - "modelType": "VisionProWifiRetail", - "setpointCapabilities": { - "vacationHoldCapabilities": { - "isChangeable": true, - "isCancelable": true, - "minDuration": "1.00:00:00", - "maxDuration": "365.23:45:00", - "timingResolution": "00:15:00" - }, - "maxHeatSetpoint": 32.0, - "minHeatSetpoint": 4.5, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride", - "VacationHold" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:15:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 4, - "minSwitchpointsPerDay": 0, - "timingResolution": "00:15:00", - "setpointValueResolution": 0.5 - }, - "name": "THERMOSTAT", - "zoneType": "Thermostat" - } - ], - "allowedSystemModes": [ - { - "systemMode": "Off", - "canBePermanent": true, - "canBeTemporary": false - }, - { - "systemMode": "Heat", - "canBePermanent": true, - "canBeTemporary": false - } - ] - } - ] - } - ] - } -] diff --git a/tests/components/evohome/fixtures/h099625/status_111111.json b/tests/components/evohome/fixtures/h099625/status_111111.json deleted file mode 100644 index 149d8aba783..00000000000 --- a/tests/components/evohome/fixtures/h099625/status_111111.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "locationId": "111111", - "gateways": [ - { - "gatewayId": "222222", - "temperatureControlSystems": [ - { - "systemId": "8557535", - "zones": [ - { - "zoneId": "8557539", - "temperatureStatus": { - "temperature": 21.5, - "isAvailable": true - }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 21.5, - "setpointMode": "FollowSchedule" - }, - "name": "THERMOSTAT" - }, - { - "zoneId": "8557541", - "temperatureStatus": { - "temperature": 21.5, - "isAvailable": true - }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 21.5, - "setpointMode": "FollowSchedule" - }, - "name": "THERMOSTAT" - } - ], - "activeFaults": [], - "systemModeStatus": { "mode": "Auto", "isPermanent": true } - } - ], - "activeFaults": [] - } - ] -} diff --git a/tests/components/evohome/fixtures/h099625/user_locations.json b/tests/components/evohome/fixtures/h099625/user_locations.json deleted file mode 100644 index cc32caccc73..00000000000 --- a/tests/components/evohome/fixtures/h099625/user_locations.json +++ /dev/null @@ -1,113 +0,0 @@ -[ - { - "locationInfo": { - "locationId": "111111", - "name": "My Home", - "timeZone": { - "timeZoneId": "FLEStandardTime", - "displayName": "(UTC+02:00) Helsinki, Kyiv, Riga, Sofia, Tallinn, Vilnius", - "offsetMinutes": 120, - "currentOffsetMinutes": 180, - "supportsDaylightSaving": true - } - }, - "gateways": [ - { - "gatewayInfo": { - "gatewayId": 
"222222", - "mac": "00D02DEE0000", - "crc": "1234", - "isWiFi": false - }, - "temperatureControlSystems": [ - { - "systemId": "8557535", - "modelType": "EvoTouch", - "zones": [ - { - "zoneId": "8557539", - "modelType": "RoundWireless", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 0, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Thermostat", - "zoneType": "Thermostat" - }, - { - "zoneId": "8557541", - "modelType": "RoundWireless", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 0, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Thermostat 2", - "zoneType": "Thermostat" - } - ], - "allowedSystemModes": [ - { - "systemMode": "Auto", - "canBePermanent": true, - "canBeTemporary": false - }, - { - "systemMode": "AutoWithEco", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "1.00:00:00", - "timingResolution": "01:00:00", - "timingMode": "Duration" - }, - { - "systemMode": "Away", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "99.00:00:00", - "timingResolution": "1.00:00:00", - "timingMode": "Period" - }, - { - "systemMode": "HeatingOff", - "canBePermanent": true, - "canBeTemporary": false - } - ] - } - ] - } - ] - } -] diff --git a/tests/components/evohome/fixtures/minimal/status_2738909.json b/tests/components/evohome/fixtures/minimal/status_2738909.json deleted file mode 100644 index 4b344314a67..00000000000 --- a/tests/components/evohome/fixtures/minimal/status_2738909.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "locationId": "2738909", - "gateways": [ - { - "gatewayId": "2499896", - "temperatureControlSystems": [ - { - "systemId": "3432522", - "zones": [ - { - "zoneId": "3432576", - "name": "Main Room", - "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, - "setpointStatus": { - "targetHeatTemperature": 17.0, - "setpointMode": "FollowSchedule" - }, - "activeFaults": [] - } - ], - "activeFaults": [], - "systemModeStatus": { "mode": "AutoWithEco", "isPermanent": true } - } - ], - "activeFaults": [] - } - ] -} diff --git a/tests/components/evohome/fixtures/minimal/user_locations.json b/tests/components/evohome/fixtures/minimal/user_locations.json deleted file mode 100644 index 932686d8728..00000000000 --- a/tests/components/evohome/fixtures/minimal/user_locations.json +++ /dev/null @@ -1,120 +0,0 @@ -[ - { - "locationInfo": { - "locationId": "2738909", - "name": "My Home", - "streetAddress": "1 Main Street", - "city": "London", - "country": "UnitedKingdom", - "postcode": "E1 1AA", - "locationType": "Residential", - "useDaylightSaveSwitching": true, - "timeZone": { - "timeZoneId": "GMTStandardTime", - "displayName": "(UTC+00:00) Dublin, Edinburgh, Lisbon, London", - "offsetMinutes": 0, - "currentOffsetMinutes": 60, - "supportsDaylightSaving": true - }, 
- "locationOwner": { - "userId": "2263181", - "username": "user_2263181@gmail.com", - "firstname": "John", - "lastname": "Smith" - } - }, - "gateways": [ - { - "gatewayInfo": { - "gatewayId": "2499896", - "mac": "00D02DEE0000", - "crc": "1234", - "isWiFi": false - }, - "temperatureControlSystems": [ - { - "systemId": "3432522", - "modelType": "EvoTouch", - "zones": [ - { - "zoneId": "3432576", - "modelType": "HeatingZone", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 1, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Main Room", - "zoneType": "RadiatorZone" - } - ], - "allowedSystemModes": [ - { - "systemMode": "HeatingOff", - "canBePermanent": true, - "canBeTemporary": false - }, - { - "systemMode": "Auto", - "canBePermanent": true, - "canBeTemporary": false - }, - { - "systemMode": "AutoWithReset", - "canBePermanent": true, - "canBeTemporary": false - }, - { - "systemMode": "AutoWithEco", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "1.00:00:00", - "timingResolution": "01:00:00", - "timingMode": "Duration" - }, - { - "systemMode": "Away", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "99.00:00:00", - "timingResolution": "1.00:00:00", - "timingMode": "Period" - }, - { - "systemMode": "DayOff", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "99.00:00:00", - "timingResolution": "1.00:00:00", - "timingMode": "Period" - }, - { - "systemMode": "Custom", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "99.00:00:00", - "timingResolution": "1.00:00:00", - "timingMode": "Period" - } - ] - } - ] - } - ] - } -] diff --git a/tests/components/evohome/fixtures/sys_004/status_3164610.json b/tests/components/evohome/fixtures/sys_004/status_3164610.json deleted file mode 100644 index a9ef3f6ee28..00000000000 --- a/tests/components/evohome/fixtures/sys_004/status_3164610.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "locationId": "3164610", - "gateways": [ - { - "gatewayId": "2938388", - "temperatureControlSystems": [ - { - "systemId": "4187769", - "zones": [ - { - "zoneId": "4187768", - "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, - "activeFaults": [], - "setpointStatus": { - "targetHeatTemperature": 15.0, - "setpointMode": "PermanentOverride" - }, - "name": "Thermostat" - } - ], - "activeFaults": [], - "systemModeStatus": { "mode": "Auto", "isPermanent": true } - } - ], - "activeFaults": [ - { - "faultType": "GatewayCommunicationLost", - "since": "2023-05-04T18:47:36.7727046" - } - ] - } - ] -} diff --git a/tests/components/evohome/fixtures/sys_004/user_locations.json b/tests/components/evohome/fixtures/sys_004/user_locations.json deleted file mode 100644 index 9defab8b6ee..00000000000 --- a/tests/components/evohome/fixtures/sys_004/user_locations.json +++ /dev/null @@ -1,99 +0,0 @@ -[ - { - "locationInfo": { - "locationId": "3164610", - "name": "Living room", - "streetAddress": "1 Main Road", - "city": "Boomtown", - "country": "Netherlands", - "postcode": "1234XX", - "locationType": "Residential", - "useDaylightSaveSwitching": true, - "timeZone": { - "timeZoneId": "WEuropeStandardTime", - "displayName": 
"(UTC+01:00) Amsterdam, Berlijn, Bern, Rome, Stockholm, Wenen", - "offsetMinutes": 60, - "currentOffsetMinutes": 120, - "supportsDaylightSaving": true - }, - "locationOwner": { - "userId": "2624305", - "username": "user_2624305@gmail.com", - "firstname": "Chris", - "lastname": "Jones" - } - }, - "gateways": [ - { - "gatewayInfo": { - "gatewayId": "2938388", - "mac": "00D02D5A7000", - "crc": "1234", - "isWiFi": false - }, - "temperatureControlSystems": [ - { - "systemId": "4187769", - "modelType": "EvoTouch", - "zones": [ - { - "zoneId": "4187768", - "modelType": "RoundModulation", - "setpointCapabilities": { - "maxHeatSetpoint": 35.0, - "minHeatSetpoint": 5.0, - "valueResolution": 0.5, - "canControlHeat": true, - "canControlCool": false, - "allowedSetpointModes": [ - "PermanentOverride", - "FollowSchedule", - "TemporaryOverride" - ], - "maxDuration": "1.00:00:00", - "timingResolution": "00:10:00" - }, - "scheduleCapabilities": { - "maxSwitchpointsPerDay": 6, - "minSwitchpointsPerDay": 0, - "timingResolution": "00:10:00", - "setpointValueResolution": 0.5 - }, - "name": "Thermostat", - "zoneType": "Thermostat" - } - ], - "allowedSystemModes": [ - { - "systemMode": "Auto", - "canBePermanent": true, - "canBeTemporary": false - }, - { - "systemMode": "AutoWithEco", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "1.00:00:00", - "timingResolution": "01:00:00", - "timingMode": "Duration" - }, - { - "systemMode": "Away", - "canBePermanent": true, - "canBeTemporary": true, - "maxDuration": "99.00:00:00", - "timingResolution": "1.00:00:00", - "timingMode": "Period" - }, - { - "systemMode": "HeatingOff", - "canBePermanent": true, - "canBeTemporary": false - } - ] - } - ] - } - ] - } -] diff --git a/tests/components/evohome/snapshots/test_climate.ambr b/tests/components/evohome/snapshots/test_climate.ambr deleted file mode 100644 index ce7fcf2744e..00000000000 --- a/tests/components/evohome/snapshots/test_climate.ambr +++ /dev/null @@ -1,1459 +0,0 @@ -# serializer version: 1 -# name: test_ctl_set_hvac_mode[default] - list([ - tuple( - 'HeatingOff', - ), - tuple( - 'Auto', - ), - ]) -# --- -# name: test_ctl_set_hvac_mode[h032585] - list([ - tuple( - 'Off', - ), - tuple( - 'Heat', - ), - ]) -# --- -# name: test_ctl_set_hvac_mode[h099625] - list([ - tuple( - 'HeatingOff', - ), - tuple( - 'Auto', - ), - ]) -# --- -# name: test_ctl_set_hvac_mode[minimal] - list([ - tuple( - 'HeatingOff', - ), - tuple( - 'Auto', - ), - ]) -# --- -# name: test_ctl_set_hvac_mode[sys_004] - list([ - tuple( - 'HeatingOff', - ), - tuple( - 'Auto', - ), - ]) -# --- -# name: test_ctl_turn_off[default] - list([ - tuple( - 'HeatingOff', - ), - ]) -# --- -# name: test_ctl_turn_off[h032585] - list([ - tuple( - 'Off', - ), - ]) -# --- -# name: test_ctl_turn_off[h099625] - list([ - tuple( - 'HeatingOff', - ), - ]) -# --- -# name: test_ctl_turn_off[minimal] - list([ - tuple( - 'HeatingOff', - ), - ]) -# --- -# name: test_ctl_turn_off[sys_004] - list([ - tuple( - 'HeatingOff', - ), - ]) -# --- -# name: test_ctl_turn_on[default] - list([ - tuple( - 'Auto', - ), - ]) -# --- -# name: test_ctl_turn_on[h032585] - list([ - tuple( - 'Heat', - ), - ]) -# --- -# name: test_ctl_turn_on[h099625] - list([ - tuple( - 'Auto', - ), - ]) -# --- -# name: test_ctl_turn_on[minimal] - list([ - tuple( - 'Auto', - ), - ]) -# --- -# name: test_ctl_turn_on[sys_004] - list([ - tuple( - 'Auto', - ), - ]) -# --- -# name: test_setup_platform[botched][climate.bathroom_dn-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'current_temperature': 20.0, - 'friendly_name': 'Bathroom Dn', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 16.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 20.0, - }), - 'zone_id': '3432579', - }), - 'supported_features': , - 'temperature': 16.0, - }), - 'context': , - 'entity_id': 'climate.bathroom_dn', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[botched][climate.dead_zone-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'Dead Zone', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': False, - }), - 'zone_id': '3432521', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.dead_zone', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[botched][climate.front_room-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.0, - 'friendly_name': 'Front Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'temporary', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - dict({ - 'faultType': 'TempZoneActuatorLowBattery', - 'since': '2022-03-02T04:50:20', - }), - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'TemporaryOverride', - 'target_heat_temperature': 21.0, - 'until': '2022-03-07T20:00:00+01:00', - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.0, - }), - 'zone_id': '3432577', - }), - 'supported_features': , - 'temperature': 21.0, - }), - 'context': , - 'entity_id': 'climate.front_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[botched][climate.kids_room-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.5, - 'friendly_name': 'Kids Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 
'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.5, - }), - 'zone_id': '3449703', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.kids_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[botched][climate.kitchen-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 20.0, - 'friendly_name': 'Kitchen', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 20.0, - }), - 'zone_id': '3432578', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.kitchen', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[botched][climate.main_bedroom-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.0, - 'friendly_name': 'Main Bedroom', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 16.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 21.0, - }), - 'zone_id': '3432580', - }), - 'supported_features': , - 'temperature': 16.0, - }), - 'context': , - 'entity_id': 'climate.main_bedroom', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[botched][climate.main_room-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.0, - 'friendly_name': 'Main Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'permanent', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - dict({ - 'faultType': 'TempZoneActuatorCommunicationLost', - 'since': '2022-03-02T15:56:01', - }), - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'PermanentOverride', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.0, - }), - 'zone_id': '3432576', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.main_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[botched][climate.my_home-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.7, - 'friendly_name': 'My Home', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:thermostat', - 'max_temp': 35, - 'min_temp': 7, - 'preset_mode': 'eco', - 'preset_modes': list([ - 'Reset', - 'eco', - 'away', - 'home', - 'Custom', - ]), - 'status': dict({ - 'active_system_faults': list([ - ]), - 'system_id': '3432522', - 'system_mode_status': dict({ - 'is_permanent': True, - 'mode': 'AutoWithEco', - }), - }), - 'supported_features': , - }), - 'context': , - 'entity_id': 'climate.my_home', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[default][climate.bathroom_dn-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 20.0, - 'friendly_name': 'Bathroom Dn', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 16.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 20.0, - }), - 'zone_id': '3432579', - }), - 'supported_features': , - 'temperature': 16.0, - }), - 'context': , - 'entity_id': 'climate.bathroom_dn', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[default][climate.dead_zone-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'Dead Zone', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': False, - }), - 'zone_id': '3432521', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.dead_zone', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[default][climate.front_room-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.0, - 'friendly_name': 'Front Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'temporary', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'TemporaryOverride', - 'target_heat_temperature': 21.0, - 'until': '2022-03-07T20:00:00+01:00', - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.0, - }), - 'zone_id': '3432577', - }), - 'supported_features': , - 'temperature': 21.0, - }), - 'context': , - 'entity_id': 
'climate.front_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[default][climate.kids_room-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.5, - 'friendly_name': 'Kids Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.5, - }), - 'zone_id': '3449703', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.kids_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[default][climate.kitchen-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 20.0, - 'friendly_name': 'Kitchen', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 20.0, - }), - 'zone_id': '3432578', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.kitchen', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[default][climate.main_bedroom-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.0, - 'friendly_name': 'Main Bedroom', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 16.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 21.0, - }), - 'zone_id': '3432580', - }), - 'supported_features': , - 'temperature': 16.0, - }), - 'context': , - 'entity_id': 'climate.main_bedroom', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[default][climate.main_room-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.0, - 'friendly_name': 'Main Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'permanent', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'PermanentOverride', - 
'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.0, - }), - 'zone_id': '3432576', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.main_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[default][climate.my_home-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.7, - 'friendly_name': 'My Home', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:thermostat', - 'max_temp': 35, - 'min_temp': 7, - 'preset_mode': 'eco', - 'preset_modes': list([ - 'Reset', - 'eco', - 'away', - 'home', - 'Custom', - ]), - 'status': dict({ - 'active_system_faults': list([ - ]), - 'system_id': '3432522', - 'system_mode_status': dict({ - 'is_permanent': True, - 'mode': 'AutoWithEco', - }), - }), - 'supported_features': , - }), - 'context': , - 'entity_id': 'climate.my_home', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[default][climate.spare_room-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.5, - 'friendly_name': 'Spare Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'permanent', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'PermanentOverride', - 'target_heat_temperature': 14.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.5, - }), - 'zone_id': '3450733', - }), - 'supported_features': , - 'temperature': 14.0, - }), - 'context': , - 'entity_id': 'climate.spare_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[h032585][climate.my_home-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.5, - 'friendly_name': 'My Home', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:thermostat', - 'max_temp': 35, - 'min_temp': 7, - 'status': dict({ - 'active_system_faults': list([ - ]), - 'system_id': '416856', - 'system_mode_status': dict({ - 'is_permanent': True, - 'mode': 'Heat', - }), - }), - 'supported_features': , - }), - 'context': , - 'entity_id': 'climate.my_home', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[h032585][climate.thermostat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.5, - 'friendly_name': 'THERMOSTAT', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 32.0, - 'min_temp': 4.5, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 21.5, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 
'temperature_status': dict({ - 'is_available': True, - 'temperature': 21.5, - }), - 'zone_id': '416856', - }), - 'supported_features': , - 'temperature': 21.5, - }), - 'context': , - 'entity_id': 'climate.thermostat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[h099625][climate.my_home-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.5, - 'friendly_name': 'My Home', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:thermostat', - 'max_temp': 35, - 'min_temp': 7, - 'preset_mode': None, - 'preset_modes': list([ - 'eco', - 'away', - ]), - 'status': dict({ - 'active_system_faults': list([ - ]), - 'system_id': '8557535', - 'system_mode_status': dict({ - 'is_permanent': True, - 'mode': 'Auto', - }), - }), - 'supported_features': , - }), - 'context': , - 'entity_id': 'climate.my_home', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[h099625][climate.thermostat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.5, - 'friendly_name': 'THERMOSTAT', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 21.5, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+03:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+03:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 21.5, - }), - 'zone_id': '8557539', - }), - 'supported_features': , - 'temperature': 21.5, - }), - 'context': , - 'entity_id': 'climate.thermostat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[h099625][climate.thermostat_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.5, - 'friendly_name': 'THERMOSTAT', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 21.5, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+03:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+03:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 21.5, - }), - 'zone_id': '8557541', - }), - 'supported_features': , - 'temperature': 21.5, - }), - 'context': , - 'entity_id': 'climate.thermostat_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[minimal][climate.main_room-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.0, - 'friendly_name': 'Main Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 
'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.0, - }), - 'zone_id': '3432576', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.main_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[minimal][climate.my_home-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.0, - 'friendly_name': 'My Home', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:thermostat', - 'max_temp': 35, - 'min_temp': 7, - 'preset_mode': 'eco', - 'preset_modes': list([ - 'Reset', - 'eco', - 'away', - 'home', - 'Custom', - ]), - 'status': dict({ - 'active_system_faults': list([ - ]), - 'system_id': '3432522', - 'system_mode_status': dict({ - 'is_permanent': True, - 'mode': 'AutoWithEco', - }), - }), - 'supported_features': , - }), - 'context': , - 'entity_id': 'climate.my_home', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[sys_004][climate.living_room-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.5, - 'friendly_name': 'Living room', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:thermostat', - 'max_temp': 35, - 'min_temp': 7, - 'preset_mode': None, - 'preset_modes': list([ - 'eco', - 'away', - ]), - 'status': dict({ - 'active_system_faults': list([ - ]), - 'system_id': '4187769', - 'system_mode_status': dict({ - 'is_permanent': True, - 'mode': 'Auto', - }), - }), - 'supported_features': , - }), - 'context': , - 'entity_id': 'climate.living_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[sys_004][climate.thermostat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.5, - 'friendly_name': 'Thermostat', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'permanent', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'PermanentOverride', - 'target_heat_temperature': 15.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+02:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+02:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.5, - }), - 'zone_id': '4187768', - }), - 'supported_features': , - 'temperature': 15.0, - }), - 'context': , - 'entity_id': 'climate.thermostat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_zone_set_hvac_mode[default] - list([ - tuple( - 5.0, - ), - ]) -# --- -# name: test_zone_set_hvac_mode[h032585] - list([ - tuple( - 4.5, - ), - ]) -# --- -# name: test_zone_set_hvac_mode[h099625] - list([ - tuple( - 5.0, - ), - ]) -# --- -# name: test_zone_set_hvac_mode[minimal] - list([ - tuple( - 5.0, - ), - ]) -# --- -# name: test_zone_set_hvac_mode[sys_004] - list([ - tuple( - 5.0, - ), - ]) -# --- -# name: test_zone_set_preset_mode[default] - list([ - tuple( - 17.0, - ), - tuple( - 17.0, - ), - dict({ - 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), - }), - ]) -# --- -# name: test_zone_set_preset_mode[h032585] - list([ - tuple( - 21.5, - ), - tuple( - 21.5, - ), 
- dict({ - 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), - }), - ]) -# --- -# name: test_zone_set_preset_mode[h099625] - list([ - tuple( - 21.5, - ), - tuple( - 21.5, - ), - dict({ - 'until': datetime.datetime(2024, 7, 10, 19, 10, tzinfo=datetime.timezone.utc), - }), - ]) -# --- -# name: test_zone_set_preset_mode[minimal] - list([ - tuple( - 17.0, - ), - tuple( - 17.0, - ), - dict({ - 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), - }), - ]) -# --- -# name: test_zone_set_preset_mode[sys_004] - list([ - tuple( - 15.0, - ), - tuple( - 15.0, - ), - dict({ - 'until': datetime.datetime(2024, 7, 10, 20, 10, tzinfo=datetime.timezone.utc), - }), - ]) -# --- -# name: test_zone_set_temperature[default] - list([ - dict({ - 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), - }), - ]) -# --- -# name: test_zone_set_temperature[h032585] - list([ - dict({ - 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), - }), - ]) -# --- -# name: test_zone_set_temperature[h099625] - list([ - dict({ - 'until': datetime.datetime(2024, 7, 10, 19, 10, tzinfo=datetime.timezone.utc), - }), - ]) -# --- -# name: test_zone_set_temperature[minimal] - list([ - dict({ - 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), - }), - ]) -# --- -# name: test_zone_set_temperature[sys_004] - list([ - dict({ - 'until': None, - }), - ]) -# --- -# name: test_zone_turn_off[default] - list([ - tuple( - 5.0, - ), - ]) -# --- -# name: test_zone_turn_off[h032585] - list([ - tuple( - 4.5, - ), - ]) -# --- -# name: test_zone_turn_off[h099625] - list([ - tuple( - 5.0, - ), - ]) -# --- -# name: test_zone_turn_off[minimal] - list([ - tuple( - 5.0, - ), - ]) -# --- -# name: test_zone_turn_off[sys_004] - list([ - tuple( - 5.0, - ), - ]) -# --- diff --git a/tests/components/evohome/snapshots/test_init.ambr b/tests/components/evohome/snapshots/test_init.ambr deleted file mode 100644 index d2e91e3c43d..00000000000 --- a/tests/components/evohome/snapshots/test_init.ambr +++ /dev/null @@ -1,19 +0,0 @@ -# serializer version: 1 -# name: test_setup[botched] - dict_keys(['refresh_system', 'reset_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) -# --- -# name: test_setup[default] - dict_keys(['refresh_system', 'reset_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) -# --- -# name: test_setup[h032585] - dict_keys(['refresh_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) -# --- -# name: test_setup[h099625] - dict_keys(['refresh_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) -# --- -# name: test_setup[minimal] - dict_keys(['refresh_system', 'reset_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) -# --- -# name: test_setup[sys_004] - dict_keys(['refresh_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) -# --- diff --git a/tests/components/evohome/snapshots/test_water_heater.ambr b/tests/components/evohome/snapshots/test_water_heater.ambr deleted file mode 100644 index 4cdeb28f445..00000000000 --- a/tests/components/evohome/snapshots/test_water_heater.ambr +++ /dev/null @@ -1,105 +0,0 @@ -# serializer version: 1 -# name: test_set_operation_mode[default] - list([ - dict({ - 'until': datetime.datetime(2024, 7, 10, 12, 0, tzinfo=datetime.timezone.utc), - }), - dict({ - 'until': datetime.datetime(2024, 7, 10, 12, 0, tzinfo=datetime.timezone.utc), - }), - ]) -# --- -# name: 
test_setup_platform[botched][water_heater.domestic_hot_water-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'away_mode': 'on', - 'current_temperature': 23, - 'friendly_name': 'Domestic Hot Water', - 'icon': 'mdi:thermometer-lines', - 'max_temp': 60, - 'min_temp': 43, - 'operation_list': list([ - 'auto', - 'on', - 'off', - ]), - 'operation_mode': 'off', - 'status': dict({ - 'active_faults': list([ - ]), - 'dhw_id': '3933910', - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T13:00:00+01:00', - 'next_sp_state': 'Off', - 'this_sp_from': '2024-07-10T12:00:00+01:00', - 'this_sp_state': 'On', - }), - 'state_status': dict({ - 'mode': 'PermanentOverride', - 'state': 'Off', - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 23.0, - }), - }), - 'supported_features': , - 'target_temp_high': None, - 'target_temp_low': None, - 'temperature': None, - }), - 'context': , - 'entity_id': 'water_heater.domestic_hot_water', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_platform[default][water_heater.domestic_hot_water-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'away_mode': 'on', - 'current_temperature': 23, - 'friendly_name': 'Domestic Hot Water', - 'icon': 'mdi:thermometer-lines', - 'max_temp': 60, - 'min_temp': 43, - 'operation_list': list([ - 'auto', - 'on', - 'off', - ]), - 'operation_mode': 'off', - 'status': dict({ - 'active_faults': list([ - ]), - 'dhw_id': '3933910', - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T13:00:00+01:00', - 'next_sp_state': 'Off', - 'this_sp_from': '2024-07-10T12:00:00+01:00', - 'this_sp_state': 'On', - }), - 'state_status': dict({ - 'mode': 'PermanentOverride', - 'state': 'Off', - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 23.0, - }), - }), - 'supported_features': , - 'target_temp_high': None, - 'target_temp_low': None, - 'temperature': None, - }), - 'context': , - 'entity_id': 'water_heater.domestic_hot_water', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/evohome/test_climate.py b/tests/components/evohome/test_climate.py deleted file mode 100644 index 325dd914bc0..00000000000 --- a/tests/components/evohome/test_climate.py +++ /dev/null @@ -1,384 +0,0 @@ -"""The tests for the climate platform of evohome. - -All evohome systems have controllers and at least one zone. 
-""" - -from __future__ import annotations - -from unittest.mock import patch - -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.climate import ( - ATTR_HVAC_MODE, - ATTR_PRESET_MODE, - SERVICE_SET_HVAC_MODE, - SERVICE_SET_PRESET_MODE, - SERVICE_SET_TEMPERATURE, - HVACMode, -) -from homeassistant.const import ( - ATTR_ENTITY_ID, - ATTR_TEMPERATURE, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from .conftest import setup_evohome -from .const import TEST_INSTALLS - - -@pytest.mark.parametrize("install", [*TEST_INSTALLS, "botched"]) -async def test_setup_platform( - hass: HomeAssistant, - config: dict[str, str], - install: str, - snapshot: SnapshotAssertion, - freezer: FrozenDateTimeFactory, -) -> None: - """Test entities and their states after setup of evohome.""" - - # Cannot use the evohome fixture, as need to set dtm first - # - some extended state attrs are relative the current time - freezer.move_to("2024-07-10T12:00:00Z") - - async for _ in setup_evohome(hass, config, install=install): - pass - - for x in hass.states.async_all(Platform.CLIMATE): - assert x == snapshot(name=f"{x.entity_id}-state") - - -@pytest.mark.parametrize("install", TEST_INSTALLS) -async def test_ctl_set_hvac_mode( - hass: HomeAssistant, - ctl_id: str, - snapshot: SnapshotAssertion, -) -> None: - """Test SERVICE_SET_HVAC_MODE of an evohome controller.""" - - results = [] - - # SERVICE_SET_HVAC_MODE: HVACMode.OFF - with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: - await hass.services.async_call( - Platform.CLIMATE, - SERVICE_SET_HVAC_MODE, - { - ATTR_ENTITY_ID: ctl_id, - ATTR_HVAC_MODE: HVACMode.OFF, - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args != () # 'HeatingOff' or 'Off' - assert mock_fcn.await_args.kwargs == {"until": None} - - results.append(mock_fcn.await_args.args) - - # SERVICE_SET_HVAC_MODE: HVACMode.HEAT - with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: - await hass.services.async_call( - Platform.CLIMATE, - SERVICE_SET_HVAC_MODE, - { - ATTR_ENTITY_ID: ctl_id, - ATTR_HVAC_MODE: HVACMode.HEAT, - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args != () # 'Auto' or 'Heat' - assert mock_fcn.await_args.kwargs == {"until": None} - - results.append(mock_fcn.await_args.args) - - assert results == snapshot - - -@pytest.mark.parametrize("install", TEST_INSTALLS) -async def test_ctl_set_temperature( - hass: HomeAssistant, - ctl_id: str, -) -> None: - """Test SERVICE_SET_TEMPERATURE of an evohome controller.""" - - # Entity climate.xxx does not support this service - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - Platform.CLIMATE, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: ctl_id, - ATTR_TEMPERATURE: 19.1, - }, - blocking=True, - ) - - -@pytest.mark.parametrize("install", TEST_INSTALLS) -async def test_ctl_turn_off( - hass: HomeAssistant, - ctl_id: str, - snapshot: SnapshotAssertion, -) -> None: - """Test SERVICE_TURN_OFF of an evohome controller.""" - - results = [] - - # SERVICE_TURN_OFF - with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: - await hass.services.async_call( - Platform.CLIMATE, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: ctl_id, - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - 
assert mock_fcn.await_args.args != () # 'HeatingOff' or 'Off' - assert mock_fcn.await_args.kwargs == {"until": None} - - results.append(mock_fcn.await_args.args) - - assert results == snapshot - - -@pytest.mark.parametrize("install", TEST_INSTALLS) -async def test_ctl_turn_on( - hass: HomeAssistant, - ctl_id: str, - snapshot: SnapshotAssertion, -) -> None: - """Test SERVICE_TURN_ON of an evohome controller.""" - - results = [] - - # SERVICE_TURN_ON - with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: - await hass.services.async_call( - Platform.CLIMATE, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: ctl_id, - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args != () # 'Auto' or 'Heat' - assert mock_fcn.await_args.kwargs == {"until": None} - - results.append(mock_fcn.await_args.args) - - assert results == snapshot - - -@pytest.mark.parametrize("install", TEST_INSTALLS) -async def test_zone_set_hvac_mode( - hass: HomeAssistant, - zone_id: str, - snapshot: SnapshotAssertion, -) -> None: - """Test SERVICE_SET_HVAC_MODE of an evohome heating zone.""" - - results = [] - - # SERVICE_SET_HVAC_MODE: HVACMode.HEAT - with patch("evohomeasync2.zone.Zone.reset_mode") as mock_fcn: - await hass.services.async_call( - Platform.CLIMATE, - SERVICE_SET_HVAC_MODE, - { - ATTR_ENTITY_ID: zone_id, - ATTR_HVAC_MODE: HVACMode.HEAT, - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == () - assert mock_fcn.await_args.kwargs == {} - - # SERVICE_SET_HVAC_MODE: HVACMode.OFF - with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: - await hass.services.async_call( - Platform.CLIMATE, - SERVICE_SET_HVAC_MODE, - { - ATTR_ENTITY_ID: zone_id, - ATTR_HVAC_MODE: HVACMode.OFF, - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args != () # minimum target temp - assert mock_fcn.await_args.kwargs == {"until": None} - - results.append(mock_fcn.await_args.args) - - assert results == snapshot - - -@pytest.mark.parametrize("install", TEST_INSTALLS) -async def test_zone_set_preset_mode( - hass: HomeAssistant, - zone_id: str, - freezer: FrozenDateTimeFactory, - snapshot: SnapshotAssertion, -) -> None: - """Test SERVICE_SET_PRESET_MODE of an evohome heating zone.""" - - freezer.move_to("2024-07-10T12:00:00Z") - results = [] - - # SERVICE_SET_PRESET_MODE: none - with patch("evohomeasync2.zone.Zone.reset_mode") as mock_fcn: - await hass.services.async_call( - Platform.CLIMATE, - SERVICE_SET_PRESET_MODE, - { - ATTR_ENTITY_ID: zone_id, - ATTR_PRESET_MODE: "none", - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == () - assert mock_fcn.await_args.kwargs == {} - - # SERVICE_SET_PRESET_MODE: permanent - with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: - await hass.services.async_call( - Platform.CLIMATE, - SERVICE_SET_PRESET_MODE, - { - ATTR_ENTITY_ID: zone_id, - ATTR_PRESET_MODE: "permanent", - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args != () # current target temp - assert mock_fcn.await_args.kwargs == {"until": None} - - results.append(mock_fcn.await_args.args) - - # SERVICE_SET_PRESET_MODE: temporary - with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: - await hass.services.async_call( - Platform.CLIMATE, - SERVICE_SET_PRESET_MODE, - { - ATTR_ENTITY_ID: zone_id, - ATTR_PRESET_MODE: "temporary", - }, - blocking=True, - ) - - assert 
mock_fcn.await_count == 1 - assert mock_fcn.await_args.args != () # current target temp - assert mock_fcn.await_args.kwargs != {} # next setpoint dtm - - results.append(mock_fcn.await_args.args) - results.append(mock_fcn.await_args.kwargs) - - assert results == snapshot - - -@pytest.mark.parametrize("install", TEST_INSTALLS) -async def test_zone_set_temperature( - hass: HomeAssistant, - zone_id: str, - freezer: FrozenDateTimeFactory, - snapshot: SnapshotAssertion, -) -> None: - """Test SERVICE_SET_TEMPERATURE of an evohome heating zone.""" - - freezer.move_to("2024-07-10T12:00:00Z") - results = [] - - # SERVICE_SET_TEMPERATURE: temperature - with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: - await hass.services.async_call( - Platform.CLIMATE, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: zone_id, - ATTR_TEMPERATURE: 19.1, - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == (19.1,) - assert mock_fcn.await_args.kwargs != {} # next setpoint dtm - - results.append(mock_fcn.await_args.kwargs) - - assert results == snapshot - - -@pytest.mark.parametrize("install", TEST_INSTALLS) -async def test_zone_turn_off( - hass: HomeAssistant, - zone_id: str, - snapshot: SnapshotAssertion, -) -> None: - """Test SERVICE_TURN_OFF of an evohome heating zone.""" - - results = [] - - # SERVICE_TURN_OFF - with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: - await hass.services.async_call( - Platform.CLIMATE, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: zone_id, - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args != () # minimum target temp - assert mock_fcn.await_args.kwargs == {"until": None} - - results.append(mock_fcn.await_args.args) - - assert results == snapshot - - -@pytest.mark.parametrize("install", TEST_INSTALLS) -async def test_zone_turn_on( - hass: HomeAssistant, - zone_id: str, -) -> None: - """Test SERVICE_TURN_ON of an evohome heating zone.""" - - # SERVICE_TURN_ON - with patch("evohomeasync2.zone.Zone.reset_mode") as mock_fcn: - await hass.services.async_call( - Platform.CLIMATE, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: zone_id, - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == () - assert mock_fcn.await_args.kwargs == {} diff --git a/tests/components/evohome/test_init.py b/tests/components/evohome/test_init.py deleted file mode 100644 index 49a854016ea..00000000000 --- a/tests/components/evohome/test_init.py +++ /dev/null @@ -1,182 +0,0 @@ -"""The tests for evohome.""" - -from __future__ import annotations - -from http import HTTPStatus -import logging -from unittest.mock import patch - -from evohomeasync2 import EvohomeClient, exceptions as exc -from evohomeasync2.broker import _ERR_MSG_LOOKUP_AUTH, _ERR_MSG_LOOKUP_BASE -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.evohome import DOMAIN, EvoService -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -from .const import TEST_INSTALLS - -SETUP_FAILED_ANTICIPATED = ( - "homeassistant.setup", - logging.ERROR, - "Setup failed for 'evohome': Integration failed to initialize.", -) -SETUP_FAILED_UNEXPECTED = ( - "homeassistant.setup", - logging.ERROR, - "Error during setup of component evohome", -) -AUTHENTICATION_FAILED = ( - "homeassistant.components.evohome.helpers", - logging.ERROR, - "Failed to authenticate with the vendor's server. Check your username" - " and password. 
NB: Some special password characters that work" - " correctly via the website will not work via the web API. Message" - " is: ", -) -REQUEST_FAILED_NONE = ( - "homeassistant.components.evohome.helpers", - logging.WARNING, - "Unable to connect with the vendor's server. " - "Check your network and the vendor's service status page. " - "Message is: ", -) -REQUEST_FAILED_503 = ( - "homeassistant.components.evohome.helpers", - logging.WARNING, - "The vendor says their server is currently unavailable. " - "Check the vendor's service status page", -) -REQUEST_FAILED_429 = ( - "homeassistant.components.evohome.helpers", - logging.WARNING, - "The vendor's API rate limit has been exceeded. " - "If this message persists, consider increasing the scan_interval", -) - -REQUEST_FAILED_LOOKUP = { - None: [ - REQUEST_FAILED_NONE, - SETUP_FAILED_ANTICIPATED, - ], - HTTPStatus.SERVICE_UNAVAILABLE: [ - REQUEST_FAILED_503, - SETUP_FAILED_ANTICIPATED, - ], - HTTPStatus.TOO_MANY_REQUESTS: [ - REQUEST_FAILED_429, - SETUP_FAILED_ANTICIPATED, - ], -} - - -@pytest.mark.parametrize( - "status", [*sorted([*_ERR_MSG_LOOKUP_AUTH, HTTPStatus.BAD_GATEWAY]), None] -) -async def test_authentication_failure_v2( - hass: HomeAssistant, - config: dict[str, str], - status: HTTPStatus, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test failure to setup an evohome-compatible system. - - In this instance, the failure occurs in the v2 API. - """ - - with patch("evohomeasync2.broker.Broker.get") as mock_fcn: - mock_fcn.side_effect = exc.AuthenticationFailed("", status=status) - - with caplog.at_level(logging.WARNING): - result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) - - assert result is False - - assert caplog.record_tuples == [ - AUTHENTICATION_FAILED, - SETUP_FAILED_ANTICIPATED, - ] - - -@pytest.mark.parametrize( - "status", [*sorted([*_ERR_MSG_LOOKUP_BASE, HTTPStatus.BAD_GATEWAY]), None] -) -async def test_client_request_failure_v2( - hass: HomeAssistant, - config: dict[str, str], - status: HTTPStatus, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test failure to setup an evohome-compatible system. - - In this instance, the failure occurs in the v2 API. - """ - - with patch("evohomeasync2.broker.Broker.get") as mock_fcn: - mock_fcn.side_effect = exc.RequestFailed("", status=status) - - with caplog.at_level(logging.WARNING): - result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) - - assert result is False - - assert caplog.record_tuples == REQUEST_FAILED_LOOKUP.get( - status, [SETUP_FAILED_UNEXPECTED] - ) - - -@pytest.mark.parametrize("install", [*TEST_INSTALLS, "botched"]) -async def test_setup( - hass: HomeAssistant, - evohome: EvohomeClient, - snapshot: SnapshotAssertion, -) -> None: - """Test services after setup of evohome. - - Registered services vary by the type of system. 
- """ - - assert hass.services.async_services_for_domain(DOMAIN).keys() == snapshot - - -@pytest.mark.parametrize("install", ["default"]) -async def test_service_refresh_system( - hass: HomeAssistant, - evohome: EvohomeClient, -) -> None: - """Test EvoService.REFRESH_SYSTEM of an evohome system.""" - - # EvoService.REFRESH_SYSTEM - with patch("evohomeasync2.location.Location.refresh_status") as mock_fcn: - await hass.services.async_call( - DOMAIN, - EvoService.REFRESH_SYSTEM, - {}, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == () - assert mock_fcn.await_args.kwargs == {} - - -@pytest.mark.parametrize("install", ["default"]) -async def test_service_reset_system( - hass: HomeAssistant, - evohome: EvohomeClient, -) -> None: - """Test EvoService.RESET_SYSTEM of an evohome system.""" - - # EvoService.RESET_SYSTEM (if SZ_AUTO_WITH_RESET in modes) - with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: - await hass.services.async_call( - DOMAIN, - EvoService.RESET_SYSTEM, - {}, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == ("AutoWithReset",) - assert mock_fcn.await_args.kwargs == {"until": None} diff --git a/tests/components/evohome/test_storage.py b/tests/components/evohome/test_storage.py deleted file mode 100644 index 4cc21078333..00000000000 --- a/tests/components/evohome/test_storage.py +++ /dev/null @@ -1,213 +0,0 @@ -"""The tests for evohome storage load & save.""" - -from __future__ import annotations - -from datetime import datetime, timedelta -from typing import Any, Final, NotRequired, TypedDict - -import pytest - -from homeassistant.components.evohome import ( - CONF_USERNAME, - DOMAIN, - STORAGE_KEY, - STORAGE_VER, - dt_aware_to_naive, -) -from homeassistant.core import HomeAssistant -import homeassistant.util.dt as dt_util - -from .conftest import setup_evohome -from .const import ACCESS_TOKEN, REFRESH_TOKEN, SESSION_ID, USERNAME - - -class _SessionDataT(TypedDict): - sessionId: str - - -class _TokenStoreT(TypedDict): - username: str - refresh_token: str - access_token: str - access_token_expires: str # 2024-07-27T23:57:30+01:00 - user_data: NotRequired[_SessionDataT] - - -class _EmptyStoreT(TypedDict): - pass - - -SZ_USERNAME: Final = "username" -SZ_REFRESH_TOKEN: Final = "refresh_token" -SZ_ACCESS_TOKEN: Final = "access_token" -SZ_ACCESS_TOKEN_EXPIRES: Final = "access_token_expires" -SZ_USER_DATA: Final = "user_data" - - -def dt_pair(dt_dtm: datetime) -> tuple[datetime, str]: - """Return a datetime without milliseconds and its string representation.""" - dt_str = dt_dtm.isoformat(timespec="seconds") # e.g. 
2024-07-28T00:57:29+01:00 - return dt_util.parse_datetime(dt_str, raise_on_error=True), dt_str - - -ACCESS_TOKEN_EXP_DTM, ACCESS_TOKEN_EXP_STR = dt_pair(dt_util.now() + timedelta(hours=1)) - -USERNAME_DIFF: Final = f"not_{USERNAME}" -USERNAME_SAME: Final = USERNAME - -_TEST_STORAGE_BASE: Final[_TokenStoreT] = { - SZ_USERNAME: USERNAME_SAME, - SZ_REFRESH_TOKEN: REFRESH_TOKEN, - SZ_ACCESS_TOKEN: ACCESS_TOKEN, - SZ_ACCESS_TOKEN_EXPIRES: ACCESS_TOKEN_EXP_STR, -} - -TEST_STORAGE_DATA: Final[dict[str, _TokenStoreT]] = { - "sans_session_id": _TEST_STORAGE_BASE, - "null_session_id": _TEST_STORAGE_BASE | {SZ_USER_DATA: None}, # type: ignore[dict-item] - "with_session_id": _TEST_STORAGE_BASE | {SZ_USER_DATA: {"sessionId": SESSION_ID}}, -} - -TEST_STORAGE_NULL: Final[dict[str, _EmptyStoreT | None]] = { - "store_is_absent": None, - "store_was_reset": {}, -} - -DOMAIN_STORAGE_BASE: Final = { - "version": STORAGE_VER, - "minor_version": 1, - "key": STORAGE_KEY, -} - - -@pytest.mark.parametrize("install", ["minimal"]) -@pytest.mark.parametrize("idx", TEST_STORAGE_NULL) -async def test_auth_tokens_null( - hass: HomeAssistant, - hass_storage: dict[str, Any], - config: dict[str, str], - idx: str, - install: str, -) -> None: - """Test loading/saving authentication tokens when no cached tokens in the store.""" - - hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_STORAGE_NULL[idx]} - - async for mock_client in setup_evohome(hass, config, install=install): - # Confirm client was instantiated without tokens, as cache was empty... - assert SZ_REFRESH_TOKEN not in mock_client.call_args.kwargs - assert SZ_ACCESS_TOKEN not in mock_client.call_args.kwargs - assert SZ_ACCESS_TOKEN_EXPIRES not in mock_client.call_args.kwarg - - # Confirm the expected tokens were cached to storage... - data: _TokenStoreT = hass_storage[DOMAIN]["data"] - - assert data[SZ_USERNAME] == USERNAME_SAME - assert data[SZ_REFRESH_TOKEN] == f"new_{REFRESH_TOKEN}" - assert data[SZ_ACCESS_TOKEN] == f"new_{ACCESS_TOKEN}" - assert ( - dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES], raise_on_error=True) - > dt_util.now() - ) - - -@pytest.mark.parametrize("install", ["minimal"]) -@pytest.mark.parametrize("idx", TEST_STORAGE_DATA) -async def test_auth_tokens_same( - hass: HomeAssistant, - hass_storage: dict[str, Any], - config: dict[str, str], - idx: str, - install: str, -) -> None: - """Test loading/saving authentication tokens when matching username.""" - - hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_STORAGE_DATA[idx]} - - async for mock_client in setup_evohome(hass, config, install=install): - # Confirm client was instantiated with the cached tokens... - assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN - assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN] == ACCESS_TOKEN - assert mock_client.call_args.kwargs[ - SZ_ACCESS_TOKEN_EXPIRES - ] == dt_aware_to_naive(ACCESS_TOKEN_EXP_DTM) - - # Confirm the expected tokens were cached to storage... 
- data: _TokenStoreT = hass_storage[DOMAIN]["data"] - - assert data[SZ_USERNAME] == USERNAME_SAME - assert data[SZ_REFRESH_TOKEN] == REFRESH_TOKEN - assert data[SZ_ACCESS_TOKEN] == ACCESS_TOKEN - assert dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES]) == ACCESS_TOKEN_EXP_DTM - - -@pytest.mark.parametrize("install", ["minimal"]) -@pytest.mark.parametrize("idx", TEST_STORAGE_DATA) -async def test_auth_tokens_past( - hass: HomeAssistant, - hass_storage: dict[str, Any], - config: dict[str, str], - idx: str, - install: str, -) -> None: - """Test loading/saving authentication tokens with matching username, but expired.""" - - dt_dtm, dt_str = dt_pair(dt_util.now() - timedelta(hours=1)) - - # make this access token have expired in the past... - test_data = TEST_STORAGE_DATA[idx].copy() # shallow copy is OK here - test_data[SZ_ACCESS_TOKEN_EXPIRES] = dt_str - - hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": test_data} - - async for mock_client in setup_evohome(hass, config, install=install): - # Confirm client was instantiated with the cached tokens... - assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN - assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN] == ACCESS_TOKEN - assert mock_client.call_args.kwargs[ - SZ_ACCESS_TOKEN_EXPIRES - ] == dt_aware_to_naive(dt_dtm) - - # Confirm the expected tokens were cached to storage... - data: _TokenStoreT = hass_storage[DOMAIN]["data"] - - assert data[SZ_USERNAME] == USERNAME_SAME - assert data[SZ_REFRESH_TOKEN] == REFRESH_TOKEN - assert data[SZ_ACCESS_TOKEN] == f"new_{ACCESS_TOKEN}" - assert ( - dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES], raise_on_error=True) - > dt_util.now() - ) - - -@pytest.mark.parametrize("install", ["minimal"]) -@pytest.mark.parametrize("idx", TEST_STORAGE_DATA) -async def test_auth_tokens_diff( - hass: HomeAssistant, - hass_storage: dict[str, Any], - config: dict[str, str], - idx: str, - install: str, -) -> None: - """Test loading/saving authentication tokens when unmatched username.""" - - hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_STORAGE_DATA[idx]} - - async for mock_client in setup_evohome( - hass, config | {CONF_USERNAME: USERNAME_DIFF}, install=install - ): - # Confirm client was instantiated without tokens, as username was different... - assert SZ_REFRESH_TOKEN not in mock_client.call_args.kwargs - assert SZ_ACCESS_TOKEN not in mock_client.call_args.kwargs - assert SZ_ACCESS_TOKEN_EXPIRES not in mock_client.call_args.kwarg - - # Confirm the expected tokens were cached to storage... - data: _TokenStoreT = hass_storage[DOMAIN]["data"] - - assert data[SZ_USERNAME] == USERNAME_DIFF - assert data[SZ_REFRESH_TOKEN] == f"new_{REFRESH_TOKEN}" - assert data[SZ_ACCESS_TOKEN] == f"new_{ACCESS_TOKEN}" - assert ( - dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES], raise_on_error=True) - > dt_util.now() - ) diff --git a/tests/components/evohome/test_water_heater.py b/tests/components/evohome/test_water_heater.py deleted file mode 100644 index 8acfd469b59..00000000000 --- a/tests/components/evohome/test_water_heater.py +++ /dev/null @@ -1,190 +0,0 @@ -"""The tests for the water_heater platform of evohome. - -Not all evohome systems will have a DHW zone. 
-""" - -from __future__ import annotations - -from unittest.mock import patch - -from evohomeasync2 import EvohomeClient -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.water_heater import ( - ATTR_AWAY_MODE, - ATTR_OPERATION_MODE, - SERVICE_SET_AWAY_MODE, - SERVICE_SET_OPERATION_MODE, -) -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from .conftest import setup_evohome -from .const import TEST_INSTALLS_WITH_DHW - -DHW_ENTITY_ID = "water_heater.domestic_hot_water" - - -@pytest.mark.parametrize("install", [*TEST_INSTALLS_WITH_DHW, "botched"]) -async def test_setup_platform( - hass: HomeAssistant, - config: dict[str, str], - install: str, - snapshot: SnapshotAssertion, - freezer: FrozenDateTimeFactory, -) -> None: - """Test entities and their states after setup of evohome.""" - - # Cannot use the evohome fixture, as need to set dtm first - # - some extended state attrs are relative the current time - freezer.move_to("2024-07-10T12:00:00Z") - - async for _ in setup_evohome(hass, config, install=install): - pass - - for x in hass.states.async_all(Platform.WATER_HEATER): - assert x == snapshot(name=f"{x.entity_id}-state") - - -@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW) -async def test_set_operation_mode( - hass: HomeAssistant, - evohome: EvohomeClient, - freezer: FrozenDateTimeFactory, - snapshot: SnapshotAssertion, -) -> None: - """Test SERVICE_SET_OPERATION_MODE of an evohome DHW zone.""" - - freezer.move_to("2024-07-10T11:55:00Z") - results = [] - - # SERVICE_SET_OPERATION_MODE: auto - with patch("evohomeasync2.hotwater.HotWater.reset_mode") as mock_fcn: - await hass.services.async_call( - Platform.WATER_HEATER, - SERVICE_SET_OPERATION_MODE, - { - ATTR_ENTITY_ID: DHW_ENTITY_ID, - ATTR_OPERATION_MODE: "auto", - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == () - assert mock_fcn.await_args.kwargs == {} - - # SERVICE_SET_OPERATION_MODE: off (until next scheduled setpoint) - with patch("evohomeasync2.hotwater.HotWater.set_off") as mock_fcn: - await hass.services.async_call( - Platform.WATER_HEATER, - SERVICE_SET_OPERATION_MODE, - { - ATTR_ENTITY_ID: DHW_ENTITY_ID, - ATTR_OPERATION_MODE: "off", - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == () - assert mock_fcn.await_args.kwargs != {} - - results.append(mock_fcn.await_args.kwargs) - - # SERVICE_SET_OPERATION_MODE: on (until next scheduled setpoint) - with patch("evohomeasync2.hotwater.HotWater.set_on") as mock_fcn: - await hass.services.async_call( - Platform.WATER_HEATER, - SERVICE_SET_OPERATION_MODE, - { - ATTR_ENTITY_ID: DHW_ENTITY_ID, - ATTR_OPERATION_MODE: "on", - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == () - assert mock_fcn.await_args.kwargs != {} - - results.append(mock_fcn.await_args.kwargs) - - assert results == snapshot - - -@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW) -async def test_set_away_mode(hass: HomeAssistant, evohome: EvohomeClient) -> None: - """Test SERVICE_SET_AWAY_MODE of an evohome DHW zone.""" - - # set_away_mode: off - with patch("evohomeasync2.hotwater.HotWater.reset_mode") as mock_fcn: - await hass.services.async_call( - Platform.WATER_HEATER, - SERVICE_SET_AWAY_MODE, - { - 
ATTR_ENTITY_ID: DHW_ENTITY_ID, - ATTR_AWAY_MODE: "off", - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == () - assert mock_fcn.await_args.kwargs == {} - - # set_away_mode: on - with patch("evohomeasync2.hotwater.HotWater.set_off") as mock_fcn: - await hass.services.async_call( - Platform.WATER_HEATER, - SERVICE_SET_AWAY_MODE, - { - ATTR_ENTITY_ID: DHW_ENTITY_ID, - ATTR_AWAY_MODE: "on", - }, - blocking=True, - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == () - assert mock_fcn.await_args.kwargs == {} - - -@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW) -async def test_turn_off(hass: HomeAssistant, evohome: EvohomeClient) -> None: - """Test SERVICE_TURN_OFF of an evohome DHW zone.""" - - # Entity water_heater.xxx does not support this service - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - Platform.WATER_HEATER, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: DHW_ENTITY_ID, - }, - blocking=True, - ) - - -@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW) -async def test_turn_on(hass: HomeAssistant, evohome: EvohomeClient) -> None: - """Test SERVICE_TURN_ON of an evohome DHW zone.""" - - # Entity water_heater.xxx does not support this service - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - Platform.WATER_HEATER, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: DHW_ENTITY_ID, - }, - blocking=True, - ) diff --git a/tests/components/ezviz/__init__.py b/tests/components/ezviz/__init__.py index 78bbee0b0ad..9fc297be099 100644 --- a/tests/components/ezviz/__init__.py +++ b/tests/components/ezviz/__init__.py @@ -1,6 +1,6 @@ """Tests for the EZVIZ integration.""" -from unittest.mock import _patch, patch +from unittest.mock import patch from homeassistant.components.ezviz.const import ( ATTR_SERIAL, @@ -83,11 +83,10 @@ API_LOGIN_RETURN_VALIDATE = { } -def patch_async_setup_entry() -> _patch: - """Patch async_setup_entry.""" +def _patch_async_setup_entry(return_value=True): return patch( "homeassistant.components.ezviz.async_setup_entry", - return_value=True, + return_value=return_value, ) diff --git a/tests/components/ezviz/conftest.py b/tests/components/ezviz/conftest.py index 171cfffc2fc..10fd0406a1c 100644 --- a/tests/components/ezviz/conftest.py +++ b/tests/components/ezviz/conftest.py @@ -1,14 +1,11 @@ """Define pytest.fixtures available for all tests.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch from pyezviz import EzvizClient from pyezviz.test_cam_rtsp import TestRTSPAuth import pytest -from homeassistant.core import HomeAssistant - ezviz_login_token_return = { "session_id": "fake_token", "rf_session_id": "fake_rf_token", @@ -17,13 +14,13 @@ ezviz_login_token_return = { @pytest.fixture(autouse=True) -def mock_ffmpeg(hass: HomeAssistant) -> None: +def mock_ffmpeg(hass): """Mock ffmpeg is loaded.""" hass.config.components.add("ffmpeg") @pytest.fixture -def ezviz_test_rtsp_config_flow() -> Generator[MagicMock]: +def ezviz_test_rtsp_config_flow(hass): """Mock the EzvizApi for easier testing.""" with ( patch.object(TestRTSPAuth, "main", return_value=True), @@ -43,7 +40,7 @@ def ezviz_test_rtsp_config_flow() -> Generator[MagicMock]: @pytest.fixture -def ezviz_config_flow() -> Generator[MagicMock]: +def ezviz_config_flow(hass): """Mock the EzvizAPI for easier config flow testing.""" with ( patch.object(EzvizClient, "login", return_value=True), diff --git a/tests/components/ezviz/test_config_flow.py 
b/tests/components/ezviz/test_config_flow.py index 63499996c89..57c3ae0600e 100644 --- a/tests/components/ezviz/test_config_flow.py +++ b/tests/components/ezviz/test_config_flow.py @@ -1,6 +1,6 @@ """Test the EZVIZ config flow.""" -from unittest.mock import MagicMock, patch +from unittest.mock import patch from pyezviz.exceptions import ( AuthTestResultFailed, @@ -10,7 +10,6 @@ from pyezviz.exceptions import ( InvalidURL, PyEzvizError, ) -import pytest from homeassistant.components.ezviz.const import ( ATTR_SERIAL, @@ -20,7 +19,11 @@ from homeassistant.components.ezviz.const import ( DEFAULT_TIMEOUT, DOMAIN, ) -from homeassistant.config_entries import SOURCE_INTEGRATION_DISCOVERY, SOURCE_USER +from homeassistant.config_entries import ( + SOURCE_INTEGRATION_DISCOVERY, + SOURCE_REAUTH, + SOURCE_USER, +) from homeassistant.const import ( CONF_CUSTOMIZE, CONF_IP_ADDRESS, @@ -37,15 +40,12 @@ from . import ( API_LOGIN_RETURN_VALIDATE, DISCOVERY_INFO, USER_INPUT_VALIDATE, + _patch_async_setup_entry, init_integration, - patch_async_setup_entry, ) -from tests.common import MockConfigEntry, start_reauth_flow - -@pytest.mark.usefixtures("ezviz_config_flow") -async def test_user_form(hass: HomeAssistant) -> None: +async def test_user_form(hass: HomeAssistant, ezviz_config_flow) -> None: """Test the user initiated form.""" result = await hass.config_entries.flow.async_init( @@ -55,7 +55,7 @@ async def test_user_form(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - with patch_async_setup_entry() as mock_setup_entry: + with _patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], USER_INPUT_VALIDATE, @@ -75,8 +75,7 @@ async def test_user_form(hass: HomeAssistant) -> None: assert result["reason"] == "already_configured_account" -@pytest.mark.usefixtures("ezviz_config_flow") -async def test_user_custom_url(hass: HomeAssistant) -> None: +async def test_user_custom_url(hass: HomeAssistant, ezviz_config_flow) -> None: """Test custom url step.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -95,7 +94,7 @@ async def test_user_custom_url(hass: HomeAssistant) -> None: assert result["step_id"] == "user_custom_url" assert result["errors"] == {} - with patch_async_setup_entry() as mock_setup_entry: + with _patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_URL: "test-user"}, @@ -108,8 +107,7 @@ async def test_user_custom_url(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.usefixtures("ezviz_config_flow") -async def test_async_step_reauth(hass: HomeAssistant) -> None: +async def test_async_step_reauth(hass: HomeAssistant, ezviz_config_flow) -> None: """Test the reauth step.""" result = await hass.config_entries.flow.async_init( @@ -119,7 +117,7 @@ async def test_async_step_reauth(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - with patch_async_setup_entry() as mock_setup_entry: + with _patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], USER_INPUT_VALIDATE, @@ -132,8 +130,9 @@ async def test_async_step_reauth(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 - new_entry = hass.config_entries.async_entries(DOMAIN)[0] - result = await start_reauth_flow(hass, new_entry) + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_REAUTH}, data=USER_INPUT_VALIDATE + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -179,16 +178,16 @@ async def test_step_discovery_abort_if_cloud_account_missing( async def test_step_reauth_abort_if_cloud_account_missing(hass: HomeAssistant) -> None: """Test reauth and confirm step, abort if cloud account was removed.""" - entry = MockConfigEntry(domain=DOMAIN, data=USER_INPUT_VALIDATE) - entry.add_to_hass(hass) - - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_REAUTH}, data=USER_INPUT_VALIDATE + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "ezviz_cloud_account_missing" -@pytest.mark.usefixtures("ezviz_config_flow", "ezviz_test_rtsp_config_flow") -async def test_async_step_integration_discovery(hass: HomeAssistant) -> None: +async def test_async_step_integration_discovery( + hass: HomeAssistant, ezviz_config_flow, ezviz_test_rtsp_config_flow +) -> None: """Test discovery and confirm step.""" with patch("homeassistant.components.ezviz.PLATFORMS_BY_TYPE", []): await init_integration(hass) @@ -200,7 +199,7 @@ async def test_async_step_integration_discovery(hass: HomeAssistant) -> None: assert result["step_id"] == "confirm" assert result["errors"] == {} - with patch_async_setup_entry() as mock_setup_entry: + with _patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -222,7 +221,7 @@ async def test_async_step_integration_discovery(hass: HomeAssistant) -> None: async def test_options_flow(hass: HomeAssistant) -> None: """Test updating options.""" - with patch_async_setup_entry() as mock_setup_entry: + with _patch_async_setup_entry() as mock_setup_entry: entry = await init_integration(hass) assert entry.options[CONF_FFMPEG_ARGUMENTS] == DEFAULT_FFMPEG_ARGUMENTS @@ -246,9 +245,7 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_user_form_exception( - hass: HomeAssistant, ezviz_config_flow: MagicMock -) -> None: +async def test_user_form_exception(hass: HomeAssistant, ezviz_config_flow) -> None: """Test we handle exception on user form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -314,7 +311,7 @@ async def test_user_form_exception( async def test_discover_exception_step1( hass: HomeAssistant, - ezviz_config_flow: MagicMock, + ezviz_config_flow, ) -> None: """Test we handle unexpected exception on discovery.""" with patch("homeassistant.components.ezviz.PLATFORMS_BY_TYPE", []): @@ -400,9 +397,10 @@ async def test_discover_exception_step1( assert result["reason"] == "unknown" -@pytest.mark.usefixtures("ezviz_config_flow") async def test_discover_exception_step3( - hass: HomeAssistant, ezviz_test_rtsp_config_flow: MagicMock + hass: HomeAssistant, + ezviz_config_flow, + ezviz_test_rtsp_config_flow, ) -> None: """Test we handle unexpected exception on discovery.""" with patch("homeassistant.components.ezviz.PLATFORMS_BY_TYPE", []): @@ -461,7 +459,7 @@ async def test_discover_exception_step3( async def test_user_custom_url_exception( - hass: HomeAssistant, ezviz_config_flow: MagicMock + hass: HomeAssistant, ezviz_config_flow ) -> None: """Test we handle unexpected exception.""" ezviz_config_flow.side_effect = PyEzvizError() @@ -536,7 +534,7 
@@ async def test_user_custom_url_exception( async def test_async_step_reauth_exception( - hass: HomeAssistant, ezviz_config_flow: MagicMock + hass: HomeAssistant, ezviz_config_flow ) -> None: """Test the reauth step exceptions.""" @@ -547,7 +545,7 @@ async def test_async_step_reauth_exception( assert result["step_id"] == "user" assert result["errors"] == {} - with patch_async_setup_entry() as mock_setup_entry: + with _patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], USER_INPUT_VALIDATE, @@ -560,8 +558,9 @@ async def test_async_step_reauth_exception( assert len(mock_setup_entry.mock_calls) == 1 - new_entry = hass.config_entries.async_entries(DOMAIN)[0] - result = await start_reauth_flow(hass, new_entry) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_REAUTH}, data=USER_INPUT_VALIDATE + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} diff --git a/tests/components/fan/conftest.py b/tests/components/fan/conftest.py deleted file mode 100644 index 2e3644793df..00000000000 --- a/tests/components/fan/conftest.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Fixtures for Fan platform tests.""" - -from collections.abc import Generator - -import pytest - -from homeassistant.config_entries import ConfigFlow -from homeassistant.core import HomeAssistant - -from tests.common import mock_config_flow, mock_platform - - -class MockFlow(ConfigFlow): - """Test flow.""" - - -@pytest.fixture -def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: - """Mock config flow.""" - mock_platform(hass, "test.config_flow") - - with mock_config_flow("test", MockFlow): - yield diff --git a/tests/components/fan/test_device_condition.py b/tests/components/fan/test_device_condition.py index da48f3223af..9f9bde1a680 100644 --- a/tests/components/fan/test_device_condition.py +++ b/tests/components/fan/test_device_condition.py @@ -12,7 +12,11 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -20,6 +24,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -104,7 +114,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -174,22 +184,22 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_on - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_on - event - test_event1" 
hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "is_off - event - test_event2" + assert len(calls) == 2 + assert calls[1].data["some"] == "is_off - event - test_event2" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -236,5 +246,5 @@ async def test_if_state_legacy( ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_on - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_on - event - test_event1" diff --git a/tests/components/fan/test_device_trigger.py b/tests/components/fan/test_device_trigger.py index f4673636637..38f39376592 100644 --- a/tests/components/fan/test_device_trigger.py +++ b/tests/components/fan/test_device_trigger.py @@ -20,6 +20,7 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, ) @@ -28,6 +29,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -173,7 +180,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -266,8 +273,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is turning on. hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { + assert len(calls) == 2 + assert {calls[0].data["some"], calls[1].data["some"]} == { f"turn_on - device - {entry.entity_id} - off - on - None", f"turn_on_or_off - device - {entry.entity_id} - off - on - None", } @@ -275,8 +282,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is turning off. 
hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 4 - assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { + assert len(calls) == 4 + assert {calls[2].data["some"], calls[3].data["some"]} == { f"turn_off - device - {entry.entity_id} - on - off - None", f"turn_on_or_off - device - {entry.entity_id} - on - off - None", } @@ -286,7 +293,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -335,9 +342,9 @@ async def test_if_fires_on_state_change_legacy( # Fake that the entity is turning on. hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_on - device - {entry.entity_id} - off - on - None" ) @@ -346,7 +353,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -392,16 +399,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await hass.async_block_till_done() assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/fan/test_init.py b/tests/components/fan/test_init.py index a7dc544a97a..04f594b959c 100644 --- a/tests/components/fan/test_init.py +++ b/tests/components/fan/test_init.py @@ -1,7 +1,5 @@ """Tests for fan platforms.""" -from unittest.mock import patch - import pytest from homeassistant.components import fan @@ -14,23 +12,15 @@ from homeassistant.components.fan import ( FanEntityFeature, NotValidPresetModeError, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.helpers.entity_registry as er from homeassistant.setup import async_setup_component from .common import MockFan from tests.common import ( - MockConfigEntry, - MockModule, - MockPlatform, help_test_all, import_and_test_deprecated_constant_enum, - mock_integration, - mock_platform, setup_test_component_platform, ) @@ -38,7 +28,7 @@ from tests.common import ( class BaseFan(FanEntity): """Implementation of the abstract FanEntity.""" - def __init__(self) -> None: + def __init__(self): """Initialize the fan.""" @@ -177,10 +167,7 @@ def test_deprecated_constants( enum: fan.FanEntityFeature, ) -> None: """Test deprecated constants.""" - if not FanEntityFeature.TURN_OFF and not FanEntityFeature.TURN_ON: - 
import_and_test_deprecated_constant_enum( - caplog, fan, enum, "SUPPORT_", "2025.1" - ) + import_and_test_deprecated_constant_enum(caplog, fan, enum, "SUPPORT_", "2025.1") def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: @@ -193,288 +180,11 @@ def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> return 1 entity = MockFan() - assert entity.supported_features is FanEntityFeature(1) + assert entity.supported_features_compat is FanEntityFeature(1) assert "MockFan" in caplog.text assert "is using deprecated supported features values" in caplog.text assert "Instead it should use" in caplog.text assert "FanEntityFeature.SET_SPEED" in caplog.text caplog.clear() - assert entity.supported_features is FanEntityFeature(1) + assert entity.supported_features_compat is FanEntityFeature(1) assert "is using deprecated supported features values" not in caplog.text - - -async def test_warning_not_implemented_turn_on_off_feature( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None -) -> None: - """Test adding feature flag and warn if missing when methods are set.""" - - called = [] - - class MockFanEntityTest(MockFan): - """Mock Fan device.""" - - def turn_on( - self, - percentage: int | None = None, - preset_mode: str | None = None, - ) -> None: - """Turn on.""" - called.append("turn_on") - - def turn_off(self) -> None: - """Turn off.""" - called.append("turn_off") - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_fan_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test fan platform via config entry.""" - async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.fan", - MockPlatform(async_setup_entry=async_setup_entry_fan_platform), - ) - - with patch.object( - MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" - ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("fan.test") - assert state is not None - - assert ( - "Entity fan.test (.MockFanEntityTest'>) " - "does not set FanEntityFeature.TURN_OFF but implements the turn_off method. Please report it to the author of the 'test' custom integration" - in caplog.text - ) - assert ( - "Entity fan.test (.MockFanEntityTest'>) " - "does not set FanEntityFeature.TURN_ON but implements the turn_on method. 
Please report it to the author of the 'test' custom integration" - in caplog.text - ) - - await hass.services.async_call( - DOMAIN, - SERVICE_TURN_ON, - { - "entity_id": "fan.test", - }, - blocking=True, - ) - await hass.services.async_call( - DOMAIN, - SERVICE_TURN_OFF, - { - "entity_id": "fan.test", - }, - blocking=True, - ) - - assert len(called) == 2 - assert "turn_on" in called - assert "turn_off" in called - - -async def test_no_warning_implemented_turn_on_off_feature( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None -) -> None: - """Test no warning when feature flags are set.""" - - class MockFanEntityTest(MockFan): - """Mock Fan device.""" - - _attr_supported_features = ( - FanEntityFeature.DIRECTION - | FanEntityFeature.OSCILLATE - | FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_fan_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test fan platform via config entry.""" - async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.fan", - MockPlatform(async_setup_entry=async_setup_entry_fan_platform), - ) - - with patch.object( - MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" - ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("fan.test") - assert state is not None - - assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text - assert "does not set FanEntityFeature.TURN_ON" not in caplog.text - - -async def test_no_warning_integration_has_migrated( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None -) -> None: - """Test no warning when integration migrated using `_enable_turn_on_off_backwards_compatibility`.""" - - class MockFanEntityTest(MockFan): - """Mock Fan device.""" - - _enable_turn_on_off_backwards_compatibility = False - _attr_supported_features = ( - FanEntityFeature.DIRECTION - | FanEntityFeature.OSCILLATE - | FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - ) - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_fan_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test fan platform via config entry.""" - async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.fan", - MockPlatform(async_setup_entry=async_setup_entry_fan_platform), - ) - - with patch.object( - MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" - ): - config_entry = 
MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("fan.test") - assert state is not None - - assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text - assert "does not set FanEntityFeature.TURN_ON" not in caplog.text - - -async def test_no_warning_integration_implement_feature_flags( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None -) -> None: - """Test no warning when integration uses the correct feature flags.""" - - class MockFanEntityTest(MockFan): - """Mock Fan device.""" - - _attr_supported_features = ( - FanEntityFeature.DIRECTION - | FanEntityFeature.OSCILLATE - | FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_fan_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test fan platform via config entry.""" - async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.fan", - MockPlatform(async_setup_entry=async_setup_entry_fan_platform), - ) - - with patch.object( - MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" - ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("fan.test") - assert state is not None - - assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text - assert "does not set FanEntityFeature.TURN_ON" not in caplog.text diff --git a/tests/components/feedreader/conftest.py b/tests/components/feedreader/conftest.py index 8eeb89e00cd..0a5342615a9 100644 --- a/tests/components/feedreader/conftest.py +++ b/tests/components/feedreader/conftest.py @@ -52,18 +52,6 @@ def fixture_feed_identically_timed_events(hass: HomeAssistant) -> bytes: return load_fixture_bytes("feedreader6.xml") -@pytest.fixture(name="feed_without_items") -def fixture_feed_without_items(hass: HomeAssistant) -> bytes: - """Load test feed without any items.""" - return load_fixture_bytes("feedreader7.xml") - - -@pytest.fixture(name="feed_only_summary") -def fixture_feed_only_summary(hass: HomeAssistant) -> bytes: - """Load test feed data with one event containing only a summary, no content.""" - return load_fixture_bytes("feedreader8.xml") - - @pytest.fixture(name="events") async def fixture_events(hass: HomeAssistant) -> list[Event]: """Fixture that catches alexa events.""" diff --git a/tests/components/feedreader/fixtures/feedreader.xml b/tests/components/feedreader/fixtures/feedreader.xml index 17402cad081..8c85a4975ee 100644 --- a/tests/components/feedreader/fixtures/feedreader.xml +++ b/tests/components/feedreader/fixtures/feedreader.xml @@ -14,7 +14,6 @@ http://www.example.com/link/1 GUID 1 Mon, 30 Apr 2018 15:10:00 +1000 - Content 1 diff --git a/tests/components/feedreader/fixtures/feedreader1.xml 
b/tests/components/feedreader/fixtures/feedreader1.xml index c71507c15b7..ff856125779 100644 --- a/tests/components/feedreader/fixtures/feedreader1.xml +++ b/tests/components/feedreader/fixtures/feedreader1.xml @@ -8,21 +8,19 @@ Mon, 30 Apr 2018 15:00:00 +1000 1800 - - Title 2 - Description 2 - http://www.example.com/link/2 - GUID 2 - Mon, 30 Apr 2018 15:11:00 +1000 - Content 2 - Title 1 Description 1 http://www.example.com/link/1 GUID 1 Mon, 30 Apr 2018 15:10:00 +1000 - Content 1 + + + Title 2 + Description 2 + http://www.example.com/link/2 + GUID 2 + Mon, 30 Apr 2018 15:11:00 +1000 diff --git a/tests/components/feedreader/fixtures/feedreader2.xml b/tests/components/feedreader/fixtures/feedreader2.xml index 2471d70edcb..653a16e4561 100644 --- a/tests/components/feedreader/fixtures/feedreader2.xml +++ b/tests/components/feedreader/fixtures/feedreader2.xml @@ -8,110 +8,89 @@ Mon, 30 Apr 2018 15:00:00 +1000 1800 - - Title 21 - Mon, 30 Apr 2018 15:20:00 +1000 - Content 21 - - - Title 20 - Mon, 30 Apr 2018 15:19:00 +1000 - Content 20 - - - Title 19 - Mon, 30 Apr 2018 15:18:00 +1000 - Content 19 - - - Title 18 - Mon, 30 Apr 2018 15:17:00 +1000 - Content 18 - - - Title 17 - Mon, 30 Apr 2018 15:16:00 +1000 - Content 17 - - - Title 16 - Mon, 30 Apr 2018 15:15:00 +1000 - Content 16 - - - Title 15 - Mon, 30 Apr 2018 15:14:00 +1000 - Content 15 - - - Title 14 - Mon, 30 Apr 2018 15:13:00 +1000 - Content 14 - - - Title 13 - Mon, 30 Apr 2018 15:12:00 +1000 - Content 13 - - - Title 12 - Mon, 30 Apr 2018 15:11:00 +1000 - Content 12 - - - Title 11 - Mon, 30 Apr 2018 15:10:00 +1000 - Content 11 - - - Title 10 - Mon, 30 Apr 2018 15:09:00 +1000 - Content 10 - - - Title 9 - Mon, 30 Apr 2018 15:08:00 +1000 - Content 9 - - - Title 8 - Mon, 30 Apr 2018 15:07:00 +1000 - Content 8 - - - Title 7 - Mon, 30 Apr 2018 15:06:00 +1000 - Content 7 - - - Title 6 - Mon, 30 Apr 2018 15:05:00 +1000 - Content 6 - - - Title 5 - Mon, 30 Apr 2018 15:04:00 +1000 - Content 5 - - - Title 4 - Mon, 30 Apr 2018 15:03:00 +1000 - Content 4 - - - Title 3 - Mon, 30 Apr 2018 15:02:00 +1000 - Content 3 - Title 1 Mon, 30 Apr 2018 15:00:00 +1000 - Content 1 Title 2 Mon, 30 Apr 2018 15:01:00 +1000 - Content 2 + + + Title 3 + Mon, 30 Apr 2018 15:02:00 +1000 + + + Title 4 + Mon, 30 Apr 2018 15:03:00 +1000 + + + Title 5 + Mon, 30 Apr 2018 15:04:00 +1000 + + + Title 6 + Mon, 30 Apr 2018 15:05:00 +1000 + + + Title 7 + Mon, 30 Apr 2018 15:06:00 +1000 + + + Title 8 + Mon, 30 Apr 2018 15:07:00 +1000 + + + Title 9 + Mon, 30 Apr 2018 15:08:00 +1000 + + + Title 10 + Mon, 30 Apr 2018 15:09:00 +1000 + + + Title 11 + Mon, 30 Apr 2018 15:10:00 +1000 + + + Title 12 + Mon, 30 Apr 2018 15:11:00 +1000 + + + Title 13 + Mon, 30 Apr 2018 15:12:00 +1000 + + + Title 14 + Mon, 30 Apr 2018 15:13:00 +1000 + + + Title 15 + Mon, 30 Apr 2018 15:14:00 +1000 + + + Title 16 + Mon, 30 Apr 2018 15:15:00 +1000 + + + Title 17 + Mon, 30 Apr 2018 15:16:00 +1000 + + + Title 18 + Mon, 30 Apr 2018 15:17:00 +1000 + + + Title 19 + Mon, 30 Apr 2018 15:18:00 +1000 + + + Title 20 + Mon, 30 Apr 2018 15:19:00 +1000 + + + Title 21 + Mon, 30 Apr 2018 15:20:00 +1000 diff --git a/tests/components/feedreader/fixtures/feedreader3.xml b/tests/components/feedreader/fixtures/feedreader3.xml index 67daef20fe8..d8ccd119306 100644 --- a/tests/components/feedreader/fixtures/feedreader3.xml +++ b/tests/components/feedreader/fixtures/feedreader3.xml @@ -14,20 +14,17 @@ http://www.example.com/link/1 GUID 1 Mon, 30 Apr 2018 15:10:00 +1000 - Content 1 Title 2 Description 2 http://www.example.com/link/2 GUID 2 - 
Content 2 Description 3 http://www.example.com/link/3 GUID 3 - Content 3 diff --git a/tests/components/feedreader/fixtures/feedreader4.xml b/tests/components/feedreader/fixtures/feedreader4.xml index 11c8d501395..81828ccb6e2 100644 --- a/tests/components/feedreader/fixtures/feedreader4.xml +++ b/tests/components/feedreader/fixtures/feedreader4.xml @@ -14,7 +14,6 @@ http://www.example.com/link/1 GUID 1 26.10.2019 - 12:06:24 - Content 1 diff --git a/tests/components/feedreader/fixtures/feedreader5.xml b/tests/components/feedreader/fixtures/feedreader5.xml index 562fd45ea93..d9b1dda1ad2 100644 --- a/tests/components/feedreader/fixtures/feedreader5.xml +++ b/tests/components/feedreader/fixtures/feedreader5.xml @@ -14,6 +14,5 @@ urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a 2003-12-13T18:30:02Z Some text. - Content 1 diff --git a/tests/components/feedreader/fixtures/feedreader6.xml b/tests/components/feedreader/fixtures/feedreader6.xml index 48abd06b95b..621c89787e8 100644 --- a/tests/components/feedreader/fixtures/feedreader6.xml +++ b/tests/components/feedreader/fixtures/feedreader6.xml @@ -14,7 +14,6 @@ http://www.example.com/link/1 GUID 1 Mon, 30 Apr 2018 15:10:00 +0000 - Content 1 Title 2 @@ -22,7 +21,6 @@ http://www.example.com/link/2 GUID 2 Mon, 30 Apr 2018 15:10:00 +0000 - Content 2 diff --git a/tests/components/feedreader/fixtures/feedreader7.xml b/tests/components/feedreader/fixtures/feedreader7.xml deleted file mode 100644 index 0ffac8dd2ee..00000000000 --- a/tests/components/feedreader/fixtures/feedreader7.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - RSS Sample - This is an example of an RSS feed - http://www.example.com/main.html - Mon, 30 Apr 2018 12:00:00 +1000 - Mon, 30 Apr 2018 15:00:00 +1000 - 1800 - - diff --git a/tests/components/feedreader/fixtures/feedreader8.xml b/tests/components/feedreader/fixtures/feedreader8.xml deleted file mode 100644 index d1c167352f8..00000000000 --- a/tests/components/feedreader/fixtures/feedreader8.xml +++ /dev/null @@ -1,21 +0,0 @@ - - - - RSS Sample - This is an example of an RSS feed - http://www.example.com/main.html - Mon, 30 Apr 2018 12:00:00 +1000 - Mon, 30 Apr 2018 15:00:00 +1000 - 1800 - - - Title 1 - Description 1 - http://www.example.com/link/1 - GUID 1 - Mon, 30 Apr 2018 15:10:00 +1000 - This is a summary - - - - diff --git a/tests/components/feedreader/test_config_flow.py b/tests/components/feedreader/test_config_flow.py index 2a434306c0f..48c341492e0 100644 --- a/tests/components/feedreader/test_config_flow.py +++ b/tests/components/feedreader/test_config_flow.py @@ -11,9 +11,9 @@ from homeassistant.components.feedreader.const import ( DEFAULT_MAX_ENTRIES, DOMAIN, ) -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER from homeassistant.const import CONF_URL -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component @@ -83,6 +83,16 @@ async def test_user_errors( assert result["step_id"] == "user" assert result["errors"] == {"base": "url_error"} + # no feed entries returned + feedparser.side_effect = None + feedparser.return_value = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_URL: URL} + ) + assert result["type"] is FlowResultType.FORM + assert 
result["step_id"] == "user" + assert result["errors"] == {"base": "no_feed_entries"} + # success feedparser.side_effect = None feedparser.return_value = feed_one_event @@ -128,30 +138,43 @@ async def test_import( assert config_entries[0].data == expected_data assert config_entries[0].options == expected_options - assert issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, "deprecated_yaml_feedreader" - ) + assert issue_registry.async_get_issue(HA_DOMAIN, "deprecated_yaml_feedreader") +@pytest.mark.parametrize( + ("side_effect", "return_value", "expected_issue_id"), + [ + ( + urllib.error.URLError("Test"), + None, + "import_yaml_error_feedreader_url_error_http_some_rss_local_rss_feed_xml", + ), + ( + None, + None, + "import_yaml_error_feedreader_no_feed_entries_http_some_rss_local_rss_feed_xml", + ), + ], +) async def test_import_errors( hass: HomeAssistant, issue_registry: ir.IssueRegistry, feedparser, setup_entry, feed_one_event, + side_effect, + return_value, + expected_issue_id, ) -> None: """Test starting an import flow which results in an URL error.""" config_entries = hass.config_entries.async_entries(DOMAIN) assert not config_entries # raise URLError - feedparser.side_effect = urllib.error.URLError("Test") - feedparser.return_value = None + feedparser.side_effect = side_effect + feedparser.return_value = return_value assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_URLS: [URL]}}) - assert issue_registry.async_get_issue( - DOMAIN, - "import_yaml_error_feedreader_url_error_http_some_rss_local_rss_feed_xml", - ) + assert issue_registry.async_get_issue(DOMAIN, expected_issue_id) async def test_reconfigure(hass: HomeAssistant, feedparser) -> None: @@ -162,9 +185,16 @@ async def test_reconfigure(hass: HomeAssistant, feedparser) -> None: await hass.async_block_till_done() # init user flow - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" # success with patch( @@ -194,9 +224,16 @@ async def test_reconfigure_errors( entry.add_to_hass(hass) # init user flow - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" # raise URLError feedparser.side_effect = urllib.error.URLError("Test") @@ -208,9 +245,22 @@ async def test_reconfigure_errors( }, ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" assert result["errors"] == {"base": "url_error"} + # no feed entries returned + feedparser.side_effect = None + feedparser.return_value = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_URL: "http://other.rss.local/rss_feed.xml", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + assert result["errors"] == {"base": "no_feed_entries"} + # success feedparser.side_effect = None feedparser.return_value = feed_one_event diff --git a/tests/components/feedreader/test_event.py 
b/tests/components/feedreader/test_event.py deleted file mode 100644 index 491c7e38d02..00000000000 --- a/tests/components/feedreader/test_event.py +++ /dev/null @@ -1,61 +0,0 @@ -"""The tests for the feedreader event entity.""" - -from datetime import timedelta -from unittest.mock import patch - -from homeassistant.components.feedreader.event import ( - ATTR_CONTENT, - ATTR_DESCRIPTION, - ATTR_LINK, - ATTR_TITLE, -) -from homeassistant.core import HomeAssistant -import homeassistant.util.dt as dt_util - -from . import create_mock_entry -from .const import VALID_CONFIG_DEFAULT - -from tests.common import async_fire_time_changed - - -async def test_event_entity( - hass: HomeAssistant, feed_one_event, feed_two_event, feed_only_summary -) -> None: - """Test feed event entity.""" - entry = create_mock_entry(VALID_CONFIG_DEFAULT) - entry.add_to_hass(hass) - with patch( - "homeassistant.components.feedreader.coordinator.feedparser.http.get", - side_effect=[feed_one_event, feed_two_event, feed_only_summary], - ): - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("event.mock_title") - assert state - assert state.attributes[ATTR_TITLE] == "Title 1" - assert state.attributes[ATTR_LINK] == "http://www.example.com/link/1" - assert state.attributes[ATTR_CONTENT] == "Content 1" - assert state.attributes[ATTR_DESCRIPTION] == "Description 1" - - future = dt_util.utcnow() + timedelta(hours=1, seconds=1) - async_fire_time_changed(hass, future) - await hass.async_block_till_done(wait_background_tasks=True) - - state = hass.states.get("event.mock_title") - assert state - assert state.attributes[ATTR_TITLE] == "Title 2" - assert state.attributes[ATTR_LINK] == "http://www.example.com/link/2" - assert state.attributes[ATTR_CONTENT] == "Content 2" - assert state.attributes[ATTR_DESCRIPTION] == "Description 2" - - future = dt_util.utcnow() + timedelta(hours=2, seconds=2) - async_fire_time_changed(hass, future) - await hass.async_block_till_done(wait_background_tasks=True) - - state = hass.states.get("event.mock_title") - assert state - assert state.attributes[ATTR_TITLE] == "Title 1" - assert state.attributes[ATTR_LINK] == "http://www.example.com/link/1" - assert state.attributes[ATTR_CONTENT] == "This is a summary" - assert state.attributes[ATTR_DESCRIPTION] == "Description 1" diff --git a/tests/components/feedreader/test_init.py b/tests/components/feedreader/test_init.py index d7700d79e3b..1dcbf5ba45d 100644 --- a/tests/components/feedreader/test_init.py +++ b/tests/components/feedreader/test_init.py @@ -165,21 +165,6 @@ async def test_feed_identical_timestamps( ) -async def test_feed_with_only_summary( - hass: HomeAssistant, events, feed_only_summary -) -> None: - """Test simple feed with only summary, no content.""" - assert await async_setup_config_entry( - hass, VALID_CONFIG_DEFAULT, return_value=feed_only_summary - ) - await hass.async_block_till_done() - - assert len(events) == 1 - assert events[0].data.title == "Title 1" - assert events[0].data.description == "Description 1" - assert events[0].data.content[0].value == "This is a summary" - - async def test_feed_updates( hass: HomeAssistant, events, feed_one_event, feed_two_event ) -> None: @@ -262,20 +247,6 @@ async def test_feed_with_unrecognized_publication_date( assert len(events) == 1 -async def test_feed_without_items( - hass: HomeAssistant, events, feed_without_items, caplog: pytest.LogCaptureFixture -) -> None: - """Test simple feed without any items.""" - assert "No new 
entries to be published in feed" not in caplog.text - assert await async_setup_config_entry( - hass, VALID_CONFIG_DEFAULT, return_value=feed_without_items - ) - await hass.async_block_till_done() - - assert "No new entries to be published in feed" in caplog.text - assert len(events) == 0 - - async def test_feed_invalid_data(hass: HomeAssistant, events) -> None: """Test feed with invalid data.""" assert await async_setup_config_entry( @@ -325,7 +296,7 @@ async def test_feed_errors( async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) assert ( - "Error fetching feed data from http://some.rss.local/rss_feed.xml : " + "Error fetching feed data from http://some.rss.local/rss_feed.xml: " in caplog.text ) diff --git a/tests/components/ffmpeg/test_init.py b/tests/components/ffmpeg/test_init.py index aa407d5b695..353b8fdfcc0 100644 --- a/tests/components/ffmpeg/test_init.py +++ b/tests/components/ffmpeg/test_init.py @@ -16,13 +16,13 @@ from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.setup import async_setup_component +from homeassistant.setup import async_setup_component, setup_component -from tests.common import assert_setup_component +from tests.common import assert_setup_component, get_test_home_assistant @callback -def async_start(hass: HomeAssistant, entity_id: str | None = None) -> None: +def async_start(hass, entity_id=None): """Start a FFmpeg process on entity. This is a legacy helper method. Do not use it for new tests. @@ -32,7 +32,7 @@ def async_start(hass: HomeAssistant, entity_id: str | None = None) -> None: @callback -def async_stop(hass: HomeAssistant, entity_id: str | None = None) -> None: +def async_stop(hass, entity_id=None): """Stop a FFmpeg process on entity. This is a legacy helper method. Do not use it for new tests. @@ -42,7 +42,7 @@ def async_stop(hass: HomeAssistant, entity_id: str | None = None) -> None: @callback -def async_restart(hass: HomeAssistant, entity_id: str | None = None) -> None: +def async_restart(hass, entity_id=None): """Restart a FFmpeg process on entity. This is a legacy helper method. Do not use it for new tests. 
@@ -54,12 +54,7 @@ def async_restart(hass: HomeAssistant, entity_id: str | None = None) -> None: class MockFFmpegDev(ffmpeg.FFmpegBase): """FFmpeg device mock.""" - def __init__( - self, - hass: HomeAssistant, - initial_state: bool = True, - entity_id: str = "test.ffmpeg_device", - ) -> None: + def __init__(self, hass, initial_state=True, entity_id="test.ffmpeg_device"): """Initialize mock.""" super().__init__(None, initial_state) @@ -82,22 +77,26 @@ class MockFFmpegDev(ffmpeg.FFmpegBase): self.called_entities = entity_ids -async def test_setup_component(hass: HomeAssistant) -> None: +def test_setup_component() -> None: """Set up ffmpeg component.""" - with assert_setup_component(1): - await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) + with get_test_home_assistant() as hass: + with assert_setup_component(1): + setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) - assert hass.data[ffmpeg.DATA_FFMPEG].binary == "ffmpeg" + assert hass.data[ffmpeg.DATA_FFMPEG].binary == "ffmpeg" + hass.stop() -async def test_setup_component_test_service(hass: HomeAssistant) -> None: +def test_setup_component_test_service() -> None: """Set up ffmpeg component test services.""" - with assert_setup_component(1): - await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) + with get_test_home_assistant() as hass: + with assert_setup_component(1): + setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) - assert hass.services.has_service(ffmpeg.DOMAIN, "start") - assert hass.services.has_service(ffmpeg.DOMAIN, "stop") - assert hass.services.has_service(ffmpeg.DOMAIN, "restart") + assert hass.services.has_service(ffmpeg.DOMAIN, "start") + assert hass.services.has_service(ffmpeg.DOMAIN, "stop") + assert hass.services.has_service(ffmpeg.DOMAIN, "restart") + hass.stop() async def test_setup_component_test_register(hass: HomeAssistant) -> None: diff --git a/tests/components/fibaro/conftest.py b/tests/components/fibaro/conftest.py index ac10d4fc79d..d2f004a160c 100644 --- a/tests/components/fibaro/conftest.py +++ b/tests/components/fibaro/conftest.py @@ -1,9 +1,9 @@ """Test helpers.""" -from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest +from typing_extensions import Generator from homeassistant.components.fibaro import CONF_IMPORT_PLUGINS, DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME @@ -49,63 +49,6 @@ def mock_room() -> Mock: return room -@pytest.fixture -def mock_power_sensor() -> Mock: - """Fixture for an individual power sensor without value.""" - sensor = Mock() - sensor.fibaro_id = 1 - sensor.parent_fibaro_id = 0 - sensor.name = "Test sensor" - sensor.room_id = 1 - sensor.visible = True - sensor.enabled = True - sensor.type = "com.fibaro.powerMeter" - sensor.base_type = "com.fibaro.device" - sensor.properties = { - "zwaveCompany": "Goap", - "endPointId": "2", - "manufacturer": "", - "power": "6.60", - } - sensor.actions = {} - sensor.has_central_scene_event = False - value_mock = Mock() - value_mock.has_value = False - value_mock.is_bool_value = False - sensor.value = value_mock - return sensor - - -@pytest.fixture -def mock_cover() -> Mock: - """Fixture for a cover.""" - cover = Mock() - cover.fibaro_id = 3 - cover.parent_fibaro_id = 0 - cover.name = "Test cover" - cover.room_id = 1 - cover.dead = False - cover.visible = True - cover.enabled = True - cover.type = "com.fibaro.FGR" - cover.base_type = "com.fibaro.device" - cover.properties = {"manufacturer": ""} - cover.actions = {"open": 0, 
"close": 0} - cover.supported_features = {} - value_mock = Mock() - value_mock.has_value = True - value_mock.int_value.return_value = 20 - cover.value = value_mock - value2_mock = Mock() - value2_mock.has_value = False - cover.value_2 = value2_mock - state_mock = Mock() - state_mock.has_value = True - state_mock.str_value.return_value = "opening" - cover.state = state_mock - return cover - - @pytest.fixture def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Return the default mocked config entry.""" diff --git a/tests/components/fibaro/test_config_flow.py b/tests/components/fibaro/test_config_flow.py index 508bb81973d..b6b4e3992cd 100644 --- a/tests/components/fibaro/test_config_flow.py +++ b/tests/components/fibaro/test_config_flow.py @@ -183,7 +183,15 @@ async def test_reauth_success( hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Successful reauth flow initialized by the user.""" - result = await mock_config_entry.start_reauth_flow(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + }, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -203,7 +211,15 @@ async def test_reauth_connect_failure( mock_fibaro_client: Mock, ) -> None: """Successful reauth flow initialized by the user.""" - result = await mock_config_entry.start_reauth_flow(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + }, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -228,7 +244,15 @@ async def test_reauth_auth_failure( mock_fibaro_client: Mock, ) -> None: """Successful reauth flow initialized by the user.""" - result = await mock_config_entry.start_reauth_flow(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + }, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} diff --git a/tests/components/fibaro/test_cover.py b/tests/components/fibaro/test_cover.py deleted file mode 100644 index d5b08f7d1f8..00000000000 --- a/tests/components/fibaro/test_cover.py +++ /dev/null @@ -1,98 +0,0 @@ -"""Test the Fibaro cover platform.""" - -from unittest.mock import Mock, patch - -from homeassistant.components.cover import CoverState -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .conftest import init_integration - -from tests.common import MockConfigEntry - - -async def test_cover_setup( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_fibaro_client: Mock, - mock_config_entry: MockConfigEntry, - mock_cover: Mock, - mock_room: Mock, -) -> None: - """Test that the cover creates an entity.""" - - # Arrange - mock_fibaro_client.read_rooms.return_value = [mock_room] - mock_fibaro_client.read_devices.return_value = [mock_cover] - - with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.COVER]): - # Act - await init_integration(hass, mock_config_entry) - # Assert - entry = entity_registry.async_get("cover.room_1_test_cover_3") - assert entry - assert entry.unique_id == "hc2_111111.3" - assert 
entry.original_name == "Room 1 Test cover" - - -async def test_cover_opening( - hass: HomeAssistant, - mock_fibaro_client: Mock, - mock_config_entry: MockConfigEntry, - mock_cover: Mock, - mock_room: Mock, -) -> None: - """Test that the cover opening state is reported.""" - - # Arrange - mock_fibaro_client.read_rooms.return_value = [mock_room] - mock_fibaro_client.read_devices.return_value = [mock_cover] - - with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.COVER]): - # Act - await init_integration(hass, mock_config_entry) - # Assert - assert hass.states.get("cover.room_1_test_cover_3").state == CoverState.OPENING - - -async def test_cover_opening_closing_none( - hass: HomeAssistant, - mock_fibaro_client: Mock, - mock_config_entry: MockConfigEntry, - mock_cover: Mock, - mock_room: Mock, -) -> None: - """Test that the cover opening closing states return None if not available.""" - - # Arrange - mock_fibaro_client.read_rooms.return_value = [mock_room] - mock_cover.state.has_value = False - mock_fibaro_client.read_devices.return_value = [mock_cover] - - with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.COVER]): - # Act - await init_integration(hass, mock_config_entry) - # Assert - assert hass.states.get("cover.room_1_test_cover_3").state == CoverState.OPEN - - -async def test_cover_closing( - hass: HomeAssistant, - mock_fibaro_client: Mock, - mock_config_entry: MockConfigEntry, - mock_cover: Mock, - mock_room: Mock, -) -> None: - """Test that the cover closing state is reported.""" - - # Arrange - mock_fibaro_client.read_rooms.return_value = [mock_room] - mock_cover.state.str_value.return_value = "closing" - mock_fibaro_client.read_devices.return_value = [mock_cover] - - with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.COVER]): - # Act - await init_integration(hass, mock_config_entry) - # Assert - assert hass.states.get("cover.room_1_test_cover_3").state == CoverState.CLOSING diff --git a/tests/components/fibaro/test_sensor.py b/tests/components/fibaro/test_sensor.py deleted file mode 100644 index 38cbd5d12a8..00000000000 --- a/tests/components/fibaro/test_sensor.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Test the Fibaro sensor platform.""" - -from unittest.mock import Mock, patch - -from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .conftest import init_integration - -from tests.common import MockConfigEntry - - -async def test_power_sensor_detected( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_fibaro_client: Mock, - mock_config_entry: MockConfigEntry, - mock_power_sensor: Mock, - mock_room: Mock, -) -> None: - """Test that the strange power entity is detected. - - Similar to a Qubino 3-Phase power meter. 
- """ - # Arrange - mock_fibaro_client.read_rooms.return_value = [mock_room] - mock_fibaro_client.read_devices.return_value = [mock_power_sensor] - - with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.SENSOR]): - # Act - await init_integration(hass, mock_config_entry) - # Assert - entry = entity_registry.async_get("sensor.room_1_test_sensor_1_power") - assert entry - assert entry.unique_id == "hc2_111111.1_power" - assert entry.original_name == "Room 1 Test sensor Power" - assert entry.original_device_class == SensorDeviceClass.POWER diff --git a/tests/components/fido/test_sensor.py b/tests/components/fido/test_sensor.py index 654221cfacd..a067f060af8 100644 --- a/tests/components/fido/test_sensor.py +++ b/tests/components/fido/test_sensor.py @@ -6,9 +6,9 @@ from unittest.mock import MagicMock, patch from pyfido.client import PyFidoError import pytest +from homeassistant.bootstrap import async_setup_component from homeassistant.components.fido import sensor as fido from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from tests.common import assert_setup_component @@ -18,7 +18,7 @@ CONTRACT = "123456789" class FidoClientMock: """Fake Fido client.""" - def __init__(self, username, password, timeout=None, httpsession=None) -> None: + def __init__(self, username, password, timeout=None, httpsession=None): """Fake Fido client init.""" def get_phone_numbers(self): diff --git a/tests/components/file/conftest.py b/tests/components/file/conftest.py index 5345a0d38d0..265acde36ca 100644 --- a/tests/components/file/conftest.py +++ b/tests/components/file/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for file platform.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/file/test_config_flow.py b/tests/components/file/test_config_flow.py index 30d00411c44..86ada1fec61 100644 --- a/tests/components/file/test_config_flow.py +++ b/tests/components/file/test_config_flow.py @@ -7,7 +7,6 @@ import pytest from homeassistant import config_entries from homeassistant.components.file import DOMAIN -from homeassistant.const import CONF_UNIT_OF_MEASUREMENT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -16,22 +15,20 @@ from tests.common import MockConfigEntry MOCK_CONFIG_NOTIFY = { "platform": "notify", "file_path": "some_file", + "timestamp": True, } -MOCK_OPTIONS_NOTIFY = {"timestamp": True} MOCK_CONFIG_SENSOR = { "platform": "sensor", "file_path": "some/path", + "value_template": "{{ value | round(1) }}", } -MOCK_OPTIONS_SENSOR = {"value_template": "{{ value | round(1) }}"} + +pytestmark = pytest.mark.usefixtures("mock_setup_entry") -@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize( - ("platform", "data", "options"), - [ - ("sensor", MOCK_CONFIG_SENSOR, MOCK_OPTIONS_SENSOR), - ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY), - ], + ("platform", "data"), + [("sensor", MOCK_CONFIG_SENSOR), ("notify", MOCK_CONFIG_NOTIFY)], ) async def test_form( hass: HomeAssistant, @@ -39,7 +36,6 @@ async def test_form( mock_is_allowed_path: bool, platform: str, data: dict[str, Any], - options: dict[str, Any], ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -54,7 +50,7 @@ async def test_form( ) await hass.async_block_till_done() - user_input = {**data, **options} + 
user_input = dict(data) user_input.pop("platform") result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input @@ -63,17 +59,12 @@ async def test_form( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["data"] == data - assert result2["options"] == options assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize( - ("platform", "data", "options"), - [ - ("sensor", MOCK_CONFIG_SENSOR, MOCK_OPTIONS_SENSOR), - ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY), - ], + ("platform", "data"), + [("sensor", MOCK_CONFIG_SENSOR), ("notify", MOCK_CONFIG_NOTIFY)], ) async def test_already_configured( hass: HomeAssistant, @@ -81,10 +72,9 @@ async def test_already_configured( mock_is_allowed_path: bool, platform: str, data: dict[str, Any], - options: dict[str, Any], ) -> None: """Test aborting if the entry is already configured.""" - entry = MockConfigEntry(domain=DOMAIN, data=data, options=options) + entry = MockConfigEntry(domain=DOMAIN, data=data) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( @@ -101,7 +91,7 @@ async def test_already_configured( assert result["type"] is FlowResultType.FORM assert result["step_id"] == platform - user_input = {**data, **options} + user_input = dict(data) user_input.pop("platform") result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -113,14 +103,10 @@ async def test_already_configured( assert result2["reason"] == "already_configured" -@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize("is_allowed", [False], ids=["not_allowed"]) @pytest.mark.parametrize( - ("platform", "data", "options"), - [ - ("sensor", MOCK_CONFIG_SENSOR, MOCK_OPTIONS_SENSOR), - ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY), - ], + ("platform", "data"), + [("sensor", MOCK_CONFIG_SENSOR), ("notify", MOCK_CONFIG_NOTIFY)], ) async def test_not_allowed( hass: HomeAssistant, @@ -128,7 +114,6 @@ async def test_not_allowed( mock_is_allowed_path: bool, platform: str, data: dict[str, Any], - options: dict[str, Any], ) -> None: """Test aborting if the file path is not allowed.""" result = await hass.config_entries.flow.async_init( @@ -145,7 +130,7 @@ async def test_not_allowed( assert result["type"] is FlowResultType.FORM assert result["step_id"] == platform - user_input = {**data, **options} + user_input = dict(data) user_input.pop("platform") result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -155,49 +140,3 @@ async def test_not_allowed( assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"file_path": "not_allowed"} - - -@pytest.mark.parametrize( - ("platform", "data", "options", "new_options"), - [ - ( - "sensor", - MOCK_CONFIG_SENSOR, - MOCK_OPTIONS_SENSOR, - {CONF_UNIT_OF_MEASUREMENT: "mm"}, - ), - ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY, {"timestamp": False}), - ], -) -async def test_options_flow( - hass: HomeAssistant, - mock_is_allowed_path: bool, - platform: str, - data: dict[str, Any], - options: dict[str, Any], - new_options: dict[str, Any], -) -> None: - """Test options config flow.""" - entry = MockConfigEntry(domain=DOMAIN, data=data, options=options, version=2) - entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init(entry.entry_id) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == 
"init" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input=new_options, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == new_options - - entry = hass.config_entries.async_get_entry(entry.entry_id) - assert entry.state is config_entries.ConfigEntryState.LOADED - assert entry.options == new_options diff --git a/tests/components/file/test_init.py b/tests/components/file/test_init.py deleted file mode 100644 index faf1488ed07..00000000000 --- a/tests/components/file/test_init.py +++ /dev/null @@ -1,65 +0,0 @@ -"""The tests for local file init.""" - -from unittest.mock import MagicMock, Mock, patch - -from homeassistant.components.file import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry, get_fixture_path - - -@patch("os.path.isfile", Mock(return_value=True)) -@patch("os.access", Mock(return_value=True)) -async def test_migration_to_version_2( - hass: HomeAssistant, mock_is_allowed_path: MagicMock -) -> None: - """Test the File sensor with JSON entries.""" - data = { - "platform": "sensor", - "name": "file2", - "file_path": get_fixture_path("file_value_template.txt", "file"), - "value_template": "{{ value_json.temperature }}", - } - - entry = MockConfigEntry( - domain=DOMAIN, - version=1, - data=data, - title=f"test [{data['file_path']}]", - ) - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - - assert entry.state is ConfigEntryState.LOADED - assert entry.version == 2 - assert entry.data == { - "platform": "sensor", - "name": "file2", - "file_path": get_fixture_path("file_value_template.txt", "file"), - } - assert entry.options == { - "value_template": "{{ value_json.temperature }}", - } - - -@patch("os.path.isfile", Mock(return_value=True)) -@patch("os.access", Mock(return_value=True)) -async def test_migration_from_future_version( - hass: HomeAssistant, mock_is_allowed_path: MagicMock -) -> None: - """Test the File sensor with JSON entries.""" - data = { - "platform": "sensor", - "name": "file2", - "file_path": get_fixture_path("file_value_template.txt", "file"), - "value_template": "{{ value_json.temperature }}", - } - - entry = MockConfigEntry( - domain=DOMAIN, version=3, data=data, title=f"test [{data['file_path']}]" - ) - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - - assert entry.state is ConfigEntryState.MIGRATION_ERROR diff --git a/tests/components/file/test_notify.py b/tests/components/file/test_notify.py index e7cb85a9cfc..faa9027aa21 100644 --- a/tests/components/file/test_notify.py +++ b/tests/components/file/test_notify.py @@ -12,46 +12,83 @@ from homeassistant.components.file import DOMAIN from homeassistant.components.notify import ATTR_TITLE_DEFAULT from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.typing import ConfigType +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, assert_setup_component + + +async def test_bad_config(hass: HomeAssistant) -> None: + """Test set up the platform with bad/missing config.""" + config = {notify.DOMAIN: {"name": "test", "platform": "file"}} + with assert_setup_component(0, domain="notify") as handle_config: + assert await 
async_setup_component(hass, notify.DOMAIN, config) + await hass.async_block_till_done() + assert not handle_config[notify.DOMAIN] @pytest.mark.parametrize( ("domain", "service", "params"), [ + (notify.DOMAIN, "test", {"message": "one, two, testing, testing"}), ( notify.DOMAIN, "send_message", {"entity_id": "notify.test", "message": "one, two, testing, testing"}, ), ], + ids=["legacy", "entity"], +) +@pytest.mark.parametrize( + ("timestamp", "config"), + [ + ( + False, + { + "notify": [ + { + "name": "test", + "platform": "file", + "filename": "mock_file", + "timestamp": False, + } + ] + }, + ), + ( + True, + { + "notify": [ + { + "name": "test", + "platform": "file", + "filename": "mock_file", + "timestamp": True, + } + ] + }, + ), + ], + ids=["no_timestamp", "timestamp"], ) -@pytest.mark.parametrize("timestamp", [False, True], ids=["no_timestamp", "timestamp"]) async def test_notify_file( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - mock_is_allowed_path: MagicMock, timestamp: bool, + mock_is_allowed_path: MagicMock, + config: ConfigType, domain: str, service: str, params: dict[str, str], ) -> None: """Test the notify file output.""" filename = "mock_file" - full_filename = os.path.join(hass.config.path(), filename) - message = params["message"] - - entry = MockConfigEntry( - domain=DOMAIN, - data={"name": "test", "platform": "notify", "file_path": full_filename}, - options={"timestamp": timestamp}, - version=2, - title=f"test [{filename}]", - ) - entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(entry.entry_id) + assert await async_setup_component(hass, notify.DOMAIN, config) + await hass.async_block_till_done() + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) freezer.move_to(dt_util.utcnow()) @@ -68,6 +105,7 @@ async def test_notify_file( await hass.services.async_call(domain, service, params, blocking=True) + full_filename = os.path.join(hass.config.path(), filename) assert m_open.call_count == 1 assert m_open.call_args == call(full_filename, "a", encoding="utf8") @@ -85,7 +123,58 @@ async def test_notify_file( @pytest.mark.parametrize( - ("is_allowed", "config", "options"), + ("domain", "service", "params"), + [(notify.DOMAIN, "test", {"message": "one, two, testing, testing"})], + ids=["legacy"], +) +@pytest.mark.parametrize( + ("is_allowed", "config"), + [ + ( + True, + { + "notify": [ + { + "name": "test", + "platform": "file", + "filename": "mock_file", + } + ] + }, + ), + ], + ids=["allowed_but_access_failed"], +) +async def test_legacy_notify_file_exception( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_is_allowed_path: MagicMock, + config: ConfigType, + domain: str, + service: str, + params: dict[str, str], +) -> None: + """Test legacy notify file output has exception.""" + assert await async_setup_component(hass, notify.DOMAIN, config) + await hass.async_block_till_done() + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + + freezer.move_to(dt_util.utcnow()) + + m_open = mock_open() + with ( + patch("homeassistant.components.file.notify.open", m_open, create=True), + patch("homeassistant.components.file.notify.os.stat") as mock_st, + ): + mock_st.side_effect = OSError("Access Failed") + with pytest.raises(ServiceValidationError) as exc: + await hass.services.async_call(domain, service, params, blocking=True) + assert f"{exc.value!r}" == "ServiceValidationError('write_access_failed')" + + 
+@pytest.mark.parametrize( + ("timestamp", "data"), [ ( False, @@ -93,28 +182,99 @@ async def test_notify_file( "name": "test", "platform": "notify", "file_path": "mock_file", + "timestamp": False, }, + ), + ( + True, { + "name": "test", + "platform": "notify", + "file_path": "mock_file", + "timestamp": True, + }, + ), + ], + ids=["no_timestamp", "timestamp"], +) +async def test_legacy_notify_file_entry_only_setup( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + timestamp: bool, + mock_is_allowed_path: MagicMock, + data: dict[str, Any], +) -> None: + """Test the legacy notify file output in entry only setup.""" + filename = "mock_file" + + domain = notify.DOMAIN + service = "test" + params = {"message": "one, two, testing, testing"} + message = params["message"] + + entry = MockConfigEntry( + domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + + freezer.move_to(dt_util.utcnow()) + + m_open = mock_open() + with ( + patch("homeassistant.components.file.notify.open", m_open, create=True), + patch("homeassistant.components.file.notify.os.stat") as mock_st, + ): + mock_st.return_value.st_size = 0 + title = ( + f"{ATTR_TITLE_DEFAULT} notifications " + f"(Log started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n" + ) + + await hass.services.async_call(domain, service, params, blocking=True) + + assert m_open.call_count == 1 + assert m_open.call_args == call(filename, "a", encoding="utf8") + + assert m_open.return_value.write.call_count == 2 + if not timestamp: + assert m_open.return_value.write.call_args_list == [ + call(title), + call(f"{message}\n"), + ] + else: + assert m_open.return_value.write.call_args_list == [ + call(title), + call(f"{dt_util.utcnow().isoformat()} {message}\n"), + ] + + +@pytest.mark.parametrize( + ("is_allowed", "config"), + [ + ( + False, + { + "name": "test", + "platform": "notify", + "file_path": "mock_file", "timestamp": False, }, ), ], ids=["not_allowed"], ) -async def test_notify_file_not_allowed( +async def test_legacy_notify_file_not_allowed( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_is_allowed_path: MagicMock, config: dict[str, Any], - options: dict[str, Any], ) -> None: - """Test notify file output not allowed.""" + """Test legacy notify file output not allowed.""" entry = MockConfigEntry( - domain=DOMAIN, - data=config, - version=2, - options=options, - title=f"test [{config['file_path']}]", + domain=DOMAIN, data=config, title=f"test [{config['file_path']}]" ) entry.add_to_hass(hass) assert not await hass.config_entries.async_setup(entry.entry_id) @@ -125,22 +285,21 @@ async def test_notify_file_not_allowed( @pytest.mark.parametrize( ("service", "params"), [ + ("test", {"message": "one, two, testing, testing"}), ( "send_message", {"entity_id": "notify.test", "message": "one, two, testing, testing"}, - ) + ), ], ) @pytest.mark.parametrize( - ("data", "options", "is_allowed"), + ("data", "is_allowed"), [ ( { "name": "test", "platform": "notify", "file_path": "mock_file", - }, - { "timestamp": False, }, True, @@ -155,17 +314,12 @@ async def test_notify_file_write_access_failed( service: str, params: dict[str, Any], data: dict[str, Any], - options: dict[str, Any], ) -> None: """Test the notify file fails.""" domain = notify.DOMAIN entry = MockConfigEntry( - domain=DOMAIN, - data=data, - version=2, - options=options, - title=f"test 
[{data['file_path']}]", + domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/file/test_sensor.py b/tests/components/file/test_sensor.py index 9e6a16e3e27..60a81df2b1e 100644 --- a/tests/components/file/test_sensor.py +++ b/tests/components/file/test_sensor.py @@ -7,10 +7,33 @@ import pytest from homeassistant.components.file import DOMAIN from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, get_fixture_path +@patch("os.path.isfile", Mock(return_value=True)) +@patch("os.access", Mock(return_value=True)) +async def test_file_value_yaml_setup( + hass: HomeAssistant, mock_is_allowed_path: MagicMock +) -> None: + """Test the File sensor from YAML setup.""" + config = { + "sensor": { + "platform": "file", + "scan_interval": 30, + "name": "file1", + "file_path": get_fixture_path("file_value.txt", "file"), + } + } + + assert await async_setup_component(hass, "sensor", config) + await hass.async_block_till_done() + + state = hass.states.get("sensor.file1") + assert state.state == "21" + + @patch("os.path.isfile", Mock(return_value=True)) @patch("os.access", Mock(return_value=True)) async def test_file_value_entry_setup( @@ -24,11 +47,7 @@ async def test_file_value_entry_setup( } entry = MockConfigEntry( - domain=DOMAIN, - data=data, - version=2, - options={}, - title=f"test [{data['file_path']}]", + domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -47,17 +66,11 @@ async def test_file_value_template( "platform": "sensor", "name": "file2", "file_path": get_fixture_path("file_value_template.txt", "file"), - } - options = { "value_template": "{{ value_json.temperature }}", } entry = MockConfigEntry( - domain=DOMAIN, - data=data, - version=2, - options=options, - title=f"test [{data['file_path']}]", + domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -77,11 +90,7 @@ async def test_file_empty(hass: HomeAssistant, mock_is_allowed_path: MagicMock) } entry = MockConfigEntry( - domain=DOMAIN, - data=data, - version=2, - options={}, - title=f"test [{data['file_path']}]", + domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -104,11 +113,7 @@ async def test_file_path_invalid( } entry = MockConfigEntry( - domain=DOMAIN, - data=data, - version=2, - options={}, - title=f"test [{data['file_path']}]", + domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/file_upload/test_init.py b/tests/components/file_upload/test_init.py index 22ad9323f05..149bbb7ee2f 100644 --- a/tests/components/file_upload/test_init.py +++ b/tests/components/file_upload/test_init.py @@ -3,7 +3,6 @@ from contextlib import contextmanager from pathlib import Path from random import getrandbits -from typing import Any from unittest.mock import patch import pytest @@ -142,7 +141,7 @@ async def test_upload_large_file_fails( yield MockPathOpen() class MockPathOpen: - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs) -> None: pass def write(self, data: bytes) -> 
None: diff --git a/tests/components/filesize/conftest.py b/tests/components/filesize/conftest.py index ac66af0d22f..859886a3058 100644 --- a/tests/components/filesize/conftest.py +++ b/tests/components/filesize/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations -from collections.abc import Generator from pathlib import Path from unittest.mock import patch import pytest +from typing_extensions import Generator from homeassistant.components.filesize.const import DOMAIN from homeassistant.const import CONF_FILE_PATH diff --git a/tests/components/filter/test_sensor.py b/tests/components/filter/test_sensor.py index a3e0e58908a..0ece61708f2 100644 --- a/tests/components/filter/test_sensor.py +++ b/tests/components/filter/test_sensor.py @@ -37,11 +37,6 @@ import homeassistant.util.dt as dt_util from tests.common import assert_setup_component, get_fixture_path -@pytest.fixture(autouse=True, name="stub_blueprint_populate") -def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: - """Stub copying the blueprints to the config folder.""" - - @pytest.fixture(name="values") def values_fixture() -> list[State]: """Fixture for a list of test States.""" @@ -472,7 +467,7 @@ def test_throttle(values: list[State]) -> None: new_state = filt.filter_state(state) if not filt.skip_processing: filtered.append(new_state) - assert [f.state for f in filtered] == [20, 21] + assert [20, 21] == [f.state for f in filtered] def test_time_throttle(values: list[State]) -> None: @@ -485,7 +480,7 @@ def test_time_throttle(values: list[State]) -> None: new_state = filt.filter_state(state) if not filt.skip_processing: filtered.append(new_state) - assert [f.state for f in filtered] == [20, 18, 22] + assert [20, 18, 22] == [f.state for f in filtered] def test_time_sma(values: list[State]) -> None: diff --git a/tests/components/fireservicerota/test_config_flow.py b/tests/components/fireservicerota/test_config_flow.py index 5555a8d649c..539906d800b 100644 --- a/tests/components/fireservicerota/test_config_flow.py +++ b/tests/components/fireservicerota/test_config_flow.py @@ -120,8 +120,23 @@ async def test_reauth(hass: HomeAssistant) -> None: domain=DOMAIN, data=MOCK_CONF, unique_id=MOCK_CONF[CONF_USERNAME] ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM + with patch( + "homeassistant.components.fireservicerota.config_flow.FireServiceRota" + ) as mock_fsr: + mock_fireservicerota = mock_fsr.return_value + mock_fireservicerota.request_tokens.return_value = MOCK_TOKEN_INFO + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + }, + data=MOCK_CONF, + ) + + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM with ( patch( diff --git a/tests/components/fitbit/conftest.py b/tests/components/fitbit/conftest.py index 57511739993..b1ff8a94e12 100644 --- a/tests/components/fitbit/conftest.py +++ b/tests/components/fitbit/conftest.py @@ -1,6 +1,6 @@ """Test fixtures for fitbit.""" -from collections.abc import Awaitable, Callable, Generator +from collections.abc import Awaitable, Callable import datetime from http import HTTPStatus import time @@ -9,6 +9,7 @@ from unittest.mock import patch import pytest from requests_mock.mocker import Mocker +from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git 
a/tests/components/fitbit/test_config_flow.py b/tests/components/fitbit/test_config_flow.py index 6f717459486..d5f3d09abdd 100644 --- a/tests/components/fitbit/test_config_flow.py +++ b/tests/components/fitbit/test_config_flow.py @@ -472,7 +472,13 @@ async def test_reauth_flow( assert len(entries) == 1 # config_entry.req initiates reauth - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -540,7 +546,13 @@ async def test_reauth_wrong_user_id( entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/fjaraskupan/test_config_flow.py b/tests/components/fjaraskupan/test_config_flow.py index 6d3df614443..fa0df9241dd 100644 --- a/tests/components/fjaraskupan/test_config_flow.py +++ b/tests/components/fjaraskupan/test_config_flow.py @@ -2,8 +2,7 @@ from __future__ import annotations -from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest @@ -16,7 +15,7 @@ from . import COOKER_SERVICE_INFO @pytest.fixture(name="mock_setup_entry", autouse=True) -def fixture_mock_setup_entry() -> Generator[AsyncMock]: +async def fixture_mock_setup_entry(hass): """Fixture for config entry.""" with patch( @@ -25,7 +24,7 @@ def fixture_mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry -async def test_configure(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: +async def test_configure(hass: HomeAssistant, mock_setup_entry) -> None: """Test we get the form.""" with patch( "homeassistant.components.fjaraskupan.config_flow.async_discovered_service_info", diff --git a/tests/components/fjaraskupan/test_coordinator.py b/tests/components/fjaraskupan/test_coordinator.py deleted file mode 100644 index e63d52a7594..00000000000 --- a/tests/components/fjaraskupan/test_coordinator.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Test the Fjäråskupan coordinator module.""" - -from fjaraskupan import ( - FjaraskupanConnectionError, - FjaraskupanError, - FjaraskupanReadError, - FjaraskupanWriteError, -) -import pytest - -from homeassistant.components.fjaraskupan.const import DOMAIN -from homeassistant.components.fjaraskupan.coordinator import exception_converter -from homeassistant.exceptions import HomeAssistantError - - -@pytest.mark.parametrize( - ("exception", "translation_key", "translation_placeholder"), - [ - (FjaraskupanReadError(), "read_error", None), - (FjaraskupanWriteError(), "write_error", None), - (FjaraskupanConnectionError(), "connection_error", None), - (FjaraskupanError("Some error"), "unexpected_error", {"msg": "Some error"}), - ], -) -def test_exeception_wrapper( - exception: Exception, translation_key: str, translation_placeholder: dict[str, str] -) -> None: - """Test our exception conversion.""" - with pytest.raises(HomeAssistantError) as exc_info, exception_converter(): - raise exception - assert exc_info.value.translation_domain == DOMAIN - assert exc_info.value.translation_key == translation_key - assert 
exc_info.value.translation_placeholders == translation_placeholder diff --git a/tests/components/flexit_bacnet/conftest.py b/tests/components/flexit_bacnet/conftest.py index cc7c9fa0570..e1b98070d25 100644 --- a/tests/components/flexit_bacnet/conftest.py +++ b/tests/components/flexit_bacnet/conftest.py @@ -1,10 +1,10 @@ """Configuration for Flexit Nordic (BACnet) tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch from flexit_bacnet import FlexitBACnet import pytest +from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.flexit_bacnet.const import DOMAIN diff --git a/tests/components/flic/test_binary_sensor.py b/tests/components/flic/test_binary_sensor.py index cdc1d64db41..44db1d6ea1b 100644 --- a/tests/components/flic/test_binary_sensor.py +++ b/tests/components/flic/test_binary_sensor.py @@ -8,7 +8,7 @@ from homeassistant.setup import async_setup_component class _MockFlicClient: - def __init__(self, button_addresses) -> None: + def __init__(self, button_addresses): self.addresses = button_addresses self.get_info_callback = None self.scan_wizard = None diff --git a/tests/components/flick_electric/test_config_flow.py b/tests/components/flick_electric/test_config_flow.py index 85a6495d3c5..1b3ed1de34d 100644 --- a/tests/components/flick_electric/test_config_flow.py +++ b/tests/components/flick_electric/test_config_flow.py @@ -6,7 +6,6 @@ from pyflick.authentication import AuthException from homeassistant import config_entries from homeassistant.components.flick_electric.const import DOMAIN -from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -16,7 +15,7 @@ from tests.common import MockConfigEntry CONF = {CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"} -async def _flow_submit(hass: HomeAssistant) -> ConfigFlowResult: +async def _flow_submit(hass): return await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, diff --git a/tests/components/flipr/__init__.py b/tests/components/flipr/__init__.py index 3c5bfc2a6c2..26767261866 100644 --- a/tests/components/flipr/__init__.py +++ b/tests/components/flipr/__init__.py @@ -1,15 +1 @@ """Tests for the Flipr integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Fixture for setting up the component.""" - mock_config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/flipr/conftest.py b/tests/components/flipr/conftest.py deleted file mode 100644 index 18457000636..00000000000 --- a/tests/components/flipr/conftest.py +++ /dev/null @@ -1,97 +0,0 @@ -"""Common fixtures for the flipr tests.""" - -from collections.abc import Generator -from datetime import datetime -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.flipr.const import DOMAIN -from homeassistant.const import CONF_EMAIL, CONF_PASSWORD -from homeassistant.util import dt as dt_util - -from tests.common import MockConfigEntry - -# Data for the mocked object returned via flipr_api client. 
-MOCK_DATE_TIME = datetime(2021, 2, 15, 9, 10, 32, tzinfo=dt_util.UTC) -MOCK_FLIPR_MEASURE = { - "temperature": 10.5, - "ph": 7.03, - "chlorine": 0.23654886, - "red_ox": 657.58, - "date_time": MOCK_DATE_TIME, - "ph_status": "TooLow", - "chlorine_status": "Medium", - "battery": 95.0, -} - -MOCK_HUB_STATE_ON = { - "state": True, - "mode": "planning", - "planning": "dummyplanningid", -} - -MOCK_HUB_STATE_OFF = { - "state": False, - "mode": "manual", - "planning": "dummyplanningid", -} - -MOCK_HUB_MODE_MANUAL = { - "state": False, - "mode": "manual", - "planning": "dummyplanningid", -} - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.flipr.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock the config entry.""" - return MockConfigEntry( - version=2, - domain=DOMAIN, - unique_id="toto@toto.com", - data={ - CONF_EMAIL: "toto@toto.com", - CONF_PASSWORD: "myPassword", - }, - ) - - -@pytest.fixture -def mock_flipr_client() -> Generator[AsyncMock]: - """Mock a Flipr client.""" - - with ( - patch( - "homeassistant.components.flipr.FliprAPIRestClient", - autospec=True, - ) as mock_client, - patch( - "homeassistant.components.flipr.config_flow.FliprAPIRestClient", - new=mock_client, - ), - ): - client = mock_client.return_value - - # Default values for the tests using this mock : - client.search_all_ids.return_value = {"flipr": ["myfliprid"], "hub": []} - - client.get_pool_measure_latest.return_value = MOCK_FLIPR_MEASURE - - client.get_hub_state.return_value = MOCK_HUB_STATE_ON - - client.set_hub_state.return_value = MOCK_HUB_STATE_ON - - client.set_hub_mode.return_value = MOCK_HUB_MODE_MANUAL - - yield client diff --git a/tests/components/flipr/test_binary_sensor.py b/tests/components/flipr/test_binary_sensor.py index ed43dbb8a77..971b5b046b3 100644 --- a/tests/components/flipr/test_binary_sensor.py +++ b/tests/components/flipr/test_binary_sensor.py @@ -1,24 +1,49 @@ """Test the Flipr binary sensor.""" -from unittest.mock import AsyncMock +from datetime import datetime +from unittest.mock import patch +from homeassistant.components.flipr.const import CONF_FLIPR_ID, DOMAIN +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er - -from . import setup_integration +from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry +# Data for the mocked object returned via flipr_api client. 
+MOCK_DATE_TIME = datetime(2021, 2, 15, 9, 10, 32, tzinfo=dt_util.UTC) +MOCK_FLIPR_MEASURE = { + "temperature": 10.5, + "ph": 7.03, + "chlorine": 0.23654886, + "red_ox": 657.58, + "date_time": MOCK_DATE_TIME, + "ph_status": "TooLow", + "chlorine_status": "Medium", +} -async def test_sensors( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_flipr_client: AsyncMock, -) -> None: + +async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: """Test the creation and values of the Flipr binary sensors.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="test_entry_unique_id", + data={ + CONF_EMAIL: "toto@toto.com", + CONF_PASSWORD: "myPassword", + CONF_FLIPR_ID: "myfliprid", + }, + ) - await setup_integration(hass, mock_config_entry) + entry.add_to_hass(hass) + + with patch( + "flipr_api.FliprAPIRestClient.get_pool_measure_latest", + return_value=MOCK_FLIPR_MEASURE, + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() # Check entity unique_id value that is generated in FliprEntity base class. entity = entity_registry.async_get("binary_sensor.flipr_myfliprid_ph_status") diff --git a/tests/components/flipr/test_config_flow.py b/tests/components/flipr/test_config_flow.py index 9df77dc0b2a..b99e6af7383 100644 --- a/tests/components/flipr/test_config_flow.py +++ b/tests/components/flipr/test_config_flow.py @@ -1,131 +1,169 @@ """Test the Flipr config flow.""" -from unittest.mock import AsyncMock +from unittest.mock import patch import pytest from requests.exceptions import HTTPError, Timeout -from homeassistant.components.flipr.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant import config_entries +from homeassistant.components.flipr.const import CONF_FLIPR_ID, DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -async def test_full_flow( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_flipr_client: AsyncMock -) -> None: - """Test the full flow.""" +@pytest.fixture(name="mock_setup") +def mock_setups(): + """Prevent setup.""" + with patch( + "homeassistant.components.flipr.async_setup_entry", + return_value=True, + ): + yield + + +async def test_show_form(hass: HomeAssistant) -> None: + """Test we get the form.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert not result["errors"] + assert result["step_id"] == config_entries.SOURCE_USER - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={ - CONF_EMAIL: "dummylogin", - CONF_PASSWORD: "dummypass", - }, - ) + +async def test_invalid_credential(hass: HomeAssistant, mock_setup) -> None: + """Test invalid credential.""" + with patch( + "flipr_api.FliprAPIRestClient.search_flipr_ids", side_effect=HTTPError() + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={ + CONF_EMAIL: "bad_login", + CONF_PASSWORD: "bad_pass", + CONF_FLIPR_ID: "", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_auth"} + + +async def test_nominal_case(hass: HomeAssistant, mock_setup) -> None: + """Test 
valid login form.""" + with patch( + "flipr_api.FliprAPIRestClient.search_flipr_ids", + return_value=["flipid"], + ) as mock_flipr_client: + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={ + CONF_EMAIL: "dummylogin", + CONF_PASSWORD: "dummypass", + CONF_FLIPR_ID: "flipid", + }, + ) + await hass.async_block_till_done() + + assert len(mock_flipr_client.mock_calls) == 1 assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Flipr dummylogin" - assert result["result"].unique_id == "dummylogin" + assert result["title"] == "flipid" assert result["data"] == { CONF_EMAIL: "dummylogin", CONF_PASSWORD: "dummypass", + CONF_FLIPR_ID: "flipid", } -@pytest.mark.parametrize( - ("exception", "expected"), - [ - (Exception("Bad request Boy :) --"), {"base": "unknown"}), - (HTTPError, {"base": "invalid_auth"}), - (Timeout, {"base": "cannot_connect"}), - (ConnectionError, {"base": "cannot_connect"}), - ], -) -async def test_errors( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_flipr_client: AsyncMock, - exception: Exception, - expected: dict[str, str], -) -> None: - """Test we handle any error.""" - mock_flipr_client.search_all_ids.side_effect = exception +async def test_multiple_flip_id(hass: HomeAssistant, mock_setup) -> None: + """Test multiple flipr id adding a config step.""" + with patch( + "flipr_api.FliprAPIRestClient.search_flipr_ids", + return_value=["FLIP1", "FLIP2"], + ) as mock_flipr_client: + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={ + CONF_EMAIL: "dummylogin", + CONF_PASSWORD: "dummypass", + }, + ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={ - CONF_EMAIL: "nada", - CONF_PASSWORD: "nadap", - }, - ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "flipr_id" - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == expected + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_FLIPR_ID: "FLIP2"}, + ) - # Test of recover in normal state after correction of the 1st error - mock_flipr_client.search_all_ids.side_effect = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: "dummylogin", - CONF_PASSWORD: "dummypass", - }, - ) + assert len(mock_flipr_client.mock_calls) == 1 assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Flipr dummylogin" + assert result["title"] == "FLIP2" assert result["data"] == { CONF_EMAIL: "dummylogin", CONF_PASSWORD: "dummypass", + CONF_FLIPR_ID: "FLIP2", } -async def test_no_flipr_found( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_flipr_client: AsyncMock -) -> None: - """Test the case where there is no flipr found.""" +async def test_no_flip_id(hass: HomeAssistant, mock_setup) -> None: + """Test no flipr id found.""" + with patch( + "flipr_api.FliprAPIRestClient.search_flipr_ids", + return_value=[], + ) as mock_flipr_client: + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={ + CONF_EMAIL: "dummylogin", + CONF_PASSWORD: "dummypass", + }, + ) - mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": []} + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": 
"no_flipr_id_found"} + + assert len(mock_flipr_client.mock_calls) == 1 + + +async def test_http_errors(hass: HomeAssistant, mock_setup) -> None: + """Test HTTP Errors.""" + with patch("flipr_api.FliprAPIRestClient.search_flipr_ids", side_effect=Timeout()): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={ + CONF_EMAIL: "nada", + CONF_PASSWORD: "nada", + CONF_FLIPR_ID: "", + }, + ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={ - CONF_EMAIL: "nada", - CONF_PASSWORD: "nadap", - }, - ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "no_flipr_id_found"} + assert result["errors"] == {"base": "cannot_connect"} - # Test of recover in normal state after correction of the 1st error - mock_flipr_client.search_all_ids.return_value = {"flipr": ["myfliprid"], "hub": []} + with patch( + "flipr_api.FliprAPIRestClient.search_flipr_ids", + side_effect=Exception("Bad request Boy :) --"), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={ + CONF_EMAIL: "nada", + CONF_PASSWORD: "nada", + CONF_FLIPR_ID: "", + }, + ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={ - CONF_EMAIL: "dummylogin", - CONF_PASSWORD: "dummypass", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Flipr dummylogin" - assert result["data"] == { - CONF_EMAIL: "dummylogin", - CONF_PASSWORD: "dummypass", - } + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} diff --git a/tests/components/flipr/test_init.py b/tests/components/flipr/test_init.py index 6e9341b1e06..6a49b5b7200 100644 --- a/tests/components/flipr/test_init.py +++ b/tests/components/flipr/test_init.py @@ -1,90 +1,29 @@ """Tests for init methods.""" -from unittest.mock import AsyncMock +from unittest.mock import patch -from homeassistant.components.flipr.const import DOMAIN +from homeassistant.components.flipr.const import CONF_FLIPR_ID, DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant -from . 
import setup_integration - from tests.common import MockConfigEntry -async def test_unload_entry( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_flipr_client: AsyncMock, -) -> None: +async def test_unload_entry(hass: HomeAssistant) -> None: """Test unload entry.""" - - mock_flipr_client.search_all_ids.return_value = { - "flipr": ["myfliprid"], - "hub": ["hubid"], - } - - await setup_integration(hass, mock_config_entry) - assert mock_config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(mock_config_entry.entry_id) - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED - - -async def test_duplicate_config_entries( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_flipr_client: AsyncMock, -) -> None: - """Test duplicate config entries.""" - - mock_config_entry_dup = MockConfigEntry( - version=2, + entry = MockConfigEntry( domain=DOMAIN, - unique_id="toto@toto.com", data={ - CONF_EMAIL: "toto@toto.com", - CONF_PASSWORD: "myPassword", - "flipr_id": "myflipr_id_dup", + CONF_EMAIL: "dummylogin", + CONF_PASSWORD: "dummypass", + CONF_FLIPR_ID: "FLIP1", }, + unique_id="123456", ) - - mock_config_entry.add_to_hass(hass) - # Initialize the first entry with default mock - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - # Initialize the second entry with another flipr id - mock_config_entry_dup.add_to_hass(hass) - assert not await hass.config_entries.async_setup(mock_config_entry_dup.entry_id) - await hass.async_block_till_done() - assert mock_config_entry_dup.state is ConfigEntryState.SETUP_ERROR - - -async def test_migrate_entry( - hass: HomeAssistant, - mock_flipr_client: AsyncMock, -) -> None: - """Test migrate config entry from v1 to v2.""" - - mock_config_entry_v1 = MockConfigEntry( - version=1, - domain=DOMAIN, - title="myfliprid", - unique_id="test_entry_unique_id", - data={ - CONF_EMAIL: "toto@toto.com", - CONF_PASSWORD: "myPassword", - "flipr_id": "myfliprid", - }, - ) - - await setup_integration(hass, mock_config_entry_v1) - assert mock_config_entry_v1.state is ConfigEntryState.LOADED - assert mock_config_entry_v1.version == 2 - assert mock_config_entry_v1.unique_id == "toto@toto.com" - assert mock_config_entry_v1.data == { - CONF_EMAIL: "toto@toto.com", - CONF_PASSWORD: "myPassword", - "flipr_id": "myfliprid", - } + entry.add_to_hass(hass) + with patch("homeassistant.components.flipr.coordinator.FliprAPIRestClient"): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + await hass.config_entries.async_unload(entry.entry_id) + assert entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/flipr/test_select.py b/tests/components/flipr/test_select.py deleted file mode 100644 index d71297f4f1a..00000000000 --- a/tests/components/flipr/test_select.py +++ /dev/null @@ -1,109 +0,0 @@ -"""Test the Flipr select for Hub.""" - -import logging -from unittest.mock import AsyncMock - -from flipr_api.exceptions import FliprError - -from homeassistant.components.select import ( - ATTR_OPTION, - ATTR_OPTIONS, - DOMAIN as SELECT_DOMAIN, - SERVICE_SELECT_OPTION, -) -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry - -_LOGGER = logging.getLogger(__name__) - -SELECT_ENTITY_ID = "select.flipr_hub_myhubid_mode" - - -async def test_entities( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_flipr_client: AsyncMock, -) -> None: - """Test the creation and values of the Flipr select.""" - - mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": ["myhubid"]} - - await setup_integration(hass, mock_config_entry) - - # Check entity unique_id value that is generated in FliprEntity base class. - entity = entity_registry.async_get(SELECT_ENTITY_ID) - _LOGGER.debug("Found entity = %s", entity) - assert entity.unique_id == "myhubid-hubMode" - - mode = hass.states.get(SELECT_ENTITY_ID) - _LOGGER.debug("Found mode = %s", mode) - assert mode - assert mode.state == "planning" - assert mode.attributes.get(ATTR_OPTIONS) == ["auto", "manual", "planning"] - - -async def test_select_actions( - hass: HomeAssistant, - mock_flipr_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test the actions on the Flipr Hub select.""" - - mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": ["myhubid"]} - - await setup_integration(hass, mock_config_entry) - - state = hass.states.get(SELECT_ENTITY_ID) - assert state.state == "planning" - - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: SELECT_ENTITY_ID, ATTR_OPTION: "manual"}, - blocking=True, - ) - state = hass.states.get(SELECT_ENTITY_ID) - assert state.state == "manual" - - -async def test_no_select_found( - hass: HomeAssistant, - mock_flipr_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test the select absence.""" - - mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": []} - - await setup_integration(hass, mock_config_entry) - - assert not hass.states.async_entity_ids(SELECT_ENTITY_ID) - - -async def test_error_flipr_api( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_flipr_client: AsyncMock, -) -> None: - """Test the Flipr sensors error.""" - - mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": ["myhubid"]} - - mock_flipr_client.get_hub_state.side_effect = FliprError( - "Error during flipr data retrieval..." - ) - - await setup_integration(hass, mock_config_entry) - - # Check entity is not generated because of the FliprError raised. 
- entity = entity_registry.async_get(SELECT_ENTITY_ID) - assert entity is None diff --git a/tests/components/flipr/test_sensor.py b/tests/components/flipr/test_sensor.py index 77937e3af54..31eb075469d 100644 --- a/tests/components/flipr/test_sensor.py +++ b/tests/components/flipr/test_sensor.py @@ -1,28 +1,59 @@ """Test the Flipr sensor.""" -from unittest.mock import AsyncMock +from datetime import datetime +from unittest.mock import patch from flipr_api.exceptions import FliprError +from homeassistant.components.flipr.const import CONF_FLIPR_ID, DOMAIN from homeassistant.components.sensor import ATTR_STATE_CLASS, SensorStateClass -from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, UnitOfTemperature +from homeassistant.const import ( + ATTR_UNIT_OF_MEASUREMENT, + CONF_EMAIL, + CONF_PASSWORD, + PERCENTAGE, + UnitOfTemperature, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er - -from . import setup_integration +from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry +# Data for the mocked object returned via flipr_api client. +MOCK_DATE_TIME = datetime(2021, 2, 15, 9, 10, 32, tzinfo=dt_util.UTC) +MOCK_FLIPR_MEASURE = { + "temperature": 10.5, + "ph": 7.03, + "chlorine": 0.23654886, + "red_ox": 657.58, + "date_time": MOCK_DATE_TIME, + "ph_status": "TooLow", + "chlorine_status": "Medium", + "battery": 95.0, +} -async def test_sensors( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_flipr_client: AsyncMock, -) -> None: - """Test the creation and values of the Flipr binary sensors.""" - await setup_integration(hass, mock_config_entry) +async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: + """Test the creation and values of the Flipr sensors.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="test_entry_unique_id", + data={ + CONF_EMAIL: "toto@toto.com", + CONF_PASSWORD: "myPassword", + CONF_FLIPR_ID: "myfliprid", + }, + ) + + entry.add_to_hass(hass) + + with patch( + "flipr_api.FliprAPIRestClient.get_pool_measure_latest", + return_value=MOCK_FLIPR_MEASURE, + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() # Check entity unique_id value that is generated in FliprEntity base class. entity = entity_registry.async_get("sensor.flipr_myfliprid_red_ox") @@ -66,18 +97,27 @@ async def test_sensors( async def test_error_flipr_api_sensors( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_flipr_client: AsyncMock, + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test the Flipr sensors error.""" - - mock_flipr_client.get_pool_measure_latest.side_effect = FliprError( - "Error during flipr data retrieval..." + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="test_entry_unique_id", + data={ + CONF_EMAIL: "toto@toto.com", + CONF_PASSWORD: "myPassword", + CONF_FLIPR_ID: "myfliprid", + }, ) - await setup_integration(hass, mock_config_entry) + entry.add_to_hass(hass) + + with patch( + "flipr_api.FliprAPIRestClient.get_pool_measure_latest", + side_effect=FliprError("Error during flipr data retrieval..."), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() # Check entity is not generated because of the FliprError raised. 
entity = entity_registry.async_get("sensor.flipr_myfliprid_red_ox") diff --git a/tests/components/flipr/test_switch.py b/tests/components/flipr/test_switch.py deleted file mode 100644 index f994ac1bdd3..00000000000 --- a/tests/components/flipr/test_switch.py +++ /dev/null @@ -1,110 +0,0 @@ -"""Test the Flipr switch for Hub.""" - -from unittest.mock import AsyncMock - -from flipr_api.exceptions import FliprError - -from homeassistant.components.switch import ( - DOMAIN as SWITCH_DOMAIN, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, -) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration -from .conftest import MOCK_HUB_STATE_OFF - -from tests.common import MockConfigEntry - -SWITCH_ENTITY_ID = "switch.flipr_hub_myhubid" - - -async def test_entities( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_flipr_client: AsyncMock, -) -> None: - """Test the creation and values of the Flipr switch.""" - - mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": ["myhubid"]} - - await setup_integration(hass, mock_config_entry) - - # Check entity unique_id value that is generated in FliprEntity base class. - entity = entity_registry.async_get(SWITCH_ENTITY_ID) - assert entity.unique_id == "myhubid-hubState" - - state = hass.states.get(SWITCH_ENTITY_ID) - assert state - assert state.state == STATE_ON - - -async def test_switch_actions( - hass: HomeAssistant, - mock_flipr_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test the actions on the Flipr Hub switch.""" - - mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": ["myhubid"]} - - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, - blocking=True, - ) - state = hass.states.get(SWITCH_ENTITY_ID) - assert state.state == STATE_ON - - mock_flipr_client.set_hub_state.return_value = MOCK_HUB_STATE_OFF - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, - blocking=True, - ) - state = hass.states.get(SWITCH_ENTITY_ID) - assert state.state == STATE_OFF - - -async def test_no_switch_found( - hass: HomeAssistant, - mock_flipr_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test the switch absence.""" - - mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": []} - - await setup_integration(hass, mock_config_entry) - - assert not hass.states.async_entity_ids(SWITCH_DOMAIN) - - -async def test_error_flipr_api( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_flipr_client: AsyncMock, -) -> None: - """Test the Flipr sensors error.""" - - mock_flipr_client.search_all_ids.return_value = {"flipr": [], "hub": ["myhubid"]} - - mock_flipr_client.get_hub_state.side_effect = FliprError( - "Error during flipr data retrieval..." - ) - - await setup_integration(hass, mock_config_entry) - - # Check entity is not generated because of the FliprError raised. 
- entity = entity_registry.async_get(SWITCH_ENTITY_ID) - assert entity is None diff --git a/tests/components/flo/conftest.py b/tests/components/flo/conftest.py index 66b56d1f10b..33d467a2abf 100644 --- a/tests/components/flo/conftest.py +++ b/tests/components/flo/conftest.py @@ -16,7 +16,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture -def config_entry() -> MockConfigEntry: +def config_entry(hass): """Config entry version 1 fixture.""" return MockConfigEntry( domain=FLO_DOMAIN, diff --git a/tests/components/flo/test_binary_sensor.py b/tests/components/flo/test_binary_sensor.py index 23a84734b0d..d3032cde1b5 100644 --- a/tests/components/flo/test_binary_sensor.py +++ b/tests/components/flo/test_binary_sensor.py @@ -1,7 +1,5 @@ """Test Flo by Moen binary sensor entities.""" -import pytest - from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.const import ( ATTR_FRIENDLY_NAME, @@ -15,12 +13,9 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID -from tests.common import MockConfigEntry - -@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_binary_sensors( - hass: HomeAssistant, config_entry: MockConfigEntry + hass: HomeAssistant, config_entry, aioclient_mock_fixture ) -> None: """Test Flo by Moen sensors.""" config_entry.add_to_hass(hass) diff --git a/tests/components/flo/test_config_flow.py b/tests/components/flo/test_config_flow.py index f9237e979a6..99f8f315fb2 100644 --- a/tests/components/flo/test_config_flow.py +++ b/tests/components/flo/test_config_flow.py @@ -5,8 +5,6 @@ import json import time from unittest.mock import patch -import pytest - from homeassistant import config_entries from homeassistant.components.flo.const import DOMAIN from homeassistant.const import CONTENT_TYPE_JSON @@ -18,8 +16,7 @@ from .common import TEST_EMAIL_ADDRESS, TEST_PASSWORD, TEST_TOKEN, TEST_USER_ID from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.mark.usefixtures("aioclient_mock_fixture") -async def test_form(hass: HomeAssistant) -> None: +async def test_form(hass: HomeAssistant, aioclient_mock_fixture) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/flo/test_device.py b/tests/components/flo/test_device.py index c3e26e77370..6248bdcd8f9 100644 --- a/tests/components/flo/test_device.py +++ b/tests/components/flo/test_device.py @@ -5,7 +5,6 @@ from unittest.mock import patch from aioflo.errors import RequestError from freezegun.api import FrozenDateTimeFactory -import pytest from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.components.flo.coordinator import FloDeviceDataUpdateCoordinator @@ -15,14 +14,14 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_device( hass: HomeAssistant, - config_entry: MockConfigEntry, + config_entry, + aioclient_mock_fixture, aioclient_mock: AiohttpClientMocker, freezer: FrozenDateTimeFactory, ) -> None: @@ -91,10 +90,10 @@ async def test_device( assert aioclient_mock.call_count == call_count + 6 -@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_device_failures( hass: HomeAssistant, - config_entry: MockConfigEntry, + 
config_entry, + aioclient_mock_fixture, aioclient_mock: AiohttpClientMocker, freezer: FrozenDateTimeFactory, ) -> None: diff --git a/tests/components/flo/test_init.py b/tests/components/flo/test_init.py index 805a6278395..599a91b80fb 100644 --- a/tests/components/flo/test_init.py +++ b/tests/components/flo/test_init.py @@ -1,7 +1,5 @@ """Test init.""" -import pytest - from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant @@ -9,11 +7,10 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID -from tests.common import MockConfigEntry - -@pytest.mark.usefixtures("aioclient_mock_fixture") -async def test_setup_entry(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: +async def test_setup_entry( + hass: HomeAssistant, config_entry, aioclient_mock_fixture +) -> None: """Test migration of config entry from v1.""" config_entry.add_to_hass(hass) assert await async_setup_component( diff --git a/tests/components/flo/test_sensor.py b/tests/components/flo/test_sensor.py index 0c763927296..5fe388c62e1 100644 --- a/tests/components/flo/test_sensor.py +++ b/tests/components/flo/test_sensor.py @@ -1,7 +1,5 @@ """Test Flo by Moen sensor entities.""" -import pytest - from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.components.sensor import ATTR_STATE_CLASS, SensorStateClass from homeassistant.const import ATTR_ENTITY_ID, CONF_PASSWORD, CONF_USERNAME @@ -11,12 +9,12 @@ from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM from .common import TEST_PASSWORD, TEST_USER_ID -from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.mark.usefixtures("aioclient_mock_fixture") -async def test_sensors(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: +async def test_sensors( + hass: HomeAssistant, config_entry, aioclient_mock_fixture +) -> None: """Test Flo by Moen sensors.""" hass.config.units = US_CUSTOMARY_SYSTEM config_entry.add_to_hass(hass) @@ -87,10 +85,10 @@ async def test_sensors(hass: HomeAssistant, config_entry: MockConfigEntry) -> No ) -@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_manual_update_entity( hass: HomeAssistant, - config_entry: MockConfigEntry, + config_entry, + aioclient_mock_fixture, aioclient_mock: AiohttpClientMocker, ) -> None: """Test manual update entity via service homeasasistant/update_entity.""" diff --git a/tests/components/flo/test_services.py b/tests/components/flo/test_services.py index 565f39f69fe..d8837d9c6b6 100644 --- a/tests/components/flo/test_services.py +++ b/tests/components/flo/test_services.py @@ -19,16 +19,15 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID -from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker SWITCH_ENTITY_ID = "switch.smart_water_shutoff_shutoff_valve" -@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_services( hass: HomeAssistant, - config_entry: MockConfigEntry, + config_entry, + aioclient_mock_fixture, aioclient_mock: AiohttpClientMocker, ) -> None: """Test Flo services.""" diff --git a/tests/components/flo/test_switch.py b/tests/components/flo/test_switch.py index 5c124d312a7..85f7ea0f317 100644 --- a/tests/components/flo/test_switch.py +++ b/tests/components/flo/test_switch.py @@ -1,21 +1,16 @@ """Tests for the switch domain for 
Flo by Moen.""" -import pytest - from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.components.switch import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID -from tests.common import MockConfigEntry - -@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_valve_switches( - hass: HomeAssistant, config_entry: MockConfigEntry + hass: HomeAssistant, config_entry, aioclient_mock_fixture ) -> None: """Test Flo by Moen valve switches.""" config_entry.add_to_hass(hass) @@ -30,11 +25,11 @@ async def test_valve_switches( assert hass.states.get(entity_id).state == STATE_ON await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True + DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True ) assert hass.states.get(entity_id).state == STATE_OFF await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True + DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True ) assert hass.states.get(entity_id).state == STATE_ON diff --git a/tests/components/flume/conftest.py b/tests/components/flume/conftest.py deleted file mode 100644 index fb0d0157bbc..00000000000 --- a/tests/components/flume/conftest.py +++ /dev/null @@ -1,167 +0,0 @@ -"""Flume test fixtures.""" - -from collections.abc import Generator -import datetime -from http import HTTPStatus -import json -from unittest.mock import mock_open, patch - -import jwt -import pytest -import requests -from requests_mock.mocker import Mocker - -from homeassistant.components.flume.const import DOMAIN -from homeassistant.const import ( - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - CONF_PASSWORD, - CONF_USERNAME, -) -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - -USER_ID = "test-user-id" -REFRESH_TOKEN = "refresh-token" -TOKEN_URL = "https://api.flumetech.com/oauth/token" -DEVICE_LIST_URL = ( - "https://api.flumetech.com/users/test-user-id/devices?user=true&location=true" -) -BRIDGE_DEVICE = { - "id": "1234", - "type": 1, # Bridge - "location": { - "name": "Bridge Location", - }, - "name": "Flume Bridge", - "connected": True, -} -SENSOR_DEVICE = { - "id": "1234", - "type": 2, # Sensor - "location": { - "name": "Sensor Location", - }, - "name": "Flume Sensor", - "connected": True, -} -DEVICE_LIST = [BRIDGE_DEVICE, SENSOR_DEVICE] -NOTIFICATIONS_URL = "https://api.flumetech.com/users/test-user-id/notifications?limit=50&offset=0&sort_direction=ASC" -NOTIFICATION = { - "id": 111111, - "device_id": "6248148189204194987", - "user_id": USER_ID, - "type": 1, - "message": "Low Flow Leak triggered at Home. 
Water has been running for 2 hours averaging 0.43 gallons every minute.", - "created_datetime": "2020-01-15T16:33:39.000Z", - "title": "Potential Leak Detected!", - "read": True, - "extra": { - "query": { - "request_id": "SYSTEM_TRIGGERED_USAGE_ALERT", - "since_datetime": "2020-01-15 06:33:59", - "until_datetime": "2020-01-15 08:33:59", - "tz": "America/Los_Angeles", - "bucket": "MIN", - "raw": False, - "group_multiplier": 2, - "device_id": ["6248148189204194987"], - } - }, - "event_rule": "Low Flow Leak", -} - -NOTIFICATIONS_LIST = [NOTIFICATION] - - -@pytest.fixture(name="config_entry") -def config_entry_fixture(hass: HomeAssistant) -> MockConfigEntry: - """Fixture to create a config entry.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - title="test-username", - unique_id="test-username", - data={ - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - }, - ) - config_entry.add_to_hass(hass) - return config_entry - - -def encode_access_token() -> str: - """Encode the payload of the access token.""" - expiration_time = datetime.datetime.now() + datetime.timedelta(hours=12) - payload = { - "user_id": USER_ID, - "exp": int(expiration_time.timestamp()), - } - return jwt.encode(payload, key="secret") - - -@pytest.fixture(name="access_token") -def access_token_fixture(requests_mock: Mocker) -> Generator[None]: - """Fixture to setup the access token.""" - token_response = { - "refresh_token": REFRESH_TOKEN, - "access_token": encode_access_token(), - } - requests_mock.register_uri( - "POST", - TOKEN_URL, - status_code=HTTPStatus.OK, - json={"data": [token_response]}, - ) - with patch("builtins.open", mock_open(read_data=json.dumps(token_response))): - yield - - -@pytest.fixture(name="device_list") -def device_list_fixture(requests_mock: Mocker) -> None: - """Fixture to setup the device list API response access token.""" - requests_mock.register_uri( - "GET", - DEVICE_LIST_URL, - status_code=HTTPStatus.OK, - json={ - "data": DEVICE_LIST, - }, - ) - - -@pytest.fixture(name="device_list_timeout") -def device_list_timeout_fixture(requests_mock: Mocker) -> None: - """Fixture to test a timeout when connecting to the device list url.""" - requests_mock.register_uri( - "GET", - DEVICE_LIST_URL, - exc=requests.exceptions.ConnectTimeout, - ) - - -@pytest.fixture(name="device_list_unauthorized") -def device_list_unauthorized_fixture(requests_mock: Mocker) -> None: - """Fixture to test an authorized error from the device list url.""" - requests_mock.register_uri( - "GET", - DEVICE_LIST_URL, - status_code=HTTPStatus.UNAUTHORIZED, - json={}, - ) - - -@pytest.fixture(name="notifications_list") -def notifications_list_fixture(requests_mock: Mocker) -> None: - """Fixture to setup the device list API response access token.""" - requests_mock.register_uri( - "GET", - NOTIFICATIONS_URL, - status_code=HTTPStatus.OK, - json={ - "data": NOTIFICATIONS_LIST, - }, - ) diff --git a/tests/components/flume/test_config_flow.py b/tests/components/flume/test_config_flow.py index c323defc791..706cee44739 100644 --- a/tests/components/flume/test_config_flow.py +++ b/tests/components/flume/test_config_flow.py @@ -1,11 +1,8 @@ """Test the flume config flow.""" -from http import HTTPStatus -from unittest.mock import patch +from unittest.mock import MagicMock, patch -import pytest import requests.exceptions -from requests_mock.mocker import Mocker from homeassistant import config_entries from homeassistant.components.flume.const import DOMAIN 
@@ -18,12 +15,15 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .conftest import DEVICE_LIST, DEVICE_LIST_URL - from tests.common import MockConfigEntry -@pytest.mark.usefixtures("access_token", "device_list") +def _get_mocked_flume_device_list(): + flume_device_list_mock = MagicMock() + type(flume_device_list_mock).device_list = ["mock"] + return flume_device_list_mock + + async def test_form(hass: HomeAssistant) -> None: """Test we get the form and can setup from user input.""" @@ -33,7 +33,17 @@ async def test_form(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} + mock_flume_device_list = _get_mocked_flume_device_list() + with ( + patch( + "homeassistant.components.flume.config_flow.FlumeAuth", + return_value=True, + ), + patch( + "homeassistant.components.flume.config_flow.FlumeDeviceList", + return_value=mock_flume_device_list, + ), patch( "homeassistant.components.flume.async_setup_entry", return_value=True, @@ -61,75 +71,66 @@ async def test_form(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.flume.config.error.invalid_auth"], -) -@pytest.mark.usefixtures("access_token") -async def test_form_invalid_auth(hass: HomeAssistant, requests_mock: Mocker) -> None: +async def test_form_invalid_auth(hass: HomeAssistant) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - requests_mock.register_uri( - "GET", - DEVICE_LIST_URL, - status_code=HTTPStatus.UNAUTHORIZED, - json={"message": "Failure"}, - ) - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - }, - ) + with ( + patch( + "homeassistant.components.flume.config_flow.FlumeAuth", + return_value=True, + ), + patch( + "homeassistant.components.flume.config_flow.FlumeDeviceList", + side_effect=Exception, + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"password": "invalid_auth"} -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.flume.config.error.cannot_connect"], -) -@pytest.mark.usefixtures("access_token", "device_list_timeout") async def test_form_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - }, - ) + with ( + patch( + "homeassistant.components.flume.config_flow.FlumeAuth", + return_value=True, + ), + patch( + "homeassistant.components.flume.config_flow.FlumeDeviceList", + side_effect=requests.exceptions.ConnectionError(), + ), + ): + result2 = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "cannot_connect"} -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - [ - [ - "component.flume.config.abort.reauth_successful", - "component.flume.config.error.cannot_connect", - "component.flume.config.error.invalid_auth", - ] - ], -) -@pytest.mark.usefixtures("access_token") -async def test_reauth(hass: HomeAssistant, requests_mock: Mocker) -> None: +async def test_reauth(hass: HomeAssistant) -> None: """Test we can reauth.""" entry = MockConfigEntry( domain=DOMAIN, @@ -142,32 +143,43 @@ async def test_reauth(hass: HomeAssistant, requests_mock: Mocker) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH, "unique_id": "test@test.org"}, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_PASSWORD: "test-password", - }, - ) + with ( + patch( + "homeassistant.components.flume.config_flow.FlumeAuth", + return_value=True, + ), + patch( + "homeassistant.components.flume.config_flow.FlumeDeviceList", + side_effect=Exception, + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_PASSWORD: "test-password", + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"password": "invalid_auth"} - requests_mock.register_uri( - "GET", - DEVICE_LIST_URL, - exc=requests.exceptions.ConnectTimeout, - ) - with ( patch( - "homeassistant.components.flume.config_flow.os.path.exists", + "homeassistant.components.flume.config_flow.FlumeAuth", return_value=True, ), - patch("homeassistant.components.flume.config_flow.os.unlink") as mock_unlink, + patch( + "homeassistant.components.flume.config_flow.FlumeDeviceList", + side_effect=requests.exceptions.ConnectionError(), + ), ): result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], @@ -175,22 +187,21 @@ async def test_reauth(hass: HomeAssistant, requests_mock: Mocker) -> None: CONF_PASSWORD: "test-password", }, ) - # The existing token file was removed - assert len(mock_unlink.mock_calls) == 1 assert result3["type"] is FlowResultType.FORM assert result3["errors"] == {"base": "cannot_connect"} - requests_mock.register_uri( - "GET", - DEVICE_LIST_URL, - status_code=HTTPStatus.OK, - json={ - "data": DEVICE_LIST, - }, - ) + mock_flume_device_list = _get_mocked_flume_device_list() with ( + patch( + "homeassistant.components.flume.config_flow.FlumeAuth", + return_value=True, + ), + patch( + "homeassistant.components.flume.config_flow.FlumeDeviceList", + return_value=mock_flume_device_list, + ), patch( "homeassistant.components.flume.async_setup_entry", return_value=True, @@ -206,35 +217,3 @@ async def test_reauth(hass: HomeAssistant, requests_mock: Mocker) -> None: assert mock_setup_entry.called assert result4["type"] is FlowResultType.ABORT assert result4["reason"] == "reauth_successful" - - -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.flume.config.error.cannot_connect"], -) -@pytest.mark.usefixtures("access_token") -async def test_form_no_devices(hass: 
HomeAssistant, requests_mock: Mocker) -> None: - """Test a device list response that contains no values will raise an error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - requests_mock.register_uri( - "GET", - DEVICE_LIST_URL, - status_code=HTTPStatus.OK, - json={"data": []}, - ) - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - }, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} diff --git a/tests/components/flume/test_init.py b/tests/components/flume/test_init.py deleted file mode 100644 index 85c01c1051e..00000000000 --- a/tests/components/flume/test_init.py +++ /dev/null @@ -1,135 +0,0 @@ -"""Test the flume init.""" - -from collections.abc import Generator -from unittest.mock import patch - -import pytest -from requests_mock.mocker import Mocker - -from homeassistant import config_entries -from homeassistant.components.flume.const import DOMAIN -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from .conftest import USER_ID - -from tests.common import MockConfigEntry - - -@pytest.fixture(autouse=True) -def platforms_fixture() -> Generator[None]: - """Return the platforms to be loaded for this test.""" - # Arbitrary platform to ensure notifications are loaded - with patch("homeassistant.components.flume.PLATFORMS", [Platform.BINARY_SENSOR]): - yield - - -@pytest.mark.usefixtures("access_token", "device_list") -async def test_setup_config_entry( - hass: HomeAssistant, - requests_mock: Mocker, - config_entry: MockConfigEntry, -) -> None: - """Test load and unload of a ConfigEntry.""" - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is config_entries.ConfigEntryState.LOADED - - assert await hass.config_entries.async_unload(config_entry.entry_id) - assert config_entry.state is config_entries.ConfigEntryState.NOT_LOADED - - -@pytest.mark.usefixtures("access_token", "device_list_timeout") -async def test_device_list_timeout( - hass: HomeAssistant, - requests_mock: Mocker, - config_entry: MockConfigEntry, -) -> None: - """Test error handling for a timeout when listing devices.""" - assert not await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is config_entries.ConfigEntryState.SETUP_RETRY - - -@pytest.mark.usefixtures("access_token", "device_list_unauthorized") -async def test_reauth_when_unauthorized( - hass: HomeAssistant, - requests_mock: Mocker, - config_entry: MockConfigEntry, -) -> None: - """Test error handling for an authentication error when listing devices.""" - assert not await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is config_entries.ConfigEntryState.SETUP_ERROR - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - assert flows[0]["step_id"] == "reauth_confirm" - - -@pytest.mark.usefixtures("access_token", "device_list", "notifications_list") -async def test_list_notifications_service( - hass: HomeAssistant, - requests_mock: Mocker, - config_entry: MockConfigEntry, -) -> None: - """Test the list notifications service.""" - assert await 
hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is config_entries.ConfigEntryState.LOADED - - response = await hass.services.async_call( - DOMAIN, - "list_notifications", - {}, - target={ - "config_entry": config_entry.entry_id, - }, - blocking=True, - return_response=True, - ) - notifications = response.get("notifications") - assert notifications - assert len(notifications) == 1 - assert notifications[0].get("user_id") == USER_ID - - -@pytest.mark.usefixtures("access_token", "device_list", "notifications_list") -async def test_list_notifications_service_config_entry_errors( - hass: HomeAssistant, - requests_mock: Mocker, - config_entry: MockConfigEntry, -) -> None: - """Test error handling for notification service with invalid config entries.""" - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is config_entries.ConfigEntryState.LOADED - assert await hass.config_entries.async_unload(config_entry.entry_id) - assert config_entry.state is config_entries.ConfigEntryState.NOT_LOADED - - with pytest.raises(ValueError, match="Config entry not loaded"): - await hass.services.async_call( - DOMAIN, - "list_notifications", - {}, - target={ - "config_entry": config_entry.entry_id, - }, - blocking=True, - return_response=True, - ) - - with pytest.raises(ValueError, match="Invalid config entry: does-not-exist"): - await hass.services.async_call( - DOMAIN, - "list_notifications", - {}, - target={ - "config_entry": "does-not-exist", - }, - blocking=True, - return_response=True, - ) diff --git a/tests/components/flux/test_switch.py b/tests/components/flux/test_switch.py index ab0e8a556c4..ab85303584f 100644 --- a/tests/components/flux/test_switch.py +++ b/tests/components/flux/test_switch.py @@ -1,6 +1,5 @@ """The tests for the Flux switch platform.""" -from datetime import date, datetime from unittest.mock import patch from freezegun import freeze_time @@ -30,7 +29,7 @@ from tests.components.light.common import MockLight @pytest.fixture(autouse=True) -async def set_utc(hass: HomeAssistant) -> None: +async def set_utc(hass): """Set timezone to UTC.""" await hass.config.async_set_time_zone("UTC") @@ -188,9 +187,7 @@ async def test_flux_when_switch_is_off( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -245,9 +242,7 @@ async def test_flux_before_sunrise( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=5) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -369,9 +364,7 @@ async def test_flux_after_sunrise_before_sunset( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -433,9 +426,7 @@ async def 
test_flux_after_sunset_before_stop( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -498,9 +489,7 @@ async def test_flux_after_stop_before_sunrise( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -562,9 +551,7 @@ async def test_flux_with_custom_start_stop_times( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -631,9 +618,7 @@ async def test_flux_before_sunrise_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -699,9 +684,7 @@ async def test_flux_after_sunrise_before_sunset_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -740,8 +723,10 @@ async def test_flux_after_sunrise_before_sunset_stop_next_day( assert call.data[light.ATTR_XY_COLOR] == [0.439, 0.37] +@pytest.mark.parametrize("x", [0, 1]) async def test_flux_after_sunset_before_midnight_stop_next_day( hass: HomeAssistant, + x, mock_light_entities: list[MockLight], ) -> None: """Test the flux switch after sunset and before stop. 
@@ -767,9 +752,7 @@ async def test_flux_after_sunset_before_midnight_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -835,9 +818,7 @@ async def test_flux_after_sunset_after_midnight_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -903,9 +884,7 @@ async def test_flux_after_stop_before_sunrise_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -968,9 +947,7 @@ async def test_flux_with_custom_colortemps( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -1035,9 +1012,7 @@ async def test_flux_with_custom_brightness( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -1118,9 +1093,7 @@ async def test_flux_with_multiple_lights( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -1187,9 +1160,7 @@ async def test_flux_with_mired( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, now: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -1250,9 +1221,7 @@ async def test_flux_with_rgb( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date( - hass: HomeAssistant, event: str, date: date | datetime | None = None - ) -> datetime | None: + def event_date(hass, event, now=None): if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time diff --git a/tests/components/flux_led/conftest.py b/tests/components/flux_led/conftest.py index d323b321e08..2a67c7b46f7 100644 --- a/tests/components/flux_led/conftest.py +++ b/tests/components/flux_led/conftest.py @@ -1,13 +1,20 @@ """Tests 
for the flux_led integration.""" -from collections.abc import Generator from unittest.mock import patch import pytest +from tests.common import mock_device_registry + + +@pytest.fixture(name="device_reg") +def device_reg_fixture(hass): + """Return an empty, loaded, registry.""" + return mock_device_registry(hass) + @pytest.fixture -def mock_single_broadcast_address() -> Generator[None]: +def mock_single_broadcast_address(): """Mock network's async_async_get_ipv4_broadcast_addresses.""" with patch( "homeassistant.components.network.async_get_ipv4_broadcast_addresses", @@ -17,7 +24,7 @@ def mock_single_broadcast_address() -> Generator[None]: @pytest.fixture -def mock_multiple_broadcast_addresses() -> Generator[None]: +def mock_multiple_broadcast_addresses(): """Mock network's async_async_get_ipv4_broadcast_addresses to return multiple addresses.""" with patch( "homeassistant.components.network.async_get_ipv4_broadcast_addresses", diff --git a/tests/components/flux_led/test_config_flow.py b/tests/components/flux_led/test_config_flow.py index 4332cb69f02..d95bc99f097 100644 --- a/tests/components/flux_led/test_config_flow.py +++ b/tests/components/flux_led/test_config_flow.py @@ -8,7 +8,6 @@ import pytest from homeassistant import config_entries from homeassistant.components import dhcp -from homeassistant.components.flux_led.config_flow import FluxLedConfigFlow from homeassistant.components.flux_led.const import ( CONF_CUSTOM_EFFECT_COLORS, CONF_CUSTOM_EFFECT_SPEED_PCT, @@ -407,20 +406,7 @@ async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_in_progress" - real_is_matching = FluxLedConfigFlow.is_matching - return_values = [] - - def is_matching(self, other_flow) -> bool: - return_values.append(real_is_matching(self, other_flow)) - return return_values[-1] - - with ( - _patch_discovery(), - _patch_wifibulb(), - patch.object( - FluxLedConfigFlow, "is_matching", wraps=is_matching, autospec=True - ), - ): + with _patch_discovery(), _patch_wifibulb(): result3 = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, @@ -431,10 +417,6 @@ async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: ), ) await hass.async_block_till_done() - - # Ensure the is_matching method returned True - assert return_values == [True] - assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "already_in_progress" diff --git a/tests/components/folder_watcher/conftest.py b/tests/components/folder_watcher/conftest.py index ed0adea7a7d..6de9c69d574 100644 --- a/tests/components/folder_watcher/conftest.py +++ b/tests/components/folder_watcher/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations -from collections.abc import Generator from pathlib import Path from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest +from typing_extensions import Generator from homeassistant.components.folder_watcher.const import DOMAIN from homeassistant.config_entries import SOURCE_USER diff --git a/tests/components/folder_watcher/test_config_flow.py b/tests/components/folder_watcher/test_config_flow.py index 3b41b5724fc..745059717fb 100644 --- a/tests/components/folder_watcher/test_config_flow.py +++ b/tests/components/folder_watcher/test_config_flow.py @@ -148,3 +148,39 @@ async def test_form_already_configured(hass: HomeAssistant, tmp_path: Path) -> N assert result["type"] == FlowResultType.ABORT 
assert result["reason"] == "already_configured" + + +async def test_import(hass: HomeAssistant, tmp_path: Path) -> None: + """Test import flow.""" + path = tmp_path.as_posix() + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_FOLDER: path, CONF_PATTERNS: ["*"]}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == f"Folder Watcher {path}" + assert result["options"] == {CONF_FOLDER: path, CONF_PATTERNS: ["*"]} + + +async def test_import_already_configured(hass: HomeAssistant, tmp_path: Path) -> None: + """Test we abort import when entry is already configured.""" + path = tmp_path.as_posix() + + entry = MockConfigEntry( + domain=DOMAIN, + title=f"Folder Watcher {path}", + data={CONF_FOLDER: path}, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_FOLDER: path}, + ) + + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/folder_watcher/test_init.py b/tests/components/folder_watcher/test_init.py index f4a3b7e3630..8309988931a 100644 --- a/tests/components/folder_watcher/test_init.py +++ b/tests/components/folder_watcher/test_init.py @@ -1,68 +1,33 @@ """The tests for the folder_watcher component.""" -from pathlib import Path +import os from types import SimpleNamespace from unittest.mock import Mock, patch -from freezegun.api import FrozenDateTimeFactory - from homeassistant.components import folder_watcher -from homeassistant.components.folder_watcher.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir - -from tests.common import MockConfigEntry +from homeassistant.setup import async_setup_component -async def test_invalid_path_setup( - hass: HomeAssistant, - tmp_path: Path, - freezer: FrozenDateTimeFactory, - issue_registry: ir.IssueRegistry, -) -> None: +async def test_invalid_path_setup(hass: HomeAssistant) -> None: """Test that an invalid path is not set up.""" - freezer.move_to("2022-04-19 10:31:02+00:00") - path = tmp_path.as_posix() - config_entry = MockConfigEntry( - domain=DOMAIN, - source=SOURCE_USER, - title=f"Folder Watcher {path!s}", - data={}, - options={"folder": str(path), "patterns": ["*"]}, - entry_id="1", + assert not await async_setup_component( + hass, + folder_watcher.DOMAIN, + {folder_watcher.DOMAIN: {folder_watcher.CONF_FOLDER: "invalid_path"}}, ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.SETUP_ERROR - assert len(issue_registry.issues) == 1 - - -async def test_valid_path_setup( - hass: HomeAssistant, tmp_path: Path, freezer: FrozenDateTimeFactory -) -> None: +async def test_valid_path_setup(hass: HomeAssistant) -> None: """Test that a valid path is setup.""" - freezer.move_to("2022-04-19 10:31:02+00:00") - path = tmp_path.as_posix() - hass.config.allowlist_external_dirs = {path} - config_entry = MockConfigEntry( - domain=DOMAIN, - source=SOURCE_USER, - title=f"Folder Watcher {path!s}", - data={}, - options={"folder": str(path), "patterns": ["*"]}, - entry_id="1", - ) - - config_entry.add_to_hass(hass) - - await 
hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED + cwd = os.path.join(os.path.dirname(__file__)) + hass.config.allowlist_external_dirs = {cwd} + with patch.object(folder_watcher, "Watcher"): + assert await async_setup_component( + hass, + folder_watcher.DOMAIN, + {folder_watcher.DOMAIN: {folder_watcher.CONF_FOLDER: cwd}}, + ) def test_event() -> None: @@ -71,7 +36,7 @@ def test_event() -> None: class MockPatternMatchingEventHandler: """Mock base class for the pattern matcher event handler.""" - def __init__(self, patterns) -> None: + def __init__(self, patterns): pass with patch( @@ -101,7 +66,7 @@ def test_move_event() -> None: class MockPatternMatchingEventHandler: """Mock base class for the pattern matcher event handler.""" - def __init__(self, patterns) -> None: + def __init__(self, patterns): pass with patch( diff --git a/tests/components/forecast_solar/conftest.py b/tests/components/forecast_solar/conftest.py index 01c1f6d8d32..d1eacad8dbe 100644 --- a/tests/components/forecast_solar/conftest.py +++ b/tests/components/forecast_solar/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Forecast.Solar integration tests.""" -from collections.abc import Generator from datetime import datetime, timedelta from unittest.mock import AsyncMock, MagicMock, patch from forecast_solar import models import pytest +from typing_extensions import Generator from homeassistant.components.forecast_solar.const import ( CONF_AZIMUTH, diff --git a/tests/components/forecast_solar/snapshots/test_init.ambr b/tests/components/forecast_solar/snapshots/test_init.ambr index 6ae4c2f6198..43145bcef9e 100644 --- a/tests/components/forecast_solar/snapshots/test_init.ambr +++ b/tests/components/forecast_solar/snapshots/test_init.ambr @@ -6,8 +6,6 @@ 'longitude': 4.42, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'forecast_solar', 'entry_id': , 'minor_version': 1, diff --git a/tests/components/forked_daapd/conftest.py b/tests/components/forked_daapd/conftest.py index e9f315c030c..b9dd7087aef 100644 --- a/tests/components/forked_daapd/conftest.py +++ b/tests/components/forked_daapd/conftest.py @@ -10,7 +10,7 @@ from tests.common import MockConfigEntry @pytest.fixture(name="config_entry") -def config_entry_fixture() -> MockConfigEntry: +def config_entry_fixture(): """Create hass config_entry fixture.""" data = { CONF_HOST: "192.168.1.1", diff --git a/tests/components/forked_daapd/test_browse_media.py b/tests/components/forked_daapd/test_browse_media.py index cbd278128ae..805bcac3976 100644 --- a/tests/components/forked_daapd/test_browse_media.py +++ b/tests/components/forked_daapd/test_browse_media.py @@ -3,6 +3,8 @@ from http import HTTPStatus from unittest.mock import patch +import pytest + from homeassistant.components import media_source, spotify from homeassistant.components.forked_daapd.browse_media import ( MediaContent, @@ -17,16 +19,13 @@ from homeassistant.components.websocket_api import TYPE_RESULT from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry from tests.typing import ClientSessionGenerator, WebSocketGenerator TEST_MASTER_ENTITY_NAME = "media_player.owntone_server" async def test_async_browse_media( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - config_entry: MockConfigEntry, + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, config_entry ) -> None: """Test browse media.""" @@ 
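Several conftest hunks above (folder_watcher, forecast_solar, freedompro) restore the older `typing_extensions.Generator` spelling in place of `collections.abc.Generator`; either way the annotation describes a fixture that yields a value. A small, self-contained sketch of such a typed fixture, with an assumed name and a deliberately harmless patch target so it runs anywhere:

    from collections.abc import Generator
    from unittest.mock import AsyncMock, patch

    import pytest

    @pytest.fixture
    def example_sleep() -> Generator[AsyncMock]:
        """Illustrative typed fixture: patch asyncio.sleep and yield the mock."""
        with patch("asyncio.sleep", new_callable=AsyncMock) as mock_sleep:
            yield mock_sleep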
-204,9 +203,7 @@ async def test_async_browse_media( async def test_async_browse_media_not_found( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - config_entry: MockConfigEntry, + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, config_entry ) -> None: """Test browse media not found.""" @@ -264,9 +261,7 @@ async def test_async_browse_media_not_found( async def test_async_browse_spotify( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - config_entry: MockConfigEntry, + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, config_entry ) -> None: """Test browsing spotify.""" @@ -318,9 +313,7 @@ async def test_async_browse_spotify( async def test_async_browse_media_source( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - config_entry: MockConfigEntry, + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, config_entry ) -> None: """Test browsing media_source.""" @@ -368,9 +361,7 @@ async def test_async_browse_media_source( async def test_async_browse_image( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - config_entry: MockConfigEntry, + hass: HomeAssistant, hass_client: ClientSessionGenerator, config_entry ) -> None: """Test browse media images.""" @@ -425,7 +416,8 @@ async def test_async_browse_image( async def test_async_browse_image_missing( hass: HomeAssistant, hass_client: ClientSessionGenerator, - config_entry: MockConfigEntry, + config_entry, + caplog: pytest.LogCaptureFixture, ) -> None: """Test browse media images with no image available.""" diff --git a/tests/components/forked_daapd/test_config_flow.py b/tests/components/forked_daapd/test_config_flow.py index 076fffef59b..593b527009b 100644 --- a/tests/components/forked_daapd/test_config_flow.py +++ b/tests/components/forked_daapd/test_config_flow.py @@ -67,7 +67,7 @@ async def test_show_form(hass: HomeAssistant) -> None: assert result["step_id"] == "user" -async def test_config_flow(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: +async def test_config_flow(hass: HomeAssistant, config_entry) -> None: """Test that the user step works.""" with ( patch( @@ -102,9 +102,7 @@ async def test_config_flow(hass: HomeAssistant, config_entry: MockConfigEntry) - assert result["type"] is FlowResultType.ABORT -async def test_zeroconf_updates_title( - hass: HomeAssistant, config_entry: MockConfigEntry -) -> None: +async def test_zeroconf_updates_title(hass: HomeAssistant, config_entry) -> None: """Test that zeroconf updates title and aborts with same host.""" MockConfigEntry(domain=DOMAIN, data={CONF_HOST: "different host"}).add_to_hass(hass) config_entry.add_to_hass(hass) @@ -127,9 +125,7 @@ async def test_zeroconf_updates_title( assert len(hass.config_entries.async_entries(DOMAIN)) == 2 -async def test_config_flow_no_websocket( - hass: HomeAssistant, config_entry: MockConfigEntry -) -> None: +async def test_config_flow_no_websocket(hass: HomeAssistant, config_entry) -> None: """Test config flow setup without websocket enabled on server.""" with patch( "homeassistant.components.forked_daapd.config_flow.ForkedDaapdAPI.test_connection", @@ -228,7 +224,7 @@ async def test_config_flow_zeroconf_valid(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM -async def test_options_flow(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: +async def test_options_flow(hass: HomeAssistant, config_entry) -> None: """Test config flow options.""" with patch( @@ -255,9 +251,7 @@ async def test_options_flow(hass: HomeAssistant, config_entry: 
MockConfigEntry) assert result["type"] is FlowResultType.CREATE_ENTRY -async def test_async_setup_entry_not_ready( - hass: HomeAssistant, config_entry: MockConfigEntry -) -> None: +async def test_async_setup_entry_not_ready(hass: HomeAssistant, config_entry) -> None: """Test that a PlatformNotReady exception is thrown during platform setup.""" with patch( diff --git a/tests/components/forked_daapd/test_media_player.py b/tests/components/forked_daapd/test_media_player.py index 6d7d267eb63..dd2e03f435f 100644 --- a/tests/components/forked_daapd/test_media_player.py +++ b/tests/components/forked_daapd/test_media_player.py @@ -1,7 +1,6 @@ """The media player tests for the forked_daapd media player platform.""" -from typing import Any -from unittest.mock import Mock, patch +from unittest.mock import patch import pytest @@ -64,9 +63,9 @@ from homeassistant.const import ( STATE_PAUSED, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant, ServiceResponse +from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry, async_mock_signal +from tests.common import async_mock_signal TEST_MASTER_ENTITY_NAME = "media_player.owntone_server" TEST_ZONE_ENTITY_NAMES = [ @@ -289,7 +288,7 @@ SAMPLE_PLAYLISTS = [{"id": 7, "name": "test_playlist", "uri": "library:playlist: @pytest.fixture(name="get_request_return_values") -async def get_request_return_values_fixture() -> dict[str, Any]: +async def get_request_return_values_fixture(): """Get request return values we can change later.""" return { "config": SAMPLE_CONFIG, @@ -300,11 +299,7 @@ async def get_request_return_values_fixture() -> dict[str, Any]: @pytest.fixture(name="mock_api_object") -async def mock_api_object_fixture( - hass: HomeAssistant, - config_entry: MockConfigEntry, - get_request_return_values: dict[str, Any], -) -> Mock: +async def mock_api_object_fixture(hass, config_entry, get_request_return_values): """Create mock api fixture.""" async def get_request_side_effect(update_type): @@ -346,9 +341,8 @@ async def mock_api_object_fixture( return mock_api.return_value -@pytest.mark.usefixtures("mock_api_object") async def test_unload_config_entry( - hass: HomeAssistant, config_entry: MockConfigEntry + hass: HomeAssistant, config_entry, mock_api_object ) -> None: """Test the player is set unavailable when the config entry is unloaded.""" assert hass.states.get(TEST_MASTER_ENTITY_NAME) @@ -358,8 +352,7 @@ async def test_unload_config_entry( assert hass.states.get(TEST_ZONE_ENTITY_NAMES[0]).state == STATE_UNAVAILABLE -@pytest.mark.usefixtures("mock_api_object") -def test_master_state(hass: HomeAssistant) -> None: +def test_master_state(hass: HomeAssistant, mock_api_object) -> None: """Test master state attributes.""" state = hass.states.get(TEST_MASTER_ENTITY_NAME) assert state.state == STATE_PAUSED @@ -380,7 +373,7 @@ def test_master_state(hass: HomeAssistant) -> None: async def test_no_update_when_get_request_returns_none( - hass: HomeAssistant, config_entry: MockConfigEntry, mock_api_object: Mock + hass: HomeAssistant, config_entry, mock_api_object ) -> None: """Test when get request returns None.""" @@ -406,12 +399,8 @@ async def test_no_update_when_get_request_returns_none( async def _service_call( - hass: HomeAssistant, - entity_name: str, - service: str, - additional_service_data: dict[str, Any] | None = None, - blocking: bool = True, -) -> ServiceResponse: + hass, entity_name, service, additional_service_data=None, blocking=True +): if additional_service_data is None: additional_service_data = {} return 
await hass.services.async_call( @@ -422,7 +411,7 @@ async def _service_call( ) -async def test_zone(hass: HomeAssistant, mock_api_object: Mock) -> None: +async def test_zone(hass: HomeAssistant, mock_api_object) -> None: """Test zone attributes and methods.""" zone_entity_name = TEST_ZONE_ENTITY_NAMES[0] state = hass.states.get(zone_entity_name) @@ -461,7 +450,7 @@ async def test_zone(hass: HomeAssistant, mock_api_object: Mock) -> None: mock_api_object.change_output.assert_any_call(output_id, selected=True) -async def test_last_outputs_master(hass: HomeAssistant, mock_api_object: Mock) -> None: +async def test_last_outputs_master(hass: HomeAssistant, mock_api_object) -> None: """Test restoration of _last_outputs.""" # Test turning on sends API call await _service_call(hass, TEST_MASTER_ENTITY_NAME, SERVICE_TURN_ON) @@ -478,9 +467,7 @@ async def test_last_outputs_master(hass: HomeAssistant, mock_api_object: Mock) - async def test_bunch_of_stuff_master( - hass: HomeAssistant, - get_request_return_values: dict[str, Any], - mock_api_object: Mock, + hass: HomeAssistant, get_request_return_values, mock_api_object ) -> None: """Run bunch of stuff.""" await _service_call(hass, TEST_MASTER_ENTITY_NAME, SERVICE_TURN_ON) @@ -564,8 +551,9 @@ async def test_bunch_of_stuff_master( mock_api_object.clear_queue.assert_called_once() -@pytest.mark.usefixtures("mock_api_object") -async def test_async_play_media_from_paused(hass: HomeAssistant) -> None: +async def test_async_play_media_from_paused( + hass: HomeAssistant, mock_api_object +) -> None: """Test async play media from paused.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -583,9 +571,7 @@ async def test_async_play_media_from_paused(hass: HomeAssistant) -> None: async def test_async_play_media_announcement_from_stopped( - hass: HomeAssistant, - get_request_return_values: dict[str, Any], - mock_api_object: Mock, + hass: HomeAssistant, get_request_return_values, mock_api_object ) -> None: """Test async play media announcement (from stopped).""" updater_update = mock_api_object.start_websocket_handler.call_args[0][2] @@ -611,8 +597,9 @@ async def test_async_play_media_announcement_from_stopped( assert state.last_updated > initial_state.last_updated -@pytest.mark.usefixtures("mock_api_object") -async def test_async_play_media_unsupported(hass: HomeAssistant) -> None: +async def test_async_play_media_unsupported( + hass: HomeAssistant, mock_api_object +) -> None: """Test async play media on unsupported media type.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -629,7 +616,7 @@ async def test_async_play_media_unsupported(hass: HomeAssistant) -> None: async def test_async_play_media_announcement_tts_timeout( - hass: HomeAssistant, mock_api_object: Mock + hass: HomeAssistant, mock_api_object ) -> None: """Test async play media announcement with TTS timeout.""" mock_api_object.add_to_queue.side_effect = None @@ -651,7 +638,7 @@ async def test_async_play_media_announcement_tts_timeout( async def test_use_pipe_control_with_no_api( - hass: HomeAssistant, mock_api_object: Mock + hass: HomeAssistant, mock_api_object ) -> None: """Test using pipe control with no api set.""" await _service_call( @@ -664,8 +651,7 @@ async def test_use_pipe_control_with_no_api( assert mock_api_object.start_playback.call_count == 0 -@pytest.mark.usefixtures("mock_api_object") -async def test_clear_source(hass: HomeAssistant) -> None: +async def test_clear_source(hass: HomeAssistant, mock_api_object) -> None: 
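Many forked_daapd hunks above trade `@pytest.mark.usefixtures("mock_api_object")` for an explicit `mock_api_object` parameter. The two pytest styles differ only in whether the test body receives the fixture's value; a standalone comparison using made-up names:

    import pytest

    @pytest.fixture(name="example_api")
    def example_api_fixture() -> dict:
        """Stand-in for a fixture like mock_api_object."""
        return {"connected": True}

    # Explicit parameter: the test needs the fixture's return value.
    def test_uses_value(example_api: dict) -> None:
        assert example_api["connected"]

    # usefixtures marker: the fixture runs for its side effects only and the
    # test body never touches the object it returns.
    @pytest.mark.usefixtures("example_api")
    def test_side_effect_only() -> None:
        assert True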
"""Test changing source to clear.""" await _service_call( hass, @@ -679,11 +665,8 @@ async def test_clear_source(hass: HomeAssistant) -> None: @pytest.fixture(name="pipe_control_api_object") async def pipe_control_api_object_fixture( - hass: HomeAssistant, - config_entry: MockConfigEntry, - get_request_return_values: dict[str, Any], - mock_api_object: Mock, -) -> Mock: + hass, config_entry, get_request_return_values, mock_api_object +): """Fixture for mock librespot_java api.""" with patch( "homeassistant.components.forked_daapd.media_player.LibrespotJavaAPI", @@ -714,9 +697,9 @@ async def pipe_control_api_object_fixture( async def test_librespot_java_stuff( hass: HomeAssistant, - get_request_return_values: dict[str, Any], - mock_api_object: Mock, - pipe_control_api_object: Mock, + get_request_return_values, + mock_api_object, + pipe_control_api_object, ) -> None: """Test options update and librespot-java stuff.""" state = hass.states.get(TEST_MASTER_ENTITY_NAME) @@ -751,8 +734,9 @@ async def test_librespot_java_stuff( assert state.attributes[ATTR_MEDIA_ALBUM_NAME] == "some album" -@pytest.mark.usefixtures("pipe_control_api_object") -async def test_librespot_java_play_announcement(hass: HomeAssistant) -> None: +async def test_librespot_java_play_announcement( + hass: HomeAssistant, pipe_control_api_object +) -> None: """Test play announcement with librespot-java pipe.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -771,7 +755,7 @@ async def test_librespot_java_play_announcement(hass: HomeAssistant) -> None: async def test_librespot_java_play_media_pause_timeout( - hass: HomeAssistant, pipe_control_api_object: Mock + hass: HomeAssistant, pipe_control_api_object ) -> None: """Test play media with librespot-java pipe.""" # test media play with pause timeout @@ -794,7 +778,7 @@ async def test_librespot_java_play_media_pause_timeout( assert state.last_updated > initial_state.last_updated -async def test_unsupported_update(hass: HomeAssistant, mock_api_object: Mock) -> None: +async def test_unsupported_update(hass: HomeAssistant, mock_api_object) -> None: """Test unsupported update type.""" last_updated = hass.states.get(TEST_MASTER_ENTITY_NAME).last_updated updater_update = mock_api_object.start_websocket_handler.call_args[0][2] @@ -803,9 +787,7 @@ async def test_unsupported_update(hass: HomeAssistant, mock_api_object: Mock) -> assert hass.states.get(TEST_MASTER_ENTITY_NAME).last_updated == last_updated -async def test_invalid_websocket_port( - hass: HomeAssistant, config_entry: MockConfigEntry -) -> None: +async def test_invalid_websocket_port(hass: HomeAssistant, config_entry) -> None: """Test invalid websocket port on async_init.""" with patch( "homeassistant.components.forked_daapd.media_player.ForkedDaapdAPI", @@ -818,7 +800,7 @@ async def test_invalid_websocket_port( assert hass.states.get(TEST_MASTER_ENTITY_NAME).state == STATE_UNAVAILABLE -async def test_websocket_disconnect(hass: HomeAssistant, mock_api_object: Mock) -> None: +async def test_websocket_disconnect(hass: HomeAssistant, mock_api_object) -> None: """Test websocket disconnection.""" assert hass.states.get(TEST_MASTER_ENTITY_NAME).state != STATE_UNAVAILABLE assert hass.states.get(TEST_ZONE_ENTITY_NAMES[0]).state != STATE_UNAVAILABLE @@ -829,9 +811,7 @@ async def test_websocket_disconnect(hass: HomeAssistant, mock_api_object: Mock) assert hass.states.get(TEST_ZONE_ENTITY_NAMES[0]).state == STATE_UNAVAILABLE -async def test_async_play_media_enqueue( - hass: HomeAssistant, mock_api_object: 
Mock -) -> None: +async def test_async_play_media_enqueue(hass: HomeAssistant, mock_api_object) -> None: """Test async play media with different enqueue options.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -907,7 +887,7 @@ async def test_async_play_media_enqueue( ) -async def test_play_owntone_media(hass: HomeAssistant, mock_api_object: Mock) -> None: +async def test_play_owntone_media(hass: HomeAssistant, mock_api_object) -> None: """Test async play media with an owntone source.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -933,7 +913,7 @@ async def test_play_owntone_media(hass: HomeAssistant, mock_api_object: Mock) -> ) -async def test_play_spotify_media(hass: HomeAssistant, mock_api_object: Mock) -> None: +async def test_play_spotify_media(hass: HomeAssistant, mock_api_object) -> None: """Test async play media with a spotify source.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -957,7 +937,7 @@ async def test_play_spotify_media(hass: HomeAssistant, mock_api_object: Mock) -> ) -async def test_play_media_source(hass: HomeAssistant, mock_api_object: Mock) -> None: +async def test_play_media_source(hass: HomeAssistant, mock_api_object) -> None: """Test async play media with a spotify source.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) with patch( diff --git a/tests/components/freebox/test_alarm_control_panel.py b/tests/components/freebox/test_alarm_control_panel.py index b02e4c974ff..e4ee8f63b2c 100644 --- a/tests/components/freebox/test_alarm_control_panel.py +++ b/tests/components/freebox/test_alarm_control_panel.py @@ -8,7 +8,6 @@ from freezegun.api import FrozenDateTimeFactory from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntityFeature, - AlarmControlPanelState, ) from homeassistant.components.freebox import SCAN_INTERVAL from homeassistant.const import ( @@ -17,6 +16,11 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMED, + STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -55,7 +59,7 @@ async def test_alarm_changed_from_external( # Initial state assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == AlarmControlPanelState.ARMING + == STATE_ALARM_ARMING ) # Now simulate a changed status @@ -69,7 +73,7 @@ async def test_alarm_changed_from_external( assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == AlarmControlPanelState.ARMED_AWAY + == STATE_ALARM_ARMED_AWAY ) @@ -94,7 +98,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non # Initial state: arm_away assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == AlarmControlPanelState.ARMED_AWAY + == STATE_ALARM_ARMED_AWAY ) # Now call for a change -> disarmed @@ -109,7 +113,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == AlarmControlPanelState.DISARMED + == STATE_ALARM_DISARMED ) # Now call for a change -> arm_away @@ -124,7 +128,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == AlarmControlPanelState.ARMING + == STATE_ALARM_ARMING ) # Now 
call for a change -> arm_home @@ -140,7 +144,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == AlarmControlPanelState.ARMED_HOME + == STATE_ALARM_ARMED_HOME ) # Now call for a change -> trigger @@ -155,7 +159,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == AlarmControlPanelState.TRIGGERED + == STATE_ALARM_TRIGGERED ) diff --git a/tests/components/freedompro/conftest.py b/tests/components/freedompro/conftest.py index 8e581673b92..91eecc24f27 100644 --- a/tests/components/freedompro/conftest.py +++ b/tests/components/freedompro/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations -from collections.abc import Generator from copy import deepcopy from typing import Any from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.freedompro.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/freedompro/test_cover.py b/tests/components/freedompro/test_cover.py index bcba1e0b917..ba48da1d1d4 100644 --- a/tests/components/freedompro/test_cover.py +++ b/tests/components/freedompro/test_cover.py @@ -5,16 +5,14 @@ from unittest.mock import ANY, patch import pytest -from homeassistant.components.cover import ( - ATTR_POSITION, - DOMAIN as COVER_DOMAIN, - CoverState, -) +from homeassistant.components.cover import ATTR_POSITION, DOMAIN as COVER_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, + STATE_CLOSED, + STATE_OPEN, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -58,7 +56,7 @@ async def test_cover_get_state( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes.get("friendly_name") == name entry = entity_registry.async_get(entity_id) @@ -82,7 +80,7 @@ async def test_cover_get_state( assert entry assert entry.unique_id == uid - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN @pytest.mark.parametrize( @@ -109,7 +107,7 @@ async def test_cover_set_position( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes.get("friendly_name") == name entry = entity_registry.async_get(entity_id) @@ -135,7 +133,7 @@ async def test_cover_set_position( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes["current_position"] == 33 @@ -173,7 +171,7 @@ async def test_cover_close( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes.get("friendly_name") == name entry = entity_registry.async_get(entity_id) @@ -198,7 +196,7 @@ async def test_cover_close( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED @pytest.mark.parametrize( @@ -225,7 +223,7 @@ async def test_cover_open( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert 
state.attributes.get("friendly_name") == name entry = entity_registry.async_get(entity_id) @@ -251,4 +249,4 @@ async def test_cover_open( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN diff --git a/tests/components/freedompro/test_lock.py b/tests/components/freedompro/test_lock.py index a17217c49e8..94f5609ee47 100644 --- a/tests/components/freedompro/test_lock.py +++ b/tests/components/freedompro/test_lock.py @@ -7,9 +7,8 @@ from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, - LockState, ) -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.const import ATTR_ENTITY_ID, STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity @@ -40,7 +39,7 @@ async def test_lock_get_state( entity_id = "lock.lock" state = hass.states.get(entity_id) assert state - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED assert state.attributes.get("friendly_name") == "lock" entry = entity_registry.async_get(entity_id) @@ -64,7 +63,7 @@ async def test_lock_get_state( assert entry assert entry.unique_id == uid - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKED async def test_lock_set_unlock( @@ -88,7 +87,7 @@ async def test_lock_set_unlock( state = hass.states.get(entity_id) assert state - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKED assert state.attributes.get("friendly_name") == "lock" entry = entity_registry.async_get(entity_id) @@ -114,7 +113,7 @@ async def test_lock_set_unlock( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED async def test_lock_set_lock( @@ -127,7 +126,7 @@ async def test_lock_set_lock( entity_id = "lock.lock" state = hass.states.get(entity_id) assert state - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED assert state.attributes.get("friendly_name") == "lock" entry = entity_registry.async_get(entity_id) @@ -154,4 +153,4 @@ async def test_lock_set_lock( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKED diff --git a/tests/components/fritz/conftest.py b/tests/components/fritz/conftest.py index fa92fa37c04..bb049f067b4 100644 --- a/tests/components/fritz/conftest.py +++ b/tests/components/fritz/conftest.py @@ -30,7 +30,7 @@ class FritzServiceMock(Service): class FritzConnectionMock: """FritzConnection mocking.""" - def __init__(self, services) -> None: + def __init__(self, services): """Init Mocking class.""" self.modelname = MOCK_MODELNAME self.call_action = self._call_action diff --git a/tests/components/fritz/const.py b/tests/components/fritz/const.py index acd96879b1e..0d1222dfcda 100644 --- a/tests/components/fritz/const.py +++ b/tests/components/fritz/const.py @@ -655,23 +655,7 @@ MOCK_MESH_DATA = { "cur_data_rate_tx": 0, "cur_availability_rx": 99, "cur_availability_tx": 99, - }, - { - "uid": "nl-79", - "type": "LAN", - "state": "DISCONNECTED", - "last_connected": 1642872667, - "node_1_uid": "n-167", - "node_2_uid": "n-76", - "node_interface_1_uid": "ni-140", - "node_interface_2_uid": "ni-77", - "max_data_rate_rx": 1000000, - "max_data_rate_tx": 1000000, 
- "cur_data_rate_rx": 0, - "cur_data_rate_tx": 0, - "cur_availability_rx": 99, - "cur_availability_tx": 99, - }, + } ], } ], @@ -920,14 +904,6 @@ MOCK_HOST_ATTRIBUTES_DATA = [ }, ] -MOCK_CALL_DEFLECTION_DATA = { - "X_AVM-DE_OnTel1": { - "GetDeflections": { - "NewDeflectionList": "00fromAll+1234657890eImmediately" - } - } -} - MOCK_USER_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][0] MOCK_USER_INPUT_ADVANCED = MOCK_USER_DATA MOCK_USER_INPUT_SIMPLE = { diff --git a/tests/components/fritz/snapshots/test_button.ambr b/tests/components/fritz/snapshots/test_button.ambr deleted file mode 100644 index ed0b0e72160..00000000000 --- a/tests/components/fritz/snapshots/test_button.ambr +++ /dev/null @@ -1,235 +0,0 @@ -# serializer version: 1 -# name: test_button_setup[button.mock_title_cleanup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_title_cleanup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cleanup', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cleanup', - 'unique_id': '1C:ED:6F:12:34:11-cleanup', - 'unit_of_measurement': None, - }) -# --- -# name: test_button_setup[button.mock_title_cleanup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Cleanup', - }), - 'context': , - 'entity_id': 'button.mock_title_cleanup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_button_setup[button.mock_title_firmware_update-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_title_firmware_update', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Firmware update', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'firmware_update', - 'unique_id': '1C:ED:6F:12:34:11-firmware_update', - 'unit_of_measurement': None, - }) -# --- -# name: test_button_setup[button.mock_title_firmware_update-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'update', - 'friendly_name': 'Mock Title Firmware update', - }), - 'context': , - 'entity_id': 'button.mock_title_firmware_update', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_button_setup[button.mock_title_reconnect-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_title_reconnect', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Reconnect', - 'platform': 
'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reconnect', - 'unique_id': '1C:ED:6F:12:34:11-reconnect', - 'unit_of_measurement': None, - }) -# --- -# name: test_button_setup[button.mock_title_reconnect-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'Mock Title Reconnect', - }), - 'context': , - 'entity_id': 'button.mock_title_reconnect', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_button_setup[button.mock_title_restart-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_title_restart', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restart', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1C:ED:6F:12:34:11-reboot', - 'unit_of_measurement': None, - }) -# --- -# name: test_button_setup[button.mock_title_restart-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'Mock Title Restart', - }), - 'context': , - 'entity_id': 'button.mock_title_restart', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_button_setup[button.printer_wake_on_lan-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.printer_wake_on_lan', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:lan-pending', - 'original_name': 'printer Wake on LAN', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'AA:BB:CC:00:11:22_wake_on_lan', - 'unit_of_measurement': None, - }) -# --- -# name: test_button_setup[button.printer_wake_on_lan-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'printer Wake on LAN', - 'icon': 'mdi:lan-pending', - }), - 'context': , - 'entity_id': 'button.printer_wake_on_lan', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/fritz/snapshots/test_diagnostics.ambr b/tests/components/fritz/snapshots/test_diagnostics.ambr deleted file mode 100644 index 53f7093a21b..00000000000 --- a/tests/components/fritz/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,69 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'device_info': dict({ - 'client_devices': list([ - dict({ - 'connected_to': 'fritz.box', - 'connection_type': 'LAN', - 'hostname': 'printer', - 'is_connected': True, - 'wan_access': True, - }), - ]), - 'connection_type': 'WANPPPConnection', - 'current_firmware': '7.29', - 'discovered_services': list([ - 'DeviceInfo1', - 'Hosts1', - 'LANEthernetInterfaceConfig1', - 'Layer3Forwarding1', - 'UserInterface1', - 'WANCommonIFC1', - 
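The deleted .ambr files in the fritz hunks above are syrupy snapshot stores. The tests that consume them generally take this shape, shown here as a sketch with an entity id borrowed from the snapshots and an otherwise assumed fixture set:

    from syrupy.assertion import SnapshotAssertion

    from homeassistant.core import HomeAssistant

    async def test_sensor_setup_example(
        hass: HomeAssistant, snapshot: SnapshotAssertion
    ) -> None:
        """Compare an entity's state against the stored .ambr snapshot."""
        state = hass.states.get("sensor.mock_title_connection_uptime")
        assert state == snapshot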
'WANCommonInterfaceConfig1', - 'WANDSLInterfaceConfig1', - 'WANIPConn1', - 'WANPPPConnection1', - 'WLANConfiguration1', - 'X_AVM-DE_Homeauto1', - 'X_AVM-DE_HostFilter1', - ]), - 'is_router': True, - 'last_exception': None, - 'last_update success': True, - 'latest_firmware': None, - 'mesh_role': 'master', - 'model': 'FRITZ!Box 7530 AX', - 'unique_id': '1C:ED:XX:XX:34:11', - 'update_available': False, - 'wan_link_properties': dict({ - 'NewLayer1DownstreamMaxBitRate': 318557000, - 'NewLayer1UpstreamMaxBitRate': 51805000, - 'NewPhysicalLinkStatus': 'Up', - 'NewWANAccessType': 'DSL', - }), - }), - 'entry': dict({ - 'data': dict({ - 'host': 'fake_host', - 'password': '**REDACTED**', - 'port': '1234', - 'ssl': False, - 'username': '**REDACTED**', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'fritz', - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': None, - 'version': 1, - }), - }) -# --- diff --git a/tests/components/fritz/snapshots/test_image.ambr b/tests/components/fritz/snapshots/test_image.ambr index 6ef7413998b..a51ab015a89 100644 --- a/tests/components/fritz/snapshots/test_image.ambr +++ b/tests/components/fritz/snapshots/test_image.ambr @@ -1,10 +1,10 @@ # serializer version: 1 # name: test_image_entity[fc_data0] - b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00t\x00\x00\x00t\x01\x00\x00\x00\x00|\xe1+\x8a\x00\x00\x00\xe0IDATx\xda\x9dU\xc1\x11\xc30\x0c\xe2\xb2\x00\xfbo\xc9\x06*\xc8\xee\xa3\xaf^\xf0\xc7\xce\x03\t\x84\xa4\x00\x00G\xa04~\xe2\xc1\xef\xf9\xf7m\xf0PF\x83\xf4\xb3\xc0\x07\xb8\xd8\xf1\xb3\xc5\x87D\x8f\x87\x93\x8f\xd8\xe1\x9d\xda@g\x9fJ\x7fB\xdcS\xd5?\xd4\xad\xdf\xee\xa5\x84*\xf4\x1b\x88t\x80\xfd\xe7{<\xd6{\x07P\x14\xbc\xc7\x1f\xdb\xb4DT\xf1\x8f\xf6\x841\x015\xf5\x97\x9dK\xf1w\n\n\xff\xa1m@\xf9.\xf4\x7f\x85\x9bC44\xfd\xe7\xfcq0\x01\xaa\xfc;\x00\xbe\xa4\xa6~J\xdd\xee\x18\xa8\xf1/\xc4\xb5[\x88\x8d\xff:\xca\xe3\x01\x9b\xfd\x93\xe9\x89\xfdD\xd5\xff\x99\xbf\xbb?ba5\xffv\xff,\xd0\xce\xff]\x7fn\xff4A\xb9\xff\xa8\xd5_\xe29\xc7=\xa8\xdc\xbf\xf7'\x04t\xfa\x95\xee\xdfI\xec\xea\xef\x00\x8a\x93\x85\xfe\x0f\x80/\xb1\xfdI7\xe3s\x00\x00\x00\x00IEND\xaeB`\x82" + b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x94\x00\x00\x00\x94\x01\x00\x00\x00\x00]G=y\x00\x00\x00\xf5IDATx\xda\xedVQ\x0eC!\x0c"\xbb@\xef\x7fKn\xe0\x00\xfd\xdb\xcf6\xf9|\xc6\xc4\xc6\x0f\xd2\x02\xadb},\xe2\xb9\xfb\xe5\x0e\xc0(\x18\xf2\x84/|\xaeo\xef\x847\xda\x14\x1af\x1c\xde\xe3\x19(X\tKxN\xb2\x87\x17j9\x1d\xd7\xb7o\x8c44\x1a3\xbe\x16x\x03\xc1`\xe5k\x87Oh'\xf1\x07\xde\xd1\xcd\xa1\xc2\x877\x13]U\xfey\xe2Y\x95\xfe\xd2\x1a\xe0\xd0\x9bD\x91\x7f\xfcO\xfa\xca\xedg\xbc\xb1\xb4\xfb\x8a\x87\x16\xa2\x88\x1f\xf0\x11a\xc1_6/\xd1#\xc2\xb0\xf0/\xac}\xba\xfe\xd9\xe4\xaf\xd8n\xf1B\xbf\xcb_)<\xf3\xcfn\xf2\xc7\xba\x9f\xfam\xf4{\x1eQ\x82\xb3\xd1O;=\xae\x80\xc9\xaa\x7f2>\xf2\xd04\xf5k\xf0\xc4\xfe\xcc\x80f\xfeD\xfc}\x01\xe8\xfc\xdf\xc1u{*\xfd\xd3\xbe7@\xa7\xd4/5\x94\x06\xae\xfa\xff\xa6\xe7\xe6_\xe2\x97\xba\x99\x80\xe5\xfcO\xeby\x03l\xff?\xb8\xf8l\xe7\xaf\xa1j\xf4{\x03\x17\xfa\xb4\x19\xc7\xc5\xe1\xd3\x00\x00\x00\x00IEND\xaeB`\x82" + 
b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x94\x00\x00\x00\x94\x01\x00\x00\x00\x00]G=y\x00\x00\x00\xf9IDATx\xda\xedV\xc1\r\xc40\x0cB\xb7\x80\xf7\xdf\x92\r\\\xb0\xfb\xeb\xe7\xaa\xf0l\xd4\xaaQ\x1e\xc8\x06L\x8a~,\xe2;{s\x06\xa0\xd8z9\xdb\xe6\x0f\xcf\xf5\xef\x99\xf0J\x0f\x85\x86*o\xcf\xf1\x04\x04\x1ak\xb6\x11<\x97\xa6\xa6\x83x&\xb32x\x86\xa4\xab\xeb\x08\x7f\x16\xf5^\x11}\xbd$\xb0\x80k=t\xcc\x9f\xfdg\xfa\xda\xe5\x1d\xe3\t\x8br_\xdb3\x85D}\x063u\x00\x03\xfd\xb6<\xe2\xeaL\xa2y<\xae\xcf\xe3!\x895\xbfL\xf07\x0eT]n7\xc3_{0\xd4\xefx:\xc0\x1f\xc6}\x9e\xb7\x84\x1e\xfb\x91\x0e\x12\x84\t=z\xd2t\x07\x8e\x1d\xc9\x03\xc7\xa9G\xb7\x12\xf3&0\x176\x19\x98\xc8g\x8b;\x88@\xc6\x7f\x93\xa9\xfbVD\xdf\x193\xde9\x1d\xd1\xc3\x9ev`E\xf2oo\xa3\xe1/\x847\xad\x8a?0t\xffN\xb4p\xf35\xf3\x7f\x80\xad\xafS\xf7\x1bD`D\x8f\xef\x9f\xf0\xe0\xec\x02\xa4\xc0\x83\x92\xcf\xf3\xf9a\x00\x00\x00\x00IEND\xaeB`\x82' # --- diff --git a/tests/components/fritz/snapshots/test_sensor.ambr b/tests/components/fritz/snapshots/test_sensor.ambr deleted file mode 100644 index 50744815aa5..00000000000 --- a/tests/components/fritz/snapshots/test_sensor.ambr +++ /dev/null @@ -1,771 +0,0 @@ -# serializer version: 1 -# name: test_sensor_setup[sensor.mock_title_connection_uptime-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_connection_uptime', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Connection uptime', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'connection_uptime', - 'unique_id': '1C:ED:6F:12:34:11-connection_uptime', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup[sensor.mock_title_connection_uptime-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Connection uptime', - }), - 'context': , - 'entity_id': 'sensor.mock_title_connection_uptime', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-09-01T10:11:33+00:00', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_download_throughput-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_download_throughput', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Download throughput', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'kb_s_received', - 'unique_id': '1C:ED:6F:12:34:11-kb_s_received', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_setup[sensor.mock_title_download_throughput-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'Mock Title Download throughput', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_download_throughput', - 'last_changed': , 
- 'last_reported': , - 'last_updated': , - 'state': '67.6', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_external_ip-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_external_ip', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'External IP', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'external_ip', - 'unique_id': '1C:ED:6F:12:34:11-external_ip', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup[sensor.mock_title_external_ip-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title External IP', - }), - 'context': , - 'entity_id': 'sensor.mock_title_external_ip', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.2.3.4', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_external_ipv6-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_external_ipv6', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'External IPv6', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'external_ipv6', - 'unique_id': '1C:ED:6F:12:34:11-external_ipv6', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup[sensor.mock_title_external_ipv6-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title External IPv6', - }), - 'context': , - 'entity_id': 'sensor.mock_title_external_ipv6', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'fec0::1', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_gb_received-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_gb_received', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'GB received', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'gb_received', - 'unique_id': '1C:ED:6F:12:34:11-gb_received', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_setup[sensor.mock_title_gb_received-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'Mock Title GB received', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_gb_received', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5.2', - }) -# --- -# name: 
test_sensor_setup[sensor.mock_title_gb_sent-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_gb_sent', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'GB sent', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'gb_sent', - 'unique_id': '1C:ED:6F:12:34:11-gb_sent', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_setup[sensor.mock_title_gb_sent-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'Mock Title GB sent', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_gb_sent', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.7', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_last_restart-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_last_restart', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last restart', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_uptime', - 'unique_id': '1C:ED:6F:12:34:11-device_uptime', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup[sensor.mock_title_last_restart-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Last restart', - }), - 'context': , - 'entity_id': 'sensor.mock_title_last_restart', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-08-03T16:30:21+00:00', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_link_download_noise_margin-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_link_download_noise_margin', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Link download noise margin', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'link_noise_margin_received', - 'unique_id': '1C:ED:6F:12:34:11-link_noise_margin_received', - 'unit_of_measurement': 'dB', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_link_download_noise_margin-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Link download noise margin', - 'unit_of_measurement': 'dB', - }), - 'context': , - 'entity_id': 'sensor.mock_title_link_download_noise_margin', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 
'state': '8.0', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_link_download_power_attenuation-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_link_download_power_attenuation', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Link download power attenuation', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'link_attenuation_received', - 'unique_id': '1C:ED:6F:12:34:11-link_attenuation_received', - 'unit_of_measurement': 'dB', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_link_download_power_attenuation-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Link download power attenuation', - 'unit_of_measurement': 'dB', - }), - 'context': , - 'entity_id': 'sensor.mock_title_link_download_power_attenuation', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '12.0', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_link_download_throughput-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_link_download_throughput', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Link download throughput', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'link_kb_s_received', - 'unique_id': '1C:ED:6F:12:34:11-link_kb_s_received', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_setup[sensor.mock_title_link_download_throughput-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'Mock Title Link download throughput', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_link_download_throughput', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '318557.0', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_link_upload_noise_margin-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_link_upload_noise_margin', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Link upload noise margin', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'link_noise_margin_sent', - 'unique_id': '1C:ED:6F:12:34:11-link_noise_margin_sent', - 'unit_of_measurement': 'dB', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_link_upload_noise_margin-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'friendly_name': 'Mock Title Link upload noise margin', - 'unit_of_measurement': 'dB', - }), - 'context': , - 'entity_id': 'sensor.mock_title_link_upload_noise_margin', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '9.0', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_link_upload_power_attenuation-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_link_upload_power_attenuation', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Link upload power attenuation', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'link_attenuation_sent', - 'unique_id': '1C:ED:6F:12:34:11-link_attenuation_sent', - 'unit_of_measurement': 'dB', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_link_upload_power_attenuation-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Link upload power attenuation', - 'unit_of_measurement': 'dB', - }), - 'context': , - 'entity_id': 'sensor.mock_title_link_upload_power_attenuation', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '7.0', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_link_upload_throughput-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_link_upload_throughput', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Link upload throughput', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'link_kb_s_sent', - 'unique_id': '1C:ED:6F:12:34:11-link_kb_s_sent', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_setup[sensor.mock_title_link_upload_throughput-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'Mock Title Link upload throughput', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_link_upload_throughput', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '51805.0', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_max_connection_download_throughput-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_max_connection_download_throughput', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Max connection download throughput', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'max_kb_s_received', - 'unique_id': 
'1C:ED:6F:12:34:11-max_kb_s_received', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_setup[sensor.mock_title_max_connection_download_throughput-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'Mock Title Max connection download throughput', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_max_connection_download_throughput', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10087.0', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_max_connection_upload_throughput-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_max_connection_upload_throughput', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Max connection upload throughput', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'max_kb_s_sent', - 'unique_id': '1C:ED:6F:12:34:11-max_kb_s_sent', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_setup[sensor.mock_title_max_connection_upload_throughput-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'Mock Title Max connection upload throughput', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_max_connection_upload_throughput', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2105.0', - }) -# --- -# name: test_sensor_setup[sensor.mock_title_upload_throughput-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_upload_throughput', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Upload throughput', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'kb_s_sent', - 'unique_id': '1C:ED:6F:12:34:11-kb_s_sent', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_setup[sensor.mock_title_upload_throughput-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'Mock Title Upload throughput', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_upload_throughput', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.4', - }) -# --- diff --git a/tests/components/fritz/snapshots/test_switch.ambr b/tests/components/fritz/snapshots/test_switch.ambr deleted file mode 100644 index b34a3626fe2..00000000000 --- a/tests/components/fritz/snapshots/test_switch.ambr +++ /dev/null @@ -1,571 +0,0 @@ -# serializer version: 1 -# name: test_switch_setup[fc_data0][switch.mock_title_wi_fi_wifi_2_4ghz-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, 
- 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_title_wi_fi_wifi_2_4ghz', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:wifi', - 'original_name': 'Mock Title Wi-Fi WiFi (2.4Ghz)', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi_2_4ghz', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[fc_data0][switch.mock_title_wi_fi_wifi_2_4ghz-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Wi-Fi WiFi (2.4Ghz)', - 'icon': 'mdi:wifi', - }), - 'context': , - 'entity_id': 'switch.mock_title_wi_fi_wifi_2_4ghz', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch_setup[fc_data0][switch.mock_title_wi_fi_wifi_5ghz-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_title_wi_fi_wifi_5ghz', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:wifi', - 'original_name': 'Mock Title Wi-Fi WiFi (5Ghz)', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi_5ghz', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[fc_data0][switch.mock_title_wi_fi_wifi_5ghz-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Wi-Fi WiFi (5Ghz)', - 'icon': 'mdi:wifi', - }), - 'context': , - 'entity_id': 'switch.mock_title_wi_fi_wifi_5ghz', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch_setup[fc_data0][switch.printer_internet_access-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.printer_internet_access', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:router-wireless-settings', - 'original_name': 'printer Internet Access', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'AA:BB:CC:00:11:22_internet_access', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[fc_data0][switch.printer_internet_access-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'printer Internet Access', - 'icon': 'mdi:router-wireless-settings', - }), - 'context': , - 'entity_id': 'switch.printer_internet_access', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch_setup[fc_data1][switch.mock_title_wi_fi_wifi-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_title_wi_fi_wifi', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:wifi', - 'original_name': 'Mock Title Wi-Fi WiFi', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[fc_data1][switch.mock_title_wi_fi_wifi-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Wi-Fi WiFi', - 'icon': 'mdi:wifi', - }), - 'context': , - 'entity_id': 'switch.mock_title_wi_fi_wifi', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch_setup[fc_data1][switch.mock_title_wi_fi_wifi2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_title_wi_fi_wifi2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:wifi', - 'original_name': 'Mock Title Wi-Fi WiFi2', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi2', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[fc_data1][switch.mock_title_wi_fi_wifi2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Wi-Fi WiFi2', - 'icon': 'mdi:wifi', - }), - 'context': , - 'entity_id': 'switch.mock_title_wi_fi_wifi2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch_setup[fc_data1][switch.printer_internet_access-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.printer_internet_access', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:router-wireless-settings', - 'original_name': 'printer Internet Access', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'AA:BB:CC:00:11:22_internet_access', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[fc_data1][switch.printer_internet_access-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'printer Internet Access', - 'icon': 'mdi:router-wireless-settings', - }), - 'context': , - 'entity_id': 'switch.printer_internet_access', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch_setup[fc_data2][switch.mock_title_wi_fi_wifi_2_4ghz-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_title_wi_fi_wifi_2_4ghz', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:wifi', - 'original_name': 'Mock Title Wi-Fi WiFi (2.4Ghz)', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi_2_4ghz', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[fc_data2][switch.mock_title_wi_fi_wifi_2_4ghz-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Wi-Fi WiFi (2.4Ghz)', - 'icon': 'mdi:wifi', - }), - 'context': , - 'entity_id': 'switch.mock_title_wi_fi_wifi_2_4ghz', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch_setup[fc_data2][switch.mock_title_wi_fi_wifi_5ghz-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_title_wi_fi_wifi_5ghz', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:wifi', - 'original_name': 'Mock Title Wi-Fi WiFi+ (5Ghz)', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi_5ghz', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[fc_data2][switch.mock_title_wi_fi_wifi_5ghz-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Wi-Fi WiFi+ (5Ghz)', - 'icon': 'mdi:wifi', - }), - 'context': , - 'entity_id': 'switch.mock_title_wi_fi_wifi_5ghz', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch_setup[fc_data2][switch.printer_internet_access-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.printer_internet_access', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:router-wireless-settings', - 'original_name': 'printer Internet Access', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'AA:BB:CC:00:11:22_internet_access', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[fc_data2][switch.printer_internet_access-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'printer Internet Access', - 'icon': 'mdi:router-wireless-settings', - }), - 'context': , - 'entity_id': 'switch.printer_internet_access', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch_setup[fc_data3][switch.mock_title_call_deflection_0-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_title_call_deflection_0', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:phone-forward', - 'original_name': 'Call deflection 0', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1C:ED:6F:12:34:11-call_deflection_0', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[fc_data3][switch.mock_title_call_deflection_0-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'deflection_to_number': '+1234657890', - 'friendly_name': 'Mock Title Call deflection 0', - 'icon': 'mdi:phone-forward', - 'mode': 'Immediately', - 'number': None, - 'outgoing': None, - 'phonebook_id': None, - 'type': 'fromAll', - }), - 'context': , - 'entity_id': 'switch.mock_title_call_deflection_0', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch_setup[fc_data3][switch.mock_title_wi_fi_mywifi-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_title_wi_fi_mywifi', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:wifi', - 'original_name': 'Mock Title Wi-Fi MyWifi', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1C:ED:6F:12:34:11-wi_fi_mywifi', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[fc_data3][switch.mock_title_wi_fi_mywifi-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Wi-Fi MyWifi', - 'icon': 'mdi:wifi', - }), - 'context': , - 'entity_id': 'switch.mock_title_wi_fi_mywifi', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch_setup[fc_data3][switch.printer_internet_access-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.printer_internet_access', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:router-wireless-settings', - 'original_name': 'printer Internet Access', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'AA:BB:CC:00:11:22_internet_access', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[fc_data3][switch.printer_internet_access-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'printer Internet Access', - 'icon': 'mdi:router-wireless-settings', - }), - 'context': , - 'entity_id': 'switch.printer_internet_access', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git 
a/tests/components/fritz/snapshots/test_update.ambr b/tests/components/fritz/snapshots/test_update.ambr deleted file mode 100644 index 3c7880d01e7..00000000000 --- a/tests/components/fritz/snapshots/test_update.ambr +++ /dev/null @@ -1,175 +0,0 @@ -# serializer version: 1 -# name: test_available_update_can_be_installed[update.mock_title_fritz_os-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'update', - 'entity_category': , - 'entity_id': 'update.mock_title_fritz_os', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'FRITZ!OS', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '1C:ED:6F:12:34:11-update', - 'unit_of_measurement': None, - }) -# --- -# name: test_available_update_can_be_installed[update.mock_title_fritz_os-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'auto_update': False, - 'display_precision': 0, - 'entity_picture': 'https://brands.home-assistant.io/_/fritz/icon.png', - 'friendly_name': 'Mock Title FRITZ!OS', - 'in_progress': False, - 'installed_version': '7.29', - 'latest_version': '7.50', - 'release_summary': None, - 'release_url': 'http://download.avm.de/fritzbox/fritzbox-7530-ax/deutschland/fritz.os/info_de.txt', - 'skipped_version': None, - 'supported_features': , - 'title': 'FRITZ!OS', - 'update_percentage': None, - }), - 'context': , - 'entity_id': 'update.mock_title_fritz_os', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_update_available[update.mock_title_fritz_os-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'update', - 'entity_category': , - 'entity_id': 'update.mock_title_fritz_os', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'FRITZ!OS', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '1C:ED:6F:12:34:11-update', - 'unit_of_measurement': None, - }) -# --- -# name: test_update_available[update.mock_title_fritz_os-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'auto_update': False, - 'display_precision': 0, - 'entity_picture': 'https://brands.home-assistant.io/_/fritz/icon.png', - 'friendly_name': 'Mock Title FRITZ!OS', - 'in_progress': False, - 'installed_version': '7.29', - 'latest_version': '7.50', - 'release_summary': None, - 'release_url': 'http://download.avm.de/fritzbox/fritzbox-7530-ax/deutschland/fritz.os/info_de.txt', - 'skipped_version': None, - 'supported_features': , - 'title': 'FRITZ!OS', - 'update_percentage': None, - }), - 'context': , - 'entity_id': 'update.mock_title_fritz_os', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_update_entities_initialized[update.mock_title_fritz_os-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': 
None, - 'device_id': , - 'disabled_by': None, - 'domain': 'update', - 'entity_category': , - 'entity_id': 'update.mock_title_fritz_os', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'FRITZ!OS', - 'platform': 'fritz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '1C:ED:6F:12:34:11-update', - 'unit_of_measurement': None, - }) -# --- -# name: test_update_entities_initialized[update.mock_title_fritz_os-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'auto_update': False, - 'display_precision': 0, - 'entity_picture': 'https://brands.home-assistant.io/_/fritz/icon.png', - 'friendly_name': 'Mock Title FRITZ!OS', - 'in_progress': False, - 'installed_version': '7.29', - 'latest_version': '7.29', - 'release_summary': None, - 'release_url': None, - 'skipped_version': None, - 'supported_features': , - 'title': 'FRITZ!OS', - 'update_percentage': None, - }), - 'context': , - 'entity_id': 'update.mock_title_fritz_os', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/fritz/test_button.py b/tests/components/fritz/test_button.py index 068b07c4337..8666491eb7a 100644 --- a/tests/components/fritz/test_button.py +++ b/tests/components/fritz/test_button.py @@ -1,48 +1,38 @@ """Tests for Fritz!Tools button platform.""" -from copy import deepcopy +import copy from datetime import timedelta from unittest.mock import patch import pytest -from syrupy.assertion import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.fritz.const import DOMAIN, MeshRoles from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util.dt import utcnow -from .const import ( - MOCK_HOST_ATTRIBUTES_DATA, - MOCK_MESH_DATA, - MOCK_NEW_DEVICE_NODE, - MOCK_USER_DATA, -) +from .const import MOCK_MESH_DATA, MOCK_NEW_DEVICE_NODE, MOCK_USER_DATA -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +from tests.common import MockConfigEntry, async_fire_time_changed -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_button_setup( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - fc_class_mock, - fh_class_mock, - snapshot: SnapshotAssertion, -) -> None: +async def test_button_setup(hass: HomeAssistant, fc_class_mock, fh_class_mock) -> None: """Test setup of Fritz!Tools buttons.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - with patch("homeassistant.components.fritz.PLATFORMS", [Platform.BUTTON]): - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.LOADED - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + buttons = hass.states.async_all(BUTTON_DOMAIN) + assert len(buttons) == 4 + + for button in buttons: + assert button.state == STATE_UNKNOWN @pytest.mark.parametrize( @@ -130,7 
+120,7 @@ async def test_wol_button_new_device( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - mesh_data = deepcopy(MOCK_MESH_DATA) + mesh_data = copy.deepcopy(MOCK_MESH_DATA) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert entry.state is ConfigEntryState.LOADED @@ -158,7 +148,7 @@ async def test_wol_button_absent_for_mesh_slave( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - slave_mesh_data = deepcopy(MOCK_MESH_DATA) + slave_mesh_data = copy.deepcopy(MOCK_MESH_DATA) slave_mesh_data["nodes"][0]["mesh_role"] = MeshRoles.SLAVE fh_class_mock.get_mesh_topology.return_value = slave_mesh_data @@ -180,7 +170,7 @@ async def test_wol_button_absent_for_non_lan_device( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - printer_wifi_data = deepcopy(MOCK_MESH_DATA) + printer_wifi_data = copy.deepcopy(MOCK_MESH_DATA) # initialization logic uses the connection type of the `node_interface_1_uid` pair of the printer # ni-230 is wifi interface of fritzbox printer_node_interface = printer_wifi_data["nodes"][1]["node_interfaces"][0] @@ -194,61 +184,3 @@ async def test_wol_button_absent_for_non_lan_device( button = hass.states.get("button.printer_wake_on_lan") assert button is None - - -async def test_cleanup_button( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - fc_class_mock, - fh_class_mock, -) -> None: - """Test cleanup of orphan devices.""" - - entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) - entry.add_to_hass(hass) - - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED - - # check if tracked device is registered properly - device = device_registry.async_get_device( - connections={("mac", "aa:bb:cc:00:11:22")} - ) - assert device - - entities = [ - entity - for entity in er.async_entries_for_config_entry(entity_registry, entry.entry_id) - if entity.unique_id.startswith("AA:BB:CC:00:11:22") - ] - assert entities - assert len(entities) == 3 - - # removed tracked device and trigger cleanup - host_attributes = deepcopy(MOCK_HOST_ATTRIBUTES_DATA) - host_attributes.pop(0) - fh_class_mock.get_hosts_attributes.return_value = host_attributes - - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: "button.mock_title_cleanup"}, - blocking=True, - ) - - await hass.async_block_till_done(wait_background_tasks=True) - - # check if orphan tracked device is removed - device = device_registry.async_get_device( - connections={("mac", "aa:bb:cc:00:11:22")} - ) - assert not device - - entities = [ - entity - for entity in er.async_entries_for_config_entry(entity_registry, entry.entry_id) - if entity.unique_id.startswith("AA:BB:CC:00:11:22") - ] - assert not entities diff --git a/tests/components/fritz/test_config_flow.py b/tests/components/fritz/test_config_flow.py index 84f1b240b88..a54acbb0ac0 100644 --- a/tests/components/fritz/test_config_flow.py +++ b/tests/components/fritz/test_config_flow.py @@ -10,7 +10,6 @@ from fritzconnection.core.exceptions import ( ) import pytest -from homeassistant.components import ssdp from homeassistant.components.device_tracker import ( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME, @@ -23,7 +22,13 @@ from homeassistant.components.fritz.const import ( ERROR_UNKNOWN, FRITZ_AUTH_EXCEPTIONS, ) -from homeassistant.config_entries import SOURCE_SSDP, 
SOURCE_USER +from homeassistant.components.ssdp import ATTR_UPNP_UDN +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + SOURCE_SSDP, + SOURCE_USER, +) from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -305,9 +310,6 @@ async def test_reauth_successful( mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" with ( patch( @@ -333,6 +335,15 @@ async def test_reauth_successful( mock_request_post.return_value.status_code = 200 mock_request_post.return_value.text = MOCK_REQUEST + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -365,14 +376,20 @@ async def test_reauth_not_successful( mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.fritz.config_flow.FritzConnection", side_effect=side_effect, ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -452,13 +469,18 @@ async def test_reconfigure_successful( mock_request_post.return_value.status_code = 200 mock_request_post.return_value.text = MOCK_REQUEST - result = await mock_config.start_reconfigure_flow( - hass, - show_advanced_options=show_advanced_options, + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": mock_config.entry_id, + "show_advanced_options": show_advanced_options, + }, + data=mock_config.data, ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -509,10 +531,14 @@ async def test_reconfigure_not_successful( mock_request_post.return_value.status_code = 200 mock_request_post.return_value.text = MOCK_REQUEST - result = await mock_config.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -523,7 +549,7 @@ async def test_reconfigure_not_successful( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" assert result["errors"]["base"] == ERROR_CANNOT_CONNECT result = await hass.config_entries.flow.async_configure( @@ -644,7 +670,7 @@ async def test_ssdp_already_in_progress_host( MOCK_NO_UNIQUE_ID = 
dataclasses.replace(MOCK_SSDP_DATA) MOCK_NO_UNIQUE_ID.upnp = MOCK_NO_UNIQUE_ID.upnp.copy() - del MOCK_NO_UNIQUE_ID.upnp[ssdp.ATTR_UPNP_UDN] + del MOCK_NO_UNIQUE_ID.upnp[ATTR_UPNP_UDN] result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_NO_UNIQUE_ID ) @@ -737,23 +763,3 @@ async def test_options_flow(hass: HomeAssistant) -> None: CONF_OLD_DISCOVERY: False, CONF_CONSIDER_HOME: 37, } - - -async def test_ssdp_ipv6_link_local(hass: HomeAssistant) -> None: - """Test ignoring ipv6-link-local while ssdp discovery.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_SSDP}, - data=ssdp.SsdpServiceInfo( - ssdp_usn="mock_usn", - ssdp_st="mock_st", - ssdp_location="https://[fe80::1ff:fe23:4567:890a]:12345/test", - upnp={ - ssdp.ATTR_UPNP_FRIENDLY_NAME: "fake_name", - ssdp.ATTR_UPNP_UDN: "uuid:only-a-test", - }, - ), - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "ignore_ip6_link_local" diff --git a/tests/components/fritz/test_diagnostics.py b/tests/components/fritz/test_diagnostics.py index cbcaa57dab4..55196eb6988 100644 --- a/tests/components/fritz/test_diagnostics.py +++ b/tests/components/fritz/test_diagnostics.py @@ -2,13 +2,14 @@ from __future__ import annotations -from syrupy import SnapshotAssertion -from syrupy.filters import props - +from homeassistant.components.diagnostics import REDACTED from homeassistant.components.fritz.const import DOMAIN +from homeassistant.components.fritz.coordinator import AvmWrapper +from homeassistant.components.fritz.diagnostics import TO_REDACT +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from .const import MOCK_USER_DATA +from .const import MOCK_MESH_MASTER_MAC, MOCK_USER_DATA from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry @@ -20,16 +21,64 @@ async def test_entry_diagnostics( hass_client: ClientSessionGenerator, fc_class_mock, fh_class_mock, - snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(entry.entry_id) + await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() + assert entry.state is ConfigEntryState.LOADED + entry_dict = entry.as_dict() + for key in TO_REDACT: + entry_dict["data"][key] = REDACTED result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert result == snapshot( - exclude=props("created_at", "modified_at", "entry_id", "last_activity") - ) + avm_wrapper: AvmWrapper = hass.data[DOMAIN][entry.entry_id] + assert result == { + "entry": entry_dict, + "device_info": { + "client_devices": [ + { + "connected_to": device.connected_to, + "connection_type": device.connection_type, + "hostname": device.hostname, + "is_connected": device.is_connected, + "last_activity": device.last_activity.isoformat(), + "wan_access": device.wan_access, + } + for _, device in avm_wrapper.devices.items() + ], + "connection_type": "WANPPPConnection", + "current_firmware": "7.29", + "discovered_services": [ + "DeviceInfo1", + "Hosts1", + "LANEthernetInterfaceConfig1", + "Layer3Forwarding1", + "UserInterface1", + "WANCommonIFC1", + "WANCommonInterfaceConfig1", + "WANDSLInterfaceConfig1", + "WANIPConn1", + "WANPPPConnection1", + "WLANConfiguration1", + "X_AVM-DE_Homeauto1", + "X_AVM-DE_HostFilter1", + ], + 
"is_router": True, + "last_exception": None, + "last_update success": True, + "latest_firmware": None, + "mesh_role": "master", + "model": "FRITZ!Box 7530 AX", + "unique_id": MOCK_MESH_MASTER_MAC.replace("6F:12", "XX:XX"), + "update_available": False, + "wan_link_properties": { + "NewLayer1DownstreamMaxBitRate": 318557000, + "NewLayer1UpstreamMaxBitRate": 51805000, + "NewPhysicalLinkStatus": "Up", + "NewWANAccessType": "DSL", + }, + }, + } diff --git a/tests/components/fritz/test_image.py b/tests/components/fritz/test_image.py index d8652bd6508..9097aab1762 100644 --- a/tests/components/fritz/test_image.py +++ b/tests/components/fritz/test_image.py @@ -24,7 +24,6 @@ from tests.typing import ClientSessionGenerator GUEST_WIFI_ENABLED: dict[str, dict] = { "WLANConfiguration0": {}, "WLANConfiguration1": { - "GetBeaconAdvertisement": {"NewBeaconAdvertisementEnabled": 1}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -44,7 +43,6 @@ GUEST_WIFI_ENABLED: dict[str, dict] = { GUEST_WIFI_CHANGED: dict[str, dict] = { "WLANConfiguration0": {}, "WLANConfiguration1": { - "GetBeaconAdvertisement": {"NewBeaconAdvertisementEnabled": 1}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -64,7 +62,6 @@ GUEST_WIFI_CHANGED: dict[str, dict] = { GUEST_WIFI_DISABLED: dict[str, dict] = { "WLANConfiguration0": {}, "WLANConfiguration1": { - "GetBeaconAdvertisement": {"NewBeaconAdvertisementEnabled": 1}, "GetInfo": { "NewEnable": False, "NewStatus": "Up", diff --git a/tests/components/fritz/test_sensor.py b/tests/components/fritz/test_sensor.py index 77deb665f5e..f8114238376 100644 --- a/tests/components/fritz/test_sensor.py +++ b/tests/components/fritz/test_sensor.py @@ -2,44 +2,123 @@ from __future__ import annotations -from datetime import UTC, datetime, timedelta -from unittest.mock import patch +from datetime import timedelta +from typing import Any from fritzconnection.core.exceptions import FritzConnectionException -import pytest -from syrupy.assertion import SnapshotAssertion from homeassistant.components.fritz.const import DOMAIN -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.components.fritz.sensor import SENSOR_TYPES +from homeassistant.components.sensor import ( + ATTR_STATE_CLASS, + DOMAIN as SENSOR_DOMAIN, + SensorDeviceClass, + SensorStateClass, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + ATTR_STATE, + ATTR_UNIT_OF_MEASUREMENT, + STATE_UNAVAILABLE, +) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util from .const import MOCK_USER_DATA -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +from tests.common import MockConfigEntry, async_fire_time_changed + +SENSOR_STATES: dict[str, dict[str, Any]] = { + "sensor.mock_title_external_ip": { + ATTR_STATE: "1.2.3.4", + }, + "sensor.mock_title_external_ipv6": { + ATTR_STATE: "fec0::1", + }, + "sensor.mock_title_last_restart": { + # ATTR_STATE: "2022-02-05T17:46:04+00:00", + ATTR_DEVICE_CLASS: SensorDeviceClass.TIMESTAMP, + }, + "sensor.mock_title_connection_uptime": { + # ATTR_STATE: "2022-03-06T11:27:16+00:00", + ATTR_DEVICE_CLASS: SensorDeviceClass.TIMESTAMP, + }, + "sensor.mock_title_upload_throughput": { + ATTR_STATE: "3.4", + ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, + ATTR_UNIT_OF_MEASUREMENT: "kB/s", + }, + 
"sensor.mock_title_download_throughput": { + ATTR_STATE: "67.6", + ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, + ATTR_UNIT_OF_MEASUREMENT: "kB/s", + }, + "sensor.mock_title_max_connection_upload_throughput": { + ATTR_STATE: "2105.0", + ATTR_UNIT_OF_MEASUREMENT: "kbit/s", + }, + "sensor.mock_title_max_connection_download_throughput": { + ATTR_STATE: "10087.0", + ATTR_UNIT_OF_MEASUREMENT: "kbit/s", + }, + "sensor.mock_title_gb_sent": { + ATTR_STATE: "1.7", + ATTR_STATE_CLASS: SensorStateClass.TOTAL_INCREASING, + ATTR_UNIT_OF_MEASUREMENT: "GB", + }, + "sensor.mock_title_gb_received": { + ATTR_STATE: "5.2", + ATTR_STATE_CLASS: SensorStateClass.TOTAL_INCREASING, + ATTR_UNIT_OF_MEASUREMENT: "GB", + }, + "sensor.mock_title_link_upload_throughput": { + ATTR_STATE: "51805.0", + ATTR_UNIT_OF_MEASUREMENT: "kbit/s", + }, + "sensor.mock_title_link_download_throughput": { + ATTR_STATE: "318557.0", + ATTR_UNIT_OF_MEASUREMENT: "kbit/s", + }, + "sensor.mock_title_link_upload_noise_margin": { + ATTR_STATE: "9.0", + ATTR_UNIT_OF_MEASUREMENT: "dB", + }, + "sensor.mock_title_link_download_noise_margin": { + ATTR_STATE: "8.0", + ATTR_UNIT_OF_MEASUREMENT: "dB", + }, + "sensor.mock_title_link_upload_power_attenuation": { + ATTR_STATE: "7.0", + ATTR_UNIT_OF_MEASUREMENT: "dB", + }, + "sensor.mock_title_link_download_power_attenuation": { + ATTR_STATE: "12.0", + ATTR_UNIT_OF_MEASUREMENT: "dB", + }, +} -@pytest.mark.freeze_time(datetime(2024, 9, 1, 20, tzinfo=UTC)) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_setup( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - fc_class_mock, - fh_class_mock, - snapshot: SnapshotAssertion, -) -> None: +async def test_sensor_setup(hass: HomeAssistant, fc_class_mock, fh_class_mock) -> None: """Test setup of Fritz!Tools sensors.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - with patch("homeassistant.components.fritz.PLATFORMS", [Platform.SENSOR]): - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.LOADED - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + sensors = hass.states.async_all(SENSOR_DOMAIN) + assert len(sensors) == len(SENSOR_TYPES) + + for sensor in sensors: + assert SENSOR_STATES.get(sensor.entity_id) is not None + for key, val in SENSOR_STATES[sensor.entity_id].items(): + if key == ATTR_STATE: + assert sensor.state == val + else: + assert sensor.attributes.get(key) == val async def test_sensor_update_fail( diff --git a/tests/components/fritz/test_switch.py b/tests/components/fritz/test_switch.py index fdf76d54588..b82587d42bd 100644 --- a/tests/components/fritz/test_switch.py +++ b/tests/components/fritz/test_switch.py @@ -2,19 +2,16 @@ from __future__ import annotations -from unittest.mock import patch - import pytest -from syrupy.assertion import SnapshotAssertion from homeassistant.components.fritz.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from .const import MOCK_CALL_DEFLECTION_DATA, MOCK_FB_SERVICES, MOCK_USER_DATA +from .const import MOCK_FB_SERVICES, MOCK_USER_DATA -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry 
MOCK_WLANCONFIGS_SAME_SSID: dict[str, dict] = { "WLANConfiguration1": { @@ -169,28 +166,36 @@ MOCK_WLANCONFIGS_DIFF2_SSID: dict[str, dict] = { @pytest.mark.parametrize( - ("fc_data"), + ("fc_data", "expected_wifi_names"), [ - ({**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_SAME_SSID}), - ({**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_DIFF_SSID}), - ({**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_DIFF2_SSID}), - ({**MOCK_FB_SERVICES, **MOCK_CALL_DEFLECTION_DATA}), + ( + {**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_SAME_SSID}, + ["WiFi (2.4Ghz)", "WiFi (5Ghz)"], + ), + ({**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_DIFF_SSID}, ["WiFi", "WiFi2"]), + ( + {**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_DIFF2_SSID}, + ["WiFi (2.4Ghz)", "WiFi+ (5Ghz)"], + ), ], ) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_switch_setup( hass: HomeAssistant, - entity_registry: er.EntityRegistry, + expected_wifi_names: list[str], fc_class_mock, fh_class_mock, - snapshot: SnapshotAssertion, ) -> None: """Test setup of Fritz!Tools switches.""" + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - with patch("homeassistant.components.fritz.PLATFORMS", [Platform.SWITCH]): - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + assert entry.state is ConfigEntryState.LOADED - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + switches = hass.states.async_all(Platform.SWITCH) + assert len(switches) == 3 + assert switches[0].name == f"Mock Title Wi-Fi {expected_wifi_names[0]}" + assert switches[1].name == f"Mock Title Wi-Fi {expected_wifi_names[1]}" + assert switches[2].name == "printer Internet Access" diff --git a/tests/components/fritz/test_update.py b/tests/components/fritz/test_update.py index 72997b1aa12..5d7ef852d4c 100644 --- a/tests/components/fritz/test_update.py +++ b/tests/components/fritz/test_update.py @@ -2,13 +2,10 @@ from unittest.mock import patch -import pytest -from syrupy.assertion import SnapshotAssertion - from homeassistant.components.fritz.const import DOMAIN -from homeassistant.const import Platform +from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from .const import ( MOCK_FB_SERVICES, @@ -17,7 +14,8 @@ from .const import ( MOCK_USER_DATA, ) -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator AVAILABLE_UPDATE = { "UserInterface1": { @@ -29,33 +27,30 @@ AVAILABLE_UPDATE = { } -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_update_entities_initialized( hass: HomeAssistant, - entity_registry: er.EntityRegistry, + hass_client: ClientSessionGenerator, fc_class_mock, fh_class_mock, - snapshot: SnapshotAssertion, ) -> None: """Test update entities.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - with patch("homeassistant.components.fritz.PLATFORMS", [Platform.UPDATE]): - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.LOADED - await 
snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + updates = hass.states.async_all(UPDATE_DOMAIN) + assert len(updates) == 1 -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_update_available( hass: HomeAssistant, - entity_registry: er.EntityRegistry, + hass_client: ClientSessionGenerator, fc_class_mock, fh_class_mock, - snapshot: SnapshotAssertion, ) -> None: """Test update entities.""" @@ -64,39 +59,64 @@ async def test_update_available( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - with patch("homeassistant.components.fritz.PLATFORMS", [Platform.UPDATE]): - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.LOADED - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + update = hass.states.get("update.mock_title_fritz_os") + assert update is not None + assert update.state == "on" + assert update.attributes.get("installed_version") == "7.29" + assert update.attributes.get("latest_version") == MOCK_FIRMWARE_AVAILABLE + assert update.attributes.get("release_url") == MOCK_FIRMWARE_RELEASE_URL -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_available_update_can_be_installed( +async def test_no_update_available( hass: HomeAssistant, - entity_registry: er.EntityRegistry, + hass_client: ClientSessionGenerator, + fc_class_mock, + fh_class_mock, +) -> None: + """Test update entities.""" + + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.LOADED + + update = hass.states.get("update.mock_title_fritz_os") + assert update is not None + assert update.state == "off" + assert update.attributes.get("installed_version") == "7.29" + assert update.attributes.get("latest_version") == "7.29" + + +async def test_available_update_can_be_installed( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, fc_class_mock, fh_class_mock, - snapshot: SnapshotAssertion, ) -> None: """Test update entities.""" fc_class_mock().override_services({**MOCK_FB_SERVICES, **AVAILABLE_UPDATE}) - with ( - patch( - "homeassistant.components.fritz.coordinator.FritzBoxTools.async_trigger_firmware_update", - return_value=True, - ) as mocked_update_call, - patch("homeassistant.components.fritz.PLATFORMS", [Platform.UPDATE]), - ): + with patch( + "homeassistant.components.fritz.coordinator.FritzBoxTools.async_trigger_firmware_update", + return_value=True, + ) as mocked_update_call: entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(entry.entry_id) + await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() + assert entry.state is ConfigEntryState.LOADED - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + update = hass.states.get("update.mock_title_fritz_os") + assert update is not None + assert update.state == "on" await hass.services.async_call( "update", diff --git a/tests/components/fritzbox/__init__.py b/tests/components/fritzbox/__init__.py index 034b86497db..61312805e91 100644 --- a/tests/components/fritzbox/__init__.py +++ b/tests/components/fritzbox/__init__.py @@ -5,6 +5,7 @@ from __future__ import 
annotations from typing import Any from unittest.mock import Mock +from homeassistant.components.climate import PRESET_COMFORT, PRESET_ECO from homeassistant.components.fritzbox.const import DOMAIN from homeassistant.core import HomeAssistant @@ -109,14 +110,9 @@ class FritzDeviceClimateMock(FritzEntityBaseMock): target_temperature = 19.5 window_open = "fake_window" nextchange_temperature = 22.0 - nextchange_endperiod = 1726855200 - - -class FritzDeviceClimateWithoutTempSensorMock(FritzDeviceClimateMock): - """Mock of a AVM Fritz!Box climate device without exposing temperature sensor.""" - - temperature = None - has_temperature_sensor = False + nextchange_endperiod = 0 + nextchange_preset = PRESET_COMFORT + scheduled_preset = PRESET_ECO class FritzDeviceSensorMock(FritzEntityBaseMock): @@ -177,7 +173,6 @@ class FritzDeviceLightMock(FritzEntityBaseMock): level = 100 present = True state = True - color_temp = None class FritzDeviceCoverMock(FritzEntityBaseMock): @@ -192,9 +187,3 @@ class FritzDeviceCoverMock(FritzEntityBaseMock): has_thermostat = False has_blind = True levelpercentage = 0 - - -class FritzDeviceCoverUnknownPositionMock(FritzDeviceCoverMock): - """Mock of a AVM Fritz!Box cover device with unknown position.""" - - levelpercentage = None diff --git a/tests/components/fritzbox/test_binary_sensor.py b/tests/components/fritzbox/test_binary_sensor.py index f4cc1b2e2ca..3e1a2691f67 100644 --- a/tests/components/fritzbox/test_binary_sensor.py +++ b/tests/components/fritzbox/test_binary_sensor.py @@ -6,10 +6,7 @@ from unittest.mock import Mock from requests.exceptions import HTTPError -from homeassistant.components.binary_sensor import ( - DOMAIN as BINARY_SENSOR_DOMAIN, - BinarySensorDeviceClass, -) +from homeassistant.components.binary_sensor import DOMAIN, BinarySensorDeviceClass from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN from homeassistant.components.sensor import ATTR_STATE_CLASS, DOMAIN as SENSOR_DOMAIN from homeassistant.const import ( @@ -30,7 +27,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{BINARY_SENSOR_DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -151,5 +148,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.new_device_alarm") + state = hass.states.get(f"{DOMAIN}.new_device_alarm") assert state diff --git a/tests/components/fritzbox/test_button.py b/tests/components/fritzbox/test_button.py index 913f828efbc..89e8d8357dd 100644 --- a/tests/components/fritzbox/test_button.py +++ b/tests/components/fritzbox/test_button.py @@ -3,7 +3,7 @@ from datetime import timedelta from unittest.mock import Mock -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.button import DOMAIN, SERVICE_PRESS from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -19,7 +19,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{BUTTON_DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -43,7 +43,7 @@ async def test_apply_template(hass: HomeAssistant, fritz: 
Mock) -> None: ) await hass.services.async_call( - BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ENTITY_ID}, True + DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert fritz().apply_template.call_count == 1 @@ -67,5 +67,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{BUTTON_DOMAIN}.new_template") + state = hass.states.get(f"{DOMAIN}.new_template") assert state diff --git a/tests/components/fritzbox/test_climate.py b/tests/components/fritzbox/test_climate.py index 29f5742216f..8d1da9d09d5 100644 --- a/tests/components/fritzbox/test_climate.py +++ b/tests/components/fritzbox/test_climate.py @@ -1,7 +1,7 @@ """Tests for AVM Fritz!Box climate component.""" from datetime import timedelta -from unittest.mock import Mock, _Call, call +from unittest.mock import Mock, call from freezegun.api import FrozenDateTimeFactory import pytest @@ -15,7 +15,7 @@ from homeassistant.components.climate import ( ATTR_MIN_TEMP, ATTR_PRESET_MODE, ATTR_PRESET_MODES, - DOMAIN as CLIMATE_DOMAIN, + DOMAIN, PRESET_COMFORT, PRESET_ECO, SERVICE_SET_HVAC_MODE, @@ -46,17 +46,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError import homeassistant.util.dt as dt_util -from . import ( - FritzDeviceClimateMock, - FritzDeviceClimateWithoutTempSensorMock, - set_devices, - setup_config_entry, -) +from . import FritzDeviceClimateMock, set_devices, setup_config_entry from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{CLIMATE_DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -123,7 +118,7 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}_next_scheduled_change_time" ) assert state - assert state.state == "2024-09-20T18:00:00+00:00" + assert state.state == "1970-01-01T00:00:00+00:00" assert ( state.attributes[ATTR_FRIENDLY_NAME] == f"{CONF_FAKE_NAME} Next scheduled change time" @@ -167,18 +162,6 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: assert state.state == PRESET_COMFORT -async def test_hkr_wo_temperature_sensor(hass: HomeAssistant, fritz: Mock) -> None: - """Test hkr without exposing dedicated temperature sensor data block.""" - device = FritzDeviceClimateWithoutTempSensorMock() - assert await setup_config_entry( - hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz - ) - - state = hass.states.get(ENTITY_ID) - assert state - assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 18.0 - - async def test_target_temperature_on(hass: HomeAssistant, fritz: Mock) -> None: """Test turn device on.""" device = FritzDeviceClimateMock() @@ -270,101 +253,110 @@ async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None: assert fritz().login.call_count == 4 -@pytest.mark.parametrize( - ("service_data", "expected_call_args"), - [ - ({ATTR_TEMPERATURE: 23}, [call(23)]), - ( - { - ATTR_HVAC_MODE: HVACMode.OFF, - ATTR_TEMPERATURE: 23, - }, - [call(0)], - ), - ( - { - ATTR_HVAC_MODE: HVACMode.HEAT, - ATTR_TEMPERATURE: 23, - }, - [call(23)], - ), - ], -) -async def test_set_temperature( - hass: HomeAssistant, - fritz: Mock, - service_data: dict, - expected_call_args: list[_Call], -) -> None: - """Test setting temperature.""" +async def 
test_set_temperature_temperature(hass: HomeAssistant, fritz: Mock) -> None: + """Test setting temperature by temperature.""" device = FritzDeviceClimateMock() assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: ENTITY_ID, **service_data}, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 123}, True, ) - assert device.set_target_temperature.call_count == len(expected_call_args) - assert device.set_target_temperature.call_args_list == expected_call_args + assert device.set_target_temperature.call_args_list == [call(123)] -@pytest.mark.parametrize( - ("service_data", "target_temperature", "current_preset", "expected_call_args"), - [ - # mode off always sets target temperature to 0 - ({ATTR_HVAC_MODE: HVACMode.OFF}, 22, PRESET_COMFORT, [call(0)]), - ({ATTR_HVAC_MODE: HVACMode.OFF}, 16, PRESET_ECO, [call(0)]), - ({ATTR_HVAC_MODE: HVACMode.OFF}, 16, None, [call(0)]), - # mode heat sets target temperature based on current scheduled preset, - # when not already in mode heat - ({ATTR_HVAC_MODE: HVACMode.HEAT}, 0.0, PRESET_COMFORT, [call(22)]), - ({ATTR_HVAC_MODE: HVACMode.HEAT}, 0.0, PRESET_ECO, [call(16)]), - ({ATTR_HVAC_MODE: HVACMode.HEAT}, 0.0, None, [call(22)]), - # mode heat does not set target temperature, when already in mode heat - ({ATTR_HVAC_MODE: HVACMode.HEAT}, 16, PRESET_COMFORT, []), - ({ATTR_HVAC_MODE: HVACMode.HEAT}, 16, PRESET_ECO, []), - ({ATTR_HVAC_MODE: HVACMode.HEAT}, 16, None, []), - ({ATTR_HVAC_MODE: HVACMode.HEAT}, 22, PRESET_COMFORT, []), - ({ATTR_HVAC_MODE: HVACMode.HEAT}, 22, PRESET_ECO, []), - ({ATTR_HVAC_MODE: HVACMode.HEAT}, 22, None, []), - ], -) -async def test_set_hvac_mode( - hass: HomeAssistant, - fritz: Mock, - service_data: dict, - target_temperature: float, - current_preset: str, - expected_call_args: list[_Call], -) -> None: +async def test_set_temperature_mode_off(hass: HomeAssistant, fritz: Mock) -> None: + """Test setting temperature by mode.""" + device = FritzDeviceClimateMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_HVAC_MODE: HVACMode.OFF, + ATTR_TEMPERATURE: 123, + }, + True, + ) + assert device.set_target_temperature.call_args_list == [call(0)] + + +async def test_set_temperature_mode_heat(hass: HomeAssistant, fritz: Mock) -> None: + """Test setting temperature by mode.""" + device = FritzDeviceClimateMock() + device.target_temperature = 0.0 + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_HVAC_MODE: HVACMode.HEAT, + ATTR_TEMPERATURE: 123, + }, + True, + ) + assert device.set_target_temperature.call_args_list == [call(22)] + + +async def test_set_hvac_mode_off(hass: HomeAssistant, fritz: Mock) -> None: """Test setting hvac mode.""" device = FritzDeviceClimateMock() - device.target_temperature = target_temperature - - if current_preset is PRESET_COMFORT: - device.nextchange_temperature = device.eco_temperature - elif current_preset is PRESET_ECO: - device.nextchange_temperature = device.comfort_temperature - else: - device.nextchange_endperiod = 0 - assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, 
device, fritz ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, **service_data}, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF}, True, ) + assert device.set_target_temperature.call_args_list == [call(0)] - assert device.set_target_temperature.call_count == len(expected_call_args) - assert device.set_target_temperature.call_args_list == expected_call_args + +async def test_no_reset_hvac_mode_heat(hass: HomeAssistant, fritz: Mock) -> None: + """Test setting hvac mode.""" + device = FritzDeviceClimateMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, + True, + ) + assert device.set_target_temperature.call_count == 0 + + +async def test_set_hvac_mode_heat(hass: HomeAssistant, fritz: Mock) -> None: + """Test setting hvac mode.""" + device = FritzDeviceClimateMock() + device.target_temperature = 0.0 + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, + True, + ) + assert device.set_target_temperature.call_args_list == [call(22)] async def test_set_preset_mode_comfort(hass: HomeAssistant, fritz: Mock) -> None: @@ -375,7 +367,7 @@ async def test_set_preset_mode_comfort(hass: HomeAssistant, fritz: Mock) -> None ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_COMFORT}, True, @@ -391,7 +383,7 @@ async def test_set_preset_mode_eco(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_ECO}, True, @@ -454,7 +446,7 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{CLIMATE_DOMAIN}.new_climate") + state = hass.states.get(f"{DOMAIN}.new_climate") assert state diff --git a/tests/components/fritzbox/test_config_flow.py b/tests/components/fritzbox/test_config_flow.py index 0df6d0b2ea9..72d36a8ab63 100644 --- a/tests/components/fritzbox/test_config_flow.py +++ b/tests/components/fritzbox/test_config_flow.py @@ -12,7 +12,12 @@ from requests.exceptions import HTTPError from homeassistant.components import ssdp from homeassistant.components.fritzbox.const import DOMAIN from homeassistant.components.ssdp import ATTR_UPNP_FRIENDLY_NAME, ATTR_UPNP_UDN -from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + SOURCE_SSDP, + SOURCE_USER, +) from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -124,7 +129,12 @@ async def test_reauth_success(hass: HomeAssistant, fritz: Mock) -> None: """Test starting a reauthentication flow.""" mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + 
context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -148,7 +158,12 @@ async def test_reauth_auth_failed(hass: HomeAssistant, fritz: Mock) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -171,7 +186,12 @@ async def test_reauth_not_successful(hass: HomeAssistant, fritz: Mock) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -196,9 +216,13 @@ async def test_reconfigure_success(hass: HomeAssistant, fritz: Mock) -> None: assert mock_config.data[CONF_USERNAME] == "fake_user" assert mock_config.data[CONF_PASSWORD] == "fake_pass" - result = await mock_config.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -225,9 +249,13 @@ async def test_reconfigure_failed(hass: HomeAssistant, fritz: Mock) -> None: assert mock_config.data[CONF_USERNAME] == "fake_user" assert mock_config.data[CONF_PASSWORD] == "fake_pass" - result = await mock_config.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -236,7 +264,7 @@ async def test_reconfigure_failed(hass: HomeAssistant, fritz: Mock) -> None: }, ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" assert result["errors"]["base"] == "no_devices_found" result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/fritzbox/test_cover.py b/tests/components/fritzbox/test_cover.py index f26e65fc28a..6c301fc8f46 100644 --- a/tests/components/fritzbox/test_cover.py +++ b/tests/components/fritzbox/test_cover.py @@ -3,12 +3,7 @@ from datetime import timedelta from unittest.mock import Mock, call -from homeassistant.components.cover import ( - ATTR_CURRENT_POSITION, - ATTR_POSITION, - DOMAIN as COVER_DOMAIN, - CoverState, -) +from homeassistant.components.cover import ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -17,22 +12,16 @@ from homeassistant.const 
import (
     SERVICE_OPEN_COVER,
     SERVICE_SET_COVER_POSITION,
     SERVICE_STOP_COVER,
-    STATE_UNKNOWN,
 )
 from homeassistant.core import HomeAssistant
 import homeassistant.util.dt as dt_util

-from . import (
-    FritzDeviceCoverMock,
-    FritzDeviceCoverUnknownPositionMock,
-    set_devices,
-    setup_config_entry,
-)
+from . import FritzDeviceCoverMock, set_devices, setup_config_entry
 from .const import CONF_FAKE_NAME, MOCK_CONFIG

 from tests.common import async_fire_time_changed

-ENTITY_ID = f"{COVER_DOMAIN}.{CONF_FAKE_NAME}"
+ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}"


 async def test_setup(hass: HomeAssistant, fritz: Mock) -> None:
@@ -44,22 +33,9 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None:

     state = hass.states.get(ENTITY_ID)
     assert state
-    assert state.state == CoverState.OPEN
     assert state.attributes[ATTR_CURRENT_POSITION] == 100


-async def test_unknown_position(hass: HomeAssistant, fritz: Mock) -> None:
-    """Test cover with unknown position."""
-    device = FritzDeviceCoverUnknownPositionMock()
-    assert await setup_config_entry(
-        hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
-    )
-
-    state = hass.states.get(ENTITY_ID)
-    assert state
-    assert state.state == STATE_UNKNOWN
-
-
 async def test_open_cover(hass: HomeAssistant, fritz: Mock) -> None:
     """Test opening the cover."""
     device = FritzDeviceCoverMock()
@@ -68,7 +44,7 @@ async def test_open_cover(hass: HomeAssistant, fritz: Mock) -> None:
     )

     await hass.services.async_call(
-        COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True
+        DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True
     )
     assert device.set_blind_open.call_count == 1

@@ -81,7 +57,7 @@ async def test_close_cover(hass: HomeAssistant, fritz: Mock) -> None:
     )

     await hass.services.async_call(
-        COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True
+        DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True
     )
     assert device.set_blind_close.call_count == 1

@@ -94,7 +70,7 @@ async def test_set_position_cover(hass: HomeAssistant, fritz: Mock) -> None:
     )

     await hass.services.async_call(
-        COVER_DOMAIN,
+        DOMAIN,
         SERVICE_SET_COVER_POSITION,
         {ATTR_ENTITY_ID: ENTITY_ID, ATTR_POSITION: 50},
         True,
@@ -110,7 +86,7 @@ async def test_stop_cover(hass: HomeAssistant, fritz: Mock) -> None:
     )

     await hass.services.async_call(
-        COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True
+        DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True
     )
     assert device.set_blind_stop.call_count == 1

@@ -134,5 +110,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None:
     async_fire_time_changed(hass, next_update)
     await hass.async_block_till_done(wait_background_tasks=True)

-    state = hass.states.get(f"{COVER_DOMAIN}.new_climate")
+    state = hass.states.get(f"{DOMAIN}.new_climate")
     assert state
diff --git a/tests/components/fritzbox/test_diagnostics.py b/tests/components/fritzbox/test_diagnostics.py
index 21d70b4b6d6..38aaa623080 100644
--- a/tests/components/fritzbox/test_diagnostics.py
+++ b/tests/components/fritzbox/test_diagnostics.py
@@ -30,4 +30,4 @@ async def test_entry_diagnostics(

     result = await get_diagnostics_for_config_entry(hass, hass_client, entries[0])

-    assert result == {"entry": entry_dict | {"discovery_keys": {}}, "data": {}}
+    assert result == {"entry": entry_dict, "data": {}}
diff --git a/tests/components/fritzbox/test_init.py b/tests/components/fritzbox/test_init.py
index 56e3e7a5738..c84498b1560 100644
--- a/tests/components/fritzbox/test_init.py
+++ b/tests/components/fritzbox/test_init.py
@@ -18,7 +18,6 @@ from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_USERNAME, - EVENT_HOMEASSISTANT_STOP, STATE_UNAVAILABLE, UnitOfTemperature, ) @@ -200,35 +199,6 @@ async def test_unload_remove(hass: HomeAssistant, fritz: Mock) -> None: assert state is None -async def test_logout_on_stop(hass: HomeAssistant, fritz: Mock) -> None: - """Test we log out from fritzbox when Home Assistants stops.""" - fritz().get_devices.return_value = [FritzDeviceSwitchMock()] - entity_id = f"{SWITCH_DOMAIN}.{CONF_FAKE_NAME}" - - entry = MockConfigEntry( - domain=FB_DOMAIN, - data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], - unique_id=entity_id, - ) - entry.add_to_hass(hass) - - config_entries = hass.config_entries.async_entries(FB_DOMAIN) - assert len(config_entries) == 1 - assert entry is config_entries[0] - - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert entry.state is ConfigEntryState.LOADED - state = hass.states.get(entity_id) - assert state - - hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) - await hass.async_block_till_done() - - assert fritz().logout.call_count == 1 - - async def test_remove_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/fritzbox/test_light.py b/tests/components/fritzbox/test_light.py index 84fafe25521..45920c7c3ee 100644 --- a/tests/components/fritzbox/test_light.py +++ b/tests/components/fritzbox/test_light.py @@ -3,7 +3,6 @@ from datetime import timedelta from unittest.mock import Mock, call -import pytest from requests.exceptions import HTTPError from homeassistant.components.fritzbox.const import ( @@ -13,14 +12,12 @@ from homeassistant.components.fritzbox.const import ( ) from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_MODE, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_SUPPORTED_COLOR_MODES, - DOMAIN as LIGHT_DOMAIN, - ColorMode, + DOMAIN, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -38,7 +35,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{LIGHT_DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -59,11 +56,9 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: assert state assert state.state == STATE_ON assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name" - assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 2700 assert state.attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 2700 assert state.attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 6500 - assert state.attributes[ATTR_HS_COLOR] == (28.395, 65.723) assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] @@ -104,9 +99,6 @@ async def test_setup_non_color_non_level(hass: HomeAssistant, fritz: Mock) -> No assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name" assert ATTR_BRIGHTNESS not in state.attributes assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["onoff"] - assert state.attributes[ATTR_COLOR_MODE] == ColorMode.ONOFF - assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) is None - assert state.attributes.get(ATTR_HS_COLOR) is None async def test_setup_color(hass: HomeAssistant, fritz: Mock) -> None: @@ -128,8 +120,6 @@ async def test_setup_color(hass: HomeAssistant, fritz: Mock) -> None: assert state assert state.state == STATE_ON assert 
state.attributes[ATTR_FRIENDLY_NAME] == "fake_name" - assert state.attributes[ATTR_COLOR_MODE] == ColorMode.HS - assert state.attributes[ATTR_COLOR_TEMP_KELVIN] is None assert state.attributes[ATTR_BRIGHTNESS] == 100 assert state.attributes[ATTR_HS_COLOR] == (100, 70) assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] @@ -147,7 +137,7 @@ async def test_turn_on(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - LIGHT_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_COLOR_TEMP_KELVIN: 3000}, True, @@ -170,7 +160,7 @@ async def test_turn_on_color(hass: HomeAssistant, fritz: Mock) -> None: hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) await hass.services.async_call( - LIGHT_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_HS_COLOR: (100, 70)}, True, @@ -193,18 +183,18 @@ async def test_turn_on_color_unsupported_api_method( device.get_colors.return_value = { "Red": [("100", "70", "10"), ("100", "50", "10"), ("100", "30", "10")] } + mockresponse = Mock() + mockresponse.status_code = 400 + + error = HTTPError("Bad Request") + error.response = mockresponse + device.set_unmapped_color.side_effect = error + assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) - - # test fallback to `setcolor` - error = HTTPError("Bad Request") - error.response = Mock() - error.response.status_code = 400 - device.set_unmapped_color.side_effect = error - await hass.services.async_call( - LIGHT_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_HS_COLOR: (100, 70)}, True, @@ -215,16 +205,6 @@ async def test_turn_on_color_unsupported_api_method( assert device.set_level.call_args_list == [call(100)] assert device.set_color.call_args_list == [call((100, 70))] - # test for unknown error - error.response.status_code = 500 - with pytest.raises(HTTPError, match="Bad Request"): - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_HS_COLOR: (100, 70)}, - True, - ) - async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None: """Test turn device off.""" @@ -237,7 +217,7 @@ async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None: hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) await hass.services.async_call( - LIGHT_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_state_off.call_count == 1 @@ -316,5 +296,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{LIGHT_DOMAIN}.new_light") + state = hass.states.get(f"{DOMAIN}.new_light") assert state diff --git a/tests/components/fritzbox/test_sensor.py b/tests/components/fritzbox/test_sensor.py index 0da040bbb5b..63d0b67d7f4 100644 --- a/tests/components/fritzbox/test_sensor.py +++ b/tests/components/fritzbox/test_sensor.py @@ -3,22 +3,15 @@ from datetime import timedelta from unittest.mock import Mock -import pytest from requests.exceptions import HTTPError -from homeassistant.components.climate import PRESET_COMFORT, PRESET_ECO from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN -from homeassistant.components.sensor import ( - ATTR_STATE_CLASS, - 
DOMAIN as SENSOR_DOMAIN, - SensorStateClass, -) +from homeassistant.components.sensor import ATTR_STATE_CLASS, DOMAIN, SensorStateClass from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT, CONF_DEVICES, PERCENTAGE, - STATE_UNKNOWN, EntityCategory, UnitOfTemperature, ) @@ -26,17 +19,12 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util -from . import ( - FritzDeviceClimateMock, - FritzDeviceSensorMock, - set_devices, - setup_config_entry, -) +from . import FritzDeviceSensorMock, set_devices, setup_config_entry from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" async def test_setup( @@ -142,57 +130,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{SENSOR_DOMAIN}.new_device_temperature") + state = hass.states.get(f"{DOMAIN}.new_device_temperature") assert state - - -@pytest.mark.parametrize( - ("next_changes", "expected_states"), - [ - ( - [0, 16], - [STATE_UNKNOWN, STATE_UNKNOWN, STATE_UNKNOWN, STATE_UNKNOWN], - ), - ( - [0, 22], - [STATE_UNKNOWN, STATE_UNKNOWN, STATE_UNKNOWN, STATE_UNKNOWN], - ), - ( - [1726855200, 16.0], - ["2024-09-20T18:00:00+00:00", "16.0", PRESET_ECO, PRESET_COMFORT], - ), - ( - [1726855200, 22.0], - ["2024-09-20T18:00:00+00:00", "22.0", PRESET_COMFORT, PRESET_ECO], - ), - ], -) -async def test_next_change_sensors( - hass: HomeAssistant, fritz: Mock, next_changes: list, expected_states: list -) -> None: - """Test next change sensors.""" - device = FritzDeviceClimateMock() - device.nextchange_endperiod = next_changes[0] - device.nextchange_temperature = next_changes[1] - - assert await setup_config_entry( - hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz - ) - - base_name = f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}" - - state = hass.states.get(f"{base_name}_next_scheduled_change_time") - assert state - assert state.state == expected_states[0] - - state = hass.states.get(f"{base_name}_next_scheduled_temperature") - assert state - assert state.state == expected_states[1] - - state = hass.states.get(f"{base_name}_next_scheduled_preset") - assert state - assert state.state == expected_states[2] - - state = hass.states.get(f"{base_name}_current_scheduled_preset") - assert state - assert state.state == expected_states[3] diff --git a/tests/components/fritzbox/test_switch.py b/tests/components/fritzbox/test_switch.py index e394ccbc7f3..ba3b1de9b2f 100644 --- a/tests/components/fritzbox/test_switch.py +++ b/tests/components/fritzbox/test_switch.py @@ -12,7 +12,7 @@ from homeassistant.components.sensor import ( DOMAIN as SENSOR_DOMAIN, SensorStateClass, ) -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.components.switch import DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, @@ -39,7 +39,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{SWITCH_DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" async def test_setup( @@ -124,7 +124,7 @@ async def test_turn_on(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, 
True + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_switch_state_on.call_count == 1 @@ -138,7 +138,7 @@ async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_switch_state_off.call_count == 1 @@ -158,7 +158,7 @@ async def test_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> None: match="Can't toggle switch while manual switching is disabled for the device", ): await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) with pytest.raises( @@ -166,7 +166,7 @@ async def test_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> None: match="Can't toggle switch while manual switching is disabled for the device", ): await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True ) @@ -239,5 +239,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{SWITCH_DOMAIN}.new_switch") + state = hass.states.get(f"{DOMAIN}.new_switch") assert state diff --git a/tests/components/fritzbox_callmonitor/test_config_flow.py b/tests/components/fritzbox_callmonitor/test_config_flow.py index 0eccb651611..14f18e84e0c 100644 --- a/tests/components/fritzbox_callmonitor/test_config_flow.py +++ b/tests/components/fritzbox_callmonitor/test_config_flow.py @@ -264,97 +264,6 @@ async def test_setup_invalid_auth( assert result["errors"] == {"base": ConnectResult.INVALID_AUTH} -async def test_reauth_successful(hass: HomeAssistant) -> None: - """Test starting a reauthentication flow.""" - mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG_ENTRY) - mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - with ( - patch( - "homeassistant.components.fritzbox_callmonitor.base.FritzPhonebook.__init__", - return_value=None, - ), - patch( - "homeassistant.components.fritzbox_callmonitor.base.FritzPhonebook.phonebook_ids", - new_callable=PropertyMock, - return_value=[0], - ), - patch( - "homeassistant.components.fritzbox_callmonitor.base.FritzPhonebook.phonebook_info", - return_value=MOCK_PHONEBOOK_INFO_1, - ), - patch( - "homeassistant.components.fritzbox_callmonitor.base.FritzPhonebook.modelname", - return_value=MOCK_PHONEBOOK_NAME_1, - ), - patch( - "homeassistant.components.fritzbox_callmonitor.config_flow.FritzConnection.__init__", - return_value=None, - ), - patch( - "homeassistant.components.fritzbox_callmonitor.config_flow.FritzConnection.updatecheck", - new_callable=PropertyMock, - return_value=MOCK_DEVICE_INFO, - ), - patch( - "homeassistant.components.fritzbox_callmonitor.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "other_fake_user", - CONF_PASSWORD: "other_fake_password", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - assert mock_config.data == { - **MOCK_CONFIG_ENTRY, - CONF_USERNAME: 
"other_fake_user", - CONF_PASSWORD: "other_fake_password", - } - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("side_effect", "error"), - [ - (FritzConnectionException, ConnectResult.INVALID_AUTH), - (FritzSecurityError, ConnectResult.INSUFFICIENT_PERMISSIONS), - ], -) -async def test_reauth_not_successful( - hass: HomeAssistant, side_effect: Exception, error: str -) -> None: - """Test starting a reauthentication flow but no connection found.""" - mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG_ENTRY) - mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - with patch( - "homeassistant.components.fritzbox_callmonitor.base.FritzPhonebook.__init__", - side_effect=side_effect, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "other_fake_user", - CONF_PASSWORD: "other_fake_password", - }, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"]["base"] == error - - async def test_options_flow_correct_prefixes(hass: HomeAssistant) -> None: """Test config flow options.""" diff --git a/tests/components/fronius/__init__.py b/tests/components/fronius/__init__.py index 57b22490ed0..2109d4a6692 100644 --- a/tests/components/fronius/__init__.py +++ b/tests/components/fronius/__init__.py @@ -3,12 +3,9 @@ from __future__ import annotations from collections.abc import Callable -from datetime import timedelta import json from typing import Any -from freezegun.api import FrozenDateTimeFactory - from homeassistant.components.fronius.const import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST @@ -117,12 +114,7 @@ def mock_responses( ) -async def enable_all_entities( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - config_entry_id: str, - time_till_next_update: timedelta, -) -> None: +async def enable_all_entities(hass, freezer, config_entry_id, time_till_next_update): """Enable all entities for a config entry and fast forward time to receive data.""" registry = er.async_get(hass) entities = er.async_entries_for_config_entry(registry, config_entry_id) diff --git a/tests/components/fronius/snapshots/test_diagnostics.ambr b/tests/components/fronius/snapshots/test_diagnostics.ambr index 010de06e276..f23d63a58e3 100644 --- a/tests/components/fronius/snapshots/test_diagnostics.ambr +++ b/tests/components/fronius/snapshots/test_diagnostics.ambr @@ -7,8 +7,6 @@ 'is_logger': True, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'fronius', 'entry_id': 'f1e2b9837e8adaed6fa682acaa216fd8', 'minor_version': 1, diff --git a/tests/components/fronius/test_config_flow.py b/tests/components/fronius/test_config_flow.py index 1b9c41d5aa6..41593a0ad2e 100644 --- a/tests/components/fronius/test_config_flow.py +++ b/tests/components/fronius/test_config_flow.py @@ -344,7 +344,7 @@ async def test_reconfigure(hass: HomeAssistant) -> None: """Test reconfiguring an entry.""" entry = MockConfigEntry( domain=DOMAIN, - unique_id="1234567", + unique_id="123.4567890", data={ CONF_HOST: "10.1.2.3", "is_logger": True, @@ -352,7 +352,14 @@ async def test_reconfigure(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": 
config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" @@ -399,7 +406,14 @@ async def test_reconfigure_cannot_connect(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) with ( patch( @@ -434,7 +448,14 @@ async def test_reconfigure_unexpected(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) with patch( "pyfronius.Fronius.current_logger_info", @@ -463,7 +484,14 @@ async def test_reconfigure_already_configured(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" @@ -490,7 +518,7 @@ async def test_reconfigure_already_configured(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "unique_id_mismatch" + assert result["reason"] == "already_configured" assert len(mock_setup_entry.mock_calls) == 0 @@ -517,7 +545,14 @@ async def test_reconfigure_already_existing(hass: HomeAssistant) -> None: ) entry_2.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) with patch( "pyfronius.Fronius.current_logger_info", return_value={"unique_identifier": {"value": entry_2_uid}}, @@ -531,4 +566,4 @@ async def test_reconfigure_already_existing(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "unique_id_mismatch" + assert result2["reason"] == "already_configured" diff --git a/tests/components/fronius/test_diagnostics.py b/tests/components/fronius/test_diagnostics.py index ddef5b4a18c..7b1f384e405 100644 --- a/tests/components/fronius/test_diagnostics.py +++ b/tests/components/fronius/test_diagnostics.py @@ -1,7 +1,6 @@ """Tests for the diagnostics data provided by the Fronius integration.""" from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -22,8 +21,11 @@ async def test_diagnostics( mock_responses(aioclient_mock) entry = await setup_fronius_integration(hass) - assert await get_diagnostics_for_config_entry( - hass, - hass_client, - entry, - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry( + hass, + hass_client, + entry, + ) + == snapshot + ) diff --git a/tests/components/frontend/test_init.py b/tests/components/frontend/test_init.py index 5006adedd77..83c82abea35 100644 --- a/tests/components/frontend/test_init.py +++ b/tests/components/frontend/test_init.py @@ -1,7 +1,6 @@ """The tests for Home Assistant 
frontend.""" from asyncio import AbstractEventLoop -from collections.abc import Generator from http import HTTPStatus from pathlib import Path import re @@ -65,7 +64,7 @@ CONFIG_THEMES = {DOMAIN: {CONF_THEMES: MOCK_THEMES}} @pytest.fixture -async def ignore_frontend_deps(hass: HomeAssistant) -> None: +async def ignore_frontend_deps(hass): """Frontend dependencies.""" frontend = await async_get_integration(hass, "frontend") for dep in frontend.dependencies: @@ -74,7 +73,7 @@ async def ignore_frontend_deps(hass: HomeAssistant) -> None: @pytest.fixture -async def frontend(hass: HomeAssistant, ignore_frontend_deps: None) -> None: +async def frontend(hass, ignore_frontend_deps): """Frontend setup with themes.""" assert await async_setup_component( hass, @@ -84,7 +83,7 @@ async def frontend(hass: HomeAssistant, ignore_frontend_deps: None) -> None: @pytest.fixture -async def frontend_themes(hass: HomeAssistant) -> None: +async def frontend_themes(hass): """Frontend setup with themes.""" assert await async_setup_component( hass, @@ -105,7 +104,7 @@ def aiohttp_client( @pytest.fixture async def mock_http_client( - hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, frontend: None + hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, frontend ) -> TestClient: """Start the Home Assistant HTTP component.""" return await aiohttp_client(hass.http.app) @@ -113,7 +112,7 @@ async def mock_http_client( @pytest.fixture async def themes_ws_client( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, frontend_themes: None + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, frontend_themes ) -> MockHAClientWebSocket: """Start the Home Assistant HTTP component.""" return await hass_ws_client(hass) @@ -121,7 +120,7 @@ async def themes_ws_client( @pytest.fixture async def ws_client( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, frontend: None + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, frontend ) -> MockHAClientWebSocket: """Start the Home Assistant HTTP component.""" return await hass_ws_client(hass) @@ -129,9 +128,7 @@ async def ws_client( @pytest.fixture async def mock_http_client_with_extra_js( - hass: HomeAssistant, - aiohttp_client: ClientSessionGenerator, - ignore_frontend_deps: None, + hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, ignore_frontend_deps ) -> TestClient: """Start the Home Assistant HTTP component.""" assert await async_setup_component( @@ -148,7 +145,7 @@ async def mock_http_client_with_extra_js( @pytest.fixture -def mock_onboarded() -> Generator[None]: +def mock_onboarded(): """Mock that we're onboarded.""" with patch( "homeassistant.components.onboarding.async_is_onboarded", return_value=True @@ -156,8 +153,7 @@ def mock_onboarded() -> Generator[None]: yield -@pytest.mark.usefixtures("mock_onboarded") -async def test_frontend_and_static(mock_http_client: TestClient) -> None: +async def test_frontend_and_static(mock_http_client, mock_onboarded) -> None: """Test if we can get the frontend.""" resp = await mock_http_client.get("") assert resp.status == 200 @@ -174,31 +170,26 @@ async def test_frontend_and_static(mock_http_client: TestClient) -> None: assert "public" in resp.headers.get("cache-control") -@pytest.mark.parametrize("sw_url", ["/sw-modern.js", "/sw-legacy.js"]) -async def test_dont_cache_service_worker( - mock_http_client: TestClient, sw_url: str -) -> None: +async def test_dont_cache_service_worker(mock_http_client) -> None: """Test that we don't cache the service worker.""" - resp = await 
mock_http_client.get(sw_url) + resp = await mock_http_client.get("/service_worker.js") assert resp.status == 200 assert "cache-control" not in resp.headers -async def test_404(mock_http_client: TestClient) -> None: +async def test_404(mock_http_client) -> None: """Test for HTTP 404 error.""" resp = await mock_http_client.get("/not-existing") assert resp.status == HTTPStatus.NOT_FOUND -async def test_we_cannot_POST_to_root(mock_http_client: TestClient) -> None: +async def test_we_cannot_POST_to_root(mock_http_client) -> None: """Test that POST is not allow to root.""" resp = await mock_http_client.post("/") assert resp.status == 405 -async def test_themes_api( - hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket -) -> None: +async def test_themes_api(hass: HomeAssistant, themes_ws_client) -> None: """Test that /api/themes returns correct data.""" await themes_ws_client.send_json({"id": 5, "type": "frontend/get_themes"}) msg = await themes_ws_client.receive_json() @@ -225,11 +216,11 @@ async def test_themes_api( assert msg["result"]["themes"] == {} -@pytest.mark.usefixtures("ignore_frontend_deps") async def test_themes_persist( hass: HomeAssistant, hass_storage: dict[str, Any], hass_ws_client: WebSocketGenerator, + ignore_frontend_deps, ) -> None: """Test that theme settings are restores after restart.""" hass_storage[THEMES_STORAGE_KEY] = { @@ -251,11 +242,11 @@ async def test_themes_persist( assert msg["result"]["default_dark_theme"] == "dark" -@pytest.mark.usefixtures("frontend_themes") async def test_themes_save_storage( hass: HomeAssistant, hass_storage: dict[str, Any], freezer: FrozenDateTimeFactory, + frontend_themes, ) -> None: """Test that theme settings are restores after restart.""" @@ -279,9 +270,7 @@ async def test_themes_save_storage( } -async def test_themes_set_theme( - hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket -) -> None: +async def test_themes_set_theme(hass: HomeAssistant, themes_ws_client) -> None: """Test frontend.set_theme service.""" await hass.services.async_call( DOMAIN, "set_theme", {"name": "happy"}, blocking=True @@ -314,7 +303,7 @@ async def test_themes_set_theme( async def test_themes_set_theme_wrong_name( - hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket + hass: HomeAssistant, themes_ws_client ) -> None: """Test frontend.set_theme service called with wrong name.""" @@ -329,9 +318,7 @@ async def test_themes_set_theme_wrong_name( assert msg["result"]["default_theme"] == "default" -async def test_themes_set_dark_theme( - hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket -) -> None: +async def test_themes_set_dark_theme(hass: HomeAssistant, themes_ws_client) -> None: """Test frontend.set_theme service called with dark mode.""" await hass.services.async_call( @@ -371,9 +358,8 @@ async def test_themes_set_dark_theme( assert msg["result"]["default_dark_theme"] == "light_and_dark" -@pytest.mark.usefixtures("frontend") async def test_themes_set_dark_theme_wrong_name( - hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket + hass: HomeAssistant, frontend, themes_ws_client ) -> None: """Test frontend.set_theme service called with mode dark and wrong name.""" await hass.services.async_call( @@ -387,9 +373,8 @@ async def test_themes_set_dark_theme_wrong_name( assert msg["result"]["default_dark_theme"] is None -@pytest.mark.usefixtures("frontend") async def test_themes_reload_themes( - hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket + hass: HomeAssistant, frontend, themes_ws_client ) -> None: """Test 
frontend.reload_themes service.""" @@ -410,7 +395,7 @@ async def test_themes_reload_themes( assert msg["result"]["default_theme"] == "default" -async def test_missing_themes(ws_client: MockHAClientWebSocket) -> None: +async def test_missing_themes(hass: HomeAssistant, ws_client) -> None: """Test that themes API works when themes are not defined.""" await ws_client.send_json({"id": 5, "type": "frontend/get_themes"}) @@ -427,7 +412,7 @@ async def test_missing_themes(ws_client: MockHAClientWebSocket) -> None: async def test_extra_js( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_http_client_with_extra_js: TestClient, + mock_http_client_with_extra_js, ) -> None: """Test that extra javascript is loaded.""" @@ -512,7 +497,7 @@ async def test_extra_js( async def test_get_panels( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_http_client: TestClient, + mock_http_client, caplog: pytest.LogCaptureFixture, ) -> None: """Test get_panels command.""" @@ -562,7 +547,7 @@ async def test_get_panels( async def test_get_panels_non_admin( - hass: HomeAssistant, ws_client: MockHAClientWebSocket, hass_admin_user: MockUser + hass: HomeAssistant, ws_client, hass_admin_user: MockUser ) -> None: """Test get_panels command.""" hass_admin_user.groups = [] @@ -583,7 +568,7 @@ async def test_get_panels_non_admin( assert "map" not in msg["result"] -async def test_get_translations(ws_client: MockHAClientWebSocket) -> None: +async def test_get_translations(hass: HomeAssistant, ws_client) -> None: """Test get_translations command.""" with patch( "homeassistant.components.frontend.async_get_translations", @@ -608,7 +593,7 @@ async def test_get_translations(ws_client: MockHAClientWebSocket) -> None: async def test_get_translations_for_integrations( - ws_client: MockHAClientWebSocket, + hass: HomeAssistant, ws_client ) -> None: """Test get_translations for integrations command.""" with patch( @@ -636,7 +621,7 @@ async def test_get_translations_for_integrations( async def test_get_translations_for_single_integration( - ws_client: MockHAClientWebSocket, + hass: HomeAssistant, ws_client ) -> None: """Test get_translations for integration command.""" with patch( @@ -675,7 +660,7 @@ async def test_onboarding_load(hass: HomeAssistant) -> None: assert "onboarding" in frontend.dependencies -async def test_auth_authorize(mock_http_client: TestClient) -> None: +async def test_auth_authorize(mock_http_client) -> None: """Test the authorize endpoint works.""" resp = await mock_http_client.get( "/auth/authorize?response_type=code&client_id=https://localhost/&" @@ -698,9 +683,7 @@ async def test_auth_authorize(mock_http_client: TestClient) -> None: assert "public" in resp.headers.get("cache-control") -async def test_get_version( - hass: HomeAssistant, ws_client: MockHAClientWebSocket -) -> None: +async def test_get_version(hass: HomeAssistant, ws_client) -> None: """Test get_version command.""" frontend = await async_get_integration(hass, "frontend") cur_version = next( @@ -718,7 +701,7 @@ async def test_get_version( assert msg["result"] == {"version": cur_version} -async def test_static_paths(mock_http_client: TestClient) -> None: +async def test_static_paths(hass: HomeAssistant, mock_http_client) -> None: """Test static paths.""" resp = await mock_http_client.get( "/.well-known/change-password", allow_redirects=False @@ -727,8 +710,9 @@ async def test_static_paths(mock_http_client: TestClient) -> None: assert resp.headers["location"] == "/profile" -@pytest.mark.usefixtures("frontend_themes") -async def 
test_manifest_json(hass: HomeAssistant, mock_http_client: TestClient) -> None: +async def test_manifest_json( + hass: HomeAssistant, frontend_themes, mock_http_client +) -> None: """Test for fetching manifest.json.""" resp = await mock_http_client.get("/manifest.json") assert resp.status == HTTPStatus.OK @@ -750,7 +734,7 @@ async def test_manifest_json(hass: HomeAssistant, mock_http_client: TestClient) assert json["theme_color"] != DEFAULT_THEME_COLOR -async def test_static_path_cache(mock_http_client: TestClient) -> None: +async def test_static_path_cache(hass: HomeAssistant, mock_http_client) -> None: """Test static paths cache.""" resp = await mock_http_client.get("/lovelace/default_view", allow_redirects=False) assert resp.status == 404 @@ -782,7 +766,7 @@ async def test_static_path_cache(mock_http_client: TestClient) -> None: assert resp.status == 404 -async def test_get_icons(ws_client: MockHAClientWebSocket) -> None: +async def test_get_icons(hass: HomeAssistant, ws_client: MockHAClientWebSocket) -> None: """Test get_icons command.""" with patch( "homeassistant.components.frontend.async_get_icons", @@ -803,7 +787,9 @@ async def test_get_icons(ws_client: MockHAClientWebSocket) -> None: assert msg["result"] == {"resources": {}} -async def test_get_icons_for_integrations(ws_client: MockHAClientWebSocket) -> None: +async def test_get_icons_for_integrations( + hass: HomeAssistant, ws_client: MockHAClientWebSocket +) -> None: """Test get_icons for integrations command.""" with patch( "homeassistant.components.frontend.async_get_icons", @@ -828,7 +814,7 @@ async def test_get_icons_for_integrations(ws_client: MockHAClientWebSocket) -> N async def test_get_icons_for_single_integration( - ws_client: MockHAClientWebSocket, + hass: HomeAssistant, ws_client: MockHAClientWebSocket ) -> None: """Test get_icons for integration command.""" with patch( diff --git a/tests/components/frontend/test_storage.py b/tests/components/frontend/test_storage.py index ce7f7aeb4a1..8b97fa9ee04 100644 --- a/tests/components/frontend/test_storage.py +++ b/tests/components/frontend/test_storage.py @@ -13,13 +13,15 @@ from tests.typing import WebSocketGenerator @pytest.fixture(autouse=True) -def setup_frontend(hass: HomeAssistant) -> None: +def setup_frontend(hass): """Fixture to setup the frontend.""" hass.loop.run_until_complete(async_setup_component(hass, "frontend", {})) async def test_get_user_data_empty( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], ) -> None: """Test get_user_data command.""" client = await hass_ws_client(hass) @@ -80,7 +82,9 @@ async def test_get_user_data( async def test_set_user_data_empty( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], ) -> None: """Test set_user_data command.""" client = await hass_ws_client(hass) diff --git a/tests/components/frontier_silicon/conftest.py b/tests/components/frontier_silicon/conftest.py index 709b1842472..2322740c69a 100644 --- a/tests/components/frontier_silicon/conftest.py +++ b/tests/components/frontier_silicon/conftest.py @@ -1,9 +1,9 @@ """Configuration for frontier_silicon tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.frontier_silicon.const import CONF_WEBFSAPI_URL, DOMAIN from homeassistant.const import CONF_PIN diff 
--git a/tests/components/frontier_silicon/test_config_flow.py b/tests/components/frontier_silicon/test_config_flow.py index c92cf897fe6..04bd1febdf8 100644 --- a/tests/components/frontier_silicon/test_config_flow.py +++ b/tests/components/frontier_silicon/test_config_flow.py @@ -26,7 +26,6 @@ MOCK_DISCOVERY = ssdp.SsdpServiceInfo( ssdp_udn="uuid:3dcc7100-f76c-11dd-87af-00226124ca30", ssdp_st="mock_st", ssdp_location="http://1.1.1.1/device", - ssdp_headers={"SPEAKER-NAME": "Speaker Name"}, upnp={"SPEAKER-NAME": "Speaker Name"}, ) @@ -35,7 +34,6 @@ INVALID_MOCK_DISCOVERY = ssdp.SsdpServiceInfo( ssdp_udn="uuid:3dcc7100-f76c-11dd-87af-00226124ca30", ssdp_st="mock_st", ssdp_location=None, - ssdp_headers={"SPEAKER-NAME": "Speaker Name"}, upnp={"SPEAKER-NAME": "Speaker Name"}, ) @@ -270,11 +268,6 @@ async def test_ssdp( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - flow = flows[0] - assert flow["context"]["title_placeholders"] == {"name": "Speaker Name"} - result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {}, @@ -363,7 +356,15 @@ async def test_reauth_flow(hass: HomeAssistant, config_entry: MockConfigEntry) - config_entry.add_to_hass(hass) assert config_entry.data[CONF_PIN] == "1234" - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": config_entry.unique_id, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "device_config" @@ -394,7 +395,15 @@ async def test_reauth_flow_friendly_name_error( config_entry.add_to_hass(hass) assert config_entry.data[CONF_PIN] == "1234" - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": config_entry.unique_id, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "device_config" diff --git a/tests/components/fujitsu_fglair/__init__.py b/tests/components/fujitsu_fglair/__init__.py deleted file mode 100644 index 2ec3fa0fce6..00000000000 --- a/tests/components/fujitsu_fglair/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Tests for the Fujitsu HVAC (based on Ayla IOT) integration.""" - -from ayla_iot_unofficial.fujitsu_hvac import FujitsuHVAC - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - -def entity_id(device: FujitsuHVAC) -> str: - """Generate the entity id for the given serial.""" - return f"{Platform.CLIMATE}.{device.device_serial_number}" diff --git a/tests/components/fujitsu_fglair/conftest.py b/tests/components/fujitsu_fglair/conftest.py deleted file mode 100644 index 5974adbeb0d..00000000000 --- a/tests/components/fujitsu_fglair/conftest.py +++ /dev/null @@ -1,121 +0,0 @@ -"""Common fixtures for the Fujitsu HVAC (based on Ayla IOT) tests.""" - -from collections.abc import Generator -from unittest.mock import 
AsyncMock, create_autospec, patch - -from ayla_iot_unofficial import AylaApi -from ayla_iot_unofficial.fujitsu_hvac import FanSpeed, FujitsuHVAC, OpMode, SwingMode -import pytest - -from homeassistant.components.fujitsu_fglair.const import ( - CONF_REGION, - DOMAIN, - REGION_DEFAULT, -) -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME - -from tests.common import MockConfigEntry - -TEST_DEVICE_NAME = "Test device" -TEST_DEVICE_SERIAL = "testserial" -TEST_USERNAME = "test-username" -TEST_PASSWORD = "test-password" - -TEST_USERNAME2 = "test-username2" -TEST_PASSWORD2 = "test-password2" - -TEST_SERIAL_NUMBER = "testserial123" -TEST_SERIAL_NUMBER2 = "testserial345" - -TEST_PROPERTY_VALUES = { - "model_name": "mock_fujitsu_device", - "mcu_firmware_version": "1", -} - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.fujitsu_fglair.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_ayla_api(mock_devices: list[AsyncMock]) -> Generator[AsyncMock]: - """Override AylaApi creation.""" - my_mock = create_autospec(AylaApi) - - with ( - patch( - "homeassistant.components.fujitsu_fglair.new_ayla_api", return_value=my_mock - ), - patch( - "homeassistant.components.fujitsu_fglair.config_flow.new_ayla_api", - return_value=my_mock, - ), - ): - my_mock.async_get_devices.return_value = mock_devices - yield my_mock - - -@pytest.fixture -def mock_config_entry(request: pytest.FixtureRequest) -> MockConfigEntry: - """Return a regular config entry.""" - region = REGION_DEFAULT - if hasattr(request, "param"): - region = request.param - - return MockConfigEntry( - domain=DOMAIN, - unique_id=TEST_USERNAME, - data={ - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_REGION: region, - }, - ) - - -def _create_device(serial_number: str) -> AsyncMock: - dev = AsyncMock(spec=FujitsuHVAC) - dev.device_serial_number = serial_number - dev.device_name = serial_number - dev.property_values = TEST_PROPERTY_VALUES - dev.has_capability.return_value = True - dev.fan_speed = FanSpeed.AUTO - dev.supported_fan_speeds = [ - FanSpeed.LOW, - FanSpeed.MEDIUM, - FanSpeed.HIGH, - FanSpeed.AUTO, - ] - dev.op_mode = OpMode.COOL - dev.supported_op_modes = [ - OpMode.OFF, - OpMode.ON, - OpMode.AUTO, - OpMode.COOL, - OpMode.DRY, - ] - dev.swing_mode = SwingMode.SWING_BOTH - dev.supported_swing_modes = [ - SwingMode.OFF, - SwingMode.SWING_HORIZONTAL, - SwingMode.SWING_VERTICAL, - SwingMode.SWING_BOTH, - ] - dev.temperature_range = [18.0, 26.0] - dev.sensed_temp = 22.0 - dev.set_temp = 21.0 - - return dev - - -@pytest.fixture -def mock_devices() -> list[AsyncMock]: - """Generate a list of mock devices that the API can return.""" - return [ - _create_device(serial) for serial in (TEST_SERIAL_NUMBER, TEST_SERIAL_NUMBER2) - ] diff --git a/tests/components/fujitsu_fglair/snapshots/test_climate.ambr b/tests/components/fujitsu_fglair/snapshots/test_climate.ambr deleted file mode 100644 index 31b143c6f95..00000000000 --- a/tests/components/fujitsu_fglair/snapshots/test_climate.ambr +++ /dev/null @@ -1,189 +0,0 @@ -# serializer version: 1 -# name: test_entities[climate.testserial123-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'fan_modes': list([ - 'low', - 'medium', - 'high', - 'auto', - ]), - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 26.0, - 'min_temp': 18.0, - 'swing_modes': list([ 
- 'off', - 'horizontal', - 'vertical', - 'both', - ]), - 'target_temp_step': 0.5, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.testserial123', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'fujitsu_fglair', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'testserial123', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[climate.testserial123-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 22.0, - 'fan_mode': 'auto', - 'fan_modes': list([ - 'low', - 'medium', - 'high', - 'auto', - ]), - 'friendly_name': 'testserial123', - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 26.0, - 'min_temp': 18.0, - 'supported_features': , - 'swing_mode': 'both', - 'swing_modes': list([ - 'off', - 'horizontal', - 'vertical', - 'both', - ]), - 'target_temp_step': 0.5, - 'temperature': 21.0, - }), - 'context': , - 'entity_id': 'climate.testserial123', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'cool', - }) -# --- -# name: test_entities[climate.testserial345-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'fan_modes': list([ - 'low', - 'medium', - 'high', - 'auto', - ]), - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 26.0, - 'min_temp': 18.0, - 'swing_modes': list([ - 'off', - 'horizontal', - 'vertical', - 'both', - ]), - 'target_temp_step': 0.5, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.testserial345', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'fujitsu_fglair', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'testserial345', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[climate.testserial345-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 22.0, - 'fan_mode': 'auto', - 'fan_modes': list([ - 'low', - 'medium', - 'high', - 'auto', - ]), - 'friendly_name': 'testserial345', - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 26.0, - 'min_temp': 18.0, - 'supported_features': , - 'swing_mode': 'both', - 'swing_modes': list([ - 'off', - 'horizontal', - 'vertical', - 'both', - ]), - 'target_temp_step': 0.5, - 'temperature': 21.0, - }), - 'context': , - 'entity_id': 'climate.testserial345', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'cool', - }) -# --- diff --git a/tests/components/fujitsu_fglair/test_climate.py b/tests/components/fujitsu_fglair/test_climate.py deleted file mode 100644 index daddc83a871..00000000000 --- a/tests/components/fujitsu_fglair/test_climate.py +++ /dev/null @@ -1,98 +0,0 @@ -"""Test for the climate entities of Fujitsu HVAC.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.components.climate import ( - ATTR_FAN_MODE, - ATTR_HVAC_MODE, - ATTR_SWING_MODE, - ATTR_TEMPERATURE, - 
DOMAIN as CLIMATE_DOMAIN, - FAN_AUTO, - SERVICE_SET_FAN_MODE, - SERVICE_SET_HVAC_MODE, - SERVICE_SET_SWING_MODE, - SERVICE_SET_TEMPERATURE, - SWING_BOTH, - HVACMode, -) -from homeassistant.components.fujitsu_fglair.climate import ( - HA_TO_FUJI_FAN, - HA_TO_FUJI_HVAC, - HA_TO_FUJI_SWING, -) -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import entity_id, setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_entities( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - mock_ayla_api: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test that coordinator returns the data we expect after the first refresh.""" - await setup_integration(hass, mock_config_entry) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_set_attributes( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - mock_ayla_api: AsyncMock, - mock_devices: list[AsyncMock], - mock_config_entry: MockConfigEntry, -) -> None: - """Test that setting the attributes calls the correct functions on the device.""" - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_HVAC_MODE, - service_data={ATTR_HVAC_MODE: HVACMode.COOL}, - target={ATTR_ENTITY_ID: entity_id(mock_devices[0])}, - blocking=True, - ) - mock_devices[0].async_set_op_mode.assert_called_once_with( - HA_TO_FUJI_HVAC[HVACMode.COOL] - ) - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_FAN_MODE, - service_data={ATTR_FAN_MODE: FAN_AUTO}, - target={ATTR_ENTITY_ID: entity_id(mock_devices[0])}, - blocking=True, - ) - mock_devices[0].async_set_fan_speed.assert_called_once_with( - HA_TO_FUJI_FAN[FAN_AUTO] - ) - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_SWING_MODE, - service_data={ATTR_SWING_MODE: SWING_BOTH}, - target={ATTR_ENTITY_ID: entity_id(mock_devices[0])}, - blocking=True, - ) - mock_devices[0].async_set_swing_mode.assert_called_once_with( - HA_TO_FUJI_SWING[SWING_BOTH] - ) - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - service_data={ATTR_TEMPERATURE: 23.0}, - target={ATTR_ENTITY_ID: entity_id(mock_devices[0])}, - blocking=True, - ) - mock_devices[0].async_set_set_temp.assert_called_once_with(23.0) diff --git a/tests/components/fujitsu_fglair/test_config_flow.py b/tests/components/fujitsu_fglair/test_config_flow.py deleted file mode 100644 index 6c9ebd66e47..00000000000 --- a/tests/components/fujitsu_fglair/test_config_flow.py +++ /dev/null @@ -1,186 +0,0 @@ -"""Test the Fujitsu HVAC (based on Ayla IOT) config flow.""" - -from unittest.mock import AsyncMock - -from ayla_iot_unofficial import AylaAuthError -import pytest - -from homeassistant.components.fujitsu_fglair.const import ( - CONF_REGION, - DOMAIN, - REGION_DEFAULT, -) -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResult, FlowResultType - -from .conftest import TEST_PASSWORD, TEST_PASSWORD2, TEST_USERNAME - -from tests.common import MockConfigEntry - - -async def _initial_step(hass: HomeAssistant) -> FlowResult: - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - 
assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - return await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_REGION: REGION_DEFAULT, - }, - ) - - -async def test_full_flow( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_ayla_api: AsyncMock -) -> None: - """Test full config flow.""" - result = await _initial_step(hass) - mock_ayla_api.async_sign_in.assert_called_once() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"FGLair ({TEST_USERNAME})" - assert result["data"] == { - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_REGION: REGION_DEFAULT, - } - - -async def test_duplicate_entry( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_ayla_api: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test that re-adding the same account fails.""" - mock_config_entry.add_to_hass(hass) - result = await _initial_step(hass) - mock_ayla_api.async_sign_in.assert_not_called() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize( - ("exception", "err_msg"), - [ - (AylaAuthError, "invalid_auth"), - (TimeoutError, "cannot_connect"), - (Exception, "unknown"), - ], -) -async def test_form_exceptions( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_ayla_api: AsyncMock, - exception: Exception, - err_msg: str, -) -> None: - """Test we handle exceptions.""" - - mock_ayla_api.async_sign_in.side_effect = exception - result = await _initial_step(hass) - mock_ayla_api.async_sign_in.assert_called_once() - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": err_msg} - - mock_ayla_api.async_sign_in.side_effect = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_REGION: REGION_DEFAULT, - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"FGLair ({TEST_USERNAME})" - assert result["data"] == { - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_REGION: REGION_DEFAULT, - } - - -async def test_reauth_success( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_ayla_api: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test reauth flow.""" - mock_config_entry.add_to_hass(hass) - - result = await mock_config_entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_PASSWORD: TEST_PASSWORD2, - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - assert mock_config_entry.data[CONF_PASSWORD] == TEST_PASSWORD2 - - -@pytest.mark.parametrize( - ("exception", "err_msg"), - [ - (AylaAuthError, "invalid_auth"), - (TimeoutError, "cannot_connect"), - (Exception, "unknown"), - ], -) -async def test_reauth_exceptions( - hass: HomeAssistant, - exception: Exception, - err_msg: str, - mock_setup_entry: AsyncMock, - mock_ayla_api: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test reauth flow when an exception occurs.""" - mock_config_entry.add_to_hass(hass) - - result = await mock_config_entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert 
result["step_id"] == "reauth_confirm" - - mock_ayla_api.async_sign_in.side_effect = exception - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_PASSWORD: TEST_PASSWORD2, - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {"base": err_msg} - - mock_ayla_api.async_sign_in.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_PASSWORD: TEST_PASSWORD2, - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - assert mock_config_entry.data[CONF_PASSWORD] == TEST_PASSWORD2 diff --git a/tests/components/fujitsu_fglair/test_init.py b/tests/components/fujitsu_fglair/test_init.py deleted file mode 100644 index af51b222c19..00000000000 --- a/tests/components/fujitsu_fglair/test_init.py +++ /dev/null @@ -1,201 +0,0 @@ -"""Test the initialization of fujitsu_fglair entities.""" - -from unittest.mock import AsyncMock, patch - -from ayla_iot_unofficial import AylaAuthError -from ayla_iot_unofficial.fujitsu_consts import FGLAIR_APP_CREDENTIALS -from freezegun.api import FrozenDateTimeFactory -import pytest - -from homeassistant.components.fujitsu_fglair.const import ( - API_REFRESH, - API_TIMEOUT, - CONF_EUROPE, - CONF_REGION, - DOMAIN, - REGION_DEFAULT, - REGION_EU, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ( - CONF_PASSWORD, - CONF_USERNAME, - STATE_UNAVAILABLE, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import aiohttp_client, entity_registry as er - -from . import entity_id, setup_integration -from .conftest import TEST_PASSWORD, TEST_USERNAME - -from tests.common import MockConfigEntry, async_fire_time_changed - - -async def test_auth_failure( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_ayla_api: AsyncMock, - mock_config_entry: MockConfigEntry, - mock_devices: list[AsyncMock], -) -> None: - """Test entities become unavailable after auth failure.""" - await setup_integration(hass, mock_config_entry) - - mock_ayla_api.async_get_devices.side_effect = AylaAuthError - freezer.tick(API_REFRESH) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get(entity_id(mock_devices[0])).state == STATE_UNAVAILABLE - assert hass.states.get(entity_id(mock_devices[1])).state == STATE_UNAVAILABLE - - -@pytest.mark.parametrize( - "mock_config_entry", FGLAIR_APP_CREDENTIALS.keys(), indirect=True -) -async def test_auth_regions( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_ayla_api: AsyncMock, - mock_config_entry: MockConfigEntry, - mock_devices: list[AsyncMock], -) -> None: - """Test that we use the correct credentials if europe is selected.""" - with patch( - "homeassistant.components.fujitsu_fglair.new_ayla_api", return_value=AsyncMock() - ) as new_ayla_api_patch: - await setup_integration(hass, mock_config_entry) - new_ayla_api_patch.assert_called_once_with( - TEST_USERNAME, - TEST_PASSWORD, - FGLAIR_APP_CREDENTIALS[mock_config_entry.data[CONF_REGION]][0], - FGLAIR_APP_CREDENTIALS[mock_config_entry.data[CONF_REGION]][1], - europe=mock_config_entry.data[CONF_REGION] == "EU", - websession=aiohttp_client.async_get_clientsession(hass), - timeout=API_TIMEOUT, - ) - - -@pytest.mark.parametrize("is_europe", [True, False]) -async def test_migrate_entry_v11_v12( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - 
mock_ayla_api: AsyncMock, - is_europe: bool, - mock_devices: list[AsyncMock], -) -> None: - """Test migration from schema 1.1 to 1.2.""" - v11_config_entry = MockConfigEntry( - domain=DOMAIN, - unique_id=TEST_USERNAME, - data={ - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_EUROPE: is_europe, - }, - ) - - await setup_integration(hass, v11_config_entry) - updated_entry = hass.config_entries.async_get_entry(v11_config_entry.entry_id) - - assert updated_entry.state is ConfigEntryState.LOADED - assert updated_entry.version == 1 - assert updated_entry.minor_version == 2 - if is_europe: - assert updated_entry.data[CONF_REGION] is REGION_EU - else: - assert updated_entry.data[CONF_REGION] is REGION_DEFAULT - - -async def test_device_auth_failure( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_ayla_api: AsyncMock, - mock_config_entry: MockConfigEntry, - mock_devices: list[AsyncMock], -) -> None: - """Test entities become unavailable after auth failure with updating devices.""" - await setup_integration(hass, mock_config_entry) - - for d in mock_ayla_api.async_get_devices.return_value: - d.async_update.side_effect = AylaAuthError - - freezer.tick(API_REFRESH) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get(entity_id(mock_devices[0])).state == STATE_UNAVAILABLE - assert hass.states.get(entity_id(mock_devices[1])).state == STATE_UNAVAILABLE - - -async def test_token_expired( - hass: HomeAssistant, - mock_ayla_api: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Make sure sign_in is called if the token expired.""" - mock_ayla_api.token_expired = True - await setup_integration(hass, mock_config_entry) - - # Called once during setup and once during update - assert mock_ayla_api.async_sign_in.call_count == 2 - - -async def test_token_expiring_soon( - hass: HomeAssistant, - mock_ayla_api: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Make sure sign_in is called if the token expired.""" - mock_ayla_api.token_expiring_soon = True - await setup_integration(hass, mock_config_entry) - - mock_ayla_api.async_refresh_auth.assert_called_once() - - -@pytest.mark.parametrize("exception", [AylaAuthError, TimeoutError]) -async def test_startup_exception( - hass: HomeAssistant, - mock_ayla_api: AsyncMock, - mock_config_entry: MockConfigEntry, - exception: Exception, -) -> None: - """Make sure that no devices are added if there was an exception while logging in.""" - mock_ayla_api.async_sign_in.side_effect = exception - await setup_integration(hass, mock_config_entry) - - assert len(hass.states.async_all()) == 0 - - -async def test_one_device_disabled( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, - mock_devices: list[AsyncMock], - mock_ayla_api: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test that coordinator only updates devices that are currently listening.""" - await setup_integration(hass, mock_config_entry) - - for d in mock_devices: - d.async_update.assert_called_once() - d.reset_mock() - - entity = entity_registry.async_get( - entity_registry.async_get_entity_id( - Platform.CLIMATE, DOMAIN, mock_devices[0].device_serial_number - ) - ) - entity_registry.async_update_entity( - entity.entity_id, disabled_by=er.RegistryEntryDisabler.USER - ) - await hass.async_block_till_done() - freezer.tick(API_REFRESH) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert len(hass.states.async_all()) == 
len(mock_devices) - 1 - mock_devices[0].async_update.assert_not_called() - mock_devices[1].async_update.assert_called_once() diff --git a/tests/components/fully_kiosk/conftest.py b/tests/components/fully_kiosk/conftest.py index 028eefcf361..3f7c2985daf 100644 --- a/tests/components/fully_kiosk/conftest.py +++ b/tests/components/fully_kiosk/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations -from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.fully_kiosk.const import DOMAIN from homeassistant.const import ( diff --git a/tests/components/fully_kiosk/test_camera.py b/tests/components/fully_kiosk/test_camera.py index a2e7067ff1b..4e48749eebb 100644 --- a/tests/components/fully_kiosk/test_camera.py +++ b/tests/components/fully_kiosk/test_camera.py @@ -2,7 +2,6 @@ from unittest.mock import MagicMock -from fullykiosk import FullyKioskError import pytest from homeassistant.components.camera import async_get_image @@ -42,12 +41,6 @@ async def test_camera( assert mock_fully_kiosk.getCamshot.call_count == 1 assert image.content == b"image_bytes" - fully_kiosk_error = FullyKioskError("error", "status") - mock_fully_kiosk.getCamshot.side_effect = fully_kiosk_error - with pytest.raises(HomeAssistantError) as error: - await async_get_image(hass, entity_camera) - assert error.value.args[0] == fully_kiosk_error - mock_fully_kiosk.getSettings.return_value = {"motionDetection": False} await hass.services.async_call( "camera", diff --git a/tests/components/fully_kiosk/test_number.py b/tests/components/fully_kiosk/test_number.py index 5f74002f8cd..2fbbf751725 100644 --- a/tests/components/fully_kiosk/test_number.py +++ b/tests/components/fully_kiosk/test_number.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock from homeassistant.components import number from homeassistant.components.fully_kiosk.const import DOMAIN, UPDATE_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN -from homeassistant.core import HomeAssistant, ServiceResponse +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import dt as dt_util @@ -81,11 +81,9 @@ async def test_numbers( assert device_entry.sw_version == "1.42.5" -async def set_value( - hass: HomeAssistant, entity_id: str, value: float -) -> ServiceResponse: +def set_value(hass, entity_id, value): """Set the value of a number entity.""" - return await hass.services.async_call( + return hass.services.async_call( number.DOMAIN, "set_value", {ATTR_ENTITY_ID: entity_id, number.ATTR_VALUE: value}, diff --git a/tests/components/fully_kiosk/test_switch.py b/tests/components/fully_kiosk/test_switch.py index 14a464e0dcd..5b3b5e651b0 100644 --- a/tests/components/fully_kiosk/test_switch.py +++ b/tests/components/fully_kiosk/test_switch.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock from homeassistant.components import switch from homeassistant.components.fully_kiosk.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant, ServiceResponse +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, async_fire_mqtt_message @@ -149,10 +149,8 @@ def has_subscribed(mqtt_mock: MqttMockHAClient, topic: str) -> bool: return False -async def call_service( - hass: 
HomeAssistant, service: str, entity_id: str -) -> ServiceResponse: +def call_service(hass, service, entity_id): """Call any service on entity.""" - return await hass.services.async_call( + return hass.services.async_call( switch.DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True ) diff --git a/tests/components/fyta/conftest.py b/tests/components/fyta/conftest.py index 299b96be959..de5dece776c 100644 --- a/tests/components/fyta/conftest.py +++ b/tests/components/fyta/conftest.py @@ -1,11 +1,10 @@ """Test helpers for FYTA.""" -from collections.abc import Generator from datetime import UTC, datetime -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, patch -from fyta_cli.fyta_models import Credentials, Plant import pytest +from typing_extensions import Generator from homeassistant.components.fyta.const import CONF_EXPIRATION, DOMAIN as FYTA_DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN, CONF_PASSWORD, CONF_USERNAME @@ -36,28 +35,23 @@ def mock_config_entry() -> MockConfigEntry: def mock_fyta_connector(): """Build a fixture for the Fyta API that connects successfully and returns one device.""" - plants: dict[int, Plant] = { - 0: Plant.from_dict(load_json_object_fixture("plant_status1.json", FYTA_DOMAIN)), - 1: Plant.from_dict(load_json_object_fixture("plant_status2.json", FYTA_DOMAIN)), - } - mock_fyta_connector = AsyncMock() mock_fyta_connector.expiration = datetime.fromisoformat(EXPIRATION).replace( tzinfo=UTC ) mock_fyta_connector.client = AsyncMock(autospec=True) - mock_fyta_connector.data = MagicMock() - mock_fyta_connector.update_all_plants.return_value = plants - mock_fyta_connector.plant_list = { - 0: "Gummibaum", - 1: "Kakaobaum", - } + mock_fyta_connector.update_all_plants.return_value = load_json_object_fixture( + "plant_status.json", FYTA_DOMAIN + ) + mock_fyta_connector.plant_list = load_json_object_fixture( + "plant_list.json", FYTA_DOMAIN + ) mock_fyta_connector.login = AsyncMock( - return_value=Credentials( - access_token=ACCESS_TOKEN, - expiration=datetime.fromisoformat(EXPIRATION).replace(tzinfo=UTC), - ) + return_value={ + CONF_ACCESS_TOKEN: ACCESS_TOKEN, + CONF_EXPIRATION: datetime.fromisoformat(EXPIRATION).replace(tzinfo=UTC), + } ) with ( patch( diff --git a/tests/components/fyta/fixtures/plant_list.json b/tests/components/fyta/fixtures/plant_list.json new file mode 100644 index 00000000000..9527c7d9d96 --- /dev/null +++ b/tests/components/fyta/fixtures/plant_list.json @@ -0,0 +1,4 @@ +{ + "0": "Gummibaum", + "1": "Kakaobaum" +} diff --git a/tests/components/fyta/fixtures/plant_status.json b/tests/components/fyta/fixtures/plant_status.json new file mode 100644 index 00000000000..5d9cb2d31d9 --- /dev/null +++ b/tests/components/fyta/fixtures/plant_status.json @@ -0,0 +1,14 @@ +{ + "0": { + "name": "Gummibaum", + "scientific_name": "Ficus elastica", + "status": 1, + "sw_version": "1.0" + }, + "1": { + "name": "Kakaobaum", + "scientific_name": "Theobroma cacao", + "status": 2, + "sw_version": "1.0" + } +} diff --git a/tests/components/fyta/fixtures/plant_status1.json b/tests/components/fyta/fixtures/plant_status1.json deleted file mode 100644 index 72d129492bb..00000000000 --- a/tests/components/fyta/fixtures/plant_status1.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "battery_level": 80, - "battery_status": true, - "last_updated": "2023-01-10 10:10:00", - "light": 2, - "light_status": 3, - "nickname": "Gummibaum", - "moisture": 61, - "moisture_status": 3, - "sensor_available": true, - "sw_version": "1.0", - "status": 
1, - "online": true, - "ph": null, - "plant_id": 0, - "plant_origin_path": "", - "plant_thumb_path": "", - "salinity": 1, - "salinity_status": 4, - "scientific_name": "Ficus elastica", - "temperature": 25.2, - "temperature_status": 3 -} diff --git a/tests/components/fyta/fixtures/plant_status2.json b/tests/components/fyta/fixtures/plant_status2.json deleted file mode 100644 index 8ed09532567..00000000000 --- a/tests/components/fyta/fixtures/plant_status2.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "battery_level": 80, - "battery_status": true, - "last_updated": "2023-01-02 10:10:00", - "light": 2, - "light_status": 3, - "nickname": "Kakaobaum", - "moisture": 61, - "moisture_status": 3, - "sensor_available": true, - "sw_version": "1.0", - "status": 1, - "online": true, - "ph": 7, - "plant_id": 0, - "plant_origin_path": "", - "plant_thumb_path": "", - "salinity": 1, - "salinity_status": 4, - "scientific_name": "Theobroma cacao", - "temperature": 25.2, - "temperature_status": 3 -} diff --git a/tests/components/fyta/fixtures/plant_status3.json b/tests/components/fyta/fixtures/plant_status3.json deleted file mode 100644 index 6e32ba601ed..00000000000 --- a/tests/components/fyta/fixtures/plant_status3.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "battery_level": 80, - "battery_status": true, - "last_updated": "2023-01-02 10:10:00", - "light": 2, - "light_status": 3, - "nickname": "Tomatenpflanze", - "moisture": 61, - "moisture_status": 3, - "sensor_available": true, - "sw_version": "1.0", - "status": 1, - "online": true, - "ph": 7, - "plant_id": 0, - "plant_origin_path": "", - "plant_thumb_path": "", - "salinity": 1, - "salinity_status": 4, - "scientific_name": "Solanum lycopersicum", - "temperature": 25.2, - "temperature_status": 3 -} diff --git a/tests/components/fyta/snapshots/test_diagnostics.ambr b/tests/components/fyta/snapshots/test_diagnostics.ambr index 2af616c6412..7491310129b 100644 --- a/tests/components/fyta/snapshots/test_diagnostics.ambr +++ b/tests/components/fyta/snapshots/test_diagnostics.ambr @@ -9,8 +9,6 @@ 'username': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'fyta', 'entry_id': 'ce5f5431554d101905d31797e1232da8', 'minor_version': 2, @@ -25,50 +23,16 @@ }), 'plant_data': dict({ '0': dict({ - 'battery_level': 80.0, - 'battery_status': True, - 'last_updated': '2023-01-10T10:10:00', - 'light': 2.0, - 'light_status': 3, - 'moisture': 61.0, - 'moisture_status': 3, 'name': 'Gummibaum', - 'online': True, - 'ph': None, - 'plant_id': 0, - 'plant_origin_path': '', - 'plant_thumb_path': '', - 'salinity': 1.0, - 'salinity_status': 4, 'scientific_name': 'Ficus elastica', - 'sensor_available': True, 'status': 1, 'sw_version': '1.0', - 'temperature': 25.2, - 'temperature_status': 3, }), '1': dict({ - 'battery_level': 80.0, - 'battery_status': True, - 'last_updated': '2023-01-02T10:10:00', - 'light': 2.0, - 'light_status': 3, - 'moisture': 61.0, - 'moisture_status': 3, 'name': 'Kakaobaum', - 'online': True, - 'ph': 7.0, - 'plant_id': 0, - 'plant_origin_path': '', - 'plant_thumb_path': '', - 'salinity': 1.0, - 'salinity_status': 4, 'scientific_name': 'Theobroma cacao', - 'sensor_available': True, - 'status': 1, + 'status': 2, 'sw_version': '1.0', - 'temperature': 25.2, - 'temperature_status': 3, }), }), }) diff --git a/tests/components/fyta/snapshots/test_sensor.ambr b/tests/components/fyta/snapshots/test_sensor.ambr index ef583dd28a6..1041fff501e 100644 --- a/tests/components/fyta/snapshots/test_sensor.ambr +++ 
b/tests/components/fyta/snapshots/test_sensor.ambr @@ -1,334 +1,4 @@ # serializer version: 1 -# name: test_all_entities[sensor.gummibaum_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.gummibaum_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-battery_level', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.gummibaum_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Gummibaum Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.gummibaum_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '80.0', - }) -# --- -# name: test_all_entities[sensor.gummibaum_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.gummibaum_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'light', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-light', - 'unit_of_measurement': 'μmol/s⋅m²', - }) -# --- -# name: test_all_entities[sensor.gummibaum_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Gummibaum Light', - 'state_class': , - 'unit_of_measurement': 'μmol/s⋅m²', - }), - 'context': , - 'entity_id': 'sensor.gummibaum_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_all_entities[sensor.gummibaum_light_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.gummibaum_light_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Light state', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'light_status', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-light_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.gummibaum_light_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Gummibaum Light state', - 'options': list([ - 'no_data', - 
'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'context': , - 'entity_id': 'sensor.gummibaum_light_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'perfect', - }) -# --- -# name: test_all_entities[sensor.gummibaum_moisture-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.gummibaum_moisture', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Moisture', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-moisture', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.gummibaum_moisture-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'moisture', - 'friendly_name': 'Gummibaum Moisture', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.gummibaum_moisture', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '61.0', - }) -# --- -# name: test_all_entities[sensor.gummibaum_moisture_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.gummibaum_moisture_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Moisture state', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'moisture_status', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-moisture_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.gummibaum_moisture_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Gummibaum Moisture state', - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'context': , - 'entity_id': 'sensor.gummibaum_moisture_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'perfect', - }) -# --- -# name: test_all_entities[sensor.gummibaum_ph-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.gummibaum_ph', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'pH', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-ph', - 'unit_of_measurement': None, - }) 
-# --- -# name: test_all_entities[sensor.gummibaum_ph-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'ph', - 'friendly_name': 'Gummibaum pH', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.gummibaum_ph', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_all_entities[sensor.gummibaum_plant_state-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -389,121 +59,6 @@ 'state': 'doing_great', }) # --- -# name: test_all_entities[sensor.gummibaum_salinity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.gummibaum_salinity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Salinity', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'salinity', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-salinity', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.gummibaum_salinity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'conductivity', - 'friendly_name': 'Gummibaum Salinity', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.gummibaum_salinity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.0', - }) -# --- -# name: test_all_entities[sensor.gummibaum_salinity_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.gummibaum_salinity_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Salinity state', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'salinity_status', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-salinity_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.gummibaum_salinity_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Gummibaum Salinity state', - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'context': , - 'entity_id': 'sensor.gummibaum_salinity_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'high', - }) -# --- # name: test_all_entities[sensor.gummibaum_scientific_name-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -550,451 +105,6 @@ 'state': 'Ficus elastica', }) # --- -# name: test_all_entities[sensor.gummibaum_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': 
None, - 'entity_id': 'sensor.gummibaum_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.gummibaum_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Gummibaum Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.gummibaum_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '25.2', - }) -# --- -# name: test_all_entities[sensor.gummibaum_temperature_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.gummibaum_temperature_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature state', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'temperature_status', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-temperature_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.gummibaum_temperature_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Gummibaum Temperature state', - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'context': , - 'entity_id': 'sensor.gummibaum_temperature_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'perfect', - }) -# --- -# name: test_all_entities[sensor.kakaobaum_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.kakaobaum_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-battery_level', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.kakaobaum_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Kakaobaum Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.kakaobaum_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '80.0', - }) -# --- -# name: test_all_entities[sensor.kakaobaum_light-entry] - EntityRegistryEntrySnapshot({ - 
'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.kakaobaum_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'light', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-light', - 'unit_of_measurement': 'μmol/s⋅m²', - }) -# --- -# name: test_all_entities[sensor.kakaobaum_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Kakaobaum Light', - 'state_class': , - 'unit_of_measurement': 'μmol/s⋅m²', - }), - 'context': , - 'entity_id': 'sensor.kakaobaum_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_all_entities[sensor.kakaobaum_light_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.kakaobaum_light_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Light state', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'light_status', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-light_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.kakaobaum_light_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Kakaobaum Light state', - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'context': , - 'entity_id': 'sensor.kakaobaum_light_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'perfect', - }) -# --- -# name: test_all_entities[sensor.kakaobaum_moisture-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.kakaobaum_moisture', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Moisture', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-moisture', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.kakaobaum_moisture-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'moisture', - 'friendly_name': 'Kakaobaum Moisture', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.kakaobaum_moisture', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': '61.0', - }) -# --- -# name: test_all_entities[sensor.kakaobaum_moisture_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.kakaobaum_moisture_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Moisture state', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'moisture_status', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-moisture_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.kakaobaum_moisture_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Kakaobaum Moisture state', - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'context': , - 'entity_id': 'sensor.kakaobaum_moisture_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'perfect', - }) -# --- -# name: test_all_entities[sensor.kakaobaum_ph-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.kakaobaum_ph', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'pH', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-ph', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.kakaobaum_ph-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'ph', - 'friendly_name': 'Kakaobaum pH', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.kakaobaum_ph', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '7.0', - }) -# --- # name: test_all_entities[sensor.kakaobaum_plant_state-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1052,122 +162,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'doing_great', - }) -# --- -# name: test_all_entities[sensor.kakaobaum_salinity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.kakaobaum_salinity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Salinity', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'salinity', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-salinity', - 'unit_of_measurement': , - }) -# 
--- -# name: test_all_entities[sensor.kakaobaum_salinity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'conductivity', - 'friendly_name': 'Kakaobaum Salinity', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.kakaobaum_salinity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.0', - }) -# --- -# name: test_all_entities[sensor.kakaobaum_salinity_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.kakaobaum_salinity_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Salinity state', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'salinity_status', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-salinity_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.kakaobaum_salinity_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Kakaobaum Salinity state', - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'context': , - 'entity_id': 'sensor.kakaobaum_salinity_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'high', + 'state': 'need_attention', }) # --- # name: test_all_entities[sensor.kakaobaum_scientific_name-entry] @@ -1216,118 +211,3 @@ 'state': 'Theobroma cacao', }) # --- -# name: test_all_entities[sensor.kakaobaum_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.kakaobaum_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.kakaobaum_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Kakaobaum Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.kakaobaum_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '25.2', - }) -# --- -# name: test_all_entities[sensor.kakaobaum_temperature_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.kakaobaum_temperature_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature state', - 'platform': 'fyta', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'temperature_status', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-temperature_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.kakaobaum_temperature_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Kakaobaum Temperature state', - 'options': list([ - 'no_data', - 'too_low', - 'low', - 'perfect', - 'high', - 'too_high', - ]), - }), - 'context': , - 'entity_id': 'sensor.kakaobaum_temperature_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'perfect', - }) -# --- diff --git a/tests/components/fyta/test_config_flow.py b/tests/components/fyta/test_config_flow.py index e47b78aa893..df0626d0af0 100644 --- a/tests/components/fyta/test_config_flow.py +++ b/tests/components/fyta/test_config_flow.py @@ -158,7 +158,11 @@ async def test_reauth( ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/fyta/test_diagnostics.py b/tests/components/fyta/test_diagnostics.py index cfaa5484b82..3a95b533489 100644 --- a/tests/components/fyta/test_diagnostics.py +++ b/tests/components/fyta/test_diagnostics.py @@ -3,7 +3,6 @@ from unittest.mock import AsyncMock from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -29,4 +28,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot diff --git a/tests/components/fyta/test_sensor.py b/tests/components/fyta/test_sensor.py index 07e3965e66f..e33c54695e5 100644 --- a/tests/components/fyta/test_sensor.py +++ b/tests/components/fyta/test_sensor.py @@ -5,23 +5,16 @@ from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory from fyta_cli.fyta_exceptions import FytaConnectionError, FytaPlantError -from fyta_cli.fyta_models import Plant import pytest from syrupy import SnapshotAssertion -from homeassistant.components.fyta.const import DOMAIN as FYTA_DOMAIN from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from . 
import setup_platform -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_object_fixture, - snapshot_platform, -) +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform async def test_all_entities( @@ -61,32 +54,3 @@ async def test_connection_error( await hass.async_block_till_done() assert hass.states.get("sensor.gummibaum_plant_state").state == STATE_UNAVAILABLE - - -async def test_add_remove_entities( - hass: HomeAssistant, - mock_fyta_connector: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test if entities are added and old are removed.""" - await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) - - assert hass.states.get("sensor.gummibaum_plant_state").state == "doing_great" - - plants: dict[int, Plant] = { - 0: Plant.from_dict(load_json_object_fixture("plant_status1.json", FYTA_DOMAIN)), - 2: Plant.from_dict(load_json_object_fixture("plant_status3.json", FYTA_DOMAIN)), - } - mock_fyta_connector.update_all_plants.return_value = plants - mock_fyta_connector.plant_list = { - 0: "Kautschukbaum", - 2: "Tomatenpflanze", - } - - freezer.tick(delta=timedelta(minutes=10)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get("sensor.kakaobaum_plant_state") is None - assert hass.states.get("sensor.tomatenpflanze_plant_state").state == "doing_great" diff --git a/tests/components/gardena_bluetooth/conftest.py b/tests/components/gardena_bluetooth/conftest.py index d363e0e69f3..08f698b4b67 100644 --- a/tests/components/gardena_bluetooth/conftest.py +++ b/tests/components/gardena_bluetooth/conftest.py @@ -1,6 +1,6 @@ """Common fixtures for the Gardena Bluetooth tests.""" -from collections.abc import Callable, Coroutine, Generator +from collections.abc import Callable, Coroutine from typing import Any from unittest.mock import AsyncMock, Mock, patch @@ -10,6 +10,7 @@ from gardena_bluetooth.const import DeviceInformation from gardena_bluetooth.exceptions import CharacteristicNotFound from gardena_bluetooth.parse import Characteristic import pytest +from typing_extensions import Generator from homeassistant.components.gardena_bluetooth.const import DOMAIN from homeassistant.components.gardena_bluetooth.coordinator import SCAN_INTERVAL @@ -112,5 +113,10 @@ def mock_client( @pytest.fixture(autouse=True) -def enable_all_entities(entity_registry_enabled_by_default: None) -> None: +def enable_all_entities(): """Make sure all entities are enabled.""" + with patch( + "homeassistant.components.gardena_bluetooth.coordinator.GardenaBluetoothEntity.entity_registry_enabled_default", + new=Mock(return_value=True), + ): + yield diff --git a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr index 6d521b1f2c8..98cba151c52 100644 --- a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr +++ b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr @@ -3,11 +3,6 @@ FlowResultSnapshot({ 'context': dict({ 'confirm_only': True, - 'discovery_key': dict({ - 'domain': 'bluetooth', - 'key': '00000000-0000-0000-0000-000000000001', - 'version': 1, - }), 'source': 'bluetooth', 'title_placeholders': dict({ 'name': 'Gardena Water Computer', @@ -23,11 +18,6 @@ FlowResultSnapshot({ 'context': dict({ 'confirm_only': True, - 'discovery_key': dict({ - 'domain': 'bluetooth', - 'key': '00000000-0000-0000-0000-000000000001', - 'version': 1, - }), 'source': 
'bluetooth', 'title_placeholders': dict({ 'name': 'Gardena Water Computer', @@ -49,15 +39,6 @@ 'address': '00000000-0000-0000-0000-000000000001', }), 'disabled_by': None, - 'discovery_keys': dict({ - 'bluetooth': tuple( - dict({ - 'domain': 'bluetooth', - 'key': '00000000-0000-0000-0000-000000000001', - 'version': 1, - }), - ), - }), 'domain': 'gardena_bluetooth', 'entry_id': , 'minor_version': 1, @@ -84,6 +65,60 @@ 'type': , }) # --- +# name: test_bluetooth_lost + FlowResultSnapshot({ + 'data_schema': None, + 'description_placeholders': dict({ + 'name': 'Timer', + }), + 'errors': None, + 'flow_id': , + 'handler': 'gardena_bluetooth', + 'last_step': None, + 'step_id': 'confirm', + 'type': , + }) +# --- +# name: test_bluetooth_lost.1 + FlowResultSnapshot({ + 'context': dict({ + 'confirm_only': True, + 'source': 'bluetooth', + 'title_placeholders': dict({ + 'name': 'Timer', + }), + 'unique_id': '00000000-0000-0000-0000-000000000001', + }), + 'data': dict({ + 'address': '00000000-0000-0000-0000-000000000001', + }), + 'description': None, + 'description_placeholders': None, + 'flow_id': , + 'handler': 'gardena_bluetooth', + 'options': dict({ + }), + 'result': ConfigEntrySnapshot({ + 'data': dict({ + 'address': '00000000-0000-0000-0000-000000000001', + }), + 'disabled_by': None, + 'domain': 'gardena_bluetooth', + 'entry_id': , + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'bluetooth', + 'title': 'Timer', + 'unique_id': '00000000-0000-0000-0000-000000000001', + 'version': 1, + }), + 'title': 'Timer', + 'type': , + 'version': 1, + }) +# --- # name: test_failed_connect FlowResultSnapshot({ 'data_schema': list([ @@ -213,8 +248,6 @@ 'address': '00000000-0000-0000-0000-000000000001', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'gardena_bluetooth', 'entry_id': , 'minor_version': 1, diff --git a/tests/components/gardena_bluetooth/snapshots/test_init.ambr b/tests/components/gardena_bluetooth/snapshots/test_init.ambr index 71195918bb1..8cd77136f8f 100644 --- a/tests/components/gardena_bluetooth/snapshots/test_init.ambr +++ b/tests/components/gardena_bluetooth/snapshots/test_init.ambr @@ -21,7 +21,6 @@ }), 'manufacturer': None, 'model': 'Mock Model', - 'model_id': None, 'name': 'Mock Title', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/gardena_bluetooth/test_config_flow.py b/tests/components/gardena_bluetooth/test_config_flow.py index b20395ec40f..3b4e9c242b3 100644 --- a/tests/components/gardena_bluetooth/test_config_flow.py +++ b/tests/components/gardena_bluetooth/test_config_flow.py @@ -31,7 +31,6 @@ async def test_user_selection( inject_bluetooth_service_info(hass, WATER_TIMER_SERVICE_INFO) inject_bluetooth_service_info(hass, WATER_TIMER_UNNAMED_SERVICE_INFO) - await hass.async_block_till_done(wait_background_tasks=True) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} diff --git a/tests/components/gdacs/snapshots/test_diagnostics.ambr b/tests/components/gdacs/snapshots/test_diagnostics.ambr deleted file mode 100644 index 5b6154307f7..00000000000 --- a/tests/components/gdacs/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,21 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'info': dict({ - 'categories': list([ - ]), - 'latitude': '**REDACTED**', - 'longitude': '**REDACTED**', - 'radius': 25, - 'scan_interval': 300.0, - 'unit_system': 'metric', - }), - 'service': dict({ - 'last_timestamp': None, - 
'last_update': '2024-09-05T15:00:00', - 'last_update_successful': '2024-09-05T15:00:00', - 'status': 'OK', - 'total': 0, - }), - }) -# --- diff --git a/tests/components/gdacs/test_diagnostics.py b/tests/components/gdacs/test_diagnostics.py deleted file mode 100644 index 3c6cf4080a6..00000000000 --- a/tests/components/gdacs/test_diagnostics.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Test GDACS diagnostics.""" - -from __future__ import annotations - -from unittest.mock import patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -@pytest.mark.freeze_time("2024-09-05 15:00:00") -async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, - config_entry: MockConfigEntry, -) -> None: - """Test config entry diagnostics.""" - with patch("aio_georss_client.feed.GeoRssFeed.update") as mock_feed_update: - mock_feed_update.return_value = "OK", [] - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot diff --git a/tests/components/generic/conftest.py b/tests/components/generic/conftest.py index 69e6cc6b696..92a9298cbd5 100644 --- a/tests/components/generic/conftest.py +++ b/tests/components/generic/conftest.py @@ -1,10 +1,7 @@ """Test fixtures for the generic component.""" -from __future__ import annotations - -from collections.abc import Generator from io import BytesIO -from unittest.mock import AsyncMock, MagicMock, Mock, _patch, patch +from unittest.mock import AsyncMock, Mock, patch from PIL import Image import pytest @@ -12,14 +9,12 @@ import respx from homeassistant import config_entries from homeassistant.components.generic.const import DOMAIN -from homeassistant.config_entries import ConfigFlowResult -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @pytest.fixture(scope="package") -def fakeimgbytes_png() -> bytes: +def fakeimgbytes_png(): """Fake image in RAM for testing.""" buf = BytesIO() Image.new("RGB", (1, 1)).save(buf, format="PNG") @@ -27,7 +22,7 @@ def fakeimgbytes_png() -> bytes: @pytest.fixture(scope="package") -def fakeimgbytes_jpg() -> bytes: +def fakeimgbytes_jpg(): """Fake image in RAM for testing.""" buf = BytesIO() # fake image in ram for testing. Image.new("RGB", (1, 1)).save(buf, format="jpeg") @@ -35,7 +30,7 @@ def fakeimgbytes_jpg() -> bytes: @pytest.fixture(scope="package") -def fakeimgbytes_svg() -> bytes: +def fakeimgbytes_svg(): """Fake image in RAM for testing.""" return bytes( '', @@ -44,7 +39,7 @@ def fakeimgbytes_svg() -> bytes: @pytest.fixture(scope="package") -def fakeimgbytes_gif() -> bytes: +def fakeimgbytes_gif(): """Fake image in RAM for testing.""" buf = BytesIO() # fake image in ram for testing. 
Image.new("RGB", (1, 1)).save(buf, format="gif") @@ -52,27 +47,19 @@ def fakeimgbytes_gif() -> bytes: @pytest.fixture -def fakeimg_png(fakeimgbytes_png: bytes) -> Generator[None]: +def fakeimg_png(fakeimgbytes_png): """Set up respx to respond to test url with fake image bytes.""" - respx.get("http://127.0.0.1/testurl/1", name="fake_img").respond( - stream=fakeimgbytes_png - ) - yield - respx.pop("fake_img") + respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) @pytest.fixture -def fakeimg_gif(fakeimgbytes_gif: bytes) -> Generator[None]: +def fakeimg_gif(fakeimgbytes_gif): """Set up respx to respond to test url with fake image bytes.""" - respx.get("http://127.0.0.1/testurl/1", name="fake_img").respond( - stream=fakeimgbytes_gif - ) - yield - respx.pop("fake_img") + respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_gif) @pytest.fixture(scope="package") -def mock_create_stream() -> _patch[MagicMock]: +def mock_create_stream(): """Mock create stream.""" mock_stream = Mock() mock_provider = Mock() @@ -88,7 +75,7 @@ def mock_create_stream() -> _patch[MagicMock]: @pytest.fixture -async def user_flow(hass: HomeAssistant) -> ConfigFlowResult: +async def user_flow(hass): """Initiate a user flow.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -100,7 +87,7 @@ async def user_flow(hass: HomeAssistant) -> ConfigFlowResult: @pytest.fixture(name="config_entry") -def config_entry_fixture(hass: HomeAssistant) -> MockConfigEntry: +def config_entry_fixture(hass): """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -125,9 +112,7 @@ def config_entry_fixture(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture -async def setup_entry( - hass: HomeAssistant, config_entry: MockConfigEntry -) -> MockConfigEntry: +async def setup_entry(hass, config_entry): """Set up a config entry ready to be used in tests.""" await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/generic/test_camera.py b/tests/components/generic/test_camera.py index d3ef0a39241..72a7c32ba25 100644 --- a/tests/components/generic/test_camera.py +++ b/tests/components/generic/test_camera.py @@ -73,7 +73,7 @@ async def help_setup_mock_config_entry( async def test_fetching_url( hass: HomeAssistant, hass_client: ClientSessionGenerator, - fakeimgbytes_png: bytes, + fakeimgbytes_png, caplog: pytest.LogCaptureFixture, ) -> None: """Test that it fetches the given url.""" @@ -132,7 +132,7 @@ async def test_image_caching( hass: HomeAssistant, hass_client: ClientSessionGenerator, freezer: FrozenDateTimeFactory, - fakeimgbytes_png: bytes, + fakeimgbytes_png, ) -> None: """Test that the image is cached and not fetched more often than the framerate indicates.""" respx.get("http://example.com").respond(stream=fakeimgbytes_png) @@ -197,7 +197,7 @@ async def test_image_caching( @respx.mock async def test_fetching_without_verify_ssl( - hass: HomeAssistant, hass_client: ClientSessionGenerator, fakeimgbytes_png: bytes + hass: HomeAssistant, hass_client: ClientSessionGenerator, fakeimgbytes_png ) -> None: """Test that it fetches the given url when ssl verify is off.""" respx.get("https://example.com").respond(stream=fakeimgbytes_png) @@ -221,7 +221,7 @@ async def test_fetching_without_verify_ssl( @respx.mock async def test_fetching_url_with_verify_ssl( - hass: HomeAssistant, hass_client: ClientSessionGenerator, fakeimgbytes_png: bytes + hass: HomeAssistant, 
hass_client: ClientSessionGenerator, fakeimgbytes_png ) -> None: """Test that it fetches the given url when ssl verify is explicitly on.""" respx.get("https://example.com").respond(stream=fakeimgbytes_png) @@ -247,8 +247,8 @@ async def test_fetching_url_with_verify_ssl( async def test_limit_refetch( hass: HomeAssistant, hass_client: ClientSessionGenerator, - fakeimgbytes_png: bytes, - fakeimgbytes_jpg: bytes, + fakeimgbytes_png, + fakeimgbytes_jpg, ) -> None: """Test that it fetches the given url.""" respx.get("http://example.com/0a").respond(stream=fakeimgbytes_png) @@ -275,9 +275,7 @@ async def test_limit_refetch( with ( pytest.raises(aiohttp.ServerTimeoutError), - patch.object( - client.session._connector, "connect", side_effect=asyncio.TimeoutError - ), + patch("asyncio.timeout", side_effect=TimeoutError()), ): resp = await client.get("/api/camera_proxy/camera.config_test") @@ -321,7 +319,7 @@ async def test_stream_source( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, - fakeimgbytes_png: bytes, + fakeimgbytes_png, ) -> None: """Test that the stream source is rendered.""" respx.get("http://example.com").respond(stream=fakeimgbytes_png) @@ -378,7 +376,7 @@ async def test_stream_source_error( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, - fakeimgbytes_png: bytes, + fakeimgbytes_png, ) -> None: """Test that the stream source has an error.""" respx.get("http://example.com").respond(stream=fakeimgbytes_png) @@ -420,7 +418,7 @@ async def test_stream_source_error( @respx.mock async def test_setup_alternative_options( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, fakeimgbytes_png: bytes + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, fakeimgbytes_png ) -> None: """Test that the stream source is setup with different config options.""" respx.get("https://example.com").respond(stream=fakeimgbytes_png) @@ -444,7 +442,7 @@ async def test_no_stream_source( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, - fakeimgbytes_png: bytes, + fakeimgbytes_png, ) -> None: """Test a stream request without stream source option set.""" respx.get("https://example.com").respond(stream=fakeimgbytes_png) @@ -484,8 +482,8 @@ async def test_no_stream_source( async def test_camera_content_type( hass: HomeAssistant, hass_client: ClientSessionGenerator, - fakeimgbytes_svg: bytes, - fakeimgbytes_jpg: bytes, + fakeimgbytes_svg, + fakeimgbytes_jpg, ) -> None: """Test generic camera with custom content_type.""" urlsvg = "https://upload.wikimedia.org/wikipedia/commons/0/02/SVG_logo.svg" @@ -534,8 +532,8 @@ async def test_camera_content_type( async def test_timeout_cancelled( hass: HomeAssistant, hass_client: ClientSessionGenerator, - fakeimgbytes_png: bytes, - fakeimgbytes_jpg: bytes, + fakeimgbytes_png, + fakeimgbytes_jpg, ) -> None: """Test that timeouts and cancellations return last image.""" diff --git a/tests/components/generic/test_config_flow.py b/tests/components/generic/test_config_flow.py index 7575a078675..7e76d8f3891 100644 --- a/tests/components/generic/test_config_flow.py +++ b/tests/components/generic/test_config_flow.py @@ -1,13 +1,10 @@ """Test The generic (IP Camera) config flow.""" -from __future__ import annotations - import contextlib import errno from http import HTTPStatus import os.path -from pathlib import Path -from unittest.mock import AsyncMock, MagicMock, PropertyMock, _patch, patch +from unittest.mock import AsyncMock, PropertyMock, 
patch import httpx import pytest @@ -30,7 +27,7 @@ from homeassistant.components.stream import ( CONF_USE_WALLCLOCK_AS_TIMESTAMPS, ) from homeassistant.components.stream.worker import StreamWorkerError -from homeassistant.config_entries import ConfigEntryState, ConfigFlowResult +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( CONF_AUTHENTICATION, CONF_NAME, @@ -41,7 +38,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry @@ -71,10 +67,10 @@ TESTDATA_YAML = { @respx.mock async def test_form( hass: HomeAssistant, - fakeimgbytes_png: bytes, + fakeimgbytes_png, hass_client: ClientSessionGenerator, - user_flow: ConfigFlowResult, - mock_create_stream: _patch[MagicMock], + user_flow, + mock_create_stream, ) -> None: """Test the form with a normal set of settings.""" @@ -92,9 +88,9 @@ async def test_form( assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "user_confirm_still" client = await hass_client() - preview_url = result1["description_placeholders"]["preview_url"] + preview_id = result1["flow_id"] # Check the preview image works. - resp = await client.get(preview_url) + resp = await client.get(f"/api/generic/preview_flow_image/{preview_id}?t=1") assert resp.status == HTTPStatus.OK assert await resp.read() == fakeimgbytes_png result2 = await hass.config_entries.flow.async_configure( @@ -118,16 +114,15 @@ async def test_form( await hass.async_block_till_done() # Check that the preview image is disabled after. - resp = await client.get(preview_url) + resp = await client.get(f"/api/generic/preview_flow_image/{preview_id}") assert resp.status == HTTPStatus.NOT_FOUND assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @respx.mock -@pytest.mark.usefixtures("fakeimg_png") async def test_form_only_stillimage( - hass: HomeAssistant, user_flow: ConfigFlowResult + hass: HomeAssistant, fakeimg_png, user_flow ) -> None: """Test we complete ok if the user wants still images only.""" result = await hass.config_entries.flow.async_init( @@ -168,10 +163,7 @@ async def test_form_only_stillimage( @respx.mock async def test_form_reject_still_preview( - hass: HomeAssistant, - fakeimgbytes_png: bytes, - mock_create_stream: _patch[MagicMock], - user_flow: ConfigFlowResult, + hass: HomeAssistant, fakeimgbytes_png, mock_create_stream, user_flow ) -> None: """Test we go back to the config screen if the user rejects the still preview.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) @@ -191,11 +183,11 @@ async def test_form_reject_still_preview( @respx.mock -@pytest.mark.usefixtures("fakeimg_png") async def test_form_still_preview_cam_off( hass: HomeAssistant, - mock_create_stream: _patch[MagicMock], - user_flow: ConfigFlowResult, + fakeimg_png, + mock_create_stream, + user_flow, hass_client: ClientSessionGenerator, ) -> None: """Test camera errors are triggered during preview.""" @@ -212,17 +204,16 @@ async def test_form_still_preview_cam_off( ) assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "user_confirm_still" - preview_url = result1["description_placeholders"]["preview_url"] + preview_id = result1["flow_id"] # Try to view the image, should be unavailable. 
client = await hass_client() - resp = await client.get(preview_url) + resp = await client.get(f"/api/generic/preview_flow_image/{preview_id}?t=1") assert resp.status == HTTPStatus.SERVICE_UNAVAILABLE @respx.mock -@pytest.mark.usefixtures("fakeimg_gif") async def test_form_only_stillimage_gif( - hass: HomeAssistant, user_flow: ConfigFlowResult + hass: HomeAssistant, fakeimg_gif, user_flow ) -> None: """Test we complete ok if the user wants a gif.""" data = TESTDATA.copy() @@ -245,7 +236,7 @@ async def test_form_only_stillimage_gif( @respx.mock async def test_form_only_svg_whitespace( - hass: HomeAssistant, fakeimgbytes_svg: bytes, user_flow: ConfigFlowResult + hass: HomeAssistant, fakeimgbytes_svg, user_flow ) -> None: """Test we complete ok if svg starts with whitespace, issue #68889.""" fakeimgbytes_wspace_svg = bytes(" \n ", encoding="utf-8") + fakeimgbytes_svg @@ -279,12 +270,12 @@ async def test_form_only_svg_whitespace( ], ) async def test_form_only_still_sample( - hass: HomeAssistant, user_flow: ConfigFlowResult, image_file + hass: HomeAssistant, user_flow, image_file ) -> None: """Test various sample images #69037.""" image_path = os.path.join(os.path.dirname(__file__), image_file) - image_bytes = await hass.async_add_executor_job(Path(image_path).read_bytes) - respx.get("http://127.0.0.1/testurl/1").respond(stream=image_bytes) + with open(image_path, "rb") as image: + respx.get("http://127.0.0.1/testurl/1").respond(stream=image.read()) data = TESTDATA.copy() data.pop(CONF_STREAM_SOURCE) with patch("homeassistant.components.generic.async_setup_entry", return_value=True): @@ -341,8 +332,8 @@ async def test_form_only_still_sample( ) async def test_still_template( hass: HomeAssistant, - user_flow: ConfigFlowResult, - fakeimgbytes_png: bytes, + user_flow, + fakeimgbytes_png, template, url, expected_result, @@ -367,11 +358,8 @@ async def test_still_template( @respx.mock -@pytest.mark.usefixtures("fakeimg_png") async def test_form_rtsp_mode( - hass: HomeAssistant, - user_flow: ConfigFlowResult, - mock_create_stream: _patch[MagicMock], + hass: HomeAssistant, fakeimg_png, user_flow, mock_create_stream ) -> None: """Test we complete ok if the user enters a stream url.""" data = TESTDATA.copy() @@ -410,10 +398,7 @@ async def test_form_rtsp_mode( async def test_form_only_stream( - hass: HomeAssistant, - fakeimgbytes_jpg: bytes, - user_flow: ConfigFlowResult, - mock_create_stream: _patch[MagicMock], + hass: HomeAssistant, fakeimgbytes_jpg, user_flow, mock_create_stream ) -> None: """Test we complete ok if the user wants stream only.""" data = TESTDATA.copy() @@ -449,7 +434,7 @@ async def test_form_only_stream( async def test_form_still_and_stream_not_provided( - hass: HomeAssistant, user_flow: ConfigFlowResult + hass: HomeAssistant, user_flow ) -> None: """Test we show a suitable error if neither still or stream URL are provided.""" result2 = await hass.config_entries.flow.async_configure( @@ -496,11 +481,7 @@ async def test_form_still_and_stream_not_provided( ], ) async def test_form_image_http_exceptions( - side_effect, - expected_message, - hass: HomeAssistant, - user_flow: ConfigFlowResult, - mock_create_stream: _patch[MagicMock], + side_effect, expected_message, hass: HomeAssistant, user_flow, mock_create_stream ) -> None: """Test we handle image http exceptions.""" respx.get("http://127.0.0.1/testurl/1").side_effect = [ @@ -520,9 +501,7 @@ async def test_form_image_http_exceptions( @respx.mock async def test_form_stream_invalidimage( - hass: HomeAssistant, - user_flow: ConfigFlowResult, - 
mock_create_stream: _patch[MagicMock], + hass: HomeAssistant, user_flow, mock_create_stream ) -> None: """Test we handle invalid image when a stream is specified.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=b"invalid") @@ -539,9 +518,7 @@ async def test_form_stream_invalidimage( @respx.mock async def test_form_stream_invalidimage2( - hass: HomeAssistant, - user_flow: ConfigFlowResult, - mock_create_stream: _patch[MagicMock], + hass: HomeAssistant, user_flow, mock_create_stream ) -> None: """Test we handle invalid image when a stream is specified.""" respx.get("http://127.0.0.1/testurl/1").respond(content=None) @@ -558,9 +535,7 @@ async def test_form_stream_invalidimage2( @respx.mock async def test_form_stream_invalidimage3( - hass: HomeAssistant, - user_flow: ConfigFlowResult, - mock_create_stream: _patch[MagicMock], + hass: HomeAssistant, user_flow, mock_create_stream ) -> None: """Test we handle invalid image when a stream is specified.""" respx.get("http://127.0.0.1/testurl/1").respond(content=bytes([0xFF])) @@ -576,10 +551,7 @@ async def test_form_stream_invalidimage3( @respx.mock -@pytest.mark.usefixtures("fakeimg_png") -async def test_form_stream_timeout( - hass: HomeAssistant, user_flow: ConfigFlowResult -) -> None: +async def test_form_stream_timeout(hass: HomeAssistant, fakeimg_png, user_flow) -> None: """Test we handle invalid auth.""" with patch( "homeassistant.components.generic.config_flow.create_stream" @@ -598,53 +570,8 @@ async def test_form_stream_timeout( @respx.mock -async def test_form_stream_not_set_up(hass: HomeAssistant, user_flow) -> None: - """Test we handle if stream has not been set up.""" - TESTDATA_ONLY_STREAM = TESTDATA.copy() - TESTDATA_ONLY_STREAM.pop(CONF_STILL_IMAGE_URL) - - with patch( - "homeassistant.components.generic.config_flow.create_stream", - side_effect=HomeAssistantError("Stream integration is not set up."), - ): - result1 = await hass.config_entries.flow.async_configure( - user_flow["flow_id"], - TESTDATA_ONLY_STREAM, - ) - await hass.async_block_till_done() - - assert result1["type"] is FlowResultType.FORM - assert result1["errors"] == {"stream_source": "stream_not_set_up"} - - -@respx.mock -async def test_form_stream_other_error(hass: HomeAssistant, user_flow) -> None: - """Test the unknown error for streams.""" - TESTDATA_ONLY_STREAM = TESTDATA.copy() - TESTDATA_ONLY_STREAM.pop(CONF_STILL_IMAGE_URL) - - with ( - patch( - "homeassistant.components.generic.config_flow.create_stream", - side_effect=HomeAssistantError("Some other error."), - ), - pytest.raises(HomeAssistantError), - ): - await hass.config_entries.flow.async_configure( - user_flow["flow_id"], - TESTDATA_ONLY_STREAM, - ) - await hass.async_block_till_done() - - -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.generic.config.error.Some message"], -) -@respx.mock -@pytest.mark.usefixtures("fakeimg_png") async def test_form_stream_worker_error( - hass: HomeAssistant, user_flow: ConfigFlowResult + hass: HomeAssistant, fakeimg_png, user_flow ) -> None: """Test we handle a StreamWorkerError and pass the message through.""" with patch( @@ -661,7 +588,7 @@ async def test_form_stream_worker_error( @respx.mock async def test_form_stream_permission_error( - hass: HomeAssistant, fakeimgbytes_png: bytes, user_flow: ConfigFlowResult + hass: HomeAssistant, fakeimgbytes_png, user_flow ) -> None: """Test we handle permission error.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) @@ -678,9 +605,8 @@ async 
def test_form_stream_permission_error( @respx.mock -@pytest.mark.usefixtures("fakeimg_png") async def test_form_no_route_to_host( - hass: HomeAssistant, user_flow: ConfigFlowResult + hass: HomeAssistant, fakeimg_png, user_flow ) -> None: """Test we handle no route to host.""" with patch( @@ -696,9 +622,8 @@ async def test_form_no_route_to_host( @respx.mock -@pytest.mark.usefixtures("fakeimg_png") async def test_form_stream_io_error( - hass: HomeAssistant, user_flow: ConfigFlowResult + hass: HomeAssistant, fakeimg_png, user_flow ) -> None: """Test we handle no io error when setting up stream.""" with patch( @@ -714,8 +639,7 @@ async def test_form_stream_io_error( @respx.mock -@pytest.mark.usefixtures("fakeimg_png") -async def test_form_oserror(hass: HomeAssistant, user_flow: ConfigFlowResult) -> None: +async def test_form_oserror(hass: HomeAssistant, fakeimg_png, user_flow) -> None: """Test we handle OS error when setting up stream.""" with ( patch( @@ -732,7 +656,7 @@ async def test_form_oserror(hass: HomeAssistant, user_flow: ConfigFlowResult) -> @respx.mock async def test_options_template_error( - hass: HomeAssistant, fakeimgbytes_png: bytes, mock_create_stream: _patch[MagicMock] + hass: HomeAssistant, fakeimgbytes_png, mock_create_stream ) -> None: """Test the options flow with a template error.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) @@ -830,7 +754,7 @@ async def test_slug(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) -> No @respx.mock async def test_options_only_stream( - hass: HomeAssistant, fakeimgbytes_png: bytes, mock_create_stream: _patch[MagicMock] + hass: HomeAssistant, fakeimgbytes_png, mock_create_stream ) -> None: """Test the options flow without a still_image_url.""" respx.get("http://127.0.0.1/testurl/2").respond(stream=fakeimgbytes_png) @@ -867,8 +791,7 @@ async def test_options_only_stream( assert result3["data"][CONF_CONTENT_TYPE] == "image/jpeg" -@pytest.mark.usefixtures("fakeimg_png") -async def test_unload_entry(hass: HomeAssistant) -> None: +async def test_unload_entry(hass: HomeAssistant, fakeimg_png) -> None: """Test unloading the generic IP Camera entry.""" mock_entry = MockConfigEntry(domain=DOMAIN, options=TESTDATA) mock_entry.add_to_hass(hass) @@ -938,9 +861,8 @@ async def test_migrate_existing_ids( @respx.mock -@pytest.mark.usefixtures("fakeimg_png") async def test_use_wallclock_as_timestamps_option( - hass: HomeAssistant, mock_create_stream: _patch[MagicMock] + hass: HomeAssistant, fakeimg_png, mock_create_stream ) -> None: """Test the use_wallclock_as_timestamps option flow.""" diff --git a/tests/components/generic/test_diagnostics.py b/tests/components/generic/test_diagnostics.py index 80fa5fd4d4e..f68c3ba4bc6 100644 --- a/tests/components/generic/test_diagnostics.py +++ b/tests/components/generic/test_diagnostics.py @@ -6,15 +6,12 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.components.generic.diagnostics import redact_url from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - setup_entry: MockConfigEntry, + hass: HomeAssistant, hass_client: ClientSessionGenerator, setup_entry ) -> None: """Test config entry diagnostics.""" diff --git a/tests/components/generic_hygrostat/test_humidifier.py 
b/tests/components/generic_hygrostat/test_humidifier.py index 33a8a0f37bd..eadc1b22527 100644 --- a/tests/components/generic_hygrostat/test_humidifier.py +++ b/tests/components/generic_hygrostat/test_humidifier.py @@ -3,17 +3,13 @@ import datetime from freezegun import freeze_time -from freezegun.api import FrozenDateTimeFactory import pytest import voluptuous as vol from homeassistant.components import input_boolean, switch -from homeassistant.components.generic_hygrostat import ( - DOMAIN as GENERIC_HYDROSTAT_DOMAIN, -) from homeassistant.components.humidifier import ( ATTR_HUMIDITY, - DOMAIN as HUMIDIFIER_DOMAIN, + DOMAIN, MODE_AWAY, MODE_NORMAL, SERVICE_SET_HUMIDITY, @@ -30,20 +26,17 @@ from homeassistant.const import ( ) import homeassistant.core as ha from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, + DOMAIN as HASS_DOMAIN, CoreState, HomeAssistant, - ServiceCall, State, callback, ) -from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.typing import StateType +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( - MockConfigEntry, assert_setup_component, async_fire_time_changed, mock_restore_cache, @@ -90,14 +83,13 @@ async def test_valid_conf(hass: HomeAssistant) -> None: @pytest.fixture -async def setup_comp_1(hass: HomeAssistant) -> None: +async def setup_comp_1(hass): """Initialize components.""" assert await async_setup_component(hass, "homeassistant", {}) await hass.async_block_till_done() -@pytest.mark.usefixtures("setup_comp_1") -async def test_humidifier_input_boolean(hass: HomeAssistant) -> None: +async def test_humidifier_input_boolean(hass: HomeAssistant, setup_comp_1) -> None: """Test humidifier switching input_boolean.""" humidifier_switch = "input_boolean.test" assert await async_setup_component( @@ -107,7 +99,7 @@ async def test_humidifier_input_boolean(hass: HomeAssistant) -> None: assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -125,7 +117,7 @@ async def test_humidifier_input_boolean(hass: HomeAssistant) -> None: _setup_sensor(hass, 23) await hass.async_block_till_done() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 32}, blocking=True, @@ -136,9 +128,8 @@ async def test_humidifier_input_boolean(hass: HomeAssistant) -> None: assert hass.states.get(ENTITY).attributes.get("action") == "humidifying" -@pytest.mark.usefixtures("setup_comp_1") async def test_humidifier_switch( - hass: HomeAssistant, mock_switch_entities: list[MockSwitch] + hass: HomeAssistant, setup_comp_1, mock_switch_entities: list[MockSwitch] ) -> None: """Test humidifier switching test switch.""" setup_test_component_platform(hass, switch.DOMAIN, mock_switch_entities) @@ -151,7 +142,7 @@ async def test_humidifier_switch( assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -170,7 +161,7 @@ async def test_humidifier_switch( await hass.async_block_till_done() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 32}, blocking=True, @@ -181,9 +172,8 @@ async def test_humidifier_switch( assert hass.states.get(ENTITY).attributes.get("action") == "humidifying" -@pytest.mark.usefixtures("setup_comp_1") async def 
test_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry, setup_comp_1 ) -> None: """Test setting a unique ID.""" unique_id = "some_unique_id" @@ -191,7 +181,7 @@ async def test_unique_id( await _setup_switch(hass, True) assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -209,20 +199,20 @@ async def test_unique_id( assert entry.unique_id == unique_id -def _setup_sensor(hass: HomeAssistant, humidity: StateType) -> None: +def _setup_sensor(hass, humidity): """Set up the test sensor.""" hass.states.async_set(ENT_SENSOR, humidity) @pytest.fixture -async def setup_comp_0(hass: HomeAssistant) -> None: +async def setup_comp_0(hass): """Initialize components.""" _setup_sensor(hass, 45) hass.states.async_set(ENT_SWITCH, STATE_OFF) await hass.async_block_till_done() assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -241,14 +231,14 @@ async def setup_comp_0(hass: HomeAssistant) -> None: @pytest.fixture -async def setup_comp_2(hass: HomeAssistant) -> None: +async def setup_comp_2(hass): """Initialize components.""" _setup_sensor(hass, 45) hass.states.async_set(ENT_SWITCH, STATE_OFF) await hass.async_block_till_done() assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -269,7 +259,7 @@ async def test_unavailable_state(hass: HomeAssistant) -> None: """Test the setting of defaults to unknown.""" await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -296,7 +286,7 @@ async def test_setup_defaults_to_unknown(hass: HomeAssistant) -> None: """Test the setting of defaults to unknown.""" await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -313,8 +303,7 @@ async def test_setup_defaults_to_unknown(hass: HomeAssistant) -> None: assert hass.states.get(ENTITY).state == STATE_UNAVAILABLE -@pytest.mark.usefixtures("setup_comp_2") -async def test_default_setup_params(hass: HomeAssistant) -> None: +async def test_default_setup_params(hass: HomeAssistant, setup_comp_2) -> None: """Test the setup with default parameters.""" state = hass.states.get(ENTITY) assert state.attributes.get("min_humidity") == 0 @@ -323,8 +312,9 @@ async def test_default_setup_params(hass: HomeAssistant) -> None: assert state.attributes.get("action") == "idle" -@pytest.mark.usefixtures("setup_comp_0") -async def test_default_setup_params_dehumidifier(hass: HomeAssistant) -> None: +async def test_default_setup_params_dehumidifier( + hass: HomeAssistant, setup_comp_0 +) -> None: """Test the setup with default parameters for dehumidifier.""" state = hass.states.get(ENTITY) assert state.attributes.get("min_humidity") == 0 @@ -333,19 +323,17 @@ async def test_default_setup_params_dehumidifier(hass: HomeAssistant) -> None: assert state.attributes.get("action") == "idle" -@pytest.mark.usefixtures("setup_comp_2") -async def test_get_modes(hass: HomeAssistant) -> None: +async def test_get_modes(hass: HomeAssistant, setup_comp_2) -> None: """Test that the attributes returns the correct modes.""" state = hass.states.get(ENTITY) modes = state.attributes.get("available_modes") assert modes == [MODE_NORMAL, MODE_AWAY] -@pytest.mark.usefixtures("setup_comp_2") -async def test_set_target_humidity(hass: HomeAssistant) -> None: +async 
def test_set_target_humidity(hass: HomeAssistant, setup_comp_2) -> None: """Test the setting of the target humidity.""" await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 40}, blocking=True, @@ -355,7 +343,7 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: assert state.attributes.get("humidity") == 40 with pytest.raises(vol.Invalid): await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: None}, blocking=True, @@ -365,18 +353,17 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: assert state.attributes.get("humidity") == 40 -@pytest.mark.usefixtures("setup_comp_2") -async def test_set_away_mode(hass: HomeAssistant) -> None: +async def test_set_away_mode(hass: HomeAssistant, setup_comp_2) -> None: """Test the setting away mode.""" await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, @@ -386,21 +373,22 @@ async def test_set_away_mode(hass: HomeAssistant) -> None: assert state.attributes.get("humidity") == 35 -@pytest.mark.usefixtures("setup_comp_2") -async def test_set_away_mode_and_restore_prev_humidity(hass: HomeAssistant) -> None: +async def test_set_away_mode_and_restore_prev_humidity( + hass: HomeAssistant, setup_comp_2 +) -> None: """Test the setting and removing away mode. Verify original humidity is restored. """ await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, @@ -409,7 +397,7 @@ async def test_set_away_mode_and_restore_prev_humidity(hass: HomeAssistant) -> N state = hass.states.get(ENTITY) assert state.attributes.get("humidity") == 35 await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_NORMAL}, blocking=True, @@ -419,30 +407,29 @@ async def test_set_away_mode_and_restore_prev_humidity(hass: HomeAssistant) -> N assert state.attributes.get("humidity") == 44 -@pytest.mark.usefixtures("setup_comp_2") async def test_set_away_mode_twice_and_restore_prev_humidity( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_2 ) -> None: """Test the setting away mode twice in a row. Verify original humidity is restored. 
""" await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, @@ -451,7 +438,7 @@ async def test_set_away_mode_twice_and_restore_prev_humidity( state = hass.states.get(ENTITY) assert state.attributes.get("humidity") == 35 await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_NORMAL}, blocking=True, @@ -461,8 +448,7 @@ async def test_set_away_mode_twice_and_restore_prev_humidity( assert state.attributes.get("humidity") == 44 -@pytest.mark.usefixtures("setup_comp_2") -async def test_sensor_affects_attribute(hass: HomeAssistant) -> None: +async def test_sensor_affects_attribute(hass: HomeAssistant, setup_comp_2) -> None: """Test that the sensor changes are reflected in the current_humidity attribute.""" state = hass.states.get(ENTITY) assert state.attributes.get("current_humidity") == 45 @@ -474,8 +460,7 @@ async def test_sensor_affects_attribute(hass: HomeAssistant) -> None: assert state.attributes.get("current_humidity") == 47 -@pytest.mark.usefixtures("setup_comp_2") -async def test_sensor_bad_value(hass: HomeAssistant) -> None: +async def test_sensor_bad_value(hass: HomeAssistant, setup_comp_2) -> None: """Test sensor that have None as state.""" assert hass.states.get(ENTITY).state == STATE_ON @@ -485,9 +470,8 @@ async def test_sensor_bad_value(hass: HomeAssistant) -> None: assert hass.states.get(ENTITY).state == STATE_UNAVAILABLE -@pytest.mark.usefixtures("setup_comp_2") async def test_sensor_bad_value_twice( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, setup_comp_2, caplog: pytest.LogCaptureFixture ) -> None: """Test sensor that the second bad value is not logged as warning.""" assert hass.states.get(ENTITY).state == STATE_ON @@ -515,15 +499,15 @@ async def test_sensor_bad_value_twice( ] == ["DEBUG"] -@pytest.mark.usefixtures("setup_comp_2") -async def test_set_target_humidity_humidifier_on(hass: HomeAssistant) -> None: +async def test_set_target_humidity_humidifier_on( + hass: HomeAssistant, setup_comp_2 +) -> None: """Test if target humidity turn humidifier on.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 36) await hass.async_block_till_done() - calls.clear() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 45}, blocking=True, @@ -531,20 +515,20 @@ async def test_set_target_humidity_humidifier_on(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_2") -async def test_set_target_humidity_humidifier_off(hass: HomeAssistant) -> None: +async def test_set_target_humidity_humidifier_off( + hass: HomeAssistant, setup_comp_2 +) -> None: """Test if target humidity turn humidifier off.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) await hass.async_block_till_done() - calls.clear() await 
hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 36}, blocking=True, @@ -552,19 +536,18 @@ async def test_set_target_humidity_humidifier_off(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_2") async def test_humidity_change_humidifier_on_within_tolerance( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_2 ) -> None: """Test if humidity change doesn't turn on within tolerance.""" calls = await _setup_switch(hass, False) await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, @@ -575,14 +558,13 @@ async def test_humidity_change_humidifier_on_within_tolerance( assert len(calls) == 0 -@pytest.mark.usefixtures("setup_comp_2") async def test_humidity_change_humidifier_on_outside_tolerance( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_2 ) -> None: """Test if humidity change turn humidifier on outside dry tolerance.""" calls = await _setup_switch(hass, False) await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, @@ -592,19 +574,18 @@ async def test_humidity_change_humidifier_on_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_2") async def test_humidity_change_humidifier_off_within_tolerance( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_2 ) -> None: """Test if humidity change doesn't turn off within tolerance.""" calls = await _setup_switch(hass, True) await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 46}, blocking=True, @@ -615,14 +596,13 @@ async def test_humidity_change_humidifier_off_within_tolerance( assert len(calls) == 0 -@pytest.mark.usefixtures("setup_comp_2") async def test_humidity_change_humidifier_off_outside_tolerance( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_2 ) -> None: """Test if humidity change turn humidifier off outside wet tolerance.""" calls = await _setup_switch(hass, True) await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 46}, blocking=True, @@ -632,26 +612,25 @@ async def test_humidity_change_humidifier_off_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_2") -async def test_operation_mode_humidify(hass: HomeAssistant) -> None: +async def test_operation_mode_humidify(hass: HomeAssistant, setup_comp_2) -> None: """Test change mode from OFF to HUMIDIFY. Switch turns on when humidity below setpoint and mode changes. 
""" await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 45}, blocking=True, @@ -661,7 +640,7 @@ async def test_operation_mode_humidify(hass: HomeAssistant) -> None: await hass.async_block_till_done() calls = await _setup_switch(hass, False) await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -669,18 +648,18 @@ async def test_operation_mode_humidify(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def _setup_switch(hass: HomeAssistant, is_on: bool) -> list[ServiceCall]: +async def _setup_switch(hass, is_on): """Set up the test switch.""" hass.states.async_set(ENT_SWITCH, STATE_ON if is_on else STATE_OFF) calls = [] @callback - def log_call(call: ServiceCall) -> None: + def log_call(call): """Log service calls.""" calls.append(call) @@ -692,11 +671,11 @@ async def _setup_switch(hass: HomeAssistant, is_on: bool) -> list[ServiceCall]: @pytest.fixture -async def setup_comp_3(hass: HomeAssistant) -> None: +async def setup_comp_3(hass): """Initialize components.""" assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -715,14 +694,13 @@ async def setup_comp_3(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.mark.usefixtures("setup_comp_3") -async def test_set_target_humidity_dry_off(hass: HomeAssistant) -> None: +async def test_set_target_humidity_dry_off(hass: HomeAssistant, setup_comp_3) -> None: """Test if target humidity turn dry off.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 50) await hass.async_block_till_done() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 55}, blocking=True, @@ -730,27 +708,26 @@ async def test_set_target_humidity_dry_off(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH assert hass.states.get(ENTITY).attributes.get("action") == "drying" -@pytest.mark.usefixtures("setup_comp_3") -async def test_turn_away_mode_on_drying(hass: HomeAssistant) -> None: +async def test_turn_away_mode_on_drying(hass: HomeAssistant, setup_comp_3) -> None: """Test the setting away mode when drying.""" await _setup_switch(hass, True) _setup_sensor(hass, 50) await hass.async_block_till_done() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 34}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, @@ -760,8 +737,7 @@ async def test_turn_away_mode_on_drying(hass: HomeAssistant) -> None: assert state.attributes.get("humidity") == 30 -@pytest.mark.usefixtures("setup_comp_3") -async def test_operation_mode_dry(hass: HomeAssistant) -> None: +async def 
test_operation_mode_dry(hass: HomeAssistant, setup_comp_3) -> None: """Test change mode from OFF to DRY. Switch turns on when humidity below setpoint and state changes. @@ -771,7 +747,7 @@ async def test_operation_mode_dry(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 0 await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -781,7 +757,7 @@ async def test_operation_mode_dry(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 0 await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -789,39 +765,38 @@ async def test_operation_mode_dry(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_3") -async def test_set_target_humidity_dry_on(hass: HomeAssistant) -> None: +async def test_set_target_humidity_dry_on(hass: HomeAssistant, setup_comp_3) -> None: """Test if target humidity turn dry on.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 45) await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_3") -async def test_init_ignores_tolerance(hass: HomeAssistant) -> None: +async def test_init_ignores_tolerance(hass: HomeAssistant, setup_comp_3) -> None: """Test if tolerance is ignored on initialization.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 39) await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_3") -async def test_humidity_change_dry_off_within_tolerance(hass: HomeAssistant) -> None: +async def test_humidity_change_dry_off_within_tolerance( + hass: HomeAssistant, setup_comp_3 +) -> None: """Test if humidity change doesn't turn dry off within tolerance.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) @@ -830,9 +805,8 @@ async def test_humidity_change_dry_off_within_tolerance(hass: HomeAssistant) -> assert len(calls) == 0 -@pytest.mark.usefixtures("setup_comp_3") async def test_set_humidity_change_dry_off_outside_tolerance( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_3 ) -> None: """Test if humidity change turn dry off.""" calls = await _setup_switch(hass, True) @@ -840,13 +814,14 @@ async def test_set_humidity_change_dry_off_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_3") -async def test_humidity_change_dry_on_within_tolerance(hass: HomeAssistant) -> None: +async def test_humidity_change_dry_on_within_tolerance( + hass: HomeAssistant, setup_comp_3 +) -> None: """Test if humidity change doesn't turn dry on within tolerance.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 
37) @@ -855,27 +830,29 @@ async def test_humidity_change_dry_on_within_tolerance(hass: HomeAssistant) -> N assert len(calls) == 0 -@pytest.mark.usefixtures("setup_comp_3") -async def test_humidity_change_dry_on_outside_tolerance(hass: HomeAssistant) -> None: +async def test_humidity_change_dry_on_outside_tolerance( + hass: HomeAssistant, setup_comp_3 +) -> None: """Test if humidity change turn dry on.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 45) await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_3") -async def test_running_when_operating_mode_is_off_2(hass: HomeAssistant) -> None: +async def test_running_when_operating_mode_is_off_2( + hass: HomeAssistant, setup_comp_3 +) -> None: """Test that the switch turns off when enabled is set False.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) await hass.async_block_till_done() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -883,20 +860,21 @@ async def test_running_when_operating_mode_is_off_2(hass: HomeAssistant) -> None await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH assert hass.states.get(ENTITY).attributes.get("action") == "off" -@pytest.mark.usefixtures("setup_comp_3") -async def test_no_state_change_when_operation_mode_off_2(hass: HomeAssistant) -> None: +async def test_no_state_change_when_operation_mode_off_2( + hass: HomeAssistant, setup_comp_3 +) -> None: """Test that the switch doesn't turn on when enabled is False.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 30) await hass.async_block_till_done() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -909,11 +887,11 @@ async def test_no_state_change_when_operation_mode_off_2(hass: HomeAssistant) -> @pytest.fixture -async def setup_comp_4(hass: HomeAssistant) -> None: +async def setup_comp_4(hass): """Initialize components.""" assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -923,7 +901,7 @@ async def setup_comp_4(hass: HomeAssistant) -> None: "humidifier": ENT_SWITCH, "target_sensor": ENT_SENSOR, "device_class": "dehumidifier", - "min_cycle_duration": {"minutes": 10}, + "min_cycle_duration": datetime.timedelta(minutes=10), "initial_state": True, "target_humidity": 40, } @@ -932,9 +910,8 @@ async def setup_comp_4(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.mark.usefixtures("setup_comp_4") async def test_humidity_change_dry_trigger_on_not_long_enough( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_4 ) -> None: """Test if humidity change turn dry on.""" calls = await _setup_switch(hass, False) @@ -947,8 +924,9 @@ async def test_humidity_change_dry_trigger_on_not_long_enough( assert len(calls) == 0 -@pytest.mark.usefixtures("setup_comp_4") -async def test_humidity_change_dry_trigger_on_long_enough(hass: HomeAssistant) -> None: +async def test_humidity_change_dry_trigger_on_long_enough( + hass: HomeAssistant, setup_comp_4 +) -> None: """Test if humidity 
change turn dry on.""" fake_changed = datetime.datetime(1970, 11, 11, 11, 11, 11, tzinfo=datetime.UTC) with freeze_time(fake_changed): @@ -961,14 +939,13 @@ async def test_humidity_change_dry_trigger_on_long_enough(hass: HomeAssistant) - await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_4") async def test_humidity_change_dry_trigger_off_not_long_enough( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_4 ) -> None: """Test if humidity change turn dry on.""" calls = await _setup_switch(hass, True) @@ -981,8 +958,9 @@ async def test_humidity_change_dry_trigger_off_not_long_enough( assert len(calls) == 0 -@pytest.mark.usefixtures("setup_comp_4") -async def test_humidity_change_dry_trigger_off_long_enough(hass: HomeAssistant) -> None: +async def test_humidity_change_dry_trigger_off_long_enough( + hass: HomeAssistant, setup_comp_4 +) -> None: """Test if humidity change turn dry on.""" fake_changed = datetime.datetime(1970, 11, 11, 11, 11, 11, tzinfo=datetime.UTC) with freeze_time(fake_changed): @@ -995,20 +973,21 @@ async def test_humidity_change_dry_trigger_off_long_enough(hass: HomeAssistant) await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_4") -async def test_mode_change_dry_trigger_off_not_long_enough(hass: HomeAssistant) -> None: +async def test_mode_change_dry_trigger_off_not_long_enough( + hass: HomeAssistant, setup_comp_4 +) -> None: """Test if mode change turns dry off despite minimum cycle.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) await hass.async_block_till_done() assert len(calls) == 0 await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1021,14 +1000,15 @@ async def test_mode_change_dry_trigger_off_not_long_enough(hass: HomeAssistant) assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_4") -async def test_mode_change_dry_trigger_on_not_long_enough(hass: HomeAssistant) -> None: +async def test_mode_change_dry_trigger_on_not_long_enough( + hass: HomeAssistant, setup_comp_4 +) -> None: """Test if mode change turns dry on despite minimum cycle.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 35) await hass.async_block_till_done() await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1038,7 +1018,7 @@ async def test_mode_change_dry_trigger_on_not_long_enough(hass: HomeAssistant) - await hass.async_block_till_done() assert len(calls) == 0 await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1052,11 +1032,11 @@ async def test_mode_change_dry_trigger_on_not_long_enough(hass: HomeAssistant) - @pytest.fixture -async def setup_comp_6(hass: HomeAssistant) -> None: +async def setup_comp_6(hass): """Initialize components.""" assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1065,7 +1045,7 @@ async def setup_comp_6(hass: HomeAssistant) -> None: "wet_tolerance": 3, "humidifier": 
ENT_SWITCH, "target_sensor": ENT_SENSOR, - "min_cycle_duration": {"minutes": 10}, + "min_cycle_duration": datetime.timedelta(minutes=10), "initial_state": True, "target_humidity": 40, } @@ -1074,9 +1054,8 @@ async def setup_comp_6(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.mark.usefixtures("setup_comp_6") async def test_humidity_change_humidifier_trigger_off_not_long_enough( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_6 ) -> None: """Test if humidity change doesn't turn humidifier off because of time.""" calls = await _setup_switch(hass, True) @@ -1089,9 +1068,8 @@ async def test_humidity_change_humidifier_trigger_off_not_long_enough( assert len(calls) == 0 -@pytest.mark.usefixtures("setup_comp_6") async def test_humidity_change_humidifier_trigger_on_not_long_enough( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_6 ) -> None: """Test if humidity change doesn't turn humidifier on because of time.""" calls = await _setup_switch(hass, False) @@ -1104,9 +1082,8 @@ async def test_humidity_change_humidifier_trigger_on_not_long_enough( assert len(calls) == 0 -@pytest.mark.usefixtures("setup_comp_6") async def test_humidity_change_humidifier_trigger_on_long_enough( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_6 ) -> None: """Test if humidity change turn humidifier on after min cycle.""" fake_changed = datetime.datetime(1970, 11, 11, 11, 11, 11, tzinfo=datetime.UTC) @@ -1120,14 +1097,13 @@ async def test_humidity_change_humidifier_trigger_on_long_enough( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_6") async def test_humidity_change_humidifier_trigger_off_long_enough( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_6 ) -> None: """Test if humidity change turn humidifier off after min cycle.""" fake_changed = datetime.datetime(1970, 11, 11, 11, 11, 11, tzinfo=datetime.UTC) @@ -1141,14 +1117,13 @@ async def test_humidity_change_humidifier_trigger_off_long_enough( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_6") async def test_mode_change_humidifier_trigger_off_not_long_enough( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_6 ) -> None: """Test if mode change turns humidifier off despite minimum cycle.""" calls = await _setup_switch(hass, True) @@ -1157,7 +1132,7 @@ async def test_mode_change_humidifier_trigger_off_not_long_enough( assert len(calls) == 0 await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1170,9 +1145,8 @@ async def test_mode_change_humidifier_trigger_off_not_long_enough( assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_6") async def test_mode_change_humidifier_trigger_on_not_long_enough( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_6 ) -> None: """Test if mode change turns humidifier on despite minimum cycle.""" calls = await _setup_switch(hass, False) @@ -1181,7 +1155,7 @@ async def test_mode_change_humidifier_trigger_on_not_long_enough( assert len(calls) == 0 await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, 
SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1194,7 +1168,7 @@ async def test_mode_change_humidifier_trigger_on_not_long_enough( assert len(calls) == 0 await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1208,11 +1182,11 @@ async def test_mode_change_humidifier_trigger_on_not_long_enough( @pytest.fixture -async def setup_comp_7(hass: HomeAssistant) -> None: +async def setup_comp_7(hass): """Initialize components.""" assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1222,8 +1196,8 @@ async def setup_comp_7(hass: HomeAssistant) -> None: "humidifier": ENT_SWITCH, "target_sensor": ENT_SENSOR, "device_class": "dehumidifier", - "min_cycle_duration": {"minutes": 15}, - "keep_alive": {"minutes": 10}, + "min_cycle_duration": datetime.timedelta(minutes=15), + "keep_alive": datetime.timedelta(minutes=10), "initial_state": True, "target_humidity": 40, } @@ -1232,9 +1206,8 @@ async def setup_comp_7(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.mark.usefixtures("setup_comp_7") async def test_humidity_change_dry_trigger_on_long_enough_3( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_7 ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = await _setup_switch(hass, True) @@ -1248,14 +1221,13 @@ async def test_humidity_change_dry_trigger_on_long_enough_3( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_7") async def test_humidity_change_dry_trigger_off_long_enough_3( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_7 ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = await _setup_switch(hass, False) @@ -1269,17 +1241,17 @@ async def test_humidity_change_dry_trigger_off_long_enough_3( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @pytest.fixture -async def setup_comp_8(hass: HomeAssistant) -> None: +async def setup_comp_8(hass): """Initialize components.""" assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1288,8 +1260,8 @@ async def setup_comp_8(hass: HomeAssistant) -> None: "wet_tolerance": 3, "humidifier": ENT_SWITCH, "target_sensor": ENT_SENSOR, - "min_cycle_duration": {"minutes": 15}, - "keep_alive": {"minutes": 10}, + "min_cycle_duration": datetime.timedelta(minutes=15), + "keep_alive": datetime.timedelta(minutes=10), "initial_state": True, "target_humidity": 40, } @@ -1298,9 +1270,8 @@ async def setup_comp_8(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.mark.usefixtures("setup_comp_8") async def test_humidity_change_humidifier_trigger_on_long_enough_2( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_8 ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = await _setup_switch(hass, True) @@ -1314,14 +1285,13 @@ async def test_humidity_change_humidifier_trigger_on_long_enough_2( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == 
HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_8") async def test_humidity_change_humidifier_trigger_off_long_enough_2( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_8 ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = await _setup_switch(hass, False) @@ -1335,7 +1305,7 @@ async def test_humidity_change_humidifier_trigger_off_long_enough_2( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @@ -1344,7 +1314,7 @@ async def test_float_tolerance_values(hass: HomeAssistant) -> None: """Test if dehumidifier does not turn on within floating point tolerance.""" assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1370,7 +1340,7 @@ async def test_float_tolerance_values_2(hass: HomeAssistant) -> None: """Test if dehumidifier turns off when oudside of floating point tolerance values.""" assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1390,7 +1360,7 @@ async def test_float_tolerance_values_2(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @@ -1401,7 +1371,7 @@ async def test_custom_setup_params(hass: HomeAssistant) -> None: await hass.async_block_till_done() result = await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1441,7 +1411,7 @@ async def test_restore_state(hass: HomeAssistant) -> None: await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1479,7 +1449,7 @@ async def test_restore_state_target_humidity(hass: HomeAssistant) -> None: await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1522,7 +1492,7 @@ async def test_restore_state_and_return_to_normal(hass: HomeAssistant) -> None: await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1542,7 +1512,7 @@ async def test_restore_state_and_return_to_normal(hass: HomeAssistant) -> None: assert state.state == STATE_OFF await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: "humidifier.test_hygrostat", ATTR_MODE: MODE_NORMAL}, blocking=True, @@ -1577,7 +1547,7 @@ async def test_no_restore_state(hass: HomeAssistant) -> None: await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1620,10 +1590,10 @@ async def test_restore_state_uncoherence_case(hass: HomeAssistant) -> None: assert state.state == STATE_OFF -async def _setup_humidifier(hass: HomeAssistant) -> None: +async def _setup_humidifier(hass): assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1640,9 +1610,7 @@ async def _setup_humidifier(hass: HomeAssistant) -> None: await hass.async_block_till_done() -def _mock_restore_cache( - 
hass: HomeAssistant, humidity: int = 40, state: str = STATE_OFF -) -> None: +def _mock_restore_cache(hass, humidity=40, state=STATE_OFF): mock_restore_cache( hass, ( @@ -1665,7 +1633,7 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: await hass.async_block_till_done() await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1687,7 +1655,7 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: # Switch to Away mode await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: "humidifier.test_hygrostat", ATTR_MODE: MODE_AWAY}, blocking=True, @@ -1703,7 +1671,7 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: # Change target humidity await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: "humidifier.test_hygrostat", ATTR_HUMIDITY: 42}, blocking=True, @@ -1719,7 +1687,7 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: # Return to Normal mode await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: "humidifier.test_hygrostat", ATTR_MODE: MODE_NORMAL}, blocking=True, @@ -1734,11 +1702,8 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: assert state.state == STATE_OFF -@pytest.mark.usefixtures("setup_comp_1") async def test_sensor_stale_duration( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - freezer: FrozenDateTimeFactory, + hass: HomeAssistant, setup_comp_1, caplog: pytest.LogCaptureFixture ) -> None: """Test turn off on sensor stale.""" @@ -1750,7 +1715,7 @@ async def test_sensor_stale_duration( assert await async_setup_component( hass, - HUMIDIFIER_DOMAIN, + DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1770,7 +1735,7 @@ async def test_sensor_stale_duration( assert hass.states.get(humidifier_switch).state == STATE_OFF await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 32}, blocking=True, @@ -1780,31 +1745,14 @@ async def test_sensor_stale_duration( assert hass.states.get(humidifier_switch).state == STATE_ON # Wait 11 minutes - freezer.tick(datetime.timedelta(minutes=11)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(minutes=11)) await hass.async_block_till_done() # 11 minutes later, no news from the sensor : emergency cut off assert hass.states.get(humidifier_switch).state == STATE_OFF assert "emergency" in caplog.text - # Updated value from sensor received (same value) - _setup_sensor(hass, 23) - await hass.async_block_till_done() - - # A new value has arrived, the humidifier should go ON - assert hass.states.get(humidifier_switch).state == STATE_ON - - # Wait 11 minutes - freezer.tick(datetime.timedelta(minutes=11)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # 11 minutes later, no news from the sensor : emergency cut off - assert hass.states.get(humidifier_switch).state == STATE_OFF - assert "emergency" in caplog.text - - # Updated value from sensor received (new value) + # Updated value from sensor received _setup_sensor(hass, 24) await hass.async_block_till_done() @@ -1813,7 +1761,7 @@ async def test_sensor_stale_duration( # Manual turn off await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1834,50 +1782,3 @@ async def 
test_sensor_stale_duration( # Not turning on by itself assert hass.states.get(humidifier_switch).state == STATE_OFF - - -async def test_device_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device_registry: dr.DeviceRegistry, -) -> None: - """Test for source entity device.""" - - source_config_entry = MockConfigEntry() - source_config_entry.add_to_hass(hass) - source_device_entry = device_registry.async_get_or_create( - config_entry_id=source_config_entry.entry_id, - identifiers={("switch", "identifier_test")}, - connections={("mac", "30:31:32:33:34:35")}, - ) - source_entity = entity_registry.async_get_or_create( - "switch", - "test", - "source", - config_entry=source_config_entry, - device_id=source_device_entry.id, - ) - await hass.async_block_till_done() - assert entity_registry.async_get("switch.test_source") is not None - - helper_config_entry = MockConfigEntry( - data={}, - domain=GENERIC_HYDROSTAT_DOMAIN, - options={ - "device_class": "humidifier", - "dry_tolerance": 2.0, - "humidifier": "switch.test_source", - "name": "Test", - "target_sensor": ENT_SENSOR, - "wet_tolerance": 4.0, - }, - title="Test", - ) - helper_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(helper_config_entry.entry_id) - await hass.async_block_till_done() - - helper_entity = entity_registry.async_get("humidifier.test") - assert helper_entity is not None - assert helper_entity.device_id == source_entity.device_id diff --git a/tests/components/generic_hygrostat/test_init.py b/tests/components/generic_hygrostat/test_init.py deleted file mode 100644 index bd4792f939d..00000000000 --- a/tests/components/generic_hygrostat/test_init.py +++ /dev/null @@ -1,102 +0,0 @@ -"""Test Generic Hygrostat component setup process.""" - -from __future__ import annotations - -from homeassistant.components.generic_hygrostat import ( - DOMAIN as GENERIC_HYDROSTAT_DOMAIN, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er - -from .test_humidifier import ENT_SENSOR - -from tests.common import MockConfigEntry - - -async def test_device_cleaning( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test cleaning of devices linked to the helper config entry.""" - - # Source entity device config entry - source_config_entry = MockConfigEntry() - source_config_entry.add_to_hass(hass) - - # Device entry of the source entity - source_device1_entry = device_registry.async_get_or_create( - config_entry_id=source_config_entry.entry_id, - identifiers={("switch", "identifier_test1")}, - connections={("mac", "30:31:32:33:34:01")}, - ) - - # Source entity registry - source_entity = entity_registry.async_get_or_create( - "switch", - "test", - "source", - config_entry=source_config_entry, - device_id=source_device1_entry.id, - ) - await hass.async_block_till_done() - assert entity_registry.async_get("switch.test_source") is not None - - # Configure the configuration entry for helper - helper_config_entry = MockConfigEntry( - data={}, - domain=GENERIC_HYDROSTAT_DOMAIN, - options={ - "device_class": "humidifier", - "dry_tolerance": 2.0, - "humidifier": "switch.test_source", - "name": "Test", - "target_sensor": ENT_SENSOR, - "wet_tolerance": 4.0, - }, - title="Test", - ) - helper_config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(helper_config_entry.entry_id) - await hass.async_block_till_done() - - # Confirm the link between the source 
entity device and the helper entity - helper_entity = entity_registry.async_get("humidifier.test") - assert helper_entity is not None - assert helper_entity.device_id == source_entity.device_id - - # Device entry incorrectly linked to config entry - device_registry.async_get_or_create( - config_entry_id=helper_config_entry.entry_id, - identifiers={("sensor", "identifier_test2")}, - connections={("mac", "30:31:32:33:34:02")}, - ) - device_registry.async_get_or_create( - config_entry_id=helper_config_entry.entry_id, - identifiers={("sensor", "identifier_test3")}, - connections={("mac", "30:31:32:33:34:03")}, - ) - await hass.async_block_till_done() - - # Before reloading the config entry, 3 devices are expected to be linked - devices_before_reload = device_registry.devices.get_devices_for_config_entry_id( - helper_config_entry.entry_id - ) - assert len(devices_before_reload) == 3 - - # Config entry reload - await hass.config_entries.async_reload(helper_config_entry.entry_id) - await hass.async_block_till_done() - - # Confirm the link between the source entity device and the helper entity - helper_entity = entity_registry.async_get("humidifier.test") - assert helper_entity is not None - assert helper_entity.device_id == source_entity.device_id - - # After reloading the config entry, only one linked device is expected - devices_after_reload = device_registry.devices.get_devices_for_config_entry_id( - helper_config_entry.entry_id - ) - assert len(devices_after_reload) == 1 - - assert devices_after_reload[0].id == source_device1_entry.id diff --git a/tests/components/generic_thermostat/snapshots/test_config_flow.ambr b/tests/components/generic_thermostat/snapshots/test_config_flow.ambr index ed757d1c2ae..d515d52a81b 100644 --- a/tests/components/generic_thermostat/snapshots/test_config_flow.ambr +++ b/tests/components/generic_thermostat/snapshots/test_config_flow.ambr @@ -18,25 +18,6 @@ 'type': , }) # --- -# name: test_config_flow_preset_accepts_float[create_entry] - FlowResultSnapshot({ - 'result': ConfigEntrySnapshot({ - 'title': 'My thermostat', - }), - 'title': 'My thermostat', - 'type': , - }) -# --- -# name: test_config_flow_preset_accepts_float[init] - FlowResultSnapshot({ - 'type': , - }) -# --- -# name: test_config_flow_preset_accepts_float[presets] - FlowResultSnapshot({ - 'type': , - }) -# --- # name: test_options[create_entry] FlowResultSnapshot({ 'result': True, diff --git a/tests/components/generic_thermostat/test_climate.py b/tests/components/generic_thermostat/test_climate.py index 39435f154c4..1ecde733f48 100644 --- a/tests/components/generic_thermostat/test_climate.py +++ b/tests/components/generic_thermostat/test_climate.py @@ -11,7 +11,7 @@ from homeassistant import config as hass_config from homeassistant.components import input_boolean, switch from homeassistant.components.climate import ( ATTR_PRESET_MODE, - DOMAIN as CLIMATE_DOMAIN, + DOMAIN, PRESET_ACTIVITY, PRESET_AWAY, PRESET_COMFORT, @@ -21,7 +21,7 @@ from homeassistant.components.climate import ( PRESET_SLEEP, HVACMode, ) -from homeassistant.components.generic_thermostat.const import ( +from homeassistant.components.generic_thermostat import ( DOMAIN as GENERIC_THERMOSTAT_DOMAIN, ) from homeassistant.const import ( @@ -37,22 +37,19 @@ from homeassistant.const import ( ) import homeassistant.core as ha from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, + DOMAIN as HASS_DOMAIN, CoreState, HomeAssistant, - ServiceCall, State, callback, ) from homeassistant.exceptions import ServiceValidationError -from 
homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.typing import StateType +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM from tests.common import ( - MockConfigEntry, assert_setup_component, async_fire_time_changed, async_mock_service, @@ -105,15 +102,14 @@ async def test_valid_conf(hass: HomeAssistant) -> None: @pytest.fixture -async def setup_comp_1(hass: HomeAssistant) -> None: +async def setup_comp_1(hass): """Initialize components.""" hass.config.units = METRIC_SYSTEM assert await async_setup_component(hass, "homeassistant", {}) await hass.async_block_till_done() -@pytest.mark.usefixtures("setup_comp_1") -async def test_heater_input_boolean(hass: HomeAssistant) -> None: +async def test_heater_input_boolean(hass: HomeAssistant, setup_comp_1) -> None: """Test heater switching input_boolean.""" heater_switch = "input_boolean.test" assert await async_setup_component( @@ -122,7 +118,7 @@ async def test_heater_input_boolean(hass: HomeAssistant) -> None: assert await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -145,9 +141,8 @@ async def test_heater_input_boolean(hass: HomeAssistant) -> None: assert hass.states.get(heater_switch).state == STATE_ON -@pytest.mark.usefixtures("setup_comp_1") async def test_heater_switch( - hass: HomeAssistant, mock_switch_entities: list[MockSwitch] + hass: HomeAssistant, setup_comp_1, mock_switch_entities: list[MockSwitch] ) -> None: """Test heater switching test switch.""" setup_test_component_platform(hass, switch.DOMAIN, mock_switch_entities) @@ -160,7 +155,7 @@ async def test_heater_switch( assert await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -182,9 +177,8 @@ async def test_heater_switch( assert hass.states.get(heater_switch).state == STATE_ON -@pytest.mark.usefixtures("setup_comp_1") async def test_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry, setup_comp_1 ) -> None: """Test setting a unique ID.""" unique_id = "some_unique_id" @@ -192,7 +186,7 @@ async def test_unique_id( _setup_switch(hass, True) assert await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -210,18 +204,18 @@ async def test_unique_id( assert entry.unique_id == unique_id -def _setup_sensor(hass: HomeAssistant, temp: StateType) -> None: +def _setup_sensor(hass, temp): """Set up the test sensor.""" hass.states.async_set(ENT_SENSOR, temp) @pytest.fixture -async def setup_comp_2(hass: HomeAssistant) -> None: +async def setup_comp_2(hass): """Initialize components.""" hass.config.units = METRIC_SYSTEM assert await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -248,7 +242,7 @@ async def test_setup_defaults_to_unknown(hass: HomeAssistant) -> None: hass.config.units = METRIC_SYSTEM await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -272,7 +266,7 @@ async def test_setup_gets_current_temp_from_sensor(hass: HomeAssistant) -> None: await hass.async_block_till_done() await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": 
"generic_thermostat", @@ -289,8 +283,7 @@ async def test_setup_gets_current_temp_from_sensor(hass: HomeAssistant) -> None: assert hass.states.get(ENTITY).attributes["current_temperature"] == 18 -@pytest.mark.usefixtures("setup_comp_2") -async def test_default_setup_params(hass: HomeAssistant) -> None: +async def test_default_setup_params(hass: HomeAssistant, setup_comp_2) -> None: """Test the setup with default parameters.""" state = hass.states.get(ENTITY) assert state.attributes.get("min_temp") == 7 @@ -299,16 +292,14 @@ async def test_default_setup_params(hass: HomeAssistant) -> None: assert state.attributes.get("target_temp_step") == 0.1 -@pytest.mark.usefixtures("setup_comp_2") -async def test_get_hvac_modes(hass: HomeAssistant) -> None: +async def test_get_hvac_modes(hass: HomeAssistant, setup_comp_2) -> None: """Test that the operation list returns the correct modes.""" state = hass.states.get(ENTITY) modes = state.attributes.get("hvac_modes") assert modes == [HVACMode.HEAT, HVACMode.OFF] -@pytest.mark.usefixtures("setup_comp_2") -async def test_set_target_temp(hass: HomeAssistant) -> None: +async def test_set_target_temp(hass: HomeAssistant, setup_comp_2) -> None: """Test the setting of the target temperature.""" await common.async_set_temperature(hass, 30) state = hass.states.get(ENTITY) @@ -331,8 +322,7 @@ async def test_set_target_temp(hass: HomeAssistant) -> None: (PRESET_ACTIVITY, 21), ], ) -@pytest.mark.usefixtures("setup_comp_2") -async def test_set_away_mode(hass: HomeAssistant, preset, temp) -> None: +async def test_set_away_mode(hass: HomeAssistant, setup_comp_2, preset, temp) -> None: """Test the setting away mode.""" await common.async_set_temperature(hass, 23) await common.async_set_preset_mode(hass, preset) @@ -352,9 +342,8 @@ async def test_set_away_mode(hass: HomeAssistant, preset, temp) -> None: (PRESET_ACTIVITY, 21), ], ) -@pytest.mark.usefixtures("setup_comp_2") async def test_set_away_mode_and_restore_prev_temp( - hass: HomeAssistant, preset, temp + hass: HomeAssistant, setup_comp_2, preset, temp ) -> None: """Test the setting and removing away mode. @@ -381,9 +370,8 @@ async def test_set_away_mode_and_restore_prev_temp( (PRESET_ACTIVITY, 21), ], ) -@pytest.mark.usefixtures("setup_comp_2") async def test_set_away_mode_twice_and_restore_prev_temp( - hass: HomeAssistant, preset, temp + hass: HomeAssistant, setup_comp_2, preset, temp ) -> None: """Test the setting away mode twice in a row. 
@@ -399,8 +387,7 @@ async def test_set_away_mode_twice_and_restore_prev_temp( assert state.attributes.get("temperature") == 23 -@pytest.mark.usefixtures("setup_comp_2") -async def test_set_preset_mode_invalid(hass: HomeAssistant) -> None: +async def test_set_preset_mode_invalid(hass: HomeAssistant, setup_comp_2) -> None: """Test an invalid mode raises an error and ignore case when checking modes.""" await common.async_set_temperature(hass, 23) await common.async_set_preset_mode(hass, "away") @@ -415,8 +402,7 @@ async def test_set_preset_mode_invalid(hass: HomeAssistant) -> None: assert state.attributes.get("preset_mode") == "none" -@pytest.mark.usefixtures("setup_comp_2") -async def test_sensor_bad_value(hass: HomeAssistant) -> None: +async def test_sensor_bad_value(hass: HomeAssistant, setup_comp_2) -> None: """Test sensor that have None as state.""" state = hass.states.get(ENTITY) temp = state.attributes.get("current_temperature") @@ -477,8 +463,7 @@ async def test_sensor_unavailable(hass: HomeAssistant) -> None: assert state.attributes.get("current_temperature") is None -@pytest.mark.usefixtures("setup_comp_2") -async def test_set_target_temp_heater_on(hass: HomeAssistant) -> None: +async def test_set_target_temp_heater_on(hass: HomeAssistant, setup_comp_2) -> None: """Test if target temperature turn heater on.""" calls = _setup_switch(hass, False) _setup_sensor(hass, 25) @@ -486,13 +471,12 @@ async def test_set_target_temp_heater_on(hass: HomeAssistant) -> None: await common.async_set_temperature(hass, 30) assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_2") -async def test_set_target_temp_heater_off(hass: HomeAssistant) -> None: +async def test_set_target_temp_heater_off(hass: HomeAssistant, setup_comp_2) -> None: """Test if target temperature turn heater off.""" calls = _setup_switch(hass, True) _setup_sensor(hass, 30) @@ -500,13 +484,14 @@ async def test_set_target_temp_heater_off(hass: HomeAssistant) -> None: await common.async_set_temperature(hass, 25) assert len(calls) == 2 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_2") -async def test_temp_change_heater_on_within_tolerance(hass: HomeAssistant) -> None: +async def test_temp_change_heater_on_within_tolerance( + hass: HomeAssistant, setup_comp_2 +) -> None: """Test if temperature change doesn't turn on within tolerance.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 30) @@ -515,8 +500,9 @@ async def test_temp_change_heater_on_within_tolerance(hass: HomeAssistant) -> No assert len(calls) == 0 -@pytest.mark.usefixtures("setup_comp_2") -async def test_temp_change_heater_on_outside_tolerance(hass: HomeAssistant) -> None: +async def test_temp_change_heater_on_outside_tolerance( + hass: HomeAssistant, setup_comp_2 +) -> None: """Test if temperature change turn heater on outside cold tolerance.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 30) @@ -524,13 +510,14 @@ async def test_temp_change_heater_on_outside_tolerance(hass: HomeAssistant) -> N await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert 
call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_2") -async def test_temp_change_heater_off_within_tolerance(hass: HomeAssistant) -> None: +async def test_temp_change_heater_off_within_tolerance( + hass: HomeAssistant, setup_comp_2 +) -> None: """Test if temperature change doesn't turn off within tolerance.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) @@ -539,8 +526,9 @@ async def test_temp_change_heater_off_within_tolerance(hass: HomeAssistant) -> N assert len(calls) == 0 -@pytest.mark.usefixtures("setup_comp_2") -async def test_temp_change_heater_off_outside_tolerance(hass: HomeAssistant) -> None: +async def test_temp_change_heater_off_outside_tolerance( + hass: HomeAssistant, setup_comp_2 +) -> None: """Test if temperature change turn heater off outside hot tolerance.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) @@ -548,26 +536,26 @@ async def test_temp_change_heater_off_outside_tolerance(hass: HomeAssistant) -> await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_2") -async def test_running_when_hvac_mode_is_off(hass: HomeAssistant) -> None: +async def test_running_when_hvac_mode_is_off(hass: HomeAssistant, setup_comp_2) -> None: """Test that the switch turns off when enabled is set False.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) await common.async_set_hvac_mode(hass, HVACMode.OFF) assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_2") -async def test_no_state_change_when_hvac_mode_off(hass: HomeAssistant) -> None: +async def test_no_state_change_when_hvac_mode_off( + hass: HomeAssistant, setup_comp_2 +) -> None: """Test that the switch doesn't turn on when enabled is False.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 30) @@ -577,8 +565,7 @@ async def test_no_state_change_when_hvac_mode_off(hass: HomeAssistant) -> None: assert len(calls) == 0 -@pytest.mark.usefixtures("setup_comp_2") -async def test_hvac_mode_heat(hass: HomeAssistant) -> None: +async def test_hvac_mode_heat(hass: HomeAssistant, setup_comp_2) -> None: """Test change mode from OFF to HEAT. Switch turns on when temp below setpoint and mode changes. 
@@ -591,18 +578,18 @@ async def test_hvac_mode_heat(hass: HomeAssistant) -> None: await common.async_set_hvac_mode(hass, HVACMode.HEAT) assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -def _setup_switch(hass: HomeAssistant, is_on: bool) -> list[ServiceCall]: +def _setup_switch(hass, is_on): """Set up the test switch.""" hass.states.async_set(ENT_SWITCH, STATE_ON if is_on else STATE_OFF) calls = [] @callback - def log_call(call: ServiceCall) -> None: + def log_call(call): """Log service calls.""" calls.append(call) @@ -613,12 +600,12 @@ def _setup_switch(hass: HomeAssistant, is_on: bool) -> list[ServiceCall]: @pytest.fixture -async def setup_comp_3(hass: HomeAssistant) -> None: +async def setup_comp_3(hass): """Initialize components.""" hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -636,8 +623,7 @@ async def setup_comp_3(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.mark.usefixtures("setup_comp_3") -async def test_set_target_temp_ac_off(hass: HomeAssistant) -> None: +async def test_set_target_temp_ac_off(hass: HomeAssistant, setup_comp_3) -> None: """Test if target temperature turn ac off.""" calls = _setup_switch(hass, True) _setup_sensor(hass, 25) @@ -645,13 +631,12 @@ async def test_set_target_temp_ac_off(hass: HomeAssistant) -> None: await common.async_set_temperature(hass, 30) assert len(calls) == 2 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_3") -async def test_turn_away_mode_on_cooling(hass: HomeAssistant) -> None: +async def test_turn_away_mode_on_cooling(hass: HomeAssistant, setup_comp_3) -> None: """Test the setting away mode when cooling.""" _setup_switch(hass, True) _setup_sensor(hass, 25) @@ -662,8 +647,7 @@ async def test_turn_away_mode_on_cooling(hass: HomeAssistant) -> None: assert state.attributes.get("temperature") == 30 -@pytest.mark.usefixtures("setup_comp_3") -async def test_hvac_mode_cool(hass: HomeAssistant) -> None: +async def test_hvac_mode_cool(hass: HomeAssistant, setup_comp_3) -> None: """Test change mode from OFF to COOL. Switch turns on when temp below setpoint and mode changes. 
@@ -676,13 +660,12 @@ async def test_hvac_mode_cool(hass: HomeAssistant) -> None: await common.async_set_hvac_mode(hass, HVACMode.COOL) assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_3") -async def test_set_target_temp_ac_on(hass: HomeAssistant) -> None: +async def test_set_target_temp_ac_on(hass: HomeAssistant, setup_comp_3) -> None: """Test if target temperature turn ac on.""" calls = _setup_switch(hass, False) _setup_sensor(hass, 30) @@ -690,13 +673,14 @@ async def test_set_target_temp_ac_on(hass: HomeAssistant) -> None: await common.async_set_temperature(hass, 25) assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_3") -async def test_temp_change_ac_off_within_tolerance(hass: HomeAssistant) -> None: +async def test_temp_change_ac_off_within_tolerance( + hass: HomeAssistant, setup_comp_3 +) -> None: """Test if temperature change doesn't turn ac off within tolerance.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) @@ -705,8 +689,9 @@ async def test_temp_change_ac_off_within_tolerance(hass: HomeAssistant) -> None: assert len(calls) == 0 -@pytest.mark.usefixtures("setup_comp_3") -async def test_set_temp_change_ac_off_outside_tolerance(hass: HomeAssistant) -> None: +async def test_set_temp_change_ac_off_outside_tolerance( + hass: HomeAssistant, setup_comp_3 +) -> None: """Test if temperature change turn ac off.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) @@ -714,13 +699,14 @@ async def test_set_temp_change_ac_off_outside_tolerance(hass: HomeAssistant) -> await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_3") -async def test_temp_change_ac_on_within_tolerance(hass: HomeAssistant) -> None: +async def test_temp_change_ac_on_within_tolerance( + hass: HomeAssistant, setup_comp_3 +) -> None: """Test if temperature change doesn't turn ac on within tolerance.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 25) @@ -729,8 +715,9 @@ async def test_temp_change_ac_on_within_tolerance(hass: HomeAssistant) -> None: assert len(calls) == 0 -@pytest.mark.usefixtures("setup_comp_3") -async def test_temp_change_ac_on_outside_tolerance(hass: HomeAssistant) -> None: +async def test_temp_change_ac_on_outside_tolerance( + hass: HomeAssistant, setup_comp_3 +) -> None: """Test if temperature change turn ac on.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 25) @@ -738,26 +725,28 @@ async def test_temp_change_ac_on_outside_tolerance(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_3") -async def test_running_when_operating_mode_is_off_2(hass: HomeAssistant) -> None: +async def test_running_when_operating_mode_is_off_2( + hass: 
HomeAssistant, setup_comp_3 +) -> None: """Test that the switch turns off when enabled is set False.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) await common.async_set_hvac_mode(hass, HVACMode.OFF) assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_3") -async def test_no_state_change_when_operation_mode_off_2(hass: HomeAssistant) -> None: +async def test_no_state_change_when_operation_mode_off_2( + hass: HomeAssistant, setup_comp_3 +) -> None: """Test that the switch doesn't turn on when enabled is False.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 30) @@ -774,7 +763,7 @@ async def _setup_thermostat_with_min_cycle_duration( hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -869,7 +858,7 @@ async def test_heating_cooling_switch_toggles_when_outside_min_cycle_duration( # Then assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == expected_triggered_service_call assert call.data["entity_id"] == ENT_SWITCH @@ -922,12 +911,12 @@ async def test_hvac_mode_change_toggles_heating_cooling_switch_even_when_within_ @pytest.fixture -async def setup_comp_7(hass: HomeAssistant) -> None: +async def setup_comp_7(hass): """Initialize components.""" hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -948,8 +937,9 @@ async def setup_comp_7(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.mark.usefixtures("setup_comp_7") -async def test_temp_change_ac_trigger_on_long_enough_3(hass: HomeAssistant) -> None: +async def test_temp_change_ac_trigger_on_long_enough_3( + hass: HomeAssistant, setup_comp_7 +) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = _setup_switch(hass, True) await hass.async_block_till_done() @@ -967,13 +957,14 @@ async def test_temp_change_ac_trigger_on_long_enough_3(hass: HomeAssistant) -> N await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_7") -async def test_temp_change_ac_trigger_off_long_enough_3(hass: HomeAssistant) -> None: +async def test_temp_change_ac_trigger_off_long_enough_3( + hass: HomeAssistant, setup_comp_7 +) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = _setup_switch(hass, False) await hass.async_block_till_done() @@ -991,18 +982,18 @@ async def test_temp_change_ac_trigger_off_long_enough_3(hass: HomeAssistant) -> await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @pytest.fixture -async def setup_comp_8(hass: HomeAssistant) -> None: +async def setup_comp_8(hass): """Initialize components.""" hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( 
hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1021,8 +1012,9 @@ async def setup_comp_8(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.mark.usefixtures("setup_comp_8") -async def test_temp_change_heater_trigger_on_long_enough_2(hass: HomeAssistant) -> None: +async def test_temp_change_heater_trigger_on_long_enough_2( + hass: HomeAssistant, setup_comp_8 +) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = _setup_switch(hass, True) await hass.async_block_till_done() @@ -1040,14 +1032,13 @@ async def test_temp_change_heater_trigger_on_long_enough_2(hass: HomeAssistant) await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -@pytest.mark.usefixtures("setup_comp_8") async def test_temp_change_heater_trigger_off_long_enough_2( - hass: HomeAssistant, + hass: HomeAssistant, setup_comp_8 ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = _setup_switch(hass, False) @@ -1066,17 +1057,17 @@ async def test_temp_change_heater_trigger_off_long_enough_2( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @pytest.fixture -async def setup_comp_9(hass: HomeAssistant) -> None: +async def setup_comp_9(hass): """Initialize components.""" assert await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1095,13 +1086,12 @@ async def setup_comp_9(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.mark.usefixtures("setup_comp_9") -async def test_precision(hass: HomeAssistant) -> None: +async def test_precision(hass: HomeAssistant, setup_comp_9) -> None: """Test that setting precision to tenths works as intended.""" hass.config.units = US_CUSTOMARY_SYSTEM - await common.async_set_temperature(hass, 55.27) + await common.async_set_temperature(hass, 23.27) state = hass.states.get(ENTITY) - assert state.attributes.get("temperature") == 55.3 + assert state.attributes.get("temperature") == 23.3 # check that target_temp_step defaults to precision assert state.attributes.get("target_temp_step") == 0.1 @@ -1110,7 +1100,7 @@ async def test_custom_setup_params(hass: HomeAssistant) -> None: """Test the setup with custom parameters.""" result = await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1151,7 +1141,7 @@ async def test_restore_state(hass: HomeAssistant, hvac_mode) -> None: await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1189,7 +1179,7 @@ async def test_no_restore_state(hass: HomeAssistant) -> None: await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1220,7 +1210,7 @@ async def test_initial_hvac_off_force_heater_off(hass: HomeAssistant) -> None: await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1239,7 +1229,7 @@ async def test_initial_hvac_off_force_heater_off(hass: HomeAssistant) -> None: # heater must be switched off assert len(calls) == 1 call = calls[0] - assert call.domain == 
HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @@ -1274,7 +1264,7 @@ async def test_restore_will_turn_off_(hass: HomeAssistant) -> None: await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1319,7 +1309,7 @@ async def test_restore_will_turn_off_when_loaded_second(hass: HomeAssistant) -> await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1347,7 +1337,7 @@ async def test_restore_will_turn_off_when_loaded_second(hass: HomeAssistant) -> assert len(calls_on) == 0 assert len(calls_off) == 1 call = calls_off[0] - assert call.domain == HOMEASSISTANT_DOMAIN + assert call.domain == HASS_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == "input_boolean.test" @@ -1376,10 +1366,10 @@ async def test_restore_state_uncoherence_case(hass: HomeAssistant) -> None: assert state.state == HVACMode.OFF -async def _setup_climate(hass: HomeAssistant) -> None: +async def _setup_climate(hass): assert await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1395,9 +1385,7 @@ async def _setup_climate(hass: HomeAssistant) -> None: ) -def _mock_restore_cache( - hass: HomeAssistant, temperature: int = 20, hvac_mode: HVACMode = HVACMode.OFF -) -> None: +def _mock_restore_cache(hass, temperature=20, hvac_mode=HVACMode.OFF): mock_restore_cache( hass, ( @@ -1415,7 +1403,7 @@ async def test_reload(hass: HomeAssistant) -> None: assert await async_setup_component( hass, - CLIMATE_DOMAIN, + DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1443,50 +1431,3 @@ async def test_reload(hass: HomeAssistant) -> None: assert len(hass.states.async_all()) == 1 assert hass.states.get("climate.test") is None assert hass.states.get("climate.reload") - - -async def test_device_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device_registry: dr.DeviceRegistry, -) -> None: - """Test for source entity device.""" - - source_config_entry = MockConfigEntry() - source_config_entry.add_to_hass(hass) - source_device_entry = device_registry.async_get_or_create( - config_entry_id=source_config_entry.entry_id, - identifiers={("switch", "identifier_test")}, - connections={("mac", "30:31:32:33:34:35")}, - ) - source_entity = entity_registry.async_get_or_create( - "switch", - "test", - "source", - config_entry=source_config_entry, - device_id=source_device_entry.id, - ) - await hass.async_block_till_done() - assert entity_registry.async_get("switch.test_source") is not None - - helper_config_entry = MockConfigEntry( - data={}, - domain=GENERIC_THERMOSTAT_DOMAIN, - options={ - "name": "Test", - "heater": "switch.test_source", - "target_sensor": ENT_SENSOR, - "ac_mode": False, - "cold_tolerance": 0.3, - "hot_tolerance": 0.3, - }, - title="Test", - ) - helper_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(helper_config_entry.entry_id) - await hass.async_block_till_done() - - helper_entity = entity_registry.async_get("climate.test") - assert helper_entity is not None - assert helper_entity.device_id == source_entity.device_id diff --git a/tests/components/generic_thermostat/test_config_flow.py b/tests/components/generic_thermostat/test_config_flow.py index 561870ad3d4..81e06146a14 100644 --- a/tests/components/generic_thermostat/test_config_flow.py +++ 
b/tests/components/generic_thermostat/test_config_flow.py @@ -6,11 +6,12 @@ from syrupy.assertion import SnapshotAssertion from syrupy.filters import props from homeassistant.components.climate import PRESET_AWAY -from homeassistant.components.generic_thermostat.const import ( +from homeassistant.components.generic_thermostat.climate import ( CONF_AC_MODE, CONF_COLD_TOLERANCE, CONF_HEATER, CONF_HOT_TOLERANCE, + CONF_NAME, CONF_PRESETS, CONF_SENSOR, DOMAIN, @@ -20,7 +21,6 @@ from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_UNIT_OF_MEASUREMENT, - CONF_NAME, STATE_OFF, UnitOfTemperature, ) @@ -132,51 +132,3 @@ async def test_options(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None # Check config entry is reloaded with new options await hass.async_block_till_done() assert hass.states.get("climate.my_thermostat") == snapshot(name="without_away") - - -async def test_config_flow_preset_accepts_float( - hass: HomeAssistant, snapshot: SnapshotAssertion -) -> None: - """Test the config flow with preset is a float.""" - with patch( - "homeassistant.components.generic_thermostat.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result == snapshot(name="init", include=SNAPSHOT_FLOW_PROPS) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: "My thermostat", - CONF_HEATER: "switch.run", - CONF_SENSOR: "sensor.temperature", - CONF_AC_MODE: False, - CONF_COLD_TOLERANCE: 0.3, - CONF_HOT_TOLERANCE: 0.3, - }, - ) - assert result == snapshot(name="presets", include=SNAPSHOT_FLOW_PROPS) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PRESETS[PRESET_AWAY]: 10.4, - }, - ) - assert result == snapshot(name="create_entry", include=SNAPSHOT_FLOW_PROPS) - - await hass.async_block_till_done() - - assert len(mock_setup_entry.mock_calls) == 1 - assert result["options"] == { - "ac_mode": False, - "away_temp": 10.4, - "cold_tolerance": 0.3, - "heater": "switch.run", - "hot_tolerance": 0.3, - "name": "My thermostat", - "target_sensor": "sensor.temperature", - } diff --git a/tests/components/generic_thermostat/test_init.py b/tests/components/generic_thermostat/test_init.py deleted file mode 100644 index addae2f684e..00000000000 --- a/tests/components/generic_thermostat/test_init.py +++ /dev/null @@ -1,98 +0,0 @@ -"""Test Generic Thermostat component setup process.""" - -from __future__ import annotations - -from homeassistant.components.generic_thermostat.const import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er - -from tests.common import MockConfigEntry - - -async def test_device_cleaning( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test cleaning of devices linked to the helper config entry.""" - - # Source entity device config entry - source_config_entry = MockConfigEntry() - source_config_entry.add_to_hass(hass) - - # Device entry of the source entity - source_device1_entry = device_registry.async_get_or_create( - config_entry_id=source_config_entry.entry_id, - identifiers={("switch", "identifier_test1")}, - connections={("mac", "30:31:32:33:34:01")}, - ) - - # Source entity registry - source_entity = entity_registry.async_get_or_create( - "switch", - "test", - "source", 
- config_entry=source_config_entry, - device_id=source_device1_entry.id, - ) - await hass.async_block_till_done() - assert entity_registry.async_get("switch.test_source") is not None - - # Configure the configuration entry for helper - helper_config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - "name": "Test", - "heater": "switch.test_source", - "target_sensor": "sensor.temperature", - "ac_mode": False, - "cold_tolerance": 0.3, - "hot_tolerance": 0.3, - }, - title="Test", - ) - helper_config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(helper_config_entry.entry_id) - await hass.async_block_till_done() - - # Confirm the link between the source entity device and the helper entity - helper_entity = entity_registry.async_get("climate.test") - assert helper_entity is not None - assert helper_entity.device_id == source_entity.device_id - - # Device entry incorrectly linked to config entry - device_registry.async_get_or_create( - config_entry_id=helper_config_entry.entry_id, - identifiers={("sensor", "identifier_test2")}, - connections={("mac", "30:31:32:33:34:02")}, - ) - device_registry.async_get_or_create( - config_entry_id=helper_config_entry.entry_id, - identifiers={("sensor", "identifier_test3")}, - connections={("mac", "30:31:32:33:34:03")}, - ) - await hass.async_block_till_done() - - # Before reloading the config entry, 3 devices are expected to be linked - devices_before_reload = device_registry.devices.get_devices_for_config_entry_id( - helper_config_entry.entry_id - ) - assert len(devices_before_reload) == 3 - - # Config entry reload - await hass.config_entries.async_reload(helper_config_entry.entry_id) - await hass.async_block_till_done() - - # Confirm the link between the source entity device and the helper entity - helper_entity = entity_registry.async_get("climate.test") - assert helper_entity is not None - assert helper_entity.device_id == source_entity.device_id - - # After reloading the config entry, only one linked device is expected - devices_after_reload = device_registry.devices.get_devices_for_config_entry_id( - helper_config_entry.entry_id - ) - assert len(devices_after_reload) == 1 - - assert devices_after_reload[0].id == source_device1_entry.id diff --git a/tests/components/geniushub/__init__.py b/tests/components/geniushub/__init__.py deleted file mode 100644 index ed06642d339..00000000000 --- a/tests/components/geniushub/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for the geniushub integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/geniushub/conftest.py b/tests/components/geniushub/conftest.py deleted file mode 100644 index 304d7555a8c..00000000000 --- a/tests/components/geniushub/conftest.py +++ /dev/null @@ -1,99 +0,0 @@ -"""GeniusHub tests configuration.""" - -from collections.abc import Generator -from typing import Any -from unittest.mock import AsyncMock, MagicMock, patch - -from geniushubclient import GeniusDevice, GeniusZone -import pytest - -from homeassistant.components.geniushub.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME - -from tests.common import MockConfigEntry, 
load_json_array_fixture - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.geniushub.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_geniushub_client() -> Generator[AsyncMock]: - """Mock a GeniusHub client.""" - with patch( - "homeassistant.components.geniushub.config_flow.GeniusService", - autospec=True, - ) as mock_client: - client = mock_client.return_value - client.request.return_value = { - "data": { - "UID": "aa:bb:cc:dd:ee:ff", - } - } - yield client - - -@pytest.fixture(scope="package") -def zones() -> list[dict[str, Any]]: - """Return a list of zones.""" - return load_json_array_fixture("zones_cloud_test_data.json", DOMAIN) - - -@pytest.fixture(scope="package") -def devices() -> list[dict[str, Any]]: - """Return a list of devices.""" - return load_json_array_fixture("devices_cloud_test_data.json", DOMAIN) - - -@pytest.fixture -def mock_geniushub_cloud( - zones: list[dict[str, Any]], devices: list[dict[str, Any]] -) -> Generator[MagicMock]: - """Mock a GeniusHub.""" - with patch( - "homeassistant.components.geniushub.GeniusHub", - autospec=True, - ) as mock_client: - client = mock_client.return_value - genius_zones = [GeniusZone(z["id"], z, client) for z in zones] - client.zone_objs = genius_zones - client._zones = genius_zones - genius_devices = [GeniusDevice(d["id"], d, client) for d in devices] - client.device_objs = genius_devices - client._devices = genius_devices - client.api_version = 1 - yield client - - -@pytest.fixture -def mock_local_config_entry() -> MockConfigEntry: - """Mock a local config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title="aa:bb:cc:dd:ee:ff", - data={ - CONF_HOST: "10.0.0.131", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - unique_id="aa:bb:cc:dd:ee:ff", - ) - - -@pytest.fixture -def mock_cloud_config_entry() -> MockConfigEntry: - """Mock a cloud config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title="Genius hub", - data={ - CONF_TOKEN: "abcdef", - }, - entry_id="01J71MQF0EC62D620DGYNG2R8H", - ) diff --git a/tests/components/geniushub/fixtures/devices_cloud_test_data.json b/tests/components/geniushub/fixtures/devices_cloud_test_data.json deleted file mode 100644 index 92fd2c33811..00000000000 --- a/tests/components/geniushub/fixtures/devices_cloud_test_data.json +++ /dev/null @@ -1,151 +0,0 @@ -[ - { - "id": "4", - "type": "Smart Plug", - "assignedZones": [{ "name": "Bedroom Socket" }], - "state": { "outputOnOff": "True" } - }, - { - "id": "6", - "type": "Smart Plug", - "assignedZones": [{ "name": "Kitchen Socket" }], - "state": { "outputOnOff": "True" } - }, - { - "id": "11", - "type": "Radiator Valve", - "assignedZones": [{ "name": "Lounge" }], - "state": { "batteryLevel": 43, "setTemperature": 4 } - }, - { - "id": "16", - "type": "Room Sensor", - "assignedZones": [{ "name": "Guest room" }], - "state": { - "batteryLevel": 100, - "measuredTemperature": 21, - "luminance": 29, - "occupancyTrigger": 255 - } - }, - { - "id": "17", - "type": "Room Sensor", - "assignedZones": [{ "name": "Ensuite" }], - "state": { - "batteryLevel": 100, - "measuredTemperature": 21, - "luminance": 32, - "occupancyTrigger": 0 - } - }, - { - "id": "18", - "type": "Room Sensor", - "assignedZones": [{ "name": "Bedroom" }], - "state": { - "batteryLevel": 36, - "measuredTemperature": 21.5, - "luminance": 1, - "occupancyTrigger": 0 - } - }, - { - "id": "20", - 
"type": "Room Sensor", - "assignedZones": [{ "name": "Kitchen" }], - "state": { - "batteryLevel": 100, - "measuredTemperature": 21.5, - "luminance": 1, - "occupancyTrigger": 0 - } - }, - { - "id": "21", - "type": "Room Sensor", - "assignedZones": [{ "name": "Hall" }], - "state": { - "batteryLevel": 100, - "measuredTemperature": 21, - "luminance": 33, - "occupancyTrigger": 0 - } - }, - { - "id": "22", - "type": "Single Channel Receiver", - "assignedZones": [{ "name": "East Berlin" }], - "state": { "outputOnOff": "False" } - }, - { - "id": "50", - "type": "Room Sensor", - "assignedZones": [{ "name": "Study" }], - "state": { - "batteryLevel": 100, - "measuredTemperature": 22, - "luminance": 34, - "occupancyTrigger": 0 - } - }, - { - "id": "53", - "type": "Room Sensor", - "assignedZones": [{ "name": "Lounge" }], - "state": { - "batteryLevel": 28, - "measuredTemperature": 0, - "luminance": 0, - "occupancyTrigger": 0 - } - }, - { - "id": "56", - "type": "Radiator Valve", - "assignedZones": [{ "name": "Kitchen" }], - "state": { "batteryLevel": 55, "setTemperature": 4 } - }, - { - "id": "68", - "type": "Radiator Valve", - "assignedZones": [{ "name": "Hall" }], - "state": { "batteryLevel": 92, "setTemperature": 4 } - }, - { - "id": "78", - "type": "Radiator Valve", - "assignedZones": [{ "name": "Bedroom" }], - "state": { "batteryLevel": 42, "setTemperature": 4 } - }, - { - "id": "85", - "type": "Radiator Valve", - "assignedZones": [{ "name": "Study" }], - "state": { "batteryLevel": 61, "setTemperature": 4 } - }, - { - "id": "86", - "type": "Smart Plug", - "assignedZones": [{ "name": "Study Socket" }], - "state": { "outputOnOff": "False" } - }, - { - "id": "88", - "type": "Radiator Valve", - "assignedZones": [{ "name": "Ensuite" }], - "state": { "batteryLevel": 49, "setTemperature": 4 } - }, - { - "id": "89", - "type": "Radiator Valve", - "assignedZones": [{ "name": "Kitchen" }], - "state": { "batteryLevel": 48, "setTemperature": 4 } - }, - { - "id": "90", - "type": "Radiator Valve", - "assignedZones": [{ "name": "Guest room" }], - "state": { "batteryLevel": 92, "setTemperature": 4 } - } -] diff --git a/tests/components/geniushub/fixtures/zones_cloud_test_data.json b/tests/components/geniushub/fixtures/zones_cloud_test_data.json deleted file mode 100644 index 00d3109cf6e..00000000000 --- a/tests/components/geniushub/fixtures/zones_cloud_test_data.json +++ /dev/null @@ -1,1069 +0,0 @@ -[ - { - "id": 0, - "name": "West Berlin", - "output": 0, - "type": "manager", - "mode": "off", - "schedule": { "timer": {}, "footprint": {} } - }, - { - "id": 1, - "name": "Lounge", - "output": 0, - "type": "radiator", - "mode": "off", - "temperature": 20, - "setpoint": 4, - "override": { "duration": 0, "setpoint": 20 }, - "schedule": { - "timer": { - "weekly": { - "sunday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 68400, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 68400, "setpoint": 20 }, - { "end": 81000, "start": 75600, "setpoint": 18 } - ] - }, - "monday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 68400, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 68400, "setpoint": 20 }, - { "end": 81000, "start": 75600, "setpoint": 18 } - ] - }, - "tuesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 68400, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 68400, "setpoint": 20 }, - { "end": 81000, "start": 75600, "setpoint": 18 } - ] - }, - "wednesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 68400, 
"start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 68400, "setpoint": 20 }, - { "end": 81000, "start": 75600, "setpoint": 18 } - ] - }, - "thursday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 68400, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 68400, "setpoint": 20 }, - { "end": 81000, "start": 75600, "setpoint": 18 } - ] - }, - "friday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 68400, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 68400, "setpoint": 20 }, - { "end": 81000, "start": 75600, "setpoint": 18 } - ] - }, - "saturday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 68400, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 68400, "setpoint": 20 }, - { "end": 81000, "start": 75600, "setpoint": 18 } - ] - } - } - }, - "footprint": { - "weekly": { - "sunday": { - "defaultSetpoint": 17, - "heatingPeriods": [ - { "end": 61200, "start": 0, "setpoint": 4 }, - { "end": 86400, "start": 80100, "setpoint": 4 } - ] - }, - "monday": { - "defaultSetpoint": 17, - "heatingPeriods": [ - { "end": 61200, "start": 0, "setpoint": 4 }, - { "end": 86400, "start": 80100, "setpoint": 4 } - ] - }, - "tuesday": { - "defaultSetpoint": 17, - "heatingPeriods": [ - { "end": 61200, "start": 0, "setpoint": 4 }, - { "end": 86400, "start": 80100, "setpoint": 4 } - ] - }, - "wednesday": { - "defaultSetpoint": 17, - "heatingPeriods": [ - { "end": 61200, "start": 0, "setpoint": 4 }, - { "end": 86400, "start": 80100, "setpoint": 4 } - ] - }, - "thursday": { - "defaultSetpoint": 17, - "heatingPeriods": [ - { "end": 61200, "start": 0, "setpoint": 4 }, - { "end": 86400, "start": 80100, "setpoint": 4 } - ] - }, - "friday": { - "defaultSetpoint": 17, - "heatingPeriods": [ - { "end": 61200, "start": 0, "setpoint": 4 }, - { "end": 86400, "start": 80100, "setpoint": 4 } - ] - }, - "saturday": { - "defaultSetpoint": 17, - "heatingPeriods": [ - { "end": 61200, "start": 0, "setpoint": 4 }, - { "end": 86400, "start": 80100, "setpoint": 4 } - ] - } - } - } - } - }, - { - "id": 2, - "name": "Hall", - "output": 0, - "type": "radiator", - "mode": "off", - "temperature": 21, - "setpoint": 4, - "occupied": "False", - "override": { "duration": 0, "setpoint": 20 }, - "schedule": { - "timer": { - "weekly": { - "sunday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 70200, "start": 61200, "setpoint": 18.5 } - ] - }, - "monday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 70200, "start": 61200, "setpoint": 18.5 } - ] - }, - "tuesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 70200, "start": 61200, "setpoint": 18.5 } - ] - }, - "wednesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 70200, "start": 61200, "setpoint": 18.5 } - ] - }, - "thursday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 70200, "start": 61200, "setpoint": 18.5 } - ] - }, - "friday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 
18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 70200, "start": 61200, "setpoint": 18.5 } - ] - }, - "saturday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 73800, "start": 68400, "setpoint": 18.5 } - ] - } - } - }, - "footprint": { - "weekly": { - "sunday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 37800, "start": 32400, "setpoint": 20 }, - { "end": 75600, "start": 56700, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "monday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 43500, "start": 31800, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "tuesday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 34200, "start": 27300, "setpoint": 20 }, - { "end": 75600, "start": 60900, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "wednesday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 48300, "start": 28800, "setpoint": 20 }, - { "end": 75600, "start": 75300, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "thursday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 42000, "start": 28500, "setpoint": 20 }, - { "end": 70800, "start": 53700, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "friday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 64500, "start": 28500, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "saturday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 63900, "start": 53100, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - } - } - } - } - }, - { - "id": 3, - "name": "Kitchen", - "output": 0, - "type": "radiator", - "mode": "off", - "temperature": 21.5, - "setpoint": 4, - "occupied": "False", - "override": { "duration": 0, "setpoint": 20 }, - "schedule": { - "timer": { - "weekly": { - "sunday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 70200, "start": 61200, "setpoint": 18.5 } - ] - }, - "monday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 70200, "start": 61200, "setpoint": 18.5 } - ] - }, - "tuesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 70200, "start": 61200, "setpoint": 18.5 } - ] - }, - "wednesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 70200, "start": 61200, "setpoint": 18.5 } - ] - }, - "thursday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 70200, "start": 61200, "setpoint": 18.5 } - ] - 
}, - "friday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 70200, "start": 61200, "setpoint": 18.5 } - ] - }, - "saturday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 61200, "start": 29700, "setpoint": 6 }, - { "end": 73800, "start": 68400, "setpoint": 18.5 } - ] - } - } - }, - "footprint": { - "weekly": { - "sunday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 38100, "start": 29100, "setpoint": 20 }, - { "end": 75600, "start": 56700, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "monday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 51600, "start": 32400, "setpoint": 20 }, - { "end": 74400, "start": 60600, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "tuesday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 33300, "start": 27300, "setpoint": 20 }, - { "end": 75600, "start": 58800, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "wednesday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 48600, "start": 28800, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "thursday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 71400, "start": 56400, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "friday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 74400, "start": 40800, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "saturday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 63300, "start": 29700, "setpoint": 20 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - } - } - } - } - }, - { - "id": 5, - "name": "Ensuite", - "output": 0, - "type": "radiator", - "mode": "off", - "temperature": 21, - "setpoint": 4, - "occupied": "False", - "override": { "duration": 0, "setpoint": 28 }, - "schedule": { - "timer": { - "weekly": { - "sunday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 73800, "setpoint": 16 } - ] - }, - "monday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 73800, "setpoint": 16 } - ] - }, - "tuesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 73800, "setpoint": 16 } - ] - }, - "wednesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 73800, "setpoint": 16 } - ] - }, - "thursday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { 
"end": 81000, "start": 73800, "setpoint": 16 } - ] - }, - "friday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 73800, "setpoint": 16 } - ] - }, - "saturday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 73800, "setpoint": 16 } - ] - } - } - }, - "footprint": { - "weekly": { - "sunday": { - "defaultSetpoint": 12, - "heatingPeriods": [ - { "end": 28800, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 81000, "setpoint": 16 } - ] - }, - "monday": { - "defaultSetpoint": 12, - "heatingPeriods": [ - { "end": 28800, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 81000, "setpoint": 16 } - ] - }, - "tuesday": { - "defaultSetpoint": 12, - "heatingPeriods": [ - { "end": 28800, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 81000, "setpoint": 16 } - ] - }, - "wednesday": { - "defaultSetpoint": 12, - "heatingPeriods": [ - { "end": 28800, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 81000, "setpoint": 16 } - ] - }, - "thursday": { - "defaultSetpoint": 12, - "heatingPeriods": [ - { "end": 28800, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 81000, "setpoint": 16 } - ] - }, - "friday": { - "defaultSetpoint": 12, - "heatingPeriods": [ - { "end": 28800, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 81000, "setpoint": 16 } - ] - }, - "saturday": { - "defaultSetpoint": 12, - "heatingPeriods": [ - { "end": 28800, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 81000, "setpoint": 16 } - ] - } - } - } - } - }, - { - "id": 7, - "name": "Guest room", - "output": 0, - "type": "radiator", - "mode": "off", - "temperature": 21, - "setpoint": 4, - "occupied": "True", - "override": { "duration": 0, "setpoint": 20 }, - "schedule": { - "timer": { - "weekly": { - "sunday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 73800, "setpoint": 14 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "monday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 73800, "setpoint": 14 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "tuesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 75600, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "wednesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 73800, "setpoint": 14 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "thursday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 73800, "setpoint": 14 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "friday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 73800, 
"setpoint": 14 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "saturday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 73800, "setpoint": 14 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - } - } - }, - "footprint": { - "weekly": { - "sunday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "monday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "tuesday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "wednesday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "thursday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "friday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "saturday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - } - } - } - } - }, - { - "id": 27, - "name": "Bedroom Socket", - "output": 1, - "type": "on / off", - "mode": "timer", - "setpoint": "True", - "override": { "duration": 0, "setpoint": "True" }, - "schedule": { - "timer": { - "weekly": { - "sunday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] - }, - "monday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] - }, - "tuesday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] - }, - "wednesday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] - }, - "thursday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] - }, - "friday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] - }, - "saturday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] - } - } - }, - "footprint": {} - } - }, - { - "id": 28, - "name": "Kitchen Socket", - "output": 1, - "type": "on / off", - "mode": "timer", - "setpoint": "True", - "override": { "duration": 0, "setpoint": "True" }, - "schedule": { - "timer": { - "weekly": { - "sunday": { - "defaultSetpoint": "False", - "heatingPeriods": [ - { "end": 82800, "start": 27000, "setpoint": "True" } - ] - }, - "monday": { - "defaultSetpoint": "False", - "heatingPeriods": [ - { "end": 82800, "start": 27000, "setpoint": "True" } - ] - }, - "tuesday": { - "defaultSetpoint": "False", - "heatingPeriods": [ - { "end": 82800, "start": 27000, "setpoint": "True" } - ] - }, - "wednesday": { - "defaultSetpoint": "False", - "heatingPeriods": [ - { "end": 82800, "start": 27000, "setpoint": "True" } - ] - }, - "thursday": { - "defaultSetpoint": "False", - "heatingPeriods": [ - { "end": 82800, "start": 27000, 
"setpoint": "True" } - ] - }, - "friday": { - "defaultSetpoint": "False", - "heatingPeriods": [ - { "end": 82800, "start": 27000, "setpoint": "True" } - ] - }, - "saturday": { - "defaultSetpoint": "False", - "heatingPeriods": [ - { "end": 82800, "start": 27000, "setpoint": "True" } - ] - } - } - }, - "footprint": {} - } - }, - { - "id": 29, - "name": "Bedroom", - "output": 0, - "type": "radiator", - "mode": "off", - "temperature": 21.5, - "setpoint": 4, - "override": { "duration": 0, "setpoint": 23.5 }, - "schedule": { - "timer": { - "weekly": { - "sunday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 75600, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "monday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 75600, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "tuesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 75600, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "wednesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 73800, "setpoint": 18.5 } - ] - }, - "thursday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 75600, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "friday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 75600, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 75600, "setpoint": 19.5 } - ] - }, - "saturday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 75600, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - } - } - }, - "footprint": { - "weekly": { - "sunday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "monday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "tuesday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "wednesday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "thursday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "friday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "saturday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - } - } - } - } - }, - { - "id": 30, - "name": "Study", - "output": 0, - "type": "radiator", - "mode": "off", - "temperature": 22, - "setpoint": 4, - "occupied": "False", - 
"override": { "duration": 0, "setpoint": 28 }, - "schedule": { - "timer": { - "weekly": { - "sunday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 73800, "setpoint": 14 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "monday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 73800, "setpoint": 14 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "tuesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 75600, "start": 29700, "setpoint": 6 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "wednesday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 73800, "setpoint": 14 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "thursday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 73800, "setpoint": 14 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "friday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 73800, "setpoint": 14 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - }, - "saturday": { - "defaultSetpoint": 14.5, - "heatingPeriods": [ - { "end": 29700, "start": 27000, "setpoint": 18 }, - { "end": 73800, "start": 29700, "setpoint": 6 }, - { "end": 75600, "start": 73800, "setpoint": 14 }, - { "end": 81000, "start": 75600, "setpoint": 18.5 } - ] - } - } - }, - "footprint": { - "weekly": { - "sunday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "monday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "tuesday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "wednesday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "thursday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "friday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - }, - "saturday": { - "defaultSetpoint": 14, - "heatingPeriods": [ - { "end": 23400, "start": 0, "setpoint": 16 }, - { "end": 86400, "start": 75600, "setpoint": 16 } - ] - } - } - } - } - }, - { - "id": 32, - "name": "Study Socket", - "output": 0, - "type": "on / off", - "mode": "off", - "setpoint": "False", - "override": { "duration": 0, "setpoint": "True" }, - "schedule": { - "timer": { - "weekly": { - "sunday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, 
"start": 0, "setpoint": "True" }] - }, - "monday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] - }, - "tuesday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] - }, - "wednesday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] - }, - "thursday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] - }, - "friday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] - }, - "saturday": { - "defaultSetpoint": "False", - "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] - } - } - }, - "footprint": {} - } - } -] diff --git a/tests/components/geniushub/snapshots/test_binary_sensor.ambr b/tests/components/geniushub/snapshots/test_binary_sensor.ambr deleted file mode 100644 index fcc256b5232..00000000000 --- a/tests/components/geniushub/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,50 +0,0 @@ -# serializer version: 1 -# name: test_cloud_all_sensors[binary_sensor.single_channel_receiver_22-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.single_channel_receiver_22', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Single Channel Receiver 22', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_22', - 'unit_of_measurement': None, - }) -# --- -# name: test_cloud_all_sensors[binary_sensor.single_channel_receiver_22-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'East Berlin', - 'friendly_name': 'Single Channel Receiver 22', - 'state': dict({ - }), - }), - 'context': , - 'entity_id': 'binary_sensor.single_channel_receiver_22', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/geniushub/snapshots/test_climate.ambr b/tests/components/geniushub/snapshots/test_climate.ambr deleted file mode 100644 index eb372de784e..00000000000 --- a/tests/components/geniushub/snapshots/test_climate.ambr +++ /dev/null @@ -1,569 +0,0 @@ -# serializer version: 1 -# name: test_cloud_all_sensors[climate.bedroom-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_modes': list([ - 'boost', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.bedroom', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:radiator', - 'original_name': 'Bedroom', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_29', - 'unit_of_measurement': None, - }) 
-# --- -# name: test_cloud_all_sensors[climate.bedroom-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.5, - 'friendly_name': 'Bedroom', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:radiator', - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_mode': None, - 'preset_modes': list([ - 'boost', - ]), - 'status': dict({ - 'mode': 'off', - 'override': dict({ - 'duration': 0, - 'setpoint': 23.5, - }), - 'temperature': 21.5, - 'type': 'radiator', - }), - 'supported_features': , - 'temperature': 4, - }), - 'context': , - 'entity_id': 'climate.bedroom', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_cloud_all_sensors[climate.ensuite-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_modes': list([ - 'activity', - 'boost', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.ensuite', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:radiator', - 'original_name': 'Ensuite', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_5', - 'unit_of_measurement': None, - }) -# --- -# name: test_cloud_all_sensors[climate.ensuite-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21, - 'friendly_name': 'Ensuite', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:radiator', - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_mode': None, - 'preset_modes': list([ - 'activity', - 'boost', - ]), - 'status': dict({ - 'mode': 'off', - 'occupied': 'False', - 'override': dict({ - 'duration': 0, - 'setpoint': 28, - }), - 'temperature': 21, - 'type': 'radiator', - }), - 'supported_features': , - 'temperature': 4, - }), - 'context': , - 'entity_id': 'climate.ensuite', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_cloud_all_sensors[climate.guest_room-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_modes': list([ - 'activity', - 'boost', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.guest_room', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:radiator', - 'original_name': 'Guest room', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_7', - 'unit_of_measurement': None, - }) -# --- -# name: test_cloud_all_sensors[climate.guest_room-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21, - 'friendly_name': 'Guest room', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:radiator', - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_mode': None, - 'preset_modes': list([ - 'activity', - 
'boost', - ]), - 'status': dict({ - 'mode': 'off', - 'occupied': 'True', - 'override': dict({ - 'duration': 0, - 'setpoint': 20, - }), - 'temperature': 21, - 'type': 'radiator', - }), - 'supported_features': , - 'temperature': 4, - }), - 'context': , - 'entity_id': 'climate.guest_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_cloud_all_sensors[climate.hall-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_modes': list([ - 'activity', - 'boost', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.hall', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:radiator', - 'original_name': 'Hall', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_cloud_all_sensors[climate.hall-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21, - 'friendly_name': 'Hall', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:radiator', - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_mode': None, - 'preset_modes': list([ - 'activity', - 'boost', - ]), - 'status': dict({ - 'mode': 'off', - 'occupied': 'False', - 'override': dict({ - 'duration': 0, - 'setpoint': 20, - }), - 'temperature': 21, - 'type': 'radiator', - }), - 'supported_features': , - 'temperature': 4, - }), - 'context': , - 'entity_id': 'climate.hall', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_cloud_all_sensors[climate.kitchen-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_modes': list([ - 'activity', - 'boost', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.kitchen', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:radiator', - 'original_name': 'Kitchen', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_3', - 'unit_of_measurement': None, - }) -# --- -# name: test_cloud_all_sensors[climate.kitchen-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.5, - 'friendly_name': 'Kitchen', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:radiator', - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_mode': None, - 'preset_modes': list([ - 'activity', - 'boost', - ]), - 'status': dict({ - 'mode': 'off', - 'occupied': 'False', - 'override': dict({ - 'duration': 0, - 'setpoint': 20, - }), - 'temperature': 21.5, - 'type': 'radiator', - }), - 'supported_features': , - 'temperature': 4, - }), - 'context': , - 'entity_id': 'climate.kitchen', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': 'off', - }) -# --- -# name: test_cloud_all_sensors[climate.lounge-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_modes': list([ - 'boost', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.lounge', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:radiator', - 'original_name': 'Lounge', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_cloud_all_sensors[climate.lounge-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 20, - 'friendly_name': 'Lounge', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:radiator', - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_mode': None, - 'preset_modes': list([ - 'boost', - ]), - 'status': dict({ - 'mode': 'off', - 'override': dict({ - 'duration': 0, - 'setpoint': 20, - }), - 'temperature': 20, - 'type': 'radiator', - }), - 'supported_features': , - 'temperature': 4, - }), - 'context': , - 'entity_id': 'climate.lounge', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_cloud_all_sensors[climate.study-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_modes': list([ - 'activity', - 'boost', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.study', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:radiator', - 'original_name': 'Study', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_30', - 'unit_of_measurement': None, - }) -# --- -# name: test_cloud_all_sensors[climate.study-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 22, - 'friendly_name': 'Study', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:radiator', - 'max_temp': 28.0, - 'min_temp': 4.0, - 'preset_mode': None, - 'preset_modes': list([ - 'activity', - 'boost', - ]), - 'status': dict({ - 'mode': 'off', - 'occupied': 'False', - 'override': dict({ - 'duration': 0, - 'setpoint': 28, - }), - 'temperature': 22, - 'type': 'radiator', - }), - 'supported_features': , - 'temperature': 4, - }), - 'context': , - 'entity_id': 'climate.study', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/geniushub/snapshots/test_sensor.ambr b/tests/components/geniushub/snapshots/test_sensor.ambr deleted file mode 100644 index 874f24cff95..00000000000 --- a/tests/components/geniushub/snapshots/test_sensor.ambr +++ /dev/null @@ -1,954 +0,0 @@ -# serializer version: 1 -# name: 
test_cloud_all_sensors[sensor.geniushub_errors-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.geniushub_errors', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'GeniusHub Errors', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_Errors', - 'unit_of_measurement': None, - }) -# --- -# name: test_cloud_all_sensors[sensor.geniushub_errors-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'error_list': list([ - ]), - 'friendly_name': 'GeniusHub Errors', - }), - 'context': , - 'entity_id': 'sensor.geniushub_errors', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_cloud_all_sensors[sensor.geniushub_information-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.geniushub_information', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'GeniusHub Information', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_Information', - 'unit_of_measurement': None, - }) -# --- -# name: test_cloud_all_sensors[sensor.geniushub_information-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'GeniusHub Information', - 'information_list': list([ - ]), - }), - 'context': , - 'entity_id': 'sensor.geniushub_information', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_cloud_all_sensors[sensor.geniushub_warnings-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.geniushub_warnings', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'GeniusHub Warnings', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_Warnings', - 'unit_of_measurement': None, - }) -# --- -# name: test_cloud_all_sensors[sensor.geniushub_warnings-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'GeniusHub Warnings', - 'warning_list': list([ - ]), - }), - 'context': , - 'entity_id': 'sensor.geniushub_warnings', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_11-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.radiator_valve_11', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-40', - 'original_name': 'Radiator Valve 11', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_11', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_11-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Lounge', - 'device_class': 'battery', - 'friendly_name': 'Radiator Valve 11', - 'icon': 'mdi:battery-40', - 'state': dict({ - 'set_temperature': 4, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.radiator_valve_11', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '43', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_56-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.radiator_valve_56', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-50', - 'original_name': 'Radiator Valve 56', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_56', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_56-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Kitchen', - 'device_class': 'battery', - 'friendly_name': 'Radiator Valve 56', - 'icon': 'mdi:battery-50', - 'state': dict({ - 'set_temperature': 4, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.radiator_valve_56', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '55', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_68-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.radiator_valve_68', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-90', - 'original_name': 'Radiator Valve 68', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_68', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_68-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Hall', - 'device_class': 'battery', - 'friendly_name': 'Radiator Valve 68', - 'icon': 'mdi:battery-90', - 'state': dict({ - 'set_temperature': 4, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 
'sensor.radiator_valve_68', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '92', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_78-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.radiator_valve_78', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-40', - 'original_name': 'Radiator Valve 78', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_78', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_78-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Bedroom', - 'device_class': 'battery', - 'friendly_name': 'Radiator Valve 78', - 'icon': 'mdi:battery-40', - 'state': dict({ - 'set_temperature': 4, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.radiator_valve_78', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '42', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_85-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.radiator_valve_85', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-60', - 'original_name': 'Radiator Valve 85', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_85', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_85-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Study', - 'device_class': 'battery', - 'friendly_name': 'Radiator Valve 85', - 'icon': 'mdi:battery-60', - 'state': dict({ - 'set_temperature': 4, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.radiator_valve_85', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '61', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_88-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.radiator_valve_88', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-50', - 'original_name': 'Radiator Valve 88', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_88', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_88-state] - StateSnapshot({ - 'attributes': 
ReadOnlyDict({ - 'assigned_zone': 'Ensuite', - 'device_class': 'battery', - 'friendly_name': 'Radiator Valve 88', - 'icon': 'mdi:battery-50', - 'state': dict({ - 'set_temperature': 4, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.radiator_valve_88', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '49', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_89-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.radiator_valve_89', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-50', - 'original_name': 'Radiator Valve 89', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_89', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_89-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Kitchen', - 'device_class': 'battery', - 'friendly_name': 'Radiator Valve 89', - 'icon': 'mdi:battery-50', - 'state': dict({ - 'set_temperature': 4, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.radiator_valve_89', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '48', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_90-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.radiator_valve_90', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-90', - 'original_name': 'Radiator Valve 90', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_90', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.radiator_valve_90-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Guest room', - 'device_class': 'battery', - 'friendly_name': 'Radiator Valve 90', - 'icon': 'mdi:battery-90', - 'state': dict({ - 'set_temperature': 4, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.radiator_valve_90', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '92', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_16-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.room_sensor_16', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery', - 'original_name': 'Room Sensor 16', - 'platform': 'geniushub', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_16', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_16-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Guest room', - 'device_class': 'battery', - 'friendly_name': 'Room Sensor 16', - 'icon': 'mdi:battery', - 'state': dict({ - 'luminance': 29, - 'measured_temperature': 21, - 'occupancy_trigger': 255, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.room_sensor_16', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_17-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.room_sensor_17', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery', - 'original_name': 'Room Sensor 17', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_17', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_17-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Ensuite', - 'device_class': 'battery', - 'friendly_name': 'Room Sensor 17', - 'icon': 'mdi:battery', - 'state': dict({ - 'luminance': 32, - 'measured_temperature': 21, - 'occupancy_trigger': 0, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.room_sensor_17', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_18-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.room_sensor_18', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-alert', - 'original_name': 'Room Sensor 18', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_18', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_18-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Bedroom', - 'device_class': 'battery', - 'friendly_name': 'Room Sensor 18', - 'icon': 'mdi:battery-alert', - 'state': dict({ - 'luminance': 1, - 'measured_temperature': 21.5, - 'occupancy_trigger': 0, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.room_sensor_18', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '36', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_20-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 
'entity_category': None, - 'entity_id': 'sensor.room_sensor_20', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery', - 'original_name': 'Room Sensor 20', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_20', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_20-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Kitchen', - 'device_class': 'battery', - 'friendly_name': 'Room Sensor 20', - 'icon': 'mdi:battery', - 'state': dict({ - 'luminance': 1, - 'measured_temperature': 21.5, - 'occupancy_trigger': 0, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.room_sensor_20', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_21-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.room_sensor_21', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery', - 'original_name': 'Room Sensor 21', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_21', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_21-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Hall', - 'device_class': 'battery', - 'friendly_name': 'Room Sensor 21', - 'icon': 'mdi:battery', - 'state': dict({ - 'luminance': 33, - 'measured_temperature': 21, - 'occupancy_trigger': 0, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.room_sensor_21', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_50-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.room_sensor_50', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery', - 'original_name': 'Room Sensor 50', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_50', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_50-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Study', - 'device_class': 'battery', - 'friendly_name': 'Room Sensor 50', - 'icon': 'mdi:battery', - 'state': dict({ - 'luminance': 34, - 'measured_temperature': 22, - 'occupancy_trigger': 0, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.room_sensor_50', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_53-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.room_sensor_53', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-alert', - 'original_name': 'Room Sensor 53', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_53', - 'unit_of_measurement': '%', - }) -# --- -# name: test_cloud_all_sensors[sensor.room_sensor_53-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assigned_zone': 'Lounge', - 'device_class': 'battery', - 'friendly_name': 'Room Sensor 53', - 'icon': 'mdi:battery-alert', - 'state': dict({ - 'luminance': 0, - 'measured_temperature': 0, - 'occupancy_trigger': 0, - }), - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.room_sensor_53', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '28', - }) -# --- diff --git a/tests/components/geniushub/snapshots/test_switch.ambr b/tests/components/geniushub/snapshots/test_switch.ambr deleted file mode 100644 index 6c3c95af477..00000000000 --- a/tests/components/geniushub/snapshots/test_switch.ambr +++ /dev/null @@ -1,166 +0,0 @@ -# serializer version: 1 -# name: test_cloud_all_sensors[switch.bedroom_socket-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.bedroom_socket', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Bedroom Socket', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_27', - 'unit_of_measurement': None, - }) -# --- -# name: test_cloud_all_sensors[switch.bedroom_socket-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Bedroom Socket', - 'status': dict({ - 'mode': 'timer', - 'override': dict({ - 'duration': 0, - 'setpoint': 'True', - }), - 'type': 'on / off', - }), - }), - 'context': , - 'entity_id': 'switch.bedroom_socket', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_cloud_all_sensors[switch.kitchen_socket-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.kitchen_socket', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Kitchen Socket', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 
0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_28', - 'unit_of_measurement': None, - }) -# --- -# name: test_cloud_all_sensors[switch.kitchen_socket-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Kitchen Socket', - 'status': dict({ - 'mode': 'timer', - 'override': dict({ - 'duration': 0, - 'setpoint': 'True', - }), - 'type': 'on / off', - }), - }), - 'context': , - 'entity_id': 'switch.kitchen_socket', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_cloud_all_sensors[switch.study_socket-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.study_socket', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Study Socket', - 'platform': 'geniushub', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_32', - 'unit_of_measurement': None, - }) -# --- -# name: test_cloud_all_sensors[switch.study_socket-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Study Socket', - 'status': dict({ - 'mode': 'off', - 'override': dict({ - 'duration': 0, - 'setpoint': 'True', - }), - 'type': 'on / off', - }), - }), - 'context': , - 'entity_id': 'switch.study_socket', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/geniushub/test_binary_sensor.py b/tests/components/geniushub/test_binary_sensor.py deleted file mode 100644 index 682929eb696..00000000000 --- a/tests/components/geniushub/test_binary_sensor.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Tests for the Geniushub binary sensor platform.""" - -from unittest.mock import patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("mock_geniushub_cloud") -async def test_cloud_all_sensors( - hass: HomeAssistant, - mock_cloud_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the creation of the Genius Hub binary sensors.""" - with patch( - "homeassistant.components.geniushub.PLATFORMS", [Platform.BINARY_SENSOR] - ): - await setup_integration(hass, mock_cloud_config_entry) - - await snapshot_platform( - hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id - ) diff --git a/tests/components/geniushub/test_climate.py b/tests/components/geniushub/test_climate.py deleted file mode 100644 index d14e57b9552..00000000000 --- a/tests/components/geniushub/test_climate.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Tests for the Geniushub climate platform.""" - -from unittest.mock import patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("mock_geniushub_cloud") -async def test_cloud_all_sensors( - hass: HomeAssistant, - mock_cloud_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the creation of the Genius Hub climate entities.""" - with patch("homeassistant.components.geniushub.PLATFORMS", [Platform.CLIMATE]): - await setup_integration(hass, mock_cloud_config_entry) - - await snapshot_platform( - hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id - ) diff --git a/tests/components/geniushub/test_config_flow.py b/tests/components/geniushub/test_config_flow.py deleted file mode 100644 index 7d1d33a2245..00000000000 --- a/tests/components/geniushub/test_config_flow.py +++ /dev/null @@ -1,304 +0,0 @@ -"""Test the Geniushub config flow.""" - -from http import HTTPStatus -import socket -from unittest.mock import AsyncMock - -from aiohttp import ClientConnectionError, ClientResponseError -import pytest - -from homeassistant.components.geniushub import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_full_local_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_geniushub_client: AsyncMock, -) -> None: - """Test full local flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "local_api"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "local_api" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "10.0.0.130" - assert result["data"] == { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - } - assert result["result"].unique_id == "aa:bb:cc:dd:ee:ff" - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (socket.gaierror, "invalid_host"), - ( - ClientResponseError(AsyncMock(), (), status=HTTPStatus.UNAUTHORIZED), - "invalid_auth", - ), - ( - ClientResponseError(AsyncMock(), (), status=HTTPStatus.NOT_FOUND), - "invalid_host", - ), - (TimeoutError, "cannot_connect"), - (ClientConnectionError, "cannot_connect"), - (Exception, "unknown"), - ], -) -async def test_local_flow_exceptions( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_geniushub_client: AsyncMock, - exception: Exception, - error: str, -) -> None: - """Test local flow exceptions.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "local_api"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "local_api" - - mock_geniushub_client.request.side_effect = exception - - result = await 
hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error} - - mock_geniushub_client.request.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_local_duplicate_data( - hass: HomeAssistant, - mock_geniushub_client: AsyncMock, - mock_local_config_entry: MockConfigEntry, -) -> None: - """Test local flow aborts on duplicate data.""" - mock_local_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "local_api"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "local_api" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_local_duplicate_mac( - hass: HomeAssistant, - mock_geniushub_client: AsyncMock, - mock_local_config_entry: MockConfigEntry, -) -> None: - """Test local flow aborts on duplicate MAC.""" - mock_local_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "local_api"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "local_api" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "10.0.0.131", - CONF_USERNAME: "test-username1", - CONF_PASSWORD: "test-password", - }, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_full_cloud_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_geniushub_client: AsyncMock, -) -> None: - """Test full cloud flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "cloud_api"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "cloud_api" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_TOKEN: "abcdef", - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Genius hub" - assert result["data"] == { - CONF_TOKEN: "abcdef", - } - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (socket.gaierror, "invalid_host"), - ( - ClientResponseError(AsyncMock(), (), status=HTTPStatus.UNAUTHORIZED), - "invalid_auth", - ), - ( - ClientResponseError(AsyncMock(), (), status=HTTPStatus.NOT_FOUND), - "invalid_host", - ), - 
(TimeoutError, "cannot_connect"), - (ClientConnectionError, "cannot_connect"), - (Exception, "unknown"), - ], -) -async def test_cloud_flow_exceptions( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_geniushub_client: AsyncMock, - exception: Exception, - error: str, -) -> None: - """Test cloud flow exceptions.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "cloud_api"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "cloud_api" - - mock_geniushub_client.request.side_effect = exception - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_TOKEN: "abcdef", - }, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error} - - mock_geniushub_client.request.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_TOKEN: "abcdef", - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_cloud_duplicate( - hass: HomeAssistant, - mock_geniushub_client: AsyncMock, - mock_cloud_config_entry: MockConfigEntry, -) -> None: - """Test cloud flow aborts on duplicate data.""" - mock_cloud_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "cloud_api"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "cloud_api" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_TOKEN: "abcdef", - }, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/geniushub/test_init.py b/tests/components/geniushub/test_init.py deleted file mode 100644 index ebdc082c4b8..00000000000 --- a/tests/components/geniushub/test_init.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Tests for the Genius Hub component.""" - -from unittest.mock import AsyncMock - -from homeassistant.components.geniushub import DOMAIN -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.const import CONF_MAC, CONF_TOKEN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry - - -async def test_cloud_unique_id_migration( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_geniushub_cloud: AsyncMock, -) -> None: - """Test that the cloud unique ID is migrated to the entry_id.""" - entry = MockConfigEntry( - domain=DOMAIN, - title="Genius hub", - data={ - CONF_TOKEN: "abcdef", - CONF_MAC: "aa:bb:cc:dd:ee:ff", - }, - entry_id="1234", - ) - entry.add_to_hass(hass) - entity_registry.async_get_or_create( - SENSOR_DOMAIN, DOMAIN, "aa:bb:cc:dd:ee:ff_device_78", config_entry=entry - ) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert hass.states.get("sensor.geniushub_aa_bb_cc_dd_ee_ff_device_78") - entity_entry = entity_registry.async_get( - "sensor.geniushub_aa_bb_cc_dd_ee_ff_device_78" - ) - assert entity_entry.unique_id == "1234_device_78" diff 
--git a/tests/components/geniushub/test_sensor.py b/tests/components/geniushub/test_sensor.py deleted file mode 100644 index a75329ca7fc..00000000000 --- a/tests/components/geniushub/test_sensor.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Tests for the Geniushub sensor platform.""" - -from unittest.mock import patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("mock_geniushub_cloud") -async def test_cloud_all_sensors( - hass: HomeAssistant, - mock_cloud_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the creation of the Genius Hub sensors.""" - with patch("homeassistant.components.geniushub.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_cloud_config_entry) - - await snapshot_platform( - hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id - ) diff --git a/tests/components/geniushub/test_switch.py b/tests/components/geniushub/test_switch.py deleted file mode 100644 index 0e88562e381..00000000000 --- a/tests/components/geniushub/test_switch.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Tests for the Geniushub switch platform.""" - -from unittest.mock import patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("mock_geniushub_cloud") -async def test_cloud_all_sensors( - hass: HomeAssistant, - mock_cloud_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the creation of the Genius Hub switch entities.""" - with patch("homeassistant.components.geniushub.PLATFORMS", [Platform.SWITCH]): - await setup_integration(hass, mock_cloud_config_entry) - - await snapshot_platform( - hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id - ) diff --git a/tests/components/geo_json_events/conftest.py b/tests/components/geo_json_events/conftest.py index 11928e6f012..beab7bf1403 100644 --- a/tests/components/geo_json_events/conftest.py +++ b/tests/components/geo_json_events/conftest.py @@ -1,9 +1,9 @@ """Configuration for GeoJSON Events tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.geo_json_events import DOMAIN from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_URL diff --git a/tests/components/geo_location/test_trigger.py b/tests/components/geo_location/test_trigger.py index 7673f357a08..e5fb93dcf8f 100644 --- a/tests/components/geo_location/test_trigger.py +++ b/tests/components/geo_location/test_trigger.py @@ -29,7 +29,7 @@ def calls(hass: HomeAssistant) -> list[ServiceCall]: @pytest.fixture(autouse=True) -def setup_comp(hass: HomeAssistant) -> None: +def setup_comp(hass): """Initialize components.""" mock_component(hass, "group") hass.loop.run_until_complete( @@ -49,7 +49,7 @@ def setup_comp(hass: HomeAssistant) -> None: async def test_if_fires_on_zone_enter( - hass: HomeAssistant, service_calls: list[ServiceCall] + 
hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on zone enter.""" context = Context() @@ -96,10 +96,10 @@ async def test_if_fires_on_zone_enter( ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].context.parent_id == context.id + assert len(calls) == 1 + assert calls[0].context.parent_id == context.id assert ( - service_calls[0].data["some"] + calls[0].data["some"] == "geo_location - geo_location.entity - hello - hello - test - 0" ) @@ -118,8 +118,6 @@ async def test_if_fires_on_zone_enter( blocking=True, ) - assert len(service_calls) == 2 - hass.states.async_set( "geo_location.entity", "hello", @@ -127,11 +125,11 @@ async def test_if_fires_on_zone_enter( ) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 1 async def test_if_not_fires_for_enter_on_zone_leave( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for not firing on zone leave.""" hass.states.async_set( @@ -164,11 +162,11 @@ async def test_if_not_fires_for_enter_on_zone_leave( ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_if_fires_on_zone_leave( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on zone leave.""" hass.states.async_set( @@ -201,11 +199,11 @@ async def test_if_fires_on_zone_leave( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_zone_leave_2( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on zone leave for unavailable entity.""" hass.states.async_set( @@ -238,11 +236,11 @@ async def test_if_fires_on_zone_leave_2( ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_if_not_fires_for_leave_on_zone_enter( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for not firing on zone enter.""" hass.states.async_set( @@ -275,11 +273,11 @@ async def test_if_not_fires_for_leave_on_zone_enter( ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_if_fires_on_zone_appear( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing if entity appears in zone.""" assert await async_setup_component( @@ -319,16 +317,15 @@ async def test_if_fires_on_zone_appear( ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].context.parent_id == context.id + assert len(calls) == 1 + assert calls[0].context.parent_id == context.id assert ( - service_calls[0].data["some"] - == "geo_location - geo_location.entity - - hello - test" + calls[0].data["some"] == "geo_location - geo_location.entity - - hello - test" ) async def test_if_fires_on_zone_appear_2( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing if entity appears in zone.""" assert await async_setup_component( @@ -376,16 +373,16 @@ async def test_if_fires_on_zone_appear_2( ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].context.parent_id == context.id + assert len(calls) == 1 + 
assert calls[0].context.parent_id == context.id assert ( - service_calls[0].data["some"] + calls[0].data["some"] == "geo_location - geo_location.entity - goodbye - hello - test" ) async def test_if_fires_on_zone_disappear( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing if entity disappears from zone.""" hass.states.async_set( @@ -426,17 +423,14 @@ async def test_if_fires_on_zone_disappear( hass.states.async_remove("geo_location.entity") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] - == "geo_location - geo_location.entity - hello - - test" + calls[0].data["some"] == "geo_location - geo_location.entity - hello - - test" ) async def test_zone_undefined( - hass: HomeAssistant, - service_calls: list[ServiceCall], - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture ) -> None: """Test for undefined zone.""" hass.states.async_set( @@ -472,7 +466,7 @@ async def test_zone_undefined( ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 assert ( f"Unable to execute automation automation 0: Zone {zone_does_not_exist} not found" diff --git a/tests/components/geocaching/conftest.py b/tests/components/geocaching/conftest.py index 28d87176e46..155cd2c5a7e 100644 --- a/tests/components/geocaching/conftest.py +++ b/tests/components/geocaching/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from geocachingapi import GeocachingStatus import pytest +from typing_extensions import Generator from homeassistant.components.geocaching.const import DOMAIN diff --git a/tests/components/geocaching/test_config_flow.py b/tests/components/geocaching/test_config_flow.py index 5db89de0868..0c2ce66b513 100644 --- a/tests/components/geocaching/test_config_flow.py +++ b/tests/components/geocaching/test_config_flow.py @@ -14,7 +14,7 @@ from homeassistant.components.geocaching.const import ( ENVIRONMENT, ENVIRONMENT_URLS, ) -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -195,7 +195,9 @@ async def test_reauthentication( """Test Geocaching reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_REAUTH} + ) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 diff --git a/tests/components/geofency/test_init.py b/tests/components/geofency/test_init.py index 33740397868..2228cea80ee 100644 --- a/tests/components/geofency/test_init.py +++ b/tests/components/geofency/test_init.py @@ -10,6 +10,7 @@ from homeassistant import config_entries from homeassistant.components import zone from homeassistant.components.device_tracker.legacy import Device from homeassistant.components.geofency import CONF_MOBILE_BEACONS, DOMAIN +from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_LATITUDE, ATTR_LONGITUDE, @@ -17,7 +18,6 @@ from homeassistant.const import ( STATE_NOT_HOME, ) from homeassistant.core import 
HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component @@ -137,7 +137,7 @@ async def geofency_client( @pytest.fixture(autouse=True) -async def setup_zones(hass: HomeAssistant) -> None: +async def setup_zones(hass): """Set up Zone config in HA.""" assert await async_setup_component( hass, @@ -155,7 +155,7 @@ async def setup_zones(hass: HomeAssistant) -> None: @pytest.fixture -async def webhook_id(hass: HomeAssistant) -> str: +async def webhook_id(hass, geofency_client): """Initialize the Geofency component and get the webhook_id.""" await async_process_ha_core_config( hass, @@ -173,7 +173,7 @@ async def webhook_id(hass: HomeAssistant) -> str: return result["result"].data["webhook_id"] -async def test_data_validation(geofency_client: TestClient, webhook_id: str) -> None: +async def test_data_validation(geofency_client, webhook_id) -> None: """Test data validation.""" url = f"/api/webhook/{webhook_id}" @@ -195,8 +195,8 @@ async def test_gps_enter_and_exit_home( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - geofency_client: TestClient, - webhook_id: str, + geofency_client, + webhook_id, ) -> None: """Test GPS based zone enter and exit.""" url = f"/api/webhook/{webhook_id}" @@ -240,7 +240,7 @@ async def test_gps_enter_and_exit_home( async def test_beacon_enter_and_exit_home( - hass: HomeAssistant, geofency_client: TestClient, webhook_id: str + hass: HomeAssistant, geofency_client, webhook_id ) -> None: """Test iBeacon based zone enter and exit - a.k.a stationary iBeacon.""" url = f"/api/webhook/{webhook_id}" @@ -263,7 +263,7 @@ async def test_beacon_enter_and_exit_home( async def test_beacon_enter_and_exit_car( - hass: HomeAssistant, geofency_client: TestClient, webhook_id: str + hass: HomeAssistant, geofency_client, webhook_id ) -> None: """Test use of mobile iBeacon.""" url = f"/api/webhook/{webhook_id}" @@ -305,7 +305,7 @@ async def test_beacon_enter_and_exit_car( async def test_load_unload_entry( - hass: HomeAssistant, geofency_client: TestClient, webhook_id: str + hass: HomeAssistant, geofency_client, webhook_id ) -> None: """Test that the appropriate dispatch signals are added and removed.""" url = f"/api/webhook/{webhook_id}" diff --git a/tests/components/geonetnz_quakes/snapshots/test_diagnostics.ambr b/tests/components/geonetnz_quakes/snapshots/test_diagnostics.ambr deleted file mode 100644 index 481a662ccf9..00000000000 --- a/tests/components/geonetnz_quakes/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,21 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'info': dict({ - 'latitude': '**REDACTED**', - 'longitude': '**REDACTED**', - 'minimum_magnitude': 0.0, - 'mmi': 4, - 'radius': 25, - 'scan_interval': 300.0, - 'unit_system': 'metric', - }), - 'service': dict({ - 'last_timestamp': None, - 'last_update': '2024-09-05T15:00:00', - 'last_update_successful': '2024-09-05T15:00:00', - 'status': 'OK', - 'total': 0, - }), - }) -# --- diff --git a/tests/components/geonetnz_quakes/test_diagnostics.py b/tests/components/geonetnz_quakes/test_diagnostics.py deleted file mode 100644 index db5e1300768..00000000000 --- a/tests/components/geonetnz_quakes/test_diagnostics.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Test GeoNet NZ Quakes diagnostics.""" - -from __future__ import annotations - -from unittest.mock 
import patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -@pytest.mark.freeze_time("2024-09-05 15:00:00") -async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, - config_entry: MockConfigEntry, -) -> None: - """Test config entry diagnostics.""" - with patch("aio_geojson_client.feed.GeoJsonFeed.update") as mock_feed_update: - mock_feed_update.return_value = "OK", [] - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot diff --git a/tests/components/geonetnz_volcano/test_config_flow.py b/tests/components/geonetnz_volcano/test_config_flow.py index 110fb3b0a9e..b074bdffa20 100644 --- a/tests/components/geonetnz_volcano/test_config_flow.py +++ b/tests/components/geonetnz_volcano/test_config_flow.py @@ -3,8 +3,7 @@ from datetime import timedelta from unittest.mock import patch -from homeassistant.components.geonetnz_volcano import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.components.geonetnz_volcano import config_flow from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, @@ -21,18 +20,19 @@ async def test_duplicate_error(hass: HomeAssistant, config_entry) -> None: conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) + flow = config_flow.GeonetnzVolcanoFlowHandler() + flow.hass = hass - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER}, data=conf - ) + result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "already_configured"} async def test_show_form(hass: HomeAssistant) -> None: """Test that the form is served with no input.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER}, data=None - ) + flow = config_flow.GeonetnzVolcanoFlowHandler() + flow.hass = hass + + result = await flow.async_step_user(user_input=None) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -48,6 +48,9 @@ async def test_step_import(hass: HomeAssistant) -> None: CONF_SCAN_INTERVAL: timedelta(minutes=4), } + flow = config_flow.GeonetnzVolcanoFlowHandler() + flow.hass = hass + with ( patch( "homeassistant.components.geonetnz_volcano.async_setup_entry", @@ -57,9 +60,7 @@ async def test_step_import(hass: HomeAssistant) -> None: "homeassistant.components.geonetnz_volcano.async_setup", return_value=True ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=conf - ) + result = await flow.async_step_import(import_config=conf) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { @@ -77,6 +78,9 @@ async def test_step_user(hass: HomeAssistant) -> None: hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25} + flow = config_flow.GeonetnzVolcanoFlowHandler() + flow.hass = hass + with ( patch( "homeassistant.components.geonetnz_volcano.async_setup_entry", @@ -86,9 +90,7 @@ async def test_step_user(hass: HomeAssistant) -> None: 
"homeassistant.components.geonetnz_volcano.async_setup", return_value=True ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER}, data=conf - ) + result = await flow.async_step_user(user_input=conf) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { diff --git a/tests/components/gios/snapshots/test_diagnostics.ambr b/tests/components/gios/snapshots/test_diagnostics.ambr index 71e0afdc495..1401b1e22a0 100644 --- a/tests/components/gios/snapshots/test_diagnostics.ambr +++ b/tests/components/gios/snapshots/test_diagnostics.ambr @@ -7,8 +7,6 @@ 'station_id': 123, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'gios', 'entry_id': '86129426118ae32020417a53712d6eef', 'minor_version': 1, diff --git a/tests/components/gios/test_diagnostics.py b/tests/components/gios/test_diagnostics.py index a965e5550df..903de4872a2 100644 --- a/tests/components/gios/test_diagnostics.py +++ b/tests/components/gios/test_diagnostics.py @@ -1,7 +1,6 @@ """Test GIOS diagnostics.""" from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -19,6 +18,4 @@ async def test_entry_diagnostics( """Test config entry diagnostics.""" entry = await init_integration(hass) - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( - exclude=props("created_at", "modified_at") - ) + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot diff --git a/tests/components/github/conftest.py b/tests/components/github/conftest.py index ab262f3f522..df7de604c2c 100644 --- a/tests/components/github/conftest.py +++ b/tests/components/github/conftest.py @@ -1,9 +1,9 @@ """conftest for the GitHub integration.""" -from collections.abc import Generator from unittest.mock import patch import pytest +from typing_extensions import Generator from homeassistant.components.github.const import CONF_REPOSITORIES, DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN diff --git a/tests/components/glances/test_config_flow.py b/tests/components/glances/test_config_flow.py index ae8c2e1d51e..a7d6934e32d 100644 --- a/tests/components/glances/test_config_flow.py +++ b/tests/components/glances/test_config_flow.py @@ -11,7 +11,6 @@ import pytest from homeassistant import config_entries from homeassistant.components import glances -from homeassistant.const import CONF_NAME, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -90,13 +89,18 @@ async def test_reauth_success(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=glances.DOMAIN, data=MOCK_USER_INPUT) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + glances.DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=MOCK_USER_INPUT, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == { - CONF_NAME: "Mock Title", - CONF_USERNAME: "username", - } + assert result["description_placeholders"] == {"username": "username"} result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -124,13 +128,18 @@ async def test_reauth_fails( entry.add_to_hass(hass) mock_api.return_value.get_ha_sensor_data.side_effect = [error, HA_SENSOR_DATA] - result = await 
entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + glances.DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=MOCK_USER_INPUT, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == { - CONF_NAME: "Mock Title", - CONF_USERNAME: "username", - } + assert result["description_placeholders"] == {"username": "username"} result2 = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/glances/test_sensor.py b/tests/components/glances/test_sensor.py index 8e0367a712c..7dee47680ed 100644 --- a/tests/components/glances/test_sensor.py +++ b/tests/components/glances/test_sensor.py @@ -7,7 +7,6 @@ from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion from homeassistant.components.glances.const import DOMAIN -from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -72,40 +71,3 @@ async def test_uptime_variation( async_fire_time_changed(hass) await hass.async_block_till_done() assert hass.states.get("sensor.0_0_0_0_uptime").state == "2024-02-15T12:49:52+00:00" - - -async def test_sensor_removed( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_api: AsyncMock, - entity_registry: er.EntityRegistry, -) -> None: - """Test sensor removed server side.""" - - # Init with reference time - freezer.move_to(MOCK_REFERENCE_DATE) - entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT, entry_id="test") - entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert hass.states.get("sensor.0_0_0_0_ssl_disk_used").state != STATE_UNAVAILABLE - assert hass.states.get("sensor.0_0_0_0_memory_use").state != STATE_UNAVAILABLE - assert hass.states.get("sensor.0_0_0_0_uptime").state != STATE_UNAVAILABLE - - # Remove some sensors from Glances API data - mock_data = HA_SENSOR_DATA.copy() - mock_data.pop("fs") - mock_data.pop("mem") - mock_data.pop("uptime") - mock_api.return_value.get_ha_sensor_data = AsyncMock(return_value=mock_data) - - # Server stops providing some sensors, so state should switch to Unavailable - freezer.move_to(MOCK_REFERENCE_DATE + timedelta(minutes=2)) - freezer.tick(delta=timedelta(seconds=120)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get("sensor.0_0_0_0_ssl_disk_used").state == STATE_UNAVAILABLE - assert hass.states.get("sensor.0_0_0_0_memory_use").state == STATE_UNAVAILABLE - assert hass.states.get("sensor.0_0_0_0_uptime").state == STATE_UNAVAILABLE diff --git a/tests/components/go2rtc/__init__.py b/tests/components/go2rtc/__init__.py deleted file mode 100644 index 0971541efa5..00000000000 --- a/tests/components/go2rtc/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Go2rtc tests.""" diff --git a/tests/components/go2rtc/conftest.py b/tests/components/go2rtc/conftest.py deleted file mode 100644 index abb139b89bf..00000000000 --- a/tests/components/go2rtc/conftest.py +++ /dev/null @@ -1,96 +0,0 @@ -"""Go2rtc test configuration.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, Mock, patch - -from awesomeversion import AwesomeVersion -from go2rtc_client.rest import _StreamClient, _WebRTCClient -import pytest - -from homeassistant.components.go2rtc.const import RECOMMENDED_VERSION -from 
homeassistant.components.go2rtc.server import Server - -GO2RTC_PATH = "homeassistant.components.go2rtc" - - -@pytest.fixture -def rest_client() -> Generator[AsyncMock]: - """Mock a go2rtc rest client.""" - with ( - patch( - "homeassistant.components.go2rtc.Go2RtcRestClient", - ) as mock_client, - patch("homeassistant.components.go2rtc.server.Go2RtcRestClient", mock_client), - ): - client = mock_client.return_value - client.streams = streams = Mock(spec_set=_StreamClient) - streams.list.return_value = {} - client.validate_server_version = AsyncMock( - return_value=AwesomeVersion(RECOMMENDED_VERSION) - ) - client.webrtc = Mock(spec_set=_WebRTCClient) - yield client - - -@pytest.fixture -def ws_client() -> Generator[Mock]: - """Mock a go2rtc websocket client.""" - with patch( - "homeassistant.components.go2rtc.Go2RtcWsClient", autospec=True - ) as ws_client_mock: - yield ws_client_mock.return_value - - -@pytest.fixture -def server_stdout() -> list[str]: - """Server stdout lines.""" - return [ - "09:00:03.466 INF go2rtc platform=linux/amd64 revision=780f378 version=1.9.5", - "09:00:03.466 INF config path=/tmp/go2rtc.yaml", - "09:00:03.467 INF [rtsp] listen addr=:8554", - "09:00:03.467 INF [api] listen addr=127.0.0.1:1984", - "09:00:03.467 INF [webrtc] listen addr=:8555/tcp", - ] - - -@pytest.fixture -def mock_create_subprocess(server_stdout: list[str]) -> Generator[AsyncMock]: - """Mock create_subprocess_exec.""" - with patch(f"{GO2RTC_PATH}.server.asyncio.create_subprocess_exec") as mock_subproc: - subproc = AsyncMock() - subproc.terminate = Mock() - subproc.kill = Mock() - subproc.returncode = None - # Simulate process output - subproc.stdout.__aiter__.return_value = iter( - [f"{entry}\n".encode() for entry in server_stdout] - ) - mock_subproc.return_value = subproc - yield mock_subproc - - -@pytest.fixture -def server_start(mock_create_subprocess: AsyncMock) -> Generator[AsyncMock]: - """Mock start of a go2rtc server.""" - with patch( - f"{GO2RTC_PATH}.server.Server.start", wraps=Server.start, autospec=True - ) as mock_server_start: - yield mock_server_start - - -@pytest.fixture -def server_stop() -> Generator[AsyncMock]: - """Mock stop of a go2rtc server.""" - with ( - patch( - f"{GO2RTC_PATH}.server.Server.stop", wraps=Server.stop, autospec=True - ) as mock_server_stop, - ): - yield mock_server_stop - - -@pytest.fixture -def server(server_start: AsyncMock, server_stop: AsyncMock) -> Generator[AsyncMock]: - """Mock a go2rtc server.""" - with patch(f"{GO2RTC_PATH}.Server", wraps=Server) as mock_server: - yield mock_server diff --git a/tests/components/go2rtc/test_config_flow.py b/tests/components/go2rtc/test_config_flow.py deleted file mode 100644 index c414af35b38..00000000000 --- a/tests/components/go2rtc/test_config_flow.py +++ /dev/null @@ -1,45 +0,0 @@ -"""Test the Home Assistant Cloud config flow.""" - -from unittest.mock import patch - -from homeassistant.components.go2rtc.const import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_config_flow(hass: HomeAssistant) -> None: - """Test create cloud entry.""" - - with ( - patch( - "homeassistant.components.go2rtc.async_setup", return_value=True - ) as mock_setup, - patch( - "homeassistant.components.go2rtc.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "system"} - ) - assert result["type"] is 
FlowResultType.CREATE_ENTRY - assert result["title"] == "go2rtc" - assert result["data"] == {} - await hass.async_block_till_done() - - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_multiple_entries(hass: HomeAssistant) -> None: - """Test creating multiple cloud entries.""" - config_entry = MockConfigEntry(domain=DOMAIN) - config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "system"} - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" diff --git a/tests/components/go2rtc/test_init.py b/tests/components/go2rtc/test_init.py deleted file mode 100644 index 0f1cac6942d..00000000000 --- a/tests/components/go2rtc/test_init.py +++ /dev/null @@ -1,759 +0,0 @@ -"""The tests for the go2rtc component.""" - -from collections.abc import Callable, Generator -import logging -from typing import NamedTuple -from unittest.mock import AsyncMock, Mock, patch - -from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError -from awesomeversion import AwesomeVersion -from go2rtc_client import Stream -from go2rtc_client.exceptions import Go2RtcClientError, Go2RtcVersionError -from go2rtc_client.models import Producer -from go2rtc_client.ws import ( - ReceiveMessages, - WebRTCAnswer, - WebRTCCandidate, - WebRTCOffer, - WsError, -) -import pytest -from webrtc_models import RTCIceCandidate - -from homeassistant.components.camera import ( - DOMAIN as CAMERA_DOMAIN, - Camera, - CameraEntityFeature, - StreamType, - WebRTCAnswer as HAWebRTCAnswer, - WebRTCCandidate as HAWebRTCCandidate, - WebRTCError, - WebRTCMessage, - WebRTCSendMessage, -) -from homeassistant.components.default_config import DOMAIN as DEFAULT_CONFIG_DOMAIN -from homeassistant.components.go2rtc import WebRTCProvider -from homeassistant.components.go2rtc.const import ( - CONF_DEBUG_UI, - DEBUG_UI_URL_MESSAGE, - DOMAIN, - RECOMMENDED_VERSION, -) -from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow -from homeassistant.const import CONF_URL -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.helpers.typing import ConfigType -from homeassistant.setup import async_setup_component - -from tests.common import ( - MockConfigEntry, - MockModule, - mock_config_flow, - mock_integration, - mock_platform, - setup_test_component_platform, -) - -TEST_DOMAIN = "test" - -# The go2rtc provider does not inspect the details of the offer and answer, -# and is only a pass through. -OFFER_SDP = "v=0\r\no=carol 28908764872 28908764872 IN IP4 100.3.6.6\r\n..." -ANSWER_SDP = "v=0\r\no=bob 2890844730 2890844730 IN IP4 host.example.com\r\n..." - - -class MockCamera(Camera): - """Mock Camera Entity.""" - - _attr_name = "Test" - _attr_supported_features: CameraEntityFeature = CameraEntityFeature.STREAM - - def __init__(self) -> None: - """Initialize the mock entity.""" - super().__init__() - self._stream_source: str | None = "rtsp://stream" - - def set_stream_source(self, stream_source: str | None) -> None: - """Set the stream source.""" - self._stream_source = stream_source - - async def stream_source(self) -> str | None: - """Return the source of the stream. - - This is used by cameras with CameraEntityFeature.STREAM - and StreamType.HLS. 
- """ - return self._stream_source - - -@pytest.fixture -def integration_config_entry(hass: HomeAssistant) -> ConfigEntry: - """Test mock config entry.""" - entry = MockConfigEntry(domain=TEST_DOMAIN) - entry.add_to_hass(hass) - return entry - - -@pytest.fixture(name="go2rtc_binary") -def go2rtc_binary_fixture() -> str: - """Fixture to provide go2rtc binary name.""" - return "/usr/bin/go2rtc" - - -@pytest.fixture -def mock_get_binary(go2rtc_binary) -> Generator[Mock]: - """Mock _get_binary.""" - with patch( - "homeassistant.components.go2rtc.shutil.which", - return_value=go2rtc_binary, - ) as mock_which: - yield mock_which - - -@pytest.fixture(name="has_go2rtc_entry") -def has_go2rtc_entry_fixture() -> bool: - """Fixture to control if a go2rtc config entry should be created.""" - return True - - -@pytest.fixture -def mock_go2rtc_entry(hass: HomeAssistant, has_go2rtc_entry: bool) -> None: - """Mock a go2rtc onfig entry.""" - if not has_go2rtc_entry: - return - config_entry = MockConfigEntry(domain=DOMAIN) - config_entry.add_to_hass(hass) - - -@pytest.fixture(name="is_docker_env") -def is_docker_env_fixture() -> bool: - """Fixture to provide is_docker_env return value.""" - return True - - -@pytest.fixture -def mock_is_docker_env(is_docker_env) -> Generator[Mock]: - """Mock is_docker_env.""" - with patch( - "homeassistant.components.go2rtc.is_docker_env", - return_value=is_docker_env, - ) as mock_is_docker_env: - yield mock_is_docker_env - - -@pytest.fixture -async def init_integration( - hass: HomeAssistant, - rest_client: AsyncMock, - mock_is_docker_env, - mock_get_binary, - server: Mock, -) -> None: - """Initialize the go2rtc integration.""" - assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) - - -@pytest.fixture -async def init_test_integration( - hass: HomeAssistant, - integration_config_entry: ConfigEntry, -) -> MockCamera: - """Initialize components.""" - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups( - config_entry, [CAMERA_DOMAIN] - ) - return True - - async def async_unload_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Unload test config entry.""" - await hass.config_entries.async_forward_entry_unload( - config_entry, CAMERA_DOMAIN - ) - return True - - mock_integration( - hass, - MockModule( - TEST_DOMAIN, - async_setup_entry=async_setup_entry_init, - async_unload_entry=async_unload_entry_init, - ), - ) - test_camera = MockCamera() - setup_test_component_platform( - hass, CAMERA_DOMAIN, [test_camera], from_config_entry=True - ) - mock_platform(hass, f"{TEST_DOMAIN}.config_flow", Mock()) - - with mock_config_flow(TEST_DOMAIN, ConfigFlow): - assert await hass.config_entries.async_setup(integration_config_entry.entry_id) - await hass.async_block_till_done() - - return test_camera - - -async def _test_setup_and_signaling( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - rest_client: AsyncMock, - ws_client: Mock, - config: ConfigType, - after_setup_fn: Callable[[], None], - camera: MockCamera, -) -> None: - """Test the go2rtc config entry.""" - entity_id = camera.entity_id - assert camera.frontend_stream_type == StreamType.HLS - - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) - assert issue_registry.async_get_issue(DOMAIN, "recommended_version") is None - config_entries = hass.config_entries.async_entries(DOMAIN) - 
assert len(config_entries) == 1 - assert config_entries[0].state == ConfigEntryState.LOADED - after_setup_fn() - - receive_message_callback = Mock(spec_set=WebRTCSendMessage) - - async def test() -> None: - await camera.async_handle_async_webrtc_offer( - OFFER_SDP, "session_id", receive_message_callback - ) - ws_client.send.assert_called_once_with( - WebRTCOffer( - OFFER_SDP, - camera.async_get_webrtc_client_configuration().configuration.ice_servers, - ) - ) - ws_client.subscribe.assert_called_once() - - # Simulate the answer from the go2rtc server - callback = ws_client.subscribe.call_args[0][0] - callback(WebRTCAnswer(ANSWER_SDP)) - receive_message_callback.assert_called_once_with(HAWebRTCAnswer(ANSWER_SDP)) - - await test() - - rest_client.streams.add.assert_called_once_with( - entity_id, - [ - "rtsp://stream", - f"ffmpeg:{camera.entity_id}#audio=opus#query=log_level=debug", - ], - ) - - # Stream exists but the source is different - rest_client.streams.add.reset_mock() - rest_client.streams.list.return_value = { - entity_id: Stream([Producer("rtsp://different")]) - } - - receive_message_callback.reset_mock() - ws_client.reset_mock() - await test() - - rest_client.streams.add.assert_called_once_with( - entity_id, - [ - "rtsp://stream", - f"ffmpeg:{camera.entity_id}#audio=opus#query=log_level=debug", - ], - ) - - # If the stream is already added, the stream should not be added again. - rest_client.streams.add.reset_mock() - rest_client.streams.list.return_value = { - entity_id: Stream([Producer("rtsp://stream")]) - } - - receive_message_callback.reset_mock() - ws_client.reset_mock() - await test() - - rest_client.streams.add.assert_not_called() - assert isinstance(camera._webrtc_provider, WebRTCProvider) - - # Set stream source to None and provider should be skipped - rest_client.streams.list.return_value = {} - receive_message_callback.reset_mock() - camera.set_stream_source(None) - await camera.async_handle_async_webrtc_offer( - OFFER_SDP, "session_id", receive_message_callback - ) - receive_message_callback.assert_called_once_with( - WebRTCError("go2rtc_webrtc_offer_failed", "Camera has no stream source") - ) - - -@pytest.mark.usefixtures( - "init_test_integration", - "mock_get_binary", - "mock_is_docker_env", - "mock_go2rtc_entry", -) -@pytest.mark.parametrize( - ("config", "ui_enabled"), - [ - ({DOMAIN: {}}, False), - ({DOMAIN: {CONF_DEBUG_UI: True}}, True), - ({DEFAULT_CONFIG_DOMAIN: {}}, False), - ({DEFAULT_CONFIG_DOMAIN: {}, DOMAIN: {CONF_DEBUG_UI: True}}, True), - ], -) -@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) -async def test_setup_go_binary( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - rest_client: AsyncMock, - ws_client: Mock, - server: AsyncMock, - server_start: Mock, - server_stop: Mock, - init_test_integration: MockCamera, - has_go2rtc_entry: bool, - config: ConfigType, - ui_enabled: bool, -) -> None: - """Test the go2rtc config entry with binary.""" - assert (len(hass.config_entries.async_entries(DOMAIN)) == 1) == has_go2rtc_entry - - def after_setup() -> None: - server.assert_called_once_with(hass, "/usr/bin/go2rtc", enable_ui=ui_enabled) - server_start.assert_called_once() - - await _test_setup_and_signaling( - hass, - issue_registry, - rest_client, - ws_client, - config, - after_setup, - init_test_integration, - ) - - await hass.async_stop() - server_stop.assert_called_once() - - -@pytest.mark.usefixtures("mock_go2rtc_entry") -@pytest.mark.parametrize( - ("go2rtc_binary", "is_docker_env"), - [ - ("/usr/bin/go2rtc", True), - (None, 
False), - ], -) -@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) -async def test_setup( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - rest_client: AsyncMock, - ws_client: Mock, - server: Mock, - init_test_integration: MockCamera, - mock_get_binary: Mock, - mock_is_docker_env: Mock, - has_go2rtc_entry: bool, -) -> None: - """Test the go2rtc config entry without binary.""" - assert (len(hass.config_entries.async_entries(DOMAIN)) == 1) == has_go2rtc_entry - - config = {DOMAIN: {CONF_URL: "http://localhost:1984/"}} - - def after_setup() -> None: - server.assert_not_called() - - await _test_setup_and_signaling( - hass, - issue_registry, - rest_client, - ws_client, - config, - after_setup, - init_test_integration, - ) - - mock_get_binary.assert_not_called() - server.assert_not_called() - - -class Callbacks(NamedTuple): - """Callbacks for the test.""" - - on_message: Mock - send_message: Mock - - -@pytest.fixture -async def message_callbacks( - ws_client: Mock, - init_test_integration: MockCamera, -) -> Callbacks: - """Prepare and return receive message callback.""" - receive_callback = Mock(spec_set=WebRTCSendMessage) - camera = init_test_integration - - await camera.async_handle_async_webrtc_offer( - OFFER_SDP, "session_id", receive_callback - ) - ws_client.send.assert_called_once_with( - WebRTCOffer( - OFFER_SDP, - camera.async_get_webrtc_client_configuration().configuration.ice_servers, - ) - ) - ws_client.subscribe.assert_called_once() - - # Simulate messages from the go2rtc server - send_callback = ws_client.subscribe.call_args[0][0] - - return Callbacks(receive_callback, send_callback) - - -@pytest.mark.parametrize( - ("message", "expected_message"), - [ - ( - WebRTCCandidate("candidate"), - HAWebRTCCandidate(RTCIceCandidate("candidate")), - ), - ( - WebRTCAnswer(ANSWER_SDP), - HAWebRTCAnswer(ANSWER_SDP), - ), - ( - WsError("error"), - WebRTCError("go2rtc_webrtc_offer_failed", "error"), - ), - ], -) -@pytest.mark.usefixtures("init_integration") -async def test_receiving_messages_from_go2rtc_server( - message_callbacks: Callbacks, - message: ReceiveMessages, - expected_message: WebRTCMessage, -) -> None: - """Test receiving message from go2rtc server.""" - on_message, send_message = message_callbacks - - send_message(message) - on_message.assert_called_once_with(expected_message) - - -@pytest.mark.usefixtures("init_integration") -async def test_on_candidate( - ws_client: Mock, - init_test_integration: MockCamera, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test frontend sending candidate to go2rtc server.""" - camera = init_test_integration - session_id = "session_id" - - # Session doesn't exist - await camera.async_on_webrtc_candidate(session_id, RTCIceCandidate("candidate")) - assert ( - "homeassistant.components.go2rtc", - logging.DEBUG, - f"Unknown session {session_id}. 
Ignoring candidate", - ) in caplog.record_tuples - caplog.clear() - - # Store session - await init_test_integration.async_handle_async_webrtc_offer( - OFFER_SDP, session_id, Mock() - ) - ws_client.send.assert_called_once_with( - WebRTCOffer( - OFFER_SDP, - camera.async_get_webrtc_client_configuration().configuration.ice_servers, - ) - ) - ws_client.reset_mock() - - await camera.async_on_webrtc_candidate(session_id, RTCIceCandidate("candidate")) - ws_client.send.assert_called_once_with(WebRTCCandidate("candidate")) - assert caplog.record_tuples == [] - - -@pytest.mark.usefixtures("init_integration") -async def test_close_session( - ws_client: Mock, - init_test_integration: MockCamera, -) -> None: - """Test closing session.""" - camera = init_test_integration - session_id = "session_id" - - # Session doesn't exist - with pytest.raises(KeyError): - camera.close_webrtc_session(session_id) - ws_client.close.assert_not_called() - - # Store session - await init_test_integration.async_handle_async_webrtc_offer( - OFFER_SDP, session_id, Mock() - ) - ws_client.send.assert_called_once_with( - WebRTCOffer( - OFFER_SDP, - camera.async_get_webrtc_client_configuration().configuration.ice_servers, - ) - ) - - # Close session - camera.close_webrtc_session(session_id) - ws_client.close.assert_called_once() - - # Close again should raise an error - ws_client.reset_mock() - with pytest.raises(KeyError): - camera.close_webrtc_session(session_id) - ws_client.close.assert_not_called() - - -ERR_BINARY_NOT_FOUND = "Could not find go2rtc docker binary" -ERR_CONNECT = "Could not connect to go2rtc instance" -ERR_CONNECT_RETRY = ( - "Could not connect to go2rtc instance on http://localhost:1984/; Retrying" -) -ERR_START_SERVER = "Could not start go2rtc server" -ERR_UNSUPPORTED_VERSION = "The go2rtc server version is not supported" -_INVALID_CONFIG = "Invalid config for 'go2rtc': " -ERR_INVALID_URL = _INVALID_CONFIG + "invalid url" -ERR_EXCLUSIVE = _INVALID_CONFIG + DEBUG_UI_URL_MESSAGE -ERR_URL_REQUIRED = "Go2rtc URL required in non-docker installs" - - -@pytest.mark.parametrize( - ("config", "go2rtc_binary", "is_docker_env"), - [ - ({}, None, False), - ], -) -@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) -@pytest.mark.usefixtures( - "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" -) -async def test_non_user_setup_with_error( - hass: HomeAssistant, - config: ConfigType, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test setup integration does not fail if not setup by user.""" - - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) - assert not hass.config_entries.async_entries(DOMAIN) - - -@pytest.mark.parametrize( - ("config", "go2rtc_binary", "is_docker_env", "expected_log_message"), - [ - ({DEFAULT_CONFIG_DOMAIN: {}}, None, True, ERR_BINARY_NOT_FOUND), - ({DEFAULT_CONFIG_DOMAIN: {}}, "/usr/bin/go2rtc", True, ERR_START_SERVER), - ({DOMAIN: {}}, None, False, ERR_URL_REQUIRED), - ({DOMAIN: {}}, None, True, ERR_BINARY_NOT_FOUND), - ({DOMAIN: {}}, "/usr/bin/go2rtc", True, ERR_START_SERVER), - ({DOMAIN: {CONF_URL: "invalid"}}, None, True, ERR_INVALID_URL), - ( - {DOMAIN: {CONF_URL: "http://localhost:1984", CONF_DEBUG_UI: True}}, - None, - True, - ERR_EXCLUSIVE, - ), - ], -) -@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) -@pytest.mark.usefixtures( - "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" -) -async def test_setup_with_setup_error( - hass: HomeAssistant, - config: 
ConfigType, - caplog: pytest.LogCaptureFixture, - has_go2rtc_entry: bool, - expected_log_message: str, -) -> None: - """Test setup integration fails.""" - - assert not await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) - assert bool(hass.config_entries.async_entries(DOMAIN)) == has_go2rtc_entry - assert expected_log_message in caplog.text - - -@pytest.mark.parametrize( - ("config", "go2rtc_binary", "is_docker_env", "expected_log_message"), - [ - ({DOMAIN: {CONF_URL: "http://localhost:1984/"}}, None, True, ERR_CONNECT), - ], -) -@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) -@pytest.mark.usefixtures( - "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" -) -async def test_setup_with_setup_entry_error( - hass: HomeAssistant, - config: ConfigType, - caplog: pytest.LogCaptureFixture, - expected_log_message: str, -) -> None: - """Test setup integration entry fails.""" - - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) - config_entries = hass.config_entries.async_entries(DOMAIN) - assert len(config_entries) == 1 - assert config_entries[0].state == ConfigEntryState.SETUP_ERROR - assert expected_log_message in caplog.text - - -@pytest.mark.parametrize("config", [{DOMAIN: {CONF_URL: "http://localhost:1984/"}}]) -@pytest.mark.parametrize( - ("cause", "expected_config_entry_state", "expected_log_message"), - [ - (ClientConnectionError(), ConfigEntryState.SETUP_RETRY, ERR_CONNECT_RETRY), - (ServerConnectionError(), ConfigEntryState.SETUP_RETRY, ERR_CONNECT_RETRY), - (None, ConfigEntryState.SETUP_ERROR, ERR_CONNECT), - (Exception(), ConfigEntryState.SETUP_ERROR, ERR_CONNECT), - ], -) -@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) -@pytest.mark.usefixtures( - "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" -) -async def test_setup_with_retryable_setup_entry_error_custom_server( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - rest_client: AsyncMock, - config: ConfigType, - cause: Exception, - expected_config_entry_state: ConfigEntryState, - expected_log_message: str, -) -> None: - """Test setup integration entry fails.""" - go2rtc_error = Go2RtcClientError() - go2rtc_error.__cause__ = cause - rest_client.validate_server_version.side_effect = go2rtc_error - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) - config_entries = hass.config_entries.async_entries(DOMAIN) - assert len(config_entries) == 1 - assert config_entries[0].state == expected_config_entry_state - assert expected_log_message in caplog.text - - -@pytest.mark.parametrize("config", [{DOMAIN: {}}, {DEFAULT_CONFIG_DOMAIN: {}}]) -@pytest.mark.parametrize( - ("cause", "expected_config_entry_state", "expected_log_message"), - [ - (ClientConnectionError(), ConfigEntryState.NOT_LOADED, ERR_START_SERVER), - (ServerConnectionError(), ConfigEntryState.NOT_LOADED, ERR_START_SERVER), - (None, ConfigEntryState.NOT_LOADED, ERR_START_SERVER), - (Exception(), ConfigEntryState.NOT_LOADED, ERR_START_SERVER), - ], -) -@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) -@pytest.mark.usefixtures( - "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" -) -async def test_setup_with_retryable_setup_entry_error_default_server( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - rest_client: AsyncMock, - has_go2rtc_entry: bool, - config: ConfigType, 
- cause: Exception, - expected_config_entry_state: ConfigEntryState, - expected_log_message: str, -) -> None: - """Test setup integration entry fails.""" - go2rtc_error = Go2RtcClientError() - go2rtc_error.__cause__ = cause - rest_client.validate_server_version.side_effect = go2rtc_error - assert not await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) - config_entries = hass.config_entries.async_entries(DOMAIN) - assert len(config_entries) == has_go2rtc_entry - for config_entry in config_entries: - assert config_entry.state == expected_config_entry_state - assert expected_log_message in caplog.text - - -@pytest.mark.parametrize("config", [{DOMAIN: {}}, {DEFAULT_CONFIG_DOMAIN: {}}]) -@pytest.mark.parametrize( - ("go2rtc_error", "expected_config_entry_state", "expected_log_message"), - [ - ( - Go2RtcVersionError("1.9.4", "1.9.5", "2.0.0"), - ConfigEntryState.SETUP_RETRY, - ERR_UNSUPPORTED_VERSION, - ), - ], -) -@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) -@pytest.mark.usefixtures( - "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" -) -async def test_setup_with_version_error( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - rest_client: AsyncMock, - config: ConfigType, - go2rtc_error: Exception, - expected_config_entry_state: ConfigEntryState, - expected_log_message: str, -) -> None: - """Test setup integration entry fails.""" - rest_client.validate_server_version.side_effect = [None, go2rtc_error] - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) - config_entries = hass.config_entries.async_entries(DOMAIN) - assert len(config_entries) == 1 - assert config_entries[0].state == expected_config_entry_state - assert expected_log_message in caplog.text - - -async def test_config_entry_remove(hass: HomeAssistant) -> None: - """Test config entry removed when neither default_config nor go2rtc is in config.""" - config_entry = MockConfigEntry(domain=DOMAIN) - config_entry.add_to_hass(hass) - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert not await hass.config_entries.async_setup(config_entry.entry_id) - assert len(hass.config_entries.async_entries(DOMAIN)) == 0 - - -@pytest.mark.parametrize("config", [{DOMAIN: {CONF_URL: "http://localhost:1984"}}]) -@pytest.mark.usefixtures("server") -async def test_setup_with_recommended_version_repair( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - rest_client: AsyncMock, - config: ConfigType, -) -> None: - """Test setup integration entry fails.""" - rest_client.validate_server_version.return_value = AwesomeVersion("1.9.5") - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) - - # Verify the issue is created - issue = issue_registry.async_get_issue(DOMAIN, "recommended_version") - assert issue - assert issue.is_fixable is False - assert issue.is_persistent is False - assert issue.severity == ir.IssueSeverity.WARNING - assert issue.issue_id == "recommended_version" - assert issue.translation_key == "recommended_version" - assert issue.translation_placeholders == { - "recommended_version": RECOMMENDED_VERSION, - "current_version": "1.9.5", - } diff --git a/tests/components/go2rtc/test_server.py b/tests/components/go2rtc/test_server.py deleted file mode 100644 index e4fe3993f3c..00000000000 --- a/tests/components/go2rtc/test_server.py +++ /dev/null @@ -1,393 +0,0 @@ -"""Tests for the 
go2rtc server.""" - -import asyncio -from collections.abc import Generator -import logging -import subprocess -from unittest.mock import AsyncMock, MagicMock, Mock, patch - -import pytest - -from homeassistant.components.go2rtc.server import Server -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -TEST_BINARY = "/bin/go2rtc" - - -@pytest.fixture -def enable_ui() -> bool: - """Fixture to enable the UI.""" - return False - - -@pytest.fixture -def server(hass: HomeAssistant, enable_ui: bool) -> Server: - """Fixture to initialize the Server.""" - return Server(hass, binary=TEST_BINARY, enable_ui=enable_ui) - - -@pytest.fixture -def mock_tempfile() -> Generator[Mock]: - """Fixture to mock NamedTemporaryFile.""" - with patch( - "homeassistant.components.go2rtc.server.NamedTemporaryFile", autospec=True - ) as mock_tempfile: - file = mock_tempfile.return_value.__enter__.return_value - file.name = "test.yaml" - yield file - - -def _assert_server_output_logged( - server_stdout: list[str], - caplog: pytest.LogCaptureFixture, - loglevel: int, - expect_logged: bool, -) -> None: - """Check server stdout was logged.""" - for entry in server_stdout: - assert ( - ( - "homeassistant.components.go2rtc.server", - loglevel, - entry, - ) - in caplog.record_tuples - ) is expect_logged - - -def assert_server_output_logged( - server_stdout: list[str], - caplog: pytest.LogCaptureFixture, - loglevel: int, -) -> None: - """Check server stdout was logged.""" - _assert_server_output_logged(server_stdout, caplog, loglevel, True) - - -def assert_server_output_not_logged( - server_stdout: list[str], - caplog: pytest.LogCaptureFixture, - loglevel: int, -) -> None: - """Check server stdout was logged.""" - _assert_server_output_logged(server_stdout, caplog, loglevel, False) - - -@pytest.mark.parametrize( - ("enable_ui", "api_ip"), - [ - (True, ""), - (False, "127.0.0.1"), - ], -) -async def test_server_run_success( - mock_create_subprocess: AsyncMock, - rest_client: AsyncMock, - server_stdout: list[str], - server: Server, - caplog: pytest.LogCaptureFixture, - mock_tempfile: Mock, - api_ip: str, -) -> None: - """Test that the server runs successfully.""" - await server.start() - - # Check that Popen was called with the right arguments - mock_create_subprocess.assert_called_once_with( - TEST_BINARY, - "-c", - "test.yaml", - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - close_fds=False, - ) - - # Verify that the config file was written - mock_tempfile.write.assert_called_once_with( - f"""# This file is managed by Home Assistant -# Do not edit it manually - -api: - listen: "{api_ip}:11984" - -rtsp: - listen: "127.0.0.1:18554" - -webrtc: - listen: ":18555/tcp" - ice_servers: [] -""".encode() - ) - - # Verify go2rtc binary stdout was logged with debug level - assert_server_output_logged(server_stdout, caplog, logging.DEBUG) - - await server.stop() - mock_create_subprocess.return_value.terminate.assert_called_once() - - # Verify go2rtc binary stdout was not logged with warning level - assert_server_output_not_logged(server_stdout, caplog, logging.WARNING) - - -@pytest.mark.usefixtures("mock_tempfile") -async def test_server_timeout_on_stop( - mock_create_subprocess: MagicMock, rest_client: AsyncMock, server: Server -) -> None: - """Test server run where the process takes too long to terminate.""" - # Start server thread - await server.start() - - async def sleep() -> None: - await asyncio.sleep(1) - - # Simulate timeout - 
mock_create_subprocess.return_value.wait.side_effect = sleep - - with patch("homeassistant.components.go2rtc.server._TERMINATE_TIMEOUT", new=0.1): - await server.stop() - - # Ensure terminate and kill were called due to timeout - mock_create_subprocess.return_value.terminate.assert_called_once() - mock_create_subprocess.return_value.kill.assert_called_once() - - -@pytest.mark.parametrize( - "server_stdout", - [ - [ - "09:00:03.466 INF go2rtc platform=linux/amd64 revision=780f378 version=1.9.5", - "09:00:03.466 INF config path=/tmp/go2rtc.yaml", - ] - ], -) -@pytest.mark.usefixtures("mock_tempfile") -async def test_server_failed_to_start( - mock_create_subprocess: MagicMock, - server_stdout: list[str], - server: Server, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test server, where an exception is raised if the expected log entry was not received until the timeout.""" - with ( - patch("homeassistant.components.go2rtc.server._SETUP_TIMEOUT", new=0.1), - pytest.raises(HomeAssistantError, match="Go2rtc server didn't start correctly"), - ): - await server.start() - - # Verify go2rtc binary stdout was logged with debug and warning level - assert_server_output_logged(server_stdout, caplog, logging.DEBUG) - assert_server_output_logged(server_stdout, caplog, logging.WARNING) - - assert ( - "homeassistant.components.go2rtc.server", - logging.ERROR, - "Go2rtc server didn't start correctly", - ) in caplog.record_tuples - - # Check that Popen was called with the right arguments - mock_create_subprocess.assert_called_once_with( - TEST_BINARY, - "-c", - "test.yaml", - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - close_fds=False, - ) - - -@pytest.mark.parametrize( - ("server_stdout", "expected_loglevel"), - [ - ( - [ - "09:00:03.466 TRC [api] register path path=/", - "09:00:03.466 DBG build vcs.time=2024-10-28T19:47:55Z version=go1.23.2", - "09:00:03.466 INF go2rtc platform=linux/amd64 revision=780f378 version=1.9.5", - "09:00:03.467 INF [api] listen addr=127.0.0.1:1984", - "09:00:03.466 WRN warning message", - '09:00:03.466 ERR [api] listen error="listen tcp 127.0.0.1:11984: bind: address already in use"', - "09:00:03.466 FTL fatal message", - "09:00:03.466 PNC panic message", - "exit with signal: interrupt", # Example of stderr write - ], - [ - logging.DEBUG, - logging.DEBUG, - logging.DEBUG, - logging.DEBUG, - logging.WARNING, - logging.WARNING, - logging.ERROR, - logging.ERROR, - logging.WARNING, - ], - ) - ], -) -@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) -async def test_log_level_mapping( - hass: HomeAssistant, - mock_create_subprocess: MagicMock, - server_stdout: list[str], - rest_client: AsyncMock, - server: Server, - caplog: pytest.LogCaptureFixture, - expected_loglevel: list[int], -) -> None: - """Log level mapping.""" - evt = asyncio.Event() - - async def wait_event() -> None: - await evt.wait() - - mock_create_subprocess.return_value.wait.side_effect = wait_event - - await server.start() - - await asyncio.sleep(0.1) - await hass.async_block_till_done() - - # Verify go2rtc binary stdout was logged with default level - for i, entry in enumerate(server_stdout): - assert ( - "homeassistant.components.go2rtc.server", - expected_loglevel[i], - entry, - ) in caplog.record_tuples - - evt.set() - await asyncio.sleep(0.1) - await hass.async_block_till_done() - - assert_server_output_logged(server_stdout, caplog, logging.WARNING) - - await server.stop() - - -@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) -async def 
test_server_restart_process_exit( - hass: HomeAssistant, - mock_create_subprocess: AsyncMock, - server_stdout: list[str], - rest_client: AsyncMock, - server: Server, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test that the server is restarted when it exits.""" - evt = asyncio.Event() - - async def wait_event() -> None: - await evt.wait() - - mock_create_subprocess.return_value.wait.side_effect = wait_event - - await server.start() - mock_create_subprocess.assert_awaited_once() - mock_create_subprocess.reset_mock() - - await asyncio.sleep(0.1) - await hass.async_block_till_done() - mock_create_subprocess.assert_not_awaited() - - # Verify go2rtc binary stdout was not yet logged with warning level - assert_server_output_not_logged(server_stdout, caplog, logging.WARNING) - - evt.set() - await asyncio.sleep(0.1) - mock_create_subprocess.assert_awaited_once() - - # Verify go2rtc binary stdout was logged with warning level - assert_server_output_logged(server_stdout, caplog, logging.WARNING) - - await server.stop() - - -@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) -async def test_server_restart_process_error( - hass: HomeAssistant, - mock_create_subprocess: AsyncMock, - server_stdout: list[str], - rest_client: AsyncMock, - server: Server, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test that the server is restarted on error.""" - mock_create_subprocess.return_value.wait.side_effect = [Exception, None, None, None] - - await server.start() - mock_create_subprocess.assert_awaited_once() - mock_create_subprocess.reset_mock() - - # Verify go2rtc binary stdout was not yet logged with warning level - assert_server_output_not_logged(server_stdout, caplog, logging.WARNING) - - await asyncio.sleep(0.1) - await hass.async_block_till_done() - mock_create_subprocess.assert_awaited_once() - - # Verify go2rtc binary stdout was logged with warning level - assert_server_output_logged(server_stdout, caplog, logging.WARNING) - - await server.stop() - - -@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) -async def test_server_restart_api_error( - hass: HomeAssistant, - mock_create_subprocess: AsyncMock, - server_stdout: list[str], - rest_client: AsyncMock, - server: Server, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test that the server is restarted on error.""" - rest_client.streams.list.side_effect = Exception - - await server.start() - mock_create_subprocess.assert_awaited_once() - mock_create_subprocess.reset_mock() - - # Verify go2rtc binary stdout was not yet logged with warning level - assert_server_output_not_logged(server_stdout, caplog, logging.WARNING) - - await asyncio.sleep(0.1) - await hass.async_block_till_done() - mock_create_subprocess.assert_awaited_once() - - # Verify go2rtc binary stdout was logged with warning level - assert_server_output_logged(server_stdout, caplog, logging.WARNING) - - await server.stop() - - -@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) -async def test_server_restart_error( - hass: HomeAssistant, - mock_create_subprocess: AsyncMock, - server_stdout: list[str], - rest_client: AsyncMock, - server: Server, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test error handling when exception is raised during restart.""" - rest_client.streams.list.side_effect = Exception - mock_create_subprocess.return_value.terminate.side_effect = [Exception, None] - - await server.start() - mock_create_subprocess.assert_awaited_once() - mock_create_subprocess.reset_mock() - - # Verify go2rtc 
binary stdout was not yet logged with warning level - assert_server_output_not_logged(server_stdout, caplog, logging.WARNING) - - await asyncio.sleep(0.1) - await hass.async_block_till_done() - mock_create_subprocess.assert_awaited_once() - - # Verify go2rtc binary stdout was logged with warning level - assert_server_output_logged(server_stdout, caplog, logging.WARNING) - - assert "Unexpected error when restarting go2rtc server" in caplog.text - - await server.stop() diff --git a/tests/components/goalzero/test_switch.py b/tests/components/goalzero/test_switch.py index b784cff05aa..de2e6035a12 100644 --- a/tests/components/goalzero/test_switch.py +++ b/tests/components/goalzero/test_switch.py @@ -1,7 +1,7 @@ """Switch tests for the Goalzero integration.""" from homeassistant.components.goalzero.const import DEFAULT_NAME -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.components.switch import DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -32,7 +32,7 @@ async def test_switches_states( text=load_fixture("goalzero/state_change.json"), ) await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: [entity_id]}, blocking=True, @@ -44,7 +44,7 @@ async def test_switches_states( text=load_fixture("goalzero/state_data.json"), ) await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: [entity_id]}, blocking=True, diff --git a/tests/components/gogogate2/test_cover.py b/tests/components/gogogate2/test_cover.py index 42ee1f6f731..001212fa17b 100644 --- a/tests/components/gogogate2/test_cover.py +++ b/tests/components/gogogate2/test_cover.py @@ -20,7 +20,6 @@ from homeassistant.components.cover import ( DOMAIN as COVER_DOMAIN, CoverDeviceClass, CoverEntityFeature, - CoverState, ) from homeassistant.components.gogogate2.const import ( DEVICE_TYPE_GOGOGATE2, @@ -35,6 +34,10 @@ from homeassistant.const import ( CONF_IP_ADDRESS, CONF_PASSWORD, CONF_USERNAME, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -141,7 +144,7 @@ async def test_open_close_update(gogogate2api_mock, hass: HomeAssistant) -> None assert hass.states.get("cover.door1") is None assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == CoverState.OPEN + assert hass.states.get("cover.door1").state == STATE_OPEN assert dict(hass.states.get("cover.door1").attributes) == expected_attributes api.async_info.return_value = info_response(DoorStatus.CLOSED) @@ -160,12 +163,12 @@ async def test_open_close_update(gogogate2api_mock, hass: HomeAssistant) -> None } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == CoverState.CLOSING + assert hass.states.get("cover.door1").state == STATE_CLOSING api.async_close_door.assert_called_with(1) async_fire_time_changed(hass, utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == CoverState.CLOSING + assert hass.states.get("cover.door1").state == STATE_CLOSING api.async_info.return_value = info_response(DoorStatus.CLOSED) api.async_get_door_statuses_from_info.return_value = { @@ -174,7 +177,7 @@ async def test_open_close_update(gogogate2api_mock, hass: HomeAssistant) -> None } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - 
assert hass.states.get("cover.door1").state == CoverState.CLOSED + assert hass.states.get("cover.door1").state == STATE_CLOSED api.async_info.return_value = info_response(DoorStatus.OPENED) api.async_get_door_statuses_from_info.return_value = { @@ -192,12 +195,12 @@ async def test_open_close_update(gogogate2api_mock, hass: HomeAssistant) -> None } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == CoverState.OPENING + assert hass.states.get("cover.door1").state == STATE_OPENING api.async_open_door.assert_called_with(1) async_fire_time_changed(hass, utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == CoverState.OPENING + assert hass.states.get("cover.door1").state == STATE_OPENING api.async_info.return_value = info_response(DoorStatus.OPENED) api.async_get_door_statuses_from_info.return_value = { @@ -206,7 +209,7 @@ async def test_open_close_update(gogogate2api_mock, hass: HomeAssistant) -> None } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == CoverState.OPEN + assert hass.states.get("cover.door1").state == STATE_OPEN api.async_info.return_value = info_response(DoorStatus.UNDEFINED) api.async_get_door_statuses_from_info.return_value = { @@ -238,7 +241,7 @@ async def test_open_close_update(gogogate2api_mock, hass: HomeAssistant) -> None } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == CoverState.OPENING + assert hass.states.get("cover.door1").state == STATE_OPENING api.async_open_door.assert_called_with(1) assert await hass.config_entries.async_unload(config_entry.entry_id) @@ -300,7 +303,7 @@ async def test_availability(ismartgateapi_mock, hass: HomeAssistant) -> None: } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == CoverState.CLOSED + assert hass.states.get("cover.door1").state == STATE_CLOSED assert dict(hass.states.get("cover.door1").attributes) == expected_attributes diff --git a/tests/components/gogogate2/test_init.py b/tests/components/gogogate2/test_init.py index 90765c425b4..f7e58296a43 100644 --- a/tests/components/gogogate2/test_init.py +++ b/tests/components/gogogate2/test_init.py @@ -3,10 +3,11 @@ from unittest.mock import MagicMock, patch from ismartgate import GogoGate2Api +import pytest -from homeassistant.components.gogogate2 import DEVICE_TYPE_GOGOGATE2 +from homeassistant.components.gogogate2 import DEVICE_TYPE_GOGOGATE2, async_setup_entry from homeassistant.components.gogogate2.const import DEVICE_TYPE_ISMARTGATE, DOMAIN -from homeassistant.config_entries import SOURCE_USER, ConfigEntryState +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_DEVICE, CONF_IP_ADDRESS, @@ -14,6 +15,7 @@ from homeassistant.const import ( CONF_USERNAME, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady from tests.common import MockConfigEntry @@ -95,8 +97,6 @@ async def test_api_failure_on_startup(hass: HomeAssistant) -> None: "homeassistant.components.gogogate2.common.ISmartGateApi.async_info", side_effect=TimeoutError, ), + pytest.raises(ConfigEntryNotReady), ): - await hass.config_entries.async_setup(config_entry.entry_id) - await 
hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.SETUP_RETRY + await async_setup_entry(hass, config_entry) diff --git a/tests/components/goodwe/snapshots/test_diagnostics.ambr b/tests/components/goodwe/snapshots/test_diagnostics.ambr index f52e47688e8..4097848a34a 100644 --- a/tests/components/goodwe/snapshots/test_diagnostics.ambr +++ b/tests/components/goodwe/snapshots/test_diagnostics.ambr @@ -7,8 +7,6 @@ 'model_family': 'ET', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'goodwe', 'entry_id': '3bd2acb0e4f0476d40865546d0d91921', 'minor_version': 1, diff --git a/tests/components/goodwe/test_diagnostics.py b/tests/components/goodwe/test_diagnostics.py index 0a997edc594..21917265811 100644 --- a/tests/components/goodwe/test_diagnostics.py +++ b/tests/components/goodwe/test_diagnostics.py @@ -3,7 +3,6 @@ from unittest.mock import MagicMock, patch from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.components.goodwe import CONF_MODEL_FAMILY, DOMAIN from homeassistant.const import CONF_HOST @@ -33,4 +32,4 @@ async def test_entry_diagnostics( assert await async_setup_component(hass, DOMAIN, {}) result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot diff --git a/tests/components/google/conftest.py b/tests/components/google/conftest.py index 23b6b884145..26a32a64b21 100644 --- a/tests/components/google/conftest.py +++ b/tests/components/google/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import AsyncGenerator, Awaitable, Callable, Generator +from collections.abc import Awaitable, Callable import datetime import http import time @@ -13,6 +13,7 @@ from aiohttp.client_exceptions import ClientError from gcal_sync.auth import API_BASE_URL from oauth2client.client import OAuth2Credentials import pytest +from typing_extensions import AsyncGenerator, Generator import yaml from homeassistant.components.application_credentials import ( @@ -98,21 +99,12 @@ def calendar_access_role() -> str: return "owner" -@pytest.fixture -def calendar_is_primary() -> bool: - """Set if the calendar is the primary or not.""" - return False - - @pytest.fixture(name="test_api_calendar") -def api_calendar( - calendar_access_role: str, calendar_is_primary: bool -) -> dict[str, Any]: +def api_calendar(calendar_access_role: str) -> dict[str, Any]: """Return a test calendar object used in API responses.""" return { **TEST_API_CALENDAR, "accessRole": calendar_access_role, - "primary": calendar_is_primary, } @@ -302,7 +294,7 @@ def mock_calendars_list( @pytest.fixture def mock_calendar_get( aioclient_mock: AiohttpClientMocker, -) -> Callable[..., None]: +) -> Callable[[...], None]: """Fixture for returning a calendar get response.""" def _result( @@ -324,7 +316,7 @@ def mock_calendar_get( @pytest.fixture def mock_insert_event( aioclient_mock: AiohttpClientMocker, -) -> Callable[..., None]: +) -> Callable[[...], None]: """Fixture for capturing event creation.""" def _expect_result( @@ -339,7 +331,7 @@ def mock_insert_event( @pytest.fixture(autouse=True) -async def set_time_zone(hass: HomeAssistant) -> None: +async def set_time_zone(hass): """Set the time zone for the tests.""" # Set our timezone to CST/Regina so we can check calculations # This keeps UTC-6 all year round diff --git a/tests/components/google/test_calendar.py b/tests/components/google/test_calendar.py index 
6ce95a2bc17..8e934925f46 100644 --- a/tests/components/google/test_calendar.py +++ b/tests/components/google/test_calendar.py @@ -15,11 +15,9 @@ from gcal_sync.auth import API_BASE_URL import pytest from homeassistant.components.google.const import CONF_CALENDAR_ACCESS, DOMAIN -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity_registry import RegistryEntryDisabler from homeassistant.helpers.template import DATE_STR_FORMAT import homeassistant.util.dt as dt_util @@ -76,7 +74,7 @@ def upcoming_event_url(entity: str = TEST_ENTITY) -> str: class Client: """Test client with helper methods for calendar websocket.""" - def __init__(self, client) -> None: + def __init__(self, client): """Initialize Client.""" self.client = client self.id = 0 @@ -387,9 +385,6 @@ async def test_update_error( with patch("homeassistant.util.utcnow", return_value=now): async_fire_time_changed(hass, now) await hass.async_block_till_done() - # Ensure coordinator update completes - await hass.async_block_till_done() - await hass.async_block_till_done() # Entity is marked uanvailable due to API failure state = hass.states.get(TEST_ENTITY) @@ -419,9 +414,6 @@ async def test_update_error( with patch("homeassistant.util.utcnow", return_value=now): async_fire_time_changed(hass, now) await hass.async_block_till_done() - # Ensure coordinator update completes - await hass.async_block_till_done() - await hass.async_block_till_done() # State updated with new API response state = hass.states.get(TEST_ENTITY) @@ -572,62 +564,6 @@ async def test_opaque_event( assert state.state == (STATE_ON if expect_visible_event else STATE_OFF) -async def test_declined_event( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_calendars_yaml, - mock_events_list_items, - component_setup, -) -> None: - """Test querying the API and fetching events from the server.""" - event = { - **TEST_EVENT, - **upcoming(), - "attendees": [ - { - "self": "True", - "responseStatus": "declined", - } - ], - } - mock_events_list_items([event]) - assert await component_setup() - - client = await hass_client() - response = await client.get(upcoming_event_url(TEST_YAML_ENTITY)) - assert response.status == HTTPStatus.OK - events = await response.json() - assert len(events) == 0 - - -async def test_attending_event( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_calendars_yaml, - mock_events_list_items, - component_setup, -) -> None: - """Test querying the API and fetching events from the server.""" - event = { - **TEST_EVENT, - **upcoming(), - "attendees": [ - { - "self": "True", - "responseStatus": "accepted", - } - ], - } - mock_events_list_items([event]) - assert await component_setup() - - client = await hass_client() - response = await client.get(upcoming_event_url(TEST_YAML_ENTITY)) - assert response.status == HTTPStatus.OK - events = await response.json() - assert len(events) == 1 - - @pytest.mark.parametrize("mock_test_setup", [None]) async def test_scan_calendar_error( hass: HomeAssistant, @@ -670,9 +606,6 @@ async def test_future_event_update_behavior( freezer.move_to(now) async_fire_time_changed(hass, now) await hass.async_block_till_done() - # Ensure coordinator update completes - await hass.async_block_till_done() - await hass.async_block_till_done() # Event has started state = hass.states.get(TEST_ENTITY) @@ 
-710,9 +643,6 @@ async def test_future_event_offset_update_behavior( freezer.move_to(now) async_fire_time_changed(hass, now) await hass.async_block_till_done() - # Ensure coordinator update completes - await hass.async_block_till_done() - await hass.async_block_till_done() # Event has not started, but the offset was reached state = hass.states.get(TEST_ENTITY) @@ -897,7 +827,7 @@ async def test_websocket_create( hass: HomeAssistant, component_setup: ComponentSetup, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[..., None], + mock_insert_event: Callable[[str, dict[str, Any]], None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, ws_client: ClientFixture, @@ -939,7 +869,7 @@ async def test_websocket_create_all_day( hass: HomeAssistant, component_setup: ComponentSetup, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[..., None], + mock_insert_event: Callable[[str, dict[str, Any]], None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, ws_client: ClientFixture, @@ -1136,7 +1066,7 @@ async def test_readonly_websocket_create( hass: HomeAssistant, component_setup: ComponentSetup, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[..., None], + mock_insert_event: Callable[[str, dict[str, Any]], None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, ws_client: ClientFixture, @@ -1187,7 +1117,7 @@ async def test_readonly_search_calendar( hass: HomeAssistant, component_setup: ComponentSetup, mock_calendars_yaml, - mock_insert_event: Callable[..., None], + mock_insert_event: Callable[[str, dict[str, Any]], None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, ws_client: ClientFixture, @@ -1417,90 +1347,3 @@ async def test_invalid_rrule_fix( assert event["uid"] == "cydrevtfuybguinhomj@google.com" assert event["recurrence_id"] == "_c8rinwq863h45qnucyoi43ny8_20230915" assert event["rrule"] is None - - -@pytest.mark.parametrize( - ("event_type", "expected_event_message"), - [ - ("default", "Test All Day Event"), - ("workingLocation", None), - ], -) -async def test_working_location_ignored( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_events_list_items: Callable[[list[dict[str, Any]]], None], - component_setup: ComponentSetup, - event_type: str, - expected_event_message: str | None, -) -> None: - """Test working location events are skipped.""" - event = { - **TEST_EVENT, - **upcoming(), - "eventType": event_type, - } - mock_events_list_items([event]) - assert await component_setup() - - state = hass.states.get(TEST_ENTITY) - assert state - assert state.name == TEST_ENTITY_NAME - assert state.attributes.get("message") == expected_event_message - - -@pytest.mark.parametrize("calendar_is_primary", [True]) -async def test_working_location_entity( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - entity_registry: er.EntityRegistry, - mock_events_list_items: Callable[[list[dict[str, Any]]], None], - component_setup: ComponentSetup, -) -> None: - """Test that working location events are registered under a disabled by default entity.""" - event = { - **TEST_EVENT, - **upcoming(), - "eventType": "workingLocation", - } - mock_events_list_items([event]) - assert await component_setup() - - entity_entry = entity_registry.async_get("calendar.working_location") - assert entity_entry - assert entity_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - - entity_registry.async_update_entity( - entity_id="calendar.working_location", disabled_by=None - ) - 
async_fire_time_changed( - hass, - dt_util.utcnow() + datetime.timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), - ) - await hass.async_block_till_done() - - state = hass.states.get("calendar.working_location") - assert state - assert state.name == "Working location" - assert state.attributes.get("message") == "Test All Day Event" - - -@pytest.mark.parametrize("calendar_is_primary", [False]) -async def test_no_working_location_entity( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - entity_registry: er.EntityRegistry, - mock_events_list_items: Callable[[list[dict[str, Any]]], None], - component_setup: ComponentSetup, -) -> None: - """Test that working location events are not registered for a secondary calendar.""" - event = { - **TEST_EVENT, - **upcoming(), - "eventType": "workingLocation", - } - mock_events_list_items([event]) - assert await component_setup() - - entity_entry = entity_registry.async_get("calendar.working_location") - assert not entity_entry diff --git a/tests/components/google/test_config_flow.py b/tests/components/google/test_config_flow.py index de882a6f791..12281f6d348 100644 --- a/tests/components/google/test_config_flow.py +++ b/tests/components/google/test_config_flow.py @@ -26,11 +26,9 @@ from homeassistant.components.application_credentials import ( async_import_client_credential, ) from homeassistant.components.google.const import ( - CONF_CALENDAR_ACCESS, CONF_CREDENTIAL_TYPE, DOMAIN, CredentialType, - FeatureAccess, ) from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -118,7 +116,7 @@ async def primary_calendar_status() -> HTTPStatus | None: @pytest.fixture(autouse=True) async def primary_calendar( - mock_calendar_get: Callable[..., None], + mock_calendar_get: Callable[[...], None], primary_calendar_error: ClientError | None, primary_calendar_status: HTTPStatus | None, primary_calendar_email: str, @@ -132,7 +130,7 @@ async def primary_calendar( ) -async def fire_alarm(hass: HomeAssistant, point_in_time: datetime.datetime) -> None: +async def fire_alarm(hass, point_in_time): """Fire an alarm and wait for callbacks to run.""" with freeze_time(point_in_time): async_fire_time_changed(hass, point_in_time) @@ -476,27 +474,10 @@ async def test_wrong_configuration( assert result.get("reason") == "oauth_error" -@pytest.mark.parametrize( - ("options"), - [ - ({}), - ( - { - CONF_CALENDAR_ACCESS: FeatureAccess.read_write.name, - } - ), - ( - { - CONF_CALENDAR_ACCESS: FeatureAccess.read_only.name, - } - ), - ], -) async def test_reauth_flow( hass: HomeAssistant, mock_code_flow: Mock, mock_exchange: Mock, - options: dict[str, Any] | None, ) -> None: """Test reauth of an existing config entry.""" config_entry = MockConfigEntry( @@ -505,7 +486,6 @@ async def test_reauth_flow( "auth_implementation": DOMAIN, "token": {"access_token": "OLD_ACCESS_TOKEN"}, }, - options=options, ) config_entry.add_to_hass(hass) await async_import_client_credential( @@ -517,7 +497,14 @@ async def test_reauth_flow( entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -560,8 +547,6 @@ async def test_reauth_flow( }, "credential_type": "device_auth", } - # Options are preserved 
during reauth - assert entries[0].options == options assert len(mock_setup.mock_calls) == 1 @@ -776,7 +761,14 @@ async def test_web_reauth_flow( entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/google/test_diagnostics.py b/tests/components/google/test_diagnostics.py index 78eb6d7ceea..5d6259309b8 100644 --- a/tests/components/google/test_diagnostics.py +++ b/tests/components/google/test_diagnostics.py @@ -11,6 +11,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.auth.models import Credentials from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from .conftest import TEST_EVENT, ApiResult, ComponentSetup @@ -54,6 +55,12 @@ def _get_test_client_generator( return auth_client +@pytest.fixture(autouse=True) +async def setup_diag(hass): + """Set up diagnostics platform.""" + assert await async_setup_component(hass, "diagnostics", {}) + + @freeze_time("2023-03-13 12:05:00-07:00") @pytest.mark.usefixtures("socket_enabled") async def test_diagnostics( diff --git a/tests/components/google/test_init.py b/tests/components/google/test_init.py index 536a1440958..de5e2ea9145 100644 --- a/tests/components/google/test_init.py +++ b/tests/components/google/test_init.py @@ -82,7 +82,7 @@ def assert_state(actual: State | None, expected: State | None) -> None: def add_event_call_service( hass: HomeAssistant, request: pytest.FixtureRequest, -) -> Callable[[dict[str, Any]], Awaitable[None]]: +) -> Callable[dict[str, Any], Awaitable[None]]: """Fixture for calling the add or create event service.""" (domain, service_call, data, target) = request.param @@ -248,23 +248,35 @@ async def test_init_calendar( async def test_multiple_config_entries( hass: HomeAssistant, component_setup: ComponentSetup, - config_entry: MockConfigEntry, mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], mock_events_list: ApiResult, + config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, ) -> None: """Test finding a calendar from the API.""" - mock_calendars_list({"items": [test_api_calendar]}) - mock_events_list({}) - assert await component_setup() - state = hass.states.get(TEST_API_ENTITY) + config_entry1 = MockConfigEntry( + domain=DOMAIN, data=config_entry.data, unique_id=EMAIL_ADDRESS + ) + calendar1 = { + **test_api_calendar, + "id": "calendar-id1", + "summary": "Example Calendar 1", + } + + mock_calendars_list({"items": [calendar1]}) + mock_events_list({}, calendar_id="calendar-id1") + config_entry1.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry1.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("calendar.example_calendar_1") assert state assert state.state == STATE_OFF - assert state.attributes.get(ATTR_FRIENDLY_NAME) == TEST_API_ENTITY_NAME + assert state.attributes.get(ATTR_FRIENDLY_NAME) == "Example calendar 1" config_entry2 = MockConfigEntry( domain=DOMAIN, data=config_entry.data, unique_id="other-address@example.com" @@ -410,7 +422,7 @@ async def test_add_event_invalid_params( mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], mock_events_list: ApiResult, - add_event_call_service: 
Callable[[dict[str, Any]], Awaitable[None]], + add_event_call_service: Callable[dict[str, Any], Awaitable[None]], date_fields: dict[str, Any], expected_error: type[Exception], error_match: str | None, @@ -445,14 +457,14 @@ async def test_add_event_date_in_x( hass: HomeAssistant, component_setup: ComponentSetup, mock_calendars_list: ApiResult, - mock_insert_event: Callable[..., None], + mock_insert_event: Callable[[..., dict[str, Any]], None], test_api_calendar: dict[str, Any], mock_events_list: ApiResult, date_fields: dict[str, Any], start_timedelta: datetime.timedelta, end_timedelta: datetime.timedelta, aioclient_mock: AiohttpClientMocker, - add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], + add_event_call_service: Callable[dict[str, Any], Awaitable[None]], ) -> None: """Test service call that adds an event with various time ranges.""" @@ -484,10 +496,10 @@ async def test_add_event_date( component_setup: ComponentSetup, mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[..., None], + mock_insert_event: Callable[[str, dict[str, Any]], None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, - add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], + add_event_call_service: Callable[dict[str, Any], Awaitable[None]], ) -> None: """Test service call that sets a date range.""" @@ -523,11 +535,11 @@ async def test_add_event_date_time( hass: HomeAssistant, component_setup: ComponentSetup, mock_calendars_list: ApiResult, - mock_insert_event: Callable[..., None], + mock_insert_event: Callable[[str, dict[str, Any]], None], test_api_calendar: dict[str, Any], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, - add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], + add_event_call_service: Callable[dict[str, Any], Awaitable[None]], ) -> None: """Test service call that adds an event with a date time range.""" @@ -587,7 +599,7 @@ async def test_unsupported_create_event( mock_calendars_yaml: Mock, component_setup: ComponentSetup, mock_calendars_list: ApiResult, - mock_insert_event: Callable[..., None], + mock_insert_event: Callable[[str, dict[str, Any]], None], test_api_calendar: dict[str, Any], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, @@ -624,8 +636,8 @@ async def test_add_event_failure( mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], mock_events_list: ApiResult, - mock_insert_event: Callable[..., None], - add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], + mock_insert_event: Callable[[..., dict[str, Any]], None], + add_event_call_service: Callable[dict[str, Any], Awaitable[None]], ) -> None: """Test service calls with incorrect fields.""" @@ -649,10 +661,10 @@ async def test_add_event_location( component_setup: ComponentSetup, mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[..., None], + mock_insert_event: Callable[[str, dict[str, Any]], None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, - add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], + add_event_call_service: Callable[dict[str, Any], Awaitable[None]], ) -> None: """Test service call that sets a location field.""" @@ -867,7 +879,7 @@ async def test_assign_unique_id( mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], mock_events_list: ApiResult, - mock_calendar_get: Callable[..., None], + mock_calendar_get: Callable[[...], None], config_entry: MockConfigEntry, 
) -> None: """Test an existing config is updated to have unique id if it does not exist.""" @@ -906,7 +918,7 @@ async def test_assign_unique_id_failure( test_api_calendar: dict[str, Any], config_entry: MockConfigEntry, mock_events_list: ApiResult, - mock_calendar_get: Callable[..., None], + mock_calendar_get: Callable[[...], None], request_status: http.HTTPStatus, config_entry_status: ConfigEntryState, ) -> None: diff --git a/tests/components/google_assistant/snapshots/test_diagnostics.ambr b/tests/components/google_assistant/snapshots/test_diagnostics.ambr index edbbdb1ba28..9a4ad8b3da3 100644 --- a/tests/components/google_assistant/snapshots/test_diagnostics.ambr +++ b/tests/components/google_assistant/snapshots/test_diagnostics.ambr @@ -6,8 +6,6 @@ 'project_id': '1234', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'google_assistant', 'minor_version': 1, 'options': dict({ diff --git a/tests/components/google_assistant/test_diagnostics.py b/tests/components/google_assistant/test_diagnostics.py index 1d68079563c..26d91ce7920 100644 --- a/tests/components/google_assistant/test_diagnostics.py +++ b/tests/components/google_assistant/test_diagnostics.py @@ -50,4 +50,4 @@ async def test_diagnostics( config_entry = hass.config_entries.async_entries("google_assistant")[0] assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry - ) == snapshot(exclude=props("entry_id", "created_at", "modified_at")) + ) == snapshot(exclude=props("entry_id")) diff --git a/tests/components/google_assistant/test_helpers.py b/tests/components/google_assistant/test_helpers.py index 0e6876cc901..492f1be1829 100644 --- a/tests/components/google_assistant/test_helpers.py +++ b/tests/components/google_assistant/test_helpers.py @@ -14,9 +14,9 @@ from homeassistant.components.google_assistant.const import ( SOURCE_LOCAL, STORE_GOOGLE_LOCAL_WEBHOOK_ID, ) -from homeassistant.components.matter import MatterDeviceInfo +from homeassistant.components.matter.models import MatterDeviceInfo +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, State -from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util diff --git a/tests/components/google_assistant/test_http.py b/tests/components/google_assistant/test_http.py index 273aac1559e..b041f69828f 100644 --- a/tests/components/google_assistant/test_http.py +++ b/tests/components/google_assistant/test_http.py @@ -4,7 +4,6 @@ from datetime import UTC, datetime, timedelta from http import HTTPStatus import json import os -from pathlib import Path from typing import Any from unittest.mock import ANY, patch from uuid import uuid4 @@ -656,7 +655,9 @@ async def test_async_get_users( ) path = hass.config.config_dir / ".storage" / GoogleConfigStore._STORAGE_KEY os.makedirs(os.path.dirname(path), exist_ok=True) - await hass.async_add_executor_job(Path(path).write_text, store_data) + with open(path, "w", encoding="utf8") as f: + f.write(store_data) + assert await async_get_users(hass) == expected_users await hass.async_stop() diff --git a/tests/components/google_assistant/test_smart_home.py b/tests/components/google_assistant/test_smart_home.py index f1b7108c348..2eeb3d16b81 100644 --- a/tests/components/google_assistant/test_smart_home.py +++ b/tests/components/google_assistant/test_smart_home.py @@ -9,20 +9,10 @@ 
from pytest_unordered import unordered from homeassistant.components.camera import CameraEntityFeature from homeassistant.components.climate import ATTR_MAX_TEMP, ATTR_MIN_TEMP, HVACMode - -# pylint: disable-next=hass-component-root-import from homeassistant.components.demo.binary_sensor import DemoBinarySensor - -# pylint: disable-next=hass-component-root-import from homeassistant.components.demo.cover import DemoCover - -# pylint: disable-next=hass-component-root-import from homeassistant.components.demo.light import LIGHT_EFFECT_LIST, DemoLight - -# pylint: disable-next=hass-component-root-import from homeassistant.components.demo.media_player import AbstractDemoPlayer - -# pylint: disable-next=hass-component-root-import from homeassistant.components.demo.switch import DemoSwitch from homeassistant.components.google_assistant import ( EVENT_COMMAND_RECEIVED, @@ -32,15 +22,14 @@ from homeassistant.components.google_assistant import ( smart_home as sh, trait, ) +from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, - EVENT_CALL_SERVICE, Platform, UnitOfTemperature, __version__, ) -from homeassistant.core import HomeAssistant, State -from homeassistant.core_config import async_process_ha_core_config +from homeassistant.core import EVENT_CALL_SERVICE, HomeAssistant, State from homeassistant.helpers import ( area_registry as ar, device_registry as dr, @@ -209,7 +198,7 @@ async def test_sync_message(hass: HomeAssistant, registries) -> None: }, "traits": [ trait.TRAIT_BRIGHTNESS, - trait.TRAIT_ON_OFF, + trait.TRAIT_ONOFF, trait.TRAIT_COLOR_SETTING, trait.TRAIT_MODES, ], @@ -329,7 +318,7 @@ async def test_sync_in_area(area_on_device, hass: HomeAssistant, registries) -> "name": {"name": "Demo Light"}, "traits": [ trait.TRAIT_BRIGHTNESS, - trait.TRAIT_ON_OFF, + trait.TRAIT_ONOFF, trait.TRAIT_COLOR_SETTING, trait.TRAIT_MODES, ], @@ -926,7 +915,7 @@ async def test_unavailable_state_does_sync(hass: HomeAssistant) -> None: "name": {"name": "Demo Light"}, "traits": [ trait.TRAIT_BRIGHTNESS, - trait.TRAIT_ON_OFF, + trait.TRAIT_ONOFF, trait.TRAIT_COLOR_SETTING, trait.TRAIT_MODES, ], diff --git a/tests/components/google_assistant/test_trait.py b/tests/components/google_assistant/test_trait.py index 1e42edf8e7b..63a34c01dac 100644 --- a/tests/components/google_assistant/test_trait.py +++ b/tests/components/google_assistant/test_trait.py @@ -33,10 +33,7 @@ from homeassistant.components import ( valve, water_heater, ) -from homeassistant.components.alarm_control_panel import ( - AlarmControlPanelEntityFeature, - AlarmControlPanelState, -) +from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature from homeassistant.components.camera import CameraEntityFeature from homeassistant.components.climate import ClimateEntityFeature from homeassistant.components.cover import CoverEntityFeature @@ -54,6 +51,7 @@ from homeassistant.components.media_player import ( from homeassistant.components.vacuum import VacuumEntityFeature from homeassistant.components.valve import ValveEntityFeature from homeassistant.components.water_heater import WaterHeaterEntityFeature +from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_BATTERY_LEVEL, @@ -62,9 +60,11 @@ from homeassistant.const import ( ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, - EVENT_CALL_SERVICE, SERVICE_TURN_OFF, SERVICE_TURN_ON, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_DISARMED, + 
STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, @@ -75,8 +75,12 @@ from homeassistant.const import ( STATE_UNKNOWN, UnitOfTemperature, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State -from homeassistant.core_config import async_process_ha_core_config +from homeassistant.core import ( + DOMAIN as HA_DOMAIN, + EVENT_CALL_SERVICE, + HomeAssistant, + State, +) from homeassistant.util import color, dt as dt_util from homeassistant.util.unit_conversion import TemperatureConverter @@ -186,13 +190,13 @@ async def test_onoff_group(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} - on_calls = async_mock_service(hass, HOMEASSISTANT_DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) + on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} - off_calls = async_mock_service(hass, HOMEASSISTANT_DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) + off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} @@ -215,12 +219,12 @@ async def test_onoff_input_boolean(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} @@ -282,12 +286,12 @@ async def test_onoff_switch(hass: HomeAssistant) -> None: assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} @@ -307,12 +311,12 @@ async def test_onoff_fan(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, 
BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} @@ -333,12 +337,12 @@ async def test_onoff_light(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} @@ -359,13 +363,13 @@ async def test_onoff_media_player(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @@ -386,13 +390,13 @@ async def test_onoff_humidifier(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} @@ -415,13 +419,13 @@ async def test_onoff_water_heater(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, water_heater.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "water_heater.bla"} off_calls = async_mock_service(hass, water_heater.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "water_heater.bla"} @@ -562,22 +566,22 @@ async def test_startstop_vacuum(hass: HomeAssistant) -> None: assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) - await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert 
len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) - await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": False}, {}) + await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) - await trt.execute(trait.COMMAND_PAUSE_UNPAUSE, BASIC_DATA, {"pause": True}, {}) + await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) - await trt.execute(trait.COMMAND_PAUSE_UNPAUSE, BASIC_DATA, {"pause": False}, {}) + await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} @@ -612,10 +616,10 @@ async def test_startstop_vacuum(hass: HomeAssistant) -> None: ), ( valve.DOMAIN, - valve.ValveState.OPEN, - valve.ValveState.CLOSED, - valve.ValveState.OPENING, - valve.ValveState.CLOSING, + valve.STATE_OPEN, + valve.STATE_CLOSED, + valve.STATE_OPENING, + valve.STATE_CLOSING, ValveEntityFeature.STOP | ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE, @@ -665,7 +669,7 @@ async def test_startstop_cover_valve( open_calls = async_mock_service(hass, domain, service_open) close_calls = async_mock_service(hass, domain, service_close) toggle_calls = async_mock_service(hass, domain, service_toggle) - await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": False}, {}) + await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} @@ -681,18 +685,18 @@ async def test_startstop_cover_valve( with pytest.raises( SmartHomeError, match=f"{domain.capitalize()} is already stopped" ): - await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": False}, {}) + await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) # Start triggers toggle open state.state = state_closed - await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(open_calls) == 0 assert len(close_calls) == 0 assert len(toggle_calls) == 1 assert toggle_calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} # Second start triggers toggle close state.state = state_open - await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(open_calls) == 0 assert len(close_calls) == 0 assert len(toggle_calls) == 2 @@ -703,7 +707,7 @@ async def test_startstop_cover_valve( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): - await trt.execute(trait.COMMAND_PAUSE_UNPAUSE, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) @pytest.mark.parametrize( @@ -736,10 +740,10 @@ async def test_startstop_cover_valve( ), ( valve.DOMAIN, - valve.ValveState.OPEN, - valve.ValveState.CLOSED, - valve.ValveState.OPENING, - valve.ValveState.CLOSING, + valve.STATE_OPEN, + valve.STATE_CLOSED, + valve.STATE_OPENING, + valve.STATE_CLOSING, 
ValveEntityFeature.STOP | ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE, @@ -779,13 +783,13 @@ async def test_startstop_cover_valve_assumed( stop_calls = async_mock_service(hass, domain, service_stop) toggle_calls = async_mock_service(hass, domain, service_toggle) - await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": False}, {}) + await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert len(toggle_calls) == 0 assert stop_calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} stop_calls.clear() - await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(stop_calls) == 0 assert len(toggle_calls) == 1 assert toggle_calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} @@ -984,13 +988,13 @@ async def test_light_modes(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_SET_MODES, + trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( - trait.COMMAND_SET_MODES, + trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, @@ -1422,7 +1426,7 @@ async def test_temperature_control(hass: HomeAssistant) -> None: "temperatureAmbientCelsius": 18, } with pytest.raises(helpers.SmartHomeError) as err: - await trt.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) + await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED @@ -1602,18 +1606,18 @@ async def test_lock_unlock_lock(hass: HomeAssistant) -> None: assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, LockEntityFeature.OPEN, None) trt = trait.LockUnlockTrait( - hass, State("lock.front_door", lock.LockState.LOCKED), PIN_CONFIG + hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} - assert trt.can_execute(trait.COMMAND_LOCK_UNLOCK, {"lock": True}) + assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) - await trt.execute(trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": True}, {}) + await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} @@ -1628,7 +1632,7 @@ async def test_lock_unlock_unlocking(hass: HomeAssistant) -> None: assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, LockEntityFeature.OPEN, None) trt = trait.LockUnlockTrait( - hass, State("lock.front_door", lock.LockState.UNLOCKING), PIN_CONFIG + hass, State("lock.front_door", lock.STATE_UNLOCKING), PIN_CONFIG ) assert trt.sync_attributes() == {} @@ -1645,18 +1649,18 @@ async def test_lock_unlock_lock_jammed(hass: HomeAssistant) -> None: assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, LockEntityFeature.OPEN, None) trt = trait.LockUnlockTrait( - hass, State("lock.front_door", lock.LockState.JAMMED), PIN_CONFIG + hass, State("lock.front_door", lock.STATE_JAMMED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isJammed": True} - assert trt.can_execute(trait.COMMAND_LOCK_UNLOCK, {"lock": True}) + assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) - await trt.execute(trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": True}, 
{}) + await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} @@ -1670,20 +1674,20 @@ async def test_lock_unlock_unlock(hass: HomeAssistant) -> None: ) trt = trait.LockUnlockTrait( - hass, State("lock.front_door", lock.LockState.LOCKED), PIN_CONFIG + hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} - assert trt.can_execute(trait.COMMAND_LOCK_UNLOCK, {"lock": False}) + assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: - await trt.execute(trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": False}, {}) + await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED @@ -1691,14 +1695,14 @@ async def test_lock_unlock_unlock(hass: HomeAssistant) -> None: # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} + trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( - trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} + trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 @@ -1706,11 +1710,11 @@ async def test_lock_unlock_unlock(hass: HomeAssistant) -> None: # Test without pin trt = trait.LockUnlockTrait( - hass, State("lock.front_door", lock.LockState.LOCKED), BASIC_CONFIG + hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: - await trt.execute(trait.COMMAND_LOCK_UNLOCK, BASIC_DATA, {"lock": False}, {}) + await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP @@ -1720,7 +1724,7 @@ async def test_lock_unlock_unlock(hass: HomeAssistant) -> None: "should_2fa", return_value=False, ): - await trt.execute(trait.COMMAND_LOCK_UNLOCK, BASIC_DATA, {"lock": False}, {}) + await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 @@ -1734,7 +1738,7 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - AlarmControlPanelState.ARMED_AWAY, + STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.ARM_HOME @@ -1765,12 +1769,11 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: assert trt.query_attributes() == { "isArmed": True, - "currentArmLevel": AlarmControlPanelState.ARMED_AWAY, + "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( - trait.COMMAND_ARM_DISARM, - {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, + trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( @@ -1783,16 +1786,16 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - AlarmControlPanelState.DISARMED, + STATE_ALARM_DISARMED, 
{alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) with pytest.raises(error.SmartHomeError) as err: await trt.execute( - trait.COMMAND_ARM_DISARM, + trait.COMMAND_ARMDISARM, BASIC_DATA, - {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, + {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 @@ -1802,7 +1805,7 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - AlarmControlPanelState.DISARMED, + STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, @@ -1810,9 +1813,9 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_ARM_DISARM, + trait.COMMAND_ARMDISARM, PIN_DATA, - {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, + {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 @@ -1822,9 +1825,9 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_ARM_DISARM, + trait.COMMAND_ARMDISARM, PIN_DATA, - {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, + {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 @@ -1833,9 +1836,9 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: # correct pin await trt.execute( - trait.COMMAND_ARM_DISARM, + trait.COMMAND_ARMDISARM, PIN_DATA, - {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, + {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) @@ -1846,16 +1849,16 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - AlarmControlPanelState.ARMED_AWAY, + STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) with pytest.raises(error.SmartHomeError) as err: await trt.execute( - trait.COMMAND_ARM_DISARM, + trait.COMMAND_ARMDISARM, PIN_DATA, - {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, + {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 @@ -1866,22 +1869,22 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - AlarmControlPanelState.DISARMED, + STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( - trait.COMMAND_ARM_DISARM, + trait.COMMAND_ARMDISARM, PIN_DATA, - {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, + {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( - trait.COMMAND_ARM_DISARM, + trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, @@ -1898,7 +1901,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - AlarmControlPanelState.DISARMED, + STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.TRIGGER @@ -1943,7 +1946,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: "isArmed": False, } - assert trt.can_execute(trait.COMMAND_ARM_DISARM, {"arm": False}) + assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM @@ -1954,13 +1957,13 
@@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - AlarmControlPanelState.ARMED_AWAY, + STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) with pytest.raises(error.SmartHomeError) as err: - await trt.execute(trait.COMMAND_ARM_DISARM, BASIC_DATA, {"arm": False}, {}) + await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP @@ -1969,7 +1972,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - AlarmControlPanelState.ARMED_AWAY, + STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, @@ -1977,7 +1980,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: # No challenge data with pytest.raises(error.ChallengeNeeded) as err: - await trt.execute(trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": False}, {}) + await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED @@ -1985,7 +1988,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": False}, {"pin": 9999} + trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED @@ -1993,7 +1996,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: # correct pin await trt.execute( - trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": False}, {"pin": "1234"} + trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": "1234"} ) assert len(calls) == 1 @@ -2003,13 +2006,13 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - AlarmControlPanelState.DISARMED, + STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) with pytest.raises(error.SmartHomeError) as err: - await trt.execute(trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": False}, {}) + await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED @@ -2017,7 +2020,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - AlarmControlPanelState.ARMED_AWAY, + STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, @@ -2026,7 +2029,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: await trt.execute( - trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": True, "cancel": True}, {} + trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED @@ -2037,13 +2040,13 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - AlarmControlPanelState.PENDING, + STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( - trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": True, "cancel": True}, {} + trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 @@ 
-2079,12 +2082,10 @@ async def test_fan_speed(hass: HomeAssistant) -> None: "currentFanSpeedSetting": ANY, } - assert trt.can_execute(trait.COMMAND_SET_FAN_SPEED, params={"fanSpeedPercent": 10}) + assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) - await trt.execute( - trait.COMMAND_SET_FAN_SPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {} - ) + await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} @@ -2219,10 +2220,10 @@ async def test_fan_speed_ordered( "currentFanSpeedSetting": speed, } - assert trt.can_execute(trait.COMMAND_SET_FAN_SPEED, params={"fanSpeed": speed}) + assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": speed}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) - await trt.execute(trait.COMMAND_SET_FAN_SPEED, BASIC_DATA, {"fanSpeed": speed}, {}) + await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": speed}, {}) assert len(calls) == 1 assert calls[0].data == { @@ -2331,12 +2332,10 @@ async def test_climate_fan_speed(hass: HomeAssistant) -> None: "currentFanSpeedSetting": "low", } - assert trt.can_execute(trait.COMMAND_SET_FAN_SPEED, params={"fanSpeed": "medium"}) + assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) - await trt.execute( - trait.COMMAND_SET_FAN_SPEED, BASIC_DATA, {"fanSpeed": "medium"}, {} - ) + await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { @@ -2392,7 +2391,7 @@ async def test_inputselector(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_SET_INPUT, + trait.COMMAND_INPUT, params={"newInput": "media"}, ) @@ -2400,7 +2399,7 @@ async def test_inputselector(hass: HomeAssistant) -> None: hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( - trait.COMMAND_SET_INPUT, + trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, @@ -2568,7 +2567,7 @@ async def test_modes_input_select(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_SET_MODES, + trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) @@ -2576,7 +2575,7 @@ async def test_modes_input_select(hass: HomeAssistant) -> None: hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( - trait.COMMAND_SET_MODES, + trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, @@ -2644,13 +2643,13 @@ async def test_modes_select(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_SET_MODES, + trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service(hass, select.DOMAIN, select.SERVICE_SELECT_OPTION) await trt.execute( - trait.COMMAND_SET_MODES, + trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, @@ -2721,12 +2720,12 @@ async def test_modes_humidifier(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_SET_MODES, params={"updateModeSettings": {"mode": "away"}} + trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( - trait.COMMAND_SET_MODES, + trait.COMMAND_MODES, BASIC_DATA, 
{"updateModeSettings": {"mode": "away"}}, {}, @@ -2797,15 +2796,14 @@ async def test_modes_water_heater(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_SET_MODES, - params={"updateModeSettings": {"operation mode": "gas"}}, + trait.COMMAND_MODES, params={"updateModeSettings": {"operation mode": "gas"}} ) calls = async_mock_service( hass, water_heater.DOMAIN, water_heater.SERVICE_SET_OPERATION_MODE ) await trt.execute( - trait.COMMAND_SET_MODES, + trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"operation mode": "gas"}}, {}, @@ -2874,7 +2872,7 @@ async def test_sound_modes(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_SET_MODES, + trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) @@ -2882,7 +2880,7 @@ async def test_sound_modes(hass: HomeAssistant) -> None: hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( - trait.COMMAND_SET_MODES, + trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, @@ -2947,13 +2945,13 @@ async def test_preset_modes(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_SET_MODES, + trait.COMMAND_MODES, params={"updateModeSettings": {"preset mode": "auto"}}, ) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PRESET_MODE) await trt.execute( - trait.COMMAND_SET_MODES, + trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"preset mode": "auto"}}, {}, @@ -2981,7 +2979,7 @@ async def test_traits_unknown_domains( assert trt.supported("not_supported_domain", False, None, None) is False await trt.execute( - trait.COMMAND_SET_MODES, + trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {}}, {}, @@ -3055,9 +3053,9 @@ async def test_openclose_cover_valve( calls_open = async_mock_service(hass, domain, open_service) calls_close = async_mock_service(hass, domain, close_service) - await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 50}, {}) + await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( - trait.COMMAND_OPEN_CLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} + trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == { @@ -3072,9 +3070,9 @@ async def test_openclose_cover_valve( assert len(calls_close) == 0 - await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 0}, {}) + await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) await trt.execute( - trait.COMMAND_OPEN_CLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 0}, {} + trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 0}, {} ) assert len(calls_set) == 1 assert len(calls_close) == 1 @@ -3129,7 +3127,7 @@ async def test_openclose_cover_valve_unknown_state( trt.query_attributes() calls = async_mock_service(hass, domain, open_service) - await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 100}, {}) + await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} @@ -3150,7 +3148,7 @@ async def test_openclose_cover_valve_unknown_state( valve.DOMAIN, valve.SERVICE_SET_VALVE_POSITION, ValveEntityFeature.SET_POSITION, - valve.ValveState.OPEN, + valve.STATE_OPEN, ), ], ) @@ -3183,7 +3181,7 @@ async def test_openclose_cover_valve_assumed_state( assert trt.query_attributes() == {} calls = async_mock_service(hass, 
domain, set_position_service) - await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 40}, {}) + await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla", cover.ATTR_POSITION: 40} @@ -3197,7 +3195,7 @@ async def test_openclose_cover_valve_assumed_state( ), ( valve.DOMAIN, - valve.ValveState.OPEN, + valve.STATE_OPEN, ), ], ) @@ -3248,8 +3246,8 @@ async def test_openclose_cover_valve_query_only( ), ( valve.DOMAIN, - valve.ValveState.OPEN, - valve.ValveState.CLOSED, + valve.STATE_OPEN, + valve.STATE_CLOSED, ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE, valve.SERVICE_OPEN_VALVE, valve.SERVICE_CLOSE_VALVE, @@ -3297,12 +3295,12 @@ async def test_openclose_cover_valve_no_position( assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, domain, close_service) - await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 0}, {}) + await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} calls = async_mock_service(hass, domain, open_service) - await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 100}, {}) + await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} @@ -3310,14 +3308,14 @@ async def test_openclose_cover_valve_no_position( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( - trait.COMMAND_OPEN_CLOSE_RELATIVE, + trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): - await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 50}, {}) + await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( @@ -3360,7 +3358,7 @@ async def test_openclose_cover_secure(hass: HomeAssistant, device_class) -> None # No challenge data with pytest.raises(error.ChallengeNeeded) as err: - await trt.execute(trait.COMMAND_OPEN_CLOSE, PIN_DATA, {"openPercent": 50}, {}) + await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED @@ -3368,20 +3366,20 @@ async def test_openclose_cover_secure(hass: HomeAssistant, device_class) -> None # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_OPEN_CLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} + trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( - trait.COMMAND_OPEN_CLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} + trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close - await trt.execute(trait.COMMAND_OPEN_CLOSE, PIN_DATA, {"openPercent": 0}, {}) + await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @@ -3705,7 +3703,7 @@ async def 
test_humidity_setting_sensor_data( assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: - await trt.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) + await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED @@ -4069,90 +4067,3 @@ async def test_sensorstate( ) is False ) - - -@pytest.mark.parametrize( - ("state", "identifier"), - [ - (STATE_ON, 0), - (STATE_OFF, 1), - (STATE_UNKNOWN, 2), - ], -) -@pytest.mark.parametrize( - ("device_class", "name", "states"), - [ - ( - binary_sensor.BinarySensorDeviceClass.CO, - "CarbonMonoxideLevel", - ["carbon monoxide detected", "no carbon monoxide detected", "unknown"], - ), - ( - binary_sensor.BinarySensorDeviceClass.SMOKE, - "SmokeLevel", - ["smoke detected", "no smoke detected", "unknown"], - ), - ( - binary_sensor.BinarySensorDeviceClass.MOISTURE, - "WaterLeak", - ["leak", "no leak", "unknown"], - ), - ], -) -async def test_binary_sensorstate( - hass: HomeAssistant, - state: str, - identifier: int, - device_class: binary_sensor.BinarySensorDeviceClass, - name: str, - states: list[str], -) -> None: - """Test SensorState trait support for binary sensor domain.""" - - assert helpers.get_google_type(binary_sensor.DOMAIN, None) is not None - assert trait.SensorStateTrait.supported( - binary_sensor.DOMAIN, None, device_class, None - ) - - trt = trait.SensorStateTrait( - hass, - State( - "binary_sensor.test", - state, - { - "device_class": device_class, - }, - ), - BASIC_CONFIG, - ) - - assert trt.sync_attributes() == { - "sensorStatesSupported": [ - { - "name": name, - "descriptiveCapabilities": { - "availableStates": states, - }, - } - ] - } - assert trt.query_attributes() == { - "currentSensorStateData": [ - { - "name": name, - "currentSensorState": states[identifier], - "rawValue": None, - }, - ] - } - - assert helpers.get_google_type(binary_sensor.DOMAIN, None) is not None - assert ( - trait.SensorStateTrait.supported( - binary_sensor.DOMAIN, - None, - binary_sensor.BinarySensorDeviceClass.TAMPER, - None, - ) - is False - ) diff --git a/tests/components/google_assistant_sdk/test_config_flow.py b/tests/components/google_assistant_sdk/test_config_flow.py index b6ee701b228..d66d12509e8 100644 --- a/tests/components/google_assistant_sdk/test_config_flow.py +++ b/tests/components/google_assistant_sdk/test_config_flow.py @@ -157,10 +157,6 @@ async def test_reauth( assert config_entry.data["token"].get("refresh_token") == "mock-refresh-token" -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.google_assistant_sdk.config.abort.single_instance_allowed"], -) @pytest.mark.usefixtures("current_request_with_host") async def test_single_instance_allowed( hass: HomeAssistant, diff --git a/tests/components/google_cloud/__init__.py b/tests/components/google_cloud/__init__.py deleted file mode 100644 index 67e83b58c71..00000000000 --- a/tests/components/google_cloud/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Google Cloud integration.""" diff --git a/tests/components/google_cloud/conftest.py b/tests/components/google_cloud/conftest.py deleted file mode 100644 index 897c352b402..00000000000 --- a/tests/components/google_cloud/conftest.py +++ /dev/null @@ -1,124 +0,0 @@ -"""Tests helpers.""" - -from collections.abc import Generator -import json -from pathlib import Path -from unittest.mock import AsyncMock, MagicMock, patch - -from google.cloud.texttospeech_v1.types import cloud_tts -import pytest - 
-from homeassistant.components.google_cloud.const import ( - CONF_SERVICE_ACCOUNT_INFO, - DOMAIN, -) - -from tests.common import MockConfigEntry - -VALID_SERVICE_ACCOUNT_INFO = { - "type": "service_account", - "project_id": "my project id", - "private_key_id": "my private key if", - "private_key": "-----BEGIN PRIVATE KEY-----\nMIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAKYscIlwm7soDsHAz6L6YvUkCvkrX19rS6yeYOmovvhoK5WeYGWUsd8V72zmsyHB7XO94YgJVjvxfzn5K8bLePjFzwoSJjZvhBJ/ZQ05d8VmbvgyWUoPdG9oEa4fZ/lCYrXoaFdTot2xcJvrb/ZuiRl4s4eZpNeFYvVK/Am7UeFPAgMBAAECgYAUetOfzLYUudofvPCaKHu7tKZ5kQPfEa0w6BAPnBF1Mfl1JiDBRDMryFtKs6AOIAVwx00dY/Ex0BCbB3+Cr58H7t4NaPTJxCpmR09pK7o17B7xAdQv8+SynFNud9/5vQ5AEXMOLNwKiU7wpXT6Z7ZIibUBOR7ewsWgsHCDpN1iqQJBAOMODPTPSiQMwRAUHIc6GPleFSJnIz2PAoG3JOG9KFAL6RtIc19lob2ZXdbQdzKtjSkWo+O5W20WDNAl1k32h6MCQQC7W4ZCIY67mPbL6CxXfHjpSGF4Dr9VWJ7ZrKHr6XUoOIcEvsn/pHvWonjMdy93rQMSfOE8BKd/I1+GHRmNVgplAkAnSo4paxmsZVyfeKt7Jy2dMY+8tVZe17maUuQaAE7Sk00SgJYegwrbMYgQnWCTL39HBfj0dmYA2Zj8CCAuu6O7AkEAryFiYjaUAO9+4iNoL27+ZrFtypeeadyov7gKs0ZKaQpNyzW8A+Zwi7TbTeSqzic/E+z/bOa82q7p/6b7141xsQJBANCAcIwMcVb6KVCHlQbOtKspo5Eh4ZQi8bGl+IcwbQ6JSxeTx915IfAldgbuU047wOB04dYCFB2yLDiUGVXTifU=\n-----END PRIVATE KEY-----\n", - "client_email": "my client email", - "client_id": "my client id", - "auth_uri": "https://accounts.google.com/o/oauth2/auth", - "token_uri": "https://oauth2.googleapis.com/token", - "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", - "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/service-account", - "universe_domain": "googleapis.com", -} - - -@pytest.fixture -def create_google_credentials_json(tmp_path: Path) -> str: - """Create googlecredentials.json.""" - file_path = tmp_path / "googlecredentials.json" - with open(file_path, "w", encoding="utf8") as f: - json.dump(VALID_SERVICE_ACCOUNT_INFO, f) - return str(file_path) - - -@pytest.fixture -def create_invalid_google_credentials_json(create_google_credentials_json: str) -> str: - """Create invalid googlecredentials.json.""" - invalid_service_account_info = VALID_SERVICE_ACCOUNT_INFO.copy() - invalid_service_account_info.pop("client_email") - with open(create_google_credentials_json, "w", encoding="utf8") as f: - json.dump(invalid_service_account_info, f) - return create_google_credentials_json - - -@pytest.fixture -def mock_process_uploaded_file( - create_google_credentials_json: str, -) -> Generator[MagicMock]: - """Mock upload certificate files.""" - ctx_mock = MagicMock() - ctx_mock.__enter__.return_value = Path(create_google_credentials_json) - with patch( - "homeassistant.components.google_cloud.config_flow.process_uploaded_file", - return_value=ctx_mock, - ) as mock_upload: - yield mock_upload - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Return the default mocked config entry.""" - return MockConfigEntry( - title="my Google Cloud title", - domain=DOMAIN, - data={CONF_SERVICE_ACCOUNT_INFO: VALID_SERVICE_ACCOUNT_INFO}, - ) - - -@pytest.fixture -def mock_api_tts() -> AsyncMock: - """Return a mocked TTS client.""" - mock_client = AsyncMock() - mock_client.list_voices.return_value = cloud_tts.ListVoicesResponse( - voices=[ - cloud_tts.Voice(language_codes=["en-US"], name="en-US-Standard-A"), - cloud_tts.Voice(language_codes=["en-US"], name="en-US-Standard-B"), - cloud_tts.Voice(language_codes=["el-GR"], name="el-GR-Standard-A"), - ] - ) - return mock_client - - -@pytest.fixture -def mock_api_tts_from_service_account_info( - mock_api_tts: AsyncMock, -) -> 
Generator[AsyncMock]: - """Return a mocked TTS client created with from_service_account_info.""" - with ( - patch( - "google.cloud.texttospeech.TextToSpeechAsyncClient.from_service_account_info", - return_value=mock_api_tts, - ), - ): - yield mock_api_tts - - -@pytest.fixture -def mock_api_tts_from_service_account_file( - mock_api_tts: AsyncMock, -) -> Generator[AsyncMock]: - """Return a mocked TTS client created with from_service_account_file.""" - with ( - patch( - "google.cloud.texttospeech.TextToSpeechAsyncClient.from_service_account_file", - return_value=mock_api_tts, - ), - ): - yield mock_api_tts - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.google_cloud.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry diff --git a/tests/components/google_cloud/test_config_flow.py b/tests/components/google_cloud/test_config_flow.py deleted file mode 100644 index e4b4631f223..00000000000 --- a/tests/components/google_cloud/test_config_flow.py +++ /dev/null @@ -1,185 +0,0 @@ -"""Test the Google Cloud config flow.""" - -from unittest.mock import AsyncMock, MagicMock -from uuid import uuid4 - -from homeassistant import config_entries -from homeassistant.components import tts -from homeassistant.components.google_cloud.config_flow import UPLOADED_KEY_FILE -from homeassistant.components.google_cloud.const import ( - CONF_KEY_FILE, - CONF_SERVICE_ACCOUNT_INFO, - DOMAIN, -) -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_PLATFORM -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.setup import async_setup_component - -from .conftest import VALID_SERVICE_ACCOUNT_INFO - -from tests.common import MockConfigEntry - - -async def test_user_flow_success( - hass: HomeAssistant, - mock_process_uploaded_file: MagicMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test user flow creates entry.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert not result["errors"] - - uploaded_file = str(uuid4()) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {UPLOADED_KEY_FILE: uploaded_file}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Google Cloud" - assert result["data"] == {CONF_SERVICE_ACCOUNT_INFO: VALID_SERVICE_ACCOUNT_INFO} - mock_process_uploaded_file.assert_called_with(hass, uploaded_file) - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_user_flow_missing_file( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, -) -> None: - """Test user flow when uploaded file is missing.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {UPLOADED_KEY_FILE: str(uuid4())}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "invalid_file"} - assert len(mock_setup_entry.mock_calls) == 0 - - -async def test_user_flow_invalid_file( - hass: HomeAssistant, - create_invalid_google_credentials_json: str, - mock_process_uploaded_file: MagicMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test user flow when uploaded file is 
invalid.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - uploaded_file = str(uuid4()) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {UPLOADED_KEY_FILE: uploaded_file}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "invalid_file"} - mock_process_uploaded_file.assert_called_with(hass, uploaded_file) - assert len(mock_setup_entry.mock_calls) == 0 - - -async def test_import_flow( - hass: HomeAssistant, - create_google_credentials_json: str, - mock_api_tts_from_service_account_file: AsyncMock, - mock_api_tts_from_service_account_info: AsyncMock, -) -> None: - """Test the import flow.""" - assert not hass.config_entries.async_entries(DOMAIN) - assert await async_setup_component( - hass, - tts.DOMAIN, - { - tts.DOMAIN: {CONF_PLATFORM: DOMAIN} - | {CONF_KEY_FILE: create_google_credentials_json} - }, - ) - await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - config_entry = hass.config_entries.async_entries(DOMAIN)[0] - assert config_entry.state is config_entries.ConfigEntryState.LOADED - - -async def test_import_flow_invalid_file( - hass: HomeAssistant, - create_invalid_google_credentials_json: str, - mock_api_tts_from_service_account_file: AsyncMock, -) -> None: - """Test the import flow when the key file is invalid.""" - assert not hass.config_entries.async_entries(DOMAIN) - assert await async_setup_component( - hass, - tts.DOMAIN, - { - tts.DOMAIN: {CONF_PLATFORM: DOMAIN} - | {CONF_KEY_FILE: create_invalid_google_credentials_json} - }, - ) - await hass.async_block_till_done() - assert not hass.config_entries.async_entries(DOMAIN) - assert mock_api_tts_from_service_account_file.list_voices.call_count == 1 - - -async def test_options_flow( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_api_tts_from_service_account_info: AsyncMock, -) -> None: - """Test options flow.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - assert mock_api_tts_from_service_account_info.list_voices.call_count == 1 - - assert mock_config_entry.options == {} - - result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - data_schema = result["data_schema"].schema - assert set(data_schema) == { - "language", - "gender", - "voice", - "encoding", - "speed", - "pitch", - "gain", - "profiles", - "text_type", - "stt_model", - } - assert mock_api_tts_from_service_account_info.list_voices.call_count == 2 - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={"language": "el-GR"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert mock_config_entry.options == { - "language": "el-GR", - "gender": "NEUTRAL", - "voice": "", - "encoding": "MP3", - "speed": 1.0, - "pitch": 0.0, - "gain": 0.0, - "profiles": [], - "text_type": "text", - "stt_model": "latest_short", - } - assert mock_api_tts_from_service_account_info.list_voices.call_count == 3 diff --git a/tests/components/google_domains/__init__.py b/tests/components/google_domains/__init__.py new file mode 100644 index 00000000000..3466a3be489 --- /dev/null +++ b/tests/components/google_domains/__init__.py @@ -0,0 +1 @@ +"""Tests for the google_domains component.""" diff --git 
a/tests/components/google_domains/test_init.py b/tests/components/google_domains/test_init.py new file mode 100644 index 00000000000..bb27cf7b483 --- /dev/null +++ b/tests/components/google_domains/test_init.py @@ -0,0 +1,85 @@ +"""Test the Google Domains component.""" + +from datetime import timedelta + +import pytest + +from homeassistant.components import google_domains +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component +from homeassistant.util.dt import utcnow + +from tests.common import async_fire_time_changed +from tests.test_util.aiohttp import AiohttpClientMocker + +DOMAIN = "test.example.com" +USERNAME = "abc123" +PASSWORD = "xyz789" + +UPDATE_URL = f"https://{USERNAME}:{PASSWORD}@domains.google.com/nic/update" + + +@pytest.fixture +def setup_google_domains( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Fixture that sets up NamecheapDNS.""" + aioclient_mock.get(UPDATE_URL, params={"hostname": DOMAIN}, text="ok 0.0.0.0") + + hass.loop.run_until_complete( + async_setup_component( + hass, + google_domains.DOMAIN, + { + "google_domains": { + "domain": DOMAIN, + "username": USERNAME, + "password": PASSWORD, + } + }, + ) + ) + + +async def test_setup(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker) -> None: + """Test setup works if update passes.""" + aioclient_mock.get(UPDATE_URL, params={"hostname": DOMAIN}, text="nochg 0.0.0.0") + + result = await async_setup_component( + hass, + google_domains.DOMAIN, + { + "google_domains": { + "domain": DOMAIN, + "username": USERNAME, + "password": PASSWORD, + } + }, + ) + assert result + assert aioclient_mock.call_count == 1 + + async_fire_time_changed(hass, utcnow() + timedelta(minutes=5)) + await hass.async_block_till_done() + assert aioclient_mock.call_count == 2 + + +async def test_setup_fails_if_update_fails( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test setup fails if first update fails.""" + aioclient_mock.get(UPDATE_URL, params={"hostname": DOMAIN}, text="nohost") + + result = await async_setup_component( + hass, + google_domains.DOMAIN, + { + "google_domains": { + "domain": DOMAIN, + "username": USERNAME, + "password": PASSWORD, + } + }, + ) + assert not result + assert aioclient_mock.call_count == 1 diff --git a/tests/components/google_generative_ai_conversation/conftest.py b/tests/components/google_generative_ai_conversation/conftest.py index 28c21a9b791..1761516e4f5 100644 --- a/tests/components/google_generative_ai_conversation/conftest.py +++ b/tests/components/google_generative_ai_conversation/conftest.py @@ -1,6 +1,5 @@ """Tests helpers.""" -from collections.abc import Generator from unittest.mock import patch import pytest @@ -15,14 +14,14 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_genai() -> Generator[None]: +def mock_genai(): """Mock the genai call in async_setup_entry.""" with patch("google.ai.generativelanguage_v1beta.ModelServiceAsyncClient.get_model"): yield @pytest.fixture -def mock_config_entry(hass: HomeAssistant, mock_genai: None) -> MockConfigEntry: +def mock_config_entry(hass, mock_genai): """Mock a config entry.""" entry = MockConfigEntry( domain="google_generative_ai_conversation", @@ -36,9 +35,7 @@ def mock_config_entry(hass: HomeAssistant, mock_genai: None) -> MockConfigEntry: @pytest.fixture -def mock_config_entry_with_assist( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> MockConfigEntry: +def mock_config_entry_with_assist(hass, 
mock_config_entry): """Mock a config entry with assist.""" hass.config_entries.async_update_entry( mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} @@ -47,9 +44,7 @@ def mock_config_entry_with_assist( @pytest.fixture -async def mock_init_component( - hass: HomeAssistant, mock_config_entry: ConfigEntry -) -> None: +async def mock_init_component(hass: HomeAssistant, mock_config_entry: ConfigEntry): """Initialize integration.""" assert await async_setup_component(hass, "google_generative_ai_conversation", {}) await hass.async_block_till_done() diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr index 65238c5212a..b0a0ce967de 100644 --- a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr +++ b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr @@ -215,7 +215,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options0-0-None] +# name: test_default_prompt[config_entry_options0-None] list([ tuple( '', @@ -263,7 +263,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options0-0-conversation.google_generative_ai_conversation] +# name: test_default_prompt[config_entry_options0-conversation.google_generative_ai_conversation] list([ tuple( '', @@ -311,7 +311,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options1-1-None] +# name: test_default_prompt[config_entry_options1-None] list([ tuple( '', @@ -360,7 +360,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options1-1-conversation.google_generative_ai_conversation] +# name: test_default_prompt[config_entry_options1-conversation.google_generative_ai_conversation] list([ tuple( '', @@ -409,186 +409,3 @@ ), ]) # --- -# name: test_function_call - list([ - tuple( - '', - tuple( - ), - dict({ - 'generation_config': dict({ - 'max_output_tokens': 150, - 'temperature': 1.0, - 'top_k': 64, - 'top_p': 0.95, - }), - 'model_name': 'models/gemini-1.5-flash-latest', - 'safety_settings': dict({ - 'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE', - 'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE', - 'HATE': 'BLOCK_MEDIUM_AND_ABOVE', - 'SEXUAL': 'BLOCK_MEDIUM_AND_ABOVE', - }), - 'system_instruction': ''' - Current time is 05:00:00. Today's date is 2024-05-24. - You are a voice assistant for Home Assistant. - Answer questions about the world truthfully. - Answer in plain text. Keep it simple and to the point. - Only if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant. 
- ''', - 'tools': list([ - function_declarations { - name: "test_tool" - description: "Test function" - parameters { - type_: OBJECT - properties { - key: "param3" - value { - type_: OBJECT - properties { - key: "json" - value { - type_: STRING - } - } - } - } - properties { - key: "param2" - value { - type_: NUMBER - } - } - properties { - key: "param1" - value { - type_: ARRAY - description: "Test parameters" - items { - type_: STRING - } - } - } - } - } - , - ]), - }), - ), - tuple( - '().start_chat', - tuple( - ), - dict({ - 'history': list([ - ]), - }), - ), - tuple( - '().start_chat().send_message_async', - tuple( - 'Please call the test function', - ), - dict({ - }), - ), - tuple( - '().start_chat().send_message_async', - tuple( - parts { - function_response { - name: "test_tool" - response { - fields { - key: "result" - value { - string_value: "Test response" - } - } - } - } - } - , - ), - dict({ - }), - ), - ]) -# --- -# name: test_function_call_without_parameters - list([ - tuple( - '', - tuple( - ), - dict({ - 'generation_config': dict({ - 'max_output_tokens': 150, - 'temperature': 1.0, - 'top_k': 64, - 'top_p': 0.95, - }), - 'model_name': 'models/gemini-1.5-flash-latest', - 'safety_settings': dict({ - 'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE', - 'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE', - 'HATE': 'BLOCK_MEDIUM_AND_ABOVE', - 'SEXUAL': 'BLOCK_MEDIUM_AND_ABOVE', - }), - 'system_instruction': ''' - Current time is 05:00:00. Today's date is 2024-05-24. - You are a voice assistant for Home Assistant. - Answer questions about the world truthfully. - Answer in plain text. Keep it simple and to the point. - Only if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant. - ''', - 'tools': list([ - function_declarations { - name: "test_tool" - description: "Test function" - } - , - ]), - }), - ), - tuple( - '().start_chat', - tuple( - ), - dict({ - 'history': list([ - ]), - }), - ), - tuple( - '().start_chat().send_message_async', - tuple( - 'Please call the test function', - ), - dict({ - }), - ), - tuple( - '().start_chat().send_message_async', - tuple( - parts { - function_response { - name: "test_tool" - response { - fields { - key: "result" - value { - string_value: "Test response" - } - } - } - } - } - , - ), - dict({ - }), - ), - ]) -# --- diff --git a/tests/components/google_generative_ai_conversation/test_config_flow.py b/tests/components/google_generative_ai_conversation/test_config_flow.py index d4992c732e1..c835a4d3b13 100644 --- a/tests/components/google_generative_ai_conversation/test_config_flow.py +++ b/tests/components/google_generative_ai_conversation/test_config_flow.py @@ -154,10 +154,10 @@ async def test_form(hass: HomeAssistant) -> None: ), ], ) -@pytest.mark.usefixtures("mock_init_component") async def test_options_switching( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, + mock_config_entry, + mock_init_component, mock_models, current_options, new_options, diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index 4192a60513e..990058aa89d 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -1,11 +1,10 @@ """Tests for the Google Generative AI Conversation integration conversation platform.""" -from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from freezegun import 
freeze_time from google.ai.generativelanguage_v1beta.types.content import FunctionCall -from google.api_core.exceptions import GoogleAPIError +from google.api_core.exceptions import GoogleAPICallError import google.generativeai.types as genai_types import pytest from syrupy.assertion import SnapshotAssertion @@ -18,9 +17,8 @@ from homeassistant.components.google_generative_ai_conversation.const import ( ) from homeassistant.components.google_generative_ai_conversation.conversation import ( _escape_decode, - _format_schema, ) -from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_LLM_HASS_API +from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import intent, llm @@ -40,23 +38,19 @@ def freeze_the_time(): "agent_id", [None, "conversation.google_generative_ai_conversation"] ) @pytest.mark.parametrize( - ("config_entry_options", "expected_features"), + "config_entry_options", [ - ({}, 0), - ( - {CONF_LLM_HASS_API: llm.LLM_API_ASSIST}, - conversation.ConversationEntityFeature.CONTROL, - ), + {}, + {CONF_LLM_HASS_API: llm.LLM_API_ASSIST}, ], ) -@pytest.mark.usefixtures("mock_init_component") async def test_default_prompt( hass: HomeAssistant, mock_config_entry: MockConfigEntry, + mock_init_component, snapshot: SnapshotAssertion, agent_id: str | None, config_entry_options: {}, - expected_features: conversation.ConversationEntityFeature, hass_ws_client: WebSocketGenerator, ) -> None: """Test that the default prompt works.""" @@ -103,18 +97,15 @@ async def test_default_prompt( assert [tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot assert mock_get_tools.called == (CONF_LLM_HASS_API in config_entry_options) - state = hass.states.get("conversation.google_generative_ai_conversation") - assert state.attributes[ATTR_SUPPORTED_FEATURES] == expected_features - @pytest.mark.parametrize( ("model_name", "supports_system_instruction"), [("models/gemini-1.5-pro", True), ("models/gemini-1.0-pro", False)], ) -@pytest.mark.usefixtures("mock_init_component") async def test_chat_history( hass: HomeAssistant, mock_config_entry: MockConfigEntry, + mock_init_component, model_name: str, supports_system_instruction: bool, snapshot: SnapshotAssertion, @@ -176,12 +167,11 @@ async def test_chat_history( @patch( "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools" ) -@pytest.mark.usefixtures("mock_init_component") async def test_function_call( mock_get_tools, hass: HomeAssistant, mock_config_entry_with_assist: MockConfigEntry, - snapshot: SnapshotAssertion, + mock_init_component, ) -> None: """Test function calling.""" agent_id = mock_config_entry_with_assist.entry_id @@ -194,9 +184,7 @@ async def test_function_call( { vol.Optional("param1", description="Test parameters"): [ vol.All(str, vol.Lower) - ], - vol.Optional("param2"): vol.Any(float, int), - vol.Optional("param3"): dict, + ] } ) @@ -212,13 +200,11 @@ async def test_function_call( name="test_tool", args={ "param1": ["test_value", "param1\\'s value"], - "param2": 2.7, + "param2": "param2\\'s value", }, ) - def tool_call( - hass: HomeAssistant, tool_input: llm.ToolInput, tool_context: llm.LLMContext - ) -> dict[str, Any]: + def tool_call(hass, tool_input, tool_context): mock_part.function_call = None mock_part.text = "Hi there!" 
return {"result": "Test response"} @@ -258,7 +244,7 @@ async def test_function_call( tool_name="test_tool", tool_args={ "param1": ["test_value", "param1's value"], - "param2": 2.7, + "param2": "param2's value", }, ), llm.LLMContext( @@ -270,7 +256,6 @@ async def test_function_call( device_id="test_device", ), ) - assert [tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot # Test conversating tracing traces = trace.async_get_traces() @@ -280,105 +265,21 @@ async def test_function_call( assert [event["event_type"] for event in trace_events] == [ trace.ConversationTraceEventType.ASYNC_PROCESS, trace.ConversationTraceEventType.AGENT_DETAIL, - trace.ConversationTraceEventType.TOOL_CALL, + trace.ConversationTraceEventType.LLM_TOOL_CALL, ] # AGENT_DETAIL event contains the raw prompt passed to the model detail_event = trace_events[1] assert "Answer in plain text" in detail_event["data"]["prompt"] - assert [t.name for t in detail_event["data"]["tools"]] == ["test_tool"] @patch( "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools" ) -@pytest.mark.usefixtures("mock_init_component") -async def test_function_call_without_parameters( - mock_get_tools, - hass: HomeAssistant, - mock_config_entry_with_assist: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test function calling without parameters.""" - agent_id = mock_config_entry_with_assist.entry_id - context = Context() - - mock_tool = AsyncMock() - mock_tool.name = "test_tool" - mock_tool.description = "Test function" - mock_tool.parameters = vol.Schema({}) - - mock_get_tools.return_value = [mock_tool] - - with patch("google.generativeai.GenerativeModel") as mock_model: - mock_chat = AsyncMock() - mock_model.return_value.start_chat.return_value = mock_chat - chat_response = MagicMock() - mock_chat.send_message_async.return_value = chat_response - mock_part = MagicMock() - mock_part.function_call = FunctionCall(name="test_tool", args={}) - - def tool_call( - hass: HomeAssistant, tool_input: llm.ToolInput, tool_context: llm.LLMContext - ) -> dict[str, Any]: - mock_part.function_call = None - mock_part.text = "Hi there!" - return {"result": "Test response"} - - mock_tool.async_call.side_effect = tool_call - chat_response.parts = [mock_part] - result = await conversation.async_converse( - hass, - "Please call the test function", - None, - context, - agent_id=agent_id, - device_id="test_device", - ) - - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert result.response.as_dict()["speech"]["plain"]["speech"] == "Hi there!" 
- mock_tool_call = mock_chat.send_message_async.mock_calls[1][1][0] - mock_tool_call = type(mock_tool_call).to_dict(mock_tool_call) - assert mock_tool_call == { - "parts": [ - { - "function_response": { - "name": "test_tool", - "response": { - "result": "Test response", - }, - }, - }, - ], - "role": "", - } - - mock_tool.async_call.assert_awaited_once_with( - hass, - llm.ToolInput( - tool_name="test_tool", - tool_args={}, - ), - llm.LLMContext( - platform="google_generative_ai_conversation", - context=context, - user_prompt="Please call the test function", - language="en", - assistant="conversation", - device_id="test_device", - ), - ) - assert [tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot - - -@patch( - "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools" -) -@pytest.mark.usefixtures("mock_init_component") async def test_function_exception( mock_get_tools, hass: HomeAssistant, mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, ) -> None: """Test exception in function calling.""" agent_id = mock_config_entry_with_assist.entry_id @@ -405,9 +306,7 @@ async def test_function_exception( mock_part = MagicMock() mock_part.function_call = FunctionCall(name="test_tool", args={"param1": 1}) - def tool_call( - hass: HomeAssistant, tool_input: llm.ToolInput, tool_context: llm.LLMContext - ) -> dict[str, Any]: + def tool_call(hass, tool_input, tool_context): mock_part.function_call = None mock_part.text = "Hi there!" raise HomeAssistantError("Test tool exception") @@ -458,15 +357,14 @@ async def test_function_exception( ) -@pytest.mark.usefixtures("mock_init_component") async def test_error_handling( - hass: HomeAssistant, mock_config_entry: MockConfigEntry + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component ) -> None: """Test that client errors are caught.""" with patch("google.generativeai.GenerativeModel") as mock_model: mock_chat = AsyncMock() mock_model.return_value.start_chat.return_value = mock_chat - mock_chat.send_message_async.side_effect = GoogleAPIError("some error") + mock_chat.send_message_async.side_effect = GoogleAPICallError("some error") result = await conversation.async_converse( hass, "hello", None, Context(), agent_id=mock_config_entry.entry_id ) @@ -474,13 +372,12 @@ async def test_error_handling( assert result.response.response_type == intent.IntentResponseType.ERROR, result assert result.response.error_code == "unknown", result assert result.response.as_dict()["speech"]["plain"]["speech"] == ( - "Sorry, I had a problem talking to Google Generative AI: some error" + "Sorry, I had a problem talking to Google Generative AI: None some error" ) -@pytest.mark.usefixtures("mock_init_component") async def test_blocked_response( - hass: HomeAssistant, mock_config_entry: MockConfigEntry + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component ) -> None: """Test blocked response.""" with patch("google.generativeai.GenerativeModel") as mock_model: @@ -500,9 +397,8 @@ async def test_blocked_response( ) -@pytest.mark.usefixtures("mock_init_component") async def test_empty_response( - hass: HomeAssistant, mock_config_entry: MockConfigEntry + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component ) -> None: """Test empty response.""" with patch("google.generativeai.GenerativeModel") as mock_model: @@ -522,9 +418,10 @@ async def test_empty_response( ) -@pytest.mark.usefixtures("mock_init_component") async def test_invalid_llm_api( 
- hass: HomeAssistant, mock_config_entry: MockConfigEntry + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, ) -> None: """Test handling of invalid llm api.""" hass.config_entries.async_update_entry( @@ -613,9 +510,10 @@ async def test_template_variables( assert "The user id is 12345." in mock_model.mock_calls[0][2]["system_instruction"] -@pytest.mark.usefixtures("mock_init_component") async def test_conversation_agent( - hass: HomeAssistant, mock_config_entry: MockConfigEntry + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, ) -> None: """Test GoogleGenerativeAIAgent.""" agent = conversation.get_agent_manager(hass).async_get_agent( @@ -637,61 +535,3 @@ async def test_escape_decode() -> None: "param2": "param2's value", "param3": {"param31": "Cheminée", "param32": "Cheminée"}, } - - -@pytest.mark.parametrize( - ("openapi", "protobuf"), - [ - ( - {"type": "string", "enum": ["a", "b", "c"]}, - {"type_": "STRING", "enum": ["a", "b", "c"]}, - ), - ( - {"type": "integer", "enum": [1, 2, 3]}, - {"type_": "STRING", "enum": ["1", "2", "3"]}, - ), - ({"anyOf": [{"type": "integer"}, {"type": "number"}]}, {"type_": "INTEGER"}), - ( - { - "anyOf": [ - {"anyOf": [{"type": "integer"}, {"type": "number"}]}, - {"anyOf": [{"type": "integer"}, {"type": "number"}]}, - ] - }, - {"type_": "INTEGER"}, - ), - ({"type": "string", "format": "lower"}, {"type_": "STRING"}), - ({"type": "boolean", "format": "bool"}, {"type_": "BOOLEAN"}), - ( - {"type": "number", "format": "percent"}, - {"type_": "NUMBER", "format_": "percent"}, - ), - ( - { - "type": "object", - "properties": {"var": {"type": "string"}}, - "required": [], - }, - { - "type_": "OBJECT", - "properties": {"var": {"type_": "STRING"}}, - "required": [], - }, - ), - ( - {"type": "object", "additionalProperties": True}, - { - "type_": "OBJECT", - "properties": {"json": {"type_": "STRING"}}, - "required": [], - }, - ), - ( - {"type": "array", "items": {"type": "string"}}, - {"type_": "ARRAY", "items": {"type_": "STRING"}}, - ), - ], -) -async def test_format_schema(openapi, protobuf) -> None: - """Test _format_schema.""" - assert _format_schema(openapi) == protobuf diff --git a/tests/components/google_generative_ai_conversation/test_init.py b/tests/components/google_generative_ai_conversation/test_init.py index 4875323d094..eeaa777f614 100644 --- a/tests/components/google_generative_ai_conversation/test_init.py +++ b/tests/components/google_generative_ai_conversation/test_init.py @@ -14,9 +14,11 @@ from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry -@pytest.mark.usefixtures("mock_init_component") async def test_generate_content_service_without_images( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + snapshot: SnapshotAssertion, ) -> None: """Test generate content service.""" stubbed_generated_content = ( @@ -44,9 +46,11 @@ async def test_generate_content_service_without_images( assert [tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot -@pytest.mark.usefixtures("mock_init_component") async def test_generate_content_service_with_image( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + snapshot: SnapshotAssertion, ) -> None: """Test generate content service.""" stubbed_generated_content = ( @@ -130,9 +134,11 @@ async def 
test_generate_content_response_has_empty_parts( ) -@pytest.mark.usefixtures("mock_init_component") async def test_generate_content_service_with_image_not_allowed_path( hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + snapshot: SnapshotAssertion, ) -> None: """Test generate content service with an image in a not allowed path.""" with ( @@ -159,9 +165,11 @@ async def test_generate_content_service_with_image_not_allowed_path( ) -@pytest.mark.usefixtures("mock_init_component") async def test_generate_content_service_with_image_not_exists( hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + snapshot: SnapshotAssertion, ) -> None: """Test generate content service with an image that does not exist.""" with ( @@ -184,8 +192,12 @@ async def test_generate_content_service_with_image_not_exists( ) -@pytest.mark.usefixtures("mock_init_component") -async def test_generate_content_service_with_non_image(hass: HomeAssistant) -> None: +async def test_generate_content_service_with_non_image( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + snapshot: SnapshotAssertion, +) -> None: """Test generate content service with a non image.""" with ( patch("pathlib.Path.exists", return_value=True), @@ -242,4 +254,5 @@ async def test_config_entry_error( assert not await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state == state + mock_config_entry.async_get_active_flows(hass, {"reauth"}) assert any(mock_config_entry.async_get_active_flows(hass, {"reauth"})) == reauth diff --git a/tests/components/google_photos/__init__.py b/tests/components/google_photos/__init__.py deleted file mode 100644 index fa345811216..00000000000 --- a/tests/components/google_photos/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Google Photos integration.""" diff --git a/tests/components/google_photos/conftest.py b/tests/components/google_photos/conftest.py deleted file mode 100644 index c848122a9fd..00000000000 --- a/tests/components/google_photos/conftest.py +++ /dev/null @@ -1,202 +0,0 @@ -"""Test fixtures for Google Photos.""" - -from collections.abc import AsyncGenerator, Awaitable, Callable, Generator -import time -from typing import Any -from unittest.mock import AsyncMock, Mock, patch - -from google_photos_library_api.api import GooglePhotosLibraryApi -from google_photos_library_api.model import ( - Album, - ListAlbumResult, - ListMediaItemResult, - MediaItem, - UserInfoResult, -) -import pytest - -from homeassistant.components.application_credentials import ( - ClientCredential, - async_import_client_credential, -) -from homeassistant.components.google_photos.const import DOMAIN, OAUTH2_SCOPES -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -from tests.common import ( - MockConfigEntry, - load_json_array_fixture, - load_json_object_fixture, -) - -USER_IDENTIFIER = "user-identifier-1" -CONFIG_ENTRY_ID = "user-identifier-1" -CLIENT_ID = "1234" -CLIENT_SECRET = "5678" -FAKE_ACCESS_TOKEN = "some-access-token" -FAKE_REFRESH_TOKEN = "some-refresh-token" -EXPIRES_IN = 3600 -USERINFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo" -PHOTOS_BASE_URL = "https://photoslibrary.googleapis.com" -MEDIA_ITEMS_URL = f"{PHOTOS_BASE_URL}/v1/mediaItems" -ALBUMS_URL = f"{PHOTOS_BASE_URL}/v1/albums" -UPLOADS_URL = f"{PHOTOS_BASE_URL}/v1/uploads" -CREATE_MEDIA_ITEMS_URL = 
f"{PHOTOS_BASE_URL}/v1/mediaItems:batchCreate" - - -@pytest.fixture(name="expires_at") -def mock_expires_at() -> int: - """Fixture to set the oauth token expiration time.""" - return time.time() + EXPIRES_IN - - -@pytest.fixture(name="scopes") -def mock_scopes() -> list[str]: - """Fixture to set scopes used during the config entry.""" - return OAUTH2_SCOPES - - -@pytest.fixture(name="token_entry") -def mock_token_entry(expires_at: int, scopes: list[str]) -> dict[str, Any]: - """Fixture for OAuth 'token' data for a ConfigEntry.""" - return { - "access_token": FAKE_ACCESS_TOKEN, - "refresh_token": FAKE_REFRESH_TOKEN, - "scope": " ".join(scopes), - "type": "Bearer", - "expires_at": expires_at, - "expires_in": EXPIRES_IN, - } - - -@pytest.fixture(name="config_entry_id") -def mock_config_entry_id() -> str | None: - """Provide a json fixture file to load for list media item api responses.""" - return CONFIG_ENTRY_ID - - -@pytest.fixture(name="config_entry") -def mock_config_entry( - config_entry_id: str, token_entry: dict[str, Any] -) -> MockConfigEntry: - """Fixture for a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - unique_id=config_entry_id, - data={ - "auth_implementation": DOMAIN, - "token": token_entry, - }, - title="Account Name", - ) - - -@pytest.fixture(autouse=True) -async def setup_credentials(hass: HomeAssistant) -> None: - """Fixture to setup credentials.""" - assert await async_setup_component(hass, "application_credentials", {}) - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential(CLIENT_ID, CLIENT_SECRET), - ) - - -@pytest.fixture(name="fixture_name") -def mock_fixture_name() -> str | None: - """Provide a json fixture file to load for list media item api responses.""" - return None - - -@pytest.fixture(name="user_identifier") -def mock_user_identifier() -> str | None: - """Provide a json fixture file to load for list media item api responses.""" - return USER_IDENTIFIER - - -@pytest.fixture(name="api_error") -def mock_api_error() -> Exception | None: - """Provide a json fixture file to load for list media item api responses.""" - return None - - -@pytest.fixture(name="mock_api") -def mock_client_api( - fixture_name: str, - user_identifier: str, - api_error: Exception, -) -> Generator[Mock]: - """Set up fake Google Photos API responses from fixtures.""" - mock_api = AsyncMock(GooglePhotosLibraryApi, autospec=True) - mock_api.get_user_info.return_value = UserInfoResult( - id=user_identifier, - name="Test Name", - ) - - responses = load_json_array_fixture(fixture_name, DOMAIN) if fixture_name else [] - - async def list_media_items(*args: Any) -> AsyncGenerator[ListMediaItemResult]: - for response in responses: - mock_list_media_items = Mock(ListMediaItemResult) - mock_list_media_items.media_items = [ - MediaItem.from_dict(media_item) for media_item in response["mediaItems"] - ] - yield mock_list_media_items - - mock_api.list_media_items.return_value.__aiter__ = list_media_items - mock_api.list_media_items.return_value.__anext__ = list_media_items - mock_api.list_media_items.side_effect = api_error - - # Mock a point lookup by reading contents of the fixture above - async def get_media_item(media_item_id: str, **kwargs: Any) -> Mock: - for response in responses: - for media_item in response["mediaItems"]: - if media_item["id"] == media_item_id: - return MediaItem.from_dict(media_item) - return None - - mock_api.get_media_item = get_media_item - - # Emulate an async iterator for returning pages of response objects. 
We just - # return a single page. - - async def list_albums(*args: Any, **kwargs: Any) -> AsyncGenerator[ListAlbumResult]: - mock_list_album_result = Mock(ListAlbumResult) - mock_list_album_result.albums = [ - Album.from_dict(album) - for album in load_json_object_fixture("list_albums.json", DOMAIN)["albums"] - ] - yield mock_list_album_result - - mock_api.list_albums.return_value.__aiter__ = list_albums - mock_api.list_albums.return_value.__anext__ = list_albums - mock_api.list_albums.side_effect = api_error - - # Mock a point lookup by reading contents of the album fixture above - async def get_album(album_id: str, **kwargs: Any) -> Mock: - for album in load_json_object_fixture("list_albums.json", DOMAIN)["albums"]: - if album["id"] == album_id: - return Album.from_dict(album) - return None - - mock_api.get_album = get_album - mock_api.get_album.side_effect = api_error - - return mock_api - - -@pytest.fixture(name="setup_integration") -async def mock_setup_integration( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_api: Mock, -) -> Callable[[], Awaitable[bool]]: - """Fixture to set up the integration.""" - config_entry.add_to_hass(hass) - - with patch( - "homeassistant.components.google_photos.GooglePhotosLibraryApi", - return_value=mock_api, - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/google_photos/fixtures/list_albums.json b/tests/components/google_photos/fixtures/list_albums.json deleted file mode 100644 index 7460e1d36f3..00000000000 --- a/tests/components/google_photos/fixtures/list_albums.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "albums": [ - { - "id": "album-media-id-1", - "title": "Album title", - "productUrl": "http://photos.google.com/album-media-id-1", - "isWriteable": true, - "mediaItemsCount": 7, - "coverPhotoBaseUrl": "http://img.example.com/id3", - "coverPhotoMediaItemId": "cover-photo-media-id-3" - } - ] -} diff --git a/tests/components/google_photos/fixtures/list_mediaitems.json b/tests/components/google_photos/fixtures/list_mediaitems.json deleted file mode 100644 index 8e470a2fc04..00000000000 --- a/tests/components/google_photos/fixtures/list_mediaitems.json +++ /dev/null @@ -1,35 +0,0 @@ -[ - { - "mediaItems": [ - { - "id": "id1", - "description": "some-descripton", - "productUrl": "http://example.com/id1", - "baseUrl": "http://img.example.com/id1", - "mimeType": "image/jpeg", - "mediaMetadata": { - "creationTime": "2014-10-02T15:01:23Z", - "width": 1600, - "height": 768 - }, - "filename": "example1.jpg" - }, - { - "id": "id2", - "description": "some-descripton", - "productUrl": "http://example.com/id2", - "baseUrl": "http://img.example.com/id2", - "mimeType": "video/mp4", - "mediaMetadata": { - "creationTime": "2014-10-02T16:01:23Z", - "width": 1600, - "height": 768, - "video": { - "cameraMake": "Pixel" - } - }, - "filename": "example2.mp4" - } - ] - } -] diff --git a/tests/components/google_photos/fixtures/list_mediaitems_empty.json b/tests/components/google_photos/fixtures/list_mediaitems_empty.json deleted file mode 100644 index bf6a4da855f..00000000000 --- a/tests/components/google_photos/fixtures/list_mediaitems_empty.json +++ /dev/null @@ -1,5 +0,0 @@ -[ - { - "mediaItems": [] - } -] diff --git a/tests/components/google_photos/test_config_flow.py b/tests/components/google_photos/test_config_flow.py deleted file mode 100644 index 4896f82effb..00000000000 --- a/tests/components/google_photos/test_config_flow.py +++ /dev/null @@ -1,326 +0,0 @@ -"""Test the 
Google Photos config flow.""" - -from collections.abc import Generator -from typing import Any -from unittest.mock import Mock, patch - -from google_photos_library_api.exceptions import GooglePhotosApiError -import pytest - -from homeassistant import config_entries -from homeassistant.components.google_photos.const import ( - DOMAIN, - OAUTH2_AUTHORIZE, - OAUTH2_TOKEN, -) -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import config_entry_oauth2_flow - -from .conftest import EXPIRES_IN, FAKE_ACCESS_TOKEN, FAKE_REFRESH_TOKEN, USER_IDENTIFIER - -from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker -from tests.typing import ClientSessionGenerator - -CLIENT_ID = "1234" -CLIENT_SECRET = "5678" - - -@pytest.fixture(name="mock_setup") -def mock_setup_entry() -> Generator[Mock]: - """Fixture to mock out integration setup.""" - with patch( - "homeassistant.components.google_photos.async_setup_entry", return_value=True - ) as mock_setup: - yield mock_setup - - -@pytest.fixture(autouse=True) -def mock_patch_api(mock_api: Mock) -> Generator[None]: - """Fixture to patch the config flow api.""" - with patch( - "homeassistant.components.google_photos.config_flow.GooglePhotosLibraryApi", - return_value=mock_api, - ): - yield - - -@pytest.fixture(name="updated_token_entry", autouse=True) -def mock_updated_token_entry() -> dict[str, Any]: - """Fixture to provide any test specific overrides to token data from the oauth token endpoint.""" - return {} - - -@pytest.fixture(name="mock_oauth_token_request", autouse=True) -def mock_token_request( - aioclient_mock: AiohttpClientMocker, - token_entry: dict[str, any], - updated_token_entry: dict[str, Any], -) -> None: - """Fixture to provide a fake response from the oauth token endpoint.""" - aioclient_mock.clear_requests() - aioclient_mock.post( - OAUTH2_TOKEN, - json={ - **token_entry, - **updated_token_entry, - }, - ) - - -@pytest.mark.usefixtures("current_request_with_host", "mock_api") -@pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) -async def test_full_flow( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - mock_setup: Mock, -) -> None: - """Check full flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": "https://example.com/auth/external/callback", - }, - ) - - assert result["url"] == ( - f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" - "&redirect_uri=https://example.com/auth/external/callback" - f"&state={state}" - "&scope=https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" - "+https://www.googleapis.com/auth/photoslibrary.appendonly" - "+https://www.googleapis.com/auth/userinfo.profile" - "&access_type=offline&prompt=consent" - ) - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.CREATE_ENTRY - config_entry = result["result"] - assert config_entry.unique_id == USER_IDENTIFIER - assert config_entry.title == "Test Name" - config_entry_data = dict(config_entry.data) - assert "token" in 
config_entry_data - assert "expires_at" in config_entry_data["token"] - del config_entry_data["token"]["expires_at"] - assert config_entry_data == { - "auth_implementation": DOMAIN, - "token": { - "access_token": FAKE_ACCESS_TOKEN, - "expires_in": EXPIRES_IN, - "refresh_token": FAKE_REFRESH_TOKEN, - "type": "Bearer", - "scope": ( - "https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" - " https://www.googleapis.com/auth/photoslibrary.appendonly" - " https://www.googleapis.com/auth/userinfo.profile" - ), - }, - } - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup.mock_calls) == 1 - - -@pytest.mark.usefixtures( - "current_request_with_host", - "setup_credentials", - "mock_api", -) -@pytest.mark.parametrize( - "api_error", - [ - GooglePhotosApiError("some error"), - ], -) -async def test_api_not_enabled( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, -) -> None: - """Check flow aborts if api is not enabled.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": "https://example.com/auth/external/callback", - }, - ) - - assert result["url"] == ( - f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" - "&redirect_uri=https://example.com/auth/external/callback" - f"&state={state}" - "&scope=https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" - "+https://www.googleapis.com/auth/photoslibrary.appendonly" - "+https://www.googleapis.com/auth/userinfo.profile" - "&access_type=offline&prompt=consent" - ) - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "access_not_configured" - assert result["description_placeholders"]["message"].endswith("some error") - - -@pytest.mark.usefixtures("current_request_with_host", "setup_credentials") -async def test_general_exception( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - mock_api: Mock, -) -> None: - """Check flow aborts if exception happens.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": "https://example.com/auth/external/callback", - }, - ) - assert result["url"] == ( - f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" - "&redirect_uri=https://example.com/auth/external/callback" - f"&state={state}" - "&scope=https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" - "+https://www.googleapis.com/auth/photoslibrary.appendonly" - "+https://www.googleapis.com/auth/userinfo.profile" - "&access_type=offline&prompt=consent" - ) - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - mock_api.list_media_items.side_effect = Exception - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == 
"unknown" - - -@pytest.mark.usefixtures("current_request_with_host", "mock_api", "setup_integration") -@pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) -@pytest.mark.parametrize( - "updated_token_entry", - [ - { - "access_token": "updated-access-token", - } - ], -) -@pytest.mark.parametrize( - ( - "user_identifier", - "abort_reason", - "resulting_access_token", - "expected_setup_calls", - ), - [ - ( - USER_IDENTIFIER, - "reauth_successful", - "updated-access-token", - 1, - ), - ( - "345", - "wrong_account", - FAKE_ACCESS_TOKEN, - 0, - ), - ], -) -@pytest.mark.usefixtures("current_request_with_host") -async def test_reauth( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - config_entry: MockConfigEntry, - user_identifier: str, - abort_reason: str, - resulting_access_token: str, - mock_setup: Mock, - expected_setup_calls: int, -) -> None: - """Test the re-authentication case updates the correct config entry.""" - - config_entry.async_start_reauth(hass) - await hass.async_block_till_done() - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - result = flows[0] - assert result["step_id"] == "reauth_confirm" - - result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": "https://example.com/auth/external/callback", - }, - ) - assert result["url"] == ( - f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" - "&redirect_uri=https://example.com/auth/external/callback" - f"&state={state}" - "&scope=https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" - "+https://www.googleapis.com/auth/photoslibrary.appendonly" - "+https://www.googleapis.com/auth/userinfo.profile" - "&access_type=offline&prompt=consent" - ) - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == abort_reason - - assert config_entry.unique_id == USER_IDENTIFIER - assert config_entry.title == "Account Name" - config_entry_data = dict(config_entry.data) - assert "token" in config_entry_data - assert "expires_at" in config_entry_data["token"] - del config_entry_data["token"]["expires_at"] - assert config_entry_data == { - "auth_implementation": DOMAIN, - "token": { - # Verify token is refreshed or not - "access_token": resulting_access_token, - "expires_in": EXPIRES_IN, - "refresh_token": FAKE_REFRESH_TOKEN, - "type": "Bearer", - "scope": ( - "https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" - " https://www.googleapis.com/auth/photoslibrary.appendonly" - " https://www.googleapis.com/auth/userinfo.profile" - ), - }, - } - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup.mock_calls) == expected_setup_calls diff --git a/tests/components/google_photos/test_init.py b/tests/components/google_photos/test_init.py deleted file mode 100644 index 80b051d092d..00000000000 --- a/tests/components/google_photos/test_init.py +++ /dev/null @@ -1,120 +0,0 @@ -"""Tests for Google Photos.""" - -import http -import time - -from aiohttp import ClientError -from google_photos_library_api.exceptions import 
GooglePhotosApiError -import pytest - -from homeassistant.components.google_photos.const import OAUTH2_TOKEN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker - - -@pytest.mark.usefixtures("setup_integration") -async def test_setup( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test successful setup and unload.""" - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.NOT_LOADED - - -@pytest.fixture(name="refresh_token_status") -def mock_refresh_token_status() -> http.HTTPStatus: - """Fixture to set a token refresh status.""" - return http.HTTPStatus.OK - - -@pytest.fixture(name="refresh_token_exception") -def mock_refresh_token_exception() -> Exception | None: - """Fixture to set a token refresh status.""" - return None - - -@pytest.fixture(name="refresh_token") -def mock_refresh_token( - aioclient_mock: AiohttpClientMocker, - refresh_token_status: http.HTTPStatus, - refresh_token_exception: Exception | None, -) -> MockConfigEntry: - """Fixture to simulate a token refresh response.""" - aioclient_mock.clear_requests() - aioclient_mock.post( - OAUTH2_TOKEN, - exc=refresh_token_exception, - status=refresh_token_status, - json={ - "access_token": "updated-access-token", - "refresh_token": "updated-refresh-token", - "expires_at": time.time() + 3600, - "expires_in": 3600, - }, - ) - - -@pytest.mark.usefixtures("refresh_token", "setup_integration") -@pytest.mark.parametrize("expires_at", [time.time() - 3600], ids=["expired"]) -async def test_expired_token_refresh_success( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test expired token is refreshed.""" - assert config_entry.state is ConfigEntryState.LOADED - assert config_entry.data["token"]["access_token"] == "updated-access-token" - assert config_entry.data["token"]["expires_in"] == 3600 - - -@pytest.mark.usefixtures("refresh_token", "setup_integration") -@pytest.mark.parametrize( - ("expires_at", "refresh_token_status", "refresh_token_exception", "expected_state"), - [ - ( - time.time() - 3600, - http.HTTPStatus.UNAUTHORIZED, - None, - ConfigEntryState.SETUP_ERROR, # Reauth - ), - ( - time.time() - 3600, - http.HTTPStatus.INTERNAL_SERVER_ERROR, - None, - ConfigEntryState.SETUP_RETRY, - ), - ( - time.time() - 3600, - None, - ClientError("Client exception raised"), - ConfigEntryState.SETUP_RETRY, - ), - ], - ids=["unauthorized", "internal_server_error", "client_error"], -) -async def test_expired_token_refresh_failure( - hass: HomeAssistant, - config_entry: MockConfigEntry, - expected_state: ConfigEntryState, -) -> None: - """Test failure while refreshing token with a transient error.""" - - assert config_entry.state is expected_state - - -@pytest.mark.usefixtures("setup_integration") -@pytest.mark.parametrize("api_error", [GooglePhotosApiError("some error")]) -async def test_coordinator_init_failure( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test init failure to load albums.""" - assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/google_photos/test_media_source.py b/tests/components/google_photos/test_media_source.py deleted file mode 100644 index ce059e4fce5..00000000000 --- 
a/tests/components/google_photos/test_media_source.py +++ /dev/null @@ -1,191 +0,0 @@ -"""Test the Google Photos media source.""" - -from unittest.mock import Mock - -from google_photos_library_api.exceptions import GooglePhotosApiError -import pytest - -from homeassistant.components.google_photos.const import DOMAIN, UPLOAD_SCOPE -from homeassistant.components.media_source import ( - URI_SCHEME, - BrowseError, - async_browse_media, - async_resolve_media, -) -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -from .conftest import CONFIG_ENTRY_ID - -from tests.common import MockConfigEntry - - -@pytest.fixture(autouse=True) -async def setup_components(hass: HomeAssistant) -> None: - """Fixture to initialize the integration.""" - await async_setup_component(hass, "media_source", {}) - - -@pytest.mark.usefixtures("setup_integration") -async def test_no_config_entries( - hass: HomeAssistant, config_entry: MockConfigEntry -) -> None: - """Test a media source with no active config entry.""" - - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") - - assert browse.domain == DOMAIN - assert browse.identifier is None - assert browse.title == "Google Photos" - assert browse.can_expand - assert not browse.children - - -@pytest.mark.usefixtures("setup_integration", "mock_api") -@pytest.mark.parametrize( - ("scopes"), - [ - [UPLOAD_SCOPE], - ], -) -async def test_no_read_scopes( - hass: HomeAssistant, -) -> None: - """Test a media source with only write scopes configured so no media source exists.""" - browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") - assert browse.domain == DOMAIN - assert browse.identifier is None - assert browse.title == "Google Photos" - assert not browse.children - - -@pytest.mark.usefixtures("setup_integration", "mock_api") -@pytest.mark.parametrize( - ("album_path", "expected_album_title"), - [ - (f"{CONFIG_ENTRY_ID}/a/album-media-id-1", "Album title"), - ], -) -@pytest.mark.parametrize( - ("fixture_name", "expected_results", "expected_medias"), - [ - ("list_mediaitems_empty.json", [], []), - ( - "list_mediaitems.json", - [ - (f"{CONFIG_ENTRY_ID}/p/id1", "example1.jpg"), - (f"{CONFIG_ENTRY_ID}/p/id2", "example2.mp4"), - ], - [ - ("http://img.example.com/id1=h2160", "image/jpeg"), - ("http://img.example.com/id2=dv", "video/mp4"), - ], - ), - ], -) -async def test_browse_albums( - hass: HomeAssistant, - album_path: str, - expected_album_title: str, - expected_results: list[tuple[str, str]], - expected_medias: list[tuple[str, str]], -) -> None: - """Test a media source with no eligible camera devices.""" - browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") - assert browse.domain == DOMAIN - assert browse.identifier is None - assert browse.title == "Google Photos" - assert [(child.identifier, child.title) for child in browse.children] == [ - (CONFIG_ENTRY_ID, "Account Name") - ] - - browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}") - assert browse.domain == DOMAIN - assert browse.identifier == CONFIG_ENTRY_ID - assert browse.title == "Account Name" - assert [(child.identifier, child.title) for child in browse.children] == [ - (f"{CONFIG_ENTRY_ID}/a/album-media-id-1", "Album title"), - ] - - browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{album_path}") - assert browse.domain == DOMAIN - assert browse.identifier == album_path - assert 
browse.title == "Account Name" - assert [ - (child.identifier, child.title) for child in browse.children - ] == expected_results - - media = [ - await async_resolve_media( - hass, f"{URI_SCHEME}{DOMAIN}/{child.identifier}", None - ) - for child in browse.children - ] - assert [ - (play_media.url, play_media.mime_type) for play_media in media - ] == expected_medias - - -@pytest.mark.usefixtures("setup_integration", "mock_api") -async def test_invalid_config_entry(hass: HomeAssistant) -> None: - """Test browsing to a config entry that does not exist.""" - with pytest.raises(BrowseError, match="Could not find config entry"): - await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/invalid-config-entry") - - -@pytest.mark.usefixtures("setup_integration", "mock_api") -@pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) -async def test_browse_invalid_path(hass: HomeAssistant) -> None: - """Test browsing to a photo is not possible.""" - browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") - assert browse.domain == DOMAIN - assert browse.identifier is None - assert browse.title == "Google Photos" - assert [(child.identifier, child.title) for child in browse.children] == [ - (CONFIG_ENTRY_ID, "Account Name") - ] - - with pytest.raises(BrowseError, match="Unsupported identifier"): - await async_browse_media( - hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/p/some-photo-id" - ) - - -@pytest.mark.usefixtures("setup_integration") -@pytest.mark.parametrize( - ("identifier", "expected_error"), - [ - (CONFIG_ENTRY_ID, "not a Photo"), - ("invalid-config-entry/a/example", "not a Photo"), - ("invalid-config-entry/q/example", "Could not parse"), - ("too/many/slashes/in/path", "Invalid identifier"), - ], -) -async def test_missing_photo_id( - hass: HomeAssistant, identifier: str, expected_error: str -) -> None: - """Test parsing an invalid media identifier.""" - with pytest.raises(BrowseError, match=expected_error): - await async_resolve_media(hass, f"{URI_SCHEME}{DOMAIN}/{identifier}", None) - - -@pytest.mark.usefixtures("setup_integration", "mock_api") -async def test_list_media_items_failure(hass: HomeAssistant, mock_api: Mock) -> None: - """Test browsing to an album id that does not exist.""" - browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") - assert browse.domain == DOMAIN - assert browse.identifier is None - assert browse.title == "Google Photos" - assert [(child.identifier, child.title) for child in browse.children] == [ - (CONFIG_ENTRY_ID, "Account Name") - ] - - mock_api.list_media_items.side_effect = GooglePhotosApiError("some error") - - with pytest.raises(BrowseError, match="Error listing media items"): - await async_browse_media( - hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/a/recent" - ) diff --git a/tests/components/google_photos/test_services.py b/tests/components/google_photos/test_services.py deleted file mode 100644 index 381fb1c431f..00000000000 --- a/tests/components/google_photos/test_services.py +++ /dev/null @@ -1,396 +0,0 @@ -"""Tests for Google Photos.""" - -from collections.abc import Generator -from dataclasses import dataclass -import re -from unittest.mock import Mock, patch - -from google_photos_library_api.exceptions import GooglePhotosApiError -from google_photos_library_api.model import ( - Album, - CreateMediaItemsResult, - MediaItem, - NewMediaItemResult, - Status, -) -import pytest - -from homeassistant.components.google_photos.const import DOMAIN, READ_SCOPE -from homeassistant.components.google_photos.services import ( - 
CONF_ALBUM, - CONF_CONFIG_ENTRY_ID, - UPLOAD_SERVICE, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_FILENAME -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from tests.common import MockConfigEntry - -TEST_FILENAME = "doorbell_snapshot.jpg" -ALBUM_TITLE = "Album title" - - -@dataclass -class MockUploadFile: - """Dataclass used to configure the test with a fake file behavior.""" - - content: bytes = b"image bytes" - exists: bool = True - is_allowed_path: bool = True - size: int | None = None - - -@pytest.fixture(name="upload_file") -def upload_file_fixture() -> None: - """Fixture to set up test configuration with a fake file.""" - return MockUploadFile() - - -@pytest.fixture(autouse=True) -def mock_upload_file( - hass: HomeAssistant, upload_file: MockUploadFile -) -> Generator[None]: - """Fixture that mocks out the file calls using the FakeFile fixture.""" - with ( - patch( - "homeassistant.components.google_photos.services.Path.read_bytes", - return_value=upload_file.content, - ), - patch( - "homeassistant.components.google_photos.services.Path.exists", - return_value=upload_file.exists, - ), - patch.object( - hass.config, "is_allowed_path", return_value=upload_file.is_allowed_path - ), - patch("pathlib.Path.stat") as mock_stat, - ): - mock_stat.return_value = Mock() - mock_stat.return_value.st_size = ( - upload_file.size if upload_file.size else len(upload_file.content) - ) - yield - - -@pytest.mark.usefixtures("setup_integration") -async def test_upload_service( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_api: Mock, -) -> None: - """Test service call to upload content.""" - assert hass.services.has_service(DOMAIN, "upload") - - mock_api.create_media_items.return_value = CreateMediaItemsResult( - new_media_item_results=[ - NewMediaItemResult( - upload_token="some-upload-token", - status=Status(code=200), - media_item=MediaItem(id="new-media-item-id-1"), - ) - ] - ) - - response = await hass.services.async_call( - DOMAIN, - UPLOAD_SERVICE, - { - CONF_CONFIG_ENTRY_ID: config_entry.entry_id, - CONF_FILENAME: TEST_FILENAME, - CONF_ALBUM: ALBUM_TITLE, - }, - blocking=True, - return_response=True, - ) - - assert response == { - "media_items": [{"media_item_id": "new-media-item-id-1"}], - "album_id": "album-media-id-1", - } - - -@pytest.mark.usefixtures("setup_integration") -async def test_upload_service_config_entry_not_found( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test upload service call with a config entry that does not exist.""" - with pytest.raises(HomeAssistantError, match="not found in registry"): - await hass.services.async_call( - DOMAIN, - UPLOAD_SERVICE, - { - CONF_CONFIG_ENTRY_ID: "invalid-config-entry-id", - CONF_FILENAME: TEST_FILENAME, - CONF_ALBUM: ALBUM_TITLE, - }, - blocking=True, - return_response=True, - ) - - -@pytest.mark.usefixtures("setup_integration") -async def test_config_entry_not_loaded( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test upload service call with a config entry that is not loaded.""" - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.NOT_LOADED - - with pytest.raises(HomeAssistantError, match="not found in registry"): - await hass.services.async_call( - DOMAIN, - UPLOAD_SERVICE, - { - CONF_CONFIG_ENTRY_ID: config_entry.unique_id, - CONF_FILENAME: TEST_FILENAME, - 
CONF_ALBUM: ALBUM_TITLE, - }, - blocking=True, - return_response=True, - ) - - -@pytest.mark.usefixtures("setup_integration") -@pytest.mark.parametrize("upload_file", [MockUploadFile(is_allowed_path=False)]) -async def test_path_is_not_allowed( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test upload service call with a filename path that is not allowed.""" - with ( - pytest.raises(HomeAssistantError, match="no access to path"), - ): - await hass.services.async_call( - DOMAIN, - UPLOAD_SERVICE, - { - CONF_CONFIG_ENTRY_ID: config_entry.entry_id, - CONF_FILENAME: TEST_FILENAME, - CONF_ALBUM: ALBUM_TITLE, - }, - blocking=True, - return_response=True, - ) - - -@pytest.mark.usefixtures("setup_integration") -@pytest.mark.parametrize("upload_file", [MockUploadFile(exists=False)]) -async def test_filename_does_not_exist( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test upload service call with a filename path that does not exist.""" - with pytest.raises(HomeAssistantError, match="does not exist"): - await hass.services.async_call( - DOMAIN, - UPLOAD_SERVICE, - { - CONF_CONFIG_ENTRY_ID: config_entry.entry_id, - CONF_FILENAME: TEST_FILENAME, - CONF_ALBUM: ALBUM_TITLE, - }, - blocking=True, - return_response=True, - ) - - -@pytest.mark.usefixtures("setup_integration") -async def test_upload_service_upload_content_failure( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_api: Mock, -) -> None: - """Test service call to upload content.""" - - mock_api.upload_content.side_effect = GooglePhotosApiError() - - with pytest.raises(HomeAssistantError, match="Failed to upload content"): - await hass.services.async_call( - DOMAIN, - UPLOAD_SERVICE, - { - CONF_CONFIG_ENTRY_ID: config_entry.entry_id, - CONF_FILENAME: TEST_FILENAME, - CONF_ALBUM: ALBUM_TITLE, - }, - blocking=True, - return_response=True, - ) - - -@pytest.mark.usefixtures("setup_integration") -async def test_upload_service_fails_create( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_api: Mock, -) -> None: - """Test service call to upload content.""" - - mock_api.create_media_items.side_effect = GooglePhotosApiError() - - with pytest.raises( - HomeAssistantError, match="Google Photos API responded with error" - ): - await hass.services.async_call( - DOMAIN, - UPLOAD_SERVICE, - { - CONF_CONFIG_ENTRY_ID: config_entry.entry_id, - CONF_FILENAME: TEST_FILENAME, - CONF_ALBUM: ALBUM_TITLE, - }, - blocking=True, - return_response=True, - ) - - -@pytest.mark.usefixtures("setup_integration") -@pytest.mark.parametrize( - ("scopes"), - [ - [READ_SCOPE], - ], -) -async def test_upload_service_no_scope( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test service call to upload content but the config entry is read-only.""" - - with pytest.raises(HomeAssistantError, match="not granted permission"): - await hass.services.async_call( - DOMAIN, - UPLOAD_SERVICE, - { - CONF_CONFIG_ENTRY_ID: config_entry.entry_id, - CONF_FILENAME: TEST_FILENAME, - CONF_ALBUM: ALBUM_TITLE, - }, - blocking=True, - return_response=True, - ) - - -@pytest.mark.usefixtures("setup_integration") -@pytest.mark.parametrize("upload_file", [MockUploadFile(size=26 * 1024 * 1024)]) -async def test_upload_size_limit( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test upload service call with a filename path that does not exist.""" - with pytest.raises( - HomeAssistantError, - match=re.escape(f"`{TEST_FILENAME}` is too large (27262976 > 20971520)"), - ): - await 
hass.services.async_call( - DOMAIN, - UPLOAD_SERVICE, - { - CONF_CONFIG_ENTRY_ID: config_entry.entry_id, - CONF_FILENAME: TEST_FILENAME, - CONF_ALBUM: ALBUM_TITLE, - }, - blocking=True, - return_response=True, - ) - - -@pytest.mark.usefixtures("setup_integration") -async def test_upload_to_new_album( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_api: Mock, -) -> None: - """Test service call to upload content to a new album.""" - assert hass.services.has_service(DOMAIN, "upload") - - mock_api.create_media_items.return_value = CreateMediaItemsResult( - new_media_item_results=[ - NewMediaItemResult( - upload_token="some-upload-token", - status=Status(code=200), - media_item=MediaItem(id="new-media-item-id-1"), - ) - ] - ) - mock_api.create_album.return_value = Album(id="album-media-id-2", title="New Album") - response = await hass.services.async_call( - DOMAIN, - UPLOAD_SERVICE, - { - CONF_CONFIG_ENTRY_ID: config_entry.entry_id, - CONF_FILENAME: TEST_FILENAME, - CONF_ALBUM: "New Album", - }, - blocking=True, - return_response=True, - ) - - # Verify media item was created with the new album id - mock_api.create_album.assert_awaited() - assert response == { - "media_items": [{"media_item_id": "new-media-item-id-1"}], - "album_id": "album-media-id-2", - } - - # Upload an additional item to the same album and assert that no new album is created - mock_api.create_album.reset_mock() - mock_api.create_media_items.reset_mock() - mock_api.create_media_items.return_value = CreateMediaItemsResult( - new_media_item_results=[ - NewMediaItemResult( - upload_token="some-upload-token", - status=Status(code=200), - media_item=MediaItem(id="new-media-item-id-3"), - ) - ] - ) - response = await hass.services.async_call( - DOMAIN, - UPLOAD_SERVICE, - { - CONF_CONFIG_ENTRY_ID: config_entry.entry_id, - CONF_FILENAME: TEST_FILENAME, - CONF_ALBUM: "New Album", - }, - blocking=True, - return_response=True, - ) - - # Verify the album created last time is used - mock_api.create_album.assert_not_awaited() - assert response == { - "media_items": [{"media_item_id": "new-media-item-id-3"}], - "album_id": "album-media-id-2", - } - - -@pytest.mark.usefixtures("setup_integration") -async def test_create_album_failed( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_api: Mock, -) -> None: - """Test service call to upload content to a new album but creating the album fails.""" - assert hass.services.has_service(DOMAIN, "upload") - - mock_api.create_album.side_effect = GooglePhotosApiError() - - with pytest.raises(HomeAssistantError, match="Failed to create album"): - await hass.services.async_call( - DOMAIN, - UPLOAD_SERVICE, - { - CONF_CONFIG_ENTRY_ID: config_entry.entry_id, - CONF_FILENAME: TEST_FILENAME, - CONF_ALBUM: "New Album", - }, - blocking=True, - return_response=True, - ) diff --git a/tests/components/google_pubsub/test_init.py b/tests/components/google_pubsub/test_init.py index 5f160054da7..a793ade5312 100644 --- a/tests/components/google_pubsub/test_init.py +++ b/tests/components/google_pubsub/test_init.py @@ -1,11 +1,9 @@ """The tests for the Google Pub/Sub component.""" -from collections.abc import Generator from dataclasses import dataclass from datetime import datetime import os -from typing import Any -from unittest.mock import MagicMock, Mock, patch +from unittest import mock import pytest @@ -42,30 +40,30 @@ async def test_nested() -> None: @pytest.fixture(autouse=True, name="mock_client") -def mock_client_fixture() -> Generator[MagicMock]: +def mock_client_fixture(): """Mock 
the pubsub client.""" - with patch(f"{GOOGLE_PUBSUB_PATH}.PublisherClient") as client: + with mock.patch(f"{GOOGLE_PUBSUB_PATH}.PublisherClient") as client: setattr( client, "from_service_account_json", - MagicMock(return_value=MagicMock()), + mock.MagicMock(return_value=mock.MagicMock()), ) yield client @pytest.fixture(autouse=True, name="mock_is_file") -def mock_is_file_fixture() -> Generator[MagicMock]: +def mock_is_file_fixture(): """Mock os.path.isfile.""" - with patch(f"{GOOGLE_PUBSUB_PATH}.os.path.isfile") as is_file: + with mock.patch(f"{GOOGLE_PUBSUB_PATH}.os.path.isfile") as is_file: is_file.return_value = True yield is_file @pytest.fixture(autouse=True) -def mock_json(monkeypatch: pytest.MonkeyPatch) -> None: +def mock_json(hass, monkeypatch): """Mock the event bus listener and os component.""" monkeypatch.setattr( - f"{GOOGLE_PUBSUB_PATH}.json.dumps", Mock(return_value=MagicMock()) + f"{GOOGLE_PUBSUB_PATH}.json.dumps", mock.Mock(return_value=mock.MagicMock()) ) @@ -112,7 +110,7 @@ async def test_full_config(hass: HomeAssistant, mock_client) -> None: ) -async def _setup(hass: HomeAssistant, filter_config: dict[str, Any]) -> None: +async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = { google_pubsub.DOMAIN: { @@ -148,7 +146,7 @@ async def test_allowlist(hass: HomeAssistant, mock_client) -> None: ] for test in tests: - hass.states.async_set(test.id, "on") + hass.states.async_set(test.id, "not blank") await hass.async_block_till_done() was_called = publish_client.publish.call_count == 1 @@ -178,7 +176,7 @@ async def test_denylist(hass: HomeAssistant, mock_client) -> None: ] for test in tests: - hass.states.async_set(test.id, "on") + hass.states.async_set(test.id, "not blank") await hass.async_block_till_done() was_called = publish_client.publish.call_count == 1 diff --git a/tests/components/google_sheets/test_config_flow.py b/tests/components/google_sheets/test_config_flow.py index 756ff080212..0da046645d2 100644 --- a/tests/components/google_sheets/test_config_flow.py +++ b/tests/components/google_sheets/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Google Sheets config flow.""" -from collections.abc import Generator from unittest.mock import Mock, patch from gspread import GSpreadException import pytest +from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.application_credentials import ( @@ -235,7 +235,6 @@ async def test_reauth( "homeassistant.components.google_sheets.async_setup_entry", return_value=True ) as mock_setup: result = await hass.config_entries.flow.async_configure(result["flow_id"]) - await hass.async_block_till_done() assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert len(mock_setup.mock_calls) == 1 diff --git a/tests/components/google_sheets/test_init.py b/tests/components/google_sheets/test_init.py index 700783a2e30..014e89349e2 100644 --- a/tests/components/google_sheets/test_init.py +++ b/tests/components/google_sheets/test_init.py @@ -214,32 +214,6 @@ async def test_append_sheet( assert len(mock_client.mock_calls) == 8 -async def test_append_sheet_multiple_rows( - hass: HomeAssistant, - setup_integration: ComponentSetup, - config_entry: MockConfigEntry, -) -> None: - """Test service call appending to a sheet.""" - await setup_integration() - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - assert entries[0].state is ConfigEntryState.LOADED - - with patch("homeassistant.components.google_sheets.Client") as 
mock_client: - await hass.services.async_call( - DOMAIN, - "append_sheet", - { - "config_entry": config_entry.entry_id, - "worksheet": "Sheet1", - "data": [{"foo": "bar"}, {"foo": "bar2"}], - }, - blocking=True, - ) - assert len(mock_client.mock_calls) == 8 - - async def test_append_sheet_api_error( hass: HomeAssistant, setup_integration: ComponentSetup, diff --git a/tests/components/google_tasks/snapshots/test_todo.ambr b/tests/components/google_tasks/snapshots/test_todo.ambr index 76611ba4a31..af8dec6a182 100644 --- a/tests/components/google_tasks/snapshots/test_todo.ambr +++ b/tests/components/google_tasks/snapshots/test_todo.ambr @@ -79,6 +79,9 @@ }), ]) # --- +# name: test_move_todo_item[api_responses0].4 + None +# --- # name: test_parent_child_ordering[api_responses0] list([ dict({ diff --git a/tests/components/google_tasks/test_config_flow.py b/tests/components/google_tasks/test_config_flow.py index f8ccc5e048f..f2655afd602 100644 --- a/tests/components/google_tasks/test_config_flow.py +++ b/tests/components/google_tasks/test_config_flow.py @@ -1,11 +1,11 @@ """Test the Google Tasks config flow.""" -from collections.abc import Generator from unittest.mock import Mock, patch from googleapiclient.errors import HttpError from httplib2 import Response import pytest +from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.google_tasks.const import ( diff --git a/tests/components/google_tasks/test_todo.py b/tests/components/google_tasks/test_todo.py index b0ee135d4a9..afbaabe5cd0 100644 --- a/tests/components/google_tasks/test_todo.py +++ b/tests/components/google_tasks/test_todo.py @@ -10,16 +10,8 @@ from httplib2 import Response import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import ( - ATTR_DESCRIPTION, - ATTR_DUE_DATE, - ATTR_ITEM, - ATTR_RENAME, - ATTR_STATUS, - DOMAIN as TODO_DOMAIN, - TodoServices, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.components.todo import DOMAIN as TODO_DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -384,8 +376,8 @@ async def test_task_items_error_response( ("api_responses", "item_data"), [ (CREATE_API_RESPONSES, {}), - (CREATE_API_RESPONSES, {ATTR_DUE_DATE: "2023-11-18"}), - (CREATE_API_RESPONSES, {ATTR_DESCRIPTION: "6-pack"}), + (CREATE_API_RESPONSES, {"due_date": "2023-11-18"}), + (CREATE_API_RESPONSES, {"description": "6-pack"}), ], ids=["summary", "due", "description"], ) @@ -407,9 +399,9 @@ async def test_create_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "Soda", **item_data}, - target={ATTR_ENTITY_ID: "todo.my_tasks"}, + "add_item", + {"item": "Soda", **item_data}, + target={"entity_id": "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -447,9 +439,9 @@ async def test_create_todo_list_item_error( with pytest.raises(HomeAssistantError, match="Invalid task ID"): await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "Soda"}, - target={ATTR_ENTITY_ID: "todo.my_tasks"}, + "add_item", + {"item": "Soda"}, + target={"entity_id": "todo.my_tasks"}, blocking=True, ) @@ -472,9 +464,9 @@ async def test_update_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "some-task-id", ATTR_RENAME: "Soda", ATTR_STATUS: "completed"}, - 
target={ATTR_ENTITY_ID: "todo.my_tasks"}, + "update_item", + {"item": "some-task-id", "rename": "Soda", "status": "completed"}, + target={"entity_id": "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -512,9 +504,9 @@ async def test_update_todo_list_item_error( with pytest.raises(HomeAssistantError, match="Invalid task ID"): await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "some-task-id", ATTR_RENAME: "Soda", ATTR_STATUS: "completed"}, - target={ATTR_ENTITY_ID: "todo.my_tasks"}, + "update_item", + {"item": "some-task-id", "rename": "Soda", "status": "completed"}, + target={"entity_id": "todo.my_tasks"}, blocking=True, ) @@ -522,12 +514,12 @@ async def test_update_todo_list_item_error( @pytest.mark.parametrize( ("api_responses", "item_data"), [ - (UPDATE_API_RESPONSES, {ATTR_RENAME: "Soda"}), - (UPDATE_API_RESPONSES, {ATTR_DUE_DATE: "2023-11-18"}), - (UPDATE_API_RESPONSES, {ATTR_DUE_DATE: None}), - (UPDATE_API_RESPONSES, {ATTR_DESCRIPTION: "At least one gallon"}), - (UPDATE_API_RESPONSES, {ATTR_DESCRIPTION: ""}), - (UPDATE_API_RESPONSES, {ATTR_DESCRIPTION: None}), + (UPDATE_API_RESPONSES, {"rename": "Soda"}), + (UPDATE_API_RESPONSES, {"due_date": "2023-11-18"}), + (UPDATE_API_RESPONSES, {"due_date": None}), + (UPDATE_API_RESPONSES, {"description": "At least one gallon"}), + (UPDATE_API_RESPONSES, {"description": ""}), + (UPDATE_API_RESPONSES, {"description": None}), ], ids=( "rename", @@ -556,9 +548,9 @@ async def test_partial_update( await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "some-task-id", **item_data}, - target={ATTR_ENTITY_ID: "todo.my_tasks"}, + "update_item", + {"item": "some-task-id", **item_data}, + target={"entity_id": "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -586,9 +578,9 @@ async def test_partial_update_status( await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "some-task-id", ATTR_STATUS: "needs_action"}, - target={ATTR_ENTITY_ID: "todo.my_tasks"}, + "update_item", + {"item": "some-task-id", "status": "needs_action"}, + target={"entity_id": "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -630,9 +622,9 @@ async def test_delete_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, - target={ATTR_ENTITY_ID: "todo.my_tasks"}, + "remove_item", + {"item": ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, + target={"entity_id": "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -678,9 +670,9 @@ async def test_delete_partial_failure( with pytest.raises(HomeAssistantError, match="Invalid task ID"): await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, - target={ATTR_ENTITY_ID: "todo.my_tasks"}, + "remove_item", + {"item": ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, + target={"entity_id": "todo.my_tasks"}, blocking=True, ) @@ -719,9 +711,9 @@ async def test_delete_invalid_json_response( with pytest.raises(HomeAssistantError, match="unexpected response"): await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: ["some-task-id-1"]}, - target={ATTR_ENTITY_ID: "todo.my_tasks"}, + "remove_item", + {"item": ["some-task-id-1"]}, + target={"entity_id": "todo.my_tasks"}, 
blocking=True, ) @@ -758,9 +750,9 @@ async def test_delete_server_error( with pytest.raises(HomeAssistantError, match="responded with error"): await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: ["some-task-id-1"]}, - target={ATTR_ENTITY_ID: "todo.my_tasks"}, + "remove_item", + {"item": ["some-task-id-1"]}, + target={"entity_id": "todo.my_tasks"}, blocking=True, ) @@ -950,9 +942,9 @@ async def test_susbcribe( # Rename item await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: uid, ATTR_RENAME: "Milk"}, - target={ATTR_ENTITY_ID: "todo.my_tasks"}, + "update_item", + {"item": uid, "rename": "Milk"}, + target={"entity_id": "todo.my_tasks"}, blocking=True, ) diff --git a/tests/components/google_translate/conftest.py b/tests/components/google_translate/conftest.py index aa84c201f0e..82f8d50b83c 100644 --- a/tests/components/google_translate/conftest.py +++ b/tests/components/google_translate/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Google Translate text-to-speech tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/google_translate/test_tts.py b/tests/components/google_translate/test_tts.py index 5b691da4bdc..d19b1269438 100644 --- a/tests/components/google_translate/test_tts.py +++ b/tests/components/google_translate/test_tts.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import Generator from http import HTTPStatus from pathlib import Path from typing import Any @@ -10,16 +9,21 @@ from unittest.mock import MagicMock, patch from gtts import gTTSError import pytest +from typing_extensions import Generator from homeassistant.components import tts from homeassistant.components.google_translate.const import CONF_TLD, DOMAIN -from homeassistant.components.media_player import ATTR_MEDIA_CONTENT_ID +from homeassistant.components.media_player import ( + ATTR_MEDIA_CONTENT_ID, + DOMAIN as DOMAIN_MP, + SERVICE_PLAY_MEDIA, +) +from homeassistant.config import async_process_ha_core_config from homeassistant.const import ATTR_ENTITY_ID, CONF_PLATFORM from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_mock_service from tests.components.tts.common import retrieve_media from tests.typing import ClientSessionGenerator @@ -30,8 +34,15 @@ def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: """Mock the TTS cache dir with empty dir.""" + return mock_tts_cache_dir + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Mock media player calls.""" + return async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) @pytest.fixture(autouse=True) @@ -103,7 +114,7 @@ async def mock_config_entry_setup(hass: HomeAssistant, config: dict[str, Any]) - "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_translate_en_com", + ATTR_ENTITY_ID: "tts.google_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, @@ -115,7 +126,7 @@ async def test_tts_service( hass: HomeAssistant, 
mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - service_calls: list[ServiceCall], + calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -128,11 +139,9 @@ async def test_tts_service( blocking=True, ) - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - await retrieve_media( - hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] - ) + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -160,7 +169,7 @@ async def test_tts_service( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_translate_de_com", + ATTR_ENTITY_ID: "tts.google_de_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, @@ -172,7 +181,7 @@ async def test_service_say_german_config( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - service_calls: list[ServiceCall], + calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -185,11 +194,9 @@ async def test_service_say_german_config( blocking=True, ) - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - await retrieve_media( - hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] - ) + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -216,7 +223,7 @@ async def test_service_say_german_config( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_translate_en_com", + ATTR_ENTITY_ID: "tts.google_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", tts.ATTR_LANGUAGE: "de", @@ -229,7 +236,7 @@ async def test_service_say_german_service( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - service_calls: list[ServiceCall], + calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -242,11 +249,9 @@ async def test_service_say_german_service( blocking=True, ) - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - await retrieve_media( - hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] - ) + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -273,7 +278,7 @@ async def test_service_say_german_service( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_translate_en_co_uk", + ATTR_ENTITY_ID: "tts.google_en_co_uk", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, @@ -285,7 +290,7 @@ async def test_service_say_en_uk_config( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - service_calls: list[ServiceCall], + calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -298,11 +303,9 @@ async def test_service_say_en_uk_config( blocking=True, ) - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - await retrieve_media( - hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] - ) + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -329,7 +332,7 @@ async def test_service_say_en_uk_config( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: 
"tts.google_translate_en_com", + ATTR_ENTITY_ID: "tts.google_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", tts.ATTR_LANGUAGE: "en-uk", @@ -342,7 +345,7 @@ async def test_service_say_en_uk_service( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - service_calls: list[ServiceCall], + calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -355,11 +358,9 @@ async def test_service_say_en_uk_service( blocking=True, ) - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - await retrieve_media( - hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] - ) + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -386,7 +387,7 @@ async def test_service_say_en_uk_service( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_translate_en_com", + ATTR_ENTITY_ID: "tts.google_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", tts.ATTR_OPTIONS: {"tld": "co.uk"}, @@ -399,7 +400,7 @@ async def test_service_say_en_couk( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - service_calls: list[ServiceCall], + calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -412,11 +413,9 @@ async def test_service_say_en_couk( blocking=True, ) - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - await retrieve_media( - hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] - ) + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -443,7 +442,7 @@ async def test_service_say_en_couk( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_translate_en_com", + ATTR_ENTITY_ID: "tts.google_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, @@ -455,7 +454,7 @@ async def test_service_say_error( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - service_calls: list[ServiceCall], + calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -470,11 +469,9 @@ async def test_service_say_error( blocking=True, ) - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - await retrieve_media( - hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] - ) + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == HTTPStatus.NOT_FOUND ) assert len(mock_gtts.mock_calls) == 2 diff --git a/tests/components/google_travel_time/conftest.py b/tests/components/google_travel_time/conftest.py index 7d1e4791eee..141b40eff29 100644 --- a/tests/components/google_travel_time/conftest.py +++ b/tests/components/google_travel_time/conftest.py @@ -1,22 +1,17 @@ """Fixtures for Google Time Travel tests.""" -from collections.abc import Generator -from typing import Any -from unittest.mock import MagicMock, patch +from unittest.mock import patch from googlemaps.exceptions import ApiError, Timeout, TransportError import pytest from homeassistant.components.google_travel_time.const import DOMAIN -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @pytest.fixture(name="mock_config") -async def mock_config_fixture( - hass: 
HomeAssistant, data: dict[str, Any], options: dict[str, Any] -) -> MockConfigEntry: +async def mock_config_fixture(hass, data, options): """Mock a Google Travel Time config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -31,7 +26,7 @@ async def mock_config_fixture( @pytest.fixture(name="bypass_setup") -def bypass_setup_fixture() -> Generator[None]: +def bypass_setup_fixture(): """Bypass entry setup.""" with patch( "homeassistant.components.google_travel_time.async_setup_entry", @@ -41,7 +36,7 @@ def bypass_setup_fixture() -> Generator[None]: @pytest.fixture(name="bypass_platform_setup") -def bypass_platform_setup_fixture() -> Generator[None]: +def bypass_platform_setup_fixture(): """Bypass platform setup.""" with patch( "homeassistant.components.google_travel_time.sensor.async_setup_entry", @@ -51,7 +46,7 @@ def bypass_platform_setup_fixture() -> Generator[None]: @pytest.fixture(name="validate_config_entry") -def validate_config_entry_fixture() -> Generator[MagicMock]: +def validate_config_entry_fixture(): """Return valid config entry.""" with ( patch("homeassistant.components.google_travel_time.helpers.Client"), @@ -64,24 +59,24 @@ def validate_config_entry_fixture() -> Generator[MagicMock]: @pytest.fixture(name="invalidate_config_entry") -def invalidate_config_entry_fixture(validate_config_entry: MagicMock) -> None: +def invalidate_config_entry_fixture(validate_config_entry): """Return invalid config entry.""" validate_config_entry.side_effect = ApiError("test") @pytest.fixture(name="invalid_api_key") -def invalid_api_key_fixture(validate_config_entry: MagicMock) -> None: +def invalid_api_key_fixture(validate_config_entry): """Throw a REQUEST_DENIED ApiError.""" validate_config_entry.side_effect = ApiError("REQUEST_DENIED", "Invalid API key.") @pytest.fixture(name="timeout") -def timeout_fixture(validate_config_entry: MagicMock) -> None: +def timeout_fixture(validate_config_entry): """Throw a Timeout exception.""" validate_config_entry.side_effect = Timeout() @pytest.fixture(name="transport_error") -def transport_error_fixture(validate_config_entry: MagicMock) -> None: +def transport_error_fixture(validate_config_entry): """Throw a TransportError exception.""" validate_config_entry.side_effect = TransportError("Unknown.") diff --git a/tests/components/google_travel_time/test_config_flow.py b/tests/components/google_travel_time/test_config_flow.py index 5f9d5d4549b..270b82272d8 100644 --- a/tests/components/google_travel_time/test_config_flow.py +++ b/tests/components/google_travel_time/test_config_flow.py @@ -29,8 +29,6 @@ from homeassistant.data_entry_flow import FlowResultType from .const import MOCK_CONFIG, RECONFIGURE_CONFIG -from tests.common import MockConfigEntry - async def assert_common_reconfigure_steps( hass: HomeAssistant, reconfigure_result: config_entries.ConfigFlowResult @@ -196,9 +194,15 @@ async def test_malformed_api_key(hass: HomeAssistant) -> None: ], ) @pytest.mark.usefixtures("validate_config_entry", "bypass_setup") -async def test_reconfigure(hass: HomeAssistant, mock_config: MockConfigEntry) -> None: +async def test_reconfigure(hass: HomeAssistant, mock_config) -> None: """Test reconfigure flow.""" - reconfigure_result = await mock_config.start_reconfigure_flow(hass) + reconfigure_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": mock_config.entry_id, + }, + ) assert reconfigure_result["type"] is FlowResultType.FORM assert reconfigure_result["step_id"] == 
"reconfigure" @@ -219,10 +223,16 @@ async def test_reconfigure(hass: HomeAssistant, mock_config: MockConfigEntry) -> ) @pytest.mark.usefixtures("invalidate_config_entry") async def test_reconfigure_invalid_config_entry( - hass: HomeAssistant, mock_config: MockConfigEntry + hass: HomeAssistant, mock_config ) -> None: """Test we get the form.""" - result = await mock_config.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": mock_config.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] is None result2 = await hass.config_entries.flow.async_configure( @@ -249,11 +259,15 @@ async def test_reconfigure_invalid_config_entry( ], ) @pytest.mark.usefixtures("invalid_api_key") -async def test_reconfigure_invalid_api_key( - hass: HomeAssistant, mock_config: MockConfigEntry -) -> None: +async def test_reconfigure_invalid_api_key(hass: HomeAssistant, mock_config) -> None: """Test we get the form.""" - result = await mock_config.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": mock_config.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] is None result2 = await hass.config_entries.flow.async_configure( @@ -279,11 +293,15 @@ async def test_reconfigure_invalid_api_key( ], ) @pytest.mark.usefixtures("transport_error") -async def test_reconfigure_transport_error( - hass: HomeAssistant, mock_config: MockConfigEntry -) -> None: +async def test_reconfigure_transport_error(hass: HomeAssistant, mock_config) -> None: """Test we get the form.""" - result = await mock_config.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": mock_config.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] is None result2 = await hass.config_entries.flow.async_configure( @@ -309,11 +327,15 @@ async def test_reconfigure_transport_error( ], ) @pytest.mark.usefixtures("timeout") -async def test_reconfigure_timeout( - hass: HomeAssistant, mock_config: MockConfigEntry -) -> None: +async def test_reconfigure_timeout(hass: HomeAssistant, mock_config) -> None: """Test we get the form.""" - result = await mock_config.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": mock_config.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] is None result2 = await hass.config_entries.flow.async_configure( @@ -339,7 +361,7 @@ async def test_reconfigure_timeout( ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_options_flow(hass: HomeAssistant, mock_config: MockConfigEntry) -> None: +async def test_options_flow(hass: HomeAssistant, mock_config) -> None: """Test options flow.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None @@ -400,9 +422,7 @@ async def test_options_flow(hass: HomeAssistant, mock_config: MockConfigEntry) - ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_options_flow_departure_time( - hass: HomeAssistant, mock_config: MockConfigEntry -) -> None: +async def test_options_flow_departure_time(hass: HomeAssistant, mock_config) -> None: """Test options flow with 
departure time.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None @@ -472,9 +492,7 @@ async def test_options_flow_departure_time( ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_reset_departure_time( - hass: HomeAssistant, mock_config: MockConfigEntry -) -> None: +async def test_reset_departure_time(hass: HomeAssistant, mock_config) -> None: """Test resetting departure time.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None @@ -520,9 +538,7 @@ async def test_reset_departure_time( ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_reset_arrival_time( - hass: HomeAssistant, mock_config: MockConfigEntry -) -> None: +async def test_reset_arrival_time(hass: HomeAssistant, mock_config) -> None: """Test resetting arrival time.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None @@ -566,9 +582,7 @@ async def test_reset_arrival_time( ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_reset_options_flow_fields( - hass: HomeAssistant, mock_config: MockConfigEntry -) -> None: +async def test_reset_options_flow_fields(hass: HomeAssistant, mock_config) -> None: """Test resetting options flow fields that are not time related to None.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None diff --git a/tests/components/google_travel_time/test_sensor.py b/tests/components/google_travel_time/test_sensor.py index 5ac9ecad482..57f3d7a0b98 100644 --- a/tests/components/google_travel_time/test_sensor.py +++ b/tests/components/google_travel_time/test_sensor.py @@ -1,7 +1,6 @@ """Test the Google Maps Travel Time sensors.""" -from collections.abc import Generator -from unittest.mock import MagicMock, patch +from unittest.mock import patch import pytest @@ -26,7 +25,7 @@ from tests.common import MockConfigEntry @pytest.fixture(name="mock_update") -def mock_update_fixture() -> Generator[MagicMock]: +def mock_update_fixture(): """Mock an update to the sensor.""" with ( patch("homeassistant.components.google_travel_time.sensor.Client"), @@ -57,7 +56,7 @@ def mock_update_fixture() -> Generator[MagicMock]: @pytest.fixture(name="mock_update_duration") -def mock_update_duration_fixture(mock_update: MagicMock) -> MagicMock: +def mock_update_duration_fixture(mock_update): """Mock an update to the sensor returning no duration_in_traffic.""" mock_update.return_value = { "rows": [ @@ -78,7 +77,7 @@ def mock_update_duration_fixture(mock_update: MagicMock) -> MagicMock: @pytest.fixture(name="mock_update_empty") -def mock_update_empty_fixture(mock_update: MagicMock) -> MagicMock: +def mock_update_empty_fixture(mock_update): """Mock an update to the sensor with an empty response.""" mock_update.return_value = None return mock_update diff --git a/tests/components/google_wifi/test_sensor.py b/tests/components/google_wifi/test_sensor.py index af870a2136d..c7df2b4e822 100644 --- a/tests/components/google_wifi/test_sensor.py +++ b/tests/components/google_wifi/test_sensor.py @@ -2,7 +2,6 @@ from datetime import datetime, timedelta from http import HTTPStatus -from typing import Any from unittest.mock import Mock, patch import requests_mock @@ -79,9 +78,7 @@ async def test_setup_get( assert_setup_component(6, "sensor") -def setup_api( - hass: HomeAssistant | None, data: str | None, requests_mock: requests_mock.Mocker -) -> tuple[google_wifi.GoogleWifiAPI, dict[str, Any]]: +def setup_api(hass, data, requests_mock): """Set up 
API with fake data.""" resource = f"http://localhost{google_wifi.ENDPOINT}" now = datetime(1970, month=1, day=1) @@ -104,7 +101,7 @@ def setup_api( return api, sensor_dict -def fake_delay(hass: HomeAssistant, ha_delay: int) -> None: +def fake_delay(hass, ha_delay): """Fake delay to prevent update throttle.""" hass_now = dt_util.utcnow() shifted_time = hass_now + timedelta(seconds=ha_delay) @@ -223,9 +220,7 @@ def test_update_when_unavailable( assert sensor.state is None -def update_side_effect( - hass: HomeAssistant, requests_mock: requests_mock.Mocker -) -> None: +def update_side_effect(hass, requests_mock): """Mock representation of update function.""" api, sensor_dict = setup_api(hass, MOCK_DATA, requests_mock) api.data = None diff --git a/tests/components/govee_ble/__init__.py b/tests/components/govee_ble/__init__.py index 66c5b0b832c..60930d1dd0e 100644 --- a/tests/components/govee_ble/__init__.py +++ b/tests/components/govee_ble/__init__.py @@ -83,136 +83,3 @@ GVH5106_SERVICE_INFO = BluetoothServiceInfo( service_data={}, source="local", ) - - -GV5125_BUTTON_0_SERVICE_INFO = BluetoothServiceInfo( - name="GV51255367", - address="C1:37:37:32:0F:45", - rssi=-36, - manufacturer_data={ - 60552: b"\x01\n.\xaf\xd9085Sg\x01\x01", - 61320: b".\xaf\x00\x00b\\\xae\x92\x15\xb6\xa8\n\xd4\x81K\xcaK_s\xd9E40\x02", - }, - service_data={}, - service_uuids=[], - source="24:4C:AB:03:E6:B8", -) - -GV5125_BUTTON_1_SERVICE_INFO = BluetoothServiceInfo( - name="GV51255367", - address="C1:37:37:32:0F:45", - rssi=-36, - manufacturer_data={ - 60552: b"\x01\n.\xaf\xd9085Sg\x01\x01", - 61320: b".\xaf\x00\x00\xfb\x0e\xc9h\xd7\x05l\xaf*\xf3\x1b\xe8w\xf1\xe1\xe8\xe3\xa7\xf8\xc6", - }, - service_data={}, - service_uuids=[], - source="24:4C:AB:03:E6:B8", -) - - -GV5121_MOTION_SERVICE_INFO = BluetoothServiceInfo( - name="GV5121195A", - address="C1:37:37:32:0F:45", - rssi=-36, - manufacturer_data={ - 61320: b"Y\x94\x00\x00\xf0\xb9\x197\xaeP\xb67,\x86j\xc2\xf3\xd0a\xe7\x17\xc0,\xef" - }, - service_data={}, - service_uuids=[], - source="24:4C:AB:03:E6:B8", -) - - -GV5121_MOTION_SERVICE_INFO_2 = BluetoothServiceInfo( - name="GV5121195A", - address="C1:37:37:32:0F:45", - rssi=-36, - manufacturer_data={ - 61320: b"Y\x94\x00\x06\xa3f6e\xc8\xe6\xfdv\x04\xaf\xe7k\xbf\xab\xeb\xbf\xb3\xa3\xd5\x19" - }, - service_data={}, - service_uuids=[], - source="24:4C:AB:03:E6:B8", -) - - -GV5123_OPEN_SERVICE_INFO = BluetoothServiceInfo( - name="GV51230B3D", - address="C1:37:37:32:0F:45", - rssi=-36, - manufacturer_data={ - 61320: b"=\xec\x00\x00\xdeCw\xd5^U\xf9\x91In6\xbd\xc6\x7f\x8b,'\x06t\x97" - }, - service_data={}, - service_uuids=[], - source="24:4C:AB:03:E6:B8", -) - - -GV5123_CLOSED_SERVICE_INFO = BluetoothServiceInfo( - name="GV51230B3D", - address="C1:37:37:32:0F:45", - rssi=-36, - manufacturer_data={ - 61320: b"=\xec\x00\x01Y\xdbk\xd9\xbe\xd7\xaf\xf7*&\xaaK\xd7-\xfa\x94W>[\xe9" - }, - service_data={}, - service_uuids=[], - source="24:4C:AB:03:E6:B8", -) - - -GVH5124_SERVICE_INFO = BluetoothServiceInfo( - name="GV51242F68", - address="D3:32:39:37:2F:68", - rssi=-67, - manufacturer_data={ - 61320: b"\x08\xa2\x00\x01%\xc2YW\xfdzu\x0e\xf24\xa2\x18\xbb\x15F|[s{\x04" - }, - service_data={}, - service_uuids=[], - source="local", -) - -GVH5124_2_SERVICE_INFO = BluetoothServiceInfo( - name="GV51242F68", - address="D3:32:39:37:2F:68", - rssi=-67, - manufacturer_data={ - 61320: b"\x08\xa2\x00\x13^Sso\xaeC\x9aU\xcf\xd8\x02\x1b\xdf\xd5\xded;+\xd6\x13" - }, - service_data={}, - service_uuids=[], - source="local", -) - - 
-GVH5127_MOTION_SERVICE_INFO = BluetoothServiceInfo( - name="GVH51275E3F", - address="D0:C9:07:1B:5E:3F", - rssi=-61, - manufacturer_data={34819: b"\xec\x00\x01\x01\x01\x11"}, - service_data={}, - service_uuids=[], - source="Core Bluetooth", -) -GVH5127_PRESENT_SERVICE_INFO = BluetoothServiceInfo( - name="GVH51275E3F", - address="D0:C9:07:1B:5E:3F", - rssi=-60, - manufacturer_data={34819: b"\xec\x00\x01\x01\x01\x01"}, - service_data={}, - service_uuids=[], - source="Core Bluetooth", -) -GVH5127_ABSENT_SERVICE_INFO = BluetoothServiceInfo( - name="GVH51275E3F", - address="D0:C9:07:1B:5E:3F", - rssi=-53, - manufacturer_data={34819: b"\xec\x00\x01\x01\x00\x00"}, - service_data={}, - service_uuids=[], - source="Core Bluetooth", -) diff --git a/tests/components/govee_ble/test_binary_sensor.py b/tests/components/govee_ble/test_binary_sensor.py deleted file mode 100644 index cf8b54ef54f..00000000000 --- a/tests/components/govee_ble/test_binary_sensor.py +++ /dev/null @@ -1,84 +0,0 @@ -"""Test the Govee BLE binary_sensor.""" - -from homeassistant.components.govee_ble.const import CONF_DEVICE_TYPE, DOMAIN -from homeassistant.const import STATE_OFF, STATE_ON -from homeassistant.core import HomeAssistant - -from . import ( - GV5123_CLOSED_SERVICE_INFO, - GV5123_OPEN_SERVICE_INFO, - GVH5127_ABSENT_SERVICE_INFO, - GVH5127_MOTION_SERVICE_INFO, - GVH5127_PRESENT_SERVICE_INFO, -) - -from tests.common import MockConfigEntry -from tests.components.bluetooth import inject_bluetooth_service_info - - -async def test_window_sensor(hass: HomeAssistant) -> None: - """Test setting up creates the window sensor.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id=GV5123_OPEN_SERVICE_INFO.address, - data={CONF_DEVICE_TYPE: "H5123"}, - ) - entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert len(hass.states.async_all()) == 0 - inject_bluetooth_service_info(hass, GV5123_OPEN_SERVICE_INFO) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 2 - - motion_sensor = hass.states.get("binary_sensor.51230f45_window") - assert motion_sensor.state == STATE_ON - - inject_bluetooth_service_info(hass, GV5123_CLOSED_SERVICE_INFO) - await hass.async_block_till_done() - - motion_sensor = hass.states.get("binary_sensor.51230f45_window") - assert motion_sensor.state == STATE_OFF - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - -async def test_presence_sensor(hass: HomeAssistant) -> None: - """Test the presence sensor.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id=GVH5127_ABSENT_SERVICE_INFO.address, - data={CONF_DEVICE_TYPE: "H5127"}, - ) - entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert len(hass.states.async_all()) == 0 - inject_bluetooth_service_info(hass, GVH5127_ABSENT_SERVICE_INFO) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 2 - - motion_sensor = hass.states.get("binary_sensor.h51275e3f_motion") - assert motion_sensor.state == STATE_OFF - occupancy_sensor = hass.states.get("binary_sensor.h51275e3f_occupancy") - assert occupancy_sensor.state == STATE_OFF - - inject_bluetooth_service_info(hass, GVH5127_PRESENT_SERVICE_INFO) - await hass.async_block_till_done() - - motion_sensor = hass.states.get("binary_sensor.h51275e3f_motion") - assert motion_sensor.state == STATE_OFF - occupancy_sensor = 
hass.states.get("binary_sensor.h51275e3f_occupancy") - assert occupancy_sensor.state == STATE_ON - - inject_bluetooth_service_info(hass, GVH5127_MOTION_SERVICE_INFO) - await hass.async_block_till_done() - - motion_sensor = hass.states.get("binary_sensor.h51275e3f_motion") - assert motion_sensor.state == STATE_ON - occupancy_sensor = hass.states.get("binary_sensor.h51275e3f_occupancy") - assert occupancy_sensor.state == STATE_ON diff --git a/tests/components/govee_ble/test_config_flow.py b/tests/components/govee_ble/test_config_flow.py index eb0719f832c..0c340c01f2a 100644 --- a/tests/components/govee_ble/test_config_flow.py +++ b/tests/components/govee_ble/test_config_flow.py @@ -3,7 +3,7 @@ from unittest.mock import patch from homeassistant import config_entries -from homeassistant.components.govee_ble.const import CONF_DEVICE_TYPE, DOMAIN +from homeassistant.components.govee_ble.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -29,7 +29,7 @@ async def test_async_step_bluetooth_valid_device(hass: HomeAssistant) -> None: ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "H5075 2762" - assert result2["data"] == {CONF_DEVICE_TYPE: "H5075"} + assert result2["data"] == {} assert result2["result"].unique_id == "61DE521B-F0BF-9F44-64D4-75BBE1738105" @@ -75,7 +75,7 @@ async def test_async_step_user_with_found_devices(hass: HomeAssistant) -> None: ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "H5177 2EC8" - assert result2["data"] == {CONF_DEVICE_TYPE: "H5177"} + assert result2["data"] == {} assert result2["result"].unique_id == "4125DDBA-2774-4851-9889-6AADDD4CAC3D" @@ -198,7 +198,7 @@ async def test_async_step_user_takes_precedence_over_discovery( ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "H5177 2EC8" - assert result2["data"] == {CONF_DEVICE_TYPE: "H5177"} + assert result2["data"] == {} assert result2["result"].unique_id == "4125DDBA-2774-4851-9889-6AADDD4CAC3D" # Verify the original one was aborted diff --git a/tests/components/govee_ble/test_event.py b/tests/components/govee_ble/test_event.py deleted file mode 100644 index c41cdad3c89..00000000000 --- a/tests/components/govee_ble/test_event.py +++ /dev/null @@ -1,108 +0,0 @@ -"""Test the Govee BLE events.""" - -from homeassistant.components.govee_ble.const import CONF_DEVICE_TYPE, DOMAIN -from homeassistant.const import STATE_UNKNOWN -from homeassistant.core import HomeAssistant - -from . 
import ( - GV5121_MOTION_SERVICE_INFO, - GV5121_MOTION_SERVICE_INFO_2, - GV5125_BUTTON_0_SERVICE_INFO, - GV5125_BUTTON_1_SERVICE_INFO, - GVH5124_2_SERVICE_INFO, - GVH5124_SERVICE_INFO, -) - -from tests.common import MockConfigEntry -from tests.components.bluetooth import inject_bluetooth_service_info - - -async def test_motion_sensor(hass: HomeAssistant) -> None: - """Test setting up creates the motion sensor.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id=GV5121_MOTION_SERVICE_INFO.address, - data={CONF_DEVICE_TYPE: "H5121"}, - ) - entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert len(hass.states.async_all()) == 1 - inject_bluetooth_service_info(hass, GV5121_MOTION_SERVICE_INFO) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 2 - - motion_sensor = hass.states.get("event.h5121_motion") - first_time = motion_sensor.state - assert motion_sensor.state != STATE_UNKNOWN - - inject_bluetooth_service_info(hass, GV5121_MOTION_SERVICE_INFO_2) - await hass.async_block_till_done() - - motion_sensor = hass.states.get("event.h5121_motion") - assert motion_sensor.state != first_time - assert motion_sensor.state != STATE_UNKNOWN - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - -async def test_button(hass: HomeAssistant) -> None: - """Test setting up creates the buttons.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id=GV5125_BUTTON_1_SERVICE_INFO.address, - data={CONF_DEVICE_TYPE: "H5125"}, - ) - entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert len(hass.states.async_all()) == 6 - inject_bluetooth_service_info(hass, GV5125_BUTTON_1_SERVICE_INFO) - await hass.async_block_till_done() - - button_1 = hass.states.get("event.h5125_button_1") - assert button_1.state == STATE_UNKNOWN - - inject_bluetooth_service_info(hass, GV5125_BUTTON_0_SERVICE_INFO) - await hass.async_block_till_done() - button_1 = hass.states.get("event.h5125_button_1") - assert button_1.state != STATE_UNKNOWN - assert len(hass.states.async_all()) == 7 - - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - -async def test_vibration_sensor(hass: HomeAssistant) -> None: - """Test setting up creates the vibration sensor.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id=GVH5124_SERVICE_INFO.address, - data={CONF_DEVICE_TYPE: "H5124"}, - ) - entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert len(hass.states.async_all()) == 1 - inject_bluetooth_service_info(hass, GVH5124_SERVICE_INFO) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 2 - - motion_sensor = hass.states.get("event.h5124_vibration") - first_time = motion_sensor.state - assert motion_sensor.state != STATE_UNKNOWN - - inject_bluetooth_service_info(hass, GVH5124_2_SERVICE_INFO) - await hass.async_block_till_done() - - motion_sensor = hass.states.get("event.h5124_vibration") - assert motion_sensor.state != first_time - assert motion_sensor.state != STATE_UNKNOWN - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/govee_light_local/conftest.py b/tests/components/govee_light_local/conftest.py index 6a8ee99b764..90a9f8e6827 100644 --- 
a/tests/components/govee_light_local/conftest.py +++ b/tests/components/govee_light_local/conftest.py @@ -1,11 +1,11 @@ """Tests configuration for Govee Local API.""" from asyncio import Event -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from govee_local_api import GoveeLightCapability import pytest +from typing_extensions import Generator from homeassistant.components.govee_light_local.coordinator import GoveeController diff --git a/tests/components/gpsd/conftest.py b/tests/components/gpsd/conftest.py index c15ef7f0258..c323365e8fd 100644 --- a/tests/components/gpsd/conftest.py +++ b/tests/components/gpsd/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the GPSD tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/gpsd/test_config_flow.py b/tests/components/gpsd/test_config_flow.py index 4d832e120e4..6f330571076 100644 --- a/tests/components/gpsd/test_config_flow.py +++ b/tests/components/gpsd/test_config_flow.py @@ -6,7 +6,7 @@ from gps3.agps3threaded import GPSD_PORT as DEFAULT_PORT from homeassistant import config_entries from homeassistant.components.gpsd.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -43,7 +43,10 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: async def test_connection_error(hass: HomeAssistant) -> None: """Test connection to host error.""" - with patch("socket.socket", side_effect=OSError): + with patch("socket.socket") as mock_socket: + mock_connect = mock_socket.return_value.connect + mock_connect.side_effect = OSError + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, @@ -52,3 +55,23 @@ async def test_connection_error(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" + + +async def test_import(hass: HomeAssistant) -> None: + """Test import step.""" + with patch("homeassistant.components.gpsd.config_flow.socket") as mock_socket: + mock_connect = mock_socket.return_value.connect + mock_connect.return_value = None + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_HOST: HOST, CONF_PORT: 1234, CONF_NAME: "MyGPS"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "MyGPS" + assert result["data"] == { + CONF_HOST: HOST, + CONF_NAME: "MyGPS", + CONF_PORT: 1234, + } diff --git a/tests/components/gpslogger/test_init.py b/tests/components/gpslogger/test_init.py index aff8b20dc52..68b95df1702 100644 --- a/tests/components/gpslogger/test_init.py +++ b/tests/components/gpslogger/test_init.py @@ -11,9 +11,9 @@ from homeassistant.components import gpslogger, zone from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN from homeassistant.components.device_tracker.legacy import Device from homeassistant.components.gpslogger import DOMAIN, TRACKER_UPDATE +from homeassistant.config import async_process_ha_core_config from homeassistant.const import STATE_HOME, STATE_NOT_HOME from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from 
homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import DATA_DISPATCHER @@ -45,7 +45,7 @@ async def gpslogger_client( @pytest.fixture(autouse=True) -async def setup_zones(hass: HomeAssistant) -> None: +async def setup_zones(hass): """Set up Zone config in HA.""" assert await async_setup_component( hass, @@ -63,7 +63,7 @@ async def setup_zones(hass: HomeAssistant) -> None: @pytest.fixture -async def webhook_id(hass: HomeAssistant, gpslogger_client: TestClient) -> str: +async def webhook_id(hass, gpslogger_client): """Initialize the GPSLogger component and get the webhook_id.""" await async_process_ha_core_config( hass, @@ -81,9 +81,7 @@ async def webhook_id(hass: HomeAssistant, gpslogger_client: TestClient) -> str: return result["result"].data["webhook_id"] -async def test_missing_data( - hass: HomeAssistant, gpslogger_client: TestClient, webhook_id: str -) -> None: +async def test_missing_data(hass: HomeAssistant, gpslogger_client, webhook_id) -> None: """Test missing data.""" url = f"/api/webhook/{webhook_id}" @@ -113,8 +111,8 @@ async def test_enter_and_exit( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - gpslogger_client: TestClient, - webhook_id: str, + gpslogger_client, + webhook_id, ) -> None: """Test when there is a known zone.""" url = f"/api/webhook/{webhook_id}" @@ -150,7 +148,7 @@ async def test_enter_and_exit( async def test_enter_with_attrs( - hass: HomeAssistant, gpslogger_client: TestClient, webhook_id: str + hass: HomeAssistant, gpslogger_client, webhook_id ) -> None: """Test when additional attributes are present.""" url = f"/api/webhook/{webhook_id}" @@ -212,7 +210,7 @@ async def test_enter_with_attrs( reason="The device_tracker component does not support unloading yet." 
) async def test_load_unload_entry( - hass: HomeAssistant, gpslogger_client: TestClient, webhook_id: str + hass: HomeAssistant, gpslogger_client, webhook_id ) -> None: """Test that the appropriate dispatch signals are added and removed.""" url = f"/api/webhook/{webhook_id}" diff --git a/tests/components/gree/conftest.py b/tests/components/gree/conftest.py index a9e2fc9e5d4..88bcaea33c2 100644 --- a/tests/components/gree/conftest.py +++ b/tests/components/gree/conftest.py @@ -1,9 +1,9 @@ """Pytest module configuration.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from .common import FakeDiscovery, build_device_mock diff --git a/tests/components/gree/test_bridge.py b/tests/components/gree/test_bridge.py index ae2f0c74236..37b0b0dc15e 100644 --- a/tests/components/gree/test_bridge.py +++ b/tests/components/gree/test_bridge.py @@ -5,12 +5,8 @@ from datetime import timedelta from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN, HVACMode -from homeassistant.components.gree.const import ( - COORDINATORS, - DOMAIN as GREE, - UPDATE_INTERVAL, -) +from homeassistant.components.climate import DOMAIN +from homeassistant.components.gree.const import COORDINATORS, DOMAIN as GREE from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util @@ -18,8 +14,8 @@ from .common import async_setup_gree, build_device_mock from tests.common import async_fire_time_changed -ENTITY_ID_1 = f"{CLIMATE_DOMAIN}.fake_device_1" -ENTITY_ID_2 = f"{CLIMATE_DOMAIN}.fake_device_2" +ENTITY_ID_1 = f"{DOMAIN}.fake_device_1" +ENTITY_ID_2 = f"{DOMAIN}.fake_device_2" @pytest.fixture @@ -46,7 +42,7 @@ async def test_discovery_after_setup( await hass.async_block_till_done() assert discovery.return_value.scan_count == 1 - assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 + assert len(hass.states.async_all(DOMAIN)) == 2 device_infos = [x.device.device_info for x in hass.data[GREE][COORDINATORS]] assert device_infos[0].ip == "1.1.1.1" @@ -68,35 +64,8 @@ async def test_discovery_after_setup( await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 - assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 + assert len(hass.states.async_all(DOMAIN)) == 2 device_infos = [x.device.device_info for x in hass.data[GREE][COORDINATORS]] assert device_infos[0].ip == "1.1.1.2" assert device_infos[1].ip == "2.2.2.1" - - -async def test_coordinator_updates( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device -) -> None: - """Test gree devices update their state.""" - await async_setup_gree(hass) - await hass.async_block_till_done() - - assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 1 - - callback = device().add_handler.call_args_list[0][0][1] - - async def fake_update_state(*args) -> None: - """Fake update state.""" - device().power = True - callback() - - device().update_state.side_effect = fake_update_state - - freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID_1) - assert state is not None - assert state.state != HVACMode.OFF diff --git a/tests/components/gree/test_climate.py b/tests/components/gree/test_climate.py index 0cb187f5a60..0bd767e4f35 100644 --- a/tests/components/gree/test_climate.py +++ b/tests/components/gree/test_climate.py @@ -4,24 +4,18 @@ from datetime import timedelta from 
unittest.mock import DEFAULT as DEFAULT_MOCK, AsyncMock, patch from freezegun.api import FrozenDateTimeFactory -from greeclimate.device import ( - TEMP_MAX, - TEMP_MAX_F, - TEMP_MIN, - TEMP_MIN_F, - HorizontalSwing, - VerticalSwing, -) +from greeclimate.device import HorizontalSwing, VerticalSwing from greeclimate.exceptions import DeviceNotBoundError, DeviceTimeoutError import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.climate import ( + ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, ATTR_HVAC_MODE, ATTR_PRESET_MODE, ATTR_SWING_MODE, - DOMAIN as CLIMATE_DOMAIN, + DOMAIN, FAN_AUTO, FAN_HIGH, FAN_LOW, @@ -46,18 +40,11 @@ from homeassistant.components.gree.climate import ( FAN_MODES_REVERSE, HVAC_MODES, HVAC_MODES_REVERSE, - GreeClimateEntity, -) -from homeassistant.components.gree.const import ( - DISCOVERY_SCAN_INTERVAL, - FAN_MEDIUM_HIGH, - FAN_MEDIUM_LOW, - UPDATE_INTERVAL, ) +from homeassistant.components.gree.const import FAN_MEDIUM_HIGH, FAN_MEDIUM_LOW from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, - ATTR_UNIT_OF_MEASUREMENT, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_UNAVAILABLE, @@ -66,12 +53,19 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er +import homeassistant.util.dt as dt_util from .common import async_setup_gree, build_device_mock from tests.common import async_fire_time_changed -ENTITY_ID = f"{CLIMATE_DOMAIN}.fake_device_1" +ENTITY_ID = f"{DOMAIN}.fake_device_1" + + +@pytest.fixture +def mock_now(): + """Fixture for dtutil.now.""" + return dt_util.utcnow() async def test_discovery_called_once(hass: HomeAssistant, discovery, device) -> None: @@ -98,11 +92,11 @@ async def test_discovery_setup(hass: HomeAssistant, discovery, device) -> None: await async_setup_gree(hass) await hass.async_block_till_done() assert discovery.call_count == 1 - assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 + assert len(hass.states.async_all(DOMAIN)) == 2 async def test_discovery_setup_connection_error( - hass: HomeAssistant, discovery, device + hass: HomeAssistant, discovery, device, mock_now ) -> None: """Test gree integration is setup.""" MockDevice1 = build_device_mock( @@ -117,14 +111,14 @@ async def test_discovery_setup_connection_error( await async_setup_gree(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 1 + assert len(hass.states.async_all(DOMAIN)) == 1 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE async def test_discovery_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now ) -> None: """Test gree devices don't change after multiple discoveries.""" MockDevice1 = build_device_mock( @@ -140,25 +134,27 @@ async def test_discovery_after_setup( discovery.return_value.mock_devices = [MockDevice1, MockDevice2] device.side_effect = [MockDevice1, MockDevice2] - await async_setup_gree(hass) # Update 1 + await async_setup_gree(hass) + await hass.async_block_till_done() assert discovery.return_value.scan_count == 1 - assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 + assert len(hass.states.async_all(DOMAIN)) == 2 # rediscover the same devices shouldn't change anything discovery.return_value.mock_devices = [MockDevice1, MockDevice2] device.side_effect = 
[MockDevice1, MockDevice2] - freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) - async_fire_time_changed(hass) + next_update = mock_now + timedelta(minutes=6) + freezer.move_to(next_update) + async_fire_time_changed(hass, next_update) await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 - assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 + assert len(hass.states.async_all(DOMAIN)) == 2 async def test_discovery_add_device_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now ) -> None: """Test gree devices can be added after initial setup.""" MockDevice1 = build_device_mock( @@ -174,28 +170,27 @@ async def test_discovery_add_device_after_setup( discovery.return_value.mock_devices = [MockDevice1] device.side_effect = [MockDevice1] - await async_setup_gree(hass) # Update 1 - await async_setup_gree(hass) await hass.async_block_till_done() assert discovery.return_value.scan_count == 1 - assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 1 + assert len(hass.states.async_all(DOMAIN)) == 1 # rediscover the same devices shouldn't change anything discovery.return_value.mock_devices = [MockDevice2] device.side_effect = [MockDevice2] - freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) - async_fire_time_changed(hass) + next_update = mock_now + timedelta(minutes=6) + freezer.move_to(next_update) + async_fire_time_changed(hass, next_update) await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 - assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 + assert len(hass.states.async_all(DOMAIN)) == 2 async def test_discovery_device_bind_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now ) -> None: """Test gree devices can be added after a late device bind.""" MockDevice1 = build_device_mock( @@ -207,9 +202,10 @@ async def test_discovery_device_bind_after_setup( discovery.return_value.mock_devices = [MockDevice1] device.return_value = MockDevice1 - await async_setup_gree(hass) # Update 1 + await async_setup_gree(hass) + await hass.async_block_till_done() - assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 1 + assert len(hass.states.async_all(DOMAIN)) == 1 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE @@ -218,8 +214,9 @@ async def test_discovery_device_bind_after_setup( MockDevice1.bind.side_effect = None MockDevice1.update_state.side_effect = None - freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) - async_fire_time_changed(hass) + next_update = mock_now + timedelta(minutes=5) + freezer.move_to(next_update) + async_fire_time_changed(hass, next_update) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -227,7 +224,7 @@ async def test_discovery_device_bind_after_setup( async def test_update_connection_failure( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device + hass: HomeAssistant, freezer: FrozenDateTimeFactory, device, mock_now ) -> None: """Testing update hvac connection failure exception.""" device().update_state.side_effect = [ @@ -236,32 +233,36 @@ async def test_update_connection_failure( DeviceTimeoutError, ] - await async_setup_gree(hass) # Update 1 - - async def run_update(): - freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) - async_fire_time_changed(hass) + await 
async_setup_gree(hass) + next_update = mock_now + timedelta(minutes=5) + freezer.move_to(next_update) + async_fire_time_changed(hass, next_update) await hass.async_block_till_done() - # Update 2 - await run_update() + # First update to make the device available state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE - # Update 3 - await run_update() + next_update = mock_now + timedelta(minutes=10) + freezer.move_to(next_update) + async_fire_time_changed(hass, next_update) + await hass.async_block_till_done() - # Update 4 - await run_update() + next_update = mock_now + timedelta(minutes=15) + freezer.move_to(next_update) + async_fire_time_changed(hass, next_update) + await hass.async_block_till_done() + + # Then two more update failures to make the device unavailable state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE -async def test_update_connection_send_failure_recovery( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device +async def test_update_connection_failure_recovery( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now ) -> None: """Testing update hvac connection failure recovery.""" device().update_state.side_effect = [ @@ -270,27 +271,31 @@ async def test_update_connection_send_failure_recovery( DEFAULT_MOCK, ] - await async_setup_gree(hass) # Update 1 - - async def run_update(): - freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) - async_fire_time_changed(hass) + await async_setup_gree(hass) + # First update becomes unavailable + next_update = mock_now + timedelta(minutes=5) + freezer.move_to(next_update) + async_fire_time_changed(hass, next_update) await hass.async_block_till_done() - await run_update() # Update 2 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE - await run_update() # Update 3 + # Second update restores the connection + next_update = mock_now + timedelta(minutes=10) + freezer.move_to(next_update) + async_fire_time_changed(hass, next_update) + await hass.async_block_till_done() + state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE async def test_update_unhandled_exception( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now ) -> None: """Testing update hvac connection unhandled response exception.""" device().update_state.side_effect = [DEFAULT_MOCK, Exception] @@ -301,8 +306,9 @@ async def test_update_unhandled_exception( assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE - freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) - async_fire_time_changed(hass) + next_update = mock_now + timedelta(minutes=10) + freezer.move_to(next_update) + async_fire_time_changed(hass, next_update) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -311,13 +317,15 @@ async def test_update_unhandled_exception( async def test_send_command_device_timeout( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now ) -> None: """Test for sending power on command to the device with a device timeout.""" await async_setup_gree(hass) - freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) - async_fire_time_changed(hass) + # First update to make the device available + 
next_update = mock_now + timedelta(minutes=5) + freezer.move_to(next_update) + async_fire_time_changed(hass, next_update) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -328,7 +336,7 @@ async def test_send_command_device_timeout( # Send failure should not raise exceptions or change device state await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, @@ -339,45 +347,12 @@ async def test_send_command_device_timeout( assert state.state != STATE_UNAVAILABLE -async def test_unresponsive_device( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device -) -> None: - """Test for unresponsive device.""" - await async_setup_gree(hass) - - async def run_update(): - freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # Update 2 - await run_update() - state = hass.states.get(ENTITY_ID) - assert state.name == "fake-device-1" - assert state.state != STATE_UNAVAILABLE - - # Update 3, 4, 5 - await run_update() - await run_update() - await run_update() - state = hass.states.get(ENTITY_ID) - assert state.name == "fake-device-1" - assert state.state == STATE_UNAVAILABLE - - # Receiving update from device will reset the state to available again - device().device_state_updated("test") - await run_update() - state = hass.states.get(ENTITY_ID) - assert state.name == "fake-device-1" - assert state.state != STATE_UNAVAILABLE - - -async def test_send_power_on(hass: HomeAssistant, discovery, device) -> None: +async def test_send_power_on(hass: HomeAssistant, discovery, device, mock_now) -> None: """Test for sending power on command to the device.""" await async_setup_gree(hass) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, @@ -389,7 +364,7 @@ async def test_send_power_on(hass: HomeAssistant, discovery, device) -> None: async def test_send_power_off_device_timeout( - hass: HomeAssistant, discovery, device + hass: HomeAssistant, discovery, device, mock_now ) -> None: """Test for sending power off command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -397,7 +372,7 @@ async def test_send_power_off_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, @@ -410,7 +385,7 @@ async def test_send_power_off_device_timeout( @pytest.mark.parametrize( ("units", "temperature"), - [(UnitOfTemperature.CELSIUS, 26), (UnitOfTemperature.FAHRENHEIT, 73)], + [(UnitOfTemperature.CELSIUS, 26), (UnitOfTemperature.FAHRENHEIT, 74)], ) async def test_send_target_temperature( hass: HomeAssistant, discovery, device, units, temperature @@ -430,16 +405,8 @@ async def test_send_target_temperature( # Make sure we're trying to test something that isn't the default assert fake_device.current_temperature != temperature - hass.states.async_set( - ENTITY_ID, - "off", - { - ATTR_UNIT_OF_MEASUREMENT: units, - }, - ) - await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: temperature}, blocking=True, @@ -448,6 +415,10 @@ async def test_send_target_temperature( state = hass.states.get(ENTITY_ID) assert state is not None assert state.attributes.get(ATTR_TEMPERATURE) == temperature + assert ( + state.attributes.get(ATTR_CURRENT_TEMPERATURE) + == 
fake_device.current_temperature + ) assert state.state == HVAC_MODES.get(fake_device.mode) # Reset config temperature_unit back to CELSIUS, required for @@ -473,7 +444,7 @@ async def test_send_target_temperature_with_hvac_mode( await async_setup_gree(hass) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_ID, @@ -491,11 +462,7 @@ async def test_send_target_temperature_with_hvac_mode( @pytest.mark.parametrize( ("units", "temperature"), - [ - (UnitOfTemperature.CELSIUS, 25), - (UnitOfTemperature.FAHRENHEIT, 73), - (UnitOfTemperature.FAHRENHEIT, 74), - ], + [(UnitOfTemperature.CELSIUS, 25), (UnitOfTemperature.FAHRENHEIT, 74)], ) async def test_send_target_temperature_device_timeout( hass: HomeAssistant, discovery, device, units, temperature @@ -509,7 +476,7 @@ async def test_send_target_temperature_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: temperature}, blocking=True, @@ -525,11 +492,7 @@ async def test_send_target_temperature_device_timeout( @pytest.mark.parametrize( ("units", "temperature"), - [ - (UnitOfTemperature.CELSIUS, 25), - (UnitOfTemperature.FAHRENHEIT, 73), - (UnitOfTemperature.FAHRENHEIT, 74), - ], + [(UnitOfTemperature.CELSIUS, 25), (UnitOfTemperature.FAHRENHEIT, 74)], ) async def test_update_target_temperature( hass: HomeAssistant, discovery, device, units, temperature @@ -542,13 +505,6 @@ async def test_update_target_temperature( await async_setup_gree(hass) - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: temperature}, - blocking=True, - ) - state = hass.states.get(ENTITY_ID) assert state is not None assert state.attributes.get(ATTR_TEMPERATURE) == temperature @@ -560,12 +516,14 @@ async def test_update_target_temperature( @pytest.mark.parametrize( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) -async def test_send_preset_mode(hass: HomeAssistant, discovery, device, preset) -> None: +async def test_send_preset_mode( + hass: HomeAssistant, discovery, device, mock_now, preset +) -> None: """Test for sending preset mode command to the device.""" await async_setup_gree(hass) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset}, blocking=True, @@ -576,13 +534,15 @@ async def test_send_preset_mode(hass: HomeAssistant, discovery, device, preset) assert state.attributes.get(ATTR_PRESET_MODE) == preset -async def test_send_invalid_preset_mode(hass: HomeAssistant, discovery, device) -> None: +async def test_send_invalid_preset_mode( + hass: HomeAssistant, discovery, device, mock_now +) -> None: """Test for sending preset mode command to the device.""" await async_setup_gree(hass) with pytest.raises(ServiceValidationError): await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: "invalid"}, blocking=True, @@ -597,7 +557,7 @@ async def test_send_invalid_preset_mode(hass: HomeAssistant, discovery, device) "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) async def test_send_preset_mode_device_timeout( - hass: HomeAssistant, discovery, device, preset + hass: HomeAssistant, discovery, device, mock_now, preset ) -> None: """Test for sending preset mode command to the device with a device timeout.""" 
device().push_state_update.side_effect = DeviceTimeoutError @@ -605,7 +565,7 @@ async def test_send_preset_mode_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset}, blocking=True, @@ -620,7 +580,7 @@ async def test_send_preset_mode_device_timeout( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) async def test_update_preset_mode( - hass: HomeAssistant, discovery, device, preset + hass: HomeAssistant, discovery, device, mock_now, preset ) -> None: """Test for updating preset mode from the device.""" device().steady_heat = preset == PRESET_AWAY @@ -647,13 +607,13 @@ async def test_update_preset_mode( ], ) async def test_send_hvac_mode( - hass: HomeAssistant, discovery, device, hvac_mode + hass: HomeAssistant, discovery, device, mock_now, hvac_mode ) -> None: """Test for sending hvac mode command to the device.""" await async_setup_gree(hass) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: hvac_mode}, blocking=True, @@ -669,7 +629,7 @@ async def test_send_hvac_mode( [HVACMode.AUTO, HVACMode.COOL, HVACMode.DRY, HVACMode.FAN_ONLY, HVACMode.HEAT], ) async def test_send_hvac_mode_device_timeout( - hass: HomeAssistant, discovery, device, hvac_mode + hass: HomeAssistant, discovery, device, mock_now, hvac_mode ) -> None: """Test for sending hvac mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -677,7 +637,7 @@ async def test_send_hvac_mode_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: hvac_mode}, blocking=True, @@ -700,7 +660,7 @@ async def test_send_hvac_mode_device_timeout( ], ) async def test_update_hvac_mode( - hass: HomeAssistant, discovery, device, hvac_mode + hass: HomeAssistant, discovery, device, mock_now, hvac_mode ) -> None: """Test for updating hvac mode from the device.""" device().power = hvac_mode != HVACMode.OFF @@ -717,12 +677,14 @@ async def test_update_hvac_mode( "fan_mode", [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) -async def test_send_fan_mode(hass: HomeAssistant, discovery, device, fan_mode) -> None: +async def test_send_fan_mode( + hass: HomeAssistant, discovery, device, mock_now, fan_mode +) -> None: """Test for sending fan mode command to the device.""" await async_setup_gree(hass) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: fan_mode}, blocking=True, @@ -733,13 +695,15 @@ async def test_send_fan_mode(hass: HomeAssistant, discovery, device, fan_mode) - assert state.attributes.get(ATTR_FAN_MODE) == fan_mode -async def test_send_invalid_fan_mode(hass: HomeAssistant, discovery, device) -> None: +async def test_send_invalid_fan_mode( + hass: HomeAssistant, discovery, device, mock_now +) -> None: """Test for sending fan mode command to the device.""" await async_setup_gree(hass) with pytest.raises(ServiceValidationError): await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: "invalid"}, blocking=True, @@ -755,7 +719,7 @@ async def test_send_invalid_fan_mode(hass: HomeAssistant, discovery, device) -> [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, 
FAN_HIGH], ) async def test_send_fan_mode_device_timeout( - hass: HomeAssistant, discovery, device, fan_mode + hass: HomeAssistant, discovery, device, mock_now, fan_mode ) -> None: """Test for sending fan mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -763,7 +727,7 @@ async def test_send_fan_mode_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: fan_mode}, blocking=True, @@ -779,7 +743,7 @@ async def test_send_fan_mode_device_timeout( [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) async def test_update_fan_mode( - hass: HomeAssistant, discovery, device, fan_mode + hass: HomeAssistant, discovery, device, mock_now, fan_mode ) -> None: """Test for updating fan mode from the device.""" device().fan_speed = FAN_MODES_REVERSE.get(fan_mode) @@ -795,13 +759,13 @@ async def test_update_fan_mode( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_send_swing_mode( - hass: HomeAssistant, discovery, device, swing_mode + hass: HomeAssistant, discovery, device, mock_now, swing_mode ) -> None: """Test for sending swing mode command to the device.""" await async_setup_gree(hass) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: swing_mode}, blocking=True, @@ -812,13 +776,15 @@ async def test_send_swing_mode( assert state.attributes.get(ATTR_SWING_MODE) == swing_mode -async def test_send_invalid_swing_mode(hass: HomeAssistant, discovery, device) -> None: +async def test_send_invalid_swing_mode( + hass: HomeAssistant, discovery, device, mock_now +) -> None: """Test for sending swing mode command to the device.""" await async_setup_gree(hass) with pytest.raises(ServiceValidationError): await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: "invalid"}, blocking=True, @@ -833,7 +799,7 @@ async def test_send_invalid_swing_mode(hass: HomeAssistant, discovery, device) - "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_send_swing_mode_device_timeout( - hass: HomeAssistant, discovery, device, swing_mode + hass: HomeAssistant, discovery, device, mock_now, swing_mode ) -> None: """Test for sending swing mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -841,7 +807,7 @@ async def test_send_swing_mode_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: swing_mode}, blocking=True, @@ -856,7 +822,7 @@ async def test_send_swing_mode_device_timeout( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_update_swing_mode( - hass: HomeAssistant, discovery, device, swing_mode + hass: HomeAssistant, discovery, device, mock_now, swing_mode ) -> None: """Test for updating swing mode from the device.""" device().horizontal_swing = ( @@ -877,41 +843,7 @@ async def test_update_swing_mode( assert state.attributes.get(ATTR_SWING_MODE) == swing_mode -async def test_coordinator_update_handler( - hass: HomeAssistant, discovery, device -) -> None: - """Test for coordinator update handler.""" - await async_setup_gree(hass) - await hass.async_block_till_done() - - entity: 
GreeClimateEntity = hass.data[CLIMATE_DOMAIN].get_entity(ENTITY_ID) - assert entity is not None - - # Initial state - assert entity.temperature_unit == UnitOfTemperature.CELSIUS - assert entity.min_temp == TEMP_MIN - assert entity.max_temp == TEMP_MAX - - # Set unit to FAHRENHEIT - device().temperature_units = 1 - entity.coordinator.async_set_updated_data(UnitOfTemperature.FAHRENHEIT) - await hass.async_block_till_done() - - assert entity.temperature_unit == UnitOfTemperature.FAHRENHEIT - assert entity.min_temp == TEMP_MIN_F - assert entity.max_temp == TEMP_MAX_F - - # Set unit back to CELSIUS - device().temperature_units = 0 - entity.coordinator.async_set_updated_data(UnitOfTemperature.CELSIUS) - await hass.async_block_till_done() - - assert entity.temperature_unit == UnitOfTemperature.CELSIUS - assert entity.min_temp == TEMP_MIN - assert entity.max_temp == TEMP_MAX - - -@patch("homeassistant.components.gree.PLATFORMS", [CLIMATE_DOMAIN]) +@patch("homeassistant.components.gree.PLATFORMS", [DOMAIN]) async def test_registry_settings( hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion ) -> None: @@ -922,7 +854,7 @@ async def test_registry_settings( assert entries == snapshot -@patch("homeassistant.components.gree.PLATFORMS", [CLIMATE_DOMAIN]) +@patch("homeassistant.components.gree.PLATFORMS", [DOMAIN]) async def test_entity_states(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: """Test for entity registry settings (unique_id).""" await async_setup_gree(hass) diff --git a/tests/components/gree/test_switch.py b/tests/components/gree/test_switch.py index e9491796bdf..c5684abbf6f 100644 --- a/tests/components/gree/test_switch.py +++ b/tests/components/gree/test_switch.py @@ -7,7 +7,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.gree.const import DOMAIN as GREE_DOMAIN -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.components.switch import DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TOGGLE, @@ -22,23 +22,23 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -ENTITY_ID_LIGHT_PANEL = f"{SWITCH_DOMAIN}.fake_device_1_panel_light" -ENTITY_ID_HEALTH_MODE = f"{SWITCH_DOMAIN}.fake_device_1_health_mode" -ENTITY_ID_QUIET = f"{SWITCH_DOMAIN}.fake_device_1_quiet" -ENTITY_ID_FRESH_AIR = f"{SWITCH_DOMAIN}.fake_device_1_fresh_air" -ENTITY_ID_XFAN = f"{SWITCH_DOMAIN}.fake_device_1_xfan" +ENTITY_ID_LIGHT_PANEL = f"{DOMAIN}.fake_device_1_panel_light" +ENTITY_ID_HEALTH_MODE = f"{DOMAIN}.fake_device_1_health_mode" +ENTITY_ID_QUIET = f"{DOMAIN}.fake_device_1_quiet" +ENTITY_ID_FRESH_AIR = f"{DOMAIN}.fake_device_1_fresh_air" +ENTITY_ID_XFAN = f"{DOMAIN}.fake_device_1_xfan" async def async_setup_gree(hass: HomeAssistant) -> MockConfigEntry: """Set up the gree switch platform.""" entry = MockConfigEntry(domain=GREE_DOMAIN) entry.add_to_hass(hass) - await async_setup_component(hass, GREE_DOMAIN, {GREE_DOMAIN: {SWITCH_DOMAIN: {}}}) + await async_setup_component(hass, GREE_DOMAIN, {GREE_DOMAIN: {DOMAIN: {}}}) await hass.async_block_till_done() return entry -@patch("homeassistant.components.gree.PLATFORMS", [SWITCH_DOMAIN]) +@patch("homeassistant.components.gree.PLATFORMS", [DOMAIN]) async def test_registry_settings( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -67,7 +67,7 @@ async def test_send_switch_on(hass: HomeAssistant, entity: str) -> None: await async_setup_gree(hass) await hass.services.async_call( - 
SWITCH_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -98,7 +98,7 @@ async def test_send_switch_on_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -125,7 +125,7 @@ async def test_send_switch_off(hass: HomeAssistant, entity: str) -> None: await async_setup_gree(hass) await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -153,7 +153,7 @@ async def test_send_switch_toggle(hass: HomeAssistant, entity: str) -> None: # Turn the service on first await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -165,7 +165,7 @@ async def test_send_switch_toggle(hass: HomeAssistant, entity: str) -> None: # Toggle it off await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -177,7 +177,7 @@ async def test_send_switch_toggle(hass: HomeAssistant, entity: str) -> None: # Toggle is back on await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -197,5 +197,5 @@ async def test_entity_state( """Test for entity registry settings (disabled_by, unique_id).""" await async_setup_gree(hass) - state = hass.states.async_all(SWITCH_DOMAIN) + state = hass.states.async_all(DOMAIN) assert state == snapshot diff --git a/tests/components/greeneye_monitor/conftest.py b/tests/components/greeneye_monitor/conftest.py index 343a15346e7..ad8a98ce3fe 100644 --- a/tests/components/greeneye_monitor/conftest.py +++ b/tests/components/greeneye_monitor/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for testing greeneye_monitor.""" -from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.greeneye_monitor import DOMAIN from homeassistant.components.sensor import SensorDeviceClass diff --git a/tests/components/group/common.py b/tests/components/group/common.py index a9b6356418c..86fe537a776 100644 --- a/tests/components/group/common.py +++ b/tests/components/group/common.py @@ -13,32 +13,32 @@ from homeassistant.components.group import ( SERVICE_SET, ) from homeassistant.const import ATTR_ICON, ATTR_NAME, SERVICE_RELOAD -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import callback from homeassistant.loader import bind_hass @bind_hass -def reload(hass: HomeAssistant) -> None: +def reload(hass): """Reload the automation from config.""" hass.add_job(async_reload, hass) @callback @bind_hass -def async_reload(hass: HomeAssistant) -> None: +def async_reload(hass): """Reload the automation from config.""" hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_RELOAD)) @bind_hass def set_group( - hass: HomeAssistant, - object_id: str, - name: str | None = None, - entity_ids: list[str] | None = None, - icon: str | None = None, - add: list[str] | None = None, -) -> None: + hass, + object_id, + name=None, + entity_ids=None, + icon=None, + add=None, +): """Create/Update a group.""" hass.add_job( async_set_group, @@ -54,13 +54,13 @@ def set_group( @callback @bind_hass def async_set_group( - hass: HomeAssistant, - object_id: str, - name: str | None = None, - entity_ids: list[str] | None = None, - icon: str | None = None, - add: list[str] | None = None, -) -> None: + 
hass, + object_id, + name=None, + entity_ids=None, + icon=None, + add=None, +): """Create/Update a group.""" data = { key: value @@ -79,7 +79,7 @@ def async_set_group( @callback @bind_hass -def async_remove(hass: HomeAssistant, object_id: str) -> None: +def async_remove(hass, object_id): """Remove a user group.""" data = {ATTR_OBJECT_ID: object_id} hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_REMOVE, data)) diff --git a/tests/components/group/test_button.py b/tests/components/group/test_button.py deleted file mode 100644 index c3f4a720d53..00000000000 --- a/tests/components/group/test_button.py +++ /dev/null @@ -1,122 +0,0 @@ -"""The tests for the group button platform.""" - -from freezegun.api import FrozenDateTimeFactory -import pytest - -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.components.group import DOMAIN -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.setup import async_setup_component -from homeassistant.util import dt as dt_util - - -async def test_default_state( - hass: HomeAssistant, entity_registry: er.EntityRegistry -) -> None: - """Test button group default state.""" - hass.states.async_set("button.notify_light", "2021-01-01T23:59:59.123+00:00") - await async_setup_component( - hass, - BUTTON_DOMAIN, - { - BUTTON_DOMAIN: { - "platform": DOMAIN, - "entities": ["button.notify_light", "button.self_destruct"], - "name": "Button group", - "unique_id": "unique_identifier", - } - }, - ) - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - state = hass.states.get("button.button_group") - assert state is not None - assert state.state == STATE_UNKNOWN - assert state.attributes.get(ATTR_ENTITY_ID) == [ - "button.notify_light", - "button.self_destruct", - ] - - entry = entity_registry.async_get("button.button_group") - assert entry - assert entry.unique_id == "unique_identifier" - - -async def test_state_reporting(hass: HomeAssistant) -> None: - """Test the state reporting. - - The group state is unavailable if all group members are unavailable. - Otherwise, the group state represents the last time the grouped button was pressed. 
- """ - await async_setup_component( - hass, - BUTTON_DOMAIN, - { - BUTTON_DOMAIN: { - "platform": DOMAIN, - "entities": ["button.test1", "button.test2"], - } - }, - ) - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - # Initial state with no group member in the state machine -> unavailable - assert hass.states.get("button.button_group").state == STATE_UNAVAILABLE - - # All group members unavailable -> unavailable - hass.states.async_set("button.test1", STATE_UNAVAILABLE) - hass.states.async_set("button.test2", STATE_UNAVAILABLE) - await hass.async_block_till_done() - assert hass.states.get("button.button_group").state == STATE_UNAVAILABLE - - # All group members available, but no group member pressed -> unknown - hass.states.async_set("button.test1", "2021-01-01T23:59:59.123+00:00") - hass.states.async_set("button.test2", "2022-02-02T23:59:59.123+00:00") - await hass.async_block_till_done() - assert hass.states.get("button.button_group").state == STATE_UNKNOWN - - -@pytest.mark.usefixtures("enable_custom_integrations") -async def test_service_calls( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: - """Test service calls.""" - await async_setup_component( - hass, - BUTTON_DOMAIN, - { - BUTTON_DOMAIN: [ - {"platform": "demo"}, - { - "platform": DOMAIN, - "entities": [ - "button.push", - "button.self_destruct", - ], - }, - ] - }, - ) - await hass.async_block_till_done() - - assert hass.states.get("button.button_group").state == STATE_UNKNOWN - assert hass.states.get("button.push").state == STATE_UNKNOWN - - now = dt_util.parse_datetime("2021-01-09 12:00:00+00:00") - freezer.move_to(now) - - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: "button.button_group"}, - blocking=True, - ) - - assert hass.states.get("button.button_group").state == now.isoformat() - assert hass.states.get("button.push").state == now.isoformat() diff --git a/tests/components/group/test_config_flow.py b/tests/components/group/test_config_flow.py index 461df19ebf8..c6ee4ae5a87 100644 --- a/tests/components/group/test_config_flow.py +++ b/tests/components/group/test_config_flow.py @@ -29,7 +29,6 @@ from tests.typing import WebSocketGenerator [ ("binary_sensor", "on", "on", {}, {}, {"all": False}, {}), ("binary_sensor", "on", "on", {}, {"all": True}, {"all": True}, {}), - ("button", STATE_UNKNOWN, "2021-01-01T23:59:59.123+00:00", {}, {}, {}, {}), ("cover", "open", "open", {}, {}, {}, {}), ( "event", @@ -46,7 +45,6 @@ from tests.typing import WebSocketGenerator ("fan", "on", "on", {}, {}, {}, {}), ("light", "on", "on", {}, {}, {}, {}), ("lock", "locked", "locked", {}, {}, {}, {}), - ("notify", STATE_UNKNOWN, "2021-01-01T23:59:59.123+00:00", {}, {}, {}, {}), ("media_player", "on", "on", {}, {}, {}, {}), ( "sensor", @@ -137,13 +135,11 @@ async def test_config_flow( ("group_type", "extra_input"), [ ("binary_sensor", {"all": False}), - ("button", {}), ("cover", {}), ("event", {}), ("fan", {}), ("light", {}), ("lock", {}), - ("notify", {}), ("media_player", {}), ("switch", {}), ], @@ -216,13 +212,11 @@ def get_suggested(schema, key): ("group_type", "member_state", "extra_options", "options_options"), [ ("binary_sensor", "on", {"all": False}, {}), - ("button", "2021-01-01T23:59:59.123+00:00", {}, {}), ("cover", "open", {}, {}), ("event", "2021-01-01T23:59:59.123+00:00", {}, {}), ("fan", "on", {}, {}), ("light", "on", {"all": False}, {}), ("lock", "locked", {}, {}), - ("notify", "2021-01-01T23:59:59.123+00:00", {}, {}), 
("media_player", "on", {}, {}), ( "sensor", @@ -402,13 +396,11 @@ async def test_all_options( ("group_type", "extra_input"), [ ("binary_sensor", {"all": False}), - ("button", {}), ("cover", {}), ("event", {}), ("fan", {}), ("light", {}), ("lock", {}), - ("notify", {}), ("media_player", {}), ("switch", {}), ], @@ -491,7 +483,6 @@ LIGHT_ATTRS = [ {"color_mode": "unknown"}, ] LOCK_ATTRS = [{"supported_features": 1}, {}] -NOTIFY_ATTRS = [{"supported_features": 0}, {}] MEDIA_PLAYER_ATTRS = [{"supported_features": 0}, {}] SENSOR_ATTRS = [{"icon": "mdi:calculator"}, {"max_entity_id": "sensor.input_two"}] @@ -500,13 +491,11 @@ SENSOR_ATTRS = [{"icon": "mdi:calculator"}, {"max_entity_id": "sensor.input_two" ("domain", "extra_user_input", "input_states", "group_state", "extra_attributes"), [ ("binary_sensor", {"all": True}, ["on", "off"], "off", [{}, {}]), - ("button", {}, ["", ""], "unknown", [{}, {}]), ("cover", {}, ["open", "closed"], "open", COVER_ATTRS), ("event", {}, ["", ""], "unknown", EVENT_ATTRS), ("fan", {}, ["on", "off"], "on", FAN_ATTRS), ("light", {}, ["on", "off"], "on", LIGHT_ATTRS), ("lock", {}, ["unlocked", "locked"], "unlocked", LOCK_ATTRS), - ("notify", {}, ["", ""], "unknown", NOTIFY_ATTRS), ("media_player", {}, ["on", "off"], "on", MEDIA_PLAYER_ATTRS), ("sensor", {"type": "max"}, ["10", "20"], "20.0", SENSOR_ATTRS), ("switch", {}, ["on", "off"], "on", [{}, {}]), @@ -611,13 +600,11 @@ async def test_config_flow_preview( ), [ ("binary_sensor", {"all": True}, {"all": False}, ["on", "off"], "on", [{}, {}]), - ("button", {}, {}, ["", ""], "unknown", [{}, {}]), ("cover", {}, {}, ["open", "closed"], "open", COVER_ATTRS), ("event", {}, {}, ["", ""], "unknown", EVENT_ATTRS), ("fan", {}, {}, ["on", "off"], "on", FAN_ATTRS), ("light", {}, {}, ["on", "off"], "on", LIGHT_ATTRS), ("lock", {}, {}, ["unlocked", "locked"], "unlocked", LOCK_ATTRS), - ("notify", {}, {}, ["", ""], "unknown", NOTIFY_ATTRS), ("media_player", {}, {}, ["on", "off"], "on", MEDIA_PLAYER_ATTRS), ( "sensor", diff --git a/tests/components/group/test_cover.py b/tests/components/group/test_cover.py index b1f622569bd..5b5d8fa873c 100644 --- a/tests/components/group/test_cover.py +++ b/tests/components/group/test_cover.py @@ -2,7 +2,6 @@ import asyncio from datetime import timedelta -from typing import Any import pytest @@ -11,8 +10,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN as COVER_DOMAIN, - CoverState, + DOMAIN, ) from homeassistant.components.group.cover import DEFAULT_NAME from homeassistant.const import ( @@ -32,6 +30,10 @@ from homeassistant.const import ( SERVICE_STOP_COVER_TILT, SERVICE_TOGGLE, SERVICE_TOGGLE_COVER_TILT, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -49,7 +51,7 @@ DEMO_COVER_TILT = "cover.living_room_window" DEMO_TILT = "cover.tilt_demo" CONFIG_ALL = { - COVER_DOMAIN: [ + DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -59,7 +61,7 @@ CONFIG_ALL = { } CONFIG_POS = { - COVER_DOMAIN: [ + DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -69,7 +71,7 @@ CONFIG_POS = { } CONFIG_TILT_ONLY = { - COVER_DOMAIN: [ + DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -79,7 +81,7 @@ CONFIG_TILT_ONLY = { } CONFIG_ATTRIBUTES = { - COVER_DOMAIN: { + DOMAIN: { "platform": "group", CONF_ENTITIES: [DEMO_COVER, DEMO_COVER_POS, DEMO_COVER_TILT, DEMO_TILT], CONF_UNIQUE_ID: "unique_identifier", @@ -88,21 +90,18 @@ CONFIG_ATTRIBUTES = { @pytest.fixture -async def 
setup_comp( - hass: HomeAssistant, config_count: tuple[dict[str, Any], int] -) -> None: +async def setup_comp(hass, config_count): """Set up group cover component.""" config, count = config_count - with assert_setup_component(count, COVER_DOMAIN): - await async_setup_component(hass, COVER_DOMAIN, config) + with assert_setup_component(count, DOMAIN): + await async_setup_component(hass, DOMAIN, config) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() @pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)]) -@pytest.mark.usefixtures("setup_comp") -async def test_state(hass: HomeAssistant) -> None: +async def test_state(hass: HomeAssistant, setup_comp) -> None: """Test handling of state. The group state is unknown if all group members are unknown or unavailable. @@ -155,105 +154,90 @@ async def test_state(hass: HomeAssistant) -> None: # At least one member opening -> group opening for state_1 in ( - CoverState.CLOSED, - CoverState.CLOSING, - CoverState.OPEN, - CoverState.OPENING, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ): for state_2 in ( - CoverState.CLOSED, - CoverState.CLOSING, - CoverState.OPEN, - CoverState.OPENING, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ): for state_3 in ( - CoverState.CLOSED, - CoverState.CLOSING, - CoverState.OPEN, - CoverState.OPENING, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ): hass.states.async_set(DEMO_COVER, state_1, {}) hass.states.async_set(DEMO_COVER_POS, state_2, {}) hass.states.async_set(DEMO_COVER_TILT, state_3, {}) - hass.states.async_set(DEMO_TILT, CoverState.OPENING, {}) + hass.states.async_set(DEMO_TILT, STATE_OPENING, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING # At least one member closing -> group closing for state_1 in ( - CoverState.CLOSED, - CoverState.CLOSING, - CoverState.OPEN, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN, ): for state_2 in ( - CoverState.CLOSED, - CoverState.CLOSING, - CoverState.OPEN, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN, ): for state_3 in ( - CoverState.CLOSED, - CoverState.CLOSING, - CoverState.OPEN, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN, ): hass.states.async_set(DEMO_COVER, state_1, {}) hass.states.async_set(DEMO_COVER_POS, state_2, {}) hass.states.async_set(DEMO_COVER_TILT, state_3, {}) - hass.states.async_set(DEMO_TILT, CoverState.CLOSING, {}) + hass.states.async_set(DEMO_TILT, STATE_CLOSING, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING # At least one member open -> group open - for state_1 in ( - CoverState.CLOSED, - CoverState.OPEN, - STATE_UNAVAILABLE, - STATE_UNKNOWN, - ): - for state_2 in ( - CoverState.CLOSED, - CoverState.OPEN, - STATE_UNAVAILABLE, - STATE_UNKNOWN, - ): - for state_3 in ( - CoverState.CLOSED, - CoverState.OPEN, - STATE_UNAVAILABLE, - STATE_UNKNOWN, - ): + for state_1 in (STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN): + for state_2 in (STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN): + for state_3 in (STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN): hass.states.async_set(DEMO_COVER, state_1, {}) 
hass.states.async_set(DEMO_COVER_POS, state_2, {}) hass.states.async_set(DEMO_COVER_TILT, state_3, {}) - hass.states.async_set(DEMO_TILT, CoverState.OPEN, {}) + hass.states.async_set(DEMO_TILT, STATE_OPEN, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN # At least one member closed -> group closed - for state_1 in (CoverState.CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): - for state_2 in (CoverState.CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): - for state_3 in (CoverState.CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): + for state_1 in (STATE_CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): + for state_2 in (STATE_CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): + for state_3 in (STATE_CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): hass.states.async_set(DEMO_COVER, state_1, {}) hass.states.async_set(DEMO_COVER_POS, state_2, {}) hass.states.async_set(DEMO_COVER_TILT, state_3, {}) - hass.states.async_set(DEMO_TILT, CoverState.CLOSED, {}) + hass.states.async_set(DEMO_TILT, STATE_CLOSED, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED # All group members removed from the state machine -> unavailable hass.states.async_remove(DEMO_COVER) @@ -266,9 +250,8 @@ async def test_state(hass: HomeAssistant) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)]) -@pytest.mark.usefixtures("setup_comp") async def test_attributes( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry, setup_comp ) -> None: """Test handling of state attributes.""" state = hass.states.get(COVER_GROUP) @@ -281,11 +264,11 @@ async def test_attributes( assert ATTR_CURRENT_TILT_POSITION not in state.attributes # Set entity as closed - hass.states.async_set(DEMO_COVER, CoverState.CLOSED, {}) + hass.states.async_set(DEMO_COVER, STATE_CLOSED, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes[ATTR_ENTITY_ID] == [ DEMO_COVER, DEMO_COVER_POS, @@ -294,18 +277,18 @@ async def test_attributes( ] # Set entity as opening - hass.states.async_set(DEMO_COVER, CoverState.OPENING, {}) + hass.states.async_set(DEMO_COVER, STATE_OPENING, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING # Set entity as closing - hass.states.async_set(DEMO_COVER, CoverState.CLOSING, {}) + hass.states.async_set(DEMO_COVER, STATE_CLOSING, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING # Set entity as unknown again hass.states.async_set(DEMO_COVER, STATE_UNKNOWN, {}) @@ -315,11 +298,11 @@ async def test_attributes( assert state.state == STATE_UNKNOWN # Add Entity that supports open / close / stop - hass.states.async_set(DEMO_COVER, CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set(DEMO_COVER, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 11}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 11 assert ATTR_CURRENT_POSITION not in state.attributes @@ -328,24 
+311,24 @@ async def test_attributes( # Add Entity that supports set_cover_position hass.states.async_set( DEMO_COVER_POS, - CoverState.OPEN, + STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 70}, ) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 15 assert state.attributes[ATTR_CURRENT_POSITION] == 70 assert ATTR_CURRENT_TILT_POSITION not in state.attributes # Add Entity that supports open tilt / close tilt / stop tilt - hass.states.async_set(DEMO_TILT, CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 112}) + hass.states.async_set(DEMO_TILT, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 112}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 127 assert state.attributes[ATTR_CURRENT_POSITION] == 70 @@ -354,13 +337,13 @@ async def test_attributes( # Add Entity that supports set_tilt_position hass.states.async_set( DEMO_COVER_TILT, - CoverState.OPEN, + STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 60}, ) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 255 assert state.attributes[ATTR_CURRENT_POSITION] == 70 @@ -371,14 +354,12 @@ async def test_attributes( # Covers hass.states.async_set( - DEMO_COVER, - CoverState.OPEN, - {ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 100}, + DEMO_COVER, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 100} ) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 244 assert state.attributes[ATTR_CURRENT_POSITION] == 85 # (70 + 100) / 2 @@ -389,7 +370,7 @@ async def test_attributes( await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 240 assert ATTR_CURRENT_POSITION not in state.attributes @@ -398,31 +379,31 @@ async def test_attributes( # Tilts hass.states.async_set( DEMO_TILT, - CoverState.OPEN, + STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 100}, ) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 128 assert ATTR_CURRENT_POSITION not in state.attributes assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 80 # (60 + 100) / 2 hass.states.async_remove(DEMO_COVER_TILT) - hass.states.async_set(DEMO_TILT, CoverState.CLOSED) + hass.states.async_set(DEMO_TILT, STATE_CLOSED) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 
assert ATTR_CURRENT_POSITION not in state.attributes assert ATTR_CURRENT_TILT_POSITION not in state.attributes # Group member has set assumed_state - hass.states.async_set(DEMO_TILT, CoverState.CLOSED, {ATTR_ASSUMED_STATE: True}) + hass.states.async_set(DEMO_TILT, STATE_CLOSED, {ATTR_ASSUMED_STATE: True}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) @@ -435,21 +416,22 @@ async def test_attributes( @pytest.mark.parametrize("config_count", [(CONFIG_TILT_ONLY, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_cover_that_only_supports_tilt_removed(hass: HomeAssistant) -> None: +async def test_cover_that_only_supports_tilt_removed( + hass: HomeAssistant, setup_comp +) -> None: """Test removing a cover that support tilt.""" hass.states.async_set( DEMO_COVER_TILT, - CoverState.OPEN, + STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 60}, ) hass.states.async_set( DEMO_TILT, - CoverState.OPEN, + STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 60}, ) state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_FRIENDLY_NAME] == DEFAULT_NAME assert state.attributes[ATTR_ENTITY_ID] == [ DEMO_COVER_TILT, @@ -459,16 +441,15 @@ async def test_cover_that_only_supports_tilt_removed(hass: HomeAssistant) -> Non assert ATTR_CURRENT_TILT_POSITION in state.attributes hass.states.async_remove(DEMO_COVER_TILT) - hass.states.async_set(DEMO_TILT, CoverState.CLOSED) + hass.states.async_set(DEMO_TILT, STATE_CLOSED) await hass.async_block_till_done() @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_open_covers(hass: HomeAssistant) -> None: +async def test_open_covers(hass: HomeAssistant, setup_comp) -> None: """Test open cover function.""" await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): @@ -477,20 +458,19 @@ async def test_open_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 - assert hass.states.get(DEMO_COVER).state == CoverState.OPEN + assert hass.states.get(DEMO_COVER).state == STATE_OPEN assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 100 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 100 @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_close_covers(hass: HomeAssistant) -> None: +async def test_close_covers(hass: HomeAssistant, setup_comp) -> None: """Test close cover function.""" await hass.services.async_call( - COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): @@ -499,21 +479,20 @@ async def test_close_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 - assert hass.states.get(DEMO_COVER).state == CoverState.CLOSED + assert hass.states.get(DEMO_COVER).state == STATE_CLOSED assert 
hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 0 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 0 @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_toggle_covers(hass: HomeAssistant) -> None: +async def test_toggle_covers(hass: HomeAssistant, setup_comp) -> None: """Test toggle cover function.""" # Start covers in open state await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -521,11 +500,11 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN # Toggle will close covers await hass.services.async_call( - COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -533,16 +512,16 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 - assert hass.states.get(DEMO_COVER).state == CoverState.CLOSED + assert hass.states.get(DEMO_COVER).state == STATE_CLOSED assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 0 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 0 # Toggle again will open covers await hass.services.async_call( - COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -550,47 +529,45 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 - assert hass.states.get(DEMO_COVER).state == CoverState.OPEN + assert hass.states.get(DEMO_COVER).state == STATE_OPEN assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 100 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 100 @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_stop_covers(hass: HomeAssistant) -> None: +async def test_stop_covers(hass: HomeAssistant, setup_comp) -> None: """Test stop cover function.""" await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() await hass.services.async_call( - COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert 
state.state == CoverState.OPENING + assert state.state == STATE_OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 50 # (20 + 80) / 2 - assert hass.states.get(DEMO_COVER).state == CoverState.OPEN + assert hass.states.get(DEMO_COVER).state == STATE_OPEN assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 20 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 80 @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_set_cover_position(hass: HomeAssistant) -> None: +async def test_set_cover_position(hass: HomeAssistant, setup_comp) -> None: """Test set cover position function.""" await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: COVER_GROUP, ATTR_POSITION: 50}, blocking=True, @@ -601,23 +578,19 @@ async def test_set_cover_position(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 50 - assert hass.states.get(DEMO_COVER).state == CoverState.CLOSED + assert hass.states.get(DEMO_COVER).state == STATE_CLOSED assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 50 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 50 @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_open_tilts(hass: HomeAssistant) -> None: +async def test_open_tilts(hass: HomeAssistant, setup_comp) -> None: """Test open tilt function.""" await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER_TILT, - {ATTR_ENTITY_ID: COVER_GROUP}, - blocking=True, + DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(5): future = dt_util.utcnow() + timedelta(seconds=1) @@ -625,7 +598,7 @@ async def test_open_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 assert ( @@ -634,14 +607,10 @@ async def test_open_tilts(hass: HomeAssistant) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_close_tilts(hass: HomeAssistant) -> None: +async def test_close_tilts(hass: HomeAssistant, setup_comp) -> None: """Test close tilt function.""" await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER_TILT, - {ATTR_ENTITY_ID: COVER_GROUP}, - blocking=True, + DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(5): future = dt_util.utcnow() + timedelta(seconds=1) @@ -649,22 +618,18 @@ async def test_close_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 0 @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_toggle_tilts(hass: HomeAssistant) -> None: +async def test_toggle_tilts(hass: HomeAssistant, setup_comp) -> None: """Test toggle tilt function.""" # Start tilted open await hass.services.async_call( - 
COVER_DOMAIN, - SERVICE_OPEN_COVER_TILT, - {ATTR_ENTITY_ID: COVER_GROUP}, - blocking=True, + DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -672,7 +637,7 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 assert ( @@ -681,10 +646,7 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: # Toggle will tilt closed await hass.services.async_call( - COVER_DOMAIN, - SERVICE_TOGGLE_COVER_TILT, - {ATTR_ENTITY_ID: COVER_GROUP}, - blocking=True, + DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -692,17 +654,14 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 0 # Toggle again will tilt open await hass.services.async_call( - COVER_DOMAIN, - SERVICE_TOGGLE_COVER_TILT, - {ATTR_ENTITY_ID: COVER_GROUP}, - blocking=True, + DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -710,7 +669,7 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 assert ( @@ -719,42 +678,34 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_stop_tilts(hass: HomeAssistant) -> None: +async def test_stop_tilts(hass: HomeAssistant, setup_comp) -> None: """Test stop tilts function.""" await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER_TILT, - {ATTR_ENTITY_ID: COVER_GROUP}, - blocking=True, + DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() await hass.services.async_call( - COVER_DOMAIN, - SERVICE_STOP_COVER_TILT, - {ATTR_ENTITY_ID: COVER_GROUP}, - blocking=True, + DOMAIN, SERVICE_STOP_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 60 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 60 @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_set_tilt_positions(hass: HomeAssistant) -> None: +async def test_set_tilt_positions(hass: HomeAssistant, setup_comp) -> None: """Test set tilt position function.""" await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: COVER_GROUP, 
ATTR_TILT_POSITION: 80}, blocking=True, @@ -765,25 +716,24 @@ async def test_set_tilt_positions(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 80 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 80 @pytest.mark.parametrize("config_count", [(CONFIG_POS, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_is_opening_closing(hass: HomeAssistant) -> None: +async def test_is_opening_closing(hass: HomeAssistant, setup_comp) -> None: """Test is_opening property.""" await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) await hass.async_block_till_done() # Both covers opening -> opening - assert hass.states.get(DEMO_COVER_POS).state == CoverState.OPENING - assert hass.states.get(DEMO_COVER_TILT).state == CoverState.OPENING - assert hass.states.get(COVER_GROUP).state == CoverState.OPENING + assert hass.states.get(DEMO_COVER_POS).state == STATE_OPENING + assert hass.states.get(DEMO_COVER_TILT).state == STATE_OPENING + assert hass.states.get(COVER_GROUP).state == STATE_OPENING for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -791,81 +741,67 @@ async def test_is_opening_closing(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.services.async_call( - COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) # Both covers closing -> closing - assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSING - assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING - assert hass.states.get(COVER_GROUP).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING + assert hass.states.get(COVER_GROUP).state == STATE_CLOSING - hass.states.async_set( - DEMO_COVER_POS, CoverState.OPENING, {ATTR_SUPPORTED_FEATURES: 11} - ) + hass.states.async_set(DEMO_COVER_POS, STATE_OPENING, {ATTR_SUPPORTED_FEATURES: 11}) await hass.async_block_till_done() # Closing + Opening -> Opening - assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING - assert hass.states.get(DEMO_COVER_POS).state == CoverState.OPENING - assert hass.states.get(COVER_GROUP).state == CoverState.OPENING + assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_POS).state == STATE_OPENING + assert hass.states.get(COVER_GROUP).state == STATE_OPENING - hass.states.async_set( - DEMO_COVER_POS, CoverState.CLOSING, {ATTR_SUPPORTED_FEATURES: 11} - ) + hass.states.async_set(DEMO_COVER_POS, STATE_CLOSING, {ATTR_SUPPORTED_FEATURES: 11}) await hass.async_block_till_done() # Both covers closing -> closing - assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING - assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSING - assert hass.states.get(COVER_GROUP).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSING + assert hass.states.get(COVER_GROUP).state == STATE_CLOSING # Closed + Closing -> Closing - hass.states.async_set( - DEMO_COVER_POS, CoverState.CLOSED, 
{ATTR_SUPPORTED_FEATURES: 11} - ) + hass.states.async_set(DEMO_COVER_POS, STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: 11}) await hass.async_block_till_done() - assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING - assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSED - assert hass.states.get(COVER_GROUP).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSED + assert hass.states.get(COVER_GROUP).state == STATE_CLOSING # Open + Closing -> Closing - hass.states.async_set( - DEMO_COVER_POS, CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 11} - ) + hass.states.async_set(DEMO_COVER_POS, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 11}) await hass.async_block_till_done() - assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING - assert hass.states.get(DEMO_COVER_POS).state == CoverState.OPEN - assert hass.states.get(COVER_GROUP).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_POS).state == STATE_OPEN + assert hass.states.get(COVER_GROUP).state == STATE_CLOSING # Closed + Opening -> Closing - hass.states.async_set( - DEMO_COVER_TILT, CoverState.OPENING, {ATTR_SUPPORTED_FEATURES: 11} - ) - hass.states.async_set( - DEMO_COVER_POS, CoverState.CLOSED, {ATTR_SUPPORTED_FEATURES: 11} - ) + hass.states.async_set(DEMO_COVER_TILT, STATE_OPENING, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set(DEMO_COVER_POS, STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: 11}) await hass.async_block_till_done() - assert hass.states.get(DEMO_COVER_TILT).state == CoverState.OPENING - assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSED - assert hass.states.get(COVER_GROUP).state == CoverState.OPENING + assert hass.states.get(DEMO_COVER_TILT).state == STATE_OPENING + assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSED + assert hass.states.get(COVER_GROUP).state == STATE_OPENING # Open + Opening -> Closing - hass.states.async_set( - DEMO_COVER_POS, CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 11} - ) + hass.states.async_set(DEMO_COVER_POS, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 11}) await hass.async_block_till_done() - assert hass.states.get(DEMO_COVER_TILT).state == CoverState.OPENING - assert hass.states.get(DEMO_COVER_POS).state == CoverState.OPEN - assert hass.states.get(COVER_GROUP).state == CoverState.OPENING + assert hass.states.get(DEMO_COVER_TILT).state == STATE_OPENING + assert hass.states.get(DEMO_COVER_POS).state == STATE_OPEN + assert hass.states.get(COVER_GROUP).state == STATE_OPENING async def test_nested_group(hass: HomeAssistant) -> None: """Test nested cover group.""" await async_setup_component( hass, - COVER_DOMAIN, + DOMAIN, { - COVER_DOMAIN: [ + DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -886,23 +822,23 @@ async def test_nested_group(hass: HomeAssistant) -> None: state = hass.states.get("cover.bedroom_group") assert state is not None - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes.get(ATTR_ENTITY_ID) == [DEMO_COVER_POS, DEMO_COVER_TILT] state = hass.states.get("cover.nested_group") assert state is not None - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes.get(ATTR_ENTITY_ID) == ["cover.bedroom_group"] # Test controlling the nested group async with asyncio.timeout(0.5): await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: 
"cover.nested_group"}, blocking=True, ) - assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSING - assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING - assert hass.states.get("cover.bedroom_group").state == CoverState.CLOSING - assert hass.states.get("cover.nested_group").state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING + assert hass.states.get("cover.bedroom_group").state == STATE_CLOSING + assert hass.states.get("cover.nested_group").state == STATE_CLOSING diff --git a/tests/components/group/test_fan.py b/tests/components/group/test_fan.py index 93509b5a651..6aa6fc2933d 100644 --- a/tests/components/group/test_fan.py +++ b/tests/components/group/test_fan.py @@ -1,7 +1,6 @@ """The tests for the group fan platform.""" import asyncio -from typing import Any from unittest.mock import patch import pytest @@ -14,7 +13,7 @@ from homeassistant.components.fan import ( ATTR_PERCENTAGE_STEP, DIRECTION_FORWARD, DIRECTION_REVERSE, - DOMAIN as FAN_DOMAIN, + DOMAIN, SERVICE_OSCILLATE, SERVICE_SET_DIRECTION, SERVICE_SET_PERCENTAGE, @@ -60,7 +59,7 @@ FULL_SUPPORT_FEATURES = ( CONFIG_MISSING_FAN = { - FAN_DOMAIN: [ + DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -74,7 +73,7 @@ CONFIG_MISSING_FAN = { } CONFIG_FULL_SUPPORT = { - FAN_DOMAIN: [ + DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -84,7 +83,7 @@ CONFIG_FULL_SUPPORT = { } CONFIG_LIMITED_SUPPORT = { - FAN_DOMAIN: [ + DOMAIN: [ { "platform": "group", CONF_ENTITIES: [*LIMITED_FAN_ENTITY_IDS], @@ -94,7 +93,7 @@ CONFIG_LIMITED_SUPPORT = { CONFIG_ATTRIBUTES = { - FAN_DOMAIN: { + DOMAIN: { "platform": "group", CONF_ENTITIES: [*FULL_FAN_ENTITY_IDS, *LIMITED_FAN_ENTITY_IDS], CONF_UNIQUE_ID: "unique_identifier", @@ -103,21 +102,20 @@ CONFIG_ATTRIBUTES = { @pytest.fixture -async def setup_comp( - hass: HomeAssistant, config_count: tuple[dict[str, Any], int] -) -> None: +async def setup_comp(hass, config_count): """Set up group fan component.""" config, count = config_count - with assert_setup_component(count, FAN_DOMAIN): - await async_setup_component(hass, FAN_DOMAIN, config) + with assert_setup_component(count, DOMAIN): + await async_setup_component(hass, DOMAIN, config) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() @pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)]) -@pytest.mark.usefixtures("setup_comp") -async def test_state(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: +async def test_state( + hass: HomeAssistant, entity_registry: er.EntityRegistry, setup_comp +) -> None: """Test handling of state. The group state is on if at least one group member is on. 
@@ -212,8 +210,7 @@ async def test_state(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> @pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)]) -@pytest.mark.usefixtures("setup_comp") -async def test_attributes(hass: HomeAssistant) -> None: +async def test_attributes(hass: HomeAssistant, setup_comp) -> None: """Test handling of state attributes.""" state = hass.states.get(FAN_GROUP) assert state.state == STATE_UNAVAILABLE @@ -270,8 +267,7 @@ async def test_attributes(hass: HomeAssistant) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_FULL_SUPPORT, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_direction_oscillating(hass: HomeAssistant) -> None: +async def test_direction_oscillating(hass: HomeAssistant, setup_comp) -> None: """Test handling of direction and oscillating attributes.""" hass.states.async_set( @@ -382,8 +378,7 @@ async def test_direction_oscillating(hass: HomeAssistant) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_MISSING_FAN, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_state_missing_entity_id(hass: HomeAssistant) -> None: +async def test_state_missing_entity_id(hass: HomeAssistant, setup_comp) -> None: """Test we can still setup with a missing entity id.""" state = hass.states.get(FAN_GROUP) await hass.async_block_till_done() @@ -393,7 +388,7 @@ async def test_state_missing_entity_id(hass: HomeAssistant) -> None: async def test_setup_before_started(hass: HomeAssistant) -> None: """Test we can setup before starting.""" hass.set_state(CoreState.stopped) - assert await async_setup_component(hass, FAN_DOMAIN, CONFIG_MISSING_FAN) + assert await async_setup_component(hass, DOMAIN, CONFIG_MISSING_FAN) await hass.async_block_till_done() await hass.async_start() @@ -403,8 +398,7 @@ async def test_setup_before_started(hass: HomeAssistant) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_MISSING_FAN, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_reload(hass: HomeAssistant) -> None: +async def test_reload(hass: HomeAssistant, setup_comp) -> None: """Test the ability to reload fans.""" await hass.async_block_till_done() await hass.async_start() @@ -427,18 +421,17 @@ async def test_reload(hass: HomeAssistant) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_FULL_SUPPORT, 2)]) -@pytest.mark.usefixtures("setup_comp") -async def test_service_calls(hass: HomeAssistant) -> None: +async def test_service_calls(hass: HomeAssistant, setup_comp) -> None: """Test calling services.""" await hass.services.async_call( - FAN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True ) assert hass.states.get(LIVING_ROOM_FAN_ENTITY_ID).state == STATE_ON assert hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID).state == STATE_ON assert hass.states.get(FAN_GROUP).state == STATE_ON await hass.services.async_call( - FAN_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_PERCENTAGE: 66}, blocking=True, @@ -452,14 +445,14 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_PERCENTAGE_STEP] == 100 / 3 await hass.services.async_call( - FAN_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True ) assert hass.states.get(LIVING_ROOM_FAN_ENTITY_ID).state == STATE_OFF assert hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID).state == STATE_OFF assert hass.states.get(FAN_GROUP).state == 
STATE_OFF await hass.services.async_call( - FAN_DOMAIN, + DOMAIN, SERVICE_SET_PERCENTAGE, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_PERCENTAGE: 100}, blocking=True, @@ -472,7 +465,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_PERCENTAGE] == 100 await hass.services.async_call( - FAN_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_PERCENTAGE: 0}, blocking=True, @@ -482,7 +475,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert hass.states.get(FAN_GROUP).state == STATE_OFF await hass.services.async_call( - FAN_DOMAIN, + DOMAIN, SERVICE_OSCILLATE, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_OSCILLATING: True}, blocking=True, @@ -495,7 +488,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_OSCILLATING] is True await hass.services.async_call( - FAN_DOMAIN, + DOMAIN, SERVICE_OSCILLATE, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_OSCILLATING: False}, blocking=True, @@ -508,7 +501,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_OSCILLATING] is False await hass.services.async_call( - FAN_DOMAIN, + DOMAIN, SERVICE_SET_DIRECTION, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_DIRECTION: DIRECTION_FORWARD}, blocking=True, @@ -521,7 +514,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_DIRECTION] == DIRECTION_FORWARD await hass.services.async_call( - FAN_DOMAIN, + DOMAIN, SERVICE_SET_DIRECTION, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_DIRECTION: DIRECTION_REVERSE}, blocking=True, @@ -538,9 +531,9 @@ async def test_nested_group(hass: HomeAssistant) -> None: """Test nested fan group.""" await async_setup_component( hass, - FAN_DOMAIN, + DOMAIN, { - FAN_DOMAIN: [ + DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -578,7 +571,7 @@ async def test_nested_group(hass: HomeAssistant) -> None: # Test controlling the nested group async with asyncio.timeout(0.5): await hass.services.async_call( - FAN_DOMAIN, + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: "fan.nested_group"}, blocking=True, diff --git a/tests/components/group/test_init.py b/tests/components/group/test_init.py index 9e6e352e46c..7434de74f63 100644 --- a/tests/components/group/test_init.py +++ b/tests/components/group/test_init.py @@ -11,7 +11,6 @@ import pytest from homeassistant.components import group from homeassistant.components.group.registry import GroupIntegrationRegistry -from homeassistant.components.lock import LockState from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_FRIENDLY_NAME, @@ -20,10 +19,17 @@ from homeassistant.const import ( SERVICE_RELOAD, STATE_CLOSED, STATE_HOME, + STATE_JAMMED, + STATE_LOCKED, + STATE_LOCKING, STATE_NOT_HOME, STATE_OFF, STATE_ON, + STATE_OPEN, + STATE_OPENING, STATE_UNKNOWN, + STATE_UNLOCKED, + STATE_UNLOCKING, ) from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers import entity_registry as er @@ -399,13 +405,13 @@ async def test_expand_entity_ids_does_not_return_duplicates( order=None, ) - assert sorted( + assert ["light.bowl", "light.ceiling"] == sorted( group.expand_entity_ids(hass, [test_group.entity_id, "light.Ceiling"]) - ) == ["light.bowl", "light.ceiling"] + ) - assert sorted( + assert ["light.bowl", "light.ceiling"] == sorted( group.expand_entity_ids(hass, ["light.bowl", test_group.entity_id]) - ) == ["light.bowl", "light.ceiling"] + ) async def test_expand_entity_ids_recursive(hass: HomeAssistant) -> None: @@ -433,7 +439,7 @@ async def 
test_expand_entity_ids_recursive(hass: HomeAssistant) -> None: async def test_expand_entity_ids_ignores_non_strings(hass: HomeAssistant) -> None: """Test that non string elements in lists are ignored.""" - assert group.expand_entity_ids(hass, [5, True]) == [] + assert [] == group.expand_entity_ids(hass, [5, True]) async def test_get_entity_ids(hass: HomeAssistant) -> None: @@ -454,10 +460,9 @@ async def test_get_entity_ids(hass: HomeAssistant) -> None: order=None, ) - assert sorted(group.get_entity_ids(hass, test_group.entity_id)) == [ - "light.bowl", - "light.ceiling", - ] + assert ["light.bowl", "light.ceiling"] == sorted( + group.get_entity_ids(hass, test_group.entity_id) + ) async def test_get_entity_ids_with_domain_filter(hass: HomeAssistant) -> None: @@ -477,19 +482,19 @@ async def test_get_entity_ids_with_domain_filter(hass: HomeAssistant) -> None: order=None, ) - assert group.get_entity_ids( + assert ["switch.ac"] == group.get_entity_ids( hass, mixed_group.entity_id, domain_filter="switch" - ) == ["switch.ac"] + ) async def test_get_entity_ids_with_non_existing_group_name(hass: HomeAssistant) -> None: """Test get_entity_ids with a non existing group.""" - assert group.get_entity_ids(hass, "non_existing") == [] + assert [] == group.get_entity_ids(hass, "non_existing") async def test_get_entity_ids_with_non_group_state(hass: HomeAssistant) -> None: """Test get_entity_ids with a non group state.""" - assert group.get_entity_ids(hass, "switch.AC") == [] + assert [] == group.get_entity_ids(hass, "switch.AC") async def test_group_being_init_before_first_tracked_state_is_set_to_on( @@ -615,12 +620,12 @@ async def test_expand_entity_ids_expands_nested_groups(hass: HomeAssistant) -> N order=None, ) - assert sorted(group.expand_entity_ids(hass, ["group.group_of_groups"])) == [ + assert [ "light.test_1", "light.test_2", "switch.test_1", "switch.test_2", - ] + ] == sorted(group.expand_entity_ids(hass, ["group.group_of_groups"])) async def test_set_assumed_state_based_on_tracked(hass: HomeAssistant) -> None: @@ -734,78 +739,78 @@ async def test_is_on(hass: HomeAssistant) -> None: ), ( ("cover", "cover"), - (LockState.OPEN, STATE_CLOSED), + (STATE_OPEN, STATE_CLOSED), (STATE_CLOSED, STATE_CLOSED), - (LockState.OPEN, True), + (STATE_OPEN, True), (STATE_CLOSED, False), ), ( ("lock", "lock"), - (LockState.UNLOCKED, LockState.LOCKED), - (LockState.LOCKED, LockState.LOCKED), - (LockState.UNLOCKED, True), - (LockState.LOCKED, False), + (STATE_UNLOCKED, STATE_LOCKED), + (STATE_LOCKED, STATE_LOCKED), + (STATE_UNLOCKED, True), + (STATE_LOCKED, False), ), ( ("cover", "lock"), - (LockState.OPEN, LockState.LOCKED), - (STATE_CLOSED, LockState.LOCKED), + (STATE_OPEN, STATE_LOCKED), + (STATE_CLOSED, STATE_LOCKED), (STATE_ON, True), (STATE_OFF, False), ), ( ("cover", "lock"), - (LockState.OPEN, LockState.UNLOCKED), - (STATE_CLOSED, LockState.LOCKED), + (STATE_OPEN, STATE_UNLOCKED), + (STATE_CLOSED, STATE_LOCKED), (STATE_ON, True), (STATE_OFF, False), ), ( ("cover", "lock", "light"), - (LockState.OPEN, LockState.LOCKED, STATE_ON), - (STATE_CLOSED, LockState.LOCKED, STATE_OFF), + (STATE_OPEN, STATE_LOCKED, STATE_ON), + (STATE_CLOSED, STATE_LOCKED, STATE_OFF), (STATE_ON, True), (STATE_OFF, False), ), ( ("lock", "lock"), - (LockState.OPEN, LockState.LOCKED), - (LockState.LOCKED, LockState.LOCKED), - (LockState.UNLOCKED, True), - (LockState.LOCKED, False), + (STATE_OPEN, STATE_LOCKED), + (STATE_LOCKED, STATE_LOCKED), + (STATE_UNLOCKED, True), + (STATE_LOCKED, False), ), ( ("lock", "lock"), - (LockState.OPENING, 
LockState.LOCKED), - (LockState.LOCKED, LockState.LOCKED), - (LockState.UNLOCKED, True), - (LockState.LOCKED, False), + (STATE_OPENING, STATE_LOCKED), + (STATE_LOCKED, STATE_LOCKED), + (STATE_UNLOCKED, True), + (STATE_LOCKED, False), ), ( ("lock", "lock"), - (LockState.UNLOCKING, LockState.LOCKED), - (LockState.LOCKED, LockState.LOCKED), - (LockState.UNLOCKED, True), - (LockState.LOCKED, False), + (STATE_UNLOCKING, STATE_LOCKED), + (STATE_LOCKED, STATE_LOCKED), + (STATE_UNLOCKED, True), + (STATE_LOCKED, False), ), ( ("lock", "lock"), - (LockState.LOCKING, LockState.LOCKED), - (LockState.LOCKED, LockState.LOCKED), - (LockState.UNLOCKED, True), - (LockState.LOCKED, False), + (STATE_LOCKING, STATE_LOCKED), + (STATE_LOCKED, STATE_LOCKED), + (STATE_UNLOCKED, True), + (STATE_LOCKED, False), ), ( ("lock", "lock"), - (LockState.JAMMED, LockState.LOCKED), - (LockState.LOCKED, LockState.LOCKED), - (LockState.LOCKED, False), - (LockState.LOCKED, False), + (STATE_JAMMED, STATE_LOCKED), + (STATE_LOCKED, STATE_LOCKED), + (STATE_LOCKED, False), + (STATE_LOCKED, False), ), ( ("cover", "lock"), - (LockState.OPEN, LockState.OPEN), - (STATE_CLOSED, LockState.LOCKED), + (STATE_OPEN, STATE_OPEN), + (STATE_CLOSED, STATE_LOCKED), (STATE_ON, True), (STATE_OFF, False), ), diff --git a/tests/components/group/test_lock.py b/tests/components/group/test_lock.py index cc255264183..0c62913ae3e 100644 --- a/tests/components/group/test_lock.py +++ b/tests/components/group/test_lock.py @@ -12,9 +12,18 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - LockState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_JAMMED, + STATE_LOCKED, + STATE_LOCKING, + STATE_OPEN, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + STATE_UNLOCKED, + STATE_UNLOCKING, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er @@ -46,7 +55,7 @@ async def test_default_state( state = hass.states.get("lock.door_group") assert state is not None - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKED assert state.attributes.get(ATTR_ENTITY_ID) == ["lock.front", "lock.back"] entry = entity_registry.async_get("lock.door_group") @@ -100,63 +109,63 @@ async def test_state_reporting(hass: HomeAssistant) -> None: # At least one member jammed -> group jammed for state_1 in ( - LockState.JAMMED, - LockState.LOCKED, - LockState.LOCKING, + STATE_JAMMED, + STATE_LOCKED, + STATE_LOCKING, STATE_UNAVAILABLE, STATE_UNKNOWN, - LockState.UNLOCKED, - LockState.UNLOCKING, + STATE_UNLOCKED, + STATE_UNLOCKING, ): hass.states.async_set("lock.test1", state_1) - hass.states.async_set("lock.test2", LockState.JAMMED) + hass.states.async_set("lock.test2", STATE_JAMMED) await hass.async_block_till_done() - assert hass.states.get("lock.lock_group").state == LockState.JAMMED + assert hass.states.get("lock.lock_group").state == STATE_JAMMED # At least one member locking -> group unlocking for state_1 in ( - LockState.LOCKED, - LockState.LOCKING, + STATE_LOCKED, + STATE_LOCKING, STATE_UNAVAILABLE, STATE_UNKNOWN, - LockState.UNLOCKED, - LockState.UNLOCKING, + STATE_UNLOCKED, + STATE_UNLOCKING, ): hass.states.async_set("lock.test1", state_1) - hass.states.async_set("lock.test2", LockState.LOCKING) + hass.states.async_set("lock.test2", STATE_LOCKING) await hass.async_block_till_done() - assert hass.states.get("lock.lock_group").state == 
LockState.LOCKING + assert hass.states.get("lock.lock_group").state == STATE_LOCKING # At least one member unlocking -> group unlocking for state_1 in ( - LockState.LOCKED, + STATE_LOCKED, STATE_UNAVAILABLE, STATE_UNKNOWN, - LockState.UNLOCKED, - LockState.UNLOCKING, + STATE_UNLOCKED, + STATE_UNLOCKING, ): hass.states.async_set("lock.test1", state_1) - hass.states.async_set("lock.test2", LockState.UNLOCKING) + hass.states.async_set("lock.test2", STATE_UNLOCKING) await hass.async_block_till_done() - assert hass.states.get("lock.lock_group").state == LockState.UNLOCKING + assert hass.states.get("lock.lock_group").state == STATE_UNLOCKING # At least one member unlocked -> group unlocked for state_1 in ( - LockState.LOCKED, + STATE_LOCKED, STATE_UNAVAILABLE, STATE_UNKNOWN, - LockState.UNLOCKED, + STATE_UNLOCKED, ): hass.states.async_set("lock.test1", state_1) - hass.states.async_set("lock.test2", LockState.UNLOCKED) + hass.states.async_set("lock.test2", STATE_UNLOCKED) await hass.async_block_till_done() - assert hass.states.get("lock.lock_group").state == LockState.UNLOCKED + assert hass.states.get("lock.lock_group").state == STATE_UNLOCKED # Otherwise -> locked - hass.states.async_set("lock.test1", LockState.LOCKED) - hass.states.async_set("lock.test2", LockState.LOCKED) + hass.states.async_set("lock.test1", STATE_LOCKED) + hass.states.async_set("lock.test2", STATE_LOCKED) await hass.async_block_till_done() - assert hass.states.get("lock.lock_group").state == LockState.LOCKED + assert hass.states.get("lock.lock_group").state == STATE_LOCKED # All group members removed from the state machine -> unavailable hass.states.async_remove("lock.test1") @@ -186,9 +195,9 @@ async def test_service_calls_openable(hass: HomeAssistant) -> None: await hass.async_block_till_done() group_state = hass.states.get("lock.lock_group") - assert group_state.state == LockState.UNLOCKED - assert hass.states.get("lock.openable_lock").state == LockState.LOCKED - assert hass.states.get("lock.another_openable_lock").state == LockState.UNLOCKED + assert group_state.state == STATE_UNLOCKED + assert hass.states.get("lock.openable_lock").state == STATE_LOCKED + assert hass.states.get("lock.another_openable_lock").state == STATE_UNLOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -196,8 +205,8 @@ async def test_service_calls_openable(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: "lock.lock_group"}, blocking=True, ) - assert hass.states.get("lock.openable_lock").state == LockState.OPEN - assert hass.states.get("lock.another_openable_lock").state == LockState.OPEN + assert hass.states.get("lock.openable_lock").state == STATE_OPEN + assert hass.states.get("lock.another_openable_lock").state == STATE_OPEN await hass.services.async_call( LOCK_DOMAIN, @@ -205,8 +214,8 @@ async def test_service_calls_openable(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: "lock.lock_group"}, blocking=True, ) - assert hass.states.get("lock.openable_lock").state == LockState.LOCKED - assert hass.states.get("lock.another_openable_lock").state == LockState.LOCKED + assert hass.states.get("lock.openable_lock").state == STATE_LOCKED + assert hass.states.get("lock.another_openable_lock").state == STATE_LOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -214,8 +223,8 @@ async def test_service_calls_openable(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: "lock.lock_group"}, blocking=True, ) - assert hass.states.get("lock.openable_lock").state == LockState.UNLOCKED - assert hass.states.get("lock.another_openable_lock").state == LockState.UNLOCKED + 
assert hass.states.get("lock.openable_lock").state == STATE_UNLOCKED + assert hass.states.get("lock.another_openable_lock").state == STATE_UNLOCKED async def test_service_calls_basic(hass: HomeAssistant) -> None: @@ -239,9 +248,9 @@ async def test_service_calls_basic(hass: HomeAssistant) -> None: await hass.async_block_till_done() group_state = hass.states.get("lock.lock_group") - assert group_state.state == LockState.UNLOCKED - assert hass.states.get("lock.basic_lock").state == LockState.LOCKED - assert hass.states.get("lock.another_basic_lock").state == LockState.UNLOCKED + assert group_state.state == STATE_UNLOCKED + assert hass.states.get("lock.basic_lock").state == STATE_LOCKED + assert hass.states.get("lock.another_basic_lock").state == STATE_UNLOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -249,8 +258,8 @@ async def test_service_calls_basic(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: "lock.lock_group"}, blocking=True, ) - assert hass.states.get("lock.basic_lock").state == LockState.LOCKED - assert hass.states.get("lock.another_basic_lock").state == LockState.LOCKED + assert hass.states.get("lock.basic_lock").state == STATE_LOCKED + assert hass.states.get("lock.another_basic_lock").state == STATE_LOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -258,8 +267,8 @@ async def test_service_calls_basic(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: "lock.lock_group"}, blocking=True, ) - assert hass.states.get("lock.basic_lock").state == LockState.UNLOCKED - assert hass.states.get("lock.another_basic_lock").state == LockState.UNLOCKED + assert hass.states.get("lock.basic_lock").state == STATE_UNLOCKED + assert hass.states.get("lock.another_basic_lock").state == STATE_UNLOCKED with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -294,7 +303,7 @@ async def test_reload(hass: HomeAssistant) -> None: await hass.async_start() await hass.async_block_till_done() - assert hass.states.get("lock.lock_group").state == LockState.UNLOCKED + assert hass.states.get("lock.lock_group").state == STATE_UNLOCKED yaml_path = get_fixture_path("configuration.yaml", "group") with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path): @@ -313,7 +322,7 @@ async def test_reload(hass: HomeAssistant) -> None: async def test_reload_with_platform_not_setup(hass: HomeAssistant) -> None: """Test the ability to reload locks.""" - hass.states.async_set("lock.something", LockState.UNLOCKED) + hass.states.async_set("lock.something", STATE_UNLOCKED) await async_setup_component( hass, LOCK_DOMAIN, @@ -363,11 +372,11 @@ async def test_reload_with_base_integration_platform_not_setup( }, ) await hass.async_block_till_done() - hass.states.async_set("lock.front_lock", LockState.LOCKED) - hass.states.async_set("lock.back_lock", LockState.UNLOCKED) + hass.states.async_set("lock.front_lock", STATE_LOCKED) + hass.states.async_set("lock.back_lock", STATE_UNLOCKED) - hass.states.async_set("lock.outside_lock", LockState.LOCKED) - hass.states.async_set("lock.outside_lock_2", LockState.LOCKED) + hass.states.async_set("lock.outside_lock", STATE_LOCKED) + hass.states.async_set("lock.outside_lock_2", STATE_LOCKED) yaml_path = get_fixture_path("configuration.yaml", "group") with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path): @@ -382,8 +391,8 @@ async def test_reload_with_base_integration_platform_not_setup( assert hass.states.get("lock.lock_group") is None assert hass.states.get("lock.inside_locks_g") is not None assert hass.states.get("lock.outside_locks_g") is not None - assert 
hass.states.get("lock.inside_locks_g").state == LockState.UNLOCKED - assert hass.states.get("lock.outside_locks_g").state == LockState.LOCKED + assert hass.states.get("lock.inside_locks_g").state == STATE_UNLOCKED + assert hass.states.get("lock.outside_locks_g").state == STATE_LOCKED @patch.object(demo_lock, "LOCK_UNLOCK_DELAY", 0) @@ -417,7 +426,7 @@ async def test_nested_group(hass: HomeAssistant) -> None: state = hass.states.get("lock.some_group") assert state is not None - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED assert state.attributes.get(ATTR_ENTITY_ID) == [ "lock.front_door", "lock.kitchen_door", @@ -425,7 +434,7 @@ async def test_nested_group(hass: HomeAssistant) -> None: state = hass.states.get("lock.nested_group") assert state is not None - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED assert state.attributes.get(ATTR_ENTITY_ID) == ["lock.some_group"] # Test controlling the nested group @@ -435,7 +444,7 @@ async def test_nested_group(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: "lock.nested_group"}, blocking=True, ) - assert hass.states.get("lock.front_door").state == LockState.LOCKED - assert hass.states.get("lock.kitchen_door").state == LockState.LOCKED - assert hass.states.get("lock.some_group").state == LockState.LOCKED - assert hass.states.get("lock.nested_group").state == LockState.LOCKED + assert hass.states.get("lock.front_door").state == STATE_LOCKED + assert hass.states.get("lock.kitchen_door").state == STATE_LOCKED + assert hass.states.get("lock.some_group").state == STATE_LOCKED + assert hass.states.get("lock.nested_group").state == STATE_LOCKED diff --git a/tests/components/group/test_media_player.py b/tests/components/group/test_media_player.py index 23cdd1598dd..451aae200b3 100644 --- a/tests/components/group/test_media_player.py +++ b/tests/components/group/test_media_player.py @@ -1,16 +1,14 @@ """The tests for the Media group platform.""" import asyncio -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import Mock, patch import pytest from homeassistant.components.group import DOMAIN from homeassistant.components.media_player import ( - ATTR_MEDIA_ANNOUNCE, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, - ATTR_MEDIA_EXTRA, ATTR_MEDIA_SEEK_POSITION, ATTR_MEDIA_SHUFFLE, ATTR_MEDIA_TRACK, @@ -47,7 +45,7 @@ from homeassistant.const import ( STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_platform, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -600,59 +598,3 @@ async def test_nested_group(hass: HomeAssistant) -> None: assert hass.states.get("media_player.kitchen").state == STATE_OFF assert hass.states.get("media_player.group_1").state == STATE_OFF assert hass.states.get("media_player.nested_group").state == STATE_OFF - - -async def test_service_play_media_kwargs(hass: HomeAssistant) -> None: - """Test that kwargs get passed through on play_media service call.""" - await async_setup_component( - hass, - MEDIA_DOMAIN, - { - MEDIA_DOMAIN: [ - {"platform": "demo"}, - { - "platform": DOMAIN, - "entities": [ - "media_player.bedroom", - "media_player.living_room", - ], - }, - ] - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - platform = entity_platform.async_get_platforms(hass, "media_player")[0] - mp_bedroom = platform.domain_entities["media_player.bedroom"] - 
mp_bedroom.play_media = MagicMock() - - mp_living_room = platform.domain_entities["media_player.living_room"] - mp_living_room.play_media = MagicMock() - - await hass.services.async_call( - MEDIA_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.media_group", - ATTR_MEDIA_CONTENT_TYPE: "some_type", - ATTR_MEDIA_CONTENT_ID: "some_id", - ATTR_MEDIA_ANNOUNCE: "true", - ATTR_MEDIA_EXTRA: { - "volume": 20, - }, - }, - ) - await hass.async_block_till_done() - - assert mp_bedroom.play_media.call_count == 1 - mp_bedroom.play_media.assert_called_with( - "some_type", "some_id", announce=True, extra={"volume": 20} - ) - - assert mp_living_room.play_media.call_count == 1 - mp_living_room.play_media.assert_called_with( - "some_type", "some_id", announce=True, extra={"volume": 20} - ) diff --git a/tests/components/group/test_notify.py b/tests/components/group/test_notify.py index bbf2d98b492..dfd200a1542 100644 --- a/tests/components/group/test_notify.py +++ b/tests/components/group/test_notify.py @@ -1,44 +1,18 @@ """The tests for the notify.group platform.""" -from collections.abc import Generator, Mapping +from collections.abc import Mapping from pathlib import Path from typing import Any from unittest.mock import MagicMock, call, patch -import pytest - from homeassistant import config as hass_config from homeassistant.components import notify -from homeassistant.components.group import DOMAIN, SERVICE_RELOAD -from homeassistant.components.notify import ( - ATTR_MESSAGE, - ATTR_TITLE, - DOMAIN as NOTIFY_DOMAIN, - SERVICE_SEND_MESSAGE, - NotifyEntity, -) -from homeassistant.config_entries import ConfigEntry, ConfigFlow -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_UNAVAILABLE, - STATE_UNKNOWN, - Platform, -) +from homeassistant.components.group import SERVICE_RELOAD from homeassistant.core import HomeAssistant from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - MockEntity, - MockModule, - MockPlatform, - get_fixture_path, - mock_config_flow, - mock_integration, - mock_platform, - setup_test_component_platform, -) +from tests.common import MockPlatform, get_fixture_path, mock_platform class MockNotifyPlatform(MockPlatform): @@ -122,7 +96,7 @@ async def test_send_message_with_data(hass: HomeAssistant, tmp_path: Path) -> No "services": [ {"service": "test_service1"}, { - "action": "test_service2", + "service": "test_service2", "data": { "target": "unnamed device", "data": {"test": "message", "default": "default"}, @@ -202,41 +176,6 @@ async def test_send_message_with_data(hass: HomeAssistant, tmp_path: Path) -> No ) -async def test_invalid_configuration( - hass: HomeAssistant, tmp_path: Path, caplog: pytest.LogCaptureFixture -) -> None: - """Test failing to set up group with an invalid configuration.""" - assert await async_setup_component( - hass, - "group", - {}, - ) - await hass.async_block_till_done() - - group_setup = [ - { - "platform": "group", - "name": "My invalid notification group", - "services": [ - { - "service": "test_service1", - "action": "test_service2", - "data": { - "target": "unnamed device", - "data": {"test": "message", "default": "default"}, - }, - }, - ], - } - ] - await help_setup_notify(hass, tmp_path, {"service1": 1, "service2": 2}, group_setup) - assert not hass.services.has_service("notify", "my_invalid_notification_group") - assert ( - "Invalid config for 'notify' from integration 'group':" - " Cannot specify both 'service' 
and 'action'." in caplog.text - ) - - async def test_reload_notify(hass: HomeAssistant, tmp_path: Path) -> None: """Verify we can reload the notify service.""" assert await async_setup_component( @@ -254,7 +193,7 @@ async def test_reload_notify(hass: HomeAssistant, tmp_path: Path) -> None: { "name": "group_notify", "platform": "group", - "services": [{"action": "test_service1"}], + "services": [{"service": "test_service1"}], } ], ) @@ -278,144 +217,3 @@ async def test_reload_notify(hass: HomeAssistant, tmp_path: Path) -> None: assert hass.services.has_service(notify.DOMAIN, "test_service2") assert not hass.services.has_service(notify.DOMAIN, "group_notify") assert hass.services.has_service(notify.DOMAIN, "new_group_notify") - - -class MockFlow(ConfigFlow): - """Test flow.""" - - -@pytest.fixture -def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: - """Mock config flow.""" - mock_platform(hass, "test.config_flow") - - with mock_config_flow("test", MockFlow): - yield - - -class MockNotifyEntity(MockEntity, NotifyEntity): - """Mock Email notifier entity to use in tests.""" - - def __init__(self, **values: Any) -> None: - """Initialize the mock entity.""" - super().__init__(**values) - self.send_message_mock_calls = MagicMock() - - async def async_send_message(self, message: str, title: str | None = None) -> None: - """Send a notification message.""" - self.send_message_mock_calls(message, title=title) - - -async def help_async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry -) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups( - config_entry, [Platform.NOTIFY] - ) - return True - - -async def help_async_unload_entry( - hass: HomeAssistant, config_entry: ConfigEntry -) -> bool: - """Unload test config entry.""" - return await hass.config_entries.async_unload_platforms( - config_entry, [Platform.NOTIFY] - ) - - -@pytest.fixture -async def mock_notifiers( - hass: HomeAssistant, config_flow_fixture: None -) -> list[NotifyEntity]: - """Set up the notify entities.""" - entity = MockNotifyEntity(name="test", entity_id="notify.test") - entity2 = MockNotifyEntity(name="test2", entity_id="notify.test2") - entities = [entity, entity2] - test_entry = MockConfigEntry(domain="test") - test_entry.add_to_hass(hass) - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=help_async_setup_entry_init, - async_unload_entry=help_async_unload_entry, - ), - ) - setup_test_component_platform(hass, NOTIFY_DOMAIN, entities, from_config_entry=True) - assert await hass.config_entries.async_setup(test_entry.entry_id) - await hass.async_block_till_done() - return entities - - -async def test_notify_entity_group( - hass: HomeAssistant, mock_notifiers: list[NotifyEntity] -) -> None: - """Test sending a message to a notify group.""" - entity, entity2 = mock_notifiers - assert entity.send_message_mock_calls.call_count == 0 - assert entity2.send_message_mock_calls.call_count == 0 - - config_entry = MockConfigEntry( - domain=DOMAIN, - options={ - "group_type": "notify", - "name": "Test Group", - "entities": ["notify.test", "notify.test2"], - "hide_members": True, - }, - title="Test Group", - ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - await hass.services.async_call( - NOTIFY_DOMAIN, - SERVICE_SEND_MESSAGE, - { - ATTR_MESSAGE: "Hello", - ATTR_TITLE: "Test notification", - ATTR_ENTITY_ID: "notify.test_group", - }, - blocking=True, - ) - - 
assert entity.send_message_mock_calls.call_count == 1 - assert entity.send_message_mock_calls.call_args == call( - "Hello", title="Test notification" - ) - assert entity2.send_message_mock_calls.call_count == 1 - assert entity2.send_message_mock_calls.call_args == call( - "Hello", title="Test notification" - ) - - -async def test_state_reporting(hass: HomeAssistant) -> None: - """Test sending a message to a notify group.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - options={ - "group_type": "notify", - "name": "Test Group", - "entities": ["notify.test", "notify.test2"], - "hide_members": True, - }, - title="Test Group", - ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert hass.states.get("notify.test_group").state == STATE_UNAVAILABLE - - hass.states.async_set("notify.test", STATE_UNAVAILABLE) - hass.states.async_set("notify.test2", STATE_UNAVAILABLE) - await hass.async_block_till_done() - assert hass.states.get("notify.test_group").state == STATE_UNAVAILABLE - - hass.states.async_set("notify.test", "2021-01-01T23:59:59.123+00:00") - hass.states.async_set("notify.test2", "2021-01-01T23:59:59.123+00:00") - await hass.async_block_till_done() - assert hass.states.get("notify.test_group").state == STATE_UNKNOWN diff --git a/tests/components/group/test_sensor.py b/tests/components/group/test_sensor.py index de406cb251c..db642506361 100644 --- a/tests/components/group/test_sensor.py +++ b/tests/components/group/test_sensor.py @@ -32,7 +32,6 @@ from homeassistant.const import ( SERVICE_RELOAD, STATE_UNAVAILABLE, STATE_UNKNOWN, - UnitOfTemperature, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir @@ -497,7 +496,7 @@ async def test_sensor_with_uoms_but_no_device_class( state = hass.states.get("sensor.test_sum") assert state.attributes.get("device_class") is None assert state.attributes.get("state_class") is None - assert state.attributes.get("unit_of_measurement") is None + assert state.attributes.get("unit_of_measurement") == "W" assert state.state == STATE_UNKNOWN assert ( @@ -651,10 +650,10 @@ async def test_sensor_calculated_result_fails_on_uom(hass: HomeAssistant) -> Non await hass.async_block_till_done() state = hass.states.get("sensor.test_sum") - assert state.state == STATE_UNAVAILABLE + assert state.state == STATE_UNKNOWN assert state.attributes.get("device_class") == "energy" assert state.attributes.get("state_class") == "total" - assert state.attributes.get("unit_of_measurement") is None + assert state.attributes.get("unit_of_measurement") == "kWh" async def test_sensor_calculated_properties_not_convertible_device_class( @@ -731,7 +730,7 @@ async def test_sensor_calculated_properties_not_convertible_device_class( assert state.state == STATE_UNKNOWN assert state.attributes.get("device_class") == "humidity" assert state.attributes.get("state_class") == "measurement" - assert state.attributes.get("unit_of_measurement") is None + assert state.attributes.get("unit_of_measurement") == "%" assert ( "Unable to use state. 
Only entities with correct unit of measurement is" @@ -813,197 +812,3 @@ async def test_sensors_attributes_added_when_entity_info_available( assert state.attributes.get(ATTR_ICON) is None assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.TOTAL assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "L" - - -async def test_sensor_state_class_no_uom_not_available( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test when input sensors drops unit of measurement.""" - - # If we have a valid unit of measurement from all input sensors - # the group sensor will go unknown in the case any input sensor - # drops the unit of measurement and log a warning. - - config = { - SENSOR_DOMAIN: { - "platform": GROUP_DOMAIN, - "name": "test_sum", - "type": "sum", - "entities": ["sensor.test_1", "sensor.test_2", "sensor.test_3"], - "unique_id": "very_unique_id_sum_sensor", - } - } - - entity_ids = config["sensor"]["entities"] - - input_attributes = { - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": PERCENTAGE, - } - - hass.states.async_set(entity_ids[0], VALUES[0], input_attributes) - hass.states.async_set(entity_ids[1], VALUES[1], input_attributes) - hass.states.async_set(entity_ids[2], VALUES[2], input_attributes) - await hass.async_block_till_done() - - assert await async_setup_component(hass, "sensor", config) - await hass.async_block_till_done() - - state = hass.states.get("sensor.test_sum") - assert state.state == str(sum(VALUES)) - assert state.attributes.get("state_class") == "measurement" - assert state.attributes.get("unit_of_measurement") == "%" - - assert ( - "Unable to use state. Only entities with correct unit of measurement is" - " supported" - ) not in caplog.text - - # sensor.test_3 drops the unit of measurement - hass.states.async_set( - entity_ids[2], - VALUES[2], - { - "state_class": SensorStateClass.MEASUREMENT, - }, - ) - await hass.async_block_till_done() - - state = hass.states.get("sensor.test_sum") - assert state.state == STATE_UNKNOWN - assert state.attributes.get("state_class") == "measurement" - assert state.attributes.get("unit_of_measurement") is None - - assert ( - "Unable to use state. 
Only entities with correct unit of measurement is" - " supported, entity sensor.test_3, value 15.3 with" - " device class None and unit of measurement None excluded from calculation" - " in sensor.test_sum" - ) in caplog.text - - -async def test_sensor_different_attributes_ignore_non_numeric( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the sensor handles calculating attributes when using ignore_non_numeric.""" - config = { - SENSOR_DOMAIN: { - "platform": GROUP_DOMAIN, - "name": "test_sum", - "type": "sum", - "ignore_non_numeric": True, - "entities": ["sensor.test_1", "sensor.test_2", "sensor.test_3"], - "unique_id": "very_unique_id_sum_sensor", - } - } - - entity_ids = config["sensor"]["entities"] - - assert await async_setup_component(hass, "sensor", config) - await hass.async_block_till_done() - - state = hass.states.get("sensor.test_sum") - assert state.state == STATE_UNAVAILABLE - assert state.attributes.get("state_class") is None - assert state.attributes.get("device_class") is None - assert state.attributes.get("unit_of_measurement") is None - - test_cases = [ - { - "entity": entity_ids[0], - "value": VALUES[0], - "attributes": { - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": PERCENTAGE, - }, - "expected_state": str(float(VALUES[0])), - "expected_state_class": SensorStateClass.MEASUREMENT, - "expected_device_class": None, - "expected_unit_of_measurement": PERCENTAGE, - }, - { - "entity": entity_ids[1], - "value": VALUES[1], - "attributes": { - "state_class": SensorStateClass.MEASUREMENT, - "device_class": SensorDeviceClass.HUMIDITY, - "unit_of_measurement": PERCENTAGE, - }, - "expected_state": str(float(sum([VALUES[0], VALUES[1]]))), - "expected_state_class": SensorStateClass.MEASUREMENT, - "expected_device_class": None, - "expected_unit_of_measurement": PERCENTAGE, - }, - { - "entity": entity_ids[2], - "value": VALUES[2], - "attributes": { - "state_class": SensorStateClass.MEASUREMENT, - "device_class": SensorDeviceClass.TEMPERATURE, - "unit_of_measurement": UnitOfTemperature.CELSIUS, - }, - "expected_state": str(float(sum(VALUES))), - "expected_state_class": SensorStateClass.MEASUREMENT, - "expected_device_class": None, - "expected_unit_of_measurement": None, - }, - { - "entity": entity_ids[2], - "value": VALUES[2], - "attributes": { - "state_class": SensorStateClass.MEASUREMENT, - "device_class": SensorDeviceClass.HUMIDITY, - "unit_of_measurement": PERCENTAGE, - }, - "expected_state": str(float(sum(VALUES))), - "expected_state_class": SensorStateClass.MEASUREMENT, - # One sensor does not have a device class - "expected_device_class": None, - "expected_unit_of_measurement": PERCENTAGE, - }, - { - "entity": entity_ids[0], - "value": VALUES[0], - "attributes": { - "state_class": SensorStateClass.MEASUREMENT, - "device_class": SensorDeviceClass.HUMIDITY, - "unit_of_measurement": PERCENTAGE, - }, - "expected_state": str(float(sum(VALUES))), - "expected_state_class": SensorStateClass.MEASUREMENT, - # First sensor now has a device class - "expected_device_class": SensorDeviceClass.HUMIDITY, - "expected_unit_of_measurement": PERCENTAGE, - }, - { - "entity": entity_ids[0], - "value": VALUES[0], - "attributes": { - "state_class": SensorStateClass.MEASUREMENT, - }, - "expected_state": str(float(sum(VALUES))), - "expected_state_class": SensorStateClass.MEASUREMENT, - "expected_device_class": None, - "expected_unit_of_measurement": None, - }, - ] - - for test_case in test_cases: - hass.states.async_set( - test_case["entity"], - 
test_case["value"], - test_case["attributes"], - ) - await hass.async_block_till_done() - state = hass.states.get("sensor.test_sum") - assert state.state == test_case["expected_state"] - assert state.attributes.get("state_class") == test_case["expected_state_class"] - assert ( - state.attributes.get("device_class") == test_case["expected_device_class"] - ) - assert ( - state.attributes.get("unit_of_measurement") - == test_case["expected_unit_of_measurement"] - ) diff --git a/tests/components/guardian/conftest.py b/tests/components/guardian/conftest.py index 61813cb1df5..87ff96aff45 100644 --- a/tests/components/guardian/conftest.py +++ b/tests/components/guardian/conftest.py @@ -1,18 +1,16 @@ """Define fixtures for Elexa Guardian tests.""" -from collections.abc import AsyncGenerator, Generator -from typing import Any +import json from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.guardian import CONF_UID, DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT -from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from homeassistant.util.json import JsonObjectType -from tests.common import MockConfigEntry, load_json_object_fixture +from tests.common import MockConfigEntry, load_fixture @pytest.fixture @@ -25,9 +23,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture(name="config_entry") -def config_entry_fixture( - hass: HomeAssistant, config: dict[str, Any], unique_id: str -) -> MockConfigEntry: +def config_entry_fixture(hass, config, unique_id): """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -39,7 +35,7 @@ def config_entry_fixture( @pytest.fixture(name="config") -def config_fixture() -> dict[str, Any]: +def config_fixture(hass): """Define a config entry data fixture.""" return { CONF_IP_ADDRESS: "192.168.1.100", @@ -48,68 +44,68 @@ def config_fixture() -> dict[str, Any]: @pytest.fixture(name="data_sensor_pair_dump", scope="package") -def data_sensor_pair_dump_fixture() -> JsonObjectType: +def data_sensor_pair_dump_fixture(): """Define data from a successful sensor_pair_dump response.""" - return load_json_object_fixture("sensor_pair_dump_data.json", "guardian") + return json.loads(load_fixture("sensor_pair_dump_data.json", "guardian")) @pytest.fixture(name="data_sensor_pair_sensor", scope="package") -def data_sensor_pair_sensor_fixture() -> JsonObjectType: +def data_sensor_pair_sensor_fixture(): """Define data from a successful sensor_pair_sensor response.""" - return load_json_object_fixture("sensor_pair_sensor_data.json", "guardian") + return json.loads(load_fixture("sensor_pair_sensor_data.json", "guardian")) @pytest.fixture(name="data_sensor_paired_sensor_status", scope="package") -def data_sensor_paired_sensor_status_fixture() -> JsonObjectType: +def data_sensor_paired_sensor_status_fixture(): """Define data from a successful sensor_paired_sensor_status response.""" - return load_json_object_fixture("sensor_paired_sensor_status_data.json", "guardian") + return json.loads(load_fixture("sensor_paired_sensor_status_data.json", "guardian")) @pytest.fixture(name="data_system_diagnostics", scope="package") -def data_system_diagnostics_fixture() -> JsonObjectType: +def data_system_diagnostics_fixture(): """Define data from a successful system_diagnostics response.""" - return load_json_object_fixture("system_diagnostics_data.json", "guardian") + return json.loads(load_fixture("system_diagnostics_data.json", 
"guardian")) @pytest.fixture(name="data_system_onboard_sensor_status", scope="package") -def data_system_onboard_sensor_status_fixture() -> JsonObjectType: +def data_system_onboard_sensor_status_fixture(): """Define data from a successful system_onboard_sensor_status response.""" - return load_json_object_fixture( - "system_onboard_sensor_status_data.json", "guardian" + return json.loads( + load_fixture("system_onboard_sensor_status_data.json", "guardian") ) @pytest.fixture(name="data_system_ping", scope="package") -def data_system_ping_fixture() -> JsonObjectType: +def data_system_ping_fixture(): """Define data from a successful system_ping response.""" - return load_json_object_fixture("system_ping_data.json", "guardian") + return json.loads(load_fixture("system_ping_data.json", "guardian")) @pytest.fixture(name="data_valve_status", scope="package") -def data_valve_status_fixture() -> JsonObjectType: +def data_valve_status_fixture(): """Define data from a successful valve_status response.""" - return load_json_object_fixture("valve_status_data.json", "guardian") + return json.loads(load_fixture("valve_status_data.json", "guardian")) @pytest.fixture(name="data_wifi_status", scope="package") -def data_wifi_status_fixture() -> JsonObjectType: +def data_wifi_status_fixture(): """Define data from a successful wifi_status response.""" - return load_json_object_fixture("wifi_status_data.json", "guardian") + return json.loads(load_fixture("wifi_status_data.json", "guardian")) @pytest.fixture(name="setup_guardian") async def setup_guardian_fixture( - hass: HomeAssistant, - config: dict[str, Any], - data_sensor_pair_dump: JsonObjectType, - data_sensor_pair_sensor: JsonObjectType, - data_sensor_paired_sensor_status: JsonObjectType, - data_system_diagnostics: JsonObjectType, - data_system_onboard_sensor_status: JsonObjectType, - data_system_ping: JsonObjectType, - data_valve_status: JsonObjectType, - data_wifi_status: JsonObjectType, -) -> AsyncGenerator[None]: + hass, + config, + data_sensor_pair_dump, + data_sensor_pair_sensor, + data_sensor_paired_sensor_status, + data_system_diagnostics, + data_system_onboard_sensor_status, + data_system_ping, + data_valve_status, + data_wifi_status, +): """Define a fixture to set up Guardian.""" with ( patch("aioguardian.client.Client.connect"), @@ -159,6 +155,6 @@ async def setup_guardian_fixture( @pytest.fixture(name="unique_id") -def unique_id_fixture() -> str: +def unique_id_fixture(hass): """Define a config entry unique ID fixture.""" return "guardian_3456" diff --git a/tests/components/guardian/test_config_flow.py b/tests/components/guardian/test_config_flow.py index 6c06171a45f..0f99578768a 100644 --- a/tests/components/guardian/test_config_flow.py +++ b/tests/components/guardian/test_config_flow.py @@ -1,7 +1,6 @@ """Define tests for the Elexa Guardian config flow.""" from ipaddress import ip_address -from typing import Any from unittest.mock import patch from aioguardian.errors import GuardianError @@ -23,8 +22,9 @@ from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") -@pytest.mark.usefixtures("config_entry", "setup_guardian") -async def test_duplicate_error(hass: HomeAssistant, config: dict[str, Any]) -> None: +async def test_duplicate_error( + hass: HomeAssistant, config, config_entry, setup_guardian +) -> None: """Test that errors are shown when duplicate entries are added.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=config @@ -33,7 +33,7 @@ async 
def test_duplicate_error(hass: HomeAssistant, config: dict[str, Any]) -> N assert result["reason"] == "already_configured" -async def test_connect_error(hass: HomeAssistant, config: dict[str, Any]) -> None: +async def test_connect_error(hass: HomeAssistant, config) -> None: """Test that the config entry errors out if the device cannot connect.""" with patch( "aioguardian.client.Client.connect", @@ -58,8 +58,7 @@ async def test_get_pin_from_uid() -> None: assert pin == "3456" -@pytest.mark.usefixtures("setup_guardian") -async def test_step_user(hass: HomeAssistant, config: dict[str, Any]) -> None: +async def test_step_user(hass: HomeAssistant, config, setup_guardian) -> None: """Test the user step.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -79,8 +78,7 @@ async def test_step_user(hass: HomeAssistant, config: dict[str, Any]) -> None: } -@pytest.mark.usefixtures("setup_guardian") -async def test_step_zeroconf(hass: HomeAssistant) -> None: +async def test_step_zeroconf(hass: HomeAssistant, setup_guardian) -> None: """Test the zeroconf step.""" zeroconf_data = zeroconf.ZeroconfServiceInfo( ip_address=ip_address("192.168.1.100"), @@ -135,8 +133,7 @@ async def test_step_zeroconf_already_in_progress(hass: HomeAssistant) -> None: assert result["reason"] == "already_in_progress" -@pytest.mark.usefixtures("setup_guardian") -async def test_step_dhcp(hass: HomeAssistant) -> None: +async def test_step_dhcp(hass: HomeAssistant, setup_guardian) -> None: """Test the dhcp step.""" dhcp_data = dhcp.DhcpServiceInfo( ip="192.168.1.100", diff --git a/tests/components/guardian/test_diagnostics.py b/tests/components/guardian/test_diagnostics.py index faba2103000..02b620b8e01 100644 --- a/tests/components/guardian/test_diagnostics.py +++ b/tests/components/guardian/test_diagnostics.py @@ -4,16 +4,15 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.components.guardian import DOMAIN, GuardianData from homeassistant.core import HomeAssistant -from tests.common import ANY, MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( hass: HomeAssistant, - config_entry: MockConfigEntry, + config_entry, hass_client: ClientSessionGenerator, - setup_guardian: None, # relies on config_entry fixture + setup_guardian, ) -> None: """Test config entry diagnostics.""" data: GuardianData = hass.data[DOMAIN][config_entry.entry_id] @@ -39,9 +38,6 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, - "created_at": ANY, - "modified_at": ANY, - "discovery_keys": {}, }, "data": { "valve_controller": { diff --git a/tests/components/habitica/conftest.py b/tests/components/habitica/conftest.py index 8d729f4358f..2401397be26 100644 --- a/tests/components/habitica/conftest.py +++ b/tests/components/habitica/conftest.py @@ -3,14 +3,6 @@ from unittest.mock import patch import pytest -from yarl import URL - -from homeassistant.components.habitica.const import CONF_API_USER, DEFAULT_URL, DOMAIN -from homeassistant.const import CONF_API_KEY, CONF_URL -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry, load_json_object_fixture -from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(autouse=True) @@ -21,66 +13,3 @@ def disable_plumbum(): """ with patch("plumbum.local"), patch("plumbum.colors"): yield - - -def mock_called_with( - mock_client: 
AiohttpClientMocker, - method: str, - url: str, -) -> tuple | None: - """Assert request mock was called with json data.""" - - return next( - ( - call - for call in mock_client.mock_calls - if call[0].upper() == method.upper() and call[1] == URL(url) - ), - None, - ) - - -@pytest.fixture -def mock_habitica(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: - """Mock aiohttp requests.""" - - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/user", json=load_json_object_fixture("user.json", DOMAIN) - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/tasks/user", - params={"type": "completedTodos"}, - json=load_json_object_fixture("completed_todos.json", DOMAIN), - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/tasks/user", - json=load_json_object_fixture("tasks.json", DOMAIN), - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/content", - params={"language": "en"}, - json=load_json_object_fixture("content.json", DOMAIN), - ) - - return aioclient_mock - - -@pytest.fixture(name="config_entry") -def mock_config_entry() -> MockConfigEntry: - """Mock Habitica configuration entry.""" - return MockConfigEntry( - domain=DOMAIN, - title="test-user", - data={ - CONF_URL: DEFAULT_URL, - CONF_API_USER: "test-api-user", - CONF_API_KEY: "test-api-key", - }, - unique_id="00000000-0000-0000-0000-000000000000", - ) - - -@pytest.fixture -async def set_tz(hass: HomeAssistant) -> None: - """Fixture to set timezone.""" - await hass.config.async_set_time_zone("Europe/Berlin") diff --git a/tests/components/habitica/fixtures/common_buttons_unavailable.json b/tests/components/habitica/fixtures/common_buttons_unavailable.json deleted file mode 100644 index efee5364e02..00000000000 --- a/tests/components/habitica/fixtures/common_buttons_unavailable.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "data": { - "api_user": "test-api-user", - "profile": { "name": "test-user" }, - "stats": { - "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, - "stealth": 0, - "streaks": true, - "seafoam": false, - "shinySeed": false, - "snowball": false, - "spookySparkles": false - }, - "hp": 50, - "mp": 50, - "exp": 737, - "gp": 0, - "lvl": 5, - "class": "wizard", - "maxHealth": 50, - "maxMP": 166, - "toNextLevel": 880, - "points": 0 - }, - "preferences": { - "sleep": false, - "automaticAllocation": false, - "disableClasses": false, - "language": "en" - }, - "flags": { - "classSelected": true - }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_warrior_5", - "armor": "armor_warrior_5", - "head": "head_warrior_5", - "shield": "shield_warrior_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } - } -} diff --git a/tests/components/habitica/fixtures/completed_todos.json b/tests/components/habitica/fixtures/completed_todos.json deleted file mode 100644 index 8185a0a4ff7..00000000000 --- a/tests/components/habitica/fixtures/completed_todos.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "success": true, - "data": [ - { - "_id": "162f0bbe-a097-4a06-b4f4-8fbeed85d2ba", - "completed": true, - "collapseChecklist": false, - "checklist": [], - "type": "todo", - "text": "Wocheneinkauf erledigen", - "notes": "Lebensmittel und Haushaltsbedarf für die Woche einkaufen.", - "tags": ["64235347-55d0-4ba1-a86a-3428dcfdf319"], - "value": 1, - "priority": 1.5, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "reminders": [], - "byHabitica": false, - 
"createdAt": "2024-09-21T22:19:10.919Z", - "updatedAt": "2024-09-21T22:19:15.484Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "dateCompleted": "2024-09-21T22:19:15.478Z", - "id": "162f0bbe-a097-4a06-b4f4-8fbeed85d2ba" - }, - { - "_id": "3fa06743-aa0f-472b-af1a-f27c755e329c", - "completed": true, - "collapseChecklist": false, - "checklist": [], - "type": "todo", - "text": "Wohnung aufräumen", - "notes": "Wohnzimmer und Küche gründlich aufräumen.", - "tags": ["64235347-55d0-4ba1-a86a-3428dcfdf319"], - "value": 1, - "priority": 2, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "reminders": [], - "byHabitica": false, - "createdAt": "2024-09-21T22:18:30.646Z", - "updatedAt": "2024-09-21T22:18:34.663Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "dateCompleted": "2024-09-21T22:18:34.660Z", - "id": "3fa06743-aa0f-472b-af1a-f27c755e329c" - } - ], - "notifications": [ - { - "type": "ITEM_RECEIVED", - "data": { - "icon": "notif_orca_mount", - "title": "Orcas for Summer Splash!", - "text": "To celebrate Summer Splash, we've given you an Orca Mount!", - "destination": "stable" - }, - "seen": true, - "id": "b7a85df1-06ed-4ab1-b56d-43418fc6a5e5" - }, - { - "type": "UNALLOCATED_STATS_POINTS", - "data": { - "points": 2 - }, - "seen": true, - "id": "bc3f8a69-231f-4eb1-ba48-a00b6c0e0f37" - } - ], - "userV": 584, - "appVersion": "5.28.6" -} diff --git a/tests/components/habitica/fixtures/content.json b/tests/components/habitica/fixtures/content.json deleted file mode 100644 index e8e14dead73..00000000000 --- a/tests/components/habitica/fixtures/content.json +++ /dev/null @@ -1,287 +0,0 @@ -{ - "success": true, - "data": { - "gear": { - "flat": { - "weapon_warrior_5": { - "text": "Ruby Sword", - "notes": "Weapon whose forge-glow never fades. Increases Strength by 15. ", - "str": 15, - "value": 90, - "type": "weapon", - "key": "weapon_warrior_5", - "set": "warrior-5", - "klass": "warrior", - "index": "5", - "int": 0, - "per": 0, - "con": 0 - }, - "armor_warrior_5": { - "text": "Golden Armor", - "notes": "Looks ceremonial, but no known blade can pierce it. Increases Constitution by 11.", - "con": 11, - "value": 120, - "last": true, - "type": "armor", - "key": "armor_warrior_5", - "set": "warrior-5", - "klass": "warrior", - "index": "5", - "str": 0, - "int": 0, - "per": 0 - }, - "head_warrior_5": { - "text": "Golden Helm", - "notes": "Regal crown bound to shining armor. Increases Strength by 12.", - "str": 12, - "value": 80, - "last": true, - "type": "head", - "key": "head_warrior_5", - "set": "warrior-5", - "klass": "warrior", - "index": "5", - "int": 0, - "per": 0, - "con": 0 - }, - "shield_warrior_5": { - "text": "Golden Shield", - "notes": "Shining badge of the vanguard. Increases Constitution by 9.", - "con": 9, - "value": 90, - "last": true, - "type": "shield", - "key": "shield_warrior_5", - "set": "warrior-5", - "klass": "warrior", - "index": "5", - "str": 0, - "int": 0, - "per": 0 - }, - "weapon_wizard_5": { - "twoHanded": true, - "text": "Archmage Staff", - "notes": "Assists in weaving the most complex of spells. Increases Intelligence by 15 and Perception by 7. Two-handed item.", - "int": 15, - "per": 7, - "value": 160, - "type": "weapon", - "key": "weapon_wizard_5", - "set": "wizard-5", - "klass": "wizard", - "index": "5", - "str": 0, - "con": 0 - }, - "armor_wizard_5": { - "text": "Royal Magus Robe", - "notes": "Symbol of the power behind the throne. 
Increases Intelligence by 12.", - "int": 12, - "value": 120, - "last": true, - "type": "armor", - "key": "armor_wizard_5", - "set": "wizard-5", - "klass": "wizard", - "index": "5", - "str": 0, - "per": 0, - "con": 0 - }, - "head_wizard_5": { - "text": "Royal Magus Hat", - "notes": "Shows authority over fortune, weather, and lesser mages. Increases Perception by 10.", - "per": 10, - "value": 80, - "last": true, - "type": "head", - "key": "head_wizard_5", - "set": "wizard-5", - "klass": "wizard", - "index": "5", - "str": 0, - "int": 0, - "con": 0 - }, - "weapon_healer_5": { - "text": "Royal Scepter", - "notes": "Fit to grace the hand of a monarch, or of one who stands at a monarch's right hand. Increases Intelligence by 9. ", - "int": 9, - "value": 90, - "type": "weapon", - "key": "weapon_healer_5", - "set": "healer-5", - "klass": "healer", - "index": "5", - "str": 0, - "per": 0, - "con": 0 - }, - "armor_healer_5": { - "text": "Royal Mantle", - "notes": "Attire of those who have saved the lives of kings. Increases Constitution by 18.", - "con": 18, - "value": 120, - "last": true, - "type": "armor", - "key": "armor_healer_5", - "set": "healer-5", - "klass": "healer", - "index": "5", - "str": 0, - "int": 0, - "per": 0 - }, - "head_healer_5": { - "text": "Royal Diadem", - "notes": "For king, queen, or miracle-worker. Increases Intelligence by 9.", - "int": 9, - "value": 80, - "last": true, - "type": "head", - "key": "head_healer_5", - "set": "healer-5", - "klass": "healer", - "index": "5", - "str": 0, - "per": 0, - "con": 0 - }, - "shield_healer_5": { - "text": "Royal Shield", - "notes": "Bestowed upon those most dedicated to the kingdom's defense. Increases Constitution by 12.", - "con": 12, - "value": 90, - "last": true, - "type": "shield", - "key": "shield_healer_5", - "set": "healer-5", - "klass": "healer", - "index": "5", - "str": 0, - "int": 0, - "per": 0 - }, - "weapon_rogue_5": { - "text": "Ninja-to", - "notes": "Sleek and deadly as the ninja themselves. Increases Strength by 8. ", - "str": 8, - "value": 90, - "type": "weapon", - "key": "weapon_rogue_5", - "set": "rogue-5", - "klass": "rogue", - "index": "5", - "int": 0, - "per": 0, - "con": 0 - }, - "armor_rogue_5": { - "text": "Umbral Armor", - "notes": "Allows stealth in the open in broad daylight. Increases Perception by 18.", - "per": 18, - "value": 120, - "last": true, - "type": "armor", - "key": "armor_rogue_5", - "set": "rogue-5", - "klass": "rogue", - "index": "5", - "str": 0, - "int": 0, - "con": 0 - }, - "head_rogue_5": { - "text": "Umbral Hood", - "notes": "Conceals even thoughts from those who would probe them. Increases Perception by 12.", - "per": 12, - "value": 80, - "last": true, - "type": "head", - "key": "head_rogue_5", - "set": "rogue-5", - "klass": "rogue", - "index": "5", - "str": 0, - "int": 0, - "con": 0 - }, - "shield_rogue_5": { - "text": "Ninja-to", - "notes": "Sleek and deadly as the ninja themselves. Increases Strength by 8. ", - "str": 8, - "value": 90, - "type": "shield", - "key": "shield_rogue_5", - "set": "rogue-5", - "klass": "rogue", - "index": "5", - "int": 0, - "per": 0, - "con": 0 - }, - "back_special_heroicAureole": { - "text": "Heroic Aureole", - "notes": "The gems on this aureole glimmer when you tell your tales of glory. 
Increases all stats by 7.", - "con": 7, - "str": 7, - "per": 7, - "int": 7, - "value": 175, - "type": "back", - "key": "back_special_heroicAureole", - "set": "special-heroicAureole", - "klass": "special", - "index": "heroicAureole" - }, - "headAccessory_armoire_gogglesOfBookbinding": { - "per": 8, - "set": "bookbinder", - "notes": "These goggles will help you zero in on any task, large or small! Increases Perception by 8. Enchanted Armoire: Bookbinder Set (Item 1 of 4).", - "text": "Goggles of Bookbinding", - "value": 100, - "type": "headAccessory", - "key": "headAccessory_armoire_gogglesOfBookbinding", - "klass": "armoire", - "index": "gogglesOfBookbinding", - "str": 0, - "int": 0, - "con": 0 - }, - "eyewear_armoire_plagueDoctorMask": { - "con": 5, - "int": 5, - "set": "plagueDoctor", - "notes": "An authentic mask worn by the doctors who battle the Plague of Procrastination. Increases Constitution and Intelligence by 5 each. Enchanted Armoire: Plague Doctor Set (Item 2 of 3).", - "text": "Plague Doctor Mask", - "value": 100, - "type": "eyewear", - "key": "eyewear_armoire_plagueDoctorMask", - "klass": "armoire", - "index": "plagueDoctorMask", - "str": 0, - "per": 0 - }, - "body_special_aetherAmulet": { - "text": "Aether Amulet", - "notes": "This amulet has a mysterious history. Increases Constitution and Strength by 10 each.", - "value": 175, - "str": 10, - "con": 10, - "type": "body", - "key": "body_special_aetherAmulet", - "set": "special-aetherAmulet", - "klass": "special", - "index": "aetherAmulet", - "int": 0, - "per": 0 - } - } - } - }, - "appVersion": "5.29.2" -} diff --git a/tests/components/habitica/fixtures/duedate_fixture_1.json b/tests/components/habitica/fixtures/duedate_fixture_1.json deleted file mode 100644 index d44d5f38498..00000000000 --- a/tests/components/habitica/fixtures/duedate_fixture_1.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "success": true, - "data": [ - { - "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", - "frequency": "daily", - "everyX": 1, - "repeat": { - "m": true, - "t": true, - "w": true, - "th": true, - "f": true, - "s": true, - "su": true - }, - "streak": 1, - "nextDue": ["2024-09-22T22:00:00.000Z", "2024-09-23T22:00:00.000Z"], - "yesterDaily": true, - "history": [], - "completed": false, - "collapseChecklist": false, - "type": "daily", - "text": "Zahnseide benutzen", - "notes": "Klicke um Änderungen zu machen!", - "tags": [], - "value": -2.9663035443712333, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "startDate": "2024-07-06T22:00:00.000Z", - "daysOfMonth": [], - "weeksOfMonth": [], - "checklist": [], - "reminders": [], - "createdAt": "2024-07-07T17:51:53.268Z", - "updatedAt": "2024-09-21T22:24:20.154Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "isDue": true, - "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" - } - ], - "notifications": [], - "userV": 589, - "appVersion": "5.28.6" -} diff --git a/tests/components/habitica/fixtures/duedate_fixture_2.json b/tests/components/habitica/fixtures/duedate_fixture_2.json deleted file mode 100644 index 99cf4e89454..00000000000 --- a/tests/components/habitica/fixtures/duedate_fixture_2.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "success": true, - "data": [ - { - "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", - "frequency": "daily", - "everyX": 1, - "repeat": { - "m": true, - "t": true, - "w": true, - "th": true, - "f": true, - "s": true, - "su": true - }, - "streak": 1, - "nextDue": 
["2024-09-22T22:00:00.000Z", "2024-09-23T22:00:00.000Z"], - "yesterDaily": true, - "history": [], - "completed": false, - "collapseChecklist": false, - "type": "daily", - "text": "Zahnseide benutzen", - "notes": "Klicke um Änderungen zu machen!", - "tags": [], - "value": -2.9663035443712333, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "startDate": "2024-09-23T22:00:00.000Z", - "daysOfMonth": [], - "weeksOfMonth": [], - "checklist": [], - "reminders": [], - "createdAt": "2024-07-07T17:51:53.268Z", - "updatedAt": "2024-09-21T22:24:20.154Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "isDue": false, - "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" - } - ], - "notifications": [], - "userV": 589, - "appVersion": "5.28.6" -} diff --git a/tests/components/habitica/fixtures/duedate_fixture_3.json b/tests/components/habitica/fixtures/duedate_fixture_3.json deleted file mode 100644 index 78b66ad6643..00000000000 --- a/tests/components/habitica/fixtures/duedate_fixture_3.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "success": true, - "data": [ - { - "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", - "frequency": "monthly", - "everyX": 1, - "repeat": { - "m": true, - "t": true, - "w": true, - "th": true, - "f": true, - "s": true, - "su": true - }, - "streak": 1, - "nextDue": ["2024-10-22T22:00:00.000Z", "2024-11-22T22:00:00.000Z"], - "yesterDaily": true, - "history": [], - "completed": false, - "collapseChecklist": false, - "type": "daily", - "text": "Zahnseide benutzen", - "notes": "Klicke um Änderungen zu machen!", - "tags": [], - "value": -2.9663035443712333, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "startDate": "2024-10-22T22:00:00.000Z", - "daysOfMonth": [23], - "weeksOfMonth": [], - "checklist": [], - "reminders": [], - "createdAt": "2024-07-07T17:51:53.268Z", - "updatedAt": "2024-09-21T22:24:20.154Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "isDue": false, - "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" - } - ], - "notifications": [], - "userV": 589, - "appVersion": "5.28.6" -} diff --git a/tests/components/habitica/fixtures/duedate_fixture_4.json b/tests/components/habitica/fixtures/duedate_fixture_4.json deleted file mode 100644 index 7e14e3339e2..00000000000 --- a/tests/components/habitica/fixtures/duedate_fixture_4.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "success": true, - "data": [ - { - "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", - "frequency": "yearly", - "everyX": 1, - "repeat": { - "m": true, - "t": true, - "w": true, - "th": true, - "f": true, - "s": true, - "su": true - }, - "streak": 1, - "nextDue": ["2024-10-22T22:00:00.000Z", "2025-10-22T22:00:00.000Z"], - "yesterDaily": true, - "history": [], - "completed": false, - "collapseChecklist": false, - "type": "daily", - "text": "Zahnseide benutzen", - "notes": "Klicke um Änderungen zu machen!", - "tags": [], - "value": -2.9663035443712333, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "startDate": "2024-10-22T22:00:00.000Z", - "daysOfMonth": [22], - "weeksOfMonth": [], - "checklist": [], - "reminders": [], - "createdAt": "2024-07-07T17:51:53.268Z", - "updatedAt": "2024-09-21T22:24:20.154Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "isDue": false, - "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" - } - ], - 
"notifications": [], - "userV": 589, - "appVersion": "5.28.6" -} diff --git a/tests/components/habitica/fixtures/duedate_fixture_5.json b/tests/components/habitica/fixtures/duedate_fixture_5.json deleted file mode 100644 index d8d5f4cd773..00000000000 --- a/tests/components/habitica/fixtures/duedate_fixture_5.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "success": true, - "data": [ - { - "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", - "frequency": "weekly", - "everyX": 1, - "repeat": { - "m": true, - "t": true, - "w": true, - "th": true, - "f": true, - "s": true, - "su": true - }, - "streak": 1, - "nextDue": ["2024-09-20T22:00:00.000Z", "2024-09-27T22:00:00.000Z"], - "yesterDaily": true, - "history": [], - "completed": false, - "collapseChecklist": false, - "type": "daily", - "text": "Zahnseide benutzen", - "notes": "Klicke um Änderungen zu machen!", - "tags": [], - "value": -2.9663035443712333, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "startDate": "2024-09-25T22:00:00.000Z", - "daysOfMonth": [], - "weeksOfMonth": [], - "checklist": [], - "reminders": [], - "createdAt": "2024-07-07T17:51:53.268Z", - "updatedAt": "2024-09-21T22:24:20.154Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "isDue": false, - "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" - } - ], - "notifications": [], - "userV": 589, - "appVersion": "5.28.6" -} diff --git a/tests/components/habitica/fixtures/duedate_fixture_6.json b/tests/components/habitica/fixtures/duedate_fixture_6.json deleted file mode 100644 index dce177b1abc..00000000000 --- a/tests/components/habitica/fixtures/duedate_fixture_6.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "success": true, - "data": [ - { - "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", - "frequency": "monthly", - "everyX": 1, - "repeat": { - "m": true, - "t": true, - "w": true, - "th": true, - "f": true, - "s": true, - "su": true - }, - "streak": 1, - "nextDue": ["2024-09-20T22:00:00.000Z", "2024-10-20T22:00:00.000Z"], - "yesterDaily": true, - "history": [], - "completed": false, - "collapseChecklist": false, - "type": "daily", - "text": "Zahnseide benutzen", - "notes": "Klicke um Änderungen zu machen!", - "tags": [], - "value": -2.9663035443712333, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "startDate": "2024-09-25T22:00:00.000Z", - "daysOfMonth": [], - "weeksOfMonth": [], - "checklist": [], - "reminders": [], - "createdAt": "2024-07-07T17:51:53.268Z", - "updatedAt": "2024-09-21T22:24:20.154Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "isDue": false, - "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" - } - ], - "notifications": [], - "userV": 589, - "appVersion": "5.28.6" -} diff --git a/tests/components/habitica/fixtures/duedate_fixture_7.json b/tests/components/habitica/fixtures/duedate_fixture_7.json deleted file mode 100644 index 723ee40062d..00000000000 --- a/tests/components/habitica/fixtures/duedate_fixture_7.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "success": true, - "data": [ - { - "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", - "frequency": "monthly", - "everyX": 0, - "repeat": { - "m": true, - "t": true, - "w": true, - "th": true, - "f": true, - "s": true, - "su": true - }, - "streak": 1, - "nextDue": ["2024-09-22T22:00:00.000Z", "2024-09-23T22:00:00.000Z"], - "yesterDaily": true, - "history": [], - "completed": false, - "collapseChecklist": false, - "type": 
"daily", - "text": "Zahnseide benutzen", - "notes": "Klicke um Änderungen zu machen!", - "tags": [], - "value": -2.9663035443712333, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "startDate": "2024-09-23T22:00:00.000Z", - "daysOfMonth": [], - "weeksOfMonth": [], - "checklist": [], - "reminders": [], - "createdAt": "2024-07-07T17:51:53.268Z", - "updatedAt": "2024-09-21T22:24:20.154Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "isDue": false, - "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" - } - ], - "notifications": [], - "userV": 589, - "appVersion": "5.28.6" -} diff --git a/tests/components/habitica/fixtures/duedate_fixture_8.json b/tests/components/habitica/fixtures/duedate_fixture_8.json deleted file mode 100644 index 21a40a0a649..00000000000 --- a/tests/components/habitica/fixtures/duedate_fixture_8.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "success": true, - "data": [ - { - "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", - "frequency": "daily", - "everyX": 1, - "repeat": { - "m": true, - "t": true, - "w": true, - "th": true, - "f": true, - "s": true, - "su": true - }, - "streak": 1, - "nextDue": [], - "yesterDaily": true, - "history": [], - "completed": false, - "collapseChecklist": false, - "type": "daily", - "text": "Zahnseide benutzen", - "notes": "Klicke um Änderungen zu machen!", - "tags": [], - "value": -2.9663035443712333, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "startDate": "2024-09-23T22:00:00.000Z", - "daysOfMonth": [], - "weeksOfMonth": [], - "checklist": [], - "reminders": [], - "createdAt": "2024-07-07T17:51:53.268Z", - "updatedAt": "2024-09-21T22:24:20.154Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "isDue": false, - "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" - } - ], - "notifications": [], - "userV": 589, - "appVersion": "5.28.6" -} diff --git a/tests/components/habitica/fixtures/healer_fixture.json b/tests/components/habitica/fixtures/healer_fixture.json deleted file mode 100644 index 85f719f4ca7..00000000000 --- a/tests/components/habitica/fixtures/healer_fixture.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "data": { - "api_user": "test-api-user", - "profile": { "name": "test-user" }, - "stats": { - "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, - "stealth": 0, - "streaks": false, - "seafoam": false, - "shinySeed": false, - "snowball": false, - "spookySparkles": false - }, - "hp": 45, - "mp": 50.89999999999998, - "exp": 737, - "gp": 137.62587214609795, - "lvl": 38, - "class": "healer", - "maxHealth": 50, - "maxMP": 166, - "toNextLevel": 880, - "points": 5, - "str": 15, - "con": 15, - "int": 15, - "per": 15 - }, - "preferences": { - "sleep": false, - "automaticAllocation": true, - "disableClasses": false, - "language": "en" - }, - "flags": { - "classSelected": true - }, - "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z", - "items": { - "gear": { - "equipped": { - "weapon": "weapon_healer_5", - "armor": "armor_healer_5", - "head": "head_healer_5", - "shield": "shield_healer_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } - } -} diff --git a/tests/components/habitica/fixtures/healer_skills_unavailable.json b/tests/components/habitica/fixtures/healer_skills_unavailable.json deleted file mode 100644 index 
a6bff246b2a..00000000000 --- a/tests/components/habitica/fixtures/healer_skills_unavailable.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "data": { - "api_user": "test-api-user", - "profile": { "name": "test-user" }, - "stats": { - "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, - "stealth": 0, - "streaks": false, - "seafoam": false, - "shinySeed": false, - "snowball": false, - "spookySparkles": false - }, - "hp": 50, - "mp": 10, - "exp": 737, - "gp": 0, - "lvl": 34, - "class": "healer", - "maxHealth": 50, - "maxMP": 166, - "toNextLevel": 880, - "points": 0, - "str": 15, - "con": 15, - "int": 15, - "per": 15 - }, - "preferences": { - "sleep": false, - "automaticAllocation": false, - "disableClasses": false, - "language": "en" - }, - "flags": { - "classSelected": true - }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_healer_5", - "armor": "armor_healer_5", - "head": "head_healer_5", - "shield": "shield_healer_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } - } -} diff --git a/tests/components/habitica/fixtures/quest_invitation_off.json b/tests/components/habitica/fixtures/quest_invitation_off.json deleted file mode 100644 index b5eccd99e10..00000000000 --- a/tests/components/habitica/fixtures/quest_invitation_off.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "data": { - "api_user": "test-api-user", - "profile": { "name": "test-user" }, - "stats": { - "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, - "stealth": 0, - "streaks": false, - "seafoam": false, - "shinySeed": false, - "snowball": false, - "spookySparkles": false - }, - "hp": 0, - "mp": 50.89999999999998, - "exp": 737, - "gp": 137.62587214609795, - "lvl": 38, - "class": "wizard", - "maxHealth": 50, - "maxMP": 166, - "toNextLevel": 880, - "points": 5 - }, - "preferences": { - "sleep": false, - "automaticAllocation": true, - "disableClasses": false, - "language": "en" - }, - "flags": { - "classSelected": true - }, - "tasksOrder": { - "rewards": ["5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b"], - "todos": [ - "88de7cd9-af2b-49ce-9afd-bf941d87336b", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - "1aa3137e-ef72-4d1f-91ee-41933602f438", - "86ea2475-d1b5-4020-bdcc-c188c7996afa" - ], - "dailys": [ - "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a", - "bc1d1855-b2b8-4663-98ff-62e7b763dfc4", - "e97659e0-2c42-4599-a7bb-00282adc410d", - "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", - "f2c85972-1a19-4426-bc6d-ce3337b9d99f", - "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1" - ], - "habits": ["1d147de6-5c02-4740-8e2f-71d3015a37f4"] - }, - "party": { - "quest": { - "RSVPNeeded": false, - "key": null - } - }, - "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z" - } -} diff --git a/tests/components/habitica/fixtures/rogue_fixture.json b/tests/components/habitica/fixtures/rogue_fixture.json deleted file mode 100644 index 1e5e996c034..00000000000 --- a/tests/components/habitica/fixtures/rogue_fixture.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "data": { - "api_user": "test-api-user", - "profile": { "name": "test-user" }, - "stats": { - "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, - "stealth": 0, - "streaks": false, - "seafoam": false, - "shinySeed": false, - "snowball": false, - "spookySparkles": false - }, - "hp": 0, - "mp": 50.89999999999998, - "exp": 737, - "gp": 137.62587214609795, - "lvl": 38, - "class": "rogue", - "maxHealth": 50, - "maxMP": 166, - "toNextLevel": 880, - "points": 5, - 
"str": 15, - "con": 15, - "int": 15, - "per": 15 - }, - "preferences": { - "sleep": false, - "automaticAllocation": true, - "disableClasses": false, - "language": "en" - }, - "flags": { - "classSelected": true - }, - "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z", - "items": { - "gear": { - "equipped": { - "weapon": "weapon_rogue_5", - "armor": "armor_rogue_5", - "head": "head_rogue_5", - "shield": "shield_rogue_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } - } -} diff --git a/tests/components/habitica/fixtures/rogue_skills_unavailable.json b/tests/components/habitica/fixtures/rogue_skills_unavailable.json deleted file mode 100644 index c7c5ff32245..00000000000 --- a/tests/components/habitica/fixtures/rogue_skills_unavailable.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "data": { - "api_user": "test-api-user", - "profile": { "name": "test-user" }, - "stats": { - "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, - "stealth": 0, - "streaks": true, - "seafoam": false, - "shinySeed": false, - "snowball": false, - "spookySparkles": false - }, - "hp": 50, - "mp": 20, - "exp": 737, - "gp": 0, - "lvl": 38, - "class": "rogue", - "maxHealth": 50, - "maxMP": 166, - "toNextLevel": 880, - "points": 0, - "str": 15, - "con": 15, - "int": 15, - "per": 15 - }, - "preferences": { - "sleep": false, - "automaticAllocation": false, - "disableClasses": false, - "language": "en" - }, - "flags": { - "classSelected": true - }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_rogue_5", - "armor": "armor_rogue_5", - "head": "head_rogue_5", - "shield": "shield_rogue_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } - } -} diff --git a/tests/components/habitica/fixtures/rogue_stealth_unavailable.json b/tests/components/habitica/fixtures/rogue_stealth_unavailable.json deleted file mode 100644 index 9fd7adcca42..00000000000 --- a/tests/components/habitica/fixtures/rogue_stealth_unavailable.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "data": { - "api_user": "test-api-user", - "profile": { "name": "test-user" }, - "stats": { - "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, - "stealth": 4, - "streaks": false, - "seafoam": false, - "shinySeed": false, - "snowball": false, - "spookySparkles": false - }, - "hp": 50, - "mp": 50, - "exp": 737, - "gp": 0, - "lvl": 38, - "class": "rogue", - "maxHealth": 50, - "maxMP": 166, - "toNextLevel": 880, - "points": 0, - "str": 15, - "con": 15, - "int": 15, - "per": 15 - }, - "preferences": { - "sleep": false, - "automaticAllocation": false, - "disableClasses": false, - "language": "en" - }, - "flags": { - "classSelected": true - }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_rogue_5", - "armor": "armor_rogue_5", - "head": "head_rogue_5", - "shield": "shield_rogue_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } - } -} diff --git a/tests/components/habitica/fixtures/score_with_drop.json b/tests/components/habitica/fixtures/score_with_drop.json deleted file mode 100644 index f25838d6c37..00000000000 --- a/tests/components/habitica/fixtures/score_with_drop.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "success": true, - "data": { - "delta": 
0.9999999781878414, - "_tmp": { - "quest": { - "progressDelta": 1.049999977097233 - }, - "drop": { - "value": 3, - "key": "Dragon", - "type": "Egg", - "dialog": "You've found a Dragon Egg!" - } - }, - "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, - "stealth": 0, - "streaks": false, - "seafoam": false, - "shinySeed": false, - "snowball": false, - "spookySparkles": false - }, - "training": { - "int": 0, - "per": 0, - "str": 0, - "con": 0 - }, - "hp": 25.100000000000016, - "mp": 24, - "exp": 196, - "gp": 30.453660284128997, - "lvl": 20, - "class": "warrior", - "points": 2, - "str": 0, - "con": 0, - "int": 0, - "per": 0 - }, - "notifications": [ - { - "type": "ITEM_RECEIVED", - "data": { - "icon": "notif_orca_mount", - "title": "Orcas for Summer Splash!", - "text": "To celebrate Summer Splash, we've given you an Orca Mount!", - "destination": "stable" - }, - "seen": true, - "id": "b7a85df1-06ed-4ab1-b56d-43418fc6a5e5" - }, - { - "type": "UNALLOCATED_STATS_POINTS", - "data": { - "points": 2 - }, - "seen": true, - "id": "bc3f8a69-231f-4eb1-ba48-a00b6c0e0f37" - } - ], - "userV": 623, - "appVersion": "5.28.6" -} diff --git a/tests/components/habitica/fixtures/tasks.json b/tests/components/habitica/fixtures/tasks.json deleted file mode 100644 index 2e8305283d0..00000000000 --- a/tests/components/habitica/fixtures/tasks.json +++ /dev/null @@ -1,555 +0,0 @@ -{ - "success": true, - "data": [ - { - "_id": "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a", - "up": true, - "down": true, - "counterUp": 0, - "counterDown": 0, - "frequency": "daily", - "history": [], - "type": "habit", - "text": "Gesundes Essen/Junkfood", - "notes": "", - "tags": [], - "value": 0, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "reminders": [], - "createdAt": "2024-07-07T17:51:53.268Z", - "updatedAt": "2024-07-07T17:51:53.268Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "id": "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a" - }, - { - "_id": "1d147de6-5c02-4740-8e2f-71d3015a37f4", - "up": true, - "down": false, - "counterUp": 0, - "counterDown": 0, - "frequency": "daily", - "history": [ - { - "date": 1720376763324, - "value": 1, - "scoredUp": 1, - "scoredDown": 0 - } - ], - "type": "habit", - "text": "Eine kurze Pause machen", - "notes": "", - "tags": [], - "value": 0, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "reminders": [], - "createdAt": "2024-07-07T17:51:53.266Z", - "updatedAt": "2024-07-12T09:58:45.438Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "id": "1d147de6-5c02-4740-8e2f-71d3015a37f4" - }, - { - "_id": "bc1d1855-b2b8-4663-98ff-62e7b763dfc4", - "up": false, - "down": true, - "counterUp": 0, - "counterDown": 0, - "frequency": "daily", - "history": [], - "type": "habit", - "text": "Klicke hier um dies als schlechte Gewohnheit zu markieren, die Du gerne loswerden möchtest", - "notes": "Oder lösche es über die Bearbeitungs-Ansicht", - "tags": [], - "value": 0, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "reminders": [], - "createdAt": "2024-07-07T17:51:53.265Z", - "updatedAt": "2024-07-07T17:51:53.265Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "id": "bc1d1855-b2b8-4663-98ff-62e7b763dfc4" - }, - { - "_id": "e97659e0-2c42-4599-a7bb-00282adc410d", - "up": true, - "down": false, - "counterUp": 0, - 
"counterDown": 0, - "frequency": "daily", - "history": [ - { - "date": 1720376763140, - "value": 1, - "scoredUp": 1, - "scoredDown": 0 - } - ], - "type": "habit", - "text": "Füge eine Aufgabe zu Habitica hinzu", - "notes": "Eine Gewohnheit, eine Tagesaufgabe oder ein To-Do", - "tags": [], - "value": 0, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "reminders": [], - "createdAt": "2024-07-07T17:51:53.264Z", - "updatedAt": "2024-07-12T09:58:45.438Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "id": "e97659e0-2c42-4599-a7bb-00282adc410d", - "alias": "create_a_task" - }, - { - "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", - "frequency": "weekly", - "everyX": 1, - "repeat": { - "m": true, - "t": true, - "w": true, - "th": true, - "f": true, - "s": true, - "su": true - }, - "streak": 1, - "nextDue": [ - "Mon Sep 23 2024 00:00:00 GMT+0200", - "Tue Sep 24 2024 00:00:00 GMT+0200", - "Wed Sep 25 2024 00:00:00 GMT+0200", - "Thu Sep 26 2024 00:00:00 GMT+0200", - "Fri Sep 27 2024 00:00:00 GMT+0200", - "Sat Sep 28 2024 00:00:00 GMT+0200" - ], - "yesterDaily": true, - "history": [ - { - "date": 1720376766749, - "value": 1, - "isDue": true, - "completed": true - }, - { - "date": 1720545311292, - "value": 0.02529999999999999, - "isDue": true, - "completed": false - }, - { - "date": 1720564306719, - "value": -0.9740518837628547, - "isDue": true, - "completed": false - }, - { - "date": 1720691096907, - "value": 0.051222853419153, - "isDue": true, - "completed": true - }, - { - "date": 1720778325243, - "value": 1.0499115128458676, - "isDue": true, - "completed": true - }, - { - "date": 1724185196447, - "value": 0.07645736684721605, - "isDue": true, - "completed": false - }, - { - "date": 1724255707692, - "value": -0.921585289356988, - "isDue": true, - "completed": false - }, - { - "date": 1726846163640, - "value": -1.9454824860630637, - "isDue": true, - "completed": false - }, - { - "date": 1726953787542, - "value": -2.9966001649571803, - "isDue": true, - "completed": false - }, - { - "date": 1726956115608, - "value": -4.07641493832036, - "isDue": true, - "completed": false - }, - { - "date": 1726957460150, - "value": -2.9663035443712333, - "isDue": true, - "completed": true - } - ], - "completed": true, - "collapseChecklist": false, - "type": "daily", - "text": "Zahnseide benutzen", - "notes": "Klicke um Änderungen zu machen!", - "tags": [], - "value": -2.9663035443712333, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "startDate": "2024-07-06T22:00:00.000Z", - "daysOfMonth": [], - "weeksOfMonth": [], - "checklist": [], - "reminders": [], - "createdAt": "2024-07-07T17:51:53.268Z", - "updatedAt": "2024-09-21T22:24:20.154Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "isDue": true, - "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" - }, - { - "_id": "f2c85972-1a19-4426-bc6d-ce3337b9d99f", - "frequency": "weekly", - "everyX": 1, - "repeat": { - "m": true, - "t": true, - "w": true, - "th": true, - "f": true, - "s": true, - "su": true - }, - "streak": 0, - "nextDue": [ - "2024-09-22T22:00:00.000Z", - "2024-09-23T22:00:00.000Z", - "2024-09-24T22:00:00.000Z", - "2024-09-25T22:00:00.000Z", - "2024-09-26T22:00:00.000Z", - "2024-09-27T22:00:00.000Z" - ], - "yesterDaily": true, - "history": [ - { - "date": 1720374903074, - "value": 1, - "isDue": true, - "completed": true - }, - { - "date": 1720545311291, - 
"value": 0.02529999999999999, - "isDue": true, - "completed": false - }, - { - "date": 1720564306717, - "value": -0.9740518837628547, - "isDue": true, - "completed": false - }, - { - "date": 1720682459722, - "value": 0.051222853419153, - "isDue": true, - "completed": true - }, - { - "date": 1720778325246, - "value": 1.0499115128458676, - "isDue": true, - "completed": true - }, - { - "date": 1720778492219, - "value": 2.023365658844519, - "isDue": true, - "completed": true - }, - { - "date": 1724255707691, - "value": 1.0738942424964806, - "isDue": true, - "completed": false - }, - { - "date": 1726846163638, - "value": 0.10103816898038132, - "isDue": true, - "completed": false - }, - { - "date": 1726953787540, - "value": -0.8963760215867302, - "isDue": true, - "completed": false - }, - { - "date": 1726956115607, - "value": -1.919611992979862, - "isDue": true, - "completed": false - } - ], - "completed": false, - "collapseChecklist": false, - "type": "daily", - "text": "5 Minuten ruhig durchatmen", - "notes": "Klicke um Deinen Terminplan festzulegen!", - "tags": [], - "value": -1.919611992979862, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "startDate": "2024-07-06T22:00:00.000Z", - "daysOfMonth": [], - "weeksOfMonth": [], - "checklist": [], - "reminders": [], - "createdAt": "2024-07-07T17:51:53.266Z", - "updatedAt": "2024-09-21T22:51:41.756Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "isDue": true, - "id": "f2c85972-1a19-4426-bc6d-ce3337b9d99f" - }, - { - "_id": "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1", - "frequency": "weekly", - "everyX": 1, - "startDate": "2024-09-21T22:00:00.000Z", - "repeat": { - "m": false, - "t": false, - "w": true, - "th": false, - "f": false, - "s": true, - "su": true - }, - "streak": 0, - "daysOfMonth": [], - "weeksOfMonth": [], - "nextDue": [ - "2024-09-24T22:00:00.000Z", - "2024-09-27T22:00:00.000Z", - "2024-09-28T22:00:00.000Z", - "2024-10-01T22:00:00.000Z", - "2024-10-04T22:00:00.000Z", - "2024-10-08T22:00:00.000Z" - ], - "yesterDaily": true, - "history": [], - "completed": false, - "collapseChecklist": false, - "checklist": [], - "type": "daily", - "text": "Fitnessstudio besuchen", - "notes": "Ein einstündiges Workout im Fitnessstudio absolvieren.", - "tags": ["51076966-2970-4b40-b6ba-d58c6a756dd7"], - "value": 0, - "priority": 2, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "reminders": [], - "byHabitica": false, - "createdAt": "2024-09-22T11:44:43.774Z", - "updatedAt": "2024-09-22T11:44:43.774Z", - "userId": "1343a9af-d891-4027-841a-956d105ca408", - "isDue": true, - "id": "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1" - }, - { - "_id": "88de7cd9-af2b-49ce-9afd-bf941d87336b", - "date": "2024-09-27T22:17:00.000Z", - "completed": false, - "collapseChecklist": false, - "checklist": [], - "type": "todo", - "text": "Buch zu Ende lesen", - "notes": "Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.", - "tags": [], - "value": 0, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "reminders": [], - "byHabitica": false, - "createdAt": "2024-09-21T22:17:57.816Z", - "updatedAt": "2024-09-21T22:17:57.816Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "id": "88de7cd9-af2b-49ce-9afd-bf941d87336b" - }, - { - "_id": "2f6fcabc-f670-4ec3-ba65-817e8deea490", - "date": "2024-08-31T22:16:00.000Z", - "completed": false, - 
"collapseChecklist": false, - "checklist": [], - "type": "todo", - "text": "Rechnungen bezahlen", - "notes": "Strom- und Internetrechnungen rechtzeitig überweisen.", - "tags": [], - "value": 0, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "reminders": [ - { - "id": "91c09432-10ac-4a49-bd20-823081ec29ed", - "time": "2024-09-22T02:00:00.0000Z" - } - ], - "byHabitica": false, - "createdAt": "2024-09-21T22:17:19.513Z", - "updatedAt": "2024-09-21T22:19:35.576Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "id": "2f6fcabc-f670-4ec3-ba65-817e8deea490", - "alias": "pay_bills" - }, - { - "_id": "1aa3137e-ef72-4d1f-91ee-41933602f438", - "completed": false, - "collapseChecklist": false, - "checklist": [], - "type": "todo", - "text": "Garten pflegen", - "notes": "Rasen mähen und die Pflanzen gießen.", - "tags": [], - "value": 0, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "reminders": [], - "byHabitica": false, - "createdAt": "2024-09-21T22:16:38.153Z", - "updatedAt": "2024-09-21T22:16:38.153Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "id": "1aa3137e-ef72-4d1f-91ee-41933602f438" - }, - { - "_id": "86ea2475-d1b5-4020-bdcc-c188c7996afa", - "date": "2024-09-21T22:00:00.000Z", - "completed": false, - "collapseChecklist": false, - "checklist": [], - "type": "todo", - "text": "Wochenendausflug planen", - "notes": "Den Ausflug für das kommende Wochenende organisieren.", - "tags": ["51076966-2970-4b40-b6ba-d58c6a756dd7"], - "value": 0, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "reminders": [], - "byHabitica": false, - "createdAt": "2024-09-21T22:16:16.756Z", - "updatedAt": "2024-09-21T22:16:16.756Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "id": "86ea2475-d1b5-4020-bdcc-c188c7996afa" - }, - { - "_id": "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", - "type": "reward", - "text": "Belohne Dich selbst", - "notes": "Schaue fern, spiele ein Spiel, gönne Dir einen Leckerbissen, es liegt ganz bei Dir!", - "tags": [], - "value": 10, - "priority": 1, - "attribute": "str", - "challenge": {}, - "group": { - "completedBy": {}, - "assignedUsers": [] - }, - "byHabitica": false, - "reminders": [], - "createdAt": "2024-07-07T17:51:53.266Z", - "updatedAt": "2024-07-07T17:51:53.266Z", - "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "id": "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b" - } - ], - "notifications": [ - { - "type": "ITEM_RECEIVED", - "data": { - "icon": "notif_orca_mount", - "title": "Orcas for Summer Splash!", - "text": "To celebrate Summer Splash, we've given you an Orca Mount!", - "destination": "stable" - }, - "seen": true, - "id": "b7a85df1-06ed-4ab1-b56d-43418fc6a5e5" - }, - { - "type": "UNALLOCATED_STATS_POINTS", - "data": { - "points": 2 - }, - "seen": true, - "id": "bc3f8a69-231f-4eb1-ba48-a00b6c0e0f37" - } - ], - "userV": 589, - "appVersion": "5.28.6" -} diff --git a/tests/components/habitica/fixtures/user.json b/tests/components/habitica/fixtures/user.json deleted file mode 100644 index 569c5b81a02..00000000000 --- a/tests/components/habitica/fixtures/user.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "data": { - "api_user": "test-api-user", - "profile": { "name": "test-user" }, - "stats": { - "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, - "stealth": 0, - "streaks": false, - "seafoam": false, - "shinySeed": false, - "snowball": false, - 
"spookySparkles": false - }, - "hp": 0, - "mp": 50.89999999999998, - "exp": 737, - "gp": 137.62587214609795, - "lvl": 38, - "class": "wizard", - "maxHealth": 50, - "maxMP": 166, - "toNextLevel": 880, - "points": 5, - "str": 15, - "con": 15, - "int": 15, - "per": 15 - }, - "preferences": { - "sleep": false, - "automaticAllocation": true, - "disableClasses": false, - "language": "en" - }, - "flags": { - "classSelected": true - }, - "tasksOrder": { - "rewards": ["5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b"], - "todos": [ - "88de7cd9-af2b-49ce-9afd-bf941d87336b", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - "1aa3137e-ef72-4d1f-91ee-41933602f438", - "86ea2475-d1b5-4020-bdcc-c188c7996afa" - ], - "dailys": [ - "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a", - "bc1d1855-b2b8-4663-98ff-62e7b763dfc4", - "e97659e0-2c42-4599-a7bb-00282adc410d", - "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", - "f2c85972-1a19-4426-bc6d-ce3337b9d99f", - "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1" - ], - "habits": ["1d147de6-5c02-4740-8e2f-71d3015a37f4"] - }, - "party": { - "quest": { - "RSVPNeeded": true, - "key": "dustbunnies" - } - }, - "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z", - "items": { - "gear": { - "equipped": { - "weapon": "weapon_warrior_5", - "armor": "armor_warrior_5", - "head": "head_warrior_5", - "shield": "shield_warrior_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } - } -} diff --git a/tests/components/habitica/fixtures/warrior_fixture.json b/tests/components/habitica/fixtures/warrior_fixture.json deleted file mode 100644 index 3517e8a908a..00000000000 --- a/tests/components/habitica/fixtures/warrior_fixture.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "data": { - "api_user": "test-api-user", - "profile": { "name": "test-user" }, - "stats": { - "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, - "stealth": 0, - "streaks": false, - "seafoam": false, - "shinySeed": false, - "snowball": false, - "spookySparkles": false - }, - "hp": 50, - "mp": 50.89999999999998, - "exp": 737, - "gp": 137.62587214609795, - "lvl": 38, - "class": "warrior", - "maxHealth": 50, - "maxMP": 166, - "toNextLevel": 880, - "points": 5, - "str": 15, - "con": 15, - "int": 15, - "per": 15 - }, - "preferences": { - "sleep": false, - "automaticAllocation": true, - "disableClasses": false, - "language": "en" - }, - "flags": { - "classSelected": true - }, - "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z", - "items": { - "gear": { - "equipped": { - "weapon": "weapon_warrior_5", - "armor": "armor_warrior_5", - "head": "head_warrior_5", - "shield": "shield_warrior_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } - } -} diff --git a/tests/components/habitica/fixtures/warrior_skills_unavailable.json b/tests/components/habitica/fixtures/warrior_skills_unavailable.json deleted file mode 100644 index b3d33c85d5c..00000000000 --- a/tests/components/habitica/fixtures/warrior_skills_unavailable.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "data": { - "api_user": "test-api-user", - "profile": { "name": "test-user" }, - "stats": { - "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, - "stealth": 0, - "streaks": false, - "seafoam": false, - "shinySeed": false, - "snowball": false, - "spookySparkles": false - }, - "hp": 50, - "mp": 10, - "exp": 737, - "gp": 0, - "lvl": 34, - "class": "warrior", - 
"maxHealth": 50, - "maxMP": 166, - "toNextLevel": 880, - "points": 0, - "str": 15, - "con": 15, - "int": 15, - "per": 15 - }, - "preferences": { - "sleep": false, - "automaticAllocation": false, - "disableClasses": false, - "language": "en" - }, - "flags": { - "classSelected": true - }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_warrior_5", - "armor": "armor_warrior_5", - "head": "head_warrior_5", - "shield": "shield_warrior_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } - } -} diff --git a/tests/components/habitica/fixtures/wizard_fixture.json b/tests/components/habitica/fixtures/wizard_fixture.json deleted file mode 100644 index de596e231de..00000000000 --- a/tests/components/habitica/fixtures/wizard_fixture.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "data": { - "api_user": "test-api-user", - "profile": { "name": "test-user" }, - "stats": { - "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, - "stealth": 0, - "streaks": false, - "seafoam": false, - "shinySeed": false, - "snowball": false, - "spookySparkles": false - }, - "hp": 50, - "mp": 50.89999999999998, - "exp": 737, - "gp": 137.62587214609795, - "lvl": 38, - "class": "wizard", - "maxHealth": 50, - "maxMP": 166, - "toNextLevel": 880, - "points": 5, - "str": 15, - "con": 15, - "int": 15, - "per": 15 - }, - "preferences": { - "sleep": false, - "automaticAllocation": true, - "disableClasses": false, - "language": "en" - }, - "flags": { - "classSelected": true - }, - "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z", - "items": { - "gear": { - "equipped": { - "weapon": "weapon_wizard_5", - "armor": "armor_wizard_5", - "head": "head_wizard_5", - "shield": "shield_base_0", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } - } -} diff --git a/tests/components/habitica/fixtures/wizard_frost_unavailable.json b/tests/components/habitica/fixtures/wizard_frost_unavailable.json deleted file mode 100644 index 31d10fde4b9..00000000000 --- a/tests/components/habitica/fixtures/wizard_frost_unavailable.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "data": { - "api_user": "test-api-user", - "profile": { "name": "test-user" }, - "stats": { - "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, - "stealth": 0, - "streaks": true, - "seafoam": false, - "shinySeed": false, - "snowball": false, - "spookySparkles": false - }, - "hp": 50, - "mp": 50, - "exp": 737, - "gp": 0, - "lvl": 34, - "class": "wizard", - "maxHealth": 50, - "maxMP": 166, - "toNextLevel": 880, - "points": 0, - "str": 15, - "con": 15, - "int": 15, - "per": 15 - }, - "preferences": { - "sleep": false, - "automaticAllocation": false, - "disableClasses": false, - "language": "en" - }, - "flags": { - "classSelected": true - }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_wizard_5", - "armor": "armor_wizard_5", - "head": "head_wizard_5", - "shield": "shield_base_0", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } - } -} diff --git a/tests/components/habitica/fixtures/wizard_skills_unavailable.json b/tests/components/habitica/fixtures/wizard_skills_unavailable.json deleted file mode 100644 index f3bdee9dd74..00000000000 --- 
a/tests/components/habitica/fixtures/wizard_skills_unavailable.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "data": { - "api_user": "test-api-user", - "profile": { "name": "test-user" }, - "stats": { - "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, - "stealth": 0, - "streaks": false, - "seafoam": false, - "shinySeed": false, - "snowball": false, - "spookySparkles": false - }, - "hp": 50, - "mp": 10, - "exp": 737, - "gp": 0, - "lvl": 34, - "class": "wizard", - "maxHealth": 50, - "maxMP": 166, - "toNextLevel": 880, - "points": 0, - "str": 15, - "con": 15, - "int": 15, - "per": 15 - }, - "preferences": { - "sleep": false, - "automaticAllocation": false, - "disableClasses": false, - "language": "en" - }, - "flags": { - "classSelected": true - }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_wizard_5", - "armor": "armor_wizard_5", - "head": "head_wizard_5", - "shield": "shield_base_0", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } - } -} diff --git a/tests/components/habitica/snapshots/test_binary_sensor.ambr b/tests/components/habitica/snapshots/test_binary_sensor.ambr deleted file mode 100644 index c18f8f551c9..00000000000 --- a/tests/components/habitica/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,48 +0,0 @@ -# serializer version: 1 -# name: test_binary_sensors[binary_sensor.test_user_pending_quest_invitation-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.test_user_pending_quest_invitation', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Pending quest invitation', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_pending_quest', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[binary_sensor.test_user_pending_quest_invitation-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/inventory_quest_scroll_dustbunnies.png', - 'friendly_name': 'test-user Pending quest invitation', - }), - 'context': , - 'entity_id': 'binary_sensor.test_user_pending_quest_invitation', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/habitica/snapshots/test_button.ambr b/tests/components/habitica/snapshots/test_button.ambr deleted file mode 100644 index c8f92650874..00000000000 --- a/tests/components/habitica/snapshots/test_button.ambr +++ /dev/null @@ -1,1305 +0,0 @@ -# serializer version: 1 -# name: test_buttons[healer_fixture][button.test_user_allocate_all_stat_points-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_allocate_all_stat_points', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allocate all stat points', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_allocate_all_stat_points', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_allocate_all_stat_points-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Allocate all stat points', - }), - 'context': , - 'entity_id': 'button.test_user_allocate_all_stat_points', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_blessing-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_blessing', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Blessing', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_heal_all', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_blessing-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_healAll.png', - 'friendly_name': 'test-user Blessing', - }), - 'context': , - 'entity_id': 'button.test_user_blessing', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_buy_a_health_potion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_buy_a_health_potion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Buy a health potion', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_buy_health_potion', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_buy_a_health_potion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_potion.png', - 'friendly_name': 'test-user Buy a health potion', - }), - 'context': , - 'entity_id': 'button.test_user_buy_a_health_potion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_healing_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 
'entity_id': 'button.test_user_healing_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Healing light', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_heal', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_healing_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_heal.png', - 'friendly_name': 'test-user Healing light', - }), - 'context': , - 'entity_id': 'button.test_user_healing_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_protective_aura-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_protective_aura', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Protective aura', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_protect_aura', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_protective_aura-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_protectAura.png', - 'friendly_name': 'test-user Protective aura', - }), - 'context': , - 'entity_id': 'button.test_user_protective_aura', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_revive_from_death-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_revive_from_death', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Revive from death', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_revive', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_revive_from_death-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Revive from death', - }), - 'context': , - 'entity_id': 'button.test_user_revive_from_death', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_searing_brightness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': 
, - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_searing_brightness', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Searing brightness', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_brightness', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_searing_brightness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_brightness.png', - 'friendly_name': 'test-user Searing brightness', - }), - 'context': , - 'entity_id': 'button.test_user_searing_brightness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_start_my_day-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_start_my_day', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Start my day', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_run_cron', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[healer_fixture][button.test_user_start_my_day-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Start my day', - }), - 'context': , - 'entity_id': 'button.test_user_start_my_day', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[rogue_fixture][button.test_user_allocate_all_stat_points-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_allocate_all_stat_points', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allocate all stat points', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_allocate_all_stat_points', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[rogue_fixture][button.test_user_allocate_all_stat_points-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Allocate all stat points', - }), - 'context': , - 'entity_id': 'button.test_user_allocate_all_stat_points', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[rogue_fixture][button.test_user_buy_a_health_potion-entry] - EntityRegistryEntrySnapshot({ - 
'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_buy_a_health_potion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Buy a health potion', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_buy_health_potion', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[rogue_fixture][button.test_user_buy_a_health_potion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_potion.png', - 'friendly_name': 'test-user Buy a health potion', - }), - 'context': , - 'entity_id': 'button.test_user_buy_a_health_potion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[rogue_fixture][button.test_user_revive_from_death-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_revive_from_death', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Revive from death', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_revive', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[rogue_fixture][button.test_user_revive_from_death-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Revive from death', - }), - 'context': , - 'entity_id': 'button.test_user_revive_from_death', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[rogue_fixture][button.test_user_start_my_day-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_start_my_day', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Start my day', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_run_cron', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[rogue_fixture][button.test_user_start_my_day-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Start my day', - }), - 'context': , - 'entity_id': 'button.test_user_start_my_day', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[rogue_fixture][button.test_user_stealth-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_stealth', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Stealth', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_stealth', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[rogue_fixture][button.test_user_stealth-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_stealth.png', - 'friendly_name': 'test-user Stealth', - }), - 'context': , - 'entity_id': 'button.test_user_stealth', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[rogue_fixture][button.test_user_tools_of_the_trade-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_tools_of_the_trade', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tools of the trade', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_tools_of_trade', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[rogue_fixture][button.test_user_tools_of_the_trade-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_toolsOfTrade.png', - 'friendly_name': 'test-user Tools of the trade', - }), - 'context': , - 'entity_id': 'button.test_user_tools_of_the_trade', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[warrior_fixture][button.test_user_allocate_all_stat_points-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_allocate_all_stat_points', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allocate all stat points', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_allocate_all_stat_points', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[warrior_fixture][button.test_user_allocate_all_stat_points-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Allocate all stat points', - }), - 'context': , - 'entity_id': 'button.test_user_allocate_all_stat_points', - 
'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[warrior_fixture][button.test_user_buy_a_health_potion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_buy_a_health_potion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Buy a health potion', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_buy_health_potion', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[warrior_fixture][button.test_user_buy_a_health_potion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_potion.png', - 'friendly_name': 'test-user Buy a health potion', - }), - 'context': , - 'entity_id': 'button.test_user_buy_a_health_potion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_buttons[warrior_fixture][button.test_user_defensive_stance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_defensive_stance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Defensive stance', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_defensive_stance', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[warrior_fixture][button.test_user_defensive_stance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_defensiveStance.png', - 'friendly_name': 'test-user Defensive stance', - }), - 'context': , - 'entity_id': 'button.test_user_defensive_stance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[warrior_fixture][button.test_user_intimidating_gaze-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_intimidating_gaze', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Intimidating gaze', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_intimidate', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_buttons[warrior_fixture][button.test_user_intimidating_gaze-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_intimidate.png', - 'friendly_name': 'test-user Intimidating gaze', - }), - 'context': , - 'entity_id': 'button.test_user_intimidating_gaze', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[warrior_fixture][button.test_user_revive_from_death-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_revive_from_death', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Revive from death', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_revive', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[warrior_fixture][button.test_user_revive_from_death-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Revive from death', - }), - 'context': , - 'entity_id': 'button.test_user_revive_from_death', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_buttons[warrior_fixture][button.test_user_start_my_day-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_start_my_day', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Start my day', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_run_cron', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[warrior_fixture][button.test_user_start_my_day-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Start my day', - }), - 'context': , - 'entity_id': 'button.test_user_start_my_day', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[warrior_fixture][button.test_user_valorous_presence-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_valorous_presence', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Valorous presence', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_valorous_presence', - 
'unit_of_measurement': None, - }) -# --- -# name: test_buttons[warrior_fixture][button.test_user_valorous_presence-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_valorousPresence.png', - 'friendly_name': 'test-user Valorous presence', - }), - 'context': , - 'entity_id': 'button.test_user_valorous_presence', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_allocate_all_stat_points-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_allocate_all_stat_points', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allocate all stat points', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_allocate_all_stat_points', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_allocate_all_stat_points-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Allocate all stat points', - }), - 'context': , - 'entity_id': 'button.test_user_allocate_all_stat_points', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_buy_a_health_potion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_buy_a_health_potion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Buy a health potion', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_buy_health_potion', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_buy_a_health_potion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_potion.png', - 'friendly_name': 'test-user Buy a health potion', - }), - 'context': , - 'entity_id': 'button.test_user_buy_a_health_potion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_chilling_frost-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_chilling_frost', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 
'original_icon': None, - 'original_name': 'Chilling frost', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_frost', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_chilling_frost-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_frost.png', - 'friendly_name': 'test-user Chilling frost', - }), - 'context': , - 'entity_id': 'button.test_user_chilling_frost', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_earthquake-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_earthquake', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Earthquake', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_earth', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_earthquake-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_earth.png', - 'friendly_name': 'test-user Earthquake', - }), - 'context': , - 'entity_id': 'button.test_user_earthquake', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_ethereal_surge-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_ethereal_surge', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Ethereal surge', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_mpheal', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_ethereal_surge-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_mpheal.png', - 'friendly_name': 'test-user Ethereal surge', - }), - 'context': , - 'entity_id': 'button.test_user_ethereal_surge', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_revive_from_death-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_revive_from_death', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Revive from death', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_revive', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_revive_from_death-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Revive from death', - }), - 'context': , - 'entity_id': 'button.test_user_revive_from_death', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_start_my_day-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_user_start_my_day', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Start my day', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_run_cron', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[wizard_fixture][button.test_user_start_my_day-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Start my day', - }), - 'context': , - 'entity_id': 'button.test_user_start_my_day', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/habitica/snapshots/test_calendar.ambr b/tests/components/habitica/snapshots/test_calendar.ambr deleted file mode 100644 index 7325e125470..00000000000 --- a/tests/components/habitica/snapshots/test_calendar.ambr +++ /dev/null @@ -1,730 +0,0 @@ -# serializer version: 1 -# name: test_api_events[calendar.test_user_dailies] - list([ - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-09-22', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-21', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', - 'end': dict({ - 'date': '2024-09-22', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', - 'start': dict({ - 'date': '2024-09-21', - }), - 'summary': 'Fitnessstudio besuchen', - 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-09-23', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-22', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-09-23', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 
'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-22', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', - 'end': dict({ - 'date': '2024-09-23', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', - 'start': dict({ - 'date': '2024-09-22', - }), - 'summary': 'Fitnessstudio besuchen', - 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-09-24', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-23', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-09-24', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-23', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-09-25', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-24', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-09-25', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-24', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-09-26', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-25', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-09-26', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-25', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', - 'end': dict({ - 'date': '2024-09-26', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', - 'start': dict({ - 'date': '2024-09-25', - }), - 'summary': 'Fitnessstudio besuchen', - 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-09-27', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-26', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-09-27', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 
'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-26', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-09-28', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-27', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-09-28', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-27', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-09-29', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-28', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-09-29', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-28', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', - 'end': dict({ - 'date': '2024-09-29', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', - 'start': dict({ - 'date': '2024-09-28', - }), - 'summary': 'Fitnessstudio besuchen', - 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-09-30', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-29', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-09-30', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-29', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', - 'end': dict({ - 'date': '2024-09-30', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', - 'start': dict({ - 'date': '2024-09-29', - }), - 'summary': 'Fitnessstudio besuchen', - 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-10-01', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-30', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-10-01', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 
'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-09-30', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-10-02', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-01', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-10-02', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-01', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-10-03', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-02', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-10-03', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-02', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', - 'end': dict({ - 'date': '2024-10-03', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', - 'start': dict({ - 'date': '2024-10-02', - }), - 'summary': 'Fitnessstudio besuchen', - 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-10-04', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-03', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-10-04', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-03', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-10-05', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-04', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-10-05', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-04', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-10-06', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 
'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-05', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-10-06', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-05', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', - 'end': dict({ - 'date': '2024-10-06', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', - 'start': dict({ - 'date': '2024-10-05', - }), - 'summary': 'Fitnessstudio besuchen', - 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-10-07', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-06', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-10-07', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-06', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', - 'end': dict({ - 'date': '2024-10-07', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', - 'start': dict({ - 'date': '2024-10-06', - }), - 'summary': 'Fitnessstudio besuchen', - 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', - }), - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'end': dict({ - 'date': '2024-10-08', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-07', - }), - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end': dict({ - 'date': '2024-10-08', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', - 'start': dict({ - 'date': '2024-10-07', - }), - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - ]) -# --- -# name: test_api_events[calendar.test_user_to_do_s] - list([ - dict({ - 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', - 'end': dict({ - 'date': '2024-09-01', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': None, - 'start': dict({ - 'date': '2024-08-31', - }), - 'summary': 'Rechnungen bezahlen', - 'uid': '2f6fcabc-f670-4ec3-ba65-817e8deea490', - }), - dict({ - 'description': 'Den Ausflug für das kommende Wochenende organisieren.', - 'end': dict({ - 'date': '2024-09-22', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': None, - 'start': dict({ - 'date': '2024-09-21', - }), - 'summary': 'Wochenendausflug planen', - 'uid': '86ea2475-d1b5-4020-bdcc-c188c7996afa', - }), - dict({ - 'description': 'Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.', - 'end': dict({ - 'date': '2024-09-28', - }), - 
'location': None, - 'recurrence_id': None, - 'rrule': None, - 'start': dict({ - 'date': '2024-09-27', - }), - 'summary': 'Buch zu Ende lesen', - 'uid': '88de7cd9-af2b-49ce-9afd-bf941d87336b', - }), - ]) -# --- -# name: test_calendar_platform[calendar.test_user_dailies-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'calendar', - 'entity_category': None, - 'entity_id': 'calendar.test_user_dailies', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dailies', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_dailys', - 'unit_of_measurement': None, - }) -# --- -# name: test_calendar_platform[calendar.test_user_dailies-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'all_day': True, - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'end_time': '2024-09-22 00:00:00', - 'friendly_name': 'test-user Dailies', - 'location': '', - 'message': '5 Minuten ruhig durchatmen', - 'start_time': '2024-09-21 00:00:00', - 'yesterdaily': False, - }), - 'context': , - 'entity_id': 'calendar.test_user_dailies', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_calendar_platform[calendar.test_user_to_do_s-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'calendar', - 'entity_category': None, - 'entity_id': 'calendar.test_user_to_do_s', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': "To-Do's", - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_todos', - 'unit_of_measurement': None, - }) -# --- -# name: test_calendar_platform[calendar.test_user_to_do_s-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'all_day': True, - 'description': 'Den Ausflug für das kommende Wochenende organisieren.', - 'end_time': '2024-09-22 00:00:00', - 'friendly_name': "test-user To-Do's", - 'location': '', - 'message': 'Wochenendausflug planen', - 'start_time': '2024-09-21 00:00:00', - }), - 'context': , - 'entity_id': 'calendar.test_user_to_do_s', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/habitica/snapshots/test_sensor.ambr b/tests/components/habitica/snapshots/test_sensor.ambr deleted file mode 100644 index 3a43069bfc4..00000000000 --- a/tests/components/habitica/snapshots/test_sensor.ambr +++ /dev/null @@ -1,1239 +0,0 @@ -# serializer version: 1 -# name: test_sensors[sensor.test_user_class-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'warrior', - 'healer', - 'wizard', - 'rogue', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.test_user_class', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Class', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_class', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.test_user_class-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'test-user Class', - 'options': list([ - 'warrior', - 'healer', - 'wizard', - 'rogue', - ]), - }), - 'context': , - 'entity_id': 'sensor.test_user_class', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'wizard', - }) -# --- -# name: test_sensors[sensor.test_user_constitution-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_constitution', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Constitution', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_constitution', - 'unit_of_measurement': 'CON', - }) -# --- -# name: test_sensors[sensor.test_user_constitution-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'allocated': 15, - 'buffs': 26, - 'class': 0, - 'equipment': 20, - 'friendly_name': 'test-user Constitution', - 'level': 19, - 'unit_of_measurement': 'CON', - }), - 'context': , - 'entity_id': 'sensor.test_user_constitution', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '80', - }) -# --- -# name: test_sensors[sensor.test_user_dailies-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_dailies', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dailies', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_dailys', - 'unit_of_measurement': 'tasks', - }) -# --- -# name: test_sensors[sensor.test_user_dailies-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1': dict({ - 'created_at': '2024-09-22T11:44:43.774Z', - 'every_x': 1, - 'frequency': 'weekly', - 'group': dict({ - 'assignedUsers': list([ - ]), - 'completedBy': dict({ - }), - }), - 'is_due': True, - 'next_due': list([ - '2024-09-24T22:00:00.000Z', - '2024-09-27T22:00:00.000Z', - '2024-09-28T22:00:00.000Z', - '2024-10-01T22:00:00.000Z', - '2024-10-04T22:00:00.000Z', - '2024-10-08T22:00:00.000Z', - ]), - 'notes': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', - 'priority': 2, - 'repeat': 
dict({ - 'f': False, - 'm': False, - 's': True, - 'su': True, - 't': False, - 'th': False, - 'w': True, - }), - 'start_date': '2024-09-21T22:00:00.000Z', - 'tags': list([ - '51076966-2970-4b40-b6ba-d58c6a756dd7', - ]), - 'text': 'Fitnessstudio besuchen', - 'type': 'daily', - 'yester_daily': True, - }), - '564b9ac9-c53d-4638-9e7f-1cd96fe19baa': dict({ - 'completed': True, - 'created_at': '2024-07-07T17:51:53.268Z', - 'every_x': 1, - 'frequency': 'weekly', - 'group': dict({ - 'assignedUsers': list([ - ]), - 'completedBy': dict({ - }), - }), - 'is_due': True, - 'next_due': list([ - 'Mon Sep 23 2024 00:00:00 GMT+0200', - 'Tue Sep 24 2024 00:00:00 GMT+0200', - 'Wed Sep 25 2024 00:00:00 GMT+0200', - 'Thu Sep 26 2024 00:00:00 GMT+0200', - 'Fri Sep 27 2024 00:00:00 GMT+0200', - 'Sat Sep 28 2024 00:00:00 GMT+0200', - ]), - 'notes': 'Klicke um Änderungen zu machen!', - 'priority': 1, - 'repeat': dict({ - 'f': True, - 'm': True, - 's': True, - 'su': True, - 't': True, - 'th': True, - 'w': True, - }), - 'start_date': '2024-07-06T22:00:00.000Z', - 'streak': 1, - 'text': 'Zahnseide benutzen', - 'type': 'daily', - 'value': -2.9663035443712333, - 'yester_daily': True, - }), - 'f2c85972-1a19-4426-bc6d-ce3337b9d99f': dict({ - 'created_at': '2024-07-07T17:51:53.266Z', - 'every_x': 1, - 'frequency': 'weekly', - 'group': dict({ - 'assignedUsers': list([ - ]), - 'completedBy': dict({ - }), - }), - 'is_due': True, - 'next_due': list([ - '2024-09-22T22:00:00.000Z', - '2024-09-23T22:00:00.000Z', - '2024-09-24T22:00:00.000Z', - '2024-09-25T22:00:00.000Z', - '2024-09-26T22:00:00.000Z', - '2024-09-27T22:00:00.000Z', - ]), - 'notes': 'Klicke um Deinen Terminplan festzulegen!', - 'priority': 1, - 'repeat': dict({ - 'f': True, - 'm': True, - 's': True, - 'su': True, - 't': True, - 'th': True, - 'w': True, - }), - 'start_date': '2024-07-06T22:00:00.000Z', - 'text': '5 Minuten ruhig durchatmen', - 'type': 'daily', - 'value': -1.919611992979862, - 'yester_daily': True, - }), - 'friendly_name': 'test-user Dailies', - 'unit_of_measurement': 'tasks', - }), - 'context': , - 'entity_id': 'sensor.test_user_dailies', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3', - }) -# --- -# name: test_sensors[sensor.test_user_display_name-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_display_name', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Display name', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_display_name', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.test_user_display_name-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Display name', - }), - 'context': , - 'entity_id': 'sensor.test_user_display_name', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'test-user', - }) -# --- -# name: test_sensors[sensor.test_user_experience-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': 
None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_experience', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Experience', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_experience', - 'unit_of_measurement': 'XP', - }) -# --- -# name: test_sensors[sensor.test_user_experience-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Experience', - 'unit_of_measurement': 'XP', - }), - 'context': , - 'entity_id': 'sensor.test_user_experience', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '737', - }) -# --- -# name: test_sensors[sensor.test_user_gems-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_gems', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Gems', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_gems', - 'unit_of_measurement': 'gems', - }) -# --- -# name: test_sensors[sensor.test_user_gems-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Gems', - 'unit_of_measurement': 'gems', - }), - 'context': , - 'entity_id': 'sensor.test_user_gems', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_user_gold-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_gold', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Gold', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_gold', - 'unit_of_measurement': 'GP', - }) -# --- -# name: test_sensors[sensor.test_user_gold-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Gold', - 'unit_of_measurement': 'GP', - }), - 'context': , - 'entity_id': 'sensor.test_user_gold', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '137.625872146098', - }) -# --- -# name: test_sensors[sensor.test_user_habits-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_habits', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Habits', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_habits', - 'unit_of_measurement': 'tasks', - }) -# --- -# name: test_sensors[sensor.test_user_habits-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - '1d147de6-5c02-4740-8e2f-71d3015a37f4': dict({ - 'created_at': '2024-07-07T17:51:53.266Z', - 'frequency': 'daily', - 'group': dict({ - 'assignedUsers': list([ - ]), - 'completedBy': dict({ - }), - }), - 'priority': 1, - 'text': 'Eine kurze Pause machen', - 'type': 'habit', - 'up': True, - }), - 'bc1d1855-b2b8-4663-98ff-62e7b763dfc4': dict({ - 'created_at': '2024-07-07T17:51:53.265Z', - 'down': True, - 'frequency': 'daily', - 'group': dict({ - 'assignedUsers': list([ - ]), - 'completedBy': dict({ - }), - }), - 'notes': 'Oder lösche es über die Bearbeitungs-Ansicht', - 'priority': 1, - 'text': 'Klicke hier um dies als schlechte Gewohnheit zu markieren, die Du gerne loswerden möchtest', - 'type': 'habit', - }), - 'e97659e0-2c42-4599-a7bb-00282adc410d': dict({ - 'created_at': '2024-07-07T17:51:53.264Z', - 'frequency': 'daily', - 'group': dict({ - 'assignedUsers': list([ - ]), - 'completedBy': dict({ - }), - }), - 'notes': 'Eine Gewohnheit, eine Tagesaufgabe oder ein To-Do', - 'priority': 1, - 'text': 'Füge eine Aufgabe zu Habitica hinzu', - 'type': 'habit', - 'up': True, - }), - 'f21fa608-cfc6-4413-9fc7-0eb1b48ca43a': dict({ - 'created_at': '2024-07-07T17:51:53.268Z', - 'down': True, - 'frequency': 'daily', - 'group': dict({ - 'assignedUsers': list([ - ]), - 'completedBy': dict({ - }), - }), - 'priority': 1, - 'text': 'Gesundes Essen/Junkfood', - 'type': 'habit', - 'up': True, - }), - 'friendly_name': 'test-user Habits', - 'unit_of_measurement': 'tasks', - }), - 'context': , - 'entity_id': 'sensor.test_user_habits', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4', - }) -# --- -# name: test_sensors[sensor.test_user_health-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_health', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Health', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_health', - 'unit_of_measurement': 'HP', - }) -# --- -# name: test_sensors[sensor.test_user_health-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Health', - 'unit_of_measurement': 'HP', - }), - 'context': , - 'entity_id': 'sensor.test_user_health', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_user_intelligence-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 
'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_intelligence', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Intelligence', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_intelligence', - 'unit_of_measurement': 'INT', - }) -# --- -# name: test_sensors[sensor.test_user_intelligence-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'allocated': 15, - 'buffs': 26, - 'class': 0, - 'equipment': 0, - 'friendly_name': 'test-user Intelligence', - 'level': 19, - 'unit_of_measurement': 'INT', - }), - 'context': , - 'entity_id': 'sensor.test_user_intelligence', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '60', - }) -# --- -# name: test_sensors[sensor.test_user_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Level', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_level', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.test_user_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Level', - }), - 'context': , - 'entity_id': 'sensor.test_user_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '38', - }) -# --- -# name: test_sensors[sensor.test_user_mana-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_mana', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Mana', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_mana', - 'unit_of_measurement': 'MP', - }) -# --- -# name: test_sensors[sensor.test_user_mana-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Mana', - 'unit_of_measurement': 'MP', - }), - 'context': , - 'entity_id': 'sensor.test_user_mana', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.9', - }) -# --- -# name: test_sensors[sensor.test_user_max_health-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.test_user_max_health', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Max. health', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_health_max', - 'unit_of_measurement': 'HP', - }) -# --- -# name: test_sensors[sensor.test_user_max_health-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Max. health', - 'unit_of_measurement': 'HP', - }), - 'context': , - 'entity_id': 'sensor.test_user_max_health', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50', - }) -# --- -# name: test_sensors[sensor.test_user_max_mana-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_max_mana', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Max. mana', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_mana_max', - 'unit_of_measurement': 'MP', - }) -# --- -# name: test_sensors[sensor.test_user_max_mana-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Max. mana', - 'unit_of_measurement': 'MP', - }), - 'context': , - 'entity_id': 'sensor.test_user_max_mana', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '166', - }) -# --- -# name: test_sensors[sensor.test_user_mystic_hourglasses-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_mystic_hourglasses', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Mystic hourglasses', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_trinkets', - 'unit_of_measurement': '⧖', - }) -# --- -# name: test_sensors[sensor.test_user_mystic_hourglasses-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Mystic hourglasses', - 'unit_of_measurement': '⧖', - }), - 'context': , - 'entity_id': 'sensor.test_user_mystic_hourglasses', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_user_next_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_next_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': 
None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Next level', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_experience_max', - 'unit_of_measurement': 'XP', - }) -# --- -# name: test_sensors[sensor.test_user_next_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Next level', - 'unit_of_measurement': 'XP', - }), - 'context': , - 'entity_id': 'sensor.test_user_next_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '880', - }) -# --- -# name: test_sensors[sensor.test_user_perception-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_perception', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Perception', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_perception', - 'unit_of_measurement': 'PER', - }) -# --- -# name: test_sensors[sensor.test_user_perception-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'allocated': 15, - 'buffs': 26, - 'class': 0, - 'equipment': 8, - 'friendly_name': 'test-user Perception', - 'level': 19, - 'unit_of_measurement': 'PER', - }), - 'context': , - 'entity_id': 'sensor.test_user_perception', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '68', - }) -# --- -# name: test_sensors[sensor.test_user_rewards-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_rewards', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Rewards', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_rewards', - 'unit_of_measurement': 'tasks', - }) -# --- -# name: test_sensors[sensor.test_user_rewards-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - '5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b': dict({ - 'created_at': '2024-07-07T17:51:53.266Z', - 'group': dict({ - 'assignedUsers': list([ - ]), - 'completedBy': dict({ - }), - }), - 'notes': 'Schaue fern, spiele ein Spiel, gönne Dir einen Leckerbissen, es liegt ganz bei Dir!', - 'priority': 1, - 'text': 'Belohne Dich selbst', - 'type': 'reward', - 'value': 10, - }), - 'friendly_name': 'test-user Rewards', - 'unit_of_measurement': 'tasks', - }), - 'context': , - 'entity_id': 'sensor.test_user_rewards', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensors[sensor.test_user_strength-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_strength', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Strength', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_strength', - 'unit_of_measurement': 'STR', - }) -# --- -# name: test_sensors[sensor.test_user_strength-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'allocated': 15, - 'buffs': 26, - 'class': 0, - 'equipment': 27, - 'friendly_name': 'test-user Strength', - 'level': 19, - 'unit_of_measurement': 'STR', - }), - 'context': , - 'entity_id': 'sensor.test_user_strength', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '87', - }) -# --- -# name: test_sensors[sensor.test_user_to_do_s-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_to_do_s', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': "To-Do's", - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_todos', - 'unit_of_measurement': 'tasks', - }) -# --- -# name: test_sensors[sensor.test_user_to_do_s-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - '1aa3137e-ef72-4d1f-91ee-41933602f438': dict({ - 'created_at': '2024-09-21T22:16:38.153Z', - 'group': dict({ - 'assignedUsers': list([ - ]), - 'completedBy': dict({ - }), - }), - 'notes': 'Rasen mähen und die Pflanzen gießen.', - 'priority': 1, - 'text': 'Garten pflegen', - 'type': 'todo', - }), - '2f6fcabc-f670-4ec3-ba65-817e8deea490': dict({ - 'created_at': '2024-09-21T22:17:19.513Z', - 'group': dict({ - 'assignedUsers': list([ - ]), - 'completedBy': dict({ - }), - }), - 'notes': 'Strom- und Internetrechnungen rechtzeitig überweisen.', - 'priority': 1, - 'text': 'Rechnungen bezahlen', - 'type': 'todo', - }), - '86ea2475-d1b5-4020-bdcc-c188c7996afa': dict({ - 'created_at': '2024-09-21T22:16:16.756Z', - 'group': dict({ - 'assignedUsers': list([ - ]), - 'completedBy': dict({ - }), - }), - 'notes': 'Den Ausflug für das kommende Wochenende organisieren.', - 'priority': 1, - 'tags': list([ - '51076966-2970-4b40-b6ba-d58c6a756dd7', - ]), - 'text': 'Wochenendausflug planen', - 'type': 'todo', - }), - '88de7cd9-af2b-49ce-9afd-bf941d87336b': dict({ - 'created_at': '2024-09-21T22:17:57.816Z', - 'group': dict({ - 'assignedUsers': list([ - ]), - 'completedBy': dict({ - }), - }), - 'notes': 'Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.', - 'priority': 1, - 'text': 'Buch zu Ende lesen', - 'type': 'todo', - }), - 'friendly_name': "test-user To-Do's", - 'unit_of_measurement': 'tasks', - }), - 'context': , - 'entity_id': 'sensor.test_user_to_do_s', 
- 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4', - }) -# --- diff --git a/tests/components/habitica/snapshots/test_switch.ambr b/tests/components/habitica/snapshots/test_switch.ambr deleted file mode 100644 index 3affbd11e2a..00000000000 --- a/tests/components/habitica/snapshots/test_switch.ambr +++ /dev/null @@ -1,48 +0,0 @@ -# serializer version: 1 -# name: test_switch[switch.test_user_rest_in_the_inn-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.test_user_rest_in_the_inn', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rest in the inn', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_sleep', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[switch.test_user_rest_in_the_inn-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'test-user Rest in the inn', - }), - 'context': , - 'entity_id': 'switch.test_user_rest_in_the_inn', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/habitica/snapshots/test_todo.ambr b/tests/components/habitica/snapshots/test_todo.ambr deleted file mode 100644 index 79eca9dbbb0..00000000000 --- a/tests/components/habitica/snapshots/test_todo.ambr +++ /dev/null @@ -1,189 +0,0 @@ -# serializer version: 1 -# name: test_complete_todo_item[daily] - tuple( - 'Habitica', - ''' - ![Dragon](https://habitica-assets.s3.amazonaws.com/mobileApp/images/Pet_Egg_Dragon.png) - You've found a Dragon Egg! - ''', - ) -# --- -# name: test_complete_todo_item[todo] - tuple( - 'Habitica', - ''' - ![Dragon](https://habitica-assets.s3.amazonaws.com/mobileApp/images/Pet_Egg_Dragon.png) - You've found a Dragon Egg! 
- ''', - ) -# --- -# name: test_todo_items[todo.test_user_dailies] - dict({ - 'todo.test_user_dailies': dict({ - 'items': list([ - dict({ - 'description': 'Klicke um Änderungen zu machen!', - 'due': '2024-09-22', - 'status': 'completed', - 'summary': 'Zahnseide benutzen', - 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', - }), - dict({ - 'description': 'Klicke um Deinen Terminplan festzulegen!', - 'due': '2024-09-21', - 'status': 'needs_action', - 'summary': '5 Minuten ruhig durchatmen', - 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', - }), - dict({ - 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', - 'due': '2024-09-21', - 'status': 'needs_action', - 'summary': 'Fitnessstudio besuchen', - 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', - }), - ]), - }), - }) -# --- -# name: test_todo_items[todo.test_user_to_do_s] - dict({ - 'todo.test_user_to_do_s': dict({ - 'items': list([ - dict({ - 'description': 'Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.', - 'due': '2024-09-27', - 'status': 'needs_action', - 'summary': 'Buch zu Ende lesen', - 'uid': '88de7cd9-af2b-49ce-9afd-bf941d87336b', - }), - dict({ - 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', - 'due': '2024-08-31', - 'status': 'needs_action', - 'summary': 'Rechnungen bezahlen', - 'uid': '2f6fcabc-f670-4ec3-ba65-817e8deea490', - }), - dict({ - 'description': 'Rasen mähen und die Pflanzen gießen.', - 'status': 'needs_action', - 'summary': 'Garten pflegen', - 'uid': '1aa3137e-ef72-4d1f-91ee-41933602f438', - }), - dict({ - 'description': 'Den Ausflug für das kommende Wochenende organisieren.', - 'due': '2024-09-21', - 'status': 'needs_action', - 'summary': 'Wochenendausflug planen', - 'uid': '86ea2475-d1b5-4020-bdcc-c188c7996afa', - }), - dict({ - 'description': 'Lebensmittel und Haushaltsbedarf für die Woche einkaufen.', - 'status': 'completed', - 'summary': 'Wocheneinkauf erledigen', - 'uid': '162f0bbe-a097-4a06-b4f4-8fbeed85d2ba', - }), - dict({ - 'description': 'Wohnzimmer und Küche gründlich aufräumen.', - 'status': 'completed', - 'summary': 'Wohnung aufräumen', - 'uid': '3fa06743-aa0f-472b-af1a-f27c755e329c', - }), - ]), - }), - }) -# --- -# name: test_todos[todo.test_user_dailies-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'todo', - 'entity_category': None, - 'entity_id': 'todo.test_user_dailies', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dailies', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_dailys', - 'unit_of_measurement': None, - }) -# --- -# name: test_todos[todo.test_user_dailies-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test-user Dailies', - 'supported_features': , - }), - 'context': , - 'entity_id': 'todo.test_user_dailies', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_todos[todo.test_user_to_do_s-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'todo', - 'entity_category': 
None, - 'entity_id': 'todo.test_user_to_do_s', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': "To-Do's", - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_todos', - 'unit_of_measurement': None, - }) -# --- -# name: test_todos[todo.test_user_to_do_s-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': "test-user To-Do's", - 'supported_features': , - }), - 'context': , - 'entity_id': 'todo.test_user_to_do_s', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4', - }) -# --- diff --git a/tests/components/habitica/test_binary_sensor.py b/tests/components/habitica/test_binary_sensor.py deleted file mode 100644 index 1710f8f217e..00000000000 --- a/tests/components/habitica/test_binary_sensor.py +++ /dev/null @@ -1,84 +0,0 @@ -"""Tests for the Habitica binary sensor platform.""" - -from collections.abc import Generator -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.habitica.const import ASSETS_URL, DEFAULT_URL, DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_OFF, STATE_ON, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform -from tests.test_util.aiohttp import AiohttpClientMocker - - -@pytest.fixture(autouse=True) -def binary_sensor_only() -> Generator[None]: - """Enable only the binarty sensor platform.""" - with patch( - "homeassistant.components.habitica.PLATFORMS", - [Platform.BINARY_SENSOR], - ): - yield - - -@pytest.mark.usefixtures("mock_habitica") -async def test_binary_sensors( - hass: HomeAssistant, - config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Test setup of the Habitica binary sensor platform.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -@pytest.mark.parametrize( - ("fixture", "entity_state", "entity_picture"), - [ - ("user", STATE_ON, f"{ASSETS_URL}inventory_quest_scroll_dustbunnies.png"), - ("quest_invitation_off", STATE_OFF, None), - ], -) -async def test_pending_quest_states( - hass: HomeAssistant, - config_entry: MockConfigEntry, - aioclient_mock: AiohttpClientMocker, - fixture: str, - entity_state: str, - entity_picture: str | None, -) -> None: - """Test states of pending quest sensor.""" - - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/user", - json=load_json_object_fixture(f"{fixture}.json", DOMAIN), - ) - aioclient_mock.get(f"{DEFAULT_URL}/api/v3/tasks/user", json={"data": []}) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/content", - params={"language": "en"}, - json=load_json_object_fixture("content.json", DOMAIN), - ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - assert ( - state := 
hass.states.get("binary_sensor.test_user_pending_quest_invitation") - ) - assert state.state == entity_state - assert state.attributes.get("entity_picture") == entity_picture diff --git a/tests/components/habitica/test_button.py b/tests/components/habitica/test_button.py deleted file mode 100644 index 979cefef923..00000000000 --- a/tests/components/habitica/test_button.py +++ /dev/null @@ -1,342 +0,0 @@ -"""Tests for Habitica button platform.""" - -from collections.abc import Generator -from http import HTTPStatus -import re -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.components.habitica.const import DEFAULT_URL, DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import entity_registry as er - -from .conftest import mock_called_with - -from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform -from tests.test_util.aiohttp import AiohttpClientMocker - - -@pytest.fixture(autouse=True) -def button_only() -> Generator[None]: - """Enable only the button platform.""" - with patch( - "homeassistant.components.habitica.PLATFORMS", - [Platform.BUTTON], - ): - yield - - -@pytest.mark.parametrize( - "fixture", - [ - "wizard_fixture", - "rogue_fixture", - "warrior_fixture", - "healer_fixture", - ], -) -async def test_buttons( - hass: HomeAssistant, - config_entry: MockConfigEntry, - aioclient_mock: AiohttpClientMocker, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - fixture: str, -) -> None: - """Test button entities.""" - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/user", - json=load_json_object_fixture(f"{fixture}.json", DOMAIN), - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/tasks/user", - params={"type": "completedTodos"}, - json=load_json_object_fixture("completed_todos.json", DOMAIN), - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/tasks/user", - json=load_json_object_fixture("tasks.json", DOMAIN), - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/content", - params={"language": "en"}, - json=load_json_object_fixture("content.json", DOMAIN), - ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -@pytest.mark.parametrize( - ("entity_id", "api_url", "fixture"), - [ - ("button.test_user_allocate_all_stat_points", "user/allocate-now", "user"), - ("button.test_user_buy_a_health_potion", "user/buy-health-potion", "user"), - ("button.test_user_revive_from_death", "user/revive", "user"), - ("button.test_user_start_my_day", "cron", "user"), - ( - "button.test_user_chilling_frost", - "user/class/cast/frost", - "wizard_fixture", - ), - ( - "button.test_user_earthquake", - "user/class/cast/earth", - "wizard_fixture", - ), - ( - "button.test_user_ethereal_surge", - "user/class/cast/mpheal", - "wizard_fixture", - ), - ( - "button.test_user_stealth", - "user/class/cast/stealth", - "rogue_fixture", - ), - ( - "button.test_user_tools_of_the_trade", - "user/class/cast/toolsOfTrade", - "rogue_fixture", - ), - ( - 
"button.test_user_defensive_stance", - "user/class/cast/defensiveStance", - "warrior_fixture", - ), - ( - "button.test_user_intimidating_gaze", - "user/class/cast/intimidate", - "warrior_fixture", - ), - ( - "button.test_user_valorous_presence", - "user/class/cast/valorousPresence", - "warrior_fixture", - ), - ( - "button.test_user_healing_light", - "user/class/cast/heal", - "healer_fixture", - ), - ( - "button.test_user_protective_aura", - "user/class/cast/protectAura", - "healer_fixture", - ), - ( - "button.test_user_searing_brightness", - "user/class/cast/brightness", - "healer_fixture", - ), - ( - "button.test_user_blessing", - "user/class/cast/healAll", - "healer_fixture", - ), - ], -) -async def test_button_press( - hass: HomeAssistant, - config_entry: MockConfigEntry, - aioclient_mock: AiohttpClientMocker, - entity_id: str, - api_url: str, - fixture: str, -) -> None: - """Test button press method.""" - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/user", - json=load_json_object_fixture(f"{fixture}.json", DOMAIN), - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/tasks/user", - params={"type": "completedTodos"}, - json=load_json_object_fixture("completed_todos.json", DOMAIN), - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/tasks/user", - json=load_json_object_fixture("tasks.json", DOMAIN), - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/content", - params={"language": "en"}, - json=load_json_object_fixture("content.json", DOMAIN), - ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - aioclient_mock.post(f"{DEFAULT_URL}/api/v3/{api_url}", json={"data": None}) - - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - assert mock_called_with(aioclient_mock, "post", f"{DEFAULT_URL}/api/v3/{api_url}") - - -@pytest.mark.parametrize( - ("entity_id", "api_url"), - [ - ("button.test_user_allocate_all_stat_points", "user/allocate-now"), - ("button.test_user_buy_a_health_potion", "user/buy-health-potion"), - ("button.test_user_revive_from_death", "user/revive"), - ("button.test_user_start_my_day", "cron"), - ("button.test_user_chilling_frost", "user/class/cast/frost"), - ("button.test_user_earthquake", "user/class/cast/earth"), - ("button.test_user_ethereal_surge", "user/class/cast/mpheal"), - ], - ids=[ - "allocate-points", - "health-potion", - "revive", - "run-cron", - "chilling frost", - "earthquake", - "ethereal surge", - ], -) -@pytest.mark.parametrize( - ("status_code", "msg", "exception"), - [ - ( - HTTPStatus.TOO_MANY_REQUESTS, - "Rate limit exceeded, try again later", - ServiceValidationError, - ), - ( - HTTPStatus.BAD_REQUEST, - "Unable to connect to Habitica, try again later", - HomeAssistantError, - ), - ( - HTTPStatus.UNAUTHORIZED, - "Unable to complete action, the required conditions are not met", - ServiceValidationError, - ), - ], -) -async def test_button_press_exceptions( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - entity_id: str, - api_url: str, - status_code: HTTPStatus, - msg: str, - exception: Exception, -) -> None: - """Test button press exceptions.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/{api_url}", - 
status=status_code, - json={"data": None}, - ) - - with pytest.raises(exception, match=msg): - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - assert mock_called_with(mock_habitica, "post", f"{DEFAULT_URL}/api/v3/{api_url}") - - -@pytest.mark.parametrize( - ("fixture", "entity_ids"), - [ - ( - "common_buttons_unavailable", - [ - "button.test_user_allocate_all_stat_points", - "button.test_user_revive_from_death", - "button.test_user_buy_a_health_potion", - "button.test_user_start_my_day", - ], - ), - ( - "wizard_skills_unavailable", - [ - "button.test_user_chilling_frost", - "button.test_user_earthquake", - "button.test_user_ethereal_surge", - ], - ), - ("wizard_frost_unavailable", ["button.test_user_chilling_frost"]), - ( - "rogue_skills_unavailable", - ["button.test_user_tools_of_the_trade", "button.test_user_stealth"], - ), - ("rogue_stealth_unavailable", ["button.test_user_stealth"]), - ( - "warrior_skills_unavailable", - [ - "button.test_user_defensive_stance", - "button.test_user_intimidating_gaze", - "button.test_user_valorous_presence", - ], - ), - ( - "healer_skills_unavailable", - [ - "button.test_user_healing_light", - "button.test_user_protective_aura", - "button.test_user_searing_brightness", - "button.test_user_blessing", - ], - ), - ], -) -async def test_button_unavailable( - hass: HomeAssistant, - config_entry: MockConfigEntry, - aioclient_mock: AiohttpClientMocker, - fixture: str, - entity_ids: list[str], -) -> None: - """Test buttons are unavailable if conditions are not met.""" - - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/user", - json=load_json_object_fixture(f"{fixture}.json", DOMAIN), - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/tasks/user", - json=load_json_object_fixture("tasks.json", DOMAIN), - ) - aioclient_mock.get(re.compile(r".*"), json={"data": []}) - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - for entity_id in entity_ids: - assert (state := hass.states.get(entity_id)) - assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/habitica/test_calendar.py b/tests/components/habitica/test_calendar.py deleted file mode 100644 index 7c0a2686038..00000000000 --- a/tests/components/habitica/test_calendar.py +++ /dev/null @@ -1,80 +0,0 @@ -"""Tests for the Habitica calendar platform.""" - -from collections.abc import Generator -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry, snapshot_platform -from tests.typing import ClientSessionGenerator - - -@pytest.fixture(autouse=True) -def calendar_only() -> Generator[None]: - """Enable only the calendar platform.""" - with patch( - "homeassistant.components.habitica.PLATFORMS", - [Platform.CALENDAR], - ): - yield - - -@pytest.fixture(autouse=True) -async def set_tz(hass: HomeAssistant) -> None: - """Fixture to set timezone.""" - await hass.config.async_set_time_zone("Europe/Berlin") - - -@pytest.mark.usefixtures("mock_habitica") -@pytest.mark.freeze_time("2024-09-20T22:00:00.000Z") -async def test_calendar_platform( - hass: HomeAssistant, - config_entry: MockConfigEntry, - snapshot: 
SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Test setup of the Habitica calendar platform.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -@pytest.mark.parametrize( - ("entity"), - [ - "calendar.test_user_to_do_s", - "calendar.test_user_dailies", - ], -) -@pytest.mark.freeze_time("2024-09-20T22:00:00.000Z") -@pytest.mark.usefixtures("mock_habitica") -async def test_api_events( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - config_entry: MockConfigEntry, - hass_client: ClientSessionGenerator, - entity: str, -) -> None: - """Test calendar event.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - client = await hass_client() - response = await client.get( - f"/api/calendars/{entity}?start=2024-08-29&end=2024-10-08" - ) - - assert await response.json() == snapshot diff --git a/tests/components/habitica/test_config_flow.py b/tests/components/habitica/test_config_flow.py index 604877f0c47..4dfc696daf2 100644 --- a/tests/components/habitica/test_config_flow.py +++ b/tests/components/habitica/test_config_flow.py @@ -3,150 +3,26 @@ from unittest.mock import AsyncMock, MagicMock, patch from aiohttp import ClientResponseError -import pytest from homeassistant import config_entries -from homeassistant.components.habitica.const import CONF_API_USER, DEFAULT_URL, DOMAIN -from homeassistant.const import ( - CONF_API_KEY, - CONF_PASSWORD, - CONF_URL, - CONF_USERNAME, - CONF_VERIFY_SSL, -) +from homeassistant.components.habitica.const import DEFAULT_URL, DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -MOCK_DATA_LOGIN_STEP = { - CONF_USERNAME: "test-email@example.com", - CONF_PASSWORD: "test-password", -} -MOCK_DATA_ADVANCED_STEP = { - CONF_API_USER: "test-api-user", - CONF_API_KEY: "test-api-key", - CONF_URL: DEFAULT_URL, - CONF_VERIFY_SSL: True, -} +from tests.common import MockConfigEntry -async def test_form_login(hass: HomeAssistant) -> None: - """Test we get the login form.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.MENU - assert "login" in result["menu_options"] - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "login"} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - assert result["step_id"] == "login" - - mock_obj = MagicMock() - mock_obj.user.auth.local.login.post = AsyncMock() - mock_obj.user.auth.local.login.post.return_value = { - "id": "test-api-user", - "apiToken": "test-api-key", - "username": "test-username", - } - with ( - patch( - "homeassistant.components.habitica.config_flow.HabitipyAsync", - return_value=mock_obj, - ), - patch( - "homeassistant.components.habitica.async_setup", return_value=True - ) as mock_setup, - patch( - "homeassistant.components.habitica.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=MOCK_DATA_LOGIN_STEP, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - 
assert result["title"] == "test-username" - assert result["data"] == { - **MOCK_DATA_ADVANCED_STEP, - CONF_USERNAME: "test-username", - } - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("raise_error", "text_error"), - [ - (ClientResponseError(MagicMock(), (), status=400), "cannot_connect"), - (ClientResponseError(MagicMock(), (), status=401), "invalid_auth"), - (IndexError(), "unknown"), - ], -) -async def test_form_login_errors(hass: HomeAssistant, raise_error, text_error) -> None: - """Test we handle invalid credentials error.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "login"} - ) - - mock_obj = MagicMock() - mock_obj.user.auth.local.login.post = AsyncMock(side_effect=raise_error) - with patch( - "homeassistant.components.habitica.config_flow.HabitipyAsync", - return_value=mock_obj, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=MOCK_DATA_LOGIN_STEP, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": text_error} - - -async def test_form_advanced(hass: HomeAssistant) -> None: +async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - - assert result["type"] is FlowResultType.MENU - assert "advanced" in result["menu_options"] - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "advanced"} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - assert result["step_id"] == "advanced" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "advanced"} - ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} mock_obj = MagicMock() mock_obj.user.get = AsyncMock() - mock_obj.user.get.return_value = {"auth": {"local": {"username": "test-username"}}} with ( patch( @@ -163,46 +39,29 @@ async def test_form_advanced(hass: HomeAssistant) -> None: ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=MOCK_DATA_ADVANCED_STEP, + {"api_user": "test-api-user", "api_key": "test-api-key"}, ) await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "test-username" + assert result2["title"] == "Default username" assert result2["data"] == { - **MOCK_DATA_ADVANCED_STEP, - CONF_USERNAME: "test-username", + "url": DEFAULT_URL, + "api_user": "test-api-user", + "api_key": "test-api-key", } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.parametrize( - ("raise_error", "text_error"), - [ - (ClientResponseError(MagicMock(), (), status=400), "cannot_connect"), - (ClientResponseError(MagicMock(), (), status=401), "invalid_auth"), - (IndexError(), "unknown"), - ], -) -async def test_form_advanced_errors( - hass: HomeAssistant, raise_error, text_error -) -> None: +async def test_form_invalid_credentials(hass: HomeAssistant) -> None: """Test we handle invalid credentials error.""" - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - 
assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "advanced"} - ) - mock_obj = MagicMock() - mock_obj.user.get = AsyncMock(side_effect=raise_error) + mock_obj.user.get = AsyncMock(side_effect=ClientResponseError(MagicMock(), ())) with patch( "homeassistant.components.habitica.config_flow.HabitipyAsync", @@ -210,8 +69,73 @@ async def test_form_advanced_errors( ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=MOCK_DATA_ADVANCED_STEP, + { + "url": DEFAULT_URL, + "api_user": "test-api-user", + "api_key": "test-api-key", + }, ) assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": text_error} + assert result2["errors"] == {"base": "invalid_credentials"} + + +async def test_form_unexpected_exception(hass: HomeAssistant) -> None: + """Test we handle unexpected exception error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + mock_obj = MagicMock() + mock_obj.user.get = AsyncMock(side_effect=Exception) + + with patch( + "homeassistant.components.habitica.config_flow.HabitipyAsync", + return_value=mock_obj, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "url": DEFAULT_URL, + "api_user": "test-api-user", + "api_key": "test-api-key", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "unknown"} + + +async def test_manual_flow_config_exist(hass: HomeAssistant) -> None: + """Test config flow discovers only already configured config.""" + MockConfigEntry( + domain=DOMAIN, + unique_id="test-api-user", + data={"api_user": "test-api-user", "api_key": "test-api-key"}, + ).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + mock_obj = MagicMock() + mock_obj.user.get = AsyncMock(return_value={"api_user": "test-api-user"}) + + with patch( + "homeassistant.components.habitica.config_flow.HabitipyAsync", + return_value=mock_obj, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "url": DEFAULT_URL, + "api_user": "test-api-user", + "api_key": "test-api-key", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/habitica/test_init.py b/tests/components/habitica/test_init.py index fd8a18b2d44..24c55c473b9 100644 --- a/tests/components/habitica/test_init.py +++ b/tests/components/habitica/test_init.py @@ -1,10 +1,7 @@ """Test the habitica module.""" -import datetime from http import HTTPStatus -import logging -from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.habitica.const import ( @@ -16,16 +13,10 @@ from homeassistant.components.habitica.const import ( EVENT_API_CALL_SUCCESS, SERVICE_API_CALL, ) -from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_NAME -from homeassistant.core import Event, HomeAssistant +from homeassistant.core import HomeAssistant -from tests.common import ( - MockConfigEntry, - async_capture_events, - async_fire_time_changed, - load_json_object_fixture, -) +from tests.common import MockConfigEntry, async_capture_events from tests.test_util.aiohttp import 
AiohttpClientMocker TEST_API_CALL_ARGS = {"text": "Use API from Home Assistant", "type": "todo"} @@ -33,52 +24,108 @@ TEST_USER_NAME = "test_user" @pytest.fixture -def capture_api_call_success(hass: HomeAssistant) -> list[Event]: +def capture_api_call_success(hass): """Capture api_call events.""" return async_capture_events(hass, EVENT_API_CALL_SUCCESS) -@pytest.mark.usefixtures("mock_habitica") -async def test_entry_setup_unload( - hass: HomeAssistant, config_entry: MockConfigEntry -) -> None: - """Test integration setup and unload.""" - - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - assert await hass.config_entries.async_unload(config_entry.entry_id) - - assert config_entry.state is ConfigEntryState.NOT_LOADED +@pytest.fixture +def habitica_entry(hass): + """Test entry for the following tests.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="test-api-user", + data={ + "api_user": "test-api-user", + "api_key": "test-api-key", + "url": DEFAULT_URL, + }, + ) + entry.add_to_hass(hass) + return entry -@pytest.mark.usefixtures("mock_habitica") -async def test_service_call( - hass: HomeAssistant, - config_entry: MockConfigEntry, - capture_api_call_success: list[Event], - mock_habitica: AiohttpClientMocker, -) -> None: - """Test integration setup, service call and unload.""" - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() +@pytest.fixture +def common_requests(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: + """Register requests for the tests.""" + aioclient_mock.get( + "https://habitica.com/api/v3/user", + json={ + "data": { + "api_user": "test-api-user", + "profile": {"name": TEST_USER_NAME}, + "stats": { + "class": "warrior", + "con": 1, + "exp": 2, + "gp": 3, + "hp": 4, + "int": 5, + "lvl": 6, + "maxHealth": 7, + "maxMP": 8, + "mp": 9, + "per": 10, + "points": 11, + "str": 12, + "toNextLevel": 13, + }, + } + }, + ) - assert config_entry.state is ConfigEntryState.LOADED + aioclient_mock.get( + "https://habitica.com/api/v3/tasks/user", + json={ + "data": [ + { + "text": f"this is a mock {task} #{i}", + "id": f"{i}", + "type": task, + "completed": False, + } + for i, task in enumerate(("habit", "daily", "todo", "reward"), start=1) + ] + }, + ) - assert len(capture_api_call_success) == 0 - - mock_habitica.post( + aioclient_mock.post( "https://habitica.com/api/v3/tasks/user", status=HTTPStatus.CREATED, json={"data": TEST_API_CALL_ARGS}, ) + return aioclient_mock + + +async def test_entry_setup_unload( + hass: HomeAssistant, habitica_entry, common_requests +) -> None: + """Test integration setup and unload.""" + assert await hass.config_entries.async_setup(habitica_entry.entry_id) + await hass.async_block_till_done() + + assert hass.services.has_service(DOMAIN, SERVICE_API_CALL) + + assert await hass.config_entries.async_unload(habitica_entry.entry_id) + + assert not hass.services.has_service(DOMAIN, SERVICE_API_CALL) + + +async def test_service_call( + hass: HomeAssistant, habitica_entry, common_requests, capture_api_call_success +) -> None: + """Test integration setup, service call and unload.""" + + assert await hass.config_entries.async_setup(habitica_entry.entry_id) + await hass.async_block_till_done() + + assert hass.services.has_service(DOMAIN, SERVICE_API_CALL) + + assert len(capture_api_call_success) == 0 + 
TEST_SERVICE_DATA = { - ATTR_NAME: "test-user", + ATTR_NAME: "test_user", ATTR_PATH: ["tasks", "user", "post"], ATTR_ARGS: TEST_API_CALL_ARGS, } @@ -92,77 +139,6 @@ async def test_service_call( del captured_data[ATTR_DATA] assert captured_data == TEST_SERVICE_DATA + assert await hass.config_entries.async_unload(habitica_entry.entry_id) -@pytest.mark.parametrize( - ("status"), [HTTPStatus.NOT_FOUND, HTTPStatus.TOO_MANY_REQUESTS] -) -async def test_config_entry_not_ready( - hass: HomeAssistant, - config_entry: MockConfigEntry, - aioclient_mock: AiohttpClientMocker, - status: HTTPStatus, -) -> None: - """Test config entry not ready.""" - - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/user", - status=status, - ) - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_coordinator_update_failed( - hass: HomeAssistant, - config_entry: MockConfigEntry, - aioclient_mock: AiohttpClientMocker, -) -> None: - """Test coordinator update failed.""" - - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/user", - json=load_json_object_fixture("user.json", DOMAIN), - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/tasks/user", - status=HTTPStatus.NOT_FOUND, - ) - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_coordinator_rate_limited( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - caplog: pytest.LogCaptureFixture, - freezer: FrozenDateTimeFactory, -) -> None: - """Test coordinator when rate limited.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.clear_requests() - mock_habitica.get( - f"{DEFAULT_URL}/api/v3/user", - status=HTTPStatus.TOO_MANY_REQUESTS, - ) - - with caplog.at_level(logging.DEBUG): - freezer.tick(datetime.timedelta(seconds=60)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert "Rate limit exceeded, will try again later" in caplog.text + assert not hass.services.has_service(DOMAIN, SERVICE_API_CALL) diff --git a/tests/components/habitica/test_sensor.py b/tests/components/habitica/test_sensor.py deleted file mode 100644 index defe5a270ae..00000000000 --- a/tests/components/habitica/test_sensor.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Test Habitica sensor platform.""" - -from collections.abc import Generator -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.habitica.const import DOMAIN -from homeassistant.components.habitica.sensor import HabitipySensorEntity -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er, issue_registry as ir - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.fixture(autouse=True) -def sensor_only() -> Generator[None]: - """Enable only the sensor platform.""" - with patch( - "homeassistant.components.habitica.PLATFORMS", - [Platform.SENSOR], - ): - yield - - -@pytest.mark.usefixtures("mock_habitica", "entity_registry_enabled_by_default") -async def test_sensors( - 
hass: HomeAssistant, - config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Test setup of the Habitica sensor platform.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -@pytest.mark.usefixtures("mock_habitica", "entity_registry_enabled_by_default") -async def test_sensor_deprecation_issue( - hass: HomeAssistant, - config_entry: MockConfigEntry, - issue_registry: ir.IssueRegistry, -) -> None: - """Test task sensor deprecation issue.""" - - with patch( - "homeassistant.components.habitica.sensor.entity_used_in", return_value=True - ): - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - assert issue_registry.async_get_issue( - domain=DOMAIN, - issue_id=f"deprecated_task_entity_{HabitipySensorEntity.TODOS}", - ) - assert issue_registry.async_get_issue( - domain=DOMAIN, - issue_id=f"deprecated_task_entity_{HabitipySensorEntity.DAILIES}", - ) diff --git a/tests/components/habitica/test_services.py b/tests/components/habitica/test_services.py deleted file mode 100644 index 403779bcbfb..00000000000 --- a/tests/components/habitica/test_services.py +++ /dev/null @@ -1,548 +0,0 @@ -"""Test Habitica actions.""" - -from collections.abc import Generator -from http import HTTPStatus -from typing import Any -from unittest.mock import patch - -import pytest - -from homeassistant.components.habitica.const import ( - ATTR_CONFIG_ENTRY, - ATTR_DIRECTION, - ATTR_SKILL, - ATTR_TASK, - DEFAULT_URL, - DOMAIN, - SERVICE_ABORT_QUEST, - SERVICE_ACCEPT_QUEST, - SERVICE_CANCEL_QUEST, - SERVICE_CAST_SKILL, - SERVICE_LEAVE_QUEST, - SERVICE_REJECT_QUEST, - SERVICE_SCORE_HABIT, - SERVICE_SCORE_REWARD, - SERVICE_START_QUEST, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError - -from .conftest import mock_called_with - -from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker - -REQUEST_EXCEPTION_MSG = "Unable to connect to Habitica, try again later" -RATE_LIMIT_EXCEPTION_MSG = "Rate limit exceeded, try again later" - - -@pytest.fixture(autouse=True) -def services_only() -> Generator[None]: - """Enable only services.""" - with patch( - "homeassistant.components.habitica.PLATFORMS", - [], - ): - yield - - -@pytest.fixture(autouse=True) -async def load_entry( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - services_only: Generator, -) -> None: - """Load config entry.""" - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - -@pytest.mark.parametrize( - ("service_data", "item", "target_id"), - [ - ( - { - ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ATTR_SKILL: "pickpocket", - }, - "pickPocket", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ), - ( - { - ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ATTR_SKILL: "backstab", - }, - "backStab", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ), - ( - { - ATTR_TASK: 
"2f6fcabc-f670-4ec3-ba65-817e8deea490", - ATTR_SKILL: "fireball", - }, - "fireball", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ), - ( - { - ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ATTR_SKILL: "smash", - }, - "smash", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ), - ( - { - ATTR_TASK: "Rechnungen bezahlen", - ATTR_SKILL: "smash", - }, - "smash", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ), - ( - { - ATTR_TASK: "pay_bills", - ATTR_SKILL: "smash", - }, - "smash", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ), - ], - ids=[ - "cast pickpocket", - "cast backstab", - "cast fireball", - "cast smash", - "select task by name", - "select task_by_alias", - ], -) -async def test_cast_skill( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - service_data: dict[str, Any], - item: str, - target_id: str, -) -> None: - """Test Habitica cast skill action.""" - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", - json={"success": True, "data": {}}, - ) - - await hass.services.async_call( - DOMAIN, - SERVICE_CAST_SKILL, - service_data={ - ATTR_CONFIG_ENTRY: config_entry.entry_id, - **service_data, - }, - return_response=True, - blocking=True, - ) - - assert mock_called_with( - mock_habitica, - "post", - f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", - ) - - -@pytest.mark.parametrize( - ( - "service_data", - "http_status", - "expected_exception", - "expected_exception_msg", - ), - [ - ( - { - ATTR_TASK: "task-not-found", - ATTR_SKILL: "smash", - }, - HTTPStatus.OK, - ServiceValidationError, - "Unable to complete action, could not find the task 'task-not-found'", - ), - ( - { - ATTR_TASK: "Rechnungen bezahlen", - ATTR_SKILL: "smash", - }, - HTTPStatus.TOO_MANY_REQUESTS, - ServiceValidationError, - RATE_LIMIT_EXCEPTION_MSG, - ), - ( - { - ATTR_TASK: "Rechnungen bezahlen", - ATTR_SKILL: "smash", - }, - HTTPStatus.NOT_FOUND, - ServiceValidationError, - "Unable to cast skill, your character does not have the skill or spell smash", - ), - ( - { - ATTR_TASK: "Rechnungen bezahlen", - ATTR_SKILL: "smash", - }, - HTTPStatus.UNAUTHORIZED, - ServiceValidationError, - "Unable to cast skill, not enough mana. 
Your character has 50 MP, but the skill costs 10 MP", - ), - ( - { - ATTR_TASK: "Rechnungen bezahlen", - ATTR_SKILL: "smash", - }, - HTTPStatus.BAD_REQUEST, - HomeAssistantError, - REQUEST_EXCEPTION_MSG, - ), - ], -) -@pytest.mark.usefixtures("mock_habitica") -async def test_cast_skill_exceptions( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - service_data: dict[str, Any], - http_status: HTTPStatus, - expected_exception: Exception, - expected_exception_msg: str, -) -> None: - """Test Habitica cast skill action exceptions.""" - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/user/class/cast/smash?targetId=2f6fcabc-f670-4ec3-ba65-817e8deea490", - json={"success": True, "data": {}}, - status=http_status, - ) - - with pytest.raises(expected_exception, match=expected_exception_msg): - await hass.services.async_call( - DOMAIN, - SERVICE_CAST_SKILL, - service_data={ - ATTR_CONFIG_ENTRY: config_entry.entry_id, - **service_data, - }, - return_response=True, - blocking=True, - ) - - -@pytest.mark.usefixtures("mock_habitica") -async def test_get_config_entry( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, -) -> None: - """Test Habitica config entry exceptions.""" - - with pytest.raises( - ServiceValidationError, - match="The selected character is not configured in Home Assistant", - ): - await hass.services.async_call( - DOMAIN, - SERVICE_CAST_SKILL, - service_data={ - ATTR_CONFIG_ENTRY: "0000000000000000", - ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ATTR_SKILL: "smash", - }, - return_response=True, - blocking=True, - ) - - assert await hass.config_entries.async_unload(config_entry.entry_id) - - with pytest.raises( - ServiceValidationError, - match="The selected character is currently not loaded or disabled in Home Assistant", - ): - await hass.services.async_call( - DOMAIN, - SERVICE_CAST_SKILL, - service_data={ - ATTR_CONFIG_ENTRY: config_entry.entry_id, - ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ATTR_SKILL: "smash", - }, - return_response=True, - blocking=True, - ) - - -@pytest.mark.parametrize( - ("service", "command"), - [ - (SERVICE_ABORT_QUEST, "abort"), - (SERVICE_ACCEPT_QUEST, "accept"), - (SERVICE_CANCEL_QUEST, "cancel"), - (SERVICE_LEAVE_QUEST, "leave"), - (SERVICE_REJECT_QUEST, "reject"), - (SERVICE_START_QUEST, "force-start"), - ], - ids=[], -) -async def test_handle_quests( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - service: str, - command: str, -) -> None: - """Test Habitica actions for quest handling.""" - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/groups/party/quests/{command}", - json={"success": True, "data": {}}, - ) - - await hass.services.async_call( - DOMAIN, - service, - service_data={ATTR_CONFIG_ENTRY: config_entry.entry_id}, - return_response=True, - blocking=True, - ) - - assert mock_called_with( - mock_habitica, - "post", - f"{DEFAULT_URL}/api/v3/groups/party/quests/{command}", - ) - - -@pytest.mark.parametrize( - ( - "http_status", - "expected_exception", - "expected_exception_msg", - ), - [ - ( - HTTPStatus.TOO_MANY_REQUESTS, - ServiceValidationError, - RATE_LIMIT_EXCEPTION_MSG, - ), - ( - HTTPStatus.NOT_FOUND, - ServiceValidationError, - "Unable to complete action, quest or group not found", - ), - ( - HTTPStatus.UNAUTHORIZED, - ServiceValidationError, - "Action not allowed, only quest leader or group leader can perform this action", - ), - ( - HTTPStatus.BAD_REQUEST, - HomeAssistantError, - 
REQUEST_EXCEPTION_MSG, - ), - ], -) -@pytest.mark.usefixtures("mock_habitica") -async def test_handle_quests_exceptions( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - http_status: HTTPStatus, - expected_exception: Exception, - expected_exception_msg: str, -) -> None: - """Test Habitica handle quests action exceptions.""" - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/groups/party/quests/accept", - json={"success": True, "data": {}}, - status=http_status, - ) - - with pytest.raises(expected_exception, match=expected_exception_msg): - await hass.services.async_call( - DOMAIN, - SERVICE_ACCEPT_QUEST, - service_data={ATTR_CONFIG_ENTRY: config_entry.entry_id}, - return_response=True, - blocking=True, - ) - - -@pytest.mark.parametrize( - ("service", "service_data", "task_id"), - [ - ( - SERVICE_SCORE_HABIT, - { - ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", - ATTR_DIRECTION: "up", - }, - "e97659e0-2c42-4599-a7bb-00282adc410d", - ), - ( - SERVICE_SCORE_HABIT, - { - ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", - ATTR_DIRECTION: "down", - }, - "e97659e0-2c42-4599-a7bb-00282adc410d", - ), - ( - SERVICE_SCORE_REWARD, - { - ATTR_TASK: "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", - }, - "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", - ), - ( - SERVICE_SCORE_HABIT, - { - ATTR_TASK: "Füge eine Aufgabe zu Habitica hinzu", - ATTR_DIRECTION: "up", - }, - "e97659e0-2c42-4599-a7bb-00282adc410d", - ), - ( - SERVICE_SCORE_HABIT, - { - ATTR_TASK: "create_a_task", - ATTR_DIRECTION: "up", - }, - "e97659e0-2c42-4599-a7bb-00282adc410d", - ), - ], - ids=[ - "habit score up", - "habit score down", - "buy reward", - "match task by name", - "match task by alias", - ], -) -async def test_score_task( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - service: str, - service_data: dict[str, Any], - task_id: str, -) -> None: - """Test Habitica score task action.""" - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/{task_id}/score/{service_data.get(ATTR_DIRECTION, "up")}", - json={"success": True, "data": {}}, - ) - - await hass.services.async_call( - DOMAIN, - service, - service_data={ - ATTR_CONFIG_ENTRY: config_entry.entry_id, - **service_data, - }, - return_response=True, - blocking=True, - ) - - assert mock_called_with( - mock_habitica, - "post", - f"{DEFAULT_URL}/api/v3/tasks/{task_id}/score/{service_data.get(ATTR_DIRECTION, "up")}", - ) - - -@pytest.mark.parametrize( - ( - "service_data", - "http_status", - "expected_exception", - "expected_exception_msg", - ), - [ - ( - { - ATTR_TASK: "task does not exist", - ATTR_DIRECTION: "up", - }, - HTTPStatus.OK, - ServiceValidationError, - "Unable to complete action, could not find the task 'task does not exist'", - ), - ( - { - ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", - ATTR_DIRECTION: "up", - }, - HTTPStatus.TOO_MANY_REQUESTS, - ServiceValidationError, - RATE_LIMIT_EXCEPTION_MSG, - ), - ( - { - ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", - ATTR_DIRECTION: "up", - }, - HTTPStatus.BAD_REQUEST, - HomeAssistantError, - REQUEST_EXCEPTION_MSG, - ), - ( - { - ATTR_TASK: "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", - ATTR_DIRECTION: "up", - }, - HTTPStatus.UNAUTHORIZED, - HomeAssistantError, - "Unable to buy reward, not enough gold. 
Your character has 137.63 GP, but the reward costs 10 GP", - ), - ], -) -@pytest.mark.usefixtures("mock_habitica") -async def test_score_task_exceptions( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - service_data: dict[str, Any], - http_status: HTTPStatus, - expected_exception: Exception, - expected_exception_msg: str, -) -> None: - """Test Habitica score task action exceptions.""" - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/e97659e0-2c42-4599-a7bb-00282adc410d/score/up", - json={"success": True, "data": {}}, - status=http_status, - ) - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b/score/up", - json={"success": True, "data": {}}, - status=http_status, - ) - - with pytest.raises(expected_exception, match=expected_exception_msg): - await hass.services.async_call( - DOMAIN, - SERVICE_SCORE_HABIT, - service_data={ - ATTR_CONFIG_ENTRY: config_entry.entry_id, - **service_data, - }, - return_response=True, - blocking=True, - ) diff --git a/tests/components/habitica/test_switch.py b/tests/components/habitica/test_switch.py deleted file mode 100644 index 55ba7b19b22..00000000000 --- a/tests/components/habitica/test_switch.py +++ /dev/null @@ -1,138 +0,0 @@ -"""Tests for the Habitica switch platform.""" - -from collections.abc import Generator -from http import HTTPStatus -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.habitica.const import DEFAULT_URL -from homeassistant.components.switch import ( - DOMAIN as SWITCH_DOMAIN, - SERVICE_TOGGLE, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import entity_registry as er - -from .conftest import mock_called_with - -from tests.common import MockConfigEntry, snapshot_platform -from tests.test_util.aiohttp import AiohttpClientMocker - - -@pytest.fixture(autouse=True) -def switch_only() -> Generator[None]: - """Enable only the switch platform.""" - with patch( - "homeassistant.components.habitica.PLATFORMS", - [Platform.SWITCH], - ): - yield - - -@pytest.mark.usefixtures("mock_habitica") -async def test_switch( - hass: HomeAssistant, - config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Test switch entities.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -@pytest.mark.parametrize( - ("service_call"), - [ - SERVICE_TURN_ON, - SERVICE_TURN_OFF, - SERVICE_TOGGLE, - ], -) -async def test_turn_on_off_toggle( - hass: HomeAssistant, - config_entry: MockConfigEntry, - service_call: str, - mock_habitica: AiohttpClientMocker, -) -> None: - """Test switch turn on/off, toggle method.""" - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/user/sleep", - json={"success": True, "data": False}, - ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - await hass.services.async_call( - SWITCH_DOMAIN, 
- service_call, - {ATTR_ENTITY_ID: "switch.test_user_rest_in_the_inn"}, - blocking=True, - ) - - assert mock_called_with(mock_habitica, "post", f"{DEFAULT_URL}/api/v3/user/sleep") - - -@pytest.mark.parametrize( - ("service_call"), - [ - SERVICE_TURN_ON, - SERVICE_TURN_OFF, - SERVICE_TOGGLE, - ], -) -@pytest.mark.parametrize( - ("status_code", "exception"), - [ - (HTTPStatus.TOO_MANY_REQUESTS, ServiceValidationError), - (HTTPStatus.BAD_REQUEST, HomeAssistantError), - ], -) -async def test_turn_on_off_toggle_exceptions( - hass: HomeAssistant, - config_entry: MockConfigEntry, - service_call: str, - mock_habitica: AiohttpClientMocker, - status_code: HTTPStatus, - exception: Exception, -) -> None: - """Test switch turn on/off, toggle method.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/user/sleep", - status=status_code, - json={"success": True, "data": False}, - ) - - with pytest.raises(expected_exception=exception): - await hass.services.async_call( - SWITCH_DOMAIN, - service_call, - {ATTR_ENTITY_ID: "switch.test_user_rest_in_the_inn"}, - blocking=True, - ) - - assert mock_called_with(mock_habitica, "post", f"{DEFAULT_URL}/api/v3/user/sleep") diff --git a/tests/components/habitica/test_todo.py b/tests/components/habitica/test_todo.py deleted file mode 100644 index c9a4b3dd37a..00000000000 --- a/tests/components/habitica/test_todo.py +++ /dev/null @@ -1,700 +0,0 @@ -"""Tests for Habitica todo platform.""" - -from collections.abc import Generator -from datetime import datetime -from http import HTTPStatus -import json -import re -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.habitica.const import DEFAULT_URL, DOMAIN -from homeassistant.components.todo import ( - ATTR_DESCRIPTION, - ATTR_DUE_DATE, - ATTR_ITEM, - ATTR_RENAME, - ATTR_STATUS, - DOMAIN as TODO_DOMAIN, - TodoServices, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er - -from .conftest import mock_called_with - -from tests.common import ( - MockConfigEntry, - async_get_persistent_notifications, - load_json_object_fixture, - snapshot_platform, -) -from tests.test_util.aiohttp import AiohttpClientMocker -from tests.typing import WebSocketGenerator - - -@pytest.fixture(autouse=True) -def switch_only() -> Generator[None]: - """Enable only the todo platform.""" - with patch( - "homeassistant.components.habitica.PLATFORMS", - [Platform.TODO], - ): - yield - - -@pytest.mark.usefixtures("mock_habitica") -async def test_todos( - hass: HomeAssistant, - config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Test todo platform.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -@pytest.mark.parametrize( - ("entity_id"), - [ - "todo.test_user_to_do_s", - "todo.test_user_dailies", - ], -) -@pytest.mark.usefixtures("mock_habitica") -async def 
test_todo_items( - hass: HomeAssistant, - config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_id: str, -) -> None: - """Test items on todo lists.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - result = await hass.services.async_call( - TODO_DOMAIN, - TodoServices.GET_ITEMS, - {}, - target={ATTR_ENTITY_ID: entity_id}, - blocking=True, - return_response=True, - ) - - assert result == snapshot - - -@pytest.mark.freeze_time("2024-09-21 00:00:00") -@pytest.mark.parametrize( - ("entity_id", "uid"), - [ - ("todo.test_user_to_do_s", "88de7cd9-af2b-49ce-9afd-bf941d87336b"), - ("todo.test_user_dailies", "f2c85972-1a19-4426-bc6d-ce3337b9d99f"), - ], - ids=["todo", "daily"], -) -async def test_complete_todo_item( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - snapshot: SnapshotAssertion, - entity_id: str, - uid: str, -) -> None: - """Test completing an item on the todo list.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/up", - json=load_json_object_fixture("score_with_drop.json", DOMAIN), - ) - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: uid, ATTR_STATUS: "completed"}, - target={ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - assert mock_called_with( - mock_habitica, "post", f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/up" - ) - - # Test notification for item drop - notifications = async_get_persistent_notifications(hass) - assert len(notifications) == 1 - _id, *_ = notifications - assert snapshot == (notifications[_id]["title"], notifications[_id]["message"]) - - -@pytest.mark.parametrize( - ("entity_id", "uid"), - [ - ("todo.test_user_to_do_s", "162f0bbe-a097-4a06-b4f4-8fbeed85d2ba"), - ("todo.test_user_dailies", "564b9ac9-c53d-4638-9e7f-1cd96fe19baa"), - ], - ids=["todo", "daily"], -) -async def test_uncomplete_todo_item( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - entity_id: str, - uid: str, -) -> None: - """Test uncompleting an item on the todo list.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/down", - json={"data": {}, "success": True}, - ) - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: uid, ATTR_STATUS: "needs_action"}, - target={ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - assert mock_called_with( - mock_habitica, "post", f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/down" - ) - - -@pytest.mark.parametrize( - ("uid", "status"), - [ - ("88de7cd9-af2b-49ce-9afd-bf941d87336b", "completed"), - ("162f0bbe-a097-4a06-b4f4-8fbeed85d2ba", "needs_action"), - ], - ids=["completed", "needs_action"], -) -async def test_complete_todo_item_exception( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - uid: str, - status: str, -) -> None: - """Test exception when completing/uncompleting an item on the todo list.""" - - config_entry.add_to_hass(hass) - await 
hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.post( - re.compile(f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/.+"), - status=HTTPStatus.NOT_FOUND, - ) - with pytest.raises( - expected_exception=ServiceValidationError, - match=r"Unable to update the score for your Habitica to-do `.+`, please try again", - ): - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: uid, ATTR_STATUS: status}, - target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, - blocking=True, - ) - - -@pytest.mark.parametrize( - ("entity_id", "uid", "date"), - [ - ( - "todo.test_user_to_do_s", - "88de7cd9-af2b-49ce-9afd-bf941d87336b", - "2024-07-30", - ), - ( - "todo.test_user_dailies", - "f2c85972-1a19-4426-bc6d-ce3337b9d99f", - None, - ), - ], - ids=["todo", "daily"], -) -async def test_update_todo_item( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - entity_id: str, - uid: str, - date: str, -) -> None: - """Test update details of a item on the todo list.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.put( - f"{DEFAULT_URL}/api/v3/tasks/{uid}", - json={"data": {}, "success": True}, - ) - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - { - ATTR_ITEM: uid, - ATTR_RENAME: "test-summary", - ATTR_DESCRIPTION: "test-description", - ATTR_DUE_DATE: date, - }, - target={ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - mock_call = mock_called_with( - mock_habitica, "PUT", f"{DEFAULT_URL}/api/v3/tasks/{uid}" - ) - assert mock_call - assert json.loads(mock_call[2]) == { - "date": date, - "notes": "test-description", - "text": "test-summary", - } - - -async def test_update_todo_item_exception( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, -) -> None: - """Test exception when update item on the todo list.""" - uid = "88de7cd9-af2b-49ce-9afd-bf941d87336b" - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.put( - f"{DEFAULT_URL}/api/v3/tasks/{uid}", - status=HTTPStatus.NOT_FOUND, - ) - with pytest.raises( - expected_exception=ServiceValidationError, - match="Unable to update the Habitica to-do `test-summary`, please try again", - ): - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - { - ATTR_ITEM: uid, - ATTR_RENAME: "test-summary", - ATTR_DESCRIPTION: "test-description", - ATTR_DUE_DATE: "2024-07-30", - }, - target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, - blocking=True, - ) - - -async def test_add_todo_item( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, -) -> None: - """Test add a todo item to the todo list.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/user", - json={"data": {}, "success": True}, - status=HTTPStatus.CREATED, - ) - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.ADD_ITEM, - { - ATTR_ITEM: "test-summary", - ATTR_DESCRIPTION: 
"test-description", - ATTR_DUE_DATE: "2024-07-30", - }, - target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, - blocking=True, - ) - - mock_call = mock_called_with( - mock_habitica, - "post", - f"{DEFAULT_URL}/api/v3/tasks/user", - ) - assert mock_call - assert json.loads(mock_call[2]) == { - "date": "2024-07-30", - "notes": "test-description", - "text": "test-summary", - "type": "todo", - } - - -async def test_add_todo_item_exception( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, -) -> None: - """Test exception when adding a todo item to the todo list.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/user", - status=HTTPStatus.NOT_FOUND, - ) - with pytest.raises( - expected_exception=ServiceValidationError, - match="Unable to create new to-do `test-summary` for Habitica, please try again", - ): - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.ADD_ITEM, - { - ATTR_ITEM: "test-summary", - ATTR_DESCRIPTION: "test-description", - ATTR_DUE_DATE: "2024-07-30", - }, - target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, - blocking=True, - ) - - -async def test_delete_todo_item( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, -) -> None: - """Test deleting a todo item from the todo list.""" - - uid = "2f6fcabc-f670-4ec3-ba65-817e8deea490" - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.delete( - f"{DEFAULT_URL}/api/v3/tasks/{uid}", - json={"data": {}, "success": True}, - ) - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: uid}, - target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, - blocking=True, - ) - - assert mock_called_with( - mock_habitica, "delete", f"{DEFAULT_URL}/api/v3/tasks/{uid}" - ) - - -async def test_delete_todo_item_exception( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, -) -> None: - """Test exception when deleting a todo item from the todo list.""" - - uid = "2f6fcabc-f670-4ec3-ba65-817e8deea490" - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.delete( - f"{DEFAULT_URL}/api/v3/tasks/{uid}", - status=HTTPStatus.NOT_FOUND, - ) - with pytest.raises( - expected_exception=ServiceValidationError, - match="Unable to delete item from Habitica to-do list, please try again", - ): - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: uid}, - target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, - blocking=True, - ) - - -async def test_delete_completed_todo_items( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, -) -> None: - """Test deleting completed todo items from the todo list.""" - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/clearCompletedTodos", - json={"data": {}, "success": True}, - ) - await 
hass.services.async_call( - TODO_DOMAIN, - TodoServices.REMOVE_COMPLETED_ITEMS, - {}, - target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, - blocking=True, - ) - - assert mock_called_with( - mock_habitica, "post", f"{DEFAULT_URL}/api/v3/tasks/clearCompletedTodos" - ) - - -async def test_delete_completed_todo_items_exception( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, -) -> None: - """Test exception when deleting completed todo items from the todo list.""" - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/clearCompletedTodos", - status=HTTPStatus.NOT_FOUND, - ) - with pytest.raises( - expected_exception=ServiceValidationError, - match="Unable to delete completed to-do items from Habitica to-do list, please try again", - ): - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.REMOVE_COMPLETED_ITEMS, - {}, - target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, - blocking=True, - ) - - -@pytest.mark.parametrize( - ("entity_id", "uid", "previous_uid"), - [ - ( - "todo.test_user_to_do_s", - "1aa3137e-ef72-4d1f-91ee-41933602f438", - "88de7cd9-af2b-49ce-9afd-bf941d87336b", - ), - ( - "todo.test_user_dailies", - "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1", - "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", - ), - ], - ids=["todo", "daily"], -) -async def test_move_todo_item( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - hass_ws_client: WebSocketGenerator, - entity_id: str, - uid: str, - previous_uid: str, -) -> None: - """Test move todo items.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - for pos in (0, 1): - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/{uid}/move/to/{pos}", - json={"data": {}, "success": True}, - ) - - client = await hass_ws_client() - # move to second position - data = { - "id": id, - "type": "todo/item/move", - "entity_id": entity_id, - "uid": uid, - "previous_uid": previous_uid, - } - await client.send_json_auto_id(data) - resp = await client.receive_json() - assert resp.get("success") - - # move to top position - data = { - "id": id, - "type": "todo/item/move", - "entity_id": entity_id, - "uid": uid, - } - await client.send_json_auto_id(data) - resp = await client.receive_json() - assert resp.get("success") - - for pos in (0, 1): - assert mock_called_with( - mock_habitica, - "post", - f"{DEFAULT_URL}/api/v3/tasks/{uid}/move/to/{pos}", - ) - - -async def test_move_todo_item_exception( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test exception when moving todo item.""" - - uid = "1aa3137e-ef72-4d1f-91ee-41933602f438" - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/{uid}/move/to/0", - status=HTTPStatus.NOT_FOUND, - ) - - client = await hass_ws_client() - - data = { - "id": id, - "type": "todo/item/move", - "entity_id": "todo.test_user_to_do_s", - "uid": uid, - } - await client.send_json_auto_id(data) - resp = await client.receive_json() - 
assert resp.get("success") is False - - -@pytest.mark.parametrize( - ("fixture", "calculated_due_date"), - [ - ("duedate_fixture_1.json", (2024, 9, 23)), - ("duedate_fixture_2.json", (2024, 9, 24)), - ("duedate_fixture_3.json", (2024, 10, 23)), - ("duedate_fixture_4.json", (2024, 10, 23)), - ("duedate_fixture_5.json", (2024, 9, 28)), - ("duedate_fixture_6.json", (2024, 10, 21)), - ("duedate_fixture_7.json", None), - ("duedate_fixture_8.json", None), - ], - ids=[ - "default", - "daily starts on startdate", - "monthly starts on startdate", - "yearly starts on startdate", - "weekly", - "monthly starts on fixed day", - "grey daily", - "empty nextDue", - ], -) -@pytest.mark.usefixtures("set_tz") -async def test_next_due_date( - hass: HomeAssistant, - fixture: str, - calculated_due_date: tuple | None, - config_entry: MockConfigEntry, - aioclient_mock: AiohttpClientMocker, -) -> None: - """Test next_due_date calculation.""" - - dailies_entity = "todo.test_user_dailies" - - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/user", json=load_json_object_fixture("user.json", DOMAIN) - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/tasks/user", - params={"type": "completedTodos"}, - json={"data": []}, - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/tasks/user", - json=load_json_object_fixture(fixture, DOMAIN), - ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/content", - params={"language": "en"}, - json=load_json_object_fixture("content.json", DOMAIN), - ) - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - result = await hass.services.async_call( - TODO_DOMAIN, - TodoServices.GET_ITEMS, - {}, - target={ATTR_ENTITY_ID: dailies_entity}, - blocking=True, - return_response=True, - ) - - assert ( - result[dailies_entity]["items"][0].get("due") is None - if not calculated_due_date - else datetime(*calculated_due_date).date() - ) diff --git a/tests/components/hardware/test_websocket_api.py b/tests/components/hardware/test_websocket_api.py index 1379bdba120..e8099069a9c 100644 --- a/tests/components/hardware/test_websocket_api.py +++ b/tests/components/hardware/test_websocket_api.py @@ -61,7 +61,7 @@ async def test_system_status_subscription( response = await client.receive_json() assert response["success"] - VirtualMem = namedtuple("VirtualMemory", ["available", "percent", "total"]) # noqa: PYI024 + VirtualMem = namedtuple("VirtualMemory", ["available", "percent", "total"]) vmem = VirtualMem(10 * 1024**2, 50, 30 * 1024**2) with ( diff --git a/tests/components/harmony/conftest.py b/tests/components/harmony/conftest.py index 759770e9746..fb4be73aa72 100644 --- a/tests/components/harmony/conftest.py +++ b/tests/components/harmony/conftest.py @@ -1,10 +1,10 @@ """Fixtures for harmony tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from aioharmony.const import ClientCallbackType import pytest +from typing_extensions import Generator from homeassistant.components.harmony.const import ACTIVITY_POWER_OFF, DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME diff --git a/tests/components/harmony/test_remote.py b/tests/components/harmony/test_remote.py index 8f488f9bf0d..c0ec2235b84 100644 --- a/tests/components/harmony/test_remote.py +++ b/tests/components/harmony/test_remote.py @@ -1,7 +1,6 @@ """Test the Logitech Harmony Hub remote.""" from datetime import timedelta -from typing import Any from 
aioharmony.const import SendCommandDevice @@ -388,9 +387,7 @@ async def test_sync( mock_write_config.assert_called() -async def _send_commands_and_wait( - hass: HomeAssistant, service_data: dict[str, Any] -) -> None: +async def _send_commands_and_wait(hass, service_data): await hass.services.async_call( REMOTE_DOMAIN, SERVICE_SEND_COMMAND, diff --git a/tests/components/harmony/test_select.py b/tests/components/harmony/test_select.py index 1451f146b98..2568feb1412 100644 --- a/tests/components/harmony/test_select.py +++ b/tests/components/harmony/test_select.py @@ -91,9 +91,7 @@ async def test_select_option( assert hass.states.is_state(ENTITY_SELECT, "power_off") -async def _select_option_and_wait( - hass: HomeAssistant, entity: str, option: str -) -> None: +async def _select_option_and_wait(hass, entity, option): await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, diff --git a/tests/components/hassio/common.py b/tests/components/hassio/common.py deleted file mode 100644 index 82d3564440b..00000000000 --- a/tests/components/hassio/common.py +++ /dev/null @@ -1,226 +0,0 @@ -"""Provide common test tools for hassio.""" - -from __future__ import annotations - -from collections.abc import Generator -from dataclasses import fields -import logging -from types import MethodType -from typing import Any -from unittest.mock import AsyncMock, Mock, patch - -from aiohasupervisor.models import ( - AddonsOptions, - AddonsStats, - AddonStage, - InstalledAddonComplete, - Repository, - StoreAddon, - StoreAddonComplete, -) - -from homeassistant.components.hassio.addon_manager import AddonManager -from homeassistant.core import HomeAssistant - -LOGGER = logging.getLogger(__name__) -INSTALLED_ADDON_FIELDS = [field.name for field in fields(InstalledAddonComplete)] -STORE_ADDON_FIELDS = [field.name for field in fields(StoreAddonComplete)] -ADDONS_STATS_FIELDS = [field.name for field in fields(AddonsStats)] - -MOCK_STORE_ADDONS = [ - StoreAddon( - name="test", - arch=[], - documentation=False, - advanced=False, - available=True, - build=False, - description="Test add-on service", - homeassistant=None, - icon=False, - logo=False, - repository="core", - slug="core_test", - stage=AddonStage.EXPERIMENTAL, - update_available=False, - url="https://example.com/addons/tree/master/test", - version_latest="1.0.0", - version="1.0.0", - installed=True, - ) -] - -MOCK_REPOSITORIES = [ - Repository( - slug="core", - name="Official add-ons", - source="core", - url="https://home-assistant.io/addons", - maintainer="Home Assistant", - ) -] - - -def mock_to_dict(obj: Mock, fields: list[str]) -> dict[str, Any]: - """Aiohasupervisor mocks to dictionary representation.""" - return { - field: getattr(obj, field) - for field in fields - if not isinstance(getattr(obj, field), Mock) - } - - -def mock_addon_manager(hass: HomeAssistant) -> AddonManager: - """Return an AddonManager instance.""" - return AddonManager(hass, LOGGER, "Test", "test_addon") - - -def mock_addon_store_info( - supervisor_client: AsyncMock, - addon_store_info_side_effect: Any | None, -) -> AsyncMock: - """Mock Supervisor add-on store info.""" - supervisor_client.store.addon_info.side_effect = addon_store_info_side_effect - - supervisor_client.store.addon_info.return_value = addon_info = Mock( - spec=StoreAddonComplete, - slug="test", - repository="core", - available=True, - installed=False, - update_available=False, - version="1.0.0", - supervisor_api=False, - supervisor_role="default", - ) - addon_info.name = "test" - addon_info.to_dict = MethodType( 
- lambda self: mock_to_dict(self, STORE_ADDON_FIELDS), - addon_info, - ) - return supervisor_client.store.addon_info - - -def mock_addon_info( - supervisor_client: AsyncMock, addon_info_side_effect: Any | None -) -> AsyncMock: - """Mock Supervisor add-on info.""" - supervisor_client.addons.addon_info.side_effect = addon_info_side_effect - - supervisor_client.addons.addon_info.return_value = addon_info = Mock( - spec=InstalledAddonComplete, - slug="test", - repository="core", - available=False, - hostname="", - options={}, - state="unknown", - update_available=False, - version=None, - supervisor_api=False, - supervisor_role="default", - ) - addon_info.name = "test" - addon_info.to_dict = MethodType( - lambda self: mock_to_dict(self, INSTALLED_ADDON_FIELDS), - addon_info, - ) - return supervisor_client.addons.addon_info - - -def mock_addon_not_installed( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on not installed.""" - addon_store_info.return_value.available = True - return addon_info - - -def mock_addon_installed( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on already installed but not running.""" - addon_store_info.return_value.available = True - addon_store_info.return_value.installed = True - addon_info.return_value.available = True - addon_info.return_value.hostname = "core-test-addon" - addon_info.return_value.state = "stopped" - addon_info.return_value.version = "1.0.0" - return addon_info - - -def mock_addon_running(addon_store_info: AsyncMock, addon_info: AsyncMock) -> AsyncMock: - """Mock add-on already running.""" - addon_store_info.return_value.available = True - addon_store_info.return_value.installed = True - addon_info.return_value.state = "started" - return addon_info - - -def mock_install_addon_side_effect( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> Any | None: - """Return the install add-on side effect.""" - - async def install_addon(addon: str): - """Mock install add-on.""" - addon_store_info.return_value.available = True - addon_store_info.return_value.installed = True - addon_info.return_value.available = True - addon_info.return_value.state = "stopped" - addon_info.return_value.version = "1.0.0" - - return install_addon - - -def mock_start_addon_side_effect( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> Any | None: - """Return the start add-on options side effect.""" - - async def start_addon(addon: str) -> None: - """Mock start add-on.""" - addon_store_info.return_value.available = True - addon_store_info.return_value.installed = True - addon_info.return_value.available = True - addon_info.return_value.state = "started" - - return start_addon - - -def mock_set_addon_options_side_effect(addon_options: dict[str, Any]) -> Any | None: - """Return the set add-on options side effect.""" - - async def set_addon_options(slug: str, options: AddonsOptions) -> None: - """Mock set add-on options.""" - addon_options.update(options.config) - - return set_addon_options - - -def mock_create_backup() -> Generator[AsyncMock]: - """Mock create backup.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_create_backup" - ) as create_backup: - yield create_backup - - -def mock_addon_stats(supervisor_client: AsyncMock) -> AsyncMock: - """Mock addon stats.""" - supervisor_client.addons.addon_stats.return_value = addon_stats = Mock( - spec=AddonsStats, - cpu_percent=0.99, - memory_usage=182611968, - memory_limit=3977146368, - memory_percent=4.59, - 
network_rx=362570232, - network_tx=82374138, - blk_read=46010945536, - blk_write=15051526144, - ) - addon_stats.to_dict = MethodType( - lambda self: mock_to_dict(self, ADDONS_STATS_FIELDS), - addon_stats, - ) - return supervisor_client.addons.addon_stats diff --git a/tests/components/hassio/conftest.py b/tests/components/hassio/conftest.py index 7075b9d6982..7b79dfe6179 100644 --- a/tests/components/hassio/conftest.py +++ b/tests/components/hassio/conftest.py @@ -1,15 +1,12 @@ """Fixtures for Hass.io.""" -from collections.abc import Generator import os import re -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import Mock, patch -from aiohasupervisor.models import AddonsStats, AddonState from aiohttp.test_utils import TestClient import pytest -from homeassistant.auth.models import RefreshToken from homeassistant.components.hassio.handler import HassIO, HassioAPIError from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -22,7 +19,7 @@ from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def disable_security_filter() -> Generator[None]: +def disable_security_filter(): """Disable the security filter to ensure the integration is secure.""" with patch( "homeassistant.components.http.security_filter.FILTERS", @@ -32,10 +29,14 @@ def disable_security_filter() -> Generator[None]: @pytest.fixture -def hassio_env(supervisor_is_connected: AsyncMock) -> Generator[None]: +def hassio_env(): """Fixture to inject hassio env.""" with ( patch.dict(os.environ, {"SUPERVISOR": "127.0.0.1"}), + patch( + "homeassistant.components.hassio.HassIO.is_connected", + return_value={"result": "ok", "data": {}}, + ), patch.dict(os.environ, {"SUPERVISOR_TOKEN": SUPERVISOR_TOKEN}), patch( "homeassistant.components.hassio.HassIO.get_info", @@ -47,12 +48,11 @@ def hassio_env(supervisor_is_connected: AsyncMock) -> Generator[None]: @pytest.fixture def hassio_stubs( - hassio_env: None, + hassio_env, hass: HomeAssistant, hass_client: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - supervisor_client: AsyncMock, -) -> RefreshToken: +): """Create mock hassio http client.""" with ( patch( @@ -74,6 +74,9 @@ def hassio_stubs( patch( "homeassistant.components.hassio.issues.SupervisorIssues.setup", ), + patch( + "homeassistant.components.hassio.HassIO.refresh_updates", + ), ): hass.set_state(CoreState.starting) hass.loop.run_until_complete(async_setup_component(hass, "hassio", {})) @@ -83,7 +86,7 @@ def hassio_stubs( @pytest.fixture def hassio_client( - hassio_stubs: RefreshToken, hass: HomeAssistant, hass_client: ClientSessionGenerator + hassio_stubs, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> TestClient: """Return a Hass.io HTTP client.""" return hass.loop.run_until_complete(hass_client()) @@ -91,9 +94,7 @@ def hassio_client( @pytest.fixture def hassio_noauth_client( - hassio_stubs: RefreshToken, - hass: HomeAssistant, - aiohttp_client: ClientSessionGenerator, + hassio_stubs, hass: HomeAssistant, aiohttp_client: ClientSessionGenerator ) -> TestClient: """Return a Hass.io HTTP client without auth.""" return hass.loop.run_until_complete(aiohttp_client(hass.http.app)) @@ -101,9 +102,7 @@ def hassio_noauth_client( @pytest.fixture async def hassio_client_supervisor( - hass: HomeAssistant, - aiohttp_client: ClientSessionGenerator, - hassio_stubs: RefreshToken, + hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, hassio_stubs ) -> TestClient: """Return an authenticated HTTP 
client.""" access_token = hass.auth.async_create_access_token(hassio_stubs) @@ -114,9 +113,7 @@ async def hassio_client_supervisor( @pytest.fixture -def hassio_handler( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> Generator[HassIO]: +async def hassio_handler(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker): """Create mock hassio handler.""" with patch.dict(os.environ, {"SUPERVISOR_TOKEN": SUPERVISOR_TOKEN}): yield HassIO(hass.loop, async_get_clientsession(hass), "127.0.0.1") @@ -124,12 +121,7 @@ def hassio_handler( @pytest.fixture def all_setup_requests( - aioclient_mock: AiohttpClientMocker, - request: pytest.FixtureRequest, - addon_installed: AsyncMock, - store_info: AsyncMock, - addon_changelog: AsyncMock, - addon_stats: AsyncMock, + aioclient_mock: AiohttpClientMocker, request: pytest.FixtureRequest ) -> None: """Mock all setup requests.""" include_addons = hasattr(request, "param") and request.param.get( @@ -137,6 +129,7 @@ def all_setup_requests( ) aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) + aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -149,6 +142,13 @@ def all_setup_requests( }, }, ) + aioclient_mock.get( + "http://127.0.0.1/store", + json={ + "result": "ok", + "data": {"addons": [], "repositories": []}, + }, + ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -217,32 +217,46 @@ def all_setup_requests( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) + aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) - addon_installed.return_value.update_available = False - addon_installed.return_value.version = "1.0.0" - addon_installed.return_value.version_latest = "1.0.0" - addon_installed.return_value.repository = "core" - addon_installed.return_value.state = AddonState.STARTED - addon_installed.return_value.icon = False - - def mock_addon_info(slug: str): - if slug == "test": - addon_installed.return_value.name = "test" - addon_installed.return_value.slug = "test" - addon_installed.return_value.url = ( - "https://github.com/home-assistant/addons/test" - ) - addon_installed.return_value.auto_update = True - else: - addon_installed.return_value.name = "test2" - addon_installed.return_value.slug = "test2" - addon_installed.return_value.url = "https://github.com" - addon_installed.return_value.auto_update = False - - return addon_installed.return_value - - addon_installed.side_effect = mock_addon_info - + aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") + aioclient_mock.get( + "http://127.0.0.1/addons/test/info", + json={ + "result": "ok", + "data": { + "name": "test", + "slug": "test", + "update_available": False, + "version": "1.0.0", + "version_latest": "1.0.0", + "repository": "core", + "state": "started", + "icon": False, + "url": "https://github.com/home-assistant/addons/test", + "auto_update": True, + }, + }, + ) + aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") + aioclient_mock.get( + "http://127.0.0.1/addons/test2/info", + json={ + "result": "ok", + "data": { + "name": "test2", + "slug": "test2", + "update_available": False, + "version": "1.0.0", + "version_latest": "1.0.0", + "repository": "core", + "state": "started", + "icon": False, + "url": "https://github.com", + "auto_update": False, + }, + }, + ) 
aioclient_mock.get( "http://127.0.0.1/core/stats", json={ @@ -275,32 +289,38 @@ def all_setup_requests( }, }, ) - - async def mock_addon_stats(addon: str) -> AddonsStats: - """Mock addon stats for test and test2.""" - if addon == "test2": - return AddonsStats( - cpu_percent=0.8, - memory_usage=51941376, - memory_limit=3977146368, - memory_percent=1.31, - network_rx=31338284, - network_tx=15692900, - blk_read=740077568, - blk_write=6004736, - ) - return AddonsStats( - cpu_percent=0.99, - memory_usage=182611968, - memory_limit=3977146368, - memory_percent=4.59, - network_rx=362570232, - network_tx=82374138, - blk_read=46010945536, - blk_write=15051526144, - ) - - addon_stats.side_effect = mock_addon_stats + aioclient_mock.get( + "http://127.0.0.1/addons/test/stats", + json={ + "result": "ok", + "data": { + "cpu_percent": 0.99, + "memory_usage": 182611968, + "memory_limit": 3977146368, + "memory_percent": 4.59, + "network_rx": 362570232, + "network_tx": 82374138, + "blk_read": 46010945536, + "blk_write": 15051526144, + }, + }, + ) + aioclient_mock.get( + "http://127.0.0.1/addons/test2/stats", + json={ + "result": "ok", + "data": { + "cpu_percent": 0.8, + "memory_usage": 51941376, + "memory_limit": 3977146368, + "memory_percent": 1.31, + "network_rx": 31338284, + "network_tx": 15692900, + "blk_read": 740077568, + "blk_write": 6004736, + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_addon_manager.py b/tests/components/hassio/test_addon_manager.py index 3d4644fbfd9..55c663d66cc 100644 --- a/tests/components/hassio/test_addon_manager.py +++ b/tests/components/hassio/test_addon_manager.py @@ -3,13 +3,12 @@ from __future__ import annotations import asyncio +import logging from typing import Any -from unittest.mock import AsyncMock, call -from uuid import uuid4 +from unittest.mock import AsyncMock, call, patch -from aiohasupervisor import SupervisorError -from aiohasupervisor.models import AddonsOptions, Discovery import pytest +from typing_extensions import Generator from homeassistant.components.hassio.addon_manager import ( AddonError, @@ -20,6 +19,154 @@ from homeassistant.components.hassio.addon_manager import ( from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.core import HomeAssistant +LOGGER = logging.getLogger(__name__) + + +@pytest.fixture(name="addon_manager") +def addon_manager_fixture(hass: HomeAssistant) -> AddonManager: + """Return an AddonManager instance.""" + return AddonManager(hass, LOGGER, "Test", "test_addon") + + +@pytest.fixture(name="addon_not_installed") +def addon_not_installed_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on not installed.""" + addon_store_info.return_value["available"] = True + return addon_info + + +@pytest.fixture(name="addon_installed") +def mock_addon_installed( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on already installed but not running.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["hostname"] = "core-test-addon" + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +@pytest.fixture(name="get_addon_discovery_info") +def get_addon_discovery_info_fixture() -> Generator[AsyncMock]: + """Mock get add-on discovery info.""" + with patch( + 
"homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info" + ) as get_addon_discovery_info: + yield get_addon_discovery_info + + +@pytest.fixture(name="addon_store_info") +def addon_store_info_fixture() -> Generator[AsyncMock]: + """Mock Supervisor add-on store info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" + ) as addon_store_info: + addon_store_info.return_value = { + "available": False, + "installed": None, + "state": None, + "version": "1.0.0", + } + yield addon_store_info + + +@pytest.fixture(name="addon_info") +def addon_info_fixture() -> Generator[AsyncMock]: + """Mock Supervisor add-on info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_info", + ) as addon_info: + addon_info.return_value = { + "available": False, + "hostname": None, + "options": {}, + "state": None, + "update_available": False, + "version": None, + } + yield addon_info + + +@pytest.fixture(name="set_addon_options") +def set_addon_options_fixture() -> Generator[AsyncMock]: + """Mock set add-on options.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_set_addon_options" + ) as set_options: + yield set_options + + +@pytest.fixture(name="install_addon") +def install_addon_fixture() -> Generator[AsyncMock]: + """Mock install add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_install_addon" + ) as install_addon: + yield install_addon + + +@pytest.fixture(name="uninstall_addon") +def uninstall_addon_fixture() -> Generator[AsyncMock]: + """Mock uninstall add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_uninstall_addon" + ) as uninstall_addon: + yield uninstall_addon + + +@pytest.fixture(name="start_addon") +def start_addon_fixture() -> Generator[AsyncMock]: + """Mock start add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_start_addon" + ) as start_addon: + yield start_addon + + +@pytest.fixture(name="restart_addon") +def restart_addon_fixture() -> Generator[AsyncMock]: + """Mock restart add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_restart_addon" + ) as restart_addon: + yield restart_addon + + +@pytest.fixture(name="stop_addon") +def stop_addon_fixture() -> Generator[AsyncMock]: + """Mock stop add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_stop_addon" + ) as stop_addon: + yield stop_addon + + +@pytest.fixture(name="create_backup") +def create_backup_fixture() -> Generator[AsyncMock]: + """Mock create backup.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_create_backup" + ) as create_backup: + yield create_backup + + +@pytest.fixture(name="update_addon") +def mock_update_addon() -> Generator[AsyncMock]: + """Mock update add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_update_addon" + ) as update_addon: + yield update_addon + async def test_not_installed_raises_exception( addon_manager: AddonManager, @@ -45,8 +192,8 @@ async def test_not_available_raises_exception( addon_info: AsyncMock, ) -> None: """Test addon not available raises exception.""" - addon_store_info.return_value.available = False - addon_info.return_value.available = False + addon_store_info.return_value["available"] = False + addon_info.return_value["available"] = False with pytest.raises(AddonError) as err: await addon_manager.async_install_addon() @@ -63,11 +210,7 @@ async def test_get_addon_discovery_info( 
addon_manager: AddonManager, get_addon_discovery_info: AsyncMock ) -> None: """Test get addon discovery info.""" - get_addon_discovery_info.return_value = [ - Discovery( - addon="test_addon", service="", uuid=uuid4(), config={"test_key": "test"} - ) - ] + get_addon_discovery_info.return_value = {"config": {"test_key": "test"}} assert await addon_manager.async_get_addon_discovery_info() == {"test_key": "test"} @@ -78,6 +221,8 @@ async def test_missing_addon_discovery_info( addon_manager: AddonManager, get_addon_discovery_info: AsyncMock ) -> None: """Test missing addon discovery info.""" + get_addon_discovery_info.return_value = None + with pytest.raises(AddonError): await addon_manager.async_get_addon_discovery_info() @@ -88,7 +233,7 @@ async def test_get_addon_discovery_info_error( addon_manager: AddonManager, get_addon_discovery_info: AsyncMock ) -> None: """Test get addon discovery info raises error.""" - get_addon_discovery_info.side_effect = SupervisorError("Boom") + get_addon_discovery_info.side_effect = HassioAPIError("Boom") with pytest.raises(AddonError) as err: assert await addon_manager.async_get_addon_discovery_info() @@ -123,7 +268,7 @@ async def test_get_addon_info( addon_state: AddonState, ) -> None: """Test get addon info when addon is installed.""" - addon_installed.return_value.state = addon_info_state + addon_installed.return_value["state"] = addon_info_state assert await addon_manager.async_get_addon_info() == AddonInfo( available=True, hostname="core-test-addon", @@ -141,7 +286,7 @@ async def test_get_addon_info( "addon_store_info_error", "addon_store_info_calls", ), - [(SupervisorError("Boom"), 1, None, 1), (None, 0, SupervisorError("Boom"), 1)], + [(HassioAPIError("Boom"), 1, None, 1), (None, 0, HassioAPIError("Boom"), 1)], ) async def test_get_addon_info_error( addon_manager: AddonManager, @@ -174,7 +319,7 @@ async def test_set_addon_options( assert set_addon_options.call_count == 1 assert set_addon_options.call_args == call( - "test_addon", AddonsOptions(config={"test_key": "test"}) + hass, "test_addon", {"options": {"test_key": "test"}} ) @@ -182,7 +327,7 @@ async def test_set_addon_options_error( hass: HomeAssistant, addon_manager: AddonManager, set_addon_options: AsyncMock ) -> None: """Test set addon options raises error.""" - set_addon_options.side_effect = SupervisorError("Boom") + set_addon_options.side_effect = HassioAPIError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_set_addon_options({"test_key": "test"}) @@ -191,7 +336,7 @@ async def test_set_addon_options_error( assert set_addon_options.call_count == 1 assert set_addon_options.call_args == call( - "test_addon", AddonsOptions(config={"test_key": "test"}) + hass, "test_addon", {"options": {"test_key": "test"}} ) @@ -202,8 +347,8 @@ async def test_install_addon( addon_info: AsyncMock, ) -> None: """Test install addon.""" - addon_store_info.return_value.available = True - addon_info.return_value.available = True + addon_store_info.return_value["available"] = True + addon_info.return_value["available"] = True await addon_manager.async_install_addon() @@ -217,9 +362,9 @@ async def test_install_addon_error( addon_info: AsyncMock, ) -> None: """Test install addon raises error.""" - addon_store_info.return_value.available = True - addon_info.return_value.available = True - install_addon.side_effect = SupervisorError("Boom") + addon_store_info.return_value["available"] = True + addon_info.return_value["available"] = True + install_addon.side_effect = HassioAPIError("Boom") with 
pytest.raises(AddonError) as err: await addon_manager.async_install_addon() @@ -270,7 +415,7 @@ async def test_schedule_install_addon_error( install_addon: AsyncMock, ) -> None: """Test schedule install addon raises error.""" - install_addon.side_effect = SupervisorError("Boom") + install_addon.side_effect = HassioAPIError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_schedule_install_addon() @@ -287,7 +432,7 @@ async def test_schedule_install_addon_logs_error( caplog: pytest.LogCaptureFixture, ) -> None: """Test schedule install addon logs error.""" - install_addon.side_effect = SupervisorError("Boom") + install_addon.side_effect = HassioAPIError("Boom") await addon_manager.async_schedule_install_addon(catch_error=True) @@ -308,7 +453,7 @@ async def test_uninstall_addon_error( addon_manager: AddonManager, uninstall_addon: AsyncMock ) -> None: """Test uninstall addon raises error.""" - uninstall_addon.side_effect = SupervisorError("Boom") + uninstall_addon.side_effect = HassioAPIError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_uninstall_addon() @@ -329,7 +474,7 @@ async def test_start_addon_error( addon_manager: AddonManager, start_addon: AsyncMock ) -> None: """Test start addon raises error.""" - start_addon.side_effect = SupervisorError("Boom") + start_addon.side_effect = HassioAPIError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_start_addon() @@ -371,7 +516,7 @@ async def test_schedule_start_addon_error( start_addon: AsyncMock, ) -> None: """Test schedule start addon raises error.""" - start_addon.side_effect = SupervisorError("Boom") + start_addon.side_effect = HassioAPIError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_schedule_start_addon() @@ -388,7 +533,7 @@ async def test_schedule_start_addon_logs_error( caplog: pytest.LogCaptureFixture, ) -> None: """Test schedule start addon logs error.""" - start_addon.side_effect = SupervisorError("Boom") + start_addon.side_effect = HassioAPIError("Boom") await addon_manager.async_schedule_start_addon(catch_error=True) @@ -409,7 +554,7 @@ async def test_restart_addon_error( addon_manager: AddonManager, restart_addon: AsyncMock ) -> None: """Test restart addon raises error.""" - restart_addon.side_effect = SupervisorError("Boom") + restart_addon.side_effect = HassioAPIError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_restart_addon() @@ -451,7 +596,7 @@ async def test_schedule_restart_addon_error( restart_addon: AsyncMock, ) -> None: """Test schedule restart addon raises error.""" - restart_addon.side_effect = SupervisorError("Boom") + restart_addon.side_effect = HassioAPIError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_schedule_restart_addon() @@ -468,7 +613,7 @@ async def test_schedule_restart_addon_logs_error( caplog: pytest.LogCaptureFixture, ) -> None: """Test schedule restart addon logs error.""" - restart_addon.side_effect = SupervisorError("Boom") + restart_addon.side_effect = HassioAPIError("Boom") await addon_manager.async_schedule_restart_addon(catch_error=True) @@ -487,7 +632,7 @@ async def test_stop_addon_error( addon_manager: AddonManager, stop_addon: AsyncMock ) -> None: """Test stop addon raises error.""" - stop_addon.side_effect = SupervisorError("Boom") + stop_addon.side_effect = HassioAPIError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_stop_addon() @@ -506,7 +651,7 @@ async def test_update_addon( update_addon: AsyncMock, ) -> 
None: """Test update addon.""" - addon_info.return_value.update_available = True + addon_info.return_value["update_available"] = True await addon_manager.async_update_addon() @@ -526,7 +671,7 @@ async def test_update_addon_no_update( update_addon: AsyncMock, ) -> None: """Test update addon without update available.""" - addon_info.return_value.update_available = False + addon_info.return_value["update_available"] = False await addon_manager.async_update_addon() @@ -544,8 +689,8 @@ async def test_update_addon_error( update_addon: AsyncMock, ) -> None: """Test update addon raises error.""" - addon_info.return_value.update_available = True - update_addon.side_effect = SupervisorError("Boom") + addon_info.return_value["update_available"] = True + update_addon.side_effect = HassioAPIError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_update_addon() @@ -569,7 +714,7 @@ async def test_schedule_update_addon( update_addon: AsyncMock, ) -> None: """Test schedule update addon.""" - addon_info.return_value.update_available = True + addon_info.return_value["update_available"] = True update_task = addon_manager.async_schedule_update_addon() @@ -624,7 +769,7 @@ async def test_schedule_update_addon( ( None, 1, - SupervisorError("Boom"), + HassioAPIError("Boom"), 1, "Failed to update the Test add-on: Boom", ), @@ -642,7 +787,7 @@ async def test_schedule_update_addon_error( error_message: str, ) -> None: """Test schedule update addon raises error.""" - addon_installed.return_value.update_available = True + addon_installed.return_value["update_available"] = True create_backup.side_effect = create_backup_error update_addon.side_effect = update_addon_error @@ -674,7 +819,7 @@ async def test_schedule_update_addon_error( ( None, 1, - SupervisorError("Boom"), + HassioAPIError("Boom"), 1, "Failed to update the Test add-on: Boom", ), @@ -693,7 +838,7 @@ async def test_schedule_update_addon_logs_error( caplog: pytest.LogCaptureFixture, ) -> None: """Test schedule update addon logs error.""" - addon_installed.return_value.update_available = True + addon_installed.return_value["update_available"] = True create_backup.side_effect = create_backup_error update_addon.side_effect = update_addon_error @@ -743,10 +888,9 @@ async def test_create_backup_error( ) -@pytest.mark.usefixtures("addon_installed") -@pytest.mark.parametrize("set_addon_options_side_effect", [None]) async def test_schedule_install_setup_addon( addon_manager: AddonManager, + addon_installed: AsyncMock, install_addon: AsyncMock, set_addon_options: AsyncMock, start_addon: AsyncMock, @@ -794,7 +938,7 @@ async def test_schedule_install_setup_addon( ), [ ( - SupervisorError("Boom"), + HassioAPIError("Boom"), 1, None, 0, @@ -805,7 +949,7 @@ async def test_schedule_install_setup_addon( ( None, 1, - SupervisorError("Boom"), + HassioAPIError("Boom"), 1, None, 0, @@ -816,7 +960,7 @@ async def test_schedule_install_setup_addon( 1, None, 1, - SupervisorError("Boom"), + HassioAPIError("Boom"), 1, "Failed to start the Test add-on: Boom", ), @@ -863,7 +1007,7 @@ async def test_schedule_install_setup_addon_error( ), [ ( - SupervisorError("Boom"), + HassioAPIError("Boom"), 1, None, 0, @@ -874,7 +1018,7 @@ async def test_schedule_install_setup_addon_error( ( None, 1, - SupervisorError("Boom"), + HassioAPIError("Boom"), 1, None, 0, @@ -885,7 +1029,7 @@ async def test_schedule_install_setup_addon_error( 1, None, 1, - SupervisorError("Boom"), + HassioAPIError("Boom"), 1, "Failed to start the Test add-on: Boom", ), @@ -921,10 +1065,11 @@ async def 
test_schedule_install_setup_addon_logs_error( assert start_addon.call_count == start_addon_calls -@pytest.mark.usefixtures("addon_installed") -@pytest.mark.parametrize("set_addon_options_side_effect", [None]) async def test_schedule_setup_addon( - addon_manager: AddonManager, set_addon_options: AsyncMock, start_addon: AsyncMock + addon_manager: AddonManager, + addon_installed: AsyncMock, + set_addon_options: AsyncMock, + start_addon: AsyncMock, ) -> None: """Test schedule setup addon.""" start_task = addon_manager.async_schedule_setup_addon({"test_key": "test"}) @@ -960,7 +1105,7 @@ async def test_schedule_setup_addon( ), [ ( - SupervisorError("Boom"), + HassioAPIError("Boom"), 1, None, 0, @@ -969,7 +1114,7 @@ async def test_schedule_setup_addon( ( None, 1, - SupervisorError("Boom"), + HassioAPIError("Boom"), 1, "Failed to start the Test add-on: Boom", ), @@ -1009,7 +1154,7 @@ async def test_schedule_setup_addon_error( ), [ ( - SupervisorError("Boom"), + HassioAPIError("Boom"), 1, None, 0, @@ -1018,7 +1163,7 @@ async def test_schedule_setup_addon_error( ( None, 1, - SupervisorError("Boom"), + HassioAPIError("Boom"), 1, "Failed to start the Test add-on: Boom", ), diff --git a/tests/components/hassio/test_addon_panel.py b/tests/components/hassio/test_addon_panel.py index 2c3552c8d08..8436b3393b9 100644 --- a/tests/components/hassio/test_addon_panel.py +++ b/tests/components/hassio/test_addon_panel.py @@ -1,7 +1,7 @@ """Test add-on panel.""" from http import HTTPStatus -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest @@ -13,11 +13,10 @@ from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def mock_all( - aioclient_mock: AiohttpClientMocker, supervisor_is_connected: AsyncMock -) -> None: +def mock_all(aioclient_mock: AiohttpClientMocker) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) + aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/homeassistant/info", @@ -25,9 +24,8 @@ def mock_all( ) -@pytest.mark.usefixtures("hassio_env") async def test_hassio_addon_panel_startup( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hassio_env ) -> None: """Test startup and panel setup after event.""" aioclient_mock.get( @@ -70,10 +68,10 @@ async def test_hassio_addon_panel_startup( ) -@pytest.mark.usefixtures("hassio_env") async def test_hassio_addon_panel_api( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, + hassio_env, hass_client: ClientSessionGenerator, ) -> None: """Test panel api after event.""" diff --git a/tests/components/hassio/test_auth.py b/tests/components/hassio/test_auth.py index ad96b58e99d..175d9061d56 100644 --- a/tests/components/hassio/test_auth.py +++ b/tests/components/hassio/test_auth.py @@ -3,12 +3,11 @@ from http import HTTPStatus from unittest.mock import Mock, patch -from aiohttp.test_utils import TestClient - from homeassistant.auth.providers.homeassistant import InvalidAuth +from homeassistant.core import HomeAssistant -async def test_auth_success(hassio_client_supervisor: TestClient) -> None: +async def test_auth_success(hass: HomeAssistant, hassio_client_supervisor) -> None: """Test no auth needed for .""" with patch( "homeassistant.auth.providers.homeassistant." 
@@ -24,7 +23,7 @@ async def test_auth_success(hassio_client_supervisor: TestClient) -> None: mock_login.assert_called_with("test", "123456") -async def test_auth_fails_no_supervisor(hassio_client: TestClient) -> None: +async def test_auth_fails_no_supervisor(hass: HomeAssistant, hassio_client) -> None: """Test if only supervisor can access.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -40,7 +39,7 @@ async def test_auth_fails_no_supervisor(hassio_client: TestClient) -> None: assert not mock_login.called -async def test_auth_fails_no_auth(hassio_noauth_client: TestClient) -> None: +async def test_auth_fails_no_auth(hass: HomeAssistant, hassio_noauth_client) -> None: """Test if only supervisor can access.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -56,7 +55,7 @@ async def test_auth_fails_no_auth(hassio_noauth_client: TestClient) -> None: assert not mock_login.called -async def test_login_error(hassio_client_supervisor: TestClient) -> None: +async def test_login_error(hass: HomeAssistant, hassio_client_supervisor) -> None: """Test no auth needed for error.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -73,7 +72,7 @@ async def test_login_error(hassio_client_supervisor: TestClient) -> None: mock_login.assert_called_with("test", "123456") -async def test_login_no_data(hassio_client_supervisor: TestClient) -> None: +async def test_login_no_data(hass: HomeAssistant, hassio_client_supervisor) -> None: """Test auth with no data -> error.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -87,7 +86,7 @@ async def test_login_no_data(hassio_client_supervisor: TestClient) -> None: assert not mock_login.called -async def test_login_no_username(hassio_client_supervisor: TestClient) -> None: +async def test_login_no_username(hass: HomeAssistant, hassio_client_supervisor) -> None: """Test auth with no username in data -> error.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -103,7 +102,9 @@ async def test_login_no_username(hassio_client_supervisor: TestClient) -> None: assert not mock_login.called -async def test_login_success_extra(hassio_client_supervisor: TestClient) -> None: +async def test_login_success_extra( + hass: HomeAssistant, hassio_client_supervisor +) -> None: """Test auth with extra data.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -124,7 +125,7 @@ async def test_login_success_extra(hassio_client_supervisor: TestClient) -> None mock_login.assert_called_with("test", "123456") -async def test_password_success(hassio_client_supervisor: TestClient) -> None: +async def test_password_success(hass: HomeAssistant, hassio_client_supervisor) -> None: """Test no auth needed for .""" with patch( "homeassistant.auth.providers.homeassistant." 
@@ -140,7 +141,7 @@ async def test_password_success(hassio_client_supervisor: TestClient) -> None: mock_change.assert_called_with("test", "123456") -async def test_password_fails_no_supervisor(hassio_client: TestClient) -> None: +async def test_password_fails_no_supervisor(hass: HomeAssistant, hassio_client) -> None: """Test if only supervisor can access.""" resp = await hassio_client.post( "/api/hassio_auth/password_reset", @@ -151,7 +152,9 @@ async def test_password_fails_no_supervisor(hassio_client: TestClient) -> None: assert resp.status == HTTPStatus.UNAUTHORIZED -async def test_password_fails_no_auth(hassio_noauth_client: TestClient) -> None: +async def test_password_fails_no_auth( + hass: HomeAssistant, hassio_noauth_client +) -> None: """Test if only supervisor can access.""" resp = await hassio_noauth_client.post( "/api/hassio_auth/password_reset", @@ -162,7 +165,7 @@ async def test_password_fails_no_auth(hassio_noauth_client: TestClient) -> None: assert resp.status == HTTPStatus.UNAUTHORIZED -async def test_password_no_user(hassio_client_supervisor: TestClient) -> None: +async def test_password_no_user(hass: HomeAssistant, hassio_client_supervisor) -> None: """Test changing password for invalid user.""" resp = await hassio_client_supervisor.post( "/api/hassio_auth/password_reset", diff --git a/tests/components/hassio/test_binary_sensor.py b/tests/components/hassio/test_binary_sensor.py index 9878dd67a21..af72ea9d702 100644 --- a/tests/components/hassio/test_binary_sensor.py +++ b/tests/components/hassio/test_binary_sensor.py @@ -1,7 +1,7 @@ """The tests for the hassio binary sensors.""" import os -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest @@ -10,8 +10,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component -from .common import MOCK_REPOSITORIES, MOCK_STORE_ADDONS - from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -19,16 +17,10 @@ MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) -def mock_all( - aioclient_mock: AiohttpClientMocker, - addon_installed: AsyncMock, - store_info: AsyncMock, - addon_changelog: AsyncMock, - addon_stats: AsyncMock, - resolution_info: AsyncMock, -) -> None: +def mock_all(aioclient_mock: AiohttpClientMocker) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) + aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -41,6 +33,13 @@ def mock_all( }, }, ) + aioclient_mock.get( + "http://127.0.0.1/store", + json={ + "result": "ok", + "data": {"addons": [], "repositories": []}, + }, + ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -106,6 +105,22 @@ def mock_all( }, }, ) + aioclient_mock.get( + "http://127.0.0.1/addons/test/stats", + json={ + "result": "ok", + "data": { + "cpu_percent": 0.99, + "memory_usage": 182611968, + "memory_limit": 3977146368, + "memory_percent": 4.59, + "network_rx": 362570232, + "network_tx": 82374138, + "blk_read": 46010945536, + "blk_write": 15051526144, + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/core/stats", json={ @@ -138,9 +153,33 @@ def mock_all( }, }, ) + aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") + 
aioclient_mock.get( + "http://127.0.0.1/addons/test/info", + json={"result": "ok", "data": {"auto_update": True}}, + ) + aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") + aioclient_mock.get( + "http://127.0.0.1/addons/test2/info", + json={"result": "ok", "data": {"auto_update": False}}, + ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) + aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -154,26 +193,20 @@ def mock_all( @pytest.mark.parametrize( - ("store_addons", "store_repositories"), [(MOCK_STORE_ADDONS, MOCK_REPOSITORIES)] -) -@pytest.mark.parametrize( - ("entity_id", "expected", "addon_state"), + ("entity_id", "expected"), [ - ("binary_sensor.test_running", "on", "started"), - ("binary_sensor.test2_running", "off", "stopped"), + ("binary_sensor.test_running", "on"), + ("binary_sensor.test2_running", "off"), ], ) async def test_binary_sensor( hass: HomeAssistant, - entity_id: str, - expected: str, - addon_state: str, + entity_id, + expected, aioclient_mock: AiohttpClientMocker, entity_registry: er.EntityRegistry, - addon_installed: AsyncMock, ) -> None: """Test hassio OS and addons binary sensor.""" - addon_installed.return_value.state = addon_state config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) diff --git a/tests/components/hassio/test_config_flow.py b/tests/components/hassio/test_config_flow.py index 48c1a06f81e..1153203817d 100644 --- a/tests/components/hassio/test_config_flow.py +++ b/tests/components/hassio/test_config_flow.py @@ -38,4 +38,4 @@ async def test_multiple_entries(hass: HomeAssistant) -> None: DOMAIN, context={"source": "system"} ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" + assert result["reason"] == "already_configured" diff --git a/tests/components/hassio/test_diagnostics.py b/tests/components/hassio/test_diagnostics.py index c95cde67b8a..0d648ba9bdb 100644 --- a/tests/components/hassio/test_diagnostics.py +++ b/tests/components/hassio/test_diagnostics.py @@ -1,7 +1,7 @@ """Test Supervisor diagnostics.""" import os -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest @@ -18,16 +18,10 @@ MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) -def mock_all( - aioclient_mock: AiohttpClientMocker, - addon_installed: AsyncMock, - store_info: AsyncMock, - addon_stats: AsyncMock, - addon_changelog: AsyncMock, - resolution_info: AsyncMock, -) -> None: +def mock_all(aioclient_mock: AiohttpClientMocker) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) + aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -40,6 +34,13 @@ def mock_all( }, }, ) + aioclient_mock.get( + "http://127.0.0.1/store", + json={ + "result": "ok", + "data": {"addons": [], "repositories": []}, + }, + ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -109,6 +110,22 @@ def mock_all( }, }, ) + 
aioclient_mock.get( + "http://127.0.0.1/addons/test/stats", + json={ + "result": "ok", + "data": { + "cpu_percent": 0.99, + "memory_usage": 182611968, + "memory_limit": 3977146368, + "memory_percent": 4.59, + "network_rx": 362570232, + "network_tx": 82374138, + "blk_read": 46010945536, + "blk_write": 15051526144, + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/core/stats", json={ @@ -141,9 +158,33 @@ def mock_all( }, }, ) + aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") + aioclient_mock.get( + "http://127.0.0.1/addons/test/info", + json={"result": "ok", "data": {"auto_update": True}}, + ) + aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") + aioclient_mock.get( + "http://127.0.0.1/addons/test2/info", + json={"result": "ok", "data": {"auto_update": False}}, + ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) + aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_discovery.py b/tests/components/hassio/test_discovery.py index ba6338f84e2..0783ee77932 100644 --- a/tests/components/hassio/test_discovery.py +++ b/tests/components/hassio/test_discovery.py @@ -1,37 +1,24 @@ """Test config flow.""" -from collections.abc import Generator from http import HTTPStatus from unittest.mock import AsyncMock, Mock, patch -from uuid import uuid4 -from aiohasupervisor.models import Discovery -from aiohttp.test_utils import TestClient import pytest from homeassistant import config_entries +from homeassistant.components.hassio.discovery import HassioServiceInfo from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STARTED from homeassistant.core import HomeAssistant -from homeassistant.helpers.discovery_flow import DiscoveryKey -from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - MockModule, - mock_config_flow, - mock_integration, - mock_platform, -) +from tests.common import MockModule, mock_config_flow, mock_integration, mock_platform from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(name="mock_mqtt") -def mock_mqtt_fixture( - hass: HomeAssistant, -) -> Generator[type[config_entries.ConfigFlow]]: +async def mock_mqtt_fixture(hass): """Mock the MQTT integration's config flow.""" mock_integration(hass, MockModule(MQTT_DOMAIN)) mock_platform(hass, f"{MQTT_DOMAIN}.config_flow", None) @@ -47,37 +34,44 @@ def mock_mqtt_fixture( yield MqttFlow -@pytest.mark.usefixtures("hassio_client") async def test_hassio_discovery_startup( - hass: HomeAssistant, - mock_mqtt: type[config_entries.ConfigFlow], - addon_installed: AsyncMock, - get_addon_discovery_info: AsyncMock, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hassio_client, mock_mqtt ) -> None: """Test startup and discovery after event.""" - get_addon_discovery_info.return_value = [ - Discovery( - addon="mosquitto", - service="mqtt", - uuid=(uuid := uuid4()), - config={ - "broker": "mock-broker", - "port": 1883, - "username": 
"mock-user", - "password": "mock-pass", - "protocol": "3.1.1", + aioclient_mock.get( + "http://127.0.0.1/discovery", + json={ + "result": "ok", + "data": { + "discovery": [ + { + "service": "mqtt", + "uuid": "test", + "addon": "mosquitto", + "config": { + "broker": "mock-broker", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", + }, + } + ] }, - ) - ] - addon_installed.return_value.name = "Mosquitto Test" + }, + ) + aioclient_mock.get( + "http://127.0.0.1/addons/mosquitto/info", + json={"result": "ok", "data": {"name": "Mosquitto Test"}}, + ) - assert get_addon_discovery_info.call_count == 0 + assert aioclient_mock.call_count == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_START) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() - assert get_addon_discovery_info.call_count == 1 + assert aioclient_mock.call_count == 2 assert mock_mqtt.async_step_hassio.called mock_mqtt.async_step_hassio.assert_called_with( HassioServiceInfo( @@ -91,39 +85,45 @@ async def test_hassio_discovery_startup( }, name="Mosquitto Test", slug="mosquitto", - uuid=uuid.hex, + uuid="test", ) ) -@pytest.mark.usefixtures("hassio_client") async def test_hassio_discovery_startup_done( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - mock_mqtt: type[config_entries.ConfigFlow], - addon_installed: AsyncMock, - get_addon_discovery_info: AsyncMock, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hassio_client, mock_mqtt ) -> None: """Test startup and discovery with hass discovery.""" aioclient_mock.post( "http://127.0.0.1/supervisor/options", json={"result": "ok", "data": {}}, ) - get_addon_discovery_info.return_value = [ - Discovery( - addon="mosquitto", - service="mqtt", - uuid=(uuid := uuid4()), - config={ - "broker": "mock-broker", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "protocol": "3.1.1", + aioclient_mock.get( + "http://127.0.0.1/discovery", + json={ + "result": "ok", + "data": { + "discovery": [ + { + "service": "mqtt", + "uuid": "test", + "addon": "mosquitto", + "config": { + "broker": "mock-broker", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", + }, + } + ] }, - ) - ] - addon_installed.return_value.name = "Mosquitto Test" + }, + ) + aioclient_mock.get( + "http://127.0.0.1/addons/mosquitto/info", + json={"result": "ok", "data": {"name": "Mosquitto Test"}}, + ) with ( patch( @@ -139,7 +139,7 @@ async def test_hassio_discovery_startup_done( await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() - assert get_addon_discovery_info.call_count == 1 + assert aioclient_mock.call_count == 2 assert mock_mqtt.async_step_hassio.called mock_mqtt.async_step_hassio.assert_called_with( HassioServiceInfo( @@ -153,43 +153,48 @@ async def test_hassio_discovery_startup_done( }, name="Mosquitto Test", slug="mosquitto", - uuid=uuid.hex, + uuid="test", ) ) async def test_hassio_discovery_webhook( - hass: HomeAssistant, - hassio_client: TestClient, - mock_mqtt: type[config_entries.ConfigFlow], - addon_installed: AsyncMock, - get_discovery_message: AsyncMock, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hassio_client, mock_mqtt ) -> None: """Test discovery webhook.""" - get_discovery_message.return_value = Discovery( - addon="mosquitto", - service="mqtt", - uuid=(uuid := uuid4()), - config={ - "broker": "mock-broker", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - 
"protocol": "3.1.1", + aioclient_mock.get( + "http://127.0.0.1/discovery/testuuid", + json={ + "result": "ok", + "data": { + "service": "mqtt", + "uuid": "test", + "addon": "mosquitto", + "config": { + "broker": "mock-broker", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", + }, + }, }, ) - addon_installed.return_value.name = "Mosquitto Test" + aioclient_mock.get( + "http://127.0.0.1/addons/mosquitto/info", + json={"result": "ok", "data": {"name": "Mosquitto Test"}}, + ) resp = await hassio_client.post( - f"/api/hassio_push/discovery/{uuid!s}", - json={"addon": "mosquitto", "service": "mqtt", "uuid": str(uuid)}, + "/api/hassio_push/discovery/testuuid", + json={"addon": "mosquitto", "service": "mqtt", "uuid": "testuuid"}, ) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert resp.status == HTTPStatus.OK - assert get_discovery_message.call_count == 1 + assert aioclient_mock.call_count == 2 assert mock_mqtt.async_step_hassio.called mock_mqtt.async_step_hassio.assert_called_with( HassioServiceInfo( @@ -203,153 +208,6 @@ async def test_hassio_discovery_webhook( }, name="Mosquitto Test", slug="mosquitto", - uuid=uuid.hex, + uuid="test", ) ) - - -TEST_UUID = str(uuid4()) - - -@pytest.mark.parametrize( - ( - "entry_domain", - "entry_discovery_keys", - ), - [ - # Matching discovery key - ( - "mock-domain", - {"hassio": (DiscoveryKey(domain="hassio", key=TEST_UUID, version=1),)}, - ), - # Matching discovery key - ( - "mock-domain", - { - "hassio": (DiscoveryKey(domain="hassio", key=TEST_UUID, version=1),), - "other": (DiscoveryKey(domain="other", key="blah", version=1),), - }, - ), - # Matching discovery key, other domain - # Note: Rediscovery is not currently restricted to the domain of the removed - # entry. Such a check can be added if needed. 
- ( - "comp", - {"hassio": (DiscoveryKey(domain="hassio", key=TEST_UUID, version=1),)}, - ), - ], -) -@pytest.mark.parametrize( - "entry_source", - [ - config_entries.SOURCE_HASSIO, - config_entries.SOURCE_IGNORE, - config_entries.SOURCE_USER, - ], -) -async def test_hassio_rediscover( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - hassio_client: TestClient, - addon_installed: AsyncMock, - entry_domain: str, - entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], - entry_source: str, - get_addon_discovery_info: AsyncMock, - get_discovery_message: AsyncMock, -) -> None: - """Test we reinitiate flows when an ignored config entry is removed.""" - - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - await hass.async_block_till_done() - - entry = MockConfigEntry( - domain=entry_domain, - discovery_keys=entry_discovery_keys, - unique_id="mock-unique-id", - state=config_entries.ConfigEntryState.LOADED, - source=entry_source, - ) - entry.add_to_hass(hass) - - get_discovery_message.return_value = Discovery( - addon="mosquitto", - service="mqtt", - uuid=(uuid := uuid4()), - config={ - "broker": "mock-broker", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "protocol": "3.1.1", - }, - ) - - expected_context = { - "discovery_key": DiscoveryKey(domain="hassio", key=uuid.hex, version=1), - "source": config_entries.SOURCE_HASSIO, - } - - with patch.object(hass.config_entries.flow, "async_init") as mock_init: - await hass.config_entries.async_remove(entry.entry_id) - await hass.async_block_till_done() - - assert len(mock_init.mock_calls) == 1 - assert mock_init.mock_calls[0][1][0] == "mqtt" - assert mock_init.mock_calls[0][2]["context"] == expected_context - - -@pytest.mark.usefixtures("mock_async_zeroconf") -@pytest.mark.parametrize( - ( - "entry_domain", - "entry_discovery_keys", - "entry_source", - "entry_unique_id", - ), - [ - # Discovery key from other domain - ( - "mock-domain", - {"bluetooth": (DiscoveryKey(domain="bluetooth", key="test", version=1),)}, - config_entries.SOURCE_IGNORE, - "mock-unique-id", - ), - # Discovery key from the future - ( - "mock-domain", - {"hassio": (DiscoveryKey(domain="hassio", key="test", version=2),)}, - config_entries.SOURCE_IGNORE, - "mock-unique-id", - ), - ], -) -async def test_hassio_rediscover_no_match( - hass: HomeAssistant, - hassio_client: TestClient, - entry_domain: str, - entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], - entry_source: str, - entry_unique_id: str, -) -> None: - """Test we don't reinitiate flows when a non matching config entry is removed.""" - - mock_integration(hass, MockModule(entry_domain)) - - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - await hass.async_block_till_done() - - entry = MockConfigEntry( - domain=entry_domain, - discovery_keys=entry_discovery_keys, - unique_id=entry_unique_id, - state=config_entries.ConfigEntryState.LOADED, - source=entry_source, - ) - entry.add_to_hass(hass) - - with patch.object(hass.config_entries.flow, "async_init") as mock_init: - await hass.config_entries.async_remove(entry.entry_id) - await hass.async_block_till_done() - - assert len(mock_init.mock_calls) == 0 diff --git a/tests/components/hassio/test_handler.py b/tests/components/hassio/test_handler.py index 56f0dcb706c..c418576a802 100644 --- a/tests/components/hassio/test_handler.py +++ b/tests/components/hassio/test_handler.py @@ -4,6 +4,7 @@ from __future__ import annotations from typing import Any, Literal +import aiohttp from aiohttp import hdrs, web import pytest @@ -15,6 +16,36 @@ 
from homeassistant.helpers.aiohttp_client import async_get_clientsession from tests.test_util.aiohttp import AiohttpClientMocker +async def test_api_ping( + hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker +) -> None: + """Test setup with API ping.""" + aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) + + assert await hassio_handler.is_connected() + assert aioclient_mock.call_count == 1 + + +async def test_api_ping_error( + hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker +) -> None: + """Test setup with API ping error.""" + aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "error"}) + + assert not (await hassio_handler.is_connected()) + assert aioclient_mock.call_count == 1 + + +async def test_api_ping_exeption( + hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker +) -> None: + """Test setup with API ping exception.""" + aioclient_mock.get("http://127.0.0.1/supervisor/ping", exc=aiohttp.ClientError()) + + assert not (await hassio_handler.is_connected()) + assert aioclient_mock.call_count == 1 + + async def test_api_info( hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker ) -> None: @@ -150,6 +181,54 @@ async def test_api_core_info_error( assert aioclient_mock.call_count == 1 +async def test_api_homeassistant_stop( + hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker +) -> None: + """Test setup with API Home Assistant stop.""" + aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) + + assert await hassio_handler.stop_homeassistant() + assert aioclient_mock.call_count == 1 + + +async def test_api_homeassistant_restart( + hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker +) -> None: + """Test setup with API Home Assistant restart.""" + aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": "ok"}) + + assert await hassio_handler.restart_homeassistant() + assert aioclient_mock.call_count == 1 + + +async def test_api_addon_info( + hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker +) -> None: + """Test setup with API Add-on info.""" + aioclient_mock.get( + "http://127.0.0.1/addons/test/info", + json={"result": "ok", "data": {"name": "bla"}}, + ) + + data = await hassio_handler.get_addon_info("test") + assert data["name"] == "bla" + assert aioclient_mock.call_count == 1 + + +async def test_api_addon_stats( + hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker +) -> None: + """Test setup with API Add-on stats.""" + aioclient_mock.get( + "http://127.0.0.1/addons/test/stats", + json={"result": "ok", "data": {"memory_percent": 0.01}}, + ) + + data = await hassio_handler.get_addon_stats("test") + assert data["memory_percent"] == 0.01 + assert aioclient_mock.call_count == 1 + + async def test_api_core_stats( hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker ) -> None: @@ -178,6 +257,34 @@ async def test_api_supervisor_stats( assert aioclient_mock.call_count == 1 +async def test_api_discovery_message( + hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker +) -> None: + """Test setup with API discovery message.""" + aioclient_mock.get( + "http://127.0.0.1/discovery/test", + json={"result": "ok", "data": {"service": "mqtt"}}, + ) + + data = await hassio_handler.get_discovery_message("test") + assert data["service"] == "mqtt" + assert aioclient_mock.call_count == 1 + + +async def test_api_retrieve_discovery( + hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker +) -> None: + """Test setup with API 
discovery message.""" + aioclient_mock.get( + "http://127.0.0.1/discovery", + json={"result": "ok", "data": {"discovery": [{"service": "mqtt"}]}}, + ) + + data = await hassio_handler.retrieve_discovery_messages() + assert data["discovery"][-1]["service"] == "mqtt" + assert aioclient_mock.call_count == 1 + + async def test_api_ingress_panels( hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker ) -> None: @@ -208,7 +315,8 @@ async def test_api_ingress_panels( @pytest.mark.parametrize( ("api_call", "method", "payload"), [ - ("get_network_info", "GET", None), + ("retrieve_discovery_messages", "GET", None), + ("refresh_updates", "POST", None), ("update_diagnostics", "POST", True), ], ) @@ -257,9 +365,8 @@ async def test_api_headers( assert received_request.headers[hdrs.CONTENT_TYPE] == "application/octet-stream" -@pytest.mark.usefixtures("hassio_stubs") async def test_api_get_green_settings( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.get( @@ -282,9 +389,8 @@ async def test_api_get_green_settings( assert aioclient_mock.call_count == 1 -@pytest.mark.usefixtures("hassio_stubs") async def test_api_set_green_settings( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.post( @@ -301,9 +407,8 @@ async def test_api_set_green_settings( assert aioclient_mock.call_count == 1 -@pytest.mark.usefixtures("hassio_stubs") async def test_api_get_yellow_settings( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.get( @@ -322,9 +427,8 @@ async def test_api_get_yellow_settings( assert aioclient_mock.call_count == 1 -@pytest.mark.usefixtures("hassio_stubs") async def test_api_set_yellow_settings( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.post( @@ -341,9 +445,8 @@ async def test_api_set_yellow_settings( assert aioclient_mock.call_count == 1 -@pytest.mark.usefixtures("hassio_stubs") async def test_api_reboot_host( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.post( @@ -355,16 +458,8 @@ async def test_api_reboot_host( assert aioclient_mock.call_count == 1 -@pytest.mark.usefixtures("hassio_stubs") -async def test_send_command_invalid_command(hass: HomeAssistant) -> None: +async def test_send_command_invalid_command(hass: HomeAssistant, hassio_stubs) -> None: """Test send command fails when command is invalid.""" hassio: HassIO = hass.data["hassio"] with pytest.raises(HassioAPIError): - # absolute path await hassio.send_command("/test/../bad") - with pytest.raises(HassioAPIError): - # relative path - await hassio.send_command("test/../bad") - with pytest.raises(HassioAPIError): - # relative path with percent encoding - await hassio.send_command("test/%2E%2E/bad") diff --git a/tests/components/hassio/test_http.py b/tests/components/hassio/test_http.py index 8ed59bc78d1..a5ffb4f0d83 100644 --- a/tests/components/hassio/test_http.py +++ b/tests/components/hassio/test_http.py @@ -1,11 +1,9 @@ """The tests for the 
hassio component.""" -from collections.abc import Generator from http import HTTPStatus from unittest.mock import patch from aiohttp import StreamReader -from aiohttp.test_utils import TestClient import pytest from tests.common import MockUser @@ -13,7 +11,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture -def mock_not_onboarded() -> Generator[None]: +def mock_not_onboarded(): """Mock that we're not onboarded.""" with patch( "homeassistant.components.hassio.http.async_is_onboarded", return_value=False @@ -22,9 +20,7 @@ def mock_not_onboarded() -> Generator[None]: @pytest.fixture -def hassio_user_client( - hassio_client: TestClient, hass_admin_user: MockUser -) -> TestClient: +def hassio_user_client(hassio_client, hass_admin_user: MockUser): """Return a Hass.io HTTP client tied to a non-admin user.""" hass_admin_user.groups = [] return hassio_client @@ -39,7 +35,7 @@ def hassio_user_client( ], ) async def test_forward_request_onboarded_user_get( - hassio_user_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str + hassio_user_client, aioclient_mock: AiohttpClientMocker, path: str ) -> None: """Test fetching normal path.""" aioclient_mock.get(f"http://127.0.0.1/{path}", text="response") @@ -59,7 +55,7 @@ async def test_forward_request_onboarded_user_get( @pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"]) async def test_forward_request_onboarded_user_unallowed_methods( - hassio_user_client: TestClient, aioclient_mock: AiohttpClientMocker, method: str + hassio_user_client, aioclient_mock: AiohttpClientMocker, method: str ) -> None: """Test fetching normal path.""" resp = await hassio_user_client.post("/api/hassio/app/entrypoint.js") @@ -82,13 +78,11 @@ async def test_forward_request_onboarded_user_unallowed_methods( # Unauthenticated path ("supervisor/info", HTTPStatus.UNAUTHORIZED), ("supervisor/logs", HTTPStatus.UNAUTHORIZED), - ("supervisor/logs/follow", HTTPStatus.UNAUTHORIZED), ("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED), - ("addons/bl_b392/logs/follow", HTTPStatus.UNAUTHORIZED), ], ) async def test_forward_request_onboarded_user_unallowed_paths( - hassio_user_client: TestClient, + hassio_user_client, aioclient_mock: AiohttpClientMocker, bad_path: str, expected_status: int, @@ -111,7 +105,7 @@ async def test_forward_request_onboarded_user_unallowed_paths( ], ) async def test_forward_request_onboarded_noauth_get( - hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str + hassio_noauth_client, aioclient_mock: AiohttpClientMocker, path: str ) -> None: """Test fetching normal path.""" aioclient_mock.get(f"http://127.0.0.1/{path}", text="response") @@ -131,7 +125,7 @@ async def test_forward_request_onboarded_noauth_get( @pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"]) async def test_forward_request_onboarded_noauth_unallowed_methods( - hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, method: str + hassio_noauth_client, aioclient_mock: AiohttpClientMocker, method: str ) -> None: """Test fetching normal path.""" resp = await hassio_noauth_client.post("/api/hassio/app/entrypoint.js") @@ -154,13 +148,11 @@ async def test_forward_request_onboarded_noauth_unallowed_methods( # Unauthenticated path ("supervisor/info", HTTPStatus.UNAUTHORIZED), ("supervisor/logs", HTTPStatus.UNAUTHORIZED), - ("supervisor/logs/follow", HTTPStatus.UNAUTHORIZED), ("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED), - ("addons/bl_b392/logs/follow", HTTPStatus.UNAUTHORIZED), ], ) async def 
test_forward_request_onboarded_noauth_unallowed_paths( - hassio_noauth_client: TestClient, + hassio_noauth_client, aioclient_mock: AiohttpClientMocker, bad_path: str, expected_status: int, @@ -184,7 +176,7 @@ async def test_forward_request_onboarded_noauth_unallowed_paths( ], ) async def test_forward_request_not_onboarded_get( - hassio_noauth_client: TestClient, + hassio_noauth_client, aioclient_mock: AiohttpClientMocker, path: str, authenticated: bool, @@ -220,7 +212,7 @@ async def test_forward_request_not_onboarded_get( ], ) async def test_forward_request_not_onboarded_post( - hassio_noauth_client: TestClient, + hassio_noauth_client, aioclient_mock: AiohttpClientMocker, path: str, mock_not_onboarded, @@ -246,7 +238,7 @@ async def test_forward_request_not_onboarded_post( @pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"]) async def test_forward_request_not_onboarded_unallowed_methods( - hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, method: str + hassio_noauth_client, aioclient_mock: AiohttpClientMocker, method: str ) -> None: """Test fetching normal path.""" resp = await hassio_noauth_client.post("/api/hassio/app/entrypoint.js") @@ -269,13 +261,11 @@ async def test_forward_request_not_onboarded_unallowed_methods( # Unauthenticated path ("supervisor/info", HTTPStatus.UNAUTHORIZED), ("supervisor/logs", HTTPStatus.UNAUTHORIZED), - ("supervisor/logs/follow", HTTPStatus.UNAUTHORIZED), ("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED), - ("addons/bl_b392/logs/follow", HTTPStatus.UNAUTHORIZED), ], ) async def test_forward_request_not_onboarded_unallowed_paths( - hassio_noauth_client: TestClient, + hassio_noauth_client, aioclient_mock: AiohttpClientMocker, bad_path: str, expected_status: int, @@ -298,15 +288,13 @@ async def test_forward_request_not_onboarded_unallowed_paths( ("addons/bl_b392/icon", False), ("backups/1234abcd/info", True), ("supervisor/logs", True), - ("supervisor/logs/follow", True), ("addons/bl_b392/logs", True), - ("addons/bl_b392/logs/follow", True), ("addons/bl_b392/changelog", True), ("addons/bl_b392/documentation", True), ], ) async def test_forward_request_admin_get( - hassio_client: TestClient, + hassio_client, aioclient_mock: AiohttpClientMocker, path: str, authenticated: bool, @@ -341,7 +329,7 @@ async def test_forward_request_admin_get( ], ) async def test_forward_request_admin_post( - hassio_client: TestClient, + hassio_client, aioclient_mock: AiohttpClientMocker, path: str, ) -> None: @@ -366,7 +354,7 @@ async def test_forward_request_admin_post( @pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"]) async def test_forward_request_admin_unallowed_methods( - hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, method: str + hassio_client, aioclient_mock: AiohttpClientMocker, method: str ) -> None: """Test fetching normal path.""" resp = await hassio_client.post("/api/hassio/app/entrypoint.js") @@ -391,7 +379,7 @@ async def test_forward_request_admin_unallowed_methods( ], ) async def test_forward_request_admin_unallowed_paths( - hassio_client: TestClient, + hassio_client, aioclient_mock: AiohttpClientMocker, bad_path: str, expected_status: int, @@ -406,7 +394,7 @@ async def test_forward_request_admin_unallowed_paths( async def test_bad_gateway_when_cannot_find_supervisor( - hassio_client: TestClient, aioclient_mock: AiohttpClientMocker + hassio_client, aioclient_mock: AiohttpClientMocker ) -> None: """Test we get a bad gateway error if we can't find supervisor.""" 
aioclient_mock.get("http://127.0.0.1/app/entrypoint.js", exc=TimeoutError) @@ -416,8 +404,9 @@ async def test_bad_gateway_when_cannot_find_supervisor( async def test_backup_upload_headers( - hassio_client: TestClient, + hassio_client, aioclient_mock: AiohttpClientMocker, + caplog: pytest.LogCaptureFixture, mock_not_onboarded, ) -> None: """Test that we forward the full header for backup upload.""" @@ -438,7 +427,7 @@ async def test_backup_upload_headers( async def test_backup_download_headers( - hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, mock_not_onboarded + hassio_client, aioclient_mock: AiohttpClientMocker, mock_not_onboarded ) -> None: """Test that we forward the full header for backup download.""" content_disposition = "attachment; filename=test.tar" @@ -460,9 +449,7 @@ async def test_backup_download_headers( assert resp.headers["Content-Disposition"] == content_disposition -async def test_stream( - hassio_client: TestClient, aioclient_mock: AiohttpClientMocker -) -> None: +async def test_stream(hassio_client, aioclient_mock: AiohttpClientMocker) -> None: """Verify that the request is a stream.""" content_type = "multipart/form-data; boundary='--webkit'" aioclient_mock.post("http://127.0.0.1/backups/new/upload") @@ -475,7 +462,7 @@ async def test_stream( async def test_simple_get_no_stream( - hassio_client: TestClient, aioclient_mock: AiohttpClientMocker + hassio_client, aioclient_mock: AiohttpClientMocker ) -> None: """Verify that a simple GET request is not a stream.""" aioclient_mock.get("http://127.0.0.1/app/entrypoint.js") @@ -485,7 +472,7 @@ async def test_simple_get_no_stream( async def test_entrypoint_cache_control( - hassio_client: TestClient, aioclient_mock: AiohttpClientMocker + hassio_client, aioclient_mock: AiohttpClientMocker ) -> None: """Test that we return cache control for requests to the entrypoint only.""" aioclient_mock.get("http://127.0.0.1/app/entrypoint.js") @@ -502,70 +489,3 @@ async def test_entrypoint_cache_control( assert resp1.headers["Cache-Control"] == "no-store, max-age=0" assert "Cache-Control" not in resp2.headers - - -async def test_no_follow_logs_compress( - hassio_client: TestClient, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that we do not compress follow logs.""" - aioclient_mock.get("http://127.0.0.1/supervisor/logs/follow") - aioclient_mock.get("http://127.0.0.1/supervisor/logs") - - resp1 = await hassio_client.get("/api/hassio/supervisor/logs/follow") - resp2 = await hassio_client.get("/api/hassio/supervisor/logs") - - # Check we got right response - assert resp1.status == HTTPStatus.OK - assert resp1.headers.get("Content-Encoding") is None - - assert resp2.status == HTTPStatus.OK - assert resp2.headers.get("Content-Encoding") == "deflate" - - -async def test_forward_range_header_for_logs( - hassio_client: TestClient, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that we forward the Range header for logs.""" - aioclient_mock.get("http://127.0.0.1/host/logs") - aioclient_mock.get("http://127.0.0.1/host/logs/boots/-1") - aioclient_mock.get("http://127.0.0.1/host/logs/boots/-2/follow?lines=100") - aioclient_mock.get("http://127.0.0.1/addons/123abc_esphome/logs") - aioclient_mock.get("http://127.0.0.1/addons/123abc_esphome/logs/follow") - aioclient_mock.get("http://127.0.0.1/backups/1234abcd/download") - - test_range = ":-100:50" - - host_resp = await hassio_client.get( - "/api/hassio/host/logs", headers={"Range": test_range} - ) - host_resp2 = await hassio_client.get( - 
"/api/hassio/host/logs/boots/-1", headers={"Range": test_range} - ) - host_resp3 = await hassio_client.get( - "/api/hassio/host/logs/boots/-2/follow?lines=100", headers={"Range": test_range} - ) - addon_resp = await hassio_client.get( - "/api/hassio/addons/123abc_esphome/logs", headers={"Range": test_range} - ) - addon_resp2 = await hassio_client.get( - "/api/hassio/addons/123abc_esphome/logs/follow", headers={"Range": test_range} - ) - backup_resp = await hassio_client.get( - "/api/hassio/backups/1234abcd/download", headers={"Range": test_range} - ) - - assert host_resp.status == HTTPStatus.OK - assert host_resp2.status == HTTPStatus.OK - assert host_resp3.status == HTTPStatus.OK - assert addon_resp.status == HTTPStatus.OK - assert addon_resp2.status == HTTPStatus.OK - assert backup_resp.status == HTTPStatus.OK - - assert len(aioclient_mock.mock_calls) == 6 - - assert aioclient_mock.mock_calls[0][-1].get("Range") == test_range - assert aioclient_mock.mock_calls[1][-1].get("Range") == test_range - assert aioclient_mock.mock_calls[2][-1].get("Range") == test_range - assert aioclient_mock.mock_calls[3][-1].get("Range") == test_range - assert aioclient_mock.mock_calls[4][-1].get("Range") == test_range - assert aioclient_mock.mock_calls[5][-1].get("Range") is None diff --git a/tests/components/hassio/test_init.py b/tests/components/hassio/test_init.py index 5c11370ae74..0246b557ee4 100644 --- a/tests/components/hassio/test_init.py +++ b/tests/components/hassio/test_init.py @@ -1,42 +1,34 @@ """The tests for the hassio component.""" from datetime import timedelta -import logging import os from typing import Any -from unittest.mock import AsyncMock, patch +from unittest.mock import patch -from aiohasupervisor import SupervisorError -from aiohasupervisor.models import AddonsStats import pytest from voluptuous import Invalid from homeassistant.auth.const import GROUP_ID_ADMIN -from homeassistant.components import frontend, hassio +from homeassistant.components import frontend from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.hassio import ( ADDONS_COORDINATOR, DOMAIN, STORAGE_KEY, + async_get_addon_store_info, get_core_info, - get_supervisor_ip, hostname_from_addon_slug, - is_hassio as deprecated_is_hassio, + is_hassio, ) from homeassistant.components.hassio.const import REQUEST_REFRESH_DELAY +from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, issue_registry as ir -from homeassistant.helpers.hassio import is_hassio -from homeassistant.helpers.service_info.hassio import HassioServiceInfo +from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - import_and_test_deprecated_constant, -) +from tests.common import MockConfigEntry, async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"} @@ -60,17 +52,10 @@ def os_info(extra_os_info): @pytest.fixture(autouse=True) -def mock_all( - aioclient_mock: AiohttpClientMocker, - os_info: AsyncMock, - store_info: AsyncMock, - addon_info: AsyncMock, - addon_stats: AsyncMock, - addon_changelog: AsyncMock, - resolution_info: AsyncMock, -) -> None: +def 
mock_all(aioclient_mock: AiohttpClientMocker, os_info) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) + aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -83,6 +68,13 @@ def mock_all( }, }, ) + aioclient_mock.get( + "http://127.0.0.1/store", + json={ + "result": "ok", + "data": {"addons": [], "repositories": []}, + }, + ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -170,41 +162,81 @@ def mock_all( }, }, ) - - async def mock_addon_stats(addon: str) -> AddonsStats: - """Mock addon stats for test and test2.""" - if addon in {"test2", "test3"}: - return AddonsStats( - cpu_percent=0.8, - memory_usage=51941376, - memory_limit=3977146368, - memory_percent=1.31, - network_rx=31338284, - network_tx=15692900, - blk_read=740077568, - blk_write=6004736, - ) - return AddonsStats( - cpu_percent=0.99, - memory_usage=182611968, - memory_limit=3977146368, - memory_percent=4.59, - network_rx=362570232, - network_tx=82374138, - blk_read=46010945536, - blk_write=15051526144, - ) - - addon_stats.side_effect = mock_addon_stats - - def mock_addon_info(slug: str): - addon_info.return_value.auto_update = slug == "test" - return addon_info.return_value - - addon_info.side_effect = mock_addon_info + aioclient_mock.get( + "http://127.0.0.1/addons/test/stats", + json={ + "result": "ok", + "data": { + "cpu_percent": 0.99, + "memory_usage": 182611968, + "memory_limit": 3977146368, + "memory_percent": 4.59, + "network_rx": 362570232, + "network_tx": 82374138, + "blk_read": 46010945536, + "blk_write": 15051526144, + }, + }, + ) + aioclient_mock.get( + "http://127.0.0.1/addons/test2/stats", + json={ + "result": "ok", + "data": { + "cpu_percent": 0.8, + "memory_usage": 51941376, + "memory_limit": 3977146368, + "memory_percent": 1.31, + "network_rx": 31338284, + "network_tx": 15692900, + "blk_read": 740077568, + "blk_write": 6004736, + }, + }, + ) + aioclient_mock.get( + "http://127.0.0.1/addons/test3/stats", + json={ + "result": "ok", + "data": { + "cpu_percent": 0.8, + "memory_usage": 51941376, + "memory_limit": 3977146368, + "memory_percent": 1.31, + "network_rx": 31338284, + "network_tx": 15692900, + "blk_read": 740077568, + "blk_write": 6004736, + }, + }, + ) + aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") + aioclient_mock.get( + "http://127.0.0.1/addons/test/info", + json={"result": "ok", "data": {"auto_update": True}}, + ) + aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") + aioclient_mock.get( + "http://127.0.0.1/addons/test2/info", + json={"result": "ok", "data": {"auto_update": False}}, + ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) + aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -218,9 +250,7 @@ def mock_all( async def test_setup_api_ping( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - supervisor_client: AsyncMock, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" with patch.dict(os.environ, 
MOCK_ENVIRON): @@ -228,7 +258,7 @@ async def test_setup_api_ping( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 + assert aioclient_mock.call_count == 20 assert get_core_info(hass)["version_latest"] == "1.0.0" assert is_hassio(hass) @@ -263,9 +293,7 @@ async def test_setup_api_panel( async def test_setup_api_push_api_data( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - supervisor_client: AsyncMock, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): @@ -275,16 +303,14 @@ async def test_setup_api_push_api_data( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 - assert not aioclient_mock.mock_calls[0][2]["ssl"] - assert aioclient_mock.mock_calls[0][2]["port"] == 9999 - assert "watchdog" not in aioclient_mock.mock_calls[0][2] + assert aioclient_mock.call_count == 20 + assert not aioclient_mock.mock_calls[1][2]["ssl"] + assert aioclient_mock.mock_calls[1][2]["port"] == 9999 + assert "watchdog" not in aioclient_mock.mock_calls[1][2] async def test_setup_api_push_api_data_server_host( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - supervisor_client: AsyncMock, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): @@ -296,17 +322,16 @@ async def test_setup_api_push_api_data_server_host( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 - assert not aioclient_mock.mock_calls[0][2]["ssl"] - assert aioclient_mock.mock_calls[0][2]["port"] == 9999 - assert not aioclient_mock.mock_calls[0][2]["watchdog"] + assert aioclient_mock.call_count == 20 + assert not aioclient_mock.mock_calls[1][2]["ssl"] + assert aioclient_mock.mock_calls[1][2]["port"] == 9999 + assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_storage: dict[str, Any], - supervisor_client: AsyncMock, ) -> None: """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): @@ -314,10 +339,10 @@ async def test_setup_api_push_api_data_default( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 - assert not aioclient_mock.mock_calls[0][2]["ssl"] - assert aioclient_mock.mock_calls[0][2]["port"] == 8123 - refresh_token = aioclient_mock.mock_calls[0][2]["refresh_token"] + assert aioclient_mock.call_count == 20 + assert not aioclient_mock.mock_calls[1][2]["ssl"] + assert aioclient_mock.mock_calls[1][2]["port"] == 8123 + refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) @@ -384,7 +409,6 @@ async def test_setup_api_existing_hassio_user( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_storage: dict[str, Any], - supervisor_client: AsyncMock, ) -> None: """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") @@ -395,16 +419,14 @@ async def test_setup_api_existing_hassio_user( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 - 
assert not aioclient_mock.mock_calls[0][2]["ssl"] - assert aioclient_mock.mock_calls[0][2]["port"] == 8123 - assert aioclient_mock.mock_calls[0][2]["refresh_token"] == token.token + assert aioclient_mock.call_count == 20 + assert not aioclient_mock.mock_calls[1][2]["ssl"] + assert aioclient_mock.mock_calls[1][2]["port"] == 8123 + assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - supervisor_client: AsyncMock, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API push default data.""" hass.config.time_zone = "testzone" @@ -414,8 +436,8 @@ async def test_setup_core_push_timezone( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 - assert aioclient_mock.mock_calls[1][2]["timezone"] == "testzone" + assert aioclient_mock.call_count == 20 + assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" with patch("homeassistant.util.dt.set_default_time_zone"): await hass.config.async_update(time_zone="America/New_York") @@ -424,9 +446,7 @@ async def test_setup_core_push_timezone( async def test_setup_hassio_no_additional_data( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - supervisor_client: AsyncMock, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API push default data.""" with ( @@ -437,7 +457,7 @@ async def test_setup_hassio_no_additional_data( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 + assert aioclient_mock.call_count == 20 assert aioclient_mock.mock_calls[-1][3]["Authorization"] == "Bearer 123456" @@ -449,13 +469,16 @@ async def test_fail_setup_without_environ_var(hass: HomeAssistant) -> None: async def test_warn_when_cannot_connect( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - supervisor_is_connected: AsyncMock, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Fail warn when we cannot connect.""" - supervisor_is_connected.side_effect = SupervisorError - with patch.dict(os.environ, MOCK_ENVIRON): + with ( + patch.dict(os.environ, MOCK_ENVIRON), + patch( + "homeassistant.components.hassio.HassIO.is_connected", + return_value=None, + ), + ): result = await async_setup_component(hass, "hassio", {}) assert result @@ -463,8 +486,7 @@ async def test_warn_when_cannot_connect( assert "Not connected with the supervisor / system too busy!" 
in caplog.text -@pytest.mark.usefixtures("hassio_env") -async def test_service_register(hass: HomeAssistant) -> None: +async def test_service_register(hassio_env, hass: HomeAssistant) -> None: """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") @@ -486,14 +508,15 @@ async def test_service_calls( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - supervisor_client: AsyncMock, - addon_installed: AsyncMock, - supervisor_is_connected: AsyncMock, - issue_registry: ir.IssueRegistry, ) -> None: """Call service and check the API calls behind that.""" - supervisor_is_connected.side_effect = SupervisorError - with patch.dict(os.environ, MOCK_ENVIRON): + with ( + patch.dict(os.environ, MOCK_ENVIRON), + patch( + "homeassistant.components.hassio.HassIO.is_connected", + return_value=None, + ), + ): assert await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() @@ -517,20 +540,19 @@ async def test_service_calls( await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call("hassio", "addon_update", {"addon": "test"}) - assert (DOMAIN, "update_service_deprecated") in issue_registry.issues await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 25 + assert aioclient_mock.call_count == 24 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 27 + assert aioclient_mock.call_count == 26 await hass.services.async_call("hassio", "backup_full", {}) await hass.services.async_call( @@ -545,7 +567,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 29 + assert aioclient_mock.call_count == 28 assert aioclient_mock.mock_calls[-1][2] == { "name": "2021-11-13 03:48:00", "homeassistant": True, @@ -570,7 +592,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 31 + assert aioclient_mock.call_count == 30 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], @@ -589,7 +611,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 32 + assert aioclient_mock.call_count == 31 assert aioclient_mock.mock_calls[-1][2] == { "name": "backup_name", "location": "backup_share", @@ -605,7 +627,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 33 + assert aioclient_mock.call_count == 32 assert aioclient_mock.mock_calls[-1][2] == { "name": "2021-11-13 03:48:00", "location": None, @@ -624,7 +646,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 35 + assert aioclient_mock.call_count == 34 assert aioclient_mock.mock_calls[-1][2] == { "name": "2021-11-13 11:48:00", "location": None, @@ -634,11 +656,15 
@@ async def test_service_calls( async def test_invalid_service_calls( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - supervisor_is_connected: AsyncMock, ) -> None: """Call service with invalid input and check that it raises.""" - supervisor_is_connected.side_effect = SupervisorError - with patch.dict(os.environ, MOCK_ENVIRON): + with ( + patch.dict(os.environ, MOCK_ENVIRON), + patch( + "homeassistant.components.hassio.HassIO.is_connected", + return_value=None, + ), + ): assert await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() @@ -655,7 +681,6 @@ async def test_invalid_service_calls( async def test_addon_service_call_with_complex_slug( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - supervisor_is_connected: AsyncMock, ) -> None: """Addon slugs can have ., - and _, confirm that passes validation.""" supervisor_mock_data = { @@ -675,9 +700,12 @@ async def test_addon_service_call_with_complex_slug( }, ], } - supervisor_is_connected.side_effect = SupervisorError with ( patch.dict(os.environ, MOCK_ENVIRON), + patch( + "homeassistant.components.hassio.HassIO.is_connected", + return_value=None, + ), patch( "homeassistant.components.hassio.HassIO.get_supervisor_info", return_value=supervisor_mock_data, @@ -689,11 +717,8 @@ async def test_addon_service_call_with_complex_slug( await hass.services.async_call("hassio", "addon_start", {"addon": "test.a_1-2"}) -@pytest.mark.usefixtures("hassio_env") async def test_service_calls_core( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - supervisor_client: AsyncMock, + hassio_env, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -705,12 +730,12 @@ async def test_service_calls_core( await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 6 + assert aioclient_mock.call_count == 5 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 6 + assert aioclient_mock.call_count == 5 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None @@ -719,10 +744,9 @@ async def test_service_calls_core( await hass.async_block_till_done() assert mock_check_config.called - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 7 + assert aioclient_mock.call_count == 6 -@pytest.mark.usefixtures("addon_installed") async def test_entry_load_and_unload(hass: HomeAssistant) -> None: """Test loading and unloading config entry.""" with patch.dict(os.environ, MOCK_ENVIRON): @@ -749,7 +773,6 @@ async def test_migration_off_hassio(hass: HomeAssistant) -> None: assert hass.config_entries.async_entries(DOMAIN) == [] -@pytest.mark.usefixtures("addon_installed") async def test_device_registry_calls( hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: @@ -902,110 +925,130 @@ async def test_device_registry_calls( assert len(device_registry.devices) == 5 -@pytest.mark.usefixtures("addon_installed") async def test_coordinator_updates( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, supervisor_client: AsyncMock + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test coordinator updates.""" await async_setup_component(hass, "homeassistant", {}) - with patch.dict(os.environ, 
MOCK_ENVIRON): + with ( + patch.dict(os.environ, MOCK_ENVIRON), + patch( + "homeassistant.components.hassio.HassIO.refresh_updates" + ) as refresh_updates_mock, + ): config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() # Initial refresh, no update refresh call - supervisor_client.refresh_updates.assert_not_called() + assert refresh_updates_mock.call_count == 0 - async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) - await hass.async_block_till_done() + with patch( + "homeassistant.components.hassio.HassIO.refresh_updates", + ) as refresh_updates_mock: + async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) + await hass.async_block_till_done() - # Scheduled refresh, no update refresh call - supervisor_client.refresh_updates.assert_not_called() + # Scheduled refresh, no update refresh call + assert refresh_updates_mock.call_count == 0 - await hass.services.async_call( - "homeassistant", - "update_entity", - { - "entity_id": [ - "update.home_assistant_core_update", - "update.home_assistant_supervisor_update", - ] - }, - blocking=True, - ) + with patch( + "homeassistant.components.hassio.HassIO.refresh_updates", + ) as refresh_updates_mock: + await hass.services.async_call( + "homeassistant", + "update_entity", + { + "entity_id": [ + "update.home_assistant_core_update", + "update.home_assistant_supervisor_update", + ] + }, + blocking=True, + ) - # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer - supervisor_client.refresh_updates.assert_not_called() - async_fire_time_changed( - hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) - ) - await hass.async_block_till_done() - supervisor_client.refresh_updates.assert_called_once() + # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer + assert refresh_updates_mock.call_count == 0 + async_fire_time_changed( + hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) + ) + await hass.async_block_till_done() + assert refresh_updates_mock.call_count == 1 - supervisor_client.refresh_updates.reset_mock() - supervisor_client.refresh_updates.side_effect = SupervisorError("Unknown") - await hass.services.async_call( - "homeassistant", - "update_entity", - { - "entity_id": [ - "update.home_assistant_core_update", - "update.home_assistant_supervisor_update", - ] - }, - blocking=True, - ) - # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer - async_fire_time_changed( - hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) - ) - await hass.async_block_till_done() - supervisor_client.refresh_updates.assert_called_once() - assert "Error on Supervisor API: Unknown" in caplog.text + with patch( + "homeassistant.components.hassio.HassIO.refresh_updates", + side_effect=HassioAPIError("Unknown"), + ) as refresh_updates_mock: + await hass.services.async_call( + "homeassistant", + "update_entity", + { + "entity_id": [ + "update.home_assistant_core_update", + "update.home_assistant_supervisor_update", + ] + }, + blocking=True, + ) + # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer + async_fire_time_changed( + hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) + ) + await hass.async_block_till_done() + assert refresh_updates_mock.call_count == 1 + assert "Error on Supervisor API: Unknown" in caplog.text -@pytest.mark.usefixtures("entity_registry_enabled_by_default", "addon_installed") 
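# --- Editor's annotation (illustrative sketch, not part of the patch) ---------
# The test_coordinator_updates hunks above assert that HassIO.refresh_updates is
# only invoked once the REQUEST_REFRESH_DELAY cooldown has elapsed (the
# debouncer swallows earlier requests, hence the call_count == 0 checks). A
# minimal, self-contained model of that cooldown behaviour; CooldownRefresher
# and its API are hypothetical and only mirror the assertions above.
import asyncio
import time


class CooldownRefresher:
    """Run the wrapped coroutine at most once per `cooldown` seconds."""

    def __init__(self, func, cooldown: float) -> None:
        self._func = func
        self._cooldown = cooldown
        self._last_call = float("-inf")

    async def async_refresh(self) -> None:
        now = time.monotonic()
        if now - self._last_call < self._cooldown:
            # Still inside the cooldown window: skip, matching the
            # `assert refresh_updates_mock.call_count == 0` checks above.
            return
        self._last_call = now
        await self._func()


async def _demo() -> None:
    calls: list[float] = []

    async def fake_refresh_updates() -> None:
        calls.append(time.monotonic())

    refresher = CooldownRefresher(fake_refresh_updates, cooldown=0.1)
    await refresher.async_refresh()  # first call goes through
    await refresher.async_refresh()  # suppressed by the cooldown
    await asyncio.sleep(0.11)
    await refresher.async_refresh()  # cooldown elapsed, goes through again
    assert len(calls) == 2


if __name__ == "__main__":
    asyncio.run(_demo())
# ------------------------------------------------------------------------------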
+@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_coordinator_updates_stats_entities_enabled( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - supervisor_client: AsyncMock, ) -> None: """Test coordinator updates with stats entities enabled.""" await async_setup_component(hass, "homeassistant", {}) - with patch.dict(os.environ, MOCK_ENVIRON): + with ( + patch.dict(os.environ, MOCK_ENVIRON), + patch( + "homeassistant.components.hassio.HassIO.refresh_updates" + ) as refresh_updates_mock, + ): config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() # Initial refresh without stats - supervisor_client.refresh_updates.assert_not_called() + assert refresh_updates_mock.call_count == 0 # Refresh with stats once we know which ones are needed async_fire_time_changed( hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) ) await hass.async_block_till_done() + assert refresh_updates_mock.call_count == 1 - supervisor_client.refresh_updates.assert_called_once() + with patch( + "homeassistant.components.hassio.HassIO.refresh_updates", + ) as refresh_updates_mock: + async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) + await hass.async_block_till_done() + assert refresh_updates_mock.call_count == 0 - supervisor_client.refresh_updates.reset_mock() - async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) - await hass.async_block_till_done() - supervisor_client.refresh_updates.assert_not_called() - - await hass.services.async_call( - "homeassistant", - "update_entity", - { - "entity_id": [ - "update.home_assistant_core_update", - "update.home_assistant_supervisor_update", - ] - }, - blocking=True, - ) - supervisor_client.refresh_updates.assert_not_called() + with patch( + "homeassistant.components.hassio.HassIO.refresh_updates", + ) as refresh_updates_mock: + await hass.services.async_call( + "homeassistant", + "update_entity", + { + "entity_id": [ + "update.home_assistant_core_update", + "update.home_assistant_supervisor_update", + ] + }, + blocking=True, + ) + assert refresh_updates_mock.call_count == 0 # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer async_fire_time_changed( @@ -1013,26 +1056,28 @@ async def test_coordinator_updates_stats_entities_enabled( ) await hass.async_block_till_done() - supervisor_client.refresh_updates.reset_mock() - supervisor_client.refresh_updates.side_effect = SupervisorError("Unknown") - await hass.services.async_call( - "homeassistant", - "update_entity", - { - "entity_id": [ - "update.home_assistant_core_update", - "update.home_assistant_supervisor_update", - ] - }, - blocking=True, - ) - # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer - async_fire_time_changed( - hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) - ) - await hass.async_block_till_done() - supervisor_client.refresh_updates.assert_called_once() - assert "Error on Supervisor API: Unknown" in caplog.text + with patch( + "homeassistant.components.hassio.HassIO.refresh_updates", + side_effect=HassioAPIError("Unknown"), + ) as refresh_updates_mock: + await hass.services.async_call( + "homeassistant", + "update_entity", + { + "entity_id": [ + "update.home_assistant_core_update", + "update.home_assistant_supervisor_update", + ] + }, + blocking=True, + ) + # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer + async_fire_time_changed( + hass, 
dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) + ) + await hass.async_block_till_done() + assert refresh_updates_mock.call_count == 1 + assert "Error on Supervisor API: Unknown" in caplog.text @pytest.mark.parametrize( @@ -1052,10 +1097,7 @@ async def test_coordinator_updates_stats_entities_enabled( ], ) async def test_setup_hardware_integration( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - supervisor_client: AsyncMock, - integration, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, integration ) -> None: """Test setup initiates hardware integration.""" @@ -1070,10 +1112,25 @@ async def test_setup_hardware_integration( await hass.async_block_till_done(wait_background_tasks=True) assert result - assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 + assert aioclient_mock.call_count == 20 assert len(mock_setup_entry.mock_calls) == 1 +async def test_get_store_addon_info( + hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker +) -> None: + """Test get store add-on info from Supervisor API.""" + aioclient_mock.clear_requests() + aioclient_mock.get( + "http://127.0.0.1/store/addons/test", + json={"result": "ok", "data": {"name": "bla"}}, + ) + + data = await async_get_addon_store_info(hass, "test") + assert data["name"] == "bla" + assert aioclient_mock.call_count == 1 + + def test_hostname_from_addon_slug() -> None: """Test hostname_from_addon_slug.""" assert hostname_from_addon_slug("mqtt") == "mqtt" @@ -1081,62 +1138,3 @@ def test_hostname_from_addon_slug() -> None: hostname_from_addon_slug("core_silabs_multiprotocol") == "core-silabs-multiprotocol" ) - - -def test_deprecated_function_is_hassio( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test calling deprecated_is_hassio function will create log entry.""" - - deprecated_is_hassio(hass) - assert caplog.record_tuples == [ - ( - "homeassistant.components.hassio", - logging.WARNING, - "is_hassio is a deprecated function which will be removed in HA Core 2025.11. Use homeassistant.helpers.hassio.is_hassio instead", - ) - ] - - -def test_deprecated_function_get_supervisor_ip( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test calling get_supervisor_ip function will create log entry.""" - - get_supervisor_ip() - assert caplog.record_tuples == [ - ( - "homeassistant.helpers.hassio", - logging.WARNING, - "get_supervisor_ip is a deprecated function which will be removed in HA Core 2025.11. 
Use homeassistant.helpers.hassio.get_supervisor_ip instead", - ) - ] - - -@pytest.mark.parametrize( - ("constant_name", "replacement_name", "replacement"), - [ - ( - "HassioServiceInfo", - "homeassistant.helpers.service_info.hassio.HassioServiceInfo", - HassioServiceInfo, - ), - ], -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - constant_name: str, - replacement_name: str, - replacement: Any, -) -> None: - """Test deprecated automation constants.""" - import_and_test_deprecated_constant( - caplog, - hassio, - constant_name, - replacement_name, - replacement, - "2025.11", - ) diff --git a/tests/components/hassio/test_issues.py b/tests/components/hassio/test_issues.py index 7ce11a18fb5..ff0e4a8dd92 100644 --- a/tests/components/hassio/test_issues.py +++ b/tests/components/hassio/test_issues.py @@ -2,30 +2,12 @@ from __future__ import annotations -from collections.abc import Generator from datetime import timedelta +from http import HTTPStatus import os from typing import Any -from unittest.mock import ANY, AsyncMock, patch -from uuid import UUID, uuid4 +from unittest.mock import ANY, patch -from aiohasupervisor import ( - SupervisorBadRequestError, - SupervisorError, - SupervisorTimeoutError, -) -from aiohasupervisor.models import ( - Check, - CheckType, - ContextType, - Issue, - IssueType, - ResolutionInfo, - Suggestion, - SuggestionType, - UnhealthyReason, - UnsupportedReason, -) from freezegun.api import FrozenDateTimeFactory import pytest @@ -35,63 +17,70 @@ from homeassistant.setup import async_setup_component from .test_init import MOCK_ENVIRON +from tests.test_util.aiohttp import AiohttpClientMocker, AiohttpClientMockResponse from tests.typing import WebSocketGenerator @pytest.fixture(autouse=True) -async def setup_repairs(hass: HomeAssistant) -> None: +async def setup_repairs(hass): """Set up the repairs integration.""" assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) @pytest.fixture(autouse=True) -def fixture_supervisor_environ() -> Generator[None]: +async def fixture_supervisor_environ(): """Mock os environ for supervisor.""" with patch.dict(os.environ, MOCK_ENVIRON): yield def mock_resolution_info( - supervisor_client: AsyncMock, - unsupported: list[UnsupportedReason] | None = None, - unhealthy: list[UnhealthyReason] | None = None, - issues: list[Issue] | None = None, - suggestions_by_issue: dict[UUID, list[Suggestion]] | None = None, - suggestion_result: SupervisorError | None = None, -) -> None: + aioclient_mock: AiohttpClientMocker, + unsupported: list[str] | None = None, + unhealthy: list[str] | None = None, + issues: list[dict[str, str]] | None = None, + suggestion_result: str = "ok", +): """Mock resolution/info endpoint with unsupported/unhealthy reasons and/or issues.""" - supervisor_client.resolution.info.return_value = ResolutionInfo( - unsupported=unsupported or [], - unhealthy=unhealthy or [], - issues=issues or [], - suggestions=[ - suggestion - for issue_list in suggestions_by_issue.values() - for suggestion in issue_list - ] - if suggestions_by_issue - else [], - checks=[ - Check(enabled=True, slug=CheckType.SUPERVISOR_TRUST), - Check(enabled=True, slug=CheckType.FREE_SPACE), - ], + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": unsupported or [], + "unhealthy": unhealthy or [], + "suggestions": [], + "issues": [ + {k: v for k, v in issue.items() if k != "suggestions"} + for issue in issues + ] + if issues + else [], + "checks": [ + {"enabled": 
True, "slug": "supervisor_trust"}, + {"enabled": True, "slug": "free_space"}, + ], + }, + }, ) - if suggestions_by_issue: - - async def mock_suggestions_for_issue(uuid: UUID) -> list[Suggestion]: - """Mock of suggestions for issue api.""" - return suggestions_by_issue.get(uuid, []) - - supervisor_client.resolution.suggestions_for_issue.side_effect = ( - mock_suggestions_for_issue - ) - supervisor_client.resolution.apply_suggestion.side_effect = suggestion_result + if issues: + suggestions_by_issue = { + issue["uuid"]: issue.get("suggestions", []) for issue in issues + } + for issue_uuid, suggestions in suggestions_by_issue.items(): + aioclient_mock.get( + f"http://127.0.0.1/resolution/issue/{issue_uuid}/suggestions", + json={"result": "ok", "data": {"suggestions": suggestions}}, + ) + for suggestion in suggestions: + aioclient_mock.post( + f"http://127.0.0.1/resolution/suggestion/{suggestion['uuid']}", + json={"result": suggestion_result}, + ) -def assert_repair_in_list( - issues: list[dict[str, Any]], unhealthy: bool, reason: str -) -> None: +def assert_repair_in_list(issues: list[dict[str, Any]], unhealthy: bool, reason: str): """Assert repair for unhealthy/unsupported in list.""" repair_type = "unhealthy" if unhealthy else "unsupported" assert { @@ -119,7 +108,7 @@ def assert_issue_repair_in_list( *, reference: str | None = None, placeholders: dict[str, str] | None = None, -) -> None: +): """Assert repair for unhealthy/unsupported in list.""" if reference: placeholders = (placeholders or {}) | {"reference": reference} @@ -139,16 +128,14 @@ def assert_issue_repair_in_list( } in issues -@pytest.mark.usefixtures("all_setup_requests") async def test_unhealthy_issues( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + all_setup_requests, ) -> None: """Test issues added for unhealthy systems.""" - mock_resolution_info( - supervisor_client, unhealthy=[UnhealthyReason.DOCKER, UnhealthyReason.SETUP] - ) + mock_resolution_info(aioclient_mock, unhealthy=["docker", "setup"]) result = await async_setup_component(hass, "hassio", {}) assert result @@ -163,17 +150,14 @@ async def test_unhealthy_issues( assert_repair_in_list(msg["result"]["issues"], unhealthy=True, reason="setup") -@pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + all_setup_requests, ) -> None: """Test issues added for unsupported systems.""" - mock_resolution_info( - supervisor_client, - unsupported=[UnsupportedReason.CONTENT_TRUST, UnsupportedReason.OS], - ) + mock_resolution_info(aioclient_mock, unsupported=["content_trust", "os"]) result = await async_setup_component(hass, "hassio", {}) assert result @@ -190,14 +174,14 @@ async def test_unsupported_issues( assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") -@pytest.mark.usefixtures("all_setup_requests") async def test_unhealthy_issues_add_remove( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + all_setup_requests, ) -> None: """Test unhealthy issues added and removed from dispatches.""" - mock_resolution_info(supervisor_client) + mock_resolution_info(aioclient_mock) result = await async_setup_component(hass, "hassio", {}) assert result @@ -247,14 +231,14 @@ async def test_unhealthy_issues_add_remove( assert msg["result"] == {"issues": []} 
-@pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues_add_remove( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + all_setup_requests, ) -> None: """Test unsupported issues added and removed from dispatches.""" - mock_resolution_info(supervisor_client) + mock_resolution_info(aioclient_mock) result = await async_setup_component(hass, "hassio", {}) assert result @@ -304,24 +288,24 @@ async def test_unsupported_issues_add_remove( assert msg["result"] == {"issues": []} -@pytest.mark.usefixtures("all_setup_requests") async def test_reset_issues_supervisor_restart( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + all_setup_requests, ) -> None: """All issues reset on supervisor restart.""" mock_resolution_info( - supervisor_client, - unsupported=[UnsupportedReason.OS], - unhealthy=[UnhealthyReason.DOCKER], + aioclient_mock, + unsupported=["os"], + unhealthy=["docker"], issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - reference=None, - uuid=(uuid := uuid4()), - ) + { + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": None, + } ], ) @@ -338,14 +322,15 @@ async def test_reset_issues_supervisor_restart( assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") assert_issue_repair_in_list( msg["result"]["issues"], - uuid=uuid.hex, + uuid="1234", context="system", type_="reboot_required", fixable=False, reference=None, ) - mock_resolution_info(supervisor_client) + aioclient_mock.clear_requests() + mock_resolution_info(aioclient_mock) await client.send_json( { "id": 2, @@ -367,18 +352,14 @@ async def test_reset_issues_supervisor_restart( assert msg["result"] == {"issues": []} -@pytest.mark.usefixtures("all_setup_requests") async def test_reasons_added_and_removed( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + all_setup_requests, ) -> None: """Test an unsupported/unhealthy reasons being added and removed at same time.""" - mock_resolution_info( - supervisor_client, - unsupported=[UnsupportedReason.OS], - unhealthy=[UnhealthyReason.DOCKER], - ) + mock_resolution_info(aioclient_mock, unsupported=["os"], unhealthy=["docker"]) result = await async_setup_component(hass, "hassio", {}) assert result @@ -392,10 +373,9 @@ async def test_reasons_added_and_removed( assert_repair_in_list(msg["result"]["issues"], unhealthy=True, reason="docker") assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") + aioclient_mock.clear_requests() mock_resolution_info( - supervisor_client, - unsupported=[UnsupportedReason.CONTENT_TRUST], - unhealthy=[UnhealthyReason.SETUP], + aioclient_mock, unsupported=["content_trust"], unhealthy=["setup"] ) await client.send_json( { @@ -422,17 +402,15 @@ async def test_reasons_added_and_removed( ) -@pytest.mark.usefixtures("all_setup_requests") async def test_ignored_unsupported_skipped( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + all_setup_requests, ) -> None: """Unsupported reasons which have an identical unhealthy reason are ignored.""" mock_resolution_info( - supervisor_client, - unsupported=[UnsupportedReason.PRIVILEGED], - unhealthy=[UnhealthyReason.PRIVILEGED], + aioclient_mock, unsupported=["privileged"], 
unhealthy=["privileged"] ) result = await async_setup_component(hass, "hassio", {}) @@ -447,17 +425,15 @@ async def test_ignored_unsupported_skipped( assert_repair_in_list(msg["result"]["issues"], unhealthy=True, reason="privileged") -@pytest.mark.usefixtures("all_setup_requests") async def test_new_unsupported_unhealthy_reason( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + all_setup_requests, ) -> None: """New unsupported/unhealthy reasons result in a generic repair until next core update.""" mock_resolution_info( - supervisor_client, - unsupported=["fake_unsupported"], - unhealthy=["fake_unhealthy"], + aioclient_mock, unsupported=["fake_unsupported"], unhealthy=["fake_unhealthy"] ) result = await async_setup_component(hass, "hassio", {}) @@ -499,46 +475,43 @@ async def test_new_unsupported_unhealthy_reason( } in msg["result"]["issues"] -@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + all_setup_requests, ) -> None: """Test repairs added for supervisor issue.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - reference=None, - uuid=(uuid_issue1 := uuid4()), - ), - Issue( - type=IssueType.MULTIPLE_DATA_DISKS, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=(uuid_issue2 := uuid4()), - ), - Issue( - type="should_not_be_repair", - context=ContextType.OS, - reference=None, - uuid=uuid4(), - ), + { + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": None, + }, + { + "uuid": "1235", + "type": "multiple_data_disks", + "context": "system", + "reference": "/dev/sda1", + "suggestions": [ + { + "uuid": "1236", + "type": "rename_data_disk", + "context": "system", + "reference": "/dev/sda1", + } + ], + }, + { + "uuid": "1237", + "type": "should_not_be_repair", + "context": "os", + "reference": None, + }, ], - suggestions_by_issue={ - uuid_issue2: [ - Suggestion( - type=SuggestionType.RENAME_DATA_DISK, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=uuid4(), - auto=False, - ) - ] - }, ) result = await async_setup_component(hass, "hassio", {}) @@ -552,7 +525,7 @@ async def test_supervisor_issues( assert len(msg["result"]["issues"]) == 2 assert_issue_repair_in_list( msg["result"]["issues"], - uuid=uuid_issue1.hex, + uuid="1234", context="system", type_="reboot_required", fixable=False, @@ -560,7 +533,7 @@ async def test_supervisor_issues( ) assert_issue_repair_in_list( msg["result"]["issues"], - uuid=uuid_issue2.hex, + uuid="1235", context="system", type_="multiple_data_disks", fixable=True, @@ -568,36 +541,64 @@ async def test_supervisor_issues( ) -@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_initial_failure( hass: HomeAssistant, - resolution_info: AsyncMock, - resolution_suggestions_for_issue: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, + all_setup_requests, ) -> None: """Test issues manager retries after initial update failure.""" - resolution_info.side_effect = [ - SupervisorBadRequestError("System is not ready with state: setup"), - ResolutionInfo( - unsupported=[], - unhealthy=[], - suggestions=[], - issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - 
reference=None, - uuid=uuid4(), - ) - ], - checks=[ - Check(enabled=True, slug=CheckType.SUPERVISOR_TRUST), - Check(enabled=True, slug=CheckType.FREE_SPACE), - ], + responses = [ + AiohttpClientMockResponse( + method="get", + url="http://127.0.0.1/resolution/info", + status=HTTPStatus.BAD_REQUEST, + json={ + "result": "error", + "message": "System is not ready with state: setup", + }, + ), + AiohttpClientMockResponse( + method="get", + url="http://127.0.0.1/resolution/info", + status=HTTPStatus.OK, + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [ + { + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": None, + }, + ], + "checks": [ + {"enabled": True, "slug": "supervisor_trust"}, + {"enabled": True, "slug": "free_space"}, + ], + }, + }, ), ] + async def mock_responses(*args): + nonlocal responses + return responses.pop(0) + + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + side_effect=mock_responses, + ) + aioclient_mock.get( + "http://127.0.0.1/resolution/issue/1234/suggestions", + json={"result": "ok", "data": {"suggestions": []}}, + ) + with patch("homeassistant.components.hassio.issues.REQUEST_REFRESH_DELAY", new=0.1): result = await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() @@ -618,14 +619,14 @@ async def test_supervisor_issues_initial_failure( assert len(msg["result"]["issues"]) == 1 -@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_add_remove( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + all_setup_requests, ) -> None: """Test supervisor issues added and removed from dispatches.""" - mock_resolution_info(supervisor_client) + mock_resolution_info(aioclient_mock) result = await async_setup_component(hass, "hassio", {}) assert result @@ -639,7 +640,7 @@ async def test_supervisor_issues_add_remove( "data": { "event": "issue_changed", "data": { - "uuid": (issue_uuid := uuid4().hex), + "uuid": "1234", "type": "reboot_required", "context": "system", "reference": None, @@ -657,7 +658,7 @@ async def test_supervisor_issues_add_remove( assert len(msg["result"]["issues"]) == 1 assert_issue_repair_in_list( msg["result"]["issues"], - uuid=issue_uuid, + uuid="1234", context="system", type_="reboot_required", fixable=False, @@ -671,13 +672,13 @@ async def test_supervisor_issues_add_remove( "data": { "event": "issue_changed", "data": { - "uuid": issue_uuid, + "uuid": "1234", "type": "reboot_required", "context": "system", "reference": None, "suggestions": [ { - "uuid": uuid4().hex, + "uuid": "1235", "type": "execute_reboot", "context": "system", "reference": None, @@ -697,7 +698,7 @@ async def test_supervisor_issues_add_remove( assert len(msg["result"]["issues"]) == 1 assert_issue_repair_in_list( msg["result"]["issues"], - uuid=issue_uuid, + uuid="1234", context="system", type_="reboot_required", fixable=True, @@ -711,7 +712,7 @@ async def test_supervisor_issues_add_remove( "data": { "event": "issue_removed", "data": { - "uuid": issue_uuid, + "uuid": "1234", "type": "reboot_required", "context": "system", "reference": None, @@ -729,26 +730,40 @@ async def test_supervisor_issues_add_remove( assert msg["result"] == {"issues": []} -@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_suggestions_fail( hass: HomeAssistant, - supervisor_client: AsyncMock, - resolution_suggestions_for_issue: AsyncMock, + aioclient_mock: 
AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + all_setup_requests, ) -> None: """Test failing to get suggestions for issue skips it.""" - mock_resolution_info( - supervisor_client, - issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - reference=None, - uuid=uuid4(), - ) - ], + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [ + { + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": None, + } + ], + "checks": [ + {"enabled": True, "slug": "supervisor_trust"}, + {"enabled": True, "slug": "free_space"}, + ], + }, + }, + ) + aioclient_mock.get( + "http://127.0.0.1/resolution/issue/1234/suggestions", + exc=TimeoutError(), ) - resolution_suggestions_for_issue.side_effect = SupervisorTimeoutError result = await async_setup_component(hass, "hassio", {}) assert result @@ -761,14 +776,14 @@ async def test_supervisor_issues_suggestions_fail( assert len(msg["result"]["issues"]) == 0 -@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_remove_missing_issue_without_error( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + all_setup_requests, ) -> None: """Test HA skips message to remove issue that it didn't know about (sync issue).""" - mock_resolution_info(supervisor_client) + mock_resolution_info(aioclient_mock) result = await async_setup_component(hass, "hassio", {}) assert result @@ -795,15 +810,19 @@ async def test_supervisor_remove_missing_issue_without_error( await hass.async_block_till_done() -@pytest.mark.usefixtures("all_setup_requests") async def test_system_is_not_ready( hass: HomeAssistant, - resolution_info: AsyncMock, + aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, + all_setup_requests, ) -> None: """Ensure hassio starts despite error.""" - resolution_info.side_effect = SupervisorBadRequestError( - "System is not ready with state: setup" + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "", + "message": "System is not ready with state: setup", + }, ) assert await async_setup_component(hass, "hassio", {}) @@ -813,14 +832,14 @@ async def test_system_is_not_ready( @pytest.mark.parametrize( "all_setup_requests", [{"include_addons": True}], indirect=True ) -@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_detached_addon_missing( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + all_setup_requests, ) -> None: """Test supervisor issue for detached addon due to missing repository.""" - mock_resolution_info(supervisor_client) + mock_resolution_info(aioclient_mock) result = await async_setup_component(hass, "hassio", {}) assert result @@ -834,7 +853,7 @@ async def test_supervisor_issues_detached_addon_missing( "data": { "event": "issue_changed", "data": { - "uuid": (issue_uuid := uuid4().hex), + "uuid": "1234", "type": "detached_addon_missing", "context": "addon", "reference": "test", @@ -852,7 +871,7 @@ async def test_supervisor_issues_detached_addon_missing( assert len(msg["result"]["issues"]) == 1 assert_issue_repair_in_list( msg["result"]["issues"], - uuid=issue_uuid, + uuid="1234", context="addon", type_="detached_addon_missing", fixable=False, diff --git a/tests/components/hassio/test_repairs.py b/tests/components/hassio/test_repairs.py 
index f8cac4e1a97..8d0bbfac87c 100644 --- a/tests/components/hassio/test_repairs.py +++ b/tests/components/hassio/test_repairs.py @@ -1,19 +1,9 @@ """Test supervisor repairs.""" -from collections.abc import Generator from http import HTTPStatus import os -from unittest.mock import AsyncMock, patch -from uuid import uuid4 +from unittest.mock import patch -from aiohasupervisor import SupervisorError -from aiohasupervisor.models import ( - ContextType, - Issue, - IssueType, - Suggestion, - SuggestionType, -) import pytest from homeassistant.core import HomeAssistant @@ -23,52 +13,48 @@ from homeassistant.setup import async_setup_component from .test_init import MOCK_ENVIRON from .test_issues import mock_resolution_info +from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def fixture_supervisor_environ() -> Generator[None]: +async def fixture_supervisor_environ(): """Mock os environ for supervisor.""" with patch.dict(os.environ, MOCK_ENVIRON): yield -@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, + all_setup_requests, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.MULTIPLE_DATA_DISKS, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "multiple_data_disks", + "context": "system", + "reference": "/dev/sda1", + "suggestions": [ + { + "uuid": "1235", + "type": "rename_data_disk", + "context": "system", + "reference": "/dev/sda1", + } + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.RENAME_DATA_DISK, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=(sugg_uuid := uuid4()), - auto=False, - ) - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -108,53 +94,52 @@ async def test_supervisor_issue_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1235" + ) -@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, + all_setup_requests, ) -> None: """Test fix flow for supervisor issue with multiple suggestions.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - reference="test", - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": "test", + "suggestions": [ + { + "uuid": "1235", + 
"type": "execute_reboot", + "context": "system", + "reference": "test", + }, + { + "uuid": "1236", + "type": "test_type", + "context": "system", + "reference": "test", + }, + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REBOOT, - context=ContextType.SYSTEM, - reference="test", - uuid=uuid4(), - auto=False, - ), - Suggestion( - type="test_type", - context=ContextType.SYSTEM, - reference="test", - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -203,53 +188,52 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1236" + ) -@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confirmation( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, + all_setup_requests, ) -> None: """Test fix flow for supervisor issue with multiple suggestions and choice requires confirmation.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - reference=None, - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": None, + "suggestions": [ + { + "uuid": "1235", + "type": "execute_reboot", + "context": "system", + "reference": None, + }, + { + "uuid": "1236", + "type": "test_type", + "context": "system", + "reference": None, + }, + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REBOOT, - context=ContextType.SYSTEM, - reference=None, - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - Suggestion( - type="test_type", - context=ContextType.SYSTEM, - reference=None, - uuid=uuid4(), - auto=False, - ), - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -317,46 +301,46 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confir "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1235" + ) -@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_skip_confirmation( hass: 
HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, + all_setup_requests, ) -> None: """Test confirmation skipped for fix flow for supervisor issue with one suggestion.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - reference=None, - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": None, + "suggestions": [ + { + "uuid": "1235", + "type": "execute_reboot", + "context": "system", + "reference": None, + } + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REBOOT, - context=ContextType.SYSTEM, - reference=None, - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -396,54 +380,53 @@ async def test_supervisor_issue_repair_flow_skip_confirmation( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1235" + ) -@pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow_error( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, + all_setup_requests, ) -> None: """Test repair flow fails when repair fails to apply.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.MOUNT_FAILED, - context=ContextType.MOUNT, - reference="backup_share", - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "mount_failed", + "context": "mount", + "reference": "backup_share", + "suggestions": [ + { + "uuid": "1235", + "type": "execute_reload", + "context": "mount", + "reference": "backup_share", + }, + { + "uuid": "1236", + "type": "execute_remove", + "context": "mount", + "reference": "backup_share", + }, + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_RELOAD, - context=ContextType.MOUNT, - reference="backup_share", - uuid=uuid4(), - auto=False, - ), - Suggestion( - type=SuggestionType.EXECUTE_REMOVE, - context=ContextType.MOUNT, - reference="backup_share", - uuid=uuid4(), - auto=False, - ), - ] - }, - suggestion_result=SupervisorError("boom"), + suggestion_result=False, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -475,52 +458,46 @@ async def test_mount_failed_repair_flow_error( "description_placeholders": None, } - assert issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + assert issue_registry.async_get_issue(domain="hassio", 
issue_id="1234") -@pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, + all_setup_requests, ) -> None: """Test repair flow for mount_failed issue.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.MOUNT_FAILED, - context=ContextType.MOUNT, - reference="backup_share", - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "mount_failed", + "context": "mount", + "reference": "backup_share", + "suggestions": [ + { + "uuid": "1235", + "type": "execute_reload", + "context": "mount", + "reference": "backup_share", + }, + { + "uuid": "1236", + "type": "execute_remove", + "context": "mount", + "reference": "backup_share", + }, + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_RELOAD, - context=ContextType.MOUNT, - reference="backup_share", - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - Suggestion( - type=SuggestionType.EXECUTE_REMOVE, - context=ContextType.MOUNT, - reference="backup_share", - uuid=uuid4(), - auto=False, - ), - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -573,79 +550,77 @@ async def test_mount_failed_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1235" + ) @pytest.mark.parametrize( "all_setup_requests", [{"include_addons": True}], indirect=True ) -@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_docker_config_repair_flow( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, + all_setup_requests, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.DOCKER_CONFIG, - context=ContextType.SYSTEM, - reference=None, - uuid=(issue1_uuid := uuid4()), - ), - Issue( - type=IssueType.DOCKER_CONFIG, - context=ContextType.CORE, - reference=None, - uuid=(issue2_uuid := uuid4()), - ), - Issue( - type=IssueType.DOCKER_CONFIG, - context=ContextType.ADDON, - reference="test", - uuid=(issue3_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "docker_config", + "context": "system", + "reference": None, + "suggestions": [ + { + "uuid": "1235", + "type": "execute_rebuild", + "context": "system", + "reference": None, + } + ], + }, + { + "uuid": "1236", + "type": "docker_config", + "context": "core", + "reference": None, + "suggestions": [ + { + "uuid": "1237", + "type": "execute_rebuild", + "context": "core", + "reference": None, + } + ], + }, + { + "uuid": "1238", + "type": "docker_config", + "context": "addon", + "reference": "test", + "suggestions": [ + { + "uuid": "1239", + "type": "execute_rebuild", + "context": 
"addon", + "reference": "test", + } + ], + }, ], - suggestions_by_issue={ - issue1_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REBUILD, - context=ContextType.SYSTEM, - reference=None, - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - ], - issue2_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REBUILD, - context=ContextType.CORE, - reference=None, - uuid=uuid4(), - auto=False, - ), - ], - issue3_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REBUILD, - context=ContextType.ADDON, - reference="test", - uuid=uuid4(), - auto=False, - ), - ], - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue1_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -685,53 +660,52 @@ async def test_supervisor_issue_docker_config_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue1_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1235" + ) -@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_multiple_data_disks( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, + all_setup_requests, ) -> None: """Test fix flow for multiple data disks supervisor issue.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.MULTIPLE_DATA_DISKS, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "multiple_data_disks", + "context": "system", + "reference": "/dev/sda1", + "suggestions": [ + { + "uuid": "1235", + "type": "rename_data_disk", + "context": "system", + "reference": "/dev/sda1", + }, + { + "uuid": "1236", + "type": "adopt_data_disk", + "context": "system", + "reference": "/dev/sda1", + }, + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.RENAME_DATA_DISK, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=uuid4(), - auto=False, - ), - Suggestion( - type=SuggestionType.ADOPT_DATA_DISK, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -799,49 +773,49 @@ async def test_supervisor_issue_repair_flow_multiple_data_disks( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1236" + ) @pytest.mark.parametrize( "all_setup_requests", [{"include_addons": True}], 
indirect=True ) -@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_detached_addon_removed( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, + all_setup_requests, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.DETACHED_ADDON_REMOVED, - context=ContextType.ADDON, - reference="test", - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "detached_addon_removed", + "context": "addon", + "reference": "test", + "suggestions": [ + { + "uuid": "1235", + "type": "execute_remove", + "context": "addon", + "reference": "test", + } + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REMOVE, - context=ContextType.ADDON, - reference="test", - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -886,107 +860,10 @@ async def test_supervisor_issue_detached_addon_removed( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - -@pytest.mark.parametrize( - "all_setup_requests", [{"include_addons": True}], indirect=True -) -@pytest.mark.usefixtures("all_setup_requests") -async def test_supervisor_issue_addon_boot_fail( - hass: HomeAssistant, - supervisor_client: AsyncMock, - hass_client: ClientSessionGenerator, - issue_registry: ir.IssueRegistry, -) -> None: - """Test fix flow for supervisor issue.""" - mock_resolution_info( - supervisor_client, - issues=[ - Issue( - type="boot_fail", - context=ContextType.ADDON, - reference="test", - uuid=(issue_uuid := uuid4()), - ), - ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type="execute_start", - context=ContextType.ADDON, - reference="test", - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - Suggestion( - type="disable_boot", - context=ContextType.ADDON, - reference="test", - uuid=uuid4(), - auto=False, - ), - ] - }, + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1235" ) - - assert await async_setup_component(hass, "hassio", {}) - - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) - assert repair_issue - - client = await hass_client() - - resp = await client.post( - "/api/repairs/issues/fix", - json={"handler": "hassio", "issue_id": repair_issue.issue_id}, - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data == { - "type": "menu", - "flow_id": flow_id, - "handler": "hassio", - "step_id": "fix_menu", - "data_schema": [ - { - "type": "select", - "options": [ - ["addon_execute_start", "addon_execute_start"], - ["addon_disable_boot", "addon_disable_boot"], - ], - "name": "next_step_id", - } - ], - "menu_options": ["addon_execute_start", "addon_disable_boot"], - "description_placeholders": { - "reference": "test", - "addon": "test", - }, - } 
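
Note on the pattern used throughout the rewritten repairs tests above: instead of asserting on an aiohasupervisor client mock, the tests register the Supervisor HTTP endpoints on AiohttpClientMocker up front and then inspect aioclient_mock.mock_calls to see which suggestion endpoint was posted to. A minimal sketch of that stub-and-inspect pattern, assuming the repository's standard hass and aioclient_mock fixtures (which patch sessions obtained through Home Assistant's aiohttp helper); the test name is hypothetical and the URL is taken from the hunks above:

from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from tests.test_util.aiohttp import AiohttpClientMocker


async def test_suggestion_endpoint_sketch(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Sketch: stub a Supervisor endpoint, hit it, then inspect the recorded call."""
    # Register the route before the code under test runs.
    aioclient_mock.post(
        "http://127.0.0.1/resolution/suggestion/1235",
        json={"result": "ok", "data": {}},
    )

    # The aioclient_mock fixture patches Home Assistant's shared aiohttp session,
    # so this request is answered by the stub above.
    session = async_get_clientsession(hass)
    resp = await session.post("http://127.0.0.1/resolution/suggestion/1235")
    assert resp.status == 200

    # Each mock_calls entry is a (method, url, data, headers) tuple, with the
    # method stored lowercase, which is what the assertions above rely on.
    method, url, _data, _headers = aioclient_mock.mock_calls[-1]
    assert method == "post"
    assert str(url) == "http://127.0.0.1/resolution/suggestion/1235"

In the real tests the POST is issued by the repairs fix flow rather than by the test directly, so the final assertions prove which suggestion the flow applied.
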
- - resp = await client.post( - f"/api/repairs/issues/fix/{flow_id}", - json={"next_step_id": "addon_execute_start"}, - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data == { - "type": "create_entry", - "flow_id": flow_id, - "handler": "hassio", - "description": None, - "description_placeholders": None, - } - - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) diff --git a/tests/components/hassio/test_sensor.py b/tests/components/hassio/test_sensor.py index 7160a2cbf16..71b867d849d 100644 --- a/tests/components/hassio/test_sensor.py +++ b/tests/components/hassio/test_sensor.py @@ -2,14 +2,17 @@ from datetime import timedelta import os -from unittest.mock import AsyncMock, patch +from unittest.mock import patch -from aiohasupervisor import SupervisorError from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant import config_entries -from homeassistant.components.hassio import DOMAIN, HASSIO_UPDATE_INTERVAL +from homeassistant.components.hassio import ( + DOMAIN, + HASSIO_UPDATE_INTERVAL, + HassioAPIError, +) from homeassistant.components.hassio.const import REQUEST_REFRESH_DELAY from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE @@ -18,8 +21,6 @@ from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from .common import MOCK_REPOSITORIES, MOCK_STORE_ADDONS - from tests.common import MockConfigEntry, async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker @@ -27,21 +28,44 @@ MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) -def mock_all( - aioclient_mock: AiohttpClientMocker, - addon_installed: AsyncMock, - store_info: AsyncMock, - addon_stats: AsyncMock, - addon_changelog: AsyncMock, - resolution_info: AsyncMock, -) -> None: +def mock_all(aioclient_mock: AiohttpClientMocker) -> None: """Mock all setup requests.""" _install_default_mocks(aioclient_mock) + _install_test_addon_stats_mock(aioclient_mock) + + +def _install_test_addon_stats_mock(aioclient_mock: AiohttpClientMocker): + """Install mock to provide valid stats for the test addon.""" + aioclient_mock.get( + "http://127.0.0.1/addons/test/stats", + json={ + "result": "ok", + "data": { + "cpu_percent": 0.99, + "memory_usage": 182611968, + "memory_limit": 3977146368, + "memory_percent": 4.59, + "network_rx": 362570232, + "network_tx": 82374138, + "blk_read": 46010945536, + "blk_write": 15051526144, + }, + }, + ) + + +def _install_test_addon_stats_failure_mock(aioclient_mock: AiohttpClientMocker): + """Install mocks to raise an exception when fetching stats for the test addon.""" + aioclient_mock.get( + "http://127.0.0.1/addons/test/stats", + exc=HassioAPIError, + ) def _install_default_mocks(aioclient_mock: AiohttpClientMocker): """Install default mocks.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) + aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -54,6 +78,13 @@ def _install_default_mocks(aioclient_mock: AiohttpClientMocker): }, }, ) + aioclient_mock.get( + "http://127.0.0.1/store", + json={ + "result": "ok", 
+ "data": {"addons": [], "repositories": []}, + }, + ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -144,9 +175,33 @@ def _install_default_mocks(aioclient_mock: AiohttpClientMocker): }, }, ) + aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") + aioclient_mock.get( + "http://127.0.0.1/addons/test/info", + json={"result": "ok", "data": {"auto_update": True}}, + ) + aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") + aioclient_mock.get( + "http://127.0.0.1/addons/test2/info", + json={"result": "ok", "data": {"auto_update": False}}, + ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) + aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -159,9 +214,6 @@ def _install_default_mocks(aioclient_mock: AiohttpClientMocker): ) -@pytest.mark.parametrize( - ("store_addons", "store_repositories"), [(MOCK_STORE_ADDONS, MOCK_REPOSITORIES)] -) @pytest.mark.parametrize( ("entity_id", "expected"), [ @@ -220,9 +272,6 @@ async def test_sensor( assert state.state == expected -@pytest.mark.parametrize( - ("store_addons", "store_repositories"), [(MOCK_STORE_ADDONS, MOCK_REPOSITORIES)] -) @pytest.mark.parametrize( ("entity_id", "expected"), [ @@ -239,7 +288,6 @@ async def test_stats_addon_sensor( entity_registry: er.EntityRegistry, caplog: pytest.LogCaptureFixture, freezer: FrozenDateTimeFactory, - addon_stats: AsyncMock, ) -> None: """Test stats addons sensor.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -257,7 +305,7 @@ async def test_stats_addon_sensor( aioclient_mock.clear_requests() _install_default_mocks(aioclient_mock) - addon_stats.side_effect = SupervisorError + _install_test_addon_stats_failure_mock(aioclient_mock) freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1)) async_fire_time_changed(hass) @@ -267,7 +315,7 @@ async def test_stats_addon_sensor( aioclient_mock.clear_requests() _install_default_mocks(aioclient_mock) - addon_stats.side_effect = None + _install_test_addon_stats_mock(aioclient_mock) freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1)) async_fire_time_changed(hass) @@ -300,7 +348,7 @@ async def test_stats_addon_sensor( aioclient_mock.clear_requests() _install_default_mocks(aioclient_mock) - addon_stats.side_effect = SupervisorError + _install_test_addon_stats_failure_mock(aioclient_mock) freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1)) async_fire_time_changed(hass) diff --git a/tests/components/hassio/test_update.py b/tests/components/hassio/test_update.py index c1775d6e0b4..9a047010cc3 100644 --- a/tests/components/hassio/test_update.py +++ b/tests/components/hassio/test_update.py @@ -2,13 +2,11 @@ from datetime import timedelta import os -from unittest.mock import AsyncMock, patch +from unittest.mock import patch -from aiohasupervisor import SupervisorBadRequestError, SupervisorError -from aiohasupervisor.models import StoreAddonUpdate import pytest -from homeassistant.components.hassio import DOMAIN +from homeassistant.components.hassio import DOMAIN, HassioAPIError from homeassistant.components.hassio.const import REQUEST_REFRESH_DELAY from homeassistant.core import HomeAssistant from homeassistant.exceptions import 
HomeAssistantError @@ -23,16 +21,10 @@ MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) -def mock_all( - aioclient_mock: AiohttpClientMocker, - addon_installed: AsyncMock, - store_info: AsyncMock, - addon_stats: AsyncMock, - addon_changelog: AsyncMock, - resolution_info: AsyncMock, -) -> None: +def mock_all(aioclient_mock: AiohttpClientMocker) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) + aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -45,6 +37,13 @@ def mock_all( }, }, ) + aioclient_mock.get( + "http://127.0.0.1/store", + json={ + "result": "ok", + "data": {"addons": [], "repositories": []}, + }, + ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -115,6 +114,22 @@ def mock_all( }, }, ) + aioclient_mock.get( + "http://127.0.0.1/addons/test/stats", + json={ + "result": "ok", + "data": { + "cpu_percent": 0.99, + "memory_usage": 182611968, + "memory_limit": 3977146368, + "memory_percent": 4.59, + "network_rx": 362570232, + "network_tx": 82374138, + "blk_read": 46010945536, + "blk_write": 15051526144, + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/core/stats", json={ @@ -147,9 +162,33 @@ def mock_all( }, }, ) + aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") + aioclient_mock.get( + "http://127.0.0.1/addons/test/info", + json={"result": "ok", "data": {"auto_update": True}}, + ) + aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") + aioclient_mock.get( + "http://127.0.0.1/addons/test2/info", + json={"result": "ok", "data": {"auto_update": False}}, + ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) + aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -178,10 +217,8 @@ async def test_update_entities( expected_state, auto_update, aioclient_mock: AiohttpClientMocker, - addon_installed: AsyncMock, ) -> None: """Test update entities.""" - addon_installed.return_value.auto_update = auto_update config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) @@ -202,7 +239,9 @@ async def test_update_entities( assert state.attributes["auto_update"] is auto_update -async def test_update_addon(hass: HomeAssistant, update_addon: AsyncMock) -> None: +async def test_update_addon( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: """Test updating addon update entity.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) @@ -216,16 +255,22 @@ async def test_update_addon(hass: HomeAssistant, update_addon: AsyncMock) -> Non assert result await hass.async_block_till_done() + aioclient_mock.post( + "http://127.0.0.1/addons/test/update", + json={"result": "ok", "data": {}}, + ) + await hass.services.async_call( "update", "install", {"entity_id": "update.test_update"}, blocking=True, ) - update_addon.assert_called_once_with("test", StoreAddonUpdate(backup=False)) -async def test_update_os(hass: HomeAssistant, 
supervisor_client: AsyncMock) -> None: +async def test_update_os( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: """Test updating OS update entity.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) @@ -239,17 +284,22 @@ async def test_update_os(hass: HomeAssistant, supervisor_client: AsyncMock) -> N assert result await hass.async_block_till_done() - supervisor_client.os.update.return_value = None + aioclient_mock.post( + "http://127.0.0.1/os/update", + json={"result": "ok", "data": {}}, + ) + await hass.services.async_call( "update", "install", {"entity_id": "update.home_assistant_operating_system_update"}, blocking=True, ) - supervisor_client.os.update.assert_called_once() -async def test_update_core(hass: HomeAssistant, supervisor_client: AsyncMock) -> None: +async def test_update_core( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: """Test updating core update entity.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) @@ -263,18 +313,21 @@ async def test_update_core(hass: HomeAssistant, supervisor_client: AsyncMock) -> assert result await hass.async_block_till_done() - supervisor_client.homeassistant.update.return_value = None + aioclient_mock.post( + "http://127.0.0.1/core/update", + json={"result": "ok", "data": {}}, + ) + await hass.services.async_call( "update", "install", - {"entity_id": "update.home_assistant_core_update"}, + {"entity_id": "update.home_assistant_os_update"}, blocking=True, ) - supervisor_client.homeassistant.update.assert_called_once() async def test_update_supervisor( - hass: HomeAssistant, supervisor_client: AsyncMock + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test updating supervisor update entity.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -289,19 +342,21 @@ async def test_update_supervisor( assert result await hass.async_block_till_done() - supervisor_client.supervisor.update.return_value = None + aioclient_mock.post( + "http://127.0.0.1/supervisor/update", + json={"result": "ok", "data": {}}, + ) + await hass.services.async_call( "update", "install", {"entity_id": "update.home_assistant_supervisor_update"}, blocking=True, ) - supervisor_client.supervisor.update.assert_called_once() async def test_update_addon_with_error( - hass: HomeAssistant, - update_addon: AsyncMock, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test updating addon update entity with error.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -315,8 +370,12 @@ async def test_update_addon_with_error( ) await hass.async_block_till_done() - update_addon.side_effect = SupervisorError - with pytest.raises(HomeAssistantError, match=r"^Error updating test:"): + aioclient_mock.post( + "http://127.0.0.1/addons/test/update", + exc=HassioAPIError, + ) + + with pytest.raises(HomeAssistantError): assert not await hass.services.async_call( "update", "install", @@ -326,7 +385,7 @@ async def test_update_addon_with_error( async def test_update_os_with_error( - hass: HomeAssistant, supervisor_client: AsyncMock + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test updating OS update entity with error.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -340,10 +399,12 @@ async def test_update_os_with_error( ) await hass.async_block_till_done() - 
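
The error-path hunks in this file replace side_effect on the supervisor client mock with routes registered via exc=HassioAPIError, so the mocked HTTP layer raises when the Supervisor endpoint is hit. A minimal sketch of that failure-injection pattern, again assuming the standard hass and aioclient_mock fixtures; the test name is hypothetical, the /os/update URL is taken from the hunks, and the request is made directly here to keep the sketch self-contained:

import pytest

from homeassistant.components.hassio import HassioAPIError
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from tests.test_util.aiohttp import AiohttpClientMocker


async def test_failure_injection_sketch(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Sketch: a route registered with exc= raises instead of returning a response."""
    aioclient_mock.post("http://127.0.0.1/os/update", exc=HassioAPIError)

    session = async_get_clientsession(hass)
    with pytest.raises(HassioAPIError):
        await session.post("http://127.0.0.1/os/update")

In the surrounding tests the failing request is made by the update entity during the update.install service call, and the integration surfaces it as HomeAssistantError, which is what the pytest.raises(HomeAssistantError) blocks assert.
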
supervisor_client.os.update.side_effect = SupervisorError - with pytest.raises( - HomeAssistantError, match=r"^Error updating Home Assistant Operating System:" - ): + aioclient_mock.post( + "http://127.0.0.1/os/update", + exc=HassioAPIError, + ) + + with pytest.raises(HomeAssistantError): assert not await hass.services.async_call( "update", "install", @@ -353,7 +414,7 @@ async def test_update_os_with_error( async def test_update_supervisor_with_error( - hass: HomeAssistant, supervisor_client: AsyncMock + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test updating supervisor update entity with error.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -367,10 +428,12 @@ async def test_update_supervisor_with_error( ) await hass.async_block_till_done() - supervisor_client.supervisor.update.side_effect = SupervisorError - with pytest.raises( - HomeAssistantError, match=r"^Error updating Home Assistant Supervisor:" - ): + aioclient_mock.post( + "http://127.0.0.1/supervisor/update", + exc=HassioAPIError, + ) + + with pytest.raises(HomeAssistantError): assert not await hass.services.async_call( "update", "install", @@ -380,7 +443,7 @@ async def test_update_supervisor_with_error( async def test_update_core_with_error( - hass: HomeAssistant, supervisor_client: AsyncMock + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test updating core update entity with error.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -394,10 +457,12 @@ async def test_update_core_with_error( ) await hass.async_block_till_done() - supervisor_client.homeassistant.update.side_effect = SupervisorError - with pytest.raises( - HomeAssistantError, match=r"^Error updating Home Assistant Core:" - ): + aioclient_mock.post( + "http://127.0.0.1/core/update", + exc=HassioAPIError, + ) + + with pytest.raises(HomeAssistantError): assert not await hass.services.async_call( "update", "install", @@ -548,18 +613,23 @@ async def test_no_os_entity(hass: HomeAssistant) -> None: async def test_setting_up_core_update_when_addon_fails( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - addon_installed: AsyncMock, - addon_stats: AsyncMock, - addon_changelog: AsyncMock, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test setting up core update when single addon fails.""" - addon_installed.side_effect = SupervisorBadRequestError("Addon Test does not exist") - addon_stats.side_effect = SupervisorBadRequestError("add-on is not running") - addon_changelog.side_effect = SupervisorBadRequestError("add-on is not running") with ( patch.dict(os.environ, MOCK_ENVIRON), + patch( + "homeassistant.components.hassio.HassIO.get_addon_stats", + side_effect=HassioAPIError("add-on is not running"), + ), + patch( + "homeassistant.components.hassio.HassIO.get_addon_changelog", + side_effect=HassioAPIError("add-on is not running"), + ), + patch( + "homeassistant.components.hassio.HassIO.get_addon_info", + side_effect=HassioAPIError("add-on is not running"), + ), ): result = await async_setup_component( hass, diff --git a/tests/components/hassio/test_websocket_api.py b/tests/components/hassio/test_websocket_api.py index 21e6b03678b..f3be391d9b7 100644 --- a/tests/components/hassio/test_websocket_api.py +++ b/tests/components/hassio/test_websocket_api.py @@ -1,7 +1,5 @@ """Test websocket API.""" -from unittest.mock import AsyncMock - import pytest from homeassistant.components.hassio.const import ( @@ -25,13 +23,10 @@ from tests.typing 
import WebSocketGenerator @pytest.fixture(autouse=True) -def mock_all( - aioclient_mock: AiohttpClientMocker, - supervisor_is_connected: AsyncMock, - resolution_info: AsyncMock, -) -> None: +def mock_all(aioclient_mock: AiohttpClientMocker) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) + aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -69,11 +64,23 @@ def mock_all( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) -@pytest.mark.usefixtures("hassio_env") async def test_ws_subscription( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test websocket subscription.""" assert await async_setup_component(hass, "hassio", {}) @@ -109,8 +116,8 @@ async def test_ws_subscription( assert response["success"] -@pytest.mark.usefixtures("hassio_env") async def test_websocket_supervisor_api( + hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, @@ -153,8 +160,8 @@ async def test_websocket_supervisor_api( } -@pytest.mark.usefixtures("hassio_env") async def test_websocket_supervisor_api_error( + hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, @@ -182,8 +189,8 @@ async def test_websocket_supervisor_api_error( assert msg["error"]["message"] == "example error" -@pytest.mark.usefixtures("hassio_env") async def test_websocket_supervisor_api_error_without_msg( + hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, @@ -211,8 +218,8 @@ async def test_websocket_supervisor_api_error_without_msg( assert msg["error"]["message"] == "" -@pytest.mark.usefixtures("hassio_env") async def test_websocket_non_admin_user( + hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, diff --git a/tests/components/hddtemp/test_sensor.py b/tests/components/hddtemp/test_sensor.py index 15740ffa0ea..2bd0519c12c 100644 --- a/tests/components/hddtemp/test_sensor.py +++ b/tests/components/hddtemp/test_sensor.py @@ -60,7 +60,7 @@ REFERENCE = { class TelnetMock: """Mock class for the telnetlib.Telnet object.""" - def __init__(self, host, port, timeout=0) -> None: + def __init__(self, host, port, timeout=0): """Initialize Telnet object.""" self.host = host self.port = port diff --git a/tests/components/hdmi_cec/__init__.py b/tests/components/hdmi_cec/__init__.py index 1d51fa0cc50..5cf8ed18b6a 100644 --- a/tests/components/hdmi_cec/__init__.py +++ b/tests/components/hdmi_cec/__init__.py @@ -8,7 +8,7 @@ from homeassistant.components.hdmi_cec import KeyPressCommand, KeyReleaseCommand class MockHDMIDevice: """Mock of a HDMIDevice.""" - def __init__(self, *, logical_address, **values) -> None: + def __init__(self, *, logical_address, **values): """Mock of a HDMIDevice.""" self.set_update_callback = Mock(side_effect=self._set_update_callback) self.logical_address = logical_address diff --git a/tests/components/hdmi_cec/conftest.py 
b/tests/components/hdmi_cec/conftest.py index 058525f2448..0756ea639b7 100644 --- a/tests/components/hdmi_cec/conftest.py +++ b/tests/components/hdmi_cec/conftest.py @@ -1,22 +1,16 @@ """Tests for the HDMI-CEC component.""" -from collections.abc import Callable, Coroutine, Generator -from typing import Any -from unittest.mock import MagicMock, patch +from unittest.mock import patch import pytest from homeassistant.components.hdmi_cec import DOMAIN from homeassistant.const import EVENT_HOMEASSISTANT_START -from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -type CecEntityCreator = Callable[..., Coroutine[Any, Any, None]] -type HDMINetworkCreator = Callable[..., Coroutine[Any, Any, MagicMock]] - @pytest.fixture(name="mock_cec_adapter", autouse=True) -def mock_cec_adapter_fixture() -> Generator[MagicMock]: +def mock_cec_adapter_fixture(): """Mock CecAdapter. Always mocked as it imports the `cec` library which is part of `libcec`. @@ -28,7 +22,7 @@ def mock_cec_adapter_fixture() -> Generator[MagicMock]: @pytest.fixture(name="mock_hdmi_network") -def mock_hdmi_network_fixture() -> Generator[MagicMock]: +def mock_hdmi_network_fixture(): """Mock HDMINetwork.""" with patch( "homeassistant.components.hdmi_cec.HDMINetwork", autospec=True @@ -37,9 +31,7 @@ def mock_hdmi_network_fixture() -> Generator[MagicMock]: @pytest.fixture -def create_hdmi_network( - hass: HomeAssistant, mock_hdmi_network: MagicMock -) -> HDMINetworkCreator: +def create_hdmi_network(hass, mock_hdmi_network): """Create an initialized mock hdmi_network.""" async def hdmi_network(config=None): @@ -57,7 +49,7 @@ def create_hdmi_network( @pytest.fixture -def create_cec_entity(hass: HomeAssistant) -> CecEntityCreator: +def create_cec_entity(hass): """Create a CecEntity.""" async def cec_entity(hdmi_network, device): diff --git a/tests/components/hdmi_cec/test_init.py b/tests/components/hdmi_cec/test_init.py index 1b1861b0ef8..1263078c196 100644 --- a/tests/components/hdmi_cec/test_init.py +++ b/tests/components/hdmi_cec/test_init.py @@ -1,9 +1,7 @@ """Tests for the HDMI-CEC component.""" -from collections.abc import Generator from datetime import timedelta -from typing import Any -from unittest.mock import ANY, MagicMock, PropertyMock, call, patch +from unittest.mock import ANY, PropertyMock, call, patch import pytest import voluptuous as vol @@ -30,7 +28,6 @@ from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow from . 
import assert_key_press_release -from .conftest import HDMINetworkCreator from tests.common import ( MockEntity, @@ -41,7 +38,7 @@ from tests.common import ( @pytest.fixture(name="mock_tcp_adapter") -def mock_tcp_adapter_fixture() -> Generator[MagicMock]: +def mock_tcp_adapter_fixture(): """Mock TcpAdapter.""" with patch( "homeassistant.components.hdmi_cec.TcpAdapter", autospec=True @@ -91,9 +88,7 @@ def mock_tcp_adapter_fixture() -> Generator[MagicMock]: ), ], ) -def test_parse_mapping_physical_address( - mapping: dict[str, Any], expected: list[tuple[str, list[int]]] -) -> None: +def test_parse_mapping_physical_address(mapping, expected) -> None: """Test the device config mapping function.""" result = parse_mapping(mapping) result = [ @@ -106,7 +101,7 @@ def test_parse_mapping_physical_address( async def test_setup_cec_adapter( - hass: HomeAssistant, mock_cec_adapter: MagicMock, mock_hdmi_network: MagicMock + hass: HomeAssistant, mock_cec_adapter, mock_hdmi_network ) -> None: """Test the general setup of this component.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) @@ -130,7 +125,7 @@ async def test_setup_cec_adapter( @pytest.mark.parametrize("osd_name", ["test", "test_a_long_name"]) async def test_setup_set_osd_name( - hass: HomeAssistant, osd_name: str, mock_cec_adapter: MagicMock + hass: HomeAssistant, osd_name, mock_cec_adapter ) -> None: """Test the setup of this component with the `osd_name` config setting.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {"osd_name": osd_name}}) @@ -139,7 +134,7 @@ async def test_setup_set_osd_name( async def test_setup_tcp_adapter( - hass: HomeAssistant, mock_tcp_adapter: MagicMock, mock_hdmi_network: MagicMock + hass: HomeAssistant, mock_tcp_adapter, mock_hdmi_network ) -> None: """Test the setup of this component with the TcpAdapter (`host` config setting).""" host = "0.0.0.0" @@ -166,9 +161,7 @@ async def test_setup_tcp_adapter( # Test services -async def test_service_power_on( - hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator -) -> None: +async def test_service_power_on(hass: HomeAssistant, create_hdmi_network) -> None: """Test the power on service call.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -182,9 +175,7 @@ async def test_service_power_on( mock_hdmi_network_instance.power_on.assert_called_once_with() -async def test_service_standby( - hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator -) -> None: +async def test_service_standby(hass: HomeAssistant, create_hdmi_network) -> None: """Test the standby service call.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -199,7 +190,7 @@ async def test_service_standby( async def test_service_select_device_alias( - hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator + hass: HomeAssistant, create_hdmi_network ) -> None: """Test the select device service call with a known alias.""" mock_hdmi_network_instance = await create_hdmi_network( @@ -229,7 +220,7 @@ class MockCecEntity(MockEntity): async def test_service_select_device_entity( - hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator + hass: HomeAssistant, create_hdmi_network ) -> None: """Test the select device service call with an existing entity.""" platform = MockEntityPlatform(hass) @@ -253,7 +244,7 @@ async def test_service_select_device_entity( async def test_service_select_device_physical_address( - hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator + hass: HomeAssistant, create_hdmi_network ) -> None: """Test the select device service call 
with a raw physical address.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -271,9 +262,7 @@ async def test_service_select_device_physical_address( assert str(physical_address) == "1.1.0.0" -async def test_service_update_devices( - hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator -) -> None: +async def test_service_update_devices(hass: HomeAssistant, create_hdmi_network) -> None: """Test the update devices service call.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -306,11 +295,11 @@ async def test_service_update_devices( @pytest.mark.parametrize(("direction", "key"), [("up", 65), ("down", 66)]) async def test_service_volume_x_times( hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, + create_hdmi_network, count: int, call_count: int, - direction: str, - key: int, + direction, + key, ) -> None: """Test the volume service call with steps.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -331,10 +320,7 @@ async def test_service_volume_x_times( @pytest.mark.parametrize(("direction", "key"), [("up", 65), ("down", 66)]) async def test_service_volume_press( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - direction: str, - key: int, + hass: HomeAssistant, create_hdmi_network, direction, key ) -> None: """Test the volume service call with press attribute.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -355,10 +341,7 @@ async def test_service_volume_press( @pytest.mark.parametrize(("direction", "key"), [("up", 65), ("down", 66)]) async def test_service_volume_release( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - direction: str, - key: int, + hass: HomeAssistant, create_hdmi_network, direction, key ) -> None: """Test the volume service call with release attribute.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -393,7 +376,7 @@ async def test_service_volume_release( ], ) async def test_service_volume_mute( - hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator, attr: str, key: int + hass: HomeAssistant, create_hdmi_network, attr, key ) -> None: """Test the volume service call with mute.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -464,10 +447,7 @@ async def test_service_volume_mute( ], ) async def test_service_send_command( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - data: dict[str, Any], - expected: str, + hass: HomeAssistant, create_hdmi_network, data, expected ) -> None: """Test the send command service call.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -490,10 +470,10 @@ async def test_service_send_command( ) async def test_watchdog( hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - mock_cec_adapter: MagicMock, - adapter_initialized_value: bool, - watchdog_actions: int, + create_hdmi_network, + mock_cec_adapter, + adapter_initialized_value, + watchdog_actions, ) -> None: """Test the watchdog when adapter is down/up.""" adapter_initialized = PropertyMock(return_value=adapter_initialized_value) diff --git a/tests/components/hdmi_cec/test_media_player.py b/tests/components/hdmi_cec/test_media_player.py index f193651c305..988279a235f 100644 --- a/tests/components/hdmi_cec/test_media_player.py +++ b/tests/components/hdmi_cec/test_media_player.py @@ -1,7 +1,6 @@ """Tests for the HDMI-CEC media player platform.""" from collections.abc import Callable -from typing import Any from pycec.const import ( DEVICE_TYPE_NAMES, @@ -56,7 +55,6 @@ from homeassistant.const import ( from 
homeassistant.core import HomeAssistant from . import MockHDMIDevice, assert_key_press_release -from .conftest import CecEntityCreator, HDMINetworkCreator type AssertState = Callable[[str, str], None] @@ -93,9 +91,7 @@ def assert_state_fixture(request: pytest.FixtureRequest) -> AssertState: async def test_load_platform( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, + hass: HomeAssistant, create_hdmi_network, create_cec_entity ) -> None: """Test that media_player entity is loaded.""" hdmi_network = await create_hdmi_network(config={"platform": "media_player"}) @@ -111,10 +107,7 @@ async def test_load_platform( @pytest.mark.parametrize("platform", [{}, {"platform": "switch"}]) async def test_load_types( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, - platform: dict[str, Any], + hass: HomeAssistant, create_hdmi_network, create_cec_entity, platform ) -> None: """Test that media_player entity is loaded when types is set.""" config = platform | {"types": {"hdmi_cec.hdmi_4": "media_player"}} @@ -140,8 +133,8 @@ async def test_load_types( async def test_service_on( hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, + create_hdmi_network, + create_cec_entity, assert_state: AssertState, ) -> None: """Test that media_player triggers on `on` service.""" @@ -167,8 +160,8 @@ async def test_service_on( async def test_service_off( hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, + create_hdmi_network, + create_cec_entity, assert_state: AssertState, ) -> None: """Test that media_player triggers on `off` service.""" @@ -267,10 +260,10 @@ async def test_service_off( ) async def test_supported_features( hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, - type_id: int, - expected_features: MPEF, + create_hdmi_network, + create_cec_entity, + type_id, + expected_features, ) -> None: """Test that features load as expected.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -296,11 +289,11 @@ async def test_supported_features( ) async def test_volume_services( hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, - service: str, - extra_data: dict[str, Any] | None, - key: int, + create_hdmi_network, + create_cec_entity, + service, + extra_data, + key, ) -> None: """Test volume related commands.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -331,11 +324,7 @@ async def test_volume_services( ], ) async def test_track_change_services( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, - service: str, - key: int, + hass: HomeAssistant, create_hdmi_network, create_cec_entity, service, key ) -> None: """Test track change related commands.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -371,8 +360,8 @@ async def test_track_change_services( ) async def test_playback_services( hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, + create_hdmi_network, + create_cec_entity, assert_state: AssertState, service: str, key: int, @@ -401,8 +390,8 @@ async def test_playback_services( @pytest.mark.xfail(reason="PLAY feature isn't enabled") async def test_play_pause_service( hass: HomeAssistant, - create_hdmi_network: 
HDMINetworkCreator, - create_cec_entity: CecEntityCreator, + create_hdmi_network, + create_cec_entity, assert_state: AssertState, ) -> None: """Test play pause service.""" @@ -463,11 +452,11 @@ async def test_play_pause_service( ) async def test_update_state( hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, - type_id: int, - update_data: dict[str, Any], - expected_state: str, + create_hdmi_network, + create_cec_entity, + type_id, + update_data, + expected_state, ) -> None: """Test state updates work as expected.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -513,11 +502,7 @@ async def test_update_state( ], ) async def test_starting_state( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, - data: dict[str, Any], - expected_state: str, + hass: HomeAssistant, create_hdmi_network, create_cec_entity, data, expected_state ) -> None: """Test starting states are set as expected.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -531,9 +516,7 @@ async def test_starting_state( reason="The code only sets the state to unavailable, doesn't set the `_attr_available` to false." ) async def test_unavailable_status( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, + hass: HomeAssistant, create_hdmi_network, create_cec_entity ) -> None: """Test entity goes into unavailable status when expected.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) diff --git a/tests/components/hdmi_cec/test_switch.py b/tests/components/hdmi_cec/test_switch.py index 6ef6ce835ce..d54d6cc103b 100644 --- a/tests/components/hdmi_cec/test_switch.py +++ b/tests/components/hdmi_cec/test_switch.py @@ -17,15 +17,11 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from . 
import MockHDMIDevice -from .conftest import CecEntityCreator, HDMINetworkCreator @pytest.mark.parametrize("config", [{}, {"platform": "switch"}]) async def test_load_platform( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, - config, + hass: HomeAssistant, create_hdmi_network, create_cec_entity, config ) -> None: """Test that switch entity is loaded.""" hdmi_network = await create_hdmi_network(config=config) @@ -40,9 +36,7 @@ async def test_load_platform( async def test_load_types( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, + hass: HomeAssistant, create_hdmi_network, create_cec_entity ) -> None: """Test that switch entity is loaded when types is set.""" config = {"platform": "media_player", "types": {"hdmi_cec.hdmi_3": "switch"}} @@ -67,9 +61,7 @@ async def test_load_types( async def test_service_on( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, + hass: HomeAssistant, create_hdmi_network, create_cec_entity ) -> None: """Test that switch triggers on `on` service.""" hdmi_network = await create_hdmi_network() @@ -89,9 +81,7 @@ async def test_service_on( async def test_service_off( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, + hass: HomeAssistant, create_hdmi_network, create_cec_entity ) -> None: """Test that switch triggers on `off` service.""" hdmi_network = await create_hdmi_network() @@ -128,8 +118,8 @@ async def test_service_off( ) async def test_device_status_change( hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, + create_hdmi_network, + create_cec_entity, power_status, expected_state, status, @@ -164,11 +154,7 @@ async def test_device_status_change( ], ) async def test_friendly_name( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, - device_values, - expected, + hass: HomeAssistant, create_hdmi_network, create_cec_entity, device_values, expected ) -> None: """Test friendly name setup.""" hdmi_network = await create_hdmi_network() @@ -221,8 +207,8 @@ async def test_friendly_name( ) async def test_extra_state_attributes( hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, + create_hdmi_network, + create_cec_entity, device_values, expected_attributes, ) -> None: @@ -253,8 +239,8 @@ async def test_extra_state_attributes( ) async def test_icon( hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, + create_hdmi_network, + create_cec_entity, device_type, expected_icon, ) -> None: @@ -268,9 +254,7 @@ async def test_icon( async def test_unavailable_status( - hass: HomeAssistant, - create_hdmi_network: HDMINetworkCreator, - create_cec_entity: CecEntityCreator, + hass: HomeAssistant, create_hdmi_network, create_cec_entity ) -> None: """Test entity goes into unavailable status when expected.""" hdmi_network = await create_hdmi_network() diff --git a/tests/components/heos/test_media_player.py b/tests/components/heos/test_media_player.py index 089fa1cceea..19f7ec74daf 100644 --- a/tests/components/heos/test_media_player.py +++ b/tests/components/heos/test_media_player.py @@ -1,7 +1,6 @@ """Tests for the Heos Media Player platform.""" import asyncio -from typing import Any from pyheos import CommandFailedError, const from pyheos.error import HeosError @@ 
-59,12 +58,8 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry - -async def setup_platform( - hass: HomeAssistant, config_entry: MockConfigEntry, config: dict[str, Any] -) -> None: +async def setup_platform(hass, config_entry, config): """Set up the media player platform for testing.""" config_entry.add_to_hass(hass) assert await async_setup_component(hass, DOMAIN, config) diff --git a/tests/components/heos/test_services.py b/tests/components/heos/test_services.py index d8b8b5038b0..2d812eb83ab 100644 --- a/tests/components/heos/test_services.py +++ b/tests/components/heos/test_services.py @@ -13,10 +13,8 @@ from homeassistant.components.heos.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry - -async def setup_component(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: +async def setup_component(hass, config_entry): """Set up the component for testing.""" config_entry.add_to_hass(hass) assert await async_setup_component(hass, DOMAIN, {}) diff --git a/tests/components/here_travel_time/test_config_flow.py b/tests/components/here_travel_time/test_config_flow.py index ce210813fb2..eb958991c71 100644 --- a/tests/components/here_travel_time/test_config_flow.py +++ b/tests/components/here_travel_time/test_config_flow.py @@ -6,20 +6,17 @@ from here_routing import HERERoutingError, HERERoutingUnauthorizedError import pytest from homeassistant import config_entries -from homeassistant.components.here_travel_time.config_flow import DEFAULT_OPTIONS from homeassistant.components.here_travel_time.const import ( CONF_ARRIVAL_TIME, CONF_DEPARTURE_TIME, CONF_DESTINATION_ENTITY_ID, CONF_DESTINATION_LATITUDE, CONF_DESTINATION_LONGITUDE, - CONF_ORIGIN_ENTITY_ID, CONF_ORIGIN_LATITUDE, CONF_ORIGIN_LONGITUDE, CONF_ROUTE_MODE, DOMAIN, ROUTE_MODE_FASTEST, - TRAVEL_MODE_BICYCLE, TRAVEL_MODE_CAR, TRAVEL_MODE_PUBLIC, ) @@ -50,9 +47,7 @@ def bypass_setup_fixture(): @pytest.fixture(name="user_step_result") -async def user_step_result_fixture( - hass: HomeAssistant, -) -> config_entries.ConfigFlowResult: +async def user_step_result_fixture(hass: HomeAssistant) -> FlowResultType: """Provide the result of a completed user step.""" init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -70,9 +65,7 @@ async def user_step_result_fixture( @pytest.fixture(name="option_init_result") -async def option_init_result_fixture( - hass: HomeAssistant, -) -> config_entries.ConfigFlowResult: +async def option_init_result_fixture(hass: HomeAssistant) -> FlowResultType: """Provide the result of a completed options init step.""" entry = MockConfigEntry( domain=DOMAIN, @@ -101,8 +94,8 @@ async def option_init_result_fixture( @pytest.fixture(name="origin_step_result") async def origin_step_result_fixture( - hass: HomeAssistant, user_step_result: config_entries.ConfigFlowResult -) -> config_entries.ConfigFlowResult: + hass: HomeAssistant, user_step_result: FlowResultType +) -> FlowResultType: """Provide the result of a completed origin by coordinates step.""" origin_menu_result = await hass.config_entries.flow.async_configure( user_step_result["flow_id"], {"next_step_id": "origin_coordinates"} @@ -149,7 +142,7 @@ async def test_step_user(hass: HomeAssistant, menu_options) -> 
None: @pytest.mark.usefixtures("valid_response") async def test_step_origin_coordinates( - hass: HomeAssistant, user_step_result: config_entries.ConfigFlowResult + hass: HomeAssistant, user_step_result: FlowResultType ) -> None: """Test the origin coordinates step.""" menu_result = await hass.config_entries.flow.async_configure( @@ -172,7 +165,7 @@ async def test_step_origin_coordinates( @pytest.mark.usefixtures("valid_response") async def test_step_origin_entity( - hass: HomeAssistant, user_step_result: config_entries.ConfigFlowResult + hass: HomeAssistant, user_step_result: FlowResultType ) -> None: """Test the origin coordinates step.""" menu_result = await hass.config_entries.flow.async_configure( @@ -189,7 +182,7 @@ async def test_step_origin_entity( @pytest.mark.usefixtures("valid_response") async def test_step_destination_coordinates( - hass: HomeAssistant, origin_step_result: config_entries.ConfigFlowResult + hass: HomeAssistant, origin_step_result: FlowResultType ) -> None: """Test the origin coordinates step.""" menu_result = await hass.config_entries.flow.async_configure( @@ -223,7 +216,7 @@ async def test_step_destination_coordinates( @pytest.mark.usefixtures("valid_response") async def test_step_destination_entity( hass: HomeAssistant, - origin_step_result: config_entries.ConfigFlowResult, + origin_step_result: FlowResultType, ) -> None: """Test the origin coordinates step.""" menu_result = await hass.config_entries.flow.async_configure( @@ -252,99 +245,6 @@ async def test_step_destination_entity( } -@pytest.mark.usefixtures("valid_response") -async def test_reconfigure_destination_entity(hass: HomeAssistant) -> None: - """Test reconfigure flow when choosing a destination entity.""" - origin_entity_selector_result = await do_common_reconfiguration_steps(hass) - menu_result = await hass.config_entries.flow.async_configure( - origin_entity_selector_result["flow_id"], {"next_step_id": "destination_entity"} - ) - assert menu_result["type"] is FlowResultType.FORM - - destination_entity_selector_result = await hass.config_entries.flow.async_configure( - menu_result["flow_id"], - {"destination_entity_id": "zone.home"}, - ) - assert destination_entity_selector_result["type"] is FlowResultType.ABORT - assert destination_entity_selector_result["reason"] == "reconfigure_successful" - entry = hass.config_entries.async_entries(DOMAIN)[0] - assert entry.data == { - CONF_NAME: "test", - CONF_API_KEY: API_KEY, - CONF_ORIGIN_ENTITY_ID: "zone.home", - CONF_DESTINATION_ENTITY_ID: "zone.home", - CONF_MODE: TRAVEL_MODE_BICYCLE, - } - - -@pytest.mark.usefixtures("valid_response") -async def test_reconfigure_destination_coordinates(hass: HomeAssistant) -> None: - """Test reconfigure flow when choosing destination coordinates.""" - origin_entity_selector_result = await do_common_reconfiguration_steps(hass) - menu_result = await hass.config_entries.flow.async_configure( - origin_entity_selector_result["flow_id"], - {"next_step_id": "destination_coordinates"}, - ) - assert menu_result["type"] is FlowResultType.FORM - - destination_entity_selector_result = await hass.config_entries.flow.async_configure( - menu_result["flow_id"], - { - "destination": { - "latitude": 43.0, - "longitude": -80.3, - "radius": 5.0, - } - }, - ) - assert destination_entity_selector_result["type"] is FlowResultType.ABORT - assert destination_entity_selector_result["reason"] == "reconfigure_successful" - entry = hass.config_entries.async_entries(DOMAIN)[0] - assert entry.data == { - CONF_NAME: "test", - CONF_API_KEY: API_KEY, - 
CONF_ORIGIN_ENTITY_ID: "zone.home", - CONF_DESTINATION_LATITUDE: 43.0, - CONF_DESTINATION_LONGITUDE: -80.3, - CONF_MODE: TRAVEL_MODE_BICYCLE, - } - - -async def do_common_reconfiguration_steps(hass: HomeAssistant) -> None: - """Walk through common flow steps for reconfiguring.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="0123456789", - data=DEFAULT_CONFIG, - options=DEFAULT_OPTIONS, - ) - entry.add_to_hass(hass) - - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - reconfigure_result = await entry.start_reconfigure_flow(hass) - assert reconfigure_result["type"] is FlowResultType.FORM - assert reconfigure_result["step_id"] == "user" - - user_step_result = await hass.config_entries.flow.async_configure( - reconfigure_result["flow_id"], - { - CONF_API_KEY: API_KEY, - CONF_MODE: TRAVEL_MODE_BICYCLE, - CONF_NAME: "test", - }, - ) - await hass.async_block_till_done() - menu_result = await hass.config_entries.flow.async_configure( - user_step_result["flow_id"], {"next_step_id": "origin_entity"} - ) - return await hass.config_entries.flow.async_configure( - menu_result["flow_id"], - {"origin_entity_id": "zone.home"}, - ) - - async def test_form_invalid_auth(hass: HomeAssistant) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( @@ -422,7 +322,7 @@ async def test_options_flow(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("valid_response") async def test_options_flow_arrival_time_step( - hass: HomeAssistant, option_init_result: config_entries.ConfigFlowResult + hass: HomeAssistant, option_init_result: FlowResultType ) -> None: """Test the options flow arrival time type.""" menu_result = await hass.config_entries.options.async_configure( @@ -446,7 +346,7 @@ async def test_options_flow_arrival_time_step( @pytest.mark.usefixtures("valid_response") async def test_options_flow_departure_time_step( - hass: HomeAssistant, option_init_result: config_entries.ConfigFlowResult + hass: HomeAssistant, option_init_result: FlowResultType ) -> None: """Test the options flow departure time type.""" menu_result = await hass.config_entries.options.async_configure( @@ -470,7 +370,7 @@ async def test_options_flow_departure_time_step( @pytest.mark.usefixtures("valid_response") async def test_options_flow_no_time_step( - hass: HomeAssistant, option_init_result: config_entries.ConfigFlowResult + hass: HomeAssistant, option_init_result: FlowResultType ) -> None: """Test the options flow arrival time type.""" menu_result = await hass.config_entries.options.async_configure( diff --git a/tests/components/history/conftest.py b/tests/components/history/conftest.py index dd10fccccdc..075909dfd63 100644 --- a/tests/components/history/conftest.py +++ b/tests/components/history/conftest.py @@ -13,7 +13,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" diff --git a/tests/components/history/test_init.py b/tests/components/history/test_init.py index 3b4b02a877e..7806b7c9ef4 100644 --- a/tests/components/history/test_init.py +++ b/tests/components/history/test_init.py @@ -1,6 +1,6 @@ """The tests the History component.""" -from datetime import datetime, timedelta +from datetime import timedelta from http import HTTPStatus import json from unittest.mock import sentinel @@ -13,7 +13,7 @@ from homeassistant.components.recorder 
import Recorder from homeassistant.components.recorder.history import get_significant_states from homeassistant.components.recorder.models import process_timestamp from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE -from homeassistant.core import HomeAssistant, State +from homeassistant.core import HomeAssistant from homeassistant.helpers.json import JSONEncoder from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -291,9 +291,13 @@ async def test_get_significant_states_only(hass: HomeAssistant, hass_history) -> ) -async def async_record_states( - hass: HomeAssistant, -) -> tuple[datetime, datetime, dict[str, list[State | None]]]: +async def check_significant_states(hass, zero, four, states, config): + """Check if significant states are retrieved.""" + hist = get_significant_states(hass, zero, four) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +async def async_record_states(hass): """Record some test states. We inject a bunch of state updates from media player, zone and diff --git a/tests/components/history/test_init_db_schema_30.py b/tests/components/history/test_init_db_schema_30.py new file mode 100644 index 00000000000..bec074362ca --- /dev/null +++ b/tests/components/history/test_init_db_schema_30.py @@ -0,0 +1,1011 @@ +"""The tests the History component.""" + +from __future__ import annotations + +from datetime import timedelta +from http import HTTPStatus +import json +from unittest.mock import patch, sentinel + +from freezegun import freeze_time +import pytest + +from homeassistant.components import recorder +from homeassistant.components.recorder import Recorder +from homeassistant.components.recorder.history import get_significant_states +from homeassistant.components.recorder.models import process_timestamp +from homeassistant.core import HomeAssistant +from homeassistant.helpers.json import JSONEncoder +from homeassistant.setup import async_setup_component +import homeassistant.util.dt as dt_util + +from tests.components.recorder.common import ( + assert_dict_of_states_equal_without_context_and_last_changed, + assert_multiple_states_equal_without_context, + assert_multiple_states_equal_without_context_and_last_changed, + assert_states_equal_without_context, + async_recorder_block_till_done, + async_wait_recording_done, + old_db_schema, +) +from tests.typing import ClientSessionGenerator, WebSocketGenerator + + +@pytest.fixture(autouse=True) +def db_schema_30(): + """Fixture to initialize the db with the old schema 30.""" + with old_db_schema("30"): + yield + + +@pytest.fixture +def legacy_hass_history(hass: HomeAssistant, hass_history): + """Home Assistant fixture to use legacy history recording.""" + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + yield + + +@pytest.mark.usefixtures("legacy_hass_history") +async def test_setup() -> None: + """Test setup method of history.""" + # Verification occurs in the fixture + + +async def test_get_significant_states(hass: HomeAssistant, legacy_hass_history) -> None: + """Test that only significant states are returned. + + We should get back every thermostat change that + includes an attribute change, but only the state updates for + media player (attribute changes are not significant and not returned). 
+ """ + zero, four, states = await async_record_states(hass) + hist = get_significant_states(hass, zero, four, entity_ids=list(states)) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +async def test_get_significant_states_minimal_response( + hass: HomeAssistant, legacy_hass_history +) -> None: + """Test that only significant states are returned. + + When minimal responses is set only the first and + last states return a complete state. + + We should get back every thermostat change that + includes an attribute change, but only the state updates for + media player (attribute changes are not significant and not returned). + """ + zero, four, states = await async_record_states(hass) + hist = get_significant_states( + hass, zero, four, minimal_response=True, entity_ids=list(states) + ) + entites_with_reducable_states = [ + "media_player.test", + "media_player.test3", + ] + + # All states for media_player.test state are reduced + # down to last_changed and state when minimal_response + # is set except for the first state. + # is set. We use JSONEncoder to make sure that are + # pre-encoded last_changed is always the same as what + # will happen with encoding a native state + for entity_id in entites_with_reducable_states: + entity_states = states[entity_id] + for state_idx in range(1, len(entity_states)): + input_state = entity_states[state_idx] + orig_last_changed = json.dumps( + process_timestamp(input_state.last_changed), + cls=JSONEncoder, + ).replace('"', "") + orig_state = input_state.state + entity_states[state_idx] = { + "last_changed": orig_last_changed, + "state": orig_state, + } + + assert len(hist) == len(states) + assert_states_equal_without_context( + states["media_player.test"][0], hist["media_player.test"][0] + ) + assert states["media_player.test"][1] == hist["media_player.test"][1] + assert states["media_player.test"][2] == hist["media_player.test"][2] + + assert_multiple_states_equal_without_context( + states["media_player.test2"], hist["media_player.test2"] + ) + assert_states_equal_without_context( + states["media_player.test3"][0], hist["media_player.test3"][0] + ) + assert states["media_player.test3"][1] == hist["media_player.test3"][1] + + assert_multiple_states_equal_without_context( + states["script.can_cancel_this_one"], hist["script.can_cancel_this_one"] + ) + assert_multiple_states_equal_without_context_and_last_changed( + states["thermostat.test"], hist["thermostat.test"] + ) + assert_multiple_states_equal_without_context_and_last_changed( + states["thermostat.test2"], hist["thermostat.test2"] + ) + + +async def test_get_significant_states_with_initial( + hass: HomeAssistant, legacy_hass_history +) -> None: + """Test that only significant states are returned. + + We should get back every thermostat change that + includes an attribute change, but only the state updates for + media player (attribute changes are not significant and not returned). 
+ """ + zero, four, states = await async_record_states(hass) + one = zero + timedelta(seconds=1) + one_with_microsecond = zero + timedelta(seconds=1, microseconds=1) + one_and_half = zero + timedelta(seconds=1.5) + for entity_id in states: + if entity_id == "media_player.test": + states[entity_id] = states[entity_id][1:] + for state in states[entity_id]: + if state.last_changed in (one, one_with_microsecond): + state.last_changed = one_and_half + state.last_updated = one_and_half + + hist = get_significant_states( + hass, + one_and_half, + four, + include_start_time_state=True, + entity_ids=list(states), + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +async def test_get_significant_states_without_initial( + hass: HomeAssistant, legacy_hass_history +) -> None: + """Test that only significant states are returned. + + We should get back every thermostat change that + includes an attribute change, but only the state updates for + media player (attribute changes are not significant and not returned). + """ + zero, four, states = await async_record_states(hass) + one = zero + timedelta(seconds=1) + one_with_microsecond = zero + timedelta(seconds=1, microseconds=1) + one_and_half = zero + timedelta(seconds=1.5) + for entity_id in states: + states[entity_id] = list( + filter( + lambda s: s.last_changed not in (one, one_with_microsecond), + states[entity_id], + ) + ) + del states["media_player.test2"] + + hist = get_significant_states( + hass, + one_and_half, + four, + include_start_time_state=False, + entity_ids=list(states), + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +async def test_get_significant_states_entity_id( + hass: HomeAssistant, hass_history +) -> None: + """Test that only significant states are returned for one entity.""" + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + zero, four, states = await async_record_states(hass) + del states["media_player.test2"] + del states["media_player.test3"] + del states["thermostat.test"] + del states["thermostat.test2"] + del states["script.can_cancel_this_one"] + + hist = get_significant_states(hass, zero, four, ["media_player.test"]) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +async def test_get_significant_states_multiple_entity_ids( + hass: HomeAssistant, legacy_hass_history +) -> None: + """Test that only significant states are returned for one entity.""" + zero, four, states = await async_record_states(hass) + del states["media_player.test2"] + del states["media_player.test3"] + del states["thermostat.test2"] + del states["script.can_cancel_this_one"] + + hist = get_significant_states( + hass, + zero, + four, + ["media_player.test", "thermostat.test"], + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +async def test_get_significant_states_are_ordered( + hass: HomeAssistant, legacy_hass_history +) -> None: + """Test order of results from get_significant_states. + + When entity ids are given, the results should be returned with the data + in the same order. 
+ """ + zero, four, _states = await async_record_states(hass) + entity_ids = ["media_player.test", "media_player.test2"] + hist = get_significant_states(hass, zero, four, entity_ids) + assert list(hist.keys()) == entity_ids + entity_ids = ["media_player.test2", "media_player.test"] + hist = get_significant_states(hass, zero, four, entity_ids) + assert list(hist.keys()) == entity_ids + + +async def test_get_significant_states_only( + hass: HomeAssistant, legacy_hass_history +) -> None: + """Test significant states when significant_states_only is set.""" + entity_id = "sensor.test" + + async def set_state(state, **kwargs): + """Set the state.""" + hass.states.async_set(entity_id, state, **kwargs) + await async_wait_recording_done(hass) + return hass.states.get(entity_id) + + start = dt_util.utcnow() - timedelta(minutes=4) + points = [start + timedelta(minutes=i) for i in range(1, 4)] + + states = [] + with freeze_time(start) as freezer: + await set_state("123", attributes={"attribute": 10.64}) + + freezer.move_to(points[0]) + # Attributes are different, state not + states.append(await set_state("123", attributes={"attribute": 21.42})) + + freezer.move_to(points[1]) + # state is different, attributes not + states.append(await set_state("32", attributes={"attribute": 21.42})) + + freezer.move_to(points[2]) + # everything is different + states.append(await set_state("412", attributes={"attribute": 54.23})) + + hist = get_significant_states( + hass, + start, + significant_changes_only=True, + entity_ids=list({state.entity_id for state in states}), + ) + + assert len(hist[entity_id]) == 2 + assert not any( + state.last_updated == states[0].last_updated for state in hist[entity_id] + ) + assert any( + state.last_updated == states[1].last_updated for state in hist[entity_id] + ) + assert any( + state.last_updated == states[2].last_updated for state in hist[entity_id] + ) + + hist = get_significant_states( + hass, + start, + significant_changes_only=False, + entity_ids=list({state.entity_id for state in states}), + ) + + assert len(hist[entity_id]) == 3 + assert_multiple_states_equal_without_context_and_last_changed( + states, hist[entity_id] + ) + + +def check_significant_states(hass, zero, four, states, config): + """Check if significant states are retrieved.""" + hist = get_significant_states(hass, zero, four) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +async def async_record_states(hass): + """Record some test states. + + We inject a bunch of state updates from media player, zone and + thermostat. 
+ """ + mp = "media_player.test" + mp2 = "media_player.test2" + mp3 = "media_player.test3" + therm = "thermostat.test" + therm2 = "thermostat.test2" + zone = "zone.home" + script_c = "script.can_cancel_this_one" + + async def async_set_state(entity_id, state, **kwargs): + """Set the state.""" + hass.states.async_set(entity_id, state, **kwargs) + await async_wait_recording_done(hass) + return hass.states.get(entity_id) + + zero = dt_util.utcnow() + one = zero + timedelta(seconds=1) + two = one + timedelta(seconds=1) + three = two + timedelta(seconds=1) + four = three + timedelta(seconds=1) + + states = {therm: [], therm2: [], mp: [], mp2: [], mp3: [], script_c: []} + with freeze_time(one) as freezer: + states[mp].append( + await async_set_state( + mp, "idle", attributes={"media_title": str(sentinel.mt1)} + ) + ) + states[mp2].append( + await async_set_state( + mp2, "YouTube", attributes={"media_title": str(sentinel.mt2)} + ) + ) + states[mp3].append( + await async_set_state( + mp3, "idle", attributes={"media_title": str(sentinel.mt1)} + ) + ) + states[therm].append( + await async_set_state(therm, 20, attributes={"current_temperature": 19.5}) + ) + + freezer.move_to(one + timedelta(microseconds=1)) + states[mp].append( + await async_set_state( + mp, "YouTube", attributes={"media_title": str(sentinel.mt2)} + ) + ) + + freezer.move_to(two) + # This state will be skipped only different in time + await async_set_state( + mp, "YouTube", attributes={"media_title": str(sentinel.mt3)} + ) + # This state will be skipped because domain is excluded + await async_set_state(zone, "zoning") + states[script_c].append( + await async_set_state(script_c, "off", attributes={"can_cancel": True}) + ) + states[therm].append( + await async_set_state(therm, 21, attributes={"current_temperature": 19.8}) + ) + states[therm2].append( + await async_set_state(therm2, 20, attributes={"current_temperature": 19}) + ) + + freezer.move_to(three) + states[mp].append( + await async_set_state( + mp, "Netflix", attributes={"media_title": str(sentinel.mt4)} + ) + ) + states[mp3].append( + await async_set_state( + mp3, "Netflix", attributes={"media_title": str(sentinel.mt3)} + ) + ) + # Attributes changed even though state is the same + states[therm].append( + await async_set_state(therm, 21, attributes={"current_temperature": 20}) + ) + + return zero, four, states + + +async def test_fetch_period_api( + hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator +) -> None: + """Test the fetch period view for history.""" + await async_setup_component(hass, "history", {}) + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + client = await hass_client() + response = await client.get( + f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=sensor.power" + ) + assert response.status == HTTPStatus.OK + + +async def test_fetch_period_api_with_minimal_response( + hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator +) -> None: + """Test the fetch period view for history with minimal_response.""" + now = dt_util.utcnow() + await async_setup_component(hass, "history", {}) + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + hass.states.async_set("sensor.power", 0, {"attr": "any"}) + await async_wait_recording_done(hass) + hass.states.async_set("sensor.power", 50, {"attr": "any"}) + await async_wait_recording_done(hass) + hass.states.async_set("sensor.power", 
23, {"attr": "any"}) + last_changed = hass.states.get("sensor.power").last_changed + await async_wait_recording_done(hass) + hass.states.async_set("sensor.power", 23, {"attr": "any"}) + await async_wait_recording_done(hass) + client = await hass_client() + response = await client.get( + f"/api/history/period/{now.isoformat()}?filter_entity_id=sensor.power&minimal_response&no_attributes" + ) + assert response.status == HTTPStatus.OK + response_json = await response.json() + assert len(response_json[0]) == 3 + state_list = response_json[0] + + assert state_list[0]["entity_id"] == "sensor.power" + assert state_list[0]["attributes"] == {} + assert state_list[0]["state"] == "0" + + assert "attributes" not in state_list[1] + assert "entity_id" not in state_list[1] + assert state_list[1]["state"] == "50" + + assert "attributes" not in state_list[2] + assert "entity_id" not in state_list[2] + assert state_list[2]["state"] == "23" + assert state_list[2]["last_changed"] == json.dumps( + process_timestamp(last_changed), + cls=JSONEncoder, + ).replace('"', "") + + +async def test_fetch_period_api_with_no_timestamp( + hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator +) -> None: + """Test the fetch period view for history with no timestamp.""" + await async_setup_component(hass, "history", {}) + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + client = await hass_client() + response = await client.get("/api/history/period?filter_entity_id=sensor.power") + assert response.status == HTTPStatus.OK + + +async def test_fetch_period_api_with_include_order( + hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator +) -> None: + """Test the fetch period view for history.""" + await async_setup_component( + hass, + "history", + { + "history": { + "use_include_order": True, + "include": {"entities": ["light.kitchen"]}, + } + }, + ) + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + client = await hass_client() + response = await client.get( + f"/api/history/period/{dt_util.utcnow().isoformat()}", + params={"filter_entity_id": "non.existing,something.else"}, + ) + assert response.status == HTTPStatus.OK + + +async def test_entity_ids_limit_via_api( + hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator +) -> None: + """Test limiting history to entity_ids.""" + await async_setup_component( + hass, + "history", + {"history": {}}, + ) + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + hass.states.async_set("light.kitchen", "on") + hass.states.async_set("light.cow", "on") + hass.states.async_set("light.nomatch", "on") + + await async_wait_recording_done(hass) + + client = await hass_client() + response = await client.get( + f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=light.kitchen,light.cow", + ) + assert response.status == HTTPStatus.OK + response_json = await response.json() + assert len(response_json) == 2 + assert response_json[0][0]["entity_id"] == "light.kitchen" + assert response_json[1][0]["entity_id"] == "light.cow" + + +async def test_entity_ids_limit_via_api_with_skip_initial_state( + hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator +) -> None: + """Test limiting history to entity_ids with skip_initial_state.""" + await async_setup_component( + hass, + "history", + {"history": {}}, + ) 
+ instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + hass.states.async_set("light.kitchen", "on") + hass.states.async_set("light.cow", "on") + hass.states.async_set("light.nomatch", "on") + + await async_wait_recording_done(hass) + + client = await hass_client() + response = await client.get( + f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=light.kitchen,light.cow&skip_initial_state", + ) + assert response.status == HTTPStatus.OK + response_json = await response.json() + assert len(response_json) == 0 + + when = dt_util.utcnow() - timedelta(minutes=1) + response = await client.get( + f"/api/history/period/{when.isoformat()}?filter_entity_id=light.kitchen,light.cow&skip_initial_state", + ) + assert response.status == HTTPStatus.OK + response_json = await response.json() + assert len(response_json) == 2 + assert response_json[0][0]["entity_id"] == "light.kitchen" + assert response_json[1][0]["entity_id"] == "light.cow" + + +async def test_history_during_period( + hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator +) -> None: + """Test history_during_period.""" + now = dt_util.utcnow() + + await async_setup_component(hass, "history", {}) + await async_setup_component(hass, "sensor", {}) + await async_recorder_block_till_done(hass) + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + hass.states.async_set("sensor.test", "on", attributes={"any": "attr"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("sensor.test", "off", attributes={"any": "attr"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("sensor.test", "off", attributes={"any": "changed"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("sensor.test", "off", attributes={"any": "again"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("sensor.test", "on", attributes={"any": "attr"}) + await async_wait_recording_done(hass) + + await async_wait_recording_done(hass) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "history/history_during_period", + "start_time": now.isoformat(), + "end_time": now.isoformat(), + "entity_ids": ["sensor.test"], + "include_start_time_state": True, + "significant_changes_only": False, + "no_attributes": True, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == {} + + await client.send_json( + { + "id": 2, + "type": "history/history_during_period", + "start_time": now.isoformat(), + "entity_ids": ["sensor.test"], + "include_start_time_state": True, + "significant_changes_only": False, + "no_attributes": True, + "minimal_response": True, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["id"] == 2 + + sensor_test_history = response["result"]["sensor.test"] + assert len(sensor_test_history) == 3 + + assert sensor_test_history[0]["s"] == "on" + assert sensor_test_history[0]["a"] == {} + assert isinstance(sensor_test_history[0]["lu"], float) + assert ( + "lc" not in sensor_test_history[0] + ) # skipped if the same a last_updated (lu) + + assert "a" not in sensor_test_history[1] + assert sensor_test_history[1]["s"] == "off" + assert isinstance(sensor_test_history[1]["lu"], float) + assert ( + "lc" not in sensor_test_history[1] + ) # skipped if the same a last_updated (lu) + + assert sensor_test_history[2]["s"] == "on" + assert 
"a" not in sensor_test_history[2] + + await client.send_json( + { + "id": 3, + "type": "history/history_during_period", + "start_time": now.isoformat(), + "entity_ids": ["sensor.test"], + "include_start_time_state": True, + "significant_changes_only": False, + "no_attributes": False, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["id"] == 3 + sensor_test_history = response["result"]["sensor.test"] + + assert len(sensor_test_history) == 5 + + assert sensor_test_history[0]["s"] == "on" + assert sensor_test_history[0]["a"] == {"any": "attr"} + assert isinstance(sensor_test_history[0]["lu"], float) + assert ( + "lc" not in sensor_test_history[0] + ) # skipped if the same a last_updated (lu) + + assert sensor_test_history[1]["s"] == "off" + assert isinstance(sensor_test_history[1]["lu"], float) + assert ( + "lc" not in sensor_test_history[1] + ) # skipped if the same a last_updated (lu) + assert sensor_test_history[1]["a"] == {"any": "attr"} + + assert sensor_test_history[4]["s"] == "on" + assert sensor_test_history[4]["a"] == {"any": "attr"} + + await client.send_json( + { + "id": 4, + "type": "history/history_during_period", + "start_time": now.isoformat(), + "entity_ids": ["sensor.test"], + "include_start_time_state": True, + "significant_changes_only": True, + "no_attributes": False, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["id"] == 4 + sensor_test_history = response["result"]["sensor.test"] + + assert len(sensor_test_history) == 3 + + assert sensor_test_history[0]["s"] == "on" + assert sensor_test_history[0]["a"] == {"any": "attr"} + assert isinstance(sensor_test_history[0]["lu"], float) + assert ( + "lc" not in sensor_test_history[0] + ) # skipped if the same a last_updated (lu) + + assert sensor_test_history[1]["s"] == "off" + assert isinstance(sensor_test_history[1]["lu"], float) + assert ( + "lc" not in sensor_test_history[1] + ) # skipped if the same a last_updated (lu) + assert sensor_test_history[1]["a"] == {"any": "attr"} + + assert sensor_test_history[2]["s"] == "on" + assert sensor_test_history[2]["a"] == {"any": "attr"} + + +async def test_history_during_period_impossible_conditions( + hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator +) -> None: + """Test history_during_period returns when condition cannot be true.""" + await async_setup_component(hass, "history", {}) + await async_setup_component(hass, "sensor", {}) + await async_recorder_block_till_done(hass) + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + hass.states.async_set("sensor.test", "on", attributes={"any": "attr"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("sensor.test", "off", attributes={"any": "attr"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("sensor.test", "off", attributes={"any": "changed"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("sensor.test", "off", attributes={"any": "again"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("sensor.test", "on", attributes={"any": "attr"}) + await async_wait_recording_done(hass) + + await async_wait_recording_done(hass) + + after = dt_util.utcnow() + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "history/history_during_period", + "start_time": after.isoformat(), + "end_time": after.isoformat(), + "entity_ids": ["sensor.test"], + 
"include_start_time_state": False, + "significant_changes_only": False, + "no_attributes": True, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["id"] == 1 + assert response["result"] == {} + + future = dt_util.utcnow() + timedelta(hours=10) + + await client.send_json( + { + "id": 2, + "type": "history/history_during_period", + "start_time": future.isoformat(), + "entity_ids": ["sensor.test"], + "include_start_time_state": True, + "significant_changes_only": True, + "no_attributes": True, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["id"] == 2 + assert response["result"] == {} + + +@pytest.mark.parametrize( + "time_zone", ["UTC", "Europe/Berlin", "America/Chicago", "US/Hawaii"] +) +async def test_history_during_period_significant_domain( + hass: HomeAssistant, + recorder_mock: Recorder, + hass_ws_client: WebSocketGenerator, + time_zone, +) -> None: + """Test history_during_period with climate domain.""" + await hass.config.async_set_time_zone(time_zone) + now = dt_util.utcnow() + + await async_setup_component(hass, "history", {}) + await async_setup_component(hass, "sensor", {}) + await async_recorder_block_till_done(hass) + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + hass.states.async_set("climate.test", "on", attributes={"temperature": "1"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("climate.test", "off", attributes={"temperature": "2"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("climate.test", "off", attributes={"temperature": "3"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("climate.test", "off", attributes={"temperature": "4"}) + await async_recorder_block_till_done(hass) + hass.states.async_set("climate.test", "on", attributes={"temperature": "5"}) + await async_wait_recording_done(hass) + + await async_wait_recording_done(hass) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "history/history_during_period", + "start_time": now.isoformat(), + "end_time": now.isoformat(), + "entity_ids": ["climate.test"], + "include_start_time_state": True, + "significant_changes_only": False, + "no_attributes": True, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == {} + + await client.send_json( + { + "id": 2, + "type": "history/history_during_period", + "start_time": now.isoformat(), + "entity_ids": ["climate.test"], + "include_start_time_state": True, + "significant_changes_only": False, + "no_attributes": True, + "minimal_response": True, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["id"] == 2 + + sensor_test_history = response["result"]["climate.test"] + assert len(sensor_test_history) == 5 + + assert sensor_test_history[0]["s"] == "on" + assert sensor_test_history[0]["a"] == {} + assert isinstance(sensor_test_history[0]["lu"], float) + assert ( + "lc" not in sensor_test_history[0] + ) # skipped if the same a last_updated (lu) + + assert "a" in sensor_test_history[1] + assert sensor_test_history[1]["s"] == "off" + assert ( + "lc" not in sensor_test_history[1] + ) # skipped if the same a last_updated (lu) + + assert sensor_test_history[4]["s"] == "on" + assert sensor_test_history[4]["a"] == {} + + await client.send_json( + { + "id": 3, + "type": "history/history_during_period", + "start_time": now.isoformat(), 
+ "entity_ids": ["climate.test"], + "include_start_time_state": True, + "significant_changes_only": False, + "no_attributes": False, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["id"] == 3 + sensor_test_history = response["result"]["climate.test"] + + assert len(sensor_test_history) == 5 + + assert sensor_test_history[0]["s"] == "on" + assert sensor_test_history[0]["a"] == {"temperature": "1"} + assert isinstance(sensor_test_history[0]["lu"], float) + assert ( + "lc" not in sensor_test_history[0] + ) # skipped if the same a last_updated (lu) + + assert sensor_test_history[1]["s"] == "off" + assert isinstance(sensor_test_history[1]["lu"], float) + assert ( + "lc" not in sensor_test_history[1] + ) # skipped if the same a last_updated (lu) + assert sensor_test_history[1]["a"] == {"temperature": "2"} + + assert sensor_test_history[4]["s"] == "on" + assert sensor_test_history[4]["a"] == {"temperature": "5"} + + await client.send_json( + { + "id": 4, + "type": "history/history_during_period", + "start_time": now.isoformat(), + "entity_ids": ["climate.test"], + "include_start_time_state": True, + "significant_changes_only": True, + "no_attributes": False, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["id"] == 4 + sensor_test_history = response["result"]["climate.test"] + + assert len(sensor_test_history) == 5 + + assert sensor_test_history[0]["s"] == "on" + assert sensor_test_history[0]["a"] == {"temperature": "1"} + assert isinstance(sensor_test_history[0]["lu"], float) + assert ( + "lc" not in sensor_test_history[0] + ) # skipped if the same a last_updated (lu) + + assert sensor_test_history[1]["s"] == "off" + assert isinstance(sensor_test_history[1]["lu"], float) + assert ( + "lc" not in sensor_test_history[1] + ) # skipped if the same a last_updated (lu) + assert sensor_test_history[1]["a"] == {"temperature": "2"} + + assert sensor_test_history[2]["s"] == "off" + assert sensor_test_history[2]["a"] == {"temperature": "3"} + + assert sensor_test_history[3]["s"] == "off" + assert sensor_test_history[3]["a"] == {"temperature": "4"} + + assert sensor_test_history[4]["s"] == "on" + assert sensor_test_history[4]["a"] == {"temperature": "5"} + + # Test we impute the state time state + later = dt_util.utcnow() + await client.send_json( + { + "id": 5, + "type": "history/history_during_period", + "start_time": later.isoformat(), + "entity_ids": ["climate.test"], + "include_start_time_state": True, + "significant_changes_only": True, + "no_attributes": False, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["id"] == 5 + sensor_test_history = response["result"]["climate.test"] + + assert len(sensor_test_history) == 1 + + assert sensor_test_history[0]["s"] == "on" + assert sensor_test_history[0]["a"] == {"temperature": "5"} + assert sensor_test_history[0]["lu"] == later.timestamp() + assert ( + "lc" not in sensor_test_history[0] + ) # skipped if the same a last_updated (lu) + + +async def test_history_during_period_bad_start_time( + hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator +) -> None: + """Test history_during_period bad state time.""" + await async_setup_component( + hass, + "history", + {"history": {}}, + ) + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": 
"history/history_during_period", + "entity_ids": ["sensor.pet"], + "start_time": "cats", + } + ) + response = await client.receive_json() + assert not response["success"] + assert response["error"]["code"] == "invalid_start_time" + + +async def test_history_during_period_bad_end_time( + hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator +) -> None: + """Test history_during_period bad end time.""" + now = dt_util.utcnow() + + await async_setup_component( + hass, + "history", + {"history": {}}, + ) + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "history/history_during_period", + "entity_ids": ["sensor.pet"], + "start_time": now.isoformat(), + "end_time": "dogs", + } + ) + response = await client.receive_json() + assert not response["success"] + assert response["error"]["code"] == "invalid_end_time" diff --git a/tests/components/history/test_websocket_api.py b/tests/components/history/test_websocket_api.py index 717840c6b05..e5c33d0e7af 100644 --- a/tests/components/history/test_websocket_api.py +++ b/tests/components/history/test_websocket_api.py @@ -2,7 +2,7 @@ import asyncio from datetime import timedelta -from unittest.mock import ANY, patch +from unittest.mock import patch from freezegun import freeze_time import pytest @@ -10,9 +10,8 @@ import pytest from homeassistant.components import history from homeassistant.components.history import websocket_api from homeassistant.components.recorder import Recorder -from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE, STATE_OFF, STATE_ON -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.event import async_track_state_change_event +from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -2073,84 +2072,3 @@ async def test_history_stream_historical_only_with_start_time_state_past( "id": 1, "type": "event", } - - -async def test_history_stream_live_chained_events( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator -) -> None: - """Test history stream with history with a chained event.""" - now = dt_util.utcnow() - await async_setup_component(hass, "history", {}) - - await async_wait_recording_done(hass) - hass.states.async_set("binary_sensor.is_light", STATE_OFF) - - client = await hass_ws_client() - await client.send_json( - { - "id": 1, - "type": "history/stream", - "entity_ids": ["binary_sensor.is_light"], - "start_time": now.isoformat(), - "include_start_time_state": True, - "significant_changes_only": False, - "no_attributes": False, - "minimal_response": True, - } - ) - response = await client.receive_json() - assert response["success"] - assert response["id"] == 1 - assert response["type"] == "result" - - response = await client.receive_json() - - assert response == { - "event": { - "end_time": ANY, - "start_time": ANY, - "states": { - "binary_sensor.is_light": [ - { - "a": {}, - "lu": ANY, - "s": STATE_OFF, - }, - ], - }, - }, - "id": 1, - "type": "event", - } - - await async_recorder_block_till_done(hass) - - @callback - def auto_off_listener(event): - hass.states.async_set("binary_sensor.is_light", STATE_OFF) - - async_track_state_change_event(hass, ["binary_sensor.is_light"], auto_off_listener) - - 
hass.states.async_set("binary_sensor.is_light", STATE_ON) - - response = await client.receive_json() - assert response == { - "event": { - "states": { - "binary_sensor.is_light": [ - { - "lu": ANY, - "s": STATE_ON, - "a": {}, - }, - { - "lu": ANY, - "s": STATE_OFF, - "a": {}, - }, - ], - }, - }, - "id": 1, - "type": "event", - } diff --git a/tests/components/history_stats/conftest.py b/tests/components/history_stats/conftest.py deleted file mode 100644 index f8075179e94..00000000000 --- a/tests/components/history_stats/conftest.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Fixtures for the History stats integration.""" - -from __future__ import annotations - -from collections.abc import Generator -from datetime import timedelta -from typing import Any -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.history_stats.const import ( - CONF_END, - CONF_START, - DEFAULT_NAME, - DOMAIN, -) -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_ENTITY_ID, CONF_NAME, CONF_STATE, CONF_TYPE -from homeassistant.core import HomeAssistant, State -from homeassistant.helpers.entity_component import async_update_entity -from homeassistant.util import dt as dt_util - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Automatically patch history stats setup.""" - with patch( - "homeassistant.components.history_stats.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture(name="get_config") -async def get_config_to_integration_load() -> dict[str, Any]: - """Return configuration. - - To override the config, tests can be marked with: - @pytest.mark.parametrize("get_config", [{...}]) - """ - return { - CONF_NAME: DEFAULT_NAME, - CONF_ENTITY_ID: "binary_sensor.test_monitored", - CONF_STATE: ["on"], - CONF_TYPE: "count", - CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", - CONF_END: "{{ utcnow() }}", - } - - -@pytest.fixture(name="loaded_entry") -async def load_integration( - hass: HomeAssistant, get_config: dict[str, Any] -) -> MockConfigEntry: - """Set up the History stats integration in Home Assistant.""" - start_time = dt_util.utcnow() - timedelta(minutes=60) - t0 = start_time + timedelta(minutes=20) - t1 = t0 + timedelta(minutes=10) - t2 = t1 + timedelta(minutes=10) - - def _fake_states(*args, **kwargs): - return { - "binary_sensor.test_monitored": [ - State("binary_sensor.test_monitored", "off", last_changed=start_time), - State("binary_sensor.test_monitored", "on", last_changed=t0), - State("binary_sensor.test_monitored", "off", last_changed=t1), - State("binary_sensor.test_monitored", "on", last_changed=t2), - ] - } - - config_entry = MockConfigEntry( - domain=DOMAIN, - title=DEFAULT_NAME, - source=SOURCE_USER, - options=get_config, - entry_id="1", - ) - - config_entry.add_to_hass(hass) - - with patch( - "homeassistant.components.recorder.history.state_changes_during_period", - _fake_states, - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - await async_update_entity(hass, "sensor.test") - await hass.async_block_till_done() - - return config_entry diff --git a/tests/components/history_stats/test_config_flow.py b/tests/components/history_stats/test_config_flow.py deleted file mode 100644 index a695a06995e..00000000000 --- a/tests/components/history_stats/test_config_flow.py +++ /dev/null @@ -1,195 +0,0 @@ -"""Test the History stats config flow.""" - -from 
__future__ import annotations - -from unittest.mock import AsyncMock - -from homeassistant import config_entries -from homeassistant.components.history_stats.const import ( - CONF_DURATION, - CONF_END, - CONF_START, - DEFAULT_NAME, - DOMAIN, -) -from homeassistant.components.recorder import Recorder -from homeassistant.const import CONF_ENTITY_ID, CONF_NAME, CONF_STATE, CONF_TYPE -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_form( - recorder_mock: Recorder, hass: HomeAssistant, mock_setup_entry: AsyncMock -) -> None: - """Test we get the form.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_ENTITY_ID: "binary_sensor.test_monitored", - CONF_STATE: ["on"], - CONF_TYPE: "count", - }, - ) - await hass.async_block_till_done() - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", - CONF_END: "{{ utcnow() }}", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["version"] == 1 - assert result["options"] == { - CONF_NAME: DEFAULT_NAME, - CONF_ENTITY_ID: "binary_sensor.test_monitored", - CONF_STATE: ["on"], - CONF_TYPE: "count", - CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", - CONF_END: "{{ utcnow() }}", - } - - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_options_flow( - recorder_mock: Recorder, hass: HomeAssistant, loaded_entry: MockConfigEntry -) -> None: - """Test options flow.""" - - result = await hass.config_entries.options.async_init(loaded_entry.entry_id) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - CONF_END: "{{ utcnow() }}", - CONF_DURATION: {"hours": 8, "minutes": 0, "seconds": 0, "days": 20}, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { - CONF_NAME: DEFAULT_NAME, - CONF_ENTITY_ID: "binary_sensor.test_monitored", - CONF_STATE: ["on"], - CONF_TYPE: "count", - CONF_END: "{{ utcnow() }}", - CONF_DURATION: {"hours": 8, "minutes": 0, "seconds": 0, "days": 20}, - } - - await hass.async_block_till_done() - - # Check the entity was updated, no new entity was created - assert len(hass.states.async_all()) == 1 - - state = hass.states.get("sensor.unnamed_statistics") - assert state is not None - - -async def test_validation_options( - recorder_mock: Recorder, hass: HomeAssistant, mock_setup_entry: AsyncMock -) -> None: - """Test validation.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_ENTITY_ID: "binary_sensor.test_monitored", - CONF_STATE: ["on"], - CONF_TYPE: "count", - }, - ) - await hass.async_block_till_done() - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_START: "{{ 
as_timestamp(utcnow()) - 3600 }}", - CONF_END: "{{ utcnow() }}", - CONF_DURATION: {"hours": 8, "minutes": 0, "seconds": 0, "days": 20}, - }, - ) - await hass.async_block_till_done() - - assert result["step_id"] == "options" - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "only_two_keys_allowed"} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", - CONF_END: "{{ utcnow() }}", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["version"] == 1 - assert result["options"] == { - CONF_NAME: DEFAULT_NAME, - CONF_ENTITY_ID: "binary_sensor.test_monitored", - CONF_STATE: ["on"], - CONF_TYPE: "count", - CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", - CONF_END: "{{ utcnow() }}", - } - - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_entry_already_exist( - recorder_mock: Recorder, hass: HomeAssistant, loaded_entry: MockConfigEntry -) -> None: - """Test abort when entry already exist.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_ENTITY_ID: "binary_sensor.test_monitored", - CONF_STATE: ["on"], - CONF_TYPE: "count", - }, - ) - await hass.async_block_till_done() - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", - CONF_END: "{{ utcnow() }}", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/history_stats/test_init.py b/tests/components/history_stats/test_init.py deleted file mode 100644 index 4cd999ba31c..00000000000 --- a/tests/components/history_stats/test_init.py +++ /dev/null @@ -1,118 +0,0 @@ -"""Test History stats component setup process.""" - -from __future__ import annotations - -from homeassistant.components.history_stats.const import ( - CONF_END, - CONF_START, - DEFAULT_NAME, - DOMAIN as HISTORY_STATS_DOMAIN, -) -from homeassistant.components.recorder import Recorder -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_ENTITY_ID, CONF_NAME, CONF_STATE, CONF_TYPE -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er - -from tests.common import MockConfigEntry - - -async def test_unload_entry( - recorder_mock: Recorder, hass: HomeAssistant, loaded_entry: MockConfigEntry -) -> None: - """Test unload an entry.""" - - assert loaded_entry.state is ConfigEntryState.LOADED - assert await hass.config_entries.async_unload(loaded_entry.entry_id) - await hass.async_block_till_done() - assert loaded_entry.state is ConfigEntryState.NOT_LOADED - - -async def test_device_cleaning( - recorder_mock: Recorder, - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test the cleaning of devices linked to the helper History stats.""" - - # Source entity device config entry - source_config_entry = MockConfigEntry() - source_config_entry.add_to_hass(hass) - - # Device entry of the source entity - source_device1_entry = 
device_registry.async_get_or_create( - config_entry_id=source_config_entry.entry_id, - identifiers={("binary_sensor", "identifier_test1")}, - connections={("mac", "30:31:32:33:34:01")}, - ) - - # Source entity registry - source_entity = entity_registry.async_get_or_create( - "binary_sensor", - "test", - "source", - config_entry=source_config_entry, - device_id=source_device1_entry.id, - ) - await hass.async_block_till_done() - assert entity_registry.async_get("binary_sensor.test_source") is not None - - # Configure the configuration entry for History stats - history_stats_config_entry = MockConfigEntry( - data={}, - domain=HISTORY_STATS_DOMAIN, - options={ - CONF_NAME: DEFAULT_NAME, - CONF_ENTITY_ID: "binary_sensor.test_source", - CONF_STATE: ["on"], - CONF_TYPE: "count", - CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", - CONF_END: "{{ utcnow() }}", - }, - title="History stats", - ) - history_stats_config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(history_stats_config_entry.entry_id) - await hass.async_block_till_done() - - # Confirm the link between the source entity device and the History stats sensor - history_stats_entity = entity_registry.async_get("sensor.history_stats") - assert history_stats_entity is not None - assert history_stats_entity.device_id == source_entity.device_id - - # Device entry incorrectly linked to History stats config entry - device_registry.async_get_or_create( - config_entry_id=history_stats_config_entry.entry_id, - identifiers={("sensor", "identifier_test2")}, - connections={("mac", "30:31:32:33:34:02")}, - ) - device_registry.async_get_or_create( - config_entry_id=history_stats_config_entry.entry_id, - identifiers={("sensor", "identifier_test3")}, - connections={("mac", "30:31:32:33:34:03")}, - ) - await hass.async_block_till_done() - - # Before reloading the config entry, two devices are expected to be linked - devices_before_reload = device_registry.devices.get_devices_for_config_entry_id( - history_stats_config_entry.entry_id - ) - assert len(devices_before_reload) == 3 - - # Config entry reload - await hass.config_entries.async_reload(history_stats_config_entry.entry_id) - await hass.async_block_till_done() - - # Confirm the link between the source entity device and the History stats sensor - history_stats_entity = entity_registry.async_get("sensor.history_stats") - assert history_stats_entity is not None - assert history_stats_entity.device_id == source_entity.device_id - - # After reloading the config entry, only one linked device is expected - devices_after_reload = device_registry.devices.get_devices_for_config_entry_id( - history_stats_config_entry.entry_id - ) - assert len(devices_after_reload) == 1 - - assert devices_after_reload[0].id == source_device1_entry.id diff --git a/tests/components/history_stats/test_sensor.py b/tests/components/history_stats/test_sensor.py index f86c04b3e5b..c18fb2ff784 100644 --- a/tests/components/history_stats/test_sensor.py +++ b/tests/components/history_stats/test_sensor.py @@ -8,33 +8,20 @@ import pytest import voluptuous as vol from homeassistant import config as hass_config -from homeassistant.components.history_stats.const import ( - CONF_END, - CONF_START, - DEFAULT_NAME, - DOMAIN, -) +from homeassistant.components.history_stats import DOMAIN from homeassistant.components.history_stats.sensor import ( PLATFORM_SCHEMA as SENSOR_SCHEMA, ) from homeassistant.components.recorder import Recorder -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - CONF_ENTITY_ID, - CONF_NAME, - 
CONF_STATE, - CONF_TYPE, - SERVICE_RELOAD, - STATE_UNKNOWN, -) +from homeassistant.const import ATTR_DEVICE_CLASS, SERVICE_RELOAD, STATE_UNKNOWN import homeassistant.core as ha from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry, async_fire_time_changed, get_fixture_path +from tests.common import async_fire_time_changed, get_fixture_path from tests.components.recorder.common import async_wait_recording_done from tests.typing import RecorderInstanceGenerator @@ -61,15 +48,6 @@ async def test_setup(recorder_mock: Recorder, hass: HomeAssistant) -> None: assert state.state == "0.0" -async def test_setup_config_entry( - recorder_mock: Recorder, hass: HomeAssistant, loaded_entry: MockConfigEntry -) -> None: - """Test the history statistics sensor setup from a config entry.""" - - state = hass.states.get("sensor.unnamed_statistics") - assert state.state == "2" - - async def test_setup_multiple_states( recorder_mock: Recorder, hass: HomeAssistant ) -> None: @@ -1749,50 +1727,3 @@ async def test_unique_id( entity_registry.async_get("sensor.test").unique_id == "some_history_stats_unique_id" ) - - -async def test_device_id( - recorder_mock: Recorder, - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device_registry: dr.DeviceRegistry, -) -> None: - """Test for source entity device for History stats.""" - source_config_entry = MockConfigEntry() - source_config_entry.add_to_hass(hass) - source_device_entry = device_registry.async_get_or_create( - config_entry_id=source_config_entry.entry_id, - identifiers={("sensor", "identifier_test")}, - connections={("mac", "30:31:32:33:34:35")}, - ) - source_entity = entity_registry.async_get_or_create( - "binary_sensor", - "test", - "source", - config_entry=source_config_entry, - device_id=source_device_entry.id, - ) - await hass.async_block_till_done() - assert entity_registry.async_get("binary_sensor.test_source") is not None - - history_stats_config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - CONF_NAME: DEFAULT_NAME, - CONF_ENTITY_ID: "binary_sensor.test_source", - CONF_STATE: ["on"], - CONF_TYPE: "count", - CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", - CONF_END: "{{ utcnow() }}", - }, - title="History stats", - ) - history_stats_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(history_stats_config_entry.entry_id) - await hass.async_block_till_done() - - history_stats_entity = entity_registry.async_get("sensor.history_stats") - assert history_stats_entity is not None - assert history_stats_entity.device_id == source_entity.device_id diff --git a/tests/components/hive/test_config_flow.py b/tests/components/hive/test_config_flow.py index e5dba49dcc1..fd6eb564a39 100644 --- a/tests/components/hive/test_config_flow.py +++ b/tests/components/hive/test_config_flow.py @@ -246,7 +246,14 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: "homeassistant.components.hive.config_flow.Auth.login", side_effect=hive_exceptions.HiveInvalidPassword(), ): - result = await mock_config.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_config.unique_id, + }, + 
data=mock_config.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_password"} @@ -298,7 +305,14 @@ async def test_reauth_2fa_flow(hass: HomeAssistant) -> None: "homeassistant.components.hive.config_flow.Auth.login", side_effect=hive_exceptions.HiveInvalidPassword(), ): - result = await mock_config.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_config.unique_id, + }, + data=mock_config.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_password"} diff --git a/tests/components/hlk_sw16/test_config_flow.py b/tests/components/hlk_sw16/test_config_flow.py index 2225ea1b79a..6a758ec5066 100644 --- a/tests/components/hlk_sw16/test_config_flow.py +++ b/tests/components/hlk_sw16/test_config_flow.py @@ -12,7 +12,7 @@ from homeassistant.data_entry_flow import FlowResultType class MockSW16Client: """Class to mock the SW16Client client.""" - def __init__(self, fail) -> None: + def __init__(self, fail): """Initialise client with failure modes.""" self.fail = fail self.disconnect_callback = None diff --git a/tests/components/holiday/conftest.py b/tests/components/holiday/conftest.py index 005756695fe..1ac595aa1f9 100644 --- a/tests/components/holiday/conftest.py +++ b/tests/components/holiday/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Holiday tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/holiday/test_config_flow.py b/tests/components/holiday/test_config_flow.py index 466dbaffd8b..14e2b68234c 100644 --- a/tests/components/holiday/test_config_flow.py +++ b/tests/components/holiday/test_config_flow.py @@ -230,7 +230,13 @@ async def test_reconfigure(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_configure( @@ -261,7 +267,13 @@ async def test_reconfigure_incorrect_language( ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_configure( @@ -296,7 +308,13 @@ async def test_reconfigure_entry_exists( ) entry2.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/home_connect/conftest.py b/tests/components/home_connect/conftest.py index 4e790074700..f4c19320826 100644 --- a/tests/components/home_connect/conftest.py +++ b/tests/components/home_connect/conftest.py @@ -67,20 +67,6 @@ def mock_config_entry(token_entry: dict[str, Any]) -> MockConfigEntry: "auth_implementation": FAKE_AUTH_IMPL, "token": token_entry, }, - minor_version=2, - ) - - 
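The hive and holiday hunks above drop the start_reauth_flow()/start_reconfigure_flow() test helpers in favour of initialising the flow directly through hass.config_entries.flow.async_init() with an explicit source in the flow context. A minimal, illustrative sketch of that manual pattern follows — it assumes a hypothetical "my_domain" integration that registers a reauth step, plus the standard Home Assistant pytest fixtures (hass, MockConfigEntry); it is not part of this patch.

from homeassistant import config_entries
from homeassistant.data_entry_flow import FlowResultType

from tests.common import MockConfigEntry


async def test_manual_reauth_init(hass) -> None:
    """Start a reauth flow without the start_reauth_flow() helper."""
    # Hypothetical config entry for a made-up "my_domain" integration.
    entry = MockConfigEntry(
        domain="my_domain",
        data={"username": "user@example.com"},
        unique_id="user@example.com",
    )
    entry.add_to_hass(hass)

    # Mirror the hive hunk above: pass SOURCE_REAUTH and the entry's
    # unique_id in the context, and the stored entry data as flow input.
    result = await hass.config_entries.flow.async_init(
        "my_domain",
        context={
            "source": config_entries.SOURCE_REAUTH,
            "unique_id": entry.unique_id,
        },
        data=entry.data,
    )

    # What comes back depends on the integration's reauth step; hive's
    # flow shows a form here, so the test asserts FlowResultType.FORM.
    assert result["type"] is FlowResultType.FORM

For a reconfigure flow, the holiday hunks use the same call with SOURCE_RECONFIGURE and the entry_id in the context instead of unique_id and data.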
-@pytest.fixture(name="config_entry_v1_1") -def mock_config_entry_v1_1(token_entry: dict[str, Any]) -> MockConfigEntry: - """Fixture for a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - data={ - "auth_implementation": FAKE_AUTH_IMPL, - "token": token_entry, - }, - minor_version=1, ) @@ -108,7 +94,7 @@ async def bypass_throttle(hass: HomeAssistant, config_entry: MockConfigEntry): @pytest.fixture(name="bypass_throttle") -def mock_bypass_throttle() -> Generator[None]: +def mock_bypass_throttle(): """Fixture to bypass the throttle decorator in __init__.""" with patch( "homeassistant.components.home_connect.update_all_devices", @@ -136,7 +122,7 @@ async def mock_integration_setup( @pytest.fixture(name="get_appliances") -def mock_get_appliances() -> Generator[MagicMock]: +def mock_get_appliances() -> Generator[None, Any, None]: """Mock ConfigEntryAuth parent (HomeAssistantAPI) method.""" with patch( "homeassistant.components.home_connect.api.ConfigEntryAuth.get_appliances", @@ -166,19 +152,15 @@ def mock_appliance(request: pytest.FixtureRequest) -> MagicMock: @pytest.fixture(name="problematic_appliance") -def mock_problematic_appliance(request: pytest.FixtureRequest) -> Mock: +def mock_problematic_appliance() -> Mock: """Fixture to mock a problematic Appliance.""" app = "Washer" - if hasattr(request, "param") and request.param: - app = request.param - mock = Mock( - autospec=HomeConnectAppliance, + spec=HomeConnectAppliance, **MOCK_APPLIANCES_PROPERTIES.get(app), ) mock.name = app - type(mock).status = PropertyMock(return_value={}) - mock.get.side_effect = HomeConnectError + setattr(mock, "status", {}) mock.get_programs_active.side_effect = HomeConnectError mock.get_programs_available.side_effect = HomeConnectError mock.start_program.side_effect = HomeConnectError diff --git a/tests/components/home_connect/fixtures/programs-available.json b/tests/components/home_connect/fixtures/programs-available.json index bba1a5d2721..b99ee5c6add 100644 --- a/tests/components/home_connect/fixtures/programs-available.json +++ b/tests/components/home_connect/fixtures/programs-available.json @@ -26,7 +26,7 @@ ] } }, - "Dishwasher": { + "DishWasher": { "data": { "programs": [ { diff --git a/tests/components/home_connect/fixtures/settings.json b/tests/components/home_connect/fixtures/settings.json index 1b9bec57276..5dc0f0e0599 100644 --- a/tests/components/home_connect/fixtures/settings.json +++ b/tests/components/home_connect/fixtures/settings.json @@ -95,67 +95,5 @@ } ] } - }, - "Washer": { - "data": { - "settings": [ - { - "key": "BSH.Common.Setting.PowerState", - "value": "BSH.Common.EnumType.PowerState.On", - "type": "BSH.Common.EnumType.PowerState" - }, - { - "key": "BSH.Common.Setting.ChildLock", - "value": false, - "type": "Boolean" - } - ] - } - }, - "FridgeFreezer": { - "data": { - "settings": [ - { - "key": "Refrigeration.FridgeFreezer.Setting.SuperModeFreezer", - "value": false, - "type": "Boolean", - "constraints": { - "access": "readWrite" - } - }, - { - "key": "Refrigeration.FridgeFreezer.Setting.SuperModeRefrigerator", - "value": false, - "type": "Boolean", - "constraints": { - "access": "readWrite" - } - }, - { - "key": "Refrigeration.Common.Setting.Dispenser.Enabled", - "value": false, - "type": "Boolean", - "constraints": { - "access": "readWrite" - } - }, - { - "key": "Refrigeration.Common.Setting.Light.External.Power", - "value": true, - "type": "Boolean" - }, - { - "key": "Refrigeration.Common.Setting.Light.External.Brightness", - "value": 70, - "unit": "%", - "type": 
"Double", - "constraints": { - "min": 0, - "max": 100, - "access": "readWrite" - } - } - ] - } } } diff --git a/tests/components/home_connect/fixtures/status.json b/tests/components/home_connect/fixtures/status.json index efdbde6cd97..8eac586a308 100644 --- a/tests/components/home_connect/fixtures/status.json +++ b/tests/components/home_connect/fixtures/status.json @@ -10,10 +10,6 @@ { "key": "BSH.Common.Status.DoorState", "value": "BSH.Common.EnumType.DoorState.Closed" - }, - { - "key": "Refrigeration.Common.Status.Door.Refrigerator", - "value": "BSH.Common.EnumType.DoorState.Open" } ] } diff --git a/tests/components/home_connect/test_binary_sensor.py b/tests/components/home_connect/test_binary_sensor.py index b564b003af6..d21aec35045 100644 --- a/tests/components/home_connect/test_binary_sensor.py +++ b/tests/components/home_connect/test_binary_sensor.py @@ -1,32 +1,23 @@ """Tests for home_connect binary_sensor entities.""" -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator +from typing import Any from unittest.mock import MagicMock, Mock -from homeconnect.api import HomeConnectAPI import pytest -from homeassistant.components import automation, script -from homeassistant.components.automation import automations_with_entity from homeassistant.components.home_connect.const import ( BSH_DOOR_STATE, BSH_DOOR_STATE_CLOSED, BSH_DOOR_STATE_LOCKED, BSH_DOOR_STATE_OPEN, - DOMAIN, - REFRIGERATION_STATUS_DOOR_CLOSED, - REFRIGERATION_STATUS_DOOR_OPEN, - REFRIGERATION_STATUS_DOOR_REFRIGERATOR, ) -from homeassistant.components.script import scripts_with_entity from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity -import homeassistant.helpers.issue_registry as ir -from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, load_json_object_fixture +from tests.common import MockConfigEntry @pytest.fixture @@ -35,8 +26,9 @@ def platforms() -> list[str]: return [Platform.BINARY_SENSOR] -@pytest.mark.usefixtures("bypass_throttle") async def test_binary_sensors( + bypass_throttle: Generator[None, Any, None], + hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], setup_credentials: None, @@ -59,10 +51,10 @@ async def test_binary_sensors( ("", "unavailable"), ], ) -@pytest.mark.usefixtures("bypass_throttle") async def test_binary_sensors_door_states( expected: str, state: str, + bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], @@ -74,134 +66,9 @@ async def test_binary_sensors_door_states( entity_id = "binary_sensor.washer_door" get_appliances.return_value = [appliance] assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED appliance.status.update({BSH_DOOR_STATE: {"value": state}}) - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED await async_update_entity(hass, entity_id) await hass.async_block_till_done() assert hass.states.is_state(entity_id, expected) - - -@pytest.mark.parametrize( - ("entity_id", "status_key", "event_value_update", "expected", "appliance"), - [ - ( - 
"binary_sensor.fridgefreezer_refrigerator_door", - REFRIGERATION_STATUS_DOOR_REFRIGERATOR, - REFRIGERATION_STATUS_DOOR_CLOSED, - STATE_OFF, - "FridgeFreezer", - ), - ( - "binary_sensor.fridgefreezer_refrigerator_door", - REFRIGERATION_STATUS_DOOR_REFRIGERATOR, - REFRIGERATION_STATUS_DOOR_OPEN, - STATE_ON, - "FridgeFreezer", - ), - ( - "binary_sensor.fridgefreezer_refrigerator_door", - REFRIGERATION_STATUS_DOOR_REFRIGERATOR, - "", - STATE_UNAVAILABLE, - "FridgeFreezer", - ), - ], - indirect=["appliance"], -) -@pytest.mark.usefixtures("bypass_throttle") -async def test_bianry_sensors_fridge_door_states( - entity_id: str, - status_key: str, - event_value_update: str, - appliance: Mock, - expected: str, - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - get_appliances: MagicMock, -) -> None: - """Tests for Home Connect Fridge appliance door states.""" - appliance.status.update( - HomeConnectAPI.json2dict( - load_json_object_fixture("home_connect/status.json")["data"]["status"] - ) - ) - get_appliances.return_value = [appliance] - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - appliance.status.update({status_key: {"value": event_value_update}}) - await async_update_entity(hass, entity_id) - await hass.async_block_till_done() - assert hass.states.is_state(entity_id, expected) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.usefixtures("bypass_throttle") -async def test_create_issue( - hass: HomeAssistant, - appliance: Mock, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - get_appliances: MagicMock, - issue_registry: ir.IssueRegistry, -) -> None: - """Test we create an issue when an automation or script is using a deprecated entity.""" - entity_id = "binary_sensor.washer_door" - get_appliances.return_value = [appliance] - issue_id = f"deprecated_binary_common_door_sensor_{entity_id}" - - assert await async_setup_component( - hass, - automation.DOMAIN, - { - automation.DOMAIN: { - "alias": "test", - "trigger": {"platform": "state", "entity_id": entity_id}, - "action": { - "action": "automation.turn_on", - "target": { - "entity_id": "automation.test", - }, - }, - } - }, - ) - assert await async_setup_component( - hass, - script.DOMAIN, - { - script.DOMAIN: { - "test": { - "sequence": [ - { - "condition": "state", - "entity_id": entity_id, - "state": "on", - }, - ], - } - } - }, - ) - - assert config_entry.state == ConfigEntryState.NOT_LOADED - appliance.status.update({BSH_DOOR_STATE: {"value": BSH_DOOR_STATE_OPEN}}) - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - - assert automations_with_entity(hass, entity_id)[0] == "automation.test" - assert scripts_with_entity(hass, entity_id)[0] == "script.test" - - assert len(issue_registry.issues) == 1 - assert issue_registry.async_get_issue(DOMAIN, issue_id) - - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - # Assert the issue is no longer present - assert not issue_registry.async_get_issue(DOMAIN, issue_id) - assert len(issue_registry.issues) == 0 diff --git a/tests/components/home_connect/test_init.py b/tests/components/home_connect/test_init.py index 52550d705a9..616a82edebc 100644 --- a/tests/components/home_connect/test_init.py +++ 
b/tests/components/home_connect/test_init.py @@ -1,32 +1,18 @@ """Test the integration init functionality.""" -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator from typing import Any -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import MagicMock, Mock -from freezegun.api import FrozenDateTimeFactory import pytest from requests import HTTPError import requests_mock -from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN -from homeassistant.components.home_connect import SCAN_INTERVAL -from homeassistant.components.home_connect.const import ( - BSH_CHILD_LOCK_STATE, - BSH_OPERATION_STATE, - BSH_POWER_STATE, - BSH_REMOTE_START_ALLOWANCE_STATE, - COOKING_LIGHTING, - DOMAIN, - OAUTH2_TOKEN, -) -from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.components.home_connect.const import DOMAIN, OAUTH2_TOKEN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import device_registry as dr from .conftest import ( CLIENT_ID, @@ -131,8 +117,8 @@ SERVICE_APPLIANCE_METHOD_MAPPING = { } -@pytest.mark.usefixtures("bypass_throttle") async def test_api_setup( + bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], @@ -151,38 +137,9 @@ async def test_api_setup( assert config_entry.state == ConfigEntryState.NOT_LOADED -async def test_update_throttle( - appliance: Mock, - freezer: FrozenDateTimeFactory, - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - get_appliances: MagicMock, -) -> None: - """Test to check Throttle functionality.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - get_appliances_call_count = get_appliances.call_count - - # First re-load after 1 minute is not blocked. - assert await hass.config_entries.async_unload(config_entry.entry_id) - assert config_entry.state == ConfigEntryState.NOT_LOADED - freezer.tick(SCAN_INTERVAL.seconds + 0.1) - assert await hass.config_entries.async_setup(config_entry.entry_id) - assert get_appliances.call_count == get_appliances_call_count + 1 - - # Second re-load is blocked by Throttle. 
- assert await hass.config_entries.async_unload(config_entry.entry_id) - assert config_entry.state == ConfigEntryState.NOT_LOADED - freezer.tick(SCAN_INTERVAL.seconds - 0.1) - assert await hass.config_entries.async_setup(config_entry.entry_id) - assert get_appliances.call_count == get_appliances_call_count + 1 - - -@pytest.mark.usefixtures("bypass_throttle") async def test_exception_handling( + bypass_throttle: Generator[None, Any, None], + hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], config_entry: MockConfigEntry, setup_credentials: None, @@ -197,8 +154,8 @@ async def test_exception_handling( @pytest.mark.parametrize("token_expiration_time", [12345]) -@pytest.mark.usefixtures("bypass_throttle") async def test_token_refresh_success( + bypass_throttle: Generator[None, Any, None], integration_setup: Callable[[], Awaitable[bool]], config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, @@ -235,8 +192,44 @@ async def test_token_refresh_success( ) -@pytest.mark.usefixtures("bypass_throttle") +async def test_setup( + hass: HomeAssistant, + integration_setup: Callable[[], Awaitable[bool]], + config_entry: MockConfigEntry, + setup_credentials: None, +) -> None: + """Test setting up the integration.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + assert await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state == ConfigEntryState.NOT_LOADED + + +async def test_update_throttle( + appliance: Mock, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + platforms: list[Platform], + get_appliances: MagicMock, +) -> None: + """Test to check Throttle functionality.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + assert get_appliances.call_count == 0 + + async def test_http_error( + bypass_throttle: Generator[None, Any, None], + hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], setup_credentials: None, @@ -254,9 +247,9 @@ async def test_http_error( "service_call", SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, ) -@pytest.mark.usefixtures("bypass_throttle") async def test_services( service_call: list[dict[str, Any]], + bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, device_registry: dr.DeviceRegistry, config_entry: MockConfigEntry, @@ -286,8 +279,8 @@ async def test_services( ) -@pytest.mark.usefixtures("bypass_throttle") async def test_services_exception( + bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], @@ -307,68 +300,3 @@ async def test_services_exception( with pytest.raises(ValueError): await hass.services.async_call(**service_call) - - -async def test_entity_migration( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - config_entry_v1_1: MockConfigEntry, - appliance: Mock, - platforms: list[Platform], -) -> None: - """Test entity migration.""" - - config_entry_v1_1.add_to_hass(hass) - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry_v1_1.entry_id, - identifiers={(DOMAIN, appliance.haId)}, - ) - - test_entities = [ 
- ( - SENSOR_DOMAIN, - "Operation State", - BSH_OPERATION_STATE, - ), - ( - SWITCH_DOMAIN, - "ChildLock", - BSH_CHILD_LOCK_STATE, - ), - ( - SWITCH_DOMAIN, - "Power", - BSH_POWER_STATE, - ), - ( - BINARY_SENSOR_DOMAIN, - "Remote Start", - BSH_REMOTE_START_ALLOWANCE_STATE, - ), - ( - LIGHT_DOMAIN, - "Light", - COOKING_LIGHTING, - ), - ] - - for domain, old_unique_id_suffix, _ in test_entities: - entity_registry.async_get_or_create( - domain, - DOMAIN, - f"{appliance.haId}-{old_unique_id_suffix}", - device_id=device_entry.id, - config_entry=config_entry_v1_1, - ) - - with patch("homeassistant.components.home_connect.PLATFORMS", platforms): - await hass.config_entries.async_setup(config_entry_v1_1.entry_id) - await hass.async_block_till_done() - - for domain, _, expected_unique_id_suffix in test_entities: - assert entity_registry.async_get_entity_id( - domain, DOMAIN, f"{appliance.haId}-{expected_unique_id_suffix}" - ) - assert config_entry_v1_1.minor_version == 2 diff --git a/tests/components/home_connect/test_light.py b/tests/components/home_connect/test_light.py deleted file mode 100644 index 7a9747929c9..00000000000 --- a/tests/components/home_connect/test_light.py +++ /dev/null @@ -1,348 +0,0 @@ -"""Tests for home_connect light entities.""" - -from collections.abc import Awaitable, Callable, Generator -from unittest.mock import MagicMock, Mock - -from homeconnect.api import HomeConnectAppliance, HomeConnectError -import pytest - -from homeassistant.components.home_connect.const import ( - BSH_AMBIENT_LIGHT_BRIGHTNESS, - BSH_AMBIENT_LIGHT_COLOR, - BSH_AMBIENT_LIGHT_CUSTOM_COLOR, - BSH_AMBIENT_LIGHT_ENABLED, - COOKING_LIGHTING, - COOKING_LIGHTING_BRIGHTNESS, - REFRIGERATION_EXTERNAL_LIGHT_BRIGHTNESS, - REFRIGERATION_EXTERNAL_LIGHT_POWER, -) -from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ( - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_OFF, - STATE_ON, - STATE_UNKNOWN, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError - -from .conftest import get_all_appliances - -from tests.common import MockConfigEntry, load_json_object_fixture - -TEST_HC_APP = "Hood" - -SETTINGS_STATUS = { - setting.pop("key"): setting - for setting in load_json_object_fixture("home_connect/settings.json") - .get(TEST_HC_APP) - .get("data") - .get("settings") -} - - -@pytest.fixture -def platforms() -> list[str]: - """Fixture to specify platforms to test.""" - return [Platform.LIGHT] - - -async def test_light( - bypass_throttle: Generator[None], - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - get_appliances: Mock, -) -> None: - """Test switch entities.""" - get_appliances.side_effect = get_all_appliances - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - - -@pytest.mark.parametrize( - ("entity_id", "status", "service", "service_data", "state", "appliance"), - [ - ( - "light.hood_functional_light", - { - COOKING_LIGHTING: { - "value": True, - }, - }, - SERVICE_TURN_ON, - {}, - STATE_ON, - "Hood", - ), - ( - "light.hood_functional_light", - { - COOKING_LIGHTING: { - "value": True, - }, - COOKING_LIGHTING_BRIGHTNESS: {"value": 70}, - }, - SERVICE_TURN_ON, - {"brightness": 200}, - STATE_ON, - "Hood", - ), - ( - "light.hood_functional_light", - { - 
COOKING_LIGHTING: {"value": False}, - COOKING_LIGHTING_BRIGHTNESS: {"value": 70}, - }, - SERVICE_TURN_OFF, - {}, - STATE_OFF, - "Hood", - ), - ( - "light.hood_functional_light", - { - COOKING_LIGHTING: { - "value": None, - }, - COOKING_LIGHTING_BRIGHTNESS: None, - }, - SERVICE_TURN_ON, - {}, - STATE_UNKNOWN, - "Hood", - ), - ( - "light.hood_ambient_light", - { - BSH_AMBIENT_LIGHT_ENABLED: { - "value": True, - }, - BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, - }, - SERVICE_TURN_ON, - {"brightness": 200}, - STATE_ON, - "Hood", - ), - ( - "light.hood_ambient_light", - { - BSH_AMBIENT_LIGHT_ENABLED: {"value": False}, - BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, - }, - SERVICE_TURN_OFF, - {}, - STATE_OFF, - "Hood", - ), - ( - "light.hood_ambient_light", - { - BSH_AMBIENT_LIGHT_ENABLED: {"value": True}, - BSH_AMBIENT_LIGHT_CUSTOM_COLOR: {}, - }, - SERVICE_TURN_ON, - {}, - STATE_ON, - "Hood", - ), - ( - "light.hood_ambient_light", - { - BSH_AMBIENT_LIGHT_ENABLED: {"value": True}, - BSH_AMBIENT_LIGHT_COLOR: { - "value": "", - }, - BSH_AMBIENT_LIGHT_CUSTOM_COLOR: {}, - }, - SERVICE_TURN_ON, - { - "rgb_color": [255, 255, 0], - }, - STATE_ON, - "Hood", - ), - ( - "light.fridgefreezer_external_light", - { - REFRIGERATION_EXTERNAL_LIGHT_POWER: { - "value": True, - }, - REFRIGERATION_EXTERNAL_LIGHT_BRIGHTNESS: {"value": 75}, - }, - SERVICE_TURN_ON, - {}, - STATE_ON, - "FridgeFreezer", - ), - ], - indirect=["appliance"], -) -async def test_light_functionality( - entity_id: str, - status: dict, - service: str, - service_data: dict, - state: str, - appliance: Mock, - bypass_throttle: Generator[None], - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - get_appliances: MagicMock, -) -> None: - """Test light functionality.""" - appliance.status.update( - HomeConnectAppliance.json2dict( - load_json_object_fixture("home_connect/settings.json") - .get(appliance.name) - .get("data") - .get("settings") - ) - ) - get_appliances.return_value = [appliance] - - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - - appliance.status.update(status) - service_data["entity_id"] = entity_id - await hass.services.async_call( - LIGHT_DOMAIN, - service, - service_data, - blocking=True, - ) - assert hass.states.is_state(entity_id, state) - - -@pytest.mark.parametrize( - ( - "entity_id", - "status", - "service", - "service_data", - "mock_attr", - "attr_side_effect", - "problematic_appliance", - "exception_match", - ), - [ - ( - "light.hood_functional_light", - { - COOKING_LIGHTING: { - "value": False, - }, - }, - SERVICE_TURN_ON, - {}, - "set_setting", - [HomeConnectError, HomeConnectError], - "Hood", - r"Error.*turn.*on.*", - ), - ( - "light.hood_functional_light", - { - COOKING_LIGHTING: { - "value": True, - }, - COOKING_LIGHTING_BRIGHTNESS: {"value": 70}, - }, - SERVICE_TURN_ON, - {"brightness": 200}, - "set_setting", - [HomeConnectError, HomeConnectError], - "Hood", - r"Error.*turn.*on.*", - ), - ( - "light.hood_functional_light", - { - COOKING_LIGHTING: {"value": False}, - }, - SERVICE_TURN_OFF, - {}, - "set_setting", - [HomeConnectError, HomeConnectError], - "Hood", - r"Error.*turn.*off.*", - ), - ( - "light.hood_ambient_light", - { - BSH_AMBIENT_LIGHT_ENABLED: { - "value": True, - }, - BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, - }, - SERVICE_TURN_ON, - {}, - "set_setting", - [HomeConnectError, HomeConnectError], - "Hood", - 
r"Error.*turn.*on.*", - ), - ( - "light.hood_ambient_light", - { - BSH_AMBIENT_LIGHT_ENABLED: { - "value": True, - }, - BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, - }, - SERVICE_TURN_ON, - {"brightness": 200}, - "set_setting", - [HomeConnectError, None, HomeConnectError], - "Hood", - r"Error.*set.*color.*", - ), - ], - indirect=["problematic_appliance"], -) -async def test_switch_exception_handling( - entity_id: str, - status: dict, - service: str, - service_data: dict, - mock_attr: str, - attr_side_effect: list, - problematic_appliance: Mock, - exception_match: str, - bypass_throttle: Generator[None], - hass: HomeAssistant, - integration_setup: Callable[[], Awaitable[bool]], - config_entry: MockConfigEntry, - setup_credentials: None, - get_appliances: MagicMock, -) -> None: - """Test light exception handling.""" - problematic_appliance.status.update(SETTINGS_STATUS) - problematic_appliance.set_setting.side_effect = attr_side_effect - get_appliances.return_value = [problematic_appliance] - - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - - # Assert that an exception is called. - with pytest.raises(HomeConnectError): - getattr(problematic_appliance, mock_attr)() - - problematic_appliance.status.update(status) - service_data["entity_id"] = entity_id - with pytest.raises(ServiceValidationError, match=exception_match): - await hass.services.async_call( - LIGHT_DOMAIN, service, service_data, blocking=True - ) - assert getattr(problematic_appliance, mock_attr).call_count == len(attr_side_effect) diff --git a/tests/components/home_connect/test_number.py b/tests/components/home_connect/test_number.py deleted file mode 100644 index f70e307cb41..00000000000 --- a/tests/components/home_connect/test_number.py +++ /dev/null @@ -1,176 +0,0 @@ -"""Tests for home_connect number entities.""" - -from collections.abc import Awaitable, Callable, Generator -import random -from unittest.mock import MagicMock, Mock - -from homeconnect.api import HomeConnectError -import pytest - -from homeassistant.components.home_connect.const import ( - ATTR_CONSTRAINTS, - ATTR_STEPSIZE, - ATTR_UNIT, - ATTR_VALUE, -) -from homeassistant.components.number import ( - ATTR_MAX, - ATTR_MIN, - ATTR_VALUE as SERVICE_ATTR_VALUE, - DEFAULT_MIN_VALUE, - DOMAIN as NUMBER_DOMAIN, - SERVICE_SET_VALUE, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError - -from .conftest import get_all_appliances - -from tests.common import MockConfigEntry - - -@pytest.fixture -def platforms() -> list[str]: - """Fixture to specify platforms to test.""" - return [Platform.NUMBER] - - -async def test_number( - bypass_throttle: Generator[None], - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - get_appliances: Mock, -) -> None: - """Test number entity.""" - get_appliances.side_effect = get_all_appliances - assert config_entry.state is ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state is ConfigEntryState.LOADED - - -@pytest.mark.parametrize("appliance", ["Refrigerator"], indirect=True) -@pytest.mark.parametrize( - ( - "entity_id", - "setting_key", - "min_value", - "max_value", - "step_size", - "unit_of_measurement", - ), - [ - ( - 
f"{NUMBER_DOMAIN.lower()}.refrigerator_refrigerator_temperature", - "Refrigeration.FridgeFreezer.Setting.SetpointTemperatureRefrigerator", - 7, - 15, - 0.1, - "°C", - ), - ], -) -@pytest.mark.usefixtures("bypass_throttle") -async def test_number_entity_functionality( - appliance: Mock, - entity_id: str, - setting_key: str, - bypass_throttle: Generator[None], - min_value: int, - max_value: int, - step_size: float, - unit_of_measurement: str, - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - get_appliances: MagicMock, -) -> None: - """Test number entity functionality.""" - appliance.get.side_effect = [ - { - ATTR_CONSTRAINTS: { - ATTR_MIN: min_value, - ATTR_MAX: max_value, - ATTR_STEPSIZE: step_size, - }, - ATTR_UNIT: unit_of_measurement, - } - ] - get_appliances.return_value = [appliance] - current_value = min_value - appliance.status.update({setting_key: {ATTR_VALUE: current_value}}) - - assert config_entry.state is ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state is ConfigEntryState.LOADED - assert hass.states.is_state(entity_id, str(current_value)) - state = hass.states.get(entity_id) - assert state.attributes["min"] == min_value - assert state.attributes["max"] == max_value - assert state.attributes["step"] == step_size - assert state.attributes["unit_of_measurement"] == unit_of_measurement - - new_value = random.randint(min_value + 1, max_value) - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: entity_id, - SERVICE_ATTR_VALUE: new_value, - }, - blocking=True, - ) - appliance.set_setting.assert_called_once_with(setting_key, new_value) - - -@pytest.mark.parametrize("problematic_appliance", ["Refrigerator"], indirect=True) -@pytest.mark.parametrize( - ("entity_id", "setting_key", "mock_attr"), - [ - ( - f"{NUMBER_DOMAIN.lower()}.refrigerator_refrigerator_temperature", - "Refrigeration.FridgeFreezer.Setting.SetpointTemperatureRefrigerator", - "set_setting", - ), - ], -) -@pytest.mark.usefixtures("bypass_throttle") -async def test_number_entity_error( - problematic_appliance: Mock, - entity_id: str, - setting_key: str, - mock_attr: str, - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - get_appliances: MagicMock, -) -> None: - """Test number entity error.""" - get_appliances.return_value = [problematic_appliance] - - assert config_entry.state is ConfigEntryState.NOT_LOADED - problematic_appliance.status.update({setting_key: {}}) - assert await integration_setup() - assert config_entry.state is ConfigEntryState.LOADED - - with pytest.raises(HomeConnectError): - getattr(problematic_appliance, mock_attr)() - - with pytest.raises( - ServiceValidationError, match=r"Error.*assign.*value.*to.*setting.*" - ): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: entity_id, - SERVICE_ATTR_VALUE: DEFAULT_MIN_VALUE, - }, - blocking=True, - ) - assert getattr(problematic_appliance, mock_attr).call_count == 2 diff --git a/tests/components/home_connect/test_sensor.py b/tests/components/home_connect/test_sensor.py index f2ee3b13922..f30f017d6d3 100644 --- a/tests/components/home_connect/test_sensor.py +++ b/tests/components/home_connect/test_sensor.py @@ -1,43 +1,32 @@ """Tests for home_connect sensor entities.""" -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, 
Generator +from typing import Any from unittest.mock import MagicMock, Mock from freezegun.api import FrozenDateTimeFactory -from homeconnect.api import HomeConnectAPI import pytest -from homeassistant.components.home_connect.const import ( - BSH_DOOR_STATE, - BSH_DOOR_STATE_CLOSED, - BSH_DOOR_STATE_LOCKED, - BSH_DOOR_STATE_OPEN, - BSH_EVENT_PRESENT_STATE_CONFIRMED, - BSH_EVENT_PRESENT_STATE_OFF, - BSH_EVENT_PRESENT_STATE_PRESENT, - COFFEE_EVENT_BEAN_CONTAINER_EMPTY, - REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, -) from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity -from tests.common import MockConfigEntry, load_json_object_fixture +from tests.common import MockConfigEntry TEST_HC_APP = "Dishwasher" EVENT_PROG_DELAYED_START = { "BSH.Common.Status.OperationState": { - "value": "BSH.Common.EnumType.OperationState.DelayedStart" + "value": "BSH.Common.EnumType.OperationState.Delayed" }, } EVENT_PROG_REMAIN_NO_VALUE = { "BSH.Common.Option.RemainingProgramTime": {}, "BSH.Common.Status.OperationState": { - "value": "BSH.Common.EnumType.OperationState.DelayedStart" + "value": "BSH.Common.EnumType.OperationState.Delayed" }, } @@ -80,8 +69,9 @@ def platforms() -> list[str]: return [Platform.SENSOR] -@pytest.mark.usefixtures("bypass_throttle") async def test_sensors( + bypass_throttle: Generator[None, Any, None], + hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], setup_credentials: None, @@ -107,13 +97,13 @@ PROGRAM_SEQUENCE_EVENTS = ( # Entity mapping to expected state at each program sequence. ENTITY_ID_STATES = { "sensor.dishwasher_operation_state": ( - "delayedstart", - "run", - "run", - "run", - "ready", + "Delayed", + "Run", + "Run", + "Run", + "Ready", ), - "sensor.dishwasher_program_finish_time": ( + "sensor.dishwasher_remaining_program_time": ( "unavailable", "2021-01-09T12:00:00+00:00", "2021-01-09T12:00:00+00:00", @@ -141,12 +131,12 @@ ENTITY_ID_STATES = { ) ), ) -@pytest.mark.usefixtures("bypass_throttle") async def test_event_sensors( appliance: Mock, states: tuple, event_run: dict, freezer: FrozenDateTimeFactory, + bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], @@ -162,8 +152,6 @@ async def test_event_sensors( get_appliances.return_value = [appliance] assert config_entry.state == ConfigEntryState.NOT_LOADED - appliance.get_programs_available = MagicMock(return_value=["dummy_program"]) - appliance.status.update(EVENT_PROG_DELAYED_START) assert await integration_setup() assert config_entry.state == ConfigEntryState.LOADED @@ -192,10 +180,10 @@ ENTITY_ID_EDGE_CASE_STATES = [ @pytest.mark.parametrize("appliance", [TEST_HC_APP], indirect=True) -@pytest.mark.usefixtures("bypass_throttle") async def test_remaining_prog_time_edge_cases( appliance: Mock, freezer: FrozenDateTimeFactory, + bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], @@ -204,13 +192,11 @@ async def test_remaining_prog_time_edge_cases( ) -> None: """Run program sequence to test edge cases for the remaining_prog_time entity.""" get_appliances.return_value = [appliance] - entity_id = "sensor.dishwasher_program_finish_time" + entity_id = "sensor.dishwasher_remaining_program_time" time_to_freeze = "2021-01-09 12:00:00+00:00" 
freezer.move_to(time_to_freeze) assert config_entry.state == ConfigEntryState.NOT_LOADED - appliance.get_programs_available = MagicMock(return_value=["dummy_program"]) - appliance.status.update(EVENT_PROG_REMAIN_NO_VALUE) assert await integration_setup() assert config_entry.state == ConfigEntryState.LOADED @@ -223,115 +209,3 @@ async def test_remaining_prog_time_edge_cases( await hass.async_block_till_done() freezer.tick() assert hass.states.is_state(entity_id, expected_state) - - -@pytest.mark.parametrize( - ("entity_id", "status_key", "event_value_update", "expected", "appliance"), - [ - ( - "sensor.dishwasher_door", - BSH_DOOR_STATE, - BSH_DOOR_STATE_LOCKED, - "locked", - "Dishwasher", - ), - ( - "sensor.dishwasher_door", - BSH_DOOR_STATE, - BSH_DOOR_STATE_CLOSED, - "closed", - "Dishwasher", - ), - ( - "sensor.dishwasher_door", - BSH_DOOR_STATE, - BSH_DOOR_STATE_OPEN, - "open", - "Dishwasher", - ), - ( - "sensor.fridgefreezer_freezer_door_alarm", - "EVENT_NOT_IN_STATUS_YET_SO_SET_TO_OFF", - "", - "off", - "FridgeFreezer", - ), - ( - "sensor.fridgefreezer_freezer_door_alarm", - REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, - BSH_EVENT_PRESENT_STATE_OFF, - "off", - "FridgeFreezer", - ), - ( - "sensor.fridgefreezer_freezer_door_alarm", - REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, - BSH_EVENT_PRESENT_STATE_PRESENT, - "present", - "FridgeFreezer", - ), - ( - "sensor.fridgefreezer_freezer_door_alarm", - REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, - BSH_EVENT_PRESENT_STATE_CONFIRMED, - "confirmed", - "FridgeFreezer", - ), - ( - "sensor.coffeemaker_bean_container_empty", - "EVENT_NOT_IN_STATUS_YET_SO_SET_TO_OFF", - "", - "off", - "CoffeeMaker", - ), - ( - "sensor.coffeemaker_bean_container_empty", - COFFEE_EVENT_BEAN_CONTAINER_EMPTY, - BSH_EVENT_PRESENT_STATE_OFF, - "off", - "CoffeeMaker", - ), - ( - "sensor.coffeemaker_bean_container_empty", - COFFEE_EVENT_BEAN_CONTAINER_EMPTY, - BSH_EVENT_PRESENT_STATE_PRESENT, - "present", - "CoffeeMaker", - ), - ( - "sensor.coffeemaker_bean_container_empty", - COFFEE_EVENT_BEAN_CONTAINER_EMPTY, - BSH_EVENT_PRESENT_STATE_CONFIRMED, - "confirmed", - "CoffeeMaker", - ), - ], - indirect=["appliance"], -) -@pytest.mark.usefixtures("bypass_throttle") -async def test_sensors_states( - entity_id: str, - status_key: str, - event_value_update: str, - appliance: Mock, - expected: str, - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - get_appliances: MagicMock, -) -> None: - """Tests for Appliance alarm sensors.""" - appliance.status.update( - HomeConnectAPI.json2dict( - load_json_object_fixture("home_connect/status.json")["data"]["status"] - ) - ) - get_appliances.return_value = [appliance] - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - appliance.status.update({status_key: {"value": event_value_update}}) - await async_update_entity(hass, entity_id) - await hass.async_block_till_done() - assert hass.states.is_state(entity_id, expected) diff --git a/tests/components/home_connect/test_switch.py b/tests/components/home_connect/test_switch.py deleted file mode 100644 index 06201ffd58c..00000000000 --- a/tests/components/home_connect/test_switch.py +++ /dev/null @@ -1,508 +0,0 @@ -"""Tests for home_connect sensor entities.""" - -from collections.abc import Awaitable, Callable, Generator -from unittest.mock import MagicMock, Mock - -from homeconnect.api import HomeConnectAppliance, HomeConnectError 
-import pytest - -from homeassistant.components.home_connect.const import ( - ATTR_ALLOWED_VALUES, - ATTR_CONSTRAINTS, - BSH_ACTIVE_PROGRAM, - BSH_CHILD_LOCK_STATE, - BSH_OPERATION_STATE, - BSH_POWER_OFF, - BSH_POWER_ON, - BSH_POWER_STANDBY, - BSH_POWER_STATE, - REFRIGERATION_SUPERMODEFREEZER, -) -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_OFF, - STATE_ON, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError - -from .conftest import get_all_appliances - -from tests.common import MockConfigEntry, load_json_object_fixture - -SETTINGS_STATUS = { - setting.pop("key"): setting - for setting in load_json_object_fixture("home_connect/settings.json") - .get("Dishwasher") - .get("data") - .get("settings") -} - -PROGRAM = "LaundryCare.Dryer.Program.Mix" - - -@pytest.fixture -def platforms() -> list[str]: - """Fixture to specify platforms to test.""" - return [Platform.SWITCH] - - -async def test_switches( - bypass_throttle: Generator[None], - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - get_appliances: Mock, -) -> None: - """Test switch entities.""" - get_appliances.side_effect = get_all_appliances - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - - -@pytest.mark.parametrize( - ("entity_id", "status", "service", "state", "appliance"), - [ - ( - "switch.dishwasher_program_mix", - {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, - SERVICE_TURN_ON, - STATE_ON, - "Dishwasher", - ), - ( - "switch.dishwasher_program_mix", - {BSH_ACTIVE_PROGRAM: {"value": ""}}, - SERVICE_TURN_OFF, - STATE_OFF, - "Dishwasher", - ), - ( - "switch.dishwasher_child_lock", - {BSH_CHILD_LOCK_STATE: {"value": True}}, - SERVICE_TURN_ON, - STATE_ON, - "Dishwasher", - ), - ( - "switch.dishwasher_child_lock", - {BSH_CHILD_LOCK_STATE: {"value": False}}, - SERVICE_TURN_OFF, - STATE_OFF, - "Dishwasher", - ), - ], - indirect=["appliance"], -) -async def test_switch_functionality( - entity_id: str, - status: dict, - service: str, - state: str, - bypass_throttle: Generator[None], - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - appliance: Mock, - get_appliances: MagicMock, -) -> None: - """Test switch functionality.""" - appliance.status.update(SETTINGS_STATUS) - appliance.get_programs_available.return_value = [PROGRAM] - get_appliances.return_value = [appliance] - - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - - appliance.status.update(status) - await hass.services.async_call( - SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True - ) - assert hass.states.is_state(entity_id, state) - - -@pytest.mark.parametrize( - ( - "entity_id", - "status", - "service", - "mock_attr", - "problematic_appliance", - "exception_match", - ), - [ - ( - "switch.dishwasher_program_mix", - {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, - SERVICE_TURN_ON, - "start_program", - "Dishwasher", - r"Error.*start.*program.*", - ), - ( - "switch.dishwasher_program_mix", - {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, - SERVICE_TURN_OFF, - 
"stop_program", - "Dishwasher", - r"Error.*stop.*program.*", - ), - ( - "switch.dishwasher_power", - {BSH_POWER_STATE: {"value": BSH_POWER_OFF}}, - SERVICE_TURN_OFF, - "set_setting", - "Dishwasher", - r"Error.*turn.*off.*appliance.*value", - ), - ( - "switch.dishwasher_power", - {BSH_POWER_STATE: {"value": ""}}, - SERVICE_TURN_ON, - "set_setting", - "Dishwasher", - r"Error.*turn.*on.*appliance.*", - ), - ( - "switch.dishwasher_child_lock", - {BSH_CHILD_LOCK_STATE: {"value": ""}}, - SERVICE_TURN_ON, - "set_setting", - "Dishwasher", - r"Error.*turn.*on.*key.*", - ), - ( - "switch.dishwasher_child_lock", - {BSH_CHILD_LOCK_STATE: {"value": ""}}, - SERVICE_TURN_OFF, - "set_setting", - "Dishwasher", - r"Error.*turn.*off.*key.*", - ), - ], - indirect=["problematic_appliance"], -) -async def test_switch_exception_handling( - entity_id: str, - status: dict, - service: str, - mock_attr: str, - exception_match: str, - bypass_throttle: Generator[None], - hass: HomeAssistant, - integration_setup: Callable[[], Awaitable[bool]], - config_entry: MockConfigEntry, - setup_credentials: None, - problematic_appliance: Mock, - get_appliances: MagicMock, -) -> None: - """Test exception handling.""" - problematic_appliance.get_programs_available.side_effect = None - problematic_appliance.get_programs_available.return_value = [PROGRAM] - get_appliances.return_value = [problematic_appliance] - - assert config_entry.state == ConfigEntryState.NOT_LOADED - problematic_appliance.status.update(status) - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - - # Assert that an exception is called. - with pytest.raises(HomeConnectError): - getattr(problematic_appliance, mock_attr)() - - with pytest.raises(ServiceValidationError, match=exception_match): - await hass.services.async_call( - SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True - ) - assert getattr(problematic_appliance, mock_attr).call_count == 2 - - -@pytest.mark.parametrize( - ("entity_id", "status", "service", "state", "appliance"), - [ - ( - "switch.fridgefreezer_freezer_super_mode", - {REFRIGERATION_SUPERMODEFREEZER: {"value": True}}, - SERVICE_TURN_ON, - STATE_ON, - "FridgeFreezer", - ), - ( - "switch.fridgefreezer_freezer_super_mode", - {REFRIGERATION_SUPERMODEFREEZER: {"value": False}}, - SERVICE_TURN_OFF, - STATE_OFF, - "FridgeFreezer", - ), - ], - indirect=["appliance"], -) -async def test_ent_desc_switch_functionality( - entity_id: str, - status: dict, - service: str, - state: str, - bypass_throttle: Generator[None], - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - appliance: Mock, - get_appliances: MagicMock, -) -> None: - """Test switch functionality - entity description setup.""" - appliance.status.update( - HomeConnectAppliance.json2dict( - load_json_object_fixture("home_connect/settings.json") - .get(appliance.name) - .get("data") - .get("settings") - ) - ) - get_appliances.return_value = [appliance] - - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - - appliance.status.update(status) - await hass.services.async_call( - SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - assert hass.states.is_state(entity_id, state) - - -@pytest.mark.parametrize( - ( - "entity_id", - "status", - "service", - "mock_attr", - "problematic_appliance", - "exception_match", - ), - [ - ( - 
"switch.fridgefreezer_freezer_super_mode", - {REFRIGERATION_SUPERMODEFREEZER: {"value": ""}}, - SERVICE_TURN_ON, - "set_setting", - "FridgeFreezer", - r"Error.*turn.*on.*key.*", - ), - ( - "switch.fridgefreezer_freezer_super_mode", - {REFRIGERATION_SUPERMODEFREEZER: {"value": ""}}, - SERVICE_TURN_OFF, - "set_setting", - "FridgeFreezer", - r"Error.*turn.*off.*key.*", - ), - ], - indirect=["problematic_appliance"], -) -async def test_ent_desc_switch_exception_handling( - entity_id: str, - status: dict, - service: str, - mock_attr: str, - exception_match: str, - bypass_throttle: Generator[None], - hass: HomeAssistant, - integration_setup: Callable[[], Awaitable[bool]], - config_entry: MockConfigEntry, - setup_credentials: None, - problematic_appliance: Mock, - get_appliances: MagicMock, -) -> None: - """Test switch exception handling - entity description setup.""" - problematic_appliance.status.update( - HomeConnectAppliance.json2dict( - load_json_object_fixture("home_connect/settings.json") - .get(problematic_appliance.name) - .get("data") - .get("settings") - ) - ) - get_appliances.return_value = [problematic_appliance] - - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - - # Assert that an exception is called. - with pytest.raises(HomeConnectError): - getattr(problematic_appliance, mock_attr)() - - problematic_appliance.status.update(status) - with pytest.raises(ServiceValidationError, match=exception_match): - await hass.services.async_call( - SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - assert getattr(problematic_appliance, mock_attr).call_count == 2 - - -@pytest.mark.parametrize( - ("entity_id", "status", "allowed_values", "service", "power_state", "appliance"), - [ - ( - "switch.dishwasher_power", - {BSH_POWER_STATE: {"value": BSH_POWER_ON}}, - [BSH_POWER_ON, BSH_POWER_OFF], - SERVICE_TURN_ON, - STATE_ON, - "Dishwasher", - ), - ( - "switch.dishwasher_power", - {BSH_POWER_STATE: {"value": BSH_POWER_OFF}}, - [BSH_POWER_ON, BSH_POWER_OFF], - SERVICE_TURN_OFF, - STATE_OFF, - "Dishwasher", - ), - ( - "switch.dishwasher_power", - { - BSH_POWER_STATE: {"value": ""}, - BSH_OPERATION_STATE: { - "value": "BSH.Common.EnumType.OperationState.Run" - }, - }, - [BSH_POWER_ON], - SERVICE_TURN_ON, - STATE_ON, - "Dishwasher", - ), - ( - "switch.dishwasher_power", - { - BSH_POWER_STATE: {"value": ""}, - BSH_OPERATION_STATE: { - "value": "BSH.Common.EnumType.OperationState.Inactive" - }, - }, - [BSH_POWER_ON], - SERVICE_TURN_ON, - STATE_OFF, - "Dishwasher", - ), - ( - "switch.dishwasher_power", - {BSH_POWER_STATE: {"value": BSH_POWER_ON}}, - [BSH_POWER_ON, BSH_POWER_STANDBY], - SERVICE_TURN_ON, - STATE_ON, - "Dishwasher", - ), - ( - "switch.dishwasher_power", - {BSH_POWER_STATE: {"value": BSH_POWER_STANDBY}}, - [BSH_POWER_ON, BSH_POWER_STANDBY], - SERVICE_TURN_OFF, - STATE_OFF, - "Dishwasher", - ), - ], - indirect=["appliance"], -) -@pytest.mark.usefixtures("bypass_throttle") -async def test_power_swtich( - entity_id: str, - status: dict, - allowed_values: list[str], - service: str, - power_state: str, - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - appliance: Mock, - get_appliances: MagicMock, -) -> None: - """Test power switch functionality.""" - appliance.get.side_effect = [ - { - ATTR_CONSTRAINTS: { - ATTR_ALLOWED_VALUES: allowed_values, - }, - } - ] - 
appliance.status.update(SETTINGS_STATUS) - appliance.status.update(status) - get_appliances.return_value = [appliance] - - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - - await hass.services.async_call( - SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - assert hass.states.is_state(entity_id, power_state) - - -@pytest.mark.parametrize( - ("entity_id", "allowed_values", "service", "appliance", "exception_match"), - [ - ( - "switch.dishwasher_power", - [BSH_POWER_ON], - SERVICE_TURN_OFF, - "Dishwasher", - r".*not support.*turn.*off.*", - ), - ( - "switch.dishwasher_power", - None, - SERVICE_TURN_OFF, - "Dishwasher", - r".*Unable.*turn.*off.*support.*not.*determined.*", - ), - ], - indirect=["appliance"], -) -@pytest.mark.usefixtures("bypass_throttle") -async def test_power_switch_service_validation_errors( - entity_id: str, - allowed_values: list[str], - service: str, - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - appliance: Mock, - exception_match: str, - get_appliances: MagicMock, -) -> None: - """Test power switch functionality validation errors.""" - if allowed_values: - appliance.get.side_effect = [ - { - ATTR_CONSTRAINTS: { - ATTR_ALLOWED_VALUES: allowed_values, - }, - } - ] - appliance.status.update(SETTINGS_STATUS) - get_appliances.return_value = [appliance] - - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - - appliance.status.update({BSH_POWER_STATE: {"value": BSH_POWER_ON}}) - - with pytest.raises(ServiceValidationError, match=exception_match): - await hass.services.async_call( - SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True - ) diff --git a/tests/components/home_connect/test_time.py b/tests/components/home_connect/test_time.py deleted file mode 100644 index 25ce39786a5..00000000000 --- a/tests/components/home_connect/test_time.py +++ /dev/null @@ -1,150 +0,0 @@ -"""Tests for home_connect time entities.""" - -from collections.abc import Awaitable, Callable, Generator -from datetime import time -from unittest.mock import MagicMock, Mock - -from homeconnect.api import HomeConnectError -import pytest - -from homeassistant.components.home_connect.const import ATTR_VALUE -from homeassistant.components.time import DOMAIN as TIME_DOMAIN, SERVICE_SET_VALUE -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, ATTR_TIME, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError - -from .conftest import get_all_appliances - -from tests.common import MockConfigEntry - - -@pytest.fixture -def platforms() -> list[str]: - """Fixture to specify platforms to test.""" - return [Platform.TIME] - - -async def test_time( - bypass_throttle: Generator[None], - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - get_appliances: Mock, -) -> None: - """Test time entity.""" - get_appliances.side_effect = get_all_appliances - assert config_entry.state is ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state is ConfigEntryState.LOADED - - -@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) -@pytest.mark.parametrize( - ("entity_id", 
"setting_key", "setting_value", "expected_state"), - [ - ( - f"{TIME_DOMAIN}.oven_alarm_clock", - "BSH.Common.Setting.AlarmClock", - {ATTR_VALUE: 59}, - str(time(second=59)), - ), - ( - f"{TIME_DOMAIN}.oven_alarm_clock", - "BSH.Common.Setting.AlarmClock", - {ATTR_VALUE: None}, - "unknown", - ), - ( - f"{TIME_DOMAIN}.oven_alarm_clock", - "BSH.Common.Setting.AlarmClock", - None, - "unknown", - ), - ], -) -@pytest.mark.usefixtures("bypass_throttle") -async def test_time_entity_functionality( - appliance: Mock, - entity_id: str, - setting_key: str, - setting_value: dict, - expected_state: str, - bypass_throttle: Generator[None], - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - get_appliances: MagicMock, -) -> None: - """Test time entity functionality.""" - get_appliances.return_value = [appliance] - appliance.status.update({setting_key: setting_value}) - - assert config_entry.state is ConfigEntryState.NOT_LOADED - assert await integration_setup() - assert config_entry.state is ConfigEntryState.LOADED - assert hass.states.is_state(entity_id, expected_state) - - new_value = 30 - assert hass.states.get(entity_id).state != new_value - await hass.services.async_call( - TIME_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: entity_id, - ATTR_TIME: time(second=new_value), - }, - blocking=True, - ) - appliance.set_setting.assert_called_once_with(setting_key, new_value) - - -@pytest.mark.parametrize("problematic_appliance", ["Oven"], indirect=True) -@pytest.mark.parametrize( - ("entity_id", "setting_key", "mock_attr"), - [ - ( - f"{TIME_DOMAIN}.oven_alarm_clock", - "BSH.Common.Setting.AlarmClock", - "set_setting", - ), - ], -) -@pytest.mark.usefixtures("bypass_throttle") -async def test_time_entity_error( - problematic_appliance: Mock, - entity_id: str, - setting_key: str, - mock_attr: str, - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - get_appliances: MagicMock, -) -> None: - """Test time entity error.""" - get_appliances.return_value = [problematic_appliance] - - assert config_entry.state is ConfigEntryState.NOT_LOADED - problematic_appliance.status.update({setting_key: {}}) - assert await integration_setup() - assert config_entry.state is ConfigEntryState.LOADED - - with pytest.raises(HomeConnectError): - getattr(problematic_appliance, mock_attr)() - - with pytest.raises( - ServiceValidationError, match=r"Error.*assign.*value.*to.*setting.*" - ): - await hass.services.async_call( - TIME_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: entity_id, - ATTR_TIME: time(minute=1), - }, - blocking=True, - ) - assert getattr(problematic_appliance, mock_attr).call_count == 2 diff --git a/tests/components/homeassistant/snapshots/test_exposed_entities.ambr b/tests/components/homeassistant/snapshots/test_exposed_entities.ambr index 9c93655cd4e..55b95186b49 100644 --- a/tests/components/homeassistant/snapshots/test_exposed_entities.ambr +++ b/tests/components/homeassistant/snapshots/test_exposed_entities.ambr @@ -13,3 +13,13 @@ dict({ }) # --- +# name: test_listeners + dict({ + 'light.kitchen': dict({ + 'should_expose': True, + }), + 'switch.test_unique1': mappingproxy({ + 'should_expose': True, + }), + }) +# --- diff --git a/tests/components/homeassistant/test_init.py b/tests/components/homeassistant/test_init.py index 33d78cd6c9f..d090da280a0 100644 --- a/tests/components/homeassistant/test_init.py +++ 
b/tests/components/homeassistant/test_init.py @@ -1,5 +1,7 @@ """The tests for Core components.""" +import asyncio +import unittest from unittest.mock import Mock, patch import pytest @@ -7,6 +9,7 @@ import voluptuous as vol import yaml from homeassistant import config +import homeassistant.components as comps from homeassistant.components.homeassistant import ( ATTR_ENTRY_ID, ATTR_SAFE_MODE, @@ -41,134 +44,206 @@ from tests.common import ( MockUser, async_capture_events, async_mock_service, + get_test_home_assistant, + mock_service, patch_yaml_files, ) -async def test_turn_on_without_entities(hass: HomeAssistant) -> None: - """Test turn_on method without entities.""" - await async_setup_component(hass, ha.DOMAIN, {}) - calls = async_mock_service(hass, "light", SERVICE_TURN_ON) - await hass.services.async_call(ha.DOMAIN, SERVICE_TURN_ON, blocking=True) - assert len(calls) == 0 +def turn_on(hass, entity_id=None, **service_data): + """Turn specified entity on if possible. + + This is a legacy helper method. Do not use it for new tests. + """ + if entity_id is not None: + service_data[ATTR_ENTITY_ID] = entity_id + + hass.services.call(ha.DOMAIN, SERVICE_TURN_ON, service_data) -async def test_turn_on(hass: HomeAssistant) -> None: - """Test turn_on method.""" - await async_setup_component(hass, ha.DOMAIN, {}) - calls = async_mock_service(hass, "light", SERVICE_TURN_ON) - await hass.services.async_call( - ha.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: "light.Ceiling"}, blocking=True - ) - assert len(calls) == 1 +def turn_off(hass, entity_id=None, **service_data): + """Turn specified entity off. + + This is a legacy helper method. Do not use it for new tests. + """ + if entity_id is not None: + service_data[ATTR_ENTITY_ID] = entity_id + + hass.services.call(ha.DOMAIN, SERVICE_TURN_OFF, service_data) -async def test_turn_off(hass: HomeAssistant) -> None: - """Test turn_off method.""" - await async_setup_component(hass, ha.DOMAIN, {}) - calls = async_mock_service(hass, "light", SERVICE_TURN_OFF) - await hass.services.async_call( - ha.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: "light.Bowl"}, blocking=True - ) - assert len(calls) == 1 +def toggle(hass, entity_id=None, **service_data): + """Toggle specified entity. + + This is a legacy helper method. Do not use it for new tests. + """ + if entity_id is not None: + service_data[ATTR_ENTITY_ID] = entity_id + + hass.services.call(ha.DOMAIN, SERVICE_TOGGLE, service_data) -async def test_toggle(hass: HomeAssistant) -> None: - """Test toggle method.""" - await async_setup_component(hass, ha.DOMAIN, {}) - calls = async_mock_service(hass, "light", SERVICE_TOGGLE) - await hass.services.async_call( - ha.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: "light.Bowl"}, blocking=True - ) - assert len(calls) == 1 +def stop(hass): + """Stop Home Assistant. + + This is a legacy helper method. Do not use it for new tests. + """ + hass.services.call(ha.DOMAIN, SERVICE_HOMEASSISTANT_STOP) -@patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) -async def test_reload_core_conf(hass: HomeAssistant) -> None: - """Test reload core conf service.""" - await async_setup_component(hass, ha.DOMAIN, {}) - ent = entity.Entity() - ent.entity_id = "test.entity" - ent.hass = hass - ent.async_write_ha_state() +def restart(hass): + """Stop Home Assistant. - state = hass.states.get("test.entity") - assert state is not None - assert state.state == "unknown" - assert state.attributes == {} + This is a legacy helper method. Do not use it for new tests. 
+ """ + hass.services.call(ha.DOMAIN, SERVICE_HOMEASSISTANT_RESTART) - files = { - config.YAML_CONFIG_FILE: yaml.dump( - { - ha.DOMAIN: { - "country": "SE", # To avoid creating issue country_not_configured - "latitude": 10, - "longitude": 20, - "customize": {"test.Entity": {"hello": "world"}}, + +def check_config(hass): + """Check the config files. + + This is a legacy helper method. Do not use it for new tests. + """ + hass.services.call(ha.DOMAIN, SERVICE_CHECK_CONFIG) + + +def reload_core_config(hass): + """Reload the core config. + + This is a legacy helper method. Do not use it for new tests. + """ + hass.services.call(ha.DOMAIN, SERVICE_RELOAD_CORE_CONFIG) + + +class TestComponentsCore(unittest.TestCase): + """Test homeassistant.components module.""" + + def setUp(self): + """Set up things to be run when tests are started.""" + self._manager = get_test_home_assistant() + self.hass = self._manager.__enter__() + assert asyncio.run_coroutine_threadsafe( + async_setup_component(self.hass, "homeassistant", {}), self.hass.loop + ).result() + + self.hass.states.set("light.Bowl", STATE_ON) + self.hass.states.set("light.Ceiling", STATE_OFF) + + def tearDown(self) -> None: + """Tear down hass object.""" + self.hass.stop() + self._manager.__exit__(None, None, None) + + def test_is_on(self): + """Test is_on method.""" + with pytest.raises( + RuntimeError, + match="Detected code that uses homeassistant.components.is_on. This is deprecated and will stop working", + ): + assert comps.is_on(self.hass, "light.Bowl") + + def test_turn_on_without_entities(self): + """Test turn_on method without entities.""" + calls = mock_service(self.hass, "light", SERVICE_TURN_ON) + turn_on(self.hass) + self.hass.block_till_done() + assert len(calls) == 0 + + def test_turn_on(self): + """Test turn_on method.""" + calls = mock_service(self.hass, "light", SERVICE_TURN_ON) + turn_on(self.hass, "light.Ceiling") + self.hass.block_till_done() + assert len(calls) == 1 + + def test_turn_off(self): + """Test turn_off method.""" + calls = mock_service(self.hass, "light", SERVICE_TURN_OFF) + turn_off(self.hass, "light.Bowl") + self.hass.block_till_done() + assert len(calls) == 1 + + def test_toggle(self): + """Test toggle method.""" + calls = mock_service(self.hass, "light", SERVICE_TOGGLE) + toggle(self.hass, "light.Bowl") + self.hass.block_till_done() + assert len(calls) == 1 + + @patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) + def test_reload_core_conf(self): + """Test reload core conf service.""" + ent = entity.Entity() + ent.entity_id = "test.entity" + ent.hass = self.hass + ent.schedule_update_ha_state() + self.hass.block_till_done() + + state = self.hass.states.get("test.entity") + assert state is not None + assert state.state == "unknown" + assert state.attributes == {} + + files = { + config.YAML_CONFIG_FILE: yaml.dump( + { + ha.DOMAIN: { + "country": "SE", # To avoid creating issue country_not_configured + "latitude": 10, + "longitude": 20, + "customize": {"test.Entity": {"hello": "world"}}, + } } - } - ) - } - with patch_yaml_files(files, True): - await hass.services.async_call( - ha.DOMAIN, SERVICE_RELOAD_CORE_CONFIG, blocking=True - ) + ) + } + with patch_yaml_files(files, True): + reload_core_config(self.hass) + self.hass.block_till_done() - assert hass.config.latitude == 10 - assert hass.config.longitude == 20 + assert self.hass.config.latitude == 10 + assert self.hass.config.longitude == 20 - ent.async_write_ha_state() + ent.schedule_update_ha_state() + self.hass.block_till_done() - state 
= hass.states.get("test.entity") - assert state is not None - assert state.state == "unknown" - assert state.attributes.get("hello") == "world" + state = self.hass.states.get("test.entity") + assert state is not None + assert state.state == "unknown" + assert state.attributes.get("hello") == "world" + @patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) + @patch("homeassistant.components.homeassistant._LOGGER.error") + @patch("homeassistant.config.async_process_ha_core_config") + def test_reload_core_with_wrong_conf(self, mock_process, mock_error): + """Test reload core conf service.""" + files = {config.YAML_CONFIG_FILE: yaml.dump(["invalid", "config"])} + with patch_yaml_files(files, True): + reload_core_config(self.hass) + self.hass.block_till_done() -@patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) -@patch("homeassistant.components.homeassistant._LOGGER.error") -@patch("homeassistant.core_config.async_process_ha_core_config") -async def test_reload_core_with_wrong_conf( - mock_process, mock_error, hass: HomeAssistant -) -> None: - """Test reload core conf service.""" - files = {config.YAML_CONFIG_FILE: yaml.dump(["invalid", "config"])} - await async_setup_component(hass, ha.DOMAIN, {}) - with patch_yaml_files(files, True): - await hass.services.async_call( - ha.DOMAIN, SERVICE_RELOAD_CORE_CONFIG, blocking=True - ) + assert mock_error.called + assert mock_process.called is False - assert mock_error.called - assert mock_process.called is False + @patch("homeassistant.core.HomeAssistant.async_stop", return_value=None) + @patch( + "homeassistant.config.async_check_ha_config_file", + side_effect=HomeAssistantError("Test error"), + ) + def test_restart_homeassistant_wrong_conf(self, mock_check, mock_restart): + """Test stop service.""" + restart(self.hass) + self.hass.block_till_done() + assert mock_check.called + assert not mock_restart.called - -@patch("homeassistant.core.HomeAssistant.async_stop", return_value=None) -@patch( - "homeassistant.config.async_check_ha_config_file", - side_effect=HomeAssistantError("Test error"), -) -async def test_restart_homeassistant_wrong_conf( - mock_check, mock_restart, hass: HomeAssistant -) -> None: - """Test restart service with error.""" - await async_setup_component(hass, ha.DOMAIN, {}) - with pytest.raises(HomeAssistantError, match="Test error"): - await hass.services.async_call( - ha.DOMAIN, SERVICE_HOMEASSISTANT_RESTART, blocking=True - ) - assert mock_check.called - assert not mock_restart.called - - -@patch("homeassistant.core.HomeAssistant.async_stop", return_value=None) -@patch("homeassistant.config.async_check_ha_config_file", return_value=None) -async def test_check_config(mock_check, mock_stop, hass: HomeAssistant) -> None: - """Test stop service.""" - await async_setup_component(hass, ha.DOMAIN, {}) - await hass.services.async_call(ha.DOMAIN, SERVICE_CHECK_CONFIG, blocking=True) - assert mock_check.called - assert not mock_stop.called + @patch("homeassistant.core.HomeAssistant.async_stop", return_value=None) + @patch("homeassistant.config.async_check_ha_config_file", return_value=None) + def test_check_config(self, mock_check, mock_stop): + """Test stop service.""" + check_config(self.hass) + self.hass.block_till_done() + assert mock_check.called + assert not mock_stop.called async def test_turn_on_skips_domains_without_service( @@ -242,7 +317,7 @@ async def test_setting_location(hass: HomeAssistant) -> None: assert elevation != 50 await hass.services.async_call( "homeassistant", - 
SERVICE_SET_LOCATION, + "set_location", {"latitude": 30, "longitude": 40}, blocking=True, ) @@ -253,24 +328,12 @@ async def test_setting_location(hass: HomeAssistant) -> None: await hass.services.async_call( "homeassistant", - SERVICE_SET_LOCATION, + "set_location", {"latitude": 30, "longitude": 40, "elevation": 50}, blocking=True, ) - assert hass.config.latitude == 30 - assert hass.config.longitude == 40 assert hass.config.elevation == 50 - await hass.services.async_call( - "homeassistant", - SERVICE_SET_LOCATION, - {"latitude": 30, "longitude": 40, "elevation": 0}, - blocking=True, - ) - assert hass.config.latitude == 30 - assert hass.config.longitude == 40 - assert hass.config.elevation == 0 - async def test_require_admin( hass: HomeAssistant, hass_read_only_user: MockUser diff --git a/tests/components/homeassistant/test_repairs.py b/tests/components/homeassistant/test_repairs.py deleted file mode 100644 index f84b29d8d2d..00000000000 --- a/tests/components/homeassistant/test_repairs.py +++ /dev/null @@ -1,135 +0,0 @@ -"""Test the Homeassistant repairs module.""" - -from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry -from tests.components.repairs import ( - async_process_repairs_platforms, - process_repair_fix_flow, - start_repair_fix_flow, -) -from tests.typing import ClientSessionGenerator, WebSocketGenerator - - -async def test_integration_not_found_confirm_step( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test the integration_not_found issue confirm step.""" - assert await async_setup_component(hass, HOMEASSISTANT_DOMAIN, {}) - await hass.async_block_till_done() - assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) - await hass.async_block_till_done() - MockConfigEntry(domain="test1").add_to_hass(hass) - assert await async_setup_component(hass, "test1", {}) is False - await hass.async_block_till_done() - entry1 = MockConfigEntry(domain="test1") - entry1.add_to_hass(hass) - entry2 = MockConfigEntry(domain="test1") - entry2.add_to_hass(hass) - issue_id = "integration_not_found.test1" - - await async_process_repairs_platforms(hass) - ws_client = await hass_ws_client(hass) - http_client = await hass_client() - - # Assert the issue is present - await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - assert msg["success"] - assert len(msg["result"]["issues"]) == 1 - issue = msg["result"]["issues"][0] - assert issue["issue_id"] == issue_id - assert issue["translation_placeholders"] == {"domain": "test1"} - - data = await start_repair_fix_flow(http_client, HOMEASSISTANT_DOMAIN, issue_id) - - flow_id = data["flow_id"] - assert data["step_id"] == "init" - assert data["description_placeholders"] == {"domain": "test1"} - - data = await process_repair_fix_flow(http_client, flow_id) - - assert data["type"] == "menu" - - # Apply fix - data = await process_repair_fix_flow( - http_client, flow_id, json={"next_step_id": "confirm"} - ) - - assert data["type"] == "create_entry" - - await hass.async_block_till_done() - - assert hass.config_entries.async_get_entry(entry1.entry_id) is None - assert hass.config_entries.async_get_entry(entry2.entry_id) is None - - # Assert the issue is resolved - await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) - 
msg = await ws_client.receive_json() - assert msg["success"] - assert len(msg["result"]["issues"]) == 0 - - -async def test_integration_not_found_ignore_step( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test the integration_not_found issue ignore step.""" - assert await async_setup_component(hass, HOMEASSISTANT_DOMAIN, {}) - await hass.async_block_till_done() - assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) - await hass.async_block_till_done() - MockConfigEntry(domain="test1").add_to_hass(hass) - assert await async_setup_component(hass, "test1", {}) is False - await hass.async_block_till_done() - entry1 = MockConfigEntry(domain="test1") - entry1.add_to_hass(hass) - issue_id = "integration_not_found.test1" - - await async_process_repairs_platforms(hass) - ws_client = await hass_ws_client(hass) - http_client = await hass_client() - - # Assert the issue is present - await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - assert msg["success"] - assert len(msg["result"]["issues"]) == 1 - issue = msg["result"]["issues"][0] - assert issue["issue_id"] == issue_id - assert issue["translation_placeholders"] == {"domain": "test1"} - - data = await start_repair_fix_flow(http_client, HOMEASSISTANT_DOMAIN, issue_id) - - flow_id = data["flow_id"] - assert data["step_id"] == "init" - assert data["description_placeholders"] == {"domain": "test1"} - - # Show menu - data = await process_repair_fix_flow(http_client, flow_id) - - assert data["type"] == "menu" - - # Apply fix - data = await process_repair_fix_flow( - http_client, flow_id, json={"next_step_id": "ignore"} - ) - - assert data["type"] == "abort" - assert data["reason"] == "issue_ignored" - - await hass.async_block_till_done() - - assert hass.config_entries.async_get_entry(entry1.entry_id) - - # Assert the issue is resolved - await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - assert msg["success"] - assert len(msg["result"]["issues"]) == 1 - assert msg["result"]["issues"][0].get("dismissed_version") is not None diff --git a/tests/components/homeassistant/triggers/test_event.py b/tests/components/homeassistant/triggers/test_event.py index 293a9007175..b7bf8e5e7f3 100644 --- a/tests/components/homeassistant/triggers/test_event.py +++ b/tests/components/homeassistant/triggers/test_event.py @@ -7,24 +7,28 @@ from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, SERVICE_TURN_O from homeassistant.core import Context, HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component -from tests.common import mock_component +from tests.common import async_mock_service, mock_component @pytest.fixture -def context_with_user() -> Context: +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + +@pytest.fixture +def context_with_user(): """Create a context with default user_id.""" return Context(user_id="test_user_id") @pytest.fixture(autouse=True) -def setup_comp(hass: HomeAssistant) -> None: +def setup_comp(hass): """Initialize components.""" mock_component(hass, "group") -async def test_if_fires_on_event( - hass: HomeAssistant, service_calls: list[ServiceCall] -) -> None: +async def test_if_fires_on_event(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test the firing of events.""" context = Context() @@ -44,8 
+48,8 @@ async def test_if_fires_on_event( hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].context.parent_id == context.id + assert len(calls) == 1 + assert calls[0].context.parent_id == context.id await hass.services.async_call( automation.DOMAIN, @@ -53,16 +57,15 @@ async def test_if_fires_on_event( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[0].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["id"] == 0 async def test_if_fires_on_templated_event( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test the firing of events.""" context = Context() @@ -81,8 +84,8 @@ async def test_if_fires_on_templated_event( hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].context.parent_id == context.id + assert len(calls) == 1 + assert calls[0].context.parent_id == context.id await hass.services.async_call( automation.DOMAIN, @@ -90,15 +93,14 @@ async def test_if_fires_on_templated_event( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 1 async def test_if_fires_on_multiple_events( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test the firing of events.""" context = Context() @@ -121,13 +123,13 @@ async def test_if_fires_on_multiple_events( await hass.async_block_till_done() hass.bus.async_fire("test2_event", context=context) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[0].context.parent_id == context.id - assert service_calls[1].context.parent_id == context.id + assert len(calls) == 2 + assert calls[0].context.parent_id == context.id + assert calls[1].context.parent_id == context.id async def test_if_fires_on_event_extra_data( - hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of events still matches with event data and context.""" assert await async_setup_component( @@ -144,7 +146,7 @@ async def test_if_fires_on_event_extra_data( "test_event", {"extra_key": "extra_data"}, context=context_with_user ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, @@ -152,15 +154,14 @@ async def test_if_fires_on_event_extra_data( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 1 async def test_if_fires_on_event_with_data_and_context( - hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of events with data and context.""" assert await async_setup_component( @@ -188,7 +189,7 @@ async def test_if_fires_on_event_with_data_and_context( context=context_with_user, ) await 
hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.bus.async_fire( "test_event", @@ -196,18 +197,18 @@ async def test_if_fires_on_event_with_data_and_context( context=context_with_user, ) await hass.async_block_till_done() - assert len(service_calls) == 1 # No new call + assert len(calls) == 1 # No new call hass.bus.async_fire( "test_event", {"some_attr": "some_value", "another": "value", "second_attr": "second_value"}, ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_event_with_templated_data_and_context( - hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of events with templated data and context.""" assert await async_setup_component( @@ -240,7 +241,7 @@ async def test_if_fires_on_event_with_templated_data_and_context( context=context_with_user, ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.bus.async_fire( "test_event", @@ -248,18 +249,18 @@ async def test_if_fires_on_event_with_templated_data_and_context( context=context_with_user, ) await hass.async_block_till_done() - assert len(service_calls) == 1 # No new call + assert len(calls) == 1 # No new call hass.bus.async_fire( "test_event", {"attr_1": "milk", "another": "value", "attr_2": "beer"}, ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_event_with_empty_data_and_context_config( - hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of events with empty data and context config. @@ -288,11 +289,11 @@ async def test_if_fires_on_event_with_empty_data_and_context_config( context=context_with_user, ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_event_with_nested_data( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test the firing of events with nested data. @@ -318,11 +319,11 @@ async def test_if_fires_on_event_with_nested_data( "test_event", {"parent_attr": {"some_attr": "some_value", "another": "value"}} ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_event_with_empty_data( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test the firing of events with empty data. @@ -344,11 +345,11 @@ async def test_if_fires_on_event_with_empty_data( ) hass.bus.async_fire("test_event", {"any_attr": {}}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_sample_zha_event( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test the firing of events with a sample zha event. 
@@ -389,7 +390,7 @@ async def test_if_fires_on_sample_zha_event( }, ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.bus.async_fire( "zha_event", @@ -403,11 +404,11 @@ async def test_if_fires_on_sample_zha_event( }, ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_not_fires_if_event_data_not_matches( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test firing of event if no data match.""" assert await async_setup_component( @@ -427,11 +428,11 @@ async def test_if_not_fires_if_event_data_not_matches( hass.bus.async_fire("test_event", {"some_attr": "some_other_value"}) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_if_not_fires_if_event_context_not_matches( - hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context ) -> None: """Test firing of event if no context match.""" assert await async_setup_component( @@ -451,11 +452,11 @@ async def test_if_not_fires_if_event_context_not_matches( hass.bus.async_fire("test_event", {}, context=context_with_user) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_if_fires_on_multiple_user_ids( - hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of event when the trigger has multiple user ids. @@ -480,11 +481,11 @@ async def test_if_fires_on_multiple_user_ids( hass.bus.async_fire("test_event", {}, context=context_with_user) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_event_data_with_list( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test the (non)firing of event when the data schema has lists.""" assert await async_setup_component( @@ -505,17 +506,17 @@ async def test_event_data_with_list( hass.bus.async_fire("test_event", {"some_attr": [1, 2]}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # don't match a single value hass.bus.async_fire("test_event", {"some_attr": 1}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # don't match a containing list hass.bus.async_fire("test_event", {"some_attr": [1, 2, 3]}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize( @@ -523,7 +524,7 @@ async def test_event_data_with_list( ) async def test_state_reported_event( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, event_type: str | list[str], ) -> None: @@ -546,7 +547,7 @@ async def test_state_reported_event( hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 assert ( "Unnamed automation failed to setup triggers and has been disabled: Can't " "listen to state_reported in event trigger for dictionary value @ " @@ -555,9 +556,7 @@ async def test_state_reported_event( async def test_templated_state_reported_event( - hass: HomeAssistant, - service_calls: list[ServiceCall], - caplog: 
pytest.LogCaptureFixture, + hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture ) -> None: """Test triggering on state reported event.""" context = Context() @@ -579,7 +578,7 @@ async def test_templated_state_reported_event( hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 assert ( "Got error 'Can't listen to state_reported in event trigger' " "when setting up triggers for automation 0" in caplog.text diff --git a/tests/components/homeassistant/triggers/test_numeric_state.py b/tests/components/homeassistant/triggers/test_numeric_state.py index 85882274fec..59cd7e2a2a7 100644 --- a/tests/components/homeassistant/triggers/test_numeric_state.py +++ b/tests/components/homeassistant/triggers/test_numeric_state.py @@ -23,11 +23,22 @@ from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import assert_setup_component, async_fire_time_changed, mock_component +from tests.common import ( + assert_setup_component, + async_fire_time_changed, + async_mock_service, + mock_component, +) + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") @pytest.fixture(autouse=True) -async def setup_comp(hass: HomeAssistant) -> None: +async def setup_comp(hass): """Initialize components.""" mock_component(hass, "group") await async_setup_component( @@ -52,7 +63,7 @@ async def setup_comp(hass: HomeAssistant) -> None: "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_fires_on_entity_removal( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with removed entity.""" hass.states.async_set("test.entity", 11) @@ -75,14 +86,14 @@ async def test_if_not_fires_on_entity_removal( # Entity disappears hass.states.async_remove("test.entity") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_fires_on_entity_change_below( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -109,8 +120,8 @@ async def test_if_fires_on_entity_change_below( # 9 is below 10 hass.states.async_set("test.entity", 9, context=context) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].context.parent_id == context.id + assert len(calls) == 1 + assert calls[0].context.parent_id == context.id # Set above 12 so the automation will fire again hass.states.async_set("test.entity", 12) @@ -121,12 +132,10 @@ async def test_if_fires_on_entity_change_below( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 - hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[0].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["id"] == 0 @pytest.mark.parametrize( @@ -135,7 +144,7 @@ async def test_if_fires_on_entity_change_below( async def 
test_if_fires_on_entity_change_below_uuid( hass: HomeAssistant, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], below: int | str, ) -> None: """Test the firing with changed entity specified by registry entry id.""" @@ -168,8 +177,8 @@ async def test_if_fires_on_entity_change_below_uuid( # 9 is below 10 hass.states.async_set("test.entity", 9, context=context) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].context.parent_id == context.id + assert len(calls) == 1 + assert calls[0].context.parent_id == context.id # Set above 12 so the automation will fire again hass.states.async_set("test.entity", 12) @@ -180,19 +189,17 @@ async def test_if_fires_on_entity_change_below_uuid( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 - hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[0].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["id"] == 0 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_fires_on_entity_change_over_to_below( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -216,14 +223,14 @@ async def test_if_fires_on_entity_change_over_to_below( # 9 is below 10 hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_fires_on_entities_change_over_to_below( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entities.""" hass.states.async_set("test.entity_1", 11) @@ -248,17 +255,17 @@ async def test_if_fires_on_entities_change_over_to_below( # 9 is below 10 hass.states.async_set("test.entity_1", 9) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_fires_on_entity_change_below_to_below( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entity.""" context = Context() @@ -283,25 +290,25 @@ async def test_if_not_fires_on_entity_change_below_to_below( # 9 is below 10 so this should fire hass.states.async_set("test.entity", 9, context=context) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].context.parent_id == context.id + assert len(calls) == 1 + assert calls[0].context.parent_id == context.id # already below so should not fire again hass.states.async_set("test.entity", 5) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # still below so should not fire again hass.states.async_set("test.entity", 3) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert 
len(calls) == 1 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_below_fires_on_entity_change_to_equal( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -325,14 +332,14 @@ async def test_if_not_below_fires_on_entity_change_to_equal( # 10 is not below 10 so this should not fire again hass.states.async_set("test.entity", 10) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_fires_on_initial_entity_below( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test the firing when starting with a match.""" hass.states.async_set("test.entity", 9) @@ -356,14 +363,14 @@ async def test_if_not_fires_on_initial_entity_below( # Do not fire on first update when initial state was already below hass.states.async_set("test.entity", 8) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize( "above", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_fires_on_initial_entity_above( - hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str + hass: HomeAssistant, calls: list[ServiceCall], above: int | str ) -> None: """Test the firing when starting with a match.""" hass.states.async_set("test.entity", 11) @@ -387,14 +394,14 @@ async def test_if_not_fires_on_initial_entity_above( # Do not fire on first update when initial state was already above hass.states.async_set("test.entity", 12) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize( "above", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_fires_on_entity_change_above( - hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str + hass: HomeAssistant, calls: list[ServiceCall], above: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 9) @@ -417,11 +424,11 @@ async def test_if_fires_on_entity_change_above( # 11 is above 10 hass.states.async_set("test.entity", 11) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_entity_unavailable_at_startup( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test the firing with changed entity at startup.""" assert await async_setup_component( @@ -441,12 +448,12 @@ async def test_if_fires_on_entity_unavailable_at_startup( # 11 is above 10 hass.states.async_set("test.entity", 11) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize("above", [10, "input_number.value_10"]) async def test_if_fires_on_entity_change_below_to_above( - hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str + hass: HomeAssistant, calls: list[ServiceCall], above: int | str ) -> None: """Test the firing with changed entity.""" # set initial state @@ -471,12 +478,12 @@ async def test_if_fires_on_entity_change_below_to_above( # 
11 is above 10 and 9 is below hass.states.async_set("test.entity", 11) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize("above", [10, "input_number.value_10"]) async def test_if_not_fires_on_entity_change_above_to_above( - hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str + hass: HomeAssistant, calls: list[ServiceCall], above: int | str ) -> None: """Test the firing with changed entity.""" # set initial state @@ -501,17 +508,17 @@ async def test_if_not_fires_on_entity_change_above_to_above( # 12 is above 10 so this should fire hass.states.async_set("test.entity", 12) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # already above, should not fire again hass.states.async_set("test.entity", 15) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize("above", [10, "input_number.value_10"]) async def test_if_not_above_fires_on_entity_change_to_equal( - hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str + hass: HomeAssistant, calls: list[ServiceCall], above: int | str ) -> None: """Test the firing with changed entity.""" # set initial state @@ -536,7 +543,7 @@ async def test_if_not_above_fires_on_entity_change_to_equal( # 10 is not above 10 so this should not fire again hass.states.async_set("test.entity", 10) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize( @@ -549,10 +556,7 @@ async def test_if_not_above_fires_on_entity_change_to_equal( ], ) async def test_if_fires_on_entity_change_below_range( - hass: HomeAssistant, - service_calls: list[ServiceCall], - above: int | str, - below: int | str, + hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -576,7 +580,7 @@ async def test_if_fires_on_entity_change_below_range( # 9 is below 10 hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize( @@ -589,10 +593,7 @@ async def test_if_fires_on_entity_change_below_range( ], ) async def test_if_fires_on_entity_change_below_above_range( - hass: HomeAssistant, - service_calls: list[ServiceCall], - above: int | str, - below: int | str, + hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str ) -> None: """Test the firing with changed entity.""" assert await async_setup_component( @@ -613,7 +614,7 @@ async def test_if_fires_on_entity_change_below_above_range( # 4 is below 5 hass.states.async_set("test.entity", 4) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize( @@ -626,10 +627,7 @@ async def test_if_fires_on_entity_change_below_above_range( ], ) async def test_if_fires_on_entity_change_over_to_below_range( - hass: HomeAssistant, - service_calls: list[ServiceCall], - above: int | str, - below: int | str, + hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -654,7 +652,7 @@ async def test_if_fires_on_entity_change_over_to_below_range( # 9 is below 10 hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 
@pytest.mark.parametrize( @@ -667,10 +665,7 @@ async def test_if_fires_on_entity_change_over_to_below_range( ], ) async def test_if_fires_on_entity_change_over_to_below_above_range( - hass: HomeAssistant, - service_calls: list[ServiceCall], - above: int | str, - below: int | str, + hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -695,12 +690,12 @@ async def test_if_fires_on_entity_change_over_to_below_above_range( # 4 is below 5 so it should not fire hass.states.async_set("test.entity", 4) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize("below", [100, "input_number.value_100"]) async def test_if_not_fires_if_entity_not_match( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test if not fired with non matching entity.""" assert await async_setup_component( @@ -720,13 +715,11 @@ async def test_if_not_fires_if_entity_not_match( hass.states.async_set("test.entity", 11) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_if_not_fires_and_warns_if_below_entity_unknown( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - service_calls: list[ServiceCall], + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, calls: list[ServiceCall] ) -> None: """Test if warns with unknown below entity.""" assert await async_setup_component( @@ -749,7 +742,7 @@ async def test_if_not_fires_and_warns_if_below_entity_unknown( hass.states.async_set("test.entity", 1) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING @@ -757,7 +750,7 @@ async def test_if_not_fires_and_warns_if_below_entity_unknown( @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_fires_on_entity_change_below_with_attribute( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" hass.states.async_set("test.entity", 11, {"test_attribute": 11}) @@ -780,12 +773,12 @@ async def test_if_fires_on_entity_change_below_with_attribute( # 9 is below 10 hass.states.async_set("test.entity", 9, {"test_attribute": 11}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_not_fires_on_entity_change_not_below_with_attribute( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test attributes.""" assert await async_setup_component( @@ -805,12 +798,12 @@ async def test_if_not_fires_on_entity_change_not_below_with_attribute( # 11 is not below 10 hass.states.async_set("test.entity", 11, {"test_attribute": 9}) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_fires_on_attribute_change_with_attribute_below( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test attributes 
change.""" hass.states.async_set("test.entity", "entity", {"test_attribute": 11}) @@ -834,12 +827,12 @@ async def test_if_fires_on_attribute_change_with_attribute_below( # 9 is below 10 hass.states.async_set("test.entity", "entity", {"test_attribute": 9}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_not_fires_on_attribute_change_with_attribute_not_below( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" assert await async_setup_component( @@ -860,12 +853,12 @@ async def test_if_not_fires_on_attribute_change_with_attribute_not_below( # 11 is not below 10 hass.states.async_set("test.entity", "entity", {"test_attribute": 11}) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_not_fires_on_entity_change_with_attribute_below( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" assert await async_setup_component( @@ -886,12 +879,12 @@ async def test_if_not_fires_on_entity_change_with_attribute_below( # 11 is not below 10, entity state value should not be tested hass.states.async_set("test.entity", "9", {"test_attribute": 11}) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_not_fires_on_entity_change_with_not_attribute_below( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" assert await async_setup_component( @@ -912,12 +905,12 @@ async def test_if_not_fires_on_entity_change_with_not_attribute_below( # 11 is not below 10, entity state value should not be tested hass.states.async_set("test.entity", "entity") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_fires_on_attr_change_with_attribute_below_and_multiple_attr( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" hass.states.async_set( @@ -944,12 +937,12 @@ async def test_fires_on_attr_change_with_attribute_below_and_multiple_attr( "test.entity", "entity", {"test_attribute": 9, "not_test_attribute": 11} ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_template_list( - hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str + hass: HomeAssistant, calls: list[ServiceCall], below: int | str ) -> None: """Test template list.""" hass.states.async_set("test.entity", "entity", {"test_attribute": [11, 15, 11]}) @@ -972,12 +965,12 @@ async def test_template_list( # 3 is below 10 hass.states.async_set("test.entity", "entity", {"test_attribute": [11, 15, 3]}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize("below", [10.0, 
"input_number.value_10"]) async def test_template_string( - hass: HomeAssistant, service_calls: list[ServiceCall], below: float | str + hass: HomeAssistant, calls: list[ServiceCall], below: float | str ) -> None: """Test template string.""" assert await async_setup_component( @@ -1011,15 +1004,15 @@ async def test_template_string( await hass.async_block_till_done() hass.states.async_set("test.entity", "test state 2", {"test_attribute": "0.9"}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"numeric_state - test.entity - {below} - None - test state 1 - test state 2" ) async def test_not_fires_on_attr_change_with_attr_not_below_multiple_attr( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test if not fired changed attributes.""" assert await async_setup_component( @@ -1042,7 +1035,7 @@ async def test_not_fires_on_attr_change_with_attr_not_below_multiple_attr( "test.entity", "entity", {"test_attribute": 11, "not_test_attribute": 9} ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize( @@ -1055,10 +1048,7 @@ async def test_not_fires_on_attr_change_with_attr_not_below_multiple_attr( ], ) async def test_if_action( - hass: HomeAssistant, - service_calls: list[ServiceCall], - above: int | str, - below: int | str, + hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str ) -> None: """Test if action.""" entity_id = "domain.test_entity" @@ -1083,19 +1073,19 @@ async def test_if_action( hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.states.async_set(entity_id, 8) hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.states.async_set(entity_id, 9) hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 @pytest.mark.parametrize( @@ -1108,7 +1098,7 @@ async def test_if_action( ], ) async def test_if_fails_setup_bad_for( - hass: HomeAssistant, above: int | str, below: int | str + hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str ) -> None: """Test for setup failure for bad for.""" hass.states.async_set("test.entity", 5) @@ -1134,7 +1124,9 @@ async def test_if_fails_setup_bad_for( assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE -async def test_if_fails_setup_for_without_above_below(hass: HomeAssistant) -> None: +async def test_if_fails_setup_for_without_above_below( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: """Test for setup failures for missing above or below.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -1166,7 +1158,7 @@ async def test_if_fails_setup_for_without_above_below(hass: HomeAssistant) -> No async def test_if_not_fires_on_entity_change_with_for( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1195,7 +1187,7 @@ async def test_if_not_fires_on_entity_change_with_for( freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize( @@ 
-1208,10 +1200,7 @@ async def test_if_not_fires_on_entity_change_with_for( ], ) async def test_if_not_fires_on_entities_change_with_for_after_stop( - hass: HomeAssistant, - service_calls: list[ServiceCall], - above: int | str, - below: int | str, + hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str ) -> None: """Test for not firing on entities change with for after stop.""" hass.states.async_set("test.entity_1", 0) @@ -1243,7 +1232,7 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.states.async_set("test.entity_1", 15) hass.states.async_set("test.entity_2", 15) @@ -1257,11 +1246,9 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 1 @pytest.mark.parametrize( @@ -1276,7 +1263,7 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( async def test_if_fires_on_entity_change_with_for_attribute_change( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1307,11 +1294,11 @@ async def test_if_fires_on_entity_change_with_for_attribute_change( async_fire_time_changed(hass) hass.states.async_set("test.entity", 9, attributes={"mock_attr": "attr_change"}) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 freezer.tick(timedelta(seconds=4)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize( @@ -1324,10 +1311,7 @@ async def test_if_fires_on_entity_change_with_for_attribute_change( ], ) async def test_if_fires_on_entity_change_with_for( - hass: HomeAssistant, - service_calls: list[ServiceCall], - above: int | str, - below: int | str, + hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str ) -> None: """Test for firing on entity change with for.""" hass.states.async_set("test.entity", 0) @@ -1354,12 +1338,12 @@ async def test_if_fires_on_entity_change_with_for( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize("above", [10, "input_number.value_10"]) async def test_wait_template_with_trigger( - hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str + hass: HomeAssistant, calls: list[ServiceCall], above: int | str ) -> None: """Test using wait template with 'trigger.entity_id'.""" hass.states.async_set("test.entity", "0") @@ -1397,8 +1381,8 @@ async def test_wait_template_with_trigger( hass.states.async_set("test.entity", "12") hass.states.async_set("test.entity", "8") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "numeric_state - test.entity - 12" + assert len(calls) == 1 + assert calls[0].data["some"] == "numeric_state - test.entity - 12" @pytest.mark.parametrize( @@ -1413,7 +1397,7 @@ async def test_wait_template_with_trigger( async def 
test_if_fires_on_entities_change_no_overlap( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1448,16 +1432,16 @@ async def test_if_fires_on_entities_change_no_overlap( freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test.entity_1" + assert len(calls) == 1 + assert calls[0].data["some"] == "test.entity_1" hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "test.entity_2" + assert len(calls) == 2 + assert calls[1].data["some"] == "test.entity_2" @pytest.mark.parametrize( @@ -1472,7 +1456,7 @@ async def test_if_fires_on_entities_change_no_overlap( async def test_if_fires_on_entities_change_overlap( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1516,18 +1500,18 @@ async def test_if_fires_on_entities_change_overlap( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test.entity_1" + assert len(calls) == 1 + assert calls[0].data["some"] == "test.entity_1" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "test.entity_2" + assert len(calls) == 2 + assert calls[1].data["some"] == "test.entity_2" @pytest.mark.parametrize( @@ -1540,10 +1524,7 @@ async def test_if_fires_on_entities_change_overlap( ], ) async def test_if_fires_on_change_with_for_template_1( - hass: HomeAssistant, - service_calls: list[ServiceCall], - above: int | str, - below: int | str, + hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", 0) @@ -1568,10 +1549,10 @@ async def test_if_fires_on_change_with_for_template_1( hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize( @@ -1584,10 +1565,7 @@ async def test_if_fires_on_change_with_for_template_1( ], ) async def test_if_fires_on_change_with_for_template_2( - hass: HomeAssistant, - service_calls: list[ServiceCall], - above: int | str, - below: int | str, + hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", 0) @@ -1612,10 +1590,10 @@ async def test_if_fires_on_change_with_for_template_2( hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await 
hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize( @@ -1628,10 +1606,7 @@ async def test_if_fires_on_change_with_for_template_2( ], ) async def test_if_fires_on_change_with_for_template_3( - hass: HomeAssistant, - service_calls: list[ServiceCall], - above: int | str, - below: int | str, + hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", 0) @@ -1656,14 +1631,14 @@ async def test_if_fires_on_change_with_for_template_3( hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_not_fires_on_error_with_for_template( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for not firing on error with for template.""" hass.states.async_set("test.entity", 0) @@ -1687,17 +1662,17 @@ async def test_if_not_fires_on_error_with_for_template( hass.states.async_set("test.entity", 101) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=3)) hass.states.async_set("test.entity", "unavailable") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=3)) hass.states.async_set("test.entity", 101) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 @pytest.mark.parametrize( @@ -1710,7 +1685,7 @@ async def test_if_not_fires_on_error_with_for_template( ], ) async def test_invalid_for_template( - hass: HomeAssistant, above: int | str, below: int | str + hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str ) -> None: """Test for invalid for template.""" hass.states.async_set("test.entity", 0) @@ -1751,7 +1726,7 @@ async def test_invalid_for_template( async def test_if_fires_on_entities_change_overlap_for_template( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1798,22 +1773,22 @@ async def test_if_fires_on_entities_change_overlap_for_template( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test.entity_1 - 0:00:05" + assert len(calls) == 1 + assert calls[0].data["some"] == "test.entity_1 - 0:00:05" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 freezer.tick(timedelta(seconds=5)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "test.entity_2 - 0:00:10" + assert len(calls) == 2 + assert calls[1].data["some"] == "test.entity_2 - 0:00:10" async def test_below_above(hass: HomeAssistant) -> None: @@ -1848,7 +1823,7 @@ async def 
test_schema_unacceptable_entities(hass: HomeAssistant) -> None: @pytest.mark.parametrize("above", [3, "input_number.value_3"]) async def test_attribute_if_fires_on_entity_change_with_both_filters( - hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str + hass: HomeAssistant, calls: list[ServiceCall], above: int | str ) -> None: """Test for firing if both filters are match attribute.""" hass.states.async_set("test.entity", "bla", {"test-measurement": 1}) @@ -1872,12 +1847,12 @@ async def test_attribute_if_fires_on_entity_change_with_both_filters( hass.states.async_set("test.entity", "bla", {"test-measurement": 4}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize("above", [3, "input_number.value_3"]) async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( - hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str + hass: HomeAssistant, calls: list[ServiceCall], above: int | str ) -> None: """Test for not firing on entity change with for after stop trigger.""" hass.states.async_set("test.entity", "bla", {"test-measurement": 1}) @@ -1905,10 +1880,10 @@ async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( hass.states.async_set("test.entity", "bla", {"test-measurement": 4}) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 @pytest.mark.parametrize( @@ -1918,7 +1893,7 @@ async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( async def test_variables_priority( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + calls: list[ServiceCall], above: int, below: int, ) -> None: @@ -1966,17 +1941,17 @@ async def test_variables_priority( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test.entity_1 - 0:00:05" + assert len(calls) == 1 + assert calls[0].data["some"] == "test.entity_1 - 0:00:05" @pytest.mark.parametrize("multiplier", [1, 5]) async def test_template_variable( - hass: HomeAssistant, service_calls: list[ServiceCall], multiplier: int + hass: HomeAssistant, calls: list[ServiceCall], multiplier: int ) -> None: """Test template variable.""" hass.states.async_set("test.entity", "entity", {"test_attribute": [11, 15, 11]}) @@ -2001,6 +1976,6 @@ async def test_template_variable( hass.states.async_set("test.entity", "entity", {"test_attribute": [11, 15, 3]}) await hass.async_block_till_done() if multiplier * 3 < 10: - assert len(service_calls) == 1 + assert len(calls) == 1 else: - assert len(service_calls) == 0 + assert len(calls) == 0 diff --git a/tests/components/homeassistant/triggers/test_state.py b/tests/components/homeassistant/triggers/test_state.py index 83157a158a6..a40ecae7579 100644 --- a/tests/components/homeassistant/triggers/test_state.py +++ b/tests/components/homeassistant/triggers/test_state.py @@ -19,18 +19,29 @@ from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import 
assert_setup_component, async_fire_time_changed, mock_component +from tests.common import ( + assert_setup_component, + async_fire_time_changed, + async_mock_service, + mock_component, +) + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") @pytest.fixture(autouse=True) -def setup_comp(hass: HomeAssistant) -> None: +def setup_comp(hass): """Initialize components.""" mock_component(hass, "group") hass.states.async_set("test.entity", "hello") async def test_if_fires_on_entity_change( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on entity change.""" context = Context() @@ -63,12 +74,9 @@ async def test_if_fires_on_entity_change( hass.states.async_set("test.entity", "world", context=context) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].context.parent_id == context.id - assert ( - service_calls[0].data["some"] - == "state - test.entity - hello - world - None - 0" - ) + assert len(calls) == 1 + assert calls[0].context.parent_id == context.id + assert calls[0].data["some"] == "state - test.entity - hello - world - None - 0" await hass.services.async_call( automation.DOMAIN, @@ -76,16 +84,13 @@ async def test_if_fires_on_entity_change( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 hass.states.async_set("test.entity", "planet") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 1 async def test_if_fires_on_entity_change_uuid( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + hass: HomeAssistant, entity_registry: er.EntityRegistry, calls: list[ServiceCall] ) -> None: """Test for firing on entity change.""" context = Context() @@ -125,11 +130,9 @@ async def test_if_fires_on_entity_change_uuid( hass.states.async_set("test.beer", "world", context=context) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].context.parent_id == context.id - assert ( - service_calls[0].data["some"] == "state - test.beer - hello - world - None - 0" - ) + assert len(calls) == 1 + assert calls[0].context.parent_id == context.id + assert calls[0].data["some"] == "state - test.beer - hello - world - None - 0" await hass.services.async_call( automation.DOMAIN, @@ -137,14 +140,13 @@ async def test_if_fires_on_entity_change_uuid( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 hass.states.async_set("test.beer", "planet") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 1 async def test_if_fires_on_entity_change_with_from_filter( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on entity change with filter.""" assert await async_setup_component( @@ -165,11 +167,11 @@ async def test_if_fires_on_entity_change_with_from_filter( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_entity_change_with_not_from_filter( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on entity change inverse filter.""" assert await async_setup_component( @@ -191,15 +193,15 @@ 
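The test_state.py hunks above swap the shared service_calls fixture for a file-local calls fixture built on async_mock_service(hass, "test", "automation"). That list only records invocations of the mocked test.automation service, which is presumably why the interim assert len(service_calls) == 2 checks right after automation.turn_off are dropped and the later counts shrink by one: the turn_off call itself is no longer tracked. A self-contained sketch of the pattern, assuming it runs inside the Home Assistant core test suite (hass fixture and tests.common helpers available); the test name and entity are illustrative:

import pytest

from homeassistant.components import automation
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.setup import async_setup_component

from tests.common import async_mock_service


@pytest.fixture
def calls(hass: HomeAssistant) -> list[ServiceCall]:
    """Track calls to a mocked test.automation service."""
    return async_mock_service(hass, "test", "automation")


async def test_fires_once_on_state_change(
    hass: HomeAssistant, calls: list[ServiceCall]
) -> None:
    """Illustrative test: only test.automation invocations land in `calls`."""
    hass.states.async_set("test.entity", "hello")
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": {
                    "platform": "state",
                    "entity_id": "test.entity",
                    "to": "world",
                },
                "action": {"service": "test.automation"},
            }
        },
    )
    await hass.async_block_till_done()

    hass.states.async_set("test.entity", "world")
    await hass.async_block_till_done()
    # Calls to other services (e.g. automation.turn_off) are not appended here,
    # presumably unlike the broader service_calls fixture being reverted.
    assert len(calls) == 1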
async def test_if_fires_on_entity_change_with_not_from_filter( # Do not fire from hello hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert not service_calls + assert not calls hass.states.async_set("test.entity", "universum") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_entity_change_with_to_filter( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on entity change with to filter.""" assert await async_setup_component( @@ -220,11 +222,11 @@ async def test_if_fires_on_entity_change_with_to_filter( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_entity_change_with_not_to_filter( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on entity change with to filter.""" assert await async_setup_component( @@ -246,15 +248,15 @@ async def test_if_fires_on_entity_change_with_not_to_filter( # Do not fire to world hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert not service_calls + assert not calls hass.states.async_set("test.entity", "universum") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_entity_change_with_from_filter_all( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on entity change with filter.""" assert await async_setup_component( @@ -276,11 +278,11 @@ async def test_if_fires_on_entity_change_with_from_filter_all( hass.states.async_set("test.entity", "world") hass.states.async_set("test.entity", "world", {"attribute": 5}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_entity_change_with_to_filter_all( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on entity change with to filter.""" assert await async_setup_component( @@ -302,11 +304,11 @@ async def test_if_fires_on_entity_change_with_to_filter_all( hass.states.async_set("test.entity", "world") hass.states.async_set("test.entity", "world", {"attribute": 5}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_attribute_change_with_to_filter( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for not firing on attribute change.""" assert await async_setup_component( @@ -328,11 +330,11 @@ async def test_if_fires_on_attribute_change_with_to_filter( hass.states.async_set("test.entity", "world", {"test_attribute": 11}) hass.states.async_set("test.entity", "world", {"test_attribute": 12}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_entity_change_with_both_filters( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing if both filters are a non match.""" assert await async_setup_component( @@ -354,11 +356,11 @@ async def test_if_fires_on_entity_change_with_both_filters( hass.states.async_set("test.entity", 
"world") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_entity_change_with_not_from_to( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing if not from doesn't match and to match.""" assert await async_setup_component( @@ -381,31 +383,31 @@ async def test_if_fires_on_entity_change_with_not_from_to( # We should not trigger from hello hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert not service_calls + assert not calls # We should not trigger to != galaxy hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert not service_calls + assert not calls # We should trigger to galaxy hass.states.async_set("test.entity", "galaxy") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # We should not trigger from milky way hass.states.async_set("test.entity", "milky_way") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # We should trigger to universe hass.states.async_set("test.entity", "universe") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 async def test_if_fires_on_entity_change_with_from_not_to( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing if not from doesn't match and to match.""" assert await async_setup_component( @@ -428,31 +430,31 @@ async def test_if_fires_on_entity_change_with_from_not_to( # We should trigger to world from hello hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Reset back to hello, should not trigger hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # We should not trigger to galaxy hass.states.async_set("test.entity", "galaxy") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # We should trigger form galaxy to milky way hass.states.async_set("test.entity", "milky_way") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 # We should not trigger to universe hass.states.async_set("test.entity", "universe") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 async def test_if_not_fires_if_to_filter_not_match( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for not firing if to filter is not a match.""" assert await async_setup_component( @@ -474,11 +476,11 @@ async def test_if_not_fires_if_to_filter_not_match( hass.states.async_set("test.entity", "moon") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_if_not_fires_if_from_filter_not_match( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for not firing if from filter is not a match.""" hass.states.async_set("test.entity", "bye") @@ -502,11 +504,11 @@ async def test_if_not_fires_if_from_filter_not_match( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def 
test_if_not_fires_if_entity_not_match( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for not firing if entity is not matching.""" assert await async_setup_component( @@ -523,10 +525,10 @@ async def test_if_not_fires_if_entity_not_match( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 -async def test_if_action(hass: HomeAssistant, service_calls: list[ServiceCall]) -> None: +async def test_if_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test for to action.""" entity_id = "domain.test_entity" test_state = "new_state" @@ -549,16 +551,18 @@ async def test_if_action(hass: HomeAssistant, service_calls: list[ServiceCall]) hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.states.async_set(entity_id, test_state + "something") hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 -async def test_if_fails_setup_if_to_boolean_value(hass: HomeAssistant) -> None: +async def test_if_fails_setup_if_to_boolean_value( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: """Test for setup failure for boolean to.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -578,7 +582,9 @@ async def test_if_fails_setup_if_to_boolean_value(hass: HomeAssistant) -> None: assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE -async def test_if_fails_setup_if_from_boolean_value(hass: HomeAssistant) -> None: +async def test_if_fails_setup_if_from_boolean_value( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: """Test for setup failure for boolean from.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -598,7 +604,9 @@ async def test_if_fails_setup_if_from_boolean_value(hass: HomeAssistant) -> None assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE -async def test_if_fails_setup_bad_for(hass: HomeAssistant) -> None: +async def test_if_fails_setup_bad_for( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: """Test for setup failure for bad for.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -620,7 +628,7 @@ async def test_if_fails_setup_bad_for(hass: HomeAssistant) -> None: async def test_if_not_fires_on_entity_change_with_for( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for not firing on entity change with for.""" assert await async_setup_component( @@ -646,11 +654,11 @@ async def test_if_not_fires_on_entity_change_with_for( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_if_not_fires_on_entities_change_with_for_after_stop( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for not firing on entity change with for after stop trigger.""" assert await async_setup_component( @@ -678,7 +686,7 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await 
hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.states.async_set("test.entity_1", "world_no") hass.states.async_set("test.entity_2", "world_no") @@ -692,17 +700,14 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 1 async def test_if_fires_on_entity_change_with_for_attribute_change( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing on entity change with for and attribute change.""" assert await async_setup_component( @@ -730,17 +735,15 @@ async def test_if_fires_on_entity_change_with_for_attribute_change( "test.entity", "world", attributes={"mock_attr": "attr_change"} ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 freezer.tick(timedelta(seconds=4)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_entity_change_with_for_multiple_force_update( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing on entity change with for and force update.""" assert await async_setup_component( @@ -767,15 +770,15 @@ async def test_if_fires_on_entity_change_with_for_multiple_force_update( async_fire_time_changed(hass) hass.states.async_set("test.force_entity", "world", None, True) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 freezer.tick(timedelta(seconds=4)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_entity_change_with_for( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on entity change with for.""" assert await async_setup_component( @@ -799,11 +802,11 @@ async def test_if_fires_on_entity_change_with_for( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_entity_change_with_for_without_to( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on entity change with for.""" assert await async_setup_component( @@ -827,24 +830,22 @@ async def test_if_fires_on_entity_change_with_for_without_to( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set("test.entity", "world") await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=4)) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def 
test_if_does_not_fires_on_entity_change_with_for_without_to_2( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing on entity change with for.""" assert await async_setup_component( @@ -870,11 +871,11 @@ async def test_if_does_not_fires_on_entity_change_with_for_without_to_2( async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_if_fires_on_entity_creation_and_removal( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on entity creation and removal, with to/from constraints.""" # set automations for multiple combinations to/from @@ -916,32 +917,32 @@ async def test_if_fires_on_entity_creation_and_removal( # automation with match_all triggers on creation hass.states.async_set("test.entity_0", "any", context=context_0) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].context.parent_id == context_0.id + assert len(calls) == 1 + assert calls[0].context.parent_id == context_0.id # create entities, trigger on test.entity_2 ('to' matches, no 'from') hass.states.async_set("test.entity_1", "hello", context=context_1) hass.states.async_set("test.entity_2", "world", context=context_2) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].context.parent_id == context_2.id + assert len(calls) == 2 + assert calls[1].context.parent_id == context_2.id # removal of both, trigger on test.entity_1 ('from' matches, no 'to') assert hass.states.async_remove("test.entity_1", context=context_1) assert hass.states.async_remove("test.entity_2", context=context_2) await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[2].context.parent_id == context_1.id + assert len(calls) == 3 + assert calls[2].context.parent_id == context_1.id # automation with match_all triggers on removal assert hass.states.async_remove("test.entity_0", context=context_0) await hass.async_block_till_done() - assert len(service_calls) == 4 - assert service_calls[3].context.parent_id == context_0.id + assert len(calls) == 4 + assert calls[3].context.parent_id == context_0.id async def test_if_fires_on_for_condition( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing if condition is on.""" point1 = dt_util.utcnow() @@ -970,17 +971,17 @@ async def test_if_fires_on_for_condition( # not enough time has passed hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 # Time travel 10 secs into the future mock_utcnow.return_value = point2 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_for_condition_attribute_change( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing if condition is on with attribute change.""" point1 = dt_util.utcnow() @@ -1010,7 +1011,7 @@ async def test_if_fires_on_for_condition_attribute_change( # not enough time has passed hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 # Still not enough 
time has passed, but an attribute is changed mock_utcnow.return_value = point2 @@ -1019,16 +1020,18 @@ async def test_if_fires_on_for_condition_attribute_change( ) hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 # Enough time has now passed mock_utcnow.return_value = point3 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 -async def test_if_fails_setup_for_without_time(hass: HomeAssistant) -> None: +async def test_if_fails_setup_for_without_time( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: """Test for setup failure if no time is provided.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -1050,7 +1053,9 @@ async def test_if_fails_setup_for_without_time(hass: HomeAssistant) -> None: assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE -async def test_if_fails_setup_for_without_entity(hass: HomeAssistant) -> None: +async def test_if_fails_setup_for_without_entity( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: """Test for setup failure if no entity is provided.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -1072,7 +1077,7 @@ async def test_if_fails_setup_for_without_entity(hass: HomeAssistant) -> None: async def test_wait_template_with_trigger( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test using wait template with 'trigger.entity_id'.""" assert await async_setup_component( @@ -1108,14 +1113,12 @@ async def test_wait_template_with_trigger( hass.states.async_set("test.entity", "world") hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "state - test.entity - hello - world" + assert len(calls) == 1 + assert calls[0].data["some"] == "state - test.entity - hello - world" async def test_if_fires_on_entities_change_no_overlap( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing on entities change with no overlap.""" assert await async_setup_component( @@ -1143,22 +1146,20 @@ async def test_if_fires_on_entities_change_no_overlap( freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test.entity_1" + assert len(calls) == 1 + assert calls[0].data["some"] == "test.entity_1" hass.states.async_set("test.entity_2", "world") await hass.async_block_till_done() freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "test.entity_2" + assert len(calls) == 2 + assert calls[1].data["some"] == "test.entity_2" async def test_if_fires_on_entities_change_overlap( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing on entities change with overlap.""" assert await async_setup_component( @@ -1195,22 +1196,22 @@ async def test_if_fires_on_entities_change_overlap( async_fire_time_changed(hass) 
hass.states.async_set("test.entity_2", "world") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test.entity_1" + assert len(calls) == 1 + assert calls[0].data["some"] == "test.entity_1" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "test.entity_2" + assert len(calls) == 2 + assert calls[1].data["some"] == "test.entity_2" async def test_if_fires_on_change_with_for_template_1( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" assert await async_setup_component( @@ -1231,14 +1232,14 @@ async def test_if_fires_on_change_with_for_template_1( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_change_with_for_template_2( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" assert await async_setup_component( @@ -1259,14 +1260,14 @@ async def test_if_fires_on_change_with_for_template_2( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_change_with_for_template_3( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" assert await async_setup_component( @@ -1287,14 +1288,14 @@ async def test_if_fires_on_change_with_for_template_3( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_change_with_for_template_4( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" assert await async_setup_component( @@ -1316,14 +1317,14 @@ async def test_if_fires_on_change_with_for_template_4( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_change_from_with_for( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with from/for.""" assert await async_setup_component( @@ -1350,11 +1351,11 @@ async def test_if_fires_on_change_from_with_for( 
await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=1)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_not_fires_on_change_from_with_for( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on change with from/for.""" assert await async_setup_component( @@ -1381,10 +1382,12 @@ async def test_if_not_fires_on_change_from_with_for( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=1)) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 -async def test_invalid_for_template_1(hass: HomeAssistant) -> None: +async def test_invalid_for_template_1( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: """Test for invalid for template.""" assert await async_setup_component( hass, @@ -1409,9 +1412,7 @@ async def test_invalid_for_template_1(hass: HomeAssistant) -> None: async def test_if_fires_on_entities_change_overlap_for_template( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing on entities change with overlap and for template.""" assert await async_setup_component( @@ -1451,26 +1452,26 @@ async def test_if_fires_on_entities_change_overlap_for_template( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", "world") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test.entity_1 - 0:00:05" + assert len(calls) == 1 + assert calls[0].data["some"] == "test.entity_1 - 0:00:05" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 freezer.tick(timedelta(seconds=5)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "test.entity_2 - 0:00:10" + assert len(calls) == 2 + assert calls[1].data["some"] == "test.entity_2 - 0:00:10" async def test_attribute_if_fires_on_entity_change_with_both_filters( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing if both filters are match attribute.""" hass.states.async_set("test.entity", "bla", {"name": "hello"}) @@ -1495,11 +1496,11 @@ async def test_attribute_if_fires_on_entity_change_with_both_filters( hass.states.async_set("test.entity", "bla", {"name": "world"}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_attribute_if_fires_on_entity_where_attr_stays_constant( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing if attribute stays the same.""" hass.states.async_set("test.entity", "bla", {"name": "hello", "other": "old_value"}) @@ -1523,21 +1524,21 @@ async def test_attribute_if_fires_on_entity_where_attr_stays_constant( # Leave all attributes the same hass.states.async_set("test.entity", "bla", {"name": "hello", "other": "old_value"}) await hass.async_block_till_done() - assert 
len(service_calls) == 0 + assert len(calls) == 0 # Change the untracked attribute hass.states.async_set("test.entity", "bla", {"name": "hello", "other": "new_value"}) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 # Change the tracked attribute hass.states.async_set("test.entity", "bla", {"name": "world", "other": "old_value"}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_attribute_if_fires_on_entity_where_attr_stays_constant_filter( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing if attribute stays the same.""" hass.states.async_set("test.entity", "bla", {"name": "other_name"}) @@ -1564,25 +1565,25 @@ async def test_attribute_if_fires_on_entity_where_attr_stays_constant_filter( "test.entity", "bla", {"name": "best_name", "other": "old_value"} ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Change the untracked attribute hass.states.async_set( "test.entity", "bla", {"name": "best_name", "other": "new_value"} ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Change the tracked attribute hass.states.async_set( "test.entity", "bla", {"name": "other_name", "other": "old_value"} ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_attribute_if_fires_on_entity_where_attr_stays_constant_all( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing if attribute stays the same.""" hass.states.async_set("test.entity", "bla", {"name": "hello", "other": "old_value"}) @@ -1609,25 +1610,25 @@ async def test_attribute_if_fires_on_entity_where_attr_stays_constant_all( "test.entity", "bla", {"name": "name_1", "other": "old_value"} ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Change the untracked attribute hass.states.async_set( "test.entity", "bla", {"name": "name_1", "other": "new_value"} ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Change the tracked attribute hass.states.async_set( "test.entity", "bla", {"name": "name_2", "other": "old_value"} ) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for not firing on entity change with for after stop trigger.""" hass.states.async_set("test.entity", "bla", {"name": "hello"}) @@ -1657,33 +1658,33 @@ async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( # Test that the for-check works hass.states.async_set("test.entity", "bla", {"name": "world"}) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) hass.states.async_set("test.entity", "bla", {"name": "world", "something": "else"}) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Now remove state while inside "for" 
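The attribute-focused hunks above (the attr_stays_constant group) watch a single attribute rather than the state string; their trigger configurations fall outside this excerpt. For context, a hypothetical example of such a config using the state trigger's attribute option (all names are illustrative, not taken from this diff):

# Hypothetical sketch: fire only when the "name" attribute of test.entity changes
# from "hello" to "world"; other attributes and the state value itself are ignored.
ATTRIBUTE_TRIGGER_AUTOMATION = {
    "automation": {
        "trigger": {
            "platform": "state",
            "entity_id": "test.entity",
            "attribute": "name",
            "from": "hello",
            "to": "world",
        },
        "action": {"service": "test.automation"},
    }
}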
hass.states.async_set("test.entity", "bla", {"name": "hello"}) hass.states.async_set("test.entity", "bla", {"name": "world"}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.states.async_remove("test.entity") await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_attribute_if_fires_on_entity_change_with_both_filters_boolean( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing if both filters are match attribute.""" hass.states.async_set("test.entity", "bla", {"happening": False}) @@ -1708,13 +1709,11 @@ async def test_attribute_if_fires_on_entity_change_with_both_filters_boolean( hass.states.async_set("test.entity", "bla", {"happening": True}) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_variables_priority( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test an externally defined trigger variable is overridden.""" assert await async_setup_component( @@ -1755,19 +1754,19 @@ async def test_variables_priority( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", "world") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test.entity_1 - 0:00:05" + assert len(calls) == 1 + assert calls[0].data["some"] == "test.entity_1 - 0:00:05" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 freezer.tick(timedelta(seconds=5)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "test.entity_2 - 0:00:10" + assert len(calls) == 2 + assert calls[1].data["some"] == "test.entity_2 - 0:00:10" diff --git a/tests/components/homeassistant/triggers/test_time.py b/tests/components/homeassistant/triggers/test_time.py index 8900998a7b8..961bac6c367 100644 --- a/tests/components/homeassistant/triggers/test_time.py +++ b/tests/components/homeassistant/triggers/test_time.py @@ -20,19 +20,28 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import assert_setup_component, async_fire_time_changed, mock_component +from tests.common import ( + assert_setup_component, + async_fire_time_changed, + async_mock_service, + mock_component, +) + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") @pytest.fixture(autouse=True) -def setup_comp(hass: HomeAssistant) -> None: +def setup_comp(hass): """Initialize components.""" mock_component(hass, "group") async def test_if_fires_using_at( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing at.""" 
now = dt_util.now() @@ -62,9 +71,9 @@ async def test_if_fires_using_at( async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "time - 5" - assert service_calls[0].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["some"] == "time - 5" + assert calls[0].data["id"] == 0 @pytest.mark.parametrize( @@ -73,7 +82,7 @@ async def test_if_fires_using_at( async def test_if_fires_using_at_input_datetime( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + calls: list[ServiceCall], has_date, has_time, ) -> None: @@ -123,9 +132,9 @@ async def test_if_fires_using_at_input_datetime( async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - service_calls[1].data["some"] + calls[0].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-input_datetime.trigger" ) @@ -143,56 +152,27 @@ async def test_if_fires_using_at_input_datetime( }, blocking=True, ) - assert len(service_calls) == 3 await hass.async_block_till_done() async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(service_calls) == 4 + assert len(calls) == 2 assert ( - service_calls[3].data["some"] + calls[1].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-input_datetime.trigger" ) -@pytest.mark.parametrize( - ("conf_at", "trigger_deltas"), - [ - ( - ["5:00:00", "6:00:00", "{{ '7:00:00' }}"], - [timedelta(0), timedelta(hours=1), timedelta(hours=2)], - ), - ( - [ - "5:00:05", - {"entity_id": "sensor.next_alarm", "offset": "00:00:10"}, - "sensor.next_alarm", - ], - [timedelta(seconds=5), timedelta(seconds=10), timedelta(0)], - ), - ], -) async def test_if_fires_using_multiple_at( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], - conf_at: list[str | dict[str, int | str]], - trigger_deltas: list[timedelta], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: - """Test for firing at multiple trigger times.""" + """Test for firing at.""" now = dt_util.now() - start_dt = now.replace(hour=5, minute=0, second=0, microsecond=0) + timedelta(2) - - hass.states.async_set( - "sensor.next_alarm", - start_dt.isoformat(), - {ATTR_DEVICE_CLASS: SensorDeviceClass.TIMESTAMP}, - ) - - time_that_will_not_match_right_away = start_dt - timedelta(minutes=1) + trigger_dt = now.replace(hour=5, minute=0, second=0, microsecond=0) + timedelta(2) + time_that_will_not_match_right_away = trigger_dt - timedelta(minutes=1) freezer.move_to(dt_util.as_utc(time_that_will_not_match_right_away)) assert await async_setup_component( @@ -200,7 +180,7 @@ async def test_if_fires_using_multiple_at( automation.DOMAIN, { automation.DOMAIN: { - "trigger": {"platform": "time", "at": conf_at}, + "trigger": {"platform": "time", "at": ["5:00:00", "6:00:00"]}, "action": { "service": "test.automation", "data_template": { @@ -212,20 +192,21 @@ async def test_if_fires_using_multiple_at( ) await hass.async_block_till_done() - for count, delta in enumerate(sorted(trigger_deltas)): - async_fire_time_changed(hass, start_dt + delta + timedelta(seconds=1)) - await hass.async_block_till_done() + async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) + await hass.async_block_till_done() - assert len(service_calls) == count + 1 - assert ( - service_calls[count].data["some"] 
== f"time - {5 + (delta.seconds // 3600)}" - ) + assert len(calls) == 1 + assert calls[0].data["some"] == "time - 5" + + async_fire_time_changed(hass, trigger_dt + timedelta(hours=1, seconds=1)) + await hass.async_block_till_done() + + assert len(calls) == 2 + assert calls[1].data["some"] == "time - 6" async def test_if_not_fires_using_wrong_at( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """YAML translates time values to total seconds. @@ -261,12 +242,10 @@ async def test_if_not_fires_using_wrong_at( ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 -async def test_if_action_before( - hass: HomeAssistant, service_calls: list[ServiceCall] -) -> None: +async def test_if_action_before(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test for if action before.""" assert await async_setup_component( hass, @@ -288,18 +267,16 @@ async def test_if_action_before( hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 with patch("homeassistant.helpers.condition.dt_util.now", return_value=after_10): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 -async def test_if_action_after( - hass: HomeAssistant, service_calls: list[ServiceCall] -) -> None: +async def test_if_action_after(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test for if action after.""" assert await async_setup_component( hass, @@ -321,17 +298,17 @@ async def test_if_action_after( hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 with patch("homeassistant.helpers.condition.dt_util.now", return_value=after_10): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_action_one_weekday( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for if action with one weekday.""" assert await async_setup_component( @@ -355,17 +332,17 @@ async def test_if_action_one_weekday( hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 with patch("homeassistant.helpers.condition.dt_util.now", return_value=tuesday): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_action_list_weekday( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for action with a list of weekdays.""" assert await async_setup_component( @@ -390,19 +367,19 @@ async def test_if_action_list_weekday( hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 with patch("homeassistant.helpers.condition.dt_util.now", return_value=tuesday): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 with patch("homeassistant.helpers.condition.dt_util.now", return_value=wednesday): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 async def 
test_untrack_time_change(hass: HomeAssistant) -> None: @@ -438,14 +415,8 @@ async def test_untrack_time_change(hass: HomeAssistant) -> None: assert len(mock_track_time_change.mock_calls) == 3 -@pytest.mark.parametrize( - ("at_sensor"), ["sensor.next_alarm", "{{ 'sensor.next_alarm' }}"] -) async def test_if_fires_using_at_sensor( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], - at_sensor: str, + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing at sensor time.""" now = dt_util.now() @@ -468,7 +439,7 @@ async def test_if_fires_using_at_sensor( automation.DOMAIN, { automation.DOMAIN: { - "trigger": {"platform": "time", "at": at_sensor}, + "trigger": {"platform": "time", "at": "sensor.next_alarm"}, "action": { "service": "test.automation", "data_template": {"some": some_data}, @@ -481,9 +452,9 @@ async def test_if_fires_using_at_sensor( async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-sensor.next_alarm" ) @@ -499,9 +470,9 @@ async def test_if_fires_using_at_sensor( async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[1].data["some"] + calls[1].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-sensor.next_alarm" ) @@ -523,7 +494,7 @@ async def test_if_fires_using_at_sensor( await hass.async_block_till_done() # We should not have listened to anything - assert len(service_calls) == 2 + assert len(calls) == 2 # Now without device class hass.states.async_set( @@ -542,89 +513,7 @@ async def test_if_fires_using_at_sensor( await hass.async_block_till_done() # We should not have listened to anything - assert len(service_calls) == 2 - - -@pytest.mark.parametrize( - ("offset", "delta"), - [ - ("00:00:10", timedelta(seconds=10)), - ("-00:00:10", timedelta(seconds=-10)), - ({"minutes": 5}, timedelta(minutes=5)), - ], -) -async def test_if_fires_using_at_sensor_with_offset( - hass: HomeAssistant, - service_calls: list[ServiceCall], - freezer: FrozenDateTimeFactory, - offset: str | dict[str, int], - delta: timedelta, -) -> None: - """Test for firing at sensor time.""" - now = dt_util.now() - - start_dt = now.replace(hour=5, minute=0, second=0, microsecond=0) + timedelta(2) - trigger_dt = start_dt + delta - - hass.states.async_set( - "sensor.next_alarm", - start_dt.isoformat(), - {ATTR_DEVICE_CLASS: SensorDeviceClass.TIMESTAMP}, - ) - - time_that_will_not_match_right_away = trigger_dt - timedelta(minutes=1) - - some_data = "{{ trigger.platform }}-{{ trigger.now.day }}-{{ trigger.now.hour }}-{{ trigger.now.minute }}-{{ trigger.now.second }}-{{trigger.entity_id}}" - - freezer.move_to(dt_util.as_utc(time_that_will_not_match_right_away)) - assert await async_setup_component( - hass, - automation.DOMAIN, - { - automation.DOMAIN: { - "trigger": { - "platform": "time", - "at": { - "entity_id": "sensor.next_alarm", - "offset": offset, - }, - }, - "action": { - "service": "test.automation", - "data_template": {"some": some_data}, - }, - } - }, - ) - await hass.async_block_till_done() - - async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) - await hass.async_block_till_done() - - assert len(service_calls) == 1 - assert ( - service_calls[0].data["some"] - 
== f"time-{trigger_dt.day}-{trigger_dt.hour}-{trigger_dt.minute}-{trigger_dt.second}-sensor.next_alarm" - ) - - start_dt += timedelta(days=1, hours=1) - trigger_dt += timedelta(days=1, hours=1) - - hass.states.async_set( - "sensor.next_alarm", - start_dt.isoformat(), - {ATTR_DEVICE_CLASS: SensorDeviceClass.TIMESTAMP}, - ) - await hass.async_block_till_done() - - async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) - await hass.async_block_till_done() - - assert len(service_calls) == 2 - assert ( - service_calls[1].data["some"] - == f"time-{trigger_dt.day}-{trigger_dt.hour}-{trigger_dt.minute}-{trigger_dt.second}-sensor.next_alarm" - ) + assert len(calls) == 2 @pytest.mark.parametrize( @@ -633,14 +522,6 @@ async def test_if_fires_using_at_sensor_with_offset( {"platform": "time", "at": "input_datetime.bla"}, {"platform": "time", "at": "sensor.bla"}, {"platform": "time", "at": "12:34"}, - {"platform": "time", "at": "{{ '12:34' }}"}, - {"platform": "time", "at": "{{ 'input_datetime.bla' }}"}, - {"platform": "time", "at": "{{ 'sensor.bla' }}"}, - {"platform": "time", "at": {"entity_id": "sensor.bla", "offset": "-00:01"}}, - { - "platform": "time", - "at": [{"entity_id": "sensor.bla", "offset": "-01:00:00"}], - }, ], ) def test_schema_valid(conf) -> None: @@ -654,11 +535,6 @@ def test_schema_valid(conf) -> None: {"platform": "time", "at": "binary_sensor.bla"}, {"platform": "time", "at": 745}, {"platform": "time", "at": "25:00"}, - { - "platform": "time", - "at": {"entity_id": "input_datetime.bla", "offset": "0:10"}, - }, - {"platform": "time", "at": {"entity_id": "13:00:00", "offset": "0:10"}}, ], ) def test_schema_invalid(conf) -> None: @@ -668,7 +544,7 @@ def test_schema_invalid(conf) -> None: async def test_datetime_in_past_on_load( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test time trigger works if input_datetime is in past.""" await async_setup_component( @@ -690,7 +566,6 @@ async def test_datetime_in_past_on_load( }, blocking=True, ) - assert len(service_calls) == 1 await hass.async_block_till_done() assert await async_setup_component( @@ -712,7 +587,7 @@ async def test_datetime_in_past_on_load( async_fire_time_changed(hass, now) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 0 await hass.services.async_call( "input_datetime", @@ -723,81 +598,13 @@ async def test_datetime_in_past_on_load( }, blocking=True, ) - assert len(service_calls) == 2 await hass.async_block_till_done() async_fire_time_changed(hass, future + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(service_calls) == 3 + assert len(calls) == 1 assert ( - service_calls[2].data["some"] + calls[0].data["some"] == f"time-{future.day}-{future.hour}-input_datetime.my_trigger" ) - - -@pytest.mark.parametrize( - "trigger", - [ - {"platform": "time", "at": "{{ 'hello world' }}"}, - {"platform": "time", "at": "{{ 74 }}"}, - {"platform": "time", "at": "{{ true }}"}, - {"platform": "time", "at": "{{ 7.5465 }}"}, - ], -) -async def test_if_at_template_renders_bad_value( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - trigger: dict[str, str], -) -> None: - """Test for invalid templates.""" - assert await async_setup_component( - hass, - automation.DOMAIN, - { - automation.DOMAIN: { - "trigger": trigger, - "action": { - "service": "test.automation", - }, - } - }, - ) - - await hass.async_block_till_done() - - assert ( - "expected HH:MM, HH:MM:SS or Entity ID with domain 
'input_datetime' or 'sensor'" - in caplog.text - ) - - -@pytest.mark.parametrize( - "trigger", - [ - {"platform": "time", "at": "{{ now().strftime('%H:%M') }}"}, - {"platform": "time", "at": "{{ states('sensor.blah') | int(0) }}"}, - ], -) -async def test_if_at_template_limited_template( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - trigger: dict[str, str], -) -> None: - """Test for invalid templates.""" - assert await async_setup_component( - hass, - automation.DOMAIN, - { - automation.DOMAIN: { - "trigger": trigger, - "action": { - "service": "test.automation", - }, - } - }, - ) - - await hass.async_block_till_done() - - assert "is not supported in limited templates" in caplog.text diff --git a/tests/components/homeassistant/triggers/test_time_pattern.py b/tests/components/homeassistant/triggers/test_time_pattern.py index 7138fd7dd02..327623d373b 100644 --- a/tests/components/homeassistant/triggers/test_time_pattern.py +++ b/tests/components/homeassistant/triggers/test_time_pattern.py @@ -13,19 +13,23 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed, mock_component +from tests.common import async_fire_time_changed, async_mock_service, mock_component + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") @pytest.fixture(autouse=True) -def setup_comp(hass: HomeAssistant) -> None: +def setup_comp(hass): """Initialize components.""" mock_component(hass, "group") async def test_if_fires_when_hour_matches( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing if hour is matching.""" now = dt_util.utcnow() @@ -54,8 +58,7 @@ async def test_if_fires_when_hour_matches( async_fire_time_changed(hass, now.replace(year=now.year + 2, day=1, hour=0)) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["id"] == 0 + assert len(calls) == 1 await hass.services.async_call( automation.DOMAIN, @@ -63,17 +66,15 @@ async def test_if_fires_when_hour_matches( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 async_fire_time_changed(hass, now.replace(year=now.year + 1, day=1, hour=0)) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 1 + assert calls[0].data["id"] == 0 async def test_if_fires_when_minute_matches( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing if minutes are matching.""" now = dt_util.utcnow() @@ -100,13 +101,11 @@ async def test_if_fires_when_minute_matches( async_fire_time_changed(hass, now.replace(year=now.year + 2, day=1, minute=0)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_when_second_matches( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing if seconds are matching.""" now = dt_util.utcnow() @@ -133,13 +132,11 @@ async def test_if_fires_when_second_matches( 
async_fire_time_changed(hass, now.replace(year=now.year + 2, day=1, second=0)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_when_second_as_string_matches( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing if seconds are matching.""" now = dt_util.utcnow() @@ -168,13 +165,11 @@ async def test_if_fires_when_second_as_string_matches( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_when_all_matches( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing if everything matches.""" now = dt_util.utcnow() @@ -203,13 +198,11 @@ async def test_if_fires_when_all_matches( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_periodic_seconds( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing periodically every second.""" now = dt_util.utcnow() @@ -238,13 +231,11 @@ async def test_if_fires_periodic_seconds( ) await hass.async_block_till_done() - assert len(service_calls) >= 1 + assert len(calls) >= 1 async def test_if_fires_periodic_minutes( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing periodically every minute.""" @@ -274,13 +265,11 @@ async def test_if_fires_periodic_minutes( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_periodic_hours( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing periodically every hour.""" now = dt_util.utcnow() @@ -309,13 +298,11 @@ async def test_if_fires_periodic_hours( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_default_values( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - service_calls: list[ServiceCall], + hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] ) -> None: """Test for firing at 2 minutes every hour.""" now = dt_util.utcnow() @@ -339,24 +326,24 @@ async def test_default_values( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async_fire_time_changed( hass, now.replace(year=now.year + 2, day=1, hour=1, minute=2, second=1) ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async_fire_time_changed( hass, now.replace(year=now.year + 2, day=1, hour=2, minute=2, second=0) ) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 -async def test_invalid_schemas() -> None: +async def test_invalid_schemas(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test invalid schemas.""" schemas = ( None, diff --git a/tests/components/homeassistant_alerts/test_init.py 
b/tests/components/homeassistant_alerts/test_init.py index 0a38778bbee..444db019c7c 100644 --- a/tests/components/homeassistant_alerts/test_init.py +++ b/tests/components/homeassistant_alerts/test_init.py @@ -26,7 +26,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import WebSocketGenerator -def stub_alert(aioclient_mock: AiohttpClientMocker, alert_id) -> None: +def stub_alert(aioclient_mock, alert_id): """Stub an alert.""" aioclient_mock.get( f"https://alerts.home-assistant.io/alerts/{alert_id}.json", @@ -35,7 +35,7 @@ def stub_alert(aioclient_mock: AiohttpClientMocker, alert_id) -> None: @pytest.fixture(autouse=True) -async def setup_repairs(hass: HomeAssistant) -> None: +async def setup_repairs(hass): """Set up the repairs integration.""" assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) @@ -99,9 +99,9 @@ async def test_alerts( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, - ha_version: str, - supervisor_info: dict[str, str] | None, - expected_alerts: list[tuple[str, str]], + ha_version, + supervisor_info, + expected_alerts, ) -> None: """Test creating issues based on alerts.""" @@ -292,12 +292,12 @@ async def test_alerts_refreshed_on_component_load( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, - ha_version: str, - supervisor_info: dict[str, str] | None, - initial_components: list[str], - late_components: list[str], - initial_alerts: list[tuple[str, str]], - late_alerts: list[tuple[str, str]], + ha_version, + supervisor_info, + initial_components, + late_components, + initial_alerts, + late_alerts, freezer: FrozenDateTimeFactory, ) -> None: """Test alerts are refreshed when components are loaded.""" @@ -433,9 +433,9 @@ async def test_bad_alerts( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, - ha_version: str, - fixture: str, - expected_alerts: list[tuple[str, str]], + ha_version, + fixture, + expected_alerts, ) -> None: """Test creating issues based on alerts.""" fixture_content = load_fixture(fixture, "homeassistant_alerts") diff --git a/tests/components/homeassistant_hardware/conftest.py b/tests/components/homeassistant_hardware/conftest.py index ddf18305b2a..72e937396ea 100644 --- a/tests/components/homeassistant_hardware/conftest.py +++ b/tests/components/homeassistant_hardware/conftest.py @@ -1,17 +1,17 @@ """Test fixtures for the Home Assistant Hardware integration.""" -from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator @pytest.fixture(autouse=True) def mock_zha_config_flow_setup() -> Generator[None]: """Mock the radio connection and probing of the ZHA config flow.""" - def mock_probe(config: dict[str, Any]) -> dict[str, Any]: + def mock_probe(config: dict[str, Any]) -> None: # The radio probing will return the correct baudrate return {**config, "baudrate": 115200} @@ -47,3 +47,127 @@ def mock_zha_get_last_network_settings() -> Generator[None]: AsyncMock(return_value=None), ): yield + + +@pytest.fixture(name="addon_running") +def mock_addon_running(addon_store_info, addon_info): + """Mock add-on already running.""" + addon_store_info.return_value = { + "installed": "1.0.0", + "state": "started", + "version": "1.0.0", + } + addon_info.return_value["hostname"] = "core-silabs-multiprotocol" + addon_info.return_value["state"] = "started" + 
addon_info.return_value["version"] = "1.0.0" + return addon_info + + +@pytest.fixture(name="addon_installed") +def mock_addon_installed(addon_store_info, addon_info): + """Mock add-on already installed but not running.""" + addon_store_info.return_value = { + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["hostname"] = "core-silabs-multiprotocol" + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +@pytest.fixture(name="addon_store_info") +def addon_store_info_fixture(): + """Mock Supervisor add-on store info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" + ) as addon_store_info: + addon_store_info.return_value = { + "available": True, + "installed": None, + "state": None, + "version": "1.0.0", + } + yield addon_store_info + + +@pytest.fixture(name="addon_info") +def addon_info_fixture(): + """Mock Supervisor add-on info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_info", + ) as addon_info: + addon_info.return_value = { + "available": True, + "hostname": None, + "options": {}, + "state": None, + "update_available": False, + "version": None, + } + yield addon_info + + +@pytest.fixture(name="set_addon_options") +def set_addon_options_fixture(): + """Mock set add-on options.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_set_addon_options" + ) as set_options: + yield set_options + + +@pytest.fixture(name="install_addon_side_effect") +def install_addon_side_effect_fixture(addon_store_info, addon_info): + """Return the install add-on side effect.""" + + async def install_addon(hass, slug): + """Mock install add-on.""" + addon_store_info.return_value = { + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["hostname"] = "core-silabs-multiprotocol" + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + + return install_addon + + +@pytest.fixture(name="install_addon") +def mock_install_addon(install_addon_side_effect): + """Mock install add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_install_addon", + side_effect=install_addon_side_effect, + ) as install_addon: + yield install_addon + + +@pytest.fixture(name="start_addon") +def start_addon_fixture(): + """Mock start add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_start_addon" + ) as start_addon: + yield start_addon + + +@pytest.fixture(name="stop_addon") +def stop_addon_fixture(): + """Mock stop add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_stop_addon" + ) as stop_addon: + yield stop_addon + + +@pytest.fixture(name="uninstall_addon") +def uninstall_addon_fixture(): + """Mock uninstall add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_uninstall_addon" + ) as uninstall_addon: + yield uninstall_addon diff --git a/tests/components/homeassistant_hardware/test_config_flow.py b/tests/components/homeassistant_hardware/test_config_flow.py deleted file mode 100644 index 8b0995a67f3..00000000000 --- a/tests/components/homeassistant_hardware/test_config_flow.py +++ /dev/null @@ -1,679 +0,0 @@ -"""Test the Home Assistant hardware firmware config flow.""" - -import asyncio -from collections.abc import Awaitable, Callable, Generator, Iterator -import contextlib -from typing import Any -from unittest.mock import AsyncMock, Mock, 
call, patch - -import pytest -from universal_silabs_flasher.const import ApplicationType - -from homeassistant.components.hassio import AddonInfo, AddonState -from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( - STEP_PICK_FIRMWARE_THREAD, - STEP_PICK_FIRMWARE_ZIGBEE, - BaseFirmwareConfigFlow, - BaseFirmwareOptionsFlow, -) -from homeassistant.components.homeassistant_hardware.util import ( - get_otbr_addon_manager, - get_zigbee_flasher_addon_manager, -) -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult, OptionsFlow -from homeassistant.core import HomeAssistant, callback -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import ( - MockConfigEntry, - MockModule, - mock_config_flow, - mock_integration, - mock_platform, -) - -TEST_DOMAIN = "test_firmware_domain" -TEST_DEVICE = "/dev/SomeDevice123" -TEST_HARDWARE_NAME = "Some Hardware Name" - - -class FakeFirmwareConfigFlow(BaseFirmwareConfigFlow, domain=TEST_DOMAIN): - """Config flow for `test_firmware_domain`.""" - - VERSION = 1 - MINOR_VERSION = 2 - - @staticmethod - @callback - def async_get_options_flow( - config_entry: ConfigEntry, - ) -> OptionsFlow: - """Return the options flow.""" - return FakeFirmwareOptionsFlowHandler(config_entry) - - async def async_step_hardware( - self, data: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle hardware flow.""" - self._device = TEST_DEVICE - self._hardware_name = TEST_HARDWARE_NAME - - return await self.async_step_confirm() - - def _async_flow_finished(self) -> ConfigFlowResult: - """Create the config entry.""" - assert self._device is not None - assert self._hardware_name is not None - assert self._probed_firmware_type is not None - - return self.async_create_entry( - title=self._hardware_name, - data={ - "device": self._device, - "firmware": self._probed_firmware_type.value, - "hardware": self._hardware_name, - }, - ) - - -class FakeFirmwareOptionsFlowHandler(BaseFirmwareOptionsFlow): - """Options flow for `test_firmware_domain`.""" - - def __init__(self, *args: Any, **kwargs: Any) -> None: - """Instantiate options flow.""" - super().__init__(*args, **kwargs) - - self._device = self.config_entry.data["device"] - self._hardware_name = self.config_entry.data["hardware"] - - # Regenerate the translation placeholders - self._get_translation_placeholders() - - def _async_flow_finished(self) -> ConfigFlowResult: - """Create the config entry.""" - assert self._probed_firmware_type is not None - - self.hass.config_entries.async_update_entry( - entry=self.config_entry, - data={ - **self.config_entry.data, - "firmware": self._probed_firmware_type.value, - }, - options=self.config_entry.options, - ) - - return self.async_create_entry(title="", data={}) - - -@pytest.fixture(autouse=True) -def mock_test_firmware_platform( - hass: HomeAssistant, -) -> Generator[None]: - """Fixture for a test config flow.""" - mock_module = MockModule( - TEST_DOMAIN, async_setup_entry=AsyncMock(return_value=True) - ) - mock_integration(hass, mock_module) - mock_platform(hass, f"{TEST_DOMAIN}.config_flow") - - with mock_config_flow(TEST_DOMAIN, FakeFirmwareConfigFlow): - yield - - -@pytest.fixture(autouse=True) -async def fixture_mock_supervisor_client(supervisor_client: AsyncMock): - """Mock supervisor client in tests.""" - - -def delayed_side_effect() -> Callable[..., Awaitable[None]]: - """Slows down eager tasks by delaying for an event loop tick.""" - - async def side_effect(*args: Any, **kwargs: Any) -> None: - await 
asyncio.sleep(0) - - return side_effect - - -@contextlib.contextmanager -def mock_addon_info( - hass: HomeAssistant, - *, - is_hassio: bool = True, - app_type: ApplicationType = ApplicationType.EZSP, - otbr_addon_info: AddonInfo = AddonInfo( - available=True, - hostname=None, - options={}, - state=AddonState.NOT_INSTALLED, - update_available=False, - version=None, - ), - flasher_addon_info: AddonInfo = AddonInfo( - available=True, - hostname=None, - options={}, - state=AddonState.NOT_INSTALLED, - update_available=False, - version=None, - ), -) -> Iterator[tuple[Mock, Mock]]: - """Mock the main addon states for the config flow.""" - mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) - mock_flasher_manager.addon_name = "Silicon Labs Flasher" - mock_flasher_manager.async_start_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_flasher_manager.async_install_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_flasher_manager.async_get_addon_info.return_value = flasher_addon_info - - mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) - mock_otbr_manager.addon_name = "OpenThread Border Router" - mock_otbr_manager.async_install_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_otbr_manager.async_uninstall_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_otbr_manager.async_start_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_otbr_manager.async_get_addon_info.return_value = otbr_addon_info - - with ( - patch( - "homeassistant.components.homeassistant_hardware.firmware_config_flow.get_otbr_addon_manager", - return_value=mock_otbr_manager, - ), - patch( - "homeassistant.components.homeassistant_hardware.firmware_config_flow.get_zigbee_flasher_addon_manager", - return_value=mock_flasher_manager, - ), - patch( - "homeassistant.components.homeassistant_hardware.firmware_config_flow.is_hassio", - return_value=is_hassio, - ), - patch( - "homeassistant.components.homeassistant_hardware.firmware_config_flow.probe_silabs_firmware_type", - return_value=app_type, - ), - ): - yield mock_otbr_manager, mock_flasher_manager - - -async def test_config_flow_zigbee(hass: HomeAssistant) -> None: - """Test the config flow.""" - result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} - ) - - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "pick_firmware" - - with mock_addon_info( - hass, - app_type=ApplicationType.SPINEL, - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu option: we are now installing the addon - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert result["step_id"] == "install_zigbee_flasher_addon" - assert result["description_placeholders"]["firmware_type"] == "spinel" - - await hass.async_block_till_done(wait_background_tasks=True) - - # Progress the flow, we are now configuring the addon and running it - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "run_zigbee_flasher_addon" - assert result["progress_action"] == "run_zigbee_flasher_addon" 
- assert mock_flasher_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": TEST_DEVICE, - "baudrate": 115200, - "bootloader_baudrate": 115200, - "flow_control": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # Progress the flow, we are now uninstalling the addon - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "uninstall_zigbee_flasher_addon" - assert result["progress_action"] == "uninstall_zigbee_flasher_addon" - - await hass.async_block_till_done(wait_background_tasks=True) - - # We are finally done with the addon - assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_zigbee" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - config_entry = result["result"] - assert config_entry.data == { - "firmware": "ezsp", - "device": TEST_DEVICE, - "hardware": TEST_HARDWARE_NAME, - } - - # Ensure a ZHA discovery flow has been created - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - zha_flow = flows[0] - assert zha_flow["handler"] == "zha" - assert zha_flow["context"]["source"] == "hardware" - assert zha_flow["step_id"] == "confirm" - - -async def test_config_flow_zigbee_skip_step_if_installed(hass: HomeAssistant) -> None: - """Test the config flow, skip installing the addon if necessary.""" - result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} - ) - - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "pick_firmware" - - with mock_addon_info( - hass, - app_type=ApplicationType.SPINEL, - flasher_addon_info=AddonInfo( - available=True, - hostname=None, - options={ - "device": "", - "baudrate": 115200, - "bootloader_baudrate": 115200, - "flow_control": True, - }, - state=AddonState.NOT_RUNNING, - update_available=False, - version="1.2.3", - ), - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu option: we skip installation, instead we directly run it - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, - ) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "run_zigbee_flasher_addon" - assert result["progress_action"] == "run_zigbee_flasher_addon" - assert result["description_placeholders"]["firmware_type"] == "spinel" - assert mock_flasher_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": TEST_DEVICE, - "baudrate": 115200, - "bootloader_baudrate": 115200, - "flow_control": True, - } - ) - ] - - # Uninstall the addon - await hass.async_block_till_done(wait_background_tasks=True) - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - # Done - await hass.async_block_till_done(wait_background_tasks=True) - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_zigbee" - - -async def test_config_flow_thread(hass: HomeAssistant) -> None: - """Test the config flow.""" - result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": 
"hardware"} - ) - - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "pick_firmware" - - with mock_addon_info( - hass, - app_type=ApplicationType.EZSP, - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu option - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, - ) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert result["step_id"] == "install_otbr_addon" - assert result["description_placeholders"]["firmware_type"] == "ezsp" - assert result["description_placeholders"]["model"] == TEST_HARDWARE_NAME - - await hass.async_block_till_done(wait_background_tasks=True) - - mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={ - "device": "", - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - }, - state=AddonState.NOT_RUNNING, - update_available=False, - version="1.2.3", - ) - - # Progress the flow, it is now configuring the addon and running it - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_otbr_addon" - assert result["progress_action"] == "start_otbr_addon" - - assert mock_otbr_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": TEST_DEVICE, - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # The addon is now running - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_otbr" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - config_entry = result["result"] - assert config_entry.data == { - "firmware": "spinel", - "device": TEST_DEVICE, - "hardware": TEST_HARDWARE_NAME, - } - - -async def test_config_flow_thread_addon_already_installed(hass: HomeAssistant) -> None: - """Test the Thread config flow, addon is already installed.""" - result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} - ) - - with mock_addon_info( - hass, - app_type=ApplicationType.EZSP, - otbr_addon_info=AddonInfo( - available=True, - hostname=None, - options={}, - state=AddonState.NOT_RUNNING, - update_available=False, - version=None, - ), - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu option - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_otbr_addon" - assert result["progress_action"] == "start_otbr_addon" - - assert mock_otbr_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": TEST_DEVICE, - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # The addon is now running - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_otbr" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], 
user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_config_flow_zigbee_not_hassio(hass: HomeAssistant) -> None: - """Test when the stick is used with a non-hassio setup.""" - result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} - ) - - with mock_addon_info( - hass, - is_hassio=False, - app_type=ApplicationType.EZSP, - ) as (mock_otbr_manager, mock_flasher_manager): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_zigbee" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - config_entry = result["result"] - assert config_entry.data == { - "firmware": "ezsp", - "device": TEST_DEVICE, - "hardware": TEST_HARDWARE_NAME, - } - - # Ensure a ZHA discovery flow has been created - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - zha_flow = flows[0] - assert zha_flow["handler"] == "zha" - assert zha_flow["context"]["source"] == "hardware" - assert zha_flow["step_id"] == "confirm" - - -async def test_options_flow_zigbee_to_thread(hass: HomeAssistant) -> None: - """Test the options flow, migrating Zigbee to Thread.""" - config_entry = MockConfigEntry( - domain=TEST_DOMAIN, - data={ - "firmware": "ezsp", - "device": TEST_DEVICE, - "hardware": TEST_HARDWARE_NAME, - }, - version=1, - minor_version=2, - ) - config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(config_entry.entry_id) - - # First step is confirmation - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "pick_firmware" - assert result["description_placeholders"]["firmware_type"] == "ezsp" - assert result["description_placeholders"]["model"] == TEST_HARDWARE_NAME - - with mock_addon_info( - hass, - app_type=ApplicationType.EZSP, - ) as (mock_otbr_manager, mock_flasher_manager): - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, - ) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert result["step_id"] == "install_otbr_addon" - - await hass.async_block_till_done(wait_background_tasks=True) - - mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={ - "device": "", - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - }, - state=AddonState.NOT_RUNNING, - update_available=False, - version="1.2.3", - ) - - # Progress the flow, it is now configuring the addon and running it - result = await hass.config_entries.options.async_configure(result["flow_id"]) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_otbr_addon" - assert result["progress_action"] == "start_otbr_addon" - - assert mock_otbr_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": TEST_DEVICE, - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # The addon is now running - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert 
result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_otbr" - - # We are now done - result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - # The firmware type has been updated - assert config_entry.data["firmware"] == "spinel" - - -async def test_options_flow_thread_to_zigbee(hass: HomeAssistant) -> None: - """Test the options flow, migrating Thread to Zigbee.""" - config_entry = MockConfigEntry( - domain=TEST_DOMAIN, - data={ - "firmware": "spinel", - "device": TEST_DEVICE, - "hardware": TEST_HARDWARE_NAME, - }, - version=1, - minor_version=2, - ) - config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(config_entry.entry_id) - - # First step is confirmation - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "pick_firmware" - assert result["description_placeholders"]["firmware_type"] == "spinel" - assert result["description_placeholders"]["model"] == TEST_HARDWARE_NAME - - with mock_addon_info( - hass, - app_type=ApplicationType.SPINEL, - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu option: we are now installing the addon - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert result["step_id"] == "install_zigbee_flasher_addon" - - await hass.async_block_till_done(wait_background_tasks=True) - - # Progress the flow, we are now configuring the addon and running it - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "run_zigbee_flasher_addon" - assert result["progress_action"] == "run_zigbee_flasher_addon" - assert mock_flasher_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": TEST_DEVICE, - "baudrate": 115200, - "bootloader_baudrate": 115200, - "flow_control": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # Progress the flow, we are now uninstalling the addon - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "uninstall_zigbee_flasher_addon" - assert result["progress_action"] == "uninstall_zigbee_flasher_addon" - - await hass.async_block_till_done(wait_background_tasks=True) - - # We are finally done with the addon - assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_zigbee" - - # We are now done - result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - # The firmware type has been updated - assert config_entry.data["firmware"] == "ezsp" diff --git a/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py b/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py index 22e3e338986..1df8fa86cf9 100644 --- a/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py +++ 
b/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py @@ -2,15 +2,14 @@ from __future__ import annotations -from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch -from aiohasupervisor import SupervisorError -from aiohasupervisor.models import AddonsOptions import pytest +from typing_extensions import Generator from homeassistant.components.hassio import AddonError, AddonInfo, AddonState, HassIO +from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.homeassistant_hardware import silabs_multiprotocol_addon from homeassistant.components.zha import DOMAIN as ZHA_DOMAIN from homeassistant.config_entries import ConfigEntry, ConfigFlow @@ -33,11 +32,6 @@ TEST_DOMAIN = "test" TEST_DOMAIN_2 = "test_2" -@pytest.fixture(autouse=True) -def mock_supervisor_client(supervisor_client: AsyncMock) -> None: - """Mock supervisor client.""" - - class FakeConfigFlow(ConfigFlow): """Handle a config flow for the silabs multiprotocol add-on.""" @@ -247,25 +241,26 @@ async def test_option_flow_install_multi_pan_addon( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with("core_silabs_multiprotocol") + install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( + hass, "core_silabs_multiprotocol", - AddonsOptions( - config={ + { + "options": { "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - ), + }, ) await hass.async_block_till_done() - start_addon.assert_called_once_with("core_silabs_multiprotocol") + start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -321,7 +316,7 @@ async def test_option_flow_install_multi_pan_addon_zha( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with("core_silabs_multiprotocol") + install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") multipan_manager = await silabs_multiprotocol_addon.get_multiprotocol_addon_manager( hass @@ -335,15 +330,16 @@ async def test_option_flow_install_multi_pan_addon_zha( assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( + hass, "core_silabs_multiprotocol", - AddonsOptions( - config={ + { + "options": { "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - ), + }, ) # Check the channel is initialized from ZHA assert multipan_manager._channel == 11 @@ -359,7 +355,7 @@ async def test_option_flow_install_multi_pan_addon_zha( assert zha_config_entry.title == "Test Multiprotocol" await hass.async_block_till_done() - start_addon.assert_called_once_with("core_silabs_multiprotocol") + start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -415,26 +411,27 @@ async def test_option_flow_install_multi_pan_addon_zha_other_radio( assert result["progress_action"] == 
"install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with("core_silabs_multiprotocol") + install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - addon_info.return_value.hostname = "core-silabs-multiprotocol" + addon_info.return_value["hostname"] = "core-silabs-multiprotocol" result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( + hass, "core_silabs_multiprotocol", - AddonsOptions( - config={ + { + "options": { "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - ), + }, ) await hass.async_block_till_done() - start_addon.assert_called_once_with("core_silabs_multiprotocol") + start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -450,10 +447,6 @@ async def test_option_flow_install_multi_pan_addon_zha_other_radio( } -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.options.abort.not_hassio"], -) async def test_option_flow_non_hassio( hass: HomeAssistant, ) -> None: @@ -515,7 +508,7 @@ async def test_option_flow_addon_installed_same_device_reconfigure_unexpected_us ) -> None: """Test reconfiguring the multi pan addon.""" - addon_info.return_value.options["device"] = "/dev/ttyTEST123" + addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" multipan_manager = await silabs_multiprotocol_addon.get_multiprotocol_addon_manager( hass @@ -574,7 +567,7 @@ async def test_option_flow_addon_installed_same_device_reconfigure_expected_user ) -> None: """Test reconfiguring the multi pan addon.""" - addon_info.return_value.options["device"] = "/dev/ttyTEST123" + addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" multipan_manager = await silabs_multiprotocol_addon.get_multiprotocol_addon_manager( hass @@ -645,7 +638,7 @@ async def test_option_flow_addon_installed_same_device_uninstall( ) -> None: """Test uninstalling the multi pan addon.""" - addon_info.return_value.options["device"] = "/dev/ttyTEST123" + addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -679,8 +672,11 @@ async def test_option_flow_addon_installed_same_device_uninstall( assert result["step_id"] == "uninstall_addon" # Make sure the flasher addon is installed - addon_store_info.return_value.installed = False - addon_store_info.return_Value.available = True + addon_store_info.return_value = { + "installed": None, + "available": True, + "state": "not_installed", + } result = await hass.config_entries.options.async_configure( result["flow_id"], {silabs_multiprotocol_addon.CONF_DISABLE_MULTI_PAN: True} @@ -698,7 +694,7 @@ async def test_option_flow_addon_installed_same_device_uninstall( assert result["progress_action"] == "uninstall_multiprotocol_addon" await hass.async_block_till_done() - uninstall_addon.assert_called_once_with("core_silabs_multiprotocol") + uninstall_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -707,7 +703,7 @@ async def test_option_flow_addon_installed_same_device_uninstall( assert result["description_placeholders"] == {"addon_name": "Silicon Labs 
Flasher"} await hass.async_block_till_done() - install_addon.assert_called_once_with("core_silabs_flasher") + install_addon.assert_called_once_with(hass, "core_silabs_flasher") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -737,7 +733,7 @@ async def test_option_flow_addon_installed_same_device_do_not_uninstall_multi_pa ) -> None: """Test uninstalling the multi pan addon.""" - addon_info.return_value.options["device"] = "/dev/ttyTEST123" + addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -766,10 +762,6 @@ async def test_option_flow_addon_installed_same_device_do_not_uninstall_multi_pa assert result["type"] is FlowResultType.CREATE_ENTRY -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.options.abort.addon_already_running"], -) async def test_option_flow_flasher_already_running_failure( hass: HomeAssistant, addon_info, @@ -784,7 +776,7 @@ async def test_option_flow_flasher_already_running_failure( ) -> None: """Test uninstalling the multi pan addon but with the flasher addon running.""" - addon_info.return_value.options["device"] = "/dev/ttyTEST123" + addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -807,8 +799,8 @@ async def test_option_flow_flasher_already_running_failure( assert result["step_id"] == "uninstall_addon" # The flasher addon is already installed and running, this is bad - addon_store_info.return_value.installed = True - addon_info.return_value.state = "started" + addon_store_info.return_value["installed"] = True + addon_info.return_value["state"] = "started" result = await hass.config_entries.options.async_configure( result["flow_id"], {silabs_multiprotocol_addon.CONF_DISABLE_MULTI_PAN: True} @@ -831,7 +823,7 @@ async def test_option_flow_addon_installed_same_device_flasher_already_installed ) -> None: """Test uninstalling the multi pan addon.""" - addon_info.return_value.options["device"] = "/dev/ttyTEST123" + addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -853,8 +845,11 @@ async def test_option_flow_addon_installed_same_device_flasher_already_installed assert result["type"] is FlowResultType.FORM assert result["step_id"] == "uninstall_addon" - addon_store_info.return_value.installed = True - addon_store_info.return_value.available = True + addon_store_info.return_value = { + "installed": True, + "available": True, + "state": "not_running", + } result = await hass.config_entries.options.async_configure( result["flow_id"], {silabs_multiprotocol_addon.CONF_DISABLE_MULTI_PAN: True} @@ -864,7 +859,7 @@ async def test_option_flow_addon_installed_same_device_flasher_already_installed assert result["progress_action"] == "uninstall_multiprotocol_addon" await hass.async_block_till_done() - uninstall_addon.assert_called_once_with("core_silabs_multiprotocol") + uninstall_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -872,8 +867,11 @@ async def test_option_flow_addon_installed_same_device_flasher_already_installed assert result["progress_action"] == "start_flasher_addon" assert result["description_placeholders"] == {"addon_name": "Silicon Labs Flasher"} - addon_store_info.return_value.installed = True - 
addon_store_info.return_value.available = True + addon_store_info.return_value = { + "installed": True, + "available": True, + "state": "not_running", + } await hass.async_block_till_done() install_addon.assert_not_called() @@ -881,10 +879,6 @@ async def test_option_flow_addon_installed_same_device_flasher_already_installed assert result["type"] is FlowResultType.CREATE_ENTRY -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.options.abort.addon_install_failed"], -) async def test_option_flow_flasher_install_failure( hass: HomeAssistant, addon_info, @@ -899,7 +893,7 @@ async def test_option_flow_flasher_install_failure( ) -> None: """Test uninstalling the multi pan addon, case where flasher addon fails.""" - addon_info.return_value.options["device"] = "/dev/ttyTEST123" + addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -932,8 +926,11 @@ async def test_option_flow_flasher_install_failure( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "uninstall_addon" - addon_store_info.return_value.installed = False - addon_store_info.return_value.available = True + addon_store_info.return_value = { + "installed": None, + "available": True, + "state": "not_installed", + } install_addon.side_effect = [AddonError()] result = await hass.config_entries.options.async_configure( result["flow_id"], {silabs_multiprotocol_addon.CONF_DISABLE_MULTI_PAN: True} @@ -944,17 +941,13 @@ async def test_option_flow_flasher_install_failure( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with("core_silabs_flasher") + install_addon.assert_called_once_with(hass, "core_silabs_flasher") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.options.abort.addon_start_failed"], -) async def test_option_flow_flasher_addon_flash_failure( hass: HomeAssistant, addon_info, @@ -969,7 +962,7 @@ async def test_option_flow_flasher_addon_flash_failure( ) -> None: """Test where flasher addon fails to flash Zigbee firmware.""" - addon_info.return_value.options["device"] = "/dev/ttyTEST123" + addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -998,10 +991,10 @@ async def test_option_flow_flasher_addon_flash_failure( assert result["step_id"] == "uninstall_multiprotocol_addon" assert result["progress_action"] == "uninstall_multiprotocol_addon" - start_addon.side_effect = SupervisorError("Boom") + start_addon.side_effect = HassioAPIError("Boom") await hass.async_block_till_done() - uninstall_addon.assert_called_once_with("core_silabs_multiprotocol") + uninstall_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1017,10 +1010,6 @@ async def test_option_flow_flasher_addon_flash_failure( assert result["description_placeholders"]["addon_name"] == "Silicon Labs Flasher" -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.options.abort.zha_migration_failed"], -) @patch( "homeassistant.components.zha.radio_manager.ZhaMultiPANMigrationHelper.async_initiate_migration", side_effect=Exception("Boom!"), @@ -1040,7 +1029,7 @@ async def 
test_option_flow_uninstall_migration_initiate_failure( ) -> None: """Test uninstalling the multi pan addon, case where ZHA migration init fails.""" - addon_info.return_value.options["device"] = "/dev/ttyTEST123" + addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -1082,10 +1071,6 @@ async def test_option_flow_uninstall_migration_initiate_failure( mock_initiate_migration.assert_called_once() -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.options.abort.zha_migration_failed"], -) @patch( "homeassistant.components.zha.radio_manager.ZhaMultiPANMigrationHelper.async_finish_migration", side_effect=Exception("Boom!"), @@ -1105,7 +1090,7 @@ async def test_option_flow_uninstall_migration_finish_failure( ) -> None: """Test uninstalling the multi pan addon, case where ZHA migration init fails.""" - addon_info.return_value.options["device"] = "/dev/ttyTEST123" + addon_info.return_value["options"]["device"] = "/dev/ttyTEST123" # Setup the config entry config_entry = MockConfigEntry( @@ -1143,7 +1128,7 @@ async def test_option_flow_uninstall_migration_finish_failure( ) await hass.async_block_till_done() - uninstall_addon.assert_called_once_with("core_silabs_multiprotocol") + uninstall_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1187,10 +1172,6 @@ async def test_option_flow_do_not_install_multi_pan_addon( assert result["type"] is FlowResultType.CREATE_ENTRY -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.options.abort.addon_install_failed"], -) async def test_option_flow_install_multi_pan_addon_install_fails( hass: HomeAssistant, addon_store_info, @@ -1201,7 +1182,7 @@ async def test_option_flow_install_multi_pan_addon_install_fails( ) -> None: """Test installing the multi pan addon.""" - install_addon.side_effect = SupervisorError("Boom") + install_addon.side_effect = HassioAPIError("Boom") # Setup the config entry config_entry = MockConfigEntry( @@ -1227,17 +1208,13 @@ async def test_option_flow_install_multi_pan_addon_install_fails( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with("core_silabs_multiprotocol") + install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.options.abort.addon_start_failed"], -) async def test_option_flow_install_multi_pan_addon_start_fails( hass: HomeAssistant, addon_store_info, @@ -1248,7 +1225,7 @@ async def test_option_flow_install_multi_pan_addon_start_fails( ) -> None: """Test installing the multi pan addon.""" - start_addon.side_effect = SupervisorError("Boom") + start_addon.side_effect = HassioAPIError("Boom") # Setup the config entry config_entry = MockConfigEntry( @@ -1274,35 +1251,32 @@ async def test_option_flow_install_multi_pan_addon_start_fails( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with("core_silabs_multiprotocol") + install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert 
result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( + hass, "core_silabs_multiprotocol", - AddonsOptions( - config={ + { + "options": { "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - ), + }, ) await hass.async_block_till_done() - start_addon.assert_called_once_with("core_silabs_multiprotocol") + start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_start_failed" -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.options.abort.addon_set_config_failed"], -) async def test_option_flow_install_multi_pan_addon_set_options_fails( hass: HomeAssistant, addon_store_info, @@ -1313,7 +1287,7 @@ async def test_option_flow_install_multi_pan_addon_set_options_fails( ) -> None: """Test installing the multi pan addon.""" - set_addon_options.side_effect = SupervisorError("Boom") + set_addon_options.side_effect = HassioAPIError("Boom") # Setup the config entry config_entry = MockConfigEntry( @@ -1339,17 +1313,13 @@ async def test_option_flow_install_multi_pan_addon_set_options_fails( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with("core_silabs_multiprotocol") + install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_set_config_failed" -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.options.abort.addon_info_failed"], -) async def test_option_flow_addon_info_fails( hass: HomeAssistant, addon_store_info, @@ -1357,7 +1327,7 @@ async def test_option_flow_addon_info_fails( ) -> None: """Test installing the multi pan addon.""" - addon_store_info.side_effect = SupervisorError("Boom") + addon_store_info.side_effect = HassioAPIError("Boom") # Setup the config entry config_entry = MockConfigEntry( @@ -1373,10 +1343,6 @@ async def test_option_flow_addon_info_fails( assert result["reason"] == "addon_info_failed" -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.options.abort.zha_migration_failed"], -) @patch( "homeassistant.components.zha.radio_manager.ZhaMultiPANMigrationHelper.async_initiate_migration", side_effect=Exception("Boom!"), @@ -1424,7 +1390,7 @@ async def test_option_flow_install_multi_pan_addon_zha_migration_fails_step_1( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with("core_silabs_multiprotocol") + install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT @@ -1432,10 +1398,6 @@ async def test_option_flow_install_multi_pan_addon_zha_migration_fails_step_1( set_addon_options.assert_not_called() -@pytest.mark.parametrize( - "ignore_translations", - ["component.test.options.abort.zha_migration_failed"], -) @patch( "homeassistant.components.zha.radio_manager.ZhaMultiPANMigrationHelper.async_finish_migration", side_effect=Exception("Boom!"), @@ -1484,25 +1446,26 @@ async def test_option_flow_install_multi_pan_addon_zha_migration_fails_step_2( assert result["progress_action"] == 
"install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with("core_silabs_multiprotocol") + install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( + hass, "core_silabs_multiprotocol", - AddonsOptions( - config={ + { + "options": { "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - ), + }, ) await hass.async_block_till_done() - start_addon.assert_called_once_with("core_silabs_multiprotocol") + start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT @@ -1663,7 +1626,7 @@ async def test_check_multi_pan_addon_info_error( ) -> None: """Test `check_multi_pan_addon` where the addon info cannot be read.""" - addon_store_info.side_effect = SupervisorError("Boom") + addon_store_info.side_effect = HassioAPIError("Boom") with pytest.raises(HomeAssistantError): await silabs_multiprotocol_addon.check_multi_pan_addon(hass) @@ -1699,15 +1662,18 @@ async def test_check_multi_pan_addon_auto_start( ) -> None: """Test `check_multi_pan_addon` auto starting the addon.""" - addon_info.return_value.state = "not_running" - addon_store_info.return_value.installed = True - addon_store_info.return_value.available = True + addon_info.return_value["state"] = "not_running" + addon_store_info.return_value = { + "installed": True, + "available": True, + "state": "not_running", + } # An error is raised even if we auto-start with pytest.raises(HomeAssistantError): await silabs_multiprotocol_addon.check_multi_pan_addon(hass) - start_addon.assert_called_once_with("core_silabs_multiprotocol") + start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") async def test_check_multi_pan_addon( @@ -1715,9 +1681,12 @@ async def test_check_multi_pan_addon( ) -> None: """Test `check_multi_pan_addon`.""" - addon_info.return_value.state = "started" - addon_store_info.return_value.installed = True - addon_store_info.return_value.available = True + addon_info.return_value["state"] = "started" + addon_store_info.return_value = { + "installed": True, + "available": True, + "state": "running", + } await silabs_multiprotocol_addon.check_multi_pan_addon(hass) start_addon.assert_not_called() @@ -1743,9 +1712,12 @@ async def test_multi_pan_addon_using_device_not_running( ) -> None: """Test `multi_pan_addon_using_device` when the addon isn't running.""" - addon_info.return_value.state = "not_running" - addon_store_info.return_value.installed = True - addon_store_info.return_value.available = True + addon_info.return_value["state"] = "not_running" + addon_store_info.return_value = { + "installed": True, + "available": True, + "state": "not_running", + } assert ( await silabs_multiprotocol_addon.multi_pan_addon_using_device( @@ -1768,15 +1740,18 @@ async def test_multi_pan_addon_using_device( ) -> None: """Test `multi_pan_addon_using_device` when the addon isn't running.""" - addon_info.return_value.state = "started" - addon_info.return_value.options = { + addon_info.return_value["state"] = "started" + addon_info.return_value["options"] = { "autoflash_firmware": True, "device": options_device, "baudrate": "115200", "flow_control": True, } - addon_store_info.return_value.installed = True 
- addon_store_info.return_value.available = True + addon_store_info.return_value = { + "installed": True, + "available": True, + "state": "running", + } assert ( await silabs_multiprotocol_addon.multi_pan_addon_using_device( diff --git a/tests/components/homeassistant_hardware/test_util.py b/tests/components/homeassistant_hardware/test_util.py deleted file mode 100644 index 4a30a39686f..00000000000 --- a/tests/components/homeassistant_hardware/test_util.py +++ /dev/null @@ -1,158 +0,0 @@ -"""Test hardware utilities.""" - -from unittest.mock import AsyncMock, patch - -from universal_silabs_flasher.const import ApplicationType - -from homeassistant.components.hassio import AddonError, AddonInfo, AddonState -from homeassistant.components.homeassistant_hardware.util import ( - FirmwareGuess, - get_zha_device_path, - guess_firmware_type, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - -ZHA_CONFIG_ENTRY = MockConfigEntry( - domain="zha", - unique_id="some_unique_id", - data={ - "device": { - "path": "socket://1.2.3.4:5678", - "baudrate": 115200, - "flow_control": None, - }, - "radio_type": "ezsp", - }, - version=4, -) - - -def test_get_zha_device_path() -> None: - """Test extracting the ZHA device path from its config entry.""" - assert ( - get_zha_device_path(ZHA_CONFIG_ENTRY) == ZHA_CONFIG_ENTRY.data["device"]["path"] - ) - - -def test_get_zha_device_path_ignored_discovery() -> None: - """Test extracting the ZHA device path from an ignored ZHA discovery.""" - config_entry = MockConfigEntry( - domain="zha", - unique_id="some_unique_id", - data={}, - version=4, - ) - - assert get_zha_device_path(config_entry) is None - - -async def test_guess_firmware_type_unknown(hass: HomeAssistant) -> None: - """Test guessing the firmware type.""" - - assert (await guess_firmware_type(hass, "/dev/missing")) == FirmwareGuess( - is_running=False, firmware_type=ApplicationType.EZSP, source="unknown" - ) - - -async def test_guess_firmware_type(hass: HomeAssistant) -> None: - """Test guessing the firmware.""" - path = ZHA_CONFIG_ENTRY.data["device"]["path"] - - ZHA_CONFIG_ENTRY.add_to_hass(hass) - - ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.NOT_LOADED) - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=False, firmware_type=ApplicationType.EZSP, source="zha" - ) - - # When ZHA is running, we indicate as such when guessing - ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.LOADED) - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.EZSP, source="zha" - ) - - mock_otbr_addon_manager = AsyncMock() - mock_multipan_addon_manager = AsyncMock() - - with ( - patch( - "homeassistant.components.homeassistant_hardware.util.is_hassio", - return_value=True, - ), - patch( - "homeassistant.components.homeassistant_hardware.util.get_otbr_addon_manager", - return_value=mock_otbr_addon_manager, - ), - patch( - "homeassistant.components.homeassistant_hardware.util.get_multiprotocol_addon_manager", - return_value=mock_multipan_addon_manager, - ), - ): - mock_otbr_addon_manager.async_get_addon_info.side_effect = AddonError() - mock_multipan_addon_manager.async_get_addon_info.side_effect = AddonError() - - # Hassio errors are ignored and we still go with ZHA - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.EZSP, source="zha" - ) - - 
mock_otbr_addon_manager.async_get_addon_info.side_effect = None - mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={"device": "/some/other/device"}, - state=AddonState.RUNNING, - update_available=False, - version="1.0.0", - ) - - # We will prefer ZHA, as it is running (and actually pointing to the device) - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.EZSP, source="zha" - ) - - mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={"device": path}, - state=AddonState.NOT_RUNNING, - update_available=False, - version="1.0.0", - ) - - # We will still prefer ZHA, as it is the one actually running - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.EZSP, source="zha" - ) - - mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={"device": path}, - state=AddonState.RUNNING, - update_available=False, - version="1.0.0", - ) - - # Finally, ZHA loses out to OTBR - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.SPINEL, source="otbr" - ) - - mock_multipan_addon_manager.async_get_addon_info.side_effect = None - mock_multipan_addon_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={"device": path}, - state=AddonState.RUNNING, - update_available=False, - version="1.0.0", - ) - - # Which will lose out to multi-PAN - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.CPC, source="multiprotocol" - ) diff --git a/tests/components/homeassistant_sky_connect/conftest.py b/tests/components/homeassistant_sky_connect/conftest.py index c5bfa4bd609..099582999d5 100644 --- a/tests/components/homeassistant_sky_connect/conftest.py +++ b/tests/components/homeassistant_sky_connect/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for the Home Assistant SkyConnect integration.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator @pytest.fixture(name="mock_usb_serial_by_id", autouse=True) @@ -47,3 +47,127 @@ def mock_zha_get_last_network_settings() -> Generator[None]: AsyncMock(return_value=None), ): yield + + +@pytest.fixture(name="addon_running") +def mock_addon_running(addon_store_info, addon_info): + """Mock add-on already running.""" + addon_store_info.return_value = { + "installed": "1.0.0", + "state": "started", + "version": "1.0.0", + } + addon_info.return_value["hostname"] = "core-silabs-multiprotocol" + addon_info.return_value["state"] = "started" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +@pytest.fixture(name="addon_installed") +def mock_addon_installed(addon_store_info, addon_info): + """Mock add-on already installed but not running.""" + addon_store_info.return_value = { + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["hostname"] = "core-silabs-multiprotocol" + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +@pytest.fixture(name="addon_store_info") +def addon_store_info_fixture(): + """Mock Supervisor add-on store info.""" + with patch( + 
"homeassistant.components.hassio.addon_manager.async_get_addon_store_info" + ) as addon_store_info: + addon_store_info.return_value = { + "available": True, + "installed": None, + "state": None, + "version": "1.0.0", + } + yield addon_store_info + + +@pytest.fixture(name="addon_info") +def addon_info_fixture(): + """Mock Supervisor add-on info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_info", + ) as addon_info: + addon_info.return_value = { + "available": True, + "hostname": None, + "options": {}, + "state": None, + "update_available": False, + "version": None, + } + yield addon_info + + +@pytest.fixture(name="set_addon_options") +def set_addon_options_fixture(): + """Mock set add-on options.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_set_addon_options" + ) as set_options: + yield set_options + + +@pytest.fixture(name="install_addon_side_effect") +def install_addon_side_effect_fixture(addon_store_info, addon_info): + """Return the install add-on side effect.""" + + async def install_addon(hass, slug): + """Mock install add-on.""" + addon_store_info.return_value = { + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["hostname"] = "core-silabs-multiprotocol" + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + + return install_addon + + +@pytest.fixture(name="install_addon") +def mock_install_addon(install_addon_side_effect): + """Mock install add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_install_addon", + side_effect=install_addon_side_effect, + ) as install_addon: + yield install_addon + + +@pytest.fixture(name="start_addon") +def start_addon_fixture(): + """Mock start add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_start_addon" + ) as start_addon: + yield start_addon + + +@pytest.fixture(name="stop_addon") +def stop_addon_fixture(): + """Mock stop add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_stop_addon" + ) as stop_addon: + yield stop_addon + + +@pytest.fixture(name="uninstall_addon") +def uninstall_addon_fixture(): + """Mock uninstall add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_uninstall_addon" + ) as uninstall_addon: + yield uninstall_addon diff --git a/tests/components/homeassistant_sky_connect/test_config_flow.py b/tests/components/homeassistant_sky_connect/test_config_flow.py index 055b6347267..a4b7b4fb81d 100644 --- a/tests/components/homeassistant_sky_connect/test_config_flow.py +++ b/tests/components/homeassistant_sky_connect/test_config_flow.py @@ -1,20 +1,30 @@ """Test the Home Assistant SkyConnect config flow.""" -from unittest.mock import Mock, patch +import asyncio +from collections.abc import Awaitable, Callable +import contextlib +from typing import Any +from unittest.mock import AsyncMock, Mock, call, patch import pytest +from universal_silabs_flasher.const import ApplicationType from homeassistant.components import usb -from homeassistant.components.hassio import AddonInfo, AddonState -from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( - STEP_PICK_FIRMWARE_ZIGBEE, -) +from homeassistant.components.hassio.addon_manager import AddonInfo, AddonState from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( CONF_DISABLE_MULTI_PAN, get_flasher_addon_manager, get_multiprotocol_addon_manager, ) +from 
homeassistant.components.homeassistant_sky_connect.config_flow import ( + STEP_PICK_FIRMWARE_THREAD, + STEP_PICK_FIRMWARE_ZIGBEE, +) from homeassistant.components.homeassistant_sky_connect.const import DOMAIN +from homeassistant.components.homeassistant_sky_connect.util import ( + get_otbr_addon_manager, + get_zigbee_flasher_addon_manager, +) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -39,6 +49,86 @@ USB_DATA_ZBT1 = usb.UsbServiceInfo( ) +def delayed_side_effect() -> Callable[..., Awaitable[None]]: + """Slows down eager tasks by delaying for an event loop tick.""" + + async def side_effect(*args: Any, **kwargs: Any) -> None: + await asyncio.sleep(0) + + return side_effect + + +@contextlib.contextmanager +def mock_addon_info( + hass: HomeAssistant, + *, + is_hassio: bool = True, + app_type: ApplicationType = ApplicationType.EZSP, + otbr_addon_info: AddonInfo = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ), + flasher_addon_info: AddonInfo = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ), +): + """Mock the main addon states for the config flow.""" + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_get_addon_info.return_value = flasher_addon_info + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_get_addon_info.return_value = otbr_addon_info + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=is_hassio, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=app_type, + ), + ): + yield mock_otbr_manager, mock_flasher_manager + + @pytest.mark.parametrize( ("usb_data", "model"), [ @@ -46,7 +136,7 @@ USB_DATA_ZBT1 = usb.UsbServiceInfo( (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), ], ) -async def test_config_flow( +async def test_config_flow_zigbee( usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: """Test the config flow for SkyConnect.""" @@ -56,21 +146,59 @@ async def test_config_flow( assert result["type"] is FlowResultType.MENU assert result["step_id"] == "pick_firmware" - assert result["description_placeholders"]["model"] == model - async def mock_async_step_pick_firmware_zigbee(self, data): 
- return await self.async_step_confirm_zigbee(user_input={}) - - with patch( - "homeassistant.components.homeassistant_hardware.firmware_config_flow.BaseFirmwareConfigFlow.async_step_pick_firmware_zigbee", - autospec=True, - side_effect=mock_async_step_pick_firmware_zigbee, - ): + with mock_addon_info( + hass, + app_type=ApplicationType.SPINEL, + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option: we are now installing the addon result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_zigbee_flasher_addon" + assert result["description_placeholders"]["firmware_type"] == "spinel" + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now configuring the addon and running it + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now uninstalling the addon + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "uninstall_zigbee_flasher_addon" + assert result["progress_action"] == "uninstall_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # We are finally done with the addon + assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) assert result["type"] is FlowResultType.CREATE_ENTRY config_entry = result["result"] @@ -101,10 +229,383 @@ async def test_config_flow( (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), ], ) -async def test_options_flow( +async def test_config_flow_zigbee_skip_step_if_installed( usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: - """Test the options flow for SkyConnect.""" + """Test the config flow for SkyConnect, skip installing the addon if necessary.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + + with mock_addon_info( + hass, + app_type=ApplicationType.SPINEL, + flasher_addon_info=AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ), + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option: we skip installation, instead we directly run it + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": 
STEP_PICK_FIRMWARE_ZIGBEE}, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert result["description_placeholders"]["firmware_type"] == "spinel" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] + + # Uninstall the addon + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + # Done + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the config flow for SkyConnect.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + + with mock_addon_info( + hass, + app_type=ApplicationType.EZSP, + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_otbr_addon" + assert result["description_placeholders"]["firmware_type"] == "ezsp" + assert result["description_placeholders"]["model"] == model + + await hass.async_block_till_done(wait_background_tasks=True) + + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ) + + # Progress the flow, it is now configuring the addon and running it + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.data == { + "firmware": "spinel", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, 
+ "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + } + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_addon_already_installed( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the Thread config flow for SkyConnect, addon is already installed.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with mock_addon_info( + hass, + app_type=ApplicationType.EZSP, + otbr_addon_info=AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_RUNNING, + update_available=False, + version=None, + ), + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_not_hassio( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test when the stick is used with a non-hassio setup.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with mock_addon_info( + hass, + is_hassio=False, + app_type=ApplicationType.EZSP, + ) as (mock_otbr_manager, mock_flasher_manager): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.data == { + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + } + + # Ensure a ZHA discovery flow has been created + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + zha_flow = flows[0] + assert zha_flow["handler"] == "zha" + assert zha_flow["context"]["source"] == "hardware" + assert zha_flow["step_id"] == "confirm" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_options_flow_zigbee_to_thread( + 
usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the options flow for SkyConnect, migrating Zigbee to Thread.""" + config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + }, + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + + # First step is confirmation + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "ezsp" + assert result["description_placeholders"]["model"] == model + + with mock_addon_info( + hass, + app_type=ApplicationType.EZSP, + ) as (mock_otbr_manager, mock_flasher_manager): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_otbr_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ) + + # Progress the flow, it is now configuring the addon and running it + result = await hass.config_entries.options.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + # We are now done + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # The firmware type has been updated + assert config_entry.data["firmware"] == "spinel" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_options_flow_thread_to_zigbee( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the options flow for SkyConnect, migrating Thread to Zigbee.""" config_entry = MockConfigEntry( domain="homeassistant_sky_connect", data={ @@ -131,35 +632,64 @@ async def test_options_flow( assert result["description_placeholders"]["firmware_type"] == "spinel" assert result["description_placeholders"]["model"] == model - async def mock_async_step_pick_firmware_zigbee(self, data): - return await self.async_step_confirm_zigbee(user_input={}) - - 
with patch( - "homeassistant.components.homeassistant_hardware.firmware_config_flow.BaseFirmwareOptionsFlow.async_step_pick_firmware_zigbee", - autospec=True, - side_effect=mock_async_step_pick_firmware_zigbee, - ): + with mock_addon_info( + hass, + app_type=ApplicationType.SPINEL, + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option: we are now installing the addon result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_zigbee_flasher_addon" + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now configuring the addon and running it + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now uninstalling the addon + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "uninstall_zigbee_flasher_addon" + assert result["progress_action"] == "uninstall_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # We are finally done with the addon + assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] + + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + # We are now done + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={} + ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"] is True - assert config_entry.data == { - "firmware": "ezsp", - "device": usb_data.device, - "manufacturer": usb_data.manufacturer, - "pid": usb_data.pid, - "description": usb_data.description, - "product": usb_data.description, - "serial_number": usb_data.serial_number, - "vid": usb_data.vid, - } + # The firmware type has been updated + assert config_entry.data["firmware"] == "ezsp" -@pytest.mark.usefixtures("supervisor_client") @pytest.mark.parametrize( ("usb_data", "model"), [ diff --git a/tests/components/homeassistant_hardware/test_config_flow_failures.py b/tests/components/homeassistant_sky_connect/test_config_flow_failures.py similarity index 75% rename from tests/components/homeassistant_hardware/test_config_flow_failures.py rename to tests/components/homeassistant_sky_connect/test_config_flow_failures.py index 5a6f765c44c..b29f8d808ae 100644 --- a/tests/components/homeassistant_hardware/test_config_flow_failures.py +++ b/tests/components/homeassistant_sky_connect/test_config_flow_failures.py @@ -1,48 +1,38 @@ -"""Test the Home Assistant hardware firmware config flow failure cases.""" +"""Test the Home Assistant SkyConnect config flow failure cases.""" from unittest.mock import AsyncMock import pytest from universal_silabs_flasher.const import ApplicationType -from 
homeassistant.components.hassio import AddonError, AddonInfo, AddonState -from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( +from homeassistant.components import usb +from homeassistant.components.hassio.addon_manager import ( + AddonError, + AddonInfo, + AddonState, +) +from homeassistant.components.homeassistant_sky_connect.config_flow import ( STEP_PICK_FIRMWARE_THREAD, STEP_PICK_FIRMWARE_ZIGBEE, ) +from homeassistant.components.homeassistant_sky_connect.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .test_config_flow import ( - TEST_DEVICE, - TEST_DOMAIN, - TEST_HARDWARE_NAME, - delayed_side_effect, - mock_addon_info, - mock_test_firmware_platform, # noqa: F401 -) +from .test_config_flow import USB_DATA_ZBT1, delayed_side_effect, mock_addon_info from tests.common import MockConfigEntry -@pytest.fixture(autouse=True) -async def fixture_mock_supervisor_client(supervisor_client: AsyncMock): - """Mock supervisor client in tests.""" - - @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.config.abort.unsupported_firmware"], -) -@pytest.mark.parametrize( - "next_step", + ("usb_data", "model", "next_step"), [ - STEP_PICK_FIRMWARE_ZIGBEE, - STEP_PICK_FIRMWARE_THREAD, + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1", STEP_PICK_FIRMWARE_ZIGBEE), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1", STEP_PICK_FIRMWARE_THREAD), ], ) async def test_config_flow_cannot_probe_firmware( - next_step: str, hass: HomeAssistant + usb_data: usb.UsbServiceInfo, model: str, next_step: str, hass: HomeAssistant ) -> None: """Test failure case when firmware cannot be probed.""" @@ -52,7 +42,7 @@ async def test_config_flow_cannot_probe_firmware( ) as (mock_otbr_manager, mock_flasher_manager): # Start the flow result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) result = await hass.config_entries.flow.async_configure( @@ -65,15 +55,17 @@ async def test_config_flow_cannot_probe_firmware( @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.config.abort.not_hassio"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) async def test_config_flow_zigbee_not_hassio_wrong_firmware( - hass: HomeAssistant, + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: """Test when the stick is used with a non-hassio setup but the firmware is bad.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -94,15 +86,17 @@ async def test_config_flow_zigbee_not_hassio_wrong_firmware( @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.config.abort.addon_already_running"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) async def test_config_flow_zigbee_flasher_addon_already_running( - hass: HomeAssistant, + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: """Test failure case when flasher addon is already running.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -132,13 +126,17 @@ async def test_config_flow_zigbee_flasher_addon_already_running( 
@pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.config.abort.addon_info_failed"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) -async def test_config_flow_zigbee_flasher_addon_info_fails(hass: HomeAssistant) -> None: +async def test_config_flow_zigbee_flasher_addon_info_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -169,15 +167,17 @@ async def test_config_flow_zigbee_flasher_addon_info_fails(hass: HomeAssistant) @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.config.abort.addon_install_failed"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) async def test_config_flow_zigbee_flasher_addon_install_fails( - hass: HomeAssistant, + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -203,15 +203,17 @@ async def test_config_flow_zigbee_flasher_addon_install_fails( @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.config.abort.addon_set_config_failed"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) async def test_config_flow_zigbee_flasher_addon_set_config_fails( - hass: HomeAssistant, + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: """Test failure case when flasher addon cannot be configured.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -241,13 +243,17 @@ async def test_config_flow_zigbee_flasher_addon_set_config_fails( @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.config.abort.addon_start_failed"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) -async def test_config_flow_zigbee_flasher_run_fails(hass: HomeAssistant) -> None: +async def test_config_flow_zigbee_flasher_run_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: """Test failure case when flasher addon fails to run.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -273,10 +279,18 @@ async def test_config_flow_zigbee_flasher_run_fails(hass: HomeAssistant) -> None assert result["reason"] == "addon_start_failed" -async def test_config_flow_zigbee_flasher_uninstall_fails(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_flasher_uninstall_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: """Test failure case when flasher addon uninstall fails.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -306,13 
+320,17 @@ async def test_config_flow_zigbee_flasher_uninstall_fails(hass: HomeAssistant) - @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.config.abort.not_hassio_thread"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) -async def test_config_flow_thread_not_hassio(hass: HomeAssistant) -> None: +async def test_config_flow_thread_not_hassio( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: """Test when the stick is used with a non-hassio setup and Thread is selected.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -333,13 +351,17 @@ async def test_config_flow_thread_not_hassio(hass: HomeAssistant) -> None: @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.config.abort.addon_info_failed"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) -async def test_config_flow_thread_addon_info_fails(hass: HomeAssistant) -> None: +async def test_config_flow_thread_addon_info_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -361,13 +383,17 @@ async def test_config_flow_thread_addon_info_fails(hass: HomeAssistant) -> None: @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.config.abort.otbr_addon_already_running"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) -async def test_config_flow_thread_addon_already_running(hass: HomeAssistant) -> None: +async def test_config_flow_thread_addon_already_running( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: """Test failure case when the Thread addon is already running.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -400,13 +426,17 @@ async def test_config_flow_thread_addon_already_running(hass: HomeAssistant) -> @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.config.abort.addon_install_failed"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) -async def test_config_flow_thread_addon_install_fails(hass: HomeAssistant) -> None: +async def test_config_flow_thread_addon_install_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -431,13 +461,17 @@ async def test_config_flow_thread_addon_install_fails(hass: HomeAssistant) -> No @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.config.abort.addon_set_config_failed"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) -async def test_config_flow_thread_addon_set_config_fails(hass: HomeAssistant) -> None: +async def test_config_flow_thread_addon_set_config_fails( + usb_data: usb.UsbServiceInfo, model: 
str, hass: HomeAssistant +) -> None: """Test failure case when flasher addon cannot be configured.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -462,13 +496,17 @@ async def test_config_flow_thread_addon_set_config_fails(hass: HomeAssistant) -> @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.config.abort.addon_start_failed"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) -async def test_config_flow_thread_flasher_run_fails(hass: HomeAssistant) -> None: +async def test_config_flow_thread_flasher_run_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: """Test failure case when flasher addon fails to run.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -493,10 +531,18 @@ async def test_config_flow_thread_flasher_run_fails(hass: HomeAssistant) -> None assert result["reason"] == "addon_start_failed" -async def test_config_flow_thread_flasher_uninstall_fails(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_flasher_uninstall_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: """Test failure case when flasher addon uninstall fails.""" result = await hass.config_entries.flow.async_init( - TEST_DOMAIN, context={"source": "hardware"} + DOMAIN, context={"source": "usb"}, data=usb_data ) with mock_addon_info( @@ -527,19 +573,26 @@ async def test_config_flow_thread_flasher_uninstall_fails(hass: HomeAssistant) - @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.options.abort.zha_still_using_stick"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) async def test_options_flow_zigbee_to_thread_zha_configured( - hass: HomeAssistant, + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: """Test the options flow migration failure, ZHA using the stick.""" config_entry = MockConfigEntry( - domain=TEST_DOMAIN, + domain="homeassistant_sky_connect", data={ "firmware": "ezsp", - "device": TEST_DEVICE, - "hardware": TEST_HARDWARE_NAME, + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, }, version=1, minor_version=2, @@ -551,7 +604,7 @@ async def test_options_flow_zigbee_to_thread_zha_configured( # Set up ZHA as well zha_config_entry = MockConfigEntry( domain="zha", - data={"device": {"path": TEST_DEVICE}}, + data={"device": {"path": usb_data.device}}, ) zha_config_entry.add_to_hass(hass) @@ -568,19 +621,26 @@ async def test_options_flow_zigbee_to_thread_zha_configured( @pytest.mark.parametrize( - "ignore_translations", - ["component.test_firmware_domain.options.abort.otbr_still_using_stick"], + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], ) async def test_options_flow_thread_to_zigbee_otbr_configured( - hass: HomeAssistant, + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: """Test the options flow migration failure, OTBR still using the stick.""" 
config_entry = MockConfigEntry( - domain=TEST_DOMAIN, + domain="homeassistant_sky_connect", data={ "firmware": "spinel", - "device": TEST_DEVICE, - "hardware": TEST_HARDWARE_NAME, + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, }, version=1, minor_version=2, @@ -598,7 +658,7 @@ async def test_options_flow_thread_to_zigbee_otbr_configured( otbr_addon_info=AddonInfo( available=True, hostname=None, - options={"device": TEST_DEVICE}, + options={"device": usb_data.device}, state=AddonState.RUNNING, update_available=False, version="1.0.0", diff --git a/tests/components/homeassistant_sky_connect/test_hardware.py b/tests/components/homeassistant_sky_connect/test_hardware.py index f39e648b0f2..888ed27a3c0 100644 --- a/tests/components/homeassistant_sky_connect/test_hardware.py +++ b/tests/components/homeassistant_sky_connect/test_hardware.py @@ -1,8 +1,7 @@ """Test the Home Assistant SkyConnect hardware platform.""" from homeassistant.components.homeassistant_sky_connect.const import DOMAIN -from homeassistant.const import EVENT_HOMEASSISTANT_STARTED -from homeassistant.core import HomeAssistant +from homeassistant.core import EVENT_HOMEASSISTANT_STARTED, HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry diff --git a/tests/components/homeassistant_sky_connect/test_init.py b/tests/components/homeassistant_sky_connect/test_init.py index e1c13771fdc..88b57f2dd64 100644 --- a/tests/components/homeassistant_sky_connect/test_init.py +++ b/tests/components/homeassistant_sky_connect/test_init.py @@ -4,8 +4,8 @@ from unittest.mock import patch from universal_silabs_flasher.const import ApplicationType -from homeassistant.components.homeassistant_hardware.util import FirmwareGuess from homeassistant.components.homeassistant_sky_connect.const import DOMAIN +from homeassistant.components.homeassistant_sky_connect.util import FirmwareGuess from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry diff --git a/tests/components/homeassistant_sky_connect/test_util.py b/tests/components/homeassistant_sky_connect/test_util.py index 1d1d70c1b4c..b560acc65b7 100644 --- a/tests/components/homeassistant_sky_connect/test_util.py +++ b/tests/components/homeassistant_sky_connect/test_util.py @@ -1,14 +1,24 @@ """Test SkyConnect utilities.""" +from unittest.mock import AsyncMock, patch + +from universal_silabs_flasher.const import ApplicationType + +from homeassistant.components.hassio import AddonError, AddonInfo, AddonState from homeassistant.components.homeassistant_sky_connect.const import ( DOMAIN, HardwareVariant, ) from homeassistant.components.homeassistant_sky_connect.util import ( + FirmwareGuess, get_hardware_variant, get_usb_service_info, + get_zha_device_path, + guess_firmware_type, ) from homeassistant.components.usb import UsbServiceInfo +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -42,6 +52,20 @@ CONNECT_ZBT1_CONFIG_ENTRY = MockConfigEntry( version=2, ) +ZHA_CONFIG_ENTRY = MockConfigEntry( + domain="zha", + unique_id="some_unique_id", + data={ + "device": { + "path": "/dev/serial/by-id/usb-Nabu_Casa_Home_Assistant_Connect_ZBT-1_3c0ed67c628beb11b1cd64a0f320645d-if00-port0", + "baudrate": 115200, + "flow_control": None, + }, + "radio_type": 
"ezsp", + }, + version=4, +) + def test_get_usb_service_info() -> None: """Test `get_usb_service_info` conversion.""" @@ -61,3 +85,131 @@ def test_get_hardware_variant() -> None: assert ( get_hardware_variant(CONNECT_ZBT1_CONFIG_ENTRY) == HardwareVariant.CONNECT_ZBT1 ) + + +def test_get_zha_device_path() -> None: + """Test extracting the ZHA device path from its config entry.""" + assert ( + get_zha_device_path(ZHA_CONFIG_ENTRY) == ZHA_CONFIG_ENTRY.data["device"]["path"] + ) + + +def test_get_zha_device_path_ignored_discovery() -> None: + """Test extracting the ZHA device path from an ignored ZHA discovery.""" + config_entry = MockConfigEntry( + domain="zha", + unique_id="some_unique_id", + data={}, + version=4, + ) + + assert get_zha_device_path(config_entry) is None + + +async def test_guess_firmware_type_unknown(hass: HomeAssistant) -> None: + """Test guessing the firmware type.""" + + assert (await guess_firmware_type(hass, "/dev/missing")) == FirmwareGuess( + is_running=False, firmware_type=ApplicationType.EZSP, source="unknown" + ) + + +async def test_guess_firmware_type(hass: HomeAssistant) -> None: + """Test guessing the firmware.""" + path = ZHA_CONFIG_ENTRY.data["device"]["path"] + + ZHA_CONFIG_ENTRY.add_to_hass(hass) + + ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.NOT_LOADED) + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=False, firmware_type=ApplicationType.EZSP, source="zha" + ) + + # When ZHA is running, we indicate as such when guessing + ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.LOADED) + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager = AsyncMock() + mock_multipan_addon_manager = AsyncMock() + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.util.is_hassio", + return_value=True, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.util.get_otbr_addon_manager", + return_value=mock_otbr_addon_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.util.get_multiprotocol_addon_manager", + return_value=mock_multipan_addon_manager, + ), + ): + mock_otbr_addon_manager.async_get_addon_info.side_effect = AddonError() + mock_multipan_addon_manager.async_get_addon_info.side_effect = AddonError() + + # Hassio errors are ignored and we still go with ZHA + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.side_effect = None + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": "/some/other/device"}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # We will prefer ZHA, as it is running (and actually pointing to the device) + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": path}, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.0.0", + ) + + # We will still prefer ZHA, as it is the one actually running + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + 
mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": path}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # Finally, ZHA loses out to OTBR + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.SPINEL, source="otbr" + ) + + mock_multipan_addon_manager.async_get_addon_info.side_effect = None + mock_multipan_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": path}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # Which will lose out to multi-PAN + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.CPC, source="multiprotocol" + ) diff --git a/tests/components/homeassistant_yellow/conftest.py b/tests/components/homeassistant_yellow/conftest.py index 7247c7da4e2..38398eb719f 100644 --- a/tests/components/homeassistant_yellow/conftest.py +++ b/tests/components/homeassistant_yellow/conftest.py @@ -1,17 +1,17 @@ """Test fixtures for the Home Assistant Yellow integration.""" -from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator @pytest.fixture(autouse=True) def mock_zha_config_flow_setup() -> Generator[None]: """Mock the radio connection and probing of the ZHA config flow.""" - def mock_probe(config: dict[str, Any]) -> dict[str, Any]: + def mock_probe(config: dict[str, Any]) -> None: # The radio probing will return the correct baudrate return {**config, "baudrate": 115200} @@ -47,3 +47,109 @@ def mock_zha_get_last_network_settings() -> Generator[None]: AsyncMock(return_value=None), ): yield + + +@pytest.fixture(name="addon_running") +def mock_addon_running(addon_store_info, addon_info): + """Mock add-on already running.""" + addon_store_info.return_value = { + "installed": "1.0.0", + "state": "started", + "version": "1.0.0", + } + addon_info.return_value["hostname"] = "core-silabs-multiprotocol" + addon_info.return_value["state"] = "started" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +@pytest.fixture(name="addon_installed") +def mock_addon_installed(addon_store_info, addon_info): + """Mock add-on already installed but not running.""" + addon_store_info.return_value = { + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["hostname"] = "core-silabs-multiprotocol" + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +@pytest.fixture(name="addon_store_info") +def addon_store_info_fixture(): + """Mock Supervisor add-on store info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" + ) as addon_store_info: + addon_store_info.return_value = { + "available": True, + "installed": None, + "state": None, + "version": "1.0.0", + } + yield addon_store_info + + +@pytest.fixture(name="addon_info") +def addon_info_fixture(): + """Mock Supervisor add-on info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_info", + ) as addon_info: + addon_info.return_value = { + "available": True, + "hostname": None, + "options": {}, + "state": None, + "update_available": False, + "version": None, + } + yield addon_info + + +@pytest.fixture(name="set_addon_options") +def 
set_addon_options_fixture(): + """Mock set add-on options.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_set_addon_options" + ) as set_options: + yield set_options + + +@pytest.fixture(name="install_addon_side_effect") +def install_addon_side_effect_fixture(addon_store_info, addon_info): + """Return the install add-on side effect.""" + + async def install_addon(hass, slug): + """Mock install add-on.""" + addon_store_info.return_value = { + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["hostname"] = "core-silabs-multiprotocol" + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + + return install_addon + + +@pytest.fixture(name="install_addon") +def mock_install_addon(install_addon_side_effect): + """Mock install add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_install_addon", + side_effect=install_addon_side_effect, + ) as install_addon: + yield install_addon + + +@pytest.fixture(name="start_addon") +def start_addon_fixture(): + """Mock start add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_start_addon" + ) as start_addon: + yield start_addon diff --git a/tests/components/homeassistant_yellow/test_config_flow.py b/tests/components/homeassistant_yellow/test_config_flow.py index ab6f158b211..4ae04180a64 100644 --- a/tests/components/homeassistant_yellow/test_config_flow.py +++ b/tests/components/homeassistant_yellow/test_config_flow.py @@ -1,25 +1,13 @@ """Test the Home Assistant Yellow config flow.""" -from collections.abc import Generator from unittest.mock import Mock, patch import pytest +from typing_extensions import Generator -from homeassistant.components.hassio import ( - DOMAIN as HASSIO_DOMAIN, - AddonInfo, - AddonState, -) -from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( - STEP_PICK_FIRMWARE_ZIGBEE, -) -from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( - CONF_DISABLE_MULTI_PAN, - get_flasher_addon_manager, - get_multiprotocol_addon_manager, -) -from homeassistant.components.homeassistant_hardware.util import ApplicationType -from homeassistant.components.homeassistant_yellow.const import DOMAIN, RADIO_DEVICE +from homeassistant.components.hassio import DOMAIN as HASSIO_DOMAIN +from homeassistant.components.homeassistant_yellow.const import DOMAIN +from homeassistant.components.zha import DOMAIN as ZHA_DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -69,28 +57,22 @@ async def test_config_flow(hass: HomeAssistant) -> None: mock_integration(hass, MockModule("hassio")) await async_setup_component(hass, HASSIO_DOMAIN, {}) - with ( - patch( - "homeassistant.components.homeassistant_yellow.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - patch( - "homeassistant.components.homeassistant_hardware.firmware_config_flow.probe_silabs_firmware_type", - return_value=ApplicationType.EZSP, - ), - ): + with patch( + "homeassistant.components.homeassistant_yellow.async_setup_entry", + return_value=True, + ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "system"} ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Home Assistant Yellow" - assert result["data"] == {"firmware": "ezsp"} + assert result["data"] == {} assert 
result["options"] == {} assert len(mock_setup_entry.mock_calls) == 1 config_entry = hass.config_entries.async_entries(DOMAIN)[0] - assert config_entry.data == {"firmware": "ezsp"} + assert config_entry.data == {} assert config_entry.options == {} assert config_entry.title == "Home Assistant Yellow" @@ -102,12 +84,10 @@ async def test_config_flow_single_entry(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={"firmware": ApplicationType.EZSP}, + data={}, domain=DOMAIN, options={}, title="Home Assistant Yellow", - version=1, - minor_version=2, ) config_entry.add_to_hass(hass) @@ -124,6 +104,165 @@ async def test_config_flow_single_entry(hass: HomeAssistant) -> None: mock_setup_entry.assert_not_called() +async def test_option_flow_install_multi_pan_addon( + hass: HomeAssistant, + addon_store_info, + addon_info, + install_addon, + set_addon_options, + start_addon, +) -> None: + """Test installing the multi pan addon.""" + mock_integration(hass, MockModule("hassio")) + await async_setup_component(hass, HASSIO_DOMAIN, {}) + + # Setup the config entry + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={}, + title="Home Assistant Yellow", + ) + config_entry.add_to_hass(hass) + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + + with patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", + side_effect=Mock(return_value=True), + ): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + {"next_step_id": "multipan_settings"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "addon_not_installed" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + "enable_multi_pan": True, + }, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "install_addon" + assert result["progress_action"] == "install_addon" + + await hass.async_block_till_done() + install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_addon" + set_addon_options.assert_called_once_with( + hass, + "core_silabs_multiprotocol", + { + "options": { + "autoflash_firmware": True, + "device": "/dev/ttyAMA1", + "baudrate": "115200", + "flow_control": True, + } + }, + ) + + await hass.async_block_till_done() + start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_option_flow_install_multi_pan_addon_zha( + hass: HomeAssistant, + addon_store_info, + addon_info, + install_addon, + set_addon_options, + start_addon, +) -> None: + """Test installing the multi pan addon when a zha config entry exists.""" + mock_integration(hass, MockModule("hassio")) + await async_setup_component(hass, HASSIO_DOMAIN, {}) + + # Setup the config entry + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={}, + title="Home Assistant Yellow", + ) + config_entry.add_to_hass(hass) + + zha_config_entry = MockConfigEntry( + data={"device": {"path": "/dev/ttyAMA1"}, "radio_type": "ezsp"}, + domain=ZHA_DOMAIN, + options={}, + title="Yellow", + ) + zha_config_entry.add_to_hass(hass) + 
+ result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + + with patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", + side_effect=Mock(return_value=True), + ): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + {"next_step_id": "multipan_settings"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "addon_not_installed" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + "enable_multi_pan": True, + }, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "install_addon" + assert result["progress_action"] == "install_addon" + + await hass.async_block_till_done() + install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_addon" + set_addon_options.assert_called_once_with( + hass, + "core_silabs_multiprotocol", + { + "options": { + "autoflash_firmware": True, + "device": "/dev/ttyAMA1", + "baudrate": "115200", + "flow_control": True, + } + }, + ) + # Check the ZHA config entry data is updated + assert zha_config_entry.data == { + "device": { + "path": "socket://core-silabs-multiprotocol:9999", + "baudrate": 115200, + "flow_control": None, + }, + "radio_type": "ezsp", + } + + await hass.async_block_till_done() + start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.CREATE_ENTRY + + @pytest.mark.parametrize( ("reboot_menu_choice", "reboot_calls"), [("reboot_now", 1), ("reboot_later", 0)], @@ -142,12 +281,10 @@ async def test_option_flow_led_settings( # Setup the config entry config_entry = MockConfigEntry( - data={"firmware": ApplicationType.EZSP}, + data={}, domain=DOMAIN, options={}, title="Home Assistant Yellow", - version=1, - minor_version=2, ) config_entry.add_to_hass(hass) @@ -190,12 +327,10 @@ async def test_option_flow_led_settings_unchanged( # Setup the config entry config_entry = MockConfigEntry( - data={"firmware": ApplicationType.EZSP}, + data={}, domain=DOMAIN, options={}, title="Home Assistant Yellow", - version=1, - minor_version=2, ) config_entry.add_to_hass(hass) @@ -224,12 +359,10 @@ async def test_option_flow_led_settings_fail_1(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={"firmware": ApplicationType.EZSP}, + data={}, domain=DOMAIN, options={}, title="Home Assistant Yellow", - version=1, - minor_version=2, ) config_entry.add_to_hass(hass) @@ -258,12 +391,10 @@ async def test_option_flow_led_settings_fail_2( # Setup the config entry config_entry = MockConfigEntry( - data={"firmware": ApplicationType.EZSP}, + data={}, domain=DOMAIN, options={}, title="Home Assistant Yellow", - version=1, - minor_version=2, ) config_entry.add_to_hass(hass) @@ -287,140 +418,3 @@ async def test_option_flow_led_settings_fail_2( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "write_hw_settings_error" - - -async def test_firmware_options_flow(hass: HomeAssistant) -> None: - """Test the firmware options flow for Yellow.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - 
config_entry = MockConfigEntry( - data={"firmware": ApplicationType.SPINEL}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - version=1, - minor_version=2, - ) - config_entry.add_to_hass(hass) - - # First step is confirmation - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "main_menu" - assert "firmware_settings" in result["menu_options"] - - # Pick firmware settings - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={"next_step_id": "firmware_settings"}, - ) - - assert result["step_id"] == "pick_firmware" - assert result["description_placeholders"]["firmware_type"] == "spinel" - assert result["description_placeholders"]["model"] == "Home Assistant Yellow" - - async def mock_async_step_pick_firmware_zigbee(self, data): - return await self.async_step_confirm_zigbee(user_input={}) - - with patch( - "homeassistant.components.homeassistant_hardware.firmware_config_flow.BaseFirmwareOptionsFlow.async_step_pick_firmware_zigbee", - autospec=True, - side_effect=mock_async_step_pick_firmware_zigbee, - ): - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"] is True - - assert config_entry.data == { - "firmware": "ezsp", - } - - -@pytest.mark.usefixtures("supervisor_client") -async def test_options_flow_multipan_uninstall(hass: HomeAssistant) -> None: - """Test options flow for when multi-PAN firmware is installed.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - config_entry = MockConfigEntry( - data={"firmware": ApplicationType.CPC}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - version=1, - minor_version=2, - ) - config_entry.add_to_hass(hass) - - # Multi-PAN addon is running - mock_multipan_manager = Mock(spec_set=await get_multiprotocol_addon_manager(hass)) - mock_multipan_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={"device": RADIO_DEVICE}, - state=AddonState.RUNNING, - update_available=False, - version="1.0.0", - ) - - mock_flasher_manager = Mock(spec_set=get_flasher_addon_manager(hass)) - mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={}, - state=AddonState.NOT_RUNNING, - update_available=False, - version="1.0.0", - ) - - with ( - patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.get_multiprotocol_addon_manager", - return_value=mock_multipan_manager, - ), - patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.get_flasher_addon_manager", - return_value=mock_flasher_manager, - ), - patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - return_value=True, - ), - ): - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "main_menu" - assert "multipan_settings" in result["menu_options"] - - # Pick multi-PAN settings - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={"next_step_id": "multipan_settings"}, - ) - - # Pick the uninstall option - result = await hass.config_entries.options.async_configure( - 
result["flow_id"], - user_input={"next_step_id": "uninstall_addon"}, - ) - - # Check the box - result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={CONF_DISABLE_MULTI_PAN: True} - ) - - # Finish the flow - result = await hass.config_entries.options.async_configure(result["flow_id"]) - await hass.async_block_till_done(wait_background_tasks=True) - result = await hass.config_entries.options.async_configure(result["flow_id"]) - await hass.async_block_till_done(wait_background_tasks=True) - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.CREATE_ENTRY - - # We've reverted the firmware back to Zigbee - assert config_entry.data["firmware"] == "ezsp" diff --git a/tests/components/homeassistant_yellow/test_hardware.py b/tests/components/homeassistant_yellow/test_hardware.py index 4fd2eddb704..9d43b341abf 100644 --- a/tests/components/homeassistant_yellow/test_hardware.py +++ b/tests/components/homeassistant_yellow/test_hardware.py @@ -13,7 +13,6 @@ from tests.common import MockConfigEntry, MockModule, mock_integration from tests.typing import WebSocketGenerator -@pytest.mark.usefixtures("supervisor_client") async def test_hardware_info( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, addon_store_info ) -> None: @@ -66,7 +65,6 @@ async def test_hardware_info( @pytest.mark.parametrize("os_info", [None, {"board": None}, {"board": "other"}]) -@pytest.mark.usefixtures("supervisor_client") async def test_hardware_info_fail( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, os_info, addon_store_info ) -> None: diff --git a/tests/components/homeassistant_yellow/test_init.py b/tests/components/homeassistant_yellow/test_init.py index 5d534dad1e7..ec3ba4e7005 100644 --- a/tests/components/homeassistant_yellow/test_init.py +++ b/tests/components/homeassistant_yellow/test_init.py @@ -6,14 +6,10 @@ import pytest from homeassistant.components import zha from homeassistant.components.hassio import DOMAIN as HASSIO_DOMAIN -from homeassistant.components.homeassistant_hardware.util import ( - ApplicationType, - FirmwareGuess, -) +from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.homeassistant_yellow.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, MockModule, mock_integration @@ -31,12 +27,10 @@ async def test_setup_entry( # Setup the config entry config_entry = MockConfigEntry( - data={"firmware": ApplicationType.EZSP}, + data={}, domain=DOMAIN, options={}, title="Home Assistant Yellow", - version=1, - minor_version=2, ) config_entry.add_to_hass(hass) with ( @@ -48,14 +42,6 @@ async def test_setup_entry( "homeassistant.components.onboarding.async_is_onboarded", return_value=onboarded, ), - patch( - "homeassistant.components.homeassistant_yellow.guess_firmware_type", - return_value=FirmwareGuess( # Nothing is setup - is_running=False, - firmware_type=ApplicationType.EZSP, - source="unknown", - ), - ), ): assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done(wait_background_tasks=True) @@ -88,12 +74,118 @@ async def test_setup_zha(hass: HomeAssistant, addon_store_info) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={"firmware": ApplicationType.EZSP}, + 
data={}, + domain=DOMAIN, + options={}, + title="Home Assistant Yellow", + ) + config_entry.add_to_hass(hass) + with ( + patch( + "homeassistant.components.homeassistant_yellow.get_os_info", + return_value={"board": "yellow"}, + ) as mock_get_os_info, + patch( + "homeassistant.components.onboarding.async_is_onboarded", return_value=False + ), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get_os_info.mock_calls) == 1 + + # Finish setting up ZHA + zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") + assert len(zha_flows) == 1 + assert zha_flows[0]["step_id"] == "choose_formation_strategy" + + await hass.config_entries.flow.async_configure( + zha_flows[0]["flow_id"], + user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, + ) + await hass.async_block_till_done() + + config_entry = hass.config_entries.async_entries("zha")[0] + assert config_entry.data == { + "device": { + "baudrate": 115200, + "flow_control": "hardware", + "path": "/dev/ttyAMA1", + }, + "radio_type": "ezsp", + } + assert config_entry.options == {} + assert config_entry.title == "Yellow" + + +async def test_setup_zha_multipan( + hass: HomeAssistant, addon_info, addon_running +) -> None: + """Test zha gets the right config.""" + mock_integration(hass, MockModule("hassio")) + await async_setup_component(hass, HASSIO_DOMAIN, {}) + + addon_info.return_value["options"]["device"] = "/dev/ttyAMA1" + + # Setup the config entry + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={}, + title="Home Assistant Yellow", + ) + config_entry.add_to_hass(hass) + with ( + patch( + "homeassistant.components.homeassistant_yellow.get_os_info", + return_value={"board": "yellow"}, + ) as mock_get_os_info, + patch( + "homeassistant.components.onboarding.async_is_onboarded", return_value=False + ), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get_os_info.mock_calls) == 1 + + # Finish setting up ZHA + zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") + assert len(zha_flows) == 1 + assert zha_flows[0]["step_id"] == "choose_formation_strategy" + + await hass.config_entries.flow.async_configure( + zha_flows[0]["flow_id"], + user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, + ) + await hass.async_block_till_done() + + config_entry = hass.config_entries.async_entries("zha")[0] + assert config_entry.data == { + "device": { + "baudrate": 115200, + "flow_control": None, + "path": "socket://core-silabs-multiprotocol:9999", + }, + "radio_type": "ezsp", + } + assert config_entry.options == {} + assert config_entry.title == "Yellow Multiprotocol" + + +async def test_setup_zha_multipan_other_device( + hass: HomeAssistant, addon_info, addon_running +) -> None: + """Test zha gets the right config.""" + mock_integration(hass, MockModule("hassio")) + await async_setup_component(hass, HASSIO_DOMAIN, {}) + + addon_info.return_value["options"]["device"] = "/dev/not_yellow_radio" + + # Setup the config entry + config_entry = MockConfigEntry( + data={}, domain=DOMAIN, options={}, title="Home Assistant Yellow", - version=1, - minor_version=2, ) config_entry.add_to_hass(hass) with ( @@ -137,12 +229,10 @@ async def test_setup_entry_no_hassio(hass: HomeAssistant) -> None: """Test setup of a config entry without hassio.""" # Setup the config entry config_entry = 
MockConfigEntry( - data={"firmware": ApplicationType.EZSP}, + data={}, domain=DOMAIN, options={}, title="Home Assistant Yellow", - version=1, - minor_version=2, ) config_entry.add_to_hass(hass) assert len(hass.config_entries.async_entries()) == 1 @@ -164,12 +254,10 @@ async def test_setup_entry_wrong_board(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={"firmware": ApplicationType.EZSP}, + data={}, domain=DOMAIN, options={}, title="Home Assistant Yellow", - version=1, - minor_version=2, ) config_entry.add_to_hass(hass) assert len(hass.config_entries.async_entries()) == 1 @@ -192,12 +280,10 @@ async def test_setup_entry_wait_hassio(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={"firmware": ApplicationType.EZSP}, + data={}, domain=DOMAIN, options={}, title="Home Assistant Yellow", - version=1, - minor_version=2, ) config_entry.add_to_hass(hass) with patch( @@ -217,15 +303,14 @@ async def test_setup_entry_addon_info_fails( """Test setup of a config entry when fetching addon info fails.""" mock_integration(hass, MockModule("hassio")) await async_setup_component(hass, HASSIO_DOMAIN, {}) + addon_store_info.side_effect = HassioAPIError("Boom") # Setup the config entry config_entry = MockConfigEntry( - data={"firmware": ApplicationType.CPC}, + data={}, domain=DOMAIN, options={}, title="Home Assistant Yellow", - version=1, - minor_version=2, ) config_entry.add_to_hass(hass) with ( @@ -234,15 +319,41 @@ async def test_setup_entry_addon_info_fails( return_value={"board": "yellow"}, ), patch( - "homeassistant.components.onboarding.async_is_onboarded", - return_value=False, - ), - patch( - "homeassistant.components.homeassistant_yellow.check_multi_pan_addon", - side_effect=HomeAssistantError("Boom"), + "homeassistant.components.onboarding.async_is_onboarded", return_value=False ), ): assert not await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_setup_entry_addon_not_running( + hass: HomeAssistant, addon_installed, start_addon +) -> None: + """Test the addon is started if it is not running.""" + mock_integration(hass, MockModule("hassio")) + await async_setup_component(hass, HASSIO_DOMAIN, {}) + + # Setup the config entry + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={}, + title="Home Assistant Yellow", + ) + config_entry.add_to_hass(hass) + with ( + patch( + "homeassistant.components.homeassistant_yellow.get_os_info", + return_value={"board": "yellow"}, + ), + patch( + "homeassistant.components.onboarding.async_is_onboarded", return_value=False + ), + ): + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY + start_addon.assert_called_once() diff --git a/tests/components/homekit/conftest.py b/tests/components/homekit/conftest.py index 6bdad5d2b4c..26333b0b807 100644 --- a/tests/components/homekit/conftest.py +++ b/tests/components/homekit/conftest.py @@ -4,6 +4,7 @@ from asyncio import AbstractEventLoop from collections.abc import Generator from contextlib import suppress import os +from typing import Any from unittest.mock import MagicMock, patch import pytest @@ -12,13 +13,13 @@ from homeassistant.components.device_tracker.legacy import YAML_DEVICES from homeassistant.components.homekit.accessories import HomeDriver from 
homeassistant.components.homekit.const import BRIDGE_NAME, EVENT_HOMEKIT_CHANGED from homeassistant.components.homekit.iidmanager import AccessoryIIDStorage -from homeassistant.core import Event, HomeAssistant +from homeassistant.core import HomeAssistant from tests.common import async_capture_events @pytest.fixture -def iid_storage(hass: HomeAssistant) -> Generator[AccessoryIIDStorage]: +def iid_storage(hass): """Mock the iid storage.""" with patch.object(AccessoryIIDStorage, "_async_schedule_save"): yield AccessoryIIDStorage(hass, "") @@ -27,7 +28,7 @@ def iid_storage(hass: HomeAssistant) -> Generator[AccessoryIIDStorage]: @pytest.fixture def run_driver( hass: HomeAssistant, event_loop: AbstractEventLoop, iid_storage: AccessoryIIDStorage -) -> Generator[HomeDriver]: +) -> Generator[HomeDriver, Any, None]: """Return a custom AccessoryDriver instance for HomeKit accessory init. This mock does not mock async_stop, so the driver will not be stopped @@ -56,7 +57,7 @@ def run_driver( @pytest.fixture def hk_driver( hass: HomeAssistant, event_loop: AbstractEventLoop, iid_storage: AccessoryIIDStorage -) -> Generator[HomeDriver]: +) -> Generator[HomeDriver, Any, None]: """Return a custom AccessoryDriver instance for HomeKit accessory init.""" with ( patch("pyhap.accessory_driver.AsyncZeroconf"), @@ -88,7 +89,7 @@ def mock_hap( event_loop: AbstractEventLoop, iid_storage: AccessoryIIDStorage, mock_zeroconf: MagicMock, -) -> Generator[HomeDriver]: +) -> Generator[HomeDriver, Any, None]: """Return a custom AccessoryDriver instance for HomeKit accessory init.""" with ( patch("pyhap.accessory_driver.AsyncZeroconf"), @@ -121,13 +122,13 @@ def mock_hap( @pytest.fixture -def events(hass: HomeAssistant) -> list[Event]: +def events(hass): """Yield caught homekit_changed events.""" return async_capture_events(hass, EVENT_HOMEKIT_CHANGED) @pytest.fixture -def demo_cleanup(hass: HomeAssistant) -> Generator[None]: +def demo_cleanup(hass): """Clean up device tracker demo file.""" yield with suppress(FileNotFoundError): diff --git a/tests/components/homekit/test_accessories.py b/tests/components/homekit/test_accessories.py index c37cac84b8a..32cd6622492 100644 --- a/tests/components/homekit/test_accessories.py +++ b/tests/components/homekit/test_accessories.py @@ -47,7 +47,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, __version__ as hass_version, ) -from homeassistant.core import Event, HomeAssistant +from homeassistant.core import HomeAssistant from tests.common import async_mock_service @@ -667,9 +667,7 @@ async def test_battery_appears_after_startup( assert acc._char_battery is None -async def test_call_service( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_call_service(hass: HomeAssistant, hk_driver, events) -> None: """Test call_service method.""" entity_id = "homekit.accessory" hass.states.async_set(entity_id, None) diff --git a/tests/components/homekit/test_diagnostics.py b/tests/components/homekit/test_diagnostics.py index ce3c954c447..728624da0d0 100644 --- a/tests/components/homekit/test_diagnostics.py +++ b/tests/components/homekit/test_diagnostics.py @@ -12,7 +12,7 @@ from homeassistant.components.homekit.const import ( ) from homeassistant.const import CONF_NAME, CONF_PORT, EVENT_HOMEASSISTANT_STARTED from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from .util import 
async_init_integration @@ -321,7 +321,9 @@ async def test_config_entry_with_trigger_accessory( hass: HomeAssistant, hass_client: ClientSessionGenerator, hk_driver, + events, demo_cleanup, + device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: """Test generating diagnostics for a bridge config entry with a trigger accessory.""" diff --git a/tests/components/homekit/test_get_accessories.py b/tests/components/homekit/test_get_accessories.py index c4b1cbe98d8..02a39ed9258 100644 --- a/tests/components/homekit/test_get_accessories.py +++ b/tests/components/homekit/test_get_accessories.py @@ -335,10 +335,10 @@ def test_type_sensors(type_name, entity_id, state, attrs) -> None: ("SelectSwitch", "select.test", "option1", {}, {}), ("Switch", "switch.test", "on", {}, {}), ("Switch", "switch.test", "on", {}, {CONF_TYPE: TYPE_SWITCH}), - ("ValveSwitch", "switch.test", "on", {}, {CONF_TYPE: TYPE_FAUCET}), - ("ValveSwitch", "switch.test", "on", {}, {CONF_TYPE: TYPE_VALVE}), - ("ValveSwitch", "switch.test", "on", {}, {CONF_TYPE: TYPE_SHOWER}), - ("ValveSwitch", "switch.test", "on", {}, {CONF_TYPE: TYPE_SPRINKLER}), + ("Valve", "switch.test", "on", {}, {CONF_TYPE: TYPE_FAUCET}), + ("Valve", "switch.test", "on", {}, {CONF_TYPE: TYPE_VALVE}), + ("Valve", "switch.test", "on", {}, {CONF_TYPE: TYPE_SHOWER}), + ("Valve", "switch.test", "on", {}, {CONF_TYPE: TYPE_SPRINKLER}), ], ) def test_type_switches(type_name, entity_id, state, attrs, config) -> None: @@ -350,21 +350,6 @@ def test_type_switches(type_name, entity_id, state, attrs, config) -> None: assert mock_type.called -@pytest.mark.parametrize( - ("type_name", "entity_id", "state", "attrs"), - [ - ("Valve", "valve.test", "on", {}), - ], -) -def test_type_valve(type_name, entity_id, state, attrs) -> None: - """Test if valve types are associated correctly.""" - mock_type = Mock() - with patch.dict(TYPES, {type_name: mock_type}): - entity_state = State(entity_id, state, attrs) - get_accessory(None, None, entity_state, 2, {}) - assert mock_type.called - - @pytest.mark.parametrize( ("type_name", "entity_id", "state", "attrs"), [ diff --git a/tests/components/homekit/test_homekit.py b/tests/components/homekit/test_homekit.py index 4000c61e422..33bfc6e66d3 100644 --- a/tests/components/homekit/test_homekit.py +++ b/tests/components/homekit/test_homekit.py @@ -14,7 +14,6 @@ import pytest from homeassistant import config as hass_config from homeassistant.components import homekit as homekit_base, zeroconf from homeassistant.components.binary_sensor import BinarySensorDeviceClass -from homeassistant.components.event import EventDeviceClass from homeassistant.components.homekit import ( MAX_DEVICES, STATUS_READY, @@ -59,8 +58,7 @@ from homeassistant.const import ( STATE_ON, EntityCategory, ) -from homeassistant.core import HomeAssistant, State -from homeassistant.exceptions import HomeAssistantError +from homeassistant.core import HomeAssistant, HomeAssistantError, State from homeassistant.helpers import ( device_registry as dr, entity_registry as er, @@ -73,7 +71,6 @@ from homeassistant.helpers.entityfilter import ( CONF_INCLUDE_DOMAINS, CONF_INCLUDE_ENTITIES, CONF_INCLUDE_ENTITY_GLOBS, - EntityFilter, convert_filter, ) from homeassistant.setup import async_setup_component @@ -120,13 +117,7 @@ def patch_source_ip(): yield -def _mock_homekit( - hass: HomeAssistant, - entry: MockConfigEntry, - homekit_mode: str, - entity_filter: EntityFilter | None = None, - devices: list[str] | None = None, -) -> HomeKit: +def _mock_homekit(hass, entry, 
homekit_mode, entity_filter=None, devices=None): return HomeKit( hass=hass, name=BRIDGE_NAME, @@ -143,7 +134,7 @@ def _mock_homekit( ) -def _mock_homekit_bridge(hass: HomeAssistant, entry: MockConfigEntry) -> HomeKit: +def _mock_homekit_bridge(hass, entry): homekit = _mock_homekit(hass, entry, HOMEKIT_MODE_BRIDGE) homekit.driver = MagicMock() homekit.iid_storage = MagicMock() @@ -1850,11 +1841,7 @@ async def test_homekit_uses_system_zeroconf(hass: HomeAssistant, hk_driver) -> N entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - # New tests should not access runtime data. - # Do not use this pattern for new tests. - entry_data: HomeKitEntryData = hass.config_entries.async_get_entry( - entry.entry_id - ).runtime_data + entry_data: HomeKitEntryData = hass.data[DOMAIN][entry.entry_id] assert entry_data.homekit.driver.advertiser == system_async_zc assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() @@ -1951,21 +1938,12 @@ async def test_homekit_ignored_missing_devices( ) -@pytest.mark.parametrize( - ("domain", "device_class"), - [ - ("binary_sensor", BinarySensorDeviceClass.MOTION), - ("event", EventDeviceClass.MOTION), - ], -) @pytest.mark.usefixtures("mock_async_zeroconf") async def test_homekit_finds_linked_motion_sensors( hass: HomeAssistant, hk_driver, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - domain: str, - device_class: EventDeviceClass | BinarySensorDeviceClass, ) -> None: """Test HomeKit start method.""" entry = await async_init_integration(hass) @@ -1985,21 +1963,21 @@ async def test_homekit_finds_linked_motion_sensors( connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) - entry = entity_registry.async_get_or_create( - domain, + binary_motion_sensor = entity_registry.async_get_or_create( + "binary_sensor", "camera", "motion_sensor", device_id=device_entry.id, - original_device_class=device_class, + original_device_class=BinarySensorDeviceClass.MOTION, ) camera = entity_registry.async_get_or_create( "camera", "camera", "demo", device_id=device_entry.id ) hass.states.async_set( - entry.entity_id, + binary_motion_sensor.entity_id, STATE_ON, - {ATTR_DEVICE_CLASS: device_class}, + {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION}, ) hass.states.async_set(camera.entity_id, STATE_ON) @@ -2022,82 +2000,7 @@ async def test_homekit_finds_linked_motion_sensors( "model": "Camera Server", "platform": "test", "sw_version": "0.16.0", - "linked_motion_sensor": entry.entity_id, - }, - ) - - -@pytest.mark.parametrize( - ("domain", "device_class"), - [ - ("event", EventDeviceClass.DOORBELL), - ], -) -@pytest.mark.usefixtures("mock_async_zeroconf") -async def test_homekit_finds_linked_doorbell_sensors( - hass: HomeAssistant, - hk_driver, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - domain: str, - device_class: EventDeviceClass | BinarySensorDeviceClass, -) -> None: - """Test homekit can find linked doorbell sensors.""" - entry = await async_init_integration(hass) - - homekit = _mock_homekit(hass, entry, HOMEKIT_MODE_BRIDGE) - - homekit.driver = hk_driver - homekit.bridge = HomeBridge(hass, hk_driver, "mock_bridge") - - config_entry = MockConfigEntry(domain="test", data={}) - config_entry.add_to_hass(hass) - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - sw_version="0.16.0", - model="Camera Server", - manufacturer="Ubq", - connections={(dr.CONNECTION_NETWORK_MAC, 
"12:34:56:AB:CD:EF")}, - ) - - entry = entity_registry.async_get_or_create( - domain, - "camera", - "doorbell_sensor", - device_id=device_entry.id, - original_device_class=device_class, - ) - camera = entity_registry.async_get_or_create( - "camera", "camera", "demo", device_id=device_entry.id - ) - - hass.states.async_set( - entry.entity_id, - STATE_ON, - {ATTR_DEVICE_CLASS: device_class}, - ) - hass.states.async_set(camera.entity_id, STATE_ON) - - with ( - patch.object(homekit.bridge, "add_accessory"), - patch(f"{PATH_HOMEKIT}.async_show_setup_message"), - patch(f"{PATH_HOMEKIT}.get_accessory") as mock_get_acc, - patch("pyhap.accessory_driver.AccessoryDriver.async_start"), - ): - await homekit.async_start() - await hass.async_block_till_done() - - mock_get_acc.assert_called_with( - hass, - ANY, - ANY, - ANY, - { - "manufacturer": "Ubq", - "model": "Camera Server", - "platform": "test", - "sw_version": "0.16.0", - "linked_doorbell_sensor": entry.entity_id, + "linked_motion_sensor": "binary_sensor.camera_motion_sensor", }, ) diff --git a/tests/components/homekit/test_type_cameras.py b/tests/components/homekit/test_type_cameras.py index a42980ec2af..184ce1b6521 100644 --- a/tests/components/homekit/test_type_cameras.py +++ b/tests/components/homekit/test_type_cameras.py @@ -1,7 +1,6 @@ """Test different accessory types: Camera.""" import asyncio -from typing import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from uuid import UUID @@ -10,7 +9,6 @@ import pytest from homeassistant.components import camera, ffmpeg from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.components.camera.img_util import TurboJPEGSingleton -from homeassistant.components.event import EventDeviceClass from homeassistant.components.homekit.accessories import HomeBridge from homeassistant.components.homekit.const import ( AUDIO_CODEC_COPY, @@ -32,17 +30,10 @@ from homeassistant.components.homekit.const import ( ) from homeassistant.components.homekit.type_cameras import Camera from homeassistant.components.homekit.type_switches import Switch -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - STATE_OFF, - STATE_ON, - STATE_UNAVAILABLE, - STATE_UNKNOWN, -) +from homeassistant.const import ATTR_DEVICE_CLASS, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component -from homeassistant.util import dt as dt_util from tests.components.camera.common import mock_turbo_jpeg @@ -54,12 +45,12 @@ PID_THAT_WILL_NEVER_BE_ALIVE = 2147483647 @pytest.fixture(autouse=True) -async def setup_homeassistant(hass: HomeAssistant) -> None: +async def setup_homeassistant(hass: HomeAssistant): """Set up the homeassistant integration.""" await async_setup_component(hass, "homeassistant", {}) -async def _async_start_streaming(hass: HomeAssistant, acc: Camera) -> None: +async def _async_start_streaming(hass, acc): """Start streaming a camera.""" acc.set_selected_stream_configuration(MOCK_START_STREAM_TLV) await hass.async_block_till_done() @@ -67,35 +58,28 @@ async def _async_start_streaming(hass: HomeAssistant, acc: Camera) -> None: await hass.async_block_till_done() -async def _async_setup_endpoints(hass: HomeAssistant, acc: Camera) -> None: +async def _async_setup_endpoints(hass, acc): """Set camera endpoints.""" acc.set_endpoints(MOCK_END_POINTS_TLV) acc.run() await hass.async_block_till_done() -async def _async_reconfigure_stream( - hass: HomeAssistant, - 
acc: Camera, - session_info: dict[str, Any], - stream_config: dict[str, Any], -) -> None: +async def _async_reconfigure_stream(hass, acc, session_info, stream_config): """Reconfigure the stream.""" await acc.reconfigure_stream(session_info, stream_config) acc.run() await hass.async_block_till_done() -async def _async_stop_all_streams(hass: HomeAssistant, acc: Camera) -> None: +async def _async_stop_all_streams(hass, acc): """Stop all camera streams.""" await acc.stop() acc.run() await hass.async_block_till_done() -async def _async_stop_stream( - hass: HomeAssistant, acc: Camera, session_info: dict[str, Any] -) -> None: +async def _async_stop_stream(hass, acc, session_info): """Stop a camera stream.""" await acc.stop_stream(session_info) acc.run() @@ -146,7 +130,9 @@ def _get_failing_mock_ffmpeg(): return ffmpeg -async def test_camera_stream_source_configured(hass: HomeAssistant, run_driver) -> None: +async def test_camera_stream_source_configured( + hass: HomeAssistant, run_driver, events +) -> None: """Test a camera that can stream with a configured source.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) await async_setup_component( @@ -266,7 +252,7 @@ async def test_camera_stream_source_configured(hass: HomeAssistant, run_driver) async def test_camera_stream_source_configured_with_failing_ffmpeg( - hass: HomeAssistant, run_driver + hass: HomeAssistant, run_driver, events ) -> None: """Test a camera that can stream with a configured source with ffmpeg failing.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -322,7 +308,9 @@ async def test_camera_stream_source_configured_with_failing_ffmpeg( await _async_stop_all_streams(hass, acc) -async def test_camera_stream_source_found(hass: HomeAssistant, run_driver) -> None: +async def test_camera_stream_source_found( + hass: HomeAssistant, run_driver, events +) -> None: """Test a camera that can stream and we get the source from the entity.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) await async_setup_component( @@ -408,7 +396,9 @@ async def test_camera_stream_source_found(hass: HomeAssistant, run_driver) -> No ) -async def test_camera_stream_source_fails(hass: HomeAssistant, run_driver) -> None: +async def test_camera_stream_source_fails( + hass: HomeAssistant, run_driver, events +) -> None: """Test a camera that can stream and we cannot get the source from the entity.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) await async_setup_component( @@ -449,7 +439,7 @@ async def test_camera_stream_source_fails(hass: HomeAssistant, run_driver) -> No await _async_stop_all_streams(hass, acc) -async def test_camera_with_no_stream(hass: HomeAssistant, run_driver) -> None: +async def test_camera_with_no_stream(hass: HomeAssistant, run_driver, events) -> None: """Test a camera that cannot stream.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) await async_setup_component(hass, camera.DOMAIN, {camera.DOMAIN: {}}) @@ -482,7 +472,7 @@ async def test_camera_with_no_stream(hass: HomeAssistant, run_driver) -> None: async def test_camera_stream_source_configured_and_copy_codec( - hass: HomeAssistant, run_driver + hass: HomeAssistant, run_driver, events ) -> None: """Test a camera that can stream with a configured source.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -557,7 +547,7 @@ async def test_camera_stream_source_configured_and_copy_codec( async def 
test_camera_stream_source_configured_and_override_profile_names( - hass: HomeAssistant, run_driver + hass: HomeAssistant, run_driver, events ) -> None: """Test a camera that can stream with a configured source over overridden profile names.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -633,7 +623,7 @@ async def test_camera_stream_source_configured_and_override_profile_names( async def test_camera_streaming_fails_after_starting_ffmpeg( - hass: HomeAssistant, run_driver + hass: HomeAssistant, run_driver, events ) -> None: """Test a camera that can stream with a configured source.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -710,7 +700,7 @@ async def test_camera_streaming_fails_after_starting_ffmpeg( async def test_camera_with_linked_motion_sensor( - hass: HomeAssistant, run_driver + hass: HomeAssistant, run_driver, events ) -> None: """Test a camera with a linked motion sensor can update.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -803,151 +793,8 @@ async def test_camera_with_linked_motion_sensor( assert char.value is True -async def test_camera_with_linked_motion_event(hass: HomeAssistant, run_driver) -> None: - """Test a camera with a linked motion event entity can update.""" - await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) - await async_setup_component( - hass, camera.DOMAIN, {camera.DOMAIN: {"platform": "demo"}} - ) - await hass.async_block_till_done() - motion_entity_id = "event.motion" - - hass.states.async_set( - motion_entity_id, - dt_util.utcnow().isoformat(), - {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, - ) - await hass.async_block_till_done() - entity_id = "camera.demo_camera" - - hass.states.async_set(entity_id, None) - await hass.async_block_till_done() - acc = Camera( - hass, - run_driver, - "Camera", - entity_id, - 2, - { - CONF_STREAM_SOURCE: "/dev/null", - CONF_SUPPORT_AUDIO: True, - CONF_VIDEO_CODEC: VIDEO_CODEC_H264_OMX, - CONF_AUDIO_CODEC: AUDIO_CODEC_COPY, - CONF_LINKED_MOTION_SENSOR: motion_entity_id, - }, - ) - bridge = HomeBridge("hass", run_driver, "Test Bridge") - bridge.add_accessory(acc) - - acc.run() - - assert acc.aid == 2 - assert acc.category == 17 # Camera - - service = acc.get_service(SERV_MOTION_SENSOR) - assert service - char = service.get_characteristic(CHAR_MOTION_DETECTED) - assert char - - assert char.value is False - broker = MagicMock() - char.broker = broker - - hass.states.async_set( - motion_entity_id, STATE_UNKNOWN, {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION} - ) - await hass.async_block_till_done() - assert len(broker.mock_calls) == 0 - broker.reset_mock() - assert char.value is False - - char.set_value(True) - fire_time = dt_util.utcnow().isoformat() - hass.states.async_set( - motion_entity_id, fire_time, {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION} - ) - await hass.async_block_till_done() - assert len(broker.mock_calls) == 4 - broker.reset_mock() - assert char.value is False - - hass.states.async_set( - motion_entity_id, - fire_time, - {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, - force_update=True, - ) - await hass.async_block_till_done() - assert len(broker.mock_calls) == 0 - broker.reset_mock() - - hass.states.async_set( - motion_entity_id, - fire_time, - {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION, "other": "attr"}, - ) - await hass.async_block_till_done() - assert len(broker.mock_calls) == 0 - broker.reset_mock() - # Ensure we do not throw when the linked - # motion sensor is removed - 
hass.states.async_remove(motion_entity_id) - await hass.async_block_till_done() - acc.run() - await hass.async_block_till_done() - assert char.value is False - - # Ensure re-adding does not fire an event - hass.states.async_set( - motion_entity_id, - dt_util.utcnow().isoformat(), - {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION, "other": "attr"}, - ) - await hass.async_block_till_done() - assert not broker.mock_calls - - # But a second update does - broker.reset_mock() - hass.states.async_set( - motion_entity_id, - dt_util.utcnow().isoformat(), - {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, - ) - await hass.async_block_till_done() - assert broker.mock_calls - - # Now go unavailable - broker.reset_mock() - hass.states.async_set( - motion_entity_id, - STATE_UNAVAILABLE, - {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, - ) - await hass.async_block_till_done() - assert not broker.mock_calls - - # Going from unavailable to a state should not fire an event - hass.states.async_set( - motion_entity_id, - dt_util.utcnow().isoformat(), - {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, - ) - await hass.async_block_till_done() - assert not broker.mock_calls - - # But a another update does - broker.reset_mock() - hass.states.async_set( - motion_entity_id, - dt_util.utcnow().isoformat(), - {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION, "other": "attr"}, - ) - await hass.async_block_till_done() - assert broker.mock_calls - - async def test_camera_with_a_missing_linked_motion_sensor( - hass: HomeAssistant, run_driver + hass: HomeAssistant, run_driver, events ) -> None: """Test a camera with a configured linked motion sensor that is missing.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -979,7 +826,7 @@ async def test_camera_with_a_missing_linked_motion_sensor( async def test_camera_with_linked_doorbell_sensor( - hass: HomeAssistant, run_driver + hass: HomeAssistant, run_driver, events ) -> None: """Test a camera with a linked doorbell sensor can update.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -1094,154 +941,8 @@ async def test_camera_with_linked_doorbell_sensor( assert char2.value is None -async def test_camera_with_linked_doorbell_event( - hass: HomeAssistant, run_driver -) -> None: - """Test a camera with a linked doorbell event can update.""" - await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) - await async_setup_component( - hass, camera.DOMAIN, {camera.DOMAIN: {"platform": "demo"}} - ) - await hass.async_block_till_done() - doorbell_entity_id = "event.doorbell" - - hass.states.async_set( - doorbell_entity_id, - dt_util.utcnow().isoformat(), - {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, - ) - await hass.async_block_till_done() - entity_id = "camera.demo_camera" - - hass.states.async_set(entity_id, None) - await hass.async_block_till_done() - acc = Camera( - hass, - run_driver, - "Camera", - entity_id, - 2, - { - CONF_STREAM_SOURCE: "/dev/null", - CONF_SUPPORT_AUDIO: True, - CONF_VIDEO_CODEC: VIDEO_CODEC_H264_OMX, - CONF_AUDIO_CODEC: AUDIO_CODEC_COPY, - CONF_LINKED_DOORBELL_SENSOR: doorbell_entity_id, - }, - ) - bridge = HomeBridge("hass", run_driver, "Test Bridge") - bridge.add_accessory(acc) - - acc.run() - - assert acc.aid == 2 - assert acc.category == 17 # Camera - - service = acc.get_service(SERV_DOORBELL) - assert service - char = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) - assert char - - assert char.value is None - - service2 = acc.get_service(SERV_STATELESS_PROGRAMMABLE_SWITCH) - assert service2 
- char2 = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) - assert char2 - broker = MagicMock() - char2.broker = broker - assert char2.value is None - - hass.states.async_set( - doorbell_entity_id, - STATE_UNKNOWN, - {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, - ) - await hass.async_block_till_done() - assert char.value is None - assert char2.value is None - assert len(broker.mock_calls) == 0 - - char.set_value(True) - char2.set_value(True) - broker.reset_mock() - - original_time = dt_util.utcnow().isoformat() - hass.states.async_set( - doorbell_entity_id, - original_time, - {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, - ) - await hass.async_block_till_done() - assert char.value is None - assert char2.value is None - assert len(broker.mock_calls) == 2 - broker.reset_mock() - - hass.states.async_set( - doorbell_entity_id, - original_time, - {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, - force_update=True, - ) - await hass.async_block_till_done() - assert char.value is None - assert char2.value is None - assert len(broker.mock_calls) == 0 - broker.reset_mock() - - hass.states.async_set( - doorbell_entity_id, - original_time, - {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL, "other": "attr"}, - ) - await hass.async_block_till_done() - assert char.value is None - assert char2.value is None - assert len(broker.mock_calls) == 0 - broker.reset_mock() - - # Ensure we do not throw when the linked - # doorbell sensor is removed - hass.states.async_remove(doorbell_entity_id) - await hass.async_block_till_done() - acc.run() - await hass.async_block_till_done() - assert char.value is None - assert char2.value is None - - await hass.async_block_till_done() - hass.states.async_set( - doorbell_entity_id, - STATE_UNAVAILABLE, - {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, - ) - await hass.async_block_till_done() - # Ensure re-adding does not fire an event - assert not broker.mock_calls - broker.reset_mock() - - # going from unavailable to a state should not fire an event - hass.states.async_set( - doorbell_entity_id, - dt_util.utcnow().isoformat(), - {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, - ) - await hass.async_block_till_done() - assert not broker.mock_calls - - # But a second update does - hass.states.async_set( - doorbell_entity_id, - dt_util.utcnow().isoformat(), - {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, - ) - await hass.async_block_till_done() - assert broker.mock_calls - - async def test_camera_with_a_missing_linked_doorbell_sensor( - hass: HomeAssistant, run_driver + hass: HomeAssistant, run_driver, events ) -> None: """Test a camera with a configured linked doorbell sensor that is missing.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) diff --git a/tests/components/homekit/test_type_covers.py b/tests/components/homekit/test_type_covers.py index 049f6818784..6efd9118092 100644 --- a/tests/components/homekit/test_type_covers.py +++ b/tests/components/homekit/test_type_covers.py @@ -5,9 +5,8 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN as COVER_DOMAIN, + DOMAIN, CoverEntityFeature, - CoverState, ) from homeassistant.components.homekit.const import ( ATTR_OBSTRUCTION_DETECTED, @@ -32,20 +31,22 @@ from homeassistant.const import ( ATTR_SUPPORTED_FEATURES, EVENT_HOMEASSISTANT_START, SERVICE_SET_COVER_TILT_POSITION, + STATE_CLOSED, + STATE_CLOSING, STATE_OFF, STATE_ON, + STATE_OPEN, + STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) -from homeassistant.core import 
CoreState, Event, HomeAssistant
+from homeassistant.core import CoreState, HomeAssistant
 from homeassistant.helpers import entity_registry as er
 
 from tests.common import async_mock_service
 
 
-async def test_garage_door_open_close(
-    hass: HomeAssistant, hk_driver, events: list[Event]
-) -> None:
+async def test_garage_door_open_close(hass: HomeAssistant, hk_driver, events) -> None:
     """Test if accessory and HA are updated accordingly."""
     entity_id = "cover.garage_door"
 
@@ -61,15 +62,13 @@ async def test_garage_door_open_close(
     assert acc.char_current_state.value == HK_DOOR_OPEN
     assert acc.char_target_state.value == HK_DOOR_OPEN
 
-    hass.states.async_set(
-        entity_id, CoverState.CLOSED, {ATTR_OBSTRUCTION_DETECTED: False}
-    )
+    hass.states.async_set(entity_id, STATE_CLOSED, {ATTR_OBSTRUCTION_DETECTED: False})
     await hass.async_block_till_done()
     assert acc.char_current_state.value == HK_DOOR_CLOSED
     assert acc.char_target_state.value == HK_DOOR_CLOSED
     assert acc.char_obstruction_detected.value is False
 
-    hass.states.async_set(entity_id, CoverState.OPEN, {ATTR_OBSTRUCTION_DETECTED: True})
+    hass.states.async_set(entity_id, STATE_OPEN, {ATTR_OBSTRUCTION_DETECTED: True})
     await hass.async_block_till_done()
     assert acc.char_current_state.value == HK_DOOR_OPEN
     assert acc.char_target_state.value == HK_DOOR_OPEN
@@ -91,8 +90,8 @@ async def test_garage_door_open_close(
     assert acc.available is True
 
     # Set from HomeKit
-    call_close_cover = async_mock_service(hass, COVER_DOMAIN, "close_cover")
-    call_open_cover = async_mock_service(hass, COVER_DOMAIN, "open_cover")
+    call_close_cover = async_mock_service(hass, DOMAIN, "close_cover")
+    call_open_cover = async_mock_service(hass, DOMAIN, "open_cover")
 
     acc.char_target_state.client_update_value(1)
     await hass.async_block_till_done()
@@ -103,7 +102,7 @@ async def test_garage_door_open_close(
     assert len(events) == 1
     assert events[-1].data[ATTR_VALUE] is None
 
-    hass.states.async_set(entity_id, CoverState.CLOSED)
+    hass.states.async_set(entity_id, STATE_CLOSED)
     await hass.async_block_till_done()
 
     acc.char_target_state.client_update_value(1)
@@ -122,7 +121,7 @@ async def test_garage_door_open_close(
     assert len(events) == 3
     assert events[-1].data[ATTR_VALUE] is None
 
-    hass.states.async_set(entity_id, CoverState.OPEN)
+    hass.states.async_set(entity_id, STATE_OPEN)
     await hass.async_block_till_done()
 
     acc.char_target_state.client_update_value(0)
@@ -133,13 +132,15 @@ async def test_garage_door_open_close(
     assert events[-1].data[ATTR_VALUE] is None
 
 
-async def test_door_instantiate_set_position(hass: HomeAssistant, hk_driver) -> None:
+async def test_door_instantiate_set_position(
+    hass: HomeAssistant, hk_driver, events
+) -> None:
     """Test if Door accessory is instantiated correctly and can set position."""
     entity_id = "cover.door"
 
     hass.states.async_set(
         entity_id,
-        CoverState.OPEN,
+        STATE_OPEN,
         {
             ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION,
             ATTR_CURRENT_POSITION: 0,
@@ -158,7 +159,7 @@ async def test_door_instantiate_set_position(hass: HomeAssistant, hk_driver) ->
 
     hass.states.async_set(
         entity_id,
-        CoverState.OPEN,
+        STATE_OPEN,
         {
             ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION,
             ATTR_CURRENT_POSITION: 50,
@@ -171,7 +172,7 @@ async def test_door_instantiate_set_position(hass: HomeAssistant, hk_driver) ->
 
     hass.states.async_set(
         entity_id,
-        CoverState.OPEN,
+        STATE_OPEN,
         {
             ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION,
             ATTR_CURRENT_POSITION: "GARBAGE",
@@ -184,7 +185,7 @@ async def test_door_instantiate_set_position(hass: HomeAssistant, hk_driver) ->
 
 
 async def test_windowcovering_set_cover_position(
-    hass: HomeAssistant, hk_driver, events: list[Event]
+    hass: HomeAssistant, hk_driver, events
 ) -> None:
     """Test if accessory and HA are updated accordingly."""
     entity_id = "cover.window"
@@ -220,7 +221,7 @@ async def test_windowcovering_set_cover_position(
 
     hass.states.async_set(
         entity_id,
-        CoverState.OPENING,
+        STATE_OPENING,
         {
             ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION,
             ATTR_CURRENT_POSITION: 60,
@@ -233,7 +234,7 @@ async def test_windowcovering_set_cover_position(
 
     hass.states.async_set(
         entity_id,
-        CoverState.OPENING,
+        STATE_OPENING,
         {
             ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION,
             ATTR_CURRENT_POSITION: 70.0,
@@ -246,7 +247,7 @@ async def test_windowcovering_set_cover_position(
 
     hass.states.async_set(
         entity_id,
-        CoverState.CLOSING,
+        STATE_CLOSING,
         {
             ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION,
             ATTR_CURRENT_POSITION: 50,
@@ -259,7 +260,7 @@ async def test_windowcovering_set_cover_position(
 
     hass.states.async_set(
         entity_id,
-        CoverState.OPEN,
+        STATE_OPEN,
         {
             ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION,
             ATTR_CURRENT_POSITION: 50,
@@ -271,9 +272,7 @@ async def test_windowcovering_set_cover_position(
     assert acc.char_position_state.value == 2
 
     # Set from HomeKit
-    call_set_cover_position = async_mock_service(
-        hass, COVER_DOMAIN, "set_cover_position"
-    )
+    call_set_cover_position = async_mock_service(hass, DOMAIN, "set_cover_position")
 
     acc.char_target_position.client_update_value(25)
     await hass.async_block_till_done()
@@ -296,13 +295,15 @@ async def test_windowcovering_set_cover_position(
     assert events[-1].data[ATTR_VALUE] == 75
 
 
-async def test_window_instantiate_set_position(hass: HomeAssistant, hk_driver) -> None:
+async def test_window_instantiate_set_position(
+    hass: HomeAssistant, hk_driver, events
+) -> None:
     """Test if Window accessory is instantiated correctly and can set position."""
     entity_id = "cover.window"
 
     hass.states.async_set(
         entity_id,
-        CoverState.OPEN,
+        STATE_OPEN,
         {
             ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION,
             ATTR_CURRENT_POSITION: 0,
@@ -321,7 +322,7 @@ async def test_window_instantiate_set_position(hass: HomeAssistant, hk_driver) -
 
     hass.states.async_set(
         entity_id,
-        CoverState.OPEN,
+        STATE_OPEN,
         {
             ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION,
             ATTR_CURRENT_POSITION: 50,
@@ -334,7 +335,7 @@ async def test_window_instantiate_set_position(hass: HomeAssistant, hk_driver) -
 
     hass.states.async_set(
         entity_id,
-        CoverState.OPEN,
+        STATE_OPEN,
         {
             ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION,
             ATTR_CURRENT_POSITION: "GARBAGE",
@@ -347,7 +348,7 @@ async def test_window_instantiate_set_position(hass: HomeAssistant, hk_driver) -
 
 
 async def test_windowcovering_cover_set_tilt(
-    hass: HomeAssistant, hk_driver, events: list[Event]
+    hass: HomeAssistant, hk_driver, events
 ) -> None:
     """Test if accessory and HA update slat tilt accordingly."""
     entity_id = "cover.window"
@@ -368,37 +369,29 @@ async def test_windowcovering_cover_set_tilt(
     assert acc.char_current_tilt.value == 0
     assert acc.char_target_tilt.value == 0
 
-    hass.states.async_set(
-        entity_id, CoverState.CLOSING, {ATTR_CURRENT_TILT_POSITION: None}
-    )
+    hass.states.async_set(entity_id, STATE_CLOSING, {ATTR_CURRENT_TILT_POSITION: None})
     await hass.async_block_till_done()
     assert acc.char_current_tilt.value == 0
     assert acc.char_target_tilt.value == 0
 
-    hass.states.async_set(
-        entity_id, CoverState.CLOSING, {ATTR_CURRENT_TILT_POSITION: 100}
-    )
+    hass.states.async_set(entity_id, STATE_CLOSING, {ATTR_CURRENT_TILT_POSITION: 100})
     await hass.async_block_till_done()
     assert acc.char_current_tilt.value == 90
     assert acc.char_target_tilt.value == 90
 
-    hass.states.async_set(
-        entity_id, CoverState.CLOSING, {ATTR_CURRENT_TILT_POSITION: 50}
-    )
+    hass.states.async_set(entity_id, STATE_CLOSING, {ATTR_CURRENT_TILT_POSITION: 50})
     await hass.async_block_till_done()
     assert acc.char_current_tilt.value == 0
     assert acc.char_target_tilt.value == 0
 
-    hass.states.async_set(
-        entity_id, CoverState.CLOSING, {ATTR_CURRENT_TILT_POSITION: 0}
-    )
+    hass.states.async_set(entity_id, STATE_CLOSING, {ATTR_CURRENT_TILT_POSITION: 0})
    await hass.async_block_till_done()
     assert acc.char_current_tilt.value == -90
     assert acc.char_target_tilt.value == -90
 
     # set from HomeKit
     call_set_tilt_position = async_mock_service(
-        hass, COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION
+        hass, DOMAIN, SERVICE_SET_COVER_TILT_POSITION
     )
 
     # HomeKit sets tilts between -90 and 90 (degrees), whereas
@@ -425,7 +418,7 @@ async def test_windowcovering_cover_set_tilt(
     assert events[-1].data[ATTR_VALUE] == 75
 
 
-async def test_windowcovering_tilt_only(hass: HomeAssistant, hk_driver) -> None:
+async def test_windowcovering_tilt_only(hass: HomeAssistant, hk_driver, events) -> None:
     """Test we lock the window covering closed when its tilt only."""
     entity_id = "cover.window"
 
@@ -449,7 +442,7 @@ async def test_windowcovering_tilt_only(hass: HomeAssistant, hk_driver) -> None:
 
 
 async def test_windowcovering_open_close(
-    hass: HomeAssistant, hk_driver, events: list[Event]
+    hass: HomeAssistant, hk_driver, events
 ) -> None:
     """Test if accessory and HA are updated accordingly."""
     entity_id = "cover.window"
@@ -472,33 +465,33 @@ async def test_windowcovering_open_close(
     assert acc.char_target_position.value == 0
     assert acc.char_position_state.value == 2
 
-    hass.states.async_set(entity_id, CoverState.OPENING)
+    hass.states.async_set(entity_id, STATE_OPENING)
     await hass.async_block_till_done()
     assert acc.char_current_position.value == 0
     assert acc.char_target_position.value == 0
     assert acc.char_position_state.value == 1
 
-    hass.states.async_set(entity_id, CoverState.OPEN)
+    hass.states.async_set(entity_id, STATE_OPEN)
     await hass.async_block_till_done()
     assert acc.char_current_position.value == 100
     assert acc.char_target_position.value == 100
     assert acc.char_position_state.value == 2
 
-    hass.states.async_set(entity_id, CoverState.CLOSING)
+    hass.states.async_set(entity_id, STATE_CLOSING)
     await hass.async_block_till_done()
     assert acc.char_current_position.value == 100
     assert acc.char_target_position.value == 100
     assert acc.char_position_state.value == 0
 
-    hass.states.async_set(entity_id, CoverState.CLOSED)
+    hass.states.async_set(entity_id, STATE_CLOSED)
     await hass.async_block_till_done()
     assert acc.char_current_position.value == 0
     assert acc.char_target_position.value == 0
     assert acc.char_position_state.value == 2
 
     # Set from HomeKit
-    call_close_cover = async_mock_service(hass, COVER_DOMAIN, "close_cover")
-    call_open_cover = async_mock_service(hass, COVER_DOMAIN, "open_cover")
+    call_close_cover = async_mock_service(hass, DOMAIN, "close_cover")
+    call_open_cover = async_mock_service(hass, DOMAIN, "open_cover")
 
     acc.char_target_position.client_update_value(25)
     await hass.async_block_till_done()
@@ -532,7 +525,7 @@ async def test_windowcovering_open_close(
 
 
 async def test_windowcovering_open_close_stop(
-    hass: HomeAssistant, hk_driver, events: list[Event]
+    hass: HomeAssistant, hk_driver, events
 ) -> None:
     """Test if accessory and HA are updated accordingly."""
     entity_id = "cover.window"
@@ -545,9 +538,9 @@ async def test_windowcovering_open_close_stop(
     await hass.async_block_till_done()
 
     # Set from HomeKit
-    call_close_cover = async_mock_service(hass, COVER_DOMAIN, "close_cover")
-    call_open_cover = async_mock_service(hass, COVER_DOMAIN, "open_cover")
-    call_stop_cover = async_mock_service(hass, COVER_DOMAIN, "stop_cover")
+    call_close_cover = async_mock_service(hass, DOMAIN, "close_cover")
+    call_open_cover = async_mock_service(hass, DOMAIN, "open_cover")
+    call_stop_cover = async_mock_service(hass, DOMAIN, "stop_cover")
 
     acc.char_target_position.client_update_value(25)
     await hass.async_block_till_done()
@@ -581,7 +574,7 @@ async def test_windowcovering_open_close_stop(
 
 
 async def test_windowcovering_open_close_with_position_and_stop(
-    hass: HomeAssistant, hk_driver, events: list[Event]
+    hass: HomeAssistant, hk_driver, events
 ) -> None:
     """Test if accessory and HA are updated accordingly."""
     entity_id = "cover.stop_window"
@@ -599,7 +592,7 @@ async def test_windowcovering_open_close_with_position_and_stop(
     await hass.async_block_till_done()
 
     # Set from HomeKit
-    call_stop_cover = async_mock_service(hass, COVER_DOMAIN, "stop_cover")
+    call_stop_cover = async_mock_service(hass, DOMAIN, "stop_cover")
 
     acc.char_hold_position.client_update_value(0)
     await hass.async_block_till_done()
@@ -615,7 +608,7 @@ async def test_windowcovering_open_close_with_position_and_stop(
 
 
 async def test_windowcovering_basic_restore(
-    hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver
+    hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events
 ) -> None:
     """Test setting up an entity from state in the event registry."""
     hass.set_state(CoreState.not_running)
@@ -653,7 +646,7 @@ async def test_windowcovering_basic_restore(
 
 
 async def test_windowcovering_restore(
-    hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver
+    hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events
 ) -> None:
     """Test setting up an entity from state in the event entity_registry."""
     hass.set_state(CoreState.not_running)
@@ -691,7 +684,7 @@ async def test_windowcovering_restore(
 
 
 async def test_garage_door_with_linked_obstruction_sensor(
-    hass: HomeAssistant, hk_driver
+    hass: HomeAssistant, hk_driver, events
 ) -> None:
     """Test if accessory and HA are updated accordingly with a linked obstruction sensor."""
     linked_obstruction_sensor_entity_id = "binary_sensor.obstruction"
@@ -717,20 +710,20 @@ async def test_garage_door_with_linked_obstruction_sensor(
     assert acc.char_current_state.value == HK_DOOR_OPEN
     assert acc.char_target_state.value == HK_DOOR_OPEN
 
-    hass.states.async_set(entity_id, CoverState.CLOSED)
+    hass.states.async_set(entity_id, STATE_CLOSED)
     await hass.async_block_till_done()
     assert acc.char_current_state.value == HK_DOOR_CLOSED
     assert acc.char_target_state.value == HK_DOOR_CLOSED
     assert acc.char_obstruction_detected.value is False
 
-    hass.states.async_set(entity_id, CoverState.OPEN)
+    hass.states.async_set(entity_id, STATE_OPEN)
     hass.states.async_set(linked_obstruction_sensor_entity_id, STATE_ON)
     await hass.async_block_till_done()
     assert acc.char_current_state.value == HK_DOOR_OPEN
     assert acc.char_target_state.value == HK_DOOR_OPEN
     assert acc.char_obstruction_detected.value is True
 
-    hass.states.async_set(entity_id, CoverState.CLOSED)
+    hass.states.async_set(entity_id, STATE_CLOSED)
     hass.states.async_set(linked_obstruction_sensor_entity_id, STATE_OFF)
     await hass.async_block_till_done()
     assert acc.char_current_state.value == HK_DOOR_CLOSED
diff --git a/tests/components/homekit/test_type_fans.py b/tests/components/homekit/test_type_fans.py
index 67392f11f14..d971b8c06d2 100644
--- a/tests/components/homekit/test_type_fans.py
+++ b/tests/components/homekit/test_type_fans.py
@@ -11,7 +11,7 @@ from homeassistant.components.fan import (
     ATTR_PRESET_MODES,
     DIRECTION_FORWARD,
     DIRECTION_REVERSE,
-    DOMAIN as FAN_DOMAIN,
+    DOMAIN,
     FanEntityFeature,
 )
 from homeassistant.components.homekit.const import ATTR_VALUE, PROP_MIN_STEP
@@ -24,13 +24,13 @@ from homeassistant.const import (
     STATE_ON,
     STATE_UNKNOWN,
 )
-from homeassistant.core import CoreState, Event, HomeAssistant
+from homeassistant.core import CoreState, HomeAssistant
 from homeassistant.helpers import entity_registry as er
 
 from tests.common import async_mock_service
 
 
-async def test_fan_basic(hass: HomeAssistant, hk_driver, events: list[Event]) -> None:
+async def test_fan_basic(hass: HomeAssistant, hk_driver, events) -> None:
     """Test fan with char state."""
     entity_id = "fan.demo"
 
@@ -63,8 +63,8 @@ async def test_fan_basic(hass: HomeAssistant, hk_driver, events: list[Event]) ->
     assert acc.char_active.value == 0
 
     # Set from HomeKit
-    call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on")
-    call_turn_off = async_mock_service(hass, FAN_DOMAIN, "turn_off")
+    call_turn_on = async_mock_service(hass, DOMAIN, "turn_on")
+    call_turn_off = async_mock_service(hass, DOMAIN, "turn_off")
 
     char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID]
 
@@ -108,9 +108,7 @@ async def test_fan_basic(hass: HomeAssistant, hk_driver, events: list[Event]) ->
     assert events[-1].data[ATTR_VALUE] is None
 
 
-async def test_fan_direction(
-    hass: HomeAssistant, hk_driver, events: list[Event]
-) -> None:
+async def test_fan_direction(hass: HomeAssistant, hk_driver, events) -> None:
     """Test fan with direction."""
     entity_id = "fan.demo"
 
@@ -144,7 +142,7 @@ async def test_fan_direction(
     assert acc.char_direction.value == 1
 
     # Set from HomeKit
-    call_set_direction = async_mock_service(hass, FAN_DOMAIN, "set_direction")
+    call_set_direction = async_mock_service(hass, DOMAIN, "set_direction")
 
     char_direction_iid = acc.char_direction.to_HAP()[HAP_REPR_IID]
 
@@ -188,9 +186,7 @@ async def test_fan_direction(
     assert events[-1].data[ATTR_VALUE] == DIRECTION_REVERSE
 
 
-async def test_fan_oscillate(
-    hass: HomeAssistant, hk_driver, events: list[Event]
-) -> None:
+async def test_fan_oscillate(hass: HomeAssistant, hk_driver, events) -> None:
     """Test fan with oscillate."""
     entity_id = "fan.demo"
 
@@ -218,7 +214,7 @@ async def test_fan_oscillate(
     assert acc.char_swing.value == 1
 
     # Set from HomeKit
-    call_oscillate = async_mock_service(hass, FAN_DOMAIN, "oscillate")
+    call_oscillate = async_mock_service(hass, DOMAIN, "oscillate")
 
     char_swing_iid = acc.char_swing.to_HAP()[HAP_REPR_IID]
 
@@ -263,7 +259,7 @@ async def test_fan_oscillate(
     assert events[-1].data[ATTR_VALUE] is True
 
 
-async def test_fan_speed(hass: HomeAssistant, hk_driver, events: list[Event]) -> None:
+async def test_fan_speed(hass: HomeAssistant, hk_driver, events) -> None:
     """Test fan with speed."""
     entity_id = "fan.demo"
 
@@ -301,7 +297,7 @@ async def test_fan_speed(hass: HomeAssistant, hk_driver, events: list[Event]) ->
     assert acc.char_speed.value == 100
 
     # Set from HomeKit
-    call_set_percentage = async_mock_service(hass, FAN_DOMAIN, "set_percentage")
+    call_set_percentage = async_mock_service(hass, DOMAIN, "set_percentage")
 
     char_speed_iid = acc.char_speed.to_HAP()[HAP_REPR_IID]
char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID] @@ -343,7 +339,7 @@ async def test_fan_speed(hass: HomeAssistant, hk_driver, events: list[Event]) -> assert acc.char_speed.value == 50 assert acc.char_active.value == 0 - call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") hk_driver.set_characteristics( { @@ -365,9 +361,7 @@ async def test_fan_speed(hass: HomeAssistant, hk_driver, events: list[Event]) -> assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id -async def test_fan_set_all_one_shot( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_fan_set_all_one_shot(hass: HomeAssistant, hk_driver, events) -> None: """Test fan with speed.""" entity_id = "fan.demo" @@ -409,11 +403,11 @@ async def test_fan_set_all_one_shot( assert hass.states.get(entity_id).state == STATE_OFF # Set from HomeKit - call_set_percentage = async_mock_service(hass, FAN_DOMAIN, "set_percentage") - call_oscillate = async_mock_service(hass, FAN_DOMAIN, "oscillate") - call_set_direction = async_mock_service(hass, FAN_DOMAIN, "set_direction") - call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, FAN_DOMAIN, "turn_off") + call_set_percentage = async_mock_service(hass, DOMAIN, "set_percentage") + call_oscillate = async_mock_service(hass, DOMAIN, "oscillate") + call_set_direction = async_mock_service(hass, DOMAIN, "set_direction") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID] char_direction_iid = acc.char_direction.to_HAP()[HAP_REPR_IID] @@ -561,7 +555,7 @@ async def test_fan_set_all_one_shot( async def test_fan_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -603,7 +597,7 @@ async def test_fan_restore( async def test_fan_multiple_preset_modes( - hass: HomeAssistant, hk_driver, events: list[Event] + hass: HomeAssistant, hk_driver, events ) -> None: """Test fan with multiple preset modes.""" entity_id = "fan.demo" @@ -641,8 +635,8 @@ async def test_fan_multiple_preset_modes( assert acc.preset_mode_chars["auto"].value == 0 assert acc.preset_mode_chars["smart"].value == 1 # Set from HomeKit - call_set_preset_mode = async_mock_service(hass, FAN_DOMAIN, "set_preset_mode") - call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") + call_set_preset_mode = async_mock_service(hass, DOMAIN, "set_preset_mode") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") char_auto_iid = acc.preset_mode_chars["auto"].to_HAP()[HAP_REPR_IID] @@ -684,9 +678,7 @@ async def test_fan_multiple_preset_modes( assert len(events) == 2 -async def test_fan_single_preset_mode( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_fan_single_preset_mode(hass: HomeAssistant, hk_driver, events) -> None: """Test fan with a single preset mode.""" entity_id = "fan.demo" @@ -711,8 +703,8 @@ async def test_fan_single_preset_mode( await hass.async_block_till_done() # Set from HomeKit - call_set_preset_mode = async_mock_service(hass, FAN_DOMAIN, "set_preset_mode") - call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") + call_set_preset_mode = async_mock_service(hass, DOMAIN, "set_preset_mode") + 
call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") char_target_fan_state_iid = acc.char_target_fan_state.to_HAP()[HAP_REPR_IID] diff --git a/tests/components/homekit/test_type_humidifiers.py b/tests/components/homekit/test_type_humidifiers.py index de563503b23..fdd01e05a91 100644 --- a/tests/components/homekit/test_type_humidifiers.py +++ b/tests/components/homekit/test_type_humidifiers.py @@ -26,7 +26,7 @@ from homeassistant.components.humidifier import ( ATTR_MIN_HUMIDITY, DEFAULT_MAX_HUMIDITY, DEFAULT_MIN_HUMIDITY, - DOMAIN as HUMIDIFIER_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, HumidifierDeviceClass, ) @@ -42,12 +42,12 @@ from homeassistant.const import ( STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import Event, HomeAssistant +from homeassistant.core import HomeAssistant from tests.common import async_mock_service -async def test_humidifier(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: +async def test_humidifier(hass: HomeAssistant, hk_driver, events) -> None: """Test if humidifier accessory and HA are updated accordingly.""" entity_id = "humidifier.test" @@ -106,9 +106,7 @@ async def test_humidifier(hass: HomeAssistant, hk_driver, events: list[Event]) - assert acc.char_active.value == 0 # Set from HomeKit - call_set_humidity = async_mock_service( - hass, HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY - ) + call_set_humidity = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) char_target_humidity_iid = acc.char_target_humidity.to_HAP()[HAP_REPR_IID] @@ -134,9 +132,7 @@ async def test_humidifier(hass: HomeAssistant, hk_driver, events: list[Event]) - assert events[-1].data[ATTR_VALUE] == "RelativeHumidityHumidifierThreshold to 39.0%" -async def test_dehumidifier( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_dehumidifier(hass: HomeAssistant, hk_driver, events) -> None: """Test if dehumidifier accessory and HA are updated accordingly.""" entity_id = "humidifier.test" @@ -196,9 +192,7 @@ async def test_dehumidifier( assert acc.char_active.value == 0 # Set from HomeKit - call_set_humidity = async_mock_service( - hass, HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY - ) + call_set_humidity = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) char_target_humidity_iid = acc.char_target_humidity.to_HAP()[HAP_REPR_IID] @@ -226,9 +220,7 @@ async def test_dehumidifier( ) -async def test_hygrostat_power_state( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_hygrostat_power_state(hass: HomeAssistant, hk_driver, events) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "humidifier.test" @@ -261,7 +253,7 @@ async def test_hygrostat_power_state( assert acc.char_active.value == 0 # Set from HomeKit - call_turn_on = async_mock_service(hass, HUMIDIFIER_DOMAIN, SERVICE_TURN_ON) + call_turn_on = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON) char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID] @@ -285,7 +277,7 @@ async def test_hygrostat_power_state( assert len(events) == 1 assert events[-1].data[ATTR_VALUE] == "Active to 1" - call_turn_off = async_mock_service(hass, HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF) + call_turn_off = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF) hk_driver.set_characteristics( { @@ -309,7 +301,7 @@ async def test_hygrostat_power_state( async def test_hygrostat_get_humidity_range( - hass: HomeAssistant, hk_driver, events: list[Event] + hass: HomeAssistant, hk_driver, events ) -> None: """Test if humidity range is evaluated correctly.""" entity_id = 
"humidifier.test" @@ -327,9 +319,7 @@ async def test_hygrostat_get_humidity_range( await hass.async_block_till_done() # Set from HomeKit - call_set_humidity = async_mock_service( - hass, HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY - ) + call_set_humidity = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) char_target_humidity_iid = acc.char_target_humidity.to_HAP()[HAP_REPR_IID] @@ -462,10 +452,7 @@ async def test_humidifier_with_a_missing_linked_humidity_sensor( async def test_humidifier_as_dehumidifier( - hass: HomeAssistant, - hk_driver, - events: list[Event], - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture ) -> None: """Test an invalid char_target_humidifier_dehumidifier from HomeKit.""" entity_id = "humidifier.test" @@ -508,10 +495,7 @@ async def test_humidifier_as_dehumidifier( async def test_dehumidifier_as_humidifier( - hass: HomeAssistant, - hk_driver, - events: list[Event], - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture ) -> None: """Test an invalid char_target_humidifier_dehumidifier from HomeKit.""" entity_id = "humidifier.test" diff --git a/tests/components/homekit/test_type_lights.py b/tests/components/homekit/test_type_lights.py index a45e4988c36..8d2978fb0bd 100644 --- a/tests/components/homekit/test_type_lights.py +++ b/tests/components/homekit/test_type_lights.py @@ -27,7 +27,7 @@ from homeassistant.components.light import ( ATTR_RGBWW_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_WHITE, - DOMAIN as LIGHT_DOMAIN, + DOMAIN, ColorMode, ) from homeassistant.const import ( @@ -39,21 +39,21 @@ from homeassistant.const import ( STATE_ON, STATE_UNKNOWN, ) -from homeassistant.core import CoreState, Event, HomeAssistant +from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util from tests.common import async_fire_time_changed, async_mock_service -async def _wait_for_light_coalesce(hass: HomeAssistant) -> None: +async def _wait_for_light_coalesce(hass): async_fire_time_changed( hass, dt_util.utcnow() + timedelta(seconds=CHANGE_COALESCE_TIME_WINDOW) ) await hass.async_block_till_done() -async def test_light_basic(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: +async def test_light_basic(hass: HomeAssistant, hk_driver, events) -> None: """Test light with char state.""" entity_id = "light.demo" @@ -83,8 +83,8 @@ async def test_light_basic(hass: HomeAssistant, hk_driver, events: list[Event]) assert acc.char_on.value == 0 # Set from HomeKit - call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") char_on_iid = acc.char_on.to_HAP()[HAP_REPR_IID] @@ -127,7 +127,7 @@ async def test_light_basic(hass: HomeAssistant, hk_driver, events: list[Event]) [[ColorMode.BRIGHTNESS], [ColorMode.HS], [ColorMode.COLOR_TEMP]], ) async def test_light_brightness( - hass: HomeAssistant, hk_driver, events: list[Event], supported_color_modes + hass: HomeAssistant, hk_driver, events, supported_color_modes ) -> None: """Test light with brightness.""" entity_id = "light.demo" @@ -160,8 +160,8 @@ async def test_light_brightness( assert acc.char_brightness.value == 40 # Set from HomeKit - call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - call_turn_off = 
async_mock_service(hass, LIGHT_DOMAIN, "turn_off") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") hk_driver.set_characteristics( { @@ -226,24 +226,6 @@ async def test_light_brightness( assert len(events) == 3 assert events[-1].data[ATTR_VALUE] == f"Set state to 0, brightness at 0{PERCENTAGE}" - hk_driver.set_characteristics( - { - HAP_REPR_CHARS: [ - { - HAP_REPR_AID: acc.aid, - HAP_REPR_IID: char_brightness_iid, - HAP_REPR_VALUE: 0, - }, - ] - }, - "mock_addr", - ) - await _wait_for_light_coalesce(hass) - assert call_turn_off - assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id - assert len(events) == 4 - assert events[-1].data[ATTR_VALUE] == f"Set state to 0, brightness at 0{PERCENTAGE}" - # 0 is a special case for homekit, see "Handle Brightness" # in update_state hass.states.async_set( @@ -292,9 +274,7 @@ async def test_light_brightness( assert acc.char_brightness.value == 1 -async def test_light_color_temperature( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_light_color_temperature(hass: HomeAssistant, hk_driver, events) -> None: """Test light with color temperature.""" entity_id = "light.demo" @@ -314,7 +294,7 @@ async def test_light_color_temperature( assert acc.char_color_temp.value == 190 # Set from HomeKit - call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") char_color_temp_iid = acc.char_color_temp.to_HAP()[HAP_REPR_IID] @@ -343,7 +323,7 @@ async def test_light_color_temperature( [["color_temp", "hs"], ["color_temp", "rgb"], ["color_temp", "xy"]], ) async def test_light_color_temperature_and_rgb_color( - hass: HomeAssistant, hk_driver, events: list[Event], supported_color_modes + hass: HomeAssistant, hk_driver, events, supported_color_modes ) -> None: """Test light with color temperature and rgb color not exposing temperature.""" entity_id = "light.demo" @@ -390,7 +370,7 @@ async def test_light_color_temperature_and_rgb_color( char_color_temp_iid = acc.char_color_temp.to_HAP()[HAP_REPR_IID] # Set from HomeKit - call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") hk_driver.set_characteristics( { @@ -544,7 +524,7 @@ async def test_light_color_temperature_and_rgb_color( "supported_color_modes", [[ColorMode.HS], [ColorMode.RGB], [ColorMode.XY]] ) async def test_light_rgb_color( - hass: HomeAssistant, hk_driver, events: list[Event], supported_color_modes + hass: HomeAssistant, hk_driver, events, supported_color_modes ) -> None: """Test light with rgb_color.""" entity_id = "light.demo" @@ -567,7 +547,7 @@ async def test_light_rgb_color( assert acc.char_saturation.value == 90 # Set from HomeKit - call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -598,7 +578,7 @@ async def test_light_rgb_color( async def test_light_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -662,7 +642,7 @@ async def test_light_restore( async def test_light_rgb_with_color_temp( hass: HomeAssistant, hk_driver, - events: list[Event], + 
events, supported_color_modes, state_props, turn_on_props_with_brightness, @@ -689,7 +669,7 @@ async def test_light_rgb_with_color_temp( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -782,7 +762,7 @@ async def test_light_rgb_with_color_temp( async def test_light_rgbwx_with_color_temp_and_brightness( hass: HomeAssistant, hk_driver, - events: list[Event], + events, supported_color_modes, state_props, turn_on_props_with_brightness, @@ -809,7 +789,7 @@ async def test_light_rgbwx_with_color_temp_and_brightness( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") char_color_temp_iid = acc.char_color_temp.to_HAP()[HAP_REPR_IID] char_brightness_iid = acc.char_brightness.to_HAP()[HAP_REPR_IID] @@ -844,7 +824,7 @@ async def test_light_rgbwx_with_color_temp_and_brightness( async def test_light_rgb_or_w_lights( hass: HomeAssistant, hk_driver, - events: list[Event], + events, ) -> None: """Test lights with RGB or W lights.""" entity_id = "light.demo" @@ -876,7 +856,7 @@ async def test_light_rgb_or_w_lights( assert acc.char_color_temp.value == 153 # Set from HomeKit - call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -977,7 +957,7 @@ async def test_light_rgb_or_w_lights( async def test_light_rgb_with_white_switch_to_temp( hass: HomeAssistant, hk_driver, - events: list[Event], + events, supported_color_modes, state_props, ) -> None: @@ -1003,7 +983,7 @@ async def test_light_rgb_with_white_switch_to_temp( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -1054,7 +1034,11 @@ async def test_light_rgb_with_white_switch_to_temp( assert acc.char_brightness.value == 100 -async def test_light_rgb_with_hs_color_none(hass: HomeAssistant, hk_driver) -> None: +async def test_light_rgb_with_hs_color_none( + hass: HomeAssistant, + hk_driver, + events, +) -> None: """Test lights hs color set to None.""" entity_id = "light.demo" @@ -1087,7 +1071,7 @@ async def test_light_rgb_with_hs_color_none(hass: HomeAssistant, hk_driver) -> N async def test_light_rgbww_with_color_temp_conversion( hass: HomeAssistant, hk_driver, - events: list[Event], + events, ) -> None: """Test lights with RGBWW convert color temp as expected.""" entity_id = "light.demo" @@ -1118,7 +1102,7 @@ async def test_light_rgbww_with_color_temp_conversion( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -1208,7 +1192,7 @@ async def test_light_rgbww_with_color_temp_conversion( async def test_light_rgbw_with_color_temp_conversion( hass: HomeAssistant, hk_driver, - events: 
list[Event], + events, ) -> None: """Test lights with RGBW convert color temp as expected.""" entity_id = "light.demo" @@ -1239,7 +1223,7 @@ async def test_light_rgbw_with_color_temp_conversion( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -1296,7 +1280,7 @@ async def test_light_rgbw_with_color_temp_conversion( async def test_light_set_brightness_and_color( - hass: HomeAssistant, hk_driver, events: list[Event] + hass: HomeAssistant, hk_driver, events ) -> None: """Test light with all chars in one go.""" entity_id = "light.demo" @@ -1343,7 +1327,7 @@ async def test_light_set_brightness_and_color( assert acc.char_saturation.value == 9 # Set from HomeKit - call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") hk_driver.set_characteristics( { @@ -1381,7 +1365,7 @@ async def test_light_set_brightness_and_color( ) -async def test_light_min_max_mireds(hass: HomeAssistant, hk_driver) -> None: +async def test_light_min_max_mireds(hass: HomeAssistant, hk_driver, events) -> None: """Test mireds are forced to ints.""" entity_id = "light.demo" @@ -1402,7 +1386,7 @@ async def test_light_min_max_mireds(hass: HomeAssistant, hk_driver) -> None: async def test_light_set_brightness_and_color_temp( - hass: HomeAssistant, hk_driver, events: list[Event] + hass: HomeAssistant, hk_driver, events ) -> None: """Test light with all chars in one go.""" entity_id = "light.demo" @@ -1450,7 +1434,7 @@ async def test_light_set_brightness_and_color_temp( assert acc.char_color_temp.value == 224 # Set from HomeKit - call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") hk_driver.set_characteristics( { diff --git a/tests/components/homekit/test_type_locks.py b/tests/components/homekit/test_type_locks.py index 2961fe52170..4d83fe41f48 100644 --- a/tests/components/homekit/test_type_locks.py +++ b/tests/components/homekit/test_type_locks.py @@ -4,19 +4,26 @@ import pytest from homeassistant.components.homekit.const import ATTR_VALUE from homeassistant.components.homekit.type_locks import Lock -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState +from homeassistant.components.lock import ( + DOMAIN, + STATE_JAMMED, + STATE_LOCKING, + STATE_UNLOCKING, +) from homeassistant.const import ( ATTR_CODE, ATTR_ENTITY_ID, + STATE_LOCKED, STATE_UNAVAILABLE, STATE_UNKNOWN, + STATE_UNLOCKED, ) -from homeassistant.core import Event, HomeAssistant +from homeassistant.core import HomeAssistant from tests.common import async_mock_service -async def test_lock_unlock(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: +async def test_lock_unlock(hass: HomeAssistant, hk_driver, events) -> None: """Test if accessory and HA are updated accordingly.""" code = "1234" config = {ATTR_CODE: code} @@ -33,27 +40,27 @@ async def test_lock_unlock(hass: HomeAssistant, hk_driver, events: list[Event]) assert acc.char_current_state.value == 3 assert acc.char_target_state.value == 1 - hass.states.async_set(entity_id, LockState.LOCKED) + hass.states.async_set(entity_id, STATE_LOCKED) await hass.async_block_till_done() assert acc.char_current_state.value == 1 assert acc.char_target_state.value == 1 - hass.states.async_set(entity_id, 
LockState.LOCKING) + hass.states.async_set(entity_id, STATE_LOCKING) await hass.async_block_till_done() assert acc.char_current_state.value == 0 assert acc.char_target_state.value == 1 - hass.states.async_set(entity_id, LockState.UNLOCKED) + hass.states.async_set(entity_id, STATE_UNLOCKED) await hass.async_block_till_done() assert acc.char_current_state.value == 0 assert acc.char_target_state.value == 0 - hass.states.async_set(entity_id, LockState.UNLOCKING) + hass.states.async_set(entity_id, STATE_UNLOCKING) await hass.async_block_till_done() assert acc.char_current_state.value == 1 assert acc.char_target_state.value == 0 - hass.states.async_set(entity_id, LockState.JAMMED) + hass.states.async_set(entity_id, STATE_JAMMED) await hass.async_block_till_done() assert acc.char_current_state.value == 2 assert acc.char_target_state.value == 0 @@ -71,7 +78,7 @@ async def test_lock_unlock(hass: HomeAssistant, hk_driver, events: list[Event]) assert acc.char_target_state.value == 0 assert acc.available is False - hass.states.async_set(entity_id, LockState.UNLOCKED) + hass.states.async_set(entity_id, STATE_UNLOCKED) await hass.async_block_till_done() assert acc.char_current_state.value == 0 assert acc.char_target_state.value == 0 @@ -91,8 +98,8 @@ async def test_lock_unlock(hass: HomeAssistant, hk_driver, events: list[Event]) assert acc.char_target_state.value == 0 # Set from HomeKit - call_lock = async_mock_service(hass, LOCK_DOMAIN, "lock") - call_unlock = async_mock_service(hass, LOCK_DOMAIN, "unlock") + call_lock = async_mock_service(hass, DOMAIN, "lock") + call_unlock = async_mock_service(hass, DOMAIN, "unlock") acc.char_target_state.client_update_value(1) await hass.async_block_till_done() @@ -114,9 +121,7 @@ async def test_lock_unlock(hass: HomeAssistant, hk_driver, events: list[Event]) @pytest.mark.parametrize("config", [{}, {ATTR_CODE: None}]) -async def test_no_code( - hass: HomeAssistant, hk_driver, config, events: list[Event] -) -> None: +async def test_no_code(hass: HomeAssistant, hk_driver, config, events) -> None: """Test accessory if lock doesn't require a code.""" entity_id = "lock.kitchen_door" @@ -125,7 +130,7 @@ async def test_no_code( acc = Lock(hass, hk_driver, "Lock", entity_id, 2, config) # Set from HomeKit - call_lock = async_mock_service(hass, LOCK_DOMAIN, "lock") + call_lock = async_mock_service(hass, DOMAIN, "lock") acc.char_target_state.client_update_value(1) await hass.async_block_till_done() diff --git a/tests/components/homekit/test_type_media_players.py b/tests/components/homekit/test_type_media_players.py index 78c35b15790..fb7233e5262 100644 --- a/tests/components/homekit/test_type_media_players.py +++ b/tests/components/homekit/test_type_media_players.py @@ -25,7 +25,7 @@ from homeassistant.components.media_player import ( ATTR_INPUT_SOURCE_LIST, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, - DOMAIN as MEDIA_PLAYER_DOMAIN, + DOMAIN, MediaPlayerDeviceClass, ) from homeassistant.const import ( @@ -40,15 +40,13 @@ from homeassistant.const import ( STATE_PLAYING, STATE_STANDBY, ) -from homeassistant.core import CoreState, Event, HomeAssistant +from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import async_mock_service -async def test_media_player_set_state( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_media_player_set_state(hass: HomeAssistant, hk_driver, events) -> None: """Test if accessory and HA are updated accordingly.""" config = { 
CONF_FEATURE_LIST: { @@ -112,12 +110,12 @@ async def test_media_player_set_state( assert acc.chars[FEATURE_PLAY_STOP].value is False # Set from HomeKit - call_turn_on = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "turn_off") - call_media_play = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_play") - call_media_pause = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_pause") - call_media_stop = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_stop") - call_toggle_mute = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_mute") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") + call_media_play = async_mock_service(hass, DOMAIN, "media_play") + call_media_pause = async_mock_service(hass, DOMAIN, "media_pause") + call_media_stop = async_mock_service(hass, DOMAIN, "media_stop") + call_toggle_mute = async_mock_service(hass, DOMAIN, "volume_mute") acc.chars[FEATURE_ON_OFF].client_update_value(True) await hass.async_block_till_done() @@ -179,10 +177,7 @@ async def test_media_player_set_state( async def test_media_player_television( - hass: HomeAssistant, - hk_driver, - events: list[Event], - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture ) -> None: """Test if television accessory and HA are updated accordingly.""" entity_id = "media_player.television" @@ -252,18 +247,16 @@ async def test_media_player_television( assert caplog.records[-2].levelname == "DEBUG" # Set from HomeKit - call_turn_on = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "turn_off") - call_media_play = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_play") - call_media_pause = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_pause") - call_media_play_pause = async_mock_service( - hass, MEDIA_PLAYER_DOMAIN, "media_play_pause" - ) - call_toggle_mute = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_mute") - call_select_source = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "select_source") - call_volume_up = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_up") - call_volume_down = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_down") - call_volume_set = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_set") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") + call_media_play = async_mock_service(hass, DOMAIN, "media_play") + call_media_pause = async_mock_service(hass, DOMAIN, "media_pause") + call_media_play_pause = async_mock_service(hass, DOMAIN, "media_play_pause") + call_toggle_mute = async_mock_service(hass, DOMAIN, "volume_mute") + call_select_source = async_mock_service(hass, DOMAIN, "select_source") + call_volume_up = async_mock_service(hass, DOMAIN, "volume_up") + call_volume_down = async_mock_service(hass, DOMAIN, "volume_down") + call_volume_set = async_mock_service(hass, DOMAIN, "volume_set") acc.char_active.client_update_value(1) await hass.async_block_till_done() @@ -373,7 +366,7 @@ async def test_media_player_television( async def test_media_player_television_basic( - hass: HomeAssistant, hk_driver, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture ) -> None: """Test if basic television accessory and HA are updated accordingly.""" entity_id = 
"media_player.television" @@ -416,7 +409,7 @@ async def test_media_player_television_basic( async def test_media_player_television_supports_source_select_no_sources( - hass: HomeAssistant, hk_driver + hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture ) -> None: """Test if basic tv that supports source select but is missing a source list.""" entity_id = "media_player.television" @@ -436,7 +429,7 @@ async def test_media_player_television_supports_source_select_no_sources( async def test_tv_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -489,7 +482,7 @@ async def test_tv_restore( async def test_media_player_television_max_sources( - hass: HomeAssistant, hk_driver + hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture ) -> None: """Test if television accessory that reaches the maximum number of sources.""" entity_id = "media_player.television" @@ -548,7 +541,7 @@ async def test_media_player_television_max_sources( async def test_media_player_television_duplicate_sources( - hass: HomeAssistant, hk_driver + hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture ) -> None: """Test if television accessory with duplicate sources.""" entity_id = "media_player.television" @@ -593,7 +586,7 @@ async def test_media_player_television_duplicate_sources( async def test_media_player_television_unsafe_chars( - hass: HomeAssistant, hk_driver, events: list[Event] + hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture ) -> None: """Test if television accessory with unsafe characters.""" entity_id = "media_player.television" @@ -636,7 +629,7 @@ async def test_media_player_television_unsafe_chars( await hass.async_block_till_done() assert acc.char_input_source.value == 1 - call_select_source = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "select_source") + call_select_source = async_mock_service(hass, DOMAIN, "select_source") acc.char_input_source.client_update_value(3) await hass.async_block_till_done() diff --git a/tests/components/homekit/test_type_remote.py b/tests/components/homekit/test_type_remote.py index 62c45c6ee89..bd4ead58a7b 100644 --- a/tests/components/homekit/test_type_remote.py +++ b/tests/components/homekit/test_type_remote.py @@ -16,7 +16,7 @@ from homeassistant.components.remote import ( ATTR_ACTIVITY, ATTR_ACTIVITY_LIST, ATTR_CURRENT_ACTIVITY, - DOMAIN as REMOTE_DOMAIN, + DOMAIN, RemoteEntityFeature, ) from homeassistant.const import ( @@ -26,13 +26,13 @@ from homeassistant.const import ( STATE_ON, STATE_STANDBY, ) -from homeassistant.core import Event, HomeAssistant +from homeassistant.core import HomeAssistant from tests.common import async_mock_service async def test_activity_remote( - hass: HomeAssistant, hk_driver: HomeDriver, events: list[Event] + hass: HomeAssistant, hk_driver: HomeDriver, events, caplog: pytest.LogCaptureFixture ) -> None: """Test if remote accessory and HA are updated accordingly.""" entity_id = "remote.harmony" @@ -91,8 +91,8 @@ async def test_activity_remote( assert acc.char_input_source.value == 1 # Set from HomeKit - call_turn_on = async_mock_service(hass, REMOTE_DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, REMOTE_DOMAIN, "turn_off") + call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, DOMAIN, 
"turn_off") acc.char_active.client_update_value(1) await hass.async_block_till_done() @@ -156,10 +156,7 @@ async def test_activity_remote( async def test_activity_remote_bad_names( - hass: HomeAssistant, - hk_driver, - events: list[Event], - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture ) -> None: """Test if remote accessory with invalid names works as expected.""" entity_id = "remote.harmony" diff --git a/tests/components/homekit/test_type_security_systems.py b/tests/components/homekit/test_type_security_systems.py index 94b0e68e76d..18434a345ce 100644 --- a/tests/components/homekit/test_type_security_systems.py +++ b/tests/components/homekit/test_type_security_systems.py @@ -4,26 +4,29 @@ from pyhap.loader import get_loader import pytest from homeassistant.components.alarm_control_panel import ( - DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, + DOMAIN, AlarmControlPanelEntityFeature, - AlarmControlPanelState, ) from homeassistant.components.homekit.const import ATTR_VALUE from homeassistant.components.homekit.type_security_systems import SecuritySystem from homeassistant.const import ( ATTR_CODE, ATTR_ENTITY_ID, - STATE_UNAVAILABLE, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMED_VACATION, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMED, + STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) -from homeassistant.core import Event, HomeAssistant +from homeassistant.core import HomeAssistant from tests.common import async_mock_service -async def test_switch_set_state( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_switch_set_state(hass: HomeAssistant, hk_driver, events) -> None: """Test if accessory and HA are updated accordingly.""" code = "1234" config = {ATTR_CODE: code} @@ -41,27 +44,27 @@ async def test_switch_set_state( assert acc.char_current_state.value == 3 assert acc.char_target_state.value == 3 - hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_AWAY) + hass.states.async_set(entity_id, STATE_ALARM_ARMED_AWAY) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 1 - hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_HOME) + hass.states.async_set(entity_id, STATE_ALARM_ARMED_HOME) await hass.async_block_till_done() assert acc.char_target_state.value == 0 assert acc.char_current_state.value == 0 - hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_NIGHT) + hass.states.async_set(entity_id, STATE_ALARM_ARMED_NIGHT) await hass.async_block_till_done() assert acc.char_target_state.value == 2 assert acc.char_current_state.value == 2 - hass.states.async_set(entity_id, AlarmControlPanelState.DISARMED) + hass.states.async_set(entity_id, STATE_ALARM_DISARMED) await hass.async_block_till_done() assert acc.char_target_state.value == 3 assert acc.char_current_state.value == 3 - hass.states.async_set(entity_id, AlarmControlPanelState.TRIGGERED) + hass.states.async_set(entity_id, STATE_ALARM_TRIGGERED) await hass.async_block_till_done() assert acc.char_target_state.value == 3 assert acc.char_current_state.value == 4 @@ -72,16 +75,10 @@ async def test_switch_set_state( assert acc.char_current_state.value == 4 # Set from HomeKit - call_arm_home = async_mock_service( - hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_arm_home" - ) - call_arm_away = async_mock_service( - hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_arm_away" - ) - call_arm_night = async_mock_service( - hass, 
ALARM_CONTROL_PANEL_DOMAIN, "alarm_arm_night" - ) - call_disarm = async_mock_service(hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_disarm") + call_arm_home = async_mock_service(hass, DOMAIN, "alarm_arm_home") + call_arm_away = async_mock_service(hass, DOMAIN, "alarm_arm_away") + call_arm_night = async_mock_service(hass, DOMAIN, "alarm_arm_night") + call_disarm = async_mock_service(hass, DOMAIN, "alarm_disarm") acc.char_target_state.client_update_value(0) await hass.async_block_till_done() @@ -121,9 +118,7 @@ async def test_switch_set_state( @pytest.mark.parametrize("config", [{}, {ATTR_CODE: None}]) -async def test_no_alarm_code( - hass: HomeAssistant, hk_driver, config, events: list[Event] -) -> None: +async def test_no_alarm_code(hass: HomeAssistant, hk_driver, config, events) -> None: """Test accessory if security_system doesn't require an alarm_code.""" entity_id = "alarm_control_panel.test" @@ -132,9 +127,7 @@ async def test_no_alarm_code( acc = SecuritySystem(hass, hk_driver, "SecuritySystem", entity_id, 2, config) # Set from HomeKit - call_arm_home = async_mock_service( - hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_arm_home" - ) + call_arm_home = async_mock_service(hass, DOMAIN, "alarm_arm_home") acc.char_target_state.client_update_value(0) await hass.async_block_till_done() @@ -146,7 +139,7 @@ async def test_no_alarm_code( assert events[-1].data[ATTR_VALUE] is None -async def test_arming(hass: HomeAssistant, hk_driver) -> None: +async def test_arming(hass: HomeAssistant, hk_driver, events) -> None: """Test to make sure arming sets the right state.""" entity_id = "alarm_control_panel.test" @@ -156,48 +149,48 @@ async def test_arming(hass: HomeAssistant, hk_driver) -> None: acc.run() await hass.async_block_till_done() - hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_AWAY) + hass.states.async_set(entity_id, STATE_ALARM_ARMED_AWAY) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 1 - hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_HOME) + hass.states.async_set(entity_id, STATE_ALARM_ARMED_HOME) await hass.async_block_till_done() assert acc.char_target_state.value == 0 assert acc.char_current_state.value == 0 - hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_VACATION) + hass.states.async_set(entity_id, STATE_ALARM_ARMED_VACATION) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 1 - hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_NIGHT) + hass.states.async_set(entity_id, STATE_ALARM_ARMED_NIGHT) await hass.async_block_till_done() assert acc.char_target_state.value == 2 assert acc.char_current_state.value == 2 - hass.states.async_set(entity_id, AlarmControlPanelState.ARMING) + hass.states.async_set(entity_id, STATE_ALARM_ARMING) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 3 - hass.states.async_set(entity_id, AlarmControlPanelState.DISARMED) + hass.states.async_set(entity_id, STATE_ALARM_DISARMED) await hass.async_block_till_done() assert acc.char_target_state.value == 3 assert acc.char_current_state.value == 3 - hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_AWAY) + hass.states.async_set(entity_id, STATE_ALARM_ARMED_AWAY) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 1 - hass.states.async_set(entity_id, AlarmControlPanelState.TRIGGERED) + 
hass.states.async_set(entity_id, STATE_ALARM_TRIGGERED) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 4 -async def test_supported_states(hass: HomeAssistant, hk_driver) -> None: +async def test_supported_states(hass: HomeAssistant, hk_driver, events) -> None: """Test different supported states.""" code = "1234" config = {ATTR_CODE: code} @@ -312,33 +305,3 @@ async def test_supported_states(hass: HomeAssistant, hk_driver) -> None: for val in valid_target_values.values(): assert val in test_config.get("target_values") - - -@pytest.mark.parametrize( - ("state"), - [ - (None), - ("None"), - (STATE_UNKNOWN), - (STATE_UNAVAILABLE), - ], -) -async def test_handle_non_alarm_states( - hass: HomeAssistant, hk_driver, events: list[Event], state: str -) -> None: - """Test we can handle states that should not raise.""" - code = "1234" - config = {ATTR_CODE: code} - entity_id = "alarm_control_panel.test" - - hass.states.async_set(entity_id, state) - await hass.async_block_till_done() - acc = SecuritySystem(hass, hk_driver, "SecuritySystem", entity_id, 2, config) - acc.run() - await hass.async_block_till_done() - - assert acc.aid == 2 - assert acc.category == 11 # AlarmSystem - - assert acc.char_current_state.value == 3 - assert acc.char_target_state.value == 3 diff --git a/tests/components/homekit/test_type_sensors.py b/tests/components/homekit/test_type_sensors.py index 2bfddf4d4c6..fc68b7c8ecf 100644 --- a/tests/components/homekit/test_type_sensors.py +++ b/tests/components/homekit/test_type_sensors.py @@ -30,9 +30,10 @@ from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, PERCENTAGE, + STATE_HOME, + STATE_NOT_HOME, STATE_OFF, STATE_ON, - STATE_UNAVAILABLE, STATE_UNKNOWN, UnitOfTemperature, ) @@ -212,16 +213,6 @@ async def test_pm25(hass: HomeAssistant, hk_driver) -> None: assert acc.char_density.value == 0 assert acc.char_quality.value == 0 - hass.states.async_set(entity_id, "8") - await hass.async_block_till_done() - assert acc.char_density.value == 8 - assert acc.char_quality.value == 1 - - hass.states.async_set(entity_id, "12") - await hass.async_block_till_done() - assert acc.char_density.value == 12 - assert acc.char_quality.value == 2 - hass.states.async_set(entity_id, "23") await hass.async_block_till_done() assert acc.char_density.value == 23 @@ -534,11 +525,11 @@ async def test_binary(hass: HomeAssistant, hk_driver) -> None: await hass.async_block_till_done() assert acc.char_detected.value == 0 - hass.states.async_set(entity_id, STATE_UNKNOWN, {ATTR_DEVICE_CLASS: "opening"}) + hass.states.async_set(entity_id, STATE_HOME, {ATTR_DEVICE_CLASS: "opening"}) await hass.async_block_till_done() - assert acc.char_detected.value == 0 + assert acc.char_detected.value == 1 - hass.states.async_set(entity_id, STATE_UNAVAILABLE, {ATTR_DEVICE_CLASS: "opening"}) + hass.states.async_set(entity_id, STATE_NOT_HOME, {ATTR_DEVICE_CLASS: "opening"}) await hass.async_block_till_done() assert acc.char_detected.value == 0 @@ -578,15 +569,13 @@ async def test_motion_uses_bool(hass: HomeAssistant, hk_driver) -> None: assert acc.char_detected.value is False hass.states.async_set( - entity_id, STATE_UNKNOWN, {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION} + entity_id, STATE_HOME, {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION} ) await hass.async_block_till_done() - assert acc.char_detected.value is False + assert acc.char_detected.value is True hass.states.async_set( - entity_id, - STATE_UNAVAILABLE, - 
{ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION}, + entity_id, STATE_NOT_HOME, {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION} ) await hass.async_block_till_done() assert acc.char_detected.value is False @@ -612,7 +601,7 @@ async def test_binary_device_classes(hass: HomeAssistant, hk_driver) -> None: async def test_sensor_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -656,7 +645,7 @@ async def test_bad_name(hass: HomeAssistant, hk_driver) -> None: assert acc.category == 10 # Sensor assert acc.char_humidity.value == 20 - assert acc.display_name == "Humid" + assert acc.display_name == "--Humid--" async def test_empty_name(hass: HomeAssistant, hk_driver) -> None: diff --git a/tests/components/homekit/test_type_switches.py b/tests/components/homekit/test_type_switches.py index 9b708f18b8a..27937babc57 100644 --- a/tests/components/homekit/test_type_switches.py +++ b/tests/components/homekit/test_type_switches.py @@ -17,7 +17,6 @@ from homeassistant.components.homekit.type_switches import ( Switch, Vacuum, Valve, - ValveSwitch, ) from homeassistant.components.select import ATTR_OPTIONS from homeassistant.components.vacuum import ( @@ -34,23 +33,17 @@ from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_TYPE, - SERVICE_CLOSE_VALVE, - SERVICE_OPEN_VALVE, SERVICE_SELECT_OPTION, - STATE_CLOSED, STATE_OFF, STATE_ON, - STATE_OPEN, ) -from homeassistant.core import Event, HomeAssistant, split_entity_id +from homeassistant.core import HomeAssistant, split_entity_id import homeassistant.util.dt as dt_util from tests.common import async_fire_time_changed, async_mock_service -async def test_outlet_set_state( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_outlet_set_state(hass: HomeAssistant, hk_driver, events) -> None: """Test if Outlet accessory and HA are updated accordingly.""" entity_id = "switch.outlet_test" @@ -103,7 +96,7 @@ async def test_outlet_set_state( ], ) async def test_switch_set_state( - hass: HomeAssistant, hk_driver, entity_id, attrs, events: list[Event] + hass: HomeAssistant, hk_driver, entity_id, attrs, events ) -> None: """Test if accessory and HA are updated accordingly.""" domain = split_entity_id(entity_id)[0] @@ -147,36 +140,32 @@ async def test_switch_set_state( assert events[-1].data[ATTR_VALUE] is None -async def test_valve_switch_set_state( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_valve_set_state(hass: HomeAssistant, hk_driver, events) -> None: """Test if Valve accessory and HA are updated accordingly.""" entity_id = "switch.valve_test" hass.states.async_set(entity_id, None) await hass.async_block_till_done() - acc = ValveSwitch(hass, hk_driver, "Valve", entity_id, 2, {CONF_TYPE: TYPE_FAUCET}) + acc = Valve(hass, hk_driver, "Valve", entity_id, 2, {CONF_TYPE: TYPE_FAUCET}) acc.run() await hass.async_block_till_done() assert acc.category == 29 # Faucet assert acc.char_valve_type.value == 3 # Water faucet - acc = ValveSwitch(hass, hk_driver, "Valve", entity_id, 3, {CONF_TYPE: TYPE_SHOWER}) + acc = Valve(hass, hk_driver, "Valve", entity_id, 3, {CONF_TYPE: TYPE_SHOWER}) acc.run() await hass.async_block_till_done() assert acc.category == 30 # Shower assert acc.char_valve_type.value == 2 # Shower head - acc = ValveSwitch( - hass, hk_driver, "Valve", 
entity_id, 4, {CONF_TYPE: TYPE_SPRINKLER} - ) + acc = Valve(hass, hk_driver, "Valve", entity_id, 4, {CONF_TYPE: TYPE_SPRINKLER}) acc.run() await hass.async_block_till_done() assert acc.category == 28 # Sprinkler assert acc.char_valve_type.value == 1 # Irrigation - acc = ValveSwitch(hass, hk_driver, "Valve", entity_id, 5, {CONF_TYPE: TYPE_VALVE}) + acc = Valve(hass, hk_driver, "Valve", entity_id, 5, {CONF_TYPE: TYPE_VALVE}) acc.run() await hass.async_block_till_done() @@ -198,59 +187,8 @@ async def test_valve_switch_set_state( assert acc.char_in_use.value == 0 # Set from HomeKit - call_turn_on = async_mock_service(hass, "switch", SERVICE_TURN_ON) - call_turn_off = async_mock_service(hass, "switch", SERVICE_TURN_OFF) - - acc.char_active.client_update_value(1) - await hass.async_block_till_done() - assert acc.char_in_use.value == 1 - assert call_turn_on - assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id - assert len(events) == 1 - assert events[-1].data[ATTR_VALUE] is None - - acc.char_active.client_update_value(0) - await hass.async_block_till_done() - assert acc.char_in_use.value == 0 - assert call_turn_off - assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id - assert len(events) == 2 - assert events[-1].data[ATTR_VALUE] is None - - -async def test_valve_set_state( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: - """Test if Valve accessory and HA are updated accordingly.""" - entity_id = "valve.valve_test" - - hass.states.async_set(entity_id, None) - await hass.async_block_till_done() - - acc = Valve(hass, hk_driver, "Valve", entity_id, 5, {CONF_TYPE: TYPE_VALVE}) - acc.run() - await hass.async_block_till_done() - - assert acc.aid == 5 - assert acc.category == 29 # Faucet - - assert acc.char_active.value == 0 - assert acc.char_in_use.value == 0 - assert acc.char_valve_type.value == 0 # Generic Valve - - hass.states.async_set(entity_id, STATE_OPEN) - await hass.async_block_till_done() - assert acc.char_active.value == 1 - assert acc.char_in_use.value == 1 - - hass.states.async_set(entity_id, STATE_CLOSED) - await hass.async_block_till_done() - assert acc.char_active.value == 0 - assert acc.char_in_use.value == 0 - - # Set from HomeKit - call_turn_on = async_mock_service(hass, "valve", SERVICE_OPEN_VALVE) - call_turn_off = async_mock_service(hass, "valve", SERVICE_CLOSE_VALVE) + call_turn_on = async_mock_service(hass, "switch", "turn_on") + call_turn_off = async_mock_service(hass, "switch", "turn_off") acc.char_active.client_update_value(1) await hass.async_block_till_done() @@ -270,7 +208,7 @@ async def test_valve_set_state( async def test_vacuum_set_state_with_returnhome_and_start_support( - hass: HomeAssistant, hk_driver, events: list[Event] + hass: HomeAssistant, hk_driver, events ) -> None: """Test if Vacuum accessory and HA are updated accordingly.""" entity_id = "vacuum.roomba" @@ -339,7 +277,7 @@ async def test_vacuum_set_state_with_returnhome_and_start_support( async def test_vacuum_set_state_without_returnhome_and_start_support( - hass: HomeAssistant, hk_driver, events: list[Event] + hass: HomeAssistant, hk_driver, events ) -> None: """Test if Vacuum accessory and HA are updated accordingly.""" entity_id = "vacuum.roomba" @@ -384,9 +322,7 @@ async def test_vacuum_set_state_without_returnhome_and_start_support( assert events[-1].data[ATTR_VALUE] is None -async def test_reset_switch( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_reset_switch(hass: HomeAssistant, hk_driver, events) -> None: """Test if switch 
accessory is reset correctly.""" domain = "scene" entity_id = "scene.test" @@ -430,9 +366,7 @@ async def test_reset_switch( assert len(events) == 1 -async def test_script_switch( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_script_switch(hass: HomeAssistant, hk_driver, events) -> None: """Test if script switch accessory is reset correctly.""" domain = "script" entity_id = "script.test" @@ -481,7 +415,7 @@ async def test_script_switch( ["input_select", "select"], ) async def test_input_select_switch( - hass: HomeAssistant, hk_driver, events: list[Event], domain + hass: HomeAssistant, hk_driver, events, domain ) -> None: """Test if select switch accessory is handled correctly.""" entity_id = f"{domain}.test" @@ -536,9 +470,7 @@ async def test_input_select_switch( "domain", ["button", "input_button"], ) -async def test_button_switch( - hass: HomeAssistant, hk_driver, events: list[Event], domain -) -> None: +async def test_button_switch(hass: HomeAssistant, hk_driver, events, domain) -> None: """Test switch accessory from a (input) button entity.""" entity_id = f"{domain}.test" diff --git a/tests/components/homekit/test_type_thermostats.py b/tests/components/homekit/test_type_thermostats.py index 8454610566b..ca2a02cb440 100644 --- a/tests/components/homekit/test_type_thermostats.py +++ b/tests/components/homekit/test_type_thermostats.py @@ -74,13 +74,13 @@ from homeassistant.const import ( STATE_UNKNOWN, UnitOfTemperature, ) -from homeassistant.core import CoreState, Event, HomeAssistant +from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import async_mock_service -async def test_thermostat(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: +async def test_thermostat(hass: HomeAssistant, hk_driver, events) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" base_attrs = { @@ -161,40 +161,6 @@ async def test_thermostat(hass: HomeAssistant, hk_driver, events: list[Event]) - assert acc.char_current_temp.value == 23.0 assert acc.char_display_units.value == 0 - hass.states.async_set( - entity_id, - HVACMode.HEAT, - { - **base_attrs, - ATTR_TEMPERATURE: 22.2, - ATTR_CURRENT_TEMPERATURE: 17.8, - ATTR_HVAC_ACTION: HVACAction.PREHEATING, - }, - ) - await hass.async_block_till_done() - assert acc.char_target_temp.value == 22.2 - assert acc.char_current_heat_cool.value == 1 - assert acc.char_target_heat_cool.value == 1 - assert acc.char_current_temp.value == 17.8 - assert acc.char_display_units.value == 0 - - hass.states.async_set( - entity_id, - HVACMode.HEAT, - { - **base_attrs, - ATTR_TEMPERATURE: 22.2, - ATTR_CURRENT_TEMPERATURE: 17.8, - ATTR_HVAC_ACTION: HVACAction.DEFROSTING, - }, - ) - await hass.async_block_till_done() - assert acc.char_target_temp.value == 22.2 - assert acc.char_current_heat_cool.value == 1 - assert acc.char_target_heat_cool.value == 1 - assert acc.char_current_temp.value == 17.8 - assert acc.char_display_units.value == 0 - hass.states.async_set( entity_id, HVACMode.FAN_ONLY, @@ -409,9 +375,7 @@ async def test_thermostat(hass: HomeAssistant, hk_driver, events: list[Event]) - assert events[-1].data[ATTR_VALUE] == "TargetHeatingCoolingState to 3" -async def test_thermostat_auto( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_thermostat_auto(hass: HomeAssistant, hk_driver, events) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" base_attrs 
= { @@ -545,7 +509,7 @@ async def test_thermostat_auto( async def test_thermostat_mode_and_temp_change( - hass: HomeAssistant, hk_driver, events: list[Event] + hass: HomeAssistant, hk_driver, events ) -> None: """Test if accessory where the mode and temp change in the same call.""" entity_id = "climate.test" @@ -652,9 +616,7 @@ async def test_thermostat_mode_and_temp_change( ) -async def test_thermostat_humidity( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_thermostat_humidity(hass: HomeAssistant, hk_driver, events) -> None: """Test if accessory and HA are updated accordingly with humidity.""" entity_id = "climate.test" base_attrs = {ATTR_SUPPORTED_FEATURES: 4} @@ -718,7 +680,7 @@ async def test_thermostat_humidity( async def test_thermostat_humidity_with_target_humidity( - hass: HomeAssistant, hk_driver + hass: HomeAssistant, hk_driver, events ) -> None: """Test if accessory and HA are updated accordingly with humidity without target hudmidity. @@ -742,9 +704,7 @@ async def test_thermostat_humidity_with_target_humidity( assert acc.char_current_humidity.value == 65 -async def test_thermostat_power_state( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_thermostat_power_state(hass: HomeAssistant, hk_driver, events) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" base_attrs = { @@ -852,9 +812,7 @@ async def test_thermostat_power_state( assert acc.char_target_heat_cool.value == 2 -async def test_thermostat_fahrenheit( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_thermostat_fahrenheit(hass: HomeAssistant, hk_driver, events) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" @@ -1011,7 +969,7 @@ async def test_thermostat_temperature_step_whole( async def test_thermostat_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -1542,7 +1500,7 @@ async def test_thermostat_hvac_modes_without_off( async def test_thermostat_without_target_temp_only_range( - hass: HomeAssistant, hk_driver, events: list[Event] + hass: HomeAssistant, hk_driver, events ) -> None: """Test a thermostat that only supports a range.""" entity_id = "climate.test" @@ -1704,9 +1662,7 @@ async def test_thermostat_without_target_temp_only_range( assert events[-1].data[ATTR_VALUE] == "HeatingThresholdTemperature to 27.0°C" -async def test_water_heater( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_water_heater(hass: HomeAssistant, hk_driver, events) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "water_heater.test" @@ -1780,9 +1736,7 @@ async def test_water_heater( assert acc.char_target_heat_cool.value == 1 -async def test_water_heater_fahrenheit( - hass: HomeAssistant, hk_driver, events: list[Event] -) -> None: +async def test_water_heater_fahrenheit(hass: HomeAssistant, hk_driver, events) -> None: """Test if accessory and HA are update accordingly.""" entity_id = "water_heater.test" @@ -1845,7 +1799,7 @@ async def test_water_heater_get_temperature_range( async def test_water_heater_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events ) -> None: """Test setting up 
an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -1895,7 +1849,7 @@ async def test_water_heater_restore( async def test_thermostat_with_no_modes_when_we_first_see( - hass: HomeAssistant, hk_driver + hass: HomeAssistant, hk_driver, events ) -> None: """Test if a thermostat that is not ready when we first see it.""" entity_id = "climate.test" @@ -1949,7 +1903,7 @@ async def test_thermostat_with_no_modes_when_we_first_see( async def test_thermostat_with_no_off_after_recheck( - hass: HomeAssistant, hk_driver + hass: HomeAssistant, hk_driver, events ) -> None: """Test if a thermostat that is not ready when we first see it that actually does not have off.""" entity_id = "climate.test" @@ -2002,7 +1956,9 @@ async def test_thermostat_with_no_off_after_recheck( assert mock_reload.called -async def test_thermostat_with_temp_clamps(hass: HomeAssistant, hk_driver) -> None: +async def test_thermostat_with_temp_clamps( + hass: HomeAssistant, hk_driver, events +) -> None: """Test that temperatures are clamped to valid values to prevent homekit crash.""" entity_id = "climate.test" base_attrs = { @@ -2057,7 +2013,7 @@ async def test_thermostat_with_temp_clamps(hass: HomeAssistant, hk_driver) -> No async def test_thermostat_with_fan_modes_with_auto( - hass: HomeAssistant, hk_driver + hass: HomeAssistant, hk_driver, events ) -> None: """Test a thermostate with fan modes with an auto fan mode.""" entity_id = "climate.test" @@ -2263,7 +2219,7 @@ async def test_thermostat_with_fan_modes_with_auto( async def test_thermostat_with_fan_modes_with_off( - hass: HomeAssistant, hk_driver + hass: HomeAssistant, hk_driver, events ) -> None: """Test a thermostate with fan modes that can turn off.""" entity_id = "climate.test" @@ -2372,7 +2328,7 @@ async def test_thermostat_with_fan_modes_with_off( async def test_thermostat_with_fan_modes_set_to_none( - hass: HomeAssistant, hk_driver + hass: HomeAssistant, hk_driver, events ) -> None: """Test a thermostate with fan modes set to None.""" entity_id = "climate.test" @@ -2416,7 +2372,7 @@ async def test_thermostat_with_fan_modes_set_to_none( async def test_thermostat_with_fan_modes_set_to_none_not_supported( - hass: HomeAssistant, hk_driver + hass: HomeAssistant, hk_driver, events ) -> None: """Test a thermostate with fan modes set to None and supported feature missing.""" entity_id = "climate.test" @@ -2459,7 +2415,7 @@ async def test_thermostat_with_fan_modes_set_to_none_not_supported( async def test_thermostat_with_supported_features_target_temp_but_fan_mode_set( - hass: HomeAssistant, hk_driver + hass: HomeAssistant, hk_driver, events ) -> None: """Test a thermostate with fan mode and supported feature missing.""" entity_id = "climate.test" @@ -2496,7 +2452,9 @@ async def test_thermostat_with_supported_features_target_temp_but_fan_mode_set( assert not acc.fan_chars -async def test_thermostat_handles_unknown_state(hass: HomeAssistant, hk_driver) -> None: +async def test_thermostat_handles_unknown_state( + hass: HomeAssistant, hk_driver, events +) -> None: """Test a thermostat can handle unknown state.""" entity_id = "climate.test" attrs = { diff --git a/tests/components/homekit/test_type_triggers.py b/tests/components/homekit/test_type_triggers.py index f7415ef5599..7471e0bff1c 100644 --- a/tests/components/homekit/test_type_triggers.py +++ b/tests/components/homekit/test_type_triggers.py @@ -7,7 +7,7 @@ from homeassistant.components.homekit.const import CHAR_PROGRAMMABLE_SWITCH_EVEN from homeassistant.components.homekit.type_triggers 
import DeviceTriggerAccessory from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, async_get_device_automations @@ -16,7 +16,9 @@ from tests.common import MockConfigEntry, async_get_device_automations async def test_programmable_switch_button_fires_on_trigger( hass: HomeAssistant, hk_driver, + events, demo_cleanup, + device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: """Test that DeviceTriggerAccessory fires the programmable switch event on trigger.""" diff --git a/tests/components/homekit/test_util.py b/tests/components/homekit/test_util.py index 7f7e3ee0ce0..24999242dc1 100644 --- a/tests/components/homekit/test_util.py +++ b/tests/components/homekit/test_util.py @@ -7,38 +7,13 @@ import voluptuous as vol from homeassistant.components.homekit.const import ( BRIDGE_NAME, - CONF_AUDIO_CODEC, - CONF_AUDIO_MAP, - CONF_AUDIO_PACKET_SIZE, CONF_FEATURE, CONF_FEATURE_LIST, CONF_LINKED_BATTERY_SENSOR, - CONF_LINKED_DOORBELL_SENSOR, - CONF_LINKED_MOTION_SENSOR, CONF_LOW_BATTERY_THRESHOLD, - CONF_MAX_FPS, - CONF_MAX_HEIGHT, - CONF_MAX_WIDTH, - CONF_STREAM_COUNT, - CONF_SUPPORT_AUDIO, CONF_THRESHOLD_CO, CONF_THRESHOLD_CO2, - CONF_VIDEO_CODEC, - CONF_VIDEO_MAP, - CONF_VIDEO_PACKET_SIZE, - DEFAULT_AUDIO_CODEC, - DEFAULT_AUDIO_MAP, - DEFAULT_AUDIO_PACKET_SIZE, DEFAULT_CONFIG_FLOW_PORT, - DEFAULT_LOW_BATTERY_THRESHOLD, - DEFAULT_MAX_FPS, - DEFAULT_MAX_HEIGHT, - DEFAULT_MAX_WIDTH, - DEFAULT_STREAM_COUNT, - DEFAULT_SUPPORT_AUDIO, - DEFAULT_VIDEO_CODEC, - DEFAULT_VIDEO_MAP, - DEFAULT_VIDEO_PACKET_SIZE, DOMAIN, FEATURE_ON_OFF, FEATURE_PLAY_PAUSE, @@ -203,31 +178,6 @@ def test_validate_entity_config() -> None: assert vec({"sensor.co2": {CONF_THRESHOLD_CO2: 500}}) == { "sensor.co2": {CONF_THRESHOLD_CO2: 500, CONF_LOW_BATTERY_THRESHOLD: 20} } - assert vec( - { - "camera.demo": { - CONF_LINKED_DOORBELL_SENSOR: "event.doorbell", - CONF_LINKED_MOTION_SENSOR: "event.motion", - } - } - ) == { - "camera.demo": { - CONF_LINKED_DOORBELL_SENSOR: "event.doorbell", - CONF_LINKED_MOTION_SENSOR: "event.motion", - CONF_AUDIO_CODEC: DEFAULT_AUDIO_CODEC, - CONF_SUPPORT_AUDIO: DEFAULT_SUPPORT_AUDIO, - CONF_MAX_WIDTH: DEFAULT_MAX_WIDTH, - CONF_MAX_HEIGHT: DEFAULT_MAX_HEIGHT, - CONF_MAX_FPS: DEFAULT_MAX_FPS, - CONF_AUDIO_MAP: DEFAULT_AUDIO_MAP, - CONF_VIDEO_MAP: DEFAULT_VIDEO_MAP, - CONF_STREAM_COUNT: DEFAULT_STREAM_COUNT, - CONF_VIDEO_CODEC: DEFAULT_VIDEO_CODEC, - CONF_AUDIO_PACKET_SIZE: DEFAULT_AUDIO_PACKET_SIZE, - CONF_VIDEO_PACKET_SIZE: DEFAULT_VIDEO_PACKET_SIZE, - CONF_LOW_BATTERY_THRESHOLD: DEFAULT_LOW_BATTERY_THRESHOLD, - } - } def test_validate_media_player_features() -> None: @@ -280,15 +230,14 @@ def test_temperature_to_states() -> None: def test_density_to_air_quality() -> None: """Test map PM2.5 density to HomeKit AirQuality level.""" assert density_to_air_quality(0) == 1 - assert density_to_air_quality(9) == 1 - assert density_to_air_quality(9.1) == 2 - assert density_to_air_quality(12) == 2 + assert density_to_air_quality(12) == 1 + assert density_to_air_quality(12.1) == 2 assert density_to_air_quality(35.4) == 2 assert density_to_air_quality(35.5) == 3 assert density_to_air_quality(55.4) == 3 assert density_to_air_quality(55.5) == 4 - assert density_to_air_quality(125.4) == 4 - assert 
density_to_air_quality(125.5) == 5 + assert density_to_air_quality(150.4) == 4 + assert density_to_air_quality(150.5) == 5 assert density_to_air_quality(200) == 5 @@ -307,12 +256,7 @@ async def test_async_show_setup_msg(hass: HomeAssistant, hk_driver) -> None: hass, entry.entry_id, "bridge_name", pincode, "X-HM://0" ) await hass.async_block_till_done() - - # New tests should not access runtime data. - # Do not use this pattern for new tests. - entry_data: HomeKitEntryData = hass.config_entries.async_get_entry( - entry.entry_id - ).runtime_data + entry_data: HomeKitEntryData = hass.data[DOMAIN][entry.entry_id] assert entry_data.pairing_qr_secret assert entry_data.pairing_qr diff --git a/tests/components/homekit_controller/common.py b/tests/components/homekit_controller/common.py index b94a267104b..1360b463e4a 100644 --- a/tests/components/homekit_controller/common.py +++ b/tests/components/homekit_controller/common.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import Callable from dataclasses import dataclass from datetime import timedelta import logging @@ -12,8 +11,12 @@ from unittest import mock from aiohomekit.controller.abstract import AbstractDescription, AbstractPairing from aiohomekit.hkjson import loads as hkloads -from aiohomekit.model import Accessories, AccessoriesState, Accessory -from aiohomekit.model.services import Service +from aiohomekit.model import ( + Accessories, + AccessoriesState, + Accessory, + mixin as model_mixin, +) from aiohomekit.testing import FakeController, FakePairing from homeassistant.components.device_automation import DeviceAutomationType @@ -179,7 +182,7 @@ class Helper: return state -async def time_changed(hass: HomeAssistant, seconds: int) -> None: +async def time_changed(hass, seconds): """Trigger time changed.""" next_update = dt_util.utcnow() + timedelta(seconds) async_fire_time_changed(hass, next_update) @@ -195,7 +198,7 @@ async def setup_accessories_from_file(hass: HomeAssistant, path: str) -> Accesso return Accessories.from_list(accessories_json) -async def setup_platform(hass: HomeAssistant) -> FakeController: +async def setup_platform(hass): """Load the platform but with a fake Controller API.""" config = {"discovery": {}} @@ -207,9 +210,7 @@ async def setup_platform(hass: HomeAssistant) -> FakeController: return await async_get_controller(hass) -async def setup_test_accessories( - hass: HomeAssistant, accessories: list[Accessory], connection: str | None = None -) -> tuple[MockConfigEntry, AbstractPairing]: +async def setup_test_accessories(hass, accessories, connection=None): """Load a fake homekit device based on captured JSON profile.""" fake_controller = await setup_platform(hass) return await setup_test_accessories_with_controller( @@ -218,11 +219,8 @@ async def setup_test_accessories( async def setup_test_accessories_with_controller( - hass: HomeAssistant, - accessories: list[Accessory], - fake_controller: FakeController, - connection: str | None = None, -) -> tuple[MockConfigEntry, AbstractPairing]: + hass, accessories, fake_controller, connection=None +): """Load a fake homekit device based on captured JSON profile.""" pairing_id = "00:00:00:00:00:00" @@ -284,13 +282,8 @@ async def device_config_changed(hass: HomeAssistant, accessories: Accessories): async def setup_test_component( - hass: HomeAssistant, - aid: int, - setup_accessory: Callable[[Accessory], Service | None], - capitalize: bool = False, - suffix: str | None = None, - connection: str | None = None, -) -> Helper: + hass, setup_accessory, 
capitalize=False, suffix=None, connection=None +): """Load a fake homekit accessory based on a homekit accessory model. If capitalize is True, property names will be in upper case. @@ -298,7 +291,7 @@ async def setup_test_component( If suffix is set, entityId will include the suffix """ accessory = Accessory.create_with_info( - aid, "TestDevice", "example.com", "Test", "0001", "0.1" + "TestDevice", "example.com", "Test", "0001", "0.1" ) setup_accessory(accessory) @@ -404,3 +397,8 @@ async def assert_devices_and_entities_created( # Root device must not have a via, otherwise its not the device assert root_device.via_device_id is None + + +def get_next_aid(): + """Get next aid.""" + return model_mixin.id_counter + 1 diff --git a/tests/components/homekit_controller/conftest.py b/tests/components/homekit_controller/conftest.py index eea3f4b67f2..427c5285436 100644 --- a/tests/components/homekit_controller/conftest.py +++ b/tests/components/homekit_controller/conftest.py @@ -1,6 +1,5 @@ """HomeKit controller session fixtures.""" -from collections.abc import Callable, Generator import datetime from unittest.mock import MagicMock, patch @@ -8,6 +7,7 @@ from aiohomekit.testing import FakeController from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory import pytest +from typing_extensions import Generator import homeassistant.util.dt as dt_util @@ -44,16 +44,3 @@ def hk_mock_async_zeroconf(mock_async_zeroconf: MagicMock) -> None: @pytest.fixture(autouse=True) def auto_mock_bluetooth(mock_bluetooth: None) -> None: """Auto mock bluetooth.""" - - -@pytest.fixture -def get_next_aid() -> Generator[Callable[[], int]]: - """Generate a function that returns increasing accessory ids.""" - id_counter = 0 - - def _get_id(): - nonlocal id_counter - id_counter += 1 - return id_counter - - return _get_id diff --git a/tests/components/homekit_controller/fixtures/somfy_venetian_blinds.json b/tests/components/homekit_controller/fixtures/somfy_venetian_blinds.json deleted file mode 100644 index 65d3126cc4b..00000000000 --- a/tests/components/homekit_controller/fixtures/somfy_venetian_blinds.json +++ /dev/null @@ -1,146 +0,0 @@ -[ - { - "aid": 1, - "services": [ - { - "iid": 1, - "type": "0000003E-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 2, - "perms": ["pr"], - "format": "string", - "value": "VELUX Internal Cover", - "description": "Name", - "maxLen": 64 - }, - { - "type": "00000020-0000-1000-8000-0026BB765291", - "iid": 3, - "perms": ["pr"], - "format": "string", - "value": "Netatmo", - "description": "Manufacturer", - "maxLen": 64 - }, - { - "type": "00000021-0000-1000-8000-0026BB765291", - "iid": 4, - "perms": ["pr"], - "format": "string", - "value": "VELUX Internal Cover", - "description": "Model", - "maxLen": 64 - }, - { - "type": "00000030-0000-1000-8000-0026BB765291", - "iid": 5, - "perms": ["pr"], - "format": "string", - "value": "**REDACTED**", - "description": "Serial Number", - "maxLen": 64 - }, - { - "type": "00000014-0000-1000-8000-0026BB765291", - "iid": 7, - "perms": ["pw"], - "format": "bool", - "description": "Identify" - }, - { - "type": "00000052-0000-1000-8000-0026BB765291", - "iid": 6, - "perms": ["pr"], - "format": "string", - "value": "0.0.0", - "description": "Firmware Revision", - "maxLen": 64 - }, - { - "type": "00000220-0000-1000-8000-0026BB765291", - "iid": 15, - "perms": ["pr"], - "format": "data", - "value": "+nvrOv1cCQU=" - } - ] - }, - { - "iid": 8, - "type": 
"0000008C-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 9, - "perms": ["pr"], - "format": "string", - "value": "Venetian Blinds", - "description": "Name", - "maxLen": 64 - }, - { - "type": "0000007C-0000-1000-8000-0026BB765291", - "iid": 11, - "perms": ["pr", "pw", "ev"], - "format": "uint8", - "value": 0, - "description": "Target Position", - "unit": "percentage", - "minValue": 0, - "maxValue": 100, - "minStep": 1 - }, - { - "type": "0000006D-0000-1000-8000-0026BB765291", - "iid": 10, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 0, - "description": "Current Position", - "unit": "percentage", - "minValue": 0, - "maxValue": 100, - "minStep": 1 - }, - { - "type": "00000072-0000-1000-8000-0026BB765291", - "iid": 12, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 2, - "description": "Position State", - "minValue": 0, - "maxValue": 2, - "minStep": 1 - }, - { - "type": "0000006C-0000-1000-8000-0026BB765291", - "iid": 13, - "perms": ["pr", "ev"], - "format": "int", - "value": 90, - "description": "Current Horizontal Tilt Angle", - "unit": "arcdegrees", - "minValue": -90, - "maxValue": 90, - "minStep": 1 - }, - { - "type": "0000007B-0000-1000-8000-0026BB765291", - "iid": 14, - "perms": ["pr", "pw", "ev"], - "format": "int", - "value": 90, - "description": "Target Horizontal Tilt Angle", - "unit": "arcdegrees", - "minValue": -90, - "maxValue": 90, - "minStep": 1 - } - ] - } - ] - } -] diff --git a/tests/components/homekit_controller/fixtures/u_by_moen_ts3304.json b/tests/components/homekit_controller/fixtures/u_by_moen_ts3304.json deleted file mode 100644 index a3c24eb85c3..00000000000 --- a/tests/components/homekit_controller/fixtures/u_by_moen_ts3304.json +++ /dev/null @@ -1,378 +0,0 @@ -[ - { - "aid": 1, - "services": [ - { - "iid": 1, - "type": "0000003E-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 2, - "perms": ["pr"], - "format": "string", - "value": "U by Moen-015F44", - "description": "Name", - "maxLen": 64 - }, - { - "type": "00000020-0000-1000-8000-0026BB765291", - "iid": 3, - "perms": ["pr"], - "format": "string", - "value": "Moen Incorporated", - "description": "Manufacturer", - "maxLen": 64 - }, - { - "type": "00000021-0000-1000-8000-0026BB765291", - "iid": 4, - "perms": ["pr"], - "format": "string", - "value": "TS3304", - "description": "Model", - "maxLen": 64 - }, - { - "type": "00000030-0000-1000-8000-0026BB765291", - "iid": 5, - "perms": ["pr"], - "format": "string", - "value": "**REDACTED**", - "description": "Serial Number", - "maxLen": 64 - }, - { - "type": "00000014-0000-1000-8000-0026BB765291", - "iid": 6, - "perms": ["pw"], - "format": "bool", - "description": "Identify" - }, - { - "type": "00000052-0000-1000-8000-0026BB765291", - "iid": 7, - "perms": ["pr"], - "format": "string", - "value": "3.3.0", - "description": "Firmware Revision", - "maxLen": 64 - } - ] - }, - { - "iid": 8, - "type": "000000D7-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "000000B0-0000-1000-8000-0026BB765291", - "iid": 9, - "perms": ["pr", "pw", "ev"], - "format": "uint8", - "value": 0, - "description": "Active", - "minValue": 0, - "maxValue": 1, - "minStep": 1 - }, - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 10, - "perms": ["pr"], - "format": "string", - "value": "u by moen", - "description": "Name", - "maxLen": 64 - } - ], - "linked": [11, 17, 22, 27, 32] - }, - { - "iid": 11, - "type": 
"000000BC-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "000000B0-0000-1000-8000-0026BB765291", - "iid": 12, - "perms": ["pr", "pw", "ev"], - "format": "uint8", - "value": 0, - "description": "Active", - "minValue": 0, - "maxValue": 1, - "minStep": 1 - }, - { - "type": "00000011-0000-1000-8000-0026BB765291", - "iid": 13, - "perms": ["pr", "ev"], - "format": "float", - "value": 21.66666, - "description": "Current Temperature", - "unit": "celsius", - "minValue": 0.0, - "maxValue": 100.0, - "minStep": 0.1 - }, - { - "type": "000000B1-0000-1000-8000-0026BB765291", - "iid": 14, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 0, - "description": "Current Heater Cooler State", - "minValue": 0, - "maxValue": 3, - "minStep": 1 - }, - { - "type": "000000B2-0000-1000-8000-0026BB765291", - "iid": 15, - "perms": ["pr", "pw", "ev"], - "format": "uint8", - "value": 0, - "description": "Target Heater Cooler State", - "minValue": 0, - "maxValue": 2, - "minStep": 1 - }, - { - "type": "00000012-0000-1000-8000-0026BB765291", - "iid": 16, - "perms": ["pr", "pw", "ev"], - "format": "float", - "value": 37.77777, - "description": "Heating Threshold Temperature", - "unit": "celsius", - "minValue": 15.55556, - "maxValue": 48.88888, - "minStep": 0.1 - } - ] - }, - { - "iid": 17, - "type": "000000D0-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "000000B0-0000-1000-8000-0026BB765291", - "iid": 18, - "perms": ["pr", "pw", "ev"], - "format": "uint8", - "value": 0, - "description": "Active", - "minValue": 0, - "maxValue": 1, - "minStep": 1 - }, - { - "type": "000000D2-0000-1000-8000-0026BB765291", - "iid": 19, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 0, - "description": "In Use" - }, - { - "type": "000000D5-0000-1000-8000-0026BB765291", - "iid": 20, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 2, - "description": "Valve Type" - }, - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 21, - "perms": ["pr"], - "format": "string", - "value": "Outlet 1", - "description": "Name", - "maxLen": 64 - } - ] - }, - { - "iid": 22, - "type": "000000D0-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "000000B0-0000-1000-8000-0026BB765291", - "iid": 23, - "perms": ["pr", "pw", "ev"], - "format": "uint8", - "value": 0, - "description": "Active", - "minValue": 0, - "maxValue": 1, - "minStep": 1 - }, - { - "type": "000000D2-0000-1000-8000-0026BB765291", - "iid": 24, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 0, - "description": "In Use" - }, - { - "type": "000000D5-0000-1000-8000-0026BB765291", - "iid": 25, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 2, - "description": "Valve Type" - }, - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 26, - "perms": ["pr"], - "format": "string", - "value": "Outlet 2", - "description": "Name", - "maxLen": 64 - } - ] - }, - { - "iid": 27, - "type": "000000D0-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "000000B0-0000-1000-8000-0026BB765291", - "iid": 28, - "perms": ["pr", "pw", "ev"], - "format": "uint8", - "value": 0, - "description": "Active", - "minValue": 0, - "maxValue": 1, - "minStep": 1 - }, - { - "type": "000000D2-0000-1000-8000-0026BB765291", - "iid": 29, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 0, - "description": "In Use" - }, - { - "type": "000000D5-0000-1000-8000-0026BB765291", - "iid": 30, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 2, - "description": "Valve Type" - }, - { - "type": 
"00000023-0000-1000-8000-0026BB765291", - "iid": 31, - "perms": ["pr"], - "format": "string", - "value": "Outlet 3", - "description": "Name", - "maxLen": 64 - } - ] - }, - { - "iid": 32, - "type": "000000D0-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "000000B0-0000-1000-8000-0026BB765291", - "iid": 33, - "perms": ["pr", "pw", "ev"], - "format": "uint8", - "value": 0, - "description": "Active", - "minValue": 0, - "maxValue": 1, - "minStep": 1 - }, - { - "type": "000000D2-0000-1000-8000-0026BB765291", - "iid": 34, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 0, - "description": "In Use" - }, - { - "type": "000000D5-0000-1000-8000-0026BB765291", - "iid": 35, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 2, - "description": "Valve Type" - }, - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 36, - "perms": ["pr"], - "format": "string", - "value": "Outlet 4", - "description": "Name", - "maxLen": 64 - } - ] - }, - { - "iid": 37, - "type": "00000010-0000-1000-8000-001D4B474349", - "characteristics": [ - { - "type": "00000011-0000-1000-8000-001D4B474349", - "iid": 38, - "perms": ["pr", "ev", "hd"], - "format": "uint8", - "value": 1 - }, - { - "type": "00000012-0000-1000-8000-001D4B474349", - "iid": 39, - "perms": ["pw", "hd"], - "format": "uint8" - }, - { - "type": "00000013-0000-1000-8000-001D4B474349", - "iid": 40, - "perms": ["pw", "hd"], - "format": "string", - "maxLen": 64 - }, - { - "type": "00000014-0000-1000-8000-001D4B474349", - "iid": 41, - "perms": ["pw", "hd"], - "format": "string", - "maxLen": 64 - }, - { - "type": "00000015-0000-1000-8000-001D4B474349", - "iid": 42, - "perms": ["pw", "hd"], - "format": "string", - "maxLen": 64 - } - ] - }, - { - "iid": 43, - "type": "000000A2-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "00000037-0000-1000-8000-0026BB765291", - "iid": 44, - "perms": ["pr"], - "format": "string", - "value": "1.1.0", - "description": "Version", - "maxLen": 64 - } - ] - } - ] - } -] diff --git a/tests/components/homekit_controller/fixtures/velux_active_netatmo_co2.json b/tests/components/homekit_controller/fixtures/velux_active_netatmo_co2.json deleted file mode 100644 index 80b2b34648e..00000000000 --- a/tests/components/homekit_controller/fixtures/velux_active_netatmo_co2.json +++ /dev/null @@ -1,162 +0,0 @@ -[ - { - "aid": 1, - "services": [ - { - "iid": 1, - "type": "0000003E-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 2, - "perms": ["pr"], - "format": "string", - "value": "VELUX Sensor", - "description": "Name", - "maxLen": 64 - }, - { - "type": "00000020-0000-1000-8000-0026BB765291", - "iid": 3, - "perms": ["pr"], - "format": "string", - "value": "Netatmo", - "description": "Manufacturer", - "maxLen": 64 - }, - { - "type": "00000021-0000-1000-8000-0026BB765291", - "iid": 4, - "perms": ["pr"], - "format": "string", - "value": "VELUX Sensor", - "description": "Model", - "maxLen": 64 - }, - { - "type": "00000030-0000-1000-8000-0026BB765291", - "iid": 5, - "perms": ["pr"], - "format": "string", - "value": "**REDACTED**", - "description": "Serial Number", - "maxLen": 64 - }, - { - "type": "00000014-0000-1000-8000-0026BB765291", - "iid": 7, - "perms": ["pw"], - "format": "bool", - "description": "Identify" - }, - { - "type": "00000052-0000-1000-8000-0026BB765291", - "iid": 6, - "perms": ["pr"], - "format": "string", - "value": "16.0.0", - "description": "Firmware Revision", - "maxLen": 64 - }, - { - "type": 
"00000220-0000-1000-8000-0026BB765291", - "iid": 18, - "perms": ["pr"], - "format": "data", - "value": "+nvrOv1cCQU=" - } - ] - }, - { - "iid": 8, - "type": "0000008A-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 9, - "perms": ["pr"], - "format": "string", - "value": "Temperature sensor", - "description": "Name", - "maxLen": 64 - }, - { - "type": "00000011-0000-1000-8000-0026BB765291", - "iid": 10, - "perms": ["pr", "ev"], - "format": "float", - "value": 23.9, - "description": "Current Temperature", - "unit": "celsius", - "minValue": 0.0, - "maxValue": 50.0, - "minStep": 0.1 - } - ] - }, - { - "iid": 11, - "type": "00000082-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 12, - "perms": ["pr"], - "format": "string", - "value": "Humidity sensor", - "description": "Name", - "maxLen": 64 - }, - { - "type": "00000010-0000-1000-8000-0026BB765291", - "iid": 13, - "perms": ["pr", "ev"], - "format": "float", - "value": 69.0, - "description": "Current Relative Humidity", - "unit": "percentage", - "minValue": 0.0, - "maxValue": 100.0, - "minStep": 1.0 - } - ] - }, - { - "iid": 14, - "type": "00000097-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 15, - "perms": ["pr"], - "format": "string", - "value": "Carbon Dioxide sensor", - "description": "Name", - "maxLen": 64 - }, - { - "type": "00000092-0000-1000-8000-0026BB765291", - "iid": 16, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 0, - "description": "Carbon Dioxide Detected", - "minValue": 0, - "maxValue": 1, - "minStep": 1 - }, - { - "type": "00000093-0000-1000-8000-0026BB765291", - "iid": 17, - "perms": ["pr", "ev"], - "format": "float", - "value": 1124.0, - "description": "Carbon Dioxide Level", - "minValue": 0.0, - "maxValue": 5000.0 - } - ] - } - ] - } -] diff --git a/tests/components/homekit_controller/fixtures/velux_window.json b/tests/components/homekit_controller/fixtures/velux_window.json deleted file mode 100644 index 4d9a09344bb..00000000000 --- a/tests/components/homekit_controller/fixtures/velux_window.json +++ /dev/null @@ -1,122 +0,0 @@ -[ - { - "aid": 1, - "services": [ - { - "iid": 1, - "type": "0000003E-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 2, - "perms": ["pr"], - "format": "string", - "value": "VELUX Window", - "description": "Name", - "maxLen": 64 - }, - { - "type": "00000020-0000-1000-8000-0026BB765291", - "iid": 3, - "perms": ["pr"], - "format": "string", - "value": "Netatmo", - "description": "Manufacturer", - "maxLen": 64 - }, - { - "type": "00000021-0000-1000-8000-0026BB765291", - "iid": 4, - "perms": ["pr"], - "format": "string", - "value": "VELUX Window", - "description": "Model", - "maxLen": 64 - }, - { - "type": "00000030-0000-1000-8000-0026BB765291", - "iid": 5, - "perms": ["pr"], - "format": "string", - "value": "**REDACTED**", - "description": "Serial Number", - "maxLen": 64 - }, - { - "type": "00000014-0000-1000-8000-0026BB765291", - "iid": 7, - "perms": ["pw"], - "format": "bool", - "description": "Identify" - }, - { - "type": "00000052-0000-1000-8000-0026BB765291", - "iid": 6, - "perms": ["pr"], - "format": "string", - "value": "0.0.0", - "description": "Firmware Revision", - "maxLen": 64 - }, - { - "type": "00000220-0000-1000-8000-0026BB765291", - "iid": 13, - "perms": ["pr"], - "format": "data", - "value": "+nvrOv1cCQU=" 
- } - ] - }, - { - "iid": 8, - "type": "0000008B-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 9, - "perms": ["pr"], - "format": "string", - "value": "Roof Window", - "description": "Name", - "maxLen": 64 - }, - { - "type": "0000007C-0000-1000-8000-0026BB765291", - "iid": 11, - "perms": ["pr", "pw", "ev"], - "format": "uint8", - "value": 0, - "description": "Target Position", - "unit": "percentage", - "minValue": 0, - "maxValue": 100, - "minStep": 1 - }, - { - "type": "0000006D-0000-1000-8000-0026BB765291", - "iid": 10, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 0, - "description": "Current Position", - "unit": "percentage", - "minValue": 0, - "maxValue": 100, - "minStep": 1 - }, - { - "type": "00000072-0000-1000-8000-0026BB765291", - "iid": 12, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 2, - "description": "Position State", - "minValue": 0, - "maxValue": 2, - "minStep": 1 - } - ] - } - ] - } -] diff --git a/tests/components/homekit_controller/fixtures/velux_window_cover.json b/tests/components/homekit_controller/fixtures/velux_window_cover.json deleted file mode 100644 index d95fbbd42bf..00000000000 --- a/tests/components/homekit_controller/fixtures/velux_window_cover.json +++ /dev/null @@ -1,122 +0,0 @@ -[ - { - "aid": 1, - "services": [ - { - "iid": 1, - "type": "0000003E-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 2, - "perms": ["pr"], - "format": "string", - "value": "VELUX External Cover", - "description": "Name", - "maxLen": 64 - }, - { - "type": "00000020-0000-1000-8000-0026BB765291", - "iid": 3, - "perms": ["pr"], - "format": "string", - "value": "Netatmo", - "description": "Manufacturer", - "maxLen": 64 - }, - { - "type": "00000021-0000-1000-8000-0026BB765291", - "iid": 4, - "perms": ["pr"], - "format": "string", - "value": "VELUX External Cover", - "description": "Model", - "maxLen": 64 - }, - { - "type": "00000030-0000-1000-8000-0026BB765291", - "iid": 5, - "perms": ["pr"], - "format": "string", - "value": "**REDACTED**", - "description": "Serial Number", - "maxLen": 64 - }, - { - "type": "00000014-0000-1000-8000-0026BB765291", - "iid": 7, - "perms": ["pw"], - "format": "bool", - "description": "Identify" - }, - { - "type": "00000052-0000-1000-8000-0026BB765291", - "iid": 6, - "perms": ["pr"], - "format": "string", - "value": "15.0.0", - "description": "Firmware Revision", - "maxLen": 64 - }, - { - "type": "00000220-0000-1000-8000-0026BB765291", - "iid": 13, - "perms": ["pr"], - "format": "data", - "value": "+nvrOv1cCQU=" - } - ] - }, - { - "iid": 8, - "type": "0000008C-0000-1000-8000-0026BB765291", - "characteristics": [ - { - "type": "00000023-0000-1000-8000-0026BB765291", - "iid": 9, - "perms": ["pr"], - "format": "string", - "value": "Awning Blinds", - "description": "Name", - "maxLen": 64 - }, - { - "type": "0000007C-0000-1000-8000-0026BB765291", - "iid": 11, - "perms": ["pr", "pw", "ev"], - "format": "uint8", - "value": 0, - "description": "Target Position", - "unit": "percentage", - "minValue": 0, - "maxValue": 100, - "minStep": 1 - }, - { - "type": "0000006D-0000-1000-8000-0026BB765291", - "iid": 10, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 0, - "description": "Current Position", - "unit": "percentage", - "minValue": 0, - "maxValue": 100, - "minStep": 1 - }, - { - "type": "00000072-0000-1000-8000-0026BB765291", - "iid": 12, - "perms": ["pr", "ev"], - "format": "uint8", - "value": 2, - 
"description": "Position State", - "minValue": 0, - "maxValue": 2, - "minStep": 1 - } - ] - } - ] - } -] diff --git a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index 8304d567916..394a442787d 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -24,7 +24,6 @@ ]), 'manufacturer': 'Sleekpoint Innovations', 'model': 'AP2', - 'model_id': None, 'name': 'Airversa AP2 1808', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -103,7 +102,7 @@ 'original_name': 'Airversa AP2 1808 AirPurifier', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1_32832', 'unit_of_measurement': None, @@ -115,7 +114,7 @@ 'percentage_step': 20.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.airversa_ap2_1808_airpurifier', 'state': 'off', @@ -622,7 +621,6 @@ ]), 'manufacturer': 'Anker', 'model': 'T8010', - 'model_id': None, 'name': 'eufy HomeBase2-0AAA', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -697,7 +695,6 @@ ]), 'manufacturer': 'Anker', 'model': 'T8113', - 'model_id': None, 'name': 'eufyCam2-0000', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -940,7 +937,6 @@ ]), 'manufacturer': 'Anker', 'model': 'T8113', - 'model_id': None, 'name': 'eufyCam2-000A', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -1183,7 +1179,6 @@ ]), 'manufacturer': 'Anker', 'model': 'T8113', - 'model_id': None, 'name': 'eufyCam2-000A', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -1430,7 +1425,6 @@ ]), 'manufacturer': 'Aqara', 'model': 'HE1-G01', - 'model_id': None, 'name': 'Aqara-Hub-E1-00A0', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -1638,7 +1632,6 @@ ]), 'manufacturer': 'Aqara', 'model': 'AS006', - 'model_id': None, 'name': 'Contact Sensor', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -1804,7 +1797,6 @@ ]), 'manufacturer': 'Aqara', 'model': 'ZHWA11LM', - 'model_id': None, 'name': 'Aqara Hub-1563', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -2081,7 +2073,6 @@ ]), 'manufacturer': 'Aqara', 'model': 'AR004', - 'model_id': None, 'name': 'Programmable Switch', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -2206,7 +2197,6 @@ ]), 'manufacturer': 'Netgear, Inc', 'model': 'ABC1000', - 'model_id': None, 'name': 'ArloBabyA0', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -2692,7 +2682,6 @@ ]), 'manufacturer': 'ConnectSense', 'model': 'CS-IWO', - 'model_id': None, 'name': 'InWall Outlet-0394DE', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -3123,7 +3112,6 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', - 'model_id': None, 'name': 'Basement', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -3284,7 +3272,6 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ecobee3', - 'model_id': None, 'name': 'HomeW', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -3740,7 +3727,6 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', - 'model_id': None, 'name': 'Kitchen', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -3901,7 +3887,6 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', - 'model_id': None, 'name': 'Porch', 'name_by_user': None, 'primary_config_entry': 
'TestData', @@ -4066,7 +4051,6 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ecobee3', - 'model_id': None, 'name': 'HomeW', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -4526,7 +4510,6 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', - 'model_id': None, 'name': 'Basement', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -4642,7 +4625,6 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ecobee3', - 'model_id': None, 'name': 'HomeW', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -4925,7 +4907,6 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', - 'model_id': None, 'name': 'Kitchen', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -5086,7 +5067,6 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', - 'model_id': None, 'name': 'Porch', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -5251,7 +5231,6 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ECB501', - 'model_id': None, 'name': 'My ecobee', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -5720,7 +5699,6 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ecobee Switch+', - 'model_id': None, 'name': 'Master Fan', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -6011,7 +5989,6 @@ ]), 'manufacturer': 'Elgato', 'model': 'Eve Degree 00AAA0000', - 'model_id': None, 'name': 'Eve Degree AA11', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -6369,7 +6346,6 @@ ]), 'manufacturer': 'Elgato', 'model': 'Eve Energy 20EAO8601', - 'model_id': None, 'name': 'Eve Energy 50FF', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -6709,7 +6685,6 @@ ]), 'manufacturer': 'José A. Jiménez Campos', 'model': 'RavenSystem HAA', - 'model_id': None, 'name': 'HAA-C718B3', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -6869,7 +6844,7 @@ 'original_name': 'HAA-C718B3', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1_8', 'unit_of_measurement': None, @@ -6881,7 +6856,7 @@ 'percentage_step': 33.333333333333336, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.haa_c718b3', 'state': 'on', @@ -6916,7 +6891,6 @@ ]), 'manufacturer': 'José A. Jiménez Campos', 'model': 'RavenSystem HAA', - 'model_id': None, 'name': 'HAA-C718B3', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7010,6 +6984,324 @@ }), ]) # --- +# name: test_snapshots[haa_fan] + list([ + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'José A. 
Jiménez Campos', + 'model': 'RavenSystem HAA', + 'name': 'HAA-C718B3', + 'name_by_user': None, + 'serial_number': 'C718B3-1', + 'suggested_area': None, + 'sw_version': '5.0.18', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.haa_c718b3_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'HAA-C718B3 Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'HAA-C718B3 Identify', + }), + 'entity_id': 'button.haa_c718b3_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.haa_c718b3_setup', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'HAA-C718B3 Setup', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'setup', + 'unique_id': '00:00:00:00:00:00_1_1010_1012', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'HAA-C718B3 Setup', + }), + 'entity_id': 'button.haa_c718b3_setup', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.haa_c718b3_update', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'HAA-C718B3 Update', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1010_1011', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'update', + 'friendly_name': 'HAA-C718B3 Update', + }), + 'entity_id': 'button.haa_c718b3_update', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': None, + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.haa_c718b3', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'HAA-C718B3', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 
'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'HAA-C718B3', + 'percentage': 66, + 'percentage_step': 33.333333333333336, + 'preset_mode': None, + 'preset_modes': None, + 'supported_features': , + }), + 'entity_id': 'fan.haa_c718b3', + 'state': 'on', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:2', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'José A. Jiménez Campos', + 'model': 'RavenSystem HAA', + 'name': 'HAA-C718B3', + 'name_by_user': None, + 'serial_number': 'C718B3-2', + 'suggested_area': None, + 'sw_version': '5.0.18', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.haa_c718b3_identify_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'HAA-C718B3 Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_2_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'HAA-C718B3 Identify', + }), + 'entity_id': 'button.haa_c718b3_identify_2', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.haa_c718b3', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'HAA-C718B3', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_2_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'HAA-C718B3', + }), + 'entity_id': 'switch.haa_c718b3', + 'state': 'off', + }), + }), + ]), + }), + ]) +# --- # name: test_snapshots[home_assistant_bridge_basic_cover] list([ dict({ @@ -7035,7 +7327,6 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', - 'model_id': None, 'name': 'Family Room North', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7198,7 +7489,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', - 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7273,7 +7563,6 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', - 'model_id': None, 'name': 'Kitchen Window', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7440,7 +7729,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', - 'model_id': None, 'name': 
'Ceiling Fan', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7519,7 +7807,7 @@ 'original_name': 'Ceiling Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_766313939_8', 'unit_of_measurement': None, @@ -7531,7 +7819,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.ceiling_fan', 'state': 'off', @@ -7562,7 +7850,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', - 'model_id': None, 'name': 'Home Assistant Bridge', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7637,7 +7924,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', - 'model_id': None, 'name': 'Living Room Fan', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7716,7 +8002,7 @@ 'original_name': 'Living Room Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1256851357_8', 'unit_of_measurement': None, @@ -7729,7 +8015,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.living_room_fan', 'state': 'off', @@ -7764,7 +8050,6 @@ ]), 'manufacturer': 'Lookin', 'model': 'Climate Control', - 'model_id': None, 'name': '89 Living Room', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7905,7 +8190,7 @@ 'original_name': '89 Living Room', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1233851541_175', 'unit_of_measurement': None, @@ -7918,7 +8203,7 @@ 'percentage_step': 33.333333333333336, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.89_living_room', 'state': 'on', @@ -8088,7 +8373,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', - 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8167,7 +8451,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', - 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8242,7 +8525,6 @@ ]), 'manufacturer': 'FirstAlert', 'model': '1039102', - 'model_id': None, 'name': 'Laundry Smoke ED78', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8417,7 +8699,6 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', - 'model_id': None, 'name': 'Family Room North', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8580,7 +8861,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', - 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8655,7 +8935,6 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', - 'model_id': None, 'name': 'Kitchen Window', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8822,7 +9101,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', - 'model_id': None, 'name': 'Ceiling Fan', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8901,7 +9179,7 @@ 'original_name': 'Ceiling Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': 
'00:00:00:00:00:00_766313939_8', 'unit_of_measurement': None, @@ -8913,7 +9191,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.ceiling_fan', 'state': 'off', @@ -8944,7 +9222,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', - 'model_id': None, 'name': 'Home Assistant Bridge', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -9019,7 +9296,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', - 'model_id': None, 'name': 'Living Room Fan', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -9098,7 +9374,7 @@ 'original_name': 'Living Room Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1256851357_8', 'unit_of_measurement': None, @@ -9112,7 +9388,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.living_room_fan', 'state': 'off', @@ -9147,7 +9423,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', - 'model_id': None, 'name': 'Home Assistant Bridge', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -9222,7 +9497,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', - 'model_id': None, 'name': 'Living Room Fan', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -9301,7 +9575,7 @@ 'original_name': 'Living Room Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1256851357_8', 'unit_of_measurement': None, @@ -9315,7 +9589,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.living_room_fan', 'state': 'off', @@ -9350,7 +9624,6 @@ ]), 'manufacturer': 'Lookin', 'model': 'Climate Control', - 'model_id': None, 'name': '89 Living Room', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -9500,7 +9773,7 @@ 'original_name': '89 Living Room', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1233851541_175', 'unit_of_measurement': None, @@ -9513,7 +9786,7 @@ 'percentage_step': 33.333333333333336, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.89_living_room', 'state': 'on', @@ -9683,7 +9956,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', - 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -9762,7 +10034,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', - 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -9837,7 +10108,6 @@ ]), 'manufacturer': 'switchbot', 'model': 'WoHumi', - 'model_id': None, 'name': 'Humidifier 182A', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10019,7 +10289,6 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', - 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10094,7 +10363,6 @@ ]), 'manufacturer': 'switchbot', 'model': 'WoHumi', - 'model_id': None, 'name': 'Humidifier 182A', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10276,7 +10544,6 @@ ]), 
'manufacturer': 'Home Assistant', 'model': 'Bridge', - 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10351,7 +10618,6 @@ ]), 'manufacturer': 'FirstAlert', 'model': '1039102', - 'model_id': None, 'name': 'Laundry Smoke ED78', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10541,7 +10807,6 @@ ]), 'manufacturer': 'Garzola Marco', 'model': 'Daikin-fwec3a-esp32-homekit-bridge', - 'model_id': None, 'name': 'Air Conditioner', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10741,7 +11006,6 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW012', - 'model_id': None, 'name': 'Hue ambiance candle', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10879,7 +11143,6 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW012', - 'model_id': None, 'name': 'Hue ambiance candle', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -11017,7 +11280,6 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW012', - 'model_id': None, 'name': 'Hue ambiance candle', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -11155,7 +11417,6 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW012', - 'model_id': None, 'name': 'Hue ambiance candle', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -11293,7 +11554,6 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW013', - 'model_id': None, 'name': 'Hue ambiance spot', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -11441,7 +11701,6 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW013', - 'model_id': None, 'name': 'Hue ambiance spot', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -11589,7 +11848,6 @@ ]), 'manufacturer': 'Philips', 'model': 'RWL021', - 'model_id': None, 'name': 'Hue dimmer switch', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -11906,7 +12164,6 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', - 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12031,7 +12288,6 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', - 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12156,7 +12412,6 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', - 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12281,7 +12536,6 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', - 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12406,7 +12660,6 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', - 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12531,7 +12784,6 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', - 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12656,7 +12908,6 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', - 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12781,7 +13032,6 @@ ]), 'manufacturer': 'Philips Lighting', 'model': 'BSB002', - 'model_id': None, 'name': 'Philips hue - 482544', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12860,7 +13110,6 @@ ]), 'manufacturer': 'Koogeek', 'model': 'LS1', - 'model_id': None, 'name': 'Koogeek-LS1-20833F', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -13004,7 +13253,6 @@ ]), 'manufacturer': 'Koogeek', 'model': 'P1EU', - 'model_id': None, 'name': 
'Koogeek-P1-A00AA0', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -13169,7 +13417,6 @@ ]), 'manufacturer': 'Koogeek', 'model': 'KH02CN', - 'model_id': None, 'name': 'Koogeek-SW2-187A91', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -13373,7 +13620,6 @@ ]), 'manufacturer': 'Lennox', 'model': 'E30 2B', - 'model_id': None, 'name': 'Lennox', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -13655,7 +13901,6 @@ ]), 'manufacturer': 'LG Electronics', 'model': 'OLED55B9PUA', - 'model_id': None, 'name': 'LG webOS TV AF80', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -13836,7 +14081,6 @@ ]), 'manufacturer': 'Lutron Electronics Co., Inc', 'model': 'PD-FSQN-XX', - 'model_id': None, 'name': 'Caséta® Wireless Fan Speed Control', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -13915,7 +14159,7 @@ 'original_name': 'Caséta® Wireless Fan Speed Control', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_21474836482_2', 'unit_of_measurement': None, @@ -13927,7 +14171,7 @@ 'percentage_step': 25.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.caseta_r_wireless_fan_speed_control', 'state': 'off', @@ -13958,7 +14202,6 @@ ]), 'manufacturer': 'Lutron Electronics Co., Inc', 'model': 'L-BDG2-WH', - 'model_id': None, 'name': 'Smart Bridge 2', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -14037,7 +14280,6 @@ ]), 'manufacturer': 'Meross', 'model': 'MSS425F', - 'model_id': None, 'name': 'MSS425F-15cc', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -14316,7 +14558,6 @@ ]), 'manufacturer': 'Meross', 'model': 'MSS565', - 'model_id': None, 'name': 'MSS565-28da', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -14445,7 +14686,6 @@ ]), 'manufacturer': 'Empowered Homes Inc.', 'model': 'v1', - 'model_id': None, 'name': 'Mysa-85dda9', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -14775,7 +15015,6 @@ ]), 'manufacturer': 'Nanoleaf', 'model': 'NL55', - 'model_id': None, 'name': 'Nanoleaf Strip 3B32', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -15047,7 +15286,6 @@ ]), 'manufacturer': 'Netatmo', 'model': 'Netatmo Doorbell', - 'model_id': None, 'name': 'Netatmo-Doorbell-g738658', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -15341,7 +15579,6 @@ ]), 'manufacturer': 'Netatmo', 'model': 'Smart CO Alarm', - 'model_id': None, 'name': 'Smart CO Alarm', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -15502,7 +15739,6 @@ ]), 'manufacturer': 'Netatmo', 'model': 'Healthy Home Coach', - 'model_id': None, 'name': 'Healthy Home Coach', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -15805,7 +16041,6 @@ ]), 'manufacturer': 'Green Electronics LLC', 'model': 'SPK5 Pro', - 'model_id': None, 'name': 'RainMachine-00ce4a', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -16228,7 +16463,6 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', - 'model_id': None, 'name': 'Master Bath South', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -16391,7 +16625,6 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE SmartBridge', - 'model_id': None, 'name': 'RYSE SmartBridge', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -16466,7 +16699,6 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', - 'model_id': None, 'name': 'RYSE 
SmartShade', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -16633,7 +16865,6 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', - 'model_id': None, 'name': 'BR Left', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -16796,7 +17027,6 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', - 'model_id': None, 'name': 'LR Left', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -16959,7 +17189,6 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', - 'model_id': None, 'name': 'LR Right', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -17122,7 +17351,6 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE SmartBridge', - 'model_id': None, 'name': 'RYSE SmartBridge', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -17197,7 +17425,6 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', - 'model_id': None, 'name': 'RZSS', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -17364,7 +17591,6 @@ ]), 'manufacturer': 'Schlage ', 'model': 'BE479CAM619', - 'model_id': None, 'name': 'SENSE ', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -17484,7 +17710,6 @@ ]), 'manufacturer': 'Hunter Fan', 'model': 'SIMPLEconnect', - 'model_id': None, 'name': 'SIMPLEconnect Fan-06F674', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -17563,7 +17788,7 @@ 'original_name': 'SIMPLEconnect Fan-06F674 Hunter Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1_8', 'unit_of_measurement': None, @@ -17576,7 +17801,7 @@ 'percentage_step': 25.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.simpleconnect_fan_06f674_hunter_fan', 'state': 'off', @@ -17636,733 +17861,6 @@ }), ]) # --- -# name: test_snapshots[somfy_venetian_blinds] - list([ - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:1', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Internal Cover', - 'model_id': None, - 'name': 'VELUX Internal Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_internal_cover_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Internal Cover Identify', - }), - 'entity_id': 'button.velux_internal_cover_identify', - 'state': 
'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_internal_cover_venetian_blinds', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Venetian Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'current_tilt_position': 100, - 'friendly_name': 'VELUX Internal Cover Venetian Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_internal_cover_venetian_blinds', - 'state': 'closed', - }), - }), - ]), - }), - ]) -# --- -# name: test_snapshots[u_by_moen_ts3304] - list([ - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:1', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Moen Incorporated', - 'model': 'TS3304', - 'model_id': None, - 'name': 'U by Moen-015F44', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '3.3.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.u_by_moen_015f44_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'U by Moen-015F44 Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1_6', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'U by Moen-015F44 Identify', - }), - 'entity_id': 'button.u_by_moen_015f44_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - 'target_temp_step': 1.0, - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.u_by_moen_015f44', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'U by Moen-015F44', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_11', - 
'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_temperature': 21.7, - 'friendly_name': 'U by Moen-015F44', - 'hvac_action': , - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - 'supported_features': , - 'target_temp_step': 1.0, - 'temperature': None, - }), - 'entity_id': 'climate.u_by_moen_015f44', - 'state': 'off', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.u_by_moen_015f44_current_temperature', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'U by Moen-015F44 Current Temperature', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_11_13', - 'unit_of_measurement': , - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'temperature', - 'friendly_name': 'U by Moen-015F44 Current Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'entity_id': 'sensor.u_by_moen_015f44_current_temperature', - 'state': '21.66666', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.u_by_moen_015f44', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'U by Moen-015F44', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'U by Moen-015F44', - }), - 'entity_id': 'switch.u_by_moen_015f44', - 'state': 'off', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.u_by_moen_015f44_outlet_1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'U by Moen-015F44 Outlet 1', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'valve', - 'unique_id': '00:00:00:00:00:00_1_17', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'U by Moen-015F44 Outlet 1', - 'in_use': False, - }), - 'entity_id': 'switch.u_by_moen_015f44_outlet_1', - 'state': 'off', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 
'switch.u_by_moen_015f44_outlet_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'U by Moen-015F44 Outlet 2', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'valve', - 'unique_id': '00:00:00:00:00:00_1_22', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'U by Moen-015F44 Outlet 2', - 'in_use': False, - }), - 'entity_id': 'switch.u_by_moen_015f44_outlet_2', - 'state': 'off', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.u_by_moen_015f44_outlet_3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'U by Moen-015F44 Outlet 3', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'valve', - 'unique_id': '00:00:00:00:00:00_1_27', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'U by Moen-015F44 Outlet 3', - 'in_use': False, - }), - 'entity_id': 'switch.u_by_moen_015f44_outlet_3', - 'state': 'off', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.u_by_moen_015f44_outlet_4', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'U by Moen-015F44 Outlet 4', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'valve', - 'unique_id': '00:00:00:00:00:00_1_32', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'U by Moen-015F44 Outlet 4', - 'in_use': False, - }), - 'entity_id': 'switch.u_by_moen_015f44_outlet_4', - 'state': 'off', - }), - }), - ]), - }), - ]) -# --- -# name: test_snapshots[velux_active_netatmo_co2] - list([ - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:1', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Sensor', - 'model_id': None, - 'name': 'VELUX Sensor', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '16.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 
'button.velux_sensor_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Sensor Identify', - }), - 'entity_id': 'button.velux_sensor_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Carbon Dioxide sensor', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_14', - 'unit_of_measurement': 'ppm', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'carbon_dioxide', - 'friendly_name': 'VELUX Sensor Carbon Dioxide sensor', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', - 'state': '1124.0', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_humidity_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Humidity sensor', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_11', - 'unit_of_measurement': '%', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'humidity', - 'friendly_name': 'VELUX Sensor Humidity sensor', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'entity_id': 'sensor.velux_sensor_humidity_sensor', - 'state': '69.0', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_temperature_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Temperature sensor', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_8', - 'unit_of_measurement': , - }), - 'state': dict({ - 
'attributes': dict({ - 'device_class': 'temperature', - 'friendly_name': 'VELUX Sensor Temperature sensor', - 'state_class': , - 'unit_of_measurement': , - }), - 'entity_id': 'sensor.velux_sensor_temperature_sensor', - 'state': '23.9', - }), - }), - ]), - }), - ]) -# --- # name: test_snapshots[velux_gateway] list([ dict({ @@ -18388,7 +17886,6 @@ ]), 'manufacturer': 'VELUX', 'model': 'VELUX Gateway', - 'model_id': None, 'name': 'VELUX Gateway', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -18463,7 +17960,6 @@ ]), 'manufacturer': 'VELUX', 'model': 'VELUX Sensor', - 'model_id': None, 'name': 'VELUX Sensor', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -18673,7 +18169,6 @@ ]), 'manufacturer': 'VELUX', 'model': 'VELUX Window', - 'model_id': None, 'name': 'VELUX Window', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -18770,249 +18265,6 @@ }), ]) # --- -# name: test_snapshots[velux_window] - list([ - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:1', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Window', - 'model_id': None, - 'name': 'VELUX Window', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_window_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Window Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Window Identify', - }), - 'entity_id': 'button.velux_window_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_window_roof_window', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Window Roof Window', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'device_class': 'window', - 'friendly_name': 'VELUX Window Roof Window', - 'supported_features': , - }), - 'entity_id': 'cover.velux_window_roof_window', - 'state': 'closed', - }), - }), - ]), - }), - ]) -# --- -# name: 
test_snapshots[velux_window_cover] - list([ - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:1', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX External Cover', - 'model_id': None, - 'name': 'VELUX External Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '15.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_external_cover_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX External Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX External Cover Identify', - }), - 'entity_id': 'button.velux_external_cover_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_external_cover_awning_blinds', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX External Cover Awning Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'friendly_name': 'VELUX External Cover Awning Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_external_cover_awning_blinds', - 'state': 'closed', - }), - }), - ]), - }), - ]) -# --- # name: test_snapshots[vocolinc_flowerbud] list([ dict({ @@ -19038,7 +18290,6 @@ ]), 'manufacturer': 'VOCOlinc', 'model': 'Flowerbud', - 'model_id': None, 'name': 'VOCOlinc-Flowerbud-0d324b', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -19344,7 +18595,6 @@ ]), 'manufacturer': 'VOCOlinc', 'model': 'VP3', - 'model_id': None, 'name': 'VOCOlinc-VP3-123456', 'name_by_user': None, 'primary_config_entry': 'TestData', diff --git a/tests/components/homekit_controller/specific_devices/test_fan_that_changes_features.py b/tests/components/homekit_controller/specific_devices/test_fan_that_changes_features.py index d6dc0f70015..aea53e74d46 100644 --- a/tests/components/homekit_controller/specific_devices/test_fan_that_changes_features.py +++ b/tests/components/homekit_controller/specific_devices/test_fan_that_changes_features.py @@ -29,22 
+29,14 @@ async def test_fan_add_feature_at_runtime( fan_state = hass.states.get("fan.living_room_fan") assert ( fan_state.attributes[ATTR_SUPPORTED_FEATURES] - is FanEntityFeature.SET_SPEED - | FanEntityFeature.DIRECTION - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON + is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION ) fan = entity_registry.async_get("fan.ceiling_fan") assert fan.unique_id == "00:00:00:00:00:00_766313939_8" fan_state = hass.states.get("fan.ceiling_fan") - assert ( - fan_state.attributes[ATTR_SUPPORTED_FEATURES] - is FanEntityFeature.SET_SPEED - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) + assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED # Now change the config to add oscillation accessories = await setup_accessories_from_file( @@ -58,16 +50,9 @@ async def test_fan_add_feature_at_runtime( is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION | FanEntityFeature.OSCILLATE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON ) fan_state = hass.states.get("fan.ceiling_fan") - assert ( - fan_state.attributes[ATTR_SUPPORTED_FEATURES] - is FanEntityFeature.SET_SPEED - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) + assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED async def test_fan_remove_feature_at_runtime( @@ -90,20 +75,13 @@ async def test_fan_remove_feature_at_runtime( is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION | FanEntityFeature.OSCILLATE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON ) fan = entity_registry.async_get("fan.ceiling_fan") assert fan.unique_id == "00:00:00:00:00:00_766313939_8" fan_state = hass.states.get("fan.ceiling_fan") - assert ( - fan_state.attributes[ATTR_SUPPORTED_FEATURES] - is FanEntityFeature.SET_SPEED - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) + assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED # Now change the config to add oscillation accessories = await setup_accessories_from_file( @@ -114,18 +92,10 @@ async def test_fan_remove_feature_at_runtime( fan_state = hass.states.get("fan.living_room_fan") assert ( fan_state.attributes[ATTR_SUPPORTED_FEATURES] - is FanEntityFeature.SET_SPEED - | FanEntityFeature.DIRECTION - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON + is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION ) fan_state = hass.states.get("fan.ceiling_fan") - assert ( - fan_state.attributes[ATTR_SUPPORTED_FEATURES] - is FanEntityFeature.SET_SPEED - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) + assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED async def test_bridge_with_two_fans_one_removed( @@ -149,20 +119,13 @@ async def test_bridge_with_two_fans_one_removed( is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION | FanEntityFeature.OSCILLATE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON ) fan = entity_registry.async_get("fan.ceiling_fan") assert fan.unique_id == "00:00:00:00:00:00_766313939_8" fan_state = hass.states.get("fan.ceiling_fan") - assert ( - fan_state.attributes[ATTR_SUPPORTED_FEATURES] - is FanEntityFeature.SET_SPEED - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) + assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED # Now change the config to remove one of the fans accessories = await setup_accessories_from_file( @@ -178,8 +141,6 @@ async def test_bridge_with_two_fans_one_removed( is 
FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION | FanEntityFeature.OSCILLATE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON ) # The second fan should have been removed assert not hass.states.get("fan.ceiling_fan") diff --git a/tests/components/homekit_controller/specific_devices/test_koogeek_ls1.py b/tests/components/homekit_controller/specific_devices/test_koogeek_ls1.py index a16cd052c87..9c6e5a6687a 100644 --- a/tests/components/homekit_controller/specific_devices/test_koogeek_ls1.py +++ b/tests/components/homekit_controller/specific_devices/test_koogeek_ls1.py @@ -5,7 +5,7 @@ from unittest import mock from aiohomekit.exceptions import AccessoryDisconnectedError, EncryptionError from aiohomekit.model import CharacteristicsTypes, ServicesTypes -from aiohomekit.testing import FakeController, FakePairing +from aiohomekit.testing import FakePairing import pytest from homeassistant.components.homekit_controller.connection import ( @@ -48,14 +48,7 @@ async def test_recover_from_failure(hass: HomeAssistant, failure_cls) -> None: # Test that entity remains in the same state if there is a network error next_update = dt_util.utcnow() + timedelta(seconds=60) - with ( - mock.patch.object(FakePairing, "get_characteristics") as get_char, - mock.patch.object( - FakeController, - "async_reachable", - return_value=False, - ), - ): + with mock.patch.object(FakePairing, "get_characteristics") as get_char: get_char.side_effect = failure_cls("Disconnected") # Test that a poll triggers unavailable diff --git a/tests/components/homekit_controller/test_alarm_control_panel.py b/tests/components/homekit_controller/test_alarm_control_panel.py index 1e9f023fc46..a8852aac4f7 100644 --- a/tests/components/homekit_controller/test_alarm_control_panel.py +++ b/tests/components/homekit_controller/test_alarm_control_panel.py @@ -1,18 +1,15 @@ """Basic checks for HomeKitalarm_control_panel.""" -from collections.abc import Callable - -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import setup_test_component +from .common import get_next_aid, setup_test_component -def create_security_system_service(accessory: Accessory) -> None: +def create_security_system_service(accessory): """Define a security-system characteristics as per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.SECURITY_SYSTEM) @@ -30,13 +27,9 @@ def create_security_system_service(accessory: Accessory) -> None: targ_state.value = 50 -async def test_switch_change_alarm_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_change_alarm_state(hass: HomeAssistant) -> None: """Test that we can turn a HomeKit alarm on and off again.""" - helper = await setup_test_component( - hass, get_next_aid(), create_security_system_service - ) + helper = await setup_test_component(hass, create_security_system_service) await hass.services.async_call( "alarm_control_panel", @@ -91,13 +84,9 @@ async def test_switch_change_alarm_state( ) -async def test_switch_read_alarm_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_read_alarm_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit alarm accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_security_system_service - ) + 
helper = await setup_test_component(hass, create_security_system_service) await helper.async_update( ServicesTypes.SECURITY_SYSTEM, @@ -137,9 +126,7 @@ async def test_switch_read_alarm_state( async def test_migrate_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test a we can migrate a alarm_control_panel unique id.""" aid = get_next_aid() @@ -148,7 +135,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, aid, create_security_system_service) + await setup_test_component(hass, create_security_system_service) assert ( entity_registry.async_get(alarm_control_panel_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_binary_sensor.py b/tests/components/homekit_controller/test_binary_sensor.py index a46d5eca2f5..3d4486bb38d 100644 --- a/tests/components/homekit_controller/test_binary_sensor.py +++ b/tests/components/homekit_controller/test_binary_sensor.py @@ -1,8 +1,5 @@ """Basic checks for HomeKit motion sensors and contact sensors.""" -from collections.abc import Callable - -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -10,10 +7,10 @@ from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import setup_test_component +from .common import get_next_aid, setup_test_component -def create_motion_sensor_service(accessory: Accessory) -> None: +def create_motion_sensor_service(accessory): """Define motion characteristics as per page 225 of HAP spec.""" service = accessory.add_service(ServicesTypes.MOTION_SENSOR) @@ -21,13 +18,9 @@ def create_motion_sensor_service(accessory: Accessory) -> None: cur_state.value = 0 -async def test_motion_sensor_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_motion_sensor_read_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit motion sensor accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_motion_sensor_service - ) + helper = await setup_test_component(hass, create_motion_sensor_service) await helper.async_update( ServicesTypes.MOTION_SENSOR, {CharacteristicsTypes.MOTION_DETECTED: False} @@ -44,7 +37,7 @@ async def test_motion_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.MOTION -def create_contact_sensor_service(accessory: Accessory) -> None: +def create_contact_sensor_service(accessory): """Define contact characteristics.""" service = accessory.add_service(ServicesTypes.CONTACT_SENSOR) @@ -52,13 +45,9 @@ def create_contact_sensor_service(accessory: Accessory) -> None: cur_state.value = 0 -async def test_contact_sensor_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_contact_sensor_read_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit contact accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_contact_sensor_service - ) + helper = await setup_test_component(hass, create_contact_sensor_service) await helper.async_update( ServicesTypes.CONTACT_SENSOR, {CharacteristicsTypes.CONTACT_STATE: 0} @@ -75,7 +64,7 @@ async def 
test_contact_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.OPENING -def create_smoke_sensor_service(accessory: Accessory) -> None: +def create_smoke_sensor_service(accessory): """Define smoke sensor characteristics.""" service = accessory.add_service(ServicesTypes.SMOKE_SENSOR) @@ -83,13 +72,9 @@ def create_smoke_sensor_service(accessory: Accessory) -> None: cur_state.value = 0 -async def test_smoke_sensor_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_smoke_sensor_read_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit contact accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_smoke_sensor_service - ) + helper = await setup_test_component(hass, create_smoke_sensor_service) await helper.async_update( ServicesTypes.SMOKE_SENSOR, {CharacteristicsTypes.SMOKE_DETECTED: 0} @@ -106,7 +91,7 @@ async def test_smoke_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.SMOKE -def create_carbon_monoxide_sensor_service(accessory: Accessory) -> None: +def create_carbon_monoxide_sensor_service(accessory): """Define carbon monoxide sensor characteristics.""" service = accessory.add_service(ServicesTypes.CARBON_MONOXIDE_SENSOR) @@ -114,13 +99,9 @@ def create_carbon_monoxide_sensor_service(accessory: Accessory) -> None: cur_state.value = 0 -async def test_carbon_monoxide_sensor_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_carbon_monoxide_sensor_read_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit contact accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_carbon_monoxide_sensor_service - ) + helper = await setup_test_component(hass, create_carbon_monoxide_sensor_service) await helper.async_update( ServicesTypes.CARBON_MONOXIDE_SENSOR, @@ -139,7 +120,7 @@ async def test_carbon_monoxide_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.CO -def create_occupancy_sensor_service(accessory: Accessory) -> None: +def create_occupancy_sensor_service(accessory): """Define occupancy characteristics.""" service = accessory.add_service(ServicesTypes.OCCUPANCY_SENSOR) @@ -147,13 +128,9 @@ def create_occupancy_sensor_service(accessory: Accessory) -> None: cur_state.value = 0 -async def test_occupancy_sensor_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_occupancy_sensor_read_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit occupancy sensor accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_occupancy_sensor_service - ) + helper = await setup_test_component(hass, create_occupancy_sensor_service) await helper.async_update( ServicesTypes.OCCUPANCY_SENSOR, {CharacteristicsTypes.OCCUPANCY_DETECTED: False} @@ -170,7 +147,7 @@ async def test_occupancy_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.OCCUPANCY -def create_leak_sensor_service(accessory: Accessory) -> None: +def create_leak_sensor_service(accessory): """Define leak characteristics.""" service = accessory.add_service(ServicesTypes.LEAK_SENSOR) @@ -178,13 +155,9 @@ def create_leak_sensor_service(accessory: Accessory) -> None: cur_state.value = 0 -async def test_leak_sensor_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def 
test_leak_sensor_read_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit leak sensor accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_leak_sensor_service - ) + helper = await setup_test_component(hass, create_leak_sensor_service) await helper.async_update( ServicesTypes.LEAK_SENSOR, {CharacteristicsTypes.LEAK_DETECTED: 0} @@ -202,9 +175,7 @@ async def test_leak_sensor_read_state( async def test_migrate_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test a we can migrate a binary_sensor unique id.""" aid = get_next_aid() @@ -213,7 +184,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, aid, create_leak_sensor_service) + await setup_test_component(hass, create_leak_sensor_service) assert ( entity_registry.async_get(binary_sensor_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_button.py b/tests/components/homekit_controller/test_button.py index 18391e00df3..9f935569333 100644 --- a/tests/components/homekit_controller/test_button.py +++ b/tests/components/homekit_controller/test_button.py @@ -1,18 +1,15 @@ """Basic checks for HomeKit button.""" -from collections.abc import Callable - -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import Service, ServicesTypes +from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import Helper, setup_test_component +from .common import Helper, get_next_aid, setup_test_component -def create_switch_with_setup_button(accessory: Accessory) -> Service: +def create_switch_with_setup_button(accessory): """Define setup button characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) @@ -27,7 +24,7 @@ def create_switch_with_setup_button(accessory: Accessory) -> Service: return service -def create_switch_with_ecobee_clear_hold_button(accessory: Accessory) -> Service: +def create_switch_with_ecobee_clear_hold_button(accessory): """Define setup button characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) @@ -42,13 +39,9 @@ def create_switch_with_ecobee_clear_hold_button(accessory: Accessory) -> Service return service -async def test_press_button( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_press_button(hass: HomeAssistant) -> None: """Test a switch service that has a button characteristic is correctly handled.""" - helper = await setup_test_component( - hass, get_next_aid(), create_switch_with_setup_button - ) + helper = await setup_test_component(hass, create_switch_with_setup_button) # Helper will be for the primary entity, which is the outlet. Make a helper for the button. 
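# Editor's note, not part of the diff: a minimal sketch of how these button tests
# exercise the entity once the secondary helper below is built. The entity id
# "button.testdevice_setup" is an assumption derived from the "testdevice" name
# that setup_test_component gives accessories elsewhere in this patch; the call
# itself is the standard Home Assistant button.press service.
await hass.services.async_call(
    "button",
    "press",
    {"entity_id": "button.testdevice_setup"},
    blocking=True,
)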
button = Helper( @@ -73,12 +66,10 @@ async def test_press_button( ) -async def test_ecobee_clear_hold_press_button( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_ecobee_clear_hold_press_button(hass: HomeAssistant) -> None: """Test ecobee clear hold button characteristic is correctly handled.""" helper = await setup_test_component( - hass, get_next_aid(), create_switch_with_ecobee_clear_hold_button + hass, create_switch_with_ecobee_clear_hold_button ) # Helper will be for the primary entity, which is the outlet. Make a helper for the button. @@ -105,9 +96,7 @@ async def test_ecobee_clear_hold_press_button( async def test_migrate_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test a we can migrate a button unique id.""" aid = get_next_aid() @@ -116,7 +105,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-0001-aid:{aid}-sid:1-cid:2", ) - await setup_test_component(hass, aid, create_switch_with_ecobee_clear_hold_button) + await setup_test_component(hass, create_switch_with_ecobee_clear_hold_button) assert ( entity_registry.async_get(button_entry.entity_id).unique_id == f"00:00:00:00:00:00_{aid}_1_2" diff --git a/tests/components/homekit_controller/test_camera.py b/tests/components/homekit_controller/test_camera.py index 1c57d579dc8..de64ee95d74 100644 --- a/tests/components/homekit_controller/test_camera.py +++ b/tests/components/homekit_controller/test_camera.py @@ -1,9 +1,7 @@ """Basic checks for HomeKit cameras.""" import base64 -from collections.abc import Callable -from aiohomekit.model import Accessory from aiohomekit.model.services import ServicesTypes from aiohomekit.testing import FAKE_CAMERA_IMAGE @@ -11,18 +9,16 @@ from homeassistant.components import camera from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import setup_test_component +from .common import get_next_aid, setup_test_component -def create_camera(accessory: Accessory) -> None: +def create_camera(accessory): """Define camera characteristics.""" accessory.add_service(ServicesTypes.CAMERA_RTP_STREAM_MANAGEMENT) async def test_migrate_unique_ids( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test migrating entity unique ids.""" aid = get_next_aid() @@ -31,23 +27,23 @@ async def test_migrate_unique_ids( "homekit_controller", f"homekit-0001-aid:{aid}", ) - await setup_test_component(hass, aid, create_camera) + await setup_test_component(hass, create_camera) assert ( entity_registry.async_get(camera.entity_id).unique_id == f"00:00:00:00:00:00_{aid}" ) -async def test_read_state(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: +async def test_read_state(hass: HomeAssistant) -> None: """Test reading the state of a HomeKit camera.""" - helper = await setup_test_component(hass, get_next_aid(), create_camera) + helper = await setup_test_component(hass, create_camera) state = await helper.poll_and_get_state() assert state.state == "idle" -async def test_get_image(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: +async def test_get_image(hass: HomeAssistant) -> None: """Test getting a JPEG from a camera.""" - helper = await setup_test_component(hass, get_next_aid(), create_camera) + helper = await setup_test_component(hass, 
create_camera) image = await camera.async_get_image(hass, helper.entity_id) assert image.content == base64.b64decode(FAKE_CAMERA_IMAGE) diff --git a/tests/components/homekit_controller/test_climate.py b/tests/components/homekit_controller/test_climate.py index 62c73af9977..5470c669700 100644 --- a/tests/components/homekit_controller/test_climate.py +++ b/tests/components/homekit_controller/test_climate.py @@ -1,12 +1,8 @@ """Basic checks for HomeKitclimate.""" -from collections.abc import Callable - -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import ( ActivationStateValues, CharacteristicsTypes, - CurrentFanStateValues, CurrentHeaterCoolerStateValues, SwingModeValues, TargetHeaterCoolerStateValues, @@ -14,7 +10,7 @@ from aiohomekit.model.characteristics import ( from aiohomekit.model.services import ServicesTypes from homeassistant.components.climate import ( - DOMAIN as CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_FAN_MODE, SERVICE_SET_HUMIDITY, SERVICE_SET_HVAC_MODE, @@ -25,12 +21,12 @@ from homeassistant.components.climate import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import setup_test_component +from .common import get_next_aid, setup_test_component # Test thermostat devices -def create_thermostat_service(accessory: Accessory) -> None: +def create_thermostat_service(accessory): """Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.THERMOSTAT) @@ -67,11 +63,8 @@ def create_thermostat_service(accessory: Accessory) -> None: char = service.add_char(CharacteristicsTypes.RELATIVE_HUMIDITY_CURRENT) char.value = 0 - char = service.add_char(CharacteristicsTypes.FAN_STATE_CURRENT) - char.value = 0 - -def create_thermostat_service_min_max(accessory: Accessory) -> None: +def create_thermostat_service_min_max(accessory): """Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.THERMOSTAT) char = service.add_char(CharacteristicsTypes.HEATING_COOLING_TARGET) @@ -80,18 +73,14 @@ def create_thermostat_service_min_max(accessory: Accessory) -> None: char.maxValue = 1 -async def test_climate_respect_supported_op_modes_1( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_climate_respect_supported_op_modes_1(hass: HomeAssistant) -> None: """Test that climate respects minValue/maxValue hints.""" - helper = await setup_test_component( - hass, get_next_aid(), create_thermostat_service_min_max - ) + helper = await setup_test_component(hass, create_thermostat_service_min_max) state = await helper.poll_and_get_state() assert state.attributes["hvac_modes"] == ["off", "heat"] -def create_thermostat_service_valid_vals(accessory: Accessory) -> None: +def create_thermostat_service_valid_vals(accessory): """Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.THERMOSTAT) char = service.add_char(CharacteristicsTypes.HEATING_COOLING_TARGET) @@ -99,25 +88,19 @@ def create_thermostat_service_valid_vals(accessory: Accessory) -> None: char.valid_values = [0, 1, 2] -async def test_climate_respect_supported_op_modes_2( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_climate_respect_supported_op_modes_2(hass: HomeAssistant) -> None: """Test that climate respects validValue hints.""" - helper = await setup_test_component( - hass, get_next_aid(), create_thermostat_service_valid_vals - ) + helper = await setup_test_component(hass, create_thermostat_service_valid_vals) 
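# Editor's note, not part of the diff: a hedged sketch of the mapping these
# op-mode tests rely on. The 0..3 values are the HAP target heating/cooling
# states (0=off, 1=heat, 2=cool, 3=auto/heat_cool); the clamping by
# minValue/maxValue or valid_values mirrors the assertions in
# test_climate_respect_supported_op_modes_1/2 in this same file, not the
# integration's actual implementation.
HAP_TO_HVAC = {0: "off", 1: "heat", 2: "cool", 3: "heat_cool"}

def expected_hvac_modes(valid_values=None, min_value=None, max_value=None):
    """Return the hvac_modes the tests expect for the given characteristic hints."""
    if valid_values is not None:
        candidates = valid_values
    else:
        lo = 0 if min_value is None else min_value
        hi = 3 if max_value is None else max_value
        candidates = range(lo, hi + 1)
    return [HAP_TO_HVAC[v] for v in candidates if v in HAP_TO_HVAC]

# e.g. expected_hvac_modes(min_value=0, max_value=1) -> ["off", "heat"]
#      expected_hvac_modes(valid_values=[0, 1, 2])   -> ["off", "heat", "cool"]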
state = await helper.poll_and_get_state() assert state.attributes["hvac_modes"] == ["off", "heat", "cool"] -async def test_climate_change_thermostat_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_climate_change_thermostat_state(hass: HomeAssistant) -> None: """Test that we can turn a HomeKit thermostat on and off again.""" - helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) + helper = await setup_test_component(hass, create_thermostat_service) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, @@ -130,7 +113,7 @@ async def test_climate_change_thermostat_state( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL}, blocking=True, @@ -143,7 +126,7 @@ async def test_climate_change_thermostat_state( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, @@ -156,7 +139,7 @@ async def test_climate_change_thermostat_state( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.OFF}, blocking=True, @@ -169,7 +152,7 @@ async def test_climate_change_thermostat_state( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_FAN_MODE, {"entity_id": "climate.testdevice", "fan_mode": "on"}, blocking=True, @@ -182,7 +165,7 @@ async def test_climate_change_thermostat_state( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_FAN_MODE, {"entity_id": "climate.testdevice", "fan_mode": "auto"}, blocking=True, @@ -195,14 +178,12 @@ async def test_climate_change_thermostat_state( ) -async def test_climate_check_min_max_values_per_mode( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_climate_check_min_max_values_per_mode(hass: HomeAssistant) -> None: """Test that we we get the appropriate min/max values for each mode.""" - helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) + helper = await setup_test_component(hass, create_thermostat_service) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, @@ -212,7 +193,7 @@ async def test_climate_check_min_max_values_per_mode( assert climate_state.attributes["max_temp"] == 35 await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL}, blocking=True, @@ -222,7 +203,7 @@ async def test_climate_check_min_max_values_per_mode( assert climate_state.attributes["max_temp"] == 35 await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, @@ -232,14 +213,12 @@ async def test_climate_check_min_max_values_per_mode( assert climate_state.attributes["max_temp"] == 40 -async def test_climate_change_thermostat_temperature( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_climate_change_thermostat_temperature(hass: HomeAssistant) -> None: """Test that we can turn a HomeKit thermostat on and off again.""" - helper = await setup_test_component(hass, get_next_aid(), 
create_thermostat_service) + helper = await setup_test_component(hass, create_thermostat_service) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, {"entity_id": "climate.testdevice", "temperature": 21}, blocking=True, @@ -252,7 +231,7 @@ async def test_climate_change_thermostat_temperature( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, {"entity_id": "climate.testdevice", "temperature": 25}, blocking=True, @@ -265,21 +244,19 @@ async def test_climate_change_thermostat_temperature( ) -async def test_climate_change_thermostat_temperature_range( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_climate_change_thermostat_temperature_range(hass: HomeAssistant) -> None: """Test that we can set separate heat and cool setpoints in heat_cool mode.""" - helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) + helper = await setup_test_component(hass, create_thermostat_service) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, { "entity_id": "climate.testdevice", @@ -301,20 +278,20 @@ async def test_climate_change_thermostat_temperature_range( async def test_climate_change_thermostat_temperature_range_iphone( - hass: HomeAssistant, get_next_aid: Callable[[], int] + hass: HomeAssistant, ) -> None: """Test that we can set all three set points at once (iPhone heat_cool mode support).""" - helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) + helper = await setup_test_component(hass, create_thermostat_service) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, { "entity_id": "climate.testdevice", @@ -336,20 +313,20 @@ async def test_climate_change_thermostat_temperature_range_iphone( async def test_climate_cannot_set_thermostat_temp_range_in_wrong_mode( - hass: HomeAssistant, get_next_aid: Callable[[], int] + hass: HomeAssistant, ) -> None: """Test that we cannot set range values when not in heat_cool mode.""" - helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) + helper = await setup_test_component(hass, create_thermostat_service) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, { "entity_id": "climate.testdevice", @@ -369,7 +346,7 @@ async def test_climate_cannot_set_thermostat_temp_range_in_wrong_mode( ) -def create_thermostat_single_set_point_auto(accessory: Accessory) -> None: +def create_thermostat_single_set_point_auto(accessory): """Define thermostat characteristics with a single set point in auto.""" service = accessory.add_service(ServicesTypes.THERMOSTAT) @@ -395,15 +372,13 @@ def create_thermostat_single_set_point_auto(accessory: Accessory) -> None: async def test_climate_check_min_max_values_per_mode_sspa_device( - hass: HomeAssistant, get_next_aid: Callable[[], int] + hass: HomeAssistant, ) -> None: """Test appropriate min/max values for each mode on sspa devices.""" - helper = await 
setup_test_component( - hass, get_next_aid(), create_thermostat_single_set_point_auto - ) + helper = await setup_test_component(hass, create_thermostat_single_set_point_auto) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, @@ -413,7 +388,7 @@ async def test_climate_check_min_max_values_per_mode_sspa_device( assert climate_state.attributes["max_temp"] == 35 await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL}, blocking=True, @@ -423,7 +398,7 @@ async def test_climate_check_min_max_values_per_mode_sspa_device( assert climate_state.attributes["max_temp"] == 35 await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, @@ -433,23 +408,19 @@ async def test_climate_check_min_max_values_per_mode_sspa_device( assert climate_state.attributes["max_temp"] == 35 -async def test_climate_set_thermostat_temp_on_sspa_device( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_climate_set_thermostat_temp_on_sspa_device(hass: HomeAssistant) -> None: """Test setting temperature in different modes on device with single set point in auto.""" - helper = await setup_test_component( - hass, get_next_aid(), create_thermostat_single_set_point_auto - ) + helper = await setup_test_component(hass, create_thermostat_single_set_point_auto) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, {"entity_id": "climate.testdevice", "temperature": 21}, blocking=True, @@ -462,7 +433,7 @@ async def test_climate_set_thermostat_temp_on_sspa_device( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, @@ -475,7 +446,7 @@ async def test_climate_set_thermostat_temp_on_sspa_device( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, { "entity_id": "climate.testdevice", @@ -491,16 +462,12 @@ async def test_climate_set_thermostat_temp_on_sspa_device( ) -async def test_climate_set_mode_via_temp( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_climate_set_mode_via_temp(hass: HomeAssistant) -> None: """Test setting temperature and mode at the same time.""" - helper = await setup_test_component( - hass, get_next_aid(), create_thermostat_single_set_point_auto - ) + helper = await setup_test_component(hass, create_thermostat_single_set_point_auto) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, { "entity_id": "climate.testdevice", @@ -518,7 +485,7 @@ async def test_climate_set_mode_via_temp( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, { "entity_id": "climate.testdevice", @@ -536,14 +503,12 @@ async def test_climate_set_mode_via_temp( ) -async def test_climate_change_thermostat_humidity( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_climate_change_thermostat_humidity(hass: HomeAssistant) -> None: """Test that we can turn a HomeKit thermostat on and off again.""" - helper = await setup_test_component(hass,
get_next_aid(), create_thermostat_service) + helper = await setup_test_component(hass, create_thermostat_service) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {"entity_id": "climate.testdevice", "humidity": 50}, blocking=True, @@ -556,7 +521,7 @@ async def test_climate_change_thermostat_humidity( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HUMIDITY, {"entity_id": "climate.testdevice", "humidity": 45}, blocking=True, @@ -569,11 +534,9 @@ async def test_climate_change_thermostat_humidity( ) -async def test_climate_read_thermostat_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_climate_read_thermostat_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit thermostat accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) + helper = await setup_test_component(hass, create_thermostat_service) # Simulate that heating is on await helper.async_update( @@ -628,11 +591,9 @@ async def test_climate_read_thermostat_state( assert state.state == HVACMode.HEAT_COOL -async def test_hvac_mode_vs_hvac_action( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_hvac_mode_vs_hvac_action(hass: HomeAssistant) -> None: """Check that we haven't conflated hvac_mode and hvac_action.""" - helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) + helper = await setup_test_component(hass, create_thermostat_service) # Simulate that current temperature is above target temp # Heating might be on, but hvac_action currently 'off' @@ -652,18 +613,6 @@ async def test_hvac_mode_vs_hvac_action( assert state.state == "heat" assert state.attributes["hvac_action"] == "idle" - # Simulate the fan running while the heat/cool is idle - await helper.async_update( - ServicesTypes.THERMOSTAT, - { - CharacteristicsTypes.FAN_STATE_CURRENT: CurrentFanStateValues.ACTIVE, - }, - ) - - state = await helper.poll_and_get_state() - assert state.state == "heat" - assert state.attributes["hvac_action"] == "fan" - # Simulate that current temperature is below target temp # Heating might be on and hvac_action currently 'heat' await helper.async_update( @@ -679,11 +628,9 @@ async def test_hvac_mode_vs_hvac_action( assert state.attributes["hvac_action"] == "heating" -async def test_hvac_mode_vs_hvac_action_current_mode_wrong( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_hvac_mode_vs_hvac_action_current_mode_wrong(hass: HomeAssistant) -> None: """Check that we cope with buggy HEATING_COOLING_CURRENT.""" - helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) + helper = await setup_test_component(hass, create_thermostat_service) await helper.async_update( ServicesTypes.THERMOSTAT, @@ -702,7 +649,7 @@ async def test_hvac_mode_vs_hvac_action_current_mode_wrong( assert state.attributes["hvac_action"] == "idle" -def create_heater_cooler_service(accessory: Accessory) -> None: +def create_heater_cooler_service(accessory): """Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.HEATER_COOLER) @@ -736,7 +683,7 @@ def create_heater_cooler_service(accessory: Accessory) -> None: # Test heater-cooler devices -def create_heater_cooler_service_min_max(accessory: Accessory) -> None: +def create_heater_cooler_service_min_max(accessory): """Define thermostat characteristics.""" service = 
accessory.add_service(ServicesTypes.HEATER_COOLER) char = service.add_char(CharacteristicsTypes.TARGET_HEATER_COOLER_STATE) @@ -745,18 +692,14 @@ def create_heater_cooler_service_min_max(accessory: Accessory) -> None: char.maxValue = 2 -async def test_heater_cooler_respect_supported_op_modes_1( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_heater_cooler_respect_supported_op_modes_1(hass: HomeAssistant) -> None: """Test that climate respects minValue/maxValue hints.""" - helper = await setup_test_component( - hass, get_next_aid(), create_heater_cooler_service_min_max - ) + helper = await setup_test_component(hass, create_heater_cooler_service_min_max) state = await helper.poll_and_get_state() assert state.attributes["hvac_modes"] == ["heat", "cool", "off"] -def create_theater_cooler_service_valid_vals(accessory: Accessory) -> None: +def create_theater_cooler_service_valid_vals(accessory): """Define heater-cooler characteristics.""" service = accessory.add_service(ServicesTypes.HEATER_COOLER) char = service.add_char(CharacteristicsTypes.TARGET_HEATER_COOLER_STATE) @@ -764,27 +707,19 @@ def create_theater_cooler_service_valid_vals(accessory: Accessory) -> None: char.valid_values = [1, 2] -async def test_heater_cooler_respect_supported_op_modes_2( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_heater_cooler_respect_supported_op_modes_2(hass: HomeAssistant) -> None: """Test that climate respects validValue hints.""" - helper = await setup_test_component( - hass, get_next_aid(), create_theater_cooler_service_valid_vals - ) + helper = await setup_test_component(hass, create_theater_cooler_service_valid_vals) state = await helper.poll_and_get_state() assert state.attributes["hvac_modes"] == ["heat", "cool", "off"] -async def test_heater_cooler_change_thermostat_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_heater_cooler_change_thermostat_state(hass: HomeAssistant) -> None: """Test that we can change the operational mode.""" - helper = await setup_test_component( - hass, get_next_aid(), create_heater_cooler_service - ) + helper = await setup_test_component(hass, create_heater_cooler_service) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, @@ -797,7 +732,7 @@ async def test_heater_cooler_change_thermostat_state( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL}, blocking=True, @@ -810,7 +745,7 @@ async def test_heater_cooler_change_thermostat_state( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL}, blocking=True, @@ -823,7 +758,7 @@ async def test_heater_cooler_change_thermostat_state( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.OFF}, blocking=True, @@ -836,19 +771,15 @@ async def test_heater_cooler_change_thermostat_state( ) -async def test_can_turn_on_after_off( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_can_turn_on_after_off(hass: HomeAssistant) -> None: """Test that we always force device from inactive to active when setting mode. This is a regression test for #81863. 
""" - helper = await setup_test_component( - hass, get_next_aid(), create_heater_cooler_service - ) + helper = await setup_test_component(hass, create_heater_cooler_service) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.OFF}, blocking=True, @@ -861,7 +792,7 @@ async def test_can_turn_on_after_off( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, @@ -875,22 +806,18 @@ async def test_can_turn_on_after_off( ) -async def test_heater_cooler_change_thermostat_temperature( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_heater_cooler_change_thermostat_temperature(hass: HomeAssistant) -> None: """Test that we can change the target temperature.""" - helper = await setup_test_component( - hass, get_next_aid(), create_heater_cooler_service - ) + helper = await setup_test_component(hass, create_heater_cooler_service) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT}, blocking=True, ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, {"entity_id": "climate.testdevice", "temperature": 20}, blocking=True, @@ -903,13 +830,13 @@ async def test_heater_cooler_change_thermostat_temperature( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL}, blocking=True, ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_TEMPERATURE, {"entity_id": "climate.testdevice", "temperature": 26}, blocking=True, @@ -922,22 +849,18 @@ async def test_heater_cooler_change_thermostat_temperature( ) -async def test_heater_cooler_change_fan_speed( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_heater_cooler_change_fan_speed(hass: HomeAssistant) -> None: """Test that we can change the target fan speed.""" - helper = await setup_test_component( - hass, get_next_aid(), create_heater_cooler_service - ) + helper = await setup_test_component(hass, create_heater_cooler_service) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_HVAC_MODE, {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL}, blocking=True, ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_FAN_MODE, {"entity_id": "climate.testdevice", "fan_mode": "low"}, blocking=True, @@ -949,7 +872,7 @@ async def test_heater_cooler_change_fan_speed( }, ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_FAN_MODE, {"entity_id": "climate.testdevice", "fan_mode": "medium"}, blocking=True, @@ -961,7 +884,7 @@ async def test_heater_cooler_change_fan_speed( }, ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_FAN_MODE, {"entity_id": "climate.testdevice", "fan_mode": "high"}, blocking=True, @@ -974,13 +897,9 @@ async def test_heater_cooler_change_fan_speed( ) -async def test_heater_cooler_read_fan_speed( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_heater_cooler_read_fan_speed(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit thermostat accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_heater_cooler_service - ) + helper = await setup_test_component(hass, 
create_heater_cooler_service) # Simulate that fan speed is off await helper.async_update( @@ -1027,13 +946,9 @@ async def test_heater_cooler_read_fan_speed( assert state.attributes["fan_mode"] == "high" -async def test_heater_cooler_read_thermostat_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_heater_cooler_read_thermostat_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit thermostat accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_heater_cooler_service - ) + helper = await setup_test_component(hass, create_heater_cooler_service) # Simulate that heating is on await helper.async_update( @@ -1085,13 +1000,9 @@ async def test_heater_cooler_read_thermostat_state( assert state.state == HVACMode.HEAT_COOL -async def test_heater_cooler_hvac_mode_vs_hvac_action( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_heater_cooler_hvac_mode_vs_hvac_action(hass: HomeAssistant) -> None: """Check that we haven't conflated hvac_mode and hvac_action.""" - helper = await setup_test_component( - hass, get_next_aid(), create_heater_cooler_service - ) + helper = await setup_test_component(hass, create_heater_cooler_service) # Simulate that current temperature is above target temp # Heating might be on, but hvac_action currently 'off' @@ -1128,16 +1039,12 @@ async def test_heater_cooler_hvac_mode_vs_hvac_action( assert state.attributes["hvac_action"] == "heating" -async def test_heater_cooler_change_swing_mode( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_heater_cooler_change_swing_mode(hass: HomeAssistant) -> None: """Test that we can change the swing mode.""" - helper = await setup_test_component( - hass, get_next_aid(), create_heater_cooler_service - ) + helper = await setup_test_component(hass, create_heater_cooler_service) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_SWING_MODE, {"entity_id": "climate.testdevice", "swing_mode": "vertical"}, blocking=True, @@ -1150,7 +1057,7 @@ async def test_heater_cooler_change_swing_mode( ) await hass.services.async_call( - CLIMATE_DOMAIN, + DOMAIN, SERVICE_SET_SWING_MODE, {"entity_id": "climate.testdevice", "swing_mode": "off"}, blocking=True, @@ -1163,13 +1070,9 @@ async def test_heater_cooler_change_swing_mode( ) -async def test_heater_cooler_turn_off( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_heater_cooler_turn_off(hass: HomeAssistant) -> None: """Test that both hvac_action and hvac_mode return "off" when turned off.""" - helper = await setup_test_component( - hass, get_next_aid(), create_heater_cooler_service - ) + helper = await setup_test_component(hass, create_heater_cooler_service) # Simulate that the device is turned off but CURRENT_HEATER_COOLER_STATE still returns HEATING/COOLING await helper.async_update( @@ -1187,9 +1090,7 @@ async def test_heater_cooler_turn_off( async def test_migrate_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test that we can migrate a climate unique id.""" aid = get_next_aid() @@ -1198,7 +1099,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, aid, create_heater_cooler_service) + await setup_test_component(hass, create_heater_cooler_service) assert (
entity_registry.async_get(climate_entry.entity_id).unique_id == f"00:00:00:00:00:00_{aid}_8" diff --git a/tests/components/homekit_controller/test_config_flow.py b/tests/components/homekit_controller/test_config_flow.py index 4fb0a80cd26..a336758f4ac 100644 --- a/tests/components/homekit_controller/test_config_flow.py +++ b/tests/components/homekit_controller/test_config_flow.py @@ -2,7 +2,6 @@ import asyncio from ipaddress import ip_address -from typing import Any import unittest.mock from unittest.mock import AsyncMock, patch @@ -161,9 +160,7 @@ def test_valid_pairing_codes(pairing_code) -> None: assert len(valid_pin[2]) == 3 -def get_flow_context( - hass: HomeAssistant, result: config_flow.ConfigFlowResult -) -> dict[str, Any]: +def get_flow_context(hass, result): """Get the flow context from the result of async_init or async_configure.""" flow = next( flow @@ -214,13 +211,13 @@ def setup_mock_accessory(controller): bridge = Accessories() accessory = Accessory.create_with_info( - 1, name="Koogeek-LS1-20833F", manufacturer="Koogeek", model="LS1", serial_number="12345", firmware_revision="1.1", ) + accessory.aid = 1 service = accessory.add_service(ServicesTypes.LIGHTBULB) on_char = service.add_char(CharacteristicsTypes.ON) @@ -799,6 +796,7 @@ async def test_pair_form_errors_on_finish( "title_placeholders": {"name": "TestDevice", "category": "Outlet"}, "unique_id": "00:00:00:00:00:00", "source": config_entries.SOURCE_ZEROCONF, + "pairing": True, } @@ -849,6 +847,7 @@ async def test_pair_unknown_errors(hass: HomeAssistant, controller) -> None: "title_placeholders": {"name": "TestDevice", "category": "Outlet"}, "unique_id": "00:00:00:00:00:00", "source": config_entries.SOURCE_ZEROCONF, + "pairing": True, } @@ -957,6 +956,54 @@ async def test_user_no_unpaired_devices(hass: HomeAssistant, controller) -> None assert result["reason"] == "no_devices" +async def test_unignore_works(hass: HomeAssistant, controller) -> None: + """Test that rediscovery-triggered discoveries work.""" + device = setup_mock_accessory(controller) + + # Device is unignored + result = await hass.config_entries.flow.async_init( + "homekit_controller", + context={"source": config_entries.SOURCE_UNIGNORE}, + data={"unique_id": device.description.id}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "pair" + assert get_flow_context(hass, result) == { + "title_placeholders": {"name": "TestDevice", "category": "Other"}, + "unique_id": "00:00:00:00:00:00", + "source": config_entries.SOURCE_UNIGNORE, + } + + # User initiates pairing by clicking on 'configure' - device enters pairing mode and displays code + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "pair" + + # Pairing finalized + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"pairing_code": "111-22-333"} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Koogeek-LS1-20833F" + + +async def test_unignore_ignores_missing_devices( + hass: HomeAssistant, controller +) -> None: + """Test that rediscovery-triggered discoveries handle devices that have gone away.""" + setup_mock_accessory(controller) + + # Device is unignored + result = await hass.config_entries.flow.async_init( + "homekit_controller", + context={"source": config_entries.SOURCE_UNIGNORE}, + data={"unique_id": "00:00:00:00:00:01"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] ==
"accessory_not_found_error" + + async def test_discovery_dismiss_existing_flow_on_paired( hass: HomeAssistant, controller ) -> None: diff --git a/tests/components/homekit_controller/test_connection.py b/tests/components/homekit_controller/test_connection.py index 7ea791f9a1e..0a77509d675 100644 --- a/tests/components/homekit_controller/test_connection.py +++ b/tests/components/homekit_controller/test_connection.py @@ -1,37 +1,22 @@ """Tests for HKDevice.""" -from collections.abc import Callable import dataclasses -from unittest import mock from aiohomekit.controller import TransportType -from aiohomekit.model import Accessory -from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import Service, ServicesTypes -from aiohomekit.testing import FakeController import pytest from homeassistant.components.homekit_controller.const import ( - DEBOUNCE_COOLDOWN, DOMAIN, IDENTIFIER_ACCESSORY_ID, IDENTIFIER_LEGACY_ACCESSORY_ID, IDENTIFIER_LEGACY_SERIAL_NUMBER, ) from homeassistant.components.thread import async_add_dataset, dataset_store -from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.entity_component import async_update_entity -from .common import ( - setup_accessories_from_file, - setup_platform, - setup_test_accessories, - setup_test_component, - time_changed, -) +from .common import setup_accessories_from_file, setup_platform, setup_test_accessories from tests.common import MockConfigEntry @@ -346,96 +331,3 @@ async def test_thread_provision_migration_failed(hass: HomeAssistant) -> None: ) assert config_entry.data["Connection"] == "BLE" - - -async def test_poll_firmware_version_only_all_watchable_accessory_mode( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: - """Test that we only poll firmware if available and all chars are watchable accessory mode.""" - - def _create_accessory(accessory: Accessory) -> Service: - service = accessory.add_service(ServicesTypes.LIGHTBULB, name="TestDevice") - - on_char = service.add_char(CharacteristicsTypes.ON) - on_char.value = 0 - - brightness = service.add_char(CharacteristicsTypes.BRIGHTNESS) - brightness.value = 0 - - return service - - helper = await setup_test_component(hass, get_next_aid(), _create_accessory) - - with mock.patch.object( - helper.pairing, - "get_characteristics", - wraps=helper.pairing.get_characteristics, - ) as mock_get_characteristics: - # Initial state is that the light is off - state = await helper.poll_and_get_state() - assert state.state == STATE_OFF - assert mock_get_characteristics.call_count == 2 - # Verify only firmware version is polled - assert mock_get_characteristics.call_args_list[0][0][0] == {(1, 7)} - assert mock_get_characteristics.call_args_list[1][0][0] == {(1, 7)} - - # Test device goes offline - helper.pairing.available = False - with mock.patch.object( - FakeController, - "async_reachable", - return_value=False, - ): - state = await helper.poll_and_get_state() - assert state.state == STATE_UNAVAILABLE - # Tries twice before declaring unavailable - assert mock_get_characteristics.call_count == 4 - - # Test device comes back online - helper.pairing.available = True - state = await helper.poll_and_get_state() - assert state.state == STATE_OFF - assert mock_get_characteristics.call_count == 6 - - # Next poll should not happen because its a 
single - # accessory, available, and all chars are watchable - state = await helper.poll_and_get_state() - assert state.state == STATE_OFF - assert mock_get_characteristics.call_count == 8 - - -async def test_manual_poll_all_chars( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: - """Test that a manual poll will check all chars.""" - - def _create_accessory(accessory: Accessory) -> Service: - service = accessory.add_service(ServicesTypes.LIGHTBULB, name="TestDevice") - - on_char = service.add_char(CharacteristicsTypes.ON) - on_char.value = 0 - - brightness = service.add_char(CharacteristicsTypes.BRIGHTNESS) - brightness.value = 0 - - return service - - helper = await setup_test_component(hass, get_next_aid(), _create_accessory) - - with mock.patch.object( - helper.pairing, - "get_characteristics", - wraps=helper.pairing.get_characteristics, - ) as mock_get_characteristics: - # Initial state is that the light is off - await helper.poll_and_get_state() - # Verify only firmware version is polled - assert mock_get_characteristics.call_args_list[0][0][0] == {(1, 7)} - - # Now do a manual poll to ensure all chars are polled - mock_get_characteristics.reset_mock() - await async_update_entity(hass, helper.entity_id) - await time_changed(hass, 60) - await time_changed(hass, DEBOUNCE_COOLDOWN) - await hass.async_block_till_done() - assert len(mock_get_characteristics.call_args_list[0][0][0]) > 1 diff --git a/tests/components/homekit_controller/test_cover.py b/tests/components/homekit_controller/test_cover.py index 11870c801e1..2157eb51212 100644 --- a/tests/components/homekit_controller/test_cover.py +++ b/tests/components/homekit_controller/test_cover.py @@ -1,19 +1,16 @@ """Basic checks for HomeKitalarm_control_panel.""" -from collections.abc import Callable - -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import Service, ServicesTypes +from aiohomekit.model.services import ServicesTypes from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import setup_test_component +from .common import get_next_aid, setup_test_component -def create_window_covering_service(accessory: Accessory) -> Service: +def create_window_covering_service(accessory): """Define a window-covering characteristics as per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.WINDOW_COVERING) @@ -38,7 +35,7 @@ def create_window_covering_service(accessory: Accessory) -> Service: return service -def create_window_covering_service_with_h_tilt(accessory: Accessory) -> None: +def create_window_covering_service_with_h_tilt(accessory): """Define a window-covering characteristics as per page 219 of HAP spec.""" service = create_window_covering_service(accessory) @@ -53,7 +50,7 @@ def create_window_covering_service_with_h_tilt(accessory: Accessory) -> None: tilt_target.maxValue = 90 -def create_window_covering_service_with_h_tilt_2(accessory: Accessory) -> None: +def create_window_covering_service_with_h_tilt_2(accessory): """Define a window-covering characteristics as per page 219 of HAP spec.""" service = create_window_covering_service(accessory) @@ -68,7 +65,7 @@ def create_window_covering_service_with_h_tilt_2(accessory: Accessory) -> None: tilt_target.maxValue = 0 -def create_window_covering_service_with_v_tilt(accessory: Accessory) -> None: +def create_window_covering_service_with_v_tilt(accessory): 
"""Define a window-covering characteristics as per page 219 of HAP spec.""" service = create_window_covering_service(accessory) @@ -83,7 +80,7 @@ def create_window_covering_service_with_v_tilt(accessory: Accessory) -> None: tilt_target.maxValue = 90 -def create_window_covering_service_with_v_tilt_2(accessory: Accessory) -> None: +def create_window_covering_service_with_v_tilt_2(accessory): """Define a window-covering characteristics as per page 219 of HAP spec.""" service = create_window_covering_service(accessory) @@ -98,7 +95,7 @@ def create_window_covering_service_with_v_tilt_2(accessory: Accessory) -> None: tilt_target.maxValue = 0 -def create_window_covering_service_with_none_tilt(accessory: Accessory) -> None: +def create_window_covering_service_with_none_tilt(accessory): """Define a window-covering characteristics as per page 219 of HAP spec. This accessory uses None for the tilt value unexpectedly. @@ -116,39 +113,9 @@ def create_window_covering_service_with_none_tilt(accessory: Accessory) -> None: tilt_target.maxValue = 0 -def create_window_covering_service_with_no_minmax_tilt(accessory): - """Apply use values (-90 to 90) if min/max not provided.""" - service = create_window_covering_service(accessory) - - tilt_current = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_CURRENT) - tilt_current.value = 0 - - tilt_target = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_TARGET) - tilt_target.value = 0 - - -def create_window_covering_service_with_full_range_tilt(accessory): - """Somfi Velux Integration.""" - service = create_window_covering_service(accessory) - - tilt_current = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_CURRENT) - tilt_current.value = 0 - tilt_current.minValue = -90 - tilt_current.maxValue = 90 - - tilt_target = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_TARGET) - tilt_target.value = 0 - tilt_target.minValue = -90 - tilt_target.maxValue = 90 - - -async def test_change_window_cover_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_change_window_cover_state(hass: HomeAssistant) -> None: """Test that we can turn a HomeKit alarm on and off again.""" - helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service - ) + helper = await setup_test_component(hass, create_window_covering_service) await hass.services.async_call( "cover", "open_cover", {"entity_id": helper.entity_id}, blocking=True @@ -171,13 +138,9 @@ async def test_change_window_cover_state( ) -async def test_read_window_cover_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_read_window_cover_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit alarm accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service - ) + helper = await setup_test_component(hass, create_window_covering_service) await helper.async_update( ServicesTypes.WINDOW_COVERING, @@ -208,12 +171,10 @@ async def test_read_window_cover_state( assert state.attributes["obstruction-detected"] is True -async def test_read_window_cover_tilt_horizontal( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_read_window_cover_tilt_horizontal(hass: HomeAssistant) -> None: """Test that horizontal tilt is handled correctly.""" helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_h_tilt + hass, create_window_covering_service_with_h_tilt ) await helper.async_update( @@ 
-225,12 +186,10 @@ async def test_read_window_cover_tilt_horizontal( assert state.attributes["current_tilt_position"] == 83 -async def test_read_window_cover_tilt_horizontal_2( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_read_window_cover_tilt_horizontal_2(hass: HomeAssistant) -> None: """Test that horizontal tilt is handled correctly.""" helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_h_tilt_2 + hass, create_window_covering_service_with_h_tilt_2 ) await helper.async_update( @@ -242,12 +201,10 @@ async def test_read_window_cover_tilt_horizontal_2( assert state.attributes["current_tilt_position"] == 83 -async def test_read_window_cover_tilt_vertical( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_read_window_cover_tilt_vertical(hass: HomeAssistant) -> None: """Test that vertical tilt is handled correctly.""" helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_v_tilt + hass, create_window_covering_service_with_v_tilt ) await helper.async_update( @@ -259,12 +216,10 @@ async def test_read_window_cover_tilt_vertical( assert state.attributes["current_tilt_position"] == 83 -async def test_read_window_cover_tilt_vertical_2( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_read_window_cover_tilt_vertical_2(hass: HomeAssistant) -> None: """Test that vertical tilt is handled correctly.""" helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_v_tilt_2 + hass, create_window_covering_service_with_v_tilt_2 ) await helper.async_update( @@ -276,12 +231,10 @@ async def test_read_window_cover_tilt_vertical_2( assert state.attributes["current_tilt_position"] == 83 -async def test_read_window_cover_tilt_missing_tilt( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_read_window_cover_tilt_missing_tilt(hass: HomeAssistant) -> None: """Test that missing tilt is handled.""" helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_none_tilt + hass, create_window_covering_service_with_none_tilt ) await helper.async_update( @@ -293,46 +246,10 @@ async def test_read_window_cover_tilt_missing_tilt( assert state.state != STATE_UNAVAILABLE -async def test_read_window_cover_tilt_full_range( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: - """Test that horizontal tilt is handled correctly.""" - helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_full_range_tilt - ) - - await helper.async_update( - ServicesTypes.WINDOW_COVERING, - {CharacteristicsTypes.HORIZONTAL_TILT_CURRENT: 0}, - ) - state = await helper.poll_and_get_state() - # Expect converted value from arcdegree scale to percentage scale. - assert state.attributes["current_tilt_position"] == 50 - - -async def test_read_window_cover_tilt_no_minmax( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: - """Test that horizontal tilt is handled correctly.""" - helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_no_minmax_tilt - ) - - await helper.async_update( - ServicesTypes.WINDOW_COVERING, - {CharacteristicsTypes.HORIZONTAL_TILT_CURRENT: 90}, - ) - state = await helper.poll_and_get_state() - # Expect converted value from arcdegree scale to percentage scale. 
- assert state.attributes["current_tilt_position"] == 100 - - -async def test_write_window_cover_tilt_horizontal( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_write_window_cover_tilt_horizontal(hass: HomeAssistant) -> None: """Test that horizontal tilt is written correctly.""" helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_h_tilt + hass, create_window_covering_service_with_h_tilt ) await hass.services.async_call( @@ -350,12 +267,10 @@ async def test_write_window_cover_tilt_horizontal( ) -async def test_write_window_cover_tilt_horizontal_2( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_write_window_cover_tilt_horizontal_2(hass: HomeAssistant) -> None: """Test that horizontal tilt is written correctly.""" helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_h_tilt_2 + hass, create_window_covering_service_with_h_tilt_2 ) await hass.services.async_call( @@ -373,12 +288,10 @@ async def test_write_window_cover_tilt_horizontal_2( ) -async def test_write_window_cover_tilt_vertical( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_write_window_cover_tilt_vertical(hass: HomeAssistant) -> None: """Test that vertical tilt is written correctly.""" helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_v_tilt + hass, create_window_covering_service_with_v_tilt ) await hass.services.async_call( @@ -396,12 +309,10 @@ async def test_write_window_cover_tilt_vertical( ) -async def test_write_window_cover_tilt_vertical_2( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_write_window_cover_tilt_vertical_2(hass: HomeAssistant) -> None: """Test that vertical tilt is written correctly.""" helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_v_tilt_2 + hass, create_window_covering_service_with_v_tilt_2 ) await hass.services.async_call( @@ -419,35 +330,10 @@ async def test_write_window_cover_tilt_vertical_2( ) -async def test_write_window_cover_tilt_no_minmax( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: - """Test that horizontal tilt is written correctly.""" - helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_no_minmax_tilt - ) - - await hass.services.async_call( - "cover", - "set_cover_tilt_position", - {"entity_id": helper.entity_id, "tilt_position": 90}, - blocking=True, - ) - # Expect converted value from percentage scale to arcdegree scale. 
- helper.async_assert_service_values( - ServicesTypes.WINDOW_COVERING, - { - CharacteristicsTypes.HORIZONTAL_TILT_TARGET: 72, - }, - ) - - -async def test_window_cover_stop( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_window_cover_stop(hass: HomeAssistant) -> None: """Test that vertical tilt is written correctly.""" helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_v_tilt + hass, create_window_covering_service_with_v_tilt ) await hass.services.async_call( @@ -461,58 +347,7 @@ async def test_window_cover_stop( ) -async def test_write_window_cover_tilt_full_range( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: - """Test that full-range tilt is working correctly.""" - helper = await setup_test_component( - hass, get_next_aid(), create_window_covering_service_with_full_range_tilt - ) - - await hass.services.async_call( - "cover", - "set_cover_tilt_position", - {"entity_id": helper.entity_id, "tilt_position": 10}, - blocking=True, - ) - # Expect converted value from percentage scale to arc on -90 to +90 scale. - helper.async_assert_service_values( - ServicesTypes.WINDOW_COVERING, - { - CharacteristicsTypes.HORIZONTAL_TILT_TARGET: -72, - }, - ) - - await hass.services.async_call( - "cover", - "set_cover_tilt_position", - {"entity_id": helper.entity_id, "tilt_position": 50}, - blocking=True, - ) - # Expect converted value from percentage scale to arc on -90 to +90 scale. - helper.async_assert_service_values( - ServicesTypes.WINDOW_COVERING, - { - CharacteristicsTypes.HORIZONTAL_TILT_TARGET: 0, - }, - ) - - await hass.services.async_call( - "cover", - "set_cover_tilt_position", - {"entity_id": helper.entity_id, "tilt_position": 90}, - blocking=True, - ) - # Expect converted value from percentage scale to arc on -90 to +90 scale. 
- helper.async_assert_service_values( - ServicesTypes.WINDOW_COVERING, - { - CharacteristicsTypes.HORIZONTAL_TILT_TARGET: 72, - }, - ) - - -def create_garage_door_opener_service(accessory: Accessory) -> None: +def create_garage_door_opener_service(accessory): """Define a garage-door-opener chars as per page 217 of HAP spec.""" service = accessory.add_service(ServicesTypes.GARAGE_DOOR_OPENER) @@ -531,13 +366,9 @@ def create_garage_door_opener_service(accessory: Accessory) -> None: return service -async def test_change_door_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_change_door_state(hass: HomeAssistant) -> None: """Test that we can turn open and close a HomeKit garage door.""" - helper = await setup_test_component( - hass, get_next_aid(), create_garage_door_opener_service - ) + helper = await setup_test_component(hass, create_garage_door_opener_service) await hass.services.async_call( "cover", "open_cover", {"entity_id": helper.entity_id}, blocking=True @@ -560,13 +391,9 @@ async def test_change_door_state( ) -async def test_read_door_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_read_door_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit garage door.""" - helper = await setup_test_component( - hass, get_next_aid(), create_garage_door_opener_service - ) + helper = await setup_test_component(hass, create_garage_door_opener_service) await helper.async_update( ServicesTypes.GARAGE_DOOR_OPENER, @@ -605,9 +432,7 @@ async def test_read_door_state( async def test_migrate_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test that we can migrate a cover unique id.""" aid = get_next_aid() @@ -616,7 +441,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, aid, create_garage_door_opener_service) + await setup_test_component(hass, create_garage_door_opener_service) assert ( entity_registry.async_get(cover_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_device_trigger.py b/tests/components/homekit_controller/test_device_trigger.py index ba952ac5913..43572f56d50 100644 --- a/tests/components/homekit_controller/test_device_trigger.py +++ b/tests/components/homekit_controller/test_device_trigger.py @@ -1,8 +1,5 @@ """Test homekit_controller stateless triggers.""" -from collections.abc import Callable - -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes import pytest @@ -18,7 +15,7 @@ from homeassistant.setup import async_setup_component from .common import setup_test_component -from tests.common import async_get_device_automations +from tests.common import async_get_device_automations, async_mock_service @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -26,7 +23,13 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -def create_remote(accessory: Accessory) -> None: +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + +def create_remote(accessory): """Define characteristics for a button (that is in a group).""" service_label =
accessory.add_service(ServicesTypes.SERVICE_LABEL) @@ -51,7 +54,7 @@ def create_remote(accessory: Accessory) -> None: battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -def create_button(accessory: Accessory) -> None: +def create_button(accessory): """Define a button (that is not in a group).""" button = accessory.add_service(ServicesTypes.STATELESS_PROGRAMMABLE_SWITCH) @@ -66,7 +69,7 @@ def create_button(accessory: Accessory) -> None: battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -def create_doorbell(accessory: Accessory) -> None: +def create_doorbell(accessory): """Define a button (that is not in a group).""" button = accessory.add_service(ServicesTypes.DOORBELL) @@ -85,10 +88,9 @@ async def test_enumerate_remote( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], ) -> None: """Test that remote is correctly enumerated.""" - await setup_test_component(hass, get_next_aid(), create_remote) + await setup_test_component(hass, create_remote) bat_sensor = entity_registry.async_get("sensor.testdevice_battery") identify_button = entity_registry.async_get("button.testdevice_identify") @@ -137,10 +139,9 @@ async def test_enumerate_button( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], ) -> None: """Test that a button is correctly enumerated.""" - await setup_test_component(hass, get_next_aid(), create_button) + await setup_test_component(hass, create_button) bat_sensor = entity_registry.async_get("sensor.testdevice_battery") identify_button = entity_registry.async_get("button.testdevice_identify") @@ -188,10 +189,9 @@ async def test_enumerate_doorbell( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], ) -> None: """Test that a button is correctly enumerated.""" - await setup_test_component(hass, get_next_aid(), create_doorbell) + await setup_test_component(hass, create_doorbell) bat_sensor = entity_registry.async_get("sensor.testdevice_battery") identify_button = entity_registry.async_get("button.testdevice_identify") @@ -239,11 +239,10 @@ async def test_handle_events( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test that events are handled.""" - helper = await setup_test_component(hass, get_next_aid(), create_remote) + helper = await setup_test_component(hass, create_remote) entry = entity_registry.async_get("sensor.testdevice_battery") @@ -304,8 +303,8 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "device - button1 - single_press - 0" + assert len(calls) == 1 + assert calls[0].data["some"] == "device - button1 - single_press - 0" # Make sure automation doesn't trigger for long press helper.pairing.testing.update_named_service( @@ -313,7 +312,7 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Make sure automation doesn't trigger for double press helper.pairing.testing.update_named_service( @@ -321,7 +320,7 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Make sure second automation fires for long press 
helper.pairing.testing.update_named_service( @@ -329,8 +328,8 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "device - button2 - long_press - 0" + assert len(calls) == 2 + assert calls[1].data["some"] == "device - button2 - long_press - 0" # Turn the automations off await hass.services.async_call( @@ -339,7 +338,6 @@ async def test_handle_events( {"entity_id": "automation.long_press"}, blocking=True, ) - assert len(service_calls) == 3 await hass.services.async_call( "automation", @@ -347,7 +345,6 @@ async def test_handle_events( {"entity_id": "automation.single_press"}, blocking=True, ) - assert len(service_calls) == 4 # Make sure event no longer fires helper.pairing.testing.update_named_service( @@ -355,18 +352,17 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(service_calls) == 4 + assert len(calls) == 2 async def test_handle_events_late_setup( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test that events are handled when setup happens after startup.""" - helper = await setup_test_component(hass, get_next_aid(), create_remote) + helper = await setup_test_component(hass, create_remote) entry = entity_registry.async_get("sensor.testdevice_battery") @@ -436,8 +432,8 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "device - button1 - single_press - 0" + assert len(calls) == 1 + assert calls[0].data["some"] == "device - button1 - single_press - 0" # Make sure automation doesn't trigger for a polled None helper.pairing.testing.update_named_service( @@ -445,7 +441,7 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Make sure automation doesn't trigger for long press helper.pairing.testing.update_named_service( @@ -453,7 +449,7 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Make sure automation doesn't trigger for double press helper.pairing.testing.update_named_service( @@ -461,7 +457,7 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Make sure second automation fires for long press helper.pairing.testing.update_named_service( @@ -469,8 +465,8 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "device - button2 - long_press - 0" + assert len(calls) == 2 + assert calls[1].data["some"] == "device - button2 - long_press - 0" # Turn the automations off await hass.services.async_call( @@ -479,7 +475,6 @@ async def test_handle_events_late_setup( {"entity_id": "automation.long_press"}, blocking=True, ) - assert len(service_calls) == 3 await hass.services.async_call( "automation", @@ -487,7 +482,6 @@ async def test_handle_events_late_setup( {"entity_id": "automation.single_press"}, blocking=True, ) - assert len(service_calls) == 4 # Make sure event no longer fires helper.pairing.testing.update_named_service( @@ -495,4 +489,4 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(service_calls) 
== 4 + assert len(calls) == 2 diff --git a/tests/components/homekit_controller/test_event.py b/tests/components/homekit_controller/test_event.py index 2254845964a..e139b49982a 100644 --- a/tests/components/homekit_controller/test_event.py +++ b/tests/components/homekit_controller/test_event.py @@ -1,8 +1,5 @@ """Test homekit_controller stateless triggers.""" -from collections.abc import Callable - -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -13,7 +10,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_remote(accessory: Accessory) -> None: +def create_remote(accessory): """Define characteristics for a button (that is in a group).""" service_label = accessory.add_service(ServicesTypes.SERVICE_LABEL) @@ -38,7 +35,7 @@ def create_remote(accessory: Accessory) -> None: battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -def create_button(accessory: Accessory) -> None: +def create_button(accessory): """Define a button (that is not in a group).""" button = accessory.add_service(ServicesTypes.STATELESS_PROGRAMMABLE_SWITCH) @@ -53,7 +50,7 @@ def create_button(accessory: Accessory) -> None: battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -def create_doorbell(accessory: Accessory) -> None: +def create_doorbell(accessory): """Define a button (that is not in a group).""" button = accessory.add_service(ServicesTypes.DOORBELL) @@ -68,13 +65,9 @@ def create_doorbell(accessory: Accessory) -> None: battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -async def test_remote( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], -) -> None: +async def test_remote(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: """Test that remote is supported.""" - helper = await setup_test_component(hass, get_next_aid(), create_remote) + helper = await setup_test_component(hass, create_remote) entities = [ ("event.testdevice_button_1", "Button 1"), @@ -115,13 +108,9 @@ async def test_remote( assert state.attributes["event_type"] == "long_press" -async def test_button( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], -) -> None: +async def test_button(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: """Test that a button is correctly enumerated.""" - helper = await setup_test_component(hass, get_next_aid(), create_button) + helper = await setup_test_component(hass, create_button) entity_id = "event.testdevice_button_1" button = entity_registry.async_get(entity_id) @@ -156,12 +145,10 @@ async def test_button( async def test_doorbell( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test that doorbell service is handled.""" - helper = await setup_test_component(hass, get_next_aid(), create_doorbell) + helper = await setup_test_component(hass, create_doorbell) entity_id = "event.testdevice_doorbell" doorbell = entity_registry.async_get(entity_id) diff --git a/tests/components/homekit_controller/test_fan.py b/tests/components/homekit_controller/test_fan.py index 2c498e1a9c1..428d3ab7d50 100644 --- a/tests/components/homekit_controller/test_fan.py +++ b/tests/components/homekit_controller/test_fan.py @@ -1,18 +1,15 @@ """Basic checks for HomeKit fans.""" -from collections.abc import Callable - -from
aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import setup_test_component +from .common import get_next_aid, setup_test_component -def create_fan_service(accessory: Accessory) -> None: +def create_fan_service(accessory): """Define fan v1 characteristics as per HAP spec. This service is no longer documented in R2 of the public HAP spec but existing @@ -30,7 +27,7 @@ def create_fan_service(accessory: Accessory) -> None: speed.value = 0 -def create_fanv2_service(accessory: Accessory) -> None: +def create_fanv2_service(accessory): """Define fan v2 characteristics as per HAP spec.""" service = accessory.add_service(ServicesTypes.FAN_V2) @@ -47,7 +44,7 @@ def create_fanv2_service(accessory: Accessory) -> None: swing_mode.value = 0 -def create_fanv2_service_non_standard_rotation_range(accessory: Accessory) -> None: +def create_fanv2_service_non_standard_rotation_range(accessory): """Define fan v2 with a non-standard rotation range.""" service = accessory.add_service(ServicesTypes.FAN_V2) @@ -61,7 +58,7 @@ def create_fanv2_service_non_standard_rotation_range(accessory: Accessory) -> No speed.minStep = 1 -def create_fanv2_service_with_min_step(accessory: Accessory) -> None: +def create_fanv2_service_with_min_step(accessory): """Define fan v2 characteristics as per HAP spec.""" service = accessory.add_service(ServicesTypes.FAN_V2) @@ -79,7 +76,7 @@ def create_fanv2_service_with_min_step(accessory: Accessory) -> None: swing_mode.value = 0 -def create_fanv2_service_without_rotation_speed(accessory: Accessory) -> None: +def create_fanv2_service_without_rotation_speed(accessory): """Define fan v2 characteristics as per HAP spec.""" service = accessory.add_service(ServicesTypes.FAN_V2) @@ -93,11 +90,9 @@ def create_fanv2_service_without_rotation_speed(accessory: Accessory) -> None: swing_mode.value = 0 -async def test_fan_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_fan_read_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit fan accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_fan_service) + helper = await setup_test_component(hass, create_fan_service) state = await helper.async_update( ServicesTypes.FAN, {CharacteristicsTypes.ON: False} @@ -110,9 +105,9 @@ async def test_fan_read_state( assert state.state == "on" -async def test_turn_on(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: +async def test_turn_on(hass: HomeAssistant) -> None: """Test that we can turn a fan on.""" - helper = await setup_test_component(hass, get_next_aid(), create_fan_service) + helper = await setup_test_component(hass, create_fan_service) await hass.services.async_call( "fan", @@ -157,12 +152,10 @@ async def test_turn_on(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> ) -async def test_turn_on_off_without_rotation_speed( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_turn_on_off_without_rotation_speed(hass: HomeAssistant) -> None: """Test that we can turn a fan on.""" helper = await setup_test_component( - hass, get_next_aid(), create_fanv2_service_without_rotation_speed + hass, create_fanv2_service_without_rotation_speed ) await hass.services.async_call( @@ -192,9 +185,9 @@ async def test_turn_on_off_without_rotation_speed( ) -async def 
test_turn_off(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: +async def test_turn_off(hass: HomeAssistant) -> None: """Test that we can turn a fan off.""" - helper = await setup_test_component(hass, get_next_aid(), create_fan_service) + helper = await setup_test_component(hass, create_fan_service) await helper.async_update(ServicesTypes.FAN, {CharacteristicsTypes.ON: 1}) @@ -212,9 +205,9 @@ async def test_turn_off(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> ) -async def test_set_speed(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: +async def test_set_speed(hass: HomeAssistant) -> None: """Test that we set fan speed.""" - helper = await setup_test_component(hass, get_next_aid(), create_fan_service) + helper = await setup_test_component(hass, create_fan_service) await helper.async_update(ServicesTypes.FAN, {CharacteristicsTypes.ON: 1}) @@ -271,11 +264,9 @@ async def test_set_speed(hass: HomeAssistant, get_next_aid: Callable[[], int]) - ) -async def test_set_percentage( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_set_percentage(hass: HomeAssistant) -> None: """Test that we set fan speed by percentage.""" - helper = await setup_test_component(hass, get_next_aid(), create_fan_service) + helper = await setup_test_component(hass, create_fan_service) await helper.async_update(ServicesTypes.FAN, {CharacteristicsTypes.ON: 1}) @@ -306,9 +297,9 @@ async def test_set_percentage( ) -async def test_speed_read(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: +async def test_speed_read(hass: HomeAssistant) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, get_next_aid(), create_fan_service) + helper = await setup_test_component(hass, create_fan_service) state = await helper.async_update( ServicesTypes.FAN, @@ -346,11 +337,9 @@ async def test_speed_read(hass: HomeAssistant, get_next_aid: Callable[[], int]) assert state.attributes["percentage"] == 0 -async def test_set_direction( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_set_direction(hass: HomeAssistant) -> None: """Test that we can set fan spin direction.""" - helper = await setup_test_component(hass, get_next_aid(), create_fan_service) + helper = await setup_test_component(hass, create_fan_service) await hass.services.async_call( "fan", @@ -379,11 +368,9 @@ async def test_set_direction( ) -async def test_direction_read( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_direction_read(hass: HomeAssistant) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, get_next_aid(), create_fan_service) + helper = await setup_test_component(hass, create_fan_service) state = await helper.async_update( ServicesTypes.FAN, {CharacteristicsTypes.ROTATION_DIRECTION: 0} @@ -396,11 +383,9 @@ async def test_direction_read( assert state.attributes["direction"] == "reverse" -async def test_fanv2_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_fanv2_read_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit fan accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) + helper = await setup_test_component(hass, create_fanv2_service) state = await helper.async_update( ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: False} @@ -413,9 +398,9 @@ async def test_fanv2_read_state( assert state.state == 
"on" -async def test_v2_turn_on(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: +async def test_v2_turn_on(hass: HomeAssistant) -> None: """Test that we can turn a fan on.""" - helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) + helper = await setup_test_component(hass, create_fanv2_service) await hass.services.async_call( "fan", @@ -488,11 +473,9 @@ async def test_v2_turn_on(hass: HomeAssistant, get_next_aid: Callable[[], int]) ) -async def test_v2_turn_off( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_v2_turn_off(hass: HomeAssistant) -> None: """Test that we can turn a fan off.""" - helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) + helper = await setup_test_component(hass, create_fanv2_service) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -510,11 +493,9 @@ async def test_v2_turn_off( ) -async def test_v2_set_speed( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_v2_set_speed(hass: HomeAssistant) -> None: """Test that we set fan speed.""" - helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) + helper = await setup_test_component(hass, create_fanv2_service) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -571,11 +552,9 @@ async def test_v2_set_speed( ) -async def test_v2_set_percentage( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_v2_set_percentage(hass: HomeAssistant) -> None: """Test that we set fan speed by percentage.""" - helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) + helper = await setup_test_component(hass, create_fanv2_service) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -606,13 +585,9 @@ async def test_v2_set_percentage( ) -async def test_v2_set_percentage_with_min_step( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_v2_set_percentage_with_min_step(hass: HomeAssistant) -> None: """Test that we set fan speed by percentage.""" - helper = await setup_test_component( - hass, get_next_aid(), create_fanv2_service_with_min_step - ) + helper = await setup_test_component(hass, create_fanv2_service_with_min_step) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -643,11 +618,9 @@ async def test_v2_set_percentage_with_min_step( ) -async def test_v2_speed_read( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_v2_speed_read(hass: HomeAssistant) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) + helper = await setup_test_component(hass, create_fanv2_service) state = await helper.async_update( ServicesTypes.FAN_V2, @@ -684,11 +657,9 @@ async def test_v2_speed_read( assert state.attributes["percentage"] == 0 -async def test_v2_set_direction( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_v2_set_direction(hass: HomeAssistant) -> None: """Test that we can set fan spin direction.""" - helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) + helper = await setup_test_component(hass, create_fanv2_service) await hass.services.async_call( "fan", @@ -717,11 +688,9 @@ async def test_v2_set_direction( ) -async def test_v2_direction_read( - hass: HomeAssistant, get_next_aid: Callable[[], int] 
-) -> None: +async def test_v2_direction_read(hass: HomeAssistant) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) + helper = await setup_test_component(hass, create_fanv2_service) state = await helper.async_update( ServicesTypes.FAN_V2, {CharacteristicsTypes.ROTATION_DIRECTION: 0} @@ -734,11 +703,9 @@ async def test_v2_direction_read( assert state.attributes["direction"] == "reverse" -async def test_v2_oscillate( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_v2_oscillate(hass: HomeAssistant) -> None: """Test that we can control a fans oscillation.""" - helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) + helper = await setup_test_component(hass, create_fanv2_service) await hass.services.async_call( "fan", @@ -767,11 +734,9 @@ async def test_v2_oscillate( ) -async def test_v2_oscillate_read( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_v2_oscillate_read(hass: HomeAssistant) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) + helper = await setup_test_component(hass, create_fanv2_service) state = await helper.async_update( ServicesTypes.FAN_V2, {CharacteristicsTypes.SWING_MODE: 0} @@ -785,11 +750,11 @@ async def test_v2_oscillate_read( async def test_v2_set_percentage_non_standard_rotation_range( - hass: HomeAssistant, get_next_aid: Callable[[], int] + hass: HomeAssistant, ) -> None: """Test that we set fan speed with a non-standard rotation range.""" helper = await setup_test_component( - hass, get_next_aid(), create_fanv2_service_non_standard_rotation_range + hass, create_fanv2_service_non_standard_rotation_range ) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -848,9 +813,7 @@ async def test_v2_set_percentage_non_standard_rotation_range( async def test_migrate_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test a we can migrate a fan unique id.""" aid = get_next_aid() @@ -859,9 +822,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component( - hass, aid, create_fanv2_service_non_standard_rotation_range - ) + await setup_test_component(hass, create_fanv2_service_non_standard_rotation_range) assert ( entity_registry.async_get(fan_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_humidifier.py b/tests/components/homekit_controller/test_humidifier.py index 07bdb8a2e38..60c74be3949 100644 --- a/tests/components/homekit_controller/test_humidifier.py +++ b/tests/components/homekit_controller/test_humidifier.py @@ -1,23 +1,16 @@ """Basic checks for HomeKit Humidifier/Dehumidifier.""" -from collections.abc import Callable - -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import Service, ServicesTypes +from aiohomekit.model.services import ServicesTypes -from homeassistant.components.humidifier import ( - DOMAIN as HUMIDIFIER_DOMAIN, - MODE_AUTO, - MODE_NORMAL, -) +from homeassistant.components.humidifier import DOMAIN, MODE_AUTO, MODE_NORMAL from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import 
setup_test_component +from .common import get_next_aid, setup_test_component -def create_humidifier_service(accessory: Accessory) -> Service: +def create_humidifier_service(accessory): """Define a humidifier characteristics as per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.HUMIDIFIER_DEHUMIDIFIER) @@ -44,7 +37,7 @@ def create_humidifier_service(accessory: Accessory) -> Service: return service -def create_dehumidifier_service(accessory: Accessory) -> Service: +def create_dehumidifier_service(accessory): """Define a dehumidifier characteristics as per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.HUMIDIFIER_DEHUMIDIFIER) @@ -71,14 +64,12 @@ def create_dehumidifier_service(accessory: Accessory) -> Service: return service -async def test_humidifier_active_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_humidifier_active_state(hass: HomeAssistant) -> None: """Test that we can turn a HomeKit humidifier on and off again.""" - helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) + helper = await setup_test_component(hass, create_humidifier_service) await hass.services.async_call( - HUMIDIFIER_DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True + DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True ) helper.async_assert_service_values( @@ -87,7 +78,7 @@ async def test_humidifier_active_state( ) await hass.services.async_call( - HUMIDIFIER_DOMAIN, "turn_off", {"entity_id": helper.entity_id}, blocking=True + DOMAIN, "turn_off", {"entity_id": helper.entity_id}, blocking=True ) helper.async_assert_service_values( @@ -96,16 +87,12 @@ async def test_humidifier_active_state( ) -async def test_dehumidifier_active_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_dehumidifier_active_state(hass: HomeAssistant) -> None: """Test that we can turn a HomeKit dehumidifier on and off again.""" - helper = await setup_test_component( - hass, get_next_aid(), create_dehumidifier_service - ) + helper = await setup_test_component(hass, create_dehumidifier_service) await hass.services.async_call( - HUMIDIFIER_DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True + DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True ) helper.async_assert_service_values( @@ -114,7 +101,7 @@ async def test_dehumidifier_active_state( ) await hass.services.async_call( - HUMIDIFIER_DOMAIN, "turn_off", {"entity_id": helper.entity_id}, blocking=True + DOMAIN, "turn_off", {"entity_id": helper.entity_id}, blocking=True ) helper.async_assert_service_values( @@ -123,11 +110,9 @@ async def test_dehumidifier_active_state( ) -async def test_humidifier_read_humidity( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_humidifier_read_humidity(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) + helper = await setup_test_component(hass, create_humidifier_service) state = await helper.async_update( ServicesTypes.HUMIDIFIER_DEHUMIDIFIER, @@ -164,13 +149,9 @@ async def test_humidifier_read_humidity( assert state.state == "off" -async def test_dehumidifier_read_humidity( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_dehumidifier_read_humidity(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit dehumidifier 
accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_dehumidifier_service - ) + helper = await setup_test_component(hass, create_dehumidifier_service) state = await helper.async_update( ServicesTypes.HUMIDIFIER_DEHUMIDIFIER, @@ -205,14 +186,12 @@ async def test_dehumidifier_read_humidity( assert state.attributes["humidity"] == 40 -async def test_humidifier_set_humidity( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_humidifier_set_humidity(hass: HomeAssistant) -> None: """Test that we can set the state of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) + helper = await setup_test_component(hass, create_humidifier_service) await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, "set_humidity", {"entity_id": helper.entity_id, "humidity": 20}, blocking=True, @@ -223,16 +202,12 @@ async def test_humidifier_set_humidity( ) -async def test_dehumidifier_set_humidity( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_dehumidifier_set_humidity(hass: HomeAssistant) -> None: """Test that we can set the state of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_dehumidifier_service - ) + helper = await setup_test_component(hass, create_dehumidifier_service) await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, "set_humidity", {"entity_id": helper.entity_id, "humidity": 20}, blocking=True, @@ -243,14 +218,12 @@ async def test_dehumidifier_set_humidity( ) -async def test_humidifier_set_mode( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_humidifier_set_mode(hass: HomeAssistant) -> None: """Test that we can set the mode of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) + helper = await setup_test_component(hass, create_humidifier_service) await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, "set_mode", {"entity_id": helper.entity_id, "mode": MODE_AUTO}, blocking=True, @@ -264,7 +237,7 @@ async def test_humidifier_set_mode( ) await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, "set_mode", {"entity_id": helper.entity_id, "mode": MODE_NORMAL}, blocking=True, @@ -278,16 +251,12 @@ async def test_humidifier_set_mode( ) -async def test_dehumidifier_set_mode( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_dehumidifier_set_mode(hass: HomeAssistant) -> None: """Test that we can set the mode of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_dehumidifier_service - ) + helper = await setup_test_component(hass, create_dehumidifier_service) await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, "set_mode", {"entity_id": helper.entity_id, "mode": MODE_AUTO}, blocking=True, @@ -301,7 +270,7 @@ async def test_dehumidifier_set_mode( ) await hass.services.async_call( - HUMIDIFIER_DOMAIN, + DOMAIN, "set_mode", {"entity_id": helper.entity_id, "mode": MODE_NORMAL}, blocking=True, @@ -315,11 +284,9 @@ async def test_dehumidifier_set_mode( ) -async def test_humidifier_read_only_mode( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_humidifier_read_only_mode(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, get_next_aid(), 
create_humidifier_service) + helper = await setup_test_component(hass, create_humidifier_service) state = await helper.poll_and_get_state() assert state.attributes["mode"] == "normal" @@ -357,13 +324,9 @@ async def test_humidifier_read_only_mode( assert state.attributes["mode"] == "normal" -async def test_dehumidifier_read_only_mode( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_dehumidifier_read_only_mode(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_dehumidifier_service - ) + helper = await setup_test_component(hass, create_dehumidifier_service) state = await helper.poll_and_get_state() assert state.attributes["mode"] == "normal" @@ -401,11 +364,9 @@ async def test_dehumidifier_read_only_mode( assert state.attributes["mode"] == "normal" -async def test_humidifier_target_humidity_modes( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_humidifier_target_humidity_modes(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) + helper = await setup_test_component(hass, create_humidifier_service) state = await helper.async_update( ServicesTypes.HUMIDIFIER_DEHUMIDIFIER, @@ -448,13 +409,9 @@ async def test_humidifier_target_humidity_modes( assert state.attributes["humidity"] == 37 -async def test_dehumidifier_target_humidity_modes( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_dehumidifier_target_humidity_modes(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_dehumidifier_service - ) + helper = await setup_test_component(hass, create_dehumidifier_service) state = await helper.async_update( ServicesTypes.HUMIDIFIER_DEHUMIDIFIER, @@ -500,9 +457,7 @@ async def test_dehumidifier_target_humidity_modes( async def test_migrate_entity_ids( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test that we can migrate humidifier entity ids.""" aid = get_next_aid() @@ -512,7 +467,7 @@ async def test_migrate_entity_ids( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, aid, create_humidifier_service) + await setup_test_component(hass, create_humidifier_service) assert ( entity_registry.async_get(humidifier_entry.entity_id).unique_id == f"00:00:00:00:00:00_{aid}_8" diff --git a/tests/components/homekit_controller/test_init.py b/tests/components/homekit_controller/test_init.py index f74e8ea994e..542d87d0b0e 100644 --- a/tests/components/homekit_controller/test_init.py +++ b/tests/components/homekit_controller/test_init.py @@ -1,6 +1,5 @@ """Tests for homekit_controller init.""" -from collections.abc import Callable from datetime import timedelta import pathlib from unittest.mock import patch @@ -8,7 +7,7 @@ from unittest.mock import patch from aiohomekit import AccessoryNotFoundError from aiohomekit.model import Accessory, Transport from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import Service, ServicesTypes +from aiohomekit.model.services import ServicesTypes from aiohomekit.testing import FakePairing from attr 
import asdict import pytest @@ -40,18 +39,16 @@ ALIVE_DEVICE_NAME = "testdevice" ALIVE_DEVICE_ENTITY_ID = "light.testdevice" -def create_motion_sensor_service(accessory: Accessory) -> None: +def create_motion_sensor_service(accessory): """Define motion characteristics as per page 225 of HAP spec.""" service = accessory.add_service(ServicesTypes.MOTION_SENSOR) cur_state = service.add_char(CharacteristicsTypes.MOTION_DETECTED) cur_state.value = 0 -async def test_unload_on_stop( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_unload_on_stop(hass: HomeAssistant) -> None: """Test async_unload is called on stop.""" - await setup_test_component(hass, get_next_aid(), create_motion_sensor_service) + await setup_test_component(hass, create_motion_sensor_service) with patch( "homeassistant.components.homekit_controller.HKDevice.async_unload" ) as async_unlock_mock: @@ -61,13 +58,9 @@ async def test_unload_on_stop( assert async_unlock_mock.called -async def test_async_remove_entry( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_async_remove_entry(hass: HomeAssistant) -> None: """Test unpairing a component.""" - helper = await setup_test_component( - hass, get_next_aid(), create_motion_sensor_service - ) + helper = await setup_test_component(hass, create_motion_sensor_service) controller = helper.pairing.controller hkid = "00:00:00:00:00:00" @@ -83,7 +76,7 @@ async def test_async_remove_entry( assert hkid not in hass.data[ENTITY_MAP].storage_data -def create_alive_service(accessory: Accessory) -> Service: +def create_alive_service(accessory): """Create a service to validate we can only remove dead devices.""" service = accessory.add_service(ServicesTypes.LIGHTBULB, name=ALIVE_DEVICE_NAME) service.add_char(CharacteristicsTypes.ON) @@ -95,13 +88,10 @@ async def test_device_remove_devices( device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, hass_ws_client: WebSocketGenerator, - get_next_aid: Callable[[], int], ) -> None: """Test we can only remove a device that no longer exists.""" assert await async_setup_component(hass, "config", {}) - helper: Helper = await setup_test_component( - hass, get_next_aid(), create_alive_service - ) + helper: Helper = await setup_test_component(hass, create_alive_service) config_entry = helper.config_entry entry_id = config_entry.entry_id @@ -120,13 +110,10 @@ async def test_device_remove_devices( assert response["success"] -async def test_offline_device_raises( - hass: HomeAssistant, get_next_aid: Callable[[], int], controller -) -> None: +async def test_offline_device_raises(hass: HomeAssistant, controller) -> None: """Test an offline device raises ConfigEntryNotReady.""" is_connected = False - aid = get_next_aid() class OfflineFakePairing(FakePairing): """Fake pairing that can flip is_connected.""" @@ -153,7 +140,7 @@ async def test_offline_device_raises( return {} accessory = Accessory.create_with_info( - aid, "TestDevice", "example.com", "Test", "0001", "0.1" + "TestDevice", "example.com", "Test", "0001", "0.1" ) create_alive_service(accessory) @@ -175,12 +162,11 @@ async def test_offline_device_raises( async def test_ble_device_only_checks_is_available( - hass: HomeAssistant, get_next_aid: Callable[[], int], controller + hass: HomeAssistant, controller ) -> None: """Test a BLE device only checks is_available.""" is_available = False - aid = get_next_aid() class FakeBLEPairing(FakePairing): """Fake BLE pairing that can flip is_available.""" @@ -211,7 +197,7 @@ async def 
test_ble_device_only_checks_is_available( return {} accessory = Accessory.create_with_info( - aid, "TestDevice", "example.com", "Test", "0001", "0.1" + "TestDevice", "example.com", "Test", "0001", "0.1" ) create_alive_service(accessory) @@ -287,19 +273,12 @@ async def test_snapshots( entry = asdict(entity_entry) entry.pop("id", None) entry.pop("device_id", None) - entry.pop("created_at", None) - entry.pop("modified_at", None) - entry.pop("_cache", None) entities.append({"entry": entry, "state": state_dict}) device_dict = asdict(device) device_dict.pop("id", None) device_dict.pop("via_device_id", None) - device_dict.pop("created_at", None) - device_dict.pop("modified_at", None) - device_dict.pop("_cache", None) - devices.append({"device": device_dict, "entities": entities}) assert snapshot == devices diff --git a/tests/components/homekit_controller/test_light.py b/tests/components/homekit_controller/test_light.py index a4a5b59d5cb..c2644735ecb 100644 --- a/tests/components/homekit_controller/test_light.py +++ b/tests/components/homekit_controller/test_light.py @@ -1,12 +1,7 @@ """Basic checks for HomeKitSwitch.""" -from collections.abc import Callable -from unittest import mock - -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import Service, ServicesTypes -from aiohomekit.testing import FakeController +from aiohomekit.model.services import ServicesTypes from homeassistant.components.homekit_controller.const import KNOWN_DEVICES from homeassistant.components.light import ( @@ -18,13 +13,13 @@ from homeassistant.const import ATTR_SUPPORTED_FEATURES, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import setup_test_component +from .common import get_next_aid, setup_test_component LIGHT_BULB_NAME = "TestDevice" LIGHT_BULB_ENTITY_ID = "light.testdevice" -def create_lightbulb_service(accessory: Accessory) -> Service: +def create_lightbulb_service(accessory): """Define lightbulb characteristics.""" service = accessory.add_service(ServicesTypes.LIGHTBULB, name=LIGHT_BULB_NAME) @@ -37,7 +32,7 @@ def create_lightbulb_service(accessory: Accessory) -> Service: return service -def create_lightbulb_service_with_hs(accessory: Accessory) -> Service: +def create_lightbulb_service_with_hs(accessory): """Define a lightbulb service with hue + saturation.""" service = create_lightbulb_service(accessory) @@ -50,7 +45,7 @@ def create_lightbulb_service_with_hs(accessory: Accessory) -> Service: return service -def create_lightbulb_service_with_color_temp(accessory: Accessory) -> Service: +def create_lightbulb_service_with_color_temp(accessory): """Define a lightbulb service with color temp.""" service = create_lightbulb_service(accessory) @@ -60,13 +55,9 @@ def create_lightbulb_service_with_color_temp(accessory: Accessory) -> Service: return service -async def test_switch_change_light_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_change_light_state(hass: HomeAssistant) -> None: """Test that we can turn a HomeKit light on and off again.""" - helper = await setup_test_component( - hass, get_next_aid(), create_lightbulb_service_with_hs - ) + helper = await setup_test_component(hass, create_lightbulb_service_with_hs) await hass.services.async_call( "light", @@ -111,13 +102,9 @@ async def test_switch_change_light_state( ) -async def test_switch_change_light_state_color_temp( - hass: HomeAssistant, 
get_next_aid: Callable[[], int] -) -> None: +async def test_switch_change_light_state_color_temp(hass: HomeAssistant) -> None: """Test that we can turn change color_temp.""" - helper = await setup_test_component( - hass, get_next_aid(), create_lightbulb_service_with_color_temp - ) + helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) await hass.services.async_call( "light", @@ -135,11 +122,9 @@ async def test_switch_change_light_state_color_temp( ) -async def test_switch_read_light_state_dimmer( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_read_light_state_dimmer(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_lightbulb_service) + helper = await setup_test_component(hass, create_lightbulb_service) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -172,11 +157,9 @@ async def test_switch_read_light_state_dimmer( assert state.state == "off" -async def test_switch_push_light_state_dimmer( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_push_light_state_dimmer(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_lightbulb_service) + helper = await setup_test_component(hass, create_lightbulb_service) # Initial state is that the light is off state = hass.states.get(LIGHT_BULB_ENTITY_ID) @@ -202,13 +185,9 @@ async def test_switch_push_light_state_dimmer( assert state.state == "off" -async def test_switch_read_light_state_hs( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_read_light_state_hs(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_lightbulb_service_with_hs - ) + helper = await setup_test_component(hass, create_lightbulb_service_with_hs) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -269,13 +248,9 @@ async def test_switch_read_light_state_hs( assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 -async def test_switch_push_light_state_hs( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_push_light_state_hs(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_lightbulb_service_with_hs - ) + helper = await setup_test_component(hass, create_lightbulb_service_with_hs) # Initial state is that the light is off state = hass.states.get(LIGHT_BULB_ENTITY_ID) @@ -304,13 +279,9 @@ async def test_switch_push_light_state_hs( assert state.state == "off" -async def test_switch_read_light_state_color_temp( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_read_light_state_color_temp(hass: HomeAssistant) -> None: """Test that we can read the color_temp of a light accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_lightbulb_service_with_color_temp - ) + helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -336,13 +307,9 @@ async def test_switch_read_light_state_color_temp( assert 
state.attributes[ATTR_SUPPORTED_FEATURES] == 0 -async def test_switch_push_light_state_color_temp( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_push_light_state_color_temp(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component( - hass, get_next_aid(), create_lightbulb_service_with_color_temp - ) + helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) # Initial state is that the light is off state = hass.states.get(LIGHT_BULB_ENTITY_ID) @@ -361,13 +328,9 @@ async def test_switch_push_light_state_color_temp( assert state.attributes["color_temp"] == 400 -async def test_light_becomes_unavailable_but_recovers( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_light_becomes_unavailable_but_recovers(hass: HomeAssistant) -> None: """Test transition to and from unavailable state.""" - helper = await setup_test_component( - hass, get_next_aid(), create_lightbulb_service_with_color_temp - ) + helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -375,12 +338,7 @@ async def test_light_becomes_unavailable_but_recovers( # Test device goes offline helper.pairing.available = False - with mock.patch.object( - FakeController, - "async_reachable", - return_value=False, - ): - state = await helper.poll_and_get_state() + state = await helper.poll_and_get_state() assert state.state == "unavailable" # Simulate that someone switched on the device in the real world not via HA @@ -398,13 +356,9 @@ async def test_light_becomes_unavailable_but_recovers( assert state.attributes["color_temp"] == 400 -async def test_light_unloaded_removed( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_light_unloaded_removed(hass: HomeAssistant) -> None: """Test entity and HKDevice are correctly unloaded and removed.""" - helper = await setup_test_component( - hass, get_next_aid(), create_lightbulb_service_with_color_temp - ) + helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -428,9 +382,7 @@ async def test_light_unloaded_removed( async def test_migrate_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test a we can migrate a light unique id.""" aid = get_next_aid() @@ -439,7 +391,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, aid, create_lightbulb_service_with_color_temp) + await setup_test_component(hass, create_lightbulb_service_with_color_temp) assert ( entity_registry.async_get(light_entry.entity_id).unique_id @@ -448,9 +400,7 @@ async def test_migrate_unique_id( async def test_only_migrate_once( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test a we handle migration happening after an upgrade and than a downgrade and then an upgrade.""" aid = get_next_aid() @@ -464,7 +414,7 @@ async def test_only_migrate_once( "homekit_controller", f"00:00:00:00:00:00_{aid}_8", ) - await setup_test_component(hass, aid, create_lightbulb_service_with_color_temp) 
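The test_migrate_unique_id and test_only_migrate_once cases around this hunk register an entity under the pre-migration unique id and then assert that setting up the integration leaves it in the new format. As a rough, self-contained illustration of just the string rename those assertions encode (a hypothetical helper, not code from the integration, and assuming a colon-separated MAC as used in these tests):

```python
# Hypothetical illustration of the unique_id rename asserted by the
# test_migrate_unique_id tests in this patch; the real migration logic lives in
# the homekit_controller integration, not here.
def legacy_to_new_unique_id(legacy: str) -> str:
    """Map 'homekit-<mac>-<aid>-<iid>' to '<mac>_<aid>_<iid>'."""
    prefix, mac, aid, iid = legacy.split("-")  # the MAC uses colons, so "-" splits cleanly
    if prefix != "homekit":
        raise ValueError(f"not a legacy homekit unique_id: {legacy}")
    return f"{mac}_{aid}_{iid}"


assert legacy_to_new_unique_id("homekit-00:00:00:00:00:00-1-8") == "00:00:00:00:00:00_1_8"
```

The fan, humidifier, lock and media_player files in this patch exercise the same two formats in their own migration tests.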
+ await setup_test_component(hass, create_lightbulb_service_with_color_temp) assert ( entity_registry.async_get(old_light_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_lock.py b/tests/components/homekit_controller/test_lock.py index 0963537c7d0..db248b82b1a 100644 --- a/tests/components/homekit_controller/test_lock.py +++ b/tests/components/homekit_controller/test_lock.py @@ -1,18 +1,15 @@ """Basic checks for HomeKitLock.""" -from collections.abc import Callable - -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import Service, ServicesTypes +from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import setup_test_component +from .common import get_next_aid, setup_test_component -def create_lock_service(accessory: Accessory) -> Service: +def create_lock_service(accessory): """Define a lock characteristics as per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.LOCK_MECHANISM) @@ -32,11 +29,9 @@ def create_lock_service(accessory: Accessory) -> Service: return service -async def test_switch_change_lock_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_change_lock_state(hass: HomeAssistant) -> None: """Test that we can turn a HomeKit lock on and off again.""" - helper = await setup_test_component(hass, get_next_aid(), create_lock_service) + helper = await setup_test_component(hass, create_lock_service) await hass.services.async_call( "lock", "lock", {"entity_id": "lock.testdevice"}, blocking=True @@ -59,11 +54,9 @@ async def test_switch_change_lock_state( ) -async def test_switch_read_lock_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_read_lock_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit lock accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_lock_service) + helper = await setup_test_component(hass, create_lock_service) state = await helper.async_update( ServicesTypes.LOCK_MECHANISM, @@ -126,9 +119,7 @@ async def test_switch_read_lock_state( async def test_migrate_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test a we can migrate a lock unique id.""" aid = get_next_aid() @@ -137,7 +128,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, aid, create_lock_service) + await setup_test_component(hass, create_lock_service) assert ( entity_registry.async_get(lock_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_media_player.py b/tests/components/homekit_controller/test_media_player.py index d1d280ef265..62a042ff7b9 100644 --- a/tests/components/homekit_controller/test_media_player.py +++ b/tests/components/homekit_controller/test_media_player.py @@ -1,22 +1,19 @@ """Basic checks for HomeKit motion sensors and contact sensors.""" -from collections.abc import Callable - -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import ( CharacteristicPermissions, CharacteristicsTypes, ) -from aiohomekit.model.services import Service, ServicesTypes +from aiohomekit.model.services import ServicesTypes import pytest from 
homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import setup_test_component +from .common import get_next_aid, setup_test_component -def create_tv_service(accessory: Accessory) -> Service: +def create_tv_service(accessory): """Define tv characteristics. The TV is not currently documented publicly - this is based on observing really TV's that have HomeKit support. @@ -54,7 +51,7 @@ def create_tv_service(accessory: Accessory) -> Service: return tv_service -def create_tv_service_with_target_media_state(accessory: Accessory) -> Service: +def create_tv_service_with_target_media_state(accessory): """Define a TV service that can play/pause/stop without generate remote events.""" service = create_tv_service(accessory) @@ -65,11 +62,9 @@ def create_tv_service_with_target_media_state(accessory: Accessory) -> Service: return service -async def test_tv_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_tv_read_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit fan accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_tv_service) + helper = await setup_test_component(hass, create_tv_service) state = await helper.async_update( ServicesTypes.TELEVISION, @@ -96,22 +91,18 @@ async def test_tv_read_state( assert state.state == "idle" -async def test_tv_read_sources( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_tv_read_sources(hass: HomeAssistant) -> None: """Test that we can read the input source of a HomeKit TV.""" - helper = await setup_test_component(hass, get_next_aid(), create_tv_service) + helper = await setup_test_component(hass, create_tv_service) state = await helper.poll_and_get_state() assert state.attributes["source"] == "HDMI 1" assert state.attributes["source_list"] == ["HDMI 1", "HDMI 2"] -async def test_play_remote_key( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_play_remote_key(hass: HomeAssistant) -> None: """Test that we can play media on a media player.""" - helper = await setup_test_component(hass, get_next_aid(), create_tv_service) + helper = await setup_test_component(hass, create_tv_service) await helper.async_update( ServicesTypes.TELEVISION, @@ -156,11 +147,9 @@ async def test_play_remote_key( ) -async def test_pause_remote_key( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_pause_remote_key(hass: HomeAssistant) -> None: """Test that we can pause a media player.""" - helper = await setup_test_component(hass, get_next_aid(), create_tv_service) + helper = await setup_test_component(hass, create_tv_service) await helper.async_update( ServicesTypes.TELEVISION, @@ -205,11 +194,9 @@ async def test_pause_remote_key( ) -async def test_play(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: +async def test_play(hass: HomeAssistant) -> None: """Test that we can play media on a media player.""" - helper = await setup_test_component( - hass, get_next_aid(), create_tv_service_with_target_media_state - ) + helper = await setup_test_component(hass, create_tv_service_with_target_media_state) await helper.async_update( ServicesTypes.TELEVISION, @@ -256,11 +243,9 @@ async def test_play(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> Non ) -async def test_pause(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: +async def test_pause(hass: HomeAssistant) -> None: """Test that we can turn pause a 
media player.""" - helper = await setup_test_component( - hass, get_next_aid(), create_tv_service_with_target_media_state - ) + helper = await setup_test_component(hass, create_tv_service_with_target_media_state) await helper.async_update( ServicesTypes.TELEVISION, @@ -306,11 +291,9 @@ async def test_pause(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> No ) -async def test_stop(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: +async def test_stop(hass: HomeAssistant) -> None: """Test that we can stop a media player.""" - helper = await setup_test_component( - hass, get_next_aid(), create_tv_service_with_target_media_state - ) + helper = await setup_test_component(hass, create_tv_service_with_target_media_state) await hass.services.async_call( "media_player", @@ -349,11 +332,9 @@ async def test_stop(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> Non ) -async def test_tv_set_source( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_tv_set_source(hass: HomeAssistant) -> None: """Test that we can set the input source of a HomeKit TV.""" - helper = await setup_test_component(hass, get_next_aid(), create_tv_service) + helper = await setup_test_component(hass, create_tv_service) await hass.services.async_call( "media_player", @@ -372,11 +353,9 @@ async def test_tv_set_source( assert state.attributes["source"] == "HDMI 2" -async def test_tv_set_source_fail( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_tv_set_source_fail(hass: HomeAssistant) -> None: """Test that we can set the input source of a HomeKit TV.""" - helper = await setup_test_component(hass, get_next_aid(), create_tv_service) + helper = await setup_test_component(hass, create_tv_service) with pytest.raises(ValueError): await hass.services.async_call( @@ -391,9 +370,7 @@ async def test_tv_set_source_fail( async def test_migrate_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test a we can migrate a media_player unique id.""" aid = get_next_aid() @@ -402,7 +379,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, aid, create_tv_service_with_target_media_state) + await setup_test_component(hass, create_tv_service_with_target_media_state) assert ( entity_registry.async_get(media_player_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_number.py b/tests/components/homekit_controller/test_number.py index 243b34cfc75..96e2cbe8d4d 100644 --- a/tests/components/homekit_controller/test_number.py +++ b/tests/components/homekit_controller/test_number.py @@ -1,18 +1,15 @@ """Basic checks for HomeKit sensor.""" -from collections.abc import Callable - -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import Service, ServicesTypes +from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import Helper, setup_test_component +from .common import Helper, get_next_aid, setup_test_component -def create_switch_with_spray_level(accessory: Accessory) -> Service: +def create_switch_with_spray_level(accessory): """Define battery level characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) @@ -34,9 +31,7 @@ 
def create_switch_with_spray_level(accessory: Accessory) -> Service: async def test_migrate_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test a we can migrate a number unique id.""" aid = get_next_aid() @@ -46,7 +41,7 @@ async def test_migrate_unique_id( f"homekit-0001-aid:{aid}-sid:8-cid:9", suggested_object_id="testdevice_spray_quantity", ) - await setup_test_component(hass, aid, create_switch_with_spray_level) + await setup_test_component(hass, create_switch_with_spray_level) assert ( entity_registry.async_get(number.entity_id).unique_id @@ -54,13 +49,9 @@ async def test_migrate_unique_id( ) -async def test_read_number( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_read_number(hass: HomeAssistant) -> None: """Test a switch service that has a sensor characteristic is correctly handled.""" - helper = await setup_test_component( - hass, get_next_aid(), create_switch_with_spray_level - ) + helper = await setup_test_component(hass, create_switch_with_spray_level) # Helper will be for the primary entity, which is the outlet. Make a helper for the sensor. spray_level = Helper( @@ -84,13 +75,9 @@ async def test_read_number( assert state.state == "5" -async def test_write_number( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_write_number(hass: HomeAssistant) -> None: """Test a switch service that has a sensor characteristic is correctly handled.""" - helper = await setup_test_component( - hass, get_next_aid(), create_switch_with_spray_level - ) + helper = await setup_test_component(hass, create_switch_with_spray_level) # Helper will be for the primary entity, which is the outlet. Make a helper for the sensor. 
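Every create_* helper touched by this patch, including create_switch_with_spray_level above, builds its fake device through the same small aiohomekit surface: create an Accessory, add a service, add characteristics, and seed their values. A minimal standalone sketch of that pattern, assuming an aiohomekit version whose Accessory.create_with_info takes no explicit aid (the "+" side of this diff; the "-" side passes the aid as the first argument):

```python
# Minimal sketch of the fake-accessory pattern used by the create_* helpers in
# this patch (not one of those helpers themselves).
from aiohomekit.model import Accessory
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import ServicesTypes

accessory = Accessory.create_with_info(
    "TestDevice", "example.com", "Test", "0001", "0.1"
)

# An outlet service with an ON characteristic seeded to a known starting value.
outlet = accessory.add_service(ServicesTypes.OUTLET)
on_char = outlet.add_char(CharacteristicsTypes.ON)
on_char.value = False
```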
spray_level = Helper( diff --git a/tests/components/homekit_controller/test_select.py b/tests/components/homekit_controller/test_select.py index cd9357b78d9..b00206e1b0d 100644 --- a/tests/components/homekit_controller/test_select.py +++ b/tests/components/homekit_controller/test_select.py @@ -1,7 +1,5 @@ """Basic checks for HomeKit select entities.""" -from collections.abc import Callable - from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.characteristics.const import TemperatureDisplayUnits @@ -10,7 +8,7 @@ from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import Helper, setup_test_component +from .common import Helper, get_next_aid, setup_test_component def create_service_with_ecobee_mode(accessory: Accessory): @@ -37,9 +35,7 @@ def create_service_with_temperature_units(accessory: Accessory): async def test_migrate_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test we can migrate a select unique id.""" aid = get_next_aid() @@ -50,7 +46,7 @@ async def test_migrate_unique_id( suggested_object_id="testdevice_current_mode", ) - await setup_test_component(hass, aid, create_service_with_ecobee_mode) + await setup_test_component(hass, create_service_with_ecobee_mode) assert ( entity_registry.async_get(select.entity_id).unique_id @@ -58,13 +54,9 @@ async def test_migrate_unique_id( ) -async def test_read_current_mode( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_read_current_mode(hass: HomeAssistant) -> None: """Test that Ecobee mode can be correctly read and show as human readable text.""" - helper = await setup_test_component( - hass, get_next_aid(), create_service_with_ecobee_mode - ) + helper = await setup_test_component(hass, create_service_with_ecobee_mode) # Helper will be for the primary entity, which is the service. Make a helper for the sensor. ecobee_mode = Helper( @@ -100,13 +92,9 @@ async def test_read_current_mode( assert state.state == "away" -async def test_write_current_mode( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_write_current_mode(hass: HomeAssistant) -> None: """Test can set a specific mode.""" - helper = await setup_test_component( - hass, get_next_aid(), create_service_with_ecobee_mode - ) + helper = await setup_test_component(hass, create_service_with_ecobee_mode) helper.accessory.services.first(service_type=ServicesTypes.THERMOSTAT) # Helper will be for the primary entity, which is the service. Make a helper for the sensor. @@ -152,13 +140,9 @@ async def test_write_current_mode( ) -async def test_read_select( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_read_select(hass: HomeAssistant) -> None: """Test the generic select can read the current value.""" - helper = await setup_test_component( - hass, get_next_aid(), create_service_with_temperature_units - ) + helper = await setup_test_component(hass, create_service_with_temperature_units) # Helper will be for the primary entity, which is the service. Make a helper for the sensor. 
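The select tests in this file assert that raw HomeKit characteristic values surface in Home Assistant as human-readable options, for example "away" for the Ecobee mode and "fahrenheit" for the temperature display units. Purely to illustrate that value-to-option idea, here is a hypothetical lookup table (not the integration's actual mapping, with "celsius" as the presumed counterpart option, and assuming aiohomekit's TemperatureDisplayUnits enum exposes CELSIUS and FAHRENHEIT members):

```python
# Hypothetical value -> option table mirroring what test_read_select asserts on;
# the real mapping is implemented by the homekit_controller select platform.
from aiohomekit.model.characteristics.const import TemperatureDisplayUnits

OPTION_FOR_UNIT = {
    TemperatureDisplayUnits.CELSIUS: "celsius",
    TemperatureDisplayUnits.FAHRENHEIT: "fahrenheit",
}

assert OPTION_FOR_UNIT[TemperatureDisplayUnits.FAHRENHEIT] == "fahrenheit"
```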
select_entity = Helper( @@ -186,13 +170,9 @@ async def test_read_select( assert state.state == "fahrenheit" -async def test_write_select( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_write_select(hass: HomeAssistant) -> None: """Test can set a value.""" - helper = await setup_test_component( - hass, get_next_aid(), create_service_with_temperature_units - ) + helper = await setup_test_component(hass, create_service_with_temperature_units) helper.accessory.services.first(service_type=ServicesTypes.THERMOSTAT) # Helper will be for the primary entity, which is the service. Make a helper for the sensor. diff --git a/tests/components/homekit_controller/test_sensor.py b/tests/components/homekit_controller/test_sensor.py index c40864c9629..461d62742a5 100644 --- a/tests/components/homekit_controller/test_sensor.py +++ b/tests/components/homekit_controller/test_sensor.py @@ -1,12 +1,11 @@ """Basic checks for HomeKit sensor.""" -from collections.abc import Callable from unittest.mock import patch -from aiohomekit.model import Accessory, Transport +from aiohomekit.model import Transport from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.characteristics.const import ThreadNodeCapabilities, ThreadStatus -from aiohomekit.model.services import Service, ServicesTypes +from aiohomekit.model.services import ServicesTypes from aiohomekit.protocol.statuscodes import HapStatusCode from aiohomekit.testing import FakePairing import pytest @@ -24,7 +23,7 @@ from .common import TEST_DEVICE_SERVICE_INFO, Helper, setup_test_component from tests.components.bluetooth import inject_bluetooth_service_info -def create_temperature_sensor_service(accessory: Accessory) -> None: +def create_temperature_sensor_service(accessory): """Define temperature characteristics.""" service = accessory.add_service(ServicesTypes.TEMPERATURE_SENSOR) @@ -32,7 +31,7 @@ def create_temperature_sensor_service(accessory: Accessory) -> None: cur_state.value = 0 -def create_humidity_sensor_service(accessory: Accessory) -> None: +def create_humidity_sensor_service(accessory): """Define humidity characteristics.""" service = accessory.add_service(ServicesTypes.HUMIDITY_SENSOR) @@ -40,7 +39,7 @@ def create_humidity_sensor_service(accessory: Accessory) -> None: cur_state.value = 0 -def create_light_level_sensor_service(accessory: Accessory) -> None: +def create_light_level_sensor_service(accessory): """Define light level characteristics.""" service = accessory.add_service(ServicesTypes.LIGHT_SENSOR) @@ -48,7 +47,7 @@ def create_light_level_sensor_service(accessory: Accessory) -> None: cur_state.value = 0 -def create_carbon_dioxide_level_sensor_service(accessory: Accessory) -> None: +def create_carbon_dioxide_level_sensor_service(accessory): """Define carbon dioxide level characteristics.""" service = accessory.add_service(ServicesTypes.CARBON_DIOXIDE_SENSOR) @@ -56,7 +55,7 @@ def create_carbon_dioxide_level_sensor_service(accessory: Accessory) -> None: cur_state.value = 0 -def create_battery_level_sensor(accessory: Accessory) -> Service: +def create_battery_level_sensor(accessory): """Define battery level characteristics.""" service = accessory.add_service(ServicesTypes.BATTERY_SERVICE) @@ -72,12 +71,10 @@ def create_battery_level_sensor(accessory: Accessory) -> Service: return service -async def test_temperature_sensor_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_temperature_sensor_read_state(hass: HomeAssistant) -> None: 
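The change repeated across every file in this patch is the setup_test_component calling convention. A side-by-side sketch of the two shapes, assuming the homekit_controller test harness (setup_test_component from tests/components/homekit_controller/common.py) and the create_temperature_sensor_service helper defined earlier in this file; the two forms target different revisions of common.py, so only one of them would actually run against a given checkout:

```python
# Sketch of the setup_test_component calling convention on the "-" and "+"
# sides of this patch; the names used come from the surrounding test files.
from collections.abc import Callable

from homeassistant.core import HomeAssistant

from .common import setup_test_component


# "-" side: the test asks the get_next_aid fixture for an accessory id and
# passes it to the helper explicitly.
async def test_read_state_old_style(
    hass: HomeAssistant, get_next_aid: Callable[[], int]
) -> None:
    helper = await setup_test_component(
        hass, get_next_aid(), create_temperature_sensor_service, suffix="temperature"
    )
    assert helper.entity_id  # placeholder assertion for the sketch


# "+" side: no fixture parameter; tests that still need an aid value import
# get_next_aid from .common instead.
async def test_read_state_new_style(hass: HomeAssistant) -> None:
    helper = await setup_test_component(
        hass, create_temperature_sensor_service, suffix="temperature"
    )
    assert helper.entity_id  # placeholder assertion for the sketch
```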
"""Test reading the state of a HomeKit temperature sensor accessory.""" helper = await setup_test_component( - hass, get_next_aid(), create_temperature_sensor_service, suffix="temperature" + hass, create_temperature_sensor_service, suffix="temperature" ) state = await helper.async_update( @@ -100,12 +97,10 @@ async def test_temperature_sensor_read_state( assert state.attributes["state_class"] == SensorStateClass.MEASUREMENT -async def test_temperature_sensor_not_added_twice( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_temperature_sensor_not_added_twice(hass: HomeAssistant) -> None: """A standalone temperature sensor should not get a characteristic AND a service entity.""" helper = await setup_test_component( - hass, get_next_aid(), create_temperature_sensor_service, suffix="temperature" + hass, create_temperature_sensor_service, suffix="temperature" ) created_sensors = set() @@ -116,12 +111,10 @@ async def test_temperature_sensor_not_added_twice( assert created_sensors == {helper.entity_id} -async def test_humidity_sensor_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_humidity_sensor_read_state(hass: HomeAssistant) -> None: """Test reading the state of a HomeKit humidity sensor accessory.""" helper = await setup_test_component( - hass, get_next_aid(), create_humidity_sensor_service, suffix="humidity" + hass, create_humidity_sensor_service, suffix="humidity" ) state = await helper.async_update( @@ -143,12 +136,10 @@ async def test_humidity_sensor_read_state( assert state.attributes["device_class"] == SensorDeviceClass.HUMIDITY -async def test_light_level_sensor_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_light_level_sensor_read_state(hass: HomeAssistant) -> None: """Test reading the state of a HomeKit temperature sensor accessory.""" helper = await setup_test_component( - hass, get_next_aid(), create_light_level_sensor_service, suffix="light_level" + hass, create_light_level_sensor_service, suffix="light_level" ) state = await helper.async_update( @@ -170,15 +161,10 @@ async def test_light_level_sensor_read_state( assert state.attributes["device_class"] == SensorDeviceClass.ILLUMINANCE -async def test_carbon_dioxide_level_sensor_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_carbon_dioxide_level_sensor_read_state(hass: HomeAssistant) -> None: """Test reading the state of a HomeKit carbon dioxide sensor accessory.""" helper = await setup_test_component( - hass, - get_next_aid(), - create_carbon_dioxide_level_sensor_service, - suffix="carbon_dioxide", + hass, create_carbon_dioxide_level_sensor_service, suffix="carbon_dioxide" ) state = await helper.async_update( @@ -198,12 +184,10 @@ async def test_carbon_dioxide_level_sensor_read_state( assert state.state == "20" -async def test_battery_level_sensor( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_battery_level_sensor(hass: HomeAssistant) -> None: """Test reading the state of a HomeKit battery level sensor.""" helper = await setup_test_component( - hass, get_next_aid(), create_battery_level_sensor, suffix="battery" + hass, create_battery_level_sensor, suffix="battery" ) state = await helper.async_update( @@ -227,12 +211,10 @@ async def test_battery_level_sensor( assert state.attributes["device_class"] == SensorDeviceClass.BATTERY -async def test_battery_charging( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> 
None: +async def test_battery_charging(hass: HomeAssistant) -> None: """Test reading the state of a HomeKit battery's charging state.""" helper = await setup_test_component( - hass, get_next_aid(), create_battery_level_sensor, suffix="battery" + hass, create_battery_level_sensor, suffix="battery" ) state = await helper.async_update( @@ -253,12 +235,10 @@ async def test_battery_charging( assert state.attributes["icon"] == "mdi:battery-charging-20" -async def test_battery_low( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_battery_low(hass: HomeAssistant) -> None: """Test reading the state of a HomeKit battery's low state.""" helper = await setup_test_component( - hass, get_next_aid(), create_battery_level_sensor, suffix="battery" + hass, create_battery_level_sensor, suffix="battery" ) state = await helper.async_update( @@ -280,7 +260,7 @@ async def test_battery_low( assert state.attributes["icon"] == "mdi:battery-alert" -def create_switch_with_sensor(accessory: Accessory) -> Service: +def create_switch_with_sensor(accessory): """Define battery level characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) @@ -297,11 +277,9 @@ def create_switch_with_sensor(accessory: Accessory) -> Service: return service -async def test_switch_with_sensor( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_with_sensor(hass: HomeAssistant) -> None: """Test a switch service that has a sensor characteristic is correctly handled.""" - helper = await setup_test_component(hass, get_next_aid(), create_switch_with_sensor) + helper = await setup_test_component(hass, create_switch_with_sensor) # Helper will be for the primary entity, which is the outlet. Make a helper for the sensor. energy_helper = Helper( @@ -329,11 +307,9 @@ async def test_switch_with_sensor( assert state.state == "50" -async def test_sensor_unavailable( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_sensor_unavailable(hass: HomeAssistant) -> None: """Test a sensor becoming unavailable.""" - helper = await setup_test_component(hass, get_next_aid(), create_switch_with_sensor) + helper = await setup_test_component(hass, create_switch_with_sensor) outlet = helper.accessory.services.first(service_type=ServicesTypes.OUTLET) on_char = outlet[CharacteristicsTypes.ON] @@ -407,9 +383,7 @@ def test_thread_status_to_str() -> None: @pytest.mark.usefixtures("enable_bluetooth", "entity_registry_enabled_by_default") -async def test_rssi_sensor( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_rssi_sensor(hass: HomeAssistant) -> None: """Test an rssi sensor.""" inject_bluetooth_service_info(hass, TEST_DEVICE_SERVICE_INFO) @@ -424,20 +398,14 @@ async def test_rssi_sensor( # Any accessory will do for this test, but we need at least # one or the rssi sensor will not be created await setup_test_component( - hass, - get_next_aid(), - create_battery_level_sensor, - suffix="battery", - connection="BLE", + hass, create_battery_level_sensor, suffix="battery", connection="BLE" ) assert hass.states.get("sensor.testdevice_signal_strength").state == "-56" @pytest.mark.usefixtures("enable_bluetooth", "entity_registry_enabled_by_default") async def test_migrate_rssi_sensor_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test an rssi sensor unique id migration.""" rssi_sensor = 
entity_registry.async_get_or_create( @@ -460,11 +428,7 @@ async def test_migrate_rssi_sensor_unique_id( # Any accessory will do for this test, but we need at least # one or the rssi sensor will not be created await setup_test_component( - hass, - get_next_aid(), - create_battery_level_sensor, - suffix="battery", - connection="BLE", + hass, create_battery_level_sensor, suffix="battery", connection="BLE" ) assert hass.states.get("sensor.renamed_rssi").state == "-56" diff --git a/tests/components/homekit_controller/test_storage.py b/tests/components/homekit_controller/test_storage.py index 97856c2c784..9523dc9abb7 100644 --- a/tests/components/homekit_controller/test_storage.py +++ b/tests/components/homekit_controller/test_storage.py @@ -1,9 +1,7 @@ """Basic checks for entity map storage.""" -from collections.abc import Callable from typing import Any -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -66,7 +64,7 @@ async def test_storage_is_removed_idempotent(hass: HomeAssistant) -> None: assert hkid not in entity_map.storage_data -def create_lightbulb_service(accessory: Accessory) -> None: +def create_lightbulb_service(accessory): """Define lightbulb characteristics.""" service = accessory.add_service(ServicesTypes.LIGHTBULB) on_char = service.add_char(CharacteristicsTypes.ON) @@ -74,10 +72,10 @@ def create_lightbulb_service(accessory: Accessory) -> None: async def test_storage_is_updated_on_add( - hass: HomeAssistant, hass_storage: dict[str, Any], get_next_aid: Callable[[], int] + hass: HomeAssistant, hass_storage: dict[str, Any] ) -> None: """Test entity map storage is cleaned up on adding an accessory.""" - await setup_test_component(hass, get_next_aid(), create_lightbulb_service) + await setup_test_component(hass, create_lightbulb_service) entity_map: EntityMapStorage = hass.data[ENTITY_MAP] hkid = "00:00:00:00:00:00" diff --git a/tests/components/homekit_controller/test_switch.py b/tests/components/homekit_controller/test_switch.py index d841323bd59..8a6b2a65e88 100644 --- a/tests/components/homekit_controller/test_switch.py +++ b/tests/components/homekit_controller/test_switch.py @@ -1,8 +1,5 @@ """Basic checks for HomeKitSwitch.""" -from collections.abc import Callable - -from aiohomekit.model import Accessory from aiohomekit.model.characteristics import ( CharacteristicsTypes, InUseValues, @@ -13,10 +10,10 @@ from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import setup_test_component +from .common import get_next_aid, setup_test_component -def create_switch_service(accessory: Accessory) -> None: +def create_switch_service(accessory): """Define outlet characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) @@ -27,15 +24,7 @@ def create_switch_service(accessory: Accessory) -> None: outlet_in_use.value = False -def create_faucet_service(accessory: Accessory) -> None: - """Define faucet characteristics.""" - service = accessory.add_service(ServicesTypes.FAUCET) - - active_char = service.add_char(CharacteristicsTypes.ACTIVE) - active_char.value = False - - -def create_valve_service(accessory: Accessory) -> None: +def create_valve_service(accessory): """Define valve characteristics.""" service = accessory.add_service(ServicesTypes.VALVE) @@ -52,7 +41,7 @@ def create_valve_service(accessory: Accessory) -> None: remaining.value = 99 -def 
create_char_switch_service(accessory: Accessory) -> None: +def create_char_switch_service(accessory): """Define swtch characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) @@ -61,11 +50,9 @@ def create_char_switch_service(accessory: Accessory) -> None: on_char.value = False -async def test_switch_change_outlet_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_change_outlet_state(hass: HomeAssistant) -> None: """Test that we can turn a HomeKit outlet on and off again.""" - helper = await setup_test_component(hass, get_next_aid(), create_switch_service) + helper = await setup_test_component(hass, create_switch_service) await hass.services.async_call( "switch", "turn_on", {"entity_id": "switch.testdevice"}, blocking=True @@ -88,11 +75,9 @@ async def test_switch_change_outlet_state( ) -async def test_switch_read_outlet_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_switch_read_outlet_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit outlet accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_switch_service) + helper = await setup_test_component(hass, create_switch_service) # Initial state is that the switch is off and the outlet isn't in use switch_1 = await helper.poll_and_get_state() @@ -123,63 +108,9 @@ async def test_switch_read_outlet_state( assert switch_1.attributes["outlet_in_use"] is True -async def test_faucet_change_active_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: - """Test that we can turn a HomeKit outlet on and off again.""" - helper = await setup_test_component(hass, get_next_aid(), create_faucet_service) - - await hass.services.async_call( - "switch", "turn_on", {"entity_id": "switch.testdevice"}, blocking=True - ) - helper.async_assert_service_values( - ServicesTypes.FAUCET, - { - CharacteristicsTypes.ACTIVE: 1, - }, - ) - - await hass.services.async_call( - "switch", "turn_off", {"entity_id": "switch.testdevice"}, blocking=True - ) - helper.async_assert_service_values( - ServicesTypes.FAUCET, - { - CharacteristicsTypes.ACTIVE: 0, - }, - ) - - -async def test_faucet_read_active_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: - """Test that we can read the state of a HomeKit outlet accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_faucet_service) - - # Initial state is that the switch is off and the outlet isn't in use - switch_1 = await helper.poll_and_get_state() - assert switch_1.state == "off" - - # Simulate that someone switched on the device in the real world not via HA - switch_1 = await helper.async_update( - ServicesTypes.FAUCET, - {CharacteristicsTypes.ACTIVE: True}, - ) - assert switch_1.state == "on" - - # Simulate that device switched off in the real world not via HA - switch_1 = await helper.async_update( - ServicesTypes.FAUCET, - {CharacteristicsTypes.ACTIVE: False}, - ) - assert switch_1.state == "off" - - -async def test_valve_change_active_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_valve_change_active_state(hass: HomeAssistant) -> None: """Test that we can turn a valve on and off again.""" - helper = await setup_test_component(hass, get_next_aid(), create_valve_service) + helper = await setup_test_component(hass, create_valve_service) await hass.services.async_call( "switch", "turn_on", {"entity_id": "switch.testdevice"}, blocking=True @@ -202,11 +133,9 @@ 
async def test_valve_change_active_state( ) -async def test_valve_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_valve_read_state(hass: HomeAssistant) -> None: """Test that we can read the state of a valve accessory.""" - helper = await setup_test_component(hass, get_next_aid(), create_valve_service) + helper = await setup_test_component(hass, create_valve_service) # Initial state is that the switch is off and the outlet isn't in use switch_1 = await helper.poll_and_get_state() @@ -237,12 +166,10 @@ async def test_valve_read_state( assert switch_1.attributes["in_use"] is False -async def test_char_switch_change_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_char_switch_change_state(hass: HomeAssistant) -> None: """Test that we can turn a characteristic on and off again.""" helper = await setup_test_component( - hass, get_next_aid(), create_char_switch_service, suffix="pairing_mode" + hass, create_char_switch_service, suffix="pairing_mode" ) await hass.services.async_call( @@ -272,12 +199,10 @@ async def test_char_switch_change_state( ) -async def test_char_switch_read_state( - hass: HomeAssistant, get_next_aid: Callable[[], int] -) -> None: +async def test_char_switch_read_state(hass: HomeAssistant) -> None: """Test that we can read the state of a HomeKit characteristic switch.""" helper = await setup_test_component( - hass, get_next_aid(), create_char_switch_service, suffix="pairing_mode" + hass, create_char_switch_service, suffix="pairing_mode" ) # Simulate that someone switched on the device in the real world not via HA @@ -296,9 +221,7 @@ async def test_char_switch_read_state( async def test_migrate_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - get_next_aid: Callable[[], int], + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test a we can migrate a switch unique id.""" aid = get_next_aid() @@ -312,9 +235,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-0001-aid:{aid}-sid:8-cid:9", ) - await setup_test_component( - hass, aid, create_char_switch_service, suffix="pairing_mode" - ) + await setup_test_component(hass, create_char_switch_service, suffix="pairing_mode") assert ( entity_registry.async_get(switch_entry.entity_id).unique_id diff --git a/tests/components/homematicip_cloud/conftest.py b/tests/components/homematicip_cloud/conftest.py index ad3957fea69..a43a342478b 100644 --- a/tests/components/homematicip_cloud/conftest.py +++ b/tests/components/homematicip_cloud/conftest.py @@ -8,6 +8,7 @@ from homematicip.aio.home import AsyncHome from homematicip.base.enums import WeatherCondition, WeatherDayTime import pytest +from homeassistant import config_entries from homeassistant.components.homematicip_cloud import ( DOMAIN as HMIPC_DOMAIN, async_setup as hmip_async_setup, @@ -45,7 +46,7 @@ def mock_connection_fixture() -> AsyncConnection: @pytest.fixture(name="hmip_config_entry") -def hmip_config_entry_fixture() -> MockConfigEntry: +def hmip_config_entry_fixture() -> config_entries.ConfigEntry: """Create a mock config entry for homematic ip cloud.""" entry_data = { HMIPC_HAPID: HAPID, @@ -65,8 +66,8 @@ def hmip_config_entry_fixture() -> MockConfigEntry: @pytest.fixture(name="default_mock_hap_factory") async def default_mock_hap_factory_fixture( - hass: HomeAssistant, mock_connection, hmip_config_entry: MockConfigEntry -) -> HomeFactory: + hass: HomeAssistant, mock_connection, hmip_config_entry +) -> HomematicipHAP: """Create a 
mocked homematic access point.""" return HomeFactory(hass, mock_connection, hmip_config_entry) @@ -93,7 +94,7 @@ def dummy_config_fixture() -> ConfigType: @pytest.fixture(name="mock_hap_with_service") async def mock_hap_with_service_fixture( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory, dummy_config + hass: HomeAssistant, default_mock_hap_factory, dummy_config ) -> HomematicipHAP: """Create a fake homematic access point with hass services.""" mock_hap = await default_mock_hap_factory.async_get_mock_hap() diff --git a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json index 7a3d3f06b09..eba2c803b1f 100644 --- a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json +++ b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json @@ -1805,164 +1805,93 @@ "updateState": "UP_TO_DATE" }, "3014F7110000000000000049": { - "availableFirmwareVersion": "1.4.8", + "availableFirmwareVersion": "1.0.8", "connectionType": "HMIP_RF", - "deviceArchetype": "HMIP", - "firmwareVersion": "1.4.8", - "firmwareVersionInteger": 66568, + "firmwareVersion": "1.0.8", + "firmwareVersionInteger": 65544, "functionalChannels": { "0": { - "busConfigMismatch": null, "coProFaulty": false, "coProRestartNeeded": false, "coProUpdateFailure": false, - "configPending": true, - "controlsMountingOrientation": null, + "configPending": false, "coolingEmergencyValue": 0.0, - "daliBusState": null, - "defaultLinkedGroup": [], - "deviceCommunicationError": null, - "deviceDriveError": null, - "deviceDriveModeError": null, "deviceId": "3014F7110000000000000049", - "deviceOperationMode": null, "deviceOverheated": false, "deviceOverloaded": false, - "devicePowerFailureDetected": false, "deviceUndervoltage": false, - "displayContrast": null, "dutyCycle": false, "frostProtectionTemperature": 8.0, "functionalChannelType": "DEVICE_BASE_FLOOR_HEATING", "groupIndex": 0, - "groups": ["00000000-0000-0000-0000-000000000005"], - "heatingEmergencyValue": 0.05, + "groups": [], + "heatingEmergencyValue": 0.25, "index": 0, "label": "", - "lockJammed": null, "lowBat": null, "minimumFloorHeatingValvePosition": 0.0, - "mountingOrientation": null, - "multicastRoutingEnabled": false, - "particulateMatterSensorCommunicationError": null, - "particulateMatterSensorError": null, - "powerShortCircuit": null, - "profilePeriodLimitReached": null, - "pulseWidthModulationAtLowFloorHeatingValvePositionEnabled": false, + "pulseWidthModulationAtLowFloorHeatingValvePositionEnabled": true, "routerModuleEnabled": false, "routerModuleSupported": false, - "rssiDeviceValue": -83, + "rssiDeviceValue": -55, "rssiPeerValue": null, - "sensorCommunicationError": null, - "sensorError": null, - "shortCircuitDataLine": null, "supportedOptionalFeatures": { - "IFeatureBusConfigMismatch": false, "IFeatureDeviceCoProError": false, "IFeatureDeviceCoProRestart": false, "IFeatureDeviceCoProUpdate": false, - "IFeatureDeviceCommunicationError": false, - "IFeatureDeviceDaliBusError": false, - "IFeatureDeviceDriveError": false, - "IFeatureDeviceDriveModeError": false, - "IFeatureDeviceIdentify": false, "IFeatureDeviceOverheated": false, "IFeatureDeviceOverloaded": false, - "IFeatureDeviceParticulateMatterSensorCommunicationError": false, - "IFeatureDeviceParticulateMatterSensorError": false, - "IFeatureDevicePowerFailure": false, - "IFeatureDeviceSensorCommunicationError": false, - "IFeatureDeviceSensorError": false, - 
"IFeatureDeviceTemperatureHumiditySensorCommunicationError": false, - "IFeatureDeviceTemperatureHumiditySensorError": false, "IFeatureDeviceTemperatureOutOfRange": false, "IFeatureDeviceUndervoltage": false, "IFeatureMinimumFloorHeatingValvePosition": true, - "IFeatureMulticastRouter": false, - "IFeaturePowerShortCircuit": false, - "IFeatureProfilePeriodLimit": false, - "IFeaturePulseWidthModulationAtLowFloorHeatingValvePosition": true, - "IFeatureRssiValue": true, - "IFeatureShortCircuitDataLine": false, - "IOptionalFeatureDefaultLinkedGroup": false, - "IOptionalFeatureDeviceErrorLockJammed": false, - "IOptionalFeatureDeviceOperationMode": false, - "IOptionalFeatureDisplayContrast": false, - "IOptionalFeatureDutyCycle": true, - "IOptionalFeatureLowBat": false, - "IOptionalFeatureMountingOrientation": false + "IFeaturePulseWidthModulationAtLowFloorHeatingValvePosition": true }, - "temperatureHumiditySensorCommunicationError": null, - "temperatureHumiditySensorError": null, "temperatureOutOfRange": false, "unreach": false, "valveProtectionDuration": 5, "valveProtectionSwitchingInterval": 14 }, "1": { - "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 1, - "groups": [ - "00000000-0000-0000-0000-000000000022", - "00000000-0000-0000-0000-000000000023" - ], + "groups": [], "index": 1, - "label": "Heizkreislauf (1) OG Bad r", - "valvePosition": 0.475, + "label": "", "valveState": "ADAPTION_DONE" }, "10": { - "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 10, - "groups": [ - "00000000-0000-0000-0000-000000000030", - "00000000-0000-0000-0000-000000000031" - ], + "groups": [], "index": 10, - "label": "Heizkreislauf (10) OG AZ rechts", - "valvePosition": 0.385, - "valveState": "ADAPTION_DONE" + "label": "", + "valveState": "ADJUSTMENT_TOO_SMALL" }, "11": { - "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 11, - "groups": [ - "00000000-0000-0000-0000-000000000030", - "00000000-0000-0000-0000-000000000031" - ], + "groups": [], "index": 11, - "label": "Heizkreislauf (11) OG AZ links", - "valvePosition": 0.385, - "valveState": "ADAPTION_DONE" + "label": "", + "valveState": "ADJUSTMENT_TOO_SMALL" }, "12": { - "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 12, - "groups": [ - "00000000-0000-0000-0000-000000000022", - "00000000-0000-0000-0000-000000000023" - ], + "groups": [], "index": 12, - "label": "Heizkreislauf (12) OG Bad Heizk\u00f6rper", - "valvePosition": 0.385, - "valveState": "ADAPTION_DONE" + "label": "", + "valveState": "ADJUSTMENT_TOO_SMALL" }, "13": { "deviceId": "3014F7110000000000000049", "functionalChannelType": "HEAT_DEMAND_CHANNEL", "groupIndex": 0, - "groups": [ - "00000000-0000-0000-0000-000000000058", - "00000000-0000-0000-0000-000000000059" - ], + "groups": [], "index": 13, "label": "" }, @@ -1970,7 +1899,7 @@ "deviceId": "3014F7110000000000000049", "functionalChannelType": "DEHUMIDIFIER_DEMAND_CHANNEL", "groupIndex": 0, - "groups": ["00000000-0000-0000-0000-000000000060"], + "groups": [], "index": 14, "label": "" }, @@ -1978,136 +1907,89 @@ "deviceId": "3014F7110000000000000049", 
"functionalChannelType": "CHANGE_OVER_CHANNEL", "groupIndex": 0, - "groups": [ - "00000000-0000-0000-0000-000000000061", - "00000000-0000-0000-0000-000000000062", - "00000000-0000-0000-0000-000000000063", - "00000000-0000-0000-0000-000000000064" - ], + "groups": [], "index": 15, "label": "" }, "2": { - "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 2, - "groups": [ - "00000000-0000-0000-0000-000000000022", - "00000000-0000-0000-0000-000000000023" - ], + "groups": [], "index": 2, - "label": "Heizkreislauf (2) OG Bad l", - "valvePosition": 0.385, + "label": "", "valveState": "ADAPTION_DONE" }, "3": { - "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 3, - "groups": [ - "00000000-0000-0000-0000-000000000017", - "00000000-0000-0000-0000-000000000018" - ], + "groups": [], "index": 3, - "label": "Heizkreislauf (3) OG WZ rechts", - "valvePosition": 0.0, + "label": "", "valveState": "ADAPTION_DONE" }, "4": { - "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 4, - "groups": [ - "00000000-0000-0000-0000-000000000017", - "00000000-0000-0000-0000-000000000018" - ], + "groups": [], "index": 4, - "label": "Heizkreislauf (4) OG WZ Mitte rechts", - "valvePosition": 0.0, + "label": "", "valveState": "ADAPTION_DONE" }, "5": { - "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 5, - "groups": [ - "00000000-0000-0000-0000-000000000017", - "00000000-0000-0000-0000-000000000018" - ], + "groups": [], "index": 5, - "label": "Heizkreislauf (5) OG WZ Mitte links", - "valvePosition": 0.0, + "label": "", "valveState": "ADAPTION_DONE" }, "6": { - "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 6, - "groups": [ - "00000000-0000-0000-0000-000000000017", - "00000000-0000-0000-0000-000000000018" - ], + "groups": [], "index": 6, - "label": "Heizkreislauf (6) OG WZ links", - "valvePosition": 0.0, - "valveState": "ADAPTION_DONE" + "label": "", + "valveState": "ADJUSTMENT_TOO_SMALL" }, "7": { - "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 7, - "groups": [ - "00000000-0000-0000-0000-000000000017", - "00000000-0000-0000-0000-000000000018" - ], + "groups": [], "index": 7, - "label": "Heizkreislauf (7) OG K\u00fcche", - "valvePosition": 0.0, - "valveState": "ADAPTION_DONE" + "label": "", + "valveState": "ADJUSTMENT_TOO_SMALL" }, "8": { - "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 8, - "groups": [ - "00000000-0000-0000-0000-000000000026", - "00000000-0000-0000-0000-000000000027" - ], + "groups": [], "index": 8, - "label": "Heizkreislauf (8) OG SZ rechts", - "valvePosition": 0.0, - "valveState": "ADAPTION_DONE" + "label": "", + "valveState": "ADJUSTMENT_TOO_SMALL" }, "9": { - "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", 
"functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 9, - "groups": [ - "00000000-0000-0000-0000-000000000026", - "00000000-0000-0000-0000-000000000027" - ], + "groups": [], "index": 9, - "label": "Heizkreislauf (9) OG SZ links", - "valvePosition": 0.0, - "valveState": "ADAPTION_DONE" + "label": "", + "valveState": "ADJUSTMENT_TOO_SMALL" } }, "homeId": "00000000-0000-0000-0000-000000000001", "id": "3014F7110000000000000049", - "label": "Fu\u00dfbodenheizungsaktor", - "lastStatusUpdate": 1704379652281, + "label": "Fu\u00dfbodenheizungsaktor OG motorisch", + "lastStatusUpdate": 1577486092047, "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", - "manuallyUpdateForced": false, "manufacturerCode": 1, - "measuredAttributes": {}, "modelId": 365, "modelType": "HmIP-FALMOT-C12", "oem": "eQ-3", @@ -3355,173 +3237,6 @@ "type": "BRAND_SWITCH_NOTIFICATION_LIGHT", "updateState": "UP_TO_DATE" }, - "3014F711000000000000BSL2": { - "availableFirmwareVersion": "2.0.2", - "connectionType": "HMIP_RF", - "deviceArchetype": "HMIP", - "firmwareVersion": "2.0.2", - "firmwareVersionInteger": 131074, - "functionalChannels": { - "0": { - "busConfigMismatch": null, - "coProFaulty": false, - "coProRestartNeeded": false, - "coProUpdateFailure": false, - "configPending": false, - "controlsMountingOrientation": null, - "daliBusState": null, - "defaultLinkedGroup": [], - "deviceCommunicationError": null, - "deviceDriveError": null, - "deviceDriveModeError": null, - "deviceId": "3014F711000000000000BSL2", - "deviceOperationMode": null, - "deviceOverheated": false, - "deviceOverloaded": false, - "devicePowerFailureDetected": false, - "deviceUndervoltage": false, - "displayContrast": null, - "dutyCycle": false, - "functionalChannelType": "DEVICE_BASE", - "groupIndex": 0, - "groups": ["00000000-0000-0000-0000-000000000007"], - "index": 0, - "label": "", - "lockJammed": null, - "lowBat": null, - "mountingOrientation": null, - "multicastRoutingEnabled": false, - "particulateMatterSensorCommunicationError": null, - "particulateMatterSensorError": null, - "powerShortCircuit": null, - "profilePeriodLimitReached": null, - "routerModuleEnabled": false, - "routerModuleSupported": false, - "rssiDeviceValue": -74, - "rssiPeerValue": -75, - "sensorCommunicationError": null, - "sensorError": null, - "shortCircuitDataLine": null, - "supportedOptionalFeatures": { - "IFeatureBusConfigMismatch": false, - "IFeatureDeviceCoProError": false, - "IFeatureDeviceCoProRestart": false, - "IFeatureDeviceCoProUpdate": false, - "IFeatureDeviceCommunicationError": false, - "IFeatureDeviceDaliBusError": false, - "IFeatureDeviceDriveError": false, - "IFeatureDeviceDriveModeError": false, - "IFeatureDeviceIdentify": true, - "IFeatureDeviceOverheated": true, - "IFeatureDeviceOverloaded": false, - "IFeatureDeviceParticulateMatterSensorCommunicationError": false, - "IFeatureDeviceParticulateMatterSensorError": false, - "IFeatureDevicePowerFailure": false, - "IFeatureDeviceSensorCommunicationError": false, - "IFeatureDeviceSensorError": false, - "IFeatureDeviceTemperatureHumiditySensorCommunicationError": false, - "IFeatureDeviceTemperatureHumiditySensorError": false, - "IFeatureDeviceTemperatureOutOfRange": false, - "IFeatureDeviceUndervoltage": false, - "IFeatureMulticastRouter": false, - "IFeaturePowerShortCircuit": false, - "IFeatureProfilePeriodLimit": true, - "IFeatureRssiValue": true, - "IFeatureShortCircuitDataLine": false, - "IOptionalFeatureDefaultLinkedGroup": false, - "IOptionalFeatureDeviceErrorLockJammed": false, - 
"IOptionalFeatureDeviceOperationMode": false, - "IOptionalFeatureDisplayContrast": false, - "IOptionalFeatureDutyCycle": true, - "IOptionalFeatureLowBat": false, - "IOptionalFeatureMountingOrientation": false - }, - "temperatureHumiditySensorCommunicationError": null, - "temperatureHumiditySensorError": null, - "temperatureOutOfRange": false, - "unreach": false - }, - "1": { - "channelRole": null, - "deviceId": "3014F711000000000000BSL2", - "functionalChannelType": "SWITCH_CHANNEL", - "groupIndex": 1, - "groups": [], - "index": 1, - "internalLinkConfiguration": { - "firstInputAction": "OFF", - "internalLinkConfigurationType": "DOUBLE_INPUT_SWITCH", - "longPressOnTimeEnabled": false, - "onTime": 111600.0, - "secondInputAction": "ON" - }, - "label": "", - "on": false, - "powerUpSwitchState": "PERMANENT_OFF", - "profileMode": "AUTOMATIC", - "supportedOptionalFeatures": { - "IFeatureAccessAuthorizationActuatorChannel": false, - "IFeatureGarageGroupActuatorChannel": false, - "IFeatureLightGroupActuatorChannel": false, - "IFeatureLightProfileActuatorChannel": false, - "IOptionalFeatureInternalLinkConfiguration": true, - "IOptionalFeaturePowerUpSwitchState": true - }, - "userDesiredProfileMode": "AUTOMATIC" - }, - "2": { - "channelRole": "NOTIFICATION_LIGHT_DIMMING_ACTUATOR", - "deviceId": "3014F711000000000000BSL2", - "dimLevel": 0.0, - "functionalChannelType": "NOTIFICATION_LIGHT_CHANNEL", - "groupIndex": 2, - "groups": ["00000000-0000-0000-0000-000000000021"], - "index": 2, - "label": "Led Unten", - "on": false, - "opticalSignalBehaviour": "BLINKING_MIDDLE", - "profileMode": "AUTOMATIC", - "simpleRGBColorState": "TURQUOISE", - "supportedOptionalFeatures": { - "IFeatureOpticalSignalBehaviourState": true - }, - "userDesiredProfileMode": "AUTOMATIC" - }, - "3": { - "channelRole": "NOTIFICATION_LIGHT_DIMMING_ACTUATOR", - "deviceId": "3014F711000000000000BSL2", - "dimLevel": 0.25, - "functionalChannelType": "NOTIFICATION_LIGHT_CHANNEL", - "groupIndex": 3, - "groups": ["00000000-0000-0000-0000-000000000021"], - "index": 3, - "label": "Led Oben", - "on": true, - "opticalSignalBehaviour": "BLINKING_MIDDLE", - "profileMode": "AUTOMATIC", - "simpleRGBColorState": "GREEN", - "supportedOptionalFeatures": { - "IFeatureOpticalSignalBehaviourState": true - }, - "userDesiredProfileMode": "AUTOMATIC" - } - }, - "homeId": "00000000-0000-0000-0000-000000000001", - "id": "3014F711000000000000BSL2", - "label": "BSL2", - "lastStatusUpdate": 1714910246419, - "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", - "manuallyUpdateForced": false, - "manufacturerCode": 1, - "measuredAttributes": {}, - "modelId": 360, - "modelType": "HmIP-BSL", - "oem": "eQ-3", - "permanentlyReachable": true, - "serializedGlobalTradeItemNumber": "3014F711000000000000BSL2", - "type": "BRAND_SWITCH_NOTIFICATION_LIGHT", - "updateState": "UP_TO_DATE" - }, "3014F711SLO0000000000026": { "availableFirmwareVersion": "0.0.0", "connectionType": "HMIP_RF", @@ -8042,141 +7757,6 @@ "serializedGlobalTradeItemNumber": "3014F711000000000ESIIEC2", "type": "ENERGY_SENSORS_INTERFACE", "updateState": "UP_TO_DATE" - }, - "3014F7110000000000ESIIE3": { - "availableFirmwareVersion": "0.0.0", - "connectionType": "HMIP_RF", - "deviceArchetype": "HMIP", - "firmwareVersion": "1.0.6", - "firmwareVersionInteger": 65542, - "functionalChannels": { - "0": { - "busConfigMismatch": null, - "coProFaulty": false, - "coProRestartNeeded": false, - "coProUpdateFailure": false, - "configPending": false, - "controlsMountingOrientation": null, - "daliBusState": null, - 
"defaultLinkedGroup": [], - "deviceCommunicationError": null, - "deviceDriveError": null, - "deviceDriveModeError": null, - "deviceId": "3014F7110000000000ESIIE3", - "deviceOperationMode": null, - "deviceOverheated": false, - "deviceOverloaded": false, - "devicePowerFailureDetected": false, - "deviceUndervoltage": false, - "displayContrast": null, - "dutyCycle": false, - "functionalChannelType": "DEVICE_BASE", - "groupIndex": 0, - "groups": ["00000000-0000-0000-0000-000000000031"], - "index": 0, - "label": "", - "lockJammed": null, - "lowBat": false, - "mountingOrientation": null, - "multicastRoutingEnabled": false, - "particulateMatterSensorCommunicationError": null, - "particulateMatterSensorError": null, - "powerShortCircuit": null, - "profilePeriodLimitReached": null, - "routerModuleEnabled": false, - "routerModuleSupported": false, - "rssiDeviceValue": -94, - "rssiPeerValue": null, - "sensorCommunicationError": false, - "sensorError": true, - "shortCircuitDataLine": null, - "supportedOptionalFeatures": { - "IFeatureBusConfigMismatch": false, - "IFeatureDeviceCoProError": false, - "IFeatureDeviceCoProRestart": false, - "IFeatureDeviceCoProUpdate": false, - "IFeatureDeviceCommunicationError": false, - "IFeatureDeviceDaliBusError": false, - "IFeatureDeviceDriveError": false, - "IFeatureDeviceDriveModeError": false, - "IFeatureDeviceIdentify": false, - "IFeatureDeviceOverheated": false, - "IFeatureDeviceOverloaded": false, - "IFeatureDeviceParticulateMatterSensorCommunicationError": false, - "IFeatureDeviceParticulateMatterSensorError": false, - "IFeatureDevicePowerFailure": false, - "IFeatureDeviceSensorCommunicationError": true, - "IFeatureDeviceSensorError": true, - "IFeatureDeviceTemperatureHumiditySensorCommunicationError": false, - "IFeatureDeviceTemperatureHumiditySensorError": false, - "IFeatureDeviceTemperatureOutOfRange": false, - "IFeatureDeviceUndervoltage": false, - "IFeatureMulticastRouter": false, - "IFeaturePowerShortCircuit": false, - "IFeatureProfilePeriodLimit": false, - "IFeatureRssiValue": true, - "IFeatureShortCircuitDataLine": false, - "IOptionalFeatureDefaultLinkedGroup": false, - "IOptionalFeatureDeviceErrorLockJammed": false, - "IOptionalFeatureDeviceOperationMode": false, - "IOptionalFeatureDisplayContrast": false, - "IOptionalFeatureDutyCycle": true, - "IOptionalFeatureLowBat": true, - "IOptionalFeatureMountingOrientation": false - }, - "temperatureHumiditySensorCommunicationError": null, - "temperatureHumiditySensorError": null, - "temperatureOutOfRange": false, - "unreach": false - }, - "1": { - "channelRole": "ENERGY_SENSOR", - "connectedEnergySensorType": "ES_LED", - "currentGasFlow": null, - "currentPowerConsumption": 189.15, - "deviceId": "3014F7110000000000ESIIE3", - "energyCounterOne": 23825.748, - "energyCounterOneType": "UNKNOWN", - "energyCounterThree": null, - "energyCounterThreeType": "UNKNOWN", - "energyCounterTwo": null, - "energyCounterTwoType": "UNKNOWN", - "functionalChannelType": "ENERGY_SENSORS_INTERFACE_CHANNEL", - "gasVolume": null, - "gasVolumePerImpulse": 0.01, - "groupIndex": 1, - "groups": ["00000000-0000-0000-0000-000000000057"], - "impulsesPerKWH": 1000, - "index": 1, - "label": "", - "supportedOptionalFeatures": { - "IOptionalFeatureCounterOffset": true, - "IOptionalFeatureCurrentGasFlow": false, - "IOptionalFeatureCurrentPowerConsumption": true, - "IOptionalFeatureEnergyCounterOne": true, - "IOptionalFeatureEnergyCounterThree": false, - "IOptionalFeatureEnergyCounterTwo": false, - "IOptionalFeatureGasVolume": false, - 
"IOptionalFeatureGasVolumePerImpulse": false, - "IOptionalFeatureImpulsesPerKWH": true - } - } - }, - "homeId": "00000000-0000-0000-0000-000000000001", - "id": "3014F7110000000000ESIIE3", - "label": "esi_led", - "lastStatusUpdate": 1702420986697, - "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", - "manuallyUpdateForced": false, - "manufacturerCode": 1, - "measuredAttributes": {}, - "modelId": 509, - "modelType": "HmIP-ESI", - "oem": "eQ-3", - "permanentlyReachable": false, - "serializedGlobalTradeItemNumber": "3014F7110000000000ESIIE3", - "type": "ENERGY_SENSORS_INTERFACE", - "updateState": "UP_TO_DATE" } }, "groups": { diff --git a/tests/components/homematicip_cloud/helper.py b/tests/components/homematicip_cloud/helper.py index 80081123519..e7d7350f98e 100644 --- a/tests/components/homematicip_cloud/helper.py +++ b/tests/components/homematicip_cloud/helper.py @@ -1,7 +1,6 @@ """Helper for HomematicIP Cloud Tests.""" import json -from typing import Any from unittest.mock import Mock, patch from homematicip.aio.class_maps import ( @@ -12,19 +11,19 @@ from homematicip.aio.class_maps import ( from homematicip.aio.device import AsyncDevice from homematicip.aio.group import AsyncGroup from homematicip.aio.home import AsyncHome -from homematicip.base.homematicip_object import HomeMaticIPObject from homematicip.home import Home +from homeassistant import config_entries from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN -from homeassistant.components.homematicip_cloud.entity import ( +from homeassistant.components.homematicip_cloud.generic_entity import ( ATTR_IS_GROUP, ATTR_MODEL_TYPE, ) from homeassistant.components.homematicip_cloud.hap import HomematicipHAP -from homeassistant.core import HomeAssistant, State +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, load_fixture +from tests.common import load_fixture HAPID = "3014F7110000000000000001" HAPPIN = "5678" @@ -32,13 +31,7 @@ AUTH_TOKEN = "1234" FIXTURE_DATA = load_fixture("homematicip_cloud.json", "homematicip_cloud") -def get_and_check_entity_basics( - hass: HomeAssistant, - mock_hap: HomematicipHAP, - entity_id: str, - entity_name: str, - device_model: str | None, -) -> tuple[State, HomeMaticIPObject | None]: +def get_and_check_entity_basics(hass, mock_hap, entity_id, entity_name, device_model): """Get and test basic device.""" ha_state = hass.states.get(entity_id) assert ha_state is not None @@ -57,12 +50,7 @@ def get_and_check_entity_basics( async def async_manipulate_test_data( - hass: HomeAssistant, - hmip_device: HomeMaticIPObject, - attribute: str, - new_value: Any, - channel: int = 1, - fire_device: HomeMaticIPObject | None = None, + hass, hmip_device, attribute, new_value, channel=1, fire_device=None ): """Set new value on hmip device.""" if channel == 1: @@ -88,7 +76,7 @@ class HomeFactory: self, hass: HomeAssistant, mock_connection, - hmip_config_entry: MockConfigEntry, + hmip_config_entry: config_entries.ConfigEntry, ) -> None: """Initialize the Factory.""" self.hass = hass @@ -144,7 +132,7 @@ class HomeTemplate(Home): def __init__( self, connection=None, home_name="", test_devices=None, test_groups=None - ) -> None: + ): """Init template with connection.""" super().__init__(connection=connection) self.name = home_name @@ -186,10 +174,6 @@ class HomeTemplate(Home): def _generate_mocks(self): """Generate mocks for groups and devices.""" self.devices = [_get_mock(device) for device in self.devices] - for device 
in self.devices: - device.functionalChannels = [ - _get_mock(ch) for ch in device.functionalChannels - ] self.groups = [_get_mock(group) for group in self.groups] diff --git a/tests/components/homematicip_cloud/test_alarm_control_panel.py b/tests/components/homematicip_cloud/test_alarm_control_panel.py index 094308862f6..05d7963cea8 100644 --- a/tests/components/homematicip_cloud/test_alarm_control_panel.py +++ b/tests/components/homematicip_cloud/test_alarm_control_panel.py @@ -1,25 +1,24 @@ """Tests for HomematicIP Cloud alarm control panel.""" -from homematicip.aio.home import AsyncHome - from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, - AlarmControlPanelState, ) from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN +from homeassistant.const import ( + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_DISARMED, + STATE_ALARM_TRIGGERED, +) from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import HomeFactory, get_and_check_entity_basics +from .helper import get_and_check_entity_basics async def _async_manipulate_security_zones( - hass: HomeAssistant, - home: AsyncHome, - internal_active: bool = False, - external_active: bool = False, - alarm_triggered: bool = False, -) -> None: + hass, home, internal_active=False, external_active=False, alarm_triggered=False +): """Set new values on hmip security zones.""" json = home._rawJSONData json["functionalHomes"]["SECURITY_AND_ALARM"]["alarmActive"] = alarm_triggered @@ -51,7 +50,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_alarm_control_panel( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipAlarmControlPanel.""" entity_id = "alarm_control_panel.hmip_alarm_control_panel" @@ -78,7 +77,7 @@ async def test_hmip_alarm_control_panel( await _async_manipulate_security_zones( hass, home, internal_active=True, external_active=True ) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state is STATE_ALARM_ARMED_AWAY await hass.services.async_call( "alarm_control_panel", "alarm_arm_home", {"entity_id": entity_id}, blocking=True @@ -86,7 +85,7 @@ async def test_hmip_alarm_control_panel( assert home.mock_calls[-1][0] == "set_security_zones_activation" assert home.mock_calls[-1][1] == (False, True) await _async_manipulate_security_zones(hass, home, external_active=True) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_HOME + assert hass.states.get(entity_id).state is STATE_ALARM_ARMED_HOME await hass.services.async_call( "alarm_control_panel", "alarm_disarm", {"entity_id": entity_id}, blocking=True @@ -94,7 +93,7 @@ async def test_hmip_alarm_control_panel( assert home.mock_calls[-1][0] == "set_security_zones_activation" assert home.mock_calls[-1][1] == (False, False) await _async_manipulate_security_zones(hass, home) - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state is STATE_ALARM_DISARMED await hass.services.async_call( "alarm_control_panel", "alarm_arm_away", {"entity_id": entity_id}, blocking=True @@ -104,7 +103,7 @@ async def test_hmip_alarm_control_panel( await _async_manipulate_security_zones( hass, home, internal_active=True, external_active=True, alarm_triggered=True ) - assert hass.states.get(entity_id).state 
== AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state is STATE_ALARM_TRIGGERED await hass.services.async_call( "alarm_control_panel", "alarm_arm_home", {"entity_id": entity_id}, blocking=True @@ -114,4 +113,4 @@ async def test_hmip_alarm_control_panel( await _async_manipulate_security_zones( hass, home, external_active=True, alarm_triggered=True ) - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state is STATE_ALARM_TRIGGERED diff --git a/tests/components/homematicip_cloud/test_binary_sensor.py b/tests/components/homematicip_cloud/test_binary_sensor.py index 02e96b10fe8..54f8e2141d2 100644 --- a/tests/components/homematicip_cloud/test_binary_sensor.py +++ b/tests/components/homematicip_cloud/test_binary_sensor.py @@ -16,7 +16,7 @@ from homeassistant.components.homematicip_cloud.binary_sensor import ( ATTR_WATER_LEVEL_DETECTED, ATTR_WINDOW_STATE, ) -from homeassistant.components.homematicip_cloud.entity import ( +from homeassistant.components.homematicip_cloud.generic_entity import ( ATTR_EVENT_DELAY, ATTR_GROUP_MEMBER_UNREACHABLE, ATTR_LOW_BATTERY, @@ -27,7 +27,7 @@ from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics +from .helper import async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -41,7 +41,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_home_cloud_connection_sensor( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipCloudConnectionSensor.""" entity_id = "binary_sensor.cloud_connection" @@ -64,7 +64,7 @@ async def test_hmip_home_cloud_connection_sensor( async def test_hmip_acceleration_sensor( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipAccelerationSensor.""" entity_id = "binary_sensor.garagentor" @@ -103,7 +103,7 @@ async def test_hmip_acceleration_sensor( async def test_hmip_tilt_vibration_sensor( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipTiltVibrationSensor.""" entity_id = "binary_sensor.garage_neigungs_und_erschutterungssensor" @@ -141,7 +141,7 @@ async def test_hmip_tilt_vibration_sensor( async def test_hmip_contact_interface( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipContactInterface.""" entity_id = "binary_sensor.kontakt_schnittstelle_unterputz_1_fach" @@ -166,7 +166,7 @@ async def test_hmip_contact_interface( async def test_hmip_shutter_contact( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipShutterContact.""" entity_id = "binary_sensor.fenstergriffsensor" @@ -208,7 +208,7 @@ async def test_hmip_shutter_contact( async def test_hmip_shutter_contact_optical( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipShutterContact.""" entity_id = "binary_sensor.sitzplatzture" @@ -240,7 +240,7 @@ async def 
test_hmip_shutter_contact_optical( async def test_hmip_motion_detector( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipMotionDetector.""" entity_id = "binary_sensor.bewegungsmelder_fur_55er_rahmen_innen" @@ -261,7 +261,7 @@ async def test_hmip_motion_detector( async def test_hmip_presence_detector( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipPresenceDetector.""" entity_id = "binary_sensor.spi_1" @@ -287,7 +287,7 @@ async def test_hmip_presence_detector( async def test_hmip_pluggable_mains_failure_surveillance_sensor( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipPresenceDetector.""" entity_id = "binary_sensor.netzausfalluberwachung" @@ -308,7 +308,7 @@ async def test_hmip_pluggable_mains_failure_surveillance_sensor( async def test_hmip_smoke_detector( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipSmokeDetector.""" entity_id = "binary_sensor.rauchwarnmelder" @@ -342,7 +342,7 @@ async def test_hmip_smoke_detector( async def test_hmip_water_detector( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipWaterDetector.""" entity_id = "binary_sensor.wassersensor" @@ -378,9 +378,7 @@ async def test_hmip_water_detector( assert ha_state.state == STATE_OFF -async def test_hmip_storm_sensor( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: +async def test_hmip_storm_sensor(hass: HomeAssistant, default_mock_hap_factory) -> None: """Test HomematicipStormSensor.""" entity_id = "binary_sensor.weather_sensor_plus_storm" entity_name = "Weather Sensor – plus Storm" @@ -399,9 +397,7 @@ async def test_hmip_storm_sensor( assert ha_state.state == STATE_ON -async def test_hmip_rain_sensor( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: +async def test_hmip_rain_sensor(hass: HomeAssistant, default_mock_hap_factory) -> None: """Test HomematicipRainSensor.""" entity_id = "binary_sensor.wettersensor_pro_raining" entity_name = "Wettersensor - pro Raining" @@ -421,7 +417,7 @@ async def test_hmip_rain_sensor( async def test_hmip_sunshine_sensor( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipSunshineSensor.""" entity_id = "binary_sensor.wettersensor_pro_sunshine" @@ -443,7 +439,7 @@ async def test_hmip_sunshine_sensor( async def test_hmip_battery_sensor( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipSunshineSensor.""" entity_id = "binary_sensor.wohnungsture_battery" @@ -464,7 +460,7 @@ async def test_hmip_battery_sensor( async def test_hmip_security_zone_sensor_group( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipSecurityZoneSensorGroup.""" entity_id = "binary_sensor.internal_securityzone" @@ -501,7 +497,7 @@ async def test_hmip_security_zone_sensor_group( async def test_hmip_security_sensor_group( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test 
HomematicipSecuritySensorGroup."""
     entity_id = "binary_sensor.buro_sensors"
@@ -575,7 +571,7 @@ async def test_hmip_security_sensor_group(
 
 
 async def test_hmip_multi_contact_interface(
-    hass: HomeAssistant, default_mock_hap_factory: HomeFactory
+    hass: HomeAssistant, default_mock_hap_factory
 ) -> None:
     """Test HomematicipMultiContactInterface."""
     entity_id = "binary_sensor.wired_eingangsmodul_32_fach_channel5"
diff --git a/tests/components/homematicip_cloud/test_button.py b/tests/components/homematicip_cloud/test_button.py
index 7da86607096..0b5e81dd703 100644
--- a/tests/components/homematicip_cloud/test_button.py
+++ b/tests/components/homematicip_cloud/test_button.py
@@ -7,13 +7,11 @@ from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN
 from homeassistant.core import HomeAssistant
 from homeassistant.util import dt as dt_util
 
-from .helper import HomeFactory, get_and_check_entity_basics
+from .helper import get_and_check_entity_basics
 
 
 async def test_hmip_garage_door_controller_button(
-    hass: HomeAssistant,
-    freezer: FrozenDateTimeFactory,
-    default_mock_hap_factory: HomeFactory,
+    hass: HomeAssistant, freezer: FrozenDateTimeFactory, default_mock_hap_factory
 ) -> None:
     """Test HomematicipGarageDoorControllerButton."""
     entity_id = "button.garagentor"
diff --git a/tests/components/homematicip_cloud/test_climate.py b/tests/components/homematicip_cloud/test_climate.py
index d4711440288..f175e2060df 100644
--- a/tests/components/homematicip_cloud/test_climate.py
+++ b/tests/components/homematicip_cloud/test_climate.py
@@ -28,12 +28,7 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ServiceValidationError
 from homeassistant.setup import async_setup_component
 
-from .helper import (
-    HAPID,
-    HomeFactory,
-    async_manipulate_test_data,
-    get_and_check_entity_basics,
-)
+from .helper import HAPID, async_manipulate_test_data, get_and_check_entity_basics
 
 
 async def test_manually_configured_platform(hass: HomeAssistant) -> None:
@@ -45,7 +40,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None:
 
 
 async def test_hmip_heating_group_heat(
-    hass: HomeAssistant, default_mock_hap_factory: HomeFactory
+    hass: HomeAssistant, default_mock_hap_factory
 ) -> None:
     """Test HomematicipHeatingGroup."""
     entity_id = "climate.badezimmer"
@@ -141,6 +136,13 @@ async def test_hmip_heating_group_heat(
     ha_state = hass.states.get(entity_id)
     assert ha_state.attributes[ATTR_PRESET_MODE] == "STD"
 
+    # Not required for hmip, but a possibility to send no temperature.
+    await hass.services.async_call(
+        "climate",
+        "set_temperature",
+        {"entity_id": entity_id, "target_temp_low": 10, "target_temp_high": 10},
+        blocking=True,
+    )
     # No new service call should be in mock_calls.
     assert len(hmip_device.mock_calls) == service_call_counter + 12
     # Only fire event from last async_manipulate_test_data available.
@@ -255,7 +257,7 @@ async def test_hmip_heating_group_heat(
 
 
 async def test_hmip_heating_group_cool(
-    hass: HomeAssistant, default_mock_hap_factory: HomeFactory
+    hass: HomeAssistant, default_mock_hap_factory
 ) -> None:
     """Test HomematicipHeatingGroup."""
     entity_id = "climate.badezimmer"
@@ -378,7 +380,7 @@ async def test_hmip_heating_group_cool(
 
 
 async def test_hmip_heating_group_heat_with_switch(
-    hass: HomeAssistant, default_mock_hap_factory: HomeFactory
+    hass: HomeAssistant, default_mock_hap_factory
 ) -> None:
     """Test HomematicipHeatingGroup."""
     entity_id = "climate.schlafzimmer"
@@ -409,7 +411,7 @@ async def test_hmip_heating_group_heat_with_switch(
 
 
 async def test_hmip_heating_group_heat_with_radiator(
-    hass: HomeAssistant, default_mock_hap_factory: HomeFactory
+    hass: HomeAssistant, default_mock_hap_factory
 ) -> None:
     """Test HomematicipHeatingGroup."""
     entity_id = "climate.vorzimmer"
@@ -438,7 +440,7 @@ async def test_hmip_heating_group_heat_with_radiator(
 
 
 async def test_hmip_heating_profile_default_name(
-    hass: HomeAssistant, default_mock_hap_factory: HomeFactory
+    hass: HomeAssistant, default_mock_hap_factory
 ) -> None:
     """Test visible profile 1 without a name should be displayed as 'Default'."""
     entity_id = "climate.vorzimmer3"
@@ -463,7 +465,7 @@ async def test_hmip_heating_profile_default_name(
 
 
 async def test_hmip_heating_profile_naming(
-    hass: HomeAssistant, default_mock_hap_factory: HomeFactory
+    hass: HomeAssistant, default_mock_hap_factory
 ) -> None:
     """Test Heating Profile Naming."""
     entity_id = "climate.vorzimmer2"
@@ -488,7 +490,7 @@ async def test_hmip_heating_profile_naming(
 
 
 async def test_hmip_heating_profile_name_not_in_list(
-    hass: HomeAssistant, default_mock_hap_factory: HomeFactory
+    hass: HomeAssistant, default_mock_hap_factory
 ) -> None:
     """Test set profile when profile is not in available profiles."""
     expected_profile = "Testprofile"
@@ -620,69 +622,20 @@ async def test_hmip_climate_services(
     assert len(home._connection.mock_calls) == 10
 
     not_existing_hap_id = "5555F7110000000000000001"
-    with pytest.raises(ServiceValidationError) as excinfo:
-        await hass.services.async_call(
-            "homematicip_cloud",
-            "deactivate_vacation",
-            {"accesspoint_id": not_existing_hap_id},
-            blocking=True,
-        )
-    assert excinfo.value.translation_domain == HMIPC_DOMAIN
-    assert excinfo.value.translation_key == "access_point_not_found"
+    await hass.services.async_call(
+        "homematicip_cloud",
+        "deactivate_vacation",
+        {"accesspoint_id": not_existing_hap_id},
+        blocking=True,
+    )
+    assert home.mock_calls[-1][0] == "deactivate_vacation"
+    assert home.mock_calls[-1][1] == ()
     # There is no further call on connection.
     assert len(home._connection.mock_calls) == 10
 
 
-async def test_hmip_set_home_cooling_mode(
-    hass: HomeAssistant, mock_hap_with_service
-) -> None:
-    """Test HomematicipSetHomeCoolingMode."""
-
-    home = mock_hap_with_service.home
-
-    await hass.services.async_call(
-        "homematicip_cloud",
-        "set_home_cooling_mode",
-        {"accesspoint_id": HAPID, "cooling": False},
-        blocking=True,
-    )
-    assert home.mock_calls[-1][0] == "set_cooling"
-    assert home.mock_calls[-1][1] == (False,)
-    assert len(home._connection.mock_calls) == 1
-
-    await hass.services.async_call(
-        "homematicip_cloud",
-        "set_home_cooling_mode",
-        {"accesspoint_id": HAPID, "cooling": True},
-        blocking=True,
-    )
-    assert home.mock_calls[-1][0] == "set_cooling"
-    assert home.mock_calls[-1][1]
-    assert len(home._connection.mock_calls) == 2
-
-    await hass.services.async_call(
-        "homematicip_cloud", "set_home_cooling_mode", blocking=True
-    )
-    assert home.mock_calls[-1][0] == "set_cooling"
-    assert home.mock_calls[-1][1]
-    assert len(home._connection.mock_calls) == 3
-
-    not_existing_hap_id = "5555F7110000000000000001"
-    with pytest.raises(ServiceValidationError) as excinfo:
-        await hass.services.async_call(
-            "homematicip_cloud",
-            "set_home_cooling_mode",
-            {"accesspoint_id": not_existing_hap_id, "cooling": True},
-            blocking=True,
-        )
-    assert excinfo.value.translation_domain == HMIPC_DOMAIN
-    assert excinfo.value.translation_key == "access_point_not_found"
-    # There is no further call on connection.
-    assert len(home._connection.mock_calls) == 3
-
-
 async def test_hmip_heating_group_services(
-    hass: HomeAssistant, default_mock_hap_factory: HomeFactory
+    hass: HomeAssistant, default_mock_hap_factory
 ) -> None:
     """Test HomematicipHeatingGroup services."""
     entity_id = "climate.badezimmer"
diff --git a/tests/components/homematicip_cloud/test_cover.py b/tests/components/homematicip_cloud/test_cover.py
index bcafa689172..ee126dff936 100644
--- a/tests/components/homematicip_cloud/test_cover.py
+++ b/tests/components/homematicip_cloud/test_cover.py
@@ -6,14 +6,13 @@ from homeassistant.components.cover import (
     ATTR_CURRENT_POSITION,
     ATTR_CURRENT_TILT_POSITION,
     DOMAIN as COVER_DOMAIN,
-    CoverState,
 )
 from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN
-from homeassistant.const import STATE_UNKNOWN
+from homeassistant.const import STATE_CLOSED, STATE_OPEN, STATE_UNKNOWN
 from homeassistant.core import HomeAssistant
 from homeassistant.setup import async_setup_component
 
-from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics
+from .helper import async_manipulate_test_data, get_and_check_entity_basics
 
 
 async def test_manually_configured_platform(hass: HomeAssistant) -> None:
@@ -25,7 +24,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None:
 
 
 async def test_hmip_cover_shutter(
-    hass: HomeAssistant, default_mock_hap_factory: HomeFactory
+    hass: HomeAssistant, default_mock_hap_factory
 ) -> None:
     """Test HomematicipCoverShutte."""
     entity_id = "cover.broll_1"
@@ -51,7 +50,7 @@ async def test_hmip_cover_shutter(
     assert hmip_device.mock_calls[-1][1] == (0, 1)
     await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0)
     ha_state = hass.states.get(entity_id)
-    assert ha_state.state == CoverState.OPEN
+    assert ha_state.state == STATE_OPEN
     assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100
 
     await hass.services.async_call(
@@ -65,7 +64,7 @@ async def test_hmip_cover_shutter(
     assert hmip_device.mock_calls[-1][1] == (0.5, 1)
     await async_manipulate_test_data(hass,
hmip_device, "shutterLevel", 0.5) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 await hass.services.async_call( @@ -76,7 +75,7 @@ async def test_hmip_cover_shutter( assert hmip_device.mock_calls[-1][1] == (1, 1) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.CLOSED + assert ha_state.state == STATE_CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 await hass.services.async_call( @@ -91,9 +90,7 @@ async def test_hmip_cover_shutter( assert ha_state.state == STATE_UNKNOWN -async def test_hmip_cover_slats( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: +async def test_hmip_cover_slats(hass: HomeAssistant, default_mock_hap_factory) -> None: """Test HomematicipCoverSlats.""" entity_id = "cover.sofa_links" entity_name = "Sofa links" @@ -106,7 +103,7 @@ async def test_hmip_cover_slats( hass, mock_hap, entity_id, entity_name, device_model ) - assert ha_state.state == CoverState.CLOSED + assert ha_state.state == STATE_CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 service_call_counter = len(hmip_device.mock_calls) @@ -120,7 +117,7 @@ async def test_hmip_cover_slats( await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 @@ -135,7 +132,7 @@ async def test_hmip_cover_slats( assert hmip_device.mock_calls[-1][2] == {"channelIndex": 1, "slatsLevel": 0.5} await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0.5) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -147,7 +144,7 @@ async def test_hmip_cover_slats( assert hmip_device.mock_calls[-1][2] == {"channelIndex": 1, "slatsLevel": 1} await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 @@ -168,7 +165,7 @@ async def test_hmip_cover_slats( async def test_hmip_multi_cover_slats( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipCoverSlats.""" entity_id = "cover.wohnzimmer_fenster" @@ -186,7 +183,7 @@ async def test_hmip_multi_cover_slats( await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1, channel=4) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.CLOSED + assert ha_state.state == STATE_CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 service_call_counter = len(hmip_device.mock_calls) @@ -200,7 +197,7 @@ async def test_hmip_multi_cover_slats( await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0, channel=4) await 
async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0, channel=4) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 @@ -215,7 +212,7 @@ async def test_hmip_multi_cover_slats( assert hmip_device.mock_calls[-1][2] == {"channelIndex": 4, "slatsLevel": 0.5} await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0.5, channel=4) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -227,7 +224,7 @@ async def test_hmip_multi_cover_slats( assert hmip_device.mock_calls[-1][2] == {"channelIndex": 4, "slatsLevel": 1} await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1, channel=4) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 @@ -247,9 +244,7 @@ async def test_hmip_multi_cover_slats( assert ha_state.state == STATE_UNKNOWN -async def test_hmip_blind_module( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: +async def test_hmip_blind_module(hass: HomeAssistant, default_mock_hap_factory) -> None: """Test HomematicipBlindModule.""" entity_id = "cover.sonnenschutz_balkontur" entity_name = "Sonnenschutz Balkontür" @@ -262,7 +257,7 @@ async def test_hmip_blind_module( hass, mock_hap, entity_id, entity_name, device_model ) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 5 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 service_call_counter = len(hmip_device.mock_calls) @@ -288,7 +283,7 @@ async def test_hmip_blind_module( assert hmip_device.mock_calls[-1][2] == {"primaryShadingLevel": 0} ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 @@ -311,7 +306,7 @@ async def test_hmip_blind_module( assert hmip_device.mock_calls[-1][0] == "set_primary_shading_level" assert hmip_device.mock_calls[-1][2] == {"primaryShadingLevel": 0.5} ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -332,7 +327,7 @@ async def test_hmip_blind_module( } ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.CLOSED + assert ha_state.state == STATE_CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 @@ -360,7 +355,7 @@ async def test_hmip_blind_module( async def test_hmip_garage_door_tormatic( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipCoverShutte.""" entity_id = "cover.garage_door_module" @@ -386,7 +381,7 @@ async def test_hmip_garage_door_tormatic( assert hmip_device.mock_calls[-1][1] == (DoorCommand.OPEN,) await async_manipulate_test_data(hass, hmip_device, "doorState", 
DoorState.OPEN) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 await hass.services.async_call( @@ -397,7 +392,7 @@ async def test_hmip_garage_door_tormatic( assert hmip_device.mock_calls[-1][1] == (DoorCommand.CLOSE,) await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.CLOSED) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.CLOSED + assert ha_state.state == STATE_CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 await hass.services.async_call( @@ -409,7 +404,7 @@ async def test_hmip_garage_door_tormatic( async def test_hmip_garage_door_hoermann( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipCoverShutte.""" entity_id = "cover.garage_door" @@ -435,7 +430,7 @@ async def test_hmip_garage_door_hoermann( assert hmip_device.mock_calls[-1][1] == (DoorCommand.OPEN,) await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.OPEN) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 await hass.services.async_call( @@ -446,7 +441,7 @@ async def test_hmip_garage_door_hoermann( assert hmip_device.mock_calls[-1][1] == (DoorCommand.CLOSE,) await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.CLOSED) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.CLOSED + assert ha_state.state == STATE_CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 await hass.services.async_call( @@ -458,7 +453,7 @@ async def test_hmip_garage_door_hoermann( async def test_hmip_cover_shutter_group( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipCoverShutteGroup.""" entity_id = "cover.rollos_shuttergroup" @@ -482,7 +477,7 @@ async def test_hmip_cover_shutter_group( assert hmip_device.mock_calls[-1][1] == (0,) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 await hass.services.async_call( @@ -496,7 +491,7 @@ async def test_hmip_cover_shutter_group( assert hmip_device.mock_calls[-1][1] == (0.5,) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0.5) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 await hass.services.async_call( @@ -507,7 +502,7 @@ async def test_hmip_cover_shutter_group( assert hmip_device.mock_calls[-1][1] == (1,) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.CLOSED + assert ha_state.state == STATE_CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 await hass.services.async_call( @@ -523,7 +518,7 @@ async def test_hmip_cover_shutter_group( async def test_hmip_cover_slats_group( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test slats with HomematicipCoverShutteGroup.""" entity_id = "cover.rollos_shuttergroup" @@ -537,7 +532,7 @@ 
async def test_hmip_cover_slats_group( await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.CLOSED + assert ha_state.state == STATE_CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 service_call_counter = len(hmip_device.mock_calls) @@ -558,7 +553,7 @@ async def test_hmip_cover_slats_group( await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0.5) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 @@ -573,7 +568,7 @@ async def test_hmip_cover_slats_group( assert hmip_device.mock_calls[-1][1] == (0.5,) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0.5) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -585,7 +580,7 @@ async def test_hmip_cover_slats_group( assert hmip_device.mock_calls[-1][1] == (1,) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == CoverState.OPEN + assert ha_state.state == STATE_OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 diff --git a/tests/components/homematicip_cloud/test_device.py b/tests/components/homematicip_cloud/test_device.py index 5b4993f7314..348171b3187 100644 --- a/tests/components/homematicip_cloud/test_device.py +++ b/tests/components/homematicip_cloud/test_device.py @@ -17,25 +17,23 @@ from .helper import ( get_and_check_entity_basics, ) -from tests.common import MockConfigEntry - async def test_hmip_load_all_supported_devices( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Ensure that all supported devices could be loaded.""" mock_hap = await default_mock_hap_factory.async_get_mock_hap( test_devices=None, test_groups=None ) - assert len(mock_hap.hmip_device_by_entity_id) == 308 + assert len(mock_hap.hmip_device_by_entity_id) == 290 async def test_hmip_remove_device( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - default_mock_hap_factory: HomeFactory, + default_mock_hap_factory, ) -> None: """Test Remove of hmip device.""" entity_id = "light.treppe_ch" @@ -69,8 +67,8 @@ async def test_hmip_add_device( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - default_mock_hap_factory: HomeFactory, - hmip_config_entry: MockConfigEntry, + default_mock_hap_factory, + hmip_config_entry, ) -> None: """Test Remove of hmip device.""" entity_id = "light.treppe_ch" @@ -123,7 +121,7 @@ async def test_hmip_remove_group( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - default_mock_hap_factory: HomeFactory, + default_mock_hap_factory, ) -> None: """Test Remove of hmip group.""" entity_id = "switch.strom_group" @@ -151,7 +149,7 @@ async def test_hmip_remove_group( async def test_all_devices_unavailable_when_hap_not_connected( - hass: HomeAssistant, default_mock_hap_factory: 
HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test make all devices unavaulable when hap is not connected.""" entity_id = "light.treppe_ch" @@ -176,9 +174,7 @@ async def test_all_devices_unavailable_when_hap_not_connected( assert ha_state.state == STATE_UNAVAILABLE -async def test_hap_reconnected( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: +async def test_hap_reconnected(hass: HomeAssistant, default_mock_hap_factory) -> None: """Test reconnect hap.""" entity_id = "light.treppe_ch" entity_name = "Treppe CH" @@ -209,7 +205,7 @@ async def test_hap_reconnected( async def test_hap_with_name( - hass: HomeAssistant, mock_connection, hmip_config_entry: MockConfigEntry + hass: HomeAssistant, mock_connection, hmip_config_entry ) -> None: """Test hap with name.""" home_name = "TestName" @@ -236,7 +232,7 @@ async def test_hap_with_name( async def test_hmip_reset_energy_counter_services( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test reset_energy_counter service.""" entity_id = "switch.pc" @@ -271,7 +267,7 @@ async def test_hmip_multi_area_device( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - default_mock_hap_factory: HomeFactory, + default_mock_hap_factory, ) -> None: """Test multi area device. Check if devices are created and referenced.""" entity_id = "binary_sensor.wired_eingangsmodul_32_fach_channel5" diff --git a/tests/components/homematicip_cloud/test_hap.py b/tests/components/homematicip_cloud/test_hap.py index ded1bf88292..2da32b2844d 100644 --- a/tests/components/homematicip_cloud/test_hap.py +++ b/tests/components/homematicip_cloud/test_hap.py @@ -22,7 +22,7 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .helper import HAPID, HAPPIN, HomeFactory +from .helper import HAPID, HAPPIN from tests.common import MockConfigEntry @@ -114,7 +114,7 @@ async def test_hap_setup_connection_error() -> None: async def test_hap_reset_unloads_entry_if_setup( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test calling reset while the entry has been setup.""" mock_hap = await default_mock_hap_factory.async_get_mock_hap() @@ -129,7 +129,7 @@ async def test_hap_reset_unloads_entry_if_setup( async def test_hap_create( - hass: HomeAssistant, hmip_config_entry: MockConfigEntry, simple_mock_home + hass: HomeAssistant, hmip_config_entry, simple_mock_home ) -> None: """Mock AsyncHome to execute get_hap.""" hass.config.components.add(HMIPC_DOMAIN) @@ -141,7 +141,7 @@ async def test_hap_create( async def test_hap_create_exception( - hass: HomeAssistant, hmip_config_entry: MockConfigEntry, mock_connection_init + hass: HomeAssistant, hmip_config_entry, mock_connection_init ) -> None: """Mock AsyncHome to execute get_hap.""" hass.config.components.add(HMIPC_DOMAIN) diff --git a/tests/components/homematicip_cloud/test_init.py b/tests/components/homematicip_cloud/test_init.py index 07c53248d92..9303a755e89 100644 --- a/tests/components/homematicip_cloud/test_init.py +++ b/tests/components/homematicip_cloud/test_init.py @@ -100,7 +100,7 @@ async def test_config_already_registered_not_passed_to_config_entry( async def test_load_entry_fails_due_to_connection_error( - hass: HomeAssistant, hmip_config_entry: MockConfigEntry, mock_connection_init + hass: 
HomeAssistant, hmip_config_entry, mock_connection_init ) -> None: """Test load entry fails due to connection error.""" hmip_config_entry.add_to_hass(hass) @@ -116,7 +116,7 @@ async def test_load_entry_fails_due_to_connection_error( async def test_load_entry_fails_due_to_generic_exception( - hass: HomeAssistant, hmip_config_entry: MockConfigEntry + hass: HomeAssistant, hmip_config_entry ) -> None: """Test load entry fails due to generic exception.""" hmip_config_entry.add_to_hass(hass) @@ -199,7 +199,7 @@ async def test_setup_services_and_unload_services(hass: HomeAssistant) -> None: # Check services are created hmipc_services = hass.services.async_services()[HMIPC_DOMAIN] - assert len(hmipc_services) == 9 + assert len(hmipc_services) == 8 config_entries = hass.config_entries.async_entries(HMIPC_DOMAIN) assert len(config_entries) == 1 @@ -232,7 +232,7 @@ async def test_setup_two_haps_unload_one_by_one(hass: HomeAssistant) -> None: assert await async_setup_component(hass, HMIPC_DOMAIN, {}) hmipc_services = hass.services.async_services()[HMIPC_DOMAIN] - assert len(hmipc_services) == 9 + assert len(hmipc_services) == 8 config_entries = hass.config_entries.async_entries(HMIPC_DOMAIN) assert len(config_entries) == 2 @@ -241,7 +241,7 @@ async def test_setup_two_haps_unload_one_by_one(hass: HomeAssistant) -> None: # services still exists hmipc_services = hass.services.async_services()[HMIPC_DOMAIN] - assert len(hmipc_services) == 9 + assert len(hmipc_services) == 8 # unload the second AP await hass.config_entries.async_unload(config_entries[1].entry_id) diff --git a/tests/components/homematicip_cloud/test_light.py b/tests/components/homematicip_cloud/test_light.py index c0717e81e0d..18f002a5dbc 100644 --- a/tests/components/homematicip_cloud/test_light.py +++ b/tests/components/homematicip_cloud/test_light.py @@ -1,14 +1,12 @@ """Tests for HomematicIP Cloud light.""" -from homematicip.base.enums import OpticalSignalBehaviour, RGBColorState +from homematicip.base.enums import RGBColorState from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_NAME, - ATTR_EFFECT, - ATTR_HS_COLOR, ATTR_SUPPORTED_COLOR_MODES, DOMAIN as LIGHT_DOMAIN, ColorMode, @@ -18,7 +16,7 @@ from homeassistant.const import ATTR_SUPPORTED_FEATURES, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics +from .helper import async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -29,9 +27,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: assert not hass.data.get(HMIPC_DOMAIN) -async def test_hmip_light( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: +async def test_hmip_light(hass: HomeAssistant, default_mock_hap_factory) -> None: """Test HomematicipLight.""" entity_id = "light.treppe_ch" entity_name = "Treppe CH" @@ -77,7 +73,7 @@ async def test_hmip_light( async def test_hmip_notification_light( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipNotificationLight.""" entity_id = "light.alarm_status" @@ -175,104 +171,7 @@ async def test_hmip_notification_light( assert not ha_state.attributes.get(ATTR_BRIGHTNESS) -async def 
test_hmip_notification_light_2( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: - """Test HomematicipNotificationLight.""" - entity_id = "light.led_oben" - entity_name = "Led Oben" - device_model = "HmIP-BSL" - mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_devices=["BSL2"]) - - ha_state, hmip_device = get_and_check_entity_basics( - hass, mock_hap, entity_id, entity_name, device_model - ) - - assert ha_state.state == STATE_ON - assert ha_state.attributes[ATTR_EFFECT] == "BLINKING_MIDDLE" - - functional_channel = hmip_device.functionalChannels[3] - service_call_counter = len(functional_channel.mock_calls) - - # Send all color via service call. - await hass.services.async_call( - "light", - "turn_on", - {"entity_id": entity_id, ATTR_HS_COLOR: [240.0, 100.0], ATTR_BRIGHTNESS: 128}, - blocking=True, - ) - assert functional_channel.mock_calls[-1][0] == "async_set_optical_signal" - assert functional_channel.mock_calls[-1][2] == { - "opticalSignalBehaviour": OpticalSignalBehaviour.BLINKING_MIDDLE, - "rgb": RGBColorState.BLUE, - "dimLevel": 0.5, - } - assert service_call_counter + 1 == len(functional_channel.mock_calls) - - -async def test_hmip_notification_light_2_without_brightness_and_light( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: - """Test HomematicipNotificationLight.""" - entity_id = "light.led_oben" - entity_name = "Led Oben" - device_model = "HmIP-BSL" - mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_devices=["BSL2"]) - ha_state, hmip_device = get_and_check_entity_basics( - hass, mock_hap, entity_id, entity_name, device_model - ) - - color_before = ha_state.attributes["color_name"] - - functional_channel = hmip_device.functionalChannels[3] - service_call_counter = len(functional_channel.mock_calls) - - # Send all color via service call. - await hass.services.async_call( - "light", - "turn_on", - {"entity_id": entity_id, ATTR_EFFECT: OpticalSignalBehaviour.FLASH_MIDDLE}, - blocking=True, - ) - assert functional_channel.mock_calls[-1][0] == "async_set_optical_signal" - assert functional_channel.mock_calls[-1][2] == { - "opticalSignalBehaviour": OpticalSignalBehaviour.FLASH_MIDDLE, - "rgb": color_before, - "dimLevel": 1, - } - assert service_call_counter + 1 == len(functional_channel.mock_calls) - - -async def test_hmip_notification_light_2_turn_off( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: - """Test HomematicipNotificationLight.""" - entity_id = "light.led_oben" - entity_name = "Led Oben" - device_model = "HmIP-BSL" - mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_devices=["BSL2"]) - - ha_state, hmip_device = get_and_check_entity_basics( - hass, mock_hap, entity_id, entity_name, device_model - ) - - functional_channel = hmip_device.functionalChannels[3] - service_call_counter = len(functional_channel.mock_calls) - - # Send all color via service call. 
- await hass.services.async_call( - "light", - "turn_off", - {"entity_id": entity_id}, - blocking=True, - ) - assert functional_channel.mock_calls[-1][0] == "async_turn_off" - assert service_call_counter + 1 == len(functional_channel.mock_calls) - - -async def test_hmip_dimmer( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: +async def test_hmip_dimmer(hass: HomeAssistant, default_mock_hap_factory) -> None: """Test HomematicipDimmer.""" entity_id = "light.schlafzimmerlicht" entity_name = "Schlafzimmerlicht" @@ -331,7 +230,7 @@ async def test_hmip_dimmer( async def test_hmip_light_measuring( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipLightMeasuring.""" entity_id = "light.flur_oben" @@ -377,7 +276,7 @@ async def test_hmip_light_measuring( async def test_hmip_wired_multi_dimmer( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipMultiDimmer.""" entity_id = "light.raumlich_kuche" @@ -437,7 +336,7 @@ async def test_hmip_wired_multi_dimmer( async def test_hmip_din_rail_dimmer_3_channel1( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicIP DinRailDimmer3 Channel 1.""" entity_id = "light.3_dimmer_channel1" @@ -496,7 +395,7 @@ async def test_hmip_din_rail_dimmer_3_channel1( async def test_hmip_din_rail_dimmer_3_channel2( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicIP DinRailDimmer3 Channel 2.""" entity_id = "light.3_dimmer_channel2" @@ -555,7 +454,7 @@ async def test_hmip_din_rail_dimmer_3_channel2( async def test_hmip_din_rail_dimmer_3_channel3( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicIP DinRailDimmer3 Channel 3.""" entity_id = "light.esstisch" diff --git a/tests/components/homematicip_cloud/test_lock.py b/tests/components/homematicip_cloud/test_lock.py index cb8a0188639..f49ad42b013 100644 --- a/tests/components/homematicip_cloud/test_lock.py +++ b/tests/components/homematicip_cloud/test_lock.py @@ -2,33 +2,34 @@ from unittest.mock import patch -from homematicip.base.enums import LockState as HomematicLockState, MotorState +from homematicip.base.enums import LockState, MotorState import pytest from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN from homeassistant.components.lock import ( - DOMAIN as LOCK_DOMAIN, + DOMAIN, + STATE_LOCKING, + STATE_UNLOCKING, LockEntityFeature, - LockState, ) from homeassistant.const import ATTR_SUPPORTED_FEATURES from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component -from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics +from .helper import async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: """Test that we do not set up an access point.""" assert await async_setup_component( - hass, LOCK_DOMAIN, {LOCK_DOMAIN: {"platform": HMIPC_DOMAIN}} + hass, DOMAIN, {DOMAIN: {"platform": HMIPC_DOMAIN}} ) assert not hass.data.get(HMIPC_DOMAIN) async def test_hmip_doorlockdrive( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, 
default_mock_hap_factory ) -> None: """Test HomematicipDoorLockDrive.""" entity_id = "lock.haustuer" @@ -51,7 +52,7 @@ async def test_hmip_doorlockdrive( blocking=True, ) assert hmip_device.mock_calls[-1][0] == "set_lock_state" - assert hmip_device.mock_calls[-1][1] == (HomematicLockState.OPEN,) + assert hmip_device.mock_calls[-1][1] == (LockState.OPEN,) await hass.services.async_call( "lock", @@ -60,7 +61,7 @@ async def test_hmip_doorlockdrive( blocking=True, ) assert hmip_device.mock_calls[-1][0] == "set_lock_state" - assert hmip_device.mock_calls[-1][1] == (HomematicLockState.LOCKED,) + assert hmip_device.mock_calls[-1][1] == (LockState.LOCKED,) await hass.services.async_call( "lock", @@ -70,23 +71,23 @@ async def test_hmip_doorlockdrive( ) assert hmip_device.mock_calls[-1][0] == "set_lock_state" - assert hmip_device.mock_calls[-1][1] == (HomematicLockState.UNLOCKED,) + assert hmip_device.mock_calls[-1][1] == (LockState.UNLOCKED,) await async_manipulate_test_data( hass, hmip_device, "motorState", MotorState.CLOSING ) ha_state = hass.states.get(entity_id) - assert ha_state.state == LockState.LOCKING + assert ha_state.state == STATE_LOCKING await async_manipulate_test_data( hass, hmip_device, "motorState", MotorState.OPENING ) ha_state = hass.states.get(entity_id) - assert ha_state.state == LockState.UNLOCKING + assert ha_state.state == STATE_UNLOCKING async def test_hmip_doorlockdrive_handle_errors( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipDoorLockDrive.""" entity_id = "lock.haustuer" diff --git a/tests/components/homematicip_cloud/test_sensor.py b/tests/components/homematicip_cloud/test_sensor.py index 2dda3116032..6951b750b2f 100644 --- a/tests/components/homematicip_cloud/test_sensor.py +++ b/tests/components/homematicip_cloud/test_sensor.py @@ -3,7 +3,7 @@ from homematicip.base.enums import ValveState from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN -from homeassistant.components.homematicip_cloud.entity import ( +from homeassistant.components.homematicip_cloud.generic_entity import ( ATTR_CONFIG_PENDING, ATTR_DEVICE_OVERHEATED, ATTR_DEVICE_OVERLOADED, @@ -12,7 +12,6 @@ from homeassistant.components.homematicip_cloud.entity import ( ATTR_RSSI_DEVICE, ATTR_RSSI_PEER, ) -from homeassistant.components.homematicip_cloud.hap import HomematicipHAP from homeassistant.components.homematicip_cloud.sensor import ( ATTR_CURRENT_ILLUMINATION, ATTR_HIGHEST_ILLUMINATION, @@ -23,11 +22,7 @@ from homeassistant.components.homematicip_cloud.sensor import ( ATTR_WIND_DIRECTION, ATTR_WIND_DIRECTION_VARIATION, ) -from homeassistant.components.sensor import ( - ATTR_STATE_CLASS, - DOMAIN as SENSOR_DOMAIN, - SensorStateClass, -) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, LIGHT_LUX, @@ -41,7 +36,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics +from .helper import async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -53,7 +48,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_accesspoint_status( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, 
default_mock_hap_factory ) -> None: """Test HomematicipSwitch.""" entity_id = "sensor.home_control_access_point_duty_cycle" @@ -72,7 +67,7 @@ async def test_hmip_accesspoint_status( async def test_hmip_heating_thermostat( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipHeatingThermostat.""" entity_id = "sensor.heizkorperthermostat_heating" @@ -108,7 +103,7 @@ async def test_hmip_heating_thermostat( async def test_hmip_humidity_sensor( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipHumiditySensor.""" entity_id = "sensor.bwth_1_humidity" @@ -133,7 +128,7 @@ async def test_hmip_humidity_sensor( async def test_hmip_temperature_sensor1( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipTemperatureSensor.""" entity_id = "sensor.bwth_1_temperature" @@ -160,7 +155,7 @@ async def test_hmip_temperature_sensor1( async def test_hmip_temperature_sensor2( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipTemperatureSensor.""" entity_id = "sensor.heizkorperthermostat_temperature" @@ -187,7 +182,7 @@ async def test_hmip_temperature_sensor2( async def test_hmip_temperature_sensor3( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipTemperatureSensor.""" entity_id = "sensor.raumbediengerat_analog_temperature" @@ -214,7 +209,7 @@ async def test_hmip_temperature_sensor3( async def test_hmip_thermostat_evo_heating( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipHeatingThermostat for HmIP-eTRV-E.""" entity_id = "sensor.thermostat_evo_heating" @@ -236,7 +231,7 @@ async def test_hmip_thermostat_evo_heating( async def test_hmip_thermostat_evo_temperature( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipTemperatureSensor.""" entity_id = "sensor.thermostat_evo_temperature" @@ -261,9 +256,7 @@ async def test_hmip_thermostat_evo_temperature( assert ha_state.attributes[ATTR_TEMPERATURE_OFFSET] == 0.7 -async def test_hmip_power_sensor( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: +async def test_hmip_power_sensor(hass: HomeAssistant, default_mock_hap_factory) -> None: """Test HomematicipPowerSensor.""" entity_id = "sensor.flur_oben_power" entity_name = "Flur oben Power" @@ -301,7 +294,7 @@ async def test_hmip_power_sensor( async def test_hmip_illuminance_sensor1( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipIlluminanceSensor.""" entity_id = "sensor.wettersensor_illuminance" @@ -323,7 +316,7 @@ async def test_hmip_illuminance_sensor1( async def test_hmip_illuminance_sensor2( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipIlluminanceSensor.""" entity_id = "sensor.lichtsensor_nord_illuminance" @@ -348,7 +341,7 @@ async def test_hmip_illuminance_sensor2( async def test_hmip_windspeed_sensor( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, 
default_mock_hap_factory ) -> None: """Test HomematicipWindspeedSensor.""" entity_id = "sensor.wettersensor_pro_windspeed" @@ -366,7 +359,6 @@ async def test_hmip_windspeed_sensor( assert ( ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfSpeed.KILOMETERS_PER_HOUR ) - assert ha_state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT await async_manipulate_test_data(hass, hmip_device, "windSpeed", 9.4) ha_state = hass.states.get(entity_id) assert ha_state.state == "9.4" @@ -400,7 +392,7 @@ async def test_hmip_windspeed_sensor( async def test_hmip_today_rain_sensor( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipTodayRainSensor.""" entity_id = "sensor.weather_sensor_plus_today_rain" @@ -416,14 +408,13 @@ async def test_hmip_today_rain_sensor( assert ha_state.state == "3.9" assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfLength.MILLIMETERS - assert ha_state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT await async_manipulate_test_data(hass, hmip_device, "todayRainCounter", 14.2) ha_state = hass.states.get(entity_id) assert ha_state.state == "14.2" async def test_hmip_temperature_external_sensor_channel_1( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipTemperatureDifferenceSensor Channel 1 HmIP-STE2-PCB.""" entity_id = "sensor.ste2_channel_1_temperature" @@ -448,7 +439,7 @@ async def test_hmip_temperature_external_sensor_channel_1( async def test_hmip_temperature_external_sensor_channel_2( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipTemperatureDifferenceSensor Channel 2 HmIP-STE2-PCB.""" entity_id = "sensor.ste2_channel_2_temperature" @@ -473,7 +464,7 @@ async def test_hmip_temperature_external_sensor_channel_2( async def test_hmip_temperature_external_sensor_delta( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipTemperatureDifferenceSensor Delta HmIP-STE2-PCB.""" entity_id = "sensor.ste2_delta_temperature" @@ -500,7 +491,7 @@ async def test_hmip_temperature_external_sensor_delta( async def test_hmip_passage_detector_delta_counter( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipPassageDetectorDeltaCounter.""" entity_id = "sensor.spdr_1" @@ -522,49 +513,8 @@ async def test_hmip_passage_detector_delta_counter( assert ha_state.state == "190" -async def test_hmip_floor_terminal_block_mechanic_channel_1_valve_position( - hass: HomeAssistant, default_mock_hap_factory: HomematicipHAP -) -> None: - """Test HomematicipFloorTerminalBlockMechanicChannelValve Channel 1 HmIP-FALMOT-C12.""" - entity_id = "sensor.heizkreislauf_1_og_bad_r" - entity_name = "Heizkreislauf (1) OG Bad r" - device_model = "HmIP-FALMOT-C12" - - mock_hap = await default_mock_hap_factory.async_get_mock_hap( - test_devices=["Fu\u00dfbodenheizungsaktor"] - ) - ha_state, hmip_device = get_and_check_entity_basics( - hass, mock_hap, entity_id, entity_name, device_model - ) - - hmip_device = mock_hap.hmip_device_by_entity_id.get(entity_id) - - assert ha_state.state == "48" - assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE - await async_manipulate_test_data(hass, hmip_device, "valvePosition", 0.36) - ha_state = 
hass.states.get(entity_id) - assert ha_state.state == "36" - - await async_manipulate_test_data(hass, hmip_device, "configPending", True) - ha_state = hass.states.get(entity_id) - assert ha_state.attributes["icon"] == "mdi:alert-circle" - - await async_manipulate_test_data(hass, hmip_device, "configPending", False) - await async_manipulate_test_data( - hass, hmip_device, "valveState", ValveState.ADAPTION_IN_PROGRESS - ) - ha_state = hass.states.get(entity_id) - assert ha_state.attributes["icon"] == "mdi:alert" - - await async_manipulate_test_data( - hass, hmip_device, "valveState", ValveState.ADAPTION_DONE - ) - ha_state = hass.states.get(entity_id) - assert ha_state.attributes["icon"] == "mdi:heating-coil" - - async def test_hmip_esi_iec_current_power_consumption( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test ESI-IEC currentPowerConsumption Sensor.""" entity_id = "sensor.esi_iec_currentPowerConsumption" @@ -582,7 +532,7 @@ async def test_hmip_esi_iec_current_power_consumption( async def test_hmip_esi_iec_energy_counter_usage_high_tariff( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test ESI-IEC ENERGY_COUNTER_USAGE_HIGH_TARIFF.""" entity_id = "sensor.esi_iec_energy_counter_usage_high_tariff" @@ -600,7 +550,7 @@ async def test_hmip_esi_iec_energy_counter_usage_high_tariff( async def test_hmip_esi_iec_energy_counter_usage_low_tariff( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test ESI-IEC ENERGY_COUNTER_USAGE_LOW_TARIFF.""" entity_id = "sensor.esi_iec_energy_counter_usage_low_tariff" @@ -618,7 +568,7 @@ async def test_hmip_esi_iec_energy_counter_usage_low_tariff( async def test_hmip_esi_iec_energy_counter_input_single_tariff( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test ESI-IEC ENERGY_COUNTER_INPUT_SINGLE_TARIFF.""" entity_id = "sensor.esi_iec_energy_counter_input_single_tariff" @@ -636,7 +586,7 @@ async def test_hmip_esi_iec_energy_counter_input_single_tariff( async def test_hmip_esi_iec_unknown_channel( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test devices are loaded partially.""" not_existing_entity_id = "sensor.esi_iec2_energy_counter_input_single_tariff" @@ -651,7 +601,7 @@ async def test_hmip_esi_iec_unknown_channel( async def test_hmip_esi_gas_current_gas_flow( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test ESI-IEC CurrentGasFlow.""" entity_id = "sensor.esi_gas_currentgasflow" @@ -669,7 +619,7 @@ async def test_hmip_esi_gas_current_gas_flow( async def test_hmip_esi_gas_gas_volume( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test ESI-IEC GasVolume.""" entity_id = "sensor.esi_gas_gasvolume" @@ -684,39 +634,3 @@ async def test_hmip_esi_gas_gas_volume( ) assert ha_state.state == "1019.26" - - -async def test_hmip_esi_led_current_power_consumption( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: - """Test ESI-IEC currentPowerConsumption Sensor.""" - entity_id = "sensor.esi_led_currentPowerConsumption" - entity_name = "esi_led CurrentPowerConsumption" - device_model = "HmIP-ESI" - mock_hap = await 
default_mock_hap_factory.async_get_mock_hap( - test_devices=["esi_led"] - ) - - ha_state, hmip_device = get_and_check_entity_basics( - hass, mock_hap, entity_id, entity_name, device_model - ) - - assert ha_state.state == "189.15" - - -async def test_hmip_esi_led_energy_counter_usage_high_tariff( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: - """Test ESI-IEC ENERGY_COUNTER_USAGE_HIGH_TARIFF.""" - entity_id = "sensor.esi_led_energy_counter_usage_high_tariff" - entity_name = "esi_led ENERGY_COUNTER_USAGE_HIGH_TARIFF" - device_model = "HmIP-ESI" - mock_hap = await default_mock_hap_factory.async_get_mock_hap( - test_devices=["esi_led"] - ) - - ha_state, hmip_device = get_and_check_entity_basics( - hass, mock_hap, entity_id, entity_name, device_model - ) - - assert ha_state.state == "23825.748" diff --git a/tests/components/homematicip_cloud/test_switch.py b/tests/components/homematicip_cloud/test_switch.py index 54cdd632d03..a249c52393d 100644 --- a/tests/components/homematicip_cloud/test_switch.py +++ b/tests/components/homematicip_cloud/test_switch.py @@ -1,7 +1,7 @@ """Tests for HomematicIP Cloud switch.""" from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN -from homeassistant.components.homematicip_cloud.entity import ( +from homeassistant.components.homematicip_cloud.generic_entity import ( ATTR_GROUP_MEMBER_UNREACHABLE, ) from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN @@ -9,7 +9,7 @@ from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics +from .helper import async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -20,9 +20,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: assert not hass.data.get(HMIPC_DOMAIN) -async def test_hmip_switch( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: +async def test_hmip_switch(hass: HomeAssistant, default_mock_hap_factory) -> None: """Test HomematicipSwitch.""" entity_id = "switch.schrank" entity_name = "Schrank" @@ -59,9 +57,7 @@ async def test_hmip_switch( assert ha_state.state == STATE_ON -async def test_hmip_switch_input( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: +async def test_hmip_switch_input(hass: HomeAssistant, default_mock_hap_factory) -> None: """Test HomematicipSwitch.""" entity_id = "switch.wohnzimmer_beleuchtung" entity_name = "Wohnzimmer Beleuchtung" @@ -99,7 +95,7 @@ async def test_hmip_switch_input( async def test_hmip_switch_measuring( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipSwitchMeasuring.""" entity_id = "switch.pc" @@ -138,9 +134,7 @@ async def test_hmip_switch_measuring( assert ha_state.state == STATE_ON -async def test_hmip_group_switch( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: +async def test_hmip_group_switch(hass: HomeAssistant, default_mock_hap_factory) -> None: """Test HomematicipGroupSwitch.""" entity_id = "switch.strom_group" entity_name = "Strom Group" @@ -180,9 +174,7 @@ async def test_hmip_group_switch( assert ha_state.attributes[ATTR_GROUP_MEMBER_UNREACHABLE] -async def test_hmip_multi_switch( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: +async 
def test_hmip_multi_switch(hass: HomeAssistant, default_mock_hap_factory) -> None: """Test HomematicipMultiSwitch.""" entity_id = "switch.jalousien_1_kizi_2_schlazi_channel1" entity_name = "Jalousien - 1 KiZi, 2 SchlaZi Channel1" @@ -236,7 +228,7 @@ async def test_hmip_multi_switch( async def test_hmip_wired_multi_switch( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipMultiSwitch.""" entity_id = "switch.fernseher_wohnzimmer" diff --git a/tests/components/homematicip_cloud/test_weather.py b/tests/components/homematicip_cloud/test_weather.py index 44df907fcc5..44005afd511 100644 --- a/tests/components/homematicip_cloud/test_weather.py +++ b/tests/components/homematicip_cloud/test_weather.py @@ -12,7 +12,7 @@ from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics +from .helper import async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -24,7 +24,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_weather_sensor( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipWeatherSensor.""" entity_id = "weather.weather_sensor_plus" @@ -50,7 +50,7 @@ async def test_hmip_weather_sensor( async def test_hmip_weather_sensor_pro( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory + hass: HomeAssistant, default_mock_hap_factory ) -> None: """Test HomematicipWeatherSensorPro.""" entity_id = "weather.wettersensor_pro" @@ -76,9 +76,7 @@ async def test_hmip_weather_sensor_pro( assert ha_state.attributes[ATTR_WEATHER_TEMPERATURE] == 12.1 -async def test_hmip_home_weather( - hass: HomeAssistant, default_mock_hap_factory: HomeFactory -) -> None: +async def test_hmip_home_weather(hass: HomeAssistant, default_mock_hap_factory) -> None: """Test HomematicipHomeWeather.""" entity_id = "weather.weather_1010_wien_osterreich" entity_name = "Weather 1010 Wien, Österreich" diff --git a/tests/components/homewizard/conftest.py b/tests/components/homewizard/conftest.py index fcfe1e5c189..eb638492941 100644 --- a/tests/components/homewizard/conftest.py +++ b/tests/components/homewizard/conftest.py @@ -1,11 +1,11 @@ """Fixtures for HomeWizard integration tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from homewizard_energy.errors import NotFoundError from homewizard_energy.models import Data, Device, State, System import pytest +from typing_extensions import Generator from homeassistant.components.homewizard.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS diff --git a/tests/components/homewizard/snapshots/test_button.ambr b/tests/components/homewizard/snapshots/test_button.ambr index d5ad9770478..eabaeb648aa 100644 --- a/tests/components/homewizard/snapshots/test_button.ambr +++ b/tests/components/homewizard/snapshots/test_button.ambr @@ -71,8 +71,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/homewizard/snapshots/test_config_flow.ambr 
b/tests/components/homewizard/snapshots/test_config_flow.ambr index c3852a8c3fa..663d9153991 100644 --- a/tests/components/homewizard/snapshots/test_config_flow.ambr +++ b/tests/components/homewizard/snapshots/test_config_flow.ambr @@ -20,8 +20,6 @@ 'ip_address': '127.0.0.1', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'homewizard', 'entry_id': , 'minor_version': 1, @@ -64,8 +62,6 @@ 'ip_address': '127.0.0.1', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'homewizard', 'entry_id': , 'minor_version': 1, @@ -108,8 +104,6 @@ 'ip_address': '127.0.0.1', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'homewizard', 'entry_id': , 'minor_version': 1, @@ -148,8 +142,6 @@ 'ip_address': '2.2.2.2', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'homewizard', 'entry_id': , 'minor_version': 1, diff --git a/tests/components/homewizard/snapshots/test_diagnostics.ambr b/tests/components/homewizard/snapshots/test_diagnostics.ambr index f8ac80f2536..7b82056aacb 100644 --- a/tests/components/homewizard/snapshots/test_diagnostics.ambr +++ b/tests/components/homewizard/snapshots/test_diagnostics.ambr @@ -65,12 +65,6 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.06', - 'product': dict({ - 'description': 'Measure solar panels, car chargers and more.', - 'model': 'HWE-KWH1', - 'name': 'Wi-Fi kWh Meter 1-phase', - 'url': 'https://www.homewizard.com/kwh-meter/', - }), 'product_name': 'kWh meter', 'product_type': 'HWE-KWH1', 'serial': '**REDACTED**', @@ -154,12 +148,6 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.06', - 'product': dict({ - 'description': 'Measure solar panels, car chargers and more.', - 'model': 'HWE-KWH3', - 'name': 'Wi-Fi kWh Meter 3-phase', - 'url': 'https://www.homewizard.com/kwh-meter/', - }), 'product_name': 'KWh meter 3-phase', 'product_type': 'HWE-KWH3', 'serial': '**REDACTED**', @@ -294,12 +282,6 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '4.19', - 'product': dict({ - 'description': 'The HomeWizard P1 Meter gives you detailed insight in your electricity-, gas consumption and solar surplus.', - 'model': 'HWE-P1', - 'name': 'Wi-Fi P1 Meter', - 'url': 'https://www.homewizard.com/p1-meter/', - }), 'product_name': 'P1 meter', 'product_type': 'HWE-P1', 'serial': '**REDACTED**', @@ -383,12 +365,6 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.03', - 'product': dict({ - 'description': 'Measure and switch every device.', - 'model': 'HWE-SKT', - 'name': 'Wi-Fi Energy Socket', - 'url': 'https://www.homewizard.com/energy-socket/', - }), 'product_name': 'Energy Socket', 'product_type': 'HWE-SKT', 'serial': '**REDACTED**', @@ -476,12 +452,6 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '4.07', - 'product': dict({ - 'description': 'Measure and switch every device.', - 'model': 'HWE-SKT', - 'name': 'Wi-Fi Energy Socket', - 'url': 'https://www.homewizard.com/energy-socket/', - }), 'product_name': 'Energy Socket', 'product_type': 'HWE-SKT', 'serial': '**REDACTED**', @@ -569,12 +539,6 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '2.03', - 'product': dict({ - 'description': 'Real-time water consumption insights', - 'model': 'HWE-WTR', - 'name': 'Wi-Fi Watermeter', - 'url': 'https://www.homewizard.com/watermeter/', - }), 'product_name': 'Watermeter', 'product_type': 'HWE-WTR', 'serial': '**REDACTED**', @@ -658,12 +622,6 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.06', - 'product': dict({ - 'description': 
'Measure solar panels, car chargers and more.', - 'model': 'SDM230-wifi', - 'name': 'Wi-Fi kWh Meter 1-phase', - 'url': 'https://www.homewizard.com/kwh-meter/', - }), 'product_name': 'kWh meter', 'product_type': 'SDM230-wifi', 'serial': '**REDACTED**', @@ -747,12 +705,6 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.06', - 'product': dict({ - 'description': 'Measure solar panels, car chargers and more.', - 'model': 'SDM630-wifi', - 'name': 'Wi-Fi kWh Meter 3-phase', - 'url': 'https://www.homewizard.com/kwh-meter/', - }), 'product_name': 'KWh meter 3-phase', 'product_type': 'SDM630-wifi', 'serial': '**REDACTED**', diff --git a/tests/components/homewizard/snapshots/test_number.ambr b/tests/components/homewizard/snapshots/test_number.ambr index 768255c7508..f292847f2a2 100644 --- a/tests/components/homewizard/snapshots/test_number.ambr +++ b/tests/components/homewizard/snapshots/test_number.ambr @@ -80,8 +80,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -172,8 +171,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/homewizard/snapshots/test_sensor.ambr b/tests/components/homewizard/snapshots/test_sensor.ambr index 5d5b458dccc..27dfd6399c7 100644 --- a/tests/components/homewizard/snapshots/test_sensor.ambr +++ b/tests/components/homewizard/snapshots/test_sensor.ambr @@ -1,4 +1,37 @@ # serializer version: 1 +# name: test_gas_meter_migrated[sensor.homewizard_aabbccddeeff_total_gas_m3:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.homewizard_aabbccddeeff_total_gas_m3', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'homewizard', + 'previous_unique_id': 'aabbccddeeff_total_gas_m3', + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'homewizard_gas_meter_01FFEEDDCCBBAA99887766554433221100', + 'unit_of_measurement': None, + }) +# --- # name: test_sensors[HWE-KWH1-entity_ids7][sensor.device_apparent_power:device-registry] DeviceRegistryEntrySnapshot({ 'area_id': None, @@ -24,8 +57,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'HWE-KWH1', + 'model': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -111,8 +143,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'HWE-KWH1', + 'model': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -198,8 +229,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'HWE-KWH1', + 'model': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -285,8 +315,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'HWE-KWH1', + 'model': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 
'primary_config_entry': , @@ -372,8 +401,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'HWE-KWH1', + 'model': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -459,8 +487,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'HWE-KWH1', + 'model': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -549,8 +576,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'HWE-KWH1', + 'model': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -636,8 +662,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'HWE-KWH1', + 'model': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -679,7 +704,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }) # --- # name: test_sensors[HWE-KWH1-entity_ids7][sensor.device_reactive_power:state] @@ -688,7 +713,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -723,8 +748,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'HWE-KWH1', + 'model': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -810,8 +834,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'HWE-KWH1', + 'model': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -892,8 +915,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'HWE-KWH1', + 'model': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -978,8 +1000,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1065,8 +1086,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1152,8 +1172,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1239,8 +1258,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1326,8 +1344,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1413,8 +1430,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1500,8 +1516,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 
'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1587,8 +1602,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1674,8 +1688,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1761,8 +1774,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1848,8 +1860,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1935,8 +1946,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2025,8 +2035,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2112,8 +2121,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2199,8 +2207,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2286,8 +2293,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2376,8 +2382,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2466,8 +2471,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2556,8 +2560,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2599,7 +2602,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }) # --- # name: test_sensors[HWE-KWH3-entity_ids8][sensor.device_reactive_power:state] @@ -2608,7 +2611,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -2643,8 +2646,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ 
-2686,7 +2688,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l1_var', - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }) # --- # name: test_sensors[HWE-KWH3-entity_ids8][sensor.device_reactive_power_phase_1:state] @@ -2695,7 +2697,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 1', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_1', @@ -2730,8 +2732,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2773,7 +2774,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l2_var', - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }) # --- # name: test_sensors[HWE-KWH3-entity_ids8][sensor.device_reactive_power_phase_2:state] @@ -2782,7 +2783,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 2', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_2', @@ -2817,8 +2818,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2860,7 +2860,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l3_var', - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }) # --- # name: test_sensors[HWE-KWH3-entity_ids8][sensor.device_reactive_power_phase_3:state] @@ -2869,7 +2869,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 3', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_3', @@ -2904,8 +2904,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2991,8 +2990,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3078,8 +3076,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3165,8 +3162,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3247,8 +3243,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3333,8 +3328,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3417,8 +3411,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', 
- 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3504,8 +3497,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3591,8 +3583,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3678,8 +3669,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3760,8 +3750,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3847,8 +3836,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3934,8 +3922,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4021,8 +4008,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4108,8 +4094,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4195,8 +4180,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4282,8 +4266,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4369,8 +4352,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4456,8 +4438,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4543,8 +4524,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4630,8 +4610,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4717,8 +4696,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4799,8 +4777,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4883,8 +4860,7 @@ 
'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4973,8 +4949,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5055,8 +5030,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5145,8 +5119,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5235,8 +5208,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5325,8 +5297,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5407,8 +5378,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5489,8 +5459,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5585,8 +5554,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5672,8 +5640,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5759,8 +5726,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5846,8 +5812,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5933,8 +5898,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6015,8 +5979,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6097,8 +6060,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6179,8 +6141,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6261,8 +6222,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': 
None, 'primary_config_entry': , @@ -6343,8 +6303,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6425,8 +6384,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6511,8 +6469,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6593,8 +6550,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6676,7 +6632,6 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', - 'model_id': None, 'name': 'Gas meter', 'name_by_user': None, 'primary_config_entry': , @@ -6759,7 +6714,6 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', - 'model_id': None, 'name': 'Heat meter', 'name_by_user': None, 'primary_config_entry': , @@ -6842,7 +6796,6 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', - 'model_id': None, 'name': 'Inlet heat meter', 'name_by_user': None, 'primary_config_entry': , @@ -6924,7 +6877,6 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', - 'model_id': None, 'name': 'Warm water meter', 'name_by_user': None, 'primary_config_entry': , @@ -7007,7 +6959,6 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', - 'model_id': None, 'name': 'Water meter', 'name_by_user': None, 'primary_config_entry': , @@ -7093,8 +7044,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7177,8 +7127,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7264,8 +7213,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7351,8 +7299,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7438,8 +7385,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7520,8 +7466,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7607,8 +7552,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7694,8 +7638,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7781,8 +7724,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 
'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7868,8 +7810,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7955,8 +7896,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8042,8 +7982,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8129,8 +8068,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8216,8 +8154,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8303,8 +8240,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8390,8 +8326,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8477,8 +8412,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8559,8 +8493,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8643,8 +8576,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8733,8 +8665,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8815,8 +8746,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8905,8 +8835,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8995,8 +8924,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9085,8 +9013,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9167,8 +9094,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9249,8 +9175,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 
'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9345,8 +9270,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9432,8 +9356,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9519,8 +9442,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9606,8 +9528,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9693,8 +9614,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9775,8 +9695,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9857,8 +9776,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9939,8 +9857,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10021,8 +9938,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10103,8 +10019,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10185,8 +10100,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10271,8 +10185,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10353,8 +10266,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10436,7 +10348,6 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', - 'model_id': None, 'name': 'Gas meter', 'name_by_user': None, 'primary_config_entry': , @@ -10519,7 +10430,6 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', - 'model_id': None, 'name': 'Heat meter', 'name_by_user': None, 'primary_config_entry': , @@ -10602,7 +10512,6 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', - 'model_id': None, 'name': 'Inlet heat meter', 'name_by_user': None, 'primary_config_entry': , @@ -10684,7 +10593,6 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', - 'model_id': None, 'name': 'Warm water meter', 'name_by_user': None, 
'primary_config_entry': , @@ -10767,7 +10675,6 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', - 'model_id': None, 'name': 'Water meter', 'name_by_user': None, 'primary_config_entry': , @@ -10853,8 +10760,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10937,8 +10843,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11024,8 +10929,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11111,8 +11015,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11198,8 +11101,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11285,8 +11187,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11372,8 +11273,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11459,8 +11359,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11546,8 +11445,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11633,8 +11531,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11720,8 +11617,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11807,8 +11703,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11894,8 +11789,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11981,8 +11875,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12068,8 +11961,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12155,8 +12047,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 
'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12237,8 +12128,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12327,8 +12217,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12409,8 +12298,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12499,8 +12387,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12589,8 +12476,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12679,8 +12565,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12766,8 +12651,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12853,8 +12737,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12940,8 +12823,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13027,8 +12909,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13109,8 +12990,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13191,8 +13071,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13273,8 +13152,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13355,8 +13233,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13437,8 +13314,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13519,8 +13395,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi P1 Meter', - 'model_id': 'HWE-P1', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13605,8 +13480,7 @@ 'labels': set({ }), 'manufacturer': 
'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13692,8 +13566,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13779,8 +13652,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13869,8 +13741,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13959,8 +13830,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14041,8 +13911,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14127,8 +13996,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14214,8 +14082,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14301,8 +14168,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14388,8 +14254,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14475,8 +14340,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14562,8 +14426,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14652,8 +14515,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14739,8 +14601,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14829,8 +14690,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14872,7 +14732,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }) # --- # name: test_sensors[HWE-SKT-21-entity_ids3][sensor.device_reactive_power:state] @@ -14881,7 +14741,7 
@@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -14916,8 +14776,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15003,8 +14862,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15085,8 +14943,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15171,8 +15028,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Watermeter', - 'model_id': 'HWE-WTR', + 'model': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15258,8 +15114,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Watermeter', - 'model_id': 'HWE-WTR', + 'model': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15344,8 +15199,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Watermeter', - 'model_id': 'HWE-WTR', + 'model': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15426,8 +15280,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Watermeter', - 'model_id': 'HWE-WTR', + 'model': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15512,8 +15365,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'SDM230-wifi', + 'model': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15599,8 +15451,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'SDM230-wifi', + 'model': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15686,8 +15537,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'SDM230-wifi', + 'model': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15773,8 +15623,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'SDM230-wifi', + 'model': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15860,8 +15709,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'SDM230-wifi', + 'model': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15947,8 +15795,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'SDM230-wifi', + 'model': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16037,8 +15884,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'SDM230-wifi', + 'model': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16124,8 +15970,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'SDM230-wifi', + 'model': 'SDM230-wifi', 
'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16167,7 +16012,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }) # --- # name: test_sensors[SDM230-entity_ids5][sensor.device_reactive_power:state] @@ -16176,7 +16021,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -16211,8 +16056,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'SDM230-wifi', + 'model': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16298,8 +16142,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'SDM230-wifi', + 'model': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16380,8 +16223,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'SDM230-wifi', + 'model': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16466,8 +16308,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16553,8 +16394,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16640,8 +16480,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16727,8 +16566,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16814,8 +16652,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16901,8 +16738,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16988,8 +16824,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17075,8 +16910,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17162,8 +16996,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17249,8 +17082,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 
'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17336,8 +17168,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17423,8 +17254,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17513,8 +17343,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17600,8 +17429,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17687,8 +17515,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17774,8 +17601,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17864,8 +17690,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17954,8 +17779,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18044,8 +17868,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18087,7 +17910,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }) # --- # name: test_sensors[SDM630-entity_ids6][sensor.device_reactive_power:state] @@ -18096,7 +17919,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -18131,8 +17954,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18174,7 +17996,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l1_var', - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }) # --- # name: test_sensors[SDM630-entity_ids6][sensor.device_reactive_power_phase_1:state] @@ -18183,7 +18005,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 1', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_1', @@ -18218,8 +18040,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 
'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18261,7 +18082,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l2_var', - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }) # --- # name: test_sensors[SDM630-entity_ids6][sensor.device_reactive_power_phase_2:state] @@ -18270,7 +18091,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 2', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_2', @@ -18305,8 +18126,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18348,7 +18168,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l3_var', - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }) # --- # name: test_sensors[SDM630-entity_ids6][sensor.device_reactive_power_phase_3:state] @@ -18357,7 +18177,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 3', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': 'var', }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_3', @@ -18392,8 +18212,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18479,8 +18298,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18566,8 +18384,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18653,8 +18470,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18735,8 +18551,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/homewizard/snapshots/test_switch.ambr b/tests/components/homewizard/snapshots/test_switch.ambr index 68a351c1ebb..ba630e2f0b4 100644 --- a/tests/components/homewizard/snapshots/test_switch.ambr +++ b/tests/components/homewizard/snapshots/test_switch.ambr @@ -70,8 +70,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'HWE-KWH1', + 'model': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -152,8 +151,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'HWE-KWH3', + 'model': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -235,8 +233,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 
'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -317,8 +314,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -399,8 +395,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -482,8 +477,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -564,8 +558,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -646,8 +639,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Energy Socket', - 'model_id': 'HWE-SKT', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -728,8 +720,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi Watermeter', - 'model_id': 'HWE-WTR', + 'model': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -810,8 +801,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 1-phase', - 'model_id': 'SDM230-wifi', + 'model': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -892,8 +882,7 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'Wi-Fi kWh Meter 3-phase', - 'model_id': 'SDM630-wifi', + 'model': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/homewizard/test_config_flow.py b/tests/components/homewizard/test_config_flow.py index 442659f2aad..8d12a8a1787 100644 --- a/tests/components/homewizard/test_config_flow.py +++ b/tests/components/homewizard/test_config_flow.py @@ -341,7 +341,13 @@ async def test_reauth_flow( """Test reauth flow while API is enabled.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -361,7 +367,13 @@ async def test_reauth_error( mock_homewizardenergy.device.side_effect = DisabledError mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/homewizard/test_init.py b/tests/components/homewizard/test_init.py index a01f075ee61..969be7a604c 100644 --- a/tests/components/homewizard/test_init.py +++ b/tests/components/homewizard/test_init.py @@ -1,17 +1,17 @@ """Tests for the homewizard component.""" -from datetime import timedelta from unittest.mock import MagicMock -from freezegun.api import FrozenDateTimeFactory -from homewizard_energy.errors import DisabledError +from homewizard_energy.errors import DisabledError, HomeWizardEnergyException import pytest 
from homeassistant.components.homewizard.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry async def test_load_unload( @@ -97,36 +97,206 @@ async def test_load_removes_reauth_flow( assert len(flows) == 0 -@pytest.mark.usefixtures("mock_homewizardenergy") -async def test_disablederror_reloads_integration( +@pytest.mark.parametrize( + "exception", + [ + HomeWizardEnergyException, + Exception, + ], +) +async def test_load_handles_homewizardenergy_exception( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_homewizardenergy: MagicMock, - freezer: FrozenDateTimeFactory, + exception: Exception, ) -> None: - """Test DisabledError reloads integration.""" + """Test setup handles exception from API.""" + mock_homewizardenergy.device.side_effect = exception mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - # Make sure current state is loaded and not reauth flow is active - assert mock_config_entry.state is ConfigEntryState.LOADED - flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) - assert len(flows) == 0 + assert mock_config_entry.state in ( + ConfigEntryState.SETUP_RETRY, + ConfigEntryState.SETUP_ERROR, + ) - # Simulate DisabledError and wait for next update - mock_homewizardenergy.device.side_effect = DisabledError() - freezer.tick(timedelta(seconds=5)) - async_fire_time_changed(hass) +@pytest.mark.parametrize( + ("device_fixture", "old_unique_id", "new_unique_id"), + [ + ( + "HWE-SKT-11", + "aabbccddeeff_total_power_import_t1_kwh", + "aabbccddeeff_total_power_import_kwh", + ), + ( + "HWE-SKT-11", + "aabbccddeeff_total_power_export_t1_kwh", + "aabbccddeeff_total_power_export_kwh", + ), + ( + "HWE-SKT-21", + "aabbccddeeff_total_power_import_t1_kwh", + "aabbccddeeff_total_power_import_kwh", + ), + ( + "HWE-SKT-21", + "aabbccddeeff_total_power_export_t1_kwh", + "aabbccddeeff_total_power_export_kwh", + ), + ], +) +@pytest.mark.usefixtures("mock_homewizardenergy") +async def test_sensor_migration( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + old_unique_id: str, + new_unique_id: str, +) -> None: + """Test total power T1 sensors are migrated.""" + mock_config_entry.add_to_hass(hass) + + entity: er.RegistryEntry = entity_registry.async_get_or_create( + domain=Platform.SENSOR, + platform=DOMAIN, + unique_id=old_unique_id, + config_entry=mock_config_entry, + ) + + assert entity.unique_id == old_unique_id + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - # State should be setup retry and reauth flow should be active - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + entity_migrated = entity_registry.async_get(entity.entity_id) + assert entity_migrated + assert entity_migrated.unique_id == new_unique_id + assert entity_migrated.previous_unique_id == old_unique_id - flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) - assert len(flows) == 1 - flow = flows[0] - assert flow.get("step_id") == "reauth_confirm" - assert flow.get("handler") == DOMAIN +@pytest.mark.parametrize( + ("device_fixture", "old_unique_id", "new_unique_id"), + [ + ( + "HWE-SKT-11", + 
"aabbccddeeff_total_power_import_t1_kwh", + "aabbccddeeff_total_power_import_kwh", + ), + ( + "HWE-SKT-11", + "aabbccddeeff_total_power_export_t1_kwh", + "aabbccddeeff_total_power_export_kwh", + ), + ( + "HWE-SKT-21", + "aabbccddeeff_total_power_import_t1_kwh", + "aabbccddeeff_total_power_import_kwh", + ), + ( + "HWE-SKT-21", + "aabbccddeeff_total_power_export_t1_kwh", + "aabbccddeeff_total_power_export_kwh", + ), + ], +) +@pytest.mark.usefixtures("mock_homewizardenergy") +async def test_sensor_migration_does_not_trigger( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + old_unique_id: str, + new_unique_id: str, +) -> None: + """Test total power T1 sensors are not migrated when not possible.""" + mock_config_entry.add_to_hass(hass) + + old_entity: er.RegistryEntry = entity_registry.async_get_or_create( + domain=Platform.SENSOR, + platform=DOMAIN, + unique_id=old_unique_id, + config_entry=mock_config_entry, + ) + + new_entity: er.RegistryEntry = entity_registry.async_get_or_create( + domain=Platform.SENSOR, + platform=DOMAIN, + unique_id=new_unique_id, + config_entry=mock_config_entry, + ) + + assert old_entity.unique_id == old_unique_id + assert new_entity.unique_id == new_unique_id + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + entity = entity_registry.async_get(old_entity.entity_id) + assert entity + assert entity.unique_id == old_unique_id + assert entity.previous_unique_id is None + + entity = entity_registry.async_get(new_entity.entity_id) + assert entity + assert entity.unique_id == new_unique_id + assert entity.previous_unique_id is None + + +@pytest.mark.parametrize( + ("device_fixture", "old_unique_id", "new_unique_id"), + [ + ( + "HWE-P1", + "homewizard_G001", + "homewizard_gas_meter_G001", + ), + ( + "HWE-P1", + "homewizard_W001", + "homewizard_water_meter_W001", + ), + ( + "HWE-P1", + "homewizard_WW001", + "homewizard_warm_water_meter_WW001", + ), + ( + "HWE-P1", + "homewizard_H001", + "homewizard_heat_meter_H001", + ), + ( + "HWE-P1", + "homewizard_IH001", + "homewizard_inlet_heat_meter_IH001", + ), + ], +) +@pytest.mark.usefixtures("mock_homewizardenergy") +async def test_external_sensor_migration( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + old_unique_id: str, + new_unique_id: str, +) -> None: + """Test unique ID or External sensors are migrated.""" + mock_config_entry.add_to_hass(hass) + + entity: er.RegistryEntry = entity_registry.async_get_or_create( + domain=Platform.SENSOR, + platform=DOMAIN, + unique_id=old_unique_id, + config_entry=mock_config_entry, + ) + + assert entity.unique_id == old_unique_id + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + entity_migrated = entity_registry.async_get(entity.entity_id) + assert entity_migrated + assert entity_migrated.unique_id == new_unique_id + assert entity_migrated.previous_unique_id == old_unique_id diff --git a/tests/components/homewizard/test_sensor.py b/tests/components/homewizard/test_sensor.py index c180c2a4def..abcd6a879c5 100644 --- a/tests/components/homewizard/test_sensor.py +++ b/tests/components/homewizard/test_sensor.py @@ -7,13 +7,14 @@ from homewizard_energy.models import Data import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.components.homewizard import DOMAIN from homeassistant.components.homewizard.const import UPDATE_INTERVAL -from 
homeassistant.const import STATE_UNAVAILABLE +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed pytestmark = [ pytest.mark.usefixtures("init_integration"), @@ -814,3 +815,49 @@ async def test_entities_not_created_for_device( """Ensures entities for a specific device are not created.""" for entity_id in entity_ids: assert not hass.states.get(entity_id) + + +async def test_gas_meter_migrated( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test old gas meter sensor is migrated.""" + entity_registry.async_get_or_create( + Platform.SENSOR, + DOMAIN, + "aabbccddeeff_total_gas_m3", + ) + + await hass.config_entries.async_reload(init_integration.entry_id) + await hass.async_block_till_done() + + entity_id = "sensor.homewizard_aabbccddeeff_total_gas_m3" + + assert (entity_entry := entity_registry.async_get(entity_id)) + assert snapshot(name=f"{entity_id}:entity-registry") == entity_entry + + # Make really sure this happens + assert entity_entry.previous_unique_id == "aabbccddeeff_total_gas_m3" + + +async def test_gas_unique_id_removed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test old gas meter id sensor is removed.""" + entity_registry.async_get_or_create( + Platform.SENSOR, + DOMAIN, + "aabbccddeeff_gas_unique_id", + ) + + await hass.config_entries.async_reload(init_integration.entry_id) + await hass.async_block_till_done() + + entity_id = "sensor.homewizard_aabbccddeeff_gas_unique_id" + + assert not entity_registry.async_get(entity_id) diff --git a/tests/components/homeworks/conftest.py b/tests/components/homeworks/conftest.py index 9562063ab97..ca0e08e9215 100644 --- a/tests/components/homeworks/conftest.py +++ b/tests/components/homeworks/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Lutron Homeworks Series 4 and 8 tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.homeworks.const import ( CONF_ADDR, @@ -17,55 +17,10 @@ from homeassistant.components.homeworks.const import ( CONF_RELEASE_DELAY, DOMAIN, ) -from homeassistant.const import ( - CONF_HOST, - CONF_NAME, - CONF_PASSWORD, - CONF_PORT, - CONF_USERNAME, -) +from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT from tests.common import MockConfigEntry -CONFIG_ENTRY_OPTIONS = { - CONF_CONTROLLER_ID: "main_controller", - CONF_HOST: "192.168.0.1", - CONF_PORT: 1234, - CONF_DIMMERS: [ - { - CONF_ADDR: "[02:08:01:01]", - CONF_NAME: "Foyer Sconces", - CONF_RATE: 1.0, - } - ], - CONF_KEYPADS: [ - { - CONF_ADDR: "[02:08:02:01]", - CONF_NAME: "Foyer Keypad", - CONF_BUTTONS: [ - { - CONF_NAME: "Morning", - CONF_NUMBER: 1, - CONF_LED: True, - CONF_RELEASE_DELAY: None, - }, - { - CONF_NAME: "Relax", - CONF_NUMBER: 2, - CONF_LED: True, - CONF_RELEASE_DELAY: None, - }, - { - CONF_NAME: "Dim up", - CONF_NUMBER: 3, - CONF_LED: False, - CONF_RELEASE_DELAY: 0.2, - }, - ], - } - ], -} - @pytest.fixture def mock_config_entry() -> MockConfigEntry: @@ -73,19 +28,45 @@ def mock_config_entry() -> MockConfigEntry: return 
MockConfigEntry( title="Lutron Homeworks", domain=DOMAIN, - data={CONF_PASSWORD: None, CONF_USERNAME: None}, - options=CONFIG_ENTRY_OPTIONS, - ) - - -@pytest.fixture -def mock_config_entry_username_password() -> MockConfigEntry: - """Return the default mocked config entry with credentials.""" - return MockConfigEntry( - title="Lutron Homeworks", - domain=DOMAIN, - data={CONF_PASSWORD: "hunter2", CONF_USERNAME: "username"}, - options=CONFIG_ENTRY_OPTIONS, + data={}, + options={ + CONF_CONTROLLER_ID: "main_controller", + CONF_HOST: "192.168.0.1", + CONF_PORT: 1234, + CONF_DIMMERS: [ + { + CONF_ADDR: "[02:08:01:01]", + CONF_NAME: "Foyer Sconces", + CONF_RATE: 1.0, + } + ], + CONF_KEYPADS: [ + { + CONF_ADDR: "[02:08:02:01]", + CONF_NAME: "Foyer Keypad", + CONF_BUTTONS: [ + { + CONF_NAME: "Morning", + CONF_NUMBER: 1, + CONF_LED: True, + CONF_RELEASE_DELAY: None, + }, + { + CONF_NAME: "Relax", + CONF_NUMBER: 2, + CONF_LED: True, + CONF_RELEASE_DELAY: None, + }, + { + CONF_NAME: "Dim up", + CONF_NUMBER: 3, + CONF_LED: False, + CONF_RELEASE_DELAY: 0.2, + }, + ], + } + ], + }, ) diff --git a/tests/components/homeworks/test_binary_sensor.py b/tests/components/homeworks/test_binary_sensor.py index 4bd42cc0a59..0b21ae3b773 100644 --- a/tests/components/homeworks/test_binary_sensor.py +++ b/tests/components/homeworks/test_binary_sensor.py @@ -30,7 +30,7 @@ async def test_binary_sensor_attributes_state_update( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) hw_callback = mock_homeworks.mock_calls[0][1][2] assert entity_id in hass.states.async_entity_ids(BINARY_SENSOR_DOMAIN) diff --git a/tests/components/homeworks/test_config_flow.py b/tests/components/homeworks/test_config_flow.py index e8c4ab15b3d..8f5334b21f9 100644 --- a/tests/components/homeworks/test_config_flow.py +++ b/tests/components/homeworks/test_config_flow.py @@ -2,7 +2,6 @@ from unittest.mock import ANY, MagicMock -from pyhomeworks import exceptions as hw_exceptions import pytest from pytest_unordered import unordered @@ -17,14 +16,8 @@ from homeassistant.components.homeworks.const import ( CONF_RELEASE_DELAY, DOMAIN, ) -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import ( - CONF_HOST, - CONF_NAME, - CONF_PASSWORD, - CONF_PORT, - CONF_USERNAME, -) +from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -52,7 +45,7 @@ async def test_user_flow( ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Main controller" - assert result["data"] == {"password": None, "username": None} + assert result["data"] == {} assert result["options"] == { "controller_id": "main_controller", "dimmers": [], @@ -60,107 +53,9 @@ async def test_user_flow( "keypads": [], "port": 1234, } - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) mock_controller.close.assert_called_once_with() - mock_controller.join.assert_not_called() - - -async def test_user_flow_credentials( - hass: HomeAssistant, mock_homeworks: MagicMock, mock_setup_entry -) -> None: - """Test the user configuration flow.""" - result = await 
hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - mock_controller = MagicMock() - mock_homeworks.return_value = mock_controller - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_HOST: "192.168.0.1", - CONF_NAME: "Main controller", - CONF_PASSWORD: "hunter2", - CONF_PORT: 1234, - CONF_USERNAME: "username", - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Main controller" - assert result["data"] == {"password": "hunter2", "username": "username"} - assert result["options"] == { - "controller_id": "main_controller", - "dimmers": [], - "host": "192.168.0.1", - "keypads": [], - "port": 1234, - } - mock_homeworks.assert_called_once_with( - "192.168.0.1", 1234, ANY, "username", "hunter2" - ) - mock_controller.close.assert_called_once_with() - mock_controller.join.assert_not_called() - - -async def test_user_flow_credentials_user_only( - hass: HomeAssistant, mock_homeworks: MagicMock, mock_setup_entry -) -> None: - """Test the user configuration flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - mock_controller = MagicMock() - mock_homeworks.return_value = mock_controller - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_HOST: "192.168.0.1", - CONF_NAME: "Main controller", - CONF_PORT: 1234, - CONF_USERNAME: "username", - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Main controller" - assert result["data"] == {"password": None, "username": "username"} - assert result["options"] == { - "controller_id": "main_controller", - "dimmers": [], - "host": "192.168.0.1", - "keypads": [], - "port": 1234, - } - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, "username", None) - mock_controller.close.assert_called_once_with() - mock_controller.join.assert_not_called() - - -async def test_user_flow_credentials_password_only( - hass: HomeAssistant, mock_homeworks: MagicMock, mock_setup_entry -) -> None: - """Test the user configuration flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - mock_controller = MagicMock() - mock_homeworks.return_value = mock_controller - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_HOST: "192.168.0.1", - CONF_NAME: "Main controller", - CONF_PASSWORD: "hunter2", - CONF_PORT: 1234, - }, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "need_username_with_password"} + mock_controller.join.assert_called_once_with() async def test_user_flow_already_exists( @@ -201,12 +96,7 @@ async def test_user_flow_already_exists( @pytest.mark.parametrize( ("side_effect", "error"), - [ - (hw_exceptions.HomeworksConnectionFailed, "connection_error"), - (hw_exceptions.HomeworksInvalidCredentialsProvided, "invalid_credentials"), - (hw_exceptions.HomeworksNoCredentialsProvided, "credentials_needed"), - (Exception, "unknown_error"), - ], + [(ConnectionError, "connection_error"), (Exception, "unknown_error")], ) async def test_user_flow_cannot_connect( hass: HomeAssistant, @@ -241,7 +131,10 @@ async def test_reconfigure_flow( """Test reconfigure flow.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + 
DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" @@ -306,7 +199,10 @@ async def test_reconfigure_flow_flow_duplicate( ) entry2.add_to_hass(hass) - result = await entry1.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": entry1.entry_id}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" @@ -328,7 +224,10 @@ async def test_reconfigure_flow_flow_no_change( """Test reconfigure flow.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" @@ -367,29 +266,6 @@ async def test_reconfigure_flow_flow_no_change( } -async def test_reconfigure_flow_credentials_password_only( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_homeworks: MagicMock -) -> None: - """Test reconfigure flow.""" - mock_config_entry.add_to_hass(hass) - - result = await mock_config_entry.start_reconfigure_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_HOST: "192.168.0.2", - CONF_PASSWORD: "hunter2", - CONF_PORT: 1234, - }, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" - assert result["errors"] == {"base": "need_username_with_password"} - - async def test_options_add_light_flow( hass: HomeAssistant, mock_empty_config_entry: MockConfigEntry, @@ -556,14 +432,7 @@ async def test_options_add_remove_light_flow( ) -@pytest.mark.parametrize( - "keypad_address", - [ - "[02:08:03]", - "[02:08:03:01]", - "[02:08:03:01:00]", - ], -) +@pytest.mark.parametrize("keypad_address", ["[02:08:03:01]", "[02:08:03]"]) async def test_options_add_remove_keypad_flow( hass: HomeAssistant, mock_config_entry: MockConfigEntry, diff --git a/tests/components/homeworks/test_init.py b/tests/components/homeworks/test_init.py index 2a4bd28138e..87aabb6258f 100644 --- a/tests/components/homeworks/test_init.py +++ b/tests/components/homeworks/test_init.py @@ -2,18 +2,12 @@ from unittest.mock import ANY, MagicMock -from pyhomeworks import exceptions as hw_exceptions -from pyhomeworks.pyhomeworks import ( - HW_BUTTON_PRESSED, - HW_BUTTON_RELEASED, - HW_LOGIN_INCORRECT, -) +from pyhomeworks.pyhomeworks import HW_BUTTON_PRESSED, HW_BUTTON_RELEASED import pytest from homeassistant.components.homeworks import EVENT_BUTTON_PRESS, EVENT_BUTTON_RELEASE from homeassistant.components.homeworks.const import DOMAIN from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -31,7 +25,7 @@ async def test_load_unload_config_entry( await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.LOADED - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) await hass.config_entries.async_unload(mock_config_entry.entry_id) await 
hass.async_block_till_done() @@ -40,60 +34,13 @@ async def test_load_unload_config_entry( assert mock_config_entry.state is ConfigEntryState.NOT_LOADED -async def test_load_config_entry_with_credentials( - hass: HomeAssistant, - mock_config_entry_username_password: MockConfigEntry, - mock_homeworks: MagicMock, -) -> None: - """Test the Homeworks configuration entry loading/unloading.""" - mock_config_entry_username_password.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry_username_password.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry_username_password.state is ConfigEntryState.LOADED - mock_homeworks.assert_called_once_with( - "192.168.0.1", 1234, ANY, "username", "hunter2" - ) - - await hass.config_entries.async_unload(mock_config_entry_username_password.entry_id) - await hass.async_block_till_done() - - assert not hass.data.get(DOMAIN) - assert mock_config_entry_username_password.state is ConfigEntryState.NOT_LOADED - - -async def test_controller_credentials_changed( - hass: HomeAssistant, - mock_config_entry_username_password: MockConfigEntry, - mock_homeworks: MagicMock, -) -> None: - """Test controller credentials changed. - - Note: This just ensures we don't blow up when credentials changed, in the future a - reauth flow should be added. - """ - mock_config_entry_username_password.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry_username_password.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry_username_password.state is ConfigEntryState.LOADED - mock_homeworks.assert_called_once_with( - "192.168.0.1", 1234, ANY, "username", "hunter2" - ) - hw_callback = mock_homeworks.mock_calls[0][1][2] - - hw_callback(HW_LOGIN_INCORRECT, []) - - async def test_config_entry_not_ready( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_homeworks: MagicMock, ) -> None: """Test the Homeworks configuration entry not ready.""" - mock_homeworks.return_value.connect.side_effect = ( - hw_exceptions.HomeworksConnectionFailed - ) + mock_homeworks.side_effect = ConnectionError mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -115,7 +62,7 @@ async def test_keypad_events( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) hw_callback = mock_homeworks.mock_calls[0][1][2] hw_callback(HW_BUTTON_PRESSED, ["[02:08:02:01]", 1]) @@ -218,25 +165,3 @@ async def test_send_command( blocking=True, ) assert len(mock_controller._send.mock_calls) == 0 - - -async def test_cleanup_on_ha_shutdown( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_homeworks: MagicMock, -) -> None: - """Test cleanup when HA shuts down.""" - mock_controller = MagicMock() - mock_homeworks.return_value = mock_controller - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) - mock_controller.stop.assert_not_called() - - hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) - await hass.async_block_till_done() - - mock_controller.stop.assert_called_once_with() diff --git a/tests/components/homeworks/test_light.py b/tests/components/homeworks/test_light.py index 1cd2951128c..a5d94f736d5 100644 --- 
a/tests/components/homeworks/test_light.py +++ b/tests/components/homeworks/test_light.py @@ -35,7 +35,7 @@ async def test_light_attributes_state_update( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) hw_callback = mock_homeworks.mock_calls[0][1][2] assert len(mock_controller.request_dimmer_level.mock_calls) == 1 @@ -106,7 +106,7 @@ async def test_light_restore_brightness( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) hw_callback = mock_homeworks.mock_calls[0][1][2] assert hass.states.async_entity_ids("light") == unordered([entity_id]) diff --git a/tests/components/honeywell/conftest.py b/tests/components/honeywell/conftest.py index e48664db9ae..5c5b6c0a44a 100644 --- a/tests/components/honeywell/conftest.py +++ b/tests/components/honeywell/conftest.py @@ -86,7 +86,6 @@ def device(): mock_device.system_mode = "off" mock_device.name = "device1" mock_device.current_temperature = CURRENTTEMPERATURE - mock_device.temperature_unit = "C" mock_device.mac_address = "macaddress1" mock_device.outdoor_temperature = None mock_device.outdoor_humidity = None diff --git a/tests/components/honeywell/snapshots/test_climate.ambr b/tests/components/honeywell/snapshots/test_climate.ambr index f26064b335a..d1faf9af9a0 100644 --- a/tests/components/honeywell/snapshots/test_climate.ambr +++ b/tests/components/honeywell/snapshots/test_climate.ambr @@ -1,8 +1,9 @@ # serializer version: 1 # name: test_static_attributes ReadOnlyDict({ + 'aux_heat': 'off', 'current_humidity': 50, - 'current_temperature': 20, + 'current_temperature': -6.7, 'fan_action': 'idle', 'fan_mode': 'auto', 'fan_modes': list([ @@ -19,9 +20,9 @@ , ]), 'max_humidity': 99, - 'max_temp': 35, + 'max_temp': 1.7, 'min_humidity': 30, - 'min_temp': 7, + 'min_temp': -13.9, 'permanent_hold': False, 'preset_mode': 'none', 'preset_modes': list([ @@ -29,7 +30,7 @@ 'away', 'hold', ]), - 'supported_features': , + 'supported_features': , 'target_temp_high': None, 'target_temp_low': None, 'temperature': None, diff --git a/tests/components/honeywell/test_climate.py b/tests/components/honeywell/test_climate.py index 73c5ff33dbc..b57be5f1838 100644 --- a/tests/components/honeywell/test_climate.py +++ b/tests/components/honeywell/test_climate.py @@ -5,12 +5,12 @@ from unittest.mock import MagicMock from aiohttp import ClientConnectionError import aiosomecomfort -from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from syrupy.filters import props from homeassistant.components.climate import ( + ATTR_AUX_HEAT, ATTR_FAN_MODE, ATTR_HVAC_MODE, ATTR_PRESET_MODE, @@ -22,6 +22,7 @@ from homeassistant.components.climate import ( FAN_ON, PRESET_AWAY, PRESET_NONE, + SERVICE_SET_AUX_HEAT, SERVICE_SET_FAN_MODE, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, @@ -30,8 +31,6 @@ from homeassistant.components.climate import ( ) from homeassistant.components.honeywell.climate import ( DOMAIN, - MODE_PERMANENT_HOLD, - MODE_TEMPORARY_HOLD, PRESET_HOLD, RETRY, SCAN_INTERVAL, @@ -41,6 +40,7 @@ from homeassistant.const import ( ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, + STATE_UNAVAILABLE, Platform, ) from homeassistant.core 
import HomeAssistant @@ -92,13 +92,14 @@ async def test_dynamic_attributes( hass: HomeAssistant, device: MagicMock, config_entry: MagicMock ) -> None: """Test dynamic attributes.""" + await init_integration(hass, config_entry) entity_id = f"climate.{device.name}" state = hass.states.get(entity_id) assert state.state == HVACMode.OFF attributes = state.attributes - assert attributes["current_temperature"] == 20 + assert attributes["current_temperature"] == -6.7 assert attributes["current_humidity"] == 50 device.system_mode = "cool" @@ -113,7 +114,7 @@ async def test_dynamic_attributes( state = hass.states.get(entity_id) assert state.state == HVACMode.COOL attributes = state.attributes - assert attributes["current_temperature"] == 21 + assert attributes["current_temperature"] == -6.1 assert attributes["current_humidity"] == 55 device.system_mode = "heat" @@ -128,7 +129,7 @@ async def test_dynamic_attributes( state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT attributes = state.attributes - assert attributes["current_temperature"] == 61 + assert attributes["current_temperature"] == 16.1 assert attributes["current_humidity"] == 50 device.system_mode = "auto" @@ -141,7 +142,7 @@ async def test_dynamic_attributes( state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT_COOL attributes = state.attributes - assert attributes["current_temperature"] == 61 + assert attributes["current_temperature"] == 16.1 assert attributes["current_humidity"] == 50 @@ -221,6 +222,53 @@ async def test_mode_service_calls( ) +async def test_auxheat_service_calls( + hass: HomeAssistant, device: MagicMock, config_entry: MagicMock +) -> None: + """Test controlling the auxheat through service calls.""" + await init_integration(hass, config_entry) + entity_id = f"climate.{device.name}" + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_AUX_HEAT, + {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: True}, + blocking=True, + ) + device.set_system_mode.assert_called_once_with("emheat") + + device.set_system_mode.reset_mock() + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_AUX_HEAT, + {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: False}, + blocking=True, + ) + device.set_system_mode.assert_called_once_with("heat") + + device.set_system_mode.reset_mock() + device.set_system_mode.side_effect = aiosomecomfort.SomeComfortError + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_AUX_HEAT, + {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: True}, + blocking=True, + ) + device.set_system_mode.assert_called_once_with("emheat") + + device.set_system_mode.reset_mock() + device.set_system_mode.side_effect = aiosomecomfort.SomeComfortError + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_AUX_HEAT, + {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: False}, + blocking=True, + ) + + async def test_fan_modes_service_calls( hass: HomeAssistant, device: MagicMock, config_entry: MagicMock ) -> None: @@ -300,7 +348,7 @@ async def test_service_calls_off_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 35}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, blocking=True, ) @@ -314,8 +362,8 @@ async def test_service_calls_off_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(35) - device.set_setpoint_heat.assert_called_with(25) + device.set_setpoint_cool.assert_called_with(95) + 
device.set_setpoint_heat.assert_called_with(77) device.set_setpoint_heat.reset_mock() device.set_setpoint_heat.side_effect = aiosomecomfort.SomeComfortError @@ -327,13 +375,13 @@ async def test_service_calls_off_mode( SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: entity_id, - ATTR_TARGET_TEMP_LOW: 24.0, - ATTR_TARGET_TEMP_HIGH: 34.0, + ATTR_TARGET_TEMP_LOW: 25.0, + ATTR_TARGET_TEMP_HIGH: 35.0, }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(34) - device.set_setpoint_heat.assert_called_with(24) + device.set_setpoint_cool.assert_called_with(95) + device.set_setpoint_heat.assert_called_with(77) assert "Invalid temperature" in caplog.text device.set_setpoint_heat.reset_mock() @@ -351,14 +399,14 @@ async def test_service_calls_off_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(35) - device.set_setpoint_heat.assert_called_with(25) + device.set_setpoint_cool.assert_called_with(95) + device.set_setpoint_heat.assert_called_with(77) reset_mock(device) await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 35}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, blocking=True, ) device.set_setpoint_heat.assert_not_called() @@ -469,7 +517,7 @@ async def test_service_calls_cool_mode( {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, blocking=True, ) - device.set_hold_cool.assert_called_once_with(datetime.time(2, 30), 15) + device.set_hold_cool.assert_called_once_with(datetime.time(2, 30), 59) device.set_hold_cool.reset_mock() await hass.services.async_call( @@ -477,13 +525,13 @@ async def test_service_calls_cool_mode( SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: entity_id, - ATTR_TARGET_TEMP_LOW: 15.0, - ATTR_TARGET_TEMP_HIGH: 20.0, + ATTR_TARGET_TEMP_LOW: 25.0, + ATTR_TARGET_TEMP_HIGH: 35.0, }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(20) - device.set_setpoint_heat.assert_called_with(15) + device.set_setpoint_cool.assert_called_with(95) + device.set_setpoint_heat.assert_called_with(77) caplog.clear() device.set_setpoint_cool.reset_mock() @@ -495,13 +543,13 @@ async def test_service_calls_cool_mode( SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: entity_id, - ATTR_TARGET_TEMP_LOW: 15.0, - ATTR_TARGET_TEMP_HIGH: 20.0, + ATTR_TARGET_TEMP_LOW: 25.0, + ATTR_TARGET_TEMP_HIGH: 35.0, }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(20) - device.set_setpoint_heat.assert_called_with(15) + device.set_setpoint_cool.assert_called_with(95) + device.set_setpoint_heat.assert_called_with(77) assert "Invalid temperature" in caplog.text reset_mock(device) @@ -685,10 +733,10 @@ async def test_service_calls_heat_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 25}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, blocking=True, ) - device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 25) + device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 59) device.set_hold_heat.reset_mock() device.set_hold_heat.side_effect = aiosomecomfort.SomeComfortError @@ -696,10 +744,10 @@ async def test_service_calls_heat_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 25}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, blocking=True, ) - device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 25) + device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 59) device.set_hold_heat.reset_mock() 
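# ---------------------------------------------------------------------------
# Illustrative sketch, not taken from this patch: with the Celsius override
# removed from the Honeywell conftest, the mocked device reports Fahrenheit, so
# the expected state attributes are Celsius conversions of the raw values
# (20°F -> -6.7°C, 61°F -> 16.1°C) while setpoints sent to the device stay in
# Fahrenheit (35°C -> 95°F, 25°C -> 77°F, 15°C -> 59°F). The helper names below
# are hypothetical; Home Assistant performs this conversion internally with its
# own unit utilities.
# ---------------------------------------------------------------------------
def fahrenheit_to_celsius(value_f: float) -> float:
    """Convert degrees F to degrees C, rounded like the state attributes."""
    return round((value_f - 32) * 5 / 9, 1)


def celsius_to_fahrenheit(value_c: float) -> int:
    """Convert degrees C to the whole degrees F the device call expects."""
    return round(value_c * 9 / 5 + 32)


def test_expected_unit_conversions() -> None:
    """Spot-check the figures used in the updated expectations."""
    assert fahrenheit_to_celsius(20) == -6.7
    assert fahrenheit_to_celsius(61) == 16.1
    assert celsius_to_fahrenheit(35) == 95
    assert celsius_to_fahrenheit(25) == 77
    assert celsius_to_fahrenheit(15) == 59
# ---------------------------------------------------------------------------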
assert "Invalid temperature" in caplog.text @@ -708,10 +756,10 @@ async def test_service_calls_heat_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 25}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, blocking=True, ) - device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 25) + device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 59) device.set_hold_heat.reset_mock() caplog.clear() @@ -725,8 +773,8 @@ async def test_service_calls_heat_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(35) - device.set_setpoint_heat.assert_called_with(25) + device.set_setpoint_cool.assert_called_with(95) + device.set_setpoint_heat.assert_called_with(77) device.set_setpoint_heat.reset_mock() device.set_setpoint_heat.side_effect = aiosomecomfort.SomeComfortError @@ -741,8 +789,8 @@ async def test_service_calls_heat_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(35) - device.set_setpoint_heat.assert_called_with(25) + device.set_setpoint_cool.assert_called_with(95) + device.set_setpoint_heat.assert_called_with(77) assert "Invalid temperature" in caplog.text reset_mock(device) @@ -936,8 +984,8 @@ async def test_service_calls_auto_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_once_with(35) - device.set_setpoint_heat.assert_called_once_with(25) + device.set_setpoint_cool.assert_called_once_with(95) + device.set_setpoint_heat.assert_called_once_with(77) reset_mock(device) caplog.clear() @@ -1193,6 +1241,37 @@ async def test_async_update_errors( assert state.state == "unavailable" +async def test_aux_heat_off_service_call( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device: MagicMock, + config_entry: MagicMock, +) -> None: + """Test aux heat off turns of system when no heat configured.""" + device.raw_ui_data["SwitchHeatAllowed"] = False + device.raw_ui_data["SwitchAutoAllowed"] = False + device.raw_ui_data["SwitchEmergencyHeatAllowed"] = True + + await init_integration(hass, config_entry) + + entity_id = f"climate.{device.name}" + entry = entity_registry.async_get(entity_id) + assert entry + + state = hass.states.get(entity_id) + assert state is not None + assert state.state != STATE_UNAVAILABLE + assert state.state == HVACMode.OFF + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_AUX_HEAT, + {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: False}, + blocking=True, + ) + device.set_system_mode.assert_called_once_with("off") + + async def test_unique_id( hass: HomeAssistant, device: MagicMock, @@ -1210,59 +1289,3 @@ async def test_unique_id( await init_integration(hass, config_entry) entity_entry = entity_registry.async_get(f"climate.{device.name}") assert entity_entry.unique_id == str(device.deviceid) - - -async def test_preset_mode( - hass: HomeAssistant, - device: MagicMock, - config_entry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test mode settings properly reflected.""" - await init_integration(hass, config_entry) - entity_id = f"climate.{device.name}" - - device.raw_ui_data["StatusHeat"] = 3 - device.raw_ui_data["StatusCool"] = 3 - - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - device.raw_ui_data["StatusHeat"] = MODE_TEMPORARY_HOLD - device.raw_ui_data["StatusCool"] = MODE_TEMPORARY_HOLD - - freezer.tick(SCAN_INTERVAL) - 
async_fire_time_changed(hass) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_HOLD - - device.raw_ui_data["StatusHeat"] = MODE_PERMANENT_HOLD - device.raw_ui_data["StatusCool"] = MODE_PERMANENT_HOLD - - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_HOLD - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, - blocking=True, - ) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY - - device.raw_ui_data["StatusHeat"] = 3 - device.raw_ui_data["StatusCool"] = 3 - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE diff --git a/tests/components/honeywell/test_config_flow.py b/tests/components/honeywell/test_config_flow.py index ed9c86f5e10..7cd987f0d83 100644 --- a/tests/components/honeywell/test_config_flow.py +++ b/tests/components/honeywell/test_config_flow.py @@ -10,7 +10,7 @@ from homeassistant.components.honeywell.const import ( CONF_HEAT_AWAY_TEMPERATURE, DOMAIN, ) -from homeassistant.config_entries import SOURCE_USER, ConfigEntryState +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, ConfigEntryState from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -129,7 +129,21 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: unique_id="test-username", ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + with patch( + "homeassistant.components.honeywell.async_setup_entry", + return_value=True, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data={CONF_USERNAME: "test-username", CONF_PASSWORD: "new-password"}, + ) + + await hass.async_block_till_done() assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -163,7 +177,16 @@ async def test_reauth_flow_auth_error(hass: HomeAssistant, client: MagicMock) -> ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data={CONF_USERNAME: "test-username", CONF_PASSWORD: "new-password"}, + ) + await hass.async_block_till_done() assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -203,7 +226,17 @@ async def test_reauth_flow_connnection_error( unique_id="test-username", ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data={CONF_USERNAME: "test-username", CONF_PASSWORD: "new-password"}, + ) + await hass.async_block_till_done() + assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git 
a/tests/components/honeywell/test_switch.py b/tests/components/honeywell/test_switch.py index 482b9837b93..73052871ef1 100644 --- a/tests/components/honeywell/test_switch.py +++ b/tests/components/honeywell/test_switch.py @@ -30,10 +30,29 @@ async def test_emheat_switch( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - device.set_system_mode.assert_called_once_with("emheat") + device.set_system_mode.assert_not_called() device.set_system_mode.reset_mock() + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + device.set_system_mode.assert_not_called() + + device.system_mode = "heat" + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + device.set_system_mode.assert_called_once_with("emheat") + + device.set_system_mode.reset_mock() device.system_mode = "emheat" await hass.services.async_call( SWITCH_DOMAIN, diff --git a/tests/components/html5/test_config_flow.py b/tests/components/html5/test_config_flow.py deleted file mode 100644 index ca0b3da0389..00000000000 --- a/tests/components/html5/test_config_flow.py +++ /dev/null @@ -1,203 +0,0 @@ -"""Test the HTML5 config flow.""" - -from unittest.mock import patch - -import pytest - -from homeassistant import config_entries, data_entry_flow -from homeassistant.components.html5.const import ( - ATTR_VAPID_EMAIL, - ATTR_VAPID_PRV_KEY, - ATTR_VAPID_PUB_KEY, - DOMAIN, -) -from homeassistant.components.html5.issues import ( - FAILED_IMPORT_TRANSLATION_KEY, - SUCCESSFUL_IMPORT_TRANSLATION_KEY, -) -from homeassistant.const import CONF_NAME -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -import homeassistant.helpers.issue_registry as ir - -MOCK_CONF = { - ATTR_VAPID_EMAIL: "test@example.com", - ATTR_VAPID_PRV_KEY: "h6acSRds8_KR8hT9djD8WucTL06Gfe29XXyZ1KcUjN8", -} -MOCK_CONF_PUB_KEY = "BIUtPN7Rq_8U7RBEqClZrfZ5dR9zPCfvxYPtLpWtRVZTJEc7lzv2dhzDU6Aw1m29Ao0-UA1Uq6XO9Df8KALBKqA" - - -async def test_step_user_success(hass: HomeAssistant) -> None: - """Test a successful user config flow.""" - - with patch( - "homeassistant.components.html5.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_USER}, - data=MOCK_CONF.copy(), - ) - - await hass.async_block_till_done() - - assert result["type"] is data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["data"] == { - ATTR_VAPID_PRV_KEY: MOCK_CONF[ATTR_VAPID_PRV_KEY], - ATTR_VAPID_PUB_KEY: MOCK_CONF_PUB_KEY, - ATTR_VAPID_EMAIL: MOCK_CONF[ATTR_VAPID_EMAIL], - CONF_NAME: DOMAIN, - } - - assert mock_setup_entry.call_count == 1 - - -async def test_step_user_success_generate(hass: HomeAssistant) -> None: - """Test a successful user config flow, generating a key pair.""" - - with patch( - "homeassistant.components.html5.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - conf = {ATTR_VAPID_EMAIL: MOCK_CONF[ATTR_VAPID_EMAIL]} - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER}, data=conf - ) - - await hass.async_block_till_done() - - assert result["type"] is data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["data"][ATTR_VAPID_EMAIL] == MOCK_CONF[ATTR_VAPID_EMAIL] - - assert mock_setup_entry.call_count == 1 - - -async def test_step_user_new_form(hass: HomeAssistant) -> None: - """Test new user input.""" - - with patch( - 
"homeassistant.components.html5.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER}, data=None - ) - - await hass.async_block_till_done() - - assert result["type"] is data_entry_flow.FlowResultType.FORM - assert mock_setup_entry.call_count == 0 - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], MOCK_CONF - ) - assert result["type"] is data_entry_flow.FlowResultType.CREATE_ENTRY - assert mock_setup_entry.call_count == 1 - - -@pytest.mark.parametrize( - ("key", "value"), - [ - (ATTR_VAPID_PRV_KEY, "invalid"), - ], -) -async def test_step_user_form_invalid_key( - hass: HomeAssistant, key: str, value: str -) -> None: - """Test invalid user input.""" - - with patch( - "homeassistant.components.html5.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - bad_conf = MOCK_CONF.copy() - bad_conf[key] = value - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER}, data=bad_conf - ) - - await hass.async_block_till_done() - - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert mock_setup_entry.call_count == 0 - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], MOCK_CONF - ) - assert result["type"] is data_entry_flow.FlowResultType.CREATE_ENTRY - assert mock_setup_entry.call_count == 1 - - -async def test_step_import_good( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test valid import input.""" - - with ( - patch( - "homeassistant.components.html5.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - conf = MOCK_CONF.copy() - conf[ATTR_VAPID_PUB_KEY] = MOCK_CONF_PUB_KEY - conf["random_key"] = "random_value" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=conf - ) - - await hass.async_block_till_done() - - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["data"] == { - ATTR_VAPID_PRV_KEY: conf[ATTR_VAPID_PRV_KEY], - ATTR_VAPID_PUB_KEY: MOCK_CONF_PUB_KEY, - ATTR_VAPID_EMAIL: conf[ATTR_VAPID_EMAIL], - CONF_NAME: DOMAIN, - } - - assert mock_setup_entry.call_count == 1 - assert len(issue_registry.issues) == 1 - issue = issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" - ) - assert issue - assert issue.translation_key == SUCCESSFUL_IMPORT_TRANSLATION_KEY - - -@pytest.mark.parametrize( - ("key", "value"), - [ - (ATTR_VAPID_PRV_KEY, "invalid"), - ], -) -async def test_step_import_bad( - hass: HomeAssistant, issue_registry: ir.IssueRegistry, key: str, value: str -) -> None: - """Test invalid import input.""" - - with ( - patch( - "homeassistant.components.html5.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - bad_conf = MOCK_CONF.copy() - bad_conf[key] = value - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=bad_conf - ) - - await hass.async_block_till_done() - - assert result["type"] == data_entry_flow.FlowResultType.ABORT - assert mock_setup_entry.call_count == 0 - - assert len(issue_registry.issues) == 1 - issue = issue_registry.async_get_issue(DOMAIN, f"deprecated_yaml_{DOMAIN}") - assert issue - assert issue.translation_key == FAILED_IMPORT_TRANSLATION_KEY diff --git a/tests/components/html5/test_init.py b/tests/components/html5/test_init.py deleted 
file mode 100644 index 290cb381296..00000000000 --- a/tests/components/html5/test_init.py +++ /dev/null @@ -1,44 +0,0 @@ -"""Test the HTML5 setup.""" - -from homeassistant.core import HomeAssistant -import homeassistant.helpers.issue_registry as ir -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry - -NOTIFY_CONF = { - "notify": [ - { - "platform": "html5", - "name": "html5", - "vapid_pub_key": "BIUtPN7Rq_8U7RBEqClZrfZ5dR9zPCfvxYPtLpWtRVZTJEc7lzv2dhzDU6Aw1m29Ao0-UA1Uq6XO9Df8KALBKqA", - "vapid_prv_key": "h6acSRds8_KR8hT9djD8WucTL06Gfe29XXyZ1KcUjN8", - "vapid_email": "test@example.com", - } - ] -} - - -async def test_setup_entry( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test setup of a good config entry.""" - config_entry = MockConfigEntry(domain="html5", data={}) - config_entry.add_to_hass(hass) - assert await async_setup_component(hass, "html5", {}) - - assert len(issue_registry.issues) == 0 - - -async def test_setup_entry_issue( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test setup of an imported config entry with deprecated YAML.""" - config_entry = MockConfigEntry(domain="html5", data={}) - config_entry.add_to_hass(hass) - assert await async_setup_component(hass, "notify", NOTIFY_CONF) - assert await async_setup_component(hass, "html5", NOTIFY_CONF) - - assert len(issue_registry.issues) == 1 diff --git a/tests/components/html5/test_notify.py b/tests/components/html5/test_notify.py index 0d9388907a9..f54ec9fa8f7 100644 --- a/tests/components/html5/test_notify.py +++ b/tests/components/html5/test_notify.py @@ -2,11 +2,9 @@ from http import HTTPStatus import json -from typing import Any from unittest.mock import mock_open, patch from aiohttp.hdrs import AUTHORIZATION -from aiohttp.test_utils import TestClient import homeassistant.components.html5.notify as html5 from homeassistant.core import HomeAssistant @@ -71,11 +69,7 @@ REGISTER_URL = "/api/notify.html5" PUBLISH_URL = "/api/notify.html5/callback" -async def mock_client( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - registrations: dict[str, Any] | None = None, -) -> TestClient: +async def mock_client(hass, hass_client, registrations=None): """Create a test client for HTML5 views.""" if registrations is None: registrations = {} @@ -94,7 +88,7 @@ async def test_get_service_with_no_json(hass: HomeAssistant) -> None: await async_setup_component(hass, "http", {}) m = mock_open() with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, {}, VAPID_CONF) + service = await html5.async_get_service(hass, VAPID_CONF) assert service is not None @@ -109,7 +103,7 @@ async def test_dismissing_message(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, {}, VAPID_CONF) + service = await html5.async_get_service(hass, VAPID_CONF) service.hass = hass assert service is not None @@ -138,7 +132,7 @@ async def test_sending_message(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, {}, VAPID_CONF) + service = await html5.async_get_service(hass, VAPID_CONF) service.hass = hass assert service is not None @@ -169,7 +163,7 @@ async def test_fcm_key_include(mock_wp, hass: HomeAssistant) -> None: m = 
mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, {}, VAPID_CONF) + service = await html5.async_get_service(hass, VAPID_CONF) service.hass = hass assert service is not None @@ -194,7 +188,7 @@ async def test_fcm_send_with_unknown_priority(mock_wp, hass: HomeAssistant) -> N m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, {}, VAPID_CONF) + service = await html5.async_get_service(hass, VAPID_CONF) service.hass = hass assert service is not None @@ -219,7 +213,7 @@ async def test_fcm_no_targets(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, {}, VAPID_CONF) + service = await html5.async_get_service(hass, VAPID_CONF) service.hass = hass assert service is not None @@ -244,7 +238,7 @@ async def test_fcm_additional_data(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, {}, VAPID_CONF) + service = await html5.async_get_service(hass, VAPID_CONF) service.hass = hass assert service is not None @@ -479,7 +473,7 @@ async def test_callback_view_with_jwt( mock_wp().send().status_code = 201 await hass.services.async_call( "notify", - "html5", + "notify", {"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}}, blocking=True, ) @@ -495,7 +489,7 @@ async def test_callback_view_with_jwt( assert push_payload["body"] == "Hello" assert push_payload["icon"] == "beer.png" - bearer_token = f"Bearer {push_payload['data']['jwt']}" + bearer_token = "Bearer {}".format(push_payload["data"]["jwt"]) resp = await client.post( PUBLISH_URL, json={"type": "push"}, headers={AUTHORIZATION: bearer_token} @@ -516,7 +510,7 @@ async def test_send_fcm_without_targets( mock_wp().send().status_code = 201 await hass.services.async_call( "notify", - "html5", + "notify", {"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}}, blocking=True, ) @@ -541,7 +535,7 @@ async def test_send_fcm_expired( mock_wp().send().status_code = 410 await hass.services.async_call( "notify", - "html5", + "notify", {"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}}, blocking=True, ) @@ -566,7 +560,7 @@ async def test_send_fcm_expired_save_fails( mock_wp().send().status_code = 410 await hass.services.async_call( "notify", - "html5", + "notify", {"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}}, blocking=True, ) diff --git a/tests/components/http/test_auth.py b/tests/components/http/test_auth.py index 052c0031469..20dfe0a3710 100644 --- a/tests/components/http/test_auth.py +++ b/tests/components/http/test_auth.py @@ -4,7 +4,6 @@ from datetime import timedelta from http import HTTPStatus from ipaddress import ip_network import logging -from typing import Any from unittest.mock import Mock, patch from aiohttp import BasicAuth, web @@ -64,7 +63,7 @@ PRIVATE_ADDRESSES = [ ] -async def mock_handler(request: web.Request) -> web.Response: +async def mock_handler(request): """Return if request was authenticated.""" if not request[KEY_AUTHENTICATED]: raise HTTPUnauthorized @@ -76,7 +75,7 @@ async def mock_handler(request: web.Request) -> web.Response: @pytest.fixture -def app(hass: HomeAssistant) -> web.Application: +def 
app(hass): """Fixture to set up a web.Application.""" app = web.Application() app[KEY_HASS] = hass @@ -86,7 +85,7 @@ def app(hass: HomeAssistant) -> web.Application: @pytest.fixture -def app2(hass: HomeAssistant) -> web.Application: +def app2(hass): """Fixture to set up a web.Application without real_ip middleware.""" app = web.Application() app[KEY_HASS] = hass @@ -95,9 +94,7 @@ def app2(hass: HomeAssistant) -> web.Application: @pytest.fixture -def trusted_networks_auth( - hass: HomeAssistant, -) -> trusted_networks.TrustedNetworksAuthProvider: +def trusted_networks_auth(hass): """Load trusted networks auth provider.""" prv = trusted_networks.TrustedNetworksAuthProvider( hass, @@ -117,7 +114,7 @@ async def test_auth_middleware_loaded_by_default(hass: HomeAssistant) -> None: async def test_cant_access_with_password_in_header( - app: web.Application, + app, aiohttp_client: ClientSessionGenerator, local_auth: HassAuthProvider, hass: HomeAssistant, @@ -134,7 +131,7 @@ async def test_cant_access_with_password_in_header( async def test_cant_access_with_password_in_query( - app: web.Application, + app, aiohttp_client: ClientSessionGenerator, local_auth: HassAuthProvider, hass: HomeAssistant, @@ -154,7 +151,7 @@ async def test_cant_access_with_password_in_query( async def test_basic_auth_does_not_work( - app: web.Application, + app, aiohttp_client: ClientSessionGenerator, hass: HomeAssistant, local_auth: HassAuthProvider, @@ -178,8 +175,8 @@ async def test_basic_auth_does_not_work( async def test_cannot_access_with_trusted_ip( hass: HomeAssistant, - app2: web.Application, - trusted_networks_auth: trusted_networks.TrustedNetworksAuthProvider, + app2, + trusted_networks_auth, aiohttp_client: ClientSessionGenerator, hass_owner_user: MockUser, ) -> None: @@ -206,7 +203,7 @@ async def test_cannot_access_with_trusted_ip( async def test_auth_active_access_with_access_token_in_header( hass: HomeAssistant, - app: web.Application, + app, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -242,8 +239,8 @@ async def test_auth_active_access_with_access_token_in_header( async def test_auth_active_access_with_trusted_ip( hass: HomeAssistant, - app2: web.Application, - trusted_networks_auth: trusted_networks.TrustedNetworksAuthProvider, + app2, + trusted_networks_auth, aiohttp_client: ClientSessionGenerator, hass_owner_user: MockUser, ) -> None: @@ -269,7 +266,7 @@ async def test_auth_active_access_with_trusted_ip( async def test_auth_legacy_support_api_password_cannot_access( - app: web.Application, + app, aiohttp_client: ClientSessionGenerator, local_auth: HassAuthProvider, hass: HomeAssistant, @@ -290,7 +287,7 @@ async def test_auth_legacy_support_api_password_cannot_access( async def test_auth_access_signed_path_with_refresh_token( hass: HomeAssistant, - app: web.Application, + app, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -312,7 +309,7 @@ async def test_auth_access_signed_path_with_refresh_token( assert data["user_id"] == refresh_token.user.id # Use signature on other path - req = await client.get(f"/another_path?{signed_path.split('?')[1]}") + req = await client.get("/another_path?{}".format(signed_path.split("?")[1])) assert req.status == HTTPStatus.UNAUTHORIZED # We only allow GET @@ -335,7 +332,7 @@ async def test_auth_access_signed_path_with_refresh_token( async def test_auth_access_signed_path_with_query_param( hass: HomeAssistant, - app: web.Application, + app, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ 
-365,7 +362,7 @@ async def test_auth_access_signed_path_with_query_param( async def test_auth_access_signed_path_with_query_param_order( hass: HomeAssistant, - app: web.Application, + app, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -406,7 +403,7 @@ async def test_auth_access_signed_path_with_query_param_order( async def test_auth_access_signed_path_with_query_param_safe_param( hass: HomeAssistant, - app: web.Application, + app, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -443,7 +440,7 @@ async def test_auth_access_signed_path_with_query_param_safe_param( ) async def test_auth_access_signed_path_with_query_param_tamper( hass: HomeAssistant, - app: web.Application, + app, aiohttp_client: ClientSessionGenerator, hass_access_token: str, base_url: str, @@ -469,7 +466,7 @@ async def test_auth_access_signed_path_with_query_param_tamper( async def test_auth_access_signed_path_via_websocket( hass: HomeAssistant, - app: web.Application, + app, hass_ws_client: WebSocketGenerator, hass_read_only_access_token: str, ) -> None: @@ -477,11 +474,7 @@ async def test_auth_access_signed_path_via_websocket( @websocket_api.websocket_command({"type": "diagnostics/list"}) @callback - def get_signed_path( - hass: HomeAssistant, - connection: websocket_api.ActiveConnection, - msg: dict[str, Any], - ) -> None: + def get_signed_path(hass, connection, msg): connection.send_result( msg["id"], {"path": async_sign_path(hass, "/", timedelta(seconds=5))} ) @@ -511,7 +504,7 @@ async def test_auth_access_signed_path_via_websocket( async def test_auth_access_signed_path_with_http( hass: HomeAssistant, - app: web.Application, + app, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -546,7 +539,7 @@ async def test_auth_access_signed_path_with_http( async def test_auth_access_signed_path_with_content_user( - hass: HomeAssistant, app: web.Application, aiohttp_client: ClientSessionGenerator + hass: HomeAssistant, app, aiohttp_client: ClientSessionGenerator ) -> None: """Test access signed url uses content user.""" await async_setup_auth(hass, app) @@ -563,7 +556,7 @@ async def test_auth_access_signed_path_with_content_user( async def test_local_only_user_rejected( hass: HomeAssistant, - app: web.Application, + app, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -586,9 +579,7 @@ async def test_local_only_user_rejected( assert req.status == HTTPStatus.UNAUTHORIZED -async def test_async_user_not_allowed_do_auth( - hass: HomeAssistant, app: web.Application -) -> None: +async def test_async_user_not_allowed_do_auth(hass: HomeAssistant, app) -> None: """Test for not allowing auth.""" user = await hass.auth.async_create_user("Hello") user.is_active = False diff --git a/tests/components/http/test_ban.py b/tests/components/http/test_ban.py index 59011de0cfd..41f36dad2df 100644 --- a/tests/components/http/test_ban.py +++ b/tests/components/http/test_ban.py @@ -3,7 +3,7 @@ from http import HTTPStatus from ipaddress import ip_address import os -from unittest.mock import AsyncMock, Mock, mock_open, patch +from unittest.mock import Mock, mock_open, patch from aiohttp import web from aiohttp.web_exceptions import HTTPUnauthorized @@ -34,10 +34,14 @@ BANNED_IPS_WITH_SUPERVISOR = [*BANNED_IPS, SUPERVISOR_IP] @pytest.fixture(name="hassio_env") -def hassio_env_fixture(supervisor_is_connected: AsyncMock): +def hassio_env_fixture(): """Fixture to inject hassio env.""" with ( patch.dict(os.environ, {"SUPERVISOR": 
"127.0.0.1"}), + patch( + "homeassistant.components.hassio.HassIO.is_connected", + return_value={"result": "ok", "data": {}}, + ), patch.dict(os.environ, {"SUPERVISOR_TOKEN": "123456"}), ): yield @@ -197,7 +201,6 @@ async def test_access_from_supervisor_ip( hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, hassio_env, - resolution_info: AsyncMock, ) -> None: """Test accessing to server from supervisor IP.""" app = web.Application() @@ -219,7 +222,17 @@ async def test_access_from_supervisor_ip( manager = app[KEY_BAN_MANAGER] - assert await async_setup_component(hass, "hassio", {"hassio": {}}) + with patch( + "homeassistant.components.hassio.HassIO.get_resolution_info", + return_value={ + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + ): + assert await async_setup_component(hass, "hassio", {"hassio": {}}) m_open = mock_open() diff --git a/tests/components/http/test_cors.py b/tests/components/http/test_cors.py index c0256abb25d..1188131cc0f 100644 --- a/tests/components/http/test_cors.py +++ b/tests/components/http/test_cors.py @@ -119,7 +119,7 @@ async def test_cors_middleware_with_cors_allowed_view(hass: HomeAssistant) -> No requires_auth = False cors_allowed = True - def __init__(self, url, name) -> None: + def __init__(self, url, name): """Initialize test view.""" self.url = url self.name = name diff --git a/tests/components/http/test_init.py b/tests/components/http/test_init.py index 4d96f2267fa..2895209b5f9 100644 --- a/tests/components/http/test_init.py +++ b/tests/components/http/test_init.py @@ -12,10 +12,8 @@ from unittest.mock import Mock, patch import pytest from homeassistant.auth.providers.homeassistant import HassAuthProvider -from homeassistant.components import cloud, http -from homeassistant.components.cloud import CloudNotAvailable +from homeassistant.components import http from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.http import KEY_HASS from homeassistant.helpers.network import NoURLAvailableError from homeassistant.setup import async_setup_component @@ -547,150 +545,3 @@ async def test_register_static_paths( "event loop, instead call " "`await hass.http.async_register_static_paths" ) in caplog.text - - -async def test_ssl_issue_if_no_urls_configured( - hass: HomeAssistant, - tmp_path: Path, - issue_registry: ir.IssueRegistry, -) -> None: - """Test raising SSL issue if no external or internal URL is configured.""" - - assert hass.config.external_url is None - assert hass.config.internal_url is None - - cert_path, key_path, _ = await hass.async_add_executor_job( - _setup_empty_ssl_pem_files, tmp_path - ) - - with ( - patch("ssl.SSLContext.load_cert_chain"), - patch( - "homeassistant.util.ssl.server_context_modern", - side_effect=server_context_modern, - ), - ): - assert await async_setup_component( - hass, - "http", - {"http": {"ssl_certificate": cert_path, "ssl_key": key_path}}, - ) - await hass.async_start() - await hass.async_block_till_done() - - assert ("http", "ssl_configured_without_configured_urls") in issue_registry.issues - - -async def test_ssl_issue_if_using_cloud( - hass: HomeAssistant, - tmp_path: Path, - issue_registry: ir.IssueRegistry, -) -> None: - """Test raising no SSL issue if not right configured but using cloud.""" - assert hass.config.external_url is None - assert hass.config.internal_url is None - - cert_path, key_path, _ = await hass.async_add_executor_job( - _setup_empty_ssl_pem_files, tmp_path - ) - - 
with ( - patch("ssl.SSLContext.load_cert_chain"), - patch.object(cloud, "async_remote_ui_url", return_value="https://example.com"), - patch( - "homeassistant.util.ssl.server_context_modern", - side_effect=server_context_modern, - ), - ): - assert await async_setup_component( - hass, - "http", - {"http": {"ssl_certificate": cert_path, "ssl_key": key_path}}, - ) - await hass.async_start() - await hass.async_block_till_done() - - assert ( - "http", - "ssl_configured_without_configured_urls", - ) not in issue_registry.issues - - -async def test_ssl_issue_if_not_connected_to_cloud( - hass: HomeAssistant, - tmp_path: Path, - issue_registry: ir.IssueRegistry, -) -> None: - """Test raising no SSL issue if not right configured and not connected to cloud.""" - assert hass.config.external_url is None - assert hass.config.internal_url is None - - cert_path, key_path, _ = await hass.async_add_executor_job( - _setup_empty_ssl_pem_files, tmp_path - ) - - with ( - patch("ssl.SSLContext.load_cert_chain"), - patch( - "homeassistant.util.ssl.server_context_modern", - side_effect=server_context_modern, - ), - patch( - "homeassistant.components.cloud.async_remote_ui_url", - side_effect=CloudNotAvailable, - ), - ): - assert await async_setup_component( - hass, - "http", - {"http": {"ssl_certificate": cert_path, "ssl_key": key_path}}, - ) - await hass.async_start() - await hass.async_block_till_done() - - assert ("http", "ssl_configured_without_configured_urls") in issue_registry.issues - - -@pytest.mark.parametrize( - ("external_url", "internal_url"), - [ - ("https://example.com", "https://example.local"), - (None, "http://example.local"), - ("https://example.com", None), - ], -) -async def test_ssl_issue_urls_configured( - hass: HomeAssistant, - tmp_path: Path, - issue_registry: ir.IssueRegistry, - external_url: str | None, - internal_url: str | None, -) -> None: - """Test raising SSL issue if no external or internal URL is configured.""" - - cert_path, key_path, _ = await hass.async_add_executor_job( - _setup_empty_ssl_pem_files, tmp_path - ) - - hass.config.external_url = external_url - hass.config.internal_url = internal_url - - with ( - patch("ssl.SSLContext.load_cert_chain"), - patch( - "homeassistant.util.ssl.server_context_modern", - side_effect=server_context_modern, - ), - ): - assert await async_setup_component( - hass, - "http", - {"http": {"ssl_certificate": cert_path, "ssl_key": key_path}}, - ) - await hass.async_start() - await hass.async_block_till_done() - - assert ( - "http", - "ssl_configured_without_configured_urls", - ) not in issue_registry.issues diff --git a/tests/components/http/test_static.py b/tests/components/http/test_static.py index 2ac7c6ded93..3e3f21d5002 100644 --- a/tests/components/http/test_static.py +++ b/tests/components/http/test_static.py @@ -4,12 +4,12 @@ from http import HTTPStatus from pathlib import Path from aiohttp.test_utils import TestClient +from aiohttp.web_exceptions import HTTPForbidden import pytest from homeassistant.components.http import StaticPathConfig -from homeassistant.components.http.static import CachingStaticResource -from homeassistant.const import EVENT_HOMEASSISTANT_START -from homeassistant.core import HomeAssistant +from homeassistant.components.http.static import CachingStaticResource, _get_file_path +from homeassistant.core import EVENT_HOMEASSISTANT_START, HomeAssistant from homeassistant.helpers.http import KEY_ALLOW_CONFIGURED_CORS from homeassistant.setup import async_setup_component @@ -30,19 +30,37 @@ async def mock_http_client(hass: 
HomeAssistant, aiohttp_client: ClientSessionGen return await aiohttp_client(hass.http.app, server_kwargs={"skip_url_asserts": True}) -async def test_static_resource_show_index( - hass: HomeAssistant, mock_http_client: TestClient, tmp_path: Path +@pytest.mark.parametrize( + ("url", "canonical_url"), + [ + ("//a", "//a"), + ("///a", "///a"), + ("/c:\\a\\b", "/c:%5Ca%5Cb"), + ], +) +async def test_static_path_blocks_anchors( + hass: HomeAssistant, + mock_http_client: TestClient, + tmp_path: Path, + url: str, + canonical_url: str, ) -> None: - """Test static resource will return a directory index.""" + """Test static paths block anchors.""" app = hass.http.app - resource = CachingStaticResource("/", tmp_path, show_index=True) + resource = CachingStaticResource(url, str(tmp_path)) + assert resource.canonical == canonical_url app.router.register_resource(resource) app[KEY_ALLOW_CONFIGURED_CORS](resource) - resp = await mock_http_client.get("/") - assert resp.status == 200 - assert resp.content_type == "text/html" + resp = await mock_http_client.get(canonical_url, allow_redirects=False) + assert resp.status == 403 + + # Tested directly since aiohttp will block it before + # it gets here but we want to make sure if aiohttp ever + # changes we still block it. + with pytest.raises(HTTPForbidden): + _get_file_path(canonical_url, tmp_path) async def test_async_register_static_paths( diff --git a/tests/components/huawei_lte/test_config_flow.py b/tests/components/huawei_lte/test_config_flow.py index a9a147eb17e..862af02963c 100644 --- a/tests/components/huawei_lte/test_config_flow.py +++ b/tests/components/huawei_lte/test_config_flow.py @@ -385,7 +385,15 @@ async def test_reauth( ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + context = { + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + } + result = await hass.config_entries.flow.async_init( + DOMAIN, context=context, data=entry.data + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["data_schema"] is not None diff --git a/tests/components/hue/conftest.py b/tests/components/hue/conftest.py index 7fc6c5ae33f..fca950d6b7a 100644 --- a/tests/components/hue/conftest.py +++ b/tests/components/hue/conftest.py @@ -2,7 +2,7 @@ import asyncio from collections import deque -from collections.abc import Generator +import json import logging from typing import Any from unittest.mock import AsyncMock, Mock, patch @@ -16,24 +16,27 @@ from homeassistant.components import hue from homeassistant.components.hue.v1 import sensor_base as hue_sensor_base from homeassistant.components.hue.v2.device import async_setup_devices from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component -from homeassistant.util.json import JsonArrayType from .const import FAKE_BRIDGE, FAKE_BRIDGE_DEVICE -from tests.common import MockConfigEntry, load_json_array_fixture +from tests.common import ( + MockConfigEntry, + async_mock_service, + load_fixture, + mock_device_registry, +) @pytest.fixture(autouse=True) -def no_request_delay() -> Generator[None]: +def no_request_delay(): """Make the request refresh delay 0 for instant tests.""" with patch("homeassistant.components.hue.const.REQUEST_REFRESH_DELAY", 0): yield -def create_mock_bridge(hass: HomeAssistant, 
api_version: int = 1) -> Mock: +def create_mock_bridge(hass, api_version=1): """Create a mocked HueBridge instance.""" bridge = Mock( hass=hass, @@ -47,10 +50,10 @@ def create_mock_bridge(hass: HomeAssistant, api_version: int = 1) -> Mock: bridge.logger = logging.getLogger(__name__) if bridge.api_version == 2: - bridge.api = create_mock_api_v2() + bridge.api = create_mock_api_v2(hass) bridge.mock_requests = bridge.api.mock_requests else: - bridge.api = create_mock_api_v1() + bridge.api = create_mock_api_v1(hass) bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = bridge.api.mock_requests bridge.mock_light_responses = bridge.api.mock_light_responses @@ -82,18 +85,18 @@ def create_mock_bridge(hass: HomeAssistant, api_version: int = 1) -> Mock: @pytest.fixture -def mock_api_v1() -> Mock: +def mock_api_v1(hass): """Mock the Hue V1 api.""" - return create_mock_api_v1() + return create_mock_api_v1(hass) @pytest.fixture -def mock_api_v2() -> Mock: +def mock_api_v2(hass): """Mock the Hue V2 api.""" - return create_mock_api_v2() + return create_mock_api_v2(hass) -def create_mock_api_v1() -> Mock: +def create_mock_api_v1(hass): """Create a mock V1 API.""" api = Mock(spec=aiohue_v1.HueBridgeV1) api.initialize = AsyncMock() @@ -137,12 +140,12 @@ def create_mock_api_v1() -> Mock: @pytest.fixture(scope="package") -def v2_resources_test_data() -> JsonArrayType: +def v2_resources_test_data(): """Load V2 resources mock data.""" - return load_json_array_fixture("hue/v2_resources.json") + return json.loads(load_fixture("hue/v2_resources.json")) -def create_mock_api_v2() -> Mock: +def create_mock_api_v2(hass): """Create a mock V2 API.""" api = Mock(spec=aiohue_v2.HueBridgeV2) api.initialize = AsyncMock() @@ -195,32 +198,30 @@ def create_mock_api_v2() -> Mock: @pytest.fixture -def mock_bridge_v1(hass: HomeAssistant) -> Mock: +def mock_bridge_v1(hass): """Mock a Hue bridge with V1 api.""" return create_mock_bridge(hass, api_version=1) @pytest.fixture -def mock_bridge_v2(hass: HomeAssistant) -> Mock: +def mock_bridge_v2(hass): """Mock a Hue bridge with V2 api.""" return create_mock_bridge(hass, api_version=2) @pytest.fixture -def mock_config_entry_v1() -> MockConfigEntry: +def mock_config_entry_v1(hass): """Mock a config entry for a Hue V1 bridge.""" return create_config_entry(api_version=1) @pytest.fixture -def mock_config_entry_v2() -> MockConfigEntry: +def mock_config_entry_v2(hass): """Mock a config entry.""" return create_config_entry(api_version=2) -def create_config_entry( - api_version: int = 1, host: str = "mock-host" -) -> MockConfigEntry: +def create_config_entry(api_version=1, host="mock-host"): """Mock a config entry for a Hue bridge.""" return MockConfigEntry( domain=hue.DOMAIN, @@ -229,7 +230,7 @@ def create_config_entry( ) -async def setup_component(hass: HomeAssistant) -> None: +async def setup_component(hass): """Mock setup Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( @@ -242,9 +243,7 @@ async def setup_component(hass: HomeAssistant) -> None: ) -async def setup_bridge( - hass: HomeAssistant, mock_bridge: Mock, config_entry: MockConfigEntry -) -> None: +async def setup_bridge(hass, mock_bridge, config_entry): """Load the Hue integration with the provided bridge.""" mock_bridge.config_entry = config_entry with patch.object( @@ -256,11 +255,11 @@ async def setup_bridge( async def setup_platform( - hass: HomeAssistant, - mock_bridge: Mock, - platforms: list[Platform] | tuple[Platform] | Platform, - hostname: str | None = 
None, -) -> None: + hass, + mock_bridge, + platforms, + hostname=None, +): """Load the Hue integration with the provided bridge for given platform(s).""" if not isinstance(platforms, (list, tuple)): platforms = [platforms] @@ -283,3 +282,15 @@ async def setup_platform( # and make sure it completes before going further await hass.async_block_till_done() + + +@pytest.fixture(name="device_reg") +def get_device_reg(hass): + """Return an empty, loaded, registry.""" + return mock_device_registry(hass) + + +@pytest.fixture(name="calls") +def track_calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") diff --git a/tests/components/hue/fixtures/v2_resources.json b/tests/components/hue/fixtures/v2_resources.json index 3d718f24c50..662e1107ca9 100644 --- a/tests/components/hue/fixtures/v2_resources.json +++ b/tests/components/hue/fixtures/v2_resources.json @@ -1288,9 +1288,7 @@ }, { "button": { - "button_report": { - "event": "short_release" - } + "last_event": "short_release" }, "id": "c658d3d8-a013-4b81-8ac6-78b248537e70", "id_v1": "/sensors/50", @@ -1329,9 +1327,7 @@ }, { "button": { - "button_report": { - "event": "short_release" - } + "last_event": "short_release" }, "id": "7f1ab9f6-cc2b-4b40-9011-65e2af153f75", "id_v1": "/sensors/10", @@ -1370,9 +1366,7 @@ }, { "button": { - "button_report": { - "event": "short_release" - } + "last_event": "short_release" }, "id": "31cffcda-efc2-401f-a152-e10db3eed232", "id_v1": "/sensors/5", @@ -1493,10 +1487,6 @@ "on": { "on": true }, - "owner": { - "rid": "7cee478d-6455-483a-9e32-9f9fdcbcc4f6", - "rtype": "zone" - }, "type": "grouped_light" }, { @@ -1508,10 +1498,6 @@ "on": { "on": true }, - "owner": { - "rid": "7cee478d-6455-483a-9e32-9f9fdcbcc4f6", - "rtype": "zone" - }, "type": "grouped_light" }, { @@ -1523,10 +1509,6 @@ "on": { "on": false }, - "owner": { - "rid": "7cee478d-6455-483a-9e32-9f9fdcbcc4f6", - "rtype": "zone" - }, "type": "grouped_light" }, { diff --git a/tests/components/hue/test_binary_sensor.py b/tests/components/hue/test_binary_sensor.py index 3721637a674..8f299a4b6a6 100644 --- a/tests/components/hue/test_binary_sensor.py +++ b/tests/components/hue/test_binary_sensor.py @@ -1,16 +1,13 @@ """Philips Hue binary_sensor platform tests for V2 bridge/api.""" -from unittest.mock import Mock - from homeassistant.core import HomeAssistant -from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_BINARY_SENSOR, FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY async def test_binary_sensors( - hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType + hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data ) -> None: """Test if all v2 binary_sensors get created with correct features.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -81,9 +78,7 @@ async def test_binary_sensors( assert sensor.attributes["device_class"] == "motion" -async def test_binary_sensor_add_update( - hass: HomeAssistant, mock_bridge_v2: Mock -) -> None: +async def test_binary_sensor_add_update(hass: HomeAssistant, mock_bridge_v2) -> None: """Test if binary_sensor get added/updated from events.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) await setup_platform(hass, mock_bridge_v2, "binary_sensor") diff --git a/tests/components/hue/test_bridge.py b/tests/components/hue/test_bridge.py index be7a6738617..42631215035 100644 --- a/tests/components/hue/test_bridge.py 
+++ b/tests/components/hue/test_bridge.py @@ -1,7 +1,7 @@ """Test Hue bridge.""" import asyncio -from unittest.mock import Mock, patch +from unittest.mock import patch from aiohttp import client_exceptions from aiohue.errors import Unauthorized @@ -21,7 +21,7 @@ from homeassistant.exceptions import ConfigEntryNotReady from tests.common import MockConfigEntry -async def test_bridge_setup_v1(hass: HomeAssistant, mock_api_v1: Mock) -> None: +async def test_bridge_setup_v1(hass: HomeAssistant, mock_api_v1) -> None: """Test a successful setup for V1 bridge.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -45,7 +45,7 @@ async def test_bridge_setup_v1(hass: HomeAssistant, mock_api_v1: Mock) -> None: assert forward_entries == {"light", "binary_sensor", "sensor"} -async def test_bridge_setup_v2(hass: HomeAssistant, mock_api_v2: Mock) -> None: +async def test_bridge_setup_v2(hass: HomeAssistant, mock_api_v2) -> None: """Test a successful setup for V2 bridge.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -113,9 +113,7 @@ async def test_bridge_setup_timeout(hass: HomeAssistant) -> None: await hue_bridge.async_initialize_bridge() -async def test_reset_unloads_entry_if_setup( - hass: HomeAssistant, mock_api_v1: Mock -) -> None: +async def test_reset_unloads_entry_if_setup(hass: HomeAssistant, mock_api_v1) -> None: """Test calling reset while the entry has been setup.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -145,7 +143,7 @@ async def test_reset_unloads_entry_if_setup( assert len(hass.services.async_services()) == 0 -async def test_handle_unauthorized(hass: HomeAssistant, mock_api_v1: Mock) -> None: +async def test_handle_unauthorized(hass: HomeAssistant, mock_api_v1) -> None: """Test handling an unauthorized error on update.""" config_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/hue/test_device_trigger_v1.py b/tests/components/hue/test_device_trigger_v1.py index 37af8c6a880..3d8fa64baf4 100644 --- a/tests/components/hue/test_device_trigger_v1.py +++ b/tests/components/hue/test_device_trigger_v1.py @@ -1,7 +1,5 @@ """The tests for Philips Hue device triggers for V1 bridge.""" -from unittest.mock import Mock - from pytest_unordered import unordered from homeassistant.components import automation, hue @@ -22,8 +20,8 @@ REMOTES_RESPONSE = {"7": HUE_TAP_REMOTE_1, "8": HUE_DIMMER_REMOTE_1} async def test_get_triggers( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v1: Mock, - device_registry: dr.DeviceRegistry, + mock_bridge_v1, + device_reg: dr.DeviceRegistry, ) -> None: """Test we get the expected triggers from a hue remote.""" mock_bridge_v1.mock_sensor_responses.append(REMOTES_RESPONSE) @@ -34,7 +32,7 @@ async def test_get_triggers( assert len(hass.states.async_all()) == 1 # Get triggers for specific tap switch - hue_tap_device = device_registry.async_get_device( + hue_tap_device = device_reg.async_get_device( identifiers={(hue.DOMAIN, "00:00:00:00:00:44:23:08")} ) triggers = await async_get_device_automations( @@ -55,7 +53,7 @@ async def test_get_triggers( assert triggers == unordered(expected_triggers) # Get triggers for specific dimmer switch - hue_dimmer_device = device_registry.async_get_device( + hue_dimmer_device = device_reg.async_get_device( identifiers={(hue.DOMAIN, "00:17:88:01:10:3e:3a:dc")} ) hue_bat_sensor = entity_registry.async_get( @@ -92,9 +90,9 @@ async def test_get_triggers( async def test_if_fires_on_state_change( hass: HomeAssistant, - mock_bridge_v1: Mock, - device_registry: dr.DeviceRegistry, - service_calls: 
list[ServiceCall], + mock_bridge_v1, + device_reg: dr.DeviceRegistry, + calls: list[ServiceCall], ) -> None: """Test for button press trigger firing.""" mock_bridge_v1.mock_sensor_responses.append(REMOTES_RESPONSE) @@ -103,7 +101,7 @@ async def test_if_fires_on_state_change( assert len(hass.states.async_all()) == 1 # Set an automation with a specific tap switch trigger - hue_tap_device = device_registry.async_get_device( + hue_tap_device = device_reg.async_get_device( identifiers={(hue.DOMAIN, "00:00:00:00:00:44:23:08")} ) assert await async_setup_component( @@ -160,8 +158,8 @@ async def test_if_fires_on_state_change( assert len(mock_bridge_v1.mock_requests) == 2 - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "B4 - 18" + assert len(calls) == 1 + assert calls[0].data["some"] == "B4 - 18" # Fake another button press. new_sensor_response["7"] = dict(new_sensor_response["7"]) @@ -175,4 +173,4 @@ async def test_if_fires_on_state_change( await mock_bridge_v1.sensor_manager.coordinator.async_refresh() await hass.async_block_till_done() assert len(mock_bridge_v1.mock_requests) == 3 - assert len(service_calls) == 1 + assert len(calls) == 1 diff --git a/tests/components/hue/test_device_trigger_v2.py b/tests/components/hue/test_device_trigger_v2.py index 1115e63fd92..0a89b3263c7 100644 --- a/tests/components/hue/test_device_trigger_v2.py +++ b/tests/components/hue/test_device_trigger_v2.py @@ -1,7 +1,5 @@ """The tests for Philips Hue device triggers for V2 bridge.""" -from unittest.mock import Mock - from aiohue.v2.models.button import ButtonEvent from pytest_unordered import unordered @@ -10,8 +8,7 @@ from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.hue.v2.device import async_setup_devices from homeassistant.components.hue.v2.hue_event import async_setup_hue_events from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.util.json import JsonArrayType +from homeassistant.helpers import entity_registry as er from .conftest import setup_platform @@ -19,7 +16,7 @@ from tests.common import async_capture_events, async_get_device_automations async def test_hue_event( - hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType + hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data ) -> None: """Test hue button events.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -31,12 +28,7 @@ async def test_hue_event( # Emit button update event btn_event = { - "button": { - "button_report": { - "event": "initial_press", - "updated": "2021-10-01T12:00:00Z", - } - }, + "button": {"last_event": "initial_press"}, "id": "c658d3d8-a013-4b81-8ac6-78b248537e70", "metadata": {"control_id": 1}, "type": "button", @@ -49,23 +41,23 @@ async def test_hue_event( assert len(events) == 1 assert events[0].data["id"] == "wall_switch_with_2_controls_button" assert events[0].data["unique_id"] == btn_event["id"] - assert events[0].data["type"] == btn_event["button"]["button_report"]["event"] + assert events[0].data["type"] == btn_event["button"]["last_event"] assert events[0].data["subtype"] == btn_event["metadata"]["control_id"] async def test_get_triggers( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2: Mock, - v2_resources_test_data: JsonArrayType, - device_registry: dr.DeviceRegistry, + mock_bridge_v2, + v2_resources_test_data, + device_reg, ) -> None: """Test we get the expected triggers 
from a hue remote.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) await setup_platform(hass, mock_bridge_v2, ["binary_sensor", "sensor"]) # Get triggers for `Wall switch with 2 controls` - hue_wall_switch_device = device_registry.async_get_device( + hue_wall_switch_device = device_reg.async_get_device( identifiers={(hue.DOMAIN, "3ff06175-29e8-44a8-8fe7-af591b0025da")} ) hue_bat_sensor = entity_registry.async_get( diff --git a/tests/components/hue/test_diagnostics.py b/tests/components/hue/test_diagnostics.py index 49681601ebf..7e64ba1ad93 100644 --- a/tests/components/hue/test_diagnostics.py +++ b/tests/components/hue/test_diagnostics.py @@ -1,7 +1,5 @@ """Test Hue diagnostics.""" -from unittest.mock import Mock - from homeassistant.core import HomeAssistant from .conftest import setup_platform @@ -11,7 +9,7 @@ from tests.typing import ClientSessionGenerator async def test_diagnostics_v1( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge_v1: Mock + hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge_v1 ) -> None: """Test diagnostics v1.""" await setup_platform(hass, mock_bridge_v1, []) @@ -21,7 +19,7 @@ async def test_diagnostics_v1( async def test_diagnostics_v2( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge_v2: Mock + hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge_v2 ) -> None: """Test diagnostics v2.""" mock_bridge_v2.api.get_diagnostics.return_value = {"hello": "world"} diff --git a/tests/components/hue/test_event.py b/tests/components/hue/test_event.py index 33b4d16f8be..aedf11a6e82 100644 --- a/tests/components/hue/test_event.py +++ b/tests/components/hue/test_event.py @@ -1,17 +1,14 @@ """Philips Hue Event platform tests for V2 bridge/api.""" -from unittest.mock import Mock - from homeassistant.components.event import ATTR_EVENT_TYPE, ATTR_EVENT_TYPES from homeassistant.core import HomeAssistant -from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_DEVICE, FAKE_ROTARY, FAKE_ZIGBEE_CONNECTIVITY async def test_event( - hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType + hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data ) -> None: """Test event entity for Hue integration.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -66,7 +63,7 @@ async def test_event( assert state.attributes[ATTR_EVENT_TYPE] == "long_release" -async def test_sensor_add_update(hass: HomeAssistant, mock_bridge_v2: Mock) -> None: +async def test_sensor_add_update(hass: HomeAssistant, mock_bridge_v2) -> None: """Test Event entity for newly added Relative Rotary resource.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) await setup_platform(hass, mock_bridge_v2, "event") diff --git a/tests/components/hue/test_light_v1.py b/tests/components/hue/test_light_v1.py index c742124e4f0..21b35e6d5e8 100644 --- a/tests/components/hue/test_light_v1.py +++ b/tests/components/hue/test_light_v1.py @@ -175,7 +175,7 @@ LIGHT_GAMUT = color.GamutType( LIGHT_GAMUT_TYPE = "A" -async def setup_bridge(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def setup_bridge(hass: HomeAssistant, mock_bridge_v1): """Load the Hue light platform with the provided bridge.""" hass.config.components.add(hue.DOMAIN) config_entry = create_config_entry() @@ -192,7 +192,7 @@ async def setup_bridge(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: async def 
test_not_load_groups_if_old_bridge( - hass: HomeAssistant, mock_bridge_v1: Mock + hass: HomeAssistant, mock_bridge_v1 ) -> None: """Test that we don't try to load groups if bridge runs old software.""" mock_bridge_v1.api.config.apiversion = "1.12.0" @@ -203,7 +203,7 @@ async def test_not_load_groups_if_old_bridge( assert len(hass.states.async_all()) == 0 -async def test_no_lights_or_groups(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_no_lights_or_groups(hass: HomeAssistant, mock_bridge_v1) -> None: """Test the update_lights function when no lights are found.""" mock_bridge_v1.mock_light_responses.append({}) mock_bridge_v1.mock_group_responses.append({}) @@ -212,7 +212,7 @@ async def test_no_lights_or_groups(hass: HomeAssistant, mock_bridge_v1: Mock) -> assert len(hass.states.async_all()) == 0 -async def test_lights(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_lights(hass: HomeAssistant, mock_bridge_v1) -> None: """Test the update_lights function with some lights.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -232,7 +232,7 @@ async def test_lights(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: assert lamp_2.state == "off" -async def test_lights_color_mode(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_lights_color_mode(hass: HomeAssistant, mock_bridge_v1) -> None: """Test that lights only report appropriate color mode.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) mock_bridge_v1.mock_group_responses.append(GROUP_RESPONSE) @@ -278,7 +278,7 @@ async def test_lights_color_mode(hass: HomeAssistant, mock_bridge_v1: Mock) -> N async def test_groups( - hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_bridge_v1: Mock + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_bridge_v1 ) -> None: """Test the update_lights function with some lights.""" mock_bridge_v1.mock_light_responses.append({}) @@ -303,7 +303,7 @@ async def test_groups( assert entity_registry.async_get("light.group_2").unique_id == "2" -async def test_new_group_discovered(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_new_group_discovered(hass: HomeAssistant, mock_bridge_v1) -> None: """Test if 2nd update has a new group.""" mock_bridge_v1.allow_groups = True mock_bridge_v1.mock_light_responses.append({}) @@ -350,7 +350,7 @@ async def test_new_group_discovered(hass: HomeAssistant, mock_bridge_v1: Mock) - assert new_group.attributes["color_temp"] == 250 -async def test_new_light_discovered(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_new_light_discovered(hass: HomeAssistant, mock_bridge_v1) -> None: """Test if 2nd update has a new light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -396,7 +396,7 @@ async def test_new_light_discovered(hass: HomeAssistant, mock_bridge_v1: Mock) - assert light.state == "off" -async def test_group_removed(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_group_removed(hass: HomeAssistant, mock_bridge_v1) -> None: """Test if 2nd update has removed group.""" mock_bridge_v1.allow_groups = True mock_bridge_v1.mock_light_responses.append({}) @@ -427,7 +427,7 @@ async def test_group_removed(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: assert removed_group is None -async def test_light_removed(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_light_removed(hass: HomeAssistant, mock_bridge_v1) -> None: """Test if 2nd update has removed light.""" 
mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -456,7 +456,7 @@ async def test_light_removed(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: assert removed_light is None -async def test_other_group_update(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_other_group_update(hass: HomeAssistant, mock_bridge_v1) -> None: """Test changing one group that will impact the state of other light.""" mock_bridge_v1.allow_groups = True mock_bridge_v1.mock_light_responses.append({}) @@ -509,7 +509,7 @@ async def test_other_group_update(hass: HomeAssistant, mock_bridge_v1: Mock) -> assert group_2.state == "off" -async def test_other_light_update(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_other_light_update(hass: HomeAssistant, mock_bridge_v1) -> None: """Test changing one light that will impact state of other light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -562,7 +562,7 @@ async def test_other_light_update(hass: HomeAssistant, mock_bridge_v1: Mock) -> assert lamp_2.attributes["brightness"] == 100 -async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1) -> None: """Test bridge marked as not available if timeout error during update.""" mock_bridge_v1.api.lights.update = Mock(side_effect=TimeoutError) mock_bridge_v1.api.groups.update = Mock(side_effect=TimeoutError) @@ -571,7 +571,7 @@ async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1: Mock) -> None assert len(hass.states.async_all()) == 0 -async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1) -> None: """Test bridge marked as not authorized if unauthorized during update.""" mock_bridge_v1.api.lights.update = Mock(side_effect=aiohue.Unauthorized) await setup_bridge(hass, mock_bridge_v1) @@ -580,7 +580,7 @@ async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1: Mock) -> assert len(mock_bridge_v1.handle_unauthorized_error.mock_calls) == 1 -async def test_light_turn_on_service(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_light_turn_on_service(hass: HomeAssistant, mock_bridge_v1) -> None: """Test calling the turn on service on a light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -633,9 +633,7 @@ async def test_light_turn_on_service(hass: HomeAssistant, mock_bridge_v1: Mock) } -async def test_light_turn_off_service( - hass: HomeAssistant, mock_bridge_v1: Mock -) -> None: +async def test_light_turn_off_service(hass: HomeAssistant, mock_bridge_v1) -> None: """Test calling the turn on service on a light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -777,7 +775,7 @@ async def test_group_features( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - mock_bridge_v1: Mock, + mock_bridge_v1, ) -> None: """Test group features.""" color_temp_type = "Color temperature light" diff --git a/tests/components/hue/test_light_v2.py b/tests/components/hue/test_light_v2.py index 2b978ffc33f..fca907eabb0 100644 --- a/tests/components/hue/test_light_v2.py +++ b/tests/components/hue/test_light_v2.py @@ -1,18 +1,15 @@ """Philips Hue lights platform tests for V2 bridge/api.""" -from unittest.mock import Mock - from homeassistant.components.light import ColorMode from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from 
homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_DEVICE, FAKE_LIGHT, FAKE_ZIGBEE_CONNECTIVITY async def test_lights( - hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType + hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data ) -> None: """Test if all v2 lights get created with correct features.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -80,7 +77,7 @@ async def test_lights( async def test_light_turn_on_service( - hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType + hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data ) -> None: """Test calling the turn on service on a light.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -175,7 +172,7 @@ async def test_light_turn_on_service( assert len(mock_bridge_v2.mock_requests) == 6 assert mock_bridge_v2.mock_requests[5]["json"]["color_temperature"]["mirek"] == 500 - # test enable an effect + # test enable effect await hass.services.async_call( "light", "turn_on", @@ -184,20 +181,8 @@ async def test_light_turn_on_service( ) assert len(mock_bridge_v2.mock_requests) == 7 assert mock_bridge_v2.mock_requests[6]["json"]["effects"]["effect"] == "candle" - # fire event to update effect in HA state - event = { - "id": "3a6710fa-4474-4eba-b533-5e6e72968feb", - "type": "light", - "effects": {"status": "candle"}, - } - mock_bridge_v2.api.emit_event("update", event) - await hass.async_block_till_done() - test_light = hass.states.get(test_light_id) - assert test_light is not None - assert test_light.attributes["effect"] == "candle" # test disable effect - # it should send a request with effect set to "no_effect" await hass.services.async_call( "light", "turn_on", @@ -206,28 +191,6 @@ async def test_light_turn_on_service( ) assert len(mock_bridge_v2.mock_requests) == 8 assert mock_bridge_v2.mock_requests[7]["json"]["effects"]["effect"] == "no_effect" - # fire event to update effect in HA state - event = { - "id": "3a6710fa-4474-4eba-b533-5e6e72968feb", - "type": "light", - "effects": {"status": "no_effect"}, - } - mock_bridge_v2.api.emit_event("update", event) - await hass.async_block_till_done() - test_light = hass.states.get(test_light_id) - assert test_light is not None - assert test_light.attributes["effect"] == "None" - - # test turn on with useless effect - # it should send a effect in the request if the device has no effect active - await hass.services.async_call( - "light", - "turn_on", - {"entity_id": test_light_id, "effect": "None"}, - blocking=True, - ) - assert len(mock_bridge_v2.mock_requests) == 9 - assert "effects" not in mock_bridge_v2.mock_requests[8]["json"] # test timed effect await hass.services.async_call( @@ -236,11 +199,11 @@ async def test_light_turn_on_service( {"entity_id": test_light_id, "effect": "sunrise", "transition": 6}, blocking=True, ) - assert len(mock_bridge_v2.mock_requests) == 10 + assert len(mock_bridge_v2.mock_requests) == 9 assert ( - mock_bridge_v2.mock_requests[9]["json"]["timed_effects"]["effect"] == "sunrise" + mock_bridge_v2.mock_requests[8]["json"]["timed_effects"]["effect"] == "sunrise" ) - assert mock_bridge_v2.mock_requests[9]["json"]["timed_effects"]["duration"] == 6000 + assert mock_bridge_v2.mock_requests[8]["json"]["timed_effects"]["duration"] == 6000 # test enabling effect should ignore color temperature await hass.services.async_call( @@ -249,9 +212,9 @@ async def test_light_turn_on_service( {"entity_id": test_light_id, "effect": 
"candle", "color_temp": 500}, blocking=True, ) - assert len(mock_bridge_v2.mock_requests) == 11 - assert mock_bridge_v2.mock_requests[10]["json"]["effects"]["effect"] == "candle" - assert "color_temperature" not in mock_bridge_v2.mock_requests[10]["json"] + assert len(mock_bridge_v2.mock_requests) == 10 + assert mock_bridge_v2.mock_requests[9]["json"]["effects"]["effect"] == "candle" + assert "color_temperature" not in mock_bridge_v2.mock_requests[9]["json"] # test enabling effect should ignore xy color await hass.services.async_call( @@ -260,13 +223,13 @@ async def test_light_turn_on_service( {"entity_id": test_light_id, "effect": "candle", "xy_color": [0.123, 0.123]}, blocking=True, ) - assert len(mock_bridge_v2.mock_requests) == 12 - assert mock_bridge_v2.mock_requests[11]["json"]["effects"]["effect"] == "candle" - assert "xy_color" not in mock_bridge_v2.mock_requests[11]["json"] + assert len(mock_bridge_v2.mock_requests) == 11 + assert mock_bridge_v2.mock_requests[10]["json"]["effects"]["effect"] == "candle" + assert "xy_color" not in mock_bridge_v2.mock_requests[9]["json"] async def test_light_turn_off_service( - hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType + hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data ) -> None: """Test calling the turn off service on a light.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -355,7 +318,7 @@ async def test_light_turn_off_service( assert mock_bridge_v2.mock_requests[4]["json"]["identify"]["action"] == "identify" -async def test_light_added(hass: HomeAssistant, mock_bridge_v2: Mock) -> None: +async def test_light_added(hass: HomeAssistant, mock_bridge_v2) -> None: """Test new light added to bridge.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) @@ -378,7 +341,7 @@ async def test_light_added(hass: HomeAssistant, mock_bridge_v2: Mock) -> None: async def test_light_availability( - hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType + hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data ) -> None: """Test light availability property.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -412,8 +375,8 @@ async def test_light_availability( async def test_grouped_lights( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2: Mock, - v2_resources_test_data: JsonArrayType, + mock_bridge_v2, + v2_resources_test_data, ) -> None: """Test if all v2 grouped lights get created with correct features.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) diff --git a/tests/components/hue/test_migration.py b/tests/components/hue/test_migration.py index 388e2f68f99..adcc582a314 100644 --- a/tests/components/hue/test_migration.py +++ b/tests/components/hue/test_migration.py @@ -1,11 +1,10 @@ """Test Hue migration logic.""" -from unittest.mock import Mock, patch +from unittest.mock import patch from homeassistant.components import hue from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.util.json import JsonArrayType from tests.common import MockConfigEntry @@ -52,9 +51,9 @@ async def test_light_entity_migration( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - mock_bridge_v2: Mock, - mock_config_entry_v2: MockConfigEntry, - v2_resources_test_data: JsonArrayType, + mock_bridge_v2, + mock_config_entry_v2, + v2_resources_test_data, ) -> None: 
"""Test if entity schema for lights migrates from v1 to v2.""" config_entry = mock_bridge_v2.config_entry = mock_config_entry_v2 @@ -99,9 +98,9 @@ async def test_sensor_entity_migration( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - mock_bridge_v2: Mock, - mock_config_entry_v2: MockConfigEntry, - v2_resources_test_data: JsonArrayType, + mock_bridge_v2, + mock_config_entry_v2, + v2_resources_test_data, ) -> None: """Test if entity schema for sensors migrates from v1 to v2.""" config_entry = mock_bridge_v2.config_entry = mock_config_entry_v2 @@ -160,9 +159,9 @@ async def test_sensor_entity_migration( async def test_group_entity_migration_with_v1_id( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2: Mock, - mock_config_entry_v2: MockConfigEntry, - v2_resources_test_data: JsonArrayType, + mock_bridge_v2, + mock_config_entry_v2, + v2_resources_test_data, ) -> None: """Test if entity schema for grouped_lights migrates from v1 to v2.""" config_entry = mock_bridge_v2.config_entry = mock_config_entry_v2 @@ -195,9 +194,9 @@ async def test_group_entity_migration_with_v1_id( async def test_group_entity_migration_with_v2_group_id( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2: Mock, - mock_config_entry_v2: MockConfigEntry, - v2_resources_test_data: JsonArrayType, + mock_bridge_v2, + mock_config_entry_v2, + v2_resources_test_data, ) -> None: """Test if entity schema for grouped_lights migrates from v1 to v2.""" config_entry = mock_bridge_v2.config_entry = mock_config_entry_v2 diff --git a/tests/components/hue/test_scene.py b/tests/components/hue/test_scene.py index 9488e0e14ce..5e2fd939087 100644 --- a/tests/components/hue/test_scene.py +++ b/tests/components/hue/test_scene.py @@ -1,11 +1,8 @@ """Philips Hue scene platform tests for V2 bridge/api.""" -from unittest.mock import Mock - from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_SCENE @@ -14,8 +11,8 @@ from .const import FAKE_SCENE async def test_scene( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2: Mock, - v2_resources_test_data: JsonArrayType, + mock_bridge_v2, + v2_resources_test_data, ) -> None: """Test if (config) scenes get created.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -35,7 +32,7 @@ async def test_scene( assert test_entity.attributes["group_type"] == "zone" assert test_entity.attributes["name"] == "Dynamic Test Scene" assert test_entity.attributes["speed"] == 0.6269841194152832 - assert test_entity.attributes["brightness"] == 119 + assert test_entity.attributes["brightness"] == 46.85 assert test_entity.attributes["is_dynamic"] is True # test (regular) scene for a hue room @@ -47,7 +44,7 @@ async def test_scene( assert test_entity.attributes["group_type"] == "room" assert test_entity.attributes["name"] == "Regular Test Scene" assert test_entity.attributes["speed"] == 0.5 - assert test_entity.attributes["brightness"] == 255 + assert test_entity.attributes["brightness"] == 100.0 assert test_entity.attributes["is_dynamic"] is False # test smart scene @@ -75,7 +72,7 @@ async def test_scene( async def test_scene_turn_on_service( - hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType + hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data ) -> None: 
"""Test calling the turn on service on a scene.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -112,7 +109,7 @@ async def test_scene_turn_on_service( async def test_scene_advanced_turn_on_service( - hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType + hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data ) -> None: """Test calling the advanced turn on service on a scene.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -149,7 +146,7 @@ async def test_scene_advanced_turn_on_service( async def test_scene_updates( - hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType + hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data ) -> None: """Test scene events from bridge.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -170,7 +167,7 @@ async def test_scene_updates( assert test_entity is not None assert test_entity.state == STATE_UNKNOWN assert test_entity.name == "Test Room Mocked Scene" - assert test_entity.attributes["brightness"] == 166 + assert test_entity.attributes["brightness"] == 65.0 # test update updated_resource = {**FAKE_SCENE} @@ -179,7 +176,7 @@ async def test_scene_updates( await hass.async_block_till_done() test_entity = hass.states.get(test_entity_id) assert test_entity is not None - assert test_entity.attributes["brightness"] == 89 + assert test_entity.attributes["brightness"] == 35.0 # # test entity name changes on group name change mock_bridge_v2.api.emit_event( diff --git a/tests/components/hue/test_sensor_v1.py b/tests/components/hue/test_sensor_v1.py index 0c5d7cccfe2..b1ef94f8ed0 100644 --- a/tests/components/hue/test_sensor_v1.py +++ b/tests/components/hue/test_sensor_v1.py @@ -10,7 +10,7 @@ from homeassistant.components.hue.const import ATTR_HUE_EVENT from homeassistant.components.hue.v1 import sensor_base from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from .conftest import create_mock_bridge, setup_platform @@ -282,7 +282,7 @@ SENSOR_RESPONSE = { } -async def test_no_sensors(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_no_sensors(hass: HomeAssistant, mock_bridge_v1) -> None: """Test the update_items function when no sensors are found.""" mock_bridge_v1.mock_sensor_responses.append({}) await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"]) @@ -291,7 +291,7 @@ async def test_no_sensors(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: async def test_sensors_with_multiple_bridges( - hass: HomeAssistant, mock_bridge_v1: Mock + hass: HomeAssistant, mock_bridge_v1 ) -> None: """Test the update_items function with some sensors.""" mock_bridge_2 = create_mock_bridge(hass, api_version=1) @@ -315,7 +315,7 @@ async def test_sensors_with_multiple_bridges( async def test_sensors( - hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_bridge_v1: Mock + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_bridge_v1 ) -> None: """Test the update_items function with some sensors.""" mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE) @@ -361,7 +361,7 @@ async def test_sensors( ) -async def test_unsupported_sensors(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_unsupported_sensors(hass: HomeAssistant, mock_bridge_v1) -> None: """Test that unsupported sensors don't get added and don't 
fail.""" response_with_unsupported = dict(SENSOR_RESPONSE) response_with_unsupported["7"] = UNSUPPORTED_SENSOR @@ -372,7 +372,7 @@ async def test_unsupported_sensors(hass: HomeAssistant, mock_bridge_v1: Mock) -> assert len(hass.states.async_all()) == 7 -async def test_new_sensor_discovered(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_new_sensor_discovered(hass: HomeAssistant, mock_bridge_v1) -> None: """Test if 2nd update has a new sensor.""" mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE) @@ -406,7 +406,7 @@ async def test_new_sensor_discovered(hass: HomeAssistant, mock_bridge_v1: Mock) assert temperature.state == "17.75" -async def test_sensor_removed(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_sensor_removed(hass: HomeAssistant, mock_bridge_v1) -> None: """Test if 2nd update has removed sensor.""" mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE) @@ -434,7 +434,7 @@ async def test_sensor_removed(hass: HomeAssistant, mock_bridge_v1: Mock) -> None assert removed_sensor is None -async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1) -> None: """Test bridge marked as not available if timeout error during update.""" mock_bridge_v1.api.sensors.update = Mock(side_effect=TimeoutError) await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"]) @@ -442,7 +442,7 @@ async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1: Mock) -> None assert len(hass.states.async_all()) == 0 -async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: +async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1) -> None: """Test bridge marked as not authorized if unauthorized during update.""" mock_bridge_v1.api.sensors.update = Mock(side_effect=aiohue.Unauthorized) await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"]) @@ -452,10 +452,7 @@ async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1: Mock) -> async def test_hue_events( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_bridge_v1: Mock, - device_registry: dr.DeviceRegistry, + hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_bridge_v1, device_reg ) -> None: """Test that hue remotes fire events when pressed.""" mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE) @@ -467,7 +464,7 @@ async def test_hue_events( assert len(hass.states.async_all()) == 7 assert len(events) == 0 - hue_tap_device = device_registry.async_get_device( + hue_tap_device = device_reg.async_get_device( identifiers={(hue.DOMAIN, "00:00:00:00:00:44:23:08")} ) @@ -498,7 +495,7 @@ async def test_hue_events( "last_updated": "2019-12-28T22:58:03", } - hue_dimmer_device = device_registry.async_get_device( + hue_dimmer_device = device_reg.async_get_device( identifiers={(hue.DOMAIN, "00:17:88:01:10:3e:3a:dc")} ) @@ -597,7 +594,7 @@ async def test_hue_events( async_fire_time_changed(hass) await hass.async_block_till_done() - hue_aurora_device = device_registry.async_get_device( + hue_aurora_device = device_reg.async_get_device( identifiers={(hue.DOMAIN, "ff:ff:00:0f:e7:fd:bc:b7")} ) diff --git a/tests/components/hue/test_sensor_v2.py b/tests/components/hue/test_sensor_v2.py index 22888a411ba..beb86de505b 100644 --- a/tests/components/hue/test_sensor_v2.py +++ b/tests/components/hue/test_sensor_v2.py @@ -1,24 +1,19 @@ """Philips Hue sensor platform tests for V2 bridge/api.""" -from unittest.mock import Mock - 
from homeassistant.components import hue from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component -from homeassistant.util.json import JsonArrayType from .conftest import setup_bridge, setup_platform from .const import FAKE_DEVICE, FAKE_SENSOR, FAKE_ZIGBEE_CONNECTIVITY -from tests.common import MockConfigEntry - async def test_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2: Mock, - v2_resources_test_data: JsonArrayType, + mock_bridge_v2, + v2_resources_test_data, ) -> None: """Test if all v2 sensors get created with correct features.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -70,9 +65,9 @@ async def test_sensors( async def test_enable_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2: Mock, - v2_resources_test_data: JsonArrayType, - mock_config_entry_v2: MockConfigEntry, + mock_bridge_v2, + v2_resources_test_data, + mock_config_entry_v2, ) -> None: """Test enabling of the by default disabled zigbee_connectivity sensor.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -110,7 +105,7 @@ async def test_enable_sensor( assert state.attributes["mac_address"] == "00:17:88:01:0b:aa:bb:99" -async def test_sensor_add_update(hass: HomeAssistant, mock_bridge_v2: Mock) -> None: +async def test_sensor_add_update(hass: HomeAssistant, mock_bridge_v2) -> None: """Test if sensors get added/updated from events.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) await setup_platform(hass, mock_bridge_v2, "sensor") diff --git a/tests/components/hue/test_services.py b/tests/components/hue/test_services.py index 26a4cab8261..6ce3cf2cc82 100644 --- a/tests/components/hue/test_services.py +++ b/tests/components/hue/test_services.py @@ -1,6 +1,6 @@ """Test Hue services.""" -from unittest.mock import Mock, patch +from unittest.mock import patch from homeassistant.components import hue from homeassistant.components.hue import bridge @@ -48,7 +48,7 @@ SCENE_RESPONSE = { } -async def test_hue_activate_scene(hass: HomeAssistant, mock_api_v1: Mock) -> None: +async def test_hue_activate_scene(hass: HomeAssistant, mock_api_v1) -> None: """Test successful hue_activate_scene.""" config_entry = MockConfigEntry( domain=hue.DOMAIN, @@ -83,9 +83,7 @@ async def test_hue_activate_scene(hass: HomeAssistant, mock_api_v1: Mock) -> Non assert mock_api_v1.mock_requests[2]["path"] == "groups/group_1/action" -async def test_hue_activate_scene_transition( - hass: HomeAssistant, mock_api_v1: Mock -) -> None: +async def test_hue_activate_scene_transition(hass: HomeAssistant, mock_api_v1) -> None: """Test successful hue_activate_scene with transition.""" config_entry = MockConfigEntry( domain=hue.DOMAIN, @@ -121,7 +119,7 @@ async def test_hue_activate_scene_transition( async def test_hue_activate_scene_group_not_found( - hass: HomeAssistant, mock_api_v1: Mock + hass: HomeAssistant, mock_api_v1 ) -> None: """Test failed hue_activate_scene due to missing group.""" config_entry = MockConfigEntry( @@ -153,7 +151,7 @@ async def test_hue_activate_scene_group_not_found( async def test_hue_activate_scene_scene_not_found( - hass: HomeAssistant, mock_api_v1: Mock + hass: HomeAssistant, mock_api_v1 ) -> None: """Test failed hue_activate_scene due to missing scene.""" config_entry = MockConfigEntry( @@ -186,10 +184,10 @@ async def test_hue_activate_scene_scene_not_found( async def 
test_hue_multi_bridge_activate_scene_all_respond( hass: HomeAssistant, - mock_bridge_v1: Mock, - mock_bridge_v2: Mock, - mock_config_entry_v1: MockConfigEntry, - mock_config_entry_v2: MockConfigEntry, + mock_bridge_v1, + mock_bridge_v2, + mock_config_entry_v1, + mock_config_entry_v2, ) -> None: """Test that makes multiple bridges successfully activate a scene.""" await setup_component(hass) @@ -220,10 +218,10 @@ async def test_hue_multi_bridge_activate_scene_all_respond( async def test_hue_multi_bridge_activate_scene_one_responds( hass: HomeAssistant, - mock_bridge_v1: Mock, - mock_bridge_v2: Mock, - mock_config_entry_v1: MockConfigEntry, - mock_config_entry_v2: MockConfigEntry, + mock_bridge_v1, + mock_bridge_v2, + mock_config_entry_v1, + mock_config_entry_v2, ) -> None: """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) @@ -253,10 +251,10 @@ async def test_hue_multi_bridge_activate_scene_one_responds( async def test_hue_multi_bridge_activate_scene_zero_responds( hass: HomeAssistant, - mock_bridge_v1: Mock, - mock_bridge_v2: Mock, - mock_config_entry_v1: MockConfigEntry, - mock_config_entry_v2: MockConfigEntry, + mock_bridge_v1, + mock_bridge_v2, + mock_config_entry_v1, + mock_config_entry_v2, ) -> None: """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) diff --git a/tests/components/hue/test_switch.py b/tests/components/hue/test_switch.py index 478acbaa303..2e25dd715c1 100644 --- a/tests/components/hue/test_switch.py +++ b/tests/components/hue/test_switch.py @@ -1,16 +1,13 @@ """Philips Hue switch platform tests for V2 bridge/api.""" -from unittest.mock import Mock - from homeassistant.core import HomeAssistant -from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_BINARY_SENSOR, FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY async def test_switch( - hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType + hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data ) -> None: """Test if (config) switches get created.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -37,7 +34,7 @@ async def test_switch( async def test_switch_turn_on_service( - hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType + hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data ) -> None: """Test calling the turn on service on a switch.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -61,7 +58,7 @@ async def test_switch_turn_on_service( async def test_switch_turn_off_service( - hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType + hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data ) -> None: """Test calling the turn off service on a switch.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -101,7 +98,7 @@ async def test_switch_turn_off_service( assert test_entity.state == "off" -async def test_switch_added(hass: HomeAssistant, mock_bridge_v2: Mock) -> None: +async def test_switch_added(hass: HomeAssistant, mock_bridge_v2) -> None: """Test new switch added to bridge.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) diff --git a/tests/components/humidifier/conftest.py b/tests/components/humidifier/conftest.py deleted file mode 100644 index 9fe1720ffc0..00000000000 --- a/tests/components/humidifier/conftest.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Fixtures for Humidifier platform tests.""" - -from 
collections.abc import Generator - -import pytest - -from homeassistant.components.humidifier import DOMAIN as HUMIDIFIER_DOMAIN -from homeassistant.config_entries import ConfigEntry, ConfigFlow -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from tests.common import ( - MockConfigEntry, - MockModule, - mock_config_flow, - mock_integration, - mock_platform, -) - - -class MockFlow(ConfigFlow): - """Test flow.""" - - -@pytest.fixture -def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: - """Mock config flow.""" - mock_platform(hass, "test.config_flow") - - with mock_config_flow("test", MockFlow): - yield - - -@pytest.fixture -def register_test_integration( - hass: HomeAssistant, config_flow_fixture: None -) -> Generator: - """Provide a mocked integration for tests.""" - - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - - async def help_async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups( - config_entry, [HUMIDIFIER_DOMAIN] - ) - return True - - async def help_async_unload_entry( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Unload test config emntry.""" - return await hass.config_entries.async_unload_platforms( - config_entry, [Platform.HUMIDIFIER] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=help_async_setup_entry_init, - async_unload_entry=help_async_unload_entry, - ), - ) - - return config_entry diff --git a/tests/components/humidifier/test_device_condition.py b/tests/components/humidifier/test_device_condition.py index ec8406bfe7b..4f4d21adcba 100644 --- a/tests/components/humidifier/test_device_condition.py +++ b/tests/components/humidifier/test_device_condition.py @@ -17,7 +17,11 @@ from homeassistant.helpers import ( from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -25,6 +29,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.mark.parametrize( ("set_state", "features_reg", "features_state", "expected_condition_types"), [ @@ -143,7 +153,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -228,42 +238,42 @@ async def test_if_state( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_on event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") 
hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "is_off event - test_event2" + assert len(calls) == 2 + assert calls[1].data["some"] == "is_off event - test_event2" hass.states.async_set(entry.entity_id, STATE_ON, {ATTR_MODE: const.MODE_AWAY}) hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[2].data["some"] == "is_mode - event - test_event3" + assert len(calls) == 3 + assert calls[2].data["some"] == "is_mode - event - test_event3" hass.states.async_set(entry.entity_id, STATE_ON, {ATTR_MODE: const.MODE_HOME}) # Should not fire hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(service_calls) == 3 + assert len(calls) == 3 async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -306,15 +316,15 @@ async def test_if_state_legacy( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_ON, {ATTR_MODE: const.MODE_AWAY}) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_mode - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_mode - event - test_event1" @pytest.mark.parametrize( diff --git a/tests/components/humidifier/test_device_trigger.py b/tests/components/humidifier/test_device_trigger.py index 3bb1f8c2551..83202e16675 100644 --- a/tests/components/humidifier/test_device_trigger.py +++ b/tests/components/humidifier/test_device_trigger.py @@ -30,6 +30,7 @@ from tests.common import ( MockConfigEntry, async_fire_time_changed, async_get_device_automations, + async_mock_service, ) @@ -38,6 +39,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -159,7 +166,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -349,8 +356,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 7, const.ATTR_CURRENT_HUMIDITY: 35}, ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "target_humidity_changed_below" + assert len(calls) == 1 + assert calls[0].data["some"] == "target_humidity_changed_below" # Fake that the current humidity is changing hass.states.async_set( @@ -359,8 +366,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 7, const.ATTR_CURRENT_HUMIDITY: 18}, ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "current_humidity_changed_below" + assert len(calls) == 2 + assert 
calls[1].data["some"] == "current_humidity_changed_below" # Fake that the humidity target is changing hass.states.async_set( @@ -369,8 +376,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 37, const.ATTR_CURRENT_HUMIDITY: 18}, ) await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[2].data["some"] == "target_humidity_changed_above" + assert len(calls) == 3 + assert calls[2].data["some"] == "target_humidity_changed_above" # Fake that the current humidity is changing hass.states.async_set( @@ -379,14 +386,14 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 37, const.ATTR_CURRENT_HUMIDITY: 41}, ) await hass.async_block_till_done() - assert len(service_calls) == 4 - assert service_calls[3].data["some"] == "current_humidity_changed_above" + assert len(calls) == 4 + assert calls[3].data["some"] == "current_humidity_changed_above" # Wait 6 minutes async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(minutes=6)) await hass.async_block_till_done() - assert len(service_calls) == 6 - assert {service_calls[4].data["some"], service_calls[5].data["some"]} == { + assert len(calls) == 6 + assert {calls[4].data["some"], calls[5].data["some"]} == { "current_humidity_changed_above_for", "target_humidity_changed_above_for", } @@ -398,8 +405,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 37, const.ATTR_CURRENT_HUMIDITY: 41}, ) await hass.async_block_till_done() - assert len(service_calls) == 8 - assert {service_calls[6].data["some"], service_calls[7].data["some"]} == { + assert len(calls) == 8 + assert {calls[6].data["some"], calls[7].data["some"]} == { "turn_off device - humidifier.test_5678 - on - off - None", "turn_on_or_off device - humidifier.test_5678 - on - off - None", } @@ -411,8 +418,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 37, const.ATTR_CURRENT_HUMIDITY: 41}, ) await hass.async_block_till_done() - assert len(service_calls) == 10 - assert {service_calls[8].data["some"], service_calls[9].data["some"]} == { + assert len(calls) == 10 + assert {calls[8].data["some"], calls[9].data["some"]} == { "turn_on device - humidifier.test_5678 - off - on - None", "turn_on_or_off device - humidifier.test_5678 - off - on - None", } @@ -422,7 +429,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -472,14 +479,12 @@ async def test_if_fires_on_state_change_legacy( # Fake that the humidity is changing hass.states.async_set(entry.entity_id, STATE_ON, {const.ATTR_HUMIDITY: 7}) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "target_humidity_changed_below" + assert len(calls) == 1 + assert calls[0].data["some"] == "target_humidity_changed_below" async def test_invalid_config( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + hass: HomeAssistant, entity_registry: er.EntityRegistry, calls: list[ServiceCall] ) -> None: """Test for turn_on and turn_off triggers firing.""" entry = entity_registry.async_get_or_create(DOMAIN, "test", "5678") @@ -523,7 +528,7 @@ async def test_invalid_config( hass.states.async_set(entry.entity_id, STATE_ON, {const.ATTR_HUMIDITY: 7}) await hass.async_block_till_done() # Should 
not trigger for invalid config - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_get_trigger_capabilities_on(hass: HomeAssistant) -> None: diff --git a/tests/components/humidifier/test_init.py b/tests/components/humidifier/test_init.py index 2725f942576..b90e7084dd1 100644 --- a/tests/components/humidifier/test_init.py +++ b/tests/components/humidifier/test_init.py @@ -8,28 +8,16 @@ import pytest from homeassistant.components import humidifier from homeassistant.components.humidifier import ( - ATTR_HUMIDITY, ATTR_MODE, - DOMAIN as HUMIDIFIER_DOMAIN, - MODE_ECO, - MODE_NORMAL, - SERVICE_SET_HUMIDITY, HumidifierEntity, HumidifierEntityFeature, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError -from tests.common import ( - MockConfigEntry, - MockEntity, - help_test_all, - import_and_test_deprecated_constant_enum, - setup_test_component_platform, -) +from tests.common import help_test_all, import_and_test_deprecated_constant_enum -class MockHumidifierEntity(MockEntity, HumidifierEntity): +class MockHumidifierEntity(HumidifierEntity): """Mock Humidifier device to use in tests.""" @property @@ -60,7 +48,7 @@ async def test_sync_turn_off(hass: HomeAssistant) -> None: assert humidifier.turn_off.called -def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: +def _create_tuples(enum: Enum, constant_prefix: str) -> list[tuple[Enum, str]]: return [(enum_field, constant_prefix) for enum_field in enum] @@ -113,70 +101,3 @@ def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> assert "is using deprecated supported features values" not in caplog.text assert entity.state_attributes[ATTR_MODE] == "mode1" - - -async def test_humidity_validation( - hass: HomeAssistant, - register_test_integration: MockConfigEntry, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test validation for humidity.""" - - class MockHumidifierEntityHumidity(MockEntity, HumidifierEntity): - """Mock climate class with mocked aux heater.""" - - _attr_supported_features = HumidifierEntityFeature.MODES - _attr_available_modes = [MODE_NORMAL, MODE_ECO] - _attr_mode = MODE_NORMAL - _attr_target_humidity = 50 - _attr_min_humidity = 50 - _attr_max_humidity = 60 - - def set_humidity(self, humidity: int) -> None: - """Set new target humidity.""" - self._attr_target_humidity = humidity - - test_humidifier = MockHumidifierEntityHumidity( - name="Test", - unique_id="unique_humidifier_test", - ) - - setup_test_component_platform( - hass, HUMIDIFIER_DOMAIN, entities=[test_humidifier], from_config_entry=True - ) - await hass.config_entries.async_setup(register_test_integration.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("humidifier.test") - assert state.attributes.get(ATTR_HUMIDITY) == 50 - - with pytest.raises( - ServiceValidationError, - match="Provided humidity 1 is not valid. Accepted range is 50 to 60", - ) as exc: - await hass.services.async_call( - HUMIDIFIER_DOMAIN, - SERVICE_SET_HUMIDITY, - { - "entity_id": "humidifier.test", - ATTR_HUMIDITY: "1", - }, - blocking=True, - ) - - assert exc.value.translation_key == "humidity_out_of_range" - assert "Check valid humidity 1 in range 50 - 60" in caplog.text - - with pytest.raises( - ServiceValidationError, - match="Provided humidity 70 is not valid. 
Accepted range is 50 to 60", - ) as exc: - await hass.services.async_call( - HUMIDIFIER_DOMAIN, - SERVICE_SET_HUMIDITY, - { - "entity_id": "humidifier.test", - ATTR_HUMIDITY: "70", - }, - blocking=True, - ) diff --git a/tests/components/hunterdouglas_powerview/conftest.py b/tests/components/hunterdouglas_powerview/conftest.py index ea40ba4ecc6..da339914aac 100644 --- a/tests/components/hunterdouglas_powerview/conftest.py +++ b/tests/components/hunterdouglas_powerview/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for Hunter Douglas Powerview tests.""" -from collections.abc import Generator -from unittest.mock import AsyncMock, PropertyMock, patch +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from aiopvapi.resources.shade import ShadePosition import pytest +from typing_extensions import Generator from homeassistant.components.hunterdouglas_powerview.const import DOMAIN @@ -29,19 +29,19 @@ def mock_hunterdouglas_hub( rooms_json: str, scenes_json: str, shades_json: str, -) -> Generator[None]: +) -> Generator[MagicMock]: """Return a mocked Powerview Hub with all data populated.""" with ( patch( - "homeassistant.components.hunterdouglas_powerview.util.Hub.request_raw_data", + "homeassistant.components.hunterdouglas_powerview.Hub.request_raw_data", return_value=load_json_object_fixture(device_json, DOMAIN), ), patch( - "homeassistant.components.hunterdouglas_powerview.util.Hub.request_home_data", + "homeassistant.components.hunterdouglas_powerview.Hub.request_home_data", return_value=load_json_object_fixture(home_json, DOMAIN), ), patch( - "homeassistant.components.hunterdouglas_powerview.util.Hub.request_raw_firmware", + "homeassistant.components.hunterdouglas_powerview.Hub.request_raw_firmware", return_value=load_json_object_fixture(firmware_json, DOMAIN), ), patch( @@ -111,7 +111,7 @@ def firmware_json(api_version: int) -> str: def rooms_json(api_version: int) -> str: """Return the get_resources fixture for a specific device.""" if api_version == 1: - return "gen1/rooms.json" + return "gen2/rooms.json" if api_version == 2: return "gen2/rooms.json" if api_version == 3: @@ -124,7 +124,7 @@ def rooms_json(api_version: int) -> str: def scenes_json(api_version: int) -> str: """Return the get_resources fixture for a specific device.""" if api_version == 1: - return "gen1/scenes.json" + return "gen2/scenes.json" if api_version == 2: return "gen2/scenes.json" if api_version == 3: @@ -137,7 +137,7 @@ def scenes_json(api_version: int) -> str: def shades_json(api_version: int) -> str: """Return the get_resources fixture for a specific device.""" if api_version == 1: - return "gen1/shades.json" + return "gen2/shades.json" if api_version == 2: return "gen2/shades.json" if api_version == 3: diff --git a/tests/components/hunterdouglas_powerview/const.py b/tests/components/hunterdouglas_powerview/const.py index 65b03fd5ec2..5a912a63a17 100644 --- a/tests/components/hunterdouglas_powerview/const.py +++ b/tests/components/hunterdouglas_powerview/const.py @@ -6,7 +6,6 @@ from homeassistant import config_entries from homeassistant.components import dhcp, zeroconf MOCK_MAC = "AA::BB::CC::DD::EE::FF" -MOCK_SERIAL = "A1B2C3D4E5G6H7" HOMEKIT_DISCOVERY_GEN2 = zeroconf.ZeroconfServiceInfo( ip_address="1.2.3.4", @@ -42,7 +41,7 @@ ZEROCONF_DISCOVERY_GEN3 = zeroconf.ZeroconfServiceInfo( ip_address="1.2.3.4", ip_addresses=[IPv4Address("1.2.3.4")], hostname="mock_hostname", - name="Powerview Generation 3._PowerView-G3._tcp.local.", + name="Powerview Generation 3._powerview-g3._tcp.local.", 
port=None, properties={}, type="mock_type", diff --git a/tests/components/hunterdouglas_powerview/fixtures/gen1/rooms.json b/tests/components/hunterdouglas_powerview/fixtures/gen1/rooms.json deleted file mode 100644 index 4ddcccd466e..00000000000 --- a/tests/components/hunterdouglas_powerview/fixtures/gen1/rooms.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "roomIds": [4896], - "roomData": [ - { - "id": 4896, - "name": "U3BpbmRsZQ==", - "order": 0, - "colorId": 11, - "iconId": 77, - "name_unicode": "Spindle" - } - ] -} diff --git a/tests/components/hunterdouglas_powerview/fixtures/gen1/scenes.json b/tests/components/hunterdouglas_powerview/fixtures/gen1/scenes.json deleted file mode 100644 index 4b6b7fb9cc3..00000000000 --- a/tests/components/hunterdouglas_powerview/fixtures/gen1/scenes.json +++ /dev/null @@ -1,188 +0,0 @@ -{ - "sceneIds": [ - 19831, 4068, 55363, 43508, 59372, 48243, 54636, 20625, 4034, 59103, 61648, - 24626, 64679, 22498, 28856, 25458, 51159, 959 - ], - "sceneData": [ - { - "id": 19831, - "networkNumber": 0, - "name": "Q2xvc2UgTG91bmdlIFJvb20=", - "roomId": 4896, - "order": 0, - "colorId": 7, - "iconId": 171, - "name_unicode": "Close Lounge Room" - }, - { - "id": 4068, - "networkNumber": 1, - "name": "Q2xvc2UgQmVkIDQ=", - "roomId": 4896, - "order": 1, - "colorId": 7, - "iconId": 10, - "name_unicode": "Close Bed 4" - }, - { - "id": 55363, - "networkNumber": 2, - "name": "Q2xvc2UgQmVkIDI=", - "roomId": 4896, - "order": 2, - "colorId": 11, - "iconId": 171, - "name_unicode": "Close Bed 2" - }, - { - "id": 43508, - "networkNumber": 3, - "name": "Q2xvc2UgTWFzdGVyIEJlZA==", - "roomId": 4896, - "order": 3, - "colorId": 11, - "iconId": 10, - "name_unicode": "Close Master Bed" - }, - { - "id": 59372, - "networkNumber": 4, - "name": "Q2xvc2UgRmFtaWx5", - "roomId": 4896, - "order": 4, - "colorId": 0, - "iconId": 171, - "name_unicode": "Close Family" - }, - { - "id": 48243, - "networkNumber": 5, - "name": "T3BlbiBCZWQgNA==", - "roomId": 4896, - "order": 5, - "colorId": 0, - "iconId": 10, - "name_unicode": "Open Bed 4" - }, - { - "id": 54636, - "networkNumber": 6, - "name": "T3BlbiBNYXN0ZXIgQmVk", - "roomId": 4896, - "order": 6, - "colorId": 0, - "iconId": 26, - "name_unicode": "Open Master Bed" - }, - { - "id": 20625, - "networkNumber": 7, - "name": "T3BlbiBCZWQgMw==", - "roomId": 4896, - "order": 7, - "colorId": 7, - "iconId": 26, - "name_unicode": "Open Bed 3" - }, - { - "id": 4034, - "networkNumber": 8, - "name": "T3BlbiBGYW1pbHk=", - "roomId": 4896, - "order": 8, - "colorId": 11, - "iconId": 26, - "name_unicode": "Open Family" - }, - { - "id": 59103, - "networkNumber": 9, - "name": "Q2xvc2UgU3R1ZHk=", - "roomId": 4896, - "order": 9, - "colorId": 0, - "iconId": 171, - "name_unicode": "Close Study" - }, - { - "id": 61648, - "networkNumber": 10, - "name": "T3BlbiBBbGw=", - "roomId": 4896, - "order": 10, - "colorId": 11, - "iconId": 26, - "name_unicode": "Open All" - }, - { - "id": 24626, - "networkNumber": 11, - "name": "Q2xvc2UgQWxs", - "roomId": 4896, - "order": 11, - "colorId": 0, - "iconId": 171, - "name_unicode": "Close All" - }, - { - "id": 64679, - "networkNumber": 12, - "name": "T3BlbiBLaXRjaGVu", - "roomId": 4896, - "order": 12, - "colorId": 7, - "iconId": 26, - "name_unicode": "Open Kitchen" - }, - { - "id": 22498, - "networkNumber": 13, - "name": "T3BlbiBMb3VuZ2UgUm9vbQ==", - "roomId": 4896, - "order": 13, - "colorId": 7, - "iconId": 26, - "name_unicode": "Open Lounge Room" - }, - { - "id": 25458, - "networkNumber": 14, - "name": "T3BlbiBCZWQgMg==", - "roomId": 4896, - "order": 
14, - "colorId": 0, - "iconId": 26, - "name_unicode": "Open Bed 2" - }, - { - "id": 46225, - "networkNumber": 15, - "name": "Q2xvc2UgQmVkIDM=", - "roomId": 4896, - "order": 15, - "colorId": 0, - "iconId": 26, - "name_unicode": "Close Bed 3" - }, - { - "id": 51159, - "networkNumber": 16, - "name": "Q2xvc2UgS2l0Y2hlbg==", - "roomId": 4896, - "order": 16, - "colorId": 0, - "iconId": 26, - "name_unicode": "Close Kitchen" - }, - { - "id": 959, - "networkNumber": 17, - "name": "T3BlbiBTdHVkeQ==", - "roomId": 4896, - "order": 17, - "colorId": 0, - "iconId": 26, - "name_unicode": "Open Study" - } - ] -} diff --git a/tests/components/hunterdouglas_powerview/fixtures/gen1/shades.json b/tests/components/hunterdouglas_powerview/fixtures/gen1/shades.json deleted file mode 100644 index 6e43c1d788d..00000000000 --- a/tests/components/hunterdouglas_powerview/fixtures/gen1/shades.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "shadeIds": [36492, 65111, 7003, 53627], - "shadeData": [ - { - "id": 36492, - "name": "S2l0Y2hlbiBOb3J0aA==", - "roomId": 4896, - "groupId": 35661, - "order": 0, - "type": 40, - "batteryStrength": 116, - "batteryStatus": 3, - "positions": { "position1": 65535, "posKind1": 1 }, - "name_unicode": "Kitchen North" - }, - { - "id": 65111, - "name": "S2l0Y2hlbiBXZXN0", - "roomId": 4896, - "groupId": 35661, - "order": 1, - "type": 40, - "batteryStrength": 124, - "batteryStatus": 3, - "positions": { "position1": 65535, "posKind1": 3 }, - "name_unicode": "Kitchen West" - }, - { - "id": 7003, - "name": "QmF0aCBFYXN0", - "roomId": 4896, - "groupId": 35661, - "order": 2, - "type": 40, - "batteryStrength": 94, - "batteryStatus": 1, - "positions": { "position1": 65535, "posKind1": 1 }, - "name_unicode": "Bath East" - }, - { - "id": 53627, - "name": "QmF0aCBTb3V0aA==", - "roomId": 4896, - "groupId": 35661, - "order": 3, - "type": 40, - "batteryStrength": 127, - "batteryStatus": 3, - "positions": { "position1": 65535, "posKind1": 3 }, - "name_unicode": "Bath South" - } - ] -} diff --git a/tests/components/hunterdouglas_powerview/fixtures/gen1/userdata.json b/tests/components/hunterdouglas_powerview/fixtures/gen1/userdata.json index 90b64ee4686..132e2721b05 100644 --- a/tests/components/hunterdouglas_powerview/fixtures/gen1/userdata.json +++ b/tests/components/hunterdouglas_powerview/fixtures/gen1/userdata.json @@ -1,34 +1,34 @@ { "userData": { - "serialNumber": "A1B2C3D4E5G6H7", - "rfID": "0x8B2A", - "rfIDInt": 35626, - "rfStatus": 0, - "hubName": "UG93ZXJ2aWV3IEdlbmVyYXRpb24gMQ==", - "macAddress": "AA:BB:CC:DD:EE:FF", - "roomCount": 1, - "shadeCount": 4, - "groupCount": 5, - "sceneCount": 9, - "sceneMemberCount": 24, - "multiSceneCount": 0, - "multiSceneMemberCount": 0, - "scheduledEventCount": 4, - "sceneControllerCount": 0, - "sceneControllerMemberCount": 0, - "accessPointCount": 0, - "localTimeDataSet": true, "enableScheduledEvents": true, - "remoteConnectEnabled": true, - "editingEnabled": true, - "setupCompleted": false, - "gateway": "192.168.0.1", - "mask": "255.255.255.0", - "ip": "192.168.0.20", - "dns": "192.168.0.1", "staticIp": false, - "addressKind": "newPrimary", + "sceneControllerCount": 0, + "accessPointCount": 0, + "shadeCount": 5, + "ip": "192.168.0.20", + "groupCount": 9, + "scheduledEventCount": 0, + "editingEnabled": true, + "roomCount": 5, + "setupCompleted": false, + "sceneCount": 18, + "sceneControllerMemberCount": 0, + "mask": "255.255.255.0", + "hubName": "UG93ZXJ2aWV3IEdlbmVyYXRpb24gMQ==", + "rfID": "0x8B2A", + "remoteConnectEnabled": false, + "multiSceneMemberCount": 0, + 
"rfStatus": 0, + "serialNumber": "A1B2C3D4E5G6H7", + "undefinedShadeCount": 0, + "sceneMemberCount": 18, "unassignedShadeCount": 0, - "undefinedShadeCount": 0 + "multiSceneCount": 0, + "addressKind": "newPrimary", + "gateway": "192.168.0.1", + "localTimeDataSet": true, + "dns": "192.168.0.1", + "macAddress": "AA:BB:CC:DD:EE:FF", + "rfIDInt": 35626 } } diff --git a/tests/components/hunterdouglas_powerview/test_config_flow.py b/tests/components/hunterdouglas_powerview/test_config_flow.py index 42589bb10e0..b9721f4adb1 100644 --- a/tests/components/hunterdouglas_powerview/test_config_flow.py +++ b/tests/components/hunterdouglas_powerview/test_config_flow.py @@ -10,9 +10,8 @@ from homeassistant.components.hunterdouglas_powerview.const import DOMAIN from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -import homeassistant.helpers.entity_registry as er -from .const import DHCP_DATA, DISCOVERY_DATA, HOMEKIT_DATA, MOCK_SERIAL +from .const import DHCP_DATA, DISCOVERY_DATA, HOMEKIT_DATA from tests.common import MockConfigEntry, load_json_object_fixture @@ -41,7 +40,7 @@ async def test_user_form( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == f"Powerview Generation {api_version}" assert result2["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result2["result"].unique_id == MOCK_SERIAL + assert result2["result"].unique_id == "A1B2C3D4E5G6H7" assert len(mock_setup_entry.mock_calls) == 1 @@ -76,7 +75,7 @@ async def test_form_homekit_and_dhcp_cannot_connect( ignored_config_entry.add_to_hass(hass) with patch( - "homeassistant.components.hunterdouglas_powerview.util.Hub.query_firmware", + "homeassistant.components.hunterdouglas_powerview.Hub.query_firmware", side_effect=TimeoutError, ): result = await hass.config_entries.flow.async_init( @@ -101,7 +100,7 @@ async def test_form_homekit_and_dhcp_cannot_connect( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == f"Powerview Generation {api_version}" assert result3["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result3["result"].unique_id == MOCK_SERIAL + assert result3["result"].unique_id == "A1B2C3D4E5G6H7" assert len(mock_setup_entry.mock_calls) == 1 @@ -143,7 +142,7 @@ async def test_form_homekit_and_dhcp( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == f"Powerview Generation {api_version}" assert result2["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result2["result"].unique_id == MOCK_SERIAL + assert result2["result"].unique_id == "A1B2C3D4E5G6H7" assert len(mock_setup_entry.mock_calls) == 1 @@ -206,7 +205,7 @@ async def test_form_cannot_connect( # Simulate a timeout error with patch( - "homeassistant.components.hunterdouglas_powerview.util.Hub.query_firmware", + "homeassistant.components.hunterdouglas_powerview.Hub.query_firmware", side_effect=TimeoutError, ): result2 = await hass.config_entries.flow.async_configure( @@ -226,7 +225,7 @@ async def test_form_cannot_connect( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == f"Powerview Generation {api_version}" assert result3["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result3["result"].unique_id == MOCK_SERIAL + assert result3["result"].unique_id == "A1B2C3D4E5G6H7" assert len(mock_setup_entry.mock_calls) == 1 @@ -245,11 +244,11 @@ async def 
test_form_no_data( with ( patch( - "homeassistant.components.hunterdouglas_powerview.util.Hub.request_raw_data", + "homeassistant.components.hunterdouglas_powerview.Hub.request_raw_data", return_value={}, ), patch( - "homeassistant.components.hunterdouglas_powerview.util.Hub.request_home_data", + "homeassistant.components.hunterdouglas_powerview.Hub.request_home_data", return_value={}, ), ): @@ -270,7 +269,7 @@ async def test_form_no_data( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == f"Powerview Generation {api_version}" assert result3["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result3["result"].unique_id == MOCK_SERIAL + assert result3["result"].unique_id == "A1B2C3D4E5G6H7" assert len(mock_setup_entry.mock_calls) == 1 @@ -289,7 +288,7 @@ async def test_form_unknown_exception( # Simulate a transient error with patch( - "homeassistant.components.hunterdouglas_powerview.util.Hub.query_firmware", + "homeassistant.components.hunterdouglas_powerview.config_flow.Hub.query_firmware", side_effect=SyntaxError, ): result2 = await hass.config_entries.flow.async_configure( @@ -309,7 +308,7 @@ async def test_form_unknown_exception( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == f"Powerview Generation {api_version}" assert result2["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result2["result"].unique_id == MOCK_SERIAL + assert result2["result"].unique_id == "A1B2C3D4E5G6H7" assert len(mock_setup_entry.mock_calls) == 1 @@ -328,7 +327,7 @@ async def test_form_unsupported_device( # Simulate a gen 3 secondary hub with patch( - "homeassistant.components.hunterdouglas_powerview.util.Hub.request_raw_data", + "homeassistant.components.hunterdouglas_powerview.Hub.request_raw_data", return_value=load_json_object_fixture("gen3/gateway/secondary.json", DOMAIN), ): result2 = await hass.config_entries.flow.async_configure( @@ -348,57 +347,6 @@ async def test_form_unsupported_device( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == f"Powerview Generation {api_version}" assert result3["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result3["result"].unique_id == MOCK_SERIAL + assert result3["result"].unique_id == "A1B2C3D4E5G6H7" assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.usefixtures("mock_hunterdouglas_hub") -@pytest.mark.parametrize("api_version", [1, 2, 3]) -async def test_migrate_entry( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - api_version: int, -) -> None: - """Test migrate to newest version.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={"host": "1.2.3.4"}, - unique_id=MOCK_SERIAL, - version=1, - minor_version=1, - ) - - # Add entries with int unique_id - entity_registry.async_get_or_create( - domain="cover", - platform="hunterdouglas_powerview", - unique_id=123, - config_entry=entry, - ) - # Add entries with a str unique_id not starting with entry.unique_id - entity_registry.async_get_or_create( - domain="cover", - platform="hunterdouglas_powerview", - unique_id="old_unique_id", - config_entry=entry, - ) - - assert entry.version == 1 - assert entry.minor_version == 1 - - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert entry.version == 1 - assert entry.minor_version == 2 - - # Reload the registry entries - registry_entries = er.async_entries_for_config_entry( - entity_registry, entry.entry_id - ) - - # 
Ensure the IDs have been migrated - for reg_entry in registry_entries: - assert reg_entry.unique_id.startswith(f"{entry.unique_id}_") diff --git a/tests/components/hunterdouglas_powerview/test_scene.py b/tests/components/hunterdouglas_powerview/test_scene.py index 43074d55470..9628805d0e8 100644 --- a/tests/components/hunterdouglas_powerview/test_scene.py +++ b/tests/components/hunterdouglas_powerview/test_scene.py @@ -14,10 +14,10 @@ from .const import MOCK_MAC from tests.common import MockConfigEntry -@pytest.mark.usefixtures("mock_hunterdouglas_hub") @pytest.mark.parametrize("api_version", [1, 2, 3]) async def test_scenes( hass: HomeAssistant, + mock_hunterdouglas_hub: None, api_version: int, ) -> None: """Test the scenes.""" diff --git a/tests/components/husqvarna_automower/__init__.py b/tests/components/husqvarna_automower/__init__.py index 9473b68a5ed..8c51d69ba3d 100644 --- a/tests/components/husqvarna_automower/__init__.py +++ b/tests/components/husqvarna_automower/__init__.py @@ -7,10 +7,6 @@ from tests.common import MockConfigEntry async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Fixture for setting up the component.""" - # We lock the timezone, because the timezone is passed to the library to generate - # some values like the next start sensor. This is needed, as the device is not aware - # of its own timezone. So we assume the device is in the timezone which is selected in - # the Home Assistant config. - await hass.config.async_set_time_zone("Europe/Berlin") config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/husqvarna_automower/conftest.py b/tests/components/husqvarna_automower/conftest.py index 0202cec05b9..7ace3b76808 100644 --- a/tests/components/husqvarna_automower/conftest.py +++ b/tests/components/husqvarna_automower/conftest.py @@ -1,15 +1,13 @@ """Test helpers for Husqvarna Automower.""" -import asyncio -from collections.abc import Generator import time from unittest.mock import AsyncMock, patch -from aioautomower.model import MowerAttributes from aioautomower.session import AutomowerSession, _MowerCommands from aioautomower.utils import mower_list_to_dictionary_dataclass from aiohttp import ClientWebSocketResponse import pytest +from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, @@ -18,7 +16,6 @@ from homeassistant.components.application_credentials import ( from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from homeassistant.util import dt as dt_util from .const import CLIENT_ID, CLIENT_SECRET, USER_ID @@ -43,21 +40,6 @@ def mock_scope() -> str: return "iam:read amc:api" -@pytest.fixture(name="mower_time_zone") -async def mock_time_zone(hass: HomeAssistant) -> dict[str, MowerAttributes]: - """Fixture to set correct scope for the token.""" - return await dt_util.async_get_time_zone("Europe/Berlin") - - -@pytest.fixture(name="values") -def mock_values(mower_time_zone) -> dict[str, MowerAttributes]: - """Fixture to set correct scope for the token.""" - return mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN), - mower_time_zone, - ) - - @pytest.fixture def mock_config_entry(jwt: str, expires_at: int, scope: str) -> MockConfigEntry: """Return the default mocked config entry.""" @@ -99,20 +81,17 @@ async def setup_credentials(hass: 
HomeAssistant) -> None: @pytest.fixture -def mock_automower_client(values) -> Generator[AsyncMock]: +def mock_automower_client() -> Generator[AsyncMock]: """Mock a Husqvarna Automower client.""" - async def listen() -> None: - """Mock listen.""" - listen_block = asyncio.Event() - await listen_block.wait() - pytest.fail("Listen was not cancelled!") + mower_dict = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) mock = AsyncMock(spec=AutomowerSession) mock.auth = AsyncMock(side_effect=ClientWebSocketResponse) mock.commands = AsyncMock(spec_set=_MowerCommands) - mock.get_status.return_value = values - mock.start_listening = AsyncMock(side_effect=listen) + mock.get_status.return_value = mower_dict with patch( "homeassistant.components.husqvarna_automower.AutomowerSession", diff --git a/tests/components/husqvarna_automower/fixtures/mower.json b/tests/components/husqvarna_automower/fixtures/mower.json index 8ab2f96e42f..a5cae68f47c 100644 --- a/tests/components/husqvarna_automower/fixtures/mower.json +++ b/tests/components/husqvarna_automower/fixtures/mower.json @@ -6,14 +6,13 @@ "attributes": { "system": { "name": "Test Mower 1", - "model": "HUSQVARNA AUTOMOWER® 450XH", + "model": "450XH-TEST", "serialNumber": 123 }, "battery": { "batteryPercent": 100 }, "capabilities": { - "canConfirmError": true, "headlights": true, "workAreas": true, "position": true, @@ -40,8 +39,7 @@ "thursday": false, "friday": true, "saturday": false, - "sunday": false, - "workAreaId": 123456 + "sunday": false }, { "start": 0, @@ -52,32 +50,7 @@ "thursday": true, "friday": false, "saturday": true, - "sunday": false, - "workAreaId": 123456 - }, - { - "start": 0, - "duration": 480, - "monday": false, - "tuesday": true, - "wednesday": false, - "thursday": true, - "friday": false, - "saturday": true, - "sunday": false, - "workAreaId": 654321 - }, - { - "start": 60, - "duration": 480, - "monday": true, - "tuesday": true, - "wednesday": false, - "thursday": true, - "friday": false, - "saturday": true, - "sunday": false, - "workAreaId": 654321 + "sunday": false } ] }, @@ -90,30 +63,23 @@ }, "metadata": { "connected": true, - "statusTimestamp": 1685923200000 + "statusTimestamp": 1697669932683 }, "workAreas": [ { "workAreaId": 123456, "name": "Front lawn", - "cuttingHeight": 50, - "enabled": true, - "progress": 40, - "lastTimeCompleted": 1723449269 + "cuttingHeight": 50 }, { "workAreaId": 654321, "name": "Back lawn", - "cuttingHeight": 25, - "enabled": true + "cuttingHeight": 25 }, { "workAreaId": 0, "name": "", - "cuttingHeight": 50, - "enabled": false, - "progress": 20, - "lastTimeCompleted": 1723439269 + "cuttingHeight": 50 } ], "positions": [ @@ -206,69 +172,6 @@ } } } - }, - { - "type": "mower", - "id": "1234", - "attributes": { - "system": { - "name": "Test Mower 2", - "model": "HUSQVARNA AUTOMOWER® Aspire R4", - "serialNumber": 123 - }, - "battery": { - "batteryPercent": 50 - }, - "capabilities": { - "canConfirmError": false, - "headlights": false, - "position": false, - "stayOutZones": false, - "workAreas": false - }, - "mower": { - "mode": "MAIN_AREA", - "activity": "PARKED_IN_CS", - "inactiveReason": "NONE", - "state": "RESTRICTED", - "errorCode": 0, - "errorCodeTimestamp": 0 - }, - "calendar": { - "tasks": [ - { - "start": 120, - "duration": 49, - "monday": true, - "tuesday": false, - "wednesday": false, - "thursday": false, - "friday": false, - "saturday": false, - "sunday": false - } - ] - }, - "planner": { - "nextStartTimestamp": 1685991600000, - "override": { - "action": 
"NOT_ACTIVE" - }, - "restrictedReason": "WEEK_SCHEDULE" - }, - "metadata": { - "connected": true, - "statusTimestamp": 1697669932683 - }, - "positions": [], - "settings": { - "cuttingHeight": null, - "headlight": { - "mode": null - } - }, - "statistics": {} - } } ] } diff --git a/tests/components/husqvarna_automower/snapshots/test_binary_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_binary_sensor.ambr index 16d9452e847..aaa9c59679f 100644 --- a/tests/components/husqvarna_automower/snapshots/test_binary_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_binary_sensor.ambr @@ -138,142 +138,3 @@ 'state': 'off', }) # --- -# name: test_binary_sensor_snapshot[binary_sensor.test_mower_2_charging-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.test_mower_2_charging', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charging', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1234_battery_charging', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor_snapshot[binary_sensor.test_mower_2_charging-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery_charging', - 'friendly_name': 'Test Mower 2 Charging', - }), - 'context': , - 'entity_id': 'binary_sensor.test_mower_2_charging', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_snapshot[binary_sensor.test_mower_2_leaving_dock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.test_mower_2_leaving_dock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Leaving dock', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'leaving_dock', - 'unique_id': '1234_leaving_dock', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor_snapshot[binary_sensor.test_mower_2_leaving_dock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Mower 2 Leaving dock', - }), - 'context': , - 'entity_id': 'binary_sensor.test_mower_2_leaving_dock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_snapshot[binary_sensor.test_mower_2_returning_to_dock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.test_mower_2_returning_to_dock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ 
- }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Returning to dock', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'returning_to_dock', - 'unique_id': '1234_returning_to_dock', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor_snapshot[binary_sensor.test_mower_2_returning_to_dock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Mower 2 Returning to dock', - }), - 'context': , - 'entity_id': 'binary_sensor.test_mower_2_returning_to_dock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/husqvarna_automower/snapshots/test_button.ambr b/tests/components/husqvarna_automower/snapshots/test_button.ambr index 2ce3aae3065..ab2cb427f1a 100644 --- a/tests/components/husqvarna_automower/snapshots/test_button.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_button.ambr @@ -45,95 +45,3 @@ 'state': 'unavailable', }) # --- -# name: test_button_snapshot[button.test_mower_1_sync_clock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_mower_1_sync_clock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Sync clock', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'sync_clock', - 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_sync_clock', - 'unit_of_measurement': None, - }) -# --- -# name: test_button_snapshot[button.test_mower_1_sync_clock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Mower 1 Sync clock', - }), - 'context': , - 'entity_id': 'button.test_mower_1_sync_clock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_button_snapshot[button.test_mower_2_sync_clock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_mower_2_sync_clock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Sync clock', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'sync_clock', - 'unique_id': '1234_sync_clock', - 'unit_of_measurement': None, - }) -# --- -# name: test_button_snapshot[button.test_mower_2_sync_clock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Mower 2 Sync clock', - }), - 'context': , - 'entity_id': 'button.test_mower_2_sync_clock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/husqvarna_automower/snapshots/test_calendar.ambr b/tests/components/husqvarna_automower/snapshots/test_calendar.ambr deleted file mode 100644 index 7cd8c68b624..00000000000 --- 
a/tests/components/husqvarna_automower/snapshots/test_calendar.ambr +++ /dev/null @@ -1,88 +0,0 @@ -# serializer version: 1 -# name: test_calendar_snapshot[start_date0-end_date0] - dict({ - 'calendar.test_mower_1': dict({ - 'events': list([ - dict({ - 'end': '2023-06-05T09:00:00+02:00', - 'start': '2023-06-05T01:00:00+02:00', - 'summary': 'Back lawn schedule 2', - }), - dict({ - 'end': '2023-06-06T00:00:00+02:00', - 'start': '2023-06-05T19:00:00+02:00', - 'summary': 'Front lawn schedule 1', - }), - dict({ - 'end': '2023-06-06T08:00:00+02:00', - 'start': '2023-06-06T00:00:00+02:00', - 'summary': 'Back lawn schedule 1', - }), - dict({ - 'end': '2023-06-06T08:00:00+02:00', - 'start': '2023-06-06T00:00:00+02:00', - 'summary': 'Front lawn schedule 2', - }), - dict({ - 'end': '2023-06-06T09:00:00+02:00', - 'start': '2023-06-06T01:00:00+02:00', - 'summary': 'Back lawn schedule 2', - }), - dict({ - 'end': '2023-06-08T00:00:00+02:00', - 'start': '2023-06-07T19:00:00+02:00', - 'summary': 'Front lawn schedule 1', - }), - dict({ - 'end': '2023-06-08T08:00:00+02:00', - 'start': '2023-06-08T00:00:00+02:00', - 'summary': 'Back lawn schedule 1', - }), - dict({ - 'end': '2023-06-08T08:00:00+02:00', - 'start': '2023-06-08T00:00:00+02:00', - 'summary': 'Front lawn schedule 2', - }), - dict({ - 'end': '2023-06-08T09:00:00+02:00', - 'start': '2023-06-08T01:00:00+02:00', - 'summary': 'Back lawn schedule 2', - }), - dict({ - 'end': '2023-06-10T00:00:00+02:00', - 'start': '2023-06-09T19:00:00+02:00', - 'summary': 'Front lawn schedule 1', - }), - dict({ - 'end': '2023-06-10T08:00:00+02:00', - 'start': '2023-06-10T00:00:00+02:00', - 'summary': 'Back lawn schedule 1', - }), - dict({ - 'end': '2023-06-10T08:00:00+02:00', - 'start': '2023-06-10T00:00:00+02:00', - 'summary': 'Front lawn schedule 2', - }), - dict({ - 'end': '2023-06-10T09:00:00+02:00', - 'start': '2023-06-10T01:00:00+02:00', - 'summary': 'Back lawn schedule 2', - }), - dict({ - 'end': '2023-06-12T09:00:00+02:00', - 'start': '2023-06-12T01:00:00+02:00', - 'summary': 'Back lawn schedule 2', - }), - ]), - }), - 'calendar.test_mower_2': dict({ - 'events': list([ - dict({ - 'end': '2023-06-05T02:49:00+02:00', - 'start': '2023-06-05T02:00:00+02:00', - 'summary': 'Schedule 1', - }), - ]), - }), - }) -# --- diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index ee9b7510770..d8cd748c793 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -5,6 +5,22 @@ 'battery_percent': 100, }), 'calendar': dict({ + 'events': list([ + dict({ + 'end': '2024-03-02T00:00:00+00:00', + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,WE,FR', + 'start': '2024-03-01T19:00:00+00:00', + 'uid': '1140_300_MO,WE,FR', + 'work_area_id': None, + }), + dict({ + 'end': '2024-03-02T08:00:00+00:00', + 'rrule': 'FREQ=WEEKLY;BYDAY=TU,TH,SA', + 'start': '2024-03-02T00:00:00+00:00', + 'uid': '0_480_TU,TH,SA', + 'work_area_id': None, + }), + ]), 'tasks': list([ dict({ 'duration': 300, @@ -16,7 +32,7 @@ 'thursday': False, 'tuesday': False, 'wednesday': True, - 'workAreaId': 123456, + 'work_area_id': None, }), dict({ 'duration': 480, @@ -28,36 +44,11 @@ 'thursday': True, 'tuesday': True, 'wednesday': False, - 'workAreaId': 123456, - }), - dict({ - 'duration': 480, - 'friday': False, - 'monday': False, - 'saturday': True, - 'start': 0, - 'sunday': False, - 'thursday': True, - 'tuesday': True, - 'wednesday': False, 
- 'workAreaId': 654321, - }), - dict({ - 'duration': 480, - 'friday': False, - 'monday': True, - 'saturday': True, - 'start': 60, - 'sunday': False, - 'thursday': True, - 'tuesday': True, - 'wednesday': False, - 'workAreaId': 654321, + 'work_area_id': None, }), ]), }), 'capabilities': dict({ - 'can_confirm_error': True, 'headlights': True, 'position': True, 'stay_out_zones': True, @@ -65,36 +56,33 @@ }), 'metadata': dict({ 'connected': True, - 'status_dateteime': '2023-06-05T00:00:00+00:00', + 'status_dateteime': '2023-10-18T22:58:52.683000+00:00', }), 'mower': dict({ - 'activity': 'parked_in_cs', + 'activity': 'PARKED_IN_CS', 'error_code': 0, 'error_datetime': None, 'error_datetime_naive': None, 'error_key': None, - 'error_timestamp': 0, - 'inactive_reason': 'none', + 'inactive_reason': 'NONE', 'is_error_confirmable': False, - 'mode': 'main_area', - 'state': 'restricted', + 'mode': 'MAIN_AREA', + 'state': 'RESTRICTED', 'work_area_id': 123456, - 'work_area_name': 'Front lawn', }), 'planner': dict({ - 'next_start': 1685991600000, - 'next_start_datetime': '2023-06-05T19:00:00+02:00', + 'next_start_datetime': '2023-06-05T19:00:00+00:00', 'next_start_datetime_naive': '2023-06-05T19:00:00', 'override': dict({ - 'action': 'not_active', + 'action': 'NOT_ACTIVE', }), - 'restricted_reason': 'week_schedule', + 'restricted_reason': 'WEEK_SCHEDULE', }), 'positions': '**REDACTED**', 'settings': dict({ 'cutting_height': 4, 'headlight': dict({ - 'mode': 'evening_only', + 'mode': 'EVENING_ONLY', }), }), 'statistics': dict({ @@ -121,45 +109,22 @@ }), }), 'system': dict({ - 'model': 'HUSQVARNA AUTOMOWER® 450XH', + 'model': '450XH-TEST', 'name': 'Test Mower 1', 'serial_number': 123, }), - 'work_area_dict': dict({ - '0': 'my_lawn', - '123456': 'Front lawn', - '654321': 'Back lawn', - }), - 'work_area_names': list([ - 'Front lawn', - 'Back lawn', - 'my_lawn', - 'no_work_area_active', - ]), 'work_areas': dict({ '0': dict({ 'cutting_height': 50, - 'enabled': False, - 'last_time_completed': '2024-08-12T05:07:49+02:00', - 'last_time_completed_naive': '2024-08-12T05:07:49', 'name': 'my_lawn', - 'progress': 20, }), '123456': dict({ 'cutting_height': 50, - 'enabled': True, - 'last_time_completed': '2024-08-12T07:54:29+02:00', - 'last_time_completed_naive': '2024-08-12T07:54:29', 'name': 'Front lawn', - 'progress': 40, }), '654321': dict({ 'cutting_height': 25, - 'enabled': True, - 'last_time_completed': None, - 'last_time_completed_naive': None, 'name': 'Back lawn', - 'progress': None, }), }), }) @@ -170,7 +135,7 @@ 'auth_implementation': 'husqvarna_automower', 'token': dict({ 'access_token': '**REDACTED**', - 'expires_at': 1685919600.0, + 'expires_at': 1709208000.0, 'expires_in': 86399, 'provider': 'husqvarna', 'refresh_token': '**REDACTED**', @@ -180,8 +145,6 @@ }), }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'husqvarna_automower', 'entry_id': 'automower_test', 'minor_version': 1, diff --git a/tests/components/husqvarna_automower/snapshots/test_init.ambr b/tests/components/husqvarna_automower/snapshots/test_init.ambr index e79bd1f8145..efe1eb8bd51 100644 --- a/tests/components/husqvarna_automower/snapshots/test_init.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_init.ambr @@ -20,8 +20,7 @@ 'labels': set({ }), 'manufacturer': 'Husqvarna', - 'model': 'AUTOMOWER® 450XH', - 'model_id': None, + 'model': '450XH-TEST', 'name': 'Test Mower 1', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/husqvarna_automower/snapshots/test_number.ambr 
b/tests/components/husqvarna_automower/snapshots/test_number.ambr index b0ccce5800a..de8b397f01c 100644 --- a/tests/components/husqvarna_automower/snapshots/test_number.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_number.ambr @@ -32,7 +32,7 @@ 'platform': 'husqvarna_automower', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'work_area_cutting_height_work_area', + 'translation_key': 'work_area_cutting_height', 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_654321_cutting_height_work_area', 'unit_of_measurement': '%', }) @@ -143,7 +143,7 @@ 'platform': 'husqvarna_automower', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'work_area_cutting_height_work_area', + 'translation_key': 'work_area_cutting_height', 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_123456_cutting_height_work_area', 'unit_of_measurement': '%', }) @@ -195,11 +195,11 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'My lawn cutting height', + 'original_name': 'My lawn cutting height ', 'platform': 'husqvarna_automower', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'my_lawn_cutting_height_work_area', + 'translation_key': 'my_lawn_cutting_height', 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_0_cutting_height_work_area', 'unit_of_measurement': '%', }) @@ -207,7 +207,7 @@ # name: test_number_snapshot[number.test_mower_1_my_lawn_cutting_height-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Mower 1 My lawn cutting height', + 'friendly_name': 'Test Mower 1 My lawn cutting height ', 'max': 100.0, 'min': 0.0, 'mode': , diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index d57a829a997..0b0d76620d3 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -162,9 +162,6 @@ 'docking_sensor_defect', 'electronic_problem', 'empty_battery', - 'error', - 'error_at_power_up', - 'fatal_error', 'folding_cutting_deck_sensor_defect', 'folding_sensor_activated', 'geofence_problem', @@ -343,9 +340,6 @@ 'docking_sensor_defect', 'electronic_problem', 'empty_battery', - 'error', - 'error_at_power_up', - 'fatal_error', 'folding_cutting_deck_sensor_defect', 'folding_sensor_activated', 'geofence_problem', @@ -448,103 +442,6 @@ 'state': 'no_error', }) # --- -# name: test_sensor_snapshot[sensor.test_mower_1_front_lawn_last_time_completed-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_mower_1_front_lawn_last_time_completed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front lawn last time completed', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'work_area_last_time_completed', - 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_123456_last_time_completed', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_1_front_lawn_last_time_completed-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'device_class': 'timestamp', - 'friendly_name': 'Test Mower 1 Front lawn last time completed', - }), - 'context': , - 'entity_id': 'sensor.test_mower_1_front_lawn_last_time_completed', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-08-12T05:54:29+00:00', - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_1_front_lawn_progress-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_mower_1_front_lawn_progress', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Front lawn progress', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'work_area_progress', - 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_123456_progress', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_1_front_lawn_progress-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Mower 1 Front lawn progress', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_mower_1_front_lawn_progress', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40', - }) -# --- # name: test_sensor_snapshot[sensor.test_mower_1_mode-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -552,11 +449,11 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - , - , - , - , - , + 'main_area', + 'demo', + 'secondary_area', + 'home', + 'unknown', ]), }), 'config_entry_id': , @@ -592,11 +489,11 @@ 'device_class': 'enum', 'friendly_name': 'Test Mower 1 Mode', 'options': list([ - , - , - , - , - , + 'main_area', + 'demo', + 'secondary_area', + 'home', + 'unknown', ]), }), 'context': , @@ -607,103 +504,6 @@ 'state': 'main_area', }) # --- -# name: test_sensor_snapshot[sensor.test_mower_1_my_lawn_last_time_completed-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_mower_1_my_lawn_last_time_completed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'My lawn last time completed', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'my_lawn_last_time_completed', - 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_0_last_time_completed', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_1_my_lawn_last_time_completed-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Test Mower 1 My lawn last time completed', - }), - 'context': , - 'entity_id': 'sensor.test_mower_1_my_lawn_last_time_completed', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-08-12T03:07:49+00:00', - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_1_my_lawn_progress-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_mower_1_my_lawn_progress', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'My lawn progress', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'my_lawn_progress', - 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_0_progress', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_1_my_lawn_progress-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Mower 1 My lawn progress', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_mower_1_my_lawn_progress', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- # name: test_sensor_snapshot[sensor.test_mower_1_next_start-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -748,7 +548,65 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2023-06-05T17:00:00+00:00', + 'state': '2023-06-05T19:00:00+00:00', + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Front lawn', + 'Back lawn', + 'my_lawn', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_1_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'work_area', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_work_area', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Mower 1 None', + 'options': list([ + 'Front lawn', + 'Back lawn', + 'my_lawn', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_mower_1_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Front lawn', }) # --- # name: test_sensor_snapshot[sensor.test_mower_1_number_of_charging_cycles-entry] @@ -856,16 +714,16 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - , - , - , - , - , - , - , - , - , - , + 'all_work_areas_completed', + 'daily_limit', + 'external', + 'fota', + 'frost', + 'none', + 'not_applicable', + 'park_override', + 'sensor', + 'week_schedule', ]), }), 'config_entry_id': , @@ -901,16 +759,16 @@ 'device_class': 'enum', 'friendly_name': 'Test Mower 1 Restricted reason', 'options': list([ - , - , - , - , - , - , - , - , - , - , + 'all_work_areas_completed', + 'daily_limit', + 'external', + 'fota', + 'frost', + 'none', + 'not_applicable', + 'park_override', + 'sensor', + 'week_schedule', ]), }), 'context': , @@ -1201,7 +1059,6 @@ 'Front lawn', 'Back lawn', 'my_lawn', - 
'no_work_area_active', ]), }), 'config_entry_id': , @@ -1240,13 +1097,7 @@ 'Front lawn', 'Back lawn', 'my_lawn', - 'no_work_area_active', ]), - 'work_area_id_assignment': dict({ - 0: 'my_lawn', - 123456: 'Front lawn', - 654321: 'Back lawn', - }), }), 'context': , 'entity_id': 'sensor.test_mower_1_work_area', @@ -1256,579 +1107,3 @@ 'state': 'Front lawn', }) # --- -# name: test_sensor_snapshot[sensor.test_mower_2_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_mower_2_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1234_battery_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_2_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Test Mower 2 Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_mower_2_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50', - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_2_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'no_error', - 'alarm_mower_in_motion', - 'alarm_mower_lifted', - 'alarm_mower_stopped', - 'alarm_mower_switched_off', - 'alarm_mower_tilted', - 'alarm_outside_geofence', - 'angular_sensor_problem', - 'battery_problem', - 'battery_problem', - 'battery_restriction_due_to_ambient_temperature', - 'can_error', - 'charging_current_too_high', - 'charging_station_blocked', - 'charging_system_problem', - 'charging_system_problem', - 'collision_sensor_defect', - 'collision_sensor_error', - 'collision_sensor_problem_front', - 'collision_sensor_problem_rear', - 'com_board_not_available', - 'communication_circuit_board_sw_must_be_updated', - 'complex_working_area', - 'connection_changed', - 'connection_not_changed', - 'connectivity_problem', - 'connectivity_problem', - 'connectivity_problem', - 'connectivity_problem', - 'connectivity_problem', - 'connectivity_problem', - 'connectivity_settings_restored', - 'cutting_drive_motor_1_defect', - 'cutting_drive_motor_2_defect', - 'cutting_drive_motor_3_defect', - 'cutting_height_blocked', - 'cutting_height_problem', - 'cutting_height_problem_curr', - 'cutting_height_problem_dir', - 'cutting_height_problem_drive', - 'cutting_motor_problem', - 'cutting_stopped_slope_too_steep', - 'cutting_system_blocked', - 'cutting_system_blocked', - 'cutting_system_imbalance_warning', - 'cutting_system_major_imbalance', - 'destination_not_reachable', - 'difficult_finding_home', - 'docking_sensor_defect', - 'electronic_problem', - 'empty_battery', - 'error', - 'error_at_power_up', - 'fatal_error', - 'folding_cutting_deck_sensor_defect', - 'folding_sensor_activated', - 'geofence_problem', - 'geofence_problem', - 'gps_navigation_problem', - 'guide_1_not_found', - 'guide_2_not_found', - 'guide_3_not_found', - 'guide_calibration_accomplished', - 
'guide_calibration_failed', - 'high_charging_power_loss', - 'high_internal_power_loss', - 'high_internal_temperature', - 'internal_voltage_error', - 'invalid_battery_combination_invalid_combination_of_different_battery_types', - 'invalid_sub_device_combination', - 'invalid_system_configuration', - 'left_brush_motor_overloaded', - 'lift_sensor_defect', - 'lifted', - 'limited_cutting_height_range', - 'limited_cutting_height_range', - 'loop_sensor_defect', - 'loop_sensor_problem_front', - 'loop_sensor_problem_left', - 'loop_sensor_problem_rear', - 'loop_sensor_problem_right', - 'low_battery', - 'memory_circuit_problem', - 'mower_lifted', - 'mower_tilted', - 'no_accurate_position_from_satellites', - 'no_confirmed_position', - 'no_drive', - 'no_loop_signal', - 'no_power_in_charging_station', - 'no_response_from_charger', - 'outside_working_area', - 'poor_signal_quality', - 'reference_station_communication_problem', - 'right_brush_motor_overloaded', - 'safety_function_faulty', - 'settings_restored', - 'sim_card_locked', - 'sim_card_locked', - 'sim_card_locked', - 'sim_card_locked', - 'sim_card_not_found', - 'sim_card_requires_pin', - 'slipped_mower_has_slipped_situation_not_solved_with_moving_pattern', - 'slope_too_steep', - 'sms_could_not_be_sent', - 'stop_button_problem', - 'stuck_in_charging_station', - 'switch_cord_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'tilt_sensor_problem', - 'too_high_discharge_current', - 'too_high_internal_current', - 'trapped', - 'ultrasonic_problem', - 'ultrasonic_sensor_1_defect', - 'ultrasonic_sensor_2_defect', - 'ultrasonic_sensor_3_defect', - 'ultrasonic_sensor_4_defect', - 'unexpected_cutting_height_adj', - 'unexpected_error', - 'upside_down', - 'weak_gps_signal', - 'wheel_drive_problem_left', - 'wheel_drive_problem_rear_left', - 'wheel_drive_problem_rear_right', - 'wheel_drive_problem_right', - 'wheel_motor_blocked_left', - 'wheel_motor_blocked_rear_left', - 'wheel_motor_blocked_rear_right', - 'wheel_motor_blocked_right', - 'wheel_motor_overloaded_left', - 'wheel_motor_overloaded_rear_left', - 'wheel_motor_overloaded_rear_right', - 'wheel_motor_overloaded_right', - 'work_area_not_valid', - 'wrong_loop_signal', - 'wrong_pin_code', - 'zone_generator_problem', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_mower_2_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Error', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'error', - 'unique_id': '1234_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_2_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Test Mower 2 Error', - 'options': list([ - 'no_error', - 'alarm_mower_in_motion', - 'alarm_mower_lifted', - 'alarm_mower_stopped', - 'alarm_mower_switched_off', - 'alarm_mower_tilted', - 'alarm_outside_geofence', - 'angular_sensor_problem', - 'battery_problem', - 'battery_problem', - 'battery_restriction_due_to_ambient_temperature', - 
'can_error', - 'charging_current_too_high', - 'charging_station_blocked', - 'charging_system_problem', - 'charging_system_problem', - 'collision_sensor_defect', - 'collision_sensor_error', - 'collision_sensor_problem_front', - 'collision_sensor_problem_rear', - 'com_board_not_available', - 'communication_circuit_board_sw_must_be_updated', - 'complex_working_area', - 'connection_changed', - 'connection_not_changed', - 'connectivity_problem', - 'connectivity_problem', - 'connectivity_problem', - 'connectivity_problem', - 'connectivity_problem', - 'connectivity_problem', - 'connectivity_settings_restored', - 'cutting_drive_motor_1_defect', - 'cutting_drive_motor_2_defect', - 'cutting_drive_motor_3_defect', - 'cutting_height_blocked', - 'cutting_height_problem', - 'cutting_height_problem_curr', - 'cutting_height_problem_dir', - 'cutting_height_problem_drive', - 'cutting_motor_problem', - 'cutting_stopped_slope_too_steep', - 'cutting_system_blocked', - 'cutting_system_blocked', - 'cutting_system_imbalance_warning', - 'cutting_system_major_imbalance', - 'destination_not_reachable', - 'difficult_finding_home', - 'docking_sensor_defect', - 'electronic_problem', - 'empty_battery', - 'error', - 'error_at_power_up', - 'fatal_error', - 'folding_cutting_deck_sensor_defect', - 'folding_sensor_activated', - 'geofence_problem', - 'geofence_problem', - 'gps_navigation_problem', - 'guide_1_not_found', - 'guide_2_not_found', - 'guide_3_not_found', - 'guide_calibration_accomplished', - 'guide_calibration_failed', - 'high_charging_power_loss', - 'high_internal_power_loss', - 'high_internal_temperature', - 'internal_voltage_error', - 'invalid_battery_combination_invalid_combination_of_different_battery_types', - 'invalid_sub_device_combination', - 'invalid_system_configuration', - 'left_brush_motor_overloaded', - 'lift_sensor_defect', - 'lifted', - 'limited_cutting_height_range', - 'limited_cutting_height_range', - 'loop_sensor_defect', - 'loop_sensor_problem_front', - 'loop_sensor_problem_left', - 'loop_sensor_problem_rear', - 'loop_sensor_problem_right', - 'low_battery', - 'memory_circuit_problem', - 'mower_lifted', - 'mower_tilted', - 'no_accurate_position_from_satellites', - 'no_confirmed_position', - 'no_drive', - 'no_loop_signal', - 'no_power_in_charging_station', - 'no_response_from_charger', - 'outside_working_area', - 'poor_signal_quality', - 'reference_station_communication_problem', - 'right_brush_motor_overloaded', - 'safety_function_faulty', - 'settings_restored', - 'sim_card_locked', - 'sim_card_locked', - 'sim_card_locked', - 'sim_card_locked', - 'sim_card_not_found', - 'sim_card_requires_pin', - 'slipped_mower_has_slipped_situation_not_solved_with_moving_pattern', - 'slope_too_steep', - 'sms_could_not_be_sent', - 'stop_button_problem', - 'stuck_in_charging_station', - 'switch_cord_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'temporary_battery_problem', - 'tilt_sensor_problem', - 'too_high_discharge_current', - 'too_high_internal_current', - 'trapped', - 'ultrasonic_problem', - 'ultrasonic_sensor_1_defect', - 'ultrasonic_sensor_2_defect', - 'ultrasonic_sensor_3_defect', - 'ultrasonic_sensor_4_defect', - 'unexpected_cutting_height_adj', - 'unexpected_error', - 'upside_down', - 'weak_gps_signal', - 'wheel_drive_problem_left', - 'wheel_drive_problem_rear_left', - 'wheel_drive_problem_rear_right', - 
'wheel_drive_problem_right', - 'wheel_motor_blocked_left', - 'wheel_motor_blocked_rear_left', - 'wheel_motor_blocked_rear_right', - 'wheel_motor_blocked_right', - 'wheel_motor_overloaded_left', - 'wheel_motor_overloaded_rear_left', - 'wheel_motor_overloaded_rear_right', - 'wheel_motor_overloaded_right', - 'work_area_not_valid', - 'wrong_loop_signal', - 'wrong_pin_code', - 'zone_generator_problem', - ]), - }), - 'context': , - 'entity_id': 'sensor.test_mower_2_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'no_error', - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_2_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_mower_2_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Mode', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': '1234_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_2_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Test Mower 2 Mode', - 'options': list([ - , - , - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.test_mower_2_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'main_area', - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_2_next_start-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_mower_2_next_start', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Next start', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'next_start_timestamp', - 'unique_id': '1234_next_start_timestamp', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_2_next_start-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Test Mower 2 Next start', - }), - 'context': , - 'entity_id': 'sensor.test_mower_2_next_start', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2023-06-05T17:00:00+00:00', - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_2_restricted_reason-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - , - , - , - , - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_mower_2_restricted_reason', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 
'original_icon': None, - 'original_name': 'Restricted reason', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'restricted_reason', - 'unique_id': '1234_restricted_reason', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_2_restricted_reason-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Test Mower 2 Restricted reason', - 'options': list([ - , - , - , - , - , - , - , - , - , - , - ]), - }), - 'context': , - 'entity_id': 'sensor.test_mower_2_restricted_reason', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'week_schedule', - }) -# --- diff --git a/tests/components/husqvarna_automower/snapshots/test_switch.ambr b/tests/components/husqvarna_automower/snapshots/test_switch.ambr index 8f8f6b367c0..f52462496ff 100644 --- a/tests/components/husqvarna_automower/snapshots/test_switch.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_switch.ambr @@ -91,52 +91,6 @@ 'state': 'on', }) # --- -# name: test_switch_snapshot[switch.test_mower_1_back_lawn-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.test_mower_1_back_lawn', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Back lawn', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'work_area_work_area', - 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_654321_work_area', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_snapshot[switch.test_mower_1_back_lawn-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Mower 1 Back lawn', - }), - 'context': , - 'entity_id': 'switch.test_mower_1_back_lawn', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_switch_snapshot[switch.test_mower_1_enable_schedule-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -183,141 +137,3 @@ 'state': 'on', }) # --- -# name: test_switch_snapshot[switch.test_mower_1_front_lawn-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.test_mower_1_front_lawn', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Front lawn', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'work_area_work_area', - 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_123456_work_area', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_snapshot[switch.test_mower_1_front_lawn-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Mower 1 Front lawn', - }), - 'context': , - 'entity_id': 'switch.test_mower_1_front_lawn', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 
'on', - }) -# --- -# name: test_switch_snapshot[switch.test_mower_1_my_lawn-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.test_mower_1_my_lawn', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'My lawn', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'my_lawn_work_area', - 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_0_work_area', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_snapshot[switch.test_mower_1_my_lawn-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Mower 1 My lawn', - }), - 'context': , - 'entity_id': 'switch.test_mower_1_my_lawn', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch_snapshot[switch.test_mower_2_enable_schedule-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.test_mower_2_enable_schedule', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Enable schedule', - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'enable_schedule', - 'unique_id': '1234_enable_schedule', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_snapshot[switch.test_mower_2_enable_schedule-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Mower 2 Enable schedule', - }), - 'context': , - 'entity_id': 'switch.test_mower_2_enable_schedule', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/husqvarna_automower/test_binary_sensor.py b/tests/components/husqvarna_automower/test_binary_sensor.py index 858dc03b93f..fceaeee2321 100644 --- a/tests/components/husqvarna_automower/test_binary_sensor.py +++ b/tests/components/husqvarna_automower/test_binary_sensor.py @@ -2,10 +2,12 @@ from unittest.mock import AsyncMock, patch -from aioautomower.model import MowerActivities, MowerAttributes +from aioautomower.model import MowerActivities +from aioautomower.utils import mower_list_to_dictionary_dataclass from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion +from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -14,7 +16,12 @@ from homeassistant.helpers import entity_registry as er from . 
import setup_integration from .const import TEST_MOWER_ID -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_value_fixture, + snapshot_platform, +) async def test_binary_sensor_states( @@ -22,9 +29,11 @@ async def test_binary_sensor_states( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, - values: dict[str, MowerAttributes], ) -> None: """Test binary sensor states.""" + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) await setup_integration(hass, mock_config_entry) state = hass.states.get("binary_sensor.test_mower_1_charging") assert state is not None diff --git a/tests/components/husqvarna_automower/test_button.py b/tests/components/husqvarna_automower/test_button.py index 25fa64b531f..6cc465df74b 100644 --- a/tests/components/husqvarna_automower/test_button.py +++ b/tests/components/husqvarna_automower/test_button.py @@ -4,12 +4,13 @@ import datetime from unittest.mock import AsyncMock, patch from aioautomower.exceptions import ApiException -from aioautomower.model import MowerAttributes +from aioautomower.utils import mower_list_to_dictionary_dataclass from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.button import SERVICE_PRESS +from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.const import ( ATTR_ENTITY_ID, @@ -24,24 +25,32 @@ from homeassistant.helpers import entity_registry as er from . 
import setup_integration from .const import TEST_MOWER_ID -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_value_fixture, + snapshot_platform, +) -@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5, tzinfo=datetime.UTC)) +@pytest.mark.freeze_time(datetime.datetime(2024, 2, 29, 11, tzinfo=datetime.UTC)) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_button_states_and_commands( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, - values: dict[str, MowerAttributes], ) -> None: - """Test error confirm button command.""" + """Test button commands.""" entity_id = "button.test_mower_1_confirm_error" await setup_integration(hass, mock_config_entry) state = hass.states.get(entity_id) assert state.name == "Test Mower 1 Confirm error" assert state.state == STATE_UNAVAILABLE + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) values[TEST_MOWER_ID].mower.is_error_confirmable = None mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) @@ -68,7 +77,7 @@ async def test_button_states_and_commands( mocked_method.assert_called_once_with(TEST_MOWER_ID) await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == "2023-06-05T00:16:00+00:00" + assert state.state == "2024-02-29T11:16:00+00:00" getattr(mock_automower_client.commands, "error_confirm").side_effect = ApiException( "Test error" ) @@ -84,46 +93,6 @@ async def test_button_states_and_commands( ) -@pytest.mark.freeze_time(datetime.datetime(2024, 2, 29, 11, tzinfo=datetime.UTC)) -async def test_sync_clock( - hass: HomeAssistant, - mock_automower_client: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - values: dict[str, MowerAttributes], -) -> None: - """Test sync clock button command.""" - entity_id = "button.test_mower_1_sync_clock" - await setup_integration(hass, mock_config_entry) - state = hass.states.get(entity_id) - assert state.name == "Test Mower 1 Sync clock" - - mock_automower_client.get_status.return_value = values - - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mocked_method = mock_automower_client.commands.set_datetime - mocked_method.assert_called_once_with(TEST_MOWER_ID) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == "2024-02-29T11:00:00+00:00" - mock_automower_client.commands.set_datetime.side_effect = ApiException("Test error") - with pytest.raises( - HomeAssistantError, - match="Failed to send command: Test error", - ): - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_button_snapshot( hass: HomeAssistant, diff --git a/tests/components/husqvarna_automower/test_calendar.py b/tests/components/husqvarna_automower/test_calendar.py deleted file mode 100644 index 8138b8c139b..00000000000 --- a/tests/components/husqvarna_automower/test_calendar.py +++ /dev/null @@ -1,154 +0,0 @@ -"""Tests for calendar platform.""" - -from collections.abc import Awaitable, Callable -import datetime -from http import HTTPStatus -from typing import Any -from unittest.mock import AsyncMock -import urllib -import zoneinfo - 
-from aioautomower.utils import mower_list_to_dictionary_dataclass -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.calendar import ( - DOMAIN as CALENDAR_DOMAIN, - EVENT_END_DATETIME, - EVENT_START_DATETIME, - SERVICE_GET_EVENTS, -) -from homeassistant.components.husqvarna_automower.const import DOMAIN -from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, -) -from tests.typing import ClientSessionGenerator - -TEST_ENTITY = "calendar.test_mower_1" -type GetEventsFn = Callable[[str, str], Awaitable[dict[str, Any]]] - - -@pytest.fixture(name="get_events") -def get_events_fixture( - hass_client: ClientSessionGenerator, -) -> GetEventsFn: - """Fetch calendar events from the HTTP API.""" - - async def _fetch(start: str, end: str) -> list[dict[str, Any]]: - client = await hass_client() - response = await client.get( - f"/api/calendars/{TEST_ENTITY}?start={urllib.parse.quote(start)}&end={urllib.parse.quote(end)}" - ) - assert response.status == HTTPStatus.OK - results = await response.json() - return [{k: event[k] for k in ("summary", "start", "end")} for event in results] - - return _fetch - - -@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5, 12)) -async def test_calendar_state_off( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_automower_client: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """State test of the calendar.""" - await setup_integration(hass, mock_config_entry) - state = hass.states.get("calendar.test_mower_1") - assert state is not None - assert state.state == "off" - - -@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5, 19)) -async def test_calendar_state_on( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_automower_client: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """State test of the calendar.""" - await setup_integration(hass, mock_config_entry) - state = hass.states.get("calendar.test_mower_1") - assert state is not None - assert state.state == "on" - - -@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5)) -async def test_empty_calendar( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_automower_client: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - get_events: GetEventsFn, - mower_time_zone: zoneinfo.ZoneInfo, -) -> None: - """State if there is no schedule set.""" - await setup_integration(hass, mock_config_entry) - json_values = load_json_value_fixture("mower.json", DOMAIN) - json_values["data"][0]["attributes"]["calendar"]["tasks"] = [] - values = mower_list_to_dictionary_dataclass( - json_values, - mower_time_zone, - ) - mock_automower_client.get_status.return_value = values - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - state = hass.states.get("calendar.test_mower_1") - assert state is not None - assert state.state == "off" - events = await get_events("2023-06-05T00:00:00", "2023-06-12T00:00:00") - assert events == [] - - -@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5)) -@pytest.mark.parametrize( - ( 
- "start_date", - "end_date", - ), - [ - ( - datetime.datetime(2023, 6, 5, tzinfo=datetime.UTC), - datetime.datetime(2023, 6, 12, tzinfo=datetime.UTC), - ), - ], -) -async def test_calendar_snapshot( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_automower_client: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - start_date: datetime, - end_date: datetime, -) -> None: - """Snapshot test of the calendar entity.""" - await setup_integration(hass, mock_config_entry) - events = await hass.services.async_call( - CALENDAR_DOMAIN, - SERVICE_GET_EVENTS, - { - ATTR_ENTITY_ID: ["calendar.test_mower_1", "calendar.test_mower_2"], - EVENT_START_DATETIME: start_date, - EVENT_END_DATETIME: end_date, - }, - blocking=True, - return_response=True, - ) - - assert events == snapshot diff --git a/tests/components/husqvarna_automower/test_diagnostics.py b/tests/components/husqvarna_automower/test_diagnostics.py index 2b47bff25a4..eeb6b46e6c4 100644 --- a/tests/components/husqvarna_automower/test_diagnostics.py +++ b/tests/components/husqvarna_automower/test_diagnostics.py @@ -2,11 +2,9 @@ import datetime from unittest.mock import AsyncMock -import zoneinfo import pytest from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.core import HomeAssistant @@ -22,9 +20,7 @@ from tests.components.diagnostics import ( from tests.typing import ClientSessionGenerator -@pytest.mark.freeze_time( - datetime.datetime(2023, 6, 5, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")) -) +@pytest.mark.freeze_time(datetime.datetime(2024, 2, 29, 11, tzinfo=datetime.UTC)) async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -40,12 +36,10 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot -@pytest.mark.freeze_time( - datetime.datetime(2023, 6, 5, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")) -) +@pytest.mark.freeze_time(datetime.datetime(2024, 2, 29, 11, tzinfo=datetime.UTC)) async def test_device_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -54,7 +48,7 @@ async def test_device_diagnostics( mock_config_entry: MockConfigEntry, device_registry: dr.DeviceRegistry, ) -> None: - """Test device diagnostics platform.""" + """Test select platform.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) diff --git a/tests/components/husqvarna_automower/test_init.py b/tests/components/husqvarna_automower/test_init.py index ae688571d2c..84fe1b9e891 100644 --- a/tests/components/husqvarna_automower/test_init.py +++ b/tests/components/husqvarna_automower/test_init.py @@ -1,28 +1,23 @@ """Tests for init module.""" -from asyncio import Event -from datetime import datetime +from datetime import timedelta import http import time -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock from aioautomower.exceptions import ( ApiException, AuthException, HusqvarnaWSServerHandshakeError, - TimeoutException, ) -from aioautomower.model import MowerAttributes, WorkArea from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.husqvarna_automower.const import DOMAIN, OAUTH2_TOKEN -from 
homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.util import dt as dt_util +from homeassistant.helpers import device_registry as dr from . import setup_integration from .const import TEST_MOWER_ID @@ -30,10 +25,6 @@ from .const import TEST_MOWER_ID from tests.common import MockConfigEntry, async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker -ADDITIONAL_NUMBER_ENTITIES = 1 -ADDITIONAL_SENSOR_ENTITIES = 2 -ADDITIONAL_SWITCH_ENTITIES = 1 - async def test_load_unload_entry( hass: HomeAssistant, @@ -129,77 +120,28 @@ async def test_update_failed( assert entry.state is entry_state -@patch( - "homeassistant.components.husqvarna_automower.coordinator.DEFAULT_RECONNECT_TIME", 0 -) -@pytest.mark.parametrize( - ("method_path", "exception", "error_msg"), - [ - ( - ["auth", "websocket_connect"], - HusqvarnaWSServerHandshakeError, - "Failed to connect to websocket.", - ), - ( - ["start_listening"], - TimeoutException, - "Failed to listen to websocket.", - ), - ], -) async def test_websocket_not_available( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, caplog: pytest.LogCaptureFixture, freezer: FrozenDateTimeFactory, - method_path: list[str], - exception: type[Exception], - error_msg: str, ) -> None: - """Test trying to reload the websocket.""" - calls = [] - mock_called = Event() - mock_stall = Event() - - async def mock_function(): - mock_called.set() - await mock_stall.wait() - # Raise the first time the method is awaited - if not calls: - calls.append(None) - raise exception("Boom") - if mock_side_effect: - await mock_side_effect() - - # Find the method to mock - mock = mock_automower_client - for itm in method_path: - mock = getattr(mock, itm) - mock_side_effect = mock.side_effect - mock.side_effect = mock_function - - # Setup integration and verify log error message + """Test trying reload the websocket.""" + mock_automower_client.start_listening.side_effect = HusqvarnaWSServerHandshakeError( + "Boom" + ) await setup_integration(hass, mock_config_entry) - await mock_called.wait() - mock_called.clear() - # Allow the exception to be raised - mock_stall.set() - assert mock.call_count == 1 + assert "Failed to connect to websocket. 
Trying to reconnect: Boom" in caplog.text + assert mock_automower_client.auth.websocket_connect.call_count == 1 + assert mock_automower_client.start_listening.call_count == 1 + assert mock_config_entry.state is ConfigEntryState.LOADED + freezer.tick(timedelta(seconds=2)) + async_fire_time_changed(hass) await hass.async_block_till_done() - assert f"{error_msg} Trying to reconnect: Boom" in caplog.text - - # Simulate a successful connection - caplog.clear() - await mock_called.wait() - mock_called.clear() - await hass.async_block_till_done() - assert mock.call_count == 2 - assert "Trying to reconnect: Boom" not in caplog.text - - # Simulate hass shutting down - await hass.async_stop() - assert mock.call_count == 2 + assert mock_automower_client.auth.websocket_connect.call_count == 2 + assert mock_automower_client.start_listening.call_count == 2 + assert mock_config_entry.state is ConfigEntryState.LOADED async def test_device_info( @@ -218,106 +160,3 @@ async def test_device_info( identifiers={(DOMAIN, TEST_MOWER_ID)}, ) assert reg_device == snapshot - - -async def test_coordinator_automatic_registry_cleanup( - hass: HomeAssistant, - mock_automower_client: AsyncMock, - mock_config_entry: MockConfigEntry, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - values: dict[str, MowerAttributes], -) -> None: - """Test automatic registry cleanup.""" - await setup_integration(hass, mock_config_entry) - entry = hass.config_entries.async_entries(DOMAIN)[0] - await hass.async_block_till_done() - - current_entites = len( - er.async_entries_for_config_entry(entity_registry, entry.entry_id) - ) - current_devices = len( - dr.async_entries_for_config_entry(device_registry, entry.entry_id) - ) - - values.pop(TEST_MOWER_ID) - mock_automower_client.get_status.return_value = values - await hass.config_entries.async_reload(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert ( - len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) - == current_entites - 37 - ) - assert ( - len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) - == current_devices - 1 - ) - - -async def test_add_and_remove_work_area( - hass: HomeAssistant, - mock_automower_client: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - entity_registry: er.EntityRegistry, - values: dict[str, MowerAttributes], -) -> None: - """Test adding a work area in runtime.""" - await setup_integration(hass, mock_config_entry) - entry = hass.config_entries.async_entries(DOMAIN)[0] - current_entites_start = len( - er.async_entries_for_config_entry(entity_registry, entry.entry_id) - ) - values[TEST_MOWER_ID].work_area_names.append("new work area") - values[TEST_MOWER_ID].work_area_dict.update({1: "new work area"}) - values[TEST_MOWER_ID].work_areas.update( - { - 1: WorkArea( - name="new work area", - cutting_height=12, - enabled=True, - progress=12, - last_time_completed=datetime( - 2024, 10, 1, 11, 11, 0, tzinfo=dt_util.get_default_time_zone() - ), - ) - } - ) - mock_automower_client.get_status.return_value = values - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - current_entites_after_addition = len( - er.async_entries_for_config_entry(entity_registry, entry.entry_id) - ) - assert ( - current_entites_after_addition - == current_entites_start - + ADDITIONAL_NUMBER_ENTITIES - + ADDITIONAL_SENSOR_ENTITIES - + ADDITIONAL_SWITCH_ENTITIES - ) - - values[TEST_MOWER_ID].work_area_names.remove("new work 
area") - del values[TEST_MOWER_ID].work_area_dict[1] - del values[TEST_MOWER_ID].work_areas[1] - values[TEST_MOWER_ID].work_area_names.remove("Front lawn") - del values[TEST_MOWER_ID].work_area_dict[123456] - del values[TEST_MOWER_ID].work_areas[123456] - del values[TEST_MOWER_ID].calendar.tasks[:2] - values[TEST_MOWER_ID].mower.work_area_id = 654321 - mock_automower_client.get_status.return_value = values - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - current_entites_after_deletion = len( - er.async_entries_for_config_entry(entity_registry, entry.entry_id) - ) - assert ( - current_entites_after_deletion - == current_entites_start - - ADDITIONAL_SWITCH_ENTITIES - - ADDITIONAL_NUMBER_ENTITIES - - ADDITIONAL_SENSOR_ENTITIES - ) diff --git a/tests/components/husqvarna_automower/test_lawn_mower.py b/tests/components/husqvarna_automower/test_lawn_mower.py index 3aca509e865..5d5cacfc6bf 100644 --- a/tests/components/husqvarna_automower/test_lawn_mower.py +++ b/tests/components/husqvarna_automower/test_lawn_mower.py @@ -4,7 +4,7 @@ from datetime import timedelta from unittest.mock import AsyncMock from aioautomower.exceptions import ApiException -from aioautomower.model import MowerActivities, MowerAttributes, MowerStates +from aioautomower.utils import mower_list_to_dictionary_dataclass from freezegun.api import FrozenDateTimeFactory import pytest from voluptuous.error import MultipleInvalid @@ -13,12 +13,16 @@ from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.components.lawn_mower import LawnMowerActivity from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from . 
import setup_integration from .const import TEST_MOWER_ID -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_value_fixture, +) async def test_lawn_mower_states( @@ -26,23 +30,20 @@ async def test_lawn_mower_states( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, - values: dict[str, MowerAttributes], ) -> None: """Test lawn_mower state.""" + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) await setup_integration(hass, mock_config_entry) state = hass.states.get("lawn_mower.test_mower_1") assert state is not None assert state.state == LawnMowerActivity.DOCKED for activity, state, expected_state in ( - (MowerActivities.UNKNOWN, MowerStates.PAUSED, LawnMowerActivity.PAUSED), - (MowerActivities.MOWING, MowerStates.NOT_APPLICABLE, LawnMowerActivity.MOWING), - (MowerActivities.NOT_APPLICABLE, MowerStates.ERROR, LawnMowerActivity.ERROR), - ( - MowerActivities.GOING_HOME, - MowerStates.IN_OPERATION, - LawnMowerActivity.RETURNING, - ), + ("UNKNOWN", "PAUSED", LawnMowerActivity.PAUSED), + ("MOWING", "NOT_APPLICABLE", LawnMowerActivity.MOWING), + ("NOT_APPLICABLE", "ERROR", LawnMowerActivity.ERROR), ): values[TEST_MOWER_ID].mower.activity = activity values[TEST_MOWER_ID].mower.state = state @@ -121,7 +122,7 @@ async def test_lawn_mower_commands( async def test_lawn_mower_service_commands( hass: HomeAssistant, aioautomower_command: str, - extra_data: timedelta, + extra_data: int | None, service: str, service_data: dict[str, int] | None, mock_automower_client: AsyncMock, @@ -157,61 +158,7 @@ async def test_lawn_mower_service_commands( @pytest.mark.parametrize( - ("aioautomower_command", "extra_data1", "extra_data2", "service", "service_data"), - [ - ( - "start_in_workarea", - 123456, - timedelta(days=40), - "override_schedule_work_area", - { - "work_area_id": 123456, - "duration": {"days": 40}, - }, - ), - ], -) -async def test_lawn_mower_override_work_area_command( - hass: HomeAssistant, - aioautomower_command: str, - extra_data1: int, - extra_data2: timedelta, - service: str, - service_data: dict[str, int] | None, - mock_automower_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test lawn_mower work area override commands.""" - await setup_integration(hass, mock_config_entry) - mocked_method = AsyncMock() - setattr(mock_automower_client.commands, aioautomower_command, mocked_method) - await hass.services.async_call( - domain=DOMAIN, - service=service, - target={"entity_id": "lawn_mower.test_mower_1"}, - service_data=service_data, - blocking=True, - ) - mocked_method.assert_called_once_with(TEST_MOWER_ID, extra_data1, extra_data2) - - getattr( - mock_automower_client.commands, aioautomower_command - ).side_effect = ApiException("Test error") - with pytest.raises( - HomeAssistantError, - match="Failed to send command: Test error", - ): - await hass.services.async_call( - domain=DOMAIN, - service=service, - target={"entity_id": "lawn_mower.test_mower_1"}, - service_data=service_data, - blocking=True, - ) - - -@pytest.mark.parametrize( - ("service", "service_data", "mower_support_wa", "exception"), + ("service", "service_data"), [ ( "override_schedule", @@ -219,26 +166,6 @@ async def test_lawn_mower_override_work_area_command( "duration": {"days": 1, "hours": 12, "minutes": 30}, "override_mode": "fly_to_moon", }, - False, - MultipleInvalid, - ), - ( - 
"override_schedule_work_area", - { - "work_area_id": 123456, - "duration": {"days": 40}, - }, - False, - ServiceValidationError, - ), - ( - "override_schedule_work_area", - { - "work_area_id": 12345, - "duration": {"days": 40}, - }, - True, - ServiceValidationError, ), ], ) @@ -246,21 +173,12 @@ async def test_lawn_mower_wrong_service_commands( hass: HomeAssistant, service: str, service_data: dict[str, int] | None, - mower_support_wa: bool, - exception, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - values: dict[str, MowerAttributes], ) -> None: """Test lawn_mower commands.""" await setup_integration(hass, mock_config_entry) - values[TEST_MOWER_ID].capabilities.work_areas = mower_support_wa - mock_automower_client.get_status.return_value = values - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - with pytest.raises(exception): + with pytest.raises(MultipleInvalid): await hass.services.async_call( domain=DOMAIN, service=service, diff --git a/tests/components/husqvarna_automower/test_number.py b/tests/components/husqvarna_automower/test_number.py index e1f232e7b5c..0547d6a9b2e 100644 --- a/tests/components/husqvarna_automower/test_number.py +++ b/tests/components/husqvarna_automower/test_number.py @@ -1,15 +1,13 @@ """Tests for number platform.""" -from datetime import timedelta from unittest.mock import AsyncMock, patch from aioautomower.exceptions import ApiException -from aioautomower.model import MowerAttributes -from freezegun.api import FrozenDateTimeFactory +from aioautomower.utils import mower_list_to_dictionary_dataclass import pytest from syrupy import SnapshotAssertion -from homeassistant.components.husqvarna_automower.const import EXECUTION_TIME_DELAY +from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -18,7 +16,7 @@ from homeassistant.helpers import entity_registry as er from . 
import setup_integration from .const import TEST_MOWER_ID -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +from tests.common import MockConfigEntry, load_json_value_fixture, snapshot_platform @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -43,7 +41,7 @@ async def test_number_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Failed to send command: Test error", + match="Command couldn't be sent to the command queue: Test error", ): await hass.services.async_call( domain="number", @@ -59,26 +57,26 @@ async def test_number_workarea_commands( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - values: dict[str, MowerAttributes], ) -> None: """Test number commands.""" entity_id = "number.test_mower_1_front_lawn_cutting_height" await setup_integration(hass, mock_config_entry) + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) values[TEST_MOWER_ID].work_areas[123456].cutting_height = 75 mock_automower_client.get_status.return_value = values mocked_method = AsyncMock() - setattr(mock_automower_client.commands, "workarea_settings", mocked_method) + setattr( + mock_automower_client.commands, "set_cutting_height_workarea", mocked_method + ) await hass.services.async_call( domain="number", service="set_value", target={"entity_id": entity_id}, service_data={"value": "75"}, - blocking=False, + blocking=True, ) - freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) - async_fire_time_changed(hass) - await hass.async_block_till_done() mocked_method.assert_called_once_with(TEST_MOWER_ID, 75, 123456) state = hass.states.get(entity_id) assert state.state is not None @@ -87,7 +85,7 @@ async def test_number_workarea_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Failed to send command: Test error", + match="Command couldn't be sent to the command queue: Test error", ): await hass.services.async_call( domain="number", @@ -99,6 +97,31 @@ async def test_number_workarea_commands( assert len(mocked_method.mock_calls) == 2 +async def test_workarea_deleted( + hass: HomeAssistant, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test if work area is deleted after removed.""" + + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) + await setup_integration(hass, mock_config_entry) + current_entries = len( + er.async_entries_for_config_entry(entity_registry, mock_config_entry.entry_id) + ) + + del values[TEST_MOWER_ID].work_areas[123456] + mock_automower_client.get_status.return_value = values + await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert len( + er.async_entries_for_config_entry(entity_registry, mock_config_entry.entry_id) + ) == (current_entries - 1) + + @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_number_snapshot( hass: HomeAssistant, diff --git a/tests/components/husqvarna_automower/test_select.py b/tests/components/husqvarna_automower/test_select.py index 18d1b0ed21f..2728bb5e672 100644 --- a/tests/components/husqvarna_automower/test_select.py +++ b/tests/components/husqvarna_automower/test_select.py @@ -3,10 +3,12 @@ from unittest.mock import AsyncMock from aioautomower.exceptions import 
ApiException -from aioautomower.model import HeadlightModes, MowerAttributes +from aioautomower.model import HeadlightModes +from aioautomower.utils import mower_list_to_dictionary_dataclass from freezegun.api import FrozenDateTimeFactory import pytest +from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -14,7 +16,11 @@ from homeassistant.exceptions import HomeAssistantError from . import setup_integration from .const import TEST_MOWER_ID -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_value_fixture, +) async def test_select_states( @@ -22,9 +28,11 @@ async def test_select_states( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, - values: dict[str, MowerAttributes], ) -> None: """Test states of headlight mode select.""" + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) await setup_integration(hass, mock_config_entry) state = hass.states.get("select.test_mower_1_headlight_mode") assert state is not None @@ -80,7 +88,7 @@ async def test_select_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Failed to send command: Test error", + match="Command couldn't be sent to the command queue: Test error", ): await hass.services.async_call( domain="select", diff --git a/tests/components/husqvarna_automower/test_sensor.py b/tests/components/husqvarna_automower/test_sensor.py index 06fcc30e40c..8f30a3dcb04 100644 --- a/tests/components/husqvarna_automower/test_sensor.py +++ b/tests/components/husqvarna_automower/test_sensor.py @@ -1,14 +1,14 @@ """Tests for sensor platform.""" -import datetime from unittest.mock import AsyncMock, patch -import zoneinfo -from aioautomower.model import MowerAttributes, MowerModes, MowerStates +from aioautomower.model import MowerModes +from aioautomower.utils import mower_list_to_dictionary_dataclass from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion +from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant @@ -17,7 +17,12 @@ from homeassistant.helpers import entity_registry as er from . 
import setup_integration from .const import TEST_MOWER_ID -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_value_fixture, + snapshot_platform, +) async def test_sensor_unknown_states( @@ -25,9 +30,11 @@ async def test_sensor_unknown_states( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, - values: dict[str, MowerAttributes], ) -> None: """Test a sensor which returns unknown.""" + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) await setup_integration(hass, mock_config_entry) state = hass.states.get("sensor.test_mower_1_mode") assert state is not None @@ -56,22 +63,21 @@ async def test_cutting_blade_usage_time_sensor( assert state.state == "0.034" -@pytest.mark.freeze_time( - datetime.datetime(2023, 6, 5, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")) -) async def test_next_start_sensor( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, - values: dict[str, MowerAttributes], ) -> None: """Test if this sensor is only added, if data is available.""" await setup_integration(hass, mock_config_entry) state = hass.states.get("sensor.test_mower_1_next_start") assert state is not None - assert state.state == "2023-06-05T17:00:00+00:00" + assert state.state == "2023-06-05T19:00:00+00:00" + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) values[TEST_MOWER_ID].planner.next_start_datetime = None mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) @@ -81,36 +87,6 @@ async def test_next_start_sensor( assert state.state == STATE_UNKNOWN -async def test_work_area_sensor( - hass: HomeAssistant, - mock_automower_client: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - values: dict[str, MowerAttributes], -) -> None: - """Test the work area sensor.""" - await setup_integration(hass, mock_config_entry) - state = hass.states.get("sensor.test_mower_1_work_area") - assert state is not None - assert state.state == "Front lawn" - - values[TEST_MOWER_ID].mower.work_area_id = None - mock_automower_client.get_status.return_value = values - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - state = hass.states.get("sensor.test_mower_1_work_area") - assert state.state == "no_work_area_active" - - values[TEST_MOWER_ID].mower.work_area_id = 0 - mock_automower_client.get_status.return_value = values - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - state = hass.states.get("sensor.test_mower_1_work_area") - assert state.state == "my_lawn" - - @pytest.mark.parametrize( ("sensor_to_test"), [ @@ -129,10 +105,13 @@ async def test_statistics_not_available( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, sensor_to_test: str, - values: dict[str, MowerAttributes], ) -> None: """Test if this sensor is only added, if data is available.""" + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) + delattr(values[TEST_MOWER_ID].statistics, sensor_to_test) mock_automower_client.get_status.return_value = values await setup_integration(hass, mock_config_entry) @@ -145,20 +124,18 @@ async def test_error_sensor( mock_automower_client: AsyncMock, mock_config_entry: 
MockConfigEntry,
     freezer: FrozenDateTimeFactory,
-    values: dict[str, MowerAttributes],
 ) -> None:
     """Test error sensor."""
+    values = mower_list_to_dictionary_dataclass(
+        load_json_value_fixture("mower.json", DOMAIN)
+    )
     await setup_integration(hass, mock_config_entry)
 
-    for state, error_key, expected_state in (
-        (MowerStates.IN_OPERATION, None, "no_error"),
-        (MowerStates.ERROR, "can_error", "can_error"),
-        (MowerStates.ERROR, None, MowerStates.ERROR.lower()),
-        (MowerStates.ERROR_AT_POWER_UP, None, MowerStates.ERROR_AT_POWER_UP.lower()),
-        (MowerStates.FATAL_ERROR, None, MowerStates.FATAL_ERROR.lower()),
+    for state, expected_state in (
+        (None, "no_error"),
+        ("can_error", "can_error"),
     ):
-        values[TEST_MOWER_ID].mower.state = state
-        values[TEST_MOWER_ID].mower.error_key = error_key
+        values[TEST_MOWER_ID].mower.error_key = state
         mock_automower_client.get_status.return_value = values
         freezer.tick(SCAN_INTERVAL)
         async_fire_time_changed(hass)
diff --git a/tests/components/husqvarna_automower/test_switch.py b/tests/components/husqvarna_automower/test_switch.py
index 100fd9fe3a4..08450158876 100644
--- a/tests/components/husqvarna_automower/test_switch.py
+++ b/tests/components/husqvarna_automower/test_switch.py
@@ -1,29 +1,17 @@
 """Tests for switch platform."""
 
-from datetime import timedelta
 from unittest.mock import AsyncMock, patch
-import zoneinfo
 
 from aioautomower.exceptions import ApiException
-from aioautomower.model import MowerAttributes, MowerModes, Zone
+from aioautomower.model import MowerModes
 from aioautomower.utils import mower_list_to_dictionary_dataclass
 from freezegun.api import FrozenDateTimeFactory
 import pytest
 from syrupy import SnapshotAssertion
 
-from homeassistant.components.husqvarna_automower.const import (
-    DOMAIN,
-    EXECUTION_TIME_DELAY,
-)
+from homeassistant.components.husqvarna_automower.const import DOMAIN
 from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL
-from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
-from homeassistant.const import (
-    ATTR_ENTITY_ID,
-    SERVICE_TOGGLE,
-    SERVICE_TURN_OFF,
-    SERVICE_TURN_ON,
-    Platform,
-)
+from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import entity_registry as er
@@ -38,8 +26,6 @@ from tests.common import (
     snapshot_platform,
 )
 
-TEST_AREA_ID = 0
-TEST_VARIABLE_ZONE_ID = "203F6359-AB56-4D57-A6DC-703095BB695D"
 TEST_ZONE_ID = "AAAAAAAA-BBBB-CCCC-DDDD-123456789101"
 
 
@@ -48,9 +34,11 @@ async def test_switch_states(
     mock_automower_client: AsyncMock,
     mock_config_entry: MockConfigEntry,
     freezer: FrozenDateTimeFactory,
-    values: dict[str, MowerAttributes],
 ) -> None:
     """Test switch state."""
+    values = mower_list_to_dictionary_dataclass(
+        load_json_value_fixture("mower.json", DOMAIN)
+    )
     await setup_integration(hass, mock_config_entry)
 
     for mode, expected_state in (
@@ -69,9 +57,9 @@ async def test_switch_states(
 @pytest.mark.parametrize(
     ("service", "aioautomower_command"),
     [
-        (SERVICE_TURN_OFF, "park_until_further_notice"),
-        (SERVICE_TURN_ON, "resume_schedule"),
-        (SERVICE_TOGGLE, "park_until_further_notice"),
+        ("turn_off", "park_until_further_notice"),
+        ("turn_on", "resume_schedule"),
+        ("toggle", "park_until_further_notice"),
     ],
 )
 async def test_switch_commands(
@@ -84,9 +72,9 @@ async def test_switch_commands(
     """Test switch commands."""
     await setup_integration(hass, mock_config_entry)
     await hass.services.async_call(
-        domain=SWITCH_DOMAIN,
+
domain="switch", service=service, - service_data={ATTR_ENTITY_ID: "switch.test_mower_1_enable_schedule"}, + service_data={"entity_id": "switch.test_mower_1_enable_schedule"}, blocking=True, ) mocked_method = getattr(mock_automower_client.commands, aioautomower_command) @@ -95,12 +83,12 @@ async def test_switch_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Failed to send command: Test error", + match="Command couldn't be sent to the command queue: Test error", ): await hass.services.async_call( - domain=SWITCH_DOMAIN, + domain="switch", service=service, - service_data={ATTR_ENTITY_ID: "switch.test_mower_1_enable_schedule"}, + service_data={"entity_id": "switch.test_mower_1_enable_schedule"}, blocking=True, ) assert len(mocked_method.mock_calls) == 2 @@ -109,9 +97,9 @@ async def test_switch_commands( @pytest.mark.parametrize( ("service", "boolean", "excepted_state"), [ - (SERVICE_TURN_OFF, False, "off"), - (SERVICE_TURN_ON, True, "on"), - (SERVICE_TOGGLE, True, "on"), + ("turn_off", False, "off"), + ("turn_on", True, "on"), + ("toggle", True, "on"), ], ) async def test_stay_out_zone_switch_commands( @@ -121,29 +109,23 @@ async def test_stay_out_zone_switch_commands( excepted_state: str, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - mower_time_zone: zoneinfo.ZoneInfo, ) -> None: """Test switch commands.""" entity_id = "switch.test_mower_1_avoid_danger_zone" await setup_integration(hass, mock_config_entry) values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN), - mower_time_zone, + load_json_value_fixture("mower.json", DOMAIN) ) values[TEST_MOWER_ID].stay_out_zones.zones[TEST_ZONE_ID].enabled = boolean mock_automower_client.get_status.return_value = values mocked_method = AsyncMock() setattr(mock_automower_client.commands, "switch_stay_out_zone", mocked_method) await hass.services.async_call( - domain=SWITCH_DOMAIN, + domain="switch", service=service, - service_data={ATTR_ENTITY_ID: entity_id}, - blocking=False, + service_data={"entity_id": entity_id}, + blocking=True, ) - freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) - async_fire_time_changed(hass) - await hass.async_block_till_done() mocked_method.assert_called_once_with(TEST_MOWER_ID, TEST_ZONE_ID, boolean) state = hass.states.get(entity_id) assert state is not None @@ -152,115 +134,40 @@ async def test_stay_out_zone_switch_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Failed to send command: Test error", + match="Command couldn't be sent to the command queue: Test error", ): await hass.services.async_call( - domain=SWITCH_DOMAIN, + domain="switch", service=service, - service_data={ATTR_ENTITY_ID: entity_id}, + service_data={"entity_id": entity_id}, blocking=True, ) assert len(mocked_method.mock_calls) == 2 -@pytest.mark.parametrize( - ("service", "boolean", "excepted_state"), - [ - (SERVICE_TURN_OFF, False, "off"), - (SERVICE_TURN_ON, True, "on"), - (SERVICE_TOGGLE, True, "on"), - ], -) -async def test_work_area_switch_commands( - hass: HomeAssistant, - service: str, - boolean: bool, - excepted_state: str, - mock_automower_client: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - mower_time_zone: zoneinfo.ZoneInfo, - values: dict[str, MowerAttributes], -) -> None: - """Test switch commands.""" - entity_id = "switch.test_mower_1_my_lawn" - await setup_integration(hass, 
mock_config_entry) - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN), - mower_time_zone, - ) - values[TEST_MOWER_ID].work_areas[TEST_AREA_ID].enabled = boolean - mock_automower_client.get_status.return_value = values - mocked_method = AsyncMock() - setattr(mock_automower_client.commands, "workarea_settings", mocked_method) - await hass.services.async_call( - domain=SWITCH_DOMAIN, - service=service, - service_data={ATTR_ENTITY_ID: entity_id}, - blocking=False, - ) - freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - mocked_method.assert_called_once_with(TEST_MOWER_ID, TEST_AREA_ID, enabled=boolean) - state = hass.states.get(entity_id) - assert state is not None - assert state.state == excepted_state - - mocked_method.side_effect = ApiException("Test error") - with pytest.raises( - HomeAssistantError, - match="Failed to send command: Test error", - ): - await hass.services.async_call( - domain=SWITCH_DOMAIN, - service=service, - service_data={ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - assert len(mocked_method.mock_calls) == 2 - - -async def test_add_stay_out_zone( +async def test_zones_deleted( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, entity_registry: er.EntityRegistry, - values: dict[str, MowerAttributes], ) -> None: - """Test adding a stay out zone in runtime.""" + """Test if stay-out-zone is deleted after removed.""" + + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) await setup_integration(hass, mock_config_entry) - entry = hass.config_entries.async_entries(DOMAIN)[0] - current_entites = len( - er.async_entries_for_config_entry(entity_registry, entry.entry_id) - ) - values[TEST_MOWER_ID].stay_out_zones.zones.update( - { - TEST_VARIABLE_ZONE_ID: Zone( - name="future_zone", - enabled=True, - ) - } + current_entries = len( + er.async_entries_for_config_entry(entity_registry, mock_config_entry.entry_id) ) + + del values[TEST_MOWER_ID].stay_out_zones.zones[TEST_ZONE_ID] mock_automower_client.get_status.return_value = values - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) + await hass.config_entries.async_reload(mock_config_entry.entry_id) await hass.async_block_till_done() - current_entites_after_addition = len( - er.async_entries_for_config_entry(entity_registry, entry.entry_id) - ) - assert current_entites_after_addition == current_entites + 1 - values[TEST_MOWER_ID].stay_out_zones.zones.pop(TEST_VARIABLE_ZONE_ID) - values[TEST_MOWER_ID].stay_out_zones.zones.pop(TEST_ZONE_ID) - mock_automower_client.get_status.return_value = values - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - current_entites_after_deletion = len( - er.async_entries_for_config_entry(entity_registry, entry.entry_id) - ) - assert current_entites_after_deletion == current_entites - 1 + assert len( + er.async_entries_for_config_entry(entity_registry, mock_config_entry.entry_id) + ) == (current_entries - 1) async def test_switch_snapshot( diff --git a/tests/components/husqvarna_automower_ble/__init__.py b/tests/components/husqvarna_automower_ble/__init__.py deleted file mode 100644 index 7ca5aea121d..00000000000 --- a/tests/components/husqvarna_automower_ble/__init__.py +++ /dev/null @@ -1,74 +0,0 @@ -"""Tests for the Husqvarna Automower Bluetooth integration.""" - -from unittest.mock import patch - -from 
homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers.service_info.bluetooth import BluetoothServiceInfo - -from tests.common import MockConfigEntry -from tests.components.bluetooth import inject_bluetooth_service_info - -AUTOMOWER_SERVICE_INFO = BluetoothServiceInfo( - name="305", - address="00000000-0000-0000-0000-000000000003", - rssi=-63, - service_data={}, - manufacturer_data={1062: b"\x05\x04\xbf\xcf\xbb\r"}, - service_uuids=[ - "98bd0001-0b0e-421a-84e5-ddbf75dc6de4", - "00001800-0000-1000-8000-00805f9b34fb", - ], - source="local", -) - -AUTOMOWER_UNNAMED_SERVICE_INFO = BluetoothServiceInfo( - name=None, - address="00000000-0000-0000-0000-000000000004", - rssi=-63, - service_data={}, - manufacturer_data={1062: b"\x05\x04\xbf\xcf\xbb\r"}, - service_uuids=[ - "98bd0001-0b0e-421a-84e5-ddbf75dc6de4", - "00001800-0000-1000-8000-00805f9b34fb", - ], - source="local", -) - -AUTOMOWER_MISSING_MANUFACTURER_DATA_SERVICE_INFO = BluetoothServiceInfo( - name="Missing Manufacturer Data", - address="00000000-0000-0000-0002-000000000001", - rssi=-63, - service_data={}, - manufacturer_data={}, - service_uuids=[ - "98bd0001-0b0e-421a-84e5-ddbf75dc6de4", - "00001800-0000-1000-8000-00805f9b34fb", - ], - source="local", -) - -AUTOMOWER_UNSUPPORTED_GROUP_SERVICE_INFO = BluetoothServiceInfo( - name="Unsupported Group", - address="00000000-0000-0000-0002-000000000002", - rssi=-63, - service_data={}, - manufacturer_data={1062: b"\x05\x04\xbf\xcf\xbb\r"}, - service_uuids=[ - "98bd0001-0b0e-421a-84e5-ddbf75dc6de4", - ], - source="local", -) - - -async def setup_entry( - hass: HomeAssistant, mock_entry: MockConfigEntry, platforms: list[Platform] -) -> None: - """Make sure the device is available.""" - - inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) - - with patch("homeassistant.components.husqvarna_automower_ble.PLATFORMS", platforms): - mock_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/husqvarna_automower_ble/conftest.py b/tests/components/husqvarna_automower_ble/conftest.py deleted file mode 100644 index 3a8e881aba0..00000000000 --- a/tests/components/husqvarna_automower_ble/conftest.py +++ /dev/null @@ -1,62 +0,0 @@ -"""Common fixtures for the Husqvarna Automower Bluetooth tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.husqvarna_automower_ble.const import DOMAIN -from homeassistant.const import CONF_ADDRESS, CONF_CLIENT_ID - -from . 
import AUTOMOWER_SERVICE_INFO - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.husqvarna_automower_ble.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture(autouse=True) -def mock_automower_client(enable_bluetooth: None) -> Generator[AsyncMock]: - """Mock a BleakClient client.""" - with ( - patch( - "homeassistant.components.husqvarna_automower_ble.Mower", - autospec=True, - ) as mock_client, - patch( - "homeassistant.components.husqvarna_automower_ble.config_flow.Mower", - new=mock_client, - ), - ): - client = mock_client.return_value - client.connect.return_value = True - client.is_connected.return_value = True - client.get_model.return_value = "305" - client.battery_level.return_value = 100 - client.mower_state.return_value = "pendingStart" - client.mower_activity.return_value = "charging" - client.probe_gatts.return_value = ("Husqvarna", "Automower", "305") - - yield client - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title="Husqvarna AutoMower", - data={ - CONF_ADDRESS: AUTOMOWER_SERVICE_INFO.address, - CONF_CLIENT_ID: 1197489078, - }, - unique_id=AUTOMOWER_SERVICE_INFO.address, - ) diff --git a/tests/components/husqvarna_automower_ble/snapshots/test_init.ambr b/tests/components/husqvarna_automower_ble/snapshots/test_init.ambr deleted file mode 100644 index 1cc54020195..00000000000 --- a/tests/components/husqvarna_automower_ble/snapshots/test_init.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: test_setup - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'husqvarna_automower_ble', - '00000000-0000-0000-0000-000000000003_1197489078', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Husqvarna', - 'model': None, - 'model_id': '305', - 'name': 'Husqvarna AutoMower', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- diff --git a/tests/components/husqvarna_automower_ble/test_config_flow.py b/tests/components/husqvarna_automower_ble/test_config_flow.py deleted file mode 100644 index e053a28b7dd..00000000000 --- a/tests/components/husqvarna_automower_ble/test_config_flow.py +++ /dev/null @@ -1,198 +0,0 @@ -"""Test the Husqvarna Bluetooth config flow.""" - -from unittest.mock import Mock, patch - -from bleak import BleakError -import pytest - -from homeassistant.components.husqvarna_automower_ble.const import DOMAIN -from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER -from homeassistant.const import CONF_ADDRESS, CONF_CLIENT_ID -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from . 
import ( - AUTOMOWER_SERVICE_INFO, - AUTOMOWER_UNNAMED_SERVICE_INFO, - AUTOMOWER_UNSUPPORTED_GROUP_SERVICE_INFO, -) - -from tests.common import MockConfigEntry -from tests.components.bluetooth import inject_bluetooth_service_info - -pytestmark = pytest.mark.usefixtures("mock_setup_entry") - - -@pytest.fixture(autouse=True) -def mock_random() -> Mock: - """Mock random to generate predictable client id.""" - with patch( - "homeassistant.components.husqvarna_automower_ble.config_flow.random" - ) as mock_random: - mock_random.randint.return_value = 1197489078 - yield mock_random - - -async def test_user_selection(hass: HomeAssistant) -> None: - """Test we can select a device.""" - - inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) - inject_bluetooth_service_info(hass, AUTOMOWER_UNNAMED_SERVICE_INFO) - await hass.async_block_till_done(wait_background_tasks=True) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_ADDRESS: "00000000-0000-0000-0000-000000000001"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Husqvarna Automower" - assert result["result"].unique_id == "00000000-0000-0000-0000-000000000001" - - assert result["data"] == { - CONF_ADDRESS: "00000000-0000-0000-0000-000000000001", - CONF_CLIENT_ID: 1197489078, - } - - -async def test_bluetooth(hass: HomeAssistant) -> None: - """Test bluetooth device discovery.""" - - inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) - await hass.async_block_till_done(wait_background_tasks=True) - - result = hass.config_entries.flow.async_progress_by_handler(DOMAIN)[0] - assert result["step_id"] == "confirm" - assert result["context"]["unique_id"] == "00000000-0000-0000-0000-000000000003" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Husqvarna Automower" - assert result["result"].unique_id == "00000000-0000-0000-0000-000000000003" - - assert result["data"] == { - CONF_ADDRESS: "00000000-0000-0000-0000-000000000003", - CONF_CLIENT_ID: 1197489078, - } - - -async def test_bluetooth_invalid(hass: HomeAssistant) -> None: - """Test bluetooth device discovery with invalid data.""" - - inject_bluetooth_service_info(hass, AUTOMOWER_UNSUPPORTED_GROUP_SERVICE_INFO) - await hass.async_block_till_done(wait_background_tasks=True) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_BLUETOOTH}, - data=AUTOMOWER_UNSUPPORTED_GROUP_SERVICE_INFO, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "no_devices_found" - - -async def test_failed_connect( - hass: HomeAssistant, - mock_automower_client: Mock, -) -> None: - """Test we can select a device.""" - - inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) - inject_bluetooth_service_info(hass, AUTOMOWER_UNNAMED_SERVICE_INFO) - await hass.async_block_till_done(wait_background_tasks=True) - - mock_automower_client.connect.side_effect = False - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": 
SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_ADDRESS: "00000000-0000-0000-0000-000000000001"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Husqvarna Automower" - assert result["result"].unique_id == "00000000-0000-0000-0000-000000000001" - - assert result["data"] == { - CONF_ADDRESS: "00000000-0000-0000-0000-000000000001", - CONF_CLIENT_ID: 1197489078, - } - - -async def test_duplicate_entry( - hass: HomeAssistant, - mock_automower_client: Mock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test we can select a device.""" - - mock_config_entry.add_to_hass(hass) - - inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) - - await hass.async_block_till_done(wait_background_tasks=True) - - # Test we should not discover the already configured device - assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 0 - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_ADDRESS: "00000000-0000-0000-0000-000000000003"}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_exception_connect( - hass: HomeAssistant, - mock_automower_client: Mock, -) -> None: - """Test we can select a device.""" - - inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) - inject_bluetooth_service_info(hass, AUTOMOWER_UNNAMED_SERVICE_INFO) - await hass.async_block_till_done(wait_background_tasks=True) - - mock_automower_client.probe_gatts.side_effect = BleakError - - result = hass.config_entries.flow.async_progress_by_handler(DOMAIN)[0] - assert result["step_id"] == "confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "cannot_connect" diff --git a/tests/components/husqvarna_automower_ble/test_init.py b/tests/components/husqvarna_automower_ble/test_init.py deleted file mode 100644 index 3cb4338eca4..00000000000 --- a/tests/components/husqvarna_automower_ble/test_init.py +++ /dev/null @@ -1,71 +0,0 @@ -"""Test the Husqvarna Automower Bluetooth setup.""" - -from unittest.mock import Mock - -from bleak import BleakError -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.husqvarna_automower_ble.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from . 
import AUTOMOWER_SERVICE_INFO - -from tests.common import MockConfigEntry - -pytestmark = pytest.mark.usefixtures("mock_automower_client") - - -async def test_setup( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test setup creates expected devices.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.LOADED - - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, f"{AUTOMOWER_SERVICE_INFO.address}_1197489078")} - ) - - assert device_entry == snapshot - - -async def test_setup_retry_connect( - hass: HomeAssistant, - mock_automower_client: Mock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test setup creates expected devices.""" - - mock_automower_client.connect.return_value = False - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_setup_failed_connect( - hass: HomeAssistant, - mock_automower_client: Mock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test setup creates expected devices.""" - - mock_automower_client.connect.side_effect = BleakError - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/husqvarna_automower_ble/test_lawn_mower.py b/tests/components/husqvarna_automower_ble/test_lawn_mower.py deleted file mode 100644 index 3f00d3dbff0..00000000000 --- a/tests/components/husqvarna_automower_ble/test_lawn_mower.py +++ /dev/null @@ -1,126 +0,0 @@ -"""Test the Husqvarna Automower Bluetooth setup.""" - -from datetime import timedelta -from unittest.mock import Mock - -from bleak import BleakError -from freezegun.api import FrozenDateTimeFactory -import pytest - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_UNAVAILABLE -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry, async_fire_time_changed - -pytestmark = pytest.mark.usefixtures("mock_automower_client") - - -@pytest.mark.parametrize( - ( - "is_connected_side_effect", - "is_connected_return_value", - "connect_side_effect", - "connect_return_value", - ), - [ - (None, False, None, False), - (None, False, BleakError, False), - (None, False, None, True), - (BleakError, False, None, True), - ], -) -async def test_setup_disconnect( - hass: HomeAssistant, - mock_automower_client: Mock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - is_connected_side_effect: Exception, - is_connected_return_value: bool, - connect_side_effect: Exception, - connect_return_value: bool, -) -> None: - """Test disconnected device.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.LOADED - - assert hass.states.get("lawn_mower.husqvarna_automower").state != STATE_UNAVAILABLE - - mock_automower_client.is_connected.side_effect = is_connected_side_effect - mock_automower_client.is_connected.return_value = is_connected_return_value - 
mock_automower_client.connect.side_effect = connect_side_effect - mock_automower_client.connect.return_value = connect_return_value - - freezer.tick(timedelta(seconds=60)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get("lawn_mower.husqvarna_automower").state == STATE_UNAVAILABLE - - -@pytest.mark.parametrize( - ("attribute"), - [ - "mower_activity", - "mower_state", - "battery_level", - ], -) -async def test_invalid_data_received( - hass: HomeAssistant, - mock_automower_client: Mock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - attribute: str, -) -> None: - """Test invalid data received.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.LOADED - - getattr(mock_automower_client, attribute).return_value = None - - freezer.tick(timedelta(seconds=60)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get("lawn_mower.husqvarna_automower").state == STATE_UNAVAILABLE - - -@pytest.mark.parametrize( - ("attribute"), - [ - "mower_activity", - "mower_state", - "battery_level", - ], -) -async def test_bleak_error_data_update( - hass: HomeAssistant, - mock_automower_client: Mock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - attribute: str, -) -> None: - """Test BleakError during data update.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.LOADED - - getattr(mock_automower_client, attribute).side_effect = BleakError - - freezer.tick(timedelta(seconds=60)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get("lawn_mower.husqvarna_automower").state == STATE_UNAVAILABLE diff --git a/tests/components/hvv_departures/test_config_flow.py b/tests/components/hvv_departures/test_config_flow.py index 8d82382d9a2..c85bfb7f6ee 100644 --- a/tests/components/hvv_departures/test_config_flow.py +++ b/tests/components/hvv_departures/test_config_flow.py @@ -4,7 +4,6 @@ import json from unittest.mock import patch from pygti.exceptions import CannotConnect, InvalidAuth -import pytest from homeassistant.components.hvv_departures.const import ( CONF_FILTER, @@ -313,10 +312,6 @@ async def test_options_flow(hass: HomeAssistant) -> None: } -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.hvv_departures.options.error.invalid_auth"], -) async def test_options_flow_invalid_auth(hass: HomeAssistant) -> None: """Test that options flow works.""" @@ -360,10 +355,6 @@ async def test_options_flow_invalid_auth(hass: HomeAssistant) -> None: assert result["errors"] == {"base": "invalid_auth"} -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.hvv_departures.options.error.cannot_connect"], -) async def test_options_flow_cannot_connect(hass: HomeAssistant) -> None: """Test that options flow works.""" diff --git a/tests/components/hydrawise/snapshots/test_valve.ambr b/tests/components/hydrawise/snapshots/test_valve.ambr deleted file mode 100644 index cac08893324..00000000000 --- a/tests/components/hydrawise/snapshots/test_valve.ambr +++ /dev/null @@ -1,99 +0,0 @@ -# serializer version: 1 -# name: test_all_valves[valve.zone_one-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'valve', - 'entity_category': None, - 'entity_id': 'valve.zone_one', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'hydrawise', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '5965394_zone', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_valves[valve.zone_one-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by hydrawise.com', - 'device_class': 'water', - 'friendly_name': 'Zone One', - 'supported_features': , - }), - 'context': , - 'entity_id': 'valve.zone_one', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- -# name: test_all_valves[valve.zone_two-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'valve', - 'entity_category': None, - 'entity_id': 'valve.zone_two', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'hydrawise', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '5965395_zone', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_valves[valve.zone_two-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by hydrawise.com', - 'device_class': 'water', - 'friendly_name': 'Zone Two', - 'supported_features': , - }), - 'context': , - 'entity_id': 'valve.zone_two', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- diff --git a/tests/components/hydrawise/test_services.py b/tests/components/hydrawise/test_services.py deleted file mode 100644 index f61a6786270..00000000000 --- a/tests/components/hydrawise/test_services.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Test Hydrawise services.""" - -from datetime import datetime -from unittest.mock import AsyncMock - -from pydrawise.schema import Zone - -from homeassistant.components.hydrawise.const import ( - ATTR_DURATION, - ATTR_UNTIL, - DOMAIN, - SERVICE_RESUME, - SERVICE_START_WATERING, - SERVICE_SUSPEND, -) -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def test_start_watering( - hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, - mock_pydrawise: AsyncMock, - zones: list[Zone], -) -> None: - """Test that the start_watering service works as intended.""" - await hass.services.async_call( - DOMAIN, - SERVICE_START_WATERING, - { - ATTR_ENTITY_ID: "binary_sensor.zone_one_watering", - ATTR_DURATION: 20, - }, - blocking=True, - ) - mock_pydrawise.start_zone.assert_called_once_with( - zones[0], custom_run_duration=20 * 60 - ) - - -async def test_start_watering_no_duration( - hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, - mock_pydrawise: AsyncMock, - zones: list[Zone], -) -> None: - """Test that the start_watering service works with 
no duration specified.""" - await hass.services.async_call( - DOMAIN, - SERVICE_START_WATERING, - {ATTR_ENTITY_ID: "binary_sensor.zone_one_watering"}, - blocking=True, - ) - mock_pydrawise.start_zone.assert_called_once_with(zones[0], custom_run_duration=0) - - -async def test_resume( - hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, - mock_pydrawise: AsyncMock, - zones: list[Zone], -) -> None: - """Test that the resume service works as intended.""" - await hass.services.async_call( - DOMAIN, - SERVICE_RESUME, - {ATTR_ENTITY_ID: "binary_sensor.zone_one_watering"}, - blocking=True, - ) - mock_pydrawise.resume_zone.assert_called_once_with(zones[0]) - - -async def test_suspend( - hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, - mock_pydrawise: AsyncMock, - zones: list[Zone], -) -> None: - """Test that the suspend service works as intended.""" - await hass.services.async_call( - DOMAIN, - SERVICE_SUSPEND, - { - ATTR_ENTITY_ID: "binary_sensor.zone_one_watering", - ATTR_UNTIL: datetime(2026, 1, 1, 0, 0, 0), - }, - blocking=True, - ) - mock_pydrawise.suspend_zone.assert_called_once_with( - zones[0], until=datetime(2026, 1, 1, 0, 0, 0) - ) diff --git a/tests/components/hydrawise/test_valve.py b/tests/components/hydrawise/test_valve.py deleted file mode 100644 index 7d769f920e6..00000000000 --- a/tests/components/hydrawise/test_valve.py +++ /dev/null @@ -1,59 +0,0 @@ -"""Test Hydrawise valve.""" - -from collections.abc import Awaitable, Callable -from unittest.mock import AsyncMock, patch - -from pydrawise.schema import Zone -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_CLOSE_VALVE, - SERVICE_OPEN_VALVE, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_valves( - hass: HomeAssistant, - mock_add_config_entry: Callable[[], Awaitable[MockConfigEntry]], - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test that all valves are working.""" - with patch( - "homeassistant.components.hydrawise.PLATFORMS", - [Platform.VALVE], - ): - config_entry = await mock_add_config_entry() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -async def test_services( - hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, - mock_pydrawise: AsyncMock, - zones: list[Zone], -) -> None: - """Test valve services.""" - await hass.services.async_call( - VALVE_DOMAIN, - SERVICE_OPEN_VALVE, - service_data={ATTR_ENTITY_ID: "valve.zone_one"}, - blocking=True, - ) - mock_pydrawise.start_zone.assert_called_once_with(zones[0]) - mock_pydrawise.reset_mock() - - await hass.services.async_call( - VALVE_DOMAIN, - SERVICE_CLOSE_VALVE, - service_data={ATTR_ENTITY_ID: "valve.zone_one"}, - blocking=True, - ) - mock_pydrawise.stop_zone.assert_called_once_with(zones[0]) diff --git a/tests/components/hyperion/__init__.py b/tests/components/hyperion/__init__.py index 36137ce0ddd..72aba96e81f 100644 --- a/tests/components/hyperion/__init__.py +++ b/tests/components/hyperion/__init__.py @@ -124,9 +124,9 @@ def add_test_config_entry( hass: HomeAssistant, data: dict[str, Any] | None = None, options: dict[str, Any] | None = None, -) -> MockConfigEntry: +) -> ConfigEntry: """Add a test config entry.""" - config_entry = MockConfigEntry( + config_entry: 
MockConfigEntry = MockConfigEntry( entry_id=TEST_CONFIG_ENTRY_ID, domain=DOMAIN, data=data diff --git a/tests/components/hyperion/test_config_flow.py b/tests/components/hyperion/test_config_flow.py index 4109fe0f653..57749f5eedc 100644 --- a/tests/components/hyperion/test_config_flow.py +++ b/tests/components/hyperion/test_config_flow.py @@ -20,7 +20,7 @@ from homeassistant.components.hyperion.const import ( DOMAIN, ) from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_SSDP, SOURCE_USER from homeassistant.const import ( ATTR_ENTITY_ID, CONF_HOST, @@ -427,7 +427,7 @@ async def test_auth_create_token_approval_declined_task_canceled( class CanceledAwaitableMock(AsyncMock): """A canceled awaitable mock.""" - def __init__(self) -> None: + def __init__(self): super().__init__() self.done = Mock(return_value=False) self.cancel = Mock() @@ -861,7 +861,12 @@ async def test_reauth_success(hass: HomeAssistant) -> None: ), patch("homeassistant.components.hyperion.async_setup_entry", return_value=True), ): - result = await config_entry.start_reauth_flow(hass) + result = await _init_flow( + hass, + source=SOURCE_REAUTH, + data=config_data, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM result = await _configure_flow( @@ -881,13 +886,18 @@ async def test_reauth_cannot_connect(hass: HomeAssistant) -> None: CONF_PORT: TEST_PORT, } - config_entry = add_test_config_entry(hass, data=config_data) + add_test_config_entry(hass, data=config_data) client = create_mock_client() client.async_client_connect = AsyncMock(return_value=False) with patch( "homeassistant.components.hyperion.client.HyperionClient", return_value=client ): - result = await config_entry.start_reauth_flow(hass) + result = await _init_flow( + hass, + source=SOURCE_REAUTH, + data=config_data, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" diff --git a/tests/components/iaqualink/test_config_flow.py b/tests/components/iaqualink/test_config_flow.py index 26540eb7308..4aaa66416f6 100644 --- a/tests/components/iaqualink/test_config_flow.py +++ b/tests/components/iaqualink/test_config_flow.py @@ -7,8 +7,7 @@ from iaqualink.exception import ( AqualinkServiceUnauthorizedException, ) -from homeassistant.components.iaqualink import DOMAIN, config_flow -from homeassistant.config_entries import SOURCE_USER +from homeassistant.components.iaqualink import config_flow from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -19,12 +18,13 @@ async def test_already_configured( """Test config flow when iaqualink component is already setup.""" config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) + flow = config_flow.AqualinkFlowHandler() + flow.hass = hass + flow.context = {} + + result = await flow.async_step_user(config_data) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" async def test_without_config(hass: HomeAssistant) -> None: diff --git a/tests/components/iaqualink/test_init.py b/tests/components/iaqualink/test_init.py index 1df199f706a..8e157b8d1e3 100644 --- a/tests/components/iaqualink/test_init.py +++ b/tests/components/iaqualink/test_init.py @@ -30,7 +30,7 @@ from .conftest import get_aqualink_device, 
get_aqualink_system from tests.common import async_fire_time_changed -async def _ffwd_next_update_interval(hass: HomeAssistant) -> None: +async def _ffwd_next_update_interval(hass): now = dt_util.utcnow() async_fire_time_changed(hass, now + UPDATE_INTERVAL) await hass.async_block_till_done() diff --git a/tests/components/ibeacon/test_device_tracker.py b/tests/components/ibeacon/test_device_tracker.py index e34cc480cb0..dcc21b5bfc9 100644 --- a/tests/components/ibeacon/test_device_tracker.py +++ b/tests/components/ibeacon/test_device_tracker.py @@ -11,7 +11,9 @@ from homeassistant.components.bluetooth import ( async_ble_device_from_address, async_last_service_info, ) -from homeassistant.components.bluetooth.const import UNAVAILABLE_TRACK_SECONDS +from homeassistant.components.bluetooth.const import ( # pylint: disable=hass-component-root-import + UNAVAILABLE_TRACK_SECONDS, +) from homeassistant.components.ibeacon.const import ( DOMAIN, UNAVAILABLE_TIMEOUT, diff --git a/tests/components/ibeacon/test_sensor.py b/tests/components/ibeacon/test_sensor.py index f4dba57bced..e2ddf1dd7bc 100644 --- a/tests/components/ibeacon/test_sensor.py +++ b/tests/components/ibeacon/test_sensor.py @@ -4,7 +4,9 @@ from datetime import timedelta import pytest -from homeassistant.components.bluetooth.const import UNAVAILABLE_TRACK_SECONDS +from homeassistant.components.bluetooth.const import ( # pylint: disable=hass-component-root-import + UNAVAILABLE_TRACK_SECONDS, +) from homeassistant.components.ibeacon.const import DOMAIN, UPDATE_INTERVAL from homeassistant.components.sensor import ATTR_STATE_CLASS from homeassistant.const import ( diff --git a/tests/components/icloud/test_config_flow.py b/tests/components/icloud/test_config_flow.py index c0bc5d7ed2e..ec8d11f1135 100644 --- a/tests/components/icloud/test_config_flow.py +++ b/tests/components/icloud/test_config_flow.py @@ -18,7 +18,7 @@ from homeassistant.components.icloud.const import ( DEFAULT_WITH_FAMILY, DOMAIN, ) -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -386,7 +386,12 @@ async def test_password_update( ) config_entry.add_to_hass(hass) - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "unique_id": config_entry.unique_id}, + data={**MOCK_CONFIG}, + ) + assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_configure( @@ -405,7 +410,12 @@ async def test_password_update_wrong_password(hass: HomeAssistant) -> None: ) config_entry.add_to_hass(hass) - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "unique_id": config_entry.unique_id}, + data={**MOCK_CONFIG}, + ) + assert result["type"] is FlowResultType.FORM with patch( diff --git a/tests/components/idasen_desk/conftest.py b/tests/components/idasen_desk/conftest.py index 24ef8311445..91f3f2de40e 100644 --- a/tests/components/idasen_desk/conftest.py +++ b/tests/components/idasen_desk/conftest.py @@ -1,10 +1,11 @@ """IKEA Idasen Desk fixtures.""" -from collections.abc import Callable, Generator +from collections.abc import Callable from unittest import mock from unittest.mock import AsyncMock, MagicMock import pytest +from 
typing_extensions import Generator @pytest.fixture(autouse=True) diff --git a/tests/components/idasen_desk/test_cover.py b/tests/components/idasen_desk/test_cover.py index 83312c04e72..0110fe7d820 100644 --- a/tests/components/idasen_desk/test_cover.py +++ b/tests/components/idasen_desk/test_cover.py @@ -10,13 +10,14 @@ from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN as COVER_DOMAIN, - CoverState, ) from homeassistant.const import ( SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, + STATE_CLOSED, + STATE_OPEN, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -35,7 +36,7 @@ async def test_cover_available( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 60 mock_desk_api.connect = AsyncMock() @@ -50,11 +51,11 @@ async def test_cover_available( @pytest.mark.parametrize( ("service", "service_data", "expected_state", "expected_position"), [ - (SERVICE_SET_COVER_POSITION, {ATTR_POSITION: 100}, CoverState.OPEN, 100), - (SERVICE_SET_COVER_POSITION, {ATTR_POSITION: 0}, CoverState.CLOSED, 0), - (SERVICE_OPEN_COVER, {}, CoverState.OPEN, 100), - (SERVICE_CLOSE_COVER, {}, CoverState.CLOSED, 0), - (SERVICE_STOP_COVER, {}, CoverState.OPEN, 60), + (SERVICE_SET_COVER_POSITION, {ATTR_POSITION: 100}, STATE_OPEN, 100), + (SERVICE_SET_COVER_POSITION, {ATTR_POSITION: 0}, STATE_CLOSED, 0), + (SERVICE_OPEN_COVER, {}, STATE_OPEN, 100), + (SERVICE_CLOSE_COVER, {}, STATE_CLOSED, 0), + (SERVICE_STOP_COVER, {}, STATE_OPEN, 60), ], ) async def test_cover_services( @@ -70,7 +71,7 @@ async def test_cover_services( await init_integration(hass) state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 60 await hass.services.async_call( COVER_DOMAIN, diff --git a/tests/components/ifttt/test_init.py b/tests/components/ifttt/test_init.py index c6d24421a8a..44896dc0f2c 100644 --- a/tests/components/ifttt/test_init.py +++ b/tests/components/ifttt/test_init.py @@ -2,8 +2,8 @@ from homeassistant import config_entries from homeassistant.components import ifttt +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, callback -from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from tests.typing import ClientSessionGenerator diff --git a/tests/components/image/conftest.py b/tests/components/image/conftest.py index 06ef7db9f49..65bbf2e0c4f 100644 --- a/tests/components/image/conftest.py +++ b/tests/components/image/conftest.py @@ -1,8 +1,7 @@ """Test helpers for image.""" -from collections.abc import Generator - import pytest +from typing_extensions import Generator from homeassistant.components import image from homeassistant.config_entries import ConfigEntry, ConfigFlow @@ -52,21 +51,6 @@ class MockImageEntityInvalidContentType(image.ImageEntity): return b"Test" -class MockImageEntityCapitalContentType(image.ImageEntity): - """Mock image entity with correct content type, but capitalized.""" - - _attr_name = "Test" - - async def async_added_to_hass(self): - """Set the update time and assign and incorrect content type.""" - self._attr_content_type = "Image/jpeg" - self._attr_image_last_updated = dt_util.utcnow() - - async def async_image(self) -> bytes | None: - """Return 
bytes of image.""" - return b"Test" - - class MockURLImageEntity(image.ImageEntity): """Mock image entity.""" @@ -88,16 +72,6 @@ class MockImageNoStateEntity(image.ImageEntity): return b"Test" -class MockImageNoDataEntity(image.ImageEntity): - """Mock image entity.""" - - _attr_name = "Test" - - async def async_image(self) -> bytes | None: - """Return bytes of image.""" - return None - - class MockImageSyncEntity(image.ImageEntity): """Mock image entity.""" diff --git a/tests/components/image/test_init.py b/tests/components/image/test_init.py index 3bcf0df52e3..717e82a652d 100644 --- a/tests/components/image/test_init.py +++ b/tests/components/image/test_init.py @@ -3,7 +3,7 @@ from datetime import datetime from http import HTTPStatus import ssl -from unittest.mock import MagicMock, mock_open, patch +from unittest.mock import MagicMock, patch from aiohttp import hdrs from freezegun.api import FrozenDateTimeFactory @@ -13,16 +13,12 @@ import respx from homeassistant.components import image from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from .conftest import ( MockImageEntity, - MockImageEntityCapitalContentType, MockImageEntityInvalidContentType, - MockImageNoDataEntity, MockImageNoStateEntity, MockImagePlatform, MockImageSyncEntity, @@ -142,32 +138,6 @@ async def test_no_valid_content_type( assert resp.status == HTTPStatus.INTERNAL_SERVER_ERROR -async def test_valid_but_capitalized_content_type( - hass: HomeAssistant, hass_client: ClientSessionGenerator -) -> None: - """Test invalid content type.""" - mock_integration(hass, MockModule(domain="test")) - mock_platform( - hass, "test.image", MockImagePlatform([MockImageEntityCapitalContentType(hass)]) - ) - assert await async_setup_component( - hass, image.DOMAIN, {"image": {"platform": "test"}} - ) - await hass.async_block_till_done() - - client = await hass_client() - - state = hass.states.get("image.test") - access_token = state.attributes["access_token"] - assert state.attributes == { - "access_token": access_token, - "entity_picture": f"/api/image_proxy/image.test?token={access_token}", - "friendly_name": "Test", - } - resp = await client.get(f"/api/image_proxy/image.test?token={access_token}") - assert resp.status == HTTPStatus.OK - - async def test_fetch_image_authenticated( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_image_platform: None ) -> None: @@ -384,112 +354,3 @@ async def test_image_stream( await hass.async_block_till_done() await close_future - - -async def test_snapshot_service(hass: HomeAssistant) -> None: - """Test snapshot service.""" - mopen = mock_open() - mock_integration(hass, MockModule(domain="test")) - mock_platform(hass, "test.image", MockImagePlatform([MockImageSyncEntity(hass)])) - assert await async_setup_component( - hass, image.DOMAIN, {"image": {"platform": "test"}} - ) - await hass.async_block_till_done() - - with ( - patch("homeassistant.components.image.open", mopen, create=True), - patch("homeassistant.components.image.os.makedirs"), - patch.object(hass.config, "is_allowed_path", return_value=True), - ): - await hass.services.async_call( - image.DOMAIN, - image.SERVICE_SNAPSHOT, - { - ATTR_ENTITY_ID: "image.test", - image.ATTR_FILENAME: "/test/snapshot.jpg", - }, - blocking=True, - ) - - mock_write = mopen().write - - assert len(mock_write.mock_calls) == 1 - assert 
mock_write.mock_calls[0][1][0] == b"Test" - - -async def test_snapshot_service_no_image(hass: HomeAssistant) -> None: - """Test snapshot service with no image.""" - mopen = mock_open() - mock_integration(hass, MockModule(domain="test")) - mock_platform(hass, "test.image", MockImagePlatform([MockImageNoDataEntity(hass)])) - assert await async_setup_component( - hass, image.DOMAIN, {"image": {"platform": "test"}} - ) - await hass.async_block_till_done() - - with ( - patch("homeassistant.components.image.open", mopen, create=True), - patch( - "homeassistant.components.image.os.makedirs", - ), - patch.object(hass.config, "is_allowed_path", return_value=True), - ): - await hass.services.async_call( - image.DOMAIN, - image.SERVICE_SNAPSHOT, - { - ATTR_ENTITY_ID: "image.test", - image.ATTR_FILENAME: "/test/snapshot.jpg", - }, - blocking=True, - ) - - mock_write = mopen().write - - assert len(mock_write.mock_calls) == 0 - - -async def test_snapshot_service_not_allowed_path(hass: HomeAssistant) -> None: - """Test snapshot service with a not allowed path.""" - mock_integration(hass, MockModule(domain="test")) - mock_platform(hass, "test.image", MockImagePlatform([MockURLImageEntity(hass)])) - assert await async_setup_component( - hass, image.DOMAIN, {"image": {"platform": "test"}} - ) - await hass.async_block_till_done() - - with pytest.raises(HomeAssistantError, match="/test/snapshot.jpg"): - await hass.services.async_call( - image.DOMAIN, - image.SERVICE_SNAPSHOT, - { - ATTR_ENTITY_ID: "image.test", - image.ATTR_FILENAME: "/test/snapshot.jpg", - }, - blocking=True, - ) - - -async def test_snapshot_service_os_error(hass: HomeAssistant) -> None: - """Test snapshot service with os error.""" - mock_integration(hass, MockModule(domain="test")) - mock_platform(hass, "test.image", MockImagePlatform([MockImageSyncEntity(hass)])) - assert await async_setup_component( - hass, image.DOMAIN, {"image": {"platform": "test"}} - ) - await hass.async_block_till_done() - - with ( - patch.object(hass.config, "is_allowed_path", return_value=True), - patch("os.makedirs", side_effect=OSError), - pytest.raises(HomeAssistantError), - ): - await hass.services.async_call( - image.DOMAIN, - image.SERVICE_SNAPSHOT, - { - ATTR_ENTITY_ID: "image.test", - image.ATTR_FILENAME: "/test/snapshot.jpg", - }, - blocking=True, - ) diff --git a/tests/components/image/test_media_source.py b/tests/components/image/test_media_source.py index 73cc76b9fb7..2037641a1a3 100644 --- a/tests/components/image/test_media_source.py +++ b/tests/components/image/test_media_source.py @@ -8,7 +8,7 @@ from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) -async def setup_media_source(hass: HomeAssistant) -> None: +async def setup_media_source(hass): """Set up media source.""" assert await async_setup_component(hass, "media_source", {}) diff --git a/tests/components/image_processing/common.py b/tests/components/image_processing/common.py index 35b94f2c91c..4b3a008c6cd 100644 --- a/tests/components/image_processing/common.py +++ b/tests/components/image_processing/common.py @@ -6,19 +6,19 @@ components. Instead call the service directly. 
from homeassistant.components.image_processing import DOMAIN, SERVICE_SCAN from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import callback from homeassistant.loader import bind_hass @bind_hass -def scan(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def scan(hass, entity_id=ENTITY_MATCH_ALL): """Force process of all cameras or given entity.""" hass.add_job(async_scan, hass, entity_id) @callback @bind_hass -def async_scan(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def async_scan(hass, entity_id=ENTITY_MATCH_ALL): """Force process of all cameras or given entity.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_SCAN, data)) diff --git a/tests/components/image_processing/test_init.py b/tests/components/image_processing/test_init.py index 3e7c8f2fb91..577d3fc47db 100644 --- a/tests/components/image_processing/test_init.py +++ b/tests/components/image_processing/test_init.py @@ -35,15 +35,13 @@ def aiohttp_unused_port_factory( return unused_tcp_port_factory -def get_url(hass: HomeAssistant) -> str: +def get_url(hass): """Return camera url.""" state = hass.states.get("camera.demo_camera") return f"{hass.config.internal_url}{state.attributes.get(ATTR_ENTITY_PICTURE)}" -async def setup_image_processing( - hass: HomeAssistant, aiohttp_unused_port_factory: Callable[[], int] -) -> None: +async def setup_image_processing(hass, aiohttp_unused_port_factory): """Set up things to be run when tests are started.""" await async_setup_component( hass, @@ -57,7 +55,7 @@ async def setup_image_processing( await hass.async_block_till_done() -async def setup_image_processing_face(hass: HomeAssistant) -> None: +async def setup_image_processing_face(hass): """Set up things to be run when tests are started.""" config = {ip.DOMAIN: {"platform": "demo"}, "camera": {"platform": "demo"}} @@ -95,7 +93,7 @@ async def test_setup_component_with_service(hass: HomeAssistant) -> None: async def test_get_image_from_camera( mock_camera_read, hass: HomeAssistant, - aiohttp_unused_port_factory: Callable[[], int], + aiohttp_unused_port_factory, ) -> None: """Grab an image from camera entity.""" await setup_image_processing(hass, aiohttp_unused_port_factory) @@ -118,7 +116,7 @@ async def test_get_image_from_camera( async def test_get_image_without_exists_camera( mock_image, hass: HomeAssistant, - aiohttp_unused_port_factory: Callable[[], int], + aiohttp_unused_port_factory, ) -> None: """Try to get image without exists camera.""" await setup_image_processing(hass, aiohttp_unused_port_factory) @@ -193,7 +191,7 @@ async def test_face_event_call_no_confidence( @pytest.mark.usefixtures("enable_custom_integrations") async def test_update_missing_camera( hass: HomeAssistant, - aiohttp_unused_port_factory: Callable[[], int], + aiohttp_unused_port_factory, caplog: pytest.LogCaptureFixture, ) -> None: """Test when entity does not set camera.""" diff --git a/tests/components/imap/conftest.py b/tests/components/imap/conftest.py index 87663031e7a..354c9fbe24e 100644 --- a/tests/components/imap/conftest.py +++ b/tests/components/imap/conftest.py @@ -1,10 +1,10 @@ """Fixtures for imap tests.""" -from collections.abc import AsyncGenerator, Generator from unittest.mock import AsyncMock, MagicMock, patch from aioimaplib import AUTH, LOGOUT, NONAUTH, SELECTED, STARTED, Response import pytest +from typing_extensions import AsyncGenerator, 
Generator from .const import EMPTY_SEARCH_RESPONSE, TEST_FETCH_RESPONSE_TEXT_PLAIN diff --git a/tests/components/imap/test_config_flow.py b/tests/components/imap/test_config_flow.py index 2270030ad4f..459cecec4a6 100644 --- a/tests/components/imap/test_config_flow.py +++ b/tests/components/imap/test_config_flow.py @@ -15,7 +15,7 @@ from homeassistant.components.imap.const import ( DOMAIN, ) from homeassistant.components.imap.errors import InvalidAuth, InvalidFolder -from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -215,13 +215,18 @@ async def test_reauth_success(hass: HomeAssistant, mock_setup_entry: AsyncMock) ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=MOCK_CONFIG, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == { - CONF_USERNAME: "email@email.com", - CONF_NAME: "Mock Title", - } + assert result["description_placeholders"] == {CONF_USERNAME: "email@email.com"} with patch( "homeassistant.components.imap.config_flow.connect_to_server" @@ -251,7 +256,15 @@ async def test_reauth_failed(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=MOCK_CONFIG, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -281,7 +294,15 @@ async def test_reauth_failed_conn_error(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=MOCK_CONFIG, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/imgw_pib/conftest.py b/tests/components/imgw_pib/conftest.py index 6f23ed3ee80..1d278856b5b 100644 --- a/tests/components/imgw_pib/conftest.py +++ b/tests/components/imgw_pib/conftest.py @@ -1,11 +1,11 @@ """Common fixtures for the IMGW-PIB tests.""" -from collections.abc import Generator from datetime import UTC, datetime from unittest.mock import AsyncMock, patch from imgw_pib import HydrologicalData, SensorData import pytest +from typing_extensions import Generator from homeassistant.components.imgw_pib.const import DOMAIN diff --git a/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr b/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr index c5ae6880022..f314a4be590 100644 --- a/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr +++ b/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr @@ -95,3 +95,101 @@ 'state': 'off', }) # --- +# name: test_binary_sensor[binary_sensor.station_name_flood_alarm-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 
'binary_sensor.station_name_flood_alarm', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Flood alarm', + 'platform': 'imgw_pib', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flood_alarm', + 'unique_id': '123_flood_alarm', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.station_name_flood_alarm-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'alarm_level': 630.0, + 'attribution': 'Data provided by IMGW-PIB', + 'device_class': 'safety', + 'friendly_name': 'Station Name Flood alarm', + }), + 'context': , + 'entity_id': 'binary_sensor.station_name_flood_alarm', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.station_name_flood_warning-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.station_name_flood_warning', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Flood warning', + 'platform': 'imgw_pib', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flood_warning', + 'unique_id': '123_flood_warning', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.station_name_flood_warning-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by IMGW-PIB', + 'device_class': 'safety', + 'friendly_name': 'Station Name Flood warning', + 'warning_level': 590.0, + }), + 'context': , + 'entity_id': 'binary_sensor.station_name_flood_warning', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/imgw_pib/snapshots/test_diagnostics.ambr b/tests/components/imgw_pib/snapshots/test_diagnostics.ambr index 494980ba4ce..096e370ab02 100644 --- a/tests/components/imgw_pib/snapshots/test_diagnostics.ambr +++ b/tests/components/imgw_pib/snapshots/test_diagnostics.ambr @@ -6,8 +6,6 @@ 'station_id': '123', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'imgw_pib', 'minor_version': 1, 'options': dict({ diff --git a/tests/components/imgw_pib/snapshots/test_sensor.ambr b/tests/components/imgw_pib/snapshots/test_sensor.ambr index 6c69b890842..2638e468d92 100644 --- a/tests/components/imgw_pib/snapshots/test_sensor.ambr +++ b/tests/components/imgw_pib/snapshots/test_sensor.ambr @@ -213,3 +213,113 @@ 'state': '10.8', }) # --- +# name: test_sensor[sensor.station_name_water_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.station_name_water_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': 
None, + 'original_name': 'Water level', + 'platform': 'imgw_pib', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_level', + 'unique_id': '123_water_level', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.station_name_water_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by IMGW-PIB', + 'device_class': 'distance', + 'friendly_name': 'Station Name Water level', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.station_name_water_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '526.0', + }) +# --- +# name: test_sensor[sensor.station_name_water_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.station_name_water_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water temperature', + 'platform': 'imgw_pib', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_temperature', + 'unique_id': '123_water_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.station_name_water_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by IMGW-PIB', + 'device_class': 'temperature', + 'friendly_name': 'Station Name Water temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.station_name_water_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.8', + }) +# --- diff --git a/tests/components/imgw_pib/test_diagnostics.py b/tests/components/imgw_pib/test_diagnostics.py index 14d4e7a5224..62dabc982c4 100644 --- a/tests/components/imgw_pib/test_diagnostics.py +++ b/tests/components/imgw_pib/test_diagnostics.py @@ -28,4 +28,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) + assert result == snapshot(exclude=props("entry_id")) diff --git a/tests/components/improv_ble/__init__.py b/tests/components/improv_ble/__init__.py index 521d0881443..41ea98cda7b 100644 --- a/tests/components/improv_ble/__init__.py +++ b/tests/components/improv_ble/__init__.py @@ -25,25 +25,6 @@ IMPROV_BLE_DISCOVERY_INFO = BluetoothServiceInfoBleak( ) -BAD_IMPROV_BLE_DISCOVERY_INFO = BluetoothServiceInfoBleak( - name="00123456", - address="AA:BB:CC:DD:EE:F0", - rssi=-60, - manufacturer_data={}, - service_uuids=[SERVICE_UUID], - service_data={SERVICE_DATA_UUID: b"\x00\x00\x00\x00\x00\x00"}, - source="local", - device=generate_ble_device(address="AA:BB:CC:DD:EE:F0", name="00123456"), - advertisement=generate_advertisement_data( - service_uuids=[SERVICE_UUID], - service_data={SERVICE_DATA_UUID: b"\x00\x00\x00\x00\x00\x00"}, - ), - time=0, - connectable=True, - tx_power=-127, -) - - PROVISIONED_IMPROV_BLE_DISCOVERY_INFO = BluetoothServiceInfoBleak( name="00123456", address="AA:BB:CC:DD:EE:F0", diff --git a/tests/components/improv_ble/test_config_flow.py 
b/tests/components/improv_ble/test_config_flow.py index 2df4be2ba7d..53da1f28425 100644 --- a/tests/components/improv_ble/test_config_flow.py +++ b/tests/components/improv_ble/test_config_flow.py @@ -15,7 +15,6 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType from . import ( - BAD_IMPROV_BLE_DISCOVERY_INFO, IMPROV_BLE_DISCOVERY_INFO, NOT_IMPROV_BLE_DISCOVERY_INFO, PROVISIONED_IMPROV_BLE_DISCOVERY_INFO, @@ -544,7 +543,7 @@ async def test_authorize_fails(hass: HomeAssistant, exc, error) -> None: assert result["reason"] == error -async def _test_provision_error(hass: HomeAssistant, exc) -> str: +async def _test_provision_error(hass: HomeAssistant, exc) -> None: """Test bluetooth flow with error.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -650,20 +649,3 @@ async def test_provision_retry(hass: HomeAssistant, exc, error) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "provision" assert result["errors"] == {"base": error} - - -async def test_provision_fails_invalid_data( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test bluetooth flow with error due to invalid data.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_BLUETOOTH}, - data=BAD_IMPROV_BLE_DISCOVERY_INFO, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "invalid_improv_data" - assert ( - "Aborting improv flow, device AA:BB:CC:DD:EE:F0 sent invalid improv data: '000000000000'" - in caplog.text - ) diff --git a/tests/components/incomfort/conftest.py b/tests/components/incomfort/conftest.py index f17547a1445..64885e38b65 100644 --- a/tests/components/incomfort/conftest.py +++ b/tests/components/incomfort/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Intergas InComfort integration.""" -from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from incomfortclient import DisplayCode import pytest +from typing_extensions import Generator from homeassistant.components.incomfort import DOMAIN from homeassistant.config_entries import ConfigEntry @@ -77,9 +77,10 @@ def mock_room_status() -> dict[str, Any]: @pytest.fixture def mock_incomfort( + hass: HomeAssistant, mock_heater_status: dict[str, Any], mock_room_status: dict[str, Any], -) -> Generator[MagicMock]: +) -> Generator[MagicMock, None]: """Mock the InComfort gateway client.""" class MockRoom: diff --git a/tests/components/incomfort/snapshots/test_binary_sensor.ambr b/tests/components/incomfort/snapshots/test_binary_sensor.ambr index 2f2319b6a44..565abcaa26f 100644 --- a/tests/components/incomfort/snapshots/test_binary_sensor.ambr +++ b/tests/components/incomfort/snapshots/test_binary_sensor.ambr @@ -188,6 +188,147 @@ 'state': 'off', }) # --- +# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'is_pumping', + 'unique_id': 'c0ffeec0ffee_is_pumping', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_burning', + 'unique_id': 'c0ffeec0ffee_is_burning', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_tapping', + 'unique_id': 'c0ffeec0ffee_is_tapping', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_burner-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -377,6 +518,147 @@ 'state': 'off', }) # --- +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_pumping', + 'unique_id': 'c0ffeec0ffee_is_pumping', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_burning', + 'unique_id': 'c0ffeec0ffee_is_burning', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_tapping', + 'unique_id': 'c0ffeec0ffee_is_tapping', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_burner-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -566,6 +848,147 @@ 'state': 'on', }) # --- +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_pumping', + 'unique_id': 'c0ffeec0ffee_is_pumping', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_burning', + 'unique_id': 'c0ffeec0ffee_is_burning', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_tapping', + 'unique_id': 'c0ffeec0ffee_is_tapping', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_burner-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -755,6 +1178,147 @@ 'state': 'off', }) # --- +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_pumping', + 'unique_id': 'c0ffeec0ffee_is_pumping', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_burning', + 'unique_id': 'c0ffeec0ffee_is_burning', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_tapping', + 'unique_id': 'c0ffeec0ffee_is_tapping', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_setup_platform[binary_sensor.boiler_burner-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -944,3 +1508,144 @@ 'state': 'off', }) # --- +# name: test_setup_platform[binary_sensor.boiler_running-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + 
}), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_pumping', + 'unique_id': 'c0ffeec0ffee_is_pumping', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_platform[binary_sensor.boiler_running-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_platform[binary_sensor.boiler_running_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_burning', + 'unique_id': 'c0ffeec0ffee_is_burning', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_platform[binary_sensor.boiler_running_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_platform[binary_sensor.boiler_running_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.boiler_running_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Running', + 'platform': 'incomfort', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_tapping', + 'unique_id': 'c0ffeec0ffee_is_tapping', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_platform[binary_sensor.boiler_running_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Boiler Running', + }), + 'context': , + 'entity_id': 'binary_sensor.boiler_running_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/incomfort/snapshots/test_climate.ambr b/tests/components/incomfort/snapshots/test_climate.ambr index 17adcbb3bab..05b2d4878d0 100644 --- a/tests/components/incomfort/snapshots/test_climate.ambr +++ b/tests/components/incomfort/snapshots/test_climate.ambr @@ 
-1,5 +1,5 @@ # serializer version: 1 -# name: test_setup_platform[legacy_thermostat][climate.thermostat_1-entry] +# name: test_setup_platform[climate.thermostat_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -38,73 +38,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_setup_platform[legacy_thermostat][climate.thermostat_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.4, - 'friendly_name': 'Thermostat 1', - 'hvac_action': , - 'hvac_modes': list([ - , - ]), - 'max_temp': 30.0, - 'min_temp': 5.0, - 'status': dict({ - 'override': 0.0, - 'room_temp': 21.42, - 'setpoint': 18.0, - }), - 'supported_features': , - 'temperature': 18.0, - }), - 'context': , - 'entity_id': 'climate.thermostat_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_setup_platform[new_thermostat][climate.thermostat_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - ]), - 'max_temp': 30.0, - 'min_temp': 5.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.thermostat_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'c0ffeec0ffee_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_platform[new_thermostat][climate.thermostat_1-state] +# name: test_setup_platform[climate.thermostat_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'current_temperature': 21.4, diff --git a/tests/components/incomfort/test_climate.py b/tests/components/incomfort/test_climate.py index ae4c1cf31f7..d5f7397aaaf 100644 --- a/tests/components/incomfort/test_climate.py +++ b/tests/components/incomfort/test_climate.py @@ -2,7 +2,6 @@ from unittest.mock import MagicMock, patch -import pytest from syrupy import SnapshotAssertion from homeassistant.config_entries import ConfigEntry @@ -14,14 +13,6 @@ from tests.common import snapshot_platform @patch("homeassistant.components.incomfort.PLATFORMS", [Platform.CLIMATE]) -@pytest.mark.parametrize( - "mock_room_status", - [ - {"room_temp": 21.42, "setpoint": 18.0, "override": 18.0}, - {"room_temp": 21.42, "setpoint": 18.0, "override": 0.0}, - ], - ids=["new_thermostat", "legacy_thermostat"], -) async def test_setup_platform( hass: HomeAssistant, mock_incomfort: MagicMock, @@ -29,10 +20,6 @@ async def test_setup_platform( snapshot: SnapshotAssertion, mock_config_entry: ConfigEntry, ) -> None: - """Test the incomfort entities are set up correctly. - - Legacy thermostats report 0.0 as override if no override is set, - but new thermostat sync the override with the actual setpoint instead. 
- """ + """Test the incomfort entities are set up correctly.""" await hass.config_entries.async_setup(mock_config_entry.entry_id) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/incomfort/test_water_heater.py b/tests/components/incomfort/test_water_heater.py index 082aecf6d49..5b7aebc50a8 100644 --- a/tests/components/incomfort/test_water_heater.py +++ b/tests/components/incomfort/test_water_heater.py @@ -2,7 +2,6 @@ from unittest.mock import MagicMock, patch -import pytest from syrupy import SnapshotAssertion from homeassistant.config_entries import ConfigEntry @@ -10,8 +9,6 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import MOCK_HEATER_STATUS - from tests.common import snapshot_platform @@ -26,44 +23,3 @@ async def test_setup_platform( """Test the incomfort entities are set up correctly.""" await hass.config_entries.async_setup(mock_config_entry.entry_id) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.parametrize( - ("mock_heater_status", "current_temperature"), - [ - (MOCK_HEATER_STATUS, 35.3), - (MOCK_HEATER_STATUS | {"is_tapping": True}, 30.2), - (MOCK_HEATER_STATUS | {"is_pumping": True}, 35.3), - (MOCK_HEATER_STATUS | {"heater_temp": None}, 30.2), - (MOCK_HEATER_STATUS | {"tap_temp": None}, 35.3), - (MOCK_HEATER_STATUS | {"heater_temp": None, "tap_temp": None}, None), - ], - ids=[ - "both_temps_available_choose_highest", - "is_tapping_choose_tapping_temp", - "is_pumping_choose_heater_temp", - "heater_temp_not_available_choose_tapping_temp", - "tapping_temp_not_available_choose_heater_temp", - "tapping_and_heater_temp_not_available_unknown", - ], -) -@patch("homeassistant.components.incomfort.PLATFORMS", [Platform.WATER_HEATER]) -async def test_current_temperature_cases( - hass: HomeAssistant, - mock_incomfort: MagicMock, - entity_registry: er.EntityRegistry, - mock_config_entry: ConfigEntry, - current_temperature: float | None, -) -> None: - """Test incomfort entities with alternate current temperature calculation. - - The boilers current temperature is calculated from the testdata: - heater_temp: 35.34 - tap_temp: 30.21 - - It is based on the operating mode as the boiler can heat tap water or - the house. 
- """ - await hass.config_entries.async_setup(mock_config_entry.entry_id) - assert (state := hass.states.get("water_heater.boiler")) is not None - assert state.attributes.get("current_temperature") == current_temperature diff --git a/tests/components/influxdb/test_init.py b/tests/components/influxdb/test_init.py index f900be7b700..2d93322999d 100644 --- a/tests/components/influxdb/test_init.py +++ b/tests/components/influxdb/test_init.py @@ -1,6 +1,5 @@ """The tests for the InfluxDB component.""" -from collections.abc import Generator from dataclasses import dataclass import datetime from http import HTTPStatus @@ -8,6 +7,7 @@ import logging from unittest.mock import ANY, MagicMock, Mock, call, patch import pytest +from typing_extensions import Generator from homeassistant.components import influxdb from homeassistant.components.influxdb.const import DEFAULT_BUCKET @@ -43,7 +43,7 @@ class FilterTest: @pytest.fixture(autouse=True) -def mock_batch_timeout(monkeypatch: pytest.MonkeyPatch) -> None: +def mock_batch_timeout(hass, monkeypatch): """Mock the event bus listener and the batch timeout for tests.""" monkeypatch.setattr( f"{INFLUX_PATH}.InfluxThread.batch_timeout", @@ -334,9 +334,7 @@ async def test_invalid_config( assert not await async_setup_component(hass, influxdb.DOMAIN, config) -async def _setup( - hass: HomeAssistant, mock_influx_client, config_ext, get_write_api -) -> None: +async def _setup(hass, mock_influx_client, config_ext, get_write_api): """Prepare client for next test and return event handler method.""" config = { "influxdb": { diff --git a/tests/components/influxdb/test_sensor.py b/tests/components/influxdb/test_sensor.py index 7f5954728a6..48cae2a3ae6 100644 --- a/tests/components/influxdb/test_sensor.py +++ b/tests/components/influxdb/test_sensor.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import Generator from dataclasses import dataclass from datetime import timedelta from http import HTTPStatus @@ -11,6 +10,7 @@ from unittest.mock import MagicMock, patch from influxdb.exceptions import InfluxDBClientError, InfluxDBServerError from influxdb_client.rest import ApiException import pytest +from typing_extensions import Generator from voluptuous import Invalid from homeassistant.components import sensor @@ -25,7 +25,7 @@ from homeassistant.components.influxdb.const import ( ) from homeassistant.components.influxdb.sensor import PLATFORM_SCHEMA from homeassistant.const import STATE_UNKNOWN -from homeassistant.core import HomeAssistant, State +from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import PLATFORM_NOT_READY_BASE_WAIT_TIME from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -190,9 +190,7 @@ def _set_query_mock_v2( return query_api -async def _setup( - hass: HomeAssistant, config_ext, queries, expected_sensors -) -> list[State]: +async def _setup(hass, config_ext, queries, expected_sensors): """Create client and test expected sensors.""" config = { DOMAIN: config_ext, diff --git a/tests/components/input_datetime/test_init.py b/tests/components/input_datetime/test_init.py index 411f084d39a..fdbb9a7803f 100644 --- a/tests/components/input_datetime/test_init.py +++ b/tests/components/input_datetime/test_init.py @@ -79,9 +79,7 @@ def storage_setup(hass: HomeAssistant, hass_storage: dict[str, Any]): return _storage -async def async_set_date_and_time( - hass: HomeAssistant, entity_id: str, dt_value: datetime.datetime -) -> None: +async def 
async_set_date_and_time(hass, entity_id, dt_value):
     """Set date and / or time of input_datetime."""
     await hass.services.async_call(
         DOMAIN,
@@ -95,9 +93,7 @@ async def async_set_date_and_time(
     )
-async def async_set_datetime(
-    hass: HomeAssistant, entity_id: str, dt_value: datetime.datetime
-) -> None:
+async def async_set_datetime(hass, entity_id, dt_value):
     """Set date and / or time of input_datetime."""
     await hass.services.async_call(
         DOMAIN,
@@ -107,9 +103,7 @@ async def async_set_datetime(
     )
-async def async_set_timestamp(
-    hass: HomeAssistant, entity_id: str, timestamp: float
-) -> None:
+async def async_set_timestamp(hass, entity_id, timestamp):
     """Set date and / or time of input_datetime."""
     await hass.services.async_call(
         DOMAIN,
diff --git a/tests/components/input_number/test_init.py b/tests/components/input_number/test_init.py
index 8ea1c2e25b6..73e41f347ce 100644
--- a/tests/components/input_number/test_init.py
+++ b/tests/components/input_number/test_init.py
@@ -65,7 +65,7 @@ def storage_setup(hass: HomeAssistant, hass_storage: dict[str, Any]):
     return _storage
-async def set_value(hass: HomeAssistant, entity_id: str, value: str) -> None:
+async def set_value(hass, entity_id, value):
     """Set input_number to value.
     This is a legacy helper method. Do not use it for new tests.
@@ -78,7 +78,7 @@ async def set_value(hass: HomeAssistant, entity_id: str, value: str) -> None:
     )
-async def increment(hass: HomeAssistant, entity_id: str) -> None:
+async def increment(hass, entity_id):
     """Increment value of entity.
     This is a legacy helper method. Do not use it for new tests.
@@ -88,7 +88,7 @@ async def increment(hass: HomeAssistant, entity_id: str) -> None:
     )
-async def decrement(hass: HomeAssistant, entity_id: str) -> None:
+async def decrement(hass, entity_id):
     """Decrement value of entity.
     This is a legacy helper method. Do not use it for new tests.
diff --git a/tests/components/input_text/test_init.py b/tests/components/input_text/test_init.py
index 2ca1d39a983..3cae98b6dfe 100644
--- a/tests/components/input_text/test_init.py
+++ b/tests/components/input_text/test_init.py
@@ -71,7 +71,7 @@ def storage_setup(hass: HomeAssistant, hass_storage: dict[str, Any]):
     return _storage
-async def async_set_value(hass: HomeAssistant, entity_id: str, value: str) -> None:
+async def async_set_value(hass, entity_id, value):
     """Set input_text to value."""
     await hass.services.async_call(
         DOMAIN,
diff --git a/tests/components/insteon/const.py b/tests/components/insteon/const.py
index a4e4e8a390d..c35db3b7092 100644
--- a/tests/components/insteon/const.py
+++ b/tests/components/insteon/const.py
@@ -79,4 +79,5 @@ PATCH_CONNECTION = "homeassistant.components.insteon.config_flow.async_connect"
 PATCH_CONNECTION_CLOSE = "homeassistant.components.insteon.config_flow.async_close"
 PATCH_DEVICES = "homeassistant.components.insteon.config_flow.devices"
 PATCH_USB_LIST = "homeassistant.components.insteon.config_flow.async_get_usb_ports"
+PATCH_ASYNC_SETUP = "homeassistant.components.insteon.async_setup"
 PATCH_ASYNC_SETUP_ENTRY = "homeassistant.components.insteon.async_setup_entry"
diff --git a/tests/components/insteon/mock_devices.py b/tests/components/insteon/mock_devices.py
index 05db45d00ac..6b5f5cf5e09 100644
--- a/tests/components/insteon/mock_devices.py
+++ b/tests/components/insteon/mock_devices.py
@@ -30,7 +30,7 @@ class MockSwitchLinc(SwitchedLightingControl_SwitchLinc02):
 class MockDevices:
     """Mock devices class."""
-    def __init__(self, connected=True) -> None:
+    def __init__(self, connected=True):
         """Init the MockDevices class."""
         self._devices = {}
         self.modem = None
@@ -168,14 +168,6 @@ class MockDevices:
             yield address
             await asyncio.sleep(0.01)
-    def values(self):
-        """Return the devices."""
-        return self._devices.values()
-
-    def items(self):
-        """Return the address, device pair."""
-        return self._devices.items()
-
     def subscribe(self, listener, force_strong_ref=False):
         """Mock the subscribe function."""
         subscribe_topic(listener, DEVICE_LIST_CHANGED)
diff --git a/tests/components/insteon/test_api_aldb.py b/tests/components/insteon/test_api_aldb.py
index bdb749836e2..4376628d9a4 100644
--- a/tests/components/insteon/test_api_aldb.py
+++ b/tests/components/insteon/test_api_aldb.py
@@ -1,8 +1,6 @@
 """Test the Insteon All-Link Database APIs."""
-import asyncio
 import json
-from typing import Any
 from unittest.mock import patch
 from pyinsteon import pub
@@ -25,7 +23,7 @@ from homeassistant.core import HomeAssistant
 from .mock_devices import MockDevices
 from tests.common import load_fixture
-from tests.typing import MockHAClientWebSocket, WebSocketGenerator
+from tests.typing import WebSocketGenerator
 @pytest.fixture(name="aldb_data", scope="module")
@@ -34,9 +32,7 @@ def aldb_data_fixture():
     return json.loads(load_fixture("insteon/aldb_data.json"))
-async def _setup(
-    hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data: dict[str, Any]
-) -> tuple[MockHAClientWebSocket, MockDevices]:
+async def _setup(hass, hass_ws_client, aldb_data):
     """Set up tests."""
     ws_client = await hass_ws_client(hass)
     devices = MockDevices()
@@ -333,38 +329,3 @@ async def test_bad_address(
     msg = await ws_client.receive_json()
     assert not msg["success"]
     assert msg["error"]["message"] == INSTEON_DEVICE_NOT_FOUND
-
-
-async def test_notify_on_aldb_loading(
-    hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data
-) -> None:
-    """Test tracking changes to ALDB
status across all devices.""" - ws_client, devices = await _setup(hass, hass_ws_client, aldb_data) - - with patch.object(insteon.api.aldb, "devices", devices): - await ws_client.send_json_auto_id({TYPE: "insteon/aldb/notify_all"}) - msg = await ws_client.receive_json() - assert msg["success"] - - await asyncio.sleep(0.1) - msg = await ws_client.receive_json() - assert msg["event"]["type"] == "status" - assert not msg["event"]["is_loading"] - - device = devices["333333"] - device.aldb._update_status(ALDBStatus.LOADING) - await asyncio.sleep(0.1) - msg = await ws_client.receive_json() - assert msg["event"]["type"] == "status" - assert msg["event"]["is_loading"] - - device.aldb._update_status(ALDBStatus.LOADED) - await asyncio.sleep(0.1) - msg = await ws_client.receive_json() - assert msg["event"]["type"] == "status" - assert not msg["event"]["is_loading"] - - await ws_client.client.session.close() - - # Allow lingering tasks to complete - await asyncio.sleep(0.1) diff --git a/tests/components/insteon/test_api_config.py b/tests/components/insteon/test_api_config.py index 9c85ca6a706..7c922338638 100644 --- a/tests/components/insteon/test_api_config.py +++ b/tests/components/insteon/test_api_config.py @@ -1,10 +1,7 @@ """Test the Insteon APIs for configuring the integration.""" -import asyncio -import json from unittest.mock import patch -from homeassistant.components import insteon from homeassistant.components.insteon.api.device import ID, TYPE from homeassistant.components.insteon.const import ( CONF_HUB_VERSION, @@ -21,10 +18,8 @@ from .const import ( MOCK_USER_INPUT_PLM, ) from .mock_connection import mock_failed_connection, mock_successful_connection -from .mock_devices import MockDevices from .mock_setup import async_mock_setup -from tests.common import load_fixture from tests.typing import WebSocketGenerator @@ -394,55 +389,3 @@ async def test_remove_device_override_no_overrides( config_entry = hass.config_entries.async_get_entry("abcde12345") assert not config_entry.options.get(CONF_OVERRIDE) - - -async def test_get_broken_links( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test getting broken ALDB links.""" - - ws_client, _, _, _ = await async_mock_setup(hass, hass_ws_client) - devices = MockDevices() - await devices.async_load() - aldb_data = json.loads(load_fixture("insteon/aldb_data.json")) - devices.fill_aldb("33.33.33", aldb_data) - await asyncio.sleep(1) - with patch.object(insteon.api.config, "devices", devices): - await ws_client.send_json({ID: 2, TYPE: "insteon/config/get_broken_links"}) - msg = await ws_client.receive_json() - assert msg["success"] - - assert len(msg["result"]) == 5 - - -async def test_get_unknown_devices( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test getting unknown Insteon devices.""" - - ws_client, _, _, _ = await async_mock_setup(hass, hass_ws_client) - devices = MockDevices() - await devices.async_load() - aldb_data = { - "4095": { - "memory": 4095, - "in_use": True, - "controller": False, - "high_water_mark": False, - "bit5": True, - "bit4": False, - "group": 0, - "target": "FFFFFF", - "data1": 0, - "data2": 0, - "data3": 0, - }, - } - devices.fill_aldb("33.33.33", aldb_data) - with patch.object(insteon.api.config, "devices", devices): - await ws_client.send_json({ID: 2, TYPE: "insteon/config/get_unknown_devices"}) - msg = await ws_client.receive_json() - assert msg["success"] - - assert len(msg["result"]) == 1 - await asyncio.sleep(0.1) diff --git 
a/tests/components/insteon/test_api_device.py b/tests/components/insteon/test_api_device.py index 6f1a174f024..29d601eb3ef 100644 --- a/tests/components/insteon/test_api_device.py +++ b/tests/components/insteon/test_api_device.py @@ -16,6 +16,7 @@ from homeassistant.components.insteon.api.device import ( ID, INSTEON_DEVICE_NOT_FOUND, TYPE, + async_device_name, ) from homeassistant.components.insteon.const import ( CONF_OVERRIDE, @@ -23,7 +24,6 @@ from homeassistant.components.insteon.const import ( DOMAIN, MULTIPLE, ) -from homeassistant.components.insteon.utils import async_device_name from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -129,6 +129,10 @@ async def test_get_ha_device_name( name = await async_device_name(device_reg, "11.11.11") assert name == "Device 11.11.11" + # Test no HA device but a real Insteon device + name = await async_device_name(device_reg, "22.22.22") + assert name == "Device 22.22.22 (2)" + # Test no HA or Insteon device name = await async_device_name(device_reg, "BB.BB.BB") assert name == "" diff --git a/tests/components/insteon/test_api_properties.py b/tests/components/insteon/test_api_properties.py index aeeeeab3d7b..aee35cb8994 100644 --- a/tests/components/insteon/test_api_properties.py +++ b/tests/components/insteon/test_api_properties.py @@ -1,8 +1,6 @@ """Test the Insteon properties APIs.""" -import asyncio import json -from typing import Any from unittest.mock import AsyncMock, patch from pyinsteon.config import MOMENTARY_DELAY, RELAY_MODE, TOGGLE_BUTTON @@ -28,7 +26,7 @@ from homeassistant.core import HomeAssistant from .mock_devices import MockDevices from tests.common import load_fixture -from tests.typing import MockHAClientWebSocket, WebSocketGenerator +from tests.typing import WebSocketGenerator @pytest.fixture(name="kpl_properties_data", scope="module") @@ -43,12 +41,7 @@ def iolinc_properties_data_fixture(): return json.loads(load_fixture("insteon/iolinc_properties.json")) -async def _setup( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - address: str, - properties_data: dict[str, Any], -) -> tuple[MockHAClientWebSocket, MockDevices]: +async def _setup(hass, hass_ws_client, address, properties_data): """Set up tests.""" ws_client = await hass_ws_client(hass) devices = MockDevices() @@ -157,7 +150,6 @@ async def test_get_read_only_properties( msg = await ws_client.receive_json() assert msg["success"] assert len(msg["result"]["properties"]) == 15 - await asyncio.sleep(1) async def test_get_unknown_properties( diff --git a/tests/components/insteon/test_api_scenes.py b/tests/components/insteon/test_api_scenes.py index 14001e0495d..1b8d4d50f08 100644 --- a/tests/components/insteon/test_api_scenes.py +++ b/tests/components/insteon/test_api_scenes.py @@ -1,8 +1,7 @@ """Test the Insteon Scenes APIs.""" -from collections.abc import Generator +import json import os -from typing import Any from unittest.mock import AsyncMock, patch from pyinsteon.constants import ResponseStatus @@ -12,22 +11,21 @@ import pytest from homeassistant.components.insteon.api import async_load_api, scenes from homeassistant.components.insteon.const import ID, TYPE from homeassistant.core import HomeAssistant -from homeassistant.util.json import JsonArrayType from .mock_devices import MockDevices -from tests.common import load_json_array_fixture -from tests.typing import MockHAClientWebSocket, WebSocketGenerator +from tests.common import load_fixture +from tests.typing import WebSocketGenerator 
@pytest.fixture(name="scene_data", scope="module") -def aldb_data_fixture() -> JsonArrayType: +def aldb_data_fixture(): """Load the controller state fixture data.""" - return load_json_array_fixture("insteon/scene_data.json") + return json.loads(load_fixture("insteon/scene_data.json")) @pytest.fixture(name="remove_json") -def remove_insteon_devices_json(hass: HomeAssistant) -> Generator[None]: +def remove_insteon_devices_json(hass): """Fixture to remove insteon_devices.json at the end of the test.""" yield file = os.path.join(hass.config.config_dir, "insteon_devices.json") @@ -35,7 +33,7 @@ def remove_insteon_devices_json(hass: HomeAssistant) -> Generator[None]: os.remove(file) -def _scene_to_array(scene: dict[str, Any]) -> list[dict[str, Any]]: +def _scene_to_array(scene): """Convert a scene object to a dictionary.""" scene_list = [] for device, links in scene["devices"].items(): @@ -49,9 +47,7 @@ def _scene_to_array(scene: dict[str, Any]) -> list[dict[str, Any]]: return scene_list -async def _setup( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data: JsonArrayType -) -> tuple[MockHAClientWebSocket, MockDevices]: +async def _setup(hass, hass_ws_client, scene_data): """Set up tests.""" ws_client = await hass_ws_client(hass) devices = MockDevices() @@ -67,7 +63,7 @@ async def _setup( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_get_scenes( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data: JsonArrayType + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data ) -> None: """Test getting all Insteon scenes.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -83,7 +79,7 @@ async def test_get_scenes( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_get_scene( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data: JsonArrayType + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data ) -> None: """Test getting an Insteon scene.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -97,11 +93,8 @@ async def test_get_scene( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) -@pytest.mark.usefixtures("remove_json") async def test_save_scene( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - scene_data: JsonArrayType, + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json ) -> None: """Test saving an Insteon scene.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -132,11 +125,8 @@ async def test_save_scene( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) -@pytest.mark.usefixtures("remove_json") async def test_save_new_scene( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - scene_data: JsonArrayType, + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json ) -> None: """Test saving a new Insteon scene.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -167,11 +157,8 @@ async def test_save_new_scene( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) -@pytest.mark.usefixtures("remove_json") async def test_save_scene_error( - hass: HomeAssistant, - hass_ws_client: 
WebSocketGenerator, - scene_data: JsonArrayType, + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json ) -> None: """Test saving an Insteon scene with error.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -202,11 +189,8 @@ async def test_save_scene_error( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) -@pytest.mark.usefixtures("remove_json") async def test_delete_scene( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - scene_data: JsonArrayType, + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json ) -> None: """Test delete an Insteon scene.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) diff --git a/tests/components/insteon/test_config_flow.py b/tests/components/insteon/test_config_flow.py index 31d38a603f1..4d3fb815463 100644 --- a/tests/components/insteon/test_config_flow.py +++ b/tests/components/insteon/test_config_flow.py @@ -1,8 +1,6 @@ """Test the config flow for the Insteon integration.""" -from collections.abc import Callable -from typing import Any -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest from voluptuous_serialize import convert @@ -16,7 +14,7 @@ from homeassistant.components.insteon.config_flow import ( STEP_PLM_MANUALLY, ) from homeassistant.components.insteon.const import CONF_HUB_VERSION, DOMAIN -from homeassistant.config_entries import ConfigEntryState, ConfigFlowResult +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_DEVICE, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -27,6 +25,7 @@ from .const import ( MOCK_USER_INPUT_HUB_V2, MOCK_USER_INPUT_PLM, MOCK_USER_INPUT_PLM_MANUAL, + PATCH_ASYNC_SETUP, PATCH_ASYNC_SETUP_ENTRY, PATCH_CONNECTION, PATCH_USB_LIST, @@ -62,7 +61,7 @@ async def mock_failed_connection(*args, **kwargs): raise ConnectionError("Connection failed") -async def _init_form(hass: HomeAssistant, modem_type: str) -> ConfigFlowResult: +async def _init_form(hass, modem_type): """Run the user form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -75,18 +74,14 @@ async def _init_form(hass: HomeAssistant, modem_type: str) -> ConfigFlowResult: ) -async def _device_form( - hass: HomeAssistant, - flow_id: str, - connection: Callable[..., Any], - user_input: dict[str, Any] | None, -) -> tuple[ConfigFlowResult, AsyncMock]: +async def _device_form(hass, flow_id, connection, user_input): """Test the PLM, Hub v1 or Hub v2 form.""" with ( patch( PATCH_CONNECTION, new=connection, ), + patch(PATCH_ASYNC_SETUP, return_value=True) as mock_setup, patch( PATCH_ASYNC_SETUP_ENTRY, return_value=True, @@ -94,7 +89,7 @@ async def _device_form( ): result = await hass.config_entries.flow.async_configure(flow_id, user_input) await hass.async_block_till_done() - return result, mock_setup_entry + return result, mock_setup, mock_setup_entry async def test_form_select_modem(hass: HomeAssistant) -> None: @@ -130,12 +125,13 @@ async def test_form_select_plm(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, mock_setup_entry = await _device_form( + result2, mock_setup, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_PLM ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert 
result2["data"] == MOCK_USER_INPUT_PLM + assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -146,7 +142,7 @@ async def test_form_select_plm_no_usb(hass: HomeAssistant) -> None: USB_PORTS.clear() result = await _init_form(hass, STEP_PLM) - result2, _ = await _device_form( + result2, _, _ = await _device_form( hass, result["flow_id"], mock_successful_connection, None ) USB_PORTS.update(temp_usb_list) @@ -159,17 +155,18 @@ async def test_form_select_plm_manual(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, mock_setup_entry = await _device_form( + result2, mock_setup, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM_MANUAL ) - result3, mock_setup_entry = await _device_form( + result3, mock_setup, mock_setup_entry = await _device_form( hass, result2["flow_id"], mock_successful_connection, MOCK_USER_INPUT_PLM ) assert result2["type"] is FlowResultType.FORM assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["data"] == MOCK_USER_INPUT_PLM + assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -178,7 +175,7 @@ async def test_form_select_hub_v1(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_HUB_V1) - result2, mock_setup_entry = await _device_form( + result2, mock_setup, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_HUB_V1 ) assert result2["type"] is FlowResultType.CREATE_ENTRY @@ -187,6 +184,7 @@ async def test_form_select_hub_v1(hass: HomeAssistant) -> None: CONF_HUB_VERSION: 1, } + assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -195,7 +193,7 @@ async def test_form_select_hub_v2(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_HUB_V2) - result2, mock_setup_entry = await _device_form( + result2, mock_setup, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_HUB_V2 ) assert result2["type"] is FlowResultType.CREATE_ENTRY @@ -204,6 +202,7 @@ async def test_form_select_hub_v2(hass: HomeAssistant) -> None: CONF_HUB_VERSION: 2, } + assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -234,7 +233,7 @@ async def test_failed_connection_plm(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, _ = await _device_form( + result2, _, _ = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM ) assert result2["type"] is FlowResultType.FORM @@ -246,10 +245,10 @@ async def test_failed_connection_plm_manually(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, _ = await _device_form( + result2, _, _ = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_PLM_MANUAL ) - result3, _ = await _device_form( + result3, _, _ = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM ) assert result3["type"] is FlowResultType.FORM @@ -261,7 +260,7 @@ async def test_failed_connection_hub(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_HUB_V2) - result2, _ = await _device_form( + result2, _, _ = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_HUB_V2 ) assert result2["type"] is FlowResultType.FORM @@ -285,7 +284,7 @@ async def test_discovery_via_usb(hass: HomeAssistant) -> None: assert result["type"] is 
FlowResultType.FORM assert result["step_id"] == "confirm_usb" - with patch(PATCH_CONNECTION): + with patch(PATCH_CONNECTION), patch(PATCH_ASYNC_SETUP, return_value=True): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) diff --git a/tests/components/insteon/test_lock.py b/tests/components/insteon/test_lock.py index ec236059c74..a782e006a62 100644 --- a/tests/components/insteon/test_lock.py +++ b/tests/components/insteon/test_lock.py @@ -7,11 +7,18 @@ import pytest from homeassistant.components import insteon from homeassistant.components.insteon import ( DOMAIN, - entity as insteon_entity, + insteon_entity, utils as insteon_utils, ) -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState -from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform +from homeassistant.components.lock import ( # SERVICE_LOCK,; SERVICE_UNLOCK, + DOMAIN as LOCK_DOMAIN, +) +from homeassistant.const import ( # ATTR_ENTITY_ID,; + EVENT_HOMEASSISTANT_STOP, + STATE_LOCKED, + STATE_UNLOCKED, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -41,7 +48,11 @@ def patch_setup_and_devices(): patch.object(insteon, "async_close"), patch.object(insteon, "devices", devices), patch.object(insteon_utils, "devices", devices), - patch.object(insteon_entity, "devices", devices), + patch.object( + insteon_entity, + "devices", + devices, + ), ): yield @@ -66,7 +77,7 @@ async def test_lock_lock( try: lock = entity_registry.async_get("lock.device_55_55_55_55_55_55") state = hass.states.get(lock.entity_id) - assert state.state == LockState.UNLOCKED + assert state.state is STATE_UNLOCKED # lock via UI await hass.services.async_call( @@ -95,7 +106,7 @@ async def test_lock_unlock( lock = entity_registry.async_get("lock.device_55_55_55_55_55_55") state = hass.states.get(lock.entity_id) - assert state.state == LockState.LOCKED + assert state.state is STATE_LOCKED # lock via UI await hass.services.async_call( diff --git a/tests/components/intellifire/__init__.py b/tests/components/intellifire/__init__.py index 50497939f7f..f655ccc2fa4 100644 --- a/tests/components/intellifire/__init__.py +++ b/tests/components/intellifire/__init__.py @@ -1,13 +1 @@ """Tests for the IntelliFire integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/intellifire/conftest.py b/tests/components/intellifire/conftest.py index 0bd7073ee47..1aae4fb6dd6 100644 --- a/tests/components/intellifire/conftest.py +++ b/tests/components/intellifire/conftest.py @@ -1,36 +1,10 @@ """Fixtures for IntelliFire integration tests.""" -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, Mock, PropertyMock, patch +from unittest.mock import AsyncMock, MagicMock, Mock, patch -from intellifire4py.const import IntelliFireApiMode -from intellifire4py.model import ( - IntelliFireCommonFireplaceData, - IntelliFirePollData, - IntelliFireUserData, -) +from aiohttp.client_reqrep import ConnectionKey import pytest - -from homeassistant.components.intellifire.const import ( - API_MODE_CLOUD, - API_MODE_LOCAL, - CONF_AUTH_COOKIE, - CONF_CONTROL_MODE, - CONF_READ_MODE, - 
CONF_SERIAL, - CONF_USER_ID, - CONF_WEB_CLIENT_ID, - DOMAIN, -) -from homeassistant.const import ( - CONF_API_KEY, - CONF_HOST, - CONF_IP_ADDRESS, - CONF_PASSWORD, - CONF_USERNAME, -) - -from tests.common import MockConfigEntry, load_json_object_fixture +from typing_extensions import Generator @pytest.fixture @@ -48,201 +22,39 @@ def mock_fireplace_finder_none() -> Generator[MagicMock]: mock_found_fireplaces = Mock() mock_found_fireplaces.ips = [] with patch( - "homeassistant.components.intellifire.config_flow.UDPFireplaceFinder.search_fireplace" + "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace" ): yield mock_found_fireplaces @pytest.fixture -def mock_config_entry_current() -> MockConfigEntry: - """Return a mock config entry.""" - return MockConfigEntry( - domain=DOMAIN, - version=1, - minor_version=2, - data={ - CONF_IP_ADDRESS: "192.168.2.108", - CONF_USERNAME: "grumpypanda@china.cn", - CONF_PASSWORD: "you-stole-my-pandas", - CONF_SERIAL: "3FB284769E4736F30C8973A7ED358123", - CONF_WEB_CLIENT_ID: "FA2B1C3045601234D0AE17D72F8E975", - CONF_API_KEY: "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", - CONF_AUTH_COOKIE: "B984F21A6378560019F8A1CDE41B6782", - CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - }, - options={CONF_READ_MODE: API_MODE_LOCAL, CONF_CONTROL_MODE: API_MODE_CLOUD}, - unique_id="3FB284769E4736F30C8973A7ED358123", - ) - - -@pytest.fixture -def mock_config_entry_old() -> MockConfigEntry: - """For migration testing.""" - return MockConfigEntry( - domain=DOMAIN, - version=1, - minor_version=1, - title="Fireplace 3FB284769E4736F30C8973A7ED358123", - data={ - CONF_HOST: "192.168.2.108", - CONF_USERNAME: "grumpypanda@china.cn", - CONF_PASSWORD: "you-stole-my-pandas", - CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - }, - ) - - -@pytest.fixture -def mock_common_data_local() -> IntelliFireCommonFireplaceData: - """Fixture for mock common data.""" - return IntelliFireCommonFireplaceData( - auth_cookie="B984F21A6378560019F8A1CDE41B6782", - user_id="52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - web_client_id="FA2B1C3045601234D0AE17D72F8E975", - serial="3FB284769E4736F30C8973A7ED358123", - api_key="B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", - ip_address="192.168.2.108", - read_mode=IntelliFireApiMode.LOCAL, - control_mode=IntelliFireApiMode.LOCAL, - ) - - -@pytest.fixture -def mock_apis_multifp( - mock_cloud_interface, mock_local_interface, mock_fp -) -> Generator[tuple[AsyncMock, AsyncMock, MagicMock]]: - """Multi fireplace version of mocks.""" - return mock_local_interface, mock_cloud_interface, mock_fp - - -@pytest.fixture -def mock_apis_single_fp( - mock_cloud_interface, mock_local_interface, mock_fp -) -> Generator[tuple[AsyncMock, AsyncMock, MagicMock]]: - """Single fire place version of the mocks.""" - data_v1 = IntelliFireUserData( - **load_json_object_fixture("user_data_1.json", DOMAIN) - ) - with patch.object( - type(mock_cloud_interface), "user_data", new_callable=PropertyMock - ) as mock_user_data: - mock_user_data.return_value = data_v1 - yield mock_local_interface, mock_cloud_interface, mock_fp - - -@pytest.fixture -def mock_cloud_interface() -> Generator[AsyncMock]: - """Mock cloud interface to use for testing.""" - user_data = IntelliFireUserData( - **load_json_object_fixture("user_data_3.json", DOMAIN) - ) - - with ( - patch( - "homeassistant.components.intellifire.IntelliFireCloudInterface", - autospec=True, - ) as mock_client, - patch( - 
"homeassistant.components.intellifire.config_flow.IntelliFireCloudInterface", - new=mock_client, - ), - patch( - "intellifire4py.cloud_interface.IntelliFireCloudInterface", - new=mock_client, - ), +def mock_fireplace_finder_single() -> Generator[MagicMock]: + """Mock fireplace finder.""" + mock_found_fireplaces = Mock() + mock_found_fireplaces.ips = ["192.168.1.69"] + with patch( + "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace" ): - # Mock async context manager - mock_client = mock_client.return_value - mock_client.__aenter__ = AsyncMock(return_value=mock_client) - mock_client.__aexit__ = AsyncMock(return_value=None) - - # Mock other async methods if needed - mock_client.login_with_credentials = AsyncMock() - mock_client.poll = AsyncMock() - type(mock_client).user_data = PropertyMock(return_value=user_data) - - yield mock_client # Yielding to the test + yield mock_found_fireplaces @pytest.fixture -def mock_local_interface() -> Generator[AsyncMock]: - """Mock version of IntelliFireAPILocal.""" - poll_data = IntelliFirePollData( - **load_json_object_fixture("intellifire/local_poll.json") - ) +def mock_intellifire_config_flow() -> Generator[MagicMock]: + """Return a mocked IntelliFire client.""" + data_mock = Mock() + data_mock.serial = "12345" + with patch( - "homeassistant.components.intellifire.config_flow.IntelliFireAPILocal", + "homeassistant.components.intellifire.config_flow.IntellifireAPILocal", autospec=True, - ) as mock_client: - mock_client = mock_client.return_value - # Mock all instances of the class - type(mock_client).data = PropertyMock(return_value=poll_data) - yield mock_client + ) as intellifire_mock: + intellifire = intellifire_mock.return_value + intellifire.data = data_mock + yield intellifire -@pytest.fixture -def mock_fp(mock_common_data_local) -> Generator[AsyncMock]: - """Mock fireplace.""" - - local_poll_data = IntelliFirePollData( - **load_json_object_fixture("local_poll.json", DOMAIN) - ) - - assert local_poll_data.connection_quality == 988451 - - with patch( - "homeassistant.components.intellifire.UnifiedFireplace" - ) as mock_unified_fireplace: - # Create an instance of the mock - mock_instance = mock_unified_fireplace.return_value - - # Mock methods and properties of the instance - mock_instance.perform_cloud_poll = AsyncMock() - mock_instance.perform_local_poll = AsyncMock() - - mock_instance.async_validate_connectivity = AsyncMock(return_value=(True, True)) - - type(mock_instance).is_cloud_polling = PropertyMock(return_value=False) - type(mock_instance).is_local_polling = PropertyMock(return_value=True) - - mock_instance.get_user_data_as_json.return_value = '{"mock": "data"}' - - mock_instance.ip_address = "192.168.1.100" - mock_instance.api_key = "mock_api_key" - mock_instance.serial = "mock_serial" - mock_instance.user_id = "mock_user_id" - mock_instance.auth_cookie = "mock_auth_cookie" - mock_instance.web_client_id = "mock_web_client_id" - - # Configure the READ Api - mock_instance.read_api = MagicMock() - mock_instance.read_api.poll = MagicMock(return_value=local_poll_data) - mock_instance.read_api.data = local_poll_data - - mock_instance.control_api = MagicMock() - - mock_instance.local_connectivity = True - mock_instance.cloud_connectivity = False - - mock_instance._read_mode = IntelliFireApiMode.LOCAL - mock_instance.read_mode = IntelliFireApiMode.LOCAL - - mock_instance.control_mode = IntelliFireApiMode.LOCAL - mock_instance._control_mode = IntelliFireApiMode.LOCAL - - mock_instance.data = local_poll_data - 
- mock_instance.set_read_mode = AsyncMock() - mock_instance.set_control_mode = AsyncMock() - - mock_instance.async_validate_connectivity = AsyncMock( - return_value=(True, False) - ) - - # Patch class methods - with patch( - "homeassistant.components.intellifire.UnifiedFireplace.build_fireplace_from_common", - new_callable=AsyncMock, - return_value=mock_instance, - ): - yield mock_instance +def mock_api_connection_error() -> ConnectionError: + """Return a fake a ConnectionError for iftapi.net.""" + ret = ConnectionError() + ret.args = [ConnectionKey("iftapi.net", 443, False, None, None, None, None)] + return ret diff --git a/tests/components/intellifire/fixtures/local_poll.json b/tests/components/intellifire/fixtures/local_poll.json deleted file mode 100644 index 9dac47c698d..00000000000 --- a/tests/components/intellifire/fixtures/local_poll.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "", - "serial": "4GC295860E5837G40D9974B7FD459234", - "temperature": 17, - "battery": 0, - "pilot": 1, - "light": 0, - "height": 1, - "fanspeed": 1, - "hot": 0, - "power": 1, - "thermostat": 0, - "setpoint": 0, - "timer": 0, - "timeremaining": 0, - "prepurge": 0, - "feature_light": 0, - "feature_thermostat": 1, - "power_vent": 0, - "feature_fan": 1, - "errors": [], - "fw_version": "0x00030200", - "fw_ver_str": "0.3.2+hw2", - "downtime": 0, - "uptime": 117, - "connection_quality": 988451, - "ecm_latency": 0, - "ipv4_address": "192.168.2.108" -} diff --git a/tests/components/intellifire/fixtures/user_data_1.json b/tests/components/intellifire/fixtures/user_data_1.json deleted file mode 100644 index 501d240662b..00000000000 --- a/tests/components/intellifire/fixtures/user_data_1.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", - "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", - "fireplaces": [ - { - "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", - "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", - "ip_address": "192.168.2.108", - "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", - "serial": "3FB284769E4736F30C8973A7ED358123" - } - ], - "username": "grumpypanda@china.cn", - "password": "you-stole-my-pandas" -} diff --git a/tests/components/intellifire/fixtures/user_data_3.json b/tests/components/intellifire/fixtures/user_data_3.json deleted file mode 100644 index 39e9c95abbd..00000000000 --- a/tests/components/intellifire/fixtures/user_data_3.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", - "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", - "fireplaces": [ - { - "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", - "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", - "ip_address": "192.168.2.108", - "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", - "serial": "3FB284769E4736F30C8973A7ED358123" - }, - { - "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", - "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", - "ip_address": "192.168.2.109", - "api_key": "D4C5EB28BBFF41E1FB21AFF9BFA6CD34", - "serial": "4GC295860E5837G40D9974B7FD459234" - }, - { - "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", - "user_id": 
"52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", - "ip_address": "192.168.2.110", - "api_key": "E5D6FC39CCED52F1FB21AFF9BFA6DE56", - "serial": "5HD306971F5938H51EAA85C8GE561345" - } - ], - "username": "grumpypanda@china.cn", - "password": "you-stole-my-pandas" -} diff --git a/tests/components/intellifire/snapshots/test_binary_sensor.ambr b/tests/components/intellifire/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 1b85db51d68..00000000000 --- a/tests/components/intellifire/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,813 +0,0 @@ -# serializer version: 1 -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_accessory_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.intellifire_accessory_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Accessory error', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'accessory_error', - 'unique_id': 'error_accessory_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_accessory_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'problem', - 'friendly_name': 'IntelliFire Accessory error', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_accessory_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_cloud_connectivity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.intellifire_cloud_connectivity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Cloud connectivity', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cloud_connectivity', - 'unique_id': 'cloud_connectivity_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_cloud_connectivity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'connectivity', - 'friendly_name': 'IntelliFire Cloud connectivity', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_cloud_connectivity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_disabled_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 
'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.intellifire_disabled_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Disabled error', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disabled_error', - 'unique_id': 'error_disabled_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_disabled_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'problem', - 'friendly_name': 'IntelliFire Disabled error', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_disabled_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_ecm_offline_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.intellifire_ecm_offline_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'ECM offline error', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ecm_offline_error', - 'unique_id': 'error_ecm_offline_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_ecm_offline_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'problem', - 'friendly_name': 'IntelliFire ECM offline error', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_ecm_offline_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_delay_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.intellifire_fan_delay_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Fan delay error', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'fan_delay_error', - 'unique_id': 'error_fan_delay_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_delay_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'problem', - 'friendly_name': 'IntelliFire Fan delay error', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_fan_delay_error', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.intellifire_fan_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Fan error', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'fan_error', - 'unique_id': 'error_fan_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'problem', - 'friendly_name': 'IntelliFire Fan error', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_fan_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.intellifire_flame', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Flame', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'flame', - 'unique_id': 'on_off_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'friendly_name': 'IntelliFire Flame', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_flame', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.intellifire_flame_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Flame Error', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'flame_error', - 'unique_id': 'error_flame_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'problem', - 'friendly_name': 
'IntelliFire Flame Error', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_flame_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_lights_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.intellifire_lights_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Lights error', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lights_error', - 'unique_id': 'error_lights_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_lights_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'problem', - 'friendly_name': 'IntelliFire Lights error', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_lights_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_local_connectivity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.intellifire_local_connectivity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Local connectivity', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'local_connectivity', - 'unique_id': 'local_connectivity_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_local_connectivity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'connectivity', - 'friendly_name': 'IntelliFire Local connectivity', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_local_connectivity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_maintenance_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.intellifire_maintenance_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Maintenance error', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'maintenance_error', - 'unique_id': 
'error_maintenance_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_maintenance_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'problem', - 'friendly_name': 'IntelliFire Maintenance error', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_maintenance_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_offline_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.intellifire_offline_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Offline error', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'offline_error', - 'unique_id': 'error_offline_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_offline_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'problem', - 'friendly_name': 'IntelliFire Offline error', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_offline_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_flame_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.intellifire_pilot_flame_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Pilot flame error', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'pilot_flame_error', - 'unique_id': 'error_pilot_flame_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_flame_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'problem', - 'friendly_name': 'IntelliFire Pilot flame error', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_pilot_flame_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_light_on-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.intellifire_pilot_light_on', - 'has_entity_name': True, - 'hidden_by': 
None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Pilot light on', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'pilot_light_on', - 'unique_id': 'pilot_light_on_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_light_on-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'friendly_name': 'IntelliFire Pilot light on', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_pilot_light_on', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_soft_lock_out_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.intellifire_soft_lock_out_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Soft lock out error', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'soft_lock_out_error', - 'unique_id': 'error_soft_lock_out_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_soft_lock_out_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'problem', - 'friendly_name': 'IntelliFire Soft lock out error', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_soft_lock_out_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_thermostat_on-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.intellifire_thermostat_on', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Thermostat on', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'thermostat_on', - 'unique_id': 'thermostat_on_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_thermostat_on-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'friendly_name': 'IntelliFire Thermostat on', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_thermostat_on', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_timer_on-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 
'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.intellifire_timer_on', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Timer on', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'timer_on', - 'unique_id': 'timer_on_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensor_entities[binary_sensor.intellifire_timer_on-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'friendly_name': 'IntelliFire Timer on', - }), - 'context': , - 'entity_id': 'binary_sensor.intellifire_timer_on', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/intellifire/snapshots/test_climate.ambr b/tests/components/intellifire/snapshots/test_climate.ambr deleted file mode 100644 index 36f719d2264..00000000000 --- a/tests/components/intellifire/snapshots/test_climate.ambr +++ /dev/null @@ -1,66 +0,0 @@ -# serializer version: 1 -# name: test_all_sensor_entities[climate.intellifire_thermostat-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 37, - 'min_temp': 0, - 'target_temp_step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.intellifire_thermostat', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Thermostat', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'climate_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_sensor_entities[climate.intellifire_thermostat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'current_temperature': 17.0, - 'friendly_name': 'IntelliFire Thermostat', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 37, - 'min_temp': 0, - 'supported_features': , - 'target_temp_step': 1.0, - 'temperature': 0.0, - }), - 'context': , - 'entity_id': 'climate.intellifire_thermostat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/intellifire/snapshots/test_sensor.ambr b/tests/components/intellifire/snapshots/test_sensor.ambr deleted file mode 100644 index d749da216ac..00000000000 --- a/tests/components/intellifire/snapshots/test_sensor.ambr +++ /dev/null @@ -1,493 +0,0 @@ -# serializer version: 1 -# name: test_all_sensor_entities[sensor.intellifire_connection_quality-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.intellifire_connection_quality', - 'has_entity_name': True, - 'hidden_by': None, - 
'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Connection quality', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'connection_quality', - 'unique_id': 'connection_quality_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_connection_quality-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'friendly_name': 'IntelliFire Connection quality', - }), - 'context': , - 'entity_id': 'sensor.intellifire_connection_quality', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '988451', - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_downtime-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.intellifire_downtime', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Downtime', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'downtime', - 'unique_id': 'downtime_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_downtime-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'timestamp', - 'friendly_name': 'IntelliFire Downtime', - }), - 'context': , - 'entity_id': 'sensor.intellifire_downtime', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_ecm_latency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.intellifire_ecm_latency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'ECM latency', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ecm_latency', - 'unique_id': 'ecm_latency_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_ecm_latency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'friendly_name': 'IntelliFire ECM latency', - }), - 'context': , - 'entity_id': 'sensor.intellifire_ecm_latency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_fan_speed-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.intellifire_fan_speed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Fan Speed', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'fan_speed', - 'unique_id': 'fan_speed_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_fan_speed-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'friendly_name': 'IntelliFire Fan Speed', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.intellifire_fan_speed', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_flame_height-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.intellifire_flame_height', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Flame height', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'flame_height', - 'unique_id': 'flame_height_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_flame_height-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'friendly_name': 'IntelliFire Flame height', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.intellifire_flame_height', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_ip_address-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.intellifire_ip_address', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IP address', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ipv4_address', - 'unique_id': 'ipv4_address_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_ip_address-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'friendly_name': 'IntelliFire IP address', - }), - 'context': , - 'entity_id': 'sensor.intellifire_ip_address', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '192.168.2.108', - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_target_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 
'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.intellifire_target_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Target temperature', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'target_temp', - 'unique_id': 'target_temp_mock_serial', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_target_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'temperature', - 'friendly_name': 'IntelliFire Target temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.intellifire_target_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.intellifire_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'temperature_mock_serial', - 'unit_of_measurement': , - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'temperature', - 'friendly_name': 'IntelliFire Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.intellifire_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17', - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_timer_end-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.intellifire_timer_end', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Timer end', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'timer_end_timestamp', - 'unique_id': 'timer_end_timestamp_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_timer_end-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'timestamp', - 'friendly_name': 'IntelliFire Timer end', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.intellifire_timer_end', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_uptime-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.intellifire_uptime', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Uptime', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'uptime', - 'unique_id': 'uptime_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_uptime-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'device_class': 'timestamp', - 'friendly_name': 'IntelliFire Uptime', - }), - 'context': , - 'entity_id': 'sensor.intellifire_uptime', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-01-01T11:58:03+00:00', - }) -# --- diff --git a/tests/components/intellifire/test_binary_sensor.py b/tests/components/intellifire/test_binary_sensor.py deleted file mode 100644 index a40f92b84d5..00000000000 --- a/tests/components/intellifire/test_binary_sensor.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Test IntelliFire Binary Sensors.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_binary_sensor_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry_current: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_apis_single_fp: tuple[AsyncMock, AsyncMock, AsyncMock], -) -> None: - """Test all entities.""" - - with ( - patch( - "homeassistant.components.intellifire.PLATFORMS", [Platform.BINARY_SENSOR] - ), - ): - await setup_integration(hass, mock_config_entry_current) - await snapshot_platform( - hass, entity_registry, snapshot, mock_config_entry_current.entry_id - ) diff --git a/tests/components/intellifire/test_climate.py b/tests/components/intellifire/test_climate.py deleted file mode 100644 index da1b2864791..00000000000 --- a/tests/components/intellifire/test_climate.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Test climate.""" - -from unittest.mock import patch - -from freezegun import freeze_time -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@freeze_time("2021-01-01T12:00:00Z") -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_sensor_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry_current: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_fp, -) -> None: - """Test all entities.""" - with ( - patch("homeassistant.components.intellifire.PLATFORMS", [Platform.CLIMATE]), - ): - await setup_integration(hass, mock_config_entry_current) - await snapshot_platform( - hass, entity_registry, snapshot, mock_config_entry_current.entry_id - ) diff --git a/tests/components/intellifire/test_config_flow.py b/tests/components/intellifire/test_config_flow.py index f1465c4dcd4..ba4e2f039a3 100644 --- a/tests/components/intellifire/test_config_flow.py +++ b/tests/components/intellifire/test_config_flow.py @@ -1,168 +1,323 @@ """Test the IntelliFire config flow.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, MagicMock, patch -from intellifire4py.exceptions import LoginError +from intellifire4py.exceptions import LoginException from homeassistant import config_entries from homeassistant.components import dhcp -from homeassistant.components.intellifire.const import CONF_SERIAL, DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.components.intellifire.config_flow import MANUAL_ENTRY_STRING +from homeassistant.components.intellifire.const import CONF_USER_ID, DOMAIN +from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .conftest import mock_api_connection_error + from tests.common import MockConfigEntry -async def test_standard_config_with_single_fireplace( +@patch.multiple( + "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", + login=AsyncMock(), + get_user_id=MagicMock(return_value="intellifire"), + get_fireplace_api_key=MagicMock(return_value="key"), +) +async def test_no_discovery( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_apis_single_fp, + mock_intellifire_config_flow: MagicMock, ) -> None: - """Test standard flow with a user who has only a single fireplace.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] == FlowResultType.FORM + """Test we should get the manual discovery form - because no discovered fireplaces.""" + with patch( + "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", + return_value=[], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - assert result["step_id"] == "cloud_api" + assert result["step_id"] == "manual_device_entry" - result = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, + { + CONF_HOST: "1.1.1.1", + }, ) - # For a single fireplace we just create it - assert result["type"] == FlowResultType.CREATE_ENTRY - assert result["data"] == { - "ip_address": "192.168.2.108", - "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", - "serial": "3FB284769E4736F30C8973A7ED358123", - "auth_cookie": 
"B984F21A6378560019F8A1CDE41B6782", - "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", - "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - "username": "grumpypanda@china.cn", - "password": "you-stole-my-pandas", + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "api_config" + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, + ) + await hass.async_block_till_done() + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Fireplace 12345" + assert result3["data"] == { + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test", + CONF_PASSWORD: "AROONIE", + CONF_API_KEY: "key", + CONF_USER_ID: "intellifire", } + assert len(mock_setup_entry.mock_calls) == 1 -async def test_standard_config_with_pre_configured_fireplace( +@patch.multiple( + "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", + login=AsyncMock(side_effect=mock_api_connection_error()), + get_user_id=MagicMock(return_value="intellifire"), + get_fireplace_api_key=MagicMock(return_value="key"), +) +async def test_single_discovery( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_config_entry_current, - mock_apis_single_fp, + mock_intellifire_config_flow: MagicMock, ) -> None: - """What if we try to configure an already configured fireplace.""" - # Configure an existing entry - mock_config_entry_current.add_to_hass(hass) + """Test single fireplace UDP discovery.""" + with patch( + "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", + return_value=["192.168.1.69"], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: "192.168.1.69"} ) - assert result["type"] == FlowResultType.FORM - assert result["errors"] == {} - assert result["step_id"] == "cloud_api" - - result = await hass.config_entries.flow.async_configure( + await hass.async_block_till_done() + result3 = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, + {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, ) - - # For a single fireplace we just create it - assert result["type"] == FlowResultType.ABORT - assert result["reason"] == "no_available_devices" + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.FORM + assert result3["errors"] == {"base": "iftapi_connect"} -async def test_standard_config_with_single_fireplace_and_bad_credentials( +@patch.multiple( + "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", + login=AsyncMock(side_effect=LoginException), + get_user_id=MagicMock(return_value="intellifire"), + get_fireplace_api_key=MagicMock(return_value="key"), +) +async def test_single_discovery_loign_error( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_apis_single_fp, + mock_intellifire_config_flow: MagicMock, ) -> None: - """Test bad credentials on a login.""" - mock_local_interface, mock_cloud_interface, mock_fp = mock_apis_single_fp - # Set login error - mock_cloud_interface.login_with_credentials.side_effect = LoginError + """Test single fireplace UDP discovery.""" + with patch( + 
"homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", + return_value=["192.168.1.69"], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: "192.168.1.69"} + ) + await hass.async_block_till_done() + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, + ) + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.FORM + assert result3["errors"] == {"base": "api_error"} + + +async def test_manual_entry( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_intellifire_config_flow: MagicMock, +) -> None: + """Test for multiple Fireplace discovery - involving a pick_device step.""" + with patch( + "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", + return_value=["192.168.1.69", "192.168.1.33", "192.168.169"], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["step_id"] == "pick_device" + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_HOST: MANUAL_ENTRY_STRING} + ) + + await hass.async_block_till_done() + assert result2["step_id"] == "manual_device_entry" + + +async def test_multi_discovery( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_intellifire_config_flow: MagicMock, +) -> None: + """Test for multiple fireplace discovery - involving a pick_device step.""" + with patch( + "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", + return_value=["192.168.1.69", "192.168.1.33", "192.168.169"], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["step_id"] == "pick_device" + await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_HOST: "192.168.1.33"} + ) + await hass.async_block_till_done() + assert result["step_id"] == "pick_device" + + +async def test_multi_discovery_cannot_connect( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_intellifire_config_flow: MagicMock, +) -> None: + """Test for multiple fireplace discovery - involving a pick_device step.""" + with patch( + "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", + return_value=["192.168.1.69", "192.168.1.33", "192.168.169"], + ): + mock_intellifire_config_flow.poll.side_effect = ConnectionError + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "pick_device" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_HOST: "192.168.1.33"} + ) + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} + + +async def test_form_cannot_connect_manual_entry( + hass: HomeAssistant, + mock_intellifire_config_flow: MagicMock, + mock_fireplace_finder_single: AsyncMock, +) -> None: + """Test we handle cannot connect error.""" + mock_intellifire_config_flow.poll.side_effect = ConnectionError result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} 
) assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - assert result["step_id"] == "cloud_api" + assert result["step_id"] == "manual_device_entry" - result = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, + { + CONF_HOST: "1.1.1.1", + }, ) - # Erase the error - mock_cloud_interface.login_with_credentials.side_effect = None - - assert result["type"] == FlowResultType.FORM - assert result["errors"] == {"base": "api_error"} - assert result["step_id"] == "cloud_api" - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, - ) - # For a single fireplace we just create it - assert result["type"] == FlowResultType.CREATE_ENTRY - assert result["data"] == { - "ip_address": "192.168.2.108", - "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", - "serial": "3FB284769E4736F30C8973A7ED358123", - "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", - "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", - "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - "username": "grumpypanda@china.cn", - "password": "you-stole-my-pandas", - } + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} -async def test_standard_config_with_multiple_fireplace( +async def test_picker_already_discovered( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_apis_multifp, + mock_intellifire_config_flow: MagicMock, ) -> None: - """Test multi-fireplace user who must be very rich.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] == FlowResultType.FORM - assert result["errors"] == {} - assert result["step_id"] == "cloud_api" + """Test single fireplace UDP discovery.""" - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, + entry = MockConfigEntry( + domain=DOMAIN, + data={ + "host": "192.168.1.3", + }, + title="Fireplace", + unique_id=44444, ) - # When we have multiple fireplaces we get to pick a serial - assert result["type"] == FlowResultType.FORM - assert result["step_id"] == "pick_cloud_device" - result = await hass.config_entries.flow.async_configure( + entry.add_to_hass(hass) + with patch( + "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", + return_value=["192.168.1.3"], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + await hass.async_block_till_done() + + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_SERIAL: "4GC295860E5837G40D9974B7FD459234"}, + { + CONF_HOST: "192.168.1.4", + }, ) - assert result["type"] == FlowResultType.CREATE_ENTRY - assert result["data"] == { - "ip_address": "192.168.2.109", - "api_key": "D4C5EB28BBFF41E1FB21AFF9BFA6CD34", - "serial": "4GC295860E5837G40D9974B7FD459234", - "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", - "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", - "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - "username": "grumpypanda@china.cn", - "password": "you-stole-my-pandas", - } + assert result2["type"] is FlowResultType.FORM + assert len(mock_setup_entry.mock_calls) == 0 + + 
+@patch.multiple( + "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", + login=AsyncMock(), + get_user_id=MagicMock(return_value="intellifire"), + get_fireplace_api_key=MagicMock(return_value="key"), +) +async def test_reauth_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_intellifire_config_flow: MagicMock, +) -> None: + """Test the reauth flow.""" + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + "host": "192.168.1.3", + }, + title="Fireplace 1234", + version=1, + unique_id="4444", + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": "reauth", + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "api_config" + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, + ) + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.ABORT + assert entry.data[CONF_PASSWORD] == "AROONIE" + assert entry.data[CONF_USERNAME] == "test" async def test_dhcp_discovery_intellifire_device( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_apis_multifp, + mock_intellifire_config_flow: MagicMock, ) -> None: """Test successful DHCP Discovery.""" - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, @@ -172,26 +327,26 @@ async def test_dhcp_discovery_intellifire_device( hostname="zentrios-Test", ), ) - assert result["type"] == FlowResultType.FORM - assert result["step_id"] == "cloud_api" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "dhcp_confirm" + result2 = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "dhcp_confirm" + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={} ) - assert result["type"] == FlowResultType.CREATE_ENTRY + assert result3["title"] == "Fireplace 12345" + assert result3["data"] == {"host": "1.1.1.1"} async def test_dhcp_discovery_non_intellifire_device( hass: HomeAssistant, + mock_intellifire_config_flow: MagicMock, mock_setup_entry: AsyncMock, - mock_apis_multifp, ) -> None: - """Test successful DHCP Discovery of a non intellifire device..""" + """Test failed DHCP Discovery.""" - # Patch poll with an exception - mock_local_interface, mock_cloud_interface, mock_fp = mock_apis_multifp - mock_local_interface.poll.side_effect = ConnectionError + mock_intellifire_config_flow.poll.side_effect = ConnectionError result = await hass.config_entries.flow.async_init( DOMAIN, @@ -202,28 +357,6 @@ async def test_dhcp_discovery_non_intellifire_device( hostname="zentrios-Evil", ), ) - assert result["type"] == FlowResultType.ABORT + + assert result["type"] is FlowResultType.ABORT assert result["reason"] == "not_intellifire_device" - # Test is finished - the DHCP scanner detected a hostname that "might" be an IntelliFire device, but it was not. 
- - -async def test_reauth_flow( - hass: HomeAssistant, - mock_config_entry_current: MockConfigEntry, - mock_apis_single_fp, - mock_setup_entry: AsyncMock, -) -> None: - """Test reauth.""" - - mock_config_entry_current.add_to_hass(hass) - result = await mock_config_entry_current.start_reauth_flow(hass) - assert result["type"] == FlowResultType.FORM - result["step_id"] = "cloud_api" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, - ) - - assert result["type"] == FlowResultType.ABORT - assert result["reason"] == "reauth_successful" diff --git a/tests/components/intellifire/test_init.py b/tests/components/intellifire/test_init.py deleted file mode 100644 index 6d08fda26c3..00000000000 --- a/tests/components/intellifire/test_init.py +++ /dev/null @@ -1,111 +0,0 @@ -"""Test the IntelliFire config flow.""" - -from unittest.mock import AsyncMock, patch - -from homeassistant.components.intellifire import CONF_USER_ID -from homeassistant.components.intellifire.const import ( - API_MODE_CLOUD, - API_MODE_LOCAL, - CONF_AUTH_COOKIE, - CONF_CONTROL_MODE, - CONF_READ_MODE, - CONF_SERIAL, - CONF_WEB_CLIENT_ID, - DOMAIN, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ( - CONF_API_KEY, - CONF_HOST, - CONF_IP_ADDRESS, - CONF_PASSWORD, - CONF_USERNAME, -) -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def test_minor_migration( - hass: HomeAssistant, mock_config_entry_old, mock_apis_single_fp -) -> None: - """With the new library we are going to end up rewriting the config entries.""" - mock_config_entry_old.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry_old.entry_id) - - assert mock_config_entry_old.data == { - "ip_address": "192.168.2.108", - "host": "192.168.2.108", - "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", - "serial": "3FB284769E4736F30C8973A7ED358123", - "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", - "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", - "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - "username": "grumpypanda@china.cn", - "password": "you-stole-my-pandas", - } - - -async def test_minor_migration_error(hass: HomeAssistant, mock_apis_single_fp) -> None: - """Test the case where we completely fail to initialize.""" - mock_config_entry = MockConfigEntry( - domain=DOMAIN, - version=1, - minor_version=1, - title="Fireplace of testing", - data={ - CONF_HOST: "11.168.2.218", - CONF_USERNAME: "grumpypanda@china.cn", - CONF_PASSWORD: "you-stole-my-pandas", - CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - }, - ) - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert mock_config_entry.state is ConfigEntryState.MIGRATION_ERROR - - -async def test_init_with_no_username(hass: HomeAssistant, mock_apis_single_fp) -> None: - """Test the case where we completely fail to initialize.""" - mock_config_entry = MockConfigEntry( - domain=DOMAIN, - version=1, - minor_version=2, - data={ - CONF_IP_ADDRESS: "192.168.2.108", - CONF_PASSWORD: "you-stole-my-pandas", - CONF_SERIAL: "3FB284769E4736F30C8973A7ED358123", - CONF_WEB_CLIENT_ID: "FA2B1C3045601234D0AE17D72F8E975", - CONF_API_KEY: "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", - CONF_AUTH_COOKIE: "B984F21A6378560019F8A1CDE41B6782", - CONF_USER_ID: 
"52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", - }, - options={CONF_READ_MODE: API_MODE_LOCAL, CONF_CONTROL_MODE: API_MODE_CLOUD}, - unique_id="3FB284769E4736F30C8973A7ED358123", - ) - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR - - -async def test_connectivity_bad( - hass: HomeAssistant, - mock_config_entry_current, - mock_apis_single_fp, -) -> None: - """Test a timeout error on the setup flow.""" - - with patch( - "homeassistant.components.intellifire.UnifiedFireplace.build_fireplace_from_common", - new_callable=AsyncMock, - side_effect=TimeoutError, - ): - mock_config_entry_current.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry_current.entry_id) - - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 diff --git a/tests/components/intellifire/test_sensor.py b/tests/components/intellifire/test_sensor.py deleted file mode 100644 index 96e344d77fc..00000000000 --- a/tests/components/intellifire/test_sensor.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Test IntelliFire Binary Sensors.""" - -from unittest.mock import AsyncMock, patch - -from freezegun import freeze_time -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@freeze_time("2021-01-01T12:00:00Z") -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_sensor_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry_current: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_apis_single_fp: tuple[AsyncMock, AsyncMock, AsyncMock], -) -> None: - """Test all entities.""" - - with ( - patch("homeassistant.components.intellifire.PLATFORMS", [Platform.SENSOR]), - ): - await setup_integration(hass, mock_config_entry_current) - await snapshot_platform( - hass, entity_registry, snapshot, mock_config_entry_current.entry_id - ) diff --git a/tests/components/intent/test_init.py b/tests/components/intent/test_init.py index 20c0f9d8d44..7288c4855af 100644 --- a/tests/components/intent/test_init.py +++ b/tests/components/intent/test_init.py @@ -34,11 +34,11 @@ async def test_http_handle_intent( assert intent_obj.context.user_id == hass_admin_user.id response = intent_obj.create_response() response.async_set_speech( - f"I've ordered a {intent_obj.slots['type']['value']}!" 
+ "I've ordered a {}!".format(intent_obj.slots["type"]["value"]) ) response.async_set_card( "Beer ordered", - f"You chose a {intent_obj.slots['type']['value']}.", + "You chose a {}.".format(intent_obj.slots["type"]["value"]), ) return response @@ -455,14 +455,3 @@ async def test_set_position_intent_unsupported_domain(hass: HomeAssistant) -> No "HassSetPosition", {"name": {"value": "test light"}, "position": {"value": 100}}, ) - - -async def test_intents_with_no_responses(hass: HomeAssistant) -> None: - """Test intents that should not return a response during handling.""" - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "intent", {}) - - # The "respond" intent gets its response text from home-assistant-intents - for intent_name in (intent.INTENT_NEVERMIND, intent.INTENT_RESPOND): - response = await intent.async_handle(hass, "test", intent_name, {}) - assert not response.speech diff --git a/tests/components/intent_script/test_init.py b/tests/components/intent_script/test_init.py index 26c575f0407..5f4c7b97b63 100644 --- a/tests/components/intent_script/test_init.py +++ b/tests/components/intent_script/test_init.py @@ -3,16 +3,11 @@ from unittest.mock import patch from homeassistant import config as hass_config +from homeassistant.bootstrap import async_setup_component from homeassistant.components.intent_script import DOMAIN from homeassistant.const import SERVICE_RELOAD from homeassistant.core import HomeAssistant -from homeassistant.helpers import ( - area_registry as ar, - entity_registry as er, - floor_registry as fr, - intent, -) -from homeassistant.setup import async_setup_component +from homeassistant.helpers import intent from tests.common import async_mock_service, get_fixture_path @@ -202,84 +197,6 @@ async def test_intent_script_falsy_reprompt(hass: HomeAssistant) -> None: assert response.card["simple"]["content"] == "Content for Paulus" -async def test_intent_script_targets( - hass: HomeAssistant, - area_registry: ar.AreaRegistry, - entity_registry: er.EntityRegistry, - floor_registry: fr.FloorRegistry, -) -> None: - """Test intent scripts work.""" - calls = async_mock_service(hass, "test", "service") - - await async_setup_component( - hass, - "intent_script", - { - "intent_script": { - "Targets": { - "description": "Intent to control a test service.", - "action": { - "service": "test.service", - "data_template": { - "targets": "{{ targets if targets is defined }}", - }, - }, - "speech": { - "text": "{{ targets.entities[0] if targets is defined }}" - }, - } - } - }, - ) - - floor_1 = floor_registry.async_create("first floor") - kitchen = area_registry.async_get_or_create("kitchen") - area_registry.async_update(kitchen.id, floor_id=floor_1.floor_id) - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - entity_registry.async_update_entity("light.kitchen", area_id=kitchen.id) - hass.states.async_set("light.kitchen", "off") - - response = await intent.async_handle( - hass, - "test", - "Targets", - {"name": {"value": "kitchen"}, "domain": {"value": "light"}}, - ) - assert len(calls) == 1 - assert calls[0].data["targets"] == {"entities": ["light.kitchen"]} - assert response.speech["plain"]["speech"] == "light.kitchen" - calls.clear() - - response = await intent.async_handle( - hass, - "test", - "Targets", - { - "area": {"value": "kitchen"}, - "floor": {"value": "first floor"}, - }, - ) - assert len(calls) == 1 - assert calls[0].data["targets"] == { - "entities": ["light.kitchen"], 
- "areas": ["kitchen"], - "floors": ["first_floor"], - } - calls.clear() - - response = await intent.async_handle( - hass, - "test", - "Targets", - {"device_class": {"value": "door"}}, - ) - assert len(calls) == 1 - assert calls[0].data["targets"] == "" - calls.clear() - - async def test_reload(hass: HomeAssistant) -> None: """Verify we can reload intent config.""" diff --git a/tests/components/ios/test_init.py b/tests/components/ios/test_init.py index ddf5835a1be..afefec1530c 100644 --- a/tests/components/ios/test_init.py +++ b/tests/components/ios/test_init.py @@ -19,7 +19,7 @@ def mock_load_json(): @pytest.fixture(autouse=True) -def mock_dependencies(hass: HomeAssistant) -> None: +def mock_dependencies(hass): """Mock dependencies loaded.""" mock_component(hass, "zeroconf") mock_component(hass, "device_tracker") diff --git a/tests/components/iotawatt/conftest.py b/tests/components/iotawatt/conftest.py index 9380154b53e..f3a60e69021 100644 --- a/tests/components/iotawatt/conftest.py +++ b/tests/components/iotawatt/conftest.py @@ -1,18 +1,16 @@ """Test fixtures for IoTaWatt.""" -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.iotawatt import DOMAIN -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @pytest.fixture -def entry(hass: HomeAssistant) -> MockConfigEntry: +def entry(hass): """Mock config entry added to HA.""" entry = MockConfigEntry(domain=DOMAIN, data={"host": "1.2.3.4"}) entry.add_to_hass(hass) @@ -20,7 +18,7 @@ def entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture -def mock_iotawatt(entry: MockConfigEntry) -> Generator[MagicMock]: +def mock_iotawatt(entry): """Mock iotawatt.""" with patch("homeassistant.components.iotawatt.coordinator.Iotawatt") as mock: instance = mock.return_value diff --git a/tests/components/iotawatt/test_init.py b/tests/components/iotawatt/test_init.py index de3a2f9f829..8b707780eb4 100644 --- a/tests/components/iotawatt/test_init.py +++ b/tests/components/iotawatt/test_init.py @@ -1,7 +1,5 @@ """Test init.""" -from unittest.mock import MagicMock - import httpx from homeassistant.config_entries import ConfigEntryState @@ -10,12 +8,8 @@ from homeassistant.setup import async_setup_component from . 
import INPUT_SENSOR -from tests.common import MockConfigEntry - -async def test_setup_unload( - hass: HomeAssistant, mock_iotawatt: MagicMock, entry: MockConfigEntry -) -> None: +async def test_setup_unload(hass: HomeAssistant, mock_iotawatt, entry) -> None: """Test we can setup and unload an entry.""" mock_iotawatt.getSensors.return_value["sensors"]["my_sensor_key"] = INPUT_SENSOR assert await async_setup_component(hass, "iotawatt", {}) @@ -24,7 +18,7 @@ async def test_setup_unload( async def test_setup_connection_failed( - hass: HomeAssistant, mock_iotawatt: MagicMock, entry: MockConfigEntry + hass: HomeAssistant, mock_iotawatt, entry ) -> None: """Test connection error during startup.""" mock_iotawatt.connect.side_effect = httpx.ConnectError("") @@ -33,9 +27,7 @@ async def test_setup_connection_failed( assert entry.state is ConfigEntryState.SETUP_RETRY -async def test_setup_auth_failed( - hass: HomeAssistant, mock_iotawatt: MagicMock, entry: MockConfigEntry -) -> None: +async def test_setup_auth_failed(hass: HomeAssistant, mock_iotawatt, entry) -> None: """Test auth error during startup.""" mock_iotawatt.connect.return_value = False assert await async_setup_component(hass, "iotawatt", {}) diff --git a/tests/components/iotawatt/test_sensor.py b/tests/components/iotawatt/test_sensor.py index eb1a240a82f..ecf2f97c67a 100644 --- a/tests/components/iotawatt/test_sensor.py +++ b/tests/components/iotawatt/test_sensor.py @@ -1,7 +1,6 @@ """Test setting up sensors.""" from datetime import timedelta -from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -26,7 +25,7 @@ from tests.common import async_fire_time_changed async def test_sensor_type_input( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_iotawatt: MagicMock + hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_iotawatt ) -> None: """Test input sensors work.""" assert await async_setup_component(hass, "iotawatt", {}) @@ -61,7 +60,7 @@ async def test_sensor_type_input( async def test_sensor_type_output( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_iotawatt: MagicMock + hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_iotawatt ) -> None: """Tests the sensor type of Output.""" mock_iotawatt.getSensors.return_value["sensors"]["my_watthour_sensor_key"] = ( diff --git a/tests/components/iotty/__init__.py b/tests/components/iotty/__init__.py deleted file mode 100644 index 705b8218c8b..00000000000 --- a/tests/components/iotty/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for iotty.""" diff --git a/tests/components/iotty/conftest.py b/tests/components/iotty/conftest.py deleted file mode 100644 index 1935a069cca..00000000000 --- a/tests/components/iotty/conftest.py +++ /dev/null @@ -1,248 +0,0 @@ -"""Fixtures for iotty integration tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch - -from aiohttp import ClientSession -from iottycloud.device import Device -from iottycloud.lightswitch import LightSwitch -from iottycloud.shutter import Shutter -from iottycloud.verbs import ( - LS_DEVICE_TYPE_UID, - OPEN_PERCENTAGE, - RESULT, - SH_DEVICE_TYPE_UID, - STATUS, - STATUS_OFF, - STATUS_ON, - STATUS_OPENING, - STATUS_STATIONATRY, -) -import pytest - -from homeassistant import setup -from homeassistant.components.iotty.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PORT -from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_entry_oauth2_flow - -from tests.common 
import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker, mock_aiohttp_client - -CLIENT_ID = "client_id" -CLIENT_SECRET = "client_secret" -REDIRECT_URI = "https://example.com/auth/external/callback" - -test_devices = [ - Device("TestDevice0", "TEST_SERIAL_0", LS_DEVICE_TYPE_UID, "[TEST] Device Name 0"), - Device("TestDevice1", "TEST_SERIAL_1", LS_DEVICE_TYPE_UID, "[TEST] Device Name 1"), -] - - -ls_0 = LightSwitch( - "TestLS", "TEST_SERIAL_0", LS_DEVICE_TYPE_UID, "[TEST] Light switch 0" -) -ls_1 = LightSwitch( - "TestLS1", "TEST_SERIAL_1", LS_DEVICE_TYPE_UID, "[TEST] Light switch 1" -) -ls_2 = LightSwitch( - "TestLS2", "TEST_SERIAL_2", LS_DEVICE_TYPE_UID, "[TEST] Light switch 2" -) - -test_ls = [ls_0, ls_1] - -test_ls_one_removed = [ls_0] - -test_ls_one_added = [ - ls_0, - ls_1, - ls_2, -] - -sh_0 = Shutter("TestSH", "TEST_SERIAL_SH_0", SH_DEVICE_TYPE_UID, "[TEST] Shutter 0") -sh_1 = Shutter("TestSH1", "TEST_SERIAL_SH_1", SH_DEVICE_TYPE_UID, "[TEST] Shutter 1") -sh_2 = Shutter("TestSH2", "TEST_SERIAL_SH_2", SH_DEVICE_TYPE_UID, "[TEST] Shutter 2") - -test_sh = [sh_0, sh_1] - -test_sh_one_removed = [sh_0] - -test_sh_one_added = [ - sh_0, - sh_1, - sh_2, -] - - -@pytest.fixture -async def local_oauth_impl(hass: HomeAssistant): - """Local implementation.""" - assert await setup.async_setup_component(hass, "auth", {}) - return config_entry_oauth2_flow.LocalOAuth2Implementation( - hass, DOMAIN, "client_id", "client_secret", "authorize_url", "https://token.url" - ) - - -@pytest.fixture -def aiohttp_client_session() -> None: - """AIOHTTP client session.""" - return ClientSession - - -@pytest.fixture -def mock_aioclient() -> Generator[AiohttpClientMocker]: - """Fixture to mock aioclient calls.""" - with mock_aiohttp_client() as mock_session: - yield mock_session - - -@pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: - """Return the default mocked config entry.""" - return MockConfigEntry( - title="IOTTY00001", - domain=DOMAIN, - data={ - "auth_implementation": DOMAIN, - "token": { - "refresh_token": "REFRESH_TOKEN", - "access_token": "ACCESS_TOKEN_1", - "expires_in": 10, - "expires_at": 0, - "token_type": "bearer", - "random_other_data": "should_stay", - }, - CONF_HOST: "127.0.0.1", - CONF_MAC: "AA:BB:CC:DD:EE:FF", - CONF_PORT: 9123, - }, - unique_id="IOTTY00001", - ) - - -@pytest.fixture -def mock_config_entries_async_forward_entry_setup() -> Generator[AsyncMock]: - """Mock async_forward_entry_setup.""" - with patch( - "homeassistant.config_entries.ConfigEntries.async_forward_entry_setups" - ) as mock_fn: - yield mock_fn - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Mock setting up a config entry.""" - with patch( - "homeassistant.components.iotty.async_setup_entry", return_value=True - ) as mock_setup: - yield mock_setup - - -@pytest.fixture -def mock_iotty() -> Generator[MagicMock]: - """Mock IottyProxy.""" - with patch( - "homeassistant.components.iotty.api.IottyProxy", autospec=True - ) as iotty_mock: - yield iotty_mock - - -@pytest.fixture -def mock_coordinator() -> Generator[MagicMock]: - """Mock IottyDataUpdateCoordinator.""" - with patch( - "homeassistant.components.iotty.coordinator.IottyDataUpdateCoordinator", - autospec=True, - ) as coordinator_mock: - yield coordinator_mock - - -@pytest.fixture -def mock_get_devices_nodevices() -> Generator[AsyncMock]: - """Mock for get_devices, returning two objects.""" - - with patch("iottycloud.cloudapi.CloudApi.get_devices") as mock_fn: - yield mock_fn - - 
-@pytest.fixture -def mock_get_devices_twolightswitches() -> Generator[AsyncMock]: - """Mock for get_devices, returning two switches.""" - - with patch( - "iottycloud.cloudapi.CloudApi.get_devices", return_value=test_ls - ) as mock_fn: - yield mock_fn - - -@pytest.fixture -def mock_get_devices_twoshutters() -> Generator[AsyncMock]: - """Mock for get_devices, returning two shutters.""" - - with patch( - "iottycloud.cloudapi.CloudApi.get_devices", return_value=test_sh - ) as mock_fn: - yield mock_fn - - -@pytest.fixture -def mock_command_fn() -> Generator[AsyncMock]: - """Mock for command.""" - - with patch("iottycloud.cloudapi.CloudApi.command", return_value=None) as mock_fn: - yield mock_fn - - -@pytest.fixture -def mock_get_status_filled_off() -> Generator[AsyncMock]: - """Mock setting up a get_status.""" - - retval = {RESULT: {STATUS: STATUS_OFF}} - with patch( - "iottycloud.cloudapi.CloudApi.get_status", return_value=retval - ) as mock_fn: - yield mock_fn - - -@pytest.fixture -def mock_get_status_filled_stationary_100() -> Generator[AsyncMock]: - """Mock setting up a get_status.""" - - retval = {RESULT: {STATUS: STATUS_STATIONATRY, OPEN_PERCENTAGE: 100}} - with patch( - "iottycloud.cloudapi.CloudApi.get_status", return_value=retval - ) as mock_fn: - yield mock_fn - - -@pytest.fixture -def mock_get_status_filled_stationary_0() -> Generator[AsyncMock]: - """Mock setting up a get_status.""" - - retval = {RESULT: {STATUS: STATUS_STATIONATRY, OPEN_PERCENTAGE: 0}} - with patch( - "iottycloud.cloudapi.CloudApi.get_status", return_value=retval - ) as mock_fn: - yield mock_fn - - -@pytest.fixture -def mock_get_status_filled_opening_50() -> Generator[AsyncMock]: - """Mock setting up a get_status.""" - - retval = {RESULT: {STATUS: STATUS_OPENING, OPEN_PERCENTAGE: 50}} - with patch( - "iottycloud.cloudapi.CloudApi.get_status", return_value=retval - ) as mock_fn: - yield mock_fn - - -@pytest.fixture -def mock_get_status_filled() -> Generator[AsyncMock]: - """Mock setting up a get_status.""" - - retval = {RESULT: {STATUS: STATUS_ON}} - with patch( - "iottycloud.cloudapi.CloudApi.get_status", return_value=retval - ) as mock_fn: - yield mock_fn diff --git a/tests/components/iotty/snapshots/test_switch.ambr b/tests/components/iotty/snapshots/test_switch.ambr deleted file mode 100644 index 8ec22ed162a..00000000000 --- a/tests/components/iotty/snapshots/test_switch.ambr +++ /dev/null @@ -1,126 +0,0 @@ -# serializer version: 1 -# name: test_api_not_ok_entities_stay_the_same_as_before - list([ - 'switch.test_light_switch_0_test_serial_0', - 'switch.test_light_switch_1_test_serial_1', - ]) -# --- -# name: test_api_throws_response_entities_stay_the_same_as_before - list([ - 'switch.test_light_switch_0_test_serial_0', - 'switch.test_light_switch_1_test_serial_1', - ]) -# --- -# name: test_devices_creaction_ok[device] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'iotty', - 'TestLS', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'iotty', - 'model': None, - 'model_id': None, - 'name': '[TEST] Light switch 0 (TEST_SERIAL_0)', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- -# name: test_devices_creaction_ok[entity-ids] - list([ - 'switch.test_light_switch_0_test_serial_0', - 
'switch.test_light_switch_1_test_serial_1', - ]) -# --- -# name: test_devices_creaction_ok[entity] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.test_light_switch_0_test_serial_0', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'iotty', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'TestLS', - 'unit_of_measurement': None, - }) -# --- -# name: test_devices_creaction_ok[state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': '[TEST] Light switch 0 (TEST_SERIAL_0)', - }), - 'context': , - 'entity_id': 'switch.test_light_switch_0_test_serial_0', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_devices_deletion_ok - list([ - 'switch.test_light_switch_0_test_serial_0', - 'switch.test_light_switch_1_test_serial_1', - ]) -# --- -# name: test_devices_deletion_ok.1 - list([ - 'switch.test_light_switch_0_test_serial_0', - ]) -# --- -# name: test_devices_insertion_ok - list([ - 'switch.test_light_switch_0_test_serial_0', - 'switch.test_light_switch_1_test_serial_1', - ]) -# --- -# name: test_devices_insertion_ok.1 - list([ - 'switch.test_light_switch_0_test_serial_0', - 'switch.test_light_switch_1_test_serial_1', - 'switch.test_light_switch_2_test_serial_2', - ]) -# --- -# name: test_setup_entry_ok_nodevices - list([ - ]) -# --- diff --git a/tests/components/iotty/test_api.py b/tests/components/iotty/test_api.py deleted file mode 100644 index 6bb396f5d4d..00000000000 --- a/tests/components/iotty/test_api.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Unit tests for iottycloud API.""" - -from unittest.mock import patch - -from aiohttp import ClientSession -import pytest - -from homeassistant.components.iotty import api -from homeassistant.components.iotty.const import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_entry_oauth2_flow - -from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker - - -async def test_api_create_fail( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test API creation with no session.""" - - with pytest.raises(ValueError, match="websession"): - api.IottyProxy(hass, None, None) - - with pytest.raises(ValueError, match="oauth_session"): - api.IottyProxy(hass, aioclient_mock, None) - - -async def test_api_create_ok( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - aiohttp_client_session: None, - local_oauth_impl: ClientSession, -) -> None: - """Test API creation. 
We're checking that we can create an IottyProxy without raising.""" - - mock_config_entry.add_to_hass(hass) - assert mock_config_entry.data["auth_implementation"] is not None - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - api.IottyProxy(hass, aiohttp_client_session, local_oauth_impl) - - -@patch( - "homeassistant.helpers.config_entry_oauth2_flow.OAuth2Session.valid_token", False -) -async def test_api_getaccesstoken_tokennotvalid_reloadtoken( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - local_oauth_impl: ClientSession, - mock_aioclient: None, - aiohttp_client_session: ClientSession, -) -> None: - """Test getting access token. - - If a request with an invalid token is made, a request for a new token is done, - and the resulting token is used for future calls. - """ - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - new_token = "ACCESS_TOKEN_1" - - mock_aioclient.post( - "https://token.url", json={"access_token": new_token, "expires_in": 100} - ) - - mock_aioclient.post("https://example.com", status=201) - - mock_config_entry.add_to_hass(hass) - oauth2_session = config_entry_oauth2_flow.OAuth2Session( - hass, mock_config_entry, local_oauth_impl - ) - - iotty = api.IottyProxy(hass, aiohttp_client_session, oauth2_session) - - tok = await iotty.async_get_access_token() - assert tok == new_token diff --git a/tests/components/iotty/test_config_flow.py b/tests/components/iotty/test_config_flow.py deleted file mode 100644 index 83fa16ece56..00000000000 --- a/tests/components/iotty/test_config_flow.py +++ /dev/null @@ -1,102 +0,0 @@ -"""Test the iotty config flow.""" - -from http import HTTPStatus -from unittest.mock import AsyncMock, MagicMock - -import multidict -import pytest - -from homeassistant import config_entries -from homeassistant.components.application_credentials import ( - ClientCredential, - async_import_client_credential, -) -from homeassistant.components.iotty.application_credentials import OAUTH2_TOKEN -from homeassistant.components.iotty.const import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import config_entry_oauth2_flow -from homeassistant.setup import async_setup_component - -from .conftest import CLIENT_ID, CLIENT_SECRET, REDIRECT_URI - -from tests.test_util.aiohttp import AiohttpClientMocker -from tests.typing import ClientSessionGenerator - - -@pytest.fixture -async def setup_credentials(hass: HomeAssistant) -> None: - """Fixture to setup application credentials component.""" - await async_setup_component(hass, "application_credentials", {}) - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential(CLIENT_ID, CLIENT_SECRET), - ) - - -@pytest.fixture -def current_request_with_host(current_request: MagicMock) -> None: - """Mock current request with a host header.""" - new_headers = multidict.CIMultiDict(current_request.get.return_value.headers) - new_headers[config_entry_oauth2_flow.HEADER_FRONTEND_BASE] = "https://example.com" - current_request.get.return_value = current_request.get.return_value.clone( - headers=new_headers - ) - - -async def test_config_flow_no_credentials(hass: HomeAssistant) -> None: - """Test config flow base case with no credentials registered.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result.get("type") == FlowResultType.ABORT - assert 
result.get("reason") == "missing_credentials" - - -@pytest.mark.usefixtures("current_request_with_host", "setup_credentials") -async def test_full_flow( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - mock_setup_entry: AsyncMock, -) -> None: - """Check full flow.""" - - await async_import_client_credential( - hass, DOMAIN, ClientCredential(CLIENT_ID, CLIENT_SECRET) - ) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER, "entry_id": DOMAIN} - ) - - assert result.get("type") == FlowResultType.EXTERNAL_STEP - - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": REDIRECT_URI, - }, - ) - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == HTTPStatus.OK - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.post( - OAUTH2_TOKEN, - json={ - "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", - "type": "Bearer", - "expires_in": 60, - }, - ) - - await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/iotty/test_cover.py b/tests/components/iotty/test_cover.py deleted file mode 100644 index c9e1edaa24b..00000000000 --- a/tests/components/iotty/test_cover.py +++ /dev/null @@ -1,235 +0,0 @@ -"""Unit tests the Hass COVER component.""" - -from aiohttp import ClientSession -from freezegun.api import FrozenDateTimeFactory -from iottycloud.verbs import ( - OPEN_PERCENTAGE, - RESULT, - STATUS, - STATUS_CLOSING, - STATUS_OPENING, - STATUS_STATIONATRY, -) - -from homeassistant.components.cover import ( - ATTR_POSITION, - DOMAIN as COVER_DOMAIN, - SERVICE_CLOSE_COVER, - SERVICE_OPEN_COVER, - SERVICE_SET_COVER_POSITION, - SERVICE_STOP_COVER, - CoverState, -) -from homeassistant.components.iotty.const import DOMAIN -from homeassistant.components.iotty.coordinator import UPDATE_INTERVAL -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_entry_oauth2_flow - -from .conftest import test_sh_one_added - -from tests.common import MockConfigEntry, async_fire_time_changed - - -async def test_open_ok( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - local_oauth_impl: ClientSession, - mock_get_devices_twoshutters, - mock_get_status_filled_stationary_0, - mock_command_fn, -) -> None: - """Issue an open command.""" - - entity_id = "cover.test_shutter_0_test_serial_sh_0" - - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert (state := hass.states.get(entity_id)) - assert state.state == CoverState.CLOSED - - mock_get_status_filled_stationary_0.return_value = { - RESULT: {STATUS: STATUS_OPENING, OPEN_PERCENTAGE: 10} - } - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - await hass.async_block_till_done() - mock_command_fn.assert_called_once() - - assert (state := hass.states.get(entity_id)) - assert state.state == CoverState.OPENING - - -async def test_close_ok( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - 
local_oauth_impl: ClientSession, - mock_get_devices_twoshutters, - mock_get_status_filled_stationary_100, - mock_command_fn, -) -> None: - """Issue a close command.""" - - entity_id = "cover.test_shutter_0_test_serial_sh_0" - - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert (state := hass.states.get(entity_id)) - assert state.state == CoverState.OPEN - - mock_get_status_filled_stationary_100.return_value = { - RESULT: {STATUS: STATUS_CLOSING, OPEN_PERCENTAGE: 90} - } - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - await hass.async_block_till_done() - mock_command_fn.assert_called_once() - - assert (state := hass.states.get(entity_id)) - assert state.state == CoverState.CLOSING - - -async def test_stop_ok( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - local_oauth_impl: ClientSession, - mock_get_devices_twoshutters, - mock_get_status_filled_opening_50, - mock_command_fn, -) -> None: - """Issue a stop command.""" - - entity_id = "cover.test_shutter_0_test_serial_sh_0" - - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert (state := hass.states.get(entity_id)) - assert state.state == CoverState.OPENING - - mock_get_status_filled_opening_50.return_value = { - RESULT: {STATUS: STATUS_STATIONATRY, OPEN_PERCENTAGE: 60} - } - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - await hass.async_block_till_done() - mock_command_fn.assert_called_once() - - assert (state := hass.states.get(entity_id)) - assert state.state == CoverState.OPEN - - -async def test_set_position_ok( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - local_oauth_impl: ClientSession, - mock_get_devices_twoshutters, - mock_get_status_filled_stationary_0, - mock_command_fn, -) -> None: - """Issue a set position command.""" - - entity_id = "cover.test_shutter_0_test_serial_sh_0" - - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert (state := hass.states.get(entity_id)) - assert state.state == CoverState.CLOSED - - mock_get_status_filled_stationary_0.return_value = { - RESULT: {STATUS: STATUS_OPENING, OPEN_PERCENTAGE: 50} - } - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 10}, - blocking=True, - ) - - await hass.async_block_till_done() - mock_command_fn.assert_called_once() - - assert (state := hass.states.get(entity_id)) - assert state.state == CoverState.OPENING - - -async def test_devices_insertion_ok( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - local_oauth_impl: ClientSession, - mock_get_devices_twoshutters, - mock_get_status_filled_stationary_0, - freezer: FrozenDateTimeFactory, -) -> None: - """Test iotty cover insertion.""" - - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - - # Should have two 
devices - assert hass.states.async_entity_ids_count() == 2 - assert hass.states.async_entity_ids() == [ - "cover.test_shutter_0_test_serial_sh_0", - "cover.test_shutter_1_test_serial_sh_1", - ] - - mock_get_devices_twoshutters.return_value = test_sh_one_added - - freezer.tick(UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # Should have three devices - assert hass.states.async_entity_ids_count() == 3 - assert hass.states.async_entity_ids() == [ - "cover.test_shutter_0_test_serial_sh_0", - "cover.test_shutter_1_test_serial_sh_1", - "cover.test_shutter_2_test_serial_sh_2", - ] diff --git a/tests/components/iotty/test_init.py b/tests/components/iotty/test_init.py deleted file mode 100644 index ee8168fdf2f..00000000000 --- a/tests/components/iotty/test_init.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Tests for the iotty integration.""" - -from unittest.mock import MagicMock - -from homeassistant.components.iotty.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_entry_oauth2_flow - -from tests.common import MockConfigEntry - - -async def test_load_unload_coordinator_called( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_coordinator: MagicMock, - local_oauth_impl, -) -> None: - """Test the configuration entry loading/unloading.""" - - mock_config_entry.add_to_hass(hass) - assert mock_config_entry.data["auth_implementation"] is not None - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - await hass.async_block_till_done() - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - mock_coordinator.assert_called_once() - - assert mock_config_entry.state is ConfigEntryState.LOADED - method_call = mock_coordinator.method_calls[0] - name, _, _ = method_call - assert name == "().async_config_entry_first_refresh" - - await hass.config_entries.async_unload(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert not hass.data.get(DOMAIN) - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED - - -async def test_load_unload_iottyproxy_called( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_iotty: MagicMock, - local_oauth_impl, - mock_config_entries_async_forward_entry_setup, -) -> None: - """Test the configuration entry loading/unloading.""" - - mock_config_entry.add_to_hass(hass) - assert mock_config_entry.data["auth_implementation"] is not None - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - mock_iotty.assert_called_once() - - assert mock_config_entry.state is ConfigEntryState.LOADED - method_call = mock_iotty.method_calls[0] - name, _, _ = method_call - assert name == "().get_devices" - - await hass.config_entries.async_unload(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert not hass.data.get(DOMAIN) - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/iotty/test_switch.py b/tests/components/iotty/test_switch.py deleted file mode 100644 index 235a897c305..00000000000 --- a/tests/components/iotty/test_switch.py +++ /dev/null @@ -1,300 +0,0 @@ -"""Unit tests the Hass SWITCH component.""" - -from aiohttp import ClientSession -from freezegun.api import FrozenDateTimeFactory -from iottycloud.verbs import 
RESULT, STATUS, STATUS_OFF, STATUS_ON -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.iotty.const import DOMAIN -from homeassistant.components.iotty.coordinator import UPDATE_INTERVAL -from homeassistant.components.switch import ( - DOMAIN as SWITCH_DOMAIN, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, -) -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant -from homeassistant.helpers import ( - config_entry_oauth2_flow, - device_registry as dr, - entity_registry as er, -) - -from .conftest import test_ls_one_added, test_ls_one_removed - -from tests.common import MockConfigEntry, async_fire_time_changed - - -async def test_turn_on_ok( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - local_oauth_impl: ClientSession, - mock_get_devices_twolightswitches, - mock_get_status_filled_off, - mock_command_fn, -) -> None: - """Issue a turnon command.""" - - entity_id = "switch.test_light_switch_0_test_serial_0" - - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert (state := hass.states.get(entity_id)) - assert state.state == STATUS_OFF - - mock_get_status_filled_off.return_value = {RESULT: {STATUS: STATUS_ON}} - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - await hass.async_block_till_done() - mock_command_fn.assert_called_once() - - assert (state := hass.states.get(entity_id)) - assert state.state == STATUS_ON - - -async def test_turn_off_ok( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - local_oauth_impl: ClientSession, - mock_get_devices_twolightswitches, - mock_get_status_filled, - mock_command_fn, -) -> None: - """Issue a turnoff command.""" - - entity_id = "switch.test_light_switch_0_test_serial_0" - - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert (state := hass.states.get(entity_id)) - assert state.state == STATUS_ON - - mock_get_status_filled.return_value = {RESULT: {STATUS: STATUS_OFF}} - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - await hass.async_block_till_done() - mock_command_fn.assert_called_once() - - assert (state := hass.states.get(entity_id)) - assert state.state == STATUS_OFF - - -async def test_setup_entry_ok_nodevices( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - local_oauth_impl: ClientSession, - mock_get_status_filled, - snapshot: SnapshotAssertion, - mock_get_devices_nodevices, -) -> None: - """Correctly setup, with no iotty Devices to add to Hass.""" - - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert hass.states.async_entity_ids_count() == 0 - assert hass.states.async_entity_ids() == snapshot - - -async def test_devices_creaction_ok( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - local_oauth_impl: ClientSession, - mock_get_devices_twolightswitches, - mock_get_status_filled, - snapshot: SnapshotAssertion, -) -> None: - 
"""Test iotty switch creation.""" - - entity_id = "switch.test_light_switch_0_test_serial_0" - - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert (state := hass.states.get(entity_id)) - assert state == snapshot(name="state") - - assert (entry := entity_registry.async_get(entity_id)) - assert entry == snapshot(name="entity") - - assert entry.device_id - assert (device_entry := device_registry.async_get(entry.device_id)) - assert device_entry == snapshot(name="device") - - assert hass.states.async_entity_ids_count() == 2 - assert hass.states.async_entity_ids() == snapshot(name="entity-ids") - - -async def test_devices_deletion_ok( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - local_oauth_impl: ClientSession, - mock_get_devices_twolightswitches, - mock_get_status_filled, - snapshot: SnapshotAssertion, - freezer: FrozenDateTimeFactory, -) -> None: - """Test iotty switch deletion.""" - - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - - # Should have two devices - assert hass.states.async_entity_ids_count() == 2 - assert hass.states.async_entity_ids() == snapshot - - mock_get_devices_twolightswitches.return_value = test_ls_one_removed - - freezer.tick(UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # Should have one device - assert hass.states.async_entity_ids_count() == 1 - assert hass.states.async_entity_ids() == snapshot - - -async def test_devices_insertion_ok( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - local_oauth_impl: ClientSession, - mock_get_devices_twolightswitches, - mock_get_status_filled, - snapshot: SnapshotAssertion, - freezer: FrozenDateTimeFactory, -) -> None: - """Test iotty switch insertion.""" - - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - - # Should have two devices - assert hass.states.async_entity_ids_count() == 2 - assert hass.states.async_entity_ids() == snapshot - - mock_get_devices_twolightswitches.return_value = test_ls_one_added - - freezer.tick(UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # Should have three devices - assert hass.states.async_entity_ids_count() == 3 - assert hass.states.async_entity_ids() == snapshot - - -async def test_api_not_ok_entities_stay_the_same_as_before( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - local_oauth_impl: ClientSession, - mock_get_devices_twolightswitches, - mock_get_status_filled, - snapshot: SnapshotAssertion, - freezer: FrozenDateTimeFactory, -) -> None: - """Test case of incorrect response from iotty API on getting device status.""" - - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - - # Should have two devices - assert hass.states.async_entity_ids_count() == 2 - entity_ids = hass.states.async_entity_ids() - assert entity_ids == snapshot - - mock_get_status_filled.return_value = {RESULT: "Not a valid restul"} - - 
freezer.tick(UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # Should still have have two devices - assert hass.states.async_entity_ids_count() == 2 - assert hass.states.async_entity_ids() == entity_ids - - -async def test_api_throws_response_entities_stay_the_same_as_before( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - local_oauth_impl: ClientSession, - mock_get_devices_twolightswitches, - mock_get_status_filled, - snapshot: SnapshotAssertion, - freezer: FrozenDateTimeFactory, -) -> None: - """Test case of incorrect response from iotty API on getting device status.""" - - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl - ) - - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - - # Should have two devices - assert hass.states.async_entity_ids_count() == 2 - entity_ids = hass.states.async_entity_ids() - assert entity_ids == snapshot - - mock_get_devices_twolightswitches.return_value = test_ls_one_added - mock_get_status_filled.side_effect = Exception("Something went wrong") - - freezer.tick(UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # Should still have have two devices - assert hass.states.async_entity_ids_count() == 2 - assert hass.states.async_entity_ids() == entity_ids diff --git a/tests/components/ipma/__init__.py b/tests/components/ipma/__init__.py index ab5998c922f..799120e3966 100644 --- a/tests/components/ipma/__init__.py +++ b/tests/components/ipma/__init__.py @@ -108,7 +108,6 @@ class MockLocation: location=Forecast_Location(0, "", 0, 0, 0, "", (0, 0)), ), ] - raise ValueError(f"Unknown forecast period: {period}") name = "HomeTown" station = "HomeTown Station" diff --git a/tests/components/ipma/conftest.py b/tests/components/ipma/conftest.py index 8f2a017dcb8..7f3e82a8819 100644 --- a/tests/components/ipma/conftest.py +++ b/tests/components/ipma/conftest.py @@ -14,7 +14,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def config_entry(hass: HomeAssistant) -> MockConfigEntry: +def config_entry(hass): """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/ipma/snapshots/test_weather.ambr b/tests/components/ipma/snapshots/test_weather.ambr index 80f385546d1..1142cb7cfe5 100644 --- a/tests/components/ipma/snapshots/test_weather.ambr +++ b/tests/components/ipma/snapshots/test_weather.ambr @@ -1,4 +1,119 @@ # serializer version: 1 +# name: test_forecast_service + dict({ + 'forecast': list([ + dict({ + 'condition': 'rainy', + 'datetime': datetime.datetime(2020, 1, 16, 0, 0), + 'precipitation_probability': '100.0', + 'temperature': 16.2, + 'templow': 10.6, + 'wind_bearing': 'S', + 'wind_speed': 10.0, + }), + ]), + }) +# --- +# name: test_forecast_service.1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'rainy', + 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), + 'precipitation_probability': 80.0, + 'temperature': 12.0, + 'wind_bearing': 'S', + 'wind_speed': 32.7, + }), + dict({ + 'condition': 'clear-night', + 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), + 'precipitation_probability': 80.0, + 'temperature': 12.0, + 'wind_bearing': 'S', + 'wind_speed': 32.7, + }), + ]), + }) +# --- +# name: test_forecast_service[forecast] + dict({ + 'weather.hometown': dict({ + 'forecast': list([ + dict({ + 'condition': 'rainy', + 'datetime': 
datetime.datetime(2020, 1, 16, 0, 0), + 'precipitation_probability': '100.0', + 'temperature': 16.2, + 'templow': 10.6, + 'wind_bearing': 'S', + 'wind_speed': 10.0, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].1 + dict({ + 'weather.hometown': dict({ + 'forecast': list([ + dict({ + 'condition': 'rainy', + 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), + 'precipitation_probability': 80.0, + 'temperature': 12.0, + 'wind_bearing': 'S', + 'wind_speed': 32.7, + }), + dict({ + 'condition': 'clear-night', + 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), + 'precipitation_probability': 80.0, + 'temperature': 12.0, + 'wind_bearing': 'S', + 'wind_speed': 32.7, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'rainy', + 'datetime': datetime.datetime(2020, 1, 16, 0, 0), + 'precipitation_probability': 100.0, + 'temperature': 16.2, + 'templow': 10.6, + 'wind_bearing': 'S', + 'wind_speed': 10.0, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'rainy', + 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), + 'precipitation_probability': 80.0, + 'temperature': 12.0, + 'wind_bearing': 'S', + 'wind_speed': 32.7, + }), + dict({ + 'condition': 'clear-night', + 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), + 'precipitation_probability': 80.0, + 'temperature': 12.0, + 'wind_bearing': 'S', + 'wind_speed': 32.7, + }), + ]), + }) +# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.hometown': dict({ diff --git a/tests/components/ipma/test_config_flow.py b/tests/components/ipma/test_config_flow.py index 2a4c3517b2a..38bb1dbf126 100644 --- a/tests/components/ipma/test_config_flow.py +++ b/tests/components/ipma/test_config_flow.py @@ -1,10 +1,10 @@ """Tests for IPMA config flow.""" -from collections.abc import Generator from unittest.mock import patch from pyipma import IPMAException import pytest +from typing_extensions import Generator from homeassistant.components.ipma.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -14,8 +14,6 @@ from homeassistant.data_entry_flow import FlowResultType from . import MockLocation -from tests.common import MockConfigEntry - @pytest.fixture(name="ipma_setup", autouse=True) def ipma_setup_fixture() -> Generator[None]: @@ -95,9 +93,7 @@ async def test_config_flow_failures(hass: HomeAssistant) -> None: } -async def test_flow_entry_already_exists( - hass: HomeAssistant, init_integration: MockConfigEntry -) -> None: +async def test_flow_entry_already_exists(hass: HomeAssistant, init_integration) -> None: """Test user input for config_entry that already exists. 
Test when the form should show when user puts existing location diff --git a/tests/components/ipma/test_diagnostics.py b/tests/components/ipma/test_diagnostics.py index 26e54454947..b7d421a2ee5 100644 --- a/tests/components/ipma/test_diagnostics.py +++ b/tests/components/ipma/test_diagnostics.py @@ -4,7 +4,6 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -12,7 +11,7 @@ from tests.typing import ClientSessionGenerator async def test_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - init_integration: MockConfigEntry, + init_integration, snapshot: SnapshotAssertion, ) -> None: """Test diagnostics.""" diff --git a/tests/components/ipma/test_weather.py b/tests/components/ipma/test_weather.py index 997eb582083..b7ef1347ca5 100644 --- a/tests/components/ipma/test_weather.py +++ b/tests/components/ipma/test_weather.py @@ -4,7 +4,6 @@ import datetime from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory -from pyipma.observation import Observation import pytest from syrupy.assertion import SnapshotAssertion @@ -44,7 +43,7 @@ TEST_CONFIG_HOURLY = { class MockBadLocation(MockLocation): """Mock Location with unresponsive api.""" - async def observation(self, api) -> Observation | None: + async def observation(self, api): """Mock Observation.""" return None diff --git a/tests/components/ipp/conftest.py b/tests/components/ipp/conftest.py index 9a47cc3c355..5e39a16f3b1 100644 --- a/tests/components/ipp/conftest.py +++ b/tests/components/ipp/conftest.py @@ -1,11 +1,11 @@ """Fixtures for IPP integration tests.""" -from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch from pyipp import Printer import pytest +from typing_extensions import Generator from homeassistant.components.ipp.const import CONF_BASE_PATH, DOMAIN from homeassistant.const import ( diff --git a/tests/components/ipp/snapshots/test_diagnostics.ambr b/tests/components/ipp/snapshots/test_diagnostics.ambr index bd2564c5a40..98d0055c982 100644 --- a/tests/components/ipp/snapshots/test_diagnostics.ambr +++ b/tests/components/ipp/snapshots/test_diagnostics.ambr @@ -2,7 +2,6 @@ # name: test_diagnostics dict({ 'data': dict({ - 'booted_at': '2019-11-11T09:10:02+00:00', 'info': dict({ 'command_set': 'ESCPL2,BDC,D4,D4PX,ESCPR7,END4,GENEP,URF', 'location': None, diff --git a/tests/components/ipp/snapshots/test_sensor.ambr b/tests/components/ipp/snapshots/test_sensor.ambr deleted file mode 100644 index 3f910399ad8..00000000000 --- a/tests/components/ipp/snapshots/test_sensor.ambr +++ /dev/null @@ -1,378 +0,0 @@ -# serializer version: 1 -# name: test_sensors[sensor.test_ha_1000_series-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'idle', - 'printing', - 'stopped', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_ha_1000_series', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'ipp', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'printer', - 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_printer', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.test_ha_1000_series-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'command_set': 'ESCPL2,BDC,D4,D4PX,ESCPR7,END4,GENEP,URF', - 'device_class': 'enum', - 'friendly_name': 'Test HA-1000 Series', - 'info': 'Test HA-1000 Series', - 'location': None, - 'options': list([ - 'idle', - 'printing', - 'stopped', - ]), - 'serial': '555534593035345555', - 'state_message': None, - 'state_reason': None, - 'uri_supported': 'ipps://192.168.1.31:631/ipp/print,ipp://192.168.1.31:631/ipp/print', - }), - 'context': , - 'entity_id': 'sensor.test_ha_1000_series', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'idle', - }) -# --- -# name: test_sensors[sensor.test_ha_1000_series_black_ink-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_ha_1000_series_black_ink', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Black ink', - 'platform': 'ipp', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'marker', - 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_0', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.test_ha_1000_series_black_ink-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test HA-1000 Series Black ink', - 'marker_high_level': 100, - 'marker_low_level': 10, - 'marker_type': 'ink-cartridge', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_ha_1000_series_black_ink', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '58', - }) -# --- -# name: test_sensors[sensor.test_ha_1000_series_cyan_ink-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_ha_1000_series_cyan_ink', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cyan ink', - 'platform': 'ipp', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'marker', - 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_1', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.test_ha_1000_series_cyan_ink-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test HA-1000 Series Cyan ink', - 'marker_high_level': 100, - 'marker_low_level': 10, - 'marker_type': 'ink-cartridge', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_ha_1000_series_cyan_ink', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '91', - }) -# --- -# name: test_sensors[sensor.test_ha_1000_series_magenta_ink-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': 
dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_ha_1000_series_magenta_ink', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Magenta ink', - 'platform': 'ipp', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'marker', - 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_2', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.test_ha_1000_series_magenta_ink-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test HA-1000 Series Magenta ink', - 'marker_high_level': 100, - 'marker_low_level': 10, - 'marker_type': 'ink-cartridge', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_ha_1000_series_magenta_ink', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '73', - }) -# --- -# name: test_sensors[sensor.test_ha_1000_series_photo_black_ink-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_ha_1000_series_photo_black_ink', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Photo black ink', - 'platform': 'ipp', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'marker', - 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_3', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.test_ha_1000_series_photo_black_ink-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test HA-1000 Series Photo black ink', - 'marker_high_level': 100, - 'marker_low_level': 10, - 'marker_type': 'ink-cartridge', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_ha_1000_series_photo_black_ink', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '98', - }) -# --- -# name: test_sensors[sensor.test_ha_1000_series_uptime-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_ha_1000_series_uptime', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Uptime', - 'platform': 'ipp', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'uptime', - 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_uptime', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.test_ha_1000_series_uptime-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Test HA-1000 Series Uptime', - }), - 'context': , - 'entity_id': 'sensor.test_ha_1000_series_uptime', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': '2019-11-11T09:10:02+00:00', - }) -# --- -# name: test_sensors[sensor.test_ha_1000_series_yellow_ink-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_ha_1000_series_yellow_ink', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Yellow ink', - 'platform': 'ipp', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'marker', - 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_4', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.test_ha_1000_series_yellow_ink-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test HA-1000 Series Yellow ink', - 'marker_high_level': 100, - 'marker_low_level': 10, - 'marker_type': 'ink-cartridge', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_ha_1000_series_yellow_ink', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '95', - }) -# --- diff --git a/tests/components/ipp/test_diagnostics.py b/tests/components/ipp/test_diagnostics.py index d78f066d788..08446601e69 100644 --- a/tests/components/ipp/test_diagnostics.py +++ b/tests/components/ipp/test_diagnostics.py @@ -1,6 +1,5 @@ """Tests for the diagnostics data provided by the Internet Printing Protocol (IPP) integration.""" -import pytest from syrupy import SnapshotAssertion from homeassistant.core import HomeAssistant @@ -10,7 +9,6 @@ from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator -@pytest.mark.freeze_time("2019-11-11 09:10:32+00:00") async def test_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, diff --git a/tests/components/ipp/test_sensor.py b/tests/components/ipp/test_sensor.py index bdbb9a88d35..9f0079a4e40 100644 --- a/tests/components/ipp/test_sensor.py +++ b/tests/components/ipp/test_sensor.py @@ -3,12 +3,13 @@ from unittest.mock import AsyncMock import pytest -from syrupy.assertion import SnapshotAssertion +from homeassistant.components.sensor import ATTR_OPTIONS +from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry @pytest.mark.freeze_time("2019-11-11 09:10:32+00:00") @@ -16,11 +17,53 @@ from tests.common import MockConfigEntry, snapshot_platform async def test_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, init_integration: MockConfigEntry, ) -> None: """Test the creation and values of the IPP sensors.""" - await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id) + state = hass.states.get("sensor.test_ha_1000_series") + assert state + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None + assert state.attributes.get(ATTR_OPTIONS) == ["idle", "printing", "stopped"] + + entry = entity_registry.async_get("sensor.test_ha_1000_series") + assert entry + assert entry.translation_key == 
"printer" + + state = hass.states.get("sensor.test_ha_1000_series_black_ink") + assert state + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE + assert state.state == "58" + + state = hass.states.get("sensor.test_ha_1000_series_photo_black_ink") + assert state + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE + assert state.state == "98" + + state = hass.states.get("sensor.test_ha_1000_series_cyan_ink") + assert state + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE + assert state.state == "91" + + state = hass.states.get("sensor.test_ha_1000_series_yellow_ink") + assert state + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE + assert state.state == "95" + + state = hass.states.get("sensor.test_ha_1000_series_magenta_ink") + assert state + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE + assert state.state == "73" + + state = hass.states.get("sensor.test_ha_1000_series_uptime") + assert state + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None + assert state.state == "2019-11-11T09:10:02+00:00" + + entry = entity_registry.async_get("sensor.test_ha_1000_series_uptime") + + assert entry + assert entry.unique_id == "cfe92100-67c4-11d4-a45f-f8d027761251_uptime" + assert entry.entity_category == EntityCategory.DIAGNOSTIC async def test_disabled_by_default_sensors( diff --git a/tests/components/iqvia/conftest.py b/tests/components/iqvia/conftest.py index 0d23b825c5a..6fb14ca4d28 100644 --- a/tests/components/iqvia/conftest.py +++ b/tests/components/iqvia/conftest.py @@ -1,23 +1,18 @@ """Define test fixtures for IQVIA.""" -from collections.abc import AsyncGenerator -from typing import Any +import json from unittest.mock import patch import pytest from homeassistant.components.iqvia.const import CONF_ZIP_CODE, DOMAIN -from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from homeassistant.util.json import JsonObjectType -from tests.common import MockConfigEntry, load_json_object_fixture +from tests.common import MockConfigEntry, load_fixture @pytest.fixture(name="config_entry") -def config_entry_fixture( - hass: HomeAssistant, config: dict[str, Any] -) -> MockConfigEntry: +def config_entry_fixture(hass, config): """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -30,7 +25,7 @@ def config_entry_fixture( @pytest.fixture(name="config") -def config_fixture() -> dict[str, Any]: +def config_fixture(hass): """Define a config entry data fixture.""" return { CONF_ZIP_CODE: "12345", @@ -38,59 +33,59 @@ def config_fixture() -> dict[str, Any]: @pytest.fixture(name="data_allergy_forecast", scope="package") -def data_allergy_forecast_fixture() -> JsonObjectType: +def data_allergy_forecast_fixture(): """Define allergy forecast data.""" - return load_json_object_fixture("allergy_forecast_data.json", "iqvia") + return json.loads(load_fixture("allergy_forecast_data.json", "iqvia")) @pytest.fixture(name="data_allergy_index", scope="package") -def data_allergy_index_fixture() -> JsonObjectType: +def data_allergy_index_fixture(): """Define allergy index data.""" - return load_json_object_fixture("allergy_index_data.json", "iqvia") + return json.loads(load_fixture("allergy_index_data.json", "iqvia")) @pytest.fixture(name="data_allergy_outlook", scope="package") -def data_allergy_outlook_fixture() -> JsonObjectType: +def data_allergy_outlook_fixture(): """Define allergy outlook data.""" - return 
load_json_object_fixture("allergy_outlook_data.json", "iqvia") + return json.loads(load_fixture("allergy_outlook_data.json", "iqvia")) @pytest.fixture(name="data_asthma_forecast", scope="package") -def data_asthma_forecast_fixture() -> JsonObjectType: +def data_asthma_forecast_fixture(): """Define asthma forecast data.""" - return load_json_object_fixture("asthma_forecast_data.json", "iqvia") + return json.loads(load_fixture("asthma_forecast_data.json", "iqvia")) @pytest.fixture(name="data_asthma_index", scope="package") -def data_asthma_index_fixture() -> JsonObjectType: +def data_asthma_index_fixture(): """Define asthma index data.""" - return load_json_object_fixture("asthma_index_data.json", "iqvia") + return json.loads(load_fixture("asthma_index_data.json", "iqvia")) @pytest.fixture(name="data_disease_forecast", scope="package") -def data_disease_forecast_fixture() -> JsonObjectType: +def data_disease_forecast_fixture(): """Define disease forecast data.""" - return load_json_object_fixture("disease_forecast_data.json", "iqvia") + return json.loads(load_fixture("disease_forecast_data.json", "iqvia")) @pytest.fixture(name="data_disease_index", scope="package") -def data_disease_index_fixture() -> JsonObjectType: +def data_disease_index_fixture(): """Define disease index data.""" - return load_json_object_fixture("disease_index_data.json", "iqvia") + return json.loads(load_fixture("disease_index_data.json", "iqvia")) @pytest.fixture(name="setup_iqvia") async def setup_iqvia_fixture( - hass: HomeAssistant, - config: dict[str, Any], - data_allergy_forecast: JsonObjectType, - data_allergy_index: JsonObjectType, - data_allergy_outlook: JsonObjectType, - data_asthma_forecast: JsonObjectType, - data_asthma_index: JsonObjectType, - data_disease_forecast: JsonObjectType, - data_disease_index: JsonObjectType, -) -> AsyncGenerator[None]: + hass, + config, + data_allergy_forecast, + data_allergy_index, + data_allergy_outlook, + data_asthma_forecast, + data_asthma_index, + data_disease_forecast, + data_disease_index, +): """Define a fixture to set up IQVIA.""" with ( patch( diff --git a/tests/components/iqvia/snapshots/test_diagnostics.ambr b/tests/components/iqvia/snapshots/test_diagnostics.ambr index f2fa656cb0f..c46a2cc15e3 100644 --- a/tests/components/iqvia/snapshots/test_diagnostics.ambr +++ b/tests/components/iqvia/snapshots/test_diagnostics.ambr @@ -348,8 +348,6 @@ 'zip_code': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'iqvia', 'entry_id': '690ac4b7e99855fc5ee7b987a758d5cb', 'minor_version': 1, diff --git a/tests/components/iqvia/test_config_flow.py b/tests/components/iqvia/test_config_flow.py index 22f473a3fb5..17c977a6b4c 100644 --- a/tests/components/iqvia/test_config_flow.py +++ b/tests/components/iqvia/test_config_flow.py @@ -1,17 +1,12 @@ """Define tests for the IQVIA config flow.""" -from typing import Any - -import pytest - from homeassistant.components.iqvia import CONF_ZIP_CODE, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -@pytest.mark.usefixtures("config_entry") -async def test_duplicate_error(hass: HomeAssistant, config: dict[str, Any]) -> None: +async def test_duplicate_error(hass: HomeAssistant, config, config_entry) -> None: """Test that errors are shown when duplicates are added.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=config @@ -38,8 +33,7 @@ async def 
test_show_form(hass: HomeAssistant) -> None: assert result["step_id"] == "user" -@pytest.mark.usefixtures("setup_iqvia") -async def test_step_user(hass: HomeAssistant, config: dict[str, Any]) -> None: +async def test_step_user(hass: HomeAssistant, config, setup_iqvia) -> None: """Test that the user step works (without MFA).""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=config diff --git a/tests/components/iqvia/test_diagnostics.py b/tests/components/iqvia/test_diagnostics.py index 9d5639c311c..7c445c9b3e4 100644 --- a/tests/components/iqvia/test_diagnostics.py +++ b/tests/components/iqvia/test_diagnostics.py @@ -1,24 +1,23 @@ """Test IQVIA diagnostics.""" from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( hass: HomeAssistant, - config_entry: MockConfigEntry, + config_entry, hass_client: ClientSessionGenerator, - setup_iqvia: None, # Needs to be injected after config_entry + setup_iqvia, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert await get_diagnostics_for_config_entry( - hass, hass_client, config_entry - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + == snapshot + ) diff --git a/tests/components/iron_os/__init__.py b/tests/components/iron_os/__init__.py deleted file mode 100644 index 4e27f2c741c..00000000000 --- a/tests/components/iron_os/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Pinecil integration.""" diff --git a/tests/components/iron_os/conftest.py b/tests/components/iron_os/conftest.py deleted file mode 100644 index a7c3592ae73..00000000000 --- a/tests/components/iron_os/conftest.py +++ /dev/null @@ -1,164 +0,0 @@ -"""Fixtures for Pinecil tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch - -from bleak.backends.device import BLEDevice -from habluetooth import BluetoothServiceInfoBleak -from pynecil import DeviceInfoResponse, LiveDataResponse, OperatingMode, PowerSource -import pytest - -from homeassistant.components.iron_os import DOMAIN -from homeassistant.const import CONF_ADDRESS - -from tests.common import MockConfigEntry -from tests.components.bluetooth import generate_advertisement_data, generate_ble_device - -USER_INPUT = {CONF_ADDRESS: "c0:ff:ee:c0:ff:ee"} -DEFAULT_NAME = "Pinecil-C0FFEEE" -PINECIL_SERVICE_INFO = BluetoothServiceInfoBleak( - name="Pinecil-C0FFEEE", - address="c0:ff:ee:c0:ff:ee", - device=generate_ble_device( - address="c0:ff:ee:c0:ff:ee", - name="Pinecil-C0FFEEE", - ), - rssi=-61, - manufacturer_data={}, - service_data={}, - service_uuids=["9eae1000-9d0d-48c5-aa55-33e27f9bc533"], - source="local", - advertisement=generate_advertisement_data( - manufacturer_data={}, - service_uuids=["9eae1000-9d0d-48c5-aa55-33e27f9bc533"], - ), - connectable=True, - time=0, - tx_power=None, -) - -UNKNOWN_SERVICE_INFO = BluetoothServiceInfoBleak( - name="", - address="c0:ff:ee:c0:ff:ee", - device=generate_ble_device( - address="c0:ff:ee:c0:ff:ee", - name="", - ), - rssi=-61, - manufacturer_data={}, - service_data={}, - service_uuids=[], - source="local", - advertisement=generate_advertisement_data( - manufacturer_data={}, - service_uuids=[], - ), - 
connectable=True, - time=0, - tx_power=None, -) - - -@pytest.fixture(autouse=True) -def mock_bluetooth(enable_bluetooth: None) -> None: - """Auto mock bluetooth.""" - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.iron_os.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture(name="discovery") -def mock_async_discovered_service_info() -> Generator[MagicMock]: - """Mock service discovery.""" - with patch( - "homeassistant.components.iron_os.config_flow.async_discovered_service_info", - return_value=[PINECIL_SERVICE_INFO, UNKNOWN_SERVICE_INFO], - ) as discovery: - yield discovery - - -@pytest.fixture(name="config_entry") -def mock_config_entry() -> MockConfigEntry: - """Mock Pinecil configuration entry.""" - return MockConfigEntry( - domain=DOMAIN, - title=DEFAULT_NAME, - data={}, - unique_id="c0:ff:ee:c0:ff:ee", - entry_id="1234567890", - ) - - -@pytest.fixture(name="ble_device") -def mock_ble_device() -> Generator[MagicMock]: - """Mock BLEDevice.""" - with patch( - "homeassistant.components.bluetooth.async_ble_device_from_address", - return_value=BLEDevice( - address="c0:ff:ee:c0:ff:ee", name=DEFAULT_NAME, rssi=-50, details={} - ), - ) as ble_device: - yield ble_device - - -@pytest.fixture(autouse=True) -def mock_githubapi() -> Generator[AsyncMock]: - """Mock aiogithubapi.""" - - with patch( - "homeassistant.components.iron_os.GitHubAPI", - autospec=True, - ) as mock_client: - client = mock_client.return_value - client.repos.releases.latest = AsyncMock() - - client.repos.releases.latest.return_value.data.html_url = ( - "https://github.com/Ralim/IronOS/releases/tag/v2.22" - ) - client.repos.releases.latest.return_value.data.name = ( - "V2.22 | TS101 & S60 Added | PinecilV2 improved" - ) - client.repos.releases.latest.return_value.data.tag_name = "v2.22" - client.repos.releases.latest.return_value.data.body = "**RELEASE_NOTES**" - - yield client - - -@pytest.fixture -def mock_pynecil() -> Generator[AsyncMock]: - """Mock Pynecil library.""" - with patch( - "homeassistant.components.iron_os.Pynecil", autospec=True - ) as mock_client: - client = mock_client.return_value - - client.get_device_info.return_value = DeviceInfoResponse( - build="v2.22", - device_id="c0ffeeC0", - address="c0:ff:ee:c0:ff:ee", - device_sn="0000c0ffeec0ffee", - name=DEFAULT_NAME, - ) - client.get_live_data.return_value = LiveDataResponse( - live_temp=298, - setpoint_temp=300, - dc_voltage=20.6, - handle_temp=36.3, - pwm_level=41, - power_src=PowerSource.PD, - tip_resistance=6.2, - uptime=1671, - movement_time=10000, - max_tip_temp_ability=460, - tip_voltage=2212, - hall_sensor=0, - operating_mode=OperatingMode.SOLDERING, - estimated_power=24.8, - ) - yield client diff --git a/tests/components/iron_os/snapshots/test_number.ambr b/tests/components/iron_os/snapshots/test_number.ambr deleted file mode 100644 index 2f5ee62e37e..00000000000 --- a/tests/components/iron_os/snapshots/test_number.ambr +++ /dev/null @@ -1,58 +0,0 @@ -# serializer version: 1 -# name: test_state[number.pinecil_setpoint_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 450, - 'min': 10, - 'mode': , - 'step': 5, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.pinecil_setpoint_temperature', - 'has_entity_name': 
True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Setpoint temperature', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_setpoint_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_state[number.pinecil_setpoint_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Pinecil Setpoint temperature', - 'max': 450, - 'min': 10, - 'mode': , - 'step': 5, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.pinecil_setpoint_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '300', - }) -# --- diff --git a/tests/components/iron_os/snapshots/test_sensor.ambr b/tests/components/iron_os/snapshots/test_sensor.ambr deleted file mode 100644 index 64cb951dacc..00000000000 --- a/tests/components/iron_os/snapshots/test_sensor.ambr +++ /dev/null @@ -1,683 +0,0 @@ -# serializer version: 1 -# name: test_sensors[sensor.pinecil_dc_input_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.pinecil_dc_input_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'DC input voltage', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_voltage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.pinecil_dc_input_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Pinecil DC input voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.pinecil_dc_input_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20.6', - }) -# --- -# name: test_sensors[sensor.pinecil_estimated_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pinecil_estimated_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Estimated power', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_estimated_power', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.pinecil_estimated_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Pinecil Estimated power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.pinecil_estimated_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '24.8', - }) -# --- -# name: 
test_sensors[sensor.pinecil_hall_effect_strength-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.pinecil_hall_effect_strength', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hall effect strength', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_hall_sensor', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.pinecil_hall_effect_strength-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Pinecil Hall effect strength', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.pinecil_hall_effect_strength', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.pinecil_handle_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pinecil_handle_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Handle temperature', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_handle_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.pinecil_handle_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Pinecil Handle temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.pinecil_handle_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '36.3', - }) -# --- -# name: test_sensors[sensor.pinecil_last_movement_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.pinecil_last_movement_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last movement time', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_movement_time', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.pinecil_last_movement_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'Pinecil Last movement time', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.pinecil_last_movement_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 
'10000', - }) -# --- -# name: test_sensors[sensor.pinecil_max_tip_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.pinecil_max_tip_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Max tip temperature', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_max_tip_temp_ability', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.pinecil_max_tip_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Pinecil Max tip temperature', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.pinecil_max_tip_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '460', - }) -# --- -# name: test_sensors[sensor.pinecil_operating_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'idle', - 'soldering', - 'boost', - 'sleeping', - 'settings', - 'debug', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pinecil_operating_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Operating mode', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_operating_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.pinecil_operating_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Pinecil Operating mode', - 'options': list([ - 'idle', - 'soldering', - 'boost', - 'sleeping', - 'settings', - 'debug', - ]), - }), - 'context': , - 'entity_id': 'sensor.pinecil_operating_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'soldering', - }) -# --- -# name: test_sensors[sensor.pinecil_power_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.pinecil_power_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power level', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_power_pwm_level', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.pinecil_power_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'Pinecil Power level', - 
'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.pinecil_power_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '41', - }) -# --- -# name: test_sensors[sensor.pinecil_power_source-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'dc', - 'qc', - 'pd_vbus', - 'pd', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.pinecil_power_source', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power source', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_power_source', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.pinecil_power_source-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Pinecil Power source', - 'options': list([ - 'dc', - 'qc', - 'pd_vbus', - 'pd', - ]), - }), - 'context': , - 'entity_id': 'sensor.pinecil_power_source', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'pd', - }) -# --- -# name: test_sensors[sensor.pinecil_raw_tip_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.pinecil_raw_tip_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Raw tip voltage', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_tip_voltage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.pinecil_raw_tip_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Pinecil Raw tip voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.pinecil_raw_tip_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2212', - }) -# --- -# name: test_sensors[sensor.pinecil_tip_resistance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.pinecil_tip_resistance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tip resistance', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_tip_resistance', - 'unit_of_measurement': 'Ω', - }) -# --- -# name: test_sensors[sensor.pinecil_tip_resistance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'friendly_name': 'Pinecil Tip resistance', - 'unit_of_measurement': 'Ω', - }), - 'context': , - 'entity_id': 'sensor.pinecil_tip_resistance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '6.2', - }) -# --- -# name: test_sensors[sensor.pinecil_tip_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pinecil_tip_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Tip temperature', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_live_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.pinecil_tip_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Pinecil Tip temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.pinecil_tip_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '298', - }) -# --- -# name: test_sensors[sensor.pinecil_uptime-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.pinecil_uptime', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Uptime', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'c0:ff:ee:c0:ff:ee_uptime', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.pinecil_uptime-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'Pinecil Uptime', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.pinecil_uptime', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1671', - }) -# --- diff --git a/tests/components/iron_os/snapshots/test_update.ambr b/tests/components/iron_os/snapshots/test_update.ambr deleted file mode 100644 index e0872d032ec..00000000000 --- a/tests/components/iron_os/snapshots/test_update.ambr +++ /dev/null @@ -1,63 +0,0 @@ -# serializer version: 1 -# name: test_update.2 - '**RELEASE_NOTES**' -# --- -# name: test_update[update.pinecil_firmware-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'update', - 'entity_category': , - 'entity_id': 'update.pinecil_firmware', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Firmware', - 'platform': 'iron_os', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 
'unique_id': 'c0:ff:ee:c0:ff:ee_firmware', - 'unit_of_measurement': None, - }) -# --- -# name: test_update[update.pinecil_firmware-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'auto_update': False, - 'device_class': 'firmware', - 'display_precision': 0, - 'entity_picture': 'https://brands.home-assistant.io/_/iron_os/icon.png', - 'friendly_name': 'Pinecil Firmware', - 'in_progress': False, - 'installed_version': 'v2.22', - 'latest_version': 'v2.22', - 'release_summary': None, - 'release_url': 'https://github.com/Ralim/IronOS/releases/tag/v2.22', - 'skipped_version': None, - 'supported_features': , - 'title': 'IronOS V2.22 | TS101 & S60 Added | PinecilV2 improved', - 'update_percentage': None, - }), - 'context': , - 'entity_id': 'update.pinecil_firmware', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/iron_os/test_config_flow.py b/tests/components/iron_os/test_config_flow.py deleted file mode 100644 index 231ec6cc3d6..00000000000 --- a/tests/components/iron_os/test_config_flow.py +++ /dev/null @@ -1,66 +0,0 @@ -"""Tests for the Pinecil config flow.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock, MagicMock - -from homeassistant.components.iron_os import DOMAIN -from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .conftest import DEFAULT_NAME, PINECIL_SERVICE_INFO, USER_INPUT - - -async def test_form( - hass: HomeAssistant, mock_setup_entry: AsyncMock, discovery: MagicMock -) -> None: - """Test the user config flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - USER_INPUT, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DEFAULT_NAME - assert result["data"] == {} - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_no_device_discovered( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - discovery: MagicMock, -) -> None: - """Test setup with no device discoveries.""" - discovery.return_value = [] - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "no_devices_found" - - -async def test_async_step_bluetooth(hass: HomeAssistant) -> None: - """Test discovery via bluetooth..""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_BLUETOOTH}, - data=PINECIL_SERVICE_INFO, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "bluetooth_confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DEFAULT_NAME - assert result["data"] == {} - assert result["result"].unique_id == "c0:ff:ee:c0:ff:ee" diff --git a/tests/components/iron_os/test_init.py b/tests/components/iron_os/test_init.py deleted file mode 100644 index f7db2a813ec..00000000000 --- a/tests/components/iron_os/test_init.py +++ /dev/null @@ -1,60 +0,0 @@ -"""Test init of IronOS integration.""" - -from unittest.mock import AsyncMock - -from pynecil import CommunicationError -import pytest - -from homeassistant.config_entries 
import ConfigEntryState -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -@pytest.mark.usefixtures("mock_pynecil", "ble_device") -async def test_setup_and_unload( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test integration setup and unload.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.NOT_LOADED - - -@pytest.mark.usefixtures("ble_device") -async def test_update_data_config_entry_not_ready( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_pynecil: AsyncMock, -) -> None: - """Test config entry not ready.""" - mock_pynecil.get_live_data.side_effect = CommunicationError - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.SETUP_RETRY - - -@pytest.mark.usefixtures("ble_device") -async def test_setup_config_entry_not_ready( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_pynecil: AsyncMock, -) -> None: - """Test config entry not ready.""" - mock_pynecil.get_device_info.side_effect = CommunicationError - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/iron_os/test_number.py b/tests/components/iron_os/test_number.py deleted file mode 100644 index 781492987ee..00000000000 --- a/tests/components/iron_os/test_number.py +++ /dev/null @@ -1,104 +0,0 @@ -"""Tests for the IronOS number platform.""" - -from collections.abc import AsyncGenerator -from unittest.mock import AsyncMock, patch - -from pynecil import CharSetting, CommunicationError -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.number import ( - ATTR_VALUE, - DOMAIN as NUMBER_DOMAIN, - SERVICE_SET_VALUE, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.fixture(autouse=True) -async def sensor_only() -> AsyncGenerator[None]: - """Enable only the number platform.""" - with patch( - "homeassistant.components.iron_os.PLATFORMS", - [Platform.NUMBER], - ): - yield - - -@pytest.mark.usefixtures( - "entity_registry_enabled_by_default", "mock_pynecil", "ble_device" -) -async def test_state( - hass: HomeAssistant, - config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Test the IronOS number platform states.""" - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") -async def test_set_value( - hass: HomeAssistant, - config_entry: MockConfigEntry, - 
mock_pynecil: AsyncMock, -) -> None: - """Test the IronOS number platform set value service.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - service_data={ATTR_VALUE: 300}, - target={ATTR_ENTITY_ID: "number.pinecil_setpoint_temperature"}, - blocking=True, - ) - assert len(mock_pynecil.write.mock_calls) == 1 - mock_pynecil.write.assert_called_once_with(CharSetting.SETPOINT_TEMP, 300) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") -async def test_set_value_exception( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_pynecil: AsyncMock, -) -> None: - """Test the IronOS number platform set value service with exception.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_pynecil.write.side_effect = CommunicationError - - with pytest.raises( - ServiceValidationError, - match="Failed to submit setting to device, try again later", - ): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - service_data={ATTR_VALUE: 300}, - target={ATTR_ENTITY_ID: "number.pinecil_setpoint_temperature"}, - blocking=True, - ) diff --git a/tests/components/iron_os/test_sensor.py b/tests/components/iron_os/test_sensor.py deleted file mode 100644 index 2f79487a7fd..00000000000 --- a/tests/components/iron_os/test_sensor.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Tests for the Pinecil Sensors.""" - -from collections.abc import AsyncGenerator -from unittest.mock import AsyncMock, MagicMock, patch - -from freezegun.api import FrozenDateTimeFactory -from pynecil import CommunicationError -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.iron_os.coordinator import SCAN_INTERVAL -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - - -@pytest.fixture(autouse=True) -async def sensor_only() -> AsyncGenerator[None]: - """Enable only the sensor platform.""" - with patch( - "homeassistant.components.iron_os.PLATFORMS", - [Platform.SENSOR], - ): - yield - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensors( - hass: HomeAssistant, - config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - mock_pynecil: AsyncMock, - ble_device: MagicMock, -) -> None: - """Test the Pinecil sensor platform.""" - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensors_unavailable( - hass: HomeAssistant, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_pynecil: AsyncMock, - ble_device: MagicMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test the sensors when device disconnects.""" - 
config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - mock_pynecil.get_live_data.side_effect = CommunicationError - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - - entity_entries = er.async_entries_for_config_entry( - entity_registry, config_entry.entry_id - ) - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/iron_os/test_update.py b/tests/components/iron_os/test_update.py deleted file mode 100644 index 7a2650ba7a3..00000000000 --- a/tests/components/iron_os/test_update.py +++ /dev/null @@ -1,77 +0,0 @@ -"""Tests for IronOS update platform.""" - -from collections.abc import AsyncGenerator -from unittest.mock import AsyncMock, patch - -from aiogithubapi import GitHubException -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry, snapshot_platform -from tests.typing import WebSocketGenerator - - -@pytest.fixture(autouse=True) -async def update_only() -> AsyncGenerator[None]: - """Enable only the update platform.""" - with patch( - "homeassistant.components.iron_os.PLATFORMS", - [Platform.UPDATE], - ): - yield - - -@pytest.mark.usefixtures("mock_pynecil", "ble_device", "mock_githubapi") -async def test_update( - hass: HomeAssistant, - config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test the IronOS update platform.""" - ws_client = await hass_ws_client(hass) - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - await ws_client.send_json( - { - "id": 1, - "type": "update/release_notes", - "entity_id": "update.pinecil_firmware", - } - ) - result = await ws_client.receive_json() - assert result["result"] == snapshot - - -@pytest.mark.usefixtures("ble_device", "mock_pynecil") -async def test_update_unavailable( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_githubapi: AsyncMock, -) -> None: - """Test update entity unavailable on error.""" - - mock_githubapi.repos.releases.latest.side_effect = GitHubException - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - state = hass.states.get("update.pinecil_firmware") - assert state is not None - assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/iskra/__init__.py b/tests/components/iskra/__init__.py deleted file mode 100644 index ca93572a9e4..00000000000 --- a/tests/components/iskra/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Iskra component.""" diff --git a/tests/components/iskra/conftest.py b/tests/components/iskra/conftest.py deleted file mode 100644 index d9cc6808aaa..00000000000 --- a/tests/components/iskra/conftest.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Fixtures for mocking pyiskra's different protocols. 
- -Fixtures: -- `mock_pyiskra_rest`: Mock pyiskra Rest API protocol. -- `mock_pyiskra_modbus`: Mock pyiskra Modbus protocol. -""" - -from unittest.mock import patch - -import pytest - -from .const import PQ_MODEL, SERIAL, SG_MODEL - - -class MockBasicInfo: - """Mock BasicInfo class.""" - - def __init__(self, model) -> None: - """Initialize the mock class.""" - self.serial = SERIAL - self.model = model - self.description = "Iskra mock device" - self.location = "imagination" - self.sw_ver = "1.0.0" - - -@pytest.fixture -def mock_pyiskra_rest(): - """Mock Iskra API authenticate with Rest API protocol.""" - - with patch( - "pyiskra.adapters.RestAPI.RestAPI.get_basic_info", - return_value=MockBasicInfo(model=SG_MODEL), - ) as basic_info_mock: - yield basic_info_mock - - -@pytest.fixture -def mock_pyiskra_modbus(): - """Mock Iskra API authenticate with Modbus protocol.""" - - with patch( - "pyiskra.adapters.Modbus.Modbus.get_basic_info", - return_value=MockBasicInfo(model=PQ_MODEL), - ) as basic_info_mock: - yield basic_info_mock diff --git a/tests/components/iskra/const.py b/tests/components/iskra/const.py deleted file mode 100644 index bf38c9a4a79..00000000000 --- a/tests/components/iskra/const.py +++ /dev/null @@ -1,10 +0,0 @@ -"""Constants used in the Iskra component tests.""" - -SG_MODEL = "SG-W1" -PQ_MODEL = "MC784" -SERIAL = "XXXXXXX" -HOST = "192.1.0.1" -MODBUS_PORT = 10001 -MODBUS_ADDRESS = 33 -USERNAME = "test_username" -PASSWORD = "test_password" diff --git a/tests/components/iskra/test_config_flow.py b/tests/components/iskra/test_config_flow.py deleted file mode 100644 index 0c128be9850..00000000000 --- a/tests/components/iskra/test_config_flow.py +++ /dev/null @@ -1,300 +0,0 @@ -"""Tests for the Iskra config flow.""" - -from pyiskra.exceptions import ( - DeviceConnectionError, - DeviceTimeoutError, - InvalidResponseCode, - NotAuthorised, -) -import pytest - -from homeassistant.components.iskra import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import ( - CONF_ADDRESS, - CONF_HOST, - CONF_PASSWORD, - CONF_PORT, - CONF_PROTOCOL, - CONF_USERNAME, -) -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .const import ( - HOST, - MODBUS_ADDRESS, - MODBUS_PORT, - PASSWORD, - PQ_MODEL, - SERIAL, - SG_MODEL, - USERNAME, -) - -from tests.common import MockConfigEntry - - -# Test step_user with Rest API protocol -async def test_user_rest_no_auth(hass: HomeAssistant, mock_pyiskra_rest) -> None: - """Test the user flow with Rest API protocol.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - # Test if user form is provided - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - # Test no authentication required - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, - ) - - # Test successful Rest API configuration - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == SERIAL - assert result["title"] == SG_MODEL - assert result["data"] == {CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"} - - -async def test_user_rest_auth(hass: HomeAssistant, mock_pyiskra_rest) -> None: - """Test the user flow with Rest API protocol and authentication required.""" - mock_pyiskra_rest.side_effect = NotAuthorised - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": 
SOURCE_USER}, - ) - - # Test if user form is provided - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - # Test if prompted to enter username and password if not authorised - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "authentication" - - # Test failed authentication - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD}, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "invalid_auth"} - assert result["step_id"] == "authentication" - - # Test successful authentication - mock_pyiskra_rest.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD}, - ) - - # Test successful Rest API configuration - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == SERIAL - assert result["title"] == SG_MODEL - assert result["data"] == { - CONF_HOST: HOST, - CONF_PROTOCOL: "rest_api", - CONF_USERNAME: USERNAME, - CONF_PASSWORD: PASSWORD, - } - - -async def test_user_modbus(hass: HomeAssistant, mock_pyiskra_modbus) -> None: - """Test the user flow with Modbus TCP protocol.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - # Test if user form is provided - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_HOST: HOST, CONF_PROTOCOL: "modbus_tcp"}, - ) - - # Test if prompted to enter port and address - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "modbus_tcp" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PORT: MODBUS_PORT, - CONF_ADDRESS: MODBUS_ADDRESS, - }, - ) - - # Test successful Modbus TCP configuration - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == SERIAL - assert result["title"] == PQ_MODEL - assert result["data"] == { - CONF_HOST: HOST, - CONF_PROTOCOL: "modbus_tcp", - CONF_PORT: MODBUS_PORT, - CONF_ADDRESS: MODBUS_ADDRESS, - } - - -async def test_modbus_abort_if_already_setup( - hass: HomeAssistant, mock_pyiskra_modbus -) -> None: - """Test we abort if Iskra is already set up.""" - - MockConfigEntry(domain=DOMAIN, unique_id=SERIAL).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={CONF_HOST: HOST, CONF_PROTOCOL: "modbus_tcp"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "modbus_tcp" - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PORT: MODBUS_PORT, - CONF_ADDRESS: MODBUS_ADDRESS, - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_rest_api_abort_if_already_setup( - hass: HomeAssistant, mock_pyiskra_rest -) -> None: - """Test we abort if Iskra is already set up.""" - - MockConfigEntry(domain=DOMAIN, unique_id=SERIAL).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={CONF_HOST: HOST, 
CONF_PROTOCOL: "rest_api"}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize( - ("s_effect", "reason"), - [ - (DeviceConnectionError, "cannot_connect"), - (DeviceTimeoutError, "cannot_connect"), - (InvalidResponseCode, "cannot_connect"), - (Exception, "unknown"), - ], -) -async def test_modbus_device_error( - hass: HomeAssistant, - mock_pyiskra_modbus, - s_effect, - reason, -) -> None: - """Test device error with Modbus TCP protocol.""" - mock_pyiskra_modbus.side_effect = s_effect - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={CONF_HOST: HOST, CONF_PROTOCOL: "modbus_tcp"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "modbus_tcp" - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PORT: MODBUS_PORT, - CONF_ADDRESS: MODBUS_ADDRESS, - }, - ) - - # Test if error returned - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "modbus_tcp" - assert result["errors"] == {"base": reason} - - # Remove side effect - mock_pyiskra_modbus.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PORT: MODBUS_PORT, - CONF_ADDRESS: MODBUS_ADDRESS, - }, - ) - - # Test successful Modbus TCP configuration - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == SERIAL - assert result["title"] == PQ_MODEL - assert result["data"] == { - CONF_HOST: HOST, - CONF_PROTOCOL: "modbus_tcp", - CONF_PORT: MODBUS_PORT, - CONF_ADDRESS: MODBUS_ADDRESS, - } - - -@pytest.mark.parametrize( - ("s_effect", "reason"), - [ - (DeviceConnectionError, "cannot_connect"), - (DeviceTimeoutError, "cannot_connect"), - (InvalidResponseCode, "cannot_connect"), - (Exception, "unknown"), - ], -) -async def test_rest_device_error( - hass: HomeAssistant, - mock_pyiskra_rest, - s_effect, - reason, -) -> None: - """Test device error with Rest API protocol.""" - mock_pyiskra_rest.side_effect = s_effect - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, - ) - - # Test if error returned - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": reason} - - # Remove side effect - mock_pyiskra_rest.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, - ) - - # Test successful Rest API configuration - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == SERIAL - assert result["title"] == SG_MODEL - assert result["data"] == {CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"} diff --git a/tests/components/islamic_prayer_times/conftest.py b/tests/components/islamic_prayer_times/conftest.py index ae0b6741fdf..ae9b1f45eb9 100644 --- a/tests/components/islamic_prayer_times/conftest.py +++ b/tests/components/islamic_prayer_times/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the islamic_prayer_times tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/islamic_prayer_times/test_config_flow.py b/tests/components/islamic_prayer_times/test_config_flow.py index 
695be636a84..cb37a6b147d 100644 --- a/tests/components/islamic_prayer_times/test_config_flow.py +++ b/tests/components/islamic_prayer_times/test_config_flow.py @@ -3,6 +3,7 @@ import pytest from homeassistant import config_entries +from homeassistant.components import islamic_prayer_times from homeassistant.components.islamic_prayer_times.const import ( CONF_CALC_METHOD, CONF_LAT_ADJ_METHOD, @@ -23,7 +24,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") async def test_flow_works(hass: HomeAssistant) -> None: """Test user config.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + islamic_prayer_times.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -75,7 +76,7 @@ async def test_integration_already_configured(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + islamic_prayer_times.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/islamic_prayer_times/test_init.py b/tests/components/islamic_prayer_times/test_init.py index 7961b79676b..025a202e6da 100644 --- a/tests/components/islamic_prayer_times/test_init.py +++ b/tests/components/islamic_prayer_times/test_init.py @@ -6,7 +6,8 @@ from unittest.mock import patch from freezegun import freeze_time import pytest -from homeassistant.components.islamic_prayer_times.const import CONF_CALC_METHOD, DOMAIN +from homeassistant.components import islamic_prayer_times +from homeassistant.components.islamic_prayer_times.const import CONF_CALC_METHOD from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE @@ -29,7 +30,7 @@ async def test_successful_config_entry(hass: HomeAssistant) -> None: """Test that Islamic Prayer Times is configured successfully.""" entry = MockConfigEntry( - domain=DOMAIN, + domain=islamic_prayer_times.DOMAIN, data={}, ) entry.add_to_hass(hass) @@ -47,7 +48,7 @@ async def test_successful_config_entry(hass: HomeAssistant) -> None: async def test_unload_entry(hass: HomeAssistant) -> None: """Test removing Islamic Prayer Times.""" entry = MockConfigEntry( - domain=DOMAIN, + domain=islamic_prayer_times.DOMAIN, data={}, ) entry.add_to_hass(hass) @@ -65,7 +66,7 @@ async def test_unload_entry(hass: HomeAssistant) -> None: async def test_options_listener(hass: HomeAssistant) -> None: """Ensure updating options triggers a coordinator refresh.""" - entry = MockConfigEntry(domain=DOMAIN, data={}) + entry = MockConfigEntry(domain=islamic_prayer_times.DOMAIN, data={}) entry.add_to_hass(hass) with ( @@ -109,13 +110,13 @@ async def test_migrate_unique_id( old_unique_id: str, ) -> None: """Test unique id migration.""" - entry = MockConfigEntry(domain=DOMAIN, data={}) + entry = MockConfigEntry(domain=islamic_prayer_times.DOMAIN, data={}) entry.add_to_hass(hass) entity: er.RegistryEntry = entity_registry.async_get_or_create( suggested_object_id=object_id, domain=SENSOR_DOMAIN, - platform=DOMAIN, + platform=islamic_prayer_times.DOMAIN, unique_id=old_unique_id, config_entry=entry, ) @@ -139,7 +140,7 @@ async def test_migrate_unique_id( async def test_migration_from_1_1_to_1_2(hass: HomeAssistant) -> None: """Test migrating from 
version 1.1 to 1.2.""" entry = MockConfigEntry( - domain=DOMAIN, + domain=islamic_prayer_times.DOMAIN, data={}, ) entry.add_to_hass(hass) @@ -163,7 +164,7 @@ async def test_migration_from_1_1_to_1_2(hass: HomeAssistant) -> None: async def test_update_scheduling(hass: HomeAssistant) -> None: """Test that integration schedules update immediately after Islamic midnight.""" - entry = MockConfigEntry(domain=DOMAIN, data={}) + entry = MockConfigEntry(domain=islamic_prayer_times.DOMAIN, data={}) entry.add_to_hass(hass) with ( diff --git a/tests/components/israel_rail/__init__.py b/tests/components/israel_rail/__init__.py deleted file mode 100644 index 23cf9f5a821..00000000000 --- a/tests/components/israel_rail/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Tests for the israel_rail component.""" - -from datetime import timedelta - -from freezegun.api import FrozenDateTimeFactory - -from homeassistant.components.israel_rail.const import DEFAULT_SCAN_INTERVAL -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry, async_fire_time_changed - - -async def init_integration( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Set up the israel rail integration in Home Assistant.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - -async def goto_future(hass: HomeAssistant, freezer: FrozenDateTimeFactory): - """Move to future.""" - freezer.tick(DEFAULT_SCAN_INTERVAL + timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() diff --git a/tests/components/israel_rail/conftest.py b/tests/components/israel_rail/conftest.py deleted file mode 100644 index 07a101d40c7..00000000000 --- a/tests/components/israel_rail/conftest.py +++ /dev/null @@ -1,137 +0,0 @@ -"""Configuration for Israel rail tests.""" - -from collections.abc import Generator -from datetime import datetime -from unittest.mock import AsyncMock, patch -from zoneinfo import ZoneInfo - -from israelrailapi.api import TrainRoute -import pytest - -from homeassistant.components.israel_rail import CONF_DESTINATION, CONF_START, DOMAIN - -from tests.common import MockConfigEntry - -VALID_CONFIG = { - CONF_START: "באר יעקב", - CONF_DESTINATION: "אשקלון", -} - -SOURCE_DEST = "באר יעקב אשקלון" - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.israel_rail.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Return the default mocked config entry.""" - return MockConfigEntry( - domain=DOMAIN, - data=VALID_CONFIG, - unique_id=SOURCE_DEST, - ) - - -@pytest.fixture -def mock_israelrail() -> AsyncMock: - """Build a fixture for the Israel rail API.""" - with ( - patch( - "homeassistant.components.israel_rail.TrainSchedule", - autospec=True, - ) as mock_client, - patch( - "homeassistant.components.israel_rail.config_flow.TrainSchedule", - new=mock_client, - ), - ): - client = mock_client.return_value - client.query.return_value = TRAINS - - yield client - - -def get_time(hour: int, minute: int) -> str: - """Return a time in isoformat.""" - return datetime(2021, 10, 10, hour, minute, 10, tzinfo=ZoneInfo("UTC")).isoformat() - - -def get_train_route( - train_number: str = "1234", - departure_time: str = "2021-10-10T10:10:10", - arrival_time: str = "2021-10-10T10:10:10", - origin_platform: str 
= "1", - dest_platform: str = "2", - origin_station: str = "3500", - destination_station: str = "3700", -) -> TrainRoute: - """Build a TrainRoute of the israelrail API.""" - return TrainRoute( - [ - { - "orignStation": origin_station, - "destinationStation": destination_station, - "departureTime": departure_time, - "arrivalTime": arrival_time, - "originPlatform": origin_platform, - "destPlatform": dest_platform, - "trainNumber": train_number, - } - ] - ) - - -TRAINS = [ - get_train_route( - train_number="1234", - departure_time=get_time(10, 10), - arrival_time=get_time(10, 30), - origin_platform="1", - dest_platform="2", - origin_station="3500", - destination_station="3700", - ), - get_train_route( - train_number="1235", - departure_time=get_time(10, 20), - arrival_time=get_time(10, 40), - origin_platform="1", - dest_platform="2", - origin_station="3500", - destination_station="3700", - ), - get_train_route( - train_number="1236", - departure_time=get_time(10, 30), - arrival_time=get_time(10, 50), - origin_platform="1", - dest_platform="2", - origin_station="3500", - destination_station="3700", - ), - get_train_route( - train_number="1237", - departure_time=get_time(10, 40), - arrival_time=get_time(11, 00), - origin_platform="1", - dest_platform="2", - origin_station="3500", - destination_station="3700", - ), - get_train_route( - train_number="1238", - departure_time=get_time(10, 50), - arrival_time=get_time(11, 10), - origin_platform="1", - dest_platform="2", - origin_station="3500", - destination_station="3700", - ), -] diff --git a/tests/components/israel_rail/snapshots/test_sensor.ambr b/tests/components/israel_rail/snapshots/test_sensor.ambr deleted file mode 100644 index f851f1cd726..00000000000 --- a/tests/components/israel_rail/snapshots/test_sensor.ambr +++ /dev/null @@ -1,286 +0,0 @@ -# serializer version: 1 -# name: test_valid_config[sensor.mock_title_departure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_departure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Departure', - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'departure0', - 'unique_id': 'באר יעקב אשקלון_departure', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_departure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Departure', - }), - 'context': , - 'entity_id': 'sensor.mock_title_departure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-10-10T10:10:10+00:00', - }) -# --- -# name: test_valid_config[sensor.mock_title_departure_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_departure_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': , - 'original_icon': None, - 'original_name': 'Departure +1', - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'departure1', - 'unique_id': 'באר יעקב אשקלון_departure1', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_departure_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Departure +1', - }), - 'context': , - 'entity_id': 'sensor.mock_title_departure_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-10-10T10:20:10+00:00', - }) -# --- -# name: test_valid_config[sensor.mock_title_departure_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_departure_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Departure +2', - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'departure2', - 'unique_id': 'באר יעקב אשקלון_departure2', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_departure_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Departure +2', - }), - 'context': , - 'entity_id': 'sensor.mock_title_departure_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-10-10T10:30:10+00:00', - }) -# --- -# name: test_valid_config[sensor.mock_title_platform-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_platform', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Platform', - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'platform', - 'unique_id': 'באר יעקב אשקלון_platform', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_platform-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'friendly_name': 'Mock Title Platform', - }), - 'context': , - 'entity_id': 'sensor.mock_title_platform', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_valid_config[sensor.mock_title_train_number-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_train_number', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'Train number', - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'train_number', - 'unique_id': 'באר יעקב אשקלון_train_number', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_train_number-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'friendly_name': 'Mock Title Train number', - }), - 'context': , - 'entity_id': 'sensor.mock_title_train_number', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1234', - }) -# --- -# name: test_valid_config[sensor.mock_title_trains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_trains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Trains', - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'trains', - 'unique_id': 'באר יעקב אשקלון_trains', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_trains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'friendly_name': 'Mock Title Trains', - }), - 'context': , - 'entity_id': 'sensor.mock_title_trains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- diff --git a/tests/components/israel_rail/test_config_flow.py b/tests/components/israel_rail/test_config_flow.py deleted file mode 100644 index a27d9b3420b..00000000000 --- a/tests/components/israel_rail/test_config_flow.py +++ /dev/null @@ -1,87 +0,0 @@ -"""Define tests for the israel rail config flow.""" - -from unittest.mock import AsyncMock - -from homeassistant.components.israel_rail import CONF_DESTINATION, CONF_START, DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .conftest import VALID_CONFIG - -from tests.common import MockConfigEntry - - -async def test_create_entry( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_israelrail: AsyncMock -) -> None: - """Test that the user step works.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - VALID_CONFIG, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "באר יעקב אשקלון" - assert result["data"] == { - CONF_START: "באר יעקב", - CONF_DESTINATION: "אשקלון", - } - - -async def test_flow_fails( - hass: HomeAssistant, - mock_israelrail: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test that the user step fails.""" - mock_israelrail.query.side_effect = Exception("error") - failed_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data=VALID_CONFIG, - ) - - assert failed_result["errors"] == {"base": "unknown"} - assert failed_result["type"] is 
FlowResultType.FORM - - mock_israelrail.query.side_effect = None - - result = await hass.config_entries.flow.async_configure( - failed_result["flow_id"], - VALID_CONFIG, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "באר יעקב אשקלון" - assert result["data"] == { - CONF_START: "באר יעקב", - CONF_DESTINATION: "אשקלון", - } - - -async def test_flow_already_configured( - hass: HomeAssistant, - mock_israelrail: AsyncMock, - mock_config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, -) -> None: - """Test that the user step fails when the entry is already configured.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - result_aborted = await hass.config_entries.flow.async_configure( - result["flow_id"], - VALID_CONFIG, - ) - - assert result_aborted["type"] is FlowResultType.ABORT - assert result_aborted["reason"] == "already_configured" diff --git a/tests/components/israel_rail/test_init.py b/tests/components/israel_rail/test_init.py deleted file mode 100644 index c4dd4e5721e..00000000000 --- a/tests/components/israel_rail/test_init.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Test init of israel_rail integration.""" - -from unittest.mock import AsyncMock - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from . import init_integration - -from tests.common import MockConfigEntry - - -async def test_invalid_config( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_israelrail: AsyncMock, -) -> None: - """Ensure nothing is created when config is wrong.""" - mock_israelrail.query.side_effect = Exception("error") - await init_integration(hass, mock_config_entry) - assert not hass.states.async_entity_ids("sensor") - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/israel_rail/test_sensor.py b/tests/components/israel_rail/test_sensor.py deleted file mode 100644 index 85b7328742f..00000000000 --- a/tests/components/israel_rail/test_sensor.py +++ /dev/null @@ -1,68 +0,0 @@ -"""Tests for the israel_rail sensor.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock - -from freezegun.api import FrozenDateTimeFactory -from syrupy import SnapshotAssertion - -from homeassistant.const import STATE_UNAVAILABLE -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import goto_future, init_integration -from .conftest import TRAINS, get_time - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_valid_config( - hass: HomeAssistant, - mock_israelrail: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Ensure everything starts correctly.""" - await init_integration(hass, mock_config_entry) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_update_train( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_israelrail: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Ensure the train data is updated.""" - await init_integration(hass, mock_config_entry) - assert len(hass.states.async_entity_ids()) == 6 - departure_sensor = hass.states.get("sensor.mock_title_departure") - expected_time = get_time(10, 10) - assert departure_sensor.state == expected_time - - mock_israelrail.query.return_value = TRAINS[1:] - - await goto_future(hass, freezer) - - assert len(hass.states.async_entity_ids()) == 6 - departure_sensor = hass.states.get("sensor.mock_title_departure") - expected_time = get_time(10, 20) - assert departure_sensor.state == expected_time - - -async def test_fail_query( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_israelrail: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Ensure the integration handles query failures.""" - await init_integration(hass, mock_config_entry) - assert len(hass.states.async_entity_ids()) == 6 - mock_israelrail.query.side_effect = Exception("error") - await goto_future(hass, freezer) - assert len(hass.states.async_entity_ids()) == 6 - departure_sensor = hass.states.get("sensor.mock_title_departure") - assert departure_sensor.state == STATE_UNAVAILABLE diff --git a/tests/components/ista_ecotrend/conftest.py b/tests/components/ista_ecotrend/conftest.py index 7edf2e4717b..2218ef05ba7 100644 --- a/tests/components/ista_ecotrend/conftest.py +++ b/tests/components/ista_ecotrend/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the ista EcoTrend tests.""" -from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.ista_ecotrend.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD @@ -166,52 +166,3 @@ def get_consumption_data(obj_uuid: str | None = None) -> dict[str, Any]: }, ], } - - -def extend_statistics(obj_uuid: str | None = None) -> dict[str, Any]: - """Extend statistics data with new values.""" - stats = get_consumption_data(obj_uuid) - - stats["costs"].insert( - 0, - { - "date": {"month": 6, "year": 2024}, - "costsByEnergyType": [ - { - "type": "heating", - "value": 9000, - }, - { - "type": "warmwater", - "value": 9000, - }, - { - "type": "water", - "value": 9000, - }, - ], - }, - ) - stats["consumptions"].insert( - 0, - { - "date": {"month": 6, "year": 2024}, - "readings": [ - { - "type": "heating", - "value": "9000", - "additionalValue": "9000,0", - }, - { - "type": "warmwater", - "value": "9999,0", - "additionalValue": "90000,0", - }, - { - "type": "water", - "value": "9000,0", - }, - ], - }, - ) - return stats diff --git a/tests/components/ista_ecotrend/snapshots/test_init.ambr b/tests/components/ista_ecotrend/snapshots/test_init.ambr index c84d55c059c..c5dec7d9d56 100644 --- 
a/tests/components/ista_ecotrend/snapshots/test_init.ambr +++ b/tests/components/ista_ecotrend/snapshots/test_init.ambr @@ -21,7 +21,6 @@ }), 'manufacturer': 'ista SE', 'model': 'ista EcoTrend', - 'model_id': None, 'name': 'Luxemburger Str. 1', 'name_by_user': None, 'primary_config_entry': , @@ -53,7 +52,6 @@ }), 'manufacturer': 'ista SE', 'model': 'ista EcoTrend', - 'model_id': None, 'name': 'Bahnhofsstr. 1A', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/ista_ecotrend/snapshots/test_sensor.ambr b/tests/components/ista_ecotrend/snapshots/test_sensor.ambr index b5056019c74..c312f9b6350 100644 --- a/tests/components/ista_ecotrend/snapshots/test_sensor.ambr +++ b/tests/components/ista_ecotrend/snapshots/test_sensor.ambr @@ -1,12 +1,70 @@ # serializer version: 1 +# name: test_setup.32 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'https://ecotrend.ista.de/', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'ista_ecotrend', + '26e93f1a-c828-11ea-87d0-0242ac130003', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'ista SE', + 'model': 'ista EcoTrend', + 'name': 'Luxemburger Str. 1', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_setup.33 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'https://ecotrend.ista.de/', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'ista_ecotrend', + 'eaf5c5c8-889f-4a3c-b68c-e9a676505762', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'ista SE', + 'model': 'ista EcoTrend', + 'name': 'Bahnhofsstr. 1A', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- # name: test_setup[sensor.bahnhofsstr_1a_heating-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), + 'capabilities': None, 'config_entry_id': , 'device_class': None, 'device_id': , @@ -34,15 +92,13 @@ 'supported_features': 0, 'translation_key': , 'unique_id': 'eaf5c5c8-889f-4a3c-b68c-e9a676505762_heating', - 'unit_of_measurement': 'units', + 'unit_of_measurement': None, }) # --- # name: test_setup[sensor.bahnhofsstr_1a_heating-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Bahnhofsstr. 1A Heating', - 'state_class': , - 'unit_of_measurement': 'units', }), 'context': , 'entity_id': 'sensor.bahnhofsstr_1a_heating', @@ -435,9 +491,7 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), + 'capabilities': None, 'config_entry_id': , 'device_class': None, 'device_id': , @@ -465,15 +519,13 @@ 'supported_features': 0, 'translation_key': , 'unique_id': '26e93f1a-c828-11ea-87d0-0242ac130003_heating', - 'unit_of_measurement': 'units', + 'unit_of_measurement': None, }) # --- # name: test_setup[sensor.luxemburger_str_1_heating-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Luxemburger Str. 
1 Heating', - 'state_class': , - 'unit_of_measurement': 'units', }), 'context': , 'entity_id': 'sensor.luxemburger_str_1_heating', diff --git a/tests/components/ista_ecotrend/snapshots/test_statistics.ambr b/tests/components/ista_ecotrend/snapshots/test_statistics.ambr deleted file mode 100644 index 78ecd6a6b6b..00000000000 --- a/tests/components/ista_ecotrend/snapshots/test_statistics.ambr +++ /dev/null @@ -1,609 +0,0 @@ -# serializer version: 1 -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 104.0, - 'sum': 104.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 35.0, - 'sum': 139.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 104.0, - 'sum': 104.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 35.0, - 'sum': 139.0, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9000.0, - 'sum': 9139.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_cost_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 62.0, - 'sum': 62.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 21.0, - 'sum': 83.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_cost_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 62.0, - 'sum': 62.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 21.0, - 'sum': 83.0, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9000.0, - 'sum': 9083.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_energy_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 113.0, - 'sum': 113.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 38.0, - 'sum': 151.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_energy_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 113.0, - 'sum': 113.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 38.0, - 'sum': 151.0, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9000.0, - 'sum': 9151.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 1.1, - 'sum': 1.1, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 1.0, - 'sum': 2.1, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 1.1, - 'sum': 1.1, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 1.0, - 'sum': 2.1, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9999.0, - 'sum': 10001.1, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_cost_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 7.0, - 'sum': 7.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 7.0, - 'sum': 14.0, - }), - ]) -# --- -# name: 
test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_cost_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 7.0, - 'sum': 7.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 7.0, - 'sum': 14.0, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9000.0, - 'sum': 9014.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_energy_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 61.1, - 'sum': 61.1, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 57.0, - 'sum': 118.1, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_energy_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 61.1, - 'sum': 61.1, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 57.0, - 'sum': 118.1, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 90000.0, - 'sum': 90118.1, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 6.8, - 'sum': 6.8, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 5.0, - 'sum': 11.8, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 6.8, - 'sum': 6.8, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 5.0, - 'sum': 11.8, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9000.0, - 'sum': 9011.8, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_cost_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 2.0, - 'sum': 2.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 3.0, - 'sum': 5.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_cost_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 2.0, - 'sum': 2.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 3.0, - 'sum': 5.0, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9000.0, - 'sum': 9005.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 104.0, - 'sum': 104.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 35.0, - 'sum': 139.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 104.0, - 'sum': 104.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 35.0, - 'sum': 139.0, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9000.0, - 'sum': 9139.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_cost_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 62.0, - 'sum': 62.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 21.0, - 'sum': 83.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_cost_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 
1711954800.0, - 'state': 62.0, - 'sum': 62.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 21.0, - 'sum': 83.0, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9000.0, - 'sum': 9083.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_energy_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 113.0, - 'sum': 113.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 38.0, - 'sum': 151.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_energy_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 113.0, - 'sum': 113.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 38.0, - 'sum': 151.0, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9000.0, - 'sum': 9151.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 1.1, - 'sum': 1.1, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 1.0, - 'sum': 2.1, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 1.1, - 'sum': 1.1, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 1.0, - 'sum': 2.1, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9999.0, - 'sum': 10001.1, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_cost_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 7.0, - 'sum': 7.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 7.0, - 'sum': 14.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_cost_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 7.0, - 'sum': 7.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 7.0, - 'sum': 14.0, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9000.0, - 'sum': 9014.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_energy_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 61.1, - 'sum': 61.1, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 57.0, - 'sum': 118.1, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_energy_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 61.1, - 'sum': 61.1, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 57.0, - 'sum': 118.1, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 90000.0, - 'sum': 90118.1, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 6.8, - 'sum': 6.8, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 5.0, - 'sum': 11.8, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 6.8, - 'sum': 6.8, - }), - dict({ - 'end': 1717225200.0, - 'start': 
1714546800.0, - 'state': 5.0, - 'sum': 11.8, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9000.0, - 'sum': 9011.8, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_cost_2months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 2.0, - 'sum': 2.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 3.0, - 'sum': 5.0, - }), - ]) -# --- -# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_cost_3months] - list([ - dict({ - 'end': 1714546800.0, - 'start': 1711954800.0, - 'state': 2.0, - 'sum': 2.0, - }), - dict({ - 'end': 1717225200.0, - 'start': 1714546800.0, - 'state': 3.0, - 'sum': 5.0, - }), - dict({ - 'end': 1719817200.0, - 'start': 1717225200.0, - 'state': 9000.0, - 'sum': 9005.0, - }), - ]) -# --- diff --git a/tests/components/ista_ecotrend/test_config_flow.py b/tests/components/ista_ecotrend/test_config_flow.py index d6c88c51c99..b702b0331e8 100644 --- a/tests/components/ista_ecotrend/test_config_flow.py +++ b/tests/components/ista_ecotrend/test_config_flow.py @@ -6,7 +6,7 @@ from pyecotrend_ista import LoginError, ServerError import pytest from homeassistant.components.ista_ecotrend.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -98,7 +98,15 @@ async def test_reauth( ista_config_entry.add_to_hass(hass) - result = await ista_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": ista_config_entry.entry_id, + "unique_id": ista_config_entry.unique_id, + }, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -140,7 +148,15 @@ async def test_reauth_error_and_recover( ista_config_entry.add_to_hass(hass) - result = await ista_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": ista_config_entry.entry_id, + "unique_id": ista_config_entry.unique_id, + }, + ) + assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/ista_ecotrend/test_statistics.py b/tests/components/ista_ecotrend/test_statistics.py deleted file mode 100644 index 21877f686df..00000000000 --- a/tests/components/ista_ecotrend/test_statistics.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Tests for the ista EcoTrend Statistics import.""" - -import datetime -from unittest.mock import MagicMock - -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.recorder.statistics import statistics_during_period -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .conftest import extend_statistics - -from tests.common import MockConfigEntry, async_fire_time_changed -from tests.components.recorder.common import async_wait_recording_done - - -@pytest.mark.usefixtures("recorder_mock", "entity_registry_enabled_by_default") -async def test_statistics_import( - hass: HomeAssistant, - ista_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - 
mock_ista: MagicMock, - snapshot: SnapshotAssertion, - freezer: FrozenDateTimeFactory, -) -> None: - """Test setup of ista EcoTrend sensor platform.""" - - ista_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(ista_config_entry.entry_id) - await hass.async_block_till_done() - - assert ista_config_entry.state is ConfigEntryState.LOADED - entities = er.async_entries_for_config_entry( - entity_registry, ista_config_entry.entry_id - ) - await async_wait_recording_done(hass) - - # Test that consumption statistics for 2 months have been added - for entity in entities: - statistic_id = f"ista_ecotrend:{entity.entity_id.removeprefix("sensor.")}" - stats = await hass.async_add_executor_job( - statistics_during_period, - hass, - datetime.datetime.fromtimestamp(0, tz=datetime.UTC), - None, - {statistic_id}, - "month", - None, - {"state", "sum"}, - ) - assert stats[statistic_id] == snapshot(name=f"{statistic_id}_2months") - assert len(stats[statistic_id]) == 2 - - # Add another monthly consumption and forward - # 1 day and test if the new values have been - # appended to the statistics - mock_ista.get_consumption_data = extend_statistics - - freezer.tick(datetime.timedelta(days=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - await async_wait_recording_done(hass) - freezer.tick(datetime.timedelta(days=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - await async_wait_recording_done(hass) - - for entity in entities: - statistic_id = f"ista_ecotrend:{entity.entity_id.removeprefix("sensor.")}" - stats = await hass.async_add_executor_job( - statistics_during_period, - hass, - datetime.datetime.fromtimestamp(0, tz=datetime.UTC), - None, - {statistic_id}, - "month", - None, - {"state", "sum"}, - ) - assert stats[statistic_id] == snapshot(name=f"{statistic_id}_3months") - - assert len(stats[statistic_id]) == 3 diff --git a/tests/components/isy994/test_config_flow.py b/tests/components/isy994/test_config_flow.py index 2bc1fff222f..411439e2e70 100644 --- a/tests/components/isy994/test_config_flow.py +++ b/tests/components/isy994/test_config_flow.py @@ -644,7 +644,10 @@ async def test_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH, "unique_id": MOCK_UUID}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -698,16 +701,3 @@ async def test_reauth(hass: HomeAssistant) -> None: assert mock_setup_entry.called assert result4["type"] is FlowResultType.ABORT assert result4["reason"] == "reauth_successful" - - -async def test_options_flow(hass: HomeAssistant) -> None: - """Test option flow.""" - entry = MockConfigEntry(domain=DOMAIN) - entry.add_to_hass(hass) - - result = await hass.config_entries.options.async_init(entry.entry_id) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - # This should be improved at a later stage to increase test coverage - hass.config_entries.options.async_abort(result["flow_id"]) diff --git a/tests/components/izone/test_config_flow.py b/tests/components/izone/test_config_flow.py index 3c9707b34c6..6591e402ec2 100644 --- a/tests/components/izone/test_config_flow.py +++ b/tests/components/izone/test_config_flow.py @@ -1,7 +1,5 @@ """Tests for iZone.""" -from collections.abc import Callable -from typing import Any from unittest.mock import Mock, patch import pytest 
@@ -14,7 +12,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send @pytest.fixture -def mock_disco() -> Mock: +def mock_disco(): """Mock discovery service.""" disco = Mock() disco.pi_disco = Mock() @@ -22,15 +20,15 @@ def mock_disco() -> Mock: return disco -def _mock_start_discovery(hass: HomeAssistant, mock_disco: Mock) -> Callable[..., Mock]: - def do_disovered(*args: Any) -> Mock: +def _mock_start_discovery(hass, mock_disco): + def do_disovered(*args): async_dispatcher_send(hass, DISPATCH_CONTROLLER_DISCOVERED, True) return mock_disco return do_disovered -async def test_not_found(hass: HomeAssistant, mock_disco: Mock) -> None: +async def test_not_found(hass: HomeAssistant, mock_disco) -> None: """Test not finding iZone controller.""" with ( @@ -58,7 +56,7 @@ async def test_not_found(hass: HomeAssistant, mock_disco: Mock) -> None: stop_disco.assert_called_once() -async def test_found(hass: HomeAssistant, mock_disco: Mock) -> None: +async def test_found(hass: HomeAssistant, mock_disco) -> None: """Test not finding iZone controller.""" mock_disco.pi_disco.controllers["blah"] = object() diff --git a/tests/components/jellyfin/conftest.py b/tests/components/jellyfin/conftest.py index c3732714177..40d03212ceb 100644 --- a/tests/components/jellyfin/conftest.py +++ b/tests/components/jellyfin/conftest.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, create_autospec, patch from jellyfin_apiclient_python import JellyfinClient @@ -10,6 +9,7 @@ from jellyfin_apiclient_python.api import API from jellyfin_apiclient_python.configuration import Config from jellyfin_apiclient_python.connection_manager import ConnectionManager import pytest +from typing_extensions import Generator from homeassistant.components.jellyfin.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME diff --git a/tests/components/jellyfin/test_config_flow.py b/tests/components/jellyfin/test_config_flow.py index a8ffbcbf46c..c84a12d26a5 100644 --- a/tests/components/jellyfin/test_config_flow.py +++ b/tests/components/jellyfin/test_config_flow.py @@ -222,7 +222,14 @@ async def test_reauth( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + }, + data=USER_INPUT, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -265,7 +272,14 @@ async def test_reauth_cannot_connect( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + }, + data=USER_INPUT, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -325,7 +339,14 @@ async def test_reauth_invalid( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, 
+ }, + data=USER_INPUT, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -379,7 +400,14 @@ async def test_reauth_exception( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + }, + data=USER_INPUT, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/jellyfin/test_init.py b/tests/components/jellyfin/test_init.py index 1af59737296..51d7af2ae94 100644 --- a/tests/components/jellyfin/test_init.py +++ b/tests/components/jellyfin/test_init.py @@ -68,10 +68,12 @@ async def test_load_unload_config_entry( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() + assert mock_config_entry.entry_id in hass.data[DOMAIN] assert mock_config_entry.state is ConfigEntryState.LOADED await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() + assert mock_config_entry.entry_id not in hass.data[DOMAIN] assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/jellyfin/test_media_source.py b/tests/components/jellyfin/test_media_source.py index 2aca59a4d26..a57d51de1f1 100644 --- a/tests/components/jellyfin/test_media_source.py +++ b/tests/components/jellyfin/test_media_source.py @@ -6,7 +6,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.jellyfin.const import DOMAIN -from homeassistant.components.media_player import BrowseError +from homeassistant.components.media_player.errors import BrowseError from homeassistant.components.media_source import ( DOMAIN as MEDIA_SOURCE_DOMAIN, URI_SCHEME, diff --git a/tests/components/jellyfin/test_remote.py b/tests/components/jellyfin/test_remote.py deleted file mode 100644 index 38390eabdcc..00000000000 --- a/tests/components/jellyfin/test_remote.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Tests for the Jellyfin remote platform.""" - -from unittest.mock import MagicMock - -from homeassistant.components.remote import ( - ATTR_COMMAND, - ATTR_DELAY_SECS, - ATTR_HOLD_SECS, - ATTR_NUM_REPEATS, - DOMAIN as R_DOMAIN, - SERVICE_SEND_COMMAND, -) -from homeassistant.const import ATTR_ENTITY_ID, STATE_ON -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er - -from tests.common import MockConfigEntry - - -async def test_remote( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - init_integration: MockConfigEntry, - mock_jellyfin: MagicMock, - mock_api: MagicMock, -) -> None: - """Test the Jellyfin remote.""" - state = hass.states.get("remote.jellyfin_device") - state2 = hass.states.get("remote.jellyfin_device_two") - state3 = hass.states.get("remote.jellyfin_device_three") - state4 = hass.states.get("remote.jellyfin_device_four") - - assert state - assert state2 - # Doesn't support remote control; remote not created - assert state3 is None - assert state4 - - assert state.state == STATE_ON - - -async def test_services( - hass: HomeAssistant, - init_integration: MockConfigEntry, - mock_jellyfin: MagicMock, - mock_api: MagicMock, -) -> None: - """Test Jellyfin remote services.""" - state = 
hass.states.get("remote.jellyfin_device") - assert state - - command = "Select" - await hass.services.async_call( - R_DOMAIN, - SERVICE_SEND_COMMAND, - { - ATTR_ENTITY_ID: state.entity_id, - ATTR_COMMAND: command, - ATTR_NUM_REPEATS: 1, - ATTR_DELAY_SECS: 0, - ATTR_HOLD_SECS: 0, - }, - blocking=True, - ) - assert len(mock_api.command.mock_calls) == 1 - assert mock_api.command.mock_calls[0].args == ( - "SESSION-UUID", - command, - ) - - command = "MoveLeft" - await hass.services.async_call( - R_DOMAIN, - SERVICE_SEND_COMMAND, - { - ATTR_ENTITY_ID: state.entity_id, - ATTR_COMMAND: command, - ATTR_NUM_REPEATS: 2, - ATTR_DELAY_SECS: 0, - ATTR_HOLD_SECS: 0, - }, - blocking=True, - ) - assert len(mock_api.command.mock_calls) == 3 - assert mock_api.command.mock_calls[1].args == ( - "SESSION-UUID", - command, - ) - assert mock_api.command.mock_calls[2].args == ( - "SESSION-UUID", - command, - ) diff --git a/tests/components/jellyfin/test_sensor.py b/tests/components/jellyfin/test_sensor.py index 82d42d7a27a..40a3e62a6c0 100644 --- a/tests/components/jellyfin/test_sensor.py +++ b/tests/components/jellyfin/test_sensor.py @@ -4,7 +4,12 @@ from unittest.mock import MagicMock from homeassistant.components.jellyfin.const import DOMAIN from homeassistant.components.sensor import ATTR_STATE_CLASS -from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, ATTR_ICON +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + ATTR_FRIENDLY_NAME, + ATTR_ICON, + ATTR_UNIT_OF_MEASUREMENT, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -19,12 +24,13 @@ async def test_watching( mock_jellyfin: MagicMock, ) -> None: """Test the Jellyfin watching sensor.""" - state = hass.states.get("sensor.jellyfin_server_active_clients") + state = hass.states.get("sensor.jellyfin_server") assert state assert state.attributes.get(ATTR_DEVICE_CLASS) is None - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "JELLYFIN-SERVER Active clients" + assert state.attributes.get(ATTR_FRIENDLY_NAME) == "JELLYFIN-SERVER" assert state.attributes.get(ATTR_ICON) is None assert state.attributes.get(ATTR_STATE_CLASS) is None + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Watching" assert state.state == "3" entry = entity_registry.async_get(state.entity_id) diff --git a/tests/components/jewish_calendar/__init__.py b/tests/components/jewish_calendar/__init__.py index 440bffc2256..60726fc3a3e 100644 --- a/tests/components/jewish_calendar/__init__.py +++ b/tests/components/jewish_calendar/__init__.py @@ -8,7 +8,7 @@ from freezegun import freeze_time as alter_time # noqa: F401 from homeassistant.components import jewish_calendar import homeassistant.util.dt as dt_util -_LatLng = namedtuple("_LatLng", ["lat", "lng"]) # noqa: PYI024 +_LatLng = namedtuple("_LatLng", ["lat", "lng"]) HDATE_DEFAULT_ALTITUDE = 754 NYC_LATLNG = _LatLng(40.7128, -74.0060) diff --git a/tests/components/jewish_calendar/conftest.py b/tests/components/jewish_calendar/conftest.py index 97909291f27..5e16289f473 100644 --- a/tests/components/jewish_calendar/conftest.py +++ b/tests/components/jewish_calendar/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the jewish_calendar tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.jewish_calendar.const import DEFAULT_NAME, DOMAIN diff --git a/tests/components/jewish_calendar/test_config_flow.py 
b/tests/components/jewish_calendar/test_config_flow.py index e00fe41749f..3189571a5a7 100644 --- a/tests/components/jewish_calendar/test_config_flow.py +++ b/tests/components/jewish_calendar/test_config_flow.py @@ -2,12 +2,13 @@ from unittest.mock import AsyncMock +import pytest + from homeassistant import config_entries, setup from homeassistant.components.jewish_calendar.const import ( CONF_CANDLE_LIGHT_MINUTES, CONF_DIASPORA, CONF_HAVDALAH_OFFSET_MINUTES, - DEFAULT_CANDLE_LIGHT, DEFAULT_DIASPORA, DEFAULT_LANGUAGE, DOMAIN, @@ -18,10 +19,12 @@ from homeassistant.const import ( CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE, + CONF_NAME, CONF_TIME_ZONE, ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -55,6 +58,49 @@ async def test_step_user(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No assert entries[0].data[CONF_TIME_ZONE] == hass.config.time_zone +@pytest.mark.parametrize("diaspora", [True, False]) +@pytest.mark.parametrize("language", ["hebrew", "english"]) +async def test_import_no_options(hass: HomeAssistant, language, diaspora) -> None: + """Test that the import step works.""" + conf = { + DOMAIN: {CONF_NAME: "test", CONF_LANGUAGE: language, CONF_DIASPORA: diaspora} + } + + assert await async_setup_component(hass, DOMAIN, conf.copy()) + await hass.async_block_till_done() + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + for entry_key, entry_val in entries[0].data.items(): + assert entry_val == conf[DOMAIN][entry_key] + + +async def test_import_with_options(hass: HomeAssistant) -> None: + """Test that the import step works.""" + conf = { + DOMAIN: { + CONF_NAME: "test", + CONF_DIASPORA: DEFAULT_DIASPORA, + CONF_LANGUAGE: DEFAULT_LANGUAGE, + CONF_CANDLE_LIGHT_MINUTES: 20, + CONF_HAVDALAH_OFFSET_MINUTES: 50, + CONF_LATITUDE: 31.76, + CONF_LONGITUDE: 35.235, + } + } + + # Simulate HomeAssistant setting up the component + assert await async_setup_component(hass, DOMAIN, conf.copy()) + await hass.async_block_till_done() + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + for entry_key, entry_val in entries[0].data.items(): + assert entry_val == conf[DOMAIN][entry_key] + for entry_key, entry_val in entries[0].options.items(): + assert entry_val == conf[DOMAIN][entry_key] + + async def test_single_instance_allowed( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -92,53 +138,3 @@ async def test_options(hass: HomeAssistant, mock_config_entry: MockConfigEntry) assert len(entries) == 1 assert entries[0].options[CONF_CANDLE_LIGHT_MINUTES] == 25 assert entries[0].options[CONF_HAVDALAH_OFFSET_MINUTES] == 34 - - -async def test_options_reconfigure( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Test that updating the options of the Jewish Calendar integration triggers a value update.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert CONF_CANDLE_LIGHT_MINUTES not in mock_config_entry.options - - # Update the CONF_CANDLE_LIGHT_MINUTES option to a new value - result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - CONF_CANDLE_LIGHT_MINUTES: DEFAULT_CANDLE_LIGHT + 1, - }, - ) - assert result["result"] - - # The 
value of the "upcoming_shabbat_candle_lighting" sensor should be the new value - assert ( - mock_config_entry.options[CONF_CANDLE_LIGHT_MINUTES] == DEFAULT_CANDLE_LIGHT + 1 - ) - - -async def test_reconfigure( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Test starting a reconfigure flow.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - # init user flow - result = await mock_config_entry.start_reconfigure_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" - - # success - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_DIASPORA: not DEFAULT_DIASPORA, - }, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - assert mock_config_entry.data[CONF_DIASPORA] is not DEFAULT_DIASPORA diff --git a/tests/components/jewish_calendar/test_init.py b/tests/components/jewish_calendar/test_init.py index cb982afec0f..b8454b41a60 100644 --- a/tests/components/jewish_calendar/test_init.py +++ b/tests/components/jewish_calendar/test_init.py @@ -1 +1,76 @@ """Tests for the Jewish Calendar component's init.""" + +from hdate import Location + +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSORS +from homeassistant.components.jewish_calendar import get_unique_prefix +from homeassistant.components.jewish_calendar.const import ( + CONF_CANDLE_LIGHT_MINUTES, + CONF_DIASPORA, + CONF_HAVDALAH_OFFSET_MINUTES, + DEFAULT_DIASPORA, + DEFAULT_LANGUAGE, + DOMAIN, +) +from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME +from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er +from homeassistant.setup import async_setup_component + + +async def test_import_unique_id_migration(hass: HomeAssistant) -> None: + """Test unique_id migration.""" + yaml_conf = { + DOMAIN: { + CONF_NAME: "test", + CONF_DIASPORA: DEFAULT_DIASPORA, + CONF_LANGUAGE: DEFAULT_LANGUAGE, + CONF_CANDLE_LIGHT_MINUTES: 20, + CONF_HAVDALAH_OFFSET_MINUTES: 50, + CONF_LATITUDE: 31.76, + CONF_LONGITUDE: 35.235, + } + } + + # Create an entry in the entity registry with the data from conf + ent_reg = er.async_get(hass) + location = Location( + latitude=yaml_conf[DOMAIN][CONF_LATITUDE], + longitude=yaml_conf[DOMAIN][CONF_LONGITUDE], + timezone=hass.config.time_zone, + diaspora=DEFAULT_DIASPORA, + ) + old_prefix = get_unique_prefix(location, DEFAULT_LANGUAGE, 20, 50) + sample_entity = ent_reg.async_get_or_create( + BINARY_SENSORS, + DOMAIN, + unique_id=f"{old_prefix}_erev_shabbat_hag", + suggested_object_id=f"{DOMAIN}_erev_shabbat_hag", + ) + # Save the existing unique_id, DEFAULT_LANGUAGE should be part of it + old_unique_id = sample_entity.unique_id + assert DEFAULT_LANGUAGE in old_unique_id + + # Simulate HomeAssistant setting up the component + assert await async_setup_component(hass, DOMAIN, yaml_conf.copy()) + await hass.async_block_till_done() + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + for entry_key, entry_val in entries[0].data.items(): + assert entry_val == yaml_conf[DOMAIN][entry_key] + for entry_key, entry_val in entries[0].options.items(): + assert entry_val == yaml_conf[DOMAIN][entry_key] + + # Assert that the unique_id was updated + new_unique_id = ent_reg.async_get(sample_entity.entity_id).unique_id + assert new_unique_id != 
old_unique_id + assert DEFAULT_LANGUAGE not in new_unique_id + + # Confirm that when the component is reloaded, the unique_id is not changed + assert ent_reg.async_get(sample_entity.entity_id).unique_id == new_unique_id + + # Confirm that all the unique_ids are prefixed correctly + await hass.config_entries.async_reload(entries[0].entry_id) + er_entries = er.async_entries_for_config_entry(ent_reg, entries[0].entry_id) + assert all(entry.unique_id.startswith(entries[0].entry_id) for entry in er_entries) diff --git a/tests/components/justnimbus/test_config_flow.py b/tests/components/justnimbus/test_config_flow.py index 330b05bf48c..f66693a752c 100644 --- a/tests/components/justnimbus/test_config_flow.py +++ b/tests/components/justnimbus/test_config_flow.py @@ -125,7 +125,14 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config.entry_id, + }, + data=FIXTURE_OLD_USER_INPUT, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/jvc_projector/conftest.py b/tests/components/jvc_projector/conftest.py index 3115cbfe252..dd012d3f355 100644 --- a/tests/components/jvc_projector/conftest.py +++ b/tests/components/jvc_projector/conftest.py @@ -1,9 +1,9 @@ """Fixtures for JVC Projector integration.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.jvc_projector.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT diff --git a/tests/components/jvc_projector/test_config_flow.py b/tests/components/jvc_projector/test_config_flow.py index d7eb0995bbd..282411540a4 100644 --- a/tests/components/jvc_projector/test_config_flow.py +++ b/tests/components/jvc_projector/test_config_flow.py @@ -6,7 +6,7 @@ from jvcprojector import JvcProjectorAuthError, JvcProjectorConnectError import pytest from homeassistant.components.jvc_projector.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -163,7 +163,14 @@ async def test_reauth_config_flow_success( hass: HomeAssistant, mock_device: AsyncMock, mock_integration: MockConfigEntry ) -> None: """Test reauth config flow success.""" - result = await mock_integration.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": mock_integration.entry_id, + }, + data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -187,7 +194,14 @@ async def test_reauth_config_flow_auth_error( """Test reauth config flow when connect fails.""" mock_device.connect.side_effect = JvcProjectorAuthError - result = await mock_integration.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": mock_integration.entry_id, + }, + data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] 
== "reauth_confirm" @@ -204,7 +218,14 @@ async def test_reauth_config_flow_auth_error( mock_device.connect.side_effect = None - result = await mock_integration.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": mock_integration.entry_id, + }, + data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -228,7 +249,14 @@ async def test_reauth_config_flow_connect_error( """Test reauth config flow when connect fails.""" mock_device.connect.side_effect = JvcProjectorConnectError - result = await mock_integration.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": mock_integration.entry_id, + }, + data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -245,7 +273,14 @@ async def test_reauth_config_flow_connect_error( mock_device.connect.side_effect = None - result = await mock_integration.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": mock_integration.entry_id, + }, + data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/jvc_projector/test_coordinator.py b/tests/components/jvc_projector/test_coordinator.py index b9211250aff..24297348653 100644 --- a/tests/components/jvc_projector/test_coordinator.py +++ b/tests/components/jvc_projector/test_coordinator.py @@ -5,6 +5,7 @@ from unittest.mock import AsyncMock from jvcprojector import JvcProjectorAuthError, JvcProjectorConnectError +from homeassistant.components.jvc_projector import DOMAIN from homeassistant.components.jvc_projector.coordinator import ( INTERVAL_FAST, INTERVAL_SLOW, @@ -28,7 +29,7 @@ async def test_coordinator_update( ) await hass.async_block_till_done() assert mock_device.get_state.call_count == 3 - coordinator = mock_integration.runtime_data + coordinator = hass.data[DOMAIN][mock_integration.entry_id] assert coordinator.update_interval == INTERVAL_SLOW @@ -68,5 +69,5 @@ async def test_coordinator_device_on( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - coordinator = mock_config_entry.runtime_data + coordinator = hass.data[DOMAIN][mock_config_entry.entry_id] assert coordinator.update_interval == INTERVAL_FAST diff --git a/tests/components/jvc_projector/test_init.py b/tests/components/jvc_projector/test_init.py index baf088a5dba..ef9de41ca32 100644 --- a/tests/components/jvc_projector/test_init.py +++ b/tests/components/jvc_projector/test_init.py @@ -38,6 +38,8 @@ async def test_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() + assert mock_config_entry.entry_id not in hass.data[DOMAIN] + async def test_config_entry_connect_error( hass: HomeAssistant, diff --git a/tests/components/kaleidescape/conftest.py b/tests/components/kaleidescape/conftest.py index e5aeedc3895..5cd2a8ebb18 100644 --- a/tests/components/kaleidescape/conftest.py +++ b/tests/components/kaleidescape/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Kaleidescape integration.""" -from collections.abc import Generator from unittest.mock 
import MagicMock, patch from kaleidescape import Dispatcher from kaleidescape.device import Automation, Movie, Power, System import pytest +from typing_extensions import Generator from homeassistant.components.kaleidescape.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/keymitt_ble/__init__.py b/tests/components/keymitt_ble/__init__.py index 6fa608ad3b4..1e717b805c5 100644 --- a/tests/components/keymitt_ble/__init__.py +++ b/tests/components/keymitt_ble/__init__.py @@ -53,7 +53,7 @@ SERVICE_INFO = BluetoothServiceInfoBleak( class MockMicroBotApiClient: """Mock MicroBotApiClient.""" - def __init__(self, device, token) -> None: + def __init__(self, device, token): """Mock init.""" async def connect(self, init): @@ -70,7 +70,7 @@ class MockMicroBotApiClient: class MockMicroBotApiClientFail: """Mock MicroBotApiClient.""" - def __init__(self, device, token) -> None: + def __init__(self, device, token): """Mock init.""" async def connect(self, init): diff --git a/tests/components/kira/test_init.py b/tests/components/kira/test_init.py index 8e6c70c83a4..e57519667ce 100644 --- a/tests/components/kira/test_init.py +++ b/tests/components/kira/test_init.py @@ -1,7 +1,6 @@ """The tests for Kira.""" import os -from pathlib import Path import shutil import tempfile from unittest.mock import patch @@ -77,9 +76,10 @@ async def test_kira_creates_codes(work_dir) -> None: assert os.path.exists(code_path), "Kira component didn't create codes file" -async def test_load_codes(hass: HomeAssistant, work_dir) -> None: +async def test_load_codes(work_dir) -> None: """Kira should ignore invalid codes.""" code_path = os.path.join(work_dir, "codes.yaml") - await hass.async_add_executor_job(Path(code_path).write_text, KIRA_CODES) + with open(code_path, "w", encoding="utf8") as code_file: + code_file.write(KIRA_CODES) res = kira.load_codes(code_path) assert len(res) == 1, "Expected exactly 1 valid Kira code" diff --git a/tests/components/kitchen_sink/snapshots/test_lawn_mower.ambr b/tests/components/kitchen_sink/snapshots/test_lawn_mower.ambr index e3e413c5a44..4189de18ce4 100644 --- a/tests/components/kitchen_sink/snapshots/test_lawn_mower.ambr +++ b/tests/components/kitchen_sink/snapshots/test_lawn_mower.ambr @@ -49,18 +49,6 @@ 'last_updated': , 'state': 'docked', }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mower can return', - 'supported_features': , - }), - 'context': , - 'entity_id': 'lawn_mower.mower_can_return', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'returning', - }), StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Mower is paused', diff --git a/tests/components/kitchen_sink/snapshots/test_switch.ambr b/tests/components/kitchen_sink/snapshots/test_switch.ambr index fe4311ad711..277b4888e05 100644 --- a/tests/components/kitchen_sink/snapshots/test_switch.ambr +++ b/tests/components/kitchen_sink/snapshots/test_switch.ambr @@ -67,7 +67,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'Outlet 1', 'name_by_user': None, 'primary_config_entry': , @@ -99,7 +98,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'Power strip with 2 sockets', 'name_by_user': None, 'primary_config_entry': , @@ -177,7 +175,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'Outlet 2', 'name_by_user': None, 'primary_config_entry': , @@ -209,7 +206,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'Power strip with 2 sockets', 
'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/kitchen_sink/test_config_flow.py b/tests/components/kitchen_sink/test_config_flow.py index 5f163d1342e..290167196cd 100644 --- a/tests/components/kitchen_sink/test_config_flow.py +++ b/tests/components/kitchen_sink/test_config_flow.py @@ -1,6 +1,6 @@ """Test the Everything but the Kitchen Sink config flow.""" -from collections.abc import Generator +from collections.abc import AsyncGenerator from unittest.mock import patch import pytest @@ -15,7 +15,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def no_platforms() -> Generator[None]: +async def no_platforms() -> AsyncGenerator[None, None]: """Don't enable any platforms.""" with patch( "homeassistant.components.kitchen_sink.COMPONENTS_WITH_DEMO_PLATFORM", diff --git a/tests/components/kitchen_sink/test_init.py b/tests/components/kitchen_sink/test_init.py index b832577a48a..1547a10bd2b 100644 --- a/tests/components/kitchen_sink/test_init.py +++ b/tests/components/kitchen_sink/test_init.py @@ -5,10 +5,9 @@ from http import HTTPStatus from unittest.mock import ANY import pytest -import voluptuous as vol from homeassistant.components.kitchen_sink import DOMAIN -from homeassistant.components.recorder import get_instance +from homeassistant.components.recorder import Recorder, get_instance from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -25,13 +24,14 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator @pytest.fixture -def mock_history(hass: HomeAssistant) -> None: +def mock_history(hass): """Mock history component loaded.""" hass.config.components.add("history") -@pytest.mark.usefixtures("recorder_mock", "mock_history") -async def test_demo_statistics(hass: HomeAssistant) -> None: +async def test_demo_statistics( + recorder_mock: Recorder, mock_history, hass: HomeAssistant +) -> None: """Test that the kitchen sink component makes some statistics available.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) await hass.async_block_till_done() @@ -63,8 +63,9 @@ async def test_demo_statistics(hass: HomeAssistant) -> None: } in statistic_ids -@pytest.mark.usefixtures("recorder_mock", "mock_history") -async def test_demo_statistics_growth(hass: HomeAssistant) -> None: +async def test_demo_statistics_growth( + recorder_mock: Recorder, mock_history, hass: HomeAssistant +) -> None: """Test that the kitchen sink sum statistics adds to the previous state.""" hass.config.units = US_CUSTOMARY_SYSTEM @@ -103,8 +104,8 @@ async def test_demo_statistics_growth(hass: HomeAssistant) -> None: @pytest.mark.freeze_time("2023-10-21") -@pytest.mark.usefixtures("mock_history") async def test_issues_created( + mock_history, hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, @@ -325,24 +326,3 @@ async def test_issues_created( }, ] } - - -async def test_service( - hass: HomeAssistant, -) -> None: - """Test we can call the service.""" - assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) - - with pytest.raises(vol.error.MultipleInvalid): - await hass.services.async_call(DOMAIN, "test_service_1", blocking=True) - - await hass.services.async_call( - DOMAIN, "test_service_1", {"field_1": 1, "field_2": "auto"}, blocking=True - ) - - await hass.services.async_call( - DOMAIN, - "test_service_1", - {"field_1": 1, "field_2": "auto", "field_3": 1, "field_4": "forwards"}, - blocking=True, - ) diff --git 
a/tests/components/kitchen_sink/test_lawn_mower.py b/tests/components/kitchen_sink/test_lawn_mower.py index 5bd4fc834f8..48914ab5a46 100644 --- a/tests/components/kitchen_sink/test_lawn_mower.py +++ b/tests/components/kitchen_sink/test_lawn_mower.py @@ -72,12 +72,6 @@ async def test_states(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: LawnMowerActivity.MOWING, LawnMowerActivity.DOCKED, ), - ( - "lawn_mower.mower_can_return", - SERVICE_DOCK, - LawnMowerActivity.RETURNING, - LawnMowerActivity.DOCKED, - ), ], ) async def test_mower( @@ -100,7 +94,7 @@ async def test_mower( await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == entity - assert state_changes[0].data["new_state"].state == next_activity.value + assert state_changes[0].data["new_state"].state == str(next_activity.value) @pytest.mark.parametrize( diff --git a/tests/components/kitchen_sink/test_lock.py b/tests/components/kitchen_sink/test_lock.py index a626cccd45c..e86300a4d35 100644 --- a/tests/components/kitchen_sink/test_lock.py +++ b/tests/components/kitchen_sink/test_lock.py @@ -11,9 +11,17 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - LockState, + STATE_LOCKED, + STATE_LOCKING, + STATE_UNLOCKED, + STATE_UNLOCKING, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + EVENT_STATE_CHANGED, + STATE_OPEN, + Platform, ) -from homeassistant.const import ATTR_ENTITY_ID, EVENT_STATE_CHANGED, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -50,7 +58,7 @@ async def test_states(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: async def test_locking(hass: HomeAssistant) -> None: """Test the locking of a lock.""" state = hass.states.get(UNLOCKED_LOCK) - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED await hass.async_block_till_done() state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) @@ -60,16 +68,16 @@ async def test_locking(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == UNLOCKED_LOCK - assert state_changes[0].data["new_state"].state == LockState.LOCKING + assert state_changes[0].data["new_state"].state == STATE_LOCKING assert state_changes[1].data["entity_id"] == UNLOCKED_LOCK - assert state_changes[1].data["new_state"].state == LockState.LOCKED + assert state_changes[1].data["new_state"].state == STATE_LOCKED async def test_unlocking(hass: HomeAssistant) -> None: """Test the unlocking of a lock.""" state = hass.states.get(LOCKED_LOCK) - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKED await hass.async_block_till_done() state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) @@ -79,10 +87,10 @@ async def test_unlocking(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert state_changes[0].data["entity_id"] == LOCKED_LOCK - assert state_changes[0].data["new_state"].state == LockState.UNLOCKING + assert state_changes[0].data["new_state"].state == STATE_UNLOCKING assert state_changes[1].data["entity_id"] == LOCKED_LOCK - assert state_changes[1].data["new_state"].state == LockState.UNLOCKED + assert state_changes[1].data["new_state"].state == STATE_UNLOCKED async def test_opening_mocked(hass: HomeAssistant) -> None: @@ -100,4 +108,4 @@ async def test_opening(hass: HomeAssistant) -> None: LOCK_DOMAIN, SERVICE_OPEN, {ATTR_ENTITY_ID: OPENABLE_LOCK}, blocking=True ) state = hass.states.get(OPENABLE_LOCK) - assert state.state == 
LockState.OPEN + assert state.state == STATE_OPEN diff --git a/tests/components/kitchen_sink/test_notify.py b/tests/components/kitchen_sink/test_notify.py index 12e19ffaa49..df025087b6b 100644 --- a/tests/components/kitchen_sink/test_notify.py +++ b/tests/components/kitchen_sink/test_notify.py @@ -1,10 +1,10 @@ """The tests for the demo button component.""" -from collections.abc import AsyncGenerator from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest +from typing_extensions import AsyncGenerator from homeassistant.components.kitchen_sink import DOMAIN from homeassistant.components.notify import ( diff --git a/tests/components/kitchen_sink/test_switch.py b/tests/components/kitchen_sink/test_switch.py index d006908e264..c744ba2be44 100644 --- a/tests/components/kitchen_sink/test_switch.py +++ b/tests/components/kitchen_sink/test_switch.py @@ -1,6 +1,5 @@ """The tests for the demo switch component.""" -from collections.abc import Generator from unittest.mock import patch import pytest @@ -21,7 +20,7 @@ SWITCH_ENTITY_IDS = ["switch.outlet_1", "switch.outlet_2"] @pytest.fixture -def switch_only() -> Generator[None]: +async def switch_only() -> None: """Enable only the switch platform.""" with patch( "homeassistant.components.kitchen_sink.COMPONENTS_WITH_DEMO_PLATFORM", @@ -31,7 +30,7 @@ def switch_only() -> Generator[None]: @pytest.fixture(autouse=True) -async def setup_comp(hass: HomeAssistant, switch_only: None) -> None: +async def setup_comp(hass, switch_only): """Set up demo component.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) await hass.async_block_till_done() @@ -56,7 +55,7 @@ async def test_state( @pytest.mark.parametrize("switch_entity_id", SWITCH_ENTITY_IDS) -async def test_turn_on(hass: HomeAssistant, switch_entity_id: str) -> None: +async def test_turn_on(hass: HomeAssistant, switch_entity_id) -> None: """Test switch turn on method.""" await hass.services.async_call( SWITCH_DOMAIN, @@ -80,7 +79,7 @@ async def test_turn_on(hass: HomeAssistant, switch_entity_id: str) -> None: @pytest.mark.parametrize("switch_entity_id", SWITCH_ENTITY_IDS) -async def test_turn_off(hass: HomeAssistant, switch_entity_id: str) -> None: +async def test_turn_off(hass: HomeAssistant, switch_entity_id) -> None: """Test switch turn off method.""" await hass.services.async_call( SWITCH_DOMAIN, diff --git a/tests/components/kmtronic/conftest.py b/tests/components/kmtronic/conftest.py index 11abd2a4d7b..5dc349508e3 100644 --- a/tests/components/kmtronic/conftest.py +++ b/tests/components/kmtronic/conftest.py @@ -1,9 +1,9 @@ """Define fixtures for kmtronic tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/knocki/conftest.py b/tests/components/knocki/conftest.py index 2fae89c730d..e1bc2e29cde 100644 --- a/tests/components/knocki/conftest.py +++ b/tests/components/knocki/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Knocki tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch from knocki import TokenResponse, Trigger import pytest +from typing_extensions import Generator from homeassistant.components.knocki.const import DOMAIN from homeassistant.const import CONF_TOKEN diff --git a/tests/components/knocki/test_config_flow.py b/tests/components/knocki/test_config_flow.py index 188175035da..baf43c3ad30 100644 --- a/tests/components/knocki/test_config_flow.py +++ 
b/tests/components/knocki/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from knocki import KnockiConnectionError, KnockiInvalidAuthError +from knocki import KnockiConnectionError import pytest from homeassistant.components.knocki.const import DOMAIN @@ -72,11 +72,7 @@ async def test_duplcate_entry( @pytest.mark.parametrize(("field"), ["login", "link"]) @pytest.mark.parametrize( ("exception", "error"), - [ - (KnockiConnectionError, "cannot_connect"), - (KnockiInvalidAuthError, "invalid_auth"), - (Exception, "unknown"), - ], + [(KnockiConnectionError, "cannot_connect"), (Exception, "unknown")], ) async def test_exceptions( hass: HomeAssistant, diff --git a/tests/components/knx/README.md b/tests/components/knx/README.md index ef8398b3d17..930b9e71c28 100644 --- a/tests/components/knx/README.md +++ b/tests/components/knx/README.md @@ -18,22 +18,21 @@ async def test_something(hass, knx): ## Asserting outgoing telegrams -All outgoing telegrams are appended to an assertion list. Assert them in order they were sent or pass `ignore_order=True` to the assertion method. +All outgoing telegrams are pushed to an assertion queue. Assert them in order they were sent. - `knx.assert_no_telegram` - Asserts that no telegram was sent (assertion list is empty). + Asserts that no telegram was sent (assertion queue is empty). - `knx.assert_telegram_count(count: int)` Asserts that `count` telegrams were sent. -- `knx.assert_read(group_address: str, response: int | tuple[int, ...] | None = None, ignore_order: bool = False)` +- `knx.assert_read(group_address: str)` Asserts that a GroupValueRead telegram was sent to `group_address`. - The telegram will be removed from the assertion list. - Optionally inject incoming GroupValueResponse telegram after reception to clear the value reader waiting task. This can also be done manually with `knx.receive_response`. -- `knx.assert_response(group_address: str, payload: int | tuple[int, ...], ignore_order: bool = False)` + The telegram will be removed from the assertion queue. +- `knx.assert_response(group_address: str, payload: int | tuple[int, ...])` Asserts that a GroupValueResponse telegram with `payload` was sent to `group_address`. - The telegram will be removed from the assertion list. -- `knx.assert_write(group_address: str, payload: int | tuple[int, ...], ignore_order: bool = False)` + The telegram will be removed from the assertion queue. +- `knx.assert_write(group_address: str, payload: int | tuple[int, ...])` Asserts that a GroupValueWrite telegram with `payload` was sent to `group_address`. - The telegram will be removed from the assertion list. + The telegram will be removed from the assertion queue. Change some states or call some services and assert outgoing telegrams. 
diff --git a/tests/components/knx/__init__.py b/tests/components/knx/__init__.py index fc19741d190..eaa84714dc5 100644 --- a/tests/components/knx/__init__.py +++ b/tests/components/knx/__init__.py @@ -1,8 +1 @@ """Tests for the KNX integration.""" - -from collections.abc import Callable, Coroutine -from typing import Any - -from homeassistant.helpers import entity_registry as er - -type KnxEntityGenerator = Callable[..., Coroutine[Any, Any, er.RegistryEntry]] diff --git a/tests/components/knx/conftest.py b/tests/components/knx/conftest.py index c0ec1dd9b9a..cd7146b565b 100644 --- a/tests/components/knx/conftest.py +++ b/tests/components/knx/conftest.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +import json from typing import Any from unittest.mock import DEFAULT, AsyncMock, Mock, patch @@ -29,22 +30,13 @@ from homeassistant.components.knx.const import ( DOMAIN as KNX_DOMAIN, ) from homeassistant.components.knx.project import STORAGE_KEY as KNX_PROJECT_STORAGE_KEY -from homeassistant.components.knx.storage.config_store import ( - STORAGE_KEY as KNX_CONFIG_STORAGE_KEY, -) -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component -from . import KnxEntityGenerator +from tests.common import MockConfigEntry, load_fixture -from tests.common import MockConfigEntry, load_json_object_fixture -from tests.typing import WebSocketGenerator - -FIXTURE_PROJECT_DATA = load_json_object_fixture("project.json", KNX_DOMAIN) -FIXTURE_CONFIG_STORAGE_DATA = load_json_object_fixture("config_store.json", KNX_DOMAIN) +FIXTURE_PROJECT_DATA = json.loads(load_fixture("project.json", KNX_DOMAIN)) class KNXTestKit: @@ -57,9 +49,9 @@ class KNXTestKit: self.hass: HomeAssistant = hass self.mock_config_entry: MockConfigEntry = mock_config_entry self.xknx: XKNX - # outgoing telegrams will be put in the List instead of sent to the interface + # outgoing telegrams will be put in the Queue instead of sent to the interface # telegrams to an InternalGroupAddress won't be queued here - self._outgoing_telegrams: list[Telegram] = [] + self._outgoing_telegrams: asyncio.Queue = asyncio.Queue() def assert_state(self, entity_id: str, state: str, **attributes) -> None: """Assert the state of an entity.""" @@ -76,14 +68,14 @@ class KNXTestKit: async def patch_xknx_start(): """Patch `xknx.start` for unittests.""" self.xknx.cemi_handler.send_telegram = AsyncMock( - side_effect=self._outgoing_telegrams.append + side_effect=self._outgoing_telegrams.put ) # after XKNX.__init__() to not overwrite it by the config entry again # before StateUpdater starts to avoid slow down of tests self.xknx.rate_limit = 0 # set XknxConnectionState.CONNECTED to avoid `unavailable` entities at startup # and start StateUpdater. This would be awaited on normal startup too. 
- self.xknx.connection_manager.connection_state_changed( + await self.xknx.connection_manager.connection_state_changed( state=XknxConnectionState.CONNECTED, connection_type=XknxConnectionType.TUNNEL_TCP, ) @@ -93,7 +85,6 @@ class KNXTestKit: mock = Mock() mock.start = AsyncMock(side_effect=patch_xknx_start) mock.stop = AsyncMock() - mock.gateway_info = AsyncMock() return mock def fish_xknx(*args, **kwargs): @@ -117,22 +108,24 @@ class KNXTestKit: ######################## def _list_remaining_telegrams(self) -> str: - """Return a string containing remaining outgoing telegrams in test List.""" - return "\n".join(map(str, self._outgoing_telegrams)) + """Return a string containing remaining outgoing telegrams in test Queue. One per line.""" + remaining_telegrams = [] + while not self._outgoing_telegrams.empty(): + remaining_telegrams.append(self._outgoing_telegrams.get_nowait()) + return "\n".join(map(str, remaining_telegrams)) async def assert_no_telegram(self) -> None: - """Assert if every telegram in test List was checked.""" + """Assert if every telegram in test Queue was checked.""" await self.hass.async_block_till_done() - remaining_telegram_count = len(self._outgoing_telegrams) - assert not remaining_telegram_count, ( - f"Found remaining unasserted Telegrams: {remaining_telegram_count}\n" + assert self._outgoing_telegrams.empty(), ( + f"Found remaining unasserted Telegrams: {self._outgoing_telegrams.qsize()}\n" f"{self._list_remaining_telegrams()}" ) async def assert_telegram_count(self, count: int) -> None: - """Assert outgoing telegram count in test List.""" + """Assert outgoing telegram count in test Queue.""" await self.hass.async_block_till_done() - actual_count = len(self._outgoing_telegrams) + actual_count = self._outgoing_telegrams.qsize() assert actual_count == count, ( f"Outgoing telegrams: {actual_count} - Expected: {count}\n" f"{self._list_remaining_telegrams()}" @@ -147,79 +140,47 @@ class KNXTestKit: group_address: str, payload: int | tuple[int, ...] | None, apci_type: type[APCI], - ignore_order: bool = False, ) -> None: - """Assert outgoing telegram. Optionally in timely order.""" + """Assert outgoing telegram. One by one in timely order.""" await self.xknx.telegrams.join() - if not self._outgoing_telegrams: + await self.hass.async_block_till_done() + await self.hass.async_block_till_done() + try: + telegram = self._outgoing_telegrams.get_nowait() + except asyncio.QueueEmpty as err: raise AssertionError( f"No Telegram found. Expected: {apci_type.__name__} -" f" {group_address} - {payload}" - ) - _expected_ga = GroupAddress(group_address) + ) from err - if ignore_order: - for telegram in self._outgoing_telegrams: - if ( - telegram.destination_address == _expected_ga - and isinstance(telegram.payload, apci_type) - and (payload is None or telegram.payload.value.value == payload) - ): - self._outgoing_telegrams.remove(telegram) - return - raise AssertionError( - f"Telegram not found. 
Expected: {apci_type.__name__} -" - f" {group_address} - {payload}" - f"\nUnasserted telegrams:\n{self._list_remaining_telegrams()}" - ) - - telegram = self._outgoing_telegrams.pop(0) assert isinstance( telegram.payload, apci_type ), f"APCI type mismatch in {telegram} - Expected: {apci_type.__name__}" + assert ( - telegram.destination_address == _expected_ga + str(telegram.destination_address) == group_address ), f"Group address mismatch in {telegram} - Expected: {group_address}" + if payload is not None: assert ( telegram.payload.value.value == payload # type: ignore[attr-defined] ), f"Payload mismatch in {telegram} - Expected: {payload}" - async def assert_read( - self, - group_address: str, - response: int | tuple[int, ...] | None = None, - ignore_order: bool = False, - ) -> None: - """Assert outgoing GroupValueRead telegram. Optionally in timely order. - - Optionally inject incoming GroupValueResponse telegram after reception. - """ - await self.assert_telegram(group_address, None, GroupValueRead, ignore_order) - if response is not None: - await self.receive_response(group_address, response) + async def assert_read(self, group_address: str) -> None: + """Assert outgoing GroupValueRead telegram. One by one in timely order.""" + await self.assert_telegram(group_address, None, GroupValueRead) async def assert_response( - self, - group_address: str, - payload: int | tuple[int, ...], - ignore_order: bool = False, + self, group_address: str, payload: int | tuple[int, ...] ) -> None: - """Assert outgoing GroupValueResponse telegram. Optionally in timely order.""" - await self.assert_telegram( - group_address, payload, GroupValueResponse, ignore_order - ) + """Assert outgoing GroupValueResponse telegram. One by one in timely order.""" + await self.assert_telegram(group_address, payload, GroupValueResponse) async def assert_write( - self, - group_address: str, - payload: int | tuple[int, ...], - ignore_order: bool = False, + self, group_address: str, payload: int | tuple[int, ...] ) -> None: - """Assert outgoing GroupValueWrite telegram. Optionally in timely order.""" - await self.assert_telegram( - group_address, payload, GroupValueWrite, ignore_order - ) + """Assert outgoing GroupValueWrite telegram. One by one in timely order.""" + await self.assert_telegram(group_address, payload, GroupValueWrite) #################### # Incoming telegrams @@ -271,7 +232,6 @@ class KNXTestKit: GroupValueResponse(payload_value), source=source, ) - await asyncio.sleep(0) # advance loop to allow StateUpdater to process async def receive_write( self, @@ -320,53 +280,3 @@ def load_knxproj(hass_storage: dict[str, Any]) -> None: "version": 1, "data": FIXTURE_PROJECT_DATA, } - - -@pytest.fixture -def load_config_store(hass_storage: dict[str, Any]) -> None: - """Mock KNX config store data.""" - hass_storage[KNX_CONFIG_STORAGE_KEY] = FIXTURE_CONFIG_STORAGE_DATA - - -@pytest.fixture -async def create_ui_entity( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - hass_ws_client: WebSocketGenerator, - hass_storage: dict[str, Any], -) -> KnxEntityGenerator: - """Return a helper to create a KNX entities via WS. - - The KNX integration must be set up before using the helper. 
- """ - ws_client = await hass_ws_client(hass) - - async def _create_ui_entity( - platform: Platform, - knx_data: dict[str, Any], - entity_data: dict[str, Any] | None = None, - ) -> er.RegistryEntry: - """Create a KNX entity from WS with given configuration.""" - if entity_data is None: - entity_data = {"name": "Test"} - - await ws_client.send_json_auto_id( - { - "type": "knx/create_entity", - "platform": platform, - "data": { - "entity": entity_data, - "knx": knx_data, - }, - } - ) - res = await ws_client.receive_json() - assert res["success"], res - assert res["result"]["success"] is True, res["result"] - entity_id = res["result"]["entity_id"] - - entity = entity_registry.async_get(entity_id) - assert entity - return entity - - return _create_ui_entity diff --git a/tests/components/knx/fixtures/config_store.json b/tests/components/knx/fixtures/config_store.json deleted file mode 100644 index 5eabcfa87f9..00000000000 --- a/tests/components/knx/fixtures/config_store.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "version": 1, - "minor_version": 1, - "key": "knx/config_store.json", - "data": { - "entities": { - "switch": { - "knx_es_9d97829f47f1a2a3176a7c5b4216070c": { - "entity": { - "entity_category": null, - "name": "test", - "device_info": "knx_vdev_4c80a564f5fe5da701ed293966d6384d" - }, - "knx": { - "ga_switch": { - "write": "1/1/45", - "state": "1/0/45", - "passive": [] - }, - "invert": false, - "sync_state": true, - "respond_to_read": false - } - } - }, - "light": { - "knx_es_01J85ZKTFHSZNG4X9DYBE592TF": { - "entity": { - "name": "test", - "device_info": null, - "entity_category": "config" - }, - "knx": { - "color_temp_min": 2700, - "color_temp_max": 6000, - "_light_color_mode_schema": "default", - "ga_switch": { - "write": "1/1/21", - "state": "1/0/21", - "passive": [] - }, - "sync_state": true - } - } - } - } - } -} diff --git a/tests/components/knx/test_binary_sensor.py b/tests/components/knx/test_binary_sensor.py index dbb8d2ee832..b9216aa149a 100644 --- a/tests/components/knx/test_binary_sensor.py +++ b/tests/components/knx/test_binary_sensor.py @@ -2,8 +2,6 @@ from datetime import timedelta -from freezegun.api import FrozenDateTimeFactory - from homeassistant.components.knx.const import CONF_STATE_ADDRESS, CONF_SYNC_STATE from homeassistant.components.knx.schema import BinarySensorSchema from homeassistant.const import ( @@ -15,6 +13,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er +from homeassistant.util import dt as dt_util from .conftest import KNXTestKit @@ -124,30 +123,31 @@ async def test_binary_sensor_ignore_internal_state( # receive initial ON telegram await knx.receive_write("1/1/1", True) await knx.receive_write("2/2/2", True) + await hass.async_block_till_done() assert len(events) == 2 # receive second ON telegram - ignore_internal_state shall force state_changed event await knx.receive_write("1/1/1", True) await knx.receive_write("2/2/2", True) + await hass.async_block_till_done() assert len(events) == 3 # receive first OFF telegram await knx.receive_write("1/1/1", False) await knx.receive_write("2/2/2", False) + await hass.async_block_till_done() assert len(events) == 5 # receive second OFF telegram - ignore_internal_state shall force state_changed event await knx.receive_write("1/1/1", False) await knx.receive_write("2/2/2", False) + await hass.async_block_till_done() assert len(events) == 6 -async def test_binary_sensor_counter( - hass: HomeAssistant, - knx: KNXTestKit, 
- freezer: FrozenDateTimeFactory, -) -> None: +async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> None: """Test KNX binary_sensor with context timeout.""" + async_fire_time_changed(hass, dt_util.utcnow()) context_timeout = 1 await knx.setup_integration( @@ -166,18 +166,21 @@ async def test_binary_sensor_counter( # receive initial ON telegram await knx.receive_write("2/2/2", True) + await hass.async_block_till_done() # no change yet - still in 1 sec context (additional async_block_till_done needed for time change) assert len(events) == 0 state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF assert state.attributes.get("counter") == 0 - freezer.tick(timedelta(seconds=context_timeout)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=context_timeout)) + await hass.async_block_till_done() await knx.xknx.task_registry.block_till_done() # state changed twice after context timeout - once to ON with counter 1 and once to counter 0 state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON assert state.attributes.get("counter") == 0 + # additional async_block_till_done needed event capture + await hass.async_block_till_done() assert len(events) == 2 event = events.pop(0).data assert event.get("new_state").attributes.get("counter") == 1 @@ -193,9 +196,9 @@ async def test_binary_sensor_counter( state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON assert state.attributes.get("counter") == 0 - freezer.tick(timedelta(seconds=context_timeout)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=context_timeout)) await knx.xknx.task_registry.block_till_done() + await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON assert state.attributes.get("counter") == 0 @@ -208,12 +211,10 @@ async def test_binary_sensor_counter( assert event.get("old_state").attributes.get("counter") == 2 -async def test_binary_sensor_reset( - hass: HomeAssistant, - knx: KNXTestKit, - freezer: FrozenDateTimeFactory, -) -> None: +async def test_binary_sensor_reset(hass: HomeAssistant, knx: KNXTestKit) -> None: """Test KNX binary_sensor with reset_after function.""" + async_fire_time_changed(hass, dt_util.utcnow()) + await knx.setup_integration( { BinarySensorSchema.PLATFORM: [ @@ -229,10 +230,11 @@ async def test_binary_sensor_reset( # receive ON telegram await knx.receive_write("2/2/2", True) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON - freezer.tick(timedelta(seconds=1)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1)) + await hass.async_block_till_done() await hass.async_block_till_done() # state reset after after timeout state = hass.states.get("binary_sensor.test") @@ -263,6 +265,7 @@ async def test_binary_sensor_restore_and_respond(hass: HomeAssistant, knx) -> No await knx.assert_telegram_count(0) await knx.receive_write(_ADDRESS, False) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF @@ -293,5 +296,6 @@ async def test_binary_sensor_restore_invert(hass: HomeAssistant, knx) -> None: # inverted is on, make sure the state is off after it await knx.receive_write(_ADDRESS, True) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF diff 
--git a/tests/components/knx/test_button.py b/tests/components/knx/test_button.py index 38ccb36200b..613208d5595 100644 --- a/tests/components/knx/test_button.py +++ b/tests/components/knx/test_button.py @@ -3,26 +3,20 @@ from datetime import timedelta import logging -from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.knx.const import ( - CONF_PAYLOAD_LENGTH, - KNX_ADDRESS, - KNX_MODULE_KEY, -) +from homeassistant.components.knx.const import CONF_PAYLOAD_LENGTH, DOMAIN, KNX_ADDRESS from homeassistant.components.knx.schema import ButtonSchema from homeassistant.const import CONF_NAME, CONF_PAYLOAD, CONF_TYPE from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util from .conftest import KNXTestKit from tests.common import async_capture_events, async_fire_time_changed -async def test_button_simple( - hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory -) -> None: +async def test_button_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: """Test KNX button with default payload.""" await knx.setup_integration( { @@ -44,8 +38,7 @@ async def test_button_simple( # received telegrams on button GA are ignored by the entity old_state = hass.states.get("button.test") - freezer.tick(timedelta(seconds=3)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=3)) await knx.receive_write("1/2/3", False) await knx.receive_write("1/2/3", True) new_state = hass.states.get("button.test") @@ -138,4 +131,4 @@ async def test_button_invalid( assert record.levelname == "ERROR" assert "Setup failed for 'knx': Invalid config." in record.message assert hass.states.get("button.test") is None - assert hass.data.get(KNX_MODULE_KEY) is None + assert hass.data.get(DOMAIN) is None diff --git a/tests/components/knx/test_climate.py b/tests/components/knx/test_climate.py index 8fb348f1724..9c431386b43 100644 --- a/tests/components/knx/test_climate.py +++ b/tests/components/knx/test_climate.py @@ -2,7 +2,7 @@ import pytest -from homeassistant.components.climate import HVACMode +from homeassistant.components.climate import PRESET_ECO, PRESET_SLEEP, HVACMode from homeassistant.components.knx.schema import ClimateSchema from homeassistant.const import CONF_NAME, STATE_IDLE from homeassistant.core import HomeAssistant @@ -80,6 +80,12 @@ async def test_climate_on_off( ) } ) + + await hass.async_block_till_done() + # read heat/cool state + if heat_cool_ga: + await knx.assert_read("1/2/11") + await knx.receive_response("1/2/11", 0) # cool # read temperature state await knx.assert_read("1/2/3") await knx.receive_response("1/2/3", RAW_FLOAT_20_0) @@ -89,10 +95,6 @@ async def test_climate_on_off( # read on/off state await knx.assert_read("1/2/9") await knx.receive_response("1/2/9", 1) - # read heat/cool state - if heat_cool_ga: - await knx.assert_read("1/2/11") - await knx.receive_response("1/2/11", 0) # cool # turn off await hass.services.async_call( @@ -169,15 +171,18 @@ async def test_climate_hvac_mode( ) } ) + + await hass.async_block_till_done() # read states state updater - # StateUpdater semaphore allows 2 concurrent requests - await knx.assert_read("1/2/3") - await knx.assert_read("1/2/5") - # StateUpdater initialize state - await knx.receive_response("1/2/3", RAW_FLOAT_20_0) - await knx.receive_response("1/2/5", RAW_FLOAT_22_0) await knx.assert_read("1/2/7") + await knx.assert_read("1/2/3") + # StateUpdater initialize state await knx.receive_response("1/2/7", (0x01,)) + await 
knx.receive_response("1/2/3", RAW_FLOAT_20_0) + # StateUpdater semaphore allows 2 concurrent requests + # read target temperature state + await knx.assert_read("1/2/5") + await knx.receive_response("1/2/5", RAW_FLOAT_22_0) # turn hvac mode to off - set_hvac_mode() doesn't send to on_off if dedicated hvac mode is available await hass.services.async_call( @@ -231,90 +236,6 @@ async def test_climate_hvac_mode( assert hass.states.get("climate.test").state == "cool" -async def test_climate_heat_cool_read_only( - hass: HomeAssistant, knx: KNXTestKit -) -> None: - """Test KNX climate hvac mode.""" - heat_cool_state_ga = "3/3/3" - await knx.setup_integration( - { - ClimateSchema.PLATFORM: { - CONF_NAME: "test", - ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", - ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", - ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", - ClimateSchema.CONF_HEAT_COOL_STATE_ADDRESS: heat_cool_state_ga, - } - } - ) - # read states state updater - # StateUpdater semaphore allows 2 concurrent requests - await knx.assert_read("1/2/3") - await knx.assert_read("1/2/5") - # StateUpdater initialize state - await knx.receive_response("1/2/3", RAW_FLOAT_20_0) - await knx.receive_response("1/2/5", RAW_FLOAT_20_0) - await knx.assert_read(heat_cool_state_ga) - await knx.receive_response(heat_cool_state_ga, True) # heat - - state = hass.states.get("climate.test") - assert state.state == "heat" - assert state.attributes["hvac_modes"] == ["heat"] - assert state.attributes["hvac_action"] == "heating" - - await knx.receive_write(heat_cool_state_ga, False) # cool - state = hass.states.get("climate.test") - assert state.state == "cool" - assert state.attributes["hvac_modes"] == ["cool"] - assert state.attributes["hvac_action"] == "cooling" - - -async def test_climate_heat_cool_read_only_on_off( - hass: HomeAssistant, knx: KNXTestKit -) -> None: - """Test KNX climate hvac mode.""" - on_off_ga = "2/2/2" - heat_cool_state_ga = "3/3/3" - await knx.setup_integration( - { - ClimateSchema.PLATFORM: { - CONF_NAME: "test", - ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", - ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", - ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", - ClimateSchema.CONF_ON_OFF_ADDRESS: on_off_ga, - ClimateSchema.CONF_HEAT_COOL_STATE_ADDRESS: heat_cool_state_ga, - } - } - ) - # read states state updater - # StateUpdater semaphore allows 2 concurrent requests - await knx.assert_read("1/2/3") - await knx.assert_read("1/2/5") - # StateUpdater initialize state - await knx.receive_response("1/2/3", RAW_FLOAT_20_0) - await knx.receive_response("1/2/5", RAW_FLOAT_20_0) - await knx.assert_read(heat_cool_state_ga) - await knx.receive_response(heat_cool_state_ga, True) # heat - - state = hass.states.get("climate.test") - assert state.state == "off" - assert set(state.attributes["hvac_modes"]) == {"off", "heat"} - assert state.attributes["hvac_action"] == "off" - - await knx.receive_write(heat_cool_state_ga, False) # cool - state = hass.states.get("climate.test") - assert state.state == "off" - assert set(state.attributes["hvac_modes"]) == {"off", "cool"} - assert state.attributes["hvac_action"] == "off" - - await knx.receive_write(on_off_ga, True) - state = hass.states.get("climate.test") - assert state.state == "cool" - assert set(state.attributes["hvac_modes"]) == {"off", "cool"} - assert state.attributes["hvac_action"] == "cooling" - - async def test_climate_preset_mode( hass: HomeAssistant, knx: KNXTestKit, entity_registry: er.EntityRegistry ) -> 
None: @@ -331,42 +252,50 @@ async def test_climate_preset_mode( } } ) + events = async_capture_events(hass, "state_changed") - # StateUpdater initialize state - # StateUpdater semaphore allows 2 concurrent requests - await knx.assert_read("1/2/3") - await knx.assert_read("1/2/5") - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) - await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await hass.async_block_till_done() + # read states state updater await knx.assert_read("1/2/7") - await knx.receive_response("1/2/7", (0x01,)) # comfort + await knx.assert_read("1/2/3") + # StateUpdater initialize state + await knx.receive_response("1/2/7", (0x01,)) + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) + # StateUpdater semaphore allows 2 concurrent requests + # read target temperature state + await knx.assert_read("1/2/5") + await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + events.clear() - knx.assert_state("climate.test", HVACMode.HEAT, preset_mode="comfort") # set preset mode await hass.services.async_call( "climate", "set_preset_mode", - {"entity_id": "climate.test", "preset_mode": "building_protection"}, + {"entity_id": "climate.test", "preset_mode": PRESET_ECO}, blocking=True, ) await knx.assert_write("1/2/6", (0x04,)) - knx.assert_state("climate.test", HVACMode.HEAT, preset_mode="building_protection") + assert len(events) == 1 + events.pop() # set preset mode await hass.services.async_call( "climate", "set_preset_mode", - {"entity_id": "climate.test", "preset_mode": "economy"}, + {"entity_id": "climate.test", "preset_mode": PRESET_SLEEP}, blocking=True, ) await knx.assert_write("1/2/6", (0x03,)) - knx.assert_state("climate.test", HVACMode.HEAT, preset_mode="economy") + assert len(events) == 1 + events.pop() assert len(knx.xknx.devices) == 2 assert len(knx.xknx.devices[0].device_updated_cbs) == 2 assert len(knx.xknx.devices[1].device_updated_cbs) == 2 # test removing also removes hooks entity_registry.async_remove("climate.test") + await hass.async_block_till_done() + # If we remove the entity the underlying devices should disappear too assert len(knx.xknx.devices) == 0 @@ -386,15 +315,18 @@ async def test_update_entity(hass: HomeAssistant, knx: KNXTestKit) -> None: } ) assert await async_setup_component(hass, "homeassistant", {}) + await hass.async_block_till_done() + await hass.async_block_till_done() # read states state updater - await knx.assert_read("1/2/3") - await knx.assert_read("1/2/5") - # StateUpdater initialize state - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) - await knx.receive_response("1/2/5", RAW_FLOAT_22_0) await knx.assert_read("1/2/7") + await knx.assert_read("1/2/3") + # StateUpdater initialize state await knx.receive_response("1/2/7", (0x01,)) + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) + # StateUpdater semaphore allows 2 concurrent requests + await knx.assert_read("1/2/5") + await knx.receive_response("1/2/5", RAW_FLOAT_22_0) # verify update entity retriggers group value reads to the bus await hass.services.async_call( @@ -422,6 +354,8 @@ async def test_command_value_idle_mode(hass: HomeAssistant, knx: KNXTestKit) -> } } ) + + await hass.async_block_till_done() # read states state updater await knx.assert_read("1/2/3") await knx.assert_read("1/2/5") @@ -439,414 +373,3 @@ async def test_command_value_idle_mode(hass: HomeAssistant, knx: KNXTestKit) -> knx.assert_state( "climate.test", HVACMode.HEAT, command_value=0, hvac_action=STATE_IDLE ) - - -async def test_fan_speed_3_steps(hass: HomeAssistant, knx: KNXTestKit) -> None: - """Test KNX 
climate fan speed 3 steps.""" - await knx.setup_integration( - { - ClimateSchema.PLATFORM: { - CONF_NAME: "test", - ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", - ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", - ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", - ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", - ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", - ClimateSchema.CONF_FAN_SPEED_MODE: "step", - ClimateSchema.CONF_FAN_MAX_STEP: 3, - } - } - ) - - # read states state updater - await knx.assert_read("1/2/3") - await knx.assert_read("1/2/5") - - # StateUpdater initialize state - await knx.receive_response("1/2/5", RAW_FLOAT_22_0) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) - - # Query status - await knx.assert_read("1/2/7") - await knx.receive_response("1/2/7", (0x01,)) - knx.assert_state( - "climate.test", - HVACMode.HEAT, - fan_mode="low", - fan_modes=["off", "low", "medium", "high"], - ) - - # set fan mode - await hass.services.async_call( - "climate", - "set_fan_mode", - {"entity_id": "climate.test", "fan_mode": "medium"}, - blocking=True, - ) - await knx.assert_write("1/2/6", (0x02,)) - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="medium") - - # turn off - await hass.services.async_call( - "climate", - "set_fan_mode", - {"entity_id": "climate.test", "fan_mode": "off"}, - blocking=True, - ) - await knx.assert_write("1/2/6", (0x0,)) - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="off") - - -async def test_fan_speed_2_steps(hass: HomeAssistant, knx: KNXTestKit) -> None: - """Test KNX climate fan speed 2 steps.""" - await knx.setup_integration( - { - ClimateSchema.PLATFORM: { - CONF_NAME: "test", - ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", - ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", - ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", - ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", - ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", - ClimateSchema.CONF_FAN_SPEED_MODE: "step", - ClimateSchema.CONF_FAN_MAX_STEP: 2, - } - } - ) - - # read states state updater - await knx.assert_read("1/2/3") - await knx.assert_read("1/2/5") - - # StateUpdater initialize state - await knx.receive_response("1/2/5", RAW_FLOAT_22_0) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) - - # Query status - await knx.assert_read("1/2/7") - await knx.receive_response("1/2/7", (0x01,)) - knx.assert_state( - "climate.test", HVACMode.HEAT, fan_mode="low", fan_modes=["off", "low", "high"] - ) - - # set fan mode - await hass.services.async_call( - "climate", - "set_fan_mode", - {"entity_id": "climate.test", "fan_mode": "high"}, - blocking=True, - ) - await knx.assert_write("1/2/6", (0x02,)) - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="high") - - # turn off - await hass.services.async_call( - "climate", - "set_fan_mode", - {"entity_id": "climate.test", "fan_mode": "off"}, - blocking=True, - ) - await knx.assert_write("1/2/6", (0x0,)) - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="off") - - -async def test_fan_speed_1_step(hass: HomeAssistant, knx: KNXTestKit) -> None: - """Test KNX climate fan speed 1 step.""" - await knx.setup_integration( - { - ClimateSchema.PLATFORM: { - CONF_NAME: "test", - ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", - ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", - ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", - ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", - ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", - 
ClimateSchema.CONF_FAN_SPEED_MODE: "step", - ClimateSchema.CONF_FAN_MAX_STEP: 1, - } - } - ) - - # read states state updater - await knx.assert_read("1/2/3") - await knx.assert_read("1/2/5") - - # StateUpdater initialize state - await knx.receive_response("1/2/5", RAW_FLOAT_22_0) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) - - # Query status - await knx.assert_read("1/2/7") - await knx.receive_response("1/2/7", (0x01,)) - knx.assert_state( - "climate.test", HVACMode.HEAT, fan_mode="on", fan_modes=["off", "on"] - ) - - # turn off - await hass.services.async_call( - "climate", - "set_fan_mode", - {"entity_id": "climate.test", "fan_mode": "off"}, - blocking=True, - ) - await knx.assert_write("1/2/6", (0x0,)) - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="off") - - -async def test_fan_speed_5_steps(hass: HomeAssistant, knx: KNXTestKit) -> None: - """Test KNX climate fan speed 5 steps.""" - await knx.setup_integration( - { - ClimateSchema.PLATFORM: { - CONF_NAME: "test", - ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", - ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", - ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", - ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", - ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", - ClimateSchema.CONF_FAN_SPEED_MODE: "step", - ClimateSchema.CONF_FAN_MAX_STEP: 5, - } - } - ) - - # read states state updater - await knx.assert_read("1/2/3") - await knx.assert_read("1/2/5") - - # StateUpdater initialize state - await knx.receive_response("1/2/5", RAW_FLOAT_22_0) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) - - # Query status - await knx.assert_read("1/2/7") - await knx.receive_response("1/2/7", (0x01,)) - knx.assert_state( - "climate.test", - HVACMode.HEAT, - fan_mode="1", - fan_modes=["off", "1", "2", "3", "4", "5"], - ) - - # set fan mode - await hass.services.async_call( - "climate", - "set_fan_mode", - {"entity_id": "climate.test", "fan_mode": "4"}, - blocking=True, - ) - await knx.assert_write("1/2/6", (0x04,)) - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="4") - - # turn off - await hass.services.async_call( - "climate", - "set_fan_mode", - {"entity_id": "climate.test", "fan_mode": "off"}, - blocking=True, - ) - await knx.assert_write("1/2/6", (0x0,)) - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="off") - - -async def test_fan_speed_percentage(hass: HomeAssistant, knx: KNXTestKit) -> None: - """Test KNX climate fan speed percentage.""" - await knx.setup_integration( - { - ClimateSchema.PLATFORM: { - CONF_NAME: "test", - ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", - ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", - ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", - ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", - ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", - ClimateSchema.CONF_FAN_SPEED_MODE: "percent", - } - } - ) - - # read states state updater - await knx.assert_read("1/2/3") - await knx.assert_read("1/2/5") - - # StateUpdater initialize state - await knx.receive_response("1/2/5", RAW_FLOAT_22_0) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) - - # Query status - await knx.assert_read("1/2/7") - await knx.receive_response("1/2/7", (84,)) # 84 / 255 = 33% - knx.assert_state( - "climate.test", - HVACMode.HEAT, - fan_mode="low", - fan_modes=["off", "low", "medium", "high"], - ) - - # set fan mode - await hass.services.async_call( - "climate", - "set_fan_mode", - {"entity_id": "climate.test", "fan_mode": "medium"}, - blocking=True, - 
) - await knx.assert_write("1/2/6", (168,)) # 168 / 255 = 66% - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="medium") - - # turn off - await hass.services.async_call( - "climate", - "set_fan_mode", - {"entity_id": "climate.test", "fan_mode": "off"}, - blocking=True, - ) - await knx.assert_write("1/2/6", (0x0,)) - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="off") - - # check fan mode that is not in the fan modes list - await knx.receive_write("1/2/6", (127,)) # 127 / 255 = 50% - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="medium") - - # check FAN_OFF is not picked when fan_speed is closest to zero - await knx.receive_write("1/2/6", (3,)) - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="low") - - -async def test_fan_speed_percentage_4_steps( - hass: HomeAssistant, knx: KNXTestKit -) -> None: - """Test KNX climate fan speed percentage with 4 steps.""" - await knx.setup_integration( - { - ClimateSchema.PLATFORM: { - CONF_NAME: "test", - ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", - ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", - ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", - ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", - ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", - ClimateSchema.CONF_FAN_SPEED_MODE: "percent", - ClimateSchema.CONF_FAN_MAX_STEP: 4, - } - } - ) - - # read states state updater - await knx.assert_read("1/2/3") - await knx.assert_read("1/2/5") - - # StateUpdater initialize state - await knx.receive_response("1/2/5", RAW_FLOAT_22_0) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) - - # Query status - await knx.assert_read("1/2/7") - await knx.receive_response("1/2/7", (64,)) # 64 / 255 = 25% - knx.assert_state( - "climate.test", - HVACMode.HEAT, - fan_mode="25%", - fan_modes=["off", "25%", "50%", "75%", "100%"], - ) - - # set fan mode - await hass.services.async_call( - "climate", - "set_fan_mode", - {"entity_id": "climate.test", "fan_mode": "50%"}, - blocking=True, - ) - await knx.assert_write("1/2/6", (128,)) # 128 / 255 = 50% - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="50%") - - # turn off - await hass.services.async_call( - "climate", - "set_fan_mode", - {"entity_id": "climate.test", "fan_mode": "off"}, - blocking=True, - ) - await knx.assert_write("1/2/6", (0x0,)) - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="off") - - # check fan mode that is not in the fan modes list - await knx.receive_write("1/2/6", (168,)) # 168 / 255 = 66% - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="75%") - - -async def test_fan_speed_zero_mode_auto(hass: HomeAssistant, knx: KNXTestKit) -> None: - """Test KNX climate fan speed 3 steps.""" - await knx.setup_integration( - { - ClimateSchema.PLATFORM: { - CONF_NAME: "test", - ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", - ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", - ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", - ClimateSchema.CONF_FAN_SPEED_ADDRESS: "1/2/6", - ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS: "1/2/7", - ClimateSchema.CONF_FAN_MAX_STEP: 3, - ClimateSchema.CONF_FAN_SPEED_MODE: "step", - ClimateSchema.CONF_FAN_ZERO_MODE: "auto", - } - } - ) - - # read states state updater - await knx.assert_read("1/2/3") - await knx.assert_read("1/2/5") - - # StateUpdater initialize state - await knx.receive_response("1/2/5", RAW_FLOAT_22_0) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) - - # Query status - await knx.assert_read("1/2/7") - await knx.receive_response("1/2/7", 
(0x01,)) - knx.assert_state( - "climate.test", - HVACMode.HEAT, - fan_mode="low", - fan_modes=["auto", "low", "medium", "high"], - ) - - # set auto - await hass.services.async_call( - "climate", - "set_fan_mode", - {"entity_id": "climate.test", "fan_mode": "auto"}, - blocking=True, - ) - await knx.assert_write("1/2/6", (0x0,)) - knx.assert_state("climate.test", HVACMode.HEAT, fan_mode="auto") - - -async def test_climate_humidity(hass: HomeAssistant, knx: KNXTestKit) -> None: - """Test KNX climate humidity.""" - await knx.setup_integration( - { - ClimateSchema.PLATFORM: { - CONF_NAME: "test", - ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", - ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", - ClimateSchema.CONF_HUMIDITY_STATE_ADDRESS: "1/2/16", - } - } - ) - - # read states state updater - await knx.assert_read("1/2/3") - await knx.assert_read("1/2/5") - - # StateUpdater initialize state - await knx.receive_response("1/2/5", RAW_FLOAT_22_0) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) - - # Query status - await knx.assert_read("1/2/16") - await knx.receive_response("1/2/16", (0x14, 0x74)) - knx.assert_state( - "climate.test", - HVACMode.HEAT, - current_humidity=45.6, - ) diff --git a/tests/components/knx/test_config_flow.py b/tests/components/knx/test_config_flow.py index 78751c7e641..f12a57f97ba 100644 --- a/tests/components/knx/test_config_flow.py +++ b/tests/components/knx/test_config_flow.py @@ -1,7 +1,7 @@ """Test the KNX config flow.""" from contextlib import contextmanager -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import Mock, patch import pytest from xknx.exceptions.exception import CommunicationError, InvalidSecureConfiguration @@ -76,10 +76,10 @@ def patch_file_upload(return_value=FIXTURE_KEYRING, side_effect=None): """Patch file upload. Yields the Keyring instance (return_value).""" with ( patch( - "homeassistant.components.knx.storage.keyring.process_uploaded_file" + "homeassistant.components.knx.helpers.keyring.process_uploaded_file" ) as file_upload_mock, patch( - "homeassistant.components.knx.storage.keyring.sync_load_keyring", + "homeassistant.components.knx.helpers.keyring.sync_load_keyring", return_value=return_value, side_effect=side_effect, ), @@ -126,7 +126,7 @@ def _gateway_descriptor( class GatewayScannerMock: """Mock GatewayScanner.""" - def __init__(self, gateways=None) -> None: + def __init__(self, gateways=None): """Initialize GatewayScannerMock.""" # Key is a HPAI instance in xknx, but not used in HA anyway. 
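`patch_file_upload` above stacks two `unittest.mock.patch` targets in one `with` block and yields the keyring fixture, so the config-flow tests can exercise keyfile upload without touching real files. A rough sketch of that shape, using a hypothetical local `storage` namespace instead of the real `homeassistant.components.knx.helpers.keyring` module:

```python
# Hedged sketch: `storage`, its two helpers, and FIXTURE_KEYRING are invented
# for illustration; only the patch-and-yield structure mirrors the test above.
from contextlib import contextmanager
from unittest.mock import Mock, patch


class storage:
    """Hypothetical stand-ins for the upload/keyring helpers."""

    @staticmethod
    def process_uploaded_file(file_id):
        raise RuntimeError("touches the filesystem; patched out in tests")

    @staticmethod
    def sync_load_keyring(path, password):
        raise RuntimeError("parses a real keyfile; patched out in tests")


FIXTURE_KEYRING = Mock(name="keyring")


@contextmanager
def patch_file_upload(return_value=FIXTURE_KEYRING, side_effect=None):
    """Patch file upload. Yields the keyring instance (return_value)."""
    with (
        patch.object(storage, "process_uploaded_file"),
        patch.object(
            storage,
            "sync_load_keyring",
            return_value=return_value,
            side_effect=side_effect,
        ),
    ):
        yield return_value


with patch_file_upload() as keyring:
    assert storage.sync_load_keyring("file_path", "password") is keyring
```

In the actual tests the same two names are patched by dotted path, as shown in the hunk above; the sketch only illustrates why the helper yields `return_value` for later assertions.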
self.found_gateways = ( @@ -184,6 +184,7 @@ async def test_routing_setup( CONF_KNX_INDIVIDUAL_ADDRESS: "1.1.110", }, ) + await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "Routing as 1.1.110" assert result3["data"] == { @@ -258,6 +259,7 @@ async def test_routing_setup_advanced( CONF_KNX_LOCAL_IP: "192.168.1.112", }, ) + await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "Routing as 1.1.110" assert result3["data"] == { @@ -348,6 +350,7 @@ async def test_routing_secure_manual_setup( CONF_KNX_ROUTING_SYNC_LATENCY_TOLERANCE: 2000, }, ) + await hass.async_block_till_done() assert secure_routing_manual["type"] is FlowResultType.CREATE_ENTRY assert secure_routing_manual["title"] == "Secure Routing as 0.0.123" assert secure_routing_manual["data"] == { @@ -416,6 +419,7 @@ async def test_routing_secure_keyfile( CONF_KNX_KNXKEY_PASSWORD: "password", }, ) + await hass.async_block_till_done() assert routing_secure_knxkeys["type"] is FlowResultType.CREATE_ENTRY assert routing_secure_knxkeys["title"] == "Secure Routing as 0.0.123" assert routing_secure_knxkeys["data"] == { @@ -510,7 +514,7 @@ async def test_routing_secure_keyfile( return_value=GatewayScannerMock(), ) async def test_tunneling_setup_manual( - gateway_scanner_mock: MagicMock, + _gateway_scanner_mock, hass: HomeAssistant, knx_setup, user_input, @@ -548,6 +552,7 @@ async def test_tunneling_setup_manual( result2["flow_id"], user_input, ) + await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == title assert result3["data"] == config_entry_data @@ -559,7 +564,7 @@ async def test_tunneling_setup_manual( return_value=GatewayScannerMock(), ) async def test_tunneling_setup_manual_request_description_error( - gateway_scanner_mock: MagicMock, + _gateway_scanner_mock, hass: HomeAssistant, knx_setup, ) -> None: @@ -676,6 +681,7 @@ async def test_tunneling_setup_manual_request_description_error( CONF_PORT: 3671, }, ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Tunneling TCP @ 192.168.0.1" assert result["data"] == { @@ -700,10 +706,7 @@ async def test_tunneling_setup_manual_request_description_error( return_value=_gateway_descriptor("192.168.0.2", 3675), ) async def test_tunneling_setup_for_local_ip( - request_description_mock: MagicMock, - gateway_scanner_mock: MagicMock, - hass: HomeAssistant, - knx_setup, + _request_description_mock, _gateway_scanner_mock, hass: HomeAssistant, knx_setup ) -> None: """Test tunneling if only one gateway is found.""" result = await hass.config_entries.flow.async_init( @@ -769,6 +772,7 @@ async def test_tunneling_setup_for_local_ip( CONF_KNX_LOCAL_IP: "192.168.1.112", }, ) + await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "Tunneling UDP @ 192.168.0.2" assert result3["data"] == { @@ -817,6 +821,7 @@ async def test_tunneling_setup_for_multiple_found_gateways( tunnel_flow["flow_id"], {CONF_KNX_GATEWAY: str(gateway)}, ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { **DEFAULT_ENTRY_DATA, @@ -900,6 +905,7 @@ async def test_form_with_automatic_connection_handling( CONF_KNX_CONNECTION_TYPE: CONF_KNX_AUTOMATIC, }, ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == 
CONF_KNX_AUTOMATIC.capitalize() assert result2["data"] == { @@ -965,7 +971,7 @@ async def _get_menu_step_secure_tunnel(hass: HomeAssistant) -> FlowResult: ), ) async def test_get_secure_menu_step_manual_tunnelling( - request_description_mock: MagicMock, + _request_description_mock, hass: HomeAssistant, ) -> None: """Test flow reaches secure_tunnellinn menu step from manual tunnelling configuration.""" @@ -1034,6 +1040,7 @@ async def test_configure_secure_tunnel_manual(hass: HomeAssistant, knx_setup) -> CONF_KNX_SECURE_DEVICE_AUTHENTICATION: "device_auth", }, ) + await hass.async_block_till_done() assert secure_tunnel_manual["type"] is FlowResultType.CREATE_ENTRY assert secure_tunnel_manual["data"] == { **DEFAULT_ENTRY_DATA, @@ -1079,6 +1086,7 @@ async def test_configure_secure_knxkeys(hass: HomeAssistant, knx_setup) -> None: {CONF_KNX_TUNNEL_ENDPOINT_IA: CONF_KNX_AUTOMATIC}, ) + await hass.async_block_till_done() assert secure_knxkeys["type"] is FlowResultType.CREATE_ENTRY assert secure_knxkeys["data"] == { **DEFAULT_ENTRY_DATA, @@ -1193,6 +1201,7 @@ async def test_options_flow_connection_type( CONF_KNX_GATEWAY: str(gateway), }, ) + await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert not result3["data"] assert mock_config_entry.data == { @@ -1298,6 +1307,7 @@ async def test_options_flow_secure_manual_to_keyfile( {CONF_KNX_TUNNEL_ENDPOINT_IA: "1.0.1"}, ) + await hass.async_block_till_done() assert secure_knxkeys["type"] is FlowResultType.CREATE_ENTRY assert mock_config_entry.data == { **DEFAULT_ENTRY_DATA, @@ -1342,6 +1352,7 @@ async def test_options_communication_settings( CONF_KNX_TELEGRAM_LOG_SIZE: 3000, }, ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert not result2.get("data") assert mock_config_entry.data == { @@ -1394,6 +1405,7 @@ async def test_options_update_keyfile(hass: HomeAssistant, knx_setup) -> None: CONF_KNX_KNXKEY_PASSWORD: "password", }, ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert not result2.get("data") assert mock_config_entry.data == { @@ -1451,6 +1463,7 @@ async def test_options_keyfile_upload(hass: HomeAssistant, knx_setup) -> None: CONF_KNX_TUNNEL_ENDPOINT_IA: "1.0.1", }, ) + await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert not result3.get("data") assert mock_config_entry.data == { diff --git a/tests/components/knx/test_config_store.py b/tests/components/knx/test_config_store.py deleted file mode 100644 index 116f4b5d839..00000000000 --- a/tests/components/knx/test_config_store.py +++ /dev/null @@ -1,412 +0,0 @@ -"""Test KNX config store.""" - -from typing import Any - -import pytest - -from homeassistant.components.knx.storage.config_store import ( - STORAGE_KEY as KNX_CONFIG_STORAGE_KEY, -) -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import KnxEntityGenerator -from .conftest import KNXTestKit - -from tests.typing import WebSocketGenerator - - -async def test_create_entity( - hass: HomeAssistant, - knx: KNXTestKit, - hass_ws_client: WebSocketGenerator, - hass_storage: dict[str, Any], - create_ui_entity: KnxEntityGenerator, -) -> None: - """Test entity creation.""" - await knx.setup_integration({}) - client = await hass_ws_client(hass) - - test_name = "Test no device" - test_entity = await create_ui_entity( - platform=Platform.SWITCH, - knx_data={"ga_switch": {"write": "1/2/3"}}, - entity_data={"name": test_name}, - ) - - # Test if entity is correctly stored in registry - await client.send_json_auto_id({"type": "knx/get_entity_entries"}) - res = await client.receive_json() - assert res["success"], res - assert res["result"] == [ - test_entity.extended_dict, - ] - # Test if entity is correctly stored in config store - test_storage_data = next( - iter( - hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"]["switch"].values() - ) - ) - assert test_storage_data == { - "entity": { - "name": test_name, - "device_info": None, - "entity_category": None, - }, - "knx": { - "ga_switch": {"write": "1/2/3", "state": None, "passive": []}, - "invert": False, - "respond_to_read": False, - "sync_state": True, - }, - } - - -async def test_create_entity_error( - hass: HomeAssistant, - knx: KNXTestKit, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test unsuccessful entity creation.""" - await knx.setup_integration({}) - client = await hass_ws_client(hass) - - # create entity with invalid platform - await client.send_json_auto_id( - { - "type": "knx/create_entity", - "platform": "invalid_platform", - "data": { - "entity": {"name": "Test invalid platform"}, - "knx": {"ga_switch": {"write": "1/2/3"}}, - }, - } - ) - res = await client.receive_json() - assert res["success"], res - assert not res["result"]["success"] - assert res["result"]["errors"][0]["path"] == ["platform"] - assert res["result"]["error_base"].startswith("expected Platform or one of") - - # create entity with unsupported platform - await client.send_json_auto_id( - { - "type": "knx/create_entity", - "platform": Platform.TTS, # "tts" is not a supported platform (and is unlikely to ever be) - "data": { - "entity": {"name": "Test invalid platform"}, - "knx": {"ga_switch": {"write": "1/2/3"}}, - }, - } - ) - res = await client.receive_json() - assert res["success"], res - assert not res["result"]["success"] - assert res["result"]["errors"][0]["path"] == ["platform"] - assert res["result"]["error_base"].startswith("value must be one of") - - -async def test_update_entity( - hass: HomeAssistant, - knx: KNXTestKit, - entity_registry: er.EntityRegistry, - hass_ws_client: WebSocketGenerator, - hass_storage: dict[str, Any], - create_ui_entity: KnxEntityGenerator, -) -> None: - """Test entity update.""" - await knx.setup_integration({}) - client = await hass_ws_client(hass) - - test_entity = await create_ui_entity( - platform=Platform.SWITCH, - knx_data={"ga_switch": {"write": "1/2/3"}}, - entity_data={"name": "Test"}, - ) - test_entity_id = test_entity.entity_id - - # update entity - new_name = "Updated name" - new_ga_switch_write = "4/5/6" - await client.send_json_auto_id( - { - "type": "knx/update_entity", - "platform": Platform.SWITCH, - "entity_id": test_entity_id, - "data": { - "entity": {"name": new_name}, - "knx": {"ga_switch": {"write": new_ga_switch_write}}, - }, - } - ) - res = await client.receive_json() - assert res["success"], res - assert 
res["result"]["success"] - - entity = entity_registry.async_get(test_entity_id) - assert entity - assert entity.original_name == new_name - - assert ( - hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"]["switch"][ - test_entity.unique_id - ]["knx"]["ga_switch"]["write"] - == new_ga_switch_write - ) - - -async def test_update_entity_error( - hass: HomeAssistant, - knx: KNXTestKit, - hass_ws_client: WebSocketGenerator, - create_ui_entity: KnxEntityGenerator, -) -> None: - """Test entity update.""" - await knx.setup_integration({}) - client = await hass_ws_client(hass) - - test_entity = await create_ui_entity( - platform=Platform.SWITCH, - knx_data={"ga_switch": {"write": "1/2/3"}}, - entity_data={"name": "Test"}, - ) - - # update unsupported platform - new_name = "Updated name" - new_ga_switch_write = "4/5/6" - await client.send_json_auto_id( - { - "type": "knx/update_entity", - "platform": Platform.TTS, - "entity_id": test_entity.entity_id, - "data": { - "entity": {"name": new_name}, - "knx": {"ga_switch": {"write": new_ga_switch_write}}, - }, - } - ) - res = await client.receive_json() - assert res["success"], res - assert not res["result"]["success"] - assert res["result"]["errors"][0]["path"] == ["platform"] - assert res["result"]["error_base"].startswith("value must be one of") - - # entity not found - await client.send_json_auto_id( - { - "type": "knx/update_entity", - "platform": Platform.SWITCH, - "entity_id": "non_existing_entity_id", - "data": { - "entity": {"name": new_name}, - "knx": {"ga_switch": {"write": new_ga_switch_write}}, - }, - } - ) - res = await client.receive_json() - assert not res["success"], res - assert res["error"]["code"] == "home_assistant_error" - assert res["error"]["message"].startswith("Entity not found:") - - # entity not in storage - await client.send_json_auto_id( - { - "type": "knx/update_entity", - "platform": Platform.SWITCH, - # `sensor` isn't yet supported, but we only have sensor entities automatically - # created with no configuration - it doesn't ,atter for the test though - "entity_id": "sensor.knx_interface_individual_address", - "data": { - "entity": {"name": new_name}, - "knx": {"ga_switch": {"write": new_ga_switch_write}}, - }, - } - ) - res = await client.receive_json() - assert not res["success"], res - assert res["error"]["code"] == "home_assistant_error" - assert res["error"]["message"].startswith("Entity not found in storage") - - -async def test_delete_entity( - hass: HomeAssistant, - knx: KNXTestKit, - entity_registry: er.EntityRegistry, - hass_ws_client: WebSocketGenerator, - hass_storage: dict[str, Any], - create_ui_entity: KnxEntityGenerator, -) -> None: - """Test entity deletion.""" - await knx.setup_integration({}) - client = await hass_ws_client(hass) - - test_entity = await create_ui_entity( - platform=Platform.SWITCH, - knx_data={"ga_switch": {"write": "1/2/3"}}, - entity_data={"name": "Test"}, - ) - test_entity_id = test_entity.entity_id - - # delete entity - await client.send_json_auto_id( - { - "type": "knx/delete_entity", - "entity_id": test_entity_id, - } - ) - res = await client.receive_json() - assert res["success"], res - - assert not entity_registry.async_get(test_entity_id) - assert not hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"].get("switch") - - -async def test_delete_entity_error( - hass: HomeAssistant, - knx: KNXTestKit, - entity_registry: er.EntityRegistry, - hass_ws_client: WebSocketGenerator, - hass_storage: dict[str, Any], -) -> None: - """Test unsuccessful entity deletion.""" - await 
knx.setup_integration({}) - client = await hass_ws_client(hass) - - # delete unknown entity - await client.send_json_auto_id( - { - "type": "knx/delete_entity", - "entity_id": "switch.non_existing_entity", - } - ) - res = await client.receive_json() - assert not res["success"], res - assert res["error"]["code"] == "home_assistant_error" - assert res["error"]["message"].startswith("Entity not found") - - # delete entity not in config store - test_entity_id = "sensor.knx_interface_individual_address" - assert entity_registry.async_get(test_entity_id) - await client.send_json_auto_id( - { - "type": "knx/delete_entity", - "entity_id": test_entity_id, - } - ) - res = await client.receive_json() - assert not res["success"], res - assert res["error"]["code"] == "home_assistant_error" - assert res["error"]["message"].startswith("Entity not found") - - -async def test_get_entity_config( - hass: HomeAssistant, - knx: KNXTestKit, - hass_ws_client: WebSocketGenerator, - create_ui_entity: KnxEntityGenerator, -) -> None: - """Test entity config retrieval.""" - await knx.setup_integration({}) - client = await hass_ws_client(hass) - - test_entity = await create_ui_entity( - platform=Platform.SWITCH, - knx_data={"ga_switch": {"write": "1/2/3"}}, - entity_data={"name": "Test"}, - ) - - await client.send_json_auto_id( - { - "type": "knx/get_entity_config", - "entity_id": test_entity.entity_id, - } - ) - res = await client.receive_json() - assert res["success"], res - assert res["result"]["platform"] == Platform.SWITCH - assert res["result"]["data"] == { - "entity": { - "name": "Test", - "device_info": None, - "entity_category": None, - }, - "knx": { - "ga_switch": {"write": "1/2/3", "passive": [], "state": None}, - "respond_to_read": False, - "invert": False, - "sync_state": True, - }, - } - - -@pytest.mark.parametrize( - ("test_entity_id", "error_message_start"), - [ - ("switch.non_existing_entity", "Entity not found"), - ("sensor.knx_interface_individual_address", "Entity data not found"), - ], -) -async def test_get_entity_config_error( - hass: HomeAssistant, - knx: KNXTestKit, - hass_ws_client: WebSocketGenerator, - test_entity_id: str, - error_message_start: str, -) -> None: - """Test entity config retrieval errors.""" - await knx.setup_integration({}) - client = await hass_ws_client(hass) - - await client.send_json_auto_id( - { - "type": "knx/get_entity_config", - "entity_id": test_entity_id, - } - ) - res = await client.receive_json() - assert not res["success"], res - assert res["error"]["code"] == "home_assistant_error" - assert res["error"]["message"].startswith(error_message_start) - - -async def test_validate_entity( - hass: HomeAssistant, - knx: KNXTestKit, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test entity validation.""" - await knx.setup_integration({}) - client = await hass_ws_client(hass) - - await client.send_json_auto_id( - { - "type": "knx/validate_entity", - "platform": Platform.SWITCH, - "data": { - "entity": {"name": "test_name"}, - "knx": {"ga_switch": {"write": "1/2/3"}}, - }, - } - ) - res = await client.receive_json() - assert res["success"], res - assert res["result"]["success"] is True - - # invalid data - await client.send_json_auto_id( - { - "type": "knx/validate_entity", - "platform": Platform.SWITCH, - "data": { - "entity": {"name": "test_name"}, - "knx": {"ga_switch": {}}, - }, - } - ) - res = await client.receive_json() - assert res["success"], res - assert res["result"]["success"] is False - assert res["result"]["errors"][0]["path"] == ["data", "knx", 
"ga_switch", "write"] - assert res["result"]["errors"][0]["error_message"] == "required key not provided" - assert res["result"]["error_base"].startswith("required key not provided") diff --git a/tests/components/knx/test_cover.py b/tests/components/knx/test_cover.py index 0604b575c5b..2d2b72e9015 100644 --- a/tests/components/knx/test_cover.py +++ b/tests/components/knx/test_cover.py @@ -1,8 +1,7 @@ """Test KNX cover.""" -from homeassistant.components.cover import CoverState from homeassistant.components.knx.schema import CoverSchema -from homeassistant.const import CONF_NAME +from homeassistant.const import CONF_NAME, STATE_CLOSING from homeassistant.core import HomeAssistant from .conftest import KNXTestKit @@ -73,7 +72,7 @@ async def test_cover_basic(hass: HomeAssistant, knx: KNXTestKit) -> None: knx.assert_state( "cover.test", - CoverState.CLOSING, + STATE_CLOSING, ) assert len(events) == 1 diff --git a/tests/components/knx/test_date.py b/tests/components/knx/test_date.py index 1e6e5102bcf..d3b1ff2058e 100644 --- a/tests/components/knx/test_date.py +++ b/tests/components/knx/test_date.py @@ -1,10 +1,6 @@ """Test KNX date.""" -from homeassistant.components.date import ( - ATTR_DATE, - DOMAIN as DATE_DOMAIN, - SERVICE_SET_VALUE, -) +from homeassistant.components.date import ATTR_DATE, DOMAIN, SERVICE_SET_VALUE from homeassistant.components.knx.const import CONF_RESPOND_TO_READ, KNX_ADDRESS from homeassistant.components.knx.schema import DateSchema from homeassistant.const import CONF_NAME @@ -28,7 +24,7 @@ async def test_date(hass: HomeAssistant, knx: KNXTestKit) -> None: ) # set value await hass.services.async_call( - DATE_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, {"entity_id": "date.test", ATTR_DATE: "1999-03-31"}, blocking=True, diff --git a/tests/components/knx/test_datetime.py b/tests/components/knx/test_datetime.py index 025145ad1a3..c8c6bd4f346 100644 --- a/tests/components/knx/test_datetime.py +++ b/tests/components/knx/test_datetime.py @@ -1,10 +1,6 @@ """Test KNX date.""" -from homeassistant.components.datetime import ( - ATTR_DATETIME, - DOMAIN as DATETIME_DOMAIN, - SERVICE_SET_VALUE, -) +from homeassistant.components.datetime import ATTR_DATETIME, DOMAIN, SERVICE_SET_VALUE from homeassistant.components.knx.const import CONF_RESPOND_TO_READ, KNX_ADDRESS from homeassistant.components.knx.schema import DateTimeSchema from homeassistant.const import CONF_NAME @@ -31,15 +27,14 @@ async def test_datetime(hass: HomeAssistant, knx: KNXTestKit) -> None: ) # set value await hass.services.async_call( - DATETIME_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, {"entity_id": "datetime.test", ATTR_DATETIME: "2020-01-02T03:04:05+00:00"}, blocking=True, ) await knx.assert_write( test_address, - # service call in UTC, telegram in local time - (0x78, 0x01, 0x01, 0x13, 0x04, 0x05, 0x24, 0x00), + (0x78, 0x01, 0x01, 0x73, 0x04, 0x05, 0x20, 0x80), ) state = hass.states.get("datetime.test") assert state.state == "2020-01-02T03:04:05+00:00" @@ -79,7 +74,7 @@ async def test_date_restore_and_respond(hass: HomeAssistant, knx: KNXTestKit) -> await knx.receive_read(test_address) await knx.assert_response( test_address, - (0x7A, 0x03, 0x03, 0x04, 0x04, 0x05, 0x24, 0x00), + (0x7A, 0x03, 0x03, 0x84, 0x04, 0x05, 0x20, 0x80), ) # don't respond to passive address diff --git a/tests/components/knx/test_device.py b/tests/components/knx/test_device.py deleted file mode 100644 index 04ff02f0611..00000000000 --- a/tests/components/knx/test_device.py +++ /dev/null @@ -1,78 +0,0 @@ -"""Test KNX devices.""" - -from typing import Any 
- -from homeassistant.components.knx.const import DOMAIN -from homeassistant.components.knx.storage.config_store import ( - STORAGE_KEY as KNX_CONFIG_STORAGE_KEY, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.setup import async_setup_component - -from .conftest import KNXTestKit - -from tests.typing import WebSocketGenerator - - -async def test_create_device( - hass: HomeAssistant, - knx: KNXTestKit, - device_registry: dr.DeviceRegistry, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test device creation.""" - await knx.setup_integration({}) - client = await hass_ws_client(hass) - - await client.send_json_auto_id( - { - "type": "knx/create_device", - "name": "Test Device", - } - ) - res = await client.receive_json() - assert res["success"], res - assert res["result"]["name"] == "Test Device" - assert res["result"]["manufacturer"] == "KNX" - assert res["result"]["identifiers"] - assert res["result"]["config_entries"][0] == knx.mock_config_entry.entry_id - - device_identifier = res["result"]["identifiers"][0][1] - assert device_registry.async_get_device({(DOMAIN, device_identifier)}) - device_id = res["result"]["id"] - assert device_registry.async_get(device_id) - - -async def test_remove_device( - hass: HomeAssistant, - knx: KNXTestKit, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - hass_ws_client: WebSocketGenerator, - load_config_store: None, - hass_storage: dict[str, Any], -) -> None: - """Test device removal.""" - assert await async_setup_component(hass, "config", {}) - await knx.setup_integration({}) - client = await hass_ws_client(hass) - - await knx.assert_read("1/0/21", response=True, ignore_order=True) # test light - await knx.assert_read("1/0/45", response=True, ignore_order=True) # test switch - - assert hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"].get("switch") - test_device = device_registry.async_get_device( - {(DOMAIN, "knx_vdev_4c80a564f5fe5da701ed293966d6384d")} - ) - device_id = test_device.id - device_entities = entity_registry.entities.get_entries_for_device_id(device_id) - assert len(device_entities) == 1 - - response = await client.remove_device(device_id, knx.mock_config_entry.entry_id) - assert response["success"] - assert not device_registry.async_get_device( - {(DOMAIN, "knx_vdev_4c80a564f5fe5da701ed293966d6384d")} - ) - assert not entity_registry.entities.get_entries_for_device_id(device_id) - assert not hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"].get("switch") diff --git a/tests/components/knx/test_device_trigger.py b/tests/components/knx/test_device_trigger.py index e5f776a9404..136dddefaab 100644 --- a/tests/components/knx/test_device_trigger.py +++ b/tests/components/knx/test_device_trigger.py @@ -18,12 +18,18 @@ from homeassistant.setup import async_setup_component from .conftest import KNXTestKit -from tests.common import async_get_device_automations +from tests.common import async_get_device_automations, async_mock_service + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") async def test_if_fires_on_telegram( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, knx: KNXTestKit, ) -> None: @@ -92,31 +98,31 @@ async def test_if_fires_on_telegram( # "specific" shall ignore destination address await 
knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(service_calls) == 1 - test_call = service_calls.pop() + assert len(calls) == 1 + test_call = calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["id"] == 0 await knx.receive_write("1/2/4", (0x03, 0x2F)) - assert len(service_calls) == 2 - test_call = service_calls.pop() + assert len(calls) == 2 + test_call = calls.pop() assert test_call.data["specific"] == "telegram - 1/2/4" assert test_call.data["id"] == "test-id" - test_call = service_calls.pop() + test_call = calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 # "specific" shall ignore GroupValueRead await knx.receive_read("1/2/4") - assert len(service_calls) == 1 - test_call = service_calls.pop() + assert len(calls) == 1 + test_call = calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 async def test_default_if_fires_on_telegram( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, knx: KNXTestKit, ) -> None: @@ -173,34 +179,34 @@ async def test_default_if_fires_on_telegram( ) await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(service_calls) == 1 - test_call = service_calls.pop() + assert len(calls) == 1 + test_call = calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["id"] == 0 await knx.receive_write("1/2/4", (0x03, 0x2F)) - assert len(service_calls) == 2 - test_call = service_calls.pop() + assert len(calls) == 2 + test_call = calls.pop() assert test_call.data["specific"] == "telegram - 1/2/4" assert test_call.data["id"] == "test-id" - test_call = service_calls.pop() + test_call = calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 # "specific" shall catch GroupValueRead as it is not set explicitly await knx.receive_read("1/2/4") - assert len(service_calls) == 2 - test_call = service_calls.pop() + assert len(calls) == 2 + test_call = calls.pop() assert test_call.data["specific"] == "telegram - 1/2/4" assert test_call.data["id"] == "test-id" - test_call = service_calls.pop() + test_call = calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 async def test_remove_device_trigger( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, knx: KNXTestKit, ) -> None: @@ -235,8 +241,8 @@ async def test_remove_device_trigger( ) await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(service_calls) == 1 - assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(calls) == 1 + assert calls.pop().data["catch_all"] == "telegram - 0/0/1" await hass.services.async_call( automation.DOMAIN, @@ -244,10 +250,8 @@ async def test_remove_device_trigger( {ATTR_ENTITY_ID: f"automation.{automation_name}"}, blocking=True, ) - assert len(service_calls) == 1 - await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(service_calls) == 1 + assert len(calls) == 0 async def test_get_triggers( @@ -391,6 +395,7 @@ async def test_invalid_device_trigger( ] }, ) + await hass.async_block_till_done() assert ( "Unnamed automation failed to setup triggers and has been disabled: " "extra keys not allowed @ data['invalid']. 
Got None" diff --git a/tests/components/knx/test_events.py b/tests/components/knx/test_events.py index 2228781ba89..ddb9d50240c 100644 --- a/tests/components/knx/test_events.py +++ b/tests/components/knx/test_events.py @@ -31,6 +31,7 @@ async def test_knx_event( events = async_capture_events(hass, "knx_event") async def test_event_data(address, payload, value=None): + await hass.async_block_till_done() assert len(events) == 1 event = events.pop() assert event.data["data"] == payload @@ -68,6 +69,7 @@ async def test_knx_event( ) # no event received + await hass.async_block_till_done() assert len(events) == 0 # receive telegrams for group addresses matching the filter @@ -99,6 +101,7 @@ async def test_knx_event( await knx.receive_write("0/5/0", True) await knx.receive_write("1/7/0", True) await knx.receive_write("2/6/6", True) + await hass.async_block_till_done() assert len(events) == 0 # receive telegrams with wrong payload length diff --git a/tests/components/knx/test_expose.py b/tests/components/knx/test_expose.py index 0fd790a3e33..e0b4c78e322 100644 --- a/tests/components/knx/test_expose.py +++ b/tests/components/knx/test_expose.py @@ -1,9 +1,9 @@ """Test KNX expose.""" from datetime import timedelta +import time +from unittest.mock import patch -from freezegun import freeze_time -from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.knx import CONF_KNX_EXPOSE, DOMAIN, KNX_ADDRESS @@ -15,10 +15,11 @@ from homeassistant.const import ( CONF_VALUE_TEMPLATE, ) from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util from .conftest import KNXTestKit -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed_exact async def test_binary_expose(hass: HomeAssistant, knx: KNXTestKit) -> None: @@ -108,11 +109,6 @@ async def test_expose_attribute(hass: HomeAssistant, knx: KNXTestKit) -> None: await hass.async_block_till_done() await knx.assert_telegram_count(0) - # Ignore "unavailable" state - hass.states.async_set(entity_id, "unavailable", {attribute: None}) - await hass.async_block_till_done() - await knx.assert_telegram_count(0) - async def test_expose_attribute_with_default( hass: HomeAssistant, knx: KNXTestKit @@ -136,7 +132,7 @@ async def test_expose_attribute_with_default( await knx.receive_read("1/1/8") await knx.assert_response("1/1/8", (0,)) - # Change state to "on"; no attribute -> default + # Change state to "on"; no attribute hass.states.async_set(entity_id, "on", {}) await hass.async_block_till_done() await knx.assert_write("1/1/8", (0,)) @@ -151,11 +147,6 @@ async def test_expose_attribute_with_default( await hass.async_block_till_done() await knx.assert_no_telegram() - # Use default for "unavailable" state - hass.states.async_set(entity_id, "unavailable") - await hass.async_block_till_done() - await knx.assert_write("1/1/8", (0,)) - # Change state and attribute hass.states.async_set(entity_id, "on", {attribute: 3}) await hass.async_block_till_done() @@ -216,9 +207,7 @@ async def test_expose_string(hass: HomeAssistant, knx: KNXTestKit) -> None: ) -async def test_expose_cooldown( - hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory -) -> None: +async def test_expose_cooldown(hass: HomeAssistant, knx: KNXTestKit) -> None: """Test an expose with cooldown.""" cooldown_time = 2 entity_id = "fake.entity" @@ -246,8 +235,9 @@ async def test_expose_cooldown( await hass.async_block_till_done() await knx.assert_no_telegram() # Wait for cooldown to pass - 
freezer.tick(timedelta(seconds=cooldown_time)) - async_fire_time_changed(hass) + async_fire_time_changed_exact( + hass, dt_util.utcnow() + timedelta(seconds=cooldown_time) + ) await hass.async_block_till_done() await knx.assert_write("1/1/8", (3,)) @@ -300,18 +290,8 @@ async def test_expose_value_template( assert "Error rendering value template for KNX expose" in caplog.text -@pytest.mark.parametrize( - "invalid_attribute", - [ - 101.0, - "invalid", # can't cast to float - ], -) async def test_expose_conversion_exception( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - knx: KNXTestKit, - invalid_attribute: str, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, knx: KNXTestKit ) -> None: """Test expose throws exception.""" @@ -333,47 +313,39 @@ async def test_expose_conversion_exception( await knx.receive_read("1/1/8") await knx.assert_response("1/1/8", (3,)) - caplog.clear() # Change attribute: Expect no exception hass.states.async_set( entity_id, "on", - {attribute: invalid_attribute}, + {attribute: 101}, ) await hass.async_block_till_done() await knx.assert_no_telegram() assert ( - f'Could not expose fake.entity fake_attribute value "{invalid_attribute}" to KNX:' + 'Could not expose fake.entity fake_attribute value "101.0" to KNX:' in caplog.text ) -@freeze_time("2022-1-7 9:13:14") -@pytest.mark.parametrize( - ("time_type", "raw"), - [ - ("time", (0xA9, 0x0D, 0x0E)), # localtime includes day of week - ("date", (0x07, 0x01, 0x16)), - ("datetime", (0x7A, 0x1, 0x7, 0xA9, 0xD, 0xE, 0x20, 0xC0)), - ], -) +@patch("time.localtime") async def test_expose_with_date( - hass: HomeAssistant, knx: KNXTestKit, time_type: str, raw: tuple[int, ...] + localtime, hass: HomeAssistant, knx: KNXTestKit ) -> None: """Test an expose with a date.""" + localtime.return_value = time.struct_time([2022, 1, 7, 9, 13, 14, 6, 0, 0]) await knx.setup_integration( { CONF_KNX_EXPOSE: { - CONF_TYPE: time_type, + CONF_TYPE: "datetime", KNX_ADDRESS: "1/1/8", } } ) - await knx.assert_write("1/1/8", raw) + await knx.assert_write("1/1/8", (0x7A, 0x1, 0x7, 0xE9, 0xD, 0xE, 0x20, 0x80)) await knx.receive_read("1/1/8") - await knx.assert_response("1/1/8", raw) + await knx.assert_response("1/1/8", (0x7A, 0x1, 0x7, 0xE9, 0xD, 0xE, 0x20, 0x80)) entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 diff --git a/tests/components/knx/test_init.py b/tests/components/knx/test_init.py index 48cc46ef1ee..a317a6a298c 100644 --- a/tests/components/knx/test_init.py +++ b/tests/components/knx/test_init.py @@ -284,6 +284,7 @@ async def test_async_remove_entry( assert await hass.config_entries.async_remove(config_entry.entry_id) assert unlink_mock.call_count == 3 rmdir_mock.assert_called_once() + await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/knx/test_interface_device.py b/tests/components/knx/test_interface_device.py index 79114d4ffd5..6cf5d8026b9 100644 --- a/tests/components/knx/test_interface_device.py +++ b/tests/components/knx/test_interface_device.py @@ -1,28 +1,23 @@ -"""Test KNX interface device.""" +"""Test KNX scene.""" from unittest.mock import patch -from freezegun.api import FrozenDateTimeFactory from xknx.core import XknxConnectionState, XknxConnectionType from xknx.telegram import IndividualAddress from homeassistant.components.knx.sensor import SCAN_INTERVAL from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from 
homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.setup import async_setup_component +from homeassistant.helpers import entity_registry as er +from homeassistant.util import dt as dt_util from .conftest import KNXTestKit from tests.common import async_capture_events, async_fire_time_changed -from tests.typing import WebSocketGenerator async def test_diagnostic_entities( - hass: HomeAssistant, - knx: KNXTestKit, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, + hass: HomeAssistant, knx: KNXTestKit, entity_registry: er.EntityRegistry ) -> None: """Test diagnostic entities.""" await knx.setup_integration({}) @@ -53,8 +48,7 @@ async def test_diagnostic_entities( knx.xknx.connection_manager.cemi_count_outgoing_error = 2 events = async_capture_events(hass, "state_changed") - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) await hass.async_block_till_done() assert len(events) == 3 # 5 polled sensors - 2 disabled @@ -70,19 +64,25 @@ async def test_diagnostic_entities( ): assert hass.states.get(entity_id).state == test_state - knx.xknx.connection_manager.connection_state_changed( + await knx.xknx.connection_manager.connection_state_changed( state=XknxConnectionState.DISCONNECTED ) await hass.async_block_till_done() + await hass.async_block_till_done() + await hass.async_block_till_done() + await hass.async_block_till_done() assert len(events) == 4 # 3 not always_available + 3 force_update - 2 disabled events.clear() knx.xknx.current_address = IndividualAddress("1.1.1") - knx.xknx.connection_manager.connection_state_changed( + await knx.xknx.connection_manager.connection_state_changed( state=XknxConnectionState.CONNECTED, connection_type=XknxConnectionType.TUNNEL_UDP, ) await hass.async_block_till_done() + await hass.async_block_till_done() + await hass.async_block_till_done() + await hass.async_block_till_done() assert len(events) == 6 # all diagnostic sensors - counters are reset on connect for entity_id, test_state in ( @@ -109,29 +109,5 @@ async def test_removed_entity( "sensor.knx_interface_connection_established", disabled_by=er.RegistryEntryDisabler.USER, ) + await hass.async_block_till_done() unregister_mock.assert_called_once() - - -async def test_remove_interface_device( - hass: HomeAssistant, - knx: KNXTestKit, - device_registry: dr.DeviceRegistry, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test device removal.""" - assert await async_setup_component(hass, "config", {}) - await knx.setup_integration({}) - client = await hass_ws_client(hass) - knx_devices = device_registry.devices.get_devices_for_config_entry_id( - knx.mock_config_entry.entry_id - ) - assert len(knx_devices) == 1 - assert knx_devices[0].name == "KNX Interface" - device_id = knx_devices[0].id - # interface device can't be removed - res = await client.remove_device(device_id, knx.mock_config_entry.entry_id) - assert not res["success"] - assert ( - res["error"]["message"] - == "Failed to remove device entry, rejected by integration" - ) diff --git a/tests/components/knx/test_knx_selectors.py b/tests/components/knx/test_knx_selectors.py deleted file mode 100644 index 7b2f09af84b..00000000000 --- a/tests/components/knx/test_knx_selectors.py +++ /dev/null @@ -1,128 +0,0 @@ -"""Test KNX selectors.""" - -from typing import Any - -import pytest -import voluptuous as vol - -from homeassistant.components.knx.const import ColorTempModes -from 
homeassistant.components.knx.storage.knx_selector import GASelector - -INVALID = "invalid" - - -@pytest.mark.parametrize( - ("selector_config", "data", "expected"), - [ - ( - {}, - {}, - {"write": None, "state": None, "passive": []}, - ), - ( - {}, - {"write": "1/2/3"}, - {"write": "1/2/3", "state": None, "passive": []}, - ), - ( - {}, - {"state": "1/2/3"}, - {"write": None, "state": "1/2/3", "passive": []}, - ), - ( - {}, - {"passive": ["1/2/3"]}, - {"write": None, "state": None, "passive": ["1/2/3"]}, - ), - ( - {}, - {"write": "1", "state": 2, "passive": ["1/2/3"]}, - {"write": "1", "state": 2, "passive": ["1/2/3"]}, - ), - ( - {"write": False}, - {"write": "1/2/3"}, - {"state": None, "passive": []}, - ), - ( - {"write": False}, - {"state": "1/2/3"}, - {"state": "1/2/3", "passive": []}, - ), - ( - {"write": False}, - {"passive": ["1/2/3"]}, - {"state": None, "passive": ["1/2/3"]}, - ), - ( - {"passive": False}, - {"passive": ["1/2/3"]}, - {"write": None, "state": None}, - ), - ( - {"passive": False}, - {"write": "1/2/3"}, - {"write": "1/2/3", "state": None}, - ), - # required keys - ( - {"write_required": True}, - {}, - INVALID, - ), - ( - {"state_required": True}, - {}, - INVALID, - ), - ( - {"write_required": True}, - {"write": "1/2/3"}, - {"write": "1/2/3", "state": None, "passive": []}, - ), - ( - {"state_required": True}, - {"state": "1/2/3"}, - {"write": None, "state": "1/2/3", "passive": []}, - ), - ( - {"write_required": True}, - {"state": "1/2/3"}, - INVALID, - ), - ( - {"state_required": True}, - {"write": "1/2/3"}, - INVALID, - ), - # dpt key - ( - {"dpt": ColorTempModes}, - {"write": "1/2/3"}, - INVALID, - ), - ( - {"dpt": ColorTempModes}, - {"write": "1/2/3", "dpt": "7.600"}, - {"write": "1/2/3", "state": None, "passive": [], "dpt": "7.600"}, - ), - ( - {"dpt": ColorTempModes}, - {"write": "1/2/3", "state": None, "passive": [], "dpt": "invalid"}, - INVALID, - ), - ], -) -def test_ga_selector( - selector_config: dict[str, Any], - data: dict[str, Any], - expected: str | dict[str, Any], -) -> None: - """Test GASelector.""" - selector = GASelector(**selector_config) - if expected == INVALID: - with pytest.raises(vol.Invalid): - selector(data) - else: - result = selector(data) - assert result == expected diff --git a/tests/components/knx/test_light.py b/tests/components/knx/test_light.py index 88f76a163d5..a14d1bb32ae 100644 --- a/tests/components/knx/test_light.py +++ b/tests/components/knx/test_light.py @@ -4,12 +4,10 @@ from __future__ import annotations from datetime import timedelta -from freezegun.api import FrozenDateTimeFactory -import pytest from xknx.core import XknxConnectionState from xknx.devices.light import Light as XknxLight -from homeassistant.components.knx.const import CONF_STATE_ADDRESS, KNX_ADDRESS, Platform +from homeassistant.components.knx.const import CONF_STATE_ADDRESS, KNX_ADDRESS from homeassistant.components.knx.schema import LightSchema from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -19,11 +17,10 @@ from homeassistant.components.light import ( ATTR_RGBW_COLOR, ColorMode, ) -from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON, EntityCategory +from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.util import dt as dt_util -from . 
import KnxEntityGenerator from .conftest import KNXTestKit from tests.common import async_fire_time_changed @@ -94,7 +91,9 @@ async def test_light_brightness(hass: HomeAssistant, knx: KNXTestKit) -> None: ) # StateUpdater initialize state await knx.assert_read(test_brightness_state) - knx.xknx.connection_manager.connection_state_changed(XknxConnectionState.CONNECTED) + await knx.xknx.connection_manager.connection_state_changed( + XknxConnectionState.CONNECTED + ) # turn on light via brightness await hass.services.async_call( "light", @@ -645,9 +644,7 @@ async def test_light_rgb_individual(hass: HomeAssistant, knx: KNXTestKit) -> Non await knx.assert_write(test_blue, (45,)) -async def test_light_rgbw_individual( - hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory -) -> None: +async def test_light_rgbw_individual(hass: HomeAssistant, knx: KNXTestKit) -> None: """Test KNX light with rgbw color in individual GAs.""" test_red = "1/1/3" test_red_state = "1/1/4" @@ -767,8 +764,9 @@ async def test_light_rgbw_individual( await knx.receive_write(test_green, (0,)) # # individual color debounce takes 0.2 seconds if not all 4 addresses received knx.assert_state("light.test", STATE_ON) - freezer.tick(timedelta(seconds=XknxLight.DEBOUNCE_TIMEOUT)) - async_fire_time_changed(hass) + async_fire_time_changed( + hass, dt_util.utcnow() + timedelta(seconds=XknxLight.DEBOUNCE_TIMEOUT) + ) await knx.xknx.task_registry.block_till_done() knx.assert_state("light.test", STATE_OFF) # turn ON from KNX @@ -1153,89 +1151,3 @@ async def test_light_rgbw_brightness(hass: HomeAssistant, knx: KNXTestKit) -> No knx.assert_state( "light.test", STATE_ON, brightness=50, rgbw_color=(100, 200, 55, 12) ) - - -async def test_light_ui_create( - hass: HomeAssistant, - knx: KNXTestKit, - create_ui_entity: KnxEntityGenerator, -) -> None: - """Test creating a light.""" - await knx.setup_integration({}) - await create_ui_entity( - platform=Platform.LIGHT, - entity_data={"name": "test"}, - knx_data={ - "ga_switch": {"write": "1/1/1", "state": "2/2/2"}, - "_light_color_mode_schema": "default", - "sync_state": True, - }, - ) - # created entity sends read-request to KNX bus - await knx.assert_read("2/2/2") - await knx.receive_response("2/2/2", True) - state = hass.states.get("light.test") - assert state.state is STATE_ON - - -@pytest.mark.parametrize( - ("color_temp_mode", "raw_ct"), - [ - ("7.600", (0x10, 0x68)), - ("9", (0x46, 0x69)), - ("5.001", (0x74,)), - ], -) -async def test_light_ui_color_temp( - hass: HomeAssistant, - knx: KNXTestKit, - create_ui_entity: KnxEntityGenerator, - color_temp_mode: str, - raw_ct: tuple[int, ...], -) -> None: - """Test creating a color-temp light.""" - await knx.setup_integration({}) - await create_ui_entity( - platform=Platform.LIGHT, - entity_data={"name": "test"}, - knx_data={ - "ga_switch": {"write": "1/1/1", "state": "2/2/2"}, - "ga_color_temp": { - "write": "3/3/3", - "dpt": color_temp_mode, - }, - "_light_color_mode_schema": "default", - "sync_state": True, - }, - ) - await knx.assert_read("2/2/2", True) - await hass.services.async_call( - "light", - "turn_on", - {"entity_id": "light.test", ATTR_COLOR_TEMP_KELVIN: 4200}, - blocking=True, - ) - await knx.assert_write("3/3/3", raw_ct) - state = hass.states.get("light.test") - assert state.state is STATE_ON - assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == pytest.approx(4200, abs=1) - - -async def test_light_ui_load( - hass: HomeAssistant, - knx: KNXTestKit, - load_config_store: None, - entity_registry: er.EntityRegistry, -) -> 
None: - """Test loading a light from storage.""" - await knx.setup_integration({}) - - await knx.assert_read("1/0/21", response=True, ignore_order=True) - # unrelated switch in config store - await knx.assert_read("1/0/45", response=True, ignore_order=True) - - state = hass.states.get("light.test") - assert state.state is STATE_ON - - entity = entity_registry.async_get("light.test") - assert entity.entity_category is EntityCategory.CONFIG diff --git a/tests/components/knx/test_notify.py b/tests/components/knx/test_notify.py index c7e33dd5fe4..94f2d579fc8 100644 --- a/tests/components/knx/test_notify.py +++ b/tests/components/knx/test_notify.py @@ -9,6 +9,82 @@ from homeassistant.core import HomeAssistant from .conftest import KNXTestKit +async def test_legacy_notify_service_simple( + hass: HomeAssistant, knx: KNXTestKit +) -> None: + """Test KNX notify can send to one device.""" + await knx.setup_integration( + { + NotifySchema.PLATFORM: { + CONF_NAME: "test", + KNX_ADDRESS: "1/0/0", + } + } + ) + await hass.async_block_till_done() + + await hass.services.async_call( + "notify", "notify", {"target": "test", "message": "I love KNX"}, blocking=True + ) + + await knx.assert_write( + "1/0/0", + (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 0, 0, 0, 0), + ) + + await hass.services.async_call( + "notify", + "notify", + { + "target": "test", + "message": "I love KNX, but this text is too long for KNX, poor KNX", + }, + blocking=True, + ) + + await knx.assert_write( + "1/0/0", + (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 44, 32, 98, 117), + ) + + +async def test_legacy_notify_service_multiple_sends_to_all_with_different_encodings( + hass: HomeAssistant, knx: KNXTestKit +) -> None: + """Test KNX notify `type` configuration.""" + await knx.setup_integration( + { + NotifySchema.PLATFORM: [ + { + CONF_NAME: "ASCII", + KNX_ADDRESS: "1/0/0", + CONF_TYPE: "string", + }, + { + CONF_NAME: "Latin-1", + KNX_ADDRESS: "1/0/1", + CONF_TYPE: "latin_1", + }, + ] + } + ) + await hass.async_block_till_done() + + await hass.services.async_call( + "notify", "notify", {"message": "Gänsefüßchen"}, blocking=True + ) + + await knx.assert_write( + "1/0/0", + # "G?nsef??chen" + (71, 63, 110, 115, 101, 102, 63, 63, 99, 104, 101, 110, 0, 0), + ) + await knx.assert_write( + "1/0/1", + (71, 228, 110, 115, 101, 102, 252, 223, 99, 104, 101, 110, 0, 0), + ) + + async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: """Test KNX notify can send to one device.""" await knx.setup_integration( @@ -19,6 +95,7 @@ async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: } } ) + await hass.services.async_call( notify.DOMAIN, notify.SERVICE_SEND_MESSAGE, diff --git a/tests/components/knx/test_repairs.py b/tests/components/knx/test_repairs.py new file mode 100644 index 00000000000..690d6e450cb --- /dev/null +++ b/tests/components/knx/test_repairs.py @@ -0,0 +1,84 @@ +"""Test repairs for KNX integration.""" + +from http import HTTPStatus + +from homeassistant.components.knx.const import DOMAIN, KNX_ADDRESS +from homeassistant.components.knx.schema import NotifySchema +from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) +from homeassistant.const import CONF_NAME +from homeassistant.core import HomeAssistant +import homeassistant.helpers.issue_registry as ir + +from .conftest import KNXTestKit + +from tests.typing import ClientSessionGenerator + + +async def 
test_knx_notify_service_issue( + hass: HomeAssistant, + knx: KNXTestKit, + hass_client: ClientSessionGenerator, + issue_registry: ir.IssueRegistry, +) -> None: + """Test the legacy notify service still works before migration and repair flow is triggered.""" + await knx.setup_integration( + { + NotifySchema.PLATFORM: { + CONF_NAME: "test", + KNX_ADDRESS: "1/0/0", + } + } + ) + http_client = await hass_client() + + # Assert no issue is present + assert len(issue_registry.issues) == 0 + + # Simulate legacy service being used + assert hass.services.has_service(NOTIFY_DOMAIN, NOTIFY_DOMAIN) + await hass.services.async_call( + NOTIFY_DOMAIN, + NOTIFY_DOMAIN, + service_data={"message": "It is too cold!", "target": "test"}, + blocking=True, + ) + await knx.assert_write( + "1/0/0", + (73, 116, 32, 105, 115, 32, 116, 111, 111, 32, 99, 111, 108, 100), + ) + + # Assert the issue is present + assert len(issue_registry.issues) == 1 + assert issue_registry.async_get_issue( + domain="notify", + issue_id=f"migrate_notify_{DOMAIN}_notify", + ) + + # Test confirm step in repair flow + resp = await http_client.post( + RepairsFlowIndexView.url, + json={"handler": "notify", "issue_id": f"migrate_notify_{DOMAIN}_notify"}, + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data["step_id"] == "confirm" + + resp = await http_client.post( + RepairsFlowResourceView.url.format(flow_id=flow_id), + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data["type"] == "create_entry" + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue( + domain="notify", + issue_id=f"migrate_notify_{DOMAIN}_notify", + ) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/knx/test_sensor.py b/tests/components/knx/test_sensor.py index 41ffcfcb5c7..22d9993b58f 100644 --- a/tests/components/knx/test_sensor.py +++ b/tests/components/knx/test_sensor.py @@ -68,21 +68,25 @@ async def test_always_callback(hass: HomeAssistant, knx: KNXTestKit) -> None: # receive initial telegram await knx.receive_write("1/1/1", (0x42,)) await knx.receive_write("2/2/2", (0x42,)) + await hass.async_block_till_done() assert len(events) == 2 # receive second telegram with identical payload # always_callback shall force state_changed event await knx.receive_write("1/1/1", (0x42,)) await knx.receive_write("2/2/2", (0x42,)) + await hass.async_block_till_done() assert len(events) == 3 # receive telegram with different payload await knx.receive_write("1/1/1", (0xFA,)) await knx.receive_write("2/2/2", (0xFA,)) + await hass.async_block_till_done() assert len(events) == 5 # receive telegram with second payload again # always_callback shall force state_changed event await knx.receive_write("1/1/1", (0xFA,)) await knx.receive_write("2/2/2", (0xFA,)) + await hass.async_block_till_done() assert len(events) == 6 diff --git a/tests/components/knx/test_services.py b/tests/components/knx/test_services.py index f70389dbc92..7f748af5ceb 100644 --- a/tests/components/knx/test_services.py +++ b/tests/components/knx/test_services.py @@ -154,6 +154,7 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: # no event registered await knx.receive_write(test_address, True) + await hass.async_block_till_done() assert len(events) == 0 # register event with `type` @@ -164,6 +165,7 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: blocking=True, ) await knx.receive_write(test_address, (0x04, 0xD2)) + await 
hass.async_block_till_done() assert len(events) == 1 typed_event = events.pop() assert typed_event.data["data"] == (0x04, 0xD2) @@ -177,6 +179,7 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: blocking=True, ) await knx.receive_write(test_address, True) + await hass.async_block_till_done() assert len(events) == 0 # register event without `type` @@ -185,6 +188,7 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: ) await knx.receive_write(test_address, True) await knx.receive_write(test_address, False) + await hass.async_block_till_done() assert len(events) == 2 untyped_event_2 = events.pop() assert untyped_event_2.data["data"] is False diff --git a/tests/components/knx/test_switch.py b/tests/components/knx/test_switch.py index bc0a6b27675..8dce4cf9c27 100644 --- a/tests/components/knx/test_switch.py +++ b/tests/components/knx/test_switch.py @@ -6,10 +6,9 @@ from homeassistant.components.knx.const import ( KNX_ADDRESS, ) from homeassistant.components.knx.schema import SwitchSchema -from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON, Platform +from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant, State -from . import KnxEntityGenerator from .conftest import KNXTestKit from tests.common import mock_restore_cache @@ -147,27 +146,3 @@ async def test_switch_restore_and_respond(hass: HomeAssistant, knx) -> None: # respond to new state await knx.receive_read(_ADDRESS) await knx.assert_response(_ADDRESS, False) - - -async def test_switch_ui_create( - hass: HomeAssistant, - knx: KNXTestKit, - create_ui_entity: KnxEntityGenerator, -) -> None: - """Test creating a switch.""" - await knx.setup_integration({}) - await create_ui_entity( - platform=Platform.SWITCH, - entity_data={"name": "test"}, - knx_data={ - "ga_switch": {"write": "1/1/1", "state": "2/2/2"}, - "respond_to_read": True, - "sync_state": True, - "invert": False, - }, - ) - # created entity sends read-request to KNX bus - await knx.assert_read("2/2/2") - await knx.receive_response("2/2/2", True) - state = hass.states.get("switch.test") - assert state.state is STATE_ON diff --git a/tests/components/knx/test_telegrams.py b/tests/components/knx/test_telegrams.py index 883e8ccbb2d..2eda718f5ac 100644 --- a/tests/components/knx/test_telegrams.py +++ b/tests/components/knx/test_telegrams.py @@ -6,10 +6,8 @@ from typing import Any import pytest -from homeassistant.components.knx.const import ( - CONF_KNX_TELEGRAM_LOG_SIZE, - KNX_MODULE_KEY, -) +from homeassistant.components.knx import DOMAIN +from homeassistant.components.knx.const import CONF_KNX_TELEGRAM_LOG_SIZE from homeassistant.components.knx.telegrams import TelegramDict from homeassistant.core import HomeAssistant @@ -41,7 +39,7 @@ MOCK_TELEGRAMS = [ "dpt_name": None, "payload": [1, 2, 3, 4], "source": "0.0.0", - "source_name": "Home Assistant", + "source_name": "", "telegramtype": "GroupValueWrite", "timestamp": MOCK_TIMESTAMP, "unit": None, @@ -78,7 +76,7 @@ async def test_store_telegam_history( ) await knx.assert_write("2/2/2", (1, 2, 3, 4)) - assert len(hass.data[KNX_MODULE_KEY].telegrams.recent_telegrams) == 2 + assert len(hass.data[DOMAIN].telegrams.recent_telegrams) == 2 with pytest.raises(KeyError): hass_storage["knx/telegrams_history.json"] @@ -95,7 +93,7 @@ async def test_load_telegam_history( """Test telegram history restoration.""" hass_storage["knx/telegrams_history.json"] = {"version": 1, "data": MOCK_TELEGRAMS} await knx.setup_integration({}) - 
loaded_telegrams = hass.data[KNX_MODULE_KEY].telegrams.recent_telegrams + loaded_telegrams = hass.data[DOMAIN].telegrams.recent_telegrams assert assert_telegram_history(loaded_telegrams) # TelegramDict "payload" is a tuple, this shall be restored when loading from JSON assert isinstance(loaded_telegrams[1]["payload"], tuple) @@ -116,4 +114,4 @@ async def test_remove_telegam_history( await knx.setup_integration({}, add_entry_to_hass=False) # Store.async_remove() is mocked by hass_storage - check that data was removed. assert "knx/telegrams_history.json" not in hass_storage - assert not hass.data[KNX_MODULE_KEY].telegrams.recent_telegrams + assert not hass.data[DOMAIN].telegrams.recent_telegrams diff --git a/tests/components/knx/test_time.py b/tests/components/knx/test_time.py index 05f84339742..9dc4c401ed8 100644 --- a/tests/components/knx/test_time.py +++ b/tests/components/knx/test_time.py @@ -2,11 +2,7 @@ from homeassistant.components.knx.const import CONF_RESPOND_TO_READ, KNX_ADDRESS from homeassistant.components.knx.schema import TimeSchema -from homeassistant.components.time import ( - ATTR_TIME, - DOMAIN as TIME_DOMAIN, - SERVICE_SET_VALUE, -) +from homeassistant.components.time import ATTR_TIME, DOMAIN, SERVICE_SET_VALUE from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant, State @@ -28,7 +24,7 @@ async def test_time(hass: HomeAssistant, knx: KNXTestKit) -> None: ) # set value await hass.services.async_call( - TIME_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, {"entity_id": "time.test", ATTR_TIME: "01:02:03"}, blocking=True, diff --git a/tests/components/knx/test_trigger.py b/tests/components/knx/test_trigger.py index 73e8b10840e..d957082de18 100644 --- a/tests/components/knx/test_trigger.py +++ b/tests/components/knx/test_trigger.py @@ -11,10 +11,18 @@ from homeassistant.setup import async_setup_component from .conftest import KNXTestKit +from tests.common import async_mock_service + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + async def test_telegram_trigger( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], knx: KNXTestKit, ) -> None: """Test telegram triggers firing.""" @@ -65,24 +73,24 @@ async def test_telegram_trigger( # "specific" shall ignore destination address await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(service_calls) == 1 - test_call = service_calls.pop() + assert len(calls) == 1 + test_call = calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["id"] == 0 await knx.receive_write("1/2/4", (0x03, 0x2F)) - assert len(service_calls) == 2 - test_call = service_calls.pop() + assert len(calls) == 2 + test_call = calls.pop() assert test_call.data["specific"] == "telegram - 1/2/4" assert test_call.data["id"] == "test-id" - test_call = service_calls.pop() + test_call = calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 # "specific" shall ignore GroupValueRead await knx.receive_read("1/2/4") - assert len(service_calls) == 1 - test_call = service_calls.pop() + assert len(calls) == 1 + test_call = calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 @@ -97,7 +105,7 @@ async def test_telegram_trigger( ) async def test_telegram_trigger_dpt_option( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], knx: KNXTestKit, 
payload: tuple[int, ...], type_option: dict[str, bool], @@ -130,16 +138,16 @@ async def test_telegram_trigger_dpt_option( ) await knx.receive_write("0/0/1", payload) - assert len(service_calls) == 1 - test_call = service_calls.pop() + assert len(calls) == 1 + test_call = calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["trigger"]["value"] == expected_value assert test_call.data["trigger"]["unit"] == expected_unit await knx.receive_read("0/0/1") - assert len(service_calls) == 1 - test_call = service_calls.pop() + assert len(calls) == 1 + test_call = calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["trigger"]["value"] is None assert test_call.data["trigger"]["unit"] is None @@ -184,7 +192,7 @@ async def test_telegram_trigger_dpt_option( ) async def test_telegram_trigger_options( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], knx: KNXTestKit, group_value_options: dict[str, bool], direction_options: dict[str, bool], @@ -217,28 +225,28 @@ async def test_telegram_trigger_options( if group_value_options.get("group_value_write", True) and direction_options.get( "incoming", True ): - assert len(service_calls) == 1 - assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(calls) == 1 + assert calls.pop().data["catch_all"] == "telegram - 0/0/1" else: - assert len(service_calls) == 0 + assert len(calls) == 0 await knx.receive_response("0/0/1", 1) if group_value_options["group_value_response"] and direction_options.get( "incoming", True ): - assert len(service_calls) == 1 - assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(calls) == 1 + assert calls.pop().data["catch_all"] == "telegram - 0/0/1" else: - assert len(service_calls) == 0 + assert len(calls) == 0 await knx.receive_read("0/0/1") if group_value_options["group_value_read"] and direction_options.get( "incoming", True ): - assert len(service_calls) == 1 - assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(calls) == 1 + assert calls.pop().data["catch_all"] == "telegram - 0/0/1" else: - assert len(service_calls) == 0 + assert len(calls) == 0 await hass.services.async_call( "knx", @@ -246,22 +254,20 @@ async def test_telegram_trigger_options( {"address": "0/0/1", "payload": True}, blocking=True, ) - assert len(service_calls) == 1 - await knx.assert_write("0/0/1", True) if ( group_value_options.get("group_value_write", True) and direction_options["outgoing"] ): - assert len(service_calls) == 2 - assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(calls) == 1 + assert calls.pop().data["catch_all"] == "telegram - 0/0/1" else: - assert len(service_calls) == 1 + assert len(calls) == 0 async def test_remove_telegram_trigger( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], knx: KNXTestKit, ) -> None: """Test for removed callback when telegram trigger not used.""" @@ -290,8 +296,8 @@ async def test_remove_telegram_trigger( ) await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(service_calls) == 1 - assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(calls) == 1 + assert calls.pop().data["catch_all"] == "telegram - 0/0/1" await hass.services.async_call( automation.DOMAIN, @@ -299,10 +305,8 @@ async def test_remove_telegram_trigger( {ATTR_ENTITY_ID: f"automation.{automation_name}"}, blocking=True, ) - assert len(service_calls) == 1 - await 
knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(service_calls) == 1 + assert len(calls) == 0 async def test_invalid_trigger( @@ -334,6 +338,7 @@ async def test_invalid_trigger( ] }, ) + await hass.async_block_till_done() assert ( "Unnamed automation failed to setup triggers and has been disabled: " "extra keys not allowed @ data['invalid']. Got None" diff --git a/tests/components/knx/test_weather.py b/tests/components/knx/test_weather.py index 5ba38d6cdf8..0adcc309252 100644 --- a/tests/components/knx/test_weather.py +++ b/tests/components/knx/test_weather.py @@ -45,12 +45,12 @@ async def test_weather(hass: HomeAssistant, knx: KNXTestKit) -> None: # brightness await knx.assert_read("1/1/6") - await knx.assert_read("1/1/8") await knx.receive_response("1/1/6", (0x7C, 0x5E)) + await knx.assert_read("1/1/8") await knx.receive_response("1/1/8", (0x7C, 0x5E)) - await knx.assert_read("1/1/5") await knx.assert_read("1/1/7") await knx.receive_response("1/1/7", (0x7C, 0x5E)) + await knx.assert_read("1/1/5") await knx.receive_response("1/1/5", (0x7C, 0x5E)) # wind speed @@ -64,10 +64,10 @@ async def test_weather(hass: HomeAssistant, knx: KNXTestKit) -> None: # alarms await knx.assert_read("1/1/2") await knx.receive_response("1/1/2", False) - await knx.assert_read("1/1/1") await knx.assert_read("1/1/3") - await knx.receive_response("1/1/1", False) await knx.receive_response("1/1/3", False) + await knx.assert_read("1/1/1") + await knx.receive_response("1/1/1", False) # day night await knx.assert_read("1/1/12") diff --git a/tests/components/knx/test_websocket.py b/tests/components/knx/test_websocket.py index b3e4b7aaa38..ca60905b0ba 100644 --- a/tests/components/knx/test_websocket.py +++ b/tests/components/knx/test_websocket.py @@ -3,11 +3,7 @@ from typing import Any from unittest.mock import patch -import pytest - -from homeassistant.components.knx.const import KNX_ADDRESS, KNX_MODULE_KEY -from homeassistant.components.knx.project import STORAGE_KEY as KNX_PROJECT_STORAGE_KEY -from homeassistant.components.knx.schema import SwitchSchema +from homeassistant.components.knx import DOMAIN, KNX_ADDRESS, SwitchSchema from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant @@ -67,7 +63,7 @@ async def test_knx_project_file_process( await knx.setup_integration({}) client = await hass_ws_client(hass) - assert not hass.data[KNX_MODULE_KEY].project.loaded + assert not hass.data[DOMAIN].project.loaded await client.send_json( { @@ -90,8 +86,7 @@ async def test_knx_project_file_process( parse_mock.assert_called_once_with() assert res["success"], res - assert hass.data[KNX_MODULE_KEY].project.loaded - assert hass_storage[KNX_PROJECT_STORAGE_KEY]["data"] == _parse_result + assert hass.data[DOMAIN].project.loaded async def test_knx_project_file_process_error( @@ -102,7 +97,7 @@ async def test_knx_project_file_process_error( """Test knx/project_file_process exception handling.""" await knx.setup_integration({}) client = await hass_ws_client(hass) - assert not hass.data[KNX_MODULE_KEY].project.loaded + assert not hass.data[DOMAIN].project.loaded await client.send_json( { @@ -123,7 +118,7 @@ async def test_knx_project_file_process_error( parse_mock.assert_called_once_with() assert res["error"], res - assert not hass.data[KNX_MODULE_KEY].project.loaded + assert not hass.data[DOMAIN].project.loaded async def test_knx_project_file_remove( @@ -131,20 +126,19 @@ async def test_knx_project_file_remove( knx: KNXTestKit, hass_ws_client: WebSocketGenerator, load_knxproj: None, - hass_storage: 
dict[str, Any], ) -> None: """Test knx/project_file_remove command.""" await knx.setup_integration({}) - assert hass_storage[KNX_PROJECT_STORAGE_KEY] client = await hass_ws_client(hass) - assert hass.data[KNX_MODULE_KEY].project.loaded + assert hass.data[DOMAIN].project.loaded await client.send_json({"id": 6, "type": "knx/project_file_remove"}) - res = await client.receive_json() + with patch("homeassistant.helpers.storage.Store.async_remove") as remove_mock: + res = await client.receive_json() + remove_mock.assert_called_once_with() assert res["success"], res - assert not hass.data[KNX_MODULE_KEY].project.loaded - assert not hass_storage.get(KNX_PROJECT_STORAGE_KEY) + assert not hass.data[DOMAIN].project.loaded async def test_knx_get_project( @@ -156,7 +150,7 @@ async def test_knx_get_project( """Test retrieval of kxnproject from store.""" await knx.setup_integration({}) client = await hass_ws_client(hass) - assert hass.data[KNX_MODULE_KEY].project.loaded + assert hass.data[DOMAIN].project.loaded await client.send_json({"id": 3, "type": "knx/get_knx_project"}) res = await client.receive_json() @@ -349,7 +343,7 @@ async def test_knx_subscribe_telegrams_command_project( assert res["event"]["destination"] == "0/1/1" assert res["event"]["destination_name"] == "percent" assert res["event"]["payload"] == 1 - assert res["event"]["value"] is None + assert res["event"]["value"] == "Error decoding value" assert res["event"]["telegramtype"] == "GroupValueWrite" assert res["event"]["source"] == "1.1.6" assert ( @@ -358,28 +352,3 @@ async def test_knx_subscribe_telegrams_command_project( ) assert res["event"]["direction"] == "Incoming" assert res["event"]["timestamp"] is not None - - -@pytest.mark.parametrize( - "endpoint", - [ - "knx/info", # sync ws-command - "knx/get_knx_project", # async ws-command - ], -) -async def test_websocket_when_config_entry_unloaded( - hass: HomeAssistant, - knx: KNXTestKit, - hass_ws_client: WebSocketGenerator, - endpoint: str, -) -> None: - """Test websocket connection when config entry is unloaded.""" - await knx.setup_integration({}) - await hass.config_entries.async_unload(knx.mock_config_entry.entry_id) - client = await hass_ws_client(hass) - - await client.send_json_auto_id({"type": endpoint}) - res = await client.receive_json() - assert not res["success"] - assert res["error"]["code"] == "home_assistant_error" - assert res["error"]["message"] == "KNX integration not loaded." 
diff --git a/tests/components/kodi/test_config_flow.py b/tests/components/kodi/test_config_flow.py index ad99067ac7a..d570654be93 100644 --- a/tests/components/kodi/test_config_flow.py +++ b/tests/components/kodi/test_config_flow.py @@ -30,7 +30,7 @@ from tests.common import MockConfigEntry @pytest.fixture -async def user_flow(hass: HomeAssistant) -> str: +async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -41,7 +41,7 @@ async def user_flow(hass: HomeAssistant) -> str: return result["flow_id"] -async def test_user_flow(hass: HomeAssistant, user_flow: str) -> None: +async def test_user_flow(hass: HomeAssistant, user_flow) -> None: """Test a successful user initiated flow.""" with ( patch( @@ -74,7 +74,7 @@ async def test_user_flow(hass: HomeAssistant, user_flow: str) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_valid_auth(hass: HomeAssistant, user_flow: str) -> None: +async def test_form_valid_auth(hass: HomeAssistant, user_flow) -> None: """Test we handle valid auth.""" with ( patch( @@ -124,7 +124,7 @@ async def test_form_valid_auth(hass: HomeAssistant, user_flow: str) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_valid_ws_port(hass: HomeAssistant, user_flow: str) -> None: +async def test_form_valid_ws_port(hass: HomeAssistant, user_flow) -> None: """Test we handle valid websocket port.""" with ( patch( @@ -180,7 +180,7 @@ async def test_form_valid_ws_port(hass: HomeAssistant, user_flow: str) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_empty_ws_port(hass: HomeAssistant, user_flow: str) -> None: +async def test_form_empty_ws_port(hass: HomeAssistant, user_flow) -> None: """Test we handle an empty websocket port input.""" with ( patch( @@ -226,7 +226,7 @@ async def test_form_empty_ws_port(hass: HomeAssistant, user_flow: str) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_invalid_auth(hass: HomeAssistant, user_flow: str) -> None: +async def test_form_invalid_auth(hass: HomeAssistant, user_flow) -> None: """Test we handle invalid auth.""" with ( patch( @@ -322,7 +322,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, user_flow: str) -> None: assert result["errors"] == {} -async def test_form_cannot_connect_http(hass: HomeAssistant, user_flow: str) -> None: +async def test_form_cannot_connect_http(hass: HomeAssistant, user_flow) -> None: """Test we handle cannot connect over HTTP error.""" with ( patch( @@ -341,7 +341,7 @@ async def test_form_cannot_connect_http(hass: HomeAssistant, user_flow: str) -> assert result["errors"] == {"base": "cannot_connect"} -async def test_form_exception_http(hass: HomeAssistant, user_flow: str) -> None: +async def test_form_exception_http(hass: HomeAssistant, user_flow) -> None: """Test we handle generic exception over HTTP.""" with ( patch( @@ -360,7 +360,7 @@ async def test_form_exception_http(hass: HomeAssistant, user_flow: str) -> None: assert result["errors"] == {"base": "unknown"} -async def test_form_cannot_connect_ws(hass: HomeAssistant, user_flow: str) -> None: +async def test_form_cannot_connect_ws(hass: HomeAssistant, user_flow) -> None: """Test we handle cannot connect over WebSocket error.""" with ( patch( @@ -423,7 +423,7 @@ async def test_form_cannot_connect_ws(hass: HomeAssistant, user_flow: str) -> No assert result["errors"] == {"base": "cannot_connect"} -async def 
test_form_exception_ws(hass: HomeAssistant, user_flow: str) -> None: +async def test_form_exception_ws(hass: HomeAssistant, user_flow) -> None: """Test we handle generic exception over WebSocket.""" with ( patch( @@ -560,7 +560,7 @@ async def test_discovery_cannot_connect_ws(hass: HomeAssistant) -> None: assert result["errors"] == {} -async def test_discovery_exception_http(hass: HomeAssistant) -> None: +async def test_discovery_exception_http(hass: HomeAssistant, user_flow) -> None: """Test we handle generic exception during discovery validation.""" with ( patch( diff --git a/tests/components/kodi/test_device_trigger.py b/tests/components/kodi/test_device_trigger.py index a54641a4234..d3de349018e 100644 --- a/tests/components/kodi/test_device_trigger.py +++ b/tests/components/kodi/test_device_trigger.py @@ -12,7 +12,11 @@ from homeassistant.setup import async_setup_component from . import init_integration -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -21,7 +25,13 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: @pytest.fixture -async def kodi_media_player(hass: HomeAssistant) -> str: +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + +@pytest.fixture +async def kodi_media_player(hass): """Get a kodi media player.""" await init_integration(hass) return f"{MP_DOMAIN}.name" @@ -67,8 +77,8 @@ async def test_get_triggers( async def test_if_fires_on_state_change( hass: HomeAssistant, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], - kodi_media_player: str, + calls: list[ServiceCall], + kodi_media_player, ) -> None: """Test for turn_on and turn_off triggers firing.""" entry = entity_registry.async_get(kodi_media_player) @@ -125,8 +135,8 @@ async def test_if_fires_on_state_change( ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == f"turn_on - {kodi_media_player} - 0" + assert len(calls) == 1 + assert calls[0].data["some"] == f"turn_on - {kodi_media_player} - 0" await hass.services.async_call( MP_DOMAIN, @@ -136,15 +146,15 @@ async def test_if_fires_on_state_change( ) await hass.async_block_till_done() - assert len(service_calls) == 4 - assert service_calls[3].data["some"] == f"turn_off - {kodi_media_player} - 0" + assert len(calls) == 2 + assert calls[1].data["some"] == f"turn_off - {kodi_media_player} - 0" async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], - kodi_media_player: str, + calls: list[ServiceCall], + kodi_media_player, ) -> None: """Test for turn_on and turn_off triggers firing.""" entry = entity_registry.async_get(kodi_media_player) @@ -184,5 +194,5 @@ async def test_if_fires_on_state_change_legacy( ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == f"turn_on - {kodi_media_player} - 0" + assert len(calls) == 1 + assert calls[0].data["some"] == f"turn_on - {kodi_media_player} - 0" diff --git a/tests/components/kodi/util.py b/tests/components/kodi/util.py index e56ba03b7e5..6217a77903b 100644 --- a/tests/components/kodi/util.py +++ b/tests/components/kodi/util.py @@ -63,7 +63,7 @@ def get_kodi_connection( class MockConnection: """A 
mock kodi connection.""" - def __init__(self, connected=True) -> None: + def __init__(self, connected=True): """Mock the Kodi connection.""" self._connected = connected @@ -92,7 +92,7 @@ class MockConnection: class MockWSConnection: """A mock kodi websocket connection.""" - def __init__(self, connected=True) -> None: + def __init__(self, connected=True): """Mock the websocket connection.""" self._connected = connected diff --git a/tests/components/konnected/test_init.py b/tests/components/konnected/test_init.py index 6fc6b10ff20..1a2da88624d 100644 --- a/tests/components/konnected/test_init.py +++ b/tests/components/konnected/test_init.py @@ -7,8 +7,8 @@ import pytest from homeassistant.components import konnected from homeassistant.components.konnected import config_flow +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry diff --git a/tests/components/konnected/test_panel.py b/tests/components/konnected/test_panel.py index 48ebea64161..64cc414cdd3 100644 --- a/tests/components/konnected/test_panel.py +++ b/tests/components/konnected/test_panel.py @@ -700,4 +700,4 @@ async def test_connect_retry(hass: HomeAssistant, mock_panel) -> None: async_fire_time_changed(hass, utcnow() + timedelta(seconds=21)) await hass.async_block_till_done() await async_update_entity(hass, "switch.konnected_445566_actuator_6") - assert hass.states.get("switch.konnected_445566_actuator_6").state == "unknown" + assert hass.states.get("switch.konnected_445566_actuator_6").state == "off" diff --git a/tests/components/kostal_plenticore/conftest.py b/tests/components/kostal_plenticore/conftest.py index acce8ebed7a..af958f19f3a 100644 --- a/tests/components/kostal_plenticore/conftest.py +++ b/tests/components/kostal_plenticore/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pykoplenti import MeData, VersionData import pytest +from typing_extensions import Generator from homeassistant.components.kostal_plenticore.coordinator import Plenticore from homeassistant.core import HomeAssistant diff --git a/tests/components/kostal_plenticore/test_config_flow.py b/tests/components/kostal_plenticore/test_config_flow.py index bd9b9ad278d..c982e2af818 100644 --- a/tests/components/kostal_plenticore/test_config_flow.py +++ b/tests/components/kostal_plenticore/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Kostal Plenticore Solar Inverter config flow.""" -from collections.abc import Generator from unittest.mock import ANY, AsyncMock, MagicMock, patch from pykoplenti import ApiClient, AuthenticationException, SettingsData import pytest +from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.kostal_plenticore.const import DOMAIN diff --git a/tests/components/kostal_plenticore/test_diagnostics.py b/tests/components/kostal_plenticore/test_diagnostics.py index 08f06684d9a..1c3a9efe2e5 100644 --- a/tests/components/kostal_plenticore/test_diagnostics.py +++ b/tests/components/kostal_plenticore/test_diagnostics.py @@ -6,7 +6,7 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.components.kostal_plenticore.coordinator import Plenticore from homeassistant.core import HomeAssistant -from tests.common import ANY, MockConfigEntry 
+from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -54,9 +54,6 @@ async def test_entry_diagnostics( "source": "user", "unique_id": None, "disabled_by": None, - "created_at": ANY, - "modified_at": ANY, - "discovery_keys": {}, }, "client": { "version": "api_version='0.2.0' hostname='scb' name='PUCK RESTful API' sw_version='01.16.05025'", diff --git a/tests/components/kostal_plenticore/test_helper.py b/tests/components/kostal_plenticore/test_helper.py index acd33f82a27..a18cf32c5a1 100644 --- a/tests/components/kostal_plenticore/test_helper.py +++ b/tests/components/kostal_plenticore/test_helper.py @@ -1,10 +1,10 @@ """Test Kostal Plenticore helper.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pykoplenti import ApiClient, ExtendedApiClient, SettingsData import pytest +from typing_extensions import Generator from homeassistant.components.kostal_plenticore.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/kostal_plenticore/test_number.py b/tests/components/kostal_plenticore/test_number.py index 586129c486d..9d94c6f9951 100644 --- a/tests/components/kostal_plenticore/test_number.py +++ b/tests/components/kostal_plenticore/test_number.py @@ -1,11 +1,11 @@ """Test Kostal Plenticore number.""" -from collections.abc import Generator from datetime import timedelta from unittest.mock import patch from pykoplenti import ApiClient, SettingsData import pytest +from typing_extensions import Generator from homeassistant.components.number import ( ATTR_MAX, diff --git a/tests/components/kulersky/test_light.py b/tests/components/kulersky/test_light.py index a2245e721c5..90f40d327e4 100644 --- a/tests/components/kulersky/test_light.py +++ b/tests/components/kulersky/test_light.py @@ -1,6 +1,5 @@ """Test the Kuler Sky lights.""" -from collections.abc import AsyncGenerator from unittest.mock import MagicMock, patch import pykulersky @@ -38,15 +37,13 @@ from tests.common import MockConfigEntry, async_fire_time_changed @pytest.fixture -async def mock_entry() -> MockConfigEntry: +async def mock_entry(hass): """Create a mock light entity.""" return MockConfigEntry(domain=DOMAIN) @pytest.fixture -async def mock_light( - hass: HomeAssistant, mock_entry: MockConfigEntry -) -> AsyncGenerator[MagicMock]: +async def mock_light(hass, mock_entry): """Create a mock light entity.""" light = MagicMock(spec=pykulersky.Light) @@ -67,7 +64,7 @@ async def mock_light( yield light -async def test_init(hass: HomeAssistant, mock_light: MagicMock) -> None: +async def test_init(hass: HomeAssistant, mock_light) -> None: """Test platform setup.""" state = hass.states.get("light.bedroom") assert state.state == STATE_OFF @@ -90,9 +87,7 @@ async def test_init(hass: HomeAssistant, mock_light: MagicMock) -> None: assert mock_light.disconnect.called -async def test_remove_entry( - hass: HomeAssistant, mock_light: MagicMock, mock_entry: MockConfigEntry -) -> None: +async def test_remove_entry(hass: HomeAssistant, mock_light, mock_entry) -> None: """Test platform setup.""" assert hass.data[DOMAIN][DATA_ADDRESSES] == {"AA:BB:CC:11:22:33"} assert DATA_DISCOVERY_SUBSCRIPTION in hass.data[DOMAIN] @@ -104,7 +99,7 @@ async def test_remove_entry( async def test_remove_entry_exceptions_caught( - hass: HomeAssistant, mock_light: MagicMock, mock_entry: MockConfigEntry + hass: HomeAssistant, mock_light, mock_entry ) -> None: """Assert that 
disconnect exceptions are caught.""" mock_light.disconnect.side_effect = pykulersky.PykulerskyException("Mock error") @@ -113,7 +108,7 @@ async def test_remove_entry_exceptions_caught( assert mock_light.disconnect.called -async def test_update_exception(hass: HomeAssistant, mock_light: MagicMock) -> None: +async def test_update_exception(hass: HomeAssistant, mock_light) -> None: """Test platform setup.""" mock_light.get_color.side_effect = pykulersky.PykulerskyException @@ -123,7 +118,7 @@ async def test_update_exception(hass: HomeAssistant, mock_light: MagicMock) -> N assert state.state == STATE_UNAVAILABLE -async def test_light_turn_on(hass: HomeAssistant, mock_light: MagicMock) -> None: +async def test_light_turn_on(hass: HomeAssistant, mock_light) -> None: """Test KulerSkyLight turn_on.""" mock_light.get_color.return_value = (255, 255, 255, 255) await hass.services.async_call( @@ -180,7 +175,7 @@ async def test_light_turn_on(hass: HomeAssistant, mock_light: MagicMock) -> None mock_light.set_color.assert_called_with(50, 41, 0, 50) -async def test_light_turn_off(hass: HomeAssistant, mock_light: MagicMock) -> None: +async def test_light_turn_off(hass: HomeAssistant, mock_light) -> None: """Test KulerSkyLight turn_on.""" mock_light.get_color.return_value = (0, 0, 0, 0) await hass.services.async_call( @@ -193,7 +188,7 @@ async def test_light_turn_off(hass: HomeAssistant, mock_light: MagicMock) -> Non mock_light.set_color.assert_called_with(0, 0, 0, 0) -async def test_light_update(hass: HomeAssistant, mock_light: MagicMock) -> None: +async def test_light_update(hass: HomeAssistant, mock_light) -> None: """Test KulerSkyLight update.""" utcnow = dt_util.utcnow() diff --git a/tests/components/lacrosse_view/conftest.py b/tests/components/lacrosse_view/conftest.py index 4f1bfdc5748..a6294c64210 100644 --- a/tests/components/lacrosse_view/conftest.py +++ b/tests/components/lacrosse_view/conftest.py @@ -1,9 +1,9 @@ """Define fixtures for LaCrosse View tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr b/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr index 201bbbc971e..9d880746ff9 100644 --- a/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr +++ b/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr @@ -15,8 +15,6 @@ 'username': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'lacrosse_view', 'entry_id': 'lacrosse_view_test_entry_id', 'minor_version': 1, diff --git a/tests/components/lacrosse_view/test_config_flow.py b/tests/components/lacrosse_view/test_config_flow.py index 9ca7fb78bdd..5a48b3d15fe 100644 --- a/tests/components/lacrosse_view/test_config_flow.py +++ b/tests/components/lacrosse_view/test_config_flow.py @@ -251,7 +251,16 @@ async def test_reauth(hass: HomeAssistant) -> None: ) mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + "title_placeholders": {"name": mock_config_entry.title}, + "unique_id": mock_config_entry.unique_id, + }, + data=data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/lacrosse_view/test_diagnostics.py 
b/tests/components/lacrosse_view/test_diagnostics.py index dc48f160113..08cef64a935 100644 --- a/tests/components/lacrosse_view/test_diagnostics.py +++ b/tests/components/lacrosse_view/test_diagnostics.py @@ -3,7 +3,6 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props from homeassistant.components.lacrosse_view import DOMAIN from homeassistant.core import HomeAssistant @@ -33,6 +32,7 @@ async def test_entry_diagnostics( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert await get_diagnostics_for_config_entry( - hass, hass_client, config_entry - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + == snapshot + ) diff --git a/tests/components/lamarzocco/__init__.py b/tests/components/lamarzocco/__init__.py index f6ca0fe40df..4d274d10baa 100644 --- a/tests/components/lamarzocco/__init__.py +++ b/tests/components/lamarzocco/__init__.py @@ -1,6 +1,6 @@ """Mock inputs for tests.""" -from pylamarzocco.const import MachineModel +from lmcloud.const import MachineModel from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant @@ -19,10 +19,10 @@ PASSWORD_SELECTION = { USER_INPUT = PASSWORD_SELECTION | {CONF_USERNAME: "username"} SERIAL_DICT = { - MachineModel.GS3_AV: "GS012345", - MachineModel.GS3_MP: "GS012345", - MachineModel.LINEA_MICRA: "MR012345", - MachineModel.LINEA_MINI: "LM012345", + MachineModel.GS3_AV: "GS01234", + MachineModel.GS3_MP: "GS01234", + MachineModel.LINEA_MICRA: "MR01234", + MachineModel.LINEA_MINI: "LM01234", } WAKE_UP_SLEEP_ENTRY_IDS = ["Os2OswX", "aXFz5bJ"] diff --git a/tests/components/lamarzocco/conftest.py b/tests/components/lamarzocco/conftest.py index 210dd9406cc..6741ac0797c 100644 --- a/tests/components/lamarzocco/conftest.py +++ b/tests/components/lamarzocco/conftest.py @@ -1,14 +1,15 @@ """Lamarzocco session fixtures.""" -from collections.abc import Generator +from collections.abc import Callable import json from unittest.mock import MagicMock, patch from bleak.backends.device import BLEDevice -from pylamarzocco.const import FirmwareType, MachineModel, SteamLevel -from pylamarzocco.lm_machine import LaMarzoccoMachine -from pylamarzocco.models import LaMarzoccoDeviceInfo +from lmcloud.const import FirmwareType, MachineModel, SteamLevel +from lmcloud.lm_machine import LaMarzoccoMachine +from lmcloud.models import LaMarzoccoDeviceInfo import pytest +from typing_extensions import Generator from homeassistant.components.lamarzocco.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_NAME, CONF_TOKEN @@ -24,7 +25,7 @@ def mock_config_entry( hass: HomeAssistant, mock_lamarzocco: MagicMock ) -> MockConfigEntry: """Return the default mocked config entry.""" - return MockConfigEntry( + entry = MockConfigEntry( title="My LaMarzocco", domain=DOMAIN, version=2, @@ -37,25 +38,8 @@ def mock_config_entry( }, unique_id=mock_lamarzocco.serial_number, ) - - -@pytest.fixture -def mock_config_entry_no_local_connection( - hass: HomeAssistant, mock_lamarzocco: MagicMock -) -> MockConfigEntry: - """Return the default mocked config entry.""" - return MockConfigEntry( - title="My LaMarzocco", - domain=DOMAIN, - version=2, - data=USER_INPUT - | { - CONF_MODEL: mock_lamarzocco.model, - CONF_TOKEN: "token", - CONF_NAME: "GS3", - }, - unique_id=mock_lamarzocco.serial_number, - ) + 
entry.add_to_hass(hass) + return entry @pytest.fixture @@ -75,11 +59,11 @@ def device_fixture() -> MachineModel: @pytest.fixture -def mock_device_info(device_fixture: MachineModel) -> LaMarzoccoDeviceInfo: +def mock_device_info() -> LaMarzoccoDeviceInfo: """Return a mocked La Marzocco device info.""" return LaMarzoccoDeviceInfo( - model=device_fixture, - serial_number=SERIAL_DICT[device_fixture], + model=MachineModel.GS3_AV, + serial_number="GS01234", name="GS3", communication_key="token", ) @@ -145,9 +129,28 @@ def mock_lamarzocco(device_fixture: MachineModel) -> Generator[MagicMock]: lamarzocco.firmware[FirmwareType.GATEWAY].latest_version = "v3.5-rc3" lamarzocco.firmware[FirmwareType.MACHINE].latest_version = "1.55" + async def websocket_connect_mock( + notify_callback: Callable | None, + ) -> None: + """Mock the websocket connect method.""" + return None + + lamarzocco.websocket_connect = websocket_connect_mock + yield lamarzocco +@pytest.fixture +def remove_local_connection( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: + """Remove the local connection.""" + data = mock_config_entry.data.copy() + del data[CONF_HOST] + hass.config_entries.async_update_entry(mock_config_entry, data=data) + return mock_config_entry + + @pytest.fixture(autouse=True) def mock_bluetooth(enable_bluetooth: None) -> None: """Auto mock bluetooth.""" @@ -157,5 +160,5 @@ def mock_bluetooth(enable_bluetooth: None) -> None: def mock_ble_device() -> BLEDevice: """Return a mock BLE device.""" return BLEDevice( - "00:00:00:00:00:00", "GS_GS012345", details={"path": "path"}, rssi=50 + "00:00:00:00:00:00", "GS_GS01234", details={"path": "path"}, rssi=50 ) diff --git a/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr b/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr index cda285a7106..df47ac002e6 100644 --- a/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr +++ b/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr @@ -1,19 +1,19 @@ # serializer version: 1 -# name: test_binary_sensors[GS012345_backflush_active-binary_sensor] +# name: test_binary_sensors[GS01234_backflush_active-binary_sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'GS012345 Backflush active', + 'friendly_name': 'GS01234 Backflush active', }), 'context': , - 'entity_id': 'binary_sensor.gs012345_backflush_active', + 'entity_id': 'binary_sensor.gs01234_backflush_active', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensors[GS012345_backflush_active-entry] +# name: test_binary_sensors[GS01234_backflush_active-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -25,7 +25,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.gs012345_backflush_active', + 'entity_id': 'binary_sensor.gs01234_backflush_active', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -42,25 +42,25 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'backflush_enabled', - 'unique_id': 'GS012345_backflush_enabled', + 'unique_id': 'GS01234_backflush_enabled', 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[GS012345_brewing_active-binary_sensor] +# name: test_binary_sensors[GS01234_brewing_active-binary_sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'GS012345 Brewing active', + 'friendly_name': 'GS01234 Brewing active', }), 'context': , - 
'entity_id': 'binary_sensor.gs012345_brewing_active', + 'entity_id': 'binary_sensor.gs01234_brewing_active', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensors[GS012345_brewing_active-entry] +# name: test_binary_sensors[GS01234_brewing_active-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -72,7 +72,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.gs012345_brewing_active', + 'entity_id': 'binary_sensor.gs01234_brewing_active', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -89,25 +89,25 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'brew_active', - 'unique_id': 'GS012345_brew_active', + 'unique_id': 'GS01234_brew_active', 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[GS012345_water_tank_empty-binary_sensor] +# name: test_binary_sensors[GS01234_water_tank_empty-binary_sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': 'GS012345 Water tank empty', + 'friendly_name': 'GS01234 Water tank empty', }), 'context': , - 'entity_id': 'binary_sensor.gs012345_water_tank_empty', + 'entity_id': 'binary_sensor.gs01234_water_tank_empty', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensors[GS012345_water_tank_empty-entry] +# name: test_binary_sensors[GS01234_water_tank_empty-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -119,7 +119,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.gs012345_water_tank_empty', + 'entity_id': 'binary_sensor.gs01234_water_tank_empty', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -136,7 +136,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'water_tank', - 'unique_id': 'GS012345_water_tank', + 'unique_id': 'GS01234_water_tank', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_button.ambr b/tests/components/lamarzocco/snapshots/test_button.ambr index 64d47a11072..023039cc6f7 100644 --- a/tests/components/lamarzocco/snapshots/test_button.ambr +++ b/tests/components/lamarzocco/snapshots/test_button.ambr @@ -2,10 +2,10 @@ # name: test_start_backflush StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345 Start backflush', + 'friendly_name': 'GS01234 Start backflush', }), 'context': , - 'entity_id': 'button.gs012345_start_backflush', + 'entity_id': 'button.gs01234_start_backflush', 'last_changed': , 'last_reported': , 'last_updated': , @@ -24,7 +24,7 @@ 'disabled_by': None, 'domain': 'button', 'entity_category': None, - 'entity_id': 'button.gs012345_start_backflush', + 'entity_id': 'button.gs01234_start_backflush', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -41,7 +41,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'start_backflush', - 'unique_id': 'GS012345_start_backflush', + 'unique_id': 'GS01234_start_backflush', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_calendar.ambr b/tests/components/lamarzocco/snapshots/test_calendar.ambr index 729eed5879a..2fd5dab846a 100644 --- a/tests/components/lamarzocco/snapshots/test_calendar.ambr +++ b/tests/components/lamarzocco/snapshots/test_calendar.ambr @@ -1,7 +1,7 @@ # serializer version: 1 # name: test_calendar_edge_cases[start_date0-end_date0] dict({ - 
'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -15,7 +15,7 @@ # --- # name: test_calendar_edge_cases[start_date1-end_date1] dict({ - 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -29,7 +29,7 @@ # --- # name: test_calendar_edge_cases[start_date2-end_date2] dict({ - 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -43,7 +43,7 @@ # --- # name: test_calendar_edge_cases[start_date3-end_date3] dict({ - 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -57,7 +57,7 @@ # --- # name: test_calendar_edge_cases[start_date4-end_date4] dict({ - 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ ]), }), @@ -65,7 +65,7 @@ # --- # name: test_calendar_edge_cases[start_date5-end_date5] dict({ - 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -83,7 +83,7 @@ }), }) # --- -# name: test_calendar_events[entry.GS012345_auto_on_off_schedule_axfz5bj] +# name: test_calendar_events[entry.GS01234_auto_on_off_schedule_axfz5bj] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -95,7 +95,7 @@ 'disabled_by': None, 'domain': 'calendar', 'entity_category': None, - 'entity_id': 'calendar.gs012345_auto_on_off_schedule_axfz5bj', + 'entity_id': 'calendar.gs01234_auto_on_off_schedule_axfz5bj', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -112,11 +112,11 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off_schedule', - 'unique_id': 'GS012345_auto_on_off_schedule_aXFz5bJ', + 'unique_id': 'GS01234_auto_on_off_schedule_aXFz5bJ', 'unit_of_measurement': None, }) # --- -# name: test_calendar_events[entry.GS012345_auto_on_off_schedule_os2oswx] +# name: test_calendar_events[entry.GS01234_auto_on_off_schedule_os2oswx] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -128,7 +128,7 @@ 'disabled_by': None, 'domain': 'calendar', 'entity_category': None, - 'entity_id': 'calendar.gs012345_auto_on_off_schedule_os2oswx', + 'entity_id': 'calendar.gs01234_auto_on_off_schedule_os2oswx', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -145,13 +145,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off_schedule', - 'unique_id': 'GS012345_auto_on_off_schedule_Os2OswX', + 'unique_id': 'GS01234_auto_on_off_schedule_Os2OswX', 'unit_of_measurement': None, }) # --- -# name: test_calendar_events[events.GS012345_auto_on_off_schedule_axfz5bj] +# name: test_calendar_events[events.GS01234_auto_on_off_schedule_axfz5bj] dict({ - 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 
'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -181,9 +181,9 @@ }), }) # --- -# name: test_calendar_events[events.GS012345_auto_on_off_schedule_os2oswx] +# name: test_calendar_events[events.GS01234_auto_on_off_schedule_os2oswx] dict({ - 'calendar.gs012345_auto_on_off_schedule_os2oswx': dict({ + 'calendar.gs01234_auto_on_off_schedule_os2oswx': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -327,38 +327,38 @@ }), }) # --- -# name: test_calendar_events[state.GS012345_auto_on_off_schedule_axfz5bj] +# name: test_calendar_events[state.GS01234_auto_on_off_schedule_axfz5bj] StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': False, 'description': 'Machine is scheduled to turn on at the start time and off at the end time', 'end_time': '2024-01-14 07:30:00', - 'friendly_name': 'GS012345 Auto on/off schedule (aXFz5bJ)', + 'friendly_name': 'GS01234 Auto on/off schedule (aXFz5bJ)', 'location': '', 'message': 'Machine My LaMarzocco on', 'start_time': '2024-01-14 07:00:00', }), 'context': , - 'entity_id': 'calendar.gs012345_auto_on_off_schedule_axfz5bj', + 'entity_id': 'calendar.gs01234_auto_on_off_schedule_axfz5bj', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_calendar_events[state.GS012345_auto_on_off_schedule_os2oswx] +# name: test_calendar_events[state.GS01234_auto_on_off_schedule_os2oswx] StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': False, 'description': 'Machine is scheduled to turn on at the start time and off at the end time', 'end_time': '2024-01-13 00:00:00', - 'friendly_name': 'GS012345 Auto on/off schedule (Os2OswX)', + 'friendly_name': 'GS01234 Auto on/off schedule (Os2OswX)', 'location': '', 'message': 'Machine My LaMarzocco on', 'start_time': '2024-01-12 22:00:00', }), 'context': , - 'entity_id': 'calendar.gs012345_auto_on_off_schedule_os2oswx', + 'entity_id': 'calendar.gs01234_auto_on_off_schedule_os2oswx', 'last_changed': , 'last_reported': , 'last_updated': , @@ -367,7 +367,7 @@ # --- # name: test_no_calendar_events_global_disable dict({ - 'calendar.gs012345_auto_on_off_schedule_os2oswx': dict({ + 'calendar.gs01234_auto_on_off_schedule_os2oswx': dict({ 'events': list([ ]), }), diff --git a/tests/components/lamarzocco/snapshots/test_number.ambr b/tests/components/lamarzocco/snapshots/test_number.ambr index b7e42bb425f..8265e7d7646 100644 --- a/tests/components/lamarzocco/snapshots/test_number.ambr +++ b/tests/components/lamarzocco/snapshots/test_number.ambr @@ -1,9 +1,9 @@ # serializer version: 1 -# name: test_general_numbers[coffee_target_temperature-94-set_temp-kwargs0] +# name: test_coffee_boiler StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS012345 Coffee target temperature', + 'friendly_name': 'GS01234 Coffee target temperature', 'max': 104, 'min': 85, 'mode': , @@ -11,14 +11,14 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_coffee_target_temperature', + 'entity_id': 'number.gs01234_coffee_target_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '95', }) # --- -# name: test_general_numbers[coffee_target_temperature-94-set_temp-kwargs0].1 +# name: test_coffee_boiler.1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -35,7 +35,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs012345_coffee_target_temperature', + 'entity_id': 
'number.gs01234_coffee_target_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -52,72 +52,15 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'coffee_temp', - 'unique_id': 'GS012345_coffee_temp', + 'unique_id': 'GS01234_coffee_temp', 'unit_of_measurement': , }) # --- -# name: test_general_numbers[smart_standby_time-23-set_smart_standby-kwargs1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'GS012345 Smart standby time', - 'max': 240, - 'min': 10, - 'mode': , - 'step': 10, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.gs012345_smart_standby_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10', - }) -# --- -# name: test_general_numbers[smart_standby_time-23-set_smart_standby-kwargs1].1 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 240, - 'min': 10, - 'mode': , - 'step': 10, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.gs012345_smart_standby_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Smart standby time', - 'platform': 'lamarzocco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'smart_standby_time', - 'unique_id': 'GS012345_smart_standby_time', - 'unit_of_measurement': , - }) -# --- # name: test_gs3_exclusive[steam_target_temperature-131-set_temp-kwargs0-GS3 AV] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS012345 Steam target temperature', + 'friendly_name': 'GS01234 Steam target temperature', 'max': 131, 'min': 126, 'mode': , @@ -125,7 +68,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_steam_target_temperature', + 'entity_id': 'number.gs01234_steam_target_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -149,7 +92,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs012345_steam_target_temperature', + 'entity_id': 'number.gs01234_steam_target_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -166,7 +109,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_temp', - 'unique_id': 'GS012345_steam_temp', + 'unique_id': 'GS01234_steam_temp', 'unit_of_measurement': , }) # --- @@ -174,7 +117,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS012345 Steam target temperature', + 'friendly_name': 'GS01234 Steam target temperature', 'max': 131, 'min': 126, 'mode': , @@ -182,7 +125,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_steam_target_temperature', + 'entity_id': 'number.gs01234_steam_target_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -206,7 +149,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs012345_steam_target_temperature', + 'entity_id': 'number.gs01234_steam_target_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -223,7 +166,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_temp', - 'unique_id': 'GS012345_steam_temp', + 'unique_id': 
'GS01234_steam_temp', 'unit_of_measurement': , }) # --- @@ -231,7 +174,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Tea water duration', + 'friendly_name': 'GS01234 Tea water duration', 'max': 30, 'min': 0, 'mode': , @@ -239,7 +182,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_tea_water_duration', + 'entity_id': 'number.gs01234_tea_water_duration', 'last_changed': , 'last_reported': , 'last_updated': , @@ -263,7 +206,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs012345_tea_water_duration', + 'entity_id': 'number.gs01234_tea_water_duration', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -280,7 +223,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tea_water_duration', - 'unique_id': 'GS012345_tea_water_duration', + 'unique_id': 'GS01234_tea_water_duration', 'unit_of_measurement': , }) # --- @@ -288,7 +231,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Tea water duration', + 'friendly_name': 'GS01234 Tea water duration', 'max': 30, 'min': 0, 'mode': , @@ -296,7 +239,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_tea_water_duration', + 'entity_id': 'number.gs01234_tea_water_duration', 'last_changed': , 'last_reported': , 'last_updated': , @@ -320,7 +263,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs012345_tea_water_duration', + 'entity_id': 'number.gs01234_tea_water_duration', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -337,14 +280,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tea_water_duration', - 'unique_id': 'GS012345_tea_water_duration', + 'unique_id': 'GS01234_tea_water_duration', 'unit_of_measurement': , }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_1-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345 Dose Key 1', + 'friendly_name': 'GS01234 Dose Key 1', 'max': 999, 'min': 0, 'mode': , @@ -352,17 +295,17 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs012345_dose_key_1', + 'entity_id': 'number.gs01234_dose_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '135', }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_2-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345 Dose Key 2', + 'friendly_name': 'GS01234 Dose Key 2', 'max': 999, 'min': 0, 'mode': , @@ -370,17 +313,17 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs012345_dose_key_2', + 'entity_id': 'number.gs01234_dose_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '97', }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_3-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345 Dose Key 3', + 'friendly_name': 'GS01234 Dose Key 3', 'max': 999, 'min': 0, 'mode': , @@ 
-388,17 +331,17 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs012345_dose_key_3', + 'entity_id': 'number.gs01234_dose_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '108', }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_4-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345 Dose Key 4', + 'friendly_name': 'GS01234 Dose Key 4', 'max': 999, 'min': 0, 'mode': , @@ -406,18 +349,18 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs012345_dose_key_4', + 'entity_id': 'number.gs01234_dose_key_4', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '121', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_1-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Prebrew off time Key 1', + 'friendly_name': 'GS01234 Prebrew off time Key 1', 'max': 10, 'min': 1, 'mode': , @@ -425,18 +368,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_prebrew_off_time_key_1', + 'entity_id': 'number.gs01234_prebrew_off_time_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_2-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Prebrew off time Key 2', + 'friendly_name': 'GS01234 Prebrew off time Key 2', 'max': 10, 'min': 1, 'mode': , @@ -444,18 +387,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_prebrew_off_time_key_2', + 'entity_id': 'number.gs01234_prebrew_off_time_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Prebrew off time Key 3', + 'friendly_name': 'GS01234 Prebrew off time Key 3', 'max': 10, 'min': 1, 'mode': , @@ -463,18 +406,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_prebrew_off_time_key_3', + 'entity_id': 'number.gs01234_prebrew_off_time_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '3.29999995231628', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_4-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Prebrew off time Key 4', + 
'friendly_name': 'GS01234 Prebrew off time Key 4', 'max': 10, 'min': 1, 'mode': , @@ -482,18 +425,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_prebrew_off_time_key_4', + 'entity_id': 'number.gs01234_prebrew_off_time_key_4', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '2', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_1-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Prebrew on time Key 1', + 'friendly_name': 'GS01234 Prebrew on time Key 1', 'max': 10, 'min': 2, 'mode': , @@ -501,18 +444,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_prebrew_on_time_key_1', + 'entity_id': 'number.gs01234_prebrew_on_time_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_2-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Prebrew on time Key 2', + 'friendly_name': 'GS01234 Prebrew on time Key 2', 'max': 10, 'min': 2, 'mode': , @@ -520,18 +463,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_prebrew_on_time_key_2', + 'entity_id': 'number.gs01234_prebrew_on_time_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Prebrew on time Key 3', + 'friendly_name': 'GS01234 Prebrew on time Key 3', 'max': 10, 'min': 2, 'mode': , @@ -539,18 +482,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_prebrew_on_time_key_3', + 'entity_id': 'number.gs01234_prebrew_on_time_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '3.29999995231628', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_4-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Prebrew on time Key 4', + 'friendly_name': 'GS01234 Prebrew on time Key 4', 'max': 10, 'min': 2, 'mode': , @@ -558,18 +501,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_prebrew_on_time_key_4', + 'entity_id': 'number.gs01234_prebrew_on_time_key_4', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '2', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_1-state] +# name: 
test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Preinfusion time Key 1', + 'friendly_name': 'GS01234 Preinfusion time Key 1', 'max': 29, 'min': 2, 'mode': , @@ -577,18 +520,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_preinfusion_time_key_1', + 'entity_id': 'number.gs01234_preinfusion_time_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_2-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Preinfusion time Key 2', + 'friendly_name': 'GS01234 Preinfusion time Key 2', 'max': 29, 'min': 2, 'mode': , @@ -596,18 +539,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_preinfusion_time_key_2', + 'entity_id': 'number.gs01234_preinfusion_time_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Preinfusion time Key 3', + 'friendly_name': 'GS01234 Preinfusion time Key 3', 'max': 29, 'min': 2, 'mode': , @@ -615,18 +558,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_preinfusion_time_key_3', + 'entity_id': 'number.gs01234_preinfusion_time_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '3.29999995231628', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_4-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Preinfusion time Key 4', + 'friendly_name': 'GS01234 Preinfusion time Key 4', 'max': 29, 'min': 2, 'mode': , @@ -634,7 +577,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs012345_preinfusion_time_key_4', + 'entity_id': 'number.gs01234_preinfusion_time_key_4', 'last_changed': , 'last_reported': , 'last_updated': , @@ -645,7 +588,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'LM012345 Prebrew off time', + 'friendly_name': 'LM01234 Prebrew off time', 'max': 10, 'min': 1, 'mode': , @@ -653,7 +596,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.lm012345_prebrew_off_time', + 'entity_id': 'number.lm01234_prebrew_off_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -677,7 +620,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.lm012345_prebrew_off_time', + 'entity_id': 'number.lm01234_prebrew_off_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -694,7 +637,7 @@ 
'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_off', - 'unique_id': 'LM012345_prebrew_off', + 'unique_id': 'LM01234_prebrew_off', 'unit_of_measurement': , }) # --- @@ -702,7 +645,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'MR012345 Prebrew off time', + 'friendly_name': 'MR01234 Prebrew off time', 'max': 10, 'min': 1, 'mode': , @@ -710,7 +653,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.mr012345_prebrew_off_time', + 'entity_id': 'number.mr01234_prebrew_off_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -734,7 +677,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.mr012345_prebrew_off_time', + 'entity_id': 'number.mr01234_prebrew_off_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -751,7 +694,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_off', - 'unique_id': 'MR012345_prebrew_off', + 'unique_id': 'MR01234_prebrew_off', 'unit_of_measurement': , }) # --- @@ -759,7 +702,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'LM012345 Prebrew on time', + 'friendly_name': 'LM01234 Prebrew on time', 'max': 10, 'min': 2, 'mode': , @@ -767,7 +710,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.lm012345_prebrew_on_time', + 'entity_id': 'number.lm01234_prebrew_on_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -791,7 +734,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.lm012345_prebrew_on_time', + 'entity_id': 'number.lm01234_prebrew_on_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -808,7 +751,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_on', - 'unique_id': 'LM012345_prebrew_on', + 'unique_id': 'LM01234_prebrew_on', 'unit_of_measurement': , }) # --- @@ -816,7 +759,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'MR012345 Prebrew on time', + 'friendly_name': 'MR01234 Prebrew on time', 'max': 10, 'min': 2, 'mode': , @@ -824,7 +767,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.mr012345_prebrew_on_time', + 'entity_id': 'number.mr01234_prebrew_on_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -848,7 +791,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.mr012345_prebrew_on_time', + 'entity_id': 'number.mr01234_prebrew_on_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -865,7 +808,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_on', - 'unique_id': 'MR012345_prebrew_on', + 'unique_id': 'MR01234_prebrew_on', 'unit_of_measurement': , }) # --- @@ -873,7 +816,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'LM012345 Preinfusion time', + 'friendly_name': 'LM01234 Preinfusion time', 'max': 29, 'min': 2, 'mode': , @@ -881,7 +824,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.lm012345_preinfusion_time', + 'entity_id': 'number.lm01234_preinfusion_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -905,7 +848,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.lm012345_preinfusion_time', + 'entity_id': 'number.lm01234_preinfusion_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ 
-922,7 +865,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'preinfusion_off', - 'unique_id': 'LM012345_preinfusion_off', + 'unique_id': 'LM01234_preinfusion_off', 'unit_of_measurement': , }) # --- @@ -930,7 +873,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'MR012345 Preinfusion time', + 'friendly_name': 'MR01234 Preinfusion time', 'max': 29, 'min': 2, 'mode': , @@ -938,7 +881,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.mr012345_preinfusion_time', + 'entity_id': 'number.mr01234_preinfusion_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -962,7 +905,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.mr012345_preinfusion_time', + 'entity_id': 'number.mr01234_preinfusion_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -979,7 +922,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'preinfusion_off', - 'unique_id': 'MR012345_preinfusion_off', + 'unique_id': 'MR01234_preinfusion_off', 'unit_of_measurement': , }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_select.ambr b/tests/components/lamarzocco/snapshots/test_select.ambr index 46fa55eff13..be56af2b092 100644 --- a/tests/components/lamarzocco/snapshots/test_select.ambr +++ b/tests/components/lamarzocco/snapshots/test_select.ambr @@ -2,7 +2,7 @@ # name: test_pre_brew_infusion_select[GS3 AV] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345 Prebrew/-infusion mode', + 'friendly_name': 'GS01234 Prebrew/-infusion mode', 'options': list([ 'disabled', 'prebrew', @@ -10,7 +10,7 @@ ]), }), 'context': , - 'entity_id': 'select.gs012345_prebrew_infusion_mode', + 'entity_id': 'select.gs01234_prebrew_infusion_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -35,7 +35,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': , - 'entity_id': 'select.gs012345_prebrew_infusion_mode', + 'entity_id': 'select.gs01234_prebrew_infusion_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -52,14 +52,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_infusion_select', - 'unique_id': 'GS012345_prebrew_infusion_select', + 'unique_id': 'GS01234_prebrew_infusion_select', 'unit_of_measurement': None, }) # --- # name: test_pre_brew_infusion_select[Linea Mini] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'LM012345 Prebrew/-infusion mode', + 'friendly_name': 'LM01234 Prebrew/-infusion mode', 'options': list([ 'disabled', 'prebrew', @@ -67,7 +67,7 @@ ]), }), 'context': , - 'entity_id': 'select.lm012345_prebrew_infusion_mode', + 'entity_id': 'select.lm01234_prebrew_infusion_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -92,7 +92,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': , - 'entity_id': 'select.lm012345_prebrew_infusion_mode', + 'entity_id': 'select.lm01234_prebrew_infusion_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -109,14 +109,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_infusion_select', - 'unique_id': 'LM012345_prebrew_infusion_select', + 'unique_id': 'LM01234_prebrew_infusion_select', 'unit_of_measurement': None, }) # --- # name: test_pre_brew_infusion_select[Micra] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MR012345 Prebrew/-infusion mode', + 'friendly_name': 'MR01234 Prebrew/-infusion mode', 'options': 
list([ 'disabled', 'prebrew', @@ -124,7 +124,7 @@ ]), }), 'context': , - 'entity_id': 'select.mr012345_prebrew_infusion_mode', + 'entity_id': 'select.mr01234_prebrew_infusion_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -149,7 +149,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': , - 'entity_id': 'select.mr012345_prebrew_infusion_mode', + 'entity_id': 'select.mr01234_prebrew_infusion_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -166,69 +166,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_infusion_select', - 'unique_id': 'MR012345_prebrew_infusion_select', - 'unit_of_measurement': None, - }) -# --- -# name: test_smart_standby_mode - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345 Smart standby mode', - 'options': list([ - 'power_on', - 'last_brewing', - ]), - }), - 'context': , - 'entity_id': 'select.gs012345_smart_standby_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'last_brewing', - }) -# --- -# name: test_smart_standby_mode.1 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'power_on', - 'last_brewing', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.gs012345_smart_standby_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Smart standby mode', - 'platform': 'lamarzocco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'smart_standby_mode', - 'unique_id': 'GS012345_smart_standby_mode', + 'unique_id': 'MR01234_prebrew_infusion_select', 'unit_of_measurement': None, }) # --- # name: test_steam_boiler_level[Micra] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MR012345 Steam level', + 'friendly_name': 'MR01234 Steam level', 'options': list([ '1', '2', @@ -236,7 +181,7 @@ ]), }), 'context': , - 'entity_id': 'select.mr012345_steam_level', + 'entity_id': 'select.mr01234_steam_level', 'last_changed': , 'last_reported': , 'last_updated': , @@ -261,7 +206,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': None, - 'entity_id': 'select.mr012345_steam_level', + 'entity_id': 'select.mr01234_steam_level', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -278,7 +223,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_temp_select', - 'unique_id': 'MR012345_steam_temp_select', + 'unique_id': 'MR01234_steam_temp_select', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_sensor.ambr b/tests/components/lamarzocco/snapshots/test_sensor.ambr index da1efbf1eaa..2237a8416e1 100644 --- a/tests/components/lamarzocco/snapshots/test_sensor.ambr +++ b/tests/components/lamarzocco/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_sensors[GS012345_current_coffee_temperature-entry] +# name: test_sensors[GS01234_current_coffee_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -13,7 +13,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.gs012345_current_coffee_temperature', + 'entity_id': 'sensor.gs01234_current_coffee_temperature', 'has_entity_name': True, 
'hidden_by': None, 'icon': None, @@ -33,27 +33,27 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'current_temp_coffee', - 'unique_id': 'GS012345_current_temp_coffee', + 'unique_id': 'GS01234_current_temp_coffee', 'unit_of_measurement': , }) # --- -# name: test_sensors[GS012345_current_coffee_temperature-sensor] +# name: test_sensors[GS01234_current_coffee_temperature-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS012345 Current coffee temperature', + 'friendly_name': 'GS01234 Current coffee temperature', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.gs012345_current_coffee_temperature', + 'entity_id': 'sensor.gs01234_current_coffee_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '96.5', }) # --- -# name: test_sensors[GS012345_current_steam_temperature-entry] +# name: test_sensors[GS01234_current_steam_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -67,7 +67,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.gs012345_current_steam_temperature', + 'entity_id': 'sensor.gs01234_current_steam_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -87,27 +87,27 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'current_temp_steam', - 'unique_id': 'GS012345_current_temp_steam', + 'unique_id': 'GS01234_current_temp_steam', 'unit_of_measurement': , }) # --- -# name: test_sensors[GS012345_current_steam_temperature-sensor] +# name: test_sensors[GS01234_current_steam_temperature-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS012345 Current steam temperature', + 'friendly_name': 'GS01234 Current steam temperature', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.gs012345_current_steam_temperature', + 'entity_id': 'sensor.gs01234_current_steam_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '123.800003051758', }) # --- -# name: test_sensors[GS012345_shot_timer-entry] +# name: test_sensors[GS01234_shot_timer-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -121,7 +121,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.gs012345_shot_timer', + 'entity_id': 'sensor.gs01234_shot_timer', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -138,27 +138,27 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'shot_timer', - 'unique_id': 'GS012345_shot_timer', + 'unique_id': 'GS01234_shot_timer', 'unit_of_measurement': , }) # --- -# name: test_sensors[GS012345_shot_timer-sensor] +# name: test_sensors[GS01234_shot_timer-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS012345 Shot timer', + 'friendly_name': 'GS01234 Shot timer', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.gs012345_shot_timer', + 'entity_id': 'sensor.gs01234_shot_timer', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0', }) # --- -# name: test_sensors[GS012345_total_coffees_made-entry] +# name: test_sensors[GS01234_total_coffees_made-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -172,7 +172,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.gs012345_total_coffees_made', + 'entity_id': 'sensor.gs01234_total_coffees_made', 
'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -189,26 +189,26 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drink_stats_coffee', - 'unique_id': 'GS012345_drink_stats_coffee', + 'unique_id': 'GS01234_drink_stats_coffee', 'unit_of_measurement': 'drinks', }) # --- -# name: test_sensors[GS012345_total_coffees_made-sensor] +# name: test_sensors[GS01234_total_coffees_made-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345 Total coffees made', + 'friendly_name': 'GS01234 Total coffees made', 'state_class': , 'unit_of_measurement': 'drinks', }), 'context': , - 'entity_id': 'sensor.gs012345_total_coffees_made', + 'entity_id': 'sensor.gs01234_total_coffees_made', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1047', }) # --- -# name: test_sensors[GS012345_total_flushes_made-entry] +# name: test_sensors[GS01234_total_flushes_made-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -222,7 +222,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.gs012345_total_flushes_made', + 'entity_id': 'sensor.gs01234_total_flushes_made', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -239,19 +239,19 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drink_stats_flushing', - 'unique_id': 'GS012345_drink_stats_flushing', + 'unique_id': 'GS01234_drink_stats_flushing', 'unit_of_measurement': 'drinks', }) # --- -# name: test_sensors[GS012345_total_flushes_made-sensor] +# name: test_sensors[GS01234_total_flushes_made-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345 Total flushes made', + 'friendly_name': 'GS01234 Total flushes made', 'state_class': , 'unit_of_measurement': 'drinks', }), 'context': , - 'entity_id': 'sensor.gs012345_total_flushes_made', + 'entity_id': 'sensor.gs01234_total_flushes_made', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/lamarzocco/snapshots/test_switch.ambr b/tests/components/lamarzocco/snapshots/test_switch.ambr index 5e3b99da617..edda4ffee3b 100644 --- a/tests/components/lamarzocco/snapshots/test_switch.ambr +++ b/tests/components/lamarzocco/snapshots/test_switch.ambr @@ -11,7 +11,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': , - 'entity_id': 'switch.gs012345_auto_on_off_os2oswx', + 'entity_id': 'switch.gs01234_auto_on_off_os2oswx', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off', - 'unique_id': 'GS012345_auto_on_off_Os2OswX', + 'unique_id': 'GS01234_auto_on_off_Os2OswX', 'unit_of_measurement': None, }) # --- @@ -44,7 +44,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': , - 'entity_id': 'switch.gs012345_auto_on_off_axfz5bj', + 'entity_id': 'switch.gs01234_auto_on_off_axfz5bj', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -61,17 +61,17 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off', - 'unique_id': 'GS012345_auto_on_off_aXFz5bJ', + 'unique_id': 'GS01234_auto_on_off_aXFz5bJ', 'unit_of_measurement': None, }) # --- # name: test_auto_on_off_switches[state.auto_on_off_Os2OswX] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345 Auto on/off (Os2OswX)', + 'friendly_name': 'GS01234 Auto on/off (Os2OswX)', }), 'context': , - 'entity_id': 'switch.gs012345_auto_on_off_os2oswx', + 'entity_id': 'switch.gs01234_auto_on_off_os2oswx', 
'last_changed': , 'last_reported': , 'last_updated': , @@ -81,10 +81,10 @@ # name: test_auto_on_off_switches[state.auto_on_off_aXFz5bJ] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345 Auto on/off (aXFz5bJ)', + 'friendly_name': 'GS01234 Auto on/off (aXFz5bJ)', }), 'context': , - 'entity_id': 'switch.gs012345_auto_on_off_axfz5bj', + 'entity_id': 'switch.gs01234_auto_on_off_axfz5bj', 'last_changed': , 'last_reported': , 'last_updated': , @@ -105,7 +105,7 @@ 'identifiers': set({ tuple( 'lamarzocco', - 'GS012345', + 'GS01234', ), }), 'is_new': False, @@ -113,30 +113,29 @@ }), 'manufacturer': 'La Marzocco', 'model': , - 'model_id': , - 'name': 'GS012345', + 'name': 'GS01234', 'name_by_user': None, 'primary_config_entry': , - 'serial_number': 'GS012345', + 'serial_number': 'GS01234', 'suggested_area': None, 'sw_version': '1.40', 'via_device_id': None, }) # --- -# name: test_switches[-set_power-kwargs0] +# name: test_switches[-set_power] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345', + 'friendly_name': 'GS01234', }), 'context': , - 'entity_id': 'switch.gs012345', + 'entity_id': 'switch.gs01234', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'on', }) # --- -# name: test_switches[-set_power-kwargs0].1 +# name: test_switches[-set_power].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -148,7 +147,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.gs012345', + 'entity_id': 'switch.gs01234', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -165,70 +164,24 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'main', - 'unique_id': 'GS012345_main', + 'unique_id': 'GS01234_main', 'unit_of_measurement': None, }) # --- -# name: test_switches[_smart_standby_enabled-set_smart_standby-kwargs2] +# name: test_switches[_steam_boiler-set_steam] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345 Smart standby enabled', + 'friendly_name': 'GS01234 Steam boiler', }), 'context': , - 'entity_id': 'switch.gs012345_smart_standby_enabled', + 'entity_id': 'switch.gs01234_steam_boiler', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'on', }) # --- -# name: test_switches[_smart_standby_enabled-set_smart_standby-kwargs2].1 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.gs012345_smart_standby_enabled', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Smart standby enabled', - 'platform': 'lamarzocco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'smart_standby_enabled', - 'unique_id': 'GS012345_smart_standby_enabled', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches[_steam_boiler-set_steam-kwargs1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS012345 Steam boiler', - }), - 'context': , - 'entity_id': 'switch.gs012345_steam_boiler', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switches[_steam_boiler-set_steam-kwargs1].1 +# name: test_switches[_steam_boiler-set_steam].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -240,7 
+193,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.gs012345_steam_boiler', + 'entity_id': 'switch.gs01234_steam_boiler', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -257,7 +210,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_boiler', - 'unique_id': 'GS012345_steam_boiler_enable', + 'unique_id': 'GS01234_steam_boiler_enable', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_update.ambr b/tests/components/lamarzocco/snapshots/test_update.ambr index 46fa4cff815..f08b9249f50 100644 --- a/tests/components/lamarzocco/snapshots/test_update.ambr +++ b/tests/components/lamarzocco/snapshots/test_update.ambr @@ -4,9 +4,8 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', - 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/lamarzocco/icon.png', - 'friendly_name': 'GS012345 Gateway firmware', + 'friendly_name': 'GS01234 Gateway firmware', 'in_progress': False, 'installed_version': 'v3.1-rc4', 'latest_version': 'v3.5-rc3', @@ -15,10 +14,9 @@ 'skipped_version': None, 'supported_features': , 'title': None, - 'update_percentage': None, }), 'context': , - 'entity_id': 'update.gs012345_gateway_firmware', + 'entity_id': 'update.gs01234_gateway_firmware', 'last_changed': , 'last_reported': , 'last_updated': , @@ -37,7 +35,7 @@ 'disabled_by': None, 'domain': 'update', 'entity_category': , - 'entity_id': 'update.gs012345_gateway_firmware', + 'entity_id': 'update.gs01234_gateway_firmware', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -54,7 +52,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'gateway_firmware', - 'unique_id': 'GS012345_gateway_firmware', + 'unique_id': 'GS01234_gateway_firmware', 'unit_of_measurement': None, }) # --- @@ -63,9 +61,8 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', - 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/lamarzocco/icon.png', - 'friendly_name': 'GS012345 Machine firmware', + 'friendly_name': 'GS01234 Machine firmware', 'in_progress': False, 'installed_version': '1.40', 'latest_version': '1.55', @@ -74,10 +71,9 @@ 'skipped_version': None, 'supported_features': , 'title': None, - 'update_percentage': None, }), 'context': , - 'entity_id': 'update.gs012345_machine_firmware', + 'entity_id': 'update.gs01234_machine_firmware', 'last_changed': , 'last_reported': , 'last_updated': , @@ -96,7 +92,7 @@ 'disabled_by': None, 'domain': 'update', 'entity_category': , - 'entity_id': 'update.gs012345_machine_firmware', + 'entity_id': 'update.gs01234_machine_firmware', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -113,7 +109,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'machine_firmware', - 'unique_id': 'GS012345_machine_firmware', + 'unique_id': 'GS01234_machine_firmware', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/test_binary_sensor.py b/tests/components/lamarzocco/test_binary_sensor.py index 956bfe90dd4..d363b96ca21 100644 --- a/tests/components/lamarzocco/test_binary_sensor.py +++ b/tests/components/lamarzocco/test_binary_sensor.py @@ -4,7 +4,8 @@ from datetime import timedelta from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory -from pylamarzocco.exceptions import RequestNotSuccessful +from lmcloud.exceptions import RequestNotSuccessful +import pytest from syrupy 
import SnapshotAssertion from homeassistant.const import STATE_UNAVAILABLE @@ -46,14 +47,15 @@ async def test_binary_sensors( assert entry == snapshot(name=f"{serial_number}_{binary_sensor}-entry") +@pytest.mark.usefixtures("remove_local_connection") async def test_brew_active_does_not_exists( hass: HomeAssistant, mock_lamarzocco: MagicMock, - mock_config_entry_no_local_connection: MockConfigEntry, + mock_config_entry: MockConfigEntry, ) -> None: """Test the La Marzocco currently_making_coffee doesn't exist if host not set.""" - await async_init_integration(hass, mock_config_entry_no_local_connection) + await async_init_integration(hass, mock_config_entry) state = hass.states.get(f"sensor.{mock_lamarzocco.serial_number}_brewing_active") assert state is None diff --git a/tests/components/lamarzocco/test_button.py b/tests/components/lamarzocco/test_button.py index 61b7ba77c22..e1a036df17a 100644 --- a/tests/components/lamarzocco/test_button.py +++ b/tests/components/lamarzocco/test_button.py @@ -1,15 +1,13 @@ """Tests for the La Marzocco Buttons.""" -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import MagicMock -from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er pytestmark = pytest.mark.usefixtures("init_integration") @@ -33,41 +31,14 @@ async def test_start_backflush( assert entry assert entry == snapshot - with patch( - "homeassistant.components.lamarzocco.button.asyncio.sleep", - new_callable=AsyncMock, - ): - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - { - ATTR_ENTITY_ID: f"button.{serial_number}_start_backflush", - }, - blocking=True, - ) + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: f"button.{serial_number}_start_backflush", + }, + blocking=True, + ) assert len(mock_lamarzocco.start_backflush.mock_calls) == 1 mock_lamarzocco.start_backflush.assert_called_once() - - -async def test_button_error( - hass: HomeAssistant, - mock_lamarzocco: MagicMock, -) -> None: - """Test the La Marzocco button error.""" - serial_number = mock_lamarzocco.serial_number - - state = hass.states.get(f"button.{serial_number}_start_backflush") - assert state - - mock_lamarzocco.start_backflush.side_effect = RequestNotSuccessful("Boom.") - with pytest.raises(HomeAssistantError) as exc_info: - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - { - ATTR_ENTITY_ID: f"button.{serial_number}_start_backflush", - }, - blocking=True, - ) - assert exc_info.value.translation_key == "button_error" diff --git a/tests/components/lamarzocco/test_config_flow.py b/tests/components/lamarzocco/test_config_flow.py index be93779848f..92ecd0a13f4 100644 --- a/tests/components/lamarzocco/test_config_flow.py +++ b/tests/components/lamarzocco/test_config_flow.py @@ -2,17 +2,14 @@ from unittest.mock import MagicMock, patch -from pylamarzocco.const import MachineModel -from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful -from pylamarzocco.models import LaMarzoccoDeviceInfo -import pytest +from lmcloud.exceptions import AuthFail, RequestNotSuccessful +from lmcloud.models import LaMarzoccoDeviceInfo -from homeassistant.components.dhcp import DhcpServiceInfo from 
homeassistant.components.lamarzocco.config_flow import CONF_MACHINE from homeassistant.components.lamarzocco.const import CONF_USE_BLUETOOTH, DOMAIN from homeassistant.config_entries import ( SOURCE_BLUETOOTH, - SOURCE_DHCP, + SOURCE_REAUTH, SOURCE_USER, ConfigEntryState, ) @@ -250,7 +247,15 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -267,62 +272,6 @@ async def test_reauth_flow( assert mock_config_entry.data[CONF_PASSWORD] == "new_password" -async def test_reconfigure_flow( - hass: HomeAssistant, - mock_cloud_client: MagicMock, - mock_config_entry: MockConfigEntry, - mock_device_info: LaMarzoccoDeviceInfo, -) -> None: - """Testing reconfgure flow.""" - mock_config_entry.add_to_hass(hass) - - result = await mock_config_entry.start_reconfigure_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" - - result2 = await __do_successful_user_step(hass, result, mock_cloud_client) - service_info = get_bluetooth_service_info( - mock_device_info.model, mock_device_info.serial_number - ) - - with ( - patch( - "homeassistant.components.lamarzocco.config_flow.LaMarzoccoLocalClient.validate_connection", - return_value=True, - ), - patch( - "homeassistant.components.lamarzocco.config_flow.async_discovered_service_info", - return_value=[service_info], - ), - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - { - CONF_HOST: "192.168.1.1", - CONF_MACHINE: mock_device_info.serial_number, - }, - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.FORM - assert result3["step_id"] == "bluetooth_selection" - - result4 = await hass.config_entries.flow.async_configure( - result3["flow_id"], - {CONF_MAC: service_info.address}, - ) - - assert result4["type"] is FlowResultType.ABORT - assert result4["reason"] == "reconfigure_successful" - - assert mock_config_entry.title == "My LaMarzocco" - assert mock_config_entry.data == { - **mock_config_entry.data, - CONF_MAC: service_info.address, - } - - async def test_bluetooth_discovery( hass: HomeAssistant, mock_lamarzocco: MagicMock, @@ -438,50 +387,6 @@ async def test_bluetooth_discovery_errors( } -@pytest.mark.parametrize( - "device_fixture", - [MachineModel.LINEA_MICRA, MachineModel.LINEA_MINI, MachineModel.GS3_AV], -) -async def test_dhcp_discovery( - hass: HomeAssistant, - mock_lamarzocco: MagicMock, - mock_cloud_client: MagicMock, - mock_device_info: LaMarzoccoDeviceInfo, -) -> None: - """Test dhcp discovery.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_DHCP}, - data=DhcpServiceInfo( - ip="192.168.1.42", - hostname=mock_lamarzocco.serial_number, - macaddress="aa:bb:cc:dd:ee:ff", - ), - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - with patch( - "homeassistant.components.lamarzocco.config_flow.LaMarzoccoLocalClient.validate_connection", - return_value=True, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - USER_INPUT, - ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["data"] == { - **USER_INPUT, 
- CONF_HOST: "192.168.1.42", - CONF_MACHINE: mock_lamarzocco.serial_number, - CONF_MODEL: mock_device_info.model, - CONF_NAME: mock_device_info.name, - CONF_TOKEN: mock_device_info.communication_key, - } - - async def test_options_flow( hass: HomeAssistant, mock_lamarzocco: MagicMock, diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index b99077a9059..2c812f79438 100644 --- a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -2,8 +2,8 @@ from unittest.mock import AsyncMock, MagicMock, patch -from pylamarzocco.const import FirmwareType -from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful +from lmcloud.const import FirmwareType +from lmcloud.exceptions import AuthFail, RequestNotSuccessful import pytest from homeassistant.components.lamarzocco.config_flow import CONF_MACHINE diff --git a/tests/components/lamarzocco/test_number.py b/tests/components/lamarzocco/test_number.py index 710a0220e06..288c78c26dd 100644 --- a/tests/components/lamarzocco/test_number.py +++ b/tests/components/lamarzocco/test_number.py @@ -1,16 +1,14 @@ """Tests for the La Marzocco number entities.""" -from typing import Any from unittest.mock import MagicMock -from pylamarzocco.const import ( +from lmcloud.const import ( KEYS_PER_MODEL, BoilerType, MachineModel, PhysicalKey, PrebrewMode, ) -from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -21,7 +19,6 @@ from homeassistant.components.number import ( ) from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from . import async_init_integration @@ -29,41 +26,20 @@ from . 
import async_init_integration from tests.common import MockConfigEntry -@pytest.mark.parametrize( - ("entity_name", "value", "func_name", "kwargs"), - [ - ( - "coffee_target_temperature", - 94, - "set_temp", - {"boiler": BoilerType.COFFEE, "temperature": 94}, - ), - ( - "smart_standby_time", - 23, - "set_smart_standby", - {"enabled": True, "mode": "LastBrewing", "minutes": 23}, - ), - ], -) -async def test_general_numbers( +async def test_coffee_boiler( hass: HomeAssistant, mock_lamarzocco: MagicMock, mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion, - entity_name: str, - value: float, - func_name: str, - kwargs: dict[str, Any], ) -> None: - """Test the numbers available to all machines.""" + """Test the La Marzocco coffee temperature Number.""" await async_init_integration(hass, mock_config_entry) serial_number = mock_lamarzocco.serial_number - state = hass.states.get(f"number.{serial_number}_{entity_name}") + state = hass.states.get(f"number.{serial_number}_coffee_target_temperature") assert state assert state == snapshot @@ -81,14 +57,16 @@ async def test_general_numbers( NUMBER_DOMAIN, SERVICE_SET_VALUE, { - ATTR_ENTITY_ID: f"number.{serial_number}_{entity_name}", - ATTR_VALUE: value, + ATTR_ENTITY_ID: f"number.{serial_number}_coffee_target_temperature", + ATTR_VALUE: 94, }, blocking=True, ) - mock_func = getattr(mock_lamarzocco, func_name) - mock_func.assert_called_once_with(**kwargs) + assert len(mock_lamarzocco.set_temp.mock_calls) == 1 + mock_lamarzocco.set_temp.assert_called_once_with( + boiler=BoilerType.COFFEE, temperature=94 + ) @pytest.mark.parametrize("device_fixture", [MachineModel.GS3_AV, MachineModel.GS3_MP]) @@ -401,46 +379,3 @@ async def test_not_existing_key_entities( for key in range(1, KEYS_PER_MODEL[MachineModel.GS3_AV] + 1): state = hass.states.get(f"number.{serial_number}_{entity}_key_{key}") assert state is None - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_number_error( - hass: HomeAssistant, - mock_lamarzocco: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test number entities raise error on service call.""" - await async_init_integration(hass, mock_config_entry) - serial_number = mock_lamarzocco.serial_number - - state = hass.states.get(f"number.{serial_number}_coffee_target_temperature") - assert state - - mock_lamarzocco.set_temp.side_effect = RequestNotSuccessful("Boom") - with pytest.raises(HomeAssistantError) as exc_info: - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: f"number.{serial_number}_coffee_target_temperature", - ATTR_VALUE: 94, - }, - blocking=True, - ) - assert exc_info.value.translation_key == "number_exception" - - state = hass.states.get(f"number.{serial_number}_dose_key_1") - assert state - - mock_lamarzocco.set_dose.side_effect = RequestNotSuccessful("Boom") - with pytest.raises(HomeAssistantError) as exc_info: - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: f"number.{serial_number}_dose_key_1", - ATTR_VALUE: 99, - }, - blocking=True, - ) - assert exc_info.value.translation_key == "number_exception_key" diff --git a/tests/components/lamarzocco/test_select.py b/tests/components/lamarzocco/test_select.py index 24b96f84f37..e3521b473bd 100644 --- a/tests/components/lamarzocco/test_select.py +++ b/tests/components/lamarzocco/test_select.py @@ -2,8 +2,7 @@ from unittest.mock import MagicMock -from 
pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel -from pylamarzocco.exceptions import RequestNotSuccessful +from lmcloud.const import MachineModel, PrebrewMode, SteamLevel import pytest from syrupy import SnapshotAssertion @@ -14,7 +13,6 @@ from homeassistant.components.select import ( ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er pytestmark = pytest.mark.usefixtures("init_integration") @@ -119,63 +117,3 @@ async def test_pre_brew_infusion_select_none( state = hass.states.get(f"select.{serial_number}_prebrew_infusion_mode") assert state is None - - -async def test_smart_standby_mode( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_lamarzocco: MagicMock, - snapshot: SnapshotAssertion, -) -> None: - """Test the La Marzocco Smart Standby mode select.""" - - serial_number = mock_lamarzocco.serial_number - - state = hass.states.get(f"select.{serial_number}_smart_standby_mode") - - assert state - assert state == snapshot - - entry = entity_registry.async_get(state.entity_id) - assert entry - assert entry == snapshot - - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: f"select.{serial_number}_smart_standby_mode", - ATTR_OPTION: "power_on", - }, - blocking=True, - ) - - mock_lamarzocco.set_smart_standby.assert_called_once_with( - enabled=True, mode=SmartStandbyMode.POWER_ON, minutes=10 - ) - - -async def test_select_errors( - hass: HomeAssistant, - mock_lamarzocco: MagicMock, -) -> None: - """Test select errors.""" - serial_number = mock_lamarzocco.serial_number - - state = hass.states.get(f"select.{serial_number}_prebrew_infusion_mode") - assert state - - mock_lamarzocco.set_prebrew_mode.side_effect = RequestNotSuccessful("Boom") - - # Test setting invalid option - with pytest.raises(HomeAssistantError) as exc_info: - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: f"select.{serial_number}_prebrew_infusion_mode", - ATTR_OPTION: "prebrew", - }, - blocking=True, - ) - assert exc_info.value.translation_key == "select_option_error" diff --git a/tests/components/lamarzocco/test_sensor.py b/tests/components/lamarzocco/test_sensor.py index 6f14d52d1fc..1ce56724fa3 100644 --- a/tests/components/lamarzocco/test_sensor.py +++ b/tests/components/lamarzocco/test_sensor.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock -from pylamarzocco.const import MachineModel +from lmcloud.const import MachineModel import pytest from syrupy import SnapshotAssertion @@ -47,14 +47,15 @@ async def test_sensors( assert entry == snapshot(name=f"{serial_number}_{sensor}-entry") +@pytest.mark.usefixtures("remove_local_connection") async def test_shot_timer_not_exists( hass: HomeAssistant, mock_lamarzocco: MagicMock, - mock_config_entry_no_local_connection: MockConfigEntry, + mock_config_entry: MockConfigEntry, ) -> None: """Test the La Marzocco shot timer doesn't exist if host not set.""" - await async_init_integration(hass, mock_config_entry_no_local_connection) + await async_init_integration(hass, mock_config_entry) state = hass.states.get(f"sensor.{mock_lamarzocco.serial_number}_shot_timer") assert state is None diff --git a/tests/components/lamarzocco/test_switch.py b/tests/components/lamarzocco/test_switch.py index 5c6d1cb1e42..4f60b264a1d 100644 --- a/tests/components/lamarzocco/test_switch.py +++ 
b/tests/components/lamarzocco/test_switch.py @@ -1,9 +1,7 @@ """Tests for La Marzocco switches.""" -from typing import Any from unittest.mock import MagicMock -from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -14,7 +12,6 @@ from homeassistant.components.switch import ( ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from . import WAKE_UP_SLEEP_ENTRY_IDS, async_init_integration @@ -26,15 +23,15 @@ from tests.common import MockConfigEntry ( "entity_name", "method_name", - "kwargs", ), [ - ("", "set_power", {}), - ("_steam_boiler", "set_steam", {}), ( - "_smart_standby_enabled", - "set_smart_standby", - {"mode": "LastBrewing", "minutes": 10}, + "", + "set_power", + ), + ( + "_steam_boiler", + "set_steam", ), ], ) @@ -46,7 +43,6 @@ async def test_switches( snapshot: SnapshotAssertion, entity_name: str, method_name: str, - kwargs: dict[str, Any], ) -> None: """Test the La Marzocco switches.""" await async_init_integration(hass, mock_config_entry) @@ -73,7 +69,7 @@ async def test_switches( ) assert len(control_fn.mock_calls) == 1 - control_fn.assert_called_once_with(enabled=False, **kwargs) + control_fn.assert_called_once_with(False) await hass.services.async_call( SWITCH_DOMAIN, @@ -85,7 +81,7 @@ async def test_switches( ) assert len(control_fn.mock_calls) == 2 - control_fn.assert_called_with(enabled=True, **kwargs) + control_fn.assert_called_with(True) async def test_device( @@ -162,56 +158,3 @@ async def test_auto_on_off_switches( ) wake_up_sleep_entry.enabled = True mock_lamarzocco.set_wake_up_sleep.assert_called_with(wake_up_sleep_entry) - - -async def test_switch_exceptions( - hass: HomeAssistant, - mock_lamarzocco: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the La Marzocco switches.""" - await async_init_integration(hass, mock_config_entry) - - serial_number = mock_lamarzocco.serial_number - - state = hass.states.get(f"switch.{serial_number}") - assert state - - mock_lamarzocco.set_power.side_effect = RequestNotSuccessful("Boom") - - with pytest.raises(HomeAssistantError) as exc_info: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: f"switch.{serial_number}", - }, - blocking=True, - ) - assert exc_info.value.translation_key == "switch_off_error" - - with pytest.raises(HomeAssistantError) as exc_info: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: f"switch.{serial_number}", - }, - blocking=True, - ) - assert exc_info.value.translation_key == "switch_on_error" - - state = hass.states.get(f"switch.{serial_number}_auto_on_off_os2oswx") - assert state - - mock_lamarzocco.set_wake_up_sleep.side_effect = RequestNotSuccessful("Boom") - with pytest.raises(HomeAssistantError) as exc_info: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: f"switch.{serial_number}_auto_on_off_os2oswx", - }, - blocking=True, - ) - assert exc_info.value.translation_key == "auto_on_off_error" diff --git a/tests/components/lamarzocco/test_update.py b/tests/components/lamarzocco/test_update.py index aef37d7c921..02330daf794 100644 --- a/tests/components/lamarzocco/test_update.py +++ b/tests/components/lamarzocco/test_update.py @@ -2,8 +2,7 @@ from unittest.mock import MagicMock -from pylamarzocco.const import FirmwareType 
-from pylamarzocco.exceptions import RequestNotSuccessful +from lmcloud.const import FirmwareType import pytest from syrupy import SnapshotAssertion @@ -55,26 +54,17 @@ async def test_update_entites( mock_lamarzocco.update_firmware.assert_called_once_with(component) -@pytest.mark.parametrize( - ("attr", "value"), - [ - ("side_effect", RequestNotSuccessful("Boom")), - ("return_value", False), - ], -) async def test_update_error( hass: HomeAssistant, mock_lamarzocco: MagicMock, - attr: str, - value: bool | Exception, ) -> None: """Test error during update.""" state = hass.states.get(f"update.{mock_lamarzocco.serial_number}_machine_firmware") assert state - setattr(mock_lamarzocco.update_firmware, attr, value) + mock_lamarzocco.update_firmware.return_value = False - with pytest.raises(HomeAssistantError) as exc_info: + with pytest.raises(HomeAssistantError, match="Update failed"): await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, @@ -83,4 +73,3 @@ async def test_update_error( }, blocking=True, ) - assert exc_info.value.translation_key == "update_failed" diff --git a/tests/components/lametric/conftest.py b/tests/components/lametric/conftest.py index e8ba727f3db..dd3885b78d9 100644 --- a/tests/components/lametric/conftest.py +++ b/tests/components/lametric/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from demetriek import CloudDevice, Device from pydantic import parse_raw_as # pylint: disable=no-name-in-module import pytest +from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git a/tests/components/lametric/test_button.py b/tests/components/lametric/test_button.py index 04efeaac87f..a6cdca5b426 100644 --- a/tests/components/lametric/test_button.py +++ b/tests/components/lametric/test_button.py @@ -49,7 +49,7 @@ async def test_button_app_next( assert device_entry.entry_type is None assert device_entry.identifiers == {(DOMAIN, "SA110405124500W00BS9")} assert device_entry.manufacturer == "LaMetric Inc." - assert device_entry.model_id == "LM 37X8" + assert device_entry.model == "LM 37X8" assert device_entry.name == "Frenck's LaMetric" assert device_entry.sw_version == "2.2.2" assert device_entry.hw_version is None @@ -95,7 +95,7 @@ async def test_button_app_previous( assert device_entry.entry_type is None assert device_entry.identifiers == {(DOMAIN, "SA110405124500W00BS9")} assert device_entry.manufacturer == "LaMetric Inc." - assert device_entry.model_id == "LM 37X8" + assert device_entry.model == "LM 37X8" assert device_entry.name == "Frenck's LaMetric" assert device_entry.sw_version == "2.2.2" assert device_entry.hw_version is None @@ -143,7 +143,7 @@ async def test_button_dismiss_current_notification( assert device_entry.entry_type is None assert device_entry.identifiers == {(DOMAIN, "SA110405124500W00BS9")} assert device_entry.manufacturer == "LaMetric Inc." - assert device_entry.model_id == "LM 37X8" + assert device_entry.model == "LM 37X8" assert device_entry.name == "Frenck's LaMetric" assert device_entry.sw_version == "2.2.2" assert device_entry.hw_version is None @@ -191,7 +191,7 @@ async def test_button_dismiss_all_notifications( assert device_entry.entry_type is None assert device_entry.identifiers == {(DOMAIN, "SA110405124500W00BS9")} assert device_entry.manufacturer == "LaMetric Inc." 
- assert device_entry.model_id == "LM 37X8" + assert device_entry.model == "LM 37X8" assert device_entry.name == "Frenck's LaMetric" assert device_entry.sw_version == "2.2.2" assert device_entry.hw_version is None diff --git a/tests/components/lametric/test_config_flow.py b/tests/components/lametric/test_config_flow.py index 3fbe606c7f1..2a21423ad03 100644 --- a/tests/components/lametric/test_config_flow.py +++ b/tests/components/lametric/test_config_flow.py @@ -20,7 +20,12 @@ from homeassistant.components.ssdp import ( ATTR_UPNP_SERIAL, SsdpServiceInfo, ) -from homeassistant.config_entries import SOURCE_DHCP, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import ( + SOURCE_DHCP, + SOURCE_REAUTH, + SOURCE_SSDP, + SOURCE_USER, +) from homeassistant.const import CONF_API_KEY, CONF_DEVICE, CONF_HOST, CONF_MAC from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -748,7 +753,15 @@ async def test_reauth_cloud_import( """Test reauth flow importing api keys from the cloud.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) flow_id = result["flow_id"] @@ -804,7 +817,15 @@ async def test_reauth_cloud_abort_device_not_found( mock_config_entry.add_to_hass(hass) hass.config_entries.async_update_entry(mock_config_entry, unique_id="UKNOWN_DEVICE") - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) flow_id = result["flow_id"] @@ -851,7 +872,15 @@ async def test_reauth_manual( """Test reauth flow with manual entry.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) flow_id = result["flow_id"] @@ -885,7 +914,15 @@ async def test_reauth_manual_sky( """Test reauth flow with manual entry for LaMetric Sky.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) flow_id = result["flow_id"] diff --git a/tests/components/landisgyr_heat_meter/conftest.py b/tests/components/landisgyr_heat_meter/conftest.py index 1dad983c909..22f29b3a4b1 100644 --- a/tests/components/landisgyr_heat_meter/conftest.py +++ b/tests/components/landisgyr_heat_meter/conftest.py @@ -1,9 +1,9 @@ """Define fixtures for Landis + Gyr Heat Meter tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/landisgyr_heat_meter/test_init.py b/tests/components/landisgyr_heat_meter/test_init.py index 76a376e441c..c9768ec681f 100644 --- a/tests/components/landisgyr_heat_meter/test_init.py +++ 
b/tests/components/landisgyr_heat_meter/test_init.py
@@ -1,6 +1,6 @@
 """Test the Landis + Gyr Heat Meter init."""
 
-from unittest.mock import MagicMock, patch
+from unittest.mock import patch
 
 from homeassistant.components.landisgyr_heat_meter.const import (
     DOMAIN as LANDISGYR_HEAT_METER_DOMAIN,
@@ -17,7 +17,7 @@ API_HEAT_METER_SERVICE = (
 
 
 @patch(API_HEAT_METER_SERVICE)
-async def test_unload_entry(mock_meter_service: MagicMock, hass: HomeAssistant) -> None:
+async def test_unload_entry(_, hass: HomeAssistant) -> None:
     """Test removing config entry."""
     mock_entry_data = {
         "device": "/dev/USB0",
@@ -41,9 +41,7 @@ async def test_unload_entry(mock_meter_service: MagicMock, hass: HomeAssistant)
 
 @patch(API_HEAT_METER_SERVICE)
 async def test_migrate_entry(
-    mock_meter_service: MagicMock,
-    hass: HomeAssistant,
-    entity_registry: er.EntityRegistry,
+    _, hass: HomeAssistant, entity_registry: er.EntityRegistry
 ) -> None:
     """Test successful migration of entry data from version 1 to 2."""
 
diff --git a/tests/components/lastfm/__init__.py b/tests/components/lastfm/__init__.py
index e4eb476f62d..9fe946f8dff 100644
--- a/tests/components/lastfm/__init__.py
+++ b/tests/components/lastfm/__init__.py
@@ -1,6 +1,5 @@
 """The tests for lastfm."""
 
-from typing import Any
 from unittest.mock import patch
 
 from pylast import PyLastError, Track
@@ -92,7 +91,7 @@ class MockUser:
         """Get mock now playing."""
         return self._now_playing_result
 
-    def get_friends(self) -> list[Any]:
+    def get_friends(self) -> list[any]:
         """Get mock friends."""
         if len(self._friends) == 0:
             raise PyLastError("network", "status", "Page not found")
diff --git a/tests/components/laundrify/__init__.py b/tests/components/laundrify/__init__.py
index cb4ab1ad010..c09c6290adf 100644
--- a/tests/components/laundrify/__init__.py
+++ b/tests/components/laundrify/__init__.py
@@ -1 +1,22 @@
 """Tests for the laundrify integration."""
+
+from homeassistant.components.laundrify import DOMAIN
+from homeassistant.const import CONF_ACCESS_TOKEN
+from homeassistant.core import HomeAssistant
+
+from .const import VALID_ACCESS_TOKEN, VALID_ACCOUNT_ID
+
+from tests.common import MockConfigEntry
+
+
+def create_entry(
+    hass: HomeAssistant, access_token: str = VALID_ACCESS_TOKEN
+) -> MockConfigEntry:
+    """Create laundrify entry in Home Assistant."""
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        unique_id=VALID_ACCOUNT_ID,
+        data={CONF_ACCESS_TOKEN: access_token},
+    )
+    entry.add_to_hass(hass)
+    return entry
diff --git a/tests/components/laundrify/conftest.py b/tests/components/laundrify/conftest.py
index 4a78a2e9025..91aeebf81ee 100644
--- a/tests/components/laundrify/conftest.py
+++ b/tests/components/laundrify/conftest.py
@@ -1,76 +1,55 @@
 """Configure py.test."""
 
 import json
-from unittest.mock import AsyncMock, patch
+from unittest.mock import patch
 
-from laundrify_aio import LaundrifyAPI, LaundrifyDevice
 import pytest
 
-from homeassistant.components.laundrify import DOMAIN
-from homeassistant.components.laundrify.const import MANUFACTURER
-from homeassistant.const import CONF_ACCESS_TOKEN
-from homeassistant.core import HomeAssistant
-
 from .const import VALID_ACCESS_TOKEN, VALID_ACCOUNT_ID
 
-from tests.common import MockConfigEntry, load_fixture
-from tests.typing import ClientSessionGenerator
+from tests.common import load_fixture
 
 
-@pytest.fixture(name="mock_device")
-def laundrify_sensor_fixture() -> LaundrifyDevice:
-    """Return a default Laundrify power sensor mock."""
-    # Load test data from machines.json
-    machine_data =
json.loads(load_fixture("laundrify/machines.json"))[0] - - mock_device = AsyncMock(spec=LaundrifyDevice) - mock_device.id = machine_data["id"] - mock_device.manufacturer = MANUFACTURER - mock_device.model = machine_data["model"] - mock_device.name = machine_data["name"] - mock_device.firmwareVersion = machine_data["firmwareVersion"] - return mock_device +@pytest.fixture(name="laundrify_setup_entry") +def laundrify_setup_entry_fixture(): + """Mock laundrify setup entry function.""" + with patch( + "homeassistant.components.laundrify.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry -@pytest.fixture(name="laundrify_config_entry") -async def laundrify_setup_config_entry( - hass: HomeAssistant, access_token: str = VALID_ACCESS_TOKEN -) -> MockConfigEntry: - """Create laundrify entry in Home Assistant.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id=VALID_ACCOUNT_ID, - data={CONF_ACCESS_TOKEN: access_token}, - minor_version=2, - ) - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - return entry +@pytest.fixture(name="laundrify_exchange_code") +def laundrify_exchange_code_fixture(): + """Mock laundrify exchange_auth_code function.""" + with patch( + "laundrify_aio.LaundrifyAPI.exchange_auth_code", + return_value=VALID_ACCESS_TOKEN, + ) as exchange_code_mock: + yield exchange_code_mock + + +@pytest.fixture(name="laundrify_validate_token") +def laundrify_validate_token_fixture(): + """Mock laundrify validate_token function.""" + with patch( + "laundrify_aio.LaundrifyAPI.validate_token", + return_value=True, + ) as validate_token_mock: + yield validate_token_mock @pytest.fixture(name="laundrify_api_mock", autouse=True) -def laundrify_api_fixture(hass_client: ClientSessionGenerator): +def laundrify_api_fixture(laundrify_exchange_code, laundrify_validate_token): """Mock valid laundrify API responses.""" with ( patch( "laundrify_aio.LaundrifyAPI.get_account_id", - return_value=1234, - ), - patch( - "laundrify_aio.LaundrifyAPI.validate_token", - return_value=True, - ), - patch( - "laundrify_aio.LaundrifyAPI.exchange_auth_code", - return_value=VALID_ACCESS_TOKEN, + return_value=VALID_ACCOUNT_ID, ), patch( "laundrify_aio.LaundrifyAPI.get_machines", - return_value=[ - LaundrifyDevice(machine, LaundrifyAPI) - for machine in json.loads(load_fixture("laundrify/machines.json")) - ], - ), + return_value=json.loads(load_fixture("laundrify/machines.json")), + ) as get_machines_mock, ): - yield LaundrifyAPI(VALID_ACCESS_TOKEN, hass_client) + yield get_machines_mock diff --git a/tests/components/laundrify/fixtures/machines.json b/tests/components/laundrify/fixtures/machines.json index 4319e76880e..ab1a737cb45 100644 --- a/tests/components/laundrify/fixtures/machines.json +++ b/tests/components/laundrify/fixtures/machines.json @@ -1,11 +1,8 @@ [ { - "id": "14", + "_id": "14", "name": "Demo Waschmaschine", "status": "OFF", - "internalIP": "192.168.0.123", - "model": "SU02", - "firmwareVersion": "2.1.0", - "totalEnergy": 1337.0 + "firmwareVersion": "2.1.0" } ] diff --git a/tests/components/laundrify/test_config_flow.py b/tests/components/laundrify/test_config_flow.py index 54e849f79d0..69a4b957cf5 100644 --- a/tests/components/laundrify/test_config_flow.py +++ b/tests/components/laundrify/test_config_flow.py @@ -3,17 +3,16 @@ from laundrify_aio import exceptions from homeassistant.components.laundrify.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from 
homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CODE, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . import create_entry from .const import VALID_ACCESS_TOKEN, VALID_AUTH_CODE, VALID_USER_INPUT -from tests.common import MockConfigEntry - -async def test_form(hass: HomeAssistant) -> None: +async def test_form(hass: HomeAssistant, laundrify_setup_entry) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -32,12 +31,14 @@ async def test_form(hass: HomeAssistant) -> None: assert result["data"] == { CONF_ACCESS_TOKEN: VALID_ACCESS_TOKEN, } - assert result["result"].unique_id == "1234" + assert len(laundrify_setup_entry.mock_calls) == 1 -async def test_form_invalid_format(hass: HomeAssistant, laundrify_api_mock) -> None: +async def test_form_invalid_format( + hass: HomeAssistant, laundrify_exchange_code +) -> None: """Test we handle invalid format.""" - laundrify_api_mock.exchange_auth_code.side_effect = exceptions.InvalidFormat + laundrify_exchange_code.side_effect = exceptions.InvalidFormat result = await hass.config_entries.flow.async_init( DOMAIN, @@ -49,9 +50,9 @@ async def test_form_invalid_format(hass: HomeAssistant, laundrify_api_mock) -> N assert result["errors"] == {CONF_CODE: "invalid_format"} -async def test_form_invalid_auth(hass: HomeAssistant, laundrify_api_mock) -> None: +async def test_form_invalid_auth(hass: HomeAssistant, laundrify_exchange_code) -> None: """Test we handle invalid auth.""" - laundrify_api_mock.exchange_auth_code.side_effect = exceptions.UnknownAuthCode + laundrify_exchange_code.side_effect = exceptions.UnknownAuthCode result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_USER}, @@ -62,11 +63,11 @@ async def test_form_invalid_auth(hass: HomeAssistant, laundrify_api_mock) -> Non assert result["errors"] == {CONF_CODE: "invalid_auth"} -async def test_form_cannot_connect(hass: HomeAssistant, laundrify_api_mock) -> None: +async def test_form_cannot_connect( + hass: HomeAssistant, laundrify_exchange_code +) -> None: """Test we handle cannot connect error.""" - laundrify_api_mock.exchange_auth_code.side_effect = ( - exceptions.ApiConnectionException - ) + laundrify_exchange_code.side_effect = exceptions.ApiConnectionException result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_USER}, @@ -77,9 +78,11 @@ async def test_form_cannot_connect(hass: HomeAssistant, laundrify_api_mock) -> N assert result["errors"] == {"base": "cannot_connect"} -async def test_form_unkown_exception(hass: HomeAssistant, laundrify_api_mock) -> None: +async def test_form_unkown_exception( + hass: HomeAssistant, laundrify_exchange_code +) -> None: """Test we handle all other errors.""" - laundrify_api_mock.exchange_auth_code.side_effect = Exception + laundrify_exchange_code.side_effect = Exception result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_USER}, @@ -90,11 +93,11 @@ async def test_form_unkown_exception(hass: HomeAssistant, laundrify_api_mock) -> assert result["errors"] == {"base": "unknown"} -async def test_step_reauth( - hass: HomeAssistant, laundrify_config_entry: MockConfigEntry -) -> None: +async def test_step_reauth(hass: HomeAssistant) -> None: """Test the reauth form is shown.""" - result = await laundrify_config_entry.start_reauth_flow(hass) + result = 
await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_REAUTH} + ) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -108,10 +111,9 @@ async def test_step_reauth( assert result["type"] is FlowResultType.FORM -async def test_integration_already_exists( - hass: HomeAssistant, laundrify_config_entry: MockConfigEntry -) -> None: +async def test_integration_already_exists(hass: HomeAssistant) -> None: """Test we only allow a single config flow.""" + create_entry(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_USER} ) diff --git a/tests/components/laundrify/test_coordinator.py b/tests/components/laundrify/test_coordinator.py index 64b486d1285..0a395c736de 100644 --- a/tests/components/laundrify/test_coordinator.py +++ b/tests/components/laundrify/test_coordinator.py @@ -1,70 +1,52 @@ """Test the laundrify coordinator.""" -from datetime import timedelta +from laundrify_aio import exceptions -from freezegun.api import FrozenDateTimeFactory -from laundrify_aio import LaundrifyDevice, exceptions +from homeassistant.components.laundrify.const import DOMAIN +from homeassistant.core import HomeAssistant -from homeassistant.components.laundrify.const import DEFAULT_POLL_INTERVAL -from homeassistant.const import STATE_UNAVAILABLE -from homeassistant.core import HomeAssistant, State -from homeassistant.util import slugify - -from tests.common import async_fire_time_changed +from . import create_entry -def get_coord_entity(hass: HomeAssistant, mock_device: LaundrifyDevice) -> State: - """Get the coordinated energy sensor entity.""" - device_slug = slugify(mock_device.name, separator="_") - return hass.states.get(f"sensor.{device_slug}_energy") - - -async def test_coordinator_update_success( - hass: HomeAssistant, - laundrify_config_entry, - mock_device: LaundrifyDevice, - freezer: FrozenDateTimeFactory, -) -> None: +async def test_coordinator_update_success(hass: HomeAssistant) -> None: """Test the coordinator update is performed successfully.""" - freezer.tick(timedelta(seconds=DEFAULT_POLL_INTERVAL)) - async_fire_time_changed(hass) + config_entry = create_entry(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + coordinator = hass.data[DOMAIN][config_entry.entry_id]["coordinator"] + await coordinator.async_refresh() await hass.async_block_till_done() - coord_entity = get_coord_entity(hass, mock_device) - assert coord_entity.state != STATE_UNAVAILABLE + assert coordinator.last_update_success async def test_coordinator_update_unauthorized( - hass: HomeAssistant, - laundrify_config_entry, - laundrify_api_mock, - mock_device: LaundrifyDevice, - freezer: FrozenDateTimeFactory, + hass: HomeAssistant, laundrify_api_mock ) -> None: """Test the coordinator update fails if an UnauthorizedException is thrown.""" - laundrify_api_mock.get_machines.side_effect = exceptions.UnauthorizedException - - freezer.tick(timedelta(seconds=DEFAULT_POLL_INTERVAL)) - async_fire_time_changed(hass) + config_entry = create_entry(hass) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - coord_entity = get_coord_entity(hass, mock_device) - assert coord_entity.state == STATE_UNAVAILABLE + coordinator = hass.data[DOMAIN][config_entry.entry_id]["coordinator"] + laundrify_api_mock.side_effect = exceptions.UnauthorizedException + await coordinator.async_refresh() + await hass.async_block_till_done() + + assert not 
coordinator.last_update_success
 
 
 async def test_coordinator_update_connection_failed(
-    hass: HomeAssistant,
-    laundrify_config_entry,
-    laundrify_api_mock,
-    mock_device: LaundrifyDevice,
-    freezer: FrozenDateTimeFactory,
+    hass: HomeAssistant, laundrify_api_mock
 ) -> None:
     """Test the coordinator update fails if an ApiConnectionException is thrown."""
-    laundrify_api_mock.get_machines.side_effect = exceptions.ApiConnectionException
-
-    freezer.tick(timedelta(seconds=DEFAULT_POLL_INTERVAL))
-    async_fire_time_changed(hass)
+    config_entry = create_entry(hass)
+    await hass.config_entries.async_setup(config_entry.entry_id)
     await hass.async_block_till_done()
 
-    coord_entity = get_coord_entity(hass, mock_device)
-    assert coord_entity.state == STATE_UNAVAILABLE
+    coordinator = hass.data[DOMAIN][config_entry.entry_id]["coordinator"]
+    laundrify_api_mock.side_effect = exceptions.ApiConnectionException
+    await coordinator.async_refresh()
+    await hass.async_block_till_done()
+
+    assert not coordinator.last_update_success
diff --git a/tests/components/laundrify/test_init.py b/tests/components/laundrify/test_init.py
index 117da661e29..e3ec54a3225 100644
--- a/tests/components/laundrify/test_init.py
+++ b/tests/components/laundrify/test_init.py
@@ -4,71 +4,56 @@ from laundrify_aio import exceptions
 
 from homeassistant.components.laundrify.const import DOMAIN
 from homeassistant.config_entries import ConfigEntryState
-from homeassistant.const import CONF_ACCESS_TOKEN
 from homeassistant.core import HomeAssistant
 
-from .const import VALID_ACCESS_TOKEN
-
-from tests.common import MockConfigEntry
+from . import create_entry
 
 
 async def test_setup_entry_api_unauthorized(
-    hass: HomeAssistant,
-    laundrify_api_mock,
-    laundrify_config_entry: MockConfigEntry,
+    hass: HomeAssistant, laundrify_validate_token
 ) -> None:
     """Test that ConfigEntryAuthFailed is thrown when authentication fails."""
-    laundrify_api_mock.validate_token.side_effect = exceptions.UnauthorizedException
-    await hass.config_entries.async_reload(laundrify_config_entry.entry_id)
+    laundrify_validate_token.side_effect = exceptions.UnauthorizedException
+    config_entry = create_entry(hass)
+
+    await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
 
     assert len(hass.config_entries.async_entries(DOMAIN)) == 1
-    assert laundrify_config_entry.state is ConfigEntryState.SETUP_ERROR
+    assert config_entry.state is ConfigEntryState.SETUP_ERROR
     assert not hass.data.get(DOMAIN)
 
 
 async def test_setup_entry_api_cannot_connect(
-    hass: HomeAssistant,
-    laundrify_api_mock,
-    laundrify_config_entry: MockConfigEntry,
+    hass: HomeAssistant, laundrify_validate_token
 ) -> None:
     """Test that ApiConnectionException is thrown when connection fails."""
-    laundrify_api_mock.validate_token.side_effect = exceptions.ApiConnectionException
-    await hass.config_entries.async_reload(laundrify_config_entry.entry_id)
+    laundrify_validate_token.side_effect = exceptions.ApiConnectionException
+    config_entry = create_entry(hass)
+
+    await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
 
     assert len(hass.config_entries.async_entries(DOMAIN)) == 1
-    assert laundrify_config_entry.state is ConfigEntryState.SETUP_RETRY
+    assert config_entry.state is ConfigEntryState.SETUP_RETRY
     assert not hass.data.get(DOMAIN)
 
 
-async def test_setup_entry_successful(
-    hass: HomeAssistant, laundrify_config_entry: MockConfigEntry
-) -> None:
+async def test_setup_entry_successful(hass: HomeAssistant) -> None:
     """Test entry can be setup successfully."""
+    config_entry = create_entry(hass)
+    await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
+
     assert len(hass.config_entries.async_entries(DOMAIN)) == 1
-    assert laundrify_config_entry.state is ConfigEntryState.LOADED
+    assert config_entry.state is ConfigEntryState.LOADED
 
 
-async def test_setup_entry_unload(
-    hass: HomeAssistant, laundrify_config_entry: MockConfigEntry
-) -> None:
+async def test_setup_entry_unload(hass: HomeAssistant) -> None:
     """Test unloading the laundrify entry."""
-    await hass.config_entries.async_unload(laundrify_config_entry.entry_id)
+    config_entry = create_entry(hass)
+    await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.config_entries.async_unload(config_entry.entry_id)
 
     assert len(hass.config_entries.async_entries(DOMAIN)) == 1
-    assert laundrify_config_entry.state is ConfigEntryState.NOT_LOADED
-
-
-async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None:
-    """Test migrating a 1.1 config entry to 1.2."""
-    entry = MockConfigEntry(
-        domain=DOMAIN,
-        data={CONF_ACCESS_TOKEN: VALID_ACCESS_TOKEN},
-        version=1,
-        minor_version=1,
-        unique_id=123456,
-    )
-    entry.add_to_hass(hass)
-    assert await hass.config_entries.async_setup(entry.entry_id)
-    assert entry.version == 1
-    assert entry.minor_version == 2
-    assert entry.unique_id == "123456"
+    assert config_entry.state is ConfigEntryState.NOT_LOADED
diff --git a/tests/components/laundrify/test_sensor.py b/tests/components/laundrify/test_sensor.py
deleted file mode 100644
index 49b60200c1d..00000000000
--- a/tests/components/laundrify/test_sensor.py
+++ /dev/null
@@ -1,94 +0,0 @@
-"""Test the laundrify sensor platform."""
-
-from datetime import timedelta
-import logging
-from unittest.mock import patch
-
-from freezegun.api import FrozenDateTimeFactory
-from laundrify_aio import LaundrifyDevice
-from laundrify_aio.exceptions import LaundrifyDeviceException
-import pytest
-
-from homeassistant.components.laundrify.const import (
-    DEFAULT_POLL_INTERVAL,
-    DOMAIN,
-    MODELS,
-)
-from homeassistant.components.sensor import SensorDeviceClass
-from homeassistant.const import (
-    ATTR_DEVICE_CLASS,
-    ATTR_UNIT_OF_MEASUREMENT,
-    STATE_UNKNOWN,
-    UnitOfPower,
-)
-from homeassistant.core import HomeAssistant
-from homeassistant.helpers import device_registry as dr
-from homeassistant.util import slugify
-
-from tests.common import MockConfigEntry, async_fire_time_changed
-
-
-async def test_laundrify_sensor_init(
-    hass: HomeAssistant,
-    device_registry: dr.DeviceRegistry,
-    mock_device: LaundrifyDevice,
-    laundrify_config_entry: MockConfigEntry,
-) -> None:
-    """Test Laundrify sensor default state."""
-    device_slug = slugify(mock_device.name, separator="_")
-
-    state = hass.states.get(f"sensor.{device_slug}_power")
-    assert state.attributes[ATTR_DEVICE_CLASS] == SensorDeviceClass.POWER
-    assert state.state == STATE_UNKNOWN
-
-    device = device_registry.async_get_device({(DOMAIN, mock_device.id)})
-    assert device is not None
-    assert device.name == mock_device.name
-    assert device.identifiers == {(DOMAIN, mock_device.id)}
-    assert device.manufacturer == mock_device.manufacturer
-    assert device.model == MODELS[mock_device.model]
-    assert device.sw_version == mock_device.firmwareVersion
-
-
-async def test_laundrify_sensor_update(
-    hass: HomeAssistant,
-    freezer: FrozenDateTimeFactory,
-    mock_device: LaundrifyDevice,
-    laundrify_config_entry: MockConfigEntry,
-) -> None:
-    """Test Laundrify sensor update."""
-    device_slug =
slugify(mock_device.name, separator="_") - - state = hass.states.get(f"sensor.{device_slug}_power") - assert state.state == STATE_UNKNOWN - - with patch("laundrify_aio.LaundrifyDevice.get_power", return_value=95): - freezer.tick(timedelta(seconds=DEFAULT_POLL_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(f"sensor.{device_slug}_power") - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfPower.WATT - assert state.state == "95" - - -async def test_laundrify_sensor_update_failure( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - freezer: FrozenDateTimeFactory, - mock_device: LaundrifyDevice, - laundrify_config_entry: MockConfigEntry, -) -> None: - """Test that update failures are logged.""" - caplog.set_level(logging.DEBUG) - - # test get_power() to raise a LaundrifyDeviceException - with patch( - "laundrify_aio.LaundrifyDevice.get_power", - side_effect=LaundrifyDeviceException("Raising error to test update failure."), - ): - freezer.tick(timedelta(seconds=DEFAULT_POLL_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert f"Couldn't load power for {mock_device.id}_power" in caplog.text diff --git a/tests/components/lawn_mower/test_init.py b/tests/components/lawn_mower/test_init.py index 0735d4541ff..e7066ed43c1 100644 --- a/tests/components/lawn_mower/test_init.py +++ b/tests/components/lawn_mower/test_init.py @@ -1,9 +1,9 @@ """The tests for the lawn mower integration.""" -from collections.abc import Generator from unittest.mock import MagicMock import pytest +from typing_extensions import Generator from homeassistant.components.lawn_mower import ( DOMAIN as LAWN_MOWER_DOMAIN, @@ -176,4 +176,4 @@ async def test_lawn_mower_state(hass: HomeAssistant) -> None: lawn_mower.hass = hass lawn_mower.start_mowing() - assert lawn_mower.state == LawnMowerActivity.MOWING + assert lawn_mower.state == str(LawnMowerActivity.MOWING) diff --git a/tests/components/lcn/conftest.py b/tests/components/lcn/conftest.py index 3c5979c3c36..f24fdbc054f 100644 --- a/tests/components/lcn/conftest.py +++ b/tests/components/lcn/conftest.py @@ -1,26 +1,22 @@ """Test configuration and mocks for LCN component.""" import json -from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, patch import pypck +from pypck.connection import PchkConnectionManager import pypck.module from pypck.module import GroupConnection, ModuleConnection import pytest -from homeassistant.components.lcn import PchkConnectionManager -from homeassistant.components.lcn.config_flow import LcnFlowHandler from homeassistant.components.lcn.const import DOMAIN -from homeassistant.components.lcn.helpers import AddressType, generate_unique_id -from homeassistant.const import CONF_ADDRESS, CONF_DEVICES, CONF_ENTITIES, CONF_HOST -from homeassistant.core import HomeAssistant +from homeassistant.components.lcn.helpers import generate_unique_id +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, load_fixture - -LATEST_CONFIG_ENTRY_VERSION = (LcnFlowHandler.VERSION, LcnFlowHandler.MINOR_VERSION) +from tests.common import MockConfigEntry, async_mock_service, load_fixture class MockModuleConnection(ModuleConnection): @@ -32,7 +28,7 @@ class MockModuleConnection(ModuleConnection): request_name = 
AsyncMock(return_value="TestModule") send_command = AsyncMock(return_value=True) - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Construct ModuleConnection instance.""" super().__init__(*args, **kwargs) self.serials_request_handler.serial_known.set() @@ -47,75 +43,62 @@ class MockGroupConnection(GroupConnection): class MockPchkConnectionManager(PchkConnectionManager): """Fake connection handler.""" - async def async_connect(self, timeout: int = 30) -> None: + async def async_connect(self, timeout=30): """Mock establishing a connection to PCHK.""" self.authentication_completed_future.set_result(True) self.license_error_future.set_result(True) self.segment_scan_completed_event.set() - async def async_close(self) -> None: + async def async_close(self): """Mock closing a connection to PCHK.""" + @patch.object(pypck.connection, "ModuleConnection", MockModuleConnection) + @patch.object(pypck.connection, "GroupConnection", MockGroupConnection) def get_address_conn(self, addr, request_serials=False): """Get LCN address connection.""" return super().get_address_conn(addr, request_serials) - @patch.object(pypck.connection, "ModuleConnection", MockModuleConnection) - def get_module_conn(self, addr, request_serials=False): - """Get LCN module connection.""" - return super().get_module_conn(addr, request_serials) - - @patch.object(pypck.connection, "GroupConnection", MockGroupConnection) - def get_group_conn(self, addr): - """Get LCN group connection.""" - return super().get_group_conn(addr) - - scan_modules = AsyncMock() send_command = AsyncMock() -def create_config_entry( - name: str, version: tuple[int, int] = LATEST_CONFIG_ENTRY_VERSION -) -> MockConfigEntry: +def create_config_entry(name): """Set up config entries with configuration data.""" fixture_filename = f"lcn/config_entry_{name}.json" entry_data = json.loads(load_fixture(fixture_filename)) - for device in entry_data[CONF_DEVICES]: - device[CONF_ADDRESS] = tuple(device[CONF_ADDRESS]) - for entity in entry_data[CONF_ENTITIES]: - entity[CONF_ADDRESS] = tuple(entity[CONF_ADDRESS]) - options = {} title = entry_data[CONF_HOST] + unique_id = fixture_filename return MockConfigEntry( - entry_id=fixture_filename, domain=DOMAIN, title=title, + unique_id=unique_id, data=entry_data, options=options, - version=version[0], - minor_version=version[1], ) +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.fixture(name="entry") -def create_config_entry_pchk() -> MockConfigEntry: +def create_config_entry_pchk(): """Return one specific config entry.""" return create_config_entry("pchk") @pytest.fixture(name="entry2") -def create_config_entry_myhome() -> MockConfigEntry: +def create_config_entry_myhome(): """Return one specific config entry.""" return create_config_entry("myhome") -async def init_integration( - hass: HomeAssistant, entry: MockConfigEntry -) -> MockPchkConnectionManager: +@pytest.fixture(name="lcn_connection") +async def init_integration(hass, entry): """Set up the LCN integration in Home Assistant.""" - hass.http = Mock() # needs to be mocked as hass.http.register_static_path is called when registering the frontend lcn_connection = None def lcn_connection_factory(*args, **kwargs): @@ -125,16 +108,15 @@ async def init_integration( entry.add_to_hass(hass) with patch( - "homeassistant.components.lcn.PchkConnectionManager", + "pypck.connection.PchkConnectionManager", 
side_effect=lcn_connection_factory, ): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - - return lcn_connection + yield lcn_connection -async def setup_component(hass: HomeAssistant) -> None: +async def setup_component(hass): """Set up the LCN component.""" fixture_filename = "lcn/config.json" config_data = json.loads(load_fixture(fixture_filename)) @@ -143,9 +125,7 @@ async def setup_component(hass: HomeAssistant) -> None: await hass.async_block_till_done() -def get_device( - hass: HomeAssistant, entry: MockConfigEntry, address: AddressType -) -> dr.DeviceEntry: +def get_device(hass, entry, address): """Get LCN device for specified address.""" device_registry = dr.async_get(hass) identifiers = {(DOMAIN, generate_unique_id(entry.entry_id, address))} diff --git a/tests/components/lcn/fixtures/config.json b/tests/components/lcn/fixtures/config.json index ed3e3500900..13b3dd5feed 100644 --- a/tests/components/lcn/fixtures/config.json +++ b/tests/components/lcn/fixtures/config.json @@ -91,35 +91,6 @@ "motor": "motor1" } ], - "climates": [ - { - "name": "Climate1", - "address": "s0.m7", - "source": "var1", - "setpoint": "r1varsetpoint", - "lockable": true, - "min_temp": 0, - "max_temp": 40, - "unit_of_measurement": "°C" - } - ], - "scenes": [ - { - "name": "Romantic", - "address": "s0.m7", - "register": 0, - "scene": 0, - "outputs": ["output1", "output2", "relay1"] - }, - { - "name": "Romantic Transition", - "address": "s0.m7", - "register": 0, - "scene": 1, - "outputs": ["output1", "output2", "relay1"], - "transition": 10 - } - ], "binary_sensors": [ { "name": "Sensor_LockRegulator1", diff --git a/tests/components/lcn/fixtures/config_entry_myhome.json b/tests/components/lcn/fixtures/config_entry_myhome.json index 5abc9749b46..a0f8e7d3e10 100644 --- a/tests/components/lcn/fixtures/config_entry_myhome.json +++ b/tests/components/lcn/fixtures/config_entry_myhome.json @@ -6,7 +6,6 @@ "password": "lcn", "sk_num_tries": 0, "dim_mode": "STEPS200", - "acknowledge": false, "devices": [], "entities": [ { diff --git a/tests/components/lcn/fixtures/config_entry_pchk.json b/tests/components/lcn/fixtures/config_entry_pchk.json index 068b8757707..31b51adfce7 100644 --- a/tests/components/lcn/fixtures/config_entry_pchk.json +++ b/tests/components/lcn/fixtures/config_entry_pchk.json @@ -6,18 +6,17 @@ "password": "lcn", "sk_num_tries": 0, "dim_mode": "STEPS200", - "acknowledge": false, "devices": [ { "address": [0, 7, false], - "name": "TestModule", + "name": "", "hardware_serial": -1, "software_serial": -1, "hardware_type": -1 }, { "address": [0, 5, true], - "name": "TestGroup", + "name": "", "hardware_serial": -1, "software_serial": -1, "hardware_type": -1 @@ -32,7 +31,7 @@ "domain_data": { "output": "OUTPUT1", "dimmable": true, - "transition": 5.0 + "transition": 5000.0 } }, { @@ -43,7 +42,7 @@ "domain_data": { "output": "OUTPUT2", "dimmable": false, - "transition": 0.0 + "transition": 0 } }, { @@ -93,24 +92,6 @@ "output": "RELAY2" } }, - { - "address": [0, 7, false], - "name": "Switch_Regulator1", - "resource": "r1varsetpoint", - "domain": "switch", - "domain_data": { - "output": "R1VARSETPOINT" - } - }, - { - "address": [0, 7, false], - "name": "Switch_KeyLock1", - "resource": "a1", - "domain": "switch", - "domain_data": { - "output": "A1" - } - }, { "address": [0, 5, true], "name": "Switch_Group5", @@ -140,44 +121,6 @@ "reverse_time": "RT1200" } }, - { - "address": [0, 7, false], - "name": "Climate1", - "resource": "var1.r1varsetpoint", - "domain": "climate", - 
"domain_data": { - "source": "VAR1", - "setpoint": "R1VARSETPOINT", - "lockable": true, - "min_temp": 0.0, - "max_temp": 40.0, - "unit_of_measurement": "°C" - } - }, - { - "address": [0, 7, false], - "name": "Romantic", - "resource": "0.0", - "domain": "scene", - "domain_data": { - "register": 0, - "scene": 0, - "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], - "transition": 0.0 - } - }, - { - "address": [0, 7, false], - "name": "Romantic Transition", - "resource": "0.1", - "domain": "scene", - "domain_data": { - "register": 0, - "scene": 1, - "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], - "transition": 10.0 - } - }, { "address": [0, 7, false], "name": "Sensor_LockRegulator1", diff --git a/tests/components/lcn/fixtures/config_entry_pchk_v1_1.json b/tests/components/lcn/fixtures/config_entry_pchk_v1_1.json deleted file mode 100644 index e1893c30b42..00000000000 --- a/tests/components/lcn/fixtures/config_entry_pchk_v1_1.json +++ /dev/null @@ -1,248 +0,0 @@ -{ - "host": "pchk", - "ip_address": "192.168.2.41", - "port": 4114, - "username": "lcn", - "password": "lcn", - "sk_num_tries": 0, - "dim_mode": "STEPS200", - "devices": [ - { - "address": [0, 7, false], - "name": "TestModule", - "hardware_serial": -1, - "software_serial": -1, - "hardware_type": -1 - }, - { - "address": [0, 5, true], - "name": "TestGroup", - "hardware_serial": -1, - "software_serial": -1, - "hardware_type": -1 - } - ], - "entities": [ - { - "address": [0, 7, false], - "name": "Light_Output1", - "resource": "output1", - "domain": "light", - "domain_data": { - "output": "OUTPUT1", - "dimmable": true, - "transition": 5000.0 - } - }, - { - "address": [0, 7, false], - "name": "Light_Output2", - "resource": "output2", - "domain": "light", - "domain_data": { - "output": "OUTPUT2", - "dimmable": false, - "transition": 0 - } - }, - { - "address": [0, 7, false], - "name": "Light_Relay1", - "resource": "relay1", - "domain": "light", - "domain_data": { - "output": "RELAY1", - "dimmable": false, - "transition": 0.0 - } - }, - { - "address": [0, 7, false], - "name": "Switch_Output1", - "resource": "output1", - "domain": "switch", - "domain_data": { - "output": "OUTPUT1" - } - }, - { - "address": [0, 7, false], - "name": "Switch_Output2", - "resource": "output2", - "domain": "switch", - "domain_data": { - "output": "OUTPUT2" - } - }, - { - "address": [0, 7, false], - "name": "Switch_Relay1", - "resource": "relay1", - "domain": "switch", - "domain_data": { - "output": "RELAY1" - } - }, - { - "address": [0, 7, false], - "name": "Switch_Relay2", - "resource": "relay2", - "domain": "switch", - "domain_data": { - "output": "RELAY2" - } - }, - { - "address": [0, 7, false], - "name": "Switch_Regulator1", - "resource": "r1varsetpoint", - "domain": "switch", - "domain_data": { - "output": "R1VARSETPOINT" - } - }, - { - "address": [0, 7, false], - "name": "Switch_KeyLock1", - "resource": "a1", - "domain": "switch", - "domain_data": { - "output": "A1" - } - }, - { - "address": [0, 5, true], - "name": "Switch_Group5", - "resource": "relay1", - "domain": "switch", - "domain_data": { - "output": "RELAY1" - } - }, - { - "address": [0, 7, false], - "name": "Cover_Outputs", - "resource": "outputs", - "domain": "cover", - "domain_data": { - "motor": "OUTPUTS", - "reverse_time": "RT1200" - } - }, - { - "address": [0, 7, false], - "name": "Cover_Relays", - "resource": "motor1", - "domain": "cover", - "domain_data": { - "motor": "MOTOR1", - "reverse_time": "RT1200" - } - }, - { - "address": [0, 7, false], - "name": "Climate1", - "resource": 
"var1.r1varsetpoint", - "domain": "climate", - "domain_data": { - "source": "VAR1", - "setpoint": "R1VARSETPOINT", - "lockable": true, - "min_temp": 0.0, - "max_temp": 40.0, - "unit_of_measurement": "°C" - } - }, - { - "address": [0, 7, false], - "name": "Romantic", - "resource": "0.0", - "domain": "scene", - "domain_data": { - "register": 0, - "scene": 0, - "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], - "transition": null - } - }, - { - "address": [0, 7, false], - "name": "Romantic Transition", - "resource": "0.1", - "domain": "scene", - "domain_data": { - "register": 0, - "scene": 1, - "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], - "transition": 10000 - } - }, - { - "address": [0, 7, false], - "name": "Sensor_LockRegulator1", - "resource": "r1varsetpoint", - "domain": "binary_sensor", - "domain_data": { - "source": "R1VARSETPOINT" - } - }, - { - "address": [0, 7, false], - "name": "Binary_Sensor1", - "resource": "binsensor1", - "domain": "binary_sensor", - "domain_data": { - "source": "BINSENSOR1" - } - }, - { - "address": [0, 7, false], - "name": "Sensor_KeyLock", - "resource": "a5", - "domain": "binary_sensor", - "domain_data": { - "source": "A5" - } - }, - { - "address": [0, 7, false], - "name": "Sensor_Var1", - "resource": "var1", - "domain": "sensor", - "domain_data": { - "source": "VAR1", - "unit_of_measurement": "°C" - } - }, - { - "address": [0, 7, false], - "name": "Sensor_Setpoint1", - "resource": "r1varsetpoint", - "domain": "sensor", - "domain_data": { - "source": "R1VARSETPOINT", - "unit_of_measurement": "°C" - } - }, - { - "address": [0, 7, false], - "name": "Sensor_Led6", - "resource": "led6", - "domain": "sensor", - "domain_data": { - "source": "LED6", - "unit_of_measurement": "NATIVE" - } - }, - { - "address": [0, 7, false], - "name": "Sensor_LogicOp1", - "resource": "logicop1", - "domain": "sensor", - "domain_data": { - "source": "LOGICOP1", - "unit_of_measurement": "NATIVE" - } - } - ] -} diff --git a/tests/components/lcn/fixtures/config_entry_pchk_v1_2.json b/tests/components/lcn/fixtures/config_entry_pchk_v1_2.json deleted file mode 100644 index 7389079dca9..00000000000 --- a/tests/components/lcn/fixtures/config_entry_pchk_v1_2.json +++ /dev/null @@ -1,249 +0,0 @@ -{ - "host": "pchk", - "ip_address": "192.168.2.41", - "port": 4114, - "username": "lcn", - "password": "lcn", - "sk_num_tries": 0, - "dim_mode": "STEPS200", - "acknowledge": false, - "devices": [ - { - "address": [0, 7, false], - "name": "TestModule", - "hardware_serial": -1, - "software_serial": -1, - "hardware_type": -1 - }, - { - "address": [0, 5, true], - "name": "TestGroup", - "hardware_serial": -1, - "software_serial": -1, - "hardware_type": -1 - } - ], - "entities": [ - { - "address": [0, 7, false], - "name": "Light_Output1", - "resource": "output1", - "domain": "light", - "domain_data": { - "output": "OUTPUT1", - "dimmable": true, - "transition": 5000.0 - } - }, - { - "address": [0, 7, false], - "name": "Light_Output2", - "resource": "output2", - "domain": "light", - "domain_data": { - "output": "OUTPUT2", - "dimmable": false, - "transition": 0 - } - }, - { - "address": [0, 7, false], - "name": "Light_Relay1", - "resource": "relay1", - "domain": "light", - "domain_data": { - "output": "RELAY1", - "dimmable": false, - "transition": 0.0 - } - }, - { - "address": [0, 7, false], - "name": "Switch_Output1", - "resource": "output1", - "domain": "switch", - "domain_data": { - "output": "OUTPUT1" - } - }, - { - "address": [0, 7, false], - "name": "Switch_Output2", - "resource": "output2", - "domain": 
"switch", - "domain_data": { - "output": "OUTPUT2" - } - }, - { - "address": [0, 7, false], - "name": "Switch_Relay1", - "resource": "relay1", - "domain": "switch", - "domain_data": { - "output": "RELAY1" - } - }, - { - "address": [0, 7, false], - "name": "Switch_Relay2", - "resource": "relay2", - "domain": "switch", - "domain_data": { - "output": "RELAY2" - } - }, - { - "address": [0, 7, false], - "name": "Switch_Regulator1", - "resource": "r1varsetpoint", - "domain": "switch", - "domain_data": { - "output": "R1VARSETPOINT" - } - }, - { - "address": [0, 7, false], - "name": "Switch_KeyLock1", - "resource": "a1", - "domain": "switch", - "domain_data": { - "output": "A1" - } - }, - { - "address": [0, 5, true], - "name": "Switch_Group5", - "resource": "relay1", - "domain": "switch", - "domain_data": { - "output": "RELAY1" - } - }, - { - "address": [0, 7, false], - "name": "Cover_Outputs", - "resource": "outputs", - "domain": "cover", - "domain_data": { - "motor": "OUTPUTS", - "reverse_time": "RT1200" - } - }, - { - "address": [0, 7, false], - "name": "Cover_Relays", - "resource": "motor1", - "domain": "cover", - "domain_data": { - "motor": "MOTOR1", - "reverse_time": "RT1200" - } - }, - { - "address": [0, 7, false], - "name": "Climate1", - "resource": "var1.r1varsetpoint", - "domain": "climate", - "domain_data": { - "source": "VAR1", - "setpoint": "R1VARSETPOINT", - "lockable": true, - "min_temp": 0.0, - "max_temp": 40.0, - "unit_of_measurement": "°C" - } - }, - { - "address": [0, 7, false], - "name": "Romantic", - "resource": "0.0", - "domain": "scene", - "domain_data": { - "register": 0, - "scene": 0, - "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], - "transition": null - } - }, - { - "address": [0, 7, false], - "name": "Romantic Transition", - "resource": "0.1", - "domain": "scene", - "domain_data": { - "register": 0, - "scene": 1, - "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], - "transition": 10000 - } - }, - { - "address": [0, 7, false], - "name": "Sensor_LockRegulator1", - "resource": "r1varsetpoint", - "domain": "binary_sensor", - "domain_data": { - "source": "R1VARSETPOINT" - } - }, - { - "address": [0, 7, false], - "name": "Binary_Sensor1", - "resource": "binsensor1", - "domain": "binary_sensor", - "domain_data": { - "source": "BINSENSOR1" - } - }, - { - "address": [0, 7, false], - "name": "Sensor_KeyLock", - "resource": "a5", - "domain": "binary_sensor", - "domain_data": { - "source": "A5" - } - }, - { - "address": [0, 7, false], - "name": "Sensor_Var1", - "resource": "var1", - "domain": "sensor", - "domain_data": { - "source": "VAR1", - "unit_of_measurement": "°C" - } - }, - { - "address": [0, 7, false], - "name": "Sensor_Setpoint1", - "resource": "r1varsetpoint", - "domain": "sensor", - "domain_data": { - "source": "R1VARSETPOINT", - "unit_of_measurement": "°C" - } - }, - { - "address": [0, 7, false], - "name": "Sensor_Led6", - "resource": "led6", - "domain": "sensor", - "domain_data": { - "source": "LED6", - "unit_of_measurement": "NATIVE" - } - }, - { - "address": [0, 7, false], - "name": "Sensor_LogicOp1", - "resource": "logicop1", - "domain": "sensor", - "domain_data": { - "source": "LOGICOP1", - "unit_of_measurement": "NATIVE" - } - } - ] -} diff --git a/tests/components/lcn/snapshots/test_binary_sensor.ambr b/tests/components/lcn/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 0ad31437dd1..00000000000 --- a/tests/components/lcn/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,139 +0,0 @@ -# serializer version: 1 -# name: 
test_setup_lcn_binary_sensor[binary_sensor.binary_sensor1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.binary_sensor1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Binary_Sensor1', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-binsensor1', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_binary_sensor[binary_sensor.binary_sensor1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Binary_Sensor1', - }), - 'context': , - 'entity_id': 'binary_sensor.binary_sensor1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_setup_lcn_binary_sensor[binary_sensor.sensor_keylock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.sensor_keylock', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Sensor_KeyLock', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-a5', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_binary_sensor[binary_sensor.sensor_keylock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Sensor_KeyLock', - }), - 'context': , - 'entity_id': 'binary_sensor.sensor_keylock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_setup_lcn_binary_sensor[binary_sensor.sensor_lockregulator1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.sensor_lockregulator1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Sensor_LockRegulator1', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-r1varsetpoint', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_binary_sensor[binary_sensor.sensor_lockregulator1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Sensor_LockRegulator1', - }), - 'context': , - 'entity_id': 'binary_sensor.sensor_lockregulator1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/lcn/snapshots/test_climate.ambr b/tests/components/lcn/snapshots/test_climate.ambr deleted 
file mode 100644 index 443b13312d1..00000000000 --- a/tests/components/lcn/snapshots/test_climate.ambr +++ /dev/null @@ -1,63 +0,0 @@ -# serializer version: 1 -# name: test_setup_lcn_climate[climate.climate1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 40.0, - 'min_temp': 0.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.climate1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Climate1', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-var1.r1varsetpoint', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_climate[climate.climate1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'Climate1', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 40.0, - 'min_temp': 0.0, - 'supported_features': , - 'temperature': None, - }), - 'context': , - 'entity_id': 'climate.climate1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- diff --git a/tests/components/lcn/snapshots/test_cover.ambr b/tests/components/lcn/snapshots/test_cover.ambr deleted file mode 100644 index 82a19060d73..00000000000 --- a/tests/components/lcn/snapshots/test_cover.ambr +++ /dev/null @@ -1,97 +0,0 @@ -# serializer version: 1 -# name: test_setup_lcn_cover[cover.cover_outputs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.cover_outputs', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cover_Outputs', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-outputs', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_cover[cover.cover_outputs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assumed_state': True, - 'friendly_name': 'Cover_Outputs', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.cover_outputs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_setup_lcn_cover[cover.cover_relays-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.cover_relays', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cover_Relays', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 
'lcn/config_entry_pchk.json-m000007-motor1', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_cover[cover.cover_relays-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'assumed_state': True, - 'friendly_name': 'Cover_Relays', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.cover_relays', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- diff --git a/tests/components/lcn/snapshots/test_light.ambr b/tests/components/lcn/snapshots/test_light.ambr deleted file mode 100644 index f53d1fdf2dc..00000000000 --- a/tests/components/lcn/snapshots/test_light.ambr +++ /dev/null @@ -1,167 +0,0 @@ -# serializer version: 1 -# name: test_setup_lcn_light[light.light_output1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.light_output1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light_Output1', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-output1', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_light[light.light_output1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': None, - 'friendly_name': 'Light_Output1', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.light_output1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_lcn_light[light.light_output2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.light_output2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light_Output2', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-output2', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_light[light.light_output2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'color_mode': None, - 'friendly_name': 'Light_Output2', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.light_output2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_lcn_light[light.light_relay1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.light_relay1', - 'has_entity_name': False, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light_Relay1', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-relay1', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_light[light.light_relay1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'color_mode': None, - 'friendly_name': 'Light_Relay1', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.light_relay1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/lcn/snapshots/test_scene.ambr b/tests/components/lcn/snapshots/test_scene.ambr deleted file mode 100644 index c039c4ef951..00000000000 --- a/tests/components/lcn/snapshots/test_scene.ambr +++ /dev/null @@ -1,93 +0,0 @@ -# serializer version: 1 -# name: test_setup_lcn_scene[scene.romantic-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'scene', - 'entity_category': None, - 'entity_id': 'scene.romantic', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Romantic', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-0.0', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_scene[scene.romantic-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Romantic', - }), - 'context': , - 'entity_id': 'scene.romantic', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_setup_lcn_scene[scene.romantic_transition-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'scene', - 'entity_category': None, - 'entity_id': 'scene.romantic_transition', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Romantic Transition', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-0.1', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_scene[scene.romantic_transition-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Romantic Transition', - }), - 'context': , - 'entity_id': 'scene.romantic_transition', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/lcn/snapshots/test_sensor.ambr b/tests/components/lcn/snapshots/test_sensor.ambr deleted file mode 100644 index 56776e3e0f6..00000000000 --- a/tests/components/lcn/snapshots/test_sensor.ambr +++ /dev/null @@ -1,189 +0,0 @@ -# serializer version: 1 -# name: test_setup_lcn_sensor[sensor.sensor_led6-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sensor_led6', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Sensor_Led6', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-led6', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_sensor[sensor.sensor_led6-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Sensor_Led6', - }), - 'context': , - 'entity_id': 'sensor.sensor_led6', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_setup_lcn_sensor[sensor.sensor_logicop1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sensor_logicop1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Sensor_LogicOp1', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-logicop1', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_sensor[sensor.sensor_logicop1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Sensor_LogicOp1', - }), - 'context': , - 'entity_id': 'sensor.sensor_logicop1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_setup_lcn_sensor[sensor.sensor_setpoint1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sensor_setpoint1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sensor_Setpoint1', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-r1varsetpoint', - 'unit_of_measurement': , - }) -# --- -# name: test_setup_lcn_sensor[sensor.sensor_setpoint1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Sensor_Setpoint1', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sensor_setpoint1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_setup_lcn_sensor[sensor.sensor_var1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.sensor_var1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sensor_Var1', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-var1', - 'unit_of_measurement': , - }) -# --- -# name: test_setup_lcn_sensor[sensor.sensor_var1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Sensor_Var1', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sensor_var1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/lcn/snapshots/test_switch.ambr b/tests/components/lcn/snapshots/test_switch.ambr deleted file mode 100644 index 36145b8d4fd..00000000000 --- a/tests/components/lcn/snapshots/test_switch.ambr +++ /dev/null @@ -1,323 +0,0 @@ -# serializer version: 1 -# name: test_setup_lcn_switch[switch.switch_group5-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.switch_group5', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Switch_Group5', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-g000005-relay1', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_switch[switch.switch_group5-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Switch_Group5', - }), - 'context': , - 'entity_id': 'switch.switch_group5', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_lcn_switch[switch.switch_keylock1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.switch_keylock1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Switch_KeyLock1', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-a1', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_switch[switch.switch_keylock1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Switch_KeyLock1', - }), - 'context': , - 'entity_id': 'switch.switch_keylock1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_lcn_switch[switch.switch_output1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 
'switch.switch_output1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Switch_Output1', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-output1', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_switch[switch.switch_output1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Switch_Output1', - }), - 'context': , - 'entity_id': 'switch.switch_output1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_lcn_switch[switch.switch_output2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.switch_output2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Switch_Output2', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-output2', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_switch[switch.switch_output2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Switch_Output2', - }), - 'context': , - 'entity_id': 'switch.switch_output2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_lcn_switch[switch.switch_regulator1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.switch_regulator1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Switch_Regulator1', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-r1varsetpoint', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_switch[switch.switch_regulator1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Switch_Regulator1', - }), - 'context': , - 'entity_id': 'switch.switch_regulator1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_lcn_switch[switch.switch_relay1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.switch_relay1', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Switch_Relay1', - 'platform': 'lcn', - 
'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-relay1', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_switch[switch.switch_relay1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Switch_Relay1', - }), - 'context': , - 'entity_id': 'switch.switch_relay1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_lcn_switch[switch.switch_relay2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.switch_relay2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Switch_Relay2', - 'platform': 'lcn', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'lcn/config_entry_pchk.json-m000007-relay2', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_lcn_switch[switch.switch_relay2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Switch_Relay2', - }), - 'context': , - 'entity_id': 'switch.switch_relay2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/lcn/test_binary_sensor.py b/tests/components/lcn/test_binary_sensor.py index 2f64f421b93..9ba04ac94c7 100644 --- a/tests/components/lcn/test_binary_sensor.py +++ b/tests/components/lcn/test_binary_sensor.py @@ -1,53 +1,68 @@ """Test for the LCN binary sensor platform.""" -from unittest.mock import patch - from pypck.inputs import ModStatusBinSensors, ModStatusKeyLocks, ModStatusVar from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import Var, VarValue -import pytest -from syrupy.assertion import SnapshotAssertion -from homeassistant.components import automation, script -from homeassistant.components.automation import automations_with_entity -from homeassistant.components.lcn import DOMAIN from homeassistant.components.lcn.helpers import get_device_connection -from homeassistant.components.script import scripts_with_entity -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -import homeassistant.helpers.issue_registry as ir -from homeassistant.setup import async_setup_component - -from .conftest import MockConfigEntry, init_integration - -from tests.common import snapshot_platform BINARY_SENSOR_LOCKREGULATOR1 = "binary_sensor.sensor_lockregulator1" BINARY_SENSOR_SENSOR1 = "binary_sensor.binary_sensor1" BINARY_SENSOR_KEYLOCK = "binary_sensor.sensor_keylock" -async def test_setup_lcn_binary_sensor( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: +async def test_setup_lcn_binary_sensor(hass: HomeAssistant, lcn_connection) -> None: """Test the setup of binary sensor.""" - with patch("homeassistant.components.lcn.PLATFORMS", [Platform.BINARY_SENSOR]): - await init_integration(hass, 
entry) + for entity_id in ( + BINARY_SENSOR_LOCKREGULATOR1, + BINARY_SENSOR_SENSOR1, + BINARY_SENSOR_KEYLOCK, + ): + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_UNKNOWN - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + +async def test_entity_state(hass: HomeAssistant, lcn_connection) -> None: + """Test state of entity.""" + state = hass.states.get(BINARY_SENSOR_LOCKREGULATOR1) + assert state + + state = hass.states.get(BINARY_SENSOR_SENSOR1) + assert state + + state = hass.states.get(BINARY_SENSOR_KEYLOCK) + assert state + + +async def test_entity_attributes( + hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +) -> None: + """Test the attributes of an entity.""" + + entity_setpoint1 = entity_registry.async_get(BINARY_SENSOR_LOCKREGULATOR1) + assert entity_setpoint1 + assert entity_setpoint1.unique_id == f"{entry.entry_id}-m000007-r1varsetpoint" + assert entity_setpoint1.original_name == "Sensor_LockRegulator1" + + entity_binsensor1 = entity_registry.async_get(BINARY_SENSOR_SENSOR1) + assert entity_binsensor1 + assert entity_binsensor1.unique_id == f"{entry.entry_id}-m000007-binsensor1" + assert entity_binsensor1.original_name == "Binary_Sensor1" + + entity_keylock = entity_registry.async_get(BINARY_SENSOR_KEYLOCK) + assert entity_keylock + assert entity_keylock.unique_id == f"{entry.entry_id}-m000007-a5" + assert entity_keylock.original_name == "Sensor_KeyLock" async def test_pushed_lock_setpoint_status_change( - hass: HomeAssistant, - entry: MockConfigEntry, + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test the lock setpoint sensor changes its state on status received.""" - await init_integration(hass, entry) - device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -71,11 +86,9 @@ async def test_pushed_lock_setpoint_status_change( async def test_pushed_binsensor_status_change( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test the binary port sensor changes its state on status received.""" - await init_integration(hass, entry) - device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [False] * 8 @@ -101,11 +114,9 @@ async def test_pushed_binsensor_status_change( async def test_pushed_keylock_status_change( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test the keylock sensor changes its state on status received.""" - await init_integration(hass, entry) - device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [[False] * 8 for i in range(4)] @@ -130,62 +141,9 @@ async def test_pushed_keylock_status_change( assert state.state == STATE_ON -async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: +async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: """Test the binary sensor is removed when the config entry is unloaded.""" - await init_integration(hass, entry) - await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(BINARY_SENSOR_LOCKREGULATOR1).state == STATE_UNAVAILABLE assert hass.states.get(BINARY_SENSOR_SENSOR1).state == STATE_UNAVAILABLE assert hass.states.get(BINARY_SENSOR_KEYLOCK).state == STATE_UNAVAILABLE - - -@pytest.mark.parametrize( - "entity_id", ["binary_sensor.sensor_lockregulator1", 
"binary_sensor.sensor_keylock"] -) -async def test_create_issue( - hass: HomeAssistant, - service_calls: list[ServiceCall], - issue_registry: ir.IssueRegistry, - entry: MockConfigEntry, - entity_id, -) -> None: - """Test we create an issue when an automation or script is using a deprecated entity.""" - assert await async_setup_component( - hass, - automation.DOMAIN, - { - automation.DOMAIN: { - "alias": "test", - "trigger": {"platform": "state", "entity_id": entity_id}, - "action": {"action": "test.automation"}, - } - }, - ) - - assert await async_setup_component( - hass, - script.DOMAIN, - { - script.DOMAIN: { - "test": { - "sequence": { - "condition": "state", - "entity_id": entity_id, - "state": STATE_ON, - } - } - } - }, - ) - - await init_integration(hass, entry) - - assert automations_with_entity(hass, entity_id)[0] == "automation.test" - assert scripts_with_entity(hass, entity_id)[0] == "script.test" - - assert issue_registry.async_get_issue( - DOMAIN, f"deprecated_binary_sensor_{entity_id}" - ) - - assert len(issue_registry.issues) == 1 diff --git a/tests/components/lcn/test_climate.py b/tests/components/lcn/test_climate.py deleted file mode 100644 index 7ba263bd597..00000000000 --- a/tests/components/lcn/test_climate.py +++ /dev/null @@ -1,289 +0,0 @@ -"""Test for the LCN climate platform.""" - -from unittest.mock import patch - -from pypck.inputs import ModStatusVar, Unknown -from pypck.lcn_addr import LcnAddr -from pypck.lcn_defs import Var, VarUnit, VarValue -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.climate import ( - ATTR_CURRENT_TEMPERATURE, - ATTR_HVAC_MODE, - ATTR_TARGET_TEMP_HIGH, - ATTR_TARGET_TEMP_LOW, - DOMAIN as DOMAIN_CLIMATE, - SERVICE_SET_HVAC_MODE, - SERVICE_SET_TEMPERATURE, - HVACMode, -) -from homeassistant.components.lcn.helpers import get_device_connection -from homeassistant.const import ( - ATTR_ENTITY_ID, - ATTR_TEMPERATURE, - STATE_UNAVAILABLE, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er - -from .conftest import MockConfigEntry, MockModuleConnection, init_integration - -from tests.common import snapshot_platform - - -async def test_setup_lcn_climate( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test the setup of climate.""" - with patch("homeassistant.components.lcn.PLATFORMS", [Platform.CLIMATE]): - await init_integration(hass, entry) - - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) - - -async def test_set_hvac_mode_heat(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test the hvac mode is set to heat.""" - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: - state = hass.states.get("climate.climate1") - state.state = HVACMode.OFF - - # command failed - lock_regulator.return_value = False - - await hass.services.async_call( - DOMAIN_CLIMATE, - SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: "climate.climate1", ATTR_HVAC_MODE: HVACMode.HEAT}, - blocking=True, - ) - - lock_regulator.assert_awaited_with(0, False) - - state = hass.states.get("climate.climate1") - assert state is not None - assert state.state != HVACMode.HEAT - - # command success - lock_regulator.reset_mock(return_value=True) - lock_regulator.return_value = True - - await hass.services.async_call( - DOMAIN_CLIMATE, - 
SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: "climate.climate1", ATTR_HVAC_MODE: HVACMode.HEAT}, - blocking=True, - ) - - lock_regulator.assert_awaited_with(0, False) - - state = hass.states.get("climate.climate1") - assert state is not None - assert state.state == HVACMode.HEAT - - -async def test_set_hvac_mode_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test the hvac mode is set off.""" - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: - state = hass.states.get("climate.climate1") - state.state = HVACMode.HEAT - - # command failed - lock_regulator.return_value = False - - await hass.services.async_call( - DOMAIN_CLIMATE, - SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: "climate.climate1", ATTR_HVAC_MODE: HVACMode.OFF}, - blocking=True, - ) - - lock_regulator.assert_awaited_with(0, True) - - state = hass.states.get("climate.climate1") - assert state is not None - assert state.state != HVACMode.OFF - - # command success - lock_regulator.reset_mock(return_value=True) - lock_regulator.return_value = True - - await hass.services.async_call( - DOMAIN_CLIMATE, - SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: "climate.climate1", ATTR_HVAC_MODE: HVACMode.OFF}, - blocking=True, - ) - - lock_regulator.assert_awaited_with(0, True) - - state = hass.states.get("climate.climate1") - assert state is not None - assert state.state == HVACMode.OFF - - -async def test_set_temperature(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test the temperature is set.""" - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "var_abs") as var_abs: - state = hass.states.get("climate.climate1") - state.state = HVACMode.HEAT - - # wrong temperature set via service call with high/low attributes - var_abs.return_value = False - - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - DOMAIN_CLIMATE, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: "climate.climate1", - ATTR_TARGET_TEMP_LOW: 24.5, - ATTR_TARGET_TEMP_HIGH: 25.5, - }, - blocking=True, - ) - - var_abs.assert_not_awaited() - - # command failed - var_abs.reset_mock(return_value=True) - var_abs.return_value = False - - await hass.services.async_call( - DOMAIN_CLIMATE, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: "climate.climate1", ATTR_TEMPERATURE: 25.5}, - blocking=True, - ) - - var_abs.assert_awaited_with(Var.R1VARSETPOINT, 25.5, VarUnit.CELSIUS) - - state = hass.states.get("climate.climate1") - assert state is not None - assert state.attributes[ATTR_TEMPERATURE] != 25.5 - - # command success - var_abs.reset_mock(return_value=True) - var_abs.return_value = True - - await hass.services.async_call( - DOMAIN_CLIMATE, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: "climate.climate1", ATTR_TEMPERATURE: 25.5}, - blocking=True, - ) - - var_abs.assert_awaited_with(Var.R1VARSETPOINT, 25.5, VarUnit.CELSIUS) - - state = hass.states.get("climate.climate1") - assert state is not None - assert state.attributes[ATTR_TEMPERATURE] == 25.5 - - -async def test_pushed_current_temperature_status_change( - hass: HomeAssistant, - entry: MockConfigEntry, -) -> None: - """Test the climate changes its current temperature on status received.""" - await init_integration(hass, entry) - - device_connection = get_device_connection(hass, (0, 7, False), entry) - address = LcnAddr(0, 7, False) - - temperature = VarValue.from_celsius(25.5) - - inp = ModStatusVar(address, Var.VAR1, temperature) - await device_connection.async_process_input(inp) - await 
hass.async_block_till_done() - - state = hass.states.get("climate.climate1") - assert state is not None - assert state.state == HVACMode.HEAT - assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 25.5 - assert state.attributes[ATTR_TEMPERATURE] is None - - -async def test_pushed_setpoint_status_change( - hass: HomeAssistant, - entry: MockConfigEntry, -) -> None: - """Test the climate changes its setpoint on status received.""" - await init_integration(hass, entry) - - device_connection = get_device_connection(hass, (0, 7, False), entry) - address = LcnAddr(0, 7, False) - - temperature = VarValue.from_celsius(25.5) - - inp = ModStatusVar(address, Var.R1VARSETPOINT, temperature) - await device_connection.async_process_input(inp) - await hass.async_block_till_done() - - state = hass.states.get("climate.climate1") - assert state is not None - assert state.state == HVACMode.HEAT - assert state.attributes[ATTR_CURRENT_TEMPERATURE] is None - assert state.attributes[ATTR_TEMPERATURE] == 25.5 - - -async def test_pushed_lock_status_change( - hass: HomeAssistant, - entry: MockConfigEntry, -) -> None: - """Test the climate changes its setpoint on status received.""" - await init_integration(hass, entry) - - device_connection = get_device_connection(hass, (0, 7, False), entry) - address = LcnAddr(0, 7, False) - - temperature = VarValue(0x8000) - - inp = ModStatusVar(address, Var.R1VARSETPOINT, temperature) - await device_connection.async_process_input(inp) - await hass.async_block_till_done() - - state = hass.states.get("climate.climate1") - assert state is not None - assert state.state == HVACMode.OFF - assert state.attributes[ATTR_CURRENT_TEMPERATURE] is None - assert state.attributes[ATTR_TEMPERATURE] is None - - -async def test_pushed_wrong_input( - hass: HomeAssistant, - entry: MockConfigEntry, -) -> None: - """Test the climate handles wrong input correctly.""" - await init_integration(hass, entry) - - device_connection = get_device_connection(hass, (0, 7, False), entry) - - await device_connection.async_process_input(Unknown("input")) - await hass.async_block_till_done() - - state = hass.states.get("climate.climate1") - assert state.attributes[ATTR_CURRENT_TEMPERATURE] is None - assert state.attributes[ATTR_TEMPERATURE] is None - - -async def test_unload_config_entry( - hass: HomeAssistant, - entry: MockConfigEntry, -) -> None: - """Test the climate is removed when the config entry is unloaded.""" - await init_integration(hass, entry) - - await hass.config_entries.async_unload(entry.entry_id) - state = hass.states.get("climate.climate1") - assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_config_flow.py b/tests/components/lcn/test_config_flow.py index b7967c247ec..e1705e4b349 100644 --- a/tests/components/lcn/test_config_flow.py +++ b/tests/components/lcn/test_config_flow.py @@ -5,16 +5,9 @@ from unittest.mock import patch from pypck.connection import PchkAuthenticationError, PchkLicenseError import pytest -from homeassistant import config_entries, data_entry_flow -from homeassistant.components.lcn.config_flow import LcnFlowHandler, validate_connection -from homeassistant.components.lcn.const import ( - CONF_ACKNOWLEDGE, - CONF_DIM_MODE, - CONF_SK_NUM_TRIES, - DOMAIN, -) +from homeassistant import config_entries +from homeassistant.components.lcn.const import CONF_DIM_MODE, CONF_SK_NUM_TRIES, DOMAIN from homeassistant.const import ( - CONF_BASE, CONF_DEVICES, CONF_ENTITIES, CONF_HOST, @@ -24,174 +17,84 @@ from homeassistant.const import ( CONF_USERNAME, ) from 
homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry -CONFIG_DATA = { +IMPORT_DATA = { + CONF_HOST: "pchk", CONF_IP_ADDRESS: "127.0.0.1", - CONF_PORT: 1234, + CONF_PORT: 4114, CONF_USERNAME: "lcn", CONF_PASSWORD: "lcn", CONF_SK_NUM_TRIES: 0, CONF_DIM_MODE: "STEPS200", - CONF_ACKNOWLEDGE: False, -} - -CONNECTION_DATA = {CONF_HOST: "pchk", **CONFIG_DATA} - -IMPORT_DATA = { - **CONNECTION_DATA, CONF_DEVICES: [], CONF_ENTITIES: [], } -async def test_show_form(hass: HomeAssistant) -> None: - """Test that the form is served with no input.""" - flow = LcnFlowHandler() - flow.hass = hass +async def test_step_import(hass: HomeAssistant) -> None: + """Test for import step.""" - result = await flow.async_step_user(user_input=None) - - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["step_id"] == "user" - - -async def test_step_user(hass: HomeAssistant) -> None: - """Test for user step.""" with ( - patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), + patch("pypck.connection.PchkConnectionManager.async_connect"), + patch("homeassistant.components.lcn.async_setup", return_value=True), patch("homeassistant.components.lcn.async_setup_entry", return_value=True), ): - data = CONNECTION_DATA.copy() + data = IMPORT_DATA.copy() result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER}, data=data + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data ) + await hass.async_block_till_done() - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["title"] == CONNECTION_DATA[CONF_HOST] - assert result["data"] == { - **CONNECTION_DATA, - CONF_DEVICES: [], - CONF_ENTITIES: [], - } + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "pchk" + assert result["data"] == IMPORT_DATA -async def test_step_user_existing_host( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: - """Test for user defined host already exists.""" - entry.add_to_hass(hass) +async def test_step_import_existing_host(hass: HomeAssistant) -> None: + """Test for update of config_entry if imported host already exists.""" - with patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"): - config_data = entry.data.copy() + # Create config entry and add it to hass + mock_data = IMPORT_DATA.copy() + mock_data.update({CONF_SK_NUM_TRIES: 3, CONF_DIM_MODE: 50}) + mock_entry = MockConfigEntry(domain=DOMAIN, data=mock_data) + mock_entry.add_to_hass(hass) + # Initialize a config flow with different data but same host address + with patch("pypck.connection.PchkConnectionManager.async_connect"): + imported_data = IMPORT_DATA.copy() result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER}, data=config_data + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=imported_data ) + await hass.async_block_till_done() - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["errors"] == {CONF_BASE: "already_configured"} + # Check if config entry was updated + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "existing_configuration_updated" + assert mock_entry.source == config_entries.SOURCE_IMPORT + assert mock_entry.data == IMPORT_DATA @pytest.mark.parametrize( - ("error", "errors"), + ("error", "reason"), [ - (PchkAuthenticationError, {CONF_BASE: "authentication_error"}), - 
(PchkLicenseError, {CONF_BASE: "license_error"}), - (TimeoutError, {CONF_BASE: "connection_refused"}), + (PchkAuthenticationError, "authentication_error"), + (PchkLicenseError, "license_error"), + (TimeoutError, "connection_timeout"), ], ) -async def test_step_user_error( - hass: HomeAssistant, error: type[Exception], errors: dict[str, str] -) -> None: - """Test for error in user step is handled correctly.""" +async def test_step_import_error(hass: HomeAssistant, error, reason) -> None: + """Test for error in import is handled correctly.""" with patch( - "homeassistant.components.lcn.PchkConnectionManager.async_connect", - side_effect=error, + "pypck.connection.PchkConnectionManager.async_connect", side_effect=error ): - data = CONNECTION_DATA.copy() + data = IMPORT_DATA.copy() data.update({CONF_HOST: "pchk"}) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER}, data=data + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data ) + await hass.async_block_till_done() - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["errors"] == errors - - -async def test_step_reconfigure(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test for reconfigure step.""" - entry.add_to_hass(hass) - old_entry_data = entry.data.copy() - - result = await entry.start_reconfigure_flow(hass) - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["step_id"] == "reconfigure" - - with ( - patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), - patch("homeassistant.components.lcn.async_setup_entry", return_value=True), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - CONFIG_DATA.copy(), - ) - assert result["type"] == data_entry_flow.FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - - entry = hass.config_entries.async_get_entry(entry.entry_id) - assert entry.title == CONNECTION_DATA[CONF_HOST] - assert entry.data == {**old_entry_data, **CONFIG_DATA} - - -@pytest.mark.parametrize( - ("error", "errors"), - [ - (PchkAuthenticationError, {CONF_BASE: "authentication_error"}), - (PchkLicenseError, {CONF_BASE: "license_error"}), - (TimeoutError, {CONF_BASE: "connection_refused"}), - ], -) -async def test_step_reconfigure_error( - hass: HomeAssistant, - entry: MockConfigEntry, - error: type[Exception], - errors: dict[str, str], -) -> None: - """Test for error in reconfigure step is handled correctly.""" - entry.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["step_id"] == "reconfigure" - - with patch( - "homeassistant.components.lcn.PchkConnectionManager.async_connect", - side_effect=error, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - CONFIG_DATA.copy(), - ) - - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["errors"] == errors - - -async def test_validate_connection() -> None: - """Test the connection validation.""" - data = CONNECTION_DATA.copy() - - with ( - patch( - "homeassistant.components.lcn.PchkConnectionManager.async_connect" - ) as async_connect, - patch( - "homeassistant.components.lcn.PchkConnectionManager.async_close" - ) as async_close, - ): - result = await validate_connection(data=data) - - assert async_connect.is_called - assert async_close.is_called - assert result is None + assert result["type"] is FlowResultType.ABORT + 
assert result["reason"] == reason diff --git a/tests/components/lcn/test_cover.py b/tests/components/lcn/test_cover.py index ff4311b6687..f50921c08a1 100644 --- a/tests/components/lcn/test_cover.py +++ b/tests/components/lcn/test_cover.py @@ -5,334 +5,336 @@ from unittest.mock import patch from pypck.inputs import ModStatusOutput, ModStatusRelays from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import MotorReverseTime, MotorStateModifier -from syrupy.assertion import SnapshotAssertion -from homeassistant.components.cover import DOMAIN as DOMAIN_COVER, CoverState +from homeassistant.components.cover import DOMAIN as DOMAIN_COVER from homeassistant.components.lcn.helpers import get_device_connection from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_STOP_COVER, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, STATE_UNAVAILABLE, - Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import MockConfigEntry, MockModuleConnection, init_integration - -from tests.common import snapshot_platform +from .conftest import MockModuleConnection COVER_OUTPUTS = "cover.cover_outputs" COVER_RELAYS = "cover.cover_relays" -async def test_setup_lcn_cover( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: +async def test_setup_lcn_cover(hass: HomeAssistant, entry, lcn_connection) -> None: """Test the setup of cover.""" - with patch("homeassistant.components.lcn.PLATFORMS", [Platform.COVER]): - await init_integration(hass, entry) - - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + for entity_id in ( + COVER_OUTPUTS, + COVER_RELAYS, + ): + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_OPEN -async def test_outputs_open(hass: HomeAssistant, entry: MockConfigEntry) -> None: +async def test_entity_attributes( + hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +) -> None: + """Test the attributes of an entity.""" + + entity_outputs = entity_registry.async_get(COVER_OUTPUTS) + + assert entity_outputs + assert entity_outputs.unique_id == f"{entry.entry_id}-m000007-outputs" + assert entity_outputs.original_name == "Cover_Outputs" + + entity_relays = entity_registry.async_get(COVER_RELAYS) + + assert entity_relays + assert entity_relays.unique_id == f"{entry.entry_id}-m000007-motor1" + assert entity_relays.original_name == "Cover_Relays" + + +@patch.object(MockModuleConnection, "control_motors_outputs") +async def test_outputs_open( + control_motors_outputs, hass: HomeAssistant, lcn_connection +) -> None: """Test the outputs cover opens.""" - await init_integration(hass, entry) + state = hass.states.get(COVER_OUTPUTS) + state.state = STATE_CLOSED - with patch.object( - MockModuleConnection, "control_motors_outputs" - ) as control_motors_outputs: - state = hass.states.get(COVER_OUTPUTS) - state.state = CoverState.CLOSED + # command failed + control_motors_outputs.return_value = False - # command failed - control_motors_outputs.return_value = False + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) + await hass.async_block_till_done() + control_motors_outputs.assert_awaited_with( + MotorStateModifier.UP, MotorReverseTime.RT1200 + ) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: 
COVER_OUTPUTS}, - blocking=True, - ) + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state != STATE_OPENING - control_motors_outputs.assert_awaited_with( - MotorStateModifier.UP, MotorReverseTime.RT1200 - ) + # command success + control_motors_outputs.reset_mock(return_value=True) + control_motors_outputs.return_value = True - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state != CoverState.OPENING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) + await hass.async_block_till_done() + control_motors_outputs.assert_awaited_with( + MotorStateModifier.UP, MotorReverseTime.RT1200 + ) - # command success - control_motors_outputs.reset_mock(return_value=True) - control_motors_outputs.return_value = True - - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - - control_motors_outputs.assert_awaited_with( - MotorStateModifier.UP, MotorReverseTime.RT1200 - ) - - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state == CoverState.OPENING + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state == STATE_OPENING -async def test_outputs_close(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@patch.object(MockModuleConnection, "control_motors_outputs") +async def test_outputs_close( + control_motors_outputs, hass: HomeAssistant, lcn_connection +) -> None: """Test the outputs cover closes.""" - await init_integration(hass, entry) + state = hass.states.get(COVER_OUTPUTS) + state.state = STATE_OPEN - with patch.object( - MockModuleConnection, "control_motors_outputs" - ) as control_motors_outputs: - state = hass.states.get(COVER_OUTPUTS) - state.state = CoverState.OPEN + # command failed + control_motors_outputs.return_value = False - # command failed - control_motors_outputs.return_value = False + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) + await hass.async_block_till_done() + control_motors_outputs.assert_awaited_with( + MotorStateModifier.DOWN, MotorReverseTime.RT1200 + ) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state != STATE_CLOSING - control_motors_outputs.assert_awaited_with( - MotorStateModifier.DOWN, MotorReverseTime.RT1200 - ) + # command success + control_motors_outputs.reset_mock(return_value=True) + control_motors_outputs.return_value = True - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state != CoverState.CLOSING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) + await hass.async_block_till_done() + control_motors_outputs.assert_awaited_with( + MotorStateModifier.DOWN, MotorReverseTime.RT1200 + ) - # command success - control_motors_outputs.reset_mock(return_value=True) - control_motors_outputs.return_value = True - - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - - control_motors_outputs.assert_awaited_with( - MotorStateModifier.DOWN, MotorReverseTime.RT1200 - ) - - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state == 
CoverState.CLOSING + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state == STATE_CLOSING -async def test_outputs_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@patch.object(MockModuleConnection, "control_motors_outputs") +async def test_outputs_stop( + control_motors_outputs, hass: HomeAssistant, lcn_connection +) -> None: """Test the outputs cover stops.""" - await init_integration(hass, entry) + state = hass.states.get(COVER_OUTPUTS) + state.state = STATE_CLOSING - with patch.object( - MockModuleConnection, "control_motors_outputs" - ) as control_motors_outputs: - state = hass.states.get(COVER_OUTPUTS) - state.state = CoverState.CLOSING + # command failed + control_motors_outputs.return_value = False - # command failed - control_motors_outputs.return_value = False + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) + await hass.async_block_till_done() + control_motors_outputs.assert_awaited_with(MotorStateModifier.STOP) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state == STATE_CLOSING - control_motors_outputs.assert_awaited_with(MotorStateModifier.STOP) + # command success + control_motors_outputs.reset_mock(return_value=True) + control_motors_outputs.return_value = True - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state == CoverState.CLOSING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) + await hass.async_block_till_done() + control_motors_outputs.assert_awaited_with(MotorStateModifier.STOP) - # command success - control_motors_outputs.reset_mock(return_value=True) - control_motors_outputs.return_value = True - - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - - control_motors_outputs.assert_awaited_with(MotorStateModifier.STOP) - - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state not in (CoverState.CLOSING, CoverState.OPENING) + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state not in (STATE_CLOSING, STATE_OPENING) -async def test_relays_open(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@patch.object(MockModuleConnection, "control_motors_relays") +async def test_relays_open( + control_motors_relays, hass: HomeAssistant, lcn_connection +) -> None: """Test the relays cover opens.""" - await init_integration(hass, entry) + states = [MotorStateModifier.NOCHANGE] * 4 + states[0] = MotorStateModifier.UP - with patch.object( - MockModuleConnection, "control_motors_relays" - ) as control_motors_relays: - states = [MotorStateModifier.NOCHANGE] * 4 - states[0] = MotorStateModifier.UP + state = hass.states.get(COVER_RELAYS) + state.state = STATE_CLOSED - state = hass.states.get(COVER_RELAYS) - state.state = CoverState.CLOSED + # command failed + control_motors_relays.return_value = False - # command failed - control_motors_relays.return_value = False + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) + await hass.async_block_till_done() + control_motors_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_COVER, - 
SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state != STATE_OPENING - control_motors_relays.assert_awaited_with(states) + # command success + control_motors_relays.reset_mock(return_value=True) + control_motors_relays.return_value = True - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state != CoverState.OPENING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) + await hass.async_block_till_done() + control_motors_relays.assert_awaited_with(states) - # command success - control_motors_relays.reset_mock(return_value=True) - control_motors_relays.return_value = True - - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - - control_motors_relays.assert_awaited_with(states) - - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state == CoverState.OPENING + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state == STATE_OPENING -async def test_relays_close(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@patch.object(MockModuleConnection, "control_motors_relays") +async def test_relays_close( + control_motors_relays, hass: HomeAssistant, lcn_connection +) -> None: """Test the relays cover closes.""" - await init_integration(hass, entry) + states = [MotorStateModifier.NOCHANGE] * 4 + states[0] = MotorStateModifier.DOWN - with patch.object( - MockModuleConnection, "control_motors_relays" - ) as control_motors_relays: - states = [MotorStateModifier.NOCHANGE] * 4 - states[0] = MotorStateModifier.DOWN + state = hass.states.get(COVER_RELAYS) + state.state = STATE_OPEN - state = hass.states.get(COVER_RELAYS) - state.state = CoverState.OPEN + # command failed + control_motors_relays.return_value = False - # command failed - control_motors_relays.return_value = False + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) + await hass.async_block_till_done() + control_motors_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state != STATE_CLOSING - control_motors_relays.assert_awaited_with(states) + # command success + control_motors_relays.reset_mock(return_value=True) + control_motors_relays.return_value = True - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state != CoverState.CLOSING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) + await hass.async_block_till_done() + control_motors_relays.assert_awaited_with(states) - # command success - control_motors_relays.reset_mock(return_value=True) - control_motors_relays.return_value = True - - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - - control_motors_relays.assert_awaited_with(states) - - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state == CoverState.CLOSING + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state == STATE_CLOSING -async def test_relays_stop(hass: 
HomeAssistant, entry: MockConfigEntry) -> None: +@patch.object(MockModuleConnection, "control_motors_relays") +async def test_relays_stop( + control_motors_relays, hass: HomeAssistant, lcn_connection +) -> None: """Test the relays cover stops.""" - await init_integration(hass, entry) + states = [MotorStateModifier.NOCHANGE] * 4 + states[0] = MotorStateModifier.STOP - with patch.object( - MockModuleConnection, "control_motors_relays" - ) as control_motors_relays: - states = [MotorStateModifier.NOCHANGE] * 4 - states[0] = MotorStateModifier.STOP + state = hass.states.get(COVER_RELAYS) + state.state = STATE_CLOSING - state = hass.states.get(COVER_RELAYS) - state.state = CoverState.CLOSING + # command failed + control_motors_relays.return_value = False - # command failed - control_motors_relays.return_value = False + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) + await hass.async_block_till_done() + control_motors_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state == STATE_CLOSING - control_motors_relays.assert_awaited_with(states) + # command success + control_motors_relays.reset_mock(return_value=True) + control_motors_relays.return_value = True - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state == CoverState.CLOSING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) + await hass.async_block_till_done() + control_motors_relays.assert_awaited_with(states) - # command success - control_motors_relays.reset_mock(return_value=True) - control_motors_relays.return_value = True - - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - - control_motors_relays.assert_awaited_with(states) - - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state not in (CoverState.CLOSING, CoverState.OPENING) + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state not in (STATE_CLOSING, STATE_OPENING) async def test_pushed_outputs_status_change( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test the outputs cover changes its state on status received.""" - await init_integration(hass, entry) - device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) state = hass.states.get(COVER_OUTPUTS) - state.state = CoverState.CLOSED + state.state = STATE_CLOSED # push status "open" inp = ModStatusOutput(address, 0, 100) @@ -341,7 +343,7 @@ async def test_pushed_outputs_status_change( state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING # push status "stop" inp = ModStatusOutput(address, 0, 0) @@ -350,7 +352,7 @@ async def test_pushed_outputs_status_change( state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state not in (CoverState.OPENING, CoverState.CLOSING) + assert state.state not in (STATE_OPENING, STATE_CLOSING) # push status "close" inp = ModStatusOutput(address, 1, 100) @@ -359,21 +361,19 @@ async def test_pushed_outputs_status_change( state = hass.states.get(COVER_OUTPUTS) assert state 
is not None - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING async def test_pushed_relays_status_change( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test the relays cover changes its state on status received.""" - await init_integration(hass, entry) - device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [False] * 8 state = hass.states.get(COVER_RELAYS) - state.state = CoverState.CLOSED + state.state = STATE_CLOSED # push status "open" states[0:2] = [True, False] @@ -383,7 +383,7 @@ async def test_pushed_relays_status_change( state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING # push status "stop" states[0] = False @@ -393,7 +393,7 @@ async def test_pushed_relays_status_change( state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state not in (CoverState.OPENING, CoverState.CLOSING) + assert state.state not in (STATE_OPENING, STATE_CLOSING) # push status "close" states[0:2] = [True, True] @@ -403,13 +403,11 @@ async def test_pushed_relays_status_change( state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING -async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: +async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: """Test the cover is removed when the config entry is unloaded.""" - await init_integration(hass, entry) - await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(COVER_OUTPUTS).state == STATE_UNAVAILABLE assert hass.states.get(COVER_RELAYS).state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_device_trigger.py b/tests/components/lcn/test_device_trigger.py index 6537c108981..67bd7568254 100644 --- a/tests/components/lcn/test_device_trigger.py +++ b/tests/components/lcn/test_device_trigger.py @@ -15,17 +15,15 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.setup import async_setup_component -from .conftest import MockConfigEntry, get_device, init_integration +from .conftest import get_device from tests.common import async_get_device_automations async def test_get_triggers_module_device( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test we get the expected triggers from a LCN module device.""" - await init_integration(hass, entry) - device = get_device(hass, entry, (0, 7, False)) expected_triggers = [ @@ -52,11 +50,9 @@ async def test_get_triggers_module_device( async def test_get_triggers_non_module_device( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, entry: MockConfigEntry + hass: HomeAssistant, device_registry: dr.DeviceRegistry, entry, lcn_connection ) -> None: """Test we get the expected triggers from a LCN non-module device.""" - await init_integration(hass, entry) - not_included_types = ("transmitter", "transponder", "fingerprint", "send_keys") host_device = device_registry.async_get_device( @@ -76,10 +72,9 @@ async def test_get_triggers_non_module_device( async def test_if_fires_on_transponder_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry + hass: HomeAssistant, calls: list[ServiceCall], entry, 
lcn_connection ) -> None: """Test for transponder event triggers firing.""" - lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -116,18 +111,17 @@ async def test_if_fires_on_transponder_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data == { + assert len(calls) == 1 + assert calls[0].data == { "test": "test_trigger_transponder", "code": "aabbcc", } async def test_if_fires_on_fingerprint_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry + hass: HomeAssistant, calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for fingerprint event triggers firing.""" - lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -164,18 +158,17 @@ async def test_if_fires_on_fingerprint_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data == { + assert len(calls) == 1 + assert calls[0].data == { "test": "test_trigger_fingerprint", "code": "aabbcc", } async def test_if_fires_on_codelock_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry + hass: HomeAssistant, calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for codelock event triggers firing.""" - lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -212,18 +205,17 @@ async def test_if_fires_on_codelock_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data == { + assert len(calls) == 1 + assert calls[0].data == { "test": "test_trigger_codelock", "code": "aabbcc", } async def test_if_fires_on_transmitter_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry + hass: HomeAssistant, calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for transmitter event triggers firing.""" - lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -266,8 +258,8 @@ async def test_if_fires_on_transmitter_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data == { + assert len(calls) == 1 + assert calls[0].data == { "test": "test_trigger_transmitter", "code": "aabbcc", "level": 0, @@ -277,10 +269,9 @@ async def test_if_fires_on_transmitter_event( async def test_if_fires_on_send_keys_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry + hass: HomeAssistant, calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for send_keys event triggers firing.""" - lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -318,8 +309,8 @@ async def test_if_fires_on_send_keys_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data == { + assert len(calls) == 1 + assert calls[0].data == { "test": "test_trigger_send_keys", "key": "a1", "action": "hit", @@ -327,10 +318,9 @@ async def test_if_fires_on_send_keys_event( async def test_get_transponder_trigger_capabilities( - hass: HomeAssistant, 
entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test we get the expected capabilities from a transponder device trigger.""" - await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -351,10 +341,9 @@ async def test_get_transponder_trigger_capabilities( async def test_get_fingerprint_trigger_capabilities( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test we get the expected capabilities from a fingerprint device trigger.""" - await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -375,10 +364,9 @@ async def test_get_fingerprint_trigger_capabilities( async def test_get_transmitter_trigger_capabilities( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test we get the expected capabilities from a transmitter device trigger.""" - await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -409,10 +397,9 @@ async def test_get_transmitter_trigger_capabilities( async def test_get_send_keys_trigger_capabilities( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test we get the expected capabilities from a send_keys device trigger.""" - await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -448,10 +435,9 @@ async def test_get_send_keys_trigger_capabilities( async def test_unknown_trigger_capabilities( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test we get empty capabilities if trigger is unknown.""" - await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) diff --git a/tests/components/lcn/test_events.py b/tests/components/lcn/test_events.py index c6c3559e821..eb62f820103 100644 --- a/tests/components/lcn/test_events.py +++ b/tests/components/lcn/test_events.py @@ -3,11 +3,10 @@ from pypck.inputs import Input, ModSendKeysHost, ModStatusAccessControl from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import AccessControlPeriphery, KeyAction, SendKeyCommand +import pytest from homeassistant.core import HomeAssistant -from .conftest import MockConfigEntry, init_integration - from tests.common import async_capture_events LCN_TRANSPONDER = "lcn_transponder" @@ -16,11 +15,8 @@ LCN_TRANSMITTER = "lcn_transmitter" LCN_SEND_KEYS = "lcn_send_keys" -async def test_fire_transponder_event( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: +async def test_fire_transponder_event(hass: HomeAssistant, lcn_connection) -> None: """Test the transponder event is fired.""" - lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, LCN_TRANSPONDER) inp = ModStatusAccessControl( @@ -37,11 +33,8 @@ async def test_fire_transponder_event( assert events[0].data["code"] == "aabbcc" -async def test_fire_fingerprint_event( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: +async def test_fire_fingerprint_event(hass: HomeAssistant, lcn_connection) -> None: """Test the fingerprint event is fired.""" - lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, LCN_FINGERPRINT) inp = ModStatusAccessControl( @@ -58,9 +51,8 @@ async def test_fire_fingerprint_event( assert events[0].data["code"] == "aabbcc" -async def test_fire_codelock_event(hass: 
HomeAssistant, entry: MockConfigEntry) -> None: +async def test_fire_codelock_event(hass: HomeAssistant, lcn_connection) -> None: """Test the codelock event is fired.""" - lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, "lcn_codelock") inp = ModStatusAccessControl( @@ -77,11 +69,8 @@ async def test_fire_codelock_event(hass: HomeAssistant, entry: MockConfigEntry) assert events[0].data["code"] == "aabbcc" -async def test_fire_transmitter_event( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: +async def test_fire_transmitter_event(hass: HomeAssistant, lcn_connection) -> None: """Test the transmitter event is fired.""" - lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, LCN_TRANSMITTER) inp = ModStatusAccessControl( @@ -104,9 +93,8 @@ async def test_fire_transmitter_event( assert events[0].data["action"] == "hit" -async def test_fire_sendkeys_event(hass: HomeAssistant, entry: MockConfigEntry) -> None: +async def test_fire_sendkeys_event(hass: HomeAssistant, lcn_connection) -> None: """Test the send_keys event is fired.""" - lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, LCN_SEND_KEYS) inp = ModSendKeysHost( @@ -134,10 +122,9 @@ async def test_fire_sendkeys_event(hass: HomeAssistant, entry: MockConfigEntry) async def test_dont_fire_on_non_module_input( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, lcn_connection ) -> None: """Test for no event is fired if a non-module input is received.""" - lcn_connection = await init_integration(hass, entry) inp = Input() for event_name in ( @@ -152,16 +139,16 @@ async def test_dont_fire_on_non_module_input( assert len(events) == 0 -async def test_dont_fire_on_unknown_module( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_dont_fire_on_unknown_module(hass: HomeAssistant, lcn_connection) -> None: """Test for no event is fired if an input from an unknown module is received.""" - lcn_connection = await init_integration(hass, entry) inp = ModStatusAccessControl( LcnAddr(0, 10, False), # unknown module periphery=AccessControlPeriphery.FINGERPRINT, code="aabbcc", ) + events = async_capture_events(hass, LCN_FINGERPRINT) await lcn_connection.async_process_input(inp) await hass.async_block_till_done() diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index 2327635e356..670735439ce 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -1,9 +1,12 @@ """Test init of LCN integration.""" -from unittest.mock import Mock, patch +from unittest.mock import patch -from pypck.connection import PchkAuthenticationError, PchkLicenseError -import pytest +from pypck.connection import ( + PchkAuthenticationError, + PchkConnectionManager, + PchkLicenseError, +) from homeassistant import config_entries from homeassistant.components.lcn.const import DOMAIN @@ -11,18 +14,11 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from .conftest import ( - MockConfigEntry, - MockPchkConnectionManager, - create_config_entry, - init_integration, -) +from .conftest import MockPchkConnectionManager, setup_component -async def test_async_setup_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: 
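A minimal sketch, not part of the diff, of the load/unload pattern the test_init.py hunks around here exercise, assuming the standard `hass` fixture plus the `entry` fixture and `MockPchkConnectionManager` helper from this test package's conftest; the test name below is hypothetical:

from unittest.mock import patch

from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant

from .conftest import MockPchkConnectionManager


async def test_entry_load_unload_sketch(hass: HomeAssistant, entry) -> None:
    """Set up a mocked LCN config entry, then unload it again."""
    # Replace the real PCHK connection with the mock used throughout these tests.
    with patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager):
        entry.add_to_hass(hass)
        assert await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()
        assert entry.state is ConfigEntryState.LOADED

    # Unloading should release the entry again.
    assert await hass.config_entries.async_unload(entry.entry_id)
    await hass.async_block_till_done()
    assert entry.state is ConfigEntryState.NOT_LOADED
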
+async def test_async_setup_entry(hass: HomeAssistant, entry, lcn_connection) -> None: """Test a successful setup entry and unload of entry.""" - await init_integration(hass, entry) - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert entry.state is ConfigEntryState.LOADED @@ -33,16 +29,13 @@ async def test_async_setup_entry(hass: HomeAssistant, entry: MockConfigEntry) -> assert not hass.data.get(DOMAIN) -async def test_async_setup_multiple_entries( - hass: HomeAssistant, entry: MockConfigEntry, entry2 -) -> None: +async def test_async_setup_multiple_entries(hass: HomeAssistant, entry, entry2) -> None: """Test a successful setup and unload of multiple entries.""" - hass.http = Mock() - with patch( - "homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager - ): + with patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager): for config_entry in (entry, entry2): - await init_integration(hass, config_entry) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED assert len(hass.config_entries.async_entries(DOMAIN)) == 2 @@ -60,7 +53,7 @@ async def test_async_setup_entry_update( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - entry: MockConfigEntry, + entry, ) -> None: """Test a successful setup entry if entry with same id already exists.""" # setup first entry @@ -82,17 +75,22 @@ async def test_async_setup_entry_update( assert dummy_entity in entity_registry.entities.values() assert dummy_device in device_registry.devices.values() + # setup new entry with same data via import step (should cleanup dummy device) + with patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager): + await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=entry.data + ) + + assert dummy_device not in device_registry.devices.values() + assert dummy_entity not in entity_registry.entities.values() + -@pytest.mark.parametrize( - "exception", [PchkAuthenticationError, PchkLicenseError, TimeoutError] -) async def test_async_setup_entry_raises_authentication_error( - hass: HomeAssistant, entry: MockConfigEntry, exception: Exception + hass: HomeAssistant, entry ) -> None: """Test that an authentication error is handled properly.""" - with patch( - "homeassistant.components.lcn.PchkConnectionManager.async_connect", - side_effect=exception, + with patch.object( + PchkConnectionManager, "async_connect", side_effect=PchkAuthenticationError ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -101,33 +99,38 @@ async def test_async_setup_entry_raises_authentication_error( assert entry.state is ConfigEntryState.SETUP_ERROR -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_migrate_1_1(hass: HomeAssistant, entry) -> None: - """Test migration config entry.""" - entry_v1_1 = create_config_entry("pchk_v1_1", version=(1, 1)) - entry_v1_1.add_to_hass(hass) +async def test_async_setup_entry_raises_license_error( + hass: HomeAssistant, entry +) -> None: + """Test that an authentication error is handled properly.""" + with patch.object( + PchkConnectionManager, "async_connect", side_effect=PchkLicenseError + ): + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() - await 
hass.config_entries.async_setup(entry_v1_1.entry_id) - await hass.async_block_till_done() - - entry_migrated = hass.config_entries.async_get_entry(entry_v1_1.entry_id) - assert entry_migrated.state is ConfigEntryState.LOADED - assert entry_migrated.version == 2 - assert entry_migrated.minor_version == 1 - assert entry_migrated.data == entry.data + assert entry.state is ConfigEntryState.SETUP_ERROR -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_migrate_1_2(hass: HomeAssistant, entry) -> None: - """Test migration config entry.""" - entry_v1_2 = create_config_entry("pchk_v1_2", version=(1, 2)) - entry_v1_2.add_to_hass(hass) +async def test_async_setup_entry_raises_timeout_error( + hass: HomeAssistant, entry +) -> None: + """Test that an authentication error is handled properly.""" + with patch.object(PchkConnectionManager, "async_connect", side_effect=TimeoutError): + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() - await hass.config_entries.async_setup(entry_v1_2.entry_id) - await hass.async_block_till_done() + assert entry.state is ConfigEntryState.SETUP_ERROR - entry_migrated = hass.config_entries.async_get_entry(entry_v1_2.entry_id) - assert entry_migrated.state is ConfigEntryState.LOADED - assert entry_migrated.version == 2 - assert entry_migrated.minor_version == 1 - assert entry_migrated.data == entry.data + +async def test_async_setup_from_configuration_yaml(hass: HomeAssistant) -> None: + """Test a successful setup using data from configuration.yaml.""" + with ( + patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager), + patch("homeassistant.components.lcn.async_setup_entry") as async_setup_entry, + ): + await setup_component(hass) + + assert async_setup_entry.await_count == 2 diff --git a/tests/components/lcn/test_light.py b/tests/components/lcn/test_light.py index 4251d997724..b91f3d5b17c 100644 --- a/tests/components/lcn/test_light.py +++ b/tests/components/lcn/test_light.py @@ -5,278 +5,297 @@ from unittest.mock import patch from pypck.inputs import ModStatusOutput, ModStatusRelays from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import RelayStateModifier -from syrupy.assertion import SnapshotAssertion from homeassistant.components.lcn.helpers import get_device_connection from homeassistant.components.light import ( ATTR_BRIGHTNESS, + ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, DOMAIN as DOMAIN_LIGHT, + ColorMode, + LightEntityFeature, ) from homeassistant.const import ( ATTR_ENTITY_ID, + ATTR_SUPPORTED_FEATURES, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, - Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import MockConfigEntry, MockModuleConnection, init_integration - -from tests.common import snapshot_platform +from .conftest import MockModuleConnection LIGHT_OUTPUT1 = "light.light_output1" LIGHT_OUTPUT2 = "light.light_output2" LIGHT_RELAY1 = "light.light_relay1" -async def test_setup_lcn_light( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: +async def test_setup_lcn_light(hass: HomeAssistant, lcn_connection) -> None: """Test the setup of light.""" - with patch("homeassistant.components.lcn.PLATFORMS", [Platform.LIGHT]): - await init_integration(hass, entry) - - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) 
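A minimal sketch, not part of the diff, of the mocked-command pattern the light (and cover) test hunks rely on: the entity only adopts the optimistic state when the mocked module call reports success. It assumes the `hass` and `lcn_connection` fixtures and the `MockModuleConnection` helper from this package's conftest; the test name is hypothetical, while the entity id and the expected dim_output arguments are taken from the tests in this file:

from unittest.mock import patch

from homeassistant.components.light import DOMAIN as DOMAIN_LIGHT
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_ON, STATE_ON
from homeassistant.core import HomeAssistant

from .conftest import MockModuleConnection


@patch.object(MockModuleConnection, "dim_output")
async def test_output_turn_on_sketch(
    dim_output, hass: HomeAssistant, lcn_connection
) -> None:
    """Only a successful module command flips the light to ON."""
    # Simulate the module rejecting the command: the state must stay unchanged.
    dim_output.return_value = False
    await hass.services.async_call(
        DOMAIN_LIGHT,
        SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: "light.light_output1"},
        blocking=True,
    )
    await hass.async_block_till_done()
    dim_output.assert_awaited_with(0, 100, 9)
    assert hass.states.get("light.light_output1").state != STATE_ON

    # Simulate success: the optimistic state update is applied.
    dim_output.return_value = True
    await hass.services.async_call(
        DOMAIN_LIGHT,
        SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: "light.light_output1"},
        blocking=True,
    )
    await hass.async_block_till_done()
    assert hass.states.get("light.light_output1").state == STATE_ON
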
+ for entity_id in ( + LIGHT_OUTPUT1, + LIGHT_OUTPUT2, + LIGHT_RELAY1, + ): + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_OFF -async def test_output_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: +async def test_entity_state(hass: HomeAssistant, lcn_connection) -> None: + """Test state of entity.""" + state = hass.states.get(LIGHT_OUTPUT1) + assert state + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.BRIGHTNESS] + + state = hass.states.get(LIGHT_OUTPUT2) + assert state + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.ONOFF] + + +async def test_entity_attributes( + hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +) -> None: + """Test the attributes of an entity.""" + entity_output = entity_registry.async_get(LIGHT_OUTPUT1) + + assert entity_output + assert entity_output.unique_id == f"{entry.entry_id}-m000007-output1" + assert entity_output.original_name == "Light_Output1" + + entity_relay = entity_registry.async_get(LIGHT_RELAY1) + + assert entity_relay + assert entity_relay.unique_id == f"{entry.entry_id}-m000007-relay1" + assert entity_relay.original_name == "Light_Relay1" + + +@patch.object(MockModuleConnection, "dim_output") +async def test_output_turn_on(dim_output, hass: HomeAssistant, lcn_connection) -> None: """Test the output light turns on.""" - await init_integration(hass, entry) + # command failed + dim_output.return_value = False - with patch.object(MockModuleConnection, "dim_output") as dim_output: - # command failed - dim_output.return_value = False + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, + blocking=True, + ) + await hass.async_block_till_done() + dim_output.assert_awaited_with(0, 100, 9) - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, - blocking=True, - ) + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state != STATE_ON - dim_output.assert_awaited_with(0, 100, 9) + # command success + dim_output.reset_mock(return_value=True) + dim_output.return_value = True - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state != STATE_ON + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, + blocking=True, + ) + await hass.async_block_till_done() + dim_output.assert_awaited_with(0, 100, 9) - # command success - dim_output.reset_mock(return_value=True) - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, - blocking=True, - ) - - dim_output.assert_awaited_with(0, 100, 9) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state == STATE_ON + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state == STATE_ON +@patch.object(MockModuleConnection, "dim_output") async def test_output_turn_on_with_attributes( - hass: HomeAssistant, entry: MockConfigEntry + dim_output, hass: HomeAssistant, lcn_connection ) -> None: """Test the output light turns on.""" - await init_integration(hass, entry) + dim_output.return_value = True - with patch.object(MockModuleConnection, "dim_output") as dim_output: - dim_output.return_value 
= True + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: LIGHT_OUTPUT1, + ATTR_BRIGHTNESS: 50, + ATTR_TRANSITION: 2, + }, + blocking=True, + ) + await hass.async_block_till_done() + dim_output.assert_awaited_with(0, 19, 6) - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: LIGHT_OUTPUT1, - ATTR_BRIGHTNESS: 50, - ATTR_TRANSITION: 2, - }, - blocking=True, - ) - - dim_output.assert_awaited_with(0, 19, 6) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state == STATE_ON + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state == STATE_ON -async def test_output_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@patch.object(MockModuleConnection, "dim_output") +async def test_output_turn_off(dim_output, hass: HomeAssistant, lcn_connection) -> None: """Test the output light turns off.""" - await init_integration(hass, entry) + state = hass.states.get(LIGHT_OUTPUT1) + state.state = STATE_ON - with patch.object(MockModuleConnection, "dim_output") as dim_output: - state = hass.states.get(LIGHT_OUTPUT1) - state.state = STATE_ON + # command failed + dim_output.return_value = False - # command failed - dim_output.return_value = False + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, + blocking=True, + ) + await hass.async_block_till_done() + dim_output.assert_awaited_with(0, 0, 9) - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, - blocking=True, - ) + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state != STATE_OFF - dim_output.assert_awaited_with(0, 0, 9) + # command success + dim_output.reset_mock(return_value=True) + dim_output.return_value = True - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state != STATE_OFF + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, + blocking=True, + ) + await hass.async_block_till_done() + dim_output.assert_awaited_with(0, 0, 9) - # command success - dim_output.reset_mock(return_value=True) - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, - blocking=True, - ) - - dim_output.assert_awaited_with(0, 0, 9) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state == STATE_OFF + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state == STATE_OFF +@patch.object(MockModuleConnection, "dim_output") async def test_output_turn_off_with_attributes( - hass: HomeAssistant, entry: MockConfigEntry + dim_output, hass: HomeAssistant, lcn_connection ) -> None: """Test the output light turns off.""" - await init_integration(hass, entry) + dim_output.return_value = True - with patch.object(MockModuleConnection, "dim_output") as dim_output: - dim_output.return_value = True + state = hass.states.get(LIGHT_OUTPUT1) + state.state = STATE_ON - state = hass.states.get(LIGHT_OUTPUT1) - state.state = STATE_ON + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: LIGHT_OUTPUT1, + ATTR_TRANSITION: 2, + }, + blocking=True, + ) + await hass.async_block_till_done() + dim_output.assert_awaited_with(0, 0, 6) - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: LIGHT_OUTPUT1, - 
ATTR_TRANSITION: 2, - }, - blocking=True, - ) - - dim_output.assert_awaited_with(0, 0, 6) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state == STATE_OFF + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state == STATE_OFF -async def test_relay_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@patch.object(MockModuleConnection, "control_relays") +async def test_relay_turn_on( + control_relays, hass: HomeAssistant, lcn_connection +) -> None: """Test the relay light turns on.""" - await init_integration(hass, entry) + states = [RelayStateModifier.NOCHANGE] * 8 + states[0] = RelayStateModifier.ON - with patch.object(MockModuleConnection, "control_relays") as control_relays: - states = [RelayStateModifier.NOCHANGE] * 8 - states[0] = RelayStateModifier.ON + # command failed + control_relays.return_value = False - # command failed - control_relays.return_value = False + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: LIGHT_RELAY1}, + blocking=True, + ) + await hass.async_block_till_done() + control_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: LIGHT_RELAY1}, - blocking=True, - ) + state = hass.states.get(LIGHT_RELAY1) + assert state is not None + assert state.state != STATE_ON - control_relays.assert_awaited_with(states) + # command success + control_relays.reset_mock(return_value=True) + control_relays.return_value = True - state = hass.states.get(LIGHT_RELAY1) - assert state is not None - assert state.state != STATE_ON + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: LIGHT_RELAY1}, + blocking=True, + ) + await hass.async_block_till_done() + control_relays.assert_awaited_with(states) - # command success - control_relays.reset_mock(return_value=True) - control_relays.return_value = True - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: LIGHT_RELAY1}, - blocking=True, - ) - - control_relays.assert_awaited_with(states) - - state = hass.states.get(LIGHT_RELAY1) - assert state is not None - assert state.state == STATE_ON + state = hass.states.get(LIGHT_RELAY1) + assert state is not None + assert state.state == STATE_ON -async def test_relay_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@patch.object(MockModuleConnection, "control_relays") +async def test_relay_turn_off( + control_relays, hass: HomeAssistant, lcn_connection +) -> None: """Test the relay light turns off.""" - await init_integration(hass, entry) + states = [RelayStateModifier.NOCHANGE] * 8 + states[0] = RelayStateModifier.OFF - with patch.object(MockModuleConnection, "control_relays") as control_relays: - states = [RelayStateModifier.NOCHANGE] * 8 - states[0] = RelayStateModifier.OFF + state = hass.states.get(LIGHT_RELAY1) + state.state = STATE_ON - state = hass.states.get(LIGHT_RELAY1) - state.state = STATE_ON + # command failed + control_relays.return_value = False - # command failed - control_relays.return_value = False + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: LIGHT_RELAY1}, + blocking=True, + ) + await hass.async_block_till_done() + control_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: LIGHT_RELAY1}, - blocking=True, - ) + state = hass.states.get(LIGHT_RELAY1) + assert state is not None + assert state.state != 
STATE_OFF - control_relays.assert_awaited_with(states) + # command success + control_relays.reset_mock(return_value=True) + control_relays.return_value = True - state = hass.states.get(LIGHT_RELAY1) - assert state is not None - assert state.state != STATE_OFF + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: LIGHT_RELAY1}, + blocking=True, + ) + await hass.async_block_till_done() + control_relays.assert_awaited_with(states) - # command success - control_relays.reset_mock(return_value=True) - control_relays.return_value = True - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: LIGHT_RELAY1}, - blocking=True, - ) - - control_relays.assert_awaited_with(states) - - state = hass.states.get(LIGHT_RELAY1) - assert state is not None - assert state.state == STATE_OFF + state = hass.states.get(LIGHT_RELAY1) + assert state is not None + assert state.state == STATE_OFF async def test_pushed_output_status_change( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test the output light changes its state on status received.""" - await init_integration(hass, entry) - device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -301,11 +320,9 @@ async def test_pushed_output_status_change( async def test_pushed_relay_status_change( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test the relay light changes its state on status received.""" - await init_integration(hass, entry) - device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [False] * 8 @@ -331,9 +348,7 @@ async def test_pushed_relay_status_change( assert state.state == STATE_OFF -async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: +async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: """Test the light is removed when the config entry is unloaded.""" - await init_integration(hass, entry) - await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(LIGHT_OUTPUT1).state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_scene.py b/tests/components/lcn/test_scene.py deleted file mode 100644 index 27e7864df41..00000000000 --- a/tests/components/lcn/test_scene.py +++ /dev/null @@ -1,64 +0,0 @@ -"""Test for the LCN scene platform.""" - -from unittest.mock import patch - -from pypck.lcn_defs import OutputPort, RelayPort -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.scene import DOMAIN as DOMAIN_SCENE -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_ON, - STATE_UNAVAILABLE, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .conftest import MockConfigEntry, MockModuleConnection, init_integration - -from tests.common import snapshot_platform - - -async def test_setup_lcn_scene( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test the setup of switch.""" - with patch("homeassistant.components.lcn.PLATFORMS", [Platform.SCENE]): - await init_integration(hass, entry) - - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) - - -async def test_scene_activate( - hass: HomeAssistant, - entry: MockConfigEntry, -) -> None: - """Test the scene is 
activated.""" - await init_integration(hass, entry) - with patch.object(MockModuleConnection, "activate_scene") as activate_scene: - await hass.services.async_call( - DOMAIN_SCENE, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "scene.romantic"}, - blocking=True, - ) - - state = hass.states.get("scene.romantic") - assert state is not None - - activate_scene.assert_awaited_with( - 0, 0, [OutputPort.OUTPUT1, OutputPort.OUTPUT2], [RelayPort.RELAY1], 0.0 - ) - - -async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test the scene is removed when the config entry is unloaded.""" - await init_integration(hass, entry) - - await hass.config_entries.async_unload(entry.entry_id) - state = hass.states.get("scene.romantic") - assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_sensor.py b/tests/components/lcn/test_sensor.py index 18335f4b073..cdcd5a195a3 100644 --- a/tests/components/lcn/test_sensor.py +++ b/tests/components/lcn/test_sensor.py @@ -1,46 +1,85 @@ """Test for the LCN sensor platform.""" -from unittest.mock import patch - from pypck.inputs import ModStatusLedsAndLogicOps, ModStatusVar from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import LedStatus, LogicOpStatus, Var, VarValue -from syrupy.assertion import SnapshotAssertion from homeassistant.components.lcn.helpers import get_device_connection -from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.const import ( + ATTR_UNIT_OF_MEASUREMENT, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + UnitOfTemperature, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import MockConfigEntry, init_integration - -from tests.common import snapshot_platform - SENSOR_VAR1 = "sensor.sensor_var1" SENSOR_SETPOINT1 = "sensor.sensor_setpoint1" SENSOR_LED6 = "sensor.sensor_led6" SENSOR_LOGICOP1 = "sensor.sensor_logicop1" -async def test_setup_lcn_sensor( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: +async def test_setup_lcn_sensor(hass: HomeAssistant, entry, lcn_connection) -> None: """Test the setup of sensor.""" - with patch("homeassistant.components.lcn.PLATFORMS", [Platform.SENSOR]): - await init_integration(hass, entry) + for entity_id in ( + SENSOR_VAR1, + SENSOR_SETPOINT1, + SENSOR_LED6, + SENSOR_LOGICOP1, + ): + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_UNKNOWN - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + +async def test_entity_state(hass: HomeAssistant, lcn_connection) -> None: + """Test state of entity.""" + state = hass.states.get(SENSOR_VAR1) + assert state + assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfTemperature.CELSIUS + + state = hass.states.get(SENSOR_SETPOINT1) + assert state + assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfTemperature.CELSIUS + + state = hass.states.get(SENSOR_LED6) + assert state + + state = hass.states.get(SENSOR_LOGICOP1) + assert state + + +async def test_entity_attributes( + hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +) -> None: + """Test the attributes of an entity.""" + + entity_var1 = entity_registry.async_get(SENSOR_VAR1) + assert entity_var1 + assert entity_var1.unique_id == f"{entry.entry_id}-m000007-var1" + assert entity_var1.original_name == "Sensor_Var1" + + entity_r1varsetpoint = entity_registry.async_get(SENSOR_SETPOINT1) + assert 
entity_r1varsetpoint + assert entity_r1varsetpoint.unique_id == f"{entry.entry_id}-m000007-r1varsetpoint" + assert entity_r1varsetpoint.original_name == "Sensor_Setpoint1" + + entity_led6 = entity_registry.async_get(SENSOR_LED6) + assert entity_led6 + assert entity_led6.unique_id == f"{entry.entry_id}-m000007-led6" + assert entity_led6.original_name == "Sensor_Led6" + + entity_logicop1 = entity_registry.async_get(SENSOR_LOGICOP1) + assert entity_logicop1 + assert entity_logicop1.unique_id == f"{entry.entry_id}-m000007-logicop1" + assert entity_logicop1.original_name == "Sensor_LogicOp1" async def test_pushed_variable_status_change( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test the variable sensor changes its state on status received.""" - await init_integration(hass, entry) - device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -64,11 +103,9 @@ async def test_pushed_variable_status_change( async def test_pushed_ledlogicop_status_change( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test the led and logicop sensor changes its state on status received.""" - await init_integration(hass, entry) - device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -92,10 +129,8 @@ async def test_pushed_ledlogicop_status_change( assert state.state == "all" -async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: +async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: """Test the sensor is removed when the config entry is unloaded.""" - await init_integration(hass, entry) - await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(SENSOR_VAR1).state == STATE_UNAVAILABLE assert hass.states.get(SENSOR_SETPOINT1).state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_services.py b/tests/components/lcn/test_services.py deleted file mode 100644 index a4ea559cd72..00000000000 --- a/tests/components/lcn/test_services.py +++ /dev/null @@ -1,439 +0,0 @@ -"""Test for the LCN services.""" - -from unittest.mock import patch - -import pypck -import pytest - -from homeassistant.components.lcn import DOMAIN -from homeassistant.components.lcn.const import ( - CONF_KEYS, - CONF_LED, - CONF_OUTPUT, - CONF_PCK, - CONF_RELVARREF, - CONF_ROW, - CONF_SETPOINT, - CONF_TABLE, - CONF_TEXT, - CONF_TIME, - CONF_TIME_UNIT, - CONF_TRANSITION, - CONF_VALUE, - CONF_VARIABLE, -) -from homeassistant.components.lcn.services import LcnService -from homeassistant.const import ( - CONF_ADDRESS, - CONF_BRIGHTNESS, - CONF_STATE, - CONF_UNIT_OF_MEASUREMENT, -) -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -from .conftest import ( - MockConfigEntry, - MockModuleConnection, - MockPchkConnectionManager, - init_integration, -) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_output_abs(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test output_abs service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "dim_output") as dim_output: - await hass.services.async_call( - DOMAIN, - LcnService.OUTPUT_ABS, - { - CONF_ADDRESS: "pchk.s0.m7", - CONF_OUTPUT: "output1", - CONF_BRIGHTNESS: 100, - CONF_TRANSITION: 
5, - }, - blocking=True, - ) - - dim_output.assert_awaited_with(0, 100, 9) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_output_rel(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test output_rel service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "rel_output") as rel_output: - await hass.services.async_call( - DOMAIN, - LcnService.OUTPUT_REL, - { - CONF_ADDRESS: "pchk.s0.m7", - CONF_OUTPUT: "output1", - CONF_BRIGHTNESS: 25, - }, - blocking=True, - ) - - rel_output.assert_awaited_with(0, 25) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_output_toggle( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: - """Test output_toggle service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "toggle_output") as toggle_output: - await hass.services.async_call( - DOMAIN, - LcnService.OUTPUT_TOGGLE, - { - CONF_ADDRESS: "pchk.s0.m7", - CONF_OUTPUT: "output1", - CONF_TRANSITION: 5, - }, - blocking=True, - ) - - toggle_output.assert_awaited_with(0, 9) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_relays(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test relays service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "control_relays") as control_relays: - await hass.services.async_call( - DOMAIN, - LcnService.RELAYS, - {CONF_ADDRESS: "pchk.s0.m7", CONF_STATE: "0011TT--"}, - blocking=True, - ) - - states = ["OFF", "OFF", "ON", "ON", "TOGGLE", "TOGGLE", "NOCHANGE", "NOCHANGE"] - relay_states = [pypck.lcn_defs.RelayStateModifier[state] for state in states] - - control_relays.assert_awaited_with(relay_states) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_led(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test led service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "control_led") as control_led: - await hass.services.async_call( - DOMAIN, - LcnService.LED, - {CONF_ADDRESS: "pchk.s0.m7", CONF_LED: "led6", CONF_STATE: "blink"}, - blocking=True, - ) - - led = pypck.lcn_defs.LedPort["LED6"] - led_state = pypck.lcn_defs.LedStatus["BLINK"] - - control_led.assert_awaited_with(led, led_state) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_var_abs(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test var_abs service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "var_abs") as var_abs: - await hass.services.async_call( - DOMAIN, - LcnService.VAR_ABS, - { - CONF_ADDRESS: "pchk.s0.m7", - CONF_VARIABLE: "var1", - CONF_VALUE: 75, - CONF_UNIT_OF_MEASUREMENT: "%", - }, - blocking=True, - ) - - var_abs.assert_awaited_with( - pypck.lcn_defs.Var["VAR1"], 75, pypck.lcn_defs.VarUnit.parse("%") - ) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def 
test_service_var_rel(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test var_rel service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "var_rel") as var_rel: - await hass.services.async_call( - DOMAIN, - LcnService.VAR_REL, - { - CONF_ADDRESS: "pchk.s0.m7", - CONF_VARIABLE: "var1", - CONF_VALUE: 10, - CONF_UNIT_OF_MEASUREMENT: "%", - CONF_RELVARREF: "current", - }, - blocking=True, - ) - - var_rel.assert_awaited_with( - pypck.lcn_defs.Var["VAR1"], - 10, - pypck.lcn_defs.VarUnit.parse("%"), - pypck.lcn_defs.RelVarRef["CURRENT"], - ) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_var_reset(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test var_reset service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "var_reset") as var_reset: - await hass.services.async_call( - DOMAIN, - LcnService.VAR_RESET, - {CONF_ADDRESS: "pchk.s0.m7", CONF_VARIABLE: "var1"}, - blocking=True, - ) - - var_reset.assert_awaited_with(pypck.lcn_defs.Var["VAR1"]) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_lock_regulator( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: - """Test lock_regulator service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: - await hass.services.async_call( - DOMAIN, - LcnService.LOCK_REGULATOR, - { - CONF_ADDRESS: "pchk.s0.m7", - CONF_SETPOINT: "r1varsetpoint", - CONF_STATE: True, - }, - blocking=True, - ) - - lock_regulator.assert_awaited_with(0, True) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_send_keys(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test send_keys service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "send_keys") as send_keys: - await hass.services.async_call( - DOMAIN, - LcnService.SEND_KEYS, - {CONF_ADDRESS: "pchk.s0.m7", CONF_KEYS: "a1a5d8", CONF_STATE: "hit"}, - blocking=True, - ) - - keys = [[False] * 8 for i in range(4)] - keys[0][0] = True - keys[0][4] = True - keys[3][7] = True - - send_keys.assert_awaited_with(keys, pypck.lcn_defs.SendKeyCommand["HIT"]) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_send_keys_hit_deferred( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: - """Test send_keys (hit_deferred) service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - keys = [[False] * 8 for i in range(4)] - keys[0][0] = True - keys[0][4] = True - keys[3][7] = True - - # success - with patch.object( - MockModuleConnection, "send_keys_hit_deferred" - ) as send_keys_hit_deferred: - await hass.services.async_call( - DOMAIN, - LcnService.SEND_KEYS, - { - CONF_ADDRESS: "pchk.s0.m7", - CONF_KEYS: "a1a5d8", - CONF_TIME: 5, - CONF_TIME_UNIT: "s", - }, - blocking=True, - ) - - send_keys_hit_deferred.assert_awaited_with( - keys, 5, pypck.lcn_defs.TimeUnit.parse("S") - ) - - # wrong key action - with ( - patch.object( - 
MockModuleConnection, "send_keys_hit_deferred" - ) as send_keys_hit_deferred, - pytest.raises(ValueError), - ): - await hass.services.async_call( - DOMAIN, - LcnService.SEND_KEYS, - { - CONF_ADDRESS: "pchk.s0.m7", - CONF_KEYS: "a1a5d8", - CONF_STATE: "make", - CONF_TIME: 5, - CONF_TIME_UNIT: "s", - }, - blocking=True, - ) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_lock_keys(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test lock_keys service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "lock_keys") as lock_keys: - await hass.services.async_call( - DOMAIN, - LcnService.LOCK_KEYS, - {CONF_ADDRESS: "pchk.s0.m7", CONF_TABLE: "a", CONF_STATE: "0011TT--"}, - blocking=True, - ) - - states = ["OFF", "OFF", "ON", "ON", "TOGGLE", "TOGGLE", "NOCHANGE", "NOCHANGE"] - lock_states = [pypck.lcn_defs.KeyLockStateModifier[state] for state in states] - - lock_keys.assert_awaited_with(0, lock_states) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_lock_keys_tab_a_temporary( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: - """Test lock_keys (tab_a_temporary) service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - # success - with patch.object( - MockModuleConnection, "lock_keys_tab_a_temporary" - ) as lock_keys_tab_a_temporary: - await hass.services.async_call( - DOMAIN, - LcnService.LOCK_KEYS, - { - CONF_ADDRESS: "pchk.s0.m7", - CONF_STATE: "0011TT--", - CONF_TIME: 10, - CONF_TIME_UNIT: "s", - }, - blocking=True, - ) - - states = ["OFF", "OFF", "ON", "ON", "TOGGLE", "TOGGLE", "NOCHANGE", "NOCHANGE"] - lock_states = [pypck.lcn_defs.KeyLockStateModifier[state] for state in states] - - lock_keys_tab_a_temporary.assert_awaited_with( - 10, pypck.lcn_defs.TimeUnit.parse("S"), lock_states - ) - - # wrong table - with ( - patch.object( - MockModuleConnection, "lock_keys_tab_a_temporary" - ) as lock_keys_tab_a_temporary, - pytest.raises(ValueError), - ): - await hass.services.async_call( - DOMAIN, - LcnService.LOCK_KEYS, - { - CONF_ADDRESS: "pchk.s0.m7", - CONF_TABLE: "b", - CONF_STATE: "0011TT--", - CONF_TIME: 10, - CONF_TIME_UNIT: "s", - }, - blocking=True, - ) - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_dyn_text(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test dyn_text service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "dyn_text") as dyn_text: - await hass.services.async_call( - DOMAIN, - LcnService.DYN_TEXT, - {CONF_ADDRESS: "pchk.s0.m7", CONF_ROW: 1, CONF_TEXT: "text in row 1"}, - blocking=True, - ) - - dyn_text.assert_awaited_with(0, "text in row 1") - - -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_pck(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test pck service.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "pck") as pck: - await hass.services.async_call( - DOMAIN, - LcnService.PCK, - {CONF_ADDRESS: "pchk.s0.m7", CONF_PCK: "PIN4"}, - blocking=True, - ) - - pck.assert_awaited_with("PIN4") - - 
-@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_called_with_invalid_host_id( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: - """Test service was called with non existing host id.""" - await async_setup_component(hass, "persistent_notification", {}) - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "pck") as pck, pytest.raises(ValueError): - await hass.services.async_call( - DOMAIN, - LcnService.PCK, - {CONF_ADDRESS: "foobar.s0.m7", CONF_PCK: "PIN4"}, - blocking=True, - ) - - pck.assert_not_awaited() diff --git a/tests/components/lcn/test_switch.py b/tests/components/lcn/test_switch.py index 15b156aac43..f24828c5fcb 100644 --- a/tests/components/lcn/test_switch.py +++ b/tests/components/lcn/test_switch.py @@ -2,15 +2,9 @@ from unittest.mock import patch -from pypck.inputs import ( - ModStatusKeyLocks, - ModStatusOutput, - ModStatusRelays, - ModStatusVar, -) +from pypck.inputs import ModStatusOutput, ModStatusRelays from pypck.lcn_addr import LcnAddr -from pypck.lcn_defs import KeyLockStateModifier, RelayStateModifier, Var, VarValue -from syrupy.assertion import SnapshotAssertion +from pypck.lcn_defs import RelayStateModifier from homeassistant.components.lcn.helpers import get_device_connection from homeassistant.components.switch import DOMAIN as DOMAIN_SWITCH @@ -21,366 +15,209 @@ from homeassistant.const import ( STATE_OFF, STATE_ON, STATE_UNAVAILABLE, - Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import MockConfigEntry, MockModuleConnection, init_integration - -from tests.common import snapshot_platform +from .conftest import MockModuleConnection SWITCH_OUTPUT1 = "switch.switch_output1" SWITCH_OUTPUT2 = "switch.switch_output2" SWITCH_RELAY1 = "switch.switch_relay1" SWITCH_RELAY2 = "switch.switch_relay2" -SWITCH_REGULATOR1 = "switch.switch_regulator1" -SWITCH_KEYLOCKK1 = "switch.switch_keylock1" -async def test_setup_lcn_switch( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: +async def test_setup_lcn_switch(hass: HomeAssistant, lcn_connection) -> None: """Test the setup of switch.""" - with patch("homeassistant.components.lcn.PLATFORMS", [Platform.SWITCH]): - await init_integration(hass, entry) - - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + for entity_id in ( + SWITCH_OUTPUT1, + SWITCH_OUTPUT2, + SWITCH_RELAY1, + SWITCH_RELAY2, + ): + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_OFF -async def test_output_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: +async def test_entity_attributes( + hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +) -> None: + """Test the attributes of an entity.""" + + entity_output = entity_registry.async_get(SWITCH_OUTPUT1) + + assert entity_output + assert entity_output.unique_id == f"{entry.entry_id}-m000007-output1" + assert entity_output.original_name == "Switch_Output1" + + entity_relay = entity_registry.async_get(SWITCH_RELAY1) + + assert entity_relay + assert entity_relay.unique_id == f"{entry.entry_id}-m000007-relay1" + assert entity_relay.original_name == "Switch_Relay1" + + +@patch.object(MockModuleConnection, "dim_output") +async def test_output_turn_on(dim_output, hass: HomeAssistant, lcn_connection) -> None: """Test the output switch turns on.""" - await 
init_integration(hass, entry) + # command failed + dim_output.return_value = False - with patch.object(MockModuleConnection, "dim_output") as dim_output: - # command failed - dim_output.return_value = False + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, + blocking=True, + ) + await hass.async_block_till_done() + dim_output.assert_awaited_with(0, 100, 0) - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, - blocking=True, - ) + state = hass.states.get(SWITCH_OUTPUT1) + assert state.state == STATE_OFF - dim_output.assert_awaited_with(0, 100, 0) + # command success + dim_output.reset_mock(return_value=True) + dim_output.return_value = True - state = hass.states.get(SWITCH_OUTPUT1) - assert state.state == STATE_OFF + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, + blocking=True, + ) + await hass.async_block_till_done() + dim_output.assert_awaited_with(0, 100, 0) - # command success - dim_output.reset_mock(return_value=True) - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, - blocking=True, - ) - - dim_output.assert_awaited_with(0, 100, 0) - - state = hass.states.get(SWITCH_OUTPUT1) - assert state.state == STATE_ON + state = hass.states.get(SWITCH_OUTPUT1) + assert state.state == STATE_ON -async def test_output_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@patch.object(MockModuleConnection, "dim_output") +async def test_output_turn_off(dim_output, hass: HomeAssistant, lcn_connection) -> None: """Test the output switch turns off.""" - await init_integration(hass, entry) + state = hass.states.get(SWITCH_OUTPUT1) + state.state = STATE_ON - with patch.object(MockModuleConnection, "dim_output") as dim_output: - state = hass.states.get(SWITCH_OUTPUT1) - state.state = STATE_ON + # command failed + dim_output.return_value = False - # command failed - dim_output.return_value = False + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, + blocking=True, + ) + await hass.async_block_till_done() + dim_output.assert_awaited_with(0, 0, 0) - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, - blocking=True, - ) + state = hass.states.get(SWITCH_OUTPUT1) + assert state.state == STATE_ON - dim_output.assert_awaited_with(0, 0, 0) + # command success + dim_output.reset_mock(return_value=True) + dim_output.return_value = True - state = hass.states.get(SWITCH_OUTPUT1) - assert state.state == STATE_ON + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, + blocking=True, + ) + await hass.async_block_till_done() + dim_output.assert_awaited_with(0, 0, 0) - # command success - dim_output.reset_mock(return_value=True) - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, - blocking=True, - ) - - dim_output.assert_awaited_with(0, 0, 0) - - state = hass.states.get(SWITCH_OUTPUT1) - assert state.state == STATE_OFF + state = hass.states.get(SWITCH_OUTPUT1) + assert state.state == STATE_OFF -async def test_relay_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@patch.object(MockModuleConnection, "control_relays") +async def test_relay_turn_on( + control_relays, hass: HomeAssistant, 
lcn_connection +) -> None: """Test the relay switch turns on.""" - await init_integration(hass, entry) + states = [RelayStateModifier.NOCHANGE] * 8 + states[0] = RelayStateModifier.ON - with patch.object(MockModuleConnection, "control_relays") as control_relays: - states = [RelayStateModifier.NOCHANGE] * 8 - states[0] = RelayStateModifier.ON + # command failed + control_relays.return_value = False - # command failed - control_relays.return_value = False + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_RELAY1}, + blocking=True, + ) + await hass.async_block_till_done() + control_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_RELAY1}, - blocking=True, - ) + state = hass.states.get(SWITCH_RELAY1) + assert state.state == STATE_OFF - control_relays.assert_awaited_with(states) + # command success + control_relays.reset_mock(return_value=True) + control_relays.return_value = True - state = hass.states.get(SWITCH_RELAY1) - assert state.state == STATE_OFF + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_RELAY1}, + blocking=True, + ) + await hass.async_block_till_done() + control_relays.assert_awaited_with(states) - # command success - control_relays.reset_mock(return_value=True) - control_relays.return_value = True - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_RELAY1}, - blocking=True, - ) - - control_relays.assert_awaited_with(states) - - state = hass.states.get(SWITCH_RELAY1) - assert state.state == STATE_ON + state = hass.states.get(SWITCH_RELAY1) + assert state.state == STATE_ON -async def test_relay_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@patch.object(MockModuleConnection, "control_relays") +async def test_relay_turn_off( + control_relays, hass: HomeAssistant, lcn_connection +) -> None: """Test the relay switch turns off.""" - await init_integration(hass, entry) + states = [RelayStateModifier.NOCHANGE] * 8 + states[0] = RelayStateModifier.OFF - with patch.object(MockModuleConnection, "control_relays") as control_relays: - states = [RelayStateModifier.NOCHANGE] * 8 - states[0] = RelayStateModifier.OFF + state = hass.states.get(SWITCH_RELAY1) + state.state = STATE_ON - state = hass.states.get(SWITCH_RELAY1) - state.state = STATE_ON + # command failed + control_relays.return_value = False - # command failed - control_relays.return_value = False + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_RELAY1}, + blocking=True, + ) + await hass.async_block_till_done() + control_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_RELAY1}, - blocking=True, - ) + state = hass.states.get(SWITCH_RELAY1) + assert state.state == STATE_ON - control_relays.assert_awaited_with(states) + # command success + control_relays.reset_mock(return_value=True) + control_relays.return_value = True - state = hass.states.get(SWITCH_RELAY1) - assert state.state == STATE_ON + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_RELAY1}, + blocking=True, + ) + await hass.async_block_till_done() + control_relays.assert_awaited_with(states) - # command success - control_relays.reset_mock(return_value=True) - control_relays.return_value = True - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - 
{ATTR_ENTITY_ID: SWITCH_RELAY1}, - blocking=True, - ) - - control_relays.assert_awaited_with(states) - - state = hass.states.get(SWITCH_RELAY1) - assert state.state == STATE_OFF - - -async def test_regulatorlock_turn_on( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: - """Test the regulator lock switch turns on.""" - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: - # command failed - lock_regulator.return_value = False - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_REGULATOR1}, - blocking=True, - ) - - lock_regulator.assert_awaited_with(0, True) - - state = hass.states.get(SWITCH_REGULATOR1) - assert state.state == STATE_OFF - - # command success - lock_regulator.reset_mock(return_value=True) - lock_regulator.return_value = True - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_REGULATOR1}, - blocking=True, - ) - - lock_regulator.assert_awaited_with(0, True) - - state = hass.states.get(SWITCH_REGULATOR1) - assert state.state == STATE_ON - - -async def test_regulatorlock_turn_off( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: - """Test the regulator lock switch turns off.""" - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: - state = hass.states.get(SWITCH_REGULATOR1) - state.state = STATE_ON - - # command failed - lock_regulator.return_value = False - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_REGULATOR1}, - blocking=True, - ) - - lock_regulator.assert_awaited_with(0, False) - - state = hass.states.get(SWITCH_REGULATOR1) - assert state.state == STATE_ON - - # command success - lock_regulator.reset_mock(return_value=True) - lock_regulator.return_value = True - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_REGULATOR1}, - blocking=True, - ) - - lock_regulator.assert_awaited_with(0, False) - - state = hass.states.get(SWITCH_REGULATOR1) - assert state.state == STATE_OFF - - -async def test_keylock_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test the keylock switch turns on.""" - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "lock_keys") as lock_keys: - states = [KeyLockStateModifier.NOCHANGE] * 8 - states[0] = KeyLockStateModifier.ON - - # command failed - lock_keys.return_value = False - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_KEYLOCKK1}, - blocking=True, - ) - - lock_keys.assert_awaited_with(0, states) - - state = hass.states.get(SWITCH_KEYLOCKK1) - assert state.state == STATE_OFF - - # command success - lock_keys.reset_mock(return_value=True) - lock_keys.return_value = True - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_KEYLOCKK1}, - blocking=True, - ) - - lock_keys.assert_awaited_with(0, states) - - state = hass.states.get(SWITCH_KEYLOCKK1) - assert state.state == STATE_ON - - -async def test_keylock_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: - """Test the keylock switch turns off.""" - await init_integration(hass, entry) - - with patch.object(MockModuleConnection, "lock_keys") as lock_keys: - states = [KeyLockStateModifier.NOCHANGE] * 8 - states[0] = KeyLockStateModifier.OFF - - state = hass.states.get(SWITCH_KEYLOCKK1) - state.state = STATE_ON - - # 
command failed - lock_keys.return_value = False - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_KEYLOCKK1}, - blocking=True, - ) - - lock_keys.assert_awaited_with(0, states) - - state = hass.states.get(SWITCH_KEYLOCKK1) - assert state.state == STATE_ON - - # command success - lock_keys.reset_mock(return_value=True) - lock_keys.return_value = True - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_KEYLOCKK1}, - blocking=True, - ) - - lock_keys.assert_awaited_with(0, states) - - state = hass.states.get(SWITCH_KEYLOCKK1) - assert state.state == STATE_OFF + state = hass.states.get(SWITCH_RELAY1) + assert state.state == STATE_OFF async def test_pushed_output_status_change( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test the output switch changes its state on status received.""" - await init_integration(hass, entry) - device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -402,11 +239,9 @@ async def test_pushed_output_status_change( async def test_pushed_relay_status_change( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, entry, lcn_connection ) -> None: """Test the relay switch changes its state on status received.""" - await init_integration(hass, entry) - device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [False] * 8 @@ -430,67 +265,7 @@ async def test_pushed_relay_status_change( assert state.state == STATE_OFF -async def test_pushed_regulatorlock_status_change( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: - """Test the regulator lock switch changes its state on status received.""" - await init_integration(hass, entry) - - device_connection = get_device_connection(hass, (0, 7, False), entry) - address = LcnAddr(0, 7, False) - states = [False] * 8 - - # push status "on" - states[0] = True - inp = ModStatusVar(address, Var.R1VARSETPOINT, VarValue(0x8000)) - await device_connection.async_process_input(inp) - await hass.async_block_till_done() - - state = hass.states.get(SWITCH_REGULATOR1) - assert state.state == STATE_ON - - # push status "off" - states[0] = False - inp = ModStatusVar(address, Var.R1VARSETPOINT, VarValue(0x7FFF)) - await device_connection.async_process_input(inp) - await hass.async_block_till_done() - - state = hass.states.get(SWITCH_REGULATOR1) - assert state.state == STATE_OFF - - -async def test_pushed_keylock_status_change( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: - """Test the keylock switch changes its state on status received.""" - await init_integration(hass, entry) - - device_connection = get_device_connection(hass, (0, 7, False), entry) - address = LcnAddr(0, 7, False) - states = [[False] * 8 for i in range(4)] - states[0][0] = True - - # push status "on" - inp = ModStatusKeyLocks(address, states) - await device_connection.async_process_input(inp) - await hass.async_block_till_done() - - state = hass.states.get(SWITCH_KEYLOCKK1) - assert state.state == STATE_ON - - # push status "off" - states[0][0] = False - inp = ModStatusKeyLocks(address, states) - await device_connection.async_process_input(inp) - await hass.async_block_till_done() - - state = hass.states.get(SWITCH_KEYLOCKK1) - assert state.state == STATE_OFF - - -async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: +async def test_unload_config_entry(hass: 
HomeAssistant, entry, lcn_connection) -> None: """Test the switch is removed when the config entry is unloaded.""" - await init_integration(hass, entry) - await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(SWITCH_OUTPUT1).state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_websocket.py b/tests/components/lcn/test_websocket.py deleted file mode 100644 index 2c5fff89e19..00000000000 --- a/tests/components/lcn/test_websocket.py +++ /dev/null @@ -1,323 +0,0 @@ -"""LCN Websocket Tests.""" - -from typing import Any - -from pypck.lcn_addr import LcnAddr -import pytest - -from homeassistant.components.lcn import AddressType -from homeassistant.components.lcn.const import CONF_DOMAIN_DATA -from homeassistant.components.lcn.helpers import get_device_config, get_resource -from homeassistant.const import ( - CONF_ADDRESS, - CONF_DEVICES, - CONF_DOMAIN, - CONF_ENTITIES, - CONF_NAME, - CONF_RESOURCE, - CONF_TYPE, -) -from homeassistant.core import HomeAssistant - -from .conftest import MockConfigEntry, init_integration - -from tests.typing import WebSocketGenerator - -DEVICES_PAYLOAD = {CONF_TYPE: "lcn/devices", "entry_id": ""} -ENTITIES_PAYLOAD = { - CONF_TYPE: "lcn/entities", - "entry_id": "", -} -SCAN_PAYLOAD = {CONF_TYPE: "lcn/devices/scan", "entry_id": ""} -DEVICES_ADD_PAYLOAD = { - CONF_TYPE: "lcn/devices/add", - "entry_id": "", - CONF_ADDRESS: (0, 10, False), -} -DEVICES_DELETE_PAYLOAD = { - CONF_TYPE: "lcn/devices/delete", - "entry_id": "", - CONF_ADDRESS: (0, 7, False), -} -ENTITIES_ADD_PAYLOAD = { - CONF_TYPE: "lcn/entities/add", - "entry_id": "", - CONF_ADDRESS: (0, 7, False), - CONF_NAME: "test_switch", - CONF_DOMAIN: "switch", - CONF_DOMAIN_DATA: {"output": "RELAY5"}, -} -ENTITIES_DELETE_PAYLOAD = { - CONF_TYPE: "lcn/entities/delete", - "entry_id": "", - CONF_ADDRESS: (0, 7, False), - CONF_DOMAIN: "switch", - CONF_RESOURCE: "relay1", -} - - -async def test_lcn_devices_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry -) -> None: - """Test lcn/devices command.""" - await init_integration(hass, entry) - - client = await hass_ws_client(hass) - await client.send_json_auto_id({**DEVICES_PAYLOAD, "entry_id": entry.entry_id}) - - res = await client.receive_json() - assert res["success"], res - assert len(res["result"]) == len(entry.data[CONF_DEVICES]) - assert all( - {**result, CONF_ADDRESS: tuple(result[CONF_ADDRESS])} - in entry.data[CONF_DEVICES] - for result in res["result"] - ) - - -@pytest.mark.parametrize( - "payload", - [ - ENTITIES_PAYLOAD, - {**ENTITIES_PAYLOAD, CONF_ADDRESS: (0, 7, False)}, - ], -) -async def test_lcn_entities_command( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - entry: MockConfigEntry, - payload, -) -> None: - """Test lcn/entities command.""" - await init_integration(hass, entry) - - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - **payload, - "entry_id": entry.entry_id, - } - ) - - res = await client.receive_json() - assert res["success"], res - entities = [ - entity - for entity in entry.data[CONF_ENTITIES] - if CONF_ADDRESS not in payload or entity[CONF_ADDRESS] == payload[CONF_ADDRESS] - ] - assert len(res["result"]) == len(entities) - assert all( - {**result, CONF_ADDRESS: tuple(result[CONF_ADDRESS])} in entities - for result in res["result"] - ) - - -async def test_lcn_devices_scan_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry -) -> None: - """Test lcn/devices/scan command.""" - # add new 
module which is not stored in config_entry - lcn_connection = await init_integration(hass, entry) - lcn_connection.get_address_conn(LcnAddr(0, 10, False)) - - client = await hass_ws_client(hass) - await client.send_json_auto_id({**SCAN_PAYLOAD, "entry_id": entry.entry_id}) - - res = await client.receive_json() - assert res["success"], res - - lcn_connection.scan_modules.assert_awaited() - assert len(res["result"]) == len(entry.data[CONF_DEVICES]) - assert all( - {**result, CONF_ADDRESS: tuple(result[CONF_ADDRESS])} - in entry.data[CONF_DEVICES] - for result in res["result"] - ) - - -async def test_lcn_devices_add_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry -) -> None: - """Test lcn/devices/add command.""" - await init_integration(hass, entry) - - client = await hass_ws_client(hass) - assert get_device_config((0, 10, False), entry) is None - - await client.send_json_auto_id({**DEVICES_ADD_PAYLOAD, "entry_id": entry.entry_id}) - - res = await client.receive_json() - assert res["success"], res - - assert get_device_config((0, 10, False), entry) - - -async def test_lcn_devices_delete_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry -) -> None: - """Test lcn/devices/delete command.""" - await init_integration(hass, entry) - - client = await hass_ws_client(hass) - assert get_device_config((0, 7, False), entry) - - await client.send_json_auto_id( - {**DEVICES_DELETE_PAYLOAD, "entry_id": entry.entry_id} - ) - - res = await client.receive_json() - assert res["success"], res - assert get_device_config((0, 7, False), entry) is None - - -async def test_lcn_entities_add_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry -) -> None: - """Test lcn/entities/add command.""" - await init_integration(hass, entry) - - client = await hass_ws_client(hass) - - entity_config = { - key: ENTITIES_ADD_PAYLOAD[key] - for key in (CONF_ADDRESS, CONF_NAME, CONF_DOMAIN, CONF_DOMAIN_DATA) - } - - resource = get_resource( - ENTITIES_ADD_PAYLOAD[CONF_DOMAIN], ENTITIES_ADD_PAYLOAD[CONF_DOMAIN_DATA] - ).lower() - - assert {**entity_config, CONF_RESOURCE: resource} not in entry.data[CONF_ENTITIES] - - await client.send_json_auto_id({**ENTITIES_ADD_PAYLOAD, "entry_id": entry.entry_id}) - - res = await client.receive_json() - assert res["success"], res - - assert {**entity_config, CONF_RESOURCE: resource} in entry.data[CONF_ENTITIES] - - -async def test_lcn_entities_delete_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry -) -> None: - """Test lcn/entities/delete command.""" - await init_integration(hass, entry) - - client = await hass_ws_client(hass) - - assert ( - len( - [ - entity - for entity in entry.data[CONF_ENTITIES] - if entity[CONF_ADDRESS] == ENTITIES_DELETE_PAYLOAD[CONF_ADDRESS] - and entity[CONF_DOMAIN] == ENTITIES_DELETE_PAYLOAD[CONF_DOMAIN] - and entity[CONF_RESOURCE] == ENTITIES_DELETE_PAYLOAD[CONF_RESOURCE] - ] - ) - == 1 - ) - - await client.send_json_auto_id( - {**ENTITIES_DELETE_PAYLOAD, "entry_id": entry.entry_id} - ) - - res = await client.receive_json() - assert res["success"], res - - assert ( - len( - [ - entity - for entity in entry.data[CONF_ENTITIES] - if entity[CONF_ADDRESS] == ENTITIES_DELETE_PAYLOAD[CONF_ADDRESS] - and entity[CONF_DOMAIN] == ENTITIES_DELETE_PAYLOAD[CONF_DOMAIN] - and entity[CONF_RESOURCE] == ENTITIES_DELETE_PAYLOAD[CONF_RESOURCE] - ] - ) - == 0 - ) - - -@pytest.mark.parametrize( - ("payload", "entity_id", 
"result"), - [ - (DEVICES_PAYLOAD, "12345", False), - (ENTITIES_PAYLOAD, "12345", False), - (SCAN_PAYLOAD, "12345", False), - (DEVICES_ADD_PAYLOAD, "12345", False), - (DEVICES_DELETE_PAYLOAD, "12345", False), - (ENTITIES_ADD_PAYLOAD, "12345", False), - (ENTITIES_DELETE_PAYLOAD, "12345", False), - ], -) -async def test_lcn_command_host_error( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - entry: MockConfigEntry, - payload: dict[str, str], - entity_id: str, - result: bool, -) -> None: - """Test lcn commands for unknown host.""" - await init_integration(hass, entry) - - client = await hass_ws_client(hass) - await client.send_json_auto_id({**payload, "entry_id": entity_id}) - - res = await client.receive_json() - assert res["success"], res - assert res["result"] == result - - -@pytest.mark.parametrize( - ("payload", "address", "result"), - [ - (DEVICES_ADD_PAYLOAD, (0, 7, False), False), # device already existing - (DEVICES_DELETE_PAYLOAD, (0, 42, False), False), - (ENTITIES_ADD_PAYLOAD, (0, 42, False), False), - (ENTITIES_DELETE_PAYLOAD, (0, 42, 0), False), - ], -) -async def test_lcn_command_address_error( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - entry: MockConfigEntry, - payload: dict[str, Any], - address: AddressType, - result: bool, -) -> None: - """Test lcn commands for address error.""" - await init_integration(hass, entry) - - client = await hass_ws_client(hass) - await client.send_json_auto_id( - {**payload, "entry_id": entry.entry_id, CONF_ADDRESS: address} - ) - - res = await client.receive_json() - assert res["success"], res - assert res["result"] == result - - -async def test_lcn_entities_add_existing_error( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - entry: MockConfigEntry, -) -> None: - """Test lcn commands for address error.""" - await init_integration(hass, entry) - - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - **ENTITIES_ADD_PAYLOAD, - "entry_id": entry.entry_id, - CONF_DOMAIN_DATA: {"output": "RELAY1"}, - } - ) - - res = await client.receive_json() - assert res["success"], res - assert res["result"] is False diff --git a/tests/components/lektrico/__init__.py b/tests/components/lektrico/__init__.py deleted file mode 100644 index 449da2b35c4..00000000000 --- a/tests/components/lektrico/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for Lektrico integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/lektrico/conftest.py b/tests/components/lektrico/conftest.py deleted file mode 100644 index fd840b0c290..00000000000 --- a/tests/components/lektrico/conftest.py +++ /dev/null @@ -1,92 +0,0 @@ -"""Fixtures for Lektrico Charging Station integration tests.""" - -from collections.abc import Generator -from ipaddress import ip_address -import json -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.lektrico.const import DOMAIN -from homeassistant.components.zeroconf import ZeroconfServiceInfo -from homeassistant.const import ( - ATTR_HW_VERSION, - ATTR_SERIAL_NUMBER, - CONF_HOST, - CONF_TYPE, -) - -from tests.common import MockConfigEntry, load_fixture - -MOCKED_DEVICE_IP_ADDRESS = "192.168.100.10" 
-MOCKED_DEVICE_SERIAL_NUMBER = "500006" -MOCKED_DEVICE_TYPE = "1p7k" -MOCKED_DEVICE_BOARD_REV = "B" - -MOCKED_DEVICE_ZC_NAME = "Lektrico-1p7k-500006._http._tcp" -MOCKED_DEVICE_ZC_TYPE = "_http._tcp.local." -MOCKED_DEVICE_ZEROCONF_DATA = ZeroconfServiceInfo( - ip_address=ip_address(MOCKED_DEVICE_IP_ADDRESS), - ip_addresses=[ip_address(MOCKED_DEVICE_IP_ADDRESS)], - hostname=f"{MOCKED_DEVICE_ZC_NAME.lower()}.local.", - port=80, - type=MOCKED_DEVICE_ZC_TYPE, - name=MOCKED_DEVICE_ZC_NAME, - properties={ - "id": "1p7k_500006", - "fw_id": "20230109-124642/v1.22-36-g56a3edd-develop-dirty", - }, -) - - -@pytest.fixture -def mock_device() -> Generator[AsyncMock]: - """Mock a Lektrico device.""" - with ( - patch( - "homeassistant.components.lektrico.Device", - autospec=True, - ) as mock_device, - patch( - "homeassistant.components.lektrico.config_flow.Device", - new=mock_device, - ), - patch( - "homeassistant.components.lektrico.coordinator.Device", - new=mock_device, - ), - ): - device = mock_device.return_value - - device.device_config.return_value = json.loads( - load_fixture("get_config.json", DOMAIN) - ) - device.device_info.return_value = json.loads( - load_fixture("get_info.json", DOMAIN) - ) - - yield device - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Mock setup entry.""" - with patch( - "homeassistant.components.lektrico.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - data={ - CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, - CONF_TYPE: MOCKED_DEVICE_TYPE, - ATTR_SERIAL_NUMBER: MOCKED_DEVICE_SERIAL_NUMBER, - ATTR_HW_VERSION: "B", - }, - unique_id=MOCKED_DEVICE_SERIAL_NUMBER, - ) diff --git a/tests/components/lektrico/fixtures/current_measures.json b/tests/components/lektrico/fixtures/current_measures.json deleted file mode 100644 index 1175b49f63c..00000000000 --- a/tests/components/lektrico/fixtures/current_measures.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "charger_state": "Available", - "charging_time": 0, - "instant_power": 0, - "session_energy": 0.0, - "temperature": 34.5, - "total_charged_energy": 0, - "install_current": 6, - "current_limit_reason": "Installation current", - "voltage_l1": 220.0, - "current_l1": 0.0, - "type": "1p7k", - "serial_number": "500006", - "board_revision": "B", - "fw_version": "1.44" -} diff --git a/tests/components/lektrico/fixtures/get_config.json b/tests/components/lektrico/fixtures/get_config.json deleted file mode 100644 index 175475004ec..00000000000 --- a/tests/components/lektrico/fixtures/get_config.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "type": "1p7k", - "serial_number": "500006", - "board_revision": "B" -} diff --git a/tests/components/lektrico/fixtures/get_info.json b/tests/components/lektrico/fixtures/get_info.json deleted file mode 100644 index 2b099a666e5..00000000000 --- a/tests/components/lektrico/fixtures/get_info.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "charger_state": "available", - "charging_time": 0, - "instant_power": 0, - "session_energy": 0.0, - "temperature": 34.5, - "total_charged_energy": 0, - "install_current": 6, - "current_limit_reason": "installation_current", - "voltage_l1": 220.0, - "current_l1": 0.0, - "fw_version": "1.44", - "led_max_brightness": 20, - "dynamic_current": 32, - "user_current": 32, - "lb_mode": 0, - "require_auth": true, - "state_e_activated": false, - "undervoltage_error": true, - "rcd_error": false, - 
"meter_fault": false, - "overcurrent": false, - "overtemp": false, - "overvoltage_error": false, - "contactor_failure": false, - "cp_diode_failure": false, - "critical_temp": false -} diff --git a/tests/components/lektrico/snapshots/test_binary_sensor.ambr b/tests/components/lektrico/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 6a28e7c60de..00000000000 --- a/tests/components/lektrico/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,471 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[binary_sensor.1p7k_500006_ev_diode_short-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_ev_diode_short', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Ev diode short', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cp_diode_failure', - 'unique_id': '500006_cp_diode_failure', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_ev_diode_short-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Ev diode short', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_ev_diode_short', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_ev_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_ev_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Ev error', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'state_e_activated', - 'unique_id': '500006_state_e_activated', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_ev_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Ev error', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_ev_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_metering_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_metering_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Metering error', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'meter_fault', - 'unique_id': '500006_meter_fault', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_metering_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Metering error', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_metering_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_overcurrent-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_overcurrent', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Overcurrent', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'overcurrent', - 'unique_id': '500006_overcurrent', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_overcurrent-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Overcurrent', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_overcurrent', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_overheating-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_overheating', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Overheating', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'critical_temp', - 'unique_id': '500006_critical_temp', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_overheating-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Overheating', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_overheating', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_overvoltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_overvoltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Overvoltage', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'overvoltage', - 'unique_id': '500006_overvoltage', - 
'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_overvoltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Overvoltage', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_overvoltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_rcd_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_rcd_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rcd error', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rcd_error', - 'unique_id': '500006_rcd_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_rcd_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Rcd error', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_rcd_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_relay_contacts_welded-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_relay_contacts_welded', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Relay contacts welded', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'contactor_failure', - 'unique_id': '500006_contactor_failure', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_relay_contacts_welded-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Relay contacts welded', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_relay_contacts_welded', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_thermal_throttling-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_thermal_throttling', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Thermal throttling', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'overtemp', - 'unique_id': '500006_overtemp', - 
'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_thermal_throttling-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Thermal throttling', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_thermal_throttling', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_undervoltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_undervoltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Undervoltage', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'undervoltage', - 'unique_id': '500006_undervoltage', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_undervoltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Undervoltage', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_undervoltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/lektrico/snapshots/test_button.ambr b/tests/components/lektrico/snapshots/test_button.ambr deleted file mode 100644 index 5070cd484c4..00000000000 --- a/tests/components/lektrico/snapshots/test_button.ambr +++ /dev/null @@ -1,140 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[button.1p7k_500006_charge_start-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.1p7k_500006_charge_start', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Charge start', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_start', - 'unique_id': '500006-charge_start', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[button.1p7k_500006_charge_start-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '1p7k_500006 Charge start', - }), - 'context': , - 'entity_id': 'button.1p7k_500006_charge_start', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_all_entities[button.1p7k_500006_charge_stop-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.1p7k_500006_charge_stop', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 
'original_name': 'Charge stop', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_stop', - 'unique_id': '500006-charge_stop', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[button.1p7k_500006_charge_stop-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '1p7k_500006 Charge stop', - }), - 'context': , - 'entity_id': 'button.1p7k_500006_charge_stop', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_all_entities[button.1p7k_500006_restart-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.1p7k_500006_restart', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restart', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '500006-reboot', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[button.1p7k_500006_restart-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': '1p7k_500006 Restart', - }), - 'context': , - 'entity_id': 'button.1p7k_500006_restart', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/lektrico/snapshots/test_init.ambr b/tests/components/lektrico/snapshots/test_init.ambr deleted file mode 100644 index 63739e1c9d8..00000000000 --- a/tests/components/lektrico/snapshots/test_init.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: test_device_info - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': 'B', - 'id': , - 'identifiers': set({ - tuple( - 'lektrico', - '500006', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Lektrico', - 'model': '1P7K', - 'model_id': None, - 'name': '1p7k_500006', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': '500006', - 'suggested_area': None, - 'sw_version': '1.44', - 'via_device_id': None, - }) -# --- diff --git a/tests/components/lektrico/snapshots/test_number.ambr b/tests/components/lektrico/snapshots/test_number.ambr deleted file mode 100644 index 30a37a25a09..00000000000 --- a/tests/components/lektrico/snapshots/test_number.ambr +++ /dev/null @@ -1,113 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[number.1p7k_500006_dynamic_limit-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 32, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.1p7k_500006_dynamic_limit', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dynamic limit', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, 
- 'translation_key': 'dynamic_limit', - 'unique_id': '500006_dynamic_limit', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[number.1p7k_500006_dynamic_limit-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '1p7k_500006 Dynamic limit', - 'max': 32, - 'min': 0, - 'mode': , - 'step': 1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.1p7k_500006_dynamic_limit', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '32', - }) -# --- -# name: test_all_entities[number.1p7k_500006_led_brightness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100, - 'min': 0, - 'mode': , - 'step': 5, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.1p7k_500006_led_brightness', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Led brightness', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'led_max_brightness', - 'unique_id': '500006_led_max_brightness', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[number.1p7k_500006_led_brightness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '1p7k_500006 Led brightness', - 'max': 100, - 'min': 0, - 'mode': , - 'step': 5, - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'number.1p7k_500006_led_brightness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- diff --git a/tests/components/lektrico/snapshots/test_select.ambr b/tests/components/lektrico/snapshots/test_select.ambr deleted file mode 100644 index 5a964f52ada..00000000000 --- a/tests/components/lektrico/snapshots/test_select.ambr +++ /dev/null @@ -1,60 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[select.1p7k_500006_load_balancing_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'disabled', - 'power', - 'hybrid', - 'green', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.1p7k_500006_load_balancing_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Load balancing mode', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'load_balancing_mode', - 'unique_id': '500006_load_balancing_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[select.1p7k_500006_load_balancing_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '1p7k_500006 Load balancing mode', - 'options': list([ - 'disabled', - 'power', - 'hybrid', - 'green', - ]), - }), - 'context': , - 'entity_id': 'select.1p7k_500006_load_balancing_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'disabled', - }) -# --- diff --git a/tests/components/lektrico/snapshots/test_sensor.ambr b/tests/components/lektrico/snapshots/test_sensor.ambr deleted file mode 100644 index 
73ec88e6fa1..00000000000 --- a/tests/components/lektrico/snapshots/test_sensor.ambr +++ /dev/null @@ -1,544 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[sensor.1p7k_500006_charging_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.1p7k_500006_charging_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charging time', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charging_time', - 'unique_id': '500006_charging_time', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_charging_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': '1p7k_500006 Charging time', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.1p7k_500006_charging_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.1p7k_500006_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Current', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '500006_current', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': '1p7k_500006 Current', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.1p7k_500006_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.1p7k_500006_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '500006_energy', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': '1p7k_500006 Energy', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.1p7k_500006_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 
'0.0', - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_installation_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.1p7k_500006_installation_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Installation current', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'installation_current', - 'unique_id': '500006_installation_current', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_installation_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': '1p7k_500006 Installation current', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.1p7k_500006_installation_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '6', - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_lifetime_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.1p7k_500006_lifetime_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Lifetime energy', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_energy', - 'unique_id': '500006_lifetime_energy', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_lifetime_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': '1p7k_500006 Lifetime energy', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.1p7k_500006_lifetime_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_limit_reason-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'no_limit', - 'installation_current', - 'user_limit', - 'dynamic_limit', - 'schedule', - 'em_offline', - 'em', - 'ocpp', - 'overtemperature', - 'switching_phases', - '1p_charging_disabled', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.1p7k_500006_limit_reason', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Limit reason', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'limit_reason', - 'unique_id': '500006_limit_reason', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_limit_reason-state] - StateSnapshot({ 
- 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': '1p7k_500006 Limit reason', - 'options': list([ - 'no_limit', - 'installation_current', - 'user_limit', - 'dynamic_limit', - 'schedule', - 'em_offline', - 'em', - 'ocpp', - 'overtemperature', - 'switching_phases', - '1p_charging_disabled', - ]), - }), - 'context': , - 'entity_id': 'sensor.1p7k_500006_limit_reason', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'installation_current', - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.1p7k_500006_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '500006_power', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': '1p7k_500006 Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.1p7k_500006_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0000', - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'available', - 'charging', - 'connected', - 'error', - 'locked', - 'need_auth', - 'paused', - 'paused_by_scheduler', - 'updating_firmware', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.1p7k_500006_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'State', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'state', - 'unique_id': '500006_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': '1p7k_500006 State', - 'options': list([ - 'available', - 'charging', - 'connected', - 'error', - 'locked', - 'need_auth', - 'paused', - 'paused_by_scheduler', - 'updating_firmware', - ]), - }), - 'context': , - 'entity_id': 'sensor.1p7k_500006_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'available', - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.1p7k_500006_temperature', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '500006_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': '1p7k_500006 Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.1p7k_500006_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '34.5', - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.1p7k_500006_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '500006_voltage', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.1p7k_500006_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': '1p7k_500006 Voltage', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.1p7k_500006_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '220.0', - }) -# --- diff --git a/tests/components/lektrico/snapshots/test_switch.ambr b/tests/components/lektrico/snapshots/test_switch.ambr deleted file mode 100644 index 3f4a1693315..00000000000 --- a/tests/components/lektrico/snapshots/test_switch.ambr +++ /dev/null @@ -1,93 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[switch.1p7k_500006_authentication-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.1p7k_500006_authentication', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Authentication', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'authentication', - 'unique_id': '500006_authentication', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[switch.1p7k_500006_authentication-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '1p7k_500006 Authentication', - }), - 'context': , - 'entity_id': 'switch.1p7k_500006_authentication', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_all_entities[switch.1p7k_500006_lock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': 
None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.1p7k_500006_lock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Lock', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lock', - 'unique_id': '500006_lock', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[switch.1p7k_500006_lock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '1p7k_500006 Lock', - }), - 'context': , - 'entity_id': 'switch.1p7k_500006_lock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/lektrico/test_binary_sensor.py b/tests/components/lektrico/test_binary_sensor.py deleted file mode 100644 index d49eac6cc23..00000000000 --- a/tests/components/lektrico/test_binary_sensor.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Tests for the Lektrico binary sensor platform.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_device: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - - with patch.multiple( - "homeassistant.components.lektrico", - CHARGERS_PLATFORMS=[Platform.BINARY_SENSOR], - LB_DEVICES_PLATFORMS=[Platform.BINARY_SENSOR], - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lektrico/test_button.py b/tests/components/lektrico/test_button.py deleted file mode 100644 index 7bd77848d21..00000000000 --- a/tests/components/lektrico/test_button.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Tests for the Lektrico button platform.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_device: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - - with patch.multiple( - "homeassistant.components.lektrico", - CHARGERS_PLATFORMS=[Platform.BUTTON], - LB_DEVICES_PLATFORMS=[Platform.BUTTON], - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lektrico/test_config_flow.py b/tests/components/lektrico/test_config_flow.py deleted file mode 100644 index 15ab5f7cdda..00000000000 --- a/tests/components/lektrico/test_config_flow.py +++ /dev/null @@ -1,173 +0,0 @@ -"""Tests for the Lektrico Charging Station config flow.""" - -import dataclasses -from ipaddress import ip_address - -from lektricowifi import DeviceConnectionError - -from homeassistant.components.lektrico.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF -from homeassistant.const import ( - ATTR_HW_VERSION, - ATTR_SERIAL_NUMBER, - CONF_HOST, - CONF_TYPE, -) -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .conftest import ( - MOCKED_DEVICE_BOARD_REV, - MOCKED_DEVICE_IP_ADDRESS, - MOCKED_DEVICE_SERIAL_NUMBER, - MOCKED_DEVICE_TYPE, - MOCKED_DEVICE_ZEROCONF_DATA, -) - -from tests.common import MockConfigEntry - - -async def test_user_setup(hass: HomeAssistant, mock_device, mock_setup_entry) -> None: - """Test manually setting up.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == SOURCE_USER - assert "flow_id" in result - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, - }, - ) - - assert result.get("type") is FlowResultType.CREATE_ENTRY - assert result.get("title") == f"{MOCKED_DEVICE_TYPE}_{MOCKED_DEVICE_SERIAL_NUMBER}" - assert result.get("data") == { - CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, - ATTR_SERIAL_NUMBER: MOCKED_DEVICE_SERIAL_NUMBER, - CONF_TYPE: MOCKED_DEVICE_TYPE, - ATTR_HW_VERSION: MOCKED_DEVICE_BOARD_REV, - } - assert "result" in result - assert len(mock_setup_entry.mock_calls) == 1 - assert result.get("result").unique_id == MOCKED_DEVICE_SERIAL_NUMBER - - -async def test_user_setup_already_exists( - hass: HomeAssistant, mock_device, mock_config_entry: MockConfigEntry -) -> None: - """Test manually setting up when the device already exists.""" - mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert not result["errors"] - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_user_setup_device_offline(hass: HomeAssistant, mock_device) -> None: - """Test manually setting up when device is offline.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert 
result["step_id"] == "user" - assert not result["errors"] - - mock_device.device_config.side_effect = DeviceConnectionError - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {CONF_HOST: "cannot_connect"} - assert result["step_id"] == "user" - - mock_device.device_config.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_discovered_zeroconf( - hass: HomeAssistant, mock_device, mock_setup_entry -) -> None: - """Test we can setup when discovered from zeroconf.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=MOCKED_DEVICE_ZEROCONF_DATA, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] is None - assert result.get("step_id") == "confirm" - - result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["data"] == { - CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, - ATTR_SERIAL_NUMBER: MOCKED_DEVICE_SERIAL_NUMBER, - CONF_TYPE: MOCKED_DEVICE_TYPE, - ATTR_HW_VERSION: MOCKED_DEVICE_BOARD_REV, - } - assert result2["title"] == f"{MOCKED_DEVICE_TYPE}_{MOCKED_DEVICE_SERIAL_NUMBER}" - - -async def test_zeroconf_setup_already_exists( - hass: HomeAssistant, mock_device, mock_config_entry: MockConfigEntry -) -> None: - """Test we abort zeroconf flow if device already configured.""" - mock_config_entry.add_to_hass(hass) - zc_data_new_ip = dataclasses.replace(MOCKED_DEVICE_ZEROCONF_DATA) - zc_data_new_ip.ip_address = ip_address(MOCKED_DEVICE_IP_ADDRESS) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=zc_data_new_ip, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_discovered_zeroconf_device_connection_error( - hass: HomeAssistant, mock_device -) -> None: - """Test we can setup when discovered from zeroconf but device went offline.""" - - mock_device.device_config.side_effect = DeviceConnectionError - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=MOCKED_DEVICE_ZEROCONF_DATA, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "cannot_connect" diff --git a/tests/components/lektrico/test_init.py b/tests/components/lektrico/test_init.py deleted file mode 100644 index 93068ffe531..00000000000 --- a/tests/components/lektrico/test_init.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Tests for the Lektrico integration.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.components.lektrico.const import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from . 
import setup_integration - -from tests.common import MockConfigEntry - - -async def test_device_info( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_device: AsyncMock, - mock_config_entry: MockConfigEntry, - device_registry: dr.DeviceRegistry, -) -> None: - """Test device registry integration.""" - await setup_integration(hass, mock_config_entry) - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} - ) - assert device_entry is not None - assert device_entry == snapshot diff --git a/tests/components/lektrico/test_number.py b/tests/components/lektrico/test_number.py deleted file mode 100644 index ade6515ca72..00000000000 --- a/tests/components/lektrico/test_number.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Tests for the Lektrico number platform.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_device: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch.multiple( - "homeassistant.components.lektrico", - CHARGERS_PLATFORMS=[Platform.NUMBER], - LB_DEVICES_PLATFORMS=[Platform.NUMBER], - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lektrico/test_select.py b/tests/components/lektrico/test_select.py deleted file mode 100644 index cb09c47535e..00000000000 --- a/tests/components/lektrico/test_select.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Tests for the Lektrico select platform.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_device: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch.multiple( - "homeassistant.components.lektrico", - CHARGERS_PLATFORMS=[Platform.SELECT], - LB_DEVICES_PLATFORMS=[Platform.SELECT], - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lektrico/test_sensor.py b/tests/components/lektrico/test_sensor.py deleted file mode 100644 index 27be7ff1c11..00000000000 --- a/tests/components/lektrico/test_sensor.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Tests for the Lektrico sensor platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_device: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch.multiple( - "homeassistant.components.lektrico", - CHARGERS_PLATFORMS=[Platform.SENSOR], - LB_DEVICES_PLATFORMS=[Platform.SENSOR], - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lektrico/test_switch.py b/tests/components/lektrico/test_switch.py deleted file mode 100644 index cfa693d9e44..00000000000 --- a/tests/components/lektrico/test_switch.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Tests for the Lektrico switch platform.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_device: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - - with patch.multiple( - "homeassistant.components.lektrico", - CHARGERS_PLATFORMS=[Platform.SWITCH], - LB_DEVICES_PLATFORMS=[Platform.SWITCH], - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lg_netcast/__init__.py b/tests/components/lg_netcast/__init__.py index 6e608ae207b..ce3e09aeb65 100644 --- a/tests/components/lg_netcast/__init__.py +++ b/tests/components/lg_netcast/__init__.py @@ -1,7 +1,7 @@ """Tests for LG Netcast TV.""" from unittest.mock import patch -import xml.etree.ElementTree as ET +from xml.etree import ElementTree from pylgnetcast import AccessTokenError, LgNetCastClient, SessionIdError import requests @@ -56,7 +56,7 @@ def _patched_lgnetcast_client( if always_404: return None if invalid_details: - raise ET.ParseError("Mocked Parsed Error") + raise ElementTree.ParseError("Mocked Parsed Error") return { "uuid": UNIQUE_ID if not no_unique_id else None, "model_name": MODEL_NAME, diff --git a/tests/components/lg_netcast/conftest.py b/tests/components/lg_netcast/conftest.py new file mode 100644 index 00000000000..eb13d5c8c67 --- /dev/null +++ b/tests/components/lg_netcast/conftest.py @@ -0,0 +1,13 @@ +"""Common fixtures and objects for the LG Netcast integration tests.""" + +import pytest + +from homeassistant.core import HomeAssistant, ServiceCall + +from tests.common import async_mock_service + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") diff --git a/tests/components/lg_netcast/test_config_flow.py b/tests/components/lg_netcast/test_config_flow.py index 7959c0c445e..2ecbadbaf44 100644 --- a/tests/components/lg_netcast/test_config_flow.py +++ b/tests/components/lg_netcast/test_config_flow.py @@ -3,11 +3,9 @@ from datetime import timedelta from unittest.mock import DEFAULT, patch -import pytest - from homeassistant import data_entry_flow from 
homeassistant.components.lg_netcast.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import ( CONF_ACCESS_TOKEN, CONF_HOST, @@ -26,6 +24,8 @@ from . import ( _patch_lg_netcast, ) +from tests.common import MockConfigEntry + async def test_show_form(hass: HomeAssistant) -> None: """Test that the form is served with no input.""" @@ -114,10 +114,6 @@ async def test_manual_host_unsuccessful_details_response(hass: HomeAssistant) -> assert result["reason"] == "cannot_connect" -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.lg_netcast.config.abort.invalid_host"], -) async def test_manual_host_no_unique_id_response(hass: HomeAssistant) -> None: """Test manual host configuration.""" with _patch_lg_netcast(no_unique_id=True): @@ -150,6 +146,77 @@ async def test_invalid_session_id(hass: HomeAssistant) -> None: assert result2["errors"]["base"] == "cannot_connect" +async def test_import(hass: HomeAssistant) -> None: + """Test that the import works.""" + with _patch_lg_netcast(): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_ACCESS_TOKEN: FAKE_PIN, + CONF_NAME: MODEL_NAME, + }, + ) + + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == UNIQUE_ID + assert result["data"] == { + CONF_HOST: IP_ADDRESS, + CONF_ACCESS_TOKEN: FAKE_PIN, + CONF_NAME: MODEL_NAME, + CONF_MODEL: MODEL_NAME, + CONF_ID: UNIQUE_ID, + } + + +async def test_import_not_online(hass: HomeAssistant) -> None: + """Test that the import works.""" + with _patch_lg_netcast(fail_connection=True): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_ACCESS_TOKEN: FAKE_PIN, + CONF_NAME: MODEL_NAME, + }, + ) + + assert result["type"] == data_entry_flow.FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +async def test_import_duplicate_error(hass: HomeAssistant) -> None: + """Test that errors are shown when duplicates are added during import.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=UNIQUE_ID, + data={ + CONF_HOST: IP_ADDRESS, + CONF_ACCESS_TOKEN: FAKE_PIN, + CONF_NAME: MODEL_NAME, + CONF_ID: UNIQUE_ID, + }, + ) + config_entry.add_to_hass(hass) + + with _patch_lg_netcast(): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_ACCESS_TOKEN: FAKE_PIN, + CONF_NAME: MODEL_NAME, + CONF_ID: UNIQUE_ID, + }, + ) + + assert result["type"] == data_entry_flow.FlowResultType.ABORT + assert result["reason"] == "already_configured" + + async def test_display_access_token_aborted(hass: HomeAssistant) -> None: """Test Access token display is cancelled.""" diff --git a/tests/components/lg_netcast/test_device_trigger.py b/tests/components/lg_netcast/test_device_trigger.py index c8d725afde1..05911acc41d 100644 --- a/tests/components/lg_netcast/test_device_trigger.py +++ b/tests/components/lg_netcast/test_device_trigger.py @@ -43,9 +43,7 @@ async def test_get_triggers( async def test_if_fires_on_turn_on_request( - hass: HomeAssistant, - service_calls: list[ServiceCall], - device_registry: dr.DeviceRegistry, + hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry ) -> None: """Test for turn_on triggers 
firing.""" await setup_lgnetcast(hass) @@ -98,11 +96,11 @@ async def test_if_fires_on_turn_on_request( ) await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[1].data["some"] == device.id - assert service_calls[1].data["id"] == 0 - assert service_calls[2].data["some"] == ENTITY_ID - assert service_calls[2].data["id"] == 0 + assert len(calls) == 2 + assert calls[0].data["some"] == device.id + assert calls[0].data["id"] == 0 + assert calls[1].data["some"] == ENTITY_ID + assert calls[1].data["id"] == 0 async def test_failure_scenarios( diff --git a/tests/components/lg_netcast/test_trigger.py b/tests/components/lg_netcast/test_trigger.py index d838b931560..b0c2a86ec21 100644 --- a/tests/components/lg_netcast/test_trigger.py +++ b/tests/components/lg_netcast/test_trigger.py @@ -18,9 +18,7 @@ from tests.common import MockEntity, MockEntityPlatform async def test_lg_netcast_turn_on_trigger_device_id( - hass: HomeAssistant, - service_calls: list[ServiceCall], - device_registry: dr.DeviceRegistry, + hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry ) -> None: """Test for turn_on trigger by device_id firing.""" await setup_lgnetcast(hass) @@ -58,14 +56,14 @@ async def test_lg_netcast_turn_on_trigger_device_id( ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == device.id - assert service_calls[1].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["some"] == device.id + assert calls[0].data["id"] == 0 with patch("homeassistant.config.load_yaml_dict", return_value={}): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) - service_calls.clear() + calls.clear() with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -76,11 +74,11 @@ async def test_lg_netcast_turn_on_trigger_device_id( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 0 async def test_lg_netcast_turn_on_trigger_entity_id( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for turn_on triggers by entity firing.""" await setup_lgnetcast(hass) @@ -115,9 +113,9 @@ async def test_lg_netcast_turn_on_trigger_entity_id( ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == ENTITY_ID - assert service_calls[1].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["some"] == ENTITY_ID + assert calls[0].data["id"] == 0 async def test_wrong_trigger_platform_type( diff --git a/tests/components/lg_soundbar/test_config_flow.py b/tests/components/lg_soundbar/test_config_flow.py index 01e16ecb8d0..806c993e792 100644 --- a/tests/components/lg_soundbar/test_config_flow.py +++ b/tests/components/lg_soundbar/test_config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable import socket from typing import Any -from unittest.mock import DEFAULT, MagicMock, patch +from unittest.mock import DEFAULT, patch from homeassistant import config_entries from homeassistant.components.lg_soundbar.const import DEFAULT_PORT, DOMAIN @@ -17,12 +17,8 @@ from tests.common import MockConfigEntry def setup_mock_temescal( - hass: HomeAssistant, - mock_temescal: MagicMock, - mac_info_dev: dict[str, Any] | None = None, - product_info: dict[str, Any] | None = None, - info: dict[str, Any] | None = None, -) -> None: + hass, mock_temescal, mac_info_dev=None, product_info=None, 
info=None +): """Set up a mock of the temescal object to craft our expected responses.""" tmock = mock_temescal.temescal instance = tmock.return_value diff --git a/tests/components/lg_thinq/__init__.py b/tests/components/lg_thinq/__init__.py deleted file mode 100644 index a5ba55ab1c9..00000000000 --- a/tests/components/lg_thinq/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for the LG ThinQ integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/lg_thinq/conftest.py b/tests/components/lg_thinq/conftest.py deleted file mode 100644 index 05cb3164137..00000000000 --- a/tests/components/lg_thinq/conftest.py +++ /dev/null @@ -1,110 +0,0 @@ -"""Configure tests for the LGThinQ integration.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -from thinqconnect import ThinQAPIException - -from homeassistant.components.lg_thinq.const import CONF_CONNECT_CLIENT_ID, DOMAIN -from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY - -from .const import MOCK_CONNECT_CLIENT_ID, MOCK_COUNTRY, MOCK_PAT, MOCK_UUID - -from tests.common import MockConfigEntry, load_json_object_fixture - - -def mock_thinq_api_response( - *, - status: int = 200, - body: dict | None = None, - error_code: str | None = None, - error_message: str | None = None, -) -> MagicMock: - """Create a mock thinq api response.""" - response = MagicMock() - response.status = status - response.body = body - response.error_code = error_code - response.error_message = error_message - return response - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Create a mock config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title=f"Test {DOMAIN}", - unique_id=MOCK_PAT, - data={ - CONF_ACCESS_TOKEN: MOCK_PAT, - CONF_CONNECT_CLIENT_ID: MOCK_CONNECT_CLIENT_ID, - CONF_COUNTRY: MOCK_COUNTRY, - }, - ) - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Mock setting up a config entry.""" - with patch( - "homeassistant.components.lg_thinq.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_uuid() -> Generator[AsyncMock]: - """Mock a uuid.""" - with ( - patch("uuid.uuid4", autospec=True, return_value=MOCK_UUID) as mock_uuid, - patch( - "homeassistant.components.lg_thinq.config_flow.uuid.uuid4", - new=mock_uuid, - ), - ): - yield mock_uuid.return_value - - -@pytest.fixture -def mock_thinq_api(mock_thinq_mqtt_client: AsyncMock) -> Generator[AsyncMock]: - """Mock a thinq api.""" - with ( - patch("homeassistant.components.lg_thinq.ThinQApi", autospec=True) as mock_api, - patch( - "homeassistant.components.lg_thinq.config_flow.ThinQApi", - new=mock_api, - ), - ): - thinq_api = mock_api.return_value - thinq_api.async_get_device_list.return_value = [ - load_json_object_fixture("air_conditioner/device.json", DOMAIN) - ] - thinq_api.async_get_device_profile.return_value = load_json_object_fixture( - "air_conditioner/profile.json", DOMAIN - ) - thinq_api.async_get_device_status.return_value = load_json_object_fixture( - "air_conditioner/status.json", DOMAIN - ) - yield thinq_api - - -@pytest.fixture -def mock_thinq_mqtt_client() -> 
Generator[AsyncMock]: - """Mock a thinq api.""" - with patch( - "homeassistant.components.lg_thinq.mqtt.ThinQMQTTClient", autospec=True - ) as mock_api: - yield mock_api - - -@pytest.fixture -def mock_invalid_thinq_api(mock_thinq_api: AsyncMock) -> AsyncMock: - """Mock an invalid thinq api.""" - mock_thinq_api.async_get_device_list = AsyncMock( - side_effect=ThinQAPIException( - code="1309", message="Not allowed api call", headers=None - ) - ) - return mock_thinq_api diff --git a/tests/components/lg_thinq/const.py b/tests/components/lg_thinq/const.py deleted file mode 100644 index f46baa61c38..00000000000 --- a/tests/components/lg_thinq/const.py +++ /dev/null @@ -1,8 +0,0 @@ -"""Constants for lgthinq test.""" - -from typing import Final - -MOCK_PAT: Final[str] = "123abc4567de8f90g123h4ij56klmn789012p345rst6uvw789xy" -MOCK_UUID: Final[str] = "1b3deabc-123d-456d-987d-2a1c7b3bdb67" -MOCK_CONNECT_CLIENT_ID: Final[str] = f"home-assistant-{MOCK_UUID}" -MOCK_COUNTRY: Final[str] = "KR" diff --git a/tests/components/lg_thinq/fixtures/air_conditioner/device.json b/tests/components/lg_thinq/fixtures/air_conditioner/device.json deleted file mode 100644 index fb931c69929..00000000000 --- a/tests/components/lg_thinq/fixtures/air_conditioner/device.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "deviceId": "MW2-2E247F93-B570-46A6-B827-920E9E10F966", - "deviceInfo": { - "deviceType": "DEVICE_AIR_CONDITIONER", - "modelName": "PAC_910604_WW", - "alias": "Test air conditioner", - "reportable": true - } -} diff --git a/tests/components/lg_thinq/fixtures/air_conditioner/profile.json b/tests/components/lg_thinq/fixtures/air_conditioner/profile.json deleted file mode 100644 index 0d45dc5c9f4..00000000000 --- a/tests/components/lg_thinq/fixtures/air_conditioner/profile.json +++ /dev/null @@ -1,154 +0,0 @@ -{ - "notification": { - "push": ["WATER_IS_FULL"] - }, - "property": { - "airConJobMode": { - "currentJobMode": { - "mode": ["r", "w"], - "type": "enum", - "value": { - "r": ["AIR_CLEAN", "COOL", "AIR_DRY"], - "w": ["AIR_CLEAN", "COOL", "AIR_DRY"] - } - } - }, - "airFlow": { - "windStrength": { - "mode": ["r", "w"], - "type": "enum", - "value": { - "r": ["LOW", "HIGH", "MID"], - "w": ["LOW", "HIGH", "MID"] - } - } - }, - "airQualitySensor": { - "PM1": { - "mode": ["r"], - "type": "number" - }, - "PM10": { - "mode": ["r"], - "type": "number" - }, - "PM2": { - "mode": ["r"], - "type": "number" - }, - "humidity": { - "mode": ["r"], - "type": "number" - }, - "monitoringEnabled": { - "mode": ["r", "w"], - "type": "enum", - "value": { - "r": ["ON_WORKING", "ALWAYS"], - "w": ["ON_WORKING", "ALWAYS"] - } - }, - "oder": { - "mode": ["r"], - "type": "number" - }, - "totalPollution": { - "mode": ["r"], - "type": "number" - } - }, - "operation": { - "airCleanOperationMode": { - "mode": ["w"], - "type": "enum", - "value": { - "w": ["START", "STOP"] - } - }, - "airConOperationMode": { - "mode": ["r", "w"], - "type": "enum", - "value": { - "r": ["POWER_ON", "POWER_OFF"], - "w": ["POWER_ON", "POWER_OFF"] - } - } - }, - "powerSave": { - "powerSaveEnabled": { - "mode": ["r", "w"], - "type": "boolean", - "value": { - "r": [false, true], - "w": [false, true] - } - } - }, - "temperature": { - "coolTargetTemperature": { - "mode": ["w"], - "type": "range", - "value": { - "w": { - "max": 30, - "min": 18, - "step": 1 - } - } - }, - "currentTemperature": { - "mode": ["r"], - "type": "number" - }, - "targetTemperature": { - "mode": ["r", "w"], - "type": "range", - "value": { - "r": { - "max": 30, - "min": 18, - "step": 1 - }, - "w": { - "max": 
30, - "min": 18, - "step": 1 - } - } - }, - "unit": { - "mode": ["r"], - "type": "enum", - "value": { - "r": ["C", "F"] - } - } - }, - "timer": { - "relativeHourToStart": { - "mode": ["r", "w"], - "type": "number" - }, - "relativeHourToStop": { - "mode": ["r", "w"], - "type": "number" - }, - "relativeMinuteToStart": { - "mode": ["r", "w"], - "type": "number" - }, - "relativeMinuteToStop": { - "mode": ["r", "w"], - "type": "number" - }, - "absoluteHourToStart": { - "mode": ["r", "w"], - "type": "number" - }, - "absoluteMinuteToStart": { - "mode": ["r", "w"], - "type": "number" - } - } - } -} diff --git a/tests/components/lg_thinq/fixtures/air_conditioner/status.json b/tests/components/lg_thinq/fixtures/air_conditioner/status.json deleted file mode 100644 index 90d15d1ae16..00000000000 --- a/tests/components/lg_thinq/fixtures/air_conditioner/status.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "airConJobMode": { - "currentJobMode": "COOL" - }, - "airFlow": { - "windStrength": "MID" - }, - "airQualitySensor": { - "PM1": 12, - "PM10": 7, - "PM2": 24, - "humidity": 40, - "monitoringEnabled": "ON_WORKING", - "totalPollution": 3, - "totalPollutionLevel": "GOOD" - }, - "filterInfo": { - "filterLifetime": 540, - "usedTime": 180 - }, - "operation": { - "airConOperationMode": "POWER_ON" - }, - "powerSave": { - "powerSaveEnabled": false - }, - "sleepTimer": { - "relativeStopTimer": "UNSET" - }, - "temperature": { - "currentTemperature": 25, - "targetTemperature": 19, - "unit": "C" - }, - "timer": { - "relativeStartTimer": "UNSET", - "relativeStopTimer": "UNSET", - "absoluteStartTimer": "SET", - "absoluteStopTimer": "UNSET", - "absoluteHourToStart": 13, - "absoluteMinuteToStart": 14 - } -} diff --git a/tests/components/lg_thinq/snapshots/test_climate.ambr b/tests/components/lg_thinq/snapshots/test_climate.ambr deleted file mode 100644 index e9470c3de03..00000000000 --- a/tests/components/lg_thinq/snapshots/test_climate.ambr +++ /dev/null @@ -1,86 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[climate.test_air_conditioner-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'fan_modes': list([ - 'low', - 'high', - 'mid', - ]), - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 30, - 'min_temp': 18, - 'preset_modes': list([ - 'air_clean', - ]), - 'target_temp_step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.test_air_conditioner', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'lg_thinq', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': , - 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_climate_air_conditioner', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[climate.test_air_conditioner-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_humidity': 40, - 'current_temperature': 25, - 'fan_mode': 'mid', - 'fan_modes': list([ - 'low', - 'high', - 'mid', - ]), - 'friendly_name': 'Test air conditioner', - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 30, - 'min_temp': 18, - 'preset_mode': None, - 'preset_modes': list([ - 'air_clean', - ]), - 'supported_features': , - 'target_temp_step': 1, - 'temperature': 19, - }), - 'context': , - 'entity_id': 
'climate.test_air_conditioner', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'cool', - }) -# --- diff --git a/tests/components/lg_thinq/snapshots/test_event.ambr b/tests/components/lg_thinq/snapshots/test_event.ambr deleted file mode 100644 index 025f4496aeb..00000000000 --- a/tests/components/lg_thinq/snapshots/test_event.ambr +++ /dev/null @@ -1,55 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[event.test_air_conditioner_notification-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'event_types': list([ - 'water_is_full', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'event', - 'entity_category': None, - 'entity_id': 'event.test_air_conditioner_notification', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Notification', - 'platform': 'lg_thinq', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_notification', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[event.test_air_conditioner_notification-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'event_type': None, - 'event_types': list([ - 'water_is_full', - ]), - 'friendly_name': 'Test air conditioner Notification', - }), - 'context': , - 'entity_id': 'event.test_air_conditioner_notification', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/lg_thinq/snapshots/test_number.ambr b/tests/components/lg_thinq/snapshots/test_number.ambr deleted file mode 100644 index 68f01854501..00000000000 --- a/tests/components/lg_thinq/snapshots/test_number.ambr +++ /dev/null @@ -1,113 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[number.test_air_conditioner_schedule_turn_off-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.test_air_conditioner_schedule_turn_off', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Schedule turn-off', - 'platform': 'lg_thinq', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_relative_hour_to_stop', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[number.test_air_conditioner_schedule_turn_off-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test air conditioner Schedule turn-off', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.test_air_conditioner_schedule_turn_off', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_all_entities[number.test_air_conditioner_schedule_turn_on-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 
'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.test_air_conditioner_schedule_turn_on', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Schedule turn-on', - 'platform': 'lg_thinq', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_relative_hour_to_start', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[number.test_air_conditioner_schedule_turn_on-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test air conditioner Schedule turn-on', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.test_air_conditioner_schedule_turn_on', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/lg_thinq/snapshots/test_sensor.ambr b/tests/components/lg_thinq/snapshots/test_sensor.ambr deleted file mode 100644 index 387df916eba..00000000000 --- a/tests/components/lg_thinq/snapshots/test_sensor.ambr +++ /dev/null @@ -1,205 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[sensor.test_air_conditioner_humidity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_air_conditioner_humidity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Humidity', - 'platform': 'lg_thinq', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_humidity', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.test_air_conditioner_humidity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'humidity', - 'friendly_name': 'Test air conditioner Humidity', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_air_conditioner_humidity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40', - }) -# --- -# name: test_all_entities[sensor.test_air_conditioner_pm1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_air_conditioner_pm1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PM1', - 'platform': 'lg_thinq', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_pm1', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: 
test_all_entities[sensor.test_air_conditioner_pm1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pm1', - 'friendly_name': 'Test air conditioner PM1', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.test_air_conditioner_pm1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '12', - }) -# --- -# name: test_all_entities[sensor.test_air_conditioner_pm10-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_air_conditioner_pm10', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PM10', - 'platform': 'lg_thinq', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_pm10', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_all_entities[sensor.test_air_conditioner_pm10-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pm10', - 'friendly_name': 'Test air conditioner PM10', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.test_air_conditioner_pm10', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '7', - }) -# --- -# name: test_all_entities[sensor.test_air_conditioner_pm2_5-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_air_conditioner_pm2_5', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PM2.5', - 'platform': 'lg_thinq', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_pm2', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_all_entities[sensor.test_air_conditioner_pm2_5-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pm25', - 'friendly_name': 'Test air conditioner PM2.5', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.test_air_conditioner_pm2_5', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '24', - }) -# --- diff --git a/tests/components/lg_thinq/test_climate.py b/tests/components/lg_thinq/test_climate.py deleted file mode 100644 index 24ed3ad230d..00000000000 --- a/tests/components/lg_thinq/test_climate.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Tests for the LG Thinq climate platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_thinq_api: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.CLIMATE]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lg_thinq/test_config_flow.py b/tests/components/lg_thinq/test_config_flow.py deleted file mode 100644 index e7ee632810e..00000000000 --- a/tests/components/lg_thinq/test_config_flow.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Test the lgthinq config flow.""" - -from unittest.mock import AsyncMock - -from homeassistant.components.lg_thinq.const import CONF_CONNECT_CLIENT_ID, DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .const import MOCK_CONNECT_CLIENT_ID, MOCK_COUNTRY, MOCK_PAT - -from tests.common import MockConfigEntry - - -async def test_config_flow( - hass: HomeAssistant, - mock_thinq_api: AsyncMock, - mock_uuid: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test that an thinq entry is normally created.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_ACCESS_TOKEN: MOCK_PAT, CONF_COUNTRY: MOCK_COUNTRY}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { - CONF_ACCESS_TOKEN: MOCK_PAT, - CONF_COUNTRY: MOCK_COUNTRY, - CONF_CONNECT_CLIENT_ID: MOCK_CONNECT_CLIENT_ID, - } - - mock_thinq_api.async_get_device_list.assert_called_once() - - -async def test_config_flow_invalid_pat( - hass: HomeAssistant, mock_invalid_thinq_api: AsyncMock -) -> None: - """Test that an thinq flow should be aborted with an invalid PAT.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={CONF_ACCESS_TOKEN: MOCK_PAT, CONF_COUNTRY: MOCK_COUNTRY}, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "token_unauthorized"} - mock_invalid_thinq_api.async_get_device_list.assert_called_once() - - -async def test_config_flow_already_configured( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_thinq_api: AsyncMock -) -> None: - """Test that thinq flow should be aborted when already configured.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={CONF_ACCESS_TOKEN: MOCK_PAT, CONF_COUNTRY: MOCK_COUNTRY}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/lg_thinq/test_event.py b/tests/components/lg_thinq/test_event.py deleted file mode 100644 index bea758cb943..00000000000 --- a/tests/components/lg_thinq/test_event.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Tests for the LG Thinq event platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest 
-from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_thinq_api: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.EVENT]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lg_thinq/test_init.py b/tests/components/lg_thinq/test_init.py deleted file mode 100644 index 7da7e79fec0..00000000000 --- a/tests/components/lg_thinq/test_init.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Tests for the LG ThinQ integration.""" - -from unittest.mock import AsyncMock - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def test_load_unload_entry( - hass: HomeAssistant, - mock_thinq_api: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test load and unload entry.""" - mock_config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_remove(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/lg_thinq/test_number.py b/tests/components/lg_thinq/test_number.py deleted file mode 100644 index e578e4eba7a..00000000000 --- a/tests/components/lg_thinq/test_number.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Tests for the LG Thinq number platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_thinq_api: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.NUMBER]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lg_thinq/test_sensor.py b/tests/components/lg_thinq/test_sensor.py deleted file mode 100644 index 02b91b4771b..00000000000 --- a/tests/components/lg_thinq/test_sensor.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Tests for the LG Thinq sensor platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_thinq_api: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lidarr/conftest.py b/tests/components/lidarr/conftest.py index bd87fa947bc..588acb2b87f 100644 --- a/tests/components/lidarr/conftest.py +++ b/tests/components/lidarr/conftest.py @@ -2,12 +2,13 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable, Generator +from collections.abc import Awaitable, Callable from http import HTTPStatus from aiohttp.client_exceptions import ClientError from aiopyarr.lidarr_client import LidarrClient import pytest +from typing_extensions import Generator from homeassistant.components.lidarr.const import DOMAIN from homeassistant.const import ( @@ -44,12 +45,10 @@ def mock_error( aioclient_mock.get(f"{API_URL}/rootfolder", status=status) aioclient_mock.get(f"{API_URL}/system/status", status=status) aioclient_mock.get(f"{API_URL}/wanted/missing", status=status) - aioclient_mock.get(f"{API_URL}/album", status=status) aioclient_mock.get(f"{API_URL}/queue", exc=ClientError) aioclient_mock.get(f"{API_URL}/rootfolder", exc=ClientError) aioclient_mock.get(f"{API_URL}/system/status", exc=ClientError) aioclient_mock.get(f"{API_URL}/wanted/missing", exc=ClientError) - aioclient_mock.get(f"{API_URL}/album", exc=ClientError) @pytest.fixture @@ -117,11 +116,6 @@ def mock_connection(aioclient_mock: AiohttpClientMocker) -> None: text=load_fixture("lidarr/wanted-missing.json"), headers={"Content-Type": CONTENT_TYPE_JSON}, ) - aioclient_mock.get( - f"{API_URL}/album", - text=load_fixture("lidarr/album.json"), - headers={"Content-Type": CONTENT_TYPE_JSON}, - ) aioclient_mock.get( f"{API_URL}/rootfolder", text=load_fixture("lidarr/rootfolder-linux.json"), diff --git a/tests/components/lidarr/fixtures/album.json b/tests/components/lidarr/fixtures/album.json deleted file mode 100644 index d257cabf1f1..00000000000 --- a/tests/components/lidarr/fixtures/album.json +++ /dev/null @@ -1,155 +0,0 @@ -[ - { - "id": 0, - "title": "string", - "disambiguation": "string", - "overview": "string", - "artistId": 0, - "foreignAlbumId": "string", - "monitored": true, - "anyReleaseOk": true, - "profileId": 0, - "duration": 0, - "albumType": "string", - "secondaryTypes": ["string"], - "mediumCount": 0, - "ratings": { - "votes": 0, - "value": 0 - }, - "releaseDate": "2024-09-09T20:16:28.493Z", - "releases": [ - { - "id": 0, - "albumId": 0, - "foreignReleaseId": "string", - "title": "string", - "status": "string", - "duration": 0, - "trackCount": 0, - "media": [ - { - "mediumNumber": 0, - "mediumName": "string", - "mediumFormat": "string" - } - ], - "mediumCount": 0, - "disambiguation": "string", - "country": ["string"], - "label": ["string"], - "format": "string", - "monitored": true - } - ], - "genres": ["string"], - "media": [ - { - "mediumNumber": 0, - "mediumName": "string", - "mediumFormat": "string" - } - ], - "artist": { - "id": 0, - "status": "continuing", - "ended": true, - "artistName": "string", - "foreignArtistId": "string", - "mbId": 
"string", - "tadbId": 0, - "discogsId": 0, - "allMusicId": "string", - "overview": "string", - "artistType": "string", - "disambiguation": "string", - "links": [ - { - "url": "string", - "name": "string" - } - ], - "nextAlbum": "string", - "lastAlbum": "string", - "images": [ - { - "url": "string", - "coverType": "unknown", - "extension": "string", - "remoteUrl": "string" - } - ], - "members": [ - { - "name": "string", - "instrument": "string", - "images": [ - { - "url": "string", - "coverType": "unknown", - "extension": "string", - "remoteUrl": "string" - } - ] - } - ], - "remotePoster": "string", - "path": "string", - "qualityProfileId": 0, - "metadataProfileId": 0, - "monitored": true, - "monitorNewItems": "all", - "rootFolderPath": "string", - "folder": "string", - "genres": ["string"], - "cleanName": "string", - "sortName": "string", - "tags": [0], - "added": "2024-09-09T20:16:28.493Z", - "addOptions": { - "monitor": "all", - "albumsToMonitor": ["string"], - "monitored": true, - "searchForMissingAlbums": true - }, - "ratings": { - "votes": 0, - "value": 0 - }, - "statistics": { - "albumCount": 0, - "trackFileCount": 0, - "trackCount": 0, - "totalTrackCount": 0, - "sizeOnDisk": 0, - "percentOfTracks": 0 - } - }, - "images": [ - { - "url": "string", - "coverType": "unknown", - "extension": "string", - "remoteUrl": "string" - } - ], - "links": [ - { - "url": "string", - "name": "string" - } - ], - "statistics": { - "trackFileCount": 0, - "trackCount": 0, - "totalTrackCount": 0, - "sizeOnDisk": 0, - "percentOfTracks": 0 - }, - "addOptions": { - "addType": "automatic", - "searchForNewAlbum": true - }, - "remoteCover": "string" - } -] diff --git a/tests/components/lidarr/test_config_flow.py b/tests/components/lidarr/test_config_flow.py index 0097e66fe24..e44b03cd2a2 100644 --- a/tests/components/lidarr/test_config_flow.py +++ b/tests/components/lidarr/test_config_flow.py @@ -1,15 +1,13 @@ """Test Lidarr config flow.""" from homeassistant.components.lidarr.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import CONF_DATA, MOCK_INPUT, ComponentSetup -from tests.common import MockConfigEntry - async def test_flow_user_form(hass: HomeAssistant, connection) -> None: """Test that the user set up form is served.""" @@ -97,14 +95,20 @@ async def test_flow_user_unknown_error(hass: HomeAssistant, unknown) -> None: async def test_flow_reauth( - hass: HomeAssistant, - setup_integration: ComponentSetup, - connection, - config_entry: MockConfigEntry, + hass: HomeAssistant, setup_integration: ComponentSetup, connection ) -> None: """Test reauth.""" await setup_integration() - result = await config_entry.start_reauth_flow(hass) + entry = hass.config_entries.async_entries(DOMAIN)[0] + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + CONF_SOURCE: SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=CONF_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure( @@ -119,4 +123,4 @@ async def test_flow_reauth( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" - assert config_entry.data[CONF_API_KEY] == "abc123" + assert 
entry.data[CONF_API_KEY] == "abc123" diff --git a/tests/components/lidarr/test_sensor.py b/tests/components/lidarr/test_sensor.py index 716df21303a..0c19355a252 100644 --- a/tests/components/lidarr/test_sensor.py +++ b/tests/components/lidarr/test_sensor.py @@ -25,14 +25,10 @@ async def test_sensors( assert state.state == "2" assert state.attributes.get("string") == "stopped" assert state.attributes.get("string2") == "downloading" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "albums" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Albums" assert state.attributes.get(CONF_STATE_CLASS) == SensorStateClass.TOTAL state = hass.states.get("sensor.mock_title_wanted") assert state.state == "1" assert state.attributes.get("test") == "test" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "albums" - assert state.attributes.get(CONF_STATE_CLASS) == SensorStateClass.TOTAL - state = hass.states.get("sensor.mock_title_albums") - assert state.state == "1" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "albums" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Albums" assert state.attributes.get(CONF_STATE_CLASS) == SensorStateClass.TOTAL diff --git a/tests/components/lifx/__init__.py b/tests/components/lifx/__init__.py index 81b913da6ce..505d212a352 100644 --- a/tests/components/lifx/__init__.py +++ b/tests/components/lifx/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations import asyncio from contextlib import contextmanager -from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch from aiolifx.aiolifx import Light @@ -26,7 +25,7 @@ DEFAULT_ENTRY_TITLE = LABEL class MockMessage: """Mock a lifx message.""" - def __init__(self, **kwargs: Any) -> None: + def __init__(self, **kwargs): """Init message.""" self.target_addr = SERIAL self.count = 9 @@ -38,7 +37,7 @@ class MockMessage: class MockFailingLifxCommand: """Mock a lifx command that fails.""" - def __init__(self, bulb, **kwargs: Any) -> None: + def __init__(self, bulb, **kwargs): """Init command.""" self.bulb = bulb self.calls = [] @@ -61,17 +60,14 @@ class MockLifxCommand: """Return name.""" return "mock_lifx_command" - def __init__(self, bulb, **kwargs: Any) -> None: + def __init__(self, bulb, **kwargs): """Init command.""" self.bulb = bulb self.calls = [] - self.msg_kwargs = { - k.removeprefix("msg_"): v for k, v in kwargs.items() if k.startswith("msg_") - } + self.msg_kwargs = kwargs for k, v in kwargs.items(): - if k.startswith("msg_") or k == "callb": - continue - setattr(self.bulb, k, v) + if k != "callb": + setattr(self.bulb, k, v) def __call__(self, *args, **kwargs): """Call command.""" @@ -159,16 +155,9 @@ def _mocked_infrared_bulb() -> Light: def _mocked_light_strip() -> Light: bulb = _mocked_bulb() bulb.product = 31 # LIFX Z - bulb.zones_count = 3 - bulb.color_zones = [MagicMock()] * 3 + bulb.color_zones = [MagicMock(), MagicMock()] bulb.effect = {"effect": "MOVE", "speed": 3, "duration": 0, "direction": "RIGHT"} - bulb.get_color_zones = MockLifxCommand( - bulb, - msg_seq_num=bulb.seq_next(), - msg_count=bulb.zones_count, - msg_index=0, - msg_color=bulb.color_zones, - ) + bulb.get_color_zones = MockLifxCommand(bulb) bulb.set_color_zones = MockLifxCommand(bulb) bulb.get_multizone_effect = MockLifxCommand(bulb) bulb.set_multizone_effect = MockLifxCommand(bulb) @@ -183,19 +172,6 @@ def _mocked_tile() -> Light: bulb.effect = {"effect": "OFF"} bulb.get_tile_effect = MockLifxCommand(bulb) bulb.set_tile_effect = MockLifxCommand(bulb) - bulb.get64 = 
MockLifxCommand(bulb) - bulb.get_device_chain = MockLifxCommand(bulb) - return bulb - - -def _mocked_ceiling() -> Light: - bulb = _mocked_bulb() - bulb.product = 176 # LIFX Ceiling - bulb.effect = {"effect": "OFF"} - bulb.get_tile_effect = MockLifxCommand(bulb) - bulb.set_tile_effect = MockLifxCommand(bulb) - bulb.get64 = MockLifxCommand(bulb) - bulb.get_device_chain = MockLifxCommand(bulb) return bulb @@ -223,7 +199,7 @@ def _patch_device(device: Light | None = None, no_device: bool = False): class MockLifxConnecton: """Mock lifx discovery.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Init connection.""" if no_device: self.device = _mocked_failing_bulb() @@ -251,7 +227,7 @@ def _patch_discovery(device: Light | None = None, no_device: bool = False): class MockLifxDiscovery: """Mock lifx discovery.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Init discovery.""" if no_device: self.lights = {} @@ -287,7 +263,7 @@ def _patch_config_flow_try_connect( class MockLifxConnection: """Mock lifx discovery.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Init connection.""" if no_device: self.device = _mocked_failing_bulb() diff --git a/tests/components/lifx/conftest.py b/tests/components/lifx/conftest.py index e4a5f303f61..093f2309e53 100644 --- a/tests/components/lifx/conftest.py +++ b/tests/components/lifx/conftest.py @@ -1,6 +1,5 @@ """Tests for the lifx integration.""" -from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -9,6 +8,8 @@ from homeassistant.components.lifx import config_flow, coordinator, util from . import _patch_discovery +from tests.common import mock_device_registry, mock_registry + @pytest.fixture def mock_discovery(): @@ -22,7 +23,7 @@ def mock_effect_conductor(): """Mock the effect conductor.""" class MockConductor: - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs) -> None: """Mock the conductor.""" self.start = AsyncMock() self.stop = AsyncMock() @@ -60,3 +61,15 @@ def lifx_mock_async_get_ipv4_broadcast_addresses(): return_value=["255.255.255.255"], ): yield + + +@pytest.fixture(name="device_reg") +def device_reg_fixture(hass): + """Return an empty, loaded, registry.""" + return mock_device_registry(hass) + + +@pytest.fixture(name="entity_reg") +def entity_reg_fixture(hass): + """Return an empty, loaded, registry.""" + return mock_registry(hass) diff --git a/tests/components/lifx/test_config_flow.py b/tests/components/lifx/test_config_flow.py index d1a6920f84a..59b7090788a 100644 --- a/tests/components/lifx/test_config_flow.py +++ b/tests/components/lifx/test_config_flow.py @@ -2,7 +2,6 @@ from ipaddress import ip_address import socket -from typing import Any from unittest.mock import patch import pytest @@ -10,7 +9,6 @@ import pytest from homeassistant import config_entries from homeassistant.components import dhcp, zeroconf from homeassistant.components.lifx import DOMAIN -from homeassistant.components.lifx.config_flow import LifXConfigFlow from homeassistant.components.lifx.const import CONF_SERIAL from homeassistant.const import CONF_DEVICE, CONF_HOST from homeassistant.core import HomeAssistant @@ -290,7 +288,7 @@ async def test_manual_dns_error(hass: HomeAssistant) -> None: class MockLifxConnectonDnsError: """Mock lifx discovery.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, 
**kwargs): """Init connection.""" self.device = _mocked_failing_bulb() @@ -370,18 +368,7 @@ async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_in_progress" - real_is_matching = LifXConfigFlow.is_matching - return_values = [] - - def is_matching(self, other_flow) -> bool: - return_values.append(real_is_matching(self, other_flow)) - return return_values[-1] - - with ( - _patch_discovery(), - _patch_config_flow_try_connect(), - patch.object(LifXConfigFlow, "is_matching", wraps=is_matching, autospec=True), - ): + with _patch_discovery(), _patch_config_flow_try_connect(): result3 = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, @@ -392,8 +379,6 @@ async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "already_in_progress" - # Ensure the is_matching method returned True - assert return_values == [True] with ( _patch_discovery(no_device=True), @@ -589,7 +574,7 @@ async def test_suggested_area( class MockLifxCommandGetGroup: """Mock the get_group method that gets the group name from the bulb.""" - def __init__(self, bulb, **kwargs: Any) -> None: + def __init__(self, bulb, **kwargs): """Init command.""" self.bulb = bulb self.lifx_group = kwargs.get("lifx_group") diff --git a/tests/components/lifx/test_diagnostics.py b/tests/components/lifx/test_diagnostics.py index 22e335612f8..e3588dd3ed1 100644 --- a/tests/components/lifx/test_diagnostics.py +++ b/tests/components/lifx/test_diagnostics.py @@ -9,7 +9,6 @@ from . import ( DEFAULT_ENTRY_TITLE, IP_ADDRESS, SERIAL, - MockLifxCommand, _mocked_bulb, _mocked_clean_bulb, _mocked_infrared_bulb, @@ -189,22 +188,6 @@ async def test_legacy_multizone_bulb_diagnostics( ) config_entry.add_to_hass(hass) bulb = _mocked_light_strip() - bulb.get_color_zones = MockLifxCommand( - bulb, - msg_seq_num=0, - msg_count=8, - msg_color=[ - (54612, 65535, 65535, 3500), - (54612, 65535, 65535, 3500), - (54612, 65535, 65535, 3500), - (54612, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - ], - msg_index=0, - ) bulb.zones_count = 8 bulb.color_zones = [ (54612, 65535, 65535, 3500), @@ -319,22 +302,6 @@ async def test_multizone_bulb_diagnostics( config_entry.add_to_hass(hass) bulb = _mocked_light_strip() bulb.product = 38 - bulb.get_color_zones = MockLifxCommand( - bulb, - msg_seq_num=0, - msg_count=8, - msg_color=[ - (54612, 65535, 65535, 3500), - (54612, 65535, 65535, 3500), - (54612, 65535, 65535, 3500), - (54612, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - ], - msg_index=0, - ) bulb.zones_count = 8 bulb.color_zones = [ (54612, 65535, 65535, 3500), diff --git a/tests/components/lifx/test_init.py b/tests/components/lifx/test_init.py index 66adc54704e..42ece68a2c5 100644 --- a/tests/components/lifx/test_init.py +++ b/tests/components/lifx/test_init.py @@ -4,7 +4,6 @@ from __future__ import annotations from datetime import timedelta import socket -from typing import Any from unittest.mock import patch import pytest @@ -38,7 +37,7 @@ async def test_configuring_lifx_causes_discovery(hass: HomeAssistant) -> None: class MockLifxDiscovery: """Mock lifx discovery.""" - def __init__(self, *args: 
Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Init discovery.""" discovered = _mocked_bulb() self.lights = {discovered.mac_addr: discovered} @@ -138,7 +137,7 @@ async def test_dns_error_at_startup(hass: HomeAssistant) -> None: class MockLifxConnectonDnsError: """Mock lifx connection with a dns error.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Init connection.""" self.device = bulb diff --git a/tests/components/lifx/test_light.py b/tests/components/lifx/test_light.py index 1ce7c69d7fa..56630053cc0 100644 --- a/tests/components/lifx/test_light.py +++ b/tests/components/lifx/test_light.py @@ -1,7 +1,6 @@ """Tests for the lifx integration light platform.""" from datetime import timedelta -from typing import Any from unittest.mock import patch import aiolifx_effects @@ -12,19 +11,15 @@ from homeassistant.components.lifx import DOMAIN from homeassistant.components.lifx.const import ATTR_POWER from homeassistant.components.lifx.light import ATTR_INFRARED, ATTR_ZONES from homeassistant.components.lifx.manager import ( - ATTR_CLOUD_SATURATION_MAX, - ATTR_CLOUD_SATURATION_MIN, ATTR_DIRECTION, ATTR_PALETTE, ATTR_SATURATION_MAX, ATTR_SATURATION_MIN, - ATTR_SKY_TYPE, ATTR_SPEED, ATTR_THEME, SERVICE_EFFECT_COLORLOOP, SERVICE_EFFECT_MORPH, SERVICE_EFFECT_MOVE, - SERVICE_EFFECT_SKY, ) from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -67,7 +62,6 @@ from . import ( _mocked_brightness_bulb, _mocked_bulb, _mocked_bulb_new_firmware, - _mocked_ceiling, _mocked_clean_bulb, _mocked_light_strip, _mocked_tile, @@ -192,7 +186,15 @@ async def test_light_strip(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100}, blocking=True, ) - assert len(bulb.set_color_zones.calls) == 0 + call_dict = bulb.set_color_zones.calls[0][1] + call_dict.pop("callb") + assert call_dict == { + "apply": 0, + "color": [], + "duration": 0, + "end_index": 0, + "start_index": 0, + } bulb.set_color_zones.reset_mock() await hass.services.async_call( @@ -201,7 +203,15 @@ async def test_light_strip(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: entity_id, ATTR_HS_COLOR: (10, 30)}, blocking=True, ) - assert len(bulb.set_color_zones.calls) == 0 + call_dict = bulb.set_color_zones.calls[0][1] + call_dict.pop("callb") + assert call_dict == { + "apply": 0, + "color": [], + "duration": 0, + "end_index": 0, + "start_index": 0, + } bulb.set_color_zones.reset_mock() bulb.color_zones = [ @@ -222,7 +232,7 @@ async def test_light_strip(hass: HomeAssistant) -> None: blocking=True, ) # Single color uses the fast path - assert bulb.set_color.calls[1][0][0] == [1820, 19660, 65535, 3500] + assert bulb.set_color.calls[0][0][0] == [1820, 19660, 65535, 3500] bulb.set_color.reset_mock() assert len(bulb.set_color_zones.calls) == 0 @@ -406,9 +416,7 @@ async def test_light_strip(hass: HomeAssistant) -> None: blocking=True, ) - bulb.get_color_zones = MockLifxCommand( - bulb, msg_seq_num=0, msg_color=[0, 0, 65535, 3500] * 3, msg_index=0, msg_count=3 - ) + bulb.get_color_zones = MockLifxCommand(bulb) bulb.get_color = MockFailingLifxCommand(bulb) with pytest.raises(HomeAssistantError): @@ -573,14 +581,14 @@ async def test_extended_multizone_messages(hass: HomeAssistant) -> None: bulb.set_extended_color_zones.reset_mock() bulb.color_zones = [ - [0, 65535, 65535, 3500], - [54612, 65535, 65535, 3500], - [54612, 65535, 65535, 3500], - [54612, 65535, 65535, 3500], - [46420, 65535, 65535, 3500], - [46420, 65535, 65535, 3500], - [46420, 65535, 65535, 3500], - 
[46420, 65535, 65535, 3500], + (0, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), ] await hass.services.async_call( @@ -683,7 +691,6 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: entity_id = "light.my_bulb" - # FLAME effect test await hass.services.async_call( LIGHT_DOMAIN, "turn_on", @@ -700,15 +707,11 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: "effect": 3, "speed": 3, "palette": [], - "sky_type": None, - "cloud_saturation_min": None, - "cloud_saturation_max": None, } bulb.get_tile_effect.reset_mock() bulb.set_tile_effect.reset_mock() bulb.set_power.reset_mock() - # MORPH effect tests bulb.power_level = 0 await hass.services.async_call( DOMAIN, @@ -747,9 +750,6 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: (8920, 65535, 32768, 3500), (10558, 65535, 32768, 3500), ], - "sky_type": None, - "cloud_saturation_min": None, - "cloud_saturation_max": None, } bulb.get_tile_effect.reset_mock() bulb.set_tile_effect.reset_mock() @@ -808,140 +808,6 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: (43690, 65535, 65535, 3500), (54613, 65535, 65535, 3500), ], - "sky_type": None, - "cloud_saturation_min": None, - "cloud_saturation_max": None, - } - bulb.get_tile_effect.reset_mock() - bulb.set_tile_effect.reset_mock() - bulb.set_power.reset_mock() - - -@pytest.mark.usefixtures("mock_discovery") -async def test_sky_effect(hass: HomeAssistant) -> None: - """Test the firmware sky effect on a ceiling device.""" - config_entry = MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=SERIAL - ) - config_entry.add_to_hass(hass) - bulb = _mocked_ceiling() - bulb.power_level = 0 - bulb.color = [65535, 65535, 65535, 65535] - with ( - _patch_discovery(device=bulb), - _patch_config_flow_try_connect(device=bulb), - _patch_device(device=bulb), - ): - await async_setup_component(hass, lifx.DOMAIN, {lifx.DOMAIN: {}}) - await hass.async_block_till_done() - - entity_id = "light.my_bulb" - - # SKY effect test - bulb.power_level = 0 - await hass.services.async_call( - DOMAIN, - SERVICE_EFFECT_SKY, - { - ATTR_ENTITY_ID: entity_id, - ATTR_PALETTE: [], - ATTR_SKY_TYPE: "Clouds", - ATTR_CLOUD_SATURATION_MAX: 180, - ATTR_CLOUD_SATURATION_MIN: 50, - }, - blocking=True, - ) - - bulb.power_level = 65535 - bulb.effect = { - "effect": "SKY", - "palette": [], - "sky_type": 2, - "cloud_saturation_min": 50, - "cloud_saturation_max": 180, - } - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) - await hass.async_block_till_done(wait_background_tasks=True) - - state = hass.states.get(entity_id) - assert state.state == STATE_ON - - assert len(bulb.set_power.calls) == 1 - assert len(bulb.set_tile_effect.calls) == 1 - call_dict = bulb.set_tile_effect.calls[0][1] - call_dict.pop("callb") - assert call_dict == { - "effect": 5, - "speed": 50, - "palette": [], - "sky_type": 2, - "cloud_saturation_min": 50, - "cloud_saturation_max": 180, - } - bulb.get_tile_effect.reset_mock() - bulb.set_tile_effect.reset_mock() - bulb.set_power.reset_mock() - - bulb.power_level = 0 - await hass.services.async_call( - DOMAIN, - SERVICE_EFFECT_SKY, - { - ATTR_ENTITY_ID: entity_id, - ATTR_PALETTE: [ - (200, 100, 1, 3500), - (241, 100, 1, 3500), - (189, 100, 8, 3500), - (40, 100, 100, 3500), - (40, 50, 100, 3500), - (0, 0, 
100, 6500), - ], - ATTR_SKY_TYPE: "Sunrise", - ATTR_CLOUD_SATURATION_MAX: 180, - ATTR_CLOUD_SATURATION_MIN: 50, - }, - blocking=True, - ) - - bulb.power_level = 65535 - bulb.effect = { - "effect": "SKY", - "palette": [ - (200, 100, 1, 3500), - (241, 100, 1, 3500), - (189, 100, 8, 3500), - (40, 100, 100, 3500), - (40, 50, 100, 3500), - (0, 0, 100, 6500), - ], - "sky_type": 0, - "cloud_saturation_min": 50, - "cloud_saturation_max": 180, - } - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) - await hass.async_block_till_done(wait_background_tasks=True) - - state = hass.states.get(entity_id) - assert state.state == STATE_ON - - assert len(bulb.set_power.calls) == 1 - assert len(bulb.set_tile_effect.calls) == 1 - call_dict = bulb.set_tile_effect.calls[0][1] - call_dict.pop("callb") - assert call_dict == { - "effect": 5, - "speed": 50, - "palette": [ - (36408, 65535, 65535, 3500), - (43872, 65535, 65535, 3500), - (34406, 65535, 5243, 3500), - (7281, 65535, 65535, 3500), - (7281, 32768, 65535, 3500), - (0, 0, 65535, 6500), - ], - "sky_type": 0, - "cloud_saturation_min": 50, - "cloud_saturation_max": 180, } bulb.get_tile_effect.reset_mock() bulb.set_tile_effect.reset_mock() @@ -1286,7 +1152,7 @@ async def test_config_zoned_light_strip_fails( class MockFailingLifxCommand: """Mock a lifx command that fails on the 2nd try.""" - def __init__(self, bulb, **kwargs: Any) -> None: + def __init__(self, bulb, **kwargs): """Init command.""" self.bulb = bulb self.call_count = 0 @@ -1294,11 +1160,7 @@ async def test_config_zoned_light_strip_fails( def __call__(self, callb=None, *args, **kwargs): """Call command.""" self.call_count += 1 - response = ( - None - if self.call_count >= 2 - else MockMessage(seq_num=0, color=[], index=0, count=0) - ) + response = None if self.call_count >= 2 else MockMessage() if callb: callb(self.bulb, response) @@ -1329,7 +1191,7 @@ async def test_legacy_zoned_light_strip( class MockPopulateLifxZonesCommand: """Mock populating the number of zones.""" - def __init__(self, bulb, **kwargs: Any) -> None: + def __init__(self, bulb, **kwargs): """Init command.""" self.bulb = bulb self.call_count = 0 @@ -1339,15 +1201,7 @@ async def test_legacy_zoned_light_strip( self.call_count += 1 self.bulb.color_zones = [None] * 12 if callb: - callb( - self.bulb, - MockMessage( - seq_num=0, - index=0, - count=self.bulb.zones_count, - color=self.bulb.color_zones, - ), - ) + callb(self.bulb, MockMessage()) get_color_zones_mock = MockPopulateLifxZonesCommand(light_strip) light_strip.get_color_zones = get_color_zones_mock @@ -1844,7 +1698,7 @@ async def test_color_bulb_is_actually_off(hass: HomeAssistant) -> None: class MockLifxCommandActuallyOff: """Mock a lifx command that will update our power level state.""" - def __init__(self, bulb, **kwargs: Any) -> None: + def __init__(self, bulb, **kwargs): """Init command.""" self.bulb = bulb self.calls = [] @@ -1944,33 +1798,6 @@ async def test_light_strip_zones_not_populated_yet(hass: HomeAssistant) -> None: bulb.power_level = 65535 bulb.color_zones = None bulb.color = [65535, 65535, 65535, 65535] - bulb.get_color_zones = next( - iter( - [ - MockLifxCommand( - bulb, - msg_seq_num=0, - msg_color=[0, 0, 65535, 3500] * 8, - msg_index=0, - msg_count=16, - ), - MockLifxCommand( - bulb, - msg_seq_num=1, - msg_color=[0, 0, 65535, 3500] * 8, - msg_index=0, - msg_count=16, - ), - MockLifxCommand( - bulb, - msg_seq_num=2, - msg_color=[0, 0, 65535, 3500] * 8, - msg_index=8, - msg_count=16, - ), - ] - ) - ) assert bulb.get_color_zones.calls == [] 
with ( diff --git a/tests/components/lifx/test_migration.py b/tests/components/lifx/test_migration.py index f984acce238..0604ee1c8a7 100644 --- a/tests/components/lifx/test_migration.py +++ b/tests/components/lifx/test_migration.py @@ -3,7 +3,6 @@ from __future__ import annotations from datetime import timedelta -from typing import Any from unittest.mock import patch from homeassistant import setup @@ -12,6 +11,8 @@ from homeassistant.components.lifx import DOMAIN, discovery from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STARTED from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.device_registry import DeviceRegistry +from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -30,22 +31,20 @@ from tests.common import MockConfigEntry, async_fire_time_changed async def test_migration_device_online_end_to_end( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, + hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry ) -> None: """Test migration from single config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, title="LEGACY", data={}, unique_id=DOMAIN ) config_entry.add_to_hass(hass) - device = device_registry.async_get_or_create( + device = device_reg.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={(DOMAIN, SERIAL)}, connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}, name=LABEL, ) - light_entity_reg = entity_registry.async_get_or_create( + light_entity_reg = entity_reg.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -68,7 +67,7 @@ async def test_migration_device_online_end_to_end( assert device.config_entries == {migrated_entry.entry_id} assert light_entity_reg.config_entry_id == migrated_entry.entry_id - assert er.async_entries_for_config_entry(entity_registry, config_entry) == [] + assert er.async_entries_for_config_entry(entity_reg, config_entry) == [] hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() @@ -85,22 +84,20 @@ async def test_migration_device_online_end_to_end( async def test_discovery_is_more_frequent_during_migration( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, + hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry ) -> None: """Test that discovery is more frequent during migration.""" config_entry = MockConfigEntry( domain=DOMAIN, title="LEGACY", data={}, unique_id=DOMAIN ) config_entry.add_to_hass(hass) - device = device_registry.async_get_or_create( + device = device_reg.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={(DOMAIN, SERIAL)}, connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}, name=LABEL, ) - entity_registry.async_get_or_create( + entity_reg.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -115,7 +112,7 @@ async def test_discovery_is_more_frequent_during_migration( class MockLifxDiscovery: """Mock lifx discovery.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Init discovery.""" self.bulb = bulb self.lights = {} @@ -163,9 +160,7 @@ async def test_discovery_is_more_frequent_during_migration( async def test_migration_device_online_end_to_end_after_downgrade( - hass: 
HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, + hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry ) -> None: """Test migration from single config entry can happen again after a downgrade.""" config_entry = MockConfigEntry( @@ -177,13 +172,13 @@ async def test_migration_device_online_end_to_end_after_downgrade( domain=DOMAIN, data={CONF_HOST: IP_ADDRESS}, unique_id=SERIAL ) already_migrated_config_entry.add_to_hass(hass) - device = device_registry.async_get_or_create( + device = device_reg.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={(DOMAIN, SERIAL)}, connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}, name=LABEL, ) - light_entity_reg = entity_registry.async_get_or_create( + light_entity_reg = entity_reg.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -202,7 +197,7 @@ async def test_migration_device_online_end_to_end_after_downgrade( assert device.config_entries == {config_entry.entry_id} assert light_entity_reg.config_entry_id == config_entry.entry_id - assert er.async_entries_for_config_entry(entity_registry, config_entry) == [] + assert er.async_entries_for_config_entry(entity_reg, config_entry) == [] legacy_entry = None for entry in hass.config_entries.async_entries(DOMAIN): @@ -214,9 +209,7 @@ async def test_migration_device_online_end_to_end_after_downgrade( async def test_migration_device_online_end_to_end_ignores_other_devices( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, + hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry ) -> None: """Test migration from single config entry.""" legacy_config_entry = MockConfigEntry( @@ -228,18 +221,18 @@ async def test_migration_device_online_end_to_end_ignores_other_devices( domain="other_domain", data={}, unique_id="other_domain" ) other_domain_config_entry.add_to_hass(hass) - device = device_registry.async_get_or_create( + device = device_reg.async_get_or_create( config_entry_id=legacy_config_entry.entry_id, identifiers={(DOMAIN, SERIAL)}, connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}, name=LABEL, ) - other_device = device_registry.async_get_or_create( + other_device = device_reg.async_get_or_create( config_entry_id=other_domain_config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "556655665566")}, name=LABEL, ) - light_entity_reg = entity_registry.async_get_or_create( + light_entity_reg = entity_reg.async_get_or_create( config_entry=legacy_config_entry, platform=DOMAIN, domain="light", @@ -247,7 +240,7 @@ async def test_migration_device_online_end_to_end_ignores_other_devices( original_name=LABEL, device_id=device.id, ) - ignored_entity_reg = entity_registry.async_get_or_create( + ignored_entity_reg = entity_reg.async_get_or_create( config_entry=other_domain_config_entry, platform=DOMAIN, domain="sensor", @@ -255,7 +248,7 @@ async def test_migration_device_online_end_to_end_ignores_other_devices( original_name=LABEL, device_id=device.id, ) - garbage_entity_reg = entity_registry.async_get_or_create( + garbage_entity_reg = entity_reg.async_get_or_create( config_entry=legacy_config_entry, platform=DOMAIN, domain="sensor", @@ -288,11 +281,5 @@ async def test_migration_device_online_end_to_end_ignores_other_devices( assert ignored_entity_reg.config_entry_id == other_domain_config_entry.entry_id assert garbage_entity_reg.config_entry_id == legacy_config_entry.entry_id - assert ( - 
er.async_entries_for_config_entry(entity_registry, legacy_config_entry) - == [] - ) - assert ( - dr.async_entries_for_config_entry(device_registry, legacy_config_entry) - == [] - ) + assert er.async_entries_for_config_entry(entity_reg, legacy_config_entry) == [] + assert dr.async_entries_for_config_entry(device_reg, legacy_config_entry) == [] diff --git a/tests/components/light/common.py b/tests/components/light/common.py index ba095a03642..4c3e95b5ef9 100644 --- a/tests/components/light/common.py +++ b/tests/components/light/common.py @@ -33,7 +33,6 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass from tests.common import MockToggleEntity @@ -41,24 +40,24 @@ from tests.common import MockToggleEntity @bind_hass def turn_on( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - brightness: int | None = None, - brightness_pct: float | None = None, - rgb_color: tuple[int, int, int] | None = None, - rgbw_color: tuple[int, int, int, int] | None = None, - rgbww_color: tuple[int, int, int, int, int] | None = None, - xy_color: tuple[float, float] | None = None, - hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, - profile: str | None = None, - flash: str | None = None, - effect: str | None = None, - color_name: str | None = None, - white: bool | None = None, -) -> None: + hass, + entity_id=ENTITY_MATCH_ALL, + transition=None, + brightness=None, + brightness_pct=None, + rgb_color=None, + rgbw_color=None, + rgbww_color=None, + xy_color=None, + hs_color=None, + color_temp=None, + kelvin=None, + profile=None, + flash=None, + effect=None, + color_name=None, + white=None, +): """Turn all or specified light on.""" hass.add_job( async_turn_on, @@ -83,24 +82,24 @@ def turn_on( async def async_turn_on( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - brightness: int | None = None, - brightness_pct: float | None = None, - rgb_color: tuple[int, int, int] | None = None, - rgbw_color: tuple[int, int, int, int] | None = None, - rgbww_color: tuple[int, int, int, int, int] | None = None, - xy_color: tuple[float, float] | None = None, - hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, - profile: str | None = None, - flash: str | None = None, - effect: str | None = None, - color_name: str | None = None, - white: int | None = None, -) -> None: + hass, + entity_id=ENTITY_MATCH_ALL, + transition=None, + brightness=None, + brightness_pct=None, + rgb_color=None, + rgbw_color=None, + rgbww_color=None, + xy_color=None, + hs_color=None, + color_temp=None, + kelvin=None, + profile=None, + flash=None, + effect=None, + color_name=None, + white=None, +): """Turn all or specified light on.""" data = { key: value @@ -129,22 +128,12 @@ async def async_turn_on( @bind_hass -def turn_off( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - flash: str | None = None, -) -> None: +def turn_off(hass, entity_id=ENTITY_MATCH_ALL, transition=None, flash=None): """Turn all or specified light off.""" hass.add_job(async_turn_off, hass, entity_id, transition, flash) -async def async_turn_off( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - flash: str | None = None, -) -> None: +async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL, 
transition=None, flash=None): """Turn all or specified light off.""" data = { key: value @@ -161,21 +150,21 @@ async def async_turn_off( @bind_hass def toggle( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - brightness: int | None = None, - brightness_pct: float | None = None, - rgb_color: tuple[int, int, int] | None = None, - xy_color: tuple[float, float] | None = None, - hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, - profile: str | None = None, - flash: str | None = None, - effect: str | None = None, - color_name: str | None = None, -) -> None: + hass, + entity_id=ENTITY_MATCH_ALL, + transition=None, + brightness=None, + brightness_pct=None, + rgb_color=None, + xy_color=None, + hs_color=None, + color_temp=None, + kelvin=None, + profile=None, + flash=None, + effect=None, + color_name=None, +): """Toggle all or specified light.""" hass.add_job( async_toggle, @@ -197,21 +186,21 @@ def toggle( async def async_toggle( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - brightness: int | None = None, - brightness_pct: float | None = None, - rgb_color: tuple[int, int, int] | None = None, - xy_color: tuple[float, float] | None = None, - hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, - profile: str | None = None, - flash: str | None = None, - effect: str | None = None, - color_name: str | None = None, -) -> None: + hass, + entity_id=ENTITY_MATCH_ALL, + transition=None, + brightness=None, + brightness_pct=None, + rgb_color=None, + xy_color=None, + hs_color=None, + color_temp=None, + kelvin=None, + profile=None, + flash=None, + effect=None, + color_name=None, +): """Turn all or specified light on.""" data = { key: value diff --git a/tests/components/light/conftest.py b/tests/components/light/conftest.py index 58f2d23db95..12bd62edcb7 100644 --- a/tests/components/light/conftest.py +++ b/tests/components/light/conftest.py @@ -5,7 +5,6 @@ from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.light import Profiles -from homeassistant.core import HomeAssistant @pytest.fixture(autouse=True) @@ -13,7 +12,7 @@ def mock_light_profiles(): """Mock loading of profiles.""" data = {} - def mock_profiles_class(hass: HomeAssistant) -> Profiles: + def mock_profiles_class(hass): profiles = Profiles(hass) profiles.data = data profiles.async_initialize = AsyncMock() diff --git a/tests/components/light/test_device_action.py b/tests/components/light/test_device_action.py index c2ac7087cf0..8848ce19621 100644 --- a/tests/components/light/test_device_action.py +++ b/tests/components/light/test_device_action.py @@ -14,7 +14,7 @@ from homeassistant.components.light import ( LightEntityFeature, ) from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component @@ -32,6 +32,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def 
test_get_actions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -465,6 +471,7 @@ async def test_action( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -629,6 +636,7 @@ async def test_action_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) diff --git a/tests/components/light/test_device_condition.py b/tests/components/light/test_device_condition.py index 94e12ffbfa5..11dea49ea60 100644 --- a/tests/components/light/test_device_condition.py +++ b/tests/components/light/test_device_condition.py @@ -22,6 +22,7 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, setup_test_component_platform, ) @@ -31,6 +32,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -179,7 +186,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -245,20 +252,20 @@ async def test_if_state( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_on event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "is_off event - test_event2" + assert len(calls) == 2 + assert calls[1].data["some"] == "is_off event - test_event2" @pytest.mark.usefixtures("enable_custom_integrations") @@ -266,7 +273,7 @@ async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -311,20 +318,20 @@ async def test_if_state_legacy( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_on event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_on event - test_event1" async def test_if_fires_on_for_condition( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - 
service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_light_entities: list[MockLight], ) -> None: """Test for firing if condition is on with delay.""" @@ -378,26 +385,26 @@ async def test_if_fires_on_for_condition( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 # Time travel 10 secs into the future freezer.move_to(point2) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 # Time travel 20 secs into the future freezer.move_to(point3) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_off event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_off event - test_event1" diff --git a/tests/components/light/test_device_trigger.py b/tests/components/light/test_device_trigger.py index 4e8414edabc..ab3babd1b64 100644 --- a/tests/components/light/test_device_trigger.py +++ b/tests/components/light/test_device_trigger.py @@ -20,6 +20,7 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, ) DATA_TEMPLATE_ATTRIBUTES = ( @@ -36,6 +37,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -182,7 +189,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -251,20 +258,20 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { + assert len(calls) == 2 + assert {calls[0].data["some"], calls[1].data["some"]} == { f"turn_off device - {entry.entity_id} - on - off - None", f"turn_on_or_off device - {entry.entity_id} - on - off - None", } hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(service_calls) == 4 - assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { + assert len(calls) == 4 + assert {calls[2].data["some"], calls[3].data["some"]} == { f"turn_on device - {entry.entity_id} - off - on - None", f"turn_on_or_off device - {entry.entity_id} - off - on - None", } @@ -275,7 +282,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for 
turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -314,14 +321,13 @@ async def test_if_fires_on_state_change_legacy( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] - == f"turn_on device - {entry.entity_id} - on - off - None" + calls[0].data["some"] == f"turn_on device - {entry.entity_id} - on - off - None" ) @@ -330,7 +336,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -370,16 +376,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await hass.async_block_till_done() assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/light/test_recorder.py b/tests/components/light/test_recorder.py index f3f87ff6074..49c9a567856 100644 --- a/tests/components/light/test_recorder.py +++ b/tests/components/light/test_recorder.py @@ -9,23 +9,12 @@ import pytest from homeassistant.components import light from homeassistant.components.light import ( - ATTR_BRIGHTNESS, - ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, - ATTR_COLOR_TEMP_KELVIN, - ATTR_EFFECT, ATTR_EFFECT_LIST, - ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_MAX_MIREDS, ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_MIN_MIREDS, - ATTR_RGB_COLOR, - ATTR_RGBW_COLOR, - ATTR_RGBWW_COLOR, ATTR_SUPPORTED_COLOR_MODES, - ATTR_XY_COLOR, - DOMAIN, ) from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states @@ -61,7 +50,7 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) await async_wait_recording_done(hass) states = await hass.async_add_executor_job( - get_significant_states, hass, now, None, hass.states.async_entity_ids(DOMAIN) + get_significant_states, hass, now, None, hass.states.async_entity_ids() ) assert len(states) >= 1 for entity_states in states.values(): @@ -73,13 +62,3 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) assert ATTR_FRIENDLY_NAME in state.attributes assert ATTR_MAX_COLOR_TEMP_KELVIN not in state.attributes assert ATTR_MIN_COLOR_TEMP_KELVIN not in state.attributes - assert ATTR_BRIGHTNESS not in state.attributes - assert ATTR_COLOR_MODE not in state.attributes - assert ATTR_COLOR_TEMP not in state.attributes - assert ATTR_COLOR_TEMP_KELVIN not in state.attributes - assert ATTR_EFFECT not in state.attributes - assert ATTR_HS_COLOR not in state.attributes - assert ATTR_RGB_COLOR not in state.attributes - assert ATTR_RGBW_COLOR not in state.attributes - assert ATTR_RGBWW_COLOR not in state.attributes - assert ATTR_XY_COLOR not in state.attributes diff --git 
a/tests/components/linear_garage_door/conftest.py b/tests/components/linear_garage_door/conftest.py index 4ed7662e5d0..306da23ebf9 100644 --- a/tests/components/linear_garage_door/conftest.py +++ b/tests/components/linear_garage_door/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Linear Garage Door tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.linear_garage_door import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr index c689d04949a..2543ca42156 100644 --- a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr +++ b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr @@ -63,8 +63,6 @@ 'site_id': 'test-site-id', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'linear_garage_door', 'entry_id': 'acefdd4b3a4a0911067d1cf51414201e', 'minor_version': 1, diff --git a/tests/components/linear_garage_door/test_config_flow.py b/tests/components/linear_garage_door/test_config_flow.py index 64bdc589194..4599bd24aef 100644 --- a/tests/components/linear_garage_door/test_config_flow.py +++ b/tests/components/linear_garage_door/test_config_flow.py @@ -6,7 +6,7 @@ from linear_garage_door.errors import InvalidLoginError import pytest from homeassistant.components.linear_garage_door.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -61,7 +61,16 @@ async def test_reauth( ) -> None: """Test reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + "title_placeholders": {"name": mock_config_entry.title}, + "unique_id": mock_config_entry.unique_id, + }, + data=mock_config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/linear_garage_door/test_cover.py b/tests/components/linear_garage_door/test_cover.py index be5ae8f35f7..f4593ff4d60 100644 --- a/tests/components/linear_garage_door/test_cover.py +++ b/tests/components/linear_garage_door/test_cover.py @@ -10,10 +10,16 @@ from homeassistant.components.cover import ( DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, - CoverState, ) from homeassistant.components.linear_garage_door import DOMAIN -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -103,8 +109,8 @@ async def test_update_cover_state( await setup_integration(hass, mock_config_entry, [Platform.COVER]) - assert hass.states.get("cover.test_garage_1").state == CoverState.OPEN - assert hass.states.get("cover.test_garage_2").state == CoverState.CLOSED + assert hass.states.get("cover.test_garage_1").state == STATE_OPEN + assert hass.states.get("cover.test_garage_2").state == 
STATE_CLOSED device_states = load_json_object_fixture("get_device_state_1.json", DOMAIN) mock_linear.get_device_state.side_effect = lambda device_id: device_states[ @@ -114,5 +120,5 @@ async def test_update_cover_state( freezer.tick(timedelta(seconds=60)) async_fire_time_changed(hass) - assert hass.states.get("cover.test_garage_1").state == CoverState.CLOSING - assert hass.states.get("cover.test_garage_2").state == CoverState.OPENING + assert hass.states.get("cover.test_garage_1").state == STATE_CLOSING + assert hass.states.get("cover.test_garage_2").state == STATE_OPENING diff --git a/tests/components/linear_garage_door/test_diagnostics.py b/tests/components/linear_garage_door/test_diagnostics.py index a00feed43ff..6bf7415bde5 100644 --- a/tests/components/linear_garage_door/test_diagnostics.py +++ b/tests/components/linear_garage_door/test_diagnostics.py @@ -3,7 +3,6 @@ from unittest.mock import AsyncMock from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -26,4 +25,4 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot diff --git a/tests/components/linkplay/__init__.py b/tests/components/linkplay/__init__.py deleted file mode 100644 index f825826f196..00000000000 --- a/tests/components/linkplay/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Tests for the LinkPlay integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/linkplay/conftest.py b/tests/components/linkplay/conftest.py deleted file mode 100644 index 81ae993f6c3..00000000000 --- a/tests/components/linkplay/conftest.py +++ /dev/null @@ -1,107 +0,0 @@ -"""Test configuration and mocks for LinkPlay component.""" - -from collections.abc import Generator, Iterator -from contextlib import contextmanager -from typing import Any -from unittest import mock -from unittest.mock import AsyncMock, patch - -from aiohttp import ClientSession -from linkplay.bridge import LinkPlayBridge, LinkPlayDevice -import pytest - -from homeassistant.components.linkplay.const import DOMAIN -from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_CLOSE -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry, load_fixture -from tests.conftest import AiohttpClientMocker - -HOST = "10.0.0.150" -HOST_REENTRY = "10.0.0.66" -UUID = "FF31F09E-5001-FBDE-0546-2DBFFF31F09E" -NAME = "Smart Zone 1_54B9" - - -@pytest.fixture -def mock_linkplay_factory_bridge() -> Generator[AsyncMock]: - """Mock for linkplay_factory_httpapi_bridge.""" - - with ( - patch( - "homeassistant.components.linkplay.config_flow.async_get_client_session", - return_value=AsyncMock(spec=ClientSession), - ), - patch( - "homeassistant.components.linkplay.config_flow.linkplay_factory_httpapi_bridge", - ) as conf_factory, - ): - bridge = AsyncMock(spec=LinkPlayBridge) - bridge.endpoint = HOST - bridge.device = AsyncMock(spec=LinkPlayDevice) - bridge.device.uuid = UUID - bridge.device.name = NAME - conf_factory.return_value = bridge - yield conf_factory - - -@pytest.fixture 
-def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.linkplay.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title=NAME, - data={CONF_HOST: HOST}, - unique_id=UUID, - ) - - -@pytest.fixture -def mock_player_ex( - mock_player_ex: AsyncMock, -) -> AsyncMock: - """Mock a update_status of the LinkPlayPlayer.""" - mock_player_ex.return_value = load_fixture("getPlayerEx.json", DOMAIN) - return mock_player_ex - - -@pytest.fixture -def mock_status_ex( - mock_status_ex: AsyncMock, -) -> AsyncMock: - """Mock a update_status of the LinkPlayDevice.""" - mock_status_ex.return_value = load_fixture("getStatusEx.json", DOMAIN) - return mock_status_ex - - -@contextmanager -def mock_lp_aiohttp_client() -> Iterator[AiohttpClientMocker]: - """Context manager to mock aiohttp client.""" - mocker = AiohttpClientMocker() - - def create_session(hass: HomeAssistant, *args: Any, **kwargs: Any) -> ClientSession: - session = mocker.create_session(hass.loop) - - async def close_session(event): - """Close session.""" - await session.close() - - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, close_session) - - return session - - with mock.patch( - "homeassistant.components.linkplay.async_get_client_session", - side_effect=create_session, - ): - yield mocker diff --git a/tests/components/linkplay/fixtures/getPlayerEx.json b/tests/components/linkplay/fixtures/getPlayerEx.json deleted file mode 100644 index 79d09f942df..00000000000 --- a/tests/components/linkplay/fixtures/getPlayerEx.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "type": "0", - "ch": "0", - "mode": "0", - "loop": "0", - "eq": "0", - "status": "stop", - "curpos": "0", - "offset_pts": "0", - "totlen": "0", - "Title": "", - "Artist": "", - "Album": "", - "alarmflag": "0", - "plicount": "0", - "plicurr": "0", - "vol": "80", - "mute": "0" -} diff --git a/tests/components/linkplay/fixtures/getStatusEx.json b/tests/components/linkplay/fixtures/getStatusEx.json deleted file mode 100644 index 17eda4aeee8..00000000000 --- a/tests/components/linkplay/fixtures/getStatusEx.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "uuid": "FF31F09E5001FBDE05462DBFFF31F09E", - "DeviceName": "Smart Zone 1_54B9", - "GroupName": "Smart Zone 1_54B9", - "ssid": "Smart Zone 1_54B9", - "language": "en_us", - "firmware": "4.6.415145", - "hardware": "A31", - "build": "release", - "project": "SMART_ZONE4_AMP", - "priv_prj": "SMART_ZONE4_AMP", - "project_build_name": "a31rakoit", - "Release": "20220427", - "temp_uuid": "97296CE38DE8CC3D", - "hideSSID": "1", - "SSIDStrategy": "2", - "branch": "A31_stable_4.6", - "group": "0", - "wmrm_version": "4.2", - "internet": "1", - "MAC": "00:22:6C:21:7F:1D", - "STA_MAC": "00:00:00:00:00:00", - "CountryCode": "CN", - "CountryRegion": "1", - "netstat": "0", - "essid": "", - "apcli0": "", - "eth2": "192.168.168.197", - "ra0": "10.10.10.254", - "eth_dhcp": "1", - "VersionUpdate": "0", - "NewVer": "0", - "set_dns_enable": "1", - "mcu_ver": "37", - "mcu_ver_new": "0", - "dsp_ver": "0", - "dsp_ver_new": "0", - "date": "2024:10:29", - "time": "17:13:22", - "tz": "1.0000", - "dst_enable": "1", - "region": "unknown", - "prompt_status": "1", - "iot_ver": "1.0.0", - "upnp_version": "1005", - "cap1": "0x305200", - "capability": "0x28e90b80", - "languages": "0x6", - "streams_all": "0x7bff7ffe", - "streams": 
"0x7b9831fe", - "external": "0x0", - "plm_support": "0x40152", - "preset_key": "10", - "spotify_active": "0", - "lbc_support": "0", - "privacy_mode": "0", - "WifiChannel": "11", - "RSSI": "0", - "BSSID": "", - "battery": "0", - "battery_percent": "0", - "securemode": "1", - "auth": "WPAPSKWPA2PSK", - "encry": "AES", - "upnp_uuid": "uuid:FF31F09E-5001-FBDE-0546-2DBFFF31F09E", - "uart_pass_port": "8899", - "communication_port": "8819", - "web_firmware_update_hide": "0", - "ignore_talkstart": "0", - "web_login_result": "-1", - "silenceOTATime": "", - "ignore_silenceOTATime": "1", - "new_tunein_preset_and_alarm": "1", - "iheartradio_new": "1", - "new_iheart_podcast": "1", - "tidal_version": "2.0", - "service_version": "1.0", - "ETH_MAC": "00:22:6C:21:7F:20", - "security": "https/2.0", - "security_version": "2.0" -} diff --git a/tests/components/linkplay/snapshots/test_diagnostics.ambr b/tests/components/linkplay/snapshots/test_diagnostics.ambr deleted file mode 100644 index d8c52a25649..00000000000 --- a/tests/components/linkplay/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,115 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics - dict({ - 'device_info': dict({ - 'device': dict({ - 'properties': dict({ - 'BSSID': '', - 'CountryCode': 'CN', - 'CountryRegion': '1', - 'DeviceName': 'Smart Zone 1_54B9', - 'ETH_MAC': '00:22:6C:21:7F:20', - 'GroupName': 'Smart Zone 1_54B9', - 'MAC': '00:22:6C:21:7F:1D', - 'NewVer': '0', - 'RSSI': '0', - 'Release': '20220427', - 'SSIDStrategy': '2', - 'STA_MAC': '00:00:00:00:00:00', - 'VersionUpdate': '0', - 'WifiChannel': '11', - 'apcli0': '', - 'auth': 'WPAPSKWPA2PSK', - 'battery': '0', - 'battery_percent': '0', - 'branch': 'A31_stable_4.6', - 'build': 'release', - 'cap1': '0x305200', - 'capability': '0x28e90b80', - 'communication_port': '8819', - 'date': '2024:10:29', - 'dsp_ver': '0', - 'dsp_ver_new': '0', - 'dst_enable': '1', - 'encry': 'AES', - 'essid': '', - 'eth2': '192.168.168.197', - 'eth_dhcp': '1', - 'external': '0x0', - 'firmware': '4.6.415145', - 'group': '0', - 'hardware': 'A31', - 'hideSSID': '1', - 'ignore_silenceOTATime': '1', - 'ignore_talkstart': '0', - 'iheartradio_new': '1', - 'internet': '1', - 'iot_ver': '1.0.0', - 'language': 'en_us', - 'languages': '0x6', - 'lbc_support': '0', - 'mcu_ver': '37', - 'mcu_ver_new': '0', - 'netstat': '0', - 'new_iheart_podcast': '1', - 'new_tunein_preset_and_alarm': '1', - 'plm_support': '0x40152', - 'preset_key': '10', - 'priv_prj': 'SMART_ZONE4_AMP', - 'privacy_mode': '0', - 'project': 'SMART_ZONE4_AMP', - 'project_build_name': 'a31rakoit', - 'prompt_status': '1', - 'ra0': '10.10.10.254', - 'region': 'unknown', - 'securemode': '1', - 'security': 'https/2.0', - 'security_version': '2.0', - 'service_version': '1.0', - 'set_dns_enable': '1', - 'silenceOTATime': '', - 'spotify_active': '0', - 'ssid': 'Smart Zone 1_54B9', - 'streams': '0x7b9831fe', - 'streams_all': '0x7bff7ffe', - 'temp_uuid': '97296CE38DE8CC3D', - 'tidal_version': '2.0', - 'time': '17:13:22', - 'tz': '1.0000', - 'uart_pass_port': '8899', - 'upnp_uuid': 'uuid:FF31F09E-5001-FBDE-0546-2DBFFF31F09E', - 'upnp_version': '1005', - 'uuid': 'FF31F09E5001FBDE05462DBFFF31F09E', - 'web_firmware_update_hide': '0', - 'web_login_result': '-1', - 'wmrm_version': '4.2', - }), - }), - 'endpoint': dict({ - 'endpoint': 'https://10.0.0.150', - }), - 'multiroom': None, - 'player': dict({ - 'properties': dict({ - 'Album': '', - 'Artist': '', - 'Title': '', - 'alarmflag': '0', - 'ch': '0', - 'curpos': '0', - 'eq': '0', - 'loop': '0', - 'mode': '0', - 
'mute': '0', - 'offset_pts': '0', - 'plicount': '0', - 'plicurr': '0', - 'status': 'stop', - 'totlen': '0', - 'type': '0', - 'vol': '80', - }), - }), - }), - }) -# --- diff --git a/tests/components/linkplay/test_config_flow.py b/tests/components/linkplay/test_config_flow.py deleted file mode 100644 index 3fd1fbea95e..00000000000 --- a/tests/components/linkplay/test_config_flow.py +++ /dev/null @@ -1,222 +0,0 @@ -"""Tests for the LinkPlay config flow.""" - -from ipaddress import ip_address -from unittest.mock import AsyncMock - -from linkplay.exceptions import LinkPlayRequestException -import pytest - -from homeassistant.components.linkplay.const import DOMAIN -from homeassistant.components.zeroconf import ZeroconfServiceInfo -from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF -from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .conftest import HOST, HOST_REENTRY, NAME, UUID - -from tests.common import MockConfigEntry - -ZEROCONF_DISCOVERY = ZeroconfServiceInfo( - ip_address=ip_address(HOST), - ip_addresses=[ip_address(HOST)], - hostname=f"{NAME}.local.", - name=f"{NAME}._linkplay._tcp.local.", - port=59152, - type="_linkplay._tcp.local.", - properties={ - "uuid": f"uuid:{UUID}", - "mac": "00:2F:69:01:84:3A", - "security": "https 2.0", - "upnp": "1.0.0", - "bootid": "1f347886-1dd2-11b2-86ab-aa0cd2803583", - }, -) - -ZEROCONF_DISCOVERY_RE_ENTRY = ZeroconfServiceInfo( - ip_address=ip_address(HOST_REENTRY), - ip_addresses=[ip_address(HOST_REENTRY)], - hostname=f"{NAME}.local.", - name=f"{NAME}._linkplay._tcp.local.", - port=59152, - type="_linkplay._tcp.local.", - properties={ - "uuid": f"uuid:{UUID}", - "mac": "00:2F:69:01:84:3A", - "security": "https 2.0", - "upnp": "1.0.0", - "bootid": "1f347886-1dd2-11b2-86ab-aa0cd2803583", - }, -) - - -@pytest.mark.usefixtures("mock_linkplay_factory_bridge", "mock_setup_entry") -async def test_user_flow( - hass: HomeAssistant, -) -> None: - """Test user setup config flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: HOST}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == NAME - assert result["data"] == { - CONF_HOST: HOST, - } - assert result["result"].unique_id == UUID - - -@pytest.mark.usefixtures("mock_linkplay_factory_bridge") -async def test_user_flow_re_entry( - hass: HomeAssistant, -) -> None: - """Test user setup config flow when an entry with the same unique id already exists.""" - - # Create mock entry which already has the same UUID - entry = MockConfigEntry( - data={CONF_HOST: HOST}, - domain=DOMAIN, - title=NAME, - unique_id=UUID, - ) - entry.add_to_hass(hass) - - # Re-create entry with different host - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: HOST_REENTRY}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -@pytest.mark.usefixtures("mock_linkplay_factory_bridge", "mock_setup_entry") -async def test_zeroconf_flow( - hass: HomeAssistant, -) -> None: - """Test Zeroconf flow.""" - result = await hass.config_entries.flow.async_init( - 
DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZEROCONF_DISCOVERY, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "discovery_confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == NAME - assert result["data"] == { - CONF_HOST: HOST, - } - assert result["result"].unique_id == UUID - - -@pytest.mark.usefixtures("mock_linkplay_factory_bridge") -async def test_zeroconf_flow_re_entry( - hass: HomeAssistant, -) -> None: - """Test Zeroconf flow when an entry with the same unique id already exists.""" - - # Create mock entry which already has the same UUID - entry = MockConfigEntry( - data={CONF_HOST: HOST}, - domain=DOMAIN, - title=NAME, - unique_id=UUID, - ) - entry.add_to_hass(hass) - - # Re-create entry with different host - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZEROCONF_DISCOVERY_RE_ENTRY, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -@pytest.mark.usefixtures("mock_setup_entry") -async def test_zeroconf_flow_errors( - hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, -) -> None: - """Test flow when the device discovered through Zeroconf cannot be reached.""" - - # Temporarily make the mock_linkplay_factory_bridge throw an exception - mock_linkplay_factory_bridge.side_effect = (LinkPlayRequestException("Error"),) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZEROCONF_DISCOVERY, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "cannot_connect" - - -@pytest.mark.usefixtures("mock_setup_entry") -async def test_user_flow_errors( - hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, -) -> None: - """Test flow when the device cannot be reached.""" - - # Temporarily make the mock_linkplay_factory_bridge throw an exception - mock_linkplay_factory_bridge.side_effect = (LinkPlayRequestException("Error"),) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: HOST}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "cannot_connect"} - - # Make mock_linkplay_factory_bridge_exception no longer throw an exception - mock_linkplay_factory_bridge.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: HOST}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == NAME - assert result["data"] == { - CONF_HOST: HOST, - } - assert result["result"].unique_id == UUID diff --git a/tests/components/linkplay/test_diagnostics.py b/tests/components/linkplay/test_diagnostics.py deleted file mode 100644 index 369142978a3..00000000000 --- a/tests/components/linkplay/test_diagnostics.py +++ /dev/null @@ -1,53 +0,0 @@ -"""Tests for the LinkPlay diagnostics.""" - -from unittest.mock import patch - -from linkplay.bridge import LinkPlayMultiroom -from linkplay.consts import API_ENDPOINT -from linkplay.endpoint import LinkPlayApiEndpoint -from syrupy import SnapshotAssertion - -from 
homeassistant.components.linkplay.const import DOMAIN -from homeassistant.core import HomeAssistant - -from . import setup_integration -from .conftest import HOST, mock_lp_aiohttp_client - -from tests.common import MockConfigEntry, load_fixture -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test diagnostics.""" - - with ( - mock_lp_aiohttp_client() as mock_session, - patch.object(LinkPlayMultiroom, "update_status", return_value=None), - ): - endpoints = [ - LinkPlayApiEndpoint(protocol="https", endpoint=HOST, session=None), - LinkPlayApiEndpoint(protocol="http", endpoint=HOST, session=None), - ] - for endpoint in endpoints: - mock_session.get( - API_ENDPOINT.format(str(endpoint), "getPlayerStatusEx"), - text=load_fixture("getPlayerEx.json", DOMAIN), - ) - - mock_session.get( - API_ENDPOINT.format(str(endpoint), "getStatusEx"), - text=load_fixture("getStatusEx.json", DOMAIN), - ) - - await setup_integration(hass, mock_config_entry) - - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) - == snapshot - ) diff --git a/tests/components/litejet/test_trigger.py b/tests/components/litejet/test_trigger.py index c13fda9068c..216084c26bc 100644 --- a/tests/components/litejet/test_trigger.py +++ b/tests/components/litejet/test_trigger.py @@ -2,9 +2,8 @@ from datetime import timedelta import logging -from typing import Any from unittest import mock -from unittest.mock import MagicMock, patch +from unittest.mock import patch import pytest @@ -15,7 +14,7 @@ import homeassistant.util.dt as dt_util from . 
import async_init_integration -from tests.common import async_fire_time_changed_exact +from tests.common import async_fire_time_changed_exact, async_mock_service @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -31,9 +30,13 @@ ENTITY_OTHER_SWITCH = "switch.mock_switch_2" ENTITY_OTHER_SWITCH_NUMBER = 2 -async def simulate_press( - hass: HomeAssistant, mock_litejet: MagicMock, number: int -) -> None: +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + +async def simulate_press(hass, mock_litejet, number): """Test to simulate a press.""" _LOGGER.info("*** simulate press of %d", number) callback = mock_litejet.switch_pressed_callbacks.get(number) @@ -46,9 +49,7 @@ async def simulate_press( await hass.async_block_till_done() -async def simulate_release( - hass: HomeAssistant, mock_litejet: MagicMock, number: int -) -> None: +async def simulate_release(hass, mock_litejet, number): """Test to simulate releasing.""" _LOGGER.info("*** simulate release of %d", number) callback = mock_litejet.switch_released_callbacks.get(number) @@ -61,9 +62,7 @@ async def simulate_release( await hass.async_block_till_done() -async def simulate_time( - hass: HomeAssistant, mock_litejet: MagicMock, delta: timedelta -) -> None: +async def simulate_time(hass, mock_litejet, delta): """Test to simulate time.""" _LOGGER.info( "*** simulate time change by %s: %s", delta, mock_litejet.start_time + delta @@ -79,7 +78,7 @@ async def simulate_time( _LOGGER.info("*** done with now=%s", dt_util.utcnow()) -async def setup_automation(hass: HomeAssistant, trigger: dict[str, Any]) -> None: +async def setup_automation(hass, trigger): """Test setting up the automation.""" await async_init_integration(hass, use_switch=True) assert await setup.async_setup_component( @@ -102,7 +101,7 @@ async def setup_automation(hass: HomeAssistant, trigger: dict[str, Any]) -> None async def test_simple( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock + hass: HomeAssistant, calls: list[ServiceCall], mock_litejet ) -> None: """Test the simplest form of a LiteJet trigger.""" await setup_automation( @@ -112,12 +111,12 @@ async def test_simple( await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 1 - assert service_calls[0].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["id"] == 0 async def test_only_release( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock + hass: HomeAssistant, calls: list[ServiceCall], mock_litejet ) -> None: """Test the simplest form of a LiteJet trigger.""" await setup_automation( @@ -126,11 +125,11 @@ async def test_only_release( await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_held_more_than_short( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock + hass: HomeAssistant, calls: list[ServiceCall], mock_litejet ) -> None: """Test a too short hold.""" await setup_automation( @@ -145,11 +144,11 @@ async def test_held_more_than_short( await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_litejet, timedelta(seconds=1)) await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 0 + assert len(calls) == 
0 async def test_held_more_than_long( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock + hass: HomeAssistant, calls: list[ServiceCall], mock_litejet ) -> None: """Test a hold that is long enough.""" await setup_automation( @@ -162,16 +161,16 @@ async def test_held_more_than_long( ) await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 0 + assert len(calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=3)) - assert len(service_calls) == 1 - assert service_calls[0].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["id"] == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_held_less_than_short( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock + hass: HomeAssistant, calls: list[ServiceCall], mock_litejet ) -> None: """Test a hold that is short enough.""" await setup_automation( @@ -185,14 +184,14 @@ async def test_held_less_than_short( await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_litejet, timedelta(seconds=1)) - assert len(service_calls) == 0 + assert len(calls) == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 1 - assert service_calls[0].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["id"] == 0 async def test_held_less_than_long( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock + hass: HomeAssistant, calls: list[ServiceCall], mock_litejet ) -> None: """Test a hold that is too long.""" await setup_automation( @@ -205,15 +204,15 @@ async def test_held_less_than_long( ) await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 0 + assert len(calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=3)) - assert len(service_calls) == 0 + assert len(calls) == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_held_in_range_short( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock + hass: HomeAssistant, calls: list[ServiceCall], mock_litejet ) -> None: """Test an in-range trigger with a too short hold.""" await setup_automation( @@ -229,11 +228,11 @@ async def test_held_in_range_short( await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_litejet, timedelta(seconds=0.5)) await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_held_in_range_just_right( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock + hass: HomeAssistant, calls: list[ServiceCall], mock_litejet ) -> None: """Test an in-range trigger with a just right hold.""" await setup_automation( @@ -247,16 +246,16 @@ async def test_held_in_range_just_right( ) await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 0 + assert len(calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=2)) - assert len(service_calls) == 0 + assert len(calls) == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 1 - assert service_calls[0].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["id"] == 0 async def 
test_held_in_range_long( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock + hass: HomeAssistant, calls: list[ServiceCall], mock_litejet ) -> None: """Test an in-range trigger with a too long hold.""" await setup_automation( @@ -270,15 +269,15 @@ async def test_held_in_range_long( ) await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 0 + assert len(calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=4)) - assert len(service_calls) == 0 + assert len(calls) == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_reload( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock + hass: HomeAssistant, calls: list[ServiceCall], mock_litejet ) -> None: """Test reloading automation.""" await setup_automation( @@ -313,8 +312,8 @@ async def test_reload( await hass.async_block_till_done() await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(service_calls) == 1 + assert len(calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=5)) - assert len(service_calls) == 1 + assert len(calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=12.5)) - assert len(service_calls) == 2 + assert len(calls) == 1 diff --git a/tests/components/litterrobot/test_config_flow.py b/tests/components/litterrobot/test_config_flow.py index 9420d3cb8a8..5ffb78c7782 100644 --- a/tests/components/litterrobot/test_config_flow.py +++ b/tests/components/litterrobot/test_config_flow.py @@ -7,7 +7,7 @@ from pylitterbot.exceptions import LitterRobotException, LitterRobotLoginExcepti from homeassistant import config_entries from homeassistant.components import litterrobot -from homeassistant.const import CONF_PASSWORD +from homeassistant.const import CONF_PASSWORD, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -124,7 +124,15 @@ async def test_step_reauth(hass: HomeAssistant, mock_account: Account) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + CONF_SOURCE: config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -156,7 +164,15 @@ async def test_step_reauth_failed(hass: HomeAssistant, mock_account: Account) -> ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + CONF_SOURCE: config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/local_calendar/conftest.py b/tests/components/local_calendar/conftest.py index 8aef73a9d5a..6d2c38544a5 100644 --- a/tests/components/local_calendar/conftest.py +++ b/tests/components/local_calendar/conftest.py @@ -1,6 +1,6 @@ """Fixtures for local calendar.""" -from collections.abc import Awaitable, Callable, Generator +from collections.abc import Awaitable, Callable from http import HTTPStatus from pathlib import Path from typing import Any @@ -9,6 +9,7 @@ import urllib from aiohttp import ClientWebSocketResponse import pytest +from 
typing_extensions import Generator from homeassistant.components.local_calendar import LocalCalendarStore from homeassistant.components.local_calendar.const import CONF_CALENDAR_NAME, DOMAIN diff --git a/tests/components/local_calendar/test_config_flow.py b/tests/components/local_calendar/test_config_flow.py index cf37176a10f..c76fd9e283d 100644 --- a/tests/components/local_calendar/test_config_flow.py +++ b/tests/components/local_calendar/test_config_flow.py @@ -1,20 +1,10 @@ """Test the Local Calendar config flow.""" -from collections.abc import Generator, Iterator -from contextlib import contextmanager -from pathlib import Path -from unittest.mock import MagicMock, patch -from uuid import uuid4 - -import pytest +from unittest.mock import patch from homeassistant import config_entries from homeassistant.components.local_calendar.const import ( - ATTR_CREATE_EMPTY, - ATTR_IMPORT_ICS_FILE, CONF_CALENDAR_NAME, - CONF_ICS_FILE, - CONF_IMPORT, CONF_STORAGE_KEY, DOMAIN, ) @@ -24,46 +14,6 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry -@pytest.fixture -def mock_ics_content(): - """Mock ics file content.""" - return b"""BEGIN:VCALENDAR - VERSION:2.0 - PRODID:-//hacksw/handcal//NONSGML v1.0//EN - END:VCALENDAR - """ - - -@pytest.fixture -def mock_process_uploaded_file( - tmp_path: Path, mock_ics_content: str -) -> Generator[MagicMock]: - """Mock upload ics file.""" - file_id_ics = str(uuid4()) - - @contextmanager - def _mock_process_uploaded_file( - hass: HomeAssistant, uploaded_file_id: str - ) -> Iterator[Path | None]: - with open(tmp_path / uploaded_file_id, "wb") as icsfile: - icsfile.write(mock_ics_content) - yield tmp_path / uploaded_file_id - - with ( - patch( - "homeassistant.components.local_calendar.config_flow.process_uploaded_file", - side_effect=_mock_process_uploaded_file, - ) as mock_upload, - patch( - "shutil.move", - ), - ): - mock_upload.file_id = { - CONF_ICS_FILE: file_id_ics, - } - yield mock_upload - - async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -88,44 +38,11 @@ async def test_form(hass: HomeAssistant) -> None: assert result2["title"] == "My Calendar" assert result2["data"] == { CONF_CALENDAR_NAME: "My Calendar", - CONF_IMPORT: ATTR_CREATE_EMPTY, CONF_STORAGE_KEY: "my_calendar", } assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_import_ics( - hass: HomeAssistant, - mock_process_uploaded_file: MagicMock, -) -> None: - """Test we get the import form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] is None - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_CALENDAR_NAME: "My Calendar", CONF_IMPORT: ATTR_IMPORT_ICS_FILE}, - ) - assert result2["type"] is FlowResultType.FORM - - with patch( - "homeassistant.components.local_calendar.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - file_id = mock_process_uploaded_file.file_id - result3 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_ICS_FILE: file_id[CONF_ICS_FILE]}, - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert len(mock_setup_entry.mock_calls) == 1 - - async def test_duplicate_name( hass: HomeAssistant, setup_integration: None, config_entry: MockConfigEntry ) -> None: @@ -148,30 +65,3 
@@ async def test_duplicate_name( assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" - - -@pytest.mark.parametrize("mock_ics_content", [b"invalid-ics-content"]) -async def test_invalid_ics( - hass: HomeAssistant, - mock_process_uploaded_file: MagicMock, -) -> None: - """Test invalid ics content raises error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] is None - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_CALENDAR_NAME: "My Calendar", CONF_IMPORT: ATTR_IMPORT_ICS_FILE}, - ) - assert result2["type"] is FlowResultType.FORM - - file_id = mock_process_uploaded_file.file_id - result3 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_ICS_FILE: file_id[CONF_ICS_FILE]}, - ) - assert result3["type"] is FlowResultType.FORM - assert result3["errors"] == {CONF_ICS_FILE: "invalid_ics_file"} diff --git a/tests/components/local_calendar/test_diagnostics.py b/tests/components/local_calendar/test_diagnostics.py index 30c857dad98..ed12391f8a9 100644 --- a/tests/components/local_calendar/test_diagnostics.py +++ b/tests/components/local_calendar/test_diagnostics.py @@ -7,6 +7,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.auth.models import Credentials from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY, Client @@ -40,6 +41,12 @@ def _get_test_client_generator( return auth_client +@pytest.fixture(autouse=True) +async def setup_diag(hass): + """Set up diagnostics platform.""" + assert await async_setup_component(hass, "diagnostics", {}) + + @freeze_time("2023-03-13 12:05:00-07:00") @pytest.mark.usefixtures("socket_enabled") async def test_empty_calendar( diff --git a/tests/components/local_file/conftest.py b/tests/components/local_file/conftest.py deleted file mode 100644 index 4ec06369c94..00000000000 --- a/tests/components/local_file/conftest.py +++ /dev/null @@ -1,63 +0,0 @@ -"""Fixtures for the Local file integration.""" - -from __future__ import annotations - -from collections.abc import Generator -from typing import Any -from unittest.mock import AsyncMock, Mock, patch - -import pytest - -from homeassistant.components.local_file.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_FILE_PATH, CONF_NAME -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Automatically patch setup.""" - with patch( - "homeassistant.components.local_file.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture(name="get_config") -async def get_config_to_integration_load() -> dict[str, Any]: - """Return configuration. 
- - To override the config, tests can be marked with: - @pytest.mark.parametrize("get_config", [{...}]) - """ - return {CONF_NAME: DEFAULT_NAME, CONF_FILE_PATH: "mock.file"} - - -@pytest.fixture(name="loaded_entry") -async def load_integration( - hass: HomeAssistant, get_config: dict[str, Any] -) -> MockConfigEntry: - """Set up the Local file integration in Home Assistant.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - source=SOURCE_USER, - options=get_config, - entry_id="1", - ) - - config_entry.add_to_hass(hass) - with ( - patch("os.path.isfile", Mock(return_value=True)), - patch("os.access", Mock(return_value=True)), - patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - Mock(return_value=(None, None)), - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry diff --git a/tests/components/local_file/test_camera.py b/tests/components/local_file/test_camera.py index ddfdf4249bd..4455d47469c 100644 --- a/tests/components/local_file/test_camera.py +++ b/tests/components/local_file/test_camera.py @@ -1,287 +1,180 @@ """The tests for local file camera component.""" from http import HTTPStatus -from typing import Any -from unittest.mock import Mock, mock_open, patch +from unittest import mock import pytest -from homeassistant.components.local_file.const import ( - DEFAULT_NAME, - DOMAIN, - SERVICE_UPDATE_FILE_PATH, -) -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import ATTR_ENTITY_ID, CONF_FILE_PATH -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import issue_registry as ir +from homeassistant.components.local_file.const import DOMAIN, SERVICE_UPDATE_FILE_PATH +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from homeassistant.util import slugify -from tests.common import MockConfigEntry from tests.typing import ClientSessionGenerator async def test_loading_file( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - loaded_entry: MockConfigEntry, + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test that it loads image from disk.""" + with ( + mock.patch("os.path.isfile", mock.Mock(return_value=True)), + mock.patch("os.access", mock.Mock(return_value=True)), + mock.patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + mock.Mock(return_value=(None, None)), + ), + ): + await async_setup_component( + hass, + "camera", + { + "camera": { + "name": "config_test", + "platform": "local_file", + "file_path": "mock.file", + } + }, + ) + await hass.async_block_till_done() client = await hass_client() - m_open = mock_open(read_data=b"hello") - with patch("homeassistant.components.local_file.camera.open", m_open, create=True): - resp = await client.get("/api/camera_proxy/camera.local_file") + m_open = mock.mock_open(read_data=b"hello") + with mock.patch( + "homeassistant.components.local_file.camera.open", m_open, create=True + ): + resp = await client.get("/api/camera_proxy/camera.config_test") assert resp.status == HTTPStatus.OK body = await resp.text() assert body == "hello" -async def test_file_not_readable_after_setup( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - caplog: pytest.LogCaptureFixture, - loaded_entry: MockConfigEntry, +async def test_file_not_readable( + hass: HomeAssistant, caplog: 
pytest.LogCaptureFixture ) -> None: """Test a warning is shown setup when file is not readable.""" - - client = await hass_client() - - with patch( - "homeassistant.components.local_file.camera.open", side_effect=FileNotFoundError + with ( + mock.patch("os.path.isfile", mock.Mock(return_value=True)), + mock.patch("os.access", mock.Mock(return_value=False)), ): - resp = await client.get("/api/camera_proxy/camera.local_file") + await async_setup_component( + hass, + "camera", + { + "camera": { + "name": "config_test", + "platform": "local_file", + "file_path": "mock.file", + } + }, + ) + await hass.async_block_till_done() - assert resp.status == HTTPStatus.INTERNAL_SERVER_ERROR - assert "Could not read camera Local File image from file: mock.file" in caplog.text + assert "Could not read" in caplog.text + assert "config_test" in caplog.text + assert "mock.file" in caplog.text -@pytest.mark.parametrize( - ("config", "url", "content_type"), - [ - ( - { - "name": "test_jpg", - "file_path": "/path/to/image.jpg", - }, - "/api/camera_proxy/camera.test_jpg", - "image/jpeg", - ), - ( - { - "name": "test_png", - "file_path": "/path/to/image.png", - }, - "/api/camera_proxy/camera.test_png", - "image/png", - ), - ( - { - "name": "test_svg", - "file_path": "/path/to/image.svg", - }, - "/api/camera_proxy/camera.test_svg", - "image/svg+xml", - ), - ( - { - "name": "test_no_ext", - "file_path": "/path/to/image", - }, - "/api/camera_proxy/camera.test_no_ext", - "image/jpeg", - ), - ], -) async def test_camera_content_type( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - config: dict[str, Any], - url: str, - content_type: str, + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test local_file camera content_type.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - source=SOURCE_USER, - options=config, - entry_id="1", - ) + cam_config_jpg = { + "name": "test_jpg", + "platform": "local_file", + "file_path": "/path/to/image.jpg", + } + cam_config_png = { + "name": "test_png", + "platform": "local_file", + "file_path": "/path/to/image.png", + } + cam_config_svg = { + "name": "test_svg", + "platform": "local_file", + "file_path": "/path/to/image.svg", + } + cam_config_noext = { + "name": "test_no_ext", + "platform": "local_file", + "file_path": "/path/to/image", + } - config_entry.add_to_hass(hass) - with ( - patch("os.path.isfile", Mock(return_value=True)), - patch("os.access", Mock(return_value=True)), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await async_setup_component( + hass, + "camera", + {"camera": [cam_config_jpg, cam_config_png, cam_config_svg, cam_config_noext]}, + ) + await hass.async_block_till_done() client = await hass_client() image = "hello" - m_open = mock_open(read_data=image.encode()) - with patch("homeassistant.components.local_file.camera.open", m_open, create=True): - resp_1 = await client.get(url) + m_open = mock.mock_open(read_data=image.encode()) + with mock.patch( + "homeassistant.components.local_file.camera.open", m_open, create=True + ): + resp_1 = await client.get("/api/camera_proxy/camera.test_jpg") + resp_2 = await client.get("/api/camera_proxy/camera.test_png") + resp_3 = await client.get("/api/camera_proxy/camera.test_svg") + resp_4 = await client.get("/api/camera_proxy/camera.test_no_ext") assert resp_1.status == HTTPStatus.OK - assert resp_1.content_type == content_type + assert resp_1.content_type == "image/jpeg" body = await resp_1.text() assert body == image + assert 
resp_2.status == HTTPStatus.OK + assert resp_2.content_type == "image/png" + body = await resp_2.text() + assert body == image -@pytest.mark.parametrize( - "get_config", - [ - { - "name": DEFAULT_NAME, - "file_path": "mock/path.jpg", - } - ], -) -async def test_update_file_path( - hass: HomeAssistant, loaded_entry: MockConfigEntry -) -> None: + assert resp_3.status == HTTPStatus.OK + assert resp_3.content_type == "image/svg+xml" + body = await resp_3.text() + assert body == image + + # default mime type + assert resp_4.status == HTTPStatus.OK + assert resp_4.content_type == "image/jpeg" + body = await resp_4.text() + assert body == image + + +async def test_update_file_path(hass: HomeAssistant) -> None: """Test update_file_path service.""" # Setup platform - config_entry = MockConfigEntry( - domain=DOMAIN, - source=SOURCE_USER, - options={ + with ( + mock.patch("os.path.isfile", mock.Mock(return_value=True)), + mock.patch("os.access", mock.Mock(return_value=True)), + mock.patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + mock.Mock(return_value=(None, None)), + ), + ): + camera_1 = {"platform": "local_file", "file_path": "mock/path.jpg"} + camera_2 = { + "platform": "local_file", "name": "local_file_camera_2", "file_path": "mock/path_2.jpg", - }, - entry_id="2", - ) - - config_entry.add_to_hass(hass) - with ( - patch("os.path.isfile", Mock(return_value=True)), - patch("os.access", Mock(return_value=True)), - patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - Mock(return_value=(None, None)), - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) + } + await async_setup_component(hass, "camera", {"camera": [camera_1, camera_2]}) await hass.async_block_till_done() - # Fetch state and check motion detection attribute - state = hass.states.get("camera.local_file") - assert state.attributes.get("friendly_name") == "Local File" - assert state.attributes.get("file_path") == "mock/path.jpg" + # Fetch state and check motion detection attribute + state = hass.states.get("camera.local_file") + assert state.attributes.get("friendly_name") == "Local File" + assert state.attributes.get("file_path") == "mock/path.jpg" - service_data = {"entity_id": "camera.local_file", "file_path": "new/path.jpg"} + service_data = {"entity_id": "camera.local_file", "file_path": "new/path.jpg"} - with ( - patch("os.path.isfile", Mock(return_value=True)), - patch("os.access", Mock(return_value=True)), - patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - Mock(return_value=(None, None)), - ), - ): - await hass.services.async_call( - DOMAIN, - SERVICE_UPDATE_FILE_PATH, - service_data, - blocking=True, - ) - - state = hass.states.get("camera.local_file") - assert state.attributes.get("file_path") == "new/path.jpg" - - # Check that local_file_camera_2 file_path is still as configured - state = hass.states.get("camera.local_file_camera_2") - assert state.attributes.get("file_path") == "mock/path_2.jpg" - - # Assert it fails if file is not readable - service_data = { - ATTR_ENTITY_ID: "camera.local_file", - CONF_FILE_PATH: "new/path2.jpg", - } - with pytest.raises( - ServiceValidationError, match="Path new/path2.jpg is not accessible" - ): - await hass.services.async_call( - DOMAIN, - SERVICE_UPDATE_FILE_PATH, - service_data, - blocking=True, - ) - - -async def test_import_from_yaml_success( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test import.""" - - with ( - patch("os.path.isfile", 
Mock(return_value=True)), - patch("os.access", Mock(return_value=True)), - patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - Mock(return_value=(None, None)), - ), - ): - await async_setup_component( - hass, - "camera", - { - "camera": { - "name": "config_test", - "platform": "local_file", - "file_path": "mock.file", - } - }, - ) + await hass.services.async_call(DOMAIN, SERVICE_UPDATE_FILE_PATH, service_data) await hass.async_block_till_done() - assert hass.config_entries.async_has_entries(DOMAIN) - state = hass.states.get("camera.config_test") - assert state.attributes.get("file_path") == "mock.file" + state = hass.states.get("camera.local_file") + assert state.attributes.get("file_path") == "new/path.jpg" - issue = issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" - ) - assert issue - assert issue.translation_key == "deprecated_yaml" - - -async def test_import_from_yaml_fails( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test import fails due to not accessible file.""" - - with ( - patch("os.path.isfile", Mock(return_value=True)), - patch("os.access", Mock(return_value=False)), - patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - Mock(return_value=(None, None)), - ), - ): - await async_setup_component( - hass, - "camera", - { - "camera": { - "name": "config_test", - "platform": "local_file", - "file_path": "mock.file", - } - }, - ) - await hass.async_block_till_done() - - assert not hass.config_entries.async_has_entries(DOMAIN) - assert not hass.states.get("camera.config_test") - - issue = issue_registry.async_get_issue( - DOMAIN, f"no_access_path_{slugify("mock.file")}" - ) - assert issue - assert issue.translation_key == "no_access_path" + # Check that local_file_camera_2 file_path is still as configured + state = hass.states.get("camera.local_file_camera_2") + assert state.attributes.get("file_path") == "mock/path_2.jpg" diff --git a/tests/components/local_file/test_config_flow.py b/tests/components/local_file/test_config_flow.py deleted file mode 100644 index dda9d606107..00000000000 --- a/tests/components/local_file/test_config_flow.py +++ /dev/null @@ -1,235 +0,0 @@ -"""Test the Scrape config flow.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock, Mock, patch - -import pytest - -from homeassistant import config_entries -from homeassistant.components.local_file.const import DEFAULT_NAME, DOMAIN -from homeassistant.const import CONF_FILE_PATH, CONF_NAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_form_sensor(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: - """Test we get the form for sensor.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - with ( - patch("os.path.isfile", Mock(return_value=True)), - patch("os.access", Mock(return_value=True)), - patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - Mock(return_value=(None, None)), - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_FILE_PATH: "mock.file", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["version"] == 1 - assert 
result["options"] == { - CONF_NAME: DEFAULT_NAME, - CONF_FILE_PATH: "mock.file", - } - - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: - """Test options flow.""" - - result = await hass.config_entries.options.async_init(loaded_entry.entry_id) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - with ( - patch("os.path.isfile", Mock(return_value=True)), - patch("os.access", Mock(return_value=True)), - patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - Mock(return_value=(None, None)), - ), - ): - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={CONF_FILE_PATH: "mock.new.file"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == {CONF_NAME: DEFAULT_NAME, CONF_FILE_PATH: "mock.new.file"} - - await hass.async_block_till_done() - - # Check the entity was updated, no new entity was created - assert len(hass.states.async_all()) == 1 - - state = hass.states.get("camera.local_file") - assert state is not None - - -async def test_validation_options( - hass: HomeAssistant, mock_setup_entry: AsyncMock -) -> None: - """Test validation.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - with ( - patch("os.path.isfile", Mock(return_value=True)), - patch("os.access", Mock(return_value=False)), - patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - Mock(return_value=(None, None)), - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_FILE_PATH: "mock.file", - }, - ) - await hass.async_block_till_done() - - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "not_readable_path"} - - with ( - patch("os.path.isfile", Mock(return_value=True)), - patch("os.access", Mock(return_value=True)), - patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - Mock(return_value=(None, None)), - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_FILE_PATH: "mock.new.file", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["version"] == 1 - assert result["options"] == { - CONF_NAME: DEFAULT_NAME, - CONF_FILE_PATH: "mock.new.file", - } - - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.usefixtures("mock_setup_entry") -async def test_entry_already_exist( - hass: HomeAssistant, loaded_entry: MockConfigEntry -) -> None: - """Test abort when entry already exist.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - with ( - patch("os.path.isfile", Mock(return_value=True)), - patch("os.access", Mock(return_value=True)), - patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - Mock(return_value=(None, None)), - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_FILE_PATH: "mock.file", - }, - ) - await 
hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -@pytest.mark.usefixtures("mock_setup_entry") -async def test_import(hass: HomeAssistant) -> None: - """Test import.""" - - with ( - patch("os.path.isfile", Mock(return_value=True)), - patch("os.access", Mock(return_value=True)), - patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - Mock(return_value=(None, None)), - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - "name": DEFAULT_NAME, - "file_path": "mock/path.jpg", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["version"] == 1 - assert result["options"] == { - CONF_NAME: DEFAULT_NAME, - CONF_FILE_PATH: "mock/path.jpg", - } - - -@pytest.mark.usefixtures("mock_setup_entry") -async def test_import_already_exist( - hass: HomeAssistant, loaded_entry: MockConfigEntry -) -> None: - """Test import abort existing entry.""" - - with ( - patch("os.path.isfile", Mock(return_value=True)), - patch("os.access", Mock(return_value=True)), - patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - Mock(return_value=(None, None)), - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - CONF_NAME: DEFAULT_NAME, - CONF_FILE_PATH: "mock.file", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/local_file/test_init.py b/tests/components/local_file/test_init.py deleted file mode 100644 index 2b8b93e8100..00000000000 --- a/tests/components/local_file/test_init.py +++ /dev/null @@ -1,47 +0,0 @@ -"""Test Statistics component setup process.""" - -from __future__ import annotations - -from unittest.mock import Mock, patch - -from homeassistant.components.local_file.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER, ConfigEntryState -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: - """Test unload an entry.""" - - assert loaded_entry.state is ConfigEntryState.LOADED - assert await hass.config_entries.async_unload(loaded_entry.entry_id) - await hass.async_block_till_done() - assert loaded_entry.state is ConfigEntryState.NOT_LOADED - - -async def test_file_not_readable_during_startup( - hass: HomeAssistant, - get_config: dict[str, str], -) -> None: - """Test a warning is shown setup when file is not readable.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - source=SOURCE_USER, - options=get_config, - entry_id="1", - ) - config_entry.add_to_hass(hass) - - with ( - patch("os.path.isfile", Mock(return_value=True)), - patch("os.access", Mock(return_value=False)), - patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - Mock(return_value=(None, None)), - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.SETUP_ERROR diff --git a/tests/components/local_ip/test_init.py b/tests/components/local_ip/test_init.py index 7f411ea9cd7..51e0628a417 100644 --- a/tests/components/local_ip/test_init.py +++ b/tests/components/local_ip/test_init.py @@ -2,7 +2,7 @@ 
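For readers unfamiliar with the mocking pattern the local_file camera hunks above lean on, here is a minimal self-contained sketch using only unittest.mock and pytest; read_camera_file and is_readable are hypothetical helpers standing in for the integration's own code, not part of this diff.

import os
from unittest import mock


def read_camera_file(path: str) -> bytes:
    """Hypothetical helper mirroring how the camera platform reads its file."""
    with open(path, "rb") as handle:
        return handle.read()


def is_readable(path: str) -> bool:
    """Hypothetical helper mirroring the isfile/access checks patched above."""
    return os.path.isfile(path) and os.access(path, os.R_OK)


def test_read_camera_file() -> None:
    # mock_open fakes both the context manager and the read() call.
    m_open = mock.mock_open(read_data=b"hello")
    with mock.patch("builtins.open", m_open, create=True):
        assert read_camera_file("/path/to/image.jpg") == b"hello"
    m_open.assert_called_once_with("/path/to/image.jpg", "rb")


def test_is_readable_denied() -> None:
    # Same isfile/access patching style as the hunks above, but against
    # the local helper instead of the Home Assistant setup flow.
    with (
        mock.patch("os.path.isfile", mock.Mock(return_value=True)),
        mock.patch("os.access", mock.Mock(return_value=False)),
    ):
        assert not is_readable("mock.file")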
from __future__ import annotations -from homeassistant.components.local_ip.const import DOMAIN +from homeassistant.components.local_ip import DOMAIN from homeassistant.components.network import MDNS_TARGET_IP, async_get_source_ip from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant diff --git a/tests/components/local_todo/conftest.py b/tests/components/local_todo/conftest.py index ab73dabb474..67ef76172b7 100644 --- a/tests/components/local_todo/conftest.py +++ b/tests/components/local_todo/conftest.py @@ -1,11 +1,11 @@ """Common fixtures for the local_todo tests.""" -from collections.abc import Generator from pathlib import Path from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest +from typing_extensions import Generator from homeassistant.components.local_todo import LocalTodoListStore from homeassistant.components.local_todo.const import ( diff --git a/tests/components/local_todo/test_todo.py b/tests/components/local_todo/test_todo.py index 253adebd757..e54ee925437 100644 --- a/tests/components/local_todo/test_todo.py +++ b/tests/components/local_todo/test_todo.py @@ -7,17 +7,7 @@ from typing import Any import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import ( - ATTR_DESCRIPTION, - ATTR_DUE_DATE, - ATTR_DUE_DATETIME, - ATTR_ITEM, - ATTR_RENAME, - ATTR_STATUS, - DOMAIN as TODO_DOMAIN, - TodoServices, -) -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.components.todo import DOMAIN as TODO_DOMAIN from homeassistant.core import HomeAssistant from .conftest import TEST_ENTITY @@ -86,17 +76,17 @@ EXPECTED_ADD_ITEM = { ("item_data", "expected_item_data"), [ ({}, EXPECTED_ADD_ITEM), - ({ATTR_DUE_DATE: "2023-11-17"}, {**EXPECTED_ADD_ITEM, "due": "2023-11-17"}), + ({"due_date": "2023-11-17"}, {**EXPECTED_ADD_ITEM, "due": "2023-11-17"}), ( - {ATTR_DUE_DATETIME: "2023-11-17T11:30:00+00:00"}, + {"due_datetime": "2023-11-17T11:30:00+00:00"}, {**EXPECTED_ADD_ITEM, "due": "2023-11-17T05:30:00-06:00"}, ), ( - {ATTR_DESCRIPTION: "Additional detail"}, + {"description": "Additional detail"}, {**EXPECTED_ADD_ITEM, "description": "Additional detail"}, ), - ({ATTR_DESCRIPTION: ""}, {**EXPECTED_ADD_ITEM, "description": ""}), - ({ATTR_DESCRIPTION: None}, EXPECTED_ADD_ITEM), + ({"description": ""}, {**EXPECTED_ADD_ITEM, "description": ""}), + ({"description": None}, EXPECTED_ADD_ITEM), ], ) async def test_add_item( @@ -115,9 +105,9 @@ async def test_add_item( await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "replace batteries", **item_data}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "add_item", + {"item": "replace batteries", **item_data}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -137,12 +127,12 @@ async def test_add_item( ("item_data", "expected_item_data"), [ ({}, {}), - ({ATTR_DUE_DATE: "2023-11-17"}, {"due": "2023-11-17"}), + ({"due_date": "2023-11-17"}, {"due": "2023-11-17"}), ( {"due_datetime": "2023-11-17T11:30:00+00:00"}, {"due": "2023-11-17T05:30:00-06:00"}, ), - ({ATTR_DESCRIPTION: "Additional detail"}, {"description": "Additional detail"}), + ({"description": "Additional detail"}, {"description": "Additional detail"}), ], ) async def test_remove_item( @@ -155,9 +145,9 @@ async def test_remove_item( """Test removing a todo item.""" await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "replace batteries", **item_data}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "add_item", + 
{"item": "replace batteries", **item_data}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -175,9 +165,9 @@ async def test_remove_item( await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: [items[0]["uid"]]}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "remove_item", + {"item": [items[0]["uid"]]}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -198,9 +188,9 @@ async def test_bulk_remove( for i in range(5): await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: f"soda #{i}"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "add_item", + {"item": f"soda #{i}"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -214,9 +204,9 @@ async def test_bulk_remove( await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: uids}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "remove_item", + {"item": uids}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -237,23 +227,19 @@ EXPECTED_UPDATE_ITEM = { @pytest.mark.parametrize( ("item_data", "expected_item_data", "expected_state"), [ + ({"status": "completed"}, {**EXPECTED_UPDATE_ITEM, "status": "completed"}, "0"), ( - {ATTR_STATUS: "completed"}, - {**EXPECTED_UPDATE_ITEM, "status": "completed"}, - "0", - ), - ( - {ATTR_DUE_DATE: "2023-11-17"}, + {"due_date": "2023-11-17"}, {**EXPECTED_UPDATE_ITEM, "due": "2023-11-17"}, "1", ), ( - {ATTR_DUE_DATETIME: "2023-11-17T11:30:00+00:00"}, + {"due_datetime": "2023-11-17T11:30:00+00:00"}, {**EXPECTED_UPDATE_ITEM, "due": "2023-11-17T05:30:00-06:00"}, "1", ), ( - {ATTR_DESCRIPTION: "Additional detail"}, + {"description": "Additional detail"}, {**EXPECTED_UPDATE_ITEM, "description": "Additional detail"}, "1", ), @@ -272,9 +258,9 @@ async def test_update_item( # Create new item await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "soda"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "add_item", + {"item": "soda"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -293,9 +279,9 @@ async def test_update_item( # Update item await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: item["uid"], **item_data}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "update_item", + {"item": item["uid"], **item_data}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -317,7 +303,7 @@ async def test_update_item( ("item_data", "expected_item_data"), [ ( - {ATTR_STATUS: "completed"}, + {"status": "completed"}, { "summary": "soda", "status": "completed", @@ -326,7 +312,7 @@ async def test_update_item( }, ), ( - {ATTR_DUE_DATE: "2024-01-02"}, + {"due_date": "2024-01-02"}, { "summary": "soda", "status": "needs_action", @@ -335,7 +321,7 @@ async def test_update_item( }, ), ( - {ATTR_DUE_DATE: None}, + {"due_date": None}, { "summary": "soda", "status": "needs_action", @@ -343,7 +329,7 @@ async def test_update_item( }, ), ( - {ATTR_DUE_DATETIME: "2024-01-01 10:30:00"}, + {"due_datetime": "2024-01-01 10:30:00"}, { "summary": "soda", "status": "needs_action", @@ -352,7 +338,7 @@ async def test_update_item( }, ), ( - {ATTR_DUE_DATETIME: None}, + {"due_datetime": None}, { "summary": "soda", "status": "needs_action", @@ -360,7 +346,7 @@ async def test_update_item( }, ), ( - {ATTR_DESCRIPTION: "updated description"}, + {"description": "updated description"}, { "summary": "soda", "status": "needs_action", @@ -369,7 +355,7 @@ async def test_update_item( }, ), ( - {ATTR_DESCRIPTION: None}, + {"description": None}, {"summary": "soda", "status": "needs_action", "due": 
"2024-01-01"}, ), ], @@ -395,13 +381,9 @@ async def test_update_existing_field( # Create new item await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - { - ATTR_ITEM: "soda", - ATTR_DESCRIPTION: "Additional detail", - ATTR_DUE_DATE: "2024-01-01", - }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "add_item", + {"item": "soda", "description": "Additional detail", "due_date": "2024-01-01"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -416,9 +398,9 @@ async def test_update_existing_field( # Perform update await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: item["uid"], **item_data}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "update_item", + {"item": item["uid"], **item_data}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -442,9 +424,9 @@ async def test_rename( # Create new item await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "soda"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "add_item", + {"item": "soda"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -462,9 +444,9 @@ async def test_rename( # Rename item await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: item["uid"], ATTR_RENAME: "water"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "update_item", + {"item": item["uid"], "rename": "water"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -519,9 +501,9 @@ async def test_move_item( for i in range(1, 5): await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: f"item {i}"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "add_item", + {"item": f"item {i}"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -577,9 +559,9 @@ async def test_move_item_previous_unknown( await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "item 1"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "add_item", + {"item": "item 1"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) items = await ws_get_items() @@ -750,9 +732,9 @@ async def test_susbcribe( # Create new item await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "soda"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "add_item", + {"item": "soda"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -783,9 +765,9 @@ async def test_susbcribe( # Rename item await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: uid, ATTR_RENAME: "milk"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "update_item", + {"item": uid, "rename": "milk"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) diff --git a/tests/components/locative/test_init.py b/tests/components/locative/test_init.py index c41db68e3d6..305497ebbd6 100644 --- a/tests/components/locative/test_init.py +++ b/tests/components/locative/test_init.py @@ -11,8 +11,8 @@ from homeassistant.components import locative from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN from homeassistant.components.device_tracker.legacy import Device from homeassistant.components.locative import DOMAIN, TRACKER_UPDATE +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.dispatcher import DATA_DISPATCHER from homeassistant.setup import async_setup_component @@ -38,7 +38,7 @@ async def locative_client( @pytest.fixture 
-async def webhook_id(hass: HomeAssistant, locative_client: TestClient) -> str: +async def webhook_id(hass, locative_client): """Initialize the Geofency component and get the webhook_id.""" await async_process_ha_core_config( hass, @@ -56,7 +56,7 @@ async def webhook_id(hass: HomeAssistant, locative_client: TestClient) -> str: return result["result"].data["webhook_id"] -async def test_missing_data(locative_client: TestClient, webhook_id: str) -> None: +async def test_missing_data(locative_client, webhook_id) -> None: """Test missing data.""" url = f"/api/webhook/{webhook_id}" @@ -116,9 +116,7 @@ async def test_missing_data(locative_client: TestClient, webhook_id: str) -> Non assert req.status == HTTPStatus.UNPROCESSABLE_ENTITY -async def test_enter_and_exit( - hass: HomeAssistant, locative_client: TestClient, webhook_id: str -) -> None: +async def test_enter_and_exit(hass: HomeAssistant, locative_client, webhook_id) -> None: """Test when there is a known zone.""" url = f"/api/webhook/{webhook_id}" @@ -134,7 +132,9 @@ async def test_enter_and_exit( req = await locative_client.post(url, data=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state + state_name = hass.states.get( + "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]) + ).state assert state_name == "home" data["id"] = "HOME" @@ -144,7 +144,9 @@ async def test_enter_and_exit( req = await locative_client.post(url, data=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state + state_name = hass.states.get( + "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]) + ).state assert state_name == "not_home" data["id"] = "hOmE" @@ -154,7 +156,9 @@ async def test_enter_and_exit( req = await locative_client.post(url, data=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state + state_name = hass.states.get( + "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]) + ).state assert state_name == "home" data["trigger"] = "exit" @@ -163,7 +167,9 @@ async def test_enter_and_exit( req = await locative_client.post(url, data=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state + state_name = hass.states.get( + "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]) + ).state assert state_name == "not_home" data["id"] = "work" @@ -173,12 +179,14 @@ async def test_enter_and_exit( req = await locative_client.post(url, data=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state + state_name = hass.states.get( + "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]) + ).state assert state_name == "work" async def test_exit_after_enter( - hass: HomeAssistant, locative_client: TestClient, webhook_id: str + hass: HomeAssistant, locative_client, webhook_id ) -> None: """Test when an exit message comes after an enter message.""" url = f"/api/webhook/{webhook_id}" @@ -196,7 +204,7 @@ async def test_exit_after_enter( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}") + state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])) assert state.state == 
"home" data["id"] = "Work" @@ -206,7 +214,7 @@ async def test_exit_after_enter( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}") + state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])) assert state.state == "work" data["id"] = "Home" @@ -217,13 +225,11 @@ async def test_exit_after_enter( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}") + state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])) assert state.state == "work" -async def test_exit_first( - hass: HomeAssistant, locative_client: TestClient, webhook_id: str -) -> None: +async def test_exit_first(hass: HomeAssistant, locative_client, webhook_id) -> None: """Test when an exit message is sent first on a new device.""" url = f"/api/webhook/{webhook_id}" @@ -240,13 +246,11 @@ async def test_exit_first( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}") + state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])) assert state.state == "not_home" -async def test_two_devices( - hass: HomeAssistant, locative_client: TestClient, webhook_id: str -) -> None: +async def test_two_devices(hass: HomeAssistant, locative_client, webhook_id) -> None: """Test updating two different devices.""" url = f"/api/webhook/{webhook_id}" @@ -263,7 +267,9 @@ async def test_two_devices( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data_device_1['device']}") + state = hass.states.get( + "{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_1["device"]) + ) assert state.state == "not_home" # Enter Home @@ -274,9 +280,13 @@ async def test_two_devices( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data_device_2['device']}") + state = hass.states.get( + "{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_2["device"]) + ) assert state.state == "home" - state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data_device_1['device']}") + state = hass.states.get( + "{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_1["device"]) + ) assert state.state == "not_home" @@ -284,7 +294,7 @@ async def test_two_devices( reason="The device_tracker component does not support unloading yet." 
) async def test_load_unload_entry( - hass: HomeAssistant, locative_client: TestClient, webhook_id: str + hass: HomeAssistant, locative_client, webhook_id ) -> None: """Test that the appropriate dispatch signals are added and removed.""" url = f"/api/webhook/{webhook_id}" @@ -302,7 +312,7 @@ async def test_load_unload_entry( await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}") + state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])) assert state.state == "not_home" assert len(hass.data[DATA_DISPATCHER][TRACKER_UPDATE]) == 1 diff --git a/tests/components/lock/conftest.py b/tests/components/lock/conftest.py index fd569b162bc..f1715687339 100644 --- a/tests/components/lock/conftest.py +++ b/tests/components/lock/conftest.py @@ -1,10 +1,10 @@ """Fixtures for the lock entity platform tests.""" -from collections.abc import Generator from typing import Any from unittest.mock import MagicMock import pytest +from typing_extensions import Generator from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, diff --git a/tests/components/lock/test_device_condition.py b/tests/components/lock/test_device_condition.py index 1818d4933b8..97afe9fb759 100644 --- a/tests/components/lock/test_device_condition.py +++ b/tests/components/lock/test_device_condition.py @@ -5,14 +5,27 @@ from pytest_unordered import unordered from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.components.lock import DOMAIN, LockState -from homeassistant.const import EntityCategory +from homeassistant.components.lock import DOMAIN +from homeassistant.const import ( + STATE_JAMMED, + STATE_LOCKED, + STATE_LOCKING, + STATE_OPEN, + STATE_OPENING, + STATE_UNLOCKED, + STATE_UNLOCKING, + EntityCategory, +) from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -20,6 +33,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -120,7 +139,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -133,7 +152,7 @@ async def test_if_state( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, LockState.LOCKED) + hass.states.async_set(entry.entity_id, STATE_LOCKED) assert await async_setup_component( hass, @@ -272,52 +291,52 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) 
== 1 - assert service_calls[0].data["some"] == "is_locked - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_locked - event - test_event1" - hass.states.async_set(entry.entity_id, LockState.UNLOCKED) + hass.states.async_set(entry.entity_id, STATE_UNLOCKED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "is_unlocked - event - test_event2" + assert len(calls) == 2 + assert calls[1].data["some"] == "is_unlocked - event - test_event2" - hass.states.async_set(entry.entity_id, LockState.UNLOCKING) + hass.states.async_set(entry.entity_id, STATE_UNLOCKING) hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[2].data["some"] == "is_unlocking - event - test_event3" + assert len(calls) == 3 + assert calls[2].data["some"] == "is_unlocking - event - test_event3" - hass.states.async_set(entry.entity_id, LockState.LOCKING) + hass.states.async_set(entry.entity_id, STATE_LOCKING) hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(service_calls) == 4 - assert service_calls[3].data["some"] == "is_locking - event - test_event4" + assert len(calls) == 4 + assert calls[3].data["some"] == "is_locking - event - test_event4" - hass.states.async_set(entry.entity_id, LockState.JAMMED) + hass.states.async_set(entry.entity_id, STATE_JAMMED) hass.bus.async_fire("test_event5") await hass.async_block_till_done() - assert len(service_calls) == 5 - assert service_calls[4].data["some"] == "is_jammed - event - test_event5" + assert len(calls) == 5 + assert calls[4].data["some"] == "is_jammed - event - test_event5" - hass.states.async_set(entry.entity_id, LockState.OPENING) + hass.states.async_set(entry.entity_id, STATE_OPENING) hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(service_calls) == 6 - assert service_calls[5].data["some"] == "is_opening - event - test_event6" + assert len(calls) == 6 + assert calls[5].data["some"] == "is_opening - event - test_event6" - hass.states.async_set(entry.entity_id, LockState.OPEN) + hass.states.async_set(entry.entity_id, STATE_OPEN) hass.bus.async_fire("test_event7") await hass.async_block_till_done() - assert len(service_calls) == 7 - assert service_calls[6].data["some"] == "is_open - event - test_event7" + assert len(calls) == 7 + assert calls[6].data["some"] == "is_open - event - test_event7" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -330,7 +349,7 @@ async def test_if_state_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, LockState.LOCKED) + hass.states.async_set(entry.entity_id, STATE_LOCKED) assert await async_setup_component( hass, @@ -361,5 +380,5 @@ async def test_if_state_legacy( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_locked - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_locked - event - test_event1" diff --git a/tests/components/lock/test_device_trigger.py 
b/tests/components/lock/test_device_trigger.py index 3ecdf2a9bca..3cbfbb1a04c 100644 --- a/tests/components/lock/test_device_trigger.py +++ b/tests/components/lock/test_device_trigger.py @@ -7,8 +7,17 @@ from pytest_unordered import unordered from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.components.lock import DOMAIN, LockEntityFeature, LockState -from homeassistant.const import EntityCategory +from homeassistant.components.lock import DOMAIN, LockEntityFeature +from homeassistant.const import ( + STATE_JAMMED, + STATE_LOCKED, + STATE_LOCKING, + STATE_OPEN, + STATE_OPENING, + STATE_UNLOCKED, + STATE_UNLOCKING, + EntityCategory, +) from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider @@ -20,6 +29,7 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, ) @@ -28,6 +38,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -196,7 +212,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -209,7 +225,7 @@ async def test_if_fires_on_state_change( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, LockState.UNLOCKED) + hass.states.async_set(entry.entity_id, STATE_UNLOCKED) assert await async_setup_component( hass, @@ -278,29 +294,29 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is turning on. - hass.states.async_set(entry.entity_id, LockState.LOCKED) + hass.states.async_set(entry.entity_id, STATE_LOCKED) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"locked - device - {entry.entity_id} - unlocked - locked - None" ) # Fake that the entity is turning off. - hass.states.async_set(entry.entity_id, LockState.UNLOCKED) + hass.states.async_set(entry.entity_id, STATE_UNLOCKED) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[1].data["some"] + calls[1].data["some"] == f"unlocked - device - {entry.entity_id} - locked - unlocked - None" ) # Fake that the entity is opens. 
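The lock hunks here reintroduce a "calls" fixture built on async_mock_service and rename service_calls back to calls. The sketch below shows the same record-the-calls idea in isolation; FakeBus is a hypothetical stand-in for Home Assistant's service registry, not the real helper.

import pytest


class FakeBus:
    """Hypothetical bus that appends every service invocation to a list."""

    def __init__(self) -> None:
        self._listeners: dict[str, list[list[dict]]] = {}

    def register(self, service: str, sink: list[dict]) -> None:
        self._listeners.setdefault(service, []).append(sink)

    def call(self, service: str, data: dict) -> None:
        for sink in self._listeners.get(service, []):
            sink.append(data)


@pytest.fixture
def calls() -> list[dict]:
    """Track calls to a mock service, mirroring the fixture added above."""
    return []


def test_calls_recorded(calls: list[dict]) -> None:
    bus = FakeBus()
    bus.register("test.automation", calls)
    bus.call("test.automation", {"some": "is_locked - event - test_event1"})
    assert len(calls) == 1
    assert calls[0]["some"] == "is_locked - event - test_event1"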
- hass.states.async_set(entry.entity_id, LockState.OPEN) + hass.states.async_set(entry.entity_id, STATE_OPEN) await hass.async_block_till_done() - assert len(service_calls) == 3 + assert len(calls) == 3 assert ( - service_calls[2].data["some"] + calls[2].data["some"] == f"open - device - {entry.entity_id} - unlocked - open - None" ) @@ -309,7 +325,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -322,7 +338,7 @@ async def test_if_fires_on_state_change_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, LockState.UNLOCKED) + hass.states.async_set(entry.entity_id, STATE_UNLOCKED) assert await async_setup_component( hass, @@ -353,11 +369,11 @@ async def test_if_fires_on_state_change_legacy( ) # Fake that the entity is turning on. - hass.states.async_set(entry.entity_id, LockState.LOCKED) + hass.states.async_set(entry.entity_id, STATE_LOCKED) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"locked - device - {entry.entity_id} - unlocked - locked - None" ) @@ -366,7 +382,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -379,7 +395,7 @@ async def test_if_fires_on_state_change_with_for( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, LockState.UNLOCKED) + hass.states.async_set(entry.entity_id, STATE_UNLOCKED) assert await async_setup_component( hass, @@ -500,64 +516,64 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 - hass.states.async_set(entry.entity_id, LockState.LOCKED) + hass.states.async_set(entry.entity_id, STATE_LOCKED) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await hass.async_block_till_done() assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - unlocked - locked - 0:00:05" ) - hass.states.async_set(entry.entity_id, LockState.UNLOCKING) + hass.states.async_set(entry.entity_id, STATE_UNLOCKING) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=16)) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 await hass.async_block_till_done() assert ( - service_calls[1].data["some"] + calls[1].data["some"] == f"turn_on device - {entry.entity_id} - locked - unlocking - 0:00:05" ) - hass.states.async_set(entry.entity_id, LockState.JAMMED) + hass.states.async_set(entry.entity_id, STATE_JAMMED) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 async_fire_time_changed(hass, dt_util.utcnow() + 
timedelta(seconds=21)) await hass.async_block_till_done() - assert len(service_calls) == 3 + assert len(calls) == 3 await hass.async_block_till_done() assert ( - service_calls[2].data["some"] + calls[2].data["some"] == f"turn_off device - {entry.entity_id} - unlocking - jammed - 0:00:05" ) - hass.states.async_set(entry.entity_id, LockState.LOCKING) + hass.states.async_set(entry.entity_id, STATE_LOCKING) await hass.async_block_till_done() - assert len(service_calls) == 3 + assert len(calls) == 3 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=27)) await hass.async_block_till_done() - assert len(service_calls) == 4 + assert len(calls) == 4 await hass.async_block_till_done() assert ( - service_calls[3].data["some"] + calls[3].data["some"] == f"turn_on device - {entry.entity_id} - jammed - locking - 0:00:05" ) - hass.states.async_set(entry.entity_id, LockState.OPENING) + hass.states.async_set(entry.entity_id, STATE_OPENING) await hass.async_block_till_done() - assert len(service_calls) == 4 + assert len(calls) == 4 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=27)) await hass.async_block_till_done() - assert len(service_calls) == 5 + assert len(calls) == 5 await hass.async_block_till_done() assert ( - service_calls[4].data["some"] + calls[4].data["some"] == f"turn_on device - {entry.entity_id} - locking - opening - 0:00:05" ) diff --git a/tests/components/lock/test_init.py b/tests/components/lock/test_init.py index a80aa78cec2..f0547fbbeae 100644 --- a/tests/components/lock/test_init.py +++ b/tests/components/lock/test_init.py @@ -2,7 +2,6 @@ from __future__ import annotations -from enum import Enum import re from typing import Any @@ -16,9 +15,14 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, + STATE_JAMMED, + STATE_LOCKED, + STATE_LOCKING, + STATE_UNLOCKED, + STATE_UNLOCKING, LockEntityFeature, - LockState, ) +from homeassistant.const import STATE_OPEN, STATE_OPENING from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.entity_registry as er @@ -63,37 +67,37 @@ async def test_lock_states(hass: HomeAssistant, mock_lock_entity: MockLock) -> N mock_lock_entity._attr_is_locking = True assert mock_lock_entity.is_locking - assert mock_lock_entity.state == LockState.LOCKING + assert mock_lock_entity.state == STATE_LOCKING mock_lock_entity._attr_is_locked = True mock_lock_entity._attr_is_locking = False assert mock_lock_entity.is_locked - assert mock_lock_entity.state == LockState.LOCKED + assert mock_lock_entity.state == STATE_LOCKED mock_lock_entity._attr_is_unlocking = True assert mock_lock_entity.is_unlocking - assert mock_lock_entity.state == LockState.UNLOCKING + assert mock_lock_entity.state == STATE_UNLOCKING mock_lock_entity._attr_is_locked = False mock_lock_entity._attr_is_unlocking = False assert not mock_lock_entity.is_locked - assert mock_lock_entity.state == LockState.UNLOCKED + assert mock_lock_entity.state == STATE_UNLOCKED mock_lock_entity._attr_is_jammed = True assert mock_lock_entity.is_jammed - assert mock_lock_entity.state == LockState.JAMMED + assert mock_lock_entity.state == STATE_JAMMED assert not mock_lock_entity.is_locked mock_lock_entity._attr_is_jammed = False mock_lock_entity._attr_is_opening = True assert mock_lock_entity.is_opening - assert mock_lock_entity.state == LockState.OPENING + assert mock_lock_entity.state == STATE_OPENING assert mock_lock_entity.is_opening mock_lock_entity._attr_is_opening = False 
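The repeated swaps between LockState members and STATE_* constants in these lock hunks are value-neutral, since the enum members compare equal to the plain state strings stored on entities. A standalone illustration follows, assuming Python 3.11+ for StrEnum; LockStateSketch is a local stand-in, not the real homeassistant.components.lock.LockState.

from enum import StrEnum

STATE_LOCKED = "locked"
STATE_UNLOCKED = "unlocked"
STATE_JAMMED = "jammed"


class LockStateSketch(StrEnum):
    """Local stand-in for the LockState enum referenced on the removed lines."""

    LOCKED = "locked"
    UNLOCKED = "unlocked"
    JAMMED = "jammed"


# String enums compare equal to their underlying values, so asserting
# against either spelling checks the same entity state.
assert LockStateSketch.LOCKED == STATE_LOCKED == "locked"
assert LockStateSketch.UNLOCKED == STATE_UNLOCKED
assert LockStateSketch.JAMMED == STATE_JAMMED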
mock_lock_entity._attr_is_open = True assert not mock_lock_entity.is_opening - assert mock_lock_entity.state == LockState.OPEN + assert mock_lock_entity.state == STATE_OPEN assert not mock_lock_entity.is_opening assert mock_lock_entity.is_open @@ -389,35 +393,13 @@ def test_all() -> None: help_test_all(lock) -def _create_tuples( - enum: type[Enum], constant_prefix: str, remove_in_version: str -) -> list[tuple[Enum, str]]: - return [ - (enum_field, constant_prefix, remove_in_version) - for enum_field in enum - if enum_field - not in [ - lock.LockState.OPEN, - lock.LockState.OPENING, - ] - ] - - -@pytest.mark.parametrize( - ("enum", "constant_prefix", "remove_in_version"), - _create_tuples(lock.LockEntityFeature, "SUPPORT_", "2025.1") - + _create_tuples(lock.LockState, "STATE_", "2025.10"), -) +@pytest.mark.parametrize(("enum"), list(LockEntityFeature)) def test_deprecated_constants( caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, - remove_in_version: str, + enum: LockEntityFeature, ) -> None: """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, lock, enum, constant_prefix, remove_in_version - ) + import_and_test_deprecated_constant_enum(caplog, lock, enum, "SUPPORT_", "2025.1") def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: diff --git a/tests/components/logbook/common.py b/tests/components/logbook/common.py index afa8b7fcde5..67f12955581 100644 --- a/tests/components/logbook/common.py +++ b/tests/components/logbook/common.py @@ -7,7 +7,7 @@ from typing import Any from homeassistant.components import logbook from homeassistant.components.logbook import processor -from homeassistant.components.logbook.models import EventAsRow, LogbookConfig +from homeassistant.components.logbook.models import LogbookConfig from homeassistant.components.recorder.models import ( process_timestamp_to_utc_isoformat, ulid_to_bytes_or_none, @@ -18,8 +18,6 @@ from homeassistant.helpers import entity_registry as er from homeassistant.helpers.json import JSONEncoder import homeassistant.util.dt as dt_util -IDX_TO_NAME = dict(enumerate(EventAsRow._fields)) - class MockRow: """Minimal row mock.""" @@ -50,10 +48,6 @@ class MockRow: self.attributes = None self.context_only = False - def __getitem__(self, idx: int) -> Any: - """Get item.""" - return getattr(self, IDX_TO_NAME[idx]) - @property def time_fired_minute(self): """Minute the event was fired.""" @@ -79,7 +73,7 @@ def mock_humanify(hass_, rows): event_cache, entity_name_cache, include_entity_name=True, - timestamp=False, + format_time=processor._row_time_fired_isoformat, ) context_augmenter = processor.ContextAugmenter(logbook_run) return list( diff --git a/tests/components/logbook/test_init.py b/tests/components/logbook/test_init.py index 8ac7dde67ab..3534192a43e 100644 --- a/tests/components/logbook/test_init.py +++ b/tests/components/logbook/test_init.py @@ -1,9 +1,11 @@ """The tests for the logbook component.""" import asyncio +import collections from collections.abc import Callable from datetime import datetime, timedelta from http import HTTPStatus +import json from unittest.mock import Mock from freezegun import freeze_time @@ -11,11 +13,9 @@ import pytest import voluptuous as vol from homeassistant.components import logbook, recorder - -# pylint: disable-next=hass-component-root-import from homeassistant.components.alexa.smart_home import EVENT_ALEXA_SMART_HOME from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED -from 
homeassistant.components.logbook.models import EventAsRow, LazyEventPartialState +from homeassistant.components.logbook.models import LazyEventPartialState from homeassistant.components.logbook.processor import EventProcessor from homeassistant.components.logbook.queries.common import PSEUDO_EVENT_STATE_CHANGED from homeassistant.components.recorder import Recorder @@ -44,6 +44,7 @@ import homeassistant.core as ha from homeassistant.core import Event, HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS +from homeassistant.helpers.json import JSONEncoder from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -67,12 +68,12 @@ async def hass_(recorder_mock: Recorder, hass: HomeAssistant) -> HomeAssistant: @pytest.fixture -async def set_utc(hass: HomeAssistant) -> None: +async def set_utc(hass): """Set timezone to UTC.""" await hass.config.async_set_time_zone("UTC") -async def test_service_call_create_logbook_entry(hass_: HomeAssistant) -> None: +async def test_service_call_create_logbook_entry(hass_) -> None: """Test if service call create log book entry.""" calls = async_capture_events(hass_, logbook.EVENT_LOGBOOK_ENTRY) @@ -125,9 +126,8 @@ async def test_service_call_create_logbook_entry(hass_: HomeAssistant) -> None: assert last_call.data.get(logbook.ATTR_DOMAIN) == "logbook" -@pytest.mark.usefixtures("recorder_mock") async def test_service_call_create_logbook_entry_invalid_entity_id( - hass: HomeAssistant, + recorder_mock: Recorder, hass: HomeAssistant ) -> None: """Test if service call create log book entry with an invalid entity id.""" await async_setup_component(hass, "logbook", {}) @@ -156,9 +156,7 @@ async def test_service_call_create_logbook_entry_invalid_entity_id( assert events[0][logbook.ATTR_MESSAGE] == "is triggered" -async def test_service_call_create_log_book_entry_no_message( - hass_: HomeAssistant, -) -> None: +async def test_service_call_create_log_book_entry_no_message(hass_) -> None: """Test if service call create log book entry without message.""" calls = async_capture_events(hass_, logbook.EVENT_LOGBOOK_ENTRY) @@ -174,7 +172,7 @@ async def test_service_call_create_log_book_entry_no_message( async def test_filter_sensor( - hass_: HomeAssistant, hass_client: ClientSessionGenerator + hass_: ha.HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test numeric sensors are filtered.""" @@ -222,7 +220,7 @@ async def test_filter_sensor( _assert_entry(entries[2], name="ble", entity_id=entity_id4, state="10") -async def test_home_assistant_start_stop_not_grouped(hass_: HomeAssistant) -> None: +async def test_home_assistant_start_stop_not_grouped(hass_) -> None: """Test if HA start and stop events are no longer grouped.""" await async_setup_component(hass_, "homeassistant", {}) await hass_.async_block_till_done() @@ -239,7 +237,7 @@ async def test_home_assistant_start_stop_not_grouped(hass_: HomeAssistant) -> No assert_entry(entries[1], name="Home Assistant", message="started", domain=ha.DOMAIN) -async def test_home_assistant_start(hass_: HomeAssistant) -> None: +async def test_home_assistant_start(hass_) -> None: """Test if HA start is not filtered or converted into a restart.""" await async_setup_component(hass_, "homeassistant", {}) await hass_.async_block_till_done() @@ -259,7 +257,7 @@ async def test_home_assistant_start(hass_: HomeAssistant) -> None: assert_entry(entries[1], pointA, "bla", entity_id=entity_id) -def 
test_process_custom_logbook_entries(hass_: HomeAssistant) -> None: +def test_process_custom_logbook_entries(hass_) -> None: """Test if custom log book entries get added as an entry.""" name = "Nice name" message = "has a custom entry" @@ -326,27 +324,55 @@ def create_state_changed_event_from_old_new( entity_id, event_time_fired, old_state, new_state ): """Create a state changed event from a old and new state.""" - row = EventAsRow( - row_id=1, - event_type=PSEUDO_EVENT_STATE_CHANGED, - event_data="{}", - time_fired_ts=dt_util.utc_to_timestamp(event_time_fired), - context_id_bin=None, - context_user_id_bin=None, - context_parent_id_bin=None, - state=new_state and new_state.get("state"), - entity_id=entity_id, - icon=None, - context_only=False, - data=None, - context=None, + attributes = {} + if new_state is not None: + attributes = new_state.get("attributes") + attributes_json = json.dumps(attributes, cls=JSONEncoder) + row = collections.namedtuple( + "Row", + [ + "event_type", + "event_data", + "time_fired", + "time_fired_ts", + "context_id_bin", + "context_user_id_bin", + "context_parent_id_bin", + "state", + "entity_id", + "domain", + "attributes", + "state_id", + "old_state_id", + "shared_attrs", + "shared_data", + "context_only", + ], ) + + row.event_type = PSEUDO_EVENT_STATE_CHANGED + row.event_data = "{}" + row.shared_data = "{}" + row.attributes = attributes_json + row.shared_attrs = attributes_json + row.time_fired = event_time_fired + row.time_fired_ts = dt_util.utc_to_timestamp(event_time_fired) + row.state = new_state and new_state.get("state") + row.entity_id = entity_id + row.domain = entity_id and ha.split_entity_id(entity_id)[0] + row.context_only = False + row.context_id_bin = None + row.friendly_name = None + row.icon = None + row.context_user_id_bin = None + row.context_parent_id_bin = None + row.old_state_id = old_state and 1 + row.state_id = new_state and 1 return LazyEventPartialState(row, {}) -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_view( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view.""" await async_setup_component(hass, "logbook", {}) @@ -356,9 +382,8 @@ async def test_logbook_view( assert response.status == HTTPStatus.OK -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_view_invalid_start_date_time( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with an invalid date time.""" await async_setup_component(hass, "logbook", {}) @@ -368,9 +393,8 @@ async def test_logbook_view_invalid_start_date_time( assert response.status == HTTPStatus.BAD_REQUEST -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_view_invalid_end_date_time( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view.""" await async_setup_component(hass, "logbook", {}) @@ -382,10 +406,11 @@ async def test_logbook_view_invalid_end_date_time( assert response.status == HTTPStatus.BAD_REQUEST -@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_logbook_view_period_entity( + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, + set_utc, ) -> None: """Test the logbook view with period and entity.""" await async_setup_component(hass, 
"logbook", {}) @@ -467,9 +492,8 @@ async def test_logbook_view_period_entity( assert response_json[0]["entity_id"] == entity_id_test -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_describe_event( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test teaching logbook about a new event.""" @@ -516,9 +540,8 @@ async def test_logbook_describe_event( assert event["domain"] == "test_domain" -@pytest.mark.usefixtures("recorder_mock") async def test_exclude_described_event( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test exclusions of events that are described by another integration.""" name = "My Automation Rule" @@ -526,7 +549,7 @@ async def test_exclude_described_event( entity_id2 = "automation.included_rule" entity_id3 = "sensor.excluded_domain" - def _describe(event: Event) -> dict[str, str]: + def _describe(event): """Describe an event.""" return { "name": "Test Name", @@ -534,12 +557,7 @@ async def test_exclude_described_event( "entity_id": event.data[ATTR_ENTITY_ID], } - def async_describe_events( - hass: HomeAssistant, - async_describe_event: Callable[ - [str, str, Callable[[Event], dict[str, str]]], None - ], - ) -> None: + def async_describe_events(hass, async_describe_event): """Mock to describe events.""" async_describe_event("automation", "some_automation_event", _describe) async_describe_event("sensor", "some_event", _describe) @@ -593,9 +611,8 @@ async def test_exclude_described_event( assert event["entity_id"] == "automation.included_rule" -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_view_end_time_entity( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity.""" await async_setup_component(hass, "logbook", {}) @@ -654,9 +671,8 @@ async def test_logbook_view_end_time_entity( assert response_json[0]["entity_id"] == entity_id_test -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_entity_filter_with_automations( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -741,9 +757,8 @@ async def test_logbook_entity_filter_with_automations( assert json_dict[0]["entity_id"] == entity_id_second -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_entity_no_longer_in_state_machine( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with an entity that hass been removed from the state machine.""" await async_setup_component(hass, "logbook", {}) @@ -781,10 +796,11 @@ async def test_logbook_entity_no_longer_in_state_machine( assert json_dict[0]["name"] == "area 001" -@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_filter_continuous_sensor_values( + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, + set_utc, ) -> None: """Test remove continuous sensor events from logbook.""" await async_setup_component(hass, "logbook", {}) @@ -824,10 +840,11 @@ async def 
test_filter_continuous_sensor_values( assert response_json[1]["entity_id"] == entity_id_third -@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_exclude_new_entities( + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, + set_utc, ) -> None: """Test if events are excluded on first update.""" await asyncio.gather( @@ -865,10 +882,11 @@ async def test_exclude_new_entities( assert response_json[1]["message"] == "started" -@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_exclude_removed_entities( + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, + set_utc, ) -> None: """Test if events are excluded on last update.""" await asyncio.gather( @@ -913,10 +931,11 @@ async def test_exclude_removed_entities( assert response_json[2]["entity_id"] == entity_id2 -@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_exclude_attribute_changes( + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, + set_utc, ) -> None: """Test if events of attribute changes are filtered.""" await asyncio.gather( @@ -957,9 +976,8 @@ async def test_exclude_attribute_changes( assert response_json[2]["entity_id"] == "light.kitchen" -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_entity_context_id( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -1111,9 +1129,8 @@ async def test_logbook_entity_context_id( assert json_dict[7]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_context_id_automation_script_started_manually( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook populates context_ids for scripts and automations started manually.""" await asyncio.gather( @@ -1204,9 +1221,8 @@ async def test_logbook_context_id_automation_script_started_manually( assert json_dict[4]["context_domain"] == "script" -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_entity_context_parent_id( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view links events via context parent_id.""" await asyncio.gather( @@ -1387,9 +1403,8 @@ async def test_logbook_entity_context_parent_id( assert json_dict[8]["context_user_id"] == "485cacf93ef84d25a99ced3126b921d2" -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_context_from_template( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -1478,9 +1493,8 @@ async def test_logbook_context_from_template( assert json_dict[5]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with a single entity and .""" await async_setup_component(hass, 
"logbook", {}) @@ -1550,9 +1564,8 @@ async def test_logbook_( assert json_dict[1]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_many_entities_multiple_calls( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with a many entities called multiple times.""" await async_setup_component(hass, "logbook", {}) @@ -1623,9 +1636,8 @@ async def test_logbook_many_entities_multiple_calls( assert len(json_dict) == 0 -@pytest.mark.usefixtures("recorder_mock") async def test_custom_log_entry_discoverable_via_( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if a custom log entry is later discoverable via .""" await async_setup_component(hass, "logbook", {}) @@ -1661,9 +1673,8 @@ async def test_custom_log_entry_discoverable_via_( assert json_dict[0]["entity_id"] == "switch.test_switch" -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_multiple_entities( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with a multiple entities.""" await async_setup_component(hass, "logbook", {}) @@ -1788,9 +1799,8 @@ async def test_logbook_multiple_entities( assert json_dict[3]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_invalid_entity( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with requesting an invalid entity.""" await async_setup_component(hass, "logbook", {}) @@ -1809,9 +1819,8 @@ async def test_logbook_invalid_entity( assert response.status == HTTPStatus.INTERNAL_SERVER_ERROR -@pytest.mark.usefixtures("recorder_mock") async def test_icon_and_state( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test to ensure state and custom icons are returned.""" await asyncio.gather( @@ -1855,9 +1864,8 @@ async def test_icon_and_state( assert response_json[2]["state"] == STATE_OFF -@pytest.mark.usefixtures("recorder_mock") async def test_fire_logbook_entries( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test many logbook entry calls.""" await async_setup_component(hass, "logbook", {}) @@ -1894,9 +1902,8 @@ async def test_fire_logbook_entries( assert len(response_json) == 11 -@pytest.mark.usefixtures("recorder_mock") async def test_exclude_events_domain( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain is excluded in config.""" entity_id = "switch.bla" @@ -1931,9 +1938,8 @@ async def test_exclude_events_domain( _assert_entry(entries[1], name="blu", entity_id=entity_id2) -@pytest.mark.usefixtures("recorder_mock") async def test_exclude_events_domain_glob( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> 
None: """Test if events are filtered if domain or glob is excluded in config.""" entity_id = "switch.bla" @@ -1977,9 +1983,8 @@ async def test_exclude_events_domain_glob( _assert_entry(entries[1], name="blu", entity_id=entity_id2) -@pytest.mark.usefixtures("recorder_mock") async def test_include_events_entity( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if entity is included in config.""" entity_id = "sensor.bla" @@ -2020,9 +2025,8 @@ async def test_include_events_entity( _assert_entry(entries[1], name="blu", entity_id=entity_id2) -@pytest.mark.usefixtures("recorder_mock") async def test_exclude_events_entity( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if entity is excluded in config.""" entity_id = "sensor.bla" @@ -2057,9 +2061,8 @@ async def test_exclude_events_entity( _assert_entry(entries[1], name="blu", entity_id=entity_id2) -@pytest.mark.usefixtures("recorder_mock") async def test_include_events_domain( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain is included in config.""" assert await async_setup_component(hass, "alexa", {}) @@ -2102,9 +2105,8 @@ async def test_include_events_domain( _assert_entry(entries[2], name="blu", entity_id=entity_id2) -@pytest.mark.usefixtures("recorder_mock") async def test_include_events_domain_glob( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain or glob is included in config.""" assert await async_setup_component(hass, "alexa", {}) @@ -2162,9 +2164,8 @@ async def test_include_events_domain_glob( _assert_entry(entries[3], name="included", entity_id=entity_id3) -@pytest.mark.usefixtures("recorder_mock") async def test_include_exclude_events_no_globs( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if include and exclude is configured.""" entity_id = "switch.bla" @@ -2221,9 +2222,8 @@ async def test_include_exclude_events_no_globs( _assert_entry(entries[5], name="keep", entity_id=entity_id4, state="10") -@pytest.mark.usefixtures("recorder_mock") async def test_include_exclude_events_with_glob_filters( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if include and exclude is configured.""" entity_id = "switch.bla" @@ -2288,9 +2288,8 @@ async def test_include_exclude_events_with_glob_filters( _assert_entry(entries[6], name="included", entity_id=entity_id5, state="30") -@pytest.mark.usefixtures("recorder_mock") async def test_empty_config( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test we can handle an empty entity filter.""" entity_id = "sensor.blu" @@ -2323,9 +2322,8 @@ async def test_empty_config( _assert_entry(entries[1], name="blu", entity_id=entity_id) -@pytest.mark.usefixtures("recorder_mock") async def 
test_context_filter( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test we can filter by context.""" assert await async_setup_component(hass, "logbook", {}) @@ -2401,9 +2399,8 @@ def _assert_entry( assert state == entry["state"] -@pytest.mark.usefixtures("recorder_mock") async def test_get_events( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test logbook get_events.""" now = dt_util.utcnow() @@ -2522,9 +2519,8 @@ async def test_get_events( assert isinstance(results[0]["when"], float) -@pytest.mark.usefixtures("recorder_mock") async def test_get_events_future_start_time( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events with a future start time.""" await async_setup_component(hass, "logbook", {}) @@ -2548,9 +2544,8 @@ async def test_get_events_future_start_time( assert len(results) == 0 -@pytest.mark.usefixtures("recorder_mock") async def test_get_events_bad_start_time( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events bad start time.""" await async_setup_component(hass, "logbook", {}) @@ -2569,9 +2564,8 @@ async def test_get_events_bad_start_time( assert response["error"]["code"] == "invalid_start_time" -@pytest.mark.usefixtures("recorder_mock") async def test_get_events_bad_end_time( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events bad end time.""" now = dt_util.utcnow() @@ -2592,9 +2586,8 @@ async def test_get_events_bad_end_time( assert response["error"]["code"] == "invalid_end_time" -@pytest.mark.usefixtures("recorder_mock") async def test_get_events_invalid_filters( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events invalid filters.""" await async_setup_component(hass, "logbook", {}) @@ -2623,8 +2616,8 @@ async def test_get_events_invalid_filters( assert response["error"]["code"] == "invalid_format" -@pytest.mark.usefixtures("recorder_mock") async def test_get_events_with_device_ids( + recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, device_registry: dr.DeviceRegistry, @@ -2764,9 +2757,8 @@ async def test_get_events_with_device_ids( assert isinstance(results[3]["when"], float) -@pytest.mark.usefixtures("recorder_mock") async def test_logbook_select_entities_context_id( - hass: HomeAssistant, hass_client: ClientSessionGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -2900,9 +2892,8 @@ async def test_logbook_select_entities_context_id( assert json_dict[3]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" -@pytest.mark.usefixtures("recorder_mock") async def test_get_events_with_context_state( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test logbook get_events with a context state.""" now 
= dt_util.utcnow() @@ -2966,8 +2957,9 @@ async def test_get_events_with_context_state( assert "context_event_type" not in results[3] -@pytest.mark.usefixtures("recorder_mock") -async def test_logbook_with_empty_config(hass: HomeAssistant) -> None: +async def test_logbook_with_empty_config( + recorder_mock: Recorder, hass: HomeAssistant +) -> None: """Test we handle a empty configuration.""" assert await async_setup_component( hass, @@ -2980,8 +2972,9 @@ async def test_logbook_with_empty_config(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.mark.usefixtures("recorder_mock") -async def test_logbook_with_non_iterable_entity_filter(hass: HomeAssistant) -> None: +async def test_logbook_with_non_iterable_entity_filter( + recorder_mock: Recorder, hass: HomeAssistant +) -> None: """Test we handle a non-iterable entity filter.""" assert await async_setup_component( hass, diff --git a/tests/components/logbook/test_models.py b/tests/components/logbook/test_models.py index cfdd7efc727..7021711014f 100644 --- a/tests/components/logbook/test_models.py +++ b/tests/components/logbook/test_models.py @@ -2,26 +2,20 @@ from unittest.mock import Mock -from homeassistant.components.logbook.models import EventAsRow, LazyEventPartialState +from homeassistant.components.logbook.models import LazyEventPartialState def test_lazy_event_partial_state_context() -> None: """Test we can extract context from a lazy event partial state.""" state = LazyEventPartialState( - EventAsRow( - row_id=1, - event_type="event_type", - event_data={}, - time_fired_ts=1, + Mock( context_id_bin=b"1234123412341234", context_user_id_bin=b"1234123412341234", context_parent_id_bin=b"4444444444444444", - state="state", + event_data={}, + event_type="event_type", entity_id="entity_id", - icon="icon", - context_only=False, - data={}, - context=Mock(), + state="state", ), {}, ) diff --git a/tests/components/logbook/test_websocket_api.py b/tests/components/logbook/test_websocket_api.py index 50139d0f4f7..ac653737614 100644 --- a/tests/components/logbook/test_websocket_api.py +++ b/tests/components/logbook/test_websocket_api.py @@ -3,7 +3,6 @@ import asyncio from collections.abc import Callable from datetime import timedelta -from typing import Any from unittest.mock import ANY, patch from freezegun import freeze_time @@ -32,10 +31,9 @@ from homeassistant.const import ( STATE_OFF, STATE_ON, ) -from homeassistant.core import Event, HomeAssistant, State, callback +from homeassistant.core import Event, HomeAssistant, State from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS -from homeassistant.helpers.event import async_track_state_change_event from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -48,6 +46,12 @@ from tests.components.recorder.common import ( from tests.typing import RecorderInstanceGenerator, WebSocketGenerator +@pytest.fixture +async def set_utc(hass): + """Set timezone to UTC.""" + await hass.config.async_set_time_zone("UTC") + + def listeners_without_writes(listeners: dict[str, int]) -> dict[str, int]: """Return listeners without final write listeners since we are not testing for these.""" return { @@ -1181,10 +1185,6 @@ async def test_subscribe_unsubscribe_logbook_stream( await async_wait_recording_done(hass) websocket_client = await hass_ws_client() init_listeners = hass.bus.async_listeners() - init_listeners = { - **init_listeners, - EVENT_HOMEASSISTANT_START: 
init_listeners[EVENT_HOMEASSISTANT_START] - 1, - } await websocket_client.send_json( {"id": 7, "type": "logbook/event_stream", "start_time": now.isoformat()} ) @@ -2965,79 +2965,3 @@ async def test_subscribe_all_entities_are_continuous_with_device( assert listeners_without_writes( hass.bus.async_listeners() ) == listeners_without_writes(init_listeners) - - -@pytest.mark.parametrize("params", [{"entity_ids": ["binary_sensor.is_light"]}, {}]) -async def test_live_stream_with_changed_state_change( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - params: dict[str, Any], -) -> None: - """Test the live logbook stream with chained events.""" - config = {recorder.CONF_COMMIT_INTERVAL: 0.5} - await async_setup_recorder_instance(hass, config) - now = dt_util.utcnow() - await asyncio.gather( - *[ - async_setup_component(hass, comp, {}) - for comp in ("homeassistant", "logbook") - ] - ) - - hass.states.async_set("binary_sensor.is_light", "unavailable") - hass.states.async_set("binary_sensor.is_light", "unknown") - await async_wait_recording_done(hass) - - @callback - def auto_off_listener(event): - hass.states.async_set("binary_sensor.is_light", STATE_OFF) - - async_track_state_change_event(hass, ["binary_sensor.is_light"], auto_off_listener) - - websocket_client = await hass_ws_client() - init_listeners = hass.bus.async_listeners() - await websocket_client.send_json( - { - "id": 7, - "type": "logbook/event_stream", - "start_time": now.isoformat(), - **params, - } - ) - - msg = await asyncio.wait_for(websocket_client.receive_json(), 2) - assert msg["id"] == 7 - assert msg["type"] == TYPE_RESULT - assert msg["success"] - - await hass.async_block_till_done() - hass.states.async_set("binary_sensor.is_light", STATE_ON) - - recieved_rows = [] - while len(recieved_rows) < 3: - msg = await asyncio.wait_for(websocket_client.receive_json(), 2.5) - assert msg["id"] == 7 - assert msg["type"] == "event" - recieved_rows.extend(msg["event"]["events"]) - - # Make sure we get rows back in order - assert recieved_rows == [ - {"entity_id": "binary_sensor.is_light", "state": "unknown", "when": ANY}, - {"entity_id": "binary_sensor.is_light", "state": "on", "when": ANY}, - {"entity_id": "binary_sensor.is_light", "state": "off", "when": ANY}, - ] - - await websocket_client.send_json( - {"id": 8, "type": "unsubscribe_events", "subscription": 7} - ) - msg = await asyncio.wait_for(websocket_client.receive_json(), 2) - - assert msg["id"] == 8 - assert msg["type"] == TYPE_RESULT - assert msg["success"] - - # Check our listener got unsubscribed - assert listeners_without_writes( - hass.bus.async_listeners() - ) == listeners_without_writes(init_listeners) diff --git a/tests/components/logger/test_init.py b/tests/components/logger/test_init.py index 24e58a77226..d6df1f92a72 100644 --- a/tests/components/logger/test_init.py +++ b/tests/components/logger/test_init.py @@ -226,7 +226,7 @@ async def test_can_set_level_from_store( _reset_logging() -async def _assert_log_levels(hass: HomeAssistant) -> None: +async def _assert_log_levels(hass): assert logging.getLogger(UNCONFIG_NS).level == logging.NOTSET assert logging.getLogger(UNCONFIG_NS).isEnabledFor(logging.CRITICAL) is True assert ( diff --git a/tests/components/logi_circle/__init__.py b/tests/components/logi_circle/__init__.py new file mode 100644 index 00000000000..d2e2fbb8fdb --- /dev/null +++ b/tests/components/logi_circle/__init__.py @@ -0,0 +1 @@ +"""Tests for the Logi Circle component.""" diff --git 
a/tests/components/logi_circle/test_config_flow.py b/tests/components/logi_circle/test_config_flow.py new file mode 100644 index 00000000000..2525354598d --- /dev/null +++ b/tests/components/logi_circle/test_config_flow.py @@ -0,0 +1,219 @@ +"""Tests for Logi Circle config flow.""" + +import asyncio +from http import HTTPStatus +from unittest.mock import AsyncMock, Mock, patch + +import pytest + +from homeassistant import config_entries +from homeassistant.components.http import KEY_HASS +from homeassistant.components.logi_circle import config_flow +from homeassistant.components.logi_circle.config_flow import ( + DOMAIN, + AuthorizationFailed, + LogiCircleAuthCallbackView, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import AbortFlow, FlowResultType +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +class MockRequest: + """Mock request passed to HomeAssistantView.""" + + def __init__(self, hass, query): + """Init request object.""" + self.app = {KEY_HASS: hass} + self.query = query + + +def init_config_flow(hass): + """Init a configuration flow.""" + config_flow.register_flow_implementation( + hass, + DOMAIN, + client_id="id", + client_secret="secret", + api_key="123", + redirect_uri="http://example.com", + sensors=None, + ) + flow = config_flow.LogiCircleFlowHandler() + flow._get_authorization_url = Mock(return_value="http://example.com") + flow.hass = hass + return flow + + +@pytest.fixture +def mock_logi_circle(): + """Mock logi_circle.""" + with patch( + "homeassistant.components.logi_circle.config_flow.LogiCircle" + ) as logi_circle: + future = asyncio.Future() + future.set_result({"accountId": "testId"}) + LogiCircle = logi_circle() + LogiCircle.authorize = AsyncMock(return_value=True) + LogiCircle.close = AsyncMock(return_value=True) + LogiCircle.account = future + LogiCircle.authorize_url = "http://authorize.url" + yield LogiCircle + + +async def test_step_import(hass: HomeAssistant, mock_logi_circle) -> None: + """Test that we trigger import when configuring with client.""" + flow = init_config_flow(hass) + + result = await flow.async_step_import() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + + +async def test_full_flow_implementation(hass: HomeAssistant, mock_logi_circle) -> None: + """Test registering an implementation and finishing flow works.""" + config_flow.register_flow_implementation( + hass, + "test-other", + client_id=None, + client_secret=None, + api_key=None, + redirect_uri=None, + sensors=None, + ) + flow = init_config_flow(hass) + + result = await flow.async_step_user() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await flow.async_step_user({"flow_impl": "test-other"}) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["description_placeholders"] == { + "authorization_url": "http://example.com" + } + + result = await flow.async_step_code("123ABC") + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Logi Circle ({})".format("testId") + + +async def test_we_reprompt_user_to_follow_link(hass: HomeAssistant) -> None: + """Test we prompt user to follow link if previously prompted.""" + flow = init_config_flow(hass) + + result = await flow.async_step_auth("dummy") + assert result["errors"]["base"] == "follow_link" + + +async def test_abort_if_no_implementation_registered(hass: HomeAssistant) -> None: + """Test 
we abort if no implementation is registered.""" + flow = config_flow.LogiCircleFlowHandler() + flow.hass = hass + + result = await flow.async_step_user() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "missing_configuration" + + +async def test_abort_if_already_setup(hass: HomeAssistant) -> None: + """Test we abort if Logi Circle is already setup.""" + flow = init_config_flow(hass) + MockConfigEntry(domain=config_flow.DOMAIN).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, + context={"source": config_entries.SOURCE_USER}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + with pytest.raises(AbortFlow): + result = await flow.async_step_code() + + result = await flow.async_step_auth() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "external_setup" + + +@pytest.mark.parametrize( + ("side_effect", "error"), + [ + (TimeoutError, "authorize_url_timeout"), + (AuthorizationFailed, "invalid_auth"), + ], +) +async def test_abort_if_authorize_fails( + hass: HomeAssistant, mock_logi_circle, side_effect, error +) -> None: + """Test we abort if authorizing fails.""" + flow = init_config_flow(hass) + mock_logi_circle.authorize.side_effect = side_effect + + result = await flow.async_step_code("123ABC") + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "external_error" + + result = await flow.async_step_auth() + assert result["errors"]["base"] == error + + +async def test_not_pick_implementation_if_only_one(hass: HomeAssistant) -> None: + """Test we bypass picking implementation if we have one flow_imp.""" + flow = init_config_flow(hass) + + result = await flow.async_step_user() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + + +async def test_gen_auth_url(hass: HomeAssistant, mock_logi_circle) -> None: + """Test generating authorize URL from Logi Circle API.""" + config_flow.register_flow_implementation( + hass, + "test-auth-url", + client_id="id", + client_secret="secret", + api_key="123", + redirect_uri="http://example.com", + sensors=None, + ) + flow = config_flow.LogiCircleFlowHandler() + flow.hass = hass + flow.flow_impl = "test-auth-url" + await async_setup_component(hass, "http", {}) + + result = flow._get_authorization_url() + assert result == "http://authorize.url" + + +async def test_callback_view_rejects_missing_code(hass: HomeAssistant) -> None: + """Test the auth callback view rejects requests with no code.""" + view = LogiCircleAuthCallbackView() + resp = await view.get(MockRequest(hass, {})) + + assert resp.status == HTTPStatus.BAD_REQUEST + + +async def test_callback_view_accepts_code( + hass: HomeAssistant, mock_logi_circle +) -> None: + """Test the auth callback view handles requests with auth code.""" + init_config_flow(hass) + view = LogiCircleAuthCallbackView() + + resp = await view.get(MockRequest(hass, {"code": "456"})) + assert resp.status == HTTPStatus.OK + + await hass.async_block_till_done() + mock_logi_circle.authorize.assert_called_with("456") diff --git a/tests/components/logi_circle/test_init.py b/tests/components/logi_circle/test_init.py new file mode 100644 index 00000000000..f8bf8306609 --- /dev/null +++ 
b/tests/components/logi_circle/test_init.py @@ -0,0 +1,68 @@ +"""Tests for the Logi Circle integration.""" + +import asyncio +from unittest.mock import AsyncMock, Mock, patch + +import pytest + +from homeassistant.components.logi_circle import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir + +from tests.common import MockConfigEntry + + +@pytest.fixture(name="disable_platforms") +async def disable_platforms_fixture(hass): + """Disable logi_circle platforms.""" + with patch("homeassistant.components.logi_circle.PLATFORMS", []): + yield + + +@pytest.fixture +def mock_logi_circle(): + """Mock logi_circle.""" + + auth_provider_mock = Mock() + auth_provider_mock.close = AsyncMock() + auth_provider_mock.clear_authorization = AsyncMock() + + with patch("homeassistant.components.logi_circle.LogiCircle") as logi_circle: + future = asyncio.Future() + future.set_result({"accountId": "testId"}) + LogiCircle = logi_circle() + LogiCircle.auth_provider = auth_provider_mock + LogiCircle.synchronize_cameras = AsyncMock() + yield LogiCircle + + +async def test_repair_issue( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + disable_platforms, + mock_logi_circle, +) -> None: + """Test the LogiCircle configuration entry loading/unloading handles the repair.""" + config_entry = MockConfigEntry( + title="Example 1", + domain=DOMAIN, + data={ + "api_key": "blah", + "client_id": "blah", + "client_secret": "blah", + "redirect_uri": "blah", + }, + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) + + # Remove the entry + await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) is None diff --git a/tests/components/loqed/conftest.py b/tests/components/loqed/conftest.py index ddad8949d7d..57ef19d0fcb 100644 --- a/tests/components/loqed/conftest.py +++ b/tests/components/loqed/conftest.py @@ -1,12 +1,12 @@ """Contains fixtures for Loqed tests.""" -from collections.abc import AsyncGenerator import json from typing import Any from unittest.mock import AsyncMock, Mock, patch from loqedAPI import loqed import pytest +from typing_extensions import AsyncGenerator from homeassistant.components.loqed import DOMAIN from homeassistant.components.loqed.const import CONF_CLOUDHOOK_URL diff --git a/tests/components/loqed/test_lock.py b/tests/components/loqed/test_lock.py index 89a7888571a..5fd00b66c43 100644 --- a/tests/components/loqed/test_lock.py +++ b/tests/components/loqed/test_lock.py @@ -2,7 +2,6 @@ from loqedAPI import loqed -from homeassistant.components.lock import LockState from homeassistant.components.loqed import LoqedDataCoordinator from homeassistant.components.loqed.const import DOMAIN from homeassistant.const import ( @@ -10,6 +9,8 @@ from homeassistant.const import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, + STATE_LOCKED, + STATE_UNLOCKED, ) from homeassistant.core import HomeAssistant @@ -26,7 +27,7 @@ async def test_lock_entity( state = hass.states.get(entity_id) assert state - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED async def test_lock_responds_to_bolt_state_updates( @@ -42,7 +43,7 @@ 
async def test_lock_responds_to_bolt_state_updates( state = hass.states.get(entity_id) assert state - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKED async def test_lock_transition_to_unlocked( diff --git a/tests/components/lovelace/test_cast.py b/tests/components/lovelace/test_cast.py index dc57975701d..632ea731d0c 100644 --- a/tests/components/lovelace/test_cast.py +++ b/tests/components/lovelace/test_cast.py @@ -1,15 +1,15 @@ """Test the Lovelace Cast platform.""" -from collections.abc import AsyncGenerator, Generator from time import time from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.lovelace import cast as lovelace_cast from homeassistant.components.media_player import MediaClass +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component @@ -30,7 +30,7 @@ def mock_onboarding_done() -> Generator[MagicMock]: @pytest.fixture -async def mock_https_url(hass: HomeAssistant) -> None: +async def mock_https_url(hass): """Mock valid URL.""" await async_process_ha_core_config( hass, @@ -39,7 +39,7 @@ async def mock_https_url(hass: HomeAssistant) -> None: @pytest.fixture -async def mock_yaml_dashboard(hass: HomeAssistant) -> AsyncGenerator[None]: +async def mock_yaml_dashboard(hass): """Mock the content of a YAML dashboard.""" # Set up a YAML dashboard with 2 views. assert await async_setup_component( @@ -116,8 +116,9 @@ async def test_browse_media_error(hass: HomeAssistant) -> None: ) -@pytest.mark.usefixtures("mock_yaml_dashboard", "mock_https_url") -async def test_browse_media(hass: HomeAssistant) -> None: +async def test_browse_media( + hass: HomeAssistant, mock_yaml_dashboard, mock_https_url +) -> None: """Test browse media.""" top_level_items = await lovelace_cast.async_browse_media( hass, "lovelace", "", lovelace_cast.CAST_TYPE_CHROMECAST @@ -180,8 +181,7 @@ async def test_browse_media(hass: HomeAssistant) -> None: ) -@pytest.mark.usefixtures("mock_yaml_dashboard") -async def test_play_media(hass: HomeAssistant) -> None: +async def test_play_media(hass: HomeAssistant, mock_yaml_dashboard) -> None: """Test playing media.""" calls = async_mock_service(hass, "cast", "show_lovelace_view") diff --git a/tests/components/lovelace/test_dashboard.py b/tests/components/lovelace/test_dashboard.py index 3a01e20c1fb..7577c4dcc0d 100644 --- a/tests/components/lovelace/test_dashboard.py +++ b/tests/components/lovelace/test_dashboard.py @@ -1,11 +1,11 @@ """Test the Lovelace initialization.""" -from collections.abc import Generator import time from typing import Any from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components import frontend from homeassistant.components.lovelace import const, dashboard diff --git a/tests/components/lovelace/test_init.py b/tests/components/lovelace/test_init.py index 14d93d8302f..dc111ab601e 100644 --- a/tests/components/lovelace/test_init.py +++ b/tests/components/lovelace/test_init.py @@ -1,10 +1,10 @@ """Test the Lovelace initialization.""" -from collections.abc import Generator from typing import Any from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.setup import 
async_setup_component diff --git a/tests/components/lovelace/test_system_health.py b/tests/components/lovelace/test_system_health.py index 251153fe419..d53ebf2871f 100644 --- a/tests/components/lovelace/test_system_health.py +++ b/tests/components/lovelace/test_system_health.py @@ -1,10 +1,10 @@ """Tests for Lovelace system health.""" -from collections.abc import Generator from typing import Any from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.lovelace import dashboard from homeassistant.core import HomeAssistant @@ -72,6 +72,6 @@ async def test_system_health_info_yaml_not_found(hass: HomeAssistant) -> None: assert info == { "dashboards": 1, "mode": "yaml", - "error": f"{hass.config.path('ui-lovelace.yaml')} not found", + "error": "{} not found".format(hass.config.path("ui-lovelace.yaml")), "resources": 0, } diff --git a/tests/components/luftdaten/conftest.py b/tests/components/luftdaten/conftest.py index c3daa390e49..e1aac7caeb0 100644 --- a/tests/components/luftdaten/conftest.py +++ b/tests/components/luftdaten/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.luftdaten.const import CONF_SENSOR_ID, DOMAIN from homeassistant.const import CONF_SHOW_ON_MAP diff --git a/tests/components/lupusec/test_config_flow.py b/tests/components/lupusec/test_config_flow.py index f354eaf0644..e106bbd5001 100644 --- a/tests/components/lupusec/test_config_flow.py +++ b/tests/components/lupusec/test_config_flow.py @@ -153,3 +153,88 @@ async def test_flow_user_init_data_already_configured(hass: HomeAssistant) -> No assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("mock_import_step", "mock_title"), + [ + (MOCK_IMPORT_STEP, MOCK_IMPORT_STEP[CONF_IP_ADDRESS]), + (MOCK_IMPORT_STEP_NAME, MOCK_IMPORT_STEP_NAME[CONF_NAME]), + ], +) +async def test_flow_source_import( + hass: HomeAssistant, mock_import_step, mock_title +) -> None: + """Test configuration import from YAML.""" + with ( + patch( + "homeassistant.components.lupusec.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + patch( + "homeassistant.components.lupusec.config_flow.lupupy.Lupusec", + ) as mock_initialize_lupusec, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data=mock_import_step, + ) + + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == mock_title + assert result["data"] == MOCK_DATA_STEP + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_initialize_lupusec.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (LupusecException("Test lupusec exception"), "cannot_connect"), + (JSONDecodeError("Test JSONDecodeError", "test", 1), "cannot_connect"), + (Exception("Test unknown exception"), "unknown"), + ], +) +async def test_flow_source_import_error_and_recover( + hass: HomeAssistant, raise_error, text_error +) -> None: + """Test exceptions and recovery.""" + + with patch( + "homeassistant.components.lupusec.config_flow.lupupy.Lupusec", + side_effect=raise_error, + ) as mock_initialize_lupusec: + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + 
data=MOCK_IMPORT_STEP, + ) + + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == text_error + assert len(mock_initialize_lupusec.mock_calls) == 1 + + +async def test_flow_source_import_already_configured(hass: HomeAssistant) -> None: + """Test duplicate config entry..""" + + entry = MockConfigEntry( + domain=DOMAIN, + title=MOCK_DATA_STEP[CONF_HOST], + data=MOCK_DATA_STEP, + ) + + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data=MOCK_IMPORT_STEP, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/lutron/conftest.py b/tests/components/lutron/conftest.py index f2106f736dc..90f96f1783d 100644 --- a/tests/components/lutron/conftest.py +++ b/tests/components/lutron/conftest.py @@ -1,9 +1,9 @@ """Provide common Lutron fixtures and mocks.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/lutron_caseta/__init__.py b/tests/components/lutron_caseta/__init__.py index b27d30ac31f..9b25e2a0164 100644 --- a/tests/components/lutron_caseta/__init__.py +++ b/tests/components/lutron_caseta/__init__.py @@ -101,7 +101,7 @@ async def async_setup_integration(hass: HomeAssistant, mock_bridge) -> MockConfi class MockBridge: """Mock Lutron bridge that emulates configured connected status.""" - def __init__(self, can_connect=True) -> None: + def __init__(self, can_connect=True): """Initialize MockBridge instance with configured mock connectivity.""" self.can_connect = can_connect self.is_currently_connected = False diff --git a/tests/components/lutron_caseta/test_device_trigger.py b/tests/components/lutron_caseta/test_device_trigger.py index 1ab45bf7582..208dd36cccd 100644 --- a/tests/components/lutron_caseta/test_device_trigger.py +++ b/tests/components/lutron_caseta/test_device_trigger.py @@ -39,7 +39,11 @@ from homeassistant.setup import async_setup_component from . 
import MockBridge -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) MOCK_BUTTON_DEVICES = [ { @@ -98,7 +102,13 @@ MOCK_BUTTON_DEVICES = [ ] -async def _async_setup_lutron_with_picos(hass: HomeAssistant) -> str: +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + +async def _async_setup_lutron_with_picos(hass): """Setups a lutron bridge with picos.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -125,11 +135,7 @@ async def _async_setup_lutron_with_picos(hass: HomeAssistant) -> str: async def test_get_triggers(hass: HomeAssistant) -> None: """Test we get the expected triggers from a lutron pico.""" config_entry_id = await _async_setup_lutron_with_picos(hass) - # Fetching the config entry runtime_data is a legacy pattern - # and should not be copied for new integrations - data: LutronCasetaData = hass.config_entries.async_get_entry( - config_entry_id - ).runtime_data + data: LutronCasetaData = hass.data[DOMAIN][config_entry_id] keypads = data.keypad_data.keypads device_id = keypads[list(keypads)[0]]["dr_device_id"] @@ -214,9 +220,7 @@ async def test_none_serial_keypad( async def test_if_fires_on_button_event( - hass: HomeAssistant, - service_calls: list[ServiceCall], - device_registry: dr.DeviceRegistry, + hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry ) -> None: """Test for press trigger firing.""" await _async_setup_lutron_with_picos(hass) @@ -262,14 +266,12 @@ async def test_if_fires_on_button_event( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test_trigger_button_press" + assert len(calls) == 1 + assert calls[0].data["some"] == "test_trigger_button_press" async def test_if_fires_on_button_event_without_lip( - hass: HomeAssistant, - service_calls: list[ServiceCall], - device_registry: dr.DeviceRegistry, + hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry ) -> None: """Test for press trigger firing on a device that does not support lip.""" await _async_setup_lutron_with_picos(hass) @@ -313,12 +315,12 @@ async def test_if_fires_on_button_event_without_lip( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test_trigger_button_press" + assert len(calls) == 1 + assert calls[0].data["some"] == "test_trigger_button_press" async def test_validate_trigger_config_no_device( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for no press with no device.""" @@ -354,20 +356,16 @@ async def test_validate_trigger_config_no_device( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_validate_trigger_config_unknown_device( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for no press with an unknown device.""" config_entry_id = await _async_setup_lutron_with_picos(hass) - # Fetching the config entry runtime_data is a legacy pattern - # and should not be copied for new integrations - data: LutronCasetaData 
= hass.config_entries.async_get_entry( - config_entry_id - ).runtime_data + data: LutronCasetaData = hass.data[DOMAIN][config_entry_id] keypads = data.keypad_data.keypads lutron_device_id = list(keypads)[0] keypad = keypads[lutron_device_id] @@ -406,7 +404,7 @@ async def test_validate_trigger_config_unknown_device( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_validate_trigger_invalid_triggers( @@ -414,11 +412,7 @@ async def test_validate_trigger_invalid_triggers( ) -> None: """Test for click_event with invalid triggers.""" config_entry_id = await _async_setup_lutron_with_picos(hass) - # Fetching the config entry runtime_data is a legacy pattern - # and should not be copied for new integrations - data: LutronCasetaData = hass.config_entries.async_get_entry( - config_entry_id - ).runtime_data + data: LutronCasetaData = hass.data[DOMAIN][config_entry_id] keypads = data.keypad_data.keypads lutron_device_id = list(keypads)[0] keypad = keypads[lutron_device_id] @@ -450,9 +444,7 @@ async def test_validate_trigger_invalid_triggers( async def test_if_fires_on_button_event_late_setup( - hass: HomeAssistant, - service_calls: list[ServiceCall], - device_registry: dr.DeviceRegistry, + hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry ) -> None: """Test for press trigger firing with integration getting setup late.""" config_entry_id = await _async_setup_lutron_with_picos(hass) @@ -487,9 +479,8 @@ async def test_if_fires_on_button_event_late_setup( }, ) - with patch("homeassistant.components.lutron_caseta.Smartbridge.create_tls"): - await hass.config_entries.async_setup(config_entry_id) - await hass.async_block_till_done() + await hass.config_entries.async_setup(config_entry_id) + await hass.async_block_till_done() message = { ATTR_SERIAL: device.get("serial"), @@ -504,5 +495,5 @@ async def test_if_fires_on_button_event_late_setup( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test_trigger_button_press" + assert len(calls) == 1 + assert calls[0].data["some"] == "test_trigger_button_press" diff --git a/tests/components/lutron_caseta/test_logbook.py b/tests/components/lutron_caseta/test_logbook.py index 9a58838d65c..b6e8840c85c 100644 --- a/tests/components/lutron_caseta/test_logbook.py +++ b/tests/components/lutron_caseta/test_logbook.py @@ -53,11 +53,7 @@ async def test_humanify_lutron_caseta_button_event(hass: HomeAssistant) -> None: await hass.async_block_till_done() - # Fetching the config entry runtime_data is a legacy pattern - # and should not be copied for new integrations - data: LutronCasetaData = hass.config_entries.async_get_entry( - config_entry.entry_id - ).runtime_data + data: LutronCasetaData = hass.data[DOMAIN][config_entry.entry_id] keypads = data.keypad_data.keypads keypad = keypads["9"] dr_device_id = keypad["dr_device_id"] diff --git a/tests/components/lyric/test_config_flow.py b/tests/components/lyric/test_config_flow.py index e1916924e9f..e1a8d1131dc 100644 --- a/tests/components/lyric/test_config_flow.py +++ b/tests/components/lyric/test_config_flow.py @@ -26,7 +26,7 @@ CLIENT_SECRET = "5678" @pytest.fixture -async def mock_impl(hass: HomeAssistant) -> None: +async def mock_impl(hass): """Mock implementation.""" await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() @@ -45,11 +45,12 @@ 
async def test_abort_if_no_configuration(hass: HomeAssistant) -> None: assert result["reason"] == "missing_credentials" -@pytest.mark.usefixtures("current_request_with_host", "mock_impl") +@pytest.mark.usefixtures("current_request_with_host") async def test_full_flow( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, + mock_impl, ) -> None: """Check full flow.""" result = await hass.config_entries.flow.async_init( @@ -111,11 +112,12 @@ async def test_full_flow( assert len(mock_setup.mock_calls) == 1 -@pytest.mark.usefixtures("current_request_with_host", "mock_impl") +@pytest.mark.usefixtures("current_request_with_host") async def test_reauthentication_flow( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, + mock_impl, ) -> None: """Test reauthentication flow.""" old_entry = MockConfigEntry( @@ -126,7 +128,9 @@ async def test_reauthentication_flow( ) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=old_entry.data + ) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 diff --git a/tests/components/madvr/__init__.py b/tests/components/madvr/__init__.py deleted file mode 100644 index 343dd68a25d..00000000000 --- a/tests/components/madvr/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for the madvr-envy integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/madvr/conftest.py b/tests/components/madvr/conftest.py deleted file mode 100644 index 3136e04b06b..00000000000 --- a/tests/components/madvr/conftest.py +++ /dev/null @@ -1,88 +0,0 @@ -"""MadVR conftest for shared testing setup.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch - -import pytest - -from homeassistant.components.madvr.const import DEFAULT_NAME, DOMAIN -from homeassistant.const import CONF_HOST, CONF_PORT - -from .const import MOCK_CONFIG, MOCK_MAC - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.madvr.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_madvr_client() -> Generator[AsyncMock]: - """Mock a MadVR client.""" - with ( - patch( - "homeassistant.components.madvr.config_flow.Madvr", autospec=True - ) as mock_client, - patch("homeassistant.components.madvr.Madvr", new=mock_client), - ): - client = mock_client.return_value - client.host = MOCK_CONFIG[CONF_HOST] - client.port = MOCK_CONFIG[CONF_PORT] - client.mac_address = MOCK_MAC - client.connected.return_value = True - client.is_device_connectable.return_value = True - client.loop = AsyncMock() - client.tasks = AsyncMock() - client.set_update_callback = MagicMock() - - # mock the property to be off on startup (which it is) - is_on_mock = PropertyMock(return_value=True) - type(client).is_on = is_on_mock - - yield client - - -@pytest.fixture -def mock_config_entry() -> 
MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - data=MOCK_CONFIG, - unique_id=MOCK_MAC, - title=DEFAULT_NAME, - entry_id="3bd2acb0e4f0476d40865546d0d91132", - ) - - -def get_update_callback(mock_client: MagicMock): - """Retrieve the update callback function from the mocked client. - - This function extracts the callback that was passed to set_update_callback - on the mocked MadVR client. This callback is typically the handle_push_data - method of the MadVRCoordinator. - - Args: - mock_client (MagicMock): The mocked MadVR client. - - Returns: - function: The update callback function. - - """ - # Get all the calls made to set_update_callback - calls = mock_client.set_update_callback.call_args_list - - if not calls: - raise ValueError("set_update_callback was not called on the mock client") - - # Get the first (and usually only) call - first_call = calls[0] - - # Get the first argument of this call, which should be the callback function - return first_call.args[0] diff --git a/tests/components/madvr/const.py b/tests/components/madvr/const.py deleted file mode 100644 index e1c5435fcbb..00000000000 --- a/tests/components/madvr/const.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Constants for the MadVR tests.""" - -from homeassistant.const import CONF_HOST, CONF_PORT - -MOCK_CONFIG = { - CONF_HOST: "192.168.1.1", - CONF_PORT: 44077, -} - -MOCK_MAC = "00:11:22:33:44:55" -MOCK_MAC_NEW = "00:00:00:00:00:01" - -TEST_CON_ERROR = ConnectionError("Connection failed") -TEST_IMP_ERROR = NotImplementedError("Not implemented") - -TEST_FAILED_ON = "Failed to turn on device" -TEST_FAILED_OFF = "Failed to turn off device" -TEST_FAILED_CMD = "Failed to send command" -TEST_COMMAND = "test" diff --git a/tests/components/madvr/snapshots/test_binary_sensors.ambr b/tests/components/madvr/snapshots/test_binary_sensors.ambr deleted file mode 100644 index 7fd54a7c240..00000000000 --- a/tests/components/madvr/snapshots/test_binary_sensors.ambr +++ /dev/null @@ -1,185 +0,0 @@ -# serializer version: 1 -# name: test_binary_sensor_setup[binary_sensor.madvr_envy_hdr_flag-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.madvr_envy_hdr_flag', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'HDR flag', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'hdr_flag', - 'unique_id': '00:11:22:33:44:55_hdr_flag', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor_setup[binary_sensor.madvr_envy_hdr_flag-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy HDR flag', - }), - 'context': , - 'entity_id': 'binary_sensor.madvr_envy_hdr_flag', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_setup[binary_sensor.madvr_envy_outgoing_hdr_flag-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 
'binary_sensor.madvr_envy_outgoing_hdr_flag', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Outgoing HDR flag', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'outgoing_hdr_flag', - 'unique_id': '00:11:22:33:44:55_outgoing_hdr_flag', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor_setup[binary_sensor.madvr_envy_outgoing_hdr_flag-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Outgoing HDR flag', - }), - 'context': , - 'entity_id': 'binary_sensor.madvr_envy_outgoing_hdr_flag', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_setup[binary_sensor.madvr_envy_power_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.madvr_envy_power_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power state', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_state', - 'unique_id': '00:11:22:33:44:55_power_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor_setup[binary_sensor.madvr_envy_power_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Power state', - }), - 'context': , - 'entity_id': 'binary_sensor.madvr_envy_power_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_setup[binary_sensor.madvr_envy_signal_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.madvr_envy_signal_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Signal state', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'signal_state', - 'unique_id': '00:11:22:33:44:55_signal_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor_setup[binary_sensor.madvr_envy_signal_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Signal state', - }), - 'context': , - 'entity_id': 'binary_sensor.madvr_envy_signal_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/madvr/snapshots/test_diagnostics.ambr b/tests/components/madvr/snapshots/test_diagnostics.ambr deleted file mode 100644 index 3a281391860..00000000000 --- a/tests/components/madvr/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,28 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics[positive_payload0] - dict({ - 'config_entry': 
dict({ - 'data': dict({ - 'host': '**REDACTED**', - 'port': 44077, - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'madvr', - 'entry_id': '3bd2acb0e4f0476d40865546d0d91132', - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'envy', - 'unique_id': '00:11:22:33:44:55', - 'version': 1, - }), - 'madvr_data': dict({ - 'is_on': True, - }), - }) -# --- diff --git a/tests/components/madvr/snapshots/test_remote.ambr b/tests/components/madvr/snapshots/test_remote.ambr deleted file mode 100644 index 1157496a93e..00000000000 --- a/tests/components/madvr/snapshots/test_remote.ambr +++ /dev/null @@ -1,48 +0,0 @@ -# serializer version: 1 -# name: test_remote_setup[remote.madvr_envy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'remote', - 'entity_category': None, - 'entity_id': 'remote.madvr_envy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:11:22:33:44:55', - 'unit_of_measurement': None, - }) -# --- -# name: test_remote_setup[remote.madvr_envy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy', - 'supported_features': , - }), - 'context': , - 'entity_id': 'remote.madvr_envy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/madvr/snapshots/test_sensors.ambr b/tests/components/madvr/snapshots/test_sensors.ambr deleted file mode 100644 index 7b0dd254f77..00000000000 --- a/tests/components/madvr/snapshots/test_sensors.ambr +++ /dev/null @@ -1,1359 +0,0 @@ -# serializer version: 1 -# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_decimal-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_aspect_decimal', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Aspect decimal', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'aspect_dec', - 'unique_id': '00:11:22:33:44:55_aspect_dec', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_decimal-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Aspect decimal', - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_aspect_decimal', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.78', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_integer-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 
'entity_id': 'sensor.madvr_envy_aspect_integer', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Aspect integer', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'aspect_int', - 'unique_id': '00:11:22:33:44:55_aspect_int', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_integer-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Aspect integer', - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_aspect_integer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '178', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_name-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_aspect_name', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Aspect name', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'aspect_name', - 'unique_id': '00:11:22:33:44:55_aspect_name', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_name-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Aspect name', - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_aspect_name', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Widescreen', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_resolution-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_aspect_resolution', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Aspect resolution', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'aspect_res', - 'unique_id': '00:11:22:33:44:55_aspect_res', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_resolution-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Aspect resolution', - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_aspect_resolution', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3840:2160', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_cpu_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_cpu_temperature', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'CPU temperature', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'temp_cpu', - 'unique_id': '00:11:22:33:44:55_temp_cpu', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_cpu_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'madVR Envy CPU temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_cpu_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.2', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_gpu_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_gpu_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'GPU temperature', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'temp_gpu', - 'unique_id': '00:11:22:33:44:55_temp_gpu', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_gpu_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'madVR Envy GPU temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_gpu_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '45.5', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_hdmi_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_hdmi_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'HDMI temperature', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'temp_hdmi', - 'unique_id': '00:11:22:33:44:55_temp_hdmi', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_hdmi_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'madVR Envy HDMI temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_hdmi_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40.0', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_aspect_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - '16:9', - '4:3', - ]), - }), - 
'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_incoming_aspect_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Incoming aspect ratio', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'incoming_aspect_ratio', - 'unique_id': '00:11:22:33:44:55_incoming_aspect_ratio', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_aspect_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'madVR Envy Incoming aspect ratio', - 'options': list([ - '16:9', - '4:3', - ]), - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_incoming_aspect_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '16:9', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_bit_depth-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - '8bit', - '10bit', - '12bit', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_incoming_bit_depth', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Incoming bit depth', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'incoming_bit_depth', - 'unique_id': '00:11:22:33:44:55_incoming_bit_depth', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_bit_depth-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'madVR Envy Incoming bit depth', - 'options': list([ - '8bit', - '10bit', - '12bit', - ]), - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_incoming_bit_depth', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10bit', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_black_levels-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'TV', - 'PC', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_incoming_black_levels', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Incoming black levels', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'incoming_black_levels', - 'unique_id': '00:11:22:33:44:55_incoming_black_levels', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_black_levels-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'madVR Envy Incoming black levels', - 'options': list([ - 'TV', 
- 'PC', - ]), - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_incoming_black_levels', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'PC', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_color_space-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'RGB', - '444', - '422', - '420', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_incoming_color_space', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Incoming color space', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'incoming_color_space', - 'unique_id': '00:11:22:33:44:55_incoming_color_space', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_color_space-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'madVR Envy Incoming color space', - 'options': list([ - 'RGB', - '444', - '422', - '420', - ]), - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_incoming_color_space', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'RGB', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_colorimetry-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'SDR', - 'HDR10', - 'HLG 601', - 'PAL', - '709', - 'DCI', - '2020', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_incoming_colorimetry', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Incoming colorimetry', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'incoming_colorimetry', - 'unique_id': '00:11:22:33:44:55_incoming_colorimetry', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_colorimetry-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'madVR Envy Incoming colorimetry', - 'options': list([ - 'SDR', - 'HDR10', - 'HLG 601', - 'PAL', - '709', - 'DCI', - '2020', - ]), - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_incoming_colorimetry', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2020', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_frame_rate-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_incoming_frame_rate', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 
'Incoming frame rate', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'incoming_frame_rate', - 'unique_id': '00:11:22:33:44:55_incoming_frame_rate', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_frame_rate-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Incoming frame rate', - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_incoming_frame_rate', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '60p', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_resolution-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_incoming_resolution', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Incoming resolution', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'incoming_res', - 'unique_id': '00:11:22:33:44:55_incoming_res', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_resolution-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Incoming resolution', - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_incoming_resolution', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3840x2160', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_signal_type-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - '2D', - '3D', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_incoming_signal_type', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Incoming signal type', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'incoming_signal_type', - 'unique_id': '00:11:22:33:44:55_incoming_signal_type', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_signal_type-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'madVR Envy Incoming signal type', - 'options': list([ - '2D', - '3D', - ]), - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_incoming_signal_type', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3D', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_mainboard_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_mainboard_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 
'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Mainboard temperature', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'temp_mainboard', - 'unique_id': '00:11:22:33:44:55_temp_mainboard', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_mainboard_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'madVR Envy Mainboard temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_mainboard_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '35.8', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_decimal-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_masking_decimal', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Masking decimal', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'masking_dec', - 'unique_id': '00:11:22:33:44:55_masking_dec', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_decimal-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Masking decimal', - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_masking_decimal', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.78', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_integer-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_masking_integer', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Masking integer', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'masking_int', - 'unique_id': '00:11:22:33:44:55_masking_int', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_integer-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Masking integer', - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_masking_integer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '178', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_resolution-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_masking_resolution', - 'has_entity_name': True, - 'hidden_by': 
None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Masking resolution', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'masking_res', - 'unique_id': '00:11:22:33:44:55_masking_res', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_resolution-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Masking resolution', - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_masking_resolution', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3840:2160', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_bit_depth-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - '8bit', - '10bit', - '12bit', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_outgoing_bit_depth', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outgoing bit depth', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'outgoing_bit_depth', - 'unique_id': '00:11:22:33:44:55_outgoing_bit_depth', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_bit_depth-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'madVR Envy Outgoing bit depth', - 'options': list([ - '8bit', - '10bit', - '12bit', - ]), - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_outgoing_bit_depth', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10bit', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_black_levels-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'TV', - 'PC', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_outgoing_black_levels', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outgoing black levels', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'outgoing_black_levels', - 'unique_id': '00:11:22:33:44:55_outgoing_black_levels', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_black_levels-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'madVR Envy Outgoing black levels', - 'options': list([ - 'TV', - 'PC', - ]), - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_outgoing_black_levels', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'PC', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_color_space-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 
'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'RGB', - '444', - '422', - '420', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_outgoing_color_space', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outgoing color space', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'outgoing_color_space', - 'unique_id': '00:11:22:33:44:55_outgoing_color_space', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_color_space-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'madVR Envy Outgoing color space', - 'options': list([ - 'RGB', - '444', - '422', - '420', - ]), - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_outgoing_color_space', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'RGB', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_colorimetry-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'SDR', - 'HDR10', - 'HLG 601', - 'PAL', - '709', - 'DCI', - '2020', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_outgoing_colorimetry', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outgoing colorimetry', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'outgoing_colorimetry', - 'unique_id': '00:11:22:33:44:55_outgoing_colorimetry', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_colorimetry-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'madVR Envy Outgoing colorimetry', - 'options': list([ - 'SDR', - 'HDR10', - 'HLG 601', - 'PAL', - '709', - 'DCI', - '2020', - ]), - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_outgoing_colorimetry', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2020', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_frame_rate-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_outgoing_frame_rate', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Outgoing frame rate', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'outgoing_frame_rate', - 'unique_id': '00:11:22:33:44:55_outgoing_frame_rate', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_frame_rate-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Outgoing frame rate', - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_outgoing_frame_rate', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '60p', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_resolution-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_outgoing_resolution', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Outgoing resolution', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'outgoing_res', - 'unique_id': '00:11:22:33:44:55_outgoing_res', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_resolution-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'madVR Envy Outgoing resolution', - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_outgoing_resolution', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3840x2160', - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_signal_type-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - '2D', - '3D', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.madvr_envy_outgoing_signal_type', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outgoing signal type', - 'platform': 'madvr', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'outgoing_signal_type', - 'unique_id': '00:11:22:33:44:55_outgoing_signal_type', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_signal_type-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'madVR Envy Outgoing signal type', - 'options': list([ - '2D', - '3D', - ]), - }), - 'context': , - 'entity_id': 'sensor.madvr_envy_outgoing_signal_type', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2D', - }) -# --- diff --git a/tests/components/madvr/test_binary_sensors.py b/tests/components/madvr/test_binary_sensors.py deleted file mode 100644 index 469a3225ca0..00000000000 --- a/tests/components/madvr/test_binary_sensors.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Tests for the MadVR binary sensor entities.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import STATE_OFF, STATE_ON, Platform -from homeassistant.core import HomeAssistant -import homeassistant.helpers.entity_registry as er - -from . 
import setup_integration -from .conftest import get_update_callback - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_binary_sensor_setup( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test setup of the binary sensor entities.""" - with patch("homeassistant.components.madvr.PLATFORMS", [Platform.BINARY_SENSOR]): - await setup_integration(hass, mock_config_entry) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.parametrize( - ("entity_id", "positive_payload", "negative_payload"), - [ - ( - "binary_sensor.madvr_envy_power_state", - {"is_on": True}, - {"is_on": False}, - ), - ( - "binary_sensor.madvr_envy_signal_state", - {"is_signal": True}, - {"is_signal": False}, - ), - ( - "binary_sensor.madvr_envy_hdr_flag", - {"hdr_flag": True}, - {"hdr_flag": False}, - ), - ( - "binary_sensor.madvr_envy_outgoing_hdr_flag", - {"outgoing_hdr_flag": True}, - {"outgoing_hdr_flag": False}, - ), - ], -) -async def test_binary_sensors( - hass: HomeAssistant, - mock_madvr_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_id: str, - positive_payload: dict, - negative_payload: dict, -) -> None: - """Test the binary sensors.""" - await setup_integration(hass, mock_config_entry) - update_callback = get_update_callback(mock_madvr_client) - - # Test positive state - update_callback(positive_payload) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == STATE_ON - - # Test negative state - update_callback(negative_payload) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == STATE_OFF diff --git a/tests/components/madvr/test_config_flow.py b/tests/components/madvr/test_config_flow.py deleted file mode 100644 index 35db8a01b5b..00000000000 --- a/tests/components/madvr/test_config_flow.py +++ /dev/null @@ -1,240 +0,0 @@ -"""Tests for the MadVR config flow.""" - -from collections.abc import AsyncGenerator -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.madvr.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_PORT -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .const import MOCK_CONFIG, MOCK_MAC, MOCK_MAC_NEW - -from tests.common import MockConfigEntry - - -@pytest.fixture(autouse=True) -async def avoid_wait() -> AsyncGenerator[None]: - """Mock sleep.""" - with patch("homeassistant.components.madvr.config_flow.RETRY_INTERVAL", 0): - yield - - -async def test_full_flow( - hass: HomeAssistant, - mock_madvr_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test full config flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { - CONF_HOST: MOCK_CONFIG[CONF_HOST], - CONF_PORT: MOCK_CONFIG[CONF_PORT], - } - assert result["result"].unique_id == MOCK_MAC - mock_madvr_client.open_connection.assert_called_once() - 
mock_madvr_client.async_add_tasks.assert_called_once() - mock_madvr_client.async_cancel_tasks.assert_called_once() - - -async def test_flow_errors( - hass: HomeAssistant, - mock_madvr_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test error handling in config flow.""" - mock_madvr_client.open_connection.side_effect = TimeoutError - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - mock_madvr_client.open_connection.side_effect = None - mock_madvr_client.connected = False - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - mock_madvr_client.connected = True - mock_madvr_client.mac_address = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "no_mac"} - - # ensure an error is recoverable - mock_madvr_client.mac_address = MOCK_MAC - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DEFAULT_NAME - assert result["data"] == { - CONF_HOST: MOCK_CONFIG[CONF_HOST], - CONF_PORT: MOCK_CONFIG[CONF_PORT], - } - - # Verify method calls - assert mock_madvr_client.open_connection.call_count == 4 - assert mock_madvr_client.async_add_tasks.call_count == 2 - # the first call will not call this due to timeout as expected - assert mock_madvr_client.async_cancel_tasks.call_count == 2 - - -async def test_duplicate( - hass: HomeAssistant, - mock_madvr_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test duplicate config entries.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_reconfigure_flow( - hass: HomeAssistant, - mock_madvr_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test reconfigure flow.""" - mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reconfigure_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" - assert result["errors"] == {} - - # define new host - new_host = "192.168.1.100" - # make sure setting port works - new_port = 44078 - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: new_host, CONF_PORT: new_port}, - ) - - # should get the abort with success result - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - - # Verify that the config entry was updated - assert 
mock_config_entry.data[CONF_HOST] == new_host - assert mock_config_entry.data[CONF_PORT] == new_port - - # Verify that the connection was tested - mock_madvr_client.open_connection.assert_called() - mock_madvr_client.async_add_tasks.assert_called() - mock_madvr_client.async_cancel_tasks.assert_called() - - -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.madvr.config.abort.set_up_new_device"], -) -async def test_reconfigure_new_device( - hass: HomeAssistant, - mock_madvr_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test reconfigure flow.""" - mock_config_entry.add_to_hass(hass) - # test reconfigure with a new device (should fail) - result = await mock_config_entry.start_reconfigure_flow(hass) - - # define new host - new_host = "192.168.1.100" - # make sure setting port works - new_port = 44078 - - # modify test_connection so it returns new_mac - mock_madvr_client.mac_address = MOCK_MAC_NEW - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: new_host, CONF_PORT: new_port}, - ) - - # unique id should remain unchanged with new device, should fail - assert mock_config_entry.unique_id == MOCK_MAC - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "set_up_new_device" - - -async def test_reconfigure_flow_errors( - hass: HomeAssistant, - mock_madvr_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test error handling in reconfigure flow.""" - mock_config_entry.add_to_hass(hass) - - result = await mock_config_entry.start_reconfigure_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" - - # Test CannotConnect error - mock_madvr_client.open_connection.side_effect = TimeoutError - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "192.168.1.100", CONF_PORT: 44077}, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - # Test no_mac error - mock_madvr_client.open_connection.side_effect = None - mock_madvr_client.connected = True - mock_madvr_client.mac_address = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "192.168.1.100", CONF_PORT: 44077}, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "no_mac"} - - # Ensure errors are recoverable - mock_madvr_client.mac_address = MOCK_MAC - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "192.168.1.100", CONF_PORT: 44077}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" diff --git a/tests/components/madvr/test_diagnostics.py b/tests/components/madvr/test_diagnostics.py deleted file mode 100644 index 453eaba8d94..00000000000 --- a/tests/components/madvr/test_diagnostics.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Test madVR diagnostics.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion -from syrupy.filters import props - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from . 
import setup_integration -from .conftest import get_update_callback - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -@pytest.mark.parametrize( - ("positive_payload"), - [ - {"is_on": True}, - ], -) -async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_config_entry: MockConfigEntry, - mock_madvr_client: AsyncMock, - snapshot: SnapshotAssertion, - positive_payload: dict, -) -> None: - """Test config entry diagnostics.""" - with patch("homeassistant.components.madvr.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - update_callback = get_update_callback(mock_madvr_client) - - # Add data to test storing diagnostic data - update_callback(positive_payload) - await hass.async_block_till_done() - - result = await get_diagnostics_for_config_entry( - hass, hass_client, mock_config_entry - ) - - assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/madvr/test_init.py b/tests/components/madvr/test_init.py deleted file mode 100644 index dace812af11..00000000000 --- a/tests/components/madvr/test_init.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Tests for the MadVR integration.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry - - -async def test_load_unload_entry( - hass: HomeAssistant, - mock_madvr_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test load and unload entry.""" - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_remove(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/madvr/test_remote.py b/tests/components/madvr/test_remote.py deleted file mode 100644 index 6fc507534d6..00000000000 --- a/tests/components/madvr/test_remote.py +++ /dev/null @@ -1,155 +0,0 @@ -"""Tests for the MadVR remote entity.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.remote import ( - DOMAIN as REMOTE_DOMAIN, - SERVICE_SEND_COMMAND, -) -from homeassistant.const import ( - ATTR_COMMAND, - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_ON, - Platform, -) -from homeassistant.core import HomeAssistant -import homeassistant.helpers.entity_registry as er - -from . 
import setup_integration -from .const import ( - TEST_COMMAND, - TEST_CON_ERROR, - TEST_FAILED_CMD, - TEST_FAILED_OFF, - TEST_FAILED_ON, - TEST_IMP_ERROR, -) - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_remote_setup( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test setup of the remote entity.""" - with patch("homeassistant.components.madvr.PLATFORMS", [Platform.REMOTE]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_remote_power( - hass: HomeAssistant, - mock_madvr_client: AsyncMock, - mock_config_entry: MockConfigEntry, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test turning on the remote entity.""" - - await setup_integration(hass, mock_config_entry) - - entity_id = "remote.madvr_envy" - remote = hass.states.get(entity_id) - assert remote.state == STATE_ON - - await hass.services.async_call( - REMOTE_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - - mock_madvr_client.power_off.assert_called_once() - - await hass.services.async_call( - REMOTE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - await hass.async_block_till_done() - - mock_madvr_client.power_on.assert_called_once() - - # cover exception cases - caplog.clear() - mock_madvr_client.power_off.side_effect = TEST_CON_ERROR - await hass.services.async_call( - REMOTE_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - assert TEST_FAILED_OFF in caplog.text - - # Test turning off with NotImplementedError - caplog.clear() - mock_madvr_client.power_off.side_effect = TEST_IMP_ERROR - await hass.services.async_call( - REMOTE_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - assert TEST_FAILED_OFF in caplog.text - - # Reset side_effect for power_off - mock_madvr_client.power_off.side_effect = None - - # Test turning on with ConnectionError - caplog.clear() - mock_madvr_client.power_on.side_effect = TEST_CON_ERROR - await hass.services.async_call( - REMOTE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - assert TEST_FAILED_ON in caplog.text - - # Test turning on with NotImplementedError - caplog.clear() - mock_madvr_client.power_on.side_effect = TEST_IMP_ERROR - await hass.services.async_call( - REMOTE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - assert TEST_FAILED_ON in caplog.text - - -async def test_send_command( - hass: HomeAssistant, - mock_madvr_client: AsyncMock, - mock_config_entry: MockConfigEntry, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test sending command to the remote entity.""" - - await setup_integration(hass, mock_config_entry) - - entity_id = "remote.madvr_envy" - remote = hass.states.get(entity_id) - assert remote.state == STATE_ON - - await hass.services.async_call( - REMOTE_DOMAIN, - SERVICE_SEND_COMMAND, - {ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: TEST_COMMAND}, - blocking=True, - ) - - mock_madvr_client.add_command_to_queue.assert_called_once_with([TEST_COMMAND]) - # cover exceptions - # Test ConnectionError - mock_madvr_client.add_command_to_queue.side_effect = TEST_CON_ERROR - await hass.services.async_call( - REMOTE_DOMAIN, - SERVICE_SEND_COMMAND, - {ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: TEST_COMMAND}, - blocking=True, - ) - assert TEST_FAILED_CMD in caplog.text - - # Test NotImplementedError 
- mock_madvr_client.add_command_to_queue.side_effect = TEST_IMP_ERROR - await hass.services.async_call( - REMOTE_DOMAIN, - SERVICE_SEND_COMMAND, - {ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: TEST_COMMAND}, - blocking=True, - ) - assert TEST_FAILED_CMD in caplog.text diff --git a/tests/components/madvr/test_sensors.py b/tests/components/madvr/test_sensors.py deleted file mode 100644 index ddc01fc737a..00000000000 --- a/tests/components/madvr/test_sensors.py +++ /dev/null @@ -1,108 +0,0 @@ -"""Tests for the MadVR sensor entities.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.madvr.sensor import get_temperature -from homeassistant.const import STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -import homeassistant.helpers.entity_registry as er - -from . import setup_integration -from .conftest import get_update_callback - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor_setup_and_states( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_madvr_client: AsyncMock, -) -> None: - """Test setup of the sensor entities and their states.""" - with patch("homeassistant.components.madvr.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - update_callback = get_update_callback(mock_madvr_client) - - # Create a big data update with all sensor values - update_data = { - "temp_gpu": 45.5, - "temp_hdmi": 40.0, - "temp_cpu": 50.2, - "temp_mainboard": 35.8, - "incoming_res": "3840x2160", - "incoming_frame_rate": "60p", - "outgoing_signal_type": "2D", - "incoming_signal_type": "3D", - "incoming_color_space": "RGB", - "incoming_bit_depth": "10bit", - "incoming_colorimetry": "2020", - "incoming_black_levels": "PC", - "incoming_aspect_ratio": "16:9", - "outgoing_res": "3840x2160", - "outgoing_frame_rate": "60p", - "outgoing_color_space": "RGB", - "outgoing_bit_depth": "10bit", - "outgoing_colorimetry": "2020", - "outgoing_black_levels": "PC", - "aspect_res": "3840:2160", - "aspect_dec": "1.78", - "aspect_int": "178", - "aspect_name": "Widescreen", - "masking_res": "3840:2160", - "masking_dec": "1.78", - "masking_int": "178", - } - - # Update all sensors at once - update_callback(update_data) - await hass.async_block_till_done() - - # Snapshot all entity states - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - # Test invalid temperature value - update_callback({"temp_gpu": -1}) - await hass.async_block_till_done() - assert hass.states.get("sensor.madvr_envy_gpu_temperature").state == STATE_UNKNOWN - - # Test sensor unknown - update_callback({"incoming_res": None}) - await hass.async_block_till_done() - assert ( - hass.states.get("sensor.madvr_envy_incoming_resolution").state == STATE_UNKNOWN - ) - - # Test sensor becomes known again - update_callback({"incoming_res": "1920x1080"}) - await hass.async_block_till_done() - assert hass.states.get("sensor.madvr_envy_incoming_resolution").state == "1920x1080" - - # Test temperature sensor - update_callback({"temp_gpu": 41.2}) - await hass.async_block_till_done() - assert hass.states.get("sensor.madvr_envy_gpu_temperature").state == "41.2" - - # test get_temperature ValueError - assert get_temperature(None, "temp_key") is None - - # test startup placeholder values - 
update_callback({"outgoing_bit_depth": "0bit"}) - await hass.async_block_till_done() - assert ( - hass.states.get("sensor.madvr_envy_outgoing_bit_depth").state == STATE_UNKNOWN - ) - - update_callback({"outgoing_color_space": "?"}) - await hass.async_block_till_done() - assert ( - hass.states.get("sensor.madvr_envy_outgoing_color_space").state == STATE_UNKNOWN - ) diff --git a/tests/components/mailbox/__init__.py b/tests/components/mailbox/__init__.py new file mode 100644 index 00000000000..5e212354579 --- /dev/null +++ b/tests/components/mailbox/__init__.py @@ -0,0 +1 @@ +"""The tests for mailbox platforms.""" diff --git a/tests/components/mailbox/test_init.py b/tests/components/mailbox/test_init.py new file mode 100644 index 00000000000..31e831c3bae --- /dev/null +++ b/tests/components/mailbox/test_init.py @@ -0,0 +1,225 @@ +"""The tests for the mailbox component.""" + +from datetime import datetime +from hashlib import sha1 +from http import HTTPStatus +from typing import Any + +from aiohttp.test_utils import TestClient +import pytest + +from homeassistant.bootstrap import async_setup_component +from homeassistant.components import mailbox +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.util import dt as dt_util + +from tests.common import MockModule, mock_integration, mock_platform +from tests.typing import ClientSessionGenerator + +MAILBOX_NAME = "TestMailbox" +MEDIA_DATA = b"3f67c4ea33b37d1710f" +MESSAGE_TEXT = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. " + + +def _create_message(idx: int) -> dict[str, Any]: + """Create a sample message.""" + msgtime = dt_util.as_timestamp(datetime(2010, 12, idx + 1, 13, 17, 00)) + msgtxt = f"Message {idx + 1}. {MESSAGE_TEXT}" + msgsha = sha1(msgtxt.encode("utf-8")).hexdigest() + return { + "info": { + "origtime": int(msgtime), + "callerid": "John Doe <212-555-1212>", + "duration": "10", + }, + "text": msgtxt, + "sha": msgsha, + } + + +class TestMailbox(mailbox.Mailbox): + """Test Mailbox, with 10 sample messages.""" + + # This class doesn't contain any tests! Skip pytest test collection. 
+ __test__ = False + + def __init__(self, hass: HomeAssistant, name: str) -> None: + """Initialize Test mailbox.""" + super().__init__(hass, name) + self._messages: dict[str, dict[str, Any]] = {} + for idx in range(10): + msg = _create_message(idx) + msgsha = msg["sha"] + self._messages[msgsha] = msg + + @property + def media_type(self) -> str: + """Return the supported media type.""" + return mailbox.CONTENT_TYPE_MPEG + + @property + def can_delete(self) -> bool: + """Return if messages can be deleted.""" + return True + + @property + def has_media(self) -> bool: + """Return if messages have attached media files.""" + return True + + async def async_get_media(self, msgid: str) -> bytes: + """Return the media blob for the msgid.""" + if msgid not in self._messages: + raise mailbox.StreamError("Message not found") + + return MEDIA_DATA + + async def async_get_messages(self) -> list[dict[str, Any]]: + """Return a list of the current messages.""" + return sorted( + self._messages.values(), + key=lambda item: item["info"]["origtime"], # type: ignore[no-any-return] + reverse=True, + ) + + async def async_delete(self, msgid: str) -> bool: + """Delete the specified messages.""" + if msgid in self._messages: + del self._messages[msgid] + self.async_update() + return True + + +class MockMailbox: + """A mock mailbox platform.""" + + async def async_get_handler( + self, + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> mailbox.Mailbox: + """Set up the Test mailbox.""" + return TestMailbox(hass, MAILBOX_NAME) + + +@pytest.fixture +def mock_mailbox(hass: HomeAssistant) -> None: + """Mock mailbox.""" + mock_integration(hass, MockModule(domain="test")) + mock_platform(hass, "test.mailbox", MockMailbox()) + + +@pytest.fixture +async def mock_http_client( + hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_mailbox: None +) -> TestClient: + """Start the Home Assistant HTTP component.""" + assert await async_setup_component( + hass, mailbox.DOMAIN, {mailbox.DOMAIN: {"platform": "test"}} + ) + return await hass_client() + + +async def test_get_platforms_from_mailbox(mock_http_client: TestClient) -> None: + """Get platforms from mailbox.""" + url = "/api/mailbox/platforms" + + req = await mock_http_client.get(url) + assert req.status == HTTPStatus.OK + result = await req.json() + assert len(result) == 1 + assert result[0].get("name") == "TestMailbox" + + +async def test_get_messages_from_mailbox(mock_http_client: TestClient) -> None: + """Get messages from mailbox.""" + url = "/api/mailbox/messages/TestMailbox" + + req = await mock_http_client.get(url) + assert req.status == HTTPStatus.OK + result = await req.json() + assert len(result) == 10 + + +async def test_get_media_from_mailbox(mock_http_client: TestClient) -> None: + """Get audio from mailbox.""" + mp3sha = "7cad61312c7b66f619295be2da8c7ac73b4968f1" + msgtxt = "Message 1. Lorem ipsum dolor sit amet, consectetur adipiscing elit. " + msgsha = sha1(msgtxt.encode("utf-8")).hexdigest() + + url = f"/api/mailbox/media/TestMailbox/{msgsha}" + req = await mock_http_client.get(url) + assert req.status == HTTPStatus.OK + data = await req.read() + assert sha1(data).hexdigest() == mp3sha + + +async def test_delete_from_mailbox(mock_http_client: TestClient) -> None: + """Get audio from mailbox.""" + msgtxt1 = "Message 1. Lorem ipsum dolor sit amet, consectetur adipiscing elit. " + msgtxt2 = "Message 3. Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
" + msgsha1 = sha1(msgtxt1.encode("utf-8")).hexdigest() + msgsha2 = sha1(msgtxt2.encode("utf-8")).hexdigest() + + for msg in (msgsha1, msgsha2): + url = f"/api/mailbox/delete/TestMailbox/{msg}" + req = await mock_http_client.delete(url) + assert req.status == HTTPStatus.OK + + url = "/api/mailbox/messages/TestMailbox" + req = await mock_http_client.get(url) + assert req.status == HTTPStatus.OK + result = await req.json() + assert len(result) == 8 + + +async def test_get_messages_from_invalid_mailbox(mock_http_client: TestClient) -> None: + """Get messages from mailbox.""" + url = "/api/mailbox/messages/mailbox.invalid_mailbox" + + req = await mock_http_client.get(url) + assert req.status == HTTPStatus.NOT_FOUND + + +async def test_get_media_from_invalid_mailbox(mock_http_client: TestClient) -> None: + """Get messages from mailbox.""" + msgsha = "0000000000000000000000000000000000000000" + url = f"/api/mailbox/media/mailbox.invalid_mailbox/{msgsha}" + + req = await mock_http_client.get(url) + assert req.status == HTTPStatus.NOT_FOUND + + +async def test_get_media_from_invalid_msgid(mock_http_client: TestClient) -> None: + """Get messages from mailbox.""" + msgsha = "0000000000000000000000000000000000000000" + url = f"/api/mailbox/media/TestMailbox/{msgsha}" + + req = await mock_http_client.get(url) + assert req.status == HTTPStatus.INTERNAL_SERVER_ERROR + + +async def test_delete_from_invalid_mailbox(mock_http_client: TestClient) -> None: + """Get audio from mailbox.""" + msgsha = "0000000000000000000000000000000000000000" + url = f"/api/mailbox/delete/mailbox.invalid_mailbox/{msgsha}" + + req = await mock_http_client.delete(url) + assert req.status == HTTPStatus.NOT_FOUND + + +async def test_repair_issue_is_created( + hass: HomeAssistant, issue_registry: ir.IssueRegistry, mock_mailbox: None +) -> None: + """Test repair issue is created.""" + assert await async_setup_component( + hass, mailbox.DOMAIN, {mailbox.DOMAIN: {"platform": "test"}} + ) + await hass.async_block_till_done() + assert ( + mailbox.DOMAIN, + "deprecated_mailbox_test", + ) in issue_registry.issues diff --git a/tests/components/mailgun/test_init.py b/tests/components/mailgun/test_init.py index 7dbde02b10f..908e98ae31e 100644 --- a/tests/components/mailgun/test_init.py +++ b/tests/components/mailgun/test_init.py @@ -8,9 +8,9 @@ import pytest from homeassistant import config_entries from homeassistant.components import mailgun, webhook +from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_API_KEY, CONF_DOMAIN -from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.core_config import async_process_ha_core_config +from homeassistant.core import HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -29,7 +29,7 @@ async def http_client( @pytest.fixture -async def webhook_id_with_api_key(hass: HomeAssistant) -> str: +async def webhook_id_with_api_key(hass): """Initialize the Mailgun component and get the webhook_id.""" await async_setup_component( hass, @@ -53,7 +53,7 @@ async def webhook_id_with_api_key(hass: HomeAssistant) -> str: @pytest.fixture -async def webhook_id_without_api_key(hass: HomeAssistant) -> str: +async def webhook_id_without_api_key(hass): """Initialize the Mailgun component and get the webhook_id w/o API key.""" await async_setup_component(hass, mailgun.DOMAIN, {}) @@ -73,7 +73,7 @@ async def webhook_id_without_api_key(hass: HomeAssistant) -> str: 
@pytest.fixture -async def mailgun_events(hass: HomeAssistant) -> list[Event]: +async def mailgun_events(hass): """Return a list of mailgun_events triggered.""" events = [] diff --git a/tests/components/manual/test_alarm_control_panel.py b/tests/components/manual/test_alarm_control_panel.py index 9fc92cd5458..6c9ba9ee9a0 100644 --- a/tests/components/manual/test_alarm_control_panel.py +++ b/tests/components/manual/test_alarm_control_panel.py @@ -7,15 +7,8 @@ from freezegun import freeze_time import pytest from homeassistant.components import alarm_control_panel -from homeassistant.components.alarm_control_panel import ( - AlarmControlPanelEntityFeature, - AlarmControlPanelState, -) +from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature from homeassistant.components.demo import alarm_control_panel as demo -from homeassistant.components.manual.alarm_control_panel import ( - ATTR_NEXT_STATE, - ATTR_PREVIOUS_STATE, -) from homeassistant.const import ( ATTR_CODE, ATTR_ENTITY_ID, @@ -24,9 +17,18 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_ARM_VACATION, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_CUSTOM_BYPASS, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMED_VACATION, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMED, + STATE_ALARM_PENDING, + STATE_ALARM_TRIGGERED, ) from homeassistant.core import CoreState, HomeAssistant, State -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -47,14 +49,11 @@ async def test_setup_demo_platform(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), - ( - SERVICE_ALARM_ARM_CUSTOM_BYPASS, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ), - (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), + (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), + (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), ], ) async def test_no_pending(hass: HomeAssistant, service, expected_state) -> None: @@ -76,7 +75,7 @@ async def test_no_pending(hass: HomeAssistant, service, expected_state) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -91,14 +90,11 @@ async def test_no_pending(hass: HomeAssistant, service, expected_state) -> None: @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), - ( - SERVICE_ALARM_ARM_CUSTOM_BYPASS, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ), - (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), + (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), + (SERVICE_ALARM_ARM_HOME, 
STATE_ALARM_ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), ], ) async def test_no_pending_when_code_not_req( @@ -123,7 +119,7 @@ async def test_no_pending_when_code_not_req( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -138,14 +134,11 @@ async def test_no_pending_when_code_not_req( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), - ( - SERVICE_ALARM_ARM_CUSTOM_BYPASS, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ), - (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), + (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), + (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), ], ) async def test_with_pending(hass: HomeAssistant, service, expected_state) -> None: @@ -167,7 +160,7 @@ async def test_with_pending(hass: HomeAssistant, service, expected_state) -> Non entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -176,7 +169,7 @@ async def test_with_pending(hass: HomeAssistant, service, expected_state) -> Non blocking=True, ) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMING + assert hass.states.get(entity_id).state == STATE_ALARM_ARMING state = hass.states.get(entity_id) assert state.attributes["next_state"] == expected_state @@ -206,14 +199,11 @@ async def test_with_pending(hass: HomeAssistant, service, expected_state) -> Non @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), - ( - SERVICE_ALARM_ARM_CUSTOM_BYPASS, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ), - (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), + (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), + (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), ], ) async def test_with_invalid_code(hass: HomeAssistant, service, expected_state) -> None: @@ -235,9 +225,9 @@ async def test_with_invalid_code(hass: HomeAssistant, service, expected_state) - entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED - with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): + with pytest.raises(HomeAssistantError, match=r"^Invalid alarm code provided$"): await hass.services.async_call( alarm_control_panel.DOMAIN, service, @@ -248,20 +238,17 @@ async def test_with_invalid_code(hass: HomeAssistant, service, 
expected_state) - blocking=True, ) - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), - ( - SERVICE_ALARM_ARM_CUSTOM_BYPASS, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ), - (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), + (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), + (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), ], ) async def test_with_template_code(hass: HomeAssistant, service, expected_state) -> None: @@ -283,7 +270,7 @@ async def test_with_template_code(hass: HomeAssistant, service, expected_state) entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -299,14 +286,11 @@ async def test_with_template_code(hass: HomeAssistant, service, expected_state) @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), - ( - SERVICE_ALARM_ARM_CUSTOM_BYPASS, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ), - (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), + (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), + (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), ], ) async def test_with_specific_pending( @@ -336,7 +320,7 @@ async def test_with_specific_pending( blocking=True, ) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMING + assert hass.states.get(entity_id).state == STATE_ALARM_ARMING future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -367,11 +351,11 @@ async def test_trigger_no_pending(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING future = dt_util.utcnow() + timedelta(seconds=60) with patch( @@ -382,8 +366,8 @@ async def test_trigger_no_pending(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_DISARMED + assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_with_delay(hass: HomeAssistant) -> None: @@ -406,17 +390,17 @@ async def test_trigger_with_delay(hass: HomeAssistant) -> None: entity_id = 
"alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -427,8 +411,8 @@ async def test_trigger_with_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_zero_trigger_time(hass: HomeAssistant) -> None: @@ -450,11 +434,11 @@ async def test_trigger_zero_trigger_time(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_zero_trigger_time_with_pending(hass: HomeAssistant) -> None: @@ -476,11 +460,11 @@ async def test_trigger_zero_trigger_time_with_pending(hass: HomeAssistant) -> No entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_pending(hass: HomeAssistant) -> None: @@ -502,14 +486,14 @@ async def test_trigger_with_pending(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING state = hass.states.get(entity_id) - assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED + assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -520,8 +504,8 @@ async def test_trigger_with_pending(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_DISARMED + assert state.state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -532,7 +516,7 @@ async def test_trigger_with_pending(hass: HomeAssistant) -> None: 
await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.DISARMED + assert state.state == STATE_ALARM_DISARMED async def test_trigger_with_unused_specific_delay(hass: HomeAssistant) -> None: @@ -556,17 +540,17 @@ async def test_trigger_with_unused_specific_delay(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -577,8 +561,8 @@ async def test_trigger_with_unused_specific_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_with_specific_delay(hass: HomeAssistant) -> None: @@ -602,17 +586,17 @@ async def test_trigger_with_specific_delay(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -623,8 +607,8 @@ async def test_trigger_with_specific_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_with_pending_and_delay(hass: HomeAssistant) -> None: @@ -647,17 +631,17 @@ async def test_trigger_with_pending_and_delay(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - 
assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -668,8 +652,8 @@ async def test_trigger_with_pending_and_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED future += timedelta(seconds=1) with patch( @@ -680,8 +664,8 @@ async def test_trigger_with_pending_and_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_with_pending_and_specific_delay(hass: HomeAssistant) -> None: @@ -705,17 +689,17 @@ async def test_trigger_with_pending_and_specific_delay(hass: HomeAssistant) -> N entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -726,8 +710,8 @@ async def test_trigger_with_pending_and_specific_delay(hass: HomeAssistant) -> N await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED future += timedelta(seconds=1) with patch( @@ -738,8 +722,8 @@ async def test_trigger_with_pending_and_specific_delay(hass: HomeAssistant) -> N await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_with_specific_pending(hass: HomeAssistant) -> None: @@ -764,7 +748,7 @@ async def test_trigger_with_specific_pending(hass: HomeAssistant) -> None: await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -775,8 +759,8 @@ async def test_trigger_with_specific_pending(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = 
hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_DISARMED + assert state.state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -786,7 +770,7 @@ async def test_trigger_with_specific_pending(hass: HomeAssistant) -> None: async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_disarm_after_trigger(hass: HomeAssistant) -> None: @@ -808,13 +792,13 @@ async def test_trigger_with_disarm_after_trigger(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_DISARMED + assert state.state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -824,7 +808,7 @@ async def test_trigger_with_disarm_after_trigger(hass: HomeAssistant) -> None: async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_zero_specific_trigger_time(hass: HomeAssistant) -> None: @@ -847,11 +831,11 @@ async def test_trigger_with_zero_specific_trigger_time(hass: HomeAssistant) -> N entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_unused_zero_specific_trigger_time( @@ -876,13 +860,13 @@ async def test_trigger_with_unused_zero_specific_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_DISARMED + assert state.state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -892,7 +876,7 @@ async def test_trigger_with_unused_zero_specific_trigger_time( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_specific_trigger_time(hass: HomeAssistant) -> None: @@ -914,13 +898,13 @@ async def test_trigger_with_specific_trigger_time(hass: HomeAssistant) -> None: entity_id = 
"alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_DISARMED + assert state.state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -930,7 +914,7 @@ async def test_trigger_with_specific_trigger_time(hass: HomeAssistant) -> None: async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_no_disarm_after_trigger(hass: HomeAssistant) -> None: @@ -953,17 +937,17 @@ async def test_trigger_with_no_disarm_after_trigger(hass: HomeAssistant) -> None entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE, entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -973,7 +957,7 @@ async def test_trigger_with_no_disarm_after_trigger(hass: HomeAssistant) -> None async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY async def test_back_to_back_trigger_with_no_disarm_after_trigger( @@ -998,17 +982,17 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE, entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1018,13 +1002,13 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - 
assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1034,7 +1018,7 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY async def test_disarm_while_pending_trigger(hass: HomeAssistant) -> None: @@ -1055,15 +1039,15 @@ async def test_disarm_while_pending_trigger(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING await common.async_alarm_disarm(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1073,7 +1057,7 @@ async def test_disarm_while_pending_trigger(hass: HomeAssistant) -> None: async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> None: @@ -1095,7 +1079,7 @@ async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> N entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED assert ( hass.states.get(entity_id).attributes[alarm_control_panel.ATTR_CODE_FORMAT] == alarm_control_panel.CodeFormat.NUMBER @@ -1103,12 +1087,12 @@ async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> N await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING - with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): + with pytest.raises(HomeAssistantError, match=r"^Invalid alarm code provided$"): await common.async_alarm_disarm(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1119,8 +1103,8 @@ async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> N await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_DISARMED + assert state.state == STATE_ALARM_TRIGGERED async def test_disarm_with_template_code(hass: HomeAssistant) -> None: @@ -1142,23 +1126,23 @@ async def test_disarm_with_template_code(hass: HomeAssistant) -> None: entity_id = 
"alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_home(hass, "def") state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.ARMED_HOME + assert state.state == STATE_ALARM_ARMED_HOME - with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): + with pytest.raises(HomeAssistantError, match=r"^Invalid alarm code provided$"): await common.async_alarm_disarm(hass, "def") state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.ARMED_HOME + assert state.state == STATE_ALARM_ARMED_HOME await common.async_alarm_disarm(hass, "abc") state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.DISARMED + assert state.state == STATE_ALARM_DISARMED async def test_arm_away_after_disabled_disarmed(hass: HomeAssistant) -> None: @@ -1183,21 +1167,21 @@ async def test_arm_away_after_disabled_disarmed(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.ARMING - assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED - assert state.attributes["next_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == STATE_ALARM_ARMING + assert state.attributes["previous_state"] == STATE_ALARM_DISARMED + assert state.attributes["next_state"] == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.ARMING - assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED - assert state.attributes["next_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == STATE_ALARM_ARMING + assert state.attributes["previous_state"] == STATE_ALARM_DISARMED + assert state.attributes["next_state"] == STATE_ALARM_ARMED_AWAY future = dt_util.utcnow() + timedelta(seconds=1) with freeze_time(future): @@ -1205,14 +1189,14 @@ async def test_arm_away_after_disabled_disarmed(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.ARMED_AWAY + assert state.state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY - assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY + assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED future += timedelta(seconds=1) with freeze_time(future): @@ -1220,19 +1204,19 @@ async def test_arm_away_after_disabled_disarmed(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == STATE_ALARM_TRIGGERED @pytest.mark.parametrize( 
"expected_state", [ - (AlarmControlPanelState.ARMED_AWAY), - (AlarmControlPanelState.ARMED_CUSTOM_BYPASS), - (AlarmControlPanelState.ARMED_HOME), - (AlarmControlPanelState.ARMED_NIGHT), - (AlarmControlPanelState.ARMED_VACATION), - (AlarmControlPanelState.DISARMED), + (STATE_ALARM_ARMED_AWAY), + (STATE_ALARM_ARMED_CUSTOM_BYPASS), + (STATE_ALARM_ARMED_HOME), + (STATE_ALARM_ARMED_NIGHT), + (STATE_ALARM_ARMED_VACATION), + (STATE_ALARM_DISARMED), ], ) async def test_restore_state(hass: HomeAssistant, expected_state) -> None: @@ -1265,11 +1249,11 @@ async def test_restore_state(hass: HomeAssistant, expected_state) -> None: @pytest.mark.parametrize( "expected_state", [ - (AlarmControlPanelState.ARMED_AWAY), - (AlarmControlPanelState.ARMED_CUSTOM_BYPASS), - (AlarmControlPanelState.ARMED_HOME), - (AlarmControlPanelState.ARMED_NIGHT), - (AlarmControlPanelState.ARMED_VACATION), + (STATE_ALARM_ARMED_AWAY), + (STATE_ALARM_ARMED_CUSTOM_BYPASS), + (STATE_ALARM_ARMED_HOME), + (STATE_ALARM_ARMED_NIGHT), + (STATE_ALARM_ARMED_VACATION), ], ) async def test_restore_state_arming(hass: HomeAssistant, expected_state) -> None: @@ -1277,7 +1261,7 @@ async def test_restore_state_arming(hass: HomeAssistant, expected_state) -> None time = dt_util.utcnow() - timedelta(seconds=15) entity_id = "alarm_control_panel.test" attributes = { - "previous_state": AlarmControlPanelState.DISARMED, + "previous_state": STATE_ALARM_DISARMED, "next_state": expected_state, } mock_restore_cache( @@ -1304,9 +1288,9 @@ async def test_restore_state_arming(hass: HomeAssistant, expected_state) -> None state = hass.states.get(entity_id) assert state - assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.attributes["previous_state"] == STATE_ALARM_DISARMED assert state.attributes["next_state"] == expected_state - assert state.state == AlarmControlPanelState.ARMING + assert state.state == STATE_ALARM_ARMING future = time + timedelta(seconds=61) with freeze_time(future): @@ -1320,12 +1304,12 @@ async def test_restore_state_arming(hass: HomeAssistant, expected_state) -> None @pytest.mark.parametrize( "previous_state", [ - (AlarmControlPanelState.ARMED_AWAY), - (AlarmControlPanelState.ARMED_CUSTOM_BYPASS), - (AlarmControlPanelState.ARMED_HOME), - (AlarmControlPanelState.ARMED_NIGHT), - (AlarmControlPanelState.ARMED_VACATION), - (AlarmControlPanelState.DISARMED), + (STATE_ALARM_ARMED_AWAY), + (STATE_ALARM_ARMED_CUSTOM_BYPASS), + (STATE_ALARM_ARMED_HOME), + (STATE_ALARM_ARMED_NIGHT), + (STATE_ALARM_ARMED_VACATION), + (STATE_ALARM_DISARMED), ], ) async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> None: @@ -1334,18 +1318,11 @@ async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> Non entity_id = "alarm_control_panel.test" attributes = { "previous_state": previous_state, - "next_state": AlarmControlPanelState.TRIGGERED, + "next_state": STATE_ALARM_TRIGGERED, } mock_restore_cache( hass, - ( - State( - entity_id, - AlarmControlPanelState.TRIGGERED, - attributes, - last_updated=time, - ), - ), + (State(entity_id, STATE_ALARM_TRIGGERED, attributes, last_updated=time),), ) hass.set_state(CoreState.starting) @@ -1370,8 +1347,8 @@ async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> Non state = hass.states.get(entity_id) assert state assert state.attributes["previous_state"] == previous_state - assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED - assert state.state == AlarmControlPanelState.PENDING + assert 
state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == STATE_ALARM_PENDING future = time + timedelta(seconds=61) with freeze_time(future): @@ -1379,7 +1356,7 @@ async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> Non await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_TRIGGERED future = time + timedelta(seconds=121) with freeze_time(future): @@ -1393,12 +1370,12 @@ async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> Non @pytest.mark.parametrize( "previous_state", [ - (AlarmControlPanelState.ARMED_AWAY), - (AlarmControlPanelState.ARMED_CUSTOM_BYPASS), - (AlarmControlPanelState.ARMED_HOME), - (AlarmControlPanelState.ARMED_NIGHT), - (AlarmControlPanelState.ARMED_VACATION), - (AlarmControlPanelState.DISARMED), + (STATE_ALARM_ARMED_AWAY), + (STATE_ALARM_ARMED_CUSTOM_BYPASS), + (STATE_ALARM_ARMED_HOME), + (STATE_ALARM_ARMED_NIGHT), + (STATE_ALARM_ARMED_VACATION), + (STATE_ALARM_DISARMED), ], ) async def test_restore_state_triggered(hass: HomeAssistant, previous_state) -> None: @@ -1410,14 +1387,7 @@ async def test_restore_state_triggered(hass: HomeAssistant, previous_state) -> N } mock_restore_cache( hass, - ( - State( - entity_id, - AlarmControlPanelState.TRIGGERED, - attributes, - last_updated=time, - ), - ), + (State(entity_id, STATE_ALARM_TRIGGERED, attributes, last_updated=time),), ) hass.set_state(CoreState.starting) @@ -1441,9 +1411,9 @@ async def test_restore_state_triggered(hass: HomeAssistant, previous_state) -> N state = hass.states.get(entity_id) assert state - assert state.attributes[ATTR_PREVIOUS_STATE] == previous_state - assert state.attributes[ATTR_NEXT_STATE] is None - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.attributes["previous_state"] == previous_state + assert "next_state" not in state.attributes + assert state.state == STATE_ALARM_TRIGGERED future = time + timedelta(seconds=121) with freeze_time(future): @@ -1459,18 +1429,11 @@ async def test_restore_state_triggered_long_ago(hass: HomeAssistant) -> None: time = dt_util.utcnow() - timedelta(seconds=125) entity_id = "alarm_control_panel.test" attributes = { - "previous_state": AlarmControlPanelState.ARMED_AWAY, + "previous_state": STATE_ALARM_ARMED_AWAY, } mock_restore_cache( hass, - ( - State( - entity_id, - AlarmControlPanelState.TRIGGERED, - attributes, - last_updated=time, - ), - ), + (State(entity_id, STATE_ALARM_TRIGGERED, attributes, last_updated=time),), ) hass.set_state(CoreState.starting) @@ -1493,7 +1456,7 @@ async def test_restore_state_triggered_long_ago(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.DISARMED + assert state.state == STATE_ALARM_DISARMED async def test_default_arming_states(hass: HomeAssistant) -> None: diff --git a/tests/components/manual_mqtt/test_alarm_control_panel.py b/tests/components/manual_mqtt/test_alarm_control_panel.py index 2b401cb10a0..a1c913135a7 100644 --- a/tests/components/manual_mqtt/test_alarm_control_panel.py +++ b/tests/components/manual_mqtt/test_alarm_control_panel.py @@ -7,7 +7,6 @@ from freezegun import freeze_time import pytest from homeassistant.components import alarm_control_panel -from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.const import ( ATTR_CODE, ATTR_ENTITY_ID, @@ -16,6 +15,14 @@ from 
homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_ARM_VACATION, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_CUSTOM_BYPASS, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMED_VACATION, + STATE_ALARM_DISARMED, + STATE_ALARM_PENDING, + STATE_ALARM_TRIGGERED, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -71,14 +78,11 @@ async def test_fail_setup_without_command_topic( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), - ( - SERVICE_ALARM_ARM_CUSTOM_BYPASS, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ), - (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), + (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), + (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), ], ) async def test_no_pending( @@ -107,7 +111,7 @@ async def test_no_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -122,14 +126,11 @@ async def test_no_pending( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), - ( - SERVICE_ALARM_ARM_CUSTOM_BYPASS, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ), - (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), + (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), + (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), ], ) async def test_no_pending_when_code_not_req( @@ -159,7 +160,7 @@ async def test_no_pending_when_code_not_req( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -174,14 +175,11 @@ async def test_no_pending_when_code_not_req( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), - ( - SERVICE_ALARM_ARM_CUSTOM_BYPASS, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ), - (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), + (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), + (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), ], ) async def test_with_pending( @@ -210,7 +208,7 @@ async def test_with_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == 
AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -219,7 +217,7 @@ async def test_with_pending( blocking=True, ) - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING state = hass.states.get(entity_id) assert state.attributes["post_pending_state"] == expected_state @@ -249,14 +247,11 @@ async def test_with_pending( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), - ( - SERVICE_ALARM_ARM_CUSTOM_BYPASS, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ), - (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), + (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), + (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), ], ) async def test_with_invalid_code( @@ -285,7 +280,7 @@ async def test_with_invalid_code( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED with pytest.raises(HomeAssistantError, match=r"^Invalid alarm code provided$"): await hass.services.async_call( @@ -295,20 +290,17 @@ async def test_with_invalid_code( blocking=True, ) - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), - ( - SERVICE_ALARM_ARM_CUSTOM_BYPASS, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ), - (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), + (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), + (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), ], ) async def test_with_template_code( @@ -337,7 +329,7 @@ async def test_with_template_code( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -353,14 +345,11 @@ async def test_with_template_code( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), - ( - SERVICE_ALARM_ARM_CUSTOM_BYPASS, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ), - (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), + (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), + (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), + 
(SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), ], ) async def test_with_specific_pending( @@ -395,7 +384,7 @@ async def test_with_specific_pending( blocking=True, ) - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -430,12 +419,12 @@ async def test_trigger_no_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING future = dt_util.utcnow() + timedelta(seconds=60) with patch( @@ -445,7 +434,7 @@ async def test_trigger_no_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED async def test_trigger_with_delay( @@ -472,17 +461,17 @@ async def test_trigger_with_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -493,7 +482,7 @@ async def test_trigger_with_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_zero_trigger_time( @@ -519,11 +508,11 @@ async def test_trigger_zero_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_zero_trigger_time_with_pending( @@ -549,11 +538,11 @@ async def test_trigger_zero_trigger_time_with_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_pending( @@ -579,14 +568,14 @@ async def test_trigger_with_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await 
common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING state = hass.states.get(entity_id) - assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -596,7 +585,7 @@ async def test_trigger_with_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -606,7 +595,7 @@ async def test_trigger_with_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_disarm_after_trigger( @@ -632,11 +621,11 @@ async def test_trigger_with_disarm_after_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -646,7 +635,7 @@ async def test_trigger_with_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_zero_specific_trigger_time( @@ -673,11 +662,11 @@ async def test_trigger_with_zero_specific_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_unused_zero_specific_trigger_time( @@ -704,11 +693,11 @@ async def test_trigger_with_unused_zero_specific_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -718,7 +707,7 @@ async def test_trigger_with_unused_zero_specific_trigger_time( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_specific_trigger_time( @@ -744,11 +733,11 @@ async def test_trigger_with_specific_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert 
hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -758,7 +747,7 @@ async def test_trigger_with_specific_trigger_time( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_back_to_back_trigger_with_no_disarm_after_trigger( @@ -784,15 +773,15 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE, entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -802,11 +791,11 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -816,7 +805,7 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY async def test_disarm_while_pending_trigger( @@ -841,15 +830,15 @@ async def test_disarm_while_pending_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING await common.async_alarm_disarm(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -859,7 +848,7 @@ async def test_disarm_while_pending_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_disarm_during_trigger_with_invalid_code( @@ -885,7 +874,7 @@ async def test_disarm_during_trigger_with_invalid_code( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == 
AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED assert ( hass.states.get(entity_id).attributes[alarm_control_panel.ATTR_CODE_FORMAT] == alarm_control_panel.CodeFormat.NUMBER @@ -893,12 +882,12 @@ async def test_disarm_during_trigger_with_invalid_code( await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING with pytest.raises(HomeAssistantError, match=r"Invalid alarm code provided$"): await common.async_alarm_disarm(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -908,7 +897,7 @@ async def test_disarm_during_trigger_with_invalid_code( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED async def test_trigger_with_unused_specific_delay( @@ -936,17 +925,17 @@ async def test_trigger_with_unused_specific_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -957,7 +946,7 @@ async def test_trigger_with_unused_specific_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_with_specific_delay( @@ -985,17 +974,17 @@ async def test_trigger_with_specific_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -1006,7 +995,7 @@ async def test_trigger_with_specific_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_with_pending_and_delay( @@ -1034,17 +1023,17 @@ async def test_trigger_with_pending_and_delay( entity_id = 
"alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -1055,8 +1044,8 @@ async def test_trigger_with_pending_and_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future += timedelta(seconds=1) with patch( @@ -1067,7 +1056,7 @@ async def test_trigger_with_pending_and_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_with_pending_and_specific_delay( @@ -1096,17 +1085,17 @@ async def test_trigger_with_pending_and_specific_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -1117,8 +1106,8 @@ async def test_trigger_with_pending_and_specific_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_PENDING + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future += timedelta(seconds=1) with patch( @@ -1129,7 +1118,7 @@ async def test_trigger_with_pending_and_specific_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_with_specific_pending( @@ -1158,7 +1147,7 @@ async def test_trigger_with_specific_pending( await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -1168,7 +1157,7 @@ async def test_trigger_with_specific_pending( async_fire_time_changed(hass, future) await 
hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1178,7 +1167,7 @@ async def test_trigger_with_specific_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_no_disarm_after_trigger( @@ -1205,15 +1194,15 @@ async def test_trigger_with_no_disarm_after_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE, entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1223,7 +1212,7 @@ async def test_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY async def test_arm_away_after_disabled_disarmed( @@ -1252,21 +1241,21 @@ async def test_arm_away_after_disabled_disarmed( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["pre_pending_state"] == AlarmControlPanelState.DISARMED - assert state.attributes["post_pending_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == STATE_ALARM_PENDING + assert state.attributes["pre_pending_state"] == STATE_ALARM_DISARMED + assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert state.attributes["pre_pending_state"] == AlarmControlPanelState.DISARMED - assert state.attributes["post_pending_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == STATE_ALARM_PENDING + assert state.attributes["pre_pending_state"] == STATE_ALARM_DISARMED + assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_AWAY future = dt_util.utcnow() + timedelta(seconds=1) with freeze_time(future): @@ -1274,18 +1263,14 @@ async def test_arm_away_after_disabled_disarmed( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.ARMED_AWAY + assert state.state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.PENDING - assert ( - state.attributes["pre_pending_state"] == AlarmControlPanelState.ARMED_AWAY - ) - assert ( - state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED - ) 
+ assert state.state == STATE_ALARM_PENDING + assert state.attributes["pre_pending_state"] == STATE_ALARM_ARMED_AWAY + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future += timedelta(seconds=1) with freeze_time(future): @@ -1293,7 +1278,7 @@ async def test_arm_away_after_disabled_disarmed( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_TRIGGERED async def test_disarm_with_template_code( @@ -1319,33 +1304,33 @@ async def test_disarm_with_template_code( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_home(hass, "def") state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.ARMED_HOME + assert state.state == STATE_ALARM_ARMED_HOME with pytest.raises(HomeAssistantError, match=r"Invalid alarm code provided$"): await common.async_alarm_disarm(hass, "def") state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.ARMED_HOME + assert state.state == STATE_ALARM_ARMED_HOME await common.async_alarm_disarm(hass, "abc") state = hass.states.get(entity_id) - assert state.state == AlarmControlPanelState.DISARMED + assert state.state == STATE_ALARM_DISARMED @pytest.mark.parametrize( ("config", "expected_state"), [ - ("payload_arm_away", AlarmControlPanelState.ARMED_AWAY), - ("payload_arm_custom_bypass", AlarmControlPanelState.ARMED_CUSTOM_BYPASS), - ("payload_arm_home", AlarmControlPanelState.ARMED_HOME), - ("payload_arm_night", AlarmControlPanelState.ARMED_NIGHT), - ("payload_arm_vacation", AlarmControlPanelState.ARMED_VACATION), + ("payload_arm_away", STATE_ALARM_ARMED_AWAY), + ("payload_arm_custom_bypass", STATE_ALARM_ARMED_CUSTOM_BYPASS), + ("payload_arm_home", STATE_ALARM_ARMED_HOME), + ("payload_arm_night", STATE_ALARM_ARMED_NIGHT), + ("payload_arm_vacation", STATE_ALARM_ARMED_VACATION), ], ) async def test_arm_via_command_topic( @@ -1374,12 +1359,12 @@ async def test_arm_via_command_topic( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED # Fire the arm command via MQTT; ensure state changes to arming async_fire_mqtt_message(hass, "alarm/command", command) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) @@ -1415,18 +1400,18 @@ async def test_disarm_pending_via_command_topic( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING + assert hass.states.get(entity_id).state == STATE_ALARM_PENDING # Now that we're pending, receive a command to disarm async_fire_mqtt_message(hass, "alarm/command", "DISARM") await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_state_changes_are_published_to_mqtt( @@ -1452,7 +1437,7 @@ 
async def test_state_changes_are_published_to_mqtt( # Component should send disarmed alarm state on startup await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", AlarmControlPanelState.DISARMED, 0, True + "alarm/state", STATE_ALARM_DISARMED, 0, True ) mqtt_mock.async_publish.reset_mock() @@ -1460,7 +1445,7 @@ async def test_state_changes_are_published_to_mqtt( await common.async_alarm_arm_home(hass, "1234") await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", AlarmControlPanelState.PENDING, 0, True + "alarm/state", STATE_ALARM_PENDING, 0, True ) mqtt_mock.async_publish.reset_mock() # Fast-forward a little bit @@ -1472,7 +1457,7 @@ async def test_state_changes_are_published_to_mqtt( async_fire_time_changed(hass, future) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", AlarmControlPanelState.ARMED_HOME, 0, True + "alarm/state", STATE_ALARM_ARMED_HOME, 0, True ) mqtt_mock.async_publish.reset_mock() @@ -1480,7 +1465,7 @@ async def test_state_changes_are_published_to_mqtt( await common.async_alarm_arm_away(hass, "1234") await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", AlarmControlPanelState.PENDING, 0, True + "alarm/state", STATE_ALARM_PENDING, 0, True ) mqtt_mock.async_publish.reset_mock() # Fast-forward a little bit @@ -1492,7 +1477,7 @@ async def test_state_changes_are_published_to_mqtt( async_fire_time_changed(hass, future) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", AlarmControlPanelState.ARMED_AWAY, 0, True + "alarm/state", STATE_ALARM_ARMED_AWAY, 0, True ) mqtt_mock.async_publish.reset_mock() @@ -1500,7 +1485,7 @@ async def test_state_changes_are_published_to_mqtt( await common.async_alarm_arm_night(hass, "1234") await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", AlarmControlPanelState.PENDING, 0, True + "alarm/state", STATE_ALARM_PENDING, 0, True ) mqtt_mock.async_publish.reset_mock() # Fast-forward a little bit @@ -1512,7 +1497,7 @@ async def test_state_changes_are_published_to_mqtt( async_fire_time_changed(hass, future) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", AlarmControlPanelState.ARMED_NIGHT, 0, True + "alarm/state", STATE_ALARM_ARMED_NIGHT, 0, True ) mqtt_mock.async_publish.reset_mock() @@ -1520,7 +1505,7 @@ async def test_state_changes_are_published_to_mqtt( await common.async_alarm_disarm(hass) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", AlarmControlPanelState.DISARMED, 0, True + "alarm/state", STATE_ALARM_DISARMED, 0, True ) diff --git a/tests/components/map/__init__.py b/tests/components/map/__init__.py new file mode 100644 index 00000000000..142afc0d5c9 --- /dev/null +++ b/tests/components/map/__init__.py @@ -0,0 +1 @@ +"""Tests for Map.""" diff --git a/tests/components/map/test_init.py b/tests/components/map/test_init.py new file mode 100644 index 00000000000..afafdd1eb16 --- /dev/null +++ b/tests/components/map/test_init.py @@ -0,0 +1,118 @@ +"""Test the Map initialization.""" + +from typing import Any +from unittest.mock import MagicMock, patch + +import pytest +from typing_extensions import Generator + +from homeassistant.components.map import DOMAIN +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.helpers 
import issue_registry as ir +from homeassistant.setup import async_setup_component + +from tests.common import MockModule, mock_integration + + +@pytest.fixture +def mock_onboarding_not_done() -> Generator[MagicMock]: + """Mock that Home Assistant is currently onboarding.""" + with patch( + "homeassistant.components.onboarding.async_is_onboarded", + return_value=False, + ) as mock_onboarding: + yield mock_onboarding + + +@pytest.fixture +def mock_onboarding_done() -> Generator[MagicMock]: + """Mock that Home Assistant is currently onboarding.""" + with patch( + "homeassistant.components.onboarding.async_is_onboarded", + return_value=True, + ) as mock_onboarding: + yield mock_onboarding + + +@pytest.fixture +def mock_create_map_dashboard() -> Generator[MagicMock]: + """Mock the create map dashboard function.""" + with patch( + "homeassistant.components.map._create_map_dashboard", + ) as mock_create_map_dashboard: + yield mock_create_map_dashboard + + +async def test_create_dashboards_when_onboarded( + hass: HomeAssistant, + hass_storage: dict[str, Any], + mock_onboarding_done, + mock_create_map_dashboard, +) -> None: + """Test we create map dashboard when onboarded.""" + # Mock the lovelace integration to prevent it from creating a map dashboard + mock_integration(hass, MockModule("lovelace")) + + assert await async_setup_component(hass, DOMAIN, {}) + + mock_create_map_dashboard.assert_called_once() + assert hass_storage[DOMAIN]["data"] == {"migrated": True} + + +async def test_create_dashboards_once_when_onboarded( + hass: HomeAssistant, + hass_storage: dict[str, Any], + mock_onboarding_done, + mock_create_map_dashboard, +) -> None: + """Test we create map dashboard once when onboarded.""" + hass_storage[DOMAIN] = { + "version": 1, + "minor_version": 1, + "key": "map", + "data": {"migrated": True}, + } + + # Mock the lovelace integration to prevent it from creating a map dashboard + mock_integration(hass, MockModule("lovelace")) + + assert await async_setup_component(hass, DOMAIN, {}) + + mock_create_map_dashboard.assert_not_called() + assert hass_storage[DOMAIN]["data"] == {"migrated": True} + + +async def test_create_dashboards_when_not_onboarded( + hass: HomeAssistant, + hass_storage: dict[str, Any], + mock_onboarding_not_done, + mock_create_map_dashboard, +) -> None: + """Test we do not create map dashboard when not onboarded.""" + # Mock the lovelace integration to prevent it from creating a map dashboard + mock_integration(hass, MockModule("lovelace")) + + assert await async_setup_component(hass, DOMAIN, {}) + + mock_create_map_dashboard.assert_not_called() + assert hass_storage[DOMAIN]["data"] == {"migrated": True} + + +async def test_create_issue_when_not_manually_configured( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test creating issue registry issues.""" + assert await async_setup_component(hass, DOMAIN, {}) + + assert not issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, "deprecated_yaml_map" + ) + + +async def test_create_issue_when_manually_configured( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test creating issue registry issues.""" + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + + assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, "deprecated_yaml_map") diff --git a/tests/components/marytts/test_tts.py b/tests/components/marytts/test_tts.py index 0ad27cde29b..75784bb56c5 100644 --- a/tests/components/marytts/test_tts.py +++ b/tests/components/marytts/test_tts.py @@ -34,8 +34,9 @@ 
def get_empty_wav() -> bytes: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: """Mock the TTS cache dir with empty dir.""" + return mock_tts_cache_dir async def test_setup_component(hass: HomeAssistant) -> None: diff --git a/tests/components/mastodon/__init__.py b/tests/components/mastodon/__init__.py deleted file mode 100644 index a4c730db07a..00000000000 --- a/tests/components/mastodon/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for the Mastodon integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/mastodon/conftest.py b/tests/components/mastodon/conftest.py deleted file mode 100644 index ac23141be55..00000000000 --- a/tests/components/mastodon/conftest.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Mastodon tests configuration.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.mastodon.const import CONF_BASE_URL, DOMAIN -from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET - -from tests.common import MockConfigEntry, load_json_object_fixture - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.mastodon.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_mastodon_client() -> Generator[AsyncMock]: - """Mock a Mastodon client.""" - with ( - patch( - "homeassistant.components.mastodon.utils.Mastodon", - autospec=True, - ) as mock_client, - ): - client = mock_client.return_value - client.instance.return_value = load_json_object_fixture("instance.json", DOMAIN) - client.account_verify_credentials.return_value = load_json_object_fixture( - "account_verify_credentials.json", DOMAIN - ) - client.status_post.return_value = None - yield client - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title="@trwnh@mastodon.social", - data={ - CONF_BASE_URL: "https://mastodon.social", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - CONF_ACCESS_TOKEN: "access_token", - }, - entry_id="01J35M4AH9HYRC2V0G6RNVNWJH", - unique_id="trwnh_mastodon_social", - version=1, - minor_version=2, - ) diff --git a/tests/components/mastodon/fixtures/account_verify_credentials.json b/tests/components/mastodon/fixtures/account_verify_credentials.json deleted file mode 100644 index 401caa121ae..00000000000 --- a/tests/components/mastodon/fixtures/account_verify_credentials.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "id": "14715", - "username": "trwnh", - "acct": "trwnh", - "display_name": "infinite love ⴳ", - "locked": false, - "bot": false, - "created_at": "2016-11-24T10:02:12.085Z", - "note": "
i have approximate knowledge of many things. perpetual student. (nb/ace/they) xmpp/email: a@trwnh.com https://trwnh.com help me live: https://liberapay.com/at or https://paypal.me/trwnh - my triggers are moths and glitter - i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise - dm me if i did something wrong, so i can improve - purest person on fedi, do not lewd in my presence - #1 ami cole fan account :fatyoshi:
", - "url": "https://mastodon.social/@trwnh", - "avatar": "https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png", - "avatar_static": "https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png", - "header": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", - "header_static": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", - "followers_count": 821, - "following_count": 178, - "statuses_count": 33120, - "last_status_at": "2019-11-24T15:49:42.251Z", - "source": { - "privacy": "public", - "sensitive": false, - "language": "", - "note": "i have approximate knowledge of many things. perpetual student. (nb/ace/they)\r\n\r\nxmpp/email: a@trwnh.com\r\nhttps://trwnh.com\r\nhelp me live: https://liberapay.com/at or https://paypal.me/trwnh\r\n\r\n- my triggers are moths and glitter\r\n- i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise\r\n- dm me if i did something wrong, so i can improve\r\n- purest person on fedi, do not lewd in my presence\r\n- #1 ami cole fan account\r\n\r\n:fatyoshi:", - "fields": [ - { - "name": "Website", - "value": "https://trwnh.com", - "verified_at": "2019-08-29T04:14:55.571+00:00" - }, - { - "name": "Sponsor", - "value": "https://liberapay.com/at", - "verified_at": "2019-11-15T10:06:15.557+00:00" - }, - { - "name": "Fan of:", - "value": "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", - "verified_at": null - }, - { - "name": "Main topics:", - "value": "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!", - "verified_at": null - } - ], - "follow_requests_count": 0 - }, - "emojis": [ - { - "shortcode": "fatyoshi", - "url": "https://files.mastodon.social/custom_emojis/images/000/023/920/original/e57ecb623faa0dc9.png", - "static_url": "https://files.mastodon.social/custom_emojis/images/000/023/920/static/e57ecb623faa0dc9.png", - "visible_in_picker": true - } - ], - "fields": [ - { - "name": "Website", - "value": "https://trwnh.com", - "verified_at": "2019-08-29T04:14:55.571+00:00" - }, - { - "name": "Sponsor", - "value": "https://liberapay.com/at", - "verified_at": "2019-11-15T10:06:15.557+00:00" - }, - { - "name": "Fan of:", - "value": "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", - "verified_at": null - }, - { - "name": "Main topics:", - "value": "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. 
i'm just here to hang out and talk to cool people!", - "verified_at": null - } - ] -} diff --git a/tests/components/mastodon/fixtures/instance.json b/tests/components/mastodon/fixtures/instance.json deleted file mode 100644 index b0e904e80ef..00000000000 --- a/tests/components/mastodon/fixtures/instance.json +++ /dev/null @@ -1,147 +0,0 @@ -{ - "domain": "mastodon.social", - "title": "Mastodon", - "version": "4.0.0rc1", - "source_url": "https://github.com/mastodon/mastodon", - "description": "The original server operated by the Mastodon gGmbH non-profit", - "usage": { - "users": { - "active_month": 123122 - } - }, - "thumbnail": { - "url": "https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png", - "blurhash": "UeKUpFxuo~R%0nW;WCnhF6RjaJt757oJodS$", - "versions": { - "@1x": "https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png", - "@2x": "https://files.mastodon.social/site_uploads/files/000/000/001/@2x/57c12f441d083cde.png" - } - }, - "languages": ["en"], - "configuration": { - "urls": { - "streaming": "wss://mastodon.social" - }, - "vapid": { - "public_key": "BCkMmVdKDnKYwzVCDC99Iuc9GvId-x7-kKtuHnLgfF98ENiZp_aj-UNthbCdI70DqN1zUVis-x0Wrot2sBagkMc=" - }, - "accounts": { - "max_featured_tags": 10, - "max_pinned_statuses": 4 - }, - "statuses": { - "max_characters": 500, - "max_media_attachments": 4, - "characters_reserved_per_url": 23 - }, - "media_attachments": { - "supported_mime_types": [ - "image/jpeg", - "image/png", - "image/gif", - "image/heic", - "image/heif", - "image/webp", - "video/webm", - "video/mp4", - "video/quicktime", - "video/ogg", - "audio/wave", - "audio/wav", - "audio/x-wav", - "audio/x-pn-wave", - "audio/vnd.wave", - "audio/ogg", - "audio/vorbis", - "audio/mpeg", - "audio/mp3", - "audio/webm", - "audio/flac", - "audio/aac", - "audio/m4a", - "audio/x-m4a", - "audio/mp4", - "audio/3gpp", - "video/x-ms-asf" - ], - "image_size_limit": 10485760, - "image_matrix_limit": 16777216, - "video_size_limit": 41943040, - "video_frame_rate_limit": 60, - "video_matrix_limit": 2304000 - }, - "polls": { - "max_options": 4, - "max_characters_per_option": 50, - "min_expiration": 300, - "max_expiration": 2629746 - }, - "translation": { - "enabled": true - } - }, - "registrations": { - "enabled": false, - "approval_required": false, - "message": null - }, - "contact": { - "email": "staff@mastodon.social", - "account": { - "id": "1", - "username": "Gargron", - "acct": "Gargron", - "display_name": "Eugen 💀", - "locked": false, - "bot": false, - "discoverable": true, - "group": false, - "created_at": "2016-03-16T00:00:00.000Z", - "note": "
Founder, CEO and lead developer @Mastodon, Germany.
", - "url": "https://mastodon.social/@Gargron", - "avatar": "https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg", - "avatar_static": "https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg", - "header": "https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg", - "header_static": "https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg", - "followers_count": 133026, - "following_count": 311, - "statuses_count": 72605, - "last_status_at": "2022-10-31", - "noindex": false, - "emojis": [], - "fields": [ - { - "name": "Patreon", - "value": "https://www.patreon.com/mastodon", - "verified_at": null - } - ] - } - }, - "rules": [ - { - "id": "1", - "text": "Sexually explicit or violent media must be marked as sensitive when posting" - }, - { - "id": "2", - "text": "No racism, sexism, homophobia, transphobia, xenophobia, or casteism" - }, - { - "id": "3", - "text": "No incitement of violence or promotion of violent ideologies" - }, - { - "id": "4", - "text": "No harassment, dogpiling or doxxing of other users" - }, - { - "id": "5", - "text": "No content illegal in Germany" - }, - { - "id": "7", - "text": "Do not share intentionally false or misleading information" - } - ] -} diff --git a/tests/components/mastodon/snapshots/test_diagnostics.ambr b/tests/components/mastodon/snapshots/test_diagnostics.ambr deleted file mode 100644 index 982ecee7ee2..00000000000 --- a/tests/components/mastodon/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,247 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'account': dict({ - 'acct': 'trwnh', - 'avatar': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png', - 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png', - 'bot': False, - 'created_at': '2016-11-24T10:02:12.085Z', - 'display_name': 'infinite love ⴳ', - 'emojis': list([ - dict({ - 'shortcode': 'fatyoshi', - 'static_url': 'https://files.mastodon.social/custom_emojis/images/000/023/920/static/e57ecb623faa0dc9.png', - 'url': 'https://files.mastodon.social/custom_emojis/images/000/023/920/original/e57ecb623faa0dc9.png', - 'visible_in_picker': True, - }), - ]), - 'fields': list([ - dict({ - 'name': 'Website', - 'value': 'trwnh.com', - 'verified_at': '2019-08-29T04:14:55.571+00:00', - }), - dict({ - 'name': 'Sponsor', - 'value': 'liberapay.com/at', - 'verified_at': '2019-11-15T10:06:15.557+00:00', - }), - dict({ - 'name': 'Fan of:', - 'value': 'Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)', - 'verified_at': None, - }), - dict({ - 'name': 'Main topics:', - 'value': 'systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!', - 'verified_at': None, - }), - ]), - 'followers_count': 821, - 'following_count': 178, - 'header': 'https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg', - 'header_static': 'https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg', - 'id': '14715', - 'last_status_at': '2019-11-24T15:49:42.251Z', - 'locked': False, - 'note': '
i have approximate knowledge of many things. perpetual student. (nb/ace/they) xmpp/email: a@trwnh.com trwnh.com help me live: liberapay.com/at or paypal.me/trwnh - my triggers are moths and glitter - i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise - dm me if i did something wrong, so i can improve - purest person on fedi, do not lewd in my presence - #1 ami cole fan account :fatyoshi:
', - 'source': dict({ - 'fields': list([ - dict({ - 'name': 'Website', - 'value': 'https://trwnh.com', - 'verified_at': '2019-08-29T04:14:55.571+00:00', - }), - dict({ - 'name': 'Sponsor', - 'value': 'https://liberapay.com/at', - 'verified_at': '2019-11-15T10:06:15.557+00:00', - }), - dict({ - 'name': 'Fan of:', - 'value': "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", - 'verified_at': None, - }), - dict({ - 'name': 'Main topics:', - 'value': "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!", - 'verified_at': None, - }), - ]), - 'follow_requests_count': 0, - 'language': '', - 'note': ''' - i have approximate knowledge of many things. perpetual student. (nb/ace/they) - - xmpp/email: a@trwnh.com - https://trwnh.com - help me live: https://liberapay.com/at or https://paypal.me/trwnh - - - my triggers are moths and glitter - - i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise - - dm me if i did something wrong, so i can improve - - purest person on fedi, do not lewd in my presence - - #1 ami cole fan account - - :fatyoshi: - ''', - 'privacy': 'public', - 'sensitive': False, - }), - 'statuses_count': 33120, - 'url': 'https://mastodon.social/@trwnh', - 'username': 'trwnh', - }), - 'instance': dict({ - 'configuration': dict({ - 'accounts': dict({ - 'max_featured_tags': 10, - 'max_pinned_statuses': 4, - }), - 'media_attachments': dict({ - 'image_matrix_limit': 16777216, - 'image_size_limit': 10485760, - 'supported_mime_types': list([ - 'image/jpeg', - 'image/png', - 'image/gif', - 'image/heic', - 'image/heif', - 'image/webp', - 'video/webm', - 'video/mp4', - 'video/quicktime', - 'video/ogg', - 'audio/wave', - 'audio/wav', - 'audio/x-wav', - 'audio/x-pn-wave', - 'audio/vnd.wave', - 'audio/ogg', - 'audio/vorbis', - 'audio/mpeg', - 'audio/mp3', - 'audio/webm', - 'audio/flac', - 'audio/aac', - 'audio/m4a', - 'audio/x-m4a', - 'audio/mp4', - 'audio/3gpp', - 'video/x-ms-asf', - ]), - 'video_frame_rate_limit': 60, - 'video_matrix_limit': 2304000, - 'video_size_limit': 41943040, - }), - 'polls': dict({ - 'max_characters_per_option': 50, - 'max_expiration': 2629746, - 'max_options': 4, - 'min_expiration': 300, - }), - 'statuses': dict({ - 'characters_reserved_per_url': 23, - 'max_characters': 500, - 'max_media_attachments': 4, - }), - 'translation': dict({ - 'enabled': True, - }), - 'urls': dict({ - 'streaming': 'wss://mastodon.social', - }), - 'vapid': dict({ - 'public_key': 'BCkMmVdKDnKYwzVCDC99Iuc9GvId-x7-kKtuHnLgfF98ENiZp_aj-UNthbCdI70DqN1zUVis-x0Wrot2sBagkMc=', - }), - }), - 'contact': dict({ - 'account': dict({ - 'acct': 'Gargron', - 'avatar': 'https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg', - 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg', - 'bot': False, - 'created_at': '2016-03-16T00:00:00.000Z', - 'discoverable': True, - 'display_name': 'Eugen 💀', - 'emojis': list([ - ]), - 'fields': list([ - dict({ - 'name': 'Patreon', - 'value': 'patreon.com/mastodon', - 'verified_at': None, - }), - ]), - 'followers_count': 133026, - 'following_count': 311, - 'group': False, - 'header': 
'https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg', - 'header_static': 'https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg', - 'id': '1', - 'last_status_at': '2022-10-31', - 'locked': False, - 'noindex': False, - 'note': '
Founder, CEO and lead developer @Mastodon, Germany.
', - 'statuses_count': 72605, - 'url': 'https://mastodon.social/@Gargron', - 'username': 'Gargron', - }), - 'email': 'staff@mastodon.social', - }), - 'description': 'The original server operated by the Mastodon gGmbH non-profit', - 'domain': 'mastodon.social', - 'languages': list([ - 'en', - ]), - 'registrations': dict({ - 'approval_required': False, - 'enabled': False, - 'message': None, - }), - 'rules': list([ - dict({ - 'id': '1', - 'text': 'Sexually explicit or violent media must be marked as sensitive when posting', - }), - dict({ - 'id': '2', - 'text': 'No racism, sexism, homophobia, transphobia, xenophobia, or casteism', - }), - dict({ - 'id': '3', - 'text': 'No incitement of violence or promotion of violent ideologies', - }), - dict({ - 'id': '4', - 'text': 'No harassment, dogpiling or doxxing of other users', - }), - dict({ - 'id': '5', - 'text': 'No content illegal in Germany', - }), - dict({ - 'id': '7', - 'text': 'Do not share intentionally false or misleading information', - }), - ]), - 'source_url': 'https://github.com/mastodon/mastodon', - 'thumbnail': dict({ - 'blurhash': 'UeKUpFxuo~R%0nW;WCnhF6RjaJt757oJodS$', - 'url': 'https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png', - 'versions': dict({ - '@1x': 'https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png', - '@2x': 'https://files.mastodon.social/site_uploads/files/000/000/001/@2x/57c12f441d083cde.png', - }), - }), - 'title': 'Mastodon', - 'usage': dict({ - 'users': dict({ - 'active_month': 123122, - }), - }), - 'version': '4.0.0rc1', - }), - }) -# --- diff --git a/tests/components/mastodon/snapshots/test_init.ambr b/tests/components/mastodon/snapshots/test_init.ambr deleted file mode 100644 index 37fa765acea..00000000000 --- a/tests/components/mastodon/snapshots/test_init.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: test_device_info - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'mastodon', - 'trwnh_mastodon_social', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Mastodon gGmbH', - 'model': '@trwnh@mastodon.social', - 'model_id': None, - 'name': 'Mastodon @trwnh@mastodon.social', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': '4.0.0rc1', - 'via_device_id': None, - }) -# --- diff --git a/tests/components/mastodon/snapshots/test_sensor.ambr b/tests/components/mastodon/snapshots/test_sensor.ambr deleted file mode 100644 index c8df8cdab19..00000000000 --- a/tests/components/mastodon/snapshots/test_sensor.ambr +++ /dev/null @@ -1,151 +0,0 @@ -# serializer version: 1 -# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_followers-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_followers', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Followers', - 'platform': 'mastodon', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'followers', - 'unique_id': 'trwnh_mastodon_social_followers', - 'unit_of_measurement': 'accounts', - }) -# --- -# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_followers-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mastodon @trwnh@mastodon.social Followers', - 'state_class': , - 'unit_of_measurement': 'accounts', - }), - 'context': , - 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_followers', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '821', - }) -# --- -# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_following-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_following', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Following', - 'platform': 'mastodon', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'following', - 'unique_id': 'trwnh_mastodon_social_following', - 'unit_of_measurement': 'accounts', - }) -# --- -# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_following-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mastodon @trwnh@mastodon.social Following', - 'state_class': , - 'unit_of_measurement': 'accounts', - }), - 'context': , - 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_following', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '178', - }) -# --- -# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_posts-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_posts', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Posts', - 'platform': 'mastodon', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'posts', - 'unique_id': 'trwnh_mastodon_social_posts', - 'unit_of_measurement': 'posts', - }) -# --- -# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_posts-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mastodon @trwnh@mastodon.social Posts', - 'state_class': , - 'unit_of_measurement': 'posts', - }), - 'context': , - 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_posts', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '33120', - }) -# --- diff --git a/tests/components/mastodon/test_config_flow.py b/tests/components/mastodon/test_config_flow.py deleted file mode 100644 index 33f73812348..00000000000 --- a/tests/components/mastodon/test_config_flow.py +++ /dev/null @@ -1,212 +0,0 @@ -"""Tests for the Mastodon config flow.""" - -from unittest.mock import AsyncMock - -from mastodon.Mastodon import MastodonNetworkError, MastodonUnauthorizedError -import pytest - -from 
homeassistant.components.mastodon.const import CONF_BASE_URL, DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_full_flow( - hass: HomeAssistant, - mock_mastodon_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test full flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_BASE_URL: "https://mastodon.social", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - CONF_ACCESS_TOKEN: "access_token", - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "@trwnh@mastodon.social" - assert result["data"] == { - CONF_BASE_URL: "https://mastodon.social", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - CONF_ACCESS_TOKEN: "access_token", - } - assert result["result"].unique_id == "trwnh_mastodon_social" - - -async def test_full_flow_with_path( - hass: HomeAssistant, - mock_mastodon_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test full flow, where a path is accidentally specified.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_BASE_URL: "https://mastodon.social/home", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - CONF_ACCESS_TOKEN: "access_token", - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "@trwnh@mastodon.social" - assert result["data"] == { - CONF_BASE_URL: "https://mastodon.social", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - CONF_ACCESS_TOKEN: "access_token", - } - assert result["result"].unique_id == "trwnh_mastodon_social" - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (MastodonNetworkError, "network_error"), - (MastodonUnauthorizedError, "unauthorized_error"), - (Exception, "unknown"), - ], -) -async def test_flow_errors( - hass: HomeAssistant, - mock_mastodon_client: AsyncMock, - mock_setup_entry: AsyncMock, - exception: Exception, - error: str, -) -> None: - """Test flow errors.""" - mock_mastodon_client.account_verify_credentials.side_effect = exception - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_BASE_URL: "https://mastodon.social", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - CONF_ACCESS_TOKEN: "access_token", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error} - - mock_mastodon_client.account_verify_credentials.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_BASE_URL: "https://mastodon.social", - CONF_CLIENT_ID: "client_id", - 
CONF_CLIENT_SECRET: "client_secret", - CONF_ACCESS_TOKEN: "access_token", - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_duplicate( - hass: HomeAssistant, - mock_mastodon_client: AsyncMock, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test duplicate flow.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_BASE_URL: "https://mastodon.social", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - CONF_ACCESS_TOKEN: "access_token", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_import_flow( - hass: HomeAssistant, - mock_mastodon_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test importing yaml config.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_BASE_URL: "https://mastodon.social", - CONF_CLIENT_ID: "import_client_id", - CONF_CLIENT_SECRET: "import_client_secret", - CONF_ACCESS_TOKEN: "import_access_token", - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (MastodonNetworkError, "network_error"), - (MastodonUnauthorizedError, "unauthorized_error"), - (Exception, "unknown"), - ], -) -async def test_import_flow_abort( - hass: HomeAssistant, - mock_mastodon_client: AsyncMock, - mock_setup_entry: AsyncMock, - exception: Exception, - error: str, -) -> None: - """Test importing yaml config abort.""" - mock_mastodon_client.account_verify_credentials.side_effect = exception - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_BASE_URL: "https://mastodon.social", - CONF_CLIENT_ID: "import_client_id", - CONF_CLIENT_SECRET: "import_client_secret", - CONF_ACCESS_TOKEN: "import_access_token", - }, - ) - assert result["type"] is FlowResultType.ABORT diff --git a/tests/components/mastodon/test_diagnostics.py b/tests/components/mastodon/test_diagnostics.py deleted file mode 100644 index c2de15d1a51..00000000000 --- a/tests/components/mastodon/test_diagnostics.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Test Mastodon diagnostics.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.core import HomeAssistant - -from . 
import setup_integration - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_mastodon_client: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test config entry diagnostics.""" - await setup_integration(hass, mock_config_entry) - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) - == snapshot - ) diff --git a/tests/components/mastodon/test_init.py b/tests/components/mastodon/test_init.py deleted file mode 100644 index c3d0728fe08..00000000000 --- a/tests/components/mastodon/test_init.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Tests for the Mastodon integration.""" - -from unittest.mock import AsyncMock - -from mastodon.Mastodon import MastodonError -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.mastodon.config_flow import MastodonConfigFlow -from homeassistant.components.mastodon.const import CONF_BASE_URL, DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from . import setup_integration - -from tests.common import MockConfigEntry - - -async def test_device_info( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_mastodon_client: AsyncMock, - mock_config_entry: MockConfigEntry, - device_registry: dr.DeviceRegistry, -) -> None: - """Test device registry integration.""" - await setup_integration(hass, mock_config_entry) - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} - ) - assert device_entry is not None - assert device_entry == snapshot - - -async def test_initialization_failure( - hass: HomeAssistant, - mock_mastodon_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test initialization failure.""" - mock_mastodon_client.instance.side_effect = MastodonError - - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_migrate( - hass: HomeAssistant, - mock_mastodon_client: AsyncMock, -) -> None: - """Test migration.""" - # Setup the config entry - config_entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_BASE_URL: "https://mastodon.social", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - CONF_ACCESS_TOKEN: "access_token", - }, - title="@trwnh@mastodon.social", - unique_id="client_id", - version=1, - minor_version=1, - ) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - # Check migration was successful - assert config_entry.state is ConfigEntryState.LOADED - assert config_entry.data == { - CONF_BASE_URL: "https://mastodon.social", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - CONF_ACCESS_TOKEN: "access_token", - } - assert config_entry.version == MastodonConfigFlow.VERSION - assert config_entry.minor_version == MastodonConfigFlow.MINOR_VERSION - assert config_entry.unique_id == "trwnh_mastodon_social" diff --git a/tests/components/mastodon/test_notify.py b/tests/components/mastodon/test_notify.py deleted file mode 100644 index 
ab2d7456baf..00000000000 --- a/tests/components/mastodon/test_notify.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Tests for the Mastodon notify platform.""" - -from unittest.mock import AsyncMock - -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry - - -async def test_notify( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - mock_mastodon_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test sending a message.""" - await setup_integration(hass, mock_config_entry) - - assert hass.services.has_service(NOTIFY_DOMAIN, "trwnh_mastodon_social") - - await hass.services.async_call( - NOTIFY_DOMAIN, - "trwnh_mastodon_social", - { - "message": "test toot", - }, - blocking=True, - return_response=False, - ) - - assert mock_mastodon_client.status_post.assert_called_once diff --git a/tests/components/mastodon/test_sensor.py b/tests/components/mastodon/test_sensor.py deleted file mode 100644 index 343505260e2..00000000000 --- a/tests/components/mastodon/test_sensor.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Tests for the Mastodon sensors.""" - -from unittest.mock import AsyncMock, patch - -from syrupy.assertion import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_sensors( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - mock_mastodon_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the sensor entities.""" - with patch("homeassistant.components.mastodon.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/matrix/conftest.py b/tests/components/matrix/conftest.py index f0f16787f77..bb5448a8a09 100644 --- a/tests/components/matrix/conftest.py +++ b/tests/components/matrix/conftest.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import Generator from pathlib import Path import re import tempfile @@ -25,6 +24,7 @@ from nio import ( ) from PIL import Image import pytest +from typing_extensions import Generator from homeassistant.components.matrix import ( CONF_COMMANDS, @@ -48,7 +48,7 @@ from homeassistant.const import ( CONF_USERNAME, CONF_VERIFY_SSL, ) -from homeassistant.core import Event, HomeAssistant +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import async_capture_events @@ -267,9 +267,7 @@ def mock_load_json(): @pytest.fixture def mock_allowed_path(): """Allow using NamedTemporaryFile for mock image.""" - with patch( - "homeassistant.core_config.Config.is_allowed_path", return_value=True - ) as mock: + with patch("homeassistant.core.Config.is_allowed_path", return_value=True) as mock: yield mock @@ -296,13 +294,13 @@ async def matrix_bot( @pytest.fixture -def matrix_events(hass: HomeAssistant) -> list[Event]: +def matrix_events(hass: HomeAssistant): """Track event calls.""" return async_capture_events(hass, MATRIX_DOMAIN) @pytest.fixture -def 
command_events(hass: HomeAssistant) -> list[Event]: +def command_events(hass: HomeAssistant): """Track event calls.""" return async_capture_events(hass, EVENT_MATRIX_COMMAND) diff --git a/tests/components/matrix/test_commands.py b/tests/components/matrix/test_commands.py index dabee74fdc3..8539252ad66 100644 --- a/tests/components/matrix/test_commands.py +++ b/tests/components/matrix/test_commands.py @@ -1,11 +1,11 @@ """Test MatrixBot's ability to parse and respond to commands in matrix rooms.""" -from dataclasses import dataclass from functools import partial from itertools import chain from typing import Any from nio import MatrixRoom, RoomMessageText +from pydantic.dataclasses import dataclass import pytest from homeassistant.components.matrix import MatrixBot, RoomID diff --git a/tests/components/matrix/test_send_message.py b/tests/components/matrix/test_send_message.py index 3db2877e789..cdea2270cf9 100644 --- a/tests/components/matrix/test_send_message.py +++ b/tests/components/matrix/test_send_message.py @@ -10,7 +10,7 @@ from homeassistant.components.matrix import ( ) from homeassistant.components.matrix.const import FORMAT_HTML, SERVICE_SEND_MESSAGE from homeassistant.components.notify import ATTR_DATA, ATTR_MESSAGE, ATTR_TARGET -from homeassistant.core import Event, HomeAssistant +from homeassistant.core import HomeAssistant from .conftest import TEST_BAD_ROOM, TEST_JOINABLE_ROOMS @@ -19,7 +19,7 @@ async def test_send_message( hass: HomeAssistant, matrix_bot: MatrixBot, image_path, - matrix_events: list[Event], + matrix_events, caplog: pytest.LogCaptureFixture, ) -> None: """Test the send_message service.""" @@ -63,7 +63,7 @@ async def test_send_message( async def test_unsendable_message( hass: HomeAssistant, matrix_bot: MatrixBot, - matrix_events: list[Event], + matrix_events, caplog: pytest.LogCaptureFixture, ) -> None: """Test the send_message service with an invalid room.""" diff --git a/tests/components/matter/common.py b/tests/components/matter/common.py index 519b4c4027d..7878ac564fd 100644 --- a/tests/components/matter/common.py +++ b/tests/components/matter/common.py @@ -10,11 +10,8 @@ from unittest.mock import MagicMock from matter_server.client.models.node import MatterNode from matter_server.common.helpers.util import dataclass_from_dict from matter_server.common.models import EventType, MatterNodeData -from syrupy import SnapshotAssertion -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry, load_fixture @@ -34,10 +31,15 @@ async def setup_integration_with_node_fixture( hass: HomeAssistant, node_fixture: str, client: MagicMock, - override_attributes: dict[str, Any] | None = None, ) -> MatterNode: """Set up Matter integration with fixture as node.""" - node = create_node_from_fixture(node_fixture, override_attributes) + node_data = load_and_parse_node_fixture(node_fixture) + node = MatterNode( + dataclass_from_dict( + MatterNodeData, + node_data, + ) + ) client.get_nodes.return_value = [node] client.get_node.return_value = node config_entry = MockConfigEntry( @@ -51,21 +53,6 @@ async def setup_integration_with_node_fixture( return node -def create_node_from_fixture( - node_fixture: str, override_attributes: dict[str, Any] | None = None -) -> MatterNode: - """Create a node from a fixture.""" - node_data = load_and_parse_node_fixture(node_fixture) - if override_attributes: - node_data["attributes"].update(override_attributes) - return 
MatterNode( - dataclass_from_dict( - MatterNodeData, - node_data, - ) - ) - - def set_node_attribute( node: MatterNode, endpoint: int, @@ -92,17 +79,3 @@ async def trigger_subscription_callback( if event_filter in (None, event): callback(event, data) await hass.async_block_till_done() - - -def snapshot_matter_entities( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - platform: Platform, -) -> None: - """Snapshot Matter entities.""" - entities = hass.states.async_all(platform) - for entity_state in entities: - entity_entry = entity_registry.async_get(entity_state.entity_id) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert entity_state == snapshot(name=f"{entity_entry.entity_id}-state") diff --git a/tests/components/matter/conftest.py b/tests/components/matter/conftest.py index bbafec48e10..05fd776e57a 100644 --- a/tests/components/matter/conftest.py +++ b/tests/components/matter/conftest.py @@ -3,14 +3,13 @@ from __future__ import annotations import asyncio -from collections.abc import AsyncGenerator -from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from matter_server.client.models.node import MatterNode from matter_server.common.const import SCHEMA_VERSION from matter_server.common.models import ServerInfoMessage import pytest +from typing_extensions import AsyncGenerator, Generator from homeassistant.core import HomeAssistant @@ -52,7 +51,6 @@ async def matter_client_fixture() -> AsyncGenerator[MagicMock]: wifi_credentials_set=True, thread_credentials_set=True, min_supported_schema_version=SCHEMA_VERSION, - bluetooth_enabled=False, ) yield client @@ -71,73 +69,176 @@ async def integration_fixture( return entry -@pytest.fixture( - params=[ - "air_purifier", - "air_quality_sensor", - "color_temperature_light", - "dimmable_light", - "dimmable_plugin_unit", - "door_lock", - "door_lock_with_unbolt", - "eve_contact_sensor", - "eve_energy_plug", - "eve_energy_plug_patched", - "eve_thermo", - "eve_weather_sensor", - "extended_color_light", - "fan", - "flow_sensor", - "generic_switch", - "generic_switch_multi", - "humidity_sensor", - "leak_sensor", - "light_sensor", - "microwave_oven", - "multi_endpoint_light", - "occupancy_sensor", - "on_off_plugin_unit", - "onoff_light", - "onoff_light_alt_name", - "onoff_light_no_name", - "onoff_light_with_levelcontrol_present", - "pressure_sensor", - "room_airconditioner", - "silabs_dishwasher", - "smoke_detector", - "switch_unit", - "temperature_sensor", - "thermostat", - "vacuum_cleaner", - "valve", - "window_covering_full", - "window_covering_lift", - "window_covering_pa_lift", - "window_covering_pa_tilt", - "window_covering_tilt", - ] -) -async def matter_devices( - hass: HomeAssistant, matter_client: MagicMock, request: pytest.FixtureRequest +@pytest.fixture(name="create_backup") +def create_backup_fixture() -> Generator[AsyncMock]: + """Mock Supervisor create backup of add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_create_backup" + ) as create_backup: + yield create_backup + + +@pytest.fixture(name="addon_store_info") +def addon_store_info_fixture() -> Generator[AsyncMock]: + """Mock Supervisor add-on store info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" + ) as addon_store_info: + addon_store_info.return_value = { + "available": False, + "installed": None, + "state": None, + "version": "1.0.0", + } + yield addon_store_info + + +@pytest.fixture(name="addon_info") 
+def addon_info_fixture() -> Generator[AsyncMock]: + """Mock Supervisor add-on info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_info", + ) as addon_info: + addon_info.return_value = { + "available": False, + "hostname": None, + "options": {}, + "state": None, + "update_available": False, + "version": None, + } + yield addon_info + + +@pytest.fixture(name="addon_not_installed") +def addon_not_installed_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on not installed.""" + addon_store_info.return_value["available"] = True + return addon_info + + +@pytest.fixture(name="addon_installed") +def addon_installed_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on already installed but not running.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["hostname"] = "core-matter-server" + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +@pytest.fixture(name="addon_running") +def addon_running_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on already running.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "started", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["hostname"] = "core-matter-server" + addon_info.return_value["state"] = "started" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +@pytest.fixture(name="install_addon") +def install_addon_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Generator[AsyncMock]: + """Mock install add-on.""" + + async def install_addon_side_effect(hass: HomeAssistant, slug: str) -> None: + """Mock install add-on.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + + with patch( + "homeassistant.components.hassio.addon_manager.async_install_addon" + ) as install_addon: + install_addon.side_effect = install_addon_side_effect + yield install_addon + + +@pytest.fixture(name="start_addon") +def start_addon_fixture() -> Generator[AsyncMock]: + """Mock start add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_start_addon" + ) as start_addon: + yield start_addon + + +@pytest.fixture(name="stop_addon") +def stop_addon_fixture() -> Generator[AsyncMock]: + """Mock stop add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_stop_addon" + ) as stop_addon: + yield stop_addon + + +@pytest.fixture(name="uninstall_addon") +def uninstall_addon_fixture() -> Generator[AsyncMock]: + """Mock uninstall add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_uninstall_addon" + ) as uninstall_addon: + yield uninstall_addon + + +@pytest.fixture(name="update_addon") +def update_addon_fixture() -> Generator[AsyncMock]: + """Mock update add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_update_addon" + ) as update_addon: + yield update_addon + + +@pytest.fixture(name="door_lock") +async def door_lock_fixture( + hass: HomeAssistant, matter_client: 
MagicMock ) -> MatterNode: - """Fixture for a Matter device.""" - return await setup_integration_with_node_fixture(hass, request.param, matter_client) + """Fixture for a door lock node.""" + return await setup_integration_with_node_fixture(hass, "door-lock", matter_client) -@pytest.fixture -def attributes() -> dict[str, Any]: - """Return common attributes for all nodes.""" - return {} - - -@pytest.fixture -async def matter_node( - hass: HomeAssistant, - matter_client: MagicMock, - node_fixture: str, - attributes: dict[str, Any], +@pytest.fixture(name="door_lock_with_unbolt") +async def door_lock_with_unbolt_fixture( + hass: HomeAssistant, matter_client: MagicMock ) -> MatterNode: - """Fixture for a Matter node.""" + """Fixture for a door lock node with unbolt feature.""" return await setup_integration_with_node_fixture( - hass, node_fixture, matter_client, attributes + hass, "door-lock-with-unbolt", matter_client + ) + + +@pytest.fixture(name="eve_contact_sensor_node") +async def eve_contact_sensor_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a contact sensor node.""" + return await setup_integration_with_node_fixture( + hass, "eve-contact-sensor", matter_client ) diff --git a/tests/components/matter/fixtures/config_entry_diagnostics.json b/tests/components/matter/fixtures/config_entry_diagnostics.json index 000b0d4e2e6..f591709fbda 100644 --- a/tests/components/matter/fixtures/config_entry_diagnostics.json +++ b/tests/components/matter/fixtures/config_entry_diagnostics.json @@ -6,8 +6,7 @@ "sdk_version": "2022.12.0", "wifi_credentials_set": true, "thread_credentials_set": false, - "min_supported_schema_version": 1, - "bluetooth_enabled": false + "min_supported_schema_version": 1 }, "nodes": [ { diff --git a/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json b/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json index 95447783bbc..503fd3b9a7a 100644 --- a/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json +++ b/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json @@ -7,8 +7,7 @@ "sdk_version": "2022.12.0", "wifi_credentials_set": true, "thread_credentials_set": false, - "min_supported_schema_version": 1, - "bluetooth_enabled": false + "min_supported_schema_version": 1 }, "nodes": [ { diff --git a/tests/components/matter/fixtures/nodes/air_purifier.json b/tests/components/matter/fixtures/nodes/air-purifier.json similarity index 100% rename from tests/components/matter/fixtures/nodes/air_purifier.json rename to tests/components/matter/fixtures/nodes/air-purifier.json diff --git a/tests/components/matter/fixtures/nodes/air_quality_sensor.json b/tests/components/matter/fixtures/nodes/air-quality-sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/air_quality_sensor.json rename to tests/components/matter/fixtures/nodes/air-quality-sensor.json diff --git a/tests/components/matter/fixtures/nodes/color_temperature_light.json b/tests/components/matter/fixtures/nodes/color-temperature-light.json similarity index 100% rename from tests/components/matter/fixtures/nodes/color_temperature_light.json rename to tests/components/matter/fixtures/nodes/color-temperature-light.json diff --git a/tests/components/matter/fixtures/nodes/dimmable_light.json b/tests/components/matter/fixtures/nodes/dimmable-light.json similarity index 79% rename from tests/components/matter/fixtures/nodes/dimmable_light.json rename to 
tests/components/matter/fixtures/nodes/dimmable-light.json index f8a3b28fb9e..74f132a88a9 100644 --- a/tests/components/matter/fixtures/nodes/dimmable_light.json +++ b/tests/components/matter/fixtures/nodes/dimmable-light.json @@ -78,7 +78,7 @@ ], "0/42/0": [], "0/42/1": true, - "0/42/2": 1, + "0/42/2": 0, "0/42/3": 0, "0/42/65532": 0, "0/42/65533": 1, @@ -305,6 +305,13 @@ "0/65/65528": [], "0/65/65529": [], "0/65/65531": [0, 65528, 65529, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 0, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": [0, 64], + "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "1/4/0": 128, "1/4/65532": 1, "1/4/65533": 4, @@ -358,148 +365,7 @@ "1/29/65533": 1, "1/29/65528": [], "1/29/65529": [], - "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "6/80/0": "LED Color", - "6/80/1": 0, - "6/80/2": [ - { - "0": "Red", - "1": 0, - "2": [ - { - "0": 0, - "1": 0 - } - ] - }, - { - "0": "Orange", - "1": 1, - "2": [ - { - "0": 0, - "1": 0 - } - ] - }, - { - "0": "Lemon", - "1": 2, - "2": [ - { - "0": 0, - "1": 0 - } - ] - }, - { - "0": "Lime", - "1": 3, - "2": [ - { - "0": 0, - "1": 0 - } - ] - }, - { - "0": "Green", - "1": 4, - "2": [ - { - "0": 0, - "1": 0 - } - ] - }, - { - "0": "Teal", - "1": 5, - "2": [ - { - "0": 0, - "1": 0 - } - ] - }, - { - "0": "Cyan", - "1": 6, - "2": [ - { - "0": 0, - "1": 0 - } - ] - }, - { - "0": "Aqua", - "1": 7, - "2": [ - { - "0": 0, - "1": 0 - } - ] - }, - { - "0": "Blue", - "1": 8, - "2": [ - { - "0": 0, - "1": 0 - } - ] - }, - { - "0": "Violet", - "1": 9, - "2": [ - { - "0": 0, - "1": 0 - } - ] - }, - { - "0": "Magenta", - "1": 10, - "2": [ - { - "0": 0, - "1": 0 - } - ] - }, - { - "0": "Pink", - "1": 11, - "2": [ - { - "0": 0, - "1": 0 - } - ] - }, - { - "0": "White", - "1": 12, - "2": [ - { - "0": 0, - "1": 0 - } - ] - } - ], - "6/80/3": 7, - "6/80/65532": 0, - "6/80/65533": 1, - "6/80/65528": [], - "6/80/65529": [0], - "6/80/65530": [], - "6/80/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533] + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533] }, "available": true, "attribute_subscriptions": [] diff --git a/tests/components/matter/fixtures/nodes/dimmable_plugin_unit.json b/tests/components/matter/fixtures/nodes/dimmable-plugin-unit.json similarity index 100% rename from tests/components/matter/fixtures/nodes/dimmable_plugin_unit.json rename to tests/components/matter/fixtures/nodes/dimmable-plugin-unit.json diff --git a/tests/components/matter/fixtures/nodes/door_lock_with_unbolt.json b/tests/components/matter/fixtures/nodes/door-lock-with-unbolt.json similarity index 100% rename from tests/components/matter/fixtures/nodes/door_lock_with_unbolt.json rename to tests/components/matter/fixtures/nodes/door-lock-with-unbolt.json diff --git a/tests/components/matter/fixtures/nodes/door_lock.json b/tests/components/matter/fixtures/nodes/door-lock.json similarity index 99% rename from tests/components/matter/fixtures/nodes/door_lock.json rename to tests/components/matter/fixtures/nodes/door-lock.json index b6231e04af4..8a3f0fd68dd 100644 --- a/tests/components/matter/fixtures/nodes/door_lock.json +++ b/tests/components/matter/fixtures/nodes/door-lock.json @@ -469,7 +469,7 @@ "1/47/65531": [ 0, 1, 2, 14, 15, 16, 19, 65528, 65529, 65530, 65531, 65532, 65533 ], - "1/257/0": 0, + "1/257/0": 1, "1/257/1": 0, "1/257/2": true, "1/257/3": 1, diff --git a/tests/components/matter/fixtures/nodes/eve_contact_sensor.json b/tests/components/matter/fixtures/nodes/eve-contact-sensor.json 
similarity index 100% rename from tests/components/matter/fixtures/nodes/eve_contact_sensor.json rename to tests/components/matter/fixtures/nodes/eve-contact-sensor.json diff --git a/tests/components/matter/fixtures/nodes/eve_energy_plug.json b/tests/components/matter/fixtures/nodes/eve-energy-plug.json similarity index 100% rename from tests/components/matter/fixtures/nodes/eve_energy_plug.json rename to tests/components/matter/fixtures/nodes/eve-energy-plug.json diff --git a/tests/components/matter/fixtures/nodes/eve_energy_plug_patched.json b/tests/components/matter/fixtures/nodes/eve_energy_plug_patched.json deleted file mode 100644 index 18c4a8c68ef..00000000000 --- a/tests/components/matter/fixtures/nodes/eve_energy_plug_patched.json +++ /dev/null @@ -1,396 +0,0 @@ -{ - "node_id": 183, - "date_commissioned": "2023-11-30T14:39:37.020026", - "last_interview": "2023-11-30T14:39:37.020029", - "interview_version": 5, - "available": true, - "is_bridge": false, - "attributes": { - "0/29/0": [ - { - "0": 22, - "1": 1 - } - ], - "0/29/1": [29, 31, 40, 42, 48, 49, 51, 53, 60, 62, 63], - "0/29/2": [41], - "0/29/3": [1], - "0/29/65532": 0, - "0/29/65533": 1, - "0/29/65528": [], - "0/29/65529": [], - "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "0/31/0": [ - { - "254": 1 - }, - { - "254": 2 - }, - { - "1": 5, - "2": 2, - "3": [112233], - "4": null, - "254": 5 - } - ], - "0/31/1": [], - "0/31/2": 4, - "0/31/3": 3, - "0/31/4": 3, - "0/31/65532": 0, - "0/31/65533": 1, - "0/31/65528": [], - "0/31/65529": [], - "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/40/0": 1, - "0/40/1": "Eve Systems", - "0/40/2": 4874, - "0/40/3": "Eve Energy Plug Patched", - "0/40/4": 80, - "0/40/5": "", - "0/40/6": "XX", - "0/40/7": 1, - "0/40/8": "1.3", - "0/40/9": 6650, - "0/40/10": "3.2.1", - "0/40/15": "RV44L221A00081", - "0/40/18": "26E822F90561D17C42", - "0/40/19": { - "0": 3, - "1": 3 - }, - "0/40/65532": 0, - "0/40/65533": 1, - "0/40/65528": [], - "0/40/65529": [], - "0/40/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 18, 19, 65528, 65529, 65531, 65532, - 65533 - ], - "0/42/0": [ - { - "1": 2312386028615903905, - "2": 0, - "254": 1 - } - ], - "0/42/1": true, - "0/42/2": 1, - "0/42/3": null, - "0/42/65532": 0, - "0/42/65533": 1, - "0/42/65528": [], - "0/42/65529": [0], - "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "0/48/0": 0, - "0/48/1": { - "0": 60, - "1": 900 - }, - "0/48/2": 0, - "0/48/3": 0, - "0/48/4": true, - "0/48/65532": 0, - "0/48/65533": 1, - "0/48/65528": [1, 3, 5], - "0/48/65529": [0, 2, 4], - "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/49/0": 1, - "0/49/1": [ - { - "0": "cfUKbvsdfsBjT+0=", - "1": true - } - ], - "0/49/2": 10, - "0/49/3": 20, - "0/49/4": true, - "0/49/5": 0, - "0/49/6": "cfUKbvBjdsffwT+0=", - "0/49/7": null, - "0/49/65532": 2, - "0/49/65533": 1, - "0/49/65528": [1, 5, 7], - "0/49/65529": [0, 3, 4, 6, 8], - "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], - "0/51/0": [], - "0/51/1": 95, - "0/51/2": 268574, - "0/51/3": 4406, - "0/51/5": [], - "0/51/6": [], - "0/51/7": [], - "0/51/8": false, - "0/51/65532": 0, - "0/51/65533": 1, - "0/51/65528": [], - "0/51/65529": [0], - "0/51/65531": [0, 1, 2, 3, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533], - "0/53/0": 25, - "0/53/1": 5, - "0/53/2": "MyHome23", - "0/53/3": 14707, - "0/53/4": 8211480967175688173, - "0/53/5": "aabbccdd", - "0/53/6": 0, - "0/53/7": [], - "0/53/8": [], - "0/53/9": 1828774034, - "0/53/10": 68, - 
"0/53/11": 237, - "0/53/12": 170, - "0/53/13": 23, - "0/53/14": 2, - "0/53/15": 1, - "0/53/16": 2, - "0/53/17": 0, - "0/53/18": 0, - "0/53/19": 2, - "0/53/20": 0, - "0/53/21": 0, - "0/53/22": 293884, - "0/53/23": 278934, - "0/53/24": 14950, - "0/53/25": 278894, - "0/53/26": 278468, - "0/53/27": 14990, - "0/53/28": 293844, - "0/53/29": 0, - "0/53/30": 40, - "0/53/31": 0, - "0/53/32": 0, - "0/53/33": 65244, - "0/53/34": 426, - "0/53/35": 0, - "0/53/36": 87, - "0/53/37": 0, - "0/53/38": 0, - "0/53/39": 6687540, - "0/53/40": 142626, - "0/53/41": 106835, - "0/53/42": 246171, - "0/53/43": 0, - "0/53/44": 541, - "0/53/45": 40, - "0/53/46": 0, - "0/53/47": 0, - "0/53/48": 6360718, - "0/53/49": 2141, - "0/53/50": 35259, - "0/53/51": 4374, - "0/53/52": 0, - "0/53/53": 568, - "0/53/54": 18599, - "0/53/55": 19143, - "0/53/59": { - "0": 672, - "1": 8335 - }, - "0/53/60": "AB//wA==", - "0/53/61": { - "0": true, - "1": false, - "2": true, - "3": true, - "4": true, - "5": true, - "6": false, - "7": true, - "8": true, - "9": true, - "10": true, - "11": true - }, - "0/53/62": [0, 0, 0, 0], - "0/53/65532": 15, - "0/53/65533": 1, - "0/53/65528": [], - "0/53/65529": [0], - "0/53/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, - 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 59, - 60, 61, 62, 65528, 65529, 65531, 65532, 65533 - ], - "0/60/0": 0, - "0/60/1": null, - "0/60/2": null, - "0/60/65532": 0, - "0/60/65533": 1, - "0/60/65528": [], - "0/60/65529": [0, 1, 2], - "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], - "0/62/0": [], - "0/62/1": [], - "0/62/2": 5, - "0/62/3": 3, - "0/62/4": [ - "FTABAQAkAgE3AycUxofpv3kE1HwkFQEYJgS2Ty8rJgU2gxAtNwYnFMaH6b95BNR8JBUBGCQHASQIATAJQQSG0eCLvAjSHcSkZEo029SymN58wmxVcA645EXuFg6KwojGRyZsqWVtuMAYAB8TaPA9NEFsNvZZbvBR9XjrZhyKNwo1ASkBGCQCYDAEFNnFRJ+9qQIJtsM+LRdMdmCY3bQ4MAUU2cVEn72pAgm2wz4tF0x2YJjdtDgYMAtAFDv6Ouh7ugAGLiCjBQaEXCIAe0AkaaN8dBPskCZXOODjuZ1DCr4/f5IYg0rN2zFDUDTvG3GCxoI1+A7BvSjiNRg=", - "FTABAQAkAgE3AycUjuqR8vTQCmEkFQIYJgTFTy8rJgVFgxAtNwYnFI7qkfL00AphJBUCGCQHASQIATAJQQS5ZOLouMEkPsc/PYweZwUUFFWHWPR9nQVGsBl1VMWtm7CodpPAh4o79bZM9XU4T1wPVCvIzgGfuzIvsuwT7gHINwo1ASkBGCQCYDAEFKEEplpzAvCzsc5ga6CFmqmsv5onMAUUoQSmWnMC8LOxzmBroIWaqay/micYMAtAYkkA8OZFIGpxBEYYT+3A7Okba4WOq4NtwctIIZvCM48VU8pxQNjVvHMcJWPOP1Wh2Bw1VH7/Sg9lt9DL4DAwjBg=", - "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEECDlp5HtG4UpmG6QLEwaCUJ3TR0qWHEarwFuN7JkKUrPmQ3Zi3Nq/TFayJYQRvez268whgWhBhQudIm84xNwPXjcKNQEpARgkAmAwBBTJ3+WZAQkWgZboUpiyZL3FV8R8UzAFFMnf5ZkBCRaBluhSmLJkvcVXxHxTGDALQO9QSAdvJkM6b/wIc07MCw1ma46lTyGYG8nvpn0ICI73nuD3QeaWwGIQTkVGEpzF+TuDK7gtTz7YUrR+PSnvMk8Y" - ], - "0/62/5": 5, - "0/62/65532": 0, - "0/62/65533": 1, - "0/62/65528": [1, 3, 5, 8], - "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], - "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], - "0/63/0": [], - "0/63/1": [], - "0/63/2": 3, - "0/63/3": 3, - "0/63/65532": 0, - "0/63/65533": 1, - "0/63/65528": [2, 5], - "0/63/65529": [0, 1, 3, 4], - "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "1/3/0": 0, - "1/3/1": 2, - "1/3/65532": 0, - "1/3/65533": 4, - "1/3/65528": [], - "1/3/65529": [0], - "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/4/0": 128, - "1/4/65532": 1, - "1/4/65533": 4, - "1/4/65528": [0, 1, 2, 3], - "1/4/65529": [0, 1, 2, 3, 4, 5], - "1/4/65531": [0, 65528, 65529, 65531, 65532, 65533], - "1/6/0": false, - "1/6/16384": true, - "1/6/16385": 0, - 
"1/6/16386": 0, - "1/6/16387": null, - "1/6/65532": 1, - "1/6/65533": 4, - "1/6/65528": [], - "1/6/65529": [0, 1, 2, 64, 65, 66], - "1/6/65531": [ - 0, 16384, 16385, 16386, 16387, 65528, 65529, 65531, 65532, 65533 - ], - "1/29/0": [ - { - "0": 266, - "1": 1 - } - ], - "1/29/1": [3, 4, 6, 29, 319486977], - "1/29/2": [], - "1/29/3": [], - "1/29/65532": 0, - "1/29/65533": 1, - "1/29/65528": [], - "1/29/65529": [], - "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "1/319486977/319422464": "AAFQCwIAAAMC+xkEDFJWNDRMMUEwMDA4MZwBAP8EAQIA1PkBAWABZNAEAAAAAEUFBQAAAABGCQUAAAAOAABCBkkGBQwIEIABRBEFFAAFAzwAAAAAAAAAAAAAAEcRBSoh/CGWImgjeAAAADwAAABIBgUAAAAAAEoGBQAAAAAA", - "1/319486977/319422466": "BEZiAQAAAAAAAAAABgsCDAINAgcCDgEBAn4PABAAWgAAs8c+AQEA", - "1/319486977/319422467": "EgtaAAB74T4BDwAANwkAAAAA", - "1/319486977/319422471": 0, - "1/319486977/319422472": 238.8000030517578, - "1/319486977/319422473": 0.0, - "1/319486977/319422474": 0.0, - "1/319486977/319422475": 0.2200000286102295, - "1/319486977/319422476": 0, - "1/319486977/319422478": 0, - "1/319486977/319422481": false, - "1/319486977/319422482": 54272, - "1/319486977/65533": 1, - "1/319486977/65528": [], - "1/319486977/65529": [], - "1/319486977/65531": [ - 65528, 65529, 65531, 319422464, 319422465, 319422466, 319422467, - 319422468, 319422469, 319422471, 319422472, 319422473, 319422474, - 319422475, 319422476, 319422478, 319422481, 319422482, 65533 - ], - "2/29/0": [ - { - "0": 1296, - "1": 1 - } - ], - "2/29/1": [3, 29, 144, 145, 156], - "2/29/2": [], - "2/29/3": [], - "2/29/65532": 0, - "2/29/65533": 2, - "2/29/65528": [], - "2/29/65529": [], - "2/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "2/144/0": 2, - "2/144/1": 3, - "2/144/2": [ - { - "0": 1, - "1": true, - "2": 0, - "3": 100, - "4": [ - { - "0": 0, - "1": 4611686018427387904 - } - ] - }, - { - "0": 2, - "1": true, - "2": 0, - "3": 100, - "4": [ - { - "0": 0, - "1": 4611686018427387904 - } - ] - }, - { - "0": 5, - "1": true, - "2": 0, - "3": 100, - "4": [ - { - "0": 0, - "1": 4611686018427387904 - } - ] - } - ], - "2/144/4": 220000, - "2/144/5": 2000, - "2/144/8": 550000, - "2/144/65533": 1, - "2/144/65532": 2, - "2/144/65531": [0, 1, 2, 4, 5, 8, 65528, 65529, 65530, 65531, 65532, 65533], - "2/144/65530": [], - "2/144/65529": [], - "2/144/65528": [], - "2/145/0": { - "0": 14, - "1": true, - "2": 0, - "3": 0, - "4": [ - { - "0": 0, - "1": 4611686018427387904 - } - ] - }, - "2/145/65533": 1, - "2/145/65532": 7, - "2/145/65531": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533], - "2/145/65530": [0], - "2/145/65529": [], - "2/145/65528": [], - "2/145/1": { - "0": 2500 - }, - "2/145/2": null - }, - "attribute_subscriptions": [], - "last_subscription_attempt": 0 -} diff --git a/tests/components/matter/fixtures/nodes/eve_thermo.json b/tests/components/matter/fixtures/nodes/eve_thermo.json deleted file mode 100644 index e00b55d2cfc..00000000000 --- a/tests/components/matter/fixtures/nodes/eve_thermo.json +++ /dev/null @@ -1,406 +0,0 @@ -{ - "node_id": 33, - "date_commissioned": "2024-09-11T05:47:53.888591", - "last_interview": "2024-09-11T05:48:45.828762", - "interview_version": 6, - "available": true, - "is_bridge": false, - "attributes": { - "0/29/0": [ - { - "0": 18, - "1": 1 - }, - { - "0": 17, - "1": 1 - }, - { - "0": 22, - "1": 2 - } - ], - "0/29/1": [29, 31, 40, 42, 47, 48, 49, 50, 51, 52, 53, 56, 60, 62, 63, 70], - "0/29/2": [41], - "0/29/3": [1], - "0/29/65532": 0, - "0/29/65533": 2, - "0/29/65528": [], - "0/29/65529": [], - "0/29/65531": [0, 1, 
2, 3, 65528, 65529, 65531, 65532, 65533], - "0/31/0": [ - { - "254": 1 - }, - { - "254": 1 - }, - { - "254": 2 - }, - { - "254": 3 - }, - { - "1": 5, - "2": 2, - "3": [112233], - "4": null, - "254": 4 - } - ], - "0/31/1": [], - "0/31/2": 10, - "0/31/3": 3, - "0/31/4": 5, - "0/31/65532": 0, - "0/31/65533": 1, - "0/31/65528": [], - "0/31/65529": [], - "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/40/0": 17, - "0/40/1": "Eve Systems", - "0/40/2": 4874, - "0/40/3": "Eve Thermo", - "0/40/4": 79, - "0/40/5": "", - "0/40/6": "**REDACTED**", - "0/40/7": 1, - "0/40/8": "1.1", - "0/40/9": 9217, - "0/40/10": "3.5.0", - "0/40/15": "**REDACTED**", - "0/40/18": "**REDACTED**", - "0/40/19": { - "0": 3, - "1": 3 - }, - "0/40/21": 16973824, - "0/40/22": 1, - "0/40/65532": 0, - "0/40/65533": 3, - "0/40/65528": [], - "0/40/65529": [], - "0/40/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 18, 19, 21, 22, 65528, 65529, 65531, - 65532, 65533 - ], - "0/42/0": [ - { - "1": 556220604, - "2": 0, - "254": 1 - } - ], - "0/42/1": true, - "0/42/2": 1, - "0/42/3": null, - "0/42/65532": 0, - "0/42/65533": 1, - "0/42/65528": [], - "0/42/65529": [0], - "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "0/47/0": 1, - "0/47/1": 0, - "0/47/2": "Battery", - "0/47/11": 3050, - "0/47/12": 200, - "0/47/14": 0, - "0/47/15": false, - "0/47/16": 2, - "0/47/18": [], - "0/47/19": "", - "0/47/25": 1, - "0/47/31": [], - "0/47/65532": 10, - "0/47/65533": 2, - "0/47/65528": [], - "0/47/65529": [], - "0/47/65531": [ - 0, 1, 2, 11, 12, 14, 15, 16, 18, 19, 25, 31, 65528, 65529, 65531, 65532, - 65533 - ], - "0/48/0": 0, - "0/48/1": { - "0": 60, - "1": 900 - }, - "0/48/2": 0, - "0/48/3": 0, - "0/48/4": true, - "0/48/65532": 0, - "0/48/65533": 1, - "0/48/65528": [1, 3, 5], - "0/48/65529": [0, 2, 4], - "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/49/0": 1, - "0/49/1": [], - "0/49/2": 10, - "0/49/3": 20, - "0/49/4": true, - "0/49/5": 0, - "0/49/6": "**REDACTED**", - "0/49/7": null, - "0/49/9": 4, - "0/49/10": 4, - "0/49/65532": 2, - "0/49/65533": 2, - "0/49/65528": [1, 5, 7], - "0/49/65529": [0, 3, 4, 6, 8], - "0/49/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 65528, 65529, 65531, 65532, 65533 - ], - "0/50/65532": 0, - "0/50/65533": 1, - "0/50/65528": [1], - "0/50/65529": [0], - "0/50/65531": [65528, 65529, 65531, 65532, 65533], - "0/51/0": [], - "0/51/1": 2, - "0/51/2": 306352, - "0/51/3": 85, - "0/51/5": [], - "0/51/6": [], - "0/51/7": [], - "0/51/8": false, - "0/51/65532": 0, - "0/51/65533": 2, - "0/51/65528": [2], - "0/51/65529": [0, 1], - "0/51/65531": [0, 1, 2, 3, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533], - "0/52/1": 10168, - "0/52/2": 1948, - "0/52/65532": 0, - "0/52/65533": 1, - "0/52/65528": [], - "0/52/65529": [], - "0/52/65531": [1, 2, 65528, 65529, 65531, 65532, 65533], - "0/53/0": 25, - "0/53/1": 2, - "0/53/2": "**REDACTED**", - "0/53/3": 4660, - "0/53/4": 12054125955590472924, - "0/53/5": "**REDACTED**", - "0/53/6": 0, - "0/53/7": [], - "0/53/8": [], - "0/53/9": 867525816, - "0/53/10": 68, - "0/53/11": 127, - "0/53/12": 197, - "0/53/13": 17, - "0/53/14": 4, - "0/53/15": 4, - "0/53/16": 0, - "0/53/17": 0, - "0/53/18": 13, - "0/53/19": 3, - "0/53/20": 0, - "0/53/21": 3, - "0/53/22": 167566, - "0/53/23": 167438, - "0/53/24": 128, - "0/53/25": 167438, - "0/53/26": 167326, - "0/53/27": 128, - "0/53/28": 14672, - "0/53/29": 152900, - "0/53/30": 0, - "0/53/31": 0, - "0/53/32": 0, - "0/53/33": 30814, - "0/53/34": 63, - "0/53/35": 0, - "0/53/36": 37, - "0/53/37": 0, - 
"0/53/38": 0, - "0/53/39": 16473, - "0/53/40": 7569, - "0/53/41": 23, - "0/53/42": 7273, - "0/53/43": 0, - "0/53/44": 0, - "0/53/45": 0, - "0/53/46": 0, - "0/53/47": 0, - "0/53/48": 6541, - "0/53/49": 319, - "0/53/50": 105, - "0/53/51": 1500, - "0/53/52": 0, - "0/53/53": 0, - "0/53/54": 681, - "0/53/55": 54, - "0/53/59": { - "0": 672, - "1": 8335 - }, - "0/53/60": "AB//4A==", - "0/53/61": { - "0": true, - "1": false, - "2": true, - "3": true, - "4": true, - "5": true, - "6": false, - "7": true, - "8": true, - "9": true, - "10": true, - "11": true - }, - "0/53/62": [], - "0/53/65532": 15, - "0/53/65533": 2, - "0/53/65528": [], - "0/53/65529": [0], - "0/53/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, - 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 59, - 60, 61, 62, 65528, 65529, 65531, 65532, 65533 - ], - "0/56/0": 779348920474853, - "0/56/1": 4, - "0/56/2": 2, - "0/56/3": null, - "0/56/5": [ - { - "0": 3600, - "1": 0, - "2": "Europe/Paris" - } - ], - "0/56/6": [ - { - "0": 3600, - "1": 0, - "2": 783306000000000 - }, - { - "0": 0, - "1": 783306000000000, - "2": 796611600000000 - } - ], - "0/56/7": 779356121143951, - "0/56/8": 2, - "0/56/10": 2, - "0/56/11": 2, - "0/56/65532": 9, - "0/56/65533": 2, - "0/56/65528": [3], - "0/56/65529": [0, 1, 2, 4], - "0/56/65531": [ - 0, 1, 2, 3, 5, 6, 7, 8, 10, 11, 65528, 65529, 65531, 65532, 65533 - ], - "0/60/0": 0, - "0/60/1": null, - "0/60/2": null, - "0/60/65532": 1, - "0/60/65533": 1, - "0/60/65528": [], - "0/60/65529": [0, 1, 2], - "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], - "0/62/0": [], - "0/62/1": [], - "0/62/2": 5, - "0/62/3": 4, - "0/62/4": [], - "0/62/5": 4, - "0/62/65532": 0, - "0/62/65533": 1, - "0/62/65528": [1, 3, 5, 8], - "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], - "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], - "0/63/0": [], - "0/63/1": [], - "0/63/2": 4, - "0/63/3": 3, - "0/63/65532": 0, - "0/63/65533": 2, - "0/63/65528": [2, 5], - "0/63/65529": [0, 1, 3, 4], - "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "0/70/0": 120, - "0/70/1": 300, - "0/70/2": 2000, - "0/70/65532": 0, - "0/70/65533": 2, - "0/70/65528": [], - "0/70/65529": [], - "0/70/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], - "1/3/0": 0, - "1/3/1": 4, - "1/3/65532": 0, - "1/3/65533": 4, - "1/3/65528": [], - "1/3/65529": [0], - "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/29/0": [ - { - "0": 769, - "1": 3 - } - ], - "1/29/1": [3, 29, 30, 513, 516, 319486977], - "1/29/2": [1026], - "1/29/3": [], - "1/29/65532": 0, - "1/29/65533": 2, - "1/29/65528": [], - "1/29/65529": [], - "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "1/30/0": [], - "1/30/65532": 0, - "1/30/65533": 1, - "1/30/65528": [], - "1/30/65529": [], - "1/30/65531": [0, 65528, 65529, 65531, 65532, 65533], - "1/513/0": 2100, - "1/513/3": 1000, - "1/513/4": 3000, - "1/513/16": 0, - "1/513/18": 1700, - "1/513/21": 1000, - "1/513/22": 3000, - "1/513/26": 0, - "1/513/27": 2, - "1/513/28": 4, - "1/513/65532": 1, - "1/513/65533": 6, - "1/513/65528": [], - "1/513/65529": [0], - "1/513/65531": [ - 0, 3, 4, 16, 18, 21, 22, 26, 27, 28, 65528, 65529, 65531, 65532, 65533 - ], - "1/516/0": 0, - "1/516/1": 0, - "1/516/2": 0, - "1/516/65532": 0, - "1/516/65533": 2, - "1/516/65528": [], - "1/516/65529": [], - "1/516/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], - "1/319486977/319422464": 
"AAFPCwIAAAMCEyQEDENNMzRNMUE0NzgxNZwBAP8EAQIIMPkBAR0BAD4AOwhTVEVHVDIxMjwBADcBAD8BACYBAScBHk8GAAAgICoq/wMjAQBFDQUCAAAAAAACAYk0BaVGVAXKISyfJEkCAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEkGBQwIEIABRBEFHAAFAzwAAADhKT5Ch1orv0cRBSoh/CGWImgjtAAAADwAAABIBgUAAAAAAEoGBQAAAAAA/ygiCRABAAAAAAAAAAIb0XT3kNTbRpuy/pzwUAklhFBhciBkw6lmYXV0", - "1/319486977/319422466": "xqwEAFjkAwBNnpAsBgECEQIQARIBHQEjAgwCABAAAAAAEQAAAAEAAA==", - "1/319486977/319422467": "EwoCAAC8rAQAPwIIKAoUAQADDAwLAgAAvKwEACDqCw==", - "1/319486977/319422476": 0, - "1/319486977/319422482": 12296, - "1/319486977/319422487": false, - "1/319486977/319422488": 10, - "1/319486977/319422489": 30240, - "1/319486977/319422490": 0, - "1/319486977/65532": 0, - "1/319486977/65533": 1, - "1/319486977/65528": [], - "1/319486977/65529": [319422464], - "1/319486977/65531": [ - 65528, 65529, 65531, 319422464, 319422465, 319422466, 319422467, - 319422468, 319422469, 319422476, 319422482, 319422487, 319422488, - 319422489, 319422490, 65532, 65533 - ] - }, - "attribute_subscriptions": [] -} diff --git a/tests/components/matter/fixtures/nodes/eve_weather_sensor.json b/tests/components/matter/fixtures/nodes/eve_weather_sensor.json deleted file mode 100644 index dacba8d336b..00000000000 --- a/tests/components/matter/fixtures/nodes/eve_weather_sensor.json +++ /dev/null @@ -1,322 +0,0 @@ -{ - "node_id": 29, - "date_commissioned": "2024-09-10T13:34:48.252332", - "last_interview": "2024-09-10T13:34:48.252334", - "interview_version": 6, - "available": true, - "is_bridge": false, - "attributes": { - "0/29/0": [ - { - "0": 22, - "1": 1 - } - ], - "0/29/1": [29, 31, 40, 42, 47, 48, 49, 51, 53, 60, 62, 63], - "0/29/2": [41], - "0/29/3": [1, 2], - "0/29/65532": 0, - "0/29/65533": 1, - "0/29/65528": [], - "0/29/65529": [], - "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "0/31/0": [ - { - "1": 5, - "2": 2, - "3": [112233], - "4": null, - "254": 4 - } - ], - "0/31/1": [], - "0/31/2": 4, - "0/31/3": 3, - "0/31/4": 4, - "0/31/65532": 0, - "0/31/65533": 1, - "0/31/65528": [], - "0/31/65529": [], - "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/40/0": 1, - "0/40/1": "Eve Systems", - "0/40/2": 4874, - "0/40/3": "Eve Weather", - "0/40/4": 87, - "0/40/5": "", - "0/40/6": "**REDACTED**", - "0/40/7": 1, - "0/40/8": "1.1", - "0/40/9": 7143, - "0/40/10": "3.3.0", - "0/40/15": "**REDACTED**", - "0/40/18": "**REDACTED**", - "0/40/19": { - "0": 3, - "1": 3 - }, - "0/40/65532": 0, - "0/40/65533": 1, - "0/40/65528": [], - "0/40/65529": [], - "0/40/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 18, 19, 65528, 65529, 65531, 65532, - 65533 - ], - "0/42/0": [], - "0/42/1": true, - "0/42/2": 1, - "0/42/3": null, - "0/42/65532": 0, - "0/42/65533": 1, - "0/42/65528": [], - "0/42/65529": [0], - "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "0/47/0": 1, - "0/47/1": 0, - "0/47/2": "Battery", - "0/47/11": 2956, - "0/47/12": 200, - "0/47/14": 0, - "0/47/15": false, - "0/47/16": 2, - "0/47/18": [], - "0/47/19": "", - "0/47/25": 1, - "0/47/65532": 10, - "0/47/65533": 1, - "0/47/65528": [], - "0/47/65529": [], - "0/47/65531": [ - 0, 1, 2, 11, 12, 14, 15, 16, 18, 19, 25, 65528, 65529, 65531, 65532, 65533 - ], - "0/48/0": 0, - "0/48/1": { - "0": 60, - "1": 900 - }, - "0/48/2": 0, - "0/48/3": 0, - "0/48/4": true, - "0/48/65532": 0, - "0/48/65533": 1, - "0/48/65528": [1, 3, 5], - "0/48/65529": [0, 2, 4], - "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/49/0": 1, - 
"0/49/1": [], - "0/49/2": 10, - "0/49/3": 20, - "0/49/4": true, - "0/49/5": 0, - "0/49/6": "**REDACTED**", - "0/49/7": null, - "0/49/65532": 2, - "0/49/65533": 1, - "0/49/65528": [1, 5, 7], - "0/49/65529": [0, 3, 4, 6, 8], - "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], - "0/51/0": [], - "0/51/1": 1, - "0/51/2": 3416207, - "0/51/3": 948, - "0/51/5": [], - "0/51/6": [], - "0/51/7": [], - "0/51/8": false, - "0/51/65532": 0, - "0/51/65533": 1, - "0/51/65528": [], - "0/51/65529": [0], - "0/51/65531": [0, 1, 2, 3, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533], - "0/53/0": 25, - "0/53/1": 2, - "0/53/2": "**REDACTED**", - "0/53/3": 4660, - "0/53/4": 12054125955590472924, - "0/53/5": "**REDACTED**", - "0/53/6": 0, - "0/53/7": [], - "0/53/8": [], - "0/53/9": 867525816, - "0/53/10": 68, - "0/53/11": 127, - "0/53/12": 197, - "0/53/13": 17, - "0/53/14": 244, - "0/53/15": 243, - "0/53/16": 0, - "0/53/17": 0, - "0/53/18": 334, - "0/53/19": 6, - "0/53/20": 0, - "0/53/21": 221, - "0/53/22": 1814103, - "0/53/23": 1812208, - "0/53/24": 1895, - "0/53/25": 1812220, - "0/53/26": 1806871, - "0/53/27": 1895, - "0/53/28": 144123, - "0/53/29": 1670020, - "0/53/30": 0, - "0/53/31": 0, - "0/53/32": 0, - "0/53/33": 515245, - "0/53/34": 1061, - "0/53/35": 0, - "0/53/36": 25, - "0/53/37": 0, - "0/53/38": 0, - "0/53/39": 310675, - "0/53/40": 180775, - "0/53/41": 783, - "0/53/42": 171240, - "0/53/43": 0, - "0/53/44": 4, - "0/53/45": 0, - "0/53/46": 0, - "0/53/47": 0, - "0/53/48": 110041, - "0/53/49": 10200, - "0/53/50": 818, - "0/53/51": 11698, - "0/53/52": 0, - "0/53/53": 114, - "0/53/54": 6189, - "0/53/55": 371, - "0/53/59": { - "0": 672, - "1": 8335 - }, - "0/53/60": "AB//4A==", - "0/53/61": { - "0": true, - "1": false, - "2": true, - "3": true, - "4": true, - "5": true, - "6": false, - "7": true, - "8": true, - "9": true, - "10": true, - "11": true - }, - "0/53/62": [0, 0, 0, 0], - "0/53/65532": 15, - "0/53/65533": 1, - "0/53/65528": [], - "0/53/65529": [0], - "0/53/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, - 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 59, - 60, 61, 62, 65528, 65529, 65531, 65532, 65533 - ], - "0/60/0": 0, - "0/60/1": null, - "0/60/2": null, - "0/60/65532": 0, - "0/60/65533": 1, - "0/60/65528": [], - "0/60/65529": [0, 1, 2], - "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], - "0/62/0": [], - "0/62/1": [], - "0/62/2": 5, - "0/62/3": 4, - "0/62/4": [], - "0/62/5": 4, - "0/62/65532": 0, - "0/62/65533": 1, - "0/62/65528": [1, 3, 5, 8], - "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], - "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], - "0/63/0": [], - "0/63/1": [], - "0/63/2": 4, - "0/63/3": 3, - "0/63/65532": 0, - "0/63/65533": 1, - "0/63/65528": [2, 5], - "0/63/65529": [0, 1, 3, 4], - "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "1/3/0": 0, - "1/3/1": 4, - "1/3/65532": 0, - "1/3/65533": 4, - "1/3/65528": [], - "1/3/65529": [0], - "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/29/0": [ - { - "0": 770, - "1": 2 - } - ], - "1/29/1": [3, 29, 1026, 319486977], - "1/29/2": [], - "1/29/3": [], - "1/29/65532": 0, - "1/29/65533": 1, - "1/29/65528": [], - "1/29/65529": [], - "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "1/1026/0": 1603, - "1/1026/1": -4000, - "1/1026/2": 8500, - "1/1026/65532": 0, - "1/1026/65533": 4, - "1/1026/65528": [], - "1/1026/65529": [], - 
"1/1026/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], - "1/319486977/319422464": "AAFXCwIAAAMC/xsEDFNWNDNMMUEwMzg2MJwBAP8EAQJsNPkBAR0BACUE9griHksEfgeAA1EBAA==", - "1/319486977/319422466": "Ps00AOODMwBqe48sBgECAgIDAicBLwEjAlAPABAABwAA6gERAAEA", - "1/319486977/319422467": "EiMTAACLYy0AH74Fwx88JwQOEiQTAADjZS0AH7wFzB87JwQOEiUTAAA7aC0AH7oF1B86JwQOEiYTAACTai0AH7kF5x86JwQOEicTAADrbC0AH7sF8B85JwQOEigTAABDby0AH7wFAiA4JwQOEikTAACbcS0AH7sFFCA3JwQOEioTAADzcy0AH7EFMiA1JwQOEisTAABLdi0AH6gFVyA0JwQOEiwTAACjeC0AH6gFaiAzJwQOEi0TAAD7ei0AH6YFfCAyJwQOEi4TAABTfS0AH6YFgCAzJwQOEi8TAACrfy0AH6MFhyA0JwQOEjATAAADgi0AH58FnSA1JwQOEjETAABbhC0AH58FtSA1JwQOEjITAACzhi0AH5wFwSA0JwQOEjMTAAALiS0AH5cF1SA0JwQOEjQTAABjiy0AH58F3yA0JwIGEjUTAAC7jS0AH6EF7yA0JwIGEjYTAAATkC0AH60F+yAzJwIGEjcTAABrki0AH68FAiEyJwIGEjgTAADDlC0AH7kFACEyJwIGEjkTAAAbly0AH8QF7SAyJwIGEjoTAABzmS0AH9QF1SAzJwIGEjsTAADLmy0AH98FvyAzJwIG", - "1/319486977/319422482": 13420, - "1/319486977/319422483": 40.0, - "1/319486977/319422484": 1008.5, - "1/319486977/319422485": 6, - "1/319486977/319422486": 0, - "1/319486977/65533": 1, - "1/319486977/65528": [], - "1/319486977/65529": [], - "1/319486977/65531": [ - 65528, 65529, 65531, 319422464, 319422465, 319422466, 319422467, - 319422468, 319422469, 319422482, 319422483, 319422484, 319422485, - 319422486, 65533 - ], - "2/3/0": 0, - "2/3/1": 4, - "2/3/65532": 0, - "2/3/65533": 4, - "2/3/65528": [], - "2/3/65529": [0], - "2/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "2/29/0": [ - { - "0": 775, - "1": 2 - } - ], - "2/29/1": [3, 29, 1029], - "2/29/2": [], - "2/29/3": [], - "2/29/65532": 0, - "2/29/65533": 1, - "2/29/65528": [], - "2/29/65529": [], - "2/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "2/1029/0": 8066, - "2/1029/1": 0, - "2/1029/2": 10000, - "2/1029/65532": 0, - "2/1029/65533": 3, - "2/1029/65528": [], - "2/1029/65529": [], - "2/1029/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533] - }, - "attribute_subscriptions": [] -} diff --git a/tests/components/matter/fixtures/nodes/extended_color_light.json b/tests/components/matter/fixtures/nodes/extended-color-light.json similarity index 100% rename from tests/components/matter/fixtures/nodes/extended_color_light.json rename to tests/components/matter/fixtures/nodes/extended-color-light.json diff --git a/tests/components/matter/fixtures/nodes/fan.json b/tests/components/matter/fixtures/nodes/fan.json deleted file mode 100644 index e33c29ce66d..00000000000 --- a/tests/components/matter/fixtures/nodes/fan.json +++ /dev/null @@ -1,340 +0,0 @@ -{ - "node_id": 29, - "date_commissioned": "2024-07-25T08:34:23.014310", - "last_interview": "2024-07-25T08:34:23.014315", - "interview_version": 6, - "available": true, - "is_bridge": false, - "attributes": { - "0/29/0": [ - { - "0": 18, - "1": 1 - }, - { - "0": 22, - "1": 1 - } - ], - "0/29/1": [29, 31, 40, 42, 48, 49, 51, 53, 60, 62, 63, 64], - "0/29/2": [41], - "0/29/3": [1, 2, 3, 4, 5, 6], - "0/29/65532": 0, - "0/29/65533": 2, - "0/29/65528": [], - "0/29/65529": [], - "0/29/65530": [], - "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], - "0/31/0": [ - { - "1": 5, - "2": 2, - "3": [112233], - "4": null, - "254": 5 - } - ], - "0/31/1": [], - "0/31/2": 4, - "0/31/3": 3, - "0/31/4": 4, - "0/31/65532": 0, - "0/31/65533": 1, - "0/31/65528": [], - "0/31/65529": [], - "0/31/65530": [0, 1], - "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533], - "0/40/0": 17, - "0/40/1": "Mock", - "0/40/2": 4961, - "0/40/3": "Fan", - "0/40/4": 2, - "0/40/5": "Mocked Fan Switch", 
- "0/40/6": "**REDACTED**", - "0/40/7": 1, - "0/40/8": "1.0", - "0/40/9": 4, - "0/40/10": "0.0.1", - "0/40/11": "", - "0/40/12": "", - "0/40/13": "", - "0/40/14": "", - "0/40/15": "", - "0/40/16": false, - "0/40/17": true, - "0/40/18": "", - "0/40/19": { - "0": 3, - "1": 3 - }, - "0/40/65532": 0, - "0/40/65533": 2, - "0/40/65528": [], - "0/40/65529": [], - "0/40/65530": [0], - "0/40/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, - 65528, 65529, 65530, 65531, 65532, 65533 - ], - "0/42/0": [], - "0/42/1": true, - "0/42/2": 1, - "0/42/3": null, - "0/42/65532": 0, - "0/42/65533": 1, - "0/42/65528": [], - "0/42/65529": [0], - "0/42/65530": [0, 1, 2], - "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], - "0/48/0": 0, - "0/48/1": { - "0": 60, - "1": 900 - }, - "0/48/2": 0, - "0/48/3": 0, - "0/48/4": true, - "0/48/65532": 0, - "0/48/65533": 1, - "0/48/65528": [1, 3, 5], - "0/48/65529": [0, 2, 4], - "0/48/65530": [], - "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533], - "0/49/0": 1, - "0/49/1": [ - { - "0": "J/YquJb4Ao4=", - "1": true - } - ], - "0/49/2": 10, - "0/49/3": 20, - "0/49/4": true, - "0/49/5": 0, - "0/49/6": "J/YquJb4Ao4=", - "0/49/7": null, - "0/49/65532": 2, - "0/49/65533": 1, - "0/49/65528": [1, 5, 7], - "0/49/65529": [0, 3, 4, 6, 8], - "0/49/65530": [], - "0/49/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65530, 65531, 65532, 65533 - ], - "0/51/0": [], - "0/51/1": 15, - "0/51/2": 5688, - "0/51/3": 1, - "0/51/4": 0, - "0/51/5": [], - "0/51/6": [], - "0/51/7": [], - "0/51/8": false, - "0/51/65532": 0, - "0/51/65533": 1, - "0/51/65528": [], - "0/51/65529": [0], - "0/51/65530": [3], - "0/51/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65530, 65531, 65532, 65533 - ], - "0/53/0": 25, - "0/53/1": 5, - "0/53/2": "ha-thread", - "0/53/3": 12768, - "0/53/4": 5924944741529093989, - "0/53/5": "", - "0/53/6": 0, - "0/53/7": [], - "0/53/8": [], - "0/53/9": 933034070, - "0/53/10": 68, - "0/53/11": 16, - "0/53/12": 151, - "0/53/13": 31, - "0/53/14": 1, - "0/53/15": 0, - "0/53/16": 1, - "0/53/17": 0, - "0/53/18": 0, - "0/53/19": 1, - "0/53/20": 0, - "0/53/21": 0, - "0/53/22": 3533, - "0/53/23": 3105, - "0/53/24": 428, - "0/53/25": 1889, - "0/53/26": 1879, - "0/53/27": 1644, - "0/53/28": 2317, - "0/53/29": 0, - "0/53/30": 1216, - "0/53/31": 0, - "0/53/32": 0, - "0/53/33": 534, - "0/53/34": 10, - "0/53/35": 0, - "0/53/36": 42, - "0/53/37": 0, - "0/53/38": 0, - "0/53/39": 18130, - "0/53/40": 12178, - "0/53/41": 5863, - "0/53/42": 5103, - "0/53/43": 0, - "0/53/44": 11639, - "0/53/45": 1216, - "0/53/46": 0, - "0/53/47": 0, - "0/53/48": 0, - "0/53/49": 14, - "0/53/50": 0, - "0/53/51": 89, - "0/53/52": 0, - "0/53/53": 69, - "0/53/54": 0, - "0/53/55": 0, - "0/53/56": 131072, - "0/53/57": 0, - "0/53/58": 0, - "0/53/59": { - "0": 672, - "1": 8335 - }, - "0/53/60": "AB//4A==", - "0/53/61": { - "0": true, - "1": false, - "2": true, - "3": true, - "4": true, - "5": true, - "6": false, - "7": true, - "8": true, - "9": true, - "10": true, - "11": true - }, - "0/53/62": [0, 0, 0, 0], - "0/53/65532": 15, - "0/53/65533": 1, - "0/53/65528": [], - "0/53/65529": [0], - "0/53/65530": [], - "0/53/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, - 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 59, 60, 61, 62, 65528, 65529, 65530, 65531, 65532, 65533 - ], - "0/60/0": 0, - "0/60/1": null, - 
"0/60/2": null, - "0/60/65532": 0, - "0/60/65533": 1, - "0/60/65528": [], - "0/60/65529": [0, 1, 2], - "0/60/65530": [], - "0/60/65531": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533], - "0/62/0": [], - "0/62/1": [], - "0/62/2": 5, - "0/62/3": 4, - "0/62/4": [], - "0/62/5": 5, - "0/62/65532": 0, - "0/62/65533": 1, - "0/62/65528": [1, 3, 5, 8], - "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], - "0/62/65530": [], - "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65530, 65531, 65532, 65533], - "0/63/0": [], - "0/63/1": [], - "0/63/2": 4, - "0/63/3": 3, - "0/63/65532": 0, - "0/63/65533": 2, - "0/63/65528": [2, 5], - "0/63/65529": [0, 1, 3, 4], - "0/63/65530": [], - "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], - "0/64/0": [ - { - "0": "Vendor", - "1": "Mocked" - }, - { - "0": "Product", - "1": "Fan" - } - ], - "0/64/65532": 0, - "0/64/65533": 1, - "0/64/65528": [], - "0/64/65529": [], - "0/64/65530": [], - "0/64/65531": [0, 65528, 65529, 65530, 65531, 65532, 65533], - "1/3/0": 0, - "1/3/1": 2, - "1/3/65532": 0, - "1/3/65533": 4, - "1/3/65528": [], - "1/3/65529": [0, 64], - "1/3/65530": [], - "1/3/65531": [0, 1, 65528, 65529, 65530, 65531, 65532, 65533], - "1/4/0": 128, - "1/4/65532": 1, - "1/4/65533": 4, - "1/4/65528": [0, 1, 2, 3], - "1/4/65529": [0, 1, 2, 3, 4, 5], - "1/4/65530": [], - "1/4/65531": [0, 65528, 65529, 65530, 65531, 65532, 65533], - "1/29/0": [ - { - "0": 43, - "1": 1 - } - ], - "1/29/1": [3, 4, 6, 8, 29, 64, 80, 514, 305134641], - "1/29/2": [], - "1/29/3": [], - "1/29/65532": 0, - "1/29/65533": 2, - "1/29/65528": [], - "1/29/65529": [], - "1/29/65530": [], - "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], - "1/64/0": [ - { - "0": "DeviceType", - "1": "Fan" - } - ], - "1/64/65532": 0, - "1/64/65533": 1, - "1/64/65528": [], - "1/64/65529": [], - "1/64/65530": [], - "1/64/65531": [0, 65528, 65529, 65530, 65531, 65532, 65533], - - "1/514/0": 8, - "1/514/1": 2, - "1/514/2": 0, - "1/514/3": 0, - "1/514/4": 3, - "1/514/5": 0, - "1/514/6": 0, - "1/514/9": 3, - "1/514/10": 0, - "1/514/65532": 25, - "1/514/65533": 4, - "1/514/65528": [], - "1/514/65529": [0], - "1/514/65530": [], - "1/514/65531": [ - 0, 1, 2, 3, 4, 5, 6, 9, 10, 65528, 65529, 65530, 65531, 65532, 65533 - ] - }, - "attribute_subscriptions": [] -} diff --git a/tests/components/matter/fixtures/nodes/flow_sensor.json b/tests/components/matter/fixtures/nodes/flow-sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/flow_sensor.json rename to tests/components/matter/fixtures/nodes/flow-sensor.json diff --git a/tests/components/matter/fixtures/nodes/generic_switch_multi.json b/tests/components/matter/fixtures/nodes/generic-switch-multi.json similarity index 97% rename from tests/components/matter/fixtures/nodes/generic_switch_multi.json rename to tests/components/matter/fixtures/nodes/generic-switch-multi.json index 8923198c31e..f564e91a1ce 100644 --- a/tests/components/matter/fixtures/nodes/generic_switch_multi.json +++ b/tests/components/matter/fixtures/nodes/generic-switch-multi.json @@ -72,9 +72,8 @@ "1/59/0": 2, "1/59/65533": 1, "1/59/1": 0, - "1/59/2": 2, "1/59/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/59/65532": 30, + "1/59/65532": 14, "1/59/65528": [], "1/64/0": [ { @@ -102,9 +101,8 @@ "2/59/0": 2, "2/59/65533": 1, "2/59/1": 0, - "2/59/2": 2, "2/59/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "2/59/65532": 30, + "2/59/65532": 14, "2/59/65528": [], "2/64/0": [ { diff --git 
a/tests/components/matter/fixtures/nodes/generic_switch.json b/tests/components/matter/fixtures/nodes/generic-switch.json similarity index 98% rename from tests/components/matter/fixtures/nodes/generic_switch.json rename to tests/components/matter/fixtures/nodes/generic-switch.json index 9b334c5fb54..80773915748 100644 --- a/tests/components/matter/fixtures/nodes/generic_switch.json +++ b/tests/components/matter/fixtures/nodes/generic-switch.json @@ -73,7 +73,7 @@ "1/59/65533": 1, "1/59/1": 0, "1/59/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/59/65532": 14, + "1/59/65532": 30, "1/59/65528": [] }, "available": true, diff --git a/tests/components/matter/fixtures/nodes/humidity_sensor.json b/tests/components/matter/fixtures/nodes/humidity-sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/humidity_sensor.json rename to tests/components/matter/fixtures/nodes/humidity-sensor.json diff --git a/tests/components/matter/fixtures/nodes/leak_sensor.json b/tests/components/matter/fixtures/nodes/leak-sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/leak_sensor.json rename to tests/components/matter/fixtures/nodes/leak-sensor.json diff --git a/tests/components/matter/fixtures/nodes/light_sensor.json b/tests/components/matter/fixtures/nodes/light-sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/light_sensor.json rename to tests/components/matter/fixtures/nodes/light-sensor.json diff --git a/tests/components/matter/fixtures/nodes/microwave_oven.json b/tests/components/matter/fixtures/nodes/microwave_oven.json deleted file mode 100644 index ed0a4accd6a..00000000000 --- a/tests/components/matter/fixtures/nodes/microwave_oven.json +++ /dev/null @@ -1,405 +0,0 @@ -{ - "node_id": 157, - "date_commissioned": "2024-07-04T12:31:22.759270", - "last_interview": "2024-07-04T12:31:22.759275", - "interview_version": 6, - "available": true, - "is_bridge": false, - "attributes": { - "0/29/0": [ - { - "0": 22, - "1": 1 - } - ], - "0/29/1": [29, 31, 40, 44, 48, 49, 51, 54, 60, 62, 63], - "0/29/2": [], - "0/29/3": [1], - "0/29/65532": 0, - "0/29/65533": 2, - "0/29/65528": [], - "0/29/65529": [], - "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "0/31/0": [ - { - "1": 5, - "2": 2, - "3": [112233], - "4": null, - "254": 1 - } - ], - "0/31/1": [], - "0/31/2": 4, - "0/31/3": 3, - "0/31/4": 4, - "0/31/65532": 0, - "0/31/65533": 1, - "0/31/65528": [], - "0/31/65529": [], - "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/40/0": 17, - "0/40/1": "Mock", - "0/40/2": 65521, - "0/40/3": "Microwave Oven", - "0/40/4": 32769, - "0/40/5": "", - "0/40/6": "**REDACTED**", - "0/40/7": 0, - "0/40/8": "TEST_VERSION", - "0/40/9": 1, - "0/40/10": "1.0", - "0/40/11": "20200101", - "0/40/12": "", - "0/40/13": "", - "0/40/14": "", - "0/40/15": "TEST_SN", - "0/40/16": false, - "0/40/18": "D5908CF5E1382F42", - "0/40/19": { - "0": 3, - "1": 65535 - }, - "0/40/20": null, - "0/40/21": 16973824, - "0/40/22": 1, - "0/40/65532": 0, - "0/40/65533": 3, - "0/40/65528": [], - "0/40/65529": [], - "0/40/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 20, 21, - 22, 65528, 65529, 65531, 65532, 65533 - ], - "0/44/0": 0, - "0/44/65532": 0, - "0/44/65533": 1, - "0/44/65528": [], - "0/44/65529": [], - "0/44/65531": [0, 65528, 65529, 65531, 65532, 65533], - "0/48/0": 0, - "0/48/1": { - "0": 60, - "1": 900 - }, - "0/48/2": 0, - "0/48/3": 2, - "0/48/4": true, - "0/48/65532": 0, - 
"0/48/65533": 1, - "0/48/65528": [1, 3, 5], - "0/48/65529": [0, 2, 4], - "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/49/0": 1, - "0/49/1": [ - { - "0": "ZW5kMA==", - "1": true - } - ], - "0/49/2": 0, - "0/49/3": 0, - "0/49/4": true, - "0/49/5": null, - "0/49/6": null, - "0/49/7": null, - "0/49/65532": 4, - "0/49/65533": 2, - "0/49/65528": [], - "0/49/65529": [], - "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], - "0/51/0": [ - { - "0": "vethd3cc78a", - "1": true, - "2": null, - "3": null, - "4": "RiMoOM7I", - "5": [], - "6": ["/oAAAAAAAABEIyj//jjOyA=="], - "7": 0 - }, - { - "0": "veth86f4b74", - "1": true, - "2": null, - "3": null, - "4": "ehLA7XI6", - "5": [], - "6": ["/oAAAAAAAAB4EsD//u1yOg=="], - "7": 0 - }, - { - "0": "veth36c1460", - "1": true, - "2": null, - "3": null, - "4": "0sdiwOO7", - "5": [], - "6": ["/oAAAAAAAADQx2L//sDjuw=="], - "7": 0 - }, - { - "0": "veth55a0982", - "1": true, - "2": null, - "3": null, - "4": "fuu5VpgB", - "5": [], - "6": ["/oAAAAAAAAB867n//laYAQ=="], - "7": 0 - }, - { - "0": "vethd446fa5", - "1": true, - "2": null, - "3": null, - "4": "QsY5wCp1", - "5": [], - "6": ["/oAAAAAAAABAxjn//sAqdQ=="], - "7": 0 - }, - { - "0": "vethfc6e4d6", - "1": true, - "2": null, - "3": null, - "4": "IsHWia4E", - "5": [], - "6": ["/oAAAAAAAAAgwdb//omuBA=="], - "7": 0 - }, - { - "0": "veth4b35142", - "1": true, - "2": null, - "3": null, - "4": "RizM/XJz", - "5": [], - "6": ["/oAAAAAAAABELMz//v1ycw=="], - "7": 0 - }, - { - "0": "vetha0a808d", - "1": true, - "2": null, - "3": null, - "4": "JrxkpiTq", - "5": [], - "6": ["/oAAAAAAAAAkvGT//qYk6g=="], - "7": 0 - }, - { - "0": "hassio", - "1": true, - "2": null, - "3": null, - "4": "AkL+6fKF", - "5": ["rB4gAQ=="], - "6": ["/oAAAAAAAAAAQv7//unyhQ=="], - "7": 0 - }, - { - "0": "docker0", - "1": true, - "2": null, - "3": null, - "4": "AkKzcIpP", - "5": ["rB7oAQ=="], - "6": ["/oAAAAAAAAAAQrP//nCKTw=="], - "7": 0 - }, - { - "0": "end0", - "1": true, - "2": null, - "3": null, - "4": "5F8BoroJ", - "5": ["wKgBAg=="], - "6": [ - "KgKkZACnAAHGF8Tinim+lQ==", - "/XH1Cm7wY08fhLPRgO32Uw==", - "/oAAAAAAAAAENYnD2gV25w==" - ], - "7": 2 - }, - { - "0": "lo", - "1": true, - "2": null, - "3": null, - "4": "AAAAAAAA", - "5": ["fwAAAQ=="], - "6": ["AAAAAAAAAAAAAAAAAAAAAQ=="], - "7": 0 - } - ], - "0/51/1": 1, - "0/51/2": 16, - "0/51/3": 0, - "0/51/4": 0, - "0/51/5": [], - "0/51/6": [], - "0/51/7": [], - "0/51/8": false, - "0/51/65532": 0, - "0/51/65533": 2, - "0/51/65528": [2], - "0/51/65529": [0, 1], - "0/51/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533 - ], - "0/54/0": null, - "0/54/1": null, - "0/54/2": null, - "0/54/3": null, - "0/54/4": null, - "0/54/5": null, - "0/54/6": null, - "0/54/7": null, - "0/54/8": null, - "0/54/9": null, - "0/54/10": null, - "0/54/11": null, - "0/54/12": null, - "0/54/65532": 3, - "0/54/65533": 1, - "0/54/65528": [], - "0/54/65529": [0], - "0/54/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 65528, 65529, 65531, 65532, - 65533 - ], - "0/60/0": 0, - "0/60/1": null, - "0/60/2": null, - "0/60/65532": 0, - "0/60/65533": 1, - "0/60/65528": [], - "0/60/65529": [0, 2], - "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], - "0/62/0": [ - { - "1": 
"FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRnRgkBwEkCAEwCUEEleMInA+X+lZO6bSa7ysHaAvYS13Fg9GoRuhiFk+wvtjLUrouyH+DUp3p3purrVdfUWTp03damVsxp9Lv48goDzcKNQEoARgkAgE2AwQCBAEYMAQUrD2d44zyVXjKbyYgNaEibaXFI7IwBRTphWiJ/NqGe3Cx3Nj8H02NgGioSRgwC0CaASOOwmsHE8cNw7FhQDtRhh0ztvwdfZKANU93vrX/+ww8UifrTjUIgvobgixpCGxmGvEmk3RN7TX6lgX4Qz7MGA==", - "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEYztrLK2UY1ORHUEFLO7PDfVjw/MnMDNX5kjdHHDU7npeITnSyg/kxxUM+pD7ccxfDuHQKHbBq9+qbJi8oGik8DcKNQEpARgkAmAwBBTphWiJ/NqGe3Cx3Nj8H02NgGioSTAFFMnf5ZkBCRaBluhSmLJkvcVXxHxTGDALQOOcZAL8XEktvE5sjrUmFNhkP2g3Ef+4BHtogItdZYyA9E/WbzW25E0UxZInwjjIzH3YimDUZVoEWGML8NV2kCEY", - "254": 1 - } - ], - "0/62/1": [ - { - "1": "BAg5aeR7RuFKZhukCxMGglCd00dKlhxGq8BbjeyZClKz5kN2Ytzav0xWsiWEEb3s9uvMIYFoQYULnSJvOMTcD14=", - "2": 65521, - "3": 1, - "4": 157, - "5": "", - "254": 1 - } - ], - "0/62/2": 16, - "0/62/3": 1, - "0/62/4": [ - "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEECDlp5HtG4UpmG6QLEwaCUJ3TR0qWHEarwFuN7JkKUrPmQ3Zi3Nq/TFayJYQRvez268whgWhBhQudIm84xNwPXjcKNQEpARgkAmAwBBTJ3+WZAQkWgZboUpiyZL3FV8R8UzAFFMnf5ZkBCRaBluhSmLJkvcVXxHxTGDALQO9QSAdvJkM6b/wIc07MCw1ma46lTyGYG8nvpn0ICI73nuD3QeaWwGIQTkVGEpzF+TuDK7gtTz7YUrR+PSnvMk8Y" - ], - "0/62/5": 1, - "0/62/65532": 0, - "0/62/65533": 1, - "0/62/65528": [1, 3, 5, 8], - "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], - "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], - "0/63/0": [], - "0/63/1": [], - "0/63/2": 4, - "0/63/3": 3, - "0/63/65532": 0, - "0/63/65533": 2, - "0/63/65528": [2, 5], - "0/63/65529": [0, 1, 3, 4], - "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "1/3/0": 0, - "1/3/1": 0, - "1/3/65532": 0, - "1/3/65533": 4, - "1/3/65528": [], - "1/3/65529": [0, 64], - "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/29/0": [ - { - "0": 121, - "1": 1 - } - ], - "1/29/1": [3, 29, 94, 95, 96], - "1/29/2": [], - "1/29/3": [], - "1/29/65532": 0, - "1/29/65533": 2, - "1/29/65528": [], - "1/29/65529": [], - "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "1/94/0": [ - { - "0": "Normal", - "1": 0, - "2": [ - { - "1": 16384 - } - ] - }, - { - "0": "Defrost", - "1": 1, - "2": [ - { - "1": 16385 - } - ] - } - ], - "1/94/1": 0, - "1/94/65532": 0, - "1/94/65533": 1, - "1/94/65528": [], - "1/94/65529": [], - "1/94/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/95/0": 30, - "1/95/1": 86400, - "1/95/2": 90, - "1/95/3": 20, - "1/95/4": 90, - "1/95/5": 10, - "1/95/8": 1000, - "1/95/65532": 5, - "1/95/65533": 1, - "1/95/65528": [], - "1/95/65529": [0, 1], - "1/95/65531": [0, 1, 2, 3, 4, 5, 8, 65528, 65529, 65531, 65532, 65533], - "1/96/0": null, - "1/96/1": null, - "1/96/2": 30, - "1/96/3": [ - { - "0": 0 - }, - { - "0": 1 - }, - { - "0": 2 - }, - { - "0": 3 - } - ], - "1/96/4": 0, - "1/96/5": { - "0": 0 - }, - "1/96/65532": 0, - "1/96/65533": 2, - "1/96/65528": [4], - "1/96/65529": [0, 1, 2, 3], - "1/96/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533] - }, - "attribute_subscriptions": [] -} diff --git a/tests/components/matter/fixtures/nodes/multi_endpoint_light.json b/tests/components/matter/fixtures/nodes/multi-endpoint-light.json similarity index 100% rename from tests/components/matter/fixtures/nodes/multi_endpoint_light.json rename to tests/components/matter/fixtures/nodes/multi-endpoint-light.json diff --git a/tests/components/matter/fixtures/nodes/occupancy_sensor.json b/tests/components/matter/fixtures/nodes/occupancy-sensor.json similarity index 100% rename from 
tests/components/matter/fixtures/nodes/occupancy_sensor.json rename to tests/components/matter/fixtures/nodes/occupancy-sensor.json diff --git a/tests/components/matter/fixtures/nodes/on_off_plugin_unit.json b/tests/components/matter/fixtures/nodes/on-off-plugin-unit.json similarity index 100% rename from tests/components/matter/fixtures/nodes/on_off_plugin_unit.json rename to tests/components/matter/fixtures/nodes/on-off-plugin-unit.json diff --git a/tests/components/matter/fixtures/nodes/onoff_light_alt_name.json b/tests/components/matter/fixtures/nodes/onoff-light-alt-name.json similarity index 100% rename from tests/components/matter/fixtures/nodes/onoff_light_alt_name.json rename to tests/components/matter/fixtures/nodes/onoff-light-alt-name.json diff --git a/tests/components/matter/fixtures/nodes/onoff_light_no_name.json b/tests/components/matter/fixtures/nodes/onoff-light-no-name.json similarity index 100% rename from tests/components/matter/fixtures/nodes/onoff_light_no_name.json rename to tests/components/matter/fixtures/nodes/onoff-light-no-name.json diff --git a/tests/components/matter/fixtures/nodes/onoff_light_with_levelcontrol_present.json b/tests/components/matter/fixtures/nodes/onoff-light-with-levelcontrol-present.json similarity index 100% rename from tests/components/matter/fixtures/nodes/onoff_light_with_levelcontrol_present.json rename to tests/components/matter/fixtures/nodes/onoff-light-with-levelcontrol-present.json diff --git a/tests/components/matter/fixtures/nodes/onoff_light.json b/tests/components/matter/fixtures/nodes/onoff-light.json similarity index 100% rename from tests/components/matter/fixtures/nodes/onoff_light.json rename to tests/components/matter/fixtures/nodes/onoff-light.json diff --git a/tests/components/matter/fixtures/nodes/pressure_sensor.json b/tests/components/matter/fixtures/nodes/pressure-sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/pressure_sensor.json rename to tests/components/matter/fixtures/nodes/pressure-sensor.json diff --git a/tests/components/matter/fixtures/nodes/room_airconditioner.json b/tests/components/matter/fixtures/nodes/room-airconditioner.json similarity index 100% rename from tests/components/matter/fixtures/nodes/room_airconditioner.json rename to tests/components/matter/fixtures/nodes/room-airconditioner.json diff --git a/tests/components/matter/fixtures/nodes/silabs_dishwasher.json b/tests/components/matter/fixtures/nodes/silabs_dishwasher.json deleted file mode 100644 index c5015bc1c34..00000000000 --- a/tests/components/matter/fixtures/nodes/silabs_dishwasher.json +++ /dev/null @@ -1,657 +0,0 @@ -{ - "node_id": 54, - "date_commissioned": "2024-08-15T07:14:29.055273", - "last_interview": "2024-08-15T11:36:27.830863", - "interview_version": 6, - "available": true, - "is_bridge": false, - "attributes": { - "0/29/0": [ - { - "0": 22, - "1": 1 - } - ], - "0/29/1": [ - 29, 31, 40, 42, 43, 44, 45, 48, 49, 50, 51, 52, 53, 60, 62, 63, 64, 65 - ], - "0/29/2": [41], - "0/29/3": [1, 2], - "0/29/65532": 0, - "0/29/65533": 2, - "0/29/65528": [], - "0/29/65529": [], - "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "0/31/0": [ - { - "254": 1 - }, - { - "254": 1 - }, - { - "254": 2 - }, - { - "1": 5, - "2": 2, - "3": [112233], - "4": null, - "254": 3 - } - ], - "0/31/1": [], - "0/31/2": 4, - "0/31/3": 3, - "0/31/4": 4, - "0/31/65532": 0, - "0/31/65533": 1, - "0/31/65528": [], - "0/31/65529": [], - "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/40/0": 
17, - "0/40/1": "Silabs", - "0/40/2": 65521, - "0/40/3": "Dishwasher", - "0/40/4": 32773, - "0/40/5": "", - "0/40/6": "**REDACTED**", - "0/40/7": 1, - "0/40/8": "TEST_VERSION", - "0/40/9": 1, - "0/40/10": "1", - "0/40/11": "20200101", - "0/40/12": "Dishwasher", - "0/40/13": "Dishwasher", - "0/40/14": "", - "0/40/15": "", - "0/40/16": false, - "0/40/18": "**REDACTED**", - "0/40/19": { - "0": 3, - "1": 3 - }, - "0/40/21": 16973824, - "0/40/22": 1, - "0/40/65532": 0, - "0/40/65533": 3, - "0/40/65528": [], - "0/40/65529": [], - "0/40/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 21, 22, - 65528, 65529, 65531, 65532, 65533 - ], - "0/42/0": [ - { - "1": 556220604, - "2": 0, - "254": 1 - } - ], - "0/42/1": true, - "0/42/2": 1, - "0/42/3": null, - "0/42/65532": 0, - "0/42/65533": 1, - "0/42/65528": [], - "0/42/65529": [0], - "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "0/43/0": "en-US", - "0/43/1": [ - "en-US", - "de-DE", - "fr-FR", - "en-GB", - "es-ES", - "zh-CN", - "it-IT", - "ja-JP" - ], - "0/43/65532": 0, - "0/43/65533": 1, - "0/43/65528": [], - "0/43/65529": [], - "0/43/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "0/44/0": 0, - "0/44/1": 0, - "0/44/2": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 7], - "0/44/65532": 0, - "0/44/65533": 1, - "0/44/65528": [], - "0/44/65529": [], - "0/44/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], - "0/45/0": 1, - "0/45/65532": 0, - "0/45/65533": 1, - "0/45/65528": [], - "0/45/65529": [], - "0/45/65531": [0, 65528, 65529, 65531, 65532, 65533], - "0/48/0": 0, - "0/48/1": { - "0": 60, - "1": 900 - }, - "0/48/2": 0, - "0/48/3": 0, - "0/48/4": true, - "0/48/65532": 0, - "0/48/65533": 1, - "0/48/65528": [1, 3, 5], - "0/48/65529": [0, 2, 4], - "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/49/0": 1, - "0/49/1": [], - "0/49/2": 10, - "0/49/3": 20, - "0/49/4": true, - "0/49/5": 0, - "0/49/6": "**REDACTED**", - "0/49/7": null, - "0/49/9": 10, - "0/49/10": 4, - "0/49/65532": 2, - "0/49/65533": 2, - "0/49/65528": [1, 5, 7], - "0/49/65529": [0, 3, 4, 6, 8], - "0/49/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 65528, 65529, 65531, 65532, 65533 - ], - "0/50/65532": 0, - "0/50/65533": 1, - "0/50/65528": [1], - "0/50/65529": [0], - "0/50/65531": [65528, 65529, 65531, 65532, 65533], - "0/51/0": [], - "0/51/1": 6, - "0/51/2": 10, - "0/51/3": 4, - "0/51/4": 1, - "0/51/5": [], - "0/51/6": [], - "0/51/7": [], - "0/51/8": false, - "0/51/65532": 0, - "0/51/65533": 2, - "0/51/65528": [2], - "0/51/65529": [0, 1], - "0/51/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533 - ], - "0/52/0": [ - { - "0": 3, - "1": "UART", - "3": 128 - }, - { - "0": 9, - "1": "DishWash", - "3": 766 - }, - { - "0": 2, - "1": "OT Stack", - "3": 719 - }, - { - "0": 12, - "1": "Bluetoot", - "3": 40 - }, - { - "0": 1, - "1": "Bluetoot", - "3": 282 - }, - { - "0": 11, - "1": "Bluetoot", - "3": 210 - }, - { - "0": 8, - "1": "shell", - "3": 323 - }, - { - "0": 6, - "1": "Tmr Svc", - "3": 594 - }, - { - "0": 5, - "1": "IDLE", - "3": 266 - }, - { - "0": 7, - "1": "CHIP", - "3": 705 - } - ], - "0/52/1": 100824, - "0/52/2": 16984, - "0/52/3": 4294959062, - "0/52/65532": 1, - "0/52/65533": 1, - "0/52/65528": [], - "0/52/65529": [0], - "0/52/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "0/53/0": 25, - "0/53/1": 5, - "0/53/2": "**REDACTED**", - "0/53/3": 39055, - "0/53/4": 12054125955590472924, - "0/53/5": "**REDACTED**", - "0/53/6": 0, - "0/53/7": [], - "0/53/8": [], - "0/53/9": 1773502518, - "0/53/10": 
64, - "0/53/11": 88, - "0/53/12": 225, - "0/53/13": 22, - "0/53/14": 1, - "0/53/15": 0, - "0/53/16": 1, - "0/53/17": 0, - "0/53/18": 0, - "0/53/19": 1, - "0/53/20": 0, - "0/53/21": 0, - "0/53/22": 693, - "0/53/23": 686, - "0/53/24": 7, - "0/53/25": 686, - "0/53/26": 686, - "0/53/27": 7, - "0/53/28": 693, - "0/53/29": 0, - "0/53/30": 0, - "0/53/31": 0, - "0/53/32": 0, - "0/53/33": 61, - "0/53/34": 0, - "0/53/35": 0, - "0/53/36": 2, - "0/53/37": 0, - "0/53/38": 0, - "0/53/39": 87, - "0/53/40": 87, - "0/53/41": 0, - "0/53/42": 86, - "0/53/43": 0, - "0/53/44": 0, - "0/53/45": 0, - "0/53/46": 0, - "0/53/47": 0, - "0/53/48": 0, - "0/53/49": 1, - "0/53/50": 0, - "0/53/51": 0, - "0/53/52": 0, - "0/53/53": 0, - "0/53/54": 0, - "0/53/55": 0, - "0/53/56": 0, - "0/53/57": 0, - "0/53/58": 0, - "0/53/59": { - "0": 672, - "1": 8335 - }, - "0/53/60": "AB//wA==", - "0/53/61": { - "0": true, - "1": false, - "2": true, - "3": true, - "4": true, - "5": true, - "6": false, - "7": true, - "8": true, - "9": true, - "10": true, - "11": true - }, - "0/53/62": [], - "0/53/65532": 15, - "0/53/65533": 2, - "0/53/65528": [], - "0/53/65529": [0], - "0/53/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, - 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 59, 60, 61, 62, 65528, 65529, 65531, 65532, 65533 - ], - "0/60/0": 0, - "0/60/1": null, - "0/60/2": null, - "0/60/65532": 0, - "0/60/65533": 1, - "0/60/65528": [], - "0/60/65529": [0, 1, 2], - "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], - "0/62/0": [], - "0/62/1": [], - "0/62/2": 5, - "0/62/3": 3, - "0/62/4": [], - "0/62/5": 3, - "0/62/65532": 0, - "0/62/65533": 1, - "0/62/65528": [1, 3, 5, 8], - "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], - "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], - "0/63/0": [], - "0/63/1": [], - "0/63/2": 4, - "0/63/3": 3, - "0/63/65532": 0, - "0/63/65533": 2, - "0/63/65528": [2, 5], - "0/63/65529": [0, 1, 3, 4], - "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "0/64/0": [ - { - "0": "room", - "1": "bedroom 2" - }, - { - "0": "orientation", - "1": "North" - }, - { - "0": "floor", - "1": "2" - }, - { - "0": "direction", - "1": "up" - } - ], - "0/64/65532": 0, - "0/64/65533": 1, - "0/64/65528": [], - "0/64/65529": [], - "0/64/65531": [0, 65528, 65529, 65531, 65532, 65533], - "0/65/0": [], - "0/65/65532": 0, - "0/65/65533": 1, - "0/65/65528": [], - "0/65/65529": [], - "0/65/65531": [0, 65528, 65529, 65531, 65532, 65533], - "1/3/0": 0, - "1/3/1": 2, - "1/3/65532": 0, - "1/3/65533": 4, - "1/3/65528": [], - "1/3/65529": [0, 64], - "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/29/0": [ - { - "0": 117, - "1": 1 - } - ], - "1/29/1": [3, 29, 30, 89, 96], - "1/29/2": [], - "1/29/3": [], - "1/29/65532": 0, - "1/29/65533": 2, - "1/29/65528": [], - "1/29/65529": [], - "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "1/30/0": [], - "1/30/65532": 0, - "1/30/65533": 1, - "1/30/65528": [], - "1/30/65529": [], - "1/30/65531": [0, 65528, 65529, 65531, 65532, 65533], - "1/89/0": null, - "1/89/1": null, - "1/89/65532": null, - "1/89/65533": 2, - "1/89/65528": [1], - "1/89/65529": [0], - "1/89/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/96/0": null, - "1/96/1": null, - "1/96/3": [ - { - "0": 0 - }, - { - "0": 1 - }, - { - "0": 2 - }, - { - "0": 3 - }, - { - "0": 8, - "1": "Extra state" - } - ], - "1/96/4": 0, - "1/96/5": { - "0": 
0 - }, - "1/96/65532": 0, - "1/96/65533": 1, - "1/96/65528": [4], - "1/96/65529": [0, 1, 2], - "1/96/65531": [0, 1, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], - "2/29/0": [ - { - "0": 1296, - "1": 1 - } - ], - "2/29/1": [29, 144, 145, 156], - "2/29/2": [], - "2/29/3": [], - "2/29/65532": 0, - "2/29/65533": 2, - "2/29/65528": [], - "2/29/65529": [], - "2/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "2/144/0": 2, - "2/144/1": 3, - "2/144/2": [ - { - "0": 5, - "1": true, - "2": -50000000, - "3": 50000000, - "4": [ - { - "0": -50000000, - "1": -10000000, - "2": 5000, - "3": 2000, - "4": 3000 - }, - { - "0": -9999999, - "1": 9999999, - "2": 1000, - "3": 100, - "4": 500 - }, - { - "0": 10000000, - "1": 50000000, - "2": 5000, - "3": 2000, - "4": 3000 - } - ] - }, - { - "0": 2, - "1": true, - "2": -100000, - "3": 100000, - "4": [ - { - "0": -100000, - "1": -5000, - "2": 5000, - "3": 2000, - "4": 3000 - }, - { - "0": -4999, - "1": 4999, - "2": 1000, - "3": 100, - "4": 500 - }, - { - "0": 5000, - "1": 100000, - "2": 5000, - "3": 2000, - "4": 3000 - } - ] - }, - { - "0": 1, - "1": true, - "2": -500000, - "3": 500000, - "4": [ - { - "0": -500000, - "1": -100000, - "2": 5000, - "3": 2000, - "4": 3000 - }, - { - "0": -99999, - "1": 99999, - "2": 1000, - "3": 100, - "4": 500 - }, - { - "0": 100000, - "1": 500000, - "2": 5000, - "3": 2000, - "4": 3000 - } - ] - } - ], - "2/144/3": [ - { - "0": 0, - "1": 0, - "2": 300, - "7": 101, - "8": 101, - "9": 101, - "10": 101 - }, - { - "0": 1, - "1": 0, - "2": 500, - "7": 101, - "8": 101, - "9": 101, - "10": 101 - }, - { - "0": 2, - "1": 0, - "2": 1000, - "7": 101, - "8": 101, - "9": 101, - "10": 101 - } - ], - "2/144/4": 120000, - "2/144/5": 0, - "2/144/6": 0, - "2/144/7": 0, - "2/144/8": 0, - "2/144/9": 0, - "2/144/10": 0, - "2/144/11": 120000, - "2/144/12": 0, - "2/144/13": 0, - "2/144/14": 60, - "2/144/15": [ - { - "0": 1, - "1": 100000 - } - ], - "2/144/16": [ - { - "0": 1, - "1": 100000 - } - ], - "2/144/17": 9800, - "2/144/18": 0, - "2/144/65532": 31, - "2/144/65533": 1, - "2/144/65528": [], - "2/144/65529": [], - "2/144/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 65528, - 65529, 65531, 65532, 65533 - ], - "2/145/0": { - "0": 14, - "1": true, - "2": 0, - "3": 1000000000000000, - "4": [ - { - "0": 0, - "1": 1000000000000000, - "2": 500, - "3": 50 - } - ] - }, - "2/145/1": { - "0": 0, - "1": 9, - "2": 12, - "3": 9649, - "4": 12530 - }, - "2/145/5": { - "0": 0, - "1": 0, - "2": 0, - "3": 0 - }, - "2/145/65532": 5, - "2/145/65533": 1, - "2/145/65528": [], - "2/145/65529": [], - "2/145/65531": [0, 1, 5, 65528, 65529, 65531, 65532, 65533], - "2/156/0": [0, 1, 2], - "2/156/1": null, - "2/156/65532": 12, - "2/156/65533": 1, - "2/156/65528": [], - "2/156/65529": [], - "2/156/65531": [0, 1, 65528, 65529, 65531, 65532, 65533] - }, - "attribute_subscriptions": [] -} diff --git a/tests/components/matter/fixtures/nodes/smoke_detector.json b/tests/components/matter/fixtures/nodes/smoke_detector.json deleted file mode 100644 index 7ba525a7552..00000000000 --- a/tests/components/matter/fixtures/nodes/smoke_detector.json +++ /dev/null @@ -1,238 +0,0 @@ -{ - "node_id": 1, - "date_commissioned": "2024-09-13T20:07:21.672257", - "last_interview": "2024-09-13T21:10:36.026041", - "interview_version": 6, - "available": true, - "is_bridge": false, - "attributes": { - "0/29/0": [ - { - "0": 22, - "1": 2 - } - ], - "0/29/1": [29, 31, 40, 42, 48, 49, 51, 60, 62, 63, 70], - "0/29/2": [41], - "0/29/3": [1], - "0/29/65532": 0, - 
"0/29/65533": 2, - "0/29/65528": [], - "0/29/65529": [], - "0/29/65530": [], - "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], - "0/31/0": [ - { - "1": 5, - "2": 2, - "3": [112233], - "4": null, - "254": 3 - } - ], - "0/31/1": [], - "0/31/2": 4, - "0/31/3": 3, - "0/31/4": 4, - "0/31/65532": 0, - "0/31/65533": 1, - "0/31/65528": [], - "0/31/65529": [], - "0/31/65530": [0, 1], - "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533], - "0/40/0": 17, - "0/40/1": "HEIMAN", - "0/40/2": 4619, - "0/40/3": "Smoke sensor", - "0/40/4": 4099, - "0/40/5": "", - "0/40/6": "**REDACTED**", - "0/40/7": 0, - "0/40/8": "0.0", - "0/40/9": 16, - "0/40/10": "1.0", - "0/40/11": "20240403", - "0/40/14": "", - "0/40/15": "2404034099000007", - "0/40/16": false, - "0/40/18": "redacted", - "0/40/19": { - "0": 3, - "1": 3 - }, - "0/40/65532": 0, - "0/40/65533": 2, - "0/40/65528": [], - "0/40/65529": [], - "0/40/65530": [0, 2], - "0/40/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 14, 15, 16, 18, 19, 65528, 65529, - 65530, 65531, 65532, 65533 - ], - "0/42/0": [], - "0/42/1": true, - "0/42/2": 1, - "0/42/3": null, - "0/42/65532": 0, - "0/42/65533": 1, - "0/42/65528": [], - "0/42/65529": [0], - "0/42/65530": [0, 1, 2], - "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], - "0/48/0": 0, - "0/48/1": { - "0": 60, - "1": 900 - }, - "0/48/2": 0, - "0/48/3": 0, - "0/48/4": true, - "0/48/65532": 0, - "0/48/65533": 1, - "0/48/65528": [1, 3, 5], - "0/48/65529": [0, 2, 4], - "0/48/65530": [], - "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533], - "0/49/0": 1, - "0/49/1": [ - { - "0": "+uApc5vSQm4=", - "1": true - } - ], - "0/49/2": 10, - "0/49/3": 20, - "0/49/4": true, - "0/49/5": 0, - "0/49/6": "+uApc5vSQm4=", - "0/49/7": null, - "0/49/65532": 2, - "0/49/65533": 1, - "0/49/65528": [1, 5, 7], - "0/49/65529": [0, 3, 4, 6, 8], - "0/49/65530": [], - "0/49/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65530, 65531, 65532, 65533 - ], - "0/51/0": [], - "0/51/1": 1, - "0/51/2": 247340, - "0/51/4": 0, - "0/51/5": [], - "0/51/6": [], - "0/51/7": [], - "0/51/8": false, - "0/51/65532": 0, - "0/51/65533": 1, - "0/51/65528": [], - "0/51/65529": [0], - "0/51/65530": [3], - "0/51/65531": [ - 0, 1, 2, 4, 5, 6, 7, 8, 65528, 65529, 65530, 65531, 65532, 65533 - ], - "0/60/0": 0, - "0/60/1": null, - "0/60/2": null, - "0/60/65532": 0, - "0/60/65533": 1, - "0/60/65528": [], - "0/60/65529": [0, 1, 2], - "0/60/65530": [], - "0/60/65531": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533], - "0/62/0": [], - "0/62/1": [], - "0/62/2": 5, - "0/62/3": 3, - "0/62/4": [], - "0/62/5": 3, - "0/62/65532": 0, - "0/62/65533": 1, - "0/62/65528": [1, 3, 5, 8], - "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], - "0/62/65530": [], - "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65530, 65531, 65532, 65533], - "0/63/0": [], - "0/63/1": [], - "0/63/2": 4, - "0/63/3": 3, - "0/63/65532": 0, - "0/63/65533": 2, - "0/63/65528": [2, 5], - "0/63/65529": [0, 1, 3, 4], - "0/63/65530": [], - "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], - "0/70/0": 300, - "0/70/1": 6000, - "0/70/2": 500, - "0/70/3": [], - "0/70/4": 0, - "0/70/5": 2, - "0/70/65532": 1, - "0/70/65533": 1, - "0/70/65528": [1], - "0/70/65529": [0, 2, 3], - "0/70/65530": [], - "0/70/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65530, 65531, 65532, 65533], - "1/3/0": 0, - "1/3/1": 2, - "1/3/65532": 0, - "1/3/65533": 4, - "1/3/65528": [], - "1/3/65529": [0], - "1/3/65530": [], - "1/3/65531": [0, 1, 65528, 65529, 65530, 
65531, 65532, 65533], - "1/29/0": [ - { - "0": 118, - "1": 1 - } - ], - "1/29/1": [3, 29, 47, 92], - "1/29/2": [], - "1/29/3": [], - "1/29/65532": 0, - "1/29/65533": 2, - "1/29/65528": [], - "1/29/65529": [], - "1/29/65530": [], - "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], - "1/47/0": 0, - "1/47/1": 2, - "1/47/2": "B2", - "1/47/11": 0, - "1/47/12": 188, - "1/47/14": 0, - "1/47/15": false, - "1/47/16": 0, - "1/47/19": "CR123A", - "1/47/20": 0, - "1/47/24": 0, - "1/47/25": 0, - "1/47/31": [], - "1/47/65532": 10, - "1/47/65533": 2, - "1/47/65528": [], - "1/47/65529": [], - "1/47/65530": [1], - "1/47/65531": [ - 0, 1, 2, 11, 12, 14, 15, 16, 19, 20, 24, 25, 31, 65528, 65529, 65530, - 65531, 65532, 65533 - ], - "1/92/0": 0, - "1/92/1": 0, - "1/92/3": 0, - "1/92/4": 0, - "1/92/5": false, - "1/92/6": false, - "1/92/7": 0, - "1/92/65532": 1, - "1/92/65533": 1, - "1/92/65528": [], - "1/92/65529": [0], - "1/92/65530": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10], - "1/92/65531": [ - 0, 1, 3, 4, 5, 6, 7, 65528, 65529, 65530, 65531, 65532, 65533 - ] - }, - "attribute_subscriptions": [] -} diff --git a/tests/components/matter/fixtures/nodes/switch_unit.json b/tests/components/matter/fixtures/nodes/switch-unit.json similarity index 100% rename from tests/components/matter/fixtures/nodes/switch_unit.json rename to tests/components/matter/fixtures/nodes/switch-unit.json diff --git a/tests/components/matter/fixtures/nodes/temperature_sensor.json b/tests/components/matter/fixtures/nodes/temperature-sensor.json similarity index 100% rename from tests/components/matter/fixtures/nodes/temperature_sensor.json rename to tests/components/matter/fixtures/nodes/temperature-sensor.json diff --git a/tests/components/matter/fixtures/nodes/vacuum_cleaner.json b/tests/components/matter/fixtures/nodes/vacuum_cleaner.json deleted file mode 100644 index d6268144ffd..00000000000 --- a/tests/components/matter/fixtures/nodes/vacuum_cleaner.json +++ /dev/null @@ -1,309 +0,0 @@ -{ - "node_id": 66, - "date_commissioned": "2024-10-29T08:27:39.860951", - "last_interview": "2024-10-29T08:27:39.860959", - "interview_version": 6, - "available": true, - "is_bridge": false, - "attributes": { - "0/29/0": [ - { - "0": 22, - "1": 1 - } - ], - "0/29/1": [29, 31, 40, 48, 49, 50, 51, 60, 62, 63], - "0/29/2": [], - "0/29/3": [1], - "0/29/65532": 0, - "0/29/65533": 2, - "0/29/65528": [], - "0/29/65529": [], - "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "0/31/0": [ - { - "1": 5, - "2": 2, - "3": [112233], - "4": null, - "254": 1 - } - ], - "0/31/1": [], - "0/31/2": 4, - "0/31/3": 3, - "0/31/4": 4, - "0/31/65532": 0, - "0/31/65533": 1, - "0/31/65528": [], - "0/31/65529": [], - "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/40/0": 17, - "0/40/1": "TEST_VENDOR", - "0/40/2": 65521, - "0/40/3": "Mock Vacuum", - "0/40/4": 32769, - "0/40/5": "Mock Vacuum", - "0/40/6": "**REDACTED**", - "0/40/7": 0, - "0/40/8": "TEST_VERSION", - "0/40/9": 1, - "0/40/10": "1.0", - "0/40/11": "20200101", - "0/40/12": "", - "0/40/13": "", - "0/40/14": "", - "0/40/15": "TEST_SN", - "0/40/16": false, - "0/40/18": "F0D59DFAAEAD6E76", - "0/40/19": { - "0": 3, - "1": 65535 - }, - "0/40/21": 16973824, - "0/40/22": 1, - "0/40/65532": 0, - "0/40/65533": 3, - "0/40/65528": [], - "0/40/65529": [], - "0/40/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 21, 22, - 65528, 65529, 65531, 65532, 65533 - ], - "0/48/0": 0, - "0/48/1": { - "0": 60, - "1": 900 - }, - "0/48/2": 0, - "0/48/3": 2, - 
"0/48/4": true, - "0/48/65532": 0, - "0/48/65533": 1, - "0/48/65528": [1, 3, 5], - "0/48/65529": [0, 2, 4], - "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/49/0": 1, - "0/49/1": [ - { - "0": "ZW5kMA==", - "1": true - } - ], - "0/49/2": 0, - "0/49/3": 0, - "0/49/4": true, - "0/49/5": null, - "0/49/6": null, - "0/49/7": null, - "0/49/65532": 4, - "0/49/65533": 2, - "0/49/65528": [], - "0/49/65529": [], - "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], - "0/50/65532": 0, - "0/50/65533": 1, - "0/50/65528": [1], - "0/50/65529": [0], - "0/50/65531": [65528, 65529, 65531, 65532, 65533], - "0/51/0": [], - "0/51/1": 1, - "0/51/2": 47, - "0/51/3": 0, - "0/51/4": 0, - "0/51/5": [], - "0/51/6": [], - "0/51/7": [], - "0/51/8": false, - "0/51/65532": 0, - "0/51/65533": 2, - "0/51/65528": [2], - "0/51/65529": [0, 1], - "0/51/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533 - ], - "0/60/0": 0, - "0/60/1": null, - "0/60/2": null, - "0/60/65532": 0, - "0/60/65533": 1, - "0/60/65528": [], - "0/60/65529": [0, 2], - "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], - "0/62/0": [], - "0/62/1": [], - "0/62/2": 16, - "0/62/3": 1, - "0/62/4": [], - "0/62/5": 1, - "0/62/65532": 0, - "0/62/65533": 1, - "0/62/65528": [1, 3, 5, 8], - "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], - "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], - "0/63/0": [], - "0/63/1": [], - "0/63/2": 4, - "0/63/3": 3, - "0/63/65532": 0, - "0/63/65533": 2, - "0/63/65528": [2, 5], - "0/63/65529": [0, 1, 3, 4], - "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "1/29/0": [ - { - "0": 116, - "1": 1 - } - ], - "1/29/1": [3, 29, 84, 85, 97], - "1/29/2": [], - "1/29/3": [], - "1/29/65532": 0, - "1/29/65533": 2, - "1/29/65528": [], - "1/29/65529": [], - "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "1/84/0": [ - { - "0": "Idle", - "1": 0, - "2": [ - { - "1": 16384 - } - ] - }, - { - "0": "Cleaning", - "1": 1, - "2": [ - { - "1": 16385 - } - ] - }, - { - "0": "Mapping", - "1": 2, - "2": [ - { - "1": 16386 - } - ] - } - ], - "1/84/1": 0, - "1/84/65532": 0, - "1/84/65533": 2, - "1/84/65528": [1], - "1/84/65529": [0], - "1/84/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/85/0": [ - { - "0": "Quick", - "1": 0, - "2": [ - { - "1": 16385 - }, - { - "1": 1 - } - ] - }, - { - "0": "Auto", - "1": 1, - "2": [ - { - "1": 0 - }, - { - "1": 16385 - } - ] - }, - { - "0": "Deep Clean", - "1": 2, - "2": [ - { - "1": 16386 - }, - { - "1": 16384 - }, - { - "1": 16385 - } - ] - }, - { - "0": "Quiet", - "1": 3, - "2": [ - { - "1": 2 - }, - { - "1": 16385 - } - ] - }, - { - "0": "Max Vac", - "1": 4, - "2": [ - { - "1": 16385 - }, - { - "1": 16384 - } - ] - } - ], - "1/85/1": 0, - "1/85/65532": 0, - "1/85/65533": 2, - "1/85/65528": [1], - "1/85/65529": [0], - "1/85/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/97/0": null, - "1/97/1": null, - "1/97/3": [ - { - "0": 0 - }, - { - "0": 1 - }, - { - "0": 2 - }, - { - "0": 3 - }, - { - "0": 64 - }, - { - "0": 65 - }, - { - "0": 66 - } - ], - "1/97/4": 0, - "1/97/5": { - "0": 0 - }, - "1/97/65532": 0, - "1/97/65533": 1, - "1/97/65528": [4], - "1/97/65529": [0, 3, 128], - "1/97/65531": [0, 1, 3, 4, 5, 65528, 65529, 65531, 65532, 65533] - }, - "attribute_subscriptions": [] -} diff --git a/tests/components/matter/fixtures/nodes/valve.json b/tests/components/matter/fixtures/nodes/valve.json deleted file mode 100644 index 5ba06412ca9..00000000000 --- 
a/tests/components/matter/fixtures/nodes/valve.json +++ /dev/null @@ -1,260 +0,0 @@ -{ - "node_id": 75, - "date_commissioned": "2024-09-02T09:32:00.380607", - "last_interview": "2024-09-02T09:32:00.380611", - "interview_version": 6, - "available": true, - "is_bridge": false, - "attributes": { - "0/29/0": [ - { - "0": 22, - "1": 1 - } - ], - "0/29/1": [29, 31, 40, 43, 48, 49, 50, 51, 60, 62, 63], - "0/29/2": [], - "0/29/3": [1], - "0/29/65532": 0, - "0/29/65533": 2, - "0/29/65528": [], - "0/29/65529": [], - "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "0/31/0": [ - { - "1": 5, - "2": 2, - "3": [112233], - "4": null, - "254": 1 - } - ], - "0/31/1": [], - "0/31/2": 4, - "0/31/3": 3, - "0/31/4": 4, - "0/31/65532": 0, - "0/31/65533": 1, - "0/31/65528": [], - "0/31/65529": [], - "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/40/0": 18, - "0/40/1": "Mock", - "0/40/2": 65521, - "0/40/3": "Valve", - "0/40/4": 32768, - "0/40/5": "", - "0/40/6": "**REDACTED**", - "0/40/7": 0, - "0/40/8": "TEST_VERSION", - "0/40/9": 1, - "0/40/10": "1.0", - "0/40/11": "20200101", - "0/40/12": "", - "0/40/13": "", - "0/40/14": "", - "0/40/15": "TEST_SN", - "0/40/16": false, - "0/40/18": "A3586AC56A2CCCDB", - "0/40/19": { - "0": 3, - "1": 65535 - }, - "0/40/21": 17039360, - "0/40/22": 1, - "0/40/65532": 0, - "0/40/65533": 2, - "0/40/65528": [], - "0/40/65529": [], - "0/40/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 21, 22, - 65528, 65529, 65531, 65532, 65533 - ], - "0/43/0": "en-US", - "0/43/1": [ - "en-US", - "de-DE", - "fr-FR", - "en-GB", - "es-ES", - "zh-CN", - "it-IT", - "ja-JP" - ], - "0/43/65532": 0, - "0/43/65533": 1, - "0/43/65528": [], - "0/43/65529": [], - "0/43/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "0/48/0": 0, - "0/48/1": { - "0": 60, - "1": 900 - }, - "0/48/2": 0, - "0/48/3": 2, - "0/48/4": true, - "0/48/65532": 0, - "0/48/65533": 1, - "0/48/65528": [1, 3, 5], - "0/48/65529": [0, 2, 4], - "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], - "0/49/0": 1, - "0/49/1": [ - { - "0": "ZW5zMzM=", - "1": true - } - ], - "0/49/2": 0, - "0/49/3": 0, - "0/49/4": true, - "0/49/5": null, - "0/49/6": null, - "0/49/7": null, - "0/49/65532": 4, - "0/49/65533": 2, - "0/49/65528": [], - "0/49/65529": [], - "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], - "0/50/65532": 0, - "0/50/65533": 1, - "0/50/65528": [1], - "0/50/65529": [0], - "0/50/65531": [65528, 65529, 65531, 65532, 65533], - "0/51/0": [ - { - "0": "ens33", - "1": true, - "2": null, - "3": null, - "4": "AAwpp2CV", - "5": ["wKgBjg=="], - "6": [ - "/adI27DsyURo2mqau/5wuw==", - "/adI27DsyUSOe4PwnMXbYg==", - "KgEOCgKzOZD9M4Fh8k4Abg==", - "KgEOCgKzOZCNpPnLBN7MTQ==", - "/oAAAAAAAADvX1kMcjUM+w==" - ], - "7": 2 - }, - { - "0": "lo", - "1": true, - "2": null, - "3": null, - "4": "AAAAAAAA", - "5": ["fwAAAQ=="], - "6": ["AAAAAAAAAAAAAAAAAAAAAQ=="], - "7": 0 - } - ], - "0/51/1": 1, - "0/51/2": 77, - "0/51/3": 0, - "0/51/4": 0, - "0/51/5": [], - "0/51/6": [], - "0/51/7": [], - "0/51/8": false, - "0/51/65532": 0, - "0/51/65533": 2, - "0/51/65528": [2], - "0/51/65529": [0, 1], - "0/51/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533 - ], - "0/60/0": 0, - "0/60/1": null, - "0/60/2": null, - "0/60/65532": 0, - "0/60/65533": 1, - "0/60/65528": [], - "0/60/65529": [0, 1, 2], - "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], - "0/62/0": [ - { - "1": 
"FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRSxgkBwEkCAEwCUEEPt5xWN1i0R+dLM+MnDvosL8hjyrRoHq5ja+iCtZbpXTIXt17ueMKWDc7pgeEvHn9opOCiFvmqjEZ1L4hDk27MTcKNQEoARgkAgE2AwQCBAEYMAQUUPvMnV9FkGhfQedEwlqazBFbVfUwBRQ1L3KS8MJ5RVnuryNgRxdXueDAoxgwC0CA4m5xhFuvxC4iDehajKmbdNvZdo2alIbL8hGTor2jMFIPAowJeA0ZaS0+ocRsA6xxHRrpmmF095qUHbSONrPIGA==", - "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEBjOABseGNfeoeNqgBxhNV78q8SfQP8putY2hpTVwmJVaWzyqw4F/OhdJRHTZjXkSV87jHOZ58ivEb3GjFiT+OTcKNQEpARgkAmAwBBQ1L3KS8MJ5RVnuryNgRxdXueDAozAFFM2vLItbAuvwSMsedKJS5Tw7Aa2pGDALQCPtpgnYiXc8JmJmEi25z0BIPFYaf27j9yhVSmm45vjpdSZd3p8uOGjHd23m8w/22q2eWvkzU02qTVLgnV42cgkY", - "254": 1 - } - ], - "0/62/1": [ - { - "1": "BPUiJZj+BQknF7mbNOh2d9ZtKB+gQJLND+2qjIAAaMJb+2BW+xFhqDYYiA8p9YegdTb0wHA1NQY8TXMPyDwoP9Q=", - "2": 4939, - "3": 2, - "4": 75, - "5": "", - "254": 1 - } - ], - "0/62/2": 16, - "0/62/3": 1, - "0/62/4": [ - "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEE9SIlmP4FCScXuZs06HZ31m0oH6BAks0P7aqMgABowlv7YFb7EWGoNhiIDyn1h6B1NvTAcDU1BjxNcw/IPCg/1DcKNQEpARgkAmAwBBTNryyLWwLr8EjLHnSiUuU8OwGtqTAFFM2vLItbAuvwSMsedKJS5Tw7Aa2pGDALQKL0AGnKE3ezVrBBzJA+9INd8GTFOC3oX/EeCpI4CSKlc7LijfauiDVtJ5gfqR0gf1TKLcWfSUe7mIIvXzzvg0UY" - ], - "0/62/5": 1, - "0/62/65532": 0, - "0/62/65533": 1, - "0/62/65528": [1, 3, 5, 8], - "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], - "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], - "0/63/0": [], - "0/63/1": [], - "0/63/2": 4, - "0/63/3": 3, - "0/63/65532": 0, - "0/63/65533": 2, - "0/63/65528": [2, 5], - "0/63/65529": [0, 1, 3, 4], - "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "1/3/0": 0, - "1/3/1": 0, - "1/3/65532": 0, - "1/3/65533": 2, - "1/3/65528": [], - "1/3/65529": [0, 64], - "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/4/0": 128, - "1/4/65532": 1, - "1/4/65533": 3, - "1/4/65528": [0, 1, 2, 3], - "1/4/65529": [0, 1, 2, 3, 4, 5], - "1/4/65531": [0, 65528, 65529, 65531, 65532, 65533], - "1/29/0": [ - { - "0": 66, - "1": 1 - } - ], - "1/29/1": [3, 4, 29, 129], - "1/29/2": [], - "1/29/3": [], - "1/29/65532": 0, - "1/29/65533": 2, - "1/29/65528": [], - "1/29/65529": [], - "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], - "1/129/0": 0, - "1/129/1": 0, - "1/129/2": 0, - "1/129/3": null, - "1/129/4": 0, - "1/129/5": 0, - "1/129/6": 0, - "1/129/7": 0, - "1/129/8": 100, - "1/129/9": 0, - "1/129/10": 0, - "1/129/65532": 0, - "1/129/65533": 1, - "1/129/65528": [], - "1/129/65529": [0, 1], - "1/129/65531": [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 65528, 65529, 65531, 65532, 65533 - ] - }, - "attribute_subscriptions": [] -} diff --git a/tests/components/matter/fixtures/nodes/window_covering_full.json b/tests/components/matter/fixtures/nodes/window-covering_full.json similarity index 100% rename from tests/components/matter/fixtures/nodes/window_covering_full.json rename to tests/components/matter/fixtures/nodes/window-covering_full.json diff --git a/tests/components/matter/fixtures/nodes/window_covering_lift.json b/tests/components/matter/fixtures/nodes/window-covering_lift.json similarity index 100% rename from tests/components/matter/fixtures/nodes/window_covering_lift.json rename to tests/components/matter/fixtures/nodes/window-covering_lift.json diff --git a/tests/components/matter/fixtures/nodes/window_covering_pa_lift.json b/tests/components/matter/fixtures/nodes/window-covering_pa-lift.json similarity index 100% rename from tests/components/matter/fixtures/nodes/window_covering_pa_lift.json rename to 
tests/components/matter/fixtures/nodes/window-covering_pa-lift.json diff --git a/tests/components/matter/fixtures/nodes/window_covering_pa_tilt.json b/tests/components/matter/fixtures/nodes/window-covering_pa-tilt.json similarity index 100% rename from tests/components/matter/fixtures/nodes/window_covering_pa_tilt.json rename to tests/components/matter/fixtures/nodes/window-covering_pa-tilt.json diff --git a/tests/components/matter/fixtures/nodes/window_covering_tilt.json b/tests/components/matter/fixtures/nodes/window-covering_tilt.json similarity index 100% rename from tests/components/matter/fixtures/nodes/window_covering_tilt.json rename to tests/components/matter/fixtures/nodes/window-covering_tilt.json diff --git a/tests/components/matter/snapshots/test_binary_sensor.ambr b/tests/components/matter/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 2e3367121e9..00000000000 --- a/tests/components/matter/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,705 +0,0 @@ -# serializer version: 1 -# name: test_binary_sensors[door_lock][binary_sensor.mock_door_lock_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_door_lock_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-BatteryChargeLevel-47-14', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[door_lock][binary_sensor.mock_door_lock_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Mock Door Lock Battery', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_door_lock_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[door_lock][binary_sensor.mock_door_lock_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.mock_door_lock_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Door', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-LockDoorStateSensor-257-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[door_lock][binary_sensor.mock_door_lock_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Mock Door Lock Door', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_door_lock_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[door_lock_with_unbolt][binary_sensor.mock_door_lock_battery-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_door_lock_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-BatteryChargeLevel-47-14', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[door_lock_with_unbolt][binary_sensor.mock_door_lock_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Mock Door Lock Battery', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_door_lock_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[door_lock_with_unbolt][binary_sensor.mock_door_lock_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.mock_door_lock_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Door', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-LockDoorStateSensor-257-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[door_lock_with_unbolt][binary_sensor.mock_door_lock_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Mock Door Lock Door', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_door_lock_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[eve_contact_sensor][binary_sensor.eve_door_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.eve_door_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Door', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-ContactSensor-69-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[eve_contact_sensor][binary_sensor.eve_door_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Eve Door Door', - }), - 'context': , - 'entity_id': 'binary_sensor.eve_door_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', 
- }) -# --- -# name: test_binary_sensors[leak_sensor][binary_sensor.water_leak_detector_water_leak-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.water_leak_detector_water_leak', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Water leak', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'water_leak', - 'unique_id': '00000000000004D2-0000000000000020-MatterNodeDevice-1-WaterLeakDetector-69-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[leak_sensor][binary_sensor.water_leak_detector_water_leak-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'moisture', - 'friendly_name': 'Water Leak Detector Water leak', - }), - 'context': , - 'entity_id': 'binary_sensor.water_leak_detector_water_leak', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[occupancy_sensor][binary_sensor.mock_occupancy_sensor_occupancy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.mock_occupancy_sensor_occupancy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Occupancy', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-OccupancySensor-1030-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[occupancy_sensor][binary_sensor.mock_occupancy_sensor_occupancy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'occupancy', - 'friendly_name': 'Mock Occupancy Sensor Occupancy', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_occupancy_sensor_occupancy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[onoff_light_alt_name][binary_sensor.mock_onoff_light_occupancy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.mock_onoff_light_occupancy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Occupancy', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-OccupancySensor-1030-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[onoff_light_alt_name][binary_sensor.mock_onoff_light_occupancy-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'occupancy', - 'friendly_name': 'Mock OnOff Light Occupancy', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_onoff_light_occupancy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[onoff_light_no_name][binary_sensor.mock_light_occupancy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.mock_light_occupancy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Occupancy', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-OccupancySensor-1030-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[onoff_light_no_name][binary_sensor.mock_light_occupancy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'occupancy', - 'friendly_name': 'Mock Light Occupancy', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_light_occupancy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_battery_alert-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.smoke_sensor_battery_alert', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery alert', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_alert', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-SmokeCoAlarmBatteryAlertSensor-92-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_battery_alert-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Smoke sensor Battery alert', - }), - 'context': , - 'entity_id': 'binary_sensor.smoke_sensor_battery_alert', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_end_of_service-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.smoke_sensor_end_of_service', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'End of service', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'end_of_service', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-SmokeCoAlarmEndfOfServiceSensor-92-7', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_end_of_service-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Smoke sensor End of service', - }), - 'context': , - 'entity_id': 'binary_sensor.smoke_sensor_end_of_service', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_hardware_fault-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.smoke_sensor_hardware_fault', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Hardware fault', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'hardware_fault', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-SmokeCoAlarmHardwareFaultAlertSensor-92-6', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_hardware_fault-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Smoke sensor Hardware fault', - }), - 'context': , - 'entity_id': 'binary_sensor.smoke_sensor_hardware_fault', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_muted-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.smoke_sensor_muted', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Muted', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'muted', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-SmokeCoAlarmDeviceMutedSensor-92-4', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_muted-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Smoke sensor Muted', - }), - 'context': , - 'entity_id': 'binary_sensor.smoke_sensor_muted', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_smoke-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.smoke_sensor_smoke', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 
'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Smoke', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-SmokeCoAlarmSmokeStateSensor-92-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_smoke-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'smoke', - 'friendly_name': 'Smoke sensor Smoke', - }), - 'context': , - 'entity_id': 'binary_sensor.smoke_sensor_smoke', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_test_in_progress-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.smoke_sensor_test_in_progress', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Test in progress', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'test_in_progress', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-SmokeCoAlarmTestInProgressSensor-92-5', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[smoke_detector][binary_sensor.smoke_sensor_test_in_progress-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Smoke sensor Test in progress', - }), - 'context': , - 'entity_id': 'binary_sensor.smoke_sensor_test_in_progress', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/matter/snapshots/test_button.ambr b/tests/components/matter/snapshots/test_button.ambr deleted file mode 100644 index 10792b58d28..00000000000 --- a/tests/components/matter/snapshots/test_button.ambr +++ /dev/null @@ -1,2812 +0,0 @@ -# serializer version: 1 -# name: test_buttons[air_purifier][button.air_purifier_identify_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.air_purifier_identify_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify (1)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[air_purifier][button.air_purifier_identify_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Air Purifier Identify (1)', - }), - 'context': , - 'entity_id': 'button.air_purifier_identify_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: 
test_buttons[air_purifier][button.air_purifier_identify_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.air_purifier_identify_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify (2)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[air_purifier][button.air_purifier_identify_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Air Purifier Identify (2)', - }), - 'context': , - 'entity_id': 'button.air_purifier_identify_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[air_purifier][button.air_purifier_identify_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.air_purifier_identify_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify (3)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-3-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[air_purifier][button.air_purifier_identify_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Air Purifier Identify (3)', - }), - 'context': , - 'entity_id': 'button.air_purifier_identify_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[air_purifier][button.air_purifier_identify_4-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.air_purifier_identify_4', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify (4)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-4-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[air_purifier][button.air_purifier_identify_4-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Air Purifier Identify (4)', - }), - 'context': , - 'entity_id': 'button.air_purifier_identify_4', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 
'unknown', - }) -# --- -# name: test_buttons[air_purifier][button.air_purifier_identify_5-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.air_purifier_identify_5', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify (5)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-5-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[air_purifier][button.air_purifier_identify_5-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Air Purifier Identify (5)', - }), - 'context': , - 'entity_id': 'button.air_purifier_identify_5', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[air_purifier][button.air_purifier_reset_filter_condition-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.air_purifier_reset_filter_condition', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Reset filter condition', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reset_filter_condition', - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-HepaFilterMonitoringResetButton-113-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[air_purifier][button.air_purifier_reset_filter_condition-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Air Purifier Reset filter condition', - }), - 'context': , - 'entity_id': 'button.air_purifier_reset_filter_condition', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[air_purifier][button.air_purifier_reset_filter_condition_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.air_purifier_reset_filter_condition_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Reset filter condition', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reset_filter_condition', - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-ActivatedCarbonFilterMonitoringResetButton-114-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[air_purifier][button.air_purifier_reset_filter_condition_2-state] - StateSnapshot({ - 'attributes': 
ReadOnlyDict({ - 'friendly_name': 'Air Purifier Reset filter condition', - }), - 'context': , - 'entity_id': 'button.air_purifier_reset_filter_condition_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[air_quality_sensor][button.lightfi_aq1_air_quality_sensor_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.lightfi_aq1_air_quality_sensor_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[air_quality_sensor][button.lightfi_aq1_air_quality_sensor_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'lightfi-aq1-air-quality-sensor Identify', - }), - 'context': , - 'entity_id': 'button.lightfi_aq1_air_quality_sensor_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[color_temperature_light][button.mock_color_temperature_light_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_color_temperature_light_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[color_temperature_light][button.mock_color_temperature_light_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Color Temperature Light Identify', - }), - 'context': , - 'entity_id': 'button.mock_color_temperature_light_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[dimmable_plugin_unit][button.dimmable_plugin_unit_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.dimmable_plugin_unit_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 
'00000000000004D2-0000000000000024-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[dimmable_plugin_unit][button.dimmable_plugin_unit_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Dimmable Plugin Unit Identify', - }), - 'context': , - 'entity_id': 'button.dimmable_plugin_unit_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[door_lock][button.mock_door_lock_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_door_lock_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[door_lock][button.mock_door_lock_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Door Lock Identify', - }), - 'context': , - 'entity_id': 'button.mock_door_lock_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[door_lock_with_unbolt][button.mock_door_lock_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_door_lock_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[door_lock_with_unbolt][button.mock_door_lock_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Door Lock Identify', - }), - 'context': , - 'entity_id': 'button.mock_door_lock_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[eve_contact_sensor][button.eve_door_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.eve_door_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 
0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[eve_contact_sensor][button.eve_door_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Eve Door Identify', - }), - 'context': , - 'entity_id': 'button.eve_door_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[eve_energy_plug][button.eve_energy_plug_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.eve_energy_plug_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[eve_energy_plug][button.eve_energy_plug_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Eve Energy Plug Identify', - }), - 'context': , - 'entity_id': 'button.eve_energy_plug_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[eve_energy_plug_patched][button.eve_energy_plug_patched_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.eve_energy_plug_patched_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[eve_energy_plug_patched][button.eve_energy_plug_patched_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Eve Energy Plug Patched Identify', - }), - 'context': , - 'entity_id': 'button.eve_energy_plug_patched_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[eve_thermo][button.eve_thermo_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.eve_thermo_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 
'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[eve_thermo][button.eve_thermo_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Eve Thermo Identify', - }), - 'context': , - 'entity_id': 'button.eve_thermo_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.eve_weather_identify_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify (1)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Eve Weather Identify (1)', - }), - 'context': , - 'entity_id': 'button.eve_weather_identify_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.eve_weather_identify_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify (2)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-2-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Eve Weather Identify (2)', - }), - 'context': , - 'entity_id': 'button.eve_weather_identify_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[extended_color_light][button.mock_extended_color_light_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_extended_color_light_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 
'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[extended_color_light][button.mock_extended_color_light_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Extended Color Light Identify', - }), - 'context': , - 'entity_id': 'button.mock_extended_color_light_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[fan][button.mocked_fan_switch_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mocked_fan_switch_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[fan][button.mocked_fan_switch_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mocked Fan Switch Identify', - }), - 'context': , - 'entity_id': 'button.mocked_fan_switch_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[flow_sensor][button.mock_flow_sensor_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_flow_sensor_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[flow_sensor][button.mock_flow_sensor_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Flow Sensor Identify', - }), - 'context': , - 'entity_id': 'button.mock_flow_sensor_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[generic_switch][button.mock_generic_switch_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_generic_switch_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[generic_switch][button.mock_generic_switch_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Generic Switch Identify', - }), - 'context': , - 'entity_id': 'button.mock_generic_switch_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[generic_switch_multi][button.mock_generic_switch_fancy_button-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_generic_switch_fancy_button', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Fancy Button', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-2-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[generic_switch_multi][button.mock_generic_switch_fancy_button-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Generic Switch Fancy Button', - }), - 'context': , - 'entity_id': 'button.mock_generic_switch_fancy_button', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[generic_switch_multi][button.mock_generic_switch_identify_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_generic_switch_identify_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify (1)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[generic_switch_multi][button.mock_generic_switch_identify_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Generic Switch Identify (1)', - }), - 'context': , - 'entity_id': 'button.mock_generic_switch_identify_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[humidity_sensor][button.mock_humidity_sensor_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 
'entity_category': , - 'entity_id': 'button.mock_humidity_sensor_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[humidity_sensor][button.mock_humidity_sensor_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Humidity Sensor Identify', - }), - 'context': , - 'entity_id': 'button.mock_humidity_sensor_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[light_sensor][button.mock_light_sensor_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_light_sensor_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[light_sensor][button.mock_light_sensor_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Light Sensor Identify', - }), - 'context': , - 'entity_id': 'button.mock_light_sensor_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[microwave_oven][button.microwave_oven_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.microwave_oven_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[microwave_oven][button.microwave_oven_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Microwave Oven Identify', - }), - 'context': , - 'entity_id': 'button.microwave_oven_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[microwave_oven][button.microwave_oven_pause-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 
'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.microwave_oven_pause', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Pause', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'pause', - 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalStatePauseButton-96-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[microwave_oven][button.microwave_oven_pause-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Microwave Oven Pause', - }), - 'context': , - 'entity_id': 'button.microwave_oven_pause', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[microwave_oven][button.microwave_oven_resume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.microwave_oven_resume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Resume', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'resume', - 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalStateResumeButton-96-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[microwave_oven][button.microwave_oven_resume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Microwave Oven Resume', - }), - 'context': , - 'entity_id': 'button.microwave_oven_resume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[microwave_oven][button.microwave_oven_start-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.microwave_oven_start', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Start', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'start', - 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalStateStartButton-96-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[microwave_oven][button.microwave_oven_start-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Microwave Oven Start', - }), - 'context': , - 'entity_id': 'button.microwave_oven_start', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[microwave_oven][button.microwave_oven_stop-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, 
- 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.microwave_oven_stop', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Stop', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'stop', - 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalStateStopButton-96-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[microwave_oven][button.microwave_oven_stop-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Microwave Oven Stop', - }), - 'context': , - 'entity_id': 'button.microwave_oven_stop', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[multi_endpoint_light][button.inovelli_config-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.inovelli_config', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Config', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-5-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[multi_endpoint_light][button.inovelli_config-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Inovelli Config', - }), - 'context': , - 'entity_id': 'button.inovelli_config', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[multi_endpoint_light][button.inovelli_down-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.inovelli_down', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Down', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-4-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[multi_endpoint_light][button.inovelli_down-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Inovelli Down', - }), - 'context': , - 'entity_id': 'button.inovelli_down', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[multi_endpoint_light][button.inovelli_identify_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 
'button.inovelli_identify_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify (1)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[multi_endpoint_light][button.inovelli_identify_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Inovelli Identify (1)', - }), - 'context': , - 'entity_id': 'button.inovelli_identify_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[multi_endpoint_light][button.inovelli_identify_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.inovelli_identify_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify (2)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-2-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[multi_endpoint_light][button.inovelli_identify_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Inovelli Identify (2)', - }), - 'context': , - 'entity_id': 'button.inovelli_identify_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[multi_endpoint_light][button.inovelli_identify_6-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.inovelli_identify_6', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify (6)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[multi_endpoint_light][button.inovelli_identify_6-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Inovelli Identify (6)', - }), - 'context': , - 'entity_id': 'button.inovelli_identify_6', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[multi_endpoint_light][button.inovelli_up-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 
'entity_id': 'button.inovelli_up', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Up', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-3-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[multi_endpoint_light][button.inovelli_up-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Inovelli Up', - }), - 'context': , - 'entity_id': 'button.inovelli_up', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[occupancy_sensor][button.mock_occupancy_sensor_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_occupancy_sensor_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[occupancy_sensor][button.mock_occupancy_sensor_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Occupancy Sensor Identify', - }), - 'context': , - 'entity_id': 'button.mock_occupancy_sensor_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[on_off_plugin_unit][button.mock_onoffpluginunit_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_onoffpluginunit_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[on_off_plugin_unit][button.mock_onoffpluginunit_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock OnOffPluginUnit Identify', - }), - 'context': , - 'entity_id': 'button.mock_onoffpluginunit_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[onoff_light][button.mock_onoff_light_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': 
None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_onoff_light_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[onoff_light][button.mock_onoff_light_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock OnOff Light Identify', - }), - 'context': , - 'entity_id': 'button.mock_onoff_light_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[onoff_light_alt_name][button.mock_onoff_light_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_onoff_light_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[onoff_light_alt_name][button.mock_onoff_light_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock OnOff Light Identify', - }), - 'context': , - 'entity_id': 'button.mock_onoff_light_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[onoff_light_no_name][button.mock_light_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_light_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[onoff_light_no_name][button.mock_light_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Light Identify', - }), - 'context': , - 'entity_id': 'button.mock_light_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[onoff_light_with_levelcontrol_present][button.d215s_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.d215s_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[onoff_light_with_levelcontrol_present][button.d215s_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'D215S Identify', - }), - 'context': , - 'entity_id': 'button.d215s_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[pressure_sensor][button.mock_pressure_sensor_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_pressure_sensor_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[pressure_sensor][button.mock_pressure_sensor_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Pressure Sensor Identify', - }), - 'context': , - 'entity_id': 'button.mock_pressure_sensor_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[room_airconditioner][button.room_airconditioner_identify_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.room_airconditioner_identify_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify (1)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[room_airconditioner][button.room_airconditioner_identify_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Room AirConditioner Identify (1)', - }), - 'context': , - 'entity_id': 'button.room_airconditioner_identify_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[room_airconditioner][button.room_airconditioner_identify_2-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.room_airconditioner_identify_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify (2)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-2-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[room_airconditioner][button.room_airconditioner_identify_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Room AirConditioner Identify (2)', - }), - 'context': , - 'entity_id': 'button.room_airconditioner_identify_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[silabs_dishwasher][button.dishwasher_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.dishwasher_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[silabs_dishwasher][button.dishwasher_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Dishwasher Identify', - }), - 'context': , - 'entity_id': 'button.dishwasher_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[silabs_dishwasher][button.dishwasher_pause-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.dishwasher_pause', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Pause', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'pause', - 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-OperationalStatePauseButton-96-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[silabs_dishwasher][button.dishwasher_pause-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Dishwasher Pause', - }), - 'context': , - 'entity_id': 'button.dishwasher_pause', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: 
test_buttons[silabs_dishwasher][button.dishwasher_start-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.dishwasher_start', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Start', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'start', - 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-OperationalStateStartButton-96-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[silabs_dishwasher][button.dishwasher_start-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Dishwasher Start', - }), - 'context': , - 'entity_id': 'button.dishwasher_start', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[silabs_dishwasher][button.dishwasher_stop-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.dishwasher_stop', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Stop', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'stop', - 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-OperationalStateStopButton-96-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[silabs_dishwasher][button.dishwasher_stop-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Dishwasher Stop', - }), - 'context': , - 'entity_id': 'button.dishwasher_stop', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[smoke_detector][button.smoke_sensor_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.smoke_sensor_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[smoke_detector][button.smoke_sensor_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Smoke sensor Identify', - }), - 'context': , - 'entity_id': 'button.smoke_sensor_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[switch_unit][button.mock_switchunit_identify-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_switchunit_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[switch_unit][button.mock_switchunit_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock SwitchUnit Identify', - }), - 'context': , - 'entity_id': 'button.mock_switchunit_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[temperature_sensor][button.mock_temperature_sensor_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_temperature_sensor_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[temperature_sensor][button.mock_temperature_sensor_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Temperature Sensor Identify', - }), - 'context': , - 'entity_id': 'button.mock_temperature_sensor_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[thermostat][button.longan_link_hvac_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.longan_link_hvac_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[thermostat][button.longan_link_hvac_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Longan link HVAC Identify', - }), - 'context': , - 'entity_id': 'button.longan_link_hvac_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# 
--- -# name: test_buttons[valve][button.valve_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.valve_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[valve][button.valve_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Valve Identify', - }), - 'context': , - 'entity_id': 'button.valve_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[window_covering_full][button.mock_full_window_covering_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_full_window_covering_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[window_covering_full][button.mock_full_window_covering_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Full Window Covering Identify', - }), - 'context': , - 'entity_id': 'button.mock_full_window_covering_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[window_covering_lift][button.mock_lift_window_covering_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_lift_window_covering_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[window_covering_lift][button.mock_lift_window_covering_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Lift Window Covering Identify', - }), - 'context': , - 'entity_id': 
'button.mock_lift_window_covering_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[window_covering_pa_lift][button.longan_link_wncv_da01_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.longan_link_wncv_da01_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[window_covering_pa_lift][button.longan_link_wncv_da01_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Longan link WNCV DA01 Identify', - }), - 'context': , - 'entity_id': 'button.longan_link_wncv_da01_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[window_covering_pa_tilt][button.mock_pa_tilt_window_covering_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_pa_tilt_window_covering_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[window_covering_pa_tilt][button.mock_pa_tilt_window_covering_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock PA Tilt Window Covering Identify', - }), - 'context': , - 'entity_id': 'button.mock_pa_tilt_window_covering_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[window_covering_tilt][button.mock_tilt_window_covering_identify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_tilt_window_covering_identify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Identify', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-IdentifyButton-3-65529', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_buttons[window_covering_tilt][button.mock_tilt_window_covering_identify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'identify', - 'friendly_name': 'Mock Tilt Window Covering Identify', - }), - 'context': , - 'entity_id': 'button.mock_tilt_window_covering_identify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/matter/snapshots/test_climate.ambr b/tests/components/matter/snapshots/test_climate.ambr deleted file mode 100644 index 25f5ca06f62..00000000000 --- a/tests/components/matter/snapshots/test_climate.ambr +++ /dev/null @@ -1,263 +0,0 @@ -# serializer version: 1 -# name: test_climates[air_purifier][climate.air_purifier-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 30.0, - 'min_temp': 5.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.air_purifier', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-5-MatterThermostat-513-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_climates[air_purifier][climate.air_purifier-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 20.0, - 'friendly_name': 'Air Purifier', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 30.0, - 'min_temp': 5.0, - 'supported_features': , - 'temperature': 20.0, - }), - 'context': , - 'entity_id': 'climate.air_purifier', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_climates[eve_thermo][climate.eve_thermo-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 30.0, - 'min_temp': 10.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.eve_thermo', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-1-MatterThermostat-513-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_climates[eve_thermo][climate.eve_thermo-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.0, - 'friendly_name': 'Eve Thermo', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 30.0, - 'min_temp': 10.0, - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.eve_thermo', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- -# name: test_climates[room_airconditioner][climate.room_airconditioner-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 
'hvac_modes': list([ - , - , - , - , - , - , - ]), - 'max_temp': 32.0, - 'min_temp': 16.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.room_airconditioner', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterThermostat-513-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_climates[room_airconditioner][climate.room_airconditioner-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 20.0, - 'friendly_name': 'Room AirConditioner', - 'hvac_modes': list([ - , - , - , - , - , - , - ]), - 'max_temp': 32.0, - 'min_temp': 16.0, - 'supported_features': , - 'temperature': 20.0, - }), - 'context': , - 'entity_id': 'climate.room_airconditioner', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_climates[thermostat][climate.longan_link_hvac-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.longan_link_hvac', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterThermostat-513-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_climates[thermostat][climate.longan_link_hvac-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 28.3, - 'friendly_name': 'Longan link HVAC', - 'hvac_modes': list([ - , - , - , - , - ]), - 'max_temp': 35, - 'min_temp': 7, - 'supported_features': , - 'target_temp_high': None, - 'target_temp_low': None, - 'temperature': None, - }), - 'context': , - 'entity_id': 'climate.longan_link_hvac', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'cool', - }) -# --- diff --git a/tests/components/matter/snapshots/test_cover.ambr b/tests/components/matter/snapshots/test_cover.ambr deleted file mode 100644 index 7d036d35983..00000000000 --- a/tests/components/matter/snapshots/test_cover.ambr +++ /dev/null @@ -1,245 +0,0 @@ -# serializer version: 1 -# name: test_covers[window_covering_full][cover.mock_full_window_covering-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.mock_full_window_covering', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 
'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-MatterCoverPositionAwareLiftAndTilt-258-10', - 'unit_of_measurement': None, - }) -# --- -# name: test_covers[window_covering_full][cover.mock_full_window_covering-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_position': 100, - 'current_tilt_position': 100, - 'device_class': 'awning', - 'friendly_name': 'Mock Full Window Covering', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.mock_full_window_covering', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_covers[window_covering_lift][cover.mock_lift_window_covering-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.mock_lift_window_covering', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-MatterCover-258-10', - 'unit_of_measurement': None, - }) -# --- -# name: test_covers[window_covering_lift][cover.mock_lift_window_covering-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'awning', - 'friendly_name': 'Mock Lift Window Covering', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.mock_lift_window_covering', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_covers[window_covering_pa_lift][cover.longan_link_wncv_da01-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.longan_link_wncv_da01', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterCoverPositionAwareLift-258-10', - 'unit_of_measurement': None, - }) -# --- -# name: test_covers[window_covering_pa_lift][cover.longan_link_wncv_da01-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_position': 51, - 'device_class': 'awning', - 'friendly_name': 'Longan link WNCV DA01', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.longan_link_wncv_da01', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_covers[window_covering_pa_tilt][cover.mock_pa_tilt_window_covering-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 
'cover.mock_pa_tilt_window_covering', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-MatterCoverPositionAwareTilt-258-10', - 'unit_of_measurement': None, - }) -# --- -# name: test_covers[window_covering_pa_tilt][cover.mock_pa_tilt_window_covering-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_tilt_position': 100, - 'device_class': 'awning', - 'friendly_name': 'Mock PA Tilt Window Covering', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.mock_pa_tilt_window_covering', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_covers[window_covering_tilt][cover.mock_tilt_window_covering-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.mock_tilt_window_covering', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-MatterCover-258-10', - 'unit_of_measurement': None, - }) -# --- -# name: test_covers[window_covering_tilt][cover.mock_tilt_window_covering-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'awning', - 'friendly_name': 'Mock Tilt Window Covering', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.mock_tilt_window_covering', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/matter/snapshots/test_event.ambr b/tests/components/matter/snapshots/test_event.ambr deleted file mode 100644 index 031e8e9d24f..00000000000 --- a/tests/components/matter/snapshots/test_event.ambr +++ /dev/null @@ -1,385 +0,0 @@ -# serializer version: 1 -# name: test_events[generic_switch][event.mock_generic_switch_button-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'event_types': list([ - 'initial_press', - 'short_release', - 'long_press', - 'long_release', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'event', - 'entity_category': None, - 'entity_id': 'event.mock_generic_switch_button', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Button', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'button', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-GenericSwitch-59-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_events[generic_switch][event.mock_generic_switch_button-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 
'button', - 'event_type': None, - 'event_types': list([ - 'initial_press', - 'short_release', - 'long_press', - 'long_release', - ]), - 'friendly_name': 'Mock Generic Switch Button', - }), - 'context': , - 'entity_id': 'event.mock_generic_switch_button', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_events[generic_switch_multi][event.mock_generic_switch_button_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'event_types': list([ - 'multi_press_1', - 'multi_press_2', - 'long_press', - 'long_release', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'event', - 'entity_category': None, - 'entity_id': 'event.mock_generic_switch_button_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Button (1)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'button', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-GenericSwitch-59-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_events[generic_switch_multi][event.mock_generic_switch_button_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'button', - 'event_type': None, - 'event_types': list([ - 'multi_press_1', - 'multi_press_2', - 'long_press', - 'long_release', - ]), - 'friendly_name': 'Mock Generic Switch Button (1)', - }), - 'context': , - 'entity_id': 'event.mock_generic_switch_button_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_events[generic_switch_multi][event.mock_generic_switch_fancy_button-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'event_types': list([ - 'multi_press_1', - 'multi_press_2', - 'long_press', - 'long_release', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'event', - 'entity_category': None, - 'entity_id': 'event.mock_generic_switch_fancy_button', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Fancy Button', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'button', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-2-GenericSwitch-59-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_events[generic_switch_multi][event.mock_generic_switch_fancy_button-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'button', - 'event_type': None, - 'event_types': list([ - 'multi_press_1', - 'multi_press_2', - 'long_press', - 'long_release', - ]), - 'friendly_name': 'Mock Generic Switch Fancy Button', - }), - 'context': , - 'entity_id': 'event.mock_generic_switch_fancy_button', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_events[multi_endpoint_light][event.inovelli_config-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'event_types': list([ - 'multi_press_1', - 'multi_press_2', - 'multi_press_3', - 
'multi_press_4', - 'multi_press_5', - 'long_press', - 'long_release', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'event', - 'entity_category': None, - 'entity_id': 'event.inovelli_config', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Config', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'button', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-5-GenericSwitch-59-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_events[multi_endpoint_light][event.inovelli_config-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'button', - 'event_type': None, - 'event_types': list([ - 'multi_press_1', - 'multi_press_2', - 'multi_press_3', - 'multi_press_4', - 'multi_press_5', - 'long_press', - 'long_release', - ]), - 'friendly_name': 'Inovelli Config', - }), - 'context': , - 'entity_id': 'event.inovelli_config', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_events[multi_endpoint_light][event.inovelli_down-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'event_types': list([ - 'multi_press_1', - 'multi_press_2', - 'multi_press_3', - 'multi_press_4', - 'multi_press_5', - 'long_press', - 'long_release', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'event', - 'entity_category': None, - 'entity_id': 'event.inovelli_down', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Down', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'button', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-4-GenericSwitch-59-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_events[multi_endpoint_light][event.inovelli_down-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'button', - 'event_type': None, - 'event_types': list([ - 'multi_press_1', - 'multi_press_2', - 'multi_press_3', - 'multi_press_4', - 'multi_press_5', - 'long_press', - 'long_release', - ]), - 'friendly_name': 'Inovelli Down', - }), - 'context': , - 'entity_id': 'event.inovelli_down', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_events[multi_endpoint_light][event.inovelli_up-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'event_types': list([ - 'multi_press_1', - 'multi_press_2', - 'multi_press_3', - 'multi_press_4', - 'multi_press_5', - 'long_press', - 'long_release', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'event', - 'entity_category': None, - 'entity_id': 'event.inovelli_up', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Up', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'button', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-3-GenericSwitch-59-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_events[multi_endpoint_light][event.inovelli_up-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'button', - 'event_type': None, - 'event_types': list([ - 'multi_press_1', - 'multi_press_2', - 'multi_press_3', - 'multi_press_4', - 'multi_press_5', - 'long_press', - 'long_release', - ]), - 'friendly_name': 'Inovelli Up', - }), - 'context': , - 'entity_id': 'event.inovelli_up', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/matter/snapshots/test_fan.ambr b/tests/components/matter/snapshots/test_fan.ambr deleted file mode 100644 index 7f1fe7d42db..00000000000 --- a/tests/components/matter/snapshots/test_fan.ambr +++ /dev/null @@ -1,263 +0,0 @@ -# serializer version: 1 -# name: test_fans[air_purifier][fan.air_purifier-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'preset_modes': list([ - 'low', - 'medium', - 'high', - 'auto', - 'natural_wind', - 'sleep_wind', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'fan', - 'entity_category': None, - 'entity_id': 'fan.air_purifier', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-MatterFan-514-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_fans[air_purifier][fan.air_purifier-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'direction': 'forward', - 'friendly_name': 'Air Purifier', - 'oscillating': False, - 'percentage': None, - 'percentage_step': 10.0, - 'preset_mode': 'auto', - 'preset_modes': list([ - 'low', - 'medium', - 'high', - 'auto', - 'natural_wind', - 'sleep_wind', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'fan.air_purifier', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_fans[fan][fan.mocked_fan_switch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'preset_modes': list([ - 'low', - 'medium', - 'high', - 'auto', - 'natural_wind', - 'sleep_wind', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'fan', - 'entity_category': None, - 'entity_id': 'fan.mocked_fan_switch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-MatterFan-514-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_fans[fan][fan.mocked_fan_switch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mocked Fan Switch', - 'percentage': 0, - 'percentage_step': 33.333333333333336, - 'preset_mode': None, - 'preset_modes': list([ - 'low', - 
'medium', - 'high', - 'auto', - 'natural_wind', - 'sleep_wind', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'fan.mocked_fan_switch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_fans[room_airconditioner][fan.room_airconditioner-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'preset_modes': list([ - 'low', - 'medium', - 'high', - 'auto', - 'sleep_wind', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'fan', - 'entity_category': None, - 'entity_id': 'fan.room_airconditioner', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterFan-514-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_fans[room_airconditioner][fan.room_airconditioner-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Room AirConditioner', - 'percentage': 0, - 'percentage_step': 33.333333333333336, - 'preset_mode': None, - 'preset_modes': list([ - 'low', - 'medium', - 'high', - 'auto', - 'sleep_wind', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'fan.room_airconditioner', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_fans[thermostat][fan.longan_link_hvac-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'preset_modes': list([ - 'low', - 'medium', - 'high', - 'auto', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'fan', - 'entity_category': None, - 'entity_id': 'fan.longan_link_hvac', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterFan-514-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_fans[thermostat][fan.longan_link_hvac-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Longan link HVAC', - 'preset_mode': None, - 'preset_modes': list([ - 'low', - 'medium', - 'high', - 'auto', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'fan.longan_link_hvac', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/matter/snapshots/test_light.ambr b/tests/components/matter/snapshots/test_light.ambr deleted file mode 100644 index 68c1b7dca74..00000000000 --- a/tests/components/matter/snapshots/test_light.ambr +++ /dev/null @@ -1,660 +0,0 @@ -# serializer version: 1 -# name: test_lights[color_temperature_light][light.mock_color_temperature_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 
'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.mock_color_temperature_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[color_temperature_light][light.mock_color_temperature_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 128, - 'color_mode': , - 'color_temp': 284, - 'color_temp_kelvin': 3521, - 'friendly_name': 'Mock Color Temperature Light', - 'hs_color': tuple( - 27.152, - 44.32, - ), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 193, - 141, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.453, - 0.374, - ), - }), - 'context': , - 'entity_id': 'light.mock_color_temperature_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_lights[dimmable_light][light.mock_dimmable_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.mock_dimmable_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[dimmable_light][light.mock_dimmable_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 51, - 'color_mode': , - 'friendly_name': 'Mock Dimmable Light', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.mock_dimmable_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_lights[dimmable_plugin_unit][light.dimmable_plugin_unit-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.dimmable_plugin_unit', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 
'00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterLight-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[dimmable_plugin_unit][light.dimmable_plugin_unit-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 255, - 'color_mode': , - 'friendly_name': 'Dimmable Plugin Unit', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.dimmable_plugin_unit', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_lights[extended_color_light][light.mock_extended_color_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.mock_extended_color_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[extended_color_light][light.mock_extended_color_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 128, - 'color_mode': , - 'color_temp': None, - 'color_temp_kelvin': None, - 'friendly_name': 'Mock Extended Color Light', - 'hs_color': tuple( - 51.024, - 20.079, - ), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 247, - 203, - ), - 'supported_color_modes': list([ - , - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.363, - 0.374, - ), - }), - 'context': , - 'entity_id': 'light.mock_extended_color_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_lights[multi_endpoint_light][light.inovelli_light_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.inovelli_light_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light (1)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'light', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-MatterLight-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[multi_endpoint_light][light.inovelli_light_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': None, - 'friendly_name': 'Inovelli Light (1)', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.inovelli_light_1', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': 'off', - }) -# --- -# name: test_lights[multi_endpoint_light][light.inovelli_light_6-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.inovelli_light_6', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light (6)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'light', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-MatterLight-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[multi_endpoint_light][light.inovelli_light_6-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': None, - 'color_temp': None, - 'color_temp_kelvin': None, - 'friendly_name': 'Inovelli Light (6)', - 'hs_color': None, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': None, - 'supported_color_modes': list([ - , - , - , - ]), - 'supported_features': , - 'xy_color': None, - }), - 'context': , - 'entity_id': 'light.inovelli_light_6', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_lights[onoff_light][light.mock_onoff_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.mock_onoff_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[onoff_light][light.mock_onoff_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'color_mode': , - 'friendly_name': 'Mock OnOff Light', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.mock_onoff_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_lights[onoff_light_alt_name][light.mock_onoff_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.mock_onoff_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[onoff_light_alt_name][light.mock_onoff_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': , - 'color_temp': None, - 'color_temp_kelvin': None, - 'friendly_name': 'Mock OnOff Light', - 'hs_color': None, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': None, - 'supported_color_modes': list([ - , - , - , - ]), - 'supported_features': , - 'xy_color': None, - }), - 'context': , - 'entity_id': 'light.mock_onoff_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_lights[onoff_light_no_name][light.mock_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.mock_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[onoff_light_no_name][light.mock_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': , - 'color_temp': None, - 'color_temp_kelvin': None, - 'friendly_name': 'Mock Light', - 'hs_color': None, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': None, - 'supported_color_modes': list([ - , - , - , - ]), - 'supported_features': , - 'xy_color': None, - }), - 'context': , - 'entity_id': 'light.mock_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_lights[onoff_light_with_levelcontrol_present][light.d215s-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.d215s', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-MatterLight-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[onoff_light_with_levelcontrol_present][light.d215s-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'color_mode': None, - 'friendly_name': 'D215S', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.d215s', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/matter/snapshots/test_lock.ambr b/tests/components/matter/snapshots/test_lock.ambr deleted file mode 100644 index bf34ac267d7..00000000000 --- a/tests/components/matter/snapshots/test_lock.ambr +++ /dev/null @@ -1,95 +0,0 @@ -# serializer version: 1 -# name: test_locks[door_lock][lock.mock_door_lock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'lock', - 'entity_category': None, - 'entity_id': 'lock.mock_door_lock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLock-257-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_locks[door_lock][lock.mock_door_lock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Door Lock', - 'supported_features': , - }), - 'context': , - 'entity_id': 'lock.mock_door_lock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unlocked', - }) -# --- -# name: test_locks[door_lock_with_unbolt][lock.mock_door_lock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'lock', - 'entity_category': None, - 'entity_id': 'lock.mock_door_lock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLock-257-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_locks[door_lock_with_unbolt][lock.mock_door_lock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Door Lock', - 'supported_features': , - }), - 'context': , - 'entity_id': 'lock.mock_door_lock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'locked', - }) -# --- diff --git a/tests/components/matter/snapshots/test_number.ambr b/tests/components/matter/snapshots/test_number.ambr deleted file mode 100644 index 9d51bb92e51..00000000000 --- a/tests/components/matter/snapshots/test_number.ambr +++ /dev/null @@ -1,1560 +0,0 @@ -# serializer version: 1 -# name: test_numbers[color_temperature_light][number.mock_color_temperature_light_on_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 
'number.mock_color_temperature_light_on_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On level', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_level', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', - 'unit_of_measurement': None, - }) -# --- -# name: test_numbers[color_temperature_light][number.mock_color_temperature_light_on_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Color Temperature Light On level', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.mock_color_temperature_light_on_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '255', - }) -# --- -# name: test_numbers[dimmable_light][number.mock_dimmable_light_off_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_dimmable_light_off_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'off_transition_time', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-off_transition_time-8-19', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[dimmable_light][number.mock_dimmable_light_off_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Dimmable Light Off transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.mock_dimmable_light_off_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_dimmable_light_on_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On level', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_level', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', - 'unit_of_measurement': None, - }) -# --- -# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Dimmable Light On level', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 
'number.mock_dimmable_light_on_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '255', - }) -# --- -# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_off_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_dimmable_light_on_off_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On/Off transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_off_transition_time', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_off_transition_time-8-16', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_off_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Dimmable Light On/Off transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.mock_dimmable_light_on_off_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_dimmable_light_on_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_transition_time', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_transition_time-8-18', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Dimmable Light On transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.mock_dimmable_light_on_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_numbers[dimmable_plugin_unit][number.dimmable_plugin_unit_on_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.dimmable_plugin_unit_on_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'On level', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_level', - 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-on_level-8-17', - 'unit_of_measurement': None, - }) -# --- -# name: test_numbers[dimmable_plugin_unit][number.dimmable_plugin_unit_on_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Dimmable Plugin Unit On level', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.dimmable_plugin_unit_on_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '255', - }) -# --- -# name: test_numbers[dimmable_plugin_unit][number.dimmable_plugin_unit_on_off_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.dimmable_plugin_unit_on_off_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On/Off transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_off_transition_time', - 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-on_off_transition_time-8-16', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[dimmable_plugin_unit][number.dimmable_plugin_unit_on_off_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Dimmable Plugin Unit On/Off transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.dimmable_plugin_unit_on_off_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.0', - }) -# --- -# name: test_numbers[eve_weather_sensor][number.eve_weather_altitude_above_sea_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 9000, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.eve_weather_altitude_above_sea_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Altitude above Sea Level', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'altitude', - 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-EveWeatherAltitude-319486977-319422483', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[eve_weather_sensor][number.eve_weather_altitude_above_sea_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'distance', - 'friendly_name': 'Eve Weather Altitude above Sea Level', - 'max': 9000, - 'min': 0, - 'mode': , - 'step': 1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.eve_weather_altitude_above_sea_level', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': '40.0', - }) -# --- -# name: test_numbers[extended_color_light][number.mock_extended_color_light_on_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_extended_color_light_on_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On level', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_level', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', - 'unit_of_measurement': None, - }) -# --- -# name: test_numbers[extended_color_light][number.mock_extended_color_light_on_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Extended Color Light On level', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.mock_extended_color_light_on_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '255', - }) -# --- -# name: test_numbers[multi_endpoint_light][number.inovelli_off_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.inovelli_off_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'off_transition_time', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-off_transition_time-8-19', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[multi_endpoint_light][number.inovelli_off_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Inovelli Off transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.inovelli_off_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.5', - }) -# --- -# name: test_numbers[multi_endpoint_light][number.inovelli_on_level_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.inovelli_on_level_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On level (1)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_level', - 'unique_id': 
'00000000000004D2-00000000000000C5-MatterNodeDevice-1-on_level-8-17', - 'unit_of_measurement': None, - }) -# --- -# name: test_numbers[multi_endpoint_light][number.inovelli_on_level_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Inovelli On level (1)', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.inovelli_on_level_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '137', - }) -# --- -# name: test_numbers[multi_endpoint_light][number.inovelli_on_level_6-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.inovelli_on_level_6', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On level (6)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_level', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-on_level-8-17', - 'unit_of_measurement': None, - }) -# --- -# name: test_numbers[multi_endpoint_light][number.inovelli_on_level_6-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Inovelli On level (6)', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.inovelli_on_level_6', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '254', - }) -# --- -# name: test_numbers[multi_endpoint_light][number.inovelli_on_off_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.inovelli_on_off_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On/Off transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_off_transition_time', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-on_off_transition_time-8-16', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[multi_endpoint_light][number.inovelli_on_off_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Inovelli On/Off transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.inovelli_on_off_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.5', - }) -# --- -# name: test_numbers[multi_endpoint_light][number.inovelli_on_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 
'number.inovelli_on_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_transition_time', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-on_transition_time-8-18', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[multi_endpoint_light][number.inovelli_on_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Inovelli On transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.inovelli_on_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.5', - }) -# --- -# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_off_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_onoffpluginunit_off_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'off_transition_time', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-off_transition_time-8-19', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_off_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock OnOffPluginUnit Off transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.mock_onoffpluginunit_off_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_onoffpluginunit_on_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On level', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_level', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', - 'unit_of_measurement': None, - }) -# --- -# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock OnOffPluginUnit On level', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , 
- 'entity_id': 'number.mock_onoffpluginunit_on_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '255', - }) -# --- -# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_off_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_onoffpluginunit_on_off_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On/Off transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_off_transition_time', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_off_transition_time-8-16', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_off_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock OnOffPluginUnit On/Off transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.mock_onoffpluginunit_on_off_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_onoffpluginunit_on_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_transition_time', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_transition_time-8-18', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock OnOffPluginUnit On transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.mock_onoffpluginunit_on_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_off_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_onoff_light_off_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': 
set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'off_transition_time', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-off_transition_time-8-19', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_off_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock OnOff Light Off transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.mock_onoff_light_off_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_onoff_light_on_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On level', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_level', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', - 'unit_of_measurement': None, - }) -# --- -# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock OnOff Light On level', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.mock_onoff_light_on_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '255', - }) -# --- -# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_off_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_onoff_light_on_off_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On/Off transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_off_transition_time', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_off_transition_time-8-16', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_off_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock OnOff Light On/Off transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.mock_onoff_light_on_off_transition_time', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_onoff_light_on_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_transition_time', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_transition_time-8-18', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock OnOff Light On transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.mock_onoff_light_on_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_numbers[onoff_light_no_name][number.mock_light_off_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_light_off_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'off_transition_time', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-off_transition_time-8-19', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[onoff_light_no_name][number.mock_light_off_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Light Off transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.mock_light_off_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_numbers[onoff_light_no_name][number.mock_light_on_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_light_on_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On level', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'on_level', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', - 'unit_of_measurement': None, - }) -# --- -# name: test_numbers[onoff_light_no_name][number.mock_light_on_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Light On level', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.mock_light_on_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '255', - }) -# --- -# name: test_numbers[onoff_light_no_name][number.mock_light_on_off_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_light_on_off_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On/Off transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_off_transition_time', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_off_transition_time-8-16', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[onoff_light_no_name][number.mock_light_on_off_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Light On/Off transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.mock_light_on_off_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_numbers[onoff_light_no_name][number.mock_light_on_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.mock_light_on_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_transition_time', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_transition_time-8-18', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[onoff_light_no_name][number.mock_light_on_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Light On transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.mock_light_on_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_numbers[onoff_light_with_levelcontrol_present][number.d215s_on_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), 
- 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.d215s_on_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On level', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_level', - 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-on_level-8-17', - 'unit_of_measurement': None, - }) -# --- -# name: test_numbers[onoff_light_with_levelcontrol_present][number.d215s_on_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'D215S On level', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.d215s_on_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '255', - }) -# --- -# name: test_numbers[onoff_light_with_levelcontrol_present][number.d215s_on_off_transition_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.d215s_on_off_transition_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'On/Off transition time', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'on_off_transition_time', - 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-on_off_transition_time-8-16', - 'unit_of_measurement': , - }) -# --- -# name: test_numbers[onoff_light_with_levelcontrol_present][number.d215s_on_off_transition_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'D215S On/Off transition time', - 'max': 65534, - 'min': 0, - 'mode': , - 'step': 0.1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.d215s_on_off_transition_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- diff --git a/tests/components/matter/snapshots/test_select.ambr b/tests/components/matter/snapshots/test_select.ambr deleted file mode 100644 index 663b0cdaf51..00000000000 --- a/tests/components/matter/snapshots/test_select.ambr +++ /dev/null @@ -1,1636 +0,0 @@ -# serializer version: 1 -# name: test_selects[color_temperature_light][select.mock_color_temperature_light_lighting-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Dark', - 'Medium', - 'Light', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.mock_color_temperature_light_lighting', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Lighting', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': 
'00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterModeSelect-80-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[color_temperature_light][select.mock_color_temperature_light_lighting-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Color Temperature Light Lighting', - 'options': list([ - 'Dark', - 'Medium', - 'Light', - ]), - }), - 'context': , - 'entity_id': 'select.mock_color_temperature_light_lighting', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Dark', - }) -# --- -# name: test_selects[color_temperature_light][select.mock_color_temperature_light_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.mock_color_temperature_light_power_on_behavior_on_startup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[color_temperature_light][select.mock_color_temperature_light_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Color Temperature Light Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.mock_color_temperature_light_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'previous', - }) -# --- -# name: test_selects[dimmable_light][select.mock_dimmable_light_led_color-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Red', - 'Orange', - 'Lemon', - 'Lime', - 'Green', - 'Teal', - 'Cyan', - 'Aqua', - 'Blue', - 'Violet', - 'Magenta', - 'Pink', - 'White', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.mock_dimmable_light_led_color', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'LED Color', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-6-MatterModeSelect-80-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[dimmable_light][select.mock_dimmable_light_led_color-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Dimmable Light LED Color', - 'options': list([ - 'Red', - 'Orange', - 'Lemon', - 'Lime', - 'Green', - 'Teal', - 'Cyan', - 'Aqua', - 'Blue', - 'Violet', - 'Magenta', - 'Pink', - 'White', - ]), - }), - 'context': , - 'entity_id': 'select.mock_dimmable_light_led_color', - 
'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Aqua', - }) -# --- -# name: test_selects[dimmable_light][select.mock_dimmable_light_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.mock_dimmable_light_power_on_behavior_on_startup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[dimmable_light][select.mock_dimmable_light_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Dimmable Light Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.mock_dimmable_light_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'previous', - }) -# --- -# name: test_selects[dimmable_plugin_unit][select.dimmable_plugin_unit_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.dimmable_plugin_unit_power_on_behavior_on_startup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[dimmable_plugin_unit][select.dimmable_plugin_unit_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Dimmable Plugin Unit Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.dimmable_plugin_unit_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'previous', - }) -# --- -# name: test_selects[door_lock][select.mock_door_lock_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.mock_door_lock_power_on_behavior_on_startup', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[door_lock][select.mock_door_lock_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Door Lock Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.mock_door_lock_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_selects[door_lock_with_unbolt][select.mock_door_lock_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.mock_door_lock_power_on_behavior_on_startup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[door_lock_with_unbolt][select.mock_door_lock_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Door Lock Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.mock_door_lock_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_selects[eve_energy_plug][select.eve_energy_plug_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.eve_energy_plug_power_on_behavior_on_startup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[eve_energy_plug][select.eve_energy_plug_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Eve 
Energy Plug Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.eve_energy_plug_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'previous', - }) -# --- -# name: test_selects[eve_energy_plug_patched][select.eve_energy_plug_patched_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.eve_energy_plug_patched_power_on_behavior_on_startup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[eve_energy_plug_patched][select.eve_energy_plug_patched_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Eve Energy Plug Patched Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.eve_energy_plug_patched_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'previous', - }) -# --- -# name: test_selects[extended_color_light][select.mock_extended_color_light_lighting-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Dark', - 'Medium', - 'Light', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.mock_extended_color_light_lighting', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Lighting', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterModeSelect-80-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[extended_color_light][select.mock_extended_color_light_lighting-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Extended Color Light Lighting', - 'options': list([ - 'Dark', - 'Medium', - 'Light', - ]), - }), - 'context': , - 'entity_id': 'select.mock_extended_color_light_lighting', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Dark', - }) -# --- -# name: test_selects[extended_color_light][select.mock_extended_color_light_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 
'domain': 'select', - 'entity_category': , - 'entity_id': 'select.mock_extended_color_light_power_on_behavior_on_startup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[extended_color_light][select.mock_extended_color_light_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Extended Color Light Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.mock_extended_color_light_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'previous', - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_dimming_edge-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Leading', - 'Trailing', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.inovelli_dimming_edge', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dimming Edge', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-3-MatterModeSelect-80-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_dimming_edge-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Inovelli Dimming Edge', - 'options': list([ - 'Leading', - 'Trailing', - ]), - }), - 'context': , - 'entity_id': 'select.inovelli_dimming_edge', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Leading', - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_dimming_speed-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Instant', - '500ms', - '800ms', - '1s', - '1.5s', - '2s', - '2.5s', - '3s', - '3.5s', - '4s', - '5s', - '6s', - '7s', - '8s', - '10s', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.inovelli_dimming_speed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dimming Speed', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-4-MatterModeSelect-80-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_dimming_speed-state] - StateSnapshot({ - 'attributes': 
ReadOnlyDict({ - 'friendly_name': 'Inovelli Dimming Speed', - 'options': list([ - 'Instant', - '500ms', - '800ms', - '1s', - '1.5s', - '2s', - '2.5s', - '3s', - '3.5s', - '4s', - '5s', - '6s', - '7s', - '8s', - '10s', - ]), - }), - 'context': , - 'entity_id': 'select.inovelli_dimming_speed', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2s', - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_led_color-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Red', - 'Orange', - 'Lemon', - 'Lime', - 'Green', - 'Teal', - 'Cyan', - 'Aqua', - 'Blue', - 'Violet', - 'Magenta', - 'Pink', - 'White', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.inovelli_led_color', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'LED Color', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-MatterModeSelect-80-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_led_color-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Inovelli LED Color', - 'options': list([ - 'Red', - 'Orange', - 'Lemon', - 'Lime', - 'Green', - 'Teal', - 'Cyan', - 'Aqua', - 'Blue', - 'Violet', - 'Magenta', - 'Pink', - 'White', - ]), - }), - 'context': , - 'entity_id': 'select.inovelli_led_color', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Lemon', - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_power_on_behavior_on_startup_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.inovelli_power_on_behavior_on_startup_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup (1)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_power_on_behavior_on_startup_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Inovelli Power-on behavior on startup (1)', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.inovelli_power_on_behavior_on_startup_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'previous', - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_power_on_behavior_on_startup_6-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 
'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.inovelli_power_on_behavior_on_startup_6', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup (6)', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_power_on_behavior_on_startup_6-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Inovelli Power-on behavior on startup (6)', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.inovelli_power_on_behavior_on_startup_6', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_relay-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Relay Click Enable', - 'Relay Click Disable', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.inovelli_relay', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Relay', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-5-MatterModeSelect-80-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_relay-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Inovelli Relay', - 'options': list([ - 'Relay Click Enable', - 'Relay Click Disable', - ]), - }), - 'context': , - 'entity_id': 'select.inovelli_relay', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Relay Click Disable', - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_smart_bulb_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Smart Bulb Disable', - 'Smart Bulb Enable', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.inovelli_smart_bulb_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Smart Bulb Mode', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-2-MatterModeSelect-80-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_smart_bulb_mode-state] - StateSnapshot({ - 
'attributes': ReadOnlyDict({ - 'friendly_name': 'Inovelli Smart Bulb Mode', - 'options': list([ - 'Smart Bulb Disable', - 'Smart Bulb Enable', - ]), - }), - 'context': , - 'entity_id': 'select.inovelli_smart_bulb_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Smart Bulb Disable', - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_switch_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'OnOff+Single', - 'OnOff+Dumb', - 'OnOff+AUX', - 'OnOff+Full Wave', - 'Dimmer+Single', - 'Dimmer+Dumb', - 'Dimmer+Aux', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.inovelli_switch_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Switch Mode', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-MatterModeSelect-80-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[multi_endpoint_light][select.inovelli_switch_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Inovelli Switch Mode', - 'options': list([ - 'OnOff+Single', - 'OnOff+Dumb', - 'OnOff+AUX', - 'OnOff+Full Wave', - 'Dimmer+Single', - 'Dimmer+Dumb', - 'Dimmer+Aux', - ]), - }), - 'context': , - 'entity_id': 'select.inovelli_switch_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Dimmer+Single', - }) -# --- -# name: test_selects[on_off_plugin_unit][select.mock_onoffpluginunit_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.mock_onoffpluginunit_power_on_behavior_on_startup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[on_off_plugin_unit][select.mock_onoffpluginunit_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock OnOffPluginUnit Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.mock_onoffpluginunit_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'previous', - }) -# --- -# name: test_selects[onoff_light][select.mock_onoff_light_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), 
- }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.mock_onoff_light_power_on_behavior_on_startup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[onoff_light][select.mock_onoff_light_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock OnOff Light Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.mock_onoff_light_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'previous', - }) -# --- -# name: test_selects[onoff_light_alt_name][select.mock_onoff_light_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.mock_onoff_light_power_on_behavior_on_startup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[onoff_light_alt_name][select.mock_onoff_light_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock OnOff Light Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.mock_onoff_light_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'previous', - }) -# --- -# name: test_selects[onoff_light_no_name][select.mock_light_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.mock_light_power_on_behavior_on_startup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': 
'00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[onoff_light_no_name][select.mock_light_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Light Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.mock_light_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'previous', - }) -# --- -# name: test_selects[onoff_light_with_levelcontrol_present][select.d215s_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.d215s_power_on_behavior_on_startup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[onoff_light_with_levelcontrol_present][select.d215s_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'D215S Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.d215s_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'previous', - }) -# --- -# name: test_selects[silabs_dishwasher][select.dishwasher_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.dishwasher_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Mode', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-MatterDishwasherMode-89-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[silabs_dishwasher][select.dishwasher_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Dishwasher Mode', - 'options': list([ - ]), - }), - 'context': , - 'entity_id': 'select.dishwasher_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_selects[switch_unit][select.mock_switchunit_power_on_behavior_on_startup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 
'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': , - 'entity_id': 'select.mock_switchunit_power_on_behavior_on_startup', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Power-on behavior on startup', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'startup_on_off', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[switch_unit][select.mock_switchunit_power_on_behavior_on_startup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock SwitchUnit Power-on behavior on startup', - 'options': list([ - 'on', - 'off', - 'toggle', - 'previous', - ]), - }), - 'context': , - 'entity_id': 'select.mock_switchunit_power_on_behavior_on_startup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'previous', - }) -# --- -# name: test_selects[vacuum_cleaner][select.mock_vacuum_clean_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Quick', - 'Auto', - 'Deep Clean', - 'Quiet', - 'Max Vac', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.mock_vacuum_clean_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Clean mode', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'clean_mode', - 'unique_id': '00000000000004D2-0000000000000042-MatterNodeDevice-1-MatterRvcCleanMode-85-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_selects[vacuum_cleaner][select.mock_vacuum_clean_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Vacuum Clean mode', - 'options': list([ - 'Quick', - 'Auto', - 'Deep Clean', - 'Quiet', - 'Max Vac', - ]), - }), - 'context': , - 'entity_id': 'select.mock_vacuum_clean_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Quick', - }) -# --- diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr deleted file mode 100644 index 96346b906c3..00000000000 --- a/tests/components/matter/snapshots/test_sensor.ambr +++ /dev/null @@ -1,2833 +0,0 @@ -# serializer version: 1 -# name: test_sensors[air_purifier][sensor.air_purifier_activated_carbon_filter_condition-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.air_purifier_activated_carbon_filter_condition', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Activated carbon filter condition', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'activated_carbon_filter_condition', - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-ActivatedCarbonFilterCondition-114-0', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_activated_carbon_filter_condition-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Air Purifier Activated carbon filter condition', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.air_purifier_activated_carbon_filter_condition', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_air_quality-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'extremely_poor', - 'very_poor', - 'poor', - 'fair', - 'good', - 'moderate', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.air_purifier_air_quality', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Air quality', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'air_quality', - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-AirQuality-91-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_air_quality-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Air Purifier Air quality', - 'options': list([ - 'extremely_poor', - 'very_poor', - 'poor', - 'fair', - 'good', - 'moderate', - ]), - }), - 'context': , - 'entity_id': 'sensor.air_purifier_air_quality', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'good', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_carbon_dioxide-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.air_purifier_carbon_dioxide', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Carbon dioxide', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-CarbonDioxideSensor-1037-0', - 'unit_of_measurement': 'ppm', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_carbon_dioxide-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'carbon_dioxide', - 'friendly_name': 'Air Purifier Carbon dioxide', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'context': , - 'entity_id': 'sensor.air_purifier_carbon_dioxide', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_carbon_monoxide-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 
'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.air_purifier_carbon_monoxide', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Carbon monoxide', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-CarbonMonoxideSensor-1036-0', - 'unit_of_measurement': 'ppm', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_carbon_monoxide-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'carbon_monoxide', - 'friendly_name': 'Air Purifier Carbon monoxide', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'context': , - 'entity_id': 'sensor.air_purifier_carbon_monoxide', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_hepa_filter_condition-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.air_purifier_hepa_filter_condition', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hepa filter condition', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'hepa_filter_condition', - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-HepaFilterCondition-113-0', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_hepa_filter_condition-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Air Purifier Hepa filter condition', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.air_purifier_hepa_filter_condition', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_humidity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.air_purifier_humidity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Humidity', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-4-HumiditySensor-1029-0', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_humidity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'humidity', - 'friendly_name': 'Air Purifier Humidity', - 'state_class': , - 'unit_of_measurement': '%', - }), 
- 'context': , - 'entity_id': 'sensor.air_purifier_humidity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.0', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_nitrogen_dioxide-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.air_purifier_nitrogen_dioxide', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Nitrogen dioxide', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-NitrogenDioxideSensor-1043-0', - 'unit_of_measurement': 'ppm', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_nitrogen_dioxide-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'nitrogen_dioxide', - 'friendly_name': 'Air Purifier Nitrogen dioxide', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'context': , - 'entity_id': 'sensor.air_purifier_nitrogen_dioxide', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_ozone-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.air_purifier_ozone', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Ozone', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-OzoneConcentrationSensor-1045-0', - 'unit_of_measurement': 'ppm', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_ozone-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'ozone', - 'friendly_name': 'Air Purifier Ozone', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'context': , - 'entity_id': 'sensor.air_purifier_ozone', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_pm1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.air_purifier_pm1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PM1', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-PM1Sensor-1068-0', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# 
name: test_sensors[air_purifier][sensor.air_purifier_pm1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pm1', - 'friendly_name': 'Air Purifier PM1', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.air_purifier_pm1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_pm10-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.air_purifier_pm10', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PM10', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-PM10Sensor-1069-0', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_pm10-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pm10', - 'friendly_name': 'Air Purifier PM10', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.air_purifier_pm10', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_pm2_5-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.air_purifier_pm2_5', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PM2.5', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-PM25Sensor-1066-0', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_pm2_5-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pm25', - 'friendly_name': 'Air Purifier PM2.5', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.air_purifier_pm2_5', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.air_purifier_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-3-TemperatureSensor-1026-0', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Air Purifier Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.air_purifier_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20.0', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_vocs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.air_purifier_vocs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VOCs', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-TotalVolatileOrganicCompoundsSensor-1070-0', - 'unit_of_measurement': 'ppm', - }) -# --- -# name: test_sensors[air_purifier][sensor.air_purifier_vocs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'volatile_organic_compounds_parts', - 'friendly_name': 'Air Purifier VOCs', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'context': , - 'entity_id': 'sensor.air_purifier_vocs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_air_quality-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'extremely_poor', - 'very_poor', - 'poor', - 'fair', - 'good', - 'moderate', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_air_quality', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Air quality', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'air_quality', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-AirQuality-91-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_air_quality-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'lightfi-aq1-air-quality-sensor Air quality', - 'options': list([ - 'extremely_poor', - 'very_poor', - 'poor', - 'fair', - 'good', - 'moderate', - ]), - }), - 'context': , - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_air_quality', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_carbon_dioxide-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_carbon_dioxide', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Carbon dioxide', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-CarbonDioxideSensor-1037-0', - 'unit_of_measurement': 'ppm', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_carbon_dioxide-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'carbon_dioxide', - 'friendly_name': 'lightfi-aq1-air-quality-sensor Carbon dioxide', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'context': , - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_carbon_dioxide', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '678.0', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_humidity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_humidity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Humidity', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-HumiditySensor-1029-0', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_humidity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'humidity', - 'friendly_name': 'lightfi-aq1-air-quality-sensor Humidity', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_humidity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '28.75', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_nitrogen_dioxide-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_nitrogen_dioxide', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Nitrogen dioxide', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-NitrogenDioxideSensor-1043-0', - 'unit_of_measurement': 'ppm', - }) -# --- -# name: 
test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_nitrogen_dioxide-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'nitrogen_dioxide', - 'friendly_name': 'lightfi-aq1-air-quality-sensor Nitrogen dioxide', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'context': , - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_nitrogen_dioxide', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_pm1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_pm1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PM1', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PM1Sensor-1068-0', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_pm1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pm1', - 'friendly_name': 'lightfi-aq1-air-quality-sensor PM1', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_pm1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.0', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_pm10-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_pm10', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PM10', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PM10Sensor-1069-0', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_pm10-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pm10', - 'friendly_name': 'lightfi-aq1-air-quality-sensor PM10', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_pm10', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.0', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_pm2_5-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.lightfi_aq1_air_quality_sensor_pm2_5', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PM2.5', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PM25Sensor-1066-0', - 'unit_of_measurement': 'µg/m³', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_pm2_5-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pm25', - 'friendly_name': 'lightfi-aq1-air-quality-sensor PM2.5', - 'state_class': , - 'unit_of_measurement': 'µg/m³', - }), - 'context': , - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_pm2_5', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.0', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-TemperatureSensor-1026-0', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'lightfi-aq1-air-quality-sensor Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20.08', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_vocs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_vocs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VOCs', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-TotalVolatileOrganicCompoundsSensor-1070-0', - 'unit_of_measurement': 'ppm', - }) -# --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_vocs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'volatile_organic_compounds_parts', - 'friendly_name': 'lightfi-aq1-air-quality-sensor VOCs', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 
'context': , - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_vocs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '189.0', - }) -# --- -# name: test_sensors[eve_contact_sensor][sensor.eve_door_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_door_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSource-47-12', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[eve_contact_sensor][sensor.eve_door_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Eve Door Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.eve_door_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[eve_contact_sensor][sensor.eve_door_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_door_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatVoltage-47-11', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[eve_contact_sensor][sensor.eve_door_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Eve Door Voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.eve_door_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.558', - }) -# --- -# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_energy_plug_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Current', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-EveEnergySensorWattCurrent-319486977-319422473', - 'unit_of_measurement': , - 
}) -# --- -# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Eve Energy Plug Current', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.eve_energy_plug_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_energy_plug_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-EveEnergySensorWattAccumulated-319486977-319422475', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Eve Energy Plug Energy', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.eve_energy_plug_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.220000028610229', - }) -# --- -# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_energy_plug_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-EveEnergySensorWatt-319486977-319422474', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Eve Energy Plug Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.eve_energy_plug_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_energy_plug_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 
'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-EveEnergySensorVoltage-319486977-319422472', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[eve_energy_plug][sensor.eve_energy_plug_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Eve Energy Plug Voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.eve_energy_plug_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '238.800003051758', - }) -# --- -# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_energy_plug_patched_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Current', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-2-ElectricalPowerMeasurementActiveCurrent-144-5', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Eve Energy Plug Patched Current', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.eve_energy_plug_patched_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_energy_plug_patched_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-2-ElectricalEnergyMeasurementCumulativeEnergyImported-145-1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Eve Energy Plug Patched Energy', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 
'entity_id': 'sensor.eve_energy_plug_patched_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0025', - }) -# --- -# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_energy_plug_patched_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-2-ElectricalPowerMeasurementWatt-144-8', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Eve Energy Plug Patched Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.eve_energy_plug_patched_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '550.0', - }) -# --- -# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_energy_plug_patched_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-2-ElectricalPowerMeasurementVoltage-144-4', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[eve_energy_plug_patched][sensor.eve_energy_plug_patched_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Eve Energy Plug Patched Voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.eve_energy_plug_patched_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '220.0', - }) -# --- -# name: test_sensors[eve_thermo][sensor.eve_thermo_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_thermo_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'matter', - 
'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-0-PowerSource-47-12', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[eve_thermo][sensor.eve_thermo_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Eve Thermo Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.eve_thermo_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[eve_thermo][sensor.eve_thermo_valve_position-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.eve_thermo_valve_position', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Valve position', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'valve_position', - 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-1-EveThermoValvePosition-319486977-319422488', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[eve_thermo][sensor.eve_thermo_valve_position-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Eve Thermo Valve position', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.eve_thermo_valve_position', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10', - }) -# --- -# name: test_sensors[eve_thermo][sensor.eve_thermo_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_thermo_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-0-PowerSourceBatVoltage-47-11', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[eve_thermo][sensor.eve_thermo_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Eve Thermo Voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.eve_thermo_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.05', - }) -# --- -# name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_weather_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': 
None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-0-PowerSource-47-12', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Eve Weather Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.eve_weather_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[eve_weather_sensor][sensor.eve_weather_humidity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.eve_weather_humidity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Humidity', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-2-HumiditySensor-1029-0', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[eve_weather_sensor][sensor.eve_weather_humidity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'humidity', - 'friendly_name': 'Eve Weather Humidity', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.eve_weather_humidity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '80.66', - }) -# --- -# name: test_sensors[eve_weather_sensor][sensor.eve_weather_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.eve_weather_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Pressure', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-EveWeatherPressure-319486977-319422484', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[eve_weather_sensor][sensor.eve_weather_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'Eve Weather Pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.eve_weather_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1008.5', - }) -# --- -# name: test_sensors[eve_weather_sensor][sensor.eve_weather_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - 
}), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.eve_weather_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-TemperatureSensor-1026-0', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[eve_weather_sensor][sensor.eve_weather_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Eve Weather Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.eve_weather_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '16.03', - }) -# --- -# name: test_sensors[eve_weather_sensor][sensor.eve_weather_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_weather_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-0-PowerSourceBatVoltage-47-11', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[eve_weather_sensor][sensor.eve_weather_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Eve Weather Voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.eve_weather_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.956', - }) -# --- -# name: test_sensors[flow_sensor][sensor.mock_flow_sensor_flow-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_flow_sensor_flow', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Flow', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'flow', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-FlowSensor-1028-0', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[flow_sensor][sensor.mock_flow_sensor_flow-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Flow Sensor Flow', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_flow_sensor_flow', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: 
test_sensors[humidity_sensor][sensor.mock_humidity_sensor_humidity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_humidity_sensor_humidity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Humidity', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-HumiditySensor-1029-0', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[humidity_sensor][sensor.mock_humidity_sensor_humidity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'humidity', - 'friendly_name': 'Mock Humidity Sensor Humidity', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.mock_humidity_sensor_humidity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[light_sensor][sensor.mock_light_sensor_illuminance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_light_sensor_illuminance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Illuminance', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-LightSensor-1024-0', - 'unit_of_measurement': 'lx', - }) -# --- -# name: test_sensors[light_sensor][sensor.mock_light_sensor_illuminance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'illuminance', - 'friendly_name': 'Mock Light Sensor Illuminance', - 'state_class': , - 'unit_of_measurement': 'lx', - }), - 'context': , - 'entity_id': 'sensor.mock_light_sensor_illuminance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.3', - }) -# --- -# name: test_sensors[microwave_oven][sensor.microwave_oven_operational_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'stopped', - 'running', - 'paused', - 'error', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.microwave_oven_operational_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Operational state', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'operational_state', - 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalState-96-4', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_sensors[microwave_oven][sensor.microwave_oven_operational_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Microwave Oven Operational state', - 'options': list([ - 'stopped', - 'running', - 'paused', - 'error', - ]), - }), - 'context': , - 'entity_id': 'sensor.microwave_oven_operational_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'stopped', - }) -# --- -# name: test_sensors[pressure_sensor][sensor.mock_pressure_sensor_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_pressure_sensor_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Pressure', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PressureSensor-1027-0', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[pressure_sensor][sensor.mock_pressure_sensor_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'Mock Pressure Sensor Pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_pressure_sensor_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[room_airconditioner][sensor.room_airconditioner_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.room_airconditioner_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-2-TemperatureSensor-1026-0', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[room_airconditioner][sensor.room_airconditioner_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Room AirConditioner Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.room_airconditioner_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[silabs_dishwasher][sensor.dishwasher_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.dishwasher_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': 
None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Current', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-2-ElectricalPowerMeasurementActiveCurrent-144-5', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[silabs_dishwasher][sensor.dishwasher_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Dishwasher Current', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.dishwasher_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[silabs_dishwasher][sensor.dishwasher_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.dishwasher_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-2-ElectricalEnergyMeasurementCumulativeEnergyImported-145-1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[silabs_dishwasher][sensor.dishwasher_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Dishwasher Energy', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.dishwasher_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[silabs_dishwasher][sensor.dishwasher_operational_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'stopped', - 'running', - 'paused', - 'error', - 'extra_state', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.dishwasher_operational_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Operational state', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'operational_state', - 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-OperationalState-96-4', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[silabs_dishwasher][sensor.dishwasher_operational_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Dishwasher Operational state', - 'options': list([ - 'stopped', - 'running', - 'paused', - 'error', - 'extra_state', - ]), - }), - 'context': , - 'entity_id': 'sensor.dishwasher_operational_state', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': 'stopped', - }) -# --- -# name: test_sensors[silabs_dishwasher][sensor.dishwasher_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.dishwasher_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-2-ElectricalPowerMeasurementWatt-144-8', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[silabs_dishwasher][sensor.dishwasher_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Dishwasher Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.dishwasher_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[silabs_dishwasher][sensor.dishwasher_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.dishwasher_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-2-ElectricalPowerMeasurementVoltage-144-4', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[silabs_dishwasher][sensor.dishwasher_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Dishwasher Voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.dishwasher_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '120.0', - }) -# --- -# name: test_sensors[smoke_detector][sensor.smoke_sensor_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.smoke_sensor_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSource-47-12', - 'unit_of_measurement': '%', - }) -# --- -# name: 
test_sensors[smoke_detector][sensor.smoke_sensor_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Smoke sensor Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.smoke_sensor_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '94', - }) -# --- -# name: test_sensors[smoke_detector][sensor.smoke_sensor_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.smoke_sensor_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatVoltage-47-11', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[smoke_detector][sensor.smoke_sensor_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Smoke sensor Voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.smoke_sensor_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[temperature_sensor][sensor.mock_temperature_sensor_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_temperature_sensor_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-TemperatureSensor-1026-0', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[temperature_sensor][sensor.mock_temperature_sensor_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Mock Temperature Sensor Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_temperature_sensor_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '21.0', - }) -# --- diff --git a/tests/components/matter/snapshots/test_switch.ambr b/tests/components/matter/snapshots/test_switch.ambr deleted file mode 100644 index 9396dccd245..00000000000 --- a/tests/components/matter/snapshots/test_switch.ambr +++ /dev/null @@ -1,377 +0,0 @@ -# serializer version: 1 -# name: test_switches[door_lock][switch.mock_door_lock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': 
None, - 'entity_id': 'switch.mock_door_lock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterSwitch-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches[door_lock][switch.mock_door_lock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Mock Door Lock', - }), - 'context': , - 'entity_id': 'switch.mock_door_lock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switches[door_lock_with_unbolt][switch.mock_door_lock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.mock_door_lock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterSwitch-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches[door_lock_with_unbolt][switch.mock_door_lock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Mock Door Lock', - }), - 'context': , - 'entity_id': 'switch.mock_door_lock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switches[eve_energy_plug][switch.eve_energy_plug-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.eve_energy_plug', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-MatterPlug-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches[eve_energy_plug][switch.eve_energy_plug-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Eve Energy Plug', - }), - 'context': , - 'entity_id': 'switch.eve_energy_plug', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switches[eve_energy_plug_patched][switch.eve_energy_plug_patched-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.eve_energy_plug_patched', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': 
None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-1-MatterPlug-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches[eve_energy_plug_patched][switch.eve_energy_plug_patched-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Eve Energy Plug Patched', - }), - 'context': , - 'entity_id': 'switch.eve_energy_plug_patched', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switches[on_off_plugin_unit][switch.mock_onoffpluginunit-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.mock_onoffpluginunit', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterPlug-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches[on_off_plugin_unit][switch.mock_onoffpluginunit-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Mock OnOffPluginUnit', - }), - 'context': , - 'entity_id': 'switch.mock_onoffpluginunit', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switches[room_airconditioner][switch.room_airconditioner_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.room_airconditioner_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power', - 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterPowerToggle-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches[room_airconditioner][switch.room_airconditioner_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Room AirConditioner Power', - }), - 'context': , - 'entity_id': 'switch.room_airconditioner_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switches[switch_unit][switch.mock_switchunit-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.mock_switchunit', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': 
None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterSwitch-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches[switch_unit][switch.mock_switchunit-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Mock SwitchUnit', - }), - 'context': , - 'entity_id': 'switch.mock_switchunit', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switches[thermostat][switch.longan_link_hvac-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.longan_link_hvac', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterSwitch-6-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches[thermostat][switch.longan_link_hvac-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Longan link HVAC', - }), - 'context': , - 'entity_id': 'switch.longan_link_hvac', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/matter/snapshots/test_vacuum.ambr b/tests/components/matter/snapshots/test_vacuum.ambr deleted file mode 100644 index 9e6b52ed572..00000000000 --- a/tests/components/matter/snapshots/test_vacuum.ambr +++ /dev/null @@ -1,48 +0,0 @@ -# serializer version: 1 -# name: test_vacuum[vacuum_cleaner][vacuum.mock_vacuum-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'vacuum', - 'entity_category': None, - 'entity_id': 'vacuum.mock_vacuum', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000042-MatterNodeDevice-1-MatterVacuumCleaner-84-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_vacuum[vacuum_cleaner][vacuum.mock_vacuum-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Vacuum', - 'supported_features': , - }), - 'context': , - 'entity_id': 'vacuum.mock_vacuum', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'idle', - }) -# --- diff --git a/tests/components/matter/snapshots/test_valve.ambr b/tests/components/matter/snapshots/test_valve.ambr deleted file mode 100644 index 98634635476..00000000000 --- a/tests/components/matter/snapshots/test_valve.ambr +++ /dev/null @@ -1,49 +0,0 @@ -# 
serializer version: 1
-# name: test_valves[valve][valve.valve-entry]
-  EntityRegistryEntrySnapshot({
-    'aliases': set({
-    }),
-    'area_id': None,
-    'capabilities': None,
-    'config_entry_id': ,
-    'device_class': None,
-    'device_id': ,
-    'disabled_by': None,
-    'domain': 'valve',
-    'entity_category': None,
-    'entity_id': 'valve.valve',
-    'has_entity_name': True,
-    'hidden_by': None,
-    'icon': None,
-    'id': ,
-    'labels': set({
-    }),
-    'name': None,
-    'options': dict({
-    }),
-    'original_device_class': ,
-    'original_icon': None,
-    'original_name': None,
-    'platform': 'matter',
-    'previous_unique_id': None,
-    'supported_features': ,
-    'translation_key': None,
-    'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-1-MatterValve-129-4',
-    'unit_of_measurement': None,
-  })
-# ---
-# name: test_valves[valve][valve.valve-state]
-  StateSnapshot({
-    'attributes': ReadOnlyDict({
-      'device_class': 'water',
-      'friendly_name': 'Valve',
-      'supported_features': ,
-    }),
-    'context': ,
-    'entity_id': 'valve.valve',
-    'last_changed': ,
-    'last_reported': ,
-    'last_updated': ,
-    'state': 'closed',
-  })
-# ---
diff --git a/tests/components/matter/test_adapter.py b/tests/components/matter/test_adapter.py
index 01dff3b7899..da2ef179c44 100644
--- a/tests/components/matter/test_adapter.py
+++ b/tests/components/matter/test_adapter.py
@@ -4,7 +4,9 @@ from __future__ import annotations
 
 from unittest.mock import MagicMock
 
-from matter_server.common.models import EventType
+from matter_server.client.models.node import MatterNode
+from matter_server.common.helpers.util import dataclass_from_dict
+from matter_server.common.models import EventType, MatterNodeData
 import pytest
 
 from homeassistant.components.matter.adapter import get_clean_name
@@ -12,26 +14,33 @@ from homeassistant.components.matter.const import DOMAIN
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dr
 
-from .common import create_node_from_fixture
-
-from tests.common import MockConfigEntry
+from .common import load_and_parse_node_fixture, setup_integration_with_node_fixture
 
 
-@pytest.mark.usefixtures("matter_node")
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 @pytest.mark.parametrize(
     ("node_fixture", "name"),
     [
-        ("onoff_light", "Mock OnOff Light"),
-        ("onoff_light_alt_name", "Mock OnOff Light"),
-        ("onoff_light_no_name", "Mock Light"),
+        ("onoff-light", "Mock OnOff Light"),
+        ("onoff-light-alt-name", "Mock OnOff Light"),
+        ("onoff-light-no-name", "Mock Light"),
     ],
 )
 async def test_device_registry_single_node_device(
     hass: HomeAssistant,
     device_registry: dr.DeviceRegistry,
+    matter_client: MagicMock,
+    node_fixture: str,
     name: str,
 ) -> None:
     """Test bridge devices are set up correctly with via_device."""
+    await setup_integration_with_node_fixture(
+        hass,
+        node_fixture,
+        matter_client,
+    )
+
     entry = device_registry.async_get_device(
         identifiers={
             (DOMAIN, "deviceid_00000000000004D2-0000000000000001-MatterNodeDevice")
@@ -45,19 +54,25 @@ async def test_device_registry_single_node_device(
     assert entry.name == name
     assert entry.manufacturer == "Nabu Casa"
     assert entry.model == "Mock Light"
-    assert entry.model_id == "32768"
     assert entry.hw_version == "v1.0"
     assert entry.sw_version == "v1.0"
     assert entry.serial_number == "12345678"
 
 
-@pytest.mark.usefixtures("matter_node")
-@pytest.mark.parametrize("node_fixture", ["on_off_plugin_unit"])
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 async def test_device_registry_single_node_device_alt(
     hass: HomeAssistant,
     device_registry: dr.DeviceRegistry,
+    matter_client: MagicMock,
 ) -> None:
     """Test additional device with different attribute values."""
+    await setup_integration_with_node_fixture(
+        hass,
+        "on-off-plugin-unit",
+        matter_client,
+    )
+
     entry = device_registry.async_get_device(
         identifiers={
             (DOMAIN, "deviceid_00000000000004D2-0000000000000001-MatterNodeDevice")
@@ -73,14 +88,19 @@ async def test_device_registry_single_node_device_alt(
     assert entry.serial_number is None
 
 
-@pytest.mark.usefixtures("matter_node")
 @pytest.mark.skip("Waiting for a new test fixture")
-@pytest.mark.parametrize("node_fixture", ["fake_bridge_two_light"])
 async def test_device_registry_bridge(
     hass: HomeAssistant,
     device_registry: dr.DeviceRegistry,
+    matter_client: MagicMock,
 ) -> None:
     """Test bridge devices are set up correctly with via_device."""
+    await setup_integration_with_node_fixture(
+        hass,
+        "fake-bridge-two-light",
+        matter_client,
+    )
+
     # Validate bridge
     bridge_entry = device_registry.async_get_device(
         identifiers={(DOMAIN, "mock-hub-id")}
@@ -120,10 +140,12 @@ async def test_device_registry_bridge(
     assert device2_entry.sw_version == "1.49.1"
 
 
-@pytest.mark.usefixtures("integration")
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 async def test_node_added_subscription(
     hass: HomeAssistant,
     matter_client: MagicMock,
+    integration: MagicMock,
 ) -> None:
     """Test subscription to new devices work."""
     assert matter_client.subscribe_events.call_count == 5
@@ -133,32 +155,48 @@ async def test_node_added_subscription(
     )
     node_added_callback = matter_client.subscribe_events.call_args.kwargs["callback"]
 
-    node = create_node_from_fixture("onoff_light")
+    node_data = load_and_parse_node_fixture("onoff-light")
+    node = MatterNode(
+        dataclass_from_dict(
+            MatterNodeData,
+            node_data,
+        )
+    )
 
-    entity_state = hass.states.get("light.mock_onoff_light")
+    entity_state = hass.states.get("light.mock_onoff_light_light")
     assert not entity_state
 
     node_added_callback(EventType.NODE_ADDED, node)
     await hass.async_block_till_done()
 
-    entity_state = hass.states.get("light.mock_onoff_light")
+    entity_state = hass.states.get("light.mock_onoff_light_light")
    assert entity_state
 
 
-@pytest.mark.usefixtures("matter_node")
-@pytest.mark.parametrize("node_fixture", ["air_purifier"])
 async def test_device_registry_single_node_composed_device(
     hass: HomeAssistant,
-    device_registry: dr.DeviceRegistry,
+    matter_client: MagicMock,
 ) -> None:
     """Test that a composed device within a standalone node only creates one HA device entry."""
-    assert len(device_registry.devices) == 1
+    await setup_integration_with_node_fixture(
+        hass,
+        "air-purifier",
+        matter_client,
+    )
+    dev_reg = dr.async_get(hass)
+    assert len(dev_reg.devices) == 1
 
 
-@pytest.mark.usefixtures("matter_node")
-@pytest.mark.parametrize("node_fixture", ["multi_endpoint_light"])
-async def test_multi_endpoint_name(hass: HomeAssistant) -> None:
+async def test_multi_endpoint_name(
+    hass: HomeAssistant,
+    matter_client: MagicMock,
+) -> None:
     """Test that the entity name gets postfixed if the device has multiple primary endpoints."""
+    await setup_integration_with_node_fixture(
+        hass,
+        "multi-endpoint-light",
+        matter_client,
+    )
     entity_state = hass.states.get("light.inovelli_light_1")
     assert entity_state
     assert entity_state.name == "Inovelli Light (1)"
@@ -167,7 +205,7 @@ async def test_multi_endpoint_name(hass: HomeAssistant) -> None:
     assert entity_state.name == "Inovelli Light (6)"
 
 
-async def test_get_clean_name() -> None:
+async def test_get_clean_name_() -> None:
     """Test get_clean_name helper.
 
     Test device names that are assigned to `null`
@@ -179,27 +217,3 @@ async def test_get_clean_name() -> None:
     assert get_clean_name("") is None
     assert get_clean_name("Mock device") == "Mock device"
     assert get_clean_name("Mock device \x00") == "Mock device"
-
-
-async def test_bad_node_not_crash_integration(
-    hass: HomeAssistant,
-    matter_client: MagicMock,
-    caplog: pytest.LogCaptureFixture,
-) -> None:
-    """Test that a bad node does not crash the integration."""
-    good_node = create_node_from_fixture("onoff_light")
-    bad_node = create_node_from_fixture("onoff_light")
-    del bad_node.endpoints[0].node
-    matter_client.get_nodes.return_value = [good_node, bad_node]
-    config_entry = MockConfigEntry(
-        domain="matter", data={"url": "http://mock-matter-server-url"}
-    )
-    config_entry.add_to_hass(hass)
-
-    assert await hass.config_entries.async_setup(config_entry.entry_id)
-    await hass.async_block_till_done()
-
-    assert matter_client.get_nodes.call_count == 1
-    assert hass.states.get("light.mock_onoff_light") is not None
-    assert len(hass.states.async_all("light")) == 1
-    assert "Error setting up node" in caplog.text
diff --git a/tests/components/matter/test_api.py b/tests/components/matter/test_api.py
index b131ca9eb19..853da113e21 100644
--- a/tests/components/matter/test_api.py
+++ b/tests/components/matter/test_api.py
@@ -23,10 +23,14 @@ from homeassistant.components.matter.const import DOMAIN
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dr
 
+from .common import setup_integration_with_node_fixture
+
 from tests.common import MockConfigEntry
 from tests.typing import WebSocketGenerator
 
 
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 async def test_commission(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
@@ -63,6 +67,8 @@ async def test_commission(
     matter_client.commission_with_code.assert_called_once_with("12345678", False)
 
 
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 async def test_commission_on_network(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
@@ -99,6 +105,8 @@ async def test_commission_on_network(
     matter_client.commission_on_network.assert_called_once_with(1234, "1.2.3.4")
 
 
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 async def test_set_thread_dataset(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
@@ -139,6 +147,8 @@ async def test_set_thread_dataset(
     matter_client.set_thread_operational_dataset.assert_called_once_with("test_dataset")
 
 
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 async def test_set_wifi_credentials(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
@@ -187,9 +197,8 @@ async def test_set_wifi_credentials(
     )
 
 
-@pytest.mark.usefixtures("matter_node")
-# setup (mock) integration with a random node fixture
-@pytest.mark.parametrize("node_fixture", ["onoff_light"])
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 async def test_node_diagnostics(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
@@ -197,6 +206,12 @@
     matter_client: MagicMock,
 ) -> None:
     """Test the node diagnostics command."""
+    # setup (mock) integration with a random node fixture
+    await setup_integration_with_node_fixture(
+        hass,
+        "onoff-light",
+        matter_client,
+    )
     # get the device registry entry for the mocked node
     entry = device_registry.async_get_device(
         identifiers={
@@ -256,9 +271,8 @@
     assert msg["error"]["code"] == ERROR_NODE_NOT_FOUND
 
 
-@pytest.mark.usefixtures("matter_node")
-# setup (mock) integration with a random node fixture
-@pytest.mark.parametrize("node_fixture", ["onoff_light"])
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 async def test_ping_node(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
@@ -266,6 +280,12 @@ async def test_ping_node(
     matter_client: MagicMock,
 ) -> None:
     """Test the ping_node command."""
+    # setup (mock) integration with a random node fixture
+    await setup_integration_with_node_fixture(
+        hass,
+        "onoff-light",
+        matter_client,
+    )
     # get the device registry entry for the mocked node
     entry = device_registry.async_get_device(
         identifiers={
@@ -311,9 +331,8 @@
     assert msg["error"]["code"] == ERROR_NODE_NOT_FOUND
 
 
-@pytest.mark.usefixtures("matter_node")
-# setup (mock) integration with a random node fixture
-@pytest.mark.parametrize("node_fixture", ["onoff_light"])
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 async def test_open_commissioning_window(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
@@ -321,6 +340,12 @@ async def test_open_commissioning_window(
     matter_client: MagicMock,
 ) -> None:
     """Test the open_commissioning_window command."""
+    # setup (mock) integration with a random node fixture
+    await setup_integration_with_node_fixture(
+        hass,
+        "onoff-light",
+        matter_client,
+    )
     # get the device registry entry for the mocked node
     entry = device_registry.async_get_device(
         identifiers={
@@ -372,9 +397,8 @@
     assert msg["error"]["code"] == ERROR_NODE_NOT_FOUND
 
 
-@pytest.mark.usefixtures("matter_node")
-# setup (mock) integration with a random node fixture
-@pytest.mark.parametrize("node_fixture", ["onoff_light"])
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 async def test_remove_matter_fabric(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
@@ -382,6 +406,12 @@ async def test_remove_matter_fabric(
     matter_client: MagicMock,
 ) -> None:
     """Test the remove_matter_fabric command."""
+    # setup (mock) integration with a random node fixture
+    await setup_integration_with_node_fixture(
+        hass,
+        "onoff-light",
+        matter_client,
+    )
     # get the device registry entry for the mocked node
     entry = device_registry.async_get_device(
         identifiers={
@@ -423,9 +453,8 @@
     assert msg["error"]["code"] == ERROR_NODE_NOT_FOUND
 
 
-@pytest.mark.usefixtures("matter_node")
-# setup (mock) integration with a random node fixture
-@pytest.mark.parametrize("node_fixture", ["onoff_light"])
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 async def test_interview_node(
     hass: HomeAssistant,
     hass_ws_client: WebSocketGenerator,
@@ -433,6 +462,12 @@
     matter_client: MagicMock,
 ) -> None:
     """Test the interview_node command."""
+    # setup (mock) integration with a random node fixture
+    await setup_integration_with_node_fixture(
+        hass,
+        "onoff-light",
+        matter_client,
+    )
     # get the device registry entry for the mocked node
     entry = device_registry.async_get_device(
         identifiers={
diff --git a/tests/components/matter/test_binary_sensor.py b/tests/components/matter/test_binary_sensor.py
index 7ae483162bf..becedc0af62 100644
--- a/tests/components/matter/test_binary_sensor.py
+++ b/tests/components/matter/test_binary_sensor.py
@@ -1,22 +1,21 @@
 """Test Matter binary sensors."""
 
-from collections.abc import Generator
 from unittest.mock import MagicMock, patch
 
 from matter_server.client.models.node import MatterNode
 import pytest
-from syrupy import SnapshotAssertion
+from typing_extensions import Generator
 
 from homeassistant.components.matter.binary_sensor import (
     DISCOVERY_SCHEMAS as BINARY_SENSOR_SCHEMAS,
 )
-from homeassistant.const import Platform
+from homeassistant.const import EntityCategory, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
 
 from .common import (
     set_node_attribute,
-    snapshot_matter_entities,
+    setup_integration_with_node_fixture,
     trigger_subscription_callback,
 )
 
@@ -33,30 +32,31 @@ def binary_sensor_platform() -> Generator[None]:
         yield
 
 
-@pytest.mark.usefixtures("matter_devices")
-async def test_binary_sensors(
-    hass: HomeAssistant,
-    entity_registry: er.EntityRegistry,
-    snapshot: SnapshotAssertion,
-) -> None:
-    """Test binary sensors."""
-    snapshot_matter_entities(hass, entity_registry, snapshot, Platform.BINARY_SENSOR)
+@pytest.fixture(name="occupancy_sensor_node")
+async def occupancy_sensor_node_fixture(
+    hass: HomeAssistant, matter_client: MagicMock
+) -> MatterNode:
+    """Fixture for a occupancy sensor node."""
+    return await setup_integration_with_node_fixture(
+        hass, "occupancy-sensor", matter_client
+    )
 
 
-@pytest.mark.parametrize("node_fixture", ["occupancy_sensor"])
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 async def test_occupancy_sensor(
     hass: HomeAssistant,
     matter_client: MagicMock,
-    matter_node: MatterNode,
+    occupancy_sensor_node: MatterNode,
 ) -> None:
     """Test occupancy sensor."""
     state = hass.states.get("binary_sensor.mock_occupancy_sensor_occupancy")
     assert state
     assert state.state == "on"
 
-    set_node_attribute(matter_node, 1, 1030, 0, 0)
+    set_node_attribute(occupancy_sensor_node, 1, 1030, 0, 0)
     await trigger_subscription_callback(
-        hass, matter_client, data=(matter_node.node_id, "1/1030/0", 0)
+        hass, matter_client, data=(occupancy_sensor_node.node_id, "1/1030/0", 0)
     )
 
     state = hass.states.get("binary_sensor.mock_occupancy_sensor_occupancy")
@@ -64,29 +64,36 @@ async def test_occupancy_sensor(
     assert state.state == "off"
 
 
+# This tests needs to be adjusted to remove lingering tasks
+@pytest.mark.parametrize("expected_lingering_tasks", [True])
 @pytest.mark.parametrize(
-    ("node_fixture", "entity_id"),
+    ("fixture", "entity_id"),
     [
-        ("eve_contact_sensor", "binary_sensor.eve_door_door"),
-        ("leak_sensor", "binary_sensor.water_leak_detector_water_leak"),
+        ("eve-contact-sensor", "binary_sensor.eve_door_door"),
+        ("leak-sensor", "binary_sensor.water_leak_detector_water_leak"),
     ],
 )
 async def test_boolean_state_sensors(
     hass: HomeAssistant,
     matter_client: MagicMock,
-    matter_node: MatterNode,
+    fixture: str,
     entity_id: str,
 ) -> None:
     """Test if binary sensors get created from 
devices with Boolean State cluster.""" + node = await setup_integration_with_node_fixture( + hass, + fixture, + matter_client, + ) state = hass.states.get(entity_id) assert state assert state.state == "on" # invert the value - cur_attr_value = matter_node.get_attribute_value(1, 69, 0) - set_node_attribute(matter_node, 1, 69, 0, not cur_attr_value) + cur_attr_value = node.get_attribute_value(1, 69, 0) + set_node_attribute(node, 1, 69, 0, not cur_attr_value) await trigger_subscription_callback( - hass, matter_client, data=(matter_node.node_id, "1/69/0", not cur_attr_value) + hass, matter_client, data=(node.node_id, "1/69/0", not cur_attr_value) ) state = hass.states.get(entity_id) @@ -94,12 +101,13 @@ async def test_boolean_state_sensors( assert state.state == "off" -@pytest.mark.parametrize("node_fixture", ["door_lock"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_battery_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, matter_client: MagicMock, - matter_node: MatterNode, + door_lock: MatterNode, ) -> None: """Test battery sensor.""" entity_id = "binary_sensor.mock_door_lock_battery" @@ -107,11 +115,16 @@ async def test_battery_sensor( assert state assert state.state == "off" - set_node_attribute(matter_node, 1, 47, 14, 1) + set_node_attribute(door_lock, 1, 47, 14, 1) await trigger_subscription_callback( - hass, matter_client, data=(matter_node.node_id, "1/47/14", 1) + hass, matter_client, data=(door_lock.node_id, "1/47/14", 1) ) state = hass.states.get(entity_id) assert state assert state.state == "on" + + entry = entity_registry.async_get(entity_id) + + assert entry + assert entry.entity_category == EntityCategory.DIAGNOSTIC diff --git a/tests/components/matter/test_button.py b/tests/components/matter/test_button.py deleted file mode 100644 index cbf62dd80c7..00000000000 --- a/tests/components/matter/test_button.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Test Matter switches.""" - -from unittest.mock import MagicMock, call - -from chip.clusters import Objects as clusters -from matter_server.client.models.node import MatterNode -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .common import snapshot_matter_entities - - -@pytest.mark.usefixtures("matter_devices") -async def test_buttons( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test buttons.""" - snapshot_matter_entities(hass, entity_registry, snapshot, Platform.BUTTON) - - -@pytest.mark.parametrize("node_fixture", ["eve_energy_plug"]) -async def test_identify_button( - hass: HomeAssistant, - matter_client: MagicMock, - matter_node: MatterNode, -) -> None: - """Test button entity is created for a Matter Identify Cluster.""" - state = hass.states.get("button.eve_energy_plug_identify") - assert state - assert state.attributes["friendly_name"] == "Eve Energy Plug Identify" - # test press action - await hass.services.async_call( - "button", - "press", - { - "entity_id": "button.eve_energy_plug_identify", - }, - blocking=True, - ) - assert matter_client.send_device_command.call_count == 1 - assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, - endpoint_id=1, - command=clusters.Identify.Commands.Identify(identifyTime=15), - ) - - -@pytest.mark.parametrize("node_fixture", 
["silabs_dishwasher"]) -async def test_operational_state_buttons( - hass: HomeAssistant, - matter_client: MagicMock, - matter_node: MatterNode, -) -> None: - """Test if button entities are created for operational state commands.""" - assert hass.states.get("button.dishwasher_pause") - assert hass.states.get("button.dishwasher_start") - assert hass.states.get("button.dishwasher_stop") - - # resume may not be discovered as it's missing in the supported command list - assert hass.states.get("button.dishwasher_resume") is None - - # test press action - await hass.services.async_call( - "button", - "press", - { - "entity_id": "button.dishwasher_pause", - }, - blocking=True, - ) - assert matter_client.send_device_command.call_count == 1 - assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, - endpoint_id=1, - command=clusters.OperationalState.Commands.Pause(), - ) diff --git a/tests/components/matter/test_climate.py b/tests/components/matter/test_climate.py index 037ec4e7626..6a4cf34a640 100644 --- a/tests/components/matter/test_climate.py +++ b/tests/components/matter/test_climate.py @@ -6,39 +6,45 @@ from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode from matter_server.common.helpers.util import create_attribute_path_from_attribute import pytest -from syrupy import SnapshotAssertion from homeassistant.components.climate import ClimateEntityFeature, HVACAction, HVACMode -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - snapshot_matter_entities, + setup_integration_with_node_fixture, trigger_subscription_callback, ) -@pytest.mark.usefixtures("matter_devices") -async def test_climates( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test climates.""" - snapshot_matter_entities(hass, entity_registry, snapshot, Platform.CLIMATE) +@pytest.fixture(name="thermostat") +async def thermostat_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a thermostat node.""" + return await setup_integration_with_node_fixture(hass, "thermostat", matter_client) -@pytest.mark.parametrize("node_fixture", ["thermostat"]) +@pytest.fixture(name="room_airconditioner") +async def room_airconditioner( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a room air conditioner node.""" + return await setup_integration_with_node_fixture( + hass, "room-airconditioner", matter_client + ) + + +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_thermostat_base( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + thermostat: MatterNode, ) -> None: """Test thermostat base attributes and state updates.""" # test entity attributes - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.attributes["min_temp"] == 7 assert state.attributes["max_temp"] == 35 @@ -55,12 +61,12 @@ async def test_thermostat_base( assert state.attributes["supported_features"] & mask == mask # test common state updates from device - set_node_attribute(matter_node, 1, 513, 3, 1600) - set_node_attribute(matter_node, 1, 513, 4, 3000) - set_node_attribute(matter_node, 1, 513, 5, 1600) - set_node_attribute(matter_node, 
1, 513, 6, 3000) + set_node_attribute(thermostat, 1, 513, 3, 1600) + set_node_attribute(thermostat, 1, 513, 4, 3000) + set_node_attribute(thermostat, 1, 513, 5, 1600) + set_node_attribute(thermostat, 1, 513, 6, 3000) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.attributes["min_temp"] == 16 assert state.attributes["max_temp"] == 30 @@ -72,94 +78,95 @@ async def test_thermostat_base( ] # test system mode update from device - set_node_attribute(matter_node, 1, 513, 28, 0) + set_node_attribute(thermostat, 1, 513, 28, 0) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.state == HVACMode.OFF # test running state update from device - set_node_attribute(matter_node, 1, 513, 41, 1) + set_node_attribute(thermostat, 1, 513, 41, 1) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.attributes["hvac_action"] == HVACAction.HEATING - set_node_attribute(matter_node, 1, 513, 41, 8) + set_node_attribute(thermostat, 1, 513, 41, 8) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.attributes["hvac_action"] == HVACAction.HEATING - set_node_attribute(matter_node, 1, 513, 41, 2) + set_node_attribute(thermostat, 1, 513, 41, 2) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.attributes["hvac_action"] == HVACAction.COOLING - set_node_attribute(matter_node, 1, 513, 41, 16) + set_node_attribute(thermostat, 1, 513, 41, 16) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.attributes["hvac_action"] == HVACAction.COOLING - set_node_attribute(matter_node, 1, 513, 41, 4) + set_node_attribute(thermostat, 1, 513, 41, 4) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.attributes["hvac_action"] == HVACAction.FAN - set_node_attribute(matter_node, 1, 513, 41, 32) + set_node_attribute(thermostat, 1, 513, 41, 32) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.attributes["hvac_action"] == HVACAction.FAN - set_node_attribute(matter_node, 1, 513, 41, 64) + set_node_attribute(thermostat, 1, 513, 41, 64) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.attributes["hvac_action"] == HVACAction.FAN - set_node_attribute(matter_node, 1, 513, 41, 66) + set_node_attribute(thermostat, 1, 513, 41, 66) await trigger_subscription_callback(hass, matter_client) - state = 
hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.attributes["hvac_action"] == HVACAction.OFF # change system mode to heat - set_node_attribute(matter_node, 1, 513, 28, 4) + set_node_attribute(thermostat, 1, 513, 28, 4) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.state == HVACMode.HEAT # change occupied heating setpoint to 20 - set_node_attribute(matter_node, 1, 513, 18, 2000) + set_node_attribute(thermostat, 1, 513, 18, 2000) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.attributes["temperature"] == 20 -@pytest.mark.parametrize("node_fixture", ["thermostat"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_thermostat_service_calls( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + thermostat: MatterNode, ) -> None: """Test climate platform service calls.""" # test single-setpoint temperature adjustment when cool mode is active - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.state == HVACMode.COOL await hass.services.async_call( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac", + "entity_id": "climate.longan_link_hvac_thermostat", "temperature": 25, }, blocking=True, @@ -167,20 +174,20 @@ async def test_thermostat_service_calls( assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=matter_node.node_id, + node_id=thermostat.node_id, attribute_path="1/513/17", value=2500, ) matter_client.write_attribute.reset_mock() # ensure that no command is executed when the temperature is the same - set_node_attribute(matter_node, 1, 513, 17, 2500) + set_node_attribute(thermostat, 1, 513, 17, 2500) await trigger_subscription_callback(hass, matter_client) await hass.services.async_call( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac", + "entity_id": "climate.longan_link_hvac_thermostat", "temperature": 25, }, blocking=True, @@ -190,9 +197,9 @@ async def test_thermostat_service_calls( matter_client.write_attribute.reset_mock() # test single-setpoint temperature adjustment when heat mode is active - set_node_attribute(matter_node, 1, 513, 28, 4) + set_node_attribute(thermostat, 1, 513, 28, 4) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.state == HVACMode.HEAT @@ -200,7 +207,7 @@ async def test_thermostat_service_calls( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac", + "entity_id": "climate.longan_link_hvac_thermostat", "temperature": 20, }, blocking=True, @@ -208,16 +215,16 @@ async def test_thermostat_service_calls( assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=matter_node.node_id, + node_id=thermostat.node_id, attribute_path="1/513/18", value=2000, ) matter_client.write_attribute.reset_mock() # test dual setpoint temperature adjustments 
when heat_cool mode is active - set_node_attribute(matter_node, 1, 513, 28, 1) + set_node_attribute(thermostat, 1, 513, 28, 1) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac") + state = hass.states.get("climate.longan_link_hvac_thermostat") assert state assert state.state == HVACMode.HEAT_COOL @@ -225,7 +232,7 @@ async def test_thermostat_service_calls( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac", + "entity_id": "climate.longan_link_hvac_thermostat", "target_temp_low": 10, "target_temp_high": 30, }, @@ -234,12 +241,12 @@ async def test_thermostat_service_calls( assert matter_client.write_attribute.call_count == 2 assert matter_client.write_attribute.call_args_list[0] == call( - node_id=matter_node.node_id, + node_id=thermostat.node_id, attribute_path="1/513/18", value=1000, ) assert matter_client.write_attribute.call_args_list[1] == call( - node_id=matter_node.node_id, + node_id=thermostat.node_id, attribute_path="1/513/17", value=3000, ) @@ -250,7 +257,7 @@ async def test_thermostat_service_calls( "climate", "set_hvac_mode", { - "entity_id": "climate.longan_link_hvac", + "entity_id": "climate.longan_link_hvac_thermostat", "hvac_mode": HVACMode.HEAT, }, blocking=True, @@ -258,7 +265,7 @@ async def test_thermostat_service_calls( assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=matter_node.node_id, + node_id=thermostat.node_id, attribute_path=create_attribute_path_from_attribute( endpoint_id=1, attribute=clusters.Thermostat.Attributes.SystemMode, @@ -274,7 +281,7 @@ async def test_thermostat_service_calls( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac", + "entity_id": "climate.longan_link_hvac_thermostat", "temperature": 22, "hvac_mode": HVACMode.COOL, }, @@ -282,7 +289,7 @@ async def test_thermostat_service_calls( ) assert matter_client.write_attribute.call_count == 2 assert matter_client.write_attribute.call_args_list[0] == call( - node_id=matter_node.node_id, + node_id=thermostat.node_id, attribute_path=create_attribute_path_from_attribute( endpoint_id=1, attribute=clusters.Thermostat.Attributes.SystemMode, @@ -290,36 +297,32 @@ async def test_thermostat_service_calls( value=3, ) assert matter_client.write_attribute.call_args_list[1] == call( - node_id=matter_node.node_id, + node_id=thermostat.node_id, attribute_path="1/513/17", value=2200, ) matter_client.write_attribute.reset_mock() -@pytest.mark.parametrize("node_fixture", ["room_airconditioner"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_room_airconditioner( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + room_airconditioner: MatterNode, ) -> None: """Test if a climate entity is created for a Room Airconditioner device.""" - state = hass.states.get("climate.room_airconditioner") + state = hass.states.get("climate.room_airconditioner_thermostat") assert state assert state.attributes["current_temperature"] == 20 - # room airconditioner has mains power on OnOff cluster with value set to False - assert state.state == HVACMode.OFF + assert state.attributes["min_temp"] == 16 + assert state.attributes["max_temp"] == 32 # test supported features correctly parsed # WITHOUT temperature_range support mask = ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.TURN_OFF assert state.attributes["supported_features"] & mask == mask 
- # set mains power to ON (OnOff cluster) - set_node_attribute(matter_node, 1, 6, 0, True) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.room_airconditioner") - # test supported HVAC modes include fan and dry modes assert state.attributes["hvac_modes"] == [ HVACMode.OFF, @@ -330,21 +333,15 @@ async def test_room_airconditioner( HVACMode.HEAT_COOL, ] # test fan-only hvac mode - set_node_attribute(matter_node, 1, 513, 28, 7) + set_node_attribute(room_airconditioner, 1, 513, 28, 7) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.room_airconditioner") + state = hass.states.get("climate.room_airconditioner_thermostat") assert state assert state.state == HVACMode.FAN_ONLY # test dry hvac mode - set_node_attribute(matter_node, 1, 513, 28, 8) + set_node_attribute(room_airconditioner, 1, 513, 28, 8) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.room_airconditioner") + state = hass.states.get("climate.room_airconditioner_thermostat") assert state assert state.state == HVACMode.DRY - - # test featuremap update - set_node_attribute(matter_node, 1, 513, 65532, 1) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.room_airconditioner") - assert state.attributes["supported_features"] & ClimateEntityFeature.TURN_ON diff --git a/tests/components/matter/test_config_flow.py b/tests/components/matter/test_config_flow.py index eed776c132e..562cf4bb86a 100644 --- a/tests/components/matter/test_config_flow.py +++ b/tests/components/matter/test_config_flow.py @@ -2,22 +2,20 @@ from __future__ import annotations -from collections.abc import Generator from ipaddress import ip_address -from unittest.mock import AsyncMock, MagicMock, call, patch -from uuid import uuid4 +from typing import Any +from unittest.mock import DEFAULT, AsyncMock, MagicMock, call, patch -from aiohasupervisor import SupervisorError -from aiohasupervisor.models import Discovery from matter_server.client.exceptions import CannotConnect, InvalidServerVersion import pytest +from typing_extensions import Generator from homeassistant import config_entries +from homeassistant.components.hassio import HassioAPIError, HassioServiceInfo from homeassistant.components.matter.const import ADDON_SLUG, DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry @@ -95,9 +93,20 @@ def supervisor_fixture() -> Generator[MagicMock]: yield is_hassio -@pytest.fixture(autouse=True) -def mock_get_addon_discovery_info(get_addon_discovery_info: AsyncMock) -> None: +@pytest.fixture(name="discovery_info") +def discovery_info_fixture() -> Any: + """Return the discovery info from the supervisor.""" + return DEFAULT + + +@pytest.fixture(name="get_addon_discovery_info", autouse=True) +def get_addon_discovery_info_fixture(discovery_info: Any) -> Generator[AsyncMock]: """Mock get add-on discovery info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info", + return_value=discovery_info, + ) as get_addon_discovery_info: + yield get_addon_discovery_info @pytest.fixture(name="addon_setup_time", autouse=True) @@ -292,19 +301,7 @@ async def test_zeroconf_discovery_not_onboarded_not_supervisor( @pytest.mark.parametrize("zeroconf_info", 
[ZEROCONF_INFO_TCP, ZEROCONF_INFO_UDP]) -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_zeroconf_not_onboarded_already_discovered( hass: HomeAssistant, supervisor: MagicMock, @@ -342,19 +339,7 @@ async def test_zeroconf_not_onboarded_already_discovered( @pytest.mark.parametrize("zeroconf_info", [ZEROCONF_INFO_TCP, ZEROCONF_INFO_UDP]) -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_zeroconf_not_onboarded_running( hass: HomeAssistant, supervisor: MagicMock, @@ -386,19 +371,7 @@ async def test_zeroconf_not_onboarded_running( @pytest.mark.parametrize("zeroconf_info", [ZEROCONF_INFO_TCP, ZEROCONF_INFO_UDP]) -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_zeroconf_not_onboarded_installed( hass: HomeAssistant, supervisor: MagicMock, @@ -419,7 +392,7 @@ async def test_zeroconf_not_onboarded_installed( await hass.async_block_till_done() assert addon_info.call_count == 1 - assert start_addon.call_args == call("core_matter_server") + assert start_addon.call_args == call(hass, "core_matter_server") assert client_connect.call_count == 1 assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Matter" @@ -432,19 +405,7 @@ async def test_zeroconf_not_onboarded_installed( @pytest.mark.parametrize("zeroconf_info", [ZEROCONF_INFO_TCP, ZEROCONF_INFO_UDP]) -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_zeroconf_not_onboarded_not_installed( hass: HomeAssistant, supervisor: MagicMock, @@ -468,8 +429,8 @@ async def test_zeroconf_not_onboarded_not_installed( assert addon_info.call_count == 0 assert addon_store_info.call_count == 2 - assert install_addon.call_args == call("core_matter_server") - assert start_addon.call_args == call("core_matter_server") + assert install_addon.call_args == call(hass, "core_matter_server") + assert start_addon.call_args == call(hass, "core_matter_server") assert client_connect.call_count == 1 assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Matter" @@ -481,19 +442,7 @@ async def test_zeroconf_not_onboarded_not_installed( assert setup_entry.call_count == 1 -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_supervisor_discovery( hass: HomeAssistant, supervisor: MagicMock, @@ -531,19 +480,7 @@ async def test_supervisor_discovery( @pytest.mark.parametrize( ("discovery_info", "error"), - [ - ( - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], - 
SupervisorError(), - ) - ], + [({"config": ADDON_DISCOVERY_INFO}, HassioAPIError())], ) async def test_supervisor_discovery_addon_info_failed( hass: HomeAssistant, @@ -576,19 +513,7 @@ async def test_supervisor_discovery_addon_info_failed( assert result["reason"] == "addon_info_failed" -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_clean_supervisor_discovery_on_user_create( hass: HomeAssistant, supervisor: MagicMock, @@ -769,7 +694,7 @@ async def test_supervisor_discovery_addon_not_running( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call("core_matter_server") + assert start_addon.call_args == call(hass, "core_matter_server") assert client_connect.call_count == 1 assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Matter" @@ -819,7 +744,7 @@ async def test_supervisor_discovery_addon_not_installed( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call("core_matter_server") + assert install_addon.call_args == call(hass, "core_matter_server") assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -827,7 +752,7 @@ async def test_supervisor_discovery_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call("core_matter_server") + assert start_addon.call_args == call(hass, "core_matter_server") assert client_connect.call_count == 1 assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Matter" @@ -879,19 +804,7 @@ async def test_not_addon( assert setup_entry.call_count == 1 -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_addon_running( hass: HomeAssistant, supervisor: MagicMock, @@ -937,15 +850,8 @@ async def test_addon_running( ), [ ( - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], - SupervisorError(), + {"config": ADDON_DISCOVERY_INFO}, + HassioAPIError(), None, None, "addon_get_discovery_info_failed", @@ -953,14 +859,7 @@ async def test_addon_running( False, ), ( - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, None, CannotConnect(Exception("Boom")), None, @@ -969,7 +868,7 @@ async def test_addon_running( True, ), ( - [], + None, None, None, None, @@ -978,17 +877,10 @@ async def test_addon_running( False, ), ( - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, None, None, - SupervisorError(), + HassioAPIError(), "addon_info_failed", False, False, @@ -1044,15 +936,8 @@ async def test_addon_running_failures( ), [ ( - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], - SupervisorError(), + 
{"config": ADDON_DISCOVERY_INFO}, + HassioAPIError(), None, None, "addon_get_discovery_info_failed", @@ -1060,14 +945,7 @@ async def test_addon_running_failures( False, ), ( - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, None, CannotConnect(Exception("Boom")), None, @@ -1076,7 +954,7 @@ async def test_addon_running_failures( True, ), ( - [], + None, None, None, None, @@ -1085,17 +963,10 @@ async def test_addon_running_failures( False, ), ( - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, None, None, - SupervisorError(), + HassioAPIError(), "addon_info_failed", False, False, @@ -1136,19 +1007,7 @@ async def test_addon_running_failures_zeroconf( assert result["reason"] == abort_reason -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_addon_running_already_configured( hass: HomeAssistant, supervisor: MagicMock, @@ -1186,19 +1045,7 @@ async def test_addon_running_already_configured( assert setup_entry.call_count == 1 -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_addon_installed( hass: HomeAssistant, supervisor: MagicMock, @@ -1227,7 +1074,7 @@ async def test_addon_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call("core_matter_server") + assert start_addon.call_args == call(hass, "core_matter_server") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Matter" assert result["data"] == { @@ -1248,35 +1095,21 @@ async def test_addon_installed( ), [ ( - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], - SupervisorError(), + {"config": ADDON_DISCOVERY_INFO}, + HassioAPIError(), None, False, False, ), ( - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, None, CannotConnect(Exception("Boom")), True, True, ), ( - [], + None, None, None, True, @@ -1319,7 +1152,7 @@ async def test_addon_installed_failures( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert start_addon.call_args == call("core_matter_server") + assert start_addon.call_args == call(hass, "core_matter_server") assert get_addon_discovery_info.called is discovery_info_called assert client_connect.called is client_connect_called assert result["type"] is FlowResultType.ABORT @@ -1337,35 +1170,21 @@ async def test_addon_installed_failures( ), [ ( - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], - SupervisorError(), + {"config": ADDON_DISCOVERY_INFO}, + HassioAPIError(), None, False, False, ), ( - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": 
ADDON_DISCOVERY_INFO}, None, CannotConnect(Exception("Boom")), True, True, ), ( - [], + None, None, None, True, @@ -1398,26 +1217,14 @@ async def test_addon_installed_failures_zeroconf( await hass.async_block_till_done() assert addon_info.call_count == 1 - assert start_addon.call_args == call("core_matter_server") + assert start_addon.call_args == call(hass, "core_matter_server") assert get_addon_discovery_info.called is discovery_info_called assert client_connect.called is client_connect_called assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_start_failed" -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_addon_installed_already_configured( hass: HomeAssistant, supervisor: MagicMock, @@ -1455,7 +1262,7 @@ async def test_addon_installed_already_configured( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call("core_matter_server") + assert start_addon.call_args == call(hass, "core_matter_server") assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfiguration_successful" assert entry.data["url"] == "ws://host1:5581/ws" @@ -1463,19 +1270,7 @@ async def test_addon_installed_already_configured( assert setup_entry.call_count == 1 -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_addon_not_installed( hass: HomeAssistant, supervisor: MagicMock, @@ -1507,7 +1302,7 @@ async def test_addon_not_installed( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call("core_matter_server") + assert install_addon.call_args == call(hass, "core_matter_server") assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -1515,7 +1310,7 @@ async def test_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call("core_matter_server") + assert start_addon.call_args == call(hass, "core_matter_server") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Matter" assert result["data"] == { @@ -1534,7 +1329,7 @@ async def test_addon_not_installed_failures( install_addon: AsyncMock, ) -> None: """Test add-on install failure.""" - install_addon.side_effect = SupervisorError() + install_addon.side_effect = HassioAPIError() result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -1554,7 +1349,7 @@ async def test_addon_not_installed_failures( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call("core_matter_server") + assert install_addon.call_args == call(hass, "core_matter_server") assert addon_info.call_count == 0 assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" @@ -1571,32 +1366,20 @@ async def test_addon_not_installed_failures_zeroconf( zeroconf_info: 
ZeroconfServiceInfo, ) -> None: """Test add-on install failure.""" - install_addon.side_effect = SupervisorError() + install_addon.side_effect = HassioAPIError() result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_ZEROCONF}, data=zeroconf_info ) await hass.async_block_till_done() - assert install_addon.call_args == call("core_matter_server") + assert install_addon.call_args == call(hass, "core_matter_server") assert addon_info.call_count == 0 assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_matter_server", - service="matter", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_addon_not_installed_already_configured( hass: HomeAssistant, supervisor: MagicMock, @@ -1638,7 +1421,7 @@ async def test_addon_not_installed_already_configured( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call("core_matter_server") + assert install_addon.call_args == call(hass, "core_matter_server") assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -1646,7 +1429,7 @@ async def test_addon_not_installed_already_configured( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call("core_matter_server") + assert start_addon.call_args == call(hass, "core_matter_server") assert client_connect.call_count == 1 assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfiguration_successful" diff --git a/tests/components/matter/test_cover.py b/tests/components/matter/test_cover.py index 224aabd9082..f526205234d 100644 --- a/tests/components/matter/test_cover.py +++ b/tests/components/matter/test_cover.py @@ -4,50 +4,50 @@ from math import floor from unittest.mock import MagicMock, call from chip.clusters import Objects as clusters -from matter_server.client.models.node import MatterNode import pytest -from syrupy import SnapshotAssertion -from homeassistant.components.cover import CoverEntityFeature, CoverState -from homeassistant.const import Platform +from homeassistant.components.cover import ( + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, + CoverEntityFeature, +) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - snapshot_matter_entities, + setup_integration_with_node_fixture, trigger_subscription_callback, ) -@pytest.mark.usefixtures("matter_devices") -async def test_covers( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test covers.""" - snapshot_matter_entities(hass, entity_registry, snapshot, Platform.COVER) - - +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("node_fixture", "entity_id"), + ("fixture", "entity_id"), [ - ("window_covering_lift", "cover.mock_lift_window_covering"), - ("window_covering_pa_lift", "cover.longan_link_wncv_da01"), - ("window_covering_tilt", "cover.mock_tilt_window_covering"), - ("window_covering_pa_tilt", "cover.mock_pa_tilt_window_covering"), - ("window_covering_full", 
"cover.mock_full_window_covering"), + ("window-covering_lift", "cover.mock_lift_window_covering_cover"), + ("window-covering_pa-lift", "cover.longan_link_wncv_da01_cover"), + ("window-covering_tilt", "cover.mock_tilt_window_covering_cover"), + ("window-covering_pa-tilt", "cover.mock_pa_tilt_window_covering_cover"), + ("window-covering_full", "cover.mock_full_window_covering_cover"), ], ) async def test_cover( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + fixture: str, entity_id: str, ) -> None: """Test window covering commands that always are implemented.""" + window_covering = await setup_integration_with_node_fixture( + hass, + fixture, + matter_client, + ) + await hass.services.async_call( "cover", "close_cover", @@ -59,7 +59,7 @@ async def test_cover( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=window_covering.node_id, endpoint_id=1, command=clusters.WindowCovering.Commands.DownOrClose(), ) @@ -76,7 +76,7 @@ async def test_cover( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=window_covering.node_id, endpoint_id=1, command=clusters.WindowCovering.Commands.StopMotion(), ) @@ -93,28 +93,37 @@ async def test_cover( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=window_covering.node_id, endpoint_id=1, command=clusters.WindowCovering.Commands.UpOrOpen(), ) matter_client.send_device_command.reset_mock() +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("node_fixture", "entity_id"), + ("fixture", "entity_id"), [ - ("window_covering_lift", "cover.mock_lift_window_covering"), - ("window_covering_pa_lift", "cover.longan_link_wncv_da01"), - ("window_covering_full", "cover.mock_full_window_covering"), + ("window-covering_lift", "cover.mock_lift_window_covering_cover"), + ("window-covering_pa-lift", "cover.longan_link_wncv_da01_cover"), + ("window-covering_full", "cover.mock_full_window_covering_cover"), ], ) async def test_cover_lift( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + fixture: str, entity_id: str, ) -> None: """Test window covering devices with lift and position aware lift features.""" + + window_covering = await setup_integration_with_node_fixture( + hass, + fixture, + matter_client, + ) + await hass.services.async_call( "cover", "set_cover_position", @@ -127,57 +136,65 @@ async def test_cover_lift( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=window_covering.node_id, endpoint_id=1, command=clusters.WindowCovering.Commands.GoToLiftPercentage(5000), ) matter_client.send_device_command.reset_mock() - set_node_attribute(matter_node, 1, 258, 10, 0b001010) + set_node_attribute(window_covering, 1, 258, 10, 0b001010) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING - set_node_attribute(matter_node, 1, 258, 10, 0b000101) + set_node_attribute(window_covering, 1, 258, 10, 0b000101) await trigger_subscription_callback(hass, matter_client) state = 
hass.states.get(entity_id) assert state - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("node_fixture", "entity_id"), + ("fixture", "entity_id"), [ - ("window_covering_lift", "cover.mock_lift_window_covering"), + ("window-covering_lift", "cover.mock_lift_window_covering_cover"), ], ) async def test_cover_lift_only( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + fixture: str, entity_id: str, ) -> None: """Test window covering devices with lift feature and without position aware lift feature.""" - set_node_attribute(matter_node, 1, 258, 14, None) - set_node_attribute(matter_node, 1, 258, 10, 0b000000) + window_covering = await setup_integration_with_node_fixture( + hass, + fixture, + matter_client, + ) + + set_node_attribute(window_covering, 1, 258, 14, None) + set_node_attribute(window_covering, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state assert state.state == "unknown" - set_node_attribute(matter_node, 1, 258, 65529, [0, 1, 2]) + set_node_attribute(window_covering, 1, 258, 65529, [0, 1, 2]) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state assert state.attributes["supported_features"] & CoverEntityFeature.SET_POSITION == 0 - set_node_attribute(matter_node, 1, 258, 65529, [0, 1, 2, 5]) + set_node_attribute(window_covering, 1, 258, 65529, [0, 1, 2, 5]) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -185,20 +202,28 @@ async def test_cover_lift_only( assert state.attributes["supported_features"] & CoverEntityFeature.SET_POSITION != 0 +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("node_fixture", "entity_id"), + ("fixture", "entity_id"), [ - ("window_covering_pa_lift", "cover.longan_link_wncv_da01"), + ("window-covering_pa-lift", "cover.longan_link_wncv_da01_cover"), ], ) async def test_cover_position_aware_lift( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + fixture: str, entity_id: str, ) -> None: """Test window covering devices with position aware lift features.""" + window_covering = await setup_integration_with_node_fixture( + hass, + fixture, + matter_client, + ) + state = hass.states.get(entity_id) assert state mask = ( @@ -210,41 +235,49 @@ async def test_cover_position_aware_lift( assert state.attributes["supported_features"] & mask == mask for position in (0, 9999): - set_node_attribute(matter_node, 1, 258, 14, position) - set_node_attribute(matter_node, 1, 258, 10, 0b000000) + set_node_attribute(window_covering, 1, 258, 14, position) + set_node_attribute(window_covering, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state assert state.attributes["current_position"] == 100 - floor(position / 100) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN - set_node_attribute(matter_node, 1, 258, 14, 10000) - set_node_attribute(matter_node, 1, 258, 10, 0b000000) + set_node_attribute(window_covering, 1, 258, 14, 10000) + set_node_attribute(window_covering, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) 
assert state assert state.attributes["current_position"] == 0 - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("node_fixture", "entity_id"), + ("fixture", "entity_id"), [ - ("window_covering_tilt", "cover.mock_tilt_window_covering"), - ("window_covering_pa_tilt", "cover.mock_pa_tilt_window_covering"), - ("window_covering_full", "cover.mock_full_window_covering"), + ("window-covering_tilt", "cover.mock_tilt_window_covering_cover"), + ("window-covering_pa-tilt", "cover.mock_pa_tilt_window_covering_cover"), + ("window-covering_full", "cover.mock_full_window_covering_cover"), ], ) async def test_cover_tilt( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + fixture: str, entity_id: str, ) -> None: """Test window covering devices with tilt and position aware tilt features.""" + window_covering = await setup_integration_with_node_fixture( + hass, + fixture, + matter_client, + ) + await hass.services.async_call( "cover", "set_cover_tilt_position", @@ -257,7 +290,7 @@ async def test_cover_tilt( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=window_covering.node_id, endpoint_id=1, command=clusters.WindowCovering.Commands.GoToTiltPercentage(5000), ) @@ -265,35 +298,43 @@ async def test_cover_tilt( await trigger_subscription_callback(hass, matter_client) - set_node_attribute(matter_node, 1, 258, 10, 0b100010) + set_node_attribute(window_covering, 1, 258, 10, 0b100010) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING - set_node_attribute(matter_node, 1, 258, 10, 0b010001) + set_node_attribute(window_covering, 1, 258, 10, 0b010001) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("node_fixture", "entity_id"), + ("fixture", "entity_id"), [ - ("window_covering_tilt", "cover.mock_tilt_window_covering"), + ("window-covering_tilt", "cover.mock_tilt_window_covering_cover"), ], ) async def test_cover_tilt_only( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + fixture: str, entity_id: str, ) -> None: """Test window covering devices with tilt feature and without position aware tilt feature.""" - set_node_attribute(matter_node, 1, 258, 65529, [0, 1, 2]) + window_covering = await setup_integration_with_node_fixture( + hass, + fixture, + matter_client, + ) + + set_node_attribute(window_covering, 1, 258, 65529, [0, 1, 2]) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -303,7 +344,7 @@ async def test_cover_tilt_only( == 0 ) - set_node_attribute(matter_node, 1, 258, 65529, [0, 1, 2, 8]) + set_node_attribute(window_covering, 1, 258, 65529, [0, 1, 2, 8]) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -314,20 +355,28 @@ async def test_cover_tilt_only( ) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", 
[True]) @pytest.mark.parametrize( - ("node_fixture", "entity_id"), + ("fixture", "entity_id"), [ - ("window_covering_pa_tilt", "cover.mock_pa_tilt_window_covering"), + ("window-covering_pa-tilt", "cover.mock_pa_tilt_window_covering_cover"), ], ) async def test_cover_position_aware_tilt( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + fixture: str, entity_id: str, ) -> None: """Test window covering devices with position aware tilt feature.""" + window_covering = await setup_integration_with_node_fixture( + hass, + fixture, + matter_client, + ) + state = hass.states.get(entity_id) assert state mask = ( @@ -339,8 +388,8 @@ async def test_cover_position_aware_tilt( assert state.attributes["supported_features"] & mask == mask for tilt_position in (0, 9999, 10000): - set_node_attribute(matter_node, 1, 258, 15, tilt_position) - set_node_attribute(matter_node, 1, 258, 10, 0b000000) + set_node_attribute(window_covering, 1, 258, 15, tilt_position) + set_node_attribute(window_covering, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -350,14 +399,18 @@ async def test_cover_position_aware_tilt( ) -@pytest.mark.parametrize("node_fixture", ["window_covering_full"]) async def test_cover_full_features( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, ) -> None: """Test window covering devices with all the features.""" - entity_id = "cover.mock_full_window_covering" + + window_covering = await setup_integration_with_node_fixture( + hass, + "window-covering_full", + matter_client, + ) + entity_id = "cover.mock_full_window_covering_cover" state = hass.states.get(entity_id) assert state @@ -370,77 +423,77 @@ async def test_cover_full_features( ) assert state.attributes["supported_features"] & mask == mask - set_node_attribute(matter_node, 1, 258, 14, 10000) - set_node_attribute(matter_node, 1, 258, 15, 10000) - set_node_attribute(matter_node, 1, 258, 10, 0b000000) + set_node_attribute(window_covering, 1, 258, 14, 10000) + set_node_attribute(window_covering, 1, 258, 15, 10000) + set_node_attribute(window_covering, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED - set_node_attribute(matter_node, 1, 258, 14, 5000) - set_node_attribute(matter_node, 1, 258, 15, 10000) - set_node_attribute(matter_node, 1, 258, 10, 0b000000) + set_node_attribute(window_covering, 1, 258, 14, 5000) + set_node_attribute(window_covering, 1, 258, 15, 10000) + set_node_attribute(window_covering, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN - set_node_attribute(matter_node, 1, 258, 14, 10000) - set_node_attribute(matter_node, 1, 258, 15, 5000) - set_node_attribute(matter_node, 1, 258, 10, 0b000000) + set_node_attribute(window_covering, 1, 258, 14, 10000) + set_node_attribute(window_covering, 1, 258, 15, 5000) + set_node_attribute(window_covering, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED - set_node_attribute(matter_node, 1, 258, 14, 5000) - set_node_attribute(matter_node, 1, 258, 15, 5000) - set_node_attribute(matter_node, 1, 258, 10, 0b000000) + 
set_node_attribute(window_covering, 1, 258, 14, 5000) + set_node_attribute(window_covering, 1, 258, 15, 5000) + set_node_attribute(window_covering, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN - set_node_attribute(matter_node, 1, 258, 14, 5000) - set_node_attribute(matter_node, 1, 258, 15, None) - set_node_attribute(matter_node, 1, 258, 10, 0b000000) + set_node_attribute(window_covering, 1, 258, 14, 5000) + set_node_attribute(window_covering, 1, 258, 15, None) + set_node_attribute(window_covering, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN - set_node_attribute(matter_node, 1, 258, 14, None) - set_node_attribute(matter_node, 1, 258, 15, 5000) - set_node_attribute(matter_node, 1, 258, 10, 0b000000) + set_node_attribute(window_covering, 1, 258, 14, None) + set_node_attribute(window_covering, 1, 258, 15, 5000) + set_node_attribute(window_covering, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state assert state.state == "unknown" - set_node_attribute(matter_node, 1, 258, 14, 10000) - set_node_attribute(matter_node, 1, 258, 15, None) - set_node_attribute(matter_node, 1, 258, 10, 0b000000) + set_node_attribute(window_covering, 1, 258, 14, 10000) + set_node_attribute(window_covering, 1, 258, 15, None) + set_node_attribute(window_covering, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED - set_node_attribute(matter_node, 1, 258, 14, None) - set_node_attribute(matter_node, 1, 258, 15, 10000) - set_node_attribute(matter_node, 1, 258, 10, 0b000000) + set_node_attribute(window_covering, 1, 258, 14, None) + set_node_attribute(window_covering, 1, 258, 15, 10000) + set_node_attribute(window_covering, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state assert state.state == "unknown" - set_node_attribute(matter_node, 1, 258, 14, None) - set_node_attribute(matter_node, 1, 258, 15, None) - set_node_attribute(matter_node, 1, 258, 10, 0b000000) + set_node_attribute(window_covering, 1, 258, 14, None) + set_node_attribute(window_covering, 1, 258, 15, None) + set_node_attribute(window_covering, 1, 258, 10, 0b000000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state diff --git a/tests/components/matter/test_diagnostics.py b/tests/components/matter/test_diagnostics.py index cfdf305a361..6863619e145 100644 --- a/tests/components/matter/test_diagnostics.py +++ b/tests/components/matter/test_diagnostics.py @@ -6,7 +6,6 @@ import json from typing import Any from unittest.mock import MagicMock -from matter_server.client.models.node import MatterNode from matter_server.common.helpers.util import dataclass_from_dict from matter_server.common.models import ServerDiagnostics import pytest @@ -16,6 +15,8 @@ from homeassistant.components.matter.diagnostics import redact_matter_attributes from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from .common import setup_integration_with_node_fixture + from 
tests.common import MockConfigEntry, load_fixture from tests.components.diagnostics import ( get_diagnostics_for_config_entry, @@ -56,6 +57,8 @@ async def test_matter_attribute_redact(device_diagnostics: dict[str, Any]) -> No assert redacted_device_diagnostics == device_diagnostics +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_config_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -74,7 +77,8 @@ async def test_config_entry_diagnostics( assert diagnostics == config_entry_diagnostics_redacted -@pytest.mark.parametrize("node_fixture", ["device_diagnostics"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_device_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -82,9 +86,9 @@ async def test_device_diagnostics( matter_client: MagicMock, config_entry_diagnostics: dict[str, Any], device_diagnostics: dict[str, Any], - matter_node: MatterNode, ) -> None: """Test the device diagnostics.""" + await setup_integration_with_node_fixture(hass, "device_diagnostics", matter_client) system_info_dict = config_entry_diagnostics["info"] device_diagnostics_redacted = { "server_info": system_info_dict, diff --git a/tests/components/matter/test_lock.py b/tests/components/matter/test_door_lock.py similarity index 58% rename from tests/components/matter/test_lock.py rename to tests/components/matter/test_door_lock.py index 7bcfd381d6c..a0664612aba 100644 --- a/tests/components/matter/test_lock.py +++ b/tests/components/matter/test_door_lock.py @@ -5,50 +5,43 @@ from unittest.mock import MagicMock, call from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode import pytest -from syrupy import SnapshotAssertion -from homeassistant.components.lock import LockEntityFeature, LockState -from homeassistant.const import ATTR_CODE, STATE_UNKNOWN, Platform +from homeassistant.components.lock import ( + STATE_LOCKED, + STATE_LOCKING, + STATE_OPEN, + STATE_UNLOCKED, + STATE_UNLOCKING, + LockEntityFeature, +) +from homeassistant.const import ATTR_CODE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.entity_registry as er -from .common import ( - set_node_attribute, - snapshot_matter_entities, - trigger_subscription_callback, -) +from .common import set_node_attribute, trigger_subscription_callback -@pytest.mark.usefixtures("matter_devices") -async def test_locks( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test locks.""" - snapshot_matter_entities(hass, entity_registry, snapshot, Platform.LOCK) - - -@pytest.mark.parametrize("node_fixture", ["door_lock"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_lock( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + door_lock: MatterNode, ) -> None: """Test door lock.""" await hass.services.async_call( "lock", "unlock", { - "entity_id": "lock.mock_door_lock", + "entity_id": "lock.mock_door_lock_lock", }, blocking=True, ) assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=door_lock.node_id, endpoint_id=1, 
command=clusters.DoorLock.Commands.UnlockDoor(), timed_request_timeout_ms=1000, @@ -59,65 +52,59 @@ async def test_lock( "lock", "lock", { - "entity_id": "lock.mock_door_lock", + "entity_id": "lock.mock_door_lock_lock", }, blocking=True, ) assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=door_lock.node_id, endpoint_id=1, command=clusters.DoorLock.Commands.LockDoor(), timed_request_timeout_ms=1000, ) matter_client.send_device_command.reset_mock() - await hass.async_block_till_done() - state = hass.states.get("lock.mock_door_lock") + state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == LockState.LOCKING + assert state.state == STATE_LOCKED - set_node_attribute(matter_node, 1, 257, 0, 0) + set_node_attribute(door_lock, 1, 257, 0, 0) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock") + state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKING - set_node_attribute(matter_node, 1, 257, 0, 2) + set_node_attribute(door_lock, 1, 257, 0, 2) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock") + state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED - set_node_attribute(matter_node, 1, 257, 0, 1) + set_node_attribute(door_lock, 1, 257, 0, 0) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock") + state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKING - set_node_attribute(matter_node, 1, 257, 0, None) + set_node_attribute(door_lock, 1, 257, 0, None) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock") + state = hass.states.get("lock.mock_door_lock_lock") assert state assert state.state == STATE_UNKNOWN - # test featuremap update - set_node_attribute(matter_node, 1, 257, 65532, 4096) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock") - assert state.attributes["supported_features"] & LockEntityFeature.OPEN - -@pytest.mark.parametrize("node_fixture", ["door_lock"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_lock_requires_pin( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + door_lock: MatterNode, entity_registry: er.EntityRegistry, ) -> None: """Test door lock with PINCode.""" @@ -125,9 +112,9 @@ async def test_lock_requires_pin( code = "1234567" # set RequirePINforRemoteOperation - set_node_attribute(matter_node, 1, 257, 51, True) + set_node_attribute(door_lock, 1, 257, 51, True) # set door state to unlocked - set_node_attribute(matter_node, 1, 257, 0, 2) + set_node_attribute(door_lock, 1, 257, 0, 2) await trigger_subscription_callback(hass, matter_client) with pytest.raises(ServiceValidationError): @@ -135,7 +122,7 @@ async def test_lock_requires_pin( await hass.services.async_call( "lock", "lock", - {"entity_id": "lock.mock_door_lock", ATTR_CODE: "1234"}, + {"entity_id": "lock.mock_door_lock_lock", ATTR_CODE: "1234"}, blocking=True, ) @@ -144,12 +131,12 @@ async def test_lock_requires_pin( await 
hass.services.async_call( "lock", "lock", - {"entity_id": "lock.mock_door_lock", ATTR_CODE: code}, + {"entity_id": "lock.mock_door_lock_lock", ATTR_CODE: code}, blocking=True, ) assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=door_lock.node_id, endpoint_id=1, command=clusters.DoorLock.Commands.LockDoor(code.encode()), timed_request_timeout_ms=1000, @@ -158,48 +145,49 @@ async def test_lock_requires_pin( # Lock door using default code default_code = "7654321" entity_registry.async_update_entity_options( - "lock.mock_door_lock", "lock", {"default_code": default_code} + "lock.mock_door_lock_lock", "lock", {"default_code": default_code} ) await trigger_subscription_callback(hass, matter_client) await hass.services.async_call( "lock", "lock", - {"entity_id": "lock.mock_door_lock"}, + {"entity_id": "lock.mock_door_lock_lock"}, blocking=True, ) assert matter_client.send_device_command.call_count == 2 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=door_lock.node_id, endpoint_id=1, command=clusters.DoorLock.Commands.LockDoor(default_code.encode()), timed_request_timeout_ms=1000, ) -@pytest.mark.parametrize("node_fixture", ["door_lock_with_unbolt"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_lock_with_unbolt( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + door_lock_with_unbolt: MatterNode, ) -> None: """Test door lock.""" - state = hass.states.get("lock.mock_door_lock") + state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKED assert state.attributes["supported_features"] & LockEntityFeature.OPEN # test unlock/unbolt await hass.services.async_call( "lock", "unlock", { - "entity_id": "lock.mock_door_lock", + "entity_id": "lock.mock_door_lock_lock", }, blocking=True, ) assert matter_client.send_device_command.call_count == 1 # unlock should unbolt on a lock with unbolt feature assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=door_lock_with_unbolt.node_id, endpoint_id=1, command=clusters.DoorLock.Commands.UnboltDoor(), timed_request_timeout_ms=1000, @@ -210,33 +198,21 @@ async def test_lock_with_unbolt( "lock", "open", { - "entity_id": "lock.mock_door_lock", + "entity_id": "lock.mock_door_lock_lock", }, blocking=True, ) assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=door_lock_with_unbolt.node_id, endpoint_id=1, command=clusters.DoorLock.Commands.UnlockDoor(), timed_request_timeout_ms=1000, ) - await hass.async_block_till_done() - state = hass.states.get("lock.mock_door_lock") - assert state - assert state.state == LockState.OPENING - - set_node_attribute(matter_node, 1, 257, 0, 0) + set_node_attribute(door_lock_with_unbolt, 1, 257, 3, 0) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock") + state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == LockState.UNLOCKED - - set_node_attribute(matter_node, 1, 257, 0, 3) - await trigger_subscription_callback(hass, matter_client) - - state = hass.states.get("lock.mock_door_lock") - assert state - assert state.state == LockState.OPEN + assert 
state.state == STATE_OPEN diff --git a/tests/components/matter/test_event.py b/tests/components/matter/test_event.py index f3a318c4e8b..a7bd7c91f7b 100644 --- a/tests/components/matter/test_event.py +++ b/tests/components/matter/test_event.py @@ -5,31 +5,39 @@ from unittest.mock import MagicMock from matter_server.client.models.node import MatterNode from matter_server.common.models import EventType, MatterNodeEvent import pytest -from syrupy import SnapshotAssertion from homeassistant.components.event import ATTR_EVENT_TYPE, ATTR_EVENT_TYPES -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from .common import snapshot_matter_entities, trigger_subscription_callback +from .common import setup_integration_with_node_fixture, trigger_subscription_callback -@pytest.mark.usefixtures("matter_devices") -async def test_events( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test events.""" - snapshot_matter_entities(hass, entity_registry, snapshot, Platform.EVENT) +@pytest.fixture(name="generic_switch_node") +async def switch_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a GenericSwitch node.""" + return await setup_integration_with_node_fixture( + hass, "generic-switch", matter_client + ) -@pytest.mark.parametrize("node_fixture", ["generic_switch"]) +@pytest.fixture(name="generic_switch_multi_node") +async def multi_switch_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a GenericSwitch node with multiple buttons.""" + return await setup_integration_with_node_fixture( + hass, "generic-switch-multi", matter_client + ) + + +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_generic_switch_node( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + generic_switch_node: MatterNode, ) -> None: """Test event entity for a GenericSwitch node.""" state = hass.states.get("event.mock_generic_switch_button") @@ -42,6 +50,8 @@ async def test_generic_switch_node( "short_release", "long_press", "long_release", + "multi_press_ongoing", + "multi_press_complete", ] # trigger firing a new event from the device await trigger_subscription_callback( @@ -49,7 +59,7 @@ async def test_generic_switch_node( matter_client, EventType.NODE_EVENT, MatterNodeEvent( - node_id=matter_node.node_id, + node_id=generic_switch_node.node_id, endpoint_id=1, cluster_id=59, event_id=1, @@ -62,13 +72,34 @@ async def test_generic_switch_node( ) state = hass.states.get("event.mock_generic_switch_button") assert state.attributes[ATTR_EVENT_TYPE] == "initial_press" + # trigger firing a multi press event + await trigger_subscription_callback( + hass, + matter_client, + EventType.NODE_EVENT, + MatterNodeEvent( + node_id=generic_switch_node.node_id, + endpoint_id=1, + cluster_id=59, + event_id=5, + event_number=0, + priority=1, + timestamp=0, + timestamp_type=0, + data={"NewPosition": 3}, + ), + ) + state = hass.states.get("event.mock_generic_switch_button") + assert state.attributes[ATTR_EVENT_TYPE] == "multi_press_ongoing" + assert state.attributes["NewPosition"] == 3 -@pytest.mark.parametrize("node_fixture", ["generic_switch_multi"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def 
test_generic_switch_multi_node( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + generic_switch_multi_node: MatterNode, ) -> None: """Test event entity for a GenericSwitch node with multiple buttons.""" state_button_1 = hass.states.get("event.mock_generic_switch_button_1") @@ -78,8 +109,8 @@ async def test_generic_switch_multi_node( assert state_button_1.name == "Mock Generic Switch Button (1)" # check event_types from featuremap 14 assert state_button_1.attributes[ATTR_EVENT_TYPES] == [ - "multi_press_1", - "multi_press_2", + "initial_press", + "short_release", "long_press", "long_release", ] @@ -89,23 +120,3 @@ async def test_generic_switch_multi_node( assert state_button_1.state == "unknown" # name should be 'DeviceName Fancy Button' due to the label set to 'Fancy Button' assert state_button_1.name == "Mock Generic Switch Fancy Button" - - # trigger firing a multi press event - await trigger_subscription_callback( - hass, - matter_client, - EventType.NODE_EVENT, - MatterNodeEvent( - node_id=matter_node.node_id, - endpoint_id=1, - cluster_id=59, - event_id=6, - event_number=0, - priority=1, - timestamp=0, - timestamp_type=0, - data={"totalNumberOfPressesCounted": 2}, - ), - ) - state = hass.states.get("event.mock_generic_switch_button_1") - assert state.attributes[ATTR_EVENT_TYPE] == "multi_press_2" diff --git a/tests/components/matter/test_fan.py b/tests/components/matter/test_fan.py index 6ed95b0ecc2..30bd7f4a009 100644 --- a/tests/components/matter/test_fan.py +++ b/tests/components/matter/test_fan.py @@ -4,7 +4,6 @@ from unittest.mock import MagicMock, call from matter_server.client.models.node import MatterNode import pytest -from syrupy import SnapshotAssertion from homeassistant.components.fan import ( ATTR_DIRECTION, @@ -18,40 +17,35 @@ from homeassistant.components.fan import ( SERVICE_SET_DIRECTION, FanEntityFeature, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - snapshot_matter_entities, + setup_integration_with_node_fixture, trigger_subscription_callback, ) -@pytest.mark.usefixtures("matter_devices") -async def test_fans( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test fans.""" - snapshot_matter_entities(hass, entity_registry, snapshot, Platform.FAN) +@pytest.fixture(name="air_purifier") +async def air_purifier_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a Air Purifier node (containing Fan cluster).""" + return await setup_integration_with_node_fixture( + hass, "air-purifier", matter_client + ) -@pytest.mark.parametrize("node_fixture", ["air_purifier"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_fan_base( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + air_purifier: MatterNode, ) -> None: """Test Fan platform.""" - entity_id = "fan.air_purifier" + entity_id = "fan.air_purifier_fan" state = hass.states.get(entity_id) assert state assert state.attributes["preset_modes"] == [ @@ -75,51 +69,38 @@ async def test_fan_base( ) assert state.attributes["supported_features"] & mask == mask # handle fan mode update - 
set_node_attribute(matter_node, 1, 514, 0, 1) + set_node_attribute(air_purifier, 1, 514, 0, 1) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.attributes["preset_mode"] == "low" # handle direction update - set_node_attribute(matter_node, 1, 514, 11, 1) + set_node_attribute(air_purifier, 1, 514, 11, 1) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.attributes["direction"] == "reverse" # handle rock/oscillation update - set_node_attribute(matter_node, 1, 514, 8, 1) + set_node_attribute(air_purifier, 1, 514, 8, 1) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.attributes["oscillating"] is True # handle wind mode active translates to correct preset - set_node_attribute(matter_node, 1, 514, 10, 2) + set_node_attribute(air_purifier, 1, 514, 10, 2) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.attributes["preset_mode"] == "natural_wind" - set_node_attribute(matter_node, 1, 514, 10, 1) + set_node_attribute(air_purifier, 1, 514, 10, 1) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.attributes["preset_mode"] == "sleep_wind" - # set mains power to OFF (OnOff cluster) - set_node_attribute(matter_node, 1, 6, 0, False) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state.attributes["preset_mode"] is None - assert state.attributes["percentage"] == 0 - # test featuremap update - set_node_attribute(matter_node, 1, 514, 65532, 1) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state.attributes["supported_features"] & FanEntityFeature.SET_SPEED -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -@pytest.mark.parametrize("node_fixture", ["air_purifier"]) async def test_fan_turn_on_with_percentage( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + air_purifier: MatterNode, ) -> None: """Test turning on the fan with a specific percentage.""" - entity_id = "fan.air_purifier" + entity_id = "fan.air_purifier_fan" await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, @@ -128,36 +109,19 @@ async def test_fan_turn_on_with_percentage( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=matter_node.node_id, + node_id=air_purifier.node_id, attribute_path="1/514/2", value=50, ) - # test again where preset_mode is omitted in the service call - # which should select the last active percentage - matter_client.write_attribute.reset_mock() - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - assert matter_client.write_attribute.call_count == 1 - assert matter_client.write_attribute.call_args == call( - node_id=matter_node.node_id, - attribute_path="1/514/2", - value=255, - ) -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -@pytest.mark.parametrize("node_fixture", ["fan"]) async def test_fan_turn_on_with_preset_mode( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + air_purifier: MatterNode, ) -> None: """Test turning on the fan with a specific preset mode.""" - entity_id = "fan.mocked_fan_switch" + entity_id = "fan.air_purifier_fan" await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, @@ -166,7 +130,7 @@ 
async def test_fan_turn_on_with_preset_mode( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=matter_node.node_id, + node_id=air_purifier.node_id, attribute_path="1/514/0", value=2, ) @@ -181,13 +145,28 @@ async def test_fan_turn_on_with_preset_mode( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=matter_node.node_id, + node_id=air_purifier.node_id, attribute_path="1/514/10", value=value, ) + # test again where preset_mode is omitted in the service call + # which should select a default preset mode + matter_client.write_attribute.reset_mock() + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert matter_client.write_attribute.call_count == 1 + assert matter_client.write_attribute.call_args == call( + node_id=air_purifier.node_id, + attribute_path="1/514/0", + value=5, + ) # test again if wind mode is explicitly turned off when we set a new preset mode matter_client.write_attribute.reset_mock() - set_node_attribute(matter_node, 1, 514, 10, 2) + set_node_attribute(air_purifier, 1, 514, 10, 2) await trigger_subscription_callback(hass, matter_client) await hass.services.async_call( FAN_DOMAIN, @@ -197,43 +176,24 @@ async def test_fan_turn_on_with_preset_mode( ) assert matter_client.write_attribute.call_count == 2 assert matter_client.write_attribute.call_args_list[0] == call( - node_id=matter_node.node_id, + node_id=air_purifier.node_id, attribute_path="1/514/10", value=0, ) assert matter_client.write_attribute.call_args == call( - node_id=matter_node.node_id, + node_id=air_purifier.node_id, attribute_path="1/514/0", value=2, ) - # test again where preset_mode is omitted in the service call - # which should select the last active preset - matter_client.write_attribute.reset_mock() - set_node_attribute(matter_node, 1, 514, 0, 1) - set_node_attribute(matter_node, 1, 514, 10, 0) - await trigger_subscription_callback(hass, matter_client) - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - assert matter_client.write_attribute.call_count == 1 - assert matter_client.write_attribute.call_args == call( - node_id=matter_node.node_id, - attribute_path="1/514/0", - value=1, - ) -@pytest.mark.parametrize("node_fixture", ["air_purifier"]) async def test_fan_turn_off( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + air_purifier: MatterNode, ) -> None: """Test turning off the fan.""" - entity_id = "fan.air_purifier" + entity_id = "fan.air_purifier_fan" await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_OFF, @@ -242,13 +202,13 @@ async def test_fan_turn_off( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=matter_node.node_id, + node_id=air_purifier.node_id, attribute_path="1/514/0", value=0, ) matter_client.write_attribute.reset_mock() # test again if wind mode is turned off - set_node_attribute(matter_node, 1, 514, 10, 2) + set_node_attribute(air_purifier, 1, 514, 10, 2) await trigger_subscription_callback(hass, matter_client) await hass.services.async_call( FAN_DOMAIN, @@ -258,25 +218,24 @@ async def test_fan_turn_off( ) assert matter_client.write_attribute.call_count == 2 assert matter_client.write_attribute.call_args_list[0] == call( - node_id=matter_node.node_id, + node_id=air_purifier.node_id, 
attribute_path="1/514/10", value=0, ) assert matter_client.write_attribute.call_args_list[1] == call( - node_id=matter_node.node_id, + node_id=air_purifier.node_id, attribute_path="1/514/0", value=0, ) -@pytest.mark.parametrize("node_fixture", ["air_purifier"]) async def test_fan_oscillate( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + air_purifier: MatterNode, ) -> None: """Test oscillating the fan.""" - entity_id = "fan.air_purifier" + entity_id = "fan.air_purifier_fan" for oscillating, value in ((True, 1), (False, 0)): await hass.services.async_call( FAN_DOMAIN, @@ -286,21 +245,20 @@ async def test_fan_oscillate( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=matter_node.node_id, + node_id=air_purifier.node_id, attribute_path="1/514/8", value=value, ) matter_client.write_attribute.reset_mock() -@pytest.mark.parametrize("node_fixture", ["air_purifier"]) async def test_fan_set_direction( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + air_purifier: MatterNode, ) -> None: """Test oscillating the fan.""" - entity_id = "fan.air_purifier" + entity_id = "fan.air_purifier_fan" for direction, value in ((DIRECTION_FORWARD, 0), (DIRECTION_REVERSE, 1)): await hass.services.async_call( FAN_DOMAIN, @@ -310,134 +268,8 @@ async def test_fan_set_direction( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=matter_node.node_id, + node_id=air_purifier.node_id, attribute_path="1/514/11", value=value, ) matter_client.write_attribute.reset_mock() - - -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -@pytest.mark.parametrize( - ("node_fixture", "entity_id", "attributes", "features"), - [ - ( - "fan", - "fan.mocked_fan_switch", - { - "1/514/65532": 0, - }, - (FanEntityFeature.TURN_ON | FanEntityFeature.TURN_OFF), - ), - ( - "fan", - "fan.mocked_fan_switch", - { - "1/514/65532": 1, - }, - ( - FanEntityFeature.TURN_ON - | FanEntityFeature.TURN_OFF - | FanEntityFeature.SET_SPEED - ), - ), - ( - "fan", - "fan.mocked_fan_switch", - { - "1/514/65532": 4, - }, - ( - FanEntityFeature.TURN_ON - | FanEntityFeature.TURN_OFF - | FanEntityFeature.OSCILLATE - ), - ), - ( - "fan", - "fan.mocked_fan_switch", - { - "1/514/65532": 36, - }, - ( - FanEntityFeature.TURN_ON - | FanEntityFeature.TURN_OFF - | FanEntityFeature.OSCILLATE - | FanEntityFeature.DIRECTION - ), - ), - ], -) -async def test_fan_supported_features( - hass: HomeAssistant, - matter_client: MagicMock, - matter_node: MatterNode, - entity_id: str, - features: int, -) -> None: - """Test if the correct features get discovered from featuremap.""" - state = hass.states.get(entity_id) - assert state - assert state.attributes["supported_features"] & features == features - - -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -@pytest.mark.parametrize( - ("node_fixture", "entity_id", "attributes", "preset_modes"), - [ - ( - "fan", - "fan.mocked_fan_switch", - {"1/514/1": 0, "1/514/65532": 0}, - [ - "low", - "medium", - "high", - ], - ), - ( - "fan", - "fan.mocked_fan_switch", - {"1/514/1": 1, "1/514/65532": 0}, - [ - "low", - "high", - ], - ), - ( - "fan", - "fan.mocked_fan_switch", - {"1/514/1": 2, "1/514/65532": 0}, - ["low", "medium", "high", "auto"], - ), - ( - "fan", - "fan.mocked_fan_switch", - {"1/514/1": 4, "1/514/65532": 0}, - ["high", "auto"], - ), - ( - "fan", - "fan.mocked_fan_switch", - {"1/514/1": 5, "1/514/65532": 0}, - ["high"], - ), 
- ( - "fan", - "fan.mocked_fan_switch", - {"1/514/1": 5, "1/514/65532": 8, "1/514/9": 3}, - ["high", "natural_wind", "sleep_wind"], - ), - ], -) -async def test_fan_features( - hass: HomeAssistant, - matter_client: MagicMock, - matter_node: MatterNode, - entity_id: str, - preset_modes: list[str], -) -> None: - """Test if the correct presets get discovered from fanmodesequence.""" - state = hass.states.get(entity_id) - assert state - assert state.attributes["preset_modes"] == preset_modes diff --git a/tests/components/matter/test_helpers.py b/tests/components/matter/test_helpers.py index 2f89f3703ef..a4b5e165a93 100644 --- a/tests/components/matter/test_helpers.py +++ b/tests/components/matter/test_helpers.py @@ -4,7 +4,6 @@ from __future__ import annotations from unittest.mock import MagicMock -from matter_server.client.models.node import MatterNode import pytest from homeassistant.components.matter.const import DOMAIN @@ -20,18 +19,23 @@ from .common import setup_integration_with_node_fixture from tests.common import MockConfigEntry -@pytest.mark.parametrize("node_fixture", ["device_diagnostics"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_get_device_id( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, ) -> None: """Test get_device_id.""" - device_id = get_device_id(matter_client.server_info, matter_node.endpoints[0]) + node = await setup_integration_with_node_fixture( + hass, "device_diagnostics", matter_client + ) + device_id = get_device_id(matter_client.server_info, node.endpoints[0]) assert device_id == "00000000000004D2-0000000000000005-MatterNodeDevice" +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_get_node_from_device_entry( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/matter/test_init.py b/tests/components/matter/test_init.py index f6576689413..c28385efca3 100644 --- a/tests/components/matter/test_init.py +++ b/tests/components/matter/test_init.py @@ -3,18 +3,19 @@ from __future__ import annotations import asyncio -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, call, patch -from aiohasupervisor import SupervisorError from matter_server.client.exceptions import ( CannotConnect, - NotConnected, ServerVersionTooNew, ServerVersionTooOld, ) +from matter_server.client.models.node import MatterNode from matter_server.common.errors import MatterError +from matter_server.common.helpers.util import dataclass_from_dict +from matter_server.common.models import MatterNodeData import pytest +from typing_extensions import Generator from homeassistant.components.hassio import HassioAPIError from homeassistant.components.matter.const import DOMAIN @@ -28,7 +29,7 @@ from homeassistant.helpers import ( ) from homeassistant.setup import async_setup_component -from .common import create_node_from_fixture, setup_integration_with_node_fixture +from .common import load_and_parse_node_fixture, setup_integration_with_node_fixture from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator @@ -55,7 +56,13 @@ async def test_entry_setup_unload( matter_client: MagicMock, ) -> None: """Test the integration set up and unload.""" - node = create_node_from_fixture("onoff_light") + node_data = load_and_parse_node_fixture("onoff-light") + node = MatterNode( + dataclass_from_dict( + MatterNodeData, + 
node_data, + ) + ) matter_client.get_nodes.return_value = [node] matter_client.get_node.return_value = node entry = MockConfigEntry(domain="matter", data={"url": "ws://localhost:5580/ws"}) @@ -65,9 +72,8 @@ async def test_entry_setup_unload( await hass.async_block_till_done() assert matter_client.connect.call_count == 1 - assert matter_client.set_default_fabric_label.call_count == 1 assert entry.state is ConfigEntryState.LOADED - entity_state = hass.states.get("light.mock_onoff_light") + entity_state = hass.states.get("light.mock_onoff_light_light") assert entity_state assert entity_state.state != STATE_UNAVAILABLE @@ -75,11 +81,13 @@ async def test_entry_setup_unload( assert matter_client.disconnect.call_count == 1 assert entry.state is ConfigEntryState.NOT_LOADED - entity_state = hass.states.get("light.mock_onoff_light") + entity_state = hass.states.get("light.mock_onoff_light_light") assert entity_state assert entity_state.state == STATE_UNAVAILABLE +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_home_assistant_stop( hass: HomeAssistant, matter_client: MagicMock, @@ -108,26 +116,6 @@ async def test_connect_failed( assert entry.state is ConfigEntryState.SETUP_RETRY -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -async def test_set_default_fabric_label_failed( - hass: HomeAssistant, - matter_client: MagicMock, -) -> None: - """Test failure during client connection.""" - entry = MockConfigEntry(domain=DOMAIN, data={"url": "ws://localhost:5580/ws"}) - entry.add_to_hass(hass) - - matter_client.set_default_fabric_label.side_effect = NotConnected() - - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert matter_client.connect.call_count == 1 - assert matter_client.set_default_fabric_label.call_count == 1 - - assert entry.state is ConfigEntryState.SETUP_RETRY - - async def test_connect_timeout( hass: HomeAssistant, matter_client: MagicMock, @@ -246,10 +234,10 @@ async def test_raise_addon_task_in_progress( install_addon_original_side_effect = install_addon.side_effect - async def install_addon_side_effect(slug: str) -> None: + async def install_addon_side_effect(hass: HomeAssistant, slug: str) -> None: """Mock install add-on.""" await install_event.wait() - await install_addon_original_side_effect(slug) + await install_addon_original_side_effect(hass, slug) install_addon.side_effect = install_addon_side_effect @@ -310,7 +298,7 @@ async def test_start_addon( assert addon_info.call_count == 1 assert install_addon.call_count == 0 assert start_addon.call_count == 1 - assert start_addon.call_args == call("core_matter_server") + assert start_addon.call_args == call(hass, "core_matter_server") async def test_install_addon( @@ -337,9 +325,9 @@ async def test_install_addon( assert entry.state is ConfigEntryState.SETUP_RETRY assert addon_store_info.call_count == 3 assert install_addon.call_count == 1 - assert install_addon.call_args == call("core_matter_server") + assert install_addon.call_args == call(hass, "core_matter_server") assert start_addon.call_count == 1 - assert start_addon.call_args == call("core_matter_server") + assert start_addon.call_args == call(hass, "core_matter_server") async def test_addon_info_failure( @@ -350,7 +338,7 @@ async def test_addon_info_failure( start_addon: AsyncMock, ) -> None: """Test failure to get add-on info for Matter add-on during entry setup.""" - addon_info.side_effect = SupervisorError("Boom") + 
addon_info.side_effect = HassioAPIError("Boom") entry = MockConfigEntry( domain=DOMAIN, title="Matter", @@ -389,7 +377,7 @@ async def test_addon_info_failure( True, 1, 1, - SupervisorError("Boom"), + HassioAPIError("Boom"), None, ServerVersionTooOld("Invalid version"), ), @@ -423,8 +411,8 @@ async def test_update_addon( connect_side_effect: Exception, ) -> None: """Test update the Matter add-on during entry setup.""" - addon_info.return_value.version = addon_version - addon_info.return_value.update_available = update_available + addon_info.return_value["version"] = addon_version + addon_info.return_value["update_available"] = update_available create_backup.side_effect = create_backup_side_effect update_addon.side_effect = update_addon_side_effect matter_client.connect.side_effect = connect_side_effect @@ -446,6 +434,8 @@ async def test_update_addon( assert update_addon.call_count == update_calls +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( ( "connect_side_effect", @@ -502,7 +492,7 @@ async def test_issue_registry_invalid_version( ("stop_addon_side_effect", "entry_state"), [ (None, ConfigEntryState.NOT_LOADED), - (SupervisorError("Boom"), ConfigEntryState.LOADED), + (HassioAPIError("Boom"), ConfigEntryState.LOADED), ], ) async def test_stop_addon( @@ -541,7 +531,7 @@ async def test_stop_addon( assert entry.state == entry_state assert stop_addon.call_count == 1 - assert stop_addon.call_args == call("core_matter_server") + assert stop_addon.call_args == call(hass, "core_matter_server") async def test_remove_entry( @@ -580,7 +570,7 @@ async def test_remove_entry( await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call("core_matter_server") + assert stop_addon.call_args == call(hass, "core_matter_server") assert create_backup.call_count == 1 assert create_backup.call_args == call( hass, @@ -588,7 +578,7 @@ async def test_remove_entry( partial=True, ) assert uninstall_addon.call_count == 1 - assert uninstall_addon.call_args == call("core_matter_server") + assert uninstall_addon.call_args == call(hass, "core_matter_server") assert entry.state is ConfigEntryState.NOT_LOADED assert len(hass.config_entries.async_entries(DOMAIN)) == 0 stop_addon.reset_mock() @@ -598,12 +588,12 @@ async def test_remove_entry( # test add-on stop failure entry.add_to_hass(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - stop_addon.side_effect = SupervisorError() + stop_addon.side_effect = HassioAPIError() await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call("core_matter_server") + assert stop_addon.call_args == call(hass, "core_matter_server") assert create_backup.call_count == 0 assert uninstall_addon.call_count == 0 assert entry.state is ConfigEntryState.NOT_LOADED @@ -622,7 +612,7 @@ async def test_remove_entry( await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call("core_matter_server") + assert stop_addon.call_args == call(hass, "core_matter_server") assert create_backup.call_count == 1 assert create_backup.call_args == call( hass, @@ -641,12 +631,12 @@ async def test_remove_entry( # test add-on uninstall failure entry.add_to_hass(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - uninstall_addon.side_effect = SupervisorError() + uninstall_addon.side_effect = 
HassioAPIError() await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call("core_matter_server") + assert stop_addon.call_args == call(hass, "core_matter_server") assert create_backup.call_count == 1 assert create_backup.call_args == call( hass, @@ -654,12 +644,14 @@ async def test_remove_entry( partial=True, ) assert uninstall_addon.call_count == 1 - assert uninstall_addon.call_args == call("core_matter_server") + assert uninstall_addon.call_args == call(hass, "core_matter_server") assert entry.state is ConfigEntryState.NOT_LOADED assert len(hass.config_entries.async_entries(DOMAIN)) == 0 assert "Failed to uninstall the Matter Server add-on" in caplog.text +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_remove_config_entry_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -676,7 +668,7 @@ async def test_remove_config_entry_device( device_entry = dr.async_entries_for_config_entry( device_registry, config_entry.entry_id )[0] - entity_id = "light.m5stamp_lighting_app" + entity_id = "light.m5stamp_lighting_app_light" assert device_entry assert entity_registry.async_get(entity_id) @@ -692,6 +684,8 @@ async def test_remove_config_entry_device( assert not hass.states.get(entity_id) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_remove_config_entry_device_no_node( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/matter/test_light.py b/tests/components/matter/test_light.py index c49b47c9106..4fd73b6457b 100644 --- a/tests/components/matter/test_light.py +++ b/tests/components/matter/test_light.py @@ -3,61 +3,55 @@ from unittest.mock import MagicMock, call from chip.clusters import Objects as clusters -from matter_server.client.models.node import MatterNode import pytest -from syrupy import SnapshotAssertion from homeassistant.components.light import ColorMode -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - snapshot_matter_entities, + setup_integration_with_node_fixture, trigger_subscription_callback, ) -@pytest.mark.usefixtures("matter_devices") -async def test_lights( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test lights.""" - snapshot_matter_entities(hass, entity_registry, snapshot, Platform.LIGHT) - - +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("node_fixture", "entity_id", "supported_color_modes"), + ("fixture", "entity_id", "supported_color_modes"), [ ( - "extended_color_light", - "light.mock_extended_color_light", + "extended-color-light", + "light.mock_extended_color_light_light", ["color_temp", "hs", "xy"], ), ( - "color_temperature_light", - "light.mock_color_temperature_light", + "color-temperature-light", + "light.mock_color_temperature_light_light", ["color_temp"], ), - ("dimmable_light", "light.mock_dimmable_light", ["brightness"]), - ("onoff_light", "light.mock_onoff_light", ["onoff"]), - ("onoff_light_with_levelcontrol_present", "light.d215s", ["onoff"]), + ("dimmable-light", "light.mock_dimmable_light_light", ["brightness"]), + ("onoff-light", 
"light.mock_onoff_light_light", ["onoff"]), + ("onoff-light-with-levelcontrol-present", "light.d215s_light", ["onoff"]), ], ) async def test_light_turn_on_off( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + fixture: str, entity_id: str, supported_color_modes: list[str], ) -> None: """Test basic light discovery and turn on/off.""" + light_node = await setup_integration_with_node_fixture( + hass, + fixture, + matter_client, + ) + # Test that the light is off - set_node_attribute(matter_node, 1, 6, 0, False) + set_node_attribute(light_node, 1, 6, 0, False) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -70,7 +64,7 @@ async def test_light_turn_on_off( assert state.attributes["supported_color_modes"] == supported_color_modes # Test that the light is on - set_node_attribute(matter_node, 1, 6, 0, True) + set_node_attribute(light_node, 1, 6, 0, True) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -89,7 +83,7 @@ async def test_light_turn_on_off( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.Off(), ) @@ -107,32 +101,40 @@ async def test_light_turn_on_off( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ) matter_client.send_device_command.reset_mock() +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("node_fixture", "entity_id"), + ("fixture", "entity_id"), [ - ("extended_color_light", "light.mock_extended_color_light"), - ("color_temperature_light", "light.mock_color_temperature_light"), - ("dimmable_light", "light.mock_dimmable_light"), - ("dimmable_plugin_unit", "light.dimmable_plugin_unit"), + ("extended-color-light", "light.mock_extended_color_light_light"), + ("color-temperature-light", "light.mock_color_temperature_light_light"), + ("dimmable-light", "light.mock_dimmable_light_light"), + ("dimmable-plugin-unit", "light.dimmable_plugin_unit_light"), ], ) async def test_dimmable_light( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + fixture: str, entity_id: str, ) -> None: """Test a dimmable light.""" + light_node = await setup_integration_with_node_fixture( + hass, + fixture, + matter_client, + ) + # Test that the light brightness is 50 (out of 254) - set_node_attribute(matter_node, 1, 8, 0, 50) + set_node_attribute(light_node, 1, 8, 0, 50) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -153,11 +155,11 @@ async def test_dimmable_light( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, command=clusters.LevelControl.Commands.MoveToLevelWithOnOff( level=128, - transitionTime=0, + transitionTime=2, ), ) matter_client.send_device_command.reset_mock() @@ -172,7 +174,7 @@ async def test_dimmable_light( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, 
command=clusters.LevelControl.Commands.MoveToLevelWithOnOff( level=128, @@ -182,23 +184,32 @@ async def test_dimmable_light( matter_client.send_device_command.reset_mock() +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("node_fixture", "entity_id"), + ("fixture", "entity_id"), [ - ("extended_color_light", "light.mock_extended_color_light"), - ("color_temperature_light", "light.mock_color_temperature_light"), + ("extended-color-light", "light.mock_extended_color_light_light"), + ("color-temperature-light", "light.mock_color_temperature_light_light"), ], ) async def test_color_temperature_light( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + fixture: str, entity_id: str, ) -> None: """Test a color temperature light.""" + + light_node = await setup_integration_with_node_fixture( + hass, + fixture, + matter_client, + ) + # Test that the light color temperature is 3000 (out of 50000) - set_node_attribute(matter_node, 1, 768, 8, 2) - set_node_attribute(matter_node, 1, 768, 7, 3000) + set_node_attribute(light_node, 1, 768, 8, 2) + set_node_attribute(light_node, 1, 768, 7, 3000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -222,17 +233,17 @@ async def test_color_temperature_light( matter_client.send_device_command.assert_has_calls( [ call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, command=clusters.ColorControl.Commands.MoveToColorTemperature( colorTemperatureMireds=300, - transitionTime=0, + transitionTime=2, optionsMask=1, optionsOverride=1, ), ), call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ), @@ -252,7 +263,7 @@ async def test_color_temperature_light( matter_client.send_device_command.assert_has_calls( [ call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, command=clusters.ColorControl.Commands.MoveToColorTemperature( colorTemperatureMireds=300, @@ -262,7 +273,7 @@ async def test_color_temperature_light( ), ), call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ), @@ -271,24 +282,32 @@ async def test_color_temperature_light( matter_client.send_device_command.reset_mock() +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) @pytest.mark.parametrize( - ("node_fixture", "entity_id"), + ("fixture", "entity_id"), [ - ("extended_color_light", "light.mock_extended_color_light"), + ("extended-color-light", "light.mock_extended_color_light_light"), ], ) async def test_extended_color_light( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + fixture: str, entity_id: str, ) -> None: """Test an extended color light.""" + light_node = await setup_integration_with_node_fixture( + hass, + fixture, + matter_client, + ) + # Test that the XY color changes - set_node_attribute(matter_node, 1, 768, 8, 1) - set_node_attribute(matter_node, 1, 768, 3, 50) - set_node_attribute(matter_node, 1, 768, 4, 100) + set_node_attribute(light_node, 1, 768, 8, 1) + set_node_attribute(light_node, 1, 768, 3, 50) + set_node_attribute(light_node, 1, 768, 4, 100) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -298,9 +317,9 @@ async def test_extended_color_light( assert state.attributes["xy_color"] == (0.0007630, 
0.001526) # Test that the HS color changes - set_node_attribute(matter_node, 1, 768, 8, 0) - set_node_attribute(matter_node, 1, 768, 1, 50) - set_node_attribute(matter_node, 1, 768, 0, 100) + set_node_attribute(light_node, 1, 768, 8, 0) + set_node_attribute(light_node, 1, 768, 1, 50) + set_node_attribute(light_node, 1, 768, 0, 100) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -324,18 +343,18 @@ async def test_extended_color_light( matter_client.send_device_command.assert_has_calls( [ call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, command=clusters.ColorControl.Commands.MoveToColor( colorX=0.5 * 65536, colorY=0.5 * 65536, - transitionTime=0, + transitionTime=2, optionsMask=1, optionsOverride=1, ), ), call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ), @@ -355,7 +374,7 @@ async def test_extended_color_light( matter_client.send_device_command.assert_has_calls( [ call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, command=clusters.ColorControl.Commands.MoveToColor( colorX=0.5 * 65536, @@ -366,7 +385,7 @@ async def test_extended_color_light( ), ), call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ), @@ -394,13 +413,13 @@ async def test_extended_color_light( command=clusters.ColorControl.Commands.MoveToHueAndSaturation( hue=167, saturation=254, - transitionTime=0, + transitionTime=2, optionsMask=1, optionsOverride=1, ), ), call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ), @@ -435,7 +454,7 @@ async def test_extended_color_light( ), ), call( - node_id=matter_node.node_id, + node_id=light_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ), diff --git a/tests/components/matter/test_number.py b/tests/components/matter/test_number.py index 86e1fbbf419..917f8138c7a 100644 --- a/tests/components/matter/test_number.py +++ b/tests/components/matter/test_number.py @@ -1,39 +1,35 @@ """Test Matter number entities.""" -from unittest.mock import MagicMock, call +from unittest.mock import MagicMock from matter_server.client.models.node import MatterNode -from matter_server.common import custom_clusters -from matter_server.common.helpers.util import create_attribute_path_from_attribute import pytest -from syrupy import SnapshotAssertion -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - snapshot_matter_entities, + setup_integration_with_node_fixture, trigger_subscription_callback, ) -@pytest.mark.usefixtures("matter_devices") -async def test_numbers( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test numbers.""" - snapshot_matter_entities(hass, entity_registry, snapshot, Platform.NUMBER) +@pytest.fixture(name="light_node") +async def dimmable_light_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a flow sensor node.""" + return await setup_integration_with_node_fixture( + hass, "dimmable-light", matter_client + ) -@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def 
test_level_control_config_entities( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + light_node: MatterNode, ) -> None: """Test number entities are created for the LevelControl cluster (config) attributes.""" state = hass.states.get("number.mock_dimmable_light_on_level") @@ -52,48 +48,9 @@ async def test_level_control_config_entities( assert state assert state.state == "0.0" - set_node_attribute(matter_node, 1, 0x00000008, 0x0011, 20) + set_node_attribute(light_node, 1, 0x00000008, 0x0011, 20) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("number.mock_dimmable_light_on_level") assert state assert state.state == "20" - - -@pytest.mark.parametrize("node_fixture", ["eve_weather_sensor"]) -async def test_eve_weather_sensor_altitude( - hass: HomeAssistant, - matter_client: MagicMock, - matter_node: MatterNode, -) -> None: - """Test weather sensor created from (Eve) custom cluster.""" - # pressure sensor on Eve custom cluster - state = hass.states.get("number.eve_weather_altitude_above_sea_level") - assert state - assert state.state == "40.0" - - set_node_attribute(matter_node, 1, 319486977, 319422483, 800) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("number.eve_weather_altitude_above_sea_level") - assert state - assert state.state == "800.0" - - # test set value - await hass.services.async_call( - "number", - "set_value", - { - "entity_id": "number.eve_weather_altitude_above_sea_level", - "value": 500, - }, - blocking=True, - ) - assert matter_client.write_attribute.call_count == 1 - assert matter_client.write_attribute.call_args_list[0] == call( - node_id=matter_node.node_id, - attribute_path=create_attribute_path_from_attribute( - endpoint_id=1, - attribute=custom_clusters.EveCluster.Attributes.Altitude, - ), - value=500, - ) diff --git a/tests/components/matter/test_select.py b/tests/components/matter/test_select.py deleted file mode 100644 index ffe996fd840..00000000000 --- a/tests/components/matter/test_select.py +++ /dev/null @@ -1,105 +0,0 @@ -"""Test Matter select entities.""" - -from unittest.mock import MagicMock, call - -from chip.clusters import Objects as clusters -from matter_server.client.models.node import MatterNode -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .common import ( - set_node_attribute, - snapshot_matter_entities, - trigger_subscription_callback, -) - - -@pytest.mark.usefixtures("matter_devices") -async def test_selects( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test selects.""" - snapshot_matter_entities(hass, entity_registry, snapshot, Platform.SELECT) - - -@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) -async def test_mode_select_entities( - hass: HomeAssistant, - matter_client: MagicMock, - matter_node: MatterNode, -) -> None: - """Test select entities are created for the ModeSelect cluster attributes.""" - state = hass.states.get("select.mock_dimmable_light_led_color") - assert state - assert state.state == "Aqua" - assert state.attributes["options"] == [ - "Red", - "Orange", - "Lemon", - "Lime", - "Green", - "Teal", - "Cyan", - "Aqua", - "Blue", - "Violet", - "Magenta", - "Pink", - "White", - ] - # name should be derived from description attribute - assert state.attributes["friendly_name"] == "Mock Dimmable Light LED 
Color" - set_node_attribute(matter_node, 6, 80, 3, 1) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("select.mock_dimmable_light_led_color") - assert state.state == "Orange" - # test select option - await hass.services.async_call( - "select", - "select_option", - { - "entity_id": "select.mock_dimmable_light_led_color", - "option": "Lime", - }, - blocking=True, - ) - - assert matter_client.send_device_command.call_count == 1 - assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, - endpoint_id=6, - command=clusters.ModeSelect.Commands.ChangeToMode(newMode=3), - ) - - -@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) -async def test_attribute_select_entities( - hass: HomeAssistant, - matter_client: MagicMock, - matter_node: MatterNode, -) -> None: - """Test select entities are created for attribute based discovery schema(s).""" - entity_id = "select.mock_dimmable_light_power_on_behavior_on_startup" - state = hass.states.get(entity_id) - assert state - assert state.state == "previous" - assert state.attributes["options"] == ["on", "off", "toggle", "previous"] - assert ( - state.attributes["friendly_name"] - == "Mock Dimmable Light Power-on behavior on startup" - ) - set_node_attribute(matter_node, 1, 6, 16387, 1) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state.state == "on" - # test that an invalid value (e.g. 253) leads to an unknown state - set_node_attribute(matter_node, 1, 6, 16387, 253) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state.state == "unknown" diff --git a/tests/components/matter/test_sensor.py b/tests/components/matter/test_sensor.py index 27eb7da2c71..2c9bfae94ce 100644 --- a/tests/components/matter/test_sensor.py +++ b/tests/components/matter/test_sensor.py @@ -4,41 +4,109 @@ from unittest.mock import MagicMock from matter_server.client.models.node import MatterNode import pytest -from syrupy import SnapshotAssertion -from homeassistant.const import EntityCategory, Platform +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - snapshot_matter_entities, + setup_integration_with_node_fixture, trigger_subscription_callback, ) -@pytest.mark.usefixtures("matter_devices") -async def test_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test sensors.""" - snapshot_matter_entities(hass, entity_registry, snapshot, Platform.SENSOR) +@pytest.fixture(name="flow_sensor_node") +async def flow_sensor_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a flow sensor node.""" + return await setup_integration_with_node_fixture(hass, "flow-sensor", matter_client) -@pytest.mark.parametrize("node_fixture", ["flow_sensor"]) +@pytest.fixture(name="humidity_sensor_node") +async def humidity_sensor_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a humidity sensor node.""" + return await setup_integration_with_node_fixture( + hass, "humidity-sensor", matter_client + ) + + +@pytest.fixture(name="light_sensor_node") +async def light_sensor_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a light sensor node.""" + return await 
setup_integration_with_node_fixture( + hass, "light-sensor", matter_client + ) + + +@pytest.fixture(name="pressure_sensor_node") +async def pressure_sensor_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a pressure sensor node.""" + return await setup_integration_with_node_fixture( + hass, "pressure-sensor", matter_client + ) + + +@pytest.fixture(name="temperature_sensor_node") +async def temperature_sensor_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a temperature sensor node.""" + return await setup_integration_with_node_fixture( + hass, "temperature-sensor", matter_client + ) + + +@pytest.fixture(name="eve_energy_plug_node") +async def eve_energy_plug_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a Eve Energy Plug node.""" + return await setup_integration_with_node_fixture( + hass, "eve-energy-plug", matter_client + ) + + +@pytest.fixture(name="air_quality_sensor_node") +async def air_quality_sensor_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for an air quality sensor (LightFi AQ1) node.""" + return await setup_integration_with_node_fixture( + hass, "air-quality-sensor", matter_client + ) + + +@pytest.fixture(name="air_purifier_node") +async def air_purifier_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for an air purifier node.""" + return await setup_integration_with_node_fixture( + hass, "air-purifier", matter_client + ) + + +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_sensor_null_value( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + flow_sensor_node: MatterNode, ) -> None: """Test flow sensor.""" state = hass.states.get("sensor.mock_flow_sensor_flow") assert state assert state.state == "0.0" - set_node_attribute(matter_node, 1, 1028, 0, None) + set_node_attribute(flow_sensor_node, 1, 1028, 0, None) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.mock_flow_sensor_flow") @@ -46,18 +114,19 @@ async def test_sensor_null_value( assert state.state == "unknown" -@pytest.mark.parametrize("node_fixture", ["flow_sensor"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_flow_sensor( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + flow_sensor_node: MatterNode, ) -> None: """Test flow sensor.""" state = hass.states.get("sensor.mock_flow_sensor_flow") assert state assert state.state == "0.0" - set_node_attribute(matter_node, 1, 1028, 0, 20) + set_node_attribute(flow_sensor_node, 1, 1028, 0, 20) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.mock_flow_sensor_flow") @@ -65,18 +134,19 @@ async def test_flow_sensor( assert state.state == "2.0" -@pytest.mark.parametrize("node_fixture", ["humidity_sensor"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_humidity_sensor( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + humidity_sensor_node: MatterNode, ) -> None: """Test humidity sensor.""" state = hass.states.get("sensor.mock_humidity_sensor_humidity") assert state assert state.state == "0.0" - set_node_attribute(matter_node, 
1, 1029, 0, 4000) + set_node_attribute(humidity_sensor_node, 1, 1029, 0, 4000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.mock_humidity_sensor_humidity") @@ -84,18 +154,19 @@ async def test_humidity_sensor( assert state.state == "40.0" -@pytest.mark.parametrize("node_fixture", ["light_sensor"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_light_sensor( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + light_sensor_node: MatterNode, ) -> None: """Test light sensor.""" state = hass.states.get("sensor.mock_light_sensor_illuminance") assert state assert state.state == "1.3" - set_node_attribute(matter_node, 1, 1024, 0, 3000) + set_node_attribute(light_sensor_node, 1, 1024, 0, 3000) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.mock_light_sensor_illuminance") @@ -103,18 +174,39 @@ async def test_light_sensor( assert state.state == "2.0" -@pytest.mark.parametrize("node_fixture", ["temperature_sensor"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_pressure_sensor( + hass: HomeAssistant, + matter_client: MagicMock, + pressure_sensor_node: MatterNode, +) -> None: + """Test pressure sensor.""" + state = hass.states.get("sensor.mock_pressure_sensor_pressure") + assert state + assert state.state == "0.0" + + set_node_attribute(pressure_sensor_node, 1, 1027, 0, 1010) + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("sensor.mock_pressure_sensor_pressure") + assert state + assert state.state == "101.0" + + +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_temperature_sensor( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + temperature_sensor_node: MatterNode, ) -> None: """Test temperature sensor.""" state = hass.states.get("sensor.mock_temperature_sensor_temperature") assert state assert state.state == "21.0" - set_node_attribute(matter_node, 1, 1026, 0, 2500) + set_node_attribute(temperature_sensor_node, 1, 1026, 0, 2500) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.mock_temperature_sensor_temperature") @@ -122,12 +214,13 @@ async def test_temperature_sensor( assert state.state == "25.0" -@pytest.mark.parametrize("node_fixture", ["eve_contact_sensor"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_battery_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, matter_client: MagicMock, - matter_node: MatterNode, + eve_contact_sensor_node: MatterNode, ) -> None: """Test battery sensor.""" entity_id = "sensor.eve_door_battery" @@ -135,7 +228,7 @@ async def test_battery_sensor( assert state assert state.state == "100" - set_node_attribute(matter_node, 1, 47, 12, 100) + set_node_attribute(eve_contact_sensor_node, 1, 47, 12, 100) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) @@ -148,95 +241,59 @@ async def test_battery_sensor( assert entry.entity_category == EntityCategory.DIAGNOSTIC -@pytest.mark.parametrize("node_fixture", ["eve_contact_sensor"]) -async def test_battery_sensor_voltage( +# This tests needs to be adjusted to remove lingering tasks 
+@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_eve_energy_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, matter_client: MagicMock, - matter_node: MatterNode, + eve_energy_plug_node: MatterNode, ) -> None: - """Test battery voltage sensor.""" - entity_id = "sensor.eve_door_voltage" + """Test Energy sensors created from Eve Energy custom cluster.""" + # power sensor + entity_id = "sensor.eve_energy_plug_power" state = hass.states.get(entity_id) assert state - assert state.state == "3.558" - - set_node_attribute(matter_node, 1, 47, 11, 4234) - await trigger_subscription_callback(hass, matter_client) - - state = hass.states.get(entity_id) - assert state - assert state.state == "4.234" - - entry = entity_registry.async_get(entity_id) - - assert entry - assert entry.entity_category == EntityCategory.DIAGNOSTIC - - -@pytest.mark.parametrize("node_fixture", ["eve_thermo"]) -async def test_eve_thermo_sensor( - hass: HomeAssistant, - matter_client: MagicMock, - matter_node: MatterNode, -) -> None: - """Test Eve Thermo.""" - # Valve position - state = hass.states.get("sensor.eve_thermo_valve_position") - assert state - assert state.state == "10" - - set_node_attribute(matter_node, 1, 319486977, 319422488, 0) - await trigger_subscription_callback(hass, matter_client) - - state = hass.states.get("sensor.eve_thermo_valve_position") - assert state - assert state.state == "0" - - -@pytest.mark.parametrize("node_fixture", ["pressure_sensor"]) -async def test_pressure_sensor( - hass: HomeAssistant, - matter_client: MagicMock, - matter_node: MatterNode, -) -> None: - """Test pressure sensor.""" - state = hass.states.get("sensor.mock_pressure_sensor_pressure") - assert state assert state.state == "0.0" + assert state.attributes["unit_of_measurement"] == "W" + assert state.attributes["device_class"] == "power" + assert state.attributes["friendly_name"] == "Eve Energy Plug Power" - set_node_attribute(matter_node, 1, 1027, 0, 1010) - await trigger_subscription_callback(hass, matter_client) - - state = hass.states.get("sensor.mock_pressure_sensor_pressure") + # voltage sensor + entity_id = "sensor.eve_energy_plug_voltage" + state = hass.states.get(entity_id) assert state - assert state.state == "101.0" + assert state.state == "238.800003051758" + assert state.attributes["unit_of_measurement"] == "V" + assert state.attributes["device_class"] == "voltage" + assert state.attributes["friendly_name"] == "Eve Energy Plug Voltage" - -@pytest.mark.parametrize("node_fixture", ["eve_weather_sensor"]) -async def test_eve_weather_sensor_custom_cluster( - hass: HomeAssistant, - matter_client: MagicMock, - matter_node: MatterNode, -) -> None: - """Test weather sensor created from (Eve) custom cluster.""" - # pressure sensor on Eve custom cluster - state = hass.states.get("sensor.eve_weather_pressure") + # energy sensor + entity_id = "sensor.eve_energy_plug_energy" + state = hass.states.get(entity_id) assert state - assert state.state == "1008.5" + assert state.state == "0.220000028610229" + assert state.attributes["unit_of_measurement"] == "kWh" + assert state.attributes["device_class"] == "energy" + assert state.attributes["friendly_name"] == "Eve Energy Plug Energy" + assert state.attributes["state_class"] == "total_increasing" - set_node_attribute(matter_node, 1, 319486977, 319422484, 800) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("sensor.eve_weather_pressure") + # current sensor + entity_id = "sensor.eve_energy_plug_current" + 
state = hass.states.get(entity_id) assert state - assert state.state == "800.0" + assert state.state == "0.0" + assert state.attributes["unit_of_measurement"] == "A" + assert state.attributes["device_class"] == "current" + assert state.attributes["friendly_name"] == "Eve Energy Plug Current" -@pytest.mark.parametrize("node_fixture", ["air_quality_sensor"]) +# This test needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_air_quality_sensor( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + air_quality_sensor_node: MatterNode, ) -> None: """Test air quality sensor.""" # Carbon Dioxide @@ -244,7 +301,7 @@ async def test_air_quality_sensor( assert state assert state.state == "678.0" - set_node_attribute(matter_node, 1, 1037, 0, 789) + set_node_attribute(air_quality_sensor_node, 1, 1037, 0, 789) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.lightfi_aq1_air_quality_sensor_carbon_dioxide") @@ -256,7 +313,7 @@ async def test_air_quality_sensor( assert state assert state.state == "3.0" - set_node_attribute(matter_node, 1, 1068, 0, 50) + set_node_attribute(air_quality_sensor_node, 1, 1068, 0, 50) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.lightfi_aq1_air_quality_sensor_pm1") @@ -268,7 +325,7 @@ async def test_air_quality_sensor( assert state assert state.state == "3.0" - set_node_attribute(matter_node, 1, 1066, 0, 50) + set_node_attribute(air_quality_sensor_node, 1, 1066, 0, 50) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.lightfi_aq1_air_quality_sensor_pm2_5") @@ -280,7 +337,7 @@ async def test_air_quality_sensor( assert state assert state.state == "3.0" - set_node_attribute(matter_node, 1, 1069, 0, 50) + set_node_attribute(air_quality_sensor_node, 1, 1069, 0, 50) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("sensor.lightfi_aq1_air_quality_sensor_pm10") @@ -288,28 +345,108 @@ async def test_air_quality_sensor( assert state.state == "50.0" -@pytest.mark.parametrize("node_fixture", ["silabs_dishwasher"]) -async def test_operational_state_sensor( +# This test needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_air_purifier_sensor( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + air_purifier_node: MatterNode, ) -> None: - """Test dishwasher sensor.""" - # OperationalState Cluster / OperationalState attribute (1/96/4) - state = hass.states.get("sensor.dishwasher_operational_state") + """Test air quality sensors are created for an air purifier device.""" + # Carbon Dioxide + state = hass.states.get("sensor.air_purifier_carbon_dioxide") assert state - assert state.state == "stopped" - assert state.attributes["options"] == [ - "stopped", - "running", - "paused", - "error", - "extra_state", + assert state.state == "2.0" + + # PM1 + state = hass.states.get("sensor.air_purifier_pm1") + assert state + assert state.state == "2.0" + + # PM2.5 + state = hass.states.get("sensor.air_purifier_pm2_5") + assert state + assert state.state == "2.0" + + # PM10 + state = hass.states.get("sensor.air_purifier_pm10") + assert state + assert state.state == "2.0" + + # Temperature + state = hass.states.get("sensor.air_purifier_temperature") + assert state + assert state.state == "20.0" + + # Humidity + state = hass.states.get("sensor.air_purifier_humidity") + assert
state + assert state.state == "50.0" + + # VOCS + state = hass.states.get("sensor.air_purifier_vocs") + assert state + assert state.state == "2.0" + assert state.attributes["state_class"] == "measurement" + assert state.attributes["unit_of_measurement"] == "ppm" + assert state.attributes["device_class"] == "volatile_organic_compounds_parts" + assert state.attributes["friendly_name"] == "Air Purifier VOCs" + + # Air Quality + state = hass.states.get("sensor.air_purifier_air_quality") + assert state + assert state.state == "good" + expected_options = [ + "extremely_poor", + "very_poor", + "poor", + "fair", + "good", + "moderate", + "unknown", ] + assert set(state.attributes["options"]) == set(expected_options) + assert state.attributes["device_class"] == "enum" + assert state.attributes["friendly_name"] == "Air Purifier Air quality" - set_node_attribute(matter_node, 1, 96, 4, 8) - await trigger_subscription_callback(hass, matter_client) - - state = hass.states.get("sensor.dishwasher_operational_state") + # Carbon MonoOxide + state = hass.states.get("sensor.air_purifier_carbon_monoxide") assert state - assert state.state == "extra_state" + assert state.state == "2.0" + assert state.attributes["state_class"] == "measurement" + assert state.attributes["unit_of_measurement"] == "ppm" + assert state.attributes["device_class"] == "carbon_monoxide" + assert state.attributes["friendly_name"] == "Air Purifier Carbon monoxide" + + # Nitrogen Dioxide + state = hass.states.get("sensor.air_purifier_nitrogen_dioxide") + assert state + assert state.state == "2.0" + assert state.attributes["state_class"] == "measurement" + assert state.attributes["unit_of_measurement"] == "ppm" + assert state.attributes["device_class"] == "nitrogen_dioxide" + assert state.attributes["friendly_name"] == "Air Purifier Nitrogen dioxide" + + # Ozone Concentration + state = hass.states.get("sensor.air_purifier_ozone") + assert state + assert state.state == "2.0" + assert state.attributes["state_class"] == "measurement" + assert state.attributes["unit_of_measurement"] == "ppm" + assert state.attributes["device_class"] == "ozone" + assert state.attributes["friendly_name"] == "Air Purifier Ozone" + + # Hepa Filter Condition + state = hass.states.get("sensor.air_purifier_hepa_filter_condition") + assert state + assert state.state == "100" + assert state.attributes["state_class"] == "measurement" + assert state.attributes["unit_of_measurement"] == "%" + assert state.attributes["friendly_name"] == "Air Purifier Hepa filter condition" + + # Activated Carbon Filter Condition + state = hass.states.get("sensor.air_purifier_activated_carbon_filter_condition") + assert state + assert state.state == "100" + assert state.attributes["state_class"] == "measurement" + assert state.attributes["unit_of_measurement"] == "%" diff --git a/tests/components/matter/test_switch.py b/tests/components/matter/test_switch.py index d7a6a700cde..0327e9ea5fe 100644 --- a/tests/components/matter/test_switch.py +++ b/tests/components/matter/test_switch.py @@ -5,37 +5,43 @@ from unittest.mock import MagicMock, call from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode import pytest -from syrupy import SnapshotAssertion -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from .common import ( set_node_attribute, - snapshot_matter_entities, + setup_integration_with_node_fixture, trigger_subscription_callback, ) 
-@pytest.mark.usefixtures("matter_devices") -async def test_switches( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test switches.""" - snapshot_matter_entities(hass, entity_registry, snapshot, Platform.SWITCH) +@pytest.fixture(name="powerplug_node") +async def powerplug_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a Powerplug node.""" + return await setup_integration_with_node_fixture( + hass, "on-off-plugin-unit", matter_client + ) -@pytest.mark.parametrize("node_fixture", ["on_off_plugin_unit"]) +@pytest.fixture(name="switch_unit") +async def switch_unit_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a Switch Unit node.""" + return await setup_integration_with_node_fixture(hass, "switch-unit", matter_client) + + +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_turn_on( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + powerplug_node: MatterNode, ) -> None: """Test turning on a switch.""" - state = hass.states.get("switch.mock_onoffpluginunit") + state = hass.states.get("switch.mock_onoffpluginunit_switch") assert state assert state.state == "off" @@ -43,34 +49,35 @@ async def test_turn_on( "switch", "turn_on", { - "entity_id": "switch.mock_onoffpluginunit", + "entity_id": "switch.mock_onoffpluginunit_switch", }, blocking=True, ) assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=powerplug_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ) - set_node_attribute(matter_node, 1, 6, 0, True) + set_node_attribute(powerplug_node, 1, 6, 0, True) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("switch.mock_onoffpluginunit") + state = hass.states.get("switch.mock_onoffpluginunit_switch") assert state assert state.state == "on" -@pytest.mark.parametrize("node_fixture", ["on_off_plugin_unit"]) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_turn_off( hass: HomeAssistant, matter_client: MagicMock, - matter_node: MatterNode, + powerplug_node: MatterNode, ) -> None: """Test turning off a switch.""" - state = hass.states.get("switch.mock_onoffpluginunit") + state = hass.states.get("switch.mock_onoffpluginunit_switch") assert state assert state.state == "off" @@ -78,34 +85,46 @@ async def test_turn_off( "switch", "turn_off", { - "entity_id": "switch.mock_onoffpluginunit", + "entity_id": "switch.mock_onoffpluginunit_switch", }, blocking=True, ) assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, + node_id=powerplug_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.Off(), ) -@pytest.mark.parametrize("node_fixture", ["switch_unit"]) -async def test_switch_unit(hass: HomeAssistant, matter_node: MatterNode) -> None: +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_switch_unit( + hass: HomeAssistant, + matter_client: MagicMock, + switch_unit: MatterNode, +) -> None: """Test if a switch entity is discovered from any (non-light) OnOf cluster device.""" # A switch entity should be discovered as fallback 
for ANY Matter device (endpoint) # that has the OnOff cluster and does not fall into an explicit discovery schema # by another platform (e.g. light, lock etc.). - state = hass.states.get("switch.mock_switchunit") + state = hass.states.get("switch.mock_switchunit_switch") assert state assert state.state == "off" - assert state.attributes["friendly_name"] == "Mock SwitchUnit" + assert state.attributes["friendly_name"] == "Mock SwitchUnit Switch" -@pytest.mark.parametrize("node_fixture", ["room_airconditioner"]) -async def test_power_switch(hass: HomeAssistant, matter_node: MatterNode) -> None: +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_power_switch( + hass: HomeAssistant, + matter_client: MagicMock, +) -> None: """Test if a Power switch entity is created for a device that supports that.""" + await setup_integration_with_node_fixture( + hass, "room-airconditioner", matter_client + ) state = hass.states.get("switch.room_airconditioner_power") assert state assert state.state == "off" diff --git a/tests/components/matter/test_update.py b/tests/components/matter/test_update.py deleted file mode 100644 index 92576fa69e2..00000000000 --- a/tests/components/matter/test_update.py +++ /dev/null @@ -1,416 +0,0 @@ -"""Test Matter number entities.""" - -from typing import Any -from unittest.mock import AsyncMock, MagicMock - -from chip.clusters import Objects as clusters -from chip.clusters.ClusterObjects import ClusterAttributeDescriptor -from freezegun.api import FrozenDateTimeFactory -from matter_server.client.models.node import MatterNode -from matter_server.common.errors import UpdateCheckError, UpdateError -from matter_server.common.models import MatterSoftwareVersion, UpdateSource -import pytest - -from homeassistant.components.homeassistant import ( - DOMAIN as HA_DOMAIN, - SERVICE_UPDATE_ENTITY, -) -from homeassistant.components.matter.update import SCAN_INTERVAL -from homeassistant.components.update import ( - ATTR_VERSION, - DOMAIN as UPDATE_DOMAIN, - SERVICE_INSTALL, -) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON -from homeassistant.core import HomeAssistant, State -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.restore_state import STORAGE_KEY as RESTORE_STATE_KEY -from homeassistant.setup import async_setup_component - -from .common import ( - set_node_attribute, - setup_integration_with_node_fixture, - trigger_subscription_callback, -) - -from tests.common import ( - async_fire_time_changed, - async_mock_restore_state_shutdown_restart, - mock_restore_cache_with_extra_data, -) - -TEST_SOFTWARE_VERSION = MatterSoftwareVersion( - vid=65521, - pid=32768, - software_version=2, - software_version_string="v2.0", - firmware_information="", - min_applicable_software_version=0, - max_applicable_software_version=1, - release_notes_url="http://home-assistant.io/non-existing-product", - update_source=UpdateSource.LOCAL, -) - - -def set_node_attribute_typed( - node: MatterNode, - endpoint: int, - attribute: ClusterAttributeDescriptor, - value: Any, -) -> None: - """Set a node attribute.""" - set_node_attribute( - node, endpoint, attribute.cluster_id, attribute.attribute_id, value - ) - - -@pytest.fixture(name="check_node_update") -async def check_node_update_fixture(matter_client: MagicMock) -> AsyncMock: - """Fixture to check for node updates.""" - matter_client.check_node_update = AsyncMock(return_value=None) - return 
matter_client.check_node_update - - -@pytest.fixture(name="update_node") -async def update_node_fixture(matter_client: MagicMock) -> AsyncMock: - """Fixture to install update.""" - matter_client.update_node = AsyncMock(return_value=None) - return matter_client.update_node - - -@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) -async def test_update_entity( - hass: HomeAssistant, - matter_client: MagicMock, - check_node_update: AsyncMock, - matter_node: MatterNode, -) -> None: - """Test update entity exists and update check got made.""" - state = hass.states.get("update.mock_dimmable_light") - assert state - assert state.state == STATE_OFF - - assert matter_client.check_node_update.call_count == 1 - - -@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) -async def test_update_check_service( - hass: HomeAssistant, - matter_client: MagicMock, - check_node_update: AsyncMock, - matter_node: MatterNode, -) -> None: - """Test check device update through service call.""" - state = hass.states.get("update.mock_dimmable_light") - assert state - assert state.state == STATE_OFF - assert state.attributes.get("installed_version") == "v1.0" - - await async_setup_component(hass, HA_DOMAIN, {}) - - check_node_update.return_value = MatterSoftwareVersion( - vid=65521, - pid=32768, - software_version=2, - software_version_string="v2.0", - firmware_information="", - min_applicable_software_version=0, - max_applicable_software_version=1, - release_notes_url="http://home-assistant.io/non-existing-product", - update_source=UpdateSource.LOCAL, - ) - - await hass.services.async_call( - HA_DOMAIN, - SERVICE_UPDATE_ENTITY, - { - ATTR_ENTITY_ID: "update.mock_dimmable_light", - }, - blocking=True, - ) - - assert matter_client.check_node_update.call_count == 2 - - state = hass.states.get("update.mock_dimmable_light") - assert state - assert state.state == STATE_ON - assert state.attributes.get("latest_version") == "v2.0" - assert ( - state.attributes.get("release_url") - == "http://home-assistant.io/non-existing-product" - ) - - -@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) -async def test_update_install( - hass: HomeAssistant, - matter_client: MagicMock, - check_node_update: AsyncMock, - matter_node: MatterNode, - freezer: FrozenDateTimeFactory, -) -> None: - """Test device update with Matter attribute changes influence progress.""" - state = hass.states.get("update.mock_dimmable_light") - assert state - assert state.state == STATE_OFF - assert state.attributes.get("installed_version") == "v1.0" - - check_node_update.return_value = MatterSoftwareVersion( - vid=65521, - pid=32768, - software_version=2, - software_version_string="v2.0", - firmware_information="", - min_applicable_software_version=0, - max_applicable_software_version=1, - release_notes_url="http://home-assistant.io/non-existing-product", - update_source=UpdateSource.LOCAL, - ) - - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - assert matter_client.check_node_update.call_count == 2 - - state = hass.states.get("update.mock_dimmable_light") - assert state - assert state.state == STATE_ON - assert state.attributes.get("latest_version") == "v2.0" - assert ( - state.attributes.get("release_url") - == "http://home-assistant.io/non-existing-product" - ) - - await hass.services.async_call( - UPDATE_DOMAIN, - SERVICE_INSTALL, - { - ATTR_ENTITY_ID: "update.mock_dimmable_light", - }, - blocking=True, - ) - - set_node_attribute_typed( - matter_node, - 0, 
- clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateState, - clusters.OtaSoftwareUpdateRequestor.Enums.UpdateStateEnum.kDownloading, - ) - await trigger_subscription_callback(hass, matter_client) - - state = hass.states.get("update.mock_dimmable_light") - assert state - assert state.state == STATE_ON - assert state.attributes["in_progress"] is True - assert state.attributes["update_percentage"] is None - - set_node_attribute_typed( - matter_node, - 0, - clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateStateProgress, - 50, - ) - await trigger_subscription_callback(hass, matter_client) - - state = hass.states.get("update.mock_dimmable_light") - assert state - assert state.state == STATE_ON - assert state.attributes["in_progress"] is True - assert state.attributes["update_percentage"] == 50 - - set_node_attribute_typed( - matter_node, - 0, - clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateState, - clusters.OtaSoftwareUpdateRequestor.Enums.UpdateStateEnum.kIdle, - ) - set_node_attribute_typed( - matter_node, - 0, - clusters.BasicInformation.Attributes.SoftwareVersion, - 2, - ) - set_node_attribute_typed( - matter_node, - 0, - clusters.BasicInformation.Attributes.SoftwareVersionString, - "v2.0", - ) - await trigger_subscription_callback(hass, matter_client) - - state = hass.states.get("update.mock_dimmable_light") - assert state.state == STATE_OFF - assert state.attributes.get("installed_version") == "v2.0" - - -@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) -async def test_update_install_failure( - hass: HomeAssistant, - matter_client: MagicMock, - check_node_update: AsyncMock, - update_node: AsyncMock, - matter_node: MatterNode, - freezer: FrozenDateTimeFactory, -) -> None: - """Test update entity service call errors.""" - state = hass.states.get("update.mock_dimmable_light") - assert state - assert state.state == STATE_OFF - assert state.attributes.get("installed_version") == "v1.0" - - check_node_update.return_value = MatterSoftwareVersion( - vid=65521, - pid=32768, - software_version=2, - software_version_string="v2.0", - firmware_information="", - min_applicable_software_version=0, - max_applicable_software_version=1, - release_notes_url="http://home-assistant.io/non-existing-product", - update_source=UpdateSource.LOCAL, - ) - - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - assert matter_client.check_node_update.call_count == 2 - - state = hass.states.get("update.mock_dimmable_light") - assert state - assert state.state == STATE_ON - assert state.attributes.get("latest_version") == "v2.0" - assert ( - state.attributes.get("release_url") - == "http://home-assistant.io/non-existing-product" - ) - - update_node.side_effect = UpdateCheckError("Error finding applicable update") - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - UPDATE_DOMAIN, - SERVICE_INSTALL, - { - ATTR_ENTITY_ID: "update.mock_dimmable_light", - ATTR_VERSION: "v3.0", - }, - blocking=True, - ) - - update_node.side_effect = UpdateError("Error updating node") - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - UPDATE_DOMAIN, - SERVICE_INSTALL, - { - ATTR_ENTITY_ID: "update.mock_dimmable_light", - ATTR_VERSION: "v3.0", - }, - blocking=True, - ) - - -@pytest.mark.parametrize("node_fixture", ["dimmable_light"]) -async def test_update_state_save_and_restore( - hass: HomeAssistant, - hass_storage: dict[str, Any], - matter_client: MagicMock, - check_node_update: AsyncMock, 
- matter_node: MatterNode, - freezer: FrozenDateTimeFactory, -) -> None: - """Test latest update information is retained across reload/restart.""" - state = hass.states.get("update.mock_dimmable_light") - assert state - assert state.state == STATE_OFF - assert state.attributes.get("installed_version") == "v1.0" - - check_node_update.return_value = TEST_SOFTWARE_VERSION - - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - assert matter_client.check_node_update.call_count == 2 - - state = hass.states.get("update.mock_dimmable_light") - assert state - assert state.state == STATE_ON - assert state.attributes.get("latest_version") == "v2.0" - await hass.async_block_till_done() - await async_mock_restore_state_shutdown_restart(hass) - - assert len(hass_storage[RESTORE_STATE_KEY]["data"]) == 1 - state = hass_storage[RESTORE_STATE_KEY]["data"][0]["state"] - assert state["entity_id"] == "update.mock_dimmable_light" - extra_data = hass_storage[RESTORE_STATE_KEY]["data"][0]["extra_data"] - - # Check that the extra data has the format we expect. - assert extra_data == { - "software_update": { - "vid": 65521, - "pid": 32768, - "software_version": 2, - "software_version_string": "v2.0", - "firmware_information": "", - "min_applicable_software_version": 0, - "max_applicable_software_version": 1, - "release_notes_url": "http://home-assistant.io/non-existing-product", - "update_source": "local", - } - } - - -async def test_update_state_restore( - hass: HomeAssistant, - matter_client: MagicMock, - check_node_update: AsyncMock, - update_node: AsyncMock, -) -> None: - """Test latest update information extra data is restored.""" - mock_restore_cache_with_extra_data( - hass, - ( - ( - State( - "update.mock_dimmable_light", - STATE_ON, - { - "auto_update": False, - "installed_version": "v1.0", - "in_progress": False, - "latest_version": "v2.0", - }, - ), - {"software_update": TEST_SOFTWARE_VERSION.as_dict()}, - ), - ), - ) - await setup_integration_with_node_fixture(hass, "dimmable_light", matter_client) - - assert check_node_update.call_count == 0 - - state = hass.states.get("update.mock_dimmable_light") - assert state - assert state.state == STATE_ON - assert state.attributes.get("latest_version") == "v2.0" - - await hass.services.async_call( - UPDATE_DOMAIN, - SERVICE_INSTALL, - { - ATTR_ENTITY_ID: "update.mock_dimmable_light", - }, - blocking=True, - ) - - # Validate that the integer software version from the extra data is passed - # to the update_node call. 
- assert update_node.call_count == 1 - assert ( - update_node.call_args[1]["software_version"] - == TEST_SOFTWARE_VERSION.software_version - ) diff --git a/tests/components/matter/test_vacuum.py b/tests/components/matter/test_vacuum.py deleted file mode 100644 index 86f7542395a..00000000000 --- a/tests/components/matter/test_vacuum.py +++ /dev/null @@ -1,209 +0,0 @@ -"""Test Matter vacuum.""" - -from unittest.mock import MagicMock, call - -from chip.clusters import Objects as clusters -from matter_server.client.models.node import MatterNode -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, HomeAssistantError -from homeassistant.helpers import entity_registry as er - -from .common import ( - set_node_attribute, - snapshot_matter_entities, - trigger_subscription_callback, -) - - -@pytest.mark.usefixtures("matter_devices") -async def test_vacuum( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test that the correct entities get created for a vacuum device.""" - snapshot_matter_entities(hass, entity_registry, snapshot, Platform.VACUUM) - - -@pytest.mark.parametrize("node_fixture", ["vacuum_cleaner"]) -async def test_vacuum_actions( - hass: HomeAssistant, - matter_client: MagicMock, - matter_node: MatterNode, -) -> None: - """Test vacuum entity actions.""" - entity_id = "vacuum.mock_vacuum" - state = hass.states.get(entity_id) - assert state - - # test return_to_base action - await hass.services.async_call( - "vacuum", - "return_to_base", - { - "entity_id": entity_id, - }, - blocking=True, - ) - - assert matter_client.send_device_command.call_count == 1 - assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, - endpoint_id=1, - command=clusters.RvcOperationalState.Commands.GoHome(), - ) - matter_client.send_device_command.reset_mock() - - # test start/resume action - await hass.services.async_call( - "vacuum", - "start", - { - "entity_id": entity_id, - }, - blocking=True, - ) - - assert matter_client.send_device_command.call_count == 1 - assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, - endpoint_id=1, - command=clusters.RvcOperationalState.Commands.Resume(), - ) - matter_client.send_device_command.reset_mock() - - # test pause action - await hass.services.async_call( - "vacuum", - "pause", - { - "entity_id": entity_id, - }, - blocking=True, - ) - - assert matter_client.send_device_command.call_count == 1 - assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, - endpoint_id=1, - command=clusters.OperationalState.Commands.Pause(), - ) - matter_client.send_device_command.reset_mock() - - # test stop action - # stop command is not supported by the vacuum fixture - with pytest.raises( - HomeAssistantError, - match="Entity vacuum.mock_vacuum does not support this service.", - ): - await hass.services.async_call( - "vacuum", - "stop", - { - "entity_id": entity_id, - }, - blocking=True, - ) - - # update accepted command list to add support for stop command - set_node_attribute( - matter_node, 1, 97, 65529, [clusters.OperationalState.Commands.Stop.command_id] - ) - await trigger_subscription_callback(hass, matter_client) - await hass.services.async_call( - "vacuum", - "stop", - { - "entity_id": entity_id, - }, - blocking=True, - ) - assert matter_client.send_device_command.call_count == 1 - assert 
matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, - endpoint_id=1, - command=clusters.OperationalState.Commands.Stop(), - ) - matter_client.send_device_command.reset_mock() - - -@pytest.mark.parametrize("node_fixture", ["vacuum_cleaner"]) -async def test_vacuum_updates( - hass: HomeAssistant, - matter_client: MagicMock, - matter_node: MatterNode, -) -> None: - """Test vacuum entity updates.""" - entity_id = "vacuum.mock_vacuum" - state = hass.states.get(entity_id) - assert state - # confirm initial state is idle (as stored in the fixture) - assert state.state == "idle" - - # confirm state is 'docked' by setting the operational state to 0x42 - set_node_attribute(matter_node, 1, 97, 4, 0x42) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state - assert state.state == "docked" - - # confirm state is 'docked' by setting the operational state to 0x41 - set_node_attribute(matter_node, 1, 97, 4, 0x41) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state - assert state.state == "docked" - - # confirm state is 'returning' by setting the operational state to 0x40 - set_node_attribute(matter_node, 1, 97, 4, 0x40) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state - assert state.state == "returning" - - # confirm state is 'error' by setting the operational state to 0x01 - set_node_attribute(matter_node, 1, 97, 4, 0x01) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state - assert state.state == "error" - - # confirm state is 'error' by setting the operational state to 0x02 - set_node_attribute(matter_node, 1, 97, 4, 0x02) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state - assert state.state == "error" - - # confirm state is 'cleaning' by setting; - # - the operational state to 0x00 - # - the run mode is set to a mode which has cleaning tag - set_node_attribute(matter_node, 1, 97, 4, 0) - set_node_attribute(matter_node, 1, 84, 1, 1) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state - assert state.state == "cleaning" - - # confirm state is 'idle' by setting; - # - the operational state to 0x00 - # - the run mode is set to a mode which has idle tag - set_node_attribute(matter_node, 1, 97, 4, 0) - set_node_attribute(matter_node, 1, 84, 1, 0) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state - assert state.state == "idle" - - # confirm state is 'unknown' by setting; - # - the operational state to 0x00 - # - the run mode is set to a mode which has neither cleaning or idle tag - set_node_attribute(matter_node, 1, 97, 4, 0) - set_node_attribute(matter_node, 1, 84, 1, 2) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state - assert state.state == "unknown" diff --git a/tests/components/matter/test_valve.py b/tests/components/matter/test_valve.py deleted file mode 100644 index 9c4429dda65..00000000000 --- a/tests/components/matter/test_valve.py +++ /dev/null @@ -1,135 +0,0 @@ -"""Test Matter valve.""" - -from unittest.mock import MagicMock, call - -from chip.clusters import Objects as clusters -from matter_server.client.models.node import MatterNode -import pytest -from syrupy import SnapshotAssertion - -from 
homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .common import ( - set_node_attribute, - snapshot_matter_entities, - trigger_subscription_callback, -) - - -@pytest.mark.usefixtures("matter_devices") -async def test_valves( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test valves.""" - snapshot_matter_entities(hass, entity_registry, snapshot, Platform.VALVE) - - -@pytest.mark.parametrize("node_fixture", ["valve"]) -async def test_valve( - hass: HomeAssistant, - matter_client: MagicMock, - matter_node: MatterNode, -) -> None: - """Test valve entity is created for a Matter ValveConfigurationAndControl Cluster.""" - entity_id = "valve.valve" - state = hass.states.get(entity_id) - assert state - assert state.state == "closed" - assert state.attributes["friendly_name"] == "Valve" - - # test close_valve action - await hass.services.async_call( - "valve", - "close_valve", - { - "entity_id": entity_id, - }, - blocking=True, - ) - - assert matter_client.send_device_command.call_count == 1 - assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, - endpoint_id=1, - command=clusters.ValveConfigurationAndControl.Commands.Close(), - ) - matter_client.send_device_command.reset_mock() - - # test open_valve action - await hass.services.async_call( - "valve", - "open_valve", - { - "entity_id": entity_id, - }, - blocking=True, - ) - - assert matter_client.send_device_command.call_count == 1 - assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, - endpoint_id=1, - command=clusters.ValveConfigurationAndControl.Commands.Open(), - ) - matter_client.send_device_command.reset_mock() - - # set changing state to 'opening' - set_node_attribute(matter_node, 1, 129, 4, 2) - set_node_attribute(matter_node, 1, 129, 5, 1) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state - assert state.state == "opening" - - # set changing state to 'closing' - set_node_attribute(matter_node, 1, 129, 4, 2) - set_node_attribute(matter_node, 1, 129, 5, 0) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state - assert state.state == "closing" - - # set changing state to 'open' - set_node_attribute(matter_node, 1, 129, 4, 1) - set_node_attribute(matter_node, 1, 129, 5, 0) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state - assert state.state == "open" - - # add support for setting position by updating the featuremap - set_node_attribute(matter_node, 1, 129, 65532, 2) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state - assert state.attributes["current_position"] == 0 - - # update current position - set_node_attribute(matter_node, 1, 129, 6, 50) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get(entity_id) - assert state - assert state.attributes["current_position"] == 50 - - # test set_position action - await hass.services.async_call( - "valve", - "set_valve_position", - { - "entity_id": entity_id, - "position": 100, - }, - blocking=True, - ) - - assert matter_client.send_device_command.call_count == 1 - assert matter_client.send_device_command.call_args == call( - node_id=matter_node.node_id, - endpoint_id=1, - 
command=clusters.ValveConfigurationAndControl.Commands.Open(targetLevel=100), - ) - matter_client.send_device_command.reset_mock() diff --git a/tests/components/maxcube/test_maxcube_climate.py b/tests/components/maxcube/test_maxcube_climate.py index 8b56ee6a6de..48e616f8fd2 100644 --- a/tests/components/maxcube/test_maxcube_climate.py +++ b/tests/components/maxcube/test_maxcube_climate.py @@ -216,7 +216,7 @@ async def test_thermostat_set_no_temperature( hass: HomeAssistant, cube: MaxCube, thermostat: MaxThermostat ) -> None: """Set hvac mode to heat.""" - with pytest.raises(ServiceValidationError): + with pytest.raises(ValueError): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, diff --git a/tests/components/mealie/conftest.py b/tests/components/mealie/conftest.py index 8e724e4d8ea..dd6309cb524 100644 --- a/tests/components/mealie/conftest.py +++ b/tests/components/mealie/conftest.py @@ -1,28 +1,17 @@ """Mealie tests configuration.""" -from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import patch -from aiomealie import ( - About, - Mealplan, - MealplanResponse, - Recipe, - ShoppingItemsResponse, - ShoppingListsResponse, - Statistics, - UserInfo, -) +from aiomealie import Mealplan, MealplanResponse from mashumaro.codecs.orjson import ORJSONDecoder import pytest +from typing_extensions import Generator from homeassistant.components.mealie.const import DOMAIN from homeassistant.const import CONF_API_TOKEN, CONF_HOST from tests.common import MockConfigEntry, load_fixture - -SHOPPING_LIST_ID = "list-id-1" -SHOPPING_ITEM_NOTE = "Shopping Item 1" +from tests.components.smhi.common import AsyncMock @pytest.fixture @@ -40,7 +29,7 @@ def mock_mealie_client() -> Generator[AsyncMock]: """Mock a Mealie client.""" with ( patch( - "homeassistant.components.mealie.MealieClient", + "homeassistant.components.mealie.coordinator.MealieClient", autospec=True, ) as mock_client, patch( @@ -55,27 +44,6 @@ def mock_mealie_client() -> Generator[AsyncMock]: client.get_mealplan_today.return_value = ORJSONDecoder(list[Mealplan]).decode( load_fixture("get_mealplan_today.json", DOMAIN) ) - client.get_user_info.return_value = UserInfo.from_json( - load_fixture("users_self.json", DOMAIN) - ) - client.get_about.return_value = About.from_json( - load_fixture("about.json", DOMAIN) - ) - recipe = Recipe.from_json(load_fixture("get_recipe.json", DOMAIN)) - client.get_recipe.return_value = recipe - client.import_recipe.return_value = recipe - client.get_shopping_lists.return_value = ShoppingListsResponse.from_json( - load_fixture("get_shopping_lists.json", DOMAIN) - ) - client.get_shopping_items.return_value = ShoppingItemsResponse.from_json( - load_fixture("get_shopping_items.json", DOMAIN) - ) - client.get_statistics.return_value = Statistics.from_json( - load_fixture("statistics.json", DOMAIN) - ) - mealplan = Mealplan.from_json(load_fixture("mealplan.json", DOMAIN)) - client.random_mealplan.return_value = mealplan - client.set_mealplan.return_value = mealplan yield client @@ -87,5 +55,4 @@ def mock_config_entry() -> MockConfigEntry: title="Mealie", data={CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token"}, entry_id="01J0BC4QM2YBRP6H5G933CETT7", - unique_id="bf1c62fe-4941-4332-9886-e54e88dbdba0", ) diff --git a/tests/components/mealie/fixtures/about.json b/tests/components/mealie/fixtures/about.json deleted file mode 100644 index 86f74ec66d6..00000000000 --- a/tests/components/mealie/fixtures/about.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - 
"version": "v1.10.2" -} diff --git a/tests/components/mealie/fixtures/get_mealplans.json b/tests/components/mealie/fixtures/get_mealplans.json index 9255f9b7396..2d63b753d99 100644 --- a/tests/components/mealie/fixtures/get_mealplans.json +++ b/tests/components/mealie/fixtures/get_mealplans.json @@ -605,17 +605,6 @@ "updateAt": "2024-01-02T06:35:05.209189", "lastMade": "2024-01-02T22:59:59" } - }, - { - "date": "2024-01-21", - "entryType": "dinner", - "title": "Aquavite", - "text": "Dineren met de boys", - "recipeId": null, - "id": 1, - "groupId": "3931df86-0679-4579-8c63-4bedc9ca9a85", - "userId": "6caa6e4d-521f-4ef4-9ed7-388bdd63f47d", - "recipe": null } ], "next": null, diff --git a/tests/components/mealie/fixtures/get_recipe.json b/tests/components/mealie/fixtures/get_recipe.json deleted file mode 100644 index a5ccd1876e5..00000000000 --- a/tests/components/mealie/fixtures/get_recipe.json +++ /dev/null @@ -1,266 +0,0 @@ -{ - "id": "fada9582-709b-46aa-b384-d5952123ad93", - "userId": "bf1c62fe-4941-4332-9886-e54e88dbdba0", - "groupId": "24477569-f6af-4b53-9e3f-6d04b0ca6916", - "name": "Original Sacher-Torte (2)", - "slug": "original-sacher-torte-2", - "image": "SuPW", - "recipeYield": "4 servings", - "totalTime": "2 hours 30 minutes", - "prepTime": "1 hour 30 minutes", - "cookTime": null, - "performTime": "1 hour", - "description": "The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”", - "recipeCategory": [], - "tags": [ - { - "id": "1b5789b9-3af6-412e-8c77-8a01caa0aac9", - "name": "Sacher", - "slug": "sacher" - }, - { - "id": "1cf17f96-58b5-4bd3-b1e8-1606a64b413d", - "name": "Cake", - "slug": "cake" - }, - { - "id": "3f5f0a3d-728f-440d-a6c7-5a68612e8c67", - "name": "Torte", - "slug": "torte" - }, - { - "id": "525f388d-6ee0-4ebe-91fc-dd320a7583f0", - "name": "Sachertorte", - "slug": "sachertorte" - }, - { - "id": "544a6e08-a899-4f63-9c72-bb2924df70cb", - "name": "Sacher Torte Cake", - "slug": "sacher-torte-cake" - }, - { - "id": "576c0a82-84ee-4e50-a14e-aa7a675b6352", - "name": "Sacher Torte", - "slug": "sacher-torte" - }, - { - "id": "d530b8e4-275a-4093-804b-6d0de154c206", - "name": "Original Sachertorte", - "slug": "original-sachertorte" - } - ], - "tools": [], - "rating": null, - "orgURL": "https://www.sacher.com/en/original-sacher-torte/recipe/", - "dateAdded": "2024-06-29", - "dateUpdated": "2024-06-29T06:10:34.412665", - "createdAt": "2024-06-29T06:10:34.414927", - "updateAt": "2024-06-29T06:10:34.414928", - "lastMade": null, - "recipeIngredient": [ - { - "quantity": 1.0, - "unit": null, - "food": null, - "note": "130g dark couverture chocolate (min. 55% cocoa content)", - "isFood": true, - "disableAmount": false, - "display": "1 130g dark couverture chocolate (min. 
55% cocoa content)", - "title": null, - "originalText": null, - "referenceId": "a3adfe78-d157-44d8-98be-9c133e45bb4e" - }, - { - "quantity": 1.0, - "unit": null, - "food": null, - "note": "1 Vanilla Pod", - "isFood": true, - "disableAmount": false, - "display": "1 1 Vanilla Pod", - "title": null, - "originalText": null, - "referenceId": "41d234d7-c040-48f9-91e6-f4636aebb77b" - }, - { - "quantity": 1.0, - "unit": null, - "food": null, - "note": "150g softened butter", - "isFood": true, - "disableAmount": false, - "display": "1 150g softened butter", - "title": null, - "originalText": null, - "referenceId": "f6ce06bf-8b02-43e6-8316-0dc3fb0da0fc" - }, - { - "quantity": 1.0, - "unit": null, - "food": null, - "note": "100g Icing sugar", - "isFood": true, - "disableAmount": false, - "display": "1 100g Icing sugar", - "title": null, - "originalText": null, - "referenceId": "f7fcd86e-b04b-4e07-b69c-513925811491" - }, - { - "quantity": 1.0, - "unit": null, - "food": null, - "note": "6 Eggs", - "isFood": true, - "disableAmount": false, - "display": "1 6 Eggs", - "title": null, - "originalText": null, - "referenceId": "a831fbc3-e2f5-452e-a745-450be8b4a130" - }, - { - "quantity": 1.0, - "unit": null, - "food": null, - "note": "100g Castor sugar", - "isFood": true, - "disableAmount": false, - "display": "1 100g Castor sugar", - "title": null, - "originalText": null, - "referenceId": "b5ee4bdc-0047-4de7-968b-f3360bbcb31e" - }, - { - "quantity": 1.0, - "unit": null, - "food": null, - "note": "140g Plain wheat flour", - "isFood": true, - "disableAmount": false, - "display": "1 140g Plain wheat flour", - "title": null, - "originalText": null, - "referenceId": "a67db09d-429c-4e77-919d-cfed3da675ad" - }, - { - "quantity": 1.0, - "unit": null, - "food": null, - "note": "200g apricot jam", - "isFood": true, - "disableAmount": false, - "display": "1 200g apricot jam", - "title": null, - "originalText": null, - "referenceId": "55479752-c062-4b25-aae3-2b210999d7b9" - }, - { - "quantity": 1.0, - "unit": null, - "food": null, - "note": "200g castor sugar", - "isFood": true, - "disableAmount": false, - "display": "1 200g castor sugar", - "title": null, - "originalText": null, - "referenceId": "ff9cd404-24ec-4d38-b0aa-0120ce1df679" - }, - { - "quantity": 1.0, - "unit": null, - "food": null, - "note": "150g dark couverture chocolate (min. 55% cocoa content)", - "isFood": true, - "disableAmount": false, - "display": "1 150g dark couverture chocolate (min. 55% cocoa content)", - "title": null, - "originalText": null, - "referenceId": "c7fca92e-971e-4728-a227-8b04783583ed" - }, - { - "quantity": 1.0, - "unit": null, - "food": null, - "note": "Unsweetend whipped cream to garnish", - "isFood": true, - "disableAmount": false, - "display": "1 Unsweetend whipped cream to garnish", - "title": null, - "originalText": null, - "referenceId": "ef023f23-7816-4871-87f6-4d29f9a283f7" - } - ], - "recipeInstructions": [ - { - "id": "2d558dbf-5361-4ef2-9d86-4161f5eb6146", - "title": "", - "text": "Preheat oven to 170°C. Line the base of a springform with baking paper, grease the sides, and dust with a little flour. Melt couverture over boiling water. Let cool slightly.", - "ingredientReferences": [] - }, - { - "id": "dbcc1c37-3cbf-4045-9902-8f7fd1e68f0a", - "title": "", - "text": "Slit vanilla pod lengthwise and scrape out seeds. 
Using a hand mixer with whisks, beat the softened butter with the icing sugar and vanilla seeds until bubbles appear.", - "ingredientReferences": [] - }, - { - "id": "2265bd14-a691-40b1-9fe6-7b5dfeac8401", - "title": "", - "text": "Separate the eggs. Whisk the egg yolks into the butter mixture one by one. Now gradually add melted couverture chocolate. Beat the egg whites with the castor sugar until stiff, then place on top of the butter and chocolate mixture. Sift the flour over the mixture, then fold in the flour and beaten egg whites.", - "ingredientReferences": [] - }, - { - "id": "0aade447-dfac-4aae-8e67-ac250ad13ae2", - "title": "", - "text": "Transfer the mixture to the springform, smooth the top, and bake in the oven (middle rack) for 10–15 minutes, leaving the oven door a finger's width ajar. Then close the oven and bake for approximately 50 minutes. (The cake is done when it yields slightly to the touch.)", - "ingredientReferences": [] - }, - { - "id": "5fdcb703-7103-468d-a65d-a92460b92eb3", - "title": "", - "text": "Remove the cake from the oven and loosen the sides of the springform. Carefully tip the cake onto a cake rack lined with baking paper and let cool for approximately 20 minutes. Then pull off the baking paper, turn the cake over, and leave on rack to cool completely.", - "ingredientReferences": [] - }, - { - "id": "81474afc-b44e-49b3-bb67-5d7dab8f832a", - "title": "", - "text": "Cut the cake in half horizontally. Warm the jam and stir until smooth. Brush the top of both cake halves with the jam and place one on top of the other. Brush the sides with the jam as well.", - "ingredientReferences": [] - }, - { - "id": "8fac8aee-0d3c-4f78-9ff8-56d20472e5f1", - "title": "", - "text": "To make the glaze, put the castor sugar into a saucepan with 125 ml water and boil over high heat for approximately 5 minutes. Take the sugar syrup off the stove and leave to cool a little. Coarsely chop the couverture, gradually adding it to the syrup, and stir until it forms a thick liquid (see tip below).", - "ingredientReferences": [] - }, - { - "id": "7162e099-d651-4656-902a-a09a9b40c4e1", - "title": "", - "text": "Pour all the lukewarm glaze liquid at once over the top of the cake and quickly spread using a palette knife. Leave the glaze to set for a few hours. 
Serve garnished with whipped cream.", - "ingredientReferences": [] - } - ], - "nutrition": { - "calories": "400", - "fatContent": "17", - "proteinContent": null, - "carbohydrateContent": null, - "fiberContent": null, - "sodiumContent": null, - "sugarContent": null - }, - "settings": { - "public": true, - "showNutrition": true, - "showAssets": true, - "landscapeView": false, - "disableComments": false, - "disableAmount": false, - "locked": false - }, - "assets": [], - "notes": [], - "extras": {}, - "comments": [] -} diff --git a/tests/components/mealie/fixtures/get_shopping_items.json b/tests/components/mealie/fixtures/get_shopping_items.json deleted file mode 100644 index 1016440816b..00000000000 --- a/tests/components/mealie/fixtures/get_shopping_items.json +++ /dev/null @@ -1,108 +0,0 @@ -{ - "page": 1, - "per_page": 1000, - "total": 3, - "total_pages": 1, - "items": [ - { - "quantity": 2.0, - "unit": null, - "food": null, - "note": "Apples", - "isFood": false, - "disableAmount": true, - "display": "2 Apples", - "shoppingListId": "9ce096fe-ded2-4077-877d-78ba450ab13e", - "checked": false, - "position": 0, - "foodId": null, - "labelId": null, - "unitId": null, - "extras": {}, - "id": "f45430f7-3edf-45a9-a50f-73bb375090be", - "label": null, - "recipeReferences": [], - "createdAt": "2024-06-25T10:45:03.362623", - "updateAt": "2024-06-25T11:57:22.412650" - }, - { - "quantity": 1.0, - "unit": { - "id": "7bf539d4-fc78-48bc-b48e-c35ccccec34a", - "name": "can", - "pluralName": null, - "description": "", - "extras": {}, - "fraction": true, - "abbreviation": "", - "pluralAbbreviation": "", - "useAbbreviation": false, - "aliases": [], - "createdAt": "2024-05-14T14:45:02.464122", - "updateAt": "2024-05-14T14:45:02.464124" - }, - "food": { - "id": "09322430-d24c-4b1a-abb6-22b6ed3a88f5", - "name": "acorn squash", - "pluralName": null, - "description": "", - "extras": {}, - "labelId": null, - "aliases": [], - "label": null, - "createdAt": "2024-05-14T14:45:04.454134", - "updateAt": "2024-05-14T14:45:04.454141" - }, - "note": "", - "isFood": true, - "disableAmount": false, - "display": "1 can acorn squash", - "shoppingListId": "9ce096fe-ded2-4077-877d-78ba450ab13e", - "checked": false, - "position": 1, - "foodId": "09322430-d24c-4b1a-abb6-22b6ed3a88f5", - "labelId": null, - "unitId": "7bf539d4-fc78-48bc-b48e-c35ccccec34a", - "extras": {}, - "id": "84d8fd74-8eb0-402e-84b6-71f251bfb7cc", - "label": null, - "recipeReferences": [], - "createdAt": "2024-06-25T10:45:14.547922", - "updateAt": "2024-06-25T10:45:14.547925" - }, - { - "quantity": 0.0, - "unit": null, - "food": { - "id": "96801494-4e26-4148-849a-8155deb76327", - "name": "aubergine", - "pluralName": null, - "description": "", - "extras": {}, - "labelId": null, - "aliases": [], - "label": null, - "createdAt": "2024-05-14T14:45:03.868792", - "updateAt": "2024-05-14T14:45:03.868794" - }, - "note": "", - "isFood": true, - "disableAmount": false, - "display": "aubergine", - "shoppingListId": "9ce096fe-ded2-4077-877d-78ba450ab13e", - "checked": false, - "position": 2, - "foodId": "96801494-4e26-4148-849a-8155deb76327", - "labelId": null, - "unitId": null, - "extras": {}, - "id": "69913b9a-7c75-4935-abec-297cf7483f88", - "label": null, - "recipeReferences": [], - "createdAt": "2024-06-25T11:56:59.656699", - "updateAt": "2024-06-25T11:56:59.656701" - } - ], - "next": null, - "previous": null -} diff --git a/tests/components/mealie/fixtures/get_shopping_lists.json b/tests/components/mealie/fixtures/get_shopping_lists.json deleted file mode 100644 index 
7b7ba0aaa7a..00000000000 --- a/tests/components/mealie/fixtures/get_shopping_lists.json +++ /dev/null @@ -1,838 +0,0 @@ -{ - "page": 1, - "per_page": 50, - "total": 3, - "total_pages": 1, - "items": [ - { - "name": "Supermarket", - "extras": {}, - "createdAt": "2024-06-17T11:01:54.267314", - "updateAt": "2024-06-22T10:22:13.555389", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "userId": "90b03954-00e1-46de-9520-f0305022b84f", - "id": "27edbaab-2ec6-441f-8490-0283ea77585f", - "recipeReferences": [], - "labelSettings": [ - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "0f63545a-606a-47ea-a784-452d45de6158", - "position": 0, - "id": "ad5f48b0-5b26-4c2d-a2aa-79b0beae1e42", - "label": { - "name": "Alcohol", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "0f63545a-606a-47ea-a784-452d45de6158" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "0c2d6111-9837-4319-acb5-490a32979993", - "position": 1, - "id": "c9b8289a-6693-4bec-9841-d7d08c3b240b", - "label": { - "name": "Baked Goods", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "0c2d6111-9837-4319-acb5-490a32979993" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "3922802c-8e8c-47d4-9c68-e60b0a1338b6", - "position": 2, - "id": "9be06f8a-6c23-476b-a8cc-334884bcdd40", - "label": { - "name": "Beverages", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "3922802c-8e8c-47d4-9c68-e60b0a1338b6" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "4111bfff-d834-4e8c-88ed-5eff761e06db", - "position": 3, - "id": "47bc36ae-1ee4-40be-ad68-ad8662c26cae", - "label": { - "name": "Canned Goods", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "4111bfff-d834-4e8c-88ed-5eff761e06db" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "24fa2836-25e8-44af-b497-ad0d428a7f78", - "position": 4, - "id": "ad41f42c-08c3-49ef-8b96-dc1740ec95b6", - "label": { - "name": "Condiments", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "24fa2836-25e8-44af-b497-ad0d428a7f78" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "20a735de-c75b-4fdb-abaf-b8d71ef192f8", - "position": 5, - "id": "5514842f-8c05-4003-a42d-7a5a70d80148", - "label": { - "name": "Confectionary", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "20a735de-c75b-4fdb-abaf-b8d71ef192f8" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "71178428-70aa-4491-b5b4-b8d93e7b04cf", - "position": 6, - "id": "0465a139-6571-4599-836b-a562afc95536", - "label": { - "name": "Dairy Products", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "71178428-70aa-4491-b5b4-b8d93e7b04cf" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "c58ed864-b5bf-4aac-88a1-007833c706c7", - "position": 7, - "id": "8d85fe1b-ec4d-49d0-aecc-15f9dbc66fd0", - "label": { - "name": "Frozen Foods", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "c58ed864-b5bf-4aac-88a1-007833c706c7" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "f398f1a4-ce53-42df-95d4-8a3403bb6a38", - "position": 8, - "id": "b6980720-bd88-4703-a115-50c0b915f607", - "label": { - "name": 
"Fruits", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "f398f1a4-ce53-42df-95d4-8a3403bb6a38" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "fd936065-3d53-4844-99df-9332f1bf0c8a", - "position": 9, - "id": "5d69d13c-5d7f-45af-9ecc-045ca914f7ca", - "label": { - "name": "Grains", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "fd936065-3d53-4844-99df-9332f1bf0c8a" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf", - "position": 10, - "id": "a5e65ce7-3588-412b-a118-2fe1a2ca0104", - "label": { - "name": "Health Foods", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b", - "position": 11, - "id": "9890d86a-98e9-4599-8daf-82d341ef1e8d", - "label": { - "name": "Household", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "cf136576-1929-4fc9-a3da-34c49ff58920", - "position": 12, - "id": "18fc0f39-3e45-412f-afa7-7eb779f7bfdf", - "label": { - "name": "Meat", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "cf136576-1929-4fc9-a3da-34c49ff58920" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa", - "position": 13, - "id": "4cd55de7-7c2e-4078-8c61-87d40b33ebda", - "label": { - "name": "Meat Products", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "2a035661-fd5d-462c-8eb0-6b78af982e0c", - "position": 14, - "id": "21c55b4a-c1b1-44c0-962e-040bbfa5e148", - "label": { - "name": "Other", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "2a035661-fd5d-462c-8eb0-6b78af982e0c" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "af147838-d114-4a92-bd0f-08f05f59bbe5", - "position": 15, - "id": "b295a6be-1437-4415-92bb-4eee21d3195d", - "label": { - "name": "Produce", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "af147838-d114-4a92-bd0f-08f05f59bbe5" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "cf7672b8-036a-45a4-8323-6a167d2731be", - "position": 16, - "id": "d3ae533f-c1a8-4f08-8a0f-a88914b2c84b", - "label": { - "name": "Regular", - "color": "#2E7D32FF", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "cf7672b8-036a-45a4-8323-6a167d2731be" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18", - "position": 17, - "id": "572dbf60-4308-499e-ad7c-d806462ee501", - "label": { - "name": "Seafood", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "1c59a263-227a-4f43-a450-d53ca1485b36", - "position": 18, - "id": "5321b4d8-3aba-4a64-95b2-03ac533dda32", - "label": { - "name": "Snacks", - "color": "#E0E0E0", - "groupId": 
"9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "1c59a263-227a-4f43-a450-d53ca1485b36" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "189099a9-0033-4783-804a-ec6805e7d557", - "position": 19, - "id": "98aebebf-27fe-4834-b3d3-0e45201a182f", - "label": { - "name": "Spices", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "189099a9-0033-4783-804a-ec6805e7d557" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "c28efdde-5993-4044-b824-f111f3a118ef", - "position": 20, - "id": "3e3aa706-3008-4280-b332-a7d2c31cf683", - "label": { - "name": "Sweets", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "c28efdde-5993-4044-b824-f111f3a118ef" - } - }, - { - "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", - "labelId": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c", - "position": 21, - "id": "48f109ca-c57a-4828-98ab-a2db1e6514c6", - "label": { - "name": "Vegetables", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c" - } - } - ] - }, - { - "name": "Special groceries", - "extras": {}, - "createdAt": "2024-06-07T07:17:05.479808", - "updateAt": "2024-06-12T08:44:58.831239", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "userId": "90b03954-00e1-46de-9520-f0305022b84f", - "id": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "recipeReferences": [], - "labelSettings": [ - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "0f63545a-606a-47ea-a784-452d45de6158", - "position": 0, - "id": "1a5dc45b-e6ae-4db2-bd2f-fa3c07efedeb", - "label": { - "name": "Alcohol", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "0f63545a-606a-47ea-a784-452d45de6158" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "0c2d6111-9837-4319-acb5-490a32979993", - "position": 1, - "id": "d1594c9d-f1b6-4160-a4eb-0686499a40ea", - "label": { - "name": "Baked Goods", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "0c2d6111-9837-4319-acb5-490a32979993" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "3922802c-8e8c-47d4-9c68-e60b0a1338b6", - "position": 2, - "id": "077106d0-5c85-493c-ae6b-dea06002c824", - "label": { - "name": "Beverages", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "3922802c-8e8c-47d4-9c68-e60b0a1338b6" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "4111bfff-d834-4e8c-88ed-5eff761e06db", - "position": 3, - "id": "bf66b7e8-3758-4f9e-9e13-c7b9ff564889", - "label": { - "name": "Canned Goods", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "4111bfff-d834-4e8c-88ed-5eff761e06db" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "24fa2836-25e8-44af-b497-ad0d428a7f78", - "position": 4, - "id": "bb34f741-10b4-490a-a512-67bbd374427c", - "label": { - "name": "Condiments", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "24fa2836-25e8-44af-b497-ad0d428a7f78" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "20a735de-c75b-4fdb-abaf-b8d71ef192f8", - "position": 5, - "id": "d88b23a5-e397-4cf2-b527-d8982ecf89e0", - "label": { - "name": "Confectionary", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": 
"20a735de-c75b-4fdb-abaf-b8d71ef192f8" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "71178428-70aa-4491-b5b4-b8d93e7b04cf", - "position": 6, - "id": "82d44804-5bef-4cc3-9d1f-0d8e879783c0", - "label": { - "name": "Dairy Products", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "71178428-70aa-4491-b5b4-b8d93e7b04cf" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "c58ed864-b5bf-4aac-88a1-007833c706c7", - "position": 7, - "id": "0ae70dde-7403-408f-a6c6-c19b8c0f6a4d", - "label": { - "name": "Frozen Foods", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "c58ed864-b5bf-4aac-88a1-007833c706c7" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "f398f1a4-ce53-42df-95d4-8a3403bb6a38", - "position": 8, - "id": "7667a581-8d63-4785-a013-8e164994dfc4", - "label": { - "name": "Fruits", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "f398f1a4-ce53-42df-95d4-8a3403bb6a38" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "fd936065-3d53-4844-99df-9332f1bf0c8a", - "position": 9, - "id": "749c8cbd-c4e5-4879-bce1-40c3b62ada71", - "label": { - "name": "Grains", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "fd936065-3d53-4844-99df-9332f1bf0c8a" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf", - "position": 10, - "id": "e7979797-7679-47be-b14f-5fdcfe1c987d", - "label": { - "name": "Health Foods", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b", - "position": 11, - "id": "1a9b6d19-d8b5-41a0-8e75-548c36fc0b1b", - "label": { - "name": "Household", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "cf136576-1929-4fc9-a3da-34c49ff58920", - "position": 12, - "id": "0df24ff7-1767-46a1-9841-97f816079580", - "label": { - "name": "Meat", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "cf136576-1929-4fc9-a3da-34c49ff58920" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa", - "position": 13, - "id": "761b5985-9f49-450b-a33c-5b85366501da", - "label": { - "name": "Meat Products", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "2a035661-fd5d-462c-8eb0-6b78af982e0c", - "position": 14, - "id": "cd993b6c-2c06-40b3-8fe2-8f9613d29b8e", - "label": { - "name": "Other", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "2a035661-fd5d-462c-8eb0-6b78af982e0c" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "af147838-d114-4a92-bd0f-08f05f59bbe5", - "position": 15, - "id": "9c9f8e0d-a9e8-4503-ad98-ee7039ec6eec", - "label": { - "name": "Produce", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "af147838-d114-4a92-bd0f-08f05f59bbe5" - } - }, - { - 
"shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "cf7672b8-036a-45a4-8323-6a167d2731be", - "position": 16, - "id": "f2a1fa92-1ee3-47b5-9d5f-1ac21e0d6bf3", - "label": { - "name": "Regular", - "color": "#2E7D32FF", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "cf7672b8-036a-45a4-8323-6a167d2731be" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18", - "position": 17, - "id": "bf2eb5db-bf88-44bc-a83f-7c69c38fc03f", - "label": { - "name": "Seafood", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "1c59a263-227a-4f43-a450-d53ca1485b36", - "position": 18, - "id": "14f5ca34-fcec-4847-8ee7-71b29488dc5b", - "label": { - "name": "Snacks", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "1c59a263-227a-4f43-a450-d53ca1485b36" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "189099a9-0033-4783-804a-ec6805e7d557", - "position": 19, - "id": "197f3d41-27a6-4782-a78d-60ea582108c8", - "label": { - "name": "Spices", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "189099a9-0033-4783-804a-ec6805e7d557" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "c28efdde-5993-4044-b824-f111f3a118ef", - "position": 20, - "id": "b5021331-2004-4570-a2bb-c6f364787bcc", - "label": { - "name": "Sweets", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "c28efdde-5993-4044-b824-f111f3a118ef" - } - }, - { - "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", - "labelId": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c", - "position": 21, - "id": "98e9ecff-d650-4717-96fe-d7744258bf43", - "label": { - "name": "Vegetables", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c" - } - } - ] - }, - { - "name": "Freezer", - "extras": {}, - "createdAt": "2024-06-05T09:49:00.404632", - "updateAt": "2024-06-23T08:21:51.764793", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "userId": "90b03954-00e1-46de-9520-f0305022b84f", - "id": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "recipeReferences": [], - "labelSettings": [ - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "0f63545a-606a-47ea-a784-452d45de6158", - "position": 0, - "id": "666b5b98-dcf6-4121-a5a6-2782f06f5f7e", - "label": { - "name": "Alcohol", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "0f63545a-606a-47ea-a784-452d45de6158" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "0c2d6111-9837-4319-acb5-490a32979993", - "position": 1, - "id": "6d25fc7e-33d2-459c-ba14-7e0aaf30a522", - "label": { - "name": "Baked Goods", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "0c2d6111-9837-4319-acb5-490a32979993" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "3922802c-8e8c-47d4-9c68-e60b0a1338b6", - "position": 2, - "id": "56402a4e-c94e-4480-9f68-87370dbda209", - "label": { - "name": "Beverages", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "3922802c-8e8c-47d4-9c68-e60b0a1338b6" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": 
"4111bfff-d834-4e8c-88ed-5eff761e06db", - "position": 3, - "id": "743e9e2b-a13a-4d80-b203-431d1c23f691", - "label": { - "name": "Canned Goods", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "4111bfff-d834-4e8c-88ed-5eff761e06db" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "24fa2836-25e8-44af-b497-ad0d428a7f78", - "position": 4, - "id": "93b46c6e-0542-4adf-ad9d-8942b47dd9e3", - "label": { - "name": "Condiments", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "24fa2836-25e8-44af-b497-ad0d428a7f78" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "20a735de-c75b-4fdb-abaf-b8d71ef192f8", - "position": 5, - "id": "8c6f20ff-a5e3-4c64-a1ff-aa07bbdd455a", - "label": { - "name": "Confectionary", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "20a735de-c75b-4fdb-abaf-b8d71ef192f8" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "71178428-70aa-4491-b5b4-b8d93e7b04cf", - "position": 6, - "id": "02995d80-108f-4949-bd58-d04d670b388d", - "label": { - "name": "Dairy Products", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "71178428-70aa-4491-b5b4-b8d93e7b04cf" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "c58ed864-b5bf-4aac-88a1-007833c706c7", - "position": 7, - "id": "b20c178c-e719-4159-b199-91a6dd25dcd3", - "label": { - "name": "Frozen Foods", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "c58ed864-b5bf-4aac-88a1-007833c706c7" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "f398f1a4-ce53-42df-95d4-8a3403bb6a38", - "position": 8, - "id": "5ff12e47-9b84-46d2-aabf-da4165a68f65", - "label": { - "name": "Fruits", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "f398f1a4-ce53-42df-95d4-8a3403bb6a38" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "fd936065-3d53-4844-99df-9332f1bf0c8a", - "position": 9, - "id": "e0ec7da9-c0b8-4d78-a5b8-591c99d87370", - "label": { - "name": "Grains", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "fd936065-3d53-4844-99df-9332f1bf0c8a" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf", - "position": 10, - "id": "3dc2d2e7-274e-40ec-8ba1-09ce1820b29b", - "label": { - "name": "Health Foods", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b", - "position": 11, - "id": "e30fa937-4bb1-4ff9-b163-2da67e2749ca", - "label": { - "name": "Household", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "cf136576-1929-4fc9-a3da-34c49ff58920", - "position": 12, - "id": "ecd715af-fafe-4d32-a376-538e476bf215", - "label": { - "name": "Meat", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "cf136576-1929-4fc9-a3da-34c49ff58920" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa", - "position": 
13, - "id": "5ded867c-473f-456d-b0a0-83cae279df71", - "label": { - "name": "Meat Products", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "2a035661-fd5d-462c-8eb0-6b78af982e0c", - "position": 14, - "id": "eb88d477-cd50-4b84-a1bb-5adc077d38e5", - "label": { - "name": "Other", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "2a035661-fd5d-462c-8eb0-6b78af982e0c" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "af147838-d114-4a92-bd0f-08f05f59bbe5", - "position": 15, - "id": "ab7e96e3-f8d5-4e4e-91ee-b966bd980cf0", - "label": { - "name": "Produce", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "af147838-d114-4a92-bd0f-08f05f59bbe5" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "cf7672b8-036a-45a4-8323-6a167d2731be", - "position": 16, - "id": "3fcf5e5a-f8e2-4174-be79-2496a1cb505a", - "label": { - "name": "Regular", - "color": "#2E7D32FF", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "cf7672b8-036a-45a4-8323-6a167d2731be" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18", - "position": 17, - "id": "e768c9e7-c568-44d1-a263-081d93fd1298", - "label": { - "name": "Seafood", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "1c59a263-227a-4f43-a450-d53ca1485b36", - "position": 18, - "id": "f8a78147-c6d1-4a86-b159-5f178ae72089", - "label": { - "name": "Snacks", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "1c59a263-227a-4f43-a450-d53ca1485b36" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "189099a9-0033-4783-804a-ec6805e7d557", - "position": 19, - "id": "23253f2f-bc71-4ecf-837c-d1697738b505", - "label": { - "name": "Spices", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "189099a9-0033-4783-804a-ec6805e7d557" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "c28efdde-5993-4044-b824-f111f3a118ef", - "position": 20, - "id": "706d656b-3755-46f7-8c12-c9196730baf2", - "label": { - "name": "Sweets", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "c28efdde-5993-4044-b824-f111f3a118ef" - } - }, - { - "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", - "labelId": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c", - "position": 21, - "id": "d9d60d8d-f2de-4636-864f-d7262e24ead3", - "label": { - "name": "Vegetables", - "color": "#E0E0E0", - "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", - "id": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c" - } - } - ] - } - ], - "next": null, - "previous": null -} diff --git a/tests/components/mealie/fixtures/mealplan.json b/tests/components/mealie/fixtures/mealplan.json deleted file mode 100644 index b540280d83f..00000000000 --- a/tests/components/mealie/fixtures/mealplan.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "date": "2024-01-22", - "entryType": "dinner", - "title": "", - "text": "", - "recipeId": "c5f00a93-71a2-4e48-900f-d9ad0bb9de93", - "id": 230, - "groupId": "0bf60b2e-ca89-42a9-94d4-8f67ca72b157", - "userId": 
"1ce8b5fe-04e8-4b80-aab1-d92c94685c6d", - "recipe": { - "id": "c5f00a93-71a2-4e48-900f-d9ad0bb9de93", - "userId": "1ce8b5fe-04e8-4b80-aab1-d92c94685c6d", - "groupId": "0bf60b2e-ca89-42a9-94d4-8f67ca72b157", - "name": "Zoete aardappel curry traybake", - "slug": "zoete-aardappel-curry-traybake", - "image": "AiIo", - "recipeYield": "2 servings", - "totalTime": "40 Minutes", - "prepTime": null, - "cookTime": null, - "performTime": null, - "description": "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", - "recipeCategory": [], - "tags": [], - "tools": [], - "rating": null, - "orgURL": "https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/", - "dateAdded": "2024-01-22", - "dateUpdated": "2024-01-22T00:27:46.324512", - "createdAt": "2024-01-22T00:27:46.327546", - "updateAt": "2024-01-22T00:27:46.327548", - "lastMade": null - } -} diff --git a/tests/components/mealie/fixtures/statistics.json b/tests/components/mealie/fixtures/statistics.json deleted file mode 100644 index 350bf1fd9ff..00000000000 --- a/tests/components/mealie/fixtures/statistics.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "totalRecipes": 765, - "totalUsers": 3, - "totalCategories": 24, - "totalTags": 454, - "totalTools": 11 -} diff --git a/tests/components/mealie/fixtures/users_self.json b/tests/components/mealie/fixtures/users_self.json deleted file mode 100644 index 6d5901c8cc0..00000000000 --- a/tests/components/mealie/fixtures/users_self.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "id": "bf1c62fe-4941-4332-9886-e54e88dbdba0", - "username": "admin", - "fullName": "Change Me", - "email": "changeme@example.com", - "authMethod": "Mealie", - "admin": true, - "group": "home", - "advanced": true, - "canInvite": true, - "canManage": true, - "canOrganize": true, - "groupId": "24477569-f6af-4b53-9e3f-6d04b0ca6916", - "groupSlug": "home", - "tokens": [ - { - "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJsb25nX3Rva2VuIjp0cnVlLCJpZCI6ImJmMWM2MmZlLTQ5NDEtNDMzMi05ODg2LWU1NGU4OGRiZGJhMCIsIm5hbWUiOiJ0ZXN0aW5nIiwiaW50ZWdyYXRpb25faWQiOiJnZW5lcmljIiwiZXhwIjoxODczOTA5ODk4fQ.xwXZp4fL2g1RbIqGtBeOaS6RDfsYbQDHj8XtRM3wlX0", - "name": "testing", - "id": 2, - "createdAt": "2024-05-20T10:31:38.179669" - } - ], - "cacheKey": "1234" -} diff --git a/tests/components/mealie/snapshots/test_calendar.ambr b/tests/components/mealie/snapshots/test_calendar.ambr index e5a0a697157..6af53c112de 100644 --- a/tests/components/mealie/snapshots/test_calendar.ambr +++ b/tests/components/mealie/snapshots/test_calendar.ambr @@ -147,20 +147,6 @@ 'summary': 'Mousse de saumon', 'uid': None, }), - dict({ - 'description': 'Dineren met de boys', - 'end': dict({ - 
'date': '2024-01-22', - }), - 'location': None, - 'recurrence_id': None, - 'rrule': None, - 'start': dict({ - 'date': '2024-01-21', - }), - 'summary': 'Aquavite', - 'uid': None, - }), ]) # --- # name: test_entities[calendar.mealie_breakfast-entry] @@ -192,7 +178,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'breakfast', - 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_breakfast', + 'unique_id': '01J0BC4QM2YBRP6H5G933CETT7_breakfast', 'unit_of_measurement': None, }) # --- @@ -244,7 +230,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dinner', - 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_dinner', + 'unique_id': '01J0BC4QM2YBRP6H5G933CETT7_dinner', 'unit_of_measurement': None, }) # --- @@ -252,12 +238,12 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': True, - 'description': 'Dineren met de boys', - 'end_time': '2024-01-22 00:00:00', + 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", + 'end_time': '2024-01-23 00:00:00', 'friendly_name': 'Mealie Dinner', 'location': '', - 'message': 'Aquavite', - 'start_time': '2024-01-21 00:00:00', + 'message': 'Zoete aardappel curry traybake', + 'start_time': '2024-01-22 00:00:00', }), 'context': , 'entity_id': 'calendar.mealie_dinner', @@ -296,7 +282,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'lunch', - 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_lunch', + 'unique_id': '01J0BC4QM2YBRP6H5G933CETT7_lunch', 'unit_of_measurement': None, }) # --- @@ -304,12 +290,12 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': True, - 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', - 'end_time': '2024-01-23 00:00:00', + 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. 
Ingredientes, tiempo de...', + 'end_time': '2024-01-24 00:00:00', 'friendly_name': 'Mealie Lunch', 'location': '', - 'message': 'All-American Beef Stew Recipe', - 'start_time': '2024-01-22 00:00:00', + 'message': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', + 'start_time': '2024-01-23 00:00:00', }), 'context': , 'entity_id': 'calendar.mealie_lunch', @@ -348,7 +334,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'side', - 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_side', + 'unique_id': '01J0BC4QM2YBRP6H5G933CETT7_side', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/mealie/snapshots/test_diagnostics.ambr b/tests/components/mealie/snapshots/test_diagnostics.ambr deleted file mode 100644 index a694c72fcf6..00000000000 --- a/tests/components/mealie/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,534 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'about': dict({ - 'version': 'v1.10.2', - }), - 'mealplans': dict({ - 'breakfast': list([ - dict({ - 'description': None, - 'entry_type': 'breakfast', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-23', - }), - 'mealplan_id': 229, - 'recipe': dict({ - 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! Served with roasted vegetables, this recipe is simple enough for any cook!', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'JeQ2', - 'name': 'Roast Chicken', - 'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/', - 'recipe_id': '5b055066-d57d-4fd0-8dfd-a2c2f07b36f1', - 'recipe_yield': '6 servings', - 'slug': 'roast-chicken', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - ]), - 'dinner': list([ - dict({ - 'description': None, - 'entry_type': 'dinner', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-22', - }), - 'mealplan_id': 230, - 'recipe': dict({ - 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. 
Megalekker bordje comfortfood als je \\'t ons vraagt.", - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'AiIo', - 'name': 'Zoete aardappel curry traybake', - 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', - 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', - 'recipe_yield': '2 servings', - 'slug': 'zoete-aardappel-curry-traybake', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': 'dinner', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-23', - }), - 'mealplan_id': 222, - 'recipe': dict({ - 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'En9o', - 'name': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο (1)', - 'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno', - 'recipe_id': 'f79f7e9d-4b58-4930-a586-2b127f16ee34', - 'recipe_yield': '6 servings', - 'slug': 'eukole-makaronada-me-kephtedakia-ston-phourno-1', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': 'dinner', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-23', - }), - 'mealplan_id': 221, - 'recipe': dict({ - 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'Kn62', - 'name': 'Greek Turkey Meatballs with Lemon Orzo & Creamy Feta Yogurt Sauce', - 'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/', - 'recipe_id': '47595e4c-52bc-441d-b273-3edf4258806d', - 'recipe_yield': '4 servings', - 'slug': 'greek-turkey-meatballs-with-lemon-orzo-creamy-feta-yogurt-sauce', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': 'dinner', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-23', - }), - 'mealplan_id': 219, - 'recipe': dict({ - 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. It is really easy to make and I never have any leftovers. 
Cook time includes chill time.', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'ibL6', - 'name': 'Pampered Chef Double Chocolate Mocha Trifle', - 'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963', - 'recipe_id': '92635fd0-f2dc-4e78-a6e4-ecd556ad361f', - 'recipe_yield': '12 servings', - 'slug': 'pampered-chef-double-chocolate-mocha-trifle', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': 'dinner', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-22', - }), - 'mealplan_id': 217, - 'recipe': dict({ - 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! These sliders are quick and easy plus they are make-ahead and reheat really well.', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'beGq', - 'name': 'Cheeseburger Sliders (Easy, 30-min Recipe)', - 'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/', - 'recipe_id': '8bdd3656-5e7e-45d3-a3c4-557390846a22', - 'recipe_yield': '24 servings', - 'slug': 'cheeseburger-sliders-easy-30-min-recipe', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': 'dinner', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-23', - }), - 'mealplan_id': 212, - 'recipe': dict({ - 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': '356X', - 'name': 'All-American Beef Stew Recipe', - 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', - 'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e', - 'recipe_yield': '6 servings', - 'slug': 'all-american-beef-stew-recipe', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': 'dinner', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-22', - }), - 'mealplan_id': 211, - 'recipe': dict({ - 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. 
Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'nOPT', - 'name': 'Einfacher Nudelauflauf mit Brokkoli', - 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', - 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', - 'recipe_yield': '4 servings', - 'slug': 'einfacher-nudelauflauf-mit-brokkoli', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': 'dinner', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-23', - }), - 'mealplan_id': 196, - 'recipe': dict({ - 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': '5G1v', - 'name': 'Miso Udon Noodles with Spinach and Tofu', - 'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/', - 'recipe_id': '25b814f2-d9bf-4df0-b40d-d2f2457b4317', - 'recipe_yield': '2 servings', - 'slug': 'miso-udon-noodles-with-spinach-and-tofu', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': 'dinner', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-22', - }), - 'mealplan_id': 195, - 'recipe': dict({ - 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'rrNL', - 'name': 'Mousse de saumon', - 'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon', - 'recipe_id': '55c88810-4cf1-4d86-ae50-63b15fd173fb', - 'recipe_yield': '12 servings', - 'slug': 'mousse-de-saumon', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': 'Dineren met de boys', - 'entry_type': 'dinner', - 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-21', - }), - 'mealplan_id': 1, - 'recipe': None, - 'title': 'Aquavite', - 'user_id': '6caa6e4d-521f-4ef4-9ed7-388bdd63f47d', - }), - ]), - 'lunch': list([ - dict({ - 'description': None, - 'entry_type': 'lunch', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-23', - }), - 'mealplan_id': 226, - 'recipe': dict({ - 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. 
Ingredientes, tiempo de...', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'INQz', - 'name': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', - 'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos', - 'recipe_id': 'e360a0cc-18b0-4a84-a91b-8aa59e2451c9', - 'recipe_yield': '2 servings', - 'slug': 'receta-de-pollo-al-curry-en-10-minutos-con-video-incluido', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': 'lunch', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-23', - }), - 'mealplan_id': 224, - 'recipe': dict({ - 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'nj5M', - 'name': 'Boeuf bourguignon : la vraie recette (2)', - 'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx', - 'recipe_id': '9c7b8aee-c93c-4b1b-ab48-2625d444743a', - 'recipe_yield': '4 servings', - 'slug': 'boeuf-bourguignon-la-vraie-recette-2', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': 'lunch', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-22', - }), - 'mealplan_id': 216, - 'recipe': dict({ - 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': '356X', - 'name': 'All-American Beef Stew Recipe', - 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', - 'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e', - 'recipe_yield': '6 servings', - 'slug': 'all-american-beef-stew-recipe', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - ]), - 'side': list([ - dict({ - 'description': None, - 'entry_type': 'side', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': dict({ - '__type': "", - 'isoformat': '2024-01-23', - }), - 'mealplan_id': 220, - 'recipe': dict({ - 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. 
Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'nOPT', - 'name': 'Einfacher Nudelauflauf mit Brokkoli', - 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', - 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', - 'recipe_yield': '4 servings', - 'slug': 'einfacher-nudelauflauf-mit-brokkoli', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - ]), - }), - 'shoppinglist': dict({ - '27edbaab-2ec6-441f-8490-0283ea77585f': dict({ - 'items': list([ - dict({ - 'checked': False, - 'disable_amount': True, - 'display': '2 Apples', - 'food_id': None, - 'is_food': False, - 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', - 'label_id': None, - 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', - 'note': 'Apples', - 'position': 0, - 'quantity': 2.0, - 'unit_id': None, - }), - dict({ - 'checked': False, - 'disable_amount': False, - 'display': '1 can acorn squash', - 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', - 'is_food': True, - 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', - 'label_id': None, - 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', - 'note': '', - 'position': 1, - 'quantity': 1.0, - 'unit_id': '7bf539d4-fc78-48bc-b48e-c35ccccec34a', - }), - dict({ - 'checked': False, - 'disable_amount': False, - 'display': 'aubergine', - 'food_id': '96801494-4e26-4148-849a-8155deb76327', - 'is_food': True, - 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', - 'label_id': None, - 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', - 'note': '', - 'position': 2, - 'quantity': 0.0, - 'unit_id': None, - }), - ]), - 'shopping_list': dict({ - 'list_id': '27edbaab-2ec6-441f-8490-0283ea77585f', - 'name': 'Supermarket', - }), - }), - 'e9d78ff2-4b23-4b77-a3a8-464827100b46': dict({ - 'items': list([ - dict({ - 'checked': False, - 'disable_amount': True, - 'display': '2 Apples', - 'food_id': None, - 'is_food': False, - 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', - 'label_id': None, - 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', - 'note': 'Apples', - 'position': 0, - 'quantity': 2.0, - 'unit_id': None, - }), - dict({ - 'checked': False, - 'disable_amount': False, - 'display': '1 can acorn squash', - 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', - 'is_food': True, - 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', - 'label_id': None, - 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', - 'note': '', - 'position': 1, - 'quantity': 1.0, - 'unit_id': '7bf539d4-fc78-48bc-b48e-c35ccccec34a', - }), - dict({ - 'checked': False, - 'disable_amount': False, - 'display': 'aubergine', - 'food_id': '96801494-4e26-4148-849a-8155deb76327', - 'is_food': True, - 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', - 'label_id': None, - 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', - 'note': '', - 'position': 2, - 'quantity': 0.0, - 'unit_id': None, - }), - ]), - 'shopping_list': dict({ - 'list_id': 'e9d78ff2-4b23-4b77-a3a8-464827100b46', - 'name': 'Freezer', - }), - }), - 'f8438635-8211-4be8-80d0-0aa42e37a5f2': dict({ - 'items': list([ - dict({ - 'checked': False, - 'disable_amount': True, - 'display': '2 Apples', - 'food_id': None, - 'is_food': False, - 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', - 'label_id': None, - 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', - 'note': 'Apples', - 'position': 0, - 'quantity': 2.0, - 'unit_id': None, - }), - dict({ - 
'checked': False, - 'disable_amount': False, - 'display': '1 can acorn squash', - 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', - 'is_food': True, - 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', - 'label_id': None, - 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', - 'note': '', - 'position': 1, - 'quantity': 1.0, - 'unit_id': '7bf539d4-fc78-48bc-b48e-c35ccccec34a', - }), - dict({ - 'checked': False, - 'disable_amount': False, - 'display': 'aubergine', - 'food_id': '96801494-4e26-4148-849a-8155deb76327', - 'is_food': True, - 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', - 'label_id': None, - 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', - 'note': '', - 'position': 2, - 'quantity': 0.0, - 'unit_id': None, - }), - ]), - 'shopping_list': dict({ - 'list_id': 'f8438635-8211-4be8-80d0-0aa42e37a5f2', - 'name': 'Special groceries', - }), - }), - }), - }) -# --- diff --git a/tests/components/mealie/snapshots/test_init.ambr b/tests/components/mealie/snapshots/test_init.ambr index 98ca52dd15e..1333b292dac 100644 --- a/tests/components/mealie/snapshots/test_init.ambr +++ b/tests/components/mealie/snapshots/test_init.ambr @@ -13,7 +13,7 @@ 'identifiers': set({ tuple( 'mealie', - 'bf1c62fe-4941-4332-9886-e54e88dbdba0', + '01J0BC4QM2YBRP6H5G933CETT7', ), }), 'is_new': False, @@ -21,13 +21,12 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'Mealie', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'v1.10.2', + 'sw_version': None, 'via_device_id': None, }) # --- diff --git a/tests/components/mealie/snapshots/test_sensor.ambr b/tests/components/mealie/snapshots/test_sensor.ambr deleted file mode 100644 index e645cf4c45f..00000000000 --- a/tests/components/mealie/snapshots/test_sensor.ambr +++ /dev/null @@ -1,251 +0,0 @@ -# serializer version: 1 -# name: test_entities[sensor.mealie_categories-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mealie_categories', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Categories', - 'platform': 'mealie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'categories', - 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_categories', - 'unit_of_measurement': 'categories', - }) -# --- -# name: test_entities[sensor.mealie_categories-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mealie Categories', - 'state_class': , - 'unit_of_measurement': 'categories', - }), - 'context': , - 'entity_id': 'sensor.mealie_categories', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '24', - }) -# --- -# name: test_entities[sensor.mealie_recipes-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mealie_recipes', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'Recipes', - 'platform': 'mealie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'recipes', - 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_recipes', - 'unit_of_measurement': 'recipes', - }) -# --- -# name: test_entities[sensor.mealie_recipes-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mealie Recipes', - 'state_class': , - 'unit_of_measurement': 'recipes', - }), - 'context': , - 'entity_id': 'sensor.mealie_recipes', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '765', - }) -# --- -# name: test_entities[sensor.mealie_tags-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mealie_tags', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tags', - 'platform': 'mealie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tags', - 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_tags', - 'unit_of_measurement': 'tags', - }) -# --- -# name: test_entities[sensor.mealie_tags-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mealie Tags', - 'state_class': , - 'unit_of_measurement': 'tags', - }), - 'context': , - 'entity_id': 'sensor.mealie_tags', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '454', - }) -# --- -# name: test_entities[sensor.mealie_tools-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mealie_tools', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tools', - 'platform': 'mealie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tools', - 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_tools', - 'unit_of_measurement': 'tools', - }) -# --- -# name: test_entities[sensor.mealie_tools-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mealie Tools', - 'state_class': , - 'unit_of_measurement': 'tools', - }), - 'context': , - 'entity_id': 'sensor.mealie_tools', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11', - }) -# --- -# name: test_entities[sensor.mealie_users-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mealie_users', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Users', - 'platform': 'mealie', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'users', - 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_users', - 'unit_of_measurement': 'users', - }) -# --- -# name: test_entities[sensor.mealie_users-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mealie Users', - 'state_class': , - 'unit_of_measurement': 'users', - }), - 'context': , - 'entity_id': 'sensor.mealie_users', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3', - }) -# --- diff --git a/tests/components/mealie/snapshots/test_services.ambr b/tests/components/mealie/snapshots/test_services.ambr deleted file mode 100644 index 4f9ee6a5c09..00000000000 --- a/tests/components/mealie/snapshots/test_services.ambr +++ /dev/null @@ -1,786 +0,0 @@ -# serializer version: 1 -# name: test_service_import_recipe - dict({ - 'recipe': dict({ - 'date_added': datetime.date(2024, 6, 29), - 'description': 'The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”', - 'group_id': '24477569-f6af-4b53-9e3f-6d04b0ca6916', - 'household_id': None, - 'image': 'SuPW', - 'ingredients': list([ - dict({ - 'is_food': True, - 'note': '130g dark couverture chocolate (min. 55% cocoa content)', - 'quantity': 1.0, - 'reference_id': 'a3adfe78-d157-44d8-98be-9c133e45bb4e', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '1 Vanilla Pod', - 'quantity': 1.0, - 'reference_id': '41d234d7-c040-48f9-91e6-f4636aebb77b', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '150g softened butter', - 'quantity': 1.0, - 'reference_id': 'f6ce06bf-8b02-43e6-8316-0dc3fb0da0fc', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '100g Icing sugar', - 'quantity': 1.0, - 'reference_id': 'f7fcd86e-b04b-4e07-b69c-513925811491', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '6 Eggs', - 'quantity': 1.0, - 'reference_id': 'a831fbc3-e2f5-452e-a745-450be8b4a130', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '100g Castor sugar', - 'quantity': 1.0, - 'reference_id': 'b5ee4bdc-0047-4de7-968b-f3360bbcb31e', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '140g Plain wheat flour', - 'quantity': 1.0, - 'reference_id': 'a67db09d-429c-4e77-919d-cfed3da675ad', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '200g apricot jam', - 'quantity': 1.0, - 'reference_id': '55479752-c062-4b25-aae3-2b210999d7b9', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '200g castor sugar', - 'quantity': 1.0, - 'reference_id': 'ff9cd404-24ec-4d38-b0aa-0120ce1df679', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '150g dark couverture chocolate (min. 
55% cocoa content)', - 'quantity': 1.0, - 'reference_id': 'c7fca92e-971e-4728-a227-8b04783583ed', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': 'Unsweetend whipped cream to garnish', - 'quantity': 1.0, - 'reference_id': 'ef023f23-7816-4871-87f6-4d29f9a283f7', - 'unit': None, - }), - ]), - 'instructions': list([ - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '2d558dbf-5361-4ef2-9d86-4161f5eb6146', - 'text': 'Preheat oven to 170°C. Line the base of a springform with baking paper, grease the sides, and dust with a little flour. Melt couverture over boiling water. Let cool slightly.', - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': 'dbcc1c37-3cbf-4045-9902-8f7fd1e68f0a', - 'text': 'Slit vanilla pod lengthwise and scrape out seeds. Using a hand mixer with whisks, beat the softened butter with the icing sugar and vanilla seeds until bubbles appear.', - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '2265bd14-a691-40b1-9fe6-7b5dfeac8401', - 'text': 'Separate the eggs. Whisk the egg yolks into the butter mixture one by one. Now gradually add melted couverture chocolate. Beat the egg whites with the castor sugar until stiff, then place on top of the butter and chocolate mixture. Sift the flour over the mixture, then fold in the flour and beaten egg whites.', - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '0aade447-dfac-4aae-8e67-ac250ad13ae2', - 'text': "Transfer the mixture to the springform, smooth the top, and bake in the oven (middle rack) for 10–15 minutes, leaving the oven door a finger's width ajar. Then close the oven and bake for approximately 50 minutes. (The cake is done when it yields slightly to the touch.)", - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '5fdcb703-7103-468d-a65d-a92460b92eb3', - 'text': 'Remove the cake from the oven and loosen the sides of the springform. Carefully tip the cake onto a cake rack lined with baking paper and let cool for approximately 20 minutes. Then pull off the baking paper, turn the cake over, and leave on rack to cool completely.', - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '81474afc-b44e-49b3-bb67-5d7dab8f832a', - 'text': 'Cut the cake in half horizontally. Warm the jam and stir until smooth. Brush the top of both cake halves with the jam and place one on top of the other. Brush the sides with the jam as well.', - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '8fac8aee-0d3c-4f78-9ff8-56d20472e5f1', - 'text': 'To make the glaze, put the castor sugar into a saucepan with 125 ml water and boil over high heat for approximately 5 minutes. Take the sugar syrup off the stove and leave to cool a little. Coarsely chop the couverture, gradually adding it to the syrup, and stir until it forms a thick liquid (see tip below).', - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '7162e099-d651-4656-902a-a09a9b40c4e1', - 'text': 'Pour all the lukewarm glaze liquid at once over the top of the cake and quickly spread using a palette knife. Leave the glaze to set for a few hours. 
Serve garnished with whipped cream.', - 'title': None, - }), - ]), - 'name': 'Original Sacher-Torte (2)', - 'original_url': 'https://www.sacher.com/en/original-sacher-torte/recipe/', - 'recipe_id': 'fada9582-709b-46aa-b384-d5952123ad93', - 'recipe_yield': '4 servings', - 'slug': 'original-sacher-torte-2', - 'tags': list([ - dict({ - 'name': 'Sacher', - 'slug': 'sacher', - 'tag_id': '1b5789b9-3af6-412e-8c77-8a01caa0aac9', - }), - dict({ - 'name': 'Cake', - 'slug': 'cake', - 'tag_id': '1cf17f96-58b5-4bd3-b1e8-1606a64b413d', - }), - dict({ - 'name': 'Torte', - 'slug': 'torte', - 'tag_id': '3f5f0a3d-728f-440d-a6c7-5a68612e8c67', - }), - dict({ - 'name': 'Sachertorte', - 'slug': 'sachertorte', - 'tag_id': '525f388d-6ee0-4ebe-91fc-dd320a7583f0', - }), - dict({ - 'name': 'Sacher Torte Cake', - 'slug': 'sacher-torte-cake', - 'tag_id': '544a6e08-a899-4f63-9c72-bb2924df70cb', - }), - dict({ - 'name': 'Sacher Torte', - 'slug': 'sacher-torte', - 'tag_id': '576c0a82-84ee-4e50-a14e-aa7a675b6352', - }), - dict({ - 'name': 'Original Sachertorte', - 'slug': 'original-sachertorte', - 'tag_id': 'd530b8e4-275a-4093-804b-6d0de154c206', - }), - ]), - 'user_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0', - }), - }) -# --- -# name: test_service_mealplan - dict({ - 'mealplan': list([ - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 22), - 'mealplan_id': 230, - 'recipe': dict({ - 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'AiIo', - 'name': 'Zoete aardappel curry traybake', - 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', - 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', - 'recipe_yield': '2 servings', - 'slug': 'zoete-aardappel-curry-traybake', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': 229, - 'recipe': dict({ - 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! 
Served with roasted vegetables, this recipe is simple enough for any cook!', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'JeQ2', - 'name': 'Roast Chicken', - 'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/', - 'recipe_id': '5b055066-d57d-4fd0-8dfd-a2c2f07b36f1', - 'recipe_yield': '6 servings', - 'slug': 'roast-chicken', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': 226, - 'recipe': dict({ - 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'INQz', - 'name': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', - 'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos', - 'recipe_id': 'e360a0cc-18b0-4a84-a91b-8aa59e2451c9', - 'recipe_yield': '2 servings', - 'slug': 'receta-de-pollo-al-curry-en-10-minutos-con-video-incluido', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': 224, - 'recipe': dict({ - 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'nj5M', - 'name': 'Boeuf bourguignon : la vraie recette (2)', - 'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx', - 'recipe_id': '9c7b8aee-c93c-4b1b-ab48-2625d444743a', - 'recipe_yield': '4 servings', - 'slug': 'boeuf-bourguignon-la-vraie-recette-2', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': 222, - 'recipe': dict({ - 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'En9o', - 'name': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο (1)', - 'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno', - 'recipe_id': 'f79f7e9d-4b58-4930-a586-2b127f16ee34', - 'recipe_yield': '6 servings', - 'slug': 'eukole-makaronada-me-kephtedakia-ston-phourno-1', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': 221, - 'recipe': dict({ - 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. 
These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'Kn62', - 'name': 'Greek Turkey Meatballs with Lemon Orzo & Creamy Feta Yogurt Sauce', - 'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/', - 'recipe_id': '47595e4c-52bc-441d-b273-3edf4258806d', - 'recipe_yield': '4 servings', - 'slug': 'greek-turkey-meatballs-with-lemon-orzo-creamy-feta-yogurt-sauce', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': 220, - 'recipe': dict({ - 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'nOPT', - 'name': 'Einfacher Nudelauflauf mit Brokkoli', - 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', - 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', - 'recipe_yield': '4 servings', - 'slug': 'einfacher-nudelauflauf-mit-brokkoli', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': 219, - 'recipe': dict({ - 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. It is really easy to make and I never have any leftovers. Cook time includes chill time.', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'ibL6', - 'name': 'Pampered Chef Double Chocolate Mocha Trifle', - 'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963', - 'recipe_id': '92635fd0-f2dc-4e78-a6e4-ecd556ad361f', - 'recipe_yield': '12 servings', - 'slug': 'pampered-chef-double-chocolate-mocha-trifle', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 22), - 'mealplan_id': 217, - 'recipe': dict({ - 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! 
These sliders are quick and easy plus they are make-ahead and reheat really well.', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'beGq', - 'name': 'Cheeseburger Sliders (Easy, 30-min Recipe)', - 'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/', - 'recipe_id': '8bdd3656-5e7e-45d3-a3c4-557390846a22', - 'recipe_yield': '24 servings', - 'slug': 'cheeseburger-sliders-easy-30-min-recipe', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 22), - 'mealplan_id': 216, - 'recipe': dict({ - 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': '356X', - 'name': 'All-American Beef Stew Recipe', - 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', - 'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e', - 'recipe_yield': '6 servings', - 'slug': 'all-american-beef-stew-recipe', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': 212, - 'recipe': dict({ - 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': '356X', - 'name': 'All-American Beef Stew Recipe', - 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', - 'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e', - 'recipe_yield': '6 servings', - 'slug': 'all-american-beef-stew-recipe', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 22), - 'mealplan_id': 211, - 'recipe': dict({ - 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. 
Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'nOPT', - 'name': 'Einfacher Nudelauflauf mit Brokkoli', - 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', - 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', - 'recipe_yield': '4 servings', - 'slug': 'einfacher-nudelauflauf-mit-brokkoli', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 23), - 'mealplan_id': 196, - 'recipe': dict({ - 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': '5G1v', - 'name': 'Miso Udon Noodles with Spinach and Tofu', - 'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/', - 'recipe_id': '25b814f2-d9bf-4df0-b40d-d2f2457b4317', - 'recipe_yield': '2 servings', - 'slug': 'miso-udon-noodles-with-spinach-and-tofu', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 22), - 'mealplan_id': 195, - 'recipe': dict({ - 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'rrNL', - 'name': 'Mousse de saumon', - 'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon', - 'recipe_id': '55c88810-4cf1-4d86-ae50-63b15fd173fb', - 'recipe_yield': '12 servings', - 'slug': 'mousse-de-saumon', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - dict({ - 'description': 'Dineren met de boys', - 'entry_type': , - 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', - 'household_id': None, - 'mealplan_date': FakeDate(2024, 1, 21), - 'mealplan_id': 1, - 'recipe': None, - 'title': 'Aquavite', - 'user_id': '6caa6e4d-521f-4ef4-9ed7-388bdd63f47d', - }), - ]), - }) -# --- -# name: test_service_recipe - dict({ - 'recipe': dict({ - 'date_added': datetime.date(2024, 6, 29), - 'description': 'The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. 
Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”', - 'group_id': '24477569-f6af-4b53-9e3f-6d04b0ca6916', - 'household_id': None, - 'image': 'SuPW', - 'ingredients': list([ - dict({ - 'is_food': True, - 'note': '130g dark couverture chocolate (min. 55% cocoa content)', - 'quantity': 1.0, - 'reference_id': 'a3adfe78-d157-44d8-98be-9c133e45bb4e', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '1 Vanilla Pod', - 'quantity': 1.0, - 'reference_id': '41d234d7-c040-48f9-91e6-f4636aebb77b', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '150g softened butter', - 'quantity': 1.0, - 'reference_id': 'f6ce06bf-8b02-43e6-8316-0dc3fb0da0fc', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '100g Icing sugar', - 'quantity': 1.0, - 'reference_id': 'f7fcd86e-b04b-4e07-b69c-513925811491', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '6 Eggs', - 'quantity': 1.0, - 'reference_id': 'a831fbc3-e2f5-452e-a745-450be8b4a130', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '100g Castor sugar', - 'quantity': 1.0, - 'reference_id': 'b5ee4bdc-0047-4de7-968b-f3360bbcb31e', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '140g Plain wheat flour', - 'quantity': 1.0, - 'reference_id': 'a67db09d-429c-4e77-919d-cfed3da675ad', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '200g apricot jam', - 'quantity': 1.0, - 'reference_id': '55479752-c062-4b25-aae3-2b210999d7b9', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '200g castor sugar', - 'quantity': 1.0, - 'reference_id': 'ff9cd404-24ec-4d38-b0aa-0120ce1df679', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': '150g dark couverture chocolate (min. 55% cocoa content)', - 'quantity': 1.0, - 'reference_id': 'c7fca92e-971e-4728-a227-8b04783583ed', - 'unit': None, - }), - dict({ - 'is_food': True, - 'note': 'Unsweetend whipped cream to garnish', - 'quantity': 1.0, - 'reference_id': 'ef023f23-7816-4871-87f6-4d29f9a283f7', - 'unit': None, - }), - ]), - 'instructions': list([ - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '2d558dbf-5361-4ef2-9d86-4161f5eb6146', - 'text': 'Preheat oven to 170°C. Line the base of a springform with baking paper, grease the sides, and dust with a little flour. Melt couverture over boiling water. Let cool slightly.', - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': 'dbcc1c37-3cbf-4045-9902-8f7fd1e68f0a', - 'text': 'Slit vanilla pod lengthwise and scrape out seeds. Using a hand mixer with whisks, beat the softened butter with the icing sugar and vanilla seeds until bubbles appear.', - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '2265bd14-a691-40b1-9fe6-7b5dfeac8401', - 'text': 'Separate the eggs. Whisk the egg yolks into the butter mixture one by one. Now gradually add melted couverture chocolate. Beat the egg whites with the castor sugar until stiff, then place on top of the butter and chocolate mixture. Sift the flour over the mixture, then fold in the flour and beaten egg whites.', - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '0aade447-dfac-4aae-8e67-ac250ad13ae2', - 'text': "Transfer the mixture to the springform, smooth the top, and bake in the oven (middle rack) for 10–15 minutes, leaving the oven door a finger's width ajar. Then close the oven and bake for approximately 50 minutes. 
(The cake is done when it yields slightly to the touch.)", - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '5fdcb703-7103-468d-a65d-a92460b92eb3', - 'text': 'Remove the cake from the oven and loosen the sides of the springform. Carefully tip the cake onto a cake rack lined with baking paper and let cool for approximately 20 minutes. Then pull off the baking paper, turn the cake over, and leave on rack to cool completely.', - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '81474afc-b44e-49b3-bb67-5d7dab8f832a', - 'text': 'Cut the cake in half horizontally. Warm the jam and stir until smooth. Brush the top of both cake halves with the jam and place one on top of the other. Brush the sides with the jam as well.', - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '8fac8aee-0d3c-4f78-9ff8-56d20472e5f1', - 'text': 'To make the glaze, put the castor sugar into a saucepan with 125 ml water and boil over high heat for approximately 5 minutes. Take the sugar syrup off the stove and leave to cool a little. Coarsely chop the couverture, gradually adding it to the syrup, and stir until it forms a thick liquid (see tip below).', - 'title': None, - }), - dict({ - 'ingredient_references': list([ - ]), - 'instruction_id': '7162e099-d651-4656-902a-a09a9b40c4e1', - 'text': 'Pour all the lukewarm glaze liquid at once over the top of the cake and quickly spread using a palette knife. Leave the glaze to set for a few hours. Serve garnished with whipped cream.', - 'title': None, - }), - ]), - 'name': 'Original Sacher-Torte (2)', - 'original_url': 'https://www.sacher.com/en/original-sacher-torte/recipe/', - 'recipe_id': 'fada9582-709b-46aa-b384-d5952123ad93', - 'recipe_yield': '4 servings', - 'slug': 'original-sacher-torte-2', - 'tags': list([ - dict({ - 'name': 'Sacher', - 'slug': 'sacher', - 'tag_id': '1b5789b9-3af6-412e-8c77-8a01caa0aac9', - }), - dict({ - 'name': 'Cake', - 'slug': 'cake', - 'tag_id': '1cf17f96-58b5-4bd3-b1e8-1606a64b413d', - }), - dict({ - 'name': 'Torte', - 'slug': 'torte', - 'tag_id': '3f5f0a3d-728f-440d-a6c7-5a68612e8c67', - }), - dict({ - 'name': 'Sachertorte', - 'slug': 'sachertorte', - 'tag_id': '525f388d-6ee0-4ebe-91fc-dd320a7583f0', - }), - dict({ - 'name': 'Sacher Torte Cake', - 'slug': 'sacher-torte-cake', - 'tag_id': '544a6e08-a899-4f63-9c72-bb2924df70cb', - }), - dict({ - 'name': 'Sacher Torte', - 'slug': 'sacher-torte', - 'tag_id': '576c0a82-84ee-4e50-a14e-aa7a675b6352', - }), - dict({ - 'name': 'Original Sachertorte', - 'slug': 'original-sachertorte', - 'tag_id': 'd530b8e4-275a-4093-804b-6d0de154c206', - }), - ]), - 'user_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0', - }), - }) -# --- -# name: test_service_set_mealplan[payload0-kwargs0] - dict({ - 'mealplan': dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': datetime.date(2024, 1, 22), - 'mealplan_id': 230, - 'recipe': dict({ - 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. 
Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'AiIo', - 'name': 'Zoete aardappel curry traybake', - 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', - 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', - 'recipe_yield': '2 servings', - 'slug': 'zoete-aardappel-curry-traybake', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - }) -# --- -# name: test_service_set_mealplan[payload1-kwargs1] - dict({ - 'mealplan': dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': datetime.date(2024, 1, 22), - 'mealplan_id': 230, - 'recipe': dict({ - 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'AiIo', - 'name': 'Zoete aardappel curry traybake', - 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', - 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', - 'recipe_yield': '2 servings', - 'slug': 'zoete-aardappel-curry-traybake', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - }) -# --- -# name: test_service_set_random_mealplan - dict({ - 'mealplan': dict({ - 'description': None, - 'entry_type': , - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'mealplan_date': datetime.date(2024, 1, 22), - 'mealplan_id': 230, - 'recipe': dict({ - 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. 
Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", - 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', - 'household_id': None, - 'image': 'AiIo', - 'name': 'Zoete aardappel curry traybake', - 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', - 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', - 'recipe_yield': '2 servings', - 'slug': 'zoete-aardappel-curry-traybake', - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - 'title': None, - 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', - }), - }) -# --- diff --git a/tests/components/mealie/snapshots/test_todo.ambr b/tests/components/mealie/snapshots/test_todo.ambr deleted file mode 100644 index 4c58a839f57..00000000000 --- a/tests/components/mealie/snapshots/test_todo.ambr +++ /dev/null @@ -1,142 +0,0 @@ -# serializer version: 1 -# name: test_entities[todo.mealie_freezer-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'todo', - 'entity_category': None, - 'entity_id': 'todo.mealie_freezer', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Freezer', - 'platform': 'mealie', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'shopping_list', - 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_e9d78ff2-4b23-4b77-a3a8-464827100b46', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[todo.mealie_freezer-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mealie Freezer', - 'supported_features': , - }), - 'context': , - 'entity_id': 'todo.mealie_freezer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3', - }) -# --- -# name: test_entities[todo.mealie_special_groceries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'todo', - 'entity_category': None, - 'entity_id': 'todo.mealie_special_groceries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Special groceries', - 'platform': 'mealie', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'shopping_list', - 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_f8438635-8211-4be8-80d0-0aa42e37a5f2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[todo.mealie_special_groceries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mealie Special groceries', - 'supported_features': , - }), - 'context': , - 'entity_id': 'todo.mealie_special_groceries', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': '3', - }) -# --- -# name: test_entities[todo.mealie_supermarket-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'todo', - 'entity_category': None, - 'entity_id': 'todo.mealie_supermarket', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Supermarket', - 'platform': 'mealie', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'shopping_list', - 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_27edbaab-2ec6-441f-8490-0283ea77585f', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[todo.mealie_supermarket-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mealie Supermarket', - 'supported_features': , - }), - 'context': , - 'entity_id': 'todo.mealie_supermarket', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3', - }) -# --- diff --git a/tests/components/mealie/test_calendar.py b/tests/components/mealie/test_calendar.py index d11fe5d2354..9df2c1810fd 100644 --- a/tests/components/mealie/test_calendar.py +++ b/tests/components/mealie/test_calendar.py @@ -2,11 +2,10 @@ from datetime import date from http import HTTPStatus -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock from syrupy.assertion import SnapshotAssertion -from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -41,8 +40,7 @@ async def test_entities( mock_config_entry: MockConfigEntry, ) -> None: """Test the API returns the calendar.""" - with patch("homeassistant.components.mealie.PLATFORMS", [Platform.CALENDAR]): - await setup_integration(hass, mock_config_entry) + await setup_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/mealie/test_config_flow.py b/tests/components/mealie/test_config_flow.py index 15c629ec3da..ac68ed2fac5 100644 --- a/tests/components/mealie/test_config_flow.py +++ b/tests/components/mealie/test_config_flow.py @@ -2,17 +2,15 @@ from unittest.mock import AsyncMock -from aiomealie import About, MealieAuthenticationError, MealieConnectionError +from aiomealie import MealieAuthenticationError, MealieConnectionError import pytest from homeassistant.components.mealie.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL +from homeassistant.const import CONF_API_TOKEN, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . 
import setup_integration - from tests.common import MockConfigEntry @@ -38,9 +36,7 @@ async def test_full_flow( assert result["data"] == { CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token", - CONF_VERIFY_SSL: True, } - assert result["result"].unique_id == "bf1c62fe-4941-4332-9886-e54e88dbdba0" @pytest.mark.parametrize( @@ -59,7 +55,7 @@ async def test_flow_errors( error: str, ) -> None: """Test flow errors.""" - mock_mealie_client.get_user_info.side_effect = exception + mock_mealie_client.get_mealplan_today.side_effect = exception result = await hass.config_entries.flow.async_init( DOMAIN, @@ -76,48 +72,16 @@ async def test_flow_errors( assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": error} - mock_mealie_client.get_user_info.side_effect = None + mock_mealie_client.get_mealplan_today.side_effect = None result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token"}, ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY -@pytest.mark.parametrize( - ("version"), - [ - ("v1.0.0beta-5"), - ("v1.0.0-RC2"), - ("v0.1.0"), - ], -) -async def test_flow_version_error( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_setup_entry: AsyncMock, - version, -) -> None: - """Test flow version error.""" - mock_mealie_client.get_about.return_value = About(version=version) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "mealie_version"} - - async def test_duplicate( hass: HomeAssistant, mock_mealie_client: AsyncMock, @@ -141,189 +105,3 @@ async def test_duplicate( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - - -async def test_reauth_flow( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test reauth flow.""" - await setup_integration(hass, mock_config_entry) - - result = await mock_config_entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_API_TOKEN: "token2"}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - assert mock_config_entry.data[CONF_API_TOKEN] == "token2" - - -async def test_reauth_flow_wrong_account( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test reauth flow with wrong account.""" - await setup_integration(hass, mock_config_entry) - - result = await mock_config_entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - mock_mealie_client.get_user_info.return_value.user_id = "wrong_user_id" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_API_TOKEN: "token2"}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "wrong_account" - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - 
(MealieConnectionError, "cannot_connect"), - (MealieAuthenticationError, "invalid_auth"), - (Exception, "unknown"), - ], -) -async def test_reauth_flow_exceptions( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, - exception: Exception, - error: str, -) -> None: - """Test reauth flow errors.""" - await setup_integration(hass, mock_config_entry) - mock_mealie_client.get_user_info.side_effect = exception - - result = await mock_config_entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_API_TOKEN: "token"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {"base": error} - - mock_mealie_client.get_user_info.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_API_TOKEN: "token"}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - - -async def test_reconfigure_flow( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test reconfigure flow.""" - await setup_integration(hass, mock_config_entry) - - result = await mock_config_entry.start_reconfigure_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "http://test:9090", - CONF_API_TOKEN: "token2", - CONF_VERIFY_SSL: False, - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - assert mock_config_entry.data[CONF_API_TOKEN] == "token2" - assert mock_config_entry.data[CONF_HOST] == "http://test:9090" - assert mock_config_entry.data[CONF_VERIFY_SSL] is False - - -async def test_reconfigure_flow_wrong_account( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test reconfigure flow with wrong account.""" - await setup_integration(hass, mock_config_entry) - - result = await mock_config_entry.start_reconfigure_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" - - mock_mealie_client.get_user_info.return_value.user_id = "wrong_user_id" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "http://test:9090", CONF_API_TOKEN: "token2"}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "wrong_account" - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (MealieConnectionError, "cannot_connect"), - (MealieAuthenticationError, "invalid_auth"), - (Exception, "unknown"), - ], -) -async def test_reconfigure_flow_exceptions( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, - exception: Exception, - error: str, -) -> None: - """Test reconfigure flow errors.""" - await setup_integration(hass, mock_config_entry) - mock_mealie_client.get_user_info.side_effect = exception - - result = await mock_config_entry.start_reconfigure_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" - - result = await 
hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "http://test:9090", CONF_API_TOKEN: "token"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" - assert result["errors"] == {"base": error} - - mock_mealie_client.get_user_info.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "http://test:9090", CONF_API_TOKEN: "token"}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" diff --git a/tests/components/mealie/test_diagnostics.py b/tests/components/mealie/test_diagnostics.py deleted file mode 100644 index 88680da9784..00000000000 --- a/tests/components/mealie/test_diagnostics.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Test Mealie diagnostics.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test config entry diagnostics.""" - await setup_integration(hass, mock_config_entry) - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) - == snapshot - ) diff --git a/tests/components/mealie/test_init.py b/tests/components/mealie/test_init.py index a45a67801df..7d63ad135f9 100644 --- a/tests/components/mealie/test_init.py +++ b/tests/components/mealie/test_init.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from aiomealie import About, MealieAuthenticationError, MealieConnectionError +from aiomealie import MealieAuthenticationError, MealieConnectionError import pytest from syrupy import SnapshotAssertion @@ -26,84 +26,12 @@ async def test_device_info( """Test device registry integration.""" await setup_integration(hass, mock_config_entry) device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} + identifiers={(DOMAIN, mock_config_entry.entry_id)} ) assert device_entry is not None assert device_entry == snapshot -@pytest.mark.parametrize( - "field", - [ - "get_about", - "get_mealplans", - "get_shopping_lists", - "get_statistics", - ], -) -@pytest.mark.parametrize( - ("exc", "state"), - [ - (MealieConnectionError, ConfigEntryState.SETUP_RETRY), - (MealieAuthenticationError, ConfigEntryState.SETUP_ERROR), - ], -) -async def test_setup_failure( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - field: str, - exc: Exception, - state: ConfigEntryState, -) -> None: - """Test setup failure.""" - getattr(mock_mealie_client, field).side_effect = exc - - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is state - - -@pytest.mark.parametrize( - ("version"), - [ - ("v1.0.0beta-5"), - ("v1.0.0-RC2"), - ("v0.1.0"), - ], -) -async def test_setup_too_old( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - version, -) -> None: - """Test setup of Mealie entry with too old version of Mealie.""" - mock_mealie_client.get_about.return_value = About(version=version) - - await setup_integration(hass, mock_config_entry) - - assert 
mock_config_entry.state is ConfigEntryState.SETUP_ERROR - - -async def test_setup_invalid( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test setup of Mealie entry with too old version of Mealie.""" - mock_mealie_client.get_about.return_value = About(version="nightly") - - await setup_integration(hass, mock_config_entry) - - assert ( - "It seems like you are using the nightly version of Mealie, nightly" - " versions could have changes that stop this integration working" in caplog.text - ) - assert mock_config_entry.state is ConfigEntryState.LOADED - - async def test_load_unload_entry( hass: HomeAssistant, mock_mealie_client: AsyncMock, @@ -127,7 +55,7 @@ async def test_load_unload_entry( (MealieAuthenticationError, ConfigEntryState.SETUP_ERROR), ], ) -async def test_mealplan_initialization_failure( +async def test_initialization_failure( hass: HomeAssistant, mock_mealie_client: AsyncMock, mock_config_entry: MockConfigEntry, @@ -140,25 +68,3 @@ async def test_mealplan_initialization_failure( await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is state - - -@pytest.mark.parametrize( - ("exc", "state"), - [ - (MealieConnectionError, ConfigEntryState.SETUP_RETRY), - (MealieAuthenticationError, ConfigEntryState.SETUP_ERROR), - ], -) -async def test_shoppingitems_initialization_failure( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - exc: Exception, - state: ConfigEntryState, -) -> None: - """Test initialization failure.""" - mock_mealie_client.get_shopping_items.side_effect = exc - - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is state diff --git a/tests/components/mealie/test_sensor.py b/tests/components/mealie/test_sensor.py deleted file mode 100644 index 5a55b89ad21..00000000000 --- a/tests/components/mealie/test_sensor.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Tests for the Mealie sensors.""" - -from unittest.mock import AsyncMock, patch - -from syrupy.assertion import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the sensor entities.""" - with patch("homeassistant.components.mealie.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/mealie/test_services.py b/tests/components/mealie/test_services.py deleted file mode 100644 index 1c8c6f19de7..00000000000 --- a/tests/components/mealie/test_services.py +++ /dev/null @@ -1,446 +0,0 @@ -"""Tests for the Mealie services.""" - -from datetime import date -from unittest.mock import AsyncMock - -from aiomealie import ( - MealieConnectionError, - MealieNotFoundError, - MealieValidationError, - MealplanEntryType, -) -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.mealie.const import ( - ATTR_CONFIG_ENTRY_ID, - ATTR_END_DATE, - ATTR_ENTRY_TYPE, - ATTR_INCLUDE_TAGS, - ATTR_NOTE_TEXT, - ATTR_NOTE_TITLE, - ATTR_RECIPE_ID, - ATTR_START_DATE, - ATTR_URL, - DOMAIN, -) -from homeassistant.components.mealie.services import ( - SERVICE_GET_MEALPLAN, - SERVICE_GET_RECIPE, - SERVICE_IMPORT_RECIPE, - SERVICE_SET_MEALPLAN, - SERVICE_SET_RANDOM_MEALPLAN, -) -from homeassistant.const import ATTR_DATE -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError - -from . import setup_integration - -from tests.common import MockConfigEntry - - -async def test_service_mealplan( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - freezer: FrozenDateTimeFactory, -) -> None: - """Test the get_mealplan service.""" - - await setup_integration(hass, mock_config_entry) - - freezer.move_to("2023-10-21") - - response = await hass.services.async_call( - DOMAIN, - SERVICE_GET_MEALPLAN, - {ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id}, - blocking=True, - return_response=True, - ) - assert mock_mealie_client.get_mealplans.call_args_list[1][0] == ( - date(2023, 10, 21), - date(2023, 10, 21), - ) - assert response == snapshot - - response = await hass.services.async_call( - DOMAIN, - SERVICE_GET_MEALPLAN, - { - ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_START_DATE: "2023-10-22", - ATTR_END_DATE: "2023-10-25", - }, - blocking=True, - return_response=True, - ) - assert response - assert mock_mealie_client.get_mealplans.call_args_list[2][0] == ( - date(2023, 10, 22), - date(2023, 10, 25), - ) - - response = await hass.services.async_call( - DOMAIN, - SERVICE_GET_MEALPLAN, - { - ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_START_DATE: "2023-10-19", - }, - blocking=True, - return_response=True, - ) - assert response - assert mock_mealie_client.get_mealplans.call_args_list[3][0] == ( - date(2023, 10, 19), - date(2023, 10, 21), - ) - - response = await hass.services.async_call( - DOMAIN, - SERVICE_GET_MEALPLAN, - { - ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_END_DATE: "2023-10-22", - }, - blocking=True, - return_response=True, - ) - assert response - assert mock_mealie_client.get_mealplans.call_args_list[4][0] == ( - date(2023, 10, 21), - date(2023, 10, 22), - ) - - with 
pytest.raises(ServiceValidationError): - await hass.services.async_call( - DOMAIN, - SERVICE_GET_MEALPLAN, - { - ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_START_DATE: "2023-10-22", - ATTR_END_DATE: "2023-10-19", - }, - blocking=True, - return_response=True, - ) - - -async def test_service_recipe( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test the get_recipe service.""" - - await setup_integration(hass, mock_config_entry) - - response = await hass.services.async_call( - DOMAIN, - SERVICE_GET_RECIPE, - {ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, ATTR_RECIPE_ID: "recipe_id"}, - blocking=True, - return_response=True, - ) - assert response == snapshot - - -async def test_service_import_recipe( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test the import_recipe service.""" - - await setup_integration(hass, mock_config_entry) - - response = await hass.services.async_call( - DOMAIN, - SERVICE_IMPORT_RECIPE, - { - ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_URL: "http://example.com", - }, - blocking=True, - return_response=True, - ) - assert response == snapshot - mock_mealie_client.import_recipe.assert_called_with( - "http://example.com", include_tags=False - ) - - await hass.services.async_call( - DOMAIN, - SERVICE_IMPORT_RECIPE, - { - ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_URL: "http://example.com", - ATTR_INCLUDE_TAGS: True, - }, - blocking=True, - return_response=False, - ) - mock_mealie_client.import_recipe.assert_called_with( - "http://example.com", include_tags=True - ) - - -async def test_service_set_random_mealplan( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test the set_random_mealplan service.""" - - await setup_integration(hass, mock_config_entry) - - response = await hass.services.async_call( - DOMAIN, - SERVICE_SET_RANDOM_MEALPLAN, - { - ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_DATE: "2023-10-21", - ATTR_ENTRY_TYPE: "lunch", - }, - blocking=True, - return_response=True, - ) - assert response == snapshot - mock_mealie_client.random_mealplan.assert_called_with( - date(2023, 10, 21), MealplanEntryType.LUNCH - ) - - mock_mealie_client.random_mealplan.reset_mock() - await hass.services.async_call( - DOMAIN, - SERVICE_SET_RANDOM_MEALPLAN, - { - ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_DATE: "2023-10-21", - ATTR_ENTRY_TYPE: "lunch", - }, - blocking=True, - return_response=False, - ) - mock_mealie_client.random_mealplan.assert_called_with( - date(2023, 10, 21), MealplanEntryType.LUNCH - ) - - -@pytest.mark.parametrize( - ("payload", "kwargs"), - [ - ( - { - ATTR_RECIPE_ID: "recipe_id", - }, - {"recipe_id": "recipe_id", "note_title": None, "note_text": None}, - ), - ( - { - ATTR_NOTE_TITLE: "Note Title", - ATTR_NOTE_TEXT: "Note Text", - }, - {"recipe_id": None, "note_title": "Note Title", "note_text": "Note Text"}, - ), - ], -) -async def test_service_set_mealplan( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - payload: dict[str, str], - kwargs: dict[str, str], -) -> None: - """Test the set_mealplan service.""" - - await setup_integration(hass, mock_config_entry) - - response = await hass.services.async_call( - DOMAIN, - 
SERVICE_SET_MEALPLAN, - { - ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_DATE: "2023-10-21", - ATTR_ENTRY_TYPE: "lunch", - } - | payload, - blocking=True, - return_response=True, - ) - assert response == snapshot - mock_mealie_client.set_mealplan.assert_called_with( - date(2023, 10, 21), MealplanEntryType.LUNCH, **kwargs - ) - - mock_mealie_client.random_mealplan.reset_mock() - await hass.services.async_call( - DOMAIN, - SERVICE_SET_MEALPLAN, - { - ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, - ATTR_DATE: "2023-10-21", - ATTR_ENTRY_TYPE: "lunch", - } - | payload, - blocking=True, - return_response=False, - ) - mock_mealie_client.set_mealplan.assert_called_with( - date(2023, 10, 21), MealplanEntryType.LUNCH, **kwargs - ) - - -@pytest.mark.parametrize( - ("service", "payload", "function", "exception", "raised_exception", "message"), - [ - ( - SERVICE_GET_MEALPLAN, - {}, - "get_mealplans", - MealieConnectionError, - HomeAssistantError, - "Error connecting to Mealie instance", - ), - ( - SERVICE_GET_RECIPE, - {ATTR_RECIPE_ID: "recipe_id"}, - "get_recipe", - MealieConnectionError, - HomeAssistantError, - "Error connecting to Mealie instance", - ), - ( - SERVICE_GET_RECIPE, - {ATTR_RECIPE_ID: "recipe_id"}, - "get_recipe", - MealieNotFoundError, - ServiceValidationError, - "Recipe with ID or slug `recipe_id` not found", - ), - ( - SERVICE_IMPORT_RECIPE, - {ATTR_URL: "http://example.com"}, - "import_recipe", - MealieConnectionError, - HomeAssistantError, - "Error connecting to Mealie instance", - ), - ( - SERVICE_IMPORT_RECIPE, - {ATTR_URL: "http://example.com"}, - "import_recipe", - MealieValidationError, - ServiceValidationError, - "Mealie could not import the recipe from the URL", - ), - ( - SERVICE_SET_RANDOM_MEALPLAN, - {ATTR_DATE: "2023-10-21", ATTR_ENTRY_TYPE: "lunch"}, - "random_mealplan", - MealieConnectionError, - HomeAssistantError, - "Error connecting to Mealie instance", - ), - ( - SERVICE_SET_MEALPLAN, - { - ATTR_DATE: "2023-10-21", - ATTR_ENTRY_TYPE: "lunch", - ATTR_RECIPE_ID: "recipe_id", - }, - "set_mealplan", - MealieConnectionError, - HomeAssistantError, - "Error connecting to Mealie instance", - ), - ], -) -async def test_services_connection_error( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - service: str, - payload: dict[str, str], - function: str, - exception: Exception, - raised_exception: type[Exception], - message: str, -) -> None: - """Test a connection error in the services.""" - - await setup_integration(hass, mock_config_entry) - - getattr(mock_mealie_client, function).side_effect = exception - - with pytest.raises(raised_exception, match=message): - await hass.services.async_call( - DOMAIN, - service, - {ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id} | payload, - blocking=True, - return_response=True, - ) - - -@pytest.mark.parametrize( - ("service", "payload"), - [ - (SERVICE_GET_MEALPLAN, {}), - (SERVICE_GET_RECIPE, {ATTR_RECIPE_ID: "recipe_id"}), - (SERVICE_IMPORT_RECIPE, {ATTR_URL: "http://example.com"}), - ( - SERVICE_SET_RANDOM_MEALPLAN, - {ATTR_DATE: "2023-10-21", ATTR_ENTRY_TYPE: "lunch"}, - ), - ( - SERVICE_SET_MEALPLAN, - { - ATTR_DATE: "2023-10-21", - ATTR_ENTRY_TYPE: "lunch", - ATTR_RECIPE_ID: "recipe_id", - }, - ), - ], -) -async def test_service_entry_availability( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - service: str, - payload: dict[str, str], -) -> None: - """Test the services without valid entry.""" - 
mock_config_entry.add_to_hass(hass) - mock_config_entry2 = MockConfigEntry(domain=DOMAIN) - mock_config_entry2.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - with pytest.raises(ServiceValidationError, match="Mock Title is not loaded"): - await hass.services.async_call( - DOMAIN, - service, - {ATTR_CONFIG_ENTRY_ID: mock_config_entry2.entry_id} | payload, - blocking=True, - return_response=True, - ) - - with pytest.raises( - ServiceValidationError, match='Integration "mealie" not found in registry' - ): - await hass.services.async_call( - DOMAIN, - service, - {ATTR_CONFIG_ENTRY_ID: "bad-config_id"} | payload, - blocking=True, - return_response=True, - ) diff --git a/tests/components/mealie/test_todo.py b/tests/components/mealie/test_todo.py deleted file mode 100644 index 920cfc47397..00000000000 --- a/tests/components/mealie/test_todo.py +++ /dev/null @@ -1,197 +0,0 @@ -"""Tests for the Mealie todo.""" - -from datetime import timedelta -from unittest.mock import AsyncMock, patch - -from aiomealie import MealieError, ShoppingListsResponse -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.mealie import DOMAIN -from homeassistant.components.todo import ( - ATTR_ITEM, - ATTR_RENAME, - ATTR_STATUS, - DOMAIN as TODO_DOMAIN, - TodoServices, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_fixture, - snapshot_platform, -) - - -async def test_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test todo entities.""" - with patch("homeassistant.components.mealie.PLATFORMS", [Platform.TODO]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_add_todo_list_item( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test for adding a To-do Item.""" - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "Soda"}, - target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, - blocking=True, - ) - - mock_mealie_client.add_shopping_item.assert_called_once() - - -async def test_add_todo_list_item_error( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test for failing to add a To-do Item.""" - await setup_integration(hass, mock_config_entry) - - mock_mealie_client.add_shopping_item.side_effect = MealieError - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "Soda"}, - target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, - blocking=True, - ) - - -async def test_update_todo_list_item( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test for updating a To-do Item.""" - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - TODO_DOMAIN, - 
TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "aubergine", ATTR_RENAME: "Eggplant", ATTR_STATUS: "completed"}, - target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, - blocking=True, - ) - - mock_mealie_client.update_shopping_item.assert_called_once() - - -async def test_update_todo_list_item_error( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test for failing to update a To-do Item.""" - await setup_integration(hass, mock_config_entry) - - mock_mealie_client.update_shopping_item.side_effect = MealieError - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "aubergine", ATTR_RENAME: "Eggplant", ATTR_STATUS: "completed"}, - target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, - blocking=True, - ) - - -async def test_delete_todo_list_item( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test for deleting a To-do Item.""" - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: "aubergine"}, - target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, - blocking=True, - ) - - mock_mealie_client.delete_shopping_item.assert_called_once() - - -async def test_delete_todo_list_item_error( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test for failing to delete a To-do Item.""" - await setup_integration(hass, mock_config_entry) - - mock_mealie_client.delete_shopping_item = AsyncMock() - mock_mealie_client.delete_shopping_item.side_effect = MealieError - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: "aubergine"}, - target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, - blocking=True, - ) - - -async def test_runtime_management( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test for creating and deleting shopping lists.""" - response = ShoppingListsResponse.from_json( - load_fixture("get_shopping_lists.json", DOMAIN) - ).items - mock_mealie_client.get_shopping_lists.return_value = ShoppingListsResponse( - items=[response[0]] - ) - await setup_integration(hass, mock_config_entry) - assert hass.states.get("todo.mealie_supermarket") is not None - assert hass.states.get("todo.mealie_special_groceries") is None - - mock_mealie_client.get_shopping_lists.return_value = ShoppingListsResponse( - items=response[0:2] - ) - freezer.tick(timedelta(minutes=5)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert hass.states.get("todo.mealie_special_groceries") is not None - - mock_mealie_client.get_shopping_lists.return_value = ShoppingListsResponse( - items=[response[0]] - ) - freezer.tick(timedelta(minutes=5)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert hass.states.get("todo.mealie_special_groceries") is None diff --git a/tests/components/meater/test_config_flow.py b/tests/components/meater/test_config_flow.py index 9049cf4ac9a..b8c1be15268 100644 --- a/tests/components/meater/test_config_flow.py +++ b/tests/components/meater/test_config_flow.py @@ -123,7 +123,11 @@ async def test_reauth_flow(hass: HomeAssistant, mock_meater) -> None: ) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) + result = 
await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + data=data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/media_extractor/conftest.py b/tests/components/media_extractor/conftest.py index 58d51f1cb2e..1d198681f3f 100644 --- a/tests/components/media_extractor/conftest.py +++ b/tests/components/media_extractor/conftest.py @@ -1,18 +1,20 @@ """Common fixtures for the Media Extractor tests.""" -from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.media_extractor import DOMAIN -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component from . import MockYoutubeDL from .const import AUDIO_QUERY +from tests.common import async_mock_service + @pytest.fixture(autouse=True) async def setup_homeassistant(hass: HomeAssistant): @@ -29,6 +31,12 @@ async def setup_media_player(hass: HomeAssistant) -> None: await hass.async_block_till_done() +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "media_player", "play_media") + + @pytest.fixture(name="mock_youtube_dl") async def setup_mock_yt_dlp(hass: HomeAssistant) -> MockYoutubeDL: """Mock YoutubeDL.""" diff --git a/tests/components/media_extractor/snapshots/test_init.ambr b/tests/components/media_extractor/snapshots/test_init.ambr index 9731a415c00..ed56f40af73 100644 --- a/tests/components/media_extractor/snapshots/test_init.ambr +++ b/tests/components/media_extractor/snapshots/test_init.ambr @@ -30,6 +30,15 @@ 'media_content_type': 'VIDEO', }) # --- +# name: test_play_media_service + ReadOnlyDict({ + 'entity_id': 'media_player.bedroom', + 'extra': dict({ + }), + 'media_content_id': 'https://manifest.googlevideo.com/api/manifest/hls_playlist/expire/1694794256/ei/sC0EZYCPHbuZx_AP3bGz0Ac/ip/84.31.234.146/id/750c38c3d5a05dc4/itag/616/source/youtube/requiressl/yes/ratebypass/yes/pfa/1/wft/1/sgovp/clen%3D99471214%3Bdur%3D212.040%3Bgir%3Dyes%3Bitag%3D356%3Blmt%3D1694043438471036/hls_chunk_host/rr2---sn-5hnekn7k.googlevideo.com/mh/7c/mm/31,29/mn/sn-5hnekn7k,sn-5hne6nzy/ms/au,rdu/mv/m/mvi/2/pl/14/initcwndbps/2267500/vprv/1/playlist_type/DVR/dover/13/txp/4532434/mt/1694772337/fvip/3/short_key/1/keepalive/yes/fexp/24007246,24362685/beids/24350018/sparams/expire,ei,ip,id,itag,source,requiressl,ratebypass,pfa,wft,sgovp,vprv,playlist_type/sig/AOq0QJ8wRgIhAIC0iobMnRschmQ3QaYsytXg9eg7l9B_-UNvMciis4bmAiEAg-3jr6SwOfAGCCU-JyTyxcXmraug-hPcjjJzm__43ug%3D/lsparams/hls_chunk_host,mh,mm,mn,ms,mv,mvi,pl,initcwndbps/lsig/AG3C_xAwRQIhAOlqbgmuueNhIuGENYKCsdwiNAUPheXw-RMUqsiaB7YuAiANN43FxJl14Ve_H_c9K-aDoXG4sI7PDCqKDhov6Qro_g%3D%3D/playlist/index.m3u8', + 'media_content_type': 'VIDEO', + }) +# --- # name: test_play_media_service[https://soundcloud.com/bruttoband/brutto-11-AUDIO-audio_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', @@ -48,6 +57,24 @@ 'media_content_type': 'AUDIO', }) # --- +# name: test_play_media_service[https://soundcloud.com/bruttoband/brutto-11-VIDEO-audio_media_extractor_config] + ReadOnlyDict({ + 'entity_id': 'media_player.bedroom', + 'extra': dict({ + }), + 'media_content_id': 
'https://cf-media.sndcdn.com/50remGX1OqRY.128.mp3?Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiKjovL2NmLW1lZGlhLnNuZGNkbi5jb20vNTByZW1HWDFPcVJZLjEyOC5tcDMqIiwiQ29uZGl0aW9uIjp7IkRhdGVMZXNzVGhhbiI6eyJBV1M6RXBvY2hUaW1lIjoxNjk0Nzk5MTc5fX19XX0_&Signature=JtF8BXxTCElhjCrhnSAq3W6z960VmdVXx7BPhQvI0MCxr~J43JFGO8CVw9-VBM2oEf14mqWo63-C0FO29DvUuBZnmLD3dhDfryVfWJsrix7voimoRDaNFE~3zntDbg7O2S8uWYyZK8OZC9anzwokvjH7jbmviWqK4~2IM9dwgejGgzrQU1aadV2Yro7NJZnF7SD~7tVjkM-hBg~X5zDYVxmGrdzN3tFoLwRmUch6RNDL~1DcWBk0AveBKQFAdBrFBjDDUeIyDz9Idhw2aG9~fjfckcf95KwqrVQxz1N5XEzfNDDo8xkUgDt0eb9dtXdwxLJ0swC6e5VLS8bsH91GMg__&Key-Pair-Id=APKAI6TU7MMXM5DG6EPQ', + 'media_content_type': 'VIDEO', + }) +# --- +# name: test_play_media_service[https://soundcloud.com/bruttoband/brutto-11-VIDEO-empty_media_extractor_config] + ReadOnlyDict({ + 'entity_id': 'media_player.bedroom', + 'extra': dict({ + }), + 'media_content_id': 'https://cf-media.sndcdn.com/50remGX1OqRY.128.mp3?Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiKjovL2NmLW1lZGlhLnNuZGNkbi5jb20vNTByZW1HWDFPcVJZLjEyOC5tcDMqIiwiQ29uZGl0aW9uIjp7IkRhdGVMZXNzVGhhbiI6eyJBV1M6RXBvY2hUaW1lIjoxNjk0Nzk4NTkzfX19XX0_&Signature=flALJvEBnzS0ZOOhf0-07Ap~NURw2Gn2OqkeKKTTMX5HRGJw9eXFay79tcC4GsMMXWUgWoCx-n3yelpyilE2MOEIufBNUbjqRfMSJaX5YhYxjQdoDYuiU~gqBzJyPw9pKzr6P8~5HNKL3Idr0CNhUzdV6FQLaUPKMMibq9ghV833mUmdyvdk1~GZBc8MOg9GrTdcigGgpPzd-vrIMICMvFzFnwBOeOotxX2Vfqf9~wVekBKGlvB9A~7TlZ71lv9Fl9u4m8rse9E-mByweVc1M784ehJV3~tRPjuF~FXXWKP8x0nGJmoq7RAnG7iFIt~fQFmsfOq2o~PG7dHMRPh7hw__&Key-Pair-Id=APKAI6TU7MMXM5DG6EPQ', + 'media_content_type': 'VIDEO', + }) +# --- # name: test_play_media_service[https://test.com/abc-AUDIO-audio_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', @@ -66,6 +93,15 @@ 'media_content_type': 'AUDIO', }) # --- +# name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-audio_media_extractor_config-] + ReadOnlyDict({ + 'entity_id': 'media_player.bedroom', + 'extra': dict({ + }), + 'media_content_id': 'https://manifest.googlevideo.com/api/manifest/hls_playlist/expire/1694805268/ei/tFgEZcu0DoOD-gaqg47wBA/ip/45.93.75.130/id/750c38c3d5a05dc4/itag/616/source/youtube/requiressl/yes/ratebypass/yes/pfa/1/wft/1/sgovp/clen%3D99471214%3Bdur%3D212.040%3Bgir%3Dyes%3Bitag%3D356%3Blmt%3D1694043438471036/hls_chunk_host/rr3---sn-5hne6nzy.googlevideo.com/mh/7c/mm/31,29/mn/sn-5hne6nzy,sn-5hnekn7k/ms/au,rdu/mv/m/mvi/3/pl/22/initcwndbps/1957500/vprv/1/playlist_type/DVR/dover/13/txp/4532434/mt/1694783146/fvip/2/short_key/1/keepalive/yes/fexp/24007246/sparams/expire,ei,ip,id,itag,source,requiressl,ratebypass,pfa,wft,sgovp,vprv,playlist_type/sig/AOq0QJ8wRQIhALAASH0_ZDQQoMA82qWNCXSHPZ0bb9TQldIs7AAxktiiAiASA5bQy7IAa6NwdGIOpfye5OgcY_BNuo0WgSdh84tosw%3D%3D/lsparams/hls_chunk_host,mh,mm,mn,ms,mv,mvi,pl,initcwndbps/lsig/AG3C_xAwRgIhAIsDcLGH8KJpQpBgyJ5VWlDxfr75HyO8hMSVS9v7nRu4AiEA2xjtLZOzeNFoJlxwCsH3YqsUQt-BF_4gikhi_P4FbBc%3D/playlist/index.m3u8', + 'media_content_type': 'VIDEO', + }) +# --- # name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-audio_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', @@ -75,6 +111,15 @@ 'media_content_type': 'VIDEO', }) # --- +# name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-empty_media_extractor_config-] + ReadOnlyDict({ + 'entity_id': 'media_player.bedroom', + 'extra': dict({ + }), + 'media_content_id': 
'https://manifest.googlevideo.com/api/manifest/hls_playlist/expire/1694805294/ei/zlgEZcCPFpqOx_APj42f2Ao/ip/45.93.75.130/id/750c38c3d5a05dc4/itag/616/source/youtube/requiressl/yes/ratebypass/yes/pfa/1/wft/1/sgovp/clen%3D99471214%3Bdur%3D212.040%3Bgir%3Dyes%3Bitag%3D356%3Blmt%3D1694043438471036/hls_chunk_host/rr3---sn-5hne6nzy.googlevideo.com/mh/7c/mm/31,26/mn/sn-5hne6nzy,sn-aigzrnld/ms/au,onr/mv/m/mvi/3/pl/22/initcwndbps/2095000/vprv/1/playlist_type/DVR/dover/13/txp/4532434/mt/1694783390/fvip/1/short_key/1/keepalive/yes/fexp/24007246,24362685/beids/24350017/sparams/expire,ei,ip,id,itag,source,requiressl,ratebypass,pfa,wft,sgovp,vprv,playlist_type/sig/AOq0QJ8wRgIhANCPwWNfq6wBp1Xo1L8bRJpDrzOyv7kfH_J65cZ_PRZLAiEAwo-0wQgeIjPe7OgyAAvMCx_A9wd1h8Qyh7VntKwGJUs%3D/lsparams/hls_chunk_host,mh,mm,mn,ms,mv,mvi,pl,initcwndbps/lsig/AG3C_xAwRQIgIqS9Ub_6L9ScKXr0T9bkeu6TZsEsyNApYfF_MqeukqECIQCMSeJ1sSEw5QGMgHAW8Fhsir4TYHEK5KVg-PzJbrT6hw%3D%3D/playlist/index.m3u8', + 'media_content_type': 'VIDEO', + }) +# --- # name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-empty_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', diff --git a/tests/components/media_extractor/test_init.py b/tests/components/media_extractor/test_init.py index bc80e063697..8c8a1407ccc 100644 --- a/tests/components/media_extractor/test_init.py +++ b/tests/components/media_extractor/test_init.py @@ -100,7 +100,7 @@ async def test_extracting_playlist_no_entries( async def test_play_media_service( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, - service_calls: list[ServiceCall], + calls: list[ServiceCall], snapshot: SnapshotAssertion, request: pytest.FixtureRequest, config_fixture: str, @@ -123,14 +123,13 @@ async def test_play_media_service( ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data == snapshot + assert calls[0].data == snapshot async def test_download_error( hass: HomeAssistant, empty_media_extractor_config: dict[str, Any], - service_calls: list[ServiceCall], + calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, ) -> None: """Test handling DownloadError.""" @@ -153,7 +152,7 @@ async def test_download_error( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 0 assert f"Could not retrieve data for the URL: {YOUTUBE_VIDEO}" in caplog.text @@ -161,7 +160,7 @@ async def test_no_target_entity( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, empty_media_extractor_config: dict[str, Any], - service_calls: list[ServiceCall], + calls: list[ServiceCall], snapshot: SnapshotAssertion, ) -> None: """Test having no target entity.""" @@ -180,15 +179,14 @@ async def test_no_target_entity( ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data == snapshot + assert calls[0].data == snapshot async def test_playlist( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, empty_media_extractor_config: dict[str, Any], - service_calls: list[ServiceCall], + calls: list[ServiceCall], snapshot: SnapshotAssertion, ) -> None: """Test extracting a playlist.""" @@ -207,15 +205,14 @@ async def test_playlist( ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data == snapshot + assert calls[0].data == snapshot async def test_playlist_no_entries( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, empty_media_extractor_config: dict[str, Any], - service_calls: list[ServiceCall], + calls: list[ServiceCall], caplog: 
pytest.LogCaptureFixture, ) -> None: """Test extracting a playlist without entries.""" @@ -234,7 +231,7 @@ async def test_playlist_no_entries( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 0 assert ( f"Could not retrieve data for the URL: {YOUTUBE_EMPTY_PLAYLIST}" in caplog.text ) @@ -243,7 +240,7 @@ async def test_playlist_no_entries( async def test_query_error( hass: HomeAssistant, empty_media_extractor_config: dict[str, Any], - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test handling error with query.""" @@ -273,13 +270,15 @@ async def test_query_error( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 0 async def test_cookiefile_detection( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, empty_media_extractor_config: dict[str, Any], + calls: list[ServiceCall], + snapshot: SnapshotAssertion, caplog: pytest.LogCaptureFixture, ) -> None: """Test cookie file detection.""" @@ -290,19 +289,16 @@ async def test_cookiefile_detection( cookies_dir = os.path.join(hass.config.config_dir, "media_extractor") cookies_file = os.path.join(cookies_dir, "cookies.txt") - def _write_cookies_file() -> None: - if not os.path.exists(cookies_dir): - os.makedirs(cookies_dir) + if not os.path.exists(cookies_dir): + os.makedirs(cookies_dir) - with open(cookies_file, "w+", encoding="utf-8") as f: - f.write( - """# Netscape HTTP Cookie File + with open(cookies_file, "w+", encoding="utf-8") as f: + f.write( + """# Netscape HTTP Cookie File - .youtube.com TRUE / TRUE 1701708706 GPS 1 - """ - ) - - await hass.async_add_executor_job(_write_cookies_file) + .youtube.com TRUE / TRUE 1701708706 GPS 1 + """ + ) await hass.services.async_call( DOMAIN, @@ -317,7 +313,7 @@ async def test_cookiefile_detection( assert "Media extractor loaded cookies file" in caplog.text - await hass.async_add_executor_job(os.remove, cookies_file) + os.remove(cookies_file) await hass.services.async_call( DOMAIN, diff --git a/tests/components/media_player/common.py b/tests/components/media_player/common.py index c0cdfbf26d7..77076d903a6 100644 --- a/tests/components/media_player/common.py +++ b/tests/components/media_player/common.py @@ -16,7 +16,6 @@ from homeassistant.components.media_player import ( SERVICE_CLEAR_PLAYLIST, SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOURCE, - MediaPlayerEnqueue, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -36,79 +35,70 @@ from homeassistant.const import ( SERVICE_VOLUME_SET, SERVICE_VOLUME_UP, ) -from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass -async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): """Turn on specified media player or all.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) @bind_hass -def turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def turn_on(hass, entity_id=ENTITY_MATCH_ALL): """Turn on specified media player or all.""" hass.add_job(async_turn_on, hass, entity_id) -async def async_turn_off( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL): """Turn off specified media player or all.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) @bind_hass -def 
turn_off(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def turn_off(hass, entity_id=ENTITY_MATCH_ALL): """Turn off specified media player or all.""" hass.add_job(async_turn_off, hass, entity_id) -async def async_toggle(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +async def async_toggle(hass, entity_id=ENTITY_MATCH_ALL): """Toggle specified media player or all.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_TOGGLE, data, blocking=True) @bind_hass -def toggle(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def toggle(hass, entity_id=ENTITY_MATCH_ALL): """Toggle specified media player or all.""" hass.add_job(async_toggle, hass, entity_id) -async def async_volume_up( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_volume_up(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for volume up.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_VOLUME_UP, data, blocking=True) @bind_hass -def volume_up(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def volume_up(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for volume up.""" hass.add_job(async_volume_up, hass, entity_id) -async def async_volume_down( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_volume_down(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for volume down.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_VOLUME_DOWN, data, blocking=True) @bind_hass -def volume_down(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def volume_down(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for volume down.""" hass.add_job(async_volume_down, hass, entity_id) -async def async_mute_volume( - hass: HomeAssistant, mute: bool, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_mute_volume(hass, mute, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for muting the volume.""" data = {ATTR_MEDIA_VOLUME_MUTED: mute} @@ -119,16 +109,12 @@ async def async_mute_volume( @bind_hass -def mute_volume( - hass: HomeAssistant, mute: bool, entity_id: str = ENTITY_MATCH_ALL -) -> None: +def mute_volume(hass, mute, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for muting the volume.""" hass.add_job(async_mute_volume, hass, mute, entity_id) -async def async_set_volume_level( - hass: HomeAssistant, volume: float, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_set_volume_level(hass, volume, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for setting the volume.""" data = {ATTR_MEDIA_VOLUME_LEVEL: volume} @@ -139,16 +125,12 @@ async def async_set_volume_level( @bind_hass -def set_volume_level( - hass: HomeAssistant, volume: float, entity_id: str = ENTITY_MATCH_ALL -) -> None: +def set_volume_level(hass, volume, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for setting the volume.""" hass.add_job(async_set_volume_level, hass, volume, entity_id) -async def async_media_play_pause( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_media_play_pause(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for play/pause.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await 
hass.services.async_call( @@ -157,56 +139,48 @@ async def async_media_play_pause( @bind_hass -def media_play_pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def media_play_pause(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for play/pause.""" hass.add_job(async_media_play_pause, hass, entity_id) -async def async_media_play( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_media_play(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for play/pause.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_MEDIA_PLAY, data, blocking=True) @bind_hass -def media_play(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def media_play(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for play/pause.""" hass.add_job(async_media_play, hass, entity_id) -async def async_media_pause( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_media_pause(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for pause.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_MEDIA_PAUSE, data, blocking=True) @bind_hass -def media_pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def media_pause(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for pause.""" hass.add_job(async_media_pause, hass, entity_id) -async def async_media_stop( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_media_stop(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for stop.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_MEDIA_STOP, data, blocking=True) @bind_hass -def media_stop(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def media_stop(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for stop.""" hass.add_job(async_media_stop, hass, entity_id) -async def async_media_next_track( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_media_next_track(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for next track.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call( @@ -215,14 +189,12 @@ async def async_media_next_track( @bind_hass -def media_next_track(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def media_next_track(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for next track.""" hass.add_job(async_media_next_track, hass, entity_id) -async def async_media_previous_track( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_media_previous_track(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for prev track.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call( @@ -231,16 +203,12 @@ async def async_media_previous_track( @bind_hass -def media_previous_track( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +def media_previous_track(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for prev track.""" hass.add_job(async_media_previous_track, hass, entity_id) -async def async_media_seek( - hass: HomeAssistant, position: float, entity_id: str = ENTITY_MATCH_ALL -) -> None: 
+async def async_media_seek(hass, position, entity_id=ENTITY_MATCH_ALL): """Send the media player the command to seek in current playing media.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} data[ATTR_MEDIA_SEEK_POSITION] = position @@ -248,20 +216,14 @@ async def async_media_seek( @bind_hass -def media_seek( - hass: HomeAssistant, position: float, entity_id: str = ENTITY_MATCH_ALL -) -> None: +def media_seek(hass, position, entity_id=ENTITY_MATCH_ALL): """Send the media player the command to seek in current playing media.""" hass.add_job(async_media_seek, hass, position, entity_id) async def async_play_media( - hass: HomeAssistant, - media_type: str, - media_id: str, - entity_id: str = ENTITY_MATCH_ALL, - enqueue: MediaPlayerEnqueue | bool | None = None, -) -> None: + hass, media_type, media_id, entity_id=ENTITY_MATCH_ALL, enqueue=None +): """Send the media player the command for playing media.""" data = {ATTR_MEDIA_CONTENT_TYPE: media_type, ATTR_MEDIA_CONTENT_ID: media_id} @@ -275,20 +237,12 @@ async def async_play_media( @bind_hass -def play_media( - hass: HomeAssistant, - media_type: str, - media_id: str, - entity_id: str = ENTITY_MATCH_ALL, - enqueue: MediaPlayerEnqueue | bool | None = None, -) -> None: +def play_media(hass, media_type, media_id, entity_id=ENTITY_MATCH_ALL, enqueue=None): """Send the media player the command for playing media.""" hass.add_job(async_play_media, hass, media_type, media_id, entity_id, enqueue) -async def async_select_source( - hass: HomeAssistant, source: str, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_select_source(hass, source, entity_id=ENTITY_MATCH_ALL): """Send the media player the command to select input source.""" data = {ATTR_INPUT_SOURCE: source} @@ -299,22 +253,18 @@ async def async_select_source( @bind_hass -def select_source( - hass: HomeAssistant, source: str, entity_id: str = ENTITY_MATCH_ALL -) -> None: +def select_source(hass, source, entity_id=ENTITY_MATCH_ALL): """Send the media player the command to select input source.""" hass.add_job(async_select_source, hass, source, entity_id) -async def async_clear_playlist( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_clear_playlist(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for clear playlist.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_CLEAR_PLAYLIST, data, blocking=True) @bind_hass -def clear_playlist(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def clear_playlist(hass, entity_id=ENTITY_MATCH_ALL): """Send the media player the command for clear playlist.""" hass.add_job(async_clear_playlist, hass, entity_id) diff --git a/tests/components/media_player/test_async_helpers.py b/tests/components/media_player/test_async_helpers.py index 750d2861f21..783846d8857 100644 --- a/tests/components/media_player/test_async_helpers.py +++ b/tests/components/media_player/test_async_helpers.py @@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant class SimpleMediaPlayer(mp.MediaPlayerEntity): """Media player test class.""" - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass): """Initialize the test media player.""" self.hass = hass self._volume = 0 diff --git a/tests/components/media_player/test_browse_media.py b/tests/components/media_player/test_browse_media.py index ea684ea2bc2..2b7e40923bf 100644 --- a/tests/components/media_player/test_browse_media.py +++ 
b/tests/components/media_player/test_browse_media.py @@ -7,8 +7,8 @@ import pytest from homeassistant.components.media_player.browse_media import ( async_process_play_media_url, ) +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.network import NoURLAvailableError diff --git a/tests/components/media_player/test_device_condition.py b/tests/components/media_player/test_device_condition.py index 78d30e2ca6e..186cd674b39 100644 --- a/tests/components/media_player/test_device_condition.py +++ b/tests/components/media_player/test_device_condition.py @@ -20,7 +20,11 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -28,6 +32,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -126,7 +136,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -264,8 +274,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_on - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_on - event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") @@ -275,8 +285,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "is_off - event - test_event2" + assert len(calls) == 2 + assert calls[1].data["some"] == "is_off - event - test_event2" hass.states.async_set(entry.entity_id, STATE_IDLE) hass.bus.async_fire("test_event1") @@ -286,8 +296,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[2].data["some"] == "is_idle - event - test_event3" + assert len(calls) == 3 + assert calls[2].data["some"] == "is_idle - event - test_event3" hass.states.async_set(entry.entity_id, STATE_PAUSED) hass.bus.async_fire("test_event1") @@ -297,8 +307,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(service_calls) == 4 - assert service_calls[3].data["some"] == "is_paused - event - test_event4" + assert len(calls) == 4 + assert calls[3].data["some"] 
== "is_paused - event - test_event4" hass.states.async_set(entry.entity_id, STATE_PLAYING) hass.bus.async_fire("test_event1") @@ -308,8 +318,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(service_calls) == 5 - assert service_calls[4].data["some"] == "is_playing - event - test_event5" + assert len(calls) == 5 + assert calls[4].data["some"] == "is_playing - event - test_event5" hass.states.async_set(entry.entity_id, STATE_BUFFERING) hass.bus.async_fire("test_event1") @@ -319,15 +329,15 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(service_calls) == 6 - assert service_calls[5].data["some"] == "is_buffering - event - test_event6" + assert len(calls) == 6 + assert calls[5].data["some"] == "is_buffering - event - test_event6" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -370,5 +380,5 @@ async def test_if_state_legacy( ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_on - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_on - event - test_event1" diff --git a/tests/components/media_player/test_device_trigger.py b/tests/components/media_player/test_device_trigger.py index 4bb27b73f24..e9d5fbd646e 100644 --- a/tests/components/media_player/test_device_trigger.py +++ b/tests/components/media_player/test_device_trigger.py @@ -28,6 +28,7 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, ) @@ -36,6 +37,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -202,7 +209,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -258,8 +265,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is turning on. hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { + assert len(calls) == 2 + assert {calls[0].data["some"], calls[1].data["some"]} == { "turned_on - device - media_player.test_5678 - off - on - None", "changed_states - device - media_player.test_5678 - off - on - None", } @@ -267,8 +274,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is turning off. 
hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 4 - assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { + assert len(calls) == 4 + assert {calls[2].data["some"], calls[3].data["some"]} == { "turned_off - device - media_player.test_5678 - on - off - None", "changed_states - device - media_player.test_5678 - on - off - None", } @@ -276,8 +283,8 @@ async def test_if_fires_on_state_change( # Fake that the entity becomes idle. hass.states.async_set(entry.entity_id, STATE_IDLE) await hass.async_block_till_done() - assert len(service_calls) == 6 - assert {service_calls[4].data["some"], service_calls[5].data["some"]} == { + assert len(calls) == 6 + assert {calls[4].data["some"], calls[5].data["some"]} == { "idle - device - media_player.test_5678 - off - idle - None", "changed_states - device - media_player.test_5678 - off - idle - None", } @@ -285,8 +292,8 @@ async def test_if_fires_on_state_change( # Fake that the entity starts playing. hass.states.async_set(entry.entity_id, STATE_PLAYING) await hass.async_block_till_done() - assert len(service_calls) == 8 - assert {service_calls[6].data["some"], service_calls[7].data["some"]} == { + assert len(calls) == 8 + assert {calls[6].data["some"], calls[7].data["some"]} == { "playing - device - media_player.test_5678 - idle - playing - None", "changed_states - device - media_player.test_5678 - idle - playing - None", } @@ -294,8 +301,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is paused. hass.states.async_set(entry.entity_id, STATE_PAUSED) await hass.async_block_till_done() - assert len(service_calls) == 10 - assert {service_calls[8].data["some"], service_calls[9].data["some"]} == { + assert len(calls) == 10 + assert {calls[8].data["some"], calls[9].data["some"]} == { "paused - device - media_player.test_5678 - playing - paused - None", "changed_states - device - media_player.test_5678 - playing - paused - None", } @@ -303,8 +310,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is buffering. hass.states.async_set(entry.entity_id, STATE_BUFFERING) await hass.async_block_till_done() - assert len(service_calls) == 12 - assert {service_calls[10].data["some"], service_calls[11].data["some"]} == { + assert len(calls) == 12 + assert {calls[10].data["some"], calls[11].data["some"]} == { "buffering - device - media_player.test_5678 - paused - buffering - None", "changed_states - device - media_player.test_5678 - paused - buffering - None", } @@ -314,7 +321,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -362,9 +369,9 @@ async def test_if_fires_on_state_change_legacy( # Fake that the entity is turning on. 
hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == "turned_on - device - media_player.test_5678 - off - on - None" ) @@ -373,7 +380,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -419,16 +426,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await hass.async_block_till_done() assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - off - on - 0:00:05" ) diff --git a/tests/components/media_player/test_init.py b/tests/components/media_player/test_init.py index 47f0530f0ff..11898edfc36 100644 --- a/tests/components/media_player/test_init.py +++ b/tests/components/media_player/test_init.py @@ -1,14 +1,11 @@ """Test the base functions of the media player.""" -from enum import Enum from http import HTTPStatus -from types import ModuleType from unittest.mock import patch import pytest import voluptuous as vol -from homeassistant.components import media_player from homeassistant.components.media_player import ( BrowseMedia, MediaClass, @@ -21,7 +18,6 @@ from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.common import help_test_all, import_and_test_deprecated_constant_enum from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -32,71 +28,6 @@ async def setup_homeassistant(hass: HomeAssistant): await async_setup_component(hass, "homeassistant", {}) -def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: - return [ - (enum_field, constant_prefix) - for enum_field in enum - if enum_field - not in [ - MediaPlayerEntityFeature.MEDIA_ANNOUNCE, - MediaPlayerEntityFeature.MEDIA_ENQUEUE, - ] - ] - - -@pytest.mark.parametrize( - "module", - [media_player, media_player.const], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize( - ("enum", "constant_prefix"), - _create_tuples(media_player.MediaPlayerEntityFeature, "SUPPORT_") - + _create_tuples(media_player.MediaPlayerDeviceClass, "DEVICE_CLASS_"), -) -@pytest.mark.parametrize( - "module", - [media_player], -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, - module: ModuleType, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, enum, constant_prefix, "2025.10" - ) - - -@pytest.mark.parametrize( - ("enum", "constant_prefix"), - _create_tuples(media_player.MediaClass, "MEDIA_CLASS_") - + _create_tuples(media_player.MediaPlayerEntityFeature, "SUPPORT_") - + 
_create_tuples(media_player.MediaType, "MEDIA_TYPE_") - + _create_tuples(media_player.RepeatMode, "REPEAT_MODE_"), -) -@pytest.mark.parametrize( - "module", - [media_player.const], -) -def test_deprecated_constants_const( - caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, - module: ModuleType, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, enum, constant_prefix, "2025.10" - ) - - async def test_get_image_http( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator ) -> None: @@ -367,20 +298,10 @@ async def test_enqueue_alert_exclusive(hass: HomeAssistant) -> None: ) -@pytest.mark.parametrize( - "media_content_id", - [ - "a/b c/d+e%2Fg{}", - "a/b c/d+e%2D", - "a/b c/d+e%2E", - "2012-06%20Pool%20party%20%2F%20BBQ", - ], -) async def test_get_async_get_browse_image_quoting( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, - media_content_id: str, ) -> None: """Test get browse image using media_content_id with special characters. @@ -404,6 +325,7 @@ async def test_get_async_get_browse_image_quoting( "homeassistant.components.media_player.MediaPlayerEntity." "async_get_browse_image", ) as mock_browse_image: + media_content_id = "a/b c/d+e%2Fg{}" url = player.get_browse_image_url("album", media_content_id) await client.get(url) mock_browse_image.assert_called_with("album", media_content_id, None) diff --git a/tests/components/media_source/test_local_source.py b/tests/components/media_source/test_local_source.py index d3ae95736a5..4c7fbd06edc 100644 --- a/tests/components/media_source/test_local_source.py +++ b/tests/components/media_source/test_local_source.py @@ -1,6 +1,5 @@ """Test Local Media Source.""" -from collections.abc import AsyncGenerator from http import HTTPStatus import io from pathlib import Path @@ -8,11 +7,12 @@ from tempfile import TemporaryDirectory from unittest.mock import patch import pytest +from typing_extensions import AsyncGenerator from homeassistant.components import media_source, websocket_api from homeassistant.components.media_source import const +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from tests.common import MockUser diff --git a/tests/components/melcloud/snapshots/test_diagnostics.ambr b/tests/components/melcloud/snapshots/test_diagnostics.ambr index e6a432de07e..7b0173c240e 100644 --- a/tests/components/melcloud/snapshots/test_diagnostics.ambr +++ b/tests/components/melcloud/snapshots/test_diagnostics.ambr @@ -7,8 +7,6 @@ 'data': dict({ }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'melcloud', 'entry_id': 'TEST_ENTRY_ID', 'minor_version': 1, diff --git a/tests/components/melcloud/test_config_flow.py b/tests/components/melcloud/test_config_flow.py index 3f6e42ac264..c1c6c10ac4c 100644 --- a/tests/components/melcloud/test_config_flow.py +++ b/tests/components/melcloud/test_config_flow.py @@ -9,6 +9,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.melcloud.const import DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE from homeassistant.const import CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -165,7 +166,15 @@ async def test_token_reauthentication( ) 
mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -203,7 +212,15 @@ async def test_form_errors_reauthentication( ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) with patch( "homeassistant.components.melcloud.async_setup_entry", @@ -253,7 +270,15 @@ async def test_client_errors_reauthentication( ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) with patch( "homeassistant.components.melcloud.async_setup_entry", @@ -303,7 +328,15 @@ async def test_reconfigure_flow( ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) assert result["type"] is FlowResultType.FORM @@ -362,7 +395,15 @@ async def test_form_errors_reconfigure( ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) with patch( "homeassistant.components.melcloud.async_setup_entry", diff --git a/tests/components/melcloud/test_diagnostics.py b/tests/components/melcloud/test_diagnostics.py index 32ec94a54d1..cbb35eadfd4 100644 --- a/tests/components/melcloud/test_diagnostics.py +++ b/tests/components/melcloud/test_diagnostics.py @@ -3,7 +3,6 @@ from unittest.mock import patch from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.components.melcloud.const import DOMAIN from homeassistant.core import HomeAssistant @@ -37,4 +36,4 @@ async def test_get_config_entry_diagnostics( diagnostics = await get_diagnostics_for_config_entry( hass, hass_client, config_entry ) - assert diagnostics == snapshot(exclude=props("created_at", "modified_at")) + assert diagnostics == snapshot diff --git a/tests/components/melnor/conftest.py b/tests/components/melnor/conftest.py index f30213c4efd..38bc1a62d51 100644 --- a/tests/components/melnor/conftest.py +++ b/tests/components/melnor/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations -from collections.abc import Generator from datetime import UTC, datetime, time, timedelta from unittest.mock import AsyncMock, _patch, patch from melnor_bluetooth.device import Device import pytest +from typing_extensions import Generator from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak from homeassistant.components.melnor.const import DOMAIN diff --git a/tests/components/melnor/test_sensor.py b/tests/components/melnor/test_sensor.py index a2ba23d9e61..d04494d44ad 
100644 --- a/tests/components/melnor/test_sensor.py +++ b/tests/components/melnor/test_sensor.py @@ -2,8 +2,6 @@ from __future__ import annotations -from datetime import timedelta - from freezegun import freeze_time from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass @@ -53,7 +51,7 @@ async def test_minutes_remaining_sensor(hass: HomeAssistant) -> None: entry = mock_config_entry(hass) device = mock_melnor_device() - end_time = now + timedelta(minutes=10) + end_time = now + dt_util.dt.timedelta(minutes=10) # we control this mock @@ -78,7 +76,7 @@ async def test_minutes_remaining_sensor(hass: HomeAssistant) -> None: # Turn valve on device.zone1._is_watering = True - async_fire_time_changed(hass, now + timedelta(seconds=10)) + async_fire_time_changed(hass, now + dt_util.dt.timedelta(seconds=10)) await hass.async_block_till_done() # Valve is on, report 10 @@ -96,7 +94,7 @@ async def test_schedule_next_cycle_sensor(hass: HomeAssistant) -> None: entry = mock_config_entry(hass) device = mock_melnor_device() - next_cycle = now + timedelta(minutes=10) + next_cycle = now + dt_util.dt.timedelta(minutes=10) # we control this mock device.zone1.frequency._next_run_time = next_cycle @@ -120,7 +118,7 @@ async def test_schedule_next_cycle_sensor(hass: HomeAssistant) -> None: # Turn valve on device.zone1._schedule_enabled = True - async_fire_time_changed(hass, now + timedelta(seconds=10)) + async_fire_time_changed(hass, now + dt_util.dt.timedelta(seconds=10)) await hass.async_block_till_done() # Valve is on, report 10 diff --git a/tests/components/melnor/test_time.py b/tests/components/melnor/test_time.py index 50b51d31ff8..1d12c3b47f8 100644 --- a/tests/components/melnor/test_time.py +++ b/tests/components/melnor/test_time.py @@ -2,7 +2,7 @@ from __future__ import annotations -from datetime import time, timedelta +from datetime import time from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util @@ -46,7 +46,7 @@ async def test_schedule_start_time(hass: HomeAssistant) -> None: blocking=True, ) - async_fire_time_changed(hass, now + timedelta(seconds=10)) + async_fire_time_changed(hass, now + dt_util.dt.timedelta(seconds=10)) await hass.async_block_till_done() time_entity = hass.states.get("time.zone_1_schedule_start_time") diff --git a/tests/components/meraki/test_device_tracker.py b/tests/components/meraki/test_device_tracker.py index 139396a0689..c3126f7b76a 100644 --- a/tests/components/meraki/test_device_tracker.py +++ b/tests/components/meraki/test_device_tracker.py @@ -142,8 +142,12 @@ async def test_data_will_be_saved( req = await meraki_client.post(URL, data=json.dumps(data)) assert req.status == HTTPStatus.OK await hass.async_block_till_done() - state_name = hass.states.get("device_tracker.00_26_ab_b8_a9_a4").state + state_name = hass.states.get( + "{}.{}".format("device_tracker", "00_26_ab_b8_a9_a4") + ).state assert state_name == "home" - state_name = hass.states.get("device_tracker.00_26_ab_b8_a9_a5").state + state_name = hass.states.get( + "{}.{}".format("device_tracker", "00_26_ab_b8_a9_a5") + ).state assert state_name == "home" diff --git a/tests/components/met/conftest.py b/tests/components/met/conftest.py index 92b81d3d320..699c1c81795 100644 --- a/tests/components/met/conftest.py +++ b/tests/components/met/conftest.py @@ -17,9 +17,8 @@ def mock_weather(): "pressure": 100, "humidity": 50, "wind_speed": 10, - "wind_bearing": 90, + "wind_bearing": "NE", "dew_point": 12.1, - "uv_index": 1.1, } mock_data.get_forecast.return_value = {} 
yield mock_data diff --git a/tests/components/met/test_config_flow.py b/tests/components/met/test_config_flow.py index 1a2485615d7..c7f0311edef 100644 --- a/tests/components/met/test_config_flow.py +++ b/tests/components/met/test_config_flow.py @@ -8,9 +8,9 @@ import pytest from homeassistant import config_entries from homeassistant.components.met.const import DOMAIN, HOME_LOCATION_NAME +from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_ELEVATION, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from . import init_integration diff --git a/tests/components/met/test_init.py b/tests/components/met/test_init.py index 54f6930513b..b329e2ff01c 100644 --- a/tests/components/met/test_init.py +++ b/tests/components/met/test_init.py @@ -7,9 +7,9 @@ from homeassistant.components.met.const import ( DEFAULT_HOME_LONGITUDE, DOMAIN, ) +from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import device_registry as dr from . import init_integration diff --git a/tests/components/met/test_weather.py b/tests/components/met/test_weather.py index ac3904684e3..80820ef0186 100644 --- a/tests/components/met/test_weather.py +++ b/tests/components/met/test_weather.py @@ -2,22 +2,10 @@ from homeassistant import config_entries from homeassistant.components.met import DOMAIN -from homeassistant.components.weather import ( - ATTR_CONDITION_CLOUDY, - ATTR_WEATHER_DEW_POINT, - ATTR_WEATHER_HUMIDITY, - ATTR_WEATHER_PRESSURE, - ATTR_WEATHER_TEMPERATURE, - ATTR_WEATHER_UV_INDEX, - ATTR_WEATHER_WIND_BEARING, - ATTR_WEATHER_WIND_SPEED, - DOMAIN as WEATHER_DOMAIN, -) +from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from . 
import init_integration - async def test_new_config_entry( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_weather @@ -48,25 +36,6 @@ async def test_legacy_config_entry( assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 1 -async def test_weather(hass: HomeAssistant, mock_weather) -> None: - """Test states of the weather.""" - - await init_integration(hass) - assert len(hass.states.async_entity_ids("weather")) == 1 - entity_id = hass.states.async_entity_ids("weather")[0] - - state = hass.states.get(entity_id) - assert state - assert state.state == ATTR_CONDITION_CLOUDY - assert state.attributes[ATTR_WEATHER_TEMPERATURE] == 15 - assert state.attributes[ATTR_WEATHER_PRESSURE] == 100 - assert state.attributes[ATTR_WEATHER_HUMIDITY] == 50 - assert state.attributes[ATTR_WEATHER_WIND_SPEED] == 10 - assert state.attributes[ATTR_WEATHER_WIND_BEARING] == 90 - assert state.attributes[ATTR_WEATHER_DEW_POINT] == 12.1 - assert state.attributes[ATTR_WEATHER_UV_INDEX] == 1.1 - - async def test_tracking_home(hass: HomeAssistant, mock_weather) -> None: """Test we track home.""" await hass.config_entries.flow.async_init("met", context={"source": "onboarding"}) diff --git a/tests/components/met_eireann/snapshots/test_weather.ambr b/tests/components/met_eireann/snapshots/test_weather.ambr index de8b69de18a..90f36d09d25 100644 --- a/tests/components/met_eireann/snapshots/test_weather.ambr +++ b/tests/components/met_eireann/snapshots/test_weather.ambr @@ -1,4 +1,104 @@ # serializer version: 1 +# name: test_forecast_service + dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-08T12:00:00+00:00', + 'temperature': 10.0, + }), + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-09T12:00:00+00:00', + 'temperature': 20.0, + }), + ]), + }) +# --- +# name: test_forecast_service.1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-08T12:00:00+00:00', + 'temperature': 10.0, + }), + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-09T12:00:00+00:00', + 'temperature': 20.0, + }), + ]), + }) +# --- +# name: test_forecast_service[forecast] + dict({ + 'weather.somewhere': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-08T12:00:00+00:00', + 'temperature': 10.0, + }), + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-09T12:00:00+00:00', + 'temperature': 20.0, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].1 + dict({ + 'weather.somewhere': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-08T12:00:00+00:00', + 'temperature': 10.0, + }), + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-09T12:00:00+00:00', + 'temperature': 20.0, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-08T12:00:00+00:00', + 'temperature': 10.0, + }), + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-09T12:00:00+00:00', + 'temperature': 20.0, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-08T12:00:00+00:00', + 'temperature': 10.0, + }), + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-09T12:00:00+00:00', + 'temperature': 20.0, + }), + ]), + }) +# --- # name: 
test_forecast_service[get_forecasts] dict({ 'weather.somewhere': dict({ diff --git a/tests/components/metoffice/snapshots/test_weather.ambr b/tests/components/metoffice/snapshots/test_weather.ambr index 0bbc0e06a0a..a6991a8631b 100644 --- a/tests/components/metoffice/snapshots/test_weather.ambr +++ b/tests/components/metoffice/snapshots/test_weather.ambr @@ -1,4 +1,658 @@ # serializer version: 1 +# name: test_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 13.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-25T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 19.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T18:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 17.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 14.0, + 'wind_bearing': 'NW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T00:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 13.0, + 'wind_bearing': 'WSW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T03:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T09:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T15:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T18:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T00:00:00+00:00', + 'precipitation_probability': 11, + 'temperature': 9.0, + 
'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T03:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 8.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T06:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 8.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 4, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T18:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-27T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T00:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 8.0, + 'wind_bearing': 'NNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 7.0, + 'wind_bearing': 'W', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-28T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 6.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-28T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T15:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T18:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NNE', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T00:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'E', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-29T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 8.0, + 'wind_bearing': 'SSE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T06:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 8.0, + 'wind_bearing': 'SE', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T09:00:00+00:00', + 'precipitation_probability': 12, + 
'temperature': 10.0, + 'wind_bearing': 'SE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 47, + 'temperature': 12.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'pouring', + 'datetime': '2020-04-29T15:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T18:00:00+00:00', + 'precipitation_probability': 39, + 'temperature': 12.0, + 'wind_bearing': 'SSE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T21:00:00+00:00', + 'precipitation_probability': 19, + 'temperature': 11.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].2 + dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 13.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].3 + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-25T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 19.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T18:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 17.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 14.0, + 'wind_bearing': 'NW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T00:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 13.0, + 'wind_bearing': 'WSW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T03:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T09:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T15:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T18:00:00+00:00', 
+ 'precipitation_probability': 9, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T00:00:00+00:00', + 'precipitation_probability': 11, + 'temperature': 9.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T03:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 8.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T06:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 8.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 4, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T18:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-27T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T00:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 8.0, + 'wind_bearing': 'NNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 7.0, + 'wind_bearing': 'W', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-28T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 6.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-28T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T15:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T18:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NNE', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T00:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'E', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': 
'2020-04-29T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 8.0, + 'wind_bearing': 'SSE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T06:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 8.0, + 'wind_bearing': 'SE', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T09:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 10.0, + 'wind_bearing': 'SE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 47, + 'temperature': 12.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'pouring', + 'datetime': '2020-04-29T15:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T18:00:00+00:00', + 'precipitation_probability': 39, + 'temperature': 12.0, + 'wind_bearing': 'SSE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T21:00:00+00:00', + 'precipitation_probability': 19, + 'temperature': 11.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].4 + dict({ + 'forecast': list([ + ]), + }) +# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.met_office_wavertree_daily': dict({ diff --git a/tests/components/mfi/test_sensor.py b/tests/components/mfi/test_sensor.py index 37512ca78f8..49efdd5dc71 100644 --- a/tests/components/mfi/test_sensor.py +++ b/tests/components/mfi/test_sensor.py @@ -116,13 +116,13 @@ async def test_setup_adds_proper_devices(hass: HomeAssistant) -> None: @pytest.fixture(name="port") -def port_fixture() -> mock.MagicMock: +def port_fixture(): """Port fixture.""" return mock.MagicMock() @pytest.fixture(name="sensor") -def sensor_fixture(hass: HomeAssistant, port: mock.MagicMock) -> mfi.MfiSensor: +def sensor_fixture(hass, port): """Sensor fixture.""" sensor = mfi.MfiSensor(port, hass) sensor.hass = hass diff --git a/tests/components/microbees/test_config_flow.py b/tests/components/microbees/test_config_flow.py index f4e074d000d..d168dcd5017 100644 --- a/tests/components/microbees/test_config_flow.py +++ b/tests/components/microbees/test_config_flow.py @@ -6,7 +6,7 @@ from microBeesPy import MicroBeesException import pytest from homeassistant.components.microbees.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -144,7 +144,14 @@ async def test_config_reauth_profile( """Test reauth an existing profile reauthenticates the config entry.""" await setup_integration(hass, config_entry) - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -198,7 +205,14 @@ async def test_config_reauth_wrong_account( """Test reauth with wrong account.""" await setup_integration(hass, config_entry) microbees.return_value.getMyProfile.return_value.id = 12345 - result = await 
config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/microsoft/test_tts.py b/tests/components/microsoft/test_tts.py index e10ec589113..082def901c5 100644 --- a/tests/components/microsoft/test_tts.py +++ b/tests/components/microsoft/test_tts.py @@ -8,20 +8,32 @@ from pycsspeechtts import pycsspeechtts import pytest from homeassistant.components import tts -from homeassistant.components.media_player import ATTR_MEDIA_CONTENT_ID +from homeassistant.components.media_player import ( + ATTR_MEDIA_CONTENT_ID, + DOMAIN as DOMAIN_MP, + SERVICE_PLAY_MEDIA, +) from homeassistant.components.microsoft.tts import SUPPORTED_LANGUAGES +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import ServiceNotFound from homeassistant.setup import async_setup_component +from tests.common import async_mock_service from tests.components.tts.common import retrieve_media from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: """Mock the TTS cache dir with empty dir.""" + return mock_tts_cache_dir + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Mock media player calls.""" + return async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) @pytest.fixture(autouse=True) @@ -46,7 +58,7 @@ async def test_service_say( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test service call say.""" @@ -65,11 +77,9 @@ async def test_service_say( blocking=True, ) - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - await retrieve_media( - hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] - ) + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == HTTPStatus.OK ) @@ -92,7 +102,7 @@ async def test_service_say_en_gb_config( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test service call say with en-gb code in the config.""" @@ -120,11 +130,9 @@ async def test_service_say_en_gb_config( blocking=True, ) - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - await retrieve_media( - hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] - ) + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == HTTPStatus.OK ) @@ -146,7 +154,7 @@ async def test_service_say_en_gb_service( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test service call say with en-gb code in the service.""" @@ -169,11 +177,9 @@ async def test_service_say_en_gb_service( blocking=True, ) - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - await retrieve_media( - hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] - ) + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == HTTPStatus.OK ) @@ -195,7 +201,7 
@@ async def test_service_say_fa_ir_config( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test service call say with fa-ir code in the config.""" @@ -223,11 +229,9 @@ async def test_service_say_fa_ir_config( blocking=True, ) - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - await retrieve_media( - hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] - ) + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == HTTPStatus.OK ) @@ -249,7 +253,7 @@ async def test_service_say_fa_ir_service( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test service call say with fa-ir code in the service.""" @@ -276,11 +280,9 @@ async def test_service_say_fa_ir_service( blocking=True, ) - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - await retrieve_media( - hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] - ) + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == HTTPStatus.OK ) @@ -315,7 +317,9 @@ def test_supported_languages() -> None: assert len(SUPPORTED_LANGUAGES) > 100 -async def test_invalid_language(hass: HomeAssistant, mock_tts) -> None: +async def test_invalid_language( + hass: HomeAssistant, mock_tts, calls: list[ServiceCall] +) -> None: """Test setup component with invalid language.""" await async_setup_component( hass, @@ -335,6 +339,7 @@ async def test_invalid_language(hass: HomeAssistant, mock_tts) -> None: blocking=True, ) + assert len(calls) == 0 assert len(mock_tts.mock_calls) == 0 @@ -342,7 +347,7 @@ async def test_service_say_error( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test service call say with http error.""" mock_tts.return_value.speak.side_effect = pycsspeechtts.requests.HTTPError @@ -361,11 +366,9 @@ async def test_service_say_error( blocking=True, ) - assert len(service_calls) == 2 + assert len(calls) == 1 assert ( - await retrieve_media( - hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] - ) + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == HTTPStatus.NOT_FOUND ) diff --git a/tests/components/microsoft_face/test_init.py b/tests/components/microsoft_face/test_init.py index 0819dd82f21..63014a095c0 100644 --- a/tests/components/microsoft_face/test_init.py +++ b/tests/components/microsoft_face/test_init.py @@ -31,7 +31,7 @@ async def setup_homeassistant(hass: HomeAssistant): await async_setup_component(hass, "homeassistant", {}) -def create_group(hass: HomeAssistant, name: str) -> None: +def create_group(hass, name): """Create a new person group. This is a legacy helper method. Do not use it for new tests. @@ -40,7 +40,7 @@ def create_group(hass: HomeAssistant, name: str) -> None: hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_CREATE_GROUP, data)) -def delete_group(hass: HomeAssistant, name: str) -> None: +def delete_group(hass, name): """Delete a person group. This is a legacy helper method. Do not use it for new tests. @@ -49,7 +49,7 @@ def delete_group(hass: HomeAssistant, name: str) -> None: hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_DELETE_GROUP, data)) -def train_group(hass: HomeAssistant, group: str) -> None: +def train_group(hass, group): """Train a person group. 
This is a legacy helper method. Do not use it for new tests. @@ -58,7 +58,7 @@ def train_group(hass: HomeAssistant, group: str) -> None: hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_TRAIN_GROUP, data)) -def create_person(hass: HomeAssistant, group: str, name: str) -> None: +def create_person(hass, group, name): """Create a person in a group. This is a legacy helper method. Do not use it for new tests. @@ -69,7 +69,7 @@ def create_person(hass: HomeAssistant, group: str, name: str) -> None: ) -def delete_person(hass: HomeAssistant, group: str, name: str) -> None: +def delete_person(hass, group, name): """Delete a person in a group. This is a legacy helper method. Do not use it for new tests. @@ -80,9 +80,7 @@ def delete_person(hass: HomeAssistant, group: str, name: str) -> None: ) -def face_person( - hass: HomeAssistant, group: str, person: str, camera_entity: str -) -> None: +def face_person(hass, group, person, camera_entity): """Add a new face picture to a person. This is a legacy helper method. Do not use it for new tests. diff --git a/tests/components/mikrotik/test_config_flow.py b/tests/components/mikrotik/test_config_flow.py index f65c7f0dfc5..f34fde0c9a5 100644 --- a/tests/components/mikrotik/test_config_flow.py +++ b/tests/components/mikrotik/test_config_flow.py @@ -14,7 +14,6 @@ from homeassistant.components.mikrotik.const import ( ) from homeassistant.const import ( CONF_HOST, - CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, @@ -176,14 +175,18 @@ async def test_reauth_success(hass: HomeAssistant, api) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=DEMO_USER_INPUT, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == { - CONF_NAME: "Mock Title", - CONF_USERNAME: "username", - } + assert result["description_placeholders"] == {CONF_USERNAME: "username"} result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -204,7 +207,14 @@ async def test_reauth_failed(hass: HomeAssistant, auth_error) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=DEMO_USER_INPUT, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -230,7 +240,14 @@ async def test_reauth_failed_conn_error(hass: HomeAssistant, conn_error) -> None ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=DEMO_USER_INPUT, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/mjpeg/conftest.py b/tests/components/mjpeg/conftest.py index 12e0b4c0faf..00eaf946113 100644 --- a/tests/components/mjpeg/conftest.py +++ b/tests/components/mjpeg/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from requests_mock import Mocker +from typing_extensions import Generator from homeassistant.components.mjpeg.const import ( CONF_MJPEG_URL, 
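Illustrative aside, not part of the patch: a minimal sketch of the typed yield-fixture pattern that the mjpeg conftest hunk above adjusts, assuming the typing_extensions Generator import it introduces. The fixture name and the patch target are assumptions for illustration only, not taken from the diff.

import pytest
from typing_extensions import Generator
from unittest.mock import AsyncMock, patch


@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock, None, None]:
    """Patch a (hypothetical) async_setup_entry target and yield the mock."""
    # The context manager keeps the patch active for the whole test and
    # undoes it automatically when the generator resumes after the yield.
    with patch(
        "homeassistant.components.mjpeg.async_setup_entry", return_value=True
    ) as mock:
        yield mock

Conftest modules in these test packages commonly use this shape so that each test gets a fresh mock and teardown happens without any explicit cleanup code.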
diff --git a/tests/components/mobile_app/conftest.py b/tests/components/mobile_app/conftest.py index 53e90cb61ae..657b80a759a 100644 --- a/tests/components/mobile_app/conftest.py +++ b/tests/components/mobile_app/conftest.py @@ -1,7 +1,6 @@ """Tests for mobile_app component.""" from http import HTTPStatus -from typing import Any from aiohttp.test_utils import TestClient import pytest @@ -16,9 +15,7 @@ from tests.typing import ClientSessionGenerator @pytest.fixture -async def create_registrations( - hass: HomeAssistant, webhook_client: TestClient -) -> tuple[dict[str, Any], dict[str, Any]]: +async def create_registrations(hass, webhook_client): """Return two new registrations.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) @@ -40,7 +37,7 @@ async def create_registrations( @pytest.fixture -async def push_registration(hass: HomeAssistant, webhook_client: TestClient): +async def push_registration(hass, webhook_client): """Return registration with push notifications enabled.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) @@ -70,7 +67,7 @@ async def webhook_client( @pytest.fixture(autouse=True) -async def setup_ws(hass: HomeAssistant) -> None: +async def setup_ws(hass): """Configure the websocket_api component.""" assert await async_setup_component(hass, "repairs", {}) assert await async_setup_component(hass, "websocket_api", {}) diff --git a/tests/components/mobile_app/test_binary_sensor.py b/tests/components/mobile_app/test_binary_sensor.py index 9ffb61f92ab..acebd8796b7 100644 --- a/tests/components/mobile_app/test_binary_sensor.py +++ b/tests/components/mobile_app/test_binary_sensor.py @@ -1,9 +1,7 @@ """Entity tests for mobile_app.""" from http import HTTPStatus -from typing import Any -from aiohttp.test_utils import TestClient import pytest from homeassistant.const import STATE_UNKNOWN @@ -14,8 +12,8 @@ from homeassistant.helpers import device_registry as dr async def test_sensor( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + create_registrations, + webhook_client, ) -> None: """Test that sensors can be registered and updated.""" webhook_id = create_registrations[1]["webhook_id"] @@ -100,9 +98,7 @@ async def test_sensor( async def test_sensor_must_register( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test that sensors must be registered before updating.""" webhook_id = create_registrations[1]["webhook_id"] @@ -126,8 +122,8 @@ async def test_sensor_must_register( async def test_sensor_id_no_dupes( hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + create_registrations, + webhook_client, caplog: pytest.LogCaptureFixture, ) -> None: """Test that a duplicate unique ID in registration updates the sensor.""" @@ -189,9 +185,7 @@ async def test_sensor_id_no_dupes( async def test_register_sensor_no_state( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test that sensors can be registered, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] @@ -250,9 +244,7 @@ async def test_register_sensor_no_state( async def test_update_sensor_no_state( - hass: HomeAssistant, - create_registrations: 
tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test that sensors can be updated, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] diff --git a/tests/components/mobile_app/test_device_tracker.py b/tests/components/mobile_app/test_device_tracker.py index 92a956ab629..e3e2ce3227a 100644 --- a/tests/components/mobile_app/test_device_tracker.py +++ b/tests/components/mobile_app/test_device_tracker.py @@ -1,21 +1,16 @@ """Test mobile app device tracker.""" from http import HTTPStatus -from typing import Any - -from aiohttp.test_utils import TestClient from homeassistant.core import HomeAssistant async def test_sending_location( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test sending a location via a webhook.""" resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json={ "type": "update_location", "data": { @@ -48,7 +43,7 @@ async def test_sending_location( assert state.attributes["vertical_accuracy"] == 80 resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json={ "type": "update_location", "data": { @@ -81,13 +76,11 @@ async def test_sending_location( async def test_restoring_location( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test sending a location via a webhook.""" resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json={ "type": "update_location", "data": { diff --git a/tests/components/mobile_app/test_init.py b/tests/components/mobile_app/test_init.py index a4edbea6ecf..15380a0d8d7 100644 --- a/tests/components/mobile_app/test_init.py +++ b/tests/components/mobile_app/test_init.py @@ -89,7 +89,6 @@ async def _test_create_cloud_hook( "homeassistant.components.cloud.async_active_subscription", return_value=async_active_subscription_return_value, ), - patch("homeassistant.components.cloud.async_is_logged_in", return_value=True), patch("homeassistant.components.cloud.async_is_connected", return_value=True), patch( "homeassistant.components.cloud.async_get_or_create_cloudhook", @@ -188,75 +187,3 @@ async def test_create_cloud_hook_after_connection( ) await _test_create_cloud_hook(hass, hass_admin_user, {}, False, additional_steps) - - -@pytest.mark.parametrize( - ("cloud_logged_in", "should_cloudhook_exist"), - [(True, True), (False, False)], -) -async def test_delete_cloud_hook( - hass: HomeAssistant, - hass_admin_user: MockUser, - cloud_logged_in: bool, - should_cloudhook_exist: bool, -) -> None: - """Test deleting the cloud hook only when logged out of the cloud.""" - - config_entry = MockConfigEntry( - data={ - **REGISTER_CLEARTEXT, - CONF_WEBHOOK_ID: "test-webhook-id", - ATTR_DEVICE_NAME: "Test", - ATTR_DEVICE_ID: "Test", - CONF_USER_ID: hass_admin_user.id, - CONF_CLOUDHOOK_URL: "https://hook-url-already-exists", - }, - domain=DOMAIN, - title="Test", - ) - config_entry.add_to_hass(hass) - - with ( - patch( - 
"homeassistant.components.cloud.async_is_logged_in", - return_value=cloud_logged_in, - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - assert (CONF_CLOUDHOOK_URL in config_entry.data) == should_cloudhook_exist - - -async def test_remove_entry_on_user_remove( - hass: HomeAssistant, - hass_admin_user: MockUser, -) -> None: - """Test removing related config entry, when a user gets removed from HA.""" - - config_entry = MockConfigEntry( - data={ - **REGISTER_CLEARTEXT, - CONF_WEBHOOK_ID: "test-webhook-id", - ATTR_DEVICE_NAME: "Test", - ATTR_DEVICE_ID: "Test", - CONF_USER_ID: hass_admin_user.id, - CONF_CLOUDHOOK_URL: "https://hook-url-already-exists", - }, - domain=DOMAIN, - title="Test", - ) - config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - - await hass.auth.async_remove_user(hass_admin_user) - await hass.async_block_till_done() - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 0 diff --git a/tests/components/mobile_app/test_sensor.py b/tests/components/mobile_app/test_sensor.py index fb124797523..a7fb0ffc183 100644 --- a/tests/components/mobile_app/test_sensor.py +++ b/tests/components/mobile_app/test_sensor.py @@ -1,10 +1,8 @@ """Entity tests for mobile_app.""" from http import HTTPStatus -from typing import Any from unittest.mock import patch -from aiohttp.test_utils import TestClient import pytest from homeassistant.components.sensor import SensorDeviceClass @@ -16,11 +14,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.util.unit_system import ( - METRIC_SYSTEM, - US_CUSTOMARY_SYSTEM, - UnitSystem, -) +from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM @pytest.mark.parametrize( @@ -34,12 +28,12 @@ async def test_sensor( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, - unit_system: UnitSystem, - state_unit: UnitOfTemperature, - state1: str, - state2: str, + create_registrations, + webhook_client, + unit_system, + state_unit, + state1, + state2, ) -> None: """Test that sensors can be registered and updated.""" hass.config.units = unit_system @@ -155,13 +149,13 @@ async def test_sensor( ) async def test_sensor_migration( hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, - unique_id: str, - unit_system: UnitSystem, - state_unit: UnitOfTemperature, - state1: str, - state2: str, + create_registrations, + webhook_client, + unique_id, + unit_system, + state_unit, + state1, + state2, ) -> None: """Test migration to RestoreSensor.""" hass.config.units = unit_system @@ -249,9 +243,7 @@ async def test_sensor_migration( async def test_sensor_must_register( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test that sensors must be registered before updating.""" webhook_id = create_registrations[1]["webhook_id"] @@ -273,8 
+265,8 @@ async def test_sensor_must_register( async def test_sensor_id_no_dupes( hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + create_registrations, + webhook_client, caplog: pytest.LogCaptureFixture, ) -> None: """Test that a duplicate unique ID in registration updates the sensor.""" @@ -339,9 +331,7 @@ async def test_sensor_id_no_dupes( async def test_register_sensor_no_state( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test that sensors can be registered, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] @@ -400,9 +390,7 @@ async def test_register_sensor_no_state( async def test_update_sensor_no_state( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test that sensors can be updated, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] @@ -476,11 +464,11 @@ async def test_update_sensor_no_state( ) async def test_sensor_datetime( hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, - device_class: SensorDeviceClass, - native_value: str, - state_value: str, + create_registrations, + webhook_client, + device_class, + native_value, + state_value, ) -> None: """Test that sensors can be registered and updated.""" webhook_id = create_registrations[1]["webhook_id"] @@ -517,8 +505,8 @@ async def test_sensor_datetime( async def test_default_disabling_entity( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + create_registrations, + webhook_client, ) -> None: """Test that sensors can be disabled by default upon registration.""" webhook_id = create_registrations[1]["webhook_id"] @@ -555,8 +543,8 @@ async def test_default_disabling_entity( async def test_updating_disabled_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + create_registrations, + webhook_client, ) -> None: """Test that sensors return error if disabled in instance.""" webhook_id = create_registrations[1]["webhook_id"] @@ -622,78 +610,3 @@ async def test_updating_disabled_sensor( json = await update_resp.json() assert json["battery_state"]["success"] is True assert json["battery_state"]["is_disabled"] is True - - -async def test_recreate_correct_from_entity_registry( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, -) -> None: - """Test that sensors can be re-created from entity registry.""" - webhook_id = create_registrations[1]["webhook_id"] - webhook_url = f"/api/webhook/{webhook_id}" - - reg_resp = await webhook_client.post( - webhook_url, - json={ - "type": "register_sensor", - "data": { - "device_class": "battery", - "icon": "mdi:battery", - "name": "Battery State", - "state": 100, - "type": "sensor", - "unique_id": "battery_state", - "unit_of_measurement": PERCENTAGE, - "state_class": "measurement", - }, - }, - ) - - assert reg_resp.status == HTTPStatus.CREATED - - update_resp = await webhook_client.post( - webhook_url, - json={ - 
"type": "update_sensor_states", - "data": [ - { - "icon": "mdi:battery-unknown", - "state": 123, - "type": "sensor", - "unique_id": "battery_state", - }, - ], - }, - ) - - assert update_resp.status == HTTPStatus.OK - - entity = hass.states.get("sensor.test_1_battery_state") - - assert entity is not None - entity_entry = entity_registry.async_get("sensor.test_1_battery_state") - assert entity_entry is not None - - assert entity_entry.capabilities == { - "state_class": "measurement", - } - - entry = hass.config_entries.async_entries("mobile_app")[1] - - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - assert hass.states.get("sensor.test_1_battery_state").state == STATE_UNAVAILABLE - - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - entity_entry = entity_registry.async_get("sensor.test_1_battery_state") - assert entity_entry is not None - assert hass.states.get("sensor.test_1_battery_state") is not None - - assert entity_entry.capabilities == { - "state_class": "measurement", - } diff --git a/tests/components/mobile_app/test_timers.py b/tests/components/mobile_app/test_timers.py deleted file mode 100644 index 9f7d4cebc58..00000000000 --- a/tests/components/mobile_app/test_timers.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Test mobile app timers.""" - -from unittest.mock import patch - -import pytest - -from homeassistant.components.mobile_app import DATA_DEVICES, DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import intent as intent_helper - - -@pytest.mark.parametrize( - ("intent_args", "message"), - [ - ( - {}, - "0:02:00 timer finished", - ), - ( - {"name": {"value": "pizza"}}, - "pizza finished", - ), - ], -) -async def test_timer_events( - hass: HomeAssistant, push_registration, intent_args: dict, message: str -) -> None: - """Test for timer events.""" - webhook_id = push_registration["webhook_id"] - device_id = hass.data[DOMAIN][DATA_DEVICES][webhook_id].id - - await intent_helper.async_handle( - hass, - "test", - intent_helper.INTENT_START_TIMER, - { - "minutes": {"value": 2}, - } - | intent_args, - device_id=device_id, - ) - - with patch( - "homeassistant.components.mobile_app.notify.MobileAppNotificationService.async_send_message" - ) as mock_send_message: - await intent_helper.async_handle( - hass, - "test", - intent_helper.INTENT_DECREASE_TIMER, - { - "minutes": {"value": 2}, - }, - device_id=device_id, - ) - await hass.async_block_till_done() - - assert mock_send_message.mock_calls[0][2] == { - "target": [webhook_id], - "message": message, - "data": { - "channel": "Timers", - "group": "timers", - "importance": "high", - "ttl": 0, - "priority": "high", - "push": { - "interruption-level": "time-sensitive", - }, - }, - } diff --git a/tests/components/mobile_app/test_webhook.py b/tests/components/mobile_app/test_webhook.py index dda5f369ad5..ca5c9936409 100644 --- a/tests/components/mobile_app/test_webhook.py +++ b/tests/components/mobile_app/test_webhook.py @@ -1,19 +1,16 @@ """Webhook tests for mobile_app.""" from binascii import unhexlify -from collections.abc import Callable from http import HTTPStatus import json -from typing import Any from unittest.mock import ANY, patch -from aiohttp.test_utils import TestClient from nacl.encoding import Base64Encoder from nacl.secret import SecretBox import pytest from homeassistant.components.camera import CameraEntityFeature -from homeassistant.components.mobile_app.const import CONF_SECRET, DATA_DEVICES, 
DOMAIN +from homeassistant.components.mobile_app.const import CONF_SECRET, DOMAIN from homeassistant.components.tag import EVENT_TAG_SCANNED from homeassistant.components.zone import DOMAIN as ZONE_DOMAIN from homeassistant.const import ( @@ -34,7 +31,7 @@ from tests.components.conversation import MockAgent @pytest.fixture -async def homeassistant(hass: HomeAssistant) -> None: +async def homeassistant(hass): """Load the homeassistant integration.""" await async_setup_component(hass, "homeassistant", {}) @@ -96,12 +93,11 @@ def decrypt_payload_legacy(secret_key, encrypted_data): async def test_webhook_handle_render_template( - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + create_registrations, webhook_client ) -> None: """Test that we render templates properly.""" resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json={ "type": "render_template", "data": { @@ -125,15 +121,13 @@ async def test_webhook_handle_render_template( async def test_webhook_handle_call_services( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test that we call services properly.""" calls = async_mock_service(hass, "test", "mobile_app") resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json=CALL_SERVICE, ) @@ -143,9 +137,7 @@ async def test_webhook_handle_call_services( async def test_webhook_handle_fire_event( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test that we can fire events.""" events = [] @@ -158,7 +150,7 @@ async def test_webhook_handle_fire_event( hass.bus.async_listen("test_event", store_event) resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", json=FIRE_EVENT + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json=FIRE_EVENT ) assert resp.status == HTTPStatus.OK @@ -169,7 +161,7 @@ async def test_webhook_handle_fire_event( assert events[0].data["hello"] == "yo world" -async def test_webhook_update_registration(webhook_client: TestClient) -> None: +async def test_webhook_update_registration(webhook_client) -> None: """Test that a we can update an existing registration via webhook.""" register_resp = await webhook_client.post( "/api/mobile_app/registrations", json=REGISTER_CLEARTEXT @@ -194,9 +186,7 @@ async def test_webhook_update_registration(webhook_client: TestClient) -> None: async def test_webhook_handle_get_zones( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test that we can get zones properly.""" # Zone is already loaded as part of the fixture, @@ -224,7 +214,7 @@ async def test_webhook_handle_get_zones( await hass.services.async_call(ZONE_DOMAIN, "reload", blocking=True) resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json={"type": "get_zones"}, ) @@ -248,14 +238,11 @@ async def test_webhook_handle_get_zones( async def 
test_webhook_handle_get_config( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test that we can get config properly.""" webhook_id = create_registrations[1]["webhook_id"] webhook_url = f"/api/webhook/{webhook_id}" - device: dr.DeviceEntry = hass.data[DOMAIN][DATA_DEVICES][webhook_id] # Create two entities for sensor in ( @@ -293,7 +280,6 @@ async def test_webhook_handle_get_config( "latitude": hass_config["latitude"], "longitude": hass_config["longitude"], "elevation": hass_config["elevation"], - "hass_device_id": device.id, "unit_system": hass_config["unit_system"], "location_name": hass_config["location_name"], "time_zone": hass_config["time_zone"], @@ -311,13 +297,11 @@ async def test_webhook_handle_get_config( async def test_webhook_returns_error_incorrect_json( - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, - caplog: pytest.LogCaptureFixture, + webhook_client, create_registrations, caplog: pytest.LogCaptureFixture ) -> None: """Test that an error is returned when JSON is invalid.""" resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", data="not json" + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), data="not json" ) assert resp.status == HTTPStatus.BAD_REQUEST @@ -337,11 +321,7 @@ async def test_webhook_returns_error_incorrect_json( ], ) async def test_webhook_handle_decryption( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, - msg: dict[str, Any], - generate_response: Callable[[HomeAssistant], dict[str, Any]], + hass: HomeAssistant, webhook_client, create_registrations, msg, generate_response ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -350,7 +330,7 @@ async def test_webhook_handle_decryption( container = {"type": msg["type"], "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container + "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container ) assert resp.status == HTTPStatus.OK @@ -364,8 +344,7 @@ async def test_webhook_handle_decryption( async def test_webhook_handle_decryption_legacy( - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + webhook_client, create_registrations ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -374,7 +353,7 @@ async def test_webhook_handle_decryption_legacy( container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container + "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container ) assert resp.status == HTTPStatus.OK @@ -388,9 +367,7 @@ async def test_webhook_handle_decryption_legacy( async def test_webhook_handle_decryption_fail( - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, - caplog: pytest.LogCaptureFixture, + webhook_client, create_registrations, caplog: pytest.LogCaptureFixture ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -399,7 +376,7 @@ async def test_webhook_handle_decryption_fail( data = encrypt_payload(key, 
RENDER_TEMPLATE["data"]) container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container + "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container ) assert resp.status == HTTPStatus.OK @@ -412,7 +389,7 @@ async def test_webhook_handle_decryption_fail( data = encrypt_payload(key, "{not_valid", encode_json=False) container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container + "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container ) assert resp.status == HTTPStatus.OK @@ -424,7 +401,7 @@ async def test_webhook_handle_decryption_fail( data = encrypt_payload(key[::-1], RENDER_TEMPLATE["data"]) container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container + "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container ) assert resp.status == HTTPStatus.OK @@ -433,9 +410,7 @@ async def test_webhook_handle_decryption_fail( async def test_webhook_handle_decryption_legacy_fail( - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, - caplog: pytest.LogCaptureFixture, + webhook_client, create_registrations, caplog: pytest.LogCaptureFixture ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -444,7 +419,7 @@ async def test_webhook_handle_decryption_legacy_fail( data = encrypt_payload_legacy(key, RENDER_TEMPLATE["data"]) container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container + "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container ) assert resp.status == HTTPStatus.OK @@ -457,7 +432,7 @@ async def test_webhook_handle_decryption_legacy_fail( data = encrypt_payload_legacy(key, "{not_valid", encode_json=False) container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container + "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container ) assert resp.status == HTTPStatus.OK @@ -469,7 +444,7 @@ async def test_webhook_handle_decryption_legacy_fail( data = encrypt_payload_legacy(key[::-1], RENDER_TEMPLATE["data"]) container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container + "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container ) assert resp.status == HTTPStatus.OK @@ -478,8 +453,7 @@ async def test_webhook_handle_decryption_legacy_fail( async def test_webhook_handle_decryption_legacy_upgrade( - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + webhook_client, create_registrations ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -490,7 +464,7 @@ async def test_webhook_handle_decryption_legacy_upgrade( container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - 
f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container + "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container ) assert resp.status == HTTPStatus.OK @@ -508,7 +482,7 @@ async def test_webhook_handle_decryption_legacy_upgrade( container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container + "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container ) assert resp.status == HTTPStatus.OK @@ -526,7 +500,7 @@ async def test_webhook_handle_decryption_legacy_upgrade( container = {"type": "render_template", "encrypted": True, "encrypted_data": data} resp = await webhook_client.post( - f"/api/webhook/{create_registrations[0]['webhook_id']}", json=container + "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container ) assert resp.status == HTTPStatus.OK @@ -534,12 +508,11 @@ async def test_webhook_handle_decryption_legacy_upgrade( async def test_webhook_requires_encryption( - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + webhook_client, create_registrations ) -> None: """Test that encrypted registrations only accept encrypted data.""" resp = await webhook_client.post( - f"/api/webhook/{create_registrations[0]['webhook_id']}", + "/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=RENDER_TEMPLATE, ) @@ -552,15 +525,13 @@ async def test_webhook_requires_encryption( async def test_webhook_update_location_without_locations( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, webhook_client, create_registrations ) -> None: """Test that location can be updated.""" # start off with a location set by name resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json={ "type": "update_location", "data": {"location_name": STATE_HOME}, @@ -575,7 +546,7 @@ async def test_webhook_update_location_without_locations( # set location to an 'unknown' state resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json={ "type": "update_location", "data": {"altitude": 123}, @@ -591,13 +562,11 @@ async def test_webhook_update_location_without_locations( async def test_webhook_update_location_with_gps( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, webhook_client, create_registrations ) -> None: """Test that location can be updated.""" resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json={ "type": "update_location", "data": {"gps": [1, 2], "gps_accuracy": 10, "altitude": -10}, @@ -615,13 +584,11 @@ async def test_webhook_update_location_with_gps( async def test_webhook_update_location_with_gps_without_accuracy( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, webhook_client, create_registrations ) -> None: """Test that location can be updated.""" resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", + 
"/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json={ "type": "update_location", "data": {"gps": [1, 2]}, @@ -635,9 +602,7 @@ async def test_webhook_update_location_with_gps_without_accuracy( async def test_webhook_update_location_with_location_name( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, webhook_client, create_registrations ) -> None: """Test that location can be updated.""" @@ -659,7 +624,7 @@ async def test_webhook_update_location_with_location_name( await hass.services.async_call(ZONE_DOMAIN, "reload", blocking=True) resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json={ "type": "update_location", "data": {"location_name": "zone_name"}, @@ -672,7 +637,7 @@ async def test_webhook_update_location_with_location_name( assert state.state == "zone_name" resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json={ "type": "update_location", "data": {"location_name": STATE_HOME}, @@ -685,7 +650,7 @@ async def test_webhook_update_location_with_location_name( assert state.state == STATE_HOME resp = await webhook_client.post( - f"/api/webhook/{create_registrations[1]['webhook_id']}", + "/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json={ "type": "update_location", "data": {"location_name": STATE_NOT_HOME}, @@ -699,9 +664,7 @@ async def test_webhook_update_location_with_location_name( async def test_webhook_enable_encryption( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, webhook_client, create_registrations ) -> None: """Test that encryption can be added to a reg initially created without.""" webhook_id = create_registrations[1]["webhook_id"] @@ -752,9 +715,7 @@ async def test_webhook_enable_encryption( async def test_webhook_camera_stream_non_existent( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test fetching camera stream URLs for a non-existent camera.""" webhook_id = create_registrations[1]["webhook_id"] @@ -773,9 +734,7 @@ async def test_webhook_camera_stream_non_existent( async def test_webhook_camera_stream_non_hls( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test fetching camera stream URLs for a non-HLS/stream-supporting camera.""" hass.states.async_set("camera.non_stream_camera", "idle", {"supported_features": 0}) @@ -800,9 +759,7 @@ async def test_webhook_camera_stream_non_hls( async def test_webhook_camera_stream_stream_available( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: TestClient, + hass: HomeAssistant, create_registrations, webhook_client ) -> None: """Test fetching camera stream URLs for an HLS/stream-supporting camera.""" hass.states.async_set( @@ -832,9 +789,7 @@ async def test_webhook_camera_stream_stream_available( async def test_webhook_camera_stream_stream_available_but_errors( - hass: HomeAssistant, - create_registrations: tuple[dict[str, Any], dict[str, Any]], - webhook_client: 
TestClient,
+    hass: HomeAssistant, create_registrations, webhook_client
 ) -> None:
     """Test fetching camera stream URLs for an HLS/stream-supporting camera but that streaming errors."""
     hass.states.async_set(
@@ -866,8 +821,8 @@ async def test_webhook_handle_scan_tag(
     hass: HomeAssistant,
     device_registry: dr.DeviceRegistry,
-    create_registrations: tuple[dict[str, Any], dict[str, Any]],
-    webhook_client: TestClient,
+    create_registrations,
+    webhook_client,
 ) -> None:
     """Test that we can scan tags."""
     device = device_registry.async_get_device(identifiers={(DOMAIN, "mock-device-id")})
@@ -876,7 +831,7 @@ async def test_webhook_handle_scan_tag(
     events = async_capture_events(hass, EVENT_TAG_SCANNED)
     resp = await webhook_client.post(
-        f"/api/webhook/{create_registrations[1]['webhook_id']}",
+        "/api/webhook/{}".format(create_registrations[1]["webhook_id"]),
         json={"type": "scan_tag", "data": {"tag_id": "mock-tag-id"}},
     )
@@ -890,9 +845,7 @@ async def test_register_sensor_limits_state_class(
-    hass: HomeAssistant,
-    create_registrations: tuple[dict[str, Any], dict[str, Any]],
-    webhook_client: TestClient,
+    hass: HomeAssistant, create_registrations, webhook_client
 ) -> None:
     """Test that we limit state classes to sensors only."""
     webhook_id = create_registrations[1]["webhook_id"]
@@ -935,8 +888,8 @@ async def test_reregister_sensor(
     hass: HomeAssistant,
     entity_registry: er.EntityRegistry,
-    create_registrations: tuple[dict[str, Any], dict[str, Any]],
-    webhook_client: TestClient,
+    create_registrations,
+    webhook_client,
 ) -> None:
     """Test that we can add more info in re-registration."""
     webhook_id = create_registrations[1]["webhook_id"]
@@ -1037,11 +990,11 @@ async def test_reregister_sensor(
     assert entry.original_icon is None
-@pytest.mark.usefixtures("homeassistant")
 async def test_webhook_handle_conversation_process(
     hass: HomeAssistant,
-    create_registrations: tuple[dict[str, Any], dict[str, Any]],
-    webhook_client: TestClient,
+    homeassistant,
+    create_registrations,
+    webhook_client,
     mock_conversation_agent: MockAgent,
 ) -> None:
     """Test that we can converse."""
@@ -1052,7 +1005,7 @@ async def test_webhook_handle_conversation_process(
         return_value=mock_conversation_agent,
     ):
         resp = await webhook_client.post(
-            f"/api/webhook/{create_registrations[1]['webhook_id']}",
+            "/api/webhook/{}".format(create_registrations[1]["webhook_id"]),
             json={
                 "type": "conversation_process",
                 "data": {
@@ -1087,8 +1040,9 @@ async def test_sending_sensor_state(
     hass: HomeAssistant,
     entity_registry: er.EntityRegistry,
-    create_registrations: tuple[dict[str, Any], dict[str, Any]],
-    webhook_client: TestClient,
+    create_registrations,
+    webhook_client,
+    caplog: pytest.LogCaptureFixture,
 ) -> None:
     """Test that we can register and send sensor state as number and None."""
     webhook_id = create_registrations[1]["webhook_id"]
diff --git a/tests/components/mochad/test_light.py b/tests/components/mochad/test_light.py
index 49beebbaec6..872bd3a9d61 100644
--- a/tests/components/mochad/test_light.py
+++ b/tests/components/mochad/test_light.py
@@ -18,7 +18,7 @@ def pymochad_mock():
 @pytest.fixture
-def light_mock(hass: HomeAssistant, brightness: int) -> mochad.MochadLight:
+def light_mock(hass, brightness):
     """Mock light."""
     controller_mock = mock.MagicMock()
     dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness}
diff --git a/tests/components/mochad/test_switch.py b/tests/components/mochad/test_switch.py
index 9fea3b5c14c..750dd48296e 100644
--- a/tests/components/mochad/test_switch.py
+++ b/tests/components/mochad/test_switch.py
@@ -21,7 +21,7 @@ def pymochad_mock():
 @pytest.fixture
-def switch_mock(hass: HomeAssistant) -> mochad.MochadSwitch:
+def switch_mock(hass):
     """Mock switch."""
     controller_mock = mock.MagicMock()
     dev_dict = {"address": "a1", "name": "fake_switch"}
diff --git a/tests/components/modbus/conftest.py b/tests/components/modbus/conftest.py
index cdea046ceea..067fb2d123d 100644
--- a/tests/components/modbus/conftest.py
+++ b/tests/components/modbus/conftest.py
@@ -37,7 +37,7 @@ TEST_PORT_SERIAL = "usb01"
 class ReadResult:
     """Storage class for register read results."""
-    def __init__(self, register_words) -> None:
+    def __init__(self, register_words):
         """Init."""
         self.registers = register_words
         self.bits = register_words
@@ -57,11 +57,11 @@ def check_config_loaded_fixture():
 @pytest.fixture(name="register_words")
 def register_words_fixture():
     """Set default for register_words."""
-    return [0x00]
+    return [0x00, 0x00]
 @pytest.fixture(name="config_addon")
-def config_addon_fixture() -> dict[str, Any] | None:
+def config_addon_fixture():
     """Add extra configuration items."""
     return None
@@ -192,9 +192,7 @@ async def mock_test_state_fixture(
 @pytest.fixture(name="mock_modbus_ha")
-async def mock_modbus_ha_fixture(
-    hass: HomeAssistant, mock_modbus: mock.AsyncMock
-) -> mock.AsyncMock:
+async def mock_modbus_ha_fixture(hass, mock_modbus):
     """Load homeassistant to allow service calls."""
     assert await async_setup_component(hass, "homeassistant", {})
     await hass.async_block_till_done()
diff --git a/tests/components/modbus/test_binary_sensor.py b/tests/components/modbus/test_binary_sensor.py
index 24293377174..6aae0e7feae 100644
--- a/tests/components/modbus/test_binary_sensor.py
+++ b/tests/components/modbus/test_binary_sensor.py
@@ -3,7 +3,6 @@
 import pytest
 from homeassistant.components.binary_sensor import DOMAIN as SENSOR_DOMAIN
-from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY
 from homeassistant.components.modbus.const import (
     CALL_TYPE_COIL,
     CALL_TYPE_DISCRETE,
@@ -16,12 +15,10 @@ from homeassistant.components.modbus.const import (
     MODBUS_DOMAIN,
 )
 from homeassistant.const import (
-    ATTR_ENTITY_ID,
     CONF_ADDRESS,
     CONF_BINARY_SENSORS,
     CONF_DEVICE_CLASS,
     CONF_NAME,
-    CONF_PLATFORM,
     CONF_SCAN_INTERVAL,
     CONF_SLAVE,
     CONF_UNIQUE_ID,
@@ -29,7 +26,7 @@ from homeassistant.const import (
     STATE_ON,
     STATE_UNAVAILABLE,
 )
-from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State
+from homeassistant.core import HomeAssistant, State
 from homeassistant.helpers import entity_registry as er
 from homeassistant.setup import async_setup_component
@@ -215,20 +212,14 @@ async def test_service_binary_sensor_update(
     """Run test for service homeassistant.update_entity."""
     await hass.services.async_call(
-        HOMEASSISTANT_DOMAIN,
-        SERVICE_UPDATE_ENTITY,
-        {ATTR_ENTITY_ID: ENTITY_ID},
-        blocking=True,
+        "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True
     )
     await hass.async_block_till_done()
     assert hass.states.get(ENTITY_ID).state == STATE_OFF
     mock_modbus_ha.read_coils.return_value = ReadResult([0x01])
     await hass.services.async_call(
-        HOMEASSISTANT_DOMAIN,
-        SERVICE_UPDATE_ENTITY,
-        {ATTR_ENTITY_ID: ENTITY_ID},
-        blocking=True,
+        "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True
     )
     await
hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_ON @@ -437,7 +428,7 @@ async def test_no_discovery_info_binary_sensor( assert await async_setup_component( hass, SENSOR_DOMAIN, - {SENSOR_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, + {SENSOR_DOMAIN: {"platform": MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert SENSOR_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_climate.py b/tests/components/modbus/test_climate.py index d34846639b5..a52285b22d7 100644 --- a/tests/components/modbus/test_climate.py +++ b/tests/components/modbus/test_climate.py @@ -20,10 +20,6 @@ from homeassistant.components.climate import ( FAN_OFF, FAN_ON, FAN_TOP, - SERVICE_SET_FAN_MODE, - SERVICE_SET_HVAC_MODE, - SERVICE_SET_SWING_MODE, - SERVICE_SET_TEMPERATURE, SWING_BOTH, SWING_HORIZONTAL, SWING_OFF, @@ -31,7 +27,6 @@ from homeassistant.components.climate import ( SWING_VERTICAL, HVACMode, ) -from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CONF_CLIMATES, CONF_DATA_TYPE, @@ -71,17 +66,15 @@ from homeassistant.components.modbus.const import ( DataType, ) from homeassistant.const import ( - ATTR_ENTITY_ID, ATTR_TEMPERATURE, CONF_ADDRESS, CONF_NAME, - CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, STATE_UNAVAILABLE, STATE_UNKNOWN, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State +from homeassistant.core import HomeAssistant, State from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY_NAME, ReadResult @@ -159,13 +152,13 @@ ENTITY_ID = f"{CLIMATE_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_") CONF_HVAC_MODE_REGISTER: { CONF_ADDRESS: 11, CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_OFF: 0, - CONF_HVAC_MODE_HEAT: 1, - CONF_HVAC_MODE_COOL: 2, - CONF_HVAC_MODE_HEAT_COOL: 3, - CONF_HVAC_MODE_DRY: 4, - CONF_HVAC_MODE_FAN_ONLY: 5, - CONF_HVAC_MODE_AUTO: 6, + "state_off": 0, + "state_heat": 1, + "state_cool": 2, + "state_heat_cool": 3, + "state_dry": 4, + "state_fan_only": 5, + "state_auto": 6, }, }, } @@ -183,13 +176,13 @@ ENTITY_ID = f"{CLIMATE_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_") CONF_ADDRESS: 11, CONF_WRITE_REGISTERS: True, CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_OFF: 0, - CONF_HVAC_MODE_HEAT: 1, - CONF_HVAC_MODE_COOL: 2, - CONF_HVAC_MODE_HEAT_COOL: 3, - CONF_HVAC_MODE_DRY: 4, - CONF_HVAC_MODE_FAN_ONLY: 5, - CONF_HVAC_MODE_AUTO: 6, + "state_off": 0, + "state_heat": 1, + "state_cool": 2, + "state_heat_cool": 3, + "state_dry": 4, + "state_fan_only": 5, + "state_auto": 6, }, }, } @@ -508,10 +501,7 @@ async def test_service_climate_update( """Run test for service homeassistant.update_entity.""" mock_modbus_ha.read_holding_registers.return_value = ReadResult(register_words) await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: ENTITY_ID}, - blocking=True, + "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == result @@ -626,10 +616,7 @@ async def test_service_climate_fan_update( """Run test for service homeassistant.update_entity.""" mock_modbus_ha.read_holding_registers.return_value = ReadResult(register_words) await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: ENTITY_ID}, - blocking=True, + "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True ) await hass.async_block_till_done() assert 
hass.states.get(ENTITY_ID).attributes[ATTR_FAN_MODE] == result @@ -769,10 +756,7 @@ async def test_service_climate_swing_update( """Run test for service homeassistant.update_entity.""" mock_modbus_ha.read_holding_registers.return_value = ReadResult(register_words) await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: ENTITY_ID}, - blocking=True, + "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).attributes[ATTR_SWING_MODE] == result @@ -782,7 +766,7 @@ async def test_service_climate_swing_update( ("temperature", "result", "do_config"), [ ( - 31, + 35, [0x00], { CONF_CLIMATES: [ @@ -797,7 +781,7 @@ async def test_service_climate_swing_update( }, ), ( - 32, + 36, [0x00, 0x00], { CONF_CLIMATES: [ @@ -812,7 +796,7 @@ async def test_service_climate_swing_update( }, ), ( - 33.5, + 37.5, [0x00, 0x00], { CONF_CLIMATES: [ @@ -827,7 +811,7 @@ async def test_service_climate_swing_update( }, ), ( - "34", + "39", [0x00, 0x00, 0x00, 0x00], { CONF_CLIMATES: [ @@ -866,9 +850,9 @@ async def test_service_climate_set_temperature( mock_modbus_ha.read_holding_registers.return_value = ReadResult(result) await hass.services.async_call( CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, + "set_temperature", { - ATTR_ENTITY_ID: ENTITY_ID, + "entity_id": ENTITY_ID, ATTR_TEMPERATURE: temperature, }, blocking=True, @@ -977,9 +961,9 @@ async def test_service_set_hvac_mode( await hass.services.async_call( CLIMATE_DOMAIN, - SERVICE_SET_HVAC_MODE, + "set_hvac_mode", { - ATTR_ENTITY_ID: ENTITY_ID, + "entity_id": ENTITY_ID, ATTR_HVAC_MODE: hvac_mode, }, blocking=True, @@ -1040,9 +1024,9 @@ async def test_service_set_fan_mode( mock_modbus_ha.read_holding_registers.return_value = ReadResult(result) await hass.services.async_call( CLIMATE_DOMAIN, - SERVICE_SET_FAN_MODE, + "set_fan_mode", { - ATTR_ENTITY_ID: ENTITY_ID, + "entity_id": ENTITY_ID, ATTR_FAN_MODE: fan_mode, }, blocking=True, @@ -1103,9 +1087,9 @@ async def test_service_set_swing_mode( mock_modbus_ha.read_holding_registers.return_value = ReadResult(result) await hass.services.async_call( CLIMATE_DOMAIN, - SERVICE_SET_SWING_MODE, + "set_swing_mode", { - ATTR_ENTITY_ID: ENTITY_ID, + "entity_id": ENTITY_ID, ATTR_SWING_MODE: swing_mode, }, blocking=True, @@ -1190,7 +1174,7 @@ async def test_no_discovery_info_climate( assert await async_setup_component( hass, CLIMATE_DOMAIN, - {CLIMATE_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, + {CLIMATE_DOMAIN: {"platform": MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert CLIMATE_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_cover.py b/tests/components/modbus/test_cover.py index ae709f483e1..0860b3136ba 100644 --- a/tests/components/modbus/test_cover.py +++ b/tests/components/modbus/test_cover.py @@ -3,8 +3,7 @@ from pymodbus.exceptions import ModbusException import pytest -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, CoverState -from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, CALL_TYPE_REGISTER_HOLDING, @@ -19,18 +18,18 @@ from homeassistant.components.modbus.const import ( MODBUS_DOMAIN, ) from homeassistant.const import ( - ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COVERS, CONF_NAME, - CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, - SERVICE_CLOSE_COVER, - SERVICE_OPEN_COVER, + STATE_CLOSED, 
+ STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, STATE_UNAVAILABLE, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State +from homeassistant.core import HomeAssistant, State from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY_NAME, ReadResult @@ -100,23 +99,23 @@ async def test_config_cover(hass: HomeAssistant, mock_modbus) -> None: [ ( [0x00], - CoverState.CLOSED, + STATE_CLOSED, ), ( [0x80], - CoverState.CLOSED, + STATE_CLOSED, ), ( [0xFE], - CoverState.CLOSED, + STATE_CLOSED, ), ( [0xFF], - CoverState.OPEN, + STATE_OPEN, ), ( [0x01], - CoverState.OPEN, + STATE_OPEN, ), ], ) @@ -144,23 +143,23 @@ async def test_coil_cover(hass: HomeAssistant, expected, mock_do_cycle) -> None: [ ( [0x00], - CoverState.CLOSED, + STATE_CLOSED, ), ( [0x80], - CoverState.OPEN, + STATE_OPEN, ), ( [0xFE], - CoverState.OPEN, + STATE_OPEN, ), ( [0xFF], - CoverState.OPEN, + STATE_OPEN, ), ( [0x01], - CoverState.OPEN, + STATE_OPEN, ), ], ) @@ -186,29 +185,23 @@ async def test_register_cover(hass: HomeAssistant, expected, mock_do_cycle) -> N async def test_service_cover_update(hass: HomeAssistant, mock_modbus_ha) -> None: """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - "update_entity", - {ATTR_ENTITY_ID: ENTITY_ID}, - blocking=True, + "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True ) - assert hass.states.get(ENTITY_ID).state == CoverState.CLOSED + assert hass.states.get(ENTITY_ID).state == STATE_CLOSED mock_modbus_ha.read_holding_registers.return_value = ReadResult([0x01]) await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: ENTITY_ID}, - blocking=True, + "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True ) - assert hass.states.get(ENTITY_ID).state == CoverState.OPEN + assert hass.states.get(ENTITY_ID).state == STATE_OPEN @pytest.mark.parametrize( "mock_test_state", [ - (State(ENTITY_ID, CoverState.CLOSED),), - (State(ENTITY_ID, CoverState.CLOSING),), - (State(ENTITY_ID, CoverState.OPENING),), - (State(ENTITY_ID, CoverState.OPEN),), + (State(ENTITY_ID, STATE_CLOSED),), + (State(ENTITY_ID, STATE_CLOSING),), + (State(ENTITY_ID, STATE_OPENING),), + (State(ENTITY_ID, STATE_OPEN),), ], indirect=True, ) @@ -267,27 +260,27 @@ async def test_service_cover_move(hass: HomeAssistant, mock_modbus_ha) -> None: mock_modbus_ha.read_holding_registers.return_value = ReadResult([0x01]) await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True + "cover", "open_cover", {"entity_id": ENTITY_ID}, blocking=True ) - assert hass.states.get(ENTITY_ID).state == CoverState.OPEN + assert hass.states.get(ENTITY_ID).state == STATE_OPEN mock_modbus_ha.read_holding_registers.return_value = ReadResult([0x00]) await hass.services.async_call( - COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True + "cover", "close_cover", {"entity_id": ENTITY_ID}, blocking=True ) - assert hass.states.get(ENTITY_ID).state == CoverState.CLOSED + assert hass.states.get(ENTITY_ID).state == STATE_CLOSED await mock_modbus_ha.reset() mock_modbus_ha.read_holding_registers.side_effect = ModbusException("fail write_") await hass.services.async_call( - COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True + "cover", "close_cover", {"entity_id": ENTITY_ID}, blocking=True ) assert mock_modbus_ha.read_holding_registers.called assert 
hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE mock_modbus_ha.read_coils.side_effect = ModbusException("fail write_") await hass.services.async_call( - COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID2}, blocking=True + "cover", "close_cover", {"entity_id": ENTITY_ID2}, blocking=True ) assert hass.states.get(ENTITY_ID2).state == STATE_UNAVAILABLE @@ -300,7 +293,7 @@ async def test_no_discovery_info_cover( assert await async_setup_component( hass, COVER_DOMAIN, - {COVER_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, + {COVER_DOMAIN: {"platform": MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert COVER_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_fan.py b/tests/components/modbus/test_fan.py index 2afc6314048..d52b9dc309a 100644 --- a/tests/components/modbus/test_fan.py +++ b/tests/components/modbus/test_fan.py @@ -4,7 +4,6 @@ from pymodbus.exceptions import ModbusException import pytest from homeassistant.components.fan import DOMAIN as FAN_DOMAIN -from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, CALL_TYPE_DISCRETE, @@ -20,21 +19,17 @@ from homeassistant.components.modbus.const import ( MODBUS_DOMAIN, ) from homeassistant.const import ( - ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_NAME, - CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State +from homeassistant.core import HomeAssistant, State from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY_NAME, ReadResult @@ -274,12 +269,12 @@ async def test_fan_service_turn( assert hass.states.get(ENTITY_ID).state == STATE_OFF await hass.services.async_call( - FAN_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID} + "fan", "turn_on", service_data={"entity_id": ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_ON await hass.services.async_call( - FAN_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} + "fan", "turn_off", service_data={"entity_id": ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF @@ -287,26 +282,26 @@ async def test_fan_service_turn( mock_modbus.read_holding_registers.return_value = ReadResult([0x01]) assert hass.states.get(ENTITY_ID2).state == STATE_OFF await hass.services.async_call( - FAN_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} + "fan", "turn_on", service_data={"entity_id": ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_ON mock_modbus.read_holding_registers.return_value = ReadResult([0x00]) await hass.services.async_call( - FAN_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID2} + "fan", "turn_off", service_data={"entity_id": ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_OFF mock_modbus.write_register.side_effect = ModbusException("fail write_") await hass.services.async_call( - FAN_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} + "fan", "turn_on", service_data={"entity_id": ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_UNAVAILABLE mock_modbus.write_coil.side_effect = ModbusException("fail write_") await 
hass.services.async_call( - FAN_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} + "fan", "turn_off", service_data={"entity_id": ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE @@ -330,18 +325,12 @@ async def test_fan_service_turn( async def test_service_fan_update(hass: HomeAssistant, mock_modbus_ha) -> None: """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: ENTITY_ID}, - blocking=True, + "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True ) assert hass.states.get(ENTITY_ID).state == STATE_OFF mock_modbus_ha.read_coils.return_value = ReadResult([0x01]) await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: ENTITY_ID}, - blocking=True, + "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True ) assert hass.states.get(ENTITY_ID).state == STATE_ON @@ -354,7 +343,7 @@ async def test_no_discovery_info_fan( assert await async_setup_component( hass, FAN_DOMAIN, - {FAN_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, + {FAN_DOMAIN: {"platform": MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert FAN_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_init.py b/tests/components/modbus/test_init.py index 3b8a76f5606..920003ad0c9 100644 --- a/tests/components/modbus/test_init.py +++ b/tests/components/modbus/test_init.py @@ -48,25 +48,37 @@ from homeassistant.components.modbus.const import ( CONF_FAN_MODE_HIGH, CONF_FAN_MODE_OFF, CONF_FAN_MODE_ON, + CONF_FAN_MODE_REGISTER, CONF_FAN_MODE_VALUES, + CONF_HVAC_MODE_COOL, + CONF_HVAC_MODE_DRY, + CONF_HVAC_MODE_HEAT, + CONF_HVAC_MODE_HEAT_COOL, + CONF_HVAC_MODE_REGISTER, + CONF_HVAC_MODE_VALUES, + CONF_HVAC_ONOFF_REGISTER, CONF_INPUT_TYPE, CONF_MSG_WAIT, CONF_PARITY, + CONF_RETRIES, CONF_SLAVE_COUNT, CONF_STOPBITS, CONF_SWAP, CONF_SWAP_BYTE, CONF_SWAP_WORD, CONF_SWAP_WORD_BYTE, + CONF_SWING_MODE_REGISTER, CONF_SWING_MODE_SWING_BOTH, CONF_SWING_MODE_SWING_OFF, CONF_SWING_MODE_SWING_ON, CONF_SWING_MODE_VALUES, + CONF_TARGET_TEMP, CONF_VIRTUAL_COUNT, DEFAULT_SCAN_INTERVAL, MODBUS_DOMAIN as DOMAIN, RTUOVERTCP, SERIAL, + SERVICE_RESTART, SERVICE_STOP, SERVICE_WRITE_COIL, SERVICE_WRITE_REGISTER, @@ -76,6 +88,7 @@ from homeassistant.components.modbus.const import ( ) from homeassistant.components.modbus.validators import ( check_config, + check_hvac_target_temp_registers, duplicate_fan_mode_validator, duplicate_swing_mode_validator, hvac_fixedsize_reglist_validator, @@ -444,6 +457,27 @@ async def test_check_config(hass: HomeAssistant, do_config) -> None: ], } ], + [ + { + CONF_NAME: TEST_MODBUS_NAME, + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_TIMEOUT: 3, + CONF_SENSORS: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + }, + { + CONF_NAME: TEST_ENTITY_NAME + " 2", + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + }, + ], + } + ], ], ) async def test_check_config_sensor(hass: HomeAssistant, do_config) -> None: @@ -476,6 +510,225 @@ async def test_check_config_sensor(hass: HomeAssistant, do_config) -> None: ], } ], + [ + { + CONF_NAME: TEST_MODBUS_NAME, + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_TIMEOUT: 3, + CONF_CLIMATES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + }, + { + CONF_NAME: TEST_ENTITY_NAME + " 2", + CONF_ADDRESS: 117, + CONF_SLAVE: 
0, + }, + ], + } + ], + [ + { + CONF_NAME: TEST_MODBUS_NAME, + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_TIMEOUT: 3, + CONF_CLIMATES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 118, + CONF_SLAVE: 0, + CONF_HVAC_MODE_REGISTER: { + CONF_ADDRESS: 119, + CONF_HVAC_MODE_VALUES: { + CONF_HVAC_MODE_COOL: 0, + CONF_HVAC_MODE_HEAT: 1, + }, + }, + }, + { + CONF_NAME: TEST_ENTITY_NAME + " 2", + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + CONF_HVAC_MODE_REGISTER: { + CONF_ADDRESS: 118, + CONF_HVAC_MODE_VALUES: { + CONF_HVAC_MODE_COOL: 0, + CONF_HVAC_MODE_HEAT: 1, + }, + }, + }, + ], + } + ], + [ + { + CONF_NAME: TEST_MODBUS_NAME, + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_TIMEOUT: 3, + CONF_CLIMATES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + CONF_FAN_MODE_REGISTER: { + CONF_ADDRESS: 120, + CONF_FAN_MODE_VALUES: { + CONF_FAN_MODE_ON: 0, + CONF_FAN_MODE_HIGH: 1, + }, + }, + }, + { + CONF_NAME: TEST_ENTITY_NAME + " 2", + CONF_ADDRESS: 118, + CONF_SLAVE: 0, + CONF_TARGET_TEMP: [99], + CONF_FAN_MODE_REGISTER: { + CONF_ADDRESS: 120, + CONF_FAN_MODE_VALUES: { + CONF_FAN_MODE_ON: 0, + CONF_FAN_MODE_HIGH: 1, + }, + }, + }, + ], + } + ], + [ + { + CONF_NAME: TEST_MODBUS_NAME, + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_TIMEOUT: 3, + CONF_CLIMATES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + CONF_FAN_MODE_REGISTER: { + CONF_ADDRESS: 120, + CONF_FAN_MODE_VALUES: { + CONF_FAN_MODE_ON: 0, + CONF_FAN_MODE_HIGH: 1, + }, + }, + }, + { + CONF_NAME: TEST_ENTITY_NAME + " 2", + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + CONF_TARGET_TEMP: [117], + CONF_FAN_MODE_REGISTER: { + CONF_ADDRESS: [121], + CONF_FAN_MODE_VALUES: { + CONF_FAN_MODE_ON: 0, + CONF_FAN_MODE_HIGH: 1, + }, + }, + }, + ], + } + ], + [ # Testing Swing modes + { + CONF_NAME: TEST_MODBUS_NAME, + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_TIMEOUT: 3, + CONF_CLIMATES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + CONF_SWING_MODE_REGISTER: { + CONF_ADDRESS: 120, + CONF_SWING_MODE_VALUES: { + CONF_SWING_MODE_SWING_ON: 0, + CONF_SWING_MODE_SWING_BOTH: 1, + }, + }, + }, + { + CONF_NAME: TEST_ENTITY_NAME + " 2", + CONF_ADDRESS: 119, + CONF_SLAVE: 0, + CONF_TARGET_TEMP: 118, + CONF_SWING_MODE_REGISTER: { + CONF_ADDRESS: [120], + CONF_SWING_MODE_VALUES: { + CONF_SWING_MODE_SWING_ON: 0, + CONF_SWING_MODE_SWING_BOTH: 1, + }, + }, + }, + ], + } + ], + [ + { + CONF_NAME: TEST_MODBUS_NAME, + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_TIMEOUT: 3, + CONF_CLIMATES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 117, + CONF_TARGET_TEMP: [130, 131, 132, 133, 134, 135, 136], + CONF_SLAVE: 0, + CONF_HVAC_MODE_REGISTER: { + CONF_ADDRESS: 118, + CONF_HVAC_MODE_VALUES: { + CONF_HVAC_MODE_COOL: 0, + CONF_HVAC_MODE_HEAT: 2, + CONF_HVAC_MODE_DRY: 3, + }, + }, + CONF_HVAC_ONOFF_REGISTER: 122, + CONF_FAN_MODE_REGISTER: { + CONF_ADDRESS: 120, + CONF_FAN_MODE_VALUES: { + CONF_FAN_MODE_ON: 0, + CONF_FAN_MODE_HIGH: 1, + }, + }, + }, + { + CONF_NAME: TEST_ENTITY_NAME + " 2", + CONF_ADDRESS: 118, + CONF_TARGET_TEMP: [130, 131, 132, 133, 134, 135, 136], + CONF_SLAVE: 0, + CONF_HVAC_MODE_REGISTER: { + CONF_ADDRESS: 130, + CONF_HVAC_MODE_VALUES: { + CONF_HVAC_MODE_COOL: 0, + CONF_HVAC_MODE_HEAT: 2, + CONF_HVAC_MODE_DRY: 3, + }, + }, + CONF_HVAC_ONOFF_REGISTER: 122, + CONF_FAN_MODE_REGISTER: { + 
CONF_ADDRESS: 120, + CONF_FAN_MODE_VALUES: { + CONF_FAN_MODE_ON: 0, + CONF_FAN_MODE_HIGH: 1, + }, + }, + }, + ], + } + ], ], ) async def test_check_config_climate(hass: HomeAssistant, do_config) -> None: @@ -484,6 +737,83 @@ async def test_check_config_climate(hass: HomeAssistant, do_config) -> None: assert len(do_config[0][CONF_CLIMATES]) == 1 +@pytest.mark.parametrize( + "do_config", + [ + [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 1, + CONF_TARGET_TEMP: [117, 121, 119, 150, 151, 152, 156], + CONF_HVAC_MODE_REGISTER: { + CONF_ADDRESS: 119, + CONF_HVAC_MODE_VALUES: { + CONF_HVAC_MODE_COOL: 0, + CONF_HVAC_MODE_HEAT: 1, + CONF_HVAC_MODE_HEAT_COOL: 2, + CONF_HVAC_MODE_DRY: 3, + }, + }, + CONF_HVAC_ONOFF_REGISTER: 117, + CONF_FAN_MODE_REGISTER: { + CONF_ADDRESS: 121, + }, + }, + ], + [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 1, + CONF_TARGET_TEMP: [117], + CONF_HVAC_MODE_REGISTER: { + CONF_ADDRESS: 117, + CONF_HVAC_MODE_VALUES: { + CONF_HVAC_MODE_COOL: 0, + CONF_HVAC_MODE_HEAT: 1, + CONF_HVAC_MODE_HEAT_COOL: 2, + CONF_HVAC_MODE_DRY: 3, + }, + }, + CONF_HVAC_ONOFF_REGISTER: 117, + CONF_FAN_MODE_REGISTER: { + CONF_ADDRESS: 117, + }, + CONF_SWING_MODE_REGISTER: { + CONF_ADDRESS: 117, + }, + }, + ], + [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 1, + CONF_TARGET_TEMP: [117], + CONF_HVAC_MODE_REGISTER: { + CONF_ADDRESS: 117, + CONF_HVAC_MODE_VALUES: { + CONF_HVAC_MODE_COOL: 0, + CONF_HVAC_MODE_HEAT: 1, + CONF_HVAC_MODE_HEAT_COOL: 2, + CONF_HVAC_MODE_DRY: 3, + }, + }, + CONF_HVAC_ONOFF_REGISTER: 117, + CONF_SWING_MODE_REGISTER: { + CONF_ADDRESS: [117], + }, + }, + ], + ], +) +async def test_climate_conflict_addresses(do_config) -> None: + """Test conflicts among the addresses of target temp and other climate addresses.""" + check_hvac_target_temp_registers(do_config[0]) + assert CONF_HVAC_MODE_REGISTER not in do_config[0] + assert CONF_HVAC_ONOFF_REGISTER not in do_config[0] + assert CONF_FAN_MODE_REGISTER not in do_config[0] + assert CONF_SWING_MODE_REGISTER not in do_config[0] + + @pytest.mark.parametrize( "do_config", [ @@ -522,6 +852,157 @@ async def test_duplicate_swing_mode_validator(do_config) -> None: assert len(do_config[CONF_SWING_MODE_VALUES]) == 2 +@pytest.mark.parametrize( + ("do_config", "sensor_cnt"), + [ + ( + [ + { + CONF_NAME: TEST_MODBUS_NAME, + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_TIMEOUT: 3, + CONF_SENSORS: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + }, + { + CONF_NAME: TEST_ENTITY_NAME + "1", + CONF_ADDRESS: 119, + CONF_SLAVE: 0, + }, + ], + }, + ], + 2, + ), + ( + [ + { + CONF_NAME: TEST_MODBUS_NAME, + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_TIMEOUT: 3, + CONF_SENSORS: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + }, + { + CONF_NAME: TEST_ENTITY_NAME + "1", + CONF_ADDRESS: 117, + CONF_SLAVE: 1, + }, + ], + }, + ], + 2, + ), + ( + [ + { + CONF_NAME: TEST_MODBUS_NAME, + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_TIMEOUT: 3, + CONF_SENSORS: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + }, + { + CONF_NAME: TEST_ENTITY_NAME + "1", + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + }, + ], + }, + ], + 1, + ), + ( + [ + { + CONF_NAME: TEST_MODBUS_NAME, + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_TIMEOUT: 3, + CONF_SENSORS: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + }, + { + 
CONF_NAME: TEST_ENTITY_NAME + "1", + CONF_ADDRESS: 119, + CONF_SLAVE: 0, + }, + ], + }, + { + CONF_NAME: TEST_MODBUS_NAME + "1", + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_TIMEOUT: 3, + CONF_SENSORS: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + }, + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 119, + CONF_SLAVE: 0, + }, + ], + }, + ], + 2, + ), + ( + [ + { + CONF_NAME: TEST_MODBUS_NAME, + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_TIMEOUT: 3, + CONF_SENSORS: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 117, + CONF_SLAVE: 0, + }, + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 1179, + CONF_SLAVE: 0, + }, + ], + }, + ], + 1, + ), + ], +) +async def test_duplicate_addresses(hass: HomeAssistant, do_config, sensor_cnt) -> None: + """Test duplicate entity validator.""" + check_config(hass, do_config) + use_inx = len(do_config) - 1 + assert len(do_config[use_inx][CONF_SENSORS]) == sensor_cnt + + @pytest.mark.parametrize( "do_config", [ @@ -571,6 +1052,18 @@ async def test_no_duplicate_names(hass: HomeAssistant, do_config) -> None: } ], }, + { + CONF_TYPE: TCP, + CONF_HOST: TEST_MODBUS_HOST, + CONF_PORT: TEST_PORT_TCP, + CONF_RETRIES: 3, + CONF_SENSORS: [ + { + CONF_NAME: "dummy", + CONF_ADDRESS: 9999, + } + ], + }, { CONF_TYPE: TCP, CONF_HOST: TEST_MODBUS_HOST, @@ -1135,6 +1628,61 @@ async def test_shutdown( assert caplog.text == "" +@pytest.mark.parametrize( + "do_config", + [ + { + CONF_SENSORS: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 51, + CONF_SLAVE: 0, + } + ] + }, + ], +) +async def test_stop_restart( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus +) -> None: + """Run test for service stop.""" + + caplog.set_level(logging.INFO) + entity_id = f"{SENSOR_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_") + assert hass.states.get(entity_id).state in (STATE_UNKNOWN, STATE_UNAVAILABLE) + hass.states.async_set(entity_id, 17) + await hass.async_block_till_done() + assert hass.states.get(entity_id).state == "17" + + mock_modbus.reset_mock() + caplog.clear() + data = { + ATTR_HUB: TEST_MODBUS_NAME, + } + await hass.services.async_call(DOMAIN, SERVICE_STOP, data, blocking=True) + await hass.async_block_till_done() + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + assert mock_modbus.close.called + assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text + + mock_modbus.reset_mock() + caplog.clear() + await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True) + await hass.async_block_till_done() + assert not mock_modbus.close.called + assert mock_modbus.connect.called + assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text + + mock_modbus.reset_mock() + caplog.clear() + await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True) + await hass.async_block_till_done() + assert mock_modbus.close.called + assert mock_modbus.connect.called + assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text + assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text + + @pytest.mark.parametrize("do_config", [{}]) async def test_write_no_client(hass: HomeAssistant, mock_modbus) -> None: """Run test for service stop and write without client.""" @@ -1165,7 +1713,7 @@ async def test_integration_reload( ) -> None: """Run test for integration reload.""" - caplog.set_level(logging.DEBUG) + caplog.set_level(logging.INFO) caplog.clear() yaml_path = 
get_fixture_path("configuration.yaml", "modbus") @@ -1184,7 +1732,7 @@ async def test_integration_reload_failed( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus ) -> None: """Run test for integration connect failure on reload.""" - caplog.set_level(logging.DEBUG) + caplog.set_level(logging.INFO) caplog.clear() yaml_path = get_fixture_path("configuration.yaml", "modbus") diff --git a/tests/components/modbus/test_light.py b/tests/components/modbus/test_light.py index 745249ff866..e74da085180 100644 --- a/tests/components/modbus/test_light.py +++ b/tests/components/modbus/test_light.py @@ -3,7 +3,6 @@ from pymodbus.exceptions import ModbusException import pytest -from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, @@ -19,22 +18,18 @@ from homeassistant.components.modbus.const import ( MODBUS_DOMAIN, ) from homeassistant.const import ( - ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_LIGHTS, CONF_NAME, - CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State +from homeassistant.core import HomeAssistant, State from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY_NAME, ReadResult @@ -274,12 +269,12 @@ async def test_light_service_turn( assert hass.states.get(ENTITY_ID).state == STATE_OFF await hass.services.async_call( - LIGHT_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID} + "light", "turn_on", service_data={"entity_id": ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_ON await hass.services.async_call( - LIGHT_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} + "light", "turn_off", service_data={"entity_id": ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF @@ -287,20 +282,20 @@ async def test_light_service_turn( mock_modbus.read_holding_registers.return_value = ReadResult([0x01]) assert hass.states.get(ENTITY_ID2).state == STATE_OFF await hass.services.async_call( - LIGHT_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} + "light", "turn_on", service_data={"entity_id": ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_ON mock_modbus.read_holding_registers.return_value = ReadResult([0x00]) await hass.services.async_call( - LIGHT_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID2} + "light", "turn_off", service_data={"entity_id": ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_OFF mock_modbus.write_register.side_effect = ModbusException("fail write_") await hass.services.async_call( - LIGHT_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} + "light", "turn_on", service_data={"entity_id": ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_UNAVAILABLE @@ -324,18 +319,12 @@ async def test_light_service_turn( async def test_service_light_update(hass: HomeAssistant, mock_modbus_ha) -> None: """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: ENTITY_ID}, - blocking=True, + "homeassistant", 
"update_entity", {"entity_id": ENTITY_ID}, blocking=True ) assert hass.states.get(ENTITY_ID).state == STATE_OFF mock_modbus_ha.read_coils.return_value = ReadResult([0x01]) await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: ENTITY_ID}, - blocking=True, + "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True ) assert hass.states.get(ENTITY_ID).state == STATE_ON @@ -348,7 +337,7 @@ async def test_no_discovery_info_light( assert await async_setup_component( hass, LIGHT_DOMAIN, - {LIGHT_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, + {LIGHT_DOMAIN: {"platform": MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert LIGHT_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_sensor.py b/tests/components/modbus/test_sensor.py index fc63a300c5c..20ff558fce6 100644 --- a/tests/components/modbus/test_sensor.py +++ b/tests/components/modbus/test_sensor.py @@ -4,13 +4,13 @@ import struct import pytest -from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CALL_TYPE_REGISTER_HOLDING, CALL_TYPE_REGISTER_INPUT, CONF_DATA_TYPE, CONF_DEVICE_ADDRESS, CONF_INPUT_TYPE, + CONF_LAZY_ERROR, CONF_MAX_VALUE, CONF_MIN_VALUE, CONF_NAN_VALUE, @@ -32,13 +32,11 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( - ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COUNT, CONF_DEVICE_CLASS, CONF_NAME, CONF_OFFSET, - CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SENSORS, CONF_SLAVE, @@ -47,7 +45,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, STATE_UNKNOWN, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State +from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -168,6 +166,17 @@ SLAVE_UNIQUE_ID = "ground_floor_sensor" } ] }, + { + CONF_SENSORS: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 51, + CONF_DATA_TYPE: DataType.INT32, + CONF_VIRTUAL_COUNT: 5, + CONF_LAZY_ERROR: 3, + } + ] + }, { CONF_SENSORS: [ { @@ -1326,7 +1335,7 @@ async def test_wrap_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None @pytest.fixture(name="mock_restore") -async def mock_restore(hass: HomeAssistant) -> None: +async def mock_restore(hass): """Mock restore cache.""" mock_restore_cache_with_extra_data( hass, @@ -1386,18 +1395,12 @@ async def test_service_sensor_update(hass: HomeAssistant, mock_modbus_ha) -> Non """Run test for service homeassistant.update_entity.""" mock_modbus_ha.read_input_registers.return_value = ReadResult([27]) await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: ENTITY_ID}, - blocking=True, + "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True ) assert hass.states.get(ENTITY_ID).state == "27" mock_modbus_ha.read_input_registers.return_value = ReadResult([32]) await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: ENTITY_ID}, - blocking=True, + "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True ) assert hass.states.get(ENTITY_ID).state == "32" @@ -1410,7 +1413,7 @@ async def test_no_discovery_info_sensor( assert await async_setup_component( hass, SENSOR_DOMAIN, - {SENSOR_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, + {SENSOR_DOMAIN: {"platform": MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert SENSOR_DOMAIN in 
hass.config.components diff --git a/tests/components/modbus/test_switch.py b/tests/components/modbus/test_switch.py index 4e0ad0841ea..bdb95c667c7 100644 --- a/tests/components/modbus/test_switch.py +++ b/tests/components/modbus/test_switch.py @@ -6,7 +6,6 @@ from unittest import mock from pymodbus.exceptions import ModbusException import pytest -from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, CALL_TYPE_DISCRETE, @@ -22,24 +21,20 @@ from homeassistant.components.modbus.const import ( ) from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( - ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_DELAY, CONF_DEVICE_CLASS, CONF_NAME, - CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, CONF_SWITCHES, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State +from homeassistant.core import HomeAssistant, State from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -49,7 +44,6 @@ from tests.common import async_fire_time_changed ENTITY_ID = f"{SWITCH_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_") ENTITY_ID2 = f"{ENTITY_ID}_2" -ENTITY_ID3 = f"{ENTITY_ID}_3" @pytest.mark.parametrize( @@ -80,7 +74,7 @@ ENTITY_ID3 = f"{ENTITY_ID}_3" CONF_SLAVE: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: SWITCH_DOMAIN, + CONF_DEVICE_CLASS: "switch", CONF_VERIFY: { CONF_INPUT_TYPE: CALL_TYPE_REGISTER_HOLDING, CONF_ADDRESS: 1235, @@ -98,7 +92,7 @@ ENTITY_ID3 = f"{ENTITY_ID}_3" CONF_DEVICE_ADDRESS: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: SWITCH_DOMAIN, + CONF_DEVICE_CLASS: "switch", CONF_VERIFY: { CONF_INPUT_TYPE: CALL_TYPE_REGISTER_HOLDING, CONF_ADDRESS: 1235, @@ -116,7 +110,7 @@ ENTITY_ID3 = f"{ENTITY_ID}_3" CONF_SLAVE: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: SWITCH_DOMAIN, + CONF_DEVICE_CLASS: "switch", CONF_VERIFY: { CONF_INPUT_TYPE: CALL_TYPE_REGISTER_INPUT, CONF_ADDRESS: 1235, @@ -135,7 +129,7 @@ ENTITY_ID3 = f"{ENTITY_ID}_3" CONF_SLAVE: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: SWITCH_DOMAIN, + CONF_DEVICE_CLASS: "switch", CONF_VERIFY: { CONF_INPUT_TYPE: CALL_TYPE_DISCRETE, CONF_ADDRESS: 1235, @@ -153,48 +147,12 @@ ENTITY_ID3 = f"{ENTITY_ID}_3" CONF_SLAVE: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: SWITCH_DOMAIN, + CONF_DEVICE_CLASS: "switch", CONF_SCAN_INTERVAL: 0, CONF_VERIFY: None, } ] }, - { - CONF_SWITCHES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 1234, - CONF_DEVICE_ADDRESS: 10, - CONF_COMMAND_OFF: 0x00, - CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: SWITCH_DOMAIN, - CONF_VERIFY: { - CONF_INPUT_TYPE: CALL_TYPE_REGISTER_HOLDING, - CONF_ADDRESS: 1235, - CONF_STATE_OFF: 0, - CONF_STATE_ON: [1, 2, 3], - }, - } - ] - }, - { - CONF_SWITCHES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 1236, - CONF_DEVICE_ADDRESS: 10, - CONF_COMMAND_OFF: 0x00, - CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: SWITCH_DOMAIN, - CONF_VERIFY: { - CONF_INPUT_TYPE: CALL_TYPE_REGISTER_HOLDING, - CONF_ADDRESS: 1235, - CONF_STATE_OFF: [0, 5, 6], - CONF_STATE_ON: 1, - }, - } - ] - }, ], ) async def test_config_switch(hass: HomeAssistant, mock_modbus) -> None: @@ -260,18 +218,6 @@ async def test_config_switch(hass: HomeAssistant, mock_modbus) -> None: None, STATE_OFF, ), - ( - [0x03], - 
False, - {CONF_VERIFY: {CONF_STATE_ON: [1, 3]}}, - STATE_ON, - ), - ( - [0x04], - False, - {CONF_VERIFY: {CONF_STATE_OFF: [0, 4]}}, - STATE_OFF, - ), ], ) async def test_all_switch(hass: HomeAssistant, mock_do_cycle, expected) -> None: @@ -323,13 +269,6 @@ async def test_restore_state_switch( CONF_SCAN_INTERVAL: 0, CONF_VERIFY: {}, }, - { - CONF_NAME: f"{TEST_ENTITY_NAME} 3", - CONF_ADDRESS: 18, - CONF_WRITE_TYPE: CALL_TYPE_REGISTER_HOLDING, - CONF_SCAN_INTERVAL: 0, - CONF_VERIFY: {CONF_STATE_ON: [1, 3]}, - }, ], }, ], @@ -344,12 +283,12 @@ async def test_switch_service_turn( assert hass.states.get(ENTITY_ID).state == STATE_OFF await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID} + "switch", "turn_on", service_data={"entity_id": ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_ON await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} + "switch", "turn_off", service_data={"entity_id": ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF @@ -357,48 +296,29 @@ async def test_switch_service_turn( mock_modbus.read_holding_registers.return_value = ReadResult([0x01]) assert hass.states.get(ENTITY_ID2).state == STATE_OFF await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} + "switch", "turn_on", service_data={"entity_id": ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_ON mock_modbus.read_holding_registers.return_value = ReadResult([0x00]) await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID2} + "switch", "turn_off", service_data={"entity_id": ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_OFF - mock_modbus.read_holding_registers.return_value = ReadResult([0x03]) - assert hass.states.get(ENTITY_ID3).state == STATE_OFF - await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID3} - ) - await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID3).state == STATE_ON - mock_modbus.read_holding_registers.return_value = ReadResult([0x00]) - await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID3} - ) - await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID3).state == STATE_OFF mock_modbus.write_register.side_effect = ModbusException("fail write_") await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} + "switch", "turn_on", service_data={"entity_id": ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_UNAVAILABLE mock_modbus.write_coil.side_effect = ModbusException("fail write_") await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} + "switch", "turn_off", service_data={"entity_id": ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE - mock_modbus.write_register.side_effect = ModbusException("fail write_") - await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID3} - ) - await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID3).state == STATE_UNAVAILABLE @pytest.mark.parametrize( @@ -414,43 +334,17 @@ 
async def test_switch_service_turn( } ] }, - { - CONF_SWITCHES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 1236, - CONF_WRITE_TYPE: CALL_TYPE_COIL, - CONF_VERIFY: {CONF_STATE_ON: [1, 3]}, - } - ] - }, - { - CONF_SWITCHES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 1235, - CONF_WRITE_TYPE: CALL_TYPE_COIL, - CONF_VERIFY: {CONF_STATE_OFF: [0, 5]}, - } - ] - }, ], ) async def test_service_switch_update(hass: HomeAssistant, mock_modbus_ha) -> None: """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: ENTITY_ID}, - blocking=True, + "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True ) assert hass.states.get(ENTITY_ID).state == STATE_OFF mock_modbus_ha.read_coils.return_value = ReadResult([0x01]) await hass.services.async_call( - HOMEASSISTANT_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: ENTITY_ID}, - blocking=True, + "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True ) assert hass.states.get(ENTITY_ID).state == STATE_ON @@ -478,7 +372,7 @@ async def test_delay_switch(hass: HomeAssistant, mock_modbus) -> None: mock_modbus.read_holding_registers.return_value = ReadResult([0x01]) now = dt_util.utcnow() await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID} + "switch", "turn_on", service_data={"entity_id": ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF @@ -497,7 +391,7 @@ async def test_no_discovery_info_switch( assert await async_setup_component( hass, SWITCH_DOMAIN, - {SWITCH_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, + {SWITCH_DOMAIN: {"platform": MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert SWITCH_DOMAIN in hass.config.components diff --git a/tests/components/modern_forms/snapshots/test_diagnostics.ambr b/tests/components/modern_forms/snapshots/test_diagnostics.ambr deleted file mode 100644 index 75794aaca12..00000000000 --- a/tests/components/modern_forms/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,52 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'config_entry': dict({ - 'data': dict({ - 'host': '192.168.1.123', - 'mac': '**REDACTED**', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'modern_forms', - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': None, - 'version': 1, - }), - 'device': dict({ - 'info': dict({ - 'client_id': 'MF_000000000000', - 'device_name': 'ModernFormsFan', - 'fan_motor_type': 'DC125X25', - 'fan_type': '1818-56', - 'federated_identity': 'us-east-1:f3da237b-c19c-4f61-b387-0e6dde2e470b', - 'firmware_url': '', - 'firmware_version': '01.03.0025', - 'light_type': 'F6IN-120V-R1-30', - 'mac_address': '**REDACTED**', - 'main_mcu_firmware_version': '01.03.3008', - 'owner': '**REDACTED**', - 'product_sku': '', - 'production_lot_number': '', - }), - 'status': dict({ - 'adaptive_learning_enabled': False, - 'away_mode_enabled': False, - 'fan_direction': 'forward', - 'fan_on': True, - 'fan_sleep_timer': 0, - 'fan_speed': 3, - 'light_brightness': 50, - 'light_on': True, - 'light_sleep_timer': 0, - }), - }), - }) -# --- diff --git a/tests/components/modern_forms/test_config_flow.py b/tests/components/modern_forms/test_config_flow.py index 1484b5d5992..4c39f83f688 100644 --- 
a/tests/components/modern_forms/test_config_flow.py +++ b/tests/components/modern_forms/test_config_flow.py @@ -84,9 +84,10 @@ async def test_full_zeroconf_flow_implementation( assert result.get("step_id") == "zeroconf_confirm" assert result.get("type") is FlowResultType.FORM - flow = hass.config_entries.flow._progress[flows[0]["flow_id"]] - assert flow.host == "192.168.1.123" - assert flow.name == "example" + flow = flows[0] + assert "context" in flow + assert flow["context"][CONF_HOST] == "192.168.1.123" + assert flow["context"][CONF_NAME] == "example" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} diff --git a/tests/components/modern_forms/test_diagnostics.py b/tests/components/modern_forms/test_diagnostics.py deleted file mode 100644 index 9eb2e4efa94..00000000000 --- a/tests/components/modern_forms/test_diagnostics.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Tests for the Modern Forms diagnostics platform.""" - -from syrupy import SnapshotAssertion -from syrupy.filters import props - -from homeassistant.core import HomeAssistant - -from . import init_integration - -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.test_util.aiohttp import AiohttpClientMocker -from tests.typing import ClientSessionGenerator - - -async def test_entry_diagnostics( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test the creation and values of the Modern Forms fans.""" - entry = await init_integration(hass, aioclient_mock) - - result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - - assert result == snapshot(exclude=props("created_at", "modified_at", "entry_id")) diff --git a/tests/components/mold_indicator/conftest.py b/tests/components/mold_indicator/conftest.py deleted file mode 100644 index 11f07e1db35..00000000000 --- a/tests/components/mold_indicator/conftest.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Fixtures for the Mold indicator integration.""" - -from __future__ import annotations - -from collections.abc import Generator -from typing import Any -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.mold_indicator.const import ( - CONF_CALIBRATION_FACTOR, - CONF_INDOOR_HUMIDITY, - CONF_INDOOR_TEMP, - CONF_OUTDOOR_TEMP, - DEFAULT_NAME, - DOMAIN, -) -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import ( - ATTR_UNIT_OF_MEASUREMENT, - CONF_NAME, - PERCENTAGE, - UnitOfTemperature, -) -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Automatically path mold indicator.""" - with patch( - "homeassistant.components.mold_indicator.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture(name="get_config") -async def get_config_to_integration_load() -> dict[str, Any]: - """Return configuration. 
- - To override the config, tests can be marked with: - @pytest.mark.parametrize("get_config", [{...}]) - """ - return { - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - CONF_CALIBRATION_FACTOR: 2.0, - } - - -@pytest.fixture(name="loaded_entry") -async def load_integration( - hass: HomeAssistant, get_config: dict[str, Any] -) -> MockConfigEntry: - """Set up the Mold indicator integration in Home Assistant.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - source=SOURCE_USER, - options=get_config, - entry_id="1", - title=DEFAULT_NAME, - ) - - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - hass.states.async_set( - "sensor.indoor_temp", - "10", - {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, - ) - hass.states.async_set( - "sensor.outdoor_temp", - "10", - {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, - ) - hass.states.async_set( - "sensor.indoor_humidity", "0", {ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE} - ) - await hass.async_block_till_done() - - return config_entry diff --git a/tests/components/mold_indicator/snapshots/test_config_flow.ambr b/tests/components/mold_indicator/snapshots/test_config_flow.ambr deleted file mode 100644 index a7986ad051e..00000000000 --- a/tests/components/mold_indicator/snapshots/test_config_flow.ambr +++ /dev/null @@ -1,49 +0,0 @@ -# serializer version: 1 -# name: test_config_flow_preview_success[missing_calibration_factor] - dict({ - 'attributes': dict({ - 'device_class': 'humidity', - 'friendly_name': 'Mold Indicator', - 'state_class': 'measurement', - 'unit_of_measurement': '%', - }), - 'state': 'unavailable', - }) -# --- -# name: test_config_flow_preview_success[missing_humidity_entity] - dict({ - 'attributes': dict({ - 'device_class': 'humidity', - 'friendly_name': 'Mold Indicator', - 'state_class': 'measurement', - 'unit_of_measurement': '%', - }), - 'state': 'unavailable', - }) -# --- -# name: test_config_flow_preview_success[success] - dict({ - 'attributes': dict({ - 'device_class': 'humidity', - 'dewpoint': 12.01, - 'estimated_critical_temp': 19.5, - 'friendly_name': 'Mold Indicator', - 'state_class': 'measurement', - 'unit_of_measurement': '%', - }), - 'state': '61', - }) -# --- -# name: test_options_flow_preview - dict({ - 'attributes': dict({ - 'device_class': 'humidity', - 'dewpoint': 12.01, - 'estimated_critical_temp': 19.5, - 'friendly_name': 'Mold Indicator', - 'state_class': 'measurement', - 'unit_of_measurement': '%', - }), - 'state': '61', - }) -# --- diff --git a/tests/components/mold_indicator/test_config_flow.py b/tests/components/mold_indicator/test_config_flow.py deleted file mode 100644 index 9df0e18d9ed..00000000000 --- a/tests/components/mold_indicator/test_config_flow.py +++ /dev/null @@ -1,386 +0,0 @@ -"""Test the Mold indicator config flow.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant import config_entries -from homeassistant.components.mold_indicator.const import ( - CONF_CALIBRATION_FACTOR, - CONF_INDOOR_HUMIDITY, - CONF_INDOOR_TEMP, - CONF_OUTDOOR_TEMP, - DEFAULT_NAME, - DOMAIN, -) -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT, UnitOfTemperature -from homeassistant.core import HomeAssistant -from 
homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry -from tests.typing import WebSocketGenerator - - -async def test_form_sensor(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: - """Test we get the form for sensor.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - CONF_CALIBRATION_FACTOR: 2.0, - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DEFAULT_NAME - assert result["version"] == 1 - assert result["options"] == { - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - CONF_CALIBRATION_FACTOR: 2.0, - } - - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: - """Test options flow.""" - - result = await hass.config_entries.options.async_init(loaded_entry.entry_id) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - CONF_CALIBRATION_FACTOR: 3.0, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - CONF_CALIBRATION_FACTOR: 3.0, - } - - await hass.async_block_till_done() - - # Check the entity was updated, no new entity was created - # 3 input entities + resulting mold indicator sensor - assert len(hass.states.async_all()) == 4 - - state = hass.states.get("sensor.mold_indicator") - assert state is not None - - -async def test_calibration_factor_not_zero(hass: HomeAssistant) -> None: - """Test calibration factor is not zero.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - CONF_CALIBRATION_FACTOR: 0.0, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "calibration_is_zero"} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - CONF_CALIBRATION_FACTOR: 1.0, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["options"] == { - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - CONF_CALIBRATION_FACTOR: 1.0, - } - - 
-async def test_entry_already_exist( - hass: HomeAssistant, loaded_entry: MockConfigEntry -) -> None: - """Test abort when entry already exist.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - CONF_CALIBRATION_FACTOR: 2.0, - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize( - "user_input", - [ - ( - { - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - CONF_CALIBRATION_FACTOR: 2.0, - } - ), - ( - { - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - } - ), - ( - { - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - CONF_CALIBRATION_FACTOR: 2.0, - } - ), - ], - ids=("success", "missing_calibration_factor", "missing_humidity_entity"), -) -async def test_config_flow_preview_success( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - user_input: str, - snapshot: SnapshotAssertion, -) -> None: - """Test the config flow preview.""" - client = await hass_ws_client(hass) - - # add state for the tests - hass.states.async_set( - "sensor.indoor_temp", - 23, - {CONF_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, - ) - hass.states.async_set( - "sensor.indoor_humidity", - 50, - {CONF_UNIT_OF_MEASUREMENT: "%"}, - ) - hass.states.async_set( - "sensor.outdoor_temp", - 16, - {CONF_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, - ) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] == FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] is None - assert result["preview"] == "mold_indicator" - - await client.send_json_auto_id( - { - "type": "mold_indicator/start_preview", - "flow_id": result["flow_id"], - "flow_type": "config_flow", - "user_input": user_input, - } - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] is None - - msg = await client.receive_json() - assert msg["event"] == snapshot - assert len(hass.states.async_all()) == 3 - - -async def test_options_flow_preview( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test the options flow preview.""" - client = await hass_ws_client(hass) - - # add state for the tests - hass.states.async_set( - "sensor.indoor_temp", - 23, - {CONF_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, - ) - hass.states.async_set( - "sensor.indoor_humidity", - 50, - {CONF_UNIT_OF_MEASUREMENT: "%"}, - ) - hass.states.async_set( - "sensor.outdoor_temp", - 16, - {CONF_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, - ) - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - 
CONF_CALIBRATION_FACTOR: 2.0, - }, - title="Test Sensor", - ) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] == FlowResultType.FORM - assert result["errors"] is None - assert result["preview"] == "mold_indicator" - - await client.send_json_auto_id( - { - "type": "mold_indicator/start_preview", - "flow_id": result["flow_id"], - "flow_type": "options_flow", - "user_input": { - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - CONF_CALIBRATION_FACTOR: 2.0, - }, - } - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] is None - - msg = await client.receive_json() - assert msg["event"] == snapshot - assert len(hass.states.async_all()) == 4 - - -async def test_options_flow_sensor_preview_config_entry_removed( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test the option flow preview where the config entry is removed.""" - client = await hass_ws_client(hass) - - hass.states.async_set( - "sensor.indoor_temp", - 23, - {CONF_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, - ) - hass.states.async_set( - "sensor.indoor_humidity", - 50, - {CONF_UNIT_OF_MEASUREMENT: "%"}, - ) - hass.states.async_set( - "sensor.outdoor_temp", - 16, - {CONF_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, - ) - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - CONF_CALIBRATION_FACTOR: 2.0, - }, - title="Test Sensor", - ) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] == FlowResultType.FORM - assert result["errors"] is None - assert result["preview"] == "mold_indicator" - - await hass.config_entries.async_remove(config_entry.entry_id) - - await client.send_json_auto_id( - { - "type": "mold_indicator/start_preview", - "flow_id": result["flow_id"], - "flow_type": "options_flow", - "user_input": { - CONF_NAME: DEFAULT_NAME, - CONF_INDOOR_TEMP: "sensor.indoor_temp", - CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", - CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", - CONF_CALIBRATION_FACTOR: 2.0, - }, - } - ) - msg = await client.receive_json() - assert not msg["success"] - assert msg["error"] == { - "code": "home_assistant_error", - "message": "Config entry not found", - } diff --git a/tests/components/mold_indicator/test_init.py b/tests/components/mold_indicator/test_init.py deleted file mode 100644 index 5fd6b11c8fe..00000000000 --- a/tests/components/mold_indicator/test_init.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Test Mold indicator component setup process.""" - -from __future__ import annotations - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: - """Test unload an entry.""" - - assert loaded_entry.state is ConfigEntryState.LOADED - assert await 
hass.config_entries.async_unload(loaded_entry.entry_id) - await hass.async_block_till_done() - assert loaded_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/mold_indicator/test_sensor.py b/tests/components/mold_indicator/test_sensor.py index bb3f7c4fc93..760d82dfedc 100644 --- a/tests/components/mold_indicator/test_sensor.py +++ b/tests/components/mold_indicator/test_sensor.py @@ -16,11 +16,9 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry - @pytest.fixture(autouse=True) -def init_sensors_fixture(hass: HomeAssistant) -> None: +def init_sensors_fixture(hass): """Set up things to be run when tests are started.""" hass.states.async_set( "test.indoortemp", "20", {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS} @@ -54,16 +52,6 @@ async def test_setup(hass: HomeAssistant) -> None: assert moldind.attributes.get("unit_of_measurement") == PERCENTAGE -async def test_setup_from_config_entry( - hass: HomeAssistant, loaded_entry: MockConfigEntry -) -> None: - """Test the mold indicator sensor setup from a config entry.""" - - moldind = hass.states.get("sensor.mold_indicator") - assert moldind - assert moldind.attributes.get("unit_of_measurement") == PERCENTAGE - - async def test_invalidcalib(hass: HomeAssistant) -> None: """Test invalid sensor values.""" hass.states.async_set( diff --git a/tests/components/monarch_money/__init__.py b/tests/components/monarch_money/__init__.py deleted file mode 100644 index f08addf2ec6..00000000000 --- a/tests/components/monarch_money/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for the Monarch Money integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/monarch_money/conftest.py b/tests/components/monarch_money/conftest.py deleted file mode 100644 index 7d6a965a009..00000000000 --- a/tests/components/monarch_money/conftest.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Common fixtures for the Monarch Money tests.""" - -from collections.abc import Generator -import json -from typing import Any -from unittest.mock import AsyncMock, PropertyMock, patch - -import pytest -from typedmonarchmoney.models import ( - MonarchAccount, - MonarchCashflowSummary, - MonarchSubscription, -) - -from homeassistant.components.monarch_money.const import DOMAIN -from homeassistant.const import CONF_TOKEN - -from tests.common import MockConfigEntry, load_fixture, load_json_object_fixture - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.monarch_money.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -async def mock_config_entry() -> MockConfigEntry: - """Fixture for mock config entry.""" - return MockConfigEntry( - domain=DOMAIN, - data={CONF_TOKEN: "fake_token_of_doom"}, - unique_id="222260252323873333", - version=1, - ) - - -@pytest.fixture -def mock_config_api() -> Generator[AsyncMock]: - """Mock the MonarchMoney class.""" - - account_json: dict[str, Any] = load_json_object_fixture("get_accounts.json", DOMAIN) - account_data = 
[MonarchAccount(data) for data in account_json["accounts"]] - account_data_dict: dict[str, MonarchAccount] = { - acc["id"]: MonarchAccount(acc) for acc in account_json["accounts"] - } - - cashflow_json: dict[str, Any] = json.loads( - load_fixture("get_cashflow_summary.json", DOMAIN) - ) - cashflow_summary = MonarchCashflowSummary(cashflow_json) - subscription_details = MonarchSubscription( - json.loads(load_fixture("get_subscription_details.json", DOMAIN)) - ) - - with ( - patch( - "homeassistant.components.monarch_money.config_flow.TypedMonarchMoney", - autospec=True, - ) as mock_class, - patch( - "homeassistant.components.monarch_money.TypedMonarchMoney", new=mock_class - ), - ): - instance = mock_class.return_value - type(instance).token = PropertyMock(return_value="mocked_token") - instance.login = AsyncMock(return_value=None) - instance.multi_factor_authenticate = AsyncMock(return_value=None) - instance.get_subscription_details = AsyncMock(return_value=subscription_details) - instance.get_accounts = AsyncMock(return_value=account_data) - instance.get_accounts_as_dict_with_id_key = AsyncMock( - return_value=account_data_dict - ) - instance.get_cashflow_summary = AsyncMock(return_value=cashflow_summary) - instance.get_subscription_details = AsyncMock(return_value=subscription_details) - yield mock_class diff --git a/tests/components/monarch_money/fixtures/get_accounts.json b/tests/components/monarch_money/fixtures/get_accounts.json deleted file mode 100644 index ddaecc1721b..00000000000 --- a/tests/components/monarch_money/fixtures/get_accounts.json +++ /dev/null @@ -1,516 +0,0 @@ -{ - "accounts": [ - { - "id": "900000000", - "displayName": "Brokerage", - "syncDisabled": false, - "deactivatedAt": null, - "isHidden": false, - "isAsset": true, - "mask": "0189", - "createdAt": "2021-10-15T01:32:33.809450+00:00", - "updatedAt": "2022-05-26T00:56:41.322045+00:00", - "displayLastUpdatedAt": "2022-05-26T00:56:41.321928+00:00", - "currentBalance": 1000.5, - "displayBalance": 1000.5, - "includeInNetWorth": true, - "hideFromList": true, - "hideTransactionsFromReports": false, - "includeBalanceInNetWorth": false, - "includeInGoalBalance": false, - "dataProvider": "plaid", - "dataProviderAccountId": "testProviderAccountId", - "isManual": false, - "transactionsCount": 0, - "holdingsCount": 0, - "manualInvestmentsTrackingMethod": null, - "order": 11, - "icon": "trending-up", - "logoUrl": "base64Nonce", - "type": { - "name": "brokerage", - "display": "Investments", - "__typename": "AccountType" - }, - "subtype": { - "name": "brokerage", - "display": "Brokerage", - "__typename": "AccountSubtype" - }, - "credential": { - "id": "900000001", - "updateRequired": false, - "disconnectedFromDataProviderAt": null, - "dataProvider": "PLAID", - "institution": { - "id": "700000000", - "plaidInstitutionId": "ins_0", - "name": "Rando Brokerage", - "status": "DEGRADED", - "logo": "base64Nonce", - "__typename": "Institution" - }, - "__typename": "Credential" - }, - "institution": { - "id": "700000000", - "name": "Rando Brokerage", - "logo": "base64Nonce", - "primaryColor": "#0075a3", - "url": "https://rando.brokerage/", - "__typename": "Institution" - }, - "__typename": "Account" - }, - { - "id": "900000002", - "displayName": "Checking", - "syncDisabled": false, - "deactivatedAt": null, - "isHidden": false, - "isAsset": true, - "mask": "2602", - "createdAt": "2021-10-15T01:32:33.900521+00:00", - "updatedAt": "2024-02-17T11:21:05.228959+00:00", - "displayLastUpdatedAt": "2024-02-17T11:21:05.228721+00:00", - 
"currentBalance": 1000.02, - "displayBalance": 1000.02, - "includeInNetWorth": true, - "hideFromList": false, - "hideTransactionsFromReports": false, - "includeBalanceInNetWorth": true, - "includeInGoalBalance": true, - "dataProvider": "plaid", - "dataProviderAccountId": "testProviderAccountId", - "isManual": false, - "transactionsCount": 1403, - "holdingsCount": 0, - "manualInvestmentsTrackingMethod": null, - "order": 0, - "icon": "dollar-sign", - "logoUrl": "data:image/png;base64,base64Nonce", - "type": { - "name": "depository", - "display": "Cash", - "__typename": "AccountType" - }, - "subtype": { - "name": "checking", - "display": "Checking", - "__typename": "AccountSubtype" - }, - "credential": { - "id": "900000003", - "updateRequired": false, - "disconnectedFromDataProviderAt": null, - "dataProvider": "PLAID", - "institution": { - "id": "7000000002", - "plaidInstitutionId": "ins_01", - "name": "Rando Bank", - "status": "DEGRADED", - "logo": "base64Nonce", - "__typename": "Institution" - }, - "__typename": "Credential" - }, - "institution": { - "id": "7000000005", - "name": "Rando Bank", - "logo": "base64Nonce", - "primaryColor": "#0075a3", - "url": "https://rando.bank/", - "__typename": "Institution" - }, - "__typename": "Account" - }, - - { - "id": "121212192626186051", - "displayName": "2050 Toyota RAV8", - "syncDisabled": false, - "deactivatedAt": null, - "isHidden": false, - "isAsset": true, - "mask": null, - "createdAt": "2024-08-16T17:37:21.885036+00:00", - "updatedAt": "2024-08-16T17:37:21.885057+00:00", - "displayLastUpdatedAt": "2024-08-16T17:37:21.885057+00:00", - "currentBalance": 11075.58, - "displayBalance": 11075.58, - "includeInNetWorth": true, - "hideFromList": false, - "hideTransactionsFromReports": false, - "includeBalanceInNetWorth": true, - "includeInGoalBalance": false, - "dataProvider": "vin_audit", - "dataProviderAccountId": "1111111v5cw252004", - "isManual": false, - "transactionsCount": 0, - "holdingsCount": 0, - "manualInvestmentsTrackingMethod": null, - "order": 0, - "logoUrl": "https://api.monarchmoney.com/cdn-cgi/image/width=128/images/institution/159427559853802644", - "type": { - "name": "vehicle", - "display": "Vehicles", - "__typename": "AccountType" - }, - "subtype": { - "name": "car", - "display": "Car", - "__typename": "AccountSubtype" - }, - "credential": null, - "institution": { - "id": "123456789853802644", - "name": "VinAudit", - "primaryColor": "#74ab16", - "url": "https://www.vinaudit.com/", - "__typename": "Institution" - }, - "__typename": "Account" - }, - { - "id": "9000000007", - "displayName": "Credit Card", - "syncDisabled": true, - "deactivatedAt": null, - "isHidden": true, - "isAsset": false, - "mask": "3542", - "createdAt": "2021-10-15T01:33:46.646459+00:00", - "updatedAt": "2022-12-10T18:17:06.129456+00:00", - "displayLastUpdatedAt": "2022-10-15T08:34:34.815239+00:00", - "currentBalance": -200.0, - "displayBalance": -200.0, - "includeInNetWorth": true, - "hideFromList": false, - "hideTransactionsFromReports": false, - "includeBalanceInNetWorth": false, - "includeInGoalBalance": true, - "dataProvider": "finicity", - "dataProviderAccountId": "50001", - "isManual": false, - "transactionsCount": 1138, - "holdingsCount": 0, - "manualInvestmentsTrackingMethod": null, - "order": 1, - "icon": "credit-card", - "logoUrl": "data:image/png;base64,base64Nonce", - "type": { - "name": "credit", - "display": "Credit Cards", - "__typename": "AccountType" - }, - "subtype": { - "name": "credit_card", - "display": "Credit Card", - "__typename": 
"AccountSubtype" - }, - "credential": { - "id": "9000000009", - "updateRequired": true, - "disconnectedFromDataProviderAt": null, - "dataProvider": "FINICITY", - "institution": { - "id": "7000000002", - "plaidInstitutionId": "ins_9", - "name": "Rando Credit", - "status": null, - "logo": "base64Nonce", - "__typename": "Institution" - }, - "__typename": "Credential" - }, - "institution": { - "id": "70000000010", - "name": "Rando Credit", - "logo": "base64Nonce", - "primaryColor": "#004966", - "url": "https://rando.credit/", - "__typename": "Institution" - }, - "__typename": "Account" - }, - { - "id": "900000000012", - "displayName": "Roth IRA", - "syncDisabled": false, - "deactivatedAt": null, - "isHidden": false, - "isAsset": true, - "mask": "1052", - "createdAt": "2021-10-15T01:35:59.299450+00:00", - "updatedAt": "2024-02-17T13:32:21.072711+00:00", - "displayLastUpdatedAt": "2024-02-17T13:32:21.072453+00:00", - "currentBalance": 10000.43, - "displayBalance": 10000.43, - "includeInNetWorth": true, - "hideFromList": false, - "hideTransactionsFromReports": false, - "includeBalanceInNetWorth": true, - "includeInGoalBalance": false, - "dataProvider": "plaid", - "dataProviderAccountId": "testProviderAccountId", - "isManual": false, - "transactionsCount": 28, - "holdingsCount": 24, - "manualInvestmentsTrackingMethod": null, - "order": 4, - "icon": "trending-up", - "logoUrl": "data:image/png;base64,base64Nonce", - "type": { - "name": "brokerage", - "display": "Investments", - "__typename": "AccountType" - }, - "subtype": { - "name": "roth", - "display": "Roth IRA", - "__typename": "AccountSubtype" - }, - "credential": { - "id": "90000000014", - "updateRequired": false, - "disconnectedFromDataProviderAt": null, - "dataProvider": "PLAID", - "institution": { - "id": "70000000016", - "plaidInstitutionId": "ins_02", - "name": "Rando Investments", - "status": null, - "logo": "base64Nonce", - "__typename": "Institution" - }, - "__typename": "Credential" - }, - "institution": { - "id": "70000000018", - "name": "Rando Investments", - "logo": "base64Nonce", - "primaryColor": "#40a829", - "url": "https://rando.investments/", - "__typename": "Institution" - }, - "__typename": "Account" - }, - { - "id": "90000000020", - "displayName": "House", - "syncDisabled": false, - "deactivatedAt": null, - "isHidden": false, - "isAsset": true, - "mask": null, - "createdAt": "2021-10-15T01:39:29.370279+00:00", - "updatedAt": "2024-02-12T09:00:25.451425+00:00", - "displayLastUpdatedAt": "2024-02-12T09:00:25.451425+00:00", - "currentBalance": 123000.0, - "displayBalance": 123000.0, - "includeInNetWorth": true, - "hideFromList": false, - "hideTransactionsFromReports": false, - "includeBalanceInNetWorth": true, - "includeInGoalBalance": false, - "dataProvider": "zillow", - "dataProviderAccountId": "testProviderAccountId", - "isManual": false, - "transactionsCount": 0, - "holdingsCount": 0, - "manualInvestmentsTrackingMethod": null, - "order": 2, - "icon": "home", - "logoUrl": "data:image/png;base64,base64Nonce", - "type": { - "name": "real_estate", - "display": "Real Estate", - "__typename": "AccountType" - }, - "subtype": { - "name": "primary_home", - "display": "Primary Home", - "__typename": "AccountSubtype" - }, - "credential": null, - "institution": { - "id": "800000000", - "name": "Zillow", - "logo": "base64Nonce", - "primaryColor": "#006AFF", - "url": "https://www.zillow.com/", - "__typename": "Institution" - }, - "__typename": "Account" - }, - { - "id": "90000000022", - "displayName": "401.k", - "syncDisabled": false, 
- "deactivatedAt": null, - "isHidden": false, - "isAsset": true, - "mask": null, - "createdAt": "2021-10-15T01:41:54.593239+00:00", - "updatedAt": "2024-02-17T08:13:10.554296+00:00", - "displayLastUpdatedAt": "2024-02-17T08:13:10.554029+00:00", - "currentBalance": 100000.35, - "displayBalance": 100000.35, - "includeInNetWorth": true, - "hideFromList": false, - "hideTransactionsFromReports": false, - "includeBalanceInNetWorth": true, - "includeInGoalBalance": false, - "dataProvider": "finicity", - "dataProviderAccountId": "testProviderAccountId", - "isManual": false, - "transactionsCount": 0, - "holdingsCount": 100, - "manualInvestmentsTrackingMethod": null, - "order": 3, - "icon": "trending-up", - "logoUrl": "data:image/png;base64,base64Nonce", - "type": { - "name": "brokerage", - "display": "Investments", - "__typename": "AccountType" - }, - "subtype": { - "name": "st_401k", - "display": "401k", - "__typename": "AccountSubtype" - }, - "credential": { - "id": "90000000024", - "updateRequired": false, - "disconnectedFromDataProviderAt": null, - "dataProvider": "FINICITY", - "institution": { - "id": "70000000026", - "plaidInstitutionId": "ins_03", - "name": "Rando Employer Investments", - "status": "HEALTHY", - "logo": "base64Nonce", - "__typename": "Institution" - }, - "__typename": "Credential" - }, - "institution": { - "id": "70000000028", - "name": "Rando Employer Investments", - "logo": "base64Nonce", - "primaryColor": "#408800", - "url": "https://rando-employer.investments/", - "__typename": "Institution" - }, - "__typename": "Account" - }, - { - "id": "90000000030", - "displayName": "Mortgage", - "syncDisabled": true, - "deactivatedAt": "2023-08-15", - "isHidden": true, - "isAsset": false, - "mask": "0973", - "createdAt": "2021-10-15T01:45:25.244570+00:00", - "updatedAt": "2023-08-16T01:41:36.115588+00:00", - "displayLastUpdatedAt": "2023-08-15T18:11:09.134874+00:00", - "currentBalance": 0.0, - "displayBalance": -0.0, - "includeInNetWorth": true, - "hideFromList": false, - "hideTransactionsFromReports": false, - "includeBalanceInNetWorth": false, - "includeInGoalBalance": false, - "dataProvider": "plaid", - "dataProviderAccountId": "testProviderAccountId", - "isManual": false, - "transactionsCount": 0, - "holdingsCount": 0, - "manualInvestmentsTrackingMethod": null, - "order": 1, - "icon": "home", - "logoUrl": "data:image/png;base64,base64Nonce", - "type": { - "name": "loan", - "display": "Loans", - "__typename": "AccountType" - }, - "subtype": { - "name": "mortgage", - "display": "Mortgage", - "__typename": "AccountSubtype" - }, - "credential": { - "id": "90000000032", - "updateRequired": false, - "disconnectedFromDataProviderAt": null, - "dataProvider": "PLAID", - "institution": { - "id": "70000000034", - "plaidInstitutionId": "ins_04", - "name": "Rando Mortgage", - "status": "HEALTHY", - "logo": "base64Nonce", - "__typename": "Institution" - }, - "__typename": "Credential" - }, - "institution": { - "id": "70000000036", - "name": "Rando Mortgage", - "logo": "base64Nonce", - "primaryColor": "#095aa6", - "url": "https://rando.mortgage/", - "__typename": "Institution" - }, - "__typename": "Account" - }, - { - "id": "186321412999033223", - "displayName": "Wallet", - "syncDisabled": false, - "deactivatedAt": null, - "isHidden": false, - "isAsset": true, - "mask": null, - "createdAt": "2024-08-16T14:22:10.440514+00:00", - "updatedAt": "2024-08-16T14:22:10.512731+00:00", - "displayLastUpdatedAt": "2024-08-16T14:22:10.512731+00:00", - "currentBalance": 20.0, - "displayBalance": 20.0, - 
"includeInNetWorth": true, - "hideFromList": false, - "hideTransactionsFromReports": false, - "includeBalanceInNetWorth": true, - "includeInGoalBalance": true, - "dataProvider": "", - "dataProviderAccountId": null, - "isManual": true, - "transactionsCount": 0, - "holdingsCount": 0, - "manualInvestmentsTrackingMethod": null, - "order": 14, - "logoUrl": null, - "type": { - "name": "depository", - "display": "Cash", - "__typename": "AccountType" - }, - "subtype": { - "name": "prepaid", - "display": "Prepaid", - "__typename": "AccountSubtype" - }, - "credential": null, - "institution": null, - "__typename": "Account" - } - ], - "householdPreferences": { - "id": "900000000022", - "accountGroupOrder": [], - "__typename": "HouseholdPreferences" - } -} diff --git a/tests/components/monarch_money/fixtures/get_cashflow_summary.json b/tests/components/monarch_money/fixtures/get_cashflow_summary.json deleted file mode 100644 index a223782469a..00000000000 --- a/tests/components/monarch_money/fixtures/get_cashflow_summary.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "summary": [ - { - "summary": { - "sumIncome": 15000.0, - "sumExpense": -9000.0, - "savings": 6000.0, - "savingsRate": 0.4, - "__typename": "TransactionsSummary" - }, - "__typename": "AggregateData" - } - ] -} diff --git a/tests/components/monarch_money/fixtures/get_subscription_details.json b/tests/components/monarch_money/fixtures/get_subscription_details.json deleted file mode 100644 index 16f90a2ca38..00000000000 --- a/tests/components/monarch_money/fixtures/get_subscription_details.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "subscription": { - "id": "222260252323873333", - "paymentSource": "STRIPE", - "referralCode": "go3dpvrdmw", - "isOnFreeTrial": true, - "hasPremiumEntitlement": true, - "__typename": "HouseholdSubscription" - } -} diff --git a/tests/components/monarch_money/snapshots/test_sensor.ambr b/tests/components/monarch_money/snapshots/test_sensor.ambr deleted file mode 100644 index cf7e0cb7b2f..00000000000 --- a/tests/components/monarch_money/snapshots/test_sensor.ambr +++ /dev/null @@ -1,1112 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[sensor.cashflow_expense_year_to_date-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.cashflow_expense_year_to_date', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Expense year to date', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'sum_expense', - 'unique_id': '222260252323873333_cashflow_sum_expense', - 'unit_of_measurement': '$', - }) -# --- -# name: test_all_entities[sensor.cashflow_expense_year_to_date-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'monetary', - 'friendly_name': 'Cashflow Expense year to date', - 'state_class': , - 'unit_of_measurement': '$', - }), - 'context': , - 'entity_id': 'sensor.cashflow_expense_year_to_date', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '-9000.0', - }) -# --- -# name: test_all_entities[sensor.cashflow_income_year_to_date-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.cashflow_income_year_to_date', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Income year to date', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'sum_income', - 'unique_id': '222260252323873333_cashflow_sum_income', - 'unit_of_measurement': '$', - }) -# --- -# name: test_all_entities[sensor.cashflow_income_year_to_date-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'monetary', - 'friendly_name': 'Cashflow Income year to date', - 'state_class': , - 'unit_of_measurement': '$', - }), - 'context': , - 'entity_id': 'sensor.cashflow_income_year_to_date', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15000.0', - }) -# --- -# name: test_all_entities[sensor.cashflow_savings_rate-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.cashflow_savings_rate', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Savings rate', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'savings_rate', - 'unique_id': '222260252323873333_cashflow_savings_rate', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.cashflow_savings_rate-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Cashflow Savings rate', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.cashflow_savings_rate', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40.0', - }) -# --- -# name: test_all_entities[sensor.cashflow_savings_year_to_date-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.cashflow_savings_year_to_date', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Savings year to date', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'savings', - 'unique_id': '222260252323873333_cashflow_savings', - 'unit_of_measurement': '$', - }) -# --- -# name: test_all_entities[sensor.cashflow_savings_year_to_date-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'monetary', - 'friendly_name': 'Cashflow Savings year to date', - 'state_class': , - 'unit_of_measurement': '$', - }), - 'context': , - 'entity_id': 'sensor.cashflow_savings_year_to_date', - 'last_changed': , - 'last_reported': , - 'last_updated': 
, - 'state': '6000.0', - }) -# --- -# name: test_all_entities[sensor.manual_entry_wallet_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.manual_entry_wallet_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Balance', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': '222260252323873333_186321412999033223_balance', - 'unit_of_measurement': '$', - }) -# --- -# name: test_all_entities[sensor.manual_entry_wallet_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via Manual entry', - 'device_class': 'monetary', - 'friendly_name': 'Manual entry Wallet Balance', - 'state_class': , - 'unit_of_measurement': '$', - }), - 'context': , - 'entity_id': 'sensor.manual_entry_wallet_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20.0', - }) -# --- -# name: test_all_entities[sensor.manual_entry_wallet_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.manual_entry_wallet_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': '222260252323873333_186321412999033223_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.manual_entry_wallet_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via Manual entry', - 'device_class': 'timestamp', - 'friendly_name': 'Manual entry Wallet Data age', - }), - 'context': , - 'entity_id': 'sensor.manual_entry_wallet_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-08-16T14:22:10+00:00', - }) -# --- -# name: test_all_entities[sensor.rando_bank_checking_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.rando_bank_checking_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Balance', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': '222260252323873333_900000002_balance', - 'unit_of_measurement': '$', - }) -# --- -# name: test_all_entities[sensor.rando_bank_checking_balance-state] - StateSnapshot({ - 'attributes': 
ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via PLAID', - 'device_class': 'monetary', - 'entity_picture': 'data:image/png;base64,base64Nonce', - 'friendly_name': 'Rando Bank Checking Balance', - 'state_class': , - 'unit_of_measurement': '$', - }), - 'context': , - 'entity_id': 'sensor.rando_bank_checking_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1000.02', - }) -# --- -# name: test_all_entities[sensor.rando_bank_checking_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.rando_bank_checking_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': '222260252323873333_900000002_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.rando_bank_checking_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via PLAID', - 'device_class': 'timestamp', - 'friendly_name': 'Rando Bank Checking Data age', - }), - 'context': , - 'entity_id': 'sensor.rando_bank_checking_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-02-17T11:21:05+00:00', - }) -# --- -# name: test_all_entities[sensor.rando_brokerage_brokerage_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.rando_brokerage_brokerage_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Balance', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': '222260252323873333_900000000_balance', - 'unit_of_measurement': '$', - }) -# --- -# name: test_all_entities[sensor.rando_brokerage_brokerage_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via PLAID', - 'device_class': 'monetary', - 'entity_picture': 'base64Nonce', - 'friendly_name': 'Rando Brokerage Brokerage Balance', - 'state_class': , - 'unit_of_measurement': '$', - }), - 'context': , - 'entity_id': 'sensor.rando_brokerage_brokerage_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1000.5', - }) -# --- -# name: test_all_entities[sensor.rando_brokerage_brokerage_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.rando_brokerage_brokerage_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 
}), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': '222260252323873333_900000000_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.rando_brokerage_brokerage_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via PLAID', - 'device_class': 'timestamp', - 'friendly_name': 'Rando Brokerage Brokerage Data age', - }), - 'context': , - 'entity_id': 'sensor.rando_brokerage_brokerage_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2022-05-26T00:56:41+00:00', - }) -# --- -# name: test_all_entities[sensor.rando_credit_credit_card_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.rando_credit_credit_card_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Balance', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': '222260252323873333_9000000007_balance', - 'unit_of_measurement': '$', - }) -# --- -# name: test_all_entities[sensor.rando_credit_credit_card_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via FINICITY', - 'device_class': 'monetary', - 'entity_picture': 'data:image/png;base64,base64Nonce', - 'friendly_name': 'Rando Credit Credit Card Balance', - 'state_class': , - 'unit_of_measurement': '$', - }), - 'context': , - 'entity_id': 'sensor.rando_credit_credit_card_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '-200.0', - }) -# --- -# name: test_all_entities[sensor.rando_credit_credit_card_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.rando_credit_credit_card_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': '222260252323873333_9000000007_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.rando_credit_credit_card_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via FINICITY', - 'device_class': 'timestamp', - 'friendly_name': 'Rando Credit Credit Card Data age', - }), - 'context': , - 'entity_id': 'sensor.rando_credit_credit_card_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2022-12-10T18:17:06+00:00', - }) -# --- -# name: test_all_entities[sensor.rando_employer_investments_401_k_balance-entry] - EntityRegistryEntrySnapshot({ - 
'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.rando_employer_investments_401_k_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Balance', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': '222260252323873333_90000000022_balance', - 'unit_of_measurement': '$', - }) -# --- -# name: test_all_entities[sensor.rando_employer_investments_401_k_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via FINICITY', - 'device_class': 'monetary', - 'entity_picture': 'data:image/png;base64,base64Nonce', - 'friendly_name': 'Rando Employer Investments 401.k Balance', - 'state_class': , - 'unit_of_measurement': '$', - }), - 'context': , - 'entity_id': 'sensor.rando_employer_investments_401_k_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100000.35', - }) -# --- -# name: test_all_entities[sensor.rando_employer_investments_401_k_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.rando_employer_investments_401_k_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': '222260252323873333_90000000022_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.rando_employer_investments_401_k_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via FINICITY', - 'device_class': 'timestamp', - 'friendly_name': 'Rando Employer Investments 401.k Data age', - }), - 'context': , - 'entity_id': 'sensor.rando_employer_investments_401_k_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-02-17T08:13:10+00:00', - }) -# --- -# name: test_all_entities[sensor.rando_investments_roth_ira_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.rando_investments_roth_ira_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Balance', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': '222260252323873333_900000000012_balance', - 'unit_of_measurement': '$', - }) -# --- -# name: test_all_entities[sensor.rando_investments_roth_ira_balance-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via PLAID', - 'device_class': 'monetary', - 'entity_picture': 'data:image/png;base64,base64Nonce', - 'friendly_name': 'Rando Investments Roth IRA Balance', - 'state_class': , - 'unit_of_measurement': '$', - }), - 'context': , - 'entity_id': 'sensor.rando_investments_roth_ira_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10000.43', - }) -# --- -# name: test_all_entities[sensor.rando_investments_roth_ira_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.rando_investments_roth_ira_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': '222260252323873333_900000000012_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.rando_investments_roth_ira_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via PLAID', - 'device_class': 'timestamp', - 'friendly_name': 'Rando Investments Roth IRA Data age', - }), - 'context': , - 'entity_id': 'sensor.rando_investments_roth_ira_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-02-17T13:32:21+00:00', - }) -# --- -# name: test_all_entities[sensor.rando_mortgage_mortgage_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.rando_mortgage_mortgage_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Balance', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': '222260252323873333_90000000030_balance', - 'unit_of_measurement': '$', - }) -# --- -# name: test_all_entities[sensor.rando_mortgage_mortgage_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via PLAID', - 'device_class': 'monetary', - 'entity_picture': 'data:image/png;base64,base64Nonce', - 'friendly_name': 'Rando Mortgage Mortgage Balance', - 'state_class': , - 'unit_of_measurement': '$', - }), - 'context': , - 'entity_id': 'sensor.rando_mortgage_mortgage_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_all_entities[sensor.rando_mortgage_mortgage_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.rando_mortgage_mortgage_data_age', - 'has_entity_name': True, - 'hidden_by': 
None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': '222260252323873333_90000000030_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.rando_mortgage_mortgage_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via PLAID', - 'device_class': 'timestamp', - 'friendly_name': 'Rando Mortgage Mortgage Data age', - }), - 'context': , - 'entity_id': 'sensor.rando_mortgage_mortgage_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2023-08-16T01:41:36+00:00', - }) -# --- -# name: test_all_entities[sensor.vinaudit_2050_toyota_rav8_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.vinaudit_2050_toyota_rav8_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': '222260252323873333_121212192626186051_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.vinaudit_2050_toyota_rav8_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via Manual entry', - 'device_class': 'timestamp', - 'friendly_name': 'VinAudit 2050 Toyota RAV8 Data age', - }), - 'context': , - 'entity_id': 'sensor.vinaudit_2050_toyota_rav8_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-08-16T17:37:21+00:00', - }) -# --- -# name: test_all_entities[sensor.vinaudit_2050_toyota_rav8_value-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.vinaudit_2050_toyota_rav8_value', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Value', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'value', - 'unique_id': '222260252323873333_121212192626186051_value', - 'unit_of_measurement': '$', - }) -# --- -# name: test_all_entities[sensor.vinaudit_2050_toyota_rav8_value-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via Manual entry', - 'device_class': 'monetary', - 'entity_picture': 'https://api.monarchmoney.com/cdn-cgi/image/width=128/images/institution/159427559853802644', - 'friendly_name': 'VinAudit 2050 Toyota RAV8 Value', - 'state_class': , - 'unit_of_measurement': '$', - }), - 'context': , - 'entity_id': 'sensor.vinaudit_2050_toyota_rav8_value', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': '11075.58', - }) -# --- -# name: test_all_entities[sensor.zillow_house_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.zillow_house_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Balance', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': '222260252323873333_90000000020_balance', - 'unit_of_measurement': '$', - }) -# --- -# name: test_all_entities[sensor.zillow_house_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via Manual entry', - 'device_class': 'monetary', - 'entity_picture': 'data:image/png;base64,base64Nonce', - 'friendly_name': 'Zillow House Balance', - 'state_class': , - 'unit_of_measurement': '$', - }), - 'context': , - 'entity_id': 'sensor.zillow_house_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '123000.0', - }) -# --- -# name: test_all_entities[sensor.zillow_house_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.zillow_house_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'monarch_money', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': '222260252323873333_90000000020_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.zillow_house_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Monarch Money API via Manual entry', - 'device_class': 'timestamp', - 'friendly_name': 'Zillow House Data age', - }), - 'context': , - 'entity_id': 'sensor.zillow_house_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-02-12T09:00:25+00:00', - }) -# --- diff --git a/tests/components/monarch_money/test_config_flow.py b/tests/components/monarch_money/test_config_flow.py deleted file mode 100644 index 03f0df0c526..00000000000 --- a/tests/components/monarch_money/test_config_flow.py +++ /dev/null @@ -1,166 +0,0 @@ -"""Test the Monarch Money config flow.""" - -from unittest.mock import AsyncMock - -from monarchmoney import LoginFailedException, RequireMFAException - -from homeassistant.components.monarch_money.const import CONF_MFA_CODE, DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_TOKEN -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - - -async def test_form_simple( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_config_api: AsyncMock -) -> None: - """Test simple case (no MFA / no errors).""" - result = await 
hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Monarch Money" - assert result["data"] == { - CONF_TOKEN: "mocked_token", - } - assert result["result"].unique_id == "222260252323873333" - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_add_duplicate_entry( - hass: HomeAssistant, - mock_config_entry, - mock_setup_entry: AsyncMock, - mock_config_api: AsyncMock, -) -> None: - """Test a duplicate error config flow.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_form_invalid_auth( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_config_api: AsyncMock -) -> None: - """Test config flow with a login error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - # Change the login mock to raise an MFA required error - mock_config_api.return_value.login.side_effect = LoginFailedException( - "Invalid Auth" - ) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "invalid_auth"} - - mock_config_api.return_value.login.side_effect = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Monarch Money" - assert result["data"] == { - CONF_TOKEN: "mocked_token", - } - assert result["context"]["unique_id"] == "222260252323873333" - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_mfa( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_config_api: AsyncMock -) -> None: - """Test MFA enabled on account configuration.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - # Change the login mock to raise an MFA required error - mock_config_api.return_value.login.side_effect = RequireMFAException("mfa_required") - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "mfa_required"} - assert result["step_id"] == "user" - - # Add a bad MFA Code response - mock_config_api.return_value.multi_factor_authenticate.side_effect = KeyError - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_MFA_CODE: 
"123456", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "bad_mfa"} - assert result["step_id"] == "user" - - # Use a good MFA Code - Clear mock - mock_config_api.return_value.multi_factor_authenticate.side_effect = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_MFA_CODE: "123456", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Monarch Money" - assert result["data"] == { - CONF_TOKEN: "mocked_token", - } - assert result["result"].unique_id == "222260252323873333" - - assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/monarch_money/test_sensor.py b/tests/components/monarch_money/test_sensor.py deleted file mode 100644 index aac1eaefb2d..00000000000 --- a/tests/components/monarch_money/test_sensor.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Test sensors.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_config_api: AsyncMock, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.monarch_money.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/monoprice/test_media_player.py b/tests/components/monoprice/test_media_player.py index 7d05003153d..f7d88692cf5 100644 --- a/tests/components/monoprice/test_media_player.py +++ b/tests/components/monoprice/test_media_player.py @@ -1,7 +1,6 @@ """The tests for Monoprice Media player platform.""" from collections import defaultdict -from typing import Any from unittest.mock import patch from serial import SerialException @@ -59,7 +58,7 @@ class AttrDict(dict): class MockMonoprice: """Mock for pymonoprice object.""" - def __init__(self) -> None: + def __init__(self): """Init mock object.""" self.zones = defaultdict( lambda: AttrDict(power=True, volume=0, mute=True, source=1) @@ -106,7 +105,7 @@ async def test_cannot_connect(hass: HomeAssistant) -> None: assert hass.states.get(ZONE_1_ID) is None -async def _setup_monoprice(hass: HomeAssistant, monoprice: MockMonoprice) -> None: +async def _setup_monoprice(hass, monoprice): with patch( "homeassistant.components.monoprice.get_monoprice", new=lambda *a: monoprice, @@ -117,9 +116,7 @@ async def _setup_monoprice(hass: HomeAssistant, monoprice: MockMonoprice) -> Non await hass.async_block_till_done() -async def _setup_monoprice_with_options( - hass: HomeAssistant, monoprice: MockMonoprice -) -> None: +async def _setup_monoprice_with_options(hass, monoprice): with patch( "homeassistant.components.monoprice.get_monoprice", new=lambda *a: monoprice, @@ -132,9 +129,7 @@ async def _setup_monoprice_with_options( await hass.async_block_till_done() -async def _setup_monoprice_not_first_run( - hass: HomeAssistant, monoprice: MockMonoprice -) -> None: +async def _setup_monoprice_not_first_run(hass, monoprice): with patch( "homeassistant.components.monoprice.get_monoprice", new=lambda *a: monoprice, @@ -146,17 +141,19 @@ async def 
_setup_monoprice_not_first_run( await hass.async_block_till_done() -async def _call_media_player_service( - hass: HomeAssistant, name: str, data: dict[str, Any] -) -> None: +async def _call_media_player_service(hass, name, data): await hass.services.async_call( MEDIA_PLAYER_DOMAIN, name, service_data=data, blocking=True ) -async def _call_monoprice_service( - hass: HomeAssistant, name: str, data: dict[str, Any] -) -> None: +async def _call_homeassistant_service(hass, name, data): + await hass.services.async_call( + "homeassistant", name, service_data=data, blocking=True + ) + + +async def _call_monoprice_service(hass, name, data): await hass.services.async_call(DOMAIN, name, service_data=data, blocking=True) diff --git a/tests/components/monzo/test_config_flow.py b/tests/components/monzo/test_config_flow.py index 7630acfc1cf..b7d0de9cdc3 100644 --- a/tests/components/monzo/test_config_flow.py +++ b/tests/components/monzo/test_config_flow.py @@ -1,7 +1,10 @@ """Tests for config flow.""" +from datetime import timedelta from unittest.mock import AsyncMock, patch +from freezegun.api import FrozenDateTimeFactory +from monzopy import AuthorisationExpiredError import pytest from homeassistant.components.monzo.application_credentials import ( @@ -9,7 +12,7 @@ from homeassistant.components.monzo.application_credentials import ( OAUTH2_TOKEN, ) from homeassistant.components.monzo.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -17,7 +20,7 @@ from homeassistant.helpers import config_entry_oauth2_flow from . 
import setup_integration from .conftest import CLIENT_ID, USER_ID -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @@ -151,7 +154,14 @@ async def test_config_reauth_profile( """Test reauth an existing profile reauthenticates the config entry.""" await setup_integration(hass, polling_config_entry) - result = await polling_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": polling_config_entry.entry_id, + }, + data=polling_config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -213,7 +223,14 @@ async def test_config_reauth_wrong_account( """Test reauth with wrong account.""" await setup_integration(hass, polling_config_entry) - result = await polling_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": polling_config_entry.entry_id, + }, + data=polling_config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -252,3 +269,25 @@ async def test_config_reauth_wrong_account( assert result assert result["type"] is FlowResultType.ABORT assert result["reason"] == "wrong_account" + + +async def test_api_can_trigger_reauth( + hass: HomeAssistant, + polling_config_entry: MockConfigEntry, + monzo: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test reauth an existing profile reauthenticates the config entry.""" + await setup_integration(hass, polling_config_entry) + + monzo.user_account.accounts.side_effect = AuthorisationExpiredError() + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress() + + assert len(flows) == 1 + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + assert flow["context"]["source"] == SOURCE_REAUTH diff --git a/tests/components/monzo/test_init.py b/tests/components/monzo/test_init.py deleted file mode 100644 index b24fb6ff86e..00000000000 --- a/tests/components/monzo/test_init.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Tests for component initialisation.""" - -from datetime import timedelta -from unittest.mock import AsyncMock - -from freezegun.api import FrozenDateTimeFactory -from monzopy import AuthorisationExpiredError - -from homeassistant.components.monzo.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH -from homeassistant.core import HomeAssistant - -from . 
import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed - - -async def test_api_can_trigger_reauth( - hass: HomeAssistant, - polling_config_entry: MockConfigEntry, - monzo: AsyncMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test reauth an existing profile reauthenticates the config entry.""" - await setup_integration(hass, polling_config_entry) - - monzo.user_account.accounts.side_effect = AuthorisationExpiredError() - freezer.tick(timedelta(minutes=10)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - flows = hass.config_entries.flow.async_progress() - - assert len(flows) == 1 - flow = flows[0] - assert flow["step_id"] == "reauth_confirm" - assert flow["handler"] == DOMAIN - assert flow["context"]["source"] == SOURCE_REAUTH diff --git a/tests/components/moon/conftest.py b/tests/components/moon/conftest.py index 3cf0eb1afc3..6fa54fcb603 100644 --- a/tests/components/moon/conftest.py +++ b/tests/components/moon/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import patch import pytest +from typing_extensions import Generator from homeassistant.components.moon.const import DOMAIN diff --git a/tests/components/mopeka/test_config_flow.py b/tests/components/mopeka/test_config_flow.py index 7a341052f22..826fe8db2aa 100644 --- a/tests/components/mopeka/test_config_flow.py +++ b/tests/components/mopeka/test_config_flow.py @@ -2,10 +2,8 @@ from unittest.mock import patch -import voluptuous as vol - from homeassistant import config_entries -from homeassistant.components.mopeka.const import CONF_MEDIUM_TYPE, DOMAIN, MediumType +from homeassistant.components.mopeka.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -23,14 +21,13 @@ async def test_async_step_bluetooth_valid_device(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "bluetooth_confirm" - with patch("homeassistant.components.mopeka.async_setup_entry", return_value=True): result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_MEDIUM_TYPE: MediumType.PROPANE.value} + result["flow_id"], user_input={} ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Pro Plus EEFF" - assert result2["data"] == {CONF_MEDIUM_TYPE: MediumType.PROPANE.value} + assert result2["data"] == {} assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" @@ -74,10 +71,7 @@ async def test_async_step_user_with_found_devices(hass: HomeAssistant) -> None: ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Pro Plus EEFF" - assert CONF_MEDIUM_TYPE in result2["data"] - assert result2["data"][CONF_MEDIUM_TYPE] in [ - medium_type.value for medium_type in MediumType - ] + assert result2["data"] == {} assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" @@ -196,44 +190,8 @@ async def test_async_step_user_takes_precedence_over_discovery( ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Pro Plus EEFF" - assert CONF_MEDIUM_TYPE in result2["data"] - assert result2["data"][CONF_MEDIUM_TYPE] in [ - medium_type.value for medium_type in MediumType - ] + assert result2["data"] == {} assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" # Verify the original one was aborted assert not hass.config_entries.flow.async_progress(DOMAIN) - - -async def 
test_async_step_reconfigure_options(hass: HomeAssistant) -> None: - """Test reconfig options: change MediumType from air to fresh water.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="aa:bb:cc:dd:75:10", - title="TD40/TD200 7510", - data={CONF_MEDIUM_TYPE: MediumType.AIR.value}, - ) - entry.add_to_hass(hass) - - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.data[CONF_MEDIUM_TYPE] == MediumType.AIR.value - - result = await hass.config_entries.options.async_init(entry.entry_id) - assert result["type"] == FlowResultType.FORM - assert result["step_id"] == "init" - schema: vol.Schema = result["data_schema"] - medium_type_key = next( - iter(key for key in schema.schema if key == CONF_MEDIUM_TYPE) - ) - assert medium_type_key.default() == MediumType.AIR.value - - result2 = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={CONF_MEDIUM_TYPE: MediumType.FRESH_WATER.value}, - ) - assert result2["type"] == FlowResultType.CREATE_ENTRY - - # Verify the new configuration - assert entry.data[CONF_MEDIUM_TYPE] == MediumType.FRESH_WATER.value diff --git a/tests/components/motionblinds_ble/__init__.py b/tests/components/motionblinds_ble/__init__.py index e1caef9f51f..c2385555dbf 100644 --- a/tests/components/motionblinds_ble/__init__.py +++ b/tests/components/motionblinds_ble/__init__.py @@ -1,16 +1 @@ """Tests for the Motionblinds Bluetooth integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Mock a fully setup config entry.""" - - mock_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/motionblinds_ble/conftest.py b/tests/components/motionblinds_ble/conftest.py index ef4f2e1e15d..342e958eae4 100644 --- a/tests/components/motionblinds_ble/conftest.py +++ b/tests/components/motionblinds_ble/conftest.py @@ -1,148 +1,23 @@ """Setup the Motionblinds Bluetooth tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch -from motionblindsble.const import MotionBlindType import pytest +from typing_extensions import Generator -from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak -from homeassistant.components.motionblinds_ble.const import ( - CONF_BLIND_TYPE, - CONF_LOCAL_NAME, - CONF_MAC_CODE, - DOMAIN, -) -from homeassistant.const import CONF_ADDRESS - -from tests.common import MockConfigEntry -from tests.components.bluetooth import generate_advertisement_data, generate_ble_device +TEST_MAC = "abcd" +TEST_NAME = f"MOTION_{TEST_MAC.upper()}" +TEST_ADDRESS = "test_adress" -@pytest.fixture(autouse=True) -def mock_bluetooth(enable_bluetooth: None) -> None: - """Auto mock bluetooth.""" - - -@pytest.fixture -def address() -> str: - """Address fixture.""" - return "cc:cc:cc:cc:cc:cc" - - -@pytest.fixture -def mac_code(address: str) -> str: - """MAC code fixture.""" - return "".join(address.split(":")[-3:-1]).upper() - - -@pytest.fixture -def display_name(mac_code: str) -> str: - """Display name fixture.""" - return f"Motionblind {mac_code.upper()}" - - -@pytest.fixture -def name(display_name: str) -> str: - """Name fixture.""" - return display_name.lower().replace(" ", "_") - - -@pytest.fixture -def local_name(mac_code: str) -> str: - """Local name fixture.""" - 
return f"MOTION_{mac_code.upper()}" - - -@pytest.fixture -def blind_type() -> MotionBlindType: - """Blind type fixture.""" - return MotionBlindType.ROLLER - - -@pytest.fixture -def service_info(local_name: str, address: str) -> BluetoothServiceInfoBleak: - """Service info fixture.""" - return BluetoothServiceInfoBleak( - name=local_name, - address=address, - device=generate_ble_device( - address=address, - name=local_name, - ), - rssi=-61, - manufacturer_data={000: b"test"}, - service_data={ - "test": bytearray(b"0000"), - }, - service_uuids=[ - "test", - ], - source="local", - advertisement=generate_advertisement_data( - manufacturer_data={000: b"test"}, - service_uuids=["test"], - ), - connectable=True, - time=0, - tx_power=-127, - ) - - -@pytest.fixture -def mock_motion_device( - blind_type: MotionBlindType, display_name: str -) -> Generator[AsyncMock]: - """Mock a MotionDevice.""" - - with patch( - "homeassistant.components.motionblinds_ble.MotionDevice", - autospec=True, - ) as mock_device: - device = mock_device.return_value - device.ble_device = Mock() - device.display_name = display_name - device.blind_type = blind_type - yield device - - -@pytest.fixture -def mock_config_entry( - blind_type: MotionBlindType, address: str, display_name: str, mac_code: str -) -> MockConfigEntry: - """Config entry fixture.""" - return MockConfigEntry( - title="mock_title", - domain=DOMAIN, - entry_id="mock_entry_id", - unique_id=address, - data={ - CONF_ADDRESS: address, - CONF_LOCAL_NAME: display_name, - CONF_MAC_CODE: mac_code, - CONF_BLIND_TYPE: blind_type.name.lower(), - }, - ) - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.motionblinds_ble.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def motionblinds_ble_connect( - enable_bluetooth: None, local_name: str, address: str +@pytest.fixture(name="motionblinds_ble_connect", autouse=True) +def motion_blinds_connect_fixture( + enable_bluetooth: None, ) -> Generator[tuple[AsyncMock, Mock]]: """Mock motion blinds ble connection and entry setup.""" device = Mock() - device.name = local_name - device.address = address + device.name = TEST_NAME + device.address = TEST_ADDRESS bleak_scanner = AsyncMock() bleak_scanner.discover.return_value = [device] @@ -156,5 +31,9 @@ def motionblinds_ble_connect( "homeassistant.components.motionblinds_ble.config_flow.bluetooth.async_get_scanner", return_value=bleak_scanner, ), + patch( + "homeassistant.components.motionblinds_ble.async_setup_entry", + return_value=True, + ), ): yield bleak_scanner, device diff --git a/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr b/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr deleted file mode 100644 index 5b4b169c0fe..00000000000 --- a/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,36 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics - dict({ - 'device': dict({ - 'blind_type': 'Roller blind', - 'calibration_type': None, - 'connection_type': 'disconnected', - 'end_position_info': None, - 'position': None, - 'tilt': None, - 'timezone': None, - }), - 'entry': dict({ - 'data': dict({ - 'address': 'cc:cc:cc:cc:cc:cc', - 'blind_type': 'roller', - 'local_name': 'Motionblind CCCC', - 'mac_code': 'CCCC', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'motionblinds_ble', - 'entry_id': 'mock_entry_id', - 'minor_version': 
1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': '**REDACTED**', - 'unique_id': '**REDACTED**', - 'version': 1, - }), - }) -# --- diff --git a/tests/components/motionblinds_ble/test_button.py b/tests/components/motionblinds_ble/test_button.py deleted file mode 100644 index 9c27056c929..00000000000 --- a/tests/components/motionblinds_ble/test_button.py +++ /dev/null @@ -1,47 +0,0 @@ -"""Tests for Motionblinds BLE buttons.""" - -from unittest.mock import Mock - -import pytest - -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.components.motionblinds_ble.const import ( - ATTR_CONNECT, - ATTR_DISCONNECT, - ATTR_FAVORITE, -) -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry - - -@pytest.mark.usefixtures("motionblinds_ble_connect") -@pytest.mark.parametrize( - ("button"), - [ - ATTR_CONNECT, - ATTR_DISCONNECT, - ATTR_FAVORITE, - ], -) -async def test_button( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_motion_device: Mock, - name: str, - button: str, -) -> None: - """Test states of the button.""" - - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: f"button.{name}_{button}"}, - blocking=True, - ) - getattr(mock_motion_device, button).assert_called_once() diff --git a/tests/components/motionblinds_ble/test_config_flow.py b/tests/components/motionblinds_ble/test_config_flow.py index 05d3077ceb1..4cab12269dd 100644 --- a/tests/components/motionblinds_ble/test_config_flow.py +++ b/tests/components/motionblinds_ble/test_config_flow.py @@ -12,19 +12,41 @@ from homeassistant.const import CONF_ADDRESS from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .conftest import TEST_ADDRESS, TEST_MAC, TEST_NAME + from tests.common import MockConfigEntry +from tests.components.bluetooth import generate_advertisement_data, generate_ble_device + +TEST_BLIND_TYPE = MotionBlindType.ROLLER.name.lower() + +BLIND_SERVICE_INFO = BluetoothServiceInfoBleak( + name=TEST_NAME, + address=TEST_ADDRESS, + device=generate_ble_device( + address="cc:cc:cc:cc:cc:cc", + name=TEST_NAME, + ), + rssi=-61, + manufacturer_data={000: b"test"}, + service_data={ + "test": bytearray(b"0000"), + }, + service_uuids=[ + "test", + ], + source="local", + advertisement=generate_advertisement_data( + manufacturer_data={000: b"test"}, + service_uuids=["test"], + ), + connectable=True, + time=0, + tx_power=-127, +) @pytest.mark.usefixtures("motionblinds_ble_connect") -@pytest.mark.usefixtures("mock_setup_entry") -async def test_config_flow_manual_success( - hass: HomeAssistant, - blind_type: MotionBlindType, - mac_code: str, - address: str, - local_name: str, - display_name: str, -) -> None: +async def test_config_flow_manual_success(hass: HomeAssistant) -> None: """Successful flow manually initialized by the user.""" result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -35,36 +57,28 @@ async def test_config_flow_manual_success( result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: mac_code}, + {const.CONF_MAC_CODE: TEST_MAC}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" result = 
await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: blind_type.name.lower()}, + {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == display_name + assert result["title"] == f"Motionblind {TEST_MAC.upper()}" assert result["data"] == { - CONF_ADDRESS: address, - const.CONF_LOCAL_NAME: local_name, - const.CONF_MAC_CODE: mac_code, - const.CONF_BLIND_TYPE: blind_type.name.lower(), + CONF_ADDRESS: TEST_ADDRESS, + const.CONF_LOCAL_NAME: TEST_NAME, + const.CONF_MAC_CODE: TEST_MAC.upper(), + const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, } assert result["options"] == {} @pytest.mark.usefixtures("motionblinds_ble_connect") -@pytest.mark.usefixtures("mock_setup_entry") -async def test_config_flow_manual_error_invalid_mac( - hass: HomeAssistant, - mac_code: str, - address: str, - local_name: str, - display_name: str, - blind_type: MotionBlindType, -) -> None: +async def test_config_flow_manual_error_invalid_mac(hass: HomeAssistant) -> None: """Invalid MAC code error flow manually initialized by the user.""" # Initialize @@ -87,7 +101,7 @@ async def test_config_flow_manual_error_invalid_mac( # Recover result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: mac_code}, + {const.CONF_MAC_CODE: TEST_MAC}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" @@ -95,15 +109,15 @@ async def test_config_flow_manual_error_invalid_mac( # Finish flow result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: blind_type.name.lower()}, + {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == display_name + assert result["title"] == f"Motionblind {TEST_MAC.upper()}" assert result["data"] == { - CONF_ADDRESS: address, - const.CONF_LOCAL_NAME: local_name, - const.CONF_MAC_CODE: mac_code, - const.CONF_BLIND_TYPE: blind_type.name.lower(), + CONF_ADDRESS: TEST_ADDRESS, + const.CONF_LOCAL_NAME: TEST_NAME, + const.CONF_MAC_CODE: TEST_MAC.upper(), + const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, } assert result["options"] == {} @@ -111,7 +125,6 @@ async def test_config_flow_manual_error_invalid_mac( @pytest.mark.usefixtures("motionblinds_ble_connect") async def test_config_flow_manual_error_no_bluetooth_adapter( hass: HomeAssistant, - mac_code: str, ) -> None: """No Bluetooth adapter error flow manually initialized by the user.""" @@ -140,21 +153,14 @@ async def test_config_flow_manual_error_no_bluetooth_adapter( ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: mac_code}, + {const.CONF_MAC_CODE: TEST_MAC}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == const.ERROR_NO_BLUETOOTH_ADAPTER -@pytest.mark.usefixtures("mock_setup_entry") async def test_config_flow_manual_error_could_not_find_motor( - hass: HomeAssistant, - motionblinds_ble_connect: tuple[AsyncMock, Mock], - mac_code: str, - local_name: str, - display_name: str, - address: str, - blind_type: MotionBlindType, + hass: HomeAssistant, motionblinds_ble_connect: tuple[AsyncMock, Mock] ) -> None: """Could not find motor error flow manually initialized by the user.""" @@ -170,17 +176,17 @@ async def test_config_flow_manual_error_could_not_find_motor( motionblinds_ble_connect[1].name = "WRONG_NAME" result = await hass.config_entries.flow.async_configure( 
result["flow_id"], - {const.CONF_MAC_CODE: mac_code}, + {const.CONF_MAC_CODE: TEST_MAC}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {"base": const.ERROR_COULD_NOT_FIND_MOTOR} # Recover - motionblinds_ble_connect[1].name = local_name + motionblinds_ble_connect[1].name = TEST_NAME result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: mac_code}, + {const.CONF_MAC_CODE: TEST_MAC}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" @@ -188,23 +194,21 @@ async def test_config_flow_manual_error_could_not_find_motor( # Finish flow result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: blind_type.name.lower()}, + {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == display_name + assert result["title"] == f"Motionblind {TEST_MAC.upper()}" assert result["data"] == { - CONF_ADDRESS: address, - const.CONF_LOCAL_NAME: local_name, - const.CONF_MAC_CODE: mac_code, - const.CONF_BLIND_TYPE: blind_type.name.lower(), + CONF_ADDRESS: TEST_ADDRESS, + const.CONF_LOCAL_NAME: TEST_NAME, + const.CONF_MAC_CODE: TEST_MAC.upper(), + const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, } assert result["options"] == {} async def test_config_flow_manual_error_no_devices_found( - hass: HomeAssistant, - motionblinds_ble_connect: tuple[AsyncMock, Mock], - mac_code: str, + hass: HomeAssistant, motionblinds_ble_connect: tuple[AsyncMock, Mock] ) -> None: """No devices found error flow manually initialized by the user.""" @@ -220,27 +224,19 @@ async def test_config_flow_manual_error_no_devices_found( motionblinds_ble_connect[0].discover.return_value = [] result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: mac_code}, + {const.CONF_MAC_CODE: TEST_MAC}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == const.ERROR_NO_DEVICES_FOUND @pytest.mark.usefixtures("motionblinds_ble_connect") -async def test_config_flow_bluetooth_success( - hass: HomeAssistant, - mac_code: str, - service_info: BluetoothServiceInfoBleak, - address: str, - local_name: str, - display_name: str, - blind_type: MotionBlindType, -) -> None: +async def test_config_flow_bluetooth_success(hass: HomeAssistant) -> None: """Successful bluetooth discovery flow.""" result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_BLUETOOTH}, - data=service_info, + data=BLIND_SERVICE_INFO, ) assert result["type"] is FlowResultType.FORM @@ -248,32 +244,36 @@ async def test_config_flow_bluetooth_success( result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: blind_type.name.lower()}, + {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == display_name + assert result["title"] == f"Motionblind {TEST_MAC.upper()}" assert result["data"] == { - CONF_ADDRESS: address, - const.CONF_LOCAL_NAME: local_name, - const.CONF_MAC_CODE: mac_code, - const.CONF_BLIND_TYPE: blind_type.name.lower(), + CONF_ADDRESS: TEST_ADDRESS, + const.CONF_LOCAL_NAME: TEST_NAME, + const.CONF_MAC_CODE: TEST_MAC.upper(), + const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, } assert result["options"] == {} -@pytest.mark.usefixtures("mock_setup_entry") -async def test_options_flow( - hass: HomeAssistant, - 
mock_config_entry: MockConfigEntry, -) -> None: +async def test_options_flow(hass: HomeAssistant) -> None: """Test the options flow.""" - mock_config_entry.add_to_hass(hass) + entry = MockConfigEntry( + domain=const.DOMAIN, + unique_id="0123456789", + data={ + const.CONF_BLIND_TYPE: MotionBlindType.ROLLER, + }, + ) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) - await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + result = await hass.config_entries.options.async_init(entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" diff --git a/tests/components/motionblinds_ble/test_cover.py b/tests/components/motionblinds_ble/test_cover.py deleted file mode 100644 index 009bd1d0fa3..00000000000 --- a/tests/components/motionblinds_ble/test_cover.py +++ /dev/null @@ -1,124 +0,0 @@ -"""Tests for Motionblinds BLE covers.""" - -from typing import Any -from unittest.mock import Mock - -from motionblindsble.const import MotionBlindType, MotionRunningType -import pytest - -from homeassistant.components.cover import ( - ATTR_POSITION, - ATTR_TILT_POSITION, - DOMAIN as COVER_DOMAIN, - SERVICE_CLOSE_COVER, - SERVICE_CLOSE_COVER_TILT, - SERVICE_OPEN_COVER, - SERVICE_OPEN_COVER_TILT, - SERVICE_SET_COVER_POSITION, - SERVICE_SET_COVER_TILT_POSITION, - SERVICE_STOP_COVER, - SERVICE_STOP_COVER_TILT, - CoverState, -) -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry - - -@pytest.mark.usefixtures("motionblinds_ble_connect") -@pytest.mark.parametrize("blind_type", [MotionBlindType.VENETIAN]) -@pytest.mark.parametrize( - ("service", "method", "kwargs"), - [ - (SERVICE_OPEN_COVER, "open", {}), - (SERVICE_CLOSE_COVER, "close", {}), - (SERVICE_OPEN_COVER_TILT, "open_tilt", {}), - (SERVICE_CLOSE_COVER_TILT, "close_tilt", {}), - (SERVICE_SET_COVER_POSITION, "position", {ATTR_POSITION: 5}), - (SERVICE_SET_COVER_TILT_POSITION, "tilt", {ATTR_TILT_POSITION: 10}), - (SERVICE_STOP_COVER, "stop", {}), - (SERVICE_STOP_COVER_TILT, "stop", {}), - ], -) -async def test_cover_service( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_motion_device: Mock, - name: str, - service: str, - method: str, - kwargs: dict[str, Any], -) -> None: - """Test cover service.""" - - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - COVER_DOMAIN, - service, - {ATTR_ENTITY_ID: f"cover.{name}", **kwargs}, - blocking=True, - ) - getattr(mock_motion_device, method).assert_called_once() - - -@pytest.mark.usefixtures("motionblinds_ble_connect") -@pytest.mark.parametrize( - ("running_type", "state"), - [ - (None, "unknown"), - (MotionRunningType.STILL, "unknown"), - (MotionRunningType.OPENING, CoverState.OPENING), - (MotionRunningType.CLOSING, CoverState.CLOSING), - ], -) -async def test_cover_update_running( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_motion_device: Mock, - name: str, - running_type: str | None, - state: str, -) -> None: - """Test updating running status.""" - - await setup_integration(hass, mock_config_entry) - - async_update_running = mock_motion_device.register_running_callback.call_args[0][0] - - async_update_running(running_type) - assert hass.states.get(f"cover.{name}").state == state - - 
-@pytest.mark.usefixtures("motionblinds_ble_connect") -@pytest.mark.parametrize( - ("position", "tilt", "state"), - [ - (None, None, "unknown"), - (0, 0, CoverState.OPEN), - (50, 90, CoverState.OPEN), - (100, 180, CoverState.CLOSED), - ], -) -async def test_cover_update_position( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_motion_device: Mock, - name: str, - position: int, - tilt: int, - state: str, -) -> None: - """Test updating cover position and tilt.""" - - await setup_integration(hass, mock_config_entry) - - async_update_position = mock_motion_device.register_position_callback.call_args[0][ - 0 - ] - - async_update_position(position, tilt) - assert hass.states.get(f"cover.{name}").state == state diff --git a/tests/components/motionblinds_ble/test_diagnostics.py b/tests/components/motionblinds_ble/test_diagnostics.py deleted file mode 100644 index 878d2caa326..00000000000 --- a/tests/components/motionblinds_ble/test_diagnostics.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Test Motionblinds Bluetooth diagnostics.""" - -from syrupy import SnapshotAssertion -from syrupy.filters import props - -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test diagnostics.""" - - await setup_integration(hass, mock_config_entry) - - assert await get_diagnostics_for_config_entry( - hass, hass_client, mock_config_entry - ) == snapshot(exclude=props("created_at", "modified_at", "repr")) diff --git a/tests/components/motionblinds_ble/test_entity.py b/tests/components/motionblinds_ble/test_entity.py deleted file mode 100644 index 00369ba1e22..00000000000 --- a/tests/components/motionblinds_ble/test_entity.py +++ /dev/null @@ -1,55 +0,0 @@ -"""Tests for Motionblinds BLE entities.""" - -from unittest.mock import Mock - -import pytest - -from homeassistant.components.homeassistant import ( - DOMAIN as HA_DOMAIN, - SERVICE_UPDATE_ENTITY, -) -from homeassistant.components.motionblinds_ble.const import ( - ATTR_CONNECT, - ATTR_DISCONNECT, - ATTR_FAVORITE, - ATTR_SPEED, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -from . 
import setup_integration - -from tests.common import MockConfigEntry - - -@pytest.mark.usefixtures("motionblinds_ble_connect") -@pytest.mark.parametrize( - ("platform", "entity"), - [ - (Platform.BUTTON, ATTR_CONNECT), - (Platform.BUTTON, ATTR_DISCONNECT), - (Platform.BUTTON, ATTR_FAVORITE), - (Platform.SELECT, ATTR_SPEED), - ], -) -async def test_entity_update( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_motion_device: Mock, - name: str, - platform: Platform, - entity: str, -) -> None: - """Test updating entity using homeassistant.update_entity.""" - - await async_setup_component(hass, HA_DOMAIN, {}) - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - HA_DOMAIN, - SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: f"{platform.name.lower()}.{name}_{entity}"}, - blocking=True, - ) - getattr(mock_motion_device, "status_query").assert_called_once_with() diff --git a/tests/components/motionblinds_ble/test_init.py b/tests/components/motionblinds_ble/test_init.py deleted file mode 100644 index 09596bd8d5e..00000000000 --- a/tests/components/motionblinds_ble/test_init.py +++ /dev/null @@ -1,49 +0,0 @@ -"""Tests for Motionblinds BLE init.""" - -from unittest.mock import patch - -from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak -from homeassistant.components.motionblinds_ble import options_update_listener -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry -from tests.components.bluetooth import inject_bluetooth_service_info - - -async def test_options_update_listener( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, -) -> None: - """Test options_update_listener.""" - - await setup_integration(hass, mock_config_entry) - - with ( - patch( - "homeassistant.components.motionblinds_ble.MotionDevice.set_custom_disconnect_time" - ) as mock_set_custom_disconnect_time, - patch( - "homeassistant.components.motionblinds_ble.MotionDevice.set_permanent_connection" - ) as set_permanent_connection, - ): - await options_update_listener(hass, mock_config_entry) - mock_set_custom_disconnect_time.assert_called_once() - set_permanent_connection.assert_called_once() - - -async def test_update_ble_device( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - service_info: BluetoothServiceInfoBleak, -) -> None: - """Test async_update_ble_device.""" - - await setup_integration(hass, mock_config_entry) - - with patch( - "homeassistant.components.motionblinds_ble.MotionDevice.set_ble_device" - ) as mock_set_ble_device: - inject_bluetooth_service_info(hass, service_info) - mock_set_ble_device.assert_called_once() diff --git a/tests/components/motionblinds_ble/test_select.py b/tests/components/motionblinds_ble/test_select.py deleted file mode 100644 index 2bd1bb30ec2..00000000000 --- a/tests/components/motionblinds_ble/test_select.py +++ /dev/null @@ -1,76 +0,0 @@ -"""Tests for Motionblinds BLE selects.""" - -from collections.abc import Callable -from enum import Enum -from typing import Any -from unittest.mock import Mock - -from motionblindsble.const import MotionSpeedLevel -from motionblindsble.device import MotionDevice -import pytest - -from homeassistant.components.motionblinds_ble.const import ATTR_SPEED -from homeassistant.components.select import ( - DOMAIN as SELECT_DOMAIN, - SERVICE_SELECT_OPTION, -) -from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION -from homeassistant.core import HomeAssistant - -from . 
import setup_integration - -from tests.common import MockConfigEntry - - -@pytest.mark.parametrize(("select", "args"), [(ATTR_SPEED, MotionSpeedLevel.HIGH)]) -async def test_select( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_motion_device: Mock, - name: str, - select: str, - args: Any, -) -> None: - """Test select.""" - - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: f"select.{name}_{select}", - ATTR_OPTION: MotionSpeedLevel.HIGH.value, - }, - blocking=True, - ) - getattr(mock_motion_device, select).assert_called_once_with(args) - - -@pytest.mark.parametrize( - ("select", "register_callback", "value"), - [ - ( - ATTR_SPEED, - lambda device: device.register_speed_callback, - MotionSpeedLevel.HIGH, - ) - ], -) -async def test_select_update( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_motion_device: Mock, - name: str, - select: str, - register_callback: Callable[[MotionDevice], Callable[..., None]], - value: type[Enum], -) -> None: - """Test select state update.""" - - await setup_integration(hass, mock_config_entry) - - update_func = register_callback(mock_motion_device).call_args[0][0] - - update_func(value) - assert hass.states.get(f"select.{name}_{select}").state == str(value.value) diff --git a/tests/components/motionblinds_ble/test_sensor.py b/tests/components/motionblinds_ble/test_sensor.py deleted file mode 100644 index c2468b876ae..00000000000 --- a/tests/components/motionblinds_ble/test_sensor.py +++ /dev/null @@ -1,108 +0,0 @@ -"""Tests for Motionblinds BLE sensors.""" - -from collections.abc import Callable -from typing import Any -from unittest.mock import Mock - -from motionblindsble.const import ( - MotionBlindType, - MotionCalibrationType, - MotionConnectionType, -) -from motionblindsble.device import MotionDevice -import pytest - -from homeassistant.components.motionblinds_ble.const import ( - ATTR_BATTERY, - ATTR_SIGNAL_STRENGTH, -) -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.core import HomeAssistant - -from . 
import setup_integration - -from tests.common import MockConfigEntry - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.parametrize("blind_type", [MotionBlindType.CURTAIN]) -@pytest.mark.parametrize( - ("sensor", "register_callback", "initial_value", "args", "expected_value"), - [ - ( - "connection_status", - lambda device: device.register_connection_callback, - MotionConnectionType.DISCONNECTED.value, - [MotionConnectionType.CONNECTING], - MotionConnectionType.CONNECTING.value, - ), - ( - ATTR_BATTERY, - lambda device: device.register_battery_callback, - "unknown", - [25, True, False], - "25", - ), - ( # Battery unknown - ATTR_BATTERY, - lambda device: device.register_battery_callback, - "unknown", - [None, False, False], - "unknown", - ), - ( # Wired - ATTR_BATTERY, - lambda device: device.register_battery_callback, - "unknown", - [255, False, True], - "255", - ), - ( # Almost full - ATTR_BATTERY, - lambda device: device.register_battery_callback, - "unknown", - [99, False, False], - "99", - ), - ( # Almost empty - ATTR_BATTERY, - lambda device: device.register_battery_callback, - "unknown", - [1, False, False], - "1", - ), - ( - "calibration_status", - lambda device: device.register_calibration_callback, - "unknown", - [MotionCalibrationType.CALIBRATING], - MotionCalibrationType.CALIBRATING.value, - ), - ( - ATTR_SIGNAL_STRENGTH, - lambda device: device.register_signal_strength_callback, - "unknown", - [-50], - "-50", - ), - ], -) -async def test_sensor( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_motion_device: Mock, - name: str, - sensor: str, - register_callback: Callable[[MotionDevice], Callable[..., None]], - initial_value: str, - args: list[Any], - expected_value: str, -) -> None: - """Test sensors.""" - - await setup_integration(hass, mock_config_entry) - - assert hass.states.get(f"{SENSOR_DOMAIN}.{name}_{sensor}").state == initial_value - update_func = register_callback(mock_motion_device).call_args[0][0] - update_func(*args) - assert hass.states.get(f"{SENSOR_DOMAIN}.{name}_{sensor}").state == expected_value diff --git a/tests/components/motioneye/__init__.py b/tests/components/motioneye/__init__.py index 842d862a222..183d1b3e6bf 100644 --- a/tests/components/motioneye/__init__.py +++ b/tests/components/motioneye/__init__.py @@ -7,12 +7,12 @@ from unittest.mock import AsyncMock, Mock, patch from motioneye_client.const import DEFAULT_PORT +from homeassistant.components.motioneye import get_motioneye_entity_unique_id from homeassistant.components.motioneye.const import DOMAIN -from homeassistant.components.motioneye.entity import get_motioneye_entity_unique_id +from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_URL from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry diff --git a/tests/components/motioneye/test_camera.py b/tests/components/motioneye/test_camera.py index 8ef58cc968d..0f3a7d6f904 100644 --- a/tests/components/motioneye/test_camera.py +++ b/tests/components/motioneye/test_camera.py @@ -3,6 +3,7 @@ from asyncio import AbstractEventLoop from collections.abc import Callable import copy +from typing import cast from unittest.mock import AsyncMock, Mock, call from aiohttp import web @@ -45,7 +46,6 @@ from homeassistant.const import ATTR_DEVICE_ID, ATTR_ENTITY_ID, 
CONF_URL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.util.aiohttp import MockRequest import homeassistant.util.dt as dt_util from . import ( @@ -231,7 +231,7 @@ async def test_get_still_image_from_camera( ) -> None: """Test getting a still image.""" - image_handler = AsyncMock(return_value=web.Response(body="")) + image_handler = AsyncMock(return_value="") app = web.Application() app.add_routes( @@ -273,8 +273,7 @@ async def test_get_stream_from_camera( ) -> None: """Test getting a stream.""" - stream_handler = AsyncMock(return_value=web.Response(body="")) - + stream_handler = AsyncMock(return_value="") app = web.Application() app.add_routes([web.get("/", stream_handler)]) stream_server = await aiohttp_server(app) @@ -298,7 +297,12 @@ async def test_get_stream_from_camera( ) await hass.async_block_till_done() - await async_get_mjpeg_stream(hass, MockRequest(b"", "test"), TEST_CAMERA_ENTITY_ID) + # It won't actually get a stream from the dummy handler, so just catch + # the expected exception, then verify the right handler was called. + with pytest.raises(HTTPBadGateway): + await async_get_mjpeg_stream( + hass, cast(web.Request, None), TEST_CAMERA_ENTITY_ID + ) assert stream_handler.called @@ -354,8 +358,7 @@ async def test_camera_option_stream_url_template( """Verify camera with a stream URL template option.""" client = create_mock_motioneye_client() - stream_handler = AsyncMock(return_value=web.Response(body="")) - + stream_handler = AsyncMock(return_value="") app = web.Application() app.add_routes([web.get(f"/{TEST_CAMERA_NAME}/{TEST_CAMERA_ID}", stream_handler)]) stream_server = await aiohttp_server(app) @@ -381,7 +384,10 @@ async def test_camera_option_stream_url_template( ) await hass.async_block_till_done() - await async_get_mjpeg_stream(hass, MockRequest(b"", "test"), TEST_CAMERA_ENTITY_ID) + # It won't actually get a stream from the dummy handler, so just catch + # the expected exception, then verify the right handler was called. + with pytest.raises(HTTPBadGateway): + await async_get_mjpeg_stream(hass, Mock(), TEST_CAMERA_ENTITY_ID) assert AsyncMock.called assert not client.get_camera_stream_url.called diff --git a/tests/components/motioneye/test_config_flow.py b/tests/components/motioneye/test_config_flow.py index 8d942e7a2a1..816fb31933a 100644 --- a/tests/components/motioneye/test_config_flow.py +++ b/tests/components/motioneye/test_config_flow.py @@ -9,6 +9,7 @@ from motioneye_client.client import ( ) from homeassistant import config_entries +from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.motioneye.const import ( CONF_ADMIN_PASSWORD, CONF_ADMIN_USERNAME, @@ -22,7 +23,6 @@ from homeassistant.components.motioneye.const import ( from homeassistant.const import CONF_URL, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.service_info.hassio import HassioServiceInfo from . 
import TEST_URL, create_mock_motioneye_client, create_mock_motioneye_config_entry @@ -264,7 +264,14 @@ async def test_reauth(hass: HomeAssistant) -> None: config_entry = create_mock_motioneye_config_entry(hass, data=config_data) - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert not result["errors"] diff --git a/tests/components/motionmount/conftest.py b/tests/components/motionmount/conftest.py index 49f624b5266..9e5b0355387 100644 --- a/tests/components/motionmount/conftest.py +++ b/tests/components/motionmount/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Vogel's MotionMount integration tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.motionmount.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT diff --git a/tests/components/mpd/conftest.py b/tests/components/mpd/conftest.py index a73a529cd0b..818f085decc 100644 --- a/tests/components/mpd/conftest.py +++ b/tests/components/mpd/conftest.py @@ -1,7 +1,7 @@ """Fixtures for Music Player Daemon integration tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, patch import pytest @@ -22,7 +22,7 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: +def mock_setup_entry() -> Generator[AsyncMock, None, None]: """Mock setting up a config entry.""" with patch( "homeassistant.components.mpd.async_setup_entry", return_value=True @@ -31,7 +31,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_mpd_client() -> Generator[MagicMock]: +def mock_mpd_client() -> Generator[AsyncMock, None, None]: """Return a mock for Music Player Daemon client.""" with patch( diff --git a/tests/components/mqtt/conftest.py b/tests/components/mqtt/conftest.py index 22f0416a2c6..774785bb42a 100644 --- a/tests/components/mqtt/conftest.py +++ b/tests/components/mqtt/conftest.py @@ -1,12 +1,12 @@ """Test fixtures for mqtt component.""" import asyncio -from collections.abc import AsyncGenerator, Generator from random import getrandbits from typing import Any -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest +from typing_extensions import AsyncGenerator, Generator from homeassistant.components import mqtt from homeassistant.components.mqtt.models import MessageCallbackType, ReceiveMessage @@ -87,8 +87,7 @@ async def setup_with_birth_msg_client_mock( patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0), ): entry = MockConfigEntry( - domain=mqtt.DOMAIN, - data=mqtt_config_entry_data or {mqtt.CONF_BROKER: "test-broker"}, + domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"} ) entry.add_to_hass(hass) hass.config.components.add(mqtt.DOMAIN) @@ -122,10 +121,3 @@ def record_calls(recorded_calls: list[ReceiveMessage]) -> MessageCallbackType: recorded_calls.append(msg) return record_calls - - -@pytest.fixture -def tag_mock() -> Generator[AsyncMock]: - """Fixture to mock tag.""" - with patch("homeassistant.components.tag.async_scan_tag") as mock_tag: - yield mock_tag diff --git a/tests/components/mqtt/test_alarm_control_panel.py 
b/tests/components/mqtt/test_alarm_control_panel.py index b46829650f6..aba2d5f6da2 100644 --- a/tests/components/mqtt/test_alarm_control_panel.py +++ b/tests/components/mqtt/test_alarm_control_panel.py @@ -9,10 +9,7 @@ from unittest.mock import patch import pytest from homeassistant.components import alarm_control_panel, mqtt -from homeassistant.components.alarm_control_panel import ( - AlarmControlPanelEntityFeature, - AlarmControlPanelState, -) +from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature from homeassistant.components.mqtt.alarm_control_panel import ( MQTT_ALARM_ATTRIBUTES_BLOCKED, ) @@ -28,6 +25,16 @@ from homeassistant.const import ( SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, SERVICE_RELOAD, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_CUSTOM_BYPASS, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMED_VACATION, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMED, + STATE_ALARM_DISARMING, + STATE_ALARM_PENDING, + STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -50,7 +57,6 @@ from .test_common import ( help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, - help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -207,23 +213,23 @@ async def test_update_state_via_state_topic( assert hass.states.get(entity_id).state == STATE_UNKNOWN for state in ( - AlarmControlPanelState.DISARMED, - AlarmControlPanelState.ARMED_HOME, - AlarmControlPanelState.ARMED_AWAY, - AlarmControlPanelState.ARMED_NIGHT, - AlarmControlPanelState.ARMED_VACATION, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - AlarmControlPanelState.PENDING, - AlarmControlPanelState.ARMING, - AlarmControlPanelState.DISARMING, - AlarmControlPanelState.TRIGGERED, + STATE_ALARM_DISARMED, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMED_VACATION, + STATE_ALARM_ARMED_CUSTOM_BYPASS, + STATE_ALARM_PENDING, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMING, + STATE_ALARM_TRIGGERED, ): async_fire_mqtt_message(hass, "alarm/state", state) assert hass.states.get(entity_id).state == state - # Ignore empty payload (last state is AlarmControlPanelState.TRIGGERED) + # Ignore empty payload (last state is STATE_ALARM_TRIGGERED) async_fire_mqtt_message(hass, "alarm/state", "") - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED # Reset state on `None` payload async_fire_mqtt_message(hass, "alarm/state", "None") @@ -763,7 +769,7 @@ async def test_update_state_via_state_topic_template( async_fire_mqtt_message(hass, "test-topic", "100") state = hass.states.get("alarm_control_panel.test") - assert state.state == AlarmControlPanelState.ARMED_AWAY + assert state.state == STATE_ALARM_ARMED_AWAY @pytest.mark.parametrize( @@ -847,7 +853,10 @@ async def test_availability_without_topic( ) -> None: """Test availability without defined availability topic.""" await help_test_availability_without_topic( - hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG_CODE + hass, + mqtt_mock_entry, + alarm_control_panel.DOMAIN, + DEFAULT_CONFIG_CODE, ) @@ -856,7 +865,10 @@ async def test_default_availability_payload( ) -> None: """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( - hass, mqtt_mock_entry, 
alarm_control_panel.DOMAIN, DEFAULT_CONFIG_CODE + hass, + mqtt_mock_entry, + alarm_control_panel.DOMAIN, + DEFAULT_CONFIG_CODE, ) @@ -865,7 +877,10 @@ async def test_custom_availability_payload( ) -> None: """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( - hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + alarm_control_panel.DOMAIN, + DEFAULT_CONFIG, ) @@ -874,7 +889,10 @@ async def test_setting_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( - hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + alarm_control_panel.DOMAIN, + DEFAULT_CONFIG, ) @@ -896,7 +914,10 @@ async def test_setting_attribute_with_template( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( - hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + alarm_control_panel.DOMAIN, + DEFAULT_CONFIG, ) @@ -907,7 +928,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, alarm_control_panel.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + alarm_control_panel.DOMAIN, + DEFAULT_CONFIG, ) @@ -918,7 +943,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, alarm_control_panel.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + alarm_control_panel.DOMAIN, + DEFAULT_CONFIG, ) @@ -1230,7 +1259,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = alarm_control_panel.DOMAIN @@ -1253,7 +1283,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = alarm_control_panel.DOMAIN @@ -1281,18 +1312,6 @@ async def test_entity_name( ) -async def test_entity_icon_and_entity_picture( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test the entity icon or picture setup.""" - domain = alarm_control_panel.DOMAIN - config = DEFAULT_CONFIG - await help_test_entity_icon_and_entity_picture( - hass, mqtt_mock_entry, domain, config - ) - - @pytest.mark.parametrize( "hass_config", [ @@ -1312,11 +1331,7 @@ async def test_entity_icon_and_entity_picture( @pytest.mark.parametrize( ("topic", "payload1", "payload2"), [ - ( - "test-topic", - AlarmControlPanelState.DISARMED, - AlarmControlPanelState.ARMED_HOME, - ), + ("test-topic", STATE_ALARM_DISARMED, STATE_ALARM_ARMED_HOME), ("availability-topic", "online", "offline"), ("json-attributes-topic", '{"attr1": "val1"}', '{"attr1": "val2"}'), ], diff --git a/tests/components/mqtt/test_binary_sensor.py b/tests/components/mqtt/test_binary_sensor.py index d27163c3423..6ba479fca74 100644 --- a/tests/components/mqtt/test_binary_sensor.py +++ 
b/tests/components/mqtt/test_binary_sensor.py @@ -40,7 +40,6 @@ from .test_common import ( help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, - help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -759,7 +758,10 @@ async def test_setting_attribute_with_template( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( - hass, mqtt_mock_entry, binary_sensor.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + binary_sensor.DOMAIN, + DEFAULT_CONFIG, ) @@ -770,7 +772,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, binary_sensor.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + binary_sensor.DOMAIN, + DEFAULT_CONFIG, ) @@ -781,7 +787,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, binary_sensor.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + binary_sensor.DOMAIN, + DEFAULT_CONFIG, ) @@ -1026,7 +1036,8 @@ async def test_entity_debug_info_message( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = binary_sensor.DOMAIN @@ -1134,7 +1145,7 @@ async def test_skip_restoring_state_with_over_due_expire_trigger( freezer.move_to("2022-02-02 12:02:00+01:00") domain = binary_sensor.DOMAIN - config3: ConfigType = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][domain]) + config3 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][domain]) config3["name"] = "test3" config3["expire_after"] = 10 config3["state_topic"] = "test-topic3" @@ -1194,18 +1205,6 @@ async def test_entity_name( ) -async def test_entity_icon_and_entity_picture( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test the entity icon or picture setup.""" - domain = binary_sensor.DOMAIN - config = DEFAULT_CONFIG - await help_test_entity_icon_and_entity_picture( - hass, mqtt_mock_entry, domain, config - ) - - @pytest.mark.parametrize( "hass_config", [ diff --git a/tests/components/mqtt/test_button.py b/tests/components/mqtt/test_button.py index f147b33c88b..7e5d748e2ab 100644 --- a/tests/components/mqtt/test_button.py +++ b/tests/components/mqtt/test_button.py @@ -25,7 +25,6 @@ from .test_common import ( help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, - help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_name, help_test_publishing_with_custom_encoding, @@ -217,7 +216,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, button.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + button.DOMAIN, + DEFAULT_CONFIG, ) @@ -228,7 +231,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, 
mqtt_mock_entry, caplog, button.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + button.DOMAIN, + DEFAULT_CONFIG, ) @@ -481,7 +488,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = button.DOMAIN @@ -504,7 +512,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = button.DOMAIN @@ -535,15 +544,3 @@ async def test_entity_name( await help_test_entity_name( hass, mqtt_mock_entry, domain, config, expected_friendly_name, device_class ) - - -async def test_entity_icon_and_entity_picture( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test the entity icon or picture setup.""" - domain = button.DOMAIN - config = DEFAULT_CONFIG - await help_test_entity_icon_and_entity_picture( - hass, mqtt_mock_entry, domain, config - ) diff --git a/tests/components/mqtt/test_camera.py b/tests/components/mqtt/test_camera.py index cda536dc19e..d02e19e6063 100644 --- a/tests/components/mqtt/test_camera.py +++ b/tests/components/mqtt/test_camera.py @@ -389,7 +389,8 @@ async def test_entity_debug_info_message( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = camera.DOMAIN @@ -412,7 +413,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = camera.DOMAIN diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index 164c164cdfc..49b590383d1 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -1,10 +1,9 @@ """The tests for the MQTT client.""" import asyncio -from datetime import timedelta +from datetime import datetime, timedelta import socket import ssl -import time from typing import Any from unittest.mock import MagicMock, Mock, call, patch @@ -14,7 +13,6 @@ import pytest from homeassistant.components import mqtt from homeassistant.components.mqtt.client import RECONNECT_INTERVAL_SECONDS -from homeassistant.components.mqtt.const import SUPPORTED_COMPONENTS from homeassistant.components.mqtt.models import MessageCallbackType, ReceiveMessage from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState from homeassistant.const import ( @@ -38,6 +36,11 @@ from tests.common import ( from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator, MqttMockPahoClient +@pytest.fixture(autouse=True) +def mock_storage(hass_storage: dict[str, Any]) -> None: + """Autouse hass_storage for the TestCase tests.""" + + def help_assert_message( msg: ReceiveMessage, topic: str | None = None, @@ -222,7 +225,7 @@ async def test_publish( async def test_convert_outgoing_payload(hass: HomeAssistant) -> None: """Test the converting of outgoing MQTT payloads without template.""" - command_template = mqtt.MqttCommandTemplate(None) + command_template = mqtt.MqttCommandTemplate(None, hass=hass) assert 
command_template.async_render(b"\xde\xad\xbe\xef") == b"\xde\xad\xbe\xef" assert ( command_template.async_render("b'\\xde\\xad\\xbe\\xef'") @@ -297,13 +300,10 @@ async def test_subscribe_mqtt_config_entry_disabled( mqtt_mock.connected = True mqtt_config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - - mqtt_config_entry_state = mqtt_config_entry.state - assert mqtt_config_entry_state is ConfigEntryState.LOADED + assert mqtt_config_entry.state is ConfigEntryState.LOADED assert await hass.config_entries.async_unload(mqtt_config_entry.entry_id) - mqtt_config_entry_state = mqtt_config_entry.state - assert mqtt_config_entry_state is ConfigEntryState.NOT_LOADED + assert mqtt_config_entry.state is ConfigEntryState.NOT_LOADED await hass.config_entries.async_set_disabled_by( mqtt_config_entry.entry_id, ConfigEntryDisabler.USER @@ -1194,23 +1194,6 @@ async def test_handle_mqtt_on_callback( assert "No ACK from MQTT server" not in caplog.text -async def test_handle_mqtt_on_callback_after_cancellation( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, - mqtt_client_mock: MqttMockPahoClient, -) -> None: - """Test receiving an ACK after a cancellation.""" - mqtt_mock = await mqtt_mock_entry() - # Simulate the mid future getting a cancellation - mqtt_mock()._async_get_mid_future(101).cancel() - # Simulate an ACK for mid == 101, being received after the cancellation - mqtt_client_mock.on_publish(mqtt_client_mock, None, 101) - await hass.async_block_till_done() - assert "No ACK from MQTT server" not in caplog.text - assert "InvalidStateError" not in caplog.text - - async def test_handle_mqtt_on_callback_after_timeout( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -1283,7 +1266,7 @@ async def test_handle_message_callback( callbacks.append(args) msg = ReceiveMessage( - "some-topic", b"test-payload", 1, False, "some-topic", time.monotonic() + "some-topic", b"test-payload", 1, False, "some-topic", datetime.now() ) mock_debouncer.clear() await mqtt.async_subscribe(hass, "some-topic", _callback) @@ -1614,9 +1597,8 @@ async def test_subscription_done_when_birth_message_is_sent( """Test sending birth message until initial subscription has been completed.""" mqtt_client_mock = setup_with_birth_msg_client_mock subscribe_calls = help_all_subscribe_calls(mqtt_client_mock) - for component in SUPPORTED_COMPONENTS: - assert (f"homeassistant/{component}/+/config", 0) in subscribe_calls - assert (f"homeassistant/{component}/+/+/config", 0) in subscribe_calls + assert ("homeassistant/+/+/config", 0) in subscribe_calls + assert ("homeassistant/+/+/+/config", 0) in subscribe_calls mqtt_client_mock.publish.assert_called_with( "homeassistant/status", "online", 0, False ) @@ -1716,97 +1698,6 @@ async def test_mqtt_subscribes_topics_on_connect( assert ("still/pending", 1) in subscribe_calls -@pytest.mark.parametrize("mqtt_config_entry_data", [ENTRY_DEFAULT_BIRTH_MESSAGE]) -async def test_mqtt_subscribes_wildcard_topics_in_correct_order( - hass: HomeAssistant, - mock_debouncer: asyncio.Event, - setup_with_birth_msg_client_mock: MqttMockPahoClient, - record_calls: MessageCallbackType, -) -> None: - """Test subscription to wildcard topics on connect in the order of subscription.""" - mqtt_client_mock = setup_with_birth_msg_client_mock - - mock_debouncer.clear() - await mqtt.async_subscribe(hass, "integration/test#", record_calls) - await mqtt.async_subscribe(hass, "integration/kitchen_sink#", record_calls) - await mock_debouncer.wait() - - def 
_assert_subscription_order(): - discovery_subscribes = [ - f"homeassistant/{platform}/+/config" for platform in SUPPORTED_COMPONENTS - ] - discovery_subscribes.extend( - [ - f"homeassistant/{platform}/+/+/config" - for platform in SUPPORTED_COMPONENTS - ] - ) - discovery_subscribes.extend( - ["homeassistant/device/+/config", "homeassistant/device/+/+/config"] - ) - discovery_subscribes.extend(["integration/test#", "integration/kitchen_sink#"]) - - expected_discovery_subscribes = discovery_subscribes.copy() - - # Assert we see the expected subscribes and in the correct order - actual_subscribes = [ - discovery_subscribes.pop(0) - for call in help_all_subscribe_calls(mqtt_client_mock) - if discovery_subscribes and discovery_subscribes[0] == call[0] - ] - - # Assert we have processed all items and that they are in the correct order - assert len(discovery_subscribes) == 0 - assert actual_subscribes == expected_discovery_subscribes - - # Assert the initial wildcard topic subscription order - _assert_subscription_order() - - mqtt_client_mock.on_disconnect(Mock(), None, 0) - - mqtt_client_mock.reset_mock() - - mock_debouncer.clear() - mqtt_client_mock.on_connect(Mock(), None, 0, 0) - await mock_debouncer.wait() - - # Assert the wildcard topic subscription order after a reconnect - _assert_subscription_order() - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [ENTRY_DEFAULT_BIRTH_MESSAGE | {mqtt.CONF_DISCOVERY: False}], -) -async def test_mqtt_discovery_not_subscribes_when_disabled( - hass: HomeAssistant, - mock_debouncer: asyncio.Event, - setup_with_birth_msg_client_mock: MqttMockPahoClient, -) -> None: - """Test discovery subscriptions not performend when discovery is disabled.""" - mqtt_client_mock = setup_with_birth_msg_client_mock - - await mock_debouncer.wait() - - subscribe_calls = help_all_subscribe_calls(mqtt_client_mock) - for component in SUPPORTED_COMPONENTS: - assert (f"homeassistant/{component}/+/config", 0) not in subscribe_calls - assert (f"homeassistant/{component}/+/+/config", 0) not in subscribe_calls - - mqtt_client_mock.on_disconnect(Mock(), None, 0) - - mqtt_client_mock.reset_mock() - - mock_debouncer.clear() - mqtt_client_mock.on_connect(Mock(), None, 0, 0) - await mock_debouncer.wait() - - subscribe_calls = help_all_subscribe_calls(mqtt_client_mock) - for component in SUPPORTED_COMPONENTS: - assert (f"homeassistant/{component}/+/config", 0) not in subscribe_calls - assert (f"homeassistant/{component}/+/+/config", 0) not in subscribe_calls - - @pytest.mark.parametrize( "mqtt_config_entry_data", [ENTRY_DEFAULT_BIRTH_MESSAGE], diff --git a/tests/components/mqtt/test_climate.py b/tests/components/mqtt/test_climate.py index 5edd73e3f5a..c41a6366dfe 100644 --- a/tests/components/mqtt/test_climate.py +++ b/tests/components/mqtt/test_climate.py @@ -53,7 +53,6 @@ from .test_common import ( help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, - help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_publishing_with_custom_encoding, @@ -180,14 +179,14 @@ async def test_get_hvac_modes( state = hass.states.get(ENTITY_CLIMATE) modes = state.attributes.get("hvac_modes") - assert modes == [ + assert [ HVACMode.AUTO, HVACMode.OFF, HVACMode.COOL, HVACMode.HEAT, HVACMode.DRY, HVACMode.FAN_ONLY, - ] + ] == modes @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) @@ -203,7 +202,7 @@ async def 
test_set_operation_bad_attr_and_state( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" with pytest.raises(vol.Invalid) as excinfo: - await common.async_set_hvac_mode(hass, None, ENTITY_CLIMATE) # type:ignore[arg-type] + await common.async_set_hvac_mode(hass, None, ENTITY_CLIMATE) assert ( "expected HVACMode or one of 'off', 'heat', 'cool', 'heat_cool', 'auto', 'dry'," " 'fan_only' for dictionary value @ data['hvac_mode']" in str(excinfo.value) @@ -221,9 +220,10 @@ async def test_set_operation( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" - await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" + assert state.state == "cool" mqtt_mock.async_publish.assert_called_once_with("mode-topic", "cool", 0, False) @@ -245,7 +245,7 @@ async def test_set_operation_pessimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.state == STATE_UNKNOWN - await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == STATE_UNKNOWN @@ -287,7 +287,7 @@ async def test_set_operation_optimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" - await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" @@ -316,13 +316,13 @@ async def test_set_operation_with_power_command( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" - await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" mqtt_mock.async_publish.assert_has_calls([call("mode-topic", "cool", 0, False)]) mqtt_mock.async_publish.reset_mock() - await common.async_set_hvac_mode(hass, HVACMode.OFF, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "off", ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" mqtt_mock.async_publish.assert_has_calls([call("mode-topic", "off", 0, False)]) @@ -358,12 +358,12 @@ async def test_turn_on_and_off_optimistic_with_power_command( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" - await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" mqtt_mock.async_publish.assert_has_calls([call("mode-topic", "cool", 0, False)]) mqtt_mock.async_publish.reset_mock() - await common.async_set_hvac_mode(hass, HVACMode.OFF, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "off", ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" @@ -374,7 +374,7 @@ async def test_turn_on_and_off_optimistic_with_power_command( mqtt_mock.async_publish.assert_has_calls([call("power-command", "ON", 0, False)]) mqtt_mock.async_publish.reset_mock() - await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" await common.async_turn_off(hass, ENTITY_CLIMATE) @@ -433,7 +433,7 @@ async def test_turn_on_and_off_without_power_command( else: 
mqtt_mock.async_publish.assert_has_calls([]) - await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" mqtt_mock.async_publish.reset_mock() @@ -460,7 +460,7 @@ async def test_set_fan_mode_bad_attr( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("fan_mode") == "low" with pytest.raises(vol.Invalid) as excinfo: - await common.async_set_fan_mode(hass, None, ENTITY_CLIMATE) # type:ignore[arg-type] + await common.async_set_fan_mode(hass, None, ENTITY_CLIMATE) assert "string value is None for dictionary value @ data['fan_mode']" in str( excinfo.value ) @@ -555,7 +555,7 @@ async def test_set_swing_mode_bad_attr( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("swing_mode") == "off" with pytest.raises(vol.Invalid) as excinfo: - await common.async_set_swing_mode(hass, None, ENTITY_CLIMATE) # type:ignore[arg-type] + await common.async_set_swing_mode(hass, None, ENTITY_CLIMATE) assert "string value is None for dictionary value @ data['swing_mode']" in str( excinfo.value ) @@ -649,16 +649,16 @@ async def test_set_target_temperature( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") == 21 - await common.async_set_hvac_mode(hass, HVACMode.HEAT, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "heat", ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "heat" mqtt_mock.async_publish.assert_called_once_with("mode-topic", "heat", 0, False) mqtt_mock.async_publish.reset_mock() - await common.async_set_temperature(hass, temperature=35, entity_id=ENTITY_CLIMATE) + await common.async_set_temperature(hass, temperature=47, entity_id=ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) - assert state.attributes.get("temperature") == 35 + assert state.attributes.get("temperature") == 47 mqtt_mock.async_publish.assert_called_once_with( - "temperature-topic", "35.0", 0, False + "temperature-topic", "47.0", 0, False ) # also test directly supplying the operation mode to set_temperature @@ -712,8 +712,8 @@ async def test_set_target_temperature_pessimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") is None - await common.async_set_hvac_mode(hass, HVACMode.HEAT, ENTITY_CLIMATE) - await common.async_set_temperature(hass, temperature=35, entity_id=ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "heat", ENTITY_CLIMATE) + await common.async_set_temperature(hass, temperature=47, entity_id=ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") is None @@ -744,7 +744,7 @@ async def test_set_target_temperature_optimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") == 21 - await common.async_set_hvac_mode(hass, HVACMode.HEAT, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "heat", ENTITY_CLIMATE) await common.async_set_temperature(hass, temperature=17, entity_id=ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") == 17 @@ -1017,16 +1017,7 @@ async def test_handle_action_received( # Cycle through valid modes # Redefine actions according to https://developers.home-assistant.io/docs/core/entity/climate/#hvac-action - actions = [ - "off", - "preheating", - "defrosting", - "heating", - "cooling", - "drying", - "idle", - "fan", - ] + actions = ["off", "preheating", "heating", "cooling", 
"drying", "idle", "fan"] assert all(elem in actions for elem in HVACAction) for action in actions: async_fire_mqtt_message(hass, "action", action) @@ -1547,14 +1538,14 @@ async def test_set_and_templates( assert state.attributes.get("preset_mode") == PRESET_ECO # Mode - await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) mqtt_mock.async_publish.assert_any_call("mode-topic", "mode: cool", 0, False) assert mqtt_mock.async_publish.call_count == 1 mqtt_mock.async_publish.reset_mock() state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" - await common.async_set_hvac_mode(hass, HVACMode.OFF, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, "off", ENTITY_CLIMATE) mqtt_mock.async_publish.assert_any_call("mode-topic", "mode: off", 0, False) assert mqtt_mock.async_publish.call_count == 1 mqtt_mock.async_publish.reset_mock() @@ -1590,13 +1581,13 @@ async def test_set_and_templates( assert state.attributes.get("swing_mode") == "on" # Temperature - await common.async_set_temperature(hass, temperature=35, entity_id=ENTITY_CLIMATE) + await common.async_set_temperature(hass, temperature=47, entity_id=ENTITY_CLIMATE) mqtt_mock.async_publish.assert_called_once_with( - "temperature-topic", "temp: 35.0", 0, False + "temperature-topic", "temp: 47.0", 0, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get(ENTITY_CLIMATE) - assert state.attributes.get("temperature") == 35 + assert state.attributes.get("temperature") == 47 # Temperature Low/High await common.async_set_temperature( @@ -1876,7 +1867,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, climate.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + climate.DOMAIN, + DEFAULT_CONFIG, ) @@ -1887,7 +1882,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, climate.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + climate.DOMAIN, + DEFAULT_CONFIG, ) @@ -2449,15 +2448,3 @@ async def test_value_template_fails( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' rendering template" in caplog.text ) - - -async def test_entity_icon_and_entity_picture( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test the entity name setup.""" - domain = climate.DOMAIN - config = DEFAULT_CONFIG - await help_test_entity_icon_and_entity_picture( - hass, mqtt_mock_entry, domain, config - ) diff --git a/tests/components/mqtt/test_common.py b/tests/components/mqtt/test_common.py index 95a26daf562..8d457d9da85 100644 --- a/tests/components/mqtt/test_common.py +++ b/tests/components/mqtt/test_common.py @@ -16,11 +16,8 @@ import yaml from homeassistant import config as module_hass_config from homeassistant.components import mqtt from homeassistant.components.mqtt import debug_info -from homeassistant.components.mqtt.const import ( - MQTT_CONNECTION_STATE, - SUPPORTED_COMPONENTS, -) -from homeassistant.components.mqtt.entity import MQTT_ATTRIBUTES_BLOCKED +from homeassistant.components.mqtt.const import MQTT_CONNECTION_STATE +from homeassistant.components.mqtt.mixins import MQTT_ATTRIBUTES_BLOCKED from homeassistant.components.mqtt.models import PublishPayloadType from 
homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( @@ -45,7 +42,6 @@ DEFAULT_CONFIG_DEVICE_INFO_ID = { "manufacturer": "Whatever", "name": "Beer", "model": "Glass", - "model_id": "XYZ001", "hw_version": "rev1", "serial_number": "1234deadbeef", "sw_version": "0.1-beta", @@ -58,7 +54,6 @@ DEFAULT_CONFIG_DEVICE_INFO_MAC = { "manufacturer": "Whatever", "name": "Beer", "model": "Glass", - "model_id": "XYZ001", "hw_version": "rev1", "serial_number": "1234deadbeef", "sw_version": "0.1-beta", @@ -69,25 +64,18 @@ DEFAULT_CONFIG_DEVICE_INFO_MAC = { _SENTINEL = object() DISCOVERY_COUNT = len(MQTT) -DEVICE_DISCOVERY_COUNT = 2 type _MqttMessageType = list[tuple[str, str]] type _AttributesType = list[tuple[str, Any]] -type _StateDataType = ( - list[tuple[_MqttMessageType, str, _AttributesType | None]] - | list[tuple[_MqttMessageType, str, None]] -) +type _StateDataType = list[tuple[_MqttMessageType, str | None, _AttributesType | None]] def help_all_subscribe_calls(mqtt_client_mock: MqttMockPahoClient) -> list[Any]: """Test of a call.""" all_calls = [] - for call_l1 in mqtt_client_mock.subscribe.mock_calls: - if isinstance(call_l1[1][0], list): - for call_l2 in call_l1[1]: - all_calls.extend(call_l2) - else: - all_calls.append(call_l1[1]) + for calls in mqtt_client_mock.subscribe.mock_calls: + for call in calls[1]: + all_calls.extend(call) return all_calls @@ -110,7 +98,7 @@ def help_custom_config( ) base.update(instance) entity_instances.append(base) - config[mqtt.DOMAIN][mqtt_entity_domain] = entity_instances + config[mqtt.DOMAIN][mqtt_entity_domain]: list[ConfigType] = entity_instances return config @@ -1011,7 +999,6 @@ async def help_test_entity_device_info_with_identifier( assert device.manufacturer == "Whatever" assert device.name == "Beer" assert device.model == "Glass" - assert device.model_id == "XYZ001" assert device.hw_version == "rev1" assert device.sw_version == "0.1-beta" assert device.suggested_area == "default_area" @@ -1048,7 +1035,6 @@ async def help_test_entity_device_info_with_connection( assert device.manufacturer == "Whatever" assert device.name == "Beer" assert device.model == "Glass" - assert device.model_id == "XYZ001" assert device.hw_version == "rev1" assert device.sw_version == "0.1-beta" assert device.suggested_area == "default_area" @@ -1188,13 +1174,7 @@ async def help_test_entity_id_update_subscriptions( state = hass.states.get(f"{domain}.test") assert state is not None - assert ( - mqtt_mock.async_subscribe.call_count - == len(topics) - + 2 * len(SUPPORTED_COMPONENTS) - + DISCOVERY_COUNT - + DEVICE_DISCOVERY_COUNT - ) + assert mqtt_mock.async_subscribe.call_count == len(topics) + 2 + DISCOVERY_COUNT for topic in topics: mqtt_mock.async_subscribe.assert_any_call( topic, ANY, ANY, ANY, HassJobType.Callback @@ -1367,11 +1347,11 @@ async def help_test_entity_debug_info_message( mqtt_mock_entry: MqttMockHAClientGenerator, domain: str, config: ConfigType, - service: str | None, + service: str, command_topic: str | None = None, command_payload: str | None = None, state_topic: str | object | None = _SENTINEL, - state_payload: bytes | str | None = None, + state_payload: str | None = None, service_parameters: dict[str, Any] | None = None, ) -> None: """Test debug_info. 
@@ -1672,61 +1652,6 @@ async def help_test_entity_category( assert not ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, unique_id) -async def help_test_entity_icon_and_entity_picture( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - domain: str, - config: ConfigType, - default_entity_picture: str | None = None, -) -> None: - """Test entity picture and icon.""" - await mqtt_mock_entry() - # Add device settings to config - config = copy.deepcopy(config[mqtt.DOMAIN][domain]) - config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID) - - ent_registry = er.async_get(hass) - - # Discover an entity without entity icon or picture - unique_id = "veryunique1" - config["unique_id"] = unique_id - data = json.dumps(config) - async_fire_mqtt_message(hass, f"homeassistant/{domain}/{unique_id}/config", data) - await hass.async_block_till_done() - entity_id = ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, unique_id) - state = hass.states.get(entity_id) - assert entity_id is not None and state - assert state.attributes.get("icon") is None - assert state.attributes.get("entity_picture") == default_entity_picture - - # Discover an entity with an entity picture set - unique_id = "veryunique2" - config["entity_picture"] = "https://example.com/mypicture.png" - config["unique_id"] = unique_id - data = json.dumps(config) - async_fire_mqtt_message(hass, f"homeassistant/{domain}/{unique_id}/config", data) - await hass.async_block_till_done() - entity_id = ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, unique_id) - state = hass.states.get(entity_id) - assert entity_id is not None and state - assert state.attributes.get("icon") is None - assert state.attributes.get("entity_picture") == "https://example.com/mypicture.png" - config.pop("entity_picture") - - # Discover an entity with an entity icon set - unique_id = "veryunique3" - config["icon"] = "mdi:emoji-happy-outline" - config["unique_id"] = unique_id - data = json.dumps(config) - async_fire_mqtt_message(hass, f"homeassistant/{domain}/{unique_id}/config", data) - await hass.async_block_till_done() - entity_id = ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, unique_id) - state = hass.states.get(entity_id) - assert entity_id is not None and state - assert state.attributes.get("icon") == "mdi:emoji-happy-outline" - assert state.attributes.get("entity_picture") == default_entity_picture - - async def help_test_publishing_with_custom_encoding( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, @@ -2000,7 +1925,7 @@ async def help_test_skipped_async_ha_write_state( ) -> None: """Test entity.async_ha_write_state is only called on changes.""" with patch( - "homeassistant.components.mqtt.entity.MqttEntity.async_write_ha_state" + "homeassistant.components.mqtt.mixins.MqttEntity.async_write_ha_state" ) as mock_async_ha_write_state: assert len(mock_async_ha_write_state.mock_calls) == 0 async_fire_mqtt_message(hass, topic, payload1) diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index e99063b088b..457bd19c16f 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -1,6 +1,6 @@ """Test config flow.""" -from collections.abc import Generator, Iterator +from collections.abc import Iterator from contextlib import contextmanager from pathlib import Path from ssl import SSLError @@ -8,14 +8,13 @@ from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from uuid import uuid4 -from aiohasupervisor 
import SupervisorError -from aiohasupervisor.models import Discovery import pytest +from typing_extensions import Generator import voluptuous as vol from homeassistant import config_entries from homeassistant.components import mqtt -from homeassistant.components.hassio import AddonError +from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.mqtt.config_flow import PWD_NOT_CHANGED from homeassistant.const import ( CONF_CLIENT_ID, @@ -26,20 +25,10 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry from tests.typing import MqttMockHAClientGenerator, MqttMockPahoClient -ADD_ON_DISCOVERY_INFO = { - "addon": "Mosquitto Mqtt Broker", - "host": "core-mosquitto", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "protocol": "3.1.1", - "ssl": False, -} MOCK_CLIENT_CERT = b"## mock client certificate file ##" MOCK_CLIENT_KEY = b"## mock key file ##" @@ -198,29 +187,6 @@ def mock_process_uploaded_file( yield mock_upload -@pytest.fixture(name="supervisor") -def supervisor_fixture() -> Generator[MagicMock]: - """Mock Supervisor.""" - with patch( - "homeassistant.components.mqtt.config_flow.is_hassio", return_value=True - ) as is_hassio: - yield is_hassio - - -@pytest.fixture(name="addon_setup_time", autouse=True) -def addon_setup_time_fixture() -> Generator[int]: - """Mock add-on setup sleep time.""" - with patch( - "homeassistant.components.mqtt.config_flow.ADDON_SETUP_TIMEOUT", new=0 - ) as addon_setup_time: - yield addon_setup_time - - -@pytest.fixture(autouse=True) -def mock_get_addon_discovery_info(get_addon_discovery_info: AsyncMock) -> None: - """Mock get add-on discovery info.""" - - @pytest.mark.usefixtures("mqtt_client_mock") async def test_user_connection_works( hass: HomeAssistant, @@ -251,47 +217,6 @@ async def test_user_connection_works( assert len(mock_finish_setup.mock_calls) == 1 -@pytest.mark.usefixtures("mqtt_client_mock", "supervisor", "supervisor_client") -async def test_user_connection_works_with_supervisor( - hass: HomeAssistant, - mock_try_connection: MagicMock, - mock_finish_setup: MagicMock, -) -> None: - """Test we can finish a config flow with a supervised install.""" - mock_try_connection.return_value = True - - result = await hass.config_entries.flow.async_init( - "mqtt", context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["menu_options"] == ["addon", "broker"] - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "broker"}, - ) - - # Assert a manual setup flow - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"broker": "127.0.0.1"} - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].data == { - "broker": "127.0.0.1", - "port": 1883, - "discovery": True, - } - # Check we tried the connection - assert len(mock_try_connection.mock_calls) == 1 - # Check config entry got setup - assert len(mock_finish_setup.mock_calls) == 1 - await hass.async_block_till_done(wait_background_tasks=True) - - @pytest.mark.usefixtures("mqtt_client_mock") async def test_user_v5_connection_works( hass: HomeAssistant, @@ -418,7 +343,7 @@ async def test_hassio_already_configured(hass: 
HomeAssistant) -> None: "mqtt", context={"source": config_entries.SOURCE_HASSIO} ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" + assert result["reason"] == "already_configured" async def test_hassio_ignored(hass: HomeAssistant) -> None: @@ -444,7 +369,7 @@ async def test_hassio_ignored(hass: HomeAssistant) -> None: ) assert result assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "single_instance_allowed" + assert result.get("reason") == "already_configured" async def test_hassio_confirm( @@ -453,11 +378,21 @@ async def test_hassio_confirm( mock_finish_setup: MagicMock, ) -> None: """Test we can finish a config flow.""" + mock_try_connection.return_value = True + result = await hass.config_entries.flow.async_init( "mqtt", data=HassioServiceInfo( - config=ADD_ON_DISCOVERY_INFO.copy(), - name="Mosquitto Mqtt Broker", + config={ + "addon": "Mock Addon", + "host": "mock-broker", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", # Set by the addon's discovery, ignored by HA + "ssl": False, # Set by the addon's discovery, ignored by HA + }, + name="Mock Addon", slug="mosquitto", uuid="1234", ), @@ -465,7 +400,7 @@ async def test_hassio_confirm( ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "hassio_confirm" - assert result["description_placeholders"] == {"addon": "Mosquitto Mqtt Broker"} + assert result["description_placeholders"] == {"addon": "Mock Addon"} mock_try_connection_success.reset_mock() result = await hass.config_entries.flow.async_configure( @@ -474,7 +409,7 @@ async def test_hassio_confirm( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].data == { - "broker": "core-mosquitto", + "broker": "mock-broker", "port": 1883, "username": "mock-user", "password": "mock-pass", @@ -492,12 +427,14 @@ async def test_hassio_cannot_connect( mock_finish_setup: MagicMock, ) -> None: """Test a config flow is aborted when a connection was not successful.""" + mock_try_connection.return_value = True + result = await hass.config_entries.flow.async_init( "mqtt", data=HassioServiceInfo( config={ "addon": "Mock Addon", - "host": "core-mosquitto", + "host": "mock-broker", "port": 1883, "username": "mock-user", "password": "mock-pass", @@ -527,410 +464,6 @@ async def test_hassio_cannot_connect( assert len(mock_finish_setup.mock_calls) == 0 -@pytest.mark.usefixtures( - "mqtt_client_mock", "supervisor", "addon_info", "addon_running" -) -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_mosquitto", - service="mqtt", - uuid=uuid4(), - config=ADD_ON_DISCOVERY_INFO.copy(), - ) - ] - ], -) -async def test_addon_flow_with_supervisor_addon_running( - hass: HomeAssistant, - mock_try_connection_success: MagicMock, - mock_finish_setup: MagicMock, -) -> None: - """Test we perform an auto config flow with a supervised install. - - Case: The Mosquitto add-on is already installed, and running. 
- """ - # show menu - result = await hass.config_entries.flow.async_init( - "mqtt", context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["menu_options"] == ["addon", "broker"] - assert result["step_id"] == "user" - - # select install via add-on - mock_try_connection_success.reset_mock() - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "addon"}, - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].data == { - "broker": "core-mosquitto", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "discovery": True, - } - # Check we tried the connection - assert len(mock_try_connection_success.mock_calls) - # Check config entry got setup - assert len(mock_finish_setup.mock_calls) == 1 - - -@pytest.mark.usefixtures( - "mqtt_client_mock", "supervisor", "addon_info", "addon_installed", "start_addon" -) -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_mosquitto", - service="mqtt", - uuid=uuid4(), - config=ADD_ON_DISCOVERY_INFO.copy(), - ) - ] - ], -) -async def test_addon_flow_with_supervisor_addon_installed( - hass: HomeAssistant, - mock_try_connection_success: MagicMock, - mock_finish_setup: MagicMock, -) -> None: - """Test we perform an auto config flow with a supervised install. - - Case: The Mosquitto add-on is installed, but not running. - """ - # show menu - result = await hass.config_entries.flow.async_init( - "mqtt", context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["menu_options"] == ["addon", "broker"] - assert result["step_id"] == "user" - - # select install via add-on - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "addon"}, - ) - - # add-on installed but not started, so we wait for start-up - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "start_addon" - assert result["step_id"] == "start_addon" - await hass.async_block_till_done() - await hass.async_block_till_done(wait_background_tasks=True) - mock_try_connection_success.reset_mock() - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "start_addon"}, - ) - - # add-on is running, so entry can be installed - await hass.async_block_till_done(wait_background_tasks=True) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].data == { - "broker": "core-mosquitto", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "discovery": True, - } - # Check we tried the connection - assert len(mock_try_connection_success.mock_calls) - # Check config entry got setup - assert len(mock_finish_setup.mock_calls) == 1 - - -@pytest.mark.usefixtures( - "mqtt_client_mock", "supervisor", "addon_info", "addon_running" -) -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_mosquitto", - service="mqtt", - uuid=uuid4(), - config=ADD_ON_DISCOVERY_INFO.copy(), - ) - ] - ], -) -async def test_addon_flow_with_supervisor_addon_running_connection_fails( - hass: HomeAssistant, - mock_try_connection: MagicMock, -) -> None: - """Test we perform an auto config flow with a supervised install. - - Case: The Mosquitto add-on is already installed, and running. 
- """ - # show menu - result = await hass.config_entries.flow.async_init( - "mqtt", context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["menu_options"] == ["addon", "broker"] - assert result["step_id"] == "user" - - # select install via add-on but the connection fails and the flow will be aborted. - mock_try_connection.return_value = False - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "addon"}, - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert result["type"] is FlowResultType.ABORT - - -@pytest.mark.usefixtures( - "mqtt_client_mock", - "supervisor", - "addon_info", - "addon_installed", -) -async def test_addon_not_running_api_error( - hass: HomeAssistant, - start_addon: AsyncMock, -) -> None: - """Test we perform an auto config flow with a supervised install. - - Case: The Mosquitto add-on start fails on a API error. - """ - start_addon.side_effect = SupervisorError() - - result = await hass.config_entries.flow.async_init( - "mqtt", context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["menu_options"] == ["addon", "broker"] - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "addon"}, - ) - # add-on not installed, so we wait for install - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "start_addon" - assert result["step_id"] == "start_addon" - await hass.async_block_till_done() - await hass.async_block_till_done(wait_background_tasks=True) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "install_addon"}, - ) - - # add-on start-up failed - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "addon_start_failed" - - -@pytest.mark.usefixtures( - "mqtt_client_mock", - "supervisor", - "start_addon", - "addon_installed", -) -async def test_addon_discovery_info_error( - hass: HomeAssistant, - addon_info: AsyncMock, - get_addon_discovery_info: AsyncMock, -) -> None: - """Test we perform an auto config flow with a supervised install. - - Case: The Mosquitto add-on start on a discovery error. - """ - get_addon_discovery_info.side_effect = AddonError - - result = await hass.config_entries.flow.async_init( - "mqtt", context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["menu_options"] == ["addon", "broker"] - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "addon"}, - ) - # Addon will retry - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "start_addon" - assert result["step_id"] == "start_addon" - await hass.async_block_till_done() - await hass.async_block_till_done(wait_background_tasks=True) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "start_addon"}, - ) - - # add-on start-up failed - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "addon_start_failed" - - -@pytest.mark.usefixtures( - "mqtt_client_mock", - "supervisor", - "start_addon", - "addon_installed", -) -async def test_addon_info_error( - hass: HomeAssistant, - addon_info: AsyncMock, -) -> None: - """Test we perform an auto config flow with a supervised install. 
- - Case: The Mosquitto add-on info could not be retrieved. - """ - addon_info.side_effect = SupervisorError() - - result = await hass.config_entries.flow.async_init( - "mqtt", context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["menu_options"] == ["addon", "broker"] - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "addon"}, - ) - - # add-on info failed - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "addon_info_failed" - - -@pytest.mark.usefixtures( - "mqtt_client_mock", - "supervisor", - "addon_info", - "addon_not_installed", - "install_addon", - "start_addon", -) -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_mosquitto", - service="mqtt", - uuid=uuid4(), - config=ADD_ON_DISCOVERY_INFO.copy(), - ) - ] - ], -) -async def test_addon_flow_with_supervisor_addon_not_installed( - hass: HomeAssistant, - mock_try_connection_success: MagicMock, - mock_finish_setup: MagicMock, -) -> None: - """Test we perform an auto config flow with a supervised install. - - Case: The Mosquitto add-on is not yet installed nor running. - """ - result = await hass.config_entries.flow.async_init( - "mqtt", context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["menu_options"] == ["addon", "broker"] - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "addon"}, - ) - # add-on not installed, so we wait for install - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert result["step_id"] == "install_addon" - await hass.async_block_till_done() - await hass.async_block_till_done(wait_background_tasks=True) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "install_addon"}, - ) - - # add-on installed but not started, so we wait for start-up - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "start_addon" - assert result["step_id"] == "start_addon" - await hass.async_block_till_done() - await hass.async_block_till_done(wait_background_tasks=True) - mock_try_connection_success.reset_mock() - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "start_addon"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].data == { - "broker": "core-mosquitto", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "discovery": True, - } - # Check we tried the connection - assert len(mock_try_connection_success.mock_calls) - # Check config entry got setup - assert len(mock_finish_setup.mock_calls) == 1 - - -@pytest.mark.usefixtures( - "mqtt_client_mock", - "supervisor", - "addon_info", - "addon_not_installed", - "start_addon", -) -async def test_addon_not_installed_failures( - hass: HomeAssistant, - install_addon: AsyncMock, -) -> None: - """Test we perform an auto config flow with a supervised install. - - Case: The Mosquitto add-on install fails. 
- """ - install_addon.side_effect = SupervisorError() - - result = await hass.config_entries.flow.async_init( - "mqtt", context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.MENU - assert result["menu_options"] == ["addon", "broker"] - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "addon"}, - ) - # add-on not installed, so we wait for install - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert result["step_id"] == "install_addon" - await hass.async_block_till_done() - await hass.async_block_till_done(wait_background_tasks=True) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"next_step_id": "install_addon"}, - ) - - # add-on install failed - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "addon_install_failed" - - async def test_option_flow( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, @@ -1071,6 +604,7 @@ async def test_bad_certificate( test_input.pop(mqtt.CONF_CLIENT_KEY) mqtt_mock = await mqtt_mock_entry() + mock_try_connection.return_value = True config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] # Add at least one advanced option to get the full form hass.config_entries.async_update_entry( @@ -1317,19 +851,18 @@ async def test_invalid_discovery_prefix( assert mock_reload_after_entry_update.call_count == 0 -def get_default(schema: vol.Schema, key: str) -> Any | None: +def get_default(schema: vol.Schema, key: str) -> Any: """Get default value for key in voluptuous schema.""" - for schema_key in schema: # type:ignore[attr-defined] + for schema_key in schema: if schema_key == key: if schema_key.default == vol.UNDEFINED: return None return schema_key.default() - return None -def get_suggested(schema: vol.Schema, key: str) -> Any | None: +def get_suggested(schema: vol.Schema, key: str) -> Any: """Get suggested value for key in voluptuous schema.""" - for schema_key in schema: # type:ignore[attr-defined] + for schema_key in schema: if schema_key == key: if ( schema_key.description is None @@ -1337,7 +870,6 @@ def get_suggested(schema: vol.Schema, key: str) -> Any | None: ): return None return schema_key.description["suggested_value"] - return None @pytest.mark.usefixtures("mock_reload_after_entry_update") @@ -1597,7 +1129,14 @@ async def test_step_reauth( assert result["context"]["source"] == "reauth" # Show the form - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + mqtt.DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -1626,142 +1165,6 @@ async def test_step_reauth( await hass.async_block_till_done() -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_mosquitto", - service="mqtt", - uuid=uuid4(), - config=ADD_ON_DISCOVERY_INFO.copy(), - ) - ] - ], -) -@pytest.mark.usefixtures( - "mqtt_client_mock", "mock_reload_after_entry_update", "supervisor", "addon_running" -) -async def test_step_hassio_reauth( - hass: HomeAssistant, mock_try_connection: MagicMock, addon_info: AsyncMock -) -> None: - """Test that the reauth step works in case the Mosquitto broker add-on was re-installed.""" - - # Set up entry data based on the discovery data, but with 
a stale password - entry_data = { - mqtt.CONF_BROKER: "core-mosquitto", - CONF_PORT: 1883, - CONF_USERNAME: "mock-user", - CONF_PASSWORD: "stale-secret", - } - - addon_info["hostname"] = "core-mosquitto" - - # Prepare the config entry - config_entry = MockConfigEntry(domain=mqtt.DOMAIN, data=entry_data) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - - assert config_entry.data.get(CONF_PASSWORD) == "stale-secret" - - # Start reauth flow - mock_try_connection.reset_mock() - mock_try_connection.return_value = True - config_entry.async_start_reauth(hass) - await hass.async_block_till_done() - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 0 - - # Assert the entry is updated automatically - assert config_entry.data.get(CONF_PASSWORD) == "mock-pass" - mock_try_connection.assert_called_once_with( - { - "broker": "core-mosquitto", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - } - ) - - -@pytest.mark.parametrize( - ("discovery_info", "discovery_info_side_effect", "broker"), - [ - ( - [ - Discovery( - addon="core_mosquitto", - service="mqtt", - uuid=uuid4(), - config=ADD_ON_DISCOVERY_INFO.copy(), - ) - ], - AddonError, - "core-mosquitto", - ), - ( - [ - Discovery( - addon="core_mosquitto", - service="mqtt", - uuid=uuid4(), - config=ADD_ON_DISCOVERY_INFO.copy(), - ) - ], - None, - "broker-not-addon", - ), - ], -) -@pytest.mark.usefixtures( - "mqtt_client_mock", "mock_reload_after_entry_update", "supervisor", "addon_running" -) -async def test_step_hassio_reauth_no_discovery_info( - hass: HomeAssistant, - mock_try_connection: MagicMock, - addon_info: AsyncMock, - broker: str, -) -> None: - """Test hassio reauth flow defaults to manual flow. - - Test that the reauth step defaults to - normal reauth flow if fetching add-on discovery info failed, - or the broker is not the add-on. 
- """ - - # Set up entry data based on the discovery data, but with a stale password - entry_data = { - mqtt.CONF_BROKER: broker, - CONF_PORT: 1883, - CONF_USERNAME: "mock-user", - CONF_PASSWORD: "wrong-pass", - } - - addon_info["hostname"] = "core-mosquitto" - - # Prepare the config entry - config_entry = MockConfigEntry(domain=mqtt.DOMAIN, data=entry_data) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - - assert config_entry.data.get(CONF_PASSWORD) == "wrong-pass" - - # Start reauth flow - mock_try_connection.reset_mock() - mock_try_connection.return_value = True - config_entry.async_start_reauth(hass) - await hass.async_block_till_done() - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - result = flows[0] - assert result["step_id"] == "reauth_confirm" - assert result["context"]["source"] == "reauth" - - # Assert the entry is not updated - assert config_entry.data.get(CONF_PASSWORD) == "wrong-pass" - mock_try_connection.assert_not_called() - - async def test_options_user_connection_fails( hass: HomeAssistant, mock_try_connection_time_out: MagicMock ) -> None: diff --git a/tests/components/mqtt/test_cover.py b/tests/components/mqtt/test_cover.py index ee74b78be81..f37de8b6a2e 100644 --- a/tests/components/mqtt/test_cover.py +++ b/tests/components/mqtt/test_cover.py @@ -12,7 +12,6 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - CoverState, ) from homeassistant.components.mqtt.const import CONF_STATE_TOPIC from homeassistant.components.mqtt.cover import ( @@ -40,7 +39,9 @@ from homeassistant.const import ( SERVICE_TOGGLE, SERVICE_TOGGLE_COVER_TILT, STATE_CLOSED, + STATE_CLOSING, STATE_OPEN, + STATE_OPENING, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -62,7 +63,6 @@ from .test_common import ( help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, - help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_publishing_with_custom_encoding, @@ -116,12 +116,12 @@ async def test_state_via_state_topic( async_fire_mqtt_message(hass, "state-topic", STATE_CLOSED) state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED async_fire_mqtt_message(hass, "state-topic", STATE_OPEN) state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN async_fire_mqtt_message(hass, "state-topic", "None") @@ -162,17 +162,17 @@ async def test_opening_and_closing_state_via_custom_state_payload( async_fire_mqtt_message(hass, "state-topic", "34") state = hass.states.get("cover.test") - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING async_fire_mqtt_message(hass, "state-topic", "--43") state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING async_fire_mqtt_message(hass, "state-topic", STATE_CLOSED) state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED @pytest.mark.parametrize( @@ -197,11 +197,11 @@ async def test_opening_and_closing_state_via_custom_state_payload( @pytest.mark.parametrize( ("position", "assert_state"), [ - (0, CoverState.CLOSED), - (1, CoverState.OPEN), - (30, CoverState.OPEN), - (99, CoverState.OPEN), - (100, 
CoverState.OPEN), + (0, STATE_CLOSED), + (1, STATE_OPEN), + (30, STATE_OPEN), + (99, STATE_OPEN), + (100, STATE_OPEN), ], ) async def test_open_closed_state_from_position_optimistic( @@ -253,13 +253,13 @@ async def test_open_closed_state_from_position_optimistic( @pytest.mark.parametrize( ("position", "assert_state"), [ - (0, CoverState.CLOSED), - (1, CoverState.CLOSED), - (10, CoverState.CLOSED), - (11, CoverState.OPEN), - (30, CoverState.OPEN), - (99, CoverState.OPEN), - (100, CoverState.OPEN), + (0, STATE_CLOSED), + (1, STATE_CLOSED), + (10, STATE_CLOSED), + (11, STATE_OPEN), + (30, STATE_OPEN), + (99, STATE_OPEN), + (100, STATE_OPEN), ], ) async def test_open_closed_state_from_position_optimistic_alt_positions( @@ -449,12 +449,12 @@ async def test_position_via_position_topic( async_fire_mqtt_message(hass, "get-position-topic", "0") state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED async_fire_mqtt_message(hass, "get-position-topic", "100") state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN @pytest.mark.parametrize( @@ -490,12 +490,12 @@ async def test_state_via_template( async_fire_mqtt_message(hass, "state-topic", "10000") state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN async_fire_mqtt_message(hass, "state-topic", "99") state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED @pytest.mark.parametrize( @@ -532,13 +532,13 @@ async def test_state_via_template_and_entity_id( async_fire_mqtt_message(hass, "state-topic", "invalid") state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN async_fire_mqtt_message(hass, "state-topic", "closed") async_fire_mqtt_message(hass, "state-topic", "invalid") state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED @pytest.mark.parametrize( @@ -571,14 +571,14 @@ async def test_state_via_template_with_json_value( async_fire_mqtt_message(hass, "state-topic", '{ "Var1": "open", "Var2": "other" }') state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN async_fire_mqtt_message( hass, "state-topic", '{ "Var1": "closed", "Var2": "other" }' ) state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED async_fire_mqtt_message(hass, "state-topic", '{ "Var2": "other" }') assert ( @@ -741,7 +741,7 @@ async def test_optimistic_state_change( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN await hass.services.async_call( cover.DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True @@ -750,7 +750,7 @@ async def test_optimistic_state_change( mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED await hass.services.async_call( cover.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: "cover.test"}, blocking=True @@ -759,7 +759,7 @@ async def test_optimistic_state_change( 
mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN await hass.services.async_call( cover.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: "cover.test"}, blocking=True @@ -767,7 +767,7 @@ async def test_optimistic_state_change( mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False) state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED @pytest.mark.parametrize( @@ -804,7 +804,7 @@ async def test_optimistic_state_change_with_position( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 100 await hass.services.async_call( @@ -814,7 +814,7 @@ async def test_optimistic_state_change_with_position( mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes.get(ATTR_CURRENT_POSITION) == 0 await hass.services.async_call( @@ -824,7 +824,7 @@ async def test_optimistic_state_change_with_position( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 100 await hass.services.async_call( @@ -833,7 +833,7 @@ async def test_optimistic_state_change_with_position( mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False) state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes.get(ATTR_CURRENT_POSITION) == 0 @@ -1026,35 +1026,35 @@ async def test_current_cover_position_inverted( ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 0 - assert hass.states.get("cover.test").state == CoverState.CLOSED + assert hass.states.get("cover.test").state == STATE_CLOSED async_fire_mqtt_message(hass, "get-position-topic", "0") current_percentage_cover_position = hass.states.get("cover.test").attributes[ ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 100 - assert hass.states.get("cover.test").state == CoverState.OPEN + assert hass.states.get("cover.test").state == STATE_OPEN async_fire_mqtt_message(hass, "get-position-topic", "50") current_percentage_cover_position = hass.states.get("cover.test").attributes[ ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 50 - assert hass.states.get("cover.test").state == CoverState.OPEN + assert hass.states.get("cover.test").state == STATE_OPEN async_fire_mqtt_message(hass, "get-position-topic", "non-numeric") current_percentage_cover_position = hass.states.get("cover.test").attributes[ ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 50 - assert hass.states.get("cover.test").state == CoverState.OPEN + assert hass.states.get("cover.test").state == STATE_OPEN async_fire_mqtt_message(hass, "get-position-topic", "101") current_percentage_cover_position = 
hass.states.get("cover.test").attributes[ ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 0 - assert hass.states.get("cover.test").state == CoverState.CLOSED + assert hass.states.get("cover.test").state == STATE_CLOSED @pytest.mark.parametrize( @@ -2738,32 +2738,32 @@ async def test_state_and_position_topics_state_not_set_via_position_topic( async_fire_mqtt_message(hass, "state-topic", "OPEN") state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN async_fire_mqtt_message(hass, "get-position-topic", "0") state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN async_fire_mqtt_message(hass, "get-position-topic", "100") state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN async_fire_mqtt_message(hass, "state-topic", "CLOSE") state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED async_fire_mqtt_message(hass, "get-position-topic", "0") state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED async_fire_mqtt_message(hass, "get-position-topic", "100") state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED @pytest.mark.parametrize( @@ -2800,27 +2800,27 @@ async def test_set_state_via_position_using_stopped_state( async_fire_mqtt_message(hass, "state-topic", "OPEN") state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN async_fire_mqtt_message(hass, "get-position-topic", "0") state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED async_fire_mqtt_message(hass, "get-position-topic", "100") state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN @pytest.mark.parametrize( @@ -3136,32 +3136,32 @@ async def test_set_state_via_stopped_state_no_position_topic( async_fire_mqtt_message(hass, "state-topic", "OPEN") state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN async_fire_mqtt_message(hass, "state-topic", "OPENING") state = hass.states.get("cover.test") - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN async_fire_mqtt_message(hass, "state-topic", "CLOSING") state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED @pytest.mark.parametrize( @@ -3461,7 +3461,8 @@ async 
def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = cover.DOMAIN @@ -3549,15 +3550,3 @@ async def test_value_template_fails( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' rendering template" in caplog.text ) - - -async def test_entity_icon_and_entity_picture( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test the entity name setup.""" - domain = cover.DOMAIN - config = DEFAULT_CONFIG - await help_test_entity_icon_and_entity_picture( - hass, mqtt_mock_entry, domain, config - ) diff --git a/tests/components/mqtt/test_device_tracker.py b/tests/components/mqtt/test_device_tracker.py index 00e88860299..9759dfcadd7 100644 --- a/tests/components/mqtt/test_device_tracker.py +++ b/tests/components/mqtt/test_device_tracker.py @@ -584,7 +584,11 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, mqtt_mock_entry, device_tracker.DOMAIN, DEFAULT_CONFIG, None + hass, + mqtt_mock_entry, + device_tracker.DOMAIN, + DEFAULT_CONFIG, + None, ) diff --git a/tests/components/mqtt/test_device_trigger.py b/tests/components/mqtt/test_device_trigger.py index 009a0315029..ce75bd01a03 100644 --- a/tests/components/mqtt/test_device_trigger.py +++ b/tests/components/mqtt/test_device_trigger.py @@ -17,8 +17,12 @@ from homeassistant.setup import async_setup_component from .test_common import help_test_unload_config_entry -from tests.common import async_fire_mqtt_message, async_get_device_automations -from tests.typing import MqttMockHAClientGenerator, WebSocketGenerator +from tests.common import ( + async_fire_mqtt_message, + async_get_device_automations, + async_mock_service, +) +from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator, WebSocketGenerator @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -26,46 +30,32 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.mark.parametrize( - ("discovery_topic", "data"), - [ - ( - "homeassistant/device_automation/0AFFD2/bla/config", - '{ "automation_type":"trigger",' - ' "device":{"identifiers":["0AFFD2"]},' - ' "payload": "short_press",' - ' "topic": "foobar/triggers/button1",' - ' "type": "button_short_press",' - ' "subtype": "button_1" }', - ), - ( - "homeassistant/device/0AFFD2/config", - '{ "device":{"identifiers":["0AFFD2"]},' - ' "o": {"name": "foobar"}, "cmps": ' - '{ "bla": {' - ' "automation_type":"trigger", ' - ' "payload": "short_press",' - ' "topic": "foobar/triggers/button1",' - ' "type": "button_short_press",' - ' "subtype": "button_1",' - ' "platform":"device_automation"}}}', - ), - ], -) +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, mqtt_mock_entry: MqttMockHAClientGenerator, - discovery_topic: str, - data: str, ) -> None: """Test we get the expected triggers from a discovered mqtt device.""" await mqtt_mock_entry() - async_fire_mqtt_message(hass, discovery_topic, data) + data1 = ( + '{ 
"automation_type":"trigger",' + ' "device":{"identifiers":["0AFFD2"]},' + ' "payload": "short_press",' + ' "topic": "foobar/triggers/button1",' + ' "type": "button_short_press",' + ' "subtype": "button_1" }' + ) + async_fire_mqtt_message(hass, "homeassistant/device_automation/bla/config", data1) await hass.async_block_till_done() device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - expected_triggers: list[dict[str, Any]] = [ + expected_triggers = [ { "platform": "device", "domain": DOMAIN, @@ -185,7 +175,7 @@ async def test_discover_bad_triggers( await hass.async_block_till_done() device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - expected_triggers: list[dict[str, Any]] = [ + expected_triggers = [ { "platform": "device", "domain": DOMAIN, @@ -246,7 +236,7 @@ async def test_update_remove_triggers( device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) assert device_entry.name == "milk" - expected_triggers1: list[dict[str, Any]] = [ + expected_triggers1 = [ { "platform": "device", "domain": DOMAIN, @@ -294,7 +284,7 @@ async def test_update_remove_triggers( async def test_if_fires_on_mqtt_message( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers firing.""" @@ -360,20 +350,20 @@ async def test_if_fires_on_mqtt_message( # Fake short press. async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "short_press" + assert len(calls) == 1 + assert calls[0].data["some"] == "short_press" # Fake long press. async_fire_mqtt_message(hass, "foobar/triggers/button2", "long_press") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "long_press" + assert len(calls) == 2 + assert calls[1].data["some"] == "long_press" async def test_if_discovery_id_is_prefered( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test if discovery is preferred over referencing by type/subtype. @@ -447,21 +437,21 @@ async def test_if_discovery_id_is_prefered( # Fake short press, matching on type and subtype async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "short_press" + assert len(calls) == 1 + assert calls[0].data["some"] == "short_press" # Fake long press, matching on discovery_id - service_calls.clear() + calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/button1", "long_press") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "long_press" + assert len(calls) == 1 + assert calls[0].data["some"] == "long_press" async def test_non_unique_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -538,20 +528,20 @@ async def test_non_unique_triggers( # and triggers both attached instances. 
async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(service_calls) == 2 - all_calls = {service_calls[0].data["some"], service_calls[1].data["some"]} + assert len(calls) == 2 + all_calls = {calls[0].data["some"], calls[1].data["some"]} assert all_calls == {"press1", "press2"} # Trigger second config references to same trigger # and triggers both attached instances. async_fire_mqtt_message(hass, "foobar/triggers/button2", "long_press") await hass.async_block_till_done() - assert len(service_calls) == 2 - all_calls = {service_calls[0].data["some"], service_calls[1].data["some"]} + assert len(calls) == 2 + all_calls = {calls[0].data["some"], calls[1].data["some"]} assert all_calls == {"press1", "press2"} # Removing the first trigger will clean up - service_calls.clear() + calls.clear() async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", "") await hass.async_block_till_done() await hass.async_block_till_done() @@ -559,13 +549,13 @@ async def test_non_unique_triggers( "Device trigger ('device_automation', 'bla1') has been removed" in caplog.text ) async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_if_fires_on_mqtt_message_template( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers firing with a message template and a shared topic.""" @@ -633,20 +623,20 @@ async def test_if_fires_on_mqtt_message_template( # Fake short press. async_fire_mqtt_message(hass, "foobar/triggers/button4", '{"button":"short_press"}') await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "short_press" + assert len(calls) == 1 + assert calls[0].data["some"] == "short_press" # Fake long press. async_fire_mqtt_message(hass, "foobar/triggers/button4", '{"button":"long_press"}') await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "long_press" + assert len(calls) == 2 + assert calls[1].data["some"] == "long_press" async def test_if_fires_on_mqtt_message_late_discover( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers firing of MQTT device triggers discovered after setup.""" @@ -720,20 +710,20 @@ async def test_if_fires_on_mqtt_message_late_discover( # Fake short press. async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "short_press" + assert len(calls) == 1 + assert calls[0].data["some"] == "short_press" # Fake long press. 
async_fire_mqtt_message(hass, "foobar/triggers/button2", "long_press") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "long_press" + assert len(calls) == 2 + assert calls[1].data["some"] == "long_press" async def test_if_fires_on_mqtt_message_after_update( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -792,7 +782,7 @@ async def test_if_fires_on_mqtt_message_after_update( # Fake short press. async_fire_mqtt_message(hass, "foobar/triggers/button1", "") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Update the trigger with existing type/subtype change async_fire_mqtt_message(hass, "homeassistant/device_automation/bla2/config", data1) @@ -803,29 +793,29 @@ async def test_if_fires_on_mqtt_message_after_update( async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", data3) await hass.async_block_till_done() - service_calls.clear() + calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/button1", "") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 - service_calls.clear() + calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/buttonOne", "") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Update the trigger with same topic async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", data3) await hass.async_block_till_done() - service_calls.clear() + calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/button1", "") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 - service_calls.clear() + calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/buttonOne", "") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_no_resubscribe_same_topic( @@ -878,7 +868,7 @@ async def test_no_resubscribe_same_topic( async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers not firing after removal.""" @@ -921,7 +911,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( # Fake short press. 
async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Remove the trigger async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", "") @@ -929,7 +919,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Rediscover the trigger async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", data1) @@ -937,14 +927,14 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 async def test_not_fires_on_mqtt_message_after_remove_from_registry( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers not firing after removal.""" @@ -992,7 +982,7 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( # Fake short press. async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Remove MQTT from the device mqtt_config_entry = hass.config_entries.async_entries(DOMAIN)[0] @@ -1004,7 +994,7 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_attach_remove( @@ -1283,7 +1273,7 @@ async def test_entity_device_info_update( """Test device registry update.""" await mqtt_mock_entry() - config: dict[str, Any] = { + config = { "automation_type": "trigger", "topic": "test-topic", "type": "foo", @@ -1692,11 +1682,11 @@ async def test_trigger_debug_info( assert debug_info_data["triggers"][0]["discovery_data"]["payload"] == config2 -@pytest.mark.usefixtures("mqtt_mock") async def test_unload_entry( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, ) -> None: """Test unloading the MQTT entry.""" @@ -1737,7 +1727,7 @@ async def test_unload_entry( # Fake short press 1 async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await help_test_unload_config_entry(hass) @@ -1746,7 +1736,7 @@ async def test_unload_entry( await hass.async_block_till_done() async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Start entry again mqtt_entry = hass.config_entries.async_entries("mqtt")[0] @@ -1757,5 +1747,4 @@ async def test_unload_entry( await hass.async_block_till_done() async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(service_calls) == 2 - await hass.async_block_till_done(wait_background_tasks=True) + assert len(calls) == 2 diff --git a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index e49e7a27c8d..8c51e295998 100644 --- 
a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -3,22 +3,18 @@ import asyncio import copy import json -import logging from pathlib import Path import re -from typing import Any -from unittest.mock import ANY, AsyncMock, call, patch +from unittest.mock import AsyncMock, call, patch import pytest from homeassistant import config_entries from homeassistant.components import mqtt -from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.mqtt.abbreviations import ( ABBREVIATIONS, DEVICE_ABBREVIATIONS, ) -from homeassistant.components.mqtt.const import SUPPORTED_COMPONENTS from homeassistant.components.mqtt.discovery import ( MQTT_DISCOVERY_DONE, MQTT_DISCOVERY_NEW, @@ -36,7 +32,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.data_entry_flow import AbortFlow, FlowResult +from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, @@ -48,16 +44,12 @@ from homeassistant.util.signal_type import SignalTypeFormat from .conftest import ENTRY_DEFAULT_BIRTH_MESSAGE from .test_common import help_all_subscribe_calls, help_test_unload_config_entry -from .test_tag import DEFAULT_TAG_ID, DEFAULT_TAG_SCAN from tests.common import ( MockConfigEntry, - MockModule, async_capture_events, async_fire_mqtt_message, - async_get_device_automations, mock_config_flow, - mock_integration, mock_platform, ) from tests.typing import ( @@ -66,133 +58,6 @@ from tests.typing import ( WebSocketGenerator, ) -TEST_SINGLE_CONFIGS = [ - ( - "homeassistant/device_automation/0AFFD2/bla1/config", - { - "device": {"identifiers": ["0AFFD2"], "name": "test_device"}, - "o": {"name": "Foo2Mqtt", "sw": "1.40.2", "url": "https://www.foo2mqtt.io"}, - "automation_type": "trigger", - "payload": "short_press", - "topic": "foobar/triggers/button1", - "type": "button_short_press", - "subtype": "button_1", - }, - ), - ( - "homeassistant/sensor/0AFFD2/bla2/config", - { - "device": {"identifiers": ["0AFFD2"], "name": "test_device"}, - "o": {"name": "Foo2Mqtt", "sw": "1.40.2", "url": "https://www.foo2mqtt.io"}, - "state_topic": "foobar/sensors/bla2/state", - "unique_id": "bla002", - }, - ), - ( - "homeassistant/tag/0AFFD2/bla3/config", - { - "device": {"identifiers": ["0AFFD2"], "name": "test_device"}, - "o": {"name": "Foo2Mqtt", "sw": "1.40.2", "url": "https://www.foo2mqtt.io"}, - "topic": "foobar/tags/bla3/see", - }, - ), -] -TEST_DEVICE_CONFIG = { - "device": {"identifiers": ["0AFFD2"], "name": "test_device"}, - "o": {"name": "Foo2Mqtt", "sw": "1.50.0", "url": "https://www.foo2mqtt.io"}, - "cmps": { - "bla1": { - "platform": "device_automation", - "automation_type": "trigger", - "payload": "short_press", - "topic": "foobar/triggers/button1", - "type": "button_short_press", - "subtype": "button_1", - }, - "bla2": { - "platform": "sensor", - "state_topic": "foobar/sensors/bla2/state", - "unique_id": "bla002", - "name": "mqtt_sensor", - }, - "bla3": { - "platform": "tag", - "topic": "foobar/tags/bla3/see", - }, - }, -} -TEST_DEVICE_DISCOVERY_TOPIC = "homeassistant/device/0AFFD2/config" - - -async def help_check_discovered_items( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, tag_mock: AsyncMock -) -> None: - """Help checking discovered test items are still available.""" - - # Check the device_trigger was discovered - 
device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - assert device_entry is not None - triggers = await async_get_device_automations( - hass, DeviceAutomationType.TRIGGER, device_entry.id - ) - assert len(triggers) == 1 - # Check the sensor was discovered - state = hass.states.get("sensor.test_device_mqtt_sensor") - assert state is not None - - # Check the tag works - async_fire_mqtt_message(hass, "foobar/tags/bla3/see", DEFAULT_TAG_SCAN) - await hass.async_block_till_done() - tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id) - tag_mock.reset_mock() - - -@pytest.fixture -def mqtt_data_flow_calls() -> list[MqttServiceInfo]: - """Return list to capture MQTT data flow calls.""" - return [] - - -@pytest.fixture -async def mock_mqtt_flow( - hass: HomeAssistant, mqtt_data_flow_calls: list[MqttServiceInfo] -) -> config_entries.ConfigFlow: - """Test fixture for mqtt integration flow. - - The topic is used as a unique ID. - The component test domain used is: `comp`. - - Creates an entry if it does not exist. - Updates an entry if it exists, and there is an updated payload. - """ - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - async def async_step_mqtt(self, discovery_info: MqttServiceInfo) -> FlowResult: - """Test mqtt step.""" - await asyncio.sleep(0) - mqtt_data_flow_calls.append(discovery_info) - # Abort a flow if there is an update for the existing entry - if entry := self.hass.config_entries.async_entry_for_domain_unique_id( - "comp", discovery_info.topic - ): - hass.config_entries.async_update_entry( - entry, - data={ - "name": discovery_info.topic, - "payload": discovery_info.payload, - }, - ) - raise AbortFlow("already_configured") - await self.async_set_unique_id(discovery_info.topic) - return self.async_create_entry( - title="Test", - data={"name": discovery_info.topic, "payload": discovery_info.payload}, - ) - - return TestFlow - @pytest.mark.parametrize( "mqtt_config_entry_data", [ @@ -208,10 +73,13 @@ async def test_subscribing_config_topic( discovery_topic = "homeassistant" await async_start(hass, discovery_topic, entry) - topics = [call[1][0] for call in mqtt_mock.async_subscribe.mock_calls] - for component in SUPPORTED_COMPONENTS: - assert f"{discovery_topic}/{component}/+/config" in topics - assert f"{discovery_topic}/{component}/+/+/config" in topics + call_args1 = mqtt_mock.async_subscribe.mock_calls[0][1] + assert call_args1[2] == 0 + call_args2 = mqtt_mock.async_subscribe.mock_calls[1][1] + assert call_args2[2] == 0 + topics = [call_args1[0], call_args2[0]] + assert discovery_topic + "/+/+/config" in topics + assert discovery_topic + "/+/+/+/config" in topics @pytest.mark.parametrize( @@ -219,8 +87,6 @@ [ ("homeassistant/binary_sensor/bla/not_config", False), ("homeassistant/binary_sensor/rörkrökare/config", True), - ("homeassistant/device/bla/not_config", False), - ("homeassistant/device/rörkrökare/config", True), ], ) async def test_invalid_topic( @@ -249,15 +115,10 @@ caplog.clear() -@pytest.mark.parametrize( - "discovery_topic", - ["homeassistant/binary_sensor/bla/config", "homeassistant/device/bla/config"], -) async def test_invalid_json( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, - discovery_topic: str, ) -> None: """Test sending in invalid JSON.""" await mqtt_mock_entry() @@ -266,7 +127,9 @@ ) as mock_dispatcher_send: mock_dispatcher_send = 
AsyncMock(return_value=None) - async_fire_mqtt_message(hass, discovery_topic, "not json") + async_fire_mqtt_message( + hass, "homeassistant/binary_sensor/bla/config", "not json" + ) await hass.async_block_till_done() assert "Unable to parse JSON" in caplog.text assert not mock_dispatcher_send.called @@ -315,56 +178,6 @@ async def test_invalid_config( assert "Error 'expected int for dictionary value @ data['qos']'" in caplog.text -async def test_invalid_device_discovery_config( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test sending in JSON that violates the discovery schema if device or platform key is missing.""" - await mqtt_mock_entry() - async_fire_mqtt_message( - hass, - "homeassistant/device/bla/config", - '{ "o": {"name": "foobar"}, "cmps": ' - '{ "acp1": {"name": "abc", "state_topic": "home/alarm", ' - '"unique_id": "very_unique",' - '"command_topic": "home/alarm/set", ' - '"platform":"alarm_control_panel"}}}', - ) - await hass.async_block_till_done() - assert ( - "Invalid MQTT device discovery payload for bla, " - "required key not provided @ data['device']" in caplog.text - ) - - caplog.clear() - async_fire_mqtt_message( - hass, - "homeassistant/device/bla/config", - '{ "o": {"name": "foobar"}, "dev": {"identifiers": ["ABDE03"]}, ' - '"cmps": { "acp1": {"name": "abc", "state_topic": "home/alarm", ' - '"command_topic": "home/alarm/set" }}}', - ) - await hass.async_block_till_done() - assert ( - "Invalid MQTT device discovery payload for bla, " - "required key not provided @ data['components']['acp1']['platform']" - in caplog.text - ) - - caplog.clear() - async_fire_mqtt_message( - hass, - "homeassistant/device/bla/config", - '{ "o": {"name": "foobar"}, "dev": {"identifiers": ["ABDE03"]}, ' '"cmps": ""}', - ) - await hass.async_block_till_done() - assert ( - "Invalid MQTT device discovery payload for bla, " - "expected a dictionary for dictionary value @ data['components']" in caplog.text - ) - - async def test_only_valid_components( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, @@ -385,6 +198,8 @@ async def test_only_valid_components( await hass.async_block_till_done() + assert f"Integration {invalid_component} is not supported" in caplog.text + assert not mock_dispatcher_send.called @@ -407,70 +222,27 @@ async def test_correct_config_discovery( assert ("binary_sensor", "bla") in hass.data["mqtt"].discovery_already_discovered -@pytest.mark.parametrize( - ("discovery_topic", "payloads", "discovery_id"), - [ - ( - "homeassistant/binary_sensor/bla/config", - ( - '{"name":"Beer","state_topic": "test-topic",' - '"unique_id": "very_unique1",' - '"o":{"name":"bla2mqtt","sw":"1.0"},' - '"dev":{"identifiers":["bla"],"name": "bla"}}', - '{"name":"Milk","state_topic": "test-topic",' - '"unique_id": "very_unique1",' - '"o":{"name":"bla2mqtt","sw":"1.1",' - '"url":"https://bla2mqtt.example.com/support"},' - '"dev":{"identifiers":["bla"],"name": "bla"}}', - ), - "bla", - ), - ( - "homeassistant/device/bla/config", - ( - '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' - '"unique_id": "very_unique1",' - '"name":"Beer","state_topic": "test-topic"}},' - '"o":{"name":"bla2mqtt","sw":"1.0"},' - '"dev":{"identifiers":["bla"],"name": "bla"}}', - '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' - '"unique_id": "very_unique1",' - '"name":"Milk","state_topic": "test-topic"}},' - '"o":{"name":"bla2mqtt","sw":"1.1",' - '"url":"https://bla2mqtt.example.com/support"},' - 
'"dev":{"identifiers":["bla"],"name": "bla"}}', - ), - "bla bin_sens1", - ), - ], -) async def test_discovery_integration_info( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, - discovery_topic: str, - payloads: tuple[str, str], - discovery_id: str, ) -> None: - """Test discovery of integration info.""" + """Test logging discovery of new and updated items.""" await mqtt_mock_entry() async_fire_mqtt_message( hass, - discovery_topic, - payloads[0], + "homeassistant/binary_sensor/bla/config", + '{ "name": "Beer", "state_topic": "test-topic", "o": {"name": "bla2mqtt", "sw": "1.0" } }', ) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.bla_beer") + state = hass.states.get("binary_sensor.beer") assert state is not None - assert state.name == "bla Beer" + assert state.name == "Beer" assert ( - "Processing device discovery for 'bla' from external " - "application bla2mqtt, version: 1.0" - in caplog.text - or f"Found new component: binary_sensor {discovery_id} from external application bla2mqtt, version: 1.0" + "Found new component: binary_sensor bla from external application bla2mqtt, version: 1.0" in caplog.text ) caplog.clear() @@ -478,635 +250,47 @@ async def test_discovery_integration_info( # Send an update and add support url async_fire_mqtt_message( hass, - discovery_topic, - payloads[1], + "homeassistant/binary_sensor/bla/config", + '{ "name": "Milk", "state_topic": "test-topic", "o": {"name": "bla2mqtt", "sw": "1.1", "url": "https://bla2mqtt.example.com/support" } }', ) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.bla_beer") + state = hass.states.get("binary_sensor.beer") assert state is not None - assert state.name == "bla Milk" + assert state.name == "Milk" assert ( - f"Component has already been discovered: binary_sensor {discovery_id}" + "Component has already been discovered: binary_sensor bla, sending update from external application bla2mqtt, version: 1.1, support URL: https://bla2mqtt.example.com/support" in caplog.text ) @pytest.mark.parametrize( - ("single_configs", "device_discovery_topic", "device_config"), - [(TEST_SINGLE_CONFIGS, TEST_DEVICE_DISCOVERY_TOPIC, TEST_DEVICE_CONFIG)], -) -async def test_discovery_migration_to_device_base( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - mqtt_mock_entry: MqttMockHAClientGenerator, - tag_mock: AsyncMock, - caplog: pytest.LogCaptureFixture, - single_configs: list[tuple[str, dict[str, Any]]], - device_discovery_topic: str, - device_config: dict[str, Any], -) -> None: - """Test the migration of single discovery to device discovery.""" - await mqtt_mock_entry() - - # Discovery single config schema - for discovery_topic, config in single_configs: - payload = json.dumps(config) - async_fire_mqtt_message( - hass, - discovery_topic, - payload, - ) - await hass.async_block_till_done() - await hass.async_block_till_done() - - await help_check_discovered_items(hass, device_registry, tag_mock) - - # Try to migrate to device based discovery without migrate_discovery flag - payload = json.dumps(device_config) - async_fire_mqtt_message( - hass, - device_discovery_topic, - payload, - ) - await hass.async_block_till_done() - assert ( - "Received a conflicting MQTT discovery message for device_automation " - "'0AFFD2 bla1' which was previously discovered on topic homeassistant/" - "device_automation/0AFFD2/bla1/config from external application Foo2Mqtt, " - "version: 1.40.2; the conflicting 
discovery message was received on topic " - "homeassistant/device/0AFFD2/config from external application Foo2Mqtt, " - "version: 1.50.0; for support visit https://www.foo2mqtt.io" in caplog.text - ) - assert ( - "Received a conflicting MQTT discovery message for entity sensor." - "test_device_mqtt_sensor; the entity was previously discovered on topic " - "homeassistant/sensor/0AFFD2/bla2/config from external application Foo2Mqtt, " - "version: 1.40.2; the conflicting discovery message was received on topic " - "homeassistant/device/0AFFD2/config from external application Foo2Mqtt, " - "version: 1.50.0; for support visit https://www.foo2mqtt.io" in caplog.text - ) - assert ( - "Received a conflicting MQTT discovery message for tag '0AFFD2 bla3' which " - "was previously discovered on topic homeassistant/tag/0AFFD2/bla3/config " - "from external application Foo2Mqtt, version: 1.40.2; the conflicting " - "discovery message was received on topic homeassistant/device/0AFFD2/config " - "from external application Foo2Mqtt, version: 1.50.0; for support visit " - "https://www.foo2mqtt.io" in caplog.text - ) - - # Check we still have our mqtt items - await help_check_discovered_items(hass, device_registry, tag_mock) - - # Test Enable discovery migration - # Discovery single config schema - caplog.clear() - for discovery_topic, _ in single_configs: - # migr_discvry is an abbreviation for migrate_discovery - payload = json.dumps({"migr_discvry": True}) - async_fire_mqtt_message( - hass, - discovery_topic, - payload, - ) - await hass.async_block_till_done() - await hass.async_block_till_done() - - # Assert we still have our device entry - device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - assert device_entry is not None - # Check our trigger was unloaded - triggers = await async_get_device_automations( - hass, DeviceAutomationType.TRIGGER, device_entry.id - ) - assert len(triggers) == 0 - # Check the sensor was unloaded - state = hass.states.get("sensor.test_device_mqtt_sensor") - assert state is None - # Check the entity registry entry is retained - assert entity_registry.async_is_registered("sensor.test_device_mqtt_sensor") - - assert ( - "Migration to MQTT device discovery schema started for device_automation " - "'0AFFD2 bla1' from external application Foo2Mqtt, version: 1.40.2 on topic " - "homeassistant/device_automation/0AFFD2/bla1/config. To complete migration, " - "publish a device discovery message with device_automation '0AFFD2 bla1'. " - "After completed migration, publish an empty (retained) payload to " - "homeassistant/device_automation/0AFFD2/bla1/config" in caplog.text - ) - assert ( - "Migration to MQTT device discovery schema started for entity sensor." - "test_device_mqtt_sensor from external application Foo2Mqtt, version: 1.40.2 " - "on topic homeassistant/sensor/0AFFD2/bla2/config. To complete migration, " - "publish a device discovery message with sensor entity '0AFFD2 bla2'. After 
After " - "completed migration, publish an empty (retained) payload to " - "homeassistant/sensor/0AFFD2/bla2/config" in caplog.text - ) - - # Migrate to device based discovery - caplog.clear() - payload = json.dumps(device_config) - async_fire_mqtt_message( - hass, - device_discovery_topic, - payload, - ) - await hass.async_block_till_done() - - caplog.clear() - for _ in range(2): - # Test publishing an empty payload twice to the migrated discovery topics - # does not remove the migrated items - for discovery_topic, _ in single_configs: - async_fire_mqtt_message( - hass, - discovery_topic, - "", - ) - await hass.async_block_till_done() - await hass.async_block_till_done() - - # Check we still have our mqtt items after publishing an - # empty payload to the old discovery topics - await help_check_discovered_items(hass, device_registry, tag_mock) - - # Check we cannot accidentally migrate back and remove the items - caplog.clear() - for discovery_topic, config in single_configs: - payload = json.dumps(config) - async_fire_mqtt_message( - hass, - discovery_topic, - payload, - ) - await hass.async_block_till_done() - await hass.async_block_till_done() - - assert ( - "Received a conflicting MQTT discovery message for device_automation " - "'0AFFD2 bla1' which was previously discovered on topic homeassistant/device" - "/0AFFD2/config from external application Foo2Mqtt, version: 1.50.0; the " - "conflicting discovery message was received on topic homeassistant/" - "device_automation/0AFFD2/bla1/config from external application Foo2Mqtt, " - "version: 1.40.2; for support visit https://www.foo2mqtt.io" in caplog.text - ) - assert ( - "Received a conflicting MQTT discovery message for entity sensor." - "test_device_mqtt_sensor; the entity was previously discovered on topic " - "homeassistant/device/0AFFD2/config from external application Foo2Mqtt, " - "version: 1.50.0; the conflicting discovery message was received on topic " - "homeassistant/sensor/0AFFD2/bla2/config from external application Foo2Mqtt, " - "version: 1.40.2; for support visit https://www.foo2mqtt.io" in caplog.text - ) - assert ( - "Received a conflicting MQTT discovery message for tag '0AFFD2 bla3' which was " - "previously discovered on topic homeassistant/device/0AFFD2/config from " - "external application Foo2Mqtt, version: 1.50.0; the conflicting discovery " - "message was received on topic homeassistant/tag/0AFFD2/bla3/config from " - "external application Foo2Mqtt, version: 1.40.2; for support visit " - "https://www.foo2mqtt.io" in caplog.text - ) - - caplog.clear() - for discovery_topic, config in single_configs: - payload = json.dumps(config) - async_fire_mqtt_message( - hass, - discovery_topic, - "", - ) - await hass.async_block_till_done() - await hass.async_block_till_done() - - # Check we still have our mqtt items after publishing an - # empty payload to the old discovery topics - await help_check_discovered_items(hass, device_registry, tag_mock) - - # Check we can remove the config using the new discovery topic - async_fire_mqtt_message( - hass, - device_discovery_topic, - "", - ) - await hass.async_block_till_done() - await hass.async_block_till_done() - # Check the device was removed as all device components were removed - device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - assert device_entry is None - await hass.async_block_till_done(wait_background_tasks=True) - - -@pytest.mark.parametrize( - "config", + "config_message", [ - {"state_topic": "foobar/sensors/bla2/state", "name": 
"none_test"}, - { - "state_topic": "foobar/sensors/bla2/state", - "name": "none_test", - "unique_id": "very_unique", - }, - { - "state_topic": "foobar/sensors/bla2/state", - "device": {"identifiers": ["0AFFD2"], "name": "none_test"}, - }, - ], -) -async def test_discovery_migration_unique_id( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, - config: dict[str, Any], -) -> None: - """Test entity has a unique_id and device context when migrating.""" - await mqtt_mock_entry() - - discovery_topic = "homeassistant/sensor/0AFFD2/bla2/config" - - # Discovery with single config schema - payload = json.dumps(config) - async_fire_mqtt_message( - hass, - discovery_topic, - payload, - ) - await hass.async_block_till_done() - await hass.async_block_till_done() - - # Try discovery migration - payload = json.dumps({"migr_discvry": True}) - async_fire_mqtt_message( - hass, - discovery_topic, - payload, - ) - await hass.async_block_till_done() - await hass.async_block_till_done() - - # Assert the migration attempt fails - assert "Discovery migration is not possible" in caplog.text - - -@pytest.mark.parametrize( - ("single_configs", "device_discovery_topic", "device_config"), - [(TEST_SINGLE_CONFIGS, TEST_DEVICE_DISCOVERY_TOPIC, TEST_DEVICE_CONFIG)], -) -async def test_discovery_rollback_to_single_base( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - mqtt_mock_entry: MqttMockHAClientGenerator, - tag_mock: AsyncMock, - caplog: pytest.LogCaptureFixture, - single_configs: list[tuple[str, dict[str, Any]]], - device_discovery_topic: str, - device_config: dict[str, Any], -) -> None: - """Test the rollback of device discovery to a single component discovery.""" - await mqtt_mock_entry() - - # Start device based discovery - # any single component discovery will be migrated - payload = json.dumps(device_config) - async_fire_mqtt_message( - hass, - device_discovery_topic, - payload, - ) - await hass.async_block_till_done() - await hass.async_block_till_done() - - await help_check_discovered_items(hass, device_registry, tag_mock) - - # Migrate to single component discovery - # Test the schema - caplog.clear() - payload = json.dumps({"migrate_discovery": "invalid"}) - async_fire_mqtt_message( - hass, - device_discovery_topic, - payload, - ) - await hass.async_block_till_done() - assert "Invalid MQTT device discovery payload for 0AFFD2" in caplog.text - - # Set the correct migrate_discovery flag in the device payload - # to allow rollback - payload = json.dumps({"migrate_discovery": True}) - async_fire_mqtt_message( - hass, - device_discovery_topic, - payload, - ) - await hass.async_block_till_done() - - # Check the log messages - assert ( - "Rollback to MQTT platform discovery schema started for entity sensor." - "test_device_mqtt_sensor from external application Foo2Mqtt, version: 1.50.0 " - "on topic homeassistant/device/0AFFD2/config. To complete rollback, publish a " - "platform discovery message with sensor entity '0AFFD2 bla2'. After completed " - "rollback, publish an empty (retained) payload to " - "homeassistant/device/0AFFD2/config" in caplog.text - ) - assert ( - "Rollback to MQTT platform discovery schema started for device_automation " - "'0AFFD2 bla1' from external application Foo2Mqtt, version: 1.50.0 on topic " - "homeassistant/device/0AFFD2/config. To complete rollback, publish a platform " - "discovery message with device_automation '0AFFD2 bla1'. 
After completed " - "rollback, publish an empty (retained) payload to " - "homeassistant/device/0AFFD2/config" in caplog.text - ) - - # Assert we still have our device entry - device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - assert device_entry is not None - # Check our trigger was unloaded - triggers = await async_get_device_automations( - hass, DeviceAutomationType.TRIGGER, device_entry.id - ) - assert len(triggers) == 0 - # Check the sensor was unloaded - state = hass.states.get("sensor.test_device_mqtt_sensor") - assert state is None - # Check the entity registry entry is retained - assert entity_registry.async_is_registered("sensor.test_device_mqtt_sensor") - - # Publish the new component based payloads - # to switch back to component based discovery - for discovery_topic, config in single_configs: - payload = json.dumps(config) - async_fire_mqtt_message( - hass, - discovery_topic, - payload, - ) - await hass.async_block_till_done() - await hass.async_block_till_done() - - # Check we still have our mqtt items - # await help_check_discovered_items(hass, device_registry, tag_mock) - - for _ in range(2): - # Test publishing an empty payload twice to the migrated discovery topic - # does not remove the migrated items - async_fire_mqtt_message( - hass, - device_discovery_topic, - "", - ) - await hass.async_block_till_done() - await hass.async_block_till_done() - - # Check we still have our mqtt items after publishing an - # empty payload to the old discovery topics - await help_check_discovered_items(hass, device_registry, tag_mock) - - # Check we cannot accidentally migrate back and remove the items - payload = json.dumps(device_config) - async_fire_mqtt_message( - hass, - device_discovery_topic, - payload, - ) - await hass.async_block_till_done() - await hass.async_block_till_done() - - # Check we still have our mqtt items after publishing an - # empty payload to the old discovery topics - await help_check_discovered_items(hass, device_registry, tag_mock) - - # Check we can remove the the config using the new discovery topics - for discovery_topic, config in single_configs: - payload = json.dumps(config) - async_fire_mqtt_message( - hass, - discovery_topic, - "", - ) - await hass.async_block_till_done() - await hass.async_block_till_done() - # Check the device was removed as all device components were removed - device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - assert device_entry is None - - -@pytest.mark.parametrize( - ("discovery_topic", "payload"), - [ - ( - "homeassistant/binary_sensor/bla/config", - '{"state_topic": "test-topic",' - '"name":"bla","unique_id":"very_unique1",' - '"avty": {"topic": "avty-topic"},' - '"o":{"name":"bla2mqtt","sw":"1.0"},' - '"dev":{"identifiers":["bla"],"name":"Beer"}}', - ), - ( - "homeassistant/device/bla/config", - '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' - '"name":"bla","unique_id":"very_unique1",' - '"state_topic": "test-topic"}},' - '"avty": {"topic": "avty-topic"},' - '"o":{"name":"bla2mqtt","sw":"1.0"},' - '"dev":{"identifiers":["bla"],"name":"Beer"}}', - ), - ], - ids=["component", "device"], -) -async def test_discovery_availability( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - discovery_topic: str, - payload: str, -) -> None: - """Test device discovery with shared availability mapping.""" - await mqtt_mock_entry() - async_fire_mqtt_message( - hass, - discovery_topic, - payload, - ) - await hass.async_block_till_done() - state = 
hass.states.get("binary_sensor.beer_bla") - assert state is not None - assert state.name == "Beer bla" - assert state.state == STATE_UNAVAILABLE - - async_fire_mqtt_message( - hass, - "avty-topic", - "online", - ) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.beer_bla") - assert state is not None - assert state.state == STATE_UNKNOWN - - async_fire_mqtt_message( - hass, - "test-topic", - "ON", - ) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.beer_bla") - assert state is not None - assert state.state == STATE_ON - - -@pytest.mark.parametrize( - ("discovery_topic", "payload"), - [ - ( - "homeassistant/device/bla/config", - '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' - '"unique_id":"very_unique",' - '"avty": {"topic": "avty-topic-component"},' - '"name":"Beer","state_topic": "test-topic"}},' - '"avty": {"topic": "avty-topic-device"},' - '"o":{"name":"bla2mqtt","sw":"1.0"},"dev":{"identifiers":["bla"]}}', - ), - ( - "homeassistant/device/bla/config", - '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' - '"unique_id":"very_unique",' - '"availability_topic": "avty-topic-component",' - '"name":"Beer","state_topic": "test-topic"}},' - '"availability_topic": "avty-topic-device",' - '"o":{"name":"bla2mqtt","sw":"1.0"},"dev":{"identifiers":["bla"]}}', - ), - ], - ids=["test1", "test2"], -) -async def test_discovery_component_availability_overridden( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - discovery_topic: str, - payload: str, -) -> None: - """Test device discovery with overridden shared availability mapping.""" - await mqtt_mock_entry() - async_fire_mqtt_message( - hass, - discovery_topic, - payload, - ) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.none_beer") - assert state is not None - assert state.name == "Beer" - assert state.state == STATE_UNAVAILABLE - - async_fire_mqtt_message( - hass, - "avty-topic-device", - "online", - ) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.none_beer") - assert state is not None - assert state.state == STATE_UNAVAILABLE - - async_fire_mqtt_message( - hass, - "avty-topic-component", - "online", - ) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.none_beer") - assert state is not None - assert state.state == STATE_UNKNOWN - - async_fire_mqtt_message( - hass, - "test-topic", - "ON", - ) - await hass.async_block_till_done() - state = hass.states.get("binary_sensor.none_beer") - assert state is not None - assert state.state == STATE_ON - - -@pytest.mark.parametrize( - ("discovery_topic", "config_message", "error_message"), - [ - ( - "homeassistant/binary_sensor/bla/config", - '{ "name": "Beer", "unique_id": "very_unique", ' - '"state_topic": "test-topic", "o": "bla2mqtt" }', - "Unable to parse origin information from discovery message", - ), - ( - "homeassistant/binary_sensor/bla/config", - '{ "name": "Beer", "unique_id": "very_unique", ' - '"state_topic": "test-topic", "o": 2.0 }', - "Unable to parse origin information from discovery message", - ), - ( - "homeassistant/binary_sensor/bla/config", - '{ "name": "Beer", "unique_id": "very_unique", ' - '"state_topic": "test-topic", "o": null }', - "Unable to parse origin information from discovery message", - ), - ( - "homeassistant/binary_sensor/bla/config", - '{ "name": "Beer", "unique_id": "very_unique", ' - '"state_topic": "test-topic", "o": {"sw": "bla2mqtt"} }', - "Unable to parse origin information from discovery 
message", - ), - ( - "homeassistant/device/bla/config", - '{"dev":{"identifiers":["bs1"]},"cmps":{"bs1":' - '{"platform":"binary_sensor","name":"Beer","unique_id": "very_unique",' - '"state_topic":"test-topic"}},"o": "bla2mqtt"}', - "Invalid MQTT device discovery payload for bla, " - "expected a dictionary for dictionary value @ data['origin']", - ), - ( - "homeassistant/device/bla/config", - '{"dev":{"identifiers":["bs1"]},"cmps":{"bs1":' - '{"platform":"binary_sensor","name":"Beer","unique_id": "very_unique",' - '"state_topic":"test-topic"}},"o": 2.0}', - "Invalid MQTT device discovery payload for bla, " - "expected a dictionary for dictionary value @ data['origin']", - ), - ( - "homeassistant/device/bla/config", - '{"dev":{"identifiers":["bs1"]},"cmps":{"bs1":' - '{"platform":"binary_sensor","name":"Beer","unique_id": "very_unique",' - '"state_topic":"test-topic"}},"o": null}', - "Invalid MQTT device discovery payload for bla, " - "expected a dictionary for dictionary value @ data['origin']", - ), - ( - "homeassistant/device/bla/config", - '{"dev":{"identifiers":["bs1"]},"cmps":{"bs1":' - '{"platform":"binary_sensor","name":"Beer","unique_id": "very_unique",' - '"state_topic":"test-topic"}},"o": {"sw": "bla2mqtt"}}', - "Invalid MQTT device discovery payload for bla, " - "required key not provided @ data['origin']['name']", - ), + '{ "name": "Beer", "state_topic": "test-topic", "o": "bla2mqtt" }', + '{ "name": "Beer", "state_topic": "test-topic", "o": 2.0 }', + '{ "name": "Beer", "state_topic": "test-topic", "o": null }', + '{ "name": "Beer", "state_topic": "test-topic", "o": {"sw": "bla2mqtt"} }', ], ) async def test_discovery_with_invalid_integration_info( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, - discovery_topic: str, config_message: str, - error_message: str, ) -> None: """Test sending in correct JSON.""" await mqtt_mock_entry() - async_fire_mqtt_message(hass, discovery_topic, config_message) + async_fire_mqtt_message( + hass, "homeassistant/binary_sensor/bla/config", config_message + ) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.none_beer") + state = hass.states.get("binary_sensor.beer") assert state is None - assert error_message in caplog.text + assert "Unable to parse origin information from discovery message" in caplog.text async def test_discover_fan( @@ -1625,86 +809,43 @@ async def test_duplicate_removal( assert "Component has already been discovered: binary_sensor bla" not in caplog.text -@pytest.mark.parametrize( - ("discovery_payloads", "entity_ids"), - [ - ( - { - "homeassistant/sensor/sens1/config": "{" - '"device":{"identifiers":["0AFFD2"]},' - '"state_topic": "foobar/sensor1",' - '"unique_id": "unique1",' - '"name": "sensor1"' - "}", - "homeassistant/sensor/sens2/config": "{" - '"device":{"identifiers":["0AFFD2"]},' - '"state_topic": "foobar/sensor2",' - '"unique_id": "unique2",' - '"name": "sensor2"' - "}", - }, - ["sensor.none_sensor1", "sensor.none_sensor2"], - ), - ( - { - "homeassistant/device/bla/config": "{" - '"device":{"identifiers":["0AFFD2"]},' - '"o": {"name": "foobar"},' - '"cmps": {"sens1": {' - '"platform": "sensor",' - '"name": "sensor1",' - '"state_topic": "foobar/sensor1",' - '"unique_id": "unique1"' - '},"sens2": {' - '"platform": "sensor",' - '"name": "sensor2",' - '"state_topic": "foobar/sensor2",' - '"unique_id": "unique2"' - "}}}" - }, - ["sensor.none_sensor1", "sensor.none_sensor2"], - ), - ], -) async def test_cleanup_device_manual( hass: HomeAssistant, - 
mock_debouncer: asyncio.Event, hass_ws_client: WebSocketGenerator, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, mqtt_mock_entry: MqttMockHAClientGenerator, - discovery_payloads: dict[str, str], - entity_ids: list[str], ) -> None: """Test discovered device is cleaned up when entry removed from device.""" mqtt_mock = await mqtt_mock_entry() assert await async_setup_component(hass, "config", {}) ws_client = await hass_ws_client(hass) - mock_debouncer.clear() - for discovery_topic, discovery_payload in discovery_payloads.items(): - async_fire_mqtt_message(hass, discovery_topic, discovery_payload) - await mock_debouncer.wait() + data = ( + '{ "device":{"identifiers":["0AFFD2"]},' + ' "state_topic": "foobar/sensor",' + ' "unique_id": "unique" }' + ) + + async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data) + await hass.async_block_till_done() # Verify device and registry entries are created device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) assert device_entry is not None + entity_entry = entity_registry.async_get("sensor.none_mqtt_sensor") + assert entity_entry is not None - for entity_id in entity_ids: - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry is not None - - state = hass.states.get(entity_id) - assert state is not None + state = hass.states.get("sensor.none_mqtt_sensor") + assert state is not None # Remove MQTT from the device mqtt_config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - mock_debouncer.clear() response = await ws_client.remove_device( device_entry.id, mqtt_config_entry.entry_id ) assert response["success"] - await mock_debouncer.wait() + await hass.async_block_till_done() await hass.async_block_till_done() # Verify device and registry entries are cleared @@ -1714,224 +855,60 @@ async def test_cleanup_device_manual( assert entity_entry is None # Verify state is removed - for entity_id in entity_ids: - state = hass.states.get(entity_id) - assert state is None + state = hass.states.get("sensor.none_mqtt_sensor") + assert state is None + await hass.async_block_till_done() - # Verify retained discovery topics have been cleared - mqtt_mock.async_publish.assert_has_calls( - [call(discovery_topic, None, 0, True) for discovery_topic in discovery_payloads] + # Verify retained discovery topic has been cleared + mqtt_mock.async_publish.assert_called_once_with( + "homeassistant/sensor/bla/config", None, 0, True ) - await hass.async_block_till_done(wait_background_tasks=True) - -@pytest.mark.parametrize( - ("discovery_topic", "discovery_payload", "entity_ids"), - [ - ( - "homeassistant/sensor/bla/config", - '{ "device":{"identifiers":["0AFFD2"]},' - ' "state_topic": "foobar/sensor",' - ' "unique_id": "unique" }', - ["sensor.none_mqtt_sensor"], - ), - ( - "homeassistant/device/bla/config", - '{ "device":{"identifiers":["0AFFD2"]},' - ' "o": {"name": "foobar"},' - ' "cmps": {"sens1": {' - ' "platform": "sensor",' - ' "name": "sensor1",' - ' "state_topic": "foobar/sensor1",' - ' "unique_id": "unique1"' - ' },"sens2": {' - ' "platform": "sensor",' - ' "name": "sensor2",' - ' "state_topic": "foobar/sensor2",' - ' "unique_id": "unique2"' - "}}}", - ["sensor.none_sensor1", "sensor.none_sensor2"], - ), - ], -) async def test_cleanup_device_mqtt( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, mqtt_mock_entry: MqttMockHAClientGenerator, - discovery_topic: str, - discovery_payload: str, - entity_ids: list[str], ) -> None: - """Test 
discovered device is cleaned up when removed through MQTT.""" + """Test discvered device is cleaned up when removed through MQTT.""" mqtt_mock = await mqtt_mock_entry() - - # set up an existing sensor first data = ( - '{ "device":{"identifiers":["0AFFD3"]},' - ' "name": "sensor_base",' + '{ "device":{"identifiers":["0AFFD2"]},' ' "state_topic": "foobar/sensor",' - ' "unique_id": "unique_base" }' + ' "unique_id": "unique" }' ) - base_discovery_topic = "homeassistant/sensor/bla_base/config" - base_entity_id = "sensor.none_sensor_base" - async_fire_mqtt_message(hass, base_discovery_topic, data) - await hass.async_block_till_done() - # Verify the base entity has been created and it has a state - base_device_entry = device_registry.async_get_device( - identifiers={("mqtt", "0AFFD3")} - ) - assert base_device_entry is not None - entity_entry = entity_registry.async_get(base_entity_id) - assert entity_entry is not None - state = hass.states.get(base_entity_id) - assert state is not None - - async_fire_mqtt_message(hass, discovery_topic, discovery_payload) + async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data) await hass.async_block_till_done() # Verify device and registry entries are created device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) assert device_entry is not None - for entity_id in entity_ids: - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry is not None + entity_entry = entity_registry.async_get("sensor.none_mqtt_sensor") + assert entity_entry is not None - state = hass.states.get(entity_id) - assert state is not None + state = hass.states.get("sensor.none_mqtt_sensor") + assert state is not None - async_fire_mqtt_message(hass, discovery_topic, "") + async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", "") await hass.async_block_till_done() await hass.async_block_till_done() # Verify device and registry entries are cleared device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) assert device_entry is None + entity_entry = entity_registry.async_get("sensor.none_mqtt_sensor") + assert entity_entry is None - for entity_id in entity_ids: - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry is None - - # Verify state is removed - state = hass.states.get(entity_id) - assert state is None - await hass.async_block_till_done() + # Verify state is removed + state = hass.states.get("sensor.none_mqtt_sensor") + assert state is None + await hass.async_block_till_done() # Verify retained discovery topics have not been cleared again mqtt_mock.async_publish.assert_not_called() - # Verify the base entity still exists and it has a state - base_device_entry = device_registry.async_get_device( - identifiers={("mqtt", "0AFFD3")} - ) - assert base_device_entry is not None - entity_entry = entity_registry.async_get(base_entity_id) - assert entity_entry is not None - state = hass.states.get(base_entity_id) - assert state is not None - - -async def test_cleanup_device_mqtt_device_discovery( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test discovered device is cleaned up partly when removed through MQTT.""" - await mqtt_mock_entry() - - discovery_topic = "homeassistant/device/bla/config" - discovery_payload = ( - '{ "device":{"identifiers":["0AFFD2"]},' - ' "o": {"name": "foobar"},' - ' "cmps": {"sens1": {' - ' "p": "sensor",' 
- ' "name": "sensor1",' - ' "state_topic": "foobar/sensor1",' - ' "unique_id": "unique1"' - ' },"sens2": {' - ' "p": "sensor",' - ' "name": "sensor2",' - ' "state_topic": "foobar/sensor2",' - ' "unique_id": "unique2"' - "}}}" - ) - entity_ids = ["sensor.none_sensor1", "sensor.none_sensor2"] - async_fire_mqtt_message(hass, discovery_topic, discovery_payload) - await hass.async_block_till_done() - - # Verify device and registry entries are created - device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - assert device_entry is not None - for entity_id in entity_ids: - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry is not None - - state = hass.states.get(entity_id) - assert state is not None - - # Do update and remove sensor 2 from device - discovery_payload_update1 = ( - '{ "device":{"identifiers":["0AFFD2"]},' - ' "o": {"name": "foobar"},' - ' "cmps": {"sens1": {' - ' "p": "sensor",' - ' "name": "sensor1",' - ' "state_topic": "foobar/sensor1",' - ' "unique_id": "unique1"' - ' },"sens2": {' - ' "p": "sensor"' - "}}}" - ) - async_fire_mqtt_message(hass, discovery_topic, discovery_payload_update1) - await hass.async_block_till_done() - state = hass.states.get(entity_ids[0]) - assert state is not None - state = hass.states.get(entity_ids[1]) - assert state is None - - # Repeating the update - async_fire_mqtt_message(hass, discovery_topic, discovery_payload_update1) - await hass.async_block_till_done() - state = hass.states.get(entity_ids[0]) - assert state is not None - state = hass.states.get(entity_ids[1]) - assert state is None - - # Removing last sensor - discovery_payload_update2 = ( - '{ "device":{"identifiers":["0AFFD2"]},' - ' "o": {"name": "foobar"},' - ' "cmps": {"sens1": {' - ' "p": "sensor"' - ' },"sens2": {' - ' "p": "sensor"' - "}}}" - ) - async_fire_mqtt_message(hass, discovery_topic, discovery_payload_update2) - await hass.async_block_till_done() - device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - # Verify the device entry was removed with the last sensor - assert device_entry is None - for entity_id in entity_ids: - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry is None - - state = hass.states.get(entity_id) - assert state is None - - # Repeating the update - async_fire_mqtt_message(hass, discovery_topic, discovery_payload_update2) - await hass.async_block_till_done() - - # Clear the empty discovery payload and verify there was nothing to cleanup - async_fire_mqtt_message(hass, discovery_topic, "") - await hass.async_block_till_done() - assert "No device components to cleanup" in caplog.text - async def test_cleanup_device_multiple_config_entries( hass: HomeAssistant, @@ -2386,29 +1363,24 @@ EXCLUDED_MODULES = { async def test_missing_discover_abbreviations( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Check MQTT platforms for missing abbreviations.""" await mqtt_mock_entry() - missing: list[str] = [] + missing = [] regex = re.compile(r"(CONF_[a-zA-Z\d_]*) *= *[\'\"]([a-zA-Z\d_]*)[\'\"]") - - def _add_missing(): - for fil in Path(mqtt.__file__).parent.rglob("*.py"): - if fil.name in EXCLUDED_MODULES: - continue - with open(fil, encoding="utf-8") as file: - matches = re.findall(regex, file.read()) - missing.extend( - f"{fil}: no abbreviation for {match[1]} ({match[0]})" - for match in matches - if match[1] not in ABBREVIATIONS.values() - and match[1] not in DEVICE_ABBREVIATIONS.values() - and match[0] not in 
ABBREVIATIONS_WHITE_LIST - ) - - await hass.async_add_executor_job(_add_missing) + for fil in Path(mqtt.__file__).parent.rglob("*.py"): + if fil.name in EXCLUDED_MODULES: + continue + with open(fil, encoding="utf-8") as file: + matches = re.findall(regex, file.read()) + missing.extend( + f"{fil}: no abbreviation for {match[1]} ({match[0]})" + for match in matches + if match[1] not in ABBREVIATIONS.values() + and match[1] not in DEVICE_ABBREVIATIONS.values() + and match[0] not in ABBREVIATIONS_WHITE_LIST + ) assert not missing @@ -2471,22 +1443,17 @@ async def test_complex_discovery_topic_prefix( @patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.UNSUBSCRIBE_COOLDOWN", 0.0) -@pytest.mark.parametrize( - "reason", ["single_instance_allowed", "already_configured", "some_abort_error"] -) -async def test_mqtt_integration_discovery_flow_fitering_on_redundant_payload( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient, reason: str +async def test_mqtt_integration_discovery_subscribe_unsubscribe( + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: - """Check MQTT integration discovery starts a flow once.""" - flow_calls: list[MqttServiceInfo] = [] + """Check MQTT integration discovery subscribe and unsubscribe.""" class TestFlow(config_entries.ConfigFlow): """Test flow.""" async def async_step_mqtt(self, discovery_info: MqttServiceInfo) -> FlowResult: """Test mqtt step.""" - flow_calls.append(discovery_info) - return self.async_abort(reason=reason) + return self.async_abort(reason="already_configured") mock_platform(hass, "comp.config_flow", None) @@ -2497,6 +1464,13 @@ async def test_mqtt_integration_discovery_flow_fitering_on_redundant_payload( """Handle birth message.""" birth.set() + wait_unsub = asyncio.Event() + + @callback + def _mock_unsubscribe(topics: list[str]) -> tuple[int, int]: + wait_unsub.set() + return (0, 0) + entry = MockConfigEntry(domain=mqtt.DOMAIN, data=ENTRY_DEFAULT_BIRTH_MESSAGE) entry.add_to_hass(hass) with ( @@ -2505,6 +1479,7 @@ async def test_mqtt_integration_discovery_flow_fitering_on_redundant_payload( return_value={"comp": ["comp/discovery/#"]}, ), mock_config_flow("comp", TestFlow), + patch.object(mqtt_client_mock, "unsubscribe", side_effect=_mock_unsubscribe), ): assert await hass.config_entries.async_setup(entry.entry_id) await mqtt.async_subscribe(hass, "homeassistant/status", wait_birth) @@ -2514,45 +1489,31 @@ async def test_mqtt_integration_discovery_flow_fitering_on_redundant_payload( assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) assert not mqtt_client_mock.unsubscribe.called mqtt_client_mock.reset_mock() - assert len(flow_calls) == 0 await hass.async_block_till_done(wait_background_tasks=True) - async_fire_mqtt_message(hass, "comp/discovery/bla/config", "initial message") + async_fire_mqtt_message(hass, "comp/discovery/bla/config", "") + await wait_unsub.wait() + mqtt_client_mock.unsubscribe.assert_called_once_with(["comp/discovery/#"]) await hass.async_block_till_done(wait_background_tasks=True) - assert len(flow_calls) == 1 - - # A redundant message gets does not start a new flow - await hass.async_block_till_done(wait_background_tasks=True) - async_fire_mqtt_message(hass, "comp/discovery/bla/config", "initial message") - await hass.async_block_till_done(wait_background_tasks=True) - assert len(flow_calls) == 1 - - # An updated message gets starts a new 
flow - await hass.async_block_till_done(wait_background_tasks=True) - async_fire_mqtt_message(hass, "comp/discovery/bla/config", "update message") - await hass.async_block_till_done(wait_background_tasks=True) - assert len(flow_calls) == 2 @patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.UNSUBSCRIBE_COOLDOWN", 0.0) -async def test_mqtt_discovery_flow_starts_once( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - caplog: pytest.LogCaptureFixture, - mock_mqtt_flow: config_entries.ConfigFlow, - mqtt_data_flow_calls: list[MqttServiceInfo], +async def test_mqtt_discovery_unsubscribe_once( + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: - """Check MQTT integration discovery starts a flow once. + """Check MQTT integration discovery unsubscribe once.""" + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + async def async_step_mqtt(self, discovery_info: MqttServiceInfo) -> FlowResult: + """Test mqtt step.""" + await asyncio.sleep(0) + return self.async_abort(reason="already_configured") - A flow should be started once after discovery, - and after an entry was removed, to trigger re-discovery. - """ - mock_integration( - hass, MockModule(domain="comp", async_setup_entry=AsyncMock(return_value=True)) - ) mock_platform(hass, "comp.config_flow", None) birth = asyncio.Event() @@ -2562,6 +1523,13 @@ async def test_mqtt_discovery_flow_starts_once( """Handle birth message.""" birth.set() + wait_unsub = asyncio.Event() + + @callback + def _mock_unsubscribe(topics: list[str]) -> tuple[int, int]: + wait_unsub.set() + return (0, 0) + entry = MockConfigEntry(domain=mqtt.DOMAIN, data=ENTRY_DEFAULT_BIRTH_MESSAGE) entry.add_to_hass(hass) @@ -2570,7 +1538,8 @@ async def test_mqtt_discovery_flow_starts_once( "homeassistant.components.mqtt.discovery.async_get_mqtt", return_value={"comp": ["comp/discovery/#"]}, ), - mock_config_flow("comp", mock_mqtt_flow), + mock_config_flow("comp", TestFlow), + patch.object(mqtt_client_mock, "unsubscribe", side_effect=_mock_unsubscribe), ): assert await hass.config_entries.async_setup(entry.entry_id) await mqtt.async_subscribe(hass, "homeassistant/status", wait_birth) @@ -2578,86 +1547,17 @@ async def test_mqtt_discovery_flow_starts_once( await birth.wait() assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) - - # Test the initial flow - async_fire_mqtt_message(hass, "comp/discovery/bla/config1", "initial message") - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mqtt_data_flow_calls) == 1 - assert mqtt_data_flow_calls[0].topic == "comp/discovery/bla/config1" - assert mqtt_data_flow_calls[0].payload == "initial message" - - # Test we can ignore updates if they are the same - with caplog.at_level(logging.DEBUG): - async_fire_mqtt_message( - hass, "comp/discovery/bla/config1", "initial message" - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert "Ignoring already processed discovery message" in caplog.text - assert len(mqtt_data_flow_calls) == 1 - - # Test we can apply updates - async_fire_mqtt_message(hass, "comp/discovery/bla/config1", "update message") - await hass.async_block_till_done(wait_background_tasks=True) - - assert len(mqtt_data_flow_calls) == 2 - assert mqtt_data_flow_calls[1].topic == "comp/discovery/bla/config1" - assert 
mqtt_data_flow_calls[1].payload == "update message" - - # Test we set up multiple entries - async_fire_mqtt_message(hass, "comp/discovery/bla/config2", "initial message") - await hass.async_block_till_done(wait_background_tasks=True) - - assert len(mqtt_data_flow_calls) == 3 - assert mqtt_data_flow_calls[2].topic == "comp/discovery/bla/config2" - assert mqtt_data_flow_calls[2].payload == "initial message" - - # Test we update multiple entries - async_fire_mqtt_message(hass, "comp/discovery/bla/config2", "update message") - await hass.async_block_till_done(wait_background_tasks=True) - - assert len(mqtt_data_flow_calls) == 4 - assert mqtt_data_flow_calls[3].topic == "comp/discovery/bla/config2" - assert mqtt_data_flow_calls[3].payload == "update message" - - # Test an empty message triggers a flow to allow cleanup (if needed) - async_fire_mqtt_message(hass, "comp/discovery/bla/config2", "") - await hass.async_block_till_done(wait_background_tasks=True) - - assert len(mqtt_data_flow_calls) == 5 - assert mqtt_data_flow_calls[4].topic == "comp/discovery/bla/config2" - assert mqtt_data_flow_calls[4].payload == "" - - # Cleanup the the second entry - assert ( - entry := hass.config_entries.async_entry_for_domain_unique_id( - "comp", "comp/discovery/bla/config2" - ) - ) is not None - await hass.config_entries.async_remove(entry.entry_id) - assert len(hass.config_entries.async_entries(domain="comp")) == 1 - - # Remove remaining entry1 and assert this triggers an - # automatic re-discovery flow with latest config - assert ( - entry := hass.config_entries.async_entry_for_domain_unique_id( - "comp", "comp/discovery/bla/config1" - ) - ) is not None - assert entry.unique_id == "comp/discovery/bla/config1" - await hass.config_entries.async_remove(entry.entry_id) - assert len(hass.config_entries.async_entries(domain="comp")) == 0 - - # Wait for re-discovery flow to complete - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mqtt_data_flow_calls) == 6 - assert mqtt_data_flow_calls[5].topic == "comp/discovery/bla/config1" - assert mqtt_data_flow_calls[5].payload == "update message" - - # Re-discovery triggered the config flow - assert len(hass.config_entries.async_entries(domain="comp")) == 1 - assert not mqtt_client_mock.unsubscribe.called + await hass.async_block_till_done(wait_background_tasks=True) + async_fire_mqtt_message(hass, "comp/discovery/bla/config", "") + async_fire_mqtt_message(hass, "comp/discovery/bla/config", "") + await wait_unsub.wait() + await asyncio.sleep(0) + await hass.async_block_till_done(wait_background_tasks=True) + mqtt_client_mock.unsubscribe.assert_called_once_with(["comp/discovery/#"]) + await hass.async_block_till_done(wait_background_tasks=True) + async def test_clear_config_topic_disabled_entity( hass: HomeAssistant, @@ -2913,77 +1813,3 @@ async def test_discovery_dispatcher_signal_type_messages( assert len(calls) == 1 assert calls[0] == test_data unsub() - - -@pytest.mark.parametrize( - ("discovery_topic", "discovery_payload", "entity_ids"), - [ - ( - "homeassistant/device/bla/config", - '{ "device":{"identifiers":["0AFFD2"]},' - ' "o": {"name": "foobar"},' - ' "state_topic": "foobar/sensor-shared",' - ' "cmps": {"sens1": {' - ' "platform": "sensor",' - ' "name": "sensor1",' - ' "unique_id": "unique1"' - ' },"sens2": {' - ' "platform": "sensor",' - ' "name": "sensor2",' - ' "unique_id": "unique2"' - ' },"sens3": {' - ' "platform": "sensor",' - ' "name": "sensor3",' - ' "state_topic": "foobar/sensor3",' - ' "unique_id": "unique3"' - "}}}", - 
["sensor.none_sensor1", "sensor.none_sensor2", "sensor.none_sensor3"], - ), - ], -) -async def test_shared_state_topic( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - mqtt_mock_entry: MqttMockHAClientGenerator, - discovery_topic: str, - discovery_payload: str, - entity_ids: list[str], -) -> None: - """Test a shared state_topic can be used.""" - await mqtt_mock_entry() - - async_fire_mqtt_message(hass, discovery_topic, discovery_payload) - await hass.async_block_till_done() - - # Verify device and registry entries are created - device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - assert device_entry is not None - for entity_id in entity_ids: - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry is not None - - state = hass.states.get(entity_id) - assert state is not None - assert state.state == STATE_UNKNOWN - - async_fire_mqtt_message(hass, "foobar/sensor-shared", "New state") - - entity_id = entity_ids[0] - state = hass.states.get(entity_id) - assert state is not None - assert state.state == "New state" - entity_id = entity_ids[1] - state = hass.states.get(entity_id) - assert state is not None - assert state.state == "New state" - entity_id = entity_ids[2] - state = hass.states.get(entity_id) - assert state is not None - assert state.state == STATE_UNKNOWN - - async_fire_mqtt_message(hass, "foobar/sensor3", "New state3") - entity_id = entity_ids[2] - state = hass.states.get(entity_id) - assert state is not None - assert state.state == "New state3" diff --git a/tests/components/mqtt/test_event.py b/tests/components/mqtt/test_event.py index 41049ed0887..662a279f639 100644 --- a/tests/components/mqtt/test_event.py +++ b/tests/components/mqtt/test_event.py @@ -37,7 +37,6 @@ from .test_common import ( help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_disabled_by_default, - help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -91,7 +90,7 @@ async def test_multiple_events_are_all_updating_the_state( """Test all events are respected and trigger a state write.""" await mqtt_mock_entry() with patch( - "homeassistant.components.mqtt.entity.MqttEntity.async_write_ha_state" + "homeassistant.components.mqtt.mixins.MqttEntity.async_write_ha_state" ) as mock_async_ha_write_state: async_fire_mqtt_message( hass, "test-topic", '{"event_type": "press", "duration": "short" }' @@ -110,7 +109,7 @@ async def test_handling_retained_event_payloads( """Test if event messages with a retained flag are ignored.""" await mqtt_mock_entry() with patch( - "homeassistant.components.mqtt.entity.MqttEntity.async_write_ha_state" + "homeassistant.components.mqtt.mixins.MqttEntity.async_write_ha_state" ) as mock_async_ha_write_state: async_fire_mqtt_message( hass, @@ -373,7 +372,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, event.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + event.DOMAIN, + DEFAULT_CONFIG, ) @@ -384,7 +387,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, event.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + 
event.DOMAIN, + DEFAULT_CONFIG, ) @@ -706,18 +713,6 @@ async def test_entity_name( ) -async def test_entity_icon_and_entity_picture( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test the entity icon or picture setup.""" - domain = event.DOMAIN - config = DEFAULT_CONFIG - await help_test_entity_icon_and_entity_picture( - hass, mqtt_mock_entry, domain, config - ) - - @pytest.mark.parametrize( "hass_config", [ @@ -765,7 +760,7 @@ async def test_skipped_async_ha_write_state2( payload1 = '{"event_type": "press"}' payload2 = '{"event_type": "unknown"}' with patch( - "homeassistant.components.mqtt.entity.MqttEntity.async_write_ha_state" + "homeassistant.components.mqtt.mixins.MqttEntity.async_write_ha_state" ) as mock_async_ha_write_state: assert len(mock_async_ha_write_state.mock_calls) == 0 async_fire_mqtt_message(hass, topic, payload1) diff --git a/tests/components/mqtt/test_fan.py b/tests/components/mqtt/test_fan.py index 6c8afe8c1b4..2d1d717c58f 100644 --- a/tests/components/mqtt/test_fan.py +++ b/tests/components/mqtt/test_fan.py @@ -1486,7 +1486,7 @@ async def test_encoding_subscribable_topics( attribute_value: Any, ) -> None: """Test handling of incoming encoded payload.""" - config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][fan.DOMAIN]) + config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][fan.DOMAIN]) config[ATTR_PRESET_MODES] = ["eco", "auto"] config[CONF_PRESET_MODE_COMMAND_TOPIC] = "fan/some_preset_mode_command_topic" config[CONF_PERCENTAGE_COMMAND_TOPIC] = "fan/some_percentage_command_topic" @@ -1590,7 +1590,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.TURN_OFF | fan.FanEntityFeature.TURN_ON, + fan.FanEntityFeature(0), None, ), ( @@ -1605,9 +1605,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.OSCILLATE - | fan.FanEntityFeature.TURN_OFF - | fan.FanEntityFeature.TURN_ON, + fan.FanEntityFeature.OSCILLATE, None, ), ( @@ -1622,9 +1620,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.SET_SPEED - | fan.FanEntityFeature.TURN_OFF - | fan.FanEntityFeature.TURN_ON, + fan.FanEntityFeature.SET_SPEED, None, ), ( @@ -1655,9 +1651,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE - | fan.FanEntityFeature.TURN_OFF - | fan.FanEntityFeature.TURN_ON, + fan.FanEntityFeature.PRESET_MODE, None, ), ( @@ -1673,9 +1667,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE - | fan.FanEntityFeature.TURN_OFF - | fan.FanEntityFeature.TURN_ON, + fan.FanEntityFeature.PRESET_MODE, None, ), ( @@ -1690,9 +1682,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.SET_SPEED - | fan.FanEntityFeature.TURN_OFF - | fan.FanEntityFeature.TURN_ON, + fan.FanEntityFeature.SET_SPEED, None, ), ( @@ -1708,10 +1698,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.OSCILLATE - | fan.FanEntityFeature.SET_SPEED - | fan.FanEntityFeature.TURN_OFF - | fan.FanEntityFeature.TURN_ON, + fan.FanEntityFeature.OSCILLATE | fan.FanEntityFeature.SET_SPEED, None, ), ( @@ -1727,9 +1714,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE - | fan.FanEntityFeature.TURN_OFF - | fan.FanEntityFeature.TURN_ON, + fan.FanEntityFeature.PRESET_MODE, None, ), ( @@ -1745,9 +1730,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE - | fan.FanEntityFeature.TURN_OFF - | fan.FanEntityFeature.TURN_ON, + fan.FanEntityFeature.PRESET_MODE, None, ), ( @@ -1764,10 +1747,7 @@ async def test_attributes( } }, True, - 
fan.FanEntityFeature.PRESET_MODE - | fan.FanEntityFeature.OSCILLATE - | fan.FanEntityFeature.TURN_OFF - | fan.FanEntityFeature.TURN_ON, + fan.FanEntityFeature.PRESET_MODE | fan.FanEntityFeature.OSCILLATE, None, ), ( @@ -1784,9 +1764,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.SET_SPEED - | fan.FanEntityFeature.TURN_OFF - | fan.FanEntityFeature.TURN_ON, + fan.FanEntityFeature.SET_SPEED, None, ), ( @@ -1853,9 +1831,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE - | fan.FanEntityFeature.TURN_OFF - | fan.FanEntityFeature.TURN_ON, + fan.FanEntityFeature.PRESET_MODE, "some error", ), ( @@ -1870,9 +1846,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.DIRECTION - | fan.FanEntityFeature.TURN_OFF - | fan.FanEntityFeature.TURN_ON, + fan.FanEntityFeature.DIRECTION, "some error", ), ], @@ -1958,7 +1932,11 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, mqtt_mock_entry, fan.DOMAIN, DEFAULT_CONFIG, MQTT_FAN_ATTRIBUTES_BLOCKED + hass, + mqtt_mock_entry, + fan.DOMAIN, + DEFAULT_CONFIG, + MQTT_FAN_ATTRIBUTES_BLOCKED, ) @@ -1978,7 +1956,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, fan.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + fan.DOMAIN, + DEFAULT_CONFIG, ) @@ -1989,7 +1971,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, fan.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + fan.DOMAIN, + DEFAULT_CONFIG, ) @@ -2201,7 +2187,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = fan.DOMAIN - config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) + config = copy.deepcopy(DEFAULT_CONFIG) if topic == "preset_mode_command_topic": config[mqtt.DOMAIN][domain]["preset_modes"] = ["auto", "eco"] diff --git a/tests/components/mqtt/test_humidifier.py b/tests/components/mqtt/test_humidifier.py index 20ca89181eb..05180c17b2f 100644 --- a/tests/components/mqtt/test_humidifier.py +++ b/tests/components/mqtt/test_humidifier.py @@ -12,6 +12,7 @@ from homeassistant.components.humidifier import ( ATTR_CURRENT_HUMIDITY, ATTR_HUMIDITY, ATTR_MODE, + DOMAIN, SERVICE_SET_HUMIDITY, SERVICE_SET_MODE, HumidifierAction, @@ -86,9 +87,7 @@ async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) """Turn all or specified humidifier on.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} - await hass.services.async_call( - humidifier.DOMAIN, SERVICE_TURN_ON, data, blocking=True - ) + await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) async def async_turn_off( @@ -97,9 +96,7 @@ async def async_turn_off( """Turn all or specified humidier off.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} - await hass.services.async_call( - humidifier.DOMAIN, SERVICE_TURN_OFF, data, blocking=True - ) + await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) async def async_set_mode( @@ -112,9 +109,7 @@ async def async_set_mode( if value is not None } - await hass.services.async_call( - humidifier.DOMAIN, SERVICE_SET_MODE, 
data, blocking=True - ) + await hass.services.async_call(DOMAIN, SERVICE_SET_MODE, data, blocking=True) async def async_set_humidity( @@ -127,9 +122,7 @@ async def async_set_humidity( if value is not None } - await hass.services.async_call( - humidifier.DOMAIN, SERVICE_SET_HUMIDITY, data, blocking=True - ) + await hass.services.async_call(DOMAIN, SERVICE_SET_HUMIDITY, data, blocking=True) @pytest.mark.parametrize( @@ -862,9 +855,7 @@ async def test_encoding_subscribable_topics( attribute_value: Any, ) -> None: """Test handling of incoming encoded payload.""" - config: dict[str, Any] = copy.deepcopy( - DEFAULT_CONFIG[mqtt.DOMAIN][humidifier.DOMAIN] - ) + config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][humidifier.DOMAIN]) config["modes"] = ["eco", "auto"] config[CONF_MODE_COMMAND_TOPIC] = "humidifier/some_mode_command_topic" await help_test_encoding_subscribable_topics( @@ -1255,7 +1246,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, humidifier.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + humidifier.DOMAIN, + DEFAULT_CONFIG, ) @@ -1266,7 +1261,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, humidifier.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + humidifier.DOMAIN, + DEFAULT_CONFIG, ) @@ -1475,7 +1474,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = humidifier.DOMAIN - config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) + config = copy.deepcopy(DEFAULT_CONFIG) if topic == "mode_command_topic": config[mqtt.DOMAIN][domain]["modes"] = ["auto", "eco"] diff --git a/tests/components/mqtt/test_image.py b/tests/components/mqtt/test_image.py index 6f0eb8edf49..bb029fba231 100644 --- a/tests/components/mqtt/test_image.py +++ b/tests/components/mqtt/test_image.py @@ -573,7 +573,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, image.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + image.DOMAIN, + DEFAULT_CONFIG, ) @@ -584,7 +588,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, image.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + image.DOMAIN, + DEFAULT_CONFIG, ) diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 145016751e7..403f7974878 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -77,6 +77,11 @@ class _DebugInfo(TypedDict): config: _DebugDeviceInfo +@pytest.fixture(autouse=True) +def mock_storage(hass_storage: dict[str, Any]) -> None: + """Autouse hass_storage for the TestCase tests.""" + + async def test_command_template_value(hass: HomeAssistant) -> None: """Test the rendering of MQTT command template.""" @@ -84,12 +89,12 @@ async def test_command_template_value(hass: HomeAssistant) -> None: # test rendering value tpl = template.Template("{{ value + 1 }}", hass=hass) - cmd_tpl = mqtt.MqttCommandTemplate(tpl) + cmd_tpl = mqtt.MqttCommandTemplate(tpl, hass=hass) 
assert cmd_tpl.async_render(4321) == "4322" # test variables at rendering tpl = template.Template("{{ some_var }}", hass=hass) - cmd_tpl = mqtt.MqttCommandTemplate(tpl) + cmd_tpl = mqtt.MqttCommandTemplate(tpl, hass=hass) assert cmd_tpl.async_render(None, variables=variables) == "beer" @@ -156,8 +161,8 @@ async def test_command_template_variables( async def test_command_template_fails(hass: HomeAssistant) -> None: """Test the exception handling of an MQTT command template.""" - tpl = template.Template("{{ value * 2 }}", hass=hass) - cmd_tpl = mqtt.MqttCommandTemplate(tpl) + tpl = template.Template("{{ value * 2 }}") + cmd_tpl = mqtt.MqttCommandTemplate(tpl, hass=hass) with pytest.raises(MqttCommandTemplateException) as exc: cmd_tpl.async_render(None) assert "unsupported operand type(s) for *: 'NoneType' and 'int'" in str(exc.value) @@ -169,13 +174,13 @@ async def test_value_template_value(hass: HomeAssistant) -> None: variables = {"id": 1234, "some_var": "beer"} # test rendering value - tpl = template.Template("{{ value_json.id }}", hass=hass) - val_tpl = mqtt.MqttValueTemplate(tpl) + tpl = template.Template("{{ value_json.id }}") + val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass) assert val_tpl.async_render_with_possible_json_value('{"id": 4321}') == "4321" # test variables at rendering - tpl = template.Template("{{ value_json.id }} {{ some_var }} {{ code }}", hass=hass) - val_tpl = mqtt.MqttValueTemplate(tpl, config_attributes={"code": 1234}) + tpl = template.Template("{{ value_json.id }} {{ some_var }} {{ code }}") + val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass, config_attributes={"code": 1234}) assert ( val_tpl.async_render_with_possible_json_value( '{"id": 4321}', variables=variables @@ -184,8 +189,8 @@ async def test_value_template_value(hass: HomeAssistant) -> None: ) # test with default value if an error occurs due to an invalid template - tpl = template.Template("{{ value_json.id | as_datetime }}", hass=hass) - val_tpl = mqtt.MqttValueTemplate(tpl) + tpl = template.Template("{{ value_json.id | as_datetime }}") + val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass) assert ( val_tpl.async_render_with_possible_json_value('{"otherid": 4321}', "my default") == "my default" @@ -195,19 +200,19 @@ async def test_value_template_value(hass: HomeAssistant) -> None: entity = Entity() entity.hass = hass entity.entity_id = "select.test" - tpl = template.Template("{{ value_json.id }}", hass=hass) + tpl = template.Template("{{ value_json.id }}") val_tpl = mqtt.MqttValueTemplate(tpl, entity=entity) assert val_tpl.async_render_with_possible_json_value('{"id": 4321}') == "4321" # test this object in a template - tpl2 = template.Template("{{ this.entity_id }}", hass=hass) + tpl2 = template.Template("{{ this.entity_id }}") val_tpl2 = mqtt.MqttValueTemplate(tpl2, entity=entity) assert val_tpl2.async_render_with_possible_json_value("bla") == "select.test" with patch( "homeassistant.helpers.template.TemplateStateFromEntityId", MagicMock() ) as template_state_calls: - tpl3 = template.Template("{{ this.entity_id }}", hass=hass) + tpl3 = template.Template("{{ this.entity_id }}") val_tpl3 = mqtt.MqttValueTemplate(tpl3, entity=entity) val_tpl3.async_render_with_possible_json_value("call1") val_tpl3.async_render_with_possible_json_value("call2") @@ -218,8 +223,8 @@ async def test_value_template_fails(hass: HomeAssistant) -> None: """Test the rendering of MQTT value template fails.""" entity = MockEntity(entity_id="sensor.test") entity.hass = hass - tpl = template.Template("{{ value_json.some_var * 2 }}", 
hass=hass) - val_tpl = mqtt.MqttValueTemplate(tpl, entity=entity) + tpl = template.Template("{{ value_json.some_var * 2 }}") + val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass, entity=entity) with pytest.raises(MqttValueTemplateException) as exc: val_tpl.async_render_with_possible_json_value('{"some_var": null }') assert str(exc.value) == ( @@ -230,7 +235,7 @@ async def test_value_template_fails(hass: HomeAssistant) -> None: ) with pytest.raises(MqttValueTemplateException) as exc: val_tpl.async_render_with_possible_json_value( - '{"some_var": null }', default="100" + '{"some_var": null }', default=100 ) assert str(exc.value) == ( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' " @@ -255,12 +260,10 @@ async def test_service_call_without_topic_does_not_publish( assert not mqtt_mock.async_publish.called -# The use of a topic_template in an mqtt publish action call -# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 -async def test_mqtt_publish_action_call_with_topic_and_topic_template_does_not_publish( +async def test_service_call_with_topic_and_topic_template_does_not_publish( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the mqtt publish action call with topic/topic template. + """Test the service call with topic/topic template. If both 'topic' and 'topic_template' are provided then fail. """ @@ -281,12 +284,10 @@ async def test_mqtt_publish_action_call_with_topic_and_topic_template_does_not_p assert not mqtt_mock.async_publish.called -# The use of a topic_template in an mqtt publish action call -# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 -async def test_mqtt_action_call_with_invalid_topic_template_does_not_publish( +async def test_service_call_with_invalid_topic_template_does_not_publish( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the mqtt publish action call with a problematic topic template.""" + """Test the service call with a problematic topic template.""" mqtt_mock = await mqtt_mock_entry() with pytest.raises(MqttCommandTemplateException) as exc: await hass.services.async_call( @@ -306,12 +307,10 @@ async def test_mqtt_action_call_with_invalid_topic_template_does_not_publish( assert not mqtt_mock.async_publish.called -# The use of a topic_template in an mqtt publish action call -# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 -async def test_mqtt_publish_action_call_with_template_topic_renders_template( +async def test_service_call_with_template_topic_renders_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the mqtt publish action call with rendered topic template. + """Test the service call with rendered topic template. If 'topic_template' is provided and 'topic' is not, then render it. """ @@ -332,7 +331,7 @@ async def test_mqtt_publish_action_call_with_template_topic_renders_template( async def test_service_call_with_template_topic_renders_invalid_topic( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the action call with rendered, invalid topic template. + """Test the service call with rendered, invalid topic template. If a wildcard topic is rendered, then fail. 
""" @@ -355,12 +354,10 @@ async def test_service_call_with_template_topic_renders_invalid_topic( assert not mqtt_mock.async_publish.called -# The use of a payload_template in an mqtt publish action call -# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 -async def test_action_call_with_invalid_rendered_payload_template_doesnt_render_template( +async def test_service_call_with_invalid_rendered_template_topic_doesnt_render_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the action call with unrendered payload template. + """Test the service call with unrendered template. If both 'payload' and 'payload_template' are provided then fail. """ @@ -381,12 +378,10 @@ async def test_action_call_with_invalid_rendered_payload_template_doesnt_render_ assert not mqtt_mock.async_publish.called -# The use of a payload_template in an mqtt publish action call -# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 -async def test_mqtt_publish_action_call_with_template_payload_renders_template( +async def test_service_call_with_template_payload_renders_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the mqtt publish action call with rendered template. + """Test the service call with rendered template. If 'payload_template' is provided and 'payload' is not, then render it. """ @@ -415,80 +410,10 @@ async def test_mqtt_publish_action_call_with_template_payload_renders_template( mqtt_mock.reset_mock() -@pytest.mark.parametrize( - ("attr_payload", "payload", "evaluate_payload", "literal_eval_calls"), - [ - ("b'\\xde\\xad\\xbe\\xef'", b"\xde\xad\xbe\xef", True, 1), - ("b'\\xde\\xad\\xbe\\xef'", "b'\\xde\\xad\\xbe\\xef'", False, 0), - ("DEADBEEF", "DEADBEEF", False, 0), - ( - "b'\\xde", - "b'\\xde", - True, - 1, - ), # Bytes literal is invalid, fall back to string - ], -) -async def test_mqtt_publish_action_call_with_raw_data( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - attr_payload: str, - payload: str | bytes, - evaluate_payload: bool, - literal_eval_calls: int, -) -> None: - """Test the mqtt publish action call raw data. - - When `payload` represents a `bytes` object, it should be published - as raw data if `evaluate_payload` is set. 
- """ - mqtt_mock = await mqtt_mock_entry() - await hass.services.async_call( - mqtt.DOMAIN, - mqtt.SERVICE_PUBLISH, - { - mqtt.ATTR_TOPIC: "test/topic", - mqtt.ATTR_PAYLOAD: attr_payload, - mqtt.ATTR_EVALUATE_PAYLOAD: evaluate_payload, - }, - blocking=True, - ) - assert mqtt_mock.async_publish.called - assert mqtt_mock.async_publish.call_args[0][1] == payload - - with patch( - "homeassistant.components.mqtt.models.literal_eval" - ) as literal_eval_mock: - await hass.services.async_call( - mqtt.DOMAIN, - mqtt.SERVICE_PUBLISH, - { - mqtt.ATTR_TOPIC: "test/topic", - mqtt.ATTR_PAYLOAD: attr_payload, - }, - blocking=True, - ) - literal_eval_mock.assert_not_called() - - await hass.services.async_call( - mqtt.DOMAIN, - mqtt.SERVICE_PUBLISH, - { - mqtt.ATTR_TOPIC: "test/topic", - mqtt.ATTR_PAYLOAD: attr_payload, - mqtt.ATTR_EVALUATE_PAYLOAD: evaluate_payload, - }, - blocking=True, - ) - assert len(literal_eval_mock.mock_calls) == literal_eval_calls - - -# The use of a payload_template in an mqtt publish action call -# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 -async def test_publish_action_call_with_bad_payload_template( +async def test_service_call_with_bad_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the mqtt publish action call with a bad template does not publish.""" + """Test the service call with a bad template does not publish.""" mqtt_mock = await mqtt_mock_entry() with pytest.raises(MqttCommandTemplateException) as exc: await hass.services.async_call( @@ -507,12 +432,10 @@ async def test_publish_action_call_with_bad_payload_template( ) -# The use of a payload_template in an mqtt publish action call -# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 -async def test_action_call_with_payload_doesnt_render_template( +async def test_service_call_with_payload_doesnt_render_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the mqtt publish action call with an unrendered template. + """Test the service call with unrendered template. If both 'payload' and 'payload_template' are provided then fail. 
""" @@ -835,7 +758,7 @@ async def test_receiving_message_with_non_utf8_topic_gets_logged( msg.payload = b"Payload" msg.qos = 2 msg.retain = True - msg.timestamp = time.monotonic() # type:ignore[assignment] + msg.timestamp = time.monotonic() mqtt_data: MqttData = hass.data["mqtt"] assert mqtt_data.client @@ -1197,6 +1120,7 @@ async def test_mqtt_ws_get_device_debug_info( } data_sensor = json.dumps(config_sensor) data_trigger = json.dumps(config_trigger) + config_sensor["platform"] = config_trigger["platform"] = mqtt.DOMAIN async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data_sensor) async_fire_mqtt_message( @@ -1253,6 +1177,7 @@ async def test_mqtt_ws_get_device_debug_info_binary( "unique_id": "unique", } data = json.dumps(config) + config["platform"] = mqtt.DOMAIN async_fire_mqtt_message(hass, "homeassistant/camera/bla/config", data) await hass.async_block_till_done() @@ -1487,7 +1412,7 @@ async def test_debug_info_non_mqtt( """Test we get empty debug_info for a device with non MQTT entities.""" await mqtt_mock_entry() domain = "sensor" - setup_test_component_platform(hass, domain, mock_sensor_entities.values()) + setup_test_component_platform(hass, domain, mock_sensor_entities) config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) @@ -1701,12 +1626,10 @@ async def test_debug_info_qos_retain( } in messages -# The use of a payload_template in an mqtt publish action call -# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 async def test_publish_json_from_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the publishing of call to mqtt publish action.""" + """Test the publishing of call to services.""" mqtt_mock = await mqtt_mock_entry() test_str = "{'valid': 'python', 'invalid': 'json'}" @@ -1892,7 +1815,7 @@ async def test_disabling_and_enabling_entry( config_light = '{"name": "test_new", "command_topic": "test-topic_new"}' with patch( - "homeassistant.components.mqtt.entity.mqtt_config_entry_enabled", + "homeassistant.components.mqtt.mixins.mqtt_config_entry_enabled", return_value=False, ): # Discovery of mqtt tag @@ -2451,6 +2374,7 @@ async def test_multi_platform_discovery( "PayloadSentinel", "PublishPayloadType", "ReceiveMessage", + "ReceivePayloadType", "async_prepare_subscribe_topics", "async_publish", "async_subscribe", diff --git a/tests/components/mqtt/test_lawn_mower.py b/tests/components/mqtt/test_lawn_mower.py index 0bef4196ef2..120a09deb88 100644 --- a/tests/components/mqtt/test_lawn_mower.py +++ b/tests/components/mqtt/test_lawn_mower.py @@ -91,7 +91,8 @@ DEFAULT_CONFIG = { ], ) async def test_run_lawn_mower_setup_and_state_updates( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test that it sets up correctly fetches the given payload.""" await mqtt_mock_entry() @@ -103,13 +104,6 @@ async def test_run_lawn_mower_setup_and_state_updates( state = hass.states.get("lawn_mower.test_lawn_mower") assert state.state == "mowing" - async_fire_mqtt_message(hass, "test/lawn_mower_stat", "returning") - - await hass.async_block_till_done() - - state = hass.states.get("lawn_mower.test_lawn_mower") - assert state.state == "returning" - async_fire_mqtt_message(hass, "test/lawn_mower_stat", "docked") await hass.async_block_till_done() @@ -205,13 +199,6 @@ async def test_value_template( state = hass.states.get("lawn_mower.test_lawn_mower") assert state.state == "paused" 
- async_fire_mqtt_message(hass, "test/lawn_mower_stat", '{"val":"returning"}') - - await hass.async_block_till_done() - - state = hass.states.get("lawn_mower.test_lawn_mower") - assert state.state == "returning" - async_fire_mqtt_message(hass, "test/lawn_mower_stat", '{"val": null}') await hass.async_block_till_done() @@ -455,7 +442,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, lawn_mower.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + lawn_mower.DOMAIN, + DEFAULT_CONFIG, ) @@ -466,7 +457,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, lawn_mower.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + lawn_mower.DOMAIN, + DEFAULT_CONFIG, ) @@ -516,7 +511,8 @@ async def test_discovery_removal_lawn_mower( async def test_discovery_update_lawn_mower( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test update of discovered lawn_mower.""" config1 = { @@ -716,8 +712,7 @@ async def test_mqtt_payload_not_a_valid_activity_warning( assert ( "Invalid activity for lawn_mower.test_lawn_mower: 'painting' " - "(valid activities: ['error', 'paused', 'mowing', 'docked', 'returning'])" - in caplog.text + "(valid activities: ['error', 'paused', 'mowing', 'docked'])" in caplog.text ) @@ -776,7 +771,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = lawn_mower.DOMAIN @@ -789,7 +785,6 @@ async def test_reloadable( [ ("activity_state_topic", "paused", None, "paused"), ("activity_state_topic", "docked", None, "docked"), - ("activity_state_topic", "returning", None, "returning"), ("activity_state_topic", "mowing", None, "mowing"), ], ) @@ -802,9 +797,7 @@ async def test_encoding_subscribable_topics( attribute_value: Any, ) -> None: """Test handling of incoming encoded payload.""" - config: dict[str, Any] = copy.deepcopy( - DEFAULT_CONFIG[mqtt.DOMAIN][lawn_mower.DOMAIN] - ) + config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][lawn_mower.DOMAIN]) config["actions"] = ["milk", "beer"] await help_test_encoding_subscribable_topics( hass, @@ -833,7 +826,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = lawn_mower.DOMAIN diff --git a/tests/components/mqtt/test_legacy_vacuum.py b/tests/components/mqtt/test_legacy_vacuum.py new file mode 100644 index 00000000000..e4f5e3cd481 --- /dev/null +++ b/tests/components/mqtt/test_legacy_vacuum.py @@ -0,0 +1,80 @@ +"""The tests for the Legacy Mqtt vacuum platform.""" + +# The legacy schema for MQTT vacuum was deprecated with HA Core 2023.8.0 +# and was removed with HA Core 2024.2.0 +# cleanup is planned with HA Core 2025.2 + +import json + +import pytest + +from homeassistant.components import mqtt, vacuum +from homeassistant.core import HomeAssistant +from homeassistant.helpers.typing import DiscoveryInfoType + +from tests.common 
import async_fire_mqtt_message +from tests.typing import MqttMockHAClientGenerator + +DEFAULT_CONFIG = {mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test"}}} + + +@pytest.mark.parametrize( + ("hass_config", "removed"), + [ + ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "legacy"}}}, True), + ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test"}}}, False), + ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "state"}}}, False), + ], +) +async def test_removed_support_yaml( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, + removed: bool, +) -> None: + """Test that the removed support validation for the legacy schema works.""" + assert await mqtt_mock_entry() + entity = hass.states.get("vacuum.test") + + if removed: + assert entity is None + assert ( + "The support for the `legacy` MQTT " + "vacuum schema has been removed" in caplog.text + ) + else: + assert entity is not None + + +@pytest.mark.parametrize( + ("config", "removed"), + [ + ({"name": "test", "schema": "legacy"}, True), + ({"name": "test"}, False), + ({"name": "test", "schema": "state"}, False), + ], +) +async def test_removed_support_discovery( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, + config: DiscoveryInfoType, + removed: bool, +) -> None: + """Test that the removed support validation for the legacy schema works.""" + assert await mqtt_mock_entry() + + config_payload = json.dumps(config) + async_fire_mqtt_message(hass, "homeassistant/vacuum/test/config", config_payload) + await hass.async_block_till_done() + + entity = hass.states.get("vacuum.test") + + if removed: + assert entity is None + assert ( + "The support for the `legacy` MQTT " + "vacuum schema has been removed" in caplog.text + ) + else: + assert entity is not None diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index 0ef7cda2a7d..bfce49b9ecb 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -1053,7 +1053,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_on( - hass, "light.test", brightness=10, rgb_color=(80, 40, 20) + hass, "light.test", brightness=10, rgb_color=[80, 40, 20] ) mqtt_mock.async_publish.assert_has_calls( [ @@ -1073,7 +1073,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_on( - hass, "light.test", brightness=20, rgbw_color=(80, 40, 20, 10) + hass, "light.test", brightness=20, rgbw_color=[80, 40, 20, 10] ) mqtt_mock.async_publish.assert_has_calls( [ @@ -1093,7 +1093,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_on( - hass, "light.test", brightness=40, rgbww_color=(80, 40, 20, 10, 8) + hass, "light.test", brightness=40, rgbww_color=[80, 40, 20, 10, 8] ) mqtt_mock.async_publish.assert_has_calls( [ @@ -1112,7 +1112,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_COLOR_MODE) == "rgbww" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) mqtt_mock.async_publish.assert_has_calls( [ 
call("test_light_rgb/set", "on", 2, False), @@ -1130,7 +1130,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - await common.async_turn_on(hass, "light.test", brightness=60, xy_color=(0.2, 0.3)) + await common.async_turn_on(hass, "light.test", brightness=60, xy_color=[0.2, 0.3]) mqtt_mock.async_publish.assert_has_calls( [ call("test_light_rgb/set", "on", 2, False), @@ -1193,7 +1193,7 @@ async def test_sending_mqtt_rgb_command_with_template( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 64)) + await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 64]) mqtt_mock.async_publish.assert_has_calls( [ @@ -1236,7 +1236,7 @@ async def test_sending_mqtt_rgbw_command_with_template( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", rgbw_color=(255, 128, 64, 32)) + await common.async_turn_on(hass, "light.test", rgbw_color=[255, 128, 64, 32]) mqtt_mock.async_publish.assert_has_calls( [ @@ -1279,7 +1279,7 @@ async def test_sending_mqtt_rgbww_command_with_template( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", rgbww_color=(255, 128, 64, 32, 16)) + await common.async_turn_on(hass, "light.test", rgbww_color=[255, 128, 64, 32, 16]) mqtt_mock.async_publish.assert_has_calls( [ @@ -1469,7 +1469,7 @@ async def test_on_command_brightness( # Turn on w/ just a color to ensure brightness gets # added and sent. - await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) + await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) mqtt_mock.async_publish.assert_has_calls( [ @@ -1545,7 +1545,7 @@ async def test_on_command_brightness_scaled( # Turn on w/ just a color to ensure brightness gets # added and sent. - await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) + await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) mqtt_mock.async_publish.assert_has_calls( [ @@ -1626,7 +1626,7 @@ async def test_on_command_rgb( mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) # Ensure color gets scaled with brightness. - await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) + await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) mqtt_mock.async_publish.assert_has_calls( [ @@ -1722,7 +1722,7 @@ async def test_on_command_rgbw( mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) # Ensure color gets scaled with brightness. - await common.async_turn_on(hass, "light.test", rgbw_color=(255, 128, 0, 16)) + await common.async_turn_on(hass, "light.test", rgbw_color=[255, 128, 0, 16]) mqtt_mock.async_publish.assert_has_calls( [ @@ -1818,7 +1818,7 @@ async def test_on_command_rgbww( mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) # Ensure color gets scaled with brightness. 
- await common.async_turn_on(hass, "light.test", rgbww_color=(255, 128, 0, 16, 32)) + await common.async_turn_on(hass, "light.test", rgbww_color=[255, 128, 0, 16, 32]) mqtt_mock.async_publish.assert_has_calls( [ @@ -2492,7 +2492,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + light.DOMAIN, + DEFAULT_CONFIG, ) @@ -2503,7 +2507,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + light.DOMAIN, + DEFAULT_CONFIG, ) @@ -2559,7 +2567,9 @@ async def test_discovery_removal_light( async def test_discovery_ignores_extra_keys( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test discovery ignores extra keys that are not blocked.""" await mqtt_mock_entry() @@ -3262,7 +3272,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = light.DOMAIN - config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) + config = copy.deepcopy(DEFAULT_CONFIG) if topic == "effect_command_topic": config[mqtt.DOMAIN][domain]["effect_list"] = ["random", "color_loop"] elif topic == "white_command_topic": @@ -3285,7 +3295,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = light.DOMAIN @@ -3333,7 +3344,7 @@ async def test_encoding_subscribable_topics( init_payload: tuple[str, str] | None, ) -> None: """Test handling of incoming encoded payload.""" - config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][light.DOMAIN]) + config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][light.DOMAIN]) config[CONF_EFFECT_COMMAND_TOPIC] = "light/CONF_EFFECT_COMMAND_TOPIC" config[CONF_RGB_COMMAND_TOPIC] = "light/CONF_RGB_COMMAND_TOPIC" config[CONF_BRIGHTNESS_COMMAND_TOPIC] = "light/CONF_BRIGHTNESS_COMMAND_TOPIC" @@ -3367,6 +3378,7 @@ async def test_encoding_subscribable_topics( async def test_encoding_subscribable_topics_brightness( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, topic: str, value: str, attribute: str, @@ -3578,7 +3590,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = light.DOMAIN diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 31573ad88c6..5ab2a32dc83 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -99,7 +99,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers.json import json_dumps -from homeassistant.util.json import json_loads +from homeassistant.util.json import JsonValueType, json_loads from .test_common import ( 
help_custom_config, @@ -172,11 +172,11 @@ COLOR_MODES_CONFIG = { class JsonValidator: """Helper to compare JSON.""" - def __init__(self, jsondata: bytes | str) -> None: + def __init__(self, jsondata: JsonValueType) -> None: """Initialize JSON validator.""" self.jsondata = jsondata - def __eq__(self, other: bytes | str) -> bool: # type:ignore[override] + def __eq__(self, other: JsonValueType) -> bool: """Compare JSON data.""" return json_loads(self.jsondata) == json_loads(other) @@ -185,6 +185,7 @@ class JsonValidator: "hass_config", [{mqtt.DOMAIN: {light.DOMAIN: {"schema": "json", "name": "test"}}}] ) async def test_fail_setup_if_no_command_topic( + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -203,6 +204,7 @@ async def test_fail_setup_if_no_command_topic( ], ) async def test_fail_setup_if_color_mode_deprecated( + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -231,6 +233,7 @@ async def test_fail_setup_if_color_mode_deprecated( ids=["color_temp", "hs", "rgb", "xy", "color_temp, rgb"], ) async def test_warning_if_color_mode_flags_are_used( + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, color_modes: tuple[str, ...], @@ -313,6 +316,7 @@ async def test_warning_on_discovery_if_color_mode_flags_are_used( ids=["color_temp"], ) async def test_warning_if_color_mode_option_flag_is_used( + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -389,6 +393,7 @@ async def test_warning_on_discovery_if_color_mode_option_flag_is_used( ], ) async def test_fail_setup_if_color_modes_invalid( + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, error: str, @@ -416,7 +421,8 @@ async def test_fail_setup_if_color_modes_invalid( ], ) async def test_single_color_mode( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test setup with single color_mode.""" await mqtt_mock_entry() @@ -442,7 +448,8 @@ async def test_single_color_mode( @pytest.mark.parametrize("hass_config", [COLOR_MODES_CONFIG]) async def test_turn_on_with_unknown_color_mode_optimistic( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test setup and turn with unknown color_mode in optimistic mode.""" await mqtt_mock_entry() @@ -479,7 +486,8 @@ async def test_turn_on_with_unknown_color_mode_optimistic( ], ) async def test_controlling_state_with_unknown_color_mode( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test setup and turn with unknown color_mode in optimistic mode.""" await mqtt_mock_entry() @@ -1108,7 +1116,7 @@ async def test_sending_mqtt_commands_and_optimistic( mqtt_mock.reset_mock() await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=(0.123, 0.123) + hass, "light.test", brightness=50, xy_color=[0.123, 0.123] ) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", @@ -1128,7 +1136,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes["rgb_color"] == (0, 123, 255) assert state.attributes["xy_color"] == (0.14, 0.131) - await common.async_turn_on(hass, "light.test", brightness=50, 
hs_color=(359, 78)) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", JsonValidator( @@ -1148,7 +1156,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes["rgb_color"] == (255, 56, 59) assert state.attributes["xy_color"] == (0.654, 0.301) - await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) + await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", JsonValidator( @@ -1265,7 +1273,7 @@ async def test_sending_mqtt_commands_and_optimistic2( assert state.state == STATE_OFF # Set hs color - await common.async_turn_on(hass, "light.test", brightness=75, hs_color=(359, 78)) + await common.async_turn_on(hass, "light.test", brightness=75, hs_color=[359, 78]) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["brightness"] == 75 @@ -1286,7 +1294,7 @@ async def test_sending_mqtt_commands_and_optimistic2( mqtt_mock.async_publish.reset_mock() # Set rgb color - await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) + await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["brightness"] == 75 @@ -1305,7 +1313,7 @@ async def test_sending_mqtt_commands_and_optimistic2( mqtt_mock.async_publish.reset_mock() # Set rgbw color - await common.async_turn_on(hass, "light.test", rgbw_color=(255, 128, 0, 123)) + await common.async_turn_on(hass, "light.test", rgbw_color=[255, 128, 0, 123]) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["brightness"] == 75 @@ -1326,7 +1334,7 @@ async def test_sending_mqtt_commands_and_optimistic2( mqtt_mock.async_publish.reset_mock() # Set rgbww color - await common.async_turn_on(hass, "light.test", rgbww_color=(255, 128, 0, 45, 32)) + await common.async_turn_on(hass, "light.test", rgbww_color=[255, 128, 0, 45, 32]) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["brightness"] == 75 @@ -1348,7 +1356,7 @@ async def test_sending_mqtt_commands_and_optimistic2( # Set xy color await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=(0.123, 0.223) + hass, "light.test", brightness=50, xy_color=[0.123, 0.223] ) state = hass.states.get("light.test") assert state.state == STATE_ON @@ -1435,10 +1443,10 @@ async def test_sending_hs_color( mqtt_mock.reset_mock() await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=(0.123, 0.123) + hass, "light.test", brightness=50, xy_color=[0.123, 0.123] ) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) - await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) mqtt_mock.async_publish.assert_has_calls( [ @@ -1497,11 +1505,11 @@ async def test_sending_rgb_color_no_brightness( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=(0.123, 0.123) + hass, "light.test", brightness=50, xy_color=[0.123, 0.123] ) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) await 
common.async_turn_on( - hass, "light.test", rgb_color=(255, 128, 0), brightness=255 + hass, "light.test", rgb_color=[255, 128, 0], brightness=255 ) mqtt_mock.async_publish.assert_has_calls( @@ -1555,17 +1563,17 @@ async def test_sending_rgb_color_no_brightness2( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=(0.123, 0.123) + hass, "light.test", brightness=50, xy_color=[0.123, 0.123] ) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) await common.async_turn_on( - hass, "light.test", rgb_color=(255, 128, 0), brightness=255 + hass, "light.test", rgb_color=[255, 128, 0], brightness=255 ) await common.async_turn_on( - hass, "light.test", rgbw_color=(128, 64, 32, 16), brightness=128 + hass, "light.test", rgbw_color=[128, 64, 32, 16], brightness=128 ) await common.async_turn_on( - hass, "light.test", rgbww_color=(128, 64, 32, 16, 8), brightness=64 + hass, "light.test", rgbww_color=[128, 64, 32, 16, 8], brightness=64 ) mqtt_mock.async_publish.assert_has_calls( @@ -1635,11 +1643,11 @@ async def test_sending_rgb_color_with_brightness( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=(0.123, 0.123) + hass, "light.test", brightness=50, xy_color=[0.123, 0.123] ) - await common.async_turn_on(hass, "light.test", brightness=255, hs_color=(359, 78)) + await common.async_turn_on(hass, "light.test", brightness=255, hs_color=[359, 78]) await common.async_turn_on(hass, "light.test", brightness=1) - await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) + await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) mqtt_mock.async_publish.assert_has_calls( [ @@ -1705,11 +1713,11 @@ async def test_sending_rgb_color_with_scaled_brightness( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=(0.123, 0.123) + hass, "light.test", brightness=50, xy_color=[0.123, 0.123] ) - await common.async_turn_on(hass, "light.test", brightness=255, hs_color=(359, 78)) + await common.async_turn_on(hass, "light.test", brightness=255, hs_color=[359, 78]) await common.async_turn_on(hass, "light.test", brightness=1) - await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) + await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) mqtt_mock.async_publish.assert_has_calls( [ @@ -1820,10 +1828,10 @@ async def test_sending_xy_color( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=(0.123, 0.123) + hass, "light.test", brightness=50, xy_color=[0.123, 0.123] ) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) - await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) mqtt_mock.async_publish.assert_has_calls( [ @@ -2366,7 +2374,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + light.DOMAIN, + DEFAULT_CONFIG, ) @@ -2377,7 +2389,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes 
get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + light.DOMAIN, + DEFAULT_CONFIG, ) @@ -2629,7 +2645,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = light.DOMAIN - config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) + config = copy.deepcopy(DEFAULT_CONFIG) if topic == "effect_command_topic": config[mqtt.DOMAIN][domain]["effect_list"] = ["random", "color_loop"] @@ -2650,7 +2666,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = light.DOMAIN @@ -2680,7 +2697,7 @@ async def test_encoding_subscribable_topics( init_payload: tuple[str, str] | None, ) -> None: """Test handling of incoming encoded payload.""" - config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][light.DOMAIN]) + config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][light.DOMAIN]) config["color_mode"] = True config["supported_color_modes"] = [ "color_temp", diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index 63e110ba7c0..aace09f402a 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -482,7 +482,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.state == STATE_ON # Full brightness - no scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) + await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,255-128-0,30.118-100.0", 2, False ) @@ -492,7 +492,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("rgb_color") == (255, 128, 0) # Full brightness - normalization of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=(128, 64, 0)) + await common.async_turn_on(hass, "light.test", rgb_color=[128, 64, 0]) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,255-127-0,30.0-100.0", 2, False ) @@ -511,7 +511,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.state == STATE_ON # Half brightness - scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=(0, 255, 128)) + await common.async_turn_on(hass, "light.test", rgb_color=[0, 255, 128]) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,0-128-64,150.118-100.0", 2, False ) @@ -521,7 +521,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("rgb_color") == (0, 255, 128) # Half brightness - normalization+scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=(0, 32, 16)) + await common.async_turn_on(hass, "light.test", rgb_color=[0, 32, 16]) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,0-128-64,150.0-100.0", 2, False ) @@ -614,7 +614,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( assert not state.attributes.get("brightness") # Full brightness - no scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) + 
await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,255-128-0,30.118-100.0", 0, False ) @@ -624,7 +624,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( assert not state.attributes.get("rgb_color") # Full brightness - normalization of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=(128, 64, 0)) + await common.async_turn_on(hass, "light.test", rgb_color=[128, 64, 0]) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,255-127-0,30.0-100.0", 0, False ) @@ -638,7 +638,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( mqtt_mock.async_publish.reset_mock() # Half brightness - no scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=(0, 255, 128)) + await common.async_turn_on(hass, "light.test", rgb_color=[0, 255, 128]) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,0-255-128,150.118-100.0", 0, False ) @@ -646,7 +646,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( state = hass.states.get("light.test") # Half brightness - normalization but no scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=(0, 32, 16)) + await common.async_turn_on(hass, "light.test", rgb_color=[0, 32, 16]) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,0-255-127,150.0-100.0", 0, False ) @@ -978,7 +978,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + light.DOMAIN, + DEFAULT_CONFIG, ) @@ -989,7 +993,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + light.DOMAIN, + DEFAULT_CONFIG, ) @@ -1259,7 +1267,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = light.DOMAIN - config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) + config = copy.deepcopy(DEFAULT_CONFIG) if topic == "effect_command_topic": config[mqtt.DOMAIN][domain]["effect_list"] = ["random", "color_loop"] @@ -1280,7 +1288,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = light.DOMAIN @@ -1334,7 +1343,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = light.DOMAIN diff --git a/tests/components/mqtt/test_lock.py b/tests/components/mqtt/test_lock.py index 034f9b5ff6e..c9546bdfdb3 100644 --- a/tests/components/mqtt/test_lock.py +++ b/tests/components/mqtt/test_lock.py @@ -10,8 +10,14 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, + STATE_JAMMED, + STATE_LOCKED, + STATE_LOCKING, + STATE_OPEN, + 
STATE_OPENING, + STATE_UNLOCKED, + STATE_UNLOCKING, LockEntityFeature, - LockState, ) from homeassistant.components.mqtt.lock import MQTT_LOCK_ATTRIBUTES_BLOCKED from homeassistant.const import ( @@ -83,12 +89,12 @@ CONFIG_WITH_STATES = { @pytest.mark.parametrize( ("hass_config", "payload", "lock_state"), [ - (CONFIG_WITH_STATES, "closed", LockState.LOCKED), - (CONFIG_WITH_STATES, "closing", LockState.LOCKING), - (CONFIG_WITH_STATES, "open", LockState.OPEN), - (CONFIG_WITH_STATES, "opening", LockState.OPENING), - (CONFIG_WITH_STATES, "unlocked", LockState.UNLOCKED), - (CONFIG_WITH_STATES, "unlocking", LockState.UNLOCKING), + (CONFIG_WITH_STATES, "closed", STATE_LOCKED), + (CONFIG_WITH_STATES, "closing", STATE_LOCKING), + (CONFIG_WITH_STATES, "open", STATE_OPEN), + (CONFIG_WITH_STATES, "opening", STATE_OPENING), + (CONFIG_WITH_STATES, "unlocked", STATE_UNLOCKED), + (CONFIG_WITH_STATES, "unlocking", STATE_UNLOCKING), ], ) async def test_controlling_state_via_topic( @@ -109,18 +115,18 @@ async def test_controlling_state_via_topic( await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state == lock_state + assert state.state is lock_state @pytest.mark.parametrize( ("hass_config", "payload", "lock_state"), [ - (CONFIG_WITH_STATES, "closed", LockState.LOCKED), - (CONFIG_WITH_STATES, "closing", LockState.LOCKING), - (CONFIG_WITH_STATES, "open", LockState.OPEN), - (CONFIG_WITH_STATES, "opening", LockState.OPENING), - (CONFIG_WITH_STATES, "unlocked", LockState.UNLOCKED), - (CONFIG_WITH_STATES, "unlocking", LockState.UNLOCKING), + (CONFIG_WITH_STATES, "closed", STATE_LOCKED), + (CONFIG_WITH_STATES, "closing", STATE_LOCKING), + (CONFIG_WITH_STATES, "open", STATE_OPEN), + (CONFIG_WITH_STATES, "opening", STATE_OPENING), + (CONFIG_WITH_STATES, "unlocked", STATE_UNLOCKED), + (CONFIG_WITH_STATES, "unlocking", STATE_UNLOCKING), (CONFIG_WITH_STATES, "None", STATE_UNKNOWN), ], ) @@ -140,13 +146,13 @@ async def test_controlling_non_default_state_via_topic( async_fire_mqtt_message(hass, "state-topic", payload) state = hass.states.get("lock.test") - assert state.state == lock_state + assert state.state is lock_state # Empty state is ignored async_fire_mqtt_message(hass, "state-topic", "") state = hass.states.get("lock.test") - assert state.state == lock_state + assert state.state is lock_state @pytest.mark.parametrize( @@ -159,7 +165,7 @@ async def test_controlling_non_default_state_via_topic( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"closed"}', - LockState.LOCKED, + STATE_LOCKED, ), ( help_custom_config( @@ -168,7 +174,7 @@ async def test_controlling_non_default_state_via_topic( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"closing"}', - LockState.LOCKING, + STATE_LOCKING, ), ( help_custom_config( @@ -177,7 +183,7 @@ async def test_controlling_non_default_state_via_topic( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"unlocking"}', - LockState.UNLOCKING, + STATE_UNLOCKING, ), ( help_custom_config( @@ -186,7 +192,7 @@ async def test_controlling_non_default_state_via_topic( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"open"}', - LockState.OPEN, + STATE_OPEN, ), ( help_custom_config( @@ -195,7 +201,7 @@ async def test_controlling_non_default_state_via_topic( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"opening"}', - LockState.OPENING, + STATE_OPENING, ), ( help_custom_config( @@ -204,7 +210,7 @@ async def test_controlling_non_default_state_via_topic( ({"value_template": "{{ value_json.val }}"},), ), 
'{"val":"unlocked"}', - LockState.UNLOCKED, + STATE_UNLOCKED, ), ( help_custom_config( @@ -232,7 +238,7 @@ async def test_controlling_state_via_topic_and_json_message( async_fire_mqtt_message(hass, "state-topic", payload) state = hass.states.get("lock.test") - assert state.state == lock_state + assert state.state is lock_state @pytest.mark.parametrize( @@ -245,7 +251,7 @@ async def test_controlling_state_via_topic_and_json_message( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"closed"}', - LockState.LOCKED, + STATE_LOCKED, ), ( help_custom_config( @@ -254,7 +260,7 @@ async def test_controlling_state_via_topic_and_json_message( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"closing"}', - LockState.LOCKING, + STATE_LOCKING, ), ( help_custom_config( @@ -263,7 +269,7 @@ async def test_controlling_state_via_topic_and_json_message( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"open"}', - LockState.OPEN, + STATE_OPEN, ), ( help_custom_config( @@ -272,7 +278,7 @@ async def test_controlling_state_via_topic_and_json_message( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"opening"}', - LockState.OPENING, + STATE_OPENING, ), ( help_custom_config( @@ -281,7 +287,7 @@ async def test_controlling_state_via_topic_and_json_message( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"unlocked"}', - LockState.UNLOCKED, + STATE_UNLOCKED, ), ( help_custom_config( @@ -290,7 +296,7 @@ async def test_controlling_state_via_topic_and_json_message( ({"value_template": "{{ value_json.val }}"},), ), '{"val":"unlocking"}', - LockState.UNLOCKING, + STATE_UNLOCKING, ), ], ) @@ -309,7 +315,7 @@ async def test_controlling_non_default_state_via_topic_and_json_message( async_fire_mqtt_message(hass, "state-topic", payload) state = hass.states.get("lock.test") - assert state.state == lock_state + assert state.state is lock_state @pytest.mark.parametrize( @@ -336,7 +342,7 @@ async def test_sending_mqtt_commands_and_optimistic( mqtt_mock = await mqtt_mock_entry() state = hass.states.get("lock.test") - assert state.state == LockState.UNLOCKED + assert state.state is STATE_UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -346,7 +352,7 @@ async def test_sending_mqtt_commands_and_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "LOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state == LockState.LOCKED + assert state.state is STATE_LOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -356,7 +362,7 @@ async def test_sending_mqtt_commands_and_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "UNLOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state == LockState.UNLOCKED + assert state.state is STATE_UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) @@ -387,7 +393,7 @@ async def test_sending_mqtt_commands_with_template( mqtt_mock = await mqtt_mock_entry() state = hass.states.get("lock.test") - assert state.state == LockState.UNLOCKED + assert state.state is STATE_UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -402,7 +408,7 @@ async def test_sending_mqtt_commands_with_template( ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state == LockState.LOCKED + assert state.state is STATE_LOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await 
hass.services.async_call( @@ -417,7 +423,7 @@ async def test_sending_mqtt_commands_with_template( ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state == LockState.UNLOCKED + assert state.state is STATE_UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) @@ -447,7 +453,7 @@ async def test_sending_mqtt_commands_and_explicit_optimistic( mqtt_mock = await mqtt_mock_entry() state = hass.states.get("lock.test") - assert state.state == LockState.UNLOCKED + assert state.state is STATE_UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -457,7 +463,7 @@ async def test_sending_mqtt_commands_and_explicit_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "LOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state == LockState.LOCKED + assert state.state is STATE_LOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -467,7 +473,7 @@ async def test_sending_mqtt_commands_and_explicit_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "UNLOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state == LockState.UNLOCKED + assert state.state is STATE_UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) @@ -496,7 +502,7 @@ async def test_sending_mqtt_commands_support_open_and_optimistic( mqtt_mock = await mqtt_mock_entry() state = hass.states.get("lock.test") - assert state.state == LockState.UNLOCKED + assert state.state is STATE_UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == LockEntityFeature.OPEN @@ -507,7 +513,7 @@ async def test_sending_mqtt_commands_support_open_and_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "LOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state == LockState.LOCKED + assert state.state is STATE_LOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -517,7 +523,7 @@ async def test_sending_mqtt_commands_support_open_and_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "UNLOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state == LockState.UNLOCKED + assert state.state is STATE_UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -527,7 +533,7 @@ async def test_sending_mqtt_commands_support_open_and_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state == LockState.OPEN + assert state.state is STATE_OPEN assert state.attributes.get(ATTR_ASSUMED_STATE) @@ -558,7 +564,7 @@ async def test_sending_mqtt_commands_support_open_and_explicit_optimistic( mqtt_mock = await mqtt_mock_entry() state = hass.states.get("lock.test") - assert state.state == LockState.UNLOCKED + assert state.state is STATE_UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == LockEntityFeature.OPEN @@ -569,7 +575,7 @@ async def test_sending_mqtt_commands_support_open_and_explicit_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "LOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = 
hass.states.get("lock.test") - assert state.state == LockState.LOCKED + assert state.state is STATE_LOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -579,7 +585,7 @@ async def test_sending_mqtt_commands_support_open_and_explicit_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "UNLOCK", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state == LockState.UNLOCKED + assert state.state is STATE_UNLOCKED assert state.attributes.get(ATTR_ASSUMED_STATE) await hass.services.async_call( @@ -589,7 +595,7 @@ async def test_sending_mqtt_commands_support_open_and_explicit_optimistic( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("lock.test") - assert state.state == LockState.OPEN + assert state.state is STATE_OPEN assert state.attributes.get(ATTR_ASSUMED_STATE) @@ -638,7 +644,7 @@ async def test_sending_mqtt_commands_pessimistic( await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state == LockState.LOCKED + assert state.state is STATE_LOCKED await hass.services.async_call( lock.DOMAIN, SERVICE_UNLOCK, {ATTR_ENTITY_ID: "lock.test"}, blocking=True @@ -652,7 +658,7 @@ async def test_sending_mqtt_commands_pessimistic( await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state == LockState.UNLOCKED + assert state.state is STATE_UNLOCKED await hass.services.async_call( lock.DOMAIN, SERVICE_OPEN, {ATTR_ENTITY_ID: "lock.test"}, blocking=True @@ -666,7 +672,7 @@ async def test_sending_mqtt_commands_pessimistic( await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state == LockState.UNLOCKED + assert state.state is STATE_UNLOCKED # send lock command to lock await hass.services.async_call( @@ -682,21 +688,21 @@ async def test_sending_mqtt_commands_pessimistic( await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state == LockState.LOCKING + assert state.state is STATE_LOCKING # receive jammed state from lock async_fire_mqtt_message(hass, "state-topic", "JAMMED") await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state == LockState.JAMMED + assert state.state is STATE_JAMMED # receive solved state from lock async_fire_mqtt_message(hass, "state-topic", "LOCKED") await hass.async_block_till_done() state = hass.states.get("lock.test") - assert state.state == LockState.LOCKED + assert state.state is STATE_LOCKED @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) @@ -751,7 +757,11 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, mqtt_mock_entry, lock.DOMAIN, DEFAULT_CONFIG, MQTT_LOCK_ATTRIBUTES_BLOCKED + hass, + mqtt_mock_entry, + lock.DOMAIN, + DEFAULT_CONFIG, + MQTT_LOCK_ATTRIBUTES_BLOCKED, ) @@ -771,7 +781,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, lock.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + lock.DOMAIN, + DEFAULT_CONFIG, ) @@ -782,7 +796,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await 
help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, lock.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + lock.DOMAIN, + DEFAULT_CONFIG, ) @@ -990,7 +1008,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = lock.DOMAIN @@ -1040,7 +1059,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = lock.DOMAIN diff --git a/tests/components/mqtt/test_mixins.py b/tests/components/mqtt/test_mixins.py index 5b7984cad62..ae4d232ba54 100644 --- a/tests/components/mqtt/test_mixins.py +++ b/tests/components/mqtt/test_mixins.py @@ -15,7 +15,7 @@ from homeassistant.core import CoreState, HomeAssistant, callback from homeassistant.helpers import device_registry as dr, issue_registry as ir from tests.common import MockConfigEntry, async_capture_events, async_fire_mqtt_message -from tests.typing import MqttMockHAClientGenerator +from tests.typing import MqttMockHAClientGenerator, MqttMockPahoClient @pytest.mark.parametrize( @@ -37,7 +37,8 @@ from tests.typing import MqttMockHAClientGenerator ], ) async def test_availability_with_shared_state_topic( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test the state is not changed twice. @@ -294,10 +295,11 @@ async def test_availability_with_shared_state_topic( ], ) @patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@pytest.mark.usefixtures("mqtt_client_mock") async def test_default_entity_and_device_name( hass: HomeAssistant, device_registry: dr.DeviceRegistry, + mqtt_client_mock: MqttMockPahoClient, + mqtt_config_entry_data, caplog: pytest.LogCaptureFixture, entity_id: str, friendly_name: str, @@ -339,7 +341,8 @@ async def test_default_entity_and_device_name( async def test_name_attribute_is_set_or_not( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test frendly name with device_class set. 
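# Illustrative sketch, not part of the patch: the blocked-attribute helper
# calls in the lock tests above exercise the generic json_attributes_topic
# support, where a JSON object published to that topic becomes extra state
# attributes unless the key is in the platform's blocked set
# (MQTT_LOCK_ATTRIBUTES_BLOCKED for locks). Assumed minimal config in the
# style of the test DEFAULT_CONFIG dicts:
EXAMPLE_LOCK_WITH_ATTRIBUTES = {
    "mqtt": {
        "lock": {
            "name": "test",
            "command_topic": "command-topic",
            "json_attributes_topic": "attributes-topic",
        }
    }
}
# Publishing '{"battery": 95}' to "attributes-topic" would surface a "battery"
# state attribute on lock.test, while blocked keys are filtered out.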
diff --git a/tests/components/mqtt/test_notify.py b/tests/components/mqtt/test_notify.py index 4837ee214c4..540dbbafd99 100644 --- a/tests/components/mqtt/test_notify.py +++ b/tests/components/mqtt/test_notify.py @@ -199,7 +199,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, notify.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + notify.DOMAIN, + DEFAULT_CONFIG, ) @@ -210,7 +214,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, notify.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + notify.DOMAIN, + DEFAULT_CONFIG, ) @@ -427,7 +435,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = notify.DOMAIN diff --git a/tests/components/mqtt/test_number.py b/tests/components/mqtt/test_number.py index 48aaa11f672..2cd5c5390f5 100644 --- a/tests/components/mqtt/test_number.py +++ b/tests/components/mqtt/test_number.py @@ -47,7 +47,6 @@ from .test_common import ( help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, - help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -558,7 +557,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, number.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + number.DOMAIN, + DEFAULT_CONFIG, ) @@ -569,7 +572,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, number.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + number.DOMAIN, + DEFAULT_CONFIG, ) @@ -784,6 +791,7 @@ async def test_min_max_step_attributes( ], ) async def test_invalid_min_max_attributes( + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -863,7 +871,7 @@ async def test_default_mode( async def test_mode( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - mode: str, + mode, ) -> None: """Test mode.""" await mqtt_mock_entry() @@ -1022,7 +1030,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = number.DOMAIN @@ -1073,7 +1082,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = number.DOMAIN @@ -1101,18 +1111,6 @@ async def test_entity_name( ) -async def test_entity_icon_and_entity_picture( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: 
- """Test the entity icon or picture setup.""" - domain = number.DOMAIN - config = DEFAULT_CONFIG - await help_test_entity_icon_and_entity_picture( - hass, mqtt_mock_entry, domain, config - ) - - @pytest.mark.parametrize( "hass_config", [ diff --git a/tests/components/mqtt/test_scene.py b/tests/components/mqtt/test_scene.py index d78dbe5c003..9badd6aeee0 100644 --- a/tests/components/mqtt/test_scene.py +++ b/tests/components/mqtt/test_scene.py @@ -263,7 +263,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, scene.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + scene.DOMAIN, + DEFAULT_CONFIG, ) @@ -274,7 +278,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, scene.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + scene.DOMAIN, + DEFAULT_CONFIG, ) @@ -382,7 +390,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = scene.DOMAIN @@ -405,7 +414,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = scene.DOMAIN diff --git a/tests/components/mqtt/test_select.py b/tests/components/mqtt/test_select.py index 8d79a3ce609..26a64d70fee 100644 --- a/tests/components/mqtt/test_select.py +++ b/tests/components/mqtt/test_select.py @@ -67,7 +67,9 @@ DEFAULT_CONFIG = { } -def _test_run_select_setup_params(topic: str) -> Generator[tuple[ConfigType, str]]: +def _test_run_select_setup_params( + topic: str, +) -> Generator[tuple[ConfigType, str], None]: yield ( { mqtt.DOMAIN: { @@ -405,7 +407,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, select.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + select.DOMAIN, + DEFAULT_CONFIG, ) @@ -591,7 +597,7 @@ async def test_entity_debug_info_message( def _test_options_attributes_options_config( request: tuple[list[str]], -) -> Generator[tuple[ConfigType, list[str]]]: +) -> Generator[tuple[ConfigType, list[str]], None]: for option in request: yield ( { @@ -610,10 +616,12 @@ def _test_options_attributes_options_config( @pytest.mark.parametrize( ("hass_config", "options"), - _test_options_attributes_options_config((["milk", "beer"], ["milk"], [])), # type:ignore[arg-type] + _test_options_attributes_options_config((["milk", "beer"], ["milk"], [])), ) async def test_options_attributes( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, options: list[str] + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + options: list[str], ) -> None: """Test options attribute.""" await mqtt_mock_entry() @@ -697,7 +705,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading 
the MQTT platform.""" domain = select.DOMAIN @@ -750,7 +759,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = select.DOMAIN diff --git a/tests/components/mqtt/test_sensor.py b/tests/components/mqtt/test_sensor.py index 7f418864872..94eb049dda7 100644 --- a/tests/components/mqtt/test_sensor.py +++ b/tests/components/mqtt/test_sensor.py @@ -3,7 +3,6 @@ import copy from datetime import datetime, timedelta import json -import logging from pathlib import Path from typing import Any from unittest.mock import MagicMock, patch @@ -53,7 +52,6 @@ from .test_common import ( help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_disabled_by_default, - help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -112,48 +110,6 @@ async def test_setting_sensor_value_via_mqtt_message( assert state.attributes.get("unit_of_measurement") == "fav unit" -@pytest.mark.parametrize( - "hass_config", - [ - { - mqtt.DOMAIN: { - sensor.DOMAIN: { - "name": "test", - "state_topic": "test-topic", - "device_class": "enum", - "options": ["red", "green", "blue"], - } - } - }, - ], -) -async def test_setting_enum_sensor_value_via_mqtt_message( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the setting of the value via MQTT of an enum type sensor.""" - await mqtt_mock_entry() - - async_fire_mqtt_message(hass, "test-topic", "red") - state = hass.states.get("sensor.test") - assert state.state == "red" - - async_fire_mqtt_message(hass, "test-topic", "green") - state = hass.states.get("sensor.test") - assert state.state == "green" - - with caplog.at_level(logging.WARNING): - async_fire_mqtt_message(hass, "test-topic", "yellow") - assert ( - "Ignoring invalid option received on topic 'test-topic', " - "got 'yellow', allowed: red, green, blue" in caplog.text - ) - # Assert the state update was filtered out and ignored - state = hass.states.get("sensor.test") - assert state.state == "green" - - @pytest.mark.parametrize( "hass_config", [ @@ -300,17 +256,6 @@ async def test_setting_sensor_to_long_state_via_mqtt_message( STATE_UNKNOWN, True, ), - ( - help_custom_config( - sensor.DOMAIN, - DEFAULT_CONFIG, - ({"device_class": sensor.SensorDeviceClass.TIMESTAMP},), - ), - sensor.SensorDeviceClass.TIMESTAMP, - "None", - STATE_UNKNOWN, - False, - ), ( help_custom_config( sensor.DOMAIN, @@ -672,7 +617,9 @@ async def test_setting_sensor_last_reset_via_mqtt_json_message( ], ) async def test_setting_sensor_last_reset_via_mqtt_json_message_2( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test the setting of the value via MQTT with JSON payload.""" await hass.async_block_till_done() @@ -714,7 +661,7 @@ async def test_force_update_disabled( def test_callback(event: Event) -> None: events.append(event) - hass.bus.async_listen(EVENT_STATE_CHANGED, test_callback) # type:ignore[arg-type] + hass.bus.async_listen(EVENT_STATE_CHANGED, test_callback) async_fire_mqtt_message(hass, "test-topic", "100") await hass.async_block_till_done() @@ -752,7 +699,7 @@ async def 
test_force_update_enabled( def test_callback(event: Event) -> None: events.append(event) - hass.bus.async_listen(EVENT_STATE_CHANGED, test_callback) # type:ignore[arg-type] + hass.bus.async_listen(EVENT_STATE_CHANGED, test_callback) async_fire_mqtt_message(hass, "test-topic", "100") await hass.async_block_till_done() @@ -863,7 +810,9 @@ async def test_discovery_update_availability( ], ) async def test_invalid_device_class( - mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test device_class option with invalid value.""" assert await mqtt_mock_entry() @@ -922,68 +871,15 @@ async def test_valid_device_class_and_uom( ], ) async def test_invalid_state_class( - mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test state_class option with invalid value.""" assert await mqtt_mock_entry() assert "expected SensorStateClass or one of" in caplog.text -@pytest.mark.parametrize( - ("hass_config", "error_logged"), - [ - ( - { - mqtt.DOMAIN: { - sensor.DOMAIN: { - "name": "test", - "state_topic": "test-topic", - "state_class": "measurement", - "options": ["red", "green", "blue"], - } - } - }, - "Specifying `options` is not allowed together with the `state_class` " - "or `unit_of_measurement` option", - ), - ( - { - mqtt.DOMAIN: { - sensor.DOMAIN: { - "name": "test", - "state_topic": "test-topic", - "device_class": "gas", - "options": ["red", "green", "blue"], - } - } - }, - "The option `options` must be used together with " - "device class `enum`, got `device_class` 'gas'", - ), - ( - { - mqtt.DOMAIN: { - sensor.DOMAIN: { - "name": "test", - "state_topic": "test-topic", - "options": [], - } - } - }, - "An empty options list is not allowed", - ), - ], -) -async def test_invalid_options_config( - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, - error_logged: str, -) -> None: - """Test state_class, deviceclass with sensor options.""" - assert await mqtt_mock_entry() - assert error_logged in caplog.text - - @pytest.mark.parametrize( "hass_config", [ @@ -1001,13 +897,6 @@ async def test_invalid_options_config( "state_topic": "test-topic", "state_class": None, }, - { - "name": "Test 4", - "state_topic": "test-topic", - "state_class": None, - "device_class": "enum", - "options": ["red", "green", "blue"], - }, ] } } @@ -1065,7 +954,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, sensor.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + sensor.DOMAIN, + DEFAULT_CONFIG, ) @@ -1076,7 +969,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, sensor.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + sensor.DOMAIN, + DEFAULT_CONFIG, ) @@ -1401,7 +1298,8 @@ async def test_value_template_with_entity_id( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = sensor.DOMAIN @@ -1556,7 +1454,8 @@ async def 
test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = sensor.DOMAIN @@ -1584,18 +1483,6 @@ async def test_entity_name( ) -async def test_entity_icon_and_entity_picture( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test the entity name setup.""" - domain = sensor.DOMAIN - config = DEFAULT_CONFIG - await help_test_entity_icon_and_entity_picture( - hass, mqtt_mock_entry, domain, config - ) - - @pytest.mark.parametrize( "hass_config", [ diff --git a/tests/components/mqtt/test_siren.py b/tests/components/mqtt/test_siren.py index 58a5cb735f9..c32c57d4f02 100644 --- a/tests/components/mqtt/test_siren.py +++ b/tests/components/mqtt/test_siren.py @@ -60,7 +60,9 @@ DEFAULT_CONFIG = { async def async_turn_on( - hass: HomeAssistant, entity_id: str, parameters: dict[str, Any] + hass: HomeAssistant, + entity_id: str, + parameters: dict[str, Any], ) -> None: """Turn all or specified siren on.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} @@ -178,7 +180,9 @@ async def test_sending_mqtt_commands_and_optimistic( ], ) async def test_controlling_state_via_topic_and_json_message( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test the controlling state via topic and JSON message.""" await mqtt_mock_entry() @@ -594,7 +598,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, mqtt_mock_entry, siren.DOMAIN, DEFAULT_CONFIG, None + hass, mqtt_mock_entry, siren.DOMAIN, DEFAULT_CONFIG, {} ) @@ -614,7 +618,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, siren.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + siren.DOMAIN, + DEFAULT_CONFIG, ) @@ -625,7 +633,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, siren.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + siren.DOMAIN, + DEFAULT_CONFIG, ) @@ -775,7 +787,8 @@ async def test_discovery_update_siren_template( ], ) async def test_command_templates( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test siren with command templates optimistic.""" mqtt_mock = await mqtt_mock_entry() @@ -974,7 +987,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with command templates and different encoding.""" domain = siren.DOMAIN - config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) + config = copy.deepcopy(DEFAULT_CONFIG) config[mqtt.DOMAIN][domain][siren.ATTR_AVAILABLE_TONES] = ["siren", "xylophone"] await help_test_publishing_with_custom_encoding( @@ -992,7 +1005,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: 
MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = siren.DOMAIN @@ -1002,7 +1016,9 @@ async def test_reloadable( @pytest.mark.parametrize( ("topic", "value", "attribute", "attribute_value"), - [("state_topic", "ON", None, "on")], + [ + ("state_topic", "ON", None, "on"), + ], ) async def test_encoding_subscribable_topics( hass: HomeAssistant, @@ -1040,7 +1056,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = siren.DOMAIN diff --git a/tests/components/mqtt/test_subscription.py b/tests/components/mqtt/test_subscription.py index 86279b2006c..7247458a667 100644 --- a/tests/components/mqtt/test_subscription.py +++ b/tests/components/mqtt/test_subscription.py @@ -2,6 +2,8 @@ from unittest.mock import ANY +import pytest + from homeassistant.components.mqtt.subscription import ( async_prepare_subscribe_topics, async_subscribe_topics, @@ -14,7 +16,9 @@ from tests.typing import MqttMockHAClientGenerator async def test_subscribe_topics( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test subscription to topics.""" await mqtt_mock_entry() @@ -65,7 +69,9 @@ async def test_subscribe_topics( async def test_modify_topics( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test modification of topics.""" await mqtt_mock_entry() @@ -130,7 +136,9 @@ async def test_modify_topics( async def test_qos_encoding_default( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test default qos and encoding.""" mqtt_mock = await mqtt_mock_entry() @@ -150,7 +158,9 @@ async def test_qos_encoding_default( async def test_qos_encoding_custom( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test custom qos and encoding.""" mqtt_mock = await mqtt_mock_entry() @@ -177,7 +187,9 @@ async def test_qos_encoding_custom( async def test_no_change( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test subscription to topics without change.""" mqtt_mock = await mqtt_mock_entry() diff --git a/tests/components/mqtt/test_switch.py b/tests/components/mqtt/test_switch.py index dceeff07377..42d2e092d83 100644 --- a/tests/components/mqtt/test_switch.py +++ b/tests/components/mqtt/test_switch.py @@ -191,50 +191,6 @@ async def test_sending_inital_state_and_optimistic( assert state.attributes.get(ATTR_ASSUMED_STATE) -@pytest.mark.parametrize( - "hass_config", - [ - { - mqtt.DOMAIN: { - switch.DOMAIN: { - "name": "test", - "command_topic": "command-topic", - "command_template": '{"state": "{{ value }}"}', - "payload_on": "beer on", - "payload_off": "beer off", - "qos": "2", - } - } - } - ], -) -async def test_sending_mqtt_commands_with_command_template( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator -) -> None: - 
"""Test the sending MQTT commands using a command template.""" - fake_state = State("switch.test", "on") - mock_restore_cache(hass, (fake_state,)) - - mqtt_mock = await mqtt_mock_entry() - - state = hass.states.get("switch.test") - assert state.state == STATE_ON - assert state.attributes.get(ATTR_ASSUMED_STATE) - - await common.async_turn_on(hass, "switch.test") - - mqtt_mock.async_publish.assert_called_once_with( - "command-topic", '{"state": "beer on"}', 2, False - ) - mqtt_mock.async_publish.reset_mock() - - await common.async_turn_off(hass, "switch.test") - - mqtt_mock.async_publish.assert_called_once_with( - "command-topic", '{"state": "beer off"}', 2, False - ) - - @pytest.mark.parametrize( "hass_config", [ @@ -403,7 +359,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, mqtt_mock_entry, switch.DOMAIN, DEFAULT_CONFIG, None + hass, mqtt_mock_entry, switch.DOMAIN, DEFAULT_CONFIG, {} ) @@ -423,7 +379,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, switch.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + switch.DOMAIN, + DEFAULT_CONFIG, ) @@ -434,7 +394,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, switch.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + switch.DOMAIN, + DEFAULT_CONFIG, ) @@ -560,7 +524,8 @@ async def test_discovery_update_switch_template( async def test_discovery_update_unchanged_switch( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test update of discovered switch.""" data1 = ( @@ -710,7 +675,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = switch.DOMAIN @@ -760,7 +726,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = switch.DOMAIN diff --git a/tests/components/mqtt/test_tag.py b/tests/components/mqtt/test_tag.py index 41c417fe3e9..0d0765258f2 100644 --- a/tests/components/mqtt/test_tag.py +++ b/tests/components/mqtt/test_tag.py @@ -2,10 +2,10 @@ import copy import json -from typing import Any -from unittest.mock import ANY, AsyncMock +from unittest.mock import ANY, AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.mqtt.const import DOMAIN as MQTT_DOMAIN @@ -20,7 +20,7 @@ from tests.common import ( async_fire_mqtt_message, async_get_device_automations, ) -from tests.typing import MqttMockHAClientGenerator, WebSocketGenerator +from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator, WebSocketGenerator DEFAULT_CONFIG_DEVICE = { "device": {"identifiers": ["0AFFD2"]}, @@ -46,6 +46,13 @@ DEFAULT_TAG_SCAN_JSON = 
( ) +@pytest.fixture +def tag_mock() -> Generator[AsyncMock]: + """Fixture to mock tag.""" + with patch("homeassistant.components.tag.async_scan_tag") as mock_tag: + yield mock_tag + + @pytest.mark.no_fail_on_log_exception async def test_discover_bad_tag( hass: HomeAssistant, @@ -95,7 +102,9 @@ async def test_if_fires_on_mqtt_message_with_device( async def test_if_fires_on_mqtt_message_without_device( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, tag_mock: AsyncMock + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + tag_mock: AsyncMock, ) -> None: """Test tag scanning, without device.""" await mqtt_mock_entry() @@ -131,7 +140,9 @@ async def test_if_fires_on_mqtt_message_with_template( async def test_strip_tag_id( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, tag_mock: AsyncMock + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + tag_mock: AsyncMock, ) -> None: """Test strip whitespace from tag_id.""" await mqtt_mock_entry() @@ -197,7 +208,9 @@ async def test_if_fires_on_mqtt_message_after_update_with_device( async def test_if_fires_on_mqtt_message_after_update_without_device( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, tag_mock: AsyncMock + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + tag_mock: AsyncMock, ) -> None: """Test tag scanning after update.""" await mqtt_mock_entry() @@ -346,7 +359,9 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt_with_device( async def test_not_fires_on_mqtt_message_after_remove_by_mqtt_without_device( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, tag_mock: AsyncMock + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + tag_mock: AsyncMock, ) -> None: """Test tag scanning not firing after removal.""" await mqtt_mock_entry() @@ -497,7 +512,7 @@ async def test_entity_device_info_update( """Test device registry update.""" await mqtt_mock_entry() - config: dict[str, Any] = { + config = { "topic": "test-topic", "device": { "identifiers": ["helloworld"], @@ -889,9 +904,11 @@ async def test_update_with_bad_config_not_breaks_discovery( tag_mock.assert_called_once_with(ANY, "12345", ANY) -@pytest.mark.usefixtures("mqtt_mock") async def test_unload_entry( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, tag_mock: AsyncMock + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + tag_mock: AsyncMock, ) -> None: """Test unloading the MQTT entry.""" @@ -917,9 +934,12 @@ async def test_unload_entry( tag_mock.assert_not_called() -@pytest.mark.usefixtures("mqtt_mock", "tag_mock") async def test_value_template_fails( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + tag_mock: AsyncMock, + caplog: pytest.LogCaptureFixture, ) -> None: """Test the rendering of MQTT value template fails.""" config = copy.deepcopy(DEFAULT_CONFIG_DEVICE) diff --git a/tests/components/mqtt/test_text.py b/tests/components/mqtt/test_text.py index 96924030279..fc714efa513 100644 --- a/tests/components/mqtt/test_text.py +++ b/tests/components/mqtt/test_text.py @@ -251,7 +251,9 @@ async def test_controlling_validation_state_via_topic( ], ) async def test_attribute_validation_max_greater_then_min( - mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> 
None: """Test the validation of min and max configuration attributes.""" assert await mqtt_mock_entry() @@ -274,7 +276,9 @@ async def test_attribute_validation_max_greater_then_min( ], ) async def test_attribute_validation_max_not_greater_then_max_state_length( - mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test the max value of of max configuration attribute.""" assert await mqtt_mock_entry() @@ -432,7 +436,13 @@ async def test_default_availability_payload( } } await help_test_default_availability_payload( - hass, mqtt_mock_entry, text.DOMAIN, config, True, "state-topic", "some state" + hass, + mqtt_mock_entry, + text.DOMAIN, + config, + True, + "state-topic", + "some state", ) @@ -451,7 +461,13 @@ async def test_custom_availability_payload( } await help_test_custom_availability_payload( - hass, mqtt_mock_entry, text.DOMAIN, config, True, "state-topic", "1" + hass, + mqtt_mock_entry, + text.DOMAIN, + config, + True, + "state-topic", + "1", ) @@ -469,7 +485,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, mqtt_mock_entry, text.DOMAIN, DEFAULT_CONFIG, None + hass, mqtt_mock_entry, text.DOMAIN, DEFAULT_CONFIG, {} ) @@ -489,7 +505,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, text.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + text.DOMAIN, + DEFAULT_CONFIG, ) @@ -500,7 +520,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, text.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + text.DOMAIN, + DEFAULT_CONFIG, ) @@ -730,7 +754,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = text.DOMAIN @@ -780,7 +805,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = text.DOMAIN diff --git a/tests/components/mqtt/test_trigger.py b/tests/components/mqtt/test_trigger.py index 5bf36849b13..2e0506a02ab 100644 --- a/tests/components/mqtt/test_trigger.py +++ b/tests/components/mqtt/test_trigger.py @@ -9,7 +9,7 @@ from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, SERVICE_TURN_O from homeassistant.core import HassJobType, HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component -from tests.common import async_fire_mqtt_message, mock_component +from tests.common import async_fire_mqtt_message, async_mock_service, mock_component from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator @@ -18,6 +18,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> 
list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.fixture(autouse=True) async def setup_comp( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator @@ -28,7 +34,7 @@ async def setup_comp( async def test_if_fires_on_topic_match( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test if message is fired on topic match.""" assert await async_setup_component( @@ -51,10 +57,9 @@ async def test_if_fires_on_topic_match( async_fire_mqtt_message(hass, "test-topic", '{ "hello": "world" }') await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] - == 'mqtt - test-topic - { "hello": "world" } - world - 0' + calls[0].data["some"] == 'mqtt - test-topic - { "hello": "world" } - world - 0' ) await hass.services.async_call( @@ -63,15 +68,13 @@ async def test_if_fires_on_topic_match( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 - async_fire_mqtt_message(hass, "test-topic", "test_payload") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 1 async def test_if_fires_on_topic_and_payload_match( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test if message is fired on topic and payload match.""" assert await async_setup_component( @@ -91,11 +94,11 @@ async def test_if_fires_on_topic_and_payload_match( async_fire_mqtt_message(hass, "test-topic", "hello") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_topic_and_payload_match2( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test if message is fired on topic and payload match. 
@@ -118,11 +121,11 @@ async def test_if_fires_on_topic_and_payload_match2( async_fire_mqtt_message(hass, "test-topic", "0") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_templated_topic_and_payload_match( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test if message is fired on templated topic and payload match.""" assert await async_setup_component( @@ -142,19 +145,19 @@ async def test_if_fires_on_templated_topic_and_payload_match( async_fire_mqtt_message(hass, "test-topic-", "foo") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_mqtt_message(hass, "test-topic-4", "foo") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_mqtt_message(hass, "test-topic-4", "bar") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_fires_on_payload_template( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test if message is fired on templated topic and payload match.""" assert await async_setup_component( @@ -175,21 +178,19 @@ async def test_if_fires_on_payload_template( async_fire_mqtt_message(hass, "test-topic", "hello") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_mqtt_message(hass, "test-topic", '{"unwanted_key":"hello"}') await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_mqtt_message(hass, "test-topic", '{"wanted_key":"hello"}') await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_non_allowed_templates( - hass: HomeAssistant, - service_calls: list[ServiceCall], - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture ) -> None: """Test non allowed function in template.""" assert await async_setup_component( @@ -213,7 +214,7 @@ async def test_non_allowed_templates( async def test_if_not_fires_on_topic_but_no_payload_match( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test if message is not fired on topic but no payload.""" assert await async_setup_component( @@ -233,11 +234,11 @@ async def test_if_not_fires_on_topic_but_no_payload_match( async_fire_mqtt_message(hass, "test-topic", "no-hello") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_encoding_default( - hass: HomeAssistant, service_calls: list[ServiceCall], setup_comp + hass: HomeAssistant, calls: list[ServiceCall], setup_comp ) -> None: """Test default encoding.""" assert await async_setup_component( @@ -257,7 +258,7 @@ async def test_encoding_default( async def test_encoding_custom( - hass: HomeAssistant, service_calls: list[ServiceCall], setup_comp + hass: HomeAssistant, calls: list[ServiceCall], setup_comp ) -> None: """Test default encoding.""" assert await async_setup_component( diff --git a/tests/components/mqtt/test_update.py b/tests/components/mqtt/test_update.py index 4ca10cbe8b2..bb9ae12c66b 100644 --- a/tests/components/mqtt/test_update.py +++ b/tests/components/mqtt/test_update.py @@ -25,7 +25,6 @@ from .test_common import ( help_test_entity_device_info_update, 
help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, - help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_reloadable, help_test_setting_attribute_via_mqtt_json_message, @@ -314,60 +313,6 @@ async def test_empty_json_state_message( } ], ) -async def test_invalid_json_state_message( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test an empty JSON payload.""" - state_topic = "test/state-topic" - await mqtt_mock_entry() - - async_fire_mqtt_message( - hass, - state_topic, - '{"installed_version":"1.9.0","latest_version":"1.9.0",' - '"title":"Test Update 1 Title","release_url":"https://example.com/release1",' - '"release_summary":"Test release summary 1",' - '"entity_picture": "https://example.com/icon1.png"}', - ) - - await hass.async_block_till_done() - - state = hass.states.get("update.test_update") - assert state.state == STATE_OFF - assert state.attributes.get("installed_version") == "1.9.0" - assert state.attributes.get("latest_version") == "1.9.0" - assert state.attributes.get("release_summary") == "Test release summary 1" - assert state.attributes.get("release_url") == "https://example.com/release1" - assert state.attributes.get("title") == "Test Update 1 Title" - assert state.attributes.get("entity_picture") == "https://example.com/icon1.png" - - # Test update schema validation with invalid value in JSON update - async_fire_mqtt_message(hass, state_topic, '{"update_percentage":101}') - - await hass.async_block_till_done() - assert ( - "Schema violation after processing payload '{\"update_percentage\":101}' on " - "topic 'test/state-topic' for entity 'update.test_update': value must be at " - "most 100 for dictionary value @ data['update_percentage']" in caplog.text - ) - - -@pytest.mark.parametrize( - "hass_config", - [ - { - mqtt.DOMAIN: { - update.DOMAIN: { - "state_topic": "test/state-topic", - "name": "Test Update", - "display_precision": 1, - } - } - } - ], -) async def test_json_state_message( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: @@ -409,45 +354,6 @@ async def test_json_state_message( assert state.attributes.get("installed_version") == "1.9.0" assert state.attributes.get("latest_version") == "2.0.0" assert state.attributes.get("entity_picture") == "https://example.com/icon2.png" - assert state.attributes.get("in_progress") is False - assert state.attributes.get("update_percentage") is None - - # Test in_progress status - async_fire_mqtt_message(hass, state_topic, '{"in_progress":true}') - await hass.async_block_till_done() - - state = hass.states.get("update.test_update") - assert state.state == STATE_ON - assert state.attributes.get("installed_version") == "1.9.0" - assert state.attributes.get("latest_version") == "2.0.0" - assert state.attributes.get("entity_picture") == "https://example.com/icon2.png" - assert state.attributes.get("in_progress") is True - assert state.attributes.get("update_percentage") is None - - async_fire_mqtt_message(hass, state_topic, '{"in_progress":false}') - await hass.async_block_till_done() - state = hass.states.get("update.test_update") - assert state.attributes.get("in_progress") is False - - # Test update_percentage status - async_fire_mqtt_message(hass, state_topic, '{"update_percentage":51.75}') - await hass.async_block_till_done() - state = hass.states.get("update.test_update") - assert state.attributes.get("in_progress") is True - assert 
state.attributes.get("update_percentage") == 51.75 - assert state.attributes.get("display_precision") == 1 - - async_fire_mqtt_message(hass, state_topic, '{"update_percentage":100}') - await hass.async_block_till_done() - state = hass.states.get("update.test_update") - assert state.attributes.get("in_progress") is True - assert state.attributes.get("update_percentage") == 100 - - async_fire_mqtt_message(hass, state_topic, '{"update_percentage":null}') - await hass.async_block_till_done() - state = hass.states.get("update.test_update") - assert state.attributes.get("in_progress") is False - assert state.attributes.get("update_percentage") is None @pytest.mark.parametrize( @@ -598,7 +504,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, update.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + update.DOMAIN, + DEFAULT_CONFIG, ) @@ -609,7 +519,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, update.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + update.DOMAIN, + DEFAULT_CONFIG, ) @@ -765,7 +679,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = update.DOMAIN @@ -776,7 +691,8 @@ async def test_unload_entry( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = update.DOMAIN @@ -818,10 +734,6 @@ async def test_reloadable( '{"entity_picture": "https://example.com/icon1.png"}', '{"entity_picture": "https://example.com/icon2.png"}', ), - ("test-topic", '{"in_progress": true}', '{"in_progress": false}'), - ("test-topic", '{"update_percentage": 0}', '{"update_percentage": 50}'), - ("test-topic", '{"update_percentage": 50}', '{"update_percentage": 100}'), - ("test-topic", '{"update_percentage": 100}', '{"update_percentage": null}'), ("availability-topic", "online", "offline"), ("json-attributes-topic", '{"attr1": "val1"}', '{"attr1": "val2"}'), ], @@ -873,19 +785,3 @@ async def test_value_template_fails( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' rendering template" in caplog.text ) - - -async def test_entity_icon_and_entity_picture( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test the entity icon or picture setup.""" - domain = update.DOMAIN - config = DEFAULT_CONFIG - await help_test_entity_icon_and_entity_picture( - hass, - mqtt_mock_entry, - domain, - config, - default_entity_picture="https://brands.home-assistant.io/_/mqtt/icon.png", - ) diff --git a/tests/components/mqtt/test_util.py b/tests/components/mqtt/test_util.py index 37bf6982b7a..a3802de69da 100644 --- a/tests/components/mqtt/test_util.py +++ b/tests/components/mqtt/test_util.py @@ -236,7 +236,8 @@ async def test_waiting_for_client_not_loaded( unsubs: list[Callable[[], None]] = [] - async def _async_just_in_time_subscribe() -> None: + async def _async_just_in_time_subscribe() -> Callable[[], None]: + nonlocal unsub assert await mqtt.async_wait_for_mqtt_client(hass) # 
Awaiting a second time should work too and return True assert await mqtt.async_wait_for_mqtt_client(hass) @@ -260,12 +261,12 @@ async def test_waiting_for_client_loaded( """Test waiting for client where mqtt entry is loaded.""" unsub: Callable[[], None] | None = None - async def _async_just_in_time_subscribe() -> None: + async def _async_just_in_time_subscribe() -> Callable[[], None]: nonlocal unsub assert await mqtt.async_wait_for_mqtt_client(hass) unsub = await mqtt.async_subscribe(hass, "test_topic", lambda msg: None) - entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + entry = hass.config_entries.async_entries(mqtt.DATA_MQTT)[0] assert entry.state is ConfigEntryState.LOADED await _async_just_in_time_subscribe() @@ -289,7 +290,7 @@ async def test_waiting_for_client_entry_fails( ) entry.add_to_hass(hass) - async def _async_just_in_time_subscribe() -> None: + async def _async_just_in_time_subscribe() -> Callable[[], None]: assert not await mqtt.async_wait_for_mqtt_client(hass) hass.async_create_task(_async_just_in_time_subscribe()) @@ -299,7 +300,7 @@ async def test_waiting_for_client_entry_fails( side_effect=Exception, ): await hass.config_entries.async_setup(entry.entry_id) - assert entry.state is ConfigEntryState.SETUP_ERROR # type:ignore[comparison-overlap] + assert entry.state is ConfigEntryState.SETUP_ERROR async def test_waiting_for_client_setup_fails( @@ -317,7 +318,7 @@ async def test_waiting_for_client_setup_fails( ) entry.add_to_hass(hass) - async def _async_just_in_time_subscribe() -> None: + async def _async_just_in_time_subscribe() -> Callable[[], None]: assert not await mqtt.async_wait_for_mqtt_client(hass) hass.async_create_task(_async_just_in_time_subscribe()) @@ -326,7 +327,7 @@ async def test_waiting_for_client_setup_fails( # Simulate MQTT setup fails before the client would become available mqtt_client_mock.connect.side_effect = Exception assert not await hass.config_entries.async_setup(entry.entry_id) - assert entry.state is ConfigEntryState.SETUP_ERROR # type:ignore[comparison-overlap] + assert entry.state is ConfigEntryState.SETUP_ERROR @patch("homeassistant.components.mqtt.util.AVAILABILITY_TIMEOUT", 0.01) diff --git a/tests/components/mqtt/test_vacuum.py b/tests/components/mqtt/test_vacuum.py index fef62c33a93..8c01138ccb9 100644 --- a/tests/components/mqtt/test_vacuum.py +++ b/tests/components/mqtt/test_vacuum.py @@ -2,6 +2,7 @@ from copy import deepcopy import json +import logging from typing import Any from unittest.mock import patch @@ -21,6 +22,7 @@ from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, + DOMAIN, SERVICE_CLEAN_SPOT, SERVICE_LOCATE, SERVICE_PAUSE, @@ -100,6 +102,35 @@ CONFIG_ALL_SERVICES = help_custom_config( ) +async def test_warning_schema_option( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the warning on use of deprecated schema option.""" + await mqtt_mock_entry() + # Send discovery message with deprecated schema option + async_fire_mqtt_message( + hass, + f"homeassistant/{vacuum.DOMAIN}/bla/config", + '{"name": "test", "schema": "state", "o": {"name": "Bla2MQTT", "sw": "0.99", "url":"https://example.com/support"}}', + ) + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("vacuum.test") + assert state is not None + with caplog.at_level(logging.WARNING): + assert ( + "The `schema` option is deprecated for MQTT vacuum, but it was 
used in a " + "discovery payload. Please contact the maintainer of the integration or " + "service that supplies the config, and suggest to remove the option." + in caplog.text + ) + assert "https://example.com/support" in caplog.text + assert "at discovery topic homeassistant/vacuum/bla/config" in caplog.text + + @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) async def test_default_supported_features( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator @@ -121,34 +152,31 @@ async def test_all_commands( mqtt_mock = await mqtt_mock_entry() await hass.services.async_call( - vacuum.DOMAIN, SERVICE_START, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + DOMAIN, SERVICE_START, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_called_once_with(COMMAND_TOPIC, "start", 0, False) mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - vacuum.DOMAIN, SERVICE_STOP, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + DOMAIN, SERVICE_STOP, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_called_once_with(COMMAND_TOPIC, "stop", 0, False) mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - vacuum.DOMAIN, SERVICE_PAUSE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + DOMAIN, SERVICE_PAUSE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_called_once_with(COMMAND_TOPIC, "pause", 0, False) mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - vacuum.DOMAIN, SERVICE_LOCATE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + DOMAIN, SERVICE_LOCATE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_called_once_with(COMMAND_TOPIC, "locate", 0, False) mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - vacuum.DOMAIN, - SERVICE_CLEAN_SPOT, - {"entity_id": ENTITY_MATCH_ALL}, - blocking=True, + DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_called_once_with( COMMAND_TOPIC, "clean_spot", 0, False @@ -156,10 +184,7 @@ async def test_all_commands( mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - vacuum.DOMAIN, - SERVICE_RETURN_TO_BASE, - {"entity_id": ENTITY_MATCH_ALL}, - blocking=True, + DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_called_once_with( COMMAND_TOPIC, "return_to_base", 0, False @@ -210,43 +235,37 @@ async def test_commands_without_supported_features( mqtt_mock = await mqtt_mock_entry() await hass.services.async_call( - vacuum.DOMAIN, SERVICE_START, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + DOMAIN, SERVICE_START, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - vacuum.DOMAIN, SERVICE_PAUSE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + DOMAIN, SERVICE_PAUSE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - vacuum.DOMAIN, SERVICE_STOP, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + DOMAIN, SERVICE_STOP, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - vacuum.DOMAIN, - SERVICE_RETURN_TO_BASE, - {"entity_id": ENTITY_MATCH_ALL}, - blocking=True, + DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": 
ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - vacuum.DOMAIN, SERVICE_LOCATE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + DOMAIN, SERVICE_LOCATE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - vacuum.DOMAIN, - SERVICE_CLEAN_SPOT, - {"entity_id": ENTITY_MATCH_ALL}, - blocking=True, + DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() @@ -292,7 +311,7 @@ async def test_command_without_command_topic( mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() - await common.async_send_command(hass, "some command", entity_id="vacuum.test") + await common.async_send_command(hass, "some command", "vacuum.test") mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() @@ -488,7 +507,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, vacuum.DOMAIN, DEFAULT_CONFIG_2 + hass, + mqtt_mock_entry, + caplog, + vacuum.DOMAIN, + DEFAULT_CONFIG_2, ) @@ -499,7 +522,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, vacuum.DOMAIN, DEFAULT_CONFIG_2 + hass, + mqtt_mock_entry, + caplog, + vacuum.DOMAIN, + DEFAULT_CONFIG_2, ) @@ -655,8 +682,20 @@ async def test_entity_debug_info_message( @pytest.mark.parametrize( ("service", "topic", "parameters", "payload", "template"), [ - (vacuum.SERVICE_START, "command_topic", None, "start", None), - (vacuum.SERVICE_CLEAN_SPOT, "command_topic", None, "clean_spot", None), + ( + vacuum.SERVICE_START, + "command_topic", + None, + "start", + None, + ), + ( + vacuum.SERVICE_CLEAN_SPOT, + "command_topic", + None, + "clean_spot", + None, + ), ( vacuum.SERVICE_SET_FAN_SPEED, "set_fan_speed_topic", @@ -671,7 +710,13 @@ async def test_entity_debug_info_message( "custom command", None, ), - (vacuum.SERVICE_STOP, "command_topic", None, "stop", None), + ( + vacuum.SERVICE_STOP, + "command_topic", + None, + "stop", + None, + ), ], ) async def test_publishing_with_custom_encoding( @@ -715,7 +760,8 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = vacuum.DOMAIN diff --git a/tests/components/mqtt/test_valve.py b/tests/components/mqtt/test_valve.py index 6dd0102b8a3..6f88e160b73 100644 --- a/tests/components/mqtt/test_valve.py +++ b/tests/components/mqtt/test_valve.py @@ -14,7 +14,6 @@ from homeassistant.components.valve import ( ATTR_CURRENT_POSITION, ATTR_POSITION, SERVICE_SET_VALVE_POSITION, - ValveState, ) from homeassistant.const import ( ATTR_ASSUMED_STATE, @@ -23,6 +22,10 @@ from homeassistant.const import ( SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE, SERVICE_STOP_VALVE, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -100,14 +103,14 @@ DEFAULT_CONFIG_REPORTS_POSITION = { @pytest.mark.parametrize( ("message", "asserted_state"), [ - 
("open", ValveState.OPEN), - ("closed", ValveState.CLOSED), - ("closing", ValveState.CLOSING), - ("opening", ValveState.OPENING), - ('{"state" : "open"}', ValveState.OPEN), - ('{"state" : "closed"}', ValveState.CLOSED), - ('{"state" : "closing"}', ValveState.CLOSING), - ('{"state" : "opening"}', ValveState.OPENING), + ("open", STATE_OPEN), + ("closed", STATE_CLOSED), + ("closing", STATE_CLOSING), + ("opening", STATE_OPENING), + ('{"state" : "open"}', STATE_OPEN), + ('{"state" : "closed"}', STATE_CLOSED), + ('{"state" : "closing"}', STATE_CLOSING), + ('{"state" : "opening"}', STATE_OPENING), ], ) async def test_state_via_state_topic_no_position( @@ -152,10 +155,10 @@ async def test_state_via_state_topic_no_position( @pytest.mark.parametrize( ("message", "asserted_state"), [ - ('{"state":"open"}', ValveState.OPEN), - ('{"state":"closed"}', ValveState.CLOSED), - ('{"state":"closing"}', ValveState.CLOSING), - ('{"state":"opening"}', ValveState.OPENING), + ('{"state":"open"}', STATE_OPEN), + ('{"state":"closed"}', STATE_CLOSED), + ('{"state":"closing"}', STATE_CLOSING), + ('{"state":"opening"}', STATE_OPENING), ], ) async def test_state_via_state_topic_with_template( @@ -196,9 +199,9 @@ async def test_state_via_state_topic_with_template( @pytest.mark.parametrize( ("message", "asserted_state"), [ - ('{"position":100}', ValveState.OPEN), - ('{"position":50.0}', ValveState.OPEN), - ('{"position":0}', ValveState.CLOSED), + ('{"position":100}', STATE_OPEN), + ('{"position":50.0}', STATE_OPEN), + ('{"position":0}', STATE_CLOSED), ('{"position":null}', STATE_UNKNOWN), ('{"position":"non_numeric"}', STATE_UNKNOWN), ('{"ignored":12}', STATE_UNKNOWN), @@ -242,23 +245,23 @@ async def test_state_via_state_topic_with_position_template( ("message", "asserted_state", "valve_position"), [ ("invalid", STATE_UNKNOWN, None), - ("0", ValveState.CLOSED, 0), - ("opening", ValveState.OPENING, None), - ("50", ValveState.OPEN, 50), - ("closing", ValveState.CLOSING, None), - ("100", ValveState.OPEN, 100), + ("0", STATE_CLOSED, 0), + ("opening", STATE_OPENING, None), + ("50", STATE_OPEN, 50), + ("closing", STATE_CLOSING, None), + ("100", STATE_OPEN, 100), ("open", STATE_UNKNOWN, None), ("closed", STATE_UNKNOWN, None), - ("-10", ValveState.CLOSED, 0), - ("110", ValveState.OPEN, 100), - ('{"position": 0, "state": "opening"}', ValveState.OPENING, 0), - ('{"position": 10, "state": "opening"}', ValveState.OPENING, 10), - ('{"position": 50, "state": "open"}', ValveState.OPEN, 50), - ('{"position": 100, "state": "closing"}', ValveState.CLOSING, 100), - ('{"position": 90, "state": "closing"}', ValveState.CLOSING, 90), - ('{"position": 0, "state": "closed"}', ValveState.CLOSED, 0), - ('{"position": -10, "state": "closed"}', ValveState.CLOSED, 0), - ('{"position": 110, "state": "open"}', ValveState.OPEN, 100), + ("-10", STATE_CLOSED, 0), + ("110", STATE_OPEN, 100), + ('{"position": 0, "state": "opening"}', STATE_OPENING, 0), + ('{"position": 10, "state": "opening"}', STATE_OPENING, 10), + ('{"position": 50, "state": "open"}', STATE_OPEN, 50), + ('{"position": 100, "state": "closing"}', STATE_CLOSING, 100), + ('{"position": 90, "state": "closing"}', STATE_CLOSING, 90), + ('{"position": 0, "state": "closed"}', STATE_CLOSED, 0), + ('{"position": -10, "state": "closed"}', STATE_CLOSED, 0), + ('{"position": 110, "state": "open"}', STATE_OPEN, 100), ], ) async def test_state_via_state_topic_through_position( @@ -303,7 +306,8 @@ async def test_state_via_state_topic_through_position( ], ) async def test_opening_closing_state_is_reset( - 
hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test the controlling state via topic through position. @@ -316,18 +320,18 @@ async def test_opening_closing_state_is_reset( assert not state.attributes.get(ATTR_ASSUMED_STATE) messages = [ - ('{"position": 0, "state": "opening"}', ValveState.OPENING, 0), - ('{"position": 50, "state": "opening"}', ValveState.OPENING, 50), - ('{"position": 60}', ValveState.OPENING, 60), - ('{"position": 100, "state": "opening"}', ValveState.OPENING, 100), - ('{"position": 100, "state": null}', ValveState.OPEN, 100), - ('{"position": 90, "state": "closing"}', ValveState.CLOSING, 90), - ('{"position": 40}', ValveState.CLOSING, 40), - ('{"position": 0}', ValveState.CLOSED, 0), - ('{"position": 10}', ValveState.OPEN, 10), - ('{"position": 0, "state": "opening"}', ValveState.OPENING, 0), - ('{"position": 0, "state": "closing"}', ValveState.CLOSING, 0), - ('{"position": 0}', ValveState.CLOSED, 0), + ('{"position": 0, "state": "opening"}', STATE_OPENING, 0), + ('{"position": 50, "state": "opening"}', STATE_OPENING, 50), + ('{"position": 60}', STATE_OPENING, 60), + ('{"position": 100, "state": "opening"}', STATE_OPENING, 100), + ('{"position": 100, "state": null}', STATE_OPEN, 100), + ('{"position": 90, "state": "closing"}', STATE_CLOSING, 90), + ('{"position": 40}', STATE_CLOSING, 40), + ('{"position": 0}', STATE_CLOSED, 0), + ('{"position": 10}', STATE_OPEN, 10), + ('{"position": 0, "state": "opening"}', STATE_OPENING, 0), + ('{"position": 0, "state": "closing"}', STATE_CLOSING, 0), + ('{"position": 0}', STATE_CLOSED, 0), ] for message, asserted_state, valve_position in messages: @@ -413,19 +417,19 @@ async def test_invalid_state_updates( @pytest.mark.parametrize( ("message", "asserted_state", "valve_position"), [ - ("-128", ValveState.CLOSED, 0), - ("0", ValveState.OPEN, 50), - ("127", ValveState.OPEN, 100), - ("-130", ValveState.CLOSED, 0), - ("130", ValveState.OPEN, 100), - ('{"position": -128, "state": "opening"}', ValveState.OPENING, 0), - ('{"position": -30, "state": "opening"}', ValveState.OPENING, 38), - ('{"position": 30, "state": "open"}', ValveState.OPEN, 61), - ('{"position": 127, "state": "closing"}', ValveState.CLOSING, 100), - ('{"position": 100, "state": "closing"}', ValveState.CLOSING, 89), - ('{"position": -128, "state": "closed"}', ValveState.CLOSED, 0), - ('{"position": -130, "state": "closed"}', ValveState.CLOSED, 0), - ('{"position": 130, "state": "open"}', ValveState.OPEN, 100), + ("-128", STATE_CLOSED, 0), + ("0", STATE_OPEN, 50), + ("127", STATE_OPEN, 100), + ("-130", STATE_CLOSED, 0), + ("130", STATE_OPEN, 100), + ('{"position": -128, "state": "opening"}', STATE_OPENING, 0), + ('{"position": -30, "state": "opening"}', STATE_OPENING, 38), + ('{"position": 30, "state": "open"}', STATE_OPEN, 61), + ('{"position": 127, "state": "closing"}', STATE_CLOSING, 100), + ('{"position": 100, "state": "closing"}', STATE_CLOSING, 89), + ('{"position": -128, "state": "closed"}', STATE_CLOSED, 0), + ('{"position": -130, "state": "closed"}', STATE_CLOSED, 0), + ('{"position": 130, "state": "open"}', STATE_OPEN, 100), ], ) async def test_state_via_state_trough_position_with_alt_range( @@ -629,8 +633,8 @@ async def test_open_close_payload_config_not_allowed( @pytest.mark.parametrize( ("service", "asserted_message", "asserted_state"), [ - (SERVICE_CLOSE_VALVE, "CLOSE", ValveState.CLOSED), - (SERVICE_OPEN_VALVE, "OPEN", ValveState.OPEN), + (SERVICE_CLOSE_VALVE, 
"CLOSE", STATE_CLOSED), + (SERVICE_OPEN_VALVE, "OPEN", STATE_OPEN), ], ) async def test_controlling_valve_by_state_optimistic( @@ -730,7 +734,11 @@ async def test_controlling_valve_by_position( ) @pytest.mark.parametrize( ("position", "asserted_message"), - [(0, "0"), (30, "30"), (100, "100")], + [ + (0, "0"), + (30, "30"), + (100, "100"), + ], ) async def test_controlling_valve_by_set_valve_position( hass: HomeAssistant, @@ -779,9 +787,9 @@ async def test_controlling_valve_by_set_valve_position( @pytest.mark.parametrize( ("position", "asserted_message", "asserted_position", "asserted_state"), [ - (0, "0", 0, ValveState.CLOSED), - (30, "30", 30, ValveState.OPEN), - (100, "100", 100, ValveState.OPEN), + (0, "0", 0, STATE_CLOSED), + (30, "30", 30, STATE_OPEN), + (100, "100", 100, STATE_OPEN), ], ) async def test_controlling_valve_optimistic_by_set_valve_position( @@ -834,7 +842,12 @@ async def test_controlling_valve_optimistic_by_set_valve_position( ) @pytest.mark.parametrize( ("position", "asserted_message"), - [(0, "-128"), (30, "-52"), (80, "76"), (100, "127")], + [ + (0, "-128"), + (30, "-52"), + (80, "76"), + (100, "127"), + ], ) async def test_controlling_valve_with_alt_range_by_set_valve_position( hass: HomeAssistant, @@ -944,8 +957,8 @@ async def test_controlling_valve_with_alt_range_by_position( @pytest.mark.parametrize( ("service", "asserted_message", "asserted_state", "asserted_position"), [ - (SERVICE_CLOSE_VALVE, "0", ValveState.CLOSED, 0), - (SERVICE_OPEN_VALVE, "100", ValveState.OPEN, 100), + (SERVICE_CLOSE_VALVE, "0", STATE_CLOSED, 0), + (SERVICE_OPEN_VALVE, "100", STATE_OPEN, 100), ], ) async def test_controlling_valve_by_position_optimistic( @@ -1001,10 +1014,10 @@ async def test_controlling_valve_by_position_optimistic( @pytest.mark.parametrize( ("position", "asserted_message", "asserted_position", "asserted_state"), [ - (0, "-128", 0, ValveState.CLOSED), - (30, "-52", 30, ValveState.OPEN), - (50, "0", 50, ValveState.OPEN), - (100, "127", 100, ValveState.OPEN), + (0, "-128", 0, STATE_CLOSED), + (30, "-52", 30, STATE_OPEN), + (50, "0", 50, STATE_OPEN), + (100, "127", 100, STATE_OPEN), ], ) async def test_controlling_valve_optimistic_alt_range_by_set_valve_position( @@ -1114,7 +1127,9 @@ async def test_valid_device_class( ], ) async def test_invalid_device_class( - mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test the setting of an invalid device class.""" assert await mqtt_mock_entry() @@ -1159,7 +1174,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, valve.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + valve.DOMAIN, + DEFAULT_CONFIG, ) @@ -1170,12 +1189,17 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, valve.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + valve.DOMAIN, + DEFAULT_CONFIG, ) async def test_discovery_update_attr( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( @@ -1362,7 +1386,8 @@ async def 
test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test reloading the MQTT platform.""" domain = valve.DOMAIN @@ -1414,7 +1439,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = valve.DOMAIN diff --git a/tests/components/mqtt/test_water_heater.py b/tests/components/mqtt/test_water_heater.py index 02ae54c1a85..849a1ac8785 100644 --- a/tests/components/mqtt/test_water_heater.py +++ b/tests/components/mqtt/test_water_heater.py @@ -141,7 +141,7 @@ async def test_get_operation_modes( await mqtt_mock_entry() state = hass.states.get(ENTITY_WATER_HEATER) - assert state.attributes.get("operation_list") == [ + assert [ STATE_ECO, STATE_ELECTRIC, STATE_GAS, @@ -149,12 +149,14 @@ async def test_get_operation_modes( STATE_HIGH_DEMAND, STATE_PERFORMANCE, STATE_OFF, - ] + ] == state.attributes.get("operation_list") @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) async def test_set_operation_mode_bad_attr_and_state( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test setting operation mode without required attribute.""" await mqtt_mock_entry() @@ -162,7 +164,7 @@ async def test_set_operation_mode_bad_attr_and_state( state = hass.states.get(ENTITY_WATER_HEATER) assert state.state == "off" with pytest.raises(vol.Invalid) as excinfo: - await common.async_set_operation_mode(hass, None, ENTITY_WATER_HEATER) # type:ignore[arg-type] + await common.async_set_operation_mode(hass, None, ENTITY_WATER_HEATER) assert "string value is None for dictionary value @ data['operation_mode']" in str( excinfo.value ) @@ -613,7 +615,8 @@ async def test_get_with_templates( ], ) async def test_set_and_templates( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test setting various attributes with templates.""" mqtt_mock = await mqtt_mock_entry() @@ -831,7 +834,11 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, mqtt_mock_entry, caplog, water_heater.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + water_heater.DOMAIN, + DEFAULT_CONFIG, ) @@ -842,7 +849,11 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, mqtt_mock_entry, caplog, water_heater.DOMAIN, DEFAULT_CONFIG + hass, + mqtt_mock_entry, + caplog, + water_heater.DOMAIN, + DEFAULT_CONFIG, ) @@ -1009,7 +1020,11 @@ async def test_entity_id_update_subscriptions( } } await help_test_entity_id_update_subscriptions( - hass, mqtt_mock_entry, water_heater.DOMAIN, config, ["test-topic", "avty-topic"] + hass, + mqtt_mock_entry, + water_heater.DOMAIN, + config, + ["test-topic", "avty-topic"], ) @@ -1185,7 +1200,8 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: 
MqttMockHAClientGenerator, ) -> None: """Test unloading the config entry.""" domain = water_heater.DOMAIN diff --git a/tests/components/mqtt_eventstream/test_init.py b/tests/components/mqtt_eventstream/test_init.py index b6c1940b149..82def7ef145 100644 --- a/tests/components/mqtt_eventstream/test_init.py +++ b/tests/components/mqtt_eventstream/test_init.py @@ -20,12 +20,7 @@ from tests.common import ( from tests.typing import MqttMockHAClient -async def add_eventstream( - hass: HomeAssistant, - sub_topic: str | None = None, - pub_topic: str | None = None, - ignore_event: list[str] | None = None, -) -> bool: +async def add_eventstream(hass, sub_topic=None, pub_topic=None, ignore_event=None): """Add a mqtt_eventstream component.""" config = {} if sub_topic: diff --git a/tests/components/mqtt_json/test_device_tracker.py b/tests/components/mqtt_json/test_device_tracker.py index c372a448d98..a992c985057 100644 --- a/tests/components/mqtt_json/test_device_tracker.py +++ b/tests/components/mqtt_json/test_device_tracker.py @@ -1,23 +1,21 @@ """The tests for the JSON MQTT device tracker platform.""" -from collections.abc import AsyncGenerator import json import logging import os from unittest.mock import patch import pytest +from typing_extensions import AsyncGenerator from homeassistant.components.device_tracker.legacy import ( DOMAIN as DT_DOMAIN, YAML_DEVICES, - AsyncSeeCallback, ) from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN from homeassistant.config_entries import ConfigEntryDisabler from homeassistant.const import CONF_PLATFORM from homeassistant.core import HomeAssistant -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import async_setup_component from tests.common import async_fire_mqtt_message @@ -73,15 +71,9 @@ async def test_setup_fails_without_mqtt_being_setup( async def test_ensure_device_tracker_platform_validation(hass: HomeAssistant) -> None: """Test if platform validation was done.""" - async def mock_setup_scanner( - hass: HomeAssistant, - config: ConfigType, - see: AsyncSeeCallback, - discovery_info: DiscoveryInfoType | None = None, - ) -> bool: + async def mock_setup_scanner(hass, config, see, discovery_info=None): """Check that Qos was added by validation.""" assert "qos" in config - return True with patch( "homeassistant.components.mqtt_json.device_tracker.async_setup_scanner", diff --git a/tests/components/mqtt_room/test_sensor.py b/tests/components/mqtt_room/test_sensor.py index 658dda4b6f8..e6fe7db3b8e 100644 --- a/tests/components/mqtt_room/test_sensor.py +++ b/tests/components/mqtt_room/test_sensor.py @@ -2,7 +2,6 @@ import datetime import json -from typing import Any from unittest.mock import patch import pytest @@ -41,22 +40,20 @@ FAR_MESSAGE = {"id": DEVICE_ID, "name": NAME, "distance": 10} REALLY_FAR_MESSAGE = {"id": DEVICE_ID, "name": NAME, "distance": 20} -async def send_message( - hass: HomeAssistant, topic: str, message: dict[str, Any] -) -> None: +async def send_message(hass, topic, message): """Test the sending of a message.""" async_fire_mqtt_message(hass, topic, json.dumps(message)) await hass.async_block_till_done() await hass.async_block_till_done() -async def assert_state(hass: HomeAssistant, room: str) -> None: +async def assert_state(hass, room): """Test the assertion of a room state.""" state = hass.states.get(SENSOR_STATE) assert state.state == room -async def assert_distance(hass: HomeAssistant, distance: int) -> None: +async def assert_distance(hass, distance): """Test the assertion of 
a distance state.""" state = hass.states.get(SENSOR_STATE) assert state.attributes.get("distance") == distance diff --git a/tests/components/music_assistant/__init__.py b/tests/components/music_assistant/__init__.py deleted file mode 100644 index 6893b862e2d..00000000000 --- a/tests/components/music_assistant/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""The tests for the Music Assistant component.""" diff --git a/tests/components/music_assistant/conftest.py b/tests/components/music_assistant/conftest.py deleted file mode 100644 index b03a56ab4a6..00000000000 --- a/tests/components/music_assistant/conftest.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Music Assistant test fixtures.""" - -from collections.abc import Generator -from unittest.mock import patch - -from music_assistant_models.api import ServerInfoMessage -import pytest - -from homeassistant.components.music_assistant.config_flow import CONF_URL -from homeassistant.components.music_assistant.const import DOMAIN - -from tests.common import AsyncMock, MockConfigEntry, load_fixture - - -@pytest.fixture -def mock_get_server_info() -> Generator[AsyncMock]: - """Mock the function to get server info.""" - with patch( - "homeassistant.components.music_assistant.config_flow.get_server_info" - ) as mock_get_server_info: - mock_get_server_info.return_value = ServerInfoMessage.from_json( - load_fixture("server_info_message.json", DOMAIN) - ) - yield mock_get_server_info - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title="Music Assistant", - data={CONF_URL: "http://localhost:8095"}, - unique_id="1234", - ) diff --git a/tests/components/music_assistant/fixtures/server_info_message.json b/tests/components/music_assistant/fixtures/server_info_message.json deleted file mode 100644 index 907ec8af820..00000000000 --- a/tests/components/music_assistant/fixtures/server_info_message.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "server_id": "1234", - "server_version": "0.0.0", - "schema_version": 23, - "min_supported_schema_version": 23, - "base_url": "http://localhost:8095", - "homeassistant_addon": false, - "onboard_done": false -} diff --git a/tests/components/music_assistant/test_config_flow.py b/tests/components/music_assistant/test_config_flow.py deleted file mode 100644 index c700060889c..00000000000 --- a/tests/components/music_assistant/test_config_flow.py +++ /dev/null @@ -1,217 +0,0 @@ -"""Define tests for the Music Assistant Integration config flow.""" - -from copy import deepcopy -from ipaddress import ip_address -from unittest import mock -from unittest.mock import AsyncMock - -from music_assistant_client.exceptions import ( - CannotConnect, - InvalidServerVersion, - MusicAssistantClientException, -) -from music_assistant_models.api import ServerInfoMessage -import pytest - -from homeassistant.components.music_assistant.config_flow import CONF_URL -from homeassistant.components.music_assistant.const import DEFAULT_NAME, DOMAIN -from homeassistant.components.zeroconf import ZeroconfServiceInfo -from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry, load_fixture - -SERVER_INFO = { - "server_id": "1234", - "base_url": "http://localhost:8095", - "server_version": "0.0.0", - "schema_version": 23, - "min_supported_schema_version": 23, - "homeassistant_addon": True, -} - -ZEROCONF_DATA = 
ZeroconfServiceInfo( - ip_address=ip_address("127.0.0.1"), - ip_addresses=[ip_address("127.0.0.1")], - hostname="mock_hostname", - port=None, - type=mock.ANY, - name=mock.ANY, - properties=SERVER_INFO, -) - - -async def test_full_flow( - hass: HomeAssistant, - mock_get_server_info: AsyncMock, -) -> None: - """Test full flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_URL: "http://localhost:8095"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DEFAULT_NAME - assert result["data"] == { - CONF_URL: "http://localhost:8095", - } - assert result["result"].unique_id == "1234" - - -async def test_zero_conf_flow( - hass: HomeAssistant, - mock_get_server_info: AsyncMock, -) -> None: - """Test zeroconf flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZEROCONF_DATA, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "discovery_confirm" - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DEFAULT_NAME - assert result["data"] == { - CONF_URL: "http://localhost:8095", - } - assert result["result"].unique_id == "1234" - - -async def test_zero_conf_missing_server_id( - hass: HomeAssistant, - mock_get_server_info: AsyncMock, -) -> None: - """Test zeroconf flow with missing server id.""" - bad_zero_conf_data = deepcopy(ZEROCONF_DATA) - bad_zero_conf_data.properties.pop("server_id") - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=bad_zero_conf_data, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "missing_server_id" - - -async def test_duplicate_user( - hass: HomeAssistant, - mock_get_server_info: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test duplicate user flow.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_URL: "http://localhost:8095"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_duplicate_zeroconf( - hass: HomeAssistant, - mock_get_server_info: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test duplicate zeroconf flow.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZEROCONF_DATA, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize( - ("exception", "error_message"), - [ - (InvalidServerVersion("invalid_server_version"), "invalid_server_version"), - (CannotConnect("cannot_connect"), "cannot_connect"), - (MusicAssistantClientException("unknown"), "unknown"), - ], -) -async def 
test_flow_user_server_version_invalid( - hass: HomeAssistant, - mock_get_server_info: AsyncMock, - exception: MusicAssistantClientException, - error_message: str, -) -> None: - """Test user flow when server url is invalid.""" - mock_get_server_info.side_effect = exception - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_URL: "http://localhost:8095"}, - ) - await hass.async_block_till_done() - assert result["errors"] == {"base": error_message} - - mock_get_server_info.side_effect = None - mock_get_server_info.return_value = ServerInfoMessage.from_json( - load_fixture("server_info_message.json", DOMAIN) - ) - - assert result["type"] is FlowResultType.FORM - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_URL: "http://localhost:8095"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_flow_zeroconf_connect_issue( - hass: HomeAssistant, - mock_get_server_info: AsyncMock, -) -> None: - """Test zeroconf flow when server connect be reached.""" - mock_get_server_info.side_effect = CannotConnect("cannot_connect") - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=ZEROCONF_DATA, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "cannot_connect" diff --git a/tests/components/mysensors/conftest.py b/tests/components/mysensors/conftest.py index 1d407815db0..f1b86c9ce5b 100644 --- a/tests/components/mysensors/conftest.py +++ b/tests/components/mysensors/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import AsyncGenerator, Callable, Generator +from collections.abc import Callable from copy import deepcopy import json from typing import Any @@ -12,6 +12,7 @@ from mysensors import BaseSyncGateway from mysensors.persistence import MySensorsJSONDecoder from mysensors.sensor import Sensor import pytest +from typing_extensions import AsyncGenerator, Generator from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN from homeassistant.components.mysensors.config_flow import DEFAULT_BAUD_RATE @@ -141,7 +142,7 @@ async def integration_fixture( config: dict[str, Any] = {} config_entry.add_to_hass(hass) with patch( - "homeassistant.components.mysensors.entity.Debouncer", autospec=True + "homeassistant.components.mysensors.device.Debouncer", autospec=True ) as debouncer_class: def debouncer( diff --git a/tests/components/mysensors/test_cover.py b/tests/components/mysensors/test_cover.py index a063aa8f8d8..e056bff80fa 100644 --- a/tests/components/mysensors/test_cover.py +++ b/tests/components/mysensors/test_cover.py @@ -15,7 +15,10 @@ from homeassistant.components.cover import ( SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, - CoverState, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, ) from homeassistant.const import ATTR_BATTERY_LEVEL, ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -33,7 +36,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 assert 
state.attributes[ATTR_BATTERY_LEVEL] == 0 @@ -54,7 +57,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 50 transport_write.reset_mock() @@ -76,7 +79,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 50 transport_write.reset_mock() @@ -99,7 +102,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 75 receive_message("1;1;1;0;29;0\n") @@ -109,7 +112,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 transport_write.reset_mock() @@ -131,7 +134,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING assert state.attributes[ATTR_CURRENT_POSITION] == 50 receive_message("1;1;1;0;30;0\n") @@ -141,7 +144,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 transport_write.reset_mock() @@ -162,7 +165,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 25 @@ -178,7 +181,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED await hass.services.async_call( COVER_DOMAIN, @@ -197,7 +200,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING transport_write.reset_mock() @@ -217,7 +220,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN transport_write.reset_mock() @@ -238,7 +241,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING receive_message("1;1;1;0;29;0\n") receive_message("1;1;1;0;2;1\n") @@ -247,7 +250,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN transport_write.reset_mock() @@ -267,7 +270,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING receive_message("1;1;1;0;30;0\n") receive_message("1;1;1;0;2;0\n") @@ -276,4 +279,4 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED diff --git a/tests/components/mystrom/conftest.py b/tests/components/mystrom/conftest.py index af8d80ed27e..f5405055805 100644 --- 
a/tests/components/mystrom/conftest.py +++ b/tests/components/mystrom/conftest.py @@ -1,9 +1,9 @@ """Provide common mystrom fixtures and mocks.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.mystrom.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/myuplink/conftest.py b/tests/components/myuplink/conftest.py index 9ede11146ef..dd05bedcaf4 100644 --- a/tests/components/myuplink/conftest.py +++ b/tests/components/myuplink/conftest.py @@ -1,6 +1,5 @@ """Test helpers for myuplink.""" -from collections.abc import AsyncGenerator, Generator import time from typing import Any from unittest.mock import MagicMock, patch @@ -8,6 +7,7 @@ from unittest.mock import MagicMock, patch from myuplink import Device, DevicePoint, System import orjson import pytest +from typing_extensions import AsyncGenerator, Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git a/tests/components/myuplink/fixtures/device_points_nibe_f730.json b/tests/components/myuplink/fixtures/device_points_nibe_f730.json index 99dd9c857e6..49340bd9e26 100644 --- a/tests/components/myuplink/fixtures/device_points_nibe_f730.json +++ b/tests/components/myuplink/fixtures/device_points_nibe_f730.json @@ -951,94 +951,5 @@ ], "scaleValue": "1", "zoneId": null - }, - { - "category": "NIBEF F730 CU 3x400V", - "parameterId": "47041", - "parameterName": "comfort mode", - "parameterUnit": "", - "writable": true, - "timestamp": "2024-05-22T15:02:03+00:00", - "value": 0, - "strVal": "economy", - "smartHomeCategories": [], - "minValue": null, - "maxValue": null, - "stepValue": 1, - "enumValues": [ - { - "value": "4", - "text": "smart control", - "icon": "" - }, - { - "value": "0", - "text": "economy", - "icon": "" - }, - { - "value": "1", - "text": "normal", - "icon": "" - }, - { - "value": "2", - "text": "luxury", - "icon": "" - } - ], - "scaleValue": "1", - "zoneId": null - }, - { - "category": "F730 CU 3x400V", - "parameterId": "147641", - "parameterName": "Start Wednesday", - "parameterUnit": "", - "writable": true, - "timestamp": "2024-10-18T09:52:01+00:00", - "value": 0, - "strVal": "0", - "smartHomeCategories": [], - "minValue": 0, - "maxValue": 86400, - "stepValue": 900, - "enumValues": [], - "scaleValue": "1", - "zoneId": null - }, - { - "category": "F730 CU 3x400V", - "parameterId": "148072", - "parameterName": "start diff additional heat", - "parameterUnit": "DM", - "writable": true, - "timestamp": "2024-10-18T09:51:39+00:00", - "value": 700, - "strVal": "700DM", - "smartHomeCategories": [], - "minValue": 100, - "maxValue": 2000, - "stepValue": 10, - "enumValues": [], - "scaleValue": "1", - "zoneId": null - }, - { - "category": "F730 CU 3x400V", - "parameterId": "47011", - "parameterName": "Heating offset climate system 1", - "parameterUnit": "", - "writable": true, - "timestamp": "2024-10-18T09:51:39+00:00", - "value": 1, - "strVal": "1", - "smartHomeCategories": ["sh-indoorSpOffsHeat"], - "minValue": -10, - "maxValue": 10, - "stepValue": 1, - "enumValues": [], - "scaleValue": "1", - "zoneId": null } ] diff --git a/tests/components/myuplink/fixtures/device_points_nibe_smo20.json b/tests/components/myuplink/fixtures/device_points_nibe_smo20.json index 9135862d991..b64869c236c 100644 --- a/tests/components/myuplink/fixtures/device_points_nibe_smo20.json +++ b/tests/components/myuplink/fixtures/device_points_nibe_smo20.json @@ -3996,9 
+3996,9 @@ "parameterUnit": "", "writable": true, "timestamp": "2024-02-14T08:36:05+00:00", - "value": 0.0, + "value": 0, "strVal": "economy", - "smartHomeCategories": ["test"], + "smartHomeCategories": [], "minValue": null, "maxValue": null, "stepValue": 1, diff --git a/tests/components/myuplink/snapshots/test_diagnostics.ambr b/tests/components/myuplink/snapshots/test_diagnostics.ambr index 1b3502c1f04..53664820364 100644 --- a/tests/components/myuplink/snapshots/test_diagnostics.ambr +++ b/tests/components/myuplink/snapshots/test_diagnostics.ambr @@ -1012,95 +1012,6 @@ ], "scaleValue": "1", "zoneId": null - }, - { - "category": "NIBEF F730 CU 3x400V", - "parameterId": "47041", - "parameterName": "comfort mode", - "parameterUnit": "", - "writable": true, - "timestamp": "2024-05-22T15:02:03+00:00", - "value": 0, - "strVal": "economy", - "smartHomeCategories": [], - "minValue": null, - "maxValue": null, - "stepValue": 1, - "enumValues": [ - { - "value": "4", - "text": "smart control", - "icon": "" - }, - { - "value": "0", - "text": "economy", - "icon": "" - }, - { - "value": "1", - "text": "normal", - "icon": "" - }, - { - "value": "2", - "text": "luxury", - "icon": "" - } - ], - "scaleValue": "1", - "zoneId": null - }, - { - "category": "F730 CU 3x400V", - "parameterId": "147641", - "parameterName": "Start Wednesday", - "parameterUnit": "", - "writable": true, - "timestamp": "2024-10-18T09:52:01+00:00", - "value": 0, - "strVal": "0", - "smartHomeCategories": [], - "minValue": 0, - "maxValue": 86400, - "stepValue": 900, - "enumValues": [], - "scaleValue": "1", - "zoneId": null - }, - { - "category": "F730 CU 3x400V", - "parameterId": "148072", - "parameterName": "start diff additional heat", - "parameterUnit": "DM", - "writable": true, - "timestamp": "2024-10-18T09:51:39+00:00", - "value": 700, - "strVal": "700DM", - "smartHomeCategories": [], - "minValue": 100, - "maxValue": 2000, - "stepValue": 10, - "enumValues": [], - "scaleValue": "1", - "zoneId": null - }, - { - "category": "F730 CU 3x400V", - "parameterId": "47011", - "parameterName": "Heating offset climate system 1", - "parameterUnit": "", - "writable": true, - "timestamp": "2024-10-18T09:51:39+00:00", - "value": 1, - "strVal": "1", - "smartHomeCategories": ["sh-indoorSpOffsHeat"], - "minValue": -10, - "maxValue": 10, - "stepValue": 1, - "enumValues": [], - "scaleValue": "1", - "zoneId": null } ] @@ -2106,95 +2017,6 @@ ], "scaleValue": "1", "zoneId": null - }, - { - "category": "NIBEF F730 CU 3x400V", - "parameterId": "47041", - "parameterName": "comfort mode", - "parameterUnit": "", - "writable": true, - "timestamp": "2024-05-22T15:02:03+00:00", - "value": 0, - "strVal": "economy", - "smartHomeCategories": [], - "minValue": null, - "maxValue": null, - "stepValue": 1, - "enumValues": [ - { - "value": "4", - "text": "smart control", - "icon": "" - }, - { - "value": "0", - "text": "economy", - "icon": "" - }, - { - "value": "1", - "text": "normal", - "icon": "" - }, - { - "value": "2", - "text": "luxury", - "icon": "" - } - ], - "scaleValue": "1", - "zoneId": null - }, - { - "category": "F730 CU 3x400V", - "parameterId": "147641", - "parameterName": "Start Wednesday", - "parameterUnit": "", - "writable": true, - "timestamp": "2024-10-18T09:52:01+00:00", - "value": 0, - "strVal": "0", - "smartHomeCategories": [], - "minValue": 0, - "maxValue": 86400, - "stepValue": 900, - "enumValues": [], - "scaleValue": "1", - "zoneId": null - }, - { - "category": "F730 CU 3x400V", - "parameterId": "148072", - "parameterName": "start diff 
additional heat", - "parameterUnit": "DM", - "writable": true, - "timestamp": "2024-10-18T09:51:39+00:00", - "value": 700, - "strVal": "700DM", - "smartHomeCategories": [], - "minValue": 100, - "maxValue": 2000, - "stepValue": 10, - "enumValues": [], - "scaleValue": "1", - "zoneId": null - }, - { - "category": "F730 CU 3x400V", - "parameterId": "47011", - "parameterName": "Heating offset climate system 1", - "parameterUnit": "", - "writable": true, - "timestamp": "2024-10-18T09:51:39+00:00", - "value": 1, - "strVal": "1", - "smartHomeCategories": ["sh-indoorSpOffsHeat"], - "minValue": -10, - "maxValue": 10, - "stepValue": 1, - "enumValues": [], - "scaleValue": "1", - "zoneId": null } ] diff --git a/tests/components/myuplink/test_config_flow.py b/tests/components/myuplink/test_config_flow.py index c24d26057de..3ae32575257 100644 --- a/tests/components/myuplink/test_config_flow.py +++ b/tests/components/myuplink/test_config_flow.py @@ -105,7 +105,14 @@ async def test_flow_reauth( assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/myuplink/test_number.py b/tests/components/myuplink/test_number.py index 4106af1b5b9..273c35ab749 100644 --- a/tests/components/myuplink/test_number.py +++ b/tests/components/myuplink/test_number.py @@ -14,9 +14,9 @@ from homeassistant.helpers import entity_registry as er TEST_PLATFORM = Platform.NUMBER pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) -ENTITY_ID = "number.gotham_city_heating_offset_climate_system_1" -ENTITY_FRIENDLY_NAME = "Gotham City Heating offset climate system 1" -ENTITY_UID = "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47011" +ENTITY_ID = "number.gotham_city_degree_minutes" +ENTITY_FRIENDLY_NAME = "Gotham City Degree minutes" +ENTITY_UID = "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40940" async def test_entity_registry( @@ -36,16 +36,17 @@ async def test_attributes( mock_myuplink_client: MagicMock, setup_platform: None, ) -> None: - """Test the entity attributes are correct.""" + """Test the switch attributes are correct.""" state = hass.states.get(ENTITY_ID) - assert state.state == "1.0" + assert state.state == "-875.0" assert state.attributes == { "friendly_name": ENTITY_FRIENDLY_NAME, - "min": -10.0, - "max": 10.0, + "min": -3000, + "max": 3000, "mode": "auto", "step": 1.0, + "unit_of_measurement": "DM", } @@ -59,7 +60,7 @@ async def test_set_value( await hass.services.async_call( TEST_PLATFORM, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: ENTITY_ID, "value": 1}, + {ATTR_ENTITY_ID: ENTITY_ID, "value": -125}, blocking=True, ) await hass.async_block_till_done() @@ -78,7 +79,7 @@ async def test_api_failure( await hass.services.async_call( TEST_PLATFORM, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: ENTITY_ID, "value": 1}, + {ATTR_ENTITY_ID: ENTITY_ID, "value": -125}, blocking=True, ) mock_myuplink_client.async_set_device_points.assert_called_once() diff --git a/tests/components/myuplink/test_select.py b/tests/components/myuplink/test_select.py deleted file mode 100644 index 7ad2d17cb5d..00000000000 --- a/tests/components/myuplink/test_select.py +++ /dev/null @@ -1,89 +0,0 @@ -"""Tests for myuplink select module.""" - -from unittest.mock import MagicMock - -from aiohttp import ClientError 
-import pytest - -from homeassistant.const import ( - ATTR_ENTITY_ID, - ATTR_OPTION, - SERVICE_SELECT_OPTION, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er - -TEST_PLATFORM = Platform.SELECT -pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) - -ENTITY_ID = "select.gotham_city_comfort_mode" -ENTITY_FRIENDLY_NAME = "Gotham City comfort mode" -ENTITY_UID = "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47041" - - -async def test_select_entity( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_myuplink_client: MagicMock, - setup_platform: None, -) -> None: - """Test that the entities are registered in the entity registry.""" - - entry = entity_registry.async_get(ENTITY_ID) - assert entry.unique_id == ENTITY_UID - - # Test the select attributes are correct. - - state = hass.states.get(ENTITY_ID) - assert state.state == "Economy" - assert state.attributes == { - "options": ["Smart control", "Economy", "Normal", "Luxury"], - "friendly_name": ENTITY_FRIENDLY_NAME, - } - - -async def test_selecting( - hass: HomeAssistant, - mock_myuplink_client: MagicMock, - setup_platform: None, -) -> None: - """Test select option service.""" - - await hass.services.async_call( - TEST_PLATFORM, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_OPTION: "Economy"}, - blocking=True, - ) - await hass.async_block_till_done() - mock_myuplink_client.async_set_device_points.assert_called_once() - - # Test handling of exception from API. - - mock_myuplink_client.async_set_device_points.side_effect = ClientError - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - TEST_PLATFORM, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_OPTION: "Economy"}, - blocking=True, - ) - assert mock_myuplink_client.async_set_device_points.call_count == 2 - - -@pytest.mark.parametrize( - "load_device_points_file", - ["device_points_nibe_smo20.json"], -) -async def test_entity_registry_smo20( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_myuplink_client: MagicMock, - setup_platform: None, -) -> None: - """Test that the entities are registered in the entity registry.""" - - entry = entity_registry.async_get("select.gotham_city_all") - assert entry.unique_id == "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47660" diff --git a/tests/components/nam/snapshots/test_diagnostics.ambr b/tests/components/nam/snapshots/test_diagnostics.ambr index e92e02fa1d8..c187dec2866 100644 --- a/tests/components/nam/snapshots/test_diagnostics.ambr +++ b/tests/components/nam/snapshots/test_diagnostics.ambr @@ -2,18 +2,18 @@ # name: test_entry_diagnostics dict({ 'data': dict({ - 'bme280_humidity': 45.69, - 'bme280_pressure': 1011.0117, - 'bme280_temperature': 7.56, - 'bmp180_pressure': 1032.0118, - 'bmp180_temperature': 7.56, - 'bmp280_pressure': 1022.0117999999999, - 'bmp280_temperature': 5.56, - 'dht22_humidity': 46.23, - 'dht22_temperature': 6.26, - 'ds18b20_temperature': 12.56, - 'heca_humidity': 49.97, - 'heca_temperature': 7.95, + 'bme280_humidity': 45.7, + 'bme280_pressure': 1011.012, + 'bme280_temperature': 7.6, + 'bmp180_pressure': 1032.012, + 'bmp180_temperature': 7.6, + 'bmp280_pressure': 1022.012, + 'bmp280_temperature': 5.6, + 'dht22_humidity': 46.2, + 'dht22_temperature': 6.3, + 'ds18b20_temperature': 12.6, + 'heca_humidity': 50.0, + 'heca_temperature': 8.0, 'mhz14a_carbon_dioxide': 865.0, 'pms_caqi': 19, 
'pms_caqi_level': 'very_low', @@ -22,17 +22,17 @@ 'pms_p2': 11.0, 'sds011_caqi': 19, 'sds011_caqi_level': 'very_low', - 'sds011_p1': 18.65, - 'sds011_p2': 11.03, - 'sht3x_humidity': 34.69, - 'sht3x_temperature': 6.28, + 'sds011_p1': 18.6, + 'sds011_p2': 11.0, + 'sht3x_humidity': 34.7, + 'sht3x_temperature': 6.3, 'signal': -72.0, 'sps30_caqi': 54, 'sps30_caqi_level': 'medium', - 'sps30_p0': 31.23, - 'sps30_p1': 21.23, - 'sps30_p2': 34.32, - 'sps30_p4': 24.72, + 'sps30_p0': 31.2, + 'sps30_p1': 21.2, + 'sps30_p2': 34.3, + 'sps30_p4': 24.7, 'uptime': 456987, }), 'info': dict({ diff --git a/tests/components/nam/snapshots/test_sensor.ambr b/tests/components/nam/snapshots/test_sensor.ambr index 16129c5d7ce..ea47998f3de 100644 --- a/tests/components/nam/snapshots/test_sensor.ambr +++ b/tests/components/nam/snapshots/test_sensor.ambr @@ -1,4 +1,51 @@ # serializer version: 1 +# name: test_sensor[button.nettigo_air_monitor_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.nettigo_air_monitor_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'aa:bb:cc:dd:ee:ff-restart', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[button.nettigo_air_monitor_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Nettigo Air Monitor Restart', + }), + 'context': , + 'entity_id': 'button.nettigo_air_monitor_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_sensor[sensor.nettigo_air_monitor_bme280_humidity-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -50,7 +97,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '45.69', + 'state': '45.7', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bme280_pressure-entry] @@ -104,7 +151,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1011.0117', + 'state': '1011.012', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bme280_temperature-entry] @@ -158,7 +205,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '7.56', + 'state': '7.6', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bmp180_pressure-entry] @@ -212,7 +259,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1032.0118', + 'state': '1032.012', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bmp180_temperature-entry] @@ -266,7 +313,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '7.56', + 'state': '7.6', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bmp280_pressure-entry] @@ -320,7 +367,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1022.0118', + 'state': '1022.012', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bmp280_temperature-entry] @@ -374,7 +421,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '5.56', + 'state': '5.6', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_dht22_humidity-entry] @@ -428,7 +475,7 @@ 'last_changed': , 'last_reported': , 
'last_updated': , - 'state': '46.23', + 'state': '46.2', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_dht22_temperature-entry] @@ -482,7 +529,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6.26', + 'state': '6.3', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_ds18b20_temperature-entry] @@ -536,7 +583,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '12.56', + 'state': '12.6', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_heca_humidity-entry] @@ -590,7 +637,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '49.97', + 'state': '50.0', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_heca_temperature-entry] @@ -644,7 +691,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '7.95', + 'state': '8.0', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_last_restart-entry] @@ -1177,7 +1224,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '18.65', + 'state': '18.6', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sds011_pm2_5-entry] @@ -1231,7 +1278,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '11.03', + 'state': '11.0', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sht3x_humidity-entry] @@ -1285,7 +1332,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '34.69', + 'state': '34.7', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sht3x_temperature-entry] @@ -1339,7 +1386,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6.28', + 'state': '6.3', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_signal_strength-entry] @@ -1555,7 +1602,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '31.23', + 'state': '31.2', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sps30_pm10-entry] @@ -1609,7 +1656,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '21.23', + 'state': '21.2', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sps30_pm2_5-entry] @@ -1663,7 +1710,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '34.32', + 'state': '34.3', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sps30_pm4-entry] @@ -1716,6 +1763,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '24.72', + 'state': '24.7', }) # --- diff --git a/tests/components/nam/test_config_flow.py b/tests/components/nam/test_config_flow.py index 6c11399c888..b96eddfd18b 100644 --- a/tests/components/nam/test_config_flow.py +++ b/tests/components/nam/test_config_flow.py @@ -8,7 +8,12 @@ import pytest from homeassistant.components import zeroconf from homeassistant.components.nam.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + SOURCE_USER, + SOURCE_ZEROCONF, +) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -117,9 +122,6 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: data={"host": "10.10.2.3"}, ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" with ( patch( @@ -131,6 +133,15 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: return_value="aa:bb:cc:dd:ee:ff", ), ): + result = await 
hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, + data=entry.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=VALID_AUTH, @@ -149,14 +160,20 @@ async def test_reauth_unsuccessful(hass: HomeAssistant) -> None: data={"host": "10.10.2.3"}, ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.nam.NettigoAirMonitor.async_check_credentials", side_effect=ApiError("API Error"), ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, + data=entry.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=VALID_AUTH, @@ -442,10 +459,17 @@ async def test_reconfigure_successful(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" with ( patch( @@ -485,10 +509,17 @@ async def test_reconfigure_not_successful(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" with patch( "homeassistant.components.nam.NettigoAirMonitor.async_check_credentials", @@ -500,7 +531,7 @@ async def test_reconfigure_not_successful(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" assert result["errors"] == {"base": "cannot_connect"} with ( @@ -541,10 +572,17 @@ async def test_reconfigure_not_the_same_device(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" with ( patch( diff --git a/tests/components/nam/test_init.py b/tests/components/nam/test_init.py index 13bde1432b3..8b8c3a4835a 100644 --- a/tests/components/nam/test_init.py +++ b/tests/components/nam/test_init.py @@ -23,7 +23,7 @@ async def test_async_setup_entry(hass: HomeAssistant) -> None: state = hass.states.get("sensor.nettigo_air_monitor_sds011_pm2_5") assert state is not None assert state.state != STATE_UNAVAILABLE - assert state.state == "11.03" + assert state.state == "11.0" async def test_config_not_ready(hass: HomeAssistant) -> None: diff --git a/tests/components/nam/test_sensor.py 
b/tests/components/nam/test_sensor.py index 6924af48f01..53945e1c8a2 100644 --- a/tests/components/nam/test_sensor.py +++ b/tests/components/nam/test_sensor.py @@ -77,7 +77,7 @@ async def test_incompleta_data_after_device_restart(hass: HomeAssistant) -> None state = hass.states.get("sensor.nettigo_air_monitor_heca_temperature") assert state - assert state.state == "7.95" + assert state.state == "8.0" assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS @@ -110,7 +110,7 @@ async def test_availability( state = hass.states.get("sensor.nettigo_air_monitor_bme280_temperature") assert state assert state.state != STATE_UNAVAILABLE - assert state.state == "7.56" + assert state.state == "7.6" with ( patch("homeassistant.components.nam.NettigoAirMonitor.initialize"), @@ -142,7 +142,7 @@ async def test_availability( state = hass.states.get("sensor.nettigo_air_monitor_bme280_temperature") assert state assert state.state != STATE_UNAVAILABLE - assert state.state == "7.56" + assert state.state == "7.6" async def test_manual_update_entity(hass: HomeAssistant) -> None: diff --git a/tests/components/nanoleaf/test_config_flow.py b/tests/components/nanoleaf/test_config_flow.py index 97a314b0bf4..eaa1c60dcd4 100644 --- a/tests/components/nanoleaf/test_config_flow.py +++ b/tests/components/nanoleaf/test_config_flow.py @@ -297,7 +297,15 @@ async def test_reauth(hass: HomeAssistant) -> None: return_value=True, ), ): - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "link" diff --git a/tests/components/nasweb/__init__.py b/tests/components/nasweb/__init__.py deleted file mode 100644 index d4906d710d5..00000000000 --- a/tests/components/nasweb/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the NASweb integration.""" diff --git a/tests/components/nasweb/conftest.py b/tests/components/nasweb/conftest.py deleted file mode 100644 index 7757f40ee44..00000000000 --- a/tests/components/nasweb/conftest.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Common fixtures for the NASweb tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.nasweb.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -BASE_CONFIG_FLOW = "homeassistant.components.nasweb.config_flow." -BASE_NASWEB_DATA = "homeassistant.components.nasweb.nasweb_data." -BASE_COORDINATOR = "homeassistant.components.nasweb.coordinator." 
-TEST_SERIAL_NUMBER = "0011223344556677" - - -@pytest.fixture -def validate_input_all_ok() -> Generator[dict[str, AsyncMock | MagicMock]]: - """Yield dictionary of mocked functions required for successful test_form execution.""" - with ( - patch( - BASE_CONFIG_FLOW + "WebioAPI.check_connection", - return_value=True, - ) as check_connection, - patch( - BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info", - return_value=True, - ) as refresh_device_info, - patch( - BASE_NASWEB_DATA + "NASwebData.get_webhook_url", - return_value="http://127.0.0.1:8123/api/webhook/de705e77291402afa0dd961426e9f19bb53631a9f2a106c52cfd2d2266913c04", - ) as get_webhook_url, - patch( - BASE_CONFIG_FLOW + "WebioAPI.get_serial_number", - return_value=TEST_SERIAL_NUMBER, - ) as get_serial, - patch( - BASE_CONFIG_FLOW + "WebioAPI.status_subscription", - return_value=True, - ) as status_subscription, - patch( - BASE_NASWEB_DATA + "NotificationCoordinator.check_connection", - return_value=True, - ) as check_status_confirmation, - ): - yield { - BASE_CONFIG_FLOW + "WebioAPI.check_connection": check_connection, - BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info": refresh_device_info, - BASE_NASWEB_DATA + "NASwebData.get_webhook_url": get_webhook_url, - BASE_CONFIG_FLOW + "WebioAPI.get_serial_number": get_serial, - BASE_CONFIG_FLOW + "WebioAPI.status_subscription": status_subscription, - BASE_NASWEB_DATA - + "NotificationCoordinator.check_connection": check_status_confirmation, - } diff --git a/tests/components/nasweb/test_config_flow.py b/tests/components/nasweb/test_config_flow.py deleted file mode 100644 index a5f2dca680d..00000000000 --- a/tests/components/nasweb/test_config_flow.py +++ /dev/null @@ -1,208 +0,0 @@ -"""Test the NASweb config flow.""" - -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -from webio_api.api_client import AuthError - -from homeassistant import config_entries -from homeassistant.components.nasweb.const import DOMAIN -from homeassistant.config_entries import ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.network import NoURLAvailableError - -from .conftest import ( - BASE_CONFIG_FLOW, - BASE_COORDINATOR, - BASE_NASWEB_DATA, - TEST_SERIAL_NUMBER, -) - -pytestmark = pytest.mark.usefixtures("mock_setup_entry") - - -TEST_USER_INPUT = { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", -} - - -async def _add_test_config_entry(hass: HomeAssistant) -> ConfigFlowResult: - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result.get("type") == FlowResultType.FORM - assert not result.get("errors") - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - await hass.async_block_till_done() - return result2 - - -async def test_form( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test the form.""" - result = await _add_test_config_entry(hass) - - assert result.get("type") == FlowResultType.CREATE_ENTRY - assert result.get("title") == "1.1.1.1" - assert result.get("data") == TEST_USER_INPUT - - config_entry = result.get("result") - assert config_entry is not None - assert config_entry.unique_id == TEST_SERIAL_NUMBER - assert len(mock_setup_entry.mock_calls) == 1 - - -async def 
test_form_cannot_connect( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test cannot connect error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch(BASE_CONFIG_FLOW + "WebioAPI.check_connection", return_value=False): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "cannot_connect"} - - -async def test_form_invalid_auth( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test invalid auth.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch( - BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info", - side_effect=AuthError, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "invalid_auth"} - - -async def test_form_missing_internal_url( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test missing internal url.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch( - BASE_NASWEB_DATA + "NASwebData.get_webhook_url", side_effect=NoURLAvailableError - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "missing_internal_url"} - - -async def test_form_missing_nasweb_data( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test invalid auth.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch( - BASE_CONFIG_FLOW + "WebioAPI.get_serial_number", - return_value=None, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "missing_nasweb_data"} - with patch(BASE_CONFIG_FLOW + "WebioAPI.status_subscription", return_value=False): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "missing_nasweb_data"} - - -async def test_missing_status( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test missing status update.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch( - BASE_COORDINATOR + "NotificationCoordinator.check_connection", - return_value=False, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "missing_status"} - - -async def test_form_exception( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test other exceptions.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - 
with patch( - "homeassistant.components.nasweb.config_flow.validate_input", - side_effect=Exception, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "unknown"} - - -async def test_form_already_configured( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test already configured device.""" - result = await _add_test_config_entry(hass) - config_entry = result.get("result") - assert config_entry is not None - assert config_entry.unique_id == TEST_SERIAL_NUMBER - - result2_1 = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - result2_2 = await hass.config_entries.flow.async_configure( - result2_1["flow_id"], TEST_USER_INPUT - ) - await hass.async_block_till_done() - - assert result2_2.get("type") == FlowResultType.ABORT - assert result2_2.get("reason") == "already_configured" diff --git a/tests/components/neato/test_config_flow.py b/tests/components/neato/test_config_flow.py index c5289927d91..1b86c4e9980 100644 --- a/tests/components/neato/test_config_flow.py +++ b/tests/components/neato/test_config_flow.py @@ -111,15 +111,16 @@ async def test_reauth( hass, NEATO_DOMAIN, ClientCredential(CLIENT_ID, CLIENT_SECRET) ) - entry = MockConfigEntry( + MockConfigEntry( entry_id="my_entry", domain=NEATO_DOMAIN, data={"username": "abcdef", "password": "123456", "vendor": "neato"}, - ) - entry.add_to_hass(hass) + ).add_to_hass(hass) # Should show form - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + "neato", context={"source": config_entries.SOURCE_REAUTH} + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/ness_alarm/test_init.py b/tests/components/ness_alarm/test_init.py index 48821d3e68d..fb003d253de 100644 --- a/tests/components/ness_alarm/test_init.py +++ b/tests/components/ness_alarm/test_init.py @@ -6,7 +6,6 @@ from nessclient import ArmingMode, ArmingState import pytest from homeassistant.components import alarm_control_panel -from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.ness_alarm import ( ATTR_CODE, ATTR_OUTPUT_ID, @@ -25,6 +24,13 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMED, + STATE_ALARM_PENDING, + STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -84,9 +90,7 @@ async def test_dispatch_state_change(hass: HomeAssistant, mock_nessclient) -> No on_state_change(ArmingState.ARMING, None) await hass.async_block_till_done() - assert hass.states.is_state( - "alarm_control_panel.alarm_panel", AlarmControlPanelState.ARMING - ) + assert hass.states.is_state("alarm_control_panel.alarm_panel", STATE_ALARM_ARMING) async def test_alarm_disarm(hass: HomeAssistant, mock_nessclient) -> None: @@ -174,27 +178,15 @@ async def test_arming_state_change(hass: HomeAssistant, mock_nessclient) -> None """Test arming state change handing.""" states = [ (ArmingState.UNKNOWN, None, STATE_UNKNOWN), - (ArmingState.DISARMED, None, AlarmControlPanelState.DISARMED), - (ArmingState.ARMING, None, AlarmControlPanelState.ARMING), - (ArmingState.EXIT_DELAY, None, 
AlarmControlPanelState.ARMING), - (ArmingState.ARMED, None, AlarmControlPanelState.ARMED_AWAY), - ( - ArmingState.ARMED, - ArmingMode.ARMED_AWAY, - AlarmControlPanelState.ARMED_AWAY, - ), - ( - ArmingState.ARMED, - ArmingMode.ARMED_HOME, - AlarmControlPanelState.ARMED_HOME, - ), - ( - ArmingState.ARMED, - ArmingMode.ARMED_NIGHT, - AlarmControlPanelState.ARMED_NIGHT, - ), - (ArmingState.ENTRY_DELAY, None, AlarmControlPanelState.PENDING), - (ArmingState.TRIGGERED, None, AlarmControlPanelState.TRIGGERED), + (ArmingState.DISARMED, None, STATE_ALARM_DISARMED), + (ArmingState.ARMING, None, STATE_ALARM_ARMING), + (ArmingState.EXIT_DELAY, None, STATE_ALARM_ARMING), + (ArmingState.ARMED, None, STATE_ALARM_ARMED_AWAY), + (ArmingState.ARMED, ArmingMode.ARMED_AWAY, STATE_ALARM_ARMED_AWAY), + (ArmingState.ARMED, ArmingMode.ARMED_HOME, STATE_ALARM_ARMED_HOME), + (ArmingState.ARMED, ArmingMode.ARMED_NIGHT, STATE_ALARM_ARMED_NIGHT), + (ArmingState.ENTRY_DELAY, None, STATE_ALARM_PENDING), + (ArmingState.TRIGGERED, None, STATE_ALARM_TRIGGERED), ] await async_setup_component(hass, DOMAIN, VALID_CONFIG) diff --git a/tests/components/nest/common.py b/tests/components/nest/common.py index f34c40e09f9..bbaa92b7b28 100644 --- a/tests/components/nest/common.py +++ b/tests/components/nest/common.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable, Generator +from collections.abc import Awaitable, Callable import copy from dataclasses import dataclass, field import time @@ -14,6 +14,7 @@ from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.event import EventMessage from google_nest_sdm.event_media import CachePolicy from google_nest_sdm.google_nest_subscriber import GoogleNestSubscriber +from typing_extensions import Generator from homeassistant.components.application_credentials import ClientCredential from homeassistant.components.nest import DOMAIN @@ -30,7 +31,6 @@ CLIENT_ID = "some-client-id" CLIENT_SECRET = "some-client-secret" CLOUD_PROJECT_ID = "cloud-id-9876" SUBSCRIBER_ID = "projects/cloud-id-9876/subscriptions/subscriber-id-9876" -SUBSCRIPTION_NAME = "projects/cloud-id-9876/subscriptions/subscriber-id-9876" @dataclass @@ -87,27 +87,15 @@ TEST_CONFIG_ENTRY_LEGACY = NestTestConfig( }, ) -TEST_CONFIG_NEW_SUBSCRIPTION = NestTestConfig( - config_entry_data={ - "sdm": {}, - "project_id": PROJECT_ID, - "cloud_project_id": CLOUD_PROJECT_ID, - "subscription_name": SUBSCRIPTION_NAME, - "auth_implementation": "imported-cred", - }, - credential=ClientCredential(CLIENT_ID, CLIENT_SECRET), -) - class FakeSubscriber(GoogleNestSubscriber): """Fake subscriber that supplies a FakeDeviceManager.""" stop_calls = 0 - def __init__(self) -> None: # pylint: disable=super-init-not-called + def __init__(self): # pylint: disable=super-init-not-called """Initialize Fake Subscriber.""" self._device_manager = DeviceManager() - self._subscriber_name = "fake-name" def set_update_callback(self, target: Callable[[EventMessage], Awaitable[None]]): """Capture the callback set by Home Assistant.""" diff --git a/tests/components/nest/conftest.py b/tests/components/nest/conftest.py index b070d025612..de0fc2079fa 100644 --- a/tests/components/nest/conftest.py +++ b/tests/components/nest/conftest.py @@ -3,7 +3,6 @@ from __future__ import annotations from asyncio import AbstractEventLoop -from collections.abc import Generator import copy import shutil import time @@ -16,13 +15,13 @@ from google_nest_sdm import diagnostics from google_nest_sdm.auth import AbstractAuth from 
google_nest_sdm.device_manager import DeviceManager import pytest +from typing_extensions import Generator from homeassistant.components.application_credentials import ( async_import_client_credential, ) from homeassistant.components.nest import DOMAIN from homeassistant.components.nest.const import CONF_SUBSCRIBER_ID, SDM_SCOPES -from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -54,7 +53,7 @@ class FakeAuth(AbstractAuth): from the API. """ - def __init__(self) -> None: + def __init__(self): """Initialize FakeAuth.""" super().__init__(None, None) # Tests can set fake responses here. @@ -110,7 +109,7 @@ async def auth(aiohttp_client: ClientSessionGenerator) -> FakeAuth: @pytest.fixture(autouse=True) -def cleanup_media_storage(hass: HomeAssistant) -> Generator[None]: +def cleanup_media_storage(hass): """Test cleanup, remove any media storage persisted during the test.""" tmp_path = str(uuid.uuid4()) with patch("homeassistant.components.nest.media_source.MEDIA_PATH", new=tmp_path): @@ -288,8 +287,6 @@ async def setup_base_platform( await hass.async_block_till_done() yield _setup_func - if config_entry and config_entry.state == ConfigEntryState.LOADED: - await hass.config_entries.async_unload(config_entry.entry_id) @pytest.fixture diff --git a/tests/components/nest/test_api.py b/tests/components/nest/test_api.py index a13d4d3a337..fd07233fa8c 100644 --- a/tests/components/nest/test_api.py +++ b/tests/components/nest/test_api.py @@ -18,7 +18,7 @@ from homeassistant.components.nest.const import API_URL, OAUTH2_TOKEN, SDM_SCOPE from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util -from .common import CLIENT_ID, CLIENT_SECRET, PROJECT_ID, FakeSubscriber, PlatformSetup +from .common import CLIENT_ID, CLIENT_SECRET, PROJECT_ID, PlatformSetup from .conftest import FAKE_REFRESH_TOKEN, FAKE_TOKEN from tests.test_util.aiohttp import AiohttpClientMocker @@ -27,7 +27,7 @@ FAKE_UPDATED_TOKEN = "fake-updated-token" @pytest.fixture -def subscriber() -> FakeSubscriber | None: +def subscriber() -> None: """Disable default subscriber since tests use their own patch.""" return None diff --git a/tests/components/nest/test_camera.py b/tests/components/nest/test_camera.py index 029879f1413..1838c18b6d4 100644 --- a/tests/components/nest/test_camera.py +++ b/tests/components/nest/test_camera.py @@ -4,7 +4,6 @@ These tests fake out the subscriber/devicemanager, and are not using a real pubsub subscriber. 
""" -from collections.abc import Generator import datetime from http import HTTPStatus from unittest.mock import AsyncMock, Mock, patch @@ -13,9 +12,10 @@ import aiohttp from freezegun import freeze_time from google_nest_sdm.event import EventMessage import pytest +from typing_extensions import Generator from homeassistant.components import camera -from homeassistant.components.camera import CameraState, StreamType +from homeassistant.components.camera import STATE_IDLE, STATE_STREAMING, StreamType from homeassistant.components.nest.const import DOMAIN from homeassistant.components.websocket_api import TYPE_RESULT from homeassistant.const import ATTR_FRIENDLY_NAME @@ -28,7 +28,7 @@ from .common import DEVICE_ID, CreateDevice, FakeSubscriber, PlatformSetup from .conftest import FakeAuth from tests.common import async_fire_time_changed -from tests.typing import MockHAClientWebSocket, WebSocketGenerator +from tests.typing import WebSocketGenerator PLATFORM = "camera" CAMERA_DEVICE_TYPE = "sdm.devices.types.CAMERA" @@ -165,9 +165,7 @@ async def mock_create_stream(hass: HomeAssistant) -> Generator[AsyncMock]: yield mock_stream -async def async_get_image( - hass: HomeAssistant, width: int | None = None, height: int | None = None -) -> bytes: +async def async_get_image(hass, width=None, height=None): """Get the camera image.""" image = await camera.async_get_image( hass, "camera.my_camera", width=width, height=height @@ -176,31 +174,7 @@ async def async_get_image( return image.content -def get_frontend_stream_type_attribute( - hass: HomeAssistant, entity_id: str -) -> StreamType: - """Get the frontend_stream_type camera attribute.""" - cam = hass.states.get(entity_id) - assert cam is not None - assert cam.state == CameraState.STREAMING - return cam.attributes.get("frontend_stream_type") - - -async def async_frontend_stream_types( - client: MockHAClientWebSocket, entity_id: str -) -> list[str] | None: - """Get the frontend stream types supported.""" - await client.send_json_auto_id( - {"type": "camera/capabilities", "entity_id": entity_id} - ) - msg = await client.receive_json() - assert msg.get("type") == TYPE_RESULT - assert msg.get("success") - assert msg.get("result") - return msg["result"].get("frontend_stream_types") - - -async def fire_alarm(hass: HomeAssistant, point_in_time: datetime.datetime) -> None: +async def fire_alarm(hass, point_in_time): """Fire an alarm and wait for callbacks to run.""" with freeze_time(point_in_time): async_fire_time_changed(hass, point_in_time) @@ -242,7 +216,7 @@ async def test_camera_device( assert len(hass.states.async_all()) == 1 camera = hass.states.get("camera.my_camera") assert camera is not None - assert camera.state == CameraState.STREAMING + assert camera.state == STATE_STREAMING assert camera.attributes.get(ATTR_FRIENDLY_NAME) == "My Camera" entry = entity_registry.async_get("camera.my_camera") @@ -261,21 +235,16 @@ async def test_camera_stream( camera_device: None, auth: FakeAuth, mock_create_stream: Mock, - hass_ws_client: WebSocketGenerator, ) -> None: """Test a basic camera and fetch its live stream.""" auth.responses = [make_stream_url_response()] await setup_platform() assert len(hass.states.async_all()) == 1 - assert ( - get_frontend_stream_type_attribute(hass, "camera.my_camera") == StreamType.HLS - ) - client = await hass_ws_client(hass) - frontend_stream_types = await async_frontend_stream_types( - client, "camera.my_camera" - ) - assert frontend_stream_types == [StreamType.HLS] + cam = hass.states.get("camera.my_camera") + assert cam is 
not None + assert cam.state == STATE_STREAMING + assert cam.attributes["frontend_stream_type"] == StreamType.HLS stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") assert stream_source == "rtsp://some/url?auth=g.0.streamingToken" @@ -294,16 +263,12 @@ async def test_camera_ws_stream( await setup_platform() assert len(hass.states.async_all()) == 1 - assert ( - get_frontend_stream_type_attribute(hass, "camera.my_camera") == StreamType.HLS - ) + cam = hass.states.get("camera.my_camera") + assert cam is not None + assert cam.state == STATE_STREAMING + assert cam.attributes["frontend_stream_type"] == StreamType.HLS client = await hass_ws_client(hass) - frontend_stream_types = await async_frontend_stream_types( - client, "camera.my_camera" - ) - assert frontend_stream_types == [StreamType.HLS] - await client.send_json( { "id": 2, @@ -333,7 +298,7 @@ async def test_camera_ws_stream_failure( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == CameraState.STREAMING + assert cam.state == STATE_STREAMING client = await hass_ws_client(hass) await client.send_json( @@ -355,7 +320,7 @@ async def test_camera_ws_stream_failure( async def test_camera_stream_missing_trait( hass: HomeAssistant, setup_platform, create_device ) -> None: - """Test that cameras missing a live stream are not supported.""" + """Test fetching a video stream when not supported by the API.""" create_device.create( { "sdm.devices.traits.Info": { @@ -371,7 +336,16 @@ async def test_camera_stream_missing_trait( ) await setup_platform() - assert len(hass.states.async_all()) == 0 + assert len(hass.states.async_all()) == 1 + cam = hass.states.get("camera.my_camera") + assert cam is not None + assert cam.state == STATE_IDLE + + stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") + assert stream_source is None + + # Fallback to placeholder image + await async_get_image(hass) async def test_refresh_expired_stream_token( @@ -399,7 +373,7 @@ async def test_refresh_expired_stream_token( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == CameraState.STREAMING + assert cam.state == STATE_STREAMING # Request a stream for the camera entity to exercise nest cam + camera interaction # and shutdown on url expiration @@ -470,7 +444,7 @@ async def test_stream_response_already_expired( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == CameraState.STREAMING + assert cam.state == STATE_STREAMING # The stream is expired, but we return it anyway stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") @@ -483,50 +457,6 @@ async def test_stream_response_already_expired( assert stream_source == "rtsp://some/url?auth=g.2.streamingToken" -async def test_extending_stream_already_expired( - hass: HomeAssistant, - auth: FakeAuth, - setup_platform: PlatformSetup, - camera_device: None, -) -> None: - """Test a API response when extending the stream returns an expired stream url.""" - now = utcnow() - stream_1_expiration = now + datetime.timedelta(seconds=180) - stream_2_expiration = now + datetime.timedelta(seconds=30) # Will be in the past - stream_3_expiration = now + datetime.timedelta(seconds=600) - auth.responses = [ - make_stream_url_response(stream_1_expiration, token_num=1), - make_stream_url_response(stream_2_expiration, token_num=2), - 
make_stream_url_response(stream_3_expiration, token_num=3), - ] - await setup_platform() - - assert len(hass.states.async_all()) == 1 - cam = hass.states.get("camera.my_camera") - assert cam is not None - assert cam.state == CameraState.STREAMING - - # The stream is expired, but we return it anyway - stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") - assert stream_source == "rtsp://some/url?auth=g.1.streamingToken" - - # Jump to when the stream will be refreshed - await fire_alarm(hass, now + datetime.timedelta(seconds=160)) - stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") - assert stream_source == "rtsp://some/url?auth=g.2.streamingToken" - - # The stream will have expired in the past, but 1 minute min refresh interval is applied. - # The stream token is not updated. - await fire_alarm(hass, now + datetime.timedelta(seconds=170)) - stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") - assert stream_source == "rtsp://some/url?auth=g.2.streamingToken" - - # Now go past the min update interval and the stream is refreshed - await fire_alarm(hass, now + datetime.timedelta(seconds=225)) - stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") - assert stream_source == "rtsp://some/url?auth=g.3.streamingToken" - - async def test_camera_removed( hass: HomeAssistant, auth: FakeAuth, @@ -542,7 +472,7 @@ async def test_camera_removed( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == CameraState.STREAMING + assert cam.state == STATE_STREAMING # Start a stream, exercising cleanup on remove auth.responses = [ @@ -570,7 +500,7 @@ async def test_camera_remove_failure( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == CameraState.STREAMING + assert cam.state == STATE_STREAMING # Start a stream, exercising cleanup on remove auth.responses = [ @@ -611,7 +541,7 @@ async def test_refresh_expired_stream_failure( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == CameraState.STREAMING + assert cam.state == STATE_STREAMING # Request an HLS stream with patch("homeassistant.components.camera.create_stream") as create_stream: @@ -645,11 +575,11 @@ async def test_refresh_expired_stream_failure( assert create_stream.called -@pytest.mark.usefixtures("webrtc_camera_device") async def test_camera_web_rtc( hass: HomeAssistant, auth, hass_ws_client: WebSocketGenerator, + webrtc_camera_device, setup_platform, ) -> None: """Test a basic camera that supports web rtc.""" @@ -670,47 +600,35 @@ async def test_camera_web_rtc( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == CameraState.STREAMING + assert cam.state == STATE_STREAMING assert cam.attributes["frontend_stream_type"] == StreamType.WEB_RTC client = await hass_ws_client(hass) - await client.send_json_auto_id( + await client.send_json( { - "type": "camera/webrtc/offer", + "id": 5, + "type": "camera/web_rtc_offer", "entity_id": "camera.my_camera", "offer": "a=recvonly", } ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - 
assert response["event"]["type"] == "session" - - # Answer - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "answer", - "answer": "v=0\r\ns=-\r\n", - } + msg = await client.receive_json() + assert msg["id"] == 5 + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert msg["result"]["answer"] == "v=0\r\ns=-\r\n" # Nest WebRTC cameras return a placeholder await async_get_image(hass) await async_get_image(hass, width=1024, height=768) -@pytest.mark.usefixtures("auth", "camera_device") async def test_camera_web_rtc_unsupported( hass: HomeAssistant, + auth, hass_ws_client: WebSocketGenerator, + camera_device, setup_platform, ) -> None: """Test a basic camera that supports web rtc.""" @@ -719,41 +637,32 @@ async def test_camera_web_rtc_unsupported( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == CameraState.STREAMING + assert cam.state == STATE_STREAMING assert cam.attributes["frontend_stream_type"] == StreamType.HLS client = await hass_ws_client(hass) - await client.send_json_auto_id( - {"type": "camera/capabilities", "entity_id": "camera.my_camera"} - ) - msg = await client.receive_json() - - assert msg["type"] == TYPE_RESULT - assert msg["success"] - assert msg["result"] == {"frontend_stream_types": ["hls"]} - - await client.send_json_auto_id( + await client.send_json( { - "type": "camera/webrtc/offer", + "id": 5, + "type": "camera/web_rtc_offer", "entity_id": "camera.my_camera", "offer": "a=recvonly", } ) msg = await client.receive_json() + assert msg["id"] == 5 assert msg["type"] == TYPE_RESULT assert not msg["success"] - assert msg["error"] == { - "code": "webrtc_offer_failed", - "message": "Camera does not support WebRTC, frontend_stream_type=hls", - } + assert msg["error"]["code"] == "web_rtc_offer_failed" + assert msg["error"]["message"].startswith("Camera does not support WebRTC") -@pytest.mark.usefixtures("webrtc_camera_device") async def test_camera_web_rtc_offer_failure( hass: HomeAssistant, auth, hass_ws_client: WebSocketGenerator, + webrtc_camera_device, setup_platform, ) -> None: """Test a basic camera that supports web rtc.""" @@ -765,50 +674,39 @@ async def test_camera_web_rtc_offer_failure( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == CameraState.STREAMING + assert cam.state == STATE_STREAMING client = await hass_ws_client(hass) - await client.send_json_auto_id( + await client.send_json( { - "type": "camera/webrtc/offer", + "id": 5, + "type": "camera/web_rtc_offer", "entity_id": "camera.my_camera", "offer": "a=recvonly", } ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - - # Answer - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "error", - "code": "webrtc_offer_failed", - "message": "Nest API error: Bad Request response from API (400)", - } + msg = await client.receive_json() + assert msg["id"] == 5 + assert msg["type"] == TYPE_RESULT + assert not msg["success"] + assert msg["error"]["code"] == 
"web_rtc_offer_failed" + assert msg["error"]["message"].startswith("Nest API error") -@pytest.mark.usefixtures("mock_create_stream") async def test_camera_multiple_streams( hass: HomeAssistant, auth, hass_ws_client: WebSocketGenerator, create_device, setup_platform, + mock_create_stream, ) -> None: """Test a camera supporting multiple stream types.""" expiration = utcnow() + datetime.timedelta(seconds=100) auth.responses = [ + # RTSP response + make_stream_url_response(), # WebRTC response aiohttp.web.json_response( { @@ -841,131 +739,27 @@ async def test_camera_multiple_streams( assert len(hass.states.async_all()) == 1 cam = hass.states.get("camera.my_camera") assert cam is not None - assert cam.state == CameraState.STREAMING + assert cam.state == STATE_STREAMING # Prefer WebRTC over RTSP/HLS assert cam.attributes["frontend_stream_type"] == StreamType.WEB_RTC - # RTSP stream is not supported + # RTSP stream stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") - assert not stream_source + assert stream_source == "rtsp://some/url?auth=g.0.streamingToken" # WebRTC stream client = await hass_ws_client(hass) - await client.send_json_auto_id( + await client.send_json( { - "type": "camera/webrtc/offer", + "id": 5, + "type": "camera/web_rtc_offer", "entity_id": "camera.my_camera", "offer": "a=recvonly", } ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - - # Answer - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "answer", - "answer": "v=0\r\ns=-\r\n", - } - - -@pytest.mark.usefixtures("webrtc_camera_device") -async def test_webrtc_refresh_expired_stream( - hass: HomeAssistant, - setup_platform: PlatformSetup, - hass_ws_client: WebSocketGenerator, - auth: FakeAuth, -) -> None: - """Test a camera webrtc expiration and refresh.""" - now = utcnow() - - stream_1_expiration = now + datetime.timedelta(seconds=90) - stream_2_expiration = now + datetime.timedelta(seconds=180) - auth.responses = [ - aiohttp.web.json_response( - { - "results": { - "answerSdp": "v=0\r\ns=-\r\n", - "mediaSessionId": "yP2grqz0Y1V_wgiX9KEbMWHoLd...", - "expiresAt": stream_1_expiration.isoformat(timespec="seconds"), - }, - } - ), - aiohttp.web.json_response( - { - "results": { - "mediaSessionId": "yP2grqz0Y1V_wgiX9KEbMWHoLd...", - "expiresAt": stream_2_expiration.isoformat(timespec="seconds"), - }, - } - ), - ] - await setup_platform() - await hass.async_block_till_done() - - assert len(hass.states.async_all()) == 1 - cam = hass.states.get("camera.my_camera") - assert cam is not None - assert cam.state == CameraState.STREAMING - assert cam.attributes["frontend_stream_type"] == StreamType.WEB_RTC - - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.my_camera", - "offer": "a=recvonly", - } - ) - - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - - # 
Answer - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "answer", - "answer": "v=0\r\ns=-\r\n", - } - - assert len(auth.captured_requests) == 1 - assert ( - auth.captured_requests[0][2].get("command") - == "sdm.devices.commands.CameraLiveStream.GenerateWebRtcStream" - ) - - # Fire alarm before stream_1_expiration. The stream url is not refreshed - next_update = now + datetime.timedelta(seconds=25) - await fire_alarm(hass, next_update) - assert len(auth.captured_requests) == 1 - - # Alarm is near stream_1_expiration which causes the stream extension - next_update = now + datetime.timedelta(seconds=60) - await fire_alarm(hass, next_update) - - assert len(auth.captured_requests) >= 2 - assert ( - auth.captured_requests[1][2].get("command") - == "sdm.devices.commands.CameraLiveStream.ExtendWebRtcStream" - ) + msg = await client.receive_json() + assert msg["id"] == 5 + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert msg["result"]["answer"] == "v=0\r\ns=-\r\n" diff --git a/tests/components/nest/test_config_flow.py b/tests/components/nest/test_config_flow.py index 8b05ace6d4d..5c8f01c8e39 100644 --- a/tests/components/nest/test_config_flow.py +++ b/tests/components/nest/test_config_flow.py @@ -6,7 +6,11 @@ from http import HTTPStatus from typing import Any from unittest.mock import patch -from google_nest_sdm.exceptions import AuthException +from google_nest_sdm.exceptions import ( + AuthException, + ConfigurationException, + SubscriberException, +) from google_nest_sdm.structure import Structure import pytest @@ -36,7 +40,7 @@ from tests.typing import ClientSessionGenerator WEB_REDIRECT_URL = "https://example.com/auth/external/callback" APP_REDIRECT_URL = "urn:ietf:wg:oauth:2.0:oob" -RAND_SUBSCRIBER_SUFFIX = "ABCDEF" + FAKE_DHCP_DATA = dhcp.DhcpServiceInfo( ip="127.0.0.2", macaddress="001122334455", hostname="fake_hostname" @@ -49,25 +53,10 @@ def nest_test_config() -> NestTestConfig: return TEST_CONFIGFLOW_APP_CREDS -@pytest.fixture(autouse=True) -def mock_rand_topic_name_fixture() -> None: - """Set the topic name random string to a constant.""" - with patch( - "homeassistant.components.nest.config_flow.get_random_string", - return_value=RAND_SUBSCRIBER_SUFFIX, - ): - yield - - class OAuthFixture: """Simulate the oauth flow used by the config flow.""" - def __init__( - self, - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - ) -> None: + def __init__(self, hass, hass_client_no_auth, aioclient_mock): """Initialize OAuthFixture.""" self.hass = hass self.hass_client = hass_client_no_auth @@ -164,43 +153,6 @@ class OAuthFixture: }, ) - async def async_complete_pubsub_flow( - self, - result: dict, - selected_topic: str, - selected_subscription: str = "create_new_subscription", - user_input: dict | None = None, - ) -> ConfigEntry: - """Fixture to walk through the Pub/Sub topic and subscription steps. - - This picks a simple set of steps that are reusable for most flows without - exercising the corner cases. 
- """ - - # Validate Pub/Sub topics are shown - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "pubsub_topic" - assert not result.get("errors") - - # Select Pub/Sub topic the show available subscriptions (none) - result = await self.async_configure( - result, - { - "topic_name": selected_topic, - }, - ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "pubsub_subscription" - assert not result.get("errors") - - # Create the subscription and end the flow - return await self.async_finish_setup( - result, - { - "subscription_name": selected_subscription, - }, - ) - async def async_finish_setup( self, result: dict, user_input: dict | None = None ) -> ConfigEntry: @@ -222,6 +174,15 @@ class OAuthFixture: user_input, ) + async def async_pubsub_flow(self, result: dict, cloud_project_id="") -> None: + """Verify the pubsub creation step.""" + # Render form with a link to get an auth token + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "pubsub" + assert "description_placeholders" in result + assert "url" in result["description_placeholders"] + assert result["data_schema"]({}) == {"cloud_project_id": cloud_project_id} + def get_config_entry(self) -> ConfigEntry: """Get the config entry.""" entries = self.hass.config_entries.async_entries(DOMAIN) @@ -240,115 +201,6 @@ async def oauth( return OAuthFixture(hass, hass_client_no_auth, aioclient_mock) -@pytest.fixture(name="sdm_managed_topic") -def mock_sdm_managed_topic() -> bool: - """Fixture to configure fake server responses for SDM owend Pub/Sub topics.""" - return False - - -@pytest.fixture(name="user_managed_topics") -def mock_user_managed_topics() -> list[str]: - """Fixture to configure fake server response for user owned Pub/Sub topics.""" - return [] - - -@pytest.fixture(name="subscriptions") -def mock_subscriptions() -> list[tuple[str, str]]: - """Fixture to configure fake server response for user subscriptions that exist.""" - return [] - - -@pytest.fixture(name="device_access_project_id") -def mock_device_access_project_id() -> str: - """Fixture to configure the device access console project id used in tests.""" - return PROJECT_ID - - -@pytest.fixture(name="cloud_project_id") -def mock_cloud_project_id() -> str: - """Fixture to configure the cloud console project id used in tests.""" - return CLOUD_PROJECT_ID - - -@pytest.fixture(name="create_subscription_status") -def mock_create_subscription_status() -> str: - """Fixture to configure the return code when creating the subscription.""" - return HTTPStatus.OK - - -@pytest.fixture(name="list_topics_status") -def mock_list_topics_status() -> str: - """Fixture to configure the return code when listing topics.""" - return HTTPStatus.OK - - -@pytest.fixture(name="list_subscriptions_status") -def mock_list_subscriptions_status() -> str: - """Fixture to configure the return code when listing subscriptions.""" - return HTTPStatus.OK - - -@pytest.fixture(autouse=True) -def mock_pubsub_api_responses( - aioclient_mock: AiohttpClientMocker, - sdm_managed_topic: bool, - user_managed_topics: list[str], - subscriptions: list[tuple[str, str]], - device_access_project_id: str, - cloud_project_id: str, - create_subscription_status: HTTPStatus, - list_topics_status: HTTPStatus, - list_subscriptions_status: HTTPStatus, -) -> None: - """Configure a server response for an SDM managed Pub/Sub topic. 
- - We check for a topic created by the SDM Device Access Console (but note we don't have permission to read it) - or the user has created one themselves in the Google Cloud Project. - """ - aioclient_mock.get( - f"https://pubsub.googleapis.com/v1/projects/sdm-prod/topics/enterprise-{device_access_project_id}", - status=HTTPStatus.FORBIDDEN if sdm_managed_topic else HTTPStatus.NOT_FOUND, - ) - aioclient_mock.get( - f"https://pubsub.googleapis.com/v1/projects/{cloud_project_id}/topics", - json={ - "topics": [ - { - "name": topic_name, - } - for topic_name in user_managed_topics or () - ] - }, - status=list_topics_status, - ) - # We check for a topic created by the SDM Device Access Console (but note we don't have permission to read it) - # or the user has created one themselves in the Google Cloud Project. - aioclient_mock.get( - f"https://pubsub.googleapis.com/v1/projects/{cloud_project_id}/subscriptions", - json={ - "subscriptions": [ - { - "name": subscription_name, - "topic": topic, - "pushConfig": {}, - "ackDeadlineSeconds": 10, - "messageRetentionDuration": "604800s", - "expirationPolicy": {"ttl": "2678400s"}, - "state": "ACTIVE", - } - for (subscription_name, topic) in subscriptions or () - ] - }, - status=list_subscriptions_status, - ) - aioclient_mock.put( - f"https://pubsub.googleapis.com/v1/projects/{cloud_project_id}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}", - json={}, - status=create_subscription_status, - ) - - -@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_app_credentials( hass: HomeAssistant, oauth, subscriber, setup_platform ) -> None: @@ -361,22 +213,20 @@ async def test_app_credentials( await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - result = await oauth.async_configure(result, None) - entry = await oauth.async_complete_pubsub_flow( - result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" - ) + entry = await oauth.async_finish_setup(result) data = dict(entry.data) assert "token" in data data["token"].pop("expires_in") data["token"].pop("expires_at") + assert "subscriber_id" in data + assert f"projects/{CLOUD_PROJECT_ID}/subscriptions" in data["subscriber_id"] + data.pop("subscriber_id") assert data == { "sdm": {}, "auth_implementation": "imported-cred", "cloud_project_id": CLOUD_PROJECT_ID, "project_id": PROJECT_ID, - "subscription_name": f"projects/{CLOUD_PROJECT_ID}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}", - "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", "token": { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", @@ -385,10 +235,6 @@ async def test_app_credentials( } -@pytest.mark.parametrize( - ("sdm_managed_topic", "device_access_project_id", "cloud_project_id"), - [(True, "new-project-id", "new-cloud-project-id")], -) async def test_config_flow_restart( hass: HomeAssistant, oauth, subscriber, setup_platform ) -> None: @@ -421,22 +267,20 @@ async def test_config_flow_restart( await oauth.async_oauth_web_flow(result, "new-project-id") oauth.async_mock_refresh() - result = await oauth.async_configure(result, {"code": "1234"}) - entry = await oauth.async_complete_pubsub_flow( - result, selected_topic="projects/sdm-prod/topics/enterprise-new-project-id" - ) + entry = await oauth.async_finish_setup(result, {"code": "1234"}) data = dict(entry.data) assert "token" in data data["token"].pop("expires_in") data["token"].pop("expires_at") + assert "subscriber_id" in data + assert "projects/new-cloud-project-id/subscriptions" in 
data["subscriber_id"] + data.pop("subscriber_id") assert data == { "sdm": {}, "auth_implementation": "imported-cred", "cloud_project_id": "new-cloud-project-id", "project_id": "new-project-id", - "subscription_name": "projects/new-cloud-project-id/subscriptions/home-assistant-ABCDEF", - "topic_name": "projects/sdm-prod/topics/enterprise-new-project-id", "token": { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", @@ -445,7 +289,6 @@ async def test_config_flow_restart( } -@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_config_flow_wrong_project_id( hass: HomeAssistant, oauth, subscriber, setup_platform ) -> None: @@ -476,22 +319,20 @@ async def test_config_flow_wrong_project_id( await hass.async_block_till_done() oauth.async_mock_refresh() - result = await oauth.async_configure(result, {"code": "1234"}) - entry = await oauth.async_complete_pubsub_flow( - result, selected_topic="projects/sdm-prod/topics/enterprise-some-project-id" - ) + entry = await oauth.async_finish_setup(result, {"code": "1234"}) data = dict(entry.data) assert "token" in data data["token"].pop("expires_in") data["token"].pop("expires_at") + assert "subscriber_id" in data + assert f"projects/{CLOUD_PROJECT_ID}/subscriptions" in data["subscriber_id"] + data.pop("subscriber_id") assert data == { "sdm": {}, "auth_implementation": "imported-cred", "cloud_project_id": CLOUD_PROJECT_ID, "project_id": PROJECT_ID, - "subscription_name": "projects/cloud-id-9876/subscriptions/home-assistant-ABCDEF", - "topic_name": "projects/sdm-prod/topics/enterprise-some-project-id", "token": { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", @@ -500,9 +341,6 @@ async def test_config_flow_wrong_project_id( } -@pytest.mark.parametrize( - ("sdm_managed_topic", "create_subscription_status"), [(True, HTTPStatus.NOT_FOUND)] -) async def test_config_flow_pubsub_configuration_error( hass: HomeAssistant, oauth, @@ -518,41 +356,14 @@ async def test_config_flow_pubsub_configuration_error( await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() + mock_subscriber.create_subscription.side_effect = ConfigurationException result = await oauth.async_configure(result, {"code": "1234"}) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "pubsub_topic" - assert result.get("data_schema")({}) == { - "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", - } - - # Select Pub/Sub topic the show available subscriptions (none) - result = await oauth.async_configure( - result, - { - "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", - }, - ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "pubsub_subscription" - assert result.get("data_schema")({}) == { - "subscription_name": "create_new_subscription", - } - - # Failure when creating the subscription - result = await oauth.async_configure( - result, - { - "subscription_name": "create_new_subscription", - }, - ) - assert result.get("type") is FlowResultType.FORM - assert result.get("errors") == {"base": "pubsub_api_error"} + assert result["type"] is FlowResultType.FORM + assert "errors" in result + assert "cloud_project_id" in result["errors"] + assert result["errors"]["cloud_project_id"] == "bad_project_id" -@pytest.mark.parametrize( - ("sdm_managed_topic", "create_subscription_status"), - [(True, HTTPStatus.INTERNAL_SERVER_ERROR)], -) async def test_config_flow_pubsub_subscriber_error( hass: HomeAssistant, oauth, setup_platform, 
mock_subscriber ) -> None: @@ -564,42 +375,17 @@ async def test_config_flow_pubsub_subscriber_error( ) await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() + + mock_subscriber.create_subscription.side_effect = SubscriberException() result = await oauth.async_configure(result, {"code": "1234"}) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "pubsub_topic" - assert result.get("data_schema")({}) == { - "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", - } - # Select Pub/Sub topic the show available subscriptions (none) - result = await oauth.async_configure( - result, - { - "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", - }, - ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "pubsub_subscription" - assert result.get("data_schema")({}) == { - "subscription_name": "create_new_subscription", - } - - # Failure when creating the subscription - result = await oauth.async_configure( - result, - { - "subscription_name": "create_new_subscription", - }, - ) - - assert result.get("type") is FlowResultType.FORM - assert result.get("errors") == {"base": "pubsub_api_error"} + assert result["type"] is FlowResultType.FORM + assert "errors" in result + assert "cloud_project_id" in result["errors"] + assert result["errors"]["cloud_project_id"] == "subscriber_error" -@pytest.mark.parametrize( - ("nest_test_config", "sdm_managed_topic", "device_access_project_id"), - [(TEST_CONFIG_APP_CREDS, True, "project-id-2")], -) +@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_APP_CREDS]) async def test_multiple_config_entries( hass: HomeAssistant, oauth, setup_platform ) -> None: @@ -614,10 +400,7 @@ async def test_multiple_config_entries( ) await oauth.async_app_creds_flow(result, project_id="project-id-2") oauth.async_mock_refresh() - result = await oauth.async_configure(result, user_input={}) - entry = await oauth.async_complete_pubsub_flow( - result, selected_topic="projects/sdm-prod/topics/enterprise-project-id-2" - ) + entry = await oauth.async_finish_setup(result) assert entry.title == "Mock Title" assert "token" in entry.data @@ -625,9 +408,7 @@ async def test_multiple_config_entries( assert len(entries) == 2 -@pytest.mark.parametrize( - ("nest_test_config", "sdm_managed_topic"), [(TEST_CONFIG_APP_CREDS, True)] -) +@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_APP_CREDS]) async def test_duplicate_config_entries( hass: HomeAssistant, oauth, setup_platform ) -> None: @@ -652,9 +433,7 @@ async def test_duplicate_config_entries( assert result.get("reason") == "already_configured" -@pytest.mark.parametrize( - ("nest_test_config", "sdm_managed_topic"), [(TEST_CONFIG_APP_CREDS, True)] -) +@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_APP_CREDS]) async def test_reauth_multiple_config_entries( hass: HomeAssistant, oauth, setup_platform, config_entry ) -> None: @@ -705,7 +484,6 @@ async def test_reauth_multiple_config_entries( assert entry.data.get("extra_data") -@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_pubsub_subscription_strip_whitespace( hass: HomeAssistant, oauth, subscriber, setup_platform ) -> None: @@ -719,10 +497,8 @@ async def test_pubsub_subscription_strip_whitespace( result, cloud_project_id=" " + CLOUD_PROJECT_ID + " " ) oauth.async_mock_refresh() - result = await oauth.async_configure(result, {"code": "1234"}) - entry = await oauth.async_complete_pubsub_flow( - result, 
selected_topic="projects/sdm-prod/topics/enterprise-some-project-id" - ) + entry = await oauth.async_finish_setup(result, {"code": "1234"}) + assert entry.title == "Import from configuration.yaml" assert "token" in entry.data entry.data["token"].pop("expires_at") @@ -733,14 +509,10 @@ async def test_pubsub_subscription_strip_whitespace( "type": "Bearer", "expires_in": 60, } - assert "subscription_name" in entry.data + assert "subscriber_id" in entry.data assert entry.data["cloud_project_id"] == CLOUD_PROJECT_ID -@pytest.mark.parametrize( - ("sdm_managed_topic", "create_subscription_status"), - [(True, HTTPStatus.UNAUTHORIZED)], -) async def test_pubsub_subscription_auth_failure( hass: HomeAssistant, oauth, setup_platform, mock_subscriber ) -> None: @@ -751,43 +523,17 @@ async def test_pubsub_subscription_auth_failure( DOMAIN, context={"source": config_entries.SOURCE_USER} ) + mock_subscriber.create_subscription.side_effect = AuthException() + await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() result = await oauth.async_configure(result, {"code": "1234"}) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "pubsub_topic" - assert result.get("data_schema")({}) == { - "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", - } - # Select Pub/Sub topic the show available subscriptions (none) - result = await oauth.async_configure( - result, - { - "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", - }, - ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "pubsub_subscription" - assert result.get("data_schema")({}) == { - "subscription_name": "create_new_subscription", - } - - # Failure when creating the subscription - result = await oauth.async_configure( - result, - { - "subscription_name": "create_new_subscription", - }, - ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "pubsub_subscription" - assert result.get("errors") == {"base": "pubsub_api_error"} + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "invalid_access_token" -@pytest.mark.parametrize( - ("nest_test_config", "sdm_managed_topic"), [(TEST_CONFIG_APP_CREDS, True)] -) +@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_APP_CREDS]) async def test_pubsub_subscriber_config_entry_reauth( hass: HomeAssistant, oauth, @@ -817,7 +563,6 @@ async def test_pubsub_subscriber_config_entry_reauth( assert entry.data["cloud_project_id"] == CLOUD_PROJECT_ID -@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_config_entry_title_from_home( hass: HomeAssistant, oauth, setup_platform, subscriber ) -> None: @@ -845,24 +590,13 @@ async def test_config_entry_title_from_home( await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - result = await oauth.async_configure(result, {"code": "1234"}) - entry = await oauth.async_complete_pubsub_flow( - result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" - ) + entry = await oauth.async_finish_setup(result, {"code": "1234"}) assert entry.title == "Example Home" assert "token" in entry.data - assert entry.data.get("cloud_project_id") == CLOUD_PROJECT_ID - assert ( - entry.data.get("subscription_name") - == f"projects/{CLOUD_PROJECT_ID}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}" - ) - assert ( - entry.data.get("topic_name") - == f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" - ) + assert "subscriber_id" in entry.data + assert entry.data["cloud_project_id"] == 
CLOUD_PROJECT_ID -@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_config_entry_title_multiple_homes( hass: HomeAssistant, oauth, setup_platform, subscriber ) -> None: @@ -902,14 +636,10 @@ async def test_config_entry_title_multiple_homes( await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - result = await oauth.async_configure(result, {"code": "1234"}) - entry = await oauth.async_complete_pubsub_flow( - result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" - ) + entry = await oauth.async_finish_setup(result, {"code": "1234"}) assert entry.title == "Example Home #1, Example Home #2" -@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_title_failure_fallback( hass: HomeAssistant, oauth, setup_platform, mock_subscriber ) -> None: @@ -923,26 +653,13 @@ async def test_title_failure_fallback( oauth.async_mock_refresh() mock_subscriber.async_get_device_manager.side_effect = AuthException() - - result = await oauth.async_configure(result, {"code": "1234"}) - entry = await oauth.async_complete_pubsub_flow( - result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" - ) - + entry = await oauth.async_finish_setup(result, {"code": "1234"}) assert entry.title == "Import from configuration.yaml" assert "token" in entry.data - assert entry.data.get("cloud_project_id") == CLOUD_PROJECT_ID - assert ( - entry.data.get("subscription_name") - == f"projects/{CLOUD_PROJECT_ID}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}" - ) - assert ( - entry.data.get("topic_name") - == f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" - ) + assert "subscriber_id" in entry.data + assert entry.data["cloud_project_id"] == CLOUD_PROJECT_ID -@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_structure_missing_trait( hass: HomeAssistant, oauth, setup_platform, subscriber ) -> None: @@ -967,10 +684,7 @@ async def test_structure_missing_trait( await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - result = await oauth.async_configure(result, {"code": "1234"}) - entry = await oauth.async_complete_pubsub_flow( - result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" - ) + entry = await oauth.async_finish_setup(result, {"code": "1234"}) # Fallback to default name assert entry.title == "Import from configuration.yaml" @@ -994,7 +708,6 @@ async def test_dhcp_discovery( assert result.get("reason") == "missing_credentials" -@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_dhcp_discovery_with_creds( hass: HomeAssistant, oauth, subscriber, setup_platform ) -> None: @@ -1017,23 +730,21 @@ async def test_dhcp_discovery_with_creds( result = await oauth.async_configure(result, {"project_id": PROJECT_ID}) await oauth.async_oauth_web_flow(result) oauth.async_mock_refresh() - - result = await oauth.async_configure(result, {"code": "1234"}) - entry = await oauth.async_complete_pubsub_flow( - result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" - ) + entry = await oauth.async_finish_setup(result, {"code": "1234"}) + await hass.async_block_till_done() data = dict(entry.data) assert "token" in data data["token"].pop("expires_in") data["token"].pop("expires_at") + assert "subscriber_id" in data + assert f"projects/{CLOUD_PROJECT_ID}/subscriptions" in data["subscriber_id"] + data.pop("subscriber_id") assert data == { "sdm": {}, "auth_implementation": "imported-cred", "cloud_project_id": CLOUD_PROJECT_ID, "project_id": PROJECT_ID, - 
"subscription_name": f"projects/{CLOUD_PROJECT_ID}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}", - "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", "token": { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", @@ -1073,133 +784,3 @@ async def test_token_error( result = await oauth.async_configure(result, user_input=None) assert result.get("type") is FlowResultType.ABORT assert result.get("reason") == error_reason - - -@pytest.mark.parametrize( - ("user_managed_topics", "subscriptions"), - [ - ( - [f"projects/{CLOUD_PROJECT_ID}/topics/some-topic-id"], - [ - ( - f"projects/{CLOUD_PROJECT_ID}/subscriptions/some-subscription-id", - f"projects/{CLOUD_PROJECT_ID}/topics/some-topic-id", - ) - ], - ) - ], -) -async def test_existing_topic_and_subscription( - hass: HomeAssistant, oauth, subscriber, setup_platform -) -> None: - """Test selecting existing user managed topic and subscription.""" - await setup_platform() - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - await oauth.async_app_creds_flow(result) - oauth.async_mock_refresh() - - result = await oauth.async_configure(result, None) - entry = await oauth.async_complete_pubsub_flow( - result, - selected_topic=f"projects/{CLOUD_PROJECT_ID}/topics/some-topic-id", - selected_subscription=f"projects/{CLOUD_PROJECT_ID}/subscriptions/some-subscription-id", - ) - - data = dict(entry.data) - assert "token" in data - data["token"].pop("expires_in") - data["token"].pop("expires_at") - assert data == { - "sdm": {}, - "auth_implementation": "imported-cred", - "cloud_project_id": CLOUD_PROJECT_ID, - "project_id": PROJECT_ID, - "subscription_name": f"projects/{CLOUD_PROJECT_ID}/subscriptions/some-subscription-id", - "subscriber_id_imported": True, - "topic_name": f"projects/{CLOUD_PROJECT_ID}/topics/some-topic-id", - "token": { - "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", - "type": "Bearer", - }, - } - - -async def test_no_eligible_topics( - hass: HomeAssistant, oauth, subscriber, setup_platform -) -> None: - """Test the case where there are no eligible pub/sub topics.""" - await setup_platform() - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - await oauth.async_app_creds_flow(result) - oauth.async_mock_refresh() - - result = await oauth.async_configure(result, None) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "pubsub" - assert result.get("errors") == {"base": "no_pubsub_topics"} - - -@pytest.mark.parametrize( - ("list_topics_status"), - [ - (HTTPStatus.INTERNAL_SERVER_ERROR), - ], -) -async def test_list_topics_failure( - hass: HomeAssistant, oauth, subscriber, setup_platform -) -> None: - """Test selecting existing user managed topic and subscription.""" - await setup_platform() - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - await oauth.async_app_creds_flow(result) - oauth.async_mock_refresh() - - result = await oauth.async_configure(result, None) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "pubsub" - assert result.get("errors") == {"base": "pubsub_api_error"} - - -@pytest.mark.parametrize( - ("sdm_managed_topic", "list_subscriptions_status"), - [ - (True, HTTPStatus.INTERNAL_SERVER_ERROR), - ], -) -async def test_list_subscriptions_failure( - hass: HomeAssistant, oauth, 
subscriber, setup_platform -) -> None: - """Test selecting existing user managed topic and subscription.""" - await setup_platform() - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - await oauth.async_app_creds_flow(result) - oauth.async_mock_refresh() - - result = await oauth.async_configure(result, None) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "pubsub_topic" - assert not result.get("errors") - - # Select Pub/Sub topic the show available subscriptions (none) - result = await oauth.async_configure( - result, - { - "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", - }, - ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "pubsub_subscription" - assert result.get("errors") == {"base": "pubsub_api_error"} diff --git a/tests/components/nest/test_device_trigger.py b/tests/components/nest/test_device_trigger.py index cf0e1c5ecce..1820096d2a6 100644 --- a/tests/components/nest/test_device_trigger.py +++ b/tests/components/nest/test_device_trigger.py @@ -20,7 +20,7 @@ from homeassistant.util.dt import utcnow from .common import DEVICE_ID, CreateDevice, FakeSubscriber, PlatformSetup -from tests.common import async_get_device_automations +from tests.common import async_get_device_automations, async_mock_service DEVICE_NAME = "My Camera" DATA_MESSAGE = {"message": "service-called"} @@ -59,9 +59,7 @@ def make_camera( } -async def setup_automation( - hass: HomeAssistant, device_id: str, trigger_type: str -) -> bool: +async def setup_automation(hass, device_id, trigger_type): """Set up an automation trigger for testing triggering.""" return await async_setup_component( hass, @@ -85,6 +83,12 @@ async def setup_automation( ) +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -244,7 +248,7 @@ async def test_fires_on_camera_motion( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test camera_motion triggers firing.""" create_device.create( @@ -269,8 +273,8 @@ async def test_fires_on_camera_motion( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data == DATA_MESSAGE + assert len(calls) == 1 + assert calls[0].data == DATA_MESSAGE async def test_fires_on_camera_person( @@ -278,7 +282,7 @@ async def test_fires_on_camera_person( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test camera_person triggers firing.""" create_device.create( @@ -303,8 +307,8 @@ async def test_fires_on_camera_person( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data == DATA_MESSAGE + assert len(calls) == 1 + assert calls[0].data == DATA_MESSAGE async def test_fires_on_camera_sound( @@ -312,7 +316,7 @@ async def test_fires_on_camera_sound( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test camera_sound triggers 
firing.""" create_device.create( @@ -337,8 +341,8 @@ async def test_fires_on_camera_sound( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data == DATA_MESSAGE + assert len(calls) == 1 + assert calls[0].data == DATA_MESSAGE async def test_fires_on_doorbell_chime( @@ -346,7 +350,7 @@ async def test_fires_on_doorbell_chime( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test doorbell_chime triggers firing.""" create_device.create( @@ -371,8 +375,8 @@ async def test_fires_on_doorbell_chime( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data == DATA_MESSAGE + assert len(calls) == 1 + assert calls[0].data == DATA_MESSAGE async def test_trigger_for_wrong_device_id( @@ -380,7 +384,7 @@ async def test_trigger_for_wrong_device_id( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test messages for the wrong device are ignored.""" create_device.create( @@ -405,7 +409,7 @@ async def test_trigger_for_wrong_device_id( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_trigger_for_wrong_event_type( @@ -413,7 +417,7 @@ async def test_trigger_for_wrong_event_type( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test that messages for the wrong event type are ignored.""" create_device.create( @@ -438,13 +442,13 @@ async def test_trigger_for_wrong_event_type( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_subscriber_automation( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], create_device: CreateDevice, setup_platform: PlatformSetup, subscriber: FakeSubscriber, @@ -484,5 +488,5 @@ async def test_subscriber_automation( await subscriber.async_receive_event(event) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data == DATA_MESSAGE + assert len(calls) == 1 + assert calls[0].data == DATA_MESSAGE diff --git a/tests/components/nest/test_event.py b/tests/components/nest/test_event.py deleted file mode 100644 index f45e6c1c6e6..00000000000 --- a/tests/components/nest/test_event.py +++ /dev/null @@ -1,325 +0,0 @@ -"""Test for Nest event platform.""" - -import datetime -from typing import Any -from unittest.mock import patch - -from freezegun.api import FrozenDateTimeFactory -from google_nest_sdm.event import EventMessage, EventType -from google_nest_sdm.traits import TraitType -import pytest - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.util.dt import utcnow - -from .common import DEVICE_ID, CreateDevice, FakeSubscriber -from .conftest import PlatformSetup - -EVENT_SESSION_ID = "CjY5Y3VKaTZwR3o4Y19YbTVfMF..." -EVENT_ID = "FWWVQVUdGNUlTU2V4MGV2aTNXV..." 
-ENCODED_EVENT_ID = "WyJDalk1WTNWS2FUWndSM280WTE5WWJUVmZNRi4uLiIsICJGV1dWUVZVZEdOVWxUVTJWNE1HVjJhVE5YVi4uLiJd" - -EVENT_SESSION_ID2 = "DjY5Y3VKaTZwR3o4Y19YbTVfMF..." -EVENT_ID2 = "GWWVQVUdGNUlTU2V4MGV2aTNXV..." -ENCODED_EVENT_ID2 = "WyJEalk1WTNWS2FUWndSM280WTE5WWJUVmZNRi4uLiIsICJHV1dWUVZVZEdOVWxUVTJWNE1HVjJhVE5YVi4uLiJd" - - -@pytest.fixture -def platforms() -> list[Platform]: - """Fixture for platforms to setup.""" - return [Platform.EVENT] - - -@pytest.fixture(autouse=True) -def enable_prefetch(subscriber: FakeSubscriber) -> None: - """Fixture to enable media fetching for tests to exercise.""" - subscriber.cache_policy.fetch = True - with patch("homeassistant.components.nest.EVENT_MEDIA_CACHE_SIZE", new=5): - yield - - -@pytest.fixture -def device_type() -> str: - """Fixture for the type of device under test.""" - return "sdm.devices.types.DOORBELL" - - -@pytest.fixture -async def device_traits() -> dict[str, Any]: - """Fixture to set default device traits used when creating devices.""" - return { - "sdm.devices.traits.Info": { - "customName": "Front", - }, - "sdm.devices.traits.CameraLiveStream": { - "maxVideoResolution": { - "width": 640, - "height": 480, - }, - "videoCodecs": ["H264"], - "audioCodecs": ["AAC"], - }, - } - - -def create_events(events: str) -> EventMessage: - """Create an EventMessage for events.""" - return create_event_messages( - { - event: { - "eventSessionId": EVENT_SESSION_ID, - "eventId": EVENT_ID, - } - for event in events - } - ) - - -def create_event_messages( - events: dict[str, Any], parameters: dict[str, Any] | None = None -) -> EventMessage: - """Create an EventMessage for events.""" - return EventMessage.create_event( - { - "eventId": "some-event-id", - "timestamp": utcnow().isoformat(timespec="seconds"), - "resourceUpdate": { - "name": DEVICE_ID, - "events": events, - }, - **(parameters if parameters else {}), - }, - auth=None, - ) - - -@pytest.mark.freeze_time("2024-08-24T12:00:00Z") -@pytest.mark.parametrize( - ( - "trait_types", - "entity_id", - "expected_attributes", - "api_event_type", - "expected_event_type", - ), - [ - ( - [TraitType.DOORBELL_CHIME, TraitType.CAMERA_MOTION], - "event.front_chime", - { - "device_class": "doorbell", - "event_types": ["doorbell_chime"], - "friendly_name": "Front Chime", - }, - EventType.DOORBELL_CHIME, - "doorbell_chime", - ), - ( - [TraitType.CAMERA_MOTION, TraitType.CAMERA_PERSON, TraitType.CAMERA_SOUND], - "event.front_motion", - { - "device_class": "motion", - "event_types": ["camera_motion", "camera_person", "camera_sound"], - "friendly_name": "Front Motion", - }, - EventType.CAMERA_MOTION, - "camera_motion", - ), - ( - [TraitType.CAMERA_MOTION, TraitType.CAMERA_PERSON, TraitType.CAMERA_SOUND], - "event.front_motion", - { - "device_class": "motion", - "event_types": ["camera_motion", "camera_person", "camera_sound"], - "friendly_name": "Front Motion", - }, - EventType.CAMERA_PERSON, - "camera_person", - ), - ( - [TraitType.CAMERA_MOTION, TraitType.CAMERA_PERSON, TraitType.CAMERA_SOUND], - "event.front_motion", - { - "device_class": "motion", - "event_types": ["camera_motion", "camera_person", "camera_sound"], - "friendly_name": "Front Motion", - }, - EventType.CAMERA_SOUND, - "camera_sound", - ), - ], -) -async def test_receive_events( - hass: HomeAssistant, - subscriber: FakeSubscriber, - setup_platform: PlatformSetup, - create_device: CreateDevice, - trait_types: list[TraitType], - entity_id: str, - expected_attributes: dict[str, str], - api_event_type: EventType, - expected_event_type: str, -) -> None: - 
"""Test a pubsub message for a camera person event.""" - create_device.create( - raw_traits={ - **{trait_type: {} for trait_type in trait_types}, - api_event_type: {}, - } - ) - await setup_platform() - - state = hass.states.get(entity_id) - assert state.state == "unknown" - assert state.attributes == { - **expected_attributes, - "event_type": None, - } - - await subscriber.async_receive_event(create_events([api_event_type])) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state.state == "2024-08-24T12:00:00.000+00:00" - assert state.attributes == { - **expected_attributes, - "event_type": expected_event_type, - "nest_event_id": ENCODED_EVENT_ID, - } - - -@pytest.mark.parametrize(("trait_type"), [(TraitType.DOORBELL_CHIME)]) -async def test_ignore_unrelated_event( - hass: HomeAssistant, - subscriber: FakeSubscriber, - setup_platform: PlatformSetup, - create_device: CreateDevice, - trait_type: TraitType, -) -> None: - """Test a pubsub message for a camera person event.""" - create_device.create( - raw_traits={ - trait_type: {}, - } - ) - await setup_platform() - - # Device does not have traits matching this event type - await subscriber.async_receive_event(create_events([EventType.CAMERA_MOTION])) - await hass.async_block_till_done() - - state = hass.states.get("event.front_chime") - assert state.state == "unknown" - assert state.attributes == { - "device_class": "doorbell", - "event_type": None, - "event_types": ["doorbell_chime"], - "friendly_name": "Front Chime", - } - - -@pytest.mark.freeze_time("2024-08-24T12:00:00Z") -async def test_event_threads( - hass: HomeAssistant, - subscriber: FakeSubscriber, - setup_platform: PlatformSetup, - create_device: CreateDevice, - freezer: FrozenDateTimeFactory, -) -> None: - """Test multiple events delivered as part of a thread are a single home assistant event.""" - create_device.create( - raw_traits={ - TraitType.DOORBELL_CHIME: {}, - TraitType.CAMERA_CLIP_PREVIEW: {}, - } - ) - await setup_platform() - - state = hass.states.get("event.front_chime") - assert state.state == "unknown" - - # Doorbell event is received - freezer.tick(datetime.timedelta(seconds=2)) - await subscriber.async_receive_event( - create_event_messages( - { - EventType.DOORBELL_CHIME: { - "eventSessionId": EVENT_SESSION_ID, - "eventId": EVENT_ID, - } - }, - parameters={"eventThreadState": "STARTED"}, - ) - ) - await hass.async_block_till_done() - - state = hass.states.get("event.front_chime") - assert state.state == "2024-08-24T12:00:02.000+00:00" - assert state.attributes == { - "device_class": "doorbell", - "event_types": ["doorbell_chime"], - "friendly_name": "Front Chime", - "event_type": "doorbell_chime", - "nest_event_id": ENCODED_EVENT_ID, - } - - # Media arrives in a second message that ends the thread - freezer.tick(datetime.timedelta(seconds=2)) - await subscriber.async_receive_event( - create_event_messages( - { - EventType.DOORBELL_CHIME: { - "eventSessionId": EVENT_SESSION_ID, - "eventId": EVENT_ID, - }, - EventType.CAMERA_CLIP_PREVIEW: { - "eventSessionId": EVENT_SESSION_ID, - "previewUrl": "http://example", - }, - }, - parameters={"eventThreadState": "ENDED"}, - ) - ) - await hass.async_block_till_done() - - state = hass.states.get("event.front_chime") - assert ( - state.state == "2024-08-24T12:00:02.000+00:00" - ) # A second event is not received - assert state.attributes == { - "device_class": "doorbell", - "event_types": ["doorbell_chime"], - "friendly_name": "Front Chime", - "event_type": "doorbell_chime", - 
"nest_event_id": ENCODED_EVENT_ID, - } - - # An additional doorbell press event happens (with an updated session id) - freezer.tick(datetime.timedelta(seconds=2)) - await subscriber.async_receive_event( - create_event_messages( - { - EventType.DOORBELL_CHIME: { - "eventSessionId": EVENT_SESSION_ID2, - "eventId": EVENT_ID2, - }, - EventType.CAMERA_CLIP_PREVIEW: { - "eventSessionId": EVENT_SESSION_ID2, - "previewUrl": "http://example", - }, - }, - parameters={"eventThreadState": "ENDED"}, - ) - ) - await hass.async_block_till_done() - - state = hass.states.get("event.front_chime") - assert state.state == "2024-08-24T12:00:06.000+00:00" # Third event is received - assert state.attributes == { - "device_class": "doorbell", - "event_types": ["doorbell_chime"], - "friendly_name": "Front Chime", - "event_type": "doorbell_chime", - "nest_event_id": ENCODED_EVENT_ID2, - } diff --git a/tests/components/nest/test_events.py b/tests/components/nest/test_events.py index e746e5f263f..08cf9f775b7 100644 --- a/tests/components/nest/test_events.py +++ b/tests/components/nest/test_events.py @@ -122,28 +122,28 @@ def create_events(events, device_id=DEVICE_ID, timestamp=None): [ ( "sdm.devices.types.DOORBELL", - ["sdm.devices.traits.DoorbellChime", "sdm.devices.traits.CameraEventImage"], + ["sdm.devices.traits.DoorbellChime"], "sdm.devices.events.DoorbellChime.Chime", "Doorbell", "doorbell_chime", ), ( "sdm.devices.types.CAMERA", - ["sdm.devices.traits.CameraMotion", "sdm.devices.traits.CameraEventImage"], + ["sdm.devices.traits.CameraMotion"], "sdm.devices.events.CameraMotion.Motion", "Camera", "camera_motion", ), ( "sdm.devices.types.CAMERA", - ["sdm.devices.traits.CameraPerson", "sdm.devices.traits.CameraEventImage"], + ["sdm.devices.traits.CameraPerson"], "sdm.devices.events.CameraPerson.Person", "Camera", "camera_person", ), ( "sdm.devices.types.CAMERA", - ["sdm.devices.traits.CameraSound", "sdm.devices.traits.CameraEventImage"], + ["sdm.devices.traits.CameraSound"], "sdm.devices.events.CameraSound.Sound", "Camera", "camera_sound", @@ -186,8 +186,6 @@ async def test_event( "type": expected_type, "timestamp": event_time, } - assert "image" in events[0].data["attachment"] - assert "video" not in events[0].data["attachment"] @pytest.mark.parametrize( @@ -234,41 +232,6 @@ async def test_camera_multiple_event( } -@pytest.mark.parametrize( - "device_traits", - [(["sdm.devices.traits.CameraMotion"])], -) -async def test_media_not_supported( - hass: HomeAssistant, entity_registry: er.EntityRegistry, subscriber, setup_platform -) -> None: - """Test a pubsub message for a camera person event.""" - events = async_capture_events(hass, NEST_EVENT) - await setup_platform() - entry = entity_registry.async_get("camera.front") - assert entry is not None - - event_map = { - "sdm.devices.events.CameraMotion.Motion": { - "eventSessionId": EVENT_SESSION_ID, - "eventId": EVENT_ID, - }, - } - - timestamp = utcnow() - await subscriber.async_receive_event(create_events(event_map, timestamp=timestamp)) - await hass.async_block_till_done() - - event_time = timestamp.replace(microsecond=0) - assert len(events) == 1 - assert event_view(events[0].data) == { - "device_id": entry.device_id, - "type": "camera_motion", - "timestamp": event_time, - } - # Media fetching not supported by this device - assert "attachment" not in events[0].data - - async def test_unknown_event(hass: HomeAssistant, subscriber, setup_platform) -> None: """Test a pubsub message for an unknown event type.""" events = async_capture_events(hass, NEST_EVENT) @@ 
-381,8 +344,6 @@ async def test_doorbell_event_thread( "type": "camera_motion", "timestamp": timestamp1.replace(microsecond=0), } - assert "image" in events[0].data["attachment"] - assert "video" in events[0].data["attachment"] @pytest.mark.parametrize( diff --git a/tests/components/nest/test_init.py b/tests/components/nest/test_init.py index a17803a6cde..f9813ca63ee 100644 --- a/tests/components/nest/test_init.py +++ b/tests/components/nest/test_init.py @@ -8,7 +8,6 @@ mode (e.g. yaml, ConfigEntry, etc) however some tests override and just run in relevant modes. """ -from collections.abc import Generator import logging from typing import Any from unittest.mock import patch @@ -20,6 +19,7 @@ from google_nest_sdm.exceptions import ( SubscriberException, ) import pytest +from typing_extensions import Generator from homeassistant.components.nest import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -31,7 +31,6 @@ from .common import ( SUBSCRIBER_ID, TEST_CONFIG_ENTRY_LEGACY, TEST_CONFIG_LEGACY, - TEST_CONFIG_NEW_SUBSCRIPTION, TEST_CONFIGFLOW_APP_CREDS, FakeSubscriber, PlatformSetup, @@ -68,15 +67,13 @@ def warning_caplog( @pytest.fixture -def subscriber_side_effect() -> Any | None: +def subscriber_side_effect() -> None: """Fixture to inject failures into FakeSubscriber start.""" return None @pytest.fixture -def failing_subscriber( - subscriber_side_effect: Any | None, -) -> YieldFixture[FakeSubscriber]: +def failing_subscriber(subscriber_side_effect: Any) -> YieldFixture[FakeSubscriber]: """Fixture overriding default subscriber behavior to allow failure injection.""" subscriber = FakeSubscriber() with patch( @@ -98,19 +95,6 @@ async def test_setup_success( assert entries[0].state is ConfigEntryState.LOADED -@pytest.mark.parametrize("nest_test_config", [(TEST_CONFIG_NEW_SUBSCRIPTION)]) -async def test_setup_success_new_subscription_format( - hass: HomeAssistant, error_caplog: pytest.LogCaptureFixture, setup_platform -) -> None: - """Test successful setup.""" - await setup_platform() - assert not error_caplog.records - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - assert entries[0].state is ConfigEntryState.LOADED - - @pytest.mark.parametrize("subscriber_id", [("invalid-subscriber-format")]) async def test_setup_configuration_failure( hass: HomeAssistant, @@ -185,6 +169,19 @@ async def test_subscriber_auth_failure( assert flows[0]["step_id"] == "reauth_confirm" +@pytest.mark.parametrize("subscriber_id", [(None)]) +async def test_setup_missing_subscriber_id( + hass: HomeAssistant, warning_caplog: pytest.LogCaptureFixture, setup_base_platform +) -> None: + """Test missing subscriber id from configuration.""" + await setup_base_platform() + assert "Configuration option" in warning_caplog.text + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + assert entries[0].state is ConfigEntryState.SETUP_ERROR + + @pytest.mark.parametrize("subscriber_side_effect", [(ConfigurationException())]) async def test_subscriber_configuration_failure( hass: HomeAssistant, diff --git a/tests/components/nest/test_media_source.py b/tests/components/nest/test_media_source.py index 2526bfdf975..f4fb8bdb623 100644 --- a/tests/components/nest/test_media_source.py +++ b/tests/components/nest/test_media_source.py @@ -4,7 +4,6 @@ These tests simulate recent camera events received by the subscriber exposed as media in the media source. 
""" -from collections.abc import Generator import datetime from http import HTTPStatus import io @@ -16,8 +15,9 @@ import av from google_nest_sdm.event import EventMessage import numpy as np import pytest +from typing_extensions import Generator -from homeassistant.components.media_player import BrowseError +from homeassistant.components.media_player.errors import BrowseError from homeassistant.components.media_source import ( URI_SCHEME, Unresolvable, @@ -48,9 +48,6 @@ CAMERA_TRAITS = { "customName": DEVICE_NAME, }, "sdm.devices.traits.CameraImage": {}, - "sdm.devices.traits.CameraLiveStream": { - "supportedProtocols": ["RTSP"], - }, "sdm.devices.traits.CameraEventImage": {}, "sdm.devices.traits.CameraPerson": {}, "sdm.devices.traits.CameraMotion": {}, @@ -60,9 +57,7 @@ BATTERY_CAMERA_TRAITS = { "customName": DEVICE_NAME, }, "sdm.devices.traits.CameraClipPreview": {}, - "sdm.devices.traits.CameraLiveStream": { - "supportedProtocols": ["WEB_RTC"], - }, + "sdm.devices.traits.CameraLiveStream": {}, "sdm.devices.traits.CameraPerson": {}, "sdm.devices.traits.CameraMotion": {}, } @@ -79,6 +74,7 @@ GENERATE_IMAGE_URL_RESPONSE = { } IMAGE_BYTES_FROM_EVENT = b"test url image bytes" IMAGE_AUTHORIZATION_HEADERS = {"Authorization": "Basic g.0.eventToken"} +NEST_EVENT = "nest_event" def frame_image_data(frame_i, total_frames): @@ -1465,111 +1461,3 @@ async def test_camera_image_resize( assert browse.title == "Front: Recent Events" assert not browse.thumbnail assert len(browse.children) == 1 - - -async def test_event_media_attachment( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - device_registry: dr.DeviceRegistry, - subscriber, - auth, - setup_platform, -) -> None: - """Verify that an event media attachment is successfully resolved.""" - await setup_platform() - - assert len(hass.states.async_all()) == 1 - camera = hass.states.get("camera.front") - assert camera is not None - - device = device_registry.async_get_device(identifiers={(DOMAIN, DEVICE_ID)}) - assert device - assert device.name == DEVICE_NAME - - # Capture any events published - received_events = async_capture_events(hass, NEST_EVENT) - - # Set up fake media, and publish image events - auth.responses = [ - aiohttp.web.json_response(GENERATE_IMAGE_URL_RESPONSE), - aiohttp.web.Response(body=IMAGE_BYTES_FROM_EVENT), - ] - event_timestamp = dt_util.now() - await subscriber.async_receive_event( - create_event( - EVENT_SESSION_ID, - EVENT_ID, - PERSON_EVENT, - timestamp=event_timestamp, - ) - ) - await hass.async_block_till_done() - - assert len(received_events) == 1 - received_event = received_events[0] - attachment = received_event.data.get("attachment") - assert attachment - assert list(attachment.keys()) == ["image"] - assert attachment["image"].startswith("/api/nest/event_media") - assert attachment["image"].endswith("/thumbnail") - - # Download the attachment content and verify it works - client = await hass_client() - response = await client.get(attachment["image"]) - assert response.status == HTTPStatus.OK, f"Response not matched: {response}" - await response.read() - - -@pytest.mark.parametrize("device_traits", [BATTERY_CAMERA_TRAITS]) -async def test_event_clip_media_attachment( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - device_registry: dr.DeviceRegistry, - subscriber, - auth, - setup_platform, - mp4, -) -> None: - """Verify that an event media attachment is successfully resolved.""" - await setup_platform() - - assert len(hass.states.async_all()) == 1 - camera = hass.states.get("camera.front") - 
assert camera is not None - - device = device_registry.async_get_device(identifiers={(DOMAIN, DEVICE_ID)}) - assert device - assert device.name == DEVICE_NAME - - # Capture any events published - received_events = async_capture_events(hass, NEST_EVENT) - - # Set up fake media, and publish clip events - auth.responses = [ - aiohttp.web.Response(body=mp4.getvalue()), - ] - event_timestamp = dt_util.now() - await subscriber.async_receive_event( - create_event_message( - create_battery_event_data(MOTION_EVENT), - timestamp=event_timestamp, - ) - ) - await hass.async_block_till_done() - - assert len(received_events) == 1 - received_event = received_events[0] - attachment = received_event.data.get("attachment") - assert attachment - assert list(attachment.keys()) == ["image", "video"] - assert attachment["image"].startswith("/api/nest/event_media") - assert attachment["image"].endswith("/thumbnail") - assert attachment["video"].startswith("/api/nest/event_media") - assert not attachment["video"].endswith("/thumbnail") - - # Download the attachment content and verify it works - for content_path in attachment.values(): - client = await hass_client() - response = await client.get(content_path) - assert response.status == HTTPStatus.OK, f"Response not matched: {response}" - await response.read() diff --git a/tests/components/netatmo/common.py b/tests/components/netatmo/common.py index 730cb0cb117..08c8679acf3 100644 --- a/tests/components/netatmo/common.py +++ b/tests/components/netatmo/common.py @@ -1,10 +1,9 @@ """Common methods used across tests for Netatmo.""" -from collections.abc import Iterator from contextlib import contextmanager import json from typing import Any -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from syrupy import SnapshotAssertion @@ -87,7 +86,7 @@ async def fake_post_request(*args: Any, **kwargs: Any): ) -async def fake_get_image(*args: Any, **kwargs: Any) -> bytes | str | None: +async def fake_get_image(*args: Any, **kwargs: Any) -> bytes | str: """Return fake data.""" if "endpoint" not in kwargs: return "{}" @@ -96,7 +95,6 @@ async def fake_get_image(*args: Any, **kwargs: Any) -> bytes | str | None: if endpoint in "snapshot_720.jpg": return b"test stream image bytes" - return None async def simulate_webhook(hass: HomeAssistant, webhook_id: str, response) -> None: @@ -111,7 +109,7 @@ async def simulate_webhook(hass: HomeAssistant, webhook_id: str, response) -> No @contextmanager -def selected_platforms(platforms: list[Platform]) -> Iterator[None]: +def selected_platforms(platforms: list[Platform]) -> AsyncMock: """Restrict loaded platforms to list given.""" with ( patch("homeassistant.components.netatmo.data_handler.PLATFORMS", platforms), diff --git a/tests/components/netatmo/conftest.py b/tests/components/netatmo/conftest.py index b79e6480711..d2e6c1fdc88 100644 --- a/tests/components/netatmo/conftest.py +++ b/tests/components/netatmo/conftest.py @@ -69,15 +69,6 @@ def mock_config_entry_fixture(hass: HomeAssistant) -> MockConfigEntry: "area_name": "Home max", "mode": "max", }, - "Home min": { - "lat_ne": 32.2345678, - "lon_ne": -117.1234567, - "lat_sw": 32.1234567, - "lon_sw": -117.2345678, - "show_on_map": True, - "area_name": "Home min", - "mode": "min", - }, } }, ) diff --git a/tests/components/netatmo/snapshots/test_climate.ambr b/tests/components/netatmo/snapshots/test_climate.ambr index aeae1fd71c7..b9a92882b9e 100644 --- a/tests/components/netatmo/snapshots/test_climate.ambr +++ b/tests/components/netatmo/snapshots/test_climate.ambr 
@@ -14,8 +14,8 @@ 'preset_modes': list([ 'away', 'boost', - 'frost_guard', - 'schedule', + 'Frost Guard', + 'Schedule', ]), 'target_temp_step': 0.5, }), @@ -41,7 +41,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': 'thermostat', + 'translation_key': None, 'unique_id': '222452125-DeviceType.OTM', 'unit_of_measurement': None, }) @@ -60,8 +60,8 @@ 'preset_modes': list([ 'away', 'boost', - 'frost_guard', - 'schedule', + 'Frost Guard', + 'Schedule', ]), 'supported_features': , 'target_temp_step': 0.5, @@ -89,8 +89,8 @@ 'preset_modes': list([ 'away', 'boost', - 'frost_guard', - 'schedule', + 'Frost Guard', + 'Schedule', ]), 'target_temp_step': 0.5, }), @@ -116,7 +116,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': 'thermostat', + 'translation_key': None, 'unique_id': '2940411577-DeviceType.NRV', 'unit_of_measurement': None, }) @@ -135,12 +135,12 @@ ]), 'max_temp': 30, 'min_temp': 7, - 'preset_mode': 'frost_guard', + 'preset_mode': 'Frost Guard', 'preset_modes': list([ 'away', 'boost', - 'frost_guard', - 'schedule', + 'Frost Guard', + 'Schedule', ]), 'selected_schedule': 'Default', 'supported_features': , @@ -170,8 +170,8 @@ 'preset_modes': list([ 'away', 'boost', - 'frost_guard', - 'schedule', + 'Frost Guard', + 'Schedule', ]), 'target_temp_step': 0.5, }), @@ -197,7 +197,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': 'thermostat', + 'translation_key': None, 'unique_id': '1002003001-DeviceType.BNS', 'unit_of_measurement': None, }) @@ -215,12 +215,12 @@ ]), 'max_temp': 30, 'min_temp': 7, - 'preset_mode': 'schedule', + 'preset_mode': 'Schedule', 'preset_modes': list([ 'away', 'boost', - 'frost_guard', - 'schedule', + 'Frost Guard', + 'Schedule', ]), 'selected_schedule': 'Default', 'supported_features': , @@ -250,8 +250,8 @@ 'preset_modes': list([ 'away', 'boost', - 'frost_guard', - 'schedule', + 'Frost Guard', + 'Schedule', ]), 'target_temp_step': 0.5, }), @@ -277,7 +277,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': 'thermostat', + 'translation_key': None, 'unique_id': '2833524037-DeviceType.NRV', 'unit_of_measurement': None, }) @@ -296,12 +296,12 @@ ]), 'max_temp': 30, 'min_temp': 7, - 'preset_mode': 'frost_guard', + 'preset_mode': 'Frost Guard', 'preset_modes': list([ 'away', 'boost', - 'frost_guard', - 'schedule', + 'Frost Guard', + 'Schedule', ]), 'selected_schedule': 'Default', 'supported_features': , @@ -332,8 +332,8 @@ 'preset_modes': list([ 'away', 'boost', - 'frost_guard', - 'schedule', + 'Frost Guard', + 'Schedule', ]), 'target_temp_step': 0.5, }), @@ -359,7 +359,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': 'thermostat', + 'translation_key': None, 'unique_id': '2746182631-DeviceType.NATherm1', 'unit_of_measurement': None, }) @@ -382,8 +382,8 @@ 'preset_modes': list([ 'away', 'boost', - 'frost_guard', - 'schedule', + 'Frost Guard', + 'Schedule', ]), 'selected_schedule': 'Default', 'supported_features': , diff --git a/tests/components/netatmo/snapshots/test_diagnostics.ambr b/tests/components/netatmo/snapshots/test_diagnostics.ambr index 463556ec657..8ce00279b83 100644 --- a/tests/components/netatmo/snapshots/test_diagnostics.ambr +++ b/tests/components/netatmo/snapshots/test_diagnostics.ambr @@ -608,8 +608,6 @@ 'webhook_id': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'netatmo', 
'minor_version': 1, 'options': dict({ @@ -632,15 +630,6 @@ 'mode': 'max', 'show_on_map': True, }), - 'Home min': dict({ - 'area_name': 'Home min', - 'lat_ne': '**REDACTED**', - 'lat_sw': '**REDACTED**', - 'lon_ne': '**REDACTED**', - 'lon_sw': '**REDACTED**', - 'mode': 'min', - 'show_on_map': True, - }), }), }), 'pref_disable_new_entities': False, diff --git a/tests/components/netatmo/snapshots/test_init.ambr b/tests/components/netatmo/snapshots/test_init.ambr index 60cb22d74f2..38a54f507a0 100644 --- a/tests/components/netatmo/snapshots/test_init.ambr +++ b/tests/components/netatmo/snapshots/test_init.ambr @@ -21,7 +21,6 @@ }), 'manufacturer': 'Bubbendorf', 'model': 'Roller Shutter', - 'model_id': None, 'name': 'Entrance Blinds', 'name_by_user': None, 'primary_config_entry': , @@ -53,7 +52,6 @@ }), 'manufacturer': 'Bubbendorf', 'model': 'Orientable Shutter', - 'model_id': None, 'name': 'Bubendorff blind', 'name_by_user': None, 'primary_config_entry': , @@ -85,7 +83,6 @@ }), 'manufacturer': 'Legrand', 'model': '2 wire light switch/dimmer', - 'model_id': None, 'name': 'Unknown 00:11:22:33:00:11:45:fe', 'name_by_user': None, 'primary_config_entry': , @@ -117,7 +114,6 @@ }), 'manufacturer': 'Smarther', 'model': 'Smarther with Netatmo', - 'model_id': None, 'name': 'Corridor', 'name_by_user': None, 'primary_config_entry': , @@ -149,7 +145,6 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Energy Meter', - 'model_id': None, 'name': 'Consumption meter', 'name_by_user': None, 'primary_config_entry': , @@ -181,7 +176,6 @@ }), 'manufacturer': 'Legrand', 'model': 'Light switch/dimmer with neutral', - 'model_id': None, 'name': 'Bathroom light', 'name_by_user': None, 'primary_config_entry': , @@ -213,7 +207,6 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', - 'model_id': None, 'name': 'Line 1', 'name_by_user': None, 'primary_config_entry': , @@ -245,7 +238,6 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', - 'model_id': None, 'name': 'Line 2', 'name_by_user': None, 'primary_config_entry': , @@ -277,7 +269,6 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', - 'model_id': None, 'name': 'Line 3', 'name_by_user': None, 'primary_config_entry': , @@ -309,7 +300,6 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', - 'model_id': None, 'name': 'Line 4', 'name_by_user': None, 'primary_config_entry': , @@ -341,7 +331,6 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', - 'model_id': None, 'name': 'Line 5', 'name_by_user': None, 'primary_config_entry': , @@ -373,7 +362,6 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', - 'model_id': None, 'name': 'Total', 'name_by_user': None, 'primary_config_entry': , @@ -405,7 +393,6 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', - 'model_id': None, 'name': 'Gas', 'name_by_user': None, 'primary_config_entry': , @@ -437,7 +424,6 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', - 'model_id': None, 'name': 'Hot water', 'name_by_user': None, 'primary_config_entry': , @@ -469,7 +455,6 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', - 'model_id': None, 'name': 'Cold water', 'name_by_user': None, 'primary_config_entry': , @@ -501,7 +486,6 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', - 'model_id': None, 'name': 'Écocompteur', 'name_by_user': None, 'primary_config_entry': , @@ -533,7 +517,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Camera', - 'model_id': None, 'name': 'Hall', 'name_by_user': None, 
'primary_config_entry': , @@ -565,7 +548,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Anemometer', - 'model_id': None, 'name': 'Villa Garden', 'name_by_user': None, 'primary_config_entry': , @@ -597,7 +579,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Outdoor Camera', - 'model_id': None, 'name': 'Front', 'name_by_user': None, 'primary_config_entry': , @@ -629,7 +610,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Video Doorbell', - 'model_id': None, 'name': 'Netatmo-Doorbell', 'name_by_user': None, 'primary_config_entry': , @@ -661,7 +641,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', - 'model_id': None, 'name': 'Kitchen', 'name_by_user': None, 'primary_config_entry': , @@ -693,7 +672,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', - 'model_id': None, 'name': 'Livingroom', 'name_by_user': None, 'primary_config_entry': , @@ -725,7 +703,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', - 'model_id': None, 'name': 'Baby Bedroom', 'name_by_user': None, 'primary_config_entry': , @@ -757,7 +734,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', - 'model_id': None, 'name': 'Bedroom', 'name_by_user': None, 'primary_config_entry': , @@ -789,7 +765,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', - 'model_id': None, 'name': 'Parents Bedroom', 'name_by_user': None, 'primary_config_entry': , @@ -821,7 +796,6 @@ }), 'manufacturer': 'Legrand', 'model': 'Plug', - 'model_id': None, 'name': 'Prise', 'name_by_user': None, 'primary_config_entry': , @@ -853,7 +827,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Outdoor Module', - 'model_id': None, 'name': 'Villa Outdoor', 'name_by_user': None, 'primary_config_entry': , @@ -885,7 +858,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Module', - 'model_id': None, 'name': 'Villa Bedroom', 'name_by_user': None, 'primary_config_entry': , @@ -917,7 +889,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Module', - 'model_id': None, 'name': 'Villa Bathroom', 'name_by_user': None, 'primary_config_entry': , @@ -949,7 +920,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Home Weather station', - 'model_id': None, 'name': 'Villa', 'name_by_user': None, 'primary_config_entry': , @@ -981,7 +951,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Rain Gauge', - 'model_id': None, 'name': 'Villa Rain', 'name_by_user': None, 'primary_config_entry': , @@ -1013,7 +982,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'OpenTherm Modulating Thermostat', - 'model_id': None, 'name': 'Bureau Modulate', 'name_by_user': None, 'primary_config_entry': , @@ -1045,7 +1013,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Thermostat', - 'model_id': None, 'name': 'Livingroom', 'name_by_user': None, 'primary_config_entry': , @@ -1077,7 +1044,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Valve', - 'model_id': None, 'name': 'Valve1', 'name_by_user': None, 'primary_config_entry': , @@ -1109,7 +1075,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Valve', - 'model_id': None, 'name': 'Valve2', 'name_by_user': None, 'primary_config_entry': , @@ -1141,7 +1106,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Climate', - 'model_id': None, 'name': 'MYHOME', 'name_by_user': None, 'primary_config_entry': , @@ -1173,7 +1137,6 @@ }), 'manufacturer': 'Netatmo', 'model': 'Public Weather station', - 'model_id': None, 'name': 'Home avg', 'name_by_user': None, 'primary_config_entry': , @@ -1205,7 +1168,6 @@ 
}), 'manufacturer': 'Netatmo', 'model': 'Public Weather station', - 'model_id': None, 'name': 'Home max', 'name_by_user': None, 'primary_config_entry': , @@ -1215,35 +1177,3 @@ 'via_device_id': None, }) # --- -# name: test_devices[netatmo-Home min] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'https://weathermap.netatmo.com/', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'netatmo', - 'Home min', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Netatmo', - 'model': 'Public Weather station', - 'model_id': None, - 'name': 'Home min', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- diff --git a/tests/components/netatmo/snapshots/test_sensor.ambr b/tests/components/netatmo/snapshots/test_sensor.ambr index ba18c2ca21a..6ab1e4b1e1a 100644 --- a/tests/components/netatmo/snapshots/test_sensor.ambr +++ b/tests/components/netatmo/snapshots/test_sensor.ambr @@ -1159,7 +1159,59 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'True', + 'state': 'unavailable', + }) +# --- +# name: test_entity[sensor.cold_water_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cold_water_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12:34:56:00:16:0e#8-12:34:56:00:16:0e#8-power', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.cold_water_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'power', + 'friendly_name': 'Cold water Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.cold_water_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', }) # --- # name: test_entity[sensor.consumption_meter_none-entry] @@ -1360,6 +1412,58 @@ 'state': 'unavailable', }) # --- +# name: test_entity[sensor.ecocompteur_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ecocompteur_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12:34:56:00:16:0e-12:34:56:00:16:0e-power', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.ecocompteur_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 
'power', + 'friendly_name': 'Écocompteur Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ecocompteur_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- # name: test_entity[sensor.gas_none-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1404,7 +1508,59 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'True', + 'state': 'unavailable', + }) +# --- +# name: test_entity[sensor.gas_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gas_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12:34:56:00:16:0e#6-12:34:56:00:16:0e#6-power', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.gas_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'power', + 'friendly_name': 'Gas Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gas_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', }) # --- # name: test_entity[sensor.home_avg_atmospheric_pressure-entry] @@ -2507,556 +2663,6 @@ 'state': '15', }) # --- -# name: test_entity[sensor.home_min_atmospheric_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_min_atmospheric_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Atmospheric pressure', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'Home-min-pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.home_min_atmospheric_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'atmospheric_pressure', - 'friendly_name': 'Home min Atmospheric pressure', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.home_min_atmospheric_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1005.4', - }) -# --- -# name: test_entity[sensor.home_min_gust_angle-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 
'entity_id': 'sensor.home_min_gust_angle', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Gust angle', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'gust_angle', - 'unique_id': 'Home-min-gustangle_value', - 'unit_of_measurement': '°', - }) -# --- -# name: test_entity[sensor.home_min_gust_angle-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'friendly_name': 'Home min Gust angle', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': '°', - }), - 'context': , - 'entity_id': 'sensor.home_min_gust_angle', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '217', - }) -# --- -# name: test_entity[sensor.home_min_gust_strength-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_min_gust_strength', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Gust strength', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'gust_strength', - 'unique_id': 'Home-min-guststrength', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.home_min_gust_strength-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'wind_speed', - 'friendly_name': 'Home min Gust strength', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.home_min_gust_strength', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '31', - }) -# --- -# name: test_entity[sensor.home_min_humidity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_min_humidity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Humidity', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'Home-min-humidity', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entity[sensor.home_min_humidity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'humidity', - 'friendly_name': 'Home min Humidity', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.home_min_humidity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '56', - }) -# --- -# name: test_entity[sensor.home_min_none-entry] - EntityRegistryEntrySnapshot({ - 
'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_min_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'Home-min-windangle_value', - 'unit_of_measurement': '°', - }) -# --- -# name: test_entity[sensor.home_min_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'friendly_name': 'Home min None', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': '°', - }), - 'context': , - 'entity_id': 'sensor.home_min_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17', - }) -# --- -# name: test_entity[sensor.home_min_precipitation-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_min_precipitation', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Precipitation', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'Home-min-rain', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.home_min_precipitation-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'precipitation', - 'friendly_name': 'Home min Precipitation', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.home_min_precipitation', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_entity[sensor.home_min_precipitation_last_hour-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_min_precipitation_last_hour', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Precipitation last hour', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'sum_rain_1', - 'unique_id': 'Home-min-sum_rain_1', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.home_min_precipitation_last_hour-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'precipitation', - 'friendly_name': 'Home min Precipitation last hour', - 'latitude': 32.17901225, - 
'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.home_min_precipitation_last_hour', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_entity[sensor.home_min_precipitation_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_min_precipitation_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Precipitation today', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'sum_rain_24', - 'unique_id': 'Home-min-sum_rain_24', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.home_min_precipitation_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'precipitation', - 'friendly_name': 'Home min Precipitation today', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.home_min_precipitation_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '9.999', - }) -# --- -# name: test_entity[sensor.home_min_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_min_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'Home-min-temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.home_min_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'temperature', - 'friendly_name': 'Home min Temperature', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.home_min_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '19.8', - }) -# --- -# name: test_entity[sensor.home_min_wind_speed-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_min_wind_speed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Wind speed', - 'platform': 'netatmo', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': None, - 'unique_id': 'Home-min-windstrength', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.home_min_wind_speed-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'wind_speed', - 'friendly_name': 'Home min Wind speed', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.home_min_wind_speed', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- # name: test_entity[sensor.hot_water_none-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3101,7 +2707,59 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'True', + 'state': 'unavailable', + }) +# --- +# name: test_entity[sensor.hot_water_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hot_water_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12:34:56:00:16:0e#7-12:34:56:00:16:0e#7-power', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.hot_water_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'power', + 'friendly_name': 'Hot water Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.hot_water_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', }) # --- # name: test_entity[sensor.kitchen_atmospheric_pressure-entry] @@ -3688,7 +3346,59 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'True', + 'state': 'unavailable', + }) +# --- +# name: test_entity[sensor.line_1_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.line_1_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12:34:56:00:16:0e#0-12:34:56:00:16:0e#0-power', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.line_1_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'power', + 'friendly_name': 'Line 1 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.line_1_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', }) # --- # name: test_entity[sensor.line_2_none-entry] @@ -3735,7 +3445,59 @@ 'last_changed': , 'last_reported': , 'last_updated': , 
- 'state': 'True', + 'state': 'unavailable', + }) +# --- +# name: test_entity[sensor.line_2_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.line_2_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12:34:56:00:16:0e#1-12:34:56:00:16:0e#1-power', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.line_2_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'power', + 'friendly_name': 'Line 2 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.line_2_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', }) # --- # name: test_entity[sensor.line_3_none-entry] @@ -3782,7 +3544,59 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'True', + 'state': 'unavailable', + }) +# --- +# name: test_entity[sensor.line_3_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.line_3_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12:34:56:00:16:0e#2-12:34:56:00:16:0e#2-power', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.line_3_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'power', + 'friendly_name': 'Line 3 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.line_3_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', }) # --- # name: test_entity[sensor.line_4_none-entry] @@ -3829,7 +3643,59 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'True', + 'state': 'unavailable', + }) +# --- +# name: test_entity[sensor.line_4_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.line_4_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12:34:56:00:16:0e#3-12:34:56:00:16:0e#3-power', + 
'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.line_4_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'power', + 'friendly_name': 'Line 4 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.line_4_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', }) # --- # name: test_entity[sensor.line_5_none-entry] @@ -3876,7 +3742,59 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'True', + 'state': 'unavailable', + }) +# --- +# name: test_entity[sensor.line_5_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.line_5_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12:34:56:00:16:0e#4-12:34:56:00:16:0e#4-power', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.line_5_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'power', + 'friendly_name': 'Line 5 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.line_5_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', }) # --- # name: test_entity[sensor.livingroom_atmospheric_pressure-entry] @@ -5154,7 +5072,59 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'True', + 'state': 'unavailable', + }) +# --- +# name: test_entity[sensor.total_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.total_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12:34:56:00:16:0e#5-12:34:56:00:16:0e#5-power', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.total_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'power', + 'friendly_name': 'Total Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.total_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', }) # --- # name: test_entity[sensor.valve1_battery-entry] diff --git a/tests/components/netatmo/test_camera.py b/tests/components/netatmo/test_camera.py index 43904ed8f71..c7398d64e1d 100644 --- a/tests/components/netatmo/test_camera.py +++ b/tests/components/netatmo/test_camera.py @@ -9,7 +9,7 @@ import pytest from syrupy import SnapshotAssertion from 
homeassistant.components import camera -from homeassistant.components.camera import CameraState +from homeassistant.components.camera import STATE_STREAMING from homeassistant.components.netatmo.const import ( NETATMO_EVENT, SERVICE_SET_CAMERA_LIGHT, @@ -176,7 +176,7 @@ async def test_camera_image_local( cam = hass.states.get(camera_entity_indoor) assert cam is not None - assert cam.state == CameraState.STREAMING + assert cam.state == STATE_STREAMING assert cam.name == "Hall" stream_source = await camera.async_get_stream_source(hass, camera_entity_indoor) @@ -204,7 +204,7 @@ async def test_camera_image_vpn( cam = hass.states.get(camera_entity_indoor) assert cam is not None - assert cam.state == CameraState.STREAMING + assert cam.state == STATE_STREAMING stream_source = await camera.async_get_stream_source(hass, camera_entity_indoor) assert stream_source == stream_uri diff --git a/tests/components/netatmo/test_climate.py b/tests/components/netatmo/test_climate.py index dc0312f7acd..4b908580346 100644 --- a/tests/components/netatmo/test_climate.py +++ b/tests/components/netatmo/test_climate.py @@ -282,7 +282,7 @@ async def test_service_preset_mode_frost_guard_thermostat( assert hass.states.get(climate_entity_livingroom).state == "auto" assert ( hass.states.get(climate_entity_livingroom).attributes["preset_mode"] - == "frost_guard" + == "Frost Guard" ) # Test service setting the preset mode to "frost guard" @@ -779,7 +779,7 @@ async def test_service_preset_mode_already_boost_valves( assert hass.states.get(climate_entity_entrada).state == "auto" assert ( hass.states.get(climate_entity_entrada).attributes["preset_mode"] - == "frost_guard" + == "Frost Guard" ) assert hass.states.get(climate_entity_entrada).attributes["temperature"] == 7 diff --git a/tests/components/netatmo/test_config_flow.py b/tests/components/netatmo/test_config_flow.py index 436f75b12ec..29a065c3be3 100644 --- a/tests/components/netatmo/test_config_flow.py +++ b/tests/components/netatmo/test_config_flow.py @@ -23,7 +23,7 @@ from homeassistant.helpers import config_entry_oauth2_flow from .conftest import CLIENT_ID -from tests.common import MockConfigEntry, start_reauth_flow +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @@ -282,7 +282,9 @@ async def test_reauth( assert len(mock_setup.mock_calls) == 1 # Should show form - result = await start_reauth_flow(hass, new_entry) + result = await hass.config_entries.flow.async_init( + "netatmo", context={"source": config_entries.SOURCE_REAUTH} + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/netatmo/test_device_trigger.py b/tests/components/netatmo/test_device_trigger.py index 99709572024..ad1e9bd8cb9 100644 --- a/tests/components/netatmo/test_device_trigger.py +++ b/tests/components/netatmo/test_device_trigger.py @@ -22,9 +22,16 @@ from tests.common import ( MockConfigEntry, async_capture_events, async_get_device_automations, + async_mock_service, ) +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.mark.parametrize( ("platform", "device_type", "event_types"), [ @@ -106,7 +113,7 @@ async def test_get_triggers( ) async def test_if_fires_on_event( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: 
er.EntityRegistry, platform, @@ -168,8 +175,8 @@ async def test_if_fires_on_event( ) await hass.async_block_till_done() assert len(events) == 1 - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == f"{event_type} - device - {device.id}" + assert len(calls) == 1 + assert calls[0].data["some"] == f"{event_type} - device - {device.id}" @pytest.mark.parametrize( @@ -189,7 +196,7 @@ async def test_if_fires_on_event( ) async def test_if_fires_on_event_legacy( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, platform, @@ -251,8 +258,8 @@ async def test_if_fires_on_event_legacy( ) await hass.async_block_till_done() assert len(events) == 1 - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == f"{event_type} - device - {device.id}" + assert len(calls) == 1 + assert calls[0].data["some"] == f"{event_type} - device - {device.id}" @pytest.mark.parametrize( @@ -268,7 +275,7 @@ async def test_if_fires_on_event_legacy( ) async def test_if_fires_on_event_with_subtype( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, platform, @@ -336,11 +343,8 @@ async def test_if_fires_on_event_with_subtype( ) await hass.async_block_till_done() assert len(events) == 1 - assert len(service_calls) == 1 - assert ( - service_calls[0].data["some"] - == f"{event_type} - {sub_type} - device - {device.id}" - ) + assert len(calls) == 1 + assert calls[0].data["some"] == f"{event_type} - {sub_type} - device - {device.id}" @pytest.mark.parametrize( diff --git a/tests/components/netatmo/test_diagnostics.py b/tests/components/netatmo/test_diagnostics.py index 7a0bf11c652..48f021295e1 100644 --- a/tests/components/netatmo/test_diagnostics.py +++ b/tests/components/netatmo/test_diagnostics.py @@ -42,11 +42,4 @@ async def test_entry_diagnostics( assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry - ) == snapshot( - exclude=paths( - "info.data.token.expires_at", - "info.entry_id", - "info.created_at", - "info.modified_at", - ) - ) + ) == snapshot(exclude=paths("info.data.token.expires_at", "info.entry_id")) diff --git a/tests/components/netatmo/test_sensor.py b/tests/components/netatmo/test_sensor.py index 2c47cdefa60..3c16e6e60f9 100644 --- a/tests/components/netatmo/test_sensor.py +++ b/tests/components/netatmo/test_sensor.py @@ -81,12 +81,6 @@ async def test_public_weather_sensor( assert hass.states.get(f"{prefix}humidity").state == "76" assert hass.states.get(f"{prefix}atmospheric_pressure").state == "1014.4" - prefix = "sensor.home_min_" - - assert hass.states.get(f"{prefix}temperature").state == "19.8" - assert hass.states.get(f"{prefix}humidity").state == "56" - assert hass.states.get(f"{prefix}atmospheric_pressure").state == "1005.4" - prefix = "sensor.home_avg_" assert hass.states.get(f"{prefix}temperature").state == "22.7" diff --git a/tests/components/netgear_lte/snapshots/test_init.ambr b/tests/components/netgear_lte/snapshots/test_init.ambr index ca65c17cc8e..e893d36a06e 100644 --- a/tests/components/netgear_lte/snapshots/test_init.ambr +++ b/tests/components/netgear_lte/snapshots/test_init.ambr @@ -21,7 +21,6 @@ }), 'manufacturer': 'Netgear', 'model': 'LM1200', - 'model_id': None, 'name': 'Netgear LM1200', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/network/conftest.py b/tests/components/network/conftest.py index 
d5fbb95a814..36d9c449d27 100644 --- a/tests/components/network/conftest.py +++ b/tests/components/network/conftest.py @@ -1,9 +1,9 @@ """Tests for the Network Configuration integration.""" -from collections.abc import Generator from unittest.mock import _patch import pytest +from typing_extensions import Generator @pytest.fixture(autouse=True) diff --git a/tests/components/network/test_init.py b/tests/components/network/test_init.py index dca31106dba..57a12868d0a 100644 --- a/tests/components/network/test_init.py +++ b/tests/components/network/test_init.py @@ -886,42 +886,3 @@ async def test_async_get_announce_addresses_no_source_ip(hass: HomeAssistant) -> "172.16.1.5", "fe80::dead:beef:dead:beef", ] - - -async def test_websocket_network_url( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test the network/url websocket command.""" - assert await async_setup_component(hass, "network", {}) - - client = await hass_ws_client(hass) - - with ( - patch( - "homeassistant.helpers.network._get_internal_url", return_value="internal" - ), - patch("homeassistant.helpers.network._get_cloud_url", return_value="cloud"), - ): - await client.send_json({"id": 1, "type": "network/url"}) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == { - "internal": "internal", - "external": "cloud", - "cloud": "cloud", - } - - # Test with no cloud URL - with ( - patch( - "homeassistant.helpers.network._get_internal_url", return_value="internal" - ), - ): - await client.send_json({"id": 2, "type": "network/url"}) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == { - "internal": "internal", - "external": None, - "cloud": None, - } diff --git a/tests/components/nexia/test_init.py b/tests/components/nexia/test_init.py index 4e5c5118d6b..5984a0af721 100644 --- a/tests/components/nexia/test_init.py +++ b/tests/components/nexia/test_init.py @@ -1,19 +1,15 @@ """The init tests for the nexia platform.""" -from unittest.mock import patch - import aiohttp from homeassistant.components.nexia.const import DOMAIN from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from .util import async_init_integration -from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator @@ -52,20 +48,3 @@ async def test_device_remove_devices( ) response = await client.remove_device(dead_device_entry.id, entry_id) assert response["success"] - - -async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None: - """Test migrating a 1.1 config entry to 1.2.""" - with patch("homeassistant.components.nexia.async_setup_entry", return_value=True): - entry = MockConfigEntry( - domain=DOMAIN, - data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"}, - version=1, - minor_version=1, - unique_id=123456, - ) - entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(entry.entry_id) - assert entry.version == 1 - assert entry.minor_version == 2 - assert entry.unique_id == "123456" diff --git a/tests/components/nexia/util.py b/tests/components/nexia/util.py index 1104ffad63d..98d5312f0a1 100644 --- a/tests/components/nexia/util.py +++ b/tests/components/nexia/util.py @@ -54,10 +54,7 @@ async def async_init_integration( text=load_fixture(set_fan_speed_fixture), ) entry = MockConfigEntry( - 
domain=DOMAIN, - data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"}, - minor_version=2, - unique_id="123456", + domain=DOMAIN, data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"} ) entry.add_to_hass(hass) diff --git a/tests/components/nextbus/__init__.py b/tests/components/nextbus/__init__.py index e0af11965c4..609e0bb574b 100644 --- a/tests/components/nextbus/__init__.py +++ b/tests/components/nextbus/__init__.py @@ -1,34 +1 @@ """The tests for the nexbus component.""" - -from homeassistant.components.nextbus.const import CONF_AGENCY, CONF_ROUTE, DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_STOP -from homeassistant.core import HomeAssistant - -from .const import VALID_AGENCY_TITLE, VALID_ROUTE_TITLE, VALID_STOP_TITLE - -from tests.common import MockConfigEntry - - -async def assert_setup_sensor( - hass: HomeAssistant, - config: dict[str, dict[str, str]], - expected_state=ConfigEntryState.LOADED, - route_title: str = VALID_ROUTE_TITLE, -) -> MockConfigEntry: - """Set up the sensor and assert it's been created.""" - unique_id = f"{config[DOMAIN][CONF_AGENCY]}_{config[DOMAIN][CONF_ROUTE]}_{config[DOMAIN][CONF_STOP]}" - config_entry = MockConfigEntry( - domain=DOMAIN, - data=config[DOMAIN], - title=f"{VALID_AGENCY_TITLE} {route_title} {VALID_STOP_TITLE}", - unique_id=unique_id, - ) - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is expected_state - - return config_entry diff --git a/tests/components/nextbus/conftest.py b/tests/components/nextbus/conftest.py index 3f687989313..84445905c2e 100644 --- a/tests/components/nextbus/conftest.py +++ b/tests/components/nextbus/conftest.py @@ -1,42 +1,22 @@ """Test helpers for NextBus tests.""" -from collections.abc import Generator from typing import Any -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock import pytest -from .const import BASIC_RESULTS - @pytest.fixture( params=[ + {"name": "Outbound", "stop": [{"tag": "5650"}]}, [ { "name": "Outbound", - "shortName": "Outbound", - "useForUi": True, - "stops": ["5184"], - }, - { - "name": "Outbound - Hidden", - "shortName": "Outbound - Hidden", - "useForUi": False, - "stops": ["5651"], - }, - ], - [ - { - "name": "Outbound", - "shortName": "Outbound", - "useForUi": True, - "stops": ["5184"], + "stop": [{"tag": "5650"}], }, { "name": "Inbound", - "shortName": "Inbound", - "useForUi": True, - "stops": ["5651"], + "stop": [{"tag": "5651"}], }, ], ] @@ -44,7 +24,7 @@ from .const import BASIC_RESULTS def route_config_direction(request: pytest.FixtureRequest) -> Any: """Generate alternative directions values. - When only one direction is returned, it is not returned as a list, but instead an object. + When only on edirection is returned, it is not returned as a list, but instead an object. 
""" return request.param @@ -55,97 +35,22 @@ def mock_nextbus_lists( ) -> MagicMock: """Mock all list functions in nextbus to test validate logic.""" instance = mock_nextbus.return_value - instance.agencies.return_value = [ - { - "id": "sfmta-cis", - "name": "San Francisco Muni CIS", - "shortName": "SF Muni CIS", - "region": "", - "website": "", - "logo": "", - "nxbs2RedirectUrl": "", - } - ] - - instance.routes.return_value = [ - { - "id": "F", - "rev": 1057, - "title": "F Market & Wharves", - "description": "7am-10pm daily", - "color": "", - "textColor": "", - "hidden": False, - "timestamp": "2024-06-23T03:06:58Z", - }, - { - "id": "G", - "rev": 1057, - "title": "F Market & Wharves", - "description": "7am-10pm daily", - "color": "", - "textColor": "", - "hidden": False, - "timestamp": "2024-06-23T03:06:58Z", - }, - ] - - def route_details_side_effect(agency: str, route: str) -> dict: - route = route.upper() - return { - "id": route, - "rev": 1057, - "title": f"{route} Market & Wharves", - "description": "7am-10pm daily", - "color": "", - "textColor": "", - "hidden": False, - "boundingBox": {}, - "stops": [ - { - "id": "5184", - "lat": 37.8071299, - "lon": -122.41732, - "name": "Jones St & Beach St", - "code": "15184", - "hidden": False, - "showDestinationSelector": True, - "directions": ["F_0_var1", "F_0_var0"], - }, - { - "id": "5651", - "lat": 37.8071299, - "lon": -122.41732, - "name": "Jones St & Beach St", - "code": "15651", - "hidden": False, - "showDestinationSelector": True, - "directions": ["F_0_var1", "F_0_var0"], - }, + instance.get_agency_list.return_value = { + "agency": [{"tag": "sf-muni", "title": "San Francisco Muni"}] + } + instance.get_route_list.return_value = { + "route": [{"tag": "F", "title": "F - Market & Wharves"}] + } + instance.get_route_config.return_value = { + "route": { + "stop": [ + {"tag": "5650", "title": "Market St & 7th St"}, + {"tag": "5651", "title": "Market St & 7th St"}, + # Error case test. 
Duplicate title with no unique direction + {"tag": "5652", "title": "Market St & 7th St"}, ], - "directions": route_config_direction, - "paths": [], - "timestamp": "2024-06-23T03:06:58Z", + "direction": route_config_direction, } - - instance.route_details.side_effect = route_details_side_effect + } return instance - - -@pytest.fixture -def mock_nextbus() -> Generator[MagicMock]: - """Create a mock py_nextbus module.""" - with patch("homeassistant.components.nextbus.coordinator.NextBusClient") as client: - yield client - - -@pytest.fixture -def mock_nextbus_predictions( - mock_nextbus: MagicMock, -) -> Generator[MagicMock]: - """Create a mock of NextBusClient predictions.""" - instance = mock_nextbus.return_value - instance.predictions_for_stop.return_value = BASIC_RESULTS - - return instance.predictions_for_stop diff --git a/tests/components/nextbus/const.py b/tests/components/nextbus/const.py deleted file mode 100644 index 66eb3635ca9..00000000000 --- a/tests/components/nextbus/const.py +++ /dev/null @@ -1,101 +0,0 @@ -"""Constants for NextBus tests.""" - -from homeassistant.components.nextbus.const import CONF_AGENCY, CONF_ROUTE, DOMAIN -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.const import CONF_STOP - -VALID_AGENCY = "sfmta-cis" -VALID_ROUTE = "F" -VALID_STOP = "5184" -VALID_COORDINATOR_KEY = f"{VALID_AGENCY}-{VALID_STOP}" -VALID_AGENCY_TITLE = "San Francisco Muni" -VALID_ROUTE_TITLE = "F-Market & Wharves" -VALID_STOP_TITLE = "Market St & 7th St" -SENSOR_ID = "sensor.san_francisco_muni_f_market_wharves_market_st_7th_st" - -ROUTE_2 = "G" -ROUTE_TITLE_2 = "G-Market & Wharves" -SENSOR_ID_2 = "sensor.san_francisco_muni_g_market_wharves_market_st_7th_st" - -PLATFORM_CONFIG = { - SENSOR_DOMAIN: { - "platform": DOMAIN, - CONF_AGENCY: VALID_AGENCY, - CONF_ROUTE: VALID_ROUTE, - CONF_STOP: VALID_STOP, - }, -} - - -CONFIG_BASIC = { - DOMAIN: { - CONF_AGENCY: VALID_AGENCY, - CONF_ROUTE: VALID_ROUTE, - CONF_STOP: VALID_STOP, - } -} - -CONFIG_BASIC_2 = { - DOMAIN: { - CONF_AGENCY: VALID_AGENCY, - CONF_ROUTE: ROUTE_2, - CONF_STOP: VALID_STOP, - } -} - -BASIC_RESULTS = [ - { - "route": { - "title": VALID_ROUTE_TITLE, - "id": VALID_ROUTE, - }, - "stop": { - "name": VALID_STOP_TITLE, - "id": VALID_STOP, - }, - "values": [ - {"minutes": 1, "timestamp": 1553807371000}, - {"minutes": 2, "timestamp": 1553807372000}, - {"minutes": 3, "timestamp": 1553807373000}, - {"minutes": 10, "timestamp": 1553807380000}, - ], - }, - { - "route": { - "title": ROUTE_TITLE_2, - "id": ROUTE_2, - }, - "stop": { - "name": VALID_STOP_TITLE, - "id": VALID_STOP, - }, - "values": [ - {"minutes": 90, "timestamp": 1553807379000}, - ], - }, -] - -NO_UPCOMING = [ - { - "route": { - "title": VALID_ROUTE_TITLE, - "id": VALID_ROUTE, - }, - "stop": { - "name": VALID_STOP_TITLE, - "id": VALID_STOP, - }, - "values": [], - }, - { - "route": { - "title": ROUTE_TITLE_2, - "id": ROUTE_2, - }, - "stop": { - "name": VALID_STOP_TITLE, - "id": VALID_STOP, - }, - "values": [], - }, -] diff --git a/tests/components/nextbus/test_config_flow.py b/tests/components/nextbus/test_config_flow.py index 4e5b933a189..0a64bc97d9a 100644 --- a/tests/components/nextbus/test_config_flow.py +++ b/tests/components/nextbus/test_config_flow.py @@ -1,9 +1,9 @@ """Test the NextBus config flow.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant import config_entries, setup from homeassistant.components.nextbus.const 
import CONF_AGENCY, CONF_ROUTE, DOMAIN @@ -44,7 +44,7 @@ async def test_user_config( result = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_AGENCY: "sfmta-cis", + CONF_AGENCY: "sf-muni", }, ) await hass.async_block_till_done() @@ -68,16 +68,16 @@ async def test_user_config( result = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_STOP: "5184", + CONF_STOP: "5650", }, ) await hass.async_block_till_done() assert result.get("type") is FlowResultType.CREATE_ENTRY assert result.get("data") == { - "agency": "sfmta-cis", + "agency": "sf-muni", "route": "F", - "stop": "5184", + "stop": "5650", } assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/nextbus/test_init.py b/tests/components/nextbus/test_init.py deleted file mode 100644 index d44b8d1ecc0..00000000000 --- a/tests/components/nextbus/test_init.py +++ /dev/null @@ -1,27 +0,0 @@ -"""The tests for the nexbus sensor component.""" - -from unittest.mock import MagicMock -from urllib.error import HTTPError - -from homeassistant.components.nextbus.coordinator import NextBusHTTPError -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from . import assert_setup_sensor -from .const import CONFIG_BASIC - - -async def test_setup_retry( - hass: HomeAssistant, - mock_nextbus: MagicMock, - mock_nextbus_lists: MagicMock, - mock_nextbus_predictions: MagicMock, -) -> None: - """Verify that a list of messages are rendered correctly.""" - - mock_nextbus_predictions.side_effect = NextBusHTTPError( - "failed", HTTPError("url", 500, "error", MagicMock(), None) - ) - await assert_setup_sensor( - hass, CONFIG_BASIC, expected_state=ConfigEntryState.SETUP_RETRY - ) diff --git a/tests/components/nextbus/test_sensor.py b/tests/components/nextbus/test_sensor.py index 04140a17c4f..3630ff88855 100644 --- a/tests/components/nextbus/test_sensor.py +++ b/tests/components/nextbus/test_sensor.py @@ -1,55 +1,221 @@ """The tests for the nexbus sensor component.""" from copy import deepcopy -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch from urllib.error import HTTPError -from freezegun.api import FrozenDateTimeFactory from py_nextbus.client import NextBusFormatError, NextBusHTTPError import pytest +from typing_extensions import Generator -from homeassistant.components.nextbus.const import DOMAIN +from homeassistant.components import sensor +from homeassistant.components.nextbus.const import CONF_AGENCY, CONF_ROUTE, DOMAIN from homeassistant.components.nextbus.coordinator import NextBusDataUpdateCoordinator from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_NAME +from homeassistant.const import CONF_NAME, CONF_STOP from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import UpdateFailed -from . 
import assert_setup_sensor -from .const import ( - BASIC_RESULTS, - CONFIG_BASIC, - CONFIG_BASIC_2, - NO_UPCOMING, - ROUTE_TITLE_2, - SENSOR_ID, - SENSOR_ID_2, - VALID_AGENCY, - VALID_COORDINATOR_KEY, - VALID_ROUTE_TITLE, - VALID_STOP_TITLE, -) +from tests.common import MockConfigEntry -from tests.common import async_fire_time_changed +VALID_AGENCY = "sf-muni" +VALID_ROUTE = "F" +VALID_STOP = "5650" +VALID_AGENCY_TITLE = "San Francisco Muni" +VALID_ROUTE_TITLE = "F-Market & Wharves" +VALID_STOP_TITLE = "Market St & 7th St" +SENSOR_ID = "sensor.san_francisco_muni_f_market_wharves_market_st_7th_st" + +PLATFORM_CONFIG = { + sensor.DOMAIN: { + "platform": DOMAIN, + CONF_AGENCY: VALID_AGENCY, + CONF_ROUTE: VALID_ROUTE, + CONF_STOP: VALID_STOP, + }, +} -async def test_predictions( +CONFIG_BASIC = { + DOMAIN: { + CONF_AGENCY: VALID_AGENCY, + CONF_ROUTE: VALID_ROUTE, + CONF_STOP: VALID_STOP, + } +} + +BASIC_RESULTS = { + "predictions": { + "agencyTitle": VALID_AGENCY_TITLE, + "agencyTag": VALID_AGENCY, + "routeTitle": VALID_ROUTE_TITLE, + "routeTag": VALID_ROUTE, + "stopTitle": VALID_STOP_TITLE, + "stopTag": VALID_STOP, + "direction": { + "title": "Outbound", + "prediction": [ + {"minutes": "1", "epochTime": "1553807371000"}, + {"minutes": "2", "epochTime": "1553807372000"}, + {"minutes": "3", "epochTime": "1553807373000"}, + {"minutes": "10", "epochTime": "1553807380000"}, + ], + }, + } +} + + +@pytest.fixture +def mock_nextbus() -> Generator[MagicMock]: + """Create a mock py_nextbus module.""" + with patch("homeassistant.components.nextbus.coordinator.NextBusClient") as client: + yield client + + +@pytest.fixture +def mock_nextbus_predictions( + mock_nextbus: MagicMock, +) -> Generator[MagicMock]: + """Create a mock of NextBusClient predictions.""" + instance = mock_nextbus.return_value + instance.get_predictions_for_multi_stops.return_value = BASIC_RESULTS + + return instance.get_predictions_for_multi_stops + + +async def assert_setup_sensor( + hass: HomeAssistant, + config: dict[str, dict[str, str]], + expected_state=ConfigEntryState.LOADED, +) -> MockConfigEntry: + """Set up the sensor and assert it's been created.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data=config[DOMAIN], + title=f"{VALID_AGENCY_TITLE} {VALID_ROUTE_TITLE} {VALID_STOP_TITLE}", + unique_id=f"{VALID_AGENCY}_{VALID_ROUTE}_{VALID_STOP}", + ) + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is expected_state + + return config_entry + + +async def test_message_dict( + hass: HomeAssistant, + mock_nextbus: MagicMock, + mock_nextbus_lists: MagicMock, + mock_nextbus_predictions: MagicMock, +) -> None: + """Verify that a single dict message is rendered correctly.""" + mock_nextbus_predictions.return_value = { + "predictions": { + "agencyTitle": VALID_AGENCY_TITLE, + "agencyTag": VALID_AGENCY, + "routeTitle": VALID_ROUTE_TITLE, + "routeTag": VALID_ROUTE, + "stopTitle": VALID_STOP_TITLE, + "stopTag": VALID_STOP, + "message": {"text": "Message"}, + "direction": { + "title": "Outbound", + "prediction": [ + {"minutes": "1", "epochTime": "1553807371000"}, + {"minutes": "2", "epochTime": "1553807372000"}, + {"minutes": "3", "epochTime": "1553807373000"}, + ], + }, + } + } + + await assert_setup_sensor(hass, CONFIG_BASIC) + + state = hass.states.get(SENSOR_ID) + assert state is not None + assert state.attributes["message"] == "Message" + + +async def test_message_list( hass: HomeAssistant, mock_nextbus: 
MagicMock, mock_nextbus_lists: MagicMock, mock_nextbus_predictions: MagicMock, ) -> None: """Verify that a list of messages are rendered correctly.""" + mock_nextbus_predictions.return_value = { + "predictions": { + "agencyTitle": VALID_AGENCY_TITLE, + "agencyTag": VALID_AGENCY, + "routeTitle": VALID_ROUTE_TITLE, + "routeTag": VALID_ROUTE, + "stopTitle": VALID_STOP_TITLE, + "stopTag": VALID_STOP, + "message": [{"text": "Message 1"}, {"text": "Message 2"}], + "direction": { + "title": "Outbound", + "prediction": [ + {"minutes": "1", "epochTime": "1553807371000"}, + {"minutes": "2", "epochTime": "1553807372000"}, + {"minutes": "3", "epochTime": "1553807373000"}, + ], + }, + } + } + + await assert_setup_sensor(hass, CONFIG_BASIC) + + state = hass.states.get(SENSOR_ID) + assert state is not None + assert state.attributes["message"] == "Message 1 -- Message 2" + + +async def test_direction_list( + hass: HomeAssistant, + mock_nextbus: MagicMock, + mock_nextbus_lists: MagicMock, + mock_nextbus_predictions: MagicMock, +) -> None: + """Verify that a list of messages are rendered correctly.""" + mock_nextbus_predictions.return_value = { + "predictions": { + "agencyTitle": VALID_AGENCY_TITLE, + "agencyTag": VALID_AGENCY, + "routeTitle": VALID_ROUTE_TITLE, + "routeTag": VALID_ROUTE, + "stopTitle": VALID_STOP_TITLE, + "stopTag": VALID_STOP, + "message": [{"text": "Message 1"}, {"text": "Message 2"}], + "direction": [ + { + "title": "Outbound", + "prediction": [ + {"minutes": "1", "epochTime": "1553807371000"}, + {"minutes": "2", "epochTime": "1553807372000"}, + {"minutes": "3", "epochTime": "1553807373000"}, + ], + }, + { + "title": "Outbound 2", + "prediction": {"minutes": "0", "epochTime": "1553807374000"}, + }, + ], + } + } await assert_setup_sensor(hass, CONFIG_BASIC) state = hass.states.get(SENSOR_ID) assert state is not None assert state.state == "2019-03-28T21:09:31+00:00" - assert state.attributes["agency"] == VALID_AGENCY + assert state.attributes["agency"] == VALID_AGENCY_TITLE assert state.attributes["route"] == VALID_ROUTE_TITLE assert state.attributes["stop"] == VALID_STOP_TITLE - assert state.attributes["upcoming"] == "1, 2, 3, 10" + assert state.attributes["direction"] == "Outbound, Outbound 2" + assert state.attributes["upcoming"] == "0, 1, 2, 3" @pytest.mark.parametrize( @@ -68,7 +234,7 @@ async def test_prediction_exceptions( ) -> None: """Test that some coodinator exceptions raise UpdateFailed exceptions.""" await assert_setup_sensor(hass, CONFIG_BASIC) - coordinator: NextBusDataUpdateCoordinator = hass.data[DOMAIN][VALID_COORDINATOR_KEY] + coordinator: NextBusDataUpdateCoordinator = hass.data[DOMAIN][VALID_AGENCY] mock_nextbus_predictions.side_effect = client_exception with pytest.raises(UpdateFailed): await coordinator._async_update_data() @@ -90,19 +256,27 @@ async def test_custom_name( assert state.name == "Custom Name" -async def test_verify_no_predictions( +@pytest.mark.parametrize( + "prediction_results", + [ + {}, + {"Error": "Failed"}, + ], +) +async def test_no_predictions( hass: HomeAssistant, mock_nextbus: MagicMock, - mock_nextbus_lists: MagicMock, mock_nextbus_predictions: MagicMock, + mock_nextbus_lists: MagicMock, + prediction_results: dict[str, str], ) -> None: - """Verify attributes are set despite no upcoming times.""" - mock_nextbus_predictions.return_value = [] + """Verify there are no exceptions when no predictions are returned.""" + mock_nextbus_predictions.return_value = prediction_results + await assert_setup_sensor(hass, CONFIG_BASIC) state = 
hass.states.get(SENSOR_ID) assert state is not None - assert "upcoming" not in state.attributes assert state.state == "unknown" @@ -113,61 +287,21 @@ async def test_verify_no_upcoming( mock_nextbus_predictions: MagicMock, ) -> None: """Verify attributes are set despite no upcoming times.""" - mock_nextbus_predictions.return_value = NO_UPCOMING + mock_nextbus_predictions.return_value = { + "predictions": { + "agencyTitle": VALID_AGENCY_TITLE, + "agencyTag": VALID_AGENCY, + "routeTitle": VALID_ROUTE_TITLE, + "routeTag": VALID_ROUTE, + "stopTitle": VALID_STOP_TITLE, + "stopTag": VALID_STOP, + "direction": {"title": "Outbound", "prediction": []}, + } + } + await assert_setup_sensor(hass, CONFIG_BASIC) state = hass.states.get(SENSOR_ID) assert state is not None - assert state.attributes["upcoming"] == "No upcoming predictions" assert state.state == "unknown" - - -async def test_unload_entry( - hass: HomeAssistant, - mock_nextbus: MagicMock, - mock_nextbus_lists: MagicMock, - mock_nextbus_predictions: MagicMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test that the sensor can be unloaded.""" - config_entry1 = await assert_setup_sensor(hass, CONFIG_BASIC) - await assert_setup_sensor(hass, CONFIG_BASIC_2, route_title=ROUTE_TITLE_2) - - # Verify the first sensor - state = hass.states.get(SENSOR_ID) - assert state is not None - assert state.state == "2019-03-28T21:09:31+00:00" - assert state.attributes["agency"] == VALID_AGENCY - assert state.attributes["route"] == VALID_ROUTE_TITLE - assert state.attributes["stop"] == VALID_STOP_TITLE - assert state.attributes["upcoming"] == "1, 2, 3, 10" - - # Verify the second sensor - state = hass.states.get(SENSOR_ID_2) - assert state is not None - assert state.state == "2019-03-28T21:09:39+00:00" - assert state.attributes["agency"] == VALID_AGENCY - assert state.attributes["route"] == ROUTE_TITLE_2 - assert state.attributes["stop"] == VALID_STOP_TITLE - assert state.attributes["upcoming"] == "90" - - # Update mock to return new predictions - new_predictions = deepcopy(BASIC_RESULTS) - new_predictions[1]["values"] = [{"minutes": 5, "timestamp": 1553807375000}] - mock_nextbus_predictions.return_value = new_predictions - - # Unload config entry 1 - await hass.config_entries.async_unload(config_entry1.entry_id) - await hass.async_block_till_done() - assert config_entry1.state is ConfigEntryState.NOT_LOADED - - # Skip ahead in time - freezer.tick(120) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - # Check update for new predictions - state = hass.states.get(SENSOR_ID_2) - assert state is not None - assert state.attributes["upcoming"] == "5" - assert state.state == "2019-03-28T21:09:35+00:00" + assert state.attributes["upcoming"] == "No upcoming predictions" diff --git a/tests/components/nextcloud/__init__.py b/tests/components/nextcloud/__init__.py index 4bc5a041650..e2102ed8c25 100644 --- a/tests/components/nextcloud/__init__.py +++ b/tests/components/nextcloud/__init__.py @@ -1,38 +1 @@ """Tests for the Nextcloud integration.""" - -from unittest.mock import Mock, patch - -from homeassistant.components.nextcloud.const import DOMAIN -from homeassistant.const import CONF_URL -from homeassistant.core import HomeAssistant - -from .const import MOCKED_ENTRY_ID - -from tests.common import MockConfigEntry - - -def mock_config_entry(config: dict) -> MockConfigEntry: - """Return a mocked config entry.""" - return MockConfigEntry( - domain=DOMAIN, title=config[CONF_URL], data=config, entry_id=MOCKED_ENTRY_ID - ) 
- - -async def init_integration( - hass: HomeAssistant, config: dict, data: dict -) -> MockConfigEntry: - """Set up the nextcloud integration.""" - entry = mock_config_entry(config) - entry.add_to_hass(hass) - - with ( - patch( - "homeassistant.components.nextcloud.NextcloudMonitor", - ) as mock_nextcloud_monitor, - ): - mock_nextcloud_monitor.update = Mock(return_value=True) - mock_nextcloud_monitor.return_value.data = data - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - return entry diff --git a/tests/components/nextcloud/conftest.py b/tests/components/nextcloud/conftest.py index 3234e3773b8..d6cd39e7fc8 100644 --- a/tests/components/nextcloud/conftest.py +++ b/tests/components/nextcloud/conftest.py @@ -1,9 +1,17 @@ """Fixtrues for the Nextcloud integration tests.""" -from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, Mock, patch import pytest +from typing_extensions import Generator + + +@pytest.fixture +def mock_nextcloud_monitor() -> Mock: + """Mock of NextcloudMonitor.""" + return Mock( + update=Mock(return_value=True), + ) @pytest.fixture diff --git a/tests/components/nextcloud/const.py b/tests/components/nextcloud/const.py deleted file mode 100644 index 2d328292b6f..00000000000 --- a/tests/components/nextcloud/const.py +++ /dev/null @@ -1,182 +0,0 @@ -"""Constants for nextcloud tests.""" - -from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL - -MOCKED_ENTRY_ID = "1234567890abcdef" - -VALID_CONFIG = { - CONF_URL: "https://my.nc_url.local", - CONF_USERNAME: "nc_user", - CONF_PASSWORD: "nc_pass", - CONF_VERIFY_SSL: True, -} - -NC_DATA = { - "nextcloud": { - "system": { - "version": "28.0.4.1", - "theme": "", - "enable_avatars": "yes", - "enable_previews": "yes", - "memcache.local": "\\OC\\Memcache\\APCu", - "memcache.distributed": "none", - "filelocking.enabled": "yes", - "memcache.locking": "none", - "debug": "no", - "freespace": 32769138688, - "cpuload": [2.06640625, 1.58447265625, 1.45263671875], - "mem_total": 30728192, - "mem_free": 6753280, - "swap_total": 10484736, - "swap_free": 10484736, - "apps": { - "num_installed": 41, - "num_updates_available": 0, - "app_updates": [], - }, - "update": {"lastupdatedat": 1713048517, "available": False}, - }, - "storage": { - "num_users": 2, - "num_files": 6783, - "num_storages": 4, - "num_storages_local": 1, - "num_storages_home": 2, - "num_storages_other": 1, - }, - "shares": { - "num_shares": 2, - "num_shares_user": 0, - "num_shares_groups": 0, - "num_shares_link": 2, - "num_shares_mail": 0, - "num_shares_room": 0, - "num_shares_link_no_password": 2, - "num_fed_shares_sent": 0, - "num_fed_shares_received": 1, - "permissions_3_17": 1, - "permissions_3_31": 1, - }, - }, - "server": { - "webserver": "Apache/2.4.57 (Debian)", - "php": { - "version": "8.2.18", - "memory_limit": 536870912, - "max_execution_time": 3600, - "upload_max_filesize": 536870912, - "opcache_revalidate_freq": 60, - "opcache": { - "opcache_enabled": True, - "cache_full": False, - "restart_pending": False, - "restart_in_progress": False, - "memory_usage": { - "used_memory": 72027112, - "free_memory": 62190616, - "wasted_memory": 0, - "current_wasted_percentage": 0, - }, - "interned_strings_usage": { - "buffer_size": 33554432, - "used_memory": 12630360, - "free_memory": 20924072, - "number_of_strings": 69242, - }, - "opcache_statistics": { - "num_cached_scripts": 1406, - "num_cached_keys": 2654, - "max_cached_keys": 
16229, - "hits": 9739971, - "start_time": 1722222008, - "last_restart_time": 0, - "oom_restarts": 0, - "hash_restarts": 0, - "manual_restarts": 0, - "misses": 1406, - "blacklist_misses": 0, - "blacklist_miss_ratio": 0, - "opcache_hit_rate": 99.9855667222406, - }, - "jit": { - "enabled": True, - "on": True, - "kind": 5, - "opt_level": 5, - "opt_flags": 6, - "buffer_size": 134217712, - "buffer_free": 133190688, - }, - }, - "apcu": { - "cache": { - "num_slots": 4099, - "ttl": 0, - "num_hits": 590911, - "num_misses": 55250, - "num_inserts": 55421, - "num_entries": 102, - "expunges": 0, - "start_time": 1722222008, - "mem_size": 175296, - "memory_type": "mmap", - }, - "sma": {"num_seg": 1, "seg_size": 33554312, "avail_mem": 33342368}, - }, - "extensions": [ - "Core", - "date", - "libxml", - "openssl", - "pcre", - "sqlite3", - "zlib", - "ctype", - "curl", - "dom", - "fileinfo", - "filter", - "hash", - "iconv", - "json", - "mbstring", - "SPL", - "session", - "PDO", - "pdo_sqlite", - "standard", - "posix", - "random", - "Reflection", - "Phar", - "SimpleXML", - "tokenizer", - "xml", - "xmlreader", - "xmlwriter", - "mysqlnd", - "apache2handler", - "apcu", - "bcmath", - "exif", - "ftp", - "gd", - "gmp", - "imagick", - "intl", - "ldap", - "memcached", - "pcntl", - "pdo_mysql", - "pdo_pgsql", - "redis", - "sodium", - "sysvsem", - "zip", - "Zend OPcache", - ], - }, - "database": {"type": "sqlite3", "version": "3.40.1", "size": "4784128"}, - }, - "activeUsers": {"last5minutes": 0, "last1hour": 0, "last24hours": 0}, -} diff --git a/tests/components/nextcloud/snapshots/test_binary_sensor.ambr b/tests/components/nextcloud/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 1831419af52..00000000000 --- a/tests/components/nextcloud/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,277 +0,0 @@ -# serializer version: 1 -# name: test_async_setup_entry[binary_sensor.my_nc_url_local_avatars_enabled-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.my_nc_url_local_avatars_enabled', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Avatars enabled', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_system_enable_avatars', - 'unique_id': '1234567890abcdef#system_enable_avatars', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[binary_sensor.my_nc_url_local_avatars_enabled-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local Avatars enabled', - }), - 'context': , - 'entity_id': 'binary_sensor.my_nc_url_local_avatars_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_async_setup_entry[binary_sensor.my_nc_url_local_debug_enabled-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.my_nc_url_local_debug_enabled', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), 
- 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Debug enabled', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_system_debug', - 'unique_id': '1234567890abcdef#system_debug', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[binary_sensor.my_nc_url_local_debug_enabled-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local Debug enabled', - }), - 'context': , - 'entity_id': 'binary_sensor.my_nc_url_local_debug_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_async_setup_entry[binary_sensor.my_nc_url_local_filelocking_enabled-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.my_nc_url_local_filelocking_enabled', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Filelocking enabled', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_system_filelocking_enabled', - 'unique_id': '1234567890abcdef#system_filelocking.enabled', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[binary_sensor.my_nc_url_local_filelocking_enabled-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local Filelocking enabled', - }), - 'context': , - 'entity_id': 'binary_sensor.my_nc_url_local_filelocking_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_active-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.my_nc_url_local_jit_active', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'JIT active', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_jit_on', - 'unique_id': '1234567890abcdef#jit_on', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_active-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local JIT active', - }), - 'context': , - 'entity_id': 'binary_sensor.my_nc_url_local_jit_active', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_enabled-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.my_nc_url_local_jit_enabled', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'JIT enabled', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_jit_enabled', - 'unique_id': '1234567890abcdef#jit_enabled', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_enabled-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local JIT enabled', - }), - 'context': , - 'entity_id': 'binary_sensor.my_nc_url_local_jit_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_async_setup_entry[binary_sensor.my_nc_url_local_previews_enabled-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.my_nc_url_local_previews_enabled', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Previews enabled', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_system_enable_previews', - 'unique_id': '1234567890abcdef#system_enable_previews', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[binary_sensor.my_nc_url_local_previews_enabled-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local Previews enabled', - }), - 'context': , - 'entity_id': 'binary_sensor.my_nc_url_local_previews_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/nextcloud/snapshots/test_config_flow.ambr b/tests/components/nextcloud/snapshots/test_config_flow.ambr index e87db0a25c0..3334478ba24 100644 --- a/tests/components/nextcloud/snapshots/test_config_flow.ambr +++ b/tests/components/nextcloud/snapshots/test_config_flow.ambr @@ -1,8 +1,16 @@ # serializer version: 1 +# name: test_import + dict({ + 'password': 'nc_pass', + 'url': 'nc_url', + 'username': 'nc_user', + 'verify_ssl': True, + }) +# --- # name: test_reauth dict({ 'password': 'other_password', - 'url': 'https://my.nc_url.local', + 'url': 'nc_url', 'username': 'other_user', 'verify_ssl': True, }) @@ -10,7 +18,7 @@ # name: test_user_create_entry dict({ 'password': 'nc_pass', - 'url': 'https://my.nc_url.local', + 'url': 'nc_url', 'username': 'nc_user', 'verify_ssl': True, }) diff --git a/tests/components/nextcloud/snapshots/test_sensor.ambr b/tests/components/nextcloud/snapshots/test_sensor.ambr deleted file mode 100644 index c49ba3496da..00000000000 --- a/tests/components/nextcloud/snapshots/test_sensor.ambr +++ /dev/null @@ -1,3973 +0,0 @@ -# serializer version: 1 -# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_5_minutes-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 
'sensor.my_nc_url_local_amount_of_active_users_last_5_minutes', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Amount of active users last 5 minutes', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_activeusers_last5minutes', - 'unique_id': '1234567890abcdef#activeUsers_last5minutes', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_5_minutes-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local Amount of active users last 5 minutes', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_5_minutes', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_day-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_day', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Amount of active users last day', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_activeusers_last24hours', - 'unique_id': '1234567890abcdef#activeUsers_last24hours', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_day-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local Amount of active users last day', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_day', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_hour-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_hour', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Amount of active users last hour', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_activeusers_last1hour', - 'unique_id': '1234567890abcdef#activeUsers_last1hour', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_hour-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local Amount of active users last hour', - 'state_class': , - }), - 'context': , - 'entity_id': 
'sensor.my_nc_url_local_amount_of_active_users_last_hour', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_files-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_nc_url_local_amount_of_files', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Amount of files', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_storage_num_files', - 'unique_id': '1234567890abcdef#storage_num_files', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_files-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local Amount of files', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_amount_of_files', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '6783', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_group_shares-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_amount_of_group_shares', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Amount of group shares', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_shares_num_shares_groups', - 'unique_id': '1234567890abcdef#shares_num_shares_groups', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_group_shares-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local Amount of group shares', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_amount_of_group_shares', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_link_shares-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_amount_of_link_shares', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Amount of link shares', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_shares_num_shares_link', - 'unique_id': '1234567890abcdef#shares_num_shares_link', - 'unit_of_measurement': None, - }) -# 
[Deleted Nextcloud sensor snapshot data: paired syrupy blocks
 "test_async_setup_entry[<entity_id>-entry]" (EntityRegistryEntrySnapshot) and
 "test_async_setup_entry[<entity_id>-state]" (StateSnapshot) for the
 my.nc_url.local device (unique_id prefix "1234567890abcdef#"), spanning the
 "Amount of link shares" state snapshot through the "Opcache used memory"
 snapshots. Covered sensors: share, storage and user counts; installed apps;
 APCu cache statistics (expunges, memory type/size, entries, hits, inserts,
 misses, slots, start time, TTL); CPU load over 1/5/15 minutes; free memory,
 swap, free space and database size/type/version; interned-strings usage;
 JIT buffer size/free and kind/opt flags/opt level; and PHP OPcache statistics
 (hit rate, hits, misses, cached keys/scripts, blacklist misses, restarts,
 wasted percentage, free/used memory). Each entry block records the registry
 metadata (original_name, translation_key, unique_id, unit_of_measurement,
 options) and each state block the recorded test state value.]
'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '72.027112', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_wasted_memory-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_opcache_wasted_memory', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Opcache wasted memory', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_server_php_opcache_memory_usage_wasted_memory', - 'unique_id': '1234567890abcdef#server_php_opcache_memory_usage_wasted_memory', - 'unit_of_measurement': , - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_wasted_memory-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'my.nc_url.local Opcache wasted memory', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_opcache_wasted_memory', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_php_max_execution_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_php_max_execution_time', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PHP max execution time', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_server_php_max_execution_time', - 'unique_id': '1234567890abcdef#server_php_max_execution_time', - 'unit_of_measurement': , - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_php_max_execution_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'my.nc_url.local PHP max execution time', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_php_max_execution_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3600', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_php_memory_limit-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_php_memory_limit', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 
'original_name': 'PHP memory limit', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_server_php_memory_limit', - 'unique_id': '1234567890abcdef#server_php_memory_limit', - 'unit_of_measurement': , - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_php_memory_limit-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'my.nc_url.local PHP memory limit', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_php_memory_limit', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '536.870912', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_php_upload_maximum_filesize-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_php_upload_maximum_filesize', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PHP upload maximum filesize', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_server_php_upload_max_filesize', - 'unique_id': '1234567890abcdef#server_php_upload_max_filesize', - 'unit_of_measurement': , - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_php_upload_maximum_filesize-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'my.nc_url.local PHP upload maximum filesize', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_php_upload_maximum_filesize', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '536.870912', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_php_version-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_php_version', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'PHP version', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_server_php_version', - 'unique_id': '1234567890abcdef#server_php_version', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_php_version-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local PHP version', - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_php_version', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8.2.18', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_sma_available_memory-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_sma_available_memory', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'SMA available memory', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_sma_avail_mem', - 'unique_id': '1234567890abcdef#sma_avail_mem', - 'unit_of_measurement': , - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_sma_available_memory-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'my.nc_url.local SMA available memory', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_sma_available_memory', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '33.342368', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_sma_number_of_segments-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_sma_number_of_segments', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'SMA number of segments', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_sma_num_seg', - 'unique_id': '1234567890abcdef#sma_num_seg', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_sma_number_of_segments-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local SMA number of segments', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_sma_number_of_segments', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_sma_segment_size-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_sma_segment_size', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'SMA segment size', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_sma_seg_size', - 'unique_id': '1234567890abcdef#sma_seg_size', - 'unit_of_measurement': , - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_sma_segment_size-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', 
- 'friendly_name': 'my.nc_url.local SMA segment size', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_sma_segment_size', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '33.554312', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_distributed-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_system_memcache_distributed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'System memcache distributed', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_system_memcache_distributed', - 'unique_id': '1234567890abcdef#system_memcache.distributed', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_distributed-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local System memcache distributed', - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_system_memcache_distributed', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'none', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_local-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_system_memcache_local', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'System memcache local', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_system_memcache_local', - 'unique_id': '1234567890abcdef#system_memcache.local', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_local-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local System memcache local', - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_system_memcache_local', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '\\OC\\Memcache\\APCu', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_locking-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_system_memcache_locking', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'System memcache locking', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_system_memcache_locking', - 
'unique_id': '1234567890abcdef#system_memcache.locking', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_locking-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local System memcache locking', - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_system_memcache_locking', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'none', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_system_theme-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_nc_url_local_system_theme', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'System theme', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_system_theme', - 'unique_id': '1234567890abcdef#system_theme', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_system_theme-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local System theme', - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_system_theme', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_system_version-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_nc_url_local_system_version', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'System version', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_system_version', - 'unique_id': '1234567890abcdef#system_version', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_system_version-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local System version', - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_system_version', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '28.0.4.1', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_total_memory-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_nc_url_local_total_memory', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Total memory', - 'platform': 
'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_system_mem_total', - 'unique_id': '1234567890abcdef#system_mem_total', - 'unit_of_measurement': , - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_total_memory-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'my.nc_url.local Total memory', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_total_memory', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '30.728192', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_total_swap_memory-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_nc_url_local_total_swap_memory', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Total swap memory', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_system_swap_total', - 'unique_id': '1234567890abcdef#system_swap_total', - 'unit_of_measurement': , - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_total_swap_memory-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'my.nc_url.local Total swap memory', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_total_swap_memory', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.484736', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_updates_available-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_nc_url_local_updates_available', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Updates available', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_system_apps_num_updates_available', - 'unique_id': '1234567890abcdef#system_apps_num_updates_available', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_updates_available-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local Updates available', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_updates_available', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_webserver-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 
'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_nc_url_local_webserver', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Webserver', - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'nextcloud_server_webserver', - 'unique_id': '1234567890abcdef#server_webserver', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[sensor.my_nc_url_local_webserver-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local Webserver', - }), - 'context': , - 'entity_id': 'sensor.my_nc_url_local_webserver', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Apache/2.4.57 (Debian)', - }) -# --- diff --git a/tests/components/nextcloud/snapshots/test_update.ambr b/tests/components/nextcloud/snapshots/test_update.ambr deleted file mode 100644 index 484106580b1..00000000000 --- a/tests/components/nextcloud/snapshots/test_update.ambr +++ /dev/null @@ -1,59 +0,0 @@ -# serializer version: 1 -# name: test_async_setup_entry[update.my_nc_url_local_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'update', - 'entity_category': , - 'entity_id': 'update.my_nc_url_local_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'nextcloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1234567890abcdef#update', - 'unit_of_measurement': None, - }) -# --- -# name: test_async_setup_entry[update.my_nc_url_local_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'auto_update': False, - 'display_precision': 0, - 'entity_picture': 'https://brands.home-assistant.io/_/nextcloud/icon.png', - 'friendly_name': 'my.nc_url.local None', - 'in_progress': False, - 'installed_version': '28.0.4.1', - 'latest_version': '28.0.4.1', - 'release_summary': None, - 'release_url': 'https://nextcloud.com/changelog/#28-0-4', - 'skipped_version': None, - 'supported_features': , - 'title': None, - 'update_percentage': None, - }), - 'context': , - 'entity_id': 'update.my_nc_url_local_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/nextcloud/test_binary_sensor.py b/tests/components/nextcloud/test_binary_sensor.py deleted file mode 100644 index dd53f4fb2cf..00000000000 --- a/tests/components/nextcloud/test_binary_sensor.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Tests for the Nextcloud binary sensors.""" - -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import init_integration -from .const import NC_DATA, VALID_CONFIG - -from tests.common import snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_async_setup_entry( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test a successful setup entry.""" - with patch( - "homeassistant.components.nextcloud.PLATFORMS", [Platform.BINARY_SENSOR] - ): - entry = await init_integration(hass, VALID_CONFIG, NC_DATA) - - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) diff --git a/tests/components/nextcloud/test_config_flow.py b/tests/components/nextcloud/test_config_flow.py index 16b6bf3bc04..9a881197cf9 100644 --- a/tests/components/nextcloud/test_config_flow.py +++ b/tests/components/nextcloud/test_config_flow.py @@ -1,6 +1,6 @@ """Tests for the Nextcloud config flow.""" -from unittest.mock import patch +from unittest.mock import Mock, patch from nextcloudmonitor import ( NextcloudMonitorAuthorizationError, @@ -10,21 +10,26 @@ from nextcloudmonitor import ( import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.nextcloud.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.components.nextcloud import DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .const import VALID_CONFIG - from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") +VALID_CONFIG = { + CONF_URL: "nc_url", + CONF_USERNAME: "nc_user", + CONF_PASSWORD: "nc_pass", + CONF_VERIFY_SSL: True, +} + async def test_user_create_entry( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, mock_nextcloud_monitor: Mock, snapshot: SnapshotAssertion ) -> None: """Test that the user step works.""" # start user flow @@ -80,7 +85,7 @@ async def test_user_create_entry( # test success with patch( "homeassistant.components.nextcloud.config_flow.NextcloudMonitor", - return_value=True, + return_value=mock_nextcloud_monitor, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -89,15 +94,17 @@ async def test_user_create_entry( await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "https://my.nc_url.local" + assert result["title"] == "nc_url" assert result["data"] == snapshot -async def test_user_already_configured(hass: HomeAssistant) -> None: +async def test_user_already_configured( + hass: HomeAssistant, mock_nextcloud_monitor: Mock +) -> None: """Test that errors are shown when duplicates are added.""" entry = MockConfigEntry( domain=DOMAIN, - title="https://my.nc_url.local", + title="nc_url", unique_id="nc_url", data=VALID_CONFIG, ) @@ -112,7 +119,7 @@ async def test_user_already_configured(hass: HomeAssistant) -> None: with patch( "homeassistant.components.nextcloud.config_flow.NextcloudMonitor", - return_value=True, + return_value=mock_nextcloud_monitor, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -124,18 +131,24 @@ async def test_user_already_configured(hass: HomeAssistant) -> None: assert result["reason"] == "already_configured" -async def test_reauth(hass: HomeAssistant, 
snapshot: SnapshotAssertion) -> None: +async def test_reauth( + hass: HomeAssistant, mock_nextcloud_monitor: Mock, snapshot: SnapshotAssertion +) -> None: """Test that the re-auth flow works.""" entry = MockConfigEntry( domain=DOMAIN, - title="https://my.nc_url.local", + title="nc_url", unique_id="nc_url", data=VALID_CONFIG, ) entry.add_to_hass(hass) # start reauth flow - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -193,7 +206,7 @@ async def test_reauth(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: # test success with patch( "homeassistant.components.nextcloud.config_flow.NextcloudMonitor", - return_value=True, + return_value=mock_nextcloud_monitor, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/nextcloud/test_coordinator.py b/tests/components/nextcloud/test_coordinator.py deleted file mode 100644 index 91f7e7967a3..00000000000 --- a/tests/components/nextcloud/test_coordinator.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Tests for the Nextcloud coordinator.""" - -from unittest.mock import Mock, patch - -from freezegun.api import FrozenDateTimeFactory -from nextcloudmonitor import ( - NextcloudMonitor, - NextcloudMonitorAuthorizationError, - NextcloudMonitorConnectionError, - NextcloudMonitorError, - NextcloudMonitorRequestError, -) -import pytest - -from homeassistant.components.nextcloud.const import DEFAULT_SCAN_INTERVAL -from homeassistant.const import STATE_UNAVAILABLE -from homeassistant.core import HomeAssistant - -from . import mock_config_entry -from .const import NC_DATA, VALID_CONFIG - -from tests.common import async_fire_time_changed - - -@pytest.mark.parametrize( - ("error"), - [ - (NextcloudMonitorAuthorizationError), - (NextcloudMonitorConnectionError), - (NextcloudMonitorRequestError), - ], -) -async def test_data_update( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, error: NextcloudMonitorError -) -> None: - """Test a coordinator data updates.""" - entry = mock_config_entry(VALID_CONFIG) - entry.add_to_hass(hass) - - with ( - patch( - "homeassistant.components.nextcloud.NextcloudMonitor", spec=NextcloudMonitor - ) as mock_nextcloud_monitor, - ): - mock_nextcloud_monitor.return_value.update = Mock( - return_value=True, - side_effect=[None, error, None], - ) - mock_nextcloud_monitor.return_value.data = NC_DATA - assert await hass.config_entries.async_setup(entry.entry_id) - - # Test successful setup and first data fetch - await hass.async_block_till_done(wait_background_tasks=True) - states = hass.states.async_all() - assert (state != STATE_UNAVAILABLE for state in states) - - # Test states get unavailable on error - freezer.tick(DEFAULT_SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - states = hass.states.async_all() - assert (state == STATE_UNAVAILABLE for state in states) - - # Test successful data fetch - freezer.tick(DEFAULT_SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - states = hass.states.async_all() - assert (state != STATE_UNAVAILABLE for state in states) diff --git a/tests/components/nextcloud/test_init.py b/tests/components/nextcloud/test_init.py deleted file mode 100644 index 70c8f545c6b..00000000000 --- 
a/tests/components/nextcloud/test_init.py +++ /dev/null @@ -1,95 +0,0 @@ -"""Tests for the Nextcloud init.""" - -from unittest.mock import Mock, patch - -from nextcloudmonitor import ( - NextcloudMonitorAuthorizationError, - NextcloudMonitorConnectionError, - NextcloudMonitorError, - NextcloudMonitorRequestError, -) -import pytest - -from homeassistant.components.nextcloud.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_URL, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import init_integration, mock_config_entry -from .const import MOCKED_ENTRY_ID, NC_DATA, VALID_CONFIG - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_async_setup_entry( - hass: HomeAssistant, -) -> None: - """Test a successful setup entry.""" - assert await init_integration(hass, VALID_CONFIG, NC_DATA) - - -async def test_unique_id_migration( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, -) -> None: - """Test migration of unique ids to stable ones.""" - - object_id = "my_nc_url_local_system_version" - entity_id = f"{Platform.SENSOR}.{object_id}" - - entry = mock_config_entry(VALID_CONFIG) - entry.add_to_hass(hass) - - entity = entity_registry.async_get_or_create( - Platform.SENSOR, - DOMAIN, - f"{VALID_CONFIG[CONF_URL]}#nextcloud_system_version", - suggested_object_id=object_id, - config_entry=entry, - ) - - # test old unique id - assert entity.entity_id == entity_id - assert entity.unique_id == f"{VALID_CONFIG[CONF_URL]}#nextcloud_system_version" - - with ( - patch( - "homeassistant.components.nextcloud.NextcloudMonitor" - ) as mock_nextcloud_monitor, - ): - mock_nextcloud_monitor.update = Mock(return_value=True) - mock_nextcloud_monitor.return_value.data = NC_DATA - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - # test migrated unique id - reg_entry = entity_registry.async_get(entity_id) - assert reg_entry.unique_id == f"{MOCKED_ENTRY_ID}#system_version" - - -@pytest.mark.parametrize( - ("exception", "expcted_entry_state"), - [ - (NextcloudMonitorAuthorizationError, ConfigEntryState.SETUP_ERROR), - (NextcloudMonitorConnectionError, ConfigEntryState.SETUP_RETRY), - (NextcloudMonitorRequestError, ConfigEntryState.SETUP_RETRY), - ], -) -async def test_setup_entry_errors( - hass: HomeAssistant, - exception: NextcloudMonitorError, - expcted_entry_state: ConfigEntryState, -) -> None: - """Test a successful setup entry.""" - - entry = mock_config_entry(VALID_CONFIG) - entry.add_to_hass(hass) - - with ( - patch( - "homeassistant.components.nextcloud.NextcloudMonitor", side_effect=exception - ), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.state == expcted_entry_state diff --git a/tests/components/nextcloud/test_sensor.py b/tests/components/nextcloud/test_sensor.py deleted file mode 100644 index 2ccaf2b7770..00000000000 --- a/tests/components/nextcloud/test_sensor.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Tests for the Nextcloud sensors.""" - -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import init_integration -from .const import NC_DATA, VALID_CONFIG - -from tests.common import snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_async_setup_entry( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test a successful setup entry.""" - with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.SENSOR]): - entry = await init_integration(hass, VALID_CONFIG, NC_DATA) - - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) diff --git a/tests/components/nextcloud/test_update.py b/tests/components/nextcloud/test_update.py deleted file mode 100644 index ed9b65ee55f..00000000000 --- a/tests/components/nextcloud/test_update.py +++ /dev/null @@ -1,77 +0,0 @@ -"""Tests for the Nextcloud update entity.""" - -from copy import deepcopy -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.const import STATE_OFF, STATE_ON, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import init_integration -from .const import NC_DATA, VALID_CONFIG - -from tests.common import snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_async_setup_entry( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test a successful setup entry.""" - with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): - entry = await init_integration(hass, VALID_CONFIG, NC_DATA) - - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) - - -async def test_setup_entity_without_update( - hass: HomeAssistant, snapshot: SnapshotAssertion -) -> None: - """Test update entity is created w/o available update.""" - with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): - await init_integration(hass, VALID_CONFIG, NC_DATA) - - states = hass.states.async_all() - assert len(states) == 1 - assert states[0].state == STATE_OFF - assert states[0].attributes["installed_version"] == "28.0.4.1" - assert states[0].attributes["latest_version"] == "28.0.4.1" - assert ( - states[0].attributes["release_url"] == "https://nextcloud.com/changelog/#28-0-4" - ) - - -async def test_setup_entity_with_update( - hass: HomeAssistant, snapshot: SnapshotAssertion -) -> None: - """Test update entity is created with available update.""" - data = deepcopy(NC_DATA) - data["nextcloud"]["system"]["update"]["available"] = True - data["nextcloud"]["system"]["update"]["available_version"] = "30.0.0.0" - with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): - await init_integration(hass, VALID_CONFIG, data) - - states = hass.states.async_all() - assert len(states) == 1 - assert states[0].state == STATE_ON - assert states[0].attributes["installed_version"] == "28.0.4.1" - assert states[0].attributes["latest_version"] == "30.0.0.0" - assert ( - states[0].attributes["release_url"] == "https://nextcloud.com/changelog/#30-0-0" - ) - - -async def test_setup_no_entity(hass: HomeAssistant) -> None: - """Test no update entity is created, when no data available.""" - data = deepcopy(NC_DATA) - data["nextcloud"]["system"].pop("update") # only nc<28.0.0 - with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): - await init_integration(hass, VALID_CONFIG, data) - - states = hass.states.async_all() - 
assert len(states) == 0 diff --git a/tests/components/nextdns/snapshots/test_binary_sensor.ambr b/tests/components/nextdns/snapshots/test_binary_sensor.ambr index 814b4c1ac16..bd4ecbba084 100644 --- a/tests/components/nextdns/snapshots/test_binary_sensor.ambr +++ b/tests/components/nextdns/snapshots/test_binary_sensor.ambr @@ -1,4 +1,1095 @@ # serializer version: 1 +# name: test_binary_Sensor[switch.fake_profile_ai_driven_threat_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AI-Driven threat detection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ai_threat_detection', + 'unique_id': 'xyz12_ai_threat_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_ai_driven_threat_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile AI-Driven threat detection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow affiliate & tracking links', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'allow_affiliate', + 'unique_id': 'xyz12_allow_affiliate', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_allow_affiliate_tracking_links-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Allow affiliate & tracking links', + }), + 'context': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Anonymized EDNS client subnet', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'anonymized_ecs', + 'unique_id': 'xyz12_anonymized_ecs', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_anonymized_edns_client_subnet-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', + }), + 'context': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_9gag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block 9GAG', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_9gag', + 'unique_id': 'xyz12_block_9gag', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_bypass_methods-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block bypass methods', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bypass_methods', + 'unique_id': 'xyz12_block_bypass_methods', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_bypass_methods-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block bypass methods', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block child sexual abuse material', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_csam', + 'unique_id': 'xyz12_block_csam', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_child_sexual_abuse_material-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block child sexual abuse material', + }), + 'context': , + 'entity_id': 
'switch.fake_profile_block_child_sexual_abuse_material', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block disguised third-party trackers', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disguised_trackers', + 'unique_id': 'xyz12_block_disguised_trackers', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_disguised_third_party_trackers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block disguised third-party trackers', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dynamic DNS hostnames', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ddns', + 'unique_id': 'xyz12_block_ddns', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_newly_registered_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block newly registered domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_nrd', + 'unique_id': 'xyz12_block_nrd', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_newly_registered_domains-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block newly registered domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_page-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_page', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block page', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_page', + 'unique_id': 'xyz12_block_page', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_page-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block page', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_page', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_parked_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block parked domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_parked_domains', + 'unique_id': 'xyz12_block_parked_domains', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_parked_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block parked domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cache_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache boost', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cache_boost', + 'unique_id': 'xyz12_cache_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cache_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cache boost', + }), + 'context': , + 'entity_id': 
'switch.fake_profile_cache_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cname_flattening-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CNAME flattening', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cname_flattening', + 'unique_id': 'xyz12_cname_flattening', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cname_flattening-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile CNAME flattening', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cryptojacking protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cryptojacking_protection', + 'unique_id': 'xyz12_cryptojacking_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cryptojacking protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_dns_rebinding_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS rebinding protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dns_rebinding_protection', + 'unique_id': 'xyz12_dns_rebinding_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_dns_rebinding_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS rebinding protection', + }), + 'context': , + 'entity_id': 
'switch.fake_profile_dns_rebinding_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domain generation algorithms protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dga_protection', + 'unique_id': 'xyz12_dga_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Domain generation algorithms protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_safesearch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force SafeSearch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'safesearch', + 'unique_id': 'xyz12_safesearch', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_safesearch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force SafeSearch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force YouTube restricted mode', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'youtube_restricted_mode', + 'unique_id': 'xyz12_youtube_restricted_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 
'Fake Profile Force YouTube restricted mode', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Google safe browsing', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'google_safe_browsing', + 'unique_id': 'xyz12_google_safe_browsing', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Google safe browsing', + }), + 'context': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IDN homograph attacks protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'idn_homograph_attacks_protection', + 'unique_id': 'xyz12_idn_homograph_attacks_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IDN homograph attacks protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'logs', + 'unique_id': 'xyz12_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake 
Profile Logs', + }), + 'context': , + 'entity_id': 'switch.fake_profile_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Threat intelligence feeds', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'threat_intelligence_feeds', + 'unique_id': 'xyz12_threat_intelligence_feeds', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Threat intelligence feeds', + }), + 'context': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Typosquatting protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'typosquatting_protection', + 'unique_id': 'xyz12_typosquatting_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Typosquatting protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_web3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_web3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Web3', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'web3', + 'unique_id': 'xyz12_web3', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_web3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Web3', + }), + 'context': , + 'entity_id': 'switch.fake_profile_web3', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_binary_sensor[binary_sensor.fake_profile_device_connection_status-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -93,3 +1184,1094 @@ 'state': 'off', }) # --- +# name: test_binary_sensor[switch.fake_profile_ai_driven_threat_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AI-Driven threat detection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ai_threat_detection', + 'unique_id': 'xyz12_ai_threat_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_ai_driven_threat_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile AI-Driven threat detection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow affiliate & tracking links', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'allow_affiliate', + 'unique_id': 'xyz12_allow_affiliate', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_allow_affiliate_tracking_links-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Allow affiliate & tracking links', + }), + 'context': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Anonymized EDNS client subnet', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'anonymized_ecs', + 'unique_id': 'xyz12_anonymized_ecs', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_binary_sensor[switch.fake_profile_anonymized_edns_client_subnet-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', + }), + 'context': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_9gag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block 9GAG', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_9gag', + 'unique_id': 'xyz12_block_9gag', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_bypass_methods-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block bypass methods', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bypass_methods', + 'unique_id': 'xyz12_block_bypass_methods', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_bypass_methods-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block bypass methods', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block child sexual abuse material', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_csam', + 'unique_id': 'xyz12_block_csam', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_child_sexual_abuse_material-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block child sexual abuse material', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_binary_sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block disguised third-party trackers', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disguised_trackers', + 'unique_id': 'xyz12_block_disguised_trackers', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_disguised_third_party_trackers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block disguised third-party trackers', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dynamic DNS hostnames', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ddns', + 'unique_id': 'xyz12_block_ddns', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_newly_registered_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block newly registered domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_nrd', + 'unique_id': 'xyz12_block_nrd', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_newly_registered_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block newly registered domains', + }), + 'context': , + 'entity_id': 
'switch.fake_profile_block_newly_registered_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_page-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_page', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block page', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_page', + 'unique_id': 'xyz12_block_page', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_page-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block page', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_page', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_parked_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block parked domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_parked_domains', + 'unique_id': 'xyz12_block_parked_domains', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_parked_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block parked domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cache_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache boost', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cache_boost', + 'unique_id': 'xyz12_cache_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cache_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cache boost', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_binary_sensor[switch.fake_profile_cname_flattening-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CNAME flattening', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cname_flattening', + 'unique_id': 'xyz12_cname_flattening', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cname_flattening-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile CNAME flattening', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cryptojacking protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cryptojacking_protection', + 'unique_id': 'xyz12_cryptojacking_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cryptojacking protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_dns_rebinding_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS rebinding protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dns_rebinding_protection', + 'unique_id': 'xyz12_dns_rebinding_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_dns_rebinding_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS rebinding protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domain generation algorithms protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dga_protection', + 'unique_id': 'xyz12_dga_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Domain generation algorithms protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_safesearch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force SafeSearch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'safesearch', + 'unique_id': 'xyz12_safesearch', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_safesearch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force SafeSearch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force YouTube restricted mode', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'youtube_restricted_mode', + 'unique_id': 'xyz12_youtube_restricted_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force YouTube restricted mode', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Google safe browsing', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'google_safe_browsing', + 'unique_id': 'xyz12_google_safe_browsing', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Google safe browsing', + }), + 'context': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IDN homograph attacks protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'idn_homograph_attacks_protection', + 'unique_id': 'xyz12_idn_homograph_attacks_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IDN homograph attacks protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'logs', + 'unique_id': 'xyz12_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Logs', + }), + 'context': , + 'entity_id': 'switch.fake_profile_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Threat intelligence feeds', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'threat_intelligence_feeds', + 'unique_id': 'xyz12_threat_intelligence_feeds', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Threat intelligence feeds', + }), + 'context': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Typosquatting protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'typosquatting_protection', + 'unique_id': 'xyz12_typosquatting_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Typosquatting protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_web3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_web3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Web3', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'web3', + 'unique_id': 'xyz12_web3', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_web3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Web3', + }), + 'context': , + 'entity_id': 'switch.fake_profile_web3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git 
a/tests/components/nextdns/snapshots/test_diagnostics.ambr b/tests/components/nextdns/snapshots/test_diagnostics.ambr index 827d6aeb6e5..5040c6e052e 100644 --- a/tests/components/nextdns/snapshots/test_diagnostics.ambr +++ b/tests/components/nextdns/snapshots/test_diagnostics.ambr @@ -7,8 +7,6 @@ 'profile_id': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'nextdns', 'entry_id': 'd9aa37407ddac7b964a99e86312288d6', 'minor_version': 1, diff --git a/tests/components/nextdns/snapshots/test_sensor.ambr b/tests/components/nextdns/snapshots/test_sensor.ambr index 14bebea53f8..34b40433e3b 100644 --- a/tests/components/nextdns/snapshots/test_sensor.ambr +++ b/tests/components/nextdns/snapshots/test_sensor.ambr @@ -1,4 +1,144 @@ # serializer version: 1 +# name: test_sensor[binary_sensor.fake_profile_device_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_connection_status', + 'unique_id': 'xyz12_this_device_nextdns_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[binary_sensor.fake_profile_device_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[binary_sensor.fake_profile_device_profile_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device profile connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_profile_connection_status', + 'unique_id': 'xyz12_this_device_profile_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[binary_sensor.fake_profile_device_profile_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device profile connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[button.fake_profile_clear_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.fake_profile_clear_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clear logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'clear_logs', + 'unique_id': 'xyz12_clear_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[button.fake_profile_clear_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Clear logs', + }), + 'context': , + 'entity_id': 'button.fake_profile_clear_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_sensor[sensor.fake_profile_dns_over_http_3_queries-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1249,3 +1389,3361 @@ 'state': '40', }) # --- +# name: test_sensor[switch.fake_profile_ai_driven_threat_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AI-Driven threat detection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ai_threat_detection', + 'unique_id': 'xyz12_ai_threat_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_ai_driven_threat_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile AI-Driven threat detection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow affiliate & tracking links', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'allow_affiliate', + 'unique_id': 'xyz12_allow_affiliate', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_allow_affiliate_tracking_links-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Allow affiliate & tracking links', + }), + 'context': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Anonymized EDNS client subnet', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'anonymized_ecs', + 'unique_id': 'xyz12_anonymized_ecs', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_anonymized_edns_client_subnet-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', + }), + 'context': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_9gag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block 9GAG', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_9gag', + 'unique_id': 'xyz12_block_9gag', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_9gag-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block 9GAG', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_amazon-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_amazon', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Amazon', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_amazon', + 'unique_id': 'xyz12_block_amazon', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_amazon-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Amazon', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_amazon', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_bereal-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bereal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block BeReal', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bereal', + 'unique_id': 'xyz12_block_bereal', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_bereal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block BeReal', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bereal', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_blizzard-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_blizzard', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Blizzard', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_blizzard', + 'unique_id': 'xyz12_block_blizzard', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_blizzard-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Blizzard', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_blizzard', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_bypass_methods-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block bypass methods', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bypass_methods', + 'unique_id': 'xyz12_block_bypass_methods', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_bypass_methods-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block bypass methods', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_chatgpt-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_chatgpt', + 'has_entity_name': True, 
+ 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block ChatGPT', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_chatgpt', + 'unique_id': 'xyz12_block_chatgpt', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_chatgpt-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block ChatGPT', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_chatgpt', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block child sexual abuse material', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_csam', + 'unique_id': 'xyz12_block_csam', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_child_sexual_abuse_material-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block child sexual abuse material', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_dailymotion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dailymotion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Dailymotion', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_dailymotion', + 'unique_id': 'xyz12_block_dailymotion', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_dailymotion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Dailymotion', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dailymotion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_dating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dating', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_dating', + 'unique_id': 'xyz12_block_dating', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_dating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dating', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_discord-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_discord', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Discord', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_discord', + 'unique_id': 'xyz12_block_discord', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_discord-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Discord', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_discord', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block disguised third-party trackers', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disguised_trackers', + 'unique_id': 'xyz12_block_disguised_trackers', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_disguised_third_party_trackers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block disguised third-party trackers', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_disney_plus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disney_plus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 
None, + 'original_name': 'Block Disney Plus', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disneyplus', + 'unique_id': 'xyz12_block_disneyplus', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_disney_plus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Disney Plus', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disney_plus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dynamic DNS hostnames', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ddns', + 'unique_id': 'xyz12_block_ddns', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_ebay-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_ebay', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block eBay', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ebay', + 'unique_id': 'xyz12_block_ebay', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_ebay-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block eBay', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_ebay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_facebook-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_facebook', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Facebook', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_facebook', 
+ 'unique_id': 'xyz12_block_facebook', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_facebook-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Facebook', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_facebook', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_fortnite-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_fortnite', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Fortnite', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_fortnite', + 'unique_id': 'xyz12_block_fortnite', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_fortnite-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Fortnite', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_fortnite', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_gambling-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_gambling', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block gambling', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_gambling', + 'unique_id': 'xyz12_block_gambling', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_gambling-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block gambling', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_gambling', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_google_chat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_google_chat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Google Chat', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_google_chat', + 'unique_id': 'xyz12_block_google_chat', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_google_chat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'Fake Profile Block Google Chat', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_google_chat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_hbo_max-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_hbo_max', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block HBO Max', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_hbomax', + 'unique_id': 'xyz12_block_hbomax', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_hbo_max-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block HBO Max', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_hbo_max', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_hulu-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_hulu', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Hulu', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xyz12_block_hulu', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_hulu-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Hulu', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_hulu', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_imgur-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_imgur', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Imgur', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_imgur', + 'unique_id': 'xyz12_block_imgur', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_imgur-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Imgur', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_imgur', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_instagram-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_instagram', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Instagram', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_instagram', + 'unique_id': 'xyz12_block_instagram', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_instagram-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Instagram', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_instagram', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_league_of_legends-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_league_of_legends', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block League of Legends', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_leagueoflegends', + 'unique_id': 'xyz12_block_leagueoflegends', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_league_of_legends-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block League of Legends', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_league_of_legends', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_mastodon-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_mastodon', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Mastodon', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_mastodon', + 'unique_id': 'xyz12_block_mastodon', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_mastodon-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Mastodon', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_mastodon', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_messenger-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': 
None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_messenger', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Messenger', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_messenger', + 'unique_id': 'xyz12_block_messenger', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_messenger-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Messenger', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_messenger', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_minecraft-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_minecraft', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Minecraft', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_minecraft', + 'unique_id': 'xyz12_block_minecraft', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_minecraft-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Minecraft', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_minecraft', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_netflix-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_netflix', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Netflix', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_netflix', + 'unique_id': 'xyz12_block_netflix', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_netflix-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Netflix', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_netflix', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_newly_registered_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block newly registered domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_nrd', + 'unique_id': 'xyz12_block_nrd', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_newly_registered_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block newly registered domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_online_gaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_online_gaming', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block online gaming', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_online_gaming', + 'unique_id': 'xyz12_block_online_gaming', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_online_gaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block online gaming', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_online_gaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_page-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_page', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block page', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_page', + 'unique_id': 'xyz12_block_page', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_page-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block page', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_page', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[switch.fake_profile_block_parked_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, 
+ 'original_icon': None, + 'original_name': 'Block parked domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_parked_domains', + 'unique_id': 'xyz12_block_parked_domains', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_parked_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block parked domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_pinterest-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_pinterest', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Pinterest', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_pinterest', + 'unique_id': 'xyz12_block_pinterest', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_pinterest-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Pinterest', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_pinterest', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_piracy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_piracy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block piracy', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_piracy', + 'unique_id': 'xyz12_block_piracy', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_piracy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block piracy', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_piracy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_playstation_network-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_playstation_network', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block PlayStation Network', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'block_playstation_network', + 'unique_id': 'xyz12_block_playstation_network', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_playstation_network-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block PlayStation Network', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_playstation_network', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_porn-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_porn', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block porn', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_porn', + 'unique_id': 'xyz12_block_porn', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_porn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block porn', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_porn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_prime_video-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_prime_video', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Prime Video', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_primevideo', + 'unique_id': 'xyz12_block_primevideo', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_prime_video-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Prime Video', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_prime_video', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_reddit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_reddit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Reddit', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_reddit', + 'unique_id': 'xyz12_block_reddit', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_reddit-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Reddit', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_reddit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_roblox-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_roblox', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Roblox', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_roblox', + 'unique_id': 'xyz12_block_roblox', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_roblox-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Roblox', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_roblox', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_signal-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_signal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Signal', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_signal', + 'unique_id': 'xyz12_block_signal', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_signal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Signal', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_signal', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_skype-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_skype', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Skype', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_skype', + 'unique_id': 'xyz12_block_skype', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_skype-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Skype', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_skype', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_sensor[switch.fake_profile_block_snapchat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_snapchat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Snapchat', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_snapchat', + 'unique_id': 'xyz12_block_snapchat', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_snapchat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Snapchat', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_snapchat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_social_networks-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_social_networks', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block social networks', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_social_networks', + 'unique_id': 'xyz12_block_social_networks', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_social_networks-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block social networks', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_social_networks', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_spotify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_spotify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Spotify', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_spotify', + 'unique_id': 'xyz12_block_spotify', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_spotify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Spotify', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_spotify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_steam-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_steam', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Steam', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_steam', + 'unique_id': 'xyz12_block_steam', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_steam-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Steam', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_steam', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_telegram-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_telegram', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Telegram', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_telegram', + 'unique_id': 'xyz12_block_telegram', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_telegram-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Telegram', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_telegram', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_tiktok-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_tiktok', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block TikTok', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tiktok', + 'unique_id': 'xyz12_block_tiktok', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_tiktok-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block TikTok', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tiktok', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_tinder-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_tinder', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Tinder', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tinder', + 'unique_id': 'xyz12_block_tinder', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_tinder-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Tinder', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tinder', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_tumblr-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_tumblr', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Tumblr', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tumblr', + 'unique_id': 'xyz12_block_tumblr', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_tumblr-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Tumblr', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tumblr', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_twitch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_twitch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Twitch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_twitch', + 'unique_id': 'xyz12_block_twitch', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_twitch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Twitch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_twitch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_video_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_video_streaming', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block video streaming', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, 
+ 'translation_key': 'block_video_streaming', + 'unique_id': 'xyz12_block_video_streaming', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_video_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block video streaming', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_video_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_vimeo-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_vimeo', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Vimeo', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_vimeo', + 'unique_id': 'xyz12_block_vimeo', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_vimeo-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Vimeo', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_vimeo', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_vk-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_vk', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block VK', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_vk', + 'unique_id': 'xyz12_block_vk', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_vk-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block VK', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_vk', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_whatsapp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_whatsapp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block WhatsApp', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_whatsapp', + 'unique_id': 'xyz12_block_whatsapp', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_whatsapp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 
'Fake Profile Block WhatsApp', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_whatsapp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_x_formerly_twitter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block X (formerly Twitter)', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_twitter', + 'unique_id': 'xyz12_block_twitter', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_x_formerly_twitter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block X (formerly Twitter)', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_xbox_live-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_xbox_live', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Xbox Live', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_xboxlive', + 'unique_id': 'xyz12_block_xboxlive', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_xbox_live-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Xbox Live', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_xbox_live', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_youtube-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_youtube', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block YouTube', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_youtube', + 'unique_id': 'xyz12_block_youtube', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_youtube-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block YouTube', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_youtube', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_zoom-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_zoom', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Zoom', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_zoom', + 'unique_id': 'xyz12_block_zoom', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_zoom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Zoom', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_zoom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_cache_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache boost', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cache_boost', + 'unique_id': 'xyz12_cache_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_cache_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cache boost', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_cname_flattening-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CNAME flattening', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cname_flattening', + 'unique_id': 'xyz12_cname_flattening', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_cname_flattening-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile CNAME flattening', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_cryptojacking_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cryptojacking protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cryptojacking_protection', + 'unique_id': 'xyz12_cryptojacking_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_cryptojacking_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cryptojacking protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_dns_rebinding_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS rebinding protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dns_rebinding_protection', + 'unique_id': 'xyz12_dns_rebinding_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_dns_rebinding_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS rebinding protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domain generation algorithms protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dga_protection', + 'unique_id': 'xyz12_dga_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_domain_generation_algorithms_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Domain generation algorithms protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_force_safesearch-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force SafeSearch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'safesearch', + 'unique_id': 'xyz12_safesearch', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_force_safesearch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force SafeSearch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[switch.fake_profile_force_youtube_restricted_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force YouTube restricted mode', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'youtube_restricted_mode', + 'unique_id': 'xyz12_youtube_restricted_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_force_youtube_restricted_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force YouTube restricted mode', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[switch.fake_profile_google_safe_browsing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Google safe browsing', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'google_safe_browsing', + 'unique_id': 'xyz12_google_safe_browsing', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_google_safe_browsing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Google safe browsing', + }), + 'context': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': 
None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IDN homograph attacks protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'idn_homograph_attacks_protection', + 'unique_id': 'xyz12_idn_homograph_attacks_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_idn_homograph_attacks_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IDN homograph attacks protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'logs', + 'unique_id': 'xyz12_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Logs', + }), + 'context': , + 'entity_id': 'switch.fake_profile_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_threat_intelligence_feeds-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Threat intelligence feeds', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'threat_intelligence_feeds', + 'unique_id': 'xyz12_threat_intelligence_feeds', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_threat_intelligence_feeds-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Threat intelligence feeds', + }), + 'context': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_typosquatting_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': 
None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Typosquatting protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'typosquatting_protection', + 'unique_id': 'xyz12_typosquatting_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_typosquatting_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Typosquatting protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_web3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_web3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Web3', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'web3', + 'unique_id': 'xyz12_web3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_web3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Web3', + }), + 'context': , + 'entity_id': 'switch.fake_profile_web3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/nextdns/snapshots/test_switch.ambr b/tests/components/nextdns/snapshots/test_switch.ambr index 3328e341a2e..8472f02e8c5 100644 --- a/tests/components/nextdns/snapshots/test_switch.ambr +++ b/tests/components/nextdns/snapshots/test_switch.ambr @@ -1,4 +1,1394 @@ # serializer version: 1 +# name: test_switch[binary_sensor.fake_profile_device_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_connection_status', + 'unique_id': 'xyz12_this_device_nextdns_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[binary_sensor.fake_profile_device_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[binary_sensor.fake_profile_device_profile_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device profile connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_profile_connection_status', + 'unique_id': 'xyz12_this_device_profile_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[binary_sensor.fake_profile_device_profile_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device profile connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[button.fake_profile_clear_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.fake_profile_clear_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clear logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'clear_logs', + 'unique_id': 'xyz12_clear_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[button.fake_profile_clear_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Clear logs', + }), + 'context': , + 'entity_id': 'button.fake_profile_clear_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_http_3_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTP/3 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh3_queries', + 'unique_id': 'xyz12_doh3_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_http_3_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , 
+ 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_http_3_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTP/3 queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh3_queries_ratio', + 'unique_id': 'xyz12_doh3_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_http_3_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '13.0', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_https_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTPS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh_queries', + 'unique_id': 'xyz12_doh_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_https_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTPS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_https_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTPS queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh_queries_ratio', + 'unique_id': 'xyz12_doh_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: 
test_switch[sensor.fake_profile_dns_over_https_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTPS queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17.4', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_quic_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-QUIC queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doq_queries', + 'unique_id': 'xyz12_doq_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_quic_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-QUIC queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_quic_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-QUIC queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doq_queries_ratio', + 'unique_id': 'xyz12_doq_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_quic_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-QUIC queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.7', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_tls_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-TLS queries', + 
'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dot_queries', + 'unique_id': 'xyz12_dot_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_tls_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-TLS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_tls_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-TLS queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dot_queries_ratio', + 'unique_id': 'xyz12_dot_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_tls_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-TLS queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26.1', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'all_queries', + 'unique_id': 'xyz12_all_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_blocked-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries blocked', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'blocked_queries', + 'unique_id': 'xyz12_blocked_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_blocked-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries blocked', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_blocked_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries blocked ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'blocked_queries_ratio', + 'unique_id': 'xyz12_blocked_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_blocked_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries blocked ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.0', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_relayed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_relayed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries relayed', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relayed_queries', + 'unique_id': 'xyz12_relayed_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_relayed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries relayed', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_relayed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_not_validated_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 
'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC not validated queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'not_validated_queries', + 'unique_id': 'xyz12_not_validated_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_not_validated_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC not validated queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_validated_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC validated queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'validated_queries', + 'unique_id': 'xyz12_validated_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_validated_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC validated queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '75', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_validated_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC validated queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'validated_queries_ratio', + 'unique_id': 'xyz12_validated_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_validated_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC validated queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '75.0', + }) +# --- +# name: 
test_switch[sensor.fake_profile_encrypted_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_encrypted_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Encrypted queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'encrypted_queries', + 'unique_id': 'xyz12_encrypted_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_encrypted_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Encrypted queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_encrypted_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_switch[sensor.fake_profile_encrypted_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Encrypted queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'encrypted_queries_ratio', + 'unique_id': 'xyz12_encrypted_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_encrypted_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Encrypted queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60.0', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv4_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv4_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv4 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv4_queries', + 'unique_id': 'xyz12_ipv4_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv4_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv4 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 
'sensor.fake_profile_ipv4_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '90', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv6_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv6_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv6 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv6_queries', + 'unique_id': 'xyz12_ipv6_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv6_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv6 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_ipv6_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv6_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv6 queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv6_queries_ratio', + 'unique_id': 'xyz12_ipv6_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv6_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv6 queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.0', + }) +# --- +# name: test_switch[sensor.fake_profile_tcp_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_tcp_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TCP queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tcp_queries', + 'unique_id': 'xyz12_tcp_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_tcp_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile TCP queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 
'context': , + 'entity_id': 'sensor.fake_profile_tcp_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_switch[sensor.fake_profile_tcp_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TCP queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tcp_queries_ratio', + 'unique_id': 'xyz12_tcp_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_tcp_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile TCP queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_switch[sensor.fake_profile_udp_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_udp_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'UDP queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'udp_queries', + 'unique_id': 'xyz12_udp_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_udp_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile UDP queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_udp_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_switch[sensor.fake_profile_udp_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_udp_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'UDP queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'udp_queries_ratio', + 'unique_id': 'xyz12_udp_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_udp_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile UDP queries ratio', + 'state_class': , + 
'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_udp_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.8', + }) +# --- +# name: test_switch[sensor.fake_profile_unencrypted_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_unencrypted_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Unencrypted queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'unencrypted_queries', + 'unique_id': 'xyz12_unencrypted_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_unencrypted_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Unencrypted queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_unencrypted_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- # name: test_switch[switch.fake_profile_ai_driven_threat_detection-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/nextdns/test_config_flow.py b/tests/components/nextdns/test_config_flow.py index 27a6cf1e7e0..9247288eebf 100644 --- a/tests/components/nextdns/test_config_flow.py +++ b/tests/components/nextdns/test_config_flow.py @@ -4,7 +4,6 @@ from unittest.mock import patch from nextdns import ApiError, InvalidApiKeyError import pytest -from tenacity import RetryError from homeassistant.components.nextdns.const import CONF_PROFILE_ID, DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -12,7 +11,7 @@ from homeassistant.const import CONF_API_KEY, CONF_PROFILE_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . import PROFILES, init_integration, mock_nextdns +from . 
import PROFILES, init_integration async def test_form_create_entry(hass: HomeAssistant) -> None: @@ -58,7 +57,6 @@ async def test_form_create_entry(hass: HomeAssistant) -> None: [ (ApiError("API Error"), "cannot_connect"), (InvalidApiKeyError, "invalid_api_key"), - (RetryError("Retry Error"), "cannot_connect"), (TimeoutError, "cannot_connect"), (ValueError, "unknown"), ], @@ -101,60 +99,3 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - - -async def test_reauth_successful(hass: HomeAssistant) -> None: - """Test starting a reauthentication flow.""" - entry = await init_integration(hass) - - result = await entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - with ( - patch( - "homeassistant.components.nextdns.NextDns.get_profiles", - return_value=PROFILES, - ), - mock_nextdns(), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_API_KEY: "new_api_key"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - - -@pytest.mark.parametrize( - ("exc", "base_error"), - [ - (ApiError("API Error"), "cannot_connect"), - (InvalidApiKeyError, "invalid_api_key"), - (RetryError("Retry Error"), "cannot_connect"), - (TimeoutError, "cannot_connect"), - (ValueError, "unknown"), - ], -) -async def test_reauth_errors( - hass: HomeAssistant, exc: Exception, base_error: str -) -> None: - """Test reauthentication flow with errors.""" - entry = await init_integration(hass) - - result = await entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - with patch( - "homeassistant.components.nextdns.NextDns.get_profiles", side_effect=exc - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_API_KEY: "new_api_key"}, - ) - await hass.async_block_till_done() - - assert result["errors"] == {"base": base_error} diff --git a/tests/components/nextdns/test_coordinator.py b/tests/components/nextdns/test_coordinator.py deleted file mode 100644 index f2b353ea2c5..00000000000 --- a/tests/components/nextdns/test_coordinator.py +++ /dev/null @@ -1,76 +0,0 @@ -"""Tests for NextDNS coordinator.""" - -from datetime import timedelta -from unittest.mock import patch - -from freezegun.api import FrozenDateTimeFactory -from nextdns import InvalidApiKeyError - -from homeassistant.components.nextdns.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.core import HomeAssistant - -from . 
import init_integration - -from tests.common import async_fire_time_changed - - -async def test_auth_error( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, -) -> None: - """Test authentication error when polling data.""" - entry = await init_integration(hass) - - assert entry.state is ConfigEntryState.LOADED - - freezer.tick(timedelta(minutes=10)) - with ( - patch( - "homeassistant.components.nextdns.NextDns.get_profiles", - side_effect=InvalidApiKeyError, - ), - patch( - "homeassistant.components.nextdns.NextDns.get_analytics_status", - side_effect=InvalidApiKeyError, - ), - patch( - "homeassistant.components.nextdns.NextDns.get_analytics_encryption", - side_effect=InvalidApiKeyError, - ), - patch( - "homeassistant.components.nextdns.NextDns.get_analytics_dnssec", - side_effect=InvalidApiKeyError, - ), - patch( - "homeassistant.components.nextdns.NextDns.get_analytics_ip_versions", - side_effect=InvalidApiKeyError, - ), - patch( - "homeassistant.components.nextdns.NextDns.get_analytics_protocols", - side_effect=InvalidApiKeyError, - ), - patch( - "homeassistant.components.nextdns.NextDns.get_settings", - side_effect=InvalidApiKeyError, - ), - patch( - "homeassistant.components.nextdns.NextDns.connection_status", - side_effect=InvalidApiKeyError, - ), - ): - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert entry.state is ConfigEntryState.LOADED - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - - flow = flows[0] - assert flow.get("step_id") == "reauth_confirm" - assert flow.get("handler") == DOMAIN - - assert "context" in flow - assert flow["context"].get("source") == SOURCE_REAUTH - assert flow["context"].get("entry_id") == entry.entry_id diff --git a/tests/components/nextdns/test_diagnostics.py b/tests/components/nextdns/test_diagnostics.py index 3bb1fc3ee67..7652bc4f03e 100644 --- a/tests/components/nextdns/test_diagnostics.py +++ b/tests/components/nextdns/test_diagnostics.py @@ -1,7 +1,6 @@ """Test NextDNS diagnostics.""" from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -19,6 +18,4 @@ async def test_entry_diagnostics( """Test config entry diagnostics.""" entry = await init_integration(hass) - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( - exclude=props("created_at", "modified_at") - ) + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot diff --git a/tests/components/nextdns/test_init.py b/tests/components/nextdns/test_init.py index 0a0bf3fc487..f7b85bb8a54 100644 --- a/tests/components/nextdns/test_init.py +++ b/tests/components/nextdns/test_init.py @@ -2,12 +2,10 @@ from unittest.mock import patch -from nextdns import ApiError, InvalidApiKeyError -import pytest -from tenacity import RetryError +from nextdns import ApiError from homeassistant.components.nextdns.const import CONF_PROFILE_ID, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_API_KEY, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -26,10 +24,7 @@ async def test_async_setup_entry(hass: HomeAssistant) -> None: assert state.state == "20.0" -@pytest.mark.parametrize( - "exc", [ApiError("API Error"), RetryError("Retry Error"), TimeoutError] -) -async def test_config_not_ready(hass: HomeAssistant, exc: Exception) -> None: +async def test_config_not_ready(hass: HomeAssistant) 
-> None: """Test for setup failure if the connection to the service fails.""" entry = MockConfigEntry( domain=DOMAIN, @@ -40,7 +35,7 @@ async def test_config_not_ready(hass: HomeAssistant, exc: Exception) -> None: with patch( "homeassistant.components.nextdns.NextDns.get_profiles", - side_effect=exc, + side_effect=ApiError("API Error"), ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -59,33 +54,3 @@ async def test_unload_entry(hass: HomeAssistant) -> None: assert entry.state is ConfigEntryState.NOT_LOADED assert not hass.data.get(DOMAIN) - - -async def test_config_auth_failed(hass: HomeAssistant) -> None: - """Test for setup failure if the auth fails.""" - entry = MockConfigEntry( - domain=DOMAIN, - title="Fake Profile", - unique_id="xyz12", - data={CONF_API_KEY: "fake_api_key", CONF_PROFILE_ID: "xyz12"}, - ) - entry.add_to_hass(hass) - - with patch( - "homeassistant.components.nextdns.NextDns.get_profiles", - side_effect=InvalidApiKeyError, - ): - await hass.config_entries.async_setup(entry.entry_id) - - assert entry.state is ConfigEntryState.SETUP_ERROR - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - - flow = flows[0] - assert flow.get("step_id") == "reauth_confirm" - assert flow.get("handler") == DOMAIN - - assert "context" in flow - assert flow["context"].get("source") == SOURCE_REAUTH - assert flow["context"].get("entry_id") == entry.entry_id diff --git a/tests/components/nextdns/test_switch.py b/tests/components/nextdns/test_switch.py index 6e344e34336..059585e9ffe 100644 --- a/tests/components/nextdns/test_switch.py +++ b/tests/components/nextdns/test_switch.py @@ -8,7 +8,6 @@ from aiohttp.client_exceptions import ClientConnectorError from nextdns import ApiError import pytest from syrupy import SnapshotAssertion -from tenacity import RetryError from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( @@ -95,15 +94,7 @@ async def test_switch_off(hass: HomeAssistant) -> None: mock_switch_on.assert_called_once() -@pytest.mark.parametrize( - "exc", - [ - ApiError("API Error"), - RetryError("Retry Error"), - TimeoutError, - ], -) -async def test_availability(hass: HomeAssistant, exc: Exception) -> None: +async def test_availability(hass: HomeAssistant) -> None: """Ensure that we mark the entities unavailable correctly when service causes an error.""" await init_integration(hass) @@ -115,7 +106,7 @@ async def test_availability(hass: HomeAssistant, exc: Exception) -> None: future = utcnow() + timedelta(minutes=10) with patch( "homeassistant.components.nextdns.NextDns.get_settings", - side_effect=exc, + side_effect=ApiError("API Error"), ): async_fire_time_changed(hass, future) await hass.async_block_till_done(wait_background_tasks=True) diff --git a/tests/components/nibe_heatpump/conftest.py b/tests/components/nibe_heatpump/conftest.py index 47b65772a24..c44875414e2 100644 --- a/tests/components/nibe_heatpump/conftest.py +++ b/tests/components/nibe_heatpump/conftest.py @@ -1,12 +1,12 @@ """Test configuration for Nibe Heat Pump.""" -from collections.abc import Generator from contextlib import ExitStack from unittest.mock import AsyncMock, Mock, patch from freezegun.api import FrozenDateTimeFactory from nibe.exceptions import CoilNotFoundException import pytest +from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/nibe_heatpump/snapshots/test_climate.ambr b/tests/components/nibe_heatpump/snapshots/test_climate.ambr 
index 2db9a813bff..fb3e2d1003b 100644 --- a/tests/components/nibe_heatpump/snapshots/test_climate.ambr +++ b/tests/components/nibe_heatpump/snapshots/test_climate.ambr @@ -97,6 +97,12 @@ 'state': 'unavailable', }) # --- +# name: test_active_accessory[Model.S320-s2-climate.climate_system_21][initial] + None +# --- +# name: test_active_accessory[Model.S320-s2-climate.climate_system_s1][initial] + None +# --- # name: test_basic[Model.F1155-s2-climate.climate_system_s2][cooling] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/nibe_heatpump/test_config_flow.py b/tests/components/nibe_heatpump/test_config_flow.py index de5f577fa7d..471f7f4c593 100644 --- a/tests/components/nibe_heatpump/test_config_flow.py +++ b/tests/components/nibe_heatpump/test_config_flow.py @@ -38,7 +38,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") async def _get_connection_form( hass: HomeAssistant, connection_type: str -) -> config_entries.ConfigFlowResult: +) -> FlowResultType: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} diff --git a/tests/components/nice_go/__init__.py b/tests/components/nice_go/__init__.py deleted file mode 100644 index 0208795a12c..00000000000 --- a/tests/components/nice_go/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Tests for the Nice G.O. integration.""" - -from unittest.mock import patch - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration( - hass: HomeAssistant, config_entry: MockConfigEntry, platforms: list[Platform] -) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - with patch( - "homeassistant.components.nice_go.PLATFORMS", - platforms, - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/nice_go/conftest.py b/tests/components/nice_go/conftest.py deleted file mode 100644 index cf85cd7e092..00000000000 --- a/tests/components/nice_go/conftest.py +++ /dev/null @@ -1,81 +0,0 @@ -"""Common fixtures for the Nice G.O. tests.""" - -from collections.abc import Generator -from datetime import datetime -from unittest.mock import AsyncMock, patch - -from nice_go import Barrier, BarrierState, ConnectionState -import pytest - -from homeassistant.components.nice_go.const import ( - CONF_REFRESH_TOKEN, - CONF_REFRESH_TOKEN_CREATION_TIME, - DOMAIN, -) -from homeassistant.const import CONF_EMAIL, CONF_PASSWORD - -from tests.common import MockConfigEntry, load_json_array_fixture - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.nice_go.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_nice_go() -> Generator[AsyncMock]: - """Mock a Nice G.O. 
client.""" - with ( - patch( - "homeassistant.components.nice_go.coordinator.NiceGOApi", - autospec=True, - ) as mock_client, - patch( - "homeassistant.components.nice_go.config_flow.NiceGOApi", - new=mock_client, - ), - ): - client = mock_client.return_value - client.authenticate.return_value = "test-refresh-token" - client.authenticate_refresh.return_value = None - client.id_token = None - client.get_all_barriers.return_value = [ - Barrier( - id=barrier["id"], - type=barrier["type"], - controlLevel=barrier["controlLevel"], - attr=barrier["attr"], - state=BarrierState( - **barrier["state"], - connectionState=ConnectionState(**barrier["connectionState"]) - if barrier.get("connectionState") - else None, - ), - api=client, - ) - for barrier in load_json_array_fixture("get_all_barriers.json", DOMAIN) - ] - yield client - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - entry_id="acefdd4b3a4a0911067d1cf51414201e", - title="test-email", - data={ - CONF_EMAIL: "test-email", - CONF_PASSWORD: "test-password", - CONF_REFRESH_TOKEN: "test-refresh-token", - CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), - }, - version=1, - unique_id="test-email", - ) diff --git a/tests/components/nice_go/fixtures/device_state_update.json b/tests/components/nice_go/fixtures/device_state_update.json deleted file mode 100644 index 53d89c5411b..00000000000 --- a/tests/components/nice_go/fixtures/device_state_update.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "data": { - "devicesStatesUpdateFeed": { - "receiver": "ORG/0:2372", - "item": { - "deviceId": "1", - "desired": "{\"key\":\"value\"}", - "reported": "{\"displayName\":\"Test Garage 1\",\"autoDisabled\":false,\"migrationStatus\":\"DONE\",\"deviceId\":\"1\",\"lightStatus\":\"0,100\",\"vcnMode\":false,\"deviceFwVersion\":\"1.2.3.4.5.6\",\"barrierStatus\":\"0,0,1,0,-1,0,3,0\"}", - "timestamp": 123, - "version": 123, - "connectionState": { - "connected": true, - "updatedTimestamp": "123", - "__typename": "DeviceConnectionState" - }, - "__typename": "DeviceState" - }, - "__typename": "DeviceStateUpdateNotice" - } - } -} diff --git a/tests/components/nice_go/fixtures/device_state_update_1.json b/tests/components/nice_go/fixtures/device_state_update_1.json deleted file mode 100644 index cc718e8b093..00000000000 --- a/tests/components/nice_go/fixtures/device_state_update_1.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "data": { - "devicesStatesUpdateFeed": { - "receiver": "ORG/0:2372", - "item": { - "deviceId": "2", - "desired": "{\"key\":\"value\"}", - "reported": "{\"displayName\":\"Test Garage 2\",\"autoDisabled\":false,\"migrationStatus\":\"DONE\",\"deviceId\":\"2\",\"lightStatus\":\"1,100\",\"vcnMode\":false,\"deviceFwVersion\":\"1.2.3.4.5.6\",\"barrierStatus\":\"1,100,2,0,-1,0,3,0\"}", - "timestamp": 123, - "version": 123, - "connectionState": { - "connected": true, - "updatedTimestamp": "123", - "__typename": "DeviceConnectionState" - }, - "__typename": "DeviceState" - }, - "__typename": "DeviceStateUpdateNotice" - } - } -} diff --git a/tests/components/nice_go/fixtures/get_all_barriers.json b/tests/components/nice_go/fixtures/get_all_barriers.json deleted file mode 100644 index 84799e0dd32..00000000000 --- a/tests/components/nice_go/fixtures/get_all_barriers.json +++ /dev/null @@ -1,120 +0,0 @@ -[ - { - "id": "1", - "type": "WallStation", - "controlLevel": "Owner", - "attr": [ - { - "key": "organization", - "value": "test_organization" - } - ], - "state": { - "deviceId": 
"1", - "desired": { "key": "value" }, - "reported": { - "displayName": "Test Garage 1", - "autoDisabled": false, - "migrationStatus": "DONE", - "deviceId": "1", - "lightStatus": "1,100", - "vcnMode": false, - "deviceFwVersion": "1.2.3.4.5.6", - "barrierStatus": "0,0,0,0,-1,0,3,0" - }, - "timestamp": null, - "version": null - }, - "connectionState": { - "connected": true, - "updatedTimestamp": "123" - } - }, - { - "id": "2", - "type": "WallStation", - "controlLevel": "Owner", - "attr": [ - { - "key": "organization", - "value": "test_organization" - } - ], - "state": { - "deviceId": "2", - "desired": { "key": "value" }, - "reported": { - "displayName": "Test Garage 2", - "autoDisabled": false, - "migrationStatus": "DONE", - "deviceId": "2", - "lightStatus": "0,100", - "vcnMode": true, - "deviceFwVersion": "1.2.3.4.5.6", - "barrierStatus": "1,100,0,0,-1,0,3,0" - }, - "timestamp": null, - "version": null - }, - "connectionState": { - "connected": true, - "updatedTimestamp": "123" - } - }, - { - "id": "3", - "type": "Mms100", - "controlLevel": "Owner", - "attr": [ - { - "key": "organization", - "value": "test_organization" - } - ], - "state": { - "deviceId": "3", - "desired": { "key": "value" }, - "reported": { - "displayName": "Test Garage 3", - "autoDisabled": false, - "migrationStatus": "DONE", - "deviceId": "3", - "deviceFwVersion": "1.2.3.4.5.6", - "barrierStatus": "1,100,0,0,1,0,0,0", - "radioConnected": 1, - "powerLevel": "LOW" - }, - "timestamp": null, - "version": null - }, - "connectionState": null - }, - { - "id": "4", - "type": "unknown-device-type", - "controlLevel": "Owner", - "attr": [ - { - "key": "organization", - "value": "test_organization" - } - ], - "state": { - "deviceId": "4", - "desired": { "key": "value" }, - "reported": { - "displayName": "Test Garage 4", - "autoDisabled": false, - "migrationStatus": "DONE", - "deviceId": "4", - "deviceFwVersion": "1.2.3.4.5.6", - "barrierStatus": "1,100,0,0,1,0,0,0", - "radioConnected": 1, - "powerLevel": "LOW" - }, - "timestamp": null, - "version": null - }, - "connectionState": null - } -] diff --git a/tests/components/nice_go/snapshots/test_cover.ambr b/tests/components/nice_go/snapshots/test_cover.ambr deleted file mode 100644 index 49b5267df56..00000000000 --- a/tests/components/nice_go/snapshots/test_cover.ambr +++ /dev/null @@ -1,193 +0,0 @@ -# serializer version: 1 -# name: test_covers[cover.test_garage_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_garage_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'nice_go', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '1', - 'unit_of_measurement': None, - }) -# --- -# name: test_covers[cover.test_garage_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'garage', - 'friendly_name': 'Test Garage 1', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_garage_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- -# name: test_covers[cover.test_garage_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_garage_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'nice_go', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '2', - 'unit_of_measurement': None, - }) -# --- -# name: test_covers[cover.test_garage_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'garage', - 'friendly_name': 'Test Garage 2', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_garage_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_covers[cover.test_garage_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_garage_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'nice_go', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '3', - 'unit_of_measurement': None, - }) -# --- -# name: test_covers[cover.test_garage_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'gate', - 'friendly_name': 'Test Garage 3', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_garage_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_covers[cover.test_garage_4-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_garage_4', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'nice_go', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '4', - 'unit_of_measurement': None, - }) -# --- -# name: test_covers[cover.test_garage_4-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'garage', - 'friendly_name': 'Test Garage 4', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_garage_4', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- diff --git a/tests/components/nice_go/snapshots/test_diagnostics.ambr b/tests/components/nice_go/snapshots/test_diagnostics.ambr deleted file mode 100644 index f4ba363a421..00000000000 --- a/tests/components/nice_go/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,68 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'coordinator_data': dict({ - '1': dict({ - 'barrier_status': 'closed', - 'connected': True, - 'fw_version': '1.2.3.4.5.6', - 'id': '1', - 'light_status': True, - 'name': 'Test Garage 1', - 
'type': 'WallStation', - 'vacation_mode': False, - }), - '2': dict({ - 'barrier_status': 'open', - 'connected': True, - 'fw_version': '1.2.3.4.5.6', - 'id': '2', - 'light_status': False, - 'name': 'Test Garage 2', - 'type': 'WallStation', - 'vacation_mode': True, - }), - '3': dict({ - 'barrier_status': 'open', - 'connected': True, - 'fw_version': '1.2.3.4.5.6', - 'id': '3', - 'light_status': None, - 'name': 'Test Garage 3', - 'type': 'Mms100', - 'vacation_mode': None, - }), - '4': dict({ - 'barrier_status': 'open', - 'connected': True, - 'fw_version': '1.2.3.4.5.6', - 'id': '4', - 'light_status': None, - 'name': 'Test Garage 4', - 'type': 'unknown-device-type', - 'vacation_mode': None, - }), - }), - 'entry': dict({ - 'data': dict({ - 'email': '**REDACTED**', - 'password': '**REDACTED**', - 'refresh_token': '**REDACTED**', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'nice_go', - 'entry_id': 'acefdd4b3a4a0911067d1cf51414201e', - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': '**REDACTED**', - 'unique_id': '**REDACTED**', - 'version': 1, - }), - }) -# --- diff --git a/tests/components/nice_go/snapshots/test_init.ambr b/tests/components/nice_go/snapshots/test_init.ambr deleted file mode 100644 index ff389568d1b..00000000000 --- a/tests/components/nice_go/snapshots/test_init.ambr +++ /dev/null @@ -1,16 +0,0 @@ -# serializer version: 1 -# name: test_on_data_none_parsed - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'garage', - 'friendly_name': 'Test Garage 1', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_garage_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- diff --git a/tests/components/nice_go/snapshots/test_light.ambr b/tests/components/nice_go/snapshots/test_light.ambr deleted file mode 100644 index 529df95a570..00000000000 --- a/tests/components/nice_go/snapshots/test_light.ambr +++ /dev/null @@ -1,111 +0,0 @@ -# serializer version: 1 -# name: test_data[light.test_garage_1_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.test_garage_1_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light', - 'platform': 'nice_go', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'light', - 'unique_id': '1', - 'unit_of_measurement': None, - }) -# --- -# name: test_data[light.test_garage_1_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'color_mode': , - 'friendly_name': 'Test Garage 1 Light', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.test_garage_1_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_data[light.test_garage_2_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, 
- 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.test_garage_2_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light', - 'platform': 'nice_go', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'light', - 'unique_id': '2', - 'unit_of_measurement': None, - }) -# --- -# name: test_data[light.test_garage_2_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'color_mode': None, - 'friendly_name': 'Test Garage 2 Light', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.test_garage_2_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/nice_go/test_config_flow.py b/tests/components/nice_go/test_config_flow.py deleted file mode 100644 index 9c25a640c75..00000000000 --- a/tests/components/nice_go/test_config_flow.py +++ /dev/null @@ -1,181 +0,0 @@ -"""Test the Nice G.O. config flow.""" - -from unittest.mock import AsyncMock - -from freezegun.api import FrozenDateTimeFactory -from nice_go import AuthFailedError -import pytest - -from homeassistant.components.nice_go.const import ( - CONF_REFRESH_TOKEN, - CONF_REFRESH_TOKEN_CREATION_TIME, - DOMAIN, -) -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_EMAIL, CONF_PASSWORD -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from . import setup_integration - -from tests.common import MockConfigEntry - - -async def test_form( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_setup_entry: AsyncMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert not result["errors"] - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: "test-email", - CONF_PASSWORD: "test-password", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "test-email" - assert result["data"][CONF_EMAIL] == "test-email" - assert result["data"][CONF_PASSWORD] == "test-password" - assert result["data"][CONF_REFRESH_TOKEN] == "test-refresh-token" - assert CONF_REFRESH_TOKEN_CREATION_TIME in result["data"] - assert result["result"].unique_id == "test-email" - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("side_effect", "expected_error"), - [(AuthFailedError, "invalid_auth"), (Exception, "unknown")], -) -async def test_form_exceptions( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_setup_entry: AsyncMock, - side_effect: Exception, - expected_error: str, -) -> None: - """Test we handle invalid auth.""" - mock_nice_go.authenticate.side_effect = side_effect - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: "test-email", - CONF_PASSWORD: "test-password", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": expected_error} - mock_nice_go.authenticate.side_effect = None - result = await hass.config_entries.flow.async_configure( - 
result["flow_id"], - { - CONF_EMAIL: "test-email", - CONF_PASSWORD: "test-password", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_duplicate_device( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_nice_go: AsyncMock, -) -> None: - """Test that duplicate devices are handled.""" - mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: "test-email", - CONF_PASSWORD: "test-password", - }, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_reauth( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_nice_go: AsyncMock, -) -> None: - """Test reauth flow.""" - - await setup_integration(hass, mock_config_entry, []) - - result = await mock_config_entry.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: "test-email", - CONF_PASSWORD: "other-fake-password", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - assert len(hass.config_entries.async_entries()) == 1 - - -@pytest.mark.parametrize( - ("side_effect", "expected_error"), - [(AuthFailedError, "invalid_auth"), (Exception, "unknown")], -) -async def test_reauth_exceptions( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_nice_go: AsyncMock, - side_effect: Exception, - expected_error: str, -) -> None: - """Test we handle invalid auth.""" - mock_nice_go.authenticate.side_effect = side_effect - await setup_integration(hass, mock_config_entry, []) - - result = await mock_config_entry.start_reauth_flow(hass) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: "test-email", - CONF_PASSWORD: "test-password", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": expected_error} - mock_nice_go.authenticate.side_effect = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: "test-email", - CONF_PASSWORD: "test-password", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - assert len(hass.config_entries.async_entries()) == 1 diff --git a/tests/components/nice_go/test_cover.py b/tests/components/nice_go/test_cover.py deleted file mode 100644 index f90c2d438b0..00000000000 --- a/tests/components/nice_go/test_cover.py +++ /dev/null @@ -1,156 +0,0 @@ -"""Test Nice G.O. cover.""" - -from unittest.mock import AsyncMock - -from aiohttp import ClientError -from freezegun.api import FrozenDateTimeFactory -from nice_go import ApiError -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.cover import ( - DOMAIN as COVER_DOMAIN, - SERVICE_CLOSE_COVER, - SERVICE_OPEN_COVER, - CoverState, -) -from homeassistant.components.nice_go.const import DOMAIN -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform - - -async def test_covers( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, -) -> None: - """Test that data gets parsed and returned appropriately.""" - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_open_cover( - hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry -) -> None: - """Test that opening the cover works as intended.""" - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: "cover.test_garage_2"}, - blocking=True, - ) - - assert mock_nice_go.open_barrier.call_count == 0 - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: "cover.test_garage_1"}, - blocking=True, - ) - - assert mock_nice_go.open_barrier.call_count == 1 - - -async def test_close_cover( - hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry -) -> None: - """Test that closing the cover works as intended.""" - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: "cover.test_garage_1"}, - blocking=True, - ) - - assert mock_nice_go.close_barrier.call_count == 0 - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: "cover.test_garage_2"}, - blocking=True, - ) - - assert mock_nice_go.close_barrier.call_count == 1 - - -async def test_update_cover_state( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test that closing the cover works as intended.""" - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - assert hass.states.get("cover.test_garage_1").state == CoverState.CLOSED - assert hass.states.get("cover.test_garage_2").state == CoverState.OPEN - - device_update = load_json_object_fixture("device_state_update.json", DOMAIN) - await mock_config_entry.runtime_data.on_data(device_update) - device_update_1 = load_json_object_fixture("device_state_update_1.json", DOMAIN) - await mock_config_entry.runtime_data.on_data(device_update_1) - - assert hass.states.get("cover.test_garage_1").state == CoverState.OPENING - assert hass.states.get("cover.test_garage_2").state == CoverState.CLOSING - - -@pytest.mark.parametrize( - ("action", "error", "entity_id", "expected_error"), - [ - ( - SERVICE_OPEN_COVER, - ApiError, - "cover.test_garage_1", - "Error opening the barrier", - ), - ( - SERVICE_CLOSE_COVER, - ClientError, - "cover.test_garage_2", - "Error closing the barrier", - ), - ], -) -async def test_cover_exceptions( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - action: str, - error: Exception, - entity_id: str, - expected_error: str, -) -> None: - """Test that closing the cover works as intended.""" - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - mock_nice_go.open_barrier.side_effect = error - mock_nice_go.close_barrier.side_effect = error - - with pytest.raises(HomeAssistantError, match=expected_error): - await 
hass.services.async_call( - COVER_DOMAIN, - action, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) diff --git a/tests/components/nice_go/test_diagnostics.py b/tests/components/nice_go/test_diagnostics.py deleted file mode 100644 index 5c8647f3d6e..00000000000 --- a/tests/components/nice_go/test_diagnostics.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Test diagnostics of Nice G.O..""" - -from unittest.mock import AsyncMock - -import pytest -from syrupy import SnapshotAssertion -from syrupy.filters import props - -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -@pytest.mark.freeze_time("2024-08-27") -async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test config entry diagnostics.""" - await setup_integration(hass, mock_config_entry, []) - result = await get_diagnostics_for_config_entry( - hass, hass_client, mock_config_entry - ) - assert result == snapshot( - exclude=props("created_at", "modified_at", "refresh_token_creation_time") - ) diff --git a/tests/components/nice_go/test_event.py b/tests/components/nice_go/test_event.py deleted file mode 100644 index 1c1b70532f4..00000000000 --- a/tests/components/nice_go/test_event.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Nice G.O. event tests.""" - -from unittest.mock import AsyncMock, MagicMock - -import pytest - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry - - -@pytest.mark.freeze_time("2024-08-19") -async def test_barrier_obstructed( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test barrier obstructed.""" - mock_nice_go.listen = MagicMock() - await setup_integration(hass, mock_config_entry, [Platform.EVENT]) - - await mock_nice_go.listen.call_args_list[3][0][1]({"deviceId": "1"}) - await hass.async_block_till_done() - - event_state = hass.states.get("event.test_garage_1_barrier_obstructed") - - assert event_state.state == "2024-08-19T00:00:00.000+00:00" - assert event_state.attributes["event_type"] == "barrier_obstructed" diff --git a/tests/components/nice_go/test_init.py b/tests/components/nice_go/test_init.py deleted file mode 100644 index 4eb3851516e..00000000000 --- a/tests/components/nice_go/test_init.py +++ /dev/null @@ -1,412 +0,0 @@ -"""Test Nice G.O. init.""" - -import asyncio -from datetime import timedelta -from unittest.mock import AsyncMock, MagicMock, patch - -from freezegun.api import FrozenDateTimeFactory -from nice_go import ApiError, AuthFailedError, Barrier, BarrierState -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.nice_go.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform -from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.helpers import issue_registry as ir - -from . 
import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed - - -async def test_unload_entry( - hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry -) -> None: - """Test the unload entry.""" - - await setup_integration(hass, mock_config_entry, []) - assert mock_config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED - - -async def test_setup_failure_api_error( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test reauth trigger setup.""" - - mock_nice_go.authenticate_refresh.side_effect = ApiError() - - await setup_integration(hass, mock_config_entry, []) - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_setup_failure_auth_failed( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test reauth trigger setup.""" - - mock_nice_go.authenticate_refresh.side_effect = AuthFailedError() - - await setup_integration(hass, mock_config_entry, []) - assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR - - assert any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) - - -async def test_firmware_update_required( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - issue_registry: ir.IssueRegistry, -) -> None: - """Test firmware update required.""" - - mock_nice_go.get_all_barriers.return_value = [ - Barrier( - id="test-device-id", - type="test-type", - controlLevel="test-control-level", - attr=[{"key": "test-attr", "value": "test-value"}], - state=BarrierState( - deviceId="test-device-id", - reported={ - "displayName": "test-display-name", - "migrationStatus": "NOT_STARTED", - }, - desired=None, - connectionState=None, - version=None, - timestamp=None, - ), - api=mock_nice_go, - ) - ] - - await setup_integration(hass, mock_config_entry, []) - - issue = issue_registry.async_get_issue( - DOMAIN, - "firmware_update_required_test-device-id", - ) - assert issue - - -async def test_update_refresh_token( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test updating refresh token.""" - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - assert mock_nice_go.authenticate_refresh.call_count == 1 - assert mock_nice_go.get_all_barriers.call_count == 1 - assert mock_nice_go.authenticate.call_count == 0 - - mock_nice_go.authenticate.return_value = "new-refresh-token" - freezer.tick(timedelta(days=30, seconds=1)) - async_fire_time_changed(hass) - assert await hass.config_entries.async_reload(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_nice_go.authenticate_refresh.call_count == 1 - assert mock_nice_go.authenticate.call_count == 1 - assert mock_nice_go.get_all_barriers.call_count == 2 - assert mock_config_entry.data["refresh_token"] == "new-refresh-token" - - -async def test_update_refresh_token_api_error( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test updating refresh token with error.""" - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - assert mock_nice_go.authenticate_refresh.call_count == 1 - 
assert mock_nice_go.get_all_barriers.call_count == 1 - assert mock_nice_go.authenticate.call_count == 0 - - mock_nice_go.authenticate.side_effect = ApiError - freezer.tick(timedelta(days=30)) - async_fire_time_changed(hass) - assert not await hass.config_entries.async_reload(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_nice_go.authenticate_refresh.call_count == 1 - assert mock_nice_go.authenticate.call_count == 1 - assert mock_nice_go.get_all_barriers.call_count == 1 - assert mock_config_entry.data["refresh_token"] == "test-refresh-token" - assert "API error" in caplog.text - - -async def test_update_refresh_token_auth_failed( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test updating refresh token with error.""" - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - assert mock_nice_go.authenticate_refresh.call_count == 1 - assert mock_nice_go.get_all_barriers.call_count == 1 - assert mock_nice_go.authenticate.call_count == 0 - - mock_nice_go.authenticate.side_effect = AuthFailedError - freezer.tick(timedelta(days=30)) - async_fire_time_changed(hass) - assert not await hass.config_entries.async_reload(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_nice_go.authenticate_refresh.call_count == 1 - assert mock_nice_go.authenticate.call_count == 1 - assert mock_nice_go.get_all_barriers.call_count == 1 - assert mock_config_entry.data["refresh_token"] == "test-refresh-token" - assert "Authentication failed" in caplog.text - assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR - assert any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) - - -async def test_client_listen_api_error( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - caplog: pytest.LogCaptureFixture, - freezer: FrozenDateTimeFactory, -) -> None: - """Test client listen with error.""" - - mock_nice_go.connect.side_effect = ApiError - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - assert "API error" in caplog.text - - mock_nice_go.connect.side_effect = None - - freezer.tick(timedelta(seconds=5)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert mock_nice_go.connect.call_count == 2 - - -async def test_on_data_none_parsed( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test on data with None parsed.""" - - mock_nice_go.listen = MagicMock() - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - await mock_nice_go.listen.call_args_list[1][0][1]( - { - "data": { - "devicesStatesUpdateFeed": { - "item": { - "deviceId": "1", - "desired": '{"key": "value"}', - "reported": '{"displayName":"test-display-name", "migrationStatus":"NOT_STARTED"}', - "connectionState": { - "connected": None, - "updatedTimestamp": None, - }, - "version": None, - "timestamp": None, - } - } - } - } - ) - - assert hass.states.get("cover.test_garage_1") == snapshot - - -async def test_on_connected( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test on connected.""" - - mock_nice_go.listen = MagicMock() - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - assert mock_nice_go.listen.call_count == 3 - - mock_nice_go.subscribe = AsyncMock() - await 
mock_nice_go.listen.call_args_list[0][0][1]() - - assert mock_nice_go.subscribe.call_count == 1 - - -async def test_on_connection_lost( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test on connection lost.""" - - mock_nice_go.listen = MagicMock() - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - assert mock_nice_go.listen.call_count == 3 - - with patch("homeassistant.components.nice_go.coordinator.RECONNECT_DELAY", 0): - await mock_nice_go.listen.call_args_list[2][0][1]( - {"exception": ValueError("test")} - ) - - assert hass.states.get("cover.test_garage_1").state == "unavailable" - - # Now fire connected - - mock_nice_go.subscribe = AsyncMock() - - await mock_nice_go.listen.call_args_list[0][0][1]() - - assert mock_nice_go.subscribe.call_count == 1 - - assert hass.states.get("cover.test_garage_1").state == "closed" - - -async def test_on_connection_lost_reconnect( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test on connection lost with reconnect.""" - - mock_nice_go.listen = MagicMock() - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - assert mock_nice_go.listen.call_count == 3 - - assert hass.states.get("cover.test_garage_1").state == "closed" - - with patch("homeassistant.components.nice_go.coordinator.RECONNECT_DELAY", 0): - await mock_nice_go.listen.call_args_list[2][0][1]( - {"exception": ValueError("test")} - ) - - assert hass.states.get("cover.test_garage_1").state == "unavailable" - - -async def test_no_connection_state( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test parsing barrier with no connection state.""" - - mock_nice_go.listen = MagicMock() - - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - assert mock_nice_go.listen.call_count == 3 - - await mock_nice_go.listen.call_args_list[1][0][1]( - { - "data": { - "devicesStatesUpdateFeed": { - "item": { - "deviceId": "1", - "desired": '{"key": "value"}', - "reported": '{"displayName":"Test Garage 1", "migrationStatus":"DONE", "barrierStatus": "1,100,0", "deviceFwVersion": "1.0.0", "lightStatus": "1,100", "vcnMode": false}', - "connectionState": None, - "version": None, - "timestamp": None, - } - } - } - } - ) - - assert hass.states.get("cover.test_garage_1").state == "open" - - -async def test_connection_attempts_exhausted( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test connection attempts exhausted.""" - - mock_nice_go.connect.side_effect = ApiError - - with ( - patch("homeassistant.components.nice_go.coordinator.RECONNECT_ATTEMPTS", 1), - patch("homeassistant.components.nice_go.coordinator.RECONNECT_DELAY", 0), - ): - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - - assert "API error" in caplog.text - assert "Error requesting Nice G.O. 
data" in caplog.text - - -async def test_reconnect_hass_stopping( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test reconnect with hass stopping.""" - - mock_nice_go.listen = MagicMock() - mock_nice_go.connect.side_effect = ApiError - - wait_for_hass = asyncio.Event() - - @callback - def _async_ha_stop(event: Event) -> None: - """Stop reconnecting if hass is stopping.""" - wait_for_hass.set() - - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_ha_stop) - - with ( - patch("homeassistant.components.nice_go.coordinator.RECONNECT_DELAY", 0.1), - patch("homeassistant.components.nice_go.coordinator.RECONNECT_ATTEMPTS", 20), - ): - await setup_integration(hass, mock_config_entry, [Platform.COVER]) - await hass.async_block_till_done() - hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) - await wait_for_hass.wait() - await hass.async_block_till_done(wait_background_tasks=True) - - assert mock_nice_go.connect.call_count < 10 - - assert len(hass._background_tasks) == 0 - - assert "API error" in caplog.text - assert ( - "Failed to connect to the websocket, reconnect attempts exhausted" - not in caplog.text - ) diff --git a/tests/components/nice_go/test_light.py b/tests/components/nice_go/test_light.py deleted file mode 100644 index b170a0ee3ab..00000000000 --- a/tests/components/nice_go/test_light.py +++ /dev/null @@ -1,162 +0,0 @@ -"""Test Nice G.O. light.""" - -from unittest.mock import AsyncMock - -from aiohttp import ClientError -from nice_go import ApiError -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.light import ( - DOMAIN as LIGHT_DOMAIN, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, -) -from homeassistant.components.nice_go.const import DOMAIN -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform - - -async def test_data( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, -) -> None: - """Test that data gets parsed and returned appropriately.""" - - await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_turn_on( - hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry -) -> None: - """Test that turning on the light works as intended.""" - - await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) - - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_garage_2_light"}, - blocking=True, - ) - - assert mock_nice_go.light_on.call_count == 1 - - -async def test_turn_off( - hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry -) -> None: - """Test that turning off the light works as intended.""" - - await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) - - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.test_garage_1_light"}, - blocking=True, - ) - - assert mock_nice_go.light_off.call_count == 1 - - -async def test_update_light_state( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test that turning off the light works as intended.""" - - await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) - - assert hass.states.get("light.test_garage_1_light").state == STATE_ON - assert hass.states.get("light.test_garage_2_light").state == STATE_OFF - assert hass.states.get("light.test_garage_3_light") is None - - device_update = load_json_object_fixture("device_state_update.json", DOMAIN) - await mock_config_entry.runtime_data.on_data(device_update) - device_update_1 = load_json_object_fixture("device_state_update_1.json", DOMAIN) - await mock_config_entry.runtime_data.on_data(device_update_1) - - assert hass.states.get("light.test_garage_1_light").state == STATE_OFF - assert hass.states.get("light.test_garage_2_light").state == STATE_ON - assert hass.states.get("light.test_garage_3_light") is None - - -@pytest.mark.parametrize( - ("action", "error", "entity_id", "expected_error"), - [ - ( - SERVICE_TURN_OFF, - ApiError, - "light.test_garage_1_light", - "Error while turning off the light", - ), - ( - SERVICE_TURN_ON, - ClientError, - "light.test_garage_2_light", - "Error while turning on the light", - ), - ], -) -async def test_error( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - action: str, - error: Exception, - entity_id: str, - expected_error: str, -) -> None: - """Test that errors are handled appropriately.""" - - await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) - - mock_nice_go.light_on.side_effect = error - mock_nice_go.light_off.side_effect = error - - with pytest.raises(HomeAssistantError, match=expected_error): - await hass.services.async_call( - LIGHT_DOMAIN, - action, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - -async def test_unsupported_device_type( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test that unsupported device types are handled 
appropriately.""" - - await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) - - assert hass.states.get("light.test_garage_4_light") is None - assert ( - "Device 'Test Garage 4' has unknown device type 'unknown-device-type'" - in caplog.text - ) - assert "which is not supported by this integration" in caplog.text - assert ( - "We try to support it with a cover and event entity, but nothing else." - in caplog.text - ) - assert ( - "Please create an issue with your device model in additional info" - in caplog.text - ) diff --git a/tests/components/nice_go/test_switch.py b/tests/components/nice_go/test_switch.py deleted file mode 100644 index d3a2141eb2b..00000000000 --- a/tests/components/nice_go/test_switch.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Nice G.O. switch tests.""" - -from unittest.mock import AsyncMock - -from aiohttp import ClientError -from nice_go import ApiError -import pytest - -from homeassistant.components.switch import ( - DOMAIN as SWITCH_DOMAIN, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from . import setup_integration - -from tests.common import MockConfigEntry - - -async def test_turn_on( - hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry -) -> None: - """Test turn on switch.""" - await setup_integration(hass, mock_config_entry, [Platform.SWITCH]) - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "switch.test_garage_1_vacation_mode"}, - blocking=True, - ) - mock_nice_go.vacation_mode_on.assert_called_once_with("1") - - -async def test_turn_off( - hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry -) -> None: - """Test turn off switch.""" - await setup_integration(hass, mock_config_entry, [Platform.SWITCH]) - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_garage_2_vacation_mode"}, - blocking=True, - ) - mock_nice_go.vacation_mode_off.assert_called_once_with("2") - - -@pytest.mark.parametrize( - ("action", "error", "entity_id", "expected_error"), - [ - ( - SERVICE_TURN_OFF, - ApiError, - "switch.test_garage_1_vacation_mode", - "Error while turning off the switch", - ), - ( - SERVICE_TURN_ON, - ClientError, - "switch.test_garage_2_vacation_mode", - "Error while turning on the switch", - ), - ], -) -async def test_error( - hass: HomeAssistant, - mock_nice_go: AsyncMock, - mock_config_entry: MockConfigEntry, - action: str, - error: Exception, - entity_id: str, - expected_error: str, -) -> None: - """Test that errors are handled appropriately.""" - - await setup_integration(hass, mock_config_entry, [Platform.SWITCH]) - - mock_nice_go.vacation_mode_on.side_effect = error - mock_nice_go.vacation_mode_off.side_effect = error - - with pytest.raises(HomeAssistantError, match=expected_error): - await hass.services.async_call( - SWITCH_DOMAIN, - action, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) diff --git a/tests/components/nina/test_config_flow.py b/tests/components/nina/test_config_flow.py index 309c8860c20..23ee8cbf797 100644 --- a/tests/components/nina/test_config_flow.py +++ b/tests/components/nina/test_config_flow.py @@ -89,9 +89,7 @@ async def test_step_user_unexpected_exception(hass: HomeAssistant) -> None: DOMAIN, context={"source": SOURCE_USER}, data=deepcopy(DUMMY_DATA) ) - assert result["type"] is FlowResultType.FORM - assert 
result["errors"] == {"base": "unknown"} - hass.config_entries.flow.async_abort(result["flow_id"]) + assert result["type"] is FlowResultType.ABORT async def test_step_user(hass: HomeAssistant) -> None: @@ -190,7 +188,7 @@ async def test_options_flow_init(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == {} + assert result["data"] is None assert dict(config_entry.data) == { CONF_HEADLINE_FILTER: deepcopy(DUMMY_DATA[CONF_HEADLINE_FILTER]), @@ -302,9 +300,7 @@ async def test_options_flow_unexpected_exception(hass: HomeAssistant) -> None: result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "unknown"} - hass.config_entries.options.async_abort(result["flow_id"]) + assert result["type"] is FlowResultType.ABORT async def test_options_flow_entity_removal( diff --git a/tests/components/nordpool/__init__.py b/tests/components/nordpool/__init__.py deleted file mode 100644 index 20d74d38486..00000000000 --- a/tests/components/nordpool/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -"""Tests for the Nord Pool integration.""" - -from homeassistant.components.nordpool.const import CONF_AREAS -from homeassistant.const import CONF_CURRENCY - -ENTRY_CONFIG = { - CONF_AREAS: ["SE3", "SE4"], - CONF_CURRENCY: "SEK", -} diff --git a/tests/components/nordpool/conftest.py b/tests/components/nordpool/conftest.py deleted file mode 100644 index d1c1972c568..00000000000 --- a/tests/components/nordpool/conftest.py +++ /dev/null @@ -1,75 +0,0 @@ -"""Fixtures for the Nord Pool integration.""" - -from __future__ import annotations - -from datetime import datetime -import json -from typing import Any -from unittest.mock import patch - -from pynordpool import NordPoolClient -from pynordpool.const import Currency -from pynordpool.model import DeliveryPeriodData -import pytest - -from homeassistant.components.nordpool.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util - -from . 
import ENTRY_CONFIG - -from tests.common import MockConfigEntry, load_fixture -from tests.test_util.aiohttp import AiohttpClientMocker - - -@pytest.fixture -async def load_int( - hass: HomeAssistant, get_data: DeliveryPeriodData -) -> MockConfigEntry: - """Set up the Nord Pool integration in Home Assistant.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - source=SOURCE_USER, - data=ENTRY_CONFIG, - ) - - config_entry.add_to_hass(hass) - - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry - - -@pytest.fixture(name="get_data") -async def get_data_from_library( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, load_json: dict[str, Any] -) -> DeliveryPeriodData: - """Retrieve data from Nord Pool library.""" - - client = NordPoolClient(aioclient_mock.create_session(hass.loop)) - with patch("pynordpool.NordPoolClient._get", return_value=load_json): - output = await client.async_get_delivery_period( - datetime(2024, 11, 5, 13, tzinfo=dt_util.UTC), Currency.SEK, ["SE3", "SE4"] - ) - await client._session.close() - return output - - -@pytest.fixture(name="load_json") -def load_json_from_fixture(load_data: str) -> dict[str, Any]: - """Load fixture with json data and return.""" - return json.loads(load_data) - - -@pytest.fixture(name="load_data", scope="package") -def load_data_from_fixture() -> str: - """Load fixture with fixture data and return.""" - return load_fixture("delivery_period.json", DOMAIN) diff --git a/tests/components/nordpool/fixtures/delivery_period.json b/tests/components/nordpool/fixtures/delivery_period.json deleted file mode 100644 index 77d51dc9433..00000000000 --- a/tests/components/nordpool/fixtures/delivery_period.json +++ /dev/null @@ -1,272 +0,0 @@ -{ - "deliveryDateCET": "2024-11-05", - "version": 3, - "updatedAt": "2024-11-04T12:15:03.9456464Z", - "deliveryAreas": ["SE3", "SE4"], - "market": "DayAhead", - "multiAreaEntries": [ - { - "deliveryStart": "2024-11-04T23:00:00Z", - "deliveryEnd": "2024-11-05T00:00:00Z", - "entryPerArea": { - "SE3": 250.73, - "SE4": 283.79 - } - }, - { - "deliveryStart": "2024-11-05T00:00:00Z", - "deliveryEnd": "2024-11-05T01:00:00Z", - "entryPerArea": { - "SE3": 76.36, - "SE4": 81.36 - } - }, - { - "deliveryStart": "2024-11-05T01:00:00Z", - "deliveryEnd": "2024-11-05T02:00:00Z", - "entryPerArea": { - "SE3": 73.92, - "SE4": 79.15 - } - }, - { - "deliveryStart": "2024-11-05T02:00:00Z", - "deliveryEnd": "2024-11-05T03:00:00Z", - "entryPerArea": { - "SE3": 61.69, - "SE4": 65.19 - } - }, - { - "deliveryStart": "2024-11-05T03:00:00Z", - "deliveryEnd": "2024-11-05T04:00:00Z", - "entryPerArea": { - "SE3": 64.6, - "SE4": 68.44 - } - }, - { - "deliveryStart": "2024-11-05T04:00:00Z", - "deliveryEnd": "2024-11-05T05:00:00Z", - "entryPerArea": { - "SE3": 453.27, - "SE4": 516.71 - } - }, - { - "deliveryStart": "2024-11-05T05:00:00Z", - "deliveryEnd": "2024-11-05T06:00:00Z", - "entryPerArea": { - "SE3": 996.28, - "SE4": 1240.85 - } - }, - { - "deliveryStart": "2024-11-05T06:00:00Z", - "deliveryEnd": "2024-11-05T07:00:00Z", - "entryPerArea": { - "SE3": 1406.14, - "SE4": 1648.25 - } - }, - { - "deliveryStart": "2024-11-05T07:00:00Z", - "deliveryEnd": "2024-11-05T08:00:00Z", - "entryPerArea": { - "SE3": 1346.54, - "SE4": 1570.5 - } - }, - { - "deliveryStart": "2024-11-05T08:00:00Z", - "deliveryEnd": "2024-11-05T09:00:00Z", - 
"entryPerArea": { - "SE3": 1150.28, - "SE4": 1345.37 - } - }, - { - "deliveryStart": "2024-11-05T09:00:00Z", - "deliveryEnd": "2024-11-05T10:00:00Z", - "entryPerArea": { - "SE3": 1031.32, - "SE4": 1206.51 - } - }, - { - "deliveryStart": "2024-11-05T10:00:00Z", - "deliveryEnd": "2024-11-05T11:00:00Z", - "entryPerArea": { - "SE3": 927.37, - "SE4": 1085.8 - } - }, - { - "deliveryStart": "2024-11-05T11:00:00Z", - "deliveryEnd": "2024-11-05T12:00:00Z", - "entryPerArea": { - "SE3": 925.05, - "SE4": 1081.72 - } - }, - { - "deliveryStart": "2024-11-05T12:00:00Z", - "deliveryEnd": "2024-11-05T13:00:00Z", - "entryPerArea": { - "SE3": 949.49, - "SE4": 1130.38 - } - }, - { - "deliveryStart": "2024-11-05T13:00:00Z", - "deliveryEnd": "2024-11-05T14:00:00Z", - "entryPerArea": { - "SE3": 1042.03, - "SE4": 1256.91 - } - }, - { - "deliveryStart": "2024-11-05T14:00:00Z", - "deliveryEnd": "2024-11-05T15:00:00Z", - "entryPerArea": { - "SE3": 1258.89, - "SE4": 1765.82 - } - }, - { - "deliveryStart": "2024-11-05T15:00:00Z", - "deliveryEnd": "2024-11-05T16:00:00Z", - "entryPerArea": { - "SE3": 1816.45, - "SE4": 2522.55 - } - }, - { - "deliveryStart": "2024-11-05T16:00:00Z", - "deliveryEnd": "2024-11-05T17:00:00Z", - "entryPerArea": { - "SE3": 2512.65, - "SE4": 3533.03 - } - }, - { - "deliveryStart": "2024-11-05T17:00:00Z", - "deliveryEnd": "2024-11-05T18:00:00Z", - "entryPerArea": { - "SE3": 1819.83, - "SE4": 2524.06 - } - }, - { - "deliveryStart": "2024-11-05T18:00:00Z", - "deliveryEnd": "2024-11-05T19:00:00Z", - "entryPerArea": { - "SE3": 1011.77, - "SE4": 1804.46 - } - }, - { - "deliveryStart": "2024-11-05T19:00:00Z", - "deliveryEnd": "2024-11-05T20:00:00Z", - "entryPerArea": { - "SE3": 835.53, - "SE4": 1112.57 - } - }, - { - "deliveryStart": "2024-11-05T20:00:00Z", - "deliveryEnd": "2024-11-05T21:00:00Z", - "entryPerArea": { - "SE3": 796.19, - "SE4": 1051.69 - } - }, - { - "deliveryStart": "2024-11-05T21:00:00Z", - "deliveryEnd": "2024-11-05T22:00:00Z", - "entryPerArea": { - "SE3": 522.3, - "SE4": 662.44 - } - }, - { - "deliveryStart": "2024-11-05T22:00:00Z", - "deliveryEnd": "2024-11-05T23:00:00Z", - "entryPerArea": { - "SE3": 289.14, - "SE4": 349.21 - } - } - ], - "blockPriceAggregates": [ - { - "blockName": "Off-peak 1", - "deliveryStart": "2024-11-04T23:00:00Z", - "deliveryEnd": "2024-11-05T07:00:00Z", - "averagePricePerArea": { - "SE3": { - "average": 422.87, - "min": 61.69, - "max": 1406.14 - }, - "SE4": { - "average": 497.97, - "min": 65.19, - "max": 1648.25 - } - } - }, - { - "blockName": "Peak", - "deliveryStart": "2024-11-05T07:00:00Z", - "deliveryEnd": "2024-11-05T19:00:00Z", - "averagePricePerArea": { - "SE3": { - "average": 1315.97, - "min": 925.05, - "max": 2512.65 - }, - "SE4": { - "average": 1735.59, - "min": 1081.72, - "max": 3533.03 - } - } - }, - { - "blockName": "Off-peak 2", - "deliveryStart": "2024-11-05T19:00:00Z", - "deliveryEnd": "2024-11-05T23:00:00Z", - "averagePricePerArea": { - "SE3": { - "average": 610.79, - "min": 289.14, - "max": 835.53 - }, - "SE4": { - "average": 793.98, - "min": 349.21, - "max": 1112.57 - } - } - } - ], - "currency": "SEK", - "exchangeRate": 11.6402, - "areaStates": [ - { - "state": "Final", - "areas": ["SE3", "SE4"] - } - ], - "areaAverages": [ - { - "areaCode": "SE3", - "price": 900.74 - }, - { - "areaCode": "SE4", - "price": 1166.12 - } - ] -} diff --git a/tests/components/nordpool/snapshots/test_diagnostics.ambr b/tests/components/nordpool/snapshots/test_diagnostics.ambr deleted file mode 100644 index dde2eca0022..00000000000 --- 
a/tests/components/nordpool/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,283 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics - dict({ - 'raw': dict({ - 'areaAverages': list([ - dict({ - 'areaCode': 'SE3', - 'price': 900.74, - }), - dict({ - 'areaCode': 'SE4', - 'price': 1166.12, - }), - ]), - 'areaStates': list([ - dict({ - 'areas': list([ - 'SE3', - 'SE4', - ]), - 'state': 'Final', - }), - ]), - 'blockPriceAggregates': list([ - dict({ - 'averagePricePerArea': dict({ - 'SE3': dict({ - 'average': 422.87, - 'max': 1406.14, - 'min': 61.69, - }), - 'SE4': dict({ - 'average': 497.97, - 'max': 1648.25, - 'min': 65.19, - }), - }), - 'blockName': 'Off-peak 1', - 'deliveryEnd': '2024-11-05T07:00:00Z', - 'deliveryStart': '2024-11-04T23:00:00Z', - }), - dict({ - 'averagePricePerArea': dict({ - 'SE3': dict({ - 'average': 1315.97, - 'max': 2512.65, - 'min': 925.05, - }), - 'SE4': dict({ - 'average': 1735.59, - 'max': 3533.03, - 'min': 1081.72, - }), - }), - 'blockName': 'Peak', - 'deliveryEnd': '2024-11-05T19:00:00Z', - 'deliveryStart': '2024-11-05T07:00:00Z', - }), - dict({ - 'averagePricePerArea': dict({ - 'SE3': dict({ - 'average': 610.79, - 'max': 835.53, - 'min': 289.14, - }), - 'SE4': dict({ - 'average': 793.98, - 'max': 1112.57, - 'min': 349.21, - }), - }), - 'blockName': 'Off-peak 2', - 'deliveryEnd': '2024-11-05T23:00:00Z', - 'deliveryStart': '2024-11-05T19:00:00Z', - }), - ]), - 'currency': 'SEK', - 'deliveryAreas': list([ - 'SE3', - 'SE4', - ]), - 'deliveryDateCET': '2024-11-05', - 'exchangeRate': 11.6402, - 'market': 'DayAhead', - 'multiAreaEntries': list([ - dict({ - 'deliveryEnd': '2024-11-05T00:00:00Z', - 'deliveryStart': '2024-11-04T23:00:00Z', - 'entryPerArea': dict({ - 'SE3': 250.73, - 'SE4': 283.79, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T01:00:00Z', - 'deliveryStart': '2024-11-05T00:00:00Z', - 'entryPerArea': dict({ - 'SE3': 76.36, - 'SE4': 81.36, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T02:00:00Z', - 'deliveryStart': '2024-11-05T01:00:00Z', - 'entryPerArea': dict({ - 'SE3': 73.92, - 'SE4': 79.15, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T03:00:00Z', - 'deliveryStart': '2024-11-05T02:00:00Z', - 'entryPerArea': dict({ - 'SE3': 61.69, - 'SE4': 65.19, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T04:00:00Z', - 'deliveryStart': '2024-11-05T03:00:00Z', - 'entryPerArea': dict({ - 'SE3': 64.6, - 'SE4': 68.44, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T05:00:00Z', - 'deliveryStart': '2024-11-05T04:00:00Z', - 'entryPerArea': dict({ - 'SE3': 453.27, - 'SE4': 516.71, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T06:00:00Z', - 'deliveryStart': '2024-11-05T05:00:00Z', - 'entryPerArea': dict({ - 'SE3': 996.28, - 'SE4': 1240.85, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T07:00:00Z', - 'deliveryStart': '2024-11-05T06:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1406.14, - 'SE4': 1648.25, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T08:00:00Z', - 'deliveryStart': '2024-11-05T07:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1346.54, - 'SE4': 1570.5, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T09:00:00Z', - 'deliveryStart': '2024-11-05T08:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1150.28, - 'SE4': 1345.37, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T10:00:00Z', - 'deliveryStart': '2024-11-05T09:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1031.32, - 'SE4': 1206.51, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T11:00:00Z', - 'deliveryStart': '2024-11-05T10:00:00Z', - 'entryPerArea': dict({ - 'SE3': 927.37, - 'SE4': 
1085.8, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T12:00:00Z', - 'deliveryStart': '2024-11-05T11:00:00Z', - 'entryPerArea': dict({ - 'SE3': 925.05, - 'SE4': 1081.72, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T13:00:00Z', - 'deliveryStart': '2024-11-05T12:00:00Z', - 'entryPerArea': dict({ - 'SE3': 949.49, - 'SE4': 1130.38, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T14:00:00Z', - 'deliveryStart': '2024-11-05T13:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1042.03, - 'SE4': 1256.91, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T15:00:00Z', - 'deliveryStart': '2024-11-05T14:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1258.89, - 'SE4': 1765.82, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T16:00:00Z', - 'deliveryStart': '2024-11-05T15:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1816.45, - 'SE4': 2522.55, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T17:00:00Z', - 'deliveryStart': '2024-11-05T16:00:00Z', - 'entryPerArea': dict({ - 'SE3': 2512.65, - 'SE4': 3533.03, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T18:00:00Z', - 'deliveryStart': '2024-11-05T17:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1819.83, - 'SE4': 2524.06, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T19:00:00Z', - 'deliveryStart': '2024-11-05T18:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1011.77, - 'SE4': 1804.46, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T20:00:00Z', - 'deliveryStart': '2024-11-05T19:00:00Z', - 'entryPerArea': dict({ - 'SE3': 835.53, - 'SE4': 1112.57, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T21:00:00Z', - 'deliveryStart': '2024-11-05T20:00:00Z', - 'entryPerArea': dict({ - 'SE3': 796.19, - 'SE4': 1051.69, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T22:00:00Z', - 'deliveryStart': '2024-11-05T21:00:00Z', - 'entryPerArea': dict({ - 'SE3': 522.3, - 'SE4': 662.44, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T23:00:00Z', - 'deliveryStart': '2024-11-05T22:00:00Z', - 'entryPerArea': dict({ - 'SE3': 289.14, - 'SE4': 349.21, - }), - }), - ]), - 'updatedAt': '2024-11-04T12:15:03.9456464Z', - 'version': 3, - }), - }) -# --- diff --git a/tests/components/nordpool/snapshots/test_sensor.ambr b/tests/components/nordpool/snapshots/test_sensor.ambr deleted file mode 100644 index 01600352861..00000000000 --- a/tests/components/nordpool/snapshots/test_sensor.ambr +++ /dev/null @@ -1,2215 +0,0 @@ -# serializer version: 1 -# name: test_sensor[sensor.nord_pool_se3_currency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.nord_pool_se3_currency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Currency', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'currency', - 'unique_id': 'SE3-currency', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_currency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Currency', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_currency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'SEK', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_current_price-entry] - EntityRegistryEntrySnapshot({ - 
'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_current_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Current price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_price', - 'unique_id': 'SE3-current_price', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_current_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Current price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_current_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.01177', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_daily_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_daily_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Daily average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_average', - 'unique_id': 'SE3-daily_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_daily_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Daily average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_daily_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.90074', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_exchange_rate-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.nord_pool_se3_exchange_rate', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Exchange rate', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'exchange_rate', - 'unique_id': 'SE3-exchange_rate', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_exchange_rate-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Exchange rate', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_exchange_rate', - 'last_changed': , - 'last_reported': , - 'last_updated': , 
- 'state': '11.6402', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_last_updated-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.nord_pool_se3_last_updated', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last updated', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'updated_at', - 'unique_id': 'SE3-updated_at', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_last_updated-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Last updated', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_last_updated', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-04T12:15:03+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_next_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_next_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Next price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'next_price', - 'unique_id': 'SE3-next_price', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_next_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Next price', - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_next_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.83553', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 1 average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_average', - 'unique_id': 'off_peak_1-SE3-block_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Off-peak 1 average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 
'sensor.nord_pool_se3_off_peak_1_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.42287', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_highest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_highest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 1 highest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_max', - 'unique_id': 'off_peak_1-SE3-block_max', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_highest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Off-peak 1 highest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_highest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.40614', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_lowest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_lowest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 1 lowest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_min', - 'unique_id': 'off_peak_1-SE3-block_min', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_lowest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Off-peak 1 lowest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_lowest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.06169', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_from-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_from', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 1 time from', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_start_time', - 'unique_id': 
'off_peak_1-SE3-block_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_from-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Off-peak 1 time from', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_from', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-04T23:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_until-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_until', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 1 time until', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_end_time', - 'unique_id': 'off_peak_1-SE3-block_end_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_until-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Off-peak 1 time until', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_until', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T07:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 2 average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_average', - 'unique_id': 'off_peak_2-SE3-block_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Off-peak 2 average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.61079', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_highest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_highest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': 
dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 2 highest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_max', - 'unique_id': 'off_peak_2-SE3-block_max', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_highest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Off-peak 2 highest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_highest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.83553', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_lowest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_lowest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 2 lowest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_min', - 'unique_id': 'off_peak_2-SE3-block_min', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_lowest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Off-peak 2 lowest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_lowest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.28914', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_from-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_from', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 2 time from', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_start_time', - 'unique_id': 'off_peak_2-SE3-block_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_from-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Off-peak 2 time from', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_from', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T19:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_until-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 
'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_until', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 2 time until', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_end_time', - 'unique_id': 'off_peak_2-SE3-block_end_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_until-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Off-peak 2 time until', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_until', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T23:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_peak_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Peak average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_average', - 'unique_id': 'peak-SE3-block_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Peak average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_peak_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.31597', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_highest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_peak_highest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Peak highest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_max', - 'unique_id': 'peak-SE3-block_max', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_highest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Peak highest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_peak_highest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.51265', - }) -# 
--- -# name: test_sensor[sensor.nord_pool_se3_peak_lowest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_peak_lowest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Peak lowest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_min', - 'unique_id': 'peak-SE3-block_min', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_lowest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Peak lowest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_peak_lowest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.92505', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_time_from-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_peak_time_from', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Peak time from', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_start_time', - 'unique_id': 'peak-SE3-block_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_time_from-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Peak time from', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_peak_time_from', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T07:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_time_until-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_peak_time_until', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Peak time until', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_end_time', - 'unique_id': 'peak-SE3-block_end_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_time_until-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Peak time until', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_peak_time_until', - 'last_changed': 
, - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T19:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_previous_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_previous_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Previous price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_price', - 'unique_id': 'SE3-last_price', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_previous_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Previous price', - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_previous_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.81983', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_currency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.nord_pool_se4_currency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Currency', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'currency', - 'unique_id': 'SE4-currency', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_currency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Currency', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_currency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'SEK', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_current_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_current_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Current price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_price', - 'unique_id': 'SE4-current_price', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_current_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Current price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_current_price', - 
'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.80446', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_daily_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_daily_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Daily average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_average', - 'unique_id': 'SE4-daily_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_daily_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Daily average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_daily_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.16612', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_exchange_rate-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.nord_pool_se4_exchange_rate', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Exchange rate', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'exchange_rate', - 'unique_id': 'SE4-exchange_rate', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_exchange_rate-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Exchange rate', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_exchange_rate', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.6402', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_last_updated-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.nord_pool_se4_last_updated', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last updated', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'updated_at', - 'unique_id': 'SE4-updated_at', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_last_updated-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Last updated', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_last_updated', - 
'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-04T12:15:03+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_next_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_next_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Next price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'next_price', - 'unique_id': 'SE4-next_price', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_next_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Next price', - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_next_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.11257', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 1 average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_average', - 'unique_id': 'off_peak_1-SE4-block_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Off-peak 1 average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.49797', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_highest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_highest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 1 highest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_max', - 'unique_id': 'off_peak_1-SE4-block_max', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: 
test_sensor[sensor.nord_pool_se4_off_peak_1_highest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Off-peak 1 highest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_highest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.64825', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_lowest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_lowest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 1 lowest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_min', - 'unique_id': 'off_peak_1-SE4-block_min', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_lowest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Off-peak 1 lowest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_lowest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.06519', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_from-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_from', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 1 time from', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_start_time', - 'unique_id': 'off_peak_1-SE4-block_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_from-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Off-peak 1 time from', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_from', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-04T23:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_until-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_until', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 1 
time until', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_end_time', - 'unique_id': 'off_peak_1-SE4-block_end_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_until-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Off-peak 1 time until', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_until', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T07:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 2 average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_average', - 'unique_id': 'off_peak_2-SE4-block_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Off-peak 2 average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.79398', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_highest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_highest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 2 highest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_max', - 'unique_id': 'off_peak_2-SE4-block_max', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_highest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Off-peak 2 highest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_highest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.11257', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_lowest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, 
- 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_lowest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 2 lowest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_min', - 'unique_id': 'off_peak_2-SE4-block_min', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_lowest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Off-peak 2 lowest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_lowest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.34921', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_from-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_from', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 2 time from', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_start_time', - 'unique_id': 'off_peak_2-SE4-block_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_from-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Off-peak 2 time from', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_from', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T19:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_until-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_until', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 2 time until', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_end_time', - 'unique_id': 'off_peak_2-SE4-block_end_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_until-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Off-peak 2 time until', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_until', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T23:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_average-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_peak_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Peak average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_average', - 'unique_id': 'peak-SE4-block_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Peak average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_peak_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.73559', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_highest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_peak_highest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Peak highest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_max', - 'unique_id': 'peak-SE4-block_max', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_highest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Peak highest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_peak_highest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.53303', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_lowest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_peak_lowest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Peak lowest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_min', - 'unique_id': 'peak-SE4-block_min', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_lowest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Peak lowest price', - 
'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_peak_lowest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.08172', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_time_from-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_peak_time_from', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Peak time from', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_start_time', - 'unique_id': 'peak-SE4-block_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_time_from-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Peak time from', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_peak_time_from', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T07:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_time_until-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_peak_time_until', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Peak time until', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_end_time', - 'unique_id': 'peak-SE4-block_end_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_time_until-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Peak time until', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_peak_time_until', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T19:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_previous_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_previous_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Previous price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_price', - 'unique_id': 'SE4-last_price', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_previous_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'friendly_name': 'Nord Pool SE4 Previous price', - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_previous_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.52406', - }) -# --- diff --git a/tests/components/nordpool/test_config_flow.py b/tests/components/nordpool/test_config_flow.py deleted file mode 100644 index cfdfc63aca7..00000000000 --- a/tests/components/nordpool/test_config_flow.py +++ /dev/null @@ -1,206 +0,0 @@ -"""Test the Nord Pool config flow.""" - -from __future__ import annotations - -from unittest.mock import patch - -from pynordpool import ( - DeliveryPeriodData, - NordPoolConnectionError, - NordPoolEmptyResponseError, - NordPoolError, - NordPoolResponseError, -) -import pytest - -from homeassistant import config_entries -from homeassistant.components.nordpool.const import CONF_AREAS, DOMAIN -from homeassistant.const import CONF_CURRENCY -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from . import ENTRY_CONFIG - -from tests.common import MockConfigEntry - - -@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -async def test_form(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: - """Test we get the form.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - ENTRY_CONFIG, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["version"] == 1 - assert result["title"] == "Nord Pool" - assert result["data"] == {"areas": ["SE3", "SE4"], "currency": "SEK"} - - -@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -async def test_single_config_entry( - hass: HomeAssistant, load_int: None, get_data: DeliveryPeriodData -) -> None: - """Test abort for single config entry.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" - - -@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -@pytest.mark.parametrize( - ("error_message", "p_error"), - [ - (NordPoolConnectionError, "cannot_connect"), - (NordPoolEmptyResponseError, "no_data"), - (NordPoolError, "cannot_connect"), - (NordPoolResponseError, "cannot_connect"), - ], -) -async def test_cannot_connect( - hass: HomeAssistant, - get_data: DeliveryPeriodData, - error_message: Exception, - p_error: str, -) -> None: - """Test cannot connect error.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == config_entries.SOURCE_USER - - with patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - side_effect=error_message, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=ENTRY_CONFIG, - ) - - assert result["errors"] == {"base": p_error} - - with patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - 
return_value=get_data, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=ENTRY_CONFIG, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Nord Pool" - assert result["data"] == {"areas": ["SE3", "SE4"], "currency": "SEK"} - - -@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -async def test_reconfigure( - hass: HomeAssistant, - load_int: MockConfigEntry, - get_data: DeliveryPeriodData, -) -> None: - """Test reconfiguration.""" - - result = await load_int.start_reconfigure_flow(hass) - - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_AREAS: ["SE3"], - CONF_CURRENCY: "EUR", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - assert load_int.data == { - "areas": [ - "SE3", - ], - "currency": "EUR", - } - - -@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -@pytest.mark.parametrize( - ("error_message", "p_error"), - [ - (NordPoolConnectionError, "cannot_connect"), - (NordPoolEmptyResponseError, "no_data"), - (NordPoolError, "cannot_connect"), - (NordPoolResponseError, "cannot_connect"), - ], -) -async def test_reconfigure_cannot_connect( - hass: HomeAssistant, - load_int: MockConfigEntry, - get_data: DeliveryPeriodData, - error_message: Exception, - p_error: str, -) -> None: - """Test cannot connect error in a reeconfigure flow.""" - - result = await load_int.start_reconfigure_flow(hass) - - with patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - side_effect=error_message, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_AREAS: ["SE3"], - CONF_CURRENCY: "EUR", - }, - ) - - assert result["errors"] == {"base": p_error} - - with patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_AREAS: ["SE3"], - CONF_CURRENCY: "EUR", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - assert load_int.data == { - "areas": [ - "SE3", - ], - "currency": "EUR", - } diff --git a/tests/components/nordpool/test_coordinator.py b/tests/components/nordpool/test_coordinator.py deleted file mode 100644 index d2d912b1b99..00000000000 --- a/tests/components/nordpool/test_coordinator.py +++ /dev/null @@ -1,106 +0,0 @@ -"""The test for the Nord Pool coordinator.""" - -from __future__ import annotations - -from datetime import timedelta -from unittest.mock import patch - -from freezegun.api import FrozenDateTimeFactory -from pynordpool import ( - DeliveryPeriodData, - NordPoolAuthenticationError, - NordPoolEmptyResponseError, - NordPoolError, - NordPoolResponseError, -) -import pytest - -from homeassistant.components.nordpool.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import STATE_UNAVAILABLE -from homeassistant.core import HomeAssistant - -from . 
import ENTRY_CONFIG - -from tests.common import MockConfigEntry, async_fire_time_changed - - -@pytest.mark.freeze_time("2024-11-05T10:00:00+00:00") -async def test_coordinator( - hass: HomeAssistant, - get_data: DeliveryPeriodData, - freezer: FrozenDateTimeFactory, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the Nord Pool coordinator with errors.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - source=SOURCE_USER, - data=ENTRY_CONFIG, - ) - - config_entry.add_to_hass(hass) - - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - ) as mock_data, - ): - mock_data.return_value = get_data - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == "0.92737" - mock_data.reset_mock() - - mock_data.side_effect = NordPoolError("error") - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == STATE_UNAVAILABLE - mock_data.reset_mock() - - assert "Authentication error" not in caplog.text - mock_data.side_effect = NordPoolAuthenticationError("Authentication error") - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == STATE_UNAVAILABLE - assert "Authentication error" in caplog.text - mock_data.reset_mock() - - assert "Empty response" not in caplog.text - mock_data.side_effect = NordPoolEmptyResponseError("Empty response") - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == STATE_UNAVAILABLE - assert "Empty response" in caplog.text - mock_data.reset_mock() - - assert "Response error" not in caplog.text - mock_data.side_effect = NordPoolResponseError("Response error") - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == STATE_UNAVAILABLE - assert "Response error" in caplog.text - mock_data.reset_mock() - - mock_data.return_value = get_data - mock_data.side_effect = None - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == "1.81645" diff --git a/tests/components/nordpool/test_diagnostics.py b/tests/components/nordpool/test_diagnostics.py deleted file mode 100644 index 4639186ecf1..00000000000 --- a/tests/components/nordpool/test_diagnostics.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Test Nord Pool diagnostics.""" - -from __future__ import annotations - -from syrupy.assertion import SnapshotAssertion - -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant - -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - 
-async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - load_int: ConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test generating diagnostics for a config entry.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, load_int) == snapshot - ) diff --git a/tests/components/nordpool/test_init.py b/tests/components/nordpool/test_init.py deleted file mode 100644 index 5ec1c4b3a0b..00000000000 --- a/tests/components/nordpool/test_init.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Test for Nord Pool component Init.""" - -from __future__ import annotations - -from unittest.mock import patch - -from pynordpool import DeliveryPeriodData - -from homeassistant.components.nordpool.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER, ConfigEntryState -from homeassistant.core import HomeAssistant - -from . import ENTRY_CONFIG - -from tests.common import MockConfigEntry - - -async def test_unload_entry(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: - """Test load and unload an entry.""" - entry = MockConfigEntry( - domain=DOMAIN, - source=SOURCE_USER, - data=ENTRY_CONFIG, - ) - entry.add_to_hass(hass) - - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - - assert entry.state is ConfigEntryState.LOADED - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/nordpool/test_sensor.py b/tests/components/nordpool/test_sensor.py deleted file mode 100644 index c7a305c8a40..00000000000 --- a/tests/components/nordpool/test_sensor.py +++ /dev/null @@ -1,25 +0,0 @@ -"""The test for the Nord Pool sensor platform.""" - -from __future__ import annotations - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import snapshot_platform - - -@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor( - hass: HomeAssistant, - load_int: ConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Nord Pool sensor.""" - - await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) diff --git a/tests/components/notify/common.py b/tests/components/notify/common.py index 1b5c0d6d6ba..418de96d1aa 100644 --- a/tests/components/notify/common.py +++ b/tests/components/notify/common.py @@ -4,8 +4,6 @@ All containing methods are legacy helpers that should not be used by new components. Instead call the service directly. 
""" -from typing import Any - from homeassistant.components.notify import ( ATTR_DATA, ATTR_MESSAGE, @@ -13,14 +11,11 @@ from homeassistant.components.notify import ( DOMAIN, SERVICE_NOTIFY, ) -from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass @bind_hass -def send_message( - hass: HomeAssistant, message: str, title: str | None = None, data: Any = None -) -> None: +def send_message(hass, message, title=None, data=None): """Send a notification message.""" info = {ATTR_MESSAGE: message} diff --git a/tests/components/notify/conftest.py b/tests/components/notify/conftest.py index 91dc92a27fe..0efb3a4689d 100644 --- a/tests/components/notify/conftest.py +++ b/tests/components/notify/conftest.py @@ -1,8 +1,7 @@ """Fixtures for Notify platform tests.""" -from collections.abc import Generator - import pytest +from typing_extensions import Generator from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant diff --git a/tests/components/notify/test_legacy.py b/tests/components/notify/test_legacy.py index eeacf915b03..d6478c358bf 100644 --- a/tests/components/notify/test_legacy.py +++ b/tests/components/notify/test_legacy.py @@ -1,7 +1,7 @@ """The tests for legacy notify services.""" import asyncio -from collections.abc import Callable, Coroutine, Mapping +from collections.abc import Mapping from pathlib import Path from typing import Any from unittest.mock import MagicMock, Mock, patch @@ -19,7 +19,7 @@ from homeassistant.helpers.reload import async_setup_reload_service from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import async_setup_component -from tests.common import MockPlatform, mock_platform +from tests.common import MockPlatform, async_get_persistent_notifications, mock_platform class NotificationService(notify.BaseNotificationService): @@ -63,16 +63,8 @@ def mock_notify_platform( hass: HomeAssistant, tmp_path: Path, integration: str = "notify", - async_get_service: Callable[ - [HomeAssistant, ConfigType, DiscoveryInfoType | None], - Coroutine[Any, Any, notify.BaseNotificationService], - ] - | None = None, - get_service: Callable[ - [HomeAssistant, ConfigType, DiscoveryInfoType | None], - notify.BaseNotificationService, - ] - | None = None, + async_get_service: Any = None, + get_service: Any = None, ): """Specialize the mock platform for legacy notify service.""" loaded_platform = MockNotifyPlatform(async_get_service, get_service) @@ -186,6 +178,24 @@ async def test_remove_targets(hass: HomeAssistant) -> None: assert test.registered_targets == {"test_c": 1} +async def test_warn_template( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test warning when template used.""" + assert await async_setup_component(hass, "notify", {}) + + await hass.services.async_call( + "notify", + "persistent_notification", + {"message": "{{ 1 + 1 }}", "title": "Test notif {{ 1 + 1 }}"}, + blocking=True, + ) + # We should only log it once + assert caplog.text.count("Passing templates to notify service is deprecated") == 1 + notifications = async_get_persistent_notifications(hass) + assert len(notifications) == 1 + + async def test_invalid_platform( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: @@ -216,11 +226,7 @@ async def test_invalid_service( ) -> None: """Test service setup with an invalid service object or platform.""" - def get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = 
None, - ) -> notify.BaseNotificationService | None: + def get_service(hass, config, discovery_info=None): """Return None for an invalid notify service.""" return None @@ -253,13 +259,9 @@ async def test_platform_setup_with_error( ) -> None: """Test service setup with an invalid setup.""" - async def async_get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, - ) -> notify.BaseNotificationService | None: + async def async_get_service(hass, config, discovery_info=None): """Return None for an invalid notify service.""" - raise Exception("Setup error") # noqa: TRY002 + raise Exception("Setup error") # pylint: disable=broad-exception-raised mock_notify_platform( hass, tmp_path, "testnotify", async_get_service=async_get_service @@ -277,15 +279,11 @@ async def test_platform_setup_with_error( async def test_reload_with_notify_builtin_platform_reload( - hass: HomeAssistant, tmp_path: Path + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: """Test reload using the legacy notify platform reload method.""" - async def async_get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, - ) -> NotificationService: + async def async_get_service(hass, config, discovery_info=None): """Get notify service for mocked platform.""" targetlist = {"a": 1, "b": 2} return NotificationService(hass, targetlist, "testnotify") @@ -312,25 +310,19 @@ async def test_reload_with_notify_builtin_platform_reload( assert hass.services.has_service(notify.DOMAIN, "testnotify_b") -async def test_setup_platform_and_reload(hass: HomeAssistant, tmp_path: Path) -> None: +async def test_setup_platform_and_reload( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path +) -> None: """Test service setup and reload.""" get_service_called = Mock() - async def async_get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, - ) -> NotificationService: + async def async_get_service(hass, config, discovery_info=None): """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"a": 1, "b": 2} return NotificationService(hass, targetlist, "testnotify") - async def async_get_service2( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, - ) -> NotificationService: + async def async_get_service2(hass, config, discovery_info=None): """Get legacy notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"c": 3, "d": 4} @@ -409,26 +401,18 @@ async def test_setup_platform_and_reload(hass: HomeAssistant, tmp_path: Path) -> async def test_setup_platform_before_notify_setup( - hass: HomeAssistant, tmp_path: Path + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: """Test trying to setup a platform before legacy notify service is setup.""" get_service_called = Mock() - async def async_get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, - ) -> NotificationService: + async def async_get_service(hass, config, discovery_info=None): """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"a": 1, "b": 2} return NotificationService(hass, targetlist, "testnotify") - async def async_get_service2( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, - ) -> 
NotificationService: + async def async_get_service2(hass, config, discovery_info=None): """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"c": 3, "d": 4} @@ -467,26 +451,18 @@ async def test_setup_platform_before_notify_setup( async def test_setup_platform_after_notify_setup( - hass: HomeAssistant, tmp_path: Path + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: """Test trying to setup a platform after legacy notify service is set up.""" get_service_called = Mock() - async def async_get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, - ) -> NotificationService: + async def async_get_service(hass, config, discovery_info=None): """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"a": 1, "b": 2} return NotificationService(hass, targetlist, "testnotify") - async def async_get_service2( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, - ) -> NotificationService: + async def async_get_service2(hass, config, discovery_info=None): """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"c": 3, "d": 4} @@ -532,11 +508,27 @@ async def test_sending_none_message(hass: HomeAssistant, tmp_path: Path) -> None notify.DOMAIN, notify.SERVICE_NOTIFY, {notify.ATTR_MESSAGE: None} ) assert ( - str(exc.value) == "string value is None for dictionary value @ data['message']" + str(exc.value) + == "template value is None for dictionary value @ data['message']" ) send_message_mock.assert_not_called() +async def test_sending_templated_message(hass: HomeAssistant, tmp_path: Path) -> None: + """Send a templated message.""" + send_message_mock = await help_setup_notify(hass, tmp_path) + hass.states.async_set("sensor.temperature", 10) + data = { + notify.ATTR_MESSAGE: "{{states.sensor.temperature.state}}", + notify.ATTR_TITLE: "{{ states.sensor.temperature.name }}", + } + await hass.services.async_call(notify.DOMAIN, notify.SERVICE_NOTIFY, data) + await hass.async_block_till_done() + send_message_mock.assert_called_once_with( + "10", {"title": "temperature", "data": None} + ) + + async def test_method_forwards_correct_data( hass: HomeAssistant, tmp_path: Path ) -> None: diff --git a/tests/components/notify/test_repairs.py b/tests/components/notify/test_repairs.py index e77da5cea6f..fef5818e1e6 100644 --- a/tests/components/notify/test_repairs.py +++ b/tests/components/notify/test_repairs.py @@ -1,5 +1,6 @@ """Test repairs for notify entity component.""" +from http import HTTPStatus from unittest.mock import AsyncMock import pytest @@ -8,16 +9,18 @@ from homeassistant.components.notify import ( DOMAIN as NOTIFY_DOMAIN, migrate_notify_issue, ) +from homeassistant.components.repairs.issue_handler import ( + async_process_repairs_platforms, +) +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, MockModule, mock_integration -from tests.components.repairs import ( - async_process_repairs_platforms, - process_repair_fix_flow, - start_repair_fix_flow, -) from tests.typing import ClientSessionGenerator THERMOSTAT_ID = 0 @@ -63,12 +66,20 @@ async def test_notify_migration_repair_flow( ) assert 
len(issue_registry.issues) == 1 - data = await start_repair_fix_flow(http_client, NOTIFY_DOMAIN, translation_key) + url = RepairsFlowIndexView.url + resp = await http_client.post( + url, json={"handler": NOTIFY_DOMAIN, "issue_id": translation_key} + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["step_id"] == "confirm" - data = await process_repair_fix_flow(http_client, flow_id) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await http_client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" # Test confirm step in repair flow await hass.async_block_till_done() diff --git a/tests/components/notify_events/test_notify.py b/tests/components/notify_events/test_notify.py index df6df078de1..dbfc354404b 100644 --- a/tests/components/notify_events/test_notify.py +++ b/tests/components/notify_events/test_notify.py @@ -1,10 +1,6 @@ """The tests for notify_events.""" -from homeassistant.components.notify import ( - ATTR_DATA, - ATTR_MESSAGE, - DOMAIN as NOTIFY_DOMAIN, -) +from homeassistant.components.notify import ATTR_DATA, ATTR_MESSAGE, DOMAIN from homeassistant.components.notify_events.notify import ( ATTR_LEVEL, ATTR_PRIORITY, @@ -17,10 +13,10 @@ from tests.common import async_mock_service async def test_send_msg(hass: HomeAssistant) -> None: """Test notify.events service.""" - notify_calls = async_mock_service(hass, NOTIFY_DOMAIN, "events") + notify_calls = async_mock_service(hass, DOMAIN, "events") await hass.services.async_call( - NOTIFY_DOMAIN, + DOMAIN, "events", { ATTR_MESSAGE: "message content", @@ -36,7 +32,7 @@ async def test_send_msg(hass: HomeAssistant) -> None: assert len(notify_calls) == 1 call = notify_calls[-1] - assert call.domain == NOTIFY_DOMAIN + assert call.domain == DOMAIN assert call.service == "events" assert call.data.get(ATTR_MESSAGE) == "message content" assert call.data.get(ATTR_DATA).get(ATTR_TOKEN) == "XYZ" diff --git a/tests/components/notion/conftest.py b/tests/components/notion/conftest.py index 6a6e150c960..17bea306ad8 100644 --- a/tests/components/notion/conftest.py +++ b/tests/components/notion/conftest.py @@ -1,6 +1,5 @@ """Define fixtures for Notion tests.""" -from collections.abc import Generator import json from unittest.mock import AsyncMock, Mock, patch @@ -9,6 +8,7 @@ from aionotion.listener.models import Listener from aionotion.sensor.models import Sensor from aionotion.user.models import UserPreferences import pytest +from typing_extensions import Generator from homeassistant.components.notion import CONF_REFRESH_TOKEN, CONF_USER_UUID, DOMAIN from homeassistant.const import CONF_USERNAME diff --git a/tests/components/notion/test_config_flow.py b/tests/components/notion/test_config_flow.py index 15c211c19cb..2cc5e3f04b7 100644 --- a/tests/components/notion/test_config_flow.py +++ b/tests/components/notion/test_config_flow.py @@ -6,15 +6,13 @@ from aionotion.errors import InvalidCredentialsError, NotionError import pytest from homeassistant.components.notion import CONF_REFRESH_TOKEN, CONF_USER_UUID, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_PASSWORD, TEST_REFRESH_TOKEN, TEST_USER_UUID, TEST_USERNAME -from tests.common import MockConfigEntry 
- pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -92,13 +90,21 @@ async def test_duplicate_error(hass: HomeAssistant, config, config_entry) -> Non async def test_reauth( hass: HomeAssistant, config, - config_entry: MockConfigEntry, + config_entry, errors, get_client_with_exception, mock_aionotion, ) -> None: """Test that re-auth works.""" - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + "unique_id": config_entry.unique_id, + }, + data=config, + ) assert result["step_id"] == "reauth_confirm" # Test errors that can arise when getting a Notion API client: diff --git a/tests/components/notion/test_diagnostics.py b/tests/components/notion/test_diagnostics.py index 890ce2dfc4a..023b9369f03 100644 --- a/tests/components/notion/test_diagnostics.py +++ b/tests/components/notion/test_diagnostics.py @@ -4,7 +4,6 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.components.notion import DOMAIN from homeassistant.core import HomeAssistant -from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -34,9 +33,6 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, - "created_at": ANY, - "modified_at": ANY, - "discovery_keys": {}, }, "data": { "bridges": [ diff --git a/tests/components/nsw_fuel_station/test_sensor.py b/tests/components/nsw_fuel_station/test_sensor.py index dbf52d937f0..898d5757870 100644 --- a/tests/components/nsw_fuel_station/test_sensor.py +++ b/tests/components/nsw_fuel_station/test_sensor.py @@ -23,9 +23,7 @@ VALID_CONFIG_EXPECTED_ENTITY_IDS = ["my_fake_station_p95", "my_fake_station_e10" class MockPrice: """Mock Price implementation.""" - def __init__( - self, price, fuel_type, last_updated, price_unit, station_code - ) -> None: + def __init__(self, price, fuel_type, last_updated, price_unit, station_code): """Initialize a mock price instance.""" self.price = price self.fuel_type = fuel_type @@ -37,7 +35,7 @@ class MockPrice: class MockStation: """Mock Station implementation.""" - def __init__(self, name, code) -> None: + def __init__(self, name, code): """Initialize a mock Station instance.""" self.name = name self.code = code @@ -46,7 +44,7 @@ class MockStation: class MockGetFuelPricesResponse: """Mock GetFuelPricesResponse implementation.""" - def __init__(self, prices, stations) -> None: + def __init__(self, prices, stations): """Initialize a mock GetFuelPricesResponse instance.""" self.prices = prices self.stations = stations diff --git a/tests/components/nuki/snapshots/test_binary_sensor.ambr b/tests/components/nuki/snapshots/test_binary_sensor.ambr index 55976bcb433..4a122fa78f2 100644 --- a/tests/components/nuki/snapshots/test_binary_sensor.ambr +++ b/tests/components/nuki/snapshots/test_binary_sensor.ambr @@ -83,6 +83,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Community door Ring Action', + 'nuki_id': 2, }), 'context': , 'entity_id': 'binary_sensor.community_door_ring_action', @@ -130,6 +131,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'door', 'friendly_name': 'Home', + 'nuki_id': 1, }), 'context': , 'entity_id': 'binary_sensor.home', diff --git a/tests/components/nuki/snapshots/test_lock.ambr b/tests/components/nuki/snapshots/test_lock.ambr index 24c80e7b487..a0013fc37c1 100644 --- 
a/tests/components/nuki/snapshots/test_lock.ambr +++ b/tests/components/nuki/snapshots/test_lock.ambr @@ -35,7 +35,9 @@ # name: test_locks[lock.community_door-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'battery_critical': False, 'friendly_name': 'Community door', + 'nuki_id': 2, 'supported_features': , }), 'context': , @@ -82,7 +84,9 @@ # name: test_locks[lock.home-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'battery_critical': False, 'friendly_name': 'Home', + 'nuki_id': 1, 'supported_features': , }), 'context': , diff --git a/tests/components/nuki/snapshots/test_sensor.ambr b/tests/components/nuki/snapshots/test_sensor.ambr index a319104fbc3..3c1159aecba 100644 --- a/tests/components/nuki/snapshots/test_sensor.ambr +++ b/tests/components/nuki/snapshots/test_sensor.ambr @@ -37,6 +37,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Home Battery', + 'nuki_id': 1, 'unit_of_measurement': '%', }), 'context': , diff --git a/tests/components/nuki/test_config_flow.py b/tests/components/nuki/test_config_flow.py index d4ddc261f1e..cdd429c40c5 100644 --- a/tests/components/nuki/test_config_flow.py +++ b/tests/components/nuki/test_config_flow.py @@ -210,7 +210,9 @@ async def test_reauth_success(hass: HomeAssistant) -> None: """Test starting a reauthentication flow.""" entry = await setup_nuki_integration(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -239,7 +241,9 @@ async def test_reauth_invalid_auth(hass: HomeAssistant) -> None: """Test starting a reauthentication flow with invalid auth.""" entry = await setup_nuki_integration(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -261,7 +265,9 @@ async def test_reauth_cannot_connect(hass: HomeAssistant) -> None: """Test starting a reauthentication flow with cannot connect.""" entry = await setup_nuki_integration(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -283,7 +289,9 @@ async def test_reauth_unknown_exception(hass: HomeAssistant) -> None: """Test starting a reauthentication flow with an unknown exception.""" entry = await setup_nuki_integration(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/numato/conftest.py b/tests/components/numato/conftest.py index f3ae4d5f32b..c6fd13a099e 100644 --- a/tests/components/numato/conftest.py +++ b/tests/components/numato/conftest.py @@ -1,18 +1,17 @@ """Fixtures for numato tests.""" from copy import deepcopy -from typing import Any import pytest from homeassistant.components import numato +from . 
import numato_mock from .common import NUMATO_CFG -from .numato_mock import NumatoModuleMock @pytest.fixture -def config() -> dict[str, Any]: +def config(): """Provide a copy of the numato domain's test configuration. This helps to quickly change certain aspects of the configuration scoped @@ -22,8 +21,8 @@ def config() -> dict[str, Any]: @pytest.fixture -def numato_fixture(monkeypatch: pytest.MonkeyPatch) -> NumatoModuleMock: +def numato_fixture(monkeypatch): """Inject the numato mockup into numato homeassistant module.""" - module_mock = NumatoModuleMock() + module_mock = numato_mock.NumatoModuleMock() monkeypatch.setattr(numato, "gpio", module_mock) return module_mock diff --git a/tests/components/numato/numato_mock.py b/tests/components/numato/numato_mock.py index 208beffe83f..097a785beb1 100644 --- a/tests/components/numato/numato_mock.py +++ b/tests/components/numato/numato_mock.py @@ -8,14 +8,14 @@ class NumatoModuleMock: NumatoGpioError = NumatoGpioError - def __init__(self) -> None: + def __init__(self): """Initialize the numato_gpio module mockup class.""" self.devices = {} class NumatoDeviceMock: """Mockup for the numato_gpio.NumatoUsbGpio class.""" - def __init__(self, device) -> None: + def __init__(self, device): """Initialize numato device mockup.""" self.device = device self.callbacks = {} diff --git a/tests/components/numato/test_binary_sensor.py b/tests/components/numato/test_binary_sensor.py index 08506349247..524589af198 100644 --- a/tests/components/numato/test_binary_sensor.py +++ b/tests/components/numato/test_binary_sensor.py @@ -21,7 +21,7 @@ MOCKUP_ENTITY_IDS = { async def test_failing_setups_no_entities( - hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch + hass: HomeAssistant, numato_fixture, monkeypatch ) -> None: """When port setup fails, no entity shall be created.""" monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "setup", mockup_raise) diff --git a/tests/components/numato/test_init.py b/tests/components/numato/test_init.py index 4695265f37f..35dd102ec9e 100644 --- a/tests/components/numato/test_init.py +++ b/tests/components/numato/test_init.py @@ -11,7 +11,7 @@ from .common import NUMATO_CFG, mockup_raise, mockup_return async def test_setup_no_devices( - hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch + hass: HomeAssistant, numato_fixture, monkeypatch ) -> None: """Test handling of an 'empty' discovery. @@ -24,7 +24,7 @@ async def test_setup_no_devices( async def test_fail_setup_raising_discovery( - hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch + hass: HomeAssistant, numato_fixture, caplog: pytest.LogCaptureFixture, monkeypatch ) -> None: """Test handling of an exception during discovery. 
@@ -57,7 +57,7 @@ async def test_hass_numato_api_wrong_port_directions( async def test_hass_numato_api_errors( - hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch + hass: HomeAssistant, numato_fixture, monkeypatch ) -> None: """Test whether Home Assistant numato API (re-)raises errors.""" numato_fixture.discover() diff --git a/tests/components/numato/test_sensor.py b/tests/components/numato/test_sensor.py index c652df9b086..30a9f174941 100644 --- a/tests/components/numato/test_sensor.py +++ b/tests/components/numato/test_sensor.py @@ -1,7 +1,5 @@ """Tests for the numato sensor platform.""" -import pytest - from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import discovery @@ -15,7 +13,7 @@ MOCKUP_ENTITY_IDS = { async def test_failing_setups_no_entities( - hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch + hass: HomeAssistant, numato_fixture, monkeypatch ) -> None: """When port setup fails, no entity shall be created.""" monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "setup", mockup_raise) @@ -26,7 +24,7 @@ async def test_failing_setups_no_entities( async def test_failing_sensor_update( - hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch + hass: HomeAssistant, numato_fixture, monkeypatch ) -> None: """Test condition when a sensor update fails.""" monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "adc_read", mockup_raise) diff --git a/tests/components/numato/test_switch.py b/tests/components/numato/test_switch.py index 42102ea4869..e69b3481b1d 100644 --- a/tests/components/numato/test_switch.py +++ b/tests/components/numato/test_switch.py @@ -1,7 +1,5 @@ """Tests for the numato switch platform.""" -import pytest - from homeassistant.components import switch from homeassistant.const import ( ATTR_ENTITY_ID, @@ -22,7 +20,7 @@ MOCKUP_ENTITY_IDS = { async def test_failing_setups_no_entities( - hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch + hass: HomeAssistant, numato_fixture, monkeypatch ) -> None: """When port setup fails, no entity shall be created.""" monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "setup", mockup_raise) @@ -71,7 +69,7 @@ async def test_regular_hass_operations(hass: HomeAssistant, numato_fixture) -> N async def test_failing_hass_operations( - hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch + hass: HomeAssistant, numato_fixture, monkeypatch ) -> None: """Test failing operations called from within Home Assistant. diff --git a/tests/components/number/test_init.py b/tests/components/number/test_init.py index 721b531e8cd..6f74a3126c0 100644 --- a/tests/components/number/test_init.py +++ b/tests/components/number/test_init.py @@ -1,10 +1,10 @@ """The tests for the Number component.""" -from collections.abc import Generator from typing import Any from unittest.mock import MagicMock import pytest +from typing_extensions import Generator from homeassistant.components.number import ( ATTR_MAX, @@ -121,7 +121,7 @@ class MockNumberEntityDescr(NumberEntity): Step is calculated based on the smaller max_value and min_value. """ - def __init__(self) -> None: + def __init__(self): """Initialize the clas instance.""" self.entity_description = NumberEntityDescription( "test", @@ -145,7 +145,7 @@ class MockNumberEntityAttrWithDescription(NumberEntity): members take precedence over the entity description. 
""" - def __init__(self) -> None: + def __init__(self): """Initialize the clas instance.""" self.entity_description = NumberEntityDescription( "test", @@ -223,7 +223,7 @@ class MockNumberEntityDescrDeprecated(NumberEntity): Step is calculated based on the smaller max_value and min_value. """ - def __init__(self) -> None: + def __init__(self): """Initialize the clas instance.""" self.entity_description = NumberEntityDescription( "test", @@ -646,7 +646,7 @@ async def test_restore_number_restore_state( assert entity0.native_min_value == native_min_value assert entity0.native_step == native_step assert entity0.native_value == native_value - assert type(entity0.native_value) is native_value_type + assert type(entity0.native_value) == native_value_type assert entity0.native_unit_of_measurement == uom diff --git a/tests/components/nut/test_diagnostics.py b/tests/components/nut/test_diagnostics.py index 2586f224d73..f91269f5196 100644 --- a/tests/components/nut/test_diagnostics.py +++ b/tests/components/nut/test_diagnostics.py @@ -39,5 +39,5 @@ async def test_diagnostics( result = await get_diagnostics_for_config_entry( hass, hass_client, mock_config_entry ) - assert result["entry"] == entry_dict | {"discovery_keys": {}} + assert result["entry"] == entry_dict assert result["nut_data"] == nut_data_dict diff --git a/tests/components/nut/test_init.py b/tests/components/nut/test_init.py index d5d85daa336..61a5187407b 100644 --- a/tests/components/nut/test_init.py +++ b/tests/components/nut/test_init.py @@ -8,9 +8,8 @@ from homeassistant.components.nut.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_HOST, CONF_PORT, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr -from .util import _get_mock_nutclient, async_init_integration +from .util import _get_mock_nutclient from tests.common import MockConfigEntry @@ -97,53 +96,3 @@ async def test_auth_fails(hass: HomeAssistant) -> None: flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 assert flows[0]["context"]["source"] == "reauth" - - -async def test_serial_number(hass: HomeAssistant) -> None: - """Test for serial number set on device.""" - mock_serial_number = "A00000000000" - await async_init_integration( - hass, - username="someuser", - password="somepassword", - list_vars={"ups.serial": mock_serial_number}, - list_ups={"ups1": "UPS 1"}, - list_commands_return_value=[], - ) - - device_registry = dr.async_get(hass) - assert device_registry is not None - - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_serial_number)} - ) - - assert device_entry is not None - assert device_entry.serial_number == mock_serial_number - - -async def test_device_location(hass: HomeAssistant) -> None: - """Test for suggested location on device.""" - mock_serial_number = "A00000000000" - mock_device_location = "XYZ Location" - await async_init_integration( - hass, - username="someuser", - password="somepassword", - list_vars={ - "ups.serial": mock_serial_number, - "device.location": mock_device_location, - }, - list_ups={"ups1": "UPS 1"}, - list_commands_return_value=[], - ) - - device_registry = dr.async_get(hass) - assert device_registry is not None - - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_serial_number)} - ) - - assert device_entry is not None - assert device_entry.suggested_area == mock_device_location diff --git a/tests/components/nws/const.py 
b/tests/components/nws/const.py index 39e954af15a..e5fc9df909f 100644 --- a/tests/components/nws/const.py +++ b/tests/components/nws/const.py @@ -66,7 +66,6 @@ CLEAR_NIGHT_OBSERVATION = DEFAULT_OBSERVATION.copy() CLEAR_NIGHT_OBSERVATION["iconTime"] = "night" SENSOR_EXPECTED_OBSERVATION_METRIC = { - "timestamp": "2019-08-12T23:53:00+00:00", "dewpoint": "5", "temperature": "10", "windChill": "5", @@ -81,7 +80,6 @@ SENSOR_EXPECTED_OBSERVATION_METRIC = { } SENSOR_EXPECTED_OBSERVATION_IMPERIAL = { - "timestamp": "2019-08-12T23:53:00+00:00", "dewpoint": str( round( TemperatureConverter.convert( @@ -187,7 +185,6 @@ DEFAULT_FORECAST = [ "temperature": 10, "windSpeedAvg": 10, "windBearing": 180, - "shortForecast": "A short forecast.", "detailedForecast": "A detailed forecast.", "timestamp": "2019-08-12T23:53:00+00:00", "iconTime": "night", diff --git a/tests/components/nws/snapshots/test_diagnostics.ambr b/tests/components/nws/snapshots/test_diagnostics.ambr index f8bd82a35c4..2db73f90054 100644 --- a/tests/components/nws/snapshots/test_diagnostics.ambr +++ b/tests/components/nws/snapshots/test_diagnostics.ambr @@ -21,7 +21,6 @@ 'number': 1, 'probabilityOfPrecipitation': 89, 'relativeHumidity': 75, - 'shortForecast': 'A short forecast.', 'startTime': '2019-08-12T20:00:00-04:00', 'temperature': 10, 'timestamp': '2019-08-12T23:53:00+00:00', @@ -49,7 +48,6 @@ 'number': 1, 'probabilityOfPrecipitation': 89, 'relativeHumidity': 75, - 'shortForecast': 'A short forecast.', 'startTime': '2019-08-12T20:00:00-04:00', 'temperature': 10, 'timestamp': '2019-08-12T23:53:00+00:00', diff --git a/tests/components/nws/snapshots/test_weather.ambr b/tests/components/nws/snapshots/test_weather.ambr index 1df1c2fa644..f4669f47615 100644 --- a/tests/components/nws/snapshots/test_weather.ambr +++ b/tests/components/nws/snapshots/test_weather.ambr @@ -1,44 +1,95 @@ # serializer version: 1 -# name: test_detailed_forecast_service[hourly] +# name: test_forecast_service[get_forecast] dict({ - 'weather.abc': dict({ - 'forecast': list([ - dict({ - 'datetime': '2019-08-12T20:00:00-04:00', - 'short_description': 'A short forecast.', - }), - ]), - }), + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'is_daytime': False, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), }) # --- -# name: test_detailed_forecast_service[twice_daily] +# name: test_forecast_service[get_forecast].1 dict({ - 'weather.abc': dict({ - 'forecast': list([ - dict({ - 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', - 'is_daytime': False, - 'short_description': 'A short forecast.', - }), - ]), - }), + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), }) # --- -# name: test_detailed_forecast_service_no_data[hourly] +# name: test_forecast_service[get_forecast].2 dict({ - 'weather.abc': dict({ - 'forecast': list([ - ]), - }), + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'is_daytime': False, + 
'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), }) # --- -# name: test_detailed_forecast_service_no_data[twice_daily] +# name: test_forecast_service[get_forecast].3 dict({ - 'weather.abc': dict({ - 'forecast': list([ - ]), - }), + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].4 + dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].5 + dict({ + 'forecast': list([ + ]), }) # --- # name: test_forecast_service[get_forecasts] @@ -48,6 +99,7 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'is_daytime': False, @@ -67,6 +119,7 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'precipitation_probability': 89, @@ -85,6 +138,7 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'is_daytime': False, @@ -104,6 +158,7 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'precipitation_probability': 89, @@ -122,6 +177,7 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'precipitation_probability': 89, @@ -146,6 +202,7 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'precipitation_probability': 89, @@ -160,6 +217,7 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'precipitation_probability': 89, diff --git a/tests/components/nws/test_weather.py b/tests/components/nws/test_weather.py index bbf808dbd1f..b4f4b5155a1 100644 --- a/tests/components/nws/test_weather.py +++ b/tests/components/nws/test_weather.py @@ -554,83 +554,3 @@ async def test_forecast_subscription_with_failing_coordinator( ) msg = await client.receive_json() assert not msg["success"] - - -@pytest.mark.parametrize( - ("forecast_type"), - [ - "hourly", - "twice_daily", - ], -) -async def test_detailed_forecast_service( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - snapshot: SnapshotAssertion, - mock_simple_nws, - no_sensor, - forecast_type: str, -) -> None: - """Test detailed forecast.""" - - entry = MockConfigEntry( - domain=nws.DOMAIN, - data=NWS_CONFIG, - ) - entry.add_to_hass(hass) - - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - response = await 
hass.services.async_call( - nws.DOMAIN, - "get_forecasts_extra", - { - "entity_id": "weather.abc", - "type": forecast_type, - }, - blocking=True, - return_response=True, - ) - assert response == snapshot - - -@pytest.mark.parametrize( - ("forecast_type"), - [ - "hourly", - "twice_daily", - ], -) -async def test_detailed_forecast_service_no_data( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - snapshot: SnapshotAssertion, - mock_simple_nws, - no_sensor, - forecast_type: str, -) -> None: - """Test detailed forecast.""" - instance = mock_simple_nws.return_value - instance.forecast = None - instance.forecast_hourly = None - entry = MockConfigEntry( - domain=nws.DOMAIN, - data=NWS_CONFIG, - ) - entry.add_to_hass(hass) - - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - response = await hass.services.async_call( - nws.DOMAIN, - "get_forecasts_extra", - { - "entity_id": "weather.abc", - "type": forecast_type, - }, - blocking=True, - return_response=True, - ) - assert response == snapshot diff --git a/tests/components/nx584/test_binary_sensor.py b/tests/components/nx584/test_binary_sensor.py index d59cbdcf69d..5c57feb471b 100644 --- a/tests/components/nx584/test_binary_sensor.py +++ b/tests/components/nx584/test_binary_sensor.py @@ -1,6 +1,5 @@ """The tests for the nx584 sensor platform.""" -from typing import Any from unittest import mock from nx584 import client as nx584_client @@ -100,9 +99,7 @@ def test_nx584_sensor_setup_full_config( assert mock_watcher.called -async def _test_assert_graceful_fail( - hass: HomeAssistant, config: dict[str, Any] -) -> None: +async def _test_assert_graceful_fail(hass, config): """Test the failing.""" assert not await async_setup_component(hass, "nx584", config) @@ -117,9 +114,7 @@ async def _test_assert_graceful_fail( ({"zone_types": {"notazone": "motion"}}), ], ) -async def test_nx584_sensor_setup_bad_config( - hass: HomeAssistant, config: dict[str, Any] -) -> None: +async def test_nx584_sensor_setup_bad_config(hass: HomeAssistant, config) -> None: """Test the setup with bad configuration.""" await _test_assert_graceful_fail(hass, config) @@ -221,8 +216,8 @@ def test_nx584_watcher_run_with_zone_events() -> None: """Return nothing twice, then some events.""" if empty_me: empty_me.pop() - return None - return fake_events + else: + return fake_events client = mock.MagicMock() fake_events = [ diff --git a/tests/components/nyt_games/__init__.py b/tests/components/nyt_games/__init__.py deleted file mode 100644 index 46dff12e5a1..00000000000 --- a/tests/components/nyt_games/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for the NYT Games integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/nyt_games/conftest.py b/tests/components/nyt_games/conftest.py deleted file mode 100644 index 1004b6eb42a..00000000000 --- a/tests/components/nyt_games/conftest.py +++ /dev/null @@ -1,57 +0,0 @@ -"""NYTGames tests configuration.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -from nyt_games.models import ConnectionsStats, WordleStats -import pytest - -from homeassistant.components.nyt_games.const import DOMAIN -from 
homeassistant.const import CONF_TOKEN - -from tests.common import MockConfigEntry, load_fixture - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.nyt_games.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_nyt_games_client() -> Generator[AsyncMock]: - """Mock an NYTGames client.""" - with ( - patch( - "homeassistant.components.nyt_games.NYTGamesClient", - autospec=True, - ) as mock_client, - patch( - "homeassistant.components.nyt_games.config_flow.NYTGamesClient", - new=mock_client, - ), - ): - client = mock_client.return_value - client.get_latest_stats.return_value = WordleStats.from_json( - load_fixture("latest.json", DOMAIN) - ).player.stats - client.get_user_id.return_value = 218886794 - client.get_connections.return_value = ConnectionsStats.from_json( - load_fixture("connections.json", DOMAIN) - ).player.stats - yield client - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title="NYTGames", - data={CONF_TOKEN: "token"}, - unique_id="218886794", - ) diff --git a/tests/components/nyt_games/fixtures/connections.json b/tests/components/nyt_games/fixtures/connections.json deleted file mode 100644 index 8c1ea18199a..00000000000 --- a/tests/components/nyt_games/fixtures/connections.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "states": [], - "user_id": 218886794, - "player": { - "user_id": 218886794, - "last_updated": 1727097528, - "stats": { - "connections": { - "puzzles_completed": 9, - "puzzles_won": 3, - "last_played_print_date": "2024-09-23", - "current_streak": 0, - "max_streak": 2, - "mistakes": { - "0": 2, - "1": 0, - "2": 1, - "3": 0, - "4": 6 - } - } - } - } -} diff --git a/tests/components/nyt_games/fixtures/latest.json b/tests/components/nyt_games/fixtures/latest.json deleted file mode 100644 index 73a6f440fc0..00000000000 --- a/tests/components/nyt_games/fixtures/latest.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "states": [], - "user_id": 218886794, - "player": { - "user_id": 218886794, - "last_updated": 1726831978, - "stats": { - "spelling_bee": { - "puzzles_started": 87, - "total_words": 362, - "total_pangrams": 15, - "longest_word": { - "word": "checkable", - "center_letter": "b", - "print_date": "2024-07-27" - }, - "ranks": { - "Beginner": 23, - "Good": 21, - "Good Start": 14, - "Moving Up": 16, - "Nice": 4, - "Solid": 9 - } - }, - "wordle": { - "legacyStats": { - "gamesPlayed": 70, - "gamesWon": 51, - "guesses": { - "1": 0, - "2": 1, - "3": 7, - "4": 11, - "5": 20, - "6": 12, - "fail": 19 - }, - "currentStreak": 1, - "maxStreak": 5, - "lastWonDayOffset": 1189, - "hasPlayed": true, - "autoOptInTimestamp": 1708273168957, - "hasMadeStatsChoice": false, - "timestamp": 1726831978 - }, - "calculatedStats": { - "gamesPlayed": 33, - "gamesWon": 26, - "guesses": { - "1": 0, - "2": 1, - "3": 4, - "4": 7, - "5": 10, - "6": 4, - "fail": 7 - }, - "currentStreak": 1, - "maxStreak": 5, - "lastWonPrintDate": "2024-09-20", - "lastCompletedPrintDate": "2024-09-20", - "hasPlayed": true, - "generation": 1 - } - } - } - } -} diff --git a/tests/components/nyt_games/fixtures/new_account.json b/tests/components/nyt_games/fixtures/new_account.json deleted file mode 100644 index ad4d8e2e416..00000000000 --- a/tests/components/nyt_games/fixtures/new_account.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "states": [], - "user_id": 260705259, - "player": { - 
"user_id": 260705259, - "last_updated": 1727358123, - "stats": { - "wordle": { - "legacyStats": { - "gamesPlayed": 1, - "gamesWon": 1, - "guesses": { - "1": 0, - "2": 0, - "3": 0, - "4": 0, - "5": 1, - "6": 0, - "fail": 0 - }, - "currentStreak": 0, - "maxStreak": 1, - "lastWonDayOffset": 1118, - "hasPlayed": true, - "autoOptInTimestamp": 1727357874700, - "hasMadeStatsChoice": false, - "timestamp": 1727358123 - }, - "calculatedStats": { - "gamesPlayed": 0, - "gamesWon": 0, - "guesses": { - "1": 0, - "2": 0, - "3": 0, - "4": 0, - "5": 0, - "6": 0, - "fail": 0 - }, - "currentStreak": 0, - "maxStreak": 1, - "lastWonPrintDate": "", - "lastCompletedPrintDate": "", - "hasPlayed": false, - "generation": 1 - } - } - } - } -} diff --git a/tests/components/nyt_games/snapshots/test_init.ambr b/tests/components/nyt_games/snapshots/test_init.ambr deleted file mode 100644 index 383bed0e106..00000000000 --- a/tests/components/nyt_games/snapshots/test_init.ambr +++ /dev/null @@ -1,97 +0,0 @@ -# serializer version: 1 -# name: test_device_info[device_connections] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'nyt_games', - '218886794_connections', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'New York Times', - 'model': None, - 'model_id': None, - 'name': 'Connections', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- -# name: test_device_info[device_spelling_bee] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'nyt_games', - '218886794_spelling_bee', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'New York Times', - 'model': None, - 'model_id': None, - 'name': 'Spelling Bee', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- -# name: test_device_info[device_wordle] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'nyt_games', - '218886794_wordle', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'New York Times', - 'model': None, - 'model_id': None, - 'name': 'Wordle', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- diff --git a/tests/components/nyt_games/snapshots/test_sensor.ambr b/tests/components/nyt_games/snapshots/test_sensor.ambr deleted file mode 100644 index 84b74a26f0d..00000000000 --- a/tests/components/nyt_games/snapshots/test_sensor.ambr +++ /dev/null @@ -1,602 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[sensor.connections_current_streak-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 
'entity_id': 'sensor.connections_current_streak', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Current streak', - 'platform': 'nyt_games', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'streak', - 'unique_id': '218886794-connections-connections_streak', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.connections_current_streak-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'Connections Current streak', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.connections_current_streak', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.connections_highest_streak-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.connections_highest_streak', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Highest streak', - 'platform': 'nyt_games', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'max_streak', - 'unique_id': '218886794-connections-connections_max_streak', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.connections_highest_streak-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'Connections Highest streak', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.connections_highest_streak', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_all_entities[sensor.connections_last_played-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.connections_last_played', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last played', - 'platform': 'nyt_games', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_played', - 'unique_id': '218886794-connections-connections_last_played', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.connections_last_played-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'Connections Last played', - }), - 'context': , - 'entity_id': 'sensor.connections_last_played', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-09-23', - }) -# --- -# name: test_all_entities[sensor.connections_played-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.connections_played', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Played', - 'platform': 'nyt_games', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'connections_played', - 'unique_id': '218886794-connections-connections_played', - 'unit_of_measurement': 'games', - }) -# --- -# name: test_all_entities[sensor.connections_played-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Connections Played', - 'state_class': , - 'unit_of_measurement': 'games', - }), - 'context': , - 'entity_id': 'sensor.connections_played', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '9', - }) -# --- -# name: test_all_entities[sensor.connections_won-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.connections_won', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Won', - 'platform': 'nyt_games', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'won', - 'unique_id': '218886794-connections-connections_won', - 'unit_of_measurement': 'games', - }) -# --- -# name: test_all_entities[sensor.connections_won-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Connections Won', - 'state_class': , - 'unit_of_measurement': 'games', - }), - 'context': , - 'entity_id': 'sensor.connections_won', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3', - }) -# --- -# name: test_all_entities[sensor.spelling_bee_played-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spelling_bee_played', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Played', - 'platform': 'nyt_games', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'spelling_bees_played', - 'unique_id': '218886794-spelling_bee-spelling_bees_played', - 'unit_of_measurement': 'games', - }) -# --- -# name: test_all_entities[sensor.spelling_bee_played-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spelling Bee Played', - 'state_class': , - 'unit_of_measurement': 'games', - }), - 'context': , - 'entity_id': 'sensor.spelling_bee_played', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '87', - }) -# --- -# name: test_all_entities[sensor.spelling_bee_total_pangrams_found-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': 
None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spelling_bee_total_pangrams_found', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Total pangrams found', - 'platform': 'nyt_games', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_pangrams', - 'unique_id': '218886794-spelling_bee-spelling_bees_total_pangrams', - 'unit_of_measurement': 'pangrams', - }) -# --- -# name: test_all_entities[sensor.spelling_bee_total_pangrams_found-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spelling Bee Total pangrams found', - 'state_class': , - 'unit_of_measurement': 'pangrams', - }), - 'context': , - 'entity_id': 'sensor.spelling_bee_total_pangrams_found', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_all_entities[sensor.spelling_bee_total_words_found-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spelling_bee_total_words_found', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Total words found', - 'platform': 'nyt_games', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_words', - 'unique_id': '218886794-spelling_bee-spelling_bees_total_words', - 'unit_of_measurement': 'words', - }) -# --- -# name: test_all_entities[sensor.spelling_bee_total_words_found-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spelling Bee Total words found', - 'state_class': , - 'unit_of_measurement': 'words', - }), - 'context': , - 'entity_id': 'sensor.spelling_bee_total_words_found', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '362', - }) -# --- -# name: test_all_entities[sensor.wordle_current_streak-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.wordle_current_streak', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Current streak', - 'platform': 'nyt_games', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'streak', - 'unique_id': '218886794-wordle-wordles_streak', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.wordle_current_streak-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'Wordle Current streak', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.wordle_current_streak', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_all_entities[sensor.wordle_highest_streak-entry] - EntityRegistryEntrySnapshot({ - 
'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.wordle_highest_streak', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Highest streak', - 'platform': 'nyt_games', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'max_streak', - 'unique_id': '218886794-wordle-wordles_max_streak', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.wordle_highest_streak-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'Wordle Highest streak', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.wordle_highest_streak', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5', - }) -# --- -# name: test_all_entities[sensor.wordle_played-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.wordle_played', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Played', - 'platform': 'nyt_games', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wordles_played', - 'unique_id': '218886794-wordle-wordles_played', - 'unit_of_measurement': 'games', - }) -# --- -# name: test_all_entities[sensor.wordle_played-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wordle Played', - 'state_class': , - 'unit_of_measurement': 'games', - }), - 'context': , - 'entity_id': 'sensor.wordle_played', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '70', - }) -# --- -# name: test_all_entities[sensor.wordle_won-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.wordle_won', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Won', - 'platform': 'nyt_games', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'won', - 'unique_id': '218886794-wordle-wordles_won', - 'unit_of_measurement': 'games', - }) -# --- -# name: test_all_entities[sensor.wordle_won-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wordle Won', - 'state_class': , - 'unit_of_measurement': 'games', - }), - 'context': , - 'entity_id': 'sensor.wordle_won', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '51', - }) -# --- diff --git a/tests/components/nyt_games/test_config_flow.py b/tests/components/nyt_games/test_config_flow.py deleted file mode 100644 index bd17724887e..00000000000 --- 
a/tests/components/nyt_games/test_config_flow.py +++ /dev/null @@ -1,125 +0,0 @@ -"""Tests for the NYT Games config flow.""" - -from unittest.mock import AsyncMock - -from nyt_games import NYTGamesAuthenticationError, NYTGamesError -import pytest - -from homeassistant.components.nyt_games.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_TOKEN -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_full_flow( - hass: HomeAssistant, - mock_nyt_games_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test full flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_TOKEN: "token"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "NYT Games" - assert result["data"] == {CONF_TOKEN: "token"} - assert result["result"].unique_id == "218886794" - - -async def test_stripping_token( - hass: HomeAssistant, - mock_nyt_games_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test stripping token.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_TOKEN: " token "}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == {CONF_TOKEN: "token"} - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (NYTGamesAuthenticationError, "invalid_auth"), - (NYTGamesError, "cannot_connect"), - (Exception, "unknown"), - ], -) -async def test_flow_errors( - hass: HomeAssistant, - mock_nyt_games_client: AsyncMock, - mock_setup_entry: AsyncMock, - exception: Exception, - error: str, -) -> None: - """Test flow errors.""" - mock_nyt_games_client.get_user_id.side_effect = exception - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_TOKEN: "token"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error} - - mock_nyt_games_client.get_user_id.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_TOKEN: "token"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_duplicate( - hass: HomeAssistant, - mock_nyt_games_client: AsyncMock, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test duplicate flow.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_TOKEN: "token"}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/nyt_games/test_init.py 
b/tests/components/nyt_games/test_init.py deleted file mode 100644 index 2e1a8c92f90..00000000000 --- a/tests/components/nyt_games/test_init.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Tests for the NYT Games integration.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.components.nyt_games.const import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from . import setup_integration - -from tests.common import MockConfigEntry - - -async def test_device_info( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_nyt_games_client: AsyncMock, - mock_config_entry: MockConfigEntry, - device_registry: dr.DeviceRegistry, -) -> None: - """Test device registry integration.""" - await setup_integration(hass, mock_config_entry) - for entity in ("wordle", "spelling_bee", "connections"): - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, f"{mock_config_entry.unique_id}_{entity}")} - ) - assert device_entry is not None - assert device_entry == snapshot(name=f"device_{entity}") diff --git a/tests/components/nyt_games/test_sensor.py b/tests/components/nyt_games/test_sensor.py deleted file mode 100644 index f35caf20b57..00000000000 --- a/tests/components/nyt_games/test_sensor.py +++ /dev/null @@ -1,77 +0,0 @@ -"""Tests for the NYT Games sensor platform.""" - -from datetime import timedelta -from unittest.mock import AsyncMock - -from freezegun.api import FrozenDateTimeFactory -from nyt_games import NYTGamesError, WordleStats -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.nyt_games.const import DOMAIN -from homeassistant.const import STATE_UNAVAILABLE -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_fixture, - snapshot_platform, -) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_nyt_games_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_updating_exception( - hass: HomeAssistant, - mock_nyt_games_client: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test handling an exception during update.""" - await setup_integration(hass, mock_config_entry) - - mock_nyt_games_client.get_latest_stats.side_effect = NYTGamesError - - freezer.tick(timedelta(minutes=15)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get("sensor.wordle_played").state == STATE_UNAVAILABLE - - mock_nyt_games_client.get_latest_stats.side_effect = None - - freezer.tick(timedelta(minutes=15)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get("sensor.wordle_played").state != STATE_UNAVAILABLE - - -async def test_new_account( - hass: HomeAssistant, - mock_nyt_games_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test handling an exception during update.""" - mock_nyt_games_client.get_latest_stats.return_value = WordleStats.from_json( - load_fixture("new_account.json", DOMAIN) - ).player.stats - await setup_integration(hass, mock_config_entry) - - assert hass.states.get("sensor.spelling_bee_played") is None diff --git a/tests/components/nzbget/conftest.py b/tests/components/nzbget/conftest.py index 8a980d3ddb0..8f48a4306c7 100644 --- a/tests/components/nzbget/conftest.py +++ b/tests/components/nzbget/conftest.py @@ -1,6 +1,5 @@ """Define fixtures available for all tests.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest @@ -9,7 +8,7 @@ from . import MOCK_HISTORY, MOCK_STATUS, MOCK_VERSION @pytest.fixture -def nzbget_api() -> Generator[MagicMock]: +def nzbget_api(hass): """Mock NZBGetApi for easier testing.""" with patch("homeassistant.components.nzbget.coordinator.NZBGetAPI") as mock_api: instance = mock_api.return_value diff --git a/tests/components/nzbget/test_init.py b/tests/components/nzbget/test_init.py index baf0a37546d..a119bb953ce 100644 --- a/tests/components/nzbget/test_init.py +++ b/tests/components/nzbget/test_init.py @@ -3,7 +3,6 @@ from unittest.mock import patch from pynzbgetapi import NZBGetAPIException -import pytest from homeassistant.components.nzbget.const import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -14,8 +13,7 @@ from . 
import ENTRY_CONFIG, _patch_version, init_integration from tests.common import MockConfigEntry -@pytest.mark.usefixtures("nzbget_api") -async def test_unload_entry(hass: HomeAssistant) -> None: +async def test_unload_entry(hass: HomeAssistant, nzbget_api) -> None: """Test successful unload of entry.""" entry = await init_integration(hass) diff --git a/tests/components/nzbget/test_sensor.py b/tests/components/nzbget/test_sensor.py index 38f7d8a68c3..30a7f262b0b 100644 --- a/tests/components/nzbget/test_sensor.py +++ b/tests/components/nzbget/test_sensor.py @@ -3,8 +3,6 @@ from datetime import timedelta from unittest.mock import patch -import pytest - from homeassistant.components.sensor import SensorDeviceClass from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, @@ -18,8 +16,9 @@ from homeassistant.util import dt as dt_util from . import init_integration -@pytest.mark.usefixtures("nzbget_api") -async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: +async def test_sensors( + hass: HomeAssistant, entity_registry: er.EntityRegistry, nzbget_api +) -> None: """Test the creation and values of the sensors.""" now = dt_util.utcnow().replace(microsecond=0) with patch("homeassistant.components.nzbget.sensor.utcnow", return_value=now): diff --git a/tests/components/nzbget/test_switch.py b/tests/components/nzbget/test_switch.py index afb88a7be82..1c518486b9f 100644 --- a/tests/components/nzbget/test_switch.py +++ b/tests/components/nzbget/test_switch.py @@ -1,7 +1,5 @@ """Test the NZBGet switches.""" -from unittest.mock import MagicMock - from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -18,7 +16,7 @@ from . import init_integration async def test_download_switch( - hass: HomeAssistant, entity_registry: er.EntityRegistry, nzbget_api: MagicMock + hass: HomeAssistant, entity_registry: er.EntityRegistry, nzbget_api ) -> None: """Test the creation and values of the download switch.""" instance = nzbget_api.return_value @@ -46,9 +44,7 @@ async def test_download_switch( assert state.state == STATE_OFF -async def test_download_switch_services( - hass: HomeAssistant, nzbget_api: MagicMock -) -> None: +async def test_download_switch_services(hass: HomeAssistant, nzbget_api) -> None: """Test download switch services.""" instance = nzbget_api.return_value diff --git a/tests/components/obihai/__init__.py b/tests/components/obihai/__init__.py index b88f0a5c874..d43aa6a9bb8 100644 --- a/tests/components/obihai/__init__.py +++ b/tests/components/obihai/__init__.py @@ -32,4 +32,3 @@ def get_schema_suggestion(schema, key): if k.description is None or "suggested_value" not in k.description: return None return k.description["suggested_value"] - return None diff --git a/tests/components/obihai/conftest.py b/tests/components/obihai/conftest.py index ef54c12ba26..c4edfdedf65 100644 --- a/tests/components/obihai/conftest.py +++ b/tests/components/obihai/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for Obihai.""" -from collections.abc import Generator from socket import gaierror from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/octoprint/test_config_flow.py b/tests/components/octoprint/test_config_flow.py index e0696486718..738fbea0887 100644 --- a/tests/components/octoprint/test_config_flow.py +++ b/tests/components/octoprint/test_config_flow.py @@ -580,7 +580,15 @@ async def test_reauth_form(hass: 
HomeAssistant) -> None: unique_id="1234", ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "entry_id": entry.entry_id, + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert not result["errors"] diff --git a/tests/components/ollama/__init__.py b/tests/components/ollama/__init__.py index 6ad77bb2217..22a576e94a4 100644 --- a/tests/components/ollama/__init__.py +++ b/tests/components/ollama/__init__.py @@ -1,7 +1,7 @@ """Tests for the Ollama integration.""" from homeassistant.components import ollama -from homeassistant.helpers import llm +from homeassistant.components.ollama.const import DEFAULT_PROMPT TEST_USER_DATA = { ollama.CONF_URL: "http://localhost:11434", @@ -9,6 +9,6 @@ TEST_USER_DATA = { } TEST_OPTIONS = { - ollama.CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT, + ollama.CONF_PROMPT: DEFAULT_PROMPT, ollama.CONF_MAX_HISTORY: 2, } diff --git a/tests/components/ollama/conftest.py b/tests/components/ollama/conftest.py index 7658d1cbfab..db1689bd416 100644 --- a/tests/components/ollama/conftest.py +++ b/tests/components/ollama/conftest.py @@ -1,14 +1,11 @@ """Tests Ollama integration.""" -from typing import Any from unittest.mock import patch import pytest from homeassistant.components import ollama -from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import HomeAssistant -from homeassistant.helpers import llm from homeassistant.setup import async_setup_component from . import TEST_OPTIONS, TEST_USER_DATA @@ -17,36 +14,17 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry_options() -> dict[str, Any]: - """Fixture for configuration entry options.""" - return TEST_OPTIONS - - -@pytest.fixture -def mock_config_entry( - hass: HomeAssistant, mock_config_entry_options: dict[str, Any] -) -> MockConfigEntry: +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Mock a config entry.""" entry = MockConfigEntry( domain=ollama.DOMAIN, data=TEST_USER_DATA, - options=mock_config_entry_options, + options=TEST_OPTIONS, ) entry.add_to_hass(hass) return entry -@pytest.fixture -def mock_config_entry_with_assist( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> MockConfigEntry: - """Mock a config entry with assist.""" - hass.config_entries.async_update_entry( - mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} - ) - return mock_config_entry - - @pytest.fixture async def mock_init_component(hass: HomeAssistant, mock_config_entry: MockConfigEntry): """Initialize integration.""" @@ -57,7 +35,6 @@ async def mock_init_component(hass: HomeAssistant, mock_config_entry: MockConfig ): assert await async_setup_component(hass, ollama.DOMAIN, {}) await hass.async_block_till_done() - yield @pytest.fixture(autouse=True) diff --git a/tests/components/ollama/snapshots/test_conversation.ambr b/tests/components/ollama/snapshots/test_conversation.ambr deleted file mode 100644 index e4dd7cd00bb..00000000000 --- a/tests/components/ollama/snapshots/test_conversation.ambr +++ /dev/null @@ -1,34 +0,0 @@ -# serializer version: 1 -# name: test_unknown_hass_api - dict({ - 'conversation_id': None, - 'response': IntentResponse( - card=dict({ - }), - error_code=, - failed_results=list([ - ]), - intent=None, - intent_targets=list([ - ]), - language='en', - matched_states=list([ - ]), - reprompt=dict({ - }), - response_type=, - speech=dict({ - 
'plain': dict({ - 'extra_data': None, - 'speech': 'Error preparing LLM API: API non-existing not found', - }), - }), - speech_slots=dict({ - }), - success_results=list([ - ]), - unmatched_states=list([ - ]), - ), - }) -# --- diff --git a/tests/components/ollama/test_config_flow.py b/tests/components/ollama/test_config_flow.py index 7755f2208b4..b1b74197139 100644 --- a/tests/components/ollama/test_config_flow.py +++ b/tests/components/ollama/test_config_flow.py @@ -164,18 +164,13 @@ async def test_options( ) options = await hass.config_entries.options.async_configure( options_flow["flow_id"], - { - ollama.CONF_PROMPT: "test prompt", - ollama.CONF_MAX_HISTORY: 100, - ollama.CONF_NUM_CTX: 32768, - }, + {ollama.CONF_PROMPT: "test prompt", ollama.CONF_MAX_HISTORY: 100}, ) await hass.async_block_till_done() assert options["type"] is FlowResultType.CREATE_ENTRY assert options["data"] == { ollama.CONF_PROMPT: "test prompt", ollama.CONF_MAX_HISTORY: 100, - ollama.CONF_NUM_CTX: 32768, } diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index 66dc8a0c603..b6f0be3c414 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -1,19 +1,21 @@ """Tests for the Ollama integration.""" -from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, patch from ollama import Message, ResponseError import pytest -from syrupy.assertion import SnapshotAssertion -import voluptuous as vol from homeassistant.components import conversation, ollama from homeassistant.components.conversation import trace -from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_LLM_HASS_API, MATCH_ALL +from homeassistant.components.homeassistant.exposed_entities import async_expose_entity +from homeassistant.const import ATTR_FRIENDLY_NAME, MATCH_ALL from homeassistant.core import Context, HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import intent, llm +from homeassistant.helpers import ( + area_registry as ar, + device_registry as dr, + entity_registry as er, + intent, +) from tests.common import MockConfigEntry @@ -23,6 +25,9 @@ async def test_chat( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, agent_id: str, ) -> None: """Test that the chat function is called with the appropriate arguments.""" @@ -30,8 +35,48 @@ async def test_chat( if agent_id is None: agent_id = mock_config_entry.entry_id + # Create some areas, devices, and entities + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") + area_bedroom = area_registry.async_get_or_create("bedroom_id") + area_bedroom = area_registry.async_update(area_bedroom.id, name="bedroom") + area_office = area_registry.async_get_or_create("office_id") + area_office = area_registry.async_update(area_office.id, name="office") + entry = MockConfigEntry() entry.add_to_hass(hass) + kitchen_device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("demo", "id-1234")}, + ) + device_registry.async_update_device(kitchen_device.id, area_id=area_kitchen.id) + + kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light = entity_registry.async_update_entity( + 
kitchen_light.entity_id, device_id=kitchen_device.id + ) + hass.states.async_set( + kitchen_light.entity_id, "on", attributes={ATTR_FRIENDLY_NAME: "kitchen light"} + ) + + bedroom_light = entity_registry.async_get_or_create("light", "demo", "5678") + bedroom_light = entity_registry.async_update_entity( + bedroom_light.entity_id, area_id=area_bedroom.id + ) + hass.states.async_set( + bedroom_light.entity_id, "on", attributes={ATTR_FRIENDLY_NAME: "bedroom light"} + ) + + # Hide the office light + office_light = entity_registry.async_get_or_create("light", "demo", "ABCD") + office_light = entity_registry.async_update_entity( + office_light.entity_id, area_id=area_office.id + ) + hass.states.async_set( + office_light.entity_id, "on", attributes={ATTR_FRIENDLY_NAME: "office light"} + ) + async_expose_entity(hass, conversation.DOMAIN, office_light.entity_id, False) with patch( "ollama.AsyncClient.chat", @@ -55,6 +100,12 @@ async def test_chat( Message({"role": "user", "content": "test message"}), ] + # Verify only exposed devices/areas are in prompt + assert "kitchen light" in prompt + assert "bedroom light" in prompt + assert "office light" not in prompt + assert "office" not in prompt + assert ( result.response.response_type == intent.IntentResponseType.ACTION_DONE ), result @@ -71,255 +122,7 @@ async def test_chat( ] # AGENT_DETAIL event contains the raw prompt passed to the model detail_event = trace_events[1] - assert "Current time is" in detail_event["data"]["messages"][0]["content"] - - -async def test_template_variables( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Test that template variables work.""" - context = Context(user_id="12345") - mock_user = Mock() - mock_user.id = "12345" - mock_user.name = "Test User" - - hass.config_entries.async_update_entry( - mock_config_entry, - options={ - "prompt": ( - "The user name is {{ user_name }}. " - "The user id is {{ llm_context.context.user_id }}." - ), - }, - ) - with ( - patch("ollama.AsyncClient.list"), - patch( - "ollama.AsyncClient.chat", - return_value={"message": {"role": "assistant", "content": "test response"}}, - ) as mock_chat, - patch("homeassistant.auth.AuthManager.async_get_user", return_value=mock_user), - ): - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - result = await conversation.async_converse( - hass, "hello", None, context, agent_id=mock_config_entry.entry_id - ) - - assert ( - result.response.response_type == intent.IntentResponseType.ACTION_DONE - ), result - - args = mock_chat.call_args.kwargs - prompt = args["messages"][0]["content"] - - assert "The user name is Test User." in prompt - assert "The user id is 12345." 
in prompt - - -@pytest.mark.parametrize( - ("tool_args", "expected_tool_args"), - [ - ({"param1": "test_value"}, {"param1": "test_value"}), - ({"param2": 2}, {"param2": 2}), - ( - {"param1": "test_value", "floor": ""}, - {"param1": "test_value"}, # Omit empty arguments - ), - ( - {"domain": '["light"]'}, - {"domain": ["light"]}, # Repair invalid json arguments - ), - ( - {"domain": "['light']"}, - {"domain": "['light']"}, # Preserve invalid json that can't be parsed - ), - ], -) -@patch("homeassistant.components.ollama.conversation.llm.AssistAPI._async_get_tools") -async def test_function_call( - mock_get_tools, - hass: HomeAssistant, - mock_config_entry_with_assist: MockConfigEntry, - mock_init_component, - tool_args: dict[str, Any], - expected_tool_args: dict[str, Any], -) -> None: - """Test function call from the assistant.""" - agent_id = mock_config_entry_with_assist.entry_id - context = Context() - - mock_tool = AsyncMock() - mock_tool.name = "test_tool" - mock_tool.description = "Test function" - mock_tool.parameters = vol.Schema( - {vol.Optional("param1", description="Test parameters"): str}, - extra=vol.ALLOW_EXTRA, - ) - mock_tool.async_call.return_value = "Test response" - - mock_get_tools.return_value = [mock_tool] - - def completion_result(*args, messages, **kwargs): - for message in messages: - if message["role"] == "tool": - return { - "message": { - "role": "assistant", - "content": "I have successfully called the function", - } - } - - return { - "message": { - "role": "assistant", - "tool_calls": [ - { - "function": { - "name": "test_tool", - "arguments": tool_args, - } - } - ], - } - } - - with patch( - "ollama.AsyncClient.chat", - side_effect=completion_result, - ) as mock_chat: - result = await conversation.async_converse( - hass, - "Please call the test function", - None, - context, - agent_id=agent_id, - ) - - assert mock_chat.call_count == 2 - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert ( - result.response.speech["plain"]["speech"] - == "I have successfully called the function" - ) - mock_tool.async_call.assert_awaited_once_with( - hass, - llm.ToolInput( - tool_name="test_tool", - tool_args=expected_tool_args, - ), - llm.LLMContext( - platform="ollama", - context=context, - user_prompt="Please call the test function", - language="en", - assistant="conversation", - device_id=None, - ), - ) - - -@patch("homeassistant.components.ollama.conversation.llm.AssistAPI._async_get_tools") -async def test_function_exception( - mock_get_tools, - hass: HomeAssistant, - mock_config_entry_with_assist: MockConfigEntry, - mock_init_component, -) -> None: - """Test function call with exception.""" - agent_id = mock_config_entry_with_assist.entry_id - context = Context() - - mock_tool = AsyncMock() - mock_tool.name = "test_tool" - mock_tool.description = "Test function" - mock_tool.parameters = vol.Schema( - {vol.Optional("param1", description="Test parameters"): str} - ) - mock_tool.async_call.side_effect = HomeAssistantError("Test tool exception") - - mock_get_tools.return_value = [mock_tool] - - def completion_result(*args, messages, **kwargs): - for message in messages: - if message["role"] == "tool": - return { - "message": { - "role": "assistant", - "content": "There was an error calling the function", - } - } - - return { - "message": { - "role": "assistant", - "tool_calls": [ - { - "function": { - "name": "test_tool", - "arguments": {"param1": "test_value"}, - } - } - ], - } - } - - with patch( - "ollama.AsyncClient.chat", - 
side_effect=completion_result, - ) as mock_chat: - result = await conversation.async_converse( - hass, - "Please call the test function", - None, - context, - agent_id=agent_id, - ) - - assert mock_chat.call_count == 2 - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert ( - result.response.speech["plain"]["speech"] - == "There was an error calling the function" - ) - mock_tool.async_call.assert_awaited_once_with( - hass, - llm.ToolInput( - tool_name="test_tool", - tool_args={"param1": "test_value"}, - ), - llm.LLMContext( - platform="ollama", - context=context, - user_prompt="Please call the test function", - language="en", - assistant="conversation", - device_id=None, - ), - ) - - -async def test_unknown_hass_api( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - mock_init_component, -) -> None: - """Test when we reference an API that no longer exists.""" - hass.config_entries.async_update_entry( - mock_config_entry, - options={ - **mock_config_entry.options, - CONF_LLM_HASS_API: "non-existing", - }, - ) - await hass.async_block_till_done() - - result = await conversation.async_converse( - hass, "hello", None, Context(), agent_id=mock_config_entry.entry_id - ) - - assert result == snapshot + assert "The current time is" in detail_event["data"]["messages"][0]["content"] async def test_message_history_trimming( @@ -483,10 +286,8 @@ async def test_message_history_unlimited( "ollama.AsyncClient.chat", return_value={"message": {"role": "assistant", "content": "test response"}}, ), + patch.object(mock_config_entry, "options", {ollama.CONF_MAX_HISTORY: 0}), ): - hass.config_entries.async_update_entry( - mock_config_entry, options={ollama.CONF_MAX_HISTORY: 0} - ) for i in range(100): result = await conversation.async_converse( hass, @@ -558,57 +359,3 @@ async def test_conversation_agent( mock_config_entry.entry_id ) assert agent.supported_languages == MATCH_ALL - - state = hass.states.get("conversation.mock_title") - assert state - assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 - - -async def test_conversation_agent_with_assist( - hass: HomeAssistant, - mock_config_entry_with_assist: MockConfigEntry, - mock_init_component, -) -> None: - """Test OllamaConversationEntity.""" - agent = conversation.get_agent_manager(hass).async_get_agent( - mock_config_entry_with_assist.entry_id - ) - assert agent.supported_languages == MATCH_ALL - - state = hass.states.get("conversation.mock_title") - assert state - assert ( - state.attributes[ATTR_SUPPORTED_FEATURES] - == conversation.ConversationEntityFeature.CONTROL - ) - - -@pytest.mark.parametrize( - ("mock_config_entry_options", "expected_options"), - [ - ({}, {"num_ctx": 8192}), - ({"num_ctx": 16384}, {"num_ctx": 16384}), - ], -) -async def test_options( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, - expected_options: dict[str, Any], -) -> None: - """Test that options are passed correctly to ollama client.""" - with patch( - "ollama.AsyncClient.chat", - return_value={"message": {"role": "assistant", "content": "test response"}}, - ) as mock_chat: - await conversation.async_converse( - hass, - "test message", - None, - Context(), - agent_id="conversation.mock_title", - ) - - assert mock_chat.call_count == 1 - args = mock_chat.call_args.kwargs - assert args.get("options") == expected_options diff --git a/tests/components/onboarding/test_views.py b/tests/components/onboarding/test_views.py index 35f6b7d739c..e9ba720adb3 100644 --- 
a/tests/components/onboarding/test_views.py +++ b/tests/components/onboarding/test_views.py @@ -1,13 +1,13 @@ """Test the onboarding views.""" import asyncio -from collections.abc import AsyncGenerator from http import HTTPStatus import os from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import Mock, patch import pytest +from typing_extensions import AsyncGenerator from homeassistant.components import onboarding from homeassistant.components.onboarding import const, views @@ -28,7 +28,7 @@ from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def auth_active(hass: HomeAssistant) -> None: +def auth_active(hass): """Ensure auth is always active.""" hass.loop.run_until_complete( register_auth_provider(hass, {"type": "homeassistant"}) @@ -70,13 +70,23 @@ async def no_rpi_fixture( @pytest.fixture(name="mock_supervisor") async def mock_supervisor_fixture( aioclient_mock: AiohttpClientMocker, - store_info: AsyncMock, - supervisor_is_connected: AsyncMock, - resolution_info: AsyncMock, ) -> AsyncGenerator[None]: """Mock supervisor.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -89,6 +99,10 @@ async def mock_supervisor_fixture( ) with ( patch.dict(os.environ, {"SUPERVISOR": "127.0.0.1"}), + patch( + "homeassistant.components.hassio.HassIO.is_connected", + return_value=True, + ), patch( "homeassistant.components.hassio.HassIO.get_info", return_value={}, @@ -97,6 +111,10 @@ async def mock_supervisor_fixture( "homeassistant.components.hassio.HassIO.get_host_info", return_value={}, ), + patch( + "homeassistant.components.hassio.HassIO.get_store", + return_value={}, + ), patch( "homeassistant.components.hassio.HassIO.get_supervisor_info", return_value={"diagnostics": True}, diff --git a/tests/components/ondilo_ico/conftest.py b/tests/components/ondilo_ico/conftest.py index d35e5ac0003..6a03d6961c2 100644 --- a/tests/components/ondilo_ico/conftest.py +++ b/tests/components/ondilo_ico/conftest.py @@ -1,10 +1,10 @@ """Provide basic Ondilo fixture.""" -from collections.abc import Generator from typing import Any from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.ondilo_ico.const import DOMAIN @@ -46,37 +46,37 @@ def mock_ondilo_client( yield client -@pytest.fixture(scope="package") +@pytest.fixture(scope="session") def pool1() -> list[dict[str, Any]]: """First pool description.""" return [load_json_object_fixture("pool1.json", DOMAIN)] -@pytest.fixture(scope="package") +@pytest.fixture(scope="session") def pool2() -> list[dict[str, Any]]: """Second pool description.""" return [load_json_object_fixture("pool2.json", DOMAIN)] -@pytest.fixture(scope="package") +@pytest.fixture(scope="session") def ico_details1() -> dict[str, Any]: """ICO details of first pool.""" return load_json_object_fixture("ico_details1.json", DOMAIN) -@pytest.fixture(scope="package") +@pytest.fixture(scope="session") def ico_details2() -> dict[str, Any]: """ICO details of second pool.""" return load_json_object_fixture("ico_details2.json", DOMAIN) -@pytest.fixture(scope="package") 
+@pytest.fixture(scope="session") def last_measures() -> list[dict[str, Any]]: """Pool measurements.""" return load_json_array_fixture("last_measures.json", DOMAIN) -@pytest.fixture(scope="package") +@pytest.fixture(scope="session") def two_pools( pool1: list[dict[str, Any]], pool2: list[dict[str, Any]] ) -> list[dict[str, Any]]: diff --git a/tests/components/ondilo_ico/snapshots/test_init.ambr b/tests/components/ondilo_ico/snapshots/test_init.ambr index 44008ac907e..355c5902722 100644 --- a/tests/components/ondilo_ico/snapshots/test_init.ambr +++ b/tests/components/ondilo_ico/snapshots/test_init.ambr @@ -21,7 +21,6 @@ }), 'manufacturer': 'Ondilo', 'model': 'ICO', - 'model_id': None, 'name': 'Pool 1', 'name_by_user': None, 'primary_config_entry': , @@ -53,7 +52,6 @@ }), 'manufacturer': 'Ondilo', 'model': 'ICO', - 'model_id': None, 'name': 'Pool 2', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/ondilo_ico/test_init.py b/tests/components/ondilo_ico/test_init.py index 67f68f27b3e..707022e9145 100644 --- a/tests/components/ondilo_ico/test_init.py +++ b/tests/components/ondilo_ico/test_init.py @@ -3,8 +3,6 @@ from typing import Any from unittest.mock import MagicMock -from ondilo import OndiloError -import pytest from syrupy import SnapshotAssertion from homeassistant.config_entries import ConfigEntryState @@ -37,29 +35,6 @@ async def test_devices( assert device_entry == snapshot(name=f"{identifier[0]}-{identifier[1]}") -async def test_get_pools_error( - hass: HomeAssistant, - mock_ondilo_client: MagicMock, - config_entry: MockConfigEntry, -) -> None: - """Test get pools errors.""" - mock_ondilo_client.get_pools.side_effect = OndiloError( - 502, - ( - " 502 Bad Gateway " - "
502 Bad Gateway
" - ), - ) - await setup_integration(hass, config_entry, mock_ondilo_client) - - # No sensor should be created - assert not hass.states.async_all() - # We should not have tried to retrieve pool measures - assert mock_ondilo_client.get_ICO_details.call_count == 0 - assert mock_ondilo_client.get_last_pool_measures.call_count == 0 - assert config_entry.state is ConfigEntryState.SETUP_RETRY - - async def test_init_with_no_ico_attached( hass: HomeAssistant, mock_ondilo_client: MagicMock, @@ -78,77 +53,3 @@ async def test_init_with_no_ico_attached( # We should not have tried to retrieve pool measures mock_ondilo_client.get_last_pool_measures.assert_not_called() assert config_entry.state is ConfigEntryState.SETUP_RETRY - - -@pytest.mark.parametrize("api", ["get_ICO_details", "get_last_pool_measures"]) -async def test_details_error_all_pools( - hass: HomeAssistant, - mock_ondilo_client: MagicMock, - device_registry: dr.DeviceRegistry, - config_entry: MockConfigEntry, - pool1: dict[str, Any], - api: str, -) -> None: - """Test details and measures error for all pools.""" - mock_ondilo_client.get_pools.return_value = pool1 - client_api = getattr(mock_ondilo_client, api) - client_api.side_effect = OndiloError(400, "error") - - await setup_integration(hass, config_entry, mock_ondilo_client) - - device_entries = dr.async_entries_for_config_entry( - device_registry, config_entry.entry_id - ) - - assert not device_entries - assert config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_details_error_one_pool( - hass: HomeAssistant, - mock_ondilo_client: MagicMock, - device_registry: dr.DeviceRegistry, - config_entry: MockConfigEntry, - ico_details2: dict[str, Any], -) -> None: - """Test details error for one pool and success for the other.""" - mock_ondilo_client.get_ICO_details.side_effect = [ - OndiloError( - 404, - "Not Found", - ), - ico_details2, - ] - - await setup_integration(hass, config_entry, mock_ondilo_client) - - device_entries = dr.async_entries_for_config_entry( - device_registry, config_entry.entry_id - ) - - assert len(device_entries) == 1 - - -async def test_measures_error_one_pool( - hass: HomeAssistant, - mock_ondilo_client: MagicMock, - device_registry: dr.DeviceRegistry, - config_entry: MockConfigEntry, - last_measures: list[dict[str, Any]], -) -> None: - """Test measures error for one pool and success for the other.""" - mock_ondilo_client.get_last_pool_measures.side_effect = [ - OndiloError( - 404, - "Not Found", - ), - last_measures, - ] - - await setup_integration(hass, config_entry, mock_ondilo_client) - - device_entries = dr.async_entries_for_config_entry( - device_registry, config_entry.entry_id - ) - - assert len(device_entries) == 1 diff --git a/tests/components/onewire/conftest.py b/tests/components/onewire/conftest.py index 65a86b58f2f..47b50ab10e0 100644 --- a/tests/components/onewire/conftest.py +++ b/tests/components/onewire/conftest.py @@ -1,10 +1,10 @@ """Provide common 1-Wire fixtures.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pyownet.protocol import ConnError import pytest +from typing_extensions import Generator from homeassistant.components.onewire.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntry diff --git a/tests/components/onewire/snapshots/test_binary_sensor.ambr b/tests/components/onewire/snapshots/test_binary_sensor.ambr index 450cc4c7486..b3d330291ab 100644 --- a/tests/components/onewire/snapshots/test_binary_sensor.ambr +++ 
b/tests/components/onewire/snapshots/test_binary_sensor.ambr @@ -34,7 +34,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2405', - 'model_id': None, 'name': '05.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -76,7 +75,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18S20', - 'model_id': None, 'name': '10.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -118,7 +116,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2406', - 'model_id': None, 'name': '12.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -260,7 +257,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', - 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -302,7 +298,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2409', - 'model_id': None, 'name': '1F.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -332,7 +327,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', - 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -374,7 +368,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1822', - 'model_id': None, 'name': '22.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -416,7 +409,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', - 'model_id': None, 'name': '26.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -458,7 +450,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', - 'model_id': None, 'name': '28.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -500,7 +491,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', - 'model_id': None, 'name': '28.222222222222', 'name_by_user': None, 'primary_config_entry': , @@ -542,7 +532,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', - 'model_id': None, 'name': '28.222222222223', 'name_by_user': None, 'primary_config_entry': , @@ -584,7 +573,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2408', - 'model_id': None, 'name': '29.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -978,7 +966,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2760', - 'model_id': None, 'name': '30.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1020,7 +1007,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2413', - 'model_id': None, 'name': '3A.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1150,7 +1136,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1825', - 'model_id': None, 'name': '3B.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1192,7 +1177,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS28EA00', - 'model_id': None, 'name': '42.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1234,7 +1218,6 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0068', - 'model_id': None, 'name': '7E.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1276,7 +1259,6 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0066', - 'model_id': None, 'name': '7E.222222222222', 'name_by_user': None, 'primary_config_entry': , @@ -1318,7 +1300,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', - 'model_id': None, 'name': 'A6.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1360,7 +1341,6 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HobbyBoards_EF', - 'model_id': None, 'name': 'EF.111111111111', 'name_by_user': None, 
'primary_config_entry': , @@ -1402,7 +1382,6 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_MOISTURE_METER', - 'model_id': None, 'name': 'EF.111111111112', 'name_by_user': None, 'primary_config_entry': , @@ -1444,7 +1423,6 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_HUB', - 'model_id': None, 'name': 'EF.111111111113', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/onewire/snapshots/test_sensor.ambr b/tests/components/onewire/snapshots/test_sensor.ambr index 5ad4cf2ef4b..acf9ea6a8c8 100644 --- a/tests/components/onewire/snapshots/test_sensor.ambr +++ b/tests/components/onewire/snapshots/test_sensor.ambr @@ -34,7 +34,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2405', - 'model_id': None, 'name': '05.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -76,7 +75,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18S20', - 'model_id': None, 'name': '10.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -167,7 +165,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2406', - 'model_id': None, 'name': '12.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -319,7 +316,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', - 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -457,7 +453,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2409', - 'model_id': None, 'name': '1F.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -487,7 +482,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', - 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -625,7 +619,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1822', - 'model_id': None, 'name': '22.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -716,7 +709,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', - 'model_id': None, 'name': '26.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1297,7 +1289,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', - 'model_id': None, 'name': '28.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1388,7 +1379,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', - 'model_id': None, 'name': '28.222222222222', 'name_by_user': None, 'primary_config_entry': , @@ -1479,7 +1469,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', - 'model_id': None, 'name': '28.222222222223', 'name_by_user': None, 'primary_config_entry': , @@ -1570,7 +1559,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2408', - 'model_id': None, 'name': '29.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1612,7 +1600,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2760', - 'model_id': None, 'name': '30.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1850,7 +1837,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2413', - 'model_id': None, 'name': '3A.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1892,7 +1878,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1825', - 'model_id': None, 'name': '3B.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1983,7 +1968,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS28EA00', - 'model_id': None, 'name': '42.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -2074,7 +2058,6 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0068', - 
'model_id': None, 'name': '7E.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -2312,7 +2295,6 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0066', - 'model_id': None, 'name': '7E.222222222222', 'name_by_user': None, 'primary_config_entry': , @@ -2452,7 +2434,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', - 'model_id': None, 'name': 'A6.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -3033,7 +3014,6 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HobbyBoards_EF', - 'model_id': None, 'name': 'EF.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -3222,7 +3202,6 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_MOISTURE_METER', - 'model_id': None, 'name': 'EF.111111111112', 'name_by_user': None, 'primary_config_entry': , @@ -3460,7 +3439,6 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_HUB', - 'model_id': None, 'name': 'EF.111111111113', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/onewire/snapshots/test_switch.ambr b/tests/components/onewire/snapshots/test_switch.ambr index 3bc7a2d3def..d6cbb6f3fef 100644 --- a/tests/components/onewire/snapshots/test_switch.ambr +++ b/tests/components/onewire/snapshots/test_switch.ambr @@ -34,7 +34,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2405', - 'model_id': None, 'name': '05.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -120,7 +119,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18S20', - 'model_id': None, 'name': '10.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -162,7 +160,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2406', - 'model_id': None, 'name': '12.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -392,7 +389,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', - 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -434,7 +430,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2409', - 'model_id': None, 'name': '1F.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -464,7 +459,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', - 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -506,7 +500,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1822', - 'model_id': None, 'name': '22.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -548,7 +541,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', - 'model_id': None, 'name': '26.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -634,7 +626,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', - 'model_id': None, 'name': '28.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -676,7 +667,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', - 'model_id': None, 'name': '28.222222222222', 'name_by_user': None, 'primary_config_entry': , @@ -718,7 +708,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', - 'model_id': None, 'name': '28.222222222223', 'name_by_user': None, 'primary_config_entry': , @@ -760,7 +749,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2408', - 'model_id': None, 'name': '29.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1506,7 +1494,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2760', - 'model_id': None, 'name': '30.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1548,7 +1535,6 @@ }), 
'manufacturer': 'Maxim Integrated', 'model': 'DS2413', - 'model_id': None, 'name': '3A.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1678,7 +1664,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1825', - 'model_id': None, 'name': '3B.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1720,7 +1705,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS28EA00', - 'model_id': None, 'name': '42.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1762,7 +1746,6 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0068', - 'model_id': None, 'name': '7E.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1804,7 +1787,6 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0066', - 'model_id': None, 'name': '7E.222222222222', 'name_by_user': None, 'primary_config_entry': , @@ -1846,7 +1828,6 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', - 'model_id': None, 'name': 'A6.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1932,7 +1913,6 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HobbyBoards_EF', - 'model_id': None, 'name': 'EF.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1974,7 +1954,6 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_MOISTURE_METER', - 'model_id': None, 'name': 'EF.111111111112', 'name_by_user': None, 'primary_config_entry': , @@ -2368,7 +2347,6 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_HUB', - 'model_id': None, 'name': 'EF.111111111113', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/onewire/test_binary_sensor.py b/tests/components/onewire/test_binary_sensor.py index 31895f705ff..8b1129529d5 100644 --- a/tests/components/onewire/test_binary_sensor.py +++ b/tests/components/onewire/test_binary_sensor.py @@ -1,10 +1,10 @@ """Tests for 1-Wire binary sensors.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/onewire/test_config_flow.py b/tests/components/onewire/test_config_flow.py index c554624267d..c147a522a59 100644 --- a/tests/components/onewire/test_config_flow.py +++ b/tests/components/onewire/test_config_flow.py @@ -253,10 +253,6 @@ async def test_user_options_set_multiple( ) -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.onewire.options.abort.No configurable devices found."], -) async def test_user_options_no_devices( hass: HomeAssistant, config_entry: ConfigEntry ) -> None: diff --git a/tests/components/onewire/test_diagnostics.py b/tests/components/onewire/test_diagnostics.py index ecdae859597..62b045c4516 100644 --- a/tests/components/onewire/test_diagnostics.py +++ b/tests/components/onewire/test_diagnostics.py @@ -1,10 +1,10 @@ """Test 1-Wire diagnostics.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/onewire/test_sensor.py b/tests/components/onewire/test_sensor.py index ba0e21701f8..df0a81920c9 100644 --- a/tests/components/onewire/test_sensor.py +++ b/tests/components/onewire/test_sensor.py @@ -1,6 +1,5 @@ """Tests 
for 1-Wire sensors.""" -from collections.abc import Generator from copy import deepcopy import logging from unittest.mock import MagicMock, _patch_dict, patch @@ -8,6 +7,7 @@ from unittest.mock import MagicMock, _patch_dict, patch from pyownet.protocol import OwnetError import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/onewire/test_switch.py b/tests/components/onewire/test_switch.py index 936e83f66ec..b1b8e5ddbd0 100644 --- a/tests/components/onewire/test_switch.py +++ b/tests/components/onewire/test_switch.py @@ -1,10 +1,10 @@ """Tests for 1-Wire switches.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntry diff --git a/tests/components/onkyo/__init__.py b/tests/components/onkyo/__init__.py deleted file mode 100644 index 8900f189aea..00000000000 --- a/tests/components/onkyo/__init__.py +++ /dev/null @@ -1,78 +0,0 @@ -"""Tests for the Onkyo integration.""" - -from unittest.mock import AsyncMock, Mock, patch - -from homeassistant.components.onkyo.receiver import Receiver, ReceiverInfo -from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -def create_receiver_info(id: int) -> ReceiverInfo: - """Create an empty receiver info object for testing.""" - return ReceiverInfo( - host=f"host {id}", - port=id, - model_name=f"type {id}", - identifier=f"id{id}", - ) - - -def create_config_entry_from_info(info: ReceiverInfo) -> MockConfigEntry: - """Create a config entry from receiver info.""" - data = {CONF_HOST: info.host} - options = { - "volume_resolution": 80, - "input_sources": {"12": "tv"}, - "max_volume": 100, - } - - return MockConfigEntry( - data=data, - options=options, - title=info.model_name, - domain="onkyo", - unique_id=info.identifier, - ) - - -def create_empty_config_entry() -> MockConfigEntry: - """Create an empty config entry for use in unit tests.""" - data = {CONF_HOST: ""} - options = { - "volume_resolution": 80, - "input_sources": {"12": "tv"}, - "max_volume": 100, - } - - return MockConfigEntry( - data=data, - options=options, - title="Unit test Onkyo", - domain="onkyo", - unique_id="onkyo_unique_id", - ) - - -async def setup_integration( - hass: HomeAssistant, config_entry: MockConfigEntry, receiver_info: ReceiverInfo -) -> None: - """Fixture for setting up the component.""" - - config_entry.add_to_hass(hass) - - mock_receiver = AsyncMock() - mock_receiver.conn.close = Mock() - mock_receiver.callbacks.connect = Mock() - mock_receiver.callbacks.update = Mock() - - with ( - patch( - "homeassistant.components.onkyo.async_interview", - return_value=receiver_info, - ), - patch.object(Receiver, "async_create", return_value=mock_receiver), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/onkyo/conftest.py b/tests/components/onkyo/conftest.py deleted file mode 100644 index c37966e3bae..00000000000 --- a/tests/components/onkyo/conftest.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Configure tests for the Onkyo integration.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, 
patch - -import pytest - -from homeassistant.components.onkyo.const import DOMAIN - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.onkyo.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture(name="config_entry") -def mock_config_entry() -> MockConfigEntry: - """Create Onkyo entry in Home Assistant.""" - return MockConfigEntry( - domain=DOMAIN, - title="Onkyo", - data={}, - ) diff --git a/tests/components/onkyo/test_config_flow.py b/tests/components/onkyo/test_config_flow.py deleted file mode 100644 index f230ab124bd..00000000000 --- a/tests/components/onkyo/test_config_flow.py +++ /dev/null @@ -1,532 +0,0 @@ -"""Test Onkyo config flow.""" - -from typing import Any -from unittest.mock import patch - -import pytest - -from homeassistant import config_entries -from homeassistant.components.onkyo import InputSource -from homeassistant.components.onkyo.config_flow import OnkyoConfigFlow -from homeassistant.components.onkyo.const import ( - DOMAIN, - OPTION_MAX_VOLUME, - OPTION_VOLUME_RESOLUTION, -) -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType, InvalidData - -from . import ( - create_config_entry_from_info, - create_empty_config_entry, - create_receiver_info, - setup_integration, -) - -from tests.common import Mock, MockConfigEntry - - -async def test_user_initial_menu(hass: HomeAssistant) -> None: - """Test initial menu.""" - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - assert init_result["type"] is FlowResultType.MENU - # Check if the values are there, but ignore order - assert not set(init_result["menu_options"]) ^ {"manual", "eiscp_discovery"} - - -async def test_manual_valid_host(hass: HomeAssistant) -> None: - """Test valid host entered.""" - - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "manual"}, - ) - - mock_info = Mock() - mock_info.identifier = "mock_id" - mock_info.host = "mock_host" - mock_info.model_name = "mock_model" - - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=mock_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) - - assert select_result["step_id"] == "configure_receiver" - assert ( - select_result["description_placeholders"]["name"] - == "mock_model (mock_host)" - ) - - -async def test_manual_invalid_host(hass: HomeAssistant) -> None: - """Test invalid host entered.""" - - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "manual"}, - ) - - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", return_value=None - ): - host_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) - - assert host_result["step_id"] == "manual" - assert host_result["errors"]["base"] == "cannot_connect" - 
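Note on the removed Onkyo config-flow tests above: they all share one shape — start a user flow, advance to the "manual" step, then patch `homeassistant.components.onkyo.config_flow.async_interview` to control the outcome (a receiver-info object moves the flow to `configure_receiver`, `None` surfaces `cannot_connect`, and a raised exception surfaces `unknown`). A minimal sketch of that pattern, assuming the same `DOMAIN`, `SOURCE_USER`, `CONF_HOST` and the `create_receiver_info` helper imported from the deleted test package `__init__.py`; not part of the diff itself:

    async def test_manual_step_outcome_sketch(hass: HomeAssistant) -> None:
        """Sketch only: drive the manual step with a patched interview."""
        init = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": SOURCE_USER}
        )
        form = await hass.config_entries.flow.async_configure(
            init["flow_id"], {"next_step_id": "manual"}
        )
        # Successful interview: the flow proceeds to the receiver configuration step.
        with patch(
            "homeassistant.components.onkyo.config_flow.async_interview",
            return_value=create_receiver_info(1),
        ):
            result = await hass.config_entries.flow.async_configure(
                form["flow_id"], user_input={CONF_HOST: "sample-host-name"}
            )
        assert result["step_id"] == "configure_receiver"

The failure variants in the deleted file differ only in what the patched `async_interview` returns or raises, and in the asserted `errors["base"]` value.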
- -async def test_manual_valid_host_unexpected_error(hass: HomeAssistant) -> None: - """Test valid host entered.""" - - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "manual"}, - ) - - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - side_effect=Exception(), - ): - host_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) - - assert host_result["step_id"] == "manual" - assert host_result["errors"]["base"] == "unknown" - - -async def test_discovery_and_no_devices_discovered(hass: HomeAssistant) -> None: - """Test initial menu.""" - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - with patch( - "homeassistant.components.onkyo.config_flow.async_discover", return_value=[] - ): - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "eiscp_discovery"}, - ) - - assert form_result["type"] is FlowResultType.ABORT - assert form_result["reason"] == "no_devices_found" - - -async def test_discovery_with_exception(hass: HomeAssistant) -> None: - """Test discovery which throws an unexpected exception.""" - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - with patch( - "homeassistant.components.onkyo.config_flow.async_discover", - side_effect=Exception(), - ): - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "eiscp_discovery"}, - ) - - assert form_result["type"] is FlowResultType.ABORT - assert form_result["reason"] == "unknown" - - -async def test_discovery_with_new_and_existing_found(hass: HomeAssistant) -> None: - """Test discovery with a new and an existing entry.""" - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - infos = [create_receiver_info(1), create_receiver_info(2)] - - with ( - patch( - "homeassistant.components.onkyo.config_flow.async_discover", - return_value=infos, - ), - # Fake it like the first entry was already added - patch.object(OnkyoConfigFlow, "_async_current_ids", return_value=["id1"]), - ): - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "eiscp_discovery"}, - ) - - assert form_result["type"] is FlowResultType.FORM - - assert form_result["data_schema"] is not None - schema = form_result["data_schema"].schema - container = schema["device"].container - assert container == {"id2": "type 2 (host 2)"} - - -async def test_discovery_with_one_selected(hass: HomeAssistant) -> None: - """Test discovery after a selection.""" - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - infos = [create_receiver_info(42), create_receiver_info(0)] - - with ( - patch( - "homeassistant.components.onkyo.config_flow.async_discover", - return_value=infos, - ), - ): - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "eiscp_discovery"}, - ) - - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={"device": "id42"}, - ) - - assert select_result["step_id"] == "configure_receiver" - assert 
select_result["description_placeholders"]["name"] == "type 42 (host 42)" - - -async def test_configure_empty_source_list(hass: HomeAssistant) -> None: - """Test receiver configuration with no sources set.""" - - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "manual"}, - ) - - mock_info = Mock() - mock_info.identifier = "mock_id" - - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=mock_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) - - configure_result = await hass.config_entries.flow.async_configure( - select_result["flow_id"], - user_input={"volume_resolution": 200, "input_sources": []}, - ) - - assert configure_result["errors"] == { - "input_sources": "empty_input_source_list" - } - - -async def test_configure_no_resolution(hass: HomeAssistant) -> None: - """Test receiver configure with no resolution set.""" - - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "manual"}, - ) - - mock_info = Mock() - mock_info.identifier = "mock_id" - - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=mock_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) - - with pytest.raises(InvalidData): - await hass.config_entries.flow.async_configure( - select_result["flow_id"], - user_input={"input_sources": ["TV"]}, - ) - - -async def test_configure_resolution_set(hass: HomeAssistant) -> None: - """Test receiver configure with specified resolution.""" - - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "manual"}, - ) - - receiver_info = create_receiver_info(1) - - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=receiver_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) - - configure_result = await hass.config_entries.flow.async_configure( - select_result["flow_id"], - user_input={"volume_resolution": 200, "input_sources": ["TV"]}, - ) - - assert configure_result["type"] is FlowResultType.CREATE_ENTRY - assert configure_result["options"]["volume_resolution"] == 200 - - -async def test_configure_invalid_resolution_set(hass: HomeAssistant) -> None: - """Test receiver configure with invalid resolution.""" - - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "manual"}, - ) - - mock_info = Mock() - mock_info.identifier = "mock_id" - - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=mock_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) - - with pytest.raises(InvalidData): - await 
hass.config_entries.flow.async_configure( - select_result["flow_id"], - user_input={"volume_resolution": 42, "input_sources": ["TV"]}, - ) - - -async def test_reconfigure(hass: HomeAssistant) -> None: - """Test the reconfigure config flow.""" - receiver_info = create_receiver_info(1) - config_entry = create_config_entry_from_info(receiver_info) - await setup_integration(hass, config_entry, receiver_info) - - old_host = config_entry.data[CONF_HOST] - old_max_volume = config_entry.options[OPTION_MAX_VOLUME] - - result = await config_entry.start_reconfigure_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "manual" - - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=receiver_info, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"host": receiver_info.host} - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "configure_receiver" - - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={"volume_resolution": 200, "input_sources": ["TUNER"]}, - ) - - assert result3["type"] is FlowResultType.ABORT - assert result3["reason"] == "reconfigure_successful" - - assert config_entry.data[CONF_HOST] == old_host - assert config_entry.options[OPTION_VOLUME_RESOLUTION] == 200 - assert config_entry.options[OPTION_MAX_VOLUME] == old_max_volume - - -async def test_reconfigure_new_device(hass: HomeAssistant) -> None: - """Test the reconfigure config flow with new device.""" - receiver_info = create_receiver_info(1) - config_entry = create_config_entry_from_info(receiver_info) - await setup_integration(hass, config_entry, receiver_info) - - old_unique_id = receiver_info.identifier - - result = await config_entry.start_reconfigure_flow(hass) - - receiver_info_2 = create_receiver_info(2) - - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=receiver_info_2, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"host": receiver_info_2.host} - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "unique_id_mismatch" - - # unique id should remain unchanged - assert config_entry.unique_id == old_unique_id - - -@pytest.mark.parametrize( - ("user_input", "exception", "error"), - [ - ( - # No host, and thus no host reachable - { - CONF_HOST: None, - "receiver_max_volume": 100, - "max_volume": 100, - "sources": {}, - }, - None, - "cannot_connect", - ), - ( - # No host, and connection exception - { - CONF_HOST: None, - "receiver_max_volume": 100, - "max_volume": 100, - "sources": {}, - }, - Exception(), - "cannot_connect", - ), - ], -) -async def test_import_fail( - hass: HomeAssistant, - user_input: dict[str, Any], - exception: Exception, - error: str, -) -> None: - """Test import flow failed.""" - with ( - patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=None, - side_effect=exception, - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=user_input - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == error - - -async def test_import_success( - hass: HomeAssistant, -) -> None: - """Test import flow succeeded.""" - info = create_receiver_info(1) - - user_input = 
{ - CONF_HOST: info.host, - "receiver_max_volume": 80, - "max_volume": 110, - "sources": { - InputSource("00"): "Auxiliary", - InputSource("01"): "Video", - }, - "info": info, - } - - import_result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=user_input - ) - await hass.async_block_till_done() - - assert import_result["type"] is FlowResultType.CREATE_ENTRY - assert import_result["data"]["host"] == "host 1" - assert import_result["options"]["volume_resolution"] == 80 - assert import_result["options"]["max_volume"] == 100 - assert import_result["options"]["input_sources"] == { - "00": "Auxiliary", - "01": "Video", - } - - -async def test_options_flow(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Test options flow.""" - - receiver_info = create_receiver_info(1) - config_entry = create_empty_config_entry() - await setup_integration(hass, config_entry, receiver_info) - - result = await hass.config_entries.options.async_init(config_entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - "max_volume": 42, - "TV": "television", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { - "volume_resolution": 80, - "max_volume": 42.0, - "input_sources": { - "12": "television", - }, - } diff --git a/tests/components/onkyo/test_init.py b/tests/components/onkyo/test_init.py deleted file mode 100644 index 17086a3088e..00000000000 --- a/tests/components/onkyo/test_init.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Test Onkyo component setup process.""" - -from __future__ import annotations - -from unittest.mock import patch - -import pytest - -from homeassistant.components.onkyo import async_setup_entry -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady - -from . 
import create_empty_config_entry, create_receiver_info, setup_integration - -from tests.common import MockConfigEntry - - -async def test_load_unload_entry( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test load and unload entry.""" - - config_entry = create_empty_config_entry() - receiver_info = create_receiver_info(1) - await setup_integration(hass, config_entry, receiver_info) - - assert config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.NOT_LOADED - - -async def test_update_entry( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test update options.""" - - with patch.object(hass.config_entries, "async_reload", return_value=True): - config_entry = create_empty_config_entry() - receiver_info = create_receiver_info(1) - await setup_integration(hass, config_entry, receiver_info) - - # Force option change - assert hass.config_entries.async_update_entry( - config_entry, options={"option": "new_value"} - ) - await hass.async_block_till_done() - - hass.config_entries.async_reload.assert_called_with(config_entry.entry_id) - - -async def test_no_connection( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test update options.""" - - config_entry = create_empty_config_entry() - config_entry.add_to_hass(hass) - - with ( - patch( - "homeassistant.components.onkyo.async_interview", - return_value=None, - ), - pytest.raises(ConfigEntryNotReady), - ): - await async_setup_entry(hass, config_entry) diff --git a/tests/components/onvif/__init__.py b/tests/components/onvif/__init__.py index 8a86538b977..0857dfef798 100644 --- a/tests/components/onvif/__init__.py +++ b/tests/components/onvif/__init__.py @@ -151,9 +151,7 @@ def setup_mock_device(mock_device, capabilities=None): pullpoint_manager=MagicMock(state=PullPointManagerState.PAUSED), ) - def mock_constructor( - hass: HomeAssistant, config: config_entries.ConfigEntry - ) -> MagicMock: + def mock_constructor(hass, config): """Fake the controller constructor.""" return mock_device diff --git a/tests/components/onvif/snapshots/test_diagnostics.ambr b/tests/components/onvif/snapshots/test_diagnostics.ambr index c8a9ff75d62..68c92ec755d 100644 --- a/tests/components/onvif/snapshots/test_diagnostics.ambr +++ b/tests/components/onvif/snapshots/test_diagnostics.ambr @@ -11,8 +11,6 @@ 'username': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'onvif', 'entry_id': '1', 'minor_version': 1, diff --git a/tests/components/onvif/test_config_flow.py b/tests/components/onvif/test_config_flow.py index 5c01fb2d200..c0e5a6fe545 100644 --- a/tests/components/onvif/test_config_flow.py +++ b/tests/components/onvif/test_config_flow.py @@ -9,7 +9,7 @@ from homeassistant import config_entries from homeassistant.components import dhcp from homeassistant.components.onvif import DOMAIN, config_flow from homeassistant.config_entries import SOURCE_DHCP -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_USERNAME +from homeassistant.const import CONF_HOST, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr @@ -769,7 +769,11 @@ async def test_form_reauth(hass: HomeAssistant) -> None: """Test reauthenticate.""" entry, _, _ = await setup_onvif_integration(hass) - result = await 
entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert ( @@ -803,8 +807,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None: assert result2["step_id"] == "reauth_confirm" assert result2["errors"] == {config_flow.CONF_PASSWORD: "auth_failed"} assert result2["description_placeholders"] == { - CONF_NAME: "Mock Title", - "error": "not authorized (subcodes:NotAuthorized)", + "error": "not authorized (subcodes:NotAuthorized)" } with ( diff --git a/tests/components/onvif/test_diagnostics.py b/tests/components/onvif/test_diagnostics.py index ce8febe2341..d58c8008ea6 100644 --- a/tests/components/onvif/test_diagnostics.py +++ b/tests/components/onvif/test_diagnostics.py @@ -1,7 +1,6 @@ """Test ONVIF diagnostics.""" from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -20,6 +19,4 @@ async def test_diagnostics( entry, _, _ = await setup_onvif_integration(hass) - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( - exclude=props("created_at", "modified_at") - ) + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot diff --git a/tests/components/open_meteo/conftest.py b/tests/components/open_meteo/conftest.py index 22138846915..0d3e1274693 100644 --- a/tests/components/open_meteo/conftest.py +++ b/tests/components/open_meteo/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import MagicMock, patch from open_meteo import Forecast import pytest +from typing_extensions import Generator from homeassistant.components.open_meteo.const import DOMAIN from homeassistant.const import CONF_ZONE diff --git a/tests/components/openai_conversation/conftest.py b/tests/components/openai_conversation/conftest.py index 4639d0dc8e0..6d770b51ce9 100644 --- a/tests/components/openai_conversation/conftest.py +++ b/tests/components/openai_conversation/conftest.py @@ -13,7 +13,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: +def mock_config_entry(hass): """Mock a config entry.""" entry = MockConfigEntry( title="OpenAI", @@ -27,9 +27,7 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture -def mock_config_entry_with_assist( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> MockConfigEntry: +def mock_config_entry_with_assist(hass, mock_config_entry): """Mock a config entry with assist.""" hass.config_entries.async_update_entry( mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} @@ -38,9 +36,7 @@ def mock_config_entry_with_assist( @pytest.fixture -async def mock_init_component( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: +async def mock_init_component(hass, mock_config_entry): """Initialize integration.""" with patch( "openai.resources.models.AsyncModels.list", diff --git a/tests/components/openai_conversation/snapshots/test_conversation.ambr b/tests/components/openai_conversation/snapshots/test_conversation.ambr index eaa3a9de64c..e4dd7cd00bb 100644 --- a/tests/components/openai_conversation/snapshots/test_conversation.ambr +++ b/tests/components/openai_conversation/snapshots/test_conversation.ambr @@ -20,7 +20,7 @@ 
speech=dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Error preparing LLM API', + 'speech': 'Error preparing LLM API: API non-existing not found', }), }), speech_slots=dict({ diff --git a/tests/components/openai_conversation/test_conversation.py b/tests/components/openai_conversation/test_conversation.py index e0665bc449f..1008482847c 100644 --- a/tests/components/openai_conversation/test_conversation.py +++ b/tests/components/openai_conversation/test_conversation.py @@ -27,33 +27,6 @@ from homeassistant.util import ulid from tests.common import MockConfigEntry -async def test_entity( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, -) -> None: - """Test entity properties.""" - state = hass.states.get("conversation.openai") - assert state - assert state.attributes["supported_features"] == 0 - - hass.config_entries.async_update_entry( - mock_config_entry, - options={ - **mock_config_entry.options, - CONF_LLM_HASS_API: "assist", - }, - ) - await hass.config_entries.async_reload(mock_config_entry.entry_id) - - state = hass.states.get("conversation.openai") - assert state - assert ( - state.attributes["supported_features"] - == conversation.ConversationEntityFeature.CONTROL - ) - - async def test_error_handling( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component ) -> None: @@ -294,7 +267,7 @@ async def test_function_call( assert [event["event_type"] for event in trace_events] == [ trace.ConversationTraceEventType.ASYNC_PROCESS, trace.ConversationTraceEventType.AGENT_DETAIL, - trace.ConversationTraceEventType.TOOL_CALL, + trace.ConversationTraceEventType.LLM_TOOL_CALL, ] # AGENT_DETAIL event contains the raw prompt passed to the model detail_event = trace_events[1] @@ -303,7 +276,6 @@ async def test_function_call( "Today's date is 2024-06-03." 
in trace_events[1]["data"]["messages"][0]["content"] ) - assert [t.name for t in detail_event["data"]["tools"]] == ["test_tool"] # Call it again, make sure we have updated prompt with ( @@ -521,8 +493,6 @@ async def test_unknown_hass_api( }, ) - await hass.async_block_till_done() - result = await conversation.async_converse( hass, "hello", None, Context(), agent_id=mock_config_entry.entry_id ) diff --git a/tests/components/openai_conversation/test_init.py b/tests/components/openai_conversation/test_init.py index d78ce398c92..c9431aa1083 100644 --- a/tests/components/openai_conversation/test_init.py +++ b/tests/components/openai_conversation/test_init.py @@ -60,6 +60,33 @@ from tests.common import MockConfigEntry "style": "natural", }, ), + ( + {"prompt": "Picture of a dog", "size": "256"}, + { + "prompt": "Picture of a dog", + "size": "1024x1024", + "quality": "standard", + "style": "vivid", + }, + ), + ( + {"prompt": "Picture of a dog", "size": "512"}, + { + "prompt": "Picture of a dog", + "size": "1024x1024", + "quality": "standard", + "style": "vivid", + }, + ), + ( + {"prompt": "Picture of a dog", "size": "1024"}, + { + "prompt": "Picture of a dog", + "size": "1024x1024", + "quality": "standard", + "style": "vivid", + }, + ), ], ) async def test_generate_image_service( diff --git a/tests/components/openalpr_cloud/test_image_processing.py b/tests/components/openalpr_cloud/test_image_processing.py index 143513f9852..7115c3e7bf0 100644 --- a/tests/components/openalpr_cloud/test_image_processing.py +++ b/tests/components/openalpr_cloud/test_image_processing.py @@ -6,7 +6,7 @@ import pytest from homeassistant.components import camera, image_processing as ip from homeassistant.components.openalpr_cloud.image_processing import OPENALPR_API_URL -from homeassistant.core import Event, HomeAssistant +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import assert_setup_component, async_capture_events, load_fixture @@ -15,13 +15,13 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(autouse=True) -async def setup_homeassistant(hass: HomeAssistant) -> None: +async def setup_homeassistant(hass: HomeAssistant): """Set up the homeassistant integration.""" await async_setup_component(hass, "homeassistant", {}) @pytest.fixture -async def setup_openalpr_cloud(hass: HomeAssistant) -> None: +async def setup_openalpr_cloud(hass): """Set up openalpr cloud.""" config = { ip.DOMAIN: { @@ -43,7 +43,7 @@ async def setup_openalpr_cloud(hass: HomeAssistant) -> None: @pytest.fixture -async def alpr_events(hass: HomeAssistant) -> list[Event]: +async def alpr_events(hass): """Listen for events.""" return async_capture_events(hass, "image_processing.found_plate") diff --git a/tests/components/openexchangerates/conftest.py b/tests/components/openexchangerates/conftest.py index 770432ebac3..6bd7da2c7af 100644 --- a/tests/components/openexchangerates/conftest.py +++ b/tests/components/openexchangerates/conftest.py @@ -1,9 +1,9 @@ """Provide common fixtures for tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.openexchangerates.const import DOMAIN diff --git a/tests/components/openexchangerates/test_config_flow.py b/tests/components/openexchangerates/test_config_flow.py index 0d4744c057a..30ea619d646 100644 --- a/tests/components/openexchangerates/test_config_flow.py +++ 
b/tests/components/openexchangerates/test_config_flow.py @@ -1,7 +1,6 @@ """Test the Open Exchange Rates config flow.""" import asyncio -from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, patch @@ -10,6 +9,7 @@ from aioopenexchangerates import ( OpenExchangeRatesClientError, ) import pytest +from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.openexchangerates.const import DOMAIN @@ -200,7 +200,16 @@ async def test_reauth( ) -> None: """Test we can reauthenticate the config entry.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + flow_context = { + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + "title_placeholders": {"name": mock_config_entry.title}, + "unique_id": mock_config_entry.unique_id, + } + + result = await hass.config_entries.flow.async_init( + DOMAIN, context=flow_context, data=mock_config_entry.data + ) assert result["type"] is FlowResultType.FORM assert result["errors"] is None diff --git a/tests/components/opengarage/conftest.py b/tests/components/opengarage/conftest.py index 2367692096b..c960e723289 100644 --- a/tests/components/opengarage/conftest.py +++ b/tests/components/opengarage/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.opengarage.const import CONF_DEVICE_KEY, DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT, CONF_VERIFY_SSL diff --git a/tests/components/opensky/conftest.py b/tests/components/opensky/conftest.py index 4664c48ef9e..c48f3bec8d8 100644 --- a/tests/components/opensky/conftest.py +++ b/tests/components/opensky/conftest.py @@ -1,10 +1,10 @@ """Configure tests for the OpenSky integration.""" -from collections.abc import AsyncGenerator, Generator from unittest.mock import AsyncMock, patch import pytest from python_opensky import StatesResponse +from typing_extensions import AsyncGenerator, Generator from homeassistant.components.opensky.const import ( CONF_ALTITUDE, diff --git a/tests/components/opentherm_gw/conftest.py b/tests/components/opentherm_gw/conftest.py deleted file mode 100644 index 9c90c74b04b..00000000000 --- a/tests/components/opentherm_gw/conftest.py +++ /dev/null @@ -1,62 +0,0 @@ -"""Test configuration for opentherm_gw.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch - -from pyotgw.vars import OTGW, OTGW_ABOUT -import pytest - -from homeassistant.components.opentherm_gw import DOMAIN -from homeassistant.const import CONF_DEVICE, CONF_ID, CONF_NAME - -from tests.common import MockConfigEntry - -VERSION_TEST = "4.2.5" -MINIMAL_STATUS = {OTGW: {OTGW_ABOUT: f"OpenTherm Gateway {VERSION_TEST}"}} -MOCK_GATEWAY_ID = "mock_gateway" - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.opentherm_gw.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_pyotgw() -> Generator[MagicMock]: - """Mock a pyotgw.OpenThermGateway object.""" - with ( - patch( - "homeassistant.components.opentherm_gw.OpenThermGateway", - return_value=MagicMock( - connect=AsyncMock(return_value=MINIMAL_STATUS), - set_control_setpoint=AsyncMock(), - set_max_relative_mod=AsyncMock(), - 
disconnect=AsyncMock(), - ), - ) as mock_gateway, - patch( - "homeassistant.components.opentherm_gw.config_flow.pyotgw.OpenThermGateway", - new=mock_gateway, - ), - ): - yield mock_gateway - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock an OpenTherm Gateway config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title="Mock Gateway", - data={ - CONF_NAME: "Mock Gateway", - CONF_DEVICE: "/dev/null", - CONF_ID: MOCK_GATEWAY_ID, - }, - options={}, - ) diff --git a/tests/components/opentherm_gw/test_button.py b/tests/components/opentherm_gw/test_button.py deleted file mode 100644 index b02a9d9fef0..00000000000 --- a/tests/components/opentherm_gw/test_button.py +++ /dev/null @@ -1,50 +0,0 @@ -"""Test opentherm_gw buttons.""" - -from unittest.mock import AsyncMock, MagicMock - -from pyotgw.vars import OTGW_MODE_RESET - -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.components.opentherm_gw import DOMAIN as OPENTHERM_DOMAIN -from homeassistant.components.opentherm_gw.const import OpenThermDeviceIdentifier -from homeassistant.const import ATTR_ENTITY_ID, CONF_ID -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .conftest import MINIMAL_STATUS - -from tests.common import MockConfigEntry - - -async def test_restart_button( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - mock_pyotgw: MagicMock, -) -> None: - """Test restart button.""" - - mock_pyotgw.return_value.set_mode = AsyncMock(return_value=MINIMAL_STATUS) - mock_config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert ( - button_entity_id := entity_registry.async_get_entity_id( - BUTTON_DOMAIN, - OPENTHERM_DOMAIN, - f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-restart_button", - ) - ) is not None - - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - { - ATTR_ENTITY_ID: button_entity_id, - }, - blocking=True, - ) - - mock_pyotgw.return_value.set_mode.assert_awaited_once_with(OTGW_MODE_RESET) diff --git a/tests/components/opentherm_gw/test_config_flow.py b/tests/components/opentherm_gw/test_config_flow.py index 57bea4e55dc..24b41df8124 100644 --- a/tests/components/opentherm_gw/test_config_flow.py +++ b/tests/components/opentherm_gw/test_config_flow.py @@ -1,12 +1,14 @@ """Test the Opentherm Gateway config flow.""" -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import patch +from pyotgw.vars import OTGW, OTGW_ABOUT from serial import SerialException from homeassistant import config_entries from homeassistant.components.opentherm_gw.const import ( CONF_FLOOR_TEMP, + CONF_PRECISION, CONF_READ_PRECISION, CONF_SET_PRECISION, CONF_TEMPORARY_OVRD_MODE, @@ -24,12 +26,10 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry +MINIMAL_STATUS = {OTGW: {OTGW_ABOUT: "OpenTherm Gateway 4.2.5"}} -async def test_form_user( - hass: HomeAssistant, - mock_pyotgw: MagicMock, - mock_setup_entry: AsyncMock, -) -> None: + +async def test_form_user(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -38,10 +38,27 @@ async def test_form_user( assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_NAME: 
"Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} - ) - await hass.async_block_till_done() + with ( + patch( + "homeassistant.components.opentherm_gw.async_setup", + return_value=True, + ) as mock_setup, + patch( + "homeassistant.components.opentherm_gw.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + patch( + "pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS + ) as mock_pyotgw_connect, + patch( + "pyotgw.OpenThermGateway.disconnect", return_value=None + ) as mock_pyotgw_disconnect, + patch("pyotgw.status.StatusManager._process_updates", return_value=None), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Test Entry 1" @@ -50,22 +67,37 @@ async def test_form_user( CONF_DEVICE: "/dev/ttyUSB0", CONF_ID: "test_entry_1", } - assert mock_pyotgw.return_value.connect.await_count == 1 - assert mock_pyotgw.return_value.disconnect.await_count == 1 + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_pyotgw_connect.mock_calls) == 1 + assert len(mock_pyotgw_disconnect.mock_calls) == 1 -# Deprecated import from configuration.yaml, can be removed in 2025.4.0 -async def test_form_import( - hass: HomeAssistant, - mock_pyotgw: MagicMock, - mock_setup_entry: AsyncMock, -) -> None: +async def test_form_import(hass: HomeAssistant) -> None: """Test import from existing config.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_ID: "legacy_gateway", CONF_DEVICE: "/dev/ttyUSB1"}, - ) + + with ( + patch( + "homeassistant.components.opentherm_gw.async_setup", + return_value=True, + ) as mock_setup, + patch( + "homeassistant.components.opentherm_gw.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + patch( + "pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS + ) as mock_pyotgw_connect, + patch( + "pyotgw.OpenThermGateway.disconnect", return_value=None + ) as mock_pyotgw_disconnect, + patch("pyotgw.status.StatusManager._process_updates", return_value=None), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_ID: "legacy_gateway", CONF_DEVICE: "/dev/ttyUSB1"}, + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "legacy_gateway" @@ -74,15 +106,13 @@ async def test_form_import( CONF_DEVICE: "/dev/ttyUSB1", CONF_ID: "legacy_gateway", } - assert mock_pyotgw.return_value.connect.await_count == 1 - assert mock_pyotgw.return_value.disconnect.await_count == 1 + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_pyotgw_connect.mock_calls) == 1 + assert len(mock_pyotgw_disconnect.mock_calls) == 1 -async def test_form_duplicate_entries( - hass: HomeAssistant, - mock_pyotgw: MagicMock, - mock_setup_entry: AsyncMock, -) -> None: +async def test_form_duplicate_entries(hass: HomeAssistant) -> None: """Test duplicate device or id errors.""" flow1 = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -94,76 +124,139 @@ async def test_form_duplicate_entries( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - result1 = await hass.config_entries.flow.async_configure( - flow1["flow_id"], {CONF_NAME: "Test Entry 1", 
CONF_DEVICE: "/dev/ttyUSB0"} - ) + with ( + patch( + "homeassistant.components.opentherm_gw.async_setup", + return_value=True, + ) as mock_setup, + patch( + "homeassistant.components.opentherm_gw.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + patch( + "pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS + ) as mock_pyotgw_connect, + patch( + "pyotgw.OpenThermGateway.disconnect", return_value=None + ) as mock_pyotgw_disconnect, + patch("pyotgw.status.StatusManager._process_updates", return_value=None), + ): + result1 = await hass.config_entries.flow.async_configure( + flow1["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} + ) + result2 = await hass.config_entries.flow.async_configure( + flow2["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB1"} + ) + result3 = await hass.config_entries.flow.async_configure( + flow3["flow_id"], {CONF_NAME: "Test Entry 2", CONF_DEVICE: "/dev/ttyUSB0"} + ) assert result1["type"] is FlowResultType.CREATE_ENTRY - - result2 = await hass.config_entries.flow.async_configure( - flow2["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB1"} - ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "id_exists"} - - result3 = await hass.config_entries.flow.async_configure( - flow3["flow_id"], {CONF_NAME: "Test Entry 2", CONF_DEVICE: "/dev/ttyUSB0"} - ) assert result3["type"] is FlowResultType.FORM assert result3["errors"] == {"base": "already_configured"} - - assert mock_pyotgw.return_value.connect.await_count == 1 - assert mock_pyotgw.return_value.disconnect.await_count == 1 + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_pyotgw_connect.mock_calls) == 1 + assert len(mock_pyotgw_disconnect.mock_calls) == 1 -async def test_form_connection_timeout( - hass: HomeAssistant, - mock_pyotgw: MagicMock, - mock_setup_entry: AsyncMock, -) -> None: +async def test_form_connection_timeout(hass: HomeAssistant) -> None: """Test we handle connection timeout.""" - flow = await hass.config_entries.flow.async_init( + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - mock_pyotgw.return_value.connect.side_effect = TimeoutError + with ( + patch( + "pyotgw.OpenThermGateway.connect", side_effect=(TimeoutError) + ) as mock_connect, + patch("pyotgw.status.StatusManager._process_updates", return_value=None), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_NAME: "Test Entry 1", CONF_DEVICE: "socket://192.0.2.254:1234"}, + ) - result = await hass.config_entries.flow.async_configure( - flow["flow_id"], - {CONF_NAME: "Test Entry 1", CONF_DEVICE: "socket://192.0.2.254:1234"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "timeout_connect"} - - assert mock_pyotgw.return_value.connect.await_count == 1 + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "timeout_connect"} + assert len(mock_connect.mock_calls) == 1 -async def test_form_connection_error( - hass: HomeAssistant, - mock_pyotgw: MagicMock, - mock_setup_entry: AsyncMock, -) -> None: +async def test_form_connection_error(hass: HomeAssistant) -> None: """Test we handle serial connection error.""" - flow = await hass.config_entries.flow.async_init( + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - 
mock_pyotgw.return_value.connect.side_effect = SerialException + with ( + patch( + "pyotgw.OpenThermGateway.connect", side_effect=(SerialException) + ) as mock_connect, + patch("pyotgw.status.StatusManager._process_updates", return_value=None), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} + ) - result = await hass.config_entries.flow.async_configure( - flow["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} + assert len(mock_connect.mock_calls) == 1 + + +async def test_options_migration(hass: HomeAssistant) -> None: + """Test migration of precision option after update.""" + entry = MockConfigEntry( + domain=DOMAIN, + title="Mock Gateway", + data={ + CONF_NAME: "Test Entry 1", + CONF_DEVICE: "/dev/ttyUSB0", + CONF_ID: "test_entry_1", + }, + options={ + CONF_FLOOR_TEMP: True, + CONF_PRECISION: PRECISION_TENTHS, + }, ) + entry.add_to_hass(hass) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - assert mock_pyotgw.return_value.connect.await_count == 1 + with ( + patch( + "homeassistant.components.opentherm_gw.OpenThermGatewayDevice.connect_and_subscribe", + return_value=True, + ), + patch( + "homeassistant.components.opentherm_gw.async_setup", + return_value=True, + ), + patch( + "pyotgw.status.StatusManager._process_updates", + return_value=None, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init( + entry.entry_id, context={"source": config_entries.SOURCE_USER}, data=None + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_READ_PRECISION] == PRECISION_TENTHS + assert result["data"][CONF_SET_PRECISION] == PRECISION_TENTHS + assert result["data"][CONF_FLOOR_TEMP] is True -async def test_options_form( - hass: HomeAssistant, - mock_pyotgw: MagicMock, - mock_setup_entry: AsyncMock, -) -> None: +async def test_options_form(hass: HomeAssistant) -> None: """Test the options form.""" entry = MockConfigEntry( domain=DOMAIN, @@ -177,17 +270,23 @@ async def test_options_form( ) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + with ( + patch("homeassistant.components.opentherm_gw.async_setup", return_value=True), + patch( + "homeassistant.components.opentherm_gw.async_setup_entry", return_value=True + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() - flow = await hass.config_entries.options.async_init( + result = await hass.config_entries.options.async_init( entry.entry_id, context={"source": "test"}, data=None ) - assert flow["type"] is FlowResultType.FORM - assert flow["step_id"] == "init" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" result = await hass.config_entries.options.async_configure( - flow["flow_id"], + result["flow_id"], user_input={ CONF_FLOOR_TEMP: True, CONF_READ_PRECISION: PRECISION_HALVES, @@ -202,12 +301,12 @@ async def test_options_form( assert result["data"][CONF_TEMPORARY_OVRD_MODE] is True assert 
result["data"][CONF_FLOOR_TEMP] is True - flow = await hass.config_entries.options.async_init( + result = await hass.config_entries.options.async_init( entry.entry_id, context={"source": "test"}, data=None ) result = await hass.config_entries.options.async_configure( - flow["flow_id"], user_input={CONF_READ_PRECISION: 0} + result["flow_id"], user_input={CONF_READ_PRECISION: 0} ) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -216,12 +315,12 @@ async def test_options_form( assert result["data"][CONF_TEMPORARY_OVRD_MODE] is True assert result["data"][CONF_FLOOR_TEMP] is True - flow = await hass.config_entries.options.async_init( + result = await hass.config_entries.options.async_init( entry.entry_id, context={"source": "test"}, data=None ) result = await hass.config_entries.options.async_configure( - flow["flow_id"], + result["flow_id"], user_input={ CONF_FLOOR_TEMP: False, CONF_READ_PRECISION: PRECISION_TENTHS, diff --git a/tests/components/opentherm_gw/test_init.py b/tests/components/opentherm_gw/test_init.py index 3e85afbf782..a1ff5b75f47 100644 --- a/tests/components/opentherm_gw/test_init.py +++ b/tests/components/opentherm_gw/test_init.py @@ -1,177 +1,84 @@ """Test Opentherm Gateway init.""" -from unittest.mock import MagicMock +from unittest.mock import patch from pyotgw.vars import OTGW, OTGW_ABOUT +import pytest from homeassistant import setup -from homeassistant.components.opentherm_gw.const import ( - DOMAIN, - OpenThermDeviceIdentifier, -) -from homeassistant.const import CONF_ID +from homeassistant.components.opentherm_gw.const import DOMAIN +from homeassistant.const import CONF_DEVICE, CONF_ID, CONF_NAME from homeassistant.core import HomeAssistant -from homeassistant.helpers import ( - device_registry as dr, - entity_registry as er, - issue_registry as ir, -) - -from .conftest import MOCK_GATEWAY_ID, VERSION_TEST +from homeassistant.helpers import device_registry as dr from tests.common import MockConfigEntry +VERSION_OLD = "4.2.5" VERSION_NEW = "4.2.8.1" +MINIMAL_STATUS = {OTGW: {OTGW_ABOUT: f"OpenTherm Gateway {VERSION_OLD}"}} MINIMAL_STATUS_UPD = {OTGW: {OTGW_ABOUT: f"OpenTherm Gateway {VERSION_NEW}"}} +MOCK_GATEWAY_ID = "mock_gateway" +MOCK_CONFIG_ENTRY = MockConfigEntry( + domain=DOMAIN, + title="Mock Gateway", + data={ + CONF_NAME: "Mock Gateway", + CONF_DEVICE: "/dev/null", + CONF_ID: MOCK_GATEWAY_ID, + }, + options={}, +) +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_device_registry_insert( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_config_entry: MockConfigEntry, - mock_pyotgw: MagicMock, + hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: """Test that the device registry is initialized correctly.""" - mock_config_entry.add_to_hass(hass) + MOCK_CONFIG_ENTRY.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.opentherm_gw.OpenThermGatewayDevice.cleanup", + return_value=None, + ), + patch("pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS), + ): + await setup.async_setup_component(hass, DOMAIN, {}) - await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - gw_dev = device_registry.async_get_device( - identifiers={(DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}")} - ) - assert gw_dev is not None - assert gw_dev.sw_version == VERSION_TEST + gw_dev = device_registry.async_get_device(identifiers={(DOMAIN, MOCK_GATEWAY_ID)}) + assert 
gw_dev.sw_version == VERSION_OLD +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_device_registry_update( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_config_entry: MockConfigEntry, - mock_pyotgw: MagicMock, + hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: """Test that the device registry is updated correctly.""" - mock_config_entry.add_to_hass(hass) + MOCK_CONFIG_ENTRY.add_to_hass(hass) device_registry.async_get_or_create( - config_entry_id=mock_config_entry.entry_id, - identifiers={ - (DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}") - }, + config_entry_id=MOCK_CONFIG_ENTRY.entry_id, + identifiers={(DOMAIN, MOCK_GATEWAY_ID)}, name="Mock Gateway", manufacturer="Schelte Bron", model="OpenTherm Gateway", - sw_version=VERSION_TEST, + sw_version=VERSION_OLD, ) - mock_pyotgw.return_value.connect.return_value = MINIMAL_STATUS_UPD + with ( + patch( + "homeassistant.components.opentherm_gw.OpenThermGatewayDevice.cleanup", + return_value=None, + ), + patch("pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS_UPD), + ): + await setup.async_setup_component(hass, DOMAIN, {}) - await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - - gw_dev = device_registry.async_get_device( - identifiers={(DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}")} - ) - assert gw_dev is not None + gw_dev = device_registry.async_get_device(identifiers={(DOMAIN, MOCK_GATEWAY_ID)}) assert gw_dev.sw_version == VERSION_NEW - - -# Device migration test can be removed in 2025.4.0 -async def test_device_migration( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_config_entry: MockConfigEntry, - mock_pyotgw: MagicMock, -) -> None: - """Test that the device registry is updated correctly.""" - mock_config_entry.add_to_hass(hass) - - device_registry.async_get_or_create( - config_entry_id=mock_config_entry.entry_id, - identifiers={ - (DOMAIN, MOCK_GATEWAY_ID), - }, - name="Mock Gateway", - manufacturer="Schelte Bron", - model="OpenTherm Gateway", - sw_version=VERSION_TEST, - ) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert ( - device_registry.async_get_device(identifiers={(DOMAIN, MOCK_GATEWAY_ID)}) - is None - ) - - gw_dev = device_registry.async_get_device( - identifiers={(DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}")} - ) - assert gw_dev is not None - - assert ( - device_registry.async_get_device( - identifiers={ - (DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.BOILER}") - } - ) - is not None - ) - - assert ( - device_registry.async_get_device( - identifiers={ - (DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.THERMOSTAT}") - } - ) - is not None - ) - - -# Entity migration test can be removed in 2025.4.0 -async def test_climate_entity_migration( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - mock_pyotgw: MagicMock, -) -> None: - """Test that the climate entity unique_id gets migrated correctly.""" - mock_config_entry.add_to_hass(hass) - entry = entity_registry.async_get_or_create( - domain="climate", - platform="opentherm_gw", - unique_id=mock_config_entry.data[CONF_ID], - ) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - updated_entry = 
entity_registry.async_get(entry.entity_id) - assert updated_entry is not None - assert ( - updated_entry.unique_id - == f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-thermostat_entity" - ) - - -# Deprecation test, can be removed in 2025.4.0 -async def test_configuration_yaml_deprecation( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_config_entry: MockConfigEntry, - mock_pyotgw: MagicMock, -) -> None: - """Test that existing configuration in configuration.yaml creates an issue.""" - - await setup.async_setup_component( - hass, DOMAIN, {DOMAIN: {"legacy_gateway": {"device": "/dev/null"}}} - ) - - await hass.async_block_till_done() - assert ( - issue_registry.async_get_issue( - DOMAIN, "deprecated_import_from_configuration_yaml" - ) - is not None - ) diff --git a/tests/components/opentherm_gw/test_select.py b/tests/components/opentherm_gw/test_select.py deleted file mode 100644 index f89224b3874..00000000000 --- a/tests/components/opentherm_gw/test_select.py +++ /dev/null @@ -1,226 +0,0 @@ -"""Test opentherm_gw select entities.""" - -from typing import Any -from unittest.mock import AsyncMock, MagicMock - -from pyotgw.vars import ( - OTGW_GPIO_A, - OTGW_GPIO_B, - OTGW_LED_A, - OTGW_LED_B, - OTGW_LED_C, - OTGW_LED_D, - OTGW_LED_E, - OTGW_LED_F, -) -import pytest - -from homeassistant.components.opentherm_gw import DOMAIN as OPENTHERM_DOMAIN -from homeassistant.components.opentherm_gw.const import ( - DATA_GATEWAYS, - DATA_OPENTHERM_GW, - OpenThermDeviceIdentifier, -) -from homeassistant.components.opentherm_gw.select import ( - OpenThermSelectGPIOMode, - OpenThermSelectLEDMode, - PyotgwGPIOMode, - PyotgwLEDMode, -) -from homeassistant.components.select import ( - ATTR_OPTION, - DOMAIN as SELECT_DOMAIN, - SERVICE_SELECT_OPTION, -) -from homeassistant.const import ATTR_ENTITY_ID, CONF_ID, STATE_UNKNOWN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.dispatcher import async_dispatcher_send - -from tests.common import MockConfigEntry - - -@pytest.mark.parametrize( - ( - "entity_key", - "target_func_name", - "target_param_1", - "target_param_2", - "resulting_state", - ), - [ - ( - OTGW_GPIO_A, - "set_gpio_mode", - "A", - PyotgwGPIOMode.VCC, - OpenThermSelectGPIOMode.VCC, - ), - ( - OTGW_GPIO_B, - "set_gpio_mode", - "B", - PyotgwGPIOMode.HOME, - OpenThermSelectGPIOMode.HOME, - ), - ( - OTGW_LED_A, - "set_led_mode", - "A", - PyotgwLEDMode.TX_ANY, - OpenThermSelectLEDMode.TX_ANY, - ), - ( - OTGW_LED_B, - "set_led_mode", - "B", - PyotgwLEDMode.RX_ANY, - OpenThermSelectLEDMode.RX_ANY, - ), - ( - OTGW_LED_C, - "set_led_mode", - "C", - PyotgwLEDMode.BOILER_TRAFFIC, - OpenThermSelectLEDMode.BOILER_TRAFFIC, - ), - ( - OTGW_LED_D, - "set_led_mode", - "D", - PyotgwLEDMode.THERMOSTAT_TRAFFIC, - OpenThermSelectLEDMode.THERMOSTAT_TRAFFIC, - ), - ( - OTGW_LED_E, - "set_led_mode", - "E", - PyotgwLEDMode.FLAME_ON, - OpenThermSelectLEDMode.FLAME_ON, - ), - ( - OTGW_LED_F, - "set_led_mode", - "F", - PyotgwLEDMode.BOILER_MAINTENANCE_REQUIRED, - OpenThermSelectLEDMode.BOILER_MAINTENANCE_REQUIRED, - ), - ], -) -async def test_select_change_value( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - mock_pyotgw: MagicMock, - entity_key: str, - target_func_name: str, - target_param_1: str, - target_param_2: str | int, - resulting_state: str, -) -> None: - """Test GPIO mode selector.""" - - setattr( - mock_pyotgw.return_value, - target_func_name, 
- AsyncMock(return_value=target_param_2), - ) - mock_config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert ( - select_entity_id := entity_registry.async_get_entity_id( - SELECT_DOMAIN, - OPENTHERM_DOMAIN, - f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}", - ) - ) is not None - assert hass.states.get(select_entity_id).state == STATE_UNKNOWN - - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: select_entity_id, ATTR_OPTION: resulting_state}, - blocking=True, - ) - assert hass.states.get(select_entity_id).state == resulting_state - - target = getattr(mock_pyotgw.return_value, target_func_name) - target.assert_awaited_once_with(target_param_1, target_param_2) - - -@pytest.mark.parametrize( - ("entity_key", "test_value", "resulting_state"), - [ - (OTGW_GPIO_A, PyotgwGPIOMode.AWAY, OpenThermSelectGPIOMode.AWAY), - (OTGW_GPIO_B, PyotgwGPIOMode.LED_F, OpenThermSelectGPIOMode.LED_F), - ( - OTGW_LED_A, - PyotgwLEDMode.SETPOINT_OVERRIDE_ACTIVE, - OpenThermSelectLEDMode.SETPOINT_OVERRIDE_ACTIVE, - ), - ( - OTGW_LED_B, - PyotgwLEDMode.CENTRAL_HEATING_ON, - OpenThermSelectLEDMode.CENTRAL_HEATING_ON, - ), - (OTGW_LED_C, PyotgwLEDMode.HOT_WATER_ON, OpenThermSelectLEDMode.HOT_WATER_ON), - ( - OTGW_LED_D, - PyotgwLEDMode.COMFORT_MODE_ON, - OpenThermSelectLEDMode.COMFORT_MODE_ON, - ), - ( - OTGW_LED_E, - PyotgwLEDMode.TX_ERROR_DETECTED, - OpenThermSelectLEDMode.TX_ERROR_DETECTED, - ), - ( - OTGW_LED_F, - PyotgwLEDMode.RAISED_POWER_MODE_ACTIVE, - OpenThermSelectLEDMode.RAISED_POWER_MODE_ACTIVE, - ), - ], -) -async def test_select_state_update( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - mock_pyotgw: MagicMock, - entity_key: str, - test_value: Any, - resulting_state: str, -) -> None: - """Test GPIO mode selector.""" - - mock_config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert ( - select_entity_id := entity_registry.async_get_entity_id( - SELECT_DOMAIN, - OPENTHERM_DOMAIN, - f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}", - ) - ) is not None - assert hass.states.get(select_entity_id).state == STATE_UNKNOWN - - gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][ - mock_config_entry.data[CONF_ID] - ] - async_dispatcher_send( - hass, - gw_hub.update_signal, - { - OpenThermDeviceIdentifier.BOILER: {}, - OpenThermDeviceIdentifier.GATEWAY: {entity_key: test_value}, - OpenThermDeviceIdentifier.THERMOSTAT: {}, - }, - ) - await hass.async_block_till_done() - - assert hass.states.get(select_entity_id).state == resulting_state diff --git a/tests/components/opentherm_gw/test_switch.py b/tests/components/opentherm_gw/test_switch.py deleted file mode 100644 index 5eb8e906892..00000000000 --- a/tests/components/opentherm_gw/test_switch.py +++ /dev/null @@ -1,111 +0,0 @@ -"""Test opentherm_gw switches.""" - -from unittest.mock import AsyncMock, MagicMock, call - -import pytest - -from homeassistant.components.opentherm_gw import DOMAIN as OPENTHERM_DOMAIN -from homeassistant.components.opentherm_gw.const import OpenThermDeviceIdentifier -from homeassistant.components.switch import ( - DOMAIN as SWITCH_DOMAIN, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, -) -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_ID, - STATE_OFF, - STATE_ON, - STATE_UNKNOWN, -) 
-from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry - - -@pytest.mark.parametrize( - "entity_key", ["central_heating_1_override", "central_heating_2_override"] -) -async def test_switch_added_disabled( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - mock_pyotgw: MagicMock, - entity_key: str, -) -> None: - """Test switch gets added in disabled state.""" - - mock_config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert ( - switch_entity_id := entity_registry.async_get_entity_id( - SWITCH_DOMAIN, - OPENTHERM_DOMAIN, - f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}", - ) - ) is not None - - assert (entity_entry := entity_registry.async_get(switch_entity_id)) is not None - assert entity_entry.disabled_by == er.RegistryEntryDisabler.INTEGRATION - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.parametrize( - ("entity_key", "target_func"), - [ - ("central_heating_1_override", "set_ch_enable_bit"), - ("central_heating_2_override", "set_ch2_enable_bit"), - ], -) -async def test_ch_override_switch( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - mock_pyotgw: MagicMock, - entity_key: str, - target_func: str, -) -> None: - """Test central heating override switch.""" - - setattr(mock_pyotgw.return_value, target_func, AsyncMock(side_effect=[0, 1])) - mock_config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert ( - switch_entity_id := entity_registry.async_get_entity_id( - SWITCH_DOMAIN, - OPENTHERM_DOMAIN, - f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}", - ) - ) is not None - assert hass.states.get(switch_entity_id).state == STATE_UNKNOWN - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: switch_entity_id, - }, - blocking=True, - ) - assert hass.states.get(switch_entity_id).state == STATE_OFF - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: switch_entity_id, - }, - blocking=True, - ) - assert hass.states.get(switch_entity_id).state == STATE_ON - - mock_func = getattr(mock_pyotgw.return_value, target_func) - assert mock_func.await_count == 2 - mock_func.assert_has_awaits([call(0), call(1)]) diff --git a/tests/components/openuv/conftest.py b/tests/components/openuv/conftest.py index 9bb1970bc2f..69563c94c64 100644 --- a/tests/components/openuv/conftest.py +++ b/tests/components/openuv/conftest.py @@ -1,11 +1,10 @@ """Define test fixtures for OpenUV.""" -from collections.abc import Generator import json -from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest +from typing_extensions import Generator from homeassistant.components.openuv import CONF_FROM_WINDOW, CONF_TO_WINDOW, DOMAIN from homeassistant.const import ( @@ -14,7 +13,6 @@ from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, ) -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -43,9 +41,7 @@ def client_fixture(data_protection_window, data_uv_index): @pytest.fixture(name="config_entry") -def config_entry_fixture( - hass: HomeAssistant, config: dict[str, Any] -) -> 
MockConfigEntry: +def config_entry_fixture(hass, config): """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -58,7 +54,7 @@ def config_entry_fixture( @pytest.fixture(name="config") -def config_fixture() -> dict[str, Any]: +def config_fixture(): """Define a config entry data fixture.""" return { CONF_API_KEY: TEST_API_KEY, @@ -93,9 +89,7 @@ async def mock_pyopenuv_fixture(client): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture( - hass: HomeAssistant, config_entry: MockConfigEntry, mock_pyopenuv: None -) -> None: +async def setup_config_entry_fixture(hass, config_entry, mock_pyopenuv): """Define a fixture to set up openuv.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/openuv/test_config_flow.py b/tests/components/openuv/test_config_flow.py index 182f66c887f..3d31cf53250 100644 --- a/tests/components/openuv/test_config_flow.py +++ b/tests/components/openuv/test_config_flow.py @@ -7,7 +7,7 @@ import pytest import voluptuous as vol from homeassistant.components.openuv import CONF_FROM_WINDOW, CONF_TO_WINDOW, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import ( CONF_API_KEY, CONF_ELEVATION, @@ -19,8 +19,6 @@ from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_API_KEY, TEST_ELEVATION, TEST_LATITUDE, TEST_LONGITUDE -from tests.common import MockConfigEntry - pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -107,10 +105,12 @@ async def test_options_flow( async def test_step_reauth( - hass: HomeAssistant, config, config_entry: MockConfigEntry, setup_config_entry + hass: HomeAssistant, config, config_entry, setup_config_entry ) -> None: """Test that the reauth step works.""" - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_REAUTH}, data=config + ) assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) diff --git a/tests/components/openuv/test_diagnostics.py b/tests/components/openuv/test_diagnostics.py index 61b68b5ad90..4b5114bccd1 100644 --- a/tests/components/openuv/test_diagnostics.py +++ b/tests/components/openuv/test_diagnostics.py @@ -4,7 +4,6 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -36,9 +35,6 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, - "created_at": ANY, - "modified_at": ANY, - "discovery_keys": {}, }, "data": { "protection_window": { diff --git a/tests/components/openweathermap/test_config_flow.py b/tests/components/openweathermap/test_config_flow.py index aec34360754..be02a6b01a9 100644 --- a/tests/components/openweathermap/test_config_flow.py +++ b/tests/components/openweathermap/test_config_flow.py @@ -7,7 +7,6 @@ from pyopenweathermap import ( CurrentWeather, DailyTemperature, DailyWeatherForecast, - MinutelyWeatherForecast, RequestError, WeatherCondition, WeatherReport, @@ -46,7 +45,7 @@ CONFIG = { VALID_YAML_CONFIG = {CONF_API_KEY: "foo"} -def _create_mocked_owm_factory(is_valid: bool): +def 
_create_mocked_owm_client(is_valid: bool): current_weather = CurrentWeather( date_time=datetime.fromtimestamp(1714063536, tz=UTC), temperature=6.84, @@ -106,12 +105,7 @@ def _create_mocked_owm_factory(is_valid: bool): rain=0, snow=0, ) - minutely_weather_forecast = MinutelyWeatherForecast( - date_time=1728672360, precipitation=2.54 - ) - weather_report = WeatherReport( - current_weather, [minutely_weather_forecast], [], [daily_weather_forecast] - ) + weather_report = WeatherReport(current_weather, [], [daily_weather_forecast]) mocked_owm_client = MagicMock() mocked_owm_client.validate_key = AsyncMock(return_value=is_valid) @@ -124,18 +118,18 @@ def _create_mocked_owm_factory(is_valid: bool): def mock_owm_client(): """Mock config_flow OWMClient.""" with patch( - "homeassistant.components.openweathermap.create_owm_client", - ) as mock: - yield mock + "homeassistant.components.openweathermap.OWMClient", + ) as owm_client_mock: + yield owm_client_mock @pytest.fixture(name="config_flow_owm_client_mock") def mock_config_flow_owm_client(): """Mock config_flow OWMClient.""" with patch( - "homeassistant.components.openweathermap.utils.create_owm_client", - ) as mock: - yield mock + "homeassistant.components.openweathermap.utils.OWMClient", + ) as config_flow_owm_client_mock: + yield config_flow_owm_client_mock async def test_successful_config_flow( @@ -144,7 +138,7 @@ async def test_successful_config_flow( config_flow_owm_client_mock, ) -> None: """Test that the form is served with valid input.""" - mock = _create_mocked_owm_factory(True) + mock = _create_mocked_owm_client(True) owm_client_mock.return_value = mock config_flow_owm_client_mock.return_value = mock @@ -183,7 +177,7 @@ async def test_abort_config_flow( config_flow_owm_client_mock, ) -> None: """Test that the form is served with same data.""" - mock = _create_mocked_owm_factory(True) + mock = _create_mocked_owm_client(True) owm_client_mock.return_value = mock config_flow_owm_client_mock.return_value = mock @@ -206,7 +200,7 @@ async def test_config_flow_options_change( config_flow_owm_client_mock, ) -> None: """Test that the options form.""" - mock = _create_mocked_owm_factory(True) + mock = _create_mocked_owm_client(True) owm_client_mock.return_value = mock config_flow_owm_client_mock.return_value = mock @@ -267,7 +261,7 @@ async def test_form_invalid_api_key( config_flow_owm_client_mock, ) -> None: """Test that the form is served with no input.""" - config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(False) + config_flow_owm_client_mock.return_value = _create_mocked_owm_client(False) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=CONFIG ) @@ -275,7 +269,7 @@ async def test_form_invalid_api_key( assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_api_key"} - config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(True) + config_flow_owm_client_mock.return_value = _create_mocked_owm_client(True) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=CONFIG ) @@ -288,7 +282,7 @@ async def test_form_api_call_error( config_flow_owm_client_mock, ) -> None: """Test setting up with api call error.""" - config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(True) + config_flow_owm_client_mock.return_value = _create_mocked_owm_client(True) config_flow_owm_client_mock.side_effect = RequestError("oops") result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": 
SOURCE_USER}, data=CONFIG diff --git a/tests/components/opower/test_config_flow.py b/tests/components/opower/test_config_flow.py index 8134539b0a5..a236494f2c9 100644 --- a/tests/components/opower/test_config_flow.py +++ b/tests/components/opower/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Opower config flow.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch from opower import CannotConnect, InvalidAuth import pytest +from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.opower.const import DOMAIN diff --git a/tests/components/oralb/conftest.py b/tests/components/oralb/conftest.py index 3e5f38ffb73..fa4ba463357 100644 --- a/tests/components/oralb/conftest.py +++ b/tests/components/oralb/conftest.py @@ -1,10 +1,9 @@ """OralB session fixtures.""" -from collections.abc import Generator -from typing import Any from unittest import mock import pytest +from typing_extensions import Generator class MockServices: @@ -20,7 +19,7 @@ class MockBleakClient: services = MockServices() - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Mock BleakClient.""" async def __aenter__(self, *args, **kwargs): diff --git a/tests/components/osoenergy/conftest.py b/tests/components/osoenergy/conftest.py deleted file mode 100644 index bb14fec0241..00000000000 --- a/tests/components/osoenergy/conftest.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Common fixtures for the OSO Energy tests.""" - -from collections.abc import Generator -from typing import Any -from unittest.mock import AsyncMock, MagicMock, patch - -from apyosoenergyapi.waterheater import OSOEnergyWaterHeaterData -import pytest - -from homeassistant.components.osoenergy.const import DOMAIN -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_API_KEY -from homeassistant.core import HomeAssistant -from homeassistant.util.json import JsonObjectType - -from tests.common import MockConfigEntry, load_json_object_fixture - -MOCK_CONFIG = { - CONF_API_KEY: "secret_api_key", -} -TEST_USER_EMAIL = "test_user_email@domain.com" - - -@pytest.fixture -def water_heater_fixture() -> JsonObjectType: - """Load the water heater fixture.""" - return load_json_object_fixture("water_heater.json", DOMAIN) - - -@pytest.fixture -def mock_water_heater(water_heater_fixture) -> MagicMock: - """Water heater mock object.""" - mock_heater = MagicMock(OSOEnergyWaterHeaterData) - for key, value in water_heater_fixture.items(): - setattr(mock_heater, key, value) - return mock_heater - - -@pytest.fixture -def mock_entry_data() -> dict[str, Any]: - """Mock config entry data for fixture.""" - return MOCK_CONFIG - - -@pytest.fixture -def mock_config_entry( - hass: HomeAssistant, mock_entry_data: dict[str, Any] -) -> ConfigEntry: - """Mock a config entry setup for incomfort integration.""" - entry = MockConfigEntry(domain=DOMAIN, data=mock_entry_data) - entry.add_to_hass(hass) - return entry - - -@pytest.fixture -async def mock_osoenergy_client(mock_water_heater) -> Generator[AsyncMock]: - """Mock a OSO Energy client.""" - - with ( - patch( - "homeassistant.components.osoenergy.OSOEnergy", MagicMock() - ) as mock_client, - patch( - "homeassistant.components.osoenergy.config_flow.OSOEnergy", new=mock_client - ), - ): - mock_session = MagicMock() - mock_session.device_list = {"water_heater": [mock_water_heater]} - mock_session.start_session = AsyncMock( - return_value={"water_heater": [mock_water_heater]} - ) - 
mock_session.update_data = AsyncMock(return_value=True) - - mock_client().session = mock_session - - mock_hotwater = MagicMock() - mock_hotwater.get_water_heater = AsyncMock(return_value=mock_water_heater) - mock_hotwater.set_profile = AsyncMock(return_value=True) - mock_hotwater.set_v40_min = AsyncMock(return_value=True) - mock_hotwater.turn_on = AsyncMock(return_value=True) - mock_hotwater.turn_off = AsyncMock(return_value=True) - - mock_client().hotwater = mock_hotwater - - mock_client().get_user_email = AsyncMock(return_value=TEST_USER_EMAIL) - mock_client().start_session = AsyncMock( - return_value={"water_heater": [mock_water_heater]} - ) - - yield mock_client diff --git a/tests/components/osoenergy/fixtures/water_heater.json b/tests/components/osoenergy/fixtures/water_heater.json deleted file mode 100644 index 82bdafb5d8a..00000000000 --- a/tests/components/osoenergy/fixtures/water_heater.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "device_id": "osoenergy_water_heater", - "device_type": "SAGA S200", - "device_name": "TEST DEVICE", - "current_temperature": 60, - "min_temperature": 10, - "max_temperature": 75, - "target_temperature": 60, - "target_temperature_low": 57, - "target_temperature_high": 63, - "available": true, - "online": true, - "current_operation": "on", - "optimization_mode": "oso", - "heater_mode": "auto", - "profile": [ - 10, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, - 60, 60, 60, 60, 60 - ] -} diff --git a/tests/components/osoenergy/snapshots/test_water_heater.ambr b/tests/components/osoenergy/snapshots/test_water_heater.ambr deleted file mode 100644 index 5ebac405144..00000000000 --- a/tests/components/osoenergy/snapshots/test_water_heater.ambr +++ /dev/null @@ -1,57 +0,0 @@ -# serializer version: 1 -# name: test_water_heater[water_heater.test_device-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_temp': 75, - 'min_temp': 10, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'water_heater', - 'entity_category': None, - 'entity_id': 'water_heater.test_device', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'osoenergy', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'osoenergy_water_heater', - 'unit_of_measurement': None, - }) -# --- -# name: test_water_heater[water_heater.test_device-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 60, - 'friendly_name': 'TEST DEVICE', - 'max_temp': 75, - 'min_temp': 10, - 'supported_features': , - 'target_temp_high': 63, - 'target_temp_low': 57, - 'temperature': 60, - }), - 'context': , - 'entity_id': 'water_heater.test_device', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'eco', - }) -# --- diff --git a/tests/components/osoenergy/test_config_flow.py b/tests/components/osoenergy/test_config_flow.py index 0d77781a538..d9db5888cc3 100644 --- a/tests/components/osoenergy/test_config_flow.py +++ b/tests/components/osoenergy/test_config_flow.py @@ -65,11 +65,18 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: "homeassistant.components.osoenergy.config_flow.OSOEnergy.get_user_email", return_value=None, ): - result = await mock_config.start_reauth_flow(hass) + result = await 
hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_config.unique_id, + "entry_id": mock_config.entry_id, + }, + data=mock_config.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] is None + assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.osoenergy.config_flow.OSOEnergy.get_user_email", diff --git a/tests/components/osoenergy/test_water_heater.py b/tests/components/osoenergy/test_water_heater.py deleted file mode 100644 index 851e710fa1c..00000000000 --- a/tests/components/osoenergy/test_water_heater.py +++ /dev/null @@ -1,276 +0,0 @@ -"""The water heater tests for the OSO Energy platform.""" - -from unittest.mock import ANY, MagicMock, patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.osoenergy.const import DOMAIN -from homeassistant.components.osoenergy.water_heater import ( - ATTR_UNTIL_TEMP_LIMIT, - ATTR_V40MIN, - SERVICE_GET_PROFILE, - SERVICE_SET_PROFILE, - SERVICE_SET_V40MIN, -) -from homeassistant.components.water_heater import ( - DOMAIN as WATER_HEATER_DOMAIN, - SERVICE_SET_TEMPERATURE, -) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_ENTITY_ID, - ATTR_TEMPERATURE, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import snapshot_platform - - -@patch("homeassistant.components.osoenergy.PLATFORMS", [Platform.WATER_HEATER]) -async def test_water_heater( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_osoenergy_client: MagicMock, - snapshot: SnapshotAssertion, - mock_config_entry: ConfigEntry, -) -> None: - """Test states of the water heater.""" - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.freeze_time("2024-10-10 00:00:00") -async def test_get_profile( - hass: HomeAssistant, - mock_osoenergy_client: MagicMock, - mock_config_entry: ConfigEntry, -) -> None: - """Test getting the heater profile.""" - await hass.config_entries.async_setup(mock_config_entry.entry_id) - profile = await hass.services.async_call( - DOMAIN, - SERVICE_GET_PROFILE, - {ATTR_ENTITY_ID: "water_heater.test_device"}, - blocking=True, - return_response=True, - ) - - # The profile is returned in UTC format from the server - # Each index represents an hour from the current day (0-23). 
For example index 2 - 02:00 UTC - # Depending on the time zone and the DST the UTC hour is converted to local time and the value is placed in the correct index - # Example: time zone 'US/Pacific' and DST (-7 hours difference) - index 9 (09:00 UTC) will be converted to index 2 (02:00 Local) - assert profile == { - "water_heater.test_device": { - "profile": [ - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 10, - 60, - 60, - 60, - 60, - 60, - 60, - ], - }, - } - - -@pytest.mark.freeze_time("2024-10-10 00:00:00") -async def test_set_profile( - hass: HomeAssistant, - mock_osoenergy_client: MagicMock, - mock_config_entry: ConfigEntry, -) -> None: - """Test getting the heater profile.""" - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.services.async_call( - DOMAIN, - SERVICE_SET_PROFILE, - {ATTR_ENTITY_ID: "water_heater.test_device", "hour_01": 45}, - blocking=True, - ) - - # The server expects to receive the profile in UTC format - # Each field represents an hour from the current day (0-23). For example field hour_01 - 01:00 Local time - # Depending on the time zone and the DST the Local hour is converted to UTC time and the value is placed in the correct index - # Example: time zone 'US/Pacific' and DST (-7 hours difference) - index 1 (01:00 Local) will be converted to index 8 (08:00 Utc) - mock_osoenergy_client().hotwater.set_profile.assert_called_once_with( - ANY, - [ - 10, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 45, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - 60, - ], - ) - - -async def test_set_v40_min( - hass: HomeAssistant, - mock_osoenergy_client: MagicMock, - mock_config_entry: ConfigEntry, -) -> None: - """Test getting the heater profile.""" - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.services.async_call( - DOMAIN, - SERVICE_SET_V40MIN, - {ATTR_ENTITY_ID: "water_heater.test_device", ATTR_V40MIN: 300}, - blocking=True, - ) - - mock_osoenergy_client().hotwater.set_v40_min.assert_called_once_with(ANY, 300) - - -async def test_set_temperature( - hass: HomeAssistant, - mock_osoenergy_client: MagicMock, - mock_config_entry: ConfigEntry, -) -> None: - """Test getting the heater profile.""" - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.services.async_call( - WATER_HEATER_DOMAIN, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: "water_heater.test_device", ATTR_TEMPERATURE: 45}, - blocking=True, - ) - - mock_osoenergy_client().hotwater.set_profile.assert_called_once_with( - ANY, - [ - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - 45, - ], - ) - - -async def test_turn_on( - hass: HomeAssistant, - mock_osoenergy_client: MagicMock, - mock_config_entry: ConfigEntry, -) -> None: - """Test turning the heater on.""" - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.services.async_call( - WATER_HEATER_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "water_heater.test_device"}, - blocking=True, - ) - - mock_osoenergy_client().hotwater.turn_on.assert_called_once_with(ANY, True) - - -async def test_turn_off( - hass: HomeAssistant, - mock_osoenergy_client: MagicMock, - mock_config_entry: ConfigEntry, -) -> None: - """Test getting the heater profile.""" - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.services.async_call( - WATER_HEATER_DOMAIN, - 
SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "water_heater.test_device"}, - blocking=True, - ) - - mock_osoenergy_client().hotwater.turn_off.assert_called_once_with(ANY, True) - - -async def test_oso_turn_on( - hass: HomeAssistant, - mock_osoenergy_client: MagicMock, - mock_config_entry: ConfigEntry, -) -> None: - """Test turning the heater on.""" - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.services.async_call( - DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "water_heater.test_device", ATTR_UNTIL_TEMP_LIMIT: False}, - blocking=True, - ) - - mock_osoenergy_client().hotwater.turn_on.assert_called_once_with(ANY, False) - - -async def test_oso_turn_off( - hass: HomeAssistant, - mock_osoenergy_client: MagicMock, - mock_config_entry: ConfigEntry, -) -> None: - """Test getting the heater profile.""" - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.services.async_call( - DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "water_heater.test_device", ATTR_UNTIL_TEMP_LIMIT: False}, - blocking=True, - ) - - mock_osoenergy_client().hotwater.turn_off.assert_called_once_with(ANY, False) diff --git a/tests/components/otbr/__init__.py b/tests/components/otbr/__init__.py index 7d52318b477..2c9daa127c2 100644 --- a/tests/components/otbr/__init__.py +++ b/tests/components/otbr/__init__.py @@ -31,7 +31,6 @@ DATASET_INSECURE_PASSPHRASE = bytes.fromhex( TEST_BORDER_AGENT_EXTENDED_ADDRESS = bytes.fromhex("AEEB2F594B570BBF") TEST_BORDER_AGENT_ID = bytes.fromhex("230C6A1AC57F6F4BE262ACF32E5EF52C") -TEST_BORDER_AGENT_ID_2 = bytes.fromhex("230C6A1AC57F6F4BE262ACF32E5EF52D") ROUTER_DISCOVERY_HASS = { "type_": "_meshcop._udp.local.", diff --git a/tests/components/otbr/conftest.py b/tests/components/otbr/conftest.py index 5ab3e442183..ba0f43c4a71 100644 --- a/tests/components/otbr/conftest.py +++ b/tests/components/otbr/conftest.py @@ -1,8 +1,6 @@ """Test fixtures for the Open Thread Border Router integration.""" -from collections.abc import Generator -from typing import Any -from unittest.mock import AsyncMock, MagicMock, Mock, patch +from unittest.mock import MagicMock, Mock, patch import pytest @@ -20,94 +18,58 @@ from . 
import ( from tests.common import MockConfigEntry -@pytest.fixture(name="enable_compute_pskc") -def enable_compute_pskc_fixture() -> Any: - """Allow controlling if compute_pskc should be enabled.""" - return False - - -@pytest.fixture(name="compute_pskc", autouse=True) -def compute_pskc_fixture(enable_compute_pskc: bool) -> Any: - """Patch homeassistant.components.otbr.util.compute_pskc.""" - compute_pskc = otbr.util.compute_pskc if enable_compute_pskc else None - - with patch( - "homeassistant.components.otbr.util.compute_pskc", side_effect=compute_pskc - ) as compute_pskc_mock: - yield compute_pskc_mock - - -@pytest.fixture(name="dataset") -def dataset_fixture() -> Any: - """Return the discovery info from the supervisor.""" - return DATASET_CH16 - - -@pytest.fixture(name="get_active_dataset_tlvs") -def get_active_dataset_tlvs_fixture(dataset: Any) -> Generator[AsyncMock]: - """Mock get_active_dataset_tlvs.""" - with patch( - "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=dataset - ) as get_active_dataset_tlvs: - yield get_active_dataset_tlvs - - -@pytest.fixture(name="get_border_agent_id") -def get_border_agent_id_fixture() -> Generator[AsyncMock]: - """Mock get_border_agent_id.""" - with patch( - "python_otbr_api.OTBR.get_border_agent_id", return_value=TEST_BORDER_AGENT_ID - ) as get_border_agent_id: - yield get_border_agent_id - - -@pytest.fixture(name="get_extended_address") -def get_extended_address_fixture() -> Generator[AsyncMock]: - """Mock get_extended_address.""" - with patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ) as get_extended_address: - yield get_extended_address - - @pytest.fixture(name="otbr_config_entry_multipan") -async def otbr_config_entry_multipan_fixture( - hass: HomeAssistant, - get_active_dataset_tlvs: AsyncMock, - get_border_agent_id: AsyncMock, - get_extended_address: AsyncMock, -) -> str: +async def otbr_config_entry_multipan_fixture(hass): """Mock Open Thread Border Router config entry.""" config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_MULTIPAN, domain=otbr.DOMAIN, options={}, title="Open Thread Border Router", - unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - return config_entry.entry_id + with ( + patch( + "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 + ), + patch( + "python_otbr_api.OTBR.get_border_agent_id", + return_value=TEST_BORDER_AGENT_ID, + ), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), + patch("homeassistant.components.otbr.util.compute_pskc"), + ): # Patch to speed up tests + assert await hass.config_entries.async_setup(config_entry.entry_id) @pytest.fixture(name="otbr_config_entry_thread") -async def otbr_config_entry_thread_fixture( - hass: HomeAssistant, - get_active_dataset_tlvs: AsyncMock, - get_border_agent_id: AsyncMock, - get_extended_address: AsyncMock, -) -> None: +async def otbr_config_entry_thread_fixture(hass): """Mock Open Thread Border Router config entry.""" config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_THREAD, domain=otbr.DOMAIN, options={}, title="Open Thread Border Router", - unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + with ( + patch( + "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 + ), + patch( + 
"python_otbr_api.OTBR.get_border_agent_id", + return_value=TEST_BORDER_AGENT_ID, + ), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), + patch("homeassistant.components.otbr.util.compute_pskc"), + ): # Patch to speed up tests + assert await hass.config_entries.async_setup(config_entry.entry_id) @pytest.fixture(autouse=True) diff --git a/tests/components/otbr/test_config_flow.py b/tests/components/otbr/test_config_flow.py index cd02c14e4eb..224f77931e5 100644 --- a/tests/components/otbr/test_config_flow.py +++ b/tests/components/otbr/test_config_flow.py @@ -3,29 +3,28 @@ import asyncio from http import HTTPStatus from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import patch import aiohttp import pytest import python_otbr_api -from homeassistant.components import otbr +from homeassistant.components import hassio, otbr from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.service_info.hassio import HassioServiceInfo -from . import DATASET_CH15, DATASET_CH16, TEST_BORDER_AGENT_ID, TEST_BORDER_AGENT_ID_2 +from . import DATASET_CH15, DATASET_CH16 from tests.common import MockConfigEntry, MockModule, mock_integration from tests.test_util.aiohttp import AiohttpClientMocker -HASSIO_DATA = HassioServiceInfo( +HASSIO_DATA = hassio.HassioServiceInfo( config={"host": "core-silabs-multiprotocol", "port": 8081}, name="Silicon Labs Multiprotocol", slug="otbr", uuid="12345", ) -HASSIO_DATA_2 = HassioServiceInfo( +HASSIO_DATA_2 = hassio.HassioServiceInfo( config={"host": "core-silabs-multiprotocol_2", "port": 8082}, name="Silicon Labs Multiprotocol", slug="other_addon", @@ -33,116 +32,34 @@ HASSIO_DATA_2 = HassioServiceInfo( ) -@pytest.fixture(name="otbr_addon_info") -def otbr_addon_info_fixture(addon_info: AsyncMock, addon_installed) -> AsyncMock: - """Mock Supervisor otbr add-on info.""" - addon_info.return_value.available = True - addon_info.return_value.hostname = "" - addon_info.return_value.options = {} - addon_info.return_value.state = "unknown" - addon_info.return_value.update_available = False - addon_info.return_value.version = None - return addon_info +@pytest.fixture(name="addon_info") +def addon_info_fixture(): + """Mock Supervisor add-on info.""" + with patch( + "homeassistant.components.otbr.config_flow.async_get_addon_info", + ) as addon_info: + addon_info.return_value = { + "available": True, + "hostname": None, + "options": {}, + "state": None, + "update_available": False, + "version": None, + } + yield addon_info -@pytest.mark.parametrize( - "url", - [ - "http://custom_url:1234", - "http://custom_url:1234/", - "http://custom_url:1234//", - ], -) -@pytest.mark.usefixtures( - "get_active_dataset_tlvs", - "get_border_agent_id", -) async def test_user_flow( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, url: str -) -> None: - """Test the user flow.""" - await _finish_user_flow(hass, url) - - -@pytest.mark.usefixtures( - "get_active_dataset_tlvs", - "get_extended_address", -) -async def test_user_flow_additional_entry( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: - """Test more than a single entry is allowed.""" - url1 = "http://custom_url:1234" - url2 = "http://custom_url_2:1234" - aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) - aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID_2.hex()) - - mock_integration(hass, 
MockModule("hassio")) - - # Setup a config entry - config_entry = MockConfigEntry( - data={"url": url2}, - domain=otbr.DOMAIN, - options={}, - title="Open Thread Border Router", - unique_id=TEST_BORDER_AGENT_ID_2.hex(), - ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - - # Do a user flow - await _finish_user_flow(hass) - - -@pytest.mark.usefixtures( - "get_active_dataset_tlvs", - "get_extended_address", -) -async def test_user_flow_additional_entry_fail_get_address( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test more than a single entry is allowed. - - This tets the behavior when we can't read the extended address from the existing - config entry. - """ - url1 = "http://custom_url:1234" - url2 = "http://custom_url_2:1234" - aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID_2.hex()) - - mock_integration(hass, MockModule("hassio")) - - # Setup a config entry - config_entry = MockConfigEntry( - data={"url": url2}, - domain=otbr.DOMAIN, - options={}, - title="Open Thread Border Router", - unique_id=TEST_BORDER_AGENT_ID_2.hex(), - ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - - # Do a user flow - aioclient_mock.clear_requests() - aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) - aioclient_mock.get(f"{url2}/node/ba-id", status=HTTPStatus.NOT_FOUND) - await _finish_user_flow(hass) - assert f"Could not read border agent id from {url2}" in caplog.text - - -async def _finish_user_flow( - hass: HomeAssistant, url: str = "http://custom_url:1234" -) -> None: - """Finish a user flow.""" - stripped_url = "http://custom_url:1234" + """Test the user flow.""" + url = "http://custom_url:1234" + aioclient_mock.get(f"{url}/node/dataset/active", text="aa") result = await hass.config_entries.flow.async_init( otbr.DOMAIN, context={"source": "user"} ) - expected_data = {"url": stripped_url} + expected_data = {"url": url} assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -163,56 +80,13 @@ async def _finish_user_flow( assert result["options"] == {} assert len(mock_setup_entry.mock_calls) == 1 - config_entry = result["result"] + config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] assert config_entry.data == expected_data assert config_entry.options == {} assert config_entry.title == "Open Thread Border Router" - assert config_entry.unique_id == TEST_BORDER_AGENT_ID.hex() + assert config_entry.unique_id == otbr.DOMAIN -@pytest.mark.usefixtures( - "get_active_dataset_tlvs", - "get_border_agent_id", - "get_extended_address", -) -async def test_user_flow_additional_entry_same_address( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test more than a single entry is allowed.""" - mock_integration(hass, MockModule("hassio")) - - # Setup a config entry - config_entry = MockConfigEntry( - data={"url": "http://custom_url:1234"}, - domain=otbr.DOMAIN, - options={}, - title="Open Thread Border Router", - unique_id=TEST_BORDER_AGENT_ID.hex(), - ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - - # Start user flow - url = "http://custom_url:1234" - aioclient_mock.get(f"{url}/node/dataset/active", text="aa") - result = await hass.config_entries.flow.async_init( - otbr.DOMAIN, context={"source": "user"} - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - result 
= await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "url": url, - }, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "already_configured"} - - -@pytest.mark.usefixtures("get_border_agent_id") async def test_user_flow_router_not_setup( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: @@ -276,11 +150,10 @@ async def test_user_flow_router_not_setup( assert config_entry.data == expected_data assert config_entry.options == {} assert config_entry.title == "Open Thread Border Router" - assert config_entry.unique_id == TEST_BORDER_AGENT_ID.hex() + assert config_entry.unique_id == otbr.DOMAIN -@pytest.mark.usefixtures("get_border_agent_id") -async def test_user_flow_get_dataset_404( +async def test_user_flow_404( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test the user flow.""" @@ -311,30 +184,7 @@ async def test_user_flow_get_dataset_404( aiohttp.ClientError, ], ) -async def test_user_flow_get_ba_id_connect_error( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, error -) -> None: - """Test the user flow.""" - await _test_user_flow_connect_error(hass, "get_border_agent_id", error) - - -@pytest.mark.usefixtures("get_border_agent_id") -@pytest.mark.parametrize( - "error", - [ - TimeoutError, - python_otbr_api.OTBRError, - aiohttp.ClientError, - ], -) -async def test_user_flow_get_dataset_connect_error( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, error -) -> None: - """Test the user flow.""" - await _test_user_flow_connect_error(hass, "get_active_dataset_tlvs", error) - - -async def _test_user_flow_connect_error(hass: HomeAssistant, func, error) -> None: +async def test_user_flow_connect_error(hass: HomeAssistant, error) -> None: """Test the user flow.""" result = await hass.config_entries.flow.async_init( otbr.DOMAIN, context={"source": "user"} @@ -343,7 +193,7 @@ async def _test_user_flow_connect_error(hass: HomeAssistant, func, error) -> Non assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with patch(f"python_otbr_api.OTBR.{func}", side_effect=error): + with patch("python_otbr_api.OTBR.get_active_dataset_tlvs", side_effect=error): result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -354,9 +204,8 @@ async def _test_user_flow_connect_error(hass: HomeAssistant, func, error) -> Non assert result["errors"] == {"base": "cannot_connect"} -@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_addon_info + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: """Test the hassio discovery flow.""" url = "http://core-silabs-multiprotocol:8081" @@ -387,16 +236,21 @@ async def test_hassio_discovery_flow( assert config_entry.unique_id == HASSIO_DATA.uuid -@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_yellow( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_addon_info + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: """Test the hassio discovery flow.""" url = "http://core-silabs-multiprotocol:8081" aioclient_mock.get(f"{url}/node/dataset/active", text="aa") - otbr_addon_info.return_value.available = True - otbr_addon_info.return_value.options = {"device": "/dev/ttyAMA1"} + addon_info.return_value = { + "available": True, + "hostname": None, + "options": {"device": "/dev/ttyAMA1"}, + "state": None, + 
"update_available": False, + "version": None, + } with ( patch( @@ -439,20 +293,25 @@ async def test_hassio_discovery_flow_yellow( ), ], ) -@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_sky_connect( device: str, title: str, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - otbr_addon_info, + addon_info, ) -> None: """Test the hassio discovery flow.""" url = "http://core-silabs-multiprotocol:8081" aioclient_mock.get(f"{url}/node/dataset/active", text="aa") - otbr_addon_info.return_value.available = True - otbr_addon_info.return_value.options = {"device": device} + addon_info.return_value = { + "available": True, + "hostname": None, + "options": {"device": device}, + "state": None, + "update_available": False, + "version": None, + } with patch( "homeassistant.components.otbr.async_setup_entry", @@ -479,133 +338,59 @@ async def test_hassio_discovery_flow_sky_connect( assert config_entry.unique_id == HASSIO_DATA.uuid -@pytest.mark.usefixtures("get_active_dataset_tlvs", "get_extended_address") async def test_hassio_discovery_flow_2x_addons( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_addon_info + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: """Test the hassio discovery flow when the user has 2 addons with otbr support.""" url1 = "http://core-silabs-multiprotocol:8081" url2 = "http://core-silabs-multiprotocol_2:8081" aioclient_mock.get(f"{url1}/node/dataset/active", text="aa") aioclient_mock.get(f"{url2}/node/dataset/active", text="bb") - aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) - aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID_2.hex()) - async def _addon_info(slug: str) -> Mock: + async def _addon_info(hass, slug): await asyncio.sleep(0) if slug == "otbr": - device = ( - "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" - "9e2adbd75b8beb119fe564a0f320645d-if00-port0" - ) - else: - device = ( - "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" - "9e2adbd75b8beb119fe564a0f320645d-if00-port1" - ) - return Mock( - available=True, - hostname=otbr_addon_info.return_value.hostname, - options={"device": device}, - state=otbr_addon_info.return_value.state, - update_available=otbr_addon_info.return_value.update_available, - version=otbr_addon_info.return_value.version, + return { + "available": True, + "hostname": None, + "options": { + "device": ( + "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" + "9e2adbd75b8beb119fe564a0f320645d-if00-port0" + ) + }, + "state": None, + "update_available": False, + "version": None, + } + return { + "available": True, + "hostname": None, + "options": { + "device": ( + "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" + "9e2adbd75b8beb119fe564a0f320645d-if00-port1" + ) + }, + "state": None, + "update_available": False, + "version": None, + } + + addon_info.side_effect = _addon_info + + with patch( + "homeassistant.components.otbr.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + result1 = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA + ) + result2 = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA_2 ) - otbr_addon_info.side_effect = _addon_info - - result1 = await hass.config_entries.flow.async_init( - otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA - ) - result2 = await hass.config_entries.flow.async_init( - otbr.DOMAIN, context={"source": "hassio"}, 
data=HASSIO_DATA_2 - ) - - results = [result1, result2] - - expected_data = { - "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", - } - expected_data_2 = { - "url": f"http://{HASSIO_DATA_2.config['host']}:{HASSIO_DATA_2.config['port']}", - } - - assert results[0]["type"] is FlowResultType.CREATE_ENTRY - assert ( - results[0]["title"] == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" - ) - assert results[0]["data"] == expected_data - assert results[0]["options"] == {} - - assert results[1]["type"] is FlowResultType.CREATE_ENTRY - assert ( - results[1]["title"] == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" - ) - assert results[1]["data"] == expected_data_2 - assert results[1]["options"] == {} - - assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 2 - - config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] - assert config_entry.data == expected_data - assert config_entry.options == {} - assert ( - config_entry.title == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" - ) - assert config_entry.unique_id == HASSIO_DATA.uuid - - config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[1] - assert config_entry.data == expected_data_2 - assert config_entry.options == {} - assert ( - config_entry.title == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" - ) - assert config_entry.unique_id == HASSIO_DATA_2.uuid - - -@pytest.mark.usefixtures("get_active_dataset_tlvs", "get_extended_address") -async def test_hassio_discovery_flow_2x_addons_same_ext_address( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_addon_info -) -> None: - """Test the hassio discovery flow when the user has 2 addons with otbr support.""" - url1 = "http://core-silabs-multiprotocol:8081" - url2 = "http://core-silabs-multiprotocol_2:8081" - aioclient_mock.get(f"{url1}/node/dataset/active", text="aa") - aioclient_mock.get(f"{url2}/node/dataset/active", text="bb") - aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) - aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) - - async def _addon_info(slug: str) -> Mock: - await asyncio.sleep(0) - if slug == "otbr": - device = ( - "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" - "9e2adbd75b8beb119fe564a0f320645d-if00-port0" - ) - else: - device = ( - "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" - "9e2adbd75b8beb119fe564a0f320645d-if00-port1" - ) - return Mock( - available=True, - hostname=otbr_addon_info.return_value.hostname, - options={"device": device}, - state=otbr_addon_info.return_value.state, - update_available=otbr_addon_info.return_value.update_available, - version=otbr_addon_info.return_value.version, - ) - - otbr_addon_info.side_effect = _addon_info - - result1 = await hass.config_entries.flow.async_init( - otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA - ) - result2 = await hass.config_entries.flow.async_init( - otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA_2 - ) - - results = [result1, result2] + results = [result1, result2] expected_data = { "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", @@ -618,8 +403,9 @@ async def test_hassio_discovery_flow_2x_addons_same_ext_address( assert results[0]["data"] == expected_data assert results[0]["options"] == {} assert results[1]["type"] is FlowResultType.ABORT - assert results[1]["reason"] == "already_configured" + assert results[1]["reason"] == "single_instance_allowed" assert 
len(hass.config_entries.async_entries(otbr.DOMAIN)) == 1 + assert len(mock_setup_entry.mock_calls) == 1 config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] assert config_entry.data == expected_data @@ -630,9 +416,8 @@ async def test_hassio_discovery_flow_2x_addons_same_ext_address( assert config_entry.unique_id == HASSIO_DATA.uuid -@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_router_not_setup( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_addon_info + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: """Test the hassio discovery flow when the border router has no dataset. @@ -688,9 +473,8 @@ async def test_hassio_discovery_flow_router_not_setup( assert config_entry.unique_id == HASSIO_DATA.uuid -@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_router_not_setup_has_preferred( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_addon_info + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: """Test the hassio discovery flow when the border router has no dataset. @@ -741,12 +525,11 @@ async def test_hassio_discovery_flow_router_not_setup_has_preferred( assert config_entry.unique_id == HASSIO_DATA.uuid -@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_router_not_setup_has_preferred_2( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, multiprotocol_addon_manager_mock, - otbr_addon_info, + addon_info, ) -> None: """Test the hassio discovery flow when the border router has no dataset. @@ -805,7 +588,6 @@ async def test_hassio_discovery_flow_router_not_setup_has_preferred_2( assert config_entry.unique_id == HASSIO_DATA.uuid -@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_404( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: @@ -820,7 +602,6 @@ async def test_hassio_discovery_flow_404( assert result["reason"] == "unknown" -@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_new_port_missing_unique_id( hass: HomeAssistant, ) -> None: @@ -844,7 +625,7 @@ async def test_hassio_discovery_flow_new_port_missing_unique_id( ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" + assert result["reason"] == "single_instance_allowed" expected_data = { "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", @@ -853,7 +634,6 @@ async def test_hassio_discovery_flow_new_port_missing_unique_id( assert config_entry.data == expected_data -@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_new_port(hass: HomeAssistant) -> None: """Test the port can be updated.""" mock_integration(hass, MockModule("hassio")) @@ -876,7 +656,7 @@ async def test_hassio_discovery_flow_new_port(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" + assert result["reason"] == "single_instance_allowed" expected_data = { "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", @@ -885,12 +665,6 @@ async def test_hassio_discovery_flow_new_port(hass: HomeAssistant) -> None: assert config_entry.data == expected_data -@pytest.mark.usefixtures( - "otbr_addon_info", - "get_active_dataset_tlvs", - "get_border_agent_id", - "get_extended_address", -) async def test_hassio_discovery_flow_new_port_other_addon(hass: HomeAssistant) -> None: """Test the port 
is not updated if we get data for another addon hosting OTBR.""" mock_integration(hass, MockModule("hassio")) @@ -909,34 +683,22 @@ async def test_hassio_discovery_flow_new_port_other_addon(hass: HomeAssistant) - otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA ) - # Another entry will be created - assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" - # Make sure the data of the existing entry was not updated + # Make sure the data was not updated expected_data = { "url": f"http://openthread_border_router:{HASSIO_DATA.config['port']+1}", } - config_entry = hass.config_entries.async_get_entry(config_entry.entry_id) + config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] assert config_entry.data == expected_data -@pytest.mark.parametrize( - ("source", "data", "expected_result"), - [ - ("hassio", HASSIO_DATA, FlowResultType.CREATE_ENTRY), - ("user", None, FlowResultType.FORM), - ], -) -@pytest.mark.usefixtures( - "otbr_addon_info", - "get_active_dataset_tlvs", - "get_border_agent_id", - "get_extended_address", -) -async def test_config_flow_additional_entry( - hass: HomeAssistant, source: str, data: Any, expected_result: FlowResultType +@pytest.mark.parametrize(("source", "data"), [("hassio", HASSIO_DATA), ("user", None)]) +async def test_config_flow_single_entry( + hass: HomeAssistant, source: str, data: Any ) -> None: - """Test more than a single entry is allowed.""" + """Test only a single entry is allowed.""" mock_integration(hass, MockModule("hassio")) # Setup the config entry @@ -949,11 +711,13 @@ async def test_config_flow_additional_entry( config_entry.add_to_hass(hass) with patch( - "homeassistant.components.otbr.async_setup_entry", + "homeassistant.components.homeassistant_yellow.async_setup_entry", return_value=True, - ): + ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( otbr.DOMAIN, context={"source": source}, data=data ) - assert result["type"] is expected_result + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" + mock_setup_entry.assert_not_called() diff --git a/tests/components/otbr/test_init.py b/tests/components/otbr/test_init.py index faf13786107..0c56e9ac8da 100644 --- a/tests/components/otbr/test_init.py +++ b/tests/components/otbr/test_init.py @@ -1,6 +1,7 @@ """Test the Open Thread Border Router integration.""" import asyncio +from http import HTTPStatus from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, patch @@ -11,14 +12,15 @@ from zeroconf.asyncio import AsyncServiceInfo from homeassistant.components import otbr, thread from homeassistant.components.thread import discovery -from homeassistant.config_entries import SOURCE_HASSIO, SOURCE_USER from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from . 
import ( BASE_URL, CONFIG_ENTRY_DATA_MULTIPAN, + CONFIG_ENTRY_DATA_THREAD, DATASET_CH15, DATASET_CH16, DATASET_INSECURE_NW_KEY, @@ -38,16 +40,6 @@ DATASET_NO_CHANNEL = bytes.fromhex( ) -@pytest.fixture(name="enable_mocks", autouse=True) -def enable_mocks_fixture( - get_active_dataset_tlvs: AsyncMock, - get_border_agent_id: AsyncMock, - get_extended_address: AsyncMock, -) -> None: - """Enable API mocks.""" - - -@pytest.mark.usefixtures("supervisor_client") async def test_import_dataset( hass: HomeAssistant, mock_async_zeroconf: MagicMock, @@ -72,11 +64,21 @@ async def test_import_dataset( domain=otbr.DOMAIN, options={}, title="My OTBR", - unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( + patch( + "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 + ), + patch( + "python_otbr_api.OTBR.get_border_agent_id", + return_value=TEST_BORDER_AGENT_ID, + ), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), patch( "homeassistant.components.thread.dataset_store.BORDER_AGENT_DISCOVERY_TIMEOUT", 0.1, @@ -140,10 +142,20 @@ async def test_import_share_radio_channel_collision( domain=otbr.DOMAIN, options={}, title="My OTBR", - unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( + patch( + "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 + ), + patch( + "python_otbr_api.OTBR.get_border_agent_id", + return_value=TEST_BORDER_AGENT_ID, + ), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), patch( "homeassistant.components.thread.dataset_store.DatasetStore.async_add" ) as mock_add, @@ -180,10 +192,18 @@ async def test_import_share_radio_no_channel_collision( domain=otbr.DOMAIN, options={}, title="My OTBR", - unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( + patch("python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=dataset), + patch( + "python_otbr_api.OTBR.get_border_agent_id", + return_value=TEST_BORDER_AGENT_ID, + ), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), patch( "homeassistant.components.thread.dataset_store.DatasetStore.async_add" ) as mock_add, @@ -202,8 +222,6 @@ async def test_import_share_radio_no_channel_collision( ) -@pytest.mark.usefixtures("supervisor_client") -@pytest.mark.parametrize("enable_compute_pskc", [True]) @pytest.mark.parametrize( "dataset", [DATASET_INSECURE_NW_KEY, DATASET_INSECURE_PASSPHRASE] ) @@ -219,10 +237,18 @@ async def test_import_insecure_dataset( domain=otbr.DOMAIN, options={}, title="My OTBR", - unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( + patch("python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=dataset), + patch( + "python_otbr_api.OTBR.get_border_agent_id", + return_value=TEST_BORDER_AGENT_ID, + ), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), patch( "homeassistant.components.thread.dataset_store.DatasetStore.async_add" ) as mock_add, @@ -248,9 +274,7 @@ async def test_import_insecure_dataset( aiohttp.ClientError, ], ) -async def test_config_entry_not_ready( - hass: HomeAssistant, get_active_dataset_tlvs: AsyncMock, error -) -> None: +async def test_config_entry_not_ready(hass: HomeAssistant, error) -> None: """Test raising ConfigEntryNotReady .""" config_entry = MockConfigEntry( 
@@ -258,16 +282,13 @@ async def test_config_entry_not_ready( domain=otbr.DOMAIN, options={}, title="My OTBR", - unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) - get_active_dataset_tlvs.side_effect = error - assert not await hass.config_entries.async_setup(config_entry.entry_id) + with patch("python_otbr_api.OTBR.get_active_dataset_tlvs", side_effect=error): + assert not await hass.config_entries.async_setup(config_entry.entry_id) -async def test_border_agent_id_not_supported( - hass: HomeAssistant, get_border_agent_id: AsyncMock -) -> None: +async def test_border_agent_id_not_supported(hass: HomeAssistant) -> None: """Test border router does not support border agent ID.""" config_entry = MockConfigEntry( @@ -275,11 +296,18 @@ async def test_border_agent_id_not_supported( domain=otbr.DOMAIN, options={}, title="My OTBR", - unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) - get_border_agent_id.side_effect = python_otbr_api.GetBorderAgentIdNotSupportedError - assert not await hass.config_entries.async_setup(config_entry.entry_id) + with ( + patch( + "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 + ), + patch( + "python_otbr_api.OTBR.get_border_agent_id", + side_effect=python_otbr_api.GetBorderAgentIdNotSupportedError, + ), + ): + assert not await hass.config_entries.async_setup(config_entry.entry_id) async def test_config_entry_update(hass: HomeAssistant) -> None: @@ -289,7 +317,6 @@ async def test_config_entry_update(hass: HomeAssistant) -> None: domain=otbr.DOMAIN, options={}, title="My OTBR", - unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) mock_api = MagicMock() @@ -312,7 +339,6 @@ async def test_config_entry_update(hass: HomeAssistant) -> None: mock_otrb_api.assert_called_once_with(new_config_entry_data["url"], ANY, ANY) -@pytest.mark.usefixtures("supervisor_client") async def test_remove_entry( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan ) -> None: @@ -320,37 +346,104 @@ async def test_remove_entry( aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text="0E") + assert await otbr.async_get_active_dataset_tlvs(hass) == bytes.fromhex("0E") + config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] await hass.config_entries.async_remove(config_entry.entry_id) + with pytest.raises(HomeAssistantError): + assert await otbr.async_get_active_dataset_tlvs(hass) -@pytest.mark.parametrize( - ("source", "unique_id", "updated_unique_id"), - [ - (SOURCE_HASSIO, None, None), - (SOURCE_HASSIO, "abcd", "abcd"), - (SOURCE_USER, None, TEST_BORDER_AGENT_ID.hex()), - (SOURCE_USER, "abcd", TEST_BORDER_AGENT_ID.hex()), - ], -) -async def test_update_unique_id( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - source: str, - unique_id: str | None, - updated_unique_id: str | None, + +async def test_get_active_dataset_tlvs( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan ) -> None: - """Test we update the unique id if extended address has changed.""" + """Test async_get_active_dataset_tlvs.""" - config_entry = MockConfigEntry( + mock_response = ( + "0E080000000000010000000300001035060004001FFFE00208F642646DA209B1C00708FDF57B5A" + "0FE2AAF60510DE98B5BA1A528FEE049D4B4B01835375030D4F70656E5468726561642048410102" + "25A40410F5DD18371BFD29E1A601EF6FFAD94C030C0402A0F7F8" + ) + + aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text=mock_response) + + assert await 
otbr.async_get_active_dataset_tlvs(hass) == bytes.fromhex( + mock_response + ) + + +async def test_get_active_dataset_tlvs_empty( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan +) -> None: + """Test async_get_active_dataset_tlvs.""" + + aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.NO_CONTENT) + assert await otbr.async_get_active_dataset_tlvs(hass) is None + + +async def test_get_active_dataset_tlvs_addon_not_installed(hass: HomeAssistant) -> None: + """Test async_get_active_dataset_tlvs when the multi-PAN addon is not installed.""" + + with pytest.raises(HomeAssistantError): + await otbr.async_get_active_dataset_tlvs(hass) + + +async def test_get_active_dataset_tlvs_404( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan +) -> None: + """Test async_get_active_dataset_tlvs with error.""" + + aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.NOT_FOUND) + with pytest.raises(HomeAssistantError): + await otbr.async_get_active_dataset_tlvs(hass) + + +async def test_get_active_dataset_tlvs_201( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan +) -> None: + """Test async_get_active_dataset_tlvs with error.""" + + aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.CREATED) + with pytest.raises(HomeAssistantError): + assert await otbr.async_get_active_dataset_tlvs(hass) + + +async def test_get_active_dataset_tlvs_invalid( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan +) -> None: + """Test async_get_active_dataset_tlvs with error.""" + + aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text="unexpected") + with pytest.raises(HomeAssistantError): + assert await otbr.async_get_active_dataset_tlvs(hass) + + +async def test_remove_extra_entries( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test we remove additional config entries.""" + + config_entry1 = MockConfigEntry( data=CONFIG_ENTRY_DATA_MULTIPAN, domain=otbr.DOMAIN, options={}, - source=source, title="Open Thread Border Router", - unique_id=unique_id, ) - config_entry.add_to_hass(hass) - assert await async_setup_component(hass, otbr.DOMAIN, {}) - config_entry = hass.config_entries.async_get_entry(config_entry.entry_id) - assert config_entry.unique_id == updated_unique_id + config_entry2 = MockConfigEntry( + data=CONFIG_ENTRY_DATA_THREAD, + domain=otbr.DOMAIN, + options={}, + title="Open Thread Border Router", + ) + config_entry1.add_to_hass(hass) + config_entry2.add_to_hass(hass) + assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 2 + with ( + patch( + "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 + ), + patch("homeassistant.components.otbr.util.compute_pskc"), + ): # Patch to speed up tests + assert await async_setup_component(hass, otbr.DOMAIN, {}) + assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 1 diff --git a/tests/components/otbr/test_silabs_multiprotocol.py b/tests/components/otbr/test_silabs_multiprotocol.py index c4123c25660..8d7bed13df6 100644 --- a/tests/components/otbr/test_silabs_multiprotocol.py +++ b/tests/components/otbr/test_silabs_multiprotocol.py @@ -1,10 +1,11 @@ """Test OTBR Silicon Labs Multiprotocol support.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest from python_otbr_api import ActiveDataSet, tlv_parser +from homeassistant.components import otbr from 
homeassistant.components.otbr import ( silabs_multiprotocol as otbr_silabs_multiprotocol, ) @@ -31,15 +32,10 @@ DATASET_CH16_PENDING = ( ) -@pytest.fixture(autouse=True) -def mock_supervisor_client(supervisor_client: AsyncMock) -> None: - """Mock supervisor client.""" - - async def test_async_change_channel( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: - """Test async_change_channel.""" + """Test test_async_change_channel.""" store = await dataset_store.async_get_store(hass) assert len(store.datasets) == 1 @@ -67,7 +63,7 @@ async def test_async_change_channel( async def test_async_change_channel_no_pending( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: - """Test async_change_channel when the pending dataset already expired.""" + """Test test_async_change_channel when the pending dataset already expired.""" store = await dataset_store.async_get_store(hass) assert len(store.datasets) == 1 @@ -99,7 +95,7 @@ async def test_async_change_channel_no_pending( async def test_async_change_channel_no_update( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: - """Test async_change_channel when we didn't get a dataset from the OTBR.""" + """Test test_async_change_channel when we didn't get a dataset from the OTBR.""" store = await dataset_store.async_get_store(hass) assert len(store.datasets) == 1 @@ -130,17 +126,6 @@ async def test_async_change_channel_no_otbr(hass: HomeAssistant) -> None: mock_set_channel.assert_not_awaited() -async def test_async_change_channel_non_matching_url( - hass: HomeAssistant, otbr_config_entry_multipan: str -) -> None: - """Test async_change_channel when otbr is not configured.""" - config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) - config_entry.runtime_data.url = OTBR_NON_MULTIPAN_URL - with patch("python_otbr_api.OTBR.set_channel") as mock_set_channel: - await otbr_silabs_multiprotocol.async_change_channel(hass, 16, delay=0) - mock_set_channel.assert_not_awaited() - - async def test_async_get_channel( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: @@ -184,18 +169,7 @@ async def test_async_get_channel_no_otbr(hass: HomeAssistant) -> None: """Test test_async_get_channel when otbr is not configured.""" with patch("python_otbr_api.OTBR.get_active_dataset") as mock_get_active_dataset: - assert await otbr_silabs_multiprotocol.async_get_channel(hass) is None - mock_get_active_dataset.assert_not_awaited() - - -async def test_async_get_channel_non_matching_url( - hass: HomeAssistant, otbr_config_entry_multipan: str -) -> None: - """Test async_change_channel when otbr is not configured.""" - config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) - config_entry.runtime_data.url = OTBR_NON_MULTIPAN_URL - with patch("python_otbr_api.OTBR.get_active_dataset") as mock_get_active_dataset: - assert await otbr_silabs_multiprotocol.async_get_channel(hass) is None + await otbr_silabs_multiprotocol.async_get_channel(hass) mock_get_active_dataset.assert_not_awaited() @@ -204,11 +178,11 @@ async def test_async_get_channel_non_matching_url( [(OTBR_MULTIPAN_URL, True), (OTBR_NON_MULTIPAN_URL, False)], ) async def test_async_using_multipan( - hass: HomeAssistant, otbr_config_entry_multipan: str, url: str, expected: bool + hass: HomeAssistant, otbr_config_entry_multipan, url: str, expected: bool ) -> None: """Test async_change_channel when otbr is not configured.""" - config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) - config_entry.runtime_data.url = url + data: 
otbr.OTBRData = hass.data[otbr.DOMAIN] + data.url = url assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is expected @@ -217,12 +191,3 @@ async def test_async_using_multipan_no_otbr(hass: HomeAssistant) -> None: """Test async_change_channel when otbr is not configured.""" assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is False - - -async def test_async_using_multipan_non_matching_url( - hass: HomeAssistant, otbr_config_entry_multipan: str -) -> None: - """Test async_change_channel when otbr is not configured.""" - config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) - config_entry.runtime_data.url = OTBR_NON_MULTIPAN_URL - assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is False diff --git a/tests/components/otbr/test_util.py b/tests/components/otbr/test_util.py index c11d8fe5736..3b1edcfeb5b 100644 --- a/tests/components/otbr/test_util.py +++ b/tests/components/otbr/test_util.py @@ -1,6 +1,6 @@ """Test OTBR Utility functions.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest import python_otbr_api @@ -13,11 +13,6 @@ OTBR_MULTIPAN_URL = "http://core-silabs-multiprotocol:8081" OTBR_NON_MULTIPAN_URL = "/dev/ttyAMA1" -@pytest.fixture(autouse=True) -def mock_supervisor_client(supervisor_client: AsyncMock) -> None: - """Mock supervisor client.""" - - async def test_get_allowed_channel( hass: HomeAssistant, multiprotocol_addon_manager_mock ) -> None: @@ -36,37 +31,28 @@ async def test_get_allowed_channel( assert await otbr.util.get_allowed_channel(hass, OTBR_NON_MULTIPAN_URL) is None -async def test_factory_reset( - hass: HomeAssistant, - otbr_config_entry_multipan: str, - get_border_agent_id: AsyncMock, -) -> None: +async def test_factory_reset(hass: HomeAssistant, otbr_config_entry_multipan) -> None: """Test factory_reset.""" - new_ba_id = b"new_ba_id" - get_border_agent_id.return_value = new_ba_id - config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) - assert config_entry.unique_id != new_ba_id.hex() + data: otbr.OTBRData = hass.data[otbr.DOMAIN] + with ( patch("python_otbr_api.OTBR.factory_reset") as factory_reset_mock, patch( "python_otbr_api.OTBR.delete_active_dataset" ) as delete_active_dataset_mock, ): - await config_entry.runtime_data.factory_reset(hass) + await data.factory_reset() delete_active_dataset_mock.assert_not_called() factory_reset_mock.assert_called_once_with() - # Check the unique_id is updated - config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) - assert config_entry.unique_id == new_ba_id.hex() - async def test_factory_reset_not_supported( - hass: HomeAssistant, otbr_config_entry_multipan: str + hass: HomeAssistant, otbr_config_entry_multipan ) -> None: """Test factory_reset.""" - config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + data: otbr.OTBRData = hass.data[otbr.DOMAIN] + with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -76,17 +62,18 @@ async def test_factory_reset_not_supported( "python_otbr_api.OTBR.delete_active_dataset" ) as delete_active_dataset_mock, ): - await config_entry.runtime_data.factory_reset(hass) + await data.factory_reset() delete_active_dataset_mock.assert_called_once_with() factory_reset_mock.assert_called_once_with() async def test_factory_reset_error_1( - hass: HomeAssistant, otbr_config_entry_multipan: str + hass: HomeAssistant, otbr_config_entry_multipan ) -> None: """Test factory_reset.""" - config_entry = 
hass.config_entries.async_get_entry(otbr_config_entry_multipan) + data: otbr.OTBRData = hass.data[otbr.DOMAIN] + with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -99,17 +86,18 @@ async def test_factory_reset_error_1( HomeAssistantError, ), ): - await config_entry.runtime_data.factory_reset(hass) + await data.factory_reset() delete_active_dataset_mock.assert_not_called() factory_reset_mock.assert_called_once_with() async def test_factory_reset_error_2( - hass: HomeAssistant, otbr_config_entry_multipan: str + hass: HomeAssistant, otbr_config_entry_multipan ) -> None: """Test factory_reset.""" - config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + data: otbr.OTBRData = hass.data[otbr.DOMAIN] + with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -123,7 +111,7 @@ async def test_factory_reset_error_2( HomeAssistantError, ), ): - await config_entry.runtime_data.factory_reset(hass) + await data.factory_reset() delete_active_dataset_mock.assert_called_once_with() factory_reset_mock.assert_called_once_with() diff --git a/tests/components/otbr/test_websocket_api.py b/tests/components/otbr/test_websocket_api.py index 7311b194df4..df55d38d3b7 100644 --- a/tests/components/otbr/test_websocket_api.py +++ b/tests/components/otbr/test_websocket_api.py @@ -1,6 +1,6 @@ """Test OTBR Websocket API.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest import python_otbr_api @@ -29,11 +29,6 @@ async def websocket_client( return await hass_ws_client(hass) -@pytest.fixture(autouse=True) -def mock_supervisor_client(supervisor_client: AsyncMock) -> None: - """Mock supervisor client.""" - - async def test_get_info( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -41,14 +36,11 @@ async def test_get_info( websocket_client, ) -> None: """Test async_get_info.""" - extended_pan_id = "ABCD1234" with ( patch( "python_otbr_api.OTBR.get_active_dataset", - return_value=python_otbr_api.ActiveDataSet( - channel=16, extended_pan_id=extended_pan_id - ), + return_value=python_otbr_api.ActiveDataSet(channel=16), ), patch( "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 @@ -66,16 +58,12 @@ async def test_get_info( msg = await websocket_client.receive_json() assert msg["success"] - extended_address = TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex() assert msg["result"] == { - extended_address: { - "url": BASE_URL, - "active_dataset_tlvs": DATASET_CH16.hex().lower(), - "channel": 16, - "border_agent_id": TEST_BORDER_AGENT_ID.hex(), - "extended_address": extended_address, - "extended_pan_id": extended_pan_id.lower(), - } + "url": BASE_URL, + "active_dataset_tlvs": DATASET_CH16.hex().lower(), + "channel": 16, + "border_agent_id": TEST_BORDER_AGENT_ID.hex(), + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), } @@ -133,10 +121,6 @@ async def test_create_network( patch( "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 ) as get_active_dataset_tlvs_mock, - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), patch( "homeassistant.components.thread.dataset_store.DatasetStore.async_add" ) as mock_add, @@ -145,12 +129,7 @@ async def test_create_network( return_value=0x1234, ), ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/create_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - } - ) + await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) msg = await 
websocket_client.receive_json() assert msg["success"] @@ -177,9 +156,7 @@ async def test_create_network_no_entry( """Test create network.""" await async_setup_component(hass, "otbr", {}) websocket_client = await hass_ws_client(hass) - await websocket_client.send_json_auto_id( - {"type": "otbr/create_network", "extended_address": "blah"} - ) + await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) msg = await websocket_client.receive_json() assert not msg["success"] @@ -193,22 +170,11 @@ async def test_create_network_fails_1( websocket_client, ) -> None: """Test create network.""" - with ( - patch( - "python_otbr_api.OTBR.set_enabled", - side_effect=python_otbr_api.OTBRError, - ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), + with patch( + "python_otbr_api.OTBR.set_enabled", + side_effect=python_otbr_api.OTBRError, ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/create_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - } - ) + await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) msg = await websocket_client.receive_json() assert not msg["success"] @@ -231,17 +197,8 @@ async def test_create_network_fails_2( side_effect=python_otbr_api.OTBRError, ), patch("python_otbr_api.OTBR.factory_reset"), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/create_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - } - ) + await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) msg = await websocket_client.receive_json() assert not msg["success"] @@ -266,17 +223,8 @@ async def test_create_network_fails_3( patch( "python_otbr_api.OTBR.factory_reset", ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/create_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - } - ) + await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) msg = await websocket_client.receive_json() assert not msg["success"] @@ -300,17 +248,8 @@ async def test_create_network_fails_4( patch( "python_otbr_api.OTBR.factory_reset", ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/create_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - } - ) + await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) msg = await websocket_client.receive_json() assert not msg["success"] @@ -329,17 +268,8 @@ async def test_create_network_fails_5( patch("python_otbr_api.OTBR.create_active_dataset"), patch("python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=None), patch("python_otbr_api.OTBR.factory_reset"), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/create_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - } - ) + await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) msg = await websocket_client.receive_json() assert not msg["success"] @@ -361,69 +291,14 @@ async def test_create_network_fails_6( 
"python_otbr_api.OTBR.factory_reset", side_effect=python_otbr_api.OTBRError, ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/create_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - } - ) + await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) msg = await websocket_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == "factory_reset_failed" -async def test_create_network_fails_7( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - otbr_config_entry_multipan, - websocket_client, -) -> None: - """Test create network.""" - with patch( - "python_otbr_api.OTBR.get_extended_address", - side_effect=python_otbr_api.OTBRError, - ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/create_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - } - ) - msg = await websocket_client.receive_json() - - assert not msg["success"] - assert msg["error"]["code"] == "get_extended_address_failed" - - -async def test_create_network_fails_8( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - otbr_config_entry_multipan, - websocket_client, -) -> None: - """Test create network.""" - with patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/create_network", - "extended_address": "blah", - } - ) - msg = await websocket_client.receive_json() - - assert not msg["success"] - assert msg["error"]["code"] == "unknown_router" - - async def test_set_network( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -437,10 +312,6 @@ async def test_set_network( dataset_id = list(dataset_store.datasets)[1] with ( - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), patch( "python_otbr_api.OTBR.set_active_dataset_tlvs" ) as set_active_dataset_tlvs_mock, @@ -449,7 +320,6 @@ async def test_set_network( await websocket_client.send_json_auto_id( { "type": "otbr/set_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": dataset_id, } ) @@ -475,7 +345,6 @@ async def test_set_network_no_entry( await websocket_client.send_json_auto_id( { "type": "otbr/set_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": "abc", } ) @@ -499,19 +368,14 @@ async def test_set_network_channel_conflict( multiprotocol_addon_manager_mock.async_get_channel.return_value = 15 - with patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/set_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - "dataset_id": dataset_id, - } - ) + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_network", + "dataset_id": dataset_id, + } + ) - msg = await websocket_client.receive_json() + msg = await websocket_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == "channel_conflict" @@ -525,19 +389,14 @@ async def test_set_network_unknown_dataset( ) -> None: """Test set network.""" - with patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/set_network", - "extended_address": 
TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - "dataset_id": "abc", - } - ) + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_network", + "dataset_id": "abc", + } + ) - msg = await websocket_client.receive_json() + msg = await websocket_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == "unknown_dataset" @@ -554,20 +413,13 @@ async def test_set_network_fails_1( dataset_store = await thread.dataset_store.async_get_store(hass) dataset_id = list(dataset_store.datasets)[1] - with ( - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), - patch( - "python_otbr_api.OTBR.set_enabled", - side_effect=python_otbr_api.OTBRError, - ), + with patch( + "python_otbr_api.OTBR.set_enabled", + side_effect=python_otbr_api.OTBRError, ): await websocket_client.send_json_auto_id( { "type": "otbr/set_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": dataset_id, } ) @@ -589,10 +441,6 @@ async def test_set_network_fails_2( dataset_id = list(dataset_store.datasets)[1] with ( - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), patch( "python_otbr_api.OTBR.set_enabled", ), @@ -604,7 +452,6 @@ async def test_set_network_fails_2( await websocket_client.send_json_auto_id( { "type": "otbr/set_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": dataset_id, } ) @@ -626,10 +473,6 @@ async def test_set_network_fails_3( dataset_id = list(dataset_store.datasets)[1] with ( - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), patch( "python_otbr_api.OTBR.set_enabled", side_effect=[None, python_otbr_api.OTBRError], @@ -641,7 +484,6 @@ async def test_set_network_fails_3( await websocket_client.send_json_auto_id( { "type": "otbr/set_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": dataset_id, } ) @@ -651,54 +493,6 @@ async def test_set_network_fails_3( assert msg["error"]["code"] == "set_enabled_failed" -async def test_set_network_fails_4( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - otbr_config_entry_multipan, - websocket_client, -) -> None: - """Test set network.""" - with patch( - "python_otbr_api.OTBR.get_extended_address", - side_effect=python_otbr_api.OTBRError, - ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/set_network", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - "dataset_id": "abc", - } - ) - msg = await websocket_client.receive_json() - - assert not msg["success"] - assert msg["error"]["code"] == "get_extended_address_failed" - - -async def test_set_network_fails_5( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - otbr_config_entry_multipan, - websocket_client, -) -> None: - """Test set network.""" - with patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/set_network", - "extended_address": "blah", - "dataset_id": "abc", - } - ) - msg = await websocket_client.receive_json() - - assert not msg["success"] - assert msg["error"]["code"] == "unknown_router" - - async def test_set_channel( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -707,19 +501,9 @@ async def test_set_channel( ) -> None: """Test set channel.""" - with ( - patch( - "python_otbr_api.OTBR.get_extended_address", - 
return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), - patch("python_otbr_api.OTBR.set_channel"), - ): + with patch("python_otbr_api.OTBR.set_channel"): await websocket_client.send_json_auto_id( - { - "type": "otbr/set_channel", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - "channel": 12, - } + {"type": "otbr/set_channel", "channel": 12} ) msg = await websocket_client.receive_json() @@ -735,19 +519,9 @@ async def test_set_channel_multiprotocol( ) -> None: """Test set channel.""" - with ( - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), - patch("python_otbr_api.OTBR.set_channel"), - ): + with patch("python_otbr_api.OTBR.set_channel"): await websocket_client.send_json_auto_id( - { - "type": "otbr/set_channel", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - "channel": 12, - } + {"type": "otbr/set_channel", "channel": 12} ) msg = await websocket_client.receive_json() @@ -764,11 +538,7 @@ async def test_set_channel_no_entry( await async_setup_component(hass, "otbr", {}) websocket_client = await hass_ws_client(hass) await websocket_client.send_json_auto_id( - { - "type": "otbr/set_channel", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - "channel": 12, - } + {"type": "otbr/set_channel", "channel": 12} ) msg = await websocket_client.receive_json() @@ -776,79 +546,21 @@ async def test_set_channel_no_entry( assert msg["error"]["code"] == "not_loaded" -async def test_set_channel_fails_1( +async def test_set_channel_fails( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_thread, websocket_client, ) -> None: """Test set channel.""" - with ( - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), - patch( - "python_otbr_api.OTBR.set_channel", - side_effect=python_otbr_api.OTBRError, - ), + with patch( + "python_otbr_api.OTBR.set_channel", + side_effect=python_otbr_api.OTBRError, ): await websocket_client.send_json_auto_id( - { - "type": "otbr/set_channel", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - "channel": 12, - } + {"type": "otbr/set_channel", "channel": 12} ) msg = await websocket_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == "set_channel_failed" - - -async def test_set_channel_fails_2( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - otbr_config_entry_multipan, - websocket_client, -) -> None: - """Test set channel.""" - with patch( - "python_otbr_api.OTBR.get_extended_address", - side_effect=python_otbr_api.OTBRError, - ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/set_channel", - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), - "channel": 12, - } - ) - msg = await websocket_client.receive_json() - - assert not msg["success"] - assert msg["error"]["code"] == "get_extended_address_failed" - - -async def test_set_channel_fails_3( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - otbr_config_entry_multipan, - websocket_client, -) -> None: - """Test set channel.""" - with patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ): - await websocket_client.send_json_auto_id( - { - "type": "otbr/set_channel", - "extended_address": "blah", - "channel": 12, - } - ) - msg = await websocket_client.receive_json() - - assert not msg["success"] - assert msg["error"]["code"] == "unknown_router" diff --git a/tests/components/otp/conftest.py 
b/tests/components/otp/conftest.py index 7926be1e48e..7443d772c69 100644 --- a/tests/components/otp/conftest.py +++ b/tests/components/otp/conftest.py @@ -14,7 +14,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: +def mock_setup_entry() -> Generator[AsyncMock, None, None]: """Override async_setup_entry.""" with patch( "homeassistant.components.otp.async_setup_entry", return_value=True @@ -23,7 +23,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_pyotp() -> Generator[MagicMock]: +def mock_pyotp() -> Generator[MagicMock, None, None]: """Mock a pyotp.""" with ( patch( diff --git a/tests/components/ourgroceries/__init__.py b/tests/components/ourgroceries/__init__.py index 4ebbea46229..6f90cb7ea1b 100644 --- a/tests/components/ourgroceries/__init__.py +++ b/tests/components/ourgroceries/__init__.py @@ -1,10 +1,6 @@ """Tests for the OurGroceries integration.""" -from typing import Any - -def items_to_shopping_list( - items: list, version_id: str = "1" -) -> dict[str, dict[str, Any]]: +def items_to_shopping_list(items: list, version_id: str = "1") -> dict[dict[list]]: """Convert a list of items into a shopping list.""" return {"list": {"versionId": version_id, "items": items}} diff --git a/tests/components/ourgroceries/conftest.py b/tests/components/ourgroceries/conftest.py index b3fb4e9bcc6..bc8c632b511 100644 --- a/tests/components/ourgroceries/conftest.py +++ b/tests/components/ourgroceries/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the OurGroceries tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.ourgroceries import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME diff --git a/tests/components/ourgroceries/test_todo.py b/tests/components/ourgroceries/test_todo.py index d364881b624..672e2e14447 100644 --- a/tests/components/ourgroceries/test_todo.py +++ b/tests/components/ourgroceries/test_todo.py @@ -7,14 +7,8 @@ from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.ourgroceries.coordinator import SCAN_INTERVAL -from homeassistant.components.todo import ( - ATTR_ITEM, - ATTR_RENAME, - ATTR_STATUS, - DOMAIN as TODO_DOMAIN, - TodoServices, -) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE +from homeassistant.components.todo import DOMAIN as TODO_DOMAIN +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity @@ -75,9 +69,9 @@ async def test_add_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "Soda"}, - target={ATTR_ENTITY_ID: "todo.test_list"}, + "add_item", + {"item": "Soda"}, + target={"entity_id": "todo.test_list"}, blocking=True, ) @@ -114,9 +108,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "12345", ATTR_STATUS: "completed"}, - target={ATTR_ENTITY_ID: "todo.test_list"}, + "update_item", + {"item": "12345", "status": "completed"}, + target={"entity_id": "todo.test_list"}, blocking=True, ) assert ourgroceries.toggle_item_crossed_off.called @@ -138,9 +132,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "12345", ATTR_STATUS: "needs_action"}, - target={ATTR_ENTITY_ID: 
"todo.test_list"}, + "update_item", + {"item": "12345", "status": "needs_action"}, + target={"entity_id": "todo.test_list"}, blocking=True, ) assert ourgroceries.toggle_item_crossed_off.called @@ -187,9 +181,9 @@ async def test_update_todo_item_summary( await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "12345", ATTR_RENAME: "Milk"}, - target={ATTR_ENTITY_ID: "todo.test_list"}, + "update_item", + {"item": "12345", "rename": "Milk"}, + target={"entity_id": "todo.test_list"}, blocking=True, ) assert ourgroceries.change_item_on_list @@ -224,9 +218,9 @@ async def test_remove_todo_item( await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: ["12345", "54321"]}, - target={ATTR_ENTITY_ID: "todo.test_list"}, + "remove_item", + {"item": ["12345", "54321"]}, + target={"entity_id": "todo.test_list"}, blocking=True, ) assert ourgroceries.remove_item_from_list.call_count == 2 diff --git a/tests/components/overkiz/conftest.py b/tests/components/overkiz/conftest.py index 151d0719ddb..8ab26e3587b 100644 --- a/tests/components/overkiz/conftest.py +++ b/tests/components/overkiz/conftest.py @@ -1,9 +1,9 @@ """Configuration for overkiz tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest +from typing_extensions import Generator from homeassistant.components.overkiz.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/overkiz/test_config_flow.py b/tests/components/overkiz/test_config_flow.py index cef5ef350a9..50870ae85fe 100644 --- a/tests/components/overkiz/test_config_flow.py +++ b/tests/components/overkiz/test_config_flow.py @@ -573,7 +573,15 @@ async def test_cloud_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "cloud" @@ -615,7 +623,15 @@ async def test_cloud_reauth_wrong_account(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "cloud" @@ -656,7 +672,15 @@ async def test_local_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "local_or_cloud" @@ -707,7 +731,15 @@ async def test_local_reauth_wrong_account(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) assert 
result["type"] is FlowResultType.FORM assert result["step_id"] == "local_or_cloud" diff --git a/tests/components/ovo_energy/test_config_flow.py b/tests/components/ovo_energy/test_config_flow.py index cfe679a254a..00899e745b9 100644 --- a/tests/components/ovo_energy/test_config_flow.py +++ b/tests/components/ovo_energy/test_config_flow.py @@ -117,23 +117,23 @@ async def test_full_flow_implementation(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["data"][CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME] assert result2["data"][CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD] - assert result2["data"][CONF_ACCOUNT] == FIXTURE_USER_INPUT[CONF_ACCOUNT] async def test_reauth_authorization_error(hass: HomeAssistant) -> None: """Test we show user form on authorization error.""" - mock_config = MockConfigEntry( - domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT - ) - mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", return_value=False, ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + data=FIXTURE_USER_INPUT, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth" + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], FIXTURE_REAUTH_INPUT, @@ -141,26 +141,25 @@ async def test_reauth_authorization_error(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth_confirm" + assert result2["step_id"] == "reauth" assert result2["errors"] == {"base": "authorization_error"} async def test_reauth_connection_error(hass: HomeAssistant) -> None: """Test we show user form on connection error.""" - mock_config = MockConfigEntry( - domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT - ) - mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {} - with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", side_effect=aiohttp.ClientError, ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + data=FIXTURE_USER_INPUT, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth" + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], FIXTURE_REAUTH_INPUT, @@ -168,32 +167,29 @@ async def test_reauth_connection_error(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth_confirm" + assert result2["step_id"] == "reauth" assert result2["errors"] == {"base": "connection_error"} async def test_reauth_flow(hass: HomeAssistant) -> None: """Test reauth works.""" - mock_config = MockConfigEntry( - domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT - ) - mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {} - with patch( 
"homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", return_value=False, ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - FIXTURE_REAUTH_INPUT, + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT ) + mock_config.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + data=FIXTURE_USER_INPUT, + ) + assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" + assert result["step_id"] == "reauth" assert result["errors"] == {"base": "authorization_error"} with ( diff --git a/tests/components/owntracks/test_config_flow.py b/tests/components/owntracks/test_config_flow.py index a80685e9b1e..818524c1c50 100644 --- a/tests/components/owntracks/test_config_flow.py +++ b/tests/components/owntracks/test_config_flow.py @@ -8,9 +8,9 @@ from homeassistant import config_entries from homeassistant.components.owntracks import config_flow from homeassistant.components.owntracks.config_flow import CONF_CLOUDHOOK, CONF_SECRET from homeassistant.components.owntracks.const import DOMAIN +from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -51,7 +51,7 @@ def mock_not_supports_encryption(): yield -async def init_config_flow(hass: HomeAssistant) -> config_flow.OwnTracksFlow: +async def init_config_flow(hass): """Init a configuration flow.""" await async_process_ha_core_config( hass, @@ -94,14 +94,13 @@ async def test_import_setup(hass: HomeAssistant) -> None: async def test_abort_if_already_setup(hass: HomeAssistant) -> None: """Test that we can't add more than one instance.""" + flow = await init_config_flow(hass) + MockConfigEntry(domain=DOMAIN, data={}).add_to_hass(hass) assert hass.config_entries.async_entries(DOMAIN) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - # Should fail, already setup (flow) + result = await flow.async_step_user({}) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" diff --git a/tests/components/owntracks/test_device_tracker.py b/tests/components/owntracks/test_device_tracker.py index 93f40d0ae3d..0648a94c70b 100644 --- a/tests/components/owntracks/test_device_tracker.py +++ b/tests/components/owntracks/test_device_tracker.py @@ -1,10 +1,8 @@ """The tests for the Owntracks device tracker.""" import base64 -from collections.abc import Callable, Generator import json import pickle -from typing import Any from unittest.mock import patch from nacl.encoding import Base64Encoder @@ -20,8 +18,6 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, async_fire_mqtt_message from tests.typing import ClientSessionGenerator, MqttMockHAClient -type OwnTracksContextFactory = Callable[[], owntracks.OwnTracksContext] - USER = "greg" DEVICE = "phone" @@ -295,7 +291,7 @@ def setup_comp( hass: HomeAssistant, mock_device_tracker_conf: list[Device], mqtt_mock: MqttMockHAClient, -) -> None: +): """Initialize components.""" hass.loop.run_until_complete(async_setup_component(hass, "device_tracker", {})) @@ -306,9 +302,7 @@ def setup_comp( 
hass.states.async_set("zone.outer", "zoning", OUTER_ZONE) -async def setup_owntracks( - hass: HomeAssistant, config: dict[str, Any], ctx_cls=owntracks.OwnTracksContext -) -> None: +async def setup_owntracks(hass, config, ctx_cls=owntracks.OwnTracksContext): """Set up OwnTracks.""" MockConfigEntry( domain="owntracks", data={"webhook_id": "owntracks_test", "secret": "abcd"} @@ -320,7 +314,7 @@ async def setup_owntracks( @pytest.fixture -def context(hass: HomeAssistant, setup_comp: None) -> OwnTracksContextFactory: +def context(hass, setup_comp): """Set up the mocked context.""" orig_context = owntracks.OwnTracksContext context = None @@ -350,9 +344,7 @@ def context(hass: HomeAssistant, setup_comp: None) -> OwnTracksContextFactory: return get_context -async def send_message( - hass: HomeAssistant, topic: str, message: dict[str, Any], corrupt: bool = False -) -> None: +async def send_message(hass, topic, message, corrupt=False): """Test the sending of a message.""" str_message = json.dumps(message) if corrupt: @@ -364,73 +356,65 @@ async def send_message( await hass.async_block_till_done() -def assert_location_state(hass: HomeAssistant, location: str) -> None: +def assert_location_state(hass, location): """Test the assertion of a location state.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.state == location -def assert_location_latitude(hass: HomeAssistant, latitude: float) -> None: +def assert_location_latitude(hass, latitude): """Test the assertion of a location latitude.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.attributes.get("latitude") == latitude -def assert_location_longitude(hass: HomeAssistant, longitude: float) -> None: +def assert_location_longitude(hass, longitude): """Test the assertion of a location longitude.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.attributes.get("longitude") == longitude -def assert_location_accuracy(hass: HomeAssistant, accuracy: int) -> None: +def assert_location_accuracy(hass, accuracy): """Test the assertion of a location accuracy.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.attributes.get("gps_accuracy") == accuracy -def assert_location_source_type(hass: HomeAssistant, source_type: str) -> None: +def assert_location_source_type(hass, source_type): """Test the assertion of source_type.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.attributes.get("source_type") == source_type -def assert_mobile_tracker_state( - hass: HomeAssistant, location: str, beacon: str = IBEACON_DEVICE -) -> None: +def assert_mobile_tracker_state(hass, location, beacon=IBEACON_DEVICE): """Test the assertion of a mobile beacon tracker state.""" dev_id = MOBILE_BEACON_FMT.format(beacon) state = hass.states.get(dev_id) assert state.state == location -def assert_mobile_tracker_latitude( - hass: HomeAssistant, latitude: float, beacon: str = IBEACON_DEVICE -) -> None: +def assert_mobile_tracker_latitude(hass, latitude, beacon=IBEACON_DEVICE): """Test the assertion of a mobile beacon tracker latitude.""" dev_id = MOBILE_BEACON_FMT.format(beacon) state = hass.states.get(dev_id) assert state.attributes.get("latitude") == latitude -def assert_mobile_tracker_accuracy( - hass: HomeAssistant, accuracy: int, beacon: str = IBEACON_DEVICE -) -> None: +def assert_mobile_tracker_accuracy(hass, accuracy, beacon=IBEACON_DEVICE): """Test the assertion of a mobile beacon tracker accuracy.""" dev_id = MOBILE_BEACON_FMT.format(beacon) state = hass.states.get(dev_id) assert state.attributes.get("gps_accuracy") == 
accuracy -@pytest.mark.usefixtures("context") -async def test_location_invalid_devid(hass: HomeAssistant) -> None: +async def test_location_invalid_devid(hass: HomeAssistant, context) -> None: """Test the update of a location.""" await send_message(hass, "owntracks/paulus/nexus-5x", LOCATION_MESSAGE) state = hass.states.get("device_tracker.paulus_nexus_5x") assert state.state == "outer" -@pytest.mark.usefixtures("context") -async def test_location_update(hass: HomeAssistant) -> None: +async def test_location_update(hass: HomeAssistant, context) -> None: """Test the update of a location.""" await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -440,8 +424,7 @@ async def test_location_update(hass: HomeAssistant) -> None: assert_location_state(hass, "outer") -@pytest.mark.usefixtures("context") -async def test_location_update_no_t_key(hass: HomeAssistant) -> None: +async def test_location_update_no_t_key(hass: HomeAssistant, context) -> None: """Test the update of a location when message does not contain 't'.""" message = LOCATION_MESSAGE.copy() message.pop("t") @@ -453,8 +436,7 @@ async def test_location_update_no_t_key(hass: HomeAssistant) -> None: assert_location_state(hass, "outer") -@pytest.mark.usefixtures("context") -async def test_location_inaccurate_gps(hass: HomeAssistant) -> None: +async def test_location_inaccurate_gps(hass: HomeAssistant, context) -> None: """Test the location for inaccurate GPS information.""" await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_INACCURATE) @@ -464,8 +446,7 @@ async def test_location_inaccurate_gps(hass: HomeAssistant) -> None: assert_location_longitude(hass, LOCATION_MESSAGE["lon"]) -@pytest.mark.usefixtures("context") -async def test_location_zero_accuracy_gps(hass: HomeAssistant) -> None: +async def test_location_zero_accuracy_gps(hass: HomeAssistant, context) -> None: """Ignore the location for zero accuracy GPS information.""" await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_ZERO_ACCURACY) @@ -477,9 +458,7 @@ async def test_location_zero_accuracy_gps(hass: HomeAssistant) -> None: # ------------------------------------------------------------------------ # GPS based event entry / exit testing -async def test_event_gps_entry_exit( - hass: HomeAssistant, context: OwnTracksContextFactory -) -> None: +async def test_event_gps_entry_exit(hass: HomeAssistant, context) -> None: """Test the entry event.""" # Entering the owntracks circular region named "inner" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) @@ -517,9 +496,7 @@ async def test_event_gps_entry_exit( assert_location_accuracy(hass, LOCATION_MESSAGE["acc"]) -async def test_event_gps_with_spaces( - hass: HomeAssistant, context: OwnTracksContextFactory -) -> None: +async def test_event_gps_with_spaces(hass: HomeAssistant, context) -> None: """Test the entry event.""" message = build_message({"desc": "inner 2"}, REGION_GPS_ENTER_MESSAGE) await send_message(hass, EVENT_TOPIC, message) @@ -532,8 +509,7 @@ async def test_event_gps_with_spaces( assert not context().regions_entered[USER] -@pytest.mark.usefixtures("context") -async def test_event_gps_entry_inaccurate(hass: HomeAssistant) -> None: +async def test_event_gps_entry_inaccurate(hass: HomeAssistant, context) -> None: """Test the event for inaccurate entry.""" # Set location to the outer zone. 
await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -546,9 +522,7 @@ async def test_event_gps_entry_inaccurate(hass: HomeAssistant) -> None: assert_location_state(hass, "inner") -async def test_event_gps_entry_exit_inaccurate( - hass: HomeAssistant, context: OwnTracksContextFactory -) -> None: +async def test_event_gps_entry_exit_inaccurate(hass: HomeAssistant, context) -> None: """Test the event for inaccurate exit.""" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) @@ -568,9 +542,7 @@ async def test_event_gps_entry_exit_inaccurate( assert not context().regions_entered[USER] -async def test_event_gps_entry_exit_zero_accuracy( - hass: HomeAssistant, context: OwnTracksContextFactory -) -> None: +async def test_event_gps_entry_exit_zero_accuracy(hass: HomeAssistant, context) -> None: """Test entry/exit events with accuracy zero.""" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE_ZERO) @@ -590,8 +562,9 @@ async def test_event_gps_entry_exit_zero_accuracy( assert not context().regions_entered[USER] -@pytest.mark.usefixtures("context") -async def test_event_gps_exit_outside_zone_sets_away(hass: HomeAssistant) -> None: +async def test_event_gps_exit_outside_zone_sets_away( + hass: HomeAssistant, context +) -> None: """Test the event for exit zone.""" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) assert_location_state(hass, "inner") @@ -604,8 +577,7 @@ async def test_event_gps_exit_outside_zone_sets_away(hass: HomeAssistant) -> Non assert_location_state(hass, STATE_NOT_HOME) -@pytest.mark.usefixtures("context") -async def test_event_gps_entry_exit_right_order(hass: HomeAssistant) -> None: +async def test_event_gps_entry_exit_right_order(hass: HomeAssistant, context) -> None: """Test the event for ordering.""" # Enter inner zone # Set location to the outer zone. 
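# Aside on the pytest idioms exchanged in the surrounding owntracks hunks
# (an illustrative sketch only, not part of the diff; the fixture body and
# test names below are hypothetical). A test that only needs a fixture for
# its side effects can request it with @pytest.mark.usefixtures and drop the
# parameter, while a test that calls the fixture's return value keeps it as
# an argument.
import pytest


@pytest.fixture
def context():
    """Return a factory, loosely mirroring the shape of the owntracks fixture."""
    state = {"regions_entered": {"greg": []}}
    return lambda: state


@pytest.mark.usefixtures("context")
def test_side_effects_only() -> None:
    """The fixture still runs its setup; no argument is injected."""
    assert True


def test_uses_factory(context) -> None:
    """The factory is called directly, so it stays a parameter."""
    assert context()["regions_entered"]["greg"] == []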
@@ -630,8 +602,7 @@ async def test_event_gps_entry_exit_right_order(hass: HomeAssistant) -> None: assert_location_state(hass, "outer") -@pytest.mark.usefixtures("context") -async def test_event_gps_entry_exit_wrong_order(hass: HomeAssistant) -> None: +async def test_event_gps_entry_exit_wrong_order(hass: HomeAssistant, context) -> None: """Test the event for wrong order.""" # Enter inner zone await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) @@ -654,8 +625,7 @@ async def test_event_gps_entry_exit_wrong_order(hass: HomeAssistant) -> None: assert_location_state(hass, "outer") -@pytest.mark.usefixtures("context") -async def test_event_gps_entry_unknown_zone(hass: HomeAssistant) -> None: +async def test_event_gps_entry_unknown_zone(hass: HomeAssistant, context) -> None: """Test the event for unknown zone.""" # Just treat as location update message = build_message({"desc": "unknown"}, REGION_GPS_ENTER_MESSAGE) @@ -664,8 +634,7 @@ async def test_event_gps_entry_unknown_zone(hass: HomeAssistant) -> None: assert_location_state(hass, "inner") -@pytest.mark.usefixtures("context") -async def test_event_gps_exit_unknown_zone(hass: HomeAssistant) -> None: +async def test_event_gps_exit_unknown_zone(hass: HomeAssistant, context) -> None: """Test the event for unknown zone.""" # Just treat as location update message = build_message({"desc": "unknown"}, REGION_GPS_LEAVE_MESSAGE) @@ -674,8 +643,7 @@ async def test_event_gps_exit_unknown_zone(hass: HomeAssistant) -> None: assert_location_state(hass, "outer") -@pytest.mark.usefixtures("context") -async def test_event_entry_zone_loading_dash(hass: HomeAssistant) -> None: +async def test_event_entry_zone_loading_dash(hass: HomeAssistant, context) -> None: """Test the event for zone landing.""" # Make sure the leading - is ignored # Owntracks uses this to switch on hold @@ -684,9 +652,7 @@ async def test_event_entry_zone_loading_dash(hass: HomeAssistant) -> None: assert_location_state(hass, "inner") -async def test_events_only_on( - hass: HomeAssistant, context: OwnTracksContextFactory -) -> None: +async def test_events_only_on(hass: HomeAssistant, context) -> None: """Test events_only config suppresses location updates.""" # Sending a location message that is not home await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_NOT_HOME) @@ -707,9 +673,7 @@ async def test_events_only_on( assert_location_state(hass, STATE_NOT_HOME) -async def test_events_only_off( - hass: HomeAssistant, context: OwnTracksContextFactory -) -> None: +async def test_events_only_off(hass: HomeAssistant, context) -> None: """Test when events_only is False.""" # Sending a location message that is not home await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_NOT_HOME) @@ -730,8 +694,7 @@ async def test_events_only_off( assert_location_state(hass, "outer") -@pytest.mark.usefixtures("context") -async def test_event_source_type_entry_exit(hass: HomeAssistant) -> None: +async def test_event_source_type_entry_exit(hass: HomeAssistant, context) -> None: """Test the entry and exit events of source type.""" # Entering the owntracks circular region named "inner" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) @@ -761,9 +724,7 @@ async def test_event_source_type_entry_exit(hass: HomeAssistant) -> None: # Region Beacon based event entry / exit testing -async def test_event_region_entry_exit( - hass: HomeAssistant, context: OwnTracksContextFactory -) -> None: +async def test_event_region_entry_exit(hass: HomeAssistant, context) -> None: """Test the entry event.""" 
# Seeing a beacon named "inner" await send_message(hass, EVENT_TOPIC, REGION_BEACON_ENTER_MESSAGE) @@ -802,9 +763,7 @@ async def test_event_region_entry_exit( assert_location_accuracy(hass, LOCATION_MESSAGE["acc"]) -async def test_event_region_with_spaces( - hass: HomeAssistant, context: OwnTracksContextFactory -) -> None: +async def test_event_region_with_spaces(hass: HomeAssistant, context) -> None: """Test the entry event.""" message = build_message({"desc": "inner 2"}, REGION_BEACON_ENTER_MESSAGE) await send_message(hass, EVENT_TOPIC, message) @@ -817,8 +776,9 @@ async def test_event_region_with_spaces( assert not context().regions_entered[USER] -@pytest.mark.usefixtures("context") -async def test_event_region_entry_exit_right_order(hass: HomeAssistant) -> None: +async def test_event_region_entry_exit_right_order( + hass: HomeAssistant, context +) -> None: """Test the event for ordering.""" # Enter inner zone # Set location to the outer zone. @@ -849,8 +809,9 @@ async def test_event_region_entry_exit_right_order(hass: HomeAssistant) -> None: assert_location_state(hass, "inner") -@pytest.mark.usefixtures("context") -async def test_event_region_entry_exit_wrong_order(hass: HomeAssistant) -> None: +async def test_event_region_entry_exit_wrong_order( + hass: HomeAssistant, context +) -> None: """Test the event for wrong order.""" # Enter inner zone await send_message(hass, EVENT_TOPIC, REGION_BEACON_ENTER_MESSAGE) @@ -877,8 +838,9 @@ async def test_event_region_entry_exit_wrong_order(hass: HomeAssistant) -> None: assert_location_state(hass, "inner_2") -@pytest.mark.usefixtures("context") -async def test_event_beacon_unknown_zone_no_location(hass: HomeAssistant) -> None: +async def test_event_beacon_unknown_zone_no_location( + hass: HomeAssistant, context +) -> None: """Test the event for unknown zone.""" # A beacon which does not match a HA zone is the # definition of a mobile beacon. In this case, "unknown" @@ -903,8 +865,7 @@ async def test_event_beacon_unknown_zone_no_location(hass: HomeAssistant) -> Non assert_mobile_tracker_state(hass, "unknown", "unknown") -@pytest.mark.usefixtures("context") -async def test_event_beacon_unknown_zone(hass: HomeAssistant) -> None: +async def test_event_beacon_unknown_zone(hass: HomeAssistant, context) -> None: """Test the event for unknown zone.""" # A beacon which does not match a HA zone is the # definition of a mobile beacon. In this case, "unknown" @@ -924,8 +885,9 @@ async def test_event_beacon_unknown_zone(hass: HomeAssistant) -> None: assert_mobile_tracker_state(hass, "outer", "unknown") -@pytest.mark.usefixtures("context") -async def test_event_beacon_entry_zone_loading_dash(hass: HomeAssistant) -> None: +async def test_event_beacon_entry_zone_loading_dash( + hass: HomeAssistant, context +) -> None: """Test the event for beacon zone landing.""" # Make sure the leading - is ignored # Owntracks uses this to switch on hold @@ -937,8 +899,7 @@ async def test_event_beacon_entry_zone_loading_dash(hass: HomeAssistant) -> None # ------------------------------------------------------------------------ # Mobile Beacon based event entry / exit testing -@pytest.mark.usefixtures("context") -async def test_mobile_enter_move_beacon(hass: HomeAssistant) -> None: +async def test_mobile_enter_move_beacon(hass: HomeAssistant, context) -> None: """Test the movement of a beacon.""" # I am in the outer zone. 
await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -962,8 +923,7 @@ async def test_mobile_enter_move_beacon(hass: HomeAssistant) -> None: assert_mobile_tracker_latitude(hass, not_home_lat) -@pytest.mark.usefixtures("context") -async def test_mobile_enter_exit_region_beacon(hass: HomeAssistant) -> None: +async def test_mobile_enter_exit_region_beacon(hass: HomeAssistant, context) -> None: """Test the enter and the exit of a mobile beacon.""" # I am in the outer zone. await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -986,8 +946,7 @@ async def test_mobile_enter_exit_region_beacon(hass: HomeAssistant) -> None: assert_mobile_tracker_state(hass, "outer") -@pytest.mark.usefixtures("context") -async def test_mobile_exit_move_beacon(hass: HomeAssistant) -> None: +async def test_mobile_exit_move_beacon(hass: HomeAssistant, context) -> None: """Test the exit move of a beacon.""" # I am in the outer zone. await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -1009,9 +968,7 @@ async def test_mobile_exit_move_beacon(hass: HomeAssistant) -> None: assert_mobile_tracker_state(hass, "outer") -async def test_mobile_multiple_async_enter_exit( - hass: HomeAssistant, context: OwnTracksContextFactory -) -> None: +async def test_mobile_multiple_async_enter_exit(hass: HomeAssistant, context) -> None: """Test the multiple entering.""" # Test race condition for _ in range(20): @@ -1031,9 +988,7 @@ async def test_mobile_multiple_async_enter_exit( assert len(context().mobile_beacons_active["greg_phone"]) == 0 -async def test_mobile_multiple_enter_exit( - hass: HomeAssistant, context: OwnTracksContextFactory -) -> None: +async def test_mobile_multiple_enter_exit(hass: HomeAssistant, context) -> None: """Test the multiple entering.""" await send_message(hass, EVENT_TOPIC, MOBILE_BEACON_ENTER_EVENT_MESSAGE) await send_message(hass, EVENT_TOPIC, MOBILE_BEACON_ENTER_EVENT_MESSAGE) @@ -1042,8 +997,7 @@ async def test_mobile_multiple_enter_exit( assert len(context().mobile_beacons_active["greg_phone"]) == 0 -@pytest.mark.usefixtures("context") -async def test_complex_movement(hass: HomeAssistant) -> None: +async def test_complex_movement(hass: HomeAssistant, context) -> None: """Test a complex sequence representative of real-world use.""" # I am in the outer zone. 
await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -1165,8 +1119,9 @@ async def test_complex_movement(hass: HomeAssistant) -> None: assert_mobile_tracker_state(hass, "outer") -@pytest.mark.usefixtures("context") -async def test_complex_movement_sticky_keys_beacon(hass: HomeAssistant) -> None: +async def test_complex_movement_sticky_keys_beacon( + hass: HomeAssistant, context +) -> None: """Test a complex sequence which was previously broken.""" # I am not_home await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -1278,8 +1233,7 @@ async def test_complex_movement_sticky_keys_beacon(hass: HomeAssistant) -> None: assert_mobile_tracker_latitude(hass, INNER_ZONE["latitude"]) -@pytest.mark.usefixtures("context") -async def test_waypoint_import_simple(hass: HomeAssistant) -> None: +async def test_waypoint_import_simple(hass: HomeAssistant, context) -> None: """Test a simple import of list of waypoints.""" waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy() await send_message(hass, WAYPOINTS_TOPIC, waypoints_message) @@ -1290,8 +1244,7 @@ async def test_waypoint_import_simple(hass: HomeAssistant) -> None: assert wayp is not None -@pytest.mark.usefixtures("context") -async def test_waypoint_import_block(hass: HomeAssistant) -> None: +async def test_waypoint_import_block(hass: HomeAssistant, context) -> None: """Test import of list of waypoints for blocked user.""" waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy() await send_message(hass, WAYPOINTS_TOPIC_BLOCKED, waypoints_message) @@ -1322,8 +1275,7 @@ async def test_waypoint_import_no_whitelist(hass: HomeAssistant, setup_comp) -> assert wayp is not None -@pytest.mark.usefixtures("context") -async def test_waypoint_import_bad_json(hass: HomeAssistant) -> None: +async def test_waypoint_import_bad_json(hass: HomeAssistant, context) -> None: """Test importing a bad JSON payload.""" waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy() await send_message(hass, WAYPOINTS_TOPIC, waypoints_message, True) @@ -1334,8 +1286,7 @@ async def test_waypoint_import_bad_json(hass: HomeAssistant) -> None: assert wayp is None -@pytest.mark.usefixtures("context") -async def test_waypoint_import_existing(hass: HomeAssistant) -> None: +async def test_waypoint_import_existing(hass: HomeAssistant, context) -> None: """Test importing a zone that exists.""" waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy() await send_message(hass, WAYPOINTS_TOPIC, waypoints_message) @@ -1348,8 +1299,7 @@ async def test_waypoint_import_existing(hass: HomeAssistant) -> None: assert wayp == new_wayp -@pytest.mark.usefixtures("context") -async def test_single_waypoint_import(hass: HomeAssistant) -> None: +async def test_single_waypoint_import(hass: HomeAssistant, context) -> None: """Test single waypoint message.""" waypoint_message = WAYPOINT_MESSAGE.copy() await send_message(hass, WAYPOINT_TOPIC, waypoint_message) @@ -1357,8 +1307,7 @@ async def test_single_waypoint_import(hass: HomeAssistant) -> None: assert wayp is not None -@pytest.mark.usefixtures("context") -async def test_not_implemented_message(hass: HomeAssistant) -> None: +async def test_not_implemented_message(hass: HomeAssistant, context) -> None: """Handle not implemented message type.""" patch_handler = patch( "homeassistant.components.owntracks.messages.async_handle_not_impl_msg", @@ -1369,8 +1318,7 @@ async def test_not_implemented_message(hass: HomeAssistant) -> None: patch_handler.stop() -@pytest.mark.usefixtures("context") -async def test_unsupported_message(hass: HomeAssistant) -> None: +async 
def test_unsupported_message(hass: HomeAssistant, context) -> None: """Handle not implemented message type.""" patch_handler = patch( "homeassistant.components.owntracks.messages.async_handle_unsupported_msg", @@ -1437,7 +1385,7 @@ def mock_cipher(): @pytest.fixture -def config_context(setup_comp: None) -> Generator[None]: +def config_context(hass, setup_comp): """Set up the mocked context.""" patch_load = patch( "homeassistant.components.device_tracker.async_load_config", @@ -1540,7 +1488,7 @@ async def test_encrypted_payload_wrong_topic_key( async def test_encrypted_payload_no_topic_key(hass: HomeAssistant, setup_comp) -> None: """Test encrypted payload with no topic key.""" await setup_owntracks( - hass, {CONF_SECRET: {f"owntracks/{USER}/otherdevice": "foobar"}} + hass, {CONF_SECRET: {"owntracks/{}/{}".format(USER, "otherdevice"): "foobar"}} ) await send_message(hass, LOCATION_TOPIC, MOCK_ENCRYPTED_LOCATION_MESSAGE) assert hass.states.get(DEVICE_TRACKER_STATE) is None diff --git a/tests/components/p1_monitor/conftest.py b/tests/components/p1_monitor/conftest.py index fbd39914536..1d5f349f858 100644 --- a/tests/components/p1_monitor/conftest.py +++ b/tests/components/p1_monitor/conftest.py @@ -7,7 +7,7 @@ from p1monitor import Phases, Settings, SmartMeter, WaterMeter import pytest from homeassistant.components.p1_monitor.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -19,9 +19,8 @@ def mock_config_entry() -> MockConfigEntry: return MockConfigEntry( title="monitor", domain=DOMAIN, - data={CONF_HOST: "example", CONF_PORT: 80}, + data={CONF_HOST: "example"}, unique_id="unique_thingy", - version=2, ) diff --git a/tests/components/p1_monitor/snapshots/test_init.ambr b/tests/components/p1_monitor/snapshots/test_init.ambr deleted file mode 100644 index d0a676fce1b..00000000000 --- a/tests/components/p1_monitor/snapshots/test_init.ambr +++ /dev/null @@ -1,45 +0,0 @@ -# serializer version: 1 -# name: test_migration - ConfigEntrySnapshot({ - 'data': dict({ - 'host': 'example', - 'port': 80, - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'p1_monitor', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': 'unique_thingy', - 'version': 2, - }) -# --- -# name: test_port_migration - ConfigEntrySnapshot({ - 'data': dict({ - 'host': 'example', - 'port': 80, - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'p1_monitor', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': 'unique_thingy', - 'version': 2, - }) -# --- diff --git a/tests/components/p1_monitor/test_config_flow.py b/tests/components/p1_monitor/test_config_flow.py index cbd89320074..12a6a6f5d11 100644 --- a/tests/components/p1_monitor/test_config_flow.py +++ b/tests/components/p1_monitor/test_config_flow.py @@ -6,7 +6,7 @@ from p1monitor import P1MonitorError from homeassistant.components.p1_monitor.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow 
import FlowResultType @@ -30,13 +30,12 @@ async def test_full_user_flow(hass: HomeAssistant) -> None: ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={CONF_HOST: "example.com", CONF_PORT: 80}, + user_input={CONF_HOST: "example.com"}, ) assert result2.get("type") is FlowResultType.CREATE_ENTRY assert result2.get("title") == "P1 Monitor" - assert result2.get("data") == {CONF_HOST: "example.com", CONF_PORT: 80} - assert isinstance(result2["data"][CONF_PORT], int) + assert result2.get("data") == {CONF_HOST: "example.com"} assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_p1monitor.mock_calls) == 1 @@ -51,7 +50,7 @@ async def test_api_error(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, - data={CONF_HOST: "example.com", CONF_PORT: 80}, + data={CONF_HOST: "example.com"}, ) assert result.get("type") is FlowResultType.FORM diff --git a/tests/components/p1_monitor/test_diagnostics.py b/tests/components/p1_monitor/test_diagnostics.py index 396a3d3bd0d..55d4ccc5e67 100644 --- a/tests/components/p1_monitor/test_diagnostics.py +++ b/tests/components/p1_monitor/test_diagnostics.py @@ -21,7 +21,6 @@ async def test_diagnostics( "title": "monitor", "data": { "host": REDACTED, - "port": REDACTED, }, }, "data": { diff --git a/tests/components/p1_monitor/test_init.py b/tests/components/p1_monitor/test_init.py index 20714740385..02888b5ae97 100644 --- a/tests/components/p1_monitor/test_init.py +++ b/tests/components/p1_monitor/test_init.py @@ -3,11 +3,9 @@ from unittest.mock import AsyncMock, MagicMock, patch from p1monitor import P1MonitorConnectionError -from syrupy import SnapshotAssertion from homeassistant.components.p1_monitor.const import DOMAIN from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -46,35 +44,3 @@ async def test_config_entry_not_ready( assert mock_request.call_count == 1 assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_migration(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: - """Test config entry version 1 -> 2 migration.""" - mock_config_entry = MockConfigEntry( - unique_id="unique_thingy", - domain=DOMAIN, - data={CONF_HOST: "example"}, - version=1, - ) - mock_config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert hass.config_entries.async_get_entry(mock_config_entry.entry_id) == snapshot - - -async def test_port_migration(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: - """Test migration of host:port to separate host and port.""" - mock_config_entry = MockConfigEntry( - unique_id="unique_thingy", - domain=DOMAIN, - data={CONF_HOST: "example:80"}, - version=1, - ) - mock_config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert hass.config_entries.async_get_entry(mock_config_entry.entry_id) == snapshot diff --git a/tests/components/palazzetti/__init__.py b/tests/components/palazzetti/__init__.py deleted file mode 100644 index 0aafdf553ad..00000000000 --- a/tests/components/palazzetti/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for the Palazzetti integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def 
setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/palazzetti/conftest.py b/tests/components/palazzetti/conftest.py deleted file mode 100644 index 33dca845098..00000000000 --- a/tests/components/palazzetti/conftest.py +++ /dev/null @@ -1,74 +0,0 @@ -"""Fixtures for Palazzetti integration tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.palazzetti.const import DOMAIN -from homeassistant.const import CONF_HOST - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.palazzetti.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Return the default mocked config entry.""" - return MockConfigEntry( - title="palazzetti", - domain=DOMAIN, - data={CONF_HOST: "127.0.0.1"}, - unique_id="11:22:33:44:55:66", - ) - - -@pytest.fixture -def mock_palazzetti_client() -> Generator[AsyncMock]: - """Return a mocked PalazzettiClient.""" - with ( - patch( - "homeassistant.components.palazzetti.coordinator.PalazzettiClient", - autospec=True, - ) as client, - patch( - "homeassistant.components.palazzetti.config_flow.PalazzettiClient", - new=client, - ), - ): - mock_client = client.return_value - mock_client.mac = "11:22:33:44:55:66" - mock_client.name = "Stove" - mock_client.sw_version = "0.0.0" - mock_client.hw_version = "1.1.1" - mock_client.fan_speed_min = 1 - mock_client.fan_speed_max = 5 - mock_client.has_fan_silent = True - mock_client.has_fan_high = True - mock_client.has_fan_auto = True - mock_client.has_on_off_switch = True - mock_client.connected = True - mock_client.is_heating = True - mock_client.room_temperature = 18 - mock_client.target_temperature = 21 - mock_client.target_temperature_min = 5 - mock_client.target_temperature_max = 50 - mock_client.fan_speed = 3 - mock_client.connect.return_value = True - mock_client.update_state.return_value = True - mock_client.set_on.return_value = True - mock_client.set_target_temperature.return_value = True - mock_client.set_fan_speed.return_value = True - mock_client.set_fan_silent.return_value = True - mock_client.set_fan_high.return_value = True - mock_client.set_fan_auto.return_value = True - yield mock_client diff --git a/tests/components/palazzetti/snapshots/test_climate.ambr b/tests/components/palazzetti/snapshots/test_climate.ambr deleted file mode 100644 index eb3b323272e..00000000000 --- a/tests/components/palazzetti/snapshots/test_climate.ambr +++ /dev/null @@ -1,86 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[climate.stove-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'fan_modes': list([ - 'silent', - '1', - '2', - '3', - '4', - '5', - 'high', - 'auto', - ]), - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 50, - 'min_temp': 5, - 'target_temp_step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.stove', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 
'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'palazzetti', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'palazzetti', - 'unique_id': '11:22:33:44:55:66', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[climate.stove-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 18, - 'fan_mode': '3', - 'fan_modes': list([ - 'silent', - '1', - '2', - '3', - '4', - '5', - 'high', - 'auto', - ]), - 'friendly_name': 'Stove', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 50, - 'min_temp': 5, - 'supported_features': , - 'target_temp_step': 1.0, - 'temperature': 21, - }), - 'context': , - 'entity_id': 'climate.stove', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- diff --git a/tests/components/palazzetti/snapshots/test_init.ambr b/tests/components/palazzetti/snapshots/test_init.ambr deleted file mode 100644 index abdee6b7f6f..00000000000 --- a/tests/components/palazzetti/snapshots/test_init.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: test_device - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - tuple( - 'mac', - '11:22:33:44:55:66', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '1.1.1', - 'id': , - 'identifiers': set({ - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Palazzetti', - 'model': None, - 'model_id': None, - 'name': 'Stove', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': '0.0.0', - 'via_device_id': None, - }) -# --- diff --git a/tests/components/palazzetti/test_climate.py b/tests/components/palazzetti/test_climate.py deleted file mode 100644 index 78af8f00bdb..00000000000 --- a/tests/components/palazzetti/test_climate.py +++ /dev/null @@ -1,174 +0,0 @@ -"""Tests for the Palazzetti climate platform.""" - -from unittest.mock import AsyncMock, patch - -from pypalazzetti.exceptions import CommunicationError, ValidationError -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.climate import ( - ATTR_FAN_MODE, - ATTR_HVAC_MODE, - DOMAIN as CLIMATE_DOMAIN, - SERVICE_SET_FAN_MODE, - SERVICE_SET_HVAC_MODE, - SERVICE_SET_TEMPERATURE, - HVACMode, -) -from homeassistant.components.palazzetti.const import FAN_AUTO, FAN_HIGH, FAN_SILENT -from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - -ENTITY_ID = "climate.stove" - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_palazzetti_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.palazzetti.PLATFORMS", [Platform.CLIMATE]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_async_set_data( - hass: HomeAssistant, - mock_palazzetti_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test setting climate data via service call.""" - await setup_integration(hass, mock_config_entry) - - # Set HVAC Mode: Success - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, - blocking=True, - ) - mock_palazzetti_client.set_on.assert_called_once_with(True) - mock_palazzetti_client.set_on.reset_mock() - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF}, - blocking=True, - ) - mock_palazzetti_client.set_on.assert_called_once_with(False) - mock_palazzetti_client.set_on.reset_mock() - - # Set HVAC Mode: Error - mock_palazzetti_client.set_on.side_effect = CommunicationError() - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, - blocking=True, - ) - - mock_palazzetti_client.set_on.side_effect = ValidationError() - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, - blocking=True, - ) - - # Set Temperature: Success - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 22}, - blocking=True, - ) - mock_palazzetti_client.set_target_temperature.assert_called_once_with(22) - mock_palazzetti_client.set_target_temperature.reset_mock() - - # Set Temperature: Error - mock_palazzetti_client.set_target_temperature.side_effect = CommunicationError() - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 22}, - blocking=True, - ) - - mock_palazzetti_client.set_target_temperature.side_effect = ValidationError() - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 22}, - blocking=True, - ) - - # Set Fan Mode: Success - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_FAN_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: FAN_SILENT}, - blocking=True, - ) - mock_palazzetti_client.set_fan_silent.assert_called_once() - mock_palazzetti_client.set_fan_silent.reset_mock() - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_FAN_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: FAN_HIGH}, - blocking=True, - ) - mock_palazzetti_client.set_fan_high.assert_called_once() - mock_palazzetti_client.set_fan_high.reset_mock() - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_FAN_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, 
ATTR_FAN_MODE: FAN_AUTO}, - blocking=True, - ) - mock_palazzetti_client.set_fan_auto.assert_called_once() - mock_palazzetti_client.set_fan_auto.reset_mock() - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_FAN_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: "3"}, - blocking=True, - ) - mock_palazzetti_client.set_fan_speed.assert_called_once_with(3) - mock_palazzetti_client.set_fan_speed.reset_mock() - - # Set Fan Mode: Error - mock_palazzetti_client.set_fan_speed.side_effect = CommunicationError() - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_FAN_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: 3}, - blocking=True, - ) - - mock_palazzetti_client.set_fan_speed.side_effect = ValidationError() - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_FAN_MODE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: 3}, - blocking=True, - ) diff --git a/tests/components/palazzetti/test_config_flow.py b/tests/components/palazzetti/test_config_flow.py deleted file mode 100644 index 03c56c33d0c..00000000000 --- a/tests/components/palazzetti/test_config_flow.py +++ /dev/null @@ -1,140 +0,0 @@ -"""Test the Palazzetti config flow.""" - -from unittest.mock import AsyncMock - -from pypalazzetti.exceptions import CommunicationError - -from homeassistant.components import dhcp -from homeassistant.components.palazzetti.const import DOMAIN -from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER -from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_full_user_flow( - hass: HomeAssistant, mock_palazzetti_client: AsyncMock, mock_setup_entry: AsyncMock -) -> None: - """Test the full user configuration flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_HOST: "192.168.1.1"}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Stove" - assert result["data"] == {CONF_HOST: "192.168.1.1"} - assert result["result"].unique_id == "11:22:33:44:55:66" - assert len(mock_palazzetti_client.connect.mock_calls) > 0 - - -async def test_invalid_host( - hass: HomeAssistant, - mock_palazzetti_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test cannot connect error.""" - - mock_palazzetti_client.connect.side_effect = CommunicationError() - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_HOST: "192.168.1.1"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - mock_palazzetti_client.connect.side_effect = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_HOST: "192.168.1.1"}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_duplicate( - hass: HomeAssistant, - mock_palazzetti_client: AsyncMock, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> 
None: - """Test duplicate flow.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "192.168.1.1"}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_dhcp_flow( - hass: HomeAssistant, mock_palazzetti_client: AsyncMock, mock_setup_entry: AsyncMock -) -> None: - """Test the DHCP flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - data=dhcp.DhcpServiceInfo( - hostname="connbox1234", ip="192.168.1.1", macaddress="11:22:33:44:55:66" - ), - context={"source": SOURCE_DHCP}, - ) - - await hass.async_block_till_done() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "discovery_confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {}, - ) - - await hass.async_block_till_done() - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Stove" - assert result["result"].unique_id == "11:22:33:44:55:66" - - -async def test_dhcp_flow_error( - hass: HomeAssistant, mock_palazzetti_client: AsyncMock, mock_setup_entry: AsyncMock -) -> None: - """Test the DHCP flow.""" - mock_palazzetti_client.connect.side_effect = CommunicationError() - - result = await hass.config_entries.flow.async_init( - DOMAIN, - data=dhcp.DhcpServiceInfo( - hostname="connbox1234", ip="192.168.1.1", macaddress="11:22:33:44:55:66" - ), - context={"source": SOURCE_DHCP}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "cannot_connect" diff --git a/tests/components/palazzetti/test_init.py b/tests/components/palazzetti/test_init.py deleted file mode 100644 index 710144b2b7b..00000000000 --- a/tests/components/palazzetti/test_init.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Tests for the Palazzetti integration.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from . 
import setup_integration - -from tests.common import MockConfigEntry - - -async def test_load_unload_config_entry( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_palazzetti_client: AsyncMock, -) -> None: - """Test the Palazzetti configuration entry loading/unloading.""" - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED - - -async def test_device( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_palazzetti_client: AsyncMock, - snapshot: SnapshotAssertion, - device_registry: dr.DeviceRegistry, -) -> None: - """Test the device information.""" - await setup_integration(hass, mock_config_entry) - - device = device_registry.async_get_device( - connections={(dr.CONNECTION_NETWORK_MAC, "11:22:33:44:55:66")} - ) - assert device is not None - assert device == snapshot diff --git a/tests/components/panasonic_viera/test_remote.py b/tests/components/panasonic_viera/test_remote.py index 43f11c7d766..05254753d3f 100644 --- a/tests/components/panasonic_viera/test_remote.py +++ b/tests/components/panasonic_viera/test_remote.py @@ -18,7 +18,7 @@ from .conftest import MOCK_CONFIG_DATA, MOCK_DEVICE_INFO, MOCK_ENCRYPTION_DATA from tests.common import MockConfigEntry -async def setup_panasonic_viera(hass: HomeAssistant) -> None: +async def setup_panasonic_viera(hass): """Initialize integration for tests.""" mock_entry = MockConfigEntry( domain=DOMAIN, @@ -46,7 +46,7 @@ async def test_onoff(hass: HomeAssistant, mock_remote) -> None: await hass.services.async_call(REMOTE_DOMAIN, SERVICE_TURN_ON, data) await hass.async_block_till_done() - power = getattr(Keys.POWER, "value", Keys.POWER) + power = getattr(Keys.power, "value", Keys.power) assert mock_remote.send_key.call_args_list == [call(power), call(power)] diff --git a/tests/components/panel_iframe/__init__.py b/tests/components/panel_iframe/__init__.py new file mode 100644 index 00000000000..df7115d9e97 --- /dev/null +++ b/tests/components/panel_iframe/__init__.py @@ -0,0 +1 @@ +"""Tests for the panel_iframe component.""" diff --git a/tests/components/panel_iframe/test_init.py b/tests/components/panel_iframe/test_init.py new file mode 100644 index 00000000000..74e1b642df5 --- /dev/null +++ b/tests/components/panel_iframe/test_init.py @@ -0,0 +1,154 @@ +"""The tests for the panel_iframe component.""" + +from typing import Any + +import pytest + +from homeassistant.components.panel_iframe import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.setup import async_setup_component + +from tests.typing import WebSocketGenerator + +TEST_CONFIG = { + "router": { + "icon": "mdi:network-wireless", + "title": "Router", + "url": "http://192.168.1.1", + "require_admin": True, + }, + "weather": { + "icon": "mdi:weather", + "title": "Weather", + "url": "https://www.wunderground.com/us/ca/san-diego", + "require_admin": True, + }, + "api": {"icon": "mdi:weather", "title": "Api", "url": "/api"}, + "ftp": { + "icon": "mdi:weather", + "title": "FTP", + "url": "ftp://some/ftp", + }, +} + + +@pytest.mark.parametrize( + "config_to_try", + [ + {"invalid space": {"url": "https://home-assistant.io"}}, + {"router": {"url": "not-a-url"}}, + ], +) +async def test_wrong_config(hass: HomeAssistant, config_to_try) -> None: + """Test setup 
with wrong configuration.""" + assert not await async_setup_component( + hass, "panel_iframe", {"panel_iframe": config_to_try} + ) + + +async def test_import_config( + hass: HomeAssistant, + hass_storage: dict[str, Any], + hass_ws_client: WebSocketGenerator, +) -> None: + """Test import config.""" + client = await hass_ws_client(hass) + + assert await async_setup_component( + hass, + "panel_iframe", + {"panel_iframe": TEST_CONFIG}, + ) + + # List dashboards + await client.send_json_auto_id({"type": "lovelace/dashboards/list"}) + response = await client.receive_json() + assert response["success"] + assert response["result"] == [ + { + "icon": "mdi:network-wireless", + "id": "router", + "mode": "storage", + "require_admin": True, + "show_in_sidebar": True, + "title": "Router", + "url_path": "router", + }, + { + "icon": "mdi:weather", + "id": "weather", + "mode": "storage", + "require_admin": True, + "show_in_sidebar": True, + "title": "Weather", + "url_path": "weather", + }, + { + "icon": "mdi:weather", + "id": "api", + "mode": "storage", + "require_admin": False, + "show_in_sidebar": True, + "title": "Api", + "url_path": "api", + }, + { + "icon": "mdi:weather", + "id": "ftp", + "mode": "storage", + "require_admin": False, + "show_in_sidebar": True, + "title": "FTP", + "url_path": "ftp", + }, + ] + + for url_path in ("api", "ftp", "router", "weather"): + await client.send_json_auto_id( + {"type": "lovelace/config", "url_path": url_path} + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "strategy": {"type": "iframe", "url": TEST_CONFIG[url_path]["url"]} + } + + assert hass_storage[DOMAIN]["data"] == {"migrated": True} + + +async def test_import_config_once( + hass: HomeAssistant, + hass_storage: dict[str, Any], + hass_ws_client: WebSocketGenerator, +) -> None: + """Test import config only happens once.""" + client = await hass_ws_client(hass) + + hass_storage[DOMAIN] = { + "version": 1, + "minor_version": 1, + "key": "map", + "data": {"migrated": True}, + } + + assert await async_setup_component( + hass, + "panel_iframe", + {"panel_iframe": TEST_CONFIG}, + ) + + # List dashboards + await client.send_json_auto_id({"type": "lovelace/dashboards/list"}) + response = await client.receive_json() + assert response["success"] + assert response["result"] == [] + + +async def test_create_issue_when_manually_configured( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test creating issue registry issues.""" + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + + assert issue_registry.async_get_issue(DOMAIN, "deprecated_yaml") diff --git a/tests/components/pegel_online/snapshots/test_diagnostics.ambr b/tests/components/pegel_online/snapshots/test_diagnostics.ambr deleted file mode 100644 index 1e55805f867..00000000000 --- a/tests/components/pegel_online/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,39 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'data': dict({ - 'air_temperature': None, - 'clearance_height': None, - 'oxygen_level': None, - 'ph_value': None, - 'water_flow': dict({ - 'uom': 'm³/s', - 'value': 88.4, - }), - 'water_level': dict({ - 'uom': 'cm', - 'value': 62, - }), - 'water_speed': None, - 'water_temperature': None, - }), - 'entry': dict({ - 'data': dict({ - 'station': '70272185-xxxx-xxxx-xxxx-43bea330dcae', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'pegel_online', - 'minor_version': 1, - 'options': dict({ - }), - 
'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': '70272185-xxxx-xxxx-xxxx-43bea330dcae', - 'version': 1, - }), - }) -# --- diff --git a/tests/components/pegel_online/test_diagnostics.py b/tests/components/pegel_online/test_diagnostics.py deleted file mode 100644 index 220f244b751..00000000000 --- a/tests/components/pegel_online/test_diagnostics.py +++ /dev/null @@ -1,44 +0,0 @@ -"""Test pegel_online diagnostics.""" - -from unittest.mock import patch - -from syrupy import SnapshotAssertion -from syrupy.filters import props - -from homeassistant.components.pegel_online.const import CONF_STATION, DOMAIN -from homeassistant.core import HomeAssistant - -from . import PegelOnlineMock -from .const import ( - MOCK_CONFIG_ENTRY_DATA_DRESDEN, - MOCK_STATION_DETAILS_DRESDEN, - MOCK_STATION_MEASUREMENT_DRESDEN, -) - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test config entry diagnostics.""" - entry = MockConfigEntry( - domain=DOMAIN, - data=MOCK_CONFIG_ENTRY_DATA_DRESDEN, - unique_id=MOCK_CONFIG_ENTRY_DATA_DRESDEN[CONF_STATION], - ) - entry.add_to_hass(hass) - with patch("homeassistant.components.pegel_online.PegelOnline") as pegelonline: - pegelonline.return_value = PegelOnlineMock( - station_details=MOCK_STATION_DETAILS_DRESDEN, - station_measurements=MOCK_STATION_MEASUREMENT_DRESDEN, - ) - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/permobil/conftest.py b/tests/components/permobil/conftest.py index d3630d3f366..ed6a843b206 100644 --- a/tests/components/permobil/conftest.py +++ b/tests/components/permobil/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the MyPermobil tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch from mypermobil import MyPermobil import pytest +from typing_extensions import Generator from .const import MOCK_REGION_NAME, MOCK_TOKEN, MOCK_URL diff --git a/tests/components/permobil/test_config_flow.py b/tests/components/permobil/test_config_flow.py index 7067566a74d..ea39e678459 100644 --- a/tests/components/permobil/test_config_flow.py +++ b/tests/components/permobil/test_config_flow.py @@ -284,21 +284,23 @@ async def test_config_flow_reauth_success( "homeassistant.components.permobil.config_flow.MyPermobil", return_value=my_permobil, ): - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, + context={"source": "reauth", "entry_id": mock_entry.entry_id}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "email_code" assert result["errors"] == {} - # request new token + # request request new token result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_CODE: reauth_code}, ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - assert mock_entry.data == { + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { CONF_EMAIL: MOCK_EMAIL, 
CONF_REGION: MOCK_URL, CONF_CODE: reauth_code, @@ -324,7 +326,10 @@ async def test_config_flow_reauth_fail_invalid_code( "homeassistant.components.permobil.config_flow.MyPermobil", return_value=my_permobil, ): - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, + context={"source": "reauth", "entry_id": mock_entry.entry_id}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "email_code" @@ -352,11 +357,16 @@ async def test_config_flow_reauth_fail_code_request( ) mock_entry.add_to_hass(hass) # test the reauth and have request_application_code fail leading to an abort + my_permobil.request_application_code.side_effect = MyPermobilAPIException + reauth_entry = hass.config_entries.async_entries(config_flow.DOMAIN)[0] with patch( "homeassistant.components.permobil.config_flow.MyPermobil", return_value=my_permobil, ): - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, + context={"source": "reauth", "entry_id": reauth_entry.entry_id}, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "unknown" diff --git a/tests/components/persistent_notification/conftest.py b/tests/components/persistent_notification/conftest.py index 29ba5a6008a..d665c0075b3 100644 --- a/tests/components/persistent_notification/conftest.py +++ b/tests/components/persistent_notification/conftest.py @@ -3,11 +3,10 @@ import pytest import homeassistant.components.persistent_notification as pn -from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) -async def setup_integration(hass: HomeAssistant) -> None: +async def setup_integration(hass): """Set up persistent notification integration.""" assert await async_setup_component(hass, pn.DOMAIN, {}) diff --git a/tests/components/person/conftest.py b/tests/components/person/conftest.py index a6dc95ccc9e..ecec42b003d 100644 --- a/tests/components/person/conftest.py +++ b/tests/components/person/conftest.py @@ -18,7 +18,7 @@ DEVICE_TRACKER_2 = "device_tracker.test_tracker_2" @pytest.fixture -def storage_collection(hass: HomeAssistant) -> person.PersonStorageCollection: +def storage_collection(hass): """Return an empty storage collection.""" id_manager = collection.IDManager() return person.PersonStorageCollection( diff --git a/tests/components/philips_js/conftest.py b/tests/components/philips_js/conftest.py index 4a79fce85a2..b6c78fe9e5e 100644 --- a/tests/components/philips_js/conftest.py +++ b/tests/components/philips_js/conftest.py @@ -1,18 +1,16 @@ """Standard setup for tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, create_autospec, patch from haphilipsjs import PhilipsTV import pytest +from typing_extensions import Generator from homeassistant.components.philips_js.const import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr from . 
import MOCK_CONFIG, MOCK_ENTITY_ID, MOCK_NAME, MOCK_SERIAL_NO, MOCK_SYSTEM -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, mock_device_registry @pytest.fixture @@ -29,6 +27,11 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry +@pytest.fixture(autouse=True) +async def setup_notification(hass): + """Configure notification system.""" + + @pytest.fixture(autouse=True) def mock_tv(): """Disable component actual use.""" @@ -59,7 +62,7 @@ def mock_tv(): @pytest.fixture -async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: +async def mock_config_entry(hass): """Get standard player.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_CONFIG, title=MOCK_NAME, unique_id=MOCK_SERIAL_NO @@ -69,7 +72,13 @@ async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture -async def mock_entity(hass: HomeAssistant, mock_config_entry: MockConfigEntry) -> str: +def mock_device_reg(hass): + """Get standard device.""" + return mock_device_registry(hass) + + +@pytest.fixture +async def mock_entity(hass, mock_device_reg, mock_config_entry): """Get standard player.""" assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -77,13 +86,9 @@ async def mock_entity(hass: HomeAssistant, mock_config_entry: MockConfigEntry) - @pytest.fixture -def mock_device( - device_registry: dr.DeviceRegistry, - mock_entity: str, - mock_config_entry: MockConfigEntry, -) -> dr.DeviceEntry: +def mock_device(hass, mock_device_reg, mock_entity, mock_config_entry): """Get standard device.""" - return device_registry.async_get_or_create( + return mock_device_reg.async_get_or_create( config_entry_id=mock_config_entry.entry_id, identifiers={(DOMAIN, MOCK_SERIAL_NO)}, ) diff --git a/tests/components/philips_js/snapshots/test_diagnostics.ambr b/tests/components/philips_js/snapshots/test_diagnostics.ambr index 4f7a6176634..5cff47c7d62 100644 --- a/tests/components/philips_js/snapshots/test_diagnostics.ambr +++ b/tests/components/philips_js/snapshots/test_diagnostics.ambr @@ -85,8 +85,6 @@ }), }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'philips_js', 'minor_version': 1, 'options': dict({ diff --git a/tests/components/philips_js/test_config_flow.py b/tests/components/philips_js/test_config_flow.py index c08885634db..d7f539db9cf 100644 --- a/tests/components/philips_js/test_config_flow.py +++ b/tests/components/philips_js/test_config_flow.py @@ -60,7 +60,7 @@ async def test_form(hass: HomeAssistant, mock_setup_entry) -> None: async def test_reauth( - hass: HomeAssistant, mock_setup_entry, mock_config_entry: MockConfigEntry, mock_tv + hass: HomeAssistant, mock_setup_entry, mock_config_entry, mock_tv ) -> None: """Test we get the form.""" @@ -69,7 +69,15 @@ async def test_reauth( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) assert len(mock_setup_entry.mock_calls) == 1 - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -161,10 +169,6 @@ async def test_pairing(hass: HomeAssistant, mock_tv_pairable, mock_setup_entry) assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.parametrize( # Remove when translations 
fixed - "ignore_translations", - ["component.philips_js.config.abort.pairing_failure"], -) async def test_pair_request_failed( hass: HomeAssistant, mock_tv_pairable, mock_setup_entry ) -> None: @@ -192,10 +196,6 @@ async def test_pair_request_failed( } -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.philips_js.config.abort.pairing_failure"], -) async def test_pair_grant_failed( hass: HomeAssistant, mock_tv_pairable, mock_setup_entry ) -> None: diff --git a/tests/components/philips_js/test_device_trigger.py b/tests/components/philips_js/test_device_trigger.py index 8f2e5543f1e..b9b7439d2fa 100644 --- a/tests/components/philips_js/test_device_trigger.py +++ b/tests/components/philips_js/test_device_trigger.py @@ -9,7 +9,7 @@ from homeassistant.components.philips_js.const import DOMAIN from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component -from tests.common import async_get_device_automations +from tests.common import async_get_device_automations, async_mock_service @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -17,6 +17,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers(hass: HomeAssistant, mock_device) -> None: """Test we get the expected triggers.""" expected_triggers = [ @@ -36,11 +42,7 @@ async def test_get_triggers(hass: HomeAssistant, mock_device) -> None: async def test_if_fires_on_turn_on_request( - hass: HomeAssistant, - service_calls: list[ServiceCall], - mock_tv, - mock_entity, - mock_device, + hass: HomeAssistant, calls: list[ServiceCall], mock_tv, mock_entity, mock_device ) -> None: """Test for turn_on and turn_off triggers firing.""" @@ -78,10 +80,6 @@ async def test_if_fires_on_turn_on_request( ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[0].domain == "media_player" - assert service_calls[0].service == "turn_on" - assert service_calls[1].domain == "test" - assert service_calls[1].service == "automation" - assert service_calls[1].data["some"] == mock_device.id - assert service_calls[1].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["some"] == mock_device.id + assert calls[0].data["id"] == 0 diff --git a/tests/components/philips_js/test_diagnostics.py b/tests/components/philips_js/test_diagnostics.py index d61546e52c3..cb3235b9780 100644 --- a/tests/components/philips_js/test_diagnostics.py +++ b/tests/components/philips_js/test_diagnostics.py @@ -63,4 +63,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) + assert result == snapshot(exclude=props("entry_id")) diff --git a/tests/components/pi_hole/__init__.py b/tests/components/pi_hole/__init__.py index 993f6a2571c..38231778624 100644 --- a/tests/components/pi_hole/__init__.py +++ b/tests/components/pi_hole/__init__.py @@ -33,7 +33,7 @@ ZERO_DATA = { "unique_domains": 0, } -SAMPLE_VERSIONS_WITH_UPDATES = { +SAMPLE_VERSIONS = { "core_current": "v5.5", "core_latest": "v5.6", "core_update": True, @@ -45,18 +45,6 @@ SAMPLE_VERSIONS_WITH_UPDATES = { "FTL_update": True, } -SAMPLE_VERSIONS_NO_UPDATES = { - "core_current": "v5.5", - "core_latest": "v5.5", - 
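The philips_js device-trigger hunk above replaces the shared service_calls fixture with a local calls fixture backed by async_mock_service. A short sketch of that pattern, reusing only names visible in the hunk; the helper assert_trigger_fired_once is illustrative and not from the source.

import pytest

from homeassistant.core import HomeAssistant, ServiceCall
from tests.common import async_mock_service


@pytest.fixture
def calls(hass: HomeAssistant) -> list[ServiceCall]:
    """Return the live list that collects every call to the mock test.automation service."""
    return async_mock_service(hass, "test", "automation")


def assert_trigger_fired_once(calls: list[ServiceCall], device_id: str) -> None:
    """Illustrative only: the assertion shape used once the trigger event has fired."""
    assert len(calls) == 1
    assert calls[0].data["some"] == device_id
    assert calls[0].data["id"] == 0
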
"core_update": False, - "web_current": "v5.7", - "web_latest": "v5.7", - "web_update": False, - "FTL_current": "v5.10", - "FTL_latest": "v5.10", - "FTL_update": False, -} - HOST = "1.2.3.4" PORT = 80 LOCATION = "location" @@ -115,9 +103,7 @@ CONFIG_ENTRY_WITHOUT_API_KEY = { SWITCH_ENTITY_ID = "switch.pi_hole" -def _create_mocked_hole( - raise_exception=False, has_versions=True, has_update=True, has_data=True -): +def _create_mocked_hole(raise_exception=False, has_versions=True, has_data=True): mocked_hole = MagicMock() type(mocked_hole).get_data = AsyncMock( side_effect=HoleError("") if raise_exception else None @@ -132,10 +118,7 @@ def _create_mocked_hole( else: mocked_hole.data = [] if has_versions: - if has_update: - mocked_hole.versions = SAMPLE_VERSIONS_WITH_UPDATES - else: - mocked_hole.versions = SAMPLE_VERSIONS_NO_UPDATES + mocked_hole.versions = SAMPLE_VERSIONS else: mocked_hole.versions = None return mocked_hole diff --git a/tests/components/pi_hole/snapshots/test_diagnostics.ambr b/tests/components/pi_hole/snapshots/test_diagnostics.ambr index 3094fcef24b..865494b5e9f 100644 --- a/tests/components/pi_hole/snapshots/test_diagnostics.ambr +++ b/tests/components/pi_hole/snapshots/test_diagnostics.ambr @@ -23,8 +23,6 @@ 'verify_ssl': True, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'pi_hole', 'entry_id': 'pi_hole_mock_entry', 'minor_version': 1, diff --git a/tests/components/pi_hole/test_config_flow.py b/tests/components/pi_hole/test_config_flow.py index d13712d6f76..326b01b9a7a 100644 --- a/tests/components/pi_hole/test_config_flow.py +++ b/tests/components/pi_hole/test_config_flow.py @@ -96,7 +96,7 @@ async def test_flow_user_without_api_key(hass: HomeAssistant) -> None: async def test_flow_user_invalid(hass: HomeAssistant) -> None: """Test user initialized flow with invalid server.""" - mocked_hole = _create_mocked_hole(raise_exception=True) + mocked_hole = _create_mocked_hole(True) with _patch_config_flow_hole(mocked_hole): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=CONFIG_FLOW_USER diff --git a/tests/components/pi_hole/test_diagnostics.py b/tests/components/pi_hole/test_diagnostics.py index 8d5a83e4622..c9fc9a0a9b8 100644 --- a/tests/components/pi_hole/test_diagnostics.py +++ b/tests/components/pi_hole/test_diagnostics.py @@ -1,7 +1,6 @@ """Test pi_hole component.""" from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props from homeassistant.components import pi_hole from homeassistant.core import HomeAssistant @@ -29,6 +28,4 @@ async def test_diagnostics( await hass.async_block_till_done() - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( - exclude=props("created_at", "modified_at") - ) + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot diff --git a/tests/components/pi_hole/test_update.py b/tests/components/pi_hole/test_update.py index 705e9f9c08d..091b553c475 100644 --- a/tests/components/pi_hole/test_update.py +++ b/tests/components/pi_hole/test_update.py @@ -1,7 +1,7 @@ """Test pi_hole component.""" from homeassistant.components import pi_hole -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN +from homeassistant.const import STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant from . 
import CONFIG_DATA_DEFAULTS, _create_mocked_hole, _patch_init_hole @@ -80,44 +80,3 @@ async def test_update_no_versions(hass: HomeAssistant) -> None: assert state.attributes["installed_version"] is None assert state.attributes["latest_version"] is None assert state.attributes["release_url"] is None - - -async def test_update_no_updates(hass: HomeAssistant) -> None: - """Tests update entity when no latest data available.""" - mocked_hole = _create_mocked_hole(has_versions=True, has_update=False) - entry = MockConfigEntry(domain=pi_hole.DOMAIN, data=CONFIG_DATA_DEFAULTS) - entry.add_to_hass(hass) - with _patch_init_hole(mocked_hole): - assert await hass.config_entries.async_setup(entry.entry_id) - - await hass.async_block_till_done() - - state = hass.states.get("update.pi_hole_core_update_available") - assert state.name == "Pi-Hole Core update available" - assert state.state == STATE_OFF - assert state.attributes["installed_version"] == "v5.5" - assert state.attributes["latest_version"] == "v5.5" - assert ( - state.attributes["release_url"] - == "https://github.com/pi-hole/pi-hole/releases/tag/v5.5" - ) - - state = hass.states.get("update.pi_hole_ftl_update_available") - assert state.name == "Pi-Hole FTL update available" - assert state.state == STATE_OFF - assert state.attributes["installed_version"] == "v5.10" - assert state.attributes["latest_version"] == "v5.10" - assert ( - state.attributes["release_url"] - == "https://github.com/pi-hole/FTL/releases/tag/v5.10" - ) - - state = hass.states.get("update.pi_hole_web_update_available") - assert state.name == "Pi-Hole Web update available" - assert state.state == STATE_OFF - assert state.attributes["installed_version"] == "v5.7" - assert state.attributes["latest_version"] == "v5.7" - assert ( - state.attributes["release_url"] - == "https://github.com/pi-hole/AdminLTE/releases/tag/v5.7" - ) diff --git a/tests/components/picnic/test_config_flow.py b/tests/components/picnic/test_config_flow.py index 8d668b28c16..9ba18dac9a9 100644 --- a/tests/components/picnic/test_config_flow.py +++ b/tests/components/picnic/test_config_flow.py @@ -170,15 +170,16 @@ async def test_step_reauth(hass: HomeAssistant, picnic_api) -> None: # Create a mocked config entry conf = {CONF_ACCESS_TOKEN: "a3p98fsen.a39p3fap", CONF_COUNTRY_CODE: "NL"} - entry = MockConfigEntry( + MockConfigEntry( domain=DOMAIN, unique_id=picnic_api().get_user()["user_id"], data=conf, - ) - entry.add_to_hass(hass) + ).add_to_hass(hass) # Init a re-auth flow - result_init = await entry.start_reauth_flow(hass) + result_init = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=conf + ) assert result_init["type"] is FlowResultType.FORM assert result_init["step_id"] == "user" @@ -209,15 +210,16 @@ async def test_step_reauth_failed(hass: HomeAssistant) -> None: user_id = "f29-2a6-o32n" conf = {CONF_ACCESS_TOKEN: "a3p98fsen.a39p3fap", CONF_COUNTRY_CODE: "NL"} - entry = MockConfigEntry( + MockConfigEntry( domain=DOMAIN, unique_id=user_id, data=conf, - ) - entry.add_to_hass(hass) + ).add_to_hass(hass) # Init a re-auth flow - result_init = await entry.start_reauth_flow(hass) + result_init = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=conf + ) assert result_init["type"] is FlowResultType.FORM assert result_init["step_id"] == "user" @@ -247,15 +249,16 @@ async def test_step_reauth_different_account(hass: HomeAssistant, picnic_api) -> # Create a mocked config entry, unique_id should be 
different that the user id in the api response conf = {CONF_ACCESS_TOKEN: "a3p98fsen.a39p3fap", CONF_COUNTRY_CODE: "NL"} - entry = MockConfigEntry( + MockConfigEntry( domain=DOMAIN, unique_id="3fpawh-ues-af3ho", data=conf, - ) - entry.add_to_hass(hass) + ).add_to_hass(hass) # Init a re-auth flow - result_init = await entry.start_reauth_flow(hass) + result_init = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=conf + ) assert result_init["type"] is FlowResultType.FORM assert result_init["step_id"] == "user" diff --git a/tests/components/picnic/test_todo.py b/tests/components/picnic/test_todo.py index 3a6e09f7ac0..cdd30967058 100644 --- a/tests/components/picnic/test_todo.py +++ b/tests/components/picnic/test_todo.py @@ -5,8 +5,7 @@ from unittest.mock import MagicMock, Mock import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import ATTR_ITEM, DOMAIN as TODO_DOMAIN, TodoServices -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.components.todo import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -91,10 +90,10 @@ async def test_create_todo_list_item( mock_picnic_api.add_product = Mock() await hass.services.async_call( - TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "Melk"}, - target={ATTR_ENTITY_ID: ENTITY_ID}, + DOMAIN, + "add_item", + {"item": "Melk"}, + target={"entity_id": ENTITY_ID}, blocking=True, ) @@ -119,9 +118,9 @@ async def test_create_todo_list_item_not_found( with pytest.raises(ServiceValidationError): await hass.services.async_call( - TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "Melk"}, - target={ATTR_ENTITY_ID: ENTITY_ID}, + DOMAIN, + "add_item", + {"item": "Melk"}, + target={"entity_id": ENTITY_ID}, blocking=True, ) diff --git a/tests/components/pilight/test_init.py b/tests/components/pilight/test_init.py index dfc62d30619..c48135f59eb 100644 --- a/tests/components/pilight/test_init.py +++ b/tests/components/pilight/test_init.py @@ -40,7 +40,7 @@ class PilightDaemonSim: "message": {"id": 0, "unit": 0, "off": 1}, } - def __init__(self, host, port) -> None: + def __init__(self, host, port): """Init pilight client, ignore parameters.""" def send_code(self, call): diff --git a/tests/components/pilight/test_sensor.py b/tests/components/pilight/test_sensor.py index e960e46b50a..97e031736e5 100644 --- a/tests/components/pilight/test_sensor.py +++ b/tests/components/pilight/test_sensor.py @@ -1,7 +1,6 @@ """The tests for the Pilight sensor platform.""" import logging -from typing import Any import pytest @@ -13,14 +12,12 @@ from tests.common import assert_setup_component, mock_component @pytest.fixture(autouse=True) -def setup_comp(hass: HomeAssistant) -> None: +def setup_comp(hass): """Initialize components.""" mock_component(hass, "pilight") -def fire_pilight_message( - hass: HomeAssistant, protocol: str, data: dict[str, Any] -) -> None: +def fire_pilight_message(hass, protocol, data): """Fire the fake Pilight message.""" message = {pilight.CONF_PROTOCOL: protocol} message.update(data) diff --git a/tests/components/ping/snapshots/test_binary_sensor.ambr b/tests/components/ping/snapshots/test_binary_sensor.ambr index 0196c2cbbfb..98ea9a8a847 100644 --- a/tests/components/ping/snapshots/test_binary_sensor.ambr +++ b/tests/components/ping/snapshots/test_binary_sensor.ambr @@ -1,4 +1,64 @@ # serializer version: 1 +# name: test_sensor + EntityRegistryEntrySnapshot({ + 'aliases': set({ + 
}), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.10_10_10_10', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': '10.10.10.10', + 'platform': 'ping', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor.1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': '10.10.10.10', + 'round_trip_time_avg': 4.333, + 'round_trip_time_max': 10, + 'round_trip_time_mdev': '', + 'round_trip_time_min': 1, + }), + 'context': , + 'entity_id': 'binary_sensor.10_10_10_10', + 'last_changed': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor.2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': '10.10.10.10', + }), + 'context': , + 'entity_id': 'binary_sensor.10_10_10_10', + 'last_changed': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_setup_and_update EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -36,6 +96,10 @@ 'attributes': ReadOnlyDict({ 'device_class': 'connectivity', 'friendly_name': '10.10.10.10', + 'round_trip_time_avg': 4.8, + 'round_trip_time_max': 10, + 'round_trip_time_mdev': None, + 'round_trip_time_min': 1, }), 'context': , 'entity_id': 'binary_sensor.10_10_10_10', @@ -50,6 +114,10 @@ 'attributes': ReadOnlyDict({ 'device_class': 'connectivity', 'friendly_name': '10.10.10.10', + 'round_trip_time_avg': None, + 'round_trip_time_max': None, + 'round_trip_time_mdev': None, + 'round_trip_time_min': None, }), 'context': , 'entity_id': 'binary_sensor.10_10_10_10', diff --git a/tests/components/ping/test_device_tracker.py b/tests/components/ping/test_device_tracker.py index 4a5d6ba94ed..5aa425226b3 100644 --- a/tests/components/ping/test_device_tracker.py +++ b/tests/components/ping/test_device_tracker.py @@ -1,12 +1,12 @@ """Test the binary sensor platform of ping.""" -from collections.abc import Generator from datetime import timedelta from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory from icmplib import Host import pytest +from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er diff --git a/tests/components/plaato/test_config_flow.py b/tests/components/plaato/test_config_flow.py index ceadab7f832..efda354f20d 100644 --- a/tests/components/plaato/test_config_flow.py +++ b/tests/components/plaato/test_config_flow.py @@ -64,8 +64,8 @@ async def test_show_config_form_device_type_airlock(hass: HomeAssistant) -> None assert result["type"] is FlowResultType.FORM assert result["step_id"] == "api_method" - assert result["data_schema"].schema.get(CONF_TOKEN) is str - assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is bool + assert result["data_schema"].schema.get(CONF_TOKEN) == str + assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass: HomeAssistant) -> None: @@ -78,7 +78,7 @@ async def test_show_config_form_device_type_keg(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "api_method" - assert 
result["data_schema"].schema.get(CONF_TOKEN) is str + assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None diff --git a/tests/components/plex/conftest.py b/tests/components/plex/conftest.py index 53c032cb08b..a061d9c1105 100644 --- a/tests/components/plex/conftest.py +++ b/tests/components/plex/conftest.py @@ -1,10 +1,10 @@ """Fixtures for Plex tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest import requests_mock +from typing_extensions import Generator from homeassistant.components.plex.const import DOMAIN, PLEX_SERVER_CONFIG, SERVERS from homeassistant.const import CONF_URL diff --git a/tests/components/plex/helpers.py b/tests/components/plex/helpers.py index 434c31996e4..4828b972d9d 100644 --- a/tests/components/plex/helpers.py +++ b/tests/components/plex/helpers.py @@ -5,7 +5,6 @@ from typing import Any from plexwebsocket import SIGNAL_CONNECTION_STATE, STATE_CONNECTED -from homeassistant.core import HomeAssistant from homeassistant.helpers.typing import UNDEFINED, UndefinedType import homeassistant.util.dt as dt_util @@ -40,7 +39,7 @@ def trigger_plex_update( callback(msgtype, UPDATE_PAYLOAD if payload is UNDEFINED else payload, None) -async def wait_for_debouncer(hass: HomeAssistant) -> None: +async def wait_for_debouncer(hass): """Move time forward to wait for sensor debouncer.""" next_update = dt_util.utcnow() + timedelta(seconds=3) async_fire_time_changed(hass, next_update) diff --git a/tests/components/plex/mock_classes.py b/tests/components/plex/mock_classes.py index 92844f755d6..c6f1aeda9b7 100644 --- a/tests/components/plex/mock_classes.py +++ b/tests/components/plex/mock_classes.py @@ -67,7 +67,7 @@ GDM_CLIENT_PAYLOAD = [ class MockGDM: """Mock a GDM instance.""" - def __init__(self, disabled=False) -> None: + def __init__(self, disabled=False): """Initialize the object.""" self.entries = [] self.disabled = disabled diff --git a/tests/components/plex/test_config_flow.py b/tests/components/plex/test_config_flow.py index c4ec108bb6b..08733a7dd17 100644 --- a/tests/components/plex/test_config_flow.py +++ b/tests/components/plex/test_config_flow.py @@ -26,6 +26,7 @@ from homeassistant.components.plex.const import ( ) from homeassistant.config_entries import ( SOURCE_INTEGRATION_DISCOVERY, + SOURCE_REAUTH, SOURCE_USER, ConfigEntryState, ) @@ -536,7 +537,7 @@ async def test_manual_config(hass: HomeAssistant, mock_plex_calls) -> None: class WrongCertValidaitionException(requests.exceptions.SSLError): """Mock the exception showing an unmatched error.""" - def __init__(self) -> None: # pylint: disable=super-init-not-called + def __init__(self): # pylint: disable=super-init-not-called self.__context__ = ssl.SSLCertVerificationError( "some random message that doesn't match" ) @@ -743,7 +744,11 @@ async def test_reauth( """Test setup and reauthorization of a Plex token.""" entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH}, + data=entry.data, + ) flow_id = result["flow_id"] with ( @@ -790,7 +795,11 @@ async def test_reauth_multiple_servers_available( entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH}, + data=entry.data, + ) flow_id = result["flow_id"] diff --git a/tests/components/plex/test_init.py 
b/tests/components/plex/test_init.py index 490091998ff..15af78faf65 100644 --- a/tests/components/plex/test_init.py +++ b/tests/components/plex/test_init.py @@ -209,7 +209,7 @@ async def test_setup_when_certificate_changed( class WrongCertHostnameException(requests.exceptions.SSLError): """Mock the exception showing a mismatched hostname.""" - def __init__(self) -> None: # pylint: disable=super-init-not-called + def __init__(self): # pylint: disable=super-init-not-called self.__context__ = ssl.SSLCertVerificationError( f"hostname '{old_domain}' doesn't match" ) diff --git a/tests/components/plex/test_media_search.py b/tests/components/plex/test_media_search.py index 04d91e8825c..8219cbe27b6 100644 --- a/tests/components/plex/test_media_search.py +++ b/tests/components/plex/test_media_search.py @@ -57,31 +57,6 @@ async def test_media_lookups( ) assert "Media for key 123 not found" in str(excinfo.value) - # Search with a different specified username - with ( - patch( - "plexapi.library.LibrarySection.search", - __qualname__="search", - ) as search, - patch( - "plexapi.myplex.MyPlexAccount.user", - __qualname__="user", - ) as plex_account_user, - ): - plex_account_user.return_value.get_token.return_value = "token" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: media_player_id, - ATTR_MEDIA_CONTENT_TYPE: MediaType.EPISODE, - ATTR_MEDIA_CONTENT_ID: '{"library_name": "TV Shows", "show_name": "TV Show", "username": "Kids"}', - }, - True, - ) - search.assert_called_with(**{"show.title": "TV Show", "libtype": "show"}) - plex_account_user.assert_called_with("Kids") - # TV show searches with pytest.raises(MediaNotFound) as excinfo: await hass.services.async_call( diff --git a/tests/components/plex/test_playback.py b/tests/components/plex/test_playback.py index c4206bd5f3e..183a779c940 100644 --- a/tests/components/plex/test_playback.py +++ b/tests/components/plex/test_playback.py @@ -28,7 +28,7 @@ class MockPlexMedia: viewOffset = 333 _server = Mock(_baseurl=PLEX_DIRECT_URL) - def __init__(self, title, mediatype) -> None: + def __init__(self, title, mediatype): """Initialize the instance.""" self.listType = mediatype self.title = title diff --git a/tests/components/plex/test_update.py b/tests/components/plex/test_update.py index 7ad2481a726..942162665af 100644 --- a/tests/components/plex/test_update.py +++ b/tests/components/plex/test_update.py @@ -9,8 +9,7 @@ from homeassistant.components.update import ( SERVICE_INSTALL, ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.core import HomeAssistant, HomeAssistantError from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry, async_fire_time_changed diff --git a/tests/components/plugwise/conftest.py b/tests/components/plugwise/conftest.py index f18c96d36c5..83826a0a543 100644 --- a/tests/components/plugwise/conftest.py +++ b/tests/components/plugwise/conftest.py @@ -2,14 +2,13 @@ from __future__ import annotations -from collections.abc import Generator import json from typing import Any from unittest.mock import AsyncMock, MagicMock, patch -from packaging.version import Version from plugwise import PlugwiseData import pytest +from typing_extensions import Generator from homeassistant.components.plugwise.const import DOMAIN from homeassistant.const import ( @@ -66,16 +65,15 @@ def mock_smile_config_flow() -> Generator[MagicMock]: 
smile = smile_mock.return_value smile.smile_hostname = "smile12345" smile.smile_model = "Test Model" - smile.smile_model_id = "Test Model ID" smile.smile_name = "Test Smile Name" - smile.connect.return_value = Version("4.3.2") + smile.connect.return_value = True yield smile @pytest.fixture def mock_smile_adam() -> Generator[MagicMock]: """Create a Mock Adam environment for testing exceptions.""" - chosen_env = "m_adam_multiple_devices_per_zone" + chosen_env = "adam_multiple_devices_per_zone" with patch( "homeassistant.components.plugwise.coordinator.Smile", autospec=True @@ -88,9 +86,8 @@ def mock_smile_adam() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" - smile.smile_model_id = "smile_open_therm" smile.smile_name = "Adam" - smile.connect.return_value = Version("3.0.15") + smile.connect.return_value = True all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -115,9 +112,8 @@ def mock_smile_adam_2() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" - smile.smile_model_id = "smile_open_therm" smile.smile_name = "Adam" - smile.connect.return_value = Version("3.6.4") + smile.connect.return_value = True all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -142,9 +138,8 @@ def mock_smile_adam_3() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" - smile.smile_model_id = "smile_open_therm" smile.smile_name = "Adam" - smile.connect.return_value = Version("3.6.4") + smile.connect.return_value = True all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -169,9 +164,8 @@ def mock_smile_adam_4() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" - smile.smile_model_id = "smile_open_therm" smile.smile_name = "Adam" - smile.connect.return_value = Version("3.2.8") + smile.connect.return_value = True all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -195,9 +189,8 @@ def mock_smile_anna() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" - smile.smile_model_id = "smile_thermo" smile.smile_name = "Smile Anna" - smile.connect.return_value = Version("4.0.15") + smile.connect.return_value = True all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -221,9 +214,8 @@ def mock_smile_anna_2() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" - smile.smile_model_id = "smile_thermo" smile.smile_name = "Smile Anna" - smile.connect.return_value = Version("4.0.15") + smile.connect.return_value = True all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -247,9 +239,8 @@ def mock_smile_anna_3() -> Generator[MagicMock]: smile.smile_type = "thermostat" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" - smile.smile_model_id = "smile_thermo" smile.smile_name = "Smile Anna" 
- smile.connect.return_value = Version("4.0.15") + smile.connect.return_value = True all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -273,9 +264,8 @@ def mock_smile_p1() -> Generator[MagicMock]: smile.smile_type = "power" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" - smile.smile_model_id = "smile" smile.smile_name = "Smile P1" - smile.connect.return_value = Version("4.4.2") + smile.connect.return_value = True all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -299,35 +289,8 @@ def mock_smile_p1_2() -> Generator[MagicMock]: smile.smile_type = "power" smile.smile_hostname = "smile98765" smile.smile_model = "Gateway" - smile.smile_model_id = "smile" smile.smile_name = "Smile P1" - smile.connect.return_value = Version("4.4.2") - all_data = _read_json(chosen_env, "all_data") - smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] - ) - - yield smile - - -@pytest.fixture -def mock_smile_legacy_anna() -> Generator[MagicMock]: - """Create a Mock legacy Anna environment for testing exceptions.""" - chosen_env = "legacy_anna" - with patch( - "homeassistant.components.plugwise.coordinator.Smile", autospec=True - ) as smile_mock: - smile = smile_mock.return_value - - smile.gateway_id = "0000aaaa0000aaaa0000aaaa0000aa00" - smile.heater_id = "04e4cbfe7f4340f090f85ec3b9e6a950" - smile.smile_version = "1.8.22" - smile.smile_type = "thermostat" - smile.smile_hostname = "smile98765" - smile.smile_model = "Gateway" - smile.smile_model_id = None - smile.smile_name = "Smile Anna" - smile.connect.return_value = Version("1.8.22") + smile.connect.return_value = True all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -351,9 +314,8 @@ def mock_stretch() -> Generator[MagicMock]: smile.smile_type = "stretch" smile.smile_hostname = "stretch98765" smile.smile_model = "Gateway" - smile.smile_model_id = None smile.smile_name = "Stretch" - smile.connect.return_value = Version("3.1.11") + smile.connect.return_value = True all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] diff --git a/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json b/tests/components/plugwise/fixtures/adam_multiple_devices_per_zone/all_data.json similarity index 91% rename from tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json rename to tests/components/plugwise/fixtures/adam_multiple_devices_per_zone/all_data.json index a182b1ac8dd..9c17df5072d 100644 --- a/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json +++ b/tests/components/plugwise/fixtures/adam_multiple_devices_per_zone/all_data.json @@ -2,11 +2,10 @@ "devices": { "02cf28bfec924855854c544690a609ef": { "available": true, - "dev_class": "vcr_plug", + "dev_class": "vcr", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", - "model_id": "160-01", "name": "NVR", "sensors": { "electricity_consumed": 34.0, @@ -23,11 +22,10 @@ }, "21f2b542c49845e6bb416884c55778d6": { "available": true, - "dev_class": "game_console_plug", + "dev_class": "game_console", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", - 
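A small sketch of the two connect() return shapes the plugwise conftest hunks above move between; the smile mock and the version string are taken from those hunks, nothing else is assumed.

from unittest.mock import MagicMock

from packaging.version import Version

smile = MagicMock()

# Shape on the removed side of these hunks: connect() resolves to the gateway
# firmware version, which supports ordered comparisons.
smile.connect.return_value = Version("4.3.2")
assert smile.connect.return_value >= Version("4.0.0")

# Shape on the added side: connect() only signals that the connection succeeded.
smile.connect.return_value = True
assert smile.connect.return_value is True
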
"model_id": "160-01", "name": "Playstation Smart Plug", "sensors": { "electricity_consumed": 84.1, @@ -44,11 +42,10 @@ }, "4a810418d5394b3f82727340b91ba740": { "available": true, - "dev_class": "router_plug", + "dev_class": "router", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", - "model_id": "160-01", "name": "USG Smart Plug", "sensors": { "electricity_consumed": 8.5, @@ -65,11 +62,10 @@ }, "675416a629f343c495449970e2ca37b5": { "available": true, - "dev_class": "router_plug", + "dev_class": "router", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", - "model_id": "160-01", "name": "Ziggo Modem", "sensors": { "electricity_consumed": 12.2, @@ -86,15 +82,11 @@ }, "680423ff840043738f42cc7f1ff97a36": { "available": true, - "binary_sensors": { - "low_battery": false - }, "dev_class": "thermo_sensor", "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "08963fec7c53423ca5680aa4cb502c63", "model": "Tom/Floor", - "model_id": "106-03", "name": "Thermostatic Radiator Badkamer", "sensors": { "battery": 51, @@ -123,16 +115,12 @@ "CV Jessie", "off" ], - "binary_sensors": { - "low_battery": false - }, "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "82fa13f017d240daa0d0ea1775420f24", "mode": "auto", "model": "Lisa", - "model_id": "158-01", "name": "Zone Thermostat Jessie", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "select_schedule": "CV Jessie", @@ -158,11 +146,10 @@ }, "78d1126fc4c743db81b61c20e88342a7": { "available": true, - "dev_class": "central_heating_pump_plug", + "dev_class": "central_heating_pump", "firmware": "2019-06-21T02:00:00+02:00", "location": "c50f167537524366a5af7aa3942feb1e", "model": "Plug", - "model_id": "160-01", "name": "CV Pomp", "sensors": { "electricity_consumed": 35.6, @@ -192,11 +179,10 @@ }, "a28f588dc4a049a483fd03a30361ad3a": { "available": true, - "dev_class": "settop_plug", + "dev_class": "settop", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", - "model_id": "160-01", "name": "Fibaro HC2", "sensors": { "electricity_consumed": 12.5, @@ -213,15 +199,11 @@ }, "a2c3583e0a6349358998b760cea82d2a": { "available": true, - "binary_sensors": { - "low_battery": false - }, "dev_class": "thermo_sensor", "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "12493538af164a409c6a1c79e38afe1c", "model": "Tom/Floor", - "model_id": "106-03", "name": "Bios Cv Thermostatic Radiator ", "sensors": { "battery": 62, @@ -246,7 +228,6 @@ "hardware": "1", "location": "c50f167537524366a5af7aa3942feb1e", "model": "Tom/Floor", - "model_id": "106-03", "name": "Floor kraan", "sensors": { "setpoint": 21.5, @@ -274,16 +255,12 @@ "CV Jessie", "off" ], - "binary_sensors": { - "low_battery": false - }, "dev_class": "zone_thermostat", "firmware": "2016-08-02T02:00:00+02:00", "hardware": "255", "location": "c50f167537524366a5af7aa3942feb1e", "mode": "auto", "model": "Lisa", - "model_id": "158-01", "name": "Zone Lisa WK", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "select_schedule": "GF7 Woonkamer", @@ -309,11 +286,10 @@ }, "cd0ddb54ef694e11ac18ed1cbce5dbbd": { "available": true, - "dev_class": "vcr_plug", + "dev_class": "vcr", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", - "model_id": "160-01", "name": "NAS", "sensors": { 
"electricity_consumed": 16.5, @@ -330,15 +306,11 @@ }, "d3da73bde12a47d5a6b8f9dad971f2ec": { "available": true, - "binary_sensors": { - "low_battery": false - }, "dev_class": "thermo_sensor", "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "82fa13f017d240daa0d0ea1775420f24", "model": "Tom/Floor", - "model_id": "106-03", "name": "Thermostatic Radiator Jessie", "sensors": { "battery": 62, @@ -367,16 +339,12 @@ "CV Jessie", "off" ], - "binary_sensors": { - "low_battery": false - }, "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "12493538af164a409c6a1c79e38afe1c", "mode": "heat", "model": "Lisa", - "model_id": "158-01", "name": "Zone Lisa Bios", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "select_schedule": "off", @@ -403,18 +371,23 @@ "e7693eb9582644e5b865dba8d4447cf1": { "active_preset": "no_frost", "available": true, - "binary_sensors": { - "low_battery": false - }, + "available_schedules": [ + "CV Roan", + "Bios Schema met Film Avond", + "GF7 Woonkamer", + "Badkamer Schema", + "CV Jessie", + "off" + ], "dev_class": "thermostatic_radiator_valve", "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "446ac08dd04d4eff8ac57489757b7314", "mode": "heat", "model": "Tom/Floor", - "model_id": "106-03", "name": "CV Kraan Garage", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "select_schedule": "off", "sensors": { "battery": 68, "setpoint": 5.5, @@ -448,16 +421,12 @@ "CV Jessie", "off" ], - "binary_sensors": { - "low_battery": false - }, "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "08963fec7c53423ca5680aa4cb502c63", "mode": "auto", "model": "Lisa", - "model_id": "158-01", "name": "Zone Thermostat Badkamer", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "select_schedule": "Badkamer Schema", @@ -491,7 +460,6 @@ "location": "1f9dcf83fd4e4b66b72ff787957bfe5d", "mac_address": "012345670001", "model": "Gateway", - "model_id": "smile_open_therm", "name": "Adam", "select_regulation_mode": "heating", "sensors": { @@ -505,7 +473,7 @@ "cooling_present": false, "gateway_id": "fe799307f1624099878210aa0b9f1475", "heater_id": "90986d591dcd426cae3ec3e8111ff730", - "item_count": 340, + "item_count": 315, "notifications": { "af82e4ccf9c548528166d38e560662a4": { "warning": "Node Plug (with MAC address 000D6F000D13CB01, in room 'n.a.') has been unreachable since 23:03 2020-01-18. Please check the connection and restart the device." 
diff --git a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json index b767f5531f2..5088281404a 100644 --- a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json +++ b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json @@ -10,7 +10,6 @@ "location": "a57efe5f145f498c9be62a9b63626fbf", "mac_address": "012345670001", "model": "Gateway", - "model_id": "smile_thermo", "name": "Smile Anna", "sensors": { "outdoor_temperature": 20.2 @@ -98,7 +97,7 @@ "cooling_present": true, "gateway_id": "015ae9ea3f964e668e490fa39da3870b", "heater_id": "1cbf783bb11e4a7c8a6843dee3a86927", - "item_count": 67, + "item_count": 66, "notifications": {}, "reboot": true, "smile_name": "Smile Anna" diff --git a/tests/components/plugwise/fixtures/legacy_anna/all_data.json b/tests/components/plugwise/fixtures/legacy_anna/all_data.json deleted file mode 100644 index 1eca4e285cc..00000000000 --- a/tests/components/plugwise/fixtures/legacy_anna/all_data.json +++ /dev/null @@ -1,68 +0,0 @@ -{ - "devices": { - "0000aaaa0000aaaa0000aaaa0000aa00": { - "dev_class": "gateway", - "firmware": "1.8.22", - "location": "0000aaaa0000aaaa0000aaaa0000aa00", - "mac_address": "01:23:45:67:89:AB", - "model": "Gateway", - "name": "Smile Anna", - "vendor": "Plugwise" - }, - "04e4cbfe7f4340f090f85ec3b9e6a950": { - "binary_sensors": { - "flame_state": true, - "heating_state": true - }, - "dev_class": "heater_central", - "location": "0000aaaa0000aaaa0000aaaa0000aa00", - "maximum_boiler_temperature": { - "lower_bound": 50.0, - "resolution": 1.0, - "setpoint": 50.0, - "upper_bound": 90.0 - }, - "model": "Generic heater", - "name": "OpenTherm", - "sensors": { - "dhw_temperature": 51.2, - "intended_boiler_temperature": 17.0, - "modulation_level": 0.0, - "return_temperature": 21.7, - "water_pressure": 1.2, - "water_temperature": 23.6 - }, - "vendor": "Bosch Thermotechniek B.V." 
- }, - "0d266432d64443e283b5d708ae98b455": { - "active_preset": "home", - "dev_class": "thermostat", - "firmware": "2017-03-13T11:54:58+01:00", - "hardware": "6539-1301-500", - "location": "0000aaaa0000aaaa0000aaaa0000aa00", - "mode": "heat", - "model": "ThermoTouch", - "name": "Anna", - "preset_modes": ["away", "vacation", "asleep", "home", "no_frost"], - "sensors": { - "illuminance": 151, - "setpoint": 20.5, - "temperature": 20.4 - }, - "thermostat": { - "lower_bound": 4.0, - "resolution": 0.1, - "setpoint": 20.5, - "upper_bound": 30.0 - }, - "vendor": "Plugwise" - } - }, - "gateway": { - "cooling_present": false, - "gateway_id": "0000aaaa0000aaaa0000aaaa0000aa00", - "heater_id": "04e4cbfe7f4340f090f85ec3b9e6a950", - "item_count": 41, - "smile_name": "Smile Anna" - } -} diff --git a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json index 166b13b84ff..759d0094dbb 100644 --- a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json @@ -28,15 +28,11 @@ }, "1772a4ea304041adb83f357b751341ff": { "available": true, - "binary_sensors": { - "low_battery": false - }, "dev_class": "thermo_sensor", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "f871b8c4d63549319221e294e4f88074", "model": "Tom/Floor", - "model_id": "106-03", "name": "Tom Badkamer", "sensors": { "battery": 99, @@ -68,7 +64,6 @@ "location": "f2bf9048bef64cc5b6d5110154e33c81", "mode": "cool", "model": "ThermoTouch", - "model_id": "143.1", "name": "Anna", "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], "select_schedule": "off", @@ -95,7 +90,6 @@ "location": "bc93488efab249e5bc54fd7e175a6f91", "mac_address": "012345679891", "model": "Gateway", - "model_id": "smile_open_therm", "name": "Adam", "regulation_modes": [ "bleeding_hot", @@ -122,9 +116,6 @@ "Weekschema", "off" ], - "binary_sensors": { - "low_battery": true - }, "control_state": "preheating", "dev_class": "zone_thermostat", "firmware": "2016-10-10T02:00:00+02:00", @@ -132,12 +123,11 @@ "location": "f871b8c4d63549319221e294e4f88074", "mode": "auto", "model": "Lisa", - "model_id": "158-01", "name": "Lisa Badkamer", "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], "select_schedule": "Badkamer", "sensors": { - "battery": 14, + "battery": 38, "setpoint": 23.5, "temperature": 23.9 }, @@ -173,7 +163,7 @@ "cooling_present": true, "gateway_id": "da224107914542988a88561b4452b0f6", "heater_id": "056ee145a816487eaa69243c3280f8bf", - "item_count": 157, + "item_count": 147, "notifications": {}, "reboot": true, "smile_name": "Adam" diff --git a/tests/components/plugwise/fixtures/m_adam_heating/all_data.json b/tests/components/plugwise/fixtures/m_adam_heating/all_data.json index 61935f1306a..e2c23df42d6 100644 --- a/tests/components/plugwise/fixtures/m_adam_heating/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_heating/all_data.json @@ -33,15 +33,11 @@ }, "1772a4ea304041adb83f357b751341ff": { "available": true, - "binary_sensors": { - "low_battery": false - }, "dev_class": "thermo_sensor", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "f871b8c4d63549319221e294e4f88074", "model": "Tom/Floor", - "model_id": "106-03", "name": "Tom Badkamer", "sensors": { "battery": 99, @@ -73,7 +69,6 @@ "location": "f2bf9048bef64cc5b6d5110154e33c81", "mode": "heat", "model": "ThermoTouch", - "model_id": "143.1", "name": "Anna", "preset_modes": ["no_frost", 
"asleep", "vacation", "home", "away"], "select_schedule": "off", @@ -100,7 +95,6 @@ "location": "bc93488efab249e5bc54fd7e175a6f91", "mac_address": "012345679891", "model": "Gateway", - "model_id": "smile_open_therm", "name": "Adam", "regulation_modes": ["bleeding_hot", "bleeding_cold", "off", "heating"], "select_gateway_mode": "full", @@ -121,9 +115,6 @@ "Weekschema", "off" ], - "binary_sensors": { - "low_battery": true - }, "control_state": "off", "dev_class": "zone_thermostat", "firmware": "2016-10-10T02:00:00+02:00", @@ -131,12 +122,11 @@ "location": "f871b8c4d63549319221e294e4f88074", "mode": "auto", "model": "Lisa", - "model_id": "158-01", "name": "Lisa Badkamer", "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], "select_schedule": "Badkamer", "sensors": { - "battery": 14, + "battery": 38, "setpoint": 15.0, "temperature": 17.9 }, @@ -172,7 +162,7 @@ "cooling_present": false, "gateway_id": "da224107914542988a88561b4452b0f6", "heater_id": "056ee145a816487eaa69243c3280f8bf", - "item_count": 157, + "item_count": 147, "notifications": {}, "reboot": true, "smile_name": "Adam" diff --git a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json index ec2095648b8..7888d777804 100644 --- a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json @@ -3,9 +3,6 @@ "1346fbd8498d4dbcab7e18d51b771f3d": { "active_preset": "no_frost", "available": true, - "binary_sensors": { - "low_battery": false - }, "control_state": "off", "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", @@ -13,7 +10,6 @@ "location": "06aecb3d00354375924f50c47af36bd2", "mode": "off", "model": "Lisa", - "model_id": "158-01", "name": "Slaapkamer", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "sensors": { @@ -43,7 +39,6 @@ "hardware": "1", "location": "d58fec52899f4f1c92e4f8fad6d8c48c", "model": "Tom/Floor", - "model_id": "106-03", "name": "Tom Logeerkamer", "sensors": { "setpoint": 13.0, @@ -67,7 +62,6 @@ "hardware": "1", "location": "06aecb3d00354375924f50c47af36bd2", "model": "Tom/Floor", - "model_id": "106-03", "name": "Tom Slaapkamer", "sensors": { "setpoint": 13.0, @@ -86,10 +80,9 @@ }, "457ce8414de24596a2d5e7dbc9c7682f": { "available": true, - "dev_class": "zz_misc_plug", + "dev_class": "zz_misc", "location": "9e4433a9d69f40b3aefd15e74395eaec", - "model": "Aqara Smart Plug", - "model_id": "lumi.plug.maeu01", + "model": "lumi.plug.maeu01", "name": "Plug", "sensors": { "electricity_consumed_interval": 0.0 @@ -104,9 +97,6 @@ "6f3e9d7084214c21b9dfa46f6eeb8700": { "active_preset": "home", "available": true, - "binary_sensors": { - "low_battery": false - }, "control_state": "off", "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", @@ -114,7 +104,6 @@ "location": "d27aede973b54be484f6842d1b2802ad", "mode": "heat", "model": "Lisa", - "model_id": "158-01", "name": "Kinderkamer", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "sensors": { @@ -144,7 +133,6 @@ "hardware": "1", "location": "13228dab8ce04617af318a2888b3c548", "model": "Tom/Floor", - "model_id": "106-03", "name": "Tom Woonkamer", "sensors": { "setpoint": 9.0, @@ -164,9 +152,6 @@ "a6abc6a129ee499c88a4d420cc413b47": { "active_preset": "home", "available": true, - "binary_sensors": { - "low_battery": false - }, "control_state": "off", "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", @@ -174,7 +159,6 @@ "location": 
"d58fec52899f4f1c92e4f8fad6d8c48c", "mode": "heat", "model": "Lisa", - "model_id": "158-01", "name": "Logeerkamer", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "sensors": { @@ -208,7 +192,6 @@ "location": "9e4433a9d69f40b3aefd15e74395eaec", "mac_address": "012345670001", "model": "Gateway", - "model_id": "smile_open_therm", "name": "Adam", "regulation_modes": ["heating", "off", "bleeding_cold", "bleeding_hot"], "select_gateway_mode": "full", @@ -226,7 +209,6 @@ "hardware": "1", "location": "d27aede973b54be484f6842d1b2802ad", "model": "Tom/Floor", - "model_id": "106-03", "name": "Tom Kinderkamer", "sensors": { "setpoint": 13.0, @@ -264,8 +246,7 @@ "setpoint": 90.0, "upper_bound": 90.0 }, - "model": "Generic heater", - "model_id": "10.20", + "model": "10.20", "name": "OpenTherm", "sensors": { "intended_boiler_temperature": 0.0, @@ -282,9 +263,6 @@ "f61f1a2535f54f52ad006a3d18e459ca": { "active_preset": "home", "available": true, - "binary_sensors": { - "low_battery": false - }, "control_state": "off", "dev_class": "zone_thermometer", "firmware": "2020-09-01T02:00:00+02:00", @@ -292,7 +270,6 @@ "location": "13228dab8ce04617af318a2888b3c548", "mode": "heat", "model": "Jip", - "model_id": "168-01", "name": "Woonkamer", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "sensors": { @@ -321,7 +298,7 @@ "cooling_present": false, "gateway_id": "b5c2386c6f6342669e50fe49dd05b188", "heater_id": "e4684553153b44afbef2200885f379dc", - "item_count": 228, + "item_count": 213, "notifications": {}, "reboot": true, "smile_name": "Adam" diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json index 05f5e0ffa46..cb30b919797 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json @@ -10,7 +10,6 @@ "location": "a57efe5f145f498c9be62a9b63626fbf", "mac_address": "012345670001", "model": "Gateway", - "model_id": "smile_thermo", "name": "Smile Anna", "sensors": { "outdoor_temperature": 28.2 @@ -98,7 +97,7 @@ "cooling_present": true, "gateway_id": "015ae9ea3f964e668e490fa39da3870b", "heater_id": "1cbf783bb11e4a7c8a6843dee3a86927", - "item_count": 67, + "item_count": 66, "notifications": {}, "reboot": true, "smile_name": "Smile Anna" diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json index 327a87f9409..660f6b5a76b 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json @@ -10,7 +10,6 @@ "location": "a57efe5f145f498c9be62a9b63626fbf", "mac_address": "012345670001", "model": "Gateway", - "model_id": "smile_thermo", "name": "Smile Anna", "sensors": { "outdoor_temperature": 28.2 @@ -98,7 +97,7 @@ "cooling_present": true, "gateway_id": "015ae9ea3f964e668e490fa39da3870b", "heater_id": "1cbf783bb11e4a7c8a6843dee3a86927", - "item_count": 67, + "item_count": 66, "notifications": {}, "reboot": true, "smile_name": "Smile Anna" diff --git a/tests/components/plugwise/fixtures/p1v4_442_single/all_data.json b/tests/components/plugwise/fixtures/p1v4_442_single/all_data.json index 3ea4bb01be2..7f152779252 100644 --- a/tests/components/plugwise/fixtures/p1v4_442_single/all_data.json +++ b/tests/components/plugwise/fixtures/p1v4_442_single/all_data.json @@ -10,7 +10,6 @@ "location": 
"a455b61e52394b2db5081ce025a430f3", "mac_address": "012345670001", "model": "Gateway", - "model_id": "smile", "name": "Smile P1", "vendor": "Plugwise" }, @@ -43,7 +42,7 @@ }, "gateway": { "gateway_id": "a455b61e52394b2db5081ce025a430f3", - "item_count": 32, + "item_count": 31, "notifications": {}, "reboot": true, "smile_name": "Smile P1" diff --git a/tests/components/plugwise/fixtures/p1v4_442_triple/all_data.json b/tests/components/plugwise/fixtures/p1v4_442_triple/all_data.json index b7476b24a1e..582c883a3a7 100644 --- a/tests/components/plugwise/fixtures/p1v4_442_triple/all_data.json +++ b/tests/components/plugwise/fixtures/p1v4_442_triple/all_data.json @@ -10,7 +10,6 @@ "location": "03e65b16e4b247a29ae0d75a78cb492e", "mac_address": "012345670001", "model": "Gateway", - "model_id": "smile", "name": "Smile P1", "vendor": "Plugwise" }, @@ -52,7 +51,7 @@ }, "gateway": { "gateway_id": "03e65b16e4b247a29ae0d75a78cb492e", - "item_count": 41, + "item_count": 40, "notifications": { "97a04c0c263049b29350a660b4cdd01e": { "warning": "The Smile P1 is not connected to a smart meter." diff --git a/tests/components/plugwise/snapshots/test_diagnostics.ambr b/tests/components/plugwise/snapshots/test_diagnostics.ambr index d187e0355bf..44f4023d014 100644 --- a/tests/components/plugwise/snapshots/test_diagnostics.ambr +++ b/tests/components/plugwise/snapshots/test_diagnostics.ambr @@ -4,11 +4,10 @@ 'devices': dict({ '02cf28bfec924855854c544690a609ef': dict({ 'available': True, - 'dev_class': 'vcr_plug', + 'dev_class': 'vcr', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', - 'model_id': '160-01', 'name': 'NVR', 'sensors': dict({ 'electricity_consumed': 34.0, @@ -25,11 +24,10 @@ }), '21f2b542c49845e6bb416884c55778d6': dict({ 'available': True, - 'dev_class': 'game_console_plug', + 'dev_class': 'game_console', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', - 'model_id': '160-01', 'name': 'Playstation Smart Plug', 'sensors': dict({ 'electricity_consumed': 84.1, @@ -46,11 +44,10 @@ }), '4a810418d5394b3f82727340b91ba740': dict({ 'available': True, - 'dev_class': 'router_plug', + 'dev_class': 'router', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', - 'model_id': '160-01', 'name': 'USG Smart Plug', 'sensors': dict({ 'electricity_consumed': 8.5, @@ -67,11 +64,10 @@ }), '675416a629f343c495449970e2ca37b5': dict({ 'available': True, - 'dev_class': 'router_plug', + 'dev_class': 'router', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', - 'model_id': '160-01', 'name': 'Ziggo Modem', 'sensors': dict({ 'electricity_consumed': 12.2, @@ -88,15 +84,11 @@ }), '680423ff840043738f42cc7f1ff97a36': dict({ 'available': True, - 'binary_sensors': dict({ - 'low_battery': False, - }), 'dev_class': 'thermo_sensor', 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': '08963fec7c53423ca5680aa4cb502c63', 'model': 'Tom/Floor', - 'model_id': '106-03', 'name': 'Thermostatic Radiator Badkamer', 'sensors': dict({ 'battery': 51, @@ -125,16 +117,12 @@ 'CV Jessie', 'off', ]), - 'binary_sensors': dict({ - 'low_battery': False, - }), 'dev_class': 'zone_thermostat', 'firmware': '2016-10-27T02:00:00+02:00', 'hardware': '255', 'location': '82fa13f017d240daa0d0ea1775420f24', 'mode': 'auto', 'model': 'Lisa', - 'model_id': '158-01', 'name': 'Zone Thermostat Jessie', 'preset_modes': list([ 'home', @@ -166,11 
+154,10 @@ }), '78d1126fc4c743db81b61c20e88342a7': dict({ 'available': True, - 'dev_class': 'central_heating_pump_plug', + 'dev_class': 'central_heating_pump', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'c50f167537524366a5af7aa3942feb1e', 'model': 'Plug', - 'model_id': '160-01', 'name': 'CV Pomp', 'sensors': dict({ 'electricity_consumed': 35.6, @@ -200,11 +187,10 @@ }), 'a28f588dc4a049a483fd03a30361ad3a': dict({ 'available': True, - 'dev_class': 'settop_plug', + 'dev_class': 'settop', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', - 'model_id': '160-01', 'name': 'Fibaro HC2', 'sensors': dict({ 'electricity_consumed': 12.5, @@ -221,15 +207,11 @@ }), 'a2c3583e0a6349358998b760cea82d2a': dict({ 'available': True, - 'binary_sensors': dict({ - 'low_battery': False, - }), 'dev_class': 'thermo_sensor', 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': '12493538af164a409c6a1c79e38afe1c', 'model': 'Tom/Floor', - 'model_id': '106-03', 'name': 'Bios Cv Thermostatic Radiator ', 'sensors': dict({ 'battery': 62, @@ -254,7 +236,6 @@ 'hardware': '1', 'location': 'c50f167537524366a5af7aa3942feb1e', 'model': 'Tom/Floor', - 'model_id': '106-03', 'name': 'Floor kraan', 'sensors': dict({ 'setpoint': 21.5, @@ -282,16 +263,12 @@ 'CV Jessie', 'off', ]), - 'binary_sensors': dict({ - 'low_battery': False, - }), 'dev_class': 'zone_thermostat', 'firmware': '2016-08-02T02:00:00+02:00', 'hardware': '255', 'location': 'c50f167537524366a5af7aa3942feb1e', 'mode': 'auto', 'model': 'Lisa', - 'model_id': '158-01', 'name': 'Zone Lisa WK', 'preset_modes': list([ 'home', @@ -323,11 +300,10 @@ }), 'cd0ddb54ef694e11ac18ed1cbce5dbbd': dict({ 'available': True, - 'dev_class': 'vcr_plug', + 'dev_class': 'vcr', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', - 'model_id': '160-01', 'name': 'NAS', 'sensors': dict({ 'electricity_consumed': 16.5, @@ -344,15 +320,11 @@ }), 'd3da73bde12a47d5a6b8f9dad971f2ec': dict({ 'available': True, - 'binary_sensors': dict({ - 'low_battery': False, - }), 'dev_class': 'thermo_sensor', 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': '82fa13f017d240daa0d0ea1775420f24', 'model': 'Tom/Floor', - 'model_id': '106-03', 'name': 'Thermostatic Radiator Jessie', 'sensors': dict({ 'battery': 62, @@ -381,16 +353,12 @@ 'CV Jessie', 'off', ]), - 'binary_sensors': dict({ - 'low_battery': False, - }), 'dev_class': 'zone_thermostat', 'firmware': '2016-10-27T02:00:00+02:00', 'hardware': '255', 'location': '12493538af164a409c6a1c79e38afe1c', 'mode': 'heat', 'model': 'Lisa', - 'model_id': '158-01', 'name': 'Zone Lisa Bios', 'preset_modes': list([ 'home', @@ -423,16 +391,20 @@ 'e7693eb9582644e5b865dba8d4447cf1': dict({ 'active_preset': 'no_frost', 'available': True, - 'binary_sensors': dict({ - 'low_battery': False, - }), + 'available_schedules': list([ + 'CV Roan', + 'Bios Schema met Film Avond', + 'GF7 Woonkamer', + 'Badkamer Schema', + 'CV Jessie', + 'off', + ]), 'dev_class': 'thermostatic_radiator_valve', 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': '446ac08dd04d4eff8ac57489757b7314', 'mode': 'heat', 'model': 'Tom/Floor', - 'model_id': '106-03', 'name': 'CV Kraan Garage', 'preset_modes': list([ 'home', @@ -441,6 +413,7 @@ 'vacation', 'no_frost', ]), + 'select_schedule': 'off', 'sensors': dict({ 'battery': 68, 'setpoint': 5.5, @@ -474,16 +447,12 @@ 'CV Jessie', 'off', ]), - 'binary_sensors': dict({ - 'low_battery': False, - }), 
'dev_class': 'zone_thermostat', 'firmware': '2016-10-27T02:00:00+02:00', 'hardware': '255', 'location': '08963fec7c53423ca5680aa4cb502c63', 'mode': 'auto', 'model': 'Lisa', - 'model_id': '158-01', 'name': 'Zone Thermostat Badkamer', 'preset_modes': list([ 'home', @@ -523,7 +492,6 @@ 'location': '1f9dcf83fd4e4b66b72ff787957bfe5d', 'mac_address': '012345670001', 'model': 'Gateway', - 'model_id': 'smile_open_therm', 'name': 'Adam', 'select_regulation_mode': 'heating', 'sensors': dict({ @@ -537,7 +505,7 @@ 'cooling_present': False, 'gateway_id': 'fe799307f1624099878210aa0b9f1475', 'heater_id': '90986d591dcd426cae3ec3e8111ff730', - 'item_count': 340, + 'item_count': 315, 'notifications': dict({ 'af82e4ccf9c548528166d38e560662a4': dict({ 'warning': "Node Plug (with MAC address 000D6F000D13CB01, in room 'n.a.') has been unreachable since 23:03 2020-01-18. Please check the connection and restart the device.", diff --git a/tests/components/plugwise/test_binary_sensor.py b/tests/components/plugwise/test_binary_sensor.py index 5c0e3fbdd2e..878300bddb4 100644 --- a/tests/components/plugwise/test_binary_sensor.py +++ b/tests/components/plugwise/test_binary_sensor.py @@ -56,7 +56,7 @@ async def test_anna_climate_binary_sensor_change( async def test_adam_climate_binary_sensor_change( hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry ) -> None: - """Test of a climate related plugwise-notification binary_sensor.""" + """Test change of climate related binary_sensor entities.""" state = hass.states.get("binary_sensor.adam_plugwise_notification") assert state assert state.state == STATE_ON @@ -64,14 +64,3 @@ async def test_adam_climate_binary_sensor_change( assert "unreachable" in state.attributes["warning_msg"][0] assert not state.attributes.get("error_msg") assert not state.attributes.get("other_msg") - - -async def test_p1_v4_binary_sensor_entity( - hass: HomeAssistant, mock_smile_p1_2: MagicMock, init_integration: MockConfigEntry -) -> None: - """Test of a Smile P1 related plugwise-notification binary_sensor.""" - state = hass.states.get("binary_sensor.smile_p1_plugwise_notification") - assert state - assert state.state == STATE_ON - assert "warning_msg" in state.attributes - assert "connected" in state.attributes["warning_msg"][0] diff --git a/tests/components/plugwise/test_climate.py b/tests/components/plugwise/test_climate.py index f846e818b6e..c91e4d37ba6 100644 --- a/tests/components/plugwise/test_climate.py +++ b/tests/components/plugwise/test_climate.py @@ -3,7 +3,6 @@ from datetime import timedelta from unittest.mock import MagicMock, patch -from freezegun.api import FrozenDateTimeFactory from plugwise.exceptions import PlugwiseError import pytest @@ -15,7 +14,8 @@ from homeassistant.components.climate import ( HVACMode, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import HomeAssistantError +from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, async_fire_time_changed @@ -90,13 +90,11 @@ async def test_adam_2_climate_entity_attributes( async def test_adam_3_climate_entity_attributes( - hass: HomeAssistant, - mock_smile_adam_3: MagicMock, - init_integration: MockConfigEntry, - freezer: FrozenDateTimeFactory, + hass: HomeAssistant, mock_smile_adam_3: MagicMock, init_integration: MockConfigEntry ) -> None: """Test creation of adam climate device environment.""" state = hass.states.get("climate.anna") + assert state assert 
state.state == HVACMode.COOL assert state.attributes["hvac_action"] == "cooling" @@ -117,20 +115,17 @@ async def test_adam_3_climate_entity_attributes( "heating_state" ] = True with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, utcnow() + timedelta(minutes=1)) await hass.async_block_till_done() - - state = hass.states.get("climate.anna") - assert state - assert state.state == HVACMode.HEAT - assert state.attributes["hvac_action"] == "heating" - assert state.attributes["hvac_modes"] == [ - HVACMode.OFF, - HVACMode.AUTO, - HVACMode.HEAT, - ] - + state = hass.states.get("climate.anna") + assert state + assert state.state == HVACMode.HEAT + assert state.attributes["hvac_action"] == "heating" + assert state.attributes["hvac_modes"] == [ + HVACMode.OFF, + HVACMode.AUTO, + HVACMode.HEAT, + ] data = mock_smile_adam_3.async_update.return_value data.devices["da224107914542988a88561b4452b0f6"]["select_regulation_mode"] = ( "cooling" @@ -143,25 +138,23 @@ async def test_adam_3_climate_entity_attributes( "heating_state" ] = False with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, utcnow() + timedelta(minutes=1)) await hass.async_block_till_done() - - state = hass.states.get("climate.anna") - assert state - assert state.state == HVACMode.COOL - assert state.attributes["hvac_action"] == "cooling" - assert state.attributes["hvac_modes"] == [ - HVACMode.OFF, - HVACMode.AUTO, - HVACMode.COOL, - ] + state = hass.states.get("climate.anna") + assert state + assert state.state == HVACMode.COOL + assert state.attributes["hvac_action"] == "cooling" + assert state.attributes["hvac_modes"] == [ + HVACMode.OFF, + HVACMode.AUTO, + HVACMode.COOL, + ] async def test_adam_climate_adjust_negative_testing( hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry ) -> None: - """Test PlugwiseError exception.""" + """Test exceptions of climate entities.""" mock_smile_adam.set_temperature.side_effect = PlugwiseError with pytest.raises(HomeAssistantError): @@ -203,7 +196,7 @@ async def test_adam_climate_entity_climate_changes( "c50f167537524366a5af7aa3942feb1e", {"setpoint": 25.0} ) - with pytest.raises(ServiceValidationError): + with pytest.raises(ValueError): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, @@ -363,7 +356,6 @@ async def test_anna_climate_entity_climate_changes( hass: HomeAssistant, mock_smile_anna: MagicMock, init_integration: MockConfigEntry, - freezer: FrozenDateTimeFactory, ) -> None: """Test handling of user requests in anna climate device environment.""" await hass.services.async_call( @@ -408,14 +400,11 @@ async def test_anna_climate_entity_climate_changes( mock_smile_anna.set_schedule_state.assert_called_with( "c784ee9fdab44e1395b8dee7d7a497d5", "off" ) - data = mock_smile_anna.async_update.return_value data.devices["3cb70739631c4d17a86b8b12e8a5161b"].pop("available_schedules") with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, utcnow() + timedelta(minutes=1)) await hass.async_block_till_done() - state = hass.states.get("climate.anna") assert state.state == HVACMode.HEAT assert state.attributes["hvac_modes"] == [HVACMode.HEAT_COOL] diff --git a/tests/components/plugwise/test_config_flow.py 
b/tests/components/plugwise/test_config_flow.py index baf6edea9c7..4b7c567baa8 100644 --- a/tests/components/plugwise/test_config_flow.py +++ b/tests/components/plugwise/test_config_flow.py @@ -1,18 +1,19 @@ """Test the Plugwise config flow.""" from ipaddress import ip_address -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import AsyncMock, MagicMock, patch from plugwise.exceptions import ( ConnectionFailedError, InvalidAuthentication, InvalidSetupError, InvalidXMLError, + ResponseError, UnsupportedDeviceError, ) import pytest -from homeassistant.components.plugwise.const import DEFAULT_PORT, DOMAIN +from homeassistant.components.plugwise.const import API, DEFAULT_PORT, DOMAIN, PW_TYPE from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import ( @@ -94,6 +95,22 @@ TEST_DISCOVERY_ADAM = ZeroconfServiceInfo( ) +@pytest.fixture(name="mock_smile") +def mock_smile(): + """Create a Mock Smile for testing exceptions.""" + with patch( + "homeassistant.components.plugwise.config_flow.Smile", + ) as smile_mock: + smile_mock.ConnectionFailedError = ConnectionFailedError + smile_mock.InvalidAuthentication = InvalidAuthentication + smile_mock.InvalidSetupError = InvalidSetupError + smile_mock.InvalidXMLError = InvalidXMLError + smile_mock.ResponseError = ResponseError + smile_mock.UnsupportedDeviceError = UnsupportedDeviceError + smile_mock.return_value.connect.return_value = True + yield smile_mock.return_value + + async def test_form( hass: HomeAssistant, mock_setup_entry: AsyncMock, @@ -123,6 +140,7 @@ async def test_form( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: DEFAULT_PORT, CONF_USERNAME: TEST_USERNAME, + PW_TYPE: API, } assert len(mock_setup_entry.mock_calls) == 1 @@ -147,12 +165,11 @@ async def test_zeroconf_flow( result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_ZEROCONF}, - data=TEST_DISCOVERY, + data=discovery, ) assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} assert result.get("step_id") == "user" - assert "flow_id" in result result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -166,7 +183,8 @@ async def test_zeroconf_flow( CONF_HOST: TEST_HOST, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: DEFAULT_PORT, - CONF_USERNAME: TEST_USERNAME, + CONF_USERNAME: username, + PW_TYPE: API, } assert len(mock_setup_entry.mock_calls) == 1 @@ -187,7 +205,6 @@ async def test_zeroconf_flow_stretch( assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} assert result.get("step_id") == "user" - assert "flow_id" in result result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -202,6 +219,7 @@ async def test_zeroconf_flow_stretch( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: DEFAULT_PORT, CONF_USERNAME: TEST_USERNAME2, + PW_TYPE: API, } assert len(mock_setup_entry.mock_calls) == 1 @@ -258,6 +276,7 @@ async def test_zercoconf_discovery_update_configuration( (InvalidAuthentication, "invalid_auth"), (InvalidSetupError, "invalid_setup"), (InvalidXMLError, "response_error"), + (ResponseError, "response_error"), (RuntimeError, "unknown"), (UnsupportedDeviceError, "unsupported"), ], @@ -277,7 +296,6 @@ async def test_flow_errors( assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} assert result.get("step_id") == "user" - assert "flow_id" in result mock_smile_config_flow.connect.side_effect = side_effect result2 = await 
hass.config_entries.flow.async_configure( @@ -305,6 +323,7 @@ async def test_flow_errors( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: DEFAULT_PORT, CONF_USERNAME: TEST_USERNAME, + PW_TYPE: API, } assert len(mock_setup_entry.mock_calls) == 1 @@ -336,9 +355,9 @@ async def test_zeroconf_abort_anna_with_adam(hass: HomeAssistant) -> None: assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "user" - flows_in_progress = hass.config_entries.flow._handler_progress_index[DOMAIN] + flows_in_progress = hass.config_entries.flow.async_progress() assert len(flows_in_progress) == 1 - assert list(flows_in_progress)[0].product == "smile_thermo" + assert flows_in_progress[0]["context"]["product"] == "smile_thermo" # Discover Adam, Anna should be aborted and no longer present result2 = await hass.config_entries.flow.async_init( @@ -350,9 +369,9 @@ async def test_zeroconf_abort_anna_with_adam(hass: HomeAssistant) -> None: assert result2.get("type") is FlowResultType.FORM assert result2.get("step_id") == "user" - flows_in_progress = hass.config_entries.flow._handler_progress_index[DOMAIN] + flows_in_progress = hass.config_entries.flow.async_progress() assert len(flows_in_progress) == 1 - assert list(flows_in_progress)[0].product == "smile_open_therm" + assert flows_in_progress[0]["context"]["product"] == "smile_open_therm" # Discover Anna again, Anna should be aborted directly result3 = await hass.config_entries.flow.async_init( @@ -364,6 +383,6 @@ async def test_zeroconf_abort_anna_with_adam(hass: HomeAssistant) -> None: assert result3.get("reason") == "anna_with_adam" # Adam should still be there - flows_in_progress = hass.config_entries.flow._handler_progress_index[DOMAIN] + flows_in_progress = hass.config_entries.flow.async_progress() assert len(flows_in_progress) == 1 - assert list(flows_in_progress)[0].product == "smile_open_therm" + assert flows_in_progress[0]["context"]["product"] == "smile_open_therm" diff --git a/tests/components/plugwise/test_init.py b/tests/components/plugwise/test_init.py index 5b276d5018d..26aedf864dc 100644 --- a/tests/components/plugwise/test_init.py +++ b/tests/components/plugwise/test_init.py @@ -3,7 +3,6 @@ from datetime import timedelta from unittest.mock import MagicMock, patch -from freezegun.api import FrozenDateTimeFactory from plugwise.exceptions import ( ConnectionFailedError, InvalidAuthentication, @@ -20,6 +19,7 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry, async_fire_time_changed @@ -40,9 +40,6 @@ TOM = { "location": "f871b8c4d63549319221e294e4f88074", "model": "Tom/Floor", "name": "Tom Zolder", - "binary_sensors": { - "low_battery": False, - }, "sensors": { "battery": 99, "temperature": 18.6, @@ -110,28 +107,6 @@ async def test_gateway_config_entry_not_ready( assert mock_config_entry.state is entry_state -async def test_device_in_dr( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_smile_p1: MagicMock, - device_registry: dr.DeviceRegistry, -) -> None: - """Test Gateway device registry data.""" - mock_config_entry.add_to_hass(hass) - assert await async_setup_component(hass, DOMAIN, {}) - await hass.async_block_till_done() - - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, "a455b61e52394b2db5081ce025a430f3")} - ) - assert 
device_entry.hw_version == "AME Smile 2.0 board" - assert device_entry.manufacturer == "Plugwise" - assert device_entry.model == "Gateway" - assert device_entry.model_id == "smile" - assert device_entry.name == "Smile P1" - assert device_entry.sw_version == "4.4.2" - - @pytest.mark.parametrize( ("entitydata", "old_unique_id", "new_unique_id"), [ @@ -231,9 +206,9 @@ async def test_update_device( mock_smile_adam_2: MagicMock, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, ) -> None: """Test a clean-up of the device_registry.""" + utcnow = dt_util.utcnow() data = mock_smile_adam_2.async_update.return_value mock_config_entry.add_to_hass(hass) @@ -246,7 +221,7 @@ async def test_update_device( entity_registry, mock_config_entry.entry_id ) ) - == 31 + == 29 ) assert ( len( @@ -260,8 +235,7 @@ async def test_update_device( # Add a 2nd Tom/Floor data.devices.update(TOM) with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, utcnow + timedelta(minutes=1)) await hass.async_block_till_done() assert ( @@ -270,7 +244,7 @@ async def test_update_device( entity_registry, mock_config_entry.entry_id ) ) - == 37 + == 34 ) assert ( len( @@ -288,8 +262,7 @@ async def test_update_device( # Remove the existing Tom/Floor data.devices.pop("1772a4ea304041adb83f357b751341ff") with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, utcnow + timedelta(minutes=1)) await hass.async_block_till_done() assert ( @@ -298,7 +271,7 @@ async def test_update_device( entity_registry, mock_config_entry.entry_id ) ) - == 31 + == 29 ) assert ( len( diff --git a/tests/components/plugwise/test_select.py b/tests/components/plugwise/test_select.py index f521787714b..b9dec283bc4 100644 --- a/tests/components/plugwise/test_select.py +++ b/tests/components/plugwise/test_select.py @@ -77,12 +77,3 @@ async def test_adam_select_regulation_mode( "heating", "on", ) - - -async def test_legacy_anna_select_entities( - hass: HomeAssistant, - mock_smile_legacy_anna: MagicMock, - init_integration: MockConfigEntry, -) -> None: - """Test not creating a select-entity for a legacy Anna without a thermostat-schedule.""" - assert not hass.states.get("select.anna_thermostat_schedule") diff --git a/tests/components/plugwise/test_sensor.py b/tests/components/plugwise/test_sensor.py index 0745adb786a..9a20a37824d 100644 --- a/tests/components/plugwise/test_sensor.py +++ b/tests/components/plugwise/test_sensor.py @@ -3,10 +3,10 @@ from unittest.mock import MagicMock from homeassistant.components.plugwise.const import DOMAIN -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_component import async_update_entity -import homeassistant.helpers.entity_registry as er from tests.common import MockConfigEntry @@ -58,7 +58,7 @@ async def test_unique_id_migration_humidity( # Entry to migrate entity_registry.async_get_or_create( - SENSOR_DOMAIN, + Platform.SENSOR, DOMAIN, "f61f1a2535f54f52ad006a3d18e459ca-relative_humidity", config_entry=mock_config_entry, @@ -67,7 +67,7 @@ async def test_unique_id_migration_humidity( ) # Entry not needing migration entity_registry.async_get_or_create( - SENSOR_DOMAIN, + 
Platform.SENSOR, DOMAIN, "f61f1a2535f54f52ad006a3d18e459ca-battery", config_entry=mock_config_entry, diff --git a/tests/components/plugwise/test_switch.py b/tests/components/plugwise/test_switch.py index d9a4792ddb1..5da76bb0ebd 100644 --- a/tests/components/plugwise/test_switch.py +++ b/tests/components/plugwise/test_switch.py @@ -11,12 +11,11 @@ from homeassistant.const import ( SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_OFF, STATE_ON, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -import homeassistant.helpers.entity_registry as er +from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry @@ -50,7 +49,7 @@ async def test_adam_climate_switch_negative_testing( assert mock_smile_adam.set_switch_state.call_count == 1 mock_smile_adam.set_switch_state.assert_called_with( - "78d1126fc4c743db81b61c20e88342a7", None, "relay", STATE_OFF + "78d1126fc4c743db81b61c20e88342a7", None, "relay", "off" ) with pytest.raises(HomeAssistantError): @@ -63,7 +62,7 @@ async def test_adam_climate_switch_negative_testing( assert mock_smile_adam.set_switch_state.call_count == 2 mock_smile_adam.set_switch_state.assert_called_with( - "a28f588dc4a049a483fd03a30361ad3a", None, "relay", STATE_ON + "a28f588dc4a049a483fd03a30361ad3a", None, "relay", "on" ) @@ -80,7 +79,7 @@ async def test_adam_climate_switch_changes( assert mock_smile_adam.set_switch_state.call_count == 1 mock_smile_adam.set_switch_state.assert_called_with( - "78d1126fc4c743db81b61c20e88342a7", None, "relay", STATE_OFF + "78d1126fc4c743db81b61c20e88342a7", None, "relay", "off" ) await hass.services.async_call( @@ -92,7 +91,7 @@ async def test_adam_climate_switch_changes( assert mock_smile_adam.set_switch_state.call_count == 2 mock_smile_adam.set_switch_state.assert_called_with( - "a28f588dc4a049a483fd03a30361ad3a", None, "relay", STATE_OFF + "a28f588dc4a049a483fd03a30361ad3a", None, "relay", "off" ) await hass.services.async_call( @@ -104,7 +103,7 @@ async def test_adam_climate_switch_changes( assert mock_smile_adam.set_switch_state.call_count == 3 mock_smile_adam.set_switch_state.assert_called_with( - "a28f588dc4a049a483fd03a30361ad3a", None, "relay", STATE_ON + "a28f588dc4a049a483fd03a30361ad3a", None, "relay", "on" ) @@ -133,7 +132,7 @@ async def test_stretch_switch_changes( ) assert mock_stretch.set_switch_state.call_count == 1 mock_stretch.set_switch_state.assert_called_with( - "e1c884e7dede431dadee09506ec4f859", None, "relay", STATE_OFF + "e1c884e7dede431dadee09506ec4f859", None, "relay", "off" ) await hass.services.async_call( @@ -144,7 +143,7 @@ async def test_stretch_switch_changes( ) assert mock_stretch.set_switch_state.call_count == 2 mock_stretch.set_switch_state.assert_called_with( - "cfe95cf3de1948c0b8955125bf754614", None, "relay", STATE_OFF + "cfe95cf3de1948c0b8955125bf754614", None, "relay", "off" ) await hass.services.async_call( @@ -155,7 +154,7 @@ async def test_stretch_switch_changes( ) assert mock_stretch.set_switch_state.call_count == 3 mock_stretch.set_switch_state.assert_called_with( - "cfe95cf3de1948c0b8955125bf754614", None, "relay", STATE_ON + "cfe95cf3de1948c0b8955125bf754614", None, "relay", "on" ) diff --git a/tests/components/point/__init__.py b/tests/components/point/__init__.py index 254eef2e936..9fb6eea9ac7 100644 --- a/tests/components/point/__init__.py +++ b/tests/components/point/__init__.py @@ -1,12 +1 @@ """Tests for the Point component.""" - -from homeassistant.core import HomeAssistant - -from 
tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/point/test_config_flow.py b/tests/components/point/test_config_flow.py index bd1e3cfac29..ec71b04b84b 100644 --- a/tests/components/point/test_config_flow.py +++ b/tests/components/point/test_config_flow.py @@ -1,172 +1,151 @@ -"""Test the Minut Point config flow.""" +"""Tests for the Point config flow.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest -from homeassistant import config_entries -from homeassistant.components.application_credentials import ( - ClientCredential, - async_import_client_credential, -) -from homeassistant.components.point.const import DOMAIN, OAUTH2_AUTHORIZE, OAUTH2_TOKEN -from homeassistant.config_entries import SOURCE_IMPORT +from homeassistant.components.point import DOMAIN, config_flow +from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import config_entry_oauth2_flow -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker -from tests.typing import ClientSessionGenerator - -CLIENT_ID = "1234" -CLIENT_SECRET = "5678" - -REDIRECT_URL = "https://example.com/auth/external/callback" -@pytest.fixture(autouse=True) -async def setup_credentials(hass: HomeAssistant) -> None: - """Fixture to setup credentials.""" - assert await async_setup_component(hass, "application_credentials", {}) - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential(CLIENT_ID, CLIENT_SECRET), +def init_config_flow(hass, side_effect=None): + """Init a configuration flow.""" + config_flow.register_flow_implementation(hass, DOMAIN, "id", "secret") + flow = config_flow.PointFlowHandler() + flow._get_authorization_url = AsyncMock( + return_value="https://example.com", side_effect=side_effect ) + flow.hass = hass + return flow -@pytest.mark.usefixtures("current_request_with_host") -async def test_full_flow( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, -) -> None: - """Check full flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - state = config_entry_oauth2_flow._encode_jwt( # noqa: SLF001 - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": REDIRECT_URL, - }, - ) +@pytest.fixture +def is_authorized(): + """Set PointSession authorized.""" + return True - assert result["url"] == ( - f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" - f"&redirect_uri={REDIRECT_URL}" - f"&state={state}" - ) - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.post( - OAUTH2_TOKEN, - json={ - "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", - "type": "Bearer", - "expires_in": 60, - "user_id": "abcd", - }, - ) +@pytest.fixture +def mock_pypoint(is_authorized): + """Mock pypoint.""" with patch( - "homeassistant.components.point.async_setup_entry", 
return_value=True - ) as mock_setup: - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup.mock_calls) == 1 - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == "abcd" - assert result["result"].data["token"]["user_id"] == "abcd" - assert result["result"].data["token"]["type"] == "Bearer" - assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" - assert result["result"].data["token"]["expires_in"] == 60 - assert result["result"].data["token"]["access_token"] == "mock-access-token" - assert "webhook_id" in result["result"].data + "homeassistant.components.point.config_flow.PointSession" + ) as PointSession: + PointSession.return_value.get_access_token = AsyncMock( + return_value={"access_token": "boo"} + ) + PointSession.return_value.is_authorized = is_authorized + PointSession.return_value.user = AsyncMock( + return_value={"email": "john.doe@example.com"} + ) + yield PointSession -@pytest.mark.parametrize( - ("unique_id", "expected", "expected_unique_id"), - [ - ("abcd", "reauth_successful", "abcd"), - (None, "reauth_successful", "abcd"), - ("abcde", "wrong_account", "abcde"), - ], - ids=("correct-unique_id", "missing-unique_id", "wrong-unique_id-abort"), -) -@pytest.mark.usefixtures("current_request_with_host") -async def test_reauthentication_flow( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - unique_id: str | None, - expected: str, - expected_unique_id: str, -) -> None: - """Test reauthentication flow.""" - old_entry = MockConfigEntry( - domain=DOMAIN, - unique_id=unique_id, - version=1, - data={"id": "timmo", "auth_implementation": DOMAIN}, - ) - old_entry.add_to_hass(hass) - - result = await old_entry.start_reauth_flow(hass) - - result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": REDIRECT_URL, - }, - ) - client = await hass_client_no_auth() - await client.get(f"/auth/external/callback?code=abcd&state={state}") - - aioclient_mock.post( - OAUTH2_TOKEN, - json={ - "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", - "type": "Bearer", - "expires_in": 60, - "user_id": "abcd", - }, - ) - - with ( - patch("homeassistant.components.point.api.AsyncConfigEntryAuth"), - patch( - f"homeassistant.components.{DOMAIN}.async_setup_entry", return_value=True - ), - ): - result = await hass.config_entries.flow.async_configure(result["flow_id"]) +async def test_abort_if_no_implementation_registered(hass: HomeAssistant) -> None: + """Test we abort if no implementation is registered.""" + flow = config_flow.PointFlowHandler() + flow.hass = hass + result = await flow.async_step_user() assert result["type"] is FlowResultType.ABORT - assert result["reason"] == expected - assert old_entry.unique_id == expected_unique_id + assert result["reason"] == "no_flows" -async def test_import_flow( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, +async def test_abort_if_already_setup(hass: HomeAssistant) -> None: + """Test we abort if Point is already setup.""" + flow = init_config_flow(hass) + + with patch.object(hass.config_entries, "async_entries", return_value=[{}]): + result = await flow.async_step_user() + assert result["type"] is FlowResultType.ABORT + 
assert result["reason"] == "already_setup" + + with patch.object(hass.config_entries, "async_entries", return_value=[{}]): + result = await flow.async_step_import() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_setup" + + +async def test_full_flow_implementation(hass: HomeAssistant, mock_pypoint) -> None: + """Test registering an implementation and finishing flow works.""" + config_flow.register_flow_implementation(hass, "test-other", None, None) + flow = init_config_flow(hass) + + result = await flow.async_step_user() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await flow.async_step_user({"flow_impl": "test"}) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["description_placeholders"] == { + "authorization_url": "https://example.com" + } + + result = await flow.async_step_code("123ABC") + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"]["refresh_args"] == { + CONF_CLIENT_ID: "id", + CONF_CLIENT_SECRET: "secret", + } + assert result["title"] == "john.doe@example.com" + assert result["data"]["token"] == {"access_token": "boo"} + + +async def test_step_import(hass: HomeAssistant, mock_pypoint) -> None: + """Test that we trigger import when configuring with client.""" + flow = init_config_flow(hass) + + result = await flow.async_step_import() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + + +@pytest.mark.parametrize("is_authorized", [False]) +async def test_wrong_code_flow_implementation( + hass: HomeAssistant, mock_pypoint ) -> None: - """Test import flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT} - ) - assert result["type"] == FlowResultType.FORM - assert result["step_id"] == "pick_implementation" + """Test wrong code.""" + flow = init_config_flow(hass) + + result = await flow.async_step_code("123ABC") + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "auth_error" + + +async def test_not_pick_implementation_if_only_one(hass: HomeAssistant) -> None: + """Test we allow picking implementation if we have one flow_imp.""" + flow = init_config_flow(hass) + + result = await flow.async_step_user() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + + +async def test_abort_if_timeout_generating_auth_url(hass: HomeAssistant) -> None: + """Test we abort if generating authorize url fails.""" + flow = init_config_flow(hass, side_effect=TimeoutError) + + result = await flow.async_step_user() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "authorize_url_timeout" + + +async def test_abort_if_exception_generating_auth_url(hass: HomeAssistant) -> None: + """Test we abort if generating authorize url blows up.""" + flow = init_config_flow(hass, side_effect=ValueError) + + result = await flow.async_step_user() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown_authorize_url_generation" + + +async def test_abort_no_code(hass: HomeAssistant) -> None: + """Test if no code is given to step_code.""" + flow = init_config_flow(hass) + + result = await flow.async_step_code() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_code" diff --git a/tests/components/poolsense/conftest.py b/tests/components/poolsense/conftest.py index 6a842df7cfd..ac16ef23ff3 100644 --- 
a/tests/components/poolsense/conftest.py +++ b/tests/components/poolsense/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Poolsense tests.""" -from collections.abc import Generator from datetime import UTC, datetime from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.poolsense.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/powerwall/mocks.py b/tests/components/powerwall/mocks.py index 3081776483c..e43ccee16f1 100644 --- a/tests/components/powerwall/mocks.py +++ b/tests/components/powerwall/mocks.py @@ -17,7 +17,6 @@ from tesla_powerwall import ( ) from homeassistant.core import HomeAssistant -from homeassistant.util.json import JsonValueType from tests.common import load_fixture @@ -88,7 +87,7 @@ async def _mock_powerwall_return_value( return powerwall_mock -async def _mock_powerwall_site_name(hass: HomeAssistant, site_name: str) -> MagicMock: +async def _mock_powerwall_site_name(hass, site_name): powerwall_mock = MagicMock(Powerwall) powerwall_mock.__aenter__.return_value = powerwall_mock @@ -111,7 +110,7 @@ async def _mock_powerwall_side_effect(site_info=None): return powerwall_mock -async def _async_load_json_fixture(hass: HomeAssistant, path: str) -> JsonValueType: +async def _async_load_json_fixture(hass, path): fixture = await hass.async_add_executor_job( load_fixture, os.path.join("powerwall", path) ) diff --git a/tests/components/powerwall/test_config_flow.py b/tests/components/powerwall/test_config_flow.py index 1ff1470f81c..db0ef2e9884 100644 --- a/tests/components/powerwall/test_config_flow.py +++ b/tests/components/powerwall/test_config_flow.py @@ -336,14 +336,13 @@ async def test_form_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - flow = hass.config_entries.flow.async_get(result["flow_id"]) - assert flow["context"]["title_placeholders"] == { - "ip_address": VALID_CONFIG[CONF_IP_ADDRESS], - "name": entry.title, - } mock_powerwall = await _mock_powerwall_site_name(hass, "My site") diff --git a/tests/components/powerwall/test_switch.py b/tests/components/powerwall/test_switch.py index b4ff0ca724e..b01f60210a6 100644 --- a/tests/components/powerwall/test_switch.py +++ b/tests/components/powerwall/test_switch.py @@ -1,6 +1,6 @@ """Test for Powerwall off-grid switch.""" -from unittest.mock import MagicMock, patch +from unittest.mock import patch import pytest from tesla_powerwall import GridStatus, PowerwallError @@ -24,7 +24,7 @@ ENTITY_ID = "switch.mysite_off_grid_operation" @pytest.fixture(name="mock_powerwall") -async def mock_powerwall_fixture(hass: HomeAssistant) -> MagicMock: +async def mock_powerwall_fixture(hass): """Set up base powerwall fixture.""" mock_powerwall = await _mock_powerwall_with_fixtures(hass) diff --git a/tests/components/profiler/test_init.py b/tests/components/profiler/test_init.py index 37940df437b..2eca84b43fe 100644 --- a/tests/components/profiler/test_init.py +++ b/tests/components/profiler/test_init.py @@ -5,7 +5,6 @@ from functools import lru_cache import logging import os from pathlib import Path -import sys from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory @@ -71,9 +70,6 @@ 
async def test_basic_usage(hass: HomeAssistant, tmp_path: Path) -> None: await hass.async_block_till_done() -@pytest.mark.skipif( - sys.version_info >= (3, 13), reason="not yet available on Python 3.13" -) async def test_memory_usage(hass: HomeAssistant, tmp_path: Path) -> None: """Test we can setup and the service is registered.""" test_dir = tmp_path / "profiles" @@ -105,24 +101,6 @@ async def test_memory_usage(hass: HomeAssistant, tmp_path: Path) -> None: await hass.async_block_till_done() -@pytest.mark.skipif(sys.version_info < (3, 13), reason="still works on python 3.12") -async def test_memory_usage_py313(hass: HomeAssistant, tmp_path: Path) -> None: - """Test raise an error on python3.13.""" - entry = MockConfigEntry(domain=DOMAIN) - entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert hass.services.has_service(DOMAIN, SERVICE_MEMORY) - with pytest.raises( - HomeAssistantError, - match="Memory profiling is not supported on Python 3.13. Please use Python 3.12.", - ): - await hass.services.async_call( - DOMAIN, SERVICE_MEMORY, {CONF_SECONDS: 0.000001}, blocking=True - ) - - async def test_object_growth_logging( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -198,12 +176,12 @@ async def test_dump_log_object( await hass.async_block_till_done() class DumpLogDummy: - def __init__(self, fail) -> None: + def __init__(self, fail): self.fail = fail def __repr__(self): if self.fail: - raise Exception("failed") # noqa: TRY002 + raise Exception("failed") # pylint: disable=broad-exception-raised return "" obj1 = DumpLogDummy(False) @@ -306,14 +284,14 @@ async def test_lru_stats(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) return 1 class DomainData: - def __init__(self) -> None: + def __init__(self): self._data = LRU(1) domain_data = DomainData() assert hass.services.has_service(DOMAIN, SERVICE_LRU_STATS) class LRUCache: - def __init__(self) -> None: + def __init__(self): self._data = {"sqlalchemy_test": 1} sqlalchemy_lru_cache = LRUCache() diff --git a/tests/components/prometheus/test_init.py b/tests/components/prometheus/test_init.py index 043a9cc4389..499d1a5df14 100644 --- a/tests/components/prometheus/test_init.py +++ b/tests/components/prometheus/test_init.py @@ -3,22 +3,19 @@ from dataclasses import dataclass import datetime from http import HTTPStatus -from typing import Any, Self +from typing import Any from unittest import mock from freezegun import freeze_time import prometheus_client -from prometheus_client.utils import floatToGoString import pytest from homeassistant.components import ( - alarm_control_panel, binary_sensor, climate, counter, cover, device_tracker, - fan, humidifier, input_boolean, input_number, @@ -31,28 +28,14 @@ from homeassistant.components import ( switch, update, ) -from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, - ATTR_FAN_MODE, - ATTR_FAN_MODES, ATTR_HUMIDITY, ATTR_HVAC_ACTION, - ATTR_HVAC_MODES, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, ) -from homeassistant.components.fan import ( - ATTR_DIRECTION, - ATTR_OSCILLATING, - ATTR_PERCENTAGE, - ATTR_PRESET_MODE, - ATTR_PRESET_MODES, - DIRECTION_FORWARD, - DIRECTION_REVERSE, -) from homeassistant.components.humidifier import ATTR_AVAILABLE_MODES -from homeassistant.components.lock import LockState from homeassistant.components.sensor import SensorDeviceClass from homeassistant.const import ( 
ATTR_BATTERY_LEVEL, @@ -68,13 +51,14 @@ from homeassistant.const import ( STATE_CLOSED, STATE_CLOSING, STATE_HOME, + STATE_LOCKED, STATE_NOT_HOME, STATE_OFF, STATE_ON, STATE_OPEN, STATE_OPENING, STATE_UNAVAILABLE, - STATE_UNKNOWN, + STATE_UNLOCKED, UnitOfEnergy, UnitOfTemperature, ) @@ -88,77 +72,6 @@ from tests.typing import ClientSessionGenerator PROMETHEUS_PATH = "homeassistant.components.prometheus" -class EntityMetric: - """Represents a Prometheus metric for a Home Assistant entity.""" - - metric_name: str - labels: dict[str, str] - - @classmethod - def required_labels(cls) -> list[str]: - """List of all required labels for a Prometheus metric.""" - return [ - "domain", - "friendly_name", - "entity", - ] - - def __init__(self, metric_name: str, **kwargs: Any) -> None: - """Create a new EntityMetric based on metric name and labels.""" - self.metric_name = metric_name - self.labels = kwargs - - # Labels that are required for all entities. - for labelname in self.required_labels(): - assert labelname in self.labels - assert self.labels[labelname] != "" - - def withValue(self, value: float) -> Self: - """Return a metric with value.""" - return EntityMetricWithValue(self, value) - - @property - def _metric_name_string(self) -> str: - """Return a full metric name as a string.""" - labels = ",".join( - f'{key}="{value}"' for key, value in sorted(self.labels.items()) - ) - return f"{self.metric_name}{{{labels}}}" - - def _in_metrics(self, metrics: list[str]) -> bool: - """Report whether this metric exists in the provided Prometheus output.""" - return any(line.startswith(self._metric_name_string) for line in metrics) - - def assert_in_metrics(self, metrics: list[str]) -> None: - """Assert that this metric exists in the provided Prometheus output.""" - assert self._in_metrics(metrics) - - def assert_not_in_metrics(self, metrics: list[str]) -> None: - """Assert that this metric does not exist in Prometheus output.""" - assert not self._in_metrics(metrics) - - -class EntityMetricWithValue(EntityMetric): - """Represents a Prometheus metric with a value.""" - - value: float - - def __init__(self, metric: EntityMetric, value: float) -> None: - """Create a new metric with a value based on a metric.""" - super().__init__(metric.metric_name, **metric.labels) - self.value = value - - @property - def _metric_string(self) -> str: - """Return a full metric string.""" - value = floatToGoString(self.value) - return f"{self._metric_name_string} {value}" - - def assert_in_metrics(self, metrics: list[str]) -> None: - """Assert that this metric exists in the provided Prometheus output.""" - assert self._metric_string in metrics - - @dataclass class FilterTest: """Class for capturing a filter test.""" @@ -167,299 +80,6 @@ class FilterTest: should_pass: bool -def test_entity_metric_generates_metric_name_string_without_value() -> None: - """Test using EntityMetric to format a simple metric string without any value.""" - domain = "sensor" - object_id = "outside_temperature" - entity_metric = EntityMetric( - metric_name="homeassistant_sensor_temperature_celsius", - domain=domain, - friendly_name="Outside Temperature", - entity=f"{domain}.{object_id}", - ) - assert entity_metric._metric_name_string == ( - "homeassistant_sensor_temperature_celsius{" - 'domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"}' - ) - - -def test_entity_metric_generates_metric_string_with_value() -> None: - """Test using EntityMetric to format a simple metric string but with a metric value 
included.""" - domain = "sensor" - object_id = "outside_temperature" - entity_metric = EntityMetric( - metric_name="homeassistant_sensor_temperature_celsius", - domain=domain, - friendly_name="Outside Temperature", - entity=f"{domain}.{object_id}", - ).withValue(17.2) - assert entity_metric._metric_string == ( - "homeassistant_sensor_temperature_celsius{" - 'domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"}' - " 17.2" - ) - - -def test_entity_metric_raises_exception_without_required_labels() -> None: - """Test using EntityMetric to raise exception when required labels are missing.""" - domain = "sensor" - object_id = "outside_temperature" - test_kwargs = { - "metric_name": "homeassistant_sensor_temperature_celsius", - "domain": domain, - "friendly_name": "Outside Temperature", - "entity": f"{domain}.{object_id}", - } - - assert len(EntityMetric.required_labels()) > 0 - - for labelname in EntityMetric.required_labels(): - label_kwargs = dict(test_kwargs) - # Delete the required label and ensure we get an exception - del label_kwargs[labelname] - with pytest.raises(AssertionError): - EntityMetric(**label_kwargs) - - -def test_entity_metric_raises_exception_if_required_label_is_empty_string() -> None: - """Test using EntityMetric to raise exception when required label value is empty string.""" - domain = "sensor" - object_id = "outside_temperature" - test_kwargs = { - "metric_name": "homeassistant_sensor_temperature_celsius", - "domain": domain, - "friendly_name": "Outside Temperature", - "entity": f"{domain}.{object_id}", - } - - assert len(EntityMetric.required_labels()) > 0 - - for labelname in EntityMetric.required_labels(): - label_kwargs = dict(test_kwargs) - # Replace the required label with "" and ensure we get an exception - label_kwargs[labelname] = "" - with pytest.raises(AssertionError): - EntityMetric(**label_kwargs) - - -def test_entity_metric_generates_alphabetically_ordered_labels() -> None: - """Test using EntityMetric to format a simple metric string with labels alphabetically ordered.""" - domain = "sensor" - object_id = "outside_temperature" - - static_metric_string = ( - "homeassistant_sensor_temperature_celsius{" - 'domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature",' - 'zed_label="foo"' - "}" - " 17.2" - ) - - ordered_entity_metric = EntityMetric( - metric_name="homeassistant_sensor_temperature_celsius", - domain=domain, - entity=f"{domain}.{object_id}", - friendly_name="Outside Temperature", - zed_label="foo", - ).withValue(17.2) - assert ordered_entity_metric._metric_string == static_metric_string - - unordered_entity_metric = EntityMetric( - metric_name="homeassistant_sensor_temperature_celsius", - zed_label="foo", - entity=f"{domain}.{object_id}", - friendly_name="Outside Temperature", - domain=domain, - ).withValue(17.2) - assert unordered_entity_metric._metric_string == static_metric_string - - -def test_entity_metric_generates_metric_string_with_non_required_labels() -> None: - """Test using EntityMetric to format a simple metric string but with extra labels and values included.""" - mode_entity_metric = EntityMetric( - metric_name="climate_preset_mode", - domain="climate", - friendly_name="Ecobee", - entity="climate.ecobee", - mode="away", - ).withValue(1) - assert mode_entity_metric._metric_string == ( - "climate_preset_mode{" - 'domain="climate",' - 'entity="climate.ecobee",' - 'friendly_name="Ecobee",' - 'mode="away"' - "}" - " 1.0" - ) - - action_entity_metric = 
EntityMetric( - metric_name="climate_action", - domain="climate", - friendly_name="HeatPump", - entity="climate.heatpump", - action="heating", - ).withValue(1) - assert action_entity_metric._metric_string == ( - "climate_action{" - 'action="heating",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"' - "}" - " 1.0" - ) - - state_entity_metric = EntityMetric( - metric_name="cover_state", - domain="cover", - friendly_name="Curtain", - entity="cover.curtain", - state="open", - ).withValue(1) - assert state_entity_metric._metric_string == ( - "cover_state{" - 'domain="cover",' - 'entity="cover.curtain",' - 'friendly_name="Curtain",' - 'state="open"' - "}" - " 1.0" - ) - - foo_entity_metric = EntityMetric( - metric_name="homeassistant_sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - foo="bar", - ).withValue(17.2) - assert foo_entity_metric._metric_string == ( - "homeassistant_sensor_temperature_celsius{" - 'domain="sensor",' - 'entity="sensor.outside_temperature",' - 'foo="bar",' - 'friendly_name="Outside Temperature"' - "}" - " 17.2" - ) - - -def test_entity_metric_assert_helpers() -> None: - """Test using EntityMetric for both assert_in_metrics and assert_not_in_metrics.""" - temp_metric = ( - "homeassistant_sensor_temperature_celsius{" - 'domain="sensor",' - 'entity="sensor.outside_temperature",' - 'foo="bar",' - 'friendly_name="Outside Temperature"' - "}" - ) - climate_metric = ( - "climate_preset_mode{" - 'domain="climate",' - 'entity="climate.ecobee",' - 'friendly_name="Ecobee",' - 'mode="away"' - "}" - ) - excluded_cover_metric = ( - "cover_state{" - 'domain="cover",' - 'entity="cover.curtain",' - 'friendly_name="Curtain",' - 'state="open"' - "}" - ) - metrics = [ - temp_metric, - climate_metric, - ] - # First make sure the excluded metric is not present - assert excluded_cover_metric not in metrics - # now check for actual metrics - temp_entity_metric = EntityMetric( - metric_name="homeassistant_sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - foo="bar", - ) - assert temp_entity_metric._metric_name_string == temp_metric - temp_entity_metric.assert_in_metrics(metrics) - - climate_entity_metric = EntityMetric( - metric_name="climate_preset_mode", - domain="climate", - friendly_name="Ecobee", - entity="climate.ecobee", - mode="away", - ) - assert climate_entity_metric._metric_name_string == climate_metric - climate_entity_metric.assert_in_metrics(metrics) - - excluded_cover_entity_metric = EntityMetric( - metric_name="cover_state", - domain="cover", - friendly_name="Curtain", - entity="cover.curtain", - state="open", - ) - assert excluded_cover_entity_metric._metric_name_string == excluded_cover_metric - excluded_cover_entity_metric.assert_not_in_metrics(metrics) - - -def test_entity_metric_with_value_assert_helpers() -> None: - """Test using EntityMetricWithValue helpers, which is only assert_in_metrics.""" - temp_metric = ( - "homeassistant_sensor_temperature_celsius{" - 'domain="sensor",' - 'entity="sensor.outside_temperature",' - 'foo="bar",' - 'friendly_name="Outside Temperature"' - "}" - " 17.2" - ) - climate_metric = ( - "climate_preset_mode{" - 'domain="climate",' - 'entity="climate.ecobee",' - 'friendly_name="Ecobee",' - 'mode="away"' - "}" - " 1.0" - ) - metrics = [ - temp_metric, - climate_metric, - ] - temp_entity_metric = EntityMetric( - 
metric_name="homeassistant_sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - foo="bar", - ).withValue(17.2) - assert temp_entity_metric._metric_string == temp_metric - temp_entity_metric.assert_in_metrics(metrics) - - climate_entity_metric = EntityMetric( - metric_name="climate_preset_mode", - domain="climate", - friendly_name="Ecobee", - entity="climate.ecobee", - mode="away", - ).withValue(1) - assert climate_entity_metric._metric_string == climate_metric - climate_entity_metric.assert_in_metrics(metrics) - - @pytest.fixture(name="client") async def setup_prometheus_client( hass: HomeAssistant, @@ -518,18 +138,16 @@ async def test_setup_enumeration( suggested_object_id="outside_temperature", original_name="Outside Temperature", ) - state = 12.3 - set_state_with_entry(hass, sensor_1, state, {}) + set_state_with_entry(hass, sensor_1, 12.3, {}) assert await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) client = await hass_client() body = await generate_latest_metrics(client) - EntityMetric( - metric_name="homeassistant_sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(state).assert_in_metrics(body) + assert ( + 'homeassistant_sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 12.3' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -545,19 +163,17 @@ async def test_view_empty_namespace( "Objects collected during gc" in body ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Radio Energy", - entity="sensor.radio_energy", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.radio_energy",' + 'friendly_name="Radio Energy"} 1.0' in body + ) - EntityMetric( - metric_name="last_updated_time_seconds", - domain="sensor", - friendly_name="Radio Energy", - entity="sensor.radio_energy", - ).withValue(86400.0).assert_in_metrics(body) + assert ( + 'last_updated_time_seconds{domain="sensor",' + 'entity="sensor.radio_energy",' + 'friendly_name="Radio Energy"} 86400.0' in body + ) @pytest.mark.parametrize("namespace", [None]) @@ -573,12 +189,11 @@ async def test_view_default_namespace( "Objects collected during gc" in body ) - EntityMetric( - metric_name="homeassistant_sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(15.6).assert_in_metrics(body) + assert ( + 'homeassistant_sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 15.6' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -588,33 +203,29 @@ async def test_sensor_unit( """Test prometheus metrics for sensors with a unit.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="sensor_unit_kwh", - domain="sensor", - friendly_name="Television Energy", - entity="sensor.television_energy", - ).withValue(74.0).assert_in_metrics(body) + assert ( + 'sensor_unit_kwh{domain="sensor",' + 'entity="sensor.television_energy",' + 'friendly_name="Television Energy"} 74.0' in body + ) - EntityMetric( - metric_name="sensor_unit_sek_per_kwh", - domain="sensor", - friendly_name="Electricity price", - entity="sensor.electricity_price", - ).withValue(0.123).assert_in_metrics(body) + assert ( + 
'sensor_unit_sek_per_kwh{domain="sensor",' + 'entity="sensor.electricity_price",' + 'friendly_name="Electricity price"} 0.123' in body + ) - EntityMetric( - metric_name="sensor_unit_u0xb0", - domain="sensor", - friendly_name="Wind Direction", - entity="sensor.wind_direction", - ).withValue(25.0).assert_in_metrics(body) + assert ( + 'sensor_unit_u0xb0{domain="sensor",' + 'entity="sensor.wind_direction",' + 'friendly_name="Wind Direction"} 25.0' in body + ) - EntityMetric( - metric_name="sensor_unit_u0xb5g_per_mu0xb3", - domain="sensor", - friendly_name="SPS30 PM <1µm Weight concentration", - entity="sensor.sps30_pm_1um_weight_concentration", - ).withValue(3.7069).assert_in_metrics(body) + assert ( + 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' + 'entity="sensor.sps30_pm_1um_weight_concentration",' + 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -624,26 +235,23 @@ async def test_sensor_without_unit( """Test prometheus metrics for sensors without a unit.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="sensor_state", - domain="sensor", - friendly_name="Trend Gradient", - entity="sensor.trend_gradient", - ).withValue(0.002).assert_in_metrics(body) + assert ( + 'sensor_state{domain="sensor",' + 'entity="sensor.trend_gradient",' + 'friendly_name="Trend Gradient"} 0.002' in body + ) - EntityMetric( - metric_name="sensor_state", - domain="sensor", - friendly_name="Text", - entity="sensor.text", - ).assert_not_in_metrics(body) + assert ( + 'sensor_state{domain="sensor",' + 'entity="sensor.text",' + 'friendly_name="Text"} 0' not in body + ) - EntityMetric( - metric_name="sensor_unit_text", - domain="sensor", - friendly_name="Text Unit", - entity="sensor.text_unit", - ).assert_not_in_metrics(body) + assert ( + 'sensor_unit_text{domain="sensor",' + 'entity="sensor.text_unit",' + 'friendly_name="Text Unit"} 0' not in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -653,40 +261,35 @@ async def test_sensor_device_class( """Test prometheus metrics for sensor with a device_class.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="sensor_temperature_celsius", - domain="sensor", - friendly_name="Fahrenheit", - entity="sensor.fahrenheit", - ).withValue(10.0).assert_in_metrics(body) + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.fahrenheit",' + 'friendly_name="Fahrenheit"} 10.0' in body + ) - EntityMetric( - metric_name="sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(15.6).assert_in_metrics(body) + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 15.6' in body + ) - EntityMetric( - metric_name="sensor_humidity_percent", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(54.0).assert_in_metrics(body) + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) - EntityMetric( - metric_name="sensor_power_kwh", - domain="sensor", - friendly_name="Radio Energy", - entity="sensor.radio_energy", - ).withValue(14.0).assert_in_metrics(body) + assert ( + 'sensor_power_kwh{domain="sensor",' + 'entity="sensor.radio_energy",' + 'friendly_name="Radio Energy"} 14.0' in body + ) - EntityMetric( - metric_name="sensor_timestamp_seconds", 
- domain="sensor", - friendly_name="Timestamp", - entity="sensor.timestamp", - ).withValue(1.691445808136036e09).assert_in_metrics(body) + assert ( + 'sensor_timestamp_seconds{domain="sensor",' + 'entity="sensor.timestamp",' + 'friendly_name="Timestamp"} 1.691445808136036e+09' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -696,33 +299,23 @@ async def test_input_number( """Test prometheus metrics for input_number.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="input_number_state", - domain="input_number", - friendly_name="Threshold", - entity="input_number.threshold", - ).withValue(5.2).assert_in_metrics(body) + assert ( + 'input_number_state{domain="input_number",' + 'entity="input_number.threshold",' + 'friendly_name="Threshold"} 5.2' in body + ) - EntityMetric( - metric_name="input_number_state", - domain="input_number", - friendly_name="None", - entity="input_number.brightness", - ).withValue(60.0).assert_in_metrics(body) + assert ( + 'input_number_state{domain="input_number",' + 'entity="input_number.brightness",' + 'friendly_name="None"} 60.0' in body + ) - EntityMetric( - metric_name="input_number_state_celsius", - domain="input_number", - friendly_name="Target temperature", - entity="input_number.target_temperature", - ).withValue(22.7).assert_in_metrics(body) - - EntityMetric( - metric_name="input_number_state_celsius", - domain="input_number", - friendly_name="Converted temperature", - entity="input_number.converted_temperature", - ).withValue(100).assert_in_metrics(body) + assert ( + 'input_number_state_celsius{domain="input_number",' + 'entity="input_number.target_temperature",' + 'friendly_name="Target temperature"} 22.7' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -732,26 +325,23 @@ async def test_number( """Test prometheus metrics for number.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="number_state", - domain="number", - friendly_name="Threshold", - entity="number.threshold", - ).withValue(5.2).assert_in_metrics(body) + assert ( + 'number_state{domain="number",' + 'entity="number.threshold",' + 'friendly_name="Threshold"} 5.2' in body + ) - EntityMetric( - metric_name="number_state", - domain="number", - friendly_name="None", - entity="number.brightness", - ).withValue(60.0).assert_in_metrics(body) + assert ( + 'number_state{domain="number",' + 'entity="number.brightness",' + 'friendly_name="None"} 60.0' in body + ) - EntityMetric( - metric_name="number_state_celsius", - domain="number", - friendly_name="Target temperature", - entity="number.target_temperature", - ).withValue(22.7).assert_in_metrics(body) + assert ( + 'number_state_celsius{domain="number",' + 'entity="number.target_temperature",' + 'friendly_name="Target temperature"} 22.7' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -761,12 +351,11 @@ async def test_battery( """Test prometheus metrics for battery.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="battery_level_percent", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(12.0).assert_in_metrics(body) + assert ( + 'battery_level_percent{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 12.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -777,56 +366,35 @@ async def test_climate( """Test prometheus metrics for climate entities.""" body = await generate_latest_metrics(client) - EntityMetric( - 
metric_name="climate_current_temperature_celsius", - domain="climate", - friendly_name="HeatPump", - entity="climate.heatpump", - ).withValue(25.0).assert_in_metrics(body) + assert ( + 'climate_current_temperature_celsius{domain="climate",' + 'entity="climate.heatpump",' + 'friendly_name="HeatPump"} 25.0' in body + ) - EntityMetric( - metric_name="climate_target_temperature_celsius", - domain="climate", - friendly_name="HeatPump", - entity="climate.heatpump", - ).withValue(20.0).assert_in_metrics(body) + assert ( + 'climate_target_temperature_celsius{domain="climate",' + 'entity="climate.heatpump",' + 'friendly_name="HeatPump"} 20.0' in body + ) - EntityMetric( - metric_name="climate_target_temperature_low_celsius", - domain="climate", - friendly_name="Ecobee", - entity="climate.ecobee", - ).withValue(21.0).assert_in_metrics(body) + assert ( + 'climate_target_temperature_low_celsius{domain="climate",' + 'entity="climate.ecobee",' + 'friendly_name="Ecobee"} 21.0' in body + ) - EntityMetric( - metric_name="climate_target_temperature_high_celsius", - domain="climate", - friendly_name="Ecobee", - entity="climate.ecobee", - ).withValue(24.0).assert_in_metrics(body) + assert ( + 'climate_target_temperature_high_celsius{domain="climate",' + 'entity="climate.ecobee",' + 'friendly_name="Ecobee"} 24.0' in body + ) - EntityMetric( - metric_name="climate_target_temperature_celsius", - domain="climate", - friendly_name="Fritz!DECT", - entity="climate.fritzdect", - ).withValue(0.0).assert_in_metrics(body) - - EntityMetric( - metric_name="climate_preset_mode", - domain="climate", - friendly_name="Ecobee", - entity="climate.ecobee", - mode="away", - ).withValue(1).assert_in_metrics(body) - - EntityMetric( - metric_name="climate_fan_mode", - domain="climate", - friendly_name="Ecobee", - entity="climate.ecobee", - mode="auto", - ).withValue(1).assert_in_metrics(body) + assert ( + 'climate_target_temperature_celsius{domain="climate",' + 'entity="climate.fritzdect",' + 'friendly_name="Fritz!DECT"} 0.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -837,35 +405,30 @@ async def test_humidifier( """Test prometheus metrics for humidifier entities.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="humidifier_target_humidity_percent", - domain="humidifier", - friendly_name="Humidifier", - entity="humidifier.humidifier", - ).withValue(68.0).assert_in_metrics(body) + assert ( + 'humidifier_target_humidity_percent{domain="humidifier",' + 'entity="humidifier.humidifier",' + 'friendly_name="Humidifier"} 68.0' in body + ) - EntityMetric( - metric_name="humidifier_state", - domain="humidifier", - friendly_name="Dehumidifier", - entity="humidifier.dehumidifier", - ).withValue(1).assert_in_metrics(body) + assert ( + 'humidifier_state{domain="humidifier",' + 'entity="humidifier.dehumidifier",' + 'friendly_name="Dehumidifier"} 1.0' in body + ) - EntityMetric( - metric_name="humidifier_mode", - domain="humidifier", - friendly_name="Hygrostat", - entity="humidifier.hygrostat", - mode="home", - ).withValue(1).assert_in_metrics(body) - - EntityMetric( - metric_name="humidifier_mode", - domain="humidifier", - friendly_name="Hygrostat", - entity="humidifier.hygrostat", - mode="eco", - ).withValue(0.0).assert_in_metrics(body) + assert ( + 'humidifier_mode{domain="humidifier",' + 'entity="humidifier.hygrostat",' + 'friendly_name="Hygrostat",' + 'mode="home"} 1.0' in body + ) + assert ( + 'humidifier_mode{domain="humidifier",' + 'entity="humidifier.hygrostat",' + 'friendly_name="Hygrostat",' + 
'mode="eco"} 0.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -876,33 +439,29 @@ async def test_attributes( """Test prometheus metrics for entity attributes.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="switch_state", - domain="switch", - friendly_name="Boolean", - entity="switch.boolean", - ).withValue(1).assert_in_metrics(body) + assert ( + 'switch_state{domain="switch",' + 'entity="switch.boolean",' + 'friendly_name="Boolean"} 1.0' in body + ) - EntityMetric( - metric_name="switch_attr_boolean", - domain="switch", - friendly_name="Boolean", - entity="switch.boolean", - ).withValue(1).assert_in_metrics(body) + assert ( + 'switch_attr_boolean{domain="switch",' + 'entity="switch.boolean",' + 'friendly_name="Boolean"} 1.0' in body + ) - EntityMetric( - metric_name="switch_state", - domain="switch", - friendly_name="Number", - entity="switch.number", - ).withValue(0.0).assert_in_metrics(body) + assert ( + 'switch_state{domain="switch",' + 'entity="switch.number",' + 'friendly_name="Number"} 0.0' in body + ) - EntityMetric( - metric_name="switch_attr_number", - domain="switch", - friendly_name="Number", - entity="switch.number", - ).withValue(10.2).assert_in_metrics(body) + assert ( + 'switch_attr_number{domain="switch",' + 'entity="switch.number",' + 'friendly_name="Number"} 10.2' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -912,19 +471,17 @@ async def test_binary_sensor( """Test prometheus metrics for binary_sensor.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="binary_sensor_state", - domain="binary_sensor", - friendly_name="Door", - entity="binary_sensor.door", - ).withValue(1).assert_in_metrics(body) + assert ( + 'binary_sensor_state{domain="binary_sensor",' + 'entity="binary_sensor.door",' + 'friendly_name="Door"} 1.0' in body + ) - EntityMetric( - metric_name="binary_sensor_state", - domain="binary_sensor", - friendly_name="Window", - entity="binary_sensor.window", - ).withValue(0.0).assert_in_metrics(body) + assert ( + 'binary_sensor_state{domain="binary_sensor",' + 'entity="binary_sensor.window",' + 'friendly_name="Window"} 0.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -934,19 +491,17 @@ async def test_input_boolean( """Test prometheus metrics for input_boolean.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="input_boolean_state", - domain="input_boolean", - friendly_name="Test", - entity="input_boolean.test", - ).withValue(1).assert_in_metrics(body) + assert ( + 'input_boolean_state{domain="input_boolean",' + 'entity="input_boolean.test",' + 'friendly_name="Test"} 1.0' in body + ) - EntityMetric( - metric_name="input_boolean_state", - domain="input_boolean", - friendly_name="Helper", - entity="input_boolean.helper", - ).withValue(0.0).assert_in_metrics(body) + assert ( + 'input_boolean_state{domain="input_boolean",' + 'entity="input_boolean.helper",' + 'friendly_name="Helper"} 0.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -956,40 +511,35 @@ async def test_light( """Test prometheus metrics for lights.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="light_brightness_percent", - domain="light", - friendly_name="Desk", - entity="light.desk", - ).withValue(100.0).assert_in_metrics(body) + assert ( + 'light_brightness_percent{domain="light",' + 'entity="light.desk",' + 'friendly_name="Desk"} 100.0' in body + ) - EntityMetric( - metric_name="light_brightness_percent", - domain="light", - 
friendly_name="Wall", - entity="light.wall", - ).withValue(0.0).assert_in_metrics(body) + assert ( + 'light_brightness_percent{domain="light",' + 'entity="light.wall",' + 'friendly_name="Wall"} 0.0' in body + ) - EntityMetric( - metric_name="light_brightness_percent", - domain="light", - friendly_name="TV", - entity="light.tv", - ).withValue(100.0).assert_in_metrics(body) + assert ( + 'light_brightness_percent{domain="light",' + 'entity="light.tv",' + 'friendly_name="TV"} 100.0' in body + ) - EntityMetric( - metric_name="light_brightness_percent", - domain="light", - friendly_name="PC", - entity="light.pc", - ).withValue(70.58823529411765).assert_in_metrics(body) + assert ( + 'light_brightness_percent{domain="light",' + 'entity="light.pc",' + 'friendly_name="PC"} 70.58823529411765' in body + ) - EntityMetric( - metric_name="light_brightness_percent", - domain="light", - friendly_name="Hallway", - entity="light.hallway", - ).withValue(100.0).assert_in_metrics(body) + assert ( + 'light_brightness_percent{domain="light",' + 'entity="light.hallway",' + 'friendly_name="Hallway"} 100.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -999,111 +549,17 @@ async def test_lock( """Test prometheus metrics for lock.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="lock_state", - domain="lock", - friendly_name="Front Door", - entity="lock.front_door", - ).withValue(1).assert_in_metrics(body) + assert ( + 'lock_state{domain="lock",' + 'entity="lock.front_door",' + 'friendly_name="Front Door"} 1.0' in body + ) - EntityMetric( - metric_name="lock_state", - domain="lock", - friendly_name="Kitchen Door", - entity="lock.kitchen_door", - ).withValue(0.0).assert_in_metrics(body) - - -@pytest.mark.parametrize("namespace", [""]) -async def test_fan( - client: ClientSessionGenerator, fan_entities: dict[str, er.RegistryEntry] -) -> None: - """Test prometheus metrics for fan.""" - body = await generate_latest_metrics(client) - - EntityMetric( - metric_name="fan_state", - domain="fan", - friendly_name="Fan 1", - entity="fan.fan_1", - ).withValue(1).assert_in_metrics(body) - - EntityMetric( - metric_name="fan_speed_percent", - domain="fan", - friendly_name="Fan 1", - entity="fan.fan_1", - ).withValue(33.0).assert_in_metrics(body) - - EntityMetric( - metric_name="fan_is_oscillating", - domain="fan", - friendly_name="Fan 1", - entity="fan.fan_1", - ).withValue(1).assert_in_metrics(body) - - EntityMetric( - metric_name="fan_direction_reversed", - domain="fan", - friendly_name="Fan 1", - entity="fan.fan_1", - ).withValue(0.0).assert_in_metrics(body) - - EntityMetric( - metric_name="fan_preset_mode", - domain="fan", - friendly_name="Fan 1", - entity="fan.fan_1", - mode="LO", - ).withValue(1).assert_in_metrics(body) - - EntityMetric( - metric_name="fan_direction_reversed", - domain="fan", - friendly_name="Reverse Fan", - entity="fan.fan_2", - ).withValue(1).assert_in_metrics(body) - - -@pytest.mark.parametrize("namespace", [""]) -async def test_alarm_control_panel( - client: ClientSessionGenerator, - alarm_control_panel_entities: dict[str, er.RegistryEntry], -) -> None: - """Test prometheus metrics for alarm control panel.""" - body = await generate_latest_metrics(client) - - EntityMetric( - metric_name="alarm_control_panel_state", - domain="alarm_control_panel", - friendly_name="Alarm Control Panel 1", - entity="alarm_control_panel.alarm_control_panel_1", - state="armed_away", - ).withValue(1).assert_in_metrics(body) - - EntityMetric( - metric_name="alarm_control_panel_state", - 
domain="alarm_control_panel", - friendly_name="Alarm Control Panel 1", - entity="alarm_control_panel.alarm_control_panel_1", - state="disarmed", - ).withValue(0.0).assert_in_metrics(body) - - EntityMetric( - metric_name="alarm_control_panel_state", - domain="alarm_control_panel", - friendly_name="Alarm Control Panel 2", - entity="alarm_control_panel.alarm_control_panel_2", - state="armed_home", - ).withValue(1).assert_in_metrics(body) - - EntityMetric( - metric_name="alarm_control_panel_state", - domain="alarm_control_panel", - friendly_name="Alarm Control Panel 2", - entity="alarm_control_panel.alarm_control_panel_2", - state="armed_away", - ).withValue(0.0).assert_in_metrics(body) + assert ( + 'lock_state{domain="lock",' + 'entity="lock.kitchen_door",' + 'friendly_name="Kitchen Door"} 0.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -1116,61 +572,55 @@ async def test_cover( open_covers = ["cover_open", "cover_position", "cover_tilt_position"] for testcover in data: - EntityMetric( - metric_name="cover_state", - domain="cover", - friendly_name=cover_entities[testcover].original_name, - entity=cover_entities[testcover].entity_id, - state="open", - ).withValue( - 1.0 if cover_entities[testcover].unique_id in open_covers else 0.0 - ).assert_in_metrics(body) + open_metric = ( + f'cover_state{{domain="cover",' + f'entity="{cover_entities[testcover].entity_id}",' + f'friendly_name="{cover_entities[testcover].original_name}",' + f'state="open"}} {1.0 if cover_entities[testcover].unique_id in open_covers else 0.0}' + ) + assert open_metric in body - EntityMetric( - metric_name="cover_state", - domain="cover", - friendly_name=cover_entities[testcover].original_name, - entity=cover_entities[testcover].entity_id, - state="closed", - ).withValue( - 1.0 if cover_entities[testcover].unique_id == "cover_closed" else 0.0 - ).assert_in_metrics(body) + closed_metric = ( + f'cover_state{{domain="cover",' + f'entity="{cover_entities[testcover].entity_id}",' + f'friendly_name="{cover_entities[testcover].original_name}",' + f'state="closed"}} {1.0 if cover_entities[testcover].unique_id == "cover_closed" else 0.0}' + ) + assert closed_metric in body - EntityMetric( - metric_name="cover_state", - domain="cover", - friendly_name=cover_entities[testcover].original_name, - entity=cover_entities[testcover].entity_id, - state="opening", - ).withValue( - 1.0 if cover_entities[testcover].unique_id == "cover_opening" else 0.0 - ).assert_in_metrics(body) + opening_metric = ( + f'cover_state{{domain="cover",' + f'entity="{cover_entities[testcover].entity_id}",' + f'friendly_name="{cover_entities[testcover].original_name}",' + f'state="opening"}} {1.0 if cover_entities[testcover].unique_id == "cover_opening" else 0.0}' + ) + assert opening_metric in body - EntityMetric( - metric_name="cover_state", - domain="cover", - friendly_name=cover_entities[testcover].original_name, - entity=cover_entities[testcover].entity_id, - state="closing", - ).withValue( - 1.0 if cover_entities[testcover].unique_id == "cover_closing" else 0.0 - ).assert_in_metrics(body) + closing_metric = ( + f'cover_state{{domain="cover",' + f'entity="{cover_entities[testcover].entity_id}",' + f'friendly_name="{cover_entities[testcover].original_name}",' + f'state="closing"}} {1.0 if cover_entities[testcover].unique_id == "cover_closing" else 0.0}' + ) + assert closing_metric in body if testcover == "cover_position": - EntityMetric( - metric_name="cover_position", - domain="cover", - friendly_name=cover_entities[testcover].original_name, - 
entity=cover_entities[testcover].entity_id, - ).withValue(50.0).assert_in_metrics(body) + position_metric = ( + f'cover_position{{domain="cover",' + f'entity="{cover_entities[testcover].entity_id}",' + f'friendly_name="{cover_entities[testcover].original_name}"' + f"}} 50.0" + ) + assert position_metric in body if testcover == "cover_tilt_position": - EntityMetric( - metric_name="cover_tilt_position", - domain="cover", - friendly_name=cover_entities[testcover].original_name, - entity=cover_entities[testcover].entity_id, - ).withValue(50.0).assert_in_metrics(body) + tilt_position_metric = ( + f'cover_tilt_position{{domain="cover",' + f'entity="{cover_entities[testcover].entity_id}",' + f'friendly_name="{cover_entities[testcover].original_name}"' + f"}} 50.0" + ) + assert tilt_position_metric in body @pytest.mark.parametrize("namespace", [""]) @@ -1180,40 +630,16 @@ async def test_device_tracker( """Test prometheus metrics for device_tracker.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="device_tracker_state", - domain="device_tracker", - friendly_name="Phone", - entity="device_tracker.phone", - ).withValue(1).assert_in_metrics(body) - - EntityMetric( - metric_name="device_tracker_state", - domain="device_tracker", - friendly_name="Watch", - entity="device_tracker.watch", - ).withValue(0.0).assert_in_metrics(body) - - -@pytest.mark.parametrize("namespace", [""]) -async def test_person( - client: ClientSessionGenerator, person_entities: dict[str, er.RegistryEntry] -) -> None: - """Test prometheus metrics for person.""" - body = await generate_latest_metrics(client) - - EntityMetric( - metric_name="person_state", - domain="person", - friendly_name="Bob", - entity="person.bob", - ).withValue(1).assert_in_metrics(body) - EntityMetric( - metric_name="person_state", - domain="person", - friendly_name="Alice", - entity="person.alice", - ).withValue(0.0).assert_in_metrics(body) + assert ( + 'device_tracker_state{domain="device_tracker",' + 'entity="device_tracker.phone",' + 'friendly_name="Phone"} 1.0' in body + ) + assert ( + 'device_tracker_state{domain="device_tracker",' + 'entity="device_tracker.watch",' + 'friendly_name="Watch"} 0.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -1223,12 +649,11 @@ async def test_counter( """Test prometheus metrics for counter.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="counter_value", - domain="counter", - friendly_name="None", - entity="counter.counter", - ).withValue(2.0).assert_in_metrics(body) + assert ( + 'counter_value{domain="counter",' + 'entity="counter.counter",' + 'friendly_name="None"} 2.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -1238,18 +663,16 @@ async def test_update( """Test prometheus metrics for update.""" body = await generate_latest_metrics(client) - EntityMetric( - metric_name="update_state", - domain="update", - friendly_name="Firmware", - entity="update.firmware", - ).withValue(1).assert_in_metrics(body) - EntityMetric( - metric_name="update_state", - domain="update", - friendly_name="Addon", - entity="update.addon", - ).withValue(0.0).assert_in_metrics(body) + assert ( + 'update_state{domain="update",' + 'entity="update.firmware",' + 'friendly_name="Firmware"} 1.0' in body + ) + assert ( + 'update_state{domain="update",' + 'entity="update.addon",' + 'friendly_name="Addon"} 0.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -1264,49 +687,43 @@ async def test_renaming_entity_name( data = {**sensor_entities, 
**climate_entities} body = await generate_latest_metrics(client) - EntityMetric( - metric_name="sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(15.6).assert_in_metrics(body) + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 15.6' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) - EntityMetric( - metric_name="sensor_humidity_percent", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(54.0).assert_in_metrics(body) + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) - EntityMetric( - metric_name="climate_action", - domain="climate", - friendly_name="HeatPump", - entity="climate.heatpump", - action="heating", - ).withValue(1).assert_in_metrics(body) + assert ( + 'climate_action{action="heating",' + 'domain="climate",' + 'entity="climate.heatpump",' + 'friendly_name="HeatPump"} 1.0' in body + ) - EntityMetric( - metric_name="climate_action", - domain="climate", - friendly_name="HeatPump", - entity="climate.heatpump", - action="cooling", - ).withValue(0.0).assert_in_metrics(body) + assert ( + 'climate_action{action="cooling",' + 'domain="climate",' + 'entity="climate.heatpump",' + 'friendly_name="HeatPump"} 0.0' in body + ) assert "sensor.outside_temperature" in entity_registry.entities assert "climate.heatpump" in entity_registry.entities @@ -1344,50 +761,44 @@ async def test_renaming_entity_name( assert 'friendly_name="HeatPump"' not in body_line # Check if new metrics created - EntityMetric( - metric_name="sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature Renamed", - entity="sensor.outside_temperature", - ).withValue(15.6).assert_in_metrics(body) + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature Renamed"} 15.6' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Temperature Renamed", - entity="sensor.outside_temperature", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature Renamed"} 1.0' in body + ) - EntityMetric( - metric_name="climate_action", - domain="climate", - friendly_name="HeatPump Renamed", - entity="climate.heatpump", - action="heating", - ).withValue(1).assert_in_metrics(body) + assert ( + 'climate_action{action="heating",' + 'domain="climate",' + 'entity="climate.heatpump",' + 'friendly_name="HeatPump Renamed"} 1.0' in body + ) - EntityMetric( - metric_name="climate_action", - domain="climate", - friendly_name="HeatPump Renamed", - 
entity="climate.heatpump", - action="cooling", - ).withValue(0.0).assert_in_metrics(body) + assert ( + 'climate_action{action="cooling",' + 'domain="climate",' + 'entity="climate.heatpump",' + 'friendly_name="HeatPump Renamed"} 0.0' in body + ) # Keep other sensors - EntityMetric( - metric_name="sensor_humidity_percent", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(54.0).assert_in_metrics(body) + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -1402,33 +813,29 @@ async def test_renaming_entity_id( data = {**sensor_entities, **climate_entities} body = await generate_latest_metrics(client) - EntityMetric( - metric_name="sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(15.6).assert_in_metrics(body) + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 15.6' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) - EntityMetric( - metric_name="sensor_humidity_percent", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(54.0).assert_in_metrics(body) + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) assert "sensor.outside_temperature" in entity_registry.entities assert "climate.heatpump" in entity_registry.entities @@ -1448,33 +855,30 @@ async def test_renaming_entity_id( assert 'entity="sensor.outside_temperature"' not in body_line # Check if new metrics created - EntityMetric( - metric_name="sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature_renamed", - ).withValue(15.6).assert_in_metrics(body) + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature_renamed",' + 'friendly_name="Outside Temperature"} 15.6' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature_renamed", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature_renamed",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) # Keep other sensors - EntityMetric( - metric_name="sensor_humidity_percent", 
- domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(54.0).assert_in_metrics(body) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(1).assert_in_metrics(body) + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) + + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -1489,49 +893,43 @@ async def test_deleting_entity( data = {**sensor_entities, **climate_entities} body = await generate_latest_metrics(client) - EntityMetric( - metric_name="sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(15.6).assert_in_metrics(body) + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 15.6' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) - EntityMetric( - metric_name="sensor_humidity_percent", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(54.0).assert_in_metrics(body) + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) - EntityMetric( - metric_name="climate_action", - domain="climate", - friendly_name="HeatPump", - entity="climate.heatpump", - action="heating", - ).withValue(1).assert_in_metrics(body) + assert ( + 'climate_action{action="heating",' + 'domain="climate",' + 'entity="climate.heatpump",' + 'friendly_name="HeatPump"} 1.0' in body + ) - EntityMetric( - metric_name="climate_action", - domain="climate", - friendly_name="HeatPump", - entity="climate.heatpump", - action="cooling", - ).withValue(0.0).assert_in_metrics(body) + assert ( + 'climate_action{action="cooling",' + 'domain="climate",' + 'entity="climate.heatpump",' + 'friendly_name="HeatPump"} 0.0' in body + ) assert "sensor.outside_temperature" in entity_registry.entities assert "climate.heatpump" in entity_registry.entities @@ -1549,19 +947,17 @@ async def test_deleting_entity( assert 'friendly_name="HeatPump"' not in body_line # Keep other sensors - EntityMetric( - metric_name="sensor_humidity_percent", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(54.0).assert_in_metrics(body) + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - 
).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -1578,56 +974,50 @@ async def test_disabling_entity( await hass.async_block_till_done() body = await generate_latest_metrics(client) - EntityMetric( - metric_name="sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(15.6).assert_in_metrics(body) + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 15.6' in body + ) - EntityMetric( - metric_name="state_change_total", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(1).assert_in_metrics(body) + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) - EntityMetric( - metric_name="state_change_created", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).assert_in_metrics(body) + assert any( + 'state_change_created{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"}' in metric + for metric in body + ) - EntityMetric( - metric_name="sensor_humidity_percent", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(54.0).assert_in_metrics(body) + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) - EntityMetric( - metric_name="climate_action", - domain="climate", - friendly_name="HeatPump", - entity="climate.heatpump", - action="heating", - ).withValue(1).assert_in_metrics(body) + assert ( + 'climate_action{action="heating",' + 'domain="climate",' + 'entity="climate.heatpump",' + 'friendly_name="HeatPump"} 1.0' in body + ) - EntityMetric( - metric_name="climate_action", - domain="climate", - friendly_name="HeatPump", - entity="climate.heatpump", - action="cooling", - ).withValue(0.0).assert_in_metrics(body) + assert ( + 'climate_action{action="cooling",' + 'domain="climate",' + 'entity="climate.heatpump",' + 'friendly_name="HeatPump"} 0.0' in body + ) assert "sensor.outside_temperature" in entity_registry.entities assert "climate.heatpump" in entity_registry.entities @@ -1651,191 +1041,137 @@ async def test_disabling_entity( assert 'friendly_name="HeatPump"' not in body_line # Keep other sensors - EntityMetric( - metric_name="sensor_humidity_percent", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(54.0).assert_in_metrics(body) + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(1).assert_in_metrics(body) + assert ( + 
'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) @pytest.mark.parametrize("namespace", [""]) -@pytest.mark.parametrize("unavailable_state", [STATE_UNAVAILABLE, STATE_UNKNOWN]) -async def test_entity_becomes_unavailable( +async def test_entity_becomes_unavailable_with_export( hass: HomeAssistant, entity_registry: er.EntityRegistry, client: ClientSessionGenerator, sensor_entities: dict[str, er.RegistryEntry], - unavailable_state: str, ) -> None: - """Test an entity that becomes unavailable/unknown is no longer exported.""" + """Test an entity that becomes unavailable is still exported.""" data = {**sensor_entities} await hass.async_block_till_done() body = await generate_latest_metrics(client) - EntityMetric( - metric_name="sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(15.6).assert_in_metrics(body) + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 15.6' in body + ) - EntityMetric( - metric_name="state_change_total", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(1).assert_in_metrics(body) + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) - EntityMetric( - metric_name="last_updated_time_seconds", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).assert_in_metrics(body) + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) - EntityMetric( - metric_name="battery_level_percent", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(12.0).assert_in_metrics(body) + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) - EntityMetric( - metric_name="sensor_humidity_percent", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(54.0).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) - EntityMetric( - metric_name="state_change_total", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(1).assert_in_metrics(body) - - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(1).assert_in_metrics(body) - - # Make sensor_1 unavailable/unknown. + # Make sensor_1 unavailable. 
set_state_with_entry( - hass, data["sensor_1"], unavailable_state, data["sensor_1_attributes"] + hass, data["sensor_1"], STATE_UNAVAILABLE, data["sensor_1_attributes"] ) await hass.async_block_till_done() body = await generate_latest_metrics(client) - # Check that the availability changed on sensor_1 and the metric with the value is gone. - EntityMetric( - metric_name="sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).assert_not_in_metrics(body) + # Check that only the availability changed on sensor_1. + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 15.6' in body + ) - EntityMetric( - metric_name="battery_level_percent", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).assert_not_in_metrics(body) + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 2.0' in body + ) - EntityMetric( - metric_name="state_change_total", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(2.0).assert_in_metrics(body) - - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(0.0).assert_in_metrics(body) - - EntityMetric( - metric_name="last_updated_time_seconds", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 0.0' in body + ) # The other sensor should be unchanged. - EntityMetric( - metric_name="sensor_humidity_percent", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(54.0).assert_in_metrics(body) + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) - EntityMetric( - metric_name="state_change_total", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(1).assert_in_metrics(body) + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Humidity", - entity="sensor.outside_humidity", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) - # Bring sensor_1 back and check that it returned. - set_state_with_entry(hass, data["sensor_1"], 201.0, data["sensor_1_attributes"]) + # Bring sensor_1 back and check that it is correct. 
+ set_state_with_entry(hass, data["sensor_1"], 200.0, data["sensor_1_attributes"]) await hass.async_block_till_done() body = await generate_latest_metrics(client) - EntityMetric( - metric_name="sensor_temperature_celsius", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(201.0).assert_in_metrics(body) + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 200.0' in body + ) - EntityMetric( - metric_name="battery_level_percent", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(12.0).assert_in_metrics(body) + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 3.0' in body + ) - EntityMetric( - metric_name="state_change_total", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(3.0).assert_in_metrics(body) - - EntityMetric( - metric_name="entity_available", - domain="sensor", - friendly_name="Outside Temperature", - entity="sensor.outside_temperature", - ).withValue(1).assert_in_metrics(body) + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) @pytest.fixture(name="sensor_entities") @@ -2023,11 +1359,6 @@ async def climate_fixture( ATTR_TARGET_TEMP_LOW: 21, ATTR_TARGET_TEMP_HIGH: 24, ATTR_HVAC_ACTION: climate.HVACAction.COOLING, - ATTR_HVAC_MODES: ["off", "heat", "cool", "heat_cool"], - ATTR_PRESET_MODE: "away", - ATTR_PRESET_MODES: ["away", "home", "sleep"], - ATTR_FAN_MODE: "auto", - ATTR_FAN_MODES: ["auto", "on"], } set_state_with_entry( hass, climate_2, climate.HVACAction.HEATING, climate_2_attributes @@ -2125,7 +1456,7 @@ async def lock_fixture( suggested_object_id="front_door", original_name="Front Door", ) - set_state_with_entry(hass, lock_1, LockState.LOCKED) + set_state_with_entry(hass, lock_1, STATE_LOCKED) data["lock_1"] = lock_1 lock_2 = entity_registry.async_get_or_create( @@ -2135,7 +1466,7 @@ async def lock_fixture( suggested_object_id="kitchen_door", original_name="Kitchen Door", ) - set_state_with_entry(hass, lock_2, LockState.UNLOCKED) + set_state_with_entry(hass, lock_2, STATE_UNLOCKED) data["lock_2"] = lock_2 await hass.async_block_till_done() @@ -2252,17 +1583,6 @@ async def input_number_fixture( set_state_with_entry(hass, input_number_3, 22.7) data["input_number_3"] = input_number_3 - input_number_4 = entity_registry.async_get_or_create( - domain=input_number.DOMAIN, - platform="test", - unique_id="input_number_4", - suggested_object_id="converted_temperature", - original_name="Converted temperature", - unit_of_measurement=UnitOfTemperature.FAHRENHEIT, - ) - set_state_with_entry(hass, input_number_4, 212) - data["input_number_4"] = input_number_4 - await hass.async_block_till_done() return data @@ -2468,76 +1788,6 @@ async def switch_fixture( return data -@pytest.fixture(name="fan_entities") -async def fan_fixture( - hass: HomeAssistant, entity_registry: er.EntityRegistry -) -> dict[str, er.RegistryEntry]: - """Simulate fan entities.""" - data = {} - fan_1 = entity_registry.async_get_or_create( - domain=fan.DOMAIN, - platform="test", - unique_id="fan_1", - suggested_object_id="fan_1", - original_name="Fan 1", - ) - fan_1_attributes = { - ATTR_DIRECTION: DIRECTION_FORWARD, - ATTR_OSCILLATING: True, - ATTR_PERCENTAGE: 33, - 
ATTR_PRESET_MODE: "LO", - ATTR_PRESET_MODES: ["LO", "OFF", "HI"], - } - set_state_with_entry(hass, fan_1, STATE_ON, fan_1_attributes) - data["fan_1"] = fan_1 - data["fan_1_attributes"] = fan_1_attributes - - fan_2 = entity_registry.async_get_or_create( - domain=fan.DOMAIN, - platform="test", - unique_id="fan_2", - suggested_object_id="fan_2", - original_name="Reverse Fan", - ) - fan_2_attributes = {ATTR_DIRECTION: DIRECTION_REVERSE} - set_state_with_entry(hass, fan_2, STATE_ON, fan_2_attributes) - data["fan_2"] = fan_2 - data["fan_2_attributes"] = fan_2_attributes - - await hass.async_block_till_done() - return data - - -@pytest.fixture(name="alarm_control_panel_entities") -async def alarm_control_panel_fixture( - hass: HomeAssistant, entity_registry: er.EntityRegistry -) -> dict[str, er.RegistryEntry]: - """Simulate alarm control panel entities.""" - data = {} - alarm_control_panel_1 = entity_registry.async_get_or_create( - domain=alarm_control_panel.DOMAIN, - platform="test", - unique_id="alarm_control_panel_1", - suggested_object_id="alarm_control_panel_1", - original_name="Alarm Control Panel 1", - ) - set_state_with_entry(hass, alarm_control_panel_1, AlarmControlPanelState.ARMED_AWAY) - data["alarm_control_panel_1"] = alarm_control_panel_1 - - alarm_control_panel_2 = entity_registry.async_get_or_create( - domain=alarm_control_panel.DOMAIN, - platform="test", - unique_id="alarm_control_panel_2", - suggested_object_id="alarm_control_panel_2", - original_name="Alarm Control Panel 2", - ) - set_state_with_entry(hass, alarm_control_panel_2, AlarmControlPanelState.ARMED_HOME) - data["alarm_control_panel_2"] = alarm_control_panel_2 - - await hass.async_block_till_done() - return data - - @pytest.fixture(name="person_entities") async def person_fixture( hass: HomeAssistant, entity_registry: er.EntityRegistry diff --git a/tests/components/prosegur/test_alarm_control_panel.py b/tests/components/prosegur/test_alarm_control_panel.py index 4e3dcdc3fd8..b65b86b3049 100644 --- a/tests/components/prosegur/test_alarm_control_panel.py +++ b/tests/components/prosegur/test_alarm_control_panel.py @@ -1,15 +1,12 @@ """Tests for the Prosegur alarm control panel device.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch from pyprosegur.installation import Status import pytest +from typing_extensions import Generator -from homeassistant.components.alarm_control_panel import ( - DOMAIN as ALARM_DOMAIN, - AlarmControlPanelState, -) +from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, @@ -17,6 +14,9 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_DISARMED, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -93,13 +93,9 @@ async def test_connection_error( @pytest.mark.parametrize( ("code", "alarm_service", "alarm_state"), [ - (Status.ARMED, SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), - ( - Status.PARTIALLY, - SERVICE_ALARM_ARM_HOME, - AlarmControlPanelState.ARMED_HOME, - ), - (Status.DISARMED, SERVICE_ALARM_DISARM, AlarmControlPanelState.DISARMED), + (Status.ARMED, SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), + (Status.PARTIALLY, SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), + (Status.DISARMED, SERVICE_ALARM_DISARM, STATE_ALARM_DISARMED), ], ) async def test_arm( diff --git a/tests/components/prosegur/test_config_flow.py 
b/tests/components/prosegur/test_config_flow.py index 7c3f399ee09..9362cecc289 100644 --- a/tests/components/prosegur/test_config_flow.py +++ b/tests/components/prosegur/test_config_flow.py @@ -143,7 +143,15 @@ async def test_reauth_flow(hass: HomeAssistant, mock_list_contracts) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -200,7 +208,15 @@ async def test_reauth_flow_error(hass: HomeAssistant, exception, base_error) -> ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) with patch( "homeassistant.components.prosegur.config_flow.Installation.list", diff --git a/tests/components/proximity/snapshots/test_diagnostics.ambr b/tests/components/proximity/snapshots/test_diagnostics.ambr index 3d9673ffd90..68270dc3297 100644 --- a/tests/components/proximity/snapshots/test_diagnostics.ambr +++ b/tests/components/proximity/snapshots/test_diagnostics.ambr @@ -93,8 +93,6 @@ 'zone': 'zone.home', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'proximity', 'minor_version': 1, 'options': dict({ diff --git a/tests/components/proximity/test_config_flow.py b/tests/components/proximity/test_config_flow.py index 853026928bc..3ed9f5cba27 100644 --- a/tests/components/proximity/test_config_flow.py +++ b/tests/components/proximity/test_config_flow.py @@ -10,8 +10,8 @@ from homeassistant.components.proximity.const import ( CONF_TRACKED_ENTITIES, DOMAIN, ) -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_ZONE +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT, CONF_ZONE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -120,6 +120,42 @@ async def test_options_flow(hass: HomeAssistant) -> None: } +async def test_import_flow(hass: HomeAssistant) -> None: + """Test import of yaml configuration.""" + with patch( + "homeassistant.components.proximity.async_setup_entry", return_value=True + ) as mock_setup_entry: + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_NAME: "home", + CONF_ZONE: "zone.home", + CONF_TRACKED_ENTITIES: ["device_tracker.test1"], + CONF_IGNORED_ZONES: ["zone.work"], + CONF_TOLERANCE: 10, + CONF_UNIT_OF_MEASUREMENT: "km", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_NAME: "home", + CONF_ZONE: "zone.home", + CONF_TRACKED_ENTITIES: ["device_tracker.test1"], + CONF_IGNORED_ZONES: ["zone.work"], + CONF_TOLERANCE: 10, + CONF_UNIT_OF_MEASUREMENT: "km", + } + + zone = hass.states.get("zone.home") + assert result["title"] == zone.name + + await hass.async_block_till_done() + + assert mock_setup_entry.called + + async def test_abort_duplicated_entry(hass: HomeAssistant) -> None: """Test if we abort on duplicate user input data.""" DATA = { @@ -175,7 +211,7 @@ async def test_avoid_duplicated_title(hass: 
HomeAssistant) -> None: CONF_IGNORED_ZONES: ["zone.work"], CONF_TOLERANCE: 10, }, - unique_id=f"{DOMAIN}_home_3", + unique_id=f"{DOMAIN}_home", ).add_to_hass(hass) with patch( diff --git a/tests/components/proximity/test_diagnostics.py b/tests/components/proximity/test_diagnostics.py index e4f22236808..a60c592fcab 100644 --- a/tests/components/proximity/test_diagnostics.py +++ b/tests/components/proximity/test_diagnostics.py @@ -72,12 +72,5 @@ async def test_entry_diagnostics( assert await get_diagnostics_for_config_entry( hass, hass_client, mock_entry ) == snapshot( - exclude=props( - "entry_id", - "last_changed", - "last_reported", - "last_updated", - "created_at", - "modified_at", - ) + exclude=props("entry_id", "last_changed", "last_reported", "last_updated") ) diff --git a/tests/components/proximity/test_init.py b/tests/components/proximity/test_init.py index eeb181e0670..6c2b54cae29 100644 --- a/tests/components/proximity/test_init.py +++ b/tests/components/proximity/test_init.py @@ -2,12 +2,15 @@ import pytest +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity from homeassistant.components.proximity.const import ( CONF_IGNORED_ZONES, CONF_TOLERANCE, CONF_TRACKED_ENTITIES, DOMAIN, ) +from homeassistant.components.script import scripts_with_entity from homeassistant.const import ( ATTR_FRIENDLY_NAME, CONF_ZONE, @@ -17,81 +20,109 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er import homeassistant.helpers.issue_registry as ir +from homeassistant.setup import async_setup_component from homeassistant.util import slugify from tests.common import MockConfigEntry -async def async_setup_single_entry( - hass: HomeAssistant, - zone: str, - tracked_entites: list[str], - ignored_zones: list[str], - tolerance: int, -) -> MockConfigEntry: - """Set up the proximity component with a single entry.""" - mock_config = MockConfigEntry( - domain=DOMAIN, - title="Home", - data={ - CONF_ZONE: zone, - CONF_TRACKED_ENTITIES: tracked_entites, - CONF_IGNORED_ZONES: ignored_zones, - CONF_TOLERANCE: tolerance, - }, - ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - return mock_config - - @pytest.mark.parametrize( - "config", + ("friendly_name", "config"), [ - { - CONF_IGNORED_ZONES: ["zone.work"], - CONF_TRACKED_ENTITIES: ["device_tracker.test1", "device_tracker.test2"], - CONF_TOLERANCE: 1, - CONF_ZONE: "zone.home", - }, - { - CONF_IGNORED_ZONES: [], - CONF_TRACKED_ENTITIES: ["device_tracker.test1"], - CONF_TOLERANCE: 1, - CONF_ZONE: "zone.work", - }, + ( + "home", + { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1", "device_tracker.test2"], + "tolerance": "1", + }, + ), + ( + "work", + { + "devices": ["device_tracker.test1"], + "tolerance": "1", + "zone": "work", + }, + ), ], ) -async def test_proximities(hass: HomeAssistant, config: dict) -> None: +async def test_proximities( + hass: HomeAssistant, friendly_name: str, config: dict +) -> None: """Test a list of proximities.""" - title = hass.states.get(config[CONF_ZONE]).name - mock_config = MockConfigEntry( - domain=DOMAIN, - title=title, - data=config, + assert await async_setup_component( + hass, DOMAIN, {"proximity": {friendly_name: config}} ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) await hass.async_block_till_done() - 
zone_name = slugify(title) + # proximity entity + state = hass.states.get(f"proximity.{friendly_name}") + assert state.state == "not set" + assert state.attributes.get("nearest") == "not set" + assert state.attributes.get("dir_of_travel") == "not set" + hass.states.async_set(f"proximity.{friendly_name}", "0") + await hass.async_block_till_done() + state = hass.states.get(f"proximity.{friendly_name}") + assert state.state == "0" # sensor entities - state = hass.states.get(f"sensor.{zone_name}_nearest_device") + state = hass.states.get(f"sensor.{friendly_name}_nearest_device") assert state.state == STATE_UNKNOWN - for device in config[CONF_TRACKED_ENTITIES]: - entity_base_name = f"sensor.{zone_name}_{slugify(device.split('.')[-1])}" + for device in config["devices"]: + entity_base_name = f"sensor.{friendly_name}_{slugify(device.split('.')[-1])}" state = hass.states.get(f"{entity_base_name}_distance") assert state.state == STATE_UNAVAILABLE state = hass.states.get(f"{entity_base_name}_direction_of_travel") assert state.state == STATE_UNAVAILABLE +async def test_legacy_setup(hass: HomeAssistant) -> None: + """Test legacy setup only on imported entries.""" + config = { + "proximity": { + "home": { + "devices": ["device_tracker.test1"], + "tolerance": "1", + }, + } + } + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done() + assert hass.states.get("proximity.home") + + mock_config = MockConfigEntry( + domain=DOMAIN, + title="work", + data={ + CONF_ZONE: "zone.work", + CONF_TRACKED_ENTITIES: ["device_tracker.test2"], + CONF_IGNORED_ZONES: [], + CONF_TOLERANCE: 1, + }, + unique_id=f"{DOMAIN}_work", + ) + mock_config.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config.entry_id) + await hass.async_block_till_done() + + assert not hass.states.get("proximity.work") + + async def test_device_tracker_test1_in_zone(hass: HomeAssistant) -> None: """Test for tracker in zone.""" - await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) + config = { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1"], + "tolerance": "1", + } + } + } + + assert await async_setup_component(hass, DOMAIN, config) hass.states.async_set( "device_tracker.test1", @@ -100,6 +131,12 @@ async def test_device_tracker_test1_in_zone(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.state == "0" + assert state.attributes.get("nearest") == "test1" + assert state.attributes.get("dir_of_travel") == "arrived" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -113,7 +150,17 @@ async def test_device_tracker_test1_in_zone(hass: HomeAssistant) -> None: async def test_device_tracker_test1_away(hass: HomeAssistant) -> None: """Test for tracker state away.""" - await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) + config = { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1"], + "tolerance": "1", + } + } + } + + assert await async_setup_component(hass, DOMAIN, config) hass.states.async_set( "device_tracker.test1", @@ -123,6 +170,11 @@ async def test_device_tracker_test1_away(hass: HomeAssistant) -> None: await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test1" + assert 
state.attributes.get("dir_of_travel") == "unknown" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -138,7 +190,20 @@ async def test_device_tracker_test1_awayfurther( hass: HomeAssistant, config_zones ) -> None: """Test for tracker state away further.""" - await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) + + await hass.async_block_till_done() + + config = { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1"], + "tolerance": "1", + } + } + } + + assert await async_setup_component(hass, DOMAIN, config) hass.states.async_set( "device_tracker.test1", @@ -147,6 +212,11 @@ async def test_device_tracker_test1_awayfurther( ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test1" + assert state.attributes.get("dir_of_travel") == "unknown" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -164,6 +234,11 @@ async def test_device_tracker_test1_awayfurther( ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test1" + assert state.attributes.get("dir_of_travel") == "away_from" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -179,7 +254,19 @@ async def test_device_tracker_test1_awaycloser( hass: HomeAssistant, config_zones ) -> None: """Test for tracker state away closer.""" - await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) + await hass.async_block_till_done() + + config = { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1"], + "tolerance": "1", + } + } + } + + assert await async_setup_component(hass, DOMAIN, config) hass.states.async_set( "device_tracker.test1", @@ -188,6 +275,11 @@ async def test_device_tracker_test1_awaycloser( ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test1" + assert state.attributes.get("dir_of_travel") == "unknown" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -205,6 +297,11 @@ async def test_device_tracker_test1_awaycloser( ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test1" + assert state.attributes.get("dir_of_travel") == "towards" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -218,11 +315,27 @@ async def test_device_tracker_test1_awaycloser( async def test_all_device_trackers_in_ignored_zone(hass: HomeAssistant) -> None: """Test for tracker in ignored zone.""" - await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) + config = { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1"], + "tolerance": "1", + } + } + } + + assert await async_setup_component(hass, DOMAIN, config) hass.states.async_set("device_tracker.test1", "work", {"friendly_name": "test1"}) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.state == "not set" + assert state.attributes.get("nearest") == "not set" + assert 
state.attributes.get("dir_of_travel") == "not set" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == STATE_UNKNOWN @@ -236,13 +349,28 @@ async def test_all_device_trackers_in_ignored_zone(hass: HomeAssistant) -> None: async def test_device_tracker_test1_no_coordinates(hass: HomeAssistant) -> None: """Test for tracker with no coordinates.""" - await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) + config = { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1"], + "tolerance": "1", + } + } + } + + assert await async_setup_component(hass, DOMAIN, config) hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "not set" + assert state.attributes.get("dir_of_travel") == "not set" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == STATE_UNKNOWN @@ -256,8 +384,19 @@ async def test_device_tracker_test1_no_coordinates(hass: HomeAssistant) -> None: async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> None: """Test for tracker states.""" - await async_setup_single_entry( - hass, "zone.home", ["device_tracker.test1"], ["zone.work"], 1000 + assert await async_setup_component( + hass, + DOMAIN, + { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1"], + "tolerance": 1000, + "zone": "home", + } + } + }, ) hass.states.async_set( @@ -267,6 +406,11 @@ async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> No ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test1" + assert state.attributes.get("dir_of_travel") == "unknown" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -284,6 +428,11 @@ async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> No ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test1" + assert state.attributes.get("dir_of_travel") == "stationary" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -297,13 +446,17 @@ async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> No async def test_device_trackers_in_zone(hass: HomeAssistant) -> None: """Test for trackers in zone.""" - await async_setup_single_entry( - hass, - "zone.home", - ["device_tracker.test1", "device_tracker.test2"], - ["zone.work"], - 1, - ) + config = { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1", "device_tracker.test2"], + "tolerance": "1", + } + } + } + + assert await async_setup_component(hass, DOMAIN, config) hass.states.async_set( "device_tracker.test1", @@ -318,6 +471,14 @@ async def test_device_trackers_in_zone(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.state == "0" + assert (state.attributes.get("nearest") == "test1, test2") or ( + state.attributes.get("nearest") == "test2, test1" + ) + assert state.attributes.get("dir_of_travel") == "arrived" + # sensor entities state = 
hass.states.get("sensor.home_nearest_device") assert state.state == "test1, test2" @@ -334,18 +495,30 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test1( hass: HomeAssistant, config_zones ) -> None: """Test for tracker ordering.""" + await hass.async_block_till_done() + hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) + await hass.async_block_till_done() hass.states.async_set( "device_tracker.test2", "not_home", {"friendly_name": "test2"} ) - await async_setup_single_entry( + await hass.async_block_till_done() + + assert await async_setup_component( hass, - "zone.home", - ["device_tracker.test1", "device_tracker.test2"], - ["zone.work"], - 1, + DOMAIN, + { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1", "device_tracker.test2"], + "tolerance": "1", + "zone": "home", + } + } + }, ) hass.states.async_set( @@ -355,6 +528,11 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test1( ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test1" + assert state.attributes.get("dir_of_travel") == "unknown" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -378,6 +556,11 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test1( ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test1" + assert state.attributes.get("dir_of_travel") == "unknown" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -399,19 +582,28 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test2( hass: HomeAssistant, config_zones ) -> None: """Test for tracker ordering.""" + await hass.async_block_till_done() + hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) + await hass.async_block_till_done() hass.states.async_set( "device_tracker.test2", "not_home", {"friendly_name": "test2"} ) - - await async_setup_single_entry( + await hass.async_block_till_done() + assert await async_setup_component( hass, - "zone.home", - ["device_tracker.test1", "device_tracker.test2"], - ["zone.work"], - 1, + DOMAIN, + { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1", "device_tracker.test2"], + "zone": "home", + } + } + }, ) hass.states.async_set( @@ -421,6 +613,11 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test2( ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test2" + assert state.attributes.get("dir_of_travel") == "unknown" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test2" @@ -444,6 +641,11 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test2( ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test1" + assert state.attributes.get("dir_of_travel") == "unknown" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -468,15 +670,23 @@ async def test_device_tracker_test1_awayfurther_test2_in_ignored_zone( hass.states.async_set( "device_tracker.test1", "not_home", 
{"friendly_name": "test1"} ) + await hass.async_block_till_done() hass.states.async_set("device_tracker.test2", "work", {"friendly_name": "test2"}) - - await async_setup_single_entry( + await hass.async_block_till_done() + assert await async_setup_component( hass, - "zone.home", - ["device_tracker.test1", "device_tracker.test2"], - ["zone.work"], - 1, + DOMAIN, + { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1", "device_tracker.test2"], + "zone": "home", + } + } + }, ) + hass.states.async_set( "device_tracker.test1", "not_home", @@ -484,6 +694,11 @@ async def test_device_tracker_test1_awayfurther_test2_in_ignored_zone( ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test1" + assert state.attributes.get("dir_of_travel") == "unknown" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -505,19 +720,29 @@ async def test_device_tracker_test1_awayfurther_test2_first( hass: HomeAssistant, config_zones ) -> None: """Test for tracker state.""" + await hass.async_block_till_done() + hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) + await hass.async_block_till_done() hass.states.async_set( "device_tracker.test2", "not_home", {"friendly_name": "test2"} ) + await hass.async_block_till_done() - await async_setup_single_entry( + assert await async_setup_component( hass, - "zone.home", - ["device_tracker.test1", "device_tracker.test2"], - ["zone.work"], - 1, + DOMAIN, + { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1", "device_tracker.test2"], + "zone": "home", + } + } + }, ) hass.states.async_set( @@ -551,6 +776,11 @@ async def test_device_tracker_test1_awayfurther_test2_first( hass.states.async_set("device_tracker.test1", "work", {"friendly_name": "test1"}) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test2" + assert state.attributes.get("dir_of_travel") == "unknown" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test2" @@ -573,6 +803,7 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) -> None: """Test for tracker states.""" await hass.async_block_till_done() + hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) @@ -582,12 +813,18 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() - await async_setup_single_entry( + assert await async_setup_component( hass, - "zone.home", - ["device_tracker.test1", "device_tracker.test2"], - ["zone.work"], - 1, + DOMAIN, + { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1", "device_tracker.test2"], + "zone": "home", + } + } + }, ) hass.states.async_set( @@ -597,6 +834,11 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test1" + assert state.attributes.get("dir_of_travel") == "unknown" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -620,6 +862,11 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await 
hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test2" + assert state.attributes.get("dir_of_travel") == "unknown" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test2" @@ -643,6 +890,11 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() + # proximity entity + state = hass.states.get("proximity.home") + assert state.attributes.get("nearest") == "test1" + assert state.attributes.get("dir_of_travel") == "unknown" + # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -662,10 +914,22 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( async def test_nearest_sensors(hass: HomeAssistant, config_zones) -> None: """Test for nearest sensors.""" - await async_setup_single_entry( - hass, "zone.home", ["device_tracker.test1", "device_tracker.test2"], [], 1 + mock_config = MockConfigEntry( + domain=DOMAIN, + title="home", + data={ + CONF_ZONE: "zone.home", + CONF_TRACKED_ENTITIES: ["device_tracker.test1", "device_tracker.test2"], + CONF_IGNORED_ZONES: [], + CONF_TOLERANCE: 1, + }, + unique_id=f"{DOMAIN}_home", ) + mock_config.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config.entry_id) + await hass.async_block_till_done() + hass.states.async_set( "device_tracker.test1", "not_home", @@ -774,6 +1038,71 @@ async def test_nearest_sensors(hass: HomeAssistant, config_zones) -> None: assert state.state == STATE_UNKNOWN + +async def test_create_deprecated_proximity_issue( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test we create an issue for deprecated proximity entities used in automations and scripts.""" + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": "proximity.home"}, + "action": { + "service": "automation.turn_on", + "target": {"entity_id": "automation.test"}, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "condition": "state", + "entity_id": "proximity.home", + "state": "home", + }, + ], + } + } + }, + ) + config = { + "proximity": { + "home": { + "ignored_zones": ["work"], + "devices": ["device_tracker.test1", "device_tracker.test2"], + "tolerance": "1", + }, + "work": {"tolerance": "1", "zone": "work"}, + } + } + + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done() + + automation_entities = automations_with_entity(hass, "proximity.home") + assert len(automation_entities) == 1 + assert automation_entities[0] == "automation.test" + + script_entities = scripts_with_entity(hass, "proximity.home") + + assert len(script_entities) == 1 + assert script_entities[0] == "script.test" + assert issue_registry.async_get_issue(DOMAIN, "deprecated_proximity_entity_home") + + assert not issue_registry.async_get_issue( + DOMAIN, "deprecated_proximity_entity_work" + ) + + async def test_create_removed_tracked_entity_issue( hass: HomeAssistant, issue_registry: ir.IssueRegistry, @@ -790,10 +1119,22 @@ async def test_create_removed_tracked_entity_issue( hass.states.async_set(t1.entity_id, "not_home") hass.states.async_set(t2.entity_id, "not_home") - await async_setup_single_entry( - hass, "zone.home", [t1.entity_id, t2.entity_id], [], 1 + mock_config = 
MockConfigEntry( + domain=DOMAIN, + title="home", + data={ + CONF_ZONE: "zone.home", + CONF_TRACKED_ENTITIES: [t1.entity_id, t2.entity_id], + CONF_IGNORED_ZONES: [], + CONF_TOLERANCE: 1, + }, + unique_id=f"{DOMAIN}_home", ) + mock_config.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config.entry_id) + await hass.async_block_till_done() + sensor_t1 = f"sensor.home_{t1.entity_id.split('.')[-1]}_distance" sensor_t2 = f"sensor.home_{t2.entity_id.split('.')[-1]}_distance" @@ -827,10 +1168,22 @@ async def test_track_renamed_tracked_entity( hass.states.async_set(t1.entity_id, "not_home") - mock_config = await async_setup_single_entry( - hass, "zone.home", [t1.entity_id], ["zone.work"], 1 + mock_config = MockConfigEntry( + domain=DOMAIN, + title="home", + data={ + CONF_ZONE: "zone.home", + CONF_TRACKED_ENTITIES: [t1.entity_id], + CONF_IGNORED_ZONES: [], + CONF_TOLERANCE: 1, + }, + unique_id=f"{DOMAIN}_home", ) + mock_config.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config.entry_id) + await hass.async_block_till_done() + sensor_t1 = f"sensor.home_{t1.entity_id.split('.')[-1]}_distance" entity = entity_registry.async_get(sensor_t1) @@ -863,60 +1216,31 @@ async def test_sensor_unique_ids( hass.states.async_set("device_tracker.test2", "not_home") - mock_config = await async_setup_single_entry( - hass, "zone.home", [t1.entity_id, "device_tracker.test2"], ["zone.work"], 1 + mock_config = MockConfigEntry( + domain=DOMAIN, + title="home", + data={ + CONF_ZONE: "zone.home", + CONF_TRACKED_ENTITIES: [t1.entity_id, "device_tracker.test2"], + CONF_IGNORED_ZONES: [], + CONF_TOLERANCE: 1, + }, + unique_id=f"{DOMAIN}_home", ) + mock_config.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config.entry_id) + await hass.async_block_till_done() + sensor_t1 = "sensor.home_test_tracker_1_distance" entity = entity_registry.async_get(sensor_t1) assert entity assert entity.unique_id == f"{mock_config.entry_id}_{t1.id}_dist_to_zone" state = hass.states.get(sensor_t1) - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "Home Test tracker 1 Distance" + assert state.attributes.get(ATTR_FRIENDLY_NAME) == "home Test tracker 1 Distance" entity = entity_registry.async_get("sensor.home_test2_distance") assert entity assert ( entity.unique_id == f"{mock_config.entry_id}_device_tracker.test2_dist_to_zone" ) - - -async def test_tracked_zone_is_removed(hass: HomeAssistant) -> None: - """Test that tracked zone is removed.""" - await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) - - hass.states.async_set( - "device_tracker.test1", - "home", - {"friendly_name": "test1", "latitude": 2.1, "longitude": 1.1}, - ) - await hass.async_block_till_done() - - # check sensor entities - state = hass.states.get("sensor.home_nearest_device") - assert state.state == "test1" - - entity_base_name = "sensor.home_test1" - state = hass.states.get(f"{entity_base_name}_distance") - assert state.state == "0" - state = hass.states.get(f"{entity_base_name}_direction_of_travel") - assert state.state == "arrived" - - # remove tracked zone and move tracked entity - assert hass.states.async_remove("zone.home") - hass.states.async_set( - "device_tracker.test1", - "home", - {"friendly_name": "test1", "latitude": 2.2, "longitude": 1.2}, - ) - await hass.async_block_till_done() - - # check sensor entities - state = hass.states.get("sensor.home_nearest_device") - assert state.state == STATE_UNKNOWN - - entity_base_name = "sensor.home_test1" - state = 
hass.states.get(f"{entity_base_name}_distance") - assert state.state == STATE_UNAVAILABLE - state = hass.states.get(f"{entity_base_name}_direction_of_travel") - assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/prusalink/conftest.py b/tests/components/prusalink/conftest.py index 9bcf45056cd..104e4d47afa 100644 --- a/tests/components/prusalink/conftest.py +++ b/tests/components/prusalink/conftest.py @@ -1,19 +1,16 @@ """Fixtures for PrusaLink.""" -from collections.abc import Generator -from typing import Any from unittest.mock import patch import pytest from homeassistant.components.prusalink import DOMAIN -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: +def mock_config_entry(hass): """Mock a PrusaLink config entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -26,7 +23,7 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture -def mock_version_api() -> Generator[dict[str, str]]: +def mock_version_api(hass): """Mock PrusaLink version API.""" resp = { "api": "2.0.0", @@ -39,7 +36,7 @@ def mock_version_api() -> Generator[dict[str, str]]: @pytest.fixture -def mock_info_api() -> Generator[dict[str, Any]]: +def mock_info_api(hass): """Mock PrusaLink info API.""" resp = { "nozzle_diameter": 0.40, @@ -53,7 +50,7 @@ def mock_info_api() -> Generator[dict[str, Any]]: @pytest.fixture -def mock_get_legacy_printer() -> Generator[dict[str, Any]]: +def mock_get_legacy_printer(hass): """Mock PrusaLink printer API.""" resp = {"telemetry": {"material": "PLA"}} with patch("pyprusalink.PrusaLink.get_legacy_printer", return_value=resp): @@ -61,7 +58,7 @@ def mock_get_legacy_printer() -> Generator[dict[str, Any]]: @pytest.fixture -def mock_get_status_idle() -> Generator[dict[str, Any]]: +def mock_get_status_idle(hass): """Mock PrusaLink printer API.""" resp = { "storage": { @@ -89,7 +86,7 @@ def mock_get_status_idle() -> Generator[dict[str, Any]]: @pytest.fixture -def mock_get_status_printing() -> Generator[dict[str, Any]]: +def mock_get_status_printing(hass): """Mock PrusaLink printer API.""" resp = { "job": { @@ -117,7 +114,7 @@ def mock_get_status_printing() -> Generator[dict[str, Any]]: @pytest.fixture -def mock_job_api_idle() -> Generator[dict[str, Any]]: +def mock_job_api_idle(hass): """Mock PrusaLink job API having no job.""" resp = {} with patch("pyprusalink.PrusaLink.get_job", return_value=resp): @@ -125,7 +122,7 @@ def mock_job_api_idle() -> Generator[dict[str, Any]]: @pytest.fixture -def mock_job_api_idle_mk3() -> Generator[dict[str, Any]]: +def mock_job_api_idle_mk3(hass): """Mock PrusaLink job API having a job with idle state (MK3).""" resp = { "id": 129, @@ -151,7 +148,7 @@ def mock_job_api_idle_mk3() -> Generator[dict[str, Any]]: @pytest.fixture -def mock_job_api_printing() -> Generator[dict[str, Any]]: +def mock_job_api_printing(hass): """Mock PrusaLink printing.""" resp = { "id": 129, @@ -177,9 +174,7 @@ def mock_job_api_printing() -> Generator[dict[str, Any]]: @pytest.fixture -def mock_job_api_paused( - mock_get_status_printing: dict[str, Any], mock_job_api_printing: dict[str, Any] -) -> None: +def mock_job_api_paused(hass, mock_get_status_printing, mock_job_api_printing): """Mock PrusaLink paused printing.""" mock_job_api_printing["state"] = "PAUSED" mock_get_status_printing["printer"]["state"] = "PAUSED" @@ -187,10 +182,10 @@ def mock_job_api_paused( @pytest.fixture def mock_api( - mock_version_api: dict[str, str], - 
mock_info_api: dict[str, Any], - mock_get_legacy_printer: dict[str, Any], - mock_get_status_idle: dict[str, Any], - mock_job_api_idle: dict[str, Any], -) -> None: + mock_version_api, + mock_info_api, + mock_get_legacy_printer, + mock_get_status_idle, + mock_job_api_idle, +): """Mock PrusaLink API.""" diff --git a/tests/components/prusalink/test_binary_sensor.py b/tests/components/prusalink/test_binary_sensor.py deleted file mode 100644 index c39b15471c6..00000000000 --- a/tests/components/prusalink/test_binary_sensor.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Test Prusalink sensors.""" - -from unittest.mock import PropertyMock, patch - -import pytest - -from homeassistant.const import STATE_OFF, Platform -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - - -@pytest.fixture(autouse=True) -def setup_binary_sensor_platform_only(): - """Only setup sensor platform.""" - with ( - patch("homeassistant.components.prusalink.PLATFORMS", [Platform.BINARY_SENSOR]), - patch( - "homeassistant.helpers.entity.Entity.entity_registry_enabled_default", - PropertyMock(return_value=True), - ), - ): - yield - - -async def test_binary_sensors_no_job( - hass: HomeAssistant, mock_config_entry, mock_api -) -> None: - """Test sensors while no job active.""" - assert await async_setup_component(hass, "prusalink", {}) - - state = hass.states.get("binary_sensor.mock_title_mmu") - assert state is not None - assert state.state == STATE_OFF diff --git a/tests/components/prusalink/test_button.py b/tests/components/prusalink/test_button.py index f85e0232c74..54f3854161c 100644 --- a/tests/components/prusalink/test_button.py +++ b/tests/components/prusalink/test_button.py @@ -93,7 +93,7 @@ async def test_button_resume_cancel( with ( patch(f"pyprusalink.PrusaLink.{method}") as mock_meth, patch( - "homeassistant.components.prusalink.coordinator.PrusaLinkUpdateCoordinator._fetch_data" + "homeassistant.components.prusalink.PrusaLinkUpdateCoordinator._fetch_data" ), ): await hass.services.async_call( diff --git a/tests/components/prusalink/test_sensor.py b/tests/components/prusalink/test_sensor.py index c0693626600..b15e9198da6 100644 --- a/tests/components/prusalink/test_sensor.py +++ b/tests/components/prusalink/test_sensor.py @@ -101,10 +101,6 @@ async def test_sensors_no_job(hass: HomeAssistant, mock_config_entry, mock_api) assert state is not None assert state.state == "PLA" - state = hass.states.get("sensor.mock_title_nozzle_diameter") - assert state is not None - assert state.state == "0.4" - state = hass.states.get("sensor.mock_title_print_flow") assert state is not None assert state.state == "100" @@ -209,10 +205,6 @@ async def test_sensors_idle_job_mk3( assert state is not None assert state.state == "PLA" - state = hass.states.get("sensor.mock_title_nozzle_diameter") - assert state is not None - assert state.state == "0.4" - state = hass.states.get("sensor.mock_title_print_flow") assert state is not None assert state.state == "100" diff --git a/tests/components/ps4/conftest.py b/tests/components/ps4/conftest.py index c95cc78f53a..bc84ea3b4db 100644 --- a/tests/components/ps4/conftest.py +++ b/tests/components/ps4/conftest.py @@ -1,10 +1,10 @@ """Test configuration for PS4.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch from pyps4_2ndscreen.ddp import DEFAULT_UDP_PORT, DDPProtocol import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/ps4/test_init.py b/tests/components/ps4/test_init.py index 
d14f367b2bd..180f51295ac 100644 --- a/tests/components/ps4/test_init.py +++ b/tests/components/ps4/test_init.py @@ -199,7 +199,7 @@ async def test_media_player_is_setup(hass: HomeAssistant) -> None: assert len(hass.data[PS4_DATA].devices) == 1 -async def setup_mock_component(hass: HomeAssistant) -> None: +async def setup_mock_component(hass): """Set up Mock Media Player.""" entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA, version=VERSION) entry.add_to_manager(hass.config_entries) @@ -269,7 +269,9 @@ async def test_send_command(hass: HomeAssistant) -> None: """Test send_command service.""" await setup_mock_component(hass) - mock_func = "homeassistant.components.ps4.media_player.PS4Device.async_send_command" + mock_func = "{}{}".format( + "homeassistant.components.ps4", ".media_player.PS4Device.async_send_command" + ) mock_devices = hass.data[PS4_DATA].devices assert len(mock_devices) == 1 diff --git a/tests/components/ps4/test_media_player.py b/tests/components/ps4/test_media_player.py index 737cc3c9f1b..e0be9d508fc 100644 --- a/tests/components/ps4/test_media_player.py +++ b/tests/components/ps4/test_media_player.py @@ -1,6 +1,5 @@ """Tests for the PS4 media player platform.""" -from typing import Any from unittest.mock import MagicMock, patch from pyps4_2ndscreen.credential import get_ddp_message @@ -131,9 +130,7 @@ MOCK_CONFIG = MockConfigEntry(domain=DOMAIN, data=MOCK_DATA, entry_id=MOCK_ENTRY MOCK_LOAD = "homeassistant.components.ps4.media_player.load_games" -async def setup_mock_component( - hass: HomeAssistant, entry: MockConfigEntry | None = None -) -> str: +async def setup_mock_component(hass, entry=None): """Set up Mock Media Player.""" if entry is None: mock_entry = MockConfigEntry( @@ -153,9 +150,7 @@ async def setup_mock_component( return mock_entities[0] -async def mock_ddp_response( - hass: HomeAssistant, mock_status_data: dict[str, Any] -) -> None: +async def mock_ddp_response(hass, mock_status_data): """Mock raw UDP response from device.""" mock_protocol = hass.data[PS4_DATA].protocol assert mock_protocol.local_port == DEFAULT_UDP_PORT @@ -194,7 +189,10 @@ async def test_state_standby_is_set(hass: HomeAssistant) -> None: async def test_state_playing_is_set(hass: HomeAssistant) -> None: """Test that state is set to playing.""" mock_entity_id = await setup_mock_component(hass) - mock_func = "homeassistant.components.ps4.media_player.pyps4.Ps4Async.async_get_ps_store_data" + mock_func = "{}{}".format( + "homeassistant.components.ps4.media_player.", + "pyps4.Ps4Async.async_get_ps_store_data", + ) with patch(mock_func, return_value=None): await mock_ddp_response(hass, MOCK_STATUS_PLAYING) @@ -221,7 +219,10 @@ async def test_state_none_is_set(hass: HomeAssistant) -> None: async def test_media_attributes_are_fetched(hass: HomeAssistant) -> None: """Test that media attributes are fetched.""" mock_entity_id = await setup_mock_component(hass) - mock_func = "homeassistant.components.ps4.media_player.pyps4.Ps4Async.async_get_ps_store_data" + mock_func = "{}{}".format( + "homeassistant.components.ps4.media_player.", + "pyps4.Ps4Async.async_get_ps_store_data", + ) # Mock result from fetching data. mock_result = MagicMock() @@ -270,7 +271,8 @@ async def test_media_attributes_are_loaded( patch_load_json_object.return_value = {MOCK_TITLE_ID: MOCK_GAMES_DATA_LOCKED} with patch( - "homeassistant.components.ps4.media_player.pyps4.Ps4Async.async_get_ps_store_data", + "homeassistant.components.ps4.media_player." 
+ "pyps4.Ps4Async.async_get_ps_store_data", return_value=None, ) as mock_fetch: await mock_ddp_response(hass, MOCK_STATUS_PLAYING) @@ -374,7 +376,9 @@ async def test_device_info_assummed_works( async def test_turn_on(hass: HomeAssistant) -> None: """Test that turn on service calls function.""" mock_entity_id = await setup_mock_component(hass) - mock_func = "homeassistant.components.ps4.media_player.pyps4.Ps4Async.wakeup" + mock_func = "{}{}".format( + "homeassistant.components.ps4.media_player.", "pyps4.Ps4Async.wakeup" + ) with patch(mock_func) as mock_call: await hass.services.async_call( @@ -388,7 +392,9 @@ async def test_turn_on(hass: HomeAssistant) -> None: async def test_turn_off(hass: HomeAssistant) -> None: """Test that turn off service calls function.""" mock_entity_id = await setup_mock_component(hass) - mock_func = "homeassistant.components.ps4.media_player.pyps4.Ps4Async.standby" + mock_func = "{}{}".format( + "homeassistant.components.ps4.media_player.", "pyps4.Ps4Async.standby" + ) with patch(mock_func) as mock_call: await hass.services.async_call( @@ -402,7 +408,9 @@ async def test_turn_off(hass: HomeAssistant) -> None: async def test_toggle(hass: HomeAssistant) -> None: """Test that toggle service calls function.""" mock_entity_id = await setup_mock_component(hass) - mock_func = "homeassistant.components.ps4.media_player.pyps4.Ps4Async.toggle" + mock_func = "{}{}".format( + "homeassistant.components.ps4.media_player.", "pyps4.Ps4Async.toggle" + ) with patch(mock_func) as mock_call: await hass.services.async_call( @@ -416,8 +424,8 @@ async def test_toggle(hass: HomeAssistant) -> None: async def test_media_pause(hass: HomeAssistant) -> None: """Test that media pause service calls function.""" mock_entity_id = await setup_mock_component(hass) - mock_func = ( - "homeassistant.components.ps4.media_player.pyps4.Ps4Async.remote_control" + mock_func = "{}{}".format( + "homeassistant.components.ps4.media_player.", "pyps4.Ps4Async.remote_control" ) with patch(mock_func) as mock_call: @@ -432,8 +440,8 @@ async def test_media_pause(hass: HomeAssistant) -> None: async def test_media_stop(hass: HomeAssistant) -> None: """Test that media stop service calls function.""" mock_entity_id = await setup_mock_component(hass) - mock_func = ( - "homeassistant.components.ps4.media_player.pyps4.Ps4Async.remote_control" + mock_func = "{}{}".format( + "homeassistant.components.ps4.media_player.", "pyps4.Ps4Async.remote_control" ) with patch(mock_func) as mock_call: diff --git a/tests/components/pure_energie/conftest.py b/tests/components/pure_energie/conftest.py index 9aa3a4cc1b4..7174befbf5b 100644 --- a/tests/components/pure_energie/conftest.py +++ b/tests/components/pure_energie/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Pure Energie integration tests.""" -from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch from gridnet import Device as GridNetDevice, SmartBridge import pytest +from typing_extensions import Generator from homeassistant.components.pure_energie.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/purpleair/conftest.py b/tests/components/purpleair/conftest.py index 3d6776dd12e..1305c98308d 100644 --- a/tests/components/purpleair/conftest.py +++ b/tests/components/purpleair/conftest.py @@ -1,7 +1,5 @@ """Define fixtures for PurpleAir tests.""" -from collections.abc import Generator -from typing import Any from unittest.mock import AsyncMock, Mock, patch from aiopurpleair.endpoints.sensors 
import NearbySensorResult @@ -9,7 +7,6 @@ from aiopurpleair.models.sensors import GetSensorsResponse import pytest from homeassistant.components.purpleair import DOMAIN -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -19,7 +16,7 @@ TEST_SENSOR_INDEX2 = 567890 @pytest.fixture(name="api") -def api_fixture(get_sensors_response: GetSensorsResponse) -> Mock: +def api_fixture(get_sensors_response): """Define a fixture to return a mocked aiopurple API object.""" return Mock( async_check_api_key=AsyncMock(), @@ -37,11 +34,7 @@ def api_fixture(get_sensors_response: GetSensorsResponse) -> Mock: @pytest.fixture(name="config_entry") -def config_entry_fixture( - hass: HomeAssistant, - config_entry_data: dict[str, Any], - config_entry_options: dict[str, Any], -) -> MockConfigEntry: +def config_entry_fixture(hass, config_entry_data, config_entry_options): """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -55,7 +48,7 @@ def config_entry_fixture( @pytest.fixture(name="config_entry_data") -def config_entry_data_fixture() -> dict[str, Any]: +def config_entry_data_fixture(): """Define a config entry data fixture.""" return { "api_key": TEST_API_KEY, @@ -63,7 +56,7 @@ def config_entry_data_fixture() -> dict[str, Any]: @pytest.fixture(name="config_entry_options") -def config_entry_options_fixture() -> dict[str, Any]: +def config_entry_options_fixture(): """Define a config entry options fixture.""" return { "sensor_indices": [TEST_SENSOR_INDEX1], @@ -71,7 +64,7 @@ def config_entry_options_fixture() -> dict[str, Any]: @pytest.fixture(name="get_sensors_response", scope="package") -def get_sensors_response_fixture() -> GetSensorsResponse: +def get_sensors_response_fixture(): """Define a fixture to mock an aiopurpleair GetSensorsResponse object.""" return GetSensorsResponse.parse_raw( load_fixture("get_sensors_response.json", "purpleair") @@ -79,7 +72,7 @@ def get_sensors_response_fixture() -> GetSensorsResponse: @pytest.fixture(name="mock_aiopurpleair") -def mock_aiopurpleair_fixture(api: Mock) -> Generator[Mock]: +async def mock_aiopurpleair_fixture(api): """Define a fixture to patch aiopurpleair.""" with ( patch("homeassistant.components.purpleair.config_flow.API", return_value=api), @@ -89,9 +82,7 @@ def mock_aiopurpleair_fixture(api: Mock) -> Generator[Mock]: @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture( - hass: HomeAssistant, config_entry: MockConfigEntry, mock_aiopurpleair: Mock -) -> None: +async def setup_config_entry_fixture(hass, config_entry, mock_aiopurpleair): """Define a fixture to set up purpleair.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/purpleair/test_config_flow.py b/tests/components/purpleair/test_config_flow.py index 998cb2b7878..2345d98b5e1 100644 --- a/tests/components/purpleair/test_config_flow.py +++ b/tests/components/purpleair/test_config_flow.py @@ -6,15 +6,13 @@ from aiopurpleair.errors import InvalidApiKeyError, PurpleAirError import pytest from homeassistant.components.purpleair import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr from .conftest import TEST_API_KEY, TEST_SENSOR_INDEX1, TEST_SENSOR_INDEX2 -from tests.common import 
MockConfigEntry - TEST_LATITUDE = 51.5285582 TEST_LONGITUDE = -0.2416796 @@ -129,11 +127,19 @@ async def test_reauth( mock_aiopurpleair, check_api_key_errors, check_api_key_mock, - config_entry: MockConfigEntry, + config_entry, setup_config_entry, ) -> None: """Test re-auth (including errors).""" - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + "unique_id": config_entry.unique_id, + }, + data={"api_key": TEST_API_KEY}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/purpleair/test_diagnostics.py b/tests/components/purpleair/test_diagnostics.py index ae4b28567be..13dcd1338e0 100644 --- a/tests/components/purpleair/test_diagnostics.py +++ b/tests/components/purpleair/test_diagnostics.py @@ -3,7 +3,6 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.core import HomeAssistant -from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -35,9 +34,6 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, - "created_at": ANY, - "modified_at": ANY, - "discovery_keys": {}, }, "data": { "fields": [ diff --git a/tests/components/push/test_camera.py b/tests/components/push/test_camera.py index 0088aa6a9c2..df296e7cb57 100644 --- a/tests/components/push/test_camera.py +++ b/tests/components/push/test_camera.py @@ -4,8 +4,8 @@ from datetime import timedelta from http import HTTPStatus import io +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util diff --git a/tests/components/pushover/test_config_flow.py b/tests/components/pushover/test_config_flow.py index 58485bfb427..14347084288 100644 --- a/tests/components/pushover/test_config_flow.py +++ b/tests/components/pushover/test_config_flow.py @@ -149,7 +149,14 @@ async def test_reauth_success(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=MOCK_CONFIG, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -173,7 +180,14 @@ async def test_reauth_failed(hass: HomeAssistant, mock_pushover: MagicMock) -> N ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=MOCK_CONFIG, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -209,7 +223,14 @@ async def test_reauth_with_existing_config(hass: HomeAssistant) -> None: ) entry2.add_to_hass(hass) - result = await entry2.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=MOCK_CONFIG, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git 
a/tests/components/pushover/test_init.py b/tests/components/pushover/test_init.py index 85266e34d13..c3a653042ce 100644 --- a/tests/components/pushover/test_init.py +++ b/tests/components/pushover/test_init.py @@ -5,7 +5,6 @@ from unittest.mock import MagicMock, patch from pushover_complete import BadAPIRequestError import pytest import requests_mock -from urllib3.exceptions import MaxRetryError from homeassistant.components.pushover.const import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -94,18 +93,3 @@ async def test_async_setup_entry_failed_json_error( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_async_setup_entry_failed_urrlib3_error( - hass: HomeAssistant, mock_pushover: MagicMock -) -> None: - """Test pushover failed setup due to conn error.""" - entry = MockConfigEntry( - domain=DOMAIN, - data=MOCK_CONFIG, - ) - entry.add_to_hass(hass) - mock_pushover.side_effect = MaxRetryError(MagicMock(), MagicMock()) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/pvoutput/conftest.py b/tests/components/pvoutput/conftest.py index a55bb21d2ae..d19f09d9e6c 100644 --- a/tests/components/pvoutput/conftest.py +++ b/tests/components/pvoutput/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pvo import Status, System import pytest +from typing_extensions import Generator from homeassistant.components.pvoutput.const import CONF_SYSTEM_ID, DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/pvoutput/test_config_flow.py b/tests/components/pvoutput/test_config_flow.py index fc4335de00d..20e99f8e497 100644 --- a/tests/components/pvoutput/test_config_flow.py +++ b/tests/components/pvoutput/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, MagicMock from pvo import PVOutputAuthenticationError, PVOutputConnectionError from homeassistant.components.pvoutput.const import CONF_SYSTEM_ID, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -150,7 +150,15 @@ async def test_reauth_flow( """Test the reauthentication configuration flow.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -184,7 +192,15 @@ async def test_reauth_with_authentication_error( """ mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -228,7 +244,15 @@ async 
def test_reauth_api_error( """Test API error during reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" diff --git a/tests/components/pyload/conftest.py b/tests/components/pyload/conftest.py index c0f181396ab..1d7b11567c7 100644 --- a/tests/components/pyload/conftest.py +++ b/tests/components/pyload/conftest.py @@ -1,7 +1,7 @@ """Fixtures for pyLoad integration tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, patch from pyloadapi.types import LoginResponse, StatusServerResponse import pytest @@ -72,7 +72,7 @@ def pyload_config() -> ConfigType: @pytest.fixture -def mock_pyloadapi() -> Generator[MagicMock]: +def mock_pyloadapi() -> Generator[AsyncMock, None, None]: """Mock PyLoadAPI.""" with ( patch( diff --git a/tests/components/pyload/snapshots/test_sensor.ambr b/tests/components/pyload/snapshots/test_sensor.ambr index 69d0387fc8f..c1e5a9d6c3a 100644 --- a/tests/components/pyload/snapshots/test_sensor.ambr +++ b/tests/components/pyload/snapshots/test_sensor.ambr @@ -99,6 +99,56 @@ 'state': 'unavailable', }) # --- +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_finished_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_finished_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Finished downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_finished_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Finished downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_finished_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- # name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -207,56 +257,6 @@ 'state': 'unavailable', }) # --- -# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_total_downloads-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pyload_total_downloads', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, 
- 'original_name': 'Total downloads', - 'platform': 'pyload', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'XXXXXXXXXXXXXX_total', - 'unit_of_measurement': 'downloads', - }) -# --- -# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_total_downloads-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'pyLoad Total downloads', - 'state_class': , - 'unit_of_measurement': 'downloads', - }), - 'context': , - 'entity_id': 'sensor.pyload_total_downloads', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_active_downloads-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -357,6 +357,56 @@ 'state': 'unavailable', }) # --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_finished_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_finished_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Finished downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_finished_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Finished downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_finished_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- # name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -465,56 +515,6 @@ 'state': 'unavailable', }) # --- -# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_total_downloads-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pyload_total_downloads', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Total downloads', - 'platform': 'pyload', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'XXXXXXXXXXXXXX_total', - 'unit_of_measurement': 'downloads', - }) -# --- -# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_total_downloads-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'pyLoad Total downloads', - 'state_class': , - 'unit_of_measurement': 'downloads', - }), - 'context': , - 'entity_id': 'sensor.pyload_total_downloads', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: 
test_sensor_update_exceptions[ParserError][sensor.pyload_active_downloads-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -615,6 +615,56 @@ 'state': 'unavailable', }) # --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_finished_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_finished_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Finished downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_finished_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Finished downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_finished_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- # name: test_sensor_update_exceptions[ParserError][sensor.pyload_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -723,56 +773,6 @@ 'state': 'unavailable', }) # --- -# name: test_sensor_update_exceptions[ParserError][sensor.pyload_total_downloads-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pyload_total_downloads', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Total downloads', - 'platform': 'pyload', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'XXXXXXXXXXXXXX_total', - 'unit_of_measurement': 'downloads', - }) -# --- -# name: test_sensor_update_exceptions[ParserError][sensor.pyload_total_downloads-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'pyLoad Total downloads', - 'state_class': , - 'unit_of_measurement': 'downloads', - }), - 'context': , - 'entity_id': 'sensor.pyload_total_downloads', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_setup[sensor.pyload_active_downloads-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -873,6 +873,56 @@ 'state': '6', }) # --- +# name: test_setup[sensor.pyload_finished_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_finished_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'Finished downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_setup[sensor.pyload_finished_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Finished downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_finished_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37', + }) +# --- # name: test_setup[sensor.pyload_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -981,53 +1031,3 @@ 'state': '43.247704', }) # --- -# name: test_setup[sensor.pyload_total_downloads-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pyload_total_downloads', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Total downloads', - 'platform': 'pyload', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'XXXXXXXXXXXXXX_total', - 'unit_of_measurement': 'downloads', - }) -# --- -# name: test_setup[sensor.pyload_total_downloads-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'pyLoad Total downloads', - 'state_class': , - 'unit_of_measurement': 'downloads', - }), - 'context': , - 'entity_id': 'sensor.pyload_total_downloads', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '37', - }) -# --- diff --git a/tests/components/pyload/snapshots/test_switch.ambr b/tests/components/pyload/snapshots/test_switch.ambr index 0fcc45f8586..b6465341b0a 100644 --- a/tests/components/pyload/snapshots/test_switch.ambr +++ b/tests/components/pyload/snapshots/test_switch.ambr @@ -93,3 +93,50 @@ 'state': 'on', }) # --- +# name: test_state[switch.pyload_reconnect-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.pyload_reconnect', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reconnect', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_reconnect', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[switch.pyload_reconnect-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'pyLoad Reconnect', + }), + 'context': , + 'entity_id': 'switch.pyload_reconnect', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/pyload/test_button.py b/tests/components/pyload/test_button.py index 9a2f480bede..b5aa18ad3d9 100644 --- a/tests/components/pyload/test_button.py +++ b/tests/components/pyload/test_button.py @@ -1,9 
+1,8 @@ """The tests for the button component.""" -from collections.abc import Generator +from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, call, patch -from pyloadapi import CannotConnect, InvalidAuth import pytest from syrupy.assertion import SnapshotAssertion @@ -12,7 +11,6 @@ from homeassistant.components.pyload.button import PyLoadButtonEntity from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry, snapshot_platform @@ -26,7 +24,7 @@ API_CALL = { @pytest.fixture(autouse=True) -def button_only() -> Generator[None]: +async def button_only() -> AsyncGenerator[None, None]: """Enable only the button platform.""" with patch( "homeassistant.components.pyload.PLATFORMS", @@ -80,43 +78,6 @@ async def test_button_press( {ATTR_ENTITY_ID: entity_entry.entity_id}, blocking=True, ) + await hass.async_block_till_done() assert API_CALL[entity_entry.translation_key] in mock_pyloadapi.method_calls mock_pyloadapi.reset_mock() - - -@pytest.mark.parametrize( - ("side_effect"), - [CannotConnect, InvalidAuth], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_button_press_errors( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_pyloadapi: AsyncMock, - entity_registry: er.EntityRegistry, - side_effect: Exception, -) -> None: - """Test button press method.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - entity_entries = er.async_entries_for_config_entry( - entity_registry, config_entry.entry_id - ) - mock_pyloadapi.stop_all_downloads.side_effect = side_effect - mock_pyloadapi.restart_failed.side_effect = side_effect - mock_pyloadapi.delete_finished.side_effect = side_effect - mock_pyloadapi.restart.side_effect = side_effect - - for entity_entry in entity_entries: - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_entry.entity_id}, - blocking=True, - ) diff --git a/tests/components/pyload/test_config_flow.py b/tests/components/pyload/test_config_flow.py index 5ada856d78e..8c775412371 100644 --- a/tests/components/pyload/test_config_flow.py +++ b/tests/components/pyload/test_config_flow.py @@ -6,7 +6,12 @@ from pyloadapi.exceptions import CannotConnect, InvalidAuth, ParserError import pytest from homeassistant.components.pyload.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import ( + SOURCE_IMPORT, + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + SOURCE_USER, +) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -175,7 +180,14 @@ async def test_reauth( config_entry.add_to_hass(hass) - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + "unique_id": config_entry.unique_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -210,7 +222,14 @@ async def test_reauth_errors( config_entry.add_to_hass(hass) - result = await 
config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + "unique_id": config_entry.unique_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -247,10 +266,17 @@ async def test_reconfiguration( config_entry.add_to_hass(hass) - result = await config_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": config_entry.entry_id, + "unique_id": config_entry.unique_id, + }, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -282,10 +308,17 @@ async def test_reconfigure_errors( config_entry.add_to_hass(hass) - result = await config_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": config_entry.entry_id, + "unique_id": config_entry.unique_id, + }, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" mock_pyloadapi.login.side_effect = side_effect result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/pyload/test_sensor.py b/tests/components/pyload/test_sensor.py index 8c194a111ea..a44c9c8bf91 100644 --- a/tests/components/pyload/test_sensor.py +++ b/tests/components/pyload/test_sensor.py @@ -1,6 +1,6 @@ """Tests for the pyLoad Sensors.""" -from collections.abc import Generator +from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory @@ -22,7 +22,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_plat @pytest.fixture(autouse=True) -def sensor_only() -> Generator[None]: +async def sensor_only() -> AsyncGenerator[None, None]: """Enable only the sensor platform.""" with patch( "homeassistant.components.pyload.PLATFORMS", @@ -157,25 +157,3 @@ async def test_deprecated_yaml( assert issue_registry.async_get_issue( domain=HOMEASSISTANT_DOMAIN, issue_id=f"deprecated_yaml_{DOMAIN}" ) - - -async def test_pyload_pre_0_5_0( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_pyloadapi: AsyncMock, -) -> None: - """Test setup of the pyload sensor platform.""" - mock_pyloadapi.get_status.return_value = { - "pause": False, - "active": 1, - "queue": 6, - "total": 37, - "speed": 5405963.0, - "download": True, - "reconnect": False, - } - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED diff --git a/tests/components/pyload/test_switch.py b/tests/components/pyload/test_switch.py index 493dbd8c0da..42a6bfa6f14 100644 --- a/tests/components/pyload/test_switch.py +++ b/tests/components/pyload/test_switch.py @@ -1,9 +1,8 @@ """Tests for the pyLoad Switches.""" -from collections.abc import Generator +from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, call, patch -from pyloadapi import CannotConnect, InvalidAuth import pytest from syrupy.assertion import SnapshotAssertion @@ -17,7 +16,6 @@ from homeassistant.components.switch import ( from homeassistant.config_entries import 
ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry, snapshot_platform @@ -38,7 +36,7 @@ API_CALL = { @pytest.fixture(autouse=True) -def switch_only() -> Generator[None]: +async def switch_only() -> AsyncGenerator[None, None]: """Enable only the switch platform.""" with patch( "homeassistant.components.pyload.PLATFORMS", @@ -104,49 +102,3 @@ async def test_turn_on_off( in mock_pyloadapi.method_calls ) mock_pyloadapi.reset_mock() - - -@pytest.mark.parametrize( - ("service_call"), - [ - SERVICE_TURN_ON, - SERVICE_TURN_OFF, - SERVICE_TOGGLE, - ], -) -@pytest.mark.parametrize( - ("side_effect"), - [CannotConnect, InvalidAuth], -) -async def test_turn_on_off_errors( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_pyloadapi: AsyncMock, - service_call: str, - entity_registry: er.EntityRegistry, - side_effect: Exception, -) -> None: - """Test switch turn on/off, toggle method.""" - - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - entity_entries = er.async_entries_for_config_entry( - entity_registry, config_entry.entry_id - ) - mock_pyloadapi.unpause.side_effect = side_effect - mock_pyloadapi.pause.side_effect = side_effect - mock_pyloadapi.toggle_pause.side_effect = side_effect - mock_pyloadapi.toggle_reconnect.side_effect = side_effect - - for entity_entry in entity_entries: - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - SWITCH_DOMAIN, - service_call, - {ATTR_ENTITY_ID: entity_entry.entity_id}, - blocking=True, - ) diff --git a/tests/components/python_script/test_init.py b/tests/components/python_script/test_init.py index c4dc00c448a..03fa73f076e 100644 --- a/tests/components/python_script/test_init.py +++ b/tests/components/python_script/test_init.py @@ -155,7 +155,7 @@ raise Exception('boom') task = hass.async_add_executor_job(execute, hass, "test.py", source, {}, True) await hass.async_block_till_done(wait_background_tasks=True) - assert type(task.exception()) is HomeAssistantError + assert type(task.exception()) == HomeAssistantError assert "Error executing script (Exception): boom" in str(task.exception()) @@ -183,7 +183,7 @@ hass.async_stop() task = hass.async_add_executor_job(execute, hass, "test.py", source, {}, True) await hass.async_block_till_done(wait_background_tasks=True) - assert type(task.exception()) is ServiceValidationError + assert type(task.exception()) == ServiceValidationError assert "Not allowed to access async methods" in str(task.exception()) @@ -233,7 +233,7 @@ async def test_accessing_forbidden_methods_with_response(hass: HomeAssistant) -> task = hass.async_add_executor_job(execute, hass, "test.py", source, {}, True) await hass.async_block_till_done(wait_background_tasks=True) - assert type(task.exception()) is ServiceValidationError + assert type(task.exception()) == ServiceValidationError assert f"Not allowed to access {name}" in str(task.exception()) diff --git a/tests/components/qbittorrent/conftest.py b/tests/components/qbittorrent/conftest.py index 17fb8e15b47..b15e2a6865b 100644 --- a/tests/components/qbittorrent/conftest.py +++ b/tests/components/qbittorrent/conftest.py @@ -1,10 +1,10 @@ """Fixtures for testing qBittorrent component.""" -from 
collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest import requests_mock +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/qnap/conftest.py b/tests/components/qnap/conftest.py index 2625f1805b6..c0947318f60 100644 --- a/tests/components/qnap/conftest.py +++ b/tests/components/qnap/conftest.py @@ -1,9 +1,9 @@ """Setup the QNAP tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator TEST_HOST = "1.2.3.4" TEST_USERNAME = "admin" diff --git a/tests/components/qnap_qsw/test_diagnostics.py b/tests/components/qnap_qsw/test_diagnostics.py index ccaac458b12..8bca9d8d989 100644 --- a/tests/components/qnap_qsw/test_diagnostics.py +++ b/tests/components/qnap_qsw/test_diagnostics.py @@ -25,7 +25,7 @@ from aioqsw.const import ( QSD_SYSTEM_TIME, QSD_TEMP, QSD_TEMP_MAX, - QSD_UPTIME_SECONDS, + QSD_UPTIME, QSD_VERSION, ) @@ -118,6 +118,6 @@ async def test_config_entry_diagnostics( assert ( sys_time_diag.items() >= { - QSD_UPTIME_SECONDS: sys_time_mock[API_UPTIME], + QSD_UPTIME: sys_time_mock[API_UPTIME], }.items() ) diff --git a/tests/components/qnap_qsw/test_sensor.py b/tests/components/qnap_qsw/test_sensor.py index 16335e878fd..646058add62 100644 --- a/tests/components/qnap_qsw/test_sensor.py +++ b/tests/components/qnap_qsw/test_sensor.py @@ -1,27 +1,19 @@ """The sensor tests for the QNAP QSW platform.""" -from unittest.mock import patch - -from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.qnap_qsw.const import ATTR_MAX, DOMAIN -from homeassistant.const import Platform +from homeassistant.components.qnap_qsw.const import ATTR_MAX from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er, issue_registry as ir -from .util import async_init_integration, init_config_entry +from .util import async_init_integration @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_qnap_qsw_create_sensors( hass: HomeAssistant, - freezer: FrozenDateTimeFactory, ) -> None: """Test creation of sensors.""" - await hass.config.async_set_time_zone("UTC") - freezer.move_to("2024-07-25 12:00:00+00:00") await async_init_integration(hass) state = hass.states.get("sensor.qsw_m408_4c_fan_1_speed") @@ -53,8 +45,8 @@ async def test_qnap_qsw_create_sensors( state = hass.states.get("sensor.qsw_m408_4c_tx_speed") assert state.state == "0" - state = hass.states.get("sensor.qsw_m408_4c_uptime_timestamp") - assert state.state == "2024-07-25T11:58:29+00:00" + state = hass.states.get("sensor.qsw_m408_4c_uptime") + assert state.state == "91" # LACP Ports state = hass.states.get("sensor.qsw_m408_4c_lacp_port_1_link_speed") @@ -381,60 +373,3 @@ async def test_qnap_qsw_create_sensors( state = hass.states.get("sensor.qsw_m408_4c_port_12_tx_speed") assert state.state == "0" - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_deprecated_uptime_seconds( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - issue_registry: ir.IssueRegistry, -) -> None: - """Test deprecation warning of the Uptime seconds sensor entity.""" - original_id = "sensor.qsw_m408_4c_uptime" - domain = Platform.SENSOR - - config_entry = init_config_entry(hass) - - entity = entity_registry.async_get_or_create( - domain=domain, - platform=DOMAIN, - unique_id=original_id, - config_entry=config_entry, - suggested_object_id=original_id, - disabled_by=None, - ) - - 
assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) - - with patch( - "homeassistant.components.qnap_qsw.sensor.automations_with_entity", - return_value=["item"], - ): - await async_init_integration(hass, config_entry=config_entry) - assert issue_registry.async_get_issue( - DOMAIN, f"uptime_seconds_deprecated_{entity.entity_id}_item" - ) - - -async def test_cleanup_deprecated_uptime_seconds( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, -) -> None: - """Test cleanup of the Uptime seconds sensor entity.""" - original_id = "sensor.qsw_m408_4c_uptime_seconds" - domain = Platform.SENSOR - - config_entry = init_config_entry(hass) - - entity_registry.async_get_or_create( - domain=domain, - platform=DOMAIN, - unique_id=original_id, - config_entry=config_entry, - suggested_object_id=original_id, - disabled_by=er.RegistryEntryDisabler.USER, - ) - - assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) - - await async_init_integration(hass, config_entry=config_entry) diff --git a/tests/components/qnap_qsw/util.py b/tests/components/qnap_qsw/util.py index 5132c1061ec..63238bb30a1 100644 --- a/tests/components/qnap_qsw/util.py +++ b/tests/components/qnap_qsw/util.py @@ -491,10 +491,11 @@ USERS_VERIFICATION_MOCK = { } -def init_config_entry( +async def async_init_integration( hass: HomeAssistant, -) -> MockConfigEntry: - """Set up the QNAP QSW entry in Home Assistant.""" +) -> None: + """Set up the QNAP QSW integration in Home Assistant.""" + config_entry = MockConfigEntry( data=CONFIG, domain=DOMAIN, @@ -502,18 +503,6 @@ def init_config_entry( ) config_entry.add_to_hass(hass) - return config_entry - - -async def async_init_integration( - hass: HomeAssistant, - config_entry: MockConfigEntry | None = None, -) -> None: - """Set up the QNAP QSW integration in Home Assistant.""" - - if config_entry is None: - config_entry = init_config_entry(hass) - with ( patch( "homeassistant.components.qnap_qsw.QnapQswApi.get_firmware_condition", diff --git a/tests/components/rabbitair/test_config_flow.py b/tests/components/rabbitair/test_config_flow.py index 7f9479339a5..2e0cfba38c0 100644 --- a/tests/components/rabbitair/test_config_flow.py +++ b/tests/components/rabbitair/test_config_flow.py @@ -2,12 +2,12 @@ from __future__ import annotations -from collections.abc import Generator from ipaddress import ip_address from unittest.mock import MagicMock, Mock, patch import pytest from rabbitair import Mode, Model, Speed +from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components import zeroconf diff --git a/tests/components/rachio/test_config_flow.py b/tests/components/rachio/test_config_flow.py index 586b31b092f..1eaec1bc46e 100644 --- a/tests/components/rachio/test_config_flow.py +++ b/tests/components/rachio/test_config_flow.py @@ -183,16 +183,3 @@ async def test_form_homekit_ignored(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - - -async def test_options_flow(hass: HomeAssistant) -> None: - """Test option flow.""" - entry = MockConfigEntry(domain=DOMAIN, data={CONF_API_KEY: "api_key"}) - entry.add_to_hass(hass) - - result = await hass.config_entries.options.async_init(entry.entry_id) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - # This should be improved at a later stage to increase test coverage - hass.config_entries.options.async_abort(result["flow_id"]) diff --git 
a/tests/components/radarr/test_config_flow.py b/tests/components/radarr/test_config_flow.py index 096c78e1c4a..407b7b50c48 100644 --- a/tests/components/radarr/test_config_flow.py +++ b/tests/components/radarr/test_config_flow.py @@ -6,7 +6,7 @@ from aiopyarr import exceptions import pytest from homeassistant.components.radarr.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -137,30 +137,21 @@ async def test_zero_conf(hass: HomeAssistant) -> None: assert result["data"] == CONF_DATA -async def test_url_rewrite(hass: HomeAssistant) -> None: - """Test auth flow url rewrite.""" - with patch( - "homeassistant.components.radarr.config_flow.RadarrClient.async_try_zeroconf", - return_value=("v3", API_KEY, "/test"), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={CONF_SOURCE: SOURCE_USER}, - data={CONF_URL: "https://192.168.1.100/test", CONF_VERIFY_SSL: False}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DEFAULT_NAME - assert result["data"][CONF_URL] == "https://192.168.1.100:443/test" - - @pytest.mark.freeze_time("2021-12-03 00:00:00+00:00") async def test_full_reauth_flow_implementation( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test the manual reauth flow from start to finish.""" entry = await setup_integration(hass, aioclient_mock) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + CONF_SOURCE: SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/radio_browser/conftest.py b/tests/components/radio_browser/conftest.py index fc666b32c53..95fda545a6c 100644 --- a/tests/components/radio_browser/conftest.py +++ b/tests/components/radio_browser/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.radio_browser.const import DOMAIN diff --git a/tests/components/rainbird/conftest.py b/tests/components/rainbird/conftest.py index b0411d9d313..a2c26c71231 100644 --- a/tests/components/rainbird/conftest.py +++ b/tests/components/rainbird/conftest.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import Generator from http import HTTPStatus import json from typing import Any @@ -10,6 +9,7 @@ from unittest.mock import patch from pyrainbird import encryption import pytest +from typing_extensions import Generator from homeassistant.components.rainbird import DOMAIN from homeassistant.components.rainbird.const import ( diff --git a/tests/components/rainbird/test_config_flow.py b/tests/components/rainbird/test_config_flow.py index 87506ad656c..cdcef95f458 100644 --- a/tests/components/rainbird/test_config_flow.py +++ b/tests/components/rainbird/test_config_flow.py @@ -1,11 +1,11 @@ """Tests for the Rain Bird config flow.""" -from collections.abc import AsyncGenerator from http import HTTPStatus from typing import Any from unittest.mock import AsyncMock, Mock, patch import 
pytest +from typing_extensions import AsyncGenerator from homeassistant import config_entries from homeassistant.components.rainbird import DOMAIN @@ -40,7 +40,7 @@ def mock_responses() -> list[AiohttpClientMockResponse]: @pytest.fixture(autouse=True) -async def config_entry_data() -> dict[str, Any] | None: +async def config_entry_data() -> None: """Fixture to disable config entry setup for exercising config flow.""" return None diff --git a/tests/components/rainforest_eagle/conftest.py b/tests/components/rainforest_eagle/conftest.py index c3790a12e86..1aff693e61f 100644 --- a/tests/components/rainforest_eagle/conftest.py +++ b/tests/components/rainforest_eagle/conftest.py @@ -1,7 +1,6 @@ """Conftest for rainforest_eagle.""" -from collections.abc import AsyncGenerator -from unittest.mock import AsyncMock, MagicMock, Mock, patch +from unittest.mock import AsyncMock, Mock, patch import pytest @@ -14,7 +13,6 @@ from homeassistant.components.rainforest_eagle.const import ( TYPE_EAGLE_200, ) from homeassistant.const import CONF_HOST, CONF_TYPE -from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from . import MOCK_200_RESPONSE_WITHOUT_PRICE, MOCK_CLOUD_ID @@ -23,7 +21,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def config_entry_200(hass: HomeAssistant) -> MockConfigEntry: +def config_entry_200(hass): """Return a config entry.""" entry = MockConfigEntry( domain="rainforest_eagle", @@ -40,9 +38,7 @@ def config_entry_200(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture -async def setup_rainforest_200( - hass: HomeAssistant, config_entry_200: MockConfigEntry -) -> AsyncGenerator[Mock]: +async def setup_rainforest_200(hass, config_entry_200): """Set up rainforest.""" with patch( "aioeagle.ElectricMeter.create_instance", @@ -57,7 +53,7 @@ async def setup_rainforest_200( @pytest.fixture -async def setup_rainforest_100(hass: HomeAssistant) -> AsyncGenerator[MagicMock]: +async def setup_rainforest_100(hass): """Set up rainforest.""" MockConfigEntry( domain="rainforest_eagle", diff --git a/tests/components/rainforest_eagle/test_diagnostics.py b/tests/components/rainforest_eagle/test_diagnostics.py index 5aa460415b3..ed13c33f7b8 100644 --- a/tests/components/rainforest_eagle/test_diagnostics.py +++ b/tests/components/rainforest_eagle/test_diagnostics.py @@ -27,7 +27,7 @@ async def test_entry_diagnostics( config_entry_dict["data"][CONF_CLOUD_ID] = REDACTED assert result == { - "config_entry": config_entry_dict | {"discovery_keys": {}}, + "config_entry": config_entry_dict, "data": { var["Name"]: var["Value"] for var in MOCK_200_RESPONSE_WITHOUT_PRICE.values() diff --git a/tests/components/rainforest_raven/__init__.py b/tests/components/rainforest_raven/__init__.py index ead1bb2ad3f..9d40652b42d 100644 --- a/tests/components/rainforest_raven/__init__.py +++ b/tests/components/rainforest_raven/__init__.py @@ -1,7 +1,5 @@ """Tests for the Rainforest RAVEn component.""" -from unittest.mock import AsyncMock - from homeassistant.components.rainforest_raven.const import DOMAIN from homeassistant.const import CONF_DEVICE, CONF_MAC @@ -16,7 +14,7 @@ from .const import ( SUMMATION, ) -from tests.common import MockConfigEntry +from tests.common import AsyncMock, MockConfigEntry def create_mock_device() -> AsyncMock: @@ -44,5 +42,4 @@ def create_mock_entry(no_meters: bool = False) -> MockConfigEntry: CONF_DEVICE: DISCOVERY_INFO.device, CONF_MAC: [] if no_meters else [METER_INFO[None].meter_mac_id.hex()], }, - 
entry_id="01JADXBJSPYEBAFPKGXDJWZBQ8", ) diff --git a/tests/components/rainforest_raven/conftest.py b/tests/components/rainforest_raven/conftest.py index 35ce4443032..0a809c6430a 100644 --- a/tests/components/rainforest_raven/conftest.py +++ b/tests/components/rainforest_raven/conftest.py @@ -1,9 +1,9 @@ """Fixtures for the Rainforest RAVEn tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr b/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr deleted file mode 100644 index e131bf3d952..00000000000 --- a/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,107 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'config_entry': dict({ - 'data': dict({ - 'device': '/dev/ttyACM0', - 'mac': '**REDACTED**', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'rainforest_raven', - 'entry_id': '01JADXBJSPYEBAFPKGXDJWZBQ8', - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': None, - 'version': 1, - }), - 'data': dict({ - 'Meters': dict({ - '**REDACTED0**': dict({ - 'CurrentSummationDelivered': dict({ - 'device_mac_id': '**REDACTED**', - 'meter_mac_id': '**REDACTED**', - 'summation_delivered': '23456.7890', - 'summation_received': '00000.0000', - 'time_stamp': None, - }), - 'InstantaneousDemand': dict({ - 'demand': '1.2345', - 'device_mac_id': '**REDACTED**', - 'meter_mac_id': '**REDACTED**', - 'time_stamp': None, - }), - 'PriceCluster': dict({ - 'currency': dict({ - '__type': "", - 'repr': "", - }), - 'device_mac_id': '**REDACTED**', - 'meter_mac_id': '**REDACTED**', - 'price': '0.10', - 'rate_label': 'Set by user', - 'tier': 3, - 'tier_label': 'Set by user', - 'time_stamp': None, - }), - }), - }), - 'NetworkInfo': dict({ - 'channel': 13, - 'coord_mac_id': None, - 'description': None, - 'device_mac_id': '**REDACTED**', - 'ext_pan_id': None, - 'link_strength': 100, - 'short_addr': None, - 'status': None, - 'status_code': None, - }), - }), - }) -# --- -# name: test_entry_diagnostics_no_meters - dict({ - 'config_entry': dict({ - 'data': dict({ - 'device': '/dev/ttyACM0', - 'mac': '**REDACTED**', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'rainforest_raven', - 'entry_id': '01JADXBJSPYEBAFPKGXDJWZBQ8', - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': None, - 'version': 1, - }), - 'data': dict({ - 'Meters': dict({ - }), - 'NetworkInfo': dict({ - 'channel': 13, - 'coord_mac_id': None, - 'description': None, - 'device_mac_id': '**REDACTED**', - 'ext_pan_id': None, - 'link_strength': 100, - 'short_addr': None, - 'status': None, - 'status_code': None, - }), - }), - }) -# --- diff --git a/tests/components/rainforest_raven/snapshots/test_init.ambr b/tests/components/rainforest_raven/snapshots/test_init.ambr deleted file mode 100644 index 768bbc729d4..00000000000 --- a/tests/components/rainforest_raven/snapshots/test_init.ambr +++ /dev/null @@ -1,39 +0,0 @@ -# serializer version: 1 -# name: test_device_registry[None-0] - list([ - ]) -# --- -# name: test_device_registry[device_info0-1] - list([ - 
DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '2.7.3', - 'id': , - 'identifiers': set({ - tuple( - 'rainforest_raven', - 'abcdef0123456789', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Rainforest Automation, Inc.', - 'model': 'Z105-2-EMU2-LEDD_JM', - 'model_id': 'Z105-2-EMU2-LEDD_JM', - 'name': 'RAVEn Device', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': '2.0.0 (7400)', - 'via_device_id': None, - }), - ]) -# --- diff --git a/tests/components/rainforest_raven/snapshots/test_sensor.ambr b/tests/components/rainforest_raven/snapshots/test_sensor.ambr deleted file mode 100644 index 34a5e031885..00000000000 --- a/tests/components/rainforest_raven/snapshots/test_sensor.ambr +++ /dev/null @@ -1,257 +0,0 @@ -# serializer version: 1 -# name: test_sensors[sensor.raven_device_meter_power_demand-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.raven_device_meter_power_demand', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Meter power demand', - 'platform': 'rainforest_raven', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_demand', - 'unique_id': '1234567890abcdef.InstantaneousDemand.demand', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.raven_device_meter_power_demand-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'RAVEn Device Meter power demand', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.raven_device_meter_power_demand', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.2345', - }) -# --- -# name: test_sensors[sensor.raven_device_meter_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.raven_device_meter_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Meter price', - 'platform': 'rainforest_raven', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'meter_price', - 'unique_id': '1234567890abcdef.PriceCluster.price', - 'unit_of_measurement': 'USD/kWh', - }) -# --- -# name: test_sensors[sensor.raven_device_meter_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'RAVEn Device Meter price', - 'rate_label': 'Set by user', - 'state_class': , - 'tier': 3, - 'unit_of_measurement': 'USD/kWh', - }), - 'context': , - 'entity_id': 'sensor.raven_device_meter_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.10', - }) -# --- -# name: 
test_sensors[sensor.raven_device_meter_signal_strength-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.raven_device_meter_signal_strength', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Meter signal strength', - 'platform': 'rainforest_raven', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'signal_strength', - 'unique_id': 'abcdef0123456789.NetworkInfo.link_strength', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.raven_device_meter_signal_strength-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'channel': 13, - 'friendly_name': 'RAVEn Device Meter signal strength', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.raven_device_meter_signal_strength', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[sensor.raven_device_total_meter_energy_delivered-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.raven_device_total_meter_energy_delivered', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Total meter energy delivered', - 'platform': 'rainforest_raven', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_energy_delivered', - 'unique_id': '1234567890abcdef.CurrentSummationDelivered.summation_delivered', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.raven_device_total_meter_energy_delivered-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'RAVEn Device Total meter energy delivered', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.raven_device_total_meter_energy_delivered', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '23456.7890', - }) -# --- -# name: test_sensors[sensor.raven_device_total_meter_energy_received-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.raven_device_total_meter_energy_received', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Total meter energy received', - 'platform': 'rainforest_raven', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_energy_received', - 'unique_id': '1234567890abcdef.CurrentSummationDelivered.summation_received', - 'unit_of_measurement': , - }) -# --- -# name: 
test_sensors[sensor.raven_device_total_meter_energy_received-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'RAVEn Device Total meter energy received', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.raven_device_total_meter_energy_received', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '00000.0000', - }) -# --- diff --git a/tests/components/rainforest_raven/test_config_flow.py b/tests/components/rainforest_raven/test_config_flow.py index da7e65882a4..7f7041cbcd8 100644 --- a/tests/components/rainforest_raven/test_config_flow.py +++ b/tests/components/rainforest_raven/test_config_flow.py @@ -1,11 +1,11 @@ """Test Rainforest RAVEn config flow.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch from aioraven.device import RAVEnConnectionError import pytest from serial.tools.list_ports_common import ListPortInfo +from typing_extensions import Generator from homeassistant.components.rainforest_raven.const import DOMAIN from homeassistant.config_entries import SOURCE_USB, SOURCE_USER diff --git a/tests/components/rainforest_raven/test_coordinator.py b/tests/components/rainforest_raven/test_coordinator.py new file mode 100644 index 00000000000..db70118f7b9 --- /dev/null +++ b/tests/components/rainforest_raven/test_coordinator.py @@ -0,0 +1,109 @@ +"""Tests for the Rainforest RAVEn data coordinator.""" + +import asyncio +import functools +from unittest.mock import AsyncMock + +from aioraven.device import RAVEnConnectionError +import pytest + +from homeassistant.components.rainforest_raven.coordinator import RAVEnDataCoordinator +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady + +from . import create_mock_entry + + +@pytest.mark.usefixtures("mock_device") +async def test_coordinator_device_info(hass: HomeAssistant) -> None: + """Test reporting device information from the coordinator.""" + entry = create_mock_entry() + coordinator = RAVEnDataCoordinator(hass, entry) + + assert coordinator.device_fw_version is None + assert coordinator.device_hw_version is None + assert coordinator.device_info is None + assert coordinator.device_mac_address is None + assert coordinator.device_manufacturer is None + assert coordinator.device_model is None + assert coordinator.device_name == "RAVEn Device" + + await coordinator.async_config_entry_first_refresh() + + assert coordinator.device_fw_version == "2.0.0 (7400)" + assert coordinator.device_hw_version == "2.7.3" + assert coordinator.device_info + assert coordinator.device_mac_address + assert coordinator.device_manufacturer == "Rainforest Automation, Inc." 
+ assert coordinator.device_model == "Z105-2-EMU2-LEDD_JM" + assert coordinator.device_name == "RAVEn Device" + + +async def test_coordinator_cache_device( + hass: HomeAssistant, mock_device: AsyncMock +) -> None: + """Test that the device isn't re-opened for subsequent refreshes.""" + entry = create_mock_entry() + coordinator = RAVEnDataCoordinator(hass, entry) + + await coordinator.async_config_entry_first_refresh() + assert mock_device.get_network_info.call_count == 1 + assert mock_device.open.call_count == 1 + + await coordinator.async_refresh() + assert mock_device.get_network_info.call_count == 2 + assert mock_device.open.call_count == 1 + + +async def test_coordinator_device_error_setup( + hass: HomeAssistant, mock_device: AsyncMock +) -> None: + """Test handling of a device error during initialization.""" + entry = create_mock_entry() + coordinator = RAVEnDataCoordinator(hass, entry) + + mock_device.get_network_info.side_effect = RAVEnConnectionError + with pytest.raises(ConfigEntryNotReady): + await coordinator.async_config_entry_first_refresh() + + +async def test_coordinator_device_error_update( + hass: HomeAssistant, mock_device: AsyncMock +) -> None: + """Test handling of a device error during an update.""" + entry = create_mock_entry() + coordinator = RAVEnDataCoordinator(hass, entry) + + await coordinator.async_config_entry_first_refresh() + assert coordinator.last_update_success is True + + mock_device.get_network_info.side_effect = RAVEnConnectionError + await coordinator.async_refresh() + assert coordinator.last_update_success is False + + +async def test_coordinator_device_timeout_update( + hass: HomeAssistant, mock_device: AsyncMock +) -> None: + """Test handling of a device timeout during an update.""" + entry = create_mock_entry() + coordinator = RAVEnDataCoordinator(hass, entry) + + await coordinator.async_config_entry_first_refresh() + assert coordinator.last_update_success is True + + mock_device.get_network_info.side_effect = functools.partial(asyncio.sleep, 10) + await coordinator.async_refresh() + assert coordinator.last_update_success is False + + +async def test_coordinator_comm_error( + hass: HomeAssistant, mock_device: AsyncMock +) -> None: + """Test handling of an error parsing or reading raw device data.""" + entry = create_mock_entry() + coordinator = RAVEnDataCoordinator(hass, entry) + + mock_device.synchronize.side_effect = RAVEnConnectionError + with pytest.raises(ConfigEntryNotReady): + await coordinator.async_config_entry_first_refresh() diff --git a/tests/components/rainforest_raven/test_diagnostics.py b/tests/components/rainforest_raven/test_diagnostics.py index ae231b3c8c2..86a86032ac6 100644 --- a/tests/components/rainforest_raven/test_diagnostics.py +++ b/tests/components/rainforest_raven/test_diagnostics.py @@ -1,24 +1,22 @@ """Test the Rainforest Eagle diagnostics.""" -from unittest.mock import AsyncMock +from dataclasses import asdict import pytest -from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props +from homeassistant.components.diagnostics import REDACTED +from homeassistant.const import CONF_MAC from homeassistant.core import HomeAssistant from . 
import create_mock_entry +from .const import DEMAND, NETWORK_INFO, PRICE_CLUSTER, SUMMATION -from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @pytest.fixture -async def mock_entry_no_meters( - hass: HomeAssistant, mock_device: AsyncMock -) -> MockConfigEntry: +async def mock_entry_no_meters(hass: HomeAssistant, mock_device): """Mock a RAVEn config entry with no meters.""" mock_entry = create_mock_entry(True) mock_entry.add_to_hass(hass) @@ -30,23 +28,61 @@ async def mock_entry_no_meters( async def test_entry_diagnostics_no_meters( hass: HomeAssistant, hass_client: ClientSessionGenerator, - mock_entry_no_meters: MockConfigEntry, - snapshot: SnapshotAssertion, + mock_device, + mock_entry_no_meters, ) -> None: """Test RAVEn diagnostics before the coordinator has updated.""" result = await get_diagnostics_for_config_entry( hass, hass_client, mock_entry_no_meters ) - assert result == snapshot(exclude=props("created_at", "modified_at")) + + config_entry_dict = mock_entry_no_meters.as_dict() + config_entry_dict["data"][CONF_MAC] = REDACTED + + assert result == { + "config_entry": config_entry_dict, + "data": { + "Meters": {}, + "NetworkInfo": {**asdict(NETWORK_INFO), "device_mac_id": REDACTED}, + }, + } async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_entry: MockConfigEntry, - snapshot: SnapshotAssertion, + hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_device, mock_entry ) -> None: """Test RAVEn diagnostics.""" result = await get_diagnostics_for_config_entry(hass, hass_client, mock_entry) - assert result == snapshot(exclude=props("created_at", "modified_at")) + config_entry_dict = mock_entry.as_dict() + config_entry_dict["data"][CONF_MAC] = REDACTED + + assert result == { + "config_entry": config_entry_dict, + "data": { + "Meters": { + "**REDACTED0**": { + "CurrentSummationDelivered": { + **asdict(SUMMATION), + "device_mac_id": REDACTED, + "meter_mac_id": REDACTED, + }, + "InstantaneousDemand": { + **asdict(DEMAND), + "device_mac_id": REDACTED, + "meter_mac_id": REDACTED, + }, + "PriceCluster": { + **asdict(PRICE_CLUSTER), + "device_mac_id": REDACTED, + "meter_mac_id": REDACTED, + "currency": { + "__type": str(type(PRICE_CLUSTER.currency)), + "repr": repr(PRICE_CLUSTER.currency), + }, + }, + }, + }, + "NetworkInfo": {**asdict(NETWORK_INFO), "device_mac_id": REDACTED}, + }, + } diff --git a/tests/components/rainforest_raven/test_init.py b/tests/components/rainforest_raven/test_init.py index acd1f606a07..974c45150a6 100644 --- a/tests/components/rainforest_raven/test_init.py +++ b/tests/components/rainforest_raven/test_init.py @@ -1,19 +1,8 @@ """Tests for the Rainforest RAVEn component initialisation.""" -from unittest.mock import AsyncMock - -from aioraven.data import DeviceInfo as RAVenDeviceInfo -from aioraven.device import RAVEnConnectionError -import pytest -from syrupy.assertion import SnapshotAssertion - from homeassistant.components.rainforest_raven.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from . 
import create_mock_entry -from .const import DEVICE_INFO from tests.common import MockConfigEntry @@ -29,55 +18,4 @@ async def test_load_unload_entry( await hass.async_block_till_done() assert mock_entry.state is ConfigEntryState.NOT_LOADED - - -@pytest.mark.parametrize( - ("device_info", "device_count"), - [(DEVICE_INFO, 1), (None, 0)], -) -async def test_device_registry( - hass: HomeAssistant, - mock_device: AsyncMock, - device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, - device_info: RAVenDeviceInfo | None, - device_count: int, -) -> None: - """Test device registry, including if get_device_info returns None.""" - mock_device.get_device_info.return_value = device_info - entry = create_mock_entry() - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - - assert entry.state is ConfigEntryState.LOADED - - assert len(hass.states.async_all()) == 5 - - entries = dr.async_entries_for_config_entry(device_registry, entry.entry_id) - assert len(entries) == device_count - assert entries == snapshot - - -async def test_synchronize_error(hass: HomeAssistant, mock_device: AsyncMock) -> None: - """Test handling of an error parsing or reading raw device data.""" - entry = create_mock_entry() - entry.add_to_hass(hass) - - mock_device.synchronize.side_effect = RAVEnConnectionError - - await hass.config_entries.async_setup(entry.entry_id) - - assert entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_get_network_info_error( - hass: HomeAssistant, mock_device: AsyncMock -) -> None: - """Test handling of a device error during initialization.""" - entry = create_mock_entry() - entry.add_to_hass(hass) - - mock_device.get_network_info.side_effect = RAVEnConnectionError - await hass.config_entries.async_setup(entry.entry_id) - - assert entry.state is ConfigEntryState.SETUP_RETRY + assert not hass.data.get(DOMAIN) diff --git a/tests/components/rainforest_raven/test_sensor.py b/tests/components/rainforest_raven/test_sensor.py index 2319b628374..3b859621cb4 100644 --- a/tests/components/rainforest_raven/test_sensor.py +++ b/tests/components/rainforest_raven/test_sensor.py @@ -1,102 +1,36 @@ """Tests for the Rainforest RAVEn sensors.""" -from datetime import timedelta -from unittest.mock import AsyncMock - -from aioraven.device import RAVEnConnectionError -from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy.assertion import SnapshotAssertion -from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .const import NETWORK_INFO - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.mark.usefixtures("mock_entry") -async def test_sensors( - hass: HomeAssistant, - mock_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: +async def test_sensors(hass: HomeAssistant) -> None: """Test the sensors.""" assert len(hass.states.async_all()) == 5 - await snapshot_platform(hass, entity_registry, snapshot, mock_entry.entry_id) + demand = hass.states.get("sensor.raven_device_meter_power_demand") + assert demand is not None + assert demand.state == "1.2345" + assert demand.attributes["unit_of_measurement"] == "kW" + delivered = hass.states.get("sensor.raven_device_total_meter_energy_delivered") + assert delivered is not None + assert delivered.state == "23456.7890" + assert delivered.attributes["unit_of_measurement"] == "kWh" 
-@pytest.mark.usefixtures("mock_entry") -async def test_device_update_error( - hass: HomeAssistant, - mock_device: AsyncMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test handling of a device error during an update.""" - mock_device.get_network_info.side_effect = (RAVEnConnectionError, NETWORK_INFO) + received = hass.states.get("sensor.raven_device_total_meter_energy_received") + assert received is not None + assert received.state == "00000.0000" + assert received.attributes["unit_of_measurement"] == "kWh" - states = hass.states.async_all() - assert len(states) == 5 - assert all(state.state != STATE_UNAVAILABLE for state in states) + price = hass.states.get("sensor.raven_device_meter_price") + assert price is not None + assert price.state == "0.10" + assert price.attributes["unit_of_measurement"] == "USD/kWh" - freezer.tick(timedelta(seconds=60)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - states = hass.states.async_all() - assert len(states) == 5 - assert all(state.state == STATE_UNAVAILABLE for state in states) - - freezer.tick(timedelta(seconds=60)) - async_fire_time_changed(hass) - - states = hass.states.async_all() - assert len(states) == 5 - assert all(state.state != STATE_UNAVAILABLE for state in states) - - -@pytest.mark.usefixtures("mock_entry") -async def test_device_update_timeout( - hass: HomeAssistant, mock_device: AsyncMock, freezer: FrozenDateTimeFactory -) -> None: - """Test handling of a device timeout during an update.""" - mock_device.get_network_info.side_effect = (TimeoutError, NETWORK_INFO) - - states = hass.states.async_all() - assert len(states) == 5 - assert all(state.state != STATE_UNAVAILABLE for state in states) - - freezer.tick(timedelta(seconds=60)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - states = hass.states.async_all() - assert len(states) == 5 - assert all(state.state == STATE_UNAVAILABLE for state in states) - - freezer.tick(timedelta(seconds=60)) - async_fire_time_changed(hass) - - states = hass.states.async_all() - assert len(states) == 5 - assert all(state.state != STATE_UNAVAILABLE for state in states) - - -@pytest.mark.usefixtures("mock_entry") -async def test_device_cache( - hass: HomeAssistant, mock_device: AsyncMock, freezer: FrozenDateTimeFactory -) -> None: - """Test that the device isn't re-opened for subsequent refreshes.""" - assert mock_device.get_network_info.call_count == 1 - assert mock_device.open.call_count == 1 - - freezer.tick(timedelta(seconds=60)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert mock_device.get_network_info.call_count == 2 - assert mock_device.open.call_count == 1 + signal = hass.states.get("sensor.raven_device_meter_signal_strength") + assert signal is not None + assert signal.state == "100" + assert signal.attributes["unit_of_measurement"] == "%" diff --git a/tests/components/rainmachine/conftest.py b/tests/components/rainmachine/conftest.py index 22ee807d187..717d74b421b 100644 --- a/tests/components/rainmachine/conftest.py +++ b/tests/components/rainmachine/conftest.py @@ -1,6 +1,5 @@ """Define test fixtures for RainMachine.""" -from collections.abc import AsyncGenerator import json from typing import Any from unittest.mock import AsyncMock, patch @@ -9,20 +8,19 @@ import pytest from homeassistant.components.rainmachine import DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT, CONF_SSL -from homeassistant.core import HomeAssistant from homeassistant.setup import 
async_setup_component from tests.common import MockConfigEntry, load_fixture @pytest.fixture(name="client") -def client_fixture(controller: AsyncMock, controller_mac: str) -> AsyncMock: +def client_fixture(controller, controller_mac): """Define a regenmaschine client.""" return AsyncMock(load_local=AsyncMock(), controllers={controller_mac: controller}) @pytest.fixture(name="config") -def config_fixture() -> dict[str, Any]: +def config_fixture(hass): """Define a config entry data fixture.""" return { CONF_IP_ADDRESS: "192.168.1.100", @@ -33,9 +31,7 @@ def config_fixture() -> dict[str, Any]: @pytest.fixture(name="config_entry") -def config_entry_fixture( - hass: HomeAssistant, config: dict[str, Any], controller_mac: str -) -> MockConfigEntry: +def config_entry_fixture(hass, config, controller_mac): """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -82,7 +78,7 @@ def controller_fixture( @pytest.fixture(name="controller_mac") -def controller_mac_fixture() -> str: +def controller_mac_fixture(): """Define a controller MAC address.""" return "aa:bb:cc:dd:ee:ff" @@ -149,9 +145,7 @@ def data_zones_fixture(): @pytest.fixture(name="setup_rainmachine") -async def setup_rainmachine_fixture( - hass: HomeAssistant, client: AsyncMock, config: dict[str, Any] -) -> AsyncGenerator[None]: +async def setup_rainmachine_fixture(hass, client, config): """Define a fixture to set up RainMachine.""" with ( patch("homeassistant.components.rainmachine.Client", return_value=client), diff --git a/tests/components/rainmachine/snapshots/test_diagnostics.ambr b/tests/components/rainmachine/snapshots/test_diagnostics.ambr index acd5fd165b4..9b5b5edc0c4 100644 --- a/tests/components/rainmachine/snapshots/test_diagnostics.ambr +++ b/tests/components/rainmachine/snapshots/test_diagnostics.ambr @@ -1131,8 +1131,6 @@ 'ssl': True, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'rainmachine', 'entry_id': '81bd010ed0a63b705f6da8407cb26d4b', 'minor_version': 1, @@ -2262,8 +2260,6 @@ 'ssl': True, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'rainmachine', 'entry_id': '81bd010ed0a63b705f6da8407cb26d4b', 'minor_version': 1, diff --git a/tests/components/rainmachine/test_diagnostics.py b/tests/components/rainmachine/test_diagnostics.py index ad5743957dd..1fc03ab357a 100644 --- a/tests/components/rainmachine/test_diagnostics.py +++ b/tests/components/rainmachine/test_diagnostics.py @@ -2,7 +2,6 @@ from regenmaschine.errors import RainMachineError from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -18,9 +17,10 @@ async def test_entry_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert await get_diagnostics_for_config_entry( - hass, hass_client, config_entry - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + == snapshot + ) async def test_entry_diagnostics_failed_controller_diagnostics( @@ -33,6 +33,7 @@ async def test_entry_diagnostics_failed_controller_diagnostics( ) -> None: """Test config entry diagnostics when the controller diagnostics API call fails.""" controller.diagnostics.current.side_effect = RainMachineError - assert await get_diagnostics_for_config_entry( - hass, hass_client, config_entry - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + == snapshot + 
) diff --git a/tests/components/rdw/conftest.py b/tests/components/rdw/conftest.py index 71c73a55441..3f45f44e3d8 100644 --- a/tests/components/rdw/conftest.py +++ b/tests/components/rdw/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from vehicle import Vehicle from homeassistant.components.rdw.const import CONF_LICENSE_PLATE, DOMAIN diff --git a/tests/components/recollect_waste/conftest.py b/tests/components/recollect_waste/conftest.py index 8384da3f388..360dd8aac98 100644 --- a/tests/components/recollect_waste/conftest.py +++ b/tests/components/recollect_waste/conftest.py @@ -1,7 +1,6 @@ """Define test fixtures for ReCollect Waste.""" from datetime import date -from typing import Any from unittest.mock import AsyncMock, Mock, patch from aiorecollect.client import PickupEvent, PickupType @@ -12,7 +11,6 @@ from homeassistant.components.recollect_waste.const import ( CONF_SERVICE_ID, DOMAIN, ) -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -27,9 +25,7 @@ def client_fixture(pickup_events): @pytest.fixture(name="config_entry") -def config_entry_fixture( - hass: HomeAssistant, config: dict[str, Any] -) -> MockConfigEntry: +def config_entry_fixture(hass, config): """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=f"{TEST_PLACE_ID}, {TEST_SERVICE_ID}", data=config @@ -39,7 +35,7 @@ def config_entry_fixture( @pytest.fixture(name="config") -def config_fixture() -> dict[str, Any]: +def config_fixture(): """Define a config entry data fixture.""" return { CONF_PLACE_ID: TEST_PLACE_ID, @@ -58,7 +54,7 @@ def pickup_events_fixture(): @pytest.fixture(name="mock_aiorecollect") -def mock_aiorecollect_fixture(client): +async def mock_aiorecollect_fixture(client): """Define a fixture to patch aiorecollect.""" with ( patch( @@ -74,9 +70,7 @@ def mock_aiorecollect_fixture(client): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture( - hass: HomeAssistant, config_entry: MockConfigEntry, mock_aiorecollect: None -) -> None: +async def setup_config_entry_fixture(hass, config_entry, mock_aiorecollect): """Define a fixture to set up recollect_waste.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/recollect_waste/test_diagnostics.py b/tests/components/recollect_waste/test_diagnostics.py index 24c690bcb37..6c8549786e8 100644 --- a/tests/components/recollect_waste/test_diagnostics.py +++ b/tests/components/recollect_waste/test_diagnostics.py @@ -5,7 +5,6 @@ from homeassistant.core import HomeAssistant from .conftest import TEST_SERVICE_ID -from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -31,9 +30,6 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, - "created_at": ANY, - "modified_at": ANY, - "discovery_keys": {}, }, "data": [ { diff --git a/tests/components/recorder/auto_repairs/events/test_schema.py b/tests/components/recorder/auto_repairs/events/test_schema.py index cae181a6270..e3b2638eded 100644 --- a/tests/components/recorder/auto_repairs/events/test_schema.py +++ b/tests/components/recorder/auto_repairs/events/test_schema.py @@ -11,18 +11,11 @@ from ...common import async_wait_recording_done from tests.typing import 
RecorderInstanceGenerator
 
 
-@pytest.fixture
-async def mock_recorder_before_hass(
-    async_test_recorder: RecorderInstanceGenerator,
-) -> None:
-    """Set up recorder."""
-
-
 @pytest.mark.parametrize("enable_schema_validation", [True])
 @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"])
 async def test_validate_db_schema_fix_float_issue(
+    async_setup_recorder_instance: RecorderInstanceGenerator,
     hass: HomeAssistant,
-    async_test_recorder: RecorderInstanceGenerator,
     caplog: pytest.LogCaptureFixture,
     db_engine: str,
     recorder_dialect_name: None,
@@ -40,8 +33,8 @@ async def test_validate_db_schema_fix_float_issue(
             "homeassistant.components.recorder.migration._modify_columns"
         ) as modify_columns_mock,
     ):
-        async with async_test_recorder(hass):
-            await async_wait_recording_done(hass)
+        await async_setup_recorder_instance(hass)
+        await async_wait_recording_done(hass)
 
     assert "Schema validation failed" not in caplog.text
     assert (
@@ -57,8 +50,8 @@ async def test_validate_db_schema_fix_float_issue(
 @pytest.mark.parametrize("enable_schema_validation", [True])
 @pytest.mark.parametrize("db_engine", ["mysql"])
 async def test_validate_db_schema_fix_utf8_issue_event_data(
+    async_setup_recorder_instance: RecorderInstanceGenerator,
     hass: HomeAssistant,
-    async_test_recorder: RecorderInstanceGenerator,
     caplog: pytest.LogCaptureFixture,
     db_engine: str,
     recorder_dialect_name: None,
@@ -73,8 +66,8 @@ async def test_validate_db_schema_fix_utf8_issue_event_data(
             return_value={"event_data.4-byte UTF-8"},
         ),
     ):
-        async with async_test_recorder(hass):
-            await async_wait_recording_done(hass)
+        await async_setup_recorder_instance(hass)
+        await async_wait_recording_done(hass)
 
     assert "Schema validation failed" not in caplog.text
     assert (
@@ -90,8 +83,8 @@ async def test_validate_db_schema_fix_utf8_issue_event_data(
 @pytest.mark.parametrize("enable_schema_validation", [True])
 @pytest.mark.parametrize("db_engine", ["mysql"])
 async def test_validate_db_schema_fix_collation_issue(
+    async_setup_recorder_instance: RecorderInstanceGenerator,
     hass: HomeAssistant,
-    async_test_recorder: RecorderInstanceGenerator,
     caplog: pytest.LogCaptureFixture,
     db_engine: str,
     recorder_dialect_name: None,
@@ -106,8 +99,8 @@ async def test_validate_db_schema_fix_collation_issue(
             return_value={"events.utf8mb4_unicode_ci"},
         ),
     ):
-        async with async_test_recorder(hass):
-            await async_wait_recording_done(hass)
+        await async_setup_recorder_instance(hass)
+        await async_wait_recording_done(hass)
 
     assert "Schema validation failed" not in caplog.text
     assert (
diff --git a/tests/components/recorder/auto_repairs/states/test_schema.py b/tests/components/recorder/auto_repairs/states/test_schema.py
index 915ac1f3500..58910a4441a 100644
--- a/tests/components/recorder/auto_repairs/states/test_schema.py
+++ b/tests/components/recorder/auto_repairs/states/test_schema.py
@@ -11,18 +11,11 @@ from ...common import async_wait_recording_done
 from tests.typing import RecorderInstanceGenerator
 
 
-@pytest.fixture
-async def mock_recorder_before_hass(
-    async_test_recorder: RecorderInstanceGenerator,
-) -> None:
-    """Set up recorder."""
-
-
 @pytest.mark.parametrize("enable_schema_validation", [True])
 @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"])
 async def test_validate_db_schema_fix_float_issue(
+    async_setup_recorder_instance: RecorderInstanceGenerator,
     hass: HomeAssistant,
-    async_test_recorder: RecorderInstanceGenerator,
     caplog: pytest.LogCaptureFixture,
     db_engine: str,
     recorder_dialect_name: None,
@@ -40,8 +33,8 @@ async def
test_validate_db_schema_fix_float_issue(
             "homeassistant.components.recorder.migration._modify_columns"
         ) as modify_columns_mock,
     ):
-        async with async_test_recorder(hass):
-            await async_wait_recording_done(hass)
+        await async_setup_recorder_instance(hass)
+        await async_wait_recording_done(hass)
 
     assert "Schema validation failed" not in caplog.text
     assert (
@@ -59,8 +52,8 @@ async def test_validate_db_schema_fix_float_issue(
 @pytest.mark.parametrize("enable_schema_validation", [True])
 @pytest.mark.parametrize("db_engine", ["mysql"])
 async def test_validate_db_schema_fix_utf8_issue_states(
+    async_setup_recorder_instance: RecorderInstanceGenerator,
     hass: HomeAssistant,
-    async_test_recorder: RecorderInstanceGenerator,
     caplog: pytest.LogCaptureFixture,
     db_engine: str,
     recorder_dialect_name: None,
@@ -75,8 +68,8 @@ async def test_validate_db_schema_fix_utf8_issue_states(
             return_value={"states.4-byte UTF-8"},
         ),
     ):
-        async with async_test_recorder(hass):
-            await async_wait_recording_done(hass)
+        await async_setup_recorder_instance(hass)
+        await async_wait_recording_done(hass)
 
     assert "Schema validation failed" not in caplog.text
     assert (
@@ -91,8 +84,8 @@ async def test_validate_db_schema_fix_utf8_issue_states(
 @pytest.mark.parametrize("enable_schema_validation", [True])
 @pytest.mark.parametrize("db_engine", ["mysql"])
 async def test_validate_db_schema_fix_utf8_issue_state_attributes(
+    async_setup_recorder_instance: RecorderInstanceGenerator,
     hass: HomeAssistant,
-    async_test_recorder: RecorderInstanceGenerator,
     caplog: pytest.LogCaptureFixture,
     db_engine: str,
     recorder_dialect_name: None,
@@ -107,8 +100,8 @@ async def test_validate_db_schema_fix_utf8_issue_state_attributes(
             return_value={"state_attributes.4-byte UTF-8"},
         ),
     ):
-        async with async_test_recorder(hass):
-            await async_wait_recording_done(hass)
+        await async_setup_recorder_instance(hass)
+        await async_wait_recording_done(hass)
 
     assert "Schema validation failed" not in caplog.text
     assert (
@@ -124,8 +117,8 @@ async def test_validate_db_schema_fix_utf8_issue_state_attributes(
 @pytest.mark.parametrize("enable_schema_validation", [True])
 @pytest.mark.parametrize("db_engine", ["mysql"])
 async def test_validate_db_schema_fix_collation_issue(
+    async_setup_recorder_instance: RecorderInstanceGenerator,
     hass: HomeAssistant,
-    async_test_recorder: RecorderInstanceGenerator,
     caplog: pytest.LogCaptureFixture,
     db_engine: str,
     recorder_dialect_name: None,
@@ -140,8 +133,8 @@ async def test_validate_db_schema_fix_collation_issue(
             return_value={"states.utf8mb4_unicode_ci"},
         ),
     ):
-        async with async_test_recorder(hass):
-            await async_wait_recording_done(hass)
+        await async_setup_recorder_instance(hass)
+        await async_wait_recording_done(hass)
 
     assert "Schema validation failed" not in caplog.text
     assert (
diff --git a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py
index 9e287d13594..175cb6ecd1a 100644
--- a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py
+++ b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py
@@ -1,6 +1,7 @@
 """Test removing statistics duplicates."""
 
 import importlib
+from pathlib import Path
 import sys
 from unittest.mock import patch
 
@@ -9,14 +10,17 @@ from sqlalchemy import create_engine
 from sqlalchemy.orm import Session
 
 from homeassistant.components import recorder
-from homeassistant.components.recorder import statistics
+from homeassistant.components.recorder import Recorder, statistics
 from
homeassistant.components.recorder.auto_repairs.statistics.duplicates import ( delete_statistics_duplicates, delete_statistics_meta_duplicates, ) +from homeassistant.components.recorder.const import SQLITE_URL_PREFIX from homeassistant.components.recorder.statistics import async_add_external_statistics from homeassistant.components.recorder.util import session_scope from homeassistant.core import HomeAssistant +from homeassistant.helpers import recorder as recorder_helper +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from ...common import async_wait_recording_done @@ -27,15 +31,20 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" -@pytest.mark.usefixtures("recorder_mock") +@pytest.fixture +def setup_recorder(recorder_mock: Recorder) -> None: + """Set up recorder.""" + + async def test_delete_duplicates_no_duplicates( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, + setup_recorder: None, ) -> None: """Test removal of duplicated statistics.""" await async_wait_recording_done(hass) @@ -47,10 +56,10 @@ async def test_delete_duplicates_no_duplicates( assert "Found duplicated" not in caplog.text -@pytest.mark.usefixtures("recorder_mock") async def test_duplicate_statistics_handle_integrity_error( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, + setup_recorder: None, ) -> None: """Test the recorder does not blow up if statistics is duplicated.""" await async_wait_recording_done(hass) @@ -131,13 +140,15 @@ def _create_engine_28(*args, **kwargs): return engine -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_delete_metadata_duplicates( - async_test_recorder: RecorderInstanceGenerator, - caplog: pytest.LogCaptureFixture, + caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: """Test removal of duplicated statistics.""" + test_dir = tmp_path.joinpath("sqlite") + test_dir.mkdir() + test_db_file = test_dir.joinpath("test_run_info.db") + dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" + module = "tests.components.recorder.db_schema_28" importlib.import_module(module) old_db_schema = sys.modules[module] @@ -189,18 +200,16 @@ async def test_delete_metadata_duplicates( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), - patch.object( - recorder.migration, "non_live_data_migration_needed", return_value=False - ), patch( "homeassistant.components.recorder.core.create_engine", new=_create_engine_28, ), ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass), - ): + async with async_test_home_assistant() as hass: + recorder_helper.async_initialize_recorder(hass) + await async_setup_component( + hass, "recorder", {"recorder": {"db_url": dburl}} + ) await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -219,10 +228,9 @@ async def test_delete_metadata_duplicates( await hass.async_stop() # Test that the duplicates are removed during migration from schema 28 - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass), - ): + async with async_test_home_assistant() as hass: + recorder_helper.async_initialize_recorder(hass) + await async_setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) await hass.async_start() await 
async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -239,13 +247,15 @@ async def test_delete_metadata_duplicates( await hass.async_stop() -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_delete_metadata_duplicates_many( - async_test_recorder: RecorderInstanceGenerator, - caplog: pytest.LogCaptureFixture, + caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: """Test removal of duplicated statistics.""" + test_dir = tmp_path.joinpath("sqlite") + test_dir.mkdir() + test_db_file = test_dir.joinpath("test_run_info.db") + dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" + module = "tests.components.recorder.db_schema_28" importlib.import_module(module) old_db_schema = sys.modules[module] @@ -309,18 +319,16 @@ async def test_delete_metadata_duplicates_many( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), - patch.object( - recorder.migration, "non_live_data_migration_needed", return_value=False - ), patch( "homeassistant.components.recorder.core.create_engine", new=_create_engine_28, ), ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass), - ): + async with async_test_home_assistant() as hass: + recorder_helper.async_initialize_recorder(hass) + await async_setup_component( + hass, "recorder", {"recorder": {"db_url": dburl}} + ) await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -330,10 +338,9 @@ async def test_delete_metadata_duplicates_many( await hass.async_stop() # Test that the duplicates are removed during migration from schema 28 - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass), - ): + async with async_test_home_assistant() as hass: + recorder_helper.async_initialize_recorder(hass) + await async_setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) await hass.async_start() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -352,9 +359,8 @@ async def test_delete_metadata_duplicates_many( await hass.async_stop() -@pytest.mark.usefixtures("recorder_mock") async def test_delete_metadata_duplicates_no_duplicates( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, setup_recorder: None ) -> None: """Test removal of duplicated statistics.""" await async_wait_recording_done(hass) diff --git a/tests/components/recorder/auto_repairs/statistics/test_schema.py b/tests/components/recorder/auto_repairs/statistics/test_schema.py index 34a075afbc7..f4e1d74aadf 100644 --- a/tests/components/recorder/auto_repairs/statistics/test_schema.py +++ b/tests/components/recorder/auto_repairs/statistics/test_schema.py @@ -11,18 +11,11 @@ from ...common import async_wait_recording_done from tests.typing import RecorderInstanceGenerator -@pytest.fixture -async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, -) -> None: - """Set up recorder.""" - - @pytest.mark.parametrize("db_engine", ["mysql"]) @pytest.mark.parametrize("enable_schema_validation", [True]) async def test_validate_db_schema_fix_utf8_issue( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -37,8 +30,8 @@ async def test_validate_db_schema_fix_utf8_issue( return_value={"statistics_meta.4-byte UTF-8"}, ), ): 
- async with async_test_recorder(hass): - await async_wait_recording_done(hass) + await async_setup_recorder_instance(hass) + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -55,8 +48,8 @@ async def test_validate_db_schema_fix_utf8_issue( @pytest.mark.parametrize("table", ["statistics_short_term", "statistics"]) @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) async def test_validate_db_schema_fix_float_issue( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, table: str, db_engine: str, @@ -75,8 +68,8 @@ async def test_validate_db_schema_fix_float_issue( "homeassistant.components.recorder.migration._modify_columns" ) as modify_columns_mock, ): - async with async_test_recorder(hass): - await async_wait_recording_done(hass) + await async_setup_recorder_instance(hass) + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -99,8 +92,8 @@ async def test_validate_db_schema_fix_float_issue( @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_collation_issue( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, recorder_dialect_name: None, db_engine: str, @@ -115,8 +108,8 @@ async def test_validate_db_schema_fix_collation_issue( return_value={"statistics.utf8mb4_unicode_ci"}, ), ): - async with async_test_recorder(hass): - await async_wait_recording_done(hass) + await async_setup_recorder_instance(hass) + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( diff --git a/tests/components/recorder/auto_repairs/test_schema.py b/tests/components/recorder/auto_repairs/test_schema.py index 857c0f6572f..d921c0cdbf8 100644 --- a/tests/components/recorder/auto_repairs/test_schema.py +++ b/tests/components/recorder/auto_repairs/test_schema.py @@ -3,7 +3,6 @@ import pytest from sqlalchemy import text -from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.auto_repairs.schema import ( correct_db_schema_precision, correct_db_schema_utf8, @@ -13,7 +12,7 @@ from homeassistant.components.recorder.auto_repairs.schema import ( ) from homeassistant.components.recorder.db_schema import States from homeassistant.components.recorder.migration import _modify_columns -from homeassistant.components.recorder.util import session_scope +from homeassistant.components.recorder.util import get_instance, session_scope from homeassistant.core import HomeAssistant from ..common import async_wait_recording_done @@ -21,18 +20,11 @@ from ..common import async_wait_recording_done from tests.typing import RecorderInstanceGenerator -@pytest.fixture -async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, -) -> None: - """Set up recorder.""" - - @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) async def test_validate_db_schema( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -41,37 +33,46 @@ async def test_validate_db_schema( Note: The test uses SQLite, the purpose is only to 
exercise the code. """ + await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert "Detected statistics schema errors" not in caplog.text assert "Database is about to correct DB schema errors" not in caplog.text -@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_fix_utf8_issue_good_schema( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, + recorder_db_url: str, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema with MySQL when the schema is correct.""" + if not recorder_db_url.startswith("mysql://"): + # This problem only happens on MySQL + return + await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - schema_errors = await recorder_mock.async_add_executor_job( - validate_table_schema_supports_utf8, recorder_mock, States, (States.state,) + instance = get_instance(hass) + schema_errors = await instance.async_add_executor_job( + validate_table_schema_supports_utf8, instance, States, (States.state,) ) assert schema_errors == set() -@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_fix_utf8_issue_with_broken_schema( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, + recorder_db_url: str, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema with MySQL when the schema is broken and repairing it.""" + if not recorder_db_url.startswith("mysql://"): + # This problem only happens on MySQL + return + await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - session_maker = recorder_mock.get_session + instance = get_instance(hass) + session_maker = instance.get_session def _break_states_schema(): with session_scope(session=session_maker()) as session: @@ -83,34 +84,38 @@ async def test_validate_db_schema_fix_utf8_issue_with_broken_schema( ) ) - await recorder_mock.async_add_executor_job(_break_states_schema) - schema_errors = await recorder_mock.async_add_executor_job( - validate_table_schema_supports_utf8, recorder_mock, States, (States.state,) + await instance.async_add_executor_job(_break_states_schema) + schema_errors = await instance.async_add_executor_job( + validate_table_schema_supports_utf8, instance, States, (States.state,) ) assert schema_errors == {"states.4-byte UTF-8"} # Now repair the schema - await recorder_mock.async_add_executor_job( - correct_db_schema_utf8, recorder_mock, States, schema_errors + await instance.async_add_executor_job( + correct_db_schema_utf8, instance, States, schema_errors ) # Now validate the schema again - schema_errors = await recorder_mock.async_add_executor_job( - validate_table_schema_supports_utf8, recorder_mock, States, ("state",) + schema_errors = await instance.async_add_executor_job( + validate_table_schema_supports_utf8, instance, States, ("state",) ) assert schema_errors == set() -@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_fix_incorrect_collation( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, + recorder_db_url: str, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema with MySQL when the collation is incorrect.""" 
+ if not recorder_db_url.startswith("mysql://"): + # This problem only happens on MySQL + return + await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - session_maker = recorder_mock.get_session + instance = get_instance(hass) + session_maker = instance.get_session def _break_states_schema(): with session_scope(session=session_maker()) as session: @@ -121,51 +126,59 @@ async def test_validate_db_schema_fix_incorrect_collation( ) ) - await recorder_mock.async_add_executor_job(_break_states_schema) - schema_errors = await recorder_mock.async_add_executor_job( - validate_table_schema_has_correct_collation, recorder_mock, States + await instance.async_add_executor_job(_break_states_schema) + schema_errors = await instance.async_add_executor_job( + validate_table_schema_has_correct_collation, instance, States ) assert schema_errors == {"states.utf8mb4_unicode_ci"} # Now repair the schema - await recorder_mock.async_add_executor_job( - correct_db_schema_utf8, recorder_mock, States, schema_errors + await instance.async_add_executor_job( + correct_db_schema_utf8, instance, States, schema_errors ) # Now validate the schema again - schema_errors = await recorder_mock.async_add_executor_job( - validate_table_schema_has_correct_collation, recorder_mock, States + schema_errors = await instance.async_add_executor_job( + validate_table_schema_has_correct_collation, instance, States ) assert schema_errors == set() -@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_precision_correct_collation( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, + recorder_db_url: str, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema when the schema is correct with the correct collation.""" + if not recorder_db_url.startswith("mysql://"): + # This problem only happens on MySQL + return + await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - schema_errors = await recorder_mock.async_add_executor_job( + instance = get_instance(hass) + schema_errors = await instance.async_add_executor_job( validate_table_schema_has_correct_collation, - recorder_mock, + instance, States, ) assert schema_errors == set() -@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_fix_utf8_issue_with_broken_schema_unrepairable( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, + recorder_db_url: str, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema with MySQL when the schema is broken and cannot be repaired.""" + if not recorder_db_url.startswith("mysql://"): + # This problem only happens on MySQL + return + await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - session_maker = recorder_mock.get_session + instance = get_instance(hass) + session_maker = instance.get_session def _break_states_schema(): with session_scope(session=session_maker()) as session: @@ -176,55 +189,63 @@ async def test_validate_db_schema_fix_utf8_issue_with_broken_schema_unrepairable "LOCK=EXCLUSIVE;" ) ) - _modify_columns( - session_maker, - recorder_mock.engine, - "states", - [ - "entity_id VARCHAR(255) NOT NULL", - ], - ) + _modify_columns( + session_maker, + instance.engine, + "states", + [ + "entity_id VARCHAR(255) NOT NULL", + ], + ) - await 
recorder_mock.async_add_executor_job(_break_states_schema) - schema_errors = await recorder_mock.async_add_executor_job( - validate_table_schema_supports_utf8, recorder_mock, States, ("state",) + await instance.async_add_executor_job(_break_states_schema) + schema_errors = await instance.async_add_executor_job( + validate_table_schema_supports_utf8, instance, States, ("state",) ) assert schema_errors == set() assert "Error when validating DB schema" in caplog.text -@pytest.mark.skip_on_db_engine(["sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_precision_good_schema( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, + recorder_db_url: str, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema when the schema is correct.""" + if not recorder_db_url.startswith(("mysql://", "postgresql://")): + # This problem only happens on MySQL and PostgreSQL + return + await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - schema_errors = await recorder_mock.async_add_executor_job( + instance = get_instance(hass) + schema_errors = await instance.async_add_executor_job( validate_db_schema_precision, - recorder_mock, + instance, States, ) assert schema_errors == set() -@pytest.mark.skip_on_db_engine(["sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_precision_with_broken_schema( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, + recorder_db_url: str, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema when the schema is broken and than repair it.""" + if not recorder_db_url.startswith(("mysql://", "postgresql://")): + # This problem only happens on MySQL and PostgreSQL + return + await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - session_maker = recorder_mock.get_session + instance = get_instance(hass) + session_maker = instance.get_session def _break_states_schema(): _modify_columns( session_maker, - recorder_mock.engine, + instance.engine, "states", [ "last_updated_ts FLOAT(4)", @@ -232,44 +253,47 @@ async def test_validate_db_schema_precision_with_broken_schema( ], ) - await recorder_mock.async_add_executor_job(_break_states_schema) - schema_errors = await recorder_mock.async_add_executor_job( + await instance.async_add_executor_job(_break_states_schema) + schema_errors = await instance.async_add_executor_job( validate_db_schema_precision, - recorder_mock, + instance, States, ) assert schema_errors == {"states.double precision"} # Now repair the schema - await recorder_mock.async_add_executor_job( - correct_db_schema_precision, recorder_mock, States, schema_errors + await instance.async_add_executor_job( + correct_db_schema_precision, instance, States, schema_errors ) # Now validate the schema again - schema_errors = await recorder_mock.async_add_executor_job( + schema_errors = await instance.async_add_executor_job( validate_db_schema_precision, - recorder_mock, + instance, States, ) assert schema_errors == set() -@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_precision_with_unrepairable_broken_schema( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, recorder_db_url: str, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema when the schema 
is broken and cannot be repaired.""" + if not recorder_db_url.startswith("mysql://"): + # This problem only happens on MySQL + return + await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - session_maker = recorder_mock.get_session + instance = get_instance(hass) + session_maker = instance.get_session def _break_states_schema(): _modify_columns( session_maker, - recorder_mock.engine, + instance.engine, "states", [ "state VARCHAR(255) NOT NULL", @@ -278,10 +302,10 @@ async def test_validate_db_schema_precision_with_unrepairable_broken_schema( ], ) - await recorder_mock.async_add_executor_job(_break_states_schema) - schema_errors = await recorder_mock.async_add_executor_job( + await instance.async_add_executor_job(_break_states_schema) + schema_errors = await instance.async_add_executor_job( validate_db_schema_precision, - recorder_mock, + instance, States, ) assert "Error when validating DB schema" in caplog.text diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index 60168f5e6ef..c72b1ac830b 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -79,18 +79,10 @@ async def async_block_recorder(hass: HomeAssistant, seconds: float) -> None: await event.wait() -def get_start_time(start: datetime) -> datetime: - """Calculate a valid start time for statistics.""" - start_minutes = start.minute - start.minute % 5 - return start.replace(minute=start_minutes, second=0, microsecond=0) - - def do_adhoc_statistics(hass: HomeAssistant, **kwargs: Any) -> None: """Trigger an adhoc statistics run.""" if not (start := kwargs.get("start")): start = statistics.get_start_time() - elif (start.minute % 5) != 0 or start.second != 0 or start.microsecond != 0: - raise ValueError(f"Statistics must start on 5 minute boundary got {start}") get_instance(hass).queue_task(StatisticsTask(start, False)) @@ -265,16 +257,12 @@ def assert_dict_of_states_equal_without_context_and_last_changed( ) -async def async_record_states( - hass: HomeAssistant, -) -> tuple[datetime, datetime, dict[str, list[State | None]]]: +async def async_record_states(hass: HomeAssistant): """Record some test states.""" return await hass.async_add_executor_job(record_states, hass) -def record_states( - hass: HomeAssistant, -) -> tuple[datetime, datetime, dict[str, list[State | None]]]: +def record_states(hass): """Record some test states. We inject a bunch of state updates temperature sensors. 
@@ -303,11 +291,11 @@ def record_states( wait_recording_done(hass) return hass.states.get(entity_id) - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() one = zero + timedelta(seconds=1 * 5) two = one + timedelta(seconds=15 * 5) three = two + timedelta(seconds=30 * 5) - four = three + timedelta(seconds=14 * 5) + four = three + timedelta(seconds=15 * 5) states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: []} with freeze_time(one) as freezer: @@ -428,14 +416,6 @@ def get_schema_module_path(schema_version_postfix: str) -> str: return f"tests.components.recorder.db_schema_{schema_version_postfix}" -@dataclass(slots=True) -class MockMigrationTask(migration.MigrationTask): - """Mock migration task which does nothing.""" - - def run(self, instance: Recorder) -> None: - """Run migration task.""" - - @contextmanager def old_db_schema(schema_version_postfix: str) -> Iterator[None]: """Fixture to initialize the db with the old schema.""" @@ -445,15 +425,16 @@ def old_db_schema(schema_version_postfix: str) -> Iterator[None]: with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), - patch.object(migration, "non_live_data_migration_needed", return_value=False), + patch.object( + recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION + ), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", MockMigrationTask), + patch.object(migration.EntityIDMigration, "task", core.RecorderTask), patch( CREATE_ENGINE_TARGET, new=partial( diff --git a/tests/components/recorder/conftest.py b/tests/components/recorder/conftest.py index 9cdf9dbb372..4db573fa65f 100644 --- a/tests/components/recorder/conftest.py +++ b/tests/components/recorder/conftest.py @@ -1,46 +1,14 @@ """Fixtures for the recorder component tests.""" -from collections.abc import Callable, Generator -from contextlib import contextmanager -from dataclasses import dataclass -from functools import partial -import threading -from unittest.mock import Mock, patch +from unittest.mock import patch import pytest -from sqlalchemy.engine import Engine -from sqlalchemy.orm.session import Session +from typing_extensions import Generator from homeassistant.components import recorder -from homeassistant.components.recorder import db_schema -from homeassistant.components.recorder.util import session_scope from homeassistant.core import HomeAssistant -def pytest_configure(config): - """Add custom skip_on_db_engine marker.""" - config.addinivalue_line( - "markers", - "skip_on_db_engine(engine): mark test to run only on named DB engine(s)", - ) - - -@pytest.fixture -def skip_by_db_engine(request: pytest.FixtureRequest, recorder_db_url: str) -> None: - """Fixture to skip tests on unsupported DB engines. - - Mark the test with @pytest.mark.skip_on_db_engine("mysql") to skip on mysql, or - @pytest.mark.skip_on_db_engine(["mysql", "sqlite"]) to skip on mysql and sqlite. 
- """ - if request.node.get_closest_marker("skip_on_db_engine"): - skip_on_db_engine = request.node.get_closest_marker("skip_on_db_engine").args[0] - if isinstance(skip_on_db_engine, str): - skip_on_db_engine = [skip_on_db_engine] - db_engine = recorder_db_url.partition("://")[0] - if db_engine in skip_on_db_engine: - pytest.skip(f"skipped for DB engine: {db_engine}") - - @pytest.fixture def recorder_dialect_name(hass: HomeAssistant, db_engine: str) -> Generator[None]: """Patch the recorder dialect.""" @@ -54,139 +22,3 @@ def recorder_dialect_name(hass: HomeAssistant, db_engine: str) -> Generator[None "homeassistant.components.recorder.Recorder.dialect_name", db_engine ): yield - - -@dataclass(slots=True) -class InstrumentedMigration: - """Container to aid controlling migration progress.""" - - live_migration_done: threading.Event - live_migration_done_stall: threading.Event - migration_stall: threading.Event - migration_started: threading.Event - migration_version: int | None - non_live_migration_done: threading.Event - non_live_migration_done_stall: threading.Event - apply_update_mock: Mock - stall_on_schema_version: int | None - apply_update_stalled: threading.Event - apply_update_version: int | None - - -@pytest.fixture(name="instrument_migration") -def instrument_migration_fixture( - hass: HomeAssistant, -) -> Generator[InstrumentedMigration]: - """Instrument recorder migration.""" - with instrument_migration(hass) as instrumented_migration: - yield instrumented_migration - - -@contextmanager -def instrument_migration( - hass: HomeAssistant, -) -> Generator[InstrumentedMigration]: - """Instrument recorder migration.""" - - real_migrate_schema_live = recorder.migration.migrate_schema_live - real_migrate_schema_non_live = recorder.migration.migrate_schema_non_live - real_apply_update = recorder.migration._apply_update - - def _instrument_migrate_schema_live(real_func, *args): - """Control migration progress and check results.""" - return _instrument_migrate_schema( - real_func, - args, - instrumented_migration.live_migration_done, - instrumented_migration.live_migration_done_stall, - ) - - def _instrument_migrate_schema_non_live(real_func, *args): - """Control migration progress and check results.""" - return _instrument_migrate_schema( - real_func, - args, - instrumented_migration.non_live_migration_done, - instrumented_migration.non_live_migration_done_stall, - ) - - def _instrument_migrate_schema( - real_func, - args, - migration_done: threading.Event, - migration_done_stall: threading.Event, - ): - """Control migration progress and check results.""" - instrumented_migration.migration_started.set() - - try: - migration_result = real_func(*args) - except Exception: - migration_done.set() - migration_done_stall.wait() - raise - - # Check and report the outcome of the migration; if migration fails - # the recorder will silently create a new database. 
- with session_scope(hass=hass, read_only=True) as session: - res = ( - session.query(db_schema.SchemaChanges) - .order_by(db_schema.SchemaChanges.change_id.desc()) - .first() - ) - instrumented_migration.migration_version = res.schema_version - migration_done.set() - migration_done_stall.wait() - return migration_result - - def _instrument_apply_update( - instance: recorder.Recorder, - hass: HomeAssistant, - engine: Engine, - session_maker: Callable[[], Session], - new_version: int, - old_version: int, - ): - """Control migration progress.""" - instrumented_migration.apply_update_version = new_version - stall_version = instrumented_migration.stall_on_schema_version - if stall_version is None or stall_version == new_version: - instrumented_migration.apply_update_stalled.set() - instrumented_migration.migration_stall.wait() - real_apply_update( - instance, hass, engine, session_maker, new_version, old_version - ) - - with ( - patch( - "homeassistant.components.recorder.migration.migrate_schema_live", - wraps=partial(_instrument_migrate_schema_live, real_migrate_schema_live), - ), - patch( - "homeassistant.components.recorder.migration.migrate_schema_non_live", - wraps=partial( - _instrument_migrate_schema_non_live, real_migrate_schema_non_live - ), - ), - patch( - "homeassistant.components.recorder.migration._apply_update", - wraps=_instrument_apply_update, - ) as apply_update_mock, - ): - instrumented_migration = InstrumentedMigration( - live_migration_done=threading.Event(), - live_migration_done_stall=threading.Event(), - migration_stall=threading.Event(), - migration_started=threading.Event(), - migration_version=None, - non_live_migration_done=threading.Event(), - non_live_migration_done_stall=threading.Event(), - apply_update_mock=apply_update_mock, - stall_on_schema_version=None, - apply_update_stalled=threading.Event(), - apply_update_version=None, - ) - - instrumented_migration.live_migration_done_stall.set() - instrumented_migration.non_live_migration_done_stall.set() - yield instrumented_migration diff --git a/tests/components/recorder/db_schema_16.py b/tests/components/recorder/db_schema_16.py index d7ca35c9341..24786b1ad44 100644 --- a/tests/components/recorder/db_schema_16.py +++ b/tests/components/recorder/db_schema_16.py @@ -348,13 +348,15 @@ class LazyState(State): __slots__ = [ "_row", + "entity_id", + "state", "_attributes", "_last_changed", "_last_updated", "_context", ] - def __init__(self, row) -> None: # pylint: disable=super-init-not-called + def __init__(self, row): # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_18.py b/tests/components/recorder/db_schema_18.py index adb71dffb9e..db6fbb78f56 100644 --- a/tests/components/recorder/db_schema_18.py +++ b/tests/components/recorder/db_schema_18.py @@ -361,13 +361,15 @@ class LazyState(State): __slots__ = [ "_row", + "entity_id", + "state", "_attributes", "_last_changed", "_last_updated", "_context", ] - def __init__(self, row) -> None: # pylint: disable=super-init-not-called + def __init__(self, row): # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_22.py b/tests/components/recorder/db_schema_22.py index c0d607b12a7..cd0dc52a927 100644 --- a/tests/components/recorder/db_schema_22.py +++ b/tests/components/recorder/db_schema_22.py @@ -480,13 +480,15 @@ class LazyState(State): 
__slots__ = [ "_row", + "entity_id", + "state", "_attributes", "_last_changed", "_last_updated", "_context", ] - def __init__(self, row) -> None: # pylint: disable=super-init-not-called + def __init__(self, row): # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_23.py b/tests/components/recorder/db_schema_23.py index f60b7b49df4..9187d271216 100644 --- a/tests/components/recorder/db_schema_23.py +++ b/tests/components/recorder/db_schema_23.py @@ -470,13 +470,15 @@ class LazyState(State): __slots__ = [ "_row", + "entity_id", + "state", "_attributes", "_last_changed", "_last_updated", "_context", ] - def __init__(self, row) -> None: # pylint: disable=super-init-not-called + def __init__(self, row): # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_23_with_newer_columns.py b/tests/components/recorder/db_schema_23_with_newer_columns.py index 4cc1074de41..9f902523c64 100644 --- a/tests/components/recorder/db_schema_23_with_newer_columns.py +++ b/tests/components/recorder/db_schema_23_with_newer_columns.py @@ -594,13 +594,15 @@ class LazyState(State): __slots__ = [ "_row", + "entity_id", + "state", "_attributes", "_last_changed", "_last_updated", "_context", ] - def __init__(self, row) -> None: # pylint: disable=super-init-not-called + def __init__(self, row): # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_30.py b/tests/components/recorder/db_schema_30.py index 97c33334111..b82213cbc89 100644 --- a/tests/components/recorder/db_schema_30.py +++ b/tests/components/recorder/db_schema_30.py @@ -9,6 +9,7 @@ from __future__ import annotations from collections.abc import Callable from datetime import datetime, timedelta import logging +import time from typing import Any, Self, TypedDict, cast, overload import ciso8601 @@ -32,7 +33,6 @@ from sqlalchemy import ( type_coerce, ) from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite -from sqlalchemy.engine.interfaces import Dialect from sqlalchemy.orm import aliased, declarative_base, relationship from sqlalchemy.orm.session import Session @@ -109,7 +109,7 @@ STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin" class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): # type: ignore[misc] """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex.""" - def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: + def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] """Offload the datetime parsing to ciso8601.""" return lambda value: None if value is None else ciso8601.parse_datetime(value) @@ -380,7 +380,7 @@ class States(Base): # type: ignore[misc,valid-type] ) # *** Not originally in v30, only added for recorder to startup ok last_updated = Column(DATETIME_TYPE, default=dt_util.utcnow, index=True) last_updated_ts = Column( - TIMESTAMP_TYPE, index=True + TIMESTAMP_TYPE, default=time.time, index=True ) # *** Not originally in v30, only added for recorder to startup ok old_state_id = Column(Integer, ForeignKey("states.state_id"), index=True) attributes_id = Column( diff --git a/tests/components/recorder/db_schema_32.py b/tests/components/recorder/db_schema_32.py index 6da0272da87..15b56e2fc86 100644 --- 
a/tests/components/recorder/db_schema_32.py +++ b/tests/components/recorder/db_schema_32.py @@ -33,7 +33,6 @@ from sqlalchemy import ( type_coerce, ) from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite -from sqlalchemy.engine.interfaces import Dialect from sqlalchemy.orm import aliased, declarative_base, relationship from sqlalchemy.orm.session import Session @@ -110,7 +109,7 @@ STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin" class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): # type: ignore[misc] """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex.""" - def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: + def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] """Offload the datetime parsing to ciso8601.""" return lambda value: None if value is None else ciso8601.parse_datetime(value) @@ -224,7 +223,7 @@ class Events(Base): # type: ignore[misc,valid-type] data_id = Column(Integer, ForeignKey("event_data.data_id"), index=True) context_id_bin = Column( LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH) - ) # *** Not originally in v32, only added for recorder to startup ok + ) # *** Not originally in v3v320, only added for recorder to startup ok context_user_id_bin = Column( LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH) ) # *** Not originally in v32, only added for recorder to startup ok @@ -565,7 +564,6 @@ class StatisticsBase: id = Column(Integer, Identity(), primary_key=True) created = Column(DATETIME_TYPE, default=dt_util.utcnow) - # *** Not originally in v32, only added for recorder to startup ok created_ts = Column(TIMESTAMP_TYPE, default=time.time) metadata_id = Column( Integer, @@ -573,13 +571,11 @@ class StatisticsBase: index=True, ) start = Column(DATETIME_TYPE, index=True) - # *** Not originally in v32, only added for recorder to startup ok start_ts = Column(TIMESTAMP_TYPE, index=True) mean = Column(DOUBLE_TYPE) min = Column(DOUBLE_TYPE) max = Column(DOUBLE_TYPE) last_reset = Column(DATETIME_TYPE) - # *** Not originally in v32, only added for recorder to startup ok last_reset_ts = Column(TIMESTAMP_TYPE) state = Column(DOUBLE_TYPE) sum = Column(DOUBLE_TYPE) diff --git a/tests/components/recorder/db_schema_42.py b/tests/components/recorder/db_schema_42.py index 99bdbb28f2c..c0dfc70571d 100644 --- a/tests/components/recorder/db_schema_42.py +++ b/tests/components/recorder/db_schema_42.py @@ -171,7 +171,7 @@ def compile_char_one(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex.""" - def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: + def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] """Offload the datetime parsing to ciso8601.""" return lambda value: None if value is None else ciso8601.parse_datetime(value) @@ -179,7 +179,7 @@ class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): class NativeLargeBinary(LargeBinary): """A faster version of LargeBinary for engines that support python bytes natively.""" - def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: + def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] """No conversion needed for engines that support native bytes.""" return None diff --git a/tests/components/recorder/db_schema_43.py b/tests/components/recorder/db_schema_43.py deleted file mode 100644 index 26d8ecd6856..00000000000 --- a/tests/components/recorder/db_schema_43.py +++ 
/dev/null @@ -1,889 +0,0 @@ -"""Models for SQLAlchemy. - -This file contains the model definitions for schema version 43. -It is used to test the schema migration logic. -""" - -from __future__ import annotations - -from collections.abc import Callable -from datetime import datetime, timedelta -import logging -import time -from typing import Any, Self, cast - -import ciso8601 -from fnv_hash_fast import fnv1a_32 -from sqlalchemy import ( - CHAR, - JSON, - BigInteger, - Boolean, - ColumnElement, - DateTime, - Float, - ForeignKey, - Identity, - Index, - Integer, - LargeBinary, - SmallInteger, - String, - Text, - case, - type_coerce, -) -from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite -from sqlalchemy.engine.interfaces import Dialect -from sqlalchemy.ext.compiler import compiles -from sqlalchemy.orm import DeclarativeBase, Mapped, aliased, mapped_column, relationship -from sqlalchemy.types import TypeDecorator - -from homeassistant.components.recorder.const import ( - ALL_DOMAIN_EXCLUDE_ATTRS, - SupportedDialect, -) -from homeassistant.components.recorder.models import ( - StatisticData, - StatisticDataTimestamp, - StatisticMetaData, - bytes_to_ulid_or_none, - bytes_to_uuid_hex_or_none, - datetime_to_timestamp_or_none, - process_timestamp, - ulid_to_bytes_or_none, - uuid_hex_to_bytes_or_none, -) -from homeassistant.components.sensor import ATTR_STATE_CLASS -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - ATTR_FRIENDLY_NAME, - ATTR_UNIT_OF_MEASUREMENT, - MATCH_ALL, - MAX_LENGTH_EVENT_EVENT_TYPE, - MAX_LENGTH_STATE_ENTITY_ID, - MAX_LENGTH_STATE_STATE, -) -from homeassistant.core import Context, Event, EventOrigin, EventStateChangedData, State -from homeassistant.helpers.json import JSON_DUMP, json_bytes, json_bytes_strip_null -import homeassistant.util.dt as dt_util -from homeassistant.util.json import ( - JSON_DECODE_EXCEPTIONS, - json_loads, - json_loads_object, -) - - -# SQLAlchemy Schema -class Base(DeclarativeBase): - """Base class for tables.""" - - -SCHEMA_VERSION = 43 - -_LOGGER = logging.getLogger(__name__) - -TABLE_EVENTS = "events" -TABLE_EVENT_DATA = "event_data" -TABLE_EVENT_TYPES = "event_types" -TABLE_STATES = "states" -TABLE_STATE_ATTRIBUTES = "state_attributes" -TABLE_STATES_META = "states_meta" -TABLE_RECORDER_RUNS = "recorder_runs" -TABLE_SCHEMA_CHANGES = "schema_changes" -TABLE_STATISTICS = "statistics" -TABLE_STATISTICS_META = "statistics_meta" -TABLE_STATISTICS_RUNS = "statistics_runs" -TABLE_STATISTICS_SHORT_TERM = "statistics_short_term" -TABLE_MIGRATION_CHANGES = "migration_changes" - -STATISTICS_TABLES = ("statistics", "statistics_short_term") - -MAX_STATE_ATTRS_BYTES = 16384 -MAX_EVENT_DATA_BYTES = 32768 - -PSQL_DIALECT = SupportedDialect.POSTGRESQL - -ALL_TABLES = [ - TABLE_STATES, - TABLE_STATE_ATTRIBUTES, - TABLE_EVENTS, - TABLE_EVENT_DATA, - TABLE_EVENT_TYPES, - TABLE_RECORDER_RUNS, - TABLE_SCHEMA_CHANGES, - TABLE_MIGRATION_CHANGES, - TABLE_STATES_META, - TABLE_STATISTICS, - TABLE_STATISTICS_META, - TABLE_STATISTICS_RUNS, - TABLE_STATISTICS_SHORT_TERM, -] - -TABLES_TO_CHECK = [ - TABLE_STATES, - TABLE_EVENTS, - TABLE_RECORDER_RUNS, - TABLE_SCHEMA_CHANGES, -] - -LAST_UPDATED_INDEX_TS = "ix_states_last_updated_ts" -METADATA_ID_LAST_UPDATED_INDEX_TS = "ix_states_metadata_id_last_updated_ts" -EVENTS_CONTEXT_ID_BIN_INDEX = "ix_events_context_id_bin" -STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin" -LEGACY_STATES_EVENT_ID_INDEX = "ix_states_event_id" -LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX = "ix_states_entity_id_last_updated_ts" 
-CONTEXT_ID_BIN_MAX_LENGTH = 16 - -MYSQL_COLLATE = "utf8mb4_unicode_ci" -MYSQL_DEFAULT_CHARSET = "utf8mb4" -MYSQL_ENGINE = "InnoDB" - -_DEFAULT_TABLE_ARGS = { - "mysql_default_charset": MYSQL_DEFAULT_CHARSET, - "mysql_collate": MYSQL_COLLATE, - "mysql_engine": MYSQL_ENGINE, - "mariadb_default_charset": MYSQL_DEFAULT_CHARSET, - "mariadb_collate": MYSQL_COLLATE, - "mariadb_engine": MYSQL_ENGINE, -} - -_MATCH_ALL_KEEP = { - ATTR_DEVICE_CLASS, - ATTR_STATE_CLASS, - ATTR_UNIT_OF_MEASUREMENT, - ATTR_FRIENDLY_NAME, -} - - -class UnusedDateTime(DateTime): - """An unused column type that behaves like a datetime.""" - - -class Unused(CHAR): - """An unused column type that behaves like a string.""" - - -@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] -@compiles(Unused, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] -def compile_char_zero(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: - """Compile UnusedDateTime and Unused as CHAR(0) on mysql, mariadb, and sqlite.""" - return "CHAR(0)" # Uses 1 byte on MySQL (no change on sqlite) - - -@compiles(Unused, "postgresql") # type: ignore[misc,no-untyped-call] -def compile_char_one(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: - """Compile Unused as CHAR(1) on postgresql.""" - return "CHAR(1)" # Uses 1 byte - - -class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): - """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex.""" - - def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: - """Offload the datetime parsing to ciso8601.""" - return lambda value: None if value is None else ciso8601.parse_datetime(value) - - -class NativeLargeBinary(LargeBinary): - """A faster version of LargeBinary for engines that support python bytes natively.""" - - def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: - """No conversion needed for engines that support native bytes.""" - return None - - -# For MariaDB and MySQL we can use an unsigned integer type since it will fit 2**32 -# for sqlite and postgresql we use a bigint -UINT_32_TYPE = BigInteger().with_variant( - mysql.INTEGER(unsigned=True), # type: ignore[no-untyped-call] - "mysql", - "mariadb", -) -JSON_VARIANT_CAST = Text().with_variant( - postgresql.JSON(none_as_null=True), # type: ignore[no-untyped-call] - "postgresql", -) -JSONB_VARIANT_CAST = Text().with_variant( - postgresql.JSONB(none_as_null=True), # type: ignore[no-untyped-call] - "postgresql", -) -DATETIME_TYPE = ( - DateTime(timezone=True) - .with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql", "mariadb") # type: ignore[no-untyped-call] - .with_variant(FAST_PYSQLITE_DATETIME(), "sqlite") # type: ignore[no-untyped-call] -) -DOUBLE_TYPE = ( - Float() - .with_variant(mysql.DOUBLE(asdecimal=False), "mysql", "mariadb") # type: ignore[no-untyped-call] - .with_variant(oracle.DOUBLE_PRECISION(), "oracle") - .with_variant(postgresql.DOUBLE_PRECISION(), "postgresql") -) -UNUSED_LEGACY_COLUMN = Unused(0) -UNUSED_LEGACY_DATETIME_COLUMN = UnusedDateTime(timezone=True) -UNUSED_LEGACY_INTEGER_COLUMN = SmallInteger() -DOUBLE_PRECISION_TYPE_SQL = "DOUBLE PRECISION" -CONTEXT_BINARY_TYPE = LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH).with_variant( - NativeLargeBinary(CONTEXT_ID_BIN_MAX_LENGTH), "mysql", "mariadb", "sqlite" -) - -TIMESTAMP_TYPE = DOUBLE_TYPE - - -class JSONLiteral(JSON): - """Teach SA how to literalize json.""" - - def literal_processor(self, dialect: Dialect) -> Callable[[Any], str]: - """Processor to convert a value 
to JSON.""" - - def process(value: Any) -> str: - """Dump json.""" - return JSON_DUMP(value) - - return process - - -EVENT_ORIGIN_ORDER = [EventOrigin.local, EventOrigin.remote] - - -class Events(Base): - """Event history data.""" - - __table_args__ = ( - # Used for fetching events at a specific time - # see logbook - Index( - "ix_events_event_type_id_time_fired_ts", "event_type_id", "time_fired_ts" - ), - Index( - EVENTS_CONTEXT_ID_BIN_INDEX, - "context_id_bin", - mysql_length=CONTEXT_ID_BIN_MAX_LENGTH, - mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH, - ), - _DEFAULT_TABLE_ARGS, - ) - __tablename__ = TABLE_EVENTS - event_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) - event_type: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) - event_data: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) - origin: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) - origin_idx: Mapped[int | None] = mapped_column(SmallInteger) - time_fired: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) - time_fired_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True) - context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) - context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) - context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) - data_id: Mapped[int | None] = mapped_column( - Integer, ForeignKey("event_data.data_id"), index=True - ) - context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) - context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) - context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) - event_type_id: Mapped[int | None] = mapped_column( - Integer, ForeignKey("event_types.event_type_id") - ) - event_data_rel: Mapped[EventData | None] = relationship("EventData") - event_type_rel: Mapped[EventTypes | None] = relationship("EventTypes") - - def __repr__(self) -> str: - """Return string representation of instance for debugging.""" - return ( - "" - ) - - @property - def _time_fired_isotime(self) -> str | None: - """Return time_fired as an isotime string.""" - date_time: datetime | None - if self.time_fired_ts is not None: - date_time = dt_util.utc_from_timestamp(self.time_fired_ts) - else: - date_time = process_timestamp(self.time_fired) - if date_time is None: - return None - return date_time.isoformat(sep=" ", timespec="seconds") - - @staticmethod - def from_event(event: Event) -> Events: - """Create an event database object from a native event.""" - context = event.context - return Events( - event_type=None, - event_data=None, - origin_idx=event.origin.idx, - time_fired=None, - time_fired_ts=event.time_fired_timestamp, - context_id=None, - context_id_bin=ulid_to_bytes_or_none(context.id), - context_user_id=None, - context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id), - context_parent_id=None, - context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id), - ) - - def to_native(self, validate_entity_id: bool = True) -> Event | None: - """Convert to a native HA Event.""" - context = Context( - id=bytes_to_ulid_or_none(self.context_id_bin), - user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin), - parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin), - ) - try: - return Event( - self.event_type or "", - json_loads_object(self.event_data) if self.event_data else {}, - EventOrigin(self.origin) - if self.origin - else EVENT_ORIGIN_ORDER[self.origin_idx or 0], - self.time_fired_ts 
or 0, - context=context, - ) - except JSON_DECODE_EXCEPTIONS: - # When json_loads fails - _LOGGER.exception("Error converting to event: %s", self) - return None - - -class EventData(Base): - """Event data history.""" - - __table_args__ = (_DEFAULT_TABLE_ARGS,) - __tablename__ = TABLE_EVENT_DATA - data_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) - hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True) - # Note that this is not named attributes to avoid confusion with the states table - shared_data: Mapped[str | None] = mapped_column( - Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb") - ) - - def __repr__(self) -> str: - """Return string representation of instance for debugging.""" - return ( - "" - ) - - @staticmethod - def shared_data_bytes_from_event( - event: Event, dialect: SupportedDialect | None - ) -> bytes: - """Create shared_data from an event.""" - if dialect == SupportedDialect.POSTGRESQL: - bytes_result = json_bytes_strip_null(event.data) - bytes_result = json_bytes(event.data) - if len(bytes_result) > MAX_EVENT_DATA_BYTES: - _LOGGER.warning( - "Event data for %s exceed maximum size of %s bytes. " - "This can cause database performance issues; Event data " - "will not be stored", - event.event_type, - MAX_EVENT_DATA_BYTES, - ) - return b"{}" - return bytes_result - - @staticmethod - def hash_shared_data_bytes(shared_data_bytes: bytes) -> int: - """Return the hash of json encoded shared data.""" - return fnv1a_32(shared_data_bytes) - - def to_native(self) -> dict[str, Any]: - """Convert to an event data dictionary.""" - shared_data = self.shared_data - if shared_data is None: - return {} - try: - return cast(dict[str, Any], json_loads(shared_data)) - except JSON_DECODE_EXCEPTIONS: - _LOGGER.exception("Error converting row to event data: %s", self) - return {} - - -class EventTypes(Base): - """Event type history.""" - - __table_args__ = (_DEFAULT_TABLE_ARGS,) - __tablename__ = TABLE_EVENT_TYPES - event_type_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) - event_type: Mapped[str | None] = mapped_column( - String(MAX_LENGTH_EVENT_EVENT_TYPE), index=True, unique=True - ) - - def __repr__(self) -> str: - """Return string representation of instance for debugging.""" - return ( - "" - ) - - -class States(Base): - """State change history.""" - - __table_args__ = ( - # Used for fetching the state of entities at a specific time - # (get_states in history.py) - Index(METADATA_ID_LAST_UPDATED_INDEX_TS, "metadata_id", "last_updated_ts"), - Index( - STATES_CONTEXT_ID_BIN_INDEX, - "context_id_bin", - mysql_length=CONTEXT_ID_BIN_MAX_LENGTH, - mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH, - ), - _DEFAULT_TABLE_ARGS, - ) - __tablename__ = TABLE_STATES - state_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) - entity_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) - state: Mapped[str | None] = mapped_column(String(MAX_LENGTH_STATE_STATE)) - attributes: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) - event_id: Mapped[int | None] = mapped_column(UNUSED_LEGACY_INTEGER_COLUMN) - last_changed: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) - last_changed_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE) - last_reported_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE) - last_updated: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) - last_updated_ts: Mapped[float | None] = mapped_column( - TIMESTAMP_TYPE, default=time.time, index=True 
- ) - old_state_id: Mapped[int | None] = mapped_column( - Integer, ForeignKey("states.state_id"), index=True - ) - attributes_id: Mapped[int | None] = mapped_column( - Integer, ForeignKey("state_attributes.attributes_id"), index=True - ) - context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) - context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) - context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) - origin_idx: Mapped[int | None] = mapped_column( - SmallInteger - ) # 0 is local, 1 is remote - old_state: Mapped[States | None] = relationship("States", remote_side=[state_id]) - state_attributes: Mapped[StateAttributes | None] = relationship("StateAttributes") - context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) - context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) - context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) - metadata_id: Mapped[int | None] = mapped_column( - Integer, ForeignKey("states_meta.metadata_id") - ) - states_meta_rel: Mapped[StatesMeta | None] = relationship("StatesMeta") - - def __repr__(self) -> str: - """Return string representation of instance for debugging.""" - return ( - f"" - ) - - @property - def _last_updated_isotime(self) -> str | None: - """Return last_updated as an isotime string.""" - date_time: datetime | None - if self.last_updated_ts is not None: - date_time = dt_util.utc_from_timestamp(self.last_updated_ts) - else: - date_time = process_timestamp(self.last_updated) - if date_time is None: - return None - return date_time.isoformat(sep=" ", timespec="seconds") - - @staticmethod - def from_event(event: Event[EventStateChangedData]) -> States: - """Create object from a state_changed event.""" - state = event.data["new_state"] - # None state means the state was removed from the state machine - if state is None: - state_value = "" - last_updated_ts = event.time_fired_timestamp - last_changed_ts = None - last_reported_ts = None - else: - state_value = state.state - last_updated_ts = state.last_updated_timestamp - if state.last_updated == state.last_changed: - last_changed_ts = None - else: - last_changed_ts = state.last_changed_timestamp - if state.last_updated == state.last_reported: - last_reported_ts = None - else: - last_reported_ts = state.last_reported_timestamp - context = event.context - return States( - state=state_value, - entity_id=event.data["entity_id"], - attributes=None, - context_id=None, - context_id_bin=ulid_to_bytes_or_none(context.id), - context_user_id=None, - context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id), - context_parent_id=None, - context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id), - origin_idx=event.origin.idx, - last_updated=None, - last_changed=None, - last_updated_ts=last_updated_ts, - last_changed_ts=last_changed_ts, - last_reported_ts=last_reported_ts, - ) - - def to_native(self, validate_entity_id: bool = True) -> State | None: - """Convert to an HA state object.""" - context = Context( - id=bytes_to_ulid_or_none(self.context_id_bin), - user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin), - parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin), - ) - try: - attrs = json_loads_object(self.attributes) if self.attributes else {} - except JSON_DECODE_EXCEPTIONS: - # When json_loads fails - _LOGGER.exception("Error converting row to state: %s", self) - return None - last_updated = dt_util.utc_from_timestamp(self.last_updated_ts or 0) - if 
self.last_changed_ts is None or self.last_changed_ts == self.last_updated_ts: - last_changed = dt_util.utc_from_timestamp(self.last_updated_ts or 0) - else: - last_changed = dt_util.utc_from_timestamp(self.last_changed_ts or 0) - if ( - self.last_reported_ts is None - or self.last_reported_ts == self.last_updated_ts - ): - last_reported = dt_util.utc_from_timestamp(self.last_updated_ts or 0) - else: - last_reported = dt_util.utc_from_timestamp(self.last_reported_ts or 0) - return State( - self.entity_id or "", - self.state, # type: ignore[arg-type] - # Join the state_attributes table on attributes_id to get the attributes - # for newer states - attrs, - last_changed=last_changed, - last_reported=last_reported, - last_updated=last_updated, - context=context, - validate_entity_id=validate_entity_id, - ) - - -class StateAttributes(Base): - """State attribute change history.""" - - __table_args__ = (_DEFAULT_TABLE_ARGS,) - __tablename__ = TABLE_STATE_ATTRIBUTES - attributes_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) - hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True) - # Note that this is not named attributes to avoid confusion with the states table - shared_attrs: Mapped[str | None] = mapped_column( - Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb") - ) - - def __repr__(self) -> str: - """Return string representation of instance for debugging.""" - return ( - f"" - ) - - @staticmethod - def shared_attrs_bytes_from_event( - event: Event[EventStateChangedData], - dialect: SupportedDialect | None, - ) -> bytes: - """Create shared_attrs from a state_changed event.""" - # None state means the state was removed from the state machine - if (state := event.data["new_state"]) is None: - return b"{}" - if state_info := state.state_info: - unrecorded_attributes = state_info["unrecorded_attributes"] - exclude_attrs = { - *ALL_DOMAIN_EXCLUDE_ATTRS, - *unrecorded_attributes, - } - if MATCH_ALL in unrecorded_attributes: - # Don't exclude device class, state class, unit of measurement - # or friendly name when using the MATCH_ALL exclude constant - exclude_attrs.update(state.attributes) - exclude_attrs -= _MATCH_ALL_KEEP - else: - exclude_attrs = ALL_DOMAIN_EXCLUDE_ATTRS - encoder = json_bytes_strip_null if dialect == PSQL_DIALECT else json_bytes - bytes_result = encoder( - {k: v for k, v in state.attributes.items() if k not in exclude_attrs} - ) - if len(bytes_result) > MAX_STATE_ATTRS_BYTES: - _LOGGER.warning( - "State attributes for %s exceed maximum size of %s bytes. 
" - "This can cause database performance issues; Attributes " - "will not be stored", - state.entity_id, - MAX_STATE_ATTRS_BYTES, - ) - return b"{}" - return bytes_result - - @staticmethod - def hash_shared_attrs_bytes(shared_attrs_bytes: bytes) -> int: - """Return the hash of json encoded shared attributes.""" - return fnv1a_32(shared_attrs_bytes) - - def to_native(self) -> dict[str, Any]: - """Convert to a state attributes dictionary.""" - shared_attrs = self.shared_attrs - if shared_attrs is None: - return {} - try: - return cast(dict[str, Any], json_loads(shared_attrs)) - except JSON_DECODE_EXCEPTIONS: - # When json_loads fails - _LOGGER.exception("Error converting row to state attributes: %s", self) - return {} - - -class StatesMeta(Base): - """Metadata for states.""" - - __table_args__ = (_DEFAULT_TABLE_ARGS,) - __tablename__ = TABLE_STATES_META - metadata_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) - entity_id: Mapped[str | None] = mapped_column( - String(MAX_LENGTH_STATE_ENTITY_ID), index=True, unique=True - ) - - def __repr__(self) -> str: - """Return string representation of instance for debugging.""" - return ( - "" - ) - - -class StatisticsBase: - """Statistics base class.""" - - id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) - created: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) - created_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, default=time.time) - metadata_id: Mapped[int | None] = mapped_column( - Integer, - ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"), - ) - start: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) - start_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True) - mean: Mapped[float | None] = mapped_column(DOUBLE_TYPE) - min: Mapped[float | None] = mapped_column(DOUBLE_TYPE) - max: Mapped[float | None] = mapped_column(DOUBLE_TYPE) - last_reset: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) - last_reset_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE) - state: Mapped[float | None] = mapped_column(DOUBLE_TYPE) - sum: Mapped[float | None] = mapped_column(DOUBLE_TYPE) - - duration: timedelta - - @classmethod - def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self: - """Create object from a statistics with datatime objects.""" - return cls( # type: ignore[call-arg] - metadata_id=metadata_id, - created=None, - created_ts=time.time(), - start=None, - start_ts=dt_util.utc_to_timestamp(stats["start"]), - mean=stats.get("mean"), - min=stats.get("min"), - max=stats.get("max"), - last_reset=None, - last_reset_ts=datetime_to_timestamp_or_none(stats.get("last_reset")), - state=stats.get("state"), - sum=stats.get("sum"), - ) - - @classmethod - def from_stats_ts(cls, metadata_id: int, stats: StatisticDataTimestamp) -> Self: - """Create object from a statistics with timestamps.""" - return cls( # type: ignore[call-arg] - metadata_id=metadata_id, - created=None, - created_ts=time.time(), - start=None, - start_ts=stats["start_ts"], - mean=stats.get("mean"), - min=stats.get("min"), - max=stats.get("max"), - last_reset=None, - last_reset_ts=stats.get("last_reset_ts"), - state=stats.get("state"), - sum=stats.get("sum"), - ) - - -class Statistics(Base, StatisticsBase): - """Long term statistics.""" - - duration = timedelta(hours=1) - - __table_args__ = ( - # Used for fetching statistics for a certain entity at a specific time - Index( - "ix_statistics_statistic_id_start_ts", - "metadata_id", - 
"start_ts", - unique=True, - ), - _DEFAULT_TABLE_ARGS, - ) - __tablename__ = TABLE_STATISTICS - - -class StatisticsShortTerm(Base, StatisticsBase): - """Short term statistics.""" - - duration = timedelta(minutes=5) - - __table_args__ = ( - # Used for fetching statistics for a certain entity at a specific time - Index( - "ix_statistics_short_term_statistic_id_start_ts", - "metadata_id", - "start_ts", - unique=True, - ), - _DEFAULT_TABLE_ARGS, - ) - __tablename__ = TABLE_STATISTICS_SHORT_TERM - - -class StatisticsMeta(Base): - """Statistics meta data.""" - - __table_args__ = (_DEFAULT_TABLE_ARGS,) - __tablename__ = TABLE_STATISTICS_META - id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) - statistic_id: Mapped[str | None] = mapped_column( - String(255), index=True, unique=True - ) - source: Mapped[str | None] = mapped_column(String(32)) - unit_of_measurement: Mapped[str | None] = mapped_column(String(255)) - has_mean: Mapped[bool | None] = mapped_column(Boolean) - has_sum: Mapped[bool | None] = mapped_column(Boolean) - name: Mapped[str | None] = mapped_column(String(255)) - - @staticmethod - def from_meta(meta: StatisticMetaData) -> StatisticsMeta: - """Create object from meta data.""" - return StatisticsMeta(**meta) - - -class RecorderRuns(Base): - """Representation of recorder run.""" - - __table_args__ = ( - Index("ix_recorder_runs_start_end", "start", "end"), - _DEFAULT_TABLE_ARGS, - ) - __tablename__ = TABLE_RECORDER_RUNS - run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) - start: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow) - end: Mapped[datetime | None] = mapped_column(DATETIME_TYPE) - closed_incorrect: Mapped[bool] = mapped_column(Boolean, default=False) - created: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow) - - def __repr__(self) -> str: - """Return string representation of instance for debugging.""" - end = ( - f"'{self.end.isoformat(sep=' ', timespec='seconds')}'" if self.end else None - ) - return ( - f"" - ) - - def to_native(self, validate_entity_id: bool = True) -> Self: - """Return self, native format is this model.""" - return self - - -class MigrationChanges(Base): - """Representation of migration changes.""" - - __tablename__ = TABLE_MIGRATION_CHANGES - __table_args__ = (_DEFAULT_TABLE_ARGS,) - - migration_id: Mapped[str] = mapped_column(String(255), primary_key=True) - version: Mapped[int] = mapped_column(SmallInteger) - - -class SchemaChanges(Base): - """Representation of schema version changes.""" - - __tablename__ = TABLE_SCHEMA_CHANGES - __table_args__ = (_DEFAULT_TABLE_ARGS,) - - change_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) - schema_version: Mapped[int | None] = mapped_column(Integer) - changed: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow) - - def __repr__(self) -> str: - """Return string representation of instance for debugging.""" - return ( - "" - ) - - -class StatisticsRuns(Base): - """Representation of statistics run.""" - - __tablename__ = TABLE_STATISTICS_RUNS - __table_args__ = (_DEFAULT_TABLE_ARGS,) - - run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) - start: Mapped[datetime] = mapped_column(DATETIME_TYPE, index=True) - - def __repr__(self) -> str: - """Return string representation of instance for debugging.""" - return ( - f"" - ) - - -EVENT_DATA_JSON = type_coerce( - EventData.shared_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True) -) 
-OLD_FORMAT_EVENT_DATA_JSON = type_coerce( - Events.event_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True) -) - -SHARED_ATTRS_JSON = type_coerce( - StateAttributes.shared_attrs.cast(JSON_VARIANT_CAST), JSON(none_as_null=True) -) -OLD_FORMAT_ATTRS_JSON = type_coerce( - States.attributes.cast(JSON_VARIANT_CAST), JSON(none_as_null=True) -) - -ENTITY_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["entity_id"] -OLD_ENTITY_ID_IN_EVENT: ColumnElement = OLD_FORMAT_EVENT_DATA_JSON["entity_id"] -DEVICE_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["device_id"] -OLD_STATE = aliased(States, name="old_state") - -SHARED_ATTR_OR_LEGACY_ATTRIBUTES = case( - (StateAttributes.shared_attrs.is_(None), States.attributes), - else_=StateAttributes.shared_attrs, -).label("attributes") -SHARED_DATA_OR_LEGACY_EVENT_DATA = case( - (EventData.shared_data.is_(None), Events.event_data), else_=EventData.shared_data -).label("event_data") diff --git a/tests/components/recorder/db_schema_9.py b/tests/components/recorder/db_schema_9.py deleted file mode 100644 index f9a8c2d2cad..00000000000 --- a/tests/components/recorder/db_schema_9.py +++ /dev/null @@ -1,233 +0,0 @@ -"""Models for SQLAlchemy. - -This file contains the model definitions for schema version 9, -used by Home Assistant Core 0.119.0. -It is used to test the schema migration logic. -""" - -import json -import logging - -from sqlalchemy import ( - Boolean, - Column, - DateTime, - ForeignKey, - Index, - Integer, - String, - Text, - distinct, -) -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import relationship -from sqlalchemy.orm.session import Session - -from homeassistant.core import Context, Event, EventOrigin, State, split_entity_id -from homeassistant.helpers.json import JSONEncoder -import homeassistant.util.dt as dt_util - -# SQLAlchemy Schema -Base = declarative_base() - -SCHEMA_VERSION = 9 - -_LOGGER = logging.getLogger(__name__) - -DB_TIMEZONE = "+00:00" - -TABLE_EVENTS = "events" -TABLE_STATES = "states" -TABLE_RECORDER_RUNS = "recorder_runs" -TABLE_SCHEMA_CHANGES = "schema_changes" - -ALL_TABLES = [TABLE_EVENTS, TABLE_STATES, TABLE_RECORDER_RUNS, TABLE_SCHEMA_CHANGES] - - -class Events(Base): # type: ignore[valid-type,misc] - """Event history data.""" - - __tablename__ = TABLE_EVENTS - event_id = Column(Integer, primary_key=True) - event_type = Column(String(32)) - event_data = Column(Text) - origin = Column(String(32)) - time_fired = Column(DateTime(timezone=True), index=True) - created = Column(DateTime(timezone=True), default=dt_util.utcnow) - context_id = Column(String(36), index=True) - context_user_id = Column(String(36), index=True) - context_parent_id = Column(String(36), index=True) - - __table_args__ = ( - # Used for fetching events at a specific time - # see logbook - Index("ix_events_event_type_time_fired", "event_type", "time_fired"), - ) - - @staticmethod - def from_event(event, event_data=None): - """Create an event database object from a native event.""" - return Events( - event_type=event.event_type, - event_data=event_data or json.dumps(event.data, cls=JSONEncoder), - origin=str(event.origin.value), - time_fired=event.time_fired, - context_id=event.context.id, - context_user_id=event.context.user_id, - context_parent_id=event.context.parent_id, - ) - - def to_native(self, validate_entity_id=True): - """Convert to a natve HA Event.""" - context = Context( - id=self.context_id, - user_id=self.context_user_id, - parent_id=self.context_parent_id, - ) - try: - return Event( - self.event_type, - 
json.loads(self.event_data), - EventOrigin(self.origin), - process_timestamp(self.time_fired), - context=context, - ) - except ValueError: - # When json.loads fails - _LOGGER.exception("Error converting to event: %s", self) - return None - - -class States(Base): # type: ignore[valid-type,misc] - """State change history.""" - - __tablename__ = TABLE_STATES - state_id = Column(Integer, primary_key=True) - domain = Column(String(64)) - entity_id = Column(String(255)) - state = Column(String(255)) - attributes = Column(Text) - event_id = Column(Integer, ForeignKey("events.event_id"), index=True) - last_changed = Column(DateTime(timezone=True), default=dt_util.utcnow) - last_updated = Column(DateTime(timezone=True), default=dt_util.utcnow, index=True) - created = Column(DateTime(timezone=True), default=dt_util.utcnow) - old_state_id = Column(Integer, ForeignKey("states.state_id")) - event = relationship("Events", uselist=False) - old_state = relationship("States", remote_side=[state_id]) - - __table_args__ = ( - # Used for fetching the state of entities at a specific time - # (get_states in history.py) - Index("ix_states_entity_id_last_updated", "entity_id", "last_updated"), - ) - - @staticmethod - def from_event(event): - """Create object from a state_changed event.""" - entity_id = event.data["entity_id"] - state = event.data.get("new_state") - - dbstate = States(entity_id=entity_id) - - # State got deleted - if state is None: - dbstate.state = "" - dbstate.domain = split_entity_id(entity_id)[0] - dbstate.attributes = "{}" - dbstate.last_changed = event.time_fired - dbstate.last_updated = event.time_fired - else: - dbstate.domain = state.domain - dbstate.state = state.state - dbstate.attributes = json.dumps(dict(state.attributes), cls=JSONEncoder) - dbstate.last_changed = state.last_changed - dbstate.last_updated = state.last_updated - - return dbstate - - def to_native(self, validate_entity_id=True): - """Convert to an HA state object.""" - try: - return State( - self.entity_id, - self.state, - json.loads(self.attributes), - process_timestamp(self.last_changed), - process_timestamp(self.last_updated), - # Join the events table on event_id to get the context instead - # as it will always be there for state_changed events - context=Context(id=None), - validate_entity_id=validate_entity_id, - ) - except ValueError: - # When json.loads fails - _LOGGER.exception("Error converting row to state: %s", self) - return None - - -class RecorderRuns(Base): # type: ignore[valid-type,misc] - """Representation of recorder run.""" - - __tablename__ = TABLE_RECORDER_RUNS - run_id = Column(Integer, primary_key=True) - start = Column(DateTime(timezone=True), default=dt_util.utcnow) - end = Column(DateTime(timezone=True)) - closed_incorrect = Column(Boolean, default=False) - created = Column(DateTime(timezone=True), default=dt_util.utcnow) - - __table_args__ = (Index("ix_recorder_runs_start_end", "start", "end"),) - - def entity_ids(self, point_in_time=None): - """Return the entity ids that existed in this run. - - Specify point_in_time if you want to know which existed at that point - in time inside the run. 
- """ - session = Session.object_session(self) - - assert session is not None, "RecorderRuns need to be persisted" - - query = session.query(distinct(States.entity_id)).filter( - States.last_updated >= self.start - ) - - if point_in_time is not None: - query = query.filter(States.last_updated < point_in_time) - elif self.end is not None: - query = query.filter(States.last_updated < self.end) - - return [row[0] for row in query] - - def to_native(self, validate_entity_id=True): - """Return self, native format is this model.""" - return self - - -class SchemaChanges(Base): # type: ignore[valid-type,misc] - """Representation of schema version changes.""" - - __tablename__ = TABLE_SCHEMA_CHANGES - change_id = Column(Integer, primary_key=True) - schema_version = Column(Integer) - changed = Column(DateTime(timezone=True), default=dt_util.utcnow) - - -def process_timestamp(ts): - """Process a timestamp into datetime object.""" - if ts is None: - return None - if ts.tzinfo is None: - return ts.replace(tzinfo=dt_util.UTC) - - return dt_util.as_utc(ts) - - -def process_timestamp_to_utc_isoformat(ts): - """Process a timestamp into UTC isotime.""" - if ts is None: - return None - if ts.tzinfo == dt_util.UTC: - return ts.isoformat() - if ts.tzinfo is None: - return f"{ts.isoformat()}{DB_TIMEZONE}" - return ts.astimezone(dt_util.UTC).isoformat() diff --git a/tests/components/recorder/test_entity_registry.py b/tests/components/recorder/test_entity_registry.py index ad438dcc525..a74992525b1 100644 --- a/tests/components/recorder/test_entity_registry.py +++ b/tests/components/recorder/test_entity_registry.py @@ -40,7 +40,7 @@ def _count_entity_id_in_states_meta( @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_filters.py b/tests/components/recorder/test_filters.py index 2841cabda1b..13a2a325f1e 100644 --- a/tests/components/recorder/test_filters.py +++ b/tests/components/recorder/test_filters.py @@ -7,8 +7,13 @@ from homeassistant.components.recorder.filters import ( extract_include_exclude_filter_conf, merge_include_exclude_filters, ) -from homeassistant.const import CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, CONF_INCLUDE -from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS +from homeassistant.helpers.entityfilter import ( + CONF_DOMAINS, + CONF_ENTITIES, + CONF_ENTITY_GLOBS, + CONF_EXCLUDE, + CONF_INCLUDE, +) EMPTY_INCLUDE_FILTER = { CONF_INCLUDE: { diff --git a/tests/components/recorder/test_filters_with_entityfilter.py b/tests/components/recorder/test_filters_with_entityfilter.py index 97839803619..1ee127a9989 100644 --- a/tests/components/recorder/test_filters_with_entityfilter.py +++ b/tests/components/recorder/test_filters_with_entityfilter.py @@ -13,17 +13,14 @@ from homeassistant.components.recorder.filters import ( sqlalchemy_filter_from_include_exclude_conf, ) from homeassistant.components.recorder.util import session_scope -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_DOMAINS, - CONF_ENTITIES, - CONF_EXCLUDE, - CONF_INCLUDE, - STATE_ON, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers.entityfilter import ( + CONF_DOMAINS, + CONF_ENTITIES, CONF_ENTITY_GLOBS, + CONF_EXCLUDE, + CONF_INCLUDE, convert_include_exclude_filter, ) diff --git 
a/tests/components/recorder/test_filters_with_entityfilter_schema_37.py b/tests/components/recorder/test_filters_with_entityfilter_schema_37.py index d3024df4ed6..9c66d2ee169 100644 --- a/tests/components/recorder/test_filters_with_entityfilter_schema_37.py +++ b/tests/components/recorder/test_filters_with_entityfilter_schema_37.py @@ -1,12 +1,12 @@ """The tests for the recorder filter matching the EntityFilter component.""" -from collections.abc import AsyncGenerator import json from unittest.mock import patch import pytest from sqlalchemy import select from sqlalchemy.engine.row import Row +from typing_extensions import AsyncGenerator from homeassistant.components.recorder import Recorder, get_instance from homeassistant.components.recorder.db_schema import EventData, Events, States @@ -16,17 +16,14 @@ from homeassistant.components.recorder.filters import ( sqlalchemy_filter_from_include_exclude_conf, ) from homeassistant.components.recorder.util import session_scope -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_DOMAINS, - CONF_ENTITIES, - CONF_EXCLUDE, - CONF_INCLUDE, - STATE_ON, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers.entityfilter import ( + CONF_DOMAINS, + CONF_ENTITIES, CONF_ENTITY_GLOBS, + CONF_EXCLUDE, + CONF_INCLUDE, convert_include_exclude_filter, ) diff --git a/tests/components/recorder/test_history.py b/tests/components/recorder/test_history.py index 28b8275247c..af846353467 100644 --- a/tests/components/recorder/test_history.py +++ b/tests/components/recorder/test_history.py @@ -5,21 +5,30 @@ from __future__ import annotations from copy import copy from datetime import datetime, timedelta import json -from unittest.mock import sentinel +from unittest.mock import patch, sentinel from freezegun import freeze_time import pytest +from sqlalchemy import text from homeassistant.components import recorder -from homeassistant.components.recorder import Recorder, history +from homeassistant.components.recorder import Recorder, get_instance, history from homeassistant.components.recorder.db_schema import ( + Events, + RecorderRuns, StateAttributes, States, StatesMeta, ) from homeassistant.components.recorder.filters import Filters +from homeassistant.components.recorder.history import legacy from homeassistant.components.recorder.models import process_timestamp +from homeassistant.components.recorder.models.legacy import ( + LegacyLazyState, + LegacyLazyStatePreSchema31, +) from homeassistant.components.recorder.util import session_scope +import homeassistant.core as ha from homeassistant.core import HomeAssistant, State from homeassistant.helpers.json import JSONEncoder import homeassistant.util.dt as dt_util @@ -38,7 +47,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -48,6 +57,77 @@ def setup_recorder(recorder_mock: Recorder) -> recorder.Recorder: """Set up recorder.""" +async def _async_get_states( + hass: HomeAssistant, + utc_point_in_time: datetime, + entity_ids: list[str] | None = None, + run: RecorderRuns | None = None, + no_attributes: bool = False, +): + """Get states from the database.""" + + def _get_states_with_session(): + with session_scope(hass=hass, read_only=True) as session: + attr_cache = {} + pre_31_schema = get_instance(hass).schema_version < 31 + return [ 
+ LegacyLazyStatePreSchema31(row, attr_cache, None) + if pre_31_schema + else LegacyLazyState( + row, + attr_cache, + None, + row.entity_id, + ) + for row in legacy._get_rows_with_session( + hass, + session, + utc_point_in_time, + entity_ids, + run, + no_attributes, + ) + ] + + return await recorder.get_instance(hass).async_add_executor_job( + _get_states_with_session + ) + + +def _add_db_entries( + hass: ha.HomeAssistant, point: datetime, entity_ids: list[str] +) -> None: + with session_scope(hass=hass) as session: + for idx, entity_id in enumerate(entity_ids): + session.add( + Events( + event_id=1001 + idx, + event_type="state_changed", + event_data="{}", + origin="LOCAL", + time_fired=point, + ) + ) + session.add( + States( + entity_id=entity_id, + state="on", + attributes='{"name":"the light"}', + last_changed=None, + last_updated=point, + event_id=1001 + idx, + attributes_id=1002 + idx, + ) + ) + session.add( + StateAttributes( + shared_attrs='{"name":"the shared light"}', + hash=1234 + idx, + attributes_id=1002 + idx, + ) + ) + + async def test_get_full_significant_states_with_session_entity_no_matches( hass: HomeAssistant, ) -> None: @@ -811,6 +891,175 @@ def record_states( return zero, four, states +async def test_state_changes_during_period_query_during_migration_to_schema_25( + hass: HomeAssistant, + recorder_db_url: str, +) -> None: + """Test we can query data prior to schema 25 and during migration to schema 25.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes + return + + instance = recorder.get_instance(hass) + + with patch.object(instance.states_meta_manager, "active", False): + start = dt_util.utcnow() + point = start + timedelta(seconds=1) + end = point + timedelta(seconds=1) + entity_id = "light.test" + await recorder.get_instance(hass).async_add_executor_job( + _add_db_entries, hass, point, [entity_id] + ) + + no_attributes = True + hist = history.state_changes_during_period( + hass, start, end, entity_id, no_attributes, include_start_time_state=False + ) + state = hist[entity_id][0] + assert state.attributes == {} + + no_attributes = False + hist = history.state_changes_during_period( + hass, start, end, entity_id, no_attributes, include_start_time_state=False + ) + state = hist[entity_id][0] + assert state.attributes == {"name": "the shared light"} + + with instance.engine.connect() as conn: + conn.execute(text("update states set attributes_id=NULL;")) + conn.execute(text("drop table state_attributes;")) + conn.commit() + + with patch.object(instance, "schema_version", 24): + instance.states_meta_manager.active = False + no_attributes = True + hist = history.state_changes_during_period( + hass, + start, + end, + entity_id, + no_attributes, + include_start_time_state=False, + ) + state = hist[entity_id][0] + assert state.attributes == {} + + no_attributes = False + hist = history.state_changes_during_period( + hass, + start, + end, + entity_id, + no_attributes, + include_start_time_state=False, + ) + state = hist[entity_id][0] + assert state.attributes == {"name": "the light"} + + +async def test_get_states_query_during_migration_to_schema_25( + hass: HomeAssistant, + recorder_db_url: str, +) -> None: + """Test we can query data prior to schema 25 and during migration to schema 25.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes + return + + 
instance = recorder.get_instance(hass) + + start = dt_util.utcnow() + point = start + timedelta(seconds=1) + end = point + timedelta(seconds=1) + entity_id = "light.test" + await instance.async_add_executor_job(_add_db_entries, hass, point, [entity_id]) + assert instance.states_meta_manager.active + + no_attributes = True + hist = await _async_get_states(hass, end, [entity_id], no_attributes=no_attributes) + state = hist[0] + assert state.attributes == {} + + no_attributes = False + hist = await _async_get_states(hass, end, [entity_id], no_attributes=no_attributes) + state = hist[0] + assert state.attributes == {"name": "the shared light"} + + with instance.engine.connect() as conn: + conn.execute(text("update states set attributes_id=NULL;")) + conn.execute(text("drop table state_attributes;")) + conn.commit() + + with patch.object(instance, "schema_version", 24): + instance.states_meta_manager.active = False + no_attributes = True + hist = await _async_get_states( + hass, end, [entity_id], no_attributes=no_attributes + ) + state = hist[0] + assert state.attributes == {} + + no_attributes = False + hist = await _async_get_states( + hass, end, [entity_id], no_attributes=no_attributes + ) + state = hist[0] + assert state.attributes == {"name": "the light"} + + +async def test_get_states_query_during_migration_to_schema_25_multiple_entities( + hass: HomeAssistant, + recorder_db_url: str, +) -> None: + """Test we can query data prior to schema 25 and during migration to schema 25.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes + return + + instance = recorder.get_instance(hass) + + start = dt_util.utcnow() + point = start + timedelta(seconds=1) + end = point + timedelta(seconds=1) + entity_id_1 = "light.test" + entity_id_2 = "switch.test" + entity_ids = [entity_id_1, entity_id_2] + + await instance.async_add_executor_job(_add_db_entries, hass, point, entity_ids) + assert instance.states_meta_manager.active + + no_attributes = True + hist = await _async_get_states(hass, end, entity_ids, no_attributes=no_attributes) + assert hist[0].attributes == {} + assert hist[1].attributes == {} + + no_attributes = False + hist = await _async_get_states(hass, end, entity_ids, no_attributes=no_attributes) + assert hist[0].attributes == {"name": "the shared light"} + assert hist[1].attributes == {"name": "the shared light"} + + with instance.engine.connect() as conn: + conn.execute(text("update states set attributes_id=NULL;")) + conn.execute(text("drop table state_attributes;")) + conn.commit() + + with patch.object(instance, "schema_version", 24): + instance.states_meta_manager.active = False + no_attributes = True + hist = await _async_get_states( + hass, end, entity_ids, no_attributes=no_attributes + ) + assert hist[0].attributes == {} + assert hist[1].attributes == {} + + no_attributes = False + hist = await _async_get_states( + hass, end, entity_ids, no_attributes=no_attributes + ) + assert hist[0].attributes == {"name": "the light"} + assert hist[1].attributes == {"name": "the light"} + + async def test_get_full_significant_states_handles_empty_last_changed( hass: HomeAssistant, ) -> None: diff --git a/tests/components/recorder/test_history_db_schema_30.py b/tests/components/recorder/test_history_db_schema_30.py new file mode 100644 index 00000000000..e5e80b0cdb9 --- /dev/null +++ b/tests/components/recorder/test_history_db_schema_30.py @@ -0,0 +1,713 @@ +"""The tests the History 
component.""" + +from __future__ import annotations + +from copy import copy +from datetime import datetime, timedelta +import json +from unittest.mock import patch, sentinel + +from freezegun import freeze_time +import pytest + +from homeassistant.components import recorder +from homeassistant.components.recorder import Recorder, history +from homeassistant.components.recorder.filters import Filters +from homeassistant.components.recorder.models import process_timestamp +from homeassistant.components.recorder.util import session_scope +from homeassistant.core import HomeAssistant, State +from homeassistant.helpers.json import JSONEncoder +import homeassistant.util.dt as dt_util + +from .common import ( + assert_dict_of_states_equal_without_context_and_last_changed, + assert_multiple_states_equal_without_context, + assert_multiple_states_equal_without_context_and_last_changed, + assert_states_equal_without_context, + async_wait_recording_done, + old_db_schema, +) + +from tests.typing import RecorderInstanceGenerator + + +@pytest.fixture +async def mock_recorder_before_hass( + async_setup_recorder_instance: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + +@pytest.fixture(autouse=True) +def db_schema_30(): + """Fixture to initialize the db with the old schema 30.""" + with old_db_schema("30"): + yield + + +@pytest.fixture(autouse=True) +def setup_recorder(db_schema_30, recorder_mock: Recorder) -> recorder.Recorder: + """Set up recorder.""" + + +async def test_get_full_significant_states_with_session_entity_no_matches( + hass: HomeAssistant, +) -> None: + """Test getting states at a specific point in time for entities that never have been recorded.""" + now = dt_util.utcnow() + time_before_recorder_ran = now - timedelta(days=1000) + instance = recorder.get_instance(hass) + with ( + session_scope(hass=hass) as session, + patch.object(instance.states_meta_manager, "active", False), + ): + assert ( + history.get_full_significant_states_with_session( + hass, session, time_before_recorder_ran, now, entity_ids=["demo.id"] + ) + == {} + ) + assert ( + history.get_full_significant_states_with_session( + hass, + session, + time_before_recorder_ran, + now, + entity_ids=["demo.id", "demo.id2"], + ) + == {} + ) + + +async def test_significant_states_with_session_entity_minimal_response_no_matches( + hass: HomeAssistant, +) -> None: + """Test getting states at a specific point in time for entities that never have been recorded.""" + now = dt_util.utcnow() + time_before_recorder_ran = now - timedelta(days=1000) + instance = recorder.get_instance(hass) + with ( + session_scope(hass=hass) as session, + patch.object(instance.states_meta_manager, "active", False), + ): + assert ( + history.get_significant_states_with_session( + hass, + session, + time_before_recorder_ran, + now, + entity_ids=["demo.id"], + minimal_response=True, + ) + == {} + ) + assert ( + history.get_significant_states_with_session( + hass, + session, + time_before_recorder_ran, + now, + entity_ids=["demo.id", "demo.id2"], + minimal_response=True, + ) + == {} + ) + + +@pytest.mark.parametrize( + ("attributes", "no_attributes", "limit"), + [ + ({"attr": True}, False, 5000), + ({}, True, 5000), + ({"attr": True}, False, 3), + ({}, True, 3), + ], +) +async def test_state_changes_during_period( + hass: HomeAssistant, attributes, no_attributes, limit +) -> None: + """Test state change during period.""" + entity_id = "media_player.test" + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, 
"active", False): + + def set_state(state): + """Set the state.""" + hass.states.async_set(entity_id, state, attributes) + return hass.states.get(entity_id) + + start = dt_util.utcnow() + point = start + timedelta(seconds=1) + end = point + timedelta(seconds=1) + + with freeze_time(start) as freezer: + set_state("idle") + set_state("YouTube") + + freezer.move_to(point) + states = [ + set_state("idle"), + set_state("Netflix"), + set_state("Plex"), + set_state("YouTube"), + ] + + freezer.move_to(end) + set_state("Netflix") + set_state("Plex") + await async_wait_recording_done(hass) + + hist = history.state_changes_during_period( + hass, start, end, entity_id, no_attributes, limit=limit + ) + + assert_multiple_states_equal_without_context(states[:limit], hist[entity_id]) + + +async def test_state_changes_during_period_descending( + hass: HomeAssistant, +) -> None: + """Test state change during period descending.""" + entity_id = "media_player.test" + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + + def set_state(state): + """Set the state.""" + hass.states.async_set(entity_id, state, {"any": 1}) + return hass.states.get(entity_id) + + start = dt_util.utcnow() + point = start + timedelta(seconds=1) + point2 = start + timedelta(seconds=1, microseconds=2) + point3 = start + timedelta(seconds=1, microseconds=3) + point4 = start + timedelta(seconds=1, microseconds=4) + end = point + timedelta(seconds=1) + + with freeze_time(start) as freezer: + set_state("idle") + set_state("YouTube") + + freezer.move_to(point) + + states = [set_state("idle")] + freezer.move_to(point2) + + states.append(set_state("Netflix")) + + freezer.move_to(point3) + states.append(set_state("Plex")) + + freezer.move_to(point4) + states.append(set_state("YouTube")) + + freezer.move_to(end) + set_state("Netflix") + set_state("Plex") + await async_wait_recording_done(hass) + + hist = history.state_changes_during_period( + hass, start, end, entity_id, no_attributes=False, descending=False + ) + assert_multiple_states_equal_without_context(states, hist[entity_id]) + + hist = history.state_changes_during_period( + hass, start, end, entity_id, no_attributes=False, descending=True + ) + assert_multiple_states_equal_without_context( + states, list(reversed(list(hist[entity_id]))) + ) + + +async def test_get_last_state_changes(hass: HomeAssistant) -> None: + """Test number of state changes.""" + entity_id = "sensor.test" + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + + def set_state(state): + """Set the state.""" + hass.states.async_set(entity_id, state) + return hass.states.get(entity_id) + + start = dt_util.utcnow() - timedelta(minutes=2) + point = start + timedelta(minutes=1) + point2 = point + timedelta(minutes=1, seconds=1) + states = [] + + with freeze_time(start) as freezer: + set_state("1") + + freezer.move_to(point) + states.append(set_state("2")) + + freezer.move_to(point2) + states.append(set_state("3")) + await async_wait_recording_done(hass) + + hist = history.get_last_state_changes(hass, 2, entity_id) + + assert_multiple_states_equal_without_context(states, hist[entity_id]) + + +async def test_ensure_state_can_be_copied( + hass: HomeAssistant, +) -> None: + """Ensure a state can pass though copy(). + + The filter integration uses copy() on states + from history. 
+ """ + entity_id = "sensor.test" + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + + def set_state(state): + """Set the state.""" + hass.states.async_set(entity_id, state) + return hass.states.get(entity_id) + + start = dt_util.utcnow() - timedelta(minutes=2) + point = start + timedelta(minutes=1) + + with freeze_time(start) as freezer: + set_state("1") + + freezer.move_to(point) + set_state("2") + await async_wait_recording_done(hass) + + hist = history.get_last_state_changes(hass, 2, entity_id) + + assert_states_equal_without_context( + copy(hist[entity_id][0]), hist[entity_id][0] + ) + assert_states_equal_without_context( + copy(hist[entity_id][1]), hist[entity_id][1] + ) + + +async def test_get_significant_states(hass: HomeAssistant) -> None: + """Test that only significant states are returned. + + We should get back every thermostat change that + includes an attribute change, but only the state updates for + media player (attribute changes are not significant and not returned). + """ + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + zero, four, states = record_states(hass) + await async_wait_recording_done(hass) + + hist = history.get_significant_states(hass, zero, four, entity_ids=list(states)) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +async def test_get_significant_states_minimal_response(hass: HomeAssistant) -> None: + """Test that only significant states are returned. + + When minimal responses is set only the first and + last states return a complete state. + We should get back every thermostat change that + includes an attribute change, but only the state updates for + media player (attribute changes are not significant and not returned). + """ + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + zero, four, states = record_states(hass) + await async_wait_recording_done(hass) + + hist = history.get_significant_states( + hass, zero, four, minimal_response=True, entity_ids=list(states) + ) + entites_with_reducable_states = [ + "media_player.test", + "media_player.test3", + ] + + # All states for media_player.test state are reduced + # down to last_changed and state when minimal_response + # is set except for the first state. + # is set. 
We use JSONEncoder to make sure that are + # pre-encoded last_changed is always the same as what + # will happen with encoding a native state + for entity_id in entites_with_reducable_states: + entity_states = states[entity_id] + for state_idx in range(1, len(entity_states)): + input_state = entity_states[state_idx] + orig_last_changed = json.dumps( + process_timestamp(input_state.last_changed), + cls=JSONEncoder, + ).replace('"', "") + orig_state = input_state.state + entity_states[state_idx] = { + "last_changed": orig_last_changed, + "state": orig_state, + } + + assert len(hist) == len(states) + assert_states_equal_without_context( + states["media_player.test"][0], hist["media_player.test"][0] + ) + assert states["media_player.test"][1] == hist["media_player.test"][1] + assert states["media_player.test"][2] == hist["media_player.test"][2] + + assert_multiple_states_equal_without_context( + states["media_player.test2"], hist["media_player.test2"] + ) + assert_states_equal_without_context( + states["media_player.test3"][0], hist["media_player.test3"][0] + ) + assert states["media_player.test3"][1] == hist["media_player.test3"][1] + + assert_multiple_states_equal_without_context( + states["script.can_cancel_this_one"], hist["script.can_cancel_this_one"] + ) + assert_multiple_states_equal_without_context_and_last_changed( + states["thermostat.test"], hist["thermostat.test"] + ) + assert_multiple_states_equal_without_context_and_last_changed( + states["thermostat.test2"], hist["thermostat.test2"] + ) + + +async def test_get_significant_states_with_initial(hass: HomeAssistant) -> None: + """Test that only significant states are returned. + + We should get back every thermostat change that + includes an attribute change, but only the state updates for + media player (attribute changes are not significant and not returned). + """ + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + zero, four, states = record_states(hass) + await async_wait_recording_done(hass) + + one = zero + timedelta(seconds=1) + one_with_microsecond = zero + timedelta(seconds=1, microseconds=1) + one_and_half = zero + timedelta(seconds=1.5) + for entity_id in states: + if entity_id == "media_player.test": + states[entity_id] = states[entity_id][1:] + for state in states[entity_id]: + if state.last_changed in (one, one_with_microsecond): + state.last_changed = one_and_half + state.last_updated = one_and_half + + hist = history.get_significant_states( + hass, + one_and_half, + four, + include_start_time_state=True, + entity_ids=list(states), + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +async def test_get_significant_states_without_initial(hass: HomeAssistant) -> None: + """Test that only significant states are returned. + + We should get back every thermostat change that + includes an attribute change, but only the state updates for + media player (attribute changes are not significant and not returned). 
+ """ + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + zero, four, states = record_states(hass) + await async_wait_recording_done(hass) + + one = zero + timedelta(seconds=1) + one_with_microsecond = zero + timedelta(seconds=1, microseconds=1) + one_and_half = zero + timedelta(seconds=1.5) + for entity_id in states: + states[entity_id] = [ + s + for s in states[entity_id] + if s.last_changed not in (one, one_with_microsecond) + ] + del states["media_player.test2"] + + hist = history.get_significant_states( + hass, + one_and_half, + four, + include_start_time_state=False, + entity_ids=list(states), + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +async def test_get_significant_states_entity_id(hass: HomeAssistant) -> None: + """Test that only significant states are returned for one entity.""" + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + zero, four, states = record_states(hass) + await async_wait_recording_done(hass) + + del states["media_player.test2"] + del states["media_player.test3"] + del states["thermostat.test"] + del states["thermostat.test2"] + del states["script.can_cancel_this_one"] + + hist = history.get_significant_states(hass, zero, four, ["media_player.test"]) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + +async def test_get_significant_states_multiple_entity_ids(hass: HomeAssistant) -> None: + """Test that only significant states are returned for one entity.""" + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + zero, four, states = record_states(hass) + await async_wait_recording_done(hass) + + del states["media_player.test2"] + del states["media_player.test3"] + del states["thermostat.test2"] + del states["script.can_cancel_this_one"] + + hist = history.get_significant_states( + hass, + zero, + four, + ["media_player.test", "thermostat.test"], + ) + assert_multiple_states_equal_without_context_and_last_changed( + states["media_player.test"], hist["media_player.test"] + ) + assert_multiple_states_equal_without_context_and_last_changed( + states["thermostat.test"], hist["thermostat.test"] + ) + + +async def test_get_significant_states_are_ordered(hass: HomeAssistant) -> None: + """Test order of results from get_significant_states. + + When entity ids are given, the results should be returned with the data + in the same order. 
+ """ + + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + zero, four, _states = record_states(hass) + await async_wait_recording_done(hass) + + entity_ids = ["media_player.test", "media_player.test2"] + hist = history.get_significant_states(hass, zero, four, entity_ids) + assert list(hist.keys()) == entity_ids + entity_ids = ["media_player.test2", "media_player.test"] + hist = history.get_significant_states(hass, zero, four, entity_ids) + assert list(hist.keys()) == entity_ids + + +async def test_get_significant_states_only(hass: HomeAssistant) -> None: + """Test significant states when significant_states_only is set.""" + entity_id = "sensor.test" + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + + def set_state(state, **kwargs): + """Set the state.""" + hass.states.async_set(entity_id, state, **kwargs) + return hass.states.get(entity_id) + + start = dt_util.utcnow() - timedelta(minutes=4) + points = [start + timedelta(minutes=i) for i in range(1, 4)] + + states = [] + with freeze_time(start) as freezer: + set_state("123", attributes={"attribute": 10.64}) + + freezer.move_to(points[0]) + # Attributes are different, state not + states.append(set_state("123", attributes={"attribute": 21.42})) + + freezer.move_to(points[1]) + # state is different, attributes not + states.append(set_state("32", attributes={"attribute": 21.42})) + + freezer.move_to(points[2]) + # everything is different + states.append(set_state("412", attributes={"attribute": 54.23})) + await async_wait_recording_done(hass) + + hist = history.get_significant_states( + hass, + start, + significant_changes_only=True, + entity_ids=list({state.entity_id for state in states}), + ) + + assert len(hist[entity_id]) == 2 + assert not any( + state.last_updated == states[0].last_updated for state in hist[entity_id] + ) + assert any( + state.last_updated == states[1].last_updated for state in hist[entity_id] + ) + assert any( + state.last_updated == states[2].last_updated for state in hist[entity_id] + ) + + hist = history.get_significant_states( + hass, + start, + significant_changes_only=False, + entity_ids=list({state.entity_id for state in states}), + ) + + assert len(hist[entity_id]) == 3 + assert_multiple_states_equal_without_context_and_last_changed( + states, hist[entity_id] + ) + + +def record_states( + hass: HomeAssistant, +) -> tuple[datetime, datetime, dict[str, list[State]]]: + """Record some test states. + + We inject a bunch of state updates from media player, zone and + thermostat. 
+ """ + mp = "media_player.test" + mp2 = "media_player.test2" + mp3 = "media_player.test3" + therm = "thermostat.test" + therm2 = "thermostat.test2" + zone = "zone.home" + script_c = "script.can_cancel_this_one" + + def set_state(entity_id, state, **kwargs): + """Set the state.""" + hass.states.async_set(entity_id, state, **kwargs) + return hass.states.get(entity_id) + + zero = dt_util.utcnow() + one = zero + timedelta(seconds=1) + two = one + timedelta(seconds=1) + three = two + timedelta(seconds=1) + four = three + timedelta(seconds=1) + + states = {therm: [], therm2: [], mp: [], mp2: [], mp3: [], script_c: []} + with freeze_time(one) as freezer: + states[mp].append( + set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)}) + ) + states[mp2].append( + set_state(mp2, "YouTube", attributes={"media_title": str(sentinel.mt2)}) + ) + states[mp3].append( + set_state(mp3, "idle", attributes={"media_title": str(sentinel.mt1)}) + ) + states[therm].append( + set_state(therm, 20, attributes={"current_temperature": 19.5}) + ) + + freezer.move_to(one + timedelta(microseconds=1)) + states[mp].append( + set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)}) + ) + + freezer.move_to(two) + # This state will be skipped only different in time + set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt3)}) + # This state will be skipped because domain is excluded + set_state(zone, "zoning") + states[script_c].append( + set_state(script_c, "off", attributes={"can_cancel": True}) + ) + states[therm].append( + set_state(therm, 21, attributes={"current_temperature": 19.8}) + ) + states[therm2].append( + set_state(therm2, 20, attributes={"current_temperature": 19}) + ) + + freezer.move_to(three) + states[mp].append( + set_state(mp, "Netflix", attributes={"media_title": str(sentinel.mt4)}) + ) + states[mp3].append( + set_state(mp3, "Netflix", attributes={"media_title": str(sentinel.mt3)}) + ) + # Attributes changed even though state is the same + states[therm].append( + set_state(therm, 21, attributes={"current_temperature": 20}) + ) + + return zero, four, states + + +async def test_state_changes_during_period_multiple_entities_single_test( + hass: HomeAssistant, +) -> None: + """Test state change during period with multiple entities in the same test. + + This test ensures the sqlalchemy query cache does not + generate incorrect results. 
+ """ + instance = recorder.get_instance(hass) + with patch.object(instance.states_meta_manager, "active", False): + start = dt_util.utcnow() + test_entites = {f"sensor.{i}": str(i) for i in range(30)} + for entity_id, value in test_entites.items(): + hass.states.async_set(entity_id, value) + await async_wait_recording_done(hass) + + end = dt_util.utcnow() + + for entity_id, value in test_entites.items(): + hist = history.state_changes_during_period(hass, start, end, entity_id) + assert len(hist) == 1 + assert hist[entity_id][0].state == value + + +def test_get_significant_states_without_entity_ids_raises(hass: HomeAssistant) -> None: + """Test at least one entity id is required for get_significant_states.""" + now = dt_util.utcnow() + with pytest.raises(ValueError, match="entity_ids must be provided"): + history.get_significant_states(hass, now, None) + + +def test_state_changes_during_period_without_entity_ids_raises( + hass: HomeAssistant, +) -> None: + """Test at least one entity id is required for state_changes_during_period.""" + now = dt_util.utcnow() + with pytest.raises(ValueError, match="entity_id must be provided"): + history.state_changes_during_period(hass, now, None) + + +def test_get_significant_states_with_filters_raises(hass: HomeAssistant) -> None: + """Test passing filters is no longer supported.""" + now = dt_util.utcnow() + with pytest.raises(NotImplementedError, match="Filters are no longer supported"): + history.get_significant_states( + hass, now, None, ["media_player.test"], Filters() + ) + + +def test_get_significant_states_with_non_existent_entity_ids_returns_empty( + hass: HomeAssistant, +) -> None: + """Test get_significant_states returns an empty dict when entities not in the db.""" + now = dt_util.utcnow() + assert history.get_significant_states(hass, now, None, ["nonexistent.entity"]) == {} + + +def test_state_changes_during_period_with_non_existent_entity_ids_returns_empty( + hass: HomeAssistant, +) -> None: + """Test state_changes_during_period returns an empty dict when entities not in the db.""" + now = dt_util.utcnow() + assert ( + history.state_changes_during_period(hass, now, None, "nonexistent.entity") == {} + ) + + +def test_get_last_state_changes_with_non_existent_entity_ids_returns_empty( + hass: HomeAssistant, +) -> None: + """Test get_last_state_changes returns an empty dict when entities not in the db.""" + assert history.get_last_state_changes(hass, 1, "nonexistent.entity") == {} diff --git a/tests/components/recorder/test_history_db_schema_32.py b/tests/components/recorder/test_history_db_schema_32.py index 3ee6edd8e1e..8a3e6a58ab3 100644 --- a/tests/components/recorder/test_history_db_schema_32.py +++ b/tests/components/recorder/test_history_db_schema_32.py @@ -33,7 +33,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_history_db_schema_42.py b/tests/components/recorder/test_history_db_schema_42.py index 85badeea281..083d4c0930e 100644 --- a/tests/components/recorder/test_history_db_schema_42.py +++ b/tests/components/recorder/test_history_db_schema_42.py @@ -5,15 +5,21 @@ from __future__ import annotations from copy import copy from datetime import datetime, timedelta import json -from unittest.mock import sentinel +from unittest.mock import patch, sentinel from freezegun import freeze_time import 
pytest +from sqlalchemy import text from homeassistant.components import recorder -from homeassistant.components.recorder import Recorder, history +from homeassistant.components.recorder import Recorder, get_instance, history from homeassistant.components.recorder.filters import Filters +from homeassistant.components.recorder.history import legacy from homeassistant.components.recorder.models import process_timestamp +from homeassistant.components.recorder.models.legacy import ( + LegacyLazyState, + LegacyLazyStatePreSchema31, +) from homeassistant.components.recorder.util import session_scope import homeassistant.core as ha from homeassistant.core import HomeAssistant, State @@ -29,14 +35,14 @@ from .common import ( async_wait_recording_done, old_db_schema, ) -from .db_schema_42 import StateAttributes, States, StatesMeta +from .db_schema_42 import Events, RecorderRuns, StateAttributes, States, StatesMeta from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -53,6 +59,77 @@ def setup_recorder(db_schema_42, recorder_mock: Recorder) -> recorder.Recorder: """Set up recorder.""" +async def _async_get_states( + hass: HomeAssistant, + utc_point_in_time: datetime, + entity_ids: list[str] | None = None, + run: RecorderRuns | None = None, + no_attributes: bool = False, +): + """Get states from the database.""" + + def _get_states_with_session(): + with session_scope(hass=hass, read_only=True) as session: + attr_cache = {} + pre_31_schema = get_instance(hass).schema_version < 31 + return [ + LegacyLazyStatePreSchema31(row, attr_cache, None) + if pre_31_schema + else LegacyLazyState( + row, + attr_cache, + None, + row.entity_id, + ) + for row in legacy._get_rows_with_session( + hass, + session, + utc_point_in_time, + entity_ids, + run, + no_attributes, + ) + ] + + return await recorder.get_instance(hass).async_add_executor_job( + _get_states_with_session + ) + + +def _add_db_entries( + hass: ha.HomeAssistant, point: datetime, entity_ids: list[str] +) -> None: + with session_scope(hass=hass) as session: + for idx, entity_id in enumerate(entity_ids): + session.add( + Events( + event_id=1001 + idx, + event_type="state_changed", + event_data="{}", + origin="LOCAL", + time_fired=point, + ) + ) + session.add( + States( + entity_id=entity_id, + state="on", + attributes='{"name":"the light"}', + last_changed=None, + last_updated=point, + event_id=1001 + idx, + attributes_id=1002 + idx, + ) + ) + session.add( + StateAttributes( + shared_attrs='{"name":"the shared light"}', + hash=1234 + idx, + attributes_id=1002 + idx, + ) + ) + + async def test_get_full_significant_states_with_session_entity_no_matches( hass: HomeAssistant, ) -> None: @@ -816,6 +893,175 @@ def record_states( return zero, four, states +async def test_state_changes_during_period_query_during_migration_to_schema_25( + hass: HomeAssistant, + recorder_db_url: str, +) -> None: + """Test we can query data prior to schema 25 and during migration to schema 25.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes + return + + instance = recorder.get_instance(hass) + + with patch.object(instance.states_meta_manager, "active", False): + start = dt_util.utcnow() + point = start + timedelta(seconds=1) + end = point + timedelta(seconds=1) + entity_id = 
"light.test" + await recorder.get_instance(hass).async_add_executor_job( + _add_db_entries, hass, point, [entity_id] + ) + + no_attributes = True + hist = history.state_changes_during_period( + hass, start, end, entity_id, no_attributes, include_start_time_state=False + ) + state = hist[entity_id][0] + assert state.attributes == {} + + no_attributes = False + hist = history.state_changes_during_period( + hass, start, end, entity_id, no_attributes, include_start_time_state=False + ) + state = hist[entity_id][0] + assert state.attributes == {"name": "the shared light"} + + with instance.engine.connect() as conn: + conn.execute(text("update states set attributes_id=NULL;")) + conn.execute(text("drop table state_attributes;")) + conn.commit() + + with patch.object(instance, "schema_version", 24): + instance.states_meta_manager.active = False + no_attributes = True + hist = history.state_changes_during_period( + hass, + start, + end, + entity_id, + no_attributes, + include_start_time_state=False, + ) + state = hist[entity_id][0] + assert state.attributes == {} + + no_attributes = False + hist = history.state_changes_during_period( + hass, + start, + end, + entity_id, + no_attributes, + include_start_time_state=False, + ) + state = hist[entity_id][0] + assert state.attributes == {"name": "the light"} + + +async def test_get_states_query_during_migration_to_schema_25( + hass: HomeAssistant, + recorder_db_url: str, +) -> None: + """Test we can query data prior to schema 25 and during migration to schema 25.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes + return + + instance = recorder.get_instance(hass) + + start = dt_util.utcnow() + point = start + timedelta(seconds=1) + end = point + timedelta(seconds=1) + entity_id = "light.test" + await instance.async_add_executor_job(_add_db_entries, hass, point, [entity_id]) + assert instance.states_meta_manager.active + + no_attributes = True + hist = await _async_get_states(hass, end, [entity_id], no_attributes=no_attributes) + state = hist[0] + assert state.attributes == {} + + no_attributes = False + hist = await _async_get_states(hass, end, [entity_id], no_attributes=no_attributes) + state = hist[0] + assert state.attributes == {"name": "the shared light"} + + with instance.engine.connect() as conn: + conn.execute(text("update states set attributes_id=NULL;")) + conn.execute(text("drop table state_attributes;")) + conn.commit() + + with patch.object(instance, "schema_version", 24): + instance.states_meta_manager.active = False + no_attributes = True + hist = await _async_get_states( + hass, end, [entity_id], no_attributes=no_attributes + ) + state = hist[0] + assert state.attributes == {} + + no_attributes = False + hist = await _async_get_states( + hass, end, [entity_id], no_attributes=no_attributes + ) + state = hist[0] + assert state.attributes == {"name": "the light"} + + +async def test_get_states_query_during_migration_to_schema_25_multiple_entities( + hass: HomeAssistant, + recorder_db_url: str, +) -> None: + """Test we can query data prior to schema 25 and during migration to schema 25.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes + return + + instance = recorder.get_instance(hass) + + start = dt_util.utcnow() + point = start + timedelta(seconds=1) + end = point + timedelta(seconds=1) + entity_id_1 = "light.test" + 
entity_id_2 = "switch.test" + entity_ids = [entity_id_1, entity_id_2] + + await instance.async_add_executor_job(_add_db_entries, hass, point, entity_ids) + assert instance.states_meta_manager.active + + no_attributes = True + hist = await _async_get_states(hass, end, entity_ids, no_attributes=no_attributes) + assert hist[0].attributes == {} + assert hist[1].attributes == {} + + no_attributes = False + hist = await _async_get_states(hass, end, entity_ids, no_attributes=no_attributes) + assert hist[0].attributes == {"name": "the shared light"} + assert hist[1].attributes == {"name": "the shared light"} + + with instance.engine.connect() as conn: + conn.execute(text("update states set attributes_id=NULL;")) + conn.execute(text("drop table state_attributes;")) + conn.commit() + + with patch.object(instance, "schema_version", 24): + instance.states_meta_manager.active = False + no_attributes = True + hist = await _async_get_states( + hass, end, entity_ids, no_attributes=no_attributes + ) + assert hist[0].attributes == {} + assert hist[1].attributes == {} + + no_attributes = False + hist = await _async_get_states( + hass, end, entity_ids, no_attributes=no_attributes + ) + assert hist[0].attributes == {"name": "the light"} + assert hist[1].attributes == {"name": "the light"} + + async def test_get_full_significant_states_handles_empty_last_changed( hass: HomeAssistant, ) -> None: diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index d16712e0c70..52947ce0c19 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -3,10 +3,9 @@ from __future__ import annotations import asyncio -from collections.abc import Generator from datetime import datetime, timedelta +from pathlib import Path import sqlite3 -import sys import threading from typing import Any, cast from unittest.mock import MagicMock, Mock, patch @@ -15,9 +14,9 @@ from freezegun.api import FrozenDateTimeFactory import pytest from sqlalchemy.exc import DatabaseError, OperationalError, SQLAlchemyError from sqlalchemy.pool import QueuePool +from typing_extensions import Generator from homeassistant.components import recorder -from homeassistant.components.lock import LockState from homeassistant.components.recorder import ( CONF_AUTO_PURGE, CONF_AUTO_REPACK, @@ -27,6 +26,7 @@ from homeassistant.components.recorder import ( CONF_DB_URL, CONFIG_SCHEMA, DOMAIN, + SQLITE_URL_PREFIX, Recorder, db_schema, get_instance, @@ -70,14 +70,15 @@ from homeassistant.const import ( EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, MATCH_ALL, + STATE_LOCKED, + STATE_UNLOCKED, ) -from homeassistant.core import Context, CoreState, Event, HomeAssistant, State, callback +from homeassistant.core import Context, CoreState, Event, HomeAssistant, callback from homeassistant.helpers import ( entity_registry as er, issue_registry as ir, recorder as recorder_helper, ) -from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from homeassistant.util.json import json_loads @@ -103,7 +104,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -123,7 +124,7 @@ def small_cache_size() -> Generator[None]: yield -def _default_recorder(hass: HomeAssistant) -> Recorder: +def _default_recorder(hass): 
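# A minimal, self-contained sketch of the pattern used by the
# migration-to-schema-25 tests above: they simulate an older database by
# clearing states.attributes_id and dropping the state_attributes table before
# patching schema_version to 24. The table layout below is illustrative only,
# not the recorder's real schema.
from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")
with engine.connect() as conn:
    conn.execute(text("CREATE TABLE states (state TEXT, attributes_id INTEGER)"))
    conn.execute(
        text("CREATE TABLE state_attributes (attributes_id INTEGER, shared_attrs TEXT)")
    )
    conn.execute(text("INSERT INTO states VALUES ('on', 1)"))
    # The same statements the tests issue against the recorder engine:
    conn.execute(text("update states set attributes_id=NULL;"))
    conn.execute(text("drop table state_attributes;"))
    conn.commit()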
"""Return a recorder with reasonable defaults.""" return Recorder( hass, @@ -139,16 +140,19 @@ def _default_recorder(hass: HomeAssistant) -> Recorder: ) -@pytest.mark.parametrize("persistent_database", [True]) async def test_shutdown_before_startup_finishes( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, + recorder_db_url: str, + tmp_path: Path, ) -> None: - """Test shutdown before recorder starts is clean. - - On-disk database because this test does not play nice with the MutexPool. - """ + """Test shutdown before recorder starts is clean.""" + if recorder_db_url == "sqlite://": + # On-disk database because this test does not play nice with the + # MutexPool + recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") config = { + recorder.CONF_DB_URL: recorder_db_url, recorder.CONF_COMMIT_INTERVAL: 1, } hass.set_state(CoreState.not_running) @@ -165,10 +169,11 @@ async def test_shutdown_before_startup_finishes( await hass.async_block_till_done() await hass.async_stop() - # The database executor is shutdown so we must run the - # query in the main thread for testing - instance.recorder_and_worker_thread_ids.add(threading.get_ident()) - run_info = run_information_with_session(session) + def _run_information_with_session(): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + return run_information_with_session(session) + + run_info = await instance.async_add_executor_job(_run_information_with_session) assert run_info.run_id == 1 assert run_info.start is not None @@ -214,7 +219,8 @@ async def test_shutdown_closes_connections( instance = recorder.get_instance(hass) await instance.async_db_ready await hass.async_block_till_done() - pool = instance.engine + pool = instance.engine.pool + pool.shutdown = Mock() def _ensure_connected(): with session_scope(hass=hass, read_only=True) as session: @@ -222,11 +228,10 @@ async def test_shutdown_closes_connections( await instance.async_add_executor_job(_ensure_connected) - with patch.object(pool, "dispose", wraps=pool.dispose) as dispose: - hass.bus.async_fire(EVENT_HOMEASSISTANT_FINAL_WRITE) - await hass.async_block_till_done() + hass.bus.async_fire(EVENT_HOMEASSISTANT_FINAL_WRITE) + await hass.async_block_till_done() - assert len(dispose.mock_calls) == 1 + assert len(pool.shutdown.mock_calls) == 1 with pytest.raises(RuntimeError): assert instance.get_session() @@ -580,7 +585,7 @@ async def test_saving_state_with_commit_interval_zero( assert db_states[0].event_id is None -async def _add_entities(hass: HomeAssistant, entity_ids: list[str]) -> list[State]: +async def _add_entities(hass, entity_ids): """Add entities.""" attributes = {"test_attr": 5, "test_attr_10": "nice"} for idx, entity_id in enumerate(entity_ids): @@ -604,7 +609,7 @@ async def _add_entities(hass: HomeAssistant, entity_ids: list[str]) -> list[Stat return states -def _state_with_context(hass: HomeAssistant, entity_id: str) -> State | None: +def _state_with_context(hass, entity_id): # We don't restore context unless we need it by joining the # events table on the event_id for state_changed events return hass.states.get(entity_id) @@ -833,8 +838,8 @@ async def test_saving_state_and_removing_entity( ) -> None: """Test saving the state of a removed entity.""" entity_id = "lock.mine" - hass.states.async_set(entity_id, LockState.LOCKED) - hass.states.async_set(entity_id, LockState.UNLOCKED) + hass.states.async_set(entity_id, STATE_LOCKED) + hass.states.async_set(entity_id, STATE_UNLOCKED) hass.states.async_remove(entity_id) await 
async_wait_recording_done(hass) @@ -847,9 +852,9 @@ async def test_saving_state_and_removing_entity( ) assert len(states) == 3 assert states[0].entity_id == entity_id - assert states[0].state == LockState.LOCKED + assert states[0].state == STATE_LOCKED assert states[1].entity_id == entity_id - assert states[1].state == LockState.UNLOCKED + assert states[1].state == STATE_UNLOCKED assert states[2].entity_id == entity_id assert states[2].state is None @@ -900,19 +905,16 @@ async def test_saving_event_with_oversized_data( hass.bus.async_fire("test_event", event_data) hass.bus.async_fire("test_event_too_big", massive_dict) await async_wait_recording_done(hass) + events = {} with session_scope(hass=hass, read_only=True) as session: - events = { - event_type: data - for _, data, event_type in ( - session.query( - Events.event_id, EventData.shared_data, EventTypes.event_type - ) - .outerjoin(EventData, Events.data_id == EventData.data_id) - .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) - .where(EventTypes.event_type.in_(["test_event", "test_event_too_big"])) - ) - } + for _, data, event_type in ( + session.query(Events.event_id, EventData.shared_data, EventTypes.event_type) + .outerjoin(EventData, Events.data_id == EventData.data_id) + .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) + .where(EventTypes.event_type.in_(["test_event", "test_event_too_big"])) + ): + events[event_type] = data assert "test_event_too_big" in caplog.text @@ -930,19 +932,18 @@ async def test_saving_event_invalid_context_ulid( event_data = {"test_attr": 5, "test_attr_10": "nice"} hass.bus.async_fire("test_event", event_data, context=Context(id="invalid")) await async_wait_recording_done(hass) + events = {} with session_scope(hass=hass, read_only=True) as session: - events = { - event_type: data - for _, data, event_type in ( - session.query( - Events.event_id, EventData.shared_data, EventTypes.event_type - ) - .outerjoin(EventData, Events.data_id == EventData.data_id) - .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) - .where(EventTypes.event_type.in_(["test_event"])) - ) - } + for _, data, event_type in ( + session.query(Events.event_id, EventData.shared_data, EventTypes.event_type) + .outerjoin(EventData, Events.data_id == EventData.data_id) + .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) + .where(EventTypes.event_type.in_(["test_event"])) + ): + events[event_type] = data + + assert "invalid" in caplog.text assert len(events) == 1 assert json_loads(events["test_event"]) == event_data @@ -1003,7 +1004,7 @@ async def test_defaults_set(hass: HomeAssistant) -> None: """Test the config defaults are set.""" recorder_config = None - async def mock_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def mock_setup(hass, config): """Mock setup.""" nonlocal recorder_config recorder_config = config["recorder"] @@ -1364,27 +1365,28 @@ async def test_statistics_runs_initiated( @pytest.mark.freeze_time("2022-09-13 09:00:00+02:00") -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.parametrize("enable_missing_statistics", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_compile_missing_statistics( - async_test_recorder: RecorderInstanceGenerator, freezer: FrozenDateTimeFactory + tmp_path: Path, freezer: FrozenDateTimeFactory ) -> None: """Test missing statistics are compiled on startup.""" now = dt_util.utcnow().replace(minute=0, second=0, 
microsecond=0) + test_dir = tmp_path.joinpath("sqlite") + test_dir.mkdir() + test_db_file = test_dir.joinpath("test_run_info.db") + dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" def get_statistic_runs(hass: HomeAssistant) -> list: with session_scope(hass=hass, read_only=True) as session: return list(session.query(StatisticsRuns)) - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass, wait_recorder=False) as instance, - ): + async with async_test_home_assistant() as hass: + recorder_helper.async_initialize_recorder(hass) + await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}}) await hass.async_start() await async_wait_recording_done(hass) await async_wait_recording_done(hass) + instance = recorder.get_instance(hass) statistics_runs = await instance.async_add_executor_job( get_statistic_runs, hass ) @@ -1410,10 +1412,7 @@ async def test_compile_missing_statistics( stats_hourly.append(event) freezer.tick(timedelta(hours=1)) - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass, wait_recorder=False) as instance, - ): + async with async_test_home_assistant() as hass: hass.bus.async_listen( EVENT_RECORDER_5MIN_STATISTICS_GENERATED, async_5min_stats_updated_listener ) @@ -1422,9 +1421,13 @@ async def test_compile_missing_statistics( async_hourly_stats_updated_listener, ) + recorder_helper.async_initialize_recorder(hass) + await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}}) + await hass.async_start() await async_wait_recording_done(hass) await async_wait_recording_done(hass) + instance = recorder.get_instance(hass) statistics_runs = await instance.async_add_executor_job( get_statistic_runs, hass ) @@ -1624,24 +1627,24 @@ async def test_service_disable_states_not_recording( ) -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage -async def test_service_disable_run_information_recorded( - async_test_recorder: RecorderInstanceGenerator, -) -> None: +async def test_service_disable_run_information_recorded(tmp_path: Path) -> None: """Test that runs are still recorded when recorder is disabled.""" + test_dir = tmp_path.joinpath("sqlite") + test_dir.mkdir() + test_db_file = test_dir.joinpath("test_run_info.db") + dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" def get_recorder_runs(hass: HomeAssistant) -> list: with session_scope(hass=hass, read_only=True) as session: return list(session.query(RecorderRuns)) - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): + async with async_test_home_assistant() as hass: + recorder_helper.async_initialize_recorder(hass) + await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}}) await hass.async_start() await async_wait_recording_done(hass) + instance = recorder.get_instance(hass) db_run_info = await instance.async_add_executor_job(get_recorder_runs, hass) assert len(db_run_info) == 1 assert db_run_info[0].start is not None @@ -1657,13 +1660,13 @@ async def test_service_disable_run_information_recorded( await async_wait_recording_done(hass) await hass.async_stop() - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): + async with async_test_home_assistant() as hass: + recorder_helper.async_initialize_recorder(hass) + await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}}) await hass.async_start() await async_wait_recording_done(hass) + instance = 
recorder.get_instance(hass) db_run_info = await instance.async_add_executor_job(get_recorder_runs, hass) assert len(db_run_info) == 2 assert db_run_info[0].start is not None @@ -1678,17 +1681,23 @@ class CannotSerializeMe: """A class that the JSONEncoder cannot serialize.""" -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.parametrize("recorder_config", [{CONF_COMMIT_INTERVAL: 0}]) async def test_database_corruption_while_running( - hass: HomeAssistant, - recorder_mock: Recorder, - recorder_db_url: str, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, tmp_path: Path, caplog: pytest.LogCaptureFixture ) -> None: """Test we can recover from sqlite3 db corruption.""" + + def _create_tmpdir_for_test_db() -> Path: + test_dir = tmp_path.joinpath("sqlite") + test_dir.mkdir() + return test_dir.joinpath("test.db") + + test_db_file = await hass.async_add_executor_job(_create_tmpdir_for_test_db) + dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" + + recorder_helper.async_initialize_recorder(hass) + assert await async_setup_component( + hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl, CONF_COMMIT_INTERVAL: 0}} + ) await hass.async_block_till_done() caplog.clear() @@ -1698,9 +1707,7 @@ async def test_database_corruption_while_running( hass.states.async_set("test.lost", "on", {}) sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError( - "database disk image is malformed" - ) + sqlite3_exception.__cause__ = sqlite3.DatabaseError() await async_wait_recording_done(hass) with patch.object( @@ -1709,7 +1716,6 @@ async def test_database_corruption_while_running( side_effect=OperationalError("statement", {}, []), ): await async_wait_recording_done(hass) - test_db_file = recorder_db_url.removeprefix("sqlite:///") await hass.async_add_executor_job(corrupt_db_file, test_db_file) await async_wait_recording_done(hass) @@ -1803,21 +1809,23 @@ async def test_entity_id_filter( assert len(db_events) == idx + 1, data -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.parametrize("persistent_database", [True]) async def test_database_lock_and_unlock( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, + recorder_db_url: str, + tmp_path: Path, ) -> None: - """Test writing events during lock getting written after unlocking. + """Test writing events during lock getting written after unlocking.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # Database locking is only used for SQLite + return - This test is specific for SQLite: Locking is not implemented for other engines. - - Use file DB, in memory DB cannot do write locks. - """ + if recorder_db_url == "sqlite://": + # Use file DB, in memory DB cannot do write locks. 
+ recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") config = { recorder.CONF_COMMIT_INTERVAL: 0, + recorder.CONF_DB_URL: recorder_db_url, } await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() @@ -1855,23 +1863,26 @@ async def test_database_lock_and_unlock( assert len(db_events) == 1 -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.parametrize("persistent_database", [True]) async def test_database_lock_and_overflow( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, + recorder_db_url: str, + tmp_path: Path, caplog: pytest.LogCaptureFixture, issue_registry: ir.IssueRegistry, ) -> None: - """Test writing events during lock leading to overflow the queue causes the database to unlock. + """Test writing events during lock leading to overflow the queue causes the database to unlock.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # Database locking is only used for SQLite + return pytest.skip("Database locking is only used for SQLite") - This test is specific for SQLite: Locking is not implemented for other engines. - - Use file DB, in memory DB cannot do write locks. - """ + # Use file DB, in memory DB cannot do write locks. + if recorder_db_url == "sqlite://": + # Use file DB, in memory DB cannot do write locks. + recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") config = { recorder.CONF_COMMIT_INTERVAL: 0, + recorder.CONF_DB_URL: recorder_db_url, } def _get_db_events(): @@ -1885,9 +1896,7 @@ async def test_database_lock_and_overflow( with ( patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), patch.object(recorder.core, "DB_LOCK_QUEUE_CHECK_TIMEOUT", 0.01), - patch.object( - recorder.core, "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", sys.maxsize - ), + patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 0), ): await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() @@ -1920,23 +1929,25 @@ async def test_database_lock_and_overflow( assert start_time.count(":") == 2 -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.parametrize("persistent_database", [True]) async def test_database_lock_and_overflow_checks_available_memory( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, + recorder_db_url: str, + tmp_path: Path, caplog: pytest.LogCaptureFixture, issue_registry: ir.IssueRegistry, ) -> None: - """Test writing events during lock leading to overflow the queue causes the database to unlock. + """Test writing events during lock leading to overflow the queue causes the database to unlock.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + return pytest.skip("Database locking is only used for SQLite") - This test is specific for SQLite: Locking is not implemented for other engines. - - Use file DB, in memory DB cannot do write locks. - """ + # Use file DB, in memory DB cannot do write locks. + if recorder_db_url == "sqlite://": + # Use file DB, in memory DB cannot do write locks. 
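# The lock-and-overflow tests in the surrounding hunks shrink the recorder
# queue and shorten its lock timeout by patching module-level constants, so the
# overflow path triggers quickly. A minimal sketch of that mechanism, using
# constant names visible in these hunks; the test body itself is elided.
from unittest.mock import patch

from homeassistant.components import recorder

with (
    patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1),
    patch.object(recorder.core, "DB_LOCK_QUEUE_CHECK_TIMEOUT", 0.01),
):
    ...  # set up the recorder instance and fire events while the database is locked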
+ recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") config = { recorder.CONF_COMMIT_INTERVAL: 0, + recorder.CONF_DB_URL: recorder_db_url, } def _get_db_events(): @@ -1947,43 +1958,26 @@ async def test_database_lock_and_overflow_checks_available_memory( ) ) - with patch( - "homeassistant.components.recorder.core.QUEUE_CHECK_INTERVAL", - timedelta(seconds=1), - ): - await async_setup_recorder_instance(hass, config) - await hass.async_block_till_done() + await async_setup_recorder_instance(hass, config) + await hass.async_block_till_done() event_type = "EVENT_TEST" event_types = (event_type,) await async_wait_recording_done(hass) - min_available_memory = 256 * 1024**2 - - out_of_ram = False - - def _get_available_memory(*args: Any, **kwargs: Any) -> int: - nonlocal out_of_ram - return min_available_memory / 2 if out_of_ram else min_available_memory with ( patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), - patch.object( - recorder.core, - "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", - min_available_memory, - ), + patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 1), patch.object(recorder.core, "DB_LOCK_QUEUE_CHECK_TIMEOUT", 0.01), patch.object( recorder.core.Recorder, "_available_memory", - side_effect=_get_available_memory, + return_value=recorder.core.ESTIMATED_QUEUE_ITEM_SIZE * 4, ), ): instance = get_instance(hass) - assert await instance.lock_database() + await instance.lock_database() - db_events = await instance.async_add_executor_job(_get_db_events) - assert len(db_events) == 0 # Record up to the extended limit (which takes into account the available memory) for _ in range(2): event_data = {"test_attr": 5, "test_attr_10": "nice"} @@ -2000,7 +1994,6 @@ async def test_database_lock_and_overflow_checks_available_memory( assert "Database queue backlog reached more than" not in caplog.text - out_of_ram = True # Record beyond the extended limit (which takes into account the available memory) for _ in range(20): event_data = {"test_attr": 5, "test_attr_10": "nice"} @@ -2026,15 +2019,13 @@ async def test_database_lock_and_overflow_checks_available_memory( assert start_time.count(":") == 2 -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_database_lock_timeout( hass: HomeAssistant, setup_recorder: None, recorder_db_url: str ) -> None: - """Test locking database timeout when recorder stopped. - - This test is specific for SQLite: Locking is not implemented for other engines. - """ + """Test locking database timeout when recorder stopped.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test is specific for SQLite: Locking is not implemented for other engines + return hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) @@ -2102,18 +2093,16 @@ async def test_database_connection_keep_alive( assert "Sending keepalive" in caplog.text -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_database_connection_keep_alive_disabled_on_sqlite( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, recorder_db_url: str, ) -> None: - """Test we do not do keep alive for sqlite. - - This test is specific for SQLite, keepalive runs on other engines. 
- """ + """Test we do not do keep alive for sqlite.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test is specific for SQLite, keepalive runs on other engines + return instance = await async_setup_recorder_instance(hass) hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -2321,7 +2310,7 @@ async def test_connect_args_priority(hass: HomeAssistant, config_url) -> None: __bases__ = [] _has_events = False - def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def __init__(*args, **kwargs): ... @property def is_async(self): @@ -2568,13 +2557,7 @@ async def test_clean_shutdown_when_recorder_thread_raises_during_validate_db_sch assert instance.engine is None -@pytest.mark.parametrize( - ("func_to_patch", "expected_setup_result"), - [("migrate_schema_non_live", False), ("migrate_schema_live", False)], -) -async def test_clean_shutdown_when_schema_migration_fails( - hass: HomeAssistant, func_to_patch: str, expected_setup_result: bool -) -> None: +async def test_clean_shutdown_when_schema_migration_fails(hass: HomeAssistant) -> None: """Test we still shutdown cleanly when schema migration fails.""" with ( patch.object( @@ -2585,13 +2568,13 @@ async def test_clean_shutdown_when_schema_migration_fails( patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch.object( migration, - func_to_patch, + "migrate_schema", side_effect=Exception, ), ): if recorder.DOMAIN not in hass.data: recorder_helper.async_initialize_recorder(hass) - setup_result = await async_setup_component( + assert await async_setup_component( hass, recorder.DOMAIN, { @@ -2602,7 +2585,6 @@ async def test_clean_shutdown_when_schema_migration_fails( } }, ) - assert setup_result == expected_setup_result await hass.async_block_till_done() instance = recorder.get_instance(hass) @@ -2660,6 +2642,7 @@ async def test_commit_before_commits_pending_writes( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, recorder_db_url: str, + tmp_path: Path, ) -> None: """Test commit_before with a non-zero commit interval. 
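# Several tests in the hunks above replace the skip_on_db_engine marker with an
# inline guard on recorder_db_url. A minimal sketch of that guard; the helper
# name is illustrative, the tests inline the same check.
import pytest

def skip_unless_sqlite(recorder_db_url: str) -> None:
    """Skip the calling test when the configured database is not SQLite."""
    if recorder_db_url.startswith(("mysql://", "postgresql://")):
        pytest.skip("This test is specific to SQLite")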
@@ -2729,20 +2712,3 @@ async def test_all_tables_use_default_table_args(hass: HomeAssistant) -> None: """Test that all tables use the default table args.""" for table in db_schema.Base.metadata.tables.values(): assert table.kwargs.items() >= db_schema._DEFAULT_TABLE_ARGS.items() - - -async def test_empty_entity_id( - hass: HomeAssistant, - async_setup_recorder_instance: RecorderInstanceGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the recorder can handle an empty entity_id.""" - await async_setup_recorder_instance( - hass, - { - "exclude": {"domains": "hidden_domain"}, - }, - ) - hass.bus.async_fire("hello", {"entity_id": ""}) - await async_wait_recording_done(hass) - assert "Invalid entity ID" not in caplog.text diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 14978bee5a9..a21f4771616 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -4,12 +4,11 @@ import datetime import importlib import sqlite3 import sys -from unittest.mock import ANY, Mock, PropertyMock, call, patch +import threading +from unittest.mock import Mock, PropertyMock, call, patch import pytest -from sqlalchemy import create_engine, inspect, text -from sqlalchemy.engine import Engine -from sqlalchemy.engine.interfaces import ReflectedForeignKeyConstraint +from sqlalchemy import create_engine, text from sqlalchemy.exc import ( DatabaseError, InternalError, @@ -17,37 +16,28 @@ from sqlalchemy.exc import ( ProgrammingError, SQLAlchemyError, ) -from sqlalchemy.orm import Session, scoped_session, sessionmaker +from sqlalchemy.orm import Session from sqlalchemy.pool import StaticPool +from homeassistant.bootstrap import async_setup_component from homeassistant.components import persistent_notification as pn, recorder from homeassistant.components.recorder import db_schema, migration from homeassistant.components.recorder.db_schema import ( SCHEMA_VERSION, - Events, RecorderRuns, States, ) from homeassistant.components.recorder.util import session_scope -from homeassistant.core import HomeAssistant, State +from homeassistant.core import HomeAssistant from homeassistant.helpers import recorder as recorder_helper import homeassistant.util.dt as dt_util from .common import async_wait_recording_done, create_engine_test -from .conftest import InstrumentedMigration from tests.common import async_fire_time_changed -from tests.typing import RecorderInstanceGenerator -@pytest.fixture -async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, -) -> None: - """Set up recorder.""" - - -def _get_native_states(hass: HomeAssistant, entity_id: str) -> list[State]: +def _get_native_states(hass, entity_id): with session_scope(hass=hass, read_only=True) as session: instance = recorder.get_instance(hass) metadata_id = instance.states_meta_manager.get(entity_id, session, True) @@ -58,13 +48,12 @@ def _get_native_states(hass: HomeAssistant, entity_id: str) -> list[State]: return states -async def test_schema_update_calls( - hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator -) -> None: +async def test_schema_update_calls(recorder_db_url: str, hass: HomeAssistant) -> None: """Test that schema migrations occur in correct order.""" assert recorder.util.async_migration_in_progress(hass) is False with ( + patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, @@ -73,60 
+62,26 @@ async def test_schema_update_calls( "homeassistant.components.recorder.migration._apply_update", wraps=migration._apply_update, ) as update, - patch( - "homeassistant.components.recorder.migration._migrate_schema", - wraps=migration._migrate_schema, - ) as migrate_schema, ): - await async_setup_recorder_instance(hass) + recorder_helper.async_initialize_recorder(hass) + await async_setup_component( + hass, "recorder", {"recorder": {"db_url": recorder_db_url}} + ) await async_wait_recording_done(hass) assert recorder.util.async_migration_in_progress(hass) is False instance = recorder.get_instance(hass) engine = instance.engine session_maker = instance.get_session - assert update.mock_calls == [ - call(instance, hass, engine, session_maker, version + 1, 0) - for version in range(db_schema.SCHEMA_VERSION) - ] - assert migrate_schema.mock_calls == [ - call( - instance, - hass, - engine, - session_maker, - migration.SchemaValidationStatus( - current_version=0, - migration_needed=True, - non_live_data_migration_needed=True, - schema_errors=set(), - start_version=0, - ), - 42, - ), - call( - instance, - hass, - engine, - session_maker, - migration.SchemaValidationStatus( - current_version=42, - migration_needed=True, - non_live_data_migration_needed=True, - schema_errors=set(), - start_version=0, - ), - db_schema.SCHEMA_VERSION, - ), - ] + update.assert_has_calls( + [ + call(instance, hass, engine, session_maker, version + 1, 0) + for version in range(db_schema.SCHEMA_VERSION) + ] + ) -async def test_migration_in_progress( - hass: HomeAssistant, - recorder_db_url: str, - async_setup_recorder_instance: RecorderInstanceGenerator, - instrument_migration: InstrumentedMigration, -) -> None: +async def test_migration_in_progress(recorder_db_url: str, hass: HomeAssistant) -> None: """Test that we can check for migration in progress.""" if recorder_db_url.startswith("mysql://"): # The database drop at the end of this test currently hangs on MySQL @@ -139,55 +94,38 @@ async def test_migration_in_progress( assert recorder.util.async_migration_in_progress(hass) is False with ( + patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), ): - await async_setup_recorder_instance( - hass, wait_recorder=False, wait_recorder_setup=False + recorder_helper.async_initialize_recorder(hass) + await async_setup_component( + hass, "recorder", {"recorder": {"db_url": recorder_db_url}} ) - await hass.async_add_executor_job(instrument_migration.migration_started.wait) + await recorder.get_instance(hass).async_migration_event.wait() assert recorder.util.async_migration_in_progress(hass) is True - - # Let migration finish - instrument_migration.migration_stall.set() await async_wait_recording_done(hass) assert recorder.util.async_migration_in_progress(hass) is False assert recorder.get_instance(hass).schema_version == SCHEMA_VERSION -@pytest.mark.parametrize( - ( - "func_to_patch", - "expected_setup_result", - "expected_pn_create", - "expected_pn_dismiss", - ), - [ - ("migrate_schema_non_live", False, 1, 0), - ("migrate_schema_live", True, 2, 1), - ], -) async def test_database_migration_failed( - hass: HomeAssistant, - async_setup_recorder_instance: RecorderInstanceGenerator, - func_to_patch: str, - expected_setup_result: bool, - expected_pn_create: int, - expected_pn_dismiss: int, + recorder_db_url: str, hass: HomeAssistant ) -> None: """Test we notify if the migration fails.""" assert 
recorder.util.async_migration_in_progress(hass) is False with ( + patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), patch( - f"homeassistant.components.recorder.migration.{func_to_patch}", + "homeassistant.components.recorder.migration._apply_update", side_effect=ValueError, ), patch( @@ -199,8 +137,9 @@ async def test_database_migration_failed( side_effect=pn.dismiss, ) as mock_dismiss, ): - await async_setup_recorder_instance( - hass, wait_recorder=False, expected_setup_result=expected_setup_result + recorder_helper.async_initialize_recorder(hass) + await async_setup_component( + hass, "recorder", {"recorder": {"db_url": recorder_db_url}} ) hass.states.async_set("my.entity", "on", {}) hass.states.async_set("my.entity", "off", {}) @@ -209,220 +148,67 @@ async def test_database_migration_failed( await hass.async_block_till_done() assert recorder.util.async_migration_in_progress(hass) is False - assert len(mock_create.mock_calls) == expected_pn_create - assert len(mock_dismiss.mock_calls) == expected_pn_dismiss + assert len(mock_create.mock_calls) == 2 + assert len(mock_dismiss.mock_calls) == 1 -@pytest.mark.parametrize( - ( - "patch_version", - "func_to_patch", - "expected_setup_result", - "expected_pn_create", - "expected_pn_dismiss", - ), - [ - # Test error handling in _update_states_table_with_foreign_key_options - (11, "homeassistant.components.recorder.migration.DropConstraint", False, 1, 0), - # Test error handling in _modify_columns - (12, "sqlalchemy.engine.base.Connection.execute", False, 1, 0), - # Test error handling in _drop_foreign_key_constraints - (46, "homeassistant.components.recorder.migration.DropConstraint", False, 2, 1), - ], -) -@pytest.mark.skip_on_db_engine(["sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") -async def test_database_migration_failed_non_sqlite( - hass: HomeAssistant, - async_setup_recorder_instance: RecorderInstanceGenerator, - instrument_migration: InstrumentedMigration, - patch_version: int, - func_to_patch: str, - expected_setup_result: bool, - expected_pn_create: int, - expected_pn_dismiss: int, +async def test_database_migration_encounters_corruption( + recorder_db_url: str, hass: HomeAssistant ) -> None: - """Test we notify if the migration fails.""" - assert recorder.util.async_migration_in_progress(hass) is False - instrument_migration.stall_on_schema_version = patch_version - - with ( - patch( - "homeassistant.components.recorder.core.create_engine", - new=create_engine_test, - ), - patch( - "homeassistant.components.persistent_notification.create", - side_effect=pn.create, - ) as mock_create, - patch( - "homeassistant.components.persistent_notification.dismiss", - side_effect=pn.dismiss, - ) as mock_dismiss, - ): - await async_setup_recorder_instance( - hass, - wait_recorder=False, - wait_recorder_setup=False, - expected_setup_result=expected_setup_result, - ) - # Wait for migration to reach the schema version we want to break - await hass.async_add_executor_job( - instrument_migration.apply_update_stalled.wait - ) - - # Make it fail - with patch( - func_to_patch, - side_effect=OperationalError( - None, None, OSError("No space left on device") - ), - ): - instrument_migration.migration_stall.set() - hass.states.async_set("my.entity", "on", {}) - hass.states.async_set("my.entity", "off", {}) - await hass.async_block_till_done() - await hass.async_add_executor_job(recorder.get_instance(hass).join) - await 
hass.async_block_till_done() - - assert instrument_migration.apply_update_version == patch_version - assert recorder.util.async_migration_in_progress(hass) is False - assert len(mock_create.mock_calls) == expected_pn_create - assert len(mock_dismiss.mock_calls) == expected_pn_dismiss - - -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -async def test_live_database_migration_encounters_corruption( - hass: HomeAssistant, - recorder_db_url: str, - async_setup_recorder_instance: RecorderInstanceGenerator, -) -> None: - """Test we move away the database if its corrupt. - - This test is specific for SQLite, wiping the database on error only happens - with SQLite. - """ + """Test we move away the database if its corrupt.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test is specific for SQLite, wiping the database on error only happens + # with SQLite. + return assert recorder.util.async_migration_in_progress(hass) is False sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError( - "database disk image is malformed" - ) + sqlite3_exception.__cause__ = sqlite3.DatabaseError() with ( + patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.migration._schema_is_current", side_effect=[False], ), patch( - "homeassistant.components.recorder.migration.migrate_schema_live", + "homeassistant.components.recorder.migration.migrate_schema", side_effect=sqlite3_exception, ), patch( "homeassistant.components.recorder.core.move_away_broken_database" ) as move_away, patch( - "homeassistant.components.recorder.core.Recorder._setup_run", - autospec=True, - wraps=recorder.Recorder._setup_run, - ) as setup_run, + "homeassistant.components.recorder.Recorder._schedule_compile_missing_statistics", + ), ): - await async_setup_recorder_instance(hass) + recorder_helper.async_initialize_recorder(hass) + await async_setup_component( + hass, "recorder", {"recorder": {"db_url": recorder_db_url}} + ) hass.states.async_set("my.entity", "on", {}) hass.states.async_set("my.entity", "off", {}) await async_wait_recording_done(hass) assert recorder.util.async_migration_in_progress(hass) is False - move_away.assert_called_once() - setup_run.assert_called_once() + assert move_away.called -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -async def test_non_live_database_migration_encounters_corruption( - hass: HomeAssistant, - recorder_db_url: str, - async_setup_recorder_instance: RecorderInstanceGenerator, -) -> None: - """Test we move away the database if its corrupt. - - This test is specific for SQLite, wiping the database on error only happens - with SQLite. 
- """ - - assert recorder.util.async_migration_in_progress(hass) is False - - sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError( - "database disk image is malformed" - ) - - with ( - patch( - "homeassistant.components.recorder.migration._schema_is_current", - side_effect=[False], - ), - patch( - "homeassistant.components.recorder.migration.migrate_schema_live", - ) as migrate_schema_live, - patch( - "homeassistant.components.recorder.migration.migrate_schema_non_live", - side_effect=sqlite3_exception, - ), - patch( - "homeassistant.components.recorder.core.move_away_broken_database" - ) as move_away, - patch( - "homeassistant.components.recorder.core.Recorder._setup_run", - autospec=True, - wraps=recorder.Recorder._setup_run, - ) as setup_run, - ): - await async_setup_recorder_instance(hass) - hass.states.async_set("my.entity", "on", {}) - hass.states.async_set("my.entity", "off", {}) - await async_wait_recording_done(hass) - - assert recorder.util.async_migration_in_progress(hass) is False - move_away.assert_called_once() - migrate_schema_live.assert_not_called() - setup_run.assert_called_once() - - -@pytest.mark.parametrize( - ( - "live_migration", - "func_to_patch", - "expected_setup_result", - "expected_pn_create", - "expected_pn_dismiss", - ), - [ - (True, "migrate_schema_live", True, 2, 1), - (False, "migrate_schema_non_live", False, 1, 0), - ], -) async def test_database_migration_encounters_corruption_not_sqlite( - hass: HomeAssistant, - async_setup_recorder_instance: RecorderInstanceGenerator, - live_migration: bool, - func_to_patch: str, - expected_setup_result: bool, - expected_pn_create: int, - expected_pn_dismiss: int, + recorder_db_url: str, hass: HomeAssistant ) -> None: """Test we fail on database error when we cannot recover.""" assert recorder.util.async_migration_in_progress(hass) is False with ( + patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.migration._schema_is_current", side_effect=[False], ), patch( - f"homeassistant.components.recorder.migration.{func_to_patch}", + "homeassistant.components.recorder.migration.migrate_schema", side_effect=DatabaseError("statement", {}, []), ), patch( @@ -436,13 +222,10 @@ async def test_database_migration_encounters_corruption_not_sqlite( "homeassistant.components.persistent_notification.dismiss", side_effect=pn.dismiss, ) as mock_dismiss, - patch( - "homeassistant.components.recorder.core.migration.live_migration", - return_value=live_migration, - ), ): - await async_setup_recorder_instance( - hass, wait_recorder=False, expected_setup_result=expected_setup_result + recorder_helper.async_initialize_recorder(hass) + await async_setup_component( + hass, "recorder", {"recorder": {"db_url": recorder_db_url}} ) hass.states.async_set("my.entity", "on", {}) hass.states.async_set("my.entity", "off", {}) @@ -452,39 +235,39 @@ async def test_database_migration_encounters_corruption_not_sqlite( assert recorder.util.async_migration_in_progress(hass) is False assert not move_away.called - assert len(mock_create.mock_calls) == expected_pn_create - assert len(mock_dismiss.mock_calls) == expected_pn_dismiss + assert len(mock_create.mock_calls) == 2 + assert len(mock_dismiss.mock_calls) == 1 async def test_events_during_migration_are_queued( - hass: HomeAssistant, - async_setup_recorder_instance: RecorderInstanceGenerator, - instrument_migration: InstrumentedMigration, + recorder_db_url: str, hass: HomeAssistant ) -> None: 
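# The migration tests below coordinate the test with the recorder thread using
# threading.Event objects: a wrapper around the real migration helper blocks
# until the test releases it and signals when the real call has finished. A
# minimal generic sketch of that pattern; the names here are illustrative.
import threading

migration_stall = threading.Event()
migration_done = threading.Event()

def instrument(real_func):
    """Wrap real_func so a test can stall it and observe its completion."""

    def wrapper(*args, **kwargs):
        migration_stall.wait()  # hold the worker thread until the test is ready
        try:
            return real_func(*args, **kwargs)
        finally:
            migration_done.set()  # let the test continue past the migration

    return wrapper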
"""Test that events during migration are queued.""" assert recorder.util.async_migration_in_progress(hass) is False with ( + patch( + "homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", + True, + ), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), ): - await async_setup_recorder_instance( - hass, {"commit_interval": 0}, wait_recorder=False, wait_recorder_setup=False + recorder_helper.async_initialize_recorder(hass) + await async_setup_component( + hass, + "recorder", + {"recorder": {"db_url": recorder_db_url, "commit_interval": 0}}, ) - await hass.async_add_executor_job(instrument_migration.migration_started.wait) - assert recorder.util.async_migration_in_progress(hass) is True hass.states.async_set("my.entity", "on", {}) hass.states.async_set("my.entity", "off", {}) await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) - - # Let migration finish - instrument_migration.migration_stall.set() await recorder.get_instance(hass).async_recorder_ready.wait() await async_wait_recording_done(hass) @@ -496,29 +279,27 @@ async def test_events_during_migration_are_queued( async def test_events_during_migration_queue_exhausted( - hass: HomeAssistant, - async_setup_recorder_instance: RecorderInstanceGenerator, - instrument_migration: InstrumentedMigration, + recorder_db_url: str, hass: HomeAssistant ) -> None: """Test that events during migration takes so long the queue is exhausted.""" assert recorder.util.async_migration_in_progress(hass) is False with ( + patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), - patch.object( - recorder.core, "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", sys.maxsize - ), + patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 0), ): - await async_setup_recorder_instance( - hass, {"commit_interval": 0}, wait_recorder=False, wait_recorder_setup=False + recorder_helper.async_initialize_recorder(hass) + await async_setup_component( + hass, + "recorder", + {"recorder": {"db_url": recorder_db_url, "commit_interval": 0}}, ) - await hass.async_add_executor_job(instrument_migration.migration_started.wait) - assert recorder.util.async_migration_in_progress(hass) is True hass.states.async_set("my.entity", "on", {}) await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) @@ -526,9 +307,6 @@ async def test_events_during_migration_queue_exhausted( async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) await hass.async_block_till_done() hass.states.async_set("my.entity", "off", {}) - - # Let migration finish - instrument_migration.migration_stall.set() await recorder.get_instance(hass).async_recorder_ready.wait() await async_wait_recording_done(hass) @@ -547,23 +325,10 @@ async def test_events_during_migration_queue_exhausted( @pytest.mark.parametrize( ("start_version", "live"), - [ - (0, False), - (9, False), - (16, False), - (18, False), - (22, False), - (25, False), - (43, True), - ], + [(0, True), (16, True), (18, True), (22, True), (25, True)], ) async def test_schema_migrate( - hass: HomeAssistant, - recorder_db_url: str, - async_setup_recorder_instance: RecorderInstanceGenerator, - instrument_migration: 
InstrumentedMigration, - start_version, - live, + recorder_db_url: str, hass: HomeAssistant, start_version, live ) -> None: """Test the full schema migration logic. @@ -572,6 +337,11 @@ async def test_schema_migrate( inspection could quickly become quite cumbersome. """ + migration_done = threading.Event() + migration_stall = threading.Event() + migration_version = None + real_migrate_schema = recorder.migration.migrate_schema + real_apply_update = recorder.migration._apply_update real_create_index = recorder.migration._create_index create_calls = 0 @@ -598,6 +368,33 @@ async def test_schema_migrate( start=self.recorder_runs_manager.recording_start, created=dt_util.utcnow() ) + def _instrument_migrate_schema(*args): + """Control migration progress and check results.""" + nonlocal migration_done + nonlocal migration_version + try: + real_migrate_schema(*args) + except Exception: + migration_done.set() + raise + + # Check and report the outcome of the migration; if migration fails + # the recorder will silently create a new database. + with session_scope(hass=hass, read_only=True) as session: + res = ( + session.query(db_schema.SchemaChanges) + .order_by(db_schema.SchemaChanges.change_id.desc()) + .first() + ) + migration_version = res.schema_version + migration_done.set() + + def _instrument_apply_update(*args): + """Control migration progress.""" + nonlocal migration_stall + migration_stall.wait() + real_apply_update(*args) + def _sometimes_failing_create_index(*args): """Make the first index create raise a retryable error to ensure we retry.""" if recorder_db_url.startswith("mysql://"): @@ -610,6 +407,7 @@ async def test_schema_migrate( real_create_index(*args) with ( + patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=_create_engine_test, @@ -619,11 +417,22 @@ async def test_schema_migrate( side_effect=_mock_setup_run, autospec=True, ) as setup_run, + patch( + "homeassistant.components.recorder.migration.migrate_schema", + wraps=_instrument_migrate_schema, + ), + patch( + "homeassistant.components.recorder.migration._apply_update", + wraps=_instrument_apply_update, + ) as apply_update_mock, patch("homeassistant.components.recorder.util.time.sleep"), patch( "homeassistant.components.recorder.migration._create_index", wraps=_sometimes_failing_create_index, ), + patch( + "homeassistant.components.recorder.Recorder._schedule_compile_missing_statistics", + ), patch( "homeassistant.components.recorder.Recorder._process_state_changed_event_into_session", ), @@ -634,23 +443,24 @@ async def test_schema_migrate( "homeassistant.components.recorder.Recorder._pre_process_startup_events", ), ): - await async_setup_recorder_instance( - hass, wait_recorder=False, wait_recorder_setup=live + recorder_helper.async_initialize_recorder(hass) + hass.async_create_task( + async_setup_component( + hass, "recorder", {"recorder": {"db_url": recorder_db_url}} + ) ) - await hass.async_add_executor_job(instrument_migration.migration_started.wait) - assert recorder.util.async_migration_in_progress(hass) is True await recorder_helper.async_wait_recorder(hass) assert recorder.util.async_migration_in_progress(hass) is True assert recorder.util.async_migration_is_live(hass) == live - instrument_migration.migration_stall.set() + migration_stall.set() await hass.async_block_till_done() - await hass.async_add_executor_job(instrument_migration.live_migration_done.wait) + await hass.async_add_executor_job(migration_done.wait) await 
async_wait_recording_done(hass) - assert instrument_migration.migration_version == db_schema.SCHEMA_VERSION + assert migration_version == db_schema.SCHEMA_VERSION assert setup_run.called assert recorder.util.async_migration_in_progress(hass) is not True - assert instrument_migration.apply_update_mock.called + assert apply_update_mock.called def test_invalid_update(hass: HomeAssistant) -> None: @@ -823,662 +633,3 @@ def test_raise_if_exception_missing_empty_cause_str() -> None: with pytest.raises(ProgrammingError): migration.raise_if_exception_missing_str(programming_exc, ["not present"]) - - -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -def test_rebuild_sqlite_states_table(recorder_db_url: str) -> None: - """Test that we can rebuild the states table in SQLite. - - This test is specific for SQLite. - """ - engine = create_engine(recorder_db_url) - session_maker = scoped_session(sessionmaker(bind=engine, future=True)) - with session_scope(session=session_maker()) as session: - db_schema.Base.metadata.create_all(engine) - with session_scope(session=session_maker()) as session: - session.add(States(state="on")) - session.commit() - - assert migration.rebuild_sqlite_table(session_maker, engine, States) is True - - with session_scope(session=session_maker()) as session: - assert session.query(States).count() == 1 - assert session.query(States).first().state == "on" - - engine.dispose() - - -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -def test_rebuild_sqlite_states_table_missing_fails( - recorder_db_url: str, caplog: pytest.LogCaptureFixture -) -> None: - """Test handling missing states table when attempting rebuild. - - This test is specific for SQLite. - """ - engine = create_engine(recorder_db_url) - session_maker = scoped_session(sessionmaker(bind=engine, future=True)) - with session_scope(session=session_maker()) as session: - db_schema.Base.metadata.create_all(engine) - - with session_scope(session=session_maker()) as session: - session.add(Events(event_type="state_changed", event_data="{}")) - session.connection().execute(text("DROP TABLE states")) - session.commit() - - assert migration.rebuild_sqlite_table(session_maker, engine, States) is False - assert "Error recreating SQLite table states" in caplog.text - caplog.clear() - - # Now rebuild the events table to make sure the database did not - # get corrupted - assert migration.rebuild_sqlite_table(session_maker, engine, Events) is True - - with session_scope(session=session_maker()) as session: - assert session.query(Events).count() == 1 - assert session.query(Events).first().event_type == "state_changed" - assert session.query(Events).first().event_data == "{}" - - engine.dispose() - - -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -def test_rebuild_sqlite_states_table_extra_columns( - recorder_db_url: str, caplog: pytest.LogCaptureFixture -) -> None: - """Test handling extra columns when rebuilding the states table. - - This test is specific for SQLite. 
- """ - engine = create_engine(recorder_db_url) - session_maker = scoped_session(sessionmaker(bind=engine, future=True)) - with session_scope(session=session_maker()) as session: - db_schema.Base.metadata.create_all(engine) - with session_scope(session=session_maker()) as session: - session.add(States(state="on")) - session.commit() - session.connection().execute( - text("ALTER TABLE states ADD COLUMN extra_column TEXT") - ) - - assert migration.rebuild_sqlite_table(session_maker, engine, States) is True - assert "Error recreating SQLite table states" not in caplog.text - - with session_scope(session=session_maker()) as session: - assert session.query(States).count() == 1 - assert session.query(States).first().state == "on" - - engine.dispose() - - -@pytest.mark.skip_on_db_engine(["sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") -def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: - """Test we can drop and then restore foreign keys. - - This is not supported on SQLite - """ - - constraints_to_recreate = ( - ("events", "data_id", "event_data", "data_id"), - ("states", "event_id", None, None), # This won't be found - ("states", "old_state_id", "states", "state_id"), - ) - - db_engine = recorder_db_url.partition("://")[0] - - expected_dropped_constraints = { - "mysql": [ - ( - "events", - "data_id", - { - "constrained_columns": ["data_id"], - "name": ANY, - "options": {}, - "referred_columns": ["data_id"], - "referred_schema": None, - "referred_table": "event_data", - }, - ), - ( - "states", - "old_state_id", - { - "constrained_columns": ["old_state_id"], - "name": ANY, - "options": {}, - "referred_columns": ["state_id"], - "referred_schema": None, - "referred_table": "states", - }, - ), - ], - "postgresql": [ - ( - "events", - "data_id", - { - "comment": None, - "constrained_columns": ["data_id"], - "name": "events_data_id_fkey", - "options": {}, - "referred_columns": ["data_id"], - "referred_schema": None, - "referred_table": "event_data", - }, - ), - ( - "states", - "old_state_id", - { - "comment": None, - "constrained_columns": ["old_state_id"], - "name": "states_old_state_id_fkey", - "options": {}, - "referred_columns": ["state_id"], - "referred_schema": None, - "referred_table": "states", - }, - ), - ], - } - - def find_constraints( - engine: Engine, table: str, column: str - ) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]: - inspector = inspect(engine) - return [ - (table, column, foreign_key) - for foreign_key in inspector.get_foreign_keys(table) - if foreign_key["name"] and foreign_key["constrained_columns"] == [column] - ] - - engine = create_engine(recorder_db_url) - db_schema.Base.metadata.create_all(engine) - - matching_constraints_1 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in find_constraints(engine, table, column) - ] - assert matching_constraints_1 == expected_dropped_constraints[db_engine] - - with Session(engine) as session: - session_maker = Mock(return_value=session) - for table, column, _, _ in constraints_to_recreate: - migration._drop_foreign_key_constraints( - session_maker, engine, table, column - ) - - # Check we don't find the constrained columns again (they are removed) - matching_constraints_2 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in find_constraints(engine, table, column) - ] - assert matching_constraints_2 == [] - - # Restore the constraints - with Session(engine) as session: - session_maker = 
Mock(return_value=session) - migration._restore_foreign_key_constraints( - session_maker, engine, constraints_to_recreate - ) - - # Check we do find the constrained columns again (they are restored) - matching_constraints_3 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in find_constraints(engine, table, column) - ] - assert matching_constraints_3 == expected_dropped_constraints[db_engine] - - engine.dispose() - - -@pytest.mark.skip_on_db_engine(["sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") -def test_restore_foreign_key_constraints_twice(recorder_db_url: str) -> None: - """Test we can drop and then restore foreign keys. - - This is not supported on SQLite - """ - - constraints_to_recreate = ( - ("events", "data_id", "event_data", "data_id"), - ("states", "event_id", None, None), # This won't be found - ("states", "old_state_id", "states", "state_id"), - ) - - db_engine = recorder_db_url.partition("://")[0] - - expected_dropped_constraints = { - "mysql": [ - ( - "events", - "data_id", - { - "constrained_columns": ["data_id"], - "name": ANY, - "options": {}, - "referred_columns": ["data_id"], - "referred_schema": None, - "referred_table": "event_data", - }, - ), - ( - "states", - "old_state_id", - { - "constrained_columns": ["old_state_id"], - "name": ANY, - "options": {}, - "referred_columns": ["state_id"], - "referred_schema": None, - "referred_table": "states", - }, - ), - ], - "postgresql": [ - ( - "events", - "data_id", - { - "comment": None, - "constrained_columns": ["data_id"], - "name": "events_data_id_fkey", - "options": {}, - "referred_columns": ["data_id"], - "referred_schema": None, - "referred_table": "event_data", - }, - ), - ( - "states", - "old_state_id", - { - "comment": None, - "constrained_columns": ["old_state_id"], - "name": "states_old_state_id_fkey", - "options": {}, - "referred_columns": ["state_id"], - "referred_schema": None, - "referred_table": "states", - }, - ), - ], - } - - def find_constraints( - engine: Engine, table: str, column: str - ) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]: - inspector = inspect(engine) - return [ - (table, column, foreign_key) - for foreign_key in inspector.get_foreign_keys(table) - if foreign_key["name"] and foreign_key["constrained_columns"] == [column] - ] - - engine = create_engine(recorder_db_url) - db_schema.Base.metadata.create_all(engine) - - matching_constraints_1 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in find_constraints(engine, table, column) - ] - assert matching_constraints_1 == expected_dropped_constraints[db_engine] - - with Session(engine) as session: - session_maker = Mock(return_value=session) - for table, column, _, _ in constraints_to_recreate: - migration._drop_foreign_key_constraints( - session_maker, engine, table, column - ) - - # Check we don't find the constrained columns again (they are removed) - matching_constraints_2 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in find_constraints(engine, table, column) - ] - assert matching_constraints_2 == [] - - # Restore the constraints - with Session(engine) as session: - session_maker = Mock(return_value=session) - migration._restore_foreign_key_constraints( - session_maker, engine, constraints_to_recreate - ) - - # Restore the constraints again - with Session(engine) as session: - session_maker = Mock(return_value=session) - 
migration._restore_foreign_key_constraints( - session_maker, engine, constraints_to_recreate - ) - - # Check we do find a single the constrained columns again (they are restored - # only once, even though we called _restore_foreign_key_constraints twice) - matching_constraints_3 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in find_constraints(engine, table, column) - ] - assert matching_constraints_3 == expected_dropped_constraints[db_engine] - - engine.dispose() - - -@pytest.mark.skip_on_db_engine(["sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") -def test_drop_duplicated_foreign_key_constraints(recorder_db_url: str) -> None: - """Test we can drop and then restore foreign keys. - - This is not supported on SQLite - """ - - constraints_to_recreate = ( - ("events", "data_id", "event_data", "data_id"), - ("states", "event_id", None, None), # This won't be found - ("states", "old_state_id", "states", "state_id"), - ) - - db_engine = recorder_db_url.partition("://")[0] - - expected_dropped_constraints = { - "mysql": [ - ( - "events", - "data_id", - { - "constrained_columns": ["data_id"], - "name": ANY, - "options": {}, - "referred_columns": ["data_id"], - "referred_schema": None, - "referred_table": "event_data", - }, - ), - ( - "states", - "old_state_id", - { - "constrained_columns": ["old_state_id"], - "name": ANY, - "options": {}, - "referred_columns": ["state_id"], - "referred_schema": None, - "referred_table": "states", - }, - ), - ], - "postgresql": [ - ( - "events", - "data_id", - { - "comment": None, - "constrained_columns": ["data_id"], - "name": ANY, - "options": {}, - "referred_columns": ["data_id"], - "referred_schema": None, - "referred_table": "event_data", - }, - ), - ( - "states", - "old_state_id", - { - "comment": None, - "constrained_columns": ["old_state_id"], - "name": ANY, - "options": {}, - "referred_columns": ["state_id"], - "referred_schema": None, - "referred_table": "states", - }, - ), - ], - } - - def find_constraints( - engine: Engine, table: str, column: str - ) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]: - inspector = inspect(engine) - return [ - (table, column, foreign_key) - for foreign_key in inspector.get_foreign_keys(table) - if foreign_key["name"] and foreign_key["constrained_columns"] == [column] - ] - - engine = create_engine(recorder_db_url) - db_schema.Base.metadata.create_all(engine) - - # Create a duplicate of the constraints - inspector = Mock(name="inspector") - inspector.get_foreign_keys = Mock(name="get_foreign_keys", return_value=[]) - with ( - patch( - "homeassistant.components.recorder.migration.sqlalchemy.inspect", - return_value=inspector, - ), - Session(engine) as session, - ): - session_maker = Mock(return_value=session) - migration._restore_foreign_key_constraints( - session_maker, engine, constraints_to_recreate - ) - - matching_constraints_1 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in find_constraints(engine, table, column) - ] - _expected_dropped_constraints = [ - _dropped_constraint - for dropped_constraint in expected_dropped_constraints[db_engine] - for _dropped_constraint in (dropped_constraint, dropped_constraint) - ] - assert matching_constraints_1 == _expected_dropped_constraints - - with Session(engine) as session: - session_maker = Mock(return_value=session) - for table, column, _, _ in constraints_to_recreate: - migration._drop_foreign_key_constraints( - session_maker, engine, table, 
column - ) - - # Check we don't find the constrained columns again (they are removed) - matching_constraints_2 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in find_constraints(engine, table, column) - ] - assert matching_constraints_2 == [] - - # Restore the constraints - with Session(engine) as session: - session_maker = Mock(return_value=session) - migration._restore_foreign_key_constraints( - session_maker, engine, constraints_to_recreate - ) - - # Check we do find a single the constrained columns again (they are restored - # only once, even though we called _restore_foreign_key_constraints twice) - matching_constraints_3 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in find_constraints(engine, table, column) - ] - assert matching_constraints_3 == expected_dropped_constraints[db_engine] - - engine.dispose() - - -def test_restore_foreign_key_constraints_with_error( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test we can drop and then restore foreign keys. - - This is not supported on SQLite - """ - - constraints_to_restore = [("events", "data_id", "event_data", "data_id")] - - connection = Mock() - connection.execute = Mock(side_effect=InternalError(None, None, None)) - session = Mock() - session.connection = Mock(return_value=connection) - instance = Mock() - instance.get_session = Mock(return_value=session) - engine = Mock() - inspector = Mock(name="inspector") - inspector.get_foreign_keys = Mock(name="get_foreign_keys", return_value=[]) - engine._sa_instance_state = inspector - - session_maker = Mock(return_value=session) - with pytest.raises(InternalError): - migration._restore_foreign_key_constraints( - session_maker, engine, constraints_to_restore - ) - - assert "Could not update foreign options in events table" in caplog.text - - -@pytest.mark.skip_on_db_engine(["sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") -def test_restore_foreign_key_constraints_with_integrity_error( - recorder_db_url: str, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test we can drop and then restore foreign keys. 
- - This is not supported on SQLite - """ - - constraints = ( - ("events", "data_id", "event_data", "data_id", Events), - ("states", "old_state_id", "states", "state_id", States), - ) - - engine = create_engine(recorder_db_url) - db_schema.Base.metadata.create_all(engine) - - # Drop constraints - with Session(engine) as session: - session_maker = Mock(return_value=session) - for table, column, _, _, _ in constraints: - migration._drop_foreign_key_constraints( - session_maker, engine, table, column - ) - - # Add rows violating the constraints - with Session(engine) as session: - for _, column, _, _, table_class in constraints: - session.add(table_class(**{column: 123})) - session.add(table_class()) - # Insert a States row referencing the row with an invalid foreign reference - session.add(States(old_state_id=1)) - session.commit() - - # Check we could insert the rows - with Session(engine) as session: - assert session.query(Events).count() == 2 - assert session.query(States).count() == 3 - - # Restore constraints - to_restore = [ - (table, column, foreign_table, foreign_column) - for table, column, foreign_table, foreign_column, _ in constraints - ] - with Session(engine) as session: - session_maker = Mock(return_value=session) - migration._restore_foreign_key_constraints(session_maker, engine, to_restore) - - # Check the violating row has been deleted from the Events table - with Session(engine) as session: - assert session.query(Events).count() == 1 - assert session.query(States).count() == 3 - - engine.dispose() - - assert ( - "Could not update foreign options in events table, " - "will delete violations and try again" - ) in caplog.text - - -def test_delete_foreign_key_violations_unsupported_engine( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test calling _delete_foreign_key_violations with an unsupported engine.""" - - connection = Mock() - connection.execute = Mock(side_effect=InternalError(None, None, None)) - session = Mock() - session.connection = Mock(return_value=connection) - instance = Mock() - instance.get_session = Mock(return_value=session) - engine = Mock() - engine.dialect = Mock() - engine.dialect.name = "sqlite" - - session_maker = Mock(return_value=session) - with pytest.raises( - RuntimeError, match="_delete_foreign_key_violations not supported for sqlite" - ): - migration._delete_foreign_key_violations(session_maker, engine, "", "", "", "") - - -def test_drop_foreign_key_constraints_unsupported_engine( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test calling _drop_foreign_key_constraints with an unsupported engine.""" - - connection = Mock() - connection.execute = Mock(side_effect=InternalError(None, None, None)) - session = Mock() - session.connection = Mock(return_value=connection) - instance = Mock() - instance.get_session = Mock(return_value=session) - engine = Mock() - engine.dialect = Mock() - engine.dialect.name = "sqlite" - - session_maker = Mock(return_value=session) - with pytest.raises( - RuntimeError, match="_drop_foreign_key_constraints not supported for sqlite" - ): - migration._drop_foreign_key_constraints(session_maker, engine, "", "") - - -def test_update_states_table_with_foreign_key_options_unsupported_engine( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test calling function with an unsupported engine. - - This tests _update_states_table_with_foreign_key_options. 
- """ - - connection = Mock() - connection.execute = Mock(side_effect=InternalError(None, None, None)) - session = Mock() - session.connection = Mock(return_value=connection) - instance = Mock() - instance.get_session = Mock(return_value=session) - engine = Mock() - engine.dialect = Mock() - engine.dialect.name = "sqlite" - - session_maker = Mock(return_value=session) - with pytest.raises( - RuntimeError, - match="_update_states_table_with_foreign_key_options not supported for sqlite", - ): - migration._update_states_table_with_foreign_key_options(session_maker, engine) diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index dcf2d792407..8fda495cf60 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -3,7 +3,6 @@ import datetime import importlib import sys -import threading from typing import Any from unittest.mock import patch import uuid @@ -13,6 +12,7 @@ import pytest from sqlalchemy import create_engine, inspect from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session +from typing_extensions import AsyncGenerator from homeassistant.components import recorder from homeassistant.components.recorder import ( @@ -25,7 +25,6 @@ from homeassistant.components.recorder import ( from homeassistant.components.recorder.db_schema import ( Events, EventTypes, - MigrationChanges, States, StatesMeta, ) @@ -34,9 +33,15 @@ from homeassistant.components.recorder.queries import ( get_migration_changes, select_event_type_ids, ) +from homeassistant.components.recorder.tasks import ( + EntityIDMigrationTask, + EntityIDPostMigrationTask, + EventsContextIDMigrationTask, + EventTypeIDMigrationTask, + StatesContextIDMigrationTask, +) from homeassistant.components.recorder.util import ( execute_stmt_lambda_element, - get_index_by_name, session_scope, ) from homeassistant.core import HomeAssistant @@ -44,27 +49,17 @@ import homeassistant.util.dt as dt_util from homeassistant.util.ulid import bytes_to_ulid, ulid_at_time, ulid_to_bytes from .common import ( - MockMigrationTask, async_attach_db_engine, async_recorder_block_till_done, async_wait_recording_done, ) -from .conftest import instrument_migration -from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" SCHEMA_MODULE = "tests.components.recorder.db_schema_32" -@pytest.fixture -async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, -) -> None: - """Set up recorder.""" - - async def _async_wait_migration_done(hass: HomeAssistant) -> None: """Wait for the migration to be done.""" await recorder.get_instance(hass).async_block_till_done() @@ -98,7 +93,7 @@ def _create_engine_test(*args, **kwargs): return engine -@pytest.fixture +@pytest.fixture(autouse=True) def db_schema_32(): """Fixture to initialize the db with the old schema.""" importlib.import_module(SCHEMA_MODULE) @@ -106,27 +101,37 @@ def db_schema_32(): with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), - patch.object(migration, "non_live_data_migration_needed", return_value=False), + patch.object( + recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION + ), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", 
old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", MockMigrationTask), + patch.object(migration.EntityIDMigration, "task", core.RecorderTask), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), ): yield -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +@pytest.fixture(name="legacy_recorder_mock") +async def legacy_recorder_mock_fixture( + recorder_mock: Recorder, +) -> AsyncGenerator[Recorder]: + """Fixture for legacy recorder mock.""" + with patch.object(recorder_mock.states_meta_manager, "active", False): + yield recorder_mock + + +@pytest.mark.parametrize("enable_migrate_context_ids", [True]) async def test_migrate_events_context_ids( - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" + instance = await async_setup_recorder_instance(hass) + await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -219,28 +224,17 @@ async def test_migrate_events_context_ids( ) ) - # Create database with old schema - with ( - patch.object(recorder, "db_schema", old_db_schema), - patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), - patch.object(migration.EventsContextIDMigration, "migrate_data"), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - await instance.async_add_executor_job(_insert_events) + await instance.async_add_executor_job(_insert_events) - await async_wait_recording_done(hass) - now = dt_util.utcnow() - expected_ulid_fallback_start = ulid_to_bytes(ulid_at_time(now.timestamp()))[ - 0:6 - ] - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + now = dt_util.utcnow() + expected_ulid_fallback_start = ulid_to_bytes(ulid_at_time(now.timestamp()))[0:6] + await _async_wait_migration_done(hass) - await hass.async_stop() - await hass.async_block_till_done() + with freeze_time(now): + # This is a threadsafe way to add a task to the recorder + instance.queue_task(EventsContextIDMigrationTask()) + await _async_wait_migration_done(hass) def _object_as_dict(obj): return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs} @@ -266,38 +260,7 @@ async def test_migrate_events_context_ids( assert len(events) == 6 return {event.event_type: _object_as_dict(event) for event in events} - # Run again with new schema, let migration run - async with async_test_home_assistant() as hass: - with freeze_time(now), instrument_migration(hass) as instrumented_migration: - async with async_test_recorder( - hass, wait_recorder=False, wait_recorder_setup=False - ) as instance: - # Check the context ID migrator is considered non-live - assert recorder.util.async_migration_is_live(hass) is False - instrumented_migration.migration_stall.set() - instance.recorder_and_worker_thread_ids.add(threading.get_ident()) - - await hass.async_block_till_done() - await async_wait_recording_done(hass) - await 
async_wait_recording_done(hass) - - events_by_type = await instance.async_add_executor_job( - _fetch_migrated_events - ) - - migration_changes = await instance.async_add_executor_job( - _get_migration_id, hass - ) - - # Check the index which will be removed by the migrator no longer exists - with session_scope(hass=hass) as session: - assert ( - get_index_by_name(session, "events", "ix_events_context_id") - is None - ) - - await hass.async_stop() - await hass.async_block_till_done() + events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) old_uuid_context_id_event = events_by_type["old_uuid_context_id_event"] assert old_uuid_context_id_event["context_id"] is None @@ -368,127 +331,20 @@ async def test_migrate_events_context_ids( event_with_garbage_context_id_no_time_fired_ts["context_parent_id_bin"] is None ) + migration_changes = await instance.async_add_executor_job(_get_migration_id, hass) assert ( migration_changes[migration.EventsContextIDMigration.migration_id] == migration.EventsContextIDMigration.migration_version ) -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage -async def test_finish_migrate_events_context_ids( - async_test_recorder: RecorderInstanceGenerator, -) -> None: - """Test we re migrate old uuid context ids and ulid context ids to binary format. - - Before PR https://github.com/home-assistant/core/pull/125214, the migrator would - mark the migration as done before ensuring unused indices were dropped. This - test makes sure we drop the unused indices. - """ - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] - - def _insert_migration(): - with session_scope(hass=hass) as session: - session.merge( - MigrationChanges( - migration_id=migration.EventsContextIDMigration.migration_id, - version=1, - ) - ) - - # Create database with old schema - with ( - patch.object(recorder, "db_schema", old_db_schema), - patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), - patch.object(migration.EventsContextIDMigration, "migrate_data"), - patch.object( - migration.EventIDPostMigration, - "needs_migrate_impl", - return_value=migration.DataMigrationStatus( - needs_migrate=False, migration_done=True - ), - ), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - instance.recorder_and_worker_thread_ids.add(threading.get_ident()) - - await hass.async_block_till_done() - await async_wait_recording_done(hass) - - # Check the index which will be removed by the migrator exists - with session_scope(hass=hass) as session: - assert get_index_by_name(session, "events", "ix_events_context_id") - - await hass.async_stop() - await hass.async_block_till_done() - - # Run once with new schema, fake migration did not complete - with ( - patch.object(migration.EventsContextIDMigration, "migrate_data"), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - instance.recorder_and_worker_thread_ids.add(threading.get_ident()) - - await hass.async_block_till_done() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) - - # Fake migration ran with old version - await instance.async_add_executor_job(_insert_migration) - await 
async_wait_recording_done(hass) - - # Check the index which will be removed by the migrator exists - with session_scope(hass=hass) as session: - assert get_index_by_name(session, "events", "ix_events_context_id") - - await hass.async_stop() - await hass.async_block_till_done() - - # Run again with new schema, let migration complete - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - instance.recorder_and_worker_thread_ids.add(threading.get_ident()) - - await hass.async_block_till_done() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) - - migration_changes = await instance.async_add_executor_job( - _get_migration_id, hass - ) - # Check migration ran again - assert ( - migration_changes[migration.EventsContextIDMigration.migration_id] - == migration.EventsContextIDMigration.migration_version - ) - - # Check the index which will be removed by the migrator no longer exists - with session_scope(hass=hass) as session: - assert get_index_by_name(session, "events", "ix_events_context_id") is None - - await hass.async_stop() - await hass.async_block_till_done() - - -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +@pytest.mark.parametrize("enable_migrate_context_ids", [True]) async def test_migrate_states_context_ids( - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" + instance = await async_setup_recorder_instance(hass) + await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -563,24 +419,11 @@ async def test_migrate_states_context_ids( ) ) - # Create database with old schema - with ( - patch.object(recorder, "db_schema", old_db_schema), - patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), - patch.object(migration.StatesContextIDMigration, "migrate_data"), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - await instance.async_add_executor_job(_insert_states) + await instance.async_add_executor_job(_insert_states) - await async_wait_recording_done(hass) - await _async_wait_migration_done(hass) - - await hass.async_stop() - await hass.async_block_till_done() + await async_wait_recording_done(hass) + instance.queue_task(StatesContextIDMigrationTask()) + await _async_wait_migration_done(hass) def _object_as_dict(obj): return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs} @@ -606,38 +449,7 @@ async def test_migrate_states_context_ids( assert len(events) == 6 return {state.entity_id: _object_as_dict(state) for state in events} - # Run again with new schema, let migration run - async with async_test_home_assistant() as hass: - with instrument_migration(hass) as instrumented_migration: - async with async_test_recorder( - hass, wait_recorder=False, wait_recorder_setup=False - ) as instance: - # Check the context ID migrator is considered non-live - assert recorder.util.async_migration_is_live(hass) is False - instrumented_migration.migration_stall.set() - instance.recorder_and_worker_thread_ids.add(threading.get_ident()) - - await hass.async_block_till_done() - await 
async_wait_recording_done(hass) - await async_wait_recording_done(hass) - - states_by_entity_id = await instance.async_add_executor_job( - _fetch_migrated_states - ) - - migration_changes = await instance.async_add_executor_job( - _get_migration_id, hass - ) - - # Check the index which will be removed by the migrator no longer exists - with session_scope(hass=hass) as session: - assert ( - get_index_by_name(session, "states", "ix_states_context_id") - is None - ) - - await hass.async_stop() - await hass.async_block_till_done() + states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states) old_uuid_context_id = states_by_entity_id["state.old_uuid_context_id"] assert old_uuid_context_id["context_id"] is None @@ -712,126 +524,19 @@ async def test_migrate_states_context_ids( == b"\n\xe2\x97\x99\xeeNOE\x81\x16\xf5\x82\xd7\xd3\xeee" ) + migration_changes = await instance.async_add_executor_job(_get_migration_id, hass) assert ( migration_changes[migration.StatesContextIDMigration.migration_id] == migration.StatesContextIDMigration.migration_version ) -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage -async def test_finish_migrate_states_context_ids( - async_test_recorder: RecorderInstanceGenerator, -) -> None: - """Test we re migrate old uuid context ids and ulid context ids to binary format. - - Before PR https://github.com/home-assistant/core/pull/125214, the migrator would - mark the migration as done before ensuring unused indices were dropped. This - test makes sure we drop the unused indices. - """ - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] - - def _insert_migration(): - with session_scope(hass=hass) as session: - session.merge( - MigrationChanges( - migration_id=migration.StatesContextIDMigration.migration_id, - version=1, - ) - ) - - # Create database with old schema - with ( - patch.object(recorder, "db_schema", old_db_schema), - patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), - patch.object(migration.StatesContextIDMigration, "migrate_data"), - patch.object( - migration.EventIDPostMigration, - "needs_migrate_impl", - return_value=migration.DataMigrationStatus( - needs_migrate=False, migration_done=True - ), - ), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - instance.recorder_and_worker_thread_ids.add(threading.get_ident()) - - await hass.async_block_till_done() - await async_wait_recording_done(hass) - - # Check the index which will be removed by the migrator exists - with session_scope(hass=hass) as session: - assert get_index_by_name(session, "states", "ix_states_context_id") - - await hass.async_stop() - await hass.async_block_till_done() - - # Run once with new schema, fake migration did not complete - with ( - patch.object(migration.StatesContextIDMigration, "migrate_data"), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - instance.recorder_and_worker_thread_ids.add(threading.get_ident()) - - await hass.async_block_till_done() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) - - # Fake migration ran with old version - await instance.async_add_executor_job(_insert_migration) - 
await async_wait_recording_done(hass) - - # Check the index which will be removed by the migrator exists - with session_scope(hass=hass) as session: - assert get_index_by_name(session, "states", "ix_states_context_id") - - await hass.async_stop() - await hass.async_block_till_done() - - # Run again with new schema, let migration complete - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - instance.recorder_and_worker_thread_ids.add(threading.get_ident()) - - await hass.async_block_till_done() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) - - migration_changes = await instance.async_add_executor_job( - _get_migration_id, hass - ) - # Check migration ran again - assert ( - migration_changes[migration.StatesContextIDMigration.migration_id] - == migration.StatesContextIDMigration.migration_version - ) - - # Check the index which will be removed by the migrator no longer exists - with session_scope(hass=hass) as session: - assert get_index_by_name(session, "states", "ix_states_context_id") is None - - await hass.async_stop() - await hass.async_block_till_done() - - @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) -@pytest.mark.usefixtures("db_schema_32") async def test_migrate_event_type_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test we can migrate event_types to the EventTypes table.""" + instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -858,13 +563,11 @@ async def test_migrate_event_type_ids( ) ) - await recorder_mock.async_add_executor_job(_insert_events) + await instance.async_add_executor_job(_insert_events) await async_wait_recording_done(hass) # This is a threadsafe way to add a task to the recorder - migrator = migration.EventTypeIDMigration(None, None) - recorder_mock.queue_task(migrator.task(migrator)) - await _async_wait_migration_done(hass) + instance.queue_task(EventTypeIDMigrationTask()) await _async_wait_migration_done(hass) def _fetch_migrated_events(): @@ -896,23 +599,21 @@ async def test_migrate_event_type_ids( ) return result - events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) + events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) assert len(events_by_type["event_type_one"]) == 2 assert len(events_by_type["event_type_two"]) == 1 def _get_many(): with session_scope(hass=hass, read_only=True) as session: - return recorder_mock.event_type_manager.get_many( + return instance.event_type_manager.get_many( ("event_type_one", "event_type_two"), session ) - mapped = await recorder_mock.async_add_executor_job(_get_many) + mapped = await instance.async_add_executor_job(_get_many) assert mapped["event_type_one"] is not None assert mapped["event_type_two"] is not None - migration_changes = await recorder_mock.async_add_executor_job( - _get_migration_id, hass - ) + migration_changes = await instance.async_add_executor_job(_get_migration_id, hass) assert ( migration_changes[migration.EventTypeIDMigration.migration_id] == migration.EventTypeIDMigration.migration_version @@ -920,9 +621,11 @@ async def test_migrate_event_type_ids( @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) -@pytest.mark.usefixtures("db_schema_32") -async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: 
Recorder) -> None: +async def test_migrate_entity_ids( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" + instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -949,13 +652,11 @@ async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) ) ) - await recorder_mock.async_add_executor_job(_insert_states) + await instance.async_add_executor_job(_insert_states) await _async_wait_migration_done(hass) # This is a threadsafe way to add a task to the recorder - migrator = migration.EntityIDMigration(old_db_schema.SCHEMA_VERSION, {}) - recorder_mock.queue_task(migration.CommitBeforeMigrationTask(migrator)) - await _async_wait_migration_done(hass) + instance.queue_task(EntityIDMigrationTask()) await _async_wait_migration_done(hass) def _fetch_migrated_states(): @@ -982,15 +683,11 @@ async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) ) return result - states_by_entity_id = await recorder_mock.async_add_executor_job( - _fetch_migrated_states - ) + states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states) assert len(states_by_entity_id["sensor.two"]) == 2 assert len(states_by_entity_id["sensor.one"]) == 1 - migration_changes = await recorder_mock.async_add_executor_job( - _get_migration_id, hass - ) + migration_changes = await instance.async_add_executor_job(_get_migration_id, hass) assert ( migration_changes[migration.EntityIDMigration.migration_id] == migration.EntityIDMigration.migration_version @@ -998,11 +695,11 @@ async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) -@pytest.mark.usefixtures("db_schema_32") async def test_post_migrate_entity_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" + instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -1029,13 +726,11 @@ async def test_post_migrate_entity_ids( ) ) - await recorder_mock.async_add_executor_job(_insert_events) + await instance.async_add_executor_job(_insert_events) await _async_wait_migration_done(hass) # This is a threadsafe way to add a task to the recorder - migrator = migration.EntityIDPostMigration(None, None) - recorder_mock.queue_task(migrator.task(migrator)) - await _async_wait_migration_done(hass) + instance.queue_task(EntityIDPostMigrationTask()) await _async_wait_migration_done(hass) def _fetch_migrated_states(): @@ -1047,18 +742,18 @@ async def test_post_migrate_entity_ids( assert len(states) == 3 return {state.state: state.entity_id for state in states} - states_by_state = await recorder_mock.async_add_executor_job(_fetch_migrated_states) + states_by_state = await instance.async_add_executor_job(_fetch_migrated_states) assert states_by_state["one_1"] is None assert states_by_state["two_2"] is None assert states_by_state["two_1"] is None @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) -@pytest.mark.usefixtures("db_schema_32") async def test_migrate_null_entity_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: 
RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" + instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -1088,13 +783,11 @@ async def test_migrate_null_entity_ids( ), ) - await recorder_mock.async_add_executor_job(_insert_states) + await instance.async_add_executor_job(_insert_states) await _async_wait_migration_done(hass) # This is a threadsafe way to add a task to the recorder - migrator = migration.EntityIDMigration(old_db_schema.SCHEMA_VERSION, {}) - recorder_mock.queue_task(migration.CommitBeforeMigrationTask(migrator)) - await _async_wait_migration_done(hass) + instance.queue_task(EntityIDMigrationTask()) await _async_wait_migration_done(hass) def _fetch_migrated_states(): @@ -1121,9 +814,7 @@ async def test_migrate_null_entity_ids( ) return result - states_by_entity_id = await recorder_mock.async_add_executor_job( - _fetch_migrated_states - ) + states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states) assert len(states_by_entity_id[migration._EMPTY_ENTITY_ID]) == 1000 assert len(states_by_entity_id["sensor.one"]) == 2 @@ -1131,7 +822,7 @@ async def test_migrate_null_entity_ids( with session_scope(hass=hass, read_only=True) as session: return dict(execute_stmt_lambda_element(session, get_migration_changes())) - migration_changes = await recorder_mock.async_add_executor_job(_get_migration_id) + migration_changes = await instance.async_add_executor_job(_get_migration_id) assert ( migration_changes[migration.EntityIDMigration.migration_id] == migration.EntityIDMigration.migration_version @@ -1139,11 +830,11 @@ async def test_migrate_null_entity_ids( @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) -@pytest.mark.usefixtures("db_schema_32") async def test_migrate_null_event_type_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test we can migrate event_types to the EventTypes table when the event_type is NULL.""" + instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -1173,13 +864,11 @@ async def test_migrate_null_event_type_ids( ), ) - await recorder_mock.async_add_executor_job(_insert_events) + await instance.async_add_executor_job(_insert_events) await _async_wait_migration_done(hass) # This is a threadsafe way to add a task to the recorder - migrator = migration.EventTypeIDMigration(None, None) - recorder_mock.queue_task(migrator.task(migrator)) - await _async_wait_migration_done(hass) + instance.queue_task(EventTypeIDMigrationTask()) await _async_wait_migration_done(hass) def _fetch_migrated_events(): @@ -1211,7 +900,7 @@ async def test_migrate_null_event_type_ids( ) return result - events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) + events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) assert len(events_by_type["event_type_one"]) == 2 assert len(events_by_type[migration._EMPTY_EVENT_TYPE]) == 1000 @@ -1219,18 +908,19 @@ async def test_migrate_null_event_type_ids( with session_scope(hass=hass, read_only=True) as session: return dict(execute_stmt_lambda_element(session, get_migration_changes())) - migration_changes = await recorder_mock.async_add_executor_job(_get_migration_id) 
+ migration_changes = await instance.async_add_executor_job(_get_migration_id) assert ( migration_changes[migration.EventTypeIDMigration.migration_id] == migration.EventTypeIDMigration.migration_version ) -@pytest.mark.usefixtures("db_schema_32") async def test_stats_timestamp_conversion_is_reentrant( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, ) -> None: """Test stats migration is reentrant.""" + instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) await async_attach_db_engine(hass) importlib.import_module(SCHEMA_MODULE) @@ -1242,7 +932,7 @@ async def test_stats_timestamp_conversion_is_reentrant( def _do_migration(): migration._migrate_statistics_columns_to_timestamp_removing_duplicates( - hass, recorder_mock, recorder_mock.get_session, recorder_mock.engine + hass, instance, instance.get_session, instance.engine ) def _insert_fake_metadata(): @@ -1259,7 +949,7 @@ async def test_stats_timestamp_conversion_is_reentrant( ) ) - def _insert_pre_timestamp_stat(date_time: datetime.datetime) -> None: + def _insert_pre_timestamp_stat(date_time: datetime) -> None: with session_scope(hass=hass) as session: session.add( old_db_schema.StatisticsShortTerm( @@ -1274,7 +964,7 @@ async def test_stats_timestamp_conversion_is_reentrant( ) ) - def _insert_post_timestamp_stat(date_time: datetime.datetime) -> None: + def _insert_post_timestamp_stat(date_time: datetime) -> None: with session_scope(hass=hass) as session: session.add( db_schema.StatisticsShortTerm( @@ -1379,11 +1069,12 @@ async def test_stats_timestamp_conversion_is_reentrant( ] -@pytest.mark.usefixtures("db_schema_32") async def test_stats_timestamp_with_one_by_one( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, ) -> None: """Test stats migration with one by one.""" + instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) await async_attach_db_engine(hass) importlib.import_module(SCHEMA_MODULE) @@ -1400,7 +1091,7 @@ async def test_stats_timestamp_with_one_by_one( side_effect=IntegrityError("test", "test", "test"), ): migration._migrate_statistics_columns_to_timestamp_removing_duplicates( - hass, recorder_mock, recorder_mock.get_session, recorder_mock.engine + hass, instance, instance.get_session, instance.engine ) def _insert_fake_metadata(): @@ -1417,7 +1108,7 @@ async def test_stats_timestamp_with_one_by_one( ) ) - def _insert_pre_timestamp_stat(date_time: datetime.datetime) -> None: + def _insert_pre_timestamp_stat(date_time: datetime) -> None: with session_scope(hass=hass) as session: session.add_all( ( @@ -1444,7 +1135,7 @@ async def test_stats_timestamp_with_one_by_one( ) ) - def _insert_post_timestamp_stat(date_time: datetime.datetime) -> None: + def _insert_post_timestamp_stat(date_time: datetime) -> None: with session_scope(hass=hass) as session: session.add_all( ( @@ -1599,11 +1290,12 @@ async def test_stats_timestamp_with_one_by_one( ] -@pytest.mark.usefixtures("db_schema_32") async def test_stats_timestamp_with_one_by_one_removes_duplicates( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, ) -> None: """Test stats migration with one by one removes duplicates.""" + instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) await async_attach_db_engine(hass) 
importlib.import_module(SCHEMA_MODULE) @@ -1627,7 +1319,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( ), ): migration._migrate_statistics_columns_to_timestamp_removing_duplicates( - hass, recorder_mock, recorder_mock.get_session, recorder_mock.engine + hass, instance, instance.get_session, instance.engine ) def _insert_fake_metadata(): @@ -1644,7 +1336,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( ) ) - def _insert_pre_timestamp_stat(date_time: datetime.datetime) -> None: + def _insert_pre_timestamp_stat(date_time: datetime) -> None: with session_scope(hass=hass) as session: session.add_all( ( @@ -1671,7 +1363,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( ) ) - def _insert_post_timestamp_stat(date_time: datetime.datetime) -> None: + def _insert_post_timestamp_stat(date_time: datetime) -> None: with session_scope(hass=hass) as session: session.add_all( ( @@ -1794,159 +1486,3 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "sum": None, }, ] - - -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage -async def test_migrate_times( - async_test_recorder: RecorderInstanceGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test we can migrate times in the statistics tables.""" - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] - now = dt_util.utcnow() - now_timestamp = now.timestamp() - - statistics_kwargs = { - "created": now, - "mean": 0, - "metadata_id": 1, - "min": 0, - "max": 0, - "last_reset": now, - "start": now, - "state": 0, - "sum": 0, - } - mock_metadata = old_db_schema.StatisticMetaData( - has_mean=False, - has_sum=False, - name="Test", - source="sensor", - statistic_id="sensor.test", - unit_of_measurement="cats", - ) - number_of_migrations = 5 - - def _get_index_names(table): - with session_scope(hass=hass) as session: - return inspect(session.connection()).get_indexes(table) - - with ( - patch.object(recorder, "db_schema", old_db_schema), - patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), - patch.object(migration, "non_live_data_migration_needed", return_value=False), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - await hass.async_block_till_done() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) - - def _add_data(): - with session_scope(hass=hass) as session: - session.add(old_db_schema.StatisticsMeta.from_meta(mock_metadata)) - with session_scope(hass=hass) as session: - session.add(old_db_schema.Statistics(**statistics_kwargs)) - session.add(old_db_schema.StatisticsShortTerm(**statistics_kwargs)) - - await instance.async_add_executor_job(_add_data) - await hass.async_block_till_done() - await instance.async_block_till_done() - - statistics_indexes = await instance.async_add_executor_job( - _get_index_names, "statistics" - ) - statistics_short_term_indexes = await instance.async_add_executor_job( - _get_index_names, "statistics_short_term" - ) - statistics_index_names = {index["name"] for index in statistics_indexes} - statistics_short_term_index_names = { - index["name"] for index in statistics_short_term_indexes - } - - await hass.async_stop() - await hass.async_block_till_done() - - assert "ix_statistics_statistic_id_start" in statistics_index_names - assert ( - 
"ix_statistics_short_term_statistic_id_start" - in statistics_short_term_index_names - ) - - # Test that the times are migrated during migration from schema 32 - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - await hass.async_block_till_done() - - # We need to wait for all the migration tasks to complete - # before we can check the database. - for _ in range(number_of_migrations): - await instance.async_block_till_done() - await async_wait_recording_done(hass) - - def _get_test_data_from_db(): - with session_scope(hass=hass) as session: - statistics_result = list( - session.query(recorder.db_schema.Statistics) - .join( - recorder.db_schema.StatisticsMeta, - recorder.db_schema.Statistics.metadata_id - == recorder.db_schema.StatisticsMeta.id, - ) - .where( - recorder.db_schema.StatisticsMeta.statistic_id == "sensor.test" - ) - ) - statistics_short_term_result = list( - session.query(recorder.db_schema.StatisticsShortTerm) - .join( - recorder.db_schema.StatisticsMeta, - recorder.db_schema.StatisticsShortTerm.metadata_id - == recorder.db_schema.StatisticsMeta.id, - ) - .where( - recorder.db_schema.StatisticsMeta.statistic_id == "sensor.test" - ) - ) - session.expunge_all() - return statistics_result, statistics_short_term_result - - ( - statistics_result, - statistics_short_term_result, - ) = await instance.async_add_executor_job(_get_test_data_from_db) - - for results in (statistics_result, statistics_short_term_result): - assert len(results) == 1 - assert results[0].created is None - assert results[0].created_ts == now_timestamp - assert results[0].last_reset is None - assert results[0].last_reset_ts == now_timestamp - assert results[0].start is None - assert results[0].start_ts == now_timestamp - - statistics_indexes = await instance.async_add_executor_job( - _get_index_names, "statistics" - ) - statistics_short_term_indexes = await instance.async_add_executor_job( - _get_index_names, "statistics_short_term" - ) - statistics_index_names = {index["name"] for index in statistics_indexes} - statistics_short_term_index_names = { - index["name"] for index in statistics_short_term_indexes - } - - assert "ix_statistics_statistic_id_start" not in statistics_index_names - assert ( - "ix_statistics_short_term_statistic_id_start" - not in statistics_short_term_index_names - ) - - await hass.async_stop() diff --git a/tests/components/recorder/test_migration_run_time_migrations_remember.py b/tests/components/recorder/test_migration_run_time_migrations_remember.py index 93fa16b8364..4f59edb097f 100644 --- a/tests/components/recorder/test_migration_run_time_migrations_remember.py +++ b/tests/components/recorder/test_migration_run_time_migrations_remember.py @@ -1,6 +1,7 @@ """Test run time migrations are remembered in the migration_changes table.""" import importlib +from pathlib import Path import sys from unittest.mock import patch @@ -10,8 +11,8 @@ from sqlalchemy.orm import Session from homeassistant.components import recorder from homeassistant.components.recorder import core, migration, statistics -from homeassistant.components.recorder.migration import MigrationTask from homeassistant.components.recorder.queries import get_migration_changes +from homeassistant.components.recorder.tasks import StatesContextIDMigrationTask from homeassistant.components.recorder.util import ( execute_stmt_lambda_element, session_scope, @@ -19,11 +20,7 @@ from homeassistant.components.recorder.util import ( from homeassistant.const import EVENT_HOMEASSISTANT_STOP 
from homeassistant.core import HomeAssistant -from .common import ( - MockMigrationTask, - async_recorder_block_till_done, - async_wait_recording_done, -) +from .common import async_recorder_block_till_done, async_wait_recording_done from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator @@ -32,13 +29,6 @@ CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" SCHEMA_MODULE = "tests.components.recorder.db_schema_32" -@pytest.fixture -async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, -) -> None: - """Set up recorder.""" - - async def _async_wait_migration_done(hass: HomeAssistant) -> None: """Wait for the migration to be done.""" await recorder.get_instance(hass).async_block_till_done() @@ -72,11 +62,11 @@ def _create_engine_test(*args, **kwargs): return engine -@pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +@pytest.mark.parametrize("enable_migrate_context_ids", [True]) async def test_migration_changes_prevent_trying_to_migrate_again( - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, + tmp_path: Path, + recorder_db_url: str, ) -> None: """Test that we do not try to migrate when migration_changes indicate its already migrated. @@ -86,29 +76,35 @@ async def test_migration_changes_prevent_trying_to_migrate_again( 2. With current schema so the migration happens 3. With current schema to verify we do not have to query to see if the migration is done """ + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test uses a test database between runs so its + # SQLite specific + return - config = {recorder.CONF_COMMIT_INTERVAL: 1} + config = { + recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db"), + recorder.CONF_COMMIT_INTERVAL: 1, + } importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] # Start with db schema that needs migration (version 32) with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), - patch.object(migration, "non_live_data_migration_needed", return_value=False), + patch.object( + recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION + ), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", MockMigrationTask), + patch.object(migration.EntityIDMigration, "task", core.RecorderTask), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass, config), - ): + async with async_test_home_assistant() as hass: + await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() await async_wait_recording_done(hass) await _async_wait_migration_done(hass) @@ -117,7 +113,8 @@ async def test_migration_changes_prevent_trying_to_migrate_again( await hass.async_stop() # Now start again with current db schema - async with async_test_home_assistant() as hass, async_test_recorder(hass, 
config): + async with async_test_home_assistant() as hass: + await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() await async_wait_recording_done(hass) await _async_wait_migration_done(hass) @@ -141,21 +138,19 @@ async def test_migration_changes_prevent_trying_to_migrate_again( original_queue_task(self, task) # Finally verify we did not call needs_migrate_query on StatesContextIDMigration - with ( - patch( - "homeassistant.components.recorder.core.Recorder.queue_task", - _queue_task, - ), - patch.object( - migration.StatesContextIDMigration, - "needs_migrate_query", - side_effect=RuntimeError("Should not be called"), - ), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass, config), + async with async_test_home_assistant() as hass: + with ( + patch( + "homeassistant.components.recorder.core.Recorder.queue_task", + _queue_task, + ), + patch.object( + migration.StatesContextIDMigration, + "needs_migrate_query", + side_effect=RuntimeError("Should not be called"), + ), ): + await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() await async_wait_recording_done(hass) await _async_wait_migration_done(hass) @@ -172,6 +167,4 @@ async def test_migration_changes_prevent_trying_to_migrate_again( await hass.async_stop() for task in tasks: - if not isinstance(task, MigrationTask): - continue - assert not isinstance(task.migrator, migration.StatesContextIDMigration) + assert not isinstance(task, StatesContextIDMigrationTask) diff --git a/tests/components/recorder/test_models.py b/tests/components/recorder/test_models.py index 9078b2e861c..d06c4a629d7 100644 --- a/tests/components/recorder/test_models.py +++ b/tests/components/recorder/test_models.py @@ -3,6 +3,7 @@ from datetime import datetime, timedelta from unittest.mock import PropertyMock +from freezegun import freeze_time import pytest from homeassistant.components.recorder.const import SupportedDialect @@ -14,14 +15,17 @@ from homeassistant.components.recorder.db_schema import ( ) from homeassistant.components.recorder.models import ( LazyState, + bytes_to_ulid_or_none, + process_datetime_to_timestamp, process_timestamp, process_timestamp_to_utc_isoformat, + ulid_to_bytes_or_none, ) from homeassistant.const import EVENT_STATE_CHANGED import homeassistant.core as ha +from homeassistant.core import HomeAssistant from homeassistant.exceptions import InvalidEntityFormatError from homeassistant.util import dt as dt_util -from homeassistant.util.json import json_loads def test_from_event_to_db_event() -> None: @@ -42,18 +46,6 @@ def test_from_event_to_db_event() -> None: assert event.as_dict() == db_event.to_native().as_dict() -def test_from_event_to_db_event_with_null() -> None: - """Test converting event to EventData with a null with PostgreSQL.""" - event = ha.Event( - "test_event", - {"some_data": "withnull\0terminator"}, - ) - dialect = SupportedDialect.POSTGRESQL - event_data = EventData.shared_data_bytes_from_event(event, dialect) - decoded = json_loads(event_data) - assert decoded["some_data"] == "withnull" - - def test_from_event_to_db_state() -> None: """Test converting event to db state.""" state = ha.State( @@ -91,21 +83,6 @@ def test_from_event_to_db_state_attributes() -> None: assert db_attrs.to_native() == attrs -def test_from_event_to_db_state_attributes_with_null() -> None: - """Test converting a state to StateAttributes with a null with PostgreSQL.""" - attrs = {"this_attr": "withnull\0terminator"} - state = ha.State("sensor.temperature", 
"18", attrs) - event = ha.Event( - EVENT_STATE_CHANGED, - {"entity_id": "sensor.temperature", "old_state": None, "new_state": state}, - context=state.context, - ) - dialect = SupportedDialect.POSTGRESQL - shared_attrs = StateAttributes.shared_attrs_bytes_from_event(event, dialect) - decoded = json_loads(shared_attrs) - assert decoded["this_attr"] == "withnull" - - def test_repr() -> None: """Test converting event to db state repr.""" attrs = {"this_attr": True} @@ -379,3 +356,99 @@ async def test_lazy_state_handles_same_last_updated_and_last_changed( "last_updated": "2021-06-12T03:04:01.000323+00:00", "state": "off", } + + +@pytest.mark.parametrize( + "time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii", "UTC"] +) +async def test_process_datetime_to_timestamp(time_zone, hass: HomeAssistant) -> None: + """Test we can handle processing database datatimes to timestamps.""" + await hass.config.async_set_time_zone(time_zone) + utc_now = dt_util.utcnow() + assert process_datetime_to_timestamp(utc_now) == utc_now.timestamp() + now = dt_util.now() + assert process_datetime_to_timestamp(now) == now.timestamp() + + +@pytest.mark.parametrize( + "time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii", "UTC"] +) +async def test_process_datetime_to_timestamp_freeze_time( + time_zone, hass: HomeAssistant +) -> None: + """Test we can handle processing database datatimes to timestamps. + + This test freezes time to make sure everything matches. + """ + await hass.config.async_set_time_zone(time_zone) + utc_now = dt_util.utcnow() + with freeze_time(utc_now): + epoch = utc_now.timestamp() + assert process_datetime_to_timestamp(dt_util.utcnow()) == epoch + now = dt_util.now() + assert process_datetime_to_timestamp(now) == epoch + + +@pytest.mark.parametrize( + "time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii", "UTC"] +) +async def test_process_datetime_to_timestamp_mirrors_utc_isoformat_behavior( + time_zone, hass: HomeAssistant +) -> None: + """Test process_datetime_to_timestamp mirrors process_timestamp_to_utc_isoformat.""" + await hass.config.async_set_time_zone(time_zone) + datetime_with_tzinfo = datetime(2016, 7, 9, 11, 0, 0, tzinfo=dt_util.UTC) + datetime_without_tzinfo = datetime(2016, 7, 9, 11, 0, 0) + est = dt_util.get_time_zone("US/Eastern") + datetime_est_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=est) + est = dt_util.get_time_zone("US/Eastern") + datetime_est_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=est) + nst = dt_util.get_time_zone("Canada/Newfoundland") + datetime_nst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=nst) + hst = dt_util.get_time_zone("US/Hawaii") + datetime_hst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=hst) + + assert ( + process_datetime_to_timestamp(datetime_with_tzinfo) + == dt_util.parse_datetime("2016-07-09T11:00:00+00:00").timestamp() + ) + assert ( + process_datetime_to_timestamp(datetime_without_tzinfo) + == dt_util.parse_datetime("2016-07-09T11:00:00+00:00").timestamp() + ) + assert ( + process_datetime_to_timestamp(datetime_est_timezone) + == dt_util.parse_datetime("2016-07-09T15:00:00+00:00").timestamp() + ) + assert ( + process_datetime_to_timestamp(datetime_nst_timezone) + == dt_util.parse_datetime("2016-07-09T13:30:00+00:00").timestamp() + ) + assert ( + process_datetime_to_timestamp(datetime_hst_timezone) + == dt_util.parse_datetime("2016-07-09T21:00:00+00:00").timestamp() + ) + + +def test_ulid_to_bytes_or_none(caplog: pytest.LogCaptureFixture) -> None: + """Test ulid_to_bytes_or_none.""" + + assert ( + 
ulid_to_bytes_or_none("01EYQZJXZ5Z1Z1Z1Z1Z1Z1Z1Z1") + == b"\x01w\xaf\xf9w\xe5\xf8~\x1f\x87\xe1\xf8~\x1f\x87\xe1" + ) + assert ulid_to_bytes_or_none("invalid") is None + assert "invalid" in caplog.text + assert ulid_to_bytes_or_none(None) is None + + +def test_bytes_to_ulid_or_none(caplog: pytest.LogCaptureFixture) -> None: + """Test bytes_to_ulid_or_none.""" + + assert ( + bytes_to_ulid_or_none(b"\x01w\xaf\xf9w\xe5\xf8~\x1f\x87\xe1\xf8~\x1f\x87\xe1") + == "01EYQZJXZ5Z1Z1Z1Z1Z1Z1Z1Z1" + ) + assert bytes_to_ulid_or_none(b"invalid") is None + assert "invalid" in caplog.text + assert bytes_to_ulid_or_none(None) is None diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index 245acf4603d..1ccbaada265 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -1,6 +1,5 @@ """Test data purging.""" -from collections.abc import Generator from datetime import datetime, timedelta import json import sqlite3 @@ -10,9 +9,10 @@ from freezegun import freeze_time import pytest from sqlalchemy.exc import DatabaseError, OperationalError from sqlalchemy.orm.session import Session +from typing_extensions import Generator from voluptuous.error import MultipleInvalid -from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, Recorder +from homeassistant.components import recorder from homeassistant.components.recorder.const import SupportedDialect from homeassistant.components.recorder.db_schema import ( Events, @@ -35,6 +35,7 @@ from homeassistant.components.recorder.tasks import PurgeTask from homeassistant.components.recorder.util import session_scope from homeassistant.const import EVENT_STATE_CHANGED, EVENT_THEMES_UPDATED, STATE_ON from homeassistant.core import HomeAssistant +from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util from .common import ( @@ -57,13 +58,6 @@ TEST_EVENT_TYPES = ( ) -@pytest.fixture -async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, -) -> None: - """Set up recorder.""" - - @pytest.fixture(name="use_sqlite") def mock_use_sqlite(request: pytest.FixtureRequest) -> Generator[None]: """Pytest fixture to switch purge method.""" @@ -76,42 +70,47 @@ def mock_use_sqlite(request: pytest.FixtureRequest) -> Generator[None]: yield -async def test_purge_big_database(hass: HomeAssistant, recorder_mock: Recorder) -> None: +async def test_purge_big_database( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: """Test deleting 2/3 old states from a big database.""" + + instance = await async_setup_recorder_instance(hass) + for _ in range(12): await _add_test_states(hass, wait_recording_done=False) await async_wait_recording_done(hass) with ( - patch.object(recorder_mock, "max_bind_vars", 72), - patch.object(recorder_mock.database_engine, "max_bind_vars", 72), + patch.object(instance, "max_bind_vars", 72), + patch.object(instance.database_engine, "max_bind_vars", 72), + session_scope(hass=hass) as session, ): - with session_scope(hass=hass) as session: - states = session.query(States) - state_attributes = session.query(StateAttributes) - assert states.count() == 72 - assert state_attributes.count() == 3 + states = session.query(States) + state_attributes = session.query(StateAttributes) + assert states.count() == 72 + assert state_attributes.count() == 3 purge_before = dt_util.utcnow() - timedelta(days=4) finished = purge_old_data( - recorder_mock, + instance, purge_before, 
states_batch_size=1, events_batch_size=1, repack=False, ) assert not finished - - with session_scope(hass=hass) as session: - states = session.query(States) - state_attributes = session.query(StateAttributes) - assert states.count() == 24 - assert state_attributes.count() == 1 + assert states.count() == 24 + assert state_attributes.count() == 1 -async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None: +async def test_purge_old_states( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: """Test deleting old states.""" + instance = await async_setup_recorder_instance(hass) + await _add_test_states(hass) # make sure we start with 6 states @@ -126,30 +125,24 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 + assert "test.recorder2" in instance.states_manager._last_committed_id - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id + purge_before = dt_util.utcnow() - timedelta(days=4) - purge_before = dt_util.utcnow() - timedelta(days=4) - - # run purge_old_data() - finished = purge_old_data( - recorder_mock, - purge_before, - states_batch_size=1, - events_batch_size=1, - repack=False, - ) - assert not finished - - with session_scope(hass=hass) as session: - states = session.query(States) - state_attributes = session.query(StateAttributes) + # run purge_old_data() + finished = purge_old_data( + instance, + purge_before, + states_batch_size=1, + events_batch_size=1, + repack=False, + ) + assert not finished assert states.count() == 2 assert state_attributes.count() == 1 - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id + assert "test.recorder2" in instance.states_manager._last_committed_id - with session_scope(hass=hass) as session: states_after_purge = list(session.query(States)) # Since these states are deleted in batches, we can't guarantee the order # but we can look them up by state @@ -160,33 +153,27 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> assert dontpurgeme_5.old_state_id == dontpurgeme_4.state_id assert dontpurgeme_4.old_state_id is None - finished = purge_old_data(recorder_mock, purge_before, repack=False) - assert finished - - with session_scope(hass=hass) as session: - states = session.query(States) - state_attributes = session.query(StateAttributes) + finished = purge_old_data(instance, purge_before, repack=False) + assert finished assert states.count() == 2 assert state_attributes.count() == 1 - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id + assert "test.recorder2" in instance.states_manager._last_committed_id - # run purge_old_data again - purge_before = dt_util.utcnow() - finished = purge_old_data( - recorder_mock, - purge_before, - states_batch_size=1, - events_batch_size=1, - repack=False, - ) - assert not finished - - with session_scope(hass=hass) as session: + # run purge_old_data again + purge_before = dt_util.utcnow() + finished = purge_old_data( + instance, + purge_before, + states_batch_size=1, + events_batch_size=1, + repack=False, + ) + assert not finished assert states.count() == 0 assert state_attributes.count() == 0 - assert "test.recorder2" not in recorder_mock.states_manager._last_committed_id + assert "test.recorder2" not in instance.states_manager._last_committed_id # Add some more states await _add_test_states(hass) @@ -200,27 +187,30 @@ 
async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id + assert "test.recorder2" in instance.states_manager._last_committed_id state_attributes = session.query(StateAttributes) assert state_attributes.count() == 3 -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("recorder_mock", "skip_by_db_engine") async def test_purge_old_states_encouters_database_corruption( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_db_url: str, ) -> None: - """Test database image image is malformed while deleting old states. + """Test database image image is malformed while deleting old states.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test is specific for SQLite, wiping the database on error only happens + # with SQLite. + return + + await async_setup_recorder_instance(hass) - This test is specific for SQLite, wiping the database on error only happens - with SQLite. - """ await _add_test_states(hass) await async_wait_recording_done(hass) sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError("not a database") + sqlite3_exception.__cause__ = sqlite3.DatabaseError() with ( patch( @@ -231,7 +221,7 @@ async def test_purge_old_states_encouters_database_corruption( side_effect=sqlite3_exception, ), ): - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) @@ -244,11 +234,13 @@ async def test_purge_old_states_encouters_database_corruption( async def test_purge_old_states_encounters_temporary_mysql_error( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test retry on specific mysql operational errors.""" + instance = await async_setup_recorder_instance(hass) + await _add_test_states(hass) await async_wait_recording_done(hass) @@ -261,9 +253,9 @@ async def test_purge_old_states_encounters_temporary_mysql_error( "homeassistant.components.recorder.purge._purge_old_recorder_runs", side_effect=[mysql_exception, None], ), - patch.object(recorder_mock.engine.dialect, "name", "mysql"), + patch.object(instance.engine.dialect, "name", "mysql"), ): - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -272,12 +264,14 @@ async def test_purge_old_states_encounters_temporary_mysql_error( assert sleep_mock.called -@pytest.mark.usefixtures("recorder_mock") async def test_purge_old_states_encounters_operational_error( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, caplog: pytest.LogCaptureFixture, ) -> None: """Test error on operational errors that are not mysql does not retry.""" + await async_setup_recorder_instance(hass) + await _add_test_states(hass) await async_wait_recording_done(hass) @@ -287,7 +281,7 @@ async def test_purge_old_states_encounters_operational_error( 
"homeassistant.components.recorder.purge._purge_old_recorder_runs", side_effect=exception, ): - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -296,8 +290,12 @@ async def test_purge_old_states_encounters_operational_error( assert "Error executing purge" in caplog.text -async def test_purge_old_events(hass: HomeAssistant, recorder_mock: Recorder) -> None: +async def test_purge_old_events( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: """Test deleting old events.""" + instance = await async_setup_recorder_instance(hass) + await _add_test_events(hass) with session_scope(hass=hass) as session: @@ -306,46 +304,38 @@ async def test_purge_old_events(hass: HomeAssistant, recorder_mock: Recorder) -> ) assert events.count() == 6 - purge_before = dt_util.utcnow() - timedelta(days=4) + purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert not finished - - with session_scope(hass=hass) as session: - events = session.query(Events).filter( - Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) + # run purge_old_data() + finished = purge_old_data( + instance, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, ) + assert not finished all_events = events.all() assert events.count() == 2, f"Should have 2 events left: {all_events}" - # we should only have 2 events left - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert finished - - with session_scope(hass=hass) as session: - events = session.query(Events).filter( - Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) + # we should only have 2 events left + finished = purge_old_data( + instance, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, ) + assert finished assert events.count() == 2 async def test_purge_old_recorder_runs( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test deleting old recorder runs keeps current run.""" + instance = await async_setup_recorder_instance(hass) + await _add_test_recorder_runs(hass) # make sure we start with 7 recorder runs @@ -353,36 +343,35 @@ async def test_purge_old_recorder_runs( recorder_runs = session.query(RecorderRuns) assert recorder_runs.count() == 7 - purge_before = dt_util.utcnow() + purge_before = dt_util.utcnow() - # run purge_old_data() - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert not finished + # run purge_old_data() + finished = purge_old_data( + instance, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert not finished - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert finished - - with session_scope(hass=hass) as session: - recorder_runs = session.query(RecorderRuns) + finished = purge_old_data( + instance, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert finished 
assert recorder_runs.count() == 1 async def test_purge_old_statistics_runs( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test deleting old statistics runs keeps the latest run.""" + instance = await async_setup_recorder_instance(hass) + await _add_test_statistics_runs(hass) # make sure we start with 7 statistics runs @@ -390,23 +379,20 @@ async def test_purge_old_statistics_runs( statistics_runs = session.query(StatisticsRuns) assert statistics_runs.count() == 7 - purge_before = dt_util.utcnow() + purge_before = dt_util.utcnow() - # run purge_old_data() - finished = purge_old_data(recorder_mock, purge_before, repack=False) - assert not finished + # run purge_old_data() + finished = purge_old_data(instance, purge_before, repack=False) + assert not finished - finished = purge_old_data(recorder_mock, purge_before, repack=False) - assert finished - - with session_scope(hass=hass) as session: - statistics_runs = session.query(StatisticsRuns) + finished = purge_old_data(instance, purge_before, repack=False) + assert finished assert statistics_runs.count() == 1 @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) -@pytest.mark.usefixtures("recorder_mock") async def test_purge_method( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, caplog: pytest.LogCaptureFixture, use_sqlite: bool, @@ -424,6 +410,8 @@ async def test_purge_method( assert run1.run_id == run2.run_id assert run1.start == run2.start + await async_setup_recorder_instance(hass) + service_data = {"keep_days": 4} await _add_test_events(hass) await _add_test_states(hass) @@ -529,8 +517,8 @@ async def test_purge_method( @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) async def test_purge_edge_case( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, use_sqlite: bool, ) -> None: """Test states and events are purged even if they occurred shortly before purge_before.""" @@ -564,9 +552,11 @@ async def test_purge_edge_case( attributes_id=1002, ) ) - convert_pending_events_to_event_types(recorder_mock, session) - convert_pending_states_to_meta(recorder_mock, session) + instance = recorder.get_instance(hass) + convert_pending_events_to_event_types(instance, session) + convert_pending_states_to_meta(instance, session) + await async_setup_recorder_instance(hass, None) await async_wait_purge_done(hass) service_data = {"keep_days": 2} @@ -585,7 +575,7 @@ async def test_purge_edge_case( ) assert events.count() == 1 - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -600,7 +590,10 @@ async def test_purge_edge_case( assert events.count() == 0 -async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) -> None: +async def test_purge_cutoff_date( + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, +) -> None: """Test states and events are purged only if they occurred before "now() - keep_days".""" async def _add_db_entries(hass: HomeAssistant, cutoff: datetime, rows: int) -> None: @@ -663,9 +656,10 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - attributes_id=1000 + row, ) ) - convert_pending_events_to_event_types(recorder_mock, session) - 
convert_pending_states_to_meta(recorder_mock, session) + convert_pending_events_to_event_types(instance, session) + convert_pending_states_to_meta(instance, session) + instance = await async_setup_recorder_instance(hass, None) await async_wait_purge_done(hass) service_data = {"keep_days": 2} @@ -701,7 +695,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - == 1 ) - recorder_mock.queue_task(PurgeTask(cutoff, repack=False, apply_filter=False)) + instance.queue_task(PurgeTask(cutoff, repack=False, apply_filter=False)) await hass.async_block_till_done() await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -742,9 +736,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - ) # Make sure we can purge everything - recorder_mock.queue_task( - PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False) - ) + instance.queue_task(PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False)) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -755,9 +747,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - assert state_attributes.count() == 0 # Make sure we can purge everything when the db is already empty - recorder_mock.queue_task( - PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False) - ) + instance.queue_task(PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False)) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -769,16 +759,15 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) -@pytest.mark.parametrize( - "recorder_config", [{"exclude": {"entities": ["sensor.excluded"]}}] -) async def test_purge_filtered_states( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, use_sqlite: bool, ) -> None: """Test filtered states are purged.""" - assert recorder_mock.entity_filter("sensor.excluded") is False + config: ConfigType = {"exclude": {"entities": ["sensor.excluded"]}} + instance = await async_setup_recorder_instance(hass, config) + assert instance.entity_filter("sensor.excluded") is False def _add_db_entries(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -861,8 +850,8 @@ async def test_purge_filtered_states( time_fired_ts=dt_util.utc_to_timestamp(timestamp), ) ) - convert_pending_states_to_meta(recorder_mock, session) - convert_pending_events_to_event_types(recorder_mock, session) + convert_pending_states_to_meta(instance, session) + convert_pending_events_to_event_types(instance, session) service_data = {"keep_days": 10} _add_db_entries(hass) @@ -876,7 +865,7 @@ async def test_purge_filtered_states( assert events_keep.count() == 1 # Normal purge doesn't remove excluded entities - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -892,7 +881,7 @@ async def test_purge_filtered_states( # Test with 'apply_filter' = True service_data["apply_filter"] = True - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -940,7 +929,7 @@ async def 
test_purge_filtered_states( assert session.query(StateAttributes).count() == 11 # Do it again to make sure nothing changes - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -952,7 +941,7 @@ async def test_purge_filtered_states( assert session.query(StateAttributes).count() == 11 service_data = {"keep_days": 0} - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -965,16 +954,15 @@ async def test_purge_filtered_states( @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) -@pytest.mark.parametrize( - "recorder_config", [{"exclude": {"entities": ["sensor.excluded"]}}] -) async def test_purge_filtered_states_to_empty( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, use_sqlite: bool, ) -> None: """Test filtered states are purged all the way to an empty db.""" - assert recorder_mock.entity_filter("sensor.excluded") is False + config: ConfigType = {"exclude": {"entities": ["sensor.excluded"]}} + instance = await async_setup_recorder_instance(hass, config) + assert instance.entity_filter("sensor.excluded") is False def _add_db_entries(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -989,7 +977,7 @@ async def test_purge_filtered_states_to_empty( timestamp, event_id * days, ) - convert_pending_states_to_meta(recorder_mock, session) + convert_pending_states_to_meta(instance, session) service_data = {"keep_days": 10} _add_db_entries(hass) @@ -1002,7 +990,7 @@ async def test_purge_filtered_states_to_empty( # Test with 'apply_filter' = True service_data["apply_filter"] = True - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -1014,22 +1002,21 @@ async def test_purge_filtered_states_to_empty( # Do it again to make sure nothing changes # Why do we do this? Should we check the end result? 
- await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) -@pytest.mark.parametrize( - "recorder_config", [{"exclude": {"entities": ["sensor.old_format"]}}] -) async def test_purge_without_state_attributes_filtered_states_to_empty( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, use_sqlite: bool, ) -> None: """Test filtered legacy states without state attributes are purged all the way to an empty db.""" - assert recorder_mock.entity_filter("sensor.old_format") is False + config: ConfigType = {"exclude": {"entities": ["sensor.old_format"]}} + instance = await async_setup_recorder_instance(hass, config) + assert instance.entity_filter("sensor.old_format") is False def _add_db_entries(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -1066,8 +1053,8 @@ async def test_purge_without_state_attributes_filtered_states_to_empty( time_fired_ts=dt_util.utc_to_timestamp(timestamp), ) ) - convert_pending_states_to_meta(recorder_mock, session) - convert_pending_events_to_event_types(recorder_mock, session) + convert_pending_states_to_meta(instance, session) + convert_pending_events_to_event_types(instance, session) service_data = {"keep_days": 10} _add_db_entries(hass) @@ -1080,7 +1067,7 @@ async def test_purge_without_state_attributes_filtered_states_to_empty( # Test with 'apply_filter' = True service_data["apply_filter"] = True - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -1092,18 +1079,18 @@ async def test_purge_without_state_attributes_filtered_states_to_empty( # Do it again to make sure nothing changes # Why do we do this? Should we check the end result? 
- await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) -@pytest.mark.parametrize( - "recorder_config", [{"exclude": {"event_types": ["EVENT_PURGE"]}}] -) async def test_purge_filtered_events( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, ) -> None: """Test filtered events are purged.""" + config: ConfigType = {"exclude": {"event_types": ["EVENT_PURGE"]}} + instance = await async_setup_recorder_instance(hass, config) await async_wait_recording_done(hass) def _add_db_entries(hass: HomeAssistant) -> None: @@ -1132,11 +1119,11 @@ async def test_purge_filtered_events( timestamp, event_id, ) - convert_pending_events_to_event_types(recorder_mock, session) - convert_pending_states_to_meta(recorder_mock, session) + convert_pending_events_to_event_types(instance, session) + convert_pending_states_to_meta(instance, session) service_data = {"keep_days": 10} - await recorder_mock.async_add_executor_job(_add_db_entries, hass) + await instance.async_add_executor_job(_add_db_entries, hass) await async_wait_recording_done(hass) with session_scope(hass=hass, read_only=True) as session: @@ -1148,7 +1135,7 @@ async def test_purge_filtered_events( assert states.count() == 10 # Normal purge doesn't remove excluded events - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -1164,7 +1151,7 @@ async def test_purge_filtered_events( # Test with 'apply_filter' = True service_data["apply_filter"] = True - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -1182,26 +1169,23 @@ async def test_purge_filtered_events( assert states.count() == 10 -@pytest.mark.parametrize( - "recorder_config", - [ - { - "exclude": { - "event_types": ["excluded_event"], - "entities": ["sensor.excluded", "sensor.old_format"], - } - } - ], -) async def test_purge_filtered_events_state_changed( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, ) -> None: """Test filtered state_changed events are purged. 
This should also remove all states.""" + config: ConfigType = { + "exclude": { + "event_types": ["excluded_event"], + "entities": ["sensor.excluded", "sensor.old_format"], + } + } + instance = await async_setup_recorder_instance(hass, config) # Assert entity_id is NOT excluded - assert recorder_mock.entity_filter("sensor.excluded") is False - assert recorder_mock.entity_filter("sensor.old_format") is False - assert recorder_mock.entity_filter("sensor.keep") is True - assert "excluded_event" in recorder_mock.exclude_event_types + assert instance.entity_filter("sensor.excluded") is False + assert instance.entity_filter("sensor.old_format") is False + assert instance.entity_filter("sensor.keep") is True + assert "excluded_event" in instance.exclude_event_types def _add_db_entries(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -1274,8 +1258,8 @@ async def test_purge_filtered_events_state_changed( last_updated_ts=dt_util.utc_to_timestamp(timestamp), ) ) - convert_pending_events_to_event_types(recorder_mock, session) - convert_pending_states_to_meta(recorder_mock, session) + convert_pending_events_to_event_types(instance, session) + convert_pending_states_to_meta(instance, session) service_data = {"keep_days": 10, "apply_filter": True} _add_db_entries(hass) @@ -1293,7 +1277,7 @@ async def test_purge_filtered_events_state_changed( assert events_purge.count() == 1 assert states.count() == 64 - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() for _ in range(4): @@ -1327,12 +1311,13 @@ async def test_purge_filtered_events_state_changed( ) # should have been kept -async def test_purge_entities(hass: HomeAssistant, recorder_mock: Recorder) -> None: +async def test_purge_entities( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: """Test purging of specific entities.""" + instance = await async_setup_recorder_instance(hass) - async def _purge_entities( - hass: HomeAssistant, entity_ids: str, domains: str, entity_globs: str - ) -> None: + async def _purge_entities(hass, entity_ids, domains, entity_globs): service_data = { "entity_id": entity_ids, "domains": domains, @@ -1340,7 +1325,7 @@ async def test_purge_entities(hass: HomeAssistant, recorder_mock: Recorder) -> N } await hass.services.async_call( - RECORDER_DOMAIN, SERVICE_PURGE_ENTITIES, service_data + recorder.DOMAIN, SERVICE_PURGE_ENTITIES, service_data ) await hass.async_block_till_done() @@ -1378,8 +1363,8 @@ async def test_purge_entities(hass: HomeAssistant, recorder_mock: Recorder) -> N timestamp, event_id * days, ) - convert_pending_states_to_meta(recorder_mock, session) - convert_pending_events_to_event_types(recorder_mock, session) + convert_pending_states_to_meta(instance, session) + convert_pending_events_to_event_types(instance, session) def _add_keep_records(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -1393,8 +1378,8 @@ async def test_purge_entities(hass: HomeAssistant, recorder_mock: Recorder) -> N timestamp, event_id, ) - convert_pending_states_to_meta(recorder_mock, session) - convert_pending_events_to_event_types(recorder_mock, session) + convert_pending_states_to_meta(instance, session) + convert_pending_events_to_event_types(instance, session) _add_purge_records(hass) _add_keep_records(hass) @@ -1672,14 +1657,15 @@ def _add_state_with_state_attributes( @pytest.mark.timeout(30) async def 
test_purge_many_old_events( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test deleting old events.""" old_events_count = 5 + instance = await async_setup_recorder_instance(hass) with ( - patch.object(recorder_mock, "max_bind_vars", old_events_count), - patch.object(recorder_mock.database_engine, "max_bind_vars", old_events_count), + patch.object(instance, "max_bind_vars", old_events_count), + patch.object(instance.database_engine, "max_bind_vars", old_events_count), ): await _add_test_events(hass, old_events_count) @@ -1689,62 +1675,48 @@ async def test_purge_many_old_events( ) assert events.count() == old_events_count * 6 - purge_before = dt_util.utcnow() - timedelta(days=4) + purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - states_batch_size=3, - events_batch_size=3, - ) - assert not finished - - with session_scope(hass=hass) as session: - events = session.query(Events).filter( - Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) + # run purge_old_data() + finished = purge_old_data( + instance, + purge_before, + repack=False, + states_batch_size=3, + events_batch_size=3, ) + assert not finished assert events.count() == old_events_count * 3 - # we should only have 2 groups of events left - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - states_batch_size=3, - events_batch_size=3, - ) - assert finished - - with session_scope(hass=hass) as session: - events = session.query(Events).filter( - Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) + # we should only have 2 groups of events left + finished = purge_old_data( + instance, + purge_before, + repack=False, + states_batch_size=3, + events_batch_size=3, ) + assert finished assert events.count() == old_events_count * 2 - # we should now purge everything - finished = purge_old_data( - recorder_mock, - dt_util.utcnow(), - repack=False, - states_batch_size=20, - events_batch_size=20, - ) - assert finished - - with session_scope(hass=hass) as session: - events = session.query(Events).filter( - Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) + # we should now purge everything + finished = purge_old_data( + instance, + dt_util.utcnow(), + repack=False, + states_batch_size=20, + events_batch_size=20, ) + assert finished assert events.count() == 0 async def test_purge_old_events_purges_the_event_type_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test deleting old events purges event type ids.""" - assert recorder_mock.event_type_manager.active is True + instance = await async_setup_recorder_instance(hass) + assert instance.event_type_manager.active is True utcnow = dt_util.utcnow() five_days_ago = utcnow - timedelta(days=5) @@ -1788,7 +1760,7 @@ async def test_purge_old_events_purges_the_event_type_ids( time_fired_ts=dt_util.utc_to_timestamp(timestamp), ) ) - return recorder_mock.event_type_manager.get_many( + return instance.event_type_manager.get_many( [ "EVENT_TEST_AUTOPURGE", "EVENT_TEST_PURGE", @@ -1798,7 +1770,7 @@ async def test_purge_old_events_purges_the_event_type_ids( session, ) - event_type_to_id = await recorder_mock.async_add_executor_job(_insert_events) + event_type_to_id = await instance.async_add_executor_job(_insert_events) test_event_type_ids = 
event_type_to_id.values() with session_scope(hass=hass) as session: events = session.query(Events).where( @@ -1811,70 +1783,47 @@ async def test_purge_old_events_purges_the_event_type_ids( assert events.count() == 30 assert event_types.count() == 4 - # run purge_old_data() - finished = purge_old_data( - recorder_mock, - far_past, - repack=False, - ) - assert finished - - with session_scope(hass=hass) as session: - events = session.query(Events).where( - Events.event_type_id.in_(test_event_type_ids) - ) - event_types = session.query(EventTypes).where( - EventTypes.event_type_id.in_(test_event_type_ids) + # run purge_old_data() + finished = purge_old_data( + instance, + far_past, + repack=False, ) + assert finished assert events.count() == 30 # We should remove the unused event type assert event_types.count() == 3 - assert "EVENT_TEST_UNUSED" not in recorder_mock.event_type_manager._id_map + assert "EVENT_TEST_UNUSED" not in instance.event_type_manager._id_map - # we should only have 10 events left since - # only one event type was recorded now - finished = purge_old_data( - recorder_mock, - utcnow, - repack=False, - ) - assert finished - - with session_scope(hass=hass) as session: - events = session.query(Events).where( - Events.event_type_id.in_(test_event_type_ids) - ) - event_types = session.query(EventTypes).where( - EventTypes.event_type_id.in_(test_event_type_ids) + # we should only have 10 events left since + # only one event type was recorded now + finished = purge_old_data( + instance, + utcnow, + repack=False, ) + assert finished assert events.count() == 10 assert event_types.count() == 1 - # Purge everything - finished = purge_old_data( - recorder_mock, - utcnow + timedelta(seconds=1), - repack=False, - ) - assert finished - - with session_scope(hass=hass) as session: - events = session.query(Events).where( - Events.event_type_id.in_(test_event_type_ids) - ) - event_types = session.query(EventTypes).where( - EventTypes.event_type_id.in_(test_event_type_ids) + # Purge everything + finished = purge_old_data( + instance, + utcnow + timedelta(seconds=1), + repack=False, ) + assert finished assert events.count() == 0 assert event_types.count() == 0 async def test_purge_old_states_purges_the_state_metadata_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test deleting old states purges state metadata_ids.""" - assert recorder_mock.states_meta_manager.active is True + instance = await async_setup_recorder_instance(hass) + assert instance.states_meta_manager.active is True utcnow = dt_util.utcnow() five_days_ago = utcnow - timedelta(days=5) @@ -1918,15 +1867,13 @@ async def test_purge_old_states_purges_the_state_metadata_ids( last_updated_ts=dt_util.utc_to_timestamp(timestamp), ) ) - return recorder_mock.states_meta_manager.get_many( + return instance.states_meta_manager.get_many( ["sensor.one", "sensor.two", "sensor.three", "sensor.unused"], session, True, ) - entity_id_to_metadata_id = await recorder_mock.async_add_executor_job( - _insert_states - ) + entity_id_to_metadata_id = await instance.async_add_executor_job(_insert_states) test_metadata_ids = entity_id_to_metadata_id.values() with session_scope(hass=hass) as session: states = session.query(States).where(States.metadata_id.in_(test_metadata_ids)) @@ -1937,63 +1884,47 @@ async def test_purge_old_states_purges_the_state_metadata_ids( assert states.count() == 30 assert states_meta.count() == 4 - # run purge_old_data() - finished = 
purge_old_data( - recorder_mock, - far_past, - repack=False, - ) - assert finished - - with session_scope(hass=hass) as session: - states = session.query(States).where(States.metadata_id.in_(test_metadata_ids)) - states_meta = session.query(StatesMeta).where( - StatesMeta.metadata_id.in_(test_metadata_ids) + # run purge_old_data() + finished = purge_old_data( + instance, + far_past, + repack=False, ) + assert finished assert states.count() == 30 # We should remove the unused entity_id assert states_meta.count() == 3 - assert "sensor.unused" not in recorder_mock.event_type_manager._id_map + assert "sensor.unused" not in instance.event_type_manager._id_map - # we should only have 10 states left since - # only one event type was recorded now - finished = purge_old_data( - recorder_mock, - utcnow, - repack=False, - ) - assert finished - - with session_scope(hass=hass) as session: - states = session.query(States).where(States.metadata_id.in_(test_metadata_ids)) - states_meta = session.query(StatesMeta).where( - StatesMeta.metadata_id.in_(test_metadata_ids) + # we should only have 10 states left since + # only one event type was recorded now + finished = purge_old_data( + instance, + utcnow, + repack=False, ) + assert finished assert states.count() == 10 assert states_meta.count() == 1 - # Purge everything - finished = purge_old_data( - recorder_mock, - utcnow + timedelta(seconds=1), - repack=False, - ) - assert finished - - with session_scope(hass=hass) as session: - states = session.query(States).where(States.metadata_id.in_(test_metadata_ids)) - states_meta = session.query(StatesMeta).where( - StatesMeta.metadata_id.in_(test_metadata_ids) + # Purge everything + finished = purge_old_data( + instance, + utcnow + timedelta(seconds=1), + repack=False, ) + assert finished assert states.count() == 0 assert states_meta.count() == 0 async def test_purge_entities_keep_days( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, ) -> None: """Test purging states with an entity filter and keep_days.""" + instance = await async_setup_recorder_instance(hass, {}) await hass.async_block_till_done() await async_wait_recording_done(hass) start = dt_util.utcnow() @@ -2015,7 +1946,7 @@ async def test_purge_entities_keep_days( hass.states.async_set("sensor.keep", "now") await async_recorder_block_till_done(hass) - states = await recorder_mock.async_add_executor_job( + states = await instance.async_add_executor_job( get_significant_states, hass, one_month_ago, @@ -2026,7 +1957,7 @@ async def test_purge_entities_keep_days( assert len(states["sensor.purge"]) == 3 await hass.services.async_call( - RECORDER_DOMAIN, + recorder.DOMAIN, SERVICE_PURGE_ENTITIES, { "entity_id": "sensor.purge", @@ -2036,7 +1967,7 @@ async def test_purge_entities_keep_days( await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) - states = await recorder_mock.async_add_executor_job( + states = await instance.async_add_executor_job( get_significant_states, hass, one_month_ago, @@ -2047,7 +1978,7 @@ async def test_purge_entities_keep_days( assert len(states["sensor.purge"]) == 1 await hass.services.async_call( - RECORDER_DOMAIN, + recorder.DOMAIN, SERVICE_PURGE_ENTITIES, { "entity_id": "sensor.purge", @@ -2056,7 +1987,7 @@ async def test_purge_entities_keep_days( await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) - states = await recorder_mock.async_add_executor_job( + states = await instance.async_add_executor_job( 
get_significant_states, hass, one_month_ago, diff --git a/tests/components/recorder/test_purge_v32_schema.py b/tests/components/recorder/test_purge_v32_schema.py index 0754b2e911c..fb636cfa9dc 100644 --- a/tests/components/recorder/test_purge_v32_schema.py +++ b/tests/components/recorder/test_purge_v32_schema.py @@ -1,6 +1,5 @@ """Test data purging.""" -from collections.abc import Generator from datetime import datetime, timedelta import json import sqlite3 @@ -11,12 +10,10 @@ import pytest from sqlalchemy import text, update from sqlalchemy.exc import DatabaseError, OperationalError from sqlalchemy.orm.session import Session +from typing_extensions import Generator -from homeassistant.components.recorder import ( - DOMAIN as RECORDER_DOMAIN, - Recorder, - migration, -) +from homeassistant.components import recorder +from homeassistant.components.recorder import migration from homeassistant.components.recorder.const import SupportedDialect from homeassistant.components.recorder.history import get_significant_states from homeassistant.components.recorder.purge import purge_old_data @@ -50,13 +47,6 @@ from .db_schema_32 import ( from tests.typing import RecorderInstanceGenerator -@pytest.fixture -async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, -) -> None: - """Set up recorder.""" - - @pytest.fixture(autouse=True) def db_schema_32(): """Fixture to initialize the db with the old schema 32.""" @@ -76,8 +66,11 @@ def mock_use_sqlite(request: pytest.FixtureRequest) -> Generator[None]: yield -async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None: +async def test_purge_old_states( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: """Test deleting old states.""" + instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_states(hass) @@ -94,27 +87,23 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id + assert "test.recorder2" in instance.states_manager._last_committed_id - purge_before = dt_util.utcnow() - timedelta(days=4) + purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - recorder_mock, - purge_before, - states_batch_size=1, - events_batch_size=1, - repack=False, - ) - assert not finished - - with session_scope(hass=hass) as session: - states = session.query(States) - state_attributes = session.query(StateAttributes) + # run purge_old_data() + finished = purge_old_data( + instance, + purge_before, + states_batch_size=1, + events_batch_size=1, + repack=False, + ) + assert not finished assert states.count() == 2 assert state_attributes.count() == 1 - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id + assert "test.recorder2" in instance.states_manager._last_committed_id states_after_purge = list(session.query(States)) # Since these states are deleted in batches, we can't guarantee the order @@ -126,35 +115,27 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> assert dontpurgeme_5.old_state_id == dontpurgeme_4.state_id assert dontpurgeme_4.old_state_id is None - finished = purge_old_data(recorder_mock, purge_before, repack=False) - assert finished - - with session_scope(hass=hass) as session: - states = 
session.query(States) - state_attributes = session.query(StateAttributes) + finished = purge_old_data(instance, purge_before, repack=False) + assert finished assert states.count() == 2 assert state_attributes.count() == 1 - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id + assert "test.recorder2" in instance.states_manager._last_committed_id - # run purge_old_data again - purge_before = dt_util.utcnow() - finished = purge_old_data( - recorder_mock, - purge_before, - states_batch_size=1, - events_batch_size=1, - repack=False, - ) - assert not finished - - with session_scope(hass=hass) as session: - states = session.query(States) - state_attributes = session.query(StateAttributes) + # run purge_old_data again + purge_before = dt_util.utcnow() + finished = purge_old_data( + instance, + purge_before, + states_batch_size=1, + events_batch_size=1, + repack=False, + ) + assert not finished assert states.count() == 0 assert state_attributes.count() == 0 - assert "test.recorder2" not in recorder_mock.states_manager._last_committed_id + assert "test.recorder2" not in instance.states_manager._last_committed_id # Add some more states await _add_test_states(hass) @@ -168,29 +149,31 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id + assert "test.recorder2" in instance.states_manager._last_committed_id state_attributes = session.query(StateAttributes) assert state_attributes.count() == 3 -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("recorder_mock", "skip_by_db_engine") async def test_purge_old_states_encouters_database_corruption( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_db_url: str, ) -> None: - """Test database image image is malformed while deleting old states. + """Test database image image is malformed while deleting old states.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test is specific for SQLite, wiping the database on error only happens + # with SQLite. + return - This test is specific for SQLite, wiping the database on error only happens - with SQLite. 
- """ + await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_states(hass) await async_wait_recording_done(hass) sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError("not a database") + sqlite3_exception.__cause__ = sqlite3.DatabaseError() with ( patch( @@ -201,7 +184,7 @@ async def test_purge_old_states_encouters_database_corruption( side_effect=sqlite3_exception, ), ): - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) @@ -214,11 +197,12 @@ async def test_purge_old_states_encouters_database_corruption( async def test_purge_old_states_encounters_temporary_mysql_error( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test retry on specific mysql operational errors.""" + instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_states(hass) @@ -233,9 +217,9 @@ async def test_purge_old_states_encounters_temporary_mysql_error( "homeassistant.components.recorder.purge._purge_old_recorder_runs", side_effect=[mysql_exception, None], ), - patch.object(recorder_mock.engine.dialect, "name", "mysql"), + patch.object(instance.engine.dialect, "name", "mysql"), ): - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -244,12 +228,13 @@ async def test_purge_old_states_encounters_temporary_mysql_error( assert sleep_mock.called -@pytest.mark.usefixtures("recorder_mock") async def test_purge_old_states_encounters_operational_error( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, caplog: pytest.LogCaptureFixture, ) -> None: """Test error on operational errors that are not mysql does not retry.""" + await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_states(hass) @@ -261,7 +246,7 @@ async def test_purge_old_states_encounters_operational_error( "homeassistant.components.recorder.purge._purge_old_recorder_runs", side_effect=exception, ): - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -270,8 +255,11 @@ async def test_purge_old_states_encounters_operational_error( assert "Error executing purge" in caplog.text -async def test_purge_old_events(hass: HomeAssistant, recorder_mock: Recorder) -> None: +async def test_purge_old_events( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: """Test deleting old events.""" + instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_events(hass) @@ -282,39 +270,34 @@ async def test_purge_old_events(hass: HomeAssistant, recorder_mock: Recorder) -> purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - events_batch_size=1, - 
states_batch_size=1, - ) - assert not finished - - with session_scope(hass=hass) as session: - events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) + # run purge_old_data() + finished = purge_old_data( + instance, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert not finished assert events.count() == 2 - # we should only have 2 events left - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert finished - - with session_scope(hass=hass) as session: - events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) + # we should only have 2 events left + finished = purge_old_data( + instance, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert finished assert events.count() == 2 async def test_purge_old_recorder_runs( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test deleting old recorder runs keeps current run.""" + instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_recorder_runs(hass) @@ -324,36 +307,34 @@ async def test_purge_old_recorder_runs( recorder_runs = session.query(RecorderRuns) assert recorder_runs.count() == 7 - purge_before = dt_util.utcnow() + purge_before = dt_util.utcnow() - # run purge_old_data() - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert not finished + # run purge_old_data() + finished = purge_old_data( + instance, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert not finished - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert finished - - with session_scope(hass=hass) as session: - recorder_runs = session.query(RecorderRuns) + finished = purge_old_data( + instance, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert finished assert recorder_runs.count() == 1 async def test_purge_old_statistics_runs( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test deleting old statistics runs keeps the latest run.""" + instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_statistics_runs(hass) @@ -363,23 +344,20 @@ async def test_purge_old_statistics_runs( statistics_runs = session.query(StatisticsRuns) assert statistics_runs.count() == 7 - purge_before = dt_util.utcnow() + purge_before = dt_util.utcnow() - # run purge_old_data() - finished = purge_old_data(recorder_mock, purge_before, repack=False) - assert not finished + # run purge_old_data() + finished = purge_old_data(instance, purge_before, repack=False) + assert not finished - finished = purge_old_data(recorder_mock, purge_before, repack=False) - assert finished - - with session_scope(hass=hass) as session: - statistics_runs = session.query(StatisticsRuns) + finished = purge_old_data(instance, purge_before, repack=False) + assert finished assert statistics_runs.count() == 1 @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) -@pytest.mark.usefixtures("recorder_mock") async def test_purge_method( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, 
caplog: pytest.LogCaptureFixture, use_sqlite: bool, @@ -397,6 +375,7 @@ async def test_purge_method( assert run1.run_id == run2.run_id assert run1.start == run2.start + await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) service_data = {"keep_days": 4} @@ -497,8 +476,11 @@ async def test_purge_method( @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) -@pytest.mark.usefixtures("recorder_mock") -async def test_purge_edge_case(hass: HomeAssistant, use_sqlite: bool) -> None: +async def test_purge_edge_case( + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, + use_sqlite: bool, +) -> None: """Test states and events are purged even if they occurred shortly before purge_before.""" async def _add_db_entries(hass: HomeAssistant, timestamp: datetime) -> None: @@ -531,6 +513,7 @@ async def test_purge_edge_case(hass: HomeAssistant, use_sqlite: bool) -> None: ) ) + await async_setup_recorder_instance(hass, None) await async_attach_db_engine(hass) await async_wait_purge_done(hass) @@ -549,7 +532,7 @@ async def test_purge_edge_case(hass: HomeAssistant, use_sqlite: bool) -> None: events = session.query(Events).filter(Events.event_type == "EVENT_TEST_PURGE") assert events.count() == 1 - await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -562,7 +545,10 @@ async def test_purge_edge_case(hass: HomeAssistant, use_sqlite: bool) -> None: assert events.count() == 0 -async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) -> None: +async def test_purge_cutoff_date( + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, +) -> None: """Test states and events are purged only if they occurred before "now() - keep_days".""" async def _add_db_entries(hass: HomeAssistant, cutoff: datetime, rows: int) -> None: @@ -626,6 +612,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - ) ) + instance = await async_setup_recorder_instance(hass, None) await async_attach_db_engine(hass) await async_wait_purge_done(hass) @@ -654,7 +641,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - assert events.filter(Events.event_type == "PURGE").count() == rows - 1 assert events.filter(Events.event_type == "KEEP").count() == 1 - recorder_mock.queue_task(PurgeTask(cutoff, repack=False, apply_filter=False)) + instance.queue_task(PurgeTask(cutoff, repack=False, apply_filter=False)) await hass.async_block_till_done() await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -685,9 +672,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - assert events.filter(Events.event_type == "KEEP").count() == 1 # Make sure we can purge everything - recorder_mock.queue_task( - PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False) - ) + instance.queue_task(PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False)) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -698,9 +683,7 @@ async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) - assert state_attributes.count() == 0 # Make sure we can purge everything when the db is already empty - recorder_mock.queue_task( - PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False) - ) + 
instance.queue_task(PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False)) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -953,15 +936,16 @@ def _add_state_and_state_changed_event( async def test_purge_many_old_events( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test deleting old events.""" + instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) old_events_count = 5 with ( - patch.object(recorder_mock, "max_bind_vars", old_events_count), - patch.object(recorder_mock.database_engine, "max_bind_vars", old_events_count), + patch.object(instance, "max_bind_vars", old_events_count), + patch.object(instance.database_engine, "max_bind_vars", old_events_count), ): await _add_test_events(hass, old_events_count) @@ -969,70 +953,60 @@ async def test_purge_many_old_events( events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) assert events.count() == old_events_count * 6 - purge_before = dt_util.utcnow() - timedelta(days=4) + purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - states_batch_size=3, - events_batch_size=3, - ) - assert not finished - - with session_scope(hass=hass) as session: - events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) + # run purge_old_data() + finished = purge_old_data( + instance, + purge_before, + repack=False, + states_batch_size=3, + events_batch_size=3, + ) + assert not finished assert events.count() == old_events_count * 3 - # we should only have 2 groups of events left - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - states_batch_size=3, - events_batch_size=3, - ) - assert finished - - with session_scope(hass=hass) as session: - events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) + # we should only have 2 groups of events left + finished = purge_old_data( + instance, + purge_before, + repack=False, + states_batch_size=3, + events_batch_size=3, + ) + assert finished assert events.count() == old_events_count * 2 - # we should now purge everything - finished = purge_old_data( - recorder_mock, - dt_util.utcnow(), - repack=False, - states_batch_size=20, - events_batch_size=20, - ) - assert finished - - with session_scope(hass=hass) as session: - events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) + # we should now purge everything + finished = purge_old_data( + instance, + dt_util.utcnow(), + repack=False, + states_batch_size=20, + events_batch_size=20, + ) + assert finished assert events.count() == 0 async def test_purge_can_mix_legacy_and_new_format( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant ) -> None: """Test purging with legacy and new events.""" + instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await async_wait_recording_done(hass) # New databases are no longer created with the legacy events index - assert recorder_mock.use_legacy_events_index is False + assert instance.use_legacy_events_index is False def _recreate_legacy_events_index(): """Recreate the legacy events index since its no longer created on new instances.""" - migration._create_index( - recorder_mock.get_session, "states", "ix_states_event_id" - ) - 
recorder_mock.use_legacy_events_index = True + migration._create_index(instance.get_session, "states", "ix_states_event_id") + instance.use_legacy_events_index = True - await recorder_mock.async_add_executor_job(_recreate_legacy_events_index) - assert recorder_mock.use_legacy_events_index is True + await instance.async_add_executor_job(_recreate_legacy_events_index) + assert instance.use_legacy_events_index is True utcnow = dt_util.utcnow() eleven_days_ago = utcnow - timedelta(days=11) @@ -1071,65 +1045,39 @@ async def test_purge_can_mix_legacy_and_new_format( assert states_with_event_id.count() == 50 assert states_without_event_id.count() == 51 - purge_before = dt_util.utcnow() - timedelta(days=4) - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - ) - assert not finished - - with session_scope(hass=hass) as session: - states_with_event_id = session.query(States).filter( - States.event_id.is_not(None) - ) - states_without_event_id = session.query(States).filter( - States.event_id.is_(None) + purge_before = dt_util.utcnow() - timedelta(days=4) + finished = purge_old_data( + instance, + purge_before, + repack=False, ) + assert not finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 51 - - # At this point all the legacy states are gone - # and we switch methods - purge_before = dt_util.utcnow() - timedelta(days=4) - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - # Since we only allow one iteration, we won't - # check if we are finished this loop similar - # to the legacy method - assert not finished - - with session_scope(hass=hass) as session: - states_with_event_id = session.query(States).filter( - States.event_id.is_not(None) - ) - states_without_event_id = session.query(States).filter( - States.event_id.is_(None) + # At this point all the legacy states are gone + # and we switch methods + purge_before = dt_util.utcnow() - timedelta(days=4) + finished = purge_old_data( + instance, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, ) + # Since we only allow one iteration, we won't + # check if we are finished this loop similar + # to the legacy method + assert not finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 - - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - events_batch_size=100, - states_batch_size=100, - ) - assert finished - - with session_scope(hass=hass) as session: - states_with_event_id = session.query(States).filter( - States.event_id.is_not(None) - ) - states_without_event_id = session.query(States).filter( - States.event_id.is_(None) + finished = purge_old_data( + instance, + purge_before, + repack=False, + events_batch_size=100, + states_batch_size=100, ) + assert finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 _add_state_without_event_linkage( @@ -1137,53 +1085,41 @@ async def test_purge_can_mix_legacy_and_new_format( ) assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 2 - - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - ) - assert finished - - with session_scope(hass=hass) as session: - states_with_event_id = session.query(States).filter( - States.event_id.is_not(None) - ) - states_without_event_id = session.query(States).filter( - States.event_id.is_(None) + finished = purge_old_data( + instance, + purge_before, + 
repack=False, ) + assert finished # The broken state without a timestamp # does not prevent future purges. Its ignored. assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_purge_can_mix_legacy_and_new_format_with_detached_state( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_mock: Recorder, recorder_db_url: str, ) -> None: - """Test purging with legacy and new events with a detached state. + """Test purging with legacy and new events with a detached state.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + return pytest.skip("This tests disables foreign key checks on SQLite") - This tests disables foreign key checks on SQLite. - """ + instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await async_wait_recording_done(hass) # New databases are no longer created with the legacy events index - assert recorder_mock.use_legacy_events_index is False + assert instance.use_legacy_events_index is False def _recreate_legacy_events_index(): """Recreate the legacy events index since its no longer created on new instances.""" - migration._create_index( - recorder_mock.get_session, "states", "ix_states_event_id" - ) - recorder_mock.use_legacy_events_index = True + migration._create_index(instance.get_session, "states", "ix_states_event_id") + instance.use_legacy_events_index = True - await recorder_mock.async_add_executor_job(_recreate_legacy_events_index) - assert recorder_mock.use_legacy_events_index is True + await instance.async_add_executor_job(_recreate_legacy_events_index) + assert instance.use_legacy_events_index is True with session_scope(hass=hass) as session: session.execute(text("PRAGMA foreign_keys = OFF")) @@ -1253,65 +1189,39 @@ async def test_purge_can_mix_legacy_and_new_format_with_detached_state( assert states_with_event_id.count() == 52 assert states_without_event_id.count() == 51 - purge_before = dt_util.utcnow() - timedelta(days=4) - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - ) - assert not finished - - with session_scope(hass=hass) as session: - states_with_event_id = session.query(States).filter( - States.event_id.is_not(None) - ) - states_without_event_id = session.query(States).filter( - States.event_id.is_(None) + purge_before = dt_util.utcnow() - timedelta(days=4) + finished = purge_old_data( + instance, + purge_before, + repack=False, ) + assert not finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 51 - - # At this point all the legacy states are gone - # and we switch methods - purge_before = dt_util.utcnow() - timedelta(days=4) - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - # Since we only allow one iteration, we won't - # check if we are finished this loop similar - # to the legacy method - assert not finished - - with session_scope(hass=hass) as session: - states_with_event_id = session.query(States).filter( - States.event_id.is_not(None) - ) - states_without_event_id = session.query(States).filter( - States.event_id.is_(None) + # At this point all the legacy states are gone + # and we switch methods + purge_before = dt_util.utcnow() - timedelta(days=4) + finished = purge_old_data( + instance, + purge_before, + repack=False, + events_batch_size=1, + 
states_batch_size=1, ) + # Since we only allow one iteration, we won't + # check if we are finished this loop similar + # to the legacy method + assert not finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 - - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - events_batch_size=100, - states_batch_size=100, - ) - assert finished - - with session_scope(hass=hass) as session: - states_with_event_id = session.query(States).filter( - States.event_id.is_not(None) - ) - states_without_event_id = session.query(States).filter( - States.event_id.is_(None) + finished = purge_old_data( + instance, + purge_before, + repack=False, + events_batch_size=100, + states_batch_size=100, ) + assert finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 _add_state_without_event_linkage( @@ -1319,21 +1229,12 @@ async def test_purge_can_mix_legacy_and_new_format_with_detached_state( ) assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 2 - - finished = purge_old_data( - recorder_mock, - purge_before, - repack=False, - ) - assert finished - - with session_scope(hass=hass) as session: - states_with_event_id = session.query(States).filter( - States.event_id.is_not(None) - ) - states_without_event_id = session.query(States).filter( - States.event_id.is_(None) + finished = purge_old_data( + instance, + purge_before, + repack=False, ) + assert finished # The broken state without a timestamp # does not prevent future purges. Its ignored. assert states_with_event_id.count() == 0 @@ -1341,9 +1242,11 @@ async def test_purge_can_mix_legacy_and_new_format_with_detached_state( async def test_purge_entities_keep_days( - hass: HomeAssistant, recorder_mock: Recorder + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, ) -> None: """Test purging states with an entity filter and keep_days.""" + instance = await async_setup_recorder_instance(hass, {}) await async_attach_db_engine(hass) await hass.async_block_till_done() @@ -1367,7 +1270,7 @@ async def test_purge_entities_keep_days( hass.states.async_set("sensor.keep", "now") await async_recorder_block_till_done(hass) - states = await recorder_mock.async_add_executor_job( + states = await instance.async_add_executor_job( get_significant_states, hass, one_month_ago, @@ -1378,7 +1281,7 @@ async def test_purge_entities_keep_days( assert len(states["sensor.purge"]) == 3 await hass.services.async_call( - RECORDER_DOMAIN, + recorder.DOMAIN, SERVICE_PURGE_ENTITIES, { "entity_id": "sensor.purge", @@ -1388,7 +1291,7 @@ async def test_purge_entities_keep_days( await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) - states = await recorder_mock.async_add_executor_job( + states = await instance.async_add_executor_job( get_significant_states, hass, one_month_ago, @@ -1399,7 +1302,7 @@ async def test_purge_entities_keep_days( assert len(states["sensor.purge"]) == 1 await hass.services.async_call( - RECORDER_DOMAIN, + recorder.DOMAIN, SERVICE_PURGE_ENTITIES, { "entity_id": "sensor.purge", @@ -1408,7 +1311,7 @@ async def test_purge_entities_keep_days( await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) - states = await recorder_mock.async_add_executor_job( + states = await instance.async_add_executor_job( get_significant_states, hass, one_month_ago, diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index bdf39c5ef4a..7d8bc6e3415 
100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ -1,8 +1,7 @@ """The tests for sensor recorder platform.""" from datetime import timedelta -from typing import Any -from unittest.mock import ANY, Mock, patch +from unittest.mock import patch import pytest from sqlalchemy import select @@ -16,21 +15,17 @@ from homeassistant.components.recorder.models import ( ) from homeassistant.components.recorder.statistics import ( STATISTIC_UNIT_TO_UNIT_CONVERTER, - PlatformCompiledStatistics, _generate_max_mean_min_statistic_in_sub_period_stmt, _generate_statistics_at_time_stmt, _generate_statistics_during_period_stmt, async_add_external_statistics, async_import_statistics, - async_list_statistic_ids, get_last_short_term_statistics, get_last_statistics, get_latest_short_term_statistics_with_session, get_metadata, - get_metadata_with_session, get_short_term_statistics_run_cache, list_statistic_ids, - validate_statistics, ) from homeassistant.components.recorder.table_managers.statistics_meta import ( _generate_get_metadata_stmt, @@ -46,20 +41,17 @@ import homeassistant.util.dt as dt_util from .common import ( assert_dict_of_states_equal_without_context_and_last_changed, async_record_states, - async_recorder_block_till_done, async_wait_recording_done, do_adhoc_statistics, - get_start_time, statistics_during_period, ) -from tests.common import MockPlatform, mock_platform from tests.typing import RecorderInstanceGenerator, WebSocketGenerator @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -69,15 +61,6 @@ def setup_recorder(recorder_mock: Recorder) -> None: """Set up recorder.""" -async def _setup_mock_domain( - hass: HomeAssistant, - platform: Any | None = None, # There's no RecorderPlatform class yet -) -> None: - """Set up a mock domain.""" - mock_platform(hass, "some_domain.recorder", platform or MockPlatform()) - assert await async_setup_component(hass, "some_domain", {}) - - def test_converters_align_with_sensor() -> None: """Ensure STATISTIC_UNIT_TO_UNIT_CONVERTER is aligned with UNIT_CONVERTERS.""" for converter in UNIT_CONVERTERS.values(): @@ -310,17 +293,14 @@ def mock_sensor_statistics(): } def get_fake_stats(_hass, session, start, _end): - instance = recorder.get_instance(_hass) return statistics.PlatformCompiledStatistics( [ sensor_stats("sensor.test1", start), sensor_stats("sensor.test2", start), sensor_stats("sensor.test3", start), ], - get_metadata_with_session( - instance, - session, - statistic_ids={"sensor.test1", "sensor.test2", "sensor.test3"}, + get_metadata( + _hass, statistic_ids={"sensor.test1", "sensor.test2", "sensor.test3"} ), ) @@ -358,7 +338,7 @@ async def test_compile_periodic_statistics_exception( """Test exception handling when compiling periodic statistics.""" await async_setup_component(hass, "sensor", {}) - now = get_start_time(dt_util.utcnow()) + now = dt_util.utcnow() do_adhoc_statistics(hass, start=now) do_adhoc_statistics(hass, start=now + timedelta(minutes=5)) await async_wait_recording_done(hass) @@ -2488,162 +2468,3 @@ async def test_change_with_none( types={"change"}, ) assert stats == {} - - -async def test_recorder_platform_with_statistics( - hass: HomeAssistant, - setup_recorder: None, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test recorder platform.""" - instance = recorder.get_instance(hass) - recorder_data = hass.data["recorder"] - 
assert not recorder_data.recorder_platforms - - def _mock_compile_statistics(*args: Any) -> PlatformCompiledStatistics: - return PlatformCompiledStatistics([], {}) - - def _mock_list_statistic_ids(*args: Any, **kwargs: Any) -> dict: - return {} - - def _mock_validate_statistics(*args: Any) -> dict: - return {} - - recorder_platform = Mock( - compile_statistics=Mock(wraps=_mock_compile_statistics), - list_statistic_ids=Mock(wraps=_mock_list_statistic_ids), - update_statistics_issues=Mock(), - validate_statistics=Mock(wraps=_mock_validate_statistics), - ) - - await _setup_mock_domain(hass, recorder_platform) - - # Wait for the sensor recorder platform to be added - await async_recorder_block_till_done(hass) - assert recorder_data.recorder_platforms == {"some_domain": recorder_platform} - - recorder_platform.compile_statistics.assert_not_called() - recorder_platform.list_statistic_ids.assert_not_called() - recorder_platform.update_statistics_issues.assert_not_called() - recorder_platform.validate_statistics.assert_not_called() - - # Test compile statistics + update statistics issues - # Issues are updated hourly when minutes = 50, trigger one hour later to make - # sure statistics is not suppressed by an existing row in StatisticsRuns - zero = get_start_time(dt_util.utcnow()).replace(minute=50) + timedelta(hours=1) - do_adhoc_statistics(hass, start=zero) - await async_wait_recording_done(hass) - - recorder_platform.compile_statistics.assert_called_once_with( - hass, ANY, zero, zero + timedelta(minutes=5) - ) - recorder_platform.update_statistics_issues.assert_called_once_with(hass, ANY) - recorder_platform.list_statistic_ids.assert_not_called() - recorder_platform.validate_statistics.assert_not_called() - - # Test list statistic IDs - await async_list_statistic_ids(hass) - recorder_platform.compile_statistics.assert_called_once() - recorder_platform.list_statistic_ids.assert_called_once_with( - hass, statistic_ids=None, statistic_type=None - ) - recorder_platform.update_statistics_issues.assert_called_once() - recorder_platform.validate_statistics.assert_not_called() - - # Test validate statistics - await instance.async_add_executor_job( - validate_statistics, - hass, - ) - recorder_platform.compile_statistics.assert_called_once() - recorder_platform.list_statistic_ids.assert_called_once() - recorder_platform.update_statistics_issues.assert_called_once() - recorder_platform.validate_statistics.assert_called_once_with(hass) - - -async def test_recorder_platform_without_statistics( - hass: HomeAssistant, - setup_recorder: None, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test recorder platform.""" - recorder_data = hass.data["recorder"] - assert recorder_data.recorder_platforms == {} - - await _setup_mock_domain(hass) - - # Wait for the sensor recorder platform to be added - await async_recorder_block_till_done(hass) - assert recorder_data.recorder_platforms == {} - - -@pytest.mark.parametrize( - "supported_methods", - [ - ("compile_statistics",), - ("list_statistic_ids",), - ("update_statistics_issues",), - ("validate_statistics",), - ], -) -async def test_recorder_platform_with_partial_statistics_support( - hass: HomeAssistant, - setup_recorder: None, - caplog: pytest.LogCaptureFixture, - supported_methods: tuple[str, ...], -) -> None: - """Test recorder platform.""" - instance = recorder.get_instance(hass) - recorder_data = hass.data["recorder"] - assert not recorder_data.recorder_platforms - - def _mock_compile_statistics(*args: Any) -> PlatformCompiledStatistics: - return 
PlatformCompiledStatistics([], {}) - - def _mock_list_statistic_ids(*args: Any, **kwargs: Any) -> dict: - return {} - - def _mock_validate_statistics(*args: Any) -> dict: - return {} - - mock_impl = { - "compile_statistics": _mock_compile_statistics, - "list_statistic_ids": _mock_list_statistic_ids, - "update_statistics_issues": None, - "validate_statistics": _mock_validate_statistics, - } - - kwargs = {meth: Mock(wraps=mock_impl[meth]) for meth in supported_methods} - - recorder_platform = Mock( - spec=supported_methods, - **kwargs, - ) - - await _setup_mock_domain(hass, recorder_platform) - - # Wait for the sensor recorder platform to be added - await async_recorder_block_till_done(hass) - assert recorder_data.recorder_platforms == {"some_domain": recorder_platform} - - for meth in supported_methods: - getattr(recorder_platform, meth).assert_not_called() - - # Test compile statistics + update statistics issues - # Issues are updated hourly when minutes = 50, trigger one hour later to make - # sure statistics is not suppressed by an existing row in StatisticsRuns - zero = get_start_time(dt_util.utcnow()).replace(minute=50) + timedelta(hours=1) - do_adhoc_statistics(hass, start=zero) - await async_wait_recording_done(hass) - - # Test list statistic IDs - await async_list_statistic_ids(hass) - - # Test validate statistics - await instance.async_add_executor_job( - validate_statistics, - hass, - ) - - for meth in supported_methods: - getattr(recorder_platform, meth).assert_called_once() diff --git a/tests/components/recorder/test_statistics_v23_migration.py b/tests/components/recorder/test_statistics_v23_migration.py index 1f9be0cabee..af784692612 100644 --- a/tests/components/recorder/test_statistics_v23_migration.py +++ b/tests/components/recorder/test_statistics_v23_migration.py @@ -15,34 +15,32 @@ from unittest.mock import patch import pytest from homeassistant.components import recorder -from homeassistant.components.recorder import get_instance +from homeassistant.components.recorder import SQLITE_URL_PREFIX, get_instance from homeassistant.components.recorder.util import session_scope +from homeassistant.helpers import recorder as recorder_helper +from homeassistant.setup import setup_component import homeassistant.util.dt as dt_util from .common import ( CREATE_ENGINE_TARGET, - async_wait_recording_done, create_engine_test_for_schema_version_postfix, get_schema_module_path, + wait_recording_done, ) -from tests.common import async_test_home_assistant -from tests.typing import RecorderInstanceGenerator +from tests.common import get_test_home_assistant SCHEMA_VERSION_POSTFIX = "23_with_newer_columns" SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX) -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.parametrize("persistent_database", [True]) -async def test_delete_duplicates( - async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture -) -> None: - """Test removal of duplicated statistics. +def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None: + """Test removal of duplicated statistics.""" + test_dir = tmp_path.joinpath("sqlite") + test_dir.mkdir() + test_db_file = test_dir.joinpath("test_run_info.db") + dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - The test only works with SQLite. 
- """ importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -168,9 +166,6 @@ async def test_delete_duplicates( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), - patch.object( - recorder.migration, "non_live_data_migration_needed", return_value=False - ), patch( CREATE_ENGINE_TARGET, new=partial( @@ -178,58 +173,57 @@ async def test_delete_duplicates( schema_version_postfix=SCHEMA_VERSION_POSTFIX, ), ), + get_test_home_assistant() as hass, ): - async with async_test_home_assistant() as hass, async_test_recorder(hass): - get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) + recorder_helper.async_initialize_recorder(hass) + setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) + wait_recording_done(hass) + wait_recording_done(hass) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsMeta.from_meta( - external_energy_metadata_1 - ) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta( - external_energy_metadata_2 - ) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata) - ) - with session_scope(hass=hass) as session: - for stat in external_energy_statistics_1: - session.add(recorder.db_schema.Statistics.from_stats(1, stat)) - for stat in external_energy_statistics_2: - session.add(recorder.db_schema.Statistics.from_stats(2, stat)) - for stat in external_co2_statistics: - session.add(recorder.db_schema.Statistics.from_stats(3, stat)) + with session_scope(hass=hass) as session: + session.add( + recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1) + ) + session.add( + recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2) + ) + session.add( + recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata) + ) + with session_scope(hass=hass) as session: + for stat in external_energy_statistics_1: + session.add(recorder.db_schema.Statistics.from_stats(1, stat)) + for stat in external_energy_statistics_2: + session.add(recorder.db_schema.Statistics.from_stats(2, stat)) + for stat in external_co2_statistics: + session.add(recorder.db_schema.Statistics.from_stats(3, stat)) - await hass.async_stop() + hass.stop() # Test that the duplicates are removed during migration from schema 23 - async with async_test_home_assistant() as hass, async_test_recorder(hass): - await hass.async_start() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) - await hass.async_stop() + with get_test_home_assistant() as hass: + recorder_helper.async_initialize_recorder(hass) + setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + hass.start() + wait_recording_done(hass) + wait_recording_done(hass) + hass.stop() assert "Deleted 2 duplicated statistics rows" in caplog.text assert "Found non identical" not in caplog.text assert "Found duplicated" not in caplog.text -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.parametrize("persistent_database", [True]) -async def test_delete_duplicates_many( - async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture +def test_delete_duplicates_many( + caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: - """Test removal of duplicated statistics. 
+ """Test removal of duplicated statistics.""" + test_dir = tmp_path.joinpath("sqlite") + test_dir.mkdir() + test_db_file = test_dir.joinpath("test_run_info.db") + dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - The test only works with SQLite. - """ importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -355,9 +349,6 @@ async def test_delete_duplicates_many( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), - patch.object( - recorder.migration, "non_live_data_migration_needed", return_value=False - ), patch( CREATE_ENGINE_TARGET, new=partial( @@ -365,48 +356,48 @@ async def test_delete_duplicates_many( schema_version_postfix=SCHEMA_VERSION_POSTFIX, ), ), + get_test_home_assistant() as hass, ): - async with async_test_home_assistant() as hass, async_test_recorder(hass): - get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) + recorder_helper.async_initialize_recorder(hass) + setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) + wait_recording_done(hass) + wait_recording_done(hass) - with session_scope(hass=hass) as session: + with session_scope(hass=hass) as session: + session.add( + recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1) + ) + session.add( + recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2) + ) + session.add( + recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata) + ) + with session_scope(hass=hass) as session: + for stat in external_energy_statistics_1: + session.add(recorder.db_schema.Statistics.from_stats(1, stat)) + for _ in range(3000): session.add( - recorder.db_schema.StatisticsMeta.from_meta( - external_energy_metadata_1 + recorder.db_schema.Statistics.from_stats( + 1, external_energy_statistics_1[-1] ) ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta( - external_energy_metadata_2 - ) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata) - ) - with session_scope(hass=hass) as session: - for stat in external_energy_statistics_1: - session.add(recorder.db_schema.Statistics.from_stats(1, stat)) - for _ in range(3000): - session.add( - recorder.db_schema.Statistics.from_stats( - 1, external_energy_statistics_1[-1] - ) - ) - for stat in external_energy_statistics_2: - session.add(recorder.db_schema.Statistics.from_stats(2, stat)) - for stat in external_co2_statistics: - session.add(recorder.db_schema.Statistics.from_stats(3, stat)) + for stat in external_energy_statistics_2: + session.add(recorder.db_schema.Statistics.from_stats(2, stat)) + for stat in external_co2_statistics: + session.add(recorder.db_schema.Statistics.from_stats(3, stat)) - await hass.async_stop() + hass.stop() # Test that the duplicates are removed during migration from schema 23 - async with async_test_home_assistant() as hass, async_test_recorder(hass): - await hass.async_start() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) - await hass.async_stop() + with get_test_home_assistant() as hass: + recorder_helper.async_initialize_recorder(hass) + setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + hass.start() + wait_recording_done(hass) + wait_recording_done(hass) + hass.stop() assert "Deleted 3002 duplicated statistics rows" in caplog.text assert "Found non identical" not in caplog.text @@ 
-414,18 +405,15 @@ async def test_delete_duplicates_many( @pytest.mark.freeze_time("2021-08-01 00:00:00+00:00") -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.parametrize("persistent_database", [True]) -async def test_delete_duplicates_non_identical( - async_test_recorder: RecorderInstanceGenerator, - caplog: pytest.LogCaptureFixture, - tmp_path: Path, +def test_delete_duplicates_non_identical( + caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: - """Test removal of duplicated statistics. + """Test removal of duplicated statistics.""" + test_dir = tmp_path.joinpath("sqlite") + test_dir.mkdir() + test_db_file = test_dir.joinpath("test_run_info.db") + dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - The test only works with SQLite. - """ importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -521,9 +509,6 @@ async def test_delete_duplicates_non_identical( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), - patch.object( - recorder.migration, "non_live_data_migration_needed", return_value=False - ), patch( CREATE_ENGINE_TARGET, new=partial( @@ -531,40 +516,38 @@ async def test_delete_duplicates_non_identical( schema_version_postfix=SCHEMA_VERSION_POSTFIX, ), ), + get_test_home_assistant() as hass, ): - async with async_test_home_assistant() as hass, async_test_recorder(hass): - get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) + recorder_helper.async_initialize_recorder(hass) + setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) + wait_recording_done(hass) + wait_recording_done(hass) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsMeta.from_meta( - external_energy_metadata_1 - ) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta( - external_energy_metadata_2 - ) - ) - with session_scope(hass=hass) as session: - for stat in external_energy_statistics_1: - session.add(recorder.db_schema.Statistics.from_stats(1, stat)) - for stat in external_energy_statistics_2: - session.add(recorder.db_schema.Statistics.from_stats(2, stat)) + with session_scope(hass=hass) as session: + session.add( + recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1) + ) + session.add( + recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2) + ) + with session_scope(hass=hass) as session: + for stat in external_energy_statistics_1: + session.add(recorder.db_schema.Statistics.from_stats(1, stat)) + for stat in external_energy_statistics_2: + session.add(recorder.db_schema.Statistics.from_stats(2, stat)) - await hass.async_stop() + hass.stop() # Test that the duplicates are removed during migration from schema 23 - async with ( - async_test_home_assistant(config_dir=tmp_path) as hass, - async_test_recorder(hass), - ): - await hass.async_start() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) - await hass.async_stop() + with get_test_home_assistant() as hass: + hass.config.config_dir = tmp_path + recorder_helper.async_initialize_recorder(hass) + setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + hass.start() + wait_recording_done(hass) + wait_recording_done(hass) + hass.stop() assert "Deleted 2 duplicated statistics rows" in caplog.text assert 
"Deleted 1 non identical" in caplog.text @@ -573,11 +556,8 @@ async def test_delete_duplicates_non_identical( isotime = dt_util.utcnow().isoformat() backup_file_name = f".storage/deleted_statistics.{isotime}.json" - def read_backup(): - with open(hass.config.path(backup_file_name), encoding="utf8") as backup_file: - return json.load(backup_file) - - backup = await hass.async_add_executor_job(read_backup) + with open(hass.config.path(backup_file_name), encoding="utf8") as backup_file: + backup = json.load(backup_file) assert backup == [ { @@ -609,18 +589,15 @@ async def test_delete_duplicates_non_identical( ] -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -async def test_delete_duplicates_short_term( - async_test_recorder: RecorderInstanceGenerator, - caplog: pytest.LogCaptureFixture, - tmp_path: Path, +def test_delete_duplicates_short_term( + caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: - """Test removal of duplicated statistics. + """Test removal of duplicated statistics.""" + test_dir = tmp_path.joinpath("sqlite") + test_dir.mkdir() + test_db_file = test_dir.joinpath("test_run_info.db") + dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - The test only works with SQLite. - """ importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -647,9 +624,6 @@ async def test_delete_duplicates_short_term( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), - patch.object( - recorder.migration, "non_live_data_migration_needed", return_value=False - ), patch( CREATE_ENGINE_TARGET, new=partial( @@ -657,37 +631,37 @@ async def test_delete_duplicates_short_term( schema_version_postfix=SCHEMA_VERSION_POSTFIX, ), ), + get_test_home_assistant() as hass, ): - async with async_test_home_assistant() as hass, async_test_recorder(hass): - get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) + recorder_helper.async_initialize_recorder(hass) + setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) + wait_recording_done(hass) + wait_recording_done(hass) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsMeta.from_meta( - external_energy_metadata_1 - ) - ) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row) - ) - session.add( - recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row) - ) + with session_scope(hass=hass) as session: + session.add( + recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1) + ) + with session_scope(hass=hass) as session: + session.add( + recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row) + ) + session.add( + recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row) + ) - await hass.async_stop() + hass.stop() # Test that the duplicates are removed during migration from schema 23 - async with ( - async_test_home_assistant(config_dir=tmp_path) as hass, - async_test_recorder(hass), - ): - await hass.async_start() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) - await hass.async_stop() + with get_test_home_assistant() as hass: + hass.config.config_dir = tmp_path + recorder_helper.async_initialize_recorder(hass) + 
setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + hass.start() + wait_recording_done(hass) + wait_recording_done(hass) + hass.stop() assert "duplicated statistics rows" not in caplog.text assert "Found non identical" not in caplog.text diff --git a/tests/components/recorder/test_system_health.py b/tests/components/recorder/test_system_health.py index 0efaa82e5e5..fbcefa0b13e 100644 --- a/tests/components/recorder/test_system_health.py +++ b/tests/components/recorder/test_system_health.py @@ -15,15 +15,13 @@ from tests.common import get_system_health_info from tests.typing import RecorderInstanceGenerator -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_recorder_system_health( recorder_mock: Recorder, hass: HomeAssistant, recorder_db_url: str ) -> None: - """Test recorder system health. - - This test is specific for SQLite. - """ + """Test recorder system health.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test is specific for SQLite + return assert await async_setup_component(hass, "system_health", {}) await async_wait_recording_done(hass) @@ -102,17 +100,15 @@ async def test_recorder_system_health_db_url_missing_host( } -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_recorder_system_health_crashed_recorder_runs_table( async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test recorder system health with crashed recorder runs table. - - This test is specific for SQLite. - """ + """Test recorder system health with crashed recorder runs table.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test is specific for SQLite + return with patch( "homeassistant.components.recorder.table_managers.recorder_runs.RecorderRunsManager.load_from_db" diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 4904bdecc4d..d72978c57bb 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -1,12 +1,10 @@ """Test util methods.""" -from contextlib import AbstractContextManager, nullcontext as does_not_raise from datetime import UTC, datetime, timedelta import os from pathlib import Path import sqlite3 import threading -from typing import Any from unittest.mock import MagicMock, Mock, patch import pytest @@ -18,11 +16,7 @@ from sqlalchemy.sql.lambdas import StatementLambdaElement from homeassistant.components import recorder from homeassistant.components.recorder import Recorder, util -from homeassistant.components.recorder.const import ( - DOMAIN, - SQLITE_URL_PREFIX, - SupportedDialect, -) +from homeassistant.components.recorder.const import DOMAIN, SQLITE_URL_PREFIX from homeassistant.components.recorder.db_schema import RecorderRuns from homeassistant.components.recorder.history.modern import ( _get_single_entity_start_time_stmt, @@ -32,15 +26,9 @@ from homeassistant.components.recorder.models import ( process_timestamp, ) from homeassistant.components.recorder.util import ( - MIN_VERSION_SQLITE, - RETRYABLE_MYSQL_ERRORS, - UPCOMING_MIN_VERSION_SQLITE, - database_job_retry_wrapper, end_incomplete_runs, is_second_sunday, resolve_period, - retryable_database_job, - retryable_database_job_method, session_scope, ) from homeassistant.const import EVENT_HOMEASSISTANT_STOP @@ -60,7 +48,7 @@ from tests.typing import RecorderInstanceGenerator 
@pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -128,18 +116,12 @@ def test_validate_or_move_away_sqlite_database( assert util.validate_or_move_away_sqlite_database(dburl) is True -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_last_run_was_recently_clean( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, tmp_path: Path ) -> None: - """Test we can check if the last recorder run was recently clean. - - This is only implemented for SQLite. - """ + """Test we can check if the last recorder run was recently clean.""" config = { + recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db"), recorder.CONF_COMMIT_INTERVAL: 1, } async with async_test_home_assistant() as hass: @@ -235,9 +217,9 @@ def test_setup_connection_for_dialect_mysql(mysql_version) -> None: @pytest.mark.parametrize( "sqlite_version", - [str(UPCOMING_MIN_VERSION_SQLITE)], + ["3.31.0"], ) -def test_setup_connection_for_dialect_sqlite(sqlite_version: str) -> None: +def test_setup_connection_for_dialect_sqlite(sqlite_version) -> None: """Test setting up the connection for a sqlite dialect.""" instance_mock = MagicMock() execute_args = [] @@ -288,10 +270,10 @@ def test_setup_connection_for_dialect_sqlite(sqlite_version: str) -> None: @pytest.mark.parametrize( "sqlite_version", - [str(UPCOMING_MIN_VERSION_SQLITE)], + ["3.31.0"], ) def test_setup_connection_for_dialect_sqlite_zero_commit_interval( - sqlite_version: str, + sqlite_version, ) -> None: """Test setting up the connection for a sqlite dialect with a zero commit interval.""" instance_mock = MagicMock(commit_interval=0) @@ -515,6 +497,10 @@ def test_supported_pgsql(caplog: pytest.LogCaptureFixture, pgsql_version) -> Non "2.0.0", "Version 2.0.0 of SQLite is not supported; minimum supported version is 3.31.0.", ), + ( + "dogs", + "Version dogs of SQLite is not supported; minimum supported version is 3.31.0.", + ), ], ) def test_fail_outdated_sqlite( @@ -733,72 +719,14 @@ async def test_no_issue_for_mariadb_with_MDEV_25020( assert database_engine.optimizer.slow_range_in_select is False -async def test_issue_for_old_sqlite( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test we create and delete an issue for old sqlite versions.""" - instance_mock = MagicMock() - instance_mock.hass = hass - execute_args = [] - close_mock = MagicMock() - min_version = str(MIN_VERSION_SQLITE) - - def execute_mock(statement): - nonlocal execute_args - execute_args.append(statement) - - def fetchall_mock(): - nonlocal execute_args - if execute_args[-1] == "SELECT sqlite_version()": - return [[min_version]] - return None - - def _make_cursor_mock(*_): - return MagicMock(execute=execute_mock, close=close_mock, fetchall=fetchall_mock) - - dbapi_connection = MagicMock(cursor=_make_cursor_mock) - - database_engine = await hass.async_add_executor_job( - util.setup_connection_for_dialect, - instance_mock, - "sqlite", - dbapi_connection, - True, - ) - await hass.async_block_till_done() - - issue = issue_registry.async_get_issue(DOMAIN, "sqlite_too_old") - assert issue is not None - assert issue.translation_placeholders == { - "min_version": 
str(UPCOMING_MIN_VERSION_SQLITE), - "server_version": min_version, - } - - min_version = str(UPCOMING_MIN_VERSION_SQLITE) - database_engine = await hass.async_add_executor_job( - util.setup_connection_for_dialect, - instance_mock, - "sqlite", - dbapi_connection, - True, - ) - await hass.async_block_till_done() - - issue = issue_registry.async_get_issue(DOMAIN, "sqlite_too_old") - assert issue is None - assert database_engine is not None - - -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_basic_sanity_check( hass: HomeAssistant, setup_recorder: None, recorder_db_url: str ) -> None: - """Test the basic sanity checks with a missing table. + """Test the basic sanity checks with a missing table.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test is specific for SQLite + return - This test is specific for SQLite. - """ cursor = util.get_instance(hass).engine.raw_connection().cursor() assert util.basic_sanity_check(cursor) is True @@ -809,18 +737,17 @@ async def test_basic_sanity_check( util.basic_sanity_check(cursor) -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_combined_checks( hass: HomeAssistant, setup_recorder: None, caplog: pytest.LogCaptureFixture, recorder_db_url: str, ) -> None: - """Run Checks on the open database. + """Run Checks on the open database.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test is specific for SQLite + return - This test is specific for SQLite. - """ instance = util.get_instance(hass) instance.db_retry_wait = 0 @@ -902,15 +829,14 @@ async def test_end_incomplete_runs( assert "Ended unfinished session" in caplog.text -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") async def test_periodic_db_cleanups( hass: HomeAssistant, setup_recorder: None, recorder_db_url: str ) -> None: - """Test periodic db cleanups. + """Test periodic db cleanups.""" + if recorder_db_url.startswith(("mysql://", "postgresql://")): + # This test is specific for SQLite + return - This test is specific for SQLite. - """ with patch.object(util.get_instance(hass).engine, "connect") as connect_mock: util.periodic_db_cleanups(util.get_instance(hass)) @@ -921,22 +847,17 @@ async def test_periodic_db_cleanups( assert str(text_obj) == "PRAGMA wal_checkpoint(TRUNCATE);" -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.parametrize("persistent_database", [True]) async def test_write_lock_db( async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + tmp_path: Path, ) -> None: - """Test database write lock. + """Test database write lock.""" - This is only supported for SQLite. - - Use file DB, in memory DB cannot do write locks. - """ - - config = {recorder.CONF_DB_URL: recorder_db_url + "?timeout=0.1"} + # Use file DB, in memory DB cannot do write locks. 
+ config = { + recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db?timeout=0.1") + } instance = await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() @@ -1000,7 +921,7 @@ async def test_execute_stmt_lambda_element( all_calls = 0 class MockExecutor: - def __init__(self, stmt) -> None: + def __init__(self, stmt): assert isinstance(stmt, StatementLambdaElement) def all(self): @@ -1127,129 +1048,3 @@ async def test_resolve_period(hass: HomeAssistant) -> None: } } ) == (now - timedelta(hours=1, minutes=25), now - timedelta(minutes=25)) - - -NonRetryable = OperationalError(None, None, BaseException()) -Retryable = OperationalError(None, None, BaseException(RETRYABLE_MYSQL_ERRORS[0], "")) - - -@pytest.mark.parametrize( - ("side_effect", "dialect", "retval", "expected_result", "num_calls"), - [ - (None, SupportedDialect.MYSQL, None, does_not_raise(), 1), - (ValueError, SupportedDialect.MYSQL, None, pytest.raises(ValueError), 1), - ( - NonRetryable, - SupportedDialect.MYSQL, - None, - pytest.raises(OperationalError), - 1, - ), - (Retryable, SupportedDialect.MYSQL, None, pytest.raises(OperationalError), 5), - ( - NonRetryable, - SupportedDialect.SQLITE, - None, - pytest.raises(OperationalError), - 1, - ), - (Retryable, SupportedDialect.SQLITE, None, pytest.raises(OperationalError), 1), - ], -) -def test_database_job_retry_wrapper( - side_effect: Any, - dialect: str, - retval: Any, - expected_result: AbstractContextManager, - num_calls: int, -) -> None: - """Test database_job_retry_wrapper.""" - - instance = Mock() - instance.db_retry_wait = 0 - instance.engine.dialect.name = dialect - mock_job = Mock(side_effect=side_effect) - - @database_job_retry_wrapper("test", 5) - def job(instance, *args, **kwargs) -> None: - mock_job() - return retval - - with expected_result: - assert job(instance) == retval - - assert len(mock_job.mock_calls) == num_calls - - -@pytest.mark.parametrize( - ("side_effect", "dialect", "retval", "expected_result"), - [ - (None, SupportedDialect.MYSQL, False, does_not_raise()), - (None, SupportedDialect.MYSQL, True, does_not_raise()), - (ValueError, SupportedDialect.MYSQL, False, pytest.raises(ValueError)), - (NonRetryable, SupportedDialect.MYSQL, True, does_not_raise()), - (Retryable, SupportedDialect.MYSQL, False, does_not_raise()), - (NonRetryable, SupportedDialect.SQLITE, True, does_not_raise()), - (Retryable, SupportedDialect.SQLITE, True, does_not_raise()), - ], -) -def test_retryable_database_job( - side_effect: Any, - retval: bool, - expected_result: AbstractContextManager, - dialect: str, -) -> None: - """Test retryable_database_job.""" - - instance = Mock() - instance.db_retry_wait = 0 - instance.engine.dialect.name = dialect - mock_job = Mock(side_effect=side_effect) - - @retryable_database_job(description="test") - def job(instance, *args, **kwargs) -> bool: - mock_job() - return retval - - with expected_result: - assert job(instance) == retval - - assert len(mock_job.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("side_effect", "dialect", "retval", "expected_result"), - [ - (None, SupportedDialect.MYSQL, False, does_not_raise()), - (None, SupportedDialect.MYSQL, True, does_not_raise()), - (ValueError, SupportedDialect.MYSQL, False, pytest.raises(ValueError)), - (NonRetryable, SupportedDialect.MYSQL, True, does_not_raise()), - (Retryable, SupportedDialect.MYSQL, False, does_not_raise()), - (NonRetryable, SupportedDialect.SQLITE, True, does_not_raise()), - (Retryable, SupportedDialect.SQLITE, True, does_not_raise()), - ], -) 
-def test_retryable_database_job_method( - side_effect: Any, - retval: bool, - expected_result: AbstractContextManager, - dialect: str, -) -> None: - """Test retryable_database_job_method.""" - - instance = Mock() - instance.db_retry_wait = 0 - instance.engine.dialect.name = dialect - mock_job = Mock(side_effect=side_effect) - - class Test: - @retryable_database_job_method(description="test") - def job(self, instance, *args, **kwargs) -> bool: - mock_job() - return retval - - test = Test() - with expected_result: - assert test.job(instance) == retval - - assert len(mock_job.mock_calls) == 1 diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index d59486b61f0..a07c63b3376 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -1,78 +1,63 @@ """The tests for recorder platform migrating data from v30.""" -from collections.abc import Callable from datetime import timedelta import importlib +from pathlib import Path import sys from unittest.mock import patch import pytest from sqlalchemy import create_engine, inspect -from sqlalchemy.exc import OperationalError, SQLAlchemyError from sqlalchemy.orm import Session from homeassistant.components import recorder -from homeassistant.components.recorder import core, migration, statistics +from homeassistant.components.recorder import SQLITE_URL_PREFIX, core, statistics from homeassistant.components.recorder.queries import select_event_type_ids from homeassistant.components.recorder.util import session_scope -from homeassistant.const import EVENT_STATE_CHANGED -from homeassistant.core import Event, EventOrigin, State +from homeassistant.core import EVENT_STATE_CHANGED, Event, EventOrigin, State +from homeassistant.helpers import recorder as recorder_helper +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from .common import async_wait_recording_done -from .conftest import instrument_migration from tests.common import async_test_home_assistant -from tests.typing import RecorderInstanceGenerator CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" -SCHEMA_MODULE_30 = "tests.components.recorder.db_schema_30" -SCHEMA_MODULE_32 = "tests.components.recorder.db_schema_32" +SCHEMA_MODULE = "tests.components.recorder.db_schema_32" -def _create_engine_test(schema_module: str) -> Callable: +def _create_engine_test(*args, **kwargs): """Test version of create_engine that initializes with old schema. This simulates an existing db with the old schema. """ - - def _create_engine_test(*args, **kwargs): - """Test version of create_engine that initializes with old schema. - - This simulates an existing db with the old schema. 
- """ - importlib.import_module(schema_module) - old_db_schema = sys.modules[schema_module] - engine = create_engine(*args, **kwargs) - old_db_schema.Base.metadata.create_all(engine) - with Session(engine) as session: - session.add( - recorder.db_schema.StatisticsRuns(start=statistics.get_start_time()) + importlib.import_module(SCHEMA_MODULE) + old_db_schema = sys.modules[SCHEMA_MODULE] + engine = create_engine(*args, **kwargs) + old_db_schema.Base.metadata.create_all(engine) + with Session(engine) as session: + session.add( + recorder.db_schema.StatisticsRuns(start=statistics.get_start_time()) + ) + session.add( + recorder.db_schema.SchemaChanges( + schema_version=old_db_schema.SCHEMA_VERSION ) - session.add( - recorder.db_schema.SchemaChanges( - schema_version=old_db_schema.SCHEMA_VERSION - ) - ) - session.commit() - return engine - - return _create_engine_test + ) + session.commit() + return engine -@pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) -@pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) -@pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) -@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage -async def test_migrate_times( - async_test_recorder: RecorderInstanceGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test we can migrate times in the events and states tables.""" - importlib.import_module(SCHEMA_MODULE_30) - old_db_schema = sys.modules[SCHEMA_MODULE_30] +async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None: + """Test we can migrate times.""" + test_dir = tmp_path.joinpath("sqlite") + test_dir.mkdir() + test_db_file = test_dir.joinpath("test_run_info.db") + dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" + + importlib.import_module(SCHEMA_MODULE) + old_db_schema = sys.modules[SCHEMA_MODULE] now = dt_util.utcnow() one_second_past = now - timedelta(seconds=1) now_timestamp = now.timestamp() @@ -109,25 +94,37 @@ async def test_migrate_times( with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), - patch.object(migration, "non_live_data_migration_needed", return_value=False), - patch.object(migration, "post_migrate_entity_ids", return_value=False), - patch.object(migration.EventsContextIDMigration, "migrate_data"), - patch.object(migration.StatesContextIDMigration, "migrate_data"), - patch.object(migration.EventTypeIDMigration, "migrate_data"), - patch.object(migration.EntityIDMigration, "migrate_data"), - patch.object(migration.EventIDPostMigration, "migrate_data"), + patch.object( + recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION + ), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_30)), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch( + "homeassistant.components.recorder.Recorder._migrate_events_context_ids", + ), + patch( + "homeassistant.components.recorder.Recorder._migrate_states_context_ids", + ), + patch( + "homeassistant.components.recorder.Recorder._migrate_event_type_ids", + ), + patch( + 
"homeassistant.components.recorder.Recorder._migrate_entity_ids", + ), + patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), + patch( + "homeassistant.components.recorder.Recorder._cleanup_legacy_states_event_ids" + ), ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): + async with async_test_home_assistant() as hass: + recorder_helper.async_initialize_recorder(hass) + assert await async_setup_component( + hass, "recorder", {"recorder": {"db_url": dburl}} + ) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -137,15 +134,15 @@ async def test_migrate_times( session.add(old_db_schema.Events.from_event(custom_event)) session.add(old_db_schema.States.from_event(state_changed_event)) - await instance.async_add_executor_job(_add_data) + await recorder.get_instance(hass).async_add_executor_job(_add_data) await hass.async_block_till_done() - await instance.async_block_till_done() + await recorder.get_instance(hass).async_block_till_done() - states_indexes = await instance.async_add_executor_job( + states_indexes = await recorder.get_instance(hass).async_add_executor_job( _get_states_index_names ) states_index_names = {index["name"] for index in states_indexes} - assert instance.use_legacy_events_index is True + assert recorder.get_instance(hass).use_legacy_events_index is True await hass.async_stop() await hass.async_block_till_done() @@ -153,16 +150,17 @@ async def test_migrate_times( assert "ix_states_event_id" in states_index_names # Test that the duplicates are removed during migration from schema 23 - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): + async with async_test_home_assistant() as hass: + recorder_helper.async_initialize_recorder(hass) + assert await async_setup_component( + hass, "recorder", {"recorder": {"db_url": dburl}} + ) await hass.async_block_till_done() # We need to wait for all the migration tasks to complete # before we can check the database. 
for _ in range(number_of_migrations): - await instance.async_block_till_done() + await recorder.get_instance(hass).async_block_till_done() await async_wait_recording_done(hass) def _get_test_data_from_db(): @@ -186,52 +184,54 @@ async def test_migrate_times( session.expunge_all() return events_result, states_result - events_result, states_result = await instance.async_add_executor_job( - _get_test_data_from_db - ) + events_result, states_result = await recorder.get_instance( + hass + ).async_add_executor_job(_get_test_data_from_db) assert len(events_result) == 1 assert events_result[0].time_fired_ts == now_timestamp - assert events_result[0].time_fired is None assert len(states_result) == 1 assert states_result[0].last_changed_ts == one_second_past_timestamp assert states_result[0].last_updated_ts == now_timestamp - assert states_result[0].last_changed is None - assert states_result[0].last_updated is None def _get_events_index_names(): with session_scope(hass=hass) as session: return inspect(session.connection()).get_indexes("events") - events_indexes = await instance.async_add_executor_job(_get_events_index_names) + events_indexes = await recorder.get_instance(hass).async_add_executor_job( + _get_events_index_names + ) events_index_names = {index["name"] for index in events_indexes} assert "ix_events_context_id_bin" in events_index_names assert "ix_events_context_id" not in events_index_names - states_indexes = await instance.async_add_executor_job(_get_states_index_names) + states_indexes = await recorder.get_instance(hass).async_add_executor_job( + _get_states_index_names + ) states_index_names = {index["name"] for index in states_indexes} - # sqlite does not support dropping foreign keys so we had to - # create a new table and copy the data over - assert "ix_states_event_id" not in states_index_names + # sqlite does not support dropping foreign keys so the + # ix_states_event_id index is not dropped in this case + # but use_legacy_events_index is still False + assert "ix_states_event_id" in states_index_names - assert instance.use_legacy_events_index is False + assert recorder.get_instance(hass).use_legacy_events_index is False await hass.async_stop() -@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_can_resume_entity_id_post_migration( - async_test_recorder: RecorderInstanceGenerator, - caplog: pytest.LogCaptureFixture, - recorder_db_url: str, + caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: """Test we resume the entity id post migration after a restart.""" - importlib.import_module(SCHEMA_MODULE_32) - old_db_schema = sys.modules[SCHEMA_MODULE_32] + test_dir = tmp_path.joinpath("sqlite") + test_dir.mkdir() + test_db_file = test_dir.joinpath("test_run_info.db") + dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" + + importlib.import_module(SCHEMA_MODULE) + old_db_schema = sys.modules[SCHEMA_MODULE] now = dt_util.utcnow() one_second_past = now - timedelta(seconds=1) mock_state = State( @@ -265,21 +265,37 @@ async def test_migrate_can_resume_entity_id_post_migration( with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), - patch.object(migration.EventIDPostMigration, "migrate_data"), - patch.object(migration, "non_live_data_migration_needed", return_value=False), - patch.object(migration, "post_migrate_entity_ids", 
return_value=False), + patch.object( + recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION + ), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch( + "homeassistant.components.recorder.Recorder._migrate_events_context_ids", + ), + patch( + "homeassistant.components.recorder.Recorder._migrate_states_context_ids", + ), + patch( + "homeassistant.components.recorder.Recorder._migrate_event_type_ids", + ), + patch( + "homeassistant.components.recorder.Recorder._migrate_entity_ids", + ), + patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), + patch( + "homeassistant.components.recorder.Recorder._cleanup_legacy_states_event_ids" + ), ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): + async with async_test_home_assistant() as hass: + recorder_helper.async_initialize_recorder(hass) + assert await async_setup_component( + hass, "recorder", {"recorder": {"db_url": dburl}} + ) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -289,15 +305,15 @@ async def test_migrate_can_resume_entity_id_post_migration( session.add(old_db_schema.Events.from_event(custom_event)) session.add(old_db_schema.States.from_event(state_changed_event)) - await instance.async_add_executor_job(_add_data) + await recorder.get_instance(hass).async_add_executor_job(_add_data) await hass.async_block_till_done() - await instance.async_block_till_done() + await recorder.get_instance(hass).async_block_till_done() - states_indexes = await instance.async_add_executor_job( + states_indexes = await recorder.get_instance(hass).async_add_executor_job( _get_states_index_names ) states_index_names = {index["name"] for index in states_indexes} - assert instance.use_legacy_events_index is True + assert recorder.get_instance(hass).use_legacy_events_index is True await hass.async_stop() await hass.async_block_till_done() @@ -305,513 +321,46 @@ async def test_migrate_can_resume_entity_id_post_migration( assert "ix_states_event_id" in states_index_names assert "ix_states_entity_id_last_updated_ts" in states_index_names - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - await hass.async_block_till_done() - - # We need to wait for all the migration tasks to complete - # before we can check the database. 
- for _ in range(number_of_migrations): - await instance.async_block_till_done() - await async_wait_recording_done(hass) - - states_indexes = await instance.async_add_executor_job(_get_states_index_names) - states_index_names = {index["name"] for index in states_indexes} - assert "ix_states_entity_id_last_updated_ts" not in states_index_names - assert "ix_states_event_id" not in states_index_names - - await hass.async_stop() - - -@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) -@pytest.mark.parametrize("enable_migrate_event_ids", [True]) -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage -async def test_migrate_can_resume_ix_states_event_id_removed( - async_test_recorder: RecorderInstanceGenerator, - caplog: pytest.LogCaptureFixture, - recorder_db_url: str, -) -> None: - """Test we resume the entity id post migration after a restart. - - This case tests the migration still happens if - ix_states_event_id is removed from the states table. - """ - importlib.import_module(SCHEMA_MODULE_32) - old_db_schema = sys.modules[SCHEMA_MODULE_32] - now = dt_util.utcnow() - one_second_past = now - timedelta(seconds=1) - mock_state = State( - "sensor.test", - "old", - {"last_reset": now.isoformat()}, - last_changed=one_second_past, - last_updated=now, - ) - state_changed_event = Event( - EVENT_STATE_CHANGED, - { - "entity_id": "sensor.test", - "old_state": None, - "new_state": mock_state, - }, - EventOrigin.local, - time_fired_timestamp=now.timestamp(), - ) - custom_event = Event( - "custom_event", - {"entity_id": "sensor.custom"}, - EventOrigin.local, - time_fired_timestamp=now.timestamp(), - ) - number_of_migrations = 5 - - def _get_event_id_foreign_keys(): - assert instance.engine is not None - return next( - ( - fk # type: ignore[misc] - for fk in inspect(instance.engine).get_foreign_keys("states") - if fk["constrained_columns"] == ["event_id"] - ), - None, - ) - - def _get_states_index_names(): - with session_scope(hass=hass) as session: - return inspect(session.connection()).get_indexes("states") - - with ( - patch.object(recorder, "db_schema", old_db_schema), - patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), - patch.object(migration.EventIDPostMigration, "migrate_data"), - patch.object(migration, "non_live_data_migration_needed", return_value=False), - patch.object(migration, "post_migrate_entity_ids", return_value=False), - patch.object(core, "StatesMeta", old_db_schema.StatesMeta), - patch.object(core, "EventTypes", old_db_schema.EventTypes), - patch.object(core, "EventData", old_db_schema.EventData), - patch.object(core, "States", old_db_schema.States), - patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - await hass.async_block_till_done() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) - - def _add_data(): - with session_scope(hass=hass) as session: - session.add(old_db_schema.Events.from_event(custom_event)) - session.add(old_db_schema.States.from_event(state_changed_event)) - - await instance.async_add_executor_job(_add_data) - await hass.async_block_till_done() - await instance.async_block_till_done() - - await instance.async_add_executor_job( - migration._drop_index, - instance.get_session, - "states", - "ix_states_event_id", + with 
patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"): + async with async_test_home_assistant() as hass: + recorder_helper.async_initialize_recorder(hass) + assert await async_setup_component( + hass, "recorder", {"recorder": {"db_url": dburl}} ) - - states_indexes = await instance.async_add_executor_job( - _get_states_index_names - ) - states_index_names = {index["name"] for index in states_indexes} - assert instance.use_legacy_events_index is True - assert ( - await instance.async_add_executor_job(_get_event_id_foreign_keys) - is not None - ) - - await hass.async_stop() - await hass.async_block_till_done() - - assert "ix_states_entity_id_last_updated_ts" in states_index_names - - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - await hass.async_block_till_done() - - # We need to wait for all the migration tasks to complete - # before we can check the database. - for _ in range(number_of_migrations): - await instance.async_block_till_done() - await async_wait_recording_done(hass) - - states_indexes = await instance.async_add_executor_job(_get_states_index_names) - states_index_names = {index["name"] for index in states_indexes} - assert instance.use_legacy_events_index is False - assert "ix_states_entity_id_last_updated_ts" not in states_index_names - assert "ix_states_event_id" not in states_index_names - assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None - - await hass.async_stop() - - -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.parametrize("enable_migrate_event_ids", [True]) -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage -async def test_out_of_disk_space_while_rebuild_states_table( - async_test_recorder: RecorderInstanceGenerator, - caplog: pytest.LogCaptureFixture, - recorder_db_url: str, -) -> None: - """Test that we can recover from out of disk space while rebuilding the states table. - - This case tests the migration still happens if - ix_states_event_id is removed from the states table. 
- """ - importlib.import_module(SCHEMA_MODULE_32) - old_db_schema = sys.modules[SCHEMA_MODULE_32] - now = dt_util.utcnow() - one_second_past = now - timedelta(seconds=1) - mock_state = State( - "sensor.test", - "old", - {"last_reset": now.isoformat()}, - last_changed=one_second_past, - last_updated=now, - ) - state_changed_event = Event( - EVENT_STATE_CHANGED, - { - "entity_id": "sensor.test", - "old_state": None, - "new_state": mock_state, - }, - EventOrigin.local, - time_fired_timestamp=now.timestamp(), - ) - custom_event = Event( - "custom_event", - {"entity_id": "sensor.custom"}, - EventOrigin.local, - time_fired_timestamp=now.timestamp(), - ) - number_of_migrations = 5 - - def _get_event_id_foreign_keys(): - assert instance.engine is not None - return next( - ( - fk # type: ignore[misc] - for fk in inspect(instance.engine).get_foreign_keys("states") - if fk["constrained_columns"] == ["event_id"] - ), - None, - ) - - def _get_states_index_names(): - with session_scope(hass=hass) as session: - return inspect(session.connection()).get_indexes("states") - - with ( - patch.object(recorder, "db_schema", old_db_schema), - patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), - patch.object(migration.EventIDPostMigration, "migrate_data"), - patch.object(migration, "non_live_data_migration_needed", return_value=False), - patch.object(migration, "post_migrate_entity_ids", return_value=False), - patch.object(core, "StatesMeta", old_db_schema.StatesMeta), - patch.object(core, "EventTypes", old_db_schema.EventTypes), - patch.object(core, "EventData", old_db_schema.EventData), - patch.object(core, "States", old_db_schema.States), - patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - await hass.async_block_till_done() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) - - def _add_data(): - with session_scope(hass=hass) as session: - session.add(old_db_schema.Events.from_event(custom_event)) - session.add(old_db_schema.States.from_event(state_changed_event)) - - await instance.async_add_executor_job(_add_data) - await hass.async_block_till_done() - await instance.async_block_till_done() - - await instance.async_add_executor_job( - migration._drop_index, - instance.get_session, - "states", - "ix_states_event_id", - ) - - states_indexes = await instance.async_add_executor_job( - _get_states_index_names - ) - states_index_names = {index["name"] for index in states_indexes} - assert instance.use_legacy_events_index is True - assert ( - await instance.async_add_executor_job(_get_event_id_foreign_keys) - is not None - ) - - await hass.async_stop() - await hass.async_block_till_done() - - assert "ix_states_entity_id_last_updated_ts" in states_index_names - - # Simulate out of disk space while rebuilding the states table by - # - patching CreateTable to raise SQLAlchemyError for SQLite - # - patching DropConstraint to raise InternalError for MySQL and PostgreSQL - with ( - patch( - "homeassistant.components.recorder.migration.CreateTable", - side_effect=SQLAlchemyError, - ), - patch( - "homeassistant.components.recorder.migration.DropConstraint", - side_effect=OperationalError( - None, None, OSError("No space left on device") - ), - ), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): await hass.async_block_till_done() # We need 
to wait for all the migration tasks to complete # before we can check the database. for _ in range(number_of_migrations): - await instance.async_block_till_done() + await recorder.get_instance(hass).async_block_till_done() await async_wait_recording_done(hass) - states_indexes = await instance.async_add_executor_job( + states_indexes = await recorder.get_instance(hass).async_add_executor_job( _get_states_index_names ) states_index_names = {index["name"] for index in states_indexes} - assert instance.use_legacy_events_index is True - assert "Error recreating SQLite table states" in caplog.text - assert await instance.async_add_executor_job(_get_event_id_foreign_keys) - - await hass.async_stop() - - # Now run it again to verify the table rebuild tries again - caplog.clear() - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - await hass.async_block_till_done() - - # We need to wait for all the migration tasks to complete - # before we can check the database. - for _ in range(number_of_migrations): - await instance.async_block_till_done() - await async_wait_recording_done(hass) - - states_indexes = await instance.async_add_executor_job(_get_states_index_names) - states_index_names = {index["name"] for index in states_indexes} - assert instance.use_legacy_events_index is False - assert "ix_states_entity_id_last_updated_ts" not in states_index_names - assert "ix_states_event_id" not in states_index_names - assert "Rebuilding SQLite table states finished" in caplog.text - assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None - - await hass.async_stop() - - -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.skip_on_db_engine(["sqlite"]) -@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) -@pytest.mark.parametrize("enable_migrate_event_ids", [True]) -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage -async def test_out_of_disk_space_while_removing_foreign_key( - async_test_recorder: RecorderInstanceGenerator, - caplog: pytest.LogCaptureFixture, - recorder_db_url: str, -) -> None: - """Test that we can recover from out of disk space while removing the foreign key. - - This case tests the migration still happens if - ix_states_event_id is removed from the states table. - - Note that the test is somewhat forced; the states.event_id foreign key constraint is - removed when migrating to schema version 46, inspecting the schema in - EventIDPostMigration.migrate_data, is not likely to fail. 
- """ - importlib.import_module(SCHEMA_MODULE_32) - old_db_schema = sys.modules[SCHEMA_MODULE_32] - now = dt_util.utcnow() - one_second_past = now - timedelta(seconds=1) - mock_state = State( - "sensor.test", - "old", - {"last_reset": now.isoformat()}, - last_changed=one_second_past, - last_updated=now, - ) - state_changed_event = Event( - EVENT_STATE_CHANGED, - { - "entity_id": "sensor.test", - "old_state": None, - "new_state": mock_state, - }, - EventOrigin.local, - time_fired_timestamp=now.timestamp(), - ) - custom_event = Event( - "custom_event", - {"entity_id": "sensor.custom"}, - EventOrigin.local, - time_fired_timestamp=now.timestamp(), - ) - number_of_migrations = 5 - - def _get_event_id_foreign_keys(): - assert instance.engine is not None - return next( - ( - fk # type: ignore[misc] - for fk in inspect(instance.engine).get_foreign_keys("states") - if fk["constrained_columns"] == ["event_id"] - ), - None, - ) - - def _get_states_index_names(): - with session_scope(hass=hass) as session: - return inspect(session.connection()).get_indexes("states") - - with ( - patch.object(recorder, "db_schema", old_db_schema), - patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), - patch.object(migration.EventIDPostMigration, "migrate_data"), - patch.object(migration, "non_live_data_migration_needed", return_value=False), - patch.object(migration, "post_migrate_entity_ids", return_value=False), - patch.object(core, "StatesMeta", old_db_schema.StatesMeta), - patch.object(core, "EventTypes", old_db_schema.EventTypes), - patch.object(core, "EventData", old_db_schema.EventData), - patch.object(core, "States", old_db_schema.States), - patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - await hass.async_block_till_done() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) - - def _add_data(): - with session_scope(hass=hass) as session: - session.add(old_db_schema.Events.from_event(custom_event)) - session.add(old_db_schema.States.from_event(state_changed_event)) - - await instance.async_add_executor_job(_add_data) - await hass.async_block_till_done() - await instance.async_block_till_done() - - await instance.async_add_executor_job( - migration._drop_index, - instance.get_session, - "states", - "ix_states_event_id", - ) - - states_indexes = await instance.async_add_executor_job( - _get_states_index_names - ) - states_index_names = {index["name"] for index in states_indexes} - assert instance.use_legacy_events_index is True - assert ( - await instance.async_add_executor_job(_get_event_id_foreign_keys) - is not None - ) - await hass.async_stop() await hass.async_block_till_done() assert "ix_states_entity_id_last_updated_ts" in states_index_names async with async_test_home_assistant() as hass: - with instrument_migration(hass) as instrumented_migration: - # Allow migration to start, but stall when live migration is completed - instrumented_migration.migration_stall.set() - instrumented_migration.live_migration_done_stall.clear() - - async with async_test_recorder(hass, wait_recorder=False) as instance: - await hass.async_block_till_done() - - # Wait for live migration to complete - await hass.async_add_executor_job( - instrumented_migration.live_migration_done.wait - ) - - # Simulate out of disk space while removing the foreign key from the states table by - # - patching 
DropConstraint to raise InternalError for MySQL and PostgreSQL - with ( - patch( - "homeassistant.components.recorder.migration.sqlalchemy.inspect", - side_effect=OperationalError( - None, None, OSError("No space left on device") - ), - ), - ): - instrumented_migration.live_migration_done_stall.set() - # We need to wait for all the migration tasks to complete - # before we can check the database. - for _ in range(number_of_migrations): - await instance.async_block_till_done() - await async_wait_recording_done(hass) - - states_indexes = await instance.async_add_executor_job( - _get_states_index_names - ) - states_index_names = {index["name"] for index in states_indexes} - assert instance.use_legacy_events_index is True - # The states.event_id foreign key constraint was removed when - # migration to schema version 46 - assert ( - await instance.async_add_executor_job( - _get_event_id_foreign_keys - ) - is None - ) - - await hass.async_stop() - - # Now run it again to verify the table rebuild tries again - caplog.clear() - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): + recorder_helper.async_initialize_recorder(hass) + assert await async_setup_component( + hass, "recorder", {"recorder": {"db_url": dburl}} + ) await hass.async_block_till_done() # We need to wait for all the migration tasks to complete # before we can check the database. for _ in range(number_of_migrations): - await instance.async_block_till_done() + await recorder.get_instance(hass).async_block_till_done() await async_wait_recording_done(hass) - states_indexes = await instance.async_add_executor_job(_get_states_index_names) + states_indexes = await recorder.get_instance(hass).async_add_executor_job( + _get_states_index_names + ) states_index_names = {index["name"] for index in states_indexes} - assert instance.use_legacy_events_index is False assert "ix_states_entity_id_last_updated_ts" not in states_index_names - assert "ix_states_event_id" not in states_index_names - assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None await hass.async_stop() diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index 547288d1cc3..cc187a1e6ad 100644 --- a/tests/components/recorder/test_websocket_api.py +++ b/tests/components/recorder/test_websocket_api.py @@ -3,7 +3,7 @@ import datetime from datetime import timedelta from statistics import fmean -import sys +import threading from unittest.mock import ANY, patch from freezegun import freeze_time @@ -35,21 +35,11 @@ from .common import ( async_wait_recording_done, create_engine_test, do_adhoc_statistics, - get_start_time, statistics_during_period, ) -from .conftest import InstrumentedMigration from tests.common import async_fire_time_changed -from tests.typing import RecorderInstanceGenerator, WebSocketGenerator - - -@pytest.fixture -async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, -) -> None: - """Set up recorder.""" - +from tests.typing import WebSocketGenerator DISTANCE_SENSOR_FT_ATTRIBUTES = { "device_class": "distance", @@ -156,17 +146,12 @@ async def test_statistics_during_period( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test statistics_during_period.""" - now = get_start_time(dt_util.utcnow()) + now = dt_util.utcnow() hass.config.units = US_CUSTOMARY_SYSTEM await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - 
hass.states.async_set( - "sensor.test", - 10, - attributes=POWER_SENSOR_KW_ATTRIBUTES, - timestamp=now.timestamp(), - ) + hass.states.async_set("sensor.test", 10, attributes=POWER_SENSOR_KW_ATTRIBUTES) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -614,12 +599,7 @@ async def test_statistic_during_period( } # Test we can automatically convert units - hass.states.async_set( - "sensor.test", - None, - attributes=ENERGY_SENSOR_WH_ATTRIBUTES, - timestamp=now.timestamp(), - ) + hass.states.async_set("sensor.test", None, attributes=ENERGY_SENSOR_WH_ATTRIBUTES) await client.send_json_auto_id( { "type": "recorder/statistic_during_period", @@ -830,7 +810,7 @@ async def test_statistic_during_period_partial_overlap( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, - frozen_time: datetime.datetime, + frozen_time: datetime, ) -> None: """Test statistic_during_period.""" client = await hass_ws_client() @@ -1276,13 +1256,11 @@ async def test_statistics_during_period_unit_conversion( converted_value, ) -> None: """Test statistics_during_period.""" - now = get_start_time(dt_util.utcnow()) + now = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set( - "sensor.test", state, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", state, attributes=attributes) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -1363,16 +1341,12 @@ async def test_sum_statistics_during_period_unit_conversion( converted_value, ) -> None: """Test statistics_during_period.""" - now = get_start_time(dt_util.utcnow()) + now = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set( - "sensor.test", 0, attributes=attributes, timestamp=now.timestamp() - ) - hass.states.async_set( - "sensor.test", state, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", 0, attributes=attributes) + hass.states.async_set("sensor.test", state, attributes=attributes) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -1488,7 +1462,7 @@ async def test_statistics_during_period_in_the_past( ) -> None: """Test statistics_during_period in the past.""" await hass.config.async_set_time_zone("UTC") - now = get_start_time(dt_util.utcnow()) + now = dt_util.utcnow().replace() hass.config.units = US_CUSTOMARY_SYSTEM await async_setup_component(hass, "sensor", {}) @@ -1743,7 +1717,7 @@ async def test_list_statistic_ids( unit_class, ) -> None: """Test list_statistic_ids.""" - now = get_start_time(dt_util.utcnow()) + now = dt_util.utcnow() has_mean = attributes["state_class"] == "measurement" has_sum = not has_mean @@ -1757,9 +1731,7 @@ async def test_list_statistic_ids( assert response["success"] assert response["result"] == [] - hass.states.async_set( - "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", 10, attributes=attributes) await async_wait_recording_done(hass) await client.send_json_auto_id({"type": "recorder/list_statistic_ids"}) @@ -1909,7 +1881,7 @@ async def test_list_statistic_ids_unit_change( unit_class, ) -> None: """Test list_statistic_ids.""" - now = get_start_time(dt_util.utcnow()) + now = dt_util.utcnow() has_mean = attributes["state_class"] == "measurement" has_sum = not has_mean @@ -1922,9 +1894,7 @@ async def 
test_list_statistic_ids_unit_change( assert response["success"] assert response["result"] == [] - hass.states.async_set( - "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", 10, attributes=attributes) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -1947,9 +1917,7 @@ async def test_list_statistic_ids_unit_change( ] # Change the state unit - hass.states.async_set( - "sensor.test", 10, attributes=attributes2, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", 10, attributes=attributes2) await client.send_json_auto_id({"type": "recorder/list_statistic_ids"}) response = await client.receive_json() @@ -1984,23 +1952,11 @@ async def test_validate_statistics( await assert_validation_result(client, {}) -async def test_update_statistics_issues( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test update_statistics_issues can be called.""" - - client = await hass_ws_client() - await client.send_json_auto_id({"type": "recorder/update_statistics_issues"}) - response = await client.receive_json() - assert response["success"] - assert response["result"] is None - - async def test_clear_statistics( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test removing statistics.""" - now = get_start_time(dt_util.utcnow()) + now = dt_util.utcnow() units = METRIC_SYSTEM attributes = POWER_SENSOR_KW_ATTRIBUTES @@ -2010,15 +1966,9 @@ async def test_clear_statistics( hass.config.units = units await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set( - "sensor.test1", state, attributes=attributes, timestamp=now.timestamp() - ) - hass.states.async_set( - "sensor.test2", state * 2, attributes=attributes, timestamp=now.timestamp() - ) - hass.states.async_set( - "sensor.test3", state * 3, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test1", state, attributes=attributes) + hass.states.async_set("sensor.test2", state * 2, attributes=attributes) + hass.states.async_set("sensor.test3", state * 3, attributes=attributes) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -2116,30 +2066,6 @@ async def test_clear_statistics( assert response["result"] == {"sensor.test2": expected_response["sensor.test2"]} -async def test_clear_statistics_time_out( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test removing statistics with time-out error.""" - client = await hass_ws_client() - - with ( - patch.object(recorder.tasks.ClearStatisticsTask, "run"), - patch.object(recorder.websocket_api, "CLEAR_STATISTICS_TIME_OUT", 0), - ): - await client.send_json_auto_id( - { - "type": "recorder/clear_statistics", - "statistic_ids": ["sensor.test"], - } - ) - response = await client.receive_json() - assert not response["success"] - assert response["error"] == { - "code": "timeout", - "message": "clear_statistics timed out", - } - - @pytest.mark.parametrize( ("new_unit", "new_unit_class", "new_display_unit"), [("dogs", None, "dogs"), (None, "unitless", None), ("W", "power", "kW")], @@ -2153,7 +2079,7 @@ async def test_update_statistics_metadata( new_display_unit, ) -> None: """Test removing statistics.""" - now = get_start_time(dt_util.utcnow()) + now = dt_util.utcnow() units = METRIC_SYSTEM attributes = POWER_SENSOR_KW_ATTRIBUTES | {"device_class": None} @@ -2162,9 +2088,7 
@@ async def test_update_statistics_metadata( hass.config.units = units await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set( - "sensor.test", state, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", state, attributes=attributes) await async_wait_recording_done(hass) do_adhoc_statistics(hass, period="hourly", start=now) @@ -2240,36 +2164,11 @@ async def test_update_statistics_metadata( } -async def test_update_statistics_metadata_time_out( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test update statistics metadata with time-out error.""" - client = await hass_ws_client() - - with ( - patch.object(recorder.tasks.UpdateStatisticsMetadataTask, "run"), - patch.object(recorder.websocket_api, "UPDATE_STATISTICS_METADATA_TIME_OUT", 0), - ): - await client.send_json_auto_id( - { - "type": "recorder/update_statistics_metadata", - "statistic_id": "sensor.test", - "unit_of_measurement": "dogs", - } - ) - response = await client.receive_json() - assert not response["success"] - assert response["error"] == { - "code": "timeout", - "message": "update_statistics_metadata timed out", - } - - async def test_change_statistics_unit( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test change unit of recorded statistics.""" - now = get_start_time(dt_util.utcnow()) + now = dt_util.utcnow() units = METRIC_SYSTEM attributes = POWER_SENSOR_KW_ATTRIBUTES | {"device_class": None} @@ -2278,9 +2177,7 @@ async def test_change_statistics_unit( hass.config.units = units await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set( - "sensor.test", state, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", state, attributes=attributes) await async_wait_recording_done(hass) do_adhoc_statistics(hass, period="hourly", start=now) @@ -2416,7 +2313,7 @@ async def test_change_statistics_unit_errors( caplog: pytest.LogCaptureFixture, ) -> None: """Test change unit of recorded statistics.""" - now = get_start_time(dt_util.utcnow()) + now = dt_util.utcnow() units = METRIC_SYSTEM attributes = POWER_SENSOR_KW_ATTRIBUTES | {"device_class": None} @@ -2470,9 +2367,7 @@ async def test_change_statistics_unit_errors( hass.config.units = units await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set( - "sensor.test", state, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", state, attributes=attributes) await async_wait_recording_done(hass) do_adhoc_statistics(hass, period="hourly", start=now) @@ -2562,7 +2457,7 @@ async def test_recorder_info_bad_recorder_config( client = await hass_ws_client() - with patch("homeassistant.components.recorder.migration._migrate_schema"): + with patch("homeassistant.components.recorder.migration.migrate_schema"): recorder_helper.async_initialize_recorder(hass) assert not await async_setup_component( hass, recorder.DOMAIN, {recorder.DOMAIN: config} @@ -2587,7 +2482,7 @@ async def test_recorder_info_no_instance( client = await hass_ws_client() with patch( - "homeassistant.components.recorder.basic_websocket_api.get_instance", + "homeassistant.components.recorder.websocket_api.get_instance", return_value=None, ): await client.send_json_auto_id({"type": "recorder/info"}) @@ -2598,60 +2493,70 @@ async def 
test_recorder_info_no_instance( async def test_recorder_info_migration_queue_exhausted( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - async_test_recorder: RecorderInstanceGenerator, - instrument_migration: InstrumentedMigration, + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test getting recorder status when recorder queue is exhausted.""" assert recorder.util.async_migration_in_progress(hass) is False + migration_done = threading.Event() + + real_migration = recorder.migration._apply_update + + def stalled_migration(*args): + """Make migration stall.""" + nonlocal migration_done + migration_done.wait() + return real_migration(*args) + with ( + patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), + patch("homeassistant.components.recorder.Recorder.async_periodic_statistics"), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), - patch.object( - recorder.core, "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", sys.maxsize + patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 0), + patch( + "homeassistant.components.recorder.migration._apply_update", + wraps=stalled_migration, ), ): - async with async_test_recorder( - hass, wait_recorder=False, wait_recorder_setup=False - ): - await hass.async_add_executor_job( - instrument_migration.migration_started.wait + recorder_helper.async_initialize_recorder(hass) + hass.create_task( + async_setup_component( + hass, "recorder", {"recorder": {"db_url": "sqlite://"}} ) - assert recorder.util.async_migration_in_progress(hass) is True - await recorder_helper.async_wait_recorder(hass) - hass.states.async_set("my.entity", "on", {}) - await hass.async_block_till_done() + ) + await recorder_helper.async_wait_recorder(hass) + hass.states.async_set("my.entity", "on", {}) + await hass.async_block_till_done() - # Detect queue full - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=2)) - await hass.async_block_till_done() + # Detect queue full + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=2)) + await hass.async_block_till_done() - client = await hass_ws_client() + client = await hass_ws_client() - # Check the status - await client.send_json_auto_id({"type": "recorder/info"}) - response = await client.receive_json() - assert response["success"] - assert response["result"]["migration_in_progress"] is True - assert response["result"]["recording"] is False - assert response["result"]["thread_running"] is True + # Check the status + await client.send_json_auto_id({"type": "recorder/info"}) + response = await client.receive_json() + assert response["success"] + assert response["result"]["migration_in_progress"] is True + assert response["result"]["recording"] is False + assert response["result"]["thread_running"] is True - # Let migration finish - instrument_migration.migration_stall.set() - await async_wait_recording_done(hass) + # Let migration finish + migration_done.set() + await async_wait_recording_done(hass) - # Check the status after migration finished - await client.send_json_auto_id({"type": "recorder/info"}) - response = await client.receive_json() - assert response["success"] - assert response["result"]["migration_in_progress"] is False - assert response["result"]["recording"] is True - assert response["result"]["thread_running"] is True + # Check the status after migration finished + await client.send_json_auto_id({"type": "recorder/info"}) + response 
= await client.receive_json() + assert response["success"] + assert response["result"]["migration_in_progress"] is False + assert response["result"]["recording"] is True + assert response["result"]["thread_running"] is True async def test_backup_start_no_recorder( @@ -2697,7 +2602,7 @@ async def test_get_statistics_metadata( unit_class, ) -> None: """Test get_statistics_metadata.""" - now = get_start_time(dt_util.utcnow()) + now = dt_util.utcnow() has_mean = attributes["state_class"] == "measurement" has_sum = not has_mean @@ -2776,14 +2681,10 @@ async def test_get_statistics_metadata( } ] - hass.states.async_set( - "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", 10, attributes=attributes) await async_wait_recording_done(hass) - hass.states.async_set( - "sensor.test2", 10, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test2", 10, attributes=attributes) await async_wait_recording_done(hass) await client.send_json_auto_id( diff --git a/tests/components/reddit/test_sensor.py b/tests/components/reddit/test_sensor.py index 98cf2b79db3..52dac07d621 100644 --- a/tests/components/reddit/test_sensor.py +++ b/tests/components/reddit/test_sensor.py @@ -66,7 +66,7 @@ INVALID_SORT_BY_CONFIG = { class ObjectView: """Use dict properties as attributes.""" - def __init__(self, d) -> None: + def __init__(self, d): """Set dict as internal dict.""" self.__dict__ = d diff --git a/tests/components/refoss/conftest.py b/tests/components/refoss/conftest.py index 5ded3e9489d..80b3f4d8b75 100644 --- a/tests/components/refoss/conftest.py +++ b/tests/components/refoss/conftest.py @@ -1,9 +1,9 @@ """Pytest module configuration.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/remote/test_device_action.py b/tests/components/remote/test_device_action.py index e224fcf4939..a6e890937b5 100644 --- a/tests/components/remote/test_device_action.py +++ b/tests/components/remote/test_device_action.py @@ -7,7 +7,7 @@ from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.remote import DOMAIN from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component @@ -24,6 +24,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_actions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -108,6 +114,7 @@ async def test_action( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -182,6 +189,7 @@ async def test_action_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + calls: list[ServiceCall], ) -> None: """Test for 
turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) diff --git a/tests/components/remote/test_device_condition.py b/tests/components/remote/test_device_condition.py index 6c9334aeac4..d13a0480355 100644 --- a/tests/components/remote/test_device_condition.py +++ b/tests/components/remote/test_device_condition.py @@ -20,6 +20,7 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, ) @@ -28,6 +29,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -176,7 +183,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -242,20 +249,20 @@ async def test_if_state( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_on event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "is_off event - test_event2" + assert len(calls) == 2 + assert calls[1].data["some"] == "is_off event - test_event2" @pytest.mark.usefixtures("enable_custom_integrations") @@ -263,7 +270,7 @@ async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -308,13 +315,13 @@ async def test_if_state_legacy( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_on event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_on event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -322,7 +329,7 @@ async def test_if_fires_on_for_condition( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for firing if condition is on with delay.""" point1 = dt_util.utcnow() @@ -371,26 +378,26 @@ async def test_if_fires_on_for_condition( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 # Time travel 10 secs into the future 
freezer.move_to(point2) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 # Time travel 20 secs into the future freezer.move_to(point3) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_off event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_off event - test_event1" diff --git a/tests/components/remote/test_device_trigger.py b/tests/components/remote/test_device_trigger.py index c647faba2c1..8a1a0c318d7 100644 --- a/tests/components/remote/test_device_trigger.py +++ b/tests/components/remote/test_device_trigger.py @@ -20,6 +20,7 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, ) @@ -28,6 +29,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -174,7 +181,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -260,20 +267,20 @@ async def test_if_fires_on_state_change( ] }, ) - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { + assert len(calls) == 2 + assert {calls[0].data["some"], calls[1].data["some"]} == { f"turn_off device - {entry.entity_id} - on - off - None", f"turn_on_or_off device - {entry.entity_id} - on - off - None", } hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(service_calls) == 4 - assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { + assert len(calls) == 4 + assert {calls[2].data["some"], calls[3].data["some"]} == { f"turn_on device - {entry.entity_id} - off - on - None", f"turn_on_or_off device - {entry.entity_id} - off - on - None", } @@ -284,7 +291,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -328,13 +335,13 @@ async def test_if_fires_on_state_change_legacy( ] }, ) - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - None" ) @@ -344,7 +351,7 @@ async def 
test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -390,16 +397,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await hass.async_block_till_done() assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/renault/__init__.py b/tests/components/renault/__init__.py index a7c6b314ccb..86fddfd5bac 100644 --- a/tests/components/renault/__init__.py +++ b/tests/components/renault/__init__.py @@ -10,9 +10,9 @@ from homeassistant.const import ( ATTR_IDENTIFIERS, ATTR_MANUFACTURER, ATTR_MODEL, - ATTR_MODEL_ID, ATTR_NAME, ATTR_STATE, + ATTR_SW_VERSION, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -46,7 +46,7 @@ def check_device_registry( assert registry_entry.manufacturer == expected_device[ATTR_MANUFACTURER] assert registry_entry.name == expected_device[ATTR_NAME] assert registry_entry.model == expected_device[ATTR_MODEL] - assert registry_entry.model_id == expected_device[ATTR_MODEL_ID] + assert registry_entry.sw_version == expected_device[ATTR_SW_VERSION] def check_entities( diff --git a/tests/components/renault/conftest.py b/tests/components/renault/conftest.py index 9be41eb7ba0..a5af01b504a 100644 --- a/tests/components/renault/conftest.py +++ b/tests/components/renault/conftest.py @@ -1,6 +1,5 @@ """Provide common Renault fixtures.""" -from collections.abc import Generator, Iterator import contextlib from types import MappingProxyType from typing import Any @@ -9,6 +8,7 @@ from unittest.mock import AsyncMock, patch import pytest from renault_api.kamereon import exceptions, schemas from renault_api.renault_account import RenaultAccount +from typing_extensions import Generator from homeassistant.components.renault.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntry @@ -200,7 +200,7 @@ def patch_fixtures_with_no_data(): @contextlib.contextmanager -def _patch_fixtures_with_side_effect(side_effect: Any) -> Iterator[None]: +def _patch_fixtures_with_side_effect(side_effect: Any): """Mock fixtures.""" with ( patch( diff --git a/tests/components/renault/const.py b/tests/components/renault/const.py index c552321ef97..19c40f6ec20 100644 --- a/tests/components/renault/const.py +++ b/tests/components/renault/const.py @@ -19,9 +19,9 @@ from homeassistant.const import ( ATTR_IDENTIFIERS, ATTR_MANUFACTURER, ATTR_MODEL, - ATTR_MODEL_ID, ATTR_NAME, ATTR_STATE, + ATTR_SW_VERSION, ATTR_UNIT_OF_MEASUREMENT, CONF_PASSWORD, CONF_USERNAME, @@ -74,7 +74,7 @@ MOCK_VEHICLES = { ATTR_MANUFACTURER: "Renault", ATTR_MODEL: "Zoe", ATTR_NAME: "REG-NUMBER", - ATTR_MODEL_ID: "X101VE", + ATTR_SW_VERSION: "X101VE", }, "endpoints": { "battery_status": "battery_status_charging.json", @@ -246,13 +246,7 @@ MOCK_VEHICLES = { ATTR_DEVICE_CLASS: SensorDeviceClass.ENUM, ATTR_ENTITY_ID: "sensor.reg_number_plug_state", 
ATTR_ICON: "mdi:power-plug", - ATTR_OPTIONS: [ - "unplugged", - "plugged", - "plugged_waiting_for_charge", - "plug_error", - "plug_unknown", - ], + ATTR_OPTIONS: ["unplugged", "plugged", "plug_error", "plug_unknown"], ATTR_STATE: "plugged", ATTR_UNIQUE_ID: "vf1aaaaa555777999_plug_state", }, @@ -275,7 +269,7 @@ MOCK_VEHICLES = { ATTR_MANUFACTURER: "Renault", ATTR_MODEL: "Zoe", ATTR_NAME: "REG-NUMBER", - ATTR_MODEL_ID: "X102VE", + ATTR_SW_VERSION: "X102VE", }, "endpoints": { "battery_status": "battery_status_not_charging.json", @@ -493,13 +487,7 @@ MOCK_VEHICLES = { ATTR_DEVICE_CLASS: SensorDeviceClass.ENUM, ATTR_ENTITY_ID: "sensor.reg_number_plug_state", ATTR_ICON: "mdi:power-plug-off", - ATTR_OPTIONS: [ - "unplugged", - "plugged", - "plugged_waiting_for_charge", - "plug_error", - "plug_unknown", - ], + ATTR_OPTIONS: ["unplugged", "plugged", "plug_error", "plug_unknown"], ATTR_STATE: "unplugged", ATTR_UNIQUE_ID: "vf1aaaaa555777999_plug_state", }, @@ -529,7 +517,7 @@ MOCK_VEHICLES = { ATTR_MANUFACTURER: "Renault", ATTR_MODEL: "Captur ii", ATTR_NAME: "REG-NUMBER", - ATTR_MODEL_ID: "XJB1SU", + ATTR_SW_VERSION: "XJB1SU", }, "endpoints": { "battery_status": "battery_status_charging.json", @@ -737,13 +725,7 @@ MOCK_VEHICLES = { ATTR_DEVICE_CLASS: SensorDeviceClass.ENUM, ATTR_ENTITY_ID: "sensor.reg_number_plug_state", ATTR_ICON: "mdi:power-plug", - ATTR_OPTIONS: [ - "unplugged", - "plugged", - "plugged_waiting_for_charge", - "plug_error", - "plug_unknown", - ], + ATTR_OPTIONS: ["unplugged", "plugged", "plug_error", "plug_unknown"], ATTR_STATE: "plugged", ATTR_UNIQUE_ID: "vf1aaaaa555777123_plug_state", }, @@ -773,7 +755,7 @@ MOCK_VEHICLES = { ATTR_MANUFACTURER: "Renault", ATTR_MODEL: "Captur ii", ATTR_NAME: "REG-NUMBER", - ATTR_MODEL_ID: "XJB1SU", + ATTR_SW_VERSION: "XJB1SU", }, "endpoints": { "cockpit": "cockpit_fuel.json", diff --git a/tests/components/renault/fixtures/action.set_ac_schedules.json b/tests/components/renault/fixtures/action.set_ac_schedules.json deleted file mode 100644 index 601c1f6cf2d..00000000000 --- a/tests/components/renault/fixtures/action.set_ac_schedules.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "data": { - "type": "HvacSchedule", - "id": "guid", - "attributes": { - "schedules": [ - { - "id": 1, - "activated": true, - "tuesday": { "readyAtTime": "T04:30Z" }, - "wednesday": { "readyAtTime": "T22:30Z" }, - "thursday": { "readyAtTime": "T22:00Z" }, - "friday": { "readyAtTime": "T23:30Z" }, - "saturday": { "readyAtTime": "T18:30Z" }, - "sunday": { "readyAtTime": "T12:45Z" } - } - ] - } - } -} diff --git a/tests/components/renault/fixtures/hvac_settings.json b/tests/components/renault/fixtures/hvac_settings.json deleted file mode 100644 index 8dd37e56af4..00000000000 --- a/tests/components/renault/fixtures/hvac_settings.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "data": { - "type": "Car", - "id": "VF1AAAAA555777999", - "attributes": { - "dateTime": "2020-12-24T20:00:00.000Z", - "mode": "scheduled", - "schedules": [ - { - "id": 1, - "activated": false - }, - { - "id": 2, - "activated": true, - "wednesday": { "readyAtTime": "T15:15Z" }, - "friday": { "readyAtTime": "T15:15Z" } - }, - { - "id": 3, - "activated": false, - "monday": { "readyAtTime": "T23:30Z" }, - "tuesday": { "readyAtTime": "T23:30Z" }, - "wednesday": { "readyAtTime": "T23:30Z" }, - "thursday": { "readyAtTime": "T23:30Z" }, - "friday": { "readyAtTime": "T23:30Z" }, - "saturday": { "readyAtTime": "T23:30Z" }, - "sunday": { "readyAtTime": "T23:30Z" } - }, - { - "id": 4, - "activated": false - }, - { - "id": 5, - 
"activated": false - } - ] - } - } -} diff --git a/tests/components/renault/fixtures/hvac_status.1.json b/tests/components/renault/fixtures/hvac_status.1.json index f48cbae68ae..7cbd7a9fe37 100644 --- a/tests/components/renault/fixtures/hvac_status.1.json +++ b/tests/components/renault/fixtures/hvac_status.1.json @@ -2,6 +2,6 @@ "data": { "type": "Car", "id": "VF1AAAAA555777999", - "attributes": { "externalTemperature": 8.0, "hvacStatus": "off" } + "attributes": { "externalTemperature": 8.0, "hvacStatus": 1 } } } diff --git a/tests/components/renault/fixtures/hvac_status.2.json b/tests/components/renault/fixtures/hvac_status.2.json index a2ca08a71e9..8bb4f941e06 100644 --- a/tests/components/renault/fixtures/hvac_status.2.json +++ b/tests/components/renault/fixtures/hvac_status.2.json @@ -4,7 +4,7 @@ "id": "VF1AAAAA555777999", "attributes": { "socThreshold": 30.0, - "hvacStatus": "off", + "hvacStatus": 1, "lastUpdateTime": "2020-12-03T00:00:00Z" } } diff --git a/tests/components/renault/snapshots/test_binary_sensor.ambr b/tests/components/renault/snapshots/test_binary_sensor.ambr index 7142608b977..8f49d7ef761 100644 --- a/tests/components/renault/snapshots/test_binary_sensor.ambr +++ b/tests/components/renault/snapshots/test_binary_sensor.ambr @@ -22,13 +22,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -322,13 +321,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -708,13 +706,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X101VE', 'via_device_id': None, }), ]) @@ -878,13 +875,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X102VE', 'via_device_id': None, }), ]) @@ -1306,13 +1302,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -1606,13 +1601,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -1992,13 +1986,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X101VE', 'via_device_id': None, }), ]) @@ -2162,13 +2155,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 
'sw_version': 'X102VE', 'via_device_id': None, }), ]) diff --git a/tests/components/renault/snapshots/test_button.ambr b/tests/components/renault/snapshots/test_button.ambr index e61255372c1..7fa37319b2e 100644 --- a/tests/components/renault/snapshots/test_button.ambr +++ b/tests/components/renault/snapshots/test_button.ambr @@ -22,13 +22,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -106,13 +105,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -274,13 +272,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X101VE', 'via_device_id': None, }), ]) @@ -442,13 +439,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X102VE', 'via_device_id': None, }), ]) @@ -610,13 +606,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -694,13 +689,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -862,13 +856,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X101VE', 'via_device_id': None, }), ]) @@ -1030,13 +1023,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X102VE', 'via_device_id': None, }), ]) diff --git a/tests/components/renault/snapshots/test_device_tracker.ambr b/tests/components/renault/snapshots/test_device_tracker.ambr index f90cb92cc63..61232d0268d 100644 --- a/tests/components/renault/snapshots/test_device_tracker.ambr +++ b/tests/components/renault/snapshots/test_device_tracker.ambr @@ -22,13 +22,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -107,13 +106,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ 
-192,13 +190,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X101VE', 'via_device_id': None, }), ]) @@ -234,13 +231,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X102VE', 'via_device_id': None, }), ]) @@ -319,13 +315,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -407,13 +402,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -495,13 +489,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X101VE', 'via_device_id': None, }), ]) @@ -537,13 +530,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X102VE', 'via_device_id': None, }), ]) diff --git a/tests/components/renault/snapshots/test_diagnostics.ambr b/tests/components/renault/snapshots/test_diagnostics.ambr index a2921dff35e..ae90115fcb6 100644 --- a/tests/components/renault/snapshots/test_diagnostics.ambr +++ b/tests/components/renault/snapshots/test_diagnostics.ambr @@ -22,7 +22,7 @@ }), 'hvac_status': dict({ 'externalTemperature': 8.0, - 'hvacStatus': 'off', + 'hvacStatus': 1, }), 'res_state': dict({ }), @@ -227,7 +227,7 @@ }), 'hvac_status': dict({ 'externalTemperature': 8.0, - 'hvacStatus': 'off', + 'hvacStatus': 1, }), 'res_state': dict({ }), diff --git a/tests/components/renault/snapshots/test_select.ambr b/tests/components/renault/snapshots/test_select.ambr index 9974e21be75..30181fd3b9c 100644 --- a/tests/components/renault/snapshots/test_select.ambr +++ b/tests/components/renault/snapshots/test_select.ambr @@ -22,13 +22,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -64,13 +63,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -161,13 +159,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X101VE', 'via_device_id': None, }), ]) @@ -258,13 +255,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X102VE', 
'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X102VE', 'via_device_id': None, }), ]) @@ -355,13 +351,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -397,13 +392,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -494,13 +488,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X101VE', 'via_device_id': None, }), ]) @@ -591,13 +584,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X102VE', 'via_device_id': None, }), ]) diff --git a/tests/components/renault/snapshots/test_sensor.ambr b/tests/components/renault/snapshots/test_sensor.ambr index b092222c9f3..1ae033101d4 100644 --- a/tests/components/renault/snapshots/test_sensor.ambr +++ b/tests/components/renault/snapshots/test_sensor.ambr @@ -22,13 +22,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -332,13 +331,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -494,7 +492,6 @@ 'options': list([ 'unplugged', 'plugged', - 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -922,7 +919,6 @@ 'options': list([ 'unplugged', 'plugged', - 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -1089,13 +1085,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X101VE', 'via_device_id': None, }), ]) @@ -1251,7 +1246,6 @@ 'options': list([ 'unplugged', 'plugged', - 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -1677,7 +1671,6 @@ 'options': list([ 'unplugged', 'plugged', - 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -1842,13 +1835,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X102VE', 'via_device_id': None, }), ]) @@ -2004,7 +1996,6 @@ 'options': list([ 'unplugged', 'plugged', - 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -2461,7 +2452,6 @@ 'options': list([ 'unplugged', 'plugged', - 'plugged_waiting_for_charge', 'plug_error', 
'plug_unknown', ]), @@ -2638,13 +2628,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -2948,13 +2937,12 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'XJB1SU', 'via_device_id': None, }), ]) @@ -3110,7 +3098,6 @@ 'options': list([ 'unplugged', 'plugged', - 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -3538,7 +3525,6 @@ 'options': list([ 'unplugged', 'plugged', - 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -3705,13 +3691,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X101VE', 'via_device_id': None, }), ]) @@ -3867,7 +3852,6 @@ 'options': list([ 'unplugged', 'plugged', - 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -4293,7 +4277,6 @@ 'options': list([ 'unplugged', 'plugged', - 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -4458,13 +4441,12 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'X102VE', 'via_device_id': None, }), ]) @@ -4620,7 +4602,6 @@ 'options': list([ 'unplugged', 'plugged', - 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -5077,7 +5058,6 @@ 'options': list([ 'unplugged', 'plugged', - 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), diff --git a/tests/components/renault/snapshots/test_services.ambr b/tests/components/renault/snapshots/test_services.ambr deleted file mode 100644 index 882b2ffbe34..00000000000 --- a/tests/components/renault/snapshots/test_services.ambr +++ /dev/null @@ -1,757 +0,0 @@ -# serializer version: 1 -# name: test_service_set_ac_schedule[zoe_40] - list([ - dict({ - 'activated': False, - 'friday': None, - 'id': 1, - 'monday': None, - 'raw_data': dict({ - 'activated': False, - 'id': 1, - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': None, - }), - dict({ - 'activated': True, - 'friday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T15:15Z', - }), - 'readyAtTime': 'T15:15Z', - }), - 'id': 2, - 'monday': None, - 'raw_data': dict({ - 'activated': True, - 'friday': dict({ - 'readyAtTime': 'T15:15Z', - }), - 'id': 2, - 'wednesday': dict({ - 'readyAtTime': 'T15:15Z', - }), - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T15:15Z', - }), - 'readyAtTime': 'T15:15Z', - }), - }), - dict({ - 'activated': False, - 'friday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'readyAtTime': 'T23:30Z', - }), - 'id': 3, - 'monday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'readyAtTime': 'T23:30Z', - }), - 'raw_data': dict({ - 'activated': False, - 'friday': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'id': 3, - 'monday': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'saturday': dict({ - 'readyAtTime': 
'T23:30Z', - }), - 'sunday': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'thursday': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'tuesday': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'wednesday': dict({ - 'readyAtTime': 'T23:30Z', - }), - }), - 'saturday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'readyAtTime': 'T23:30Z', - }), - 'sunday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'readyAtTime': 'T23:30Z', - }), - 'thursday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'readyAtTime': 'T23:30Z', - }), - 'tuesday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'readyAtTime': 'T23:30Z', - }), - 'wednesday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'readyAtTime': 'T23:30Z', - }), - }), - dict({ - 'activated': False, - 'friday': None, - 'id': 4, - 'monday': None, - 'raw_data': dict({ - 'activated': False, - 'id': 4, - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': None, - }), - dict({ - 'activated': False, - 'friday': None, - 'id': 5, - 'monday': None, - 'raw_data': dict({ - 'activated': False, - 'id': 5, - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': None, - }), - ]) -# --- -# name: test_service_set_ac_schedule_multi[zoe_40] - list([ - dict({ - 'activated': False, - 'friday': None, - 'id': 1, - 'monday': None, - 'raw_data': dict({ - 'activated': False, - 'id': 1, - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': None, - }), - dict({ - 'activated': True, - 'friday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T15:15Z', - }), - 'readyAtTime': 'T15:15Z', - }), - 'id': 2, - 'monday': None, - 'raw_data': dict({ - 'activated': True, - 'friday': dict({ - 'readyAtTime': 'T15:15Z', - }), - 'id': 2, - 'wednesday': dict({ - 'readyAtTime': 'T15:15Z', - }), - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T15:15Z', - }), - 'readyAtTime': 'T15:15Z', - }), - }), - dict({ - 'activated': True, - 'friday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T12:00Z', - }), - 'readyAtTime': 'T12:00Z', - }), - 'id': 3, - 'monday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T12:00Z', - }), - 'readyAtTime': 'T12:00Z', - }), - 'raw_data': dict({ - 'activated': False, - 'friday': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'id': 3, - 'monday': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'saturday': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'sunday': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'thursday': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'tuesday': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'wednesday': dict({ - 'readyAtTime': 'T23:30Z', - }), - }), - 'saturday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T12:00Z', - }), - 'readyAtTime': 'T12:00Z', - }), - 'sunday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T12:00Z', - }), - 'readyAtTime': 'T12:00Z', - }), - 'thursday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T23:30Z', - }), - 'readyAtTime': 'T23:30Z', - }), - 'tuesday': dict({ - 'raw_data': dict({ - 'readyAtTime': 'T12:00Z', - }), - 'readyAtTime': 'T12:00Z', - }), - 'wednesday': None, - }), - dict({ - 'activated': False, - 'friday': None, - 'id': 4, - 'monday': None, - 'raw_data': dict({ - 'activated': False, - 'id': 4, - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': None, - }), - dict({ - 'activated': 
False, - 'friday': None, - 'id': 5, - 'monday': None, - 'raw_data': dict({ - 'activated': False, - 'id': 5, - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': None, - }), - ]) -# --- -# name: test_service_set_charge_schedule[zoe_40] - list([ - dict({ - 'activated': True, - 'friday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - 'id': 1, - 'monday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - 'raw_data': dict({ - 'activated': True, - 'friday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'id': 1, - 'monday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'saturday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'sunday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'thursday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'tuesday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'wednesday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - }), - 'saturday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - 'sunday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - 'thursday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - 'tuesday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - 'wednesday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - }), - dict({ - 'activated': True, - 'friday': dict({ - 'duration': 15, - 'raw_data': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'startTime': 'T23:30Z', - }), - 'id': 2, - 'monday': dict({ - 'duration': 15, - 'raw_data': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'startTime': 'T23:30Z', - }), - 'raw_data': dict({ - 'activated': True, - 'friday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'id': 2, - 'monday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'saturday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'sunday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'thursday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'tuesday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'wednesday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - }), - 'saturday': dict({ - 'duration': 15, - 'raw_data': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'startTime': 'T23:30Z', - }), - 'sunday': dict({ - 'duration': 15, - 'raw_data': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'startTime': 'T23:30Z', - }), - 'thursday': dict({ - 'duration': 15, - 'raw_data': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'startTime': 'T23:30Z', - }), - 'tuesday': dict({ - 'duration': 15, - 'raw_data': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'startTime': 'T23:30Z', - }), - 'wednesday': dict({ - 'duration': 15, - 'raw_data': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'startTime': 'T23:30Z', - }), - }), - dict({ - 'activated': False, - 'friday': None, - 'id': 3, - 'monday': 
None, - 'raw_data': dict({ - 'activated': False, - 'id': 3, - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': None, - }), - dict({ - 'activated': False, - 'friday': None, - 'id': 4, - 'monday': None, - 'raw_data': dict({ - 'activated': False, - 'id': 4, - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': None, - }), - dict({ - 'activated': False, - 'friday': None, - 'id': 5, - 'monday': None, - 'raw_data': dict({ - 'activated': False, - 'id': 5, - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': None, - }), - ]) -# --- -# name: test_service_set_charge_schedule_multi[zoe_40] - list([ - dict({ - 'activated': True, - 'friday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - 'id': 1, - 'monday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - 'raw_data': dict({ - 'activated': True, - 'friday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'id': 1, - 'monday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'saturday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'sunday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'thursday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'tuesday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'wednesday': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - }), - 'saturday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - 'sunday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - 'thursday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - 'tuesday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - 'wednesday': dict({ - 'duration': 450, - 'raw_data': dict({ - 'duration': 450, - 'startTime': 'T00:00Z', - }), - 'startTime': 'T00:00Z', - }), - }), - dict({ - 'activated': True, - 'friday': dict({ - 'duration': 30, - 'raw_data': dict({ - 'duration': 30, - 'startTime': 'T12:00Z', - }), - 'startTime': 'T12:00Z', - }), - 'id': 2, - 'monday': dict({ - 'duration': 30, - 'raw_data': dict({ - 'duration': 30, - 'startTime': 'T12:00Z', - }), - 'startTime': 'T12:00Z', - }), - 'raw_data': dict({ - 'activated': True, - 'friday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'id': 2, - 'monday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'saturday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'sunday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'thursday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'tuesday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - 'wednesday': dict({ - 'duration': 15, - 'startTime': 'T23:30Z', - }), - }), - 'saturday': dict({ - 'duration': 30, - 'raw_data': dict({ - 'duration': 30, - 'startTime': 'T12:00Z', - }), - 'startTime': 'T12:00Z', - }), - 'sunday': dict({ - 'duration': 30, - 'raw_data': dict({ - 'duration': 30, - 'startTime': 'T12:00Z', - }), - 'startTime': 'T12:00Z', - }), - 'thursday': dict({ - 'duration': 15, - 'raw_data': dict({ - 
'duration': 15, - 'startTime': 'T23:30Z', - }), - 'startTime': 'T23:30Z', - }), - 'tuesday': dict({ - 'duration': 30, - 'raw_data': dict({ - 'duration': 30, - 'startTime': 'T12:00Z', - }), - 'startTime': 'T12:00Z', - }), - 'wednesday': None, - }), - dict({ - 'activated': False, - 'friday': None, - 'id': 3, - 'monday': None, - 'raw_data': dict({ - 'activated': False, - 'id': 3, - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': None, - }), - dict({ - 'activated': False, - 'friday': None, - 'id': 4, - 'monday': None, - 'raw_data': dict({ - 'activated': False, - 'id': 4, - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': None, - }), - dict({ - 'activated': False, - 'friday': None, - 'id': 5, - 'monday': None, - 'raw_data': dict({ - 'activated': False, - 'id': 5, - }), - 'saturday': None, - 'sunday': None, - 'thursday': None, - 'tuesday': None, - 'wednesday': None, - }), - ]) -# --- diff --git a/tests/components/renault/test_binary_sensor.py b/tests/components/renault/test_binary_sensor.py index 52b6de33f14..a0264493544 100644 --- a/tests/components/renault/test_binary_sensor.py +++ b/tests/components/renault/test_binary_sensor.py @@ -1,10 +1,10 @@ """Tests for Renault binary sensors.""" -from collections.abc import Generator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/renault/test_button.py b/tests/components/renault/test_button.py index 32c5ce651ae..bed188d8881 100644 --- a/tests/components/renault/test_button.py +++ b/tests/components/renault/test_button.py @@ -1,11 +1,11 @@ """Tests for Renault sensors.""" -from collections.abc import Generator from unittest.mock import patch import pytest from renault_api.kamereon import schemas from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.config_entries import ConfigEntry diff --git a/tests/components/renault/test_config_flow.py b/tests/components/renault/test_config_flow.py index 234d1dca069..7d40cf69314 100644 --- a/tests/components/renault/test_config_flow.py +++ b/tests/components/renault/test_config_flow.py @@ -13,12 +13,15 @@ from homeassistant.components.renault.const import ( CONF_LOCALE, DOMAIN, ) -from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import aiohttp_client -from tests.common import MockConfigEntry, load_fixture +from .const import MOCK_CONFIG + +from tests.common import load_fixture pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -217,17 +220,22 @@ async def test_config_flow_duplicate( assert len(mock_setup_entry.mock_calls) == 0 -async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: +async def test_reauth(hass: HomeAssistant, config_entry: ConfigEntry) -> None: """Test the start of the config flow.""" assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + 
context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + "unique_id": config_entry.unique_id, + }, + data=MOCK_CONFIG, + ) assert result["type"] is FlowResultType.FORM - assert result["description_placeholders"] == { - CONF_NAME: "Mock Title", - CONF_USERNAME: "email@test.com", - } + assert result["description_placeholders"] == {CONF_USERNAME: "email@test.com"} assert result["errors"] == {} # Failed credentials @@ -241,10 +249,7 @@ async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> Non ) assert result2["type"] is FlowResultType.FORM - assert result2["description_placeholders"] == { - CONF_NAME: "Mock Title", - CONF_USERNAME: "email@test.com", - } + assert result2["description_placeholders"] == {CONF_USERNAME: "email@test.com"} assert result2["errors"] == {"base": "invalid_credentials"} # Valid credentials diff --git a/tests/components/renault/test_device_tracker.py b/tests/components/renault/test_device_tracker.py index 39f37d12a4d..d8bee097eda 100644 --- a/tests/components/renault/test_device_tracker.py +++ b/tests/components/renault/test_device_tracker.py @@ -1,10 +1,10 @@ """Tests for Renault sensors.""" -from collections.abc import Generator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/renault/test_init.py b/tests/components/renault/test_init.py index 0f9d9cbaf5b..90963fd3521 100644 --- a/tests/components/renault/test_init.py +++ b/tests/components/renault/test_init.py @@ -1,12 +1,12 @@ """Tests for Renault setup process.""" -from collections.abc import Generator from typing import Any from unittest.mock import Mock, patch import aiohttp import pytest from renault_api.gigya.exceptions import GigyaException, InvalidCredentialsException +from typing_extensions import Generator from homeassistant.components.renault.const import DOMAIN from homeassistant.config_entries import ConfigEntry, ConfigEntryState diff --git a/tests/components/renault/test_select.py b/tests/components/renault/test_select.py index 7b589d86863..0577966d514 100644 --- a/tests/components/renault/test_select.py +++ b/tests/components/renault/test_select.py @@ -1,11 +1,11 @@ """Tests for Renault selects.""" -from collections.abc import Generator from unittest.mock import patch import pytest from renault_api.kamereon import schemas from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.components.select import ( ATTR_OPTION, diff --git a/tests/components/renault/test_sensor.py b/tests/components/renault/test_sensor.py index d69ab5c0b7f..7e8e4f24c77 100644 --- a/tests/components/renault/test_sensor.py +++ b/tests/components/renault/test_sensor.py @@ -1,10 +1,10 @@ """Tests for Renault sensors.""" -from collections.abc import Generator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/renault/test_services.py b/tests/components/renault/test_services.py index bdb233f4d97..d30626e4117 100644 --- a/tests/components/renault/test_services.py +++ b/tests/components/renault/test_services.py @@ -1,14 +1,13 @@ """Tests for Renault sensors.""" -from collections.abc import Generator from datetime import datetime 
from unittest.mock import patch import pytest from renault_api.exceptions import RenaultException from renault_api.kamereon import schemas -from renault_api.kamereon.models import ChargeSchedule, HvacSchedule -from syrupy import SnapshotAssertion +from renault_api.kamereon.models import ChargeSchedule +from typing_extensions import Generator from homeassistant.components.renault.const import DOMAIN from homeassistant.components.renault.services import ( @@ -17,7 +16,6 @@ from homeassistant.components.renault.services import ( ATTR_VEHICLE, ATTR_WHEN, SERVICE_AC_CANCEL, - SERVICE_AC_SET_SCHEDULES, SERVICE_AC_START, SERVICE_CHARGE_SET_SCHEDULES, ) @@ -26,8 +24,8 @@ from homeassistant.const import ( ATTR_IDENTIFIERS, ATTR_MANUFACTURER, ATTR_MODEL, - ATTR_MODEL_ID, ATTR_NAME, + ATTR_SW_VERSION, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -145,7 +143,7 @@ async def test_service_set_ac_start_with_date( async def test_service_set_charge_schedule( - hass: HomeAssistant, config_entry: ConfigEntry, snapshot: SnapshotAssertion + hass: HomeAssistant, config_entry: ConfigEntry ) -> None: """Test that service invokes renault_api with correct data.""" await hass.config_entries.async_setup(config_entry.entry_id) @@ -178,11 +176,11 @@ async def test_service_set_charge_schedule( ) assert len(mock_action.mock_calls) == 1 mock_call_data: list[ChargeSchedule] = mock_action.mock_calls[0][1][0] - assert mock_call_data == snapshot + assert mock_action.mock_calls[0][1] == (mock_call_data,) async def test_service_set_charge_schedule_multi( - hass: HomeAssistant, config_entry: ConfigEntry, snapshot: SnapshotAssertion + hass: HomeAssistant, config_entry: ConfigEntry ) -> None: """Test that service invokes renault_api with correct data.""" await hass.config_entries.async_setup(config_entry.entry_id) @@ -227,7 +225,7 @@ async def test_service_set_charge_schedule_multi( ) assert len(mock_action.mock_calls) == 1 mock_call_data: list[ChargeSchedule] = mock_action.mock_calls[0][1][0] - assert mock_call_data == snapshot + assert mock_action.mock_calls[0][1] == (mock_call_data,) # Monday updated with new values assert mock_call_data[1].monday.startTime == "T12:00Z" @@ -239,101 +237,6 @@ async def test_service_set_charge_schedule_multi( assert mock_call_data[1].thursday.duration == 15 -async def test_service_set_ac_schedule( - hass: HomeAssistant, config_entry: ConfigEntry, snapshot: SnapshotAssertion -) -> None: - """Test that service invokes renault_api with correct data.""" - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - schedules = {"id": 2} - data = { - ATTR_VEHICLE: get_device_id(hass), - ATTR_SCHEDULES: schedules, - } - - with ( - patch( - "renault_api.renault_vehicle.RenaultVehicle.get_hvac_settings", - return_value=schemas.KamereonVehicleDataResponseSchema.loads( - load_fixture("renault/hvac_settings.json") - ).get_attributes(schemas.KamereonVehicleHvacSettingsDataSchema), - ), - patch( - "renault_api.renault_vehicle.RenaultVehicle.set_hvac_schedules", - return_value=( - schemas.KamereonVehicleHvacScheduleActionDataSchema.loads( - load_fixture("renault/action.set_ac_schedules.json") - ) - ), - ) as mock_action, - ): - await hass.services.async_call( - DOMAIN, SERVICE_AC_SET_SCHEDULES, service_data=data, blocking=True - ) - assert len(mock_action.mock_calls) == 1 - mock_call_data: list[ChargeSchedule] = mock_action.mock_calls[0][1][0] - assert mock_call_data == snapshot - - -async def 
test_service_set_ac_schedule_multi( - hass: HomeAssistant, config_entry: ConfigEntry, snapshot: SnapshotAssertion -) -> None: - """Test that service invokes renault_api with correct data.""" - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - schedules = [ - { - "id": 3, - "activated": True, - "monday": {"readyAtTime": "T12:00Z"}, - "tuesday": {"readyAtTime": "T12:00Z"}, - "wednesday": None, - "friday": {"readyAtTime": "T12:00Z"}, - "saturday": {"readyAtTime": "T12:00Z"}, - "sunday": {"readyAtTime": "T12:00Z"}, - }, - {"id": 4}, - ] - data = { - ATTR_VEHICLE: get_device_id(hass), - ATTR_SCHEDULES: schedules, - } - - with ( - patch( - "renault_api.renault_vehicle.RenaultVehicle.get_hvac_settings", - return_value=schemas.KamereonVehicleDataResponseSchema.loads( - load_fixture("renault/hvac_settings.json") - ).get_attributes(schemas.KamereonVehicleHvacSettingsDataSchema), - ), - patch( - "renault_api.renault_vehicle.RenaultVehicle.set_hvac_schedules", - return_value=( - schemas.KamereonVehicleHvacScheduleActionDataSchema.loads( - load_fixture("renault/action.set_ac_schedules.json") - ) - ), - ) as mock_action, - ): - await hass.services.async_call( - DOMAIN, SERVICE_AC_SET_SCHEDULES, service_data=data, blocking=True - ) - assert len(mock_action.mock_calls) == 1 - mock_call_data: list[HvacSchedule] = mock_action.mock_calls[0][1][0] - assert mock_call_data == snapshot - - # Schedule is activated now - assert mock_call_data[2].activated is True - # Monday updated with new values - assert mock_call_data[2].monday.readyAtTime == "T12:00Z" - # Wednesday has original values cleared - assert mock_call_data[2].wednesday is None - # Thursday keeps original values - assert mock_call_data[2].thursday.readyAtTime == "T23:30Z" - - async def test_service_invalid_device_id( hass: HomeAssistant, config_entry: ConfigEntry ) -> None: @@ -364,7 +267,7 @@ async def test_service_invalid_device_id2( manufacturer=extra_vehicle[ATTR_MANUFACTURER], name=extra_vehicle[ATTR_NAME], model=extra_vehicle[ATTR_MODEL], - model_id=extra_vehicle[ATTR_MODEL_ID], + sw_version=extra_vehicle[ATTR_SW_VERSION], ) device_id = device_registry.async_get_device( identifiers=extra_vehicle[ATTR_IDENTIFIERS] diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index 94192c3502e..105815bae1d 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -1,15 +1,12 @@ """Setup the Reolink tests.""" -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, create_autospec, patch +from unittest.mock import AsyncMock, MagicMock, patch import pytest -from reolink_aio.api import Chime -from reolink_aio.baichuan import Baichuan -from reolink_aio.exceptions import ReolinkError +from typing_extensions import Generator +from homeassistant.components.reolink import const from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL -from homeassistant.components.reolink.const import CONF_USE_HTTPS, DOMAIN from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -35,14 +32,10 @@ TEST_UID = "ABC1234567D89EFG" TEST_UID_CAM = "DEF7654321D89GHT" TEST_PORT = 1234 TEST_NVR_NAME = "test_reolink_name" -TEST_CAM_NAME = "test_reolink_cam" TEST_NVR_NAME2 = "test2_reolink_name" -TEST_CAM_NAME = "test_reolink_cam" TEST_USE_HTTPS = True TEST_HOST_MODEL = "RLN8-410" -TEST_ITEM_NUMBER = "P000" TEST_CAM_MODEL = "RLC-123" -TEST_DUO_MODEL = "Reolink Duo PoE" @pytest.fixture @@ -54,10 +47,14 @@ def 
mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry -@pytest.fixture(scope="module") +@pytest.fixture def reolink_connect_class() -> Generator[MagicMock]: """Mock reolink connection and return both the host_mock and host_mock_class.""" with ( + patch( + "homeassistant.components.reolink.host.webhook.async_register", + return_value=True, + ), patch( "homeassistant.components.reolink.host.Host", autospec=True ) as host_mock_class, @@ -68,7 +65,6 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.check_new_firmware.return_value = False host_mock.unsubscribe.return_value = True host_mock.logout.return_value = True - host_mock.is_hub = False host_mock.mac_address = TEST_MAC host_mock.uid = TEST_UID host_mock.onvif_enabled = True @@ -82,20 +78,17 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.protocol = "rtsp" host_mock.channels = [0] host_mock.stream_channels = [0] - host_mock.new_devices = False host_mock.sw_version_update_required = False host_mock.hardware_version = "IPC_00000" host_mock.sw_version = "v1.0.0.0.0.0000" host_mock.manufacturer = "Reolink" host_mock.model = TEST_HOST_MODEL - host_mock.item_number = TEST_ITEM_NUMBER host_mock.camera_model.return_value = TEST_CAM_MODEL host_mock.camera_name.return_value = TEST_NVR_NAME host_mock.camera_hardware_version.return_value = "IPC_00001" host_mock.camera_sw_version.return_value = "v1.1.0.0.0.0000" host_mock.camera_sw_version_update_required.return_value = False host_mock.camera_uid.return_value = TEST_UID_CAM - host_mock.camera_online.return_value = True host_mock.channel_for_uid.return_value = 0 host_mock.get_encoding.return_value = "h264" host_mock.firmware_update_available.return_value = False @@ -112,20 +105,6 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.capabilities = {"Host": ["RTSP"], "0": ["motion_detection"]} host_mock.checked_api_versions = {"GetEvents": 1} host_mock.abilities = {"abilityChn": [{"aiTrack": {"permit": 0, "ver": 0}}]} - - # enums - host_mock.whiteled_mode.return_value = 1 - host_mock.whiteled_mode_list.return_value = ["off", "auto"] - host_mock.doorbell_led.return_value = "Off" - host_mock.doorbell_led_list.return_value = ["stayoff", "auto"] - host_mock.auto_track_method.return_value = 3 - host_mock.daynight_state.return_value = "Black&White" - - # Baichuan - host_mock.baichuan = create_autospec(Baichuan) - # Disable tcp push by default for tests - host_mock.baichuan.events_active = False - host_mock.baichuan.subscribe_events.side_effect = ReolinkError("Test error") yield host_mock_class @@ -148,14 +127,14 @@ def reolink_platforms() -> Generator[None]: def config_entry(hass: HomeAssistant) -> MockConfigEntry: """Add the reolink mock config entry to hass.""" config_entry = MockConfigEntry( - domain=DOMAIN, + domain=const.DOMAIN, unique_id=format_mac(TEST_MAC), data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - CONF_USE_HTTPS: TEST_USE_HTTPS, + const.CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -164,26 +143,3 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry: ) config_entry.add_to_hass(hass) return config_entry - - -@pytest.fixture -def test_chime(reolink_connect: MagicMock) -> None: - """Mock a reolink chime.""" - TEST_CHIME = Chime( - host=reolink_connect, - dev_id=12345678, - channel=0, - ) - TEST_CHIME.name = "Test chime" - TEST_CHIME.volume = 3 - TEST_CHIME.connect_state = 2 - TEST_CHIME.led_state = True - TEST_CHIME.event_info = { - 
"md": {"switch": 0, "musicId": 0}, - "people": {"switch": 0, "musicId": 1}, - "visitor": {"switch": 1, "musicId": 2}, - } - - reolink_connect.chime_list = [TEST_CHIME] - reolink_connect.chime.return_value = TEST_CHIME - return TEST_CHIME diff --git a/tests/components/reolink/snapshots/test_diagnostics.ambr b/tests/components/reolink/snapshots/test_diagnostics.ambr index 71c5397fbd1..00363023d14 100644 --- a/tests/components/reolink/snapshots/test_diagnostics.ambr +++ b/tests/components/reolink/snapshots/test_diagnostics.ambr @@ -77,10 +77,6 @@ '0': 1, 'null': 1, }), - 'GetDeviceAudioCfg': dict({ - '0': 2, - 'null': 4, - }), 'GetEmail': dict({ '0': 1, 'null': 2, @@ -118,8 +114,8 @@ 'null': 2, }), 'GetPtzCurPos': dict({ - '0': 2, - 'null': 2, + '0': 1, + 'null': 1, }), 'GetPtzGuard': dict({ '0': 2, @@ -137,9 +133,6 @@ '0': 1, 'null': 2, }), - 'GetStateLight': dict({ - 'null': 1, - }), 'GetWhiteLed': dict({ '0': 3, 'null': 3, diff --git a/tests/components/reolink/test_binary_sensor.py b/tests/components/reolink/test_binary_sensor.py deleted file mode 100644 index 71318c27b25..00000000000 --- a/tests/components/reolink/test_binary_sensor.py +++ /dev/null @@ -1,91 +0,0 @@ -"""Test the Reolink binary sensor platform.""" - -from collections.abc import Callable -from unittest.mock import MagicMock, patch - -from freezegun.api import FrozenDateTimeFactory - -from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_OFF, STATE_ON, Platform -from homeassistant.core import HomeAssistant - -from .conftest import TEST_DUO_MODEL, TEST_HOST_MODEL, TEST_NVR_NAME - -from tests.common import MockConfigEntry, async_fire_time_changed -from tests.typing import ClientSessionGenerator - - -async def test_motion_sensor( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - freezer: FrozenDateTimeFactory, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test binary sensor entity with motion sensor.""" - reolink_connect.model = TEST_DUO_MODEL - reolink_connect.motion_detected.return_value = True - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.BINARY_SENSOR}.{TEST_NVR_NAME}_motion_lens_0" - assert hass.states.get(entity_id).state == STATE_ON - - reolink_connect.motion_detected.return_value = False - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == STATE_OFF - - # test ONVIF webhook callback - reolink_connect.motion_detected.return_value = True - reolink_connect.ONVIF_event_callback.return_value = [0] - webhook_id = config_entry.runtime_data.host.webhook_id - client = await hass_client_no_auth() - await client.post(f"/api/webhook/{webhook_id}", data="test_data") - - assert hass.states.get(entity_id).state == STATE_ON - - -async def test_tcp_callback( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test tcp callback using motion sensor.""" - - class callback_mock_class: - callback_func = None - - def register_callback( - self, callback_id: str, callback: Callable[[], None], *args, **key_args - ) -> None: - if callback_id.endswith("_motion"): - self.callback_func = callback - - 
callback_mock = callback_mock_class() - - reolink_connect.model = TEST_HOST_MODEL - reolink_connect.baichuan.events_active = True - reolink_connect.baichuan.subscribe_events.reset_mock(side_effect=True) - reolink_connect.baichuan.register_callback = callback_mock.register_callback - reolink_connect.motion_detected.return_value = True - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.BINARY_SENSOR}.{TEST_NVR_NAME}_motion" - assert hass.states.get(entity_id).state == STATE_ON - - # simulate a TCP push callback - reolink_connect.motion_detected.return_value = False - assert callback_mock.callback_func is not None - callback_mock.callback_func() - - assert hass.states.get(entity_id).state == STATE_OFF diff --git a/tests/components/reolink/test_button.py b/tests/components/reolink/test_button.py deleted file mode 100644 index 126fbb6b29a..00000000000 --- a/tests/components/reolink/test_button.py +++ /dev/null @@ -1,118 +0,0 @@ -"""Test the Reolink button platform.""" - -from unittest.mock import MagicMock, patch - -import pytest -from reolink_aio.exceptions import ReolinkError - -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.components.reolink.button import ATTR_SPEED, SERVICE_PTZ_MOVE -from homeassistant.components.reolink.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from .conftest import TEST_NVR_NAME - -from tests.common import MockConfigEntry - - -async def test_button( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test button entity with ptz up.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BUTTON]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.BUTTON}.{TEST_NVR_NAME}_ptz_up" - - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - reolink_connect.set_ptz_command.assert_called_once() - - reolink_connect.set_ptz_command.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - reolink_connect.set_ptz_command.reset_mock(side_effect=True) - - -async def test_ptz_move_service( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test ptz_move entity service using PTZ button entity.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BUTTON]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.BUTTON}.{TEST_NVR_NAME}_ptz_up" - - await hass.services.async_call( - DOMAIN, - SERVICE_PTZ_MOVE, - {ATTR_ENTITY_ID: entity_id, ATTR_SPEED: 5}, - blocking=True, - ) - reolink_connect.set_ptz_command.assert_called_with(0, command="Up", speed=5) - - reolink_connect.set_ptz_command.side_effect 
= ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - DOMAIN, - SERVICE_PTZ_MOVE, - {ATTR_ENTITY_ID: entity_id, ATTR_SPEED: 5}, - blocking=True, - ) - - reolink_connect.set_ptz_command.reset_mock(side_effect=True) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_host_button( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test host button entity with reboot.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BUTTON]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.BUTTON}.{TEST_NVR_NAME}_restart" - - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - reolink_connect.reboot.assert_called_once() - - reolink_connect.reboot.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - reolink_connect.reboot.reset_mock(side_effect=True) diff --git a/tests/components/reolink/test_camera.py b/tests/components/reolink/test_camera.py deleted file mode 100644 index 4f18f769e02..00000000000 --- a/tests/components/reolink/test_camera.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Test the Reolink camera platform.""" - -from unittest.mock import MagicMock, patch - -import pytest -from reolink_aio.exceptions import ReolinkError - -from homeassistant.components.camera import ( - CameraState, - async_get_image, - async_get_stream_source, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from .conftest import TEST_DUO_MODEL, TEST_NVR_NAME - -from tests.common import MockConfigEntry -from tests.typing import ClientSessionGenerator - - -async def test_camera( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test camera entity with fluent.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.CAMERA}.{TEST_NVR_NAME}_fluent" - assert hass.states.get(entity_id).state == CameraState.IDLE - - # check getting a image from the camera - reolink_connect.get_snapshot.return_value = b"image" - assert (await async_get_image(hass, entity_id)).content == b"image" - - reolink_connect.get_snapshot.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await async_get_image(hass, entity_id) - - # check getting the stream source - assert await async_get_stream_source(hass, entity_id) is not None - - reolink_connect.get_snapshot.reset_mock(side_effect=True) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_camera_no_stream_source( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test camera entity with no stream source.""" - reolink_connect.model = TEST_DUO_MODEL - reolink_connect.get_stream_source.return_value = None - - with 
patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.CAMERA}.{TEST_NVR_NAME}_snapshots_fluent_lens_0" - assert hass.states.get(entity_id).state == CameraState.IDLE diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index bb896428b99..ba845dc1697 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -1,24 +1,17 @@ """Test the Reolink config flow.""" +from datetime import timedelta import json from typing import Any -from unittest.mock import ANY, AsyncMock, MagicMock, call +from unittest.mock import AsyncMock, MagicMock, call -from aiohttp import ClientSession -from freezegun.api import FrozenDateTimeFactory import pytest -from reolink_aio.exceptions import ( - ApiError, - CredentialsInvalidError, - LoginFirmwareError, - ReolinkError, -) +from reolink_aio.exceptions import ApiError, CredentialsInvalidError, ReolinkError from homeassistant import config_entries from homeassistant.components import dhcp -from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL, const from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL -from homeassistant.components.reolink.const import CONF_USE_HTTPS, DOMAIN from homeassistant.components.reolink.exceptions import ReolinkWebhookException from homeassistant.components.reolink.host import DEFAULT_TIMEOUT from homeassistant.config_entries import ConfigEntryState @@ -32,6 +25,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.device_registry import format_mac +from homeassistant.util.dt import utcnow from .conftest import ( DHCP_FORMATTED_MAC, @@ -57,7 +51,7 @@ async def test_config_flow_manual_success( ) -> None: """Successful flow manually initialized by the user.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + const.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM @@ -80,7 +74,7 @@ async def test_config_flow_manual_success( CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - CONF_USE_HTTPS: TEST_USE_HTTPS, + const.CONF_USE_HTTPS: TEST_USE_HTTPS, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -92,7 +86,7 @@ async def test_config_flow_errors( ) -> None: """Successful flow manually initialized by the user after some errors.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + const.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM @@ -101,8 +95,6 @@ async def test_config_flow_errors( reolink_connect.is_admin = False reolink_connect.user_level = "guest" - reolink_connect.unsubscribe.side_effect = ReolinkError("Test error") - reolink_connect.logout.side_effect = ReolinkError("Test error") result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -174,37 +166,8 @@ async def test_config_flow_errors( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - assert result["errors"] == {CONF_PASSWORD: 
"invalid_auth"} + assert result["errors"] == {CONF_HOST: "invalid_auth"} - reolink_connect.get_host_data.side_effect = LoginFirmwareError("Test error") - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_HOST: TEST_HOST, - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "update_needed"} - - reolink_connect.valid_password.return_value = False - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_HOST: TEST_HOST, - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {CONF_PASSWORD: "password_incompatible"} - - reolink_connect.valid_password.return_value = True reolink_connect.get_host_data.side_effect = ApiError("Test error") result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -219,7 +182,7 @@ async def test_config_flow_errors( assert result["step_id"] == "user" assert result["errors"] == {CONF_HOST: "api_error"} - reolink_connect.get_host_data.reset_mock(side_effect=True) + reolink_connect.get_host_data.side_effect = None result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -227,7 +190,7 @@ async def test_config_flow_errors( CONF_PASSWORD: TEST_PASSWORD, CONF_HOST: TEST_HOST, CONF_PORT: TEST_PORT, - CONF_USE_HTTPS: TEST_USE_HTTPS, + const.CONF_USE_HTTPS: TEST_USE_HTTPS, }, ) @@ -238,27 +201,24 @@ async def test_config_flow_errors( CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - CONF_USE_HTTPS: TEST_USE_HTTPS, + const.CONF_USE_HTTPS: TEST_USE_HTTPS, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, } - reolink_connect.unsubscribe.reset_mock(side_effect=True) - reolink_connect.logout.reset_mock(side_effect=True) - async def test_options_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: """Test specifying non default settings using options flow.""" config_entry = MockConfigEntry( - domain=DOMAIN, + domain=const.DOMAIN, unique_id=format_mac(TEST_MAC), data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - CONF_USE_HTTPS: TEST_USE_HTTPS, + const.CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: "rtsp", @@ -291,14 +251,14 @@ async def test_change_connection_settings( ) -> None: """Test changing connection settings by issuing a second user config flow.""" config_entry = MockConfigEntry( - domain=DOMAIN, + domain=const.DOMAIN, unique_id=format_mac(TEST_MAC), data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - CONF_USE_HTTPS: TEST_USE_HTTPS, + const.CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -308,7 +268,7 @@ async def test_change_connection_settings( config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + const.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM @@ -334,14 +294,14 @@ async def test_change_connection_settings( async def test_reauth(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: """Test a reauth flow.""" config_entry = MockConfigEntry( - domain=DOMAIN, + domain=const.DOMAIN, unique_id=format_mac(TEST_MAC), 
data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - CONF_USE_HTTPS: TEST_USE_HTTPS, + const.CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -353,7 +313,16 @@ async def test_reauth(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + const.DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + "title_placeholders": {"name": TEST_NVR_NAME}, + "unique_id": format_mac(TEST_MAC), + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -391,7 +360,7 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No ) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data + const.DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data ) assert result["type"] is FlowResultType.FORM @@ -413,7 +382,7 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - CONF_USE_HTTPS: TEST_USE_HTTPS, + const.CONF_USE_HTTPS: TEST_USE_HTTPS, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -455,7 +424,6 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No ) async def test_dhcp_ip_update( hass: HomeAssistant, - freezer: FrozenDateTimeFactory, reolink_connect_class: MagicMock, reolink_connect: MagicMock, last_update_success: bool, @@ -466,14 +434,14 @@ async def test_dhcp_ip_update( ) -> None: """Test dhcp discovery aborts if already configured where the IP is updated if appropriate.""" config_entry = MockConfigEntry( - domain=DOMAIN, + domain=const.DOMAIN, unique_id=format_mac(TEST_MAC), data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - CONF_USE_HTTPS: TEST_USE_HTTPS, + const.CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -488,9 +456,10 @@ async def test_dhcp_ip_update( if not last_update_success: # ensure the last_update_succes is False for the device_coordinator. 
- reolink_connect.get_states.side_effect = ReolinkError("Test error") - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) + reolink_connect.get_states = AsyncMock(side_effect=ReolinkError("Test error")) + async_fire_time_changed( + hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(minutes=1) + ) await hass.async_block_till_done() dhcp_data = dhcp.DhcpServiceInfo( @@ -500,11 +469,10 @@ async def test_dhcp_ip_update( ) if attr is not None: - original = getattr(reolink_connect, attr) setattr(reolink_connect, attr, value) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data + const.DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data ) for host in host_call_list: @@ -516,66 +484,14 @@ async def test_dhcp_ip_update( use_https=TEST_USE_HTTPS, protocol=DEFAULT_PROTOCOL, timeout=DEFAULT_TIMEOUT, - aiohttp_get_session_callback=ANY, ) assert expected_call in reolink_connect_class.call_args_list for exc_call in reolink_connect_class.call_args_list: assert exc_call[0][0] in host_call_list - get_session = exc_call[1]["aiohttp_get_session_callback"] - assert isinstance(get_session(), ClientSession) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" await hass.async_block_till_done() assert config_entry.data[CONF_HOST] == expected - - reolink_connect.get_states.side_effect = None - reolink_connect_class.reset_mock() - if attr is not None: - setattr(reolink_connect, attr, original) - - -async def test_reconfig(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: - """Test a reconfiguration flow.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - unique_id=format_mac(TEST_MAC), - data={ - CONF_HOST: TEST_HOST, - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_PORT: TEST_PORT, - CONF_USE_HTTPS: TEST_USE_HTTPS, - }, - options={ - CONF_PROTOCOL: DEFAULT_PROTOCOL, - }, - title=TEST_NVR_NAME, - ) - config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - result = await config_entry.start_reconfigure_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: TEST_HOST2, - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - assert config_entry.data[CONF_HOST] == TEST_HOST2 - assert config_entry.data[CONF_USERNAME] == TEST_USERNAME - assert config_entry.data[CONF_PASSWORD] == TEST_PASSWORD diff --git a/tests/components/reolink/test_host.py b/tests/components/reolink/test_host.py deleted file mode 100644 index 2286ca5d266..00000000000 --- a/tests/components/reolink/test_host.py +++ /dev/null @@ -1,461 +0,0 @@ -"""Test the Reolink host.""" - -from asyncio import CancelledError -from datetime import timedelta -from unittest.mock import AsyncMock, MagicMock, patch - -from aiohttp import ClientResponseError -from freezegun.api import FrozenDateTimeFactory -import pytest -from reolink_aio.enums import SubType -from reolink_aio.exceptions import NotSupportedError, ReolinkError, SubscriptionError - -from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL -from homeassistant.components.reolink.host import ( - FIRST_ONVIF_LONG_POLL_TIMEOUT, - 
FIRST_ONVIF_TIMEOUT, - FIRST_TCP_PUSH_TIMEOUT, - LONG_POLL_COOLDOWN, - LONG_POLL_ERROR_COOLDOWN, - POLL_INTERVAL_NO_PUSH, -) -from homeassistant.components.webhook import async_handle_webhook -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.network import NoURLAvailableError -from homeassistant.util.aiohttp import MockRequest - -from tests.common import MockConfigEntry, async_fire_time_changed -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_setup_with_tcp_push( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test successful setup of the integration with TCP push callbacks.""" - reolink_connect.baichuan.events_active = True - reolink_connect.baichuan.subscribe_events.reset_mock(side_effect=True) - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - freezer.tick(timedelta(seconds=FIRST_TCP_PUSH_TIMEOUT)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # ONVIF push subscription not called - assert not reolink_connect.subscribe.called - - reolink_connect.baichuan.events_active = False - reolink_connect.baichuan.subscribe_events.side_effect = ReolinkError("Test error") - - -async def test_unloading_with_tcp_push( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test successful unloading of the integration with TCP push callbacks.""" - reolink_connect.baichuan.events_active = True - reolink_connect.baichuan.subscribe_events.reset_mock(side_effect=True) - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - reolink_connect.baichuan.unsubscribe_events.side_effect = ReolinkError("Test error") - - # Unload the config entry - assert await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.NOT_LOADED - - reolink_connect.baichuan.events_active = False - reolink_connect.baichuan.subscribe_events.side_effect = ReolinkError("Test error") - reolink_connect.baichuan.unsubscribe_events.reset_mock(side_effect=True) - - -async def test_webhook_callback( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - freezer: FrozenDateTimeFactory, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - entity_registry: er.EntityRegistry, -) -> None: - """Test webhook callback with motion sensor.""" - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - webhook_id = config_entry.runtime_data.host.webhook_id - - signal_all = MagicMock() - signal_ch = MagicMock() - async_dispatcher_connect(hass, f"{webhook_id}_all", signal_all) - async_dispatcher_connect(hass, f"{webhook_id}_0", signal_ch) - - client = 
await hass_client_no_auth() - - # test webhook callback success all channels - reolink_connect.ONVIF_event_callback.return_value = None - await client.post(f"/api/webhook/{webhook_id}") - signal_all.assert_called_once() - - freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # test webhook callback all channels with failure to read motion_state - signal_all.reset_mock() - reolink_connect.get_motion_state_all_ch.return_value = False - await client.post(f"/api/webhook/{webhook_id}") - signal_all.assert_not_called() - - # test webhook callback success single channel - reolink_connect.ONVIF_event_callback.return_value = [0] - await client.post(f"/api/webhook/{webhook_id}", data="test_data") - signal_ch.assert_called_once() - - # test webhook callback single channel with error in event callback - signal_ch.reset_mock() - reolink_connect.ONVIF_event_callback.side_effect = Exception("Test error") - await client.post(f"/api/webhook/{webhook_id}", data="test_data") - signal_ch.assert_not_called() - - # test failure to read date from webhook post - request = MockRequest( - method="POST", - content=bytes("test", "utf-8"), - mock_source="test", - ) - request.read = AsyncMock() - request.read.side_effect = ConnectionResetError("Test error") - await async_handle_webhook(hass, webhook_id, request) - signal_all.assert_not_called() - - request.read.side_effect = ClientResponseError("Test error", "Test") - await async_handle_webhook(hass, webhook_id, request) - signal_all.assert_not_called() - - request.read.side_effect = CancelledError("Test error") - with pytest.raises(CancelledError): - await async_handle_webhook(hass, webhook_id, request) - signal_all.assert_not_called() - - reolink_connect.ONVIF_event_callback.reset_mock(side_effect=True) - - -async def test_no_mac( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test setup of host with no mac.""" - original = reolink_connect.mac_address - reolink_connect.mac_address = None - assert not await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.SETUP_RETRY - - reolink_connect.mac_address = original - - -async def test_subscribe_error( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test error when subscribing to ONVIF does not block startup.""" - reolink_connect.subscribe.side_effect = ReolinkError("Test Error") - reolink_connect.subscribed.return_value = False - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - reolink_connect.subscribe.reset_mock(side_effect=True) - - -async def test_subscribe_unsuccesfull( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test that a unsuccessful ONVIF subscription does not block startup.""" - reolink_connect.subscribed.return_value = False - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - -async def test_initial_ONVIF_not_supported( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test setup when initial ONVIF is not supported.""" - - def test_supported(ch, key): - """Test supported function.""" - if key == 
"initial_ONVIF_state": - return False - return True - - reolink_connect.supported = test_supported - - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - -async def test_ONVIF_not_supported( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test setup is not blocked when ONVIF API returns NotSupportedError.""" - - def test_supported(ch, key): - """Test supported function.""" - if key == "initial_ONVIF_state": - return False - return True - - reolink_connect.supported = test_supported - reolink_connect.subscribed.return_value = False - reolink_connect.subscribe.side_effect = NotSupportedError("Test error") - - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - reolink_connect.subscribe.reset_mock(side_effect=True) - reolink_connect.subscribed.return_value = True - - -async def test_renew( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test renew of the ONVIF subscription.""" - reolink_connect.renewtimer.return_value = 1 - - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - reolink_connect.renew.assert_called() - - reolink_connect.renew.side_effect = SubscriptionError("Test error") - - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - reolink_connect.subscribe.assert_called() - - reolink_connect.subscribe.reset_mock() - reolink_connect.subscribe.side_effect = SubscriptionError("Test error") - - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - reolink_connect.subscribe.assert_called() - - reolink_connect.renew.reset_mock(side_effect=True) - reolink_connect.subscribe.reset_mock(side_effect=True) - - -async def test_long_poll_renew_fail( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test ONVIF long polling errors while renewing.""" - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - reolink_connect.subscribe.side_effect = NotSupportedError("Test error") - - freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # ensure long polling continues - reolink_connect.pull_point_request.assert_called() - - reolink_connect.subscribe.reset_mock(side_effect=True) - - -async def test_register_webhook_errors( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test errors while registering the webhook.""" - with patch( - "homeassistant.components.reolink.host.get_url", - side_effect=NoURLAvailableError("Test error"), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) is False - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_long_poll_stop_when_push( - hass: HomeAssistant, - hass_client_no_auth: 
ClientSessionGenerator, - freezer: FrozenDateTimeFactory, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test ONVIF long polling stops when ONVIF push comes in.""" - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - # start ONVIF long polling because ONVIF push did not came in - freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # simulate ONVIF push callback - client = await hass_client_no_auth() - reolink_connect.ONVIF_event_callback.return_value = None - webhook_id = config_entry.runtime_data.host.webhook_id - await client.post(f"/api/webhook/{webhook_id}") - - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - reolink_connect.unsubscribe.assert_called_with(sub_type=SubType.long_poll) - - -async def test_long_poll_errors( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test errors during ONVIF long polling.""" - reolink_connect.pull_point_request.reset_mock() - - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - reolink_connect.pull_point_request.side_effect = ReolinkError("Test error") - - # start ONVIF long polling because ONVIF push did not came in - freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - reolink_connect.pull_point_request.assert_called_once() - reolink_connect.pull_point_request.side_effect = Exception("Test error") - - freezer.tick(timedelta(seconds=LONG_POLL_ERROR_COOLDOWN)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - freezer.tick(timedelta(seconds=LONG_POLL_COOLDOWN)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - reolink_connect.unsubscribe.assert_called_with(sub_type=SubType.long_poll) - - reolink_connect.pull_point_request.reset_mock(side_effect=True) - - -async def test_fast_polling_errors( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test errors during ONVIF fast polling.""" - reolink_connect.get_motion_state_all_ch.reset_mock() - reolink_connect.get_motion_state_all_ch.side_effect = ReolinkError("Test error") - reolink_connect.pull_point_request.side_effect = ReolinkError("Test error") - - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - # start ONVIF long polling because ONVIF push did not came in - freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # start ONVIF fast polling because ONVIF long polling did not came in - freezer.tick(timedelta(seconds=FIRST_ONVIF_LONG_POLL_TIMEOUT)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert reolink_connect.get_motion_state_all_ch.call_count == 1 - - freezer.tick(timedelta(seconds=POLL_INTERVAL_NO_PUSH)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # fast polling continues despite errors - assert reolink_connect.get_motion_state_all_ch.call_count == 2 - - 
reolink_connect.get_motion_state_all_ch.reset_mock(side_effect=True) - reolink_connect.pull_point_request.reset_mock(side_effect=True) - - -async def test_diagnostics_event_connection( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - hass_client_no_auth: ClientSessionGenerator, - freezer: FrozenDateTimeFactory, - reolink_connect: MagicMock, - config_entry: MockConfigEntry, -) -> None: - """Test Reolink diagnostics event connection return values.""" - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert diag["event connection"] == "Fast polling" - - # start ONVIF long polling because ONVIF push did not came in - freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert diag["event connection"] == "ONVIF long polling" - - # simulate ONVIF push callback - client = await hass_client_no_auth() - reolink_connect.ONVIF_event_callback.return_value = None - webhook_id = config_entry.runtime_data.host.webhook_id - await client.post(f"/api/webhook/{webhook_id}") - - diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert diag["event connection"] == "ONVIF push" - - # set TCP push as active - reolink_connect.baichuan.events_active = True - diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert diag["event connection"] == "TCP push" diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index 67ac2db8262..a6c798f9415 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -1,50 +1,46 @@ """Test the Reolink init.""" import asyncio +from datetime import timedelta from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch -from freezegun.api import FrozenDateTimeFactory import pytest -from reolink_aio.api import Chime from reolink_aio.exceptions import CredentialsInvalidError, ReolinkError from homeassistant.components.reolink import ( DEVICE_UPDATE_INTERVAL, FIRMWARE_UPDATE_INTERVAL, NUM_CRED_ERRORS, + const, ) -from homeassistant.components.reolink.const import DOMAIN +from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_PORT, STATE_OFF, STATE_UNAVAILABLE, Platform -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.core_config import async_process_ha_core_config +from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE, Platform +from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant from homeassistant.helpers import ( device_registry as dr, entity_registry as er, issue_registry as ir, ) from homeassistant.setup import async_setup_component +from homeassistant.util.dt import utcnow from .conftest import ( TEST_CAM_MODEL, TEST_HOST_MODEL, TEST_MAC, TEST_NVR_NAME, - TEST_PORT, TEST_UID, TEST_UID_CAM, ) from tests.common import MockConfigEntry, async_fire_time_changed -from tests.typing import WebSocketGenerator pytestmark = pytest.mark.usefixtures("reolink_connect", "reolink_platforms") -CHIME_MODEL = "Reolink Chime" - -async def test_wait(*args, **key_args) -> None: +async def test_wait(*args, **key_args): 
"""Ensure a mocked function takes a bit of time to be able to timeout in test.""" await asyncio.sleep(0) @@ -93,7 +89,6 @@ async def test_failures_parametrized( expected: ConfigEntryState, ) -> None: """Test outcomes when changing errors.""" - original = getattr(reolink_connect, attr) setattr(reolink_connect, attr, value) assert await hass.config_entries.async_setup(config_entry.entry_id) is ( expected is ConfigEntryState.LOADED @@ -102,60 +97,58 @@ async def test_failures_parametrized( assert config_entry.state == expected - setattr(reolink_connect, attr, original) - async def test_firmware_error_twice( hass: HomeAssistant, - freezer: FrozenDateTimeFactory, reolink_connect: MagicMock, config_entry: MockConfigEntry, ) -> None: """Test when the firmware update fails 2 times.""" - reolink_connect.check_new_firmware.side_effect = ReolinkError("Test error") + reolink_connect.check_new_firmware = AsyncMock( + side_effect=ReolinkError("Test error") + ) with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]): - assert await hass.config_entries.async_setup(config_entry.entry_id) + assert await hass.config_entries.async_setup(config_entry.entry_id) is True await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED entity_id = f"{Platform.UPDATE}.{TEST_NVR_NAME}_firmware" - assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.is_state(entity_id, STATE_OFF) - freezer.tick(FIRMWARE_UPDATE_INTERVAL) - async_fire_time_changed(hass) + async_fire_time_changed( + hass, utcnow() + FIRMWARE_UPDATE_INTERVAL + timedelta(minutes=1) + ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - reolink_connect.check_new_firmware.reset_mock(side_effect=True) + assert hass.states.is_state(entity_id, STATE_UNAVAILABLE) async def test_credential_error_three( hass: HomeAssistant, - freezer: FrozenDateTimeFactory, reolink_connect: MagicMock, config_entry: MockConfigEntry, issue_registry: ir.IssueRegistry, ) -> None: """Test when the update gives credential error 3 times.""" with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): - assert await hass.config_entries.async_setup(config_entry.entry_id) + assert await hass.config_entries.async_setup(config_entry.entry_id) is True await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED - reolink_connect.get_states.side_effect = CredentialsInvalidError("Test error") + reolink_connect.get_states = AsyncMock( + side_effect=CredentialsInvalidError("Test error") + ) - issue_id = f"config_entry_reauth_{DOMAIN}_{config_entry.entry_id}" + issue_id = f"config_entry_reauth_{const.DOMAIN}_{config_entry.entry_id}" for _ in range(NUM_CRED_ERRORS): - assert (HOMEASSISTANT_DOMAIN, issue_id) not in issue_registry.issues - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) + assert (HA_DOMAIN, issue_id) not in issue_registry.issues + async_fire_time_changed( + hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(seconds=30) + ) await hass.async_block_till_done() - assert (HOMEASSISTANT_DOMAIN, issue_id) in issue_registry.issues - - reolink_connect.get_states.reset_mock(side_effect=True) + assert (HA_DOMAIN, issue_id) in issue_registry.issues async def test_entry_reloading( @@ -165,7 +158,6 @@ async def test_entry_reloading( ) -> None: """Test the entry is reloaded correctly when settings change.""" reolink_connect.is_nvr = False - reolink_connect.logout.reset_mock() assert await 
hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -178,8 +170,6 @@ async def test_entry_reloading( assert reolink_connect.logout.call_count == 1 assert config_entry.title == "New Name" - reolink_connect.is_nvr = True - @pytest.mark.parametrize( ("attr", "value", "expected_models"), @@ -189,27 +179,16 @@ async def test_entry_reloading( None, [TEST_HOST_MODEL, TEST_CAM_MODEL], ), - ( - "is_nvr", - False, - [TEST_HOST_MODEL, TEST_CAM_MODEL], - ), ("channels", [], [TEST_HOST_MODEL]), ( - "camera_online", - Mock(return_value=False), - [TEST_HOST_MODEL], - ), - ( - "channel_for_uid", - Mock(return_value=-1), - [TEST_HOST_MODEL], + "camera_model", + Mock(return_value="RLC-567"), + [TEST_HOST_MODEL, "RLC-567"], ), ], ) -async def test_removing_disconnected_cams( +async def test_cleanup_disconnected_cams( hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, config_entry: MockConfigEntry, reolink_connect: MagicMock, device_registry: dr.DeviceRegistry, @@ -218,10 +197,8 @@ async def test_removing_disconnected_cams( value: Any, expected_models: list[str], ) -> None: - """Test device and entity registry are cleaned up when camera is removed.""" + """Test device and entity registry are cleaned up when camera is disconnected from NVR.""" reolink_connect.channels = [0] - assert await async_setup_component(hass, "config", {}) - client = await hass_ws_client(hass) # setup CH 0 and NVR switch entities/device with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): assert await hass.config_entries.async_setup(config_entry.entry_id) @@ -233,91 +210,11 @@ async def test_removing_disconnected_cams( device_models = [device.model for device in device_entries] assert sorted(device_models) == sorted([TEST_HOST_MODEL, TEST_CAM_MODEL]) - # Try to remove the device after 'disconnecting' a camera. + # reload integration after 'disconnecting' a camera. 
if attr is not None: - original = getattr(reolink_connect, attr) setattr(reolink_connect, attr, value) - expected_success = TEST_CAM_MODEL not in expected_models - for device in device_entries: - if device.model == TEST_CAM_MODEL: - response = await client.remove_device(device.id, config_entry.entry_id) - assert response["success"] == expected_success - - device_entries = dr.async_entries_for_config_entry( - device_registry, config_entry.entry_id - ) - device_models = [device.model for device in device_entries] - assert sorted(device_models) == sorted(expected_models) - - if attr is not None: - setattr(reolink_connect, attr, original) - - -@pytest.mark.parametrize( - ("attr", "value", "expected_models"), - [ - ( - None, - None, - [TEST_HOST_MODEL, TEST_CAM_MODEL, CHIME_MODEL], - ), - ( - "connect_state", - -1, - [TEST_HOST_MODEL, TEST_CAM_MODEL], - ), - ( - "remove", - -1, - [TEST_HOST_MODEL, TEST_CAM_MODEL], - ), - ], -) -async def test_removing_chime( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - test_chime: Chime, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - attr: str | None, - value: Any, - expected_models: list[str], -) -> None: - """Test removing a chime.""" - reolink_connect.channels = [0] - assert await async_setup_component(hass, "config", {}) - client = await hass_ws_client(hass) - # setup CH 0 and NVR switch entities/device with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - device_entries = dr.async_entries_for_config_entry( - device_registry, config_entry.entry_id - ) - device_models = [device.model for device in device_entries] - assert sorted(device_models) == sorted( - [TEST_HOST_MODEL, TEST_CAM_MODEL, CHIME_MODEL] - ) - - if attr == "remove": - - async def test_remove_chime(*args, **key_args): - """Remove chime.""" - test_chime.connect_state = -1 - - test_chime.remove = test_remove_chime - elif attr is not None: - setattr(test_chime, attr, value) - - # Try to remove the device after 'disconnecting' a chime. 
- expected_success = CHIME_MODEL not in expected_models - for device in device_entries: - if device.model == CHIME_MODEL: - response = await client.remove_device(device.id, config_entry.entry_id) - assert response["success"] == expected_success + assert await hass.config_entries.async_reload(config_entry.entry_id) device_entries = dr.async_entries_for_config_entry( device_registry, config_entry.entry_id @@ -364,15 +261,6 @@ async def test_removing_chime( True, False, ), - ( - f"{TEST_MAC}_chime123456789_play_ringtone", - f"{TEST_UID}_chime123456789_play_ringtone", - f"{TEST_MAC}_chime123456789", - f"{TEST_UID}_chime123456789", - Platform.SELECT, - True, - False, - ), ( f"{TEST_MAC}_0_record_audio", f"{TEST_MAC}_{TEST_UID_CAM}_record_audio", @@ -429,14 +317,14 @@ async def test_migrate_entity_ids( reolink_connect.supported = mock_supported dev_entry = device_registry.async_get_or_create( - identifiers={(DOMAIN, original_dev_id)}, + identifiers={(const.DOMAIN, original_dev_id)}, config_entry_id=config_entry.entry_id, disabled_by=None, ) entity_registry.async_get_or_create( domain=domain, - platform=DOMAIN, + platform=const.DOMAIN, unique_id=original_id, config_entry=config_entry, suggested_object_id=original_id, @@ -444,13 +332,16 @@ async def test_migrate_entity_ids( device_id=dev_entry.id, ) - assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) - assert entity_registry.async_get_entity_id(domain, DOMAIN, new_id) is None + assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) + assert entity_registry.async_get_entity_id(domain, const.DOMAIN, new_id) is None - assert device_registry.async_get_device(identifiers={(DOMAIN, original_dev_id)}) + assert device_registry.async_get_device( + identifiers={(const.DOMAIN, original_dev_id)} + ) if new_dev_id != original_dev_id: assert ( - device_registry.async_get_device(identifiers={(DOMAIN, new_dev_id)}) is None + device_registry.async_get_device(identifiers={(const.DOMAIN, new_dev_id)}) + is None ) # setup CH 0 and host entities/device @@ -458,15 +349,19 @@ async def test_migrate_entity_ids( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) is None - assert entity_registry.async_get_entity_id(domain, DOMAIN, new_id) + assert ( + entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) is None + ) + assert entity_registry.async_get_entity_id(domain, const.DOMAIN, new_id) if new_dev_id != original_dev_id: assert ( - device_registry.async_get_device(identifiers={(DOMAIN, original_dev_id)}) + device_registry.async_get_device( + identifiers={(const.DOMAIN, original_dev_id)} + ) is None ) - assert device_registry.async_get_device(identifiers={(DOMAIN, new_dev_id)}) + assert device_registry.async_get_device(identifiers={(const.DOMAIN, new_dev_id)}) async def test_no_repair_issue( @@ -480,11 +375,11 @@ async def test_no_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (DOMAIN, "https_webhook") not in issue_registry.issues - assert (DOMAIN, "webhook_url") not in issue_registry.issues - assert (DOMAIN, "enable_port") not in issue_registry.issues - assert (DOMAIN, "firmware_update") not in issue_registry.issues - assert (DOMAIN, "ssl") not in issue_registry.issues + assert (const.DOMAIN, "https_webhook") not in issue_registry.issues + assert (const.DOMAIN, "webhook_url") not in issue_registry.issues + 
assert (const.DOMAIN, "enable_port") not in issue_registry.issues + assert (const.DOMAIN, "firmware_update") not in issue_registry.issues + assert (const.DOMAIN, "ssl") not in issue_registry.issues async def test_https_repair_issue( @@ -511,7 +406,7 @@ async def test_https_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (DOMAIN, "https_webhook") in issue_registry.issues + assert (const.DOMAIN, "https_webhook") in issue_registry.issues async def test_ssl_repair_issue( @@ -541,7 +436,7 @@ async def test_ssl_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (DOMAIN, "ssl") in issue_registry.issues + assert (const.DOMAIN, "ssl") in issue_registry.issues @pytest.mark.parametrize("protocol", ["rtsp", "rtmp"]) @@ -553,7 +448,7 @@ async def test_port_repair_issue( issue_registry: ir.IssueRegistry, ) -> None: """Test repairs issue is raised when auto enable of ports fails.""" - reolink_connect.set_net_port.side_effect = ReolinkError("Test error") + reolink_connect.set_net_port = AsyncMock(side_effect=ReolinkError("Test error")) reolink_connect.onvif_enabled = False reolink_connect.rtsp_enabled = False reolink_connect.rtmp_enabled = False @@ -561,9 +456,7 @@ async def test_port_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (DOMAIN, "enable_port") in issue_registry.issues - - reolink_connect.set_net_port.reset_mock(side_effect=True) + assert (const.DOMAIN, "enable_port") in issue_registry.issues async def test_webhook_repair_issue( @@ -586,7 +479,7 @@ async def test_webhook_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (DOMAIN, "webhook_url") in issue_registry.issues + assert (const.DOMAIN, "webhook_url") in issue_registry.issues async def test_firmware_repair_issue( @@ -600,42 +493,4 @@ async def test_firmware_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (DOMAIN, "firmware_update_host") in issue_registry.issues - - -async def test_new_device_discovered( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - reolink_connect: MagicMock, - config_entry: MockConfigEntry, -) -> None: - """Test the entry is reloaded when a new camera or chime is detected.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - reolink_connect.logout.reset_mock() - - assert reolink_connect.logout.call_count == 0 - reolink_connect.new_devices = True - - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert reolink_connect.logout.call_count == 1 - - -async def test_port_changed( - hass: HomeAssistant, - reolink_connect: MagicMock, - config_entry: MockConfigEntry, -) -> None: - """Test config_entry port update when it has changed during initial login.""" - assert config_entry.data[CONF_PORT] == TEST_PORT - reolink_connect.port = 4567 - - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.data[CONF_PORT] == 4567 + assert (const.DOMAIN, "firmware_update_host") in issue_registry.issues diff --git a/tests/components/reolink/test_light.py 
b/tests/components/reolink/test_light.py deleted file mode 100644 index 948a7fce0fe..00000000000 --- a/tests/components/reolink/test_light.py +++ /dev/null @@ -1,249 +0,0 @@ -"""Test the Reolink light platform.""" - -from unittest.mock import MagicMock, call, patch - -import pytest -from reolink_aio.exceptions import InvalidParameterError, ReolinkError - -from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_ON, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from .conftest import TEST_NVR_NAME - -from tests.common import MockConfigEntry - - -async def test_light_state( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test light entity state with floodlight.""" - reolink_connect.whiteled_state.return_value = True - reolink_connect.whiteled_brightness.return_value = 100 - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_floodlight" - - state = hass.states.get(entity_id) - assert state.state == STATE_ON - assert state.attributes["brightness"] == 255 - - -async def test_light_brightness_none( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test light entity with floodlight and brightness returning None.""" - reolink_connect.whiteled_state.return_value = True - reolink_connect.whiteled_brightness.return_value = None - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_floodlight" - - state = hass.states.get(entity_id) - assert state.state == STATE_ON - assert state.attributes["brightness"] is None - - -async def test_light_turn_off( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test light turn off service.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_floodlight" - - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - reolink_connect.set_whiteled.assert_called_with(0, state=False) - - reolink_connect.set_whiteled.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - reolink_connect.set_whiteled.reset_mock(side_effect=True) - - -async def test_light_turn_on( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test light turn on service.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - 
await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_floodlight" - - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 51}, - blocking=True, - ) - reolink_connect.set_whiteled.assert_has_calls( - [call(0, brightness=20), call(0, state=True)] - ) - - reolink_connect.set_whiteled.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - reolink_connect.set_whiteled.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 51}, - blocking=True, - ) - - reolink_connect.set_whiteled.side_effect = InvalidParameterError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 51}, - blocking=True, - ) - - reolink_connect.set_whiteled.reset_mock(side_effect=True) - - -async def test_host_light_state( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test host light entity state with status led.""" - reolink_connect.state_light = True - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_status_led" - - state = hass.states.get(entity_id) - assert state.state == STATE_ON - - -async def test_host_light_turn_off( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test host light turn off service.""" - - def mock_supported(ch, capability): - if capability == "power_led": - return False - return True - - reolink_connect.supported = mock_supported - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_status_led" - - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - reolink_connect.set_state_light.assert_called_with(False) - - reolink_connect.set_state_light.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - reolink_connect.set_state_light.reset_mock(side_effect=True) - - -async def test_host_light_turn_on( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test host light turn on service.""" - - def mock_supported(ch, capability): - if capability == "power_led": - return False - return True - - reolink_connect.supported = mock_supported - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = 
f"{Platform.LIGHT}.{TEST_NVR_NAME}_status_led" - - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - reolink_connect.set_state_light.assert_called_with(True) - - reolink_connect.set_state_light.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) diff --git a/tests/components/reolink/test_media_source.py b/tests/components/reolink/test_media_source.py index 32afd1f73ca..0d86106e8e5 100644 --- a/tests/components/reolink/test_media_source.py +++ b/tests/components/reolink/test_media_source.py @@ -10,12 +10,13 @@ from reolink_aio.exceptions import ReolinkError from homeassistant.components.media_source import ( DOMAIN as MEDIA_SOURCE_DOMAIN, URI_SCHEME, - Unresolvable, async_browse_media, async_resolve_media, ) +from homeassistant.components.media_source.error import Unresolvable +from homeassistant.components.reolink import const from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL -from homeassistant.components.reolink.const import CONF_USE_HTTPS, DOMAIN +from homeassistant.components.reolink.const import DOMAIN from homeassistant.components.stream import DOMAIN as MEDIA_STREAM_DOMAIN from homeassistant.const import ( CONF_HOST, @@ -32,7 +33,6 @@ from homeassistant.setup import async_setup_component from .conftest import ( TEST_HOST2, - TEST_HOST_MODEL, TEST_MAC2, TEST_NVR_NAME, TEST_NVR_NAME2, @@ -54,7 +54,6 @@ TEST_FILE_NAME = f"{TEST_YEAR}{TEST_MONTH}{TEST_DAY}{TEST_HOUR}{TEST_MINUTE}00" TEST_FILE_NAME_MP4 = f"{TEST_YEAR}{TEST_MONTH}{TEST_DAY}{TEST_HOUR}{TEST_MINUTE}00.mp4" TEST_STREAM = "main" TEST_CHANNEL = "0" -TEST_CAM_NAME = "Cam new name" TEST_MIME_TYPE = "application/x-mpegURL" TEST_MIME_TYPE_MP4 = "video/mp4" @@ -130,8 +129,7 @@ async def test_browsing( ) -> None: """Test browsing the Reolink three.""" entry_id = config_entry.entry_id - reolink_connect.supported.return_value = 1 - reolink_connect.model = "Reolink TrackMix PoE" + reolink_connect.api_version.return_value = 1 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): assert await hass.config_entries.async_setup(entry_id) is True @@ -139,7 +137,7 @@ async def test_browsing( entries = dr.async_entries_for_config_entry(device_registry, entry_id) assert len(entries) > 0 - device_registry.async_update_device(entries[0].id, name_by_user=TEST_CAM_NAME) + device_registry.async_update_device(entries[0].id, name_by_user="Cam new name") caplog.set_level(logging.DEBUG) @@ -151,7 +149,6 @@ async def test_browsing( assert browse.title == "Reolink" assert browse.identifier is None assert browse.children[0].identifier == browse_root_id - assert browse.children[0].title == f"{TEST_CAM_NAME} lens 0" # browse resolution select browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{browse_root_id}") @@ -162,7 +159,7 @@ async def test_browsing( browse_res_AT_sub_id = f"RES|{entry_id}|{TEST_CHANNEL}|autotrack_sub" browse_res_AT_main_id = f"RES|{entry_id}|{TEST_CHANNEL}|autotrack_main" assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} lens 0" + assert browse.title == TEST_NVR_NAME assert browse.identifier == browse_resolution_id assert browse.children[0].identifier == browse_res_sub_id assert browse.children[1].identifier == browse_res_main_id @@ -178,19 +175,19 @@ async def test_browsing( browse = await async_browse_media(hass, 
f"{URI_SCHEME}{DOMAIN}/{browse_res_sub_id}") assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} lens 0 Low res." + assert browse.title == f"{TEST_NVR_NAME} Low res." browse = await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_AT_sub_id}" ) assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} lens 0 Autotrack low res." + assert browse.title == f"{TEST_NVR_NAME} Autotrack low res." browse = await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_AT_main_id}" ) assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} lens 0 Autotrack high res." + assert browse.title == f"{TEST_NVR_NAME} Autotrack high res." browse = await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_main_id}" @@ -200,7 +197,7 @@ async def test_browsing( browse_day_0_id = f"DAY|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_YEAR}|{TEST_MONTH}|{TEST_DAY}" browse_day_1_id = f"DAY|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_YEAR}|{TEST_MONTH}|{TEST_DAY2}" assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} lens 0 High res." + assert browse.title == f"{TEST_NVR_NAME} High res." assert browse.identifier == browse_days_id assert browse.children[0].identifier == browse_day_0_id assert browse.children[1].identifier == browse_day_1_id @@ -220,14 +217,11 @@ async def test_browsing( browse_file_id = f"FILE|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_FILE_NAME}" assert browse.domain == DOMAIN assert ( - browse.title - == f"{TEST_NVR_NAME} lens 0 High res. {TEST_YEAR}/{TEST_MONTH}/{TEST_DAY}" + browse.title == f"{TEST_NVR_NAME} High res. {TEST_YEAR}/{TEST_MONTH}/{TEST_DAY}" ) assert browse.identifier == browse_files_id assert browse.children[0].identifier == browse_file_id - reolink_connect.model = TEST_HOST_MODEL - async def test_browsing_unsupported_encoding( hass: HomeAssistant, @@ -275,10 +269,10 @@ async def test_browsing_rec_playback_unsupported( config_entry: MockConfigEntry, ) -> None: """Test browsing a Reolink camera which does not support playback of recordings.""" - reolink_connect.supported.return_value = 0 + reolink_connect.api_version.return_value = 0 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) + assert await hass.config_entries.async_setup(config_entry.entry_id) is True await hass.async_block_till_done() # browse root @@ -296,10 +290,10 @@ async def test_browsing_errors( config_entry: MockConfigEntry, ) -> None: """Test browsing a Reolink camera errors.""" - reolink_connect.supported.return_value = 1 + reolink_connect.api_version.return_value = 1 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) + assert await hass.config_entries.async_setup(config_entry.entry_id) is True await hass.async_block_till_done() # browse root @@ -315,22 +309,22 @@ async def test_browsing_not_loaded( config_entry: MockConfigEntry, ) -> None: """Test browsing a Reolink camera integration which is not loaded.""" - reolink_connect.supported.return_value = 1 + reolink_connect.api_version.return_value = 1 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) + assert await hass.config_entries.async_setup(config_entry.entry_id) is True await hass.async_block_till_done() - reolink_connect.get_host_data.side_effect = ReolinkError("Test 
error") + reolink_connect.get_host_data = AsyncMock(side_effect=ReolinkError("Test error")) config_entry2 = MockConfigEntry( - domain=DOMAIN, + domain=const.DOMAIN, unique_id=format_mac(TEST_MAC2), data={ CONF_HOST: TEST_HOST2, CONF_USERNAME: TEST_USERNAME2, CONF_PASSWORD: TEST_PASSWORD2, CONF_PORT: TEST_PORT, - CONF_USE_HTTPS: TEST_USE_HTTPS, + const.CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -348,5 +342,3 @@ async def test_browsing_not_loaded( assert browse.title == "Reolink" assert browse.identifier is None assert len(browse.children) == 1 - - reolink_connect.get_host_data.side_effect = None diff --git a/tests/components/reolink/test_number.py b/tests/components/reolink/test_number.py deleted file mode 100644 index c6507fa36c1..00000000000 --- a/tests/components/reolink/test_number.py +++ /dev/null @@ -1,159 +0,0 @@ -"""Test the Reolink number platform.""" - -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -from reolink_aio.api import Chime -from reolink_aio.exceptions import InvalidParameterError, ReolinkError - -from homeassistant.components.number import ( - ATTR_VALUE, - DOMAIN as NUMBER_DOMAIN, - SERVICE_SET_VALUE, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from .conftest import TEST_NVR_NAME - -from tests.common import MockConfigEntry - - -async def test_number( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test number entity with volume.""" - reolink_connect.volume.return_value = 80 - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.NUMBER]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.NUMBER}.{TEST_NVR_NAME}_volume" - - assert hass.states.get(entity_id).state == "80" - - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, - blocking=True, - ) - reolink_connect.set_volume.assert_called_with(0, volume=50) - - reolink_connect.set_volume.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, - blocking=True, - ) - - reolink_connect.set_volume.side_effect = InvalidParameterError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, - blocking=True, - ) - - reolink_connect.set_volume.reset_mock(side_effect=True) - - -async def test_host_number( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test number entity with volume.""" - reolink_connect.alarm_volume = 85 - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.NUMBER]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.NUMBER}.{TEST_NVR_NAME}_alarm_volume" - - assert hass.states.get(entity_id).state == "85" - - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 45}, - 
blocking=True, - ) - reolink_connect.set_hub_audio.assert_called_with(alarm_volume=45) - - reolink_connect.set_hub_audio.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 45}, - blocking=True, - ) - - reolink_connect.set_hub_audio.side_effect = InvalidParameterError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 45}, - blocking=True, - ) - - -async def test_chime_number( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - test_chime: Chime, -) -> None: - """Test number entity of a chime with chime volume.""" - test_chime.volume = 3 - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.NUMBER]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.NUMBER}.test_chime_volume" - - assert hass.states.get(entity_id).state == "3" - - test_chime.set_option = AsyncMock() - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 2}, - blocking=True, - ) - test_chime.set_option.assert_called_with(volume=2) - - test_chime.set_option.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 1}, - blocking=True, - ) - - test_chime.set_option.side_effect = InvalidParameterError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 1}, - blocking=True, - ) - - test_chime.set_option.reset_mock(side_effect=True) diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py deleted file mode 100644 index 7910174380a..00000000000 --- a/tests/components/reolink/test_select.py +++ /dev/null @@ -1,161 +0,0 @@ -"""Test the Reolink select platform.""" - -from unittest.mock import AsyncMock, MagicMock, patch - -from freezegun.api import FrozenDateTimeFactory -import pytest -from reolink_aio.api import Chime -from reolink_aio.exceptions import InvalidParameterError, ReolinkError - -from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL -from homeassistant.components.select import DOMAIN as SELECT_DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_SELECT_OPTION, - STATE_UNKNOWN, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import entity_registry as er - -from .conftest import TEST_NVR_NAME - -from tests.common import MockConfigEntry, async_fire_time_changed - - -async def test_floodlight_mode_select( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - entity_registry: er.EntityRegistry, -) -> None: - """Test select entity with floodlight_mode.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert 
config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_floodlight_mode" - assert hass.states.get(entity_id).state == "auto" - - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, "option": "off"}, - blocking=True, - ) - reolink_connect.set_whiteled.assert_called_once() - - reolink_connect.set_whiteled.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, "option": "off"}, - blocking=True, - ) - - reolink_connect.set_whiteled.side_effect = InvalidParameterError("Test error") - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, "option": "off"}, - blocking=True, - ) - - reolink_connect.whiteled_mode.return_value = -99 # invalid value - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == STATE_UNKNOWN - - reolink_connect.set_whiteled.reset_mock(side_effect=True) - - -async def test_play_quick_reply_message( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - entity_registry: er.EntityRegistry, -) -> None: - """Test select play_quick_reply_message entity.""" - reolink_connect.quick_reply_dict.return_value = {0: "off", 1: "test message"} - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_play_quick_reply_message" - assert hass.states.get(entity_id).state == STATE_UNKNOWN - - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, "option": "test message"}, - blocking=True, - ) - reolink_connect.play_quick_reply.assert_called_once() - - reolink_connect.quick_reply_dict = MagicMock() - - -async def test_chime_select( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - test_chime: Chime, - entity_registry: er.EntityRegistry, -) -> None: - """Test chime select entity.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" - assert hass.states.get(entity_id).state == "pianokey" - - # Test selecting chime ringtone option - test_chime.set_tone = AsyncMock() - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, "option": "off"}, - blocking=True, - ) - test_chime.set_tone.assert_called_once() - - test_chime.set_tone.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, "option": "off"}, - blocking=True, - ) - - test_chime.set_tone.side_effect = InvalidParameterError("Test error") - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, "option": "off"}, - 
blocking=True, - ) - - # Test unavailable - test_chime.event_info = {} - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == STATE_UNKNOWN - - test_chime.set_tone.reset_mock(side_effect=True) diff --git a/tests/components/reolink/test_sensor.py b/tests/components/reolink/test_sensor.py deleted file mode 100644 index df164634355..00000000000 --- a/tests/components/reolink/test_sensor.py +++ /dev/null @@ -1,62 +0,0 @@ -"""Test the Reolink sensor platform.""" - -from unittest.mock import MagicMock, patch - -import pytest - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant - -from .conftest import TEST_NVR_NAME - -from tests.common import MockConfigEntry - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensors( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test sensor entities.""" - reolink_connect.ptz_pan_position.return_value = 1200 - reolink_connect.wifi_connection = True - reolink_connect.wifi_signal = 3 - reolink_connect.hdd_list = [0] - reolink_connect.hdd_storage.return_value = 95 - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SENSOR]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.SENSOR}.{TEST_NVR_NAME}_ptz_pan_position" - assert hass.states.get(entity_id).state == "1200" - - entity_id = f"{Platform.SENSOR}.{TEST_NVR_NAME}_wi_fi_signal" - assert hass.states.get(entity_id).state == "3" - - entity_id = f"{Platform.SENSOR}.{TEST_NVR_NAME}_sd_0_storage" - assert hass.states.get(entity_id).state == "95" - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_hdd_sensors( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test hdd sensor entity.""" - reolink_connect.hdd_list = [0] - reolink_connect.hdd_type.return_value = "HDD" - reolink_connect.hdd_storage.return_value = 85 - reolink_connect.hdd_available.return_value = False - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SENSOR]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.SENSOR}.{TEST_NVR_NAME}_hdd_0_storage" - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/reolink/test_services.py b/tests/components/reolink/test_services.py deleted file mode 100644 index a4b7d8f0da4..00000000000 --- a/tests/components/reolink/test_services.py +++ /dev/null @@ -1,116 +0,0 @@ -"""Test the Reolink services.""" - -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -from reolink_aio.api import Chime -from reolink_aio.exceptions import InvalidParameterError, ReolinkError - -from homeassistant.components.reolink.const import DOMAIN as REOLINK_DOMAIN -from homeassistant.components.reolink.services import ATTR_RINGTONE -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_DEVICE_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from 
homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry - - -async def test_play_chime_service_entity( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - test_chime: Chime, - entity_registry: er.EntityRegistry, -) -> None: - """Test chime play service.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" - entity = entity_registry.async_get(entity_id) - assert entity is not None - device_id = entity.device_id - - # Test chime play service with device - test_chime.play = AsyncMock() - await hass.services.async_call( - REOLINK_DOMAIN, - "play_chime", - {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, - blocking=True, - ) - test_chime.play.assert_called_once() - - # Test errors - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - REOLINK_DOMAIN, - "play_chime", - {ATTR_DEVICE_ID: ["invalid_id"], ATTR_RINGTONE: "attraction"}, - blocking=True, - ) - - test_chime.play = AsyncMock(side_effect=ReolinkError("Test error")) - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - REOLINK_DOMAIN, - "play_chime", - {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, - blocking=True, - ) - - test_chime.play = AsyncMock(side_effect=InvalidParameterError("Test error")) - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - REOLINK_DOMAIN, - "play_chime", - {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, - blocking=True, - ) - - reolink_connect.chime.return_value = None - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - REOLINK_DOMAIN, - "play_chime", - {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, - blocking=True, - ) - - -async def test_play_chime_service_unloaded( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - test_chime: Chime, - entity_registry: er.EntityRegistry, -) -> None: - """Test chime play service when config entry is unloaded.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" - entity = entity_registry.async_get(entity_id) - assert entity is not None - device_id = entity.device_id - - # Unload the config entry - assert await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.NOT_LOADED - - # Test chime play service - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - REOLINK_DOMAIN, - "play_chime", - {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, - blocking=True, - ) diff --git a/tests/components/reolink/test_siren.py b/tests/components/reolink/test_siren.py deleted file mode 100644 index f6ba8e0ea77..00000000000 --- a/tests/components/reolink/test_siren.py +++ /dev/null @@ -1,138 +0,0 @@ -"""Test the Reolink siren platform.""" - -from typing import Any -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -from reolink_aio.exceptions import InvalidParameterError, 
ReolinkError - -from homeassistant.components.siren import ( - ATTR_DURATION, - ATTR_VOLUME_LEVEL, - DOMAIN as SIREN_DOMAIN, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_UNKNOWN, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError - -from .conftest import TEST_NVR_NAME - -from tests.common import MockConfigEntry - - -async def test_siren( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test siren entity.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SIREN]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.SIREN}.{TEST_NVR_NAME}_siren" - assert hass.states.get(entity_id).state == STATE_UNKNOWN - - # test siren turn on - await hass.services.async_call( - SIREN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - reolink_connect.set_volume.assert_not_called() - reolink_connect.set_siren.assert_called_with(0, True, None) - - await hass.services.async_call( - SIREN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_VOLUME_LEVEL: 0.85, ATTR_DURATION: 2}, - blocking=True, - ) - reolink_connect.set_volume.assert_called_with(0, volume=85) - reolink_connect.set_siren.assert_called_with(0, True, 2) - - # test siren turn off - await hass.services.async_call( - SIREN_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - reolink_connect.set_siren.assert_called_with(0, False, None) - - -@pytest.mark.parametrize("attr", ["set_volume", "set_siren"]) -@pytest.mark.parametrize( - ("value", "expected"), - [ - ( - AsyncMock(side_effect=ReolinkError("Test error")), - HomeAssistantError, - ), - ( - AsyncMock(side_effect=InvalidParameterError("Test error")), - ServiceValidationError, - ), - ], -) -async def test_siren_turn_on_errors( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - attr: str, - value: Any, - expected: Any, -) -> None: - """Test errors when calling siren turn on service.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SIREN]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.SIREN}.{TEST_NVR_NAME}_siren" - - original = getattr(reolink_connect, attr) - setattr(reolink_connect, attr, value) - with pytest.raises(expected): - await hass.services.async_call( - SIREN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_VOLUME_LEVEL: 0.85, ATTR_DURATION: 2}, - blocking=True, - ) - - setattr(reolink_connect, attr, original) - - -async def test_siren_turn_off_errors( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, -) -> None: - """Test errors when calling siren turn off service.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SIREN]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.SIREN}.{TEST_NVR_NAME}_siren" - - reolink_connect.set_siren.side_effect = ReolinkError("Test error") - with 
pytest.raises(HomeAssistantError): - await hass.services.async_call( - SIREN_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - reolink_connect.set_siren.reset_mock(side_effect=True) diff --git a/tests/components/reolink/test_switch.py b/tests/components/reolink/test_switch.py deleted file mode 100644 index b2e82040ad4..00000000000 --- a/tests/components/reolink/test_switch.py +++ /dev/null @@ -1,442 +0,0 @@ -"""Test the Reolink switch platform.""" - -from unittest.mock import AsyncMock, MagicMock, patch - -from freezegun.api import FrozenDateTimeFactory -import pytest -from reolink_aio.api import Chime -from reolink_aio.exceptions import ReolinkError - -from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL -from homeassistant.components.reolink.const import DOMAIN -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_OFF, - STATE_ON, - STATE_UNAVAILABLE, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er, issue_registry as ir - -from .conftest import TEST_CAM_NAME, TEST_NVR_NAME, TEST_UID - -from tests.common import MockConfigEntry, async_fire_time_changed - - -async def test_cleanup_hdr_switch( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - entity_registry: er.EntityRegistry, -) -> None: - """Test cleanup of the HDR switch entity.""" - original_id = f"{TEST_UID}_hdr" - domain = Platform.SWITCH - - reolink_connect.channels = [0] - reolink_connect.supported.return_value = True - - entity_registry.async_get_or_create( - domain=domain, - platform=DOMAIN, - unique_id=original_id, - config_entry=config_entry, - suggested_object_id=original_id, - disabled_by=er.RegistryEntryDisabler.USER, - ) - - assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) - - # setup CH 0 and host entities/device - with patch("homeassistant.components.reolink.PLATFORMS", [domain]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) is None - - -@pytest.mark.parametrize( - ( - "original_id", - "capability", - ), - [ - ( - f"{TEST_UID}_record", - "recording", - ), - ( - f"{TEST_UID}_ftp_upload", - "ftp", - ), - ( - f"{TEST_UID}_push_notifications", - "push", - ), - ( - f"{TEST_UID}_email", - "email", - ), - ( - f"{TEST_UID}_buzzer", - "buzzer", - ), - ], -) -async def test_cleanup_hub_switches( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - entity_registry: er.EntityRegistry, - original_id: str, - capability: str, -) -> None: - """Test entity ids that need to be migrated.""" - - def mock_supported(ch, cap): - if cap == capability: - return False - return True - - domain = Platform.SWITCH - - reolink_connect.channels = [0] - reolink_connect.is_hub = True - reolink_connect.supported = mock_supported - - entity_registry.async_get_or_create( - domain=domain, - platform=DOMAIN, - unique_id=original_id, - config_entry=config_entry, - suggested_object_id=original_id, - disabled_by=er.RegistryEntryDisabler.USER, - ) - - assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) - - # setup CH 0 and host entities/device - with 
patch("homeassistant.components.reolink.PLATFORMS", [domain]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) is None - - reolink_connect.is_hub = False - reolink_connect.supported.return_value = True - - -async def test_hdr_switch_deprecated_repair_issue( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - entity_registry: er.EntityRegistry, - issue_registry: ir.IssueRegistry, -) -> None: - """Test repairs issue is raised when hdr switch entity used.""" - original_id = f"{TEST_UID}_hdr" - domain = Platform.SWITCH - - reolink_connect.channels = [0] - reolink_connect.supported.return_value = True - - entity_registry.async_get_or_create( - domain=domain, - platform=DOMAIN, - unique_id=original_id, - config_entry=config_entry, - suggested_object_id=original_id, - disabled_by=None, - ) - - assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) - - # setup CH 0 and host entities/device - with patch("homeassistant.components.reolink.PLATFORMS", [domain]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) - - assert (DOMAIN, "hdr_switch_deprecated") in issue_registry.issues - - -@pytest.mark.parametrize( - ( - "original_id", - "capability", - ), - [ - ( - f"{TEST_UID}_record", - "recording", - ), - ( - f"{TEST_UID}_ftp_upload", - "ftp", - ), - ( - f"{TEST_UID}_push_notifications", - "push", - ), - ( - f"{TEST_UID}_email", - "email", - ), - ( - f"{TEST_UID}_buzzer", - "buzzer", - ), - ], -) -async def test_hub_switches_repair_issue( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - entity_registry: er.EntityRegistry, - issue_registry: ir.IssueRegistry, - original_id: str, - capability: str, -) -> None: - """Test entity ids that need to be migrated.""" - - def mock_supported(ch, cap): - if cap == capability: - return False - return True - - domain = Platform.SWITCH - - reolink_connect.channels = [0] - reolink_connect.is_hub = True - reolink_connect.supported = mock_supported - - entity_registry.async_get_or_create( - domain=domain, - platform=DOMAIN, - unique_id=original_id, - config_entry=config_entry, - suggested_object_id=original_id, - disabled_by=None, - ) - - assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) - - # setup CH 0 and host entities/device - with patch("homeassistant.components.reolink.PLATFORMS", [domain]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) - assert (DOMAIN, "hub_switch_deprecated") in issue_registry.issues - - reolink_connect.is_hub = False - reolink_connect.supported.return_value = True - - -async def test_switch( - hass: HomeAssistant, - config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - reolink_connect: MagicMock, -) -> None: - """Test switch entity.""" - reolink_connect.camera_name.return_value = TEST_CAM_NAME - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.SWITCH}.{TEST_CAM_NAME}_record" - assert 
hass.states.get(entity_id).state == STATE_ON - - reolink_connect.recording_enabled.return_value = False - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == STATE_OFF - - # test switch turn on - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - reolink_connect.set_recording.assert_called_with(0, True) - - reolink_connect.set_recording.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - # test switch turn off - reolink_connect.set_recording.reset_mock(side_effect=True) - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - reolink_connect.set_recording.assert_called_with(0, False) - - reolink_connect.set_recording.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - reolink_connect.set_recording.reset_mock(side_effect=True) - - reolink_connect.camera_online.return_value = False - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - reolink_connect.camera_online.return_value = True - - -async def test_host_switch( - hass: HomeAssistant, - config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - reolink_connect: MagicMock, -) -> None: - """Test host switch entity.""" - reolink_connect.camera_name.return_value = TEST_CAM_NAME - reolink_connect.recording_enabled.return_value = True - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.SWITCH}.{TEST_NVR_NAME}_record" - assert hass.states.get(entity_id).state == STATE_ON - - reolink_connect.recording_enabled.return_value = False - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == STATE_OFF - - # test switch turn on - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - reolink_connect.set_recording.assert_called_with(None, True) - - reolink_connect.set_recording.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - # test switch turn off - reolink_connect.set_recording.reset_mock(side_effect=True) - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - reolink_connect.set_recording.assert_called_with(None, False) - - reolink_connect.set_recording.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - reolink_connect.set_recording.reset_mock(side_effect=True) - - -async def test_chime_switch( - hass: HomeAssistant, - 
config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - reolink_connect: MagicMock, - test_chime: Chime, -) -> None: - """Test host switch entity.""" - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.SWITCH}.test_chime_led" - assert hass.states.get(entity_id).state == STATE_ON - - test_chime.led_state = False - freezer.tick(DEVICE_UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == STATE_OFF - - # test switch turn on - test_chime.set_option = AsyncMock() - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - test_chime.set_option.assert_called_with(led=True) - - test_chime.set_option.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - # test switch turn off - test_chime.set_option.reset_mock(side_effect=True) - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - test_chime.set_option.assert_called_with(led=False) - - test_chime.set_option.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - test_chime.set_option.reset_mock(side_effect=True) diff --git a/tests/components/reolink/test_update.py b/tests/components/reolink/test_update.py deleted file mode 100644 index a13009204d7..00000000000 --- a/tests/components/reolink/test_update.py +++ /dev/null @@ -1,134 +0,0 @@ -"""Test the Reolink update platform.""" - -from unittest.mock import MagicMock, patch - -from freezegun.api import FrozenDateTimeFactory -import pytest -from reolink_aio.exceptions import ReolinkError -from reolink_aio.software_version import NewSoftwareVersion - -from homeassistant.components.reolink.update import POLL_AFTER_INSTALL -from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from .conftest import TEST_CAM_NAME, TEST_NVR_NAME - -from tests.common import MockConfigEntry, async_fire_time_changed -from tests.typing import WebSocketGenerator - -TEST_DOWNLOAD_URL = "https://reolink.com/test" -TEST_RELEASE_NOTES = "bugfix 1, bugfix 2" - - -@pytest.mark.parametrize("entity_name", [TEST_NVR_NAME, TEST_CAM_NAME]) -async def test_no_update( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - entity_name: str, -) -> None: - """Test update state when no update available.""" - reolink_connect.camera_name.return_value = TEST_CAM_NAME - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.UPDATE}.{entity_name}_firmware" - assert hass.states.get(entity_id).state == 
STATE_OFF - - -@pytest.mark.parametrize("entity_name", [TEST_NVR_NAME, TEST_CAM_NAME]) -async def test_update_str( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - entity_name: str, -) -> None: - """Test update state when update available with string from API.""" - reolink_connect.camera_name.return_value = TEST_CAM_NAME - reolink_connect.firmware_update_available.return_value = "New firmware available" - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.UPDATE}.{entity_name}_firmware" - assert hass.states.get(entity_id).state == STATE_ON - - -@pytest.mark.parametrize("entity_name", [TEST_NVR_NAME, TEST_CAM_NAME]) -async def test_update_firm( - hass: HomeAssistant, - config_entry: MockConfigEntry, - reolink_connect: MagicMock, - hass_ws_client: WebSocketGenerator, - freezer: FrozenDateTimeFactory, - entity_name: str, -) -> None: - """Test update state when update available with firmware info from reolink.com.""" - reolink_connect.camera_name.return_value = TEST_CAM_NAME - reolink_connect.camera_sw_version.return_value = "v1.1.0.0.0.0000" - new_firmware = NewSoftwareVersion( - version_string="v3.3.0.226_23031644", - download_url=TEST_DOWNLOAD_URL, - release_notes=TEST_RELEASE_NOTES, - ) - reolink_connect.firmware_update_available.return_value = new_firmware - - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - entity_id = f"{Platform.UPDATE}.{entity_name}_firmware" - assert hass.states.get(entity_id).state == STATE_ON - - # release notes - client = await hass_ws_client(hass) - await hass.async_block_till_done() - - await client.send_json( - { - "id": 1, - "type": "update/release_notes", - "entity_id": entity_id, - } - ) - result = await client.receive_json() - assert TEST_DOWNLOAD_URL in result["result"] - assert TEST_RELEASE_NOTES in result["result"] - - # test install - await hass.services.async_call( - UPDATE_DOMAIN, - SERVICE_INSTALL, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - reolink_connect.update_firmware.assert_called() - - reolink_connect.update_firmware.side_effect = ReolinkError("Test error") - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - UPDATE_DOMAIN, - SERVICE_INSTALL, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - # test _async_update_future - reolink_connect.camera_sw_version.return_value = "v3.3.0.226_23031644" - reolink_connect.firmware_update_available.return_value = False - freezer.tick(POLL_AFTER_INSTALL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == STATE_OFF - - reolink_connect.update_firmware.side_effect = None diff --git a/tests/components/repairs/__init__.py b/tests/components/repairs/__init__.py index e787d657e5c..a6786db9685 100644 --- a/tests/components/repairs/__init__.py +++ b/tests/components/repairs/__init__.py @@ -1,17 +1,5 @@ """Tests for the repairs integration.""" -from http import HTTPStatus -from typing import Any - -from aiohttp.test_utils import TestClient - -from homeassistant.components.repairs.issue_handler import ( # noqa: F401 - async_process_repairs_platforms, -) -from 
homeassistant.components.repairs.websocket_api import ( - RepairsFlowIndexView, - RepairsFlowResourceView, -) from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -39,23 +27,3 @@ async def get_repairs( assert msg["result"] return msg["result"]["issues"] - - -async def start_repair_fix_flow( - client: TestClient, handler: str, issue_id: int -) -> dict[str, Any]: - """Start a flow from an issue.""" - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": handler, "issue_id": issue_id}) - assert resp.status == HTTPStatus.OK - return await resp.json() - - -async def process_repair_fix_flow( - client: TestClient, flow_id: int, json: dict[str, Any] | None = None -) -> dict[str, Any]: - """Return the repairs list of issues.""" - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url, json=json) - assert resp.status == HTTPStatus.OK - return await resp.json() diff --git a/tests/components/repairs/test_websocket_api.py b/tests/components/repairs/test_websocket_api.py index bb3d50f9eb5..60d0364b985 100644 --- a/tests/components/repairs/test_websocket_api.py +++ b/tests/components/repairs/test_websocket_api.py @@ -18,11 +18,7 @@ from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from tests.common import MockUser, mock_platform -from tests.typing import ( - ClientSessionGenerator, - MockHAClientWebSocket, - WebSocketGenerator, -) +from tests.typing import ClientSessionGenerator, WebSocketGenerator DEFAULT_ISSUES = [ { @@ -38,11 +34,7 @@ DEFAULT_ISSUES = [ ] -async def create_issues( - hass: HomeAssistant, - ws_client: MockHAClientWebSocket, - issues: list[dict[str, Any]] | None = None, -) -> list[dict[str, Any]]: +async def create_issues(hass, ws_client, issues=None): """Create issues.""" def api_issue(issue): @@ -123,15 +115,11 @@ class MockFixFlowAbort(RepairsFlow): @pytest.fixture(autouse=True) -async def mock_repairs_integration(hass: HomeAssistant) -> None: +async def mock_repairs_integration(hass): """Mock a repairs integration.""" hass.config.components.add("fake_integration") - def async_create_fix_flow( - hass: HomeAssistant, - issue_id: str, - data: dict[str, str | int | float | None] | None, - ) -> RepairsFlow: + def async_create_fix_flow(hass, issue_id, data): assert issue_id in EXPECTED_DATA assert data == EXPECTED_DATA[issue_id] diff --git a/tests/components/rest/test_init.py b/tests/components/rest/test_init.py index c401362d604..0fda89cc329 100644 --- a/tests/components/rest/test_init.py +++ b/tests/components/rest/test_init.py @@ -12,12 +12,11 @@ from homeassistant import config as hass_config from homeassistant.components.rest.const import DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, - CONF_PACKAGES, SERVICE_RELOAD, STATE_UNAVAILABLE, UnitOfInformation, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow @@ -469,7 +468,7 @@ async def test_config_schema_via_packages(hass: HomeAssistant) -> None: "pack_11": {"rest": {"resource": "http://url1"}}, "pack_list": {"rest": [{"resource": "http://url2"}]}, } - config = {HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}} + config = {hass_config.HA_DOMAIN: {hass_config.CONF_PACKAGES: packages}} await hass_config.merge_packages_config(hass, config, packages) assert len(config) == 2 diff --git 
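# The rflink tests that follow all go through the suite's mock_rflink helper
# (defined in tests/components/rflink/test_init.py): it sets the integration up
# against a mocked transport and returns, in order, the event callback the
# protocol would normally invoke, the create coroutine, the protocol mock, and a
# fourth value these tests don't use. A condensed sketch of the resulting test
# flow, assuming the standard hass fixture; the single binary_sensor device
# config below is illustrative only.

from homeassistant.core import HomeAssistant

from .test_init import mock_rflink


async def test_event_updates_state_sketch(hass: HomeAssistant, monkeypatch) -> None:
    """Sketch: an incoming RFLink command updates the bound entity."""
    config = {
        "rflink": {"port": "/dev/ttyABC0"},
        "binary_sensor": {
            "platform": "rflink",
            "devices": {"protocol_0_0": {"name": "test"}},
        },
    }
    event_callback, _, _, _ = await mock_rflink(
        hass, config, "binary_sensor", monkeypatch
    )

    # Simulate the hardware reporting "on" and let Home Assistant process it.
    event_callback({"id": "protocol_0_0", "command": "on"})
    await hass.async_block_till_done()

    assert hass.states.get("binary_sensor.test").state == "on"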
a/tests/components/rflink/test_binary_sensor.py b/tests/components/rflink/test_binary_sensor.py index 9329edb3a00..c92eaa30fe8 100644 --- a/tests/components/rflink/test_binary_sensor.py +++ b/tests/components/rflink/test_binary_sensor.py @@ -7,7 +7,6 @@ automatic sensor creation. from datetime import timedelta from freezegun import freeze_time -import pytest from homeassistant.components.rflink import CONF_RECONNECT_INTERVAL from homeassistant.const import ( @@ -46,9 +45,7 @@ CONFIG = { } -async def test_default_setup( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: """Test all basic functionality of the rflink sensor component.""" # setup mocking rflink module event_callback, create, _, _ = await mock_rflink(hass, CONFIG, DOMAIN, monkeypatch) @@ -87,9 +84,7 @@ async def test_default_setup( assert hass.states.get("binary_sensor.test").state == STATE_OFF -async def test_entity_availability( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_entity_availability(hass: HomeAssistant, monkeypatch) -> None: """If Rflink device is disconnected, entities should become unavailable.""" # Make sure Rflink mock does not 'recover' to quickly from the # disconnect or else the unavailability cannot be measured @@ -130,7 +125,7 @@ async def test_entity_availability( assert hass.states.get("binary_sensor.test").state == STATE_ON -async def test_off_delay(hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch) -> None: +async def test_off_delay(hass: HomeAssistant, monkeypatch) -> None: """Test off_delay option.""" # setup mocking rflink module event_callback, create, _, _ = await mock_rflink(hass, CONFIG, DOMAIN, monkeypatch) @@ -193,9 +188,7 @@ async def test_off_delay(hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch) - assert len(events) == 3 -async def test_restore_state( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_restore_state(hass: HomeAssistant, monkeypatch) -> None: """Ensure states are restored on startup.""" mock_restore_cache( hass, (State(f"{DOMAIN}.test", STATE_ON), State(f"{DOMAIN}.test2", STATE_ON)) diff --git a/tests/components/rflink/test_cover.py b/tests/components/rflink/test_cover.py index 578221c7051..0829fddef51 100644 --- a/tests/components/rflink/test_cover.py +++ b/tests/components/rflink/test_cover.py @@ -5,11 +5,14 @@ control of RFLink cover devices. 
""" -import pytest - -from homeassistant.components.cover import CoverState -from homeassistant.components.rflink.entity import EVENT_BUTTON_PRESSED -from homeassistant.const import ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER +from homeassistant.components.rflink import EVENT_BUTTON_PRESSED +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + STATE_CLOSED, + STATE_OPEN, +) from homeassistant.core import CoreState, HomeAssistant, State, callback from .test_init import mock_rflink @@ -34,9 +37,7 @@ CONFIG = { } -async def test_default_setup( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: """Test all basic functionality of the RFLink cover component.""" # setup mocking rflink module event_callback, create, protocol, _ = await mock_rflink( @@ -48,7 +49,7 @@ async def test_default_setup( # test default state of cover loaded from config cover_initial = hass.states.get(f"{DOMAIN}.test") - assert cover_initial.state == CoverState.CLOSED + assert cover_initial.state == STATE_CLOSED assert cover_initial.attributes["assumed_state"] # cover should follow state of the hardware device by interpreting @@ -59,7 +60,7 @@ async def test_default_setup( await hass.async_block_till_done() cover_after_first_command = hass.states.get(f"{DOMAIN}.test") - assert cover_after_first_command.state == CoverState.OPEN + assert cover_after_first_command.state == STATE_OPEN # not sure why, but cover have always assumed_state=true assert cover_after_first_command.attributes.get("assumed_state") @@ -67,34 +68,34 @@ async def test_default_setup( event_callback({"id": "protocol_0_0", "command": "down"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED # should respond to group command event_callback({"id": "protocol_0_0", "command": "allon"}) await hass.async_block_till_done() cover_after_first_command = hass.states.get(f"{DOMAIN}.test") - assert cover_after_first_command.state == CoverState.OPEN + assert cover_after_first_command.state == STATE_OPEN # should respond to group command event_callback({"id": "protocol_0_0", "command": "alloff"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED # test following aliases # mock incoming command event for this device alias event_callback({"id": "test_alias_0_0", "command": "up"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN + assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN # test changing state from HA propagates to RFLink await hass.services.async_call( DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: f"{DOMAIN}.test"} ) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED assert protocol.send_command_ack.call_args_list[0][0][0] == "protocol_0_0" assert protocol.send_command_ack.call_args_list[0][0][1] == "DOWN" @@ -102,13 +103,11 @@ async def test_default_setup( DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: f"{DOMAIN}.test"} ) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN + assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN assert 
protocol.send_command_ack.call_args_list[1][0][1] == "UP" -async def test_firing_bus_event( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_firing_bus_event(hass: HomeAssistant, monkeypatch) -> None: """Incoming RFLink command events should be put on the HA event bus.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -143,9 +142,7 @@ async def test_firing_bus_event( assert calls[0].data == {"state": "down", "entity_id": f"{DOMAIN}.test"} -async def test_signal_repetitions( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_signal_repetitions(hass: HomeAssistant, monkeypatch) -> None: """Command should be sent amount of configured repetitions.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -183,9 +180,7 @@ async def test_signal_repetitions( assert protocol.send_command_ack.call_count == 5 -async def test_signal_repetitions_alternation( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_signal_repetitions_alternation(hass: HomeAssistant, monkeypatch) -> None: """Simultaneously switching entities must alternate repetitions.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -216,9 +211,7 @@ async def test_signal_repetitions_alternation( assert protocol.send_command_ack.call_args_list[3][0][0] == "protocol_0_1" -async def test_signal_repetitions_cancelling( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_signal_repetitions_cancelling(hass: HomeAssistant, monkeypatch) -> None: """Cancel outstanding repetitions when state changed.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -247,9 +240,7 @@ async def test_signal_repetitions_cancelling( assert protocol.send_command_ack.call_args_list[3][0][1] == "UP" -async def test_group_alias( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: """Group aliases should only respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -264,24 +255,22 @@ async def test_group_alias( # setup mocking rflink module event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch) - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED # test sending group command to group alias event_callback({"id": "test_group_0_0", "command": "allon"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN + assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN # test sending group command to group alias event_callback({"id": "test_group_0_0", "command": "down"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN + assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN -async def test_nogroup_alias( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: """Non group aliases should not respond to group commands.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -299,24 +288,22 @@ async def test_nogroup_alias( # setup mocking rflink module event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch) - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED # test sending group command to nogroup alias event_callback({"id": 
"test_nogroup_0_0", "command": "allon"}) await hass.async_block_till_done() # should not affect state - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED # test sending group command to nogroup alias event_callback({"id": "test_nogroup_0_0", "command": "up"}) await hass.async_block_till_done() # should affect state - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN + assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN -async def test_nogroup_device_id( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: """Device id that do not respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -329,24 +316,22 @@ async def test_nogroup_device_id( # setup mocking rflink module event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch) - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED # test sending group command to nogroup event_callback({"id": "test_nogroup_0_0", "command": "allon"}) await hass.async_block_till_done() # should not affect state - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED # test sending group command to nogroup event_callback({"id": "test_nogroup_0_0", "command": "up"}) await hass.async_block_till_done() # should affect state - assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN + assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN -async def test_restore_state( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_restore_state(hass: HomeAssistant, monkeypatch) -> None: """Ensure states are restored on startup.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -362,11 +347,7 @@ async def test_restore_state( } mock_restore_cache( - hass, - ( - State(f"{DOMAIN}.c1", CoverState.OPEN), - State(f"{DOMAIN}.c2", CoverState.CLOSED), - ), + hass, (State(f"{DOMAIN}.c1", STATE_OPEN), State(f"{DOMAIN}.c2", STATE_CLOSED)) ) hass.set_state(CoreState.starting) @@ -376,29 +357,27 @@ async def test_restore_state( state = hass.states.get(f"{DOMAIN}.c1") assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN state = hass.states.get(f"{DOMAIN}.c2") assert state - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED state = hass.states.get(f"{DOMAIN}.c3") assert state - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED # not cached cover must default values state = hass.states.get(f"{DOMAIN}.c4") assert state - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes["assumed_state"] # The code checks the ID, it will use the # 'inverted' class when the name starts with # 'newkaku' -async def test_inverted_cover( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_inverted_cover(hass: HomeAssistant, monkeypatch) -> None: """Ensure states are restored on startup.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -434,7 +413,7 @@ async def test_inverted_cover( # test default state of cover loaded from config standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_standard") - assert standard_cover.state == CoverState.CLOSED + assert standard_cover.state == 
STATE_CLOSED assert standard_cover.attributes["assumed_state"] # mock incoming up command event for nonkaku_device_1 @@ -442,7 +421,7 @@ async def test_inverted_cover( await hass.async_block_till_done() standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_standard") - assert standard_cover.state == CoverState.OPEN + assert standard_cover.state == STATE_OPEN assert standard_cover.attributes.get("assumed_state") # mock incoming up command event for nonkaku_device_2 @@ -450,7 +429,7 @@ async def test_inverted_cover( await hass.async_block_till_done() standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_none") - assert standard_cover.state == CoverState.OPEN + assert standard_cover.state == STATE_OPEN assert standard_cover.attributes.get("assumed_state") # mock incoming up command event for nonkaku_device_3 @@ -459,7 +438,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_inverted") - assert inverted_cover.state == CoverState.OPEN + assert inverted_cover.state == STATE_OPEN assert inverted_cover.attributes.get("assumed_state") # mock incoming up command event for newkaku_device_4 @@ -468,7 +447,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_standard") - assert inverted_cover.state == CoverState.OPEN + assert inverted_cover.state == STATE_OPEN assert inverted_cover.attributes.get("assumed_state") # mock incoming up command event for newkaku_device_5 @@ -477,7 +456,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_none") - assert inverted_cover.state == CoverState.OPEN + assert inverted_cover.state == STATE_OPEN assert inverted_cover.attributes.get("assumed_state") # mock incoming up command event for newkaku_device_6 @@ -486,7 +465,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_inverted") - assert inverted_cover.state == CoverState.OPEN + assert inverted_cover.state == STATE_OPEN assert inverted_cover.attributes.get("assumed_state") # mock incoming down command event for nonkaku_device_1 @@ -495,7 +474,7 @@ async def test_inverted_cover( await hass.async_block_till_done() standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_standard") - assert standard_cover.state == CoverState.CLOSED + assert standard_cover.state == STATE_CLOSED assert standard_cover.attributes.get("assumed_state") # mock incoming down command event for nonkaku_device_2 @@ -504,7 +483,7 @@ async def test_inverted_cover( await hass.async_block_till_done() standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_none") - assert standard_cover.state == CoverState.CLOSED + assert standard_cover.state == STATE_CLOSED assert standard_cover.attributes.get("assumed_state") # mock incoming down command event for nonkaku_device_3 @@ -513,7 +492,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_inverted") - assert inverted_cover.state == CoverState.CLOSED + assert inverted_cover.state == STATE_CLOSED assert inverted_cover.attributes.get("assumed_state") # mock incoming down command event for newkaku_device_4 @@ -522,7 +501,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_standard") - assert inverted_cover.state == CoverState.CLOSED + assert inverted_cover.state 
== STATE_CLOSED assert inverted_cover.attributes.get("assumed_state") # mock incoming down command event for newkaku_device_5 @@ -531,7 +510,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_none") - assert inverted_cover.state == CoverState.CLOSED + assert inverted_cover.state == STATE_CLOSED assert inverted_cover.attributes.get("assumed_state") # mock incoming down command event for newkaku_device_6 @@ -540,7 +519,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_inverted") - assert inverted_cover.state == CoverState.CLOSED + assert inverted_cover.state == STATE_CLOSED assert inverted_cover.attributes.get("assumed_state") # We are only testing the 'inverted' devices, the 'standard' devices @@ -552,7 +531,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_inverted") - assert inverted_cover.state == CoverState.CLOSED + assert inverted_cover.state == STATE_CLOSED # should respond to group command event_callback({"id": "nonkaku_device_3", "command": "allon"}) @@ -560,7 +539,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_inverted") - assert inverted_cover.state == CoverState.OPEN + assert inverted_cover.state == STATE_OPEN # should respond to group command event_callback({"id": "newkaku_device_4", "command": "alloff"}) @@ -568,7 +547,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_standard") - assert inverted_cover.state == CoverState.CLOSED + assert inverted_cover.state == STATE_CLOSED # should respond to group command event_callback({"id": "newkaku_device_4", "command": "allon"}) @@ -576,7 +555,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_standard") - assert inverted_cover.state == CoverState.OPEN + assert inverted_cover.state == STATE_OPEN # should respond to group command event_callback({"id": "newkaku_device_5", "command": "alloff"}) @@ -584,7 +563,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_none") - assert inverted_cover.state == CoverState.CLOSED + assert inverted_cover.state == STATE_CLOSED # should respond to group command event_callback({"id": "newkaku_device_5", "command": "allon"}) @@ -592,7 +571,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_none") - assert inverted_cover.state == CoverState.OPEN + assert inverted_cover.state == STATE_OPEN # should respond to group command event_callback({"id": "newkaku_device_6", "command": "alloff"}) @@ -600,7 +579,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_inverted") - assert inverted_cover.state == CoverState.CLOSED + assert inverted_cover.state == STATE_CLOSED # should respond to group command event_callback({"id": "newkaku_device_6", "command": "allon"}) @@ -608,7 +587,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_inverted") - assert inverted_cover.state == CoverState.OPEN + assert inverted_cover.state == STATE_OPEN # Sending the close command 
from HA should result # in an 'DOWN' command sent to a non-newkaku device @@ -621,7 +600,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_standard").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.nonkaku_type_standard").state == STATE_CLOSED assert protocol.send_command_ack.call_args_list[0][0][0] == "nonkaku_device_1" assert protocol.send_command_ack.call_args_list[0][0][1] == "DOWN" @@ -636,7 +615,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_standard").state == CoverState.OPEN + assert hass.states.get(f"{DOMAIN}.nonkaku_type_standard").state == STATE_OPEN assert protocol.send_command_ack.call_args_list[1][0][0] == "nonkaku_device_1" assert protocol.send_command_ack.call_args_list[1][0][1] == "UP" @@ -649,7 +628,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_none").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.nonkaku_type_none").state == STATE_CLOSED assert protocol.send_command_ack.call_args_list[2][0][0] == "nonkaku_device_2" assert protocol.send_command_ack.call_args_list[2][0][1] == "DOWN" @@ -662,7 +641,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_none").state == CoverState.OPEN + assert hass.states.get(f"{DOMAIN}.nonkaku_type_none").state == STATE_OPEN assert protocol.send_command_ack.call_args_list[3][0][0] == "nonkaku_device_2" assert protocol.send_command_ack.call_args_list[3][0][1] == "UP" @@ -677,7 +656,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_inverted").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.nonkaku_type_inverted").state == STATE_CLOSED assert protocol.send_command_ack.call_args_list[4][0][0] == "nonkaku_device_3" assert protocol.send_command_ack.call_args_list[4][0][1] == "UP" @@ -692,7 +671,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_inverted").state == CoverState.OPEN + assert hass.states.get(f"{DOMAIN}.nonkaku_type_inverted").state == STATE_OPEN assert protocol.send_command_ack.call_args_list[5][0][0] == "nonkaku_device_3" assert protocol.send_command_ack.call_args_list[5][0][1] == "DOWN" @@ -707,7 +686,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_standard").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.newkaku_type_standard").state == STATE_CLOSED assert protocol.send_command_ack.call_args_list[6][0][0] == "newkaku_device_4" assert protocol.send_command_ack.call_args_list[6][0][1] == "DOWN" @@ -722,7 +701,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_standard").state == CoverState.OPEN + assert hass.states.get(f"{DOMAIN}.newkaku_type_standard").state == STATE_OPEN assert protocol.send_command_ack.call_args_list[7][0][0] == "newkaku_device_4" assert protocol.send_command_ack.call_args_list[7][0][1] == "UP" @@ -735,7 +714,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_none").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.newkaku_type_none").state == STATE_CLOSED assert protocol.send_command_ack.call_args_list[8][0][0] == 
"newkaku_device_5" assert protocol.send_command_ack.call_args_list[8][0][1] == "UP" @@ -748,7 +727,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_none").state == CoverState.OPEN + assert hass.states.get(f"{DOMAIN}.newkaku_type_none").state == STATE_OPEN assert protocol.send_command_ack.call_args_list[9][0][0] == "newkaku_device_5" assert protocol.send_command_ack.call_args_list[9][0][1] == "DOWN" @@ -763,7 +742,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_inverted").state == CoverState.CLOSED + assert hass.states.get(f"{DOMAIN}.newkaku_type_inverted").state == STATE_CLOSED assert protocol.send_command_ack.call_args_list[10][0][0] == "newkaku_device_6" assert protocol.send_command_ack.call_args_list[10][0][1] == "UP" @@ -778,6 +757,6 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_inverted").state == CoverState.OPEN + assert hass.states.get(f"{DOMAIN}.newkaku_type_inverted").state == STATE_OPEN assert protocol.send_command_ack.call_args_list[11][0][0] == "newkaku_device_6" assert protocol.send_command_ack.call_args_list[11][0][1] == "DOWN" diff --git a/tests/components/rflink/test_init.py b/tests/components/rflink/test_init.py index 1caae302748..f901e46aea1 100644 --- a/tests/components/rflink/test_init.py +++ b/tests/components/rflink/test_init.py @@ -5,6 +5,7 @@ from unittest.mock import Mock import pytest from voluptuous.error import MultipleInvalid +from homeassistant.bootstrap import async_setup_component from homeassistant.components.rflink import ( CONF_KEEPALIVE_IDLE, CONF_RECONNECT_INTERVAL, @@ -27,16 +28,10 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from homeassistant.setup import async_setup_component async def mock_rflink( - hass: HomeAssistant, - config, - domain, - monkeypatch: pytest.MonkeyPatch, - failures=None, - failcommand=False, + hass, config, domain, monkeypatch, failures=None, failcommand=False ): """Create mock RFLink asyncio protocol, test component setup.""" transport, protocol = (Mock(), Mock()) @@ -82,9 +77,7 @@ async def mock_rflink( return event_callback, mock_create, protocol, disconnect_callback -async def test_version_banner( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_version_banner(hass: HomeAssistant, monkeypatch) -> None: """Test sending unknown commands doesn't cause issues.""" # use sensor domain during testing main platform domain = "sensor" @@ -109,9 +102,7 @@ async def test_version_banner( ) -async def test_send_no_wait( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_send_no_wait(hass: HomeAssistant, monkeypatch) -> None: """Test command sending without ack.""" domain = "switch" config = { @@ -135,9 +126,7 @@ async def test_send_no_wait( assert protocol.send_command.call_args_list[0][0][1] == "off" -async def test_cover_send_no_wait( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_cover_send_no_wait(hass: HomeAssistant, monkeypatch) -> None: """Test command sending to a cover device without ack.""" domain = "cover" config = { @@ -161,9 +150,7 @@ async def test_cover_send_no_wait( assert protocol.send_command.call_args_list[0][0][1] == "STOP" -async def test_send_command( - hass: 
HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_send_command(hass: HomeAssistant, monkeypatch) -> None: """Test send_command service.""" domain = "rflink" config = {"rflink": {"port": "/dev/ttyABC0"}} @@ -181,9 +168,7 @@ async def test_send_command( assert protocol.send_command_ack.call_args_list[0][0][1] == "on" -async def test_send_command_invalid_arguments( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_send_command_invalid_arguments(hass: HomeAssistant, monkeypatch) -> None: """Test send_command service.""" domain = "rflink" config = {"rflink": {"port": "/dev/ttyABC0"}} @@ -216,9 +201,7 @@ async def test_send_command_invalid_arguments( assert not success, "send command should not succeed for unknown command" -async def test_send_command_event_propagation( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_send_command_event_propagation(hass: HomeAssistant, monkeypatch) -> None: """Test event propagation for send_command service.""" domain = "light" config = { @@ -260,9 +243,7 @@ async def test_send_command_event_propagation( assert hass.states.get(f"{domain}.test1").state == "off" -async def test_reconnecting_after_disconnect( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_reconnecting_after_disconnect(hass: HomeAssistant, monkeypatch) -> None: """An unexpected disconnect should cause a reconnect.""" domain = "sensor" config = { @@ -286,9 +267,7 @@ async def test_reconnecting_after_disconnect( assert mock_create.call_count == 2 -async def test_reconnecting_after_failure( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_reconnecting_after_failure(hass: HomeAssistant, monkeypatch) -> None: """A failure to reconnect should be retried.""" domain = "sensor" config = { @@ -315,9 +294,7 @@ async def test_reconnecting_after_failure( assert mock_create.call_count == 3 -async def test_error_when_not_connected( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_error_when_not_connected(hass: HomeAssistant, monkeypatch) -> None: """Sending command should error when not connected.""" domain = "switch" config = { @@ -347,9 +324,7 @@ async def test_error_when_not_connected( assert not success, "changing state should not succeed when disconnected" -async def test_async_send_command_error( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_async_send_command_error(hass: HomeAssistant, monkeypatch) -> None: """Sending command should error when protocol fails.""" domain = "rflink" config = {"rflink": {"port": "/dev/ttyABC0"}} @@ -370,9 +345,7 @@ async def test_async_send_command_error( assert protocol.send_command_ack.call_args_list[0][0][1] == SERVICE_TURN_OFF -async def test_race_condition( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_race_condition(hass: HomeAssistant, monkeypatch) -> None: """Test race condition for unknown components.""" domain = "light" config = {"rflink": {"port": "/dev/ttyABC0"}, domain: {"platform": "rflink"}} @@ -408,7 +381,7 @@ async def test_race_condition( assert new_sensor.state == "on" -async def test_not_connected() -> None: +async def test_not_connected(hass: HomeAssistant, monkeypatch) -> None: """Test Error when sending commands to a disconnected device.""" test_device = RflinkCommand("DUMMY_DEVICE") RflinkCommand.set_rflink_protocol(None) @@ -417,9 +390,7 @@ async def test_not_connected() -> None: async 
def test_keepalive( - hass: HomeAssistant, - monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, monkeypatch, caplog: pytest.LogCaptureFixture ) -> None: """Validate negative keepalive values.""" keepalive_value = -3 @@ -447,9 +418,7 @@ async def test_keepalive( async def test_keepalive_2( - hass: HomeAssistant, - monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, monkeypatch, caplog: pytest.LogCaptureFixture ) -> None: """Validate very short keepalive values.""" keepalive_value = 30 @@ -477,9 +446,7 @@ async def test_keepalive_2( async def test_keepalive_3( - hass: HomeAssistant, - monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, monkeypatch, caplog: pytest.LogCaptureFixture ) -> None: """Validate keepalive=0 value.""" domain = RFLINK_DOMAIN @@ -499,9 +466,7 @@ async def test_keepalive_3( async def test_default_keepalive( - hass: HomeAssistant, - monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, monkeypatch, caplog: pytest.LogCaptureFixture ) -> None: """Validate keepalive=0 value.""" domain = RFLINK_DOMAIN @@ -520,9 +485,7 @@ async def test_default_keepalive( async def test_unique_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - monkeypatch: pytest.MonkeyPatch, + hass: HomeAssistant, entity_registry: er.EntityRegistry, monkeypatch ) -> None: """Validate the device unique_id.""" diff --git a/tests/components/rflink/test_light.py b/tests/components/rflink/test_light.py index e76d5b4f783..5ee2375bc36 100644 --- a/tests/components/rflink/test_light.py +++ b/tests/components/rflink/test_light.py @@ -5,10 +5,8 @@ control of RFLink switch devices. """ -import pytest - from homeassistant.components.light import ATTR_BRIGHTNESS -from homeassistant.components.rflink.entity import EVENT_BUTTON_PRESSED +from homeassistant.components.rflink import EVENT_BUTTON_PRESSED from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -40,9 +38,7 @@ CONFIG = { } -async def test_default_setup( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: """Test all basic functionality of the RFLink switch component.""" # setup mocking rflink module event_callback, create, protocol, _ = await mock_rflink( @@ -150,9 +146,7 @@ async def test_default_setup( assert protocol.send_command_ack.call_args_list[5][0][1] == "7" -async def test_firing_bus_event( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_firing_bus_event(hass: HomeAssistant, monkeypatch) -> None: """Incoming RFLink command events should be put on the HA event bus.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -187,9 +181,7 @@ async def test_firing_bus_event( assert calls[0].data == {"state": "off", "entity_id": f"{DOMAIN}.test"} -async def test_signal_repetitions( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_signal_repetitions(hass: HomeAssistant, monkeypatch) -> None: """Command should be sent amount of configured repetitions.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -245,9 +237,7 @@ async def test_signal_repetitions( assert protocol.send_command_ack.call_count == 8 -async def test_signal_repetitions_alternation( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_signal_repetitions_alternation(hass: HomeAssistant, monkeypatch) -> None: """Simultaneously 
switching entities must alternate repetitions.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -278,9 +268,7 @@ async def test_signal_repetitions_alternation( assert protocol.send_command_ack.call_args_list[3][0][0] == "protocol_0_1" -async def test_signal_repetitions_cancelling( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_signal_repetitions_cancelling(hass: HomeAssistant, monkeypatch) -> None: """Cancel outstanding repetitions when state changed.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -314,9 +302,7 @@ async def test_signal_repetitions_cancelling( ] -async def test_type_toggle( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_type_toggle(hass: HomeAssistant, monkeypatch) -> None: """Test toggle type lights (on/on).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -361,9 +347,7 @@ async def test_type_toggle( assert hass.states.get(f"{DOMAIN}.toggle_test").state == "off" -async def test_set_level_command( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_set_level_command(hass: HomeAssistant, monkeypatch) -> None: """Test 'set_level=XX' events.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -450,9 +434,7 @@ async def test_set_level_command( assert state.attributes[ATTR_BRIGHTNESS] == 0 -async def test_group_alias( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: """Group aliases should only respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -489,9 +471,7 @@ async def test_group_alias( assert hass.states.get(f"{DOMAIN}.test2").state == "on" -async def test_nogroup_alias( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: """Non group aliases should not respond to group commands.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -524,9 +504,7 @@ async def test_nogroup_alias( assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_nogroup_device_id( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: """Device id that do not respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -554,9 +532,7 @@ async def test_nogroup_device_id( assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_disable_automatic_add( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_disable_automatic_add(hass: HomeAssistant, monkeypatch) -> None: """If disabled new devices should not be automatically added.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -574,9 +550,7 @@ async def test_disable_automatic_add( assert not hass.states.get(f"{DOMAIN}.protocol_0_0") -async def test_restore_state( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_restore_state(hass: HomeAssistant, monkeypatch) -> None: """Ensure states are restored on startup.""" config = { "rflink": {"port": "/dev/ttyABC0"}, diff --git a/tests/components/rflink/test_sensor.py b/tests/components/rflink/test_sensor.py index 278dd45a114..e375f3ae863 100644 --- a/tests/components/rflink/test_sensor.py +++ b/tests/components/rflink/test_sensor.py @@ -5,8 +5,6 @@ automatic sensor creation. 
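The sensor hunks that follow reuse the pattern running through all the rflink test files above: mock_rflink (defined in tests/components/rflink/test_init.py) patches in a fake transport/protocol and hands back the event_callback the integration registered, so tests drive entity state by injecting packets. A condensed, self-contained sketch of that flow, assuming Home Assistant's pytest harness supplies the hass fixture; the test name and device id below are placeholders:

from homeassistant.const import STATE_ON
from homeassistant.core import HomeAssistant

from .test_init import mock_rflink

CONFIG = {
    "rflink": {"port": "/dev/ttyABC0"},
    "binary_sensor": {
        "platform": "rflink",
        "devices": {"protocol_0_0": {"name": "test"}},
    },
}


async def test_event_updates_state(hass: HomeAssistant, monkeypatch) -> None:
    """Incoming RFLink packets should be reflected in the entity state."""
    # mock_rflink patches the RFLink connection factory and returns the
    # callback the integration registered for incoming packets.
    event_callback, _, _, _ = await mock_rflink(
        hass, CONFIG, "binary_sensor", monkeypatch
    )

    # Simulate the hardware reporting an 'on' command for the device.
    event_callback({"id": "protocol_0_0", "command": "on"})
    await hass.async_block_till_done()

    assert hass.states.get("binary_sensor.test").state == STATE_ON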
""" -import pytest - from homeassistant.components.rflink import ( CONF_RECONNECT_INTERVAL, DATA_ENTITY_LOOKUP, @@ -41,9 +39,7 @@ CONFIG = { } -async def test_default_setup( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: """Test all basic functionality of the rflink sensor component.""" # setup mocking rflink module event_callback, create, _, _ = await mock_rflink(hass, CONFIG, DOMAIN, monkeypatch) @@ -104,9 +100,7 @@ async def test_default_setup( assert bat_sensor.attributes[ATTR_ICON] == "mdi:battery" -async def test_disable_automatic_add( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_disable_automatic_add(hass: HomeAssistant, monkeypatch) -> None: """If disabled new devices should not be automatically added.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -131,9 +125,7 @@ async def test_disable_automatic_add( assert not hass.states.get("sensor.test2") -async def test_entity_availability( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_entity_availability(hass: HomeAssistant, monkeypatch) -> None: """If Rflink device is disconnected, entities should become unavailable.""" # Make sure Rflink mock does not 'recover' to quickly from the # disconnect or else the unavailability cannot be measured @@ -168,7 +160,7 @@ async def test_entity_availability( assert hass.states.get("sensor.test").state == STATE_UNKNOWN -async def test_aliases(hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch) -> None: +async def test_aliases(hass: HomeAssistant, monkeypatch) -> None: """Validate the response to sensor's alias (with aliases).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -210,9 +202,7 @@ async def test_aliases(hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch) -> assert updated_sensor.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE -async def test_race_condition( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_race_condition(hass: HomeAssistant, monkeypatch) -> None: """Test race condition for unknown components.""" config = {"rflink": {"port": "/dev/ttyABC0"}, DOMAIN: {"platform": "rflink"}} tmp_entity = TMP_ENTITY.format("test3") @@ -251,9 +241,7 @@ async def test_race_condition( assert new_sensor.state == "ko" -async def test_sensor_attributes( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_sensor_attributes(hass: HomeAssistant, monkeypatch) -> None: """Validate the sensor attributes.""" config = { diff --git a/tests/components/rflink/test_switch.py b/tests/components/rflink/test_switch.py index f81c41f03d5..705856565ae 100644 --- a/tests/components/rflink/test_switch.py +++ b/tests/components/rflink/test_switch.py @@ -5,9 +5,7 @@ control of Rflink switch devices. 
""" -import pytest - -from homeassistant.components.rflink.entity import EVENT_BUTTON_PRESSED +from homeassistant.components.rflink import EVENT_BUTTON_PRESSED from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -35,9 +33,7 @@ CONFIG = { } -async def test_default_setup( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: """Test all basic functionality of the rflink switch component.""" # setup mocking rflink module event_callback, create, protocol, _ = await mock_rflink( @@ -97,9 +93,7 @@ async def test_default_setup( assert protocol.send_command_ack.call_args_list[1][0][1] == "on" -async def test_group_alias( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: """Group aliases should only respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -129,9 +123,7 @@ async def test_group_alias( assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_nogroup_alias( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: """Non group aliases should not respond to group commands.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -164,9 +156,7 @@ async def test_nogroup_alias( assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_nogroup_device_id( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: """Device id that do not respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -194,9 +184,7 @@ async def test_nogroup_device_id( assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_device_defaults( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_device_defaults(hass: HomeAssistant, monkeypatch) -> None: """Event should fire if device_defaults config says so.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -228,9 +216,7 @@ async def test_device_defaults( assert calls[0].data == {"state": "off", "entity_id": f"{DOMAIN}.test"} -async def test_not_firing_default( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_not_firing_default(hass: HomeAssistant, monkeypatch) -> None: """By default no bus events should be fired.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -260,9 +246,7 @@ async def test_not_firing_default( assert not calls, "an event has been fired" -async def test_restore_state( - hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_restore_state(hass: HomeAssistant, monkeypatch) -> None: """Ensure states are restored on startup.""" config = { "rflink": {"port": "/dev/ttyABC0"}, diff --git a/tests/components/rflink/test_utils.py b/tests/components/rflink/test_utils.py index 38804d14ecc..170a05f8623 100644 --- a/tests/components/rflink/test_utils.py +++ b/tests/components/rflink/test_utils.py @@ -4,9 +4,10 @@ from homeassistant.components.rflink.utils import ( brightness_to_rflink, rflink_to_brightness, ) +from homeassistant.core import HomeAssistant -async def test_utils() -> None: +async def test_utils(hass: HomeAssistant, monkeypatch) -> None: """Test all utils methods.""" # test brightness_to_rflink assert brightness_to_rflink(0) == 0 diff --git a/tests/components/rfxtrx/conftest.py 
b/tests/components/rfxtrx/conftest.py index be5c72e6483..88450638d6c 100644 --- a/tests/components/rfxtrx/conftest.py +++ b/tests/components/rfxtrx/conftest.py @@ -2,9 +2,7 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine, Generator -from typing import Any -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import Mock, patch from freezegun import freeze_time import pytest @@ -69,7 +67,7 @@ async def setup_rfx_test_cfg( @pytest.fixture(autouse=True) -def transport_mock() -> Generator[Mock]: +async def transport_mock(hass): """Fixture that make sure all transports are fake.""" transport = Mock(spec=RFXtrxTransport) with ( @@ -80,14 +78,14 @@ def transport_mock() -> Generator[Mock]: @pytest.fixture(autouse=True) -def connect_mock() -> Generator[MagicMock]: +async def connect_mock(hass): """Fixture that make sure connect class is mocked.""" with patch("RFXtrx.Connect") as connect: yield connect @pytest.fixture(autouse=True, name="rfxtrx") -def rfxtrx_fixture(hass: HomeAssistant, connect_mock: MagicMock) -> Mock: +def rfxtrx_fixture(hass, connect_mock): """Fixture that cleans up threads from integration.""" rfx = Mock(spec=Connect) @@ -116,21 +114,19 @@ def rfxtrx_fixture(hass: HomeAssistant, connect_mock: MagicMock) -> Mock: @pytest.fixture(name="rfxtrx_automatic") -async def rfxtrx_automatic_fixture(hass: HomeAssistant, rfxtrx: Mock) -> Mock: +async def rfxtrx_automatic_fixture(hass, rfxtrx): """Fixture that starts up with automatic additions.""" await setup_rfx_test_cfg(hass, automatic_add=True, devices={}) return rfxtrx @pytest.fixture -def timestep( - hass: HomeAssistant, -) -> Generator[Callable[[int], Coroutine[Any, Any, None]]]: +async def timestep(hass): """Step system time forward.""" with freeze_time(utcnow()) as frozen_time: - async def delay(seconds: int) -> None: + async def delay(seconds): """Trigger delay in system.""" frozen_time.tick(delta=seconds) async_fire_time_changed(hass) diff --git a/tests/components/rfxtrx/test_config_flow.py b/tests/components/rfxtrx/test_config_flow.py index 1e23bdaf982..b61440c31b6 100644 --- a/tests/components/rfxtrx/test_config_flow.py +++ b/tests/components/rfxtrx/test_config_flow.py @@ -29,9 +29,7 @@ def com_port(): return port -async def start_options_flow( - hass: HomeAssistant, entry: MockConfigEntry -) -> config_entries.ConfigFlowResult: +async def start_options_flow(hass, entry): """Start the options flow with the entry under test.""" entry.add_to_hass(hass) diff --git a/tests/components/rfxtrx/test_device_action.py b/tests/components/rfxtrx/test_device_action.py index a3522934c57..c678f2dfc62 100644 --- a/tests/components/rfxtrx/test_device_action.py +++ b/tests/components/rfxtrx/test_device_action.py @@ -47,7 +47,7 @@ async def test_device_test_data(rfxtrx, device: DeviceTestData) -> None: } -async def setup_entry(hass: HomeAssistant, devices: dict[str, Any]) -> None: +async def setup_entry(hass, devices): """Construct a config setup.""" entry_data = create_rfx_test_cfg(devices=devices) mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data) @@ -79,10 +79,7 @@ def _get_expected_actions(data): ], ) async def test_get_actions( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - device: DeviceTestData, - expected, + hass: HomeAssistant, device_registry: dr.DeviceRegistry, device, expected ) -> None: """Test we get the expected actions from a rfxtrx.""" await setup_entry(hass, {device.code: {}}) @@ -139,7 +136,7 @@ async def test_action( hass: 
HomeAssistant, device_registry: dr.DeviceRegistry, rfxtrx: RFXtrx.Connect, - device: DeviceTestData, + device, config, expected, ) -> None: diff --git a/tests/components/rfxtrx/test_device_trigger.py b/tests/components/rfxtrx/test_device_trigger.py index 9c56951761b..38f7cccc072 100644 --- a/tests/components/rfxtrx/test_device_trigger.py +++ b/tests/components/rfxtrx/test_device_trigger.py @@ -46,7 +46,7 @@ EVENT_FIREALARM_1 = EventTestData( ) -async def setup_entry(hass: HomeAssistant, devices: dict[str, Any]) -> None: +async def setup_entry(hass, devices): """Construct a config setup.""" entry_data = create_rfx_test_cfg(devices=devices) mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data) diff --git a/tests/components/ridwell/conftest.py b/tests/components/ridwell/conftest.py index 6ea9d91f8e9..32907ac8037 100644 --- a/tests/components/ridwell/conftest.py +++ b/tests/components/ridwell/conftest.py @@ -1,8 +1,6 @@ """Define test fixtures for Ridwell.""" -from collections.abc import Generator from datetime import date -from typing import Any from unittest.mock import AsyncMock, Mock, patch from aioridwell.model import EventState, RidwellPickup, RidwellPickupEvent @@ -10,7 +8,6 @@ import pytest from homeassistant.components.ridwell.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -22,7 +19,7 @@ TEST_USER_ID = "12345" @pytest.fixture(name="account") -def account_fixture() -> Mock: +def account_fixture(): """Define a Ridwell account.""" return Mock( account_id=TEST_ACCOUNT_ID, @@ -47,7 +44,7 @@ def account_fixture() -> Mock: @pytest.fixture(name="client") -def client_fixture(account: Mock) -> Mock: +def client_fixture(account): """Define an aioridwell client.""" return Mock( async_authenticate=AsyncMock(), @@ -58,9 +55,7 @@ def client_fixture(account: Mock) -> Mock: @pytest.fixture(name="config_entry") -def config_entry_fixture( - hass: HomeAssistant, config: dict[str, Any] -) -> MockConfigEntry: +def config_entry_fixture(hass, config): """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -73,7 +68,7 @@ def config_entry_fixture( @pytest.fixture(name="config") -def config_fixture() -> dict[str, Any]: +def config_fixture(hass): """Define a config entry data fixture.""" return { CONF_USERNAME: TEST_USERNAME, @@ -82,7 +77,7 @@ def config_fixture() -> dict[str, Any]: @pytest.fixture(name="mock_aioridwell") -def mock_aioridwell_fixture(client: Mock, config: dict[str, Any]) -> Generator[None]: +async def mock_aioridwell_fixture(hass, client, config): """Define a fixture to patch aioridwell.""" with ( patch( @@ -98,9 +93,7 @@ def mock_aioridwell_fixture(client: Mock, config: dict[str, Any]) -> Generator[N @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture( - hass: HomeAssistant, config_entry: MockConfigEntry, mock_aioridwell: None -) -> None: +async def setup_config_entry_fixture(hass, config_entry, mock_aioridwell): """Define a fixture to set up ridwell.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/ridwell/snapshots/test_diagnostics.ambr b/tests/components/ridwell/snapshots/test_diagnostics.ambr index b03d87c7a89..d32b1d3f446 100644 --- a/tests/components/ridwell/snapshots/test_diagnostics.ambr +++ b/tests/components/ridwell/snapshots/test_diagnostics.ambr @@ -34,8 +34,6 @@ 'username': '**REDACTED**', }), 
'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'ridwell', 'entry_id': '11554ec901379b9cc8f5a6c1d11ce978', 'minor_version': 1, diff --git a/tests/components/ridwell/test_config_flow.py b/tests/components/ridwell/test_config_flow.py index 6dd00344c5b..601ac182670 100644 --- a/tests/components/ridwell/test_config_flow.py +++ b/tests/components/ridwell/test_config_flow.py @@ -13,8 +13,6 @@ from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_PASSWORD, TEST_USERNAME -from tests.common import MockConfigEntry - @pytest.mark.parametrize( ("get_client_response", "errors"), @@ -67,10 +65,12 @@ async def test_duplicate_error(hass: HomeAssistant, config, setup_config_entry) async def test_step_reauth( - hass: HomeAssistant, config, config_entry: MockConfigEntry, setup_config_entry + hass: HomeAssistant, config, config_entry, setup_config_entry ) -> None: """Test a full reauth flow.""" - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=config + ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_PASSWORD: "new_password"}, diff --git a/tests/components/ridwell/test_diagnostics.py b/tests/components/ridwell/test_diagnostics.py index 45683bba903..adfbb525283 100644 --- a/tests/components/ridwell/test_diagnostics.py +++ b/tests/components/ridwell/test_diagnostics.py @@ -1,7 +1,6 @@ """Test Ridwell diagnostics.""" from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -17,6 +16,7 @@ async def test_entry_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert await get_diagnostics_for_config_entry( - hass, hass_client, config_entry - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + == snapshot + ) diff --git a/tests/components/ring/common.py b/tests/components/ring/common.py index 22fa1c2bf32..b129623aa95 100644 --- a/tests/components/ring/common.py +++ b/tests/components/ring/common.py @@ -2,36 +2,17 @@ from unittest.mock import patch -from homeassistant.components.automation import DOMAIN as AUTOMATION_DOMAIN from homeassistant.components.ring import DOMAIN -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -async def setup_platform(hass: HomeAssistant, platform: Platform) -> None: +async def setup_platform(hass, platform): """Set up the ring platform and prerequisites.""" - if not hass.config_entries.async_has_entries(DOMAIN): - MockConfigEntry( - domain=DOMAIN, data={"username": "foo", "token": {}} - ).add_to_hass(hass) + MockConfigEntry(domain=DOMAIN, data={"username": "foo", "token": {}}).add_to_hass( + hass + ) with patch("homeassistant.components.ring.PLATFORMS", [platform]): assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done(wait_background_tasks=True) - - -async def setup_automation(hass: HomeAssistant, alias: str, entity_id: str) -> None: - """Set up an automation for tests.""" - assert await async_setup_component( - hass, - AUTOMATION_DOMAIN, - { - AUTOMATION_DOMAIN: { - "alias": alias, - "trigger": {"platform": "state", "entity_id": entity_id, "to": "on"}, - "action": {"action": "notify.notify", "metadata": {}, "data": {}}, 
- } - }, - ) diff --git a/tests/components/ring/conftest.py b/tests/components/ring/conftest.py index 1296c2f58c5..58e77184f55 100644 --- a/tests/components/ring/conftest.py +++ b/tests/components/ring/conftest.py @@ -1,24 +1,21 @@ """Configuration for Ring tests.""" -from collections.abc import Generator from itertools import chain from unittest.mock import AsyncMock, Mock, create_autospec, patch import pytest import ring_doorbell +from typing_extensions import Generator from homeassistant.components.ring import DOMAIN -from homeassistant.components.ring.const import CONF_CONFIG_ENTRY_MINOR_VERSION -from homeassistant.const import CONF_DEVICE_ID, CONF_USERNAME +from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant -from .device_mocks import get_devices_data, get_mock_devices +from .device_mocks import get_active_alerts, get_devices_data, get_mock_devices from tests.common import MockConfigEntry from tests.components.light.conftest import mock_light_profiles # noqa: F401 -MOCK_HARDWARE_ID = "foo-bar" - @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -29,23 +26,13 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry -@pytest.fixture -def mock_ring_init_auth_class(): - """Mock ring_doorbell.Auth in init and return the mock class.""" - with patch("homeassistant.components.ring.Auth", autospec=True) as mock_ring_auth: - mock_ring_auth.return_value.async_fetch_token.return_value = { - "access_token": "mock-token" - } - yield mock_ring_auth - - @pytest.fixture def mock_ring_auth(): """Mock ring_doorbell.Auth.""" with patch( "homeassistant.components.ring.config_flow.Auth", autospec=True ) as mock_ring_auth: - mock_ring_auth.return_value.async_fetch_token.return_value = { + mock_ring_auth.return_value.fetch_token.return_value = { "access_token": "mock-token" } yield mock_ring_auth.return_value @@ -106,7 +93,7 @@ def mock_ring_client(mock_ring_auth, mock_ring_devices): mock_client = create_autospec(ring_doorbell.Ring) mock_client.return_value.devices_data = get_devices_data() mock_client.return_value.devices.return_value = mock_ring_devices - mock_client.return_value.active_alerts.return_value = [] + mock_client.return_value.active_alerts.side_effect = get_active_alerts with patch("homeassistant.components.ring.Ring", new=mock_client): yield mock_client.return_value @@ -119,13 +106,10 @@ def mock_config_entry() -> MockConfigEntry: title="Ring", domain=DOMAIN, data={ - CONF_DEVICE_ID: MOCK_HARDWARE_ID, CONF_USERNAME: "foo@bar.com", "token": {"access_token": "mock-token"}, }, unique_id="foo@bar.com", - version=1, - minor_version=CONF_CONFIG_ENTRY_MINOR_VERSION, ) @@ -141,14 +125,3 @@ async def mock_added_config_entry( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() return mock_config_entry - - -@pytest.fixture(autouse=True) -def mock_ring_event_listener_class(): - """Fixture to mock the ring event listener.""" - - with patch( - "homeassistant.components.ring.coordinator.RingEventListener", autospec=True - ) as mock_ring_listener: - mock_ring_listener.return_value.started = True - yield mock_ring_listener diff --git a/tests/components/ring/device_mocks.py b/tests/components/ring/device_mocks.py index a1833aaa8bd..f43370c918d 100644 --- a/tests/components/ring/device_mocks.py +++ b/tests/components/ring/device_mocks.py @@ -7,9 +7,10 @@ Each device entry in the devices.json will have a MagicMock instead of the RingO Mocks the api calls on the devices such as history() 
and health(). """ +from copy import deepcopy from datetime import datetime -from functools import partial -from unittest.mock import AsyncMock, MagicMock +from time import time +from unittest.mock import MagicMock from ring_doorbell import ( RingCapability, @@ -18,7 +19,6 @@ from ring_doorbell import ( RingOther, RingStickUpCam, ) -from ring_doorbell.const import DOORBELL_EXISTING_TYPE from homeassistant.components.ring.const import DOMAIN from homeassistant.util import dt as dt_util @@ -30,12 +30,7 @@ DOORBOT_HISTORY = load_json_value_fixture("doorbot_history.json", DOMAIN) INTERCOM_HISTORY = load_json_value_fixture("intercom_history.json", DOMAIN) DOORBOT_HEALTH = load_json_value_fixture("doorbot_health_attrs.json", DOMAIN) CHIME_HEALTH = load_json_value_fixture("chime_health_attrs.json", DOMAIN) - -FRONT_DOOR_DEVICE_ID = 987654 -INGRESS_DEVICE_ID = 185036587 -FRONT_DEVICE_ID = 765432 -INTERNAL_DEVICE_ID = 345678 -DOWNSTAIRS_DEVICE_ID = 123456 +DEVICE_ALERTS = load_json_value_fixture("ding_active.json", DOMAIN) def get_mock_devices(): @@ -59,6 +54,14 @@ def get_devices_data(): } +def get_active_alerts(): + """Return active alerts set to now.""" + dings_fixture = deepcopy(DEVICE_ALERTS) + for ding in dings_fixture: + ding["now"] = time() + return dings_fixture + + DEVICE_TYPES = { "doorbots": RingDoorBell, "authorized_doorbots": RingDoorBell, @@ -73,7 +76,6 @@ DEVICE_CAPABILITIES = { RingCapability.VOLUME, RingCapability.MOTION_DETECTION, RingCapability.VIDEO, - RingCapability.DING, RingCapability.HISTORY, ], RingStickUpCam: [ @@ -86,7 +88,7 @@ DEVICE_CAPABILITIES = { RingCapability.LIGHT, ], RingChime: [RingCapability.VOLUME], - RingOther: [RingCapability.OPEN, RingCapability.HISTORY, RingCapability.DING], + RingOther: [RingCapability.OPEN, RingCapability.HISTORY], } @@ -130,26 +132,20 @@ def _mocked_ring_device(device_dict, device_family, device_class, capabilities): # Configure common methods mock_device.has_capability.side_effect = has_capability - mock_device.async_update_health_data.side_effect = lambda: update_health_data( + mock_device.update_health_data.side_effect = lambda: update_health_data( DOORBOT_HEALTH if device_family != "chimes" else CHIME_HEALTH ) # Configure methods based on capability if has_capability(RingCapability.HISTORY): mock_device.configure_mock(last_history=[]) - mock_device.async_history.side_effect = lambda *_, **__: update_history_data( + mock_device.history.side_effect = lambda *_, **__: update_history_data( DOORBOT_HISTORY if device_family != "other" else INTERCOM_HISTORY ) - if has_capability(RingCapability.VIDEO): - mock_device.async_recording_url = AsyncMock(return_value="http://dummy.url") - if has_capability(RingCapability.MOTION_DETECTION): mock_device.configure_mock( motion_detection=device_dict["settings"].get("motion_detection_enabled"), ) - mock_device.async_set_motion_detection.side_effect = ( - lambda i: mock_device.configure_mock(motion_detection=i) - ) if has_capability(RingCapability.LIGHT): mock_device.configure_mock(lights=device_dict.get("led_status")) @@ -160,17 +156,11 @@ def _mocked_ring_device(device_dict, device_family, device_class, capabilities): "doorbell_volume", device_dict["settings"].get("volume") ) ) - mock_device.async_set_volume.side_effect = lambda i: mock_device.configure_mock( - volume=i - ) if has_capability(RingCapability.SIREN): mock_device.configure_mock( siren=device_dict["siren_status"].get("seconds_remaining") ) - mock_device.async_set_siren.side_effect = lambda i: mock_device.configure_mock( - siren=i - ) if 
has_capability(RingCapability.BATTERY): mock_device.configure_mock( @@ -179,30 +169,11 @@ def _mocked_ring_device(device_dict, device_family, device_class, capabilities): ) ) - if device_family == "doorbots": - mock_device.configure_mock( - existing_doorbell_type=DOORBELL_EXISTING_TYPE[ - device_dict["settings"]["chime_settings"].get("type", 2) - ] - ) - mock_device.configure_mock( - existing_doorbell_type_enabled=device_dict["settings"][ - "chime_settings" - ].get("enable", False) - ) - mock_device.async_set_existing_doorbell_type_enabled.side_effect = ( - lambda i: mock_device.configure_mock(existing_doorbell_type_enabled=i) - ) - if device_family == "other": - for prop in ("doorbell_volume", "mic_volume", "voice_volume"): - mock_device.configure_mock( - **{ - prop: device_dict["settings"].get(prop), - f"async_set_{prop}.side_effect": partial( - setattr, mock_device, prop - ), - } - ) + mock_device.configure_mock( + doorbell_volume=device_dict["settings"].get("doorbell_volume"), + mic_volume=device_dict["settings"].get("mic_volume"), + voice_volume=device_dict["settings"].get("voice_volume"), + ) return mock_device diff --git a/tests/components/ring/snapshots/test_binary_sensor.ambr b/tests/components/ring/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 2f8e4d8a219..00000000000 --- a/tests/components/ring/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,241 +0,0 @@ -# serializer version: 1 -# name: test_states[binary_sensor.front_door_ding-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.front_door_ding', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Ding', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ding', - 'unique_id': '987654-ding', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[binary_sensor.front_door_ding-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'occupancy', - 'friendly_name': 'Front Door Ding', - }), - 'context': , - 'entity_id': 'binary_sensor.front_door_ding', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_states[binary_sensor.front_door_motion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.front_door_motion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Motion', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'motion', - 'unique_id': '987654-motion', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[binary_sensor.front_door_motion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'motion', - 'friendly_name': 'Front Door Motion', 
- }), - 'context': , - 'entity_id': 'binary_sensor.front_door_motion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_states[binary_sensor.front_motion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.front_motion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Motion', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'motion', - 'unique_id': '765432-motion', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[binary_sensor.front_motion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'motion', - 'friendly_name': 'Front Motion', - }), - 'context': , - 'entity_id': 'binary_sensor.front_motion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_states[binary_sensor.ingress_ding-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.ingress_ding', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Ding', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ding', - 'unique_id': '185036587-ding', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[binary_sensor.ingress_ding-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'occupancy', - 'friendly_name': 'Ingress Ding', - }), - 'context': , - 'entity_id': 'binary_sensor.ingress_ding', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_states[binary_sensor.internal_motion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.internal_motion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Motion', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'motion', - 'unique_id': '345678-motion', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[binary_sensor.internal_motion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'motion', - 'friendly_name': 'Internal Motion', - }), - 'context': , - 'entity_id': 'binary_sensor.internal_motion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git 
a/tests/components/ring/snapshots/test_button.ambr b/tests/components/ring/snapshots/test_button.ambr deleted file mode 100644 index 01f6525450b..00000000000 --- a/tests/components/ring/snapshots/test_button.ambr +++ /dev/null @@ -1,48 +0,0 @@ -# serializer version: 1 -# name: test_states[button.ingress_open_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.ingress_open_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Open door', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'open_door', - 'unique_id': '185036587-open_door', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[button.ingress_open_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Open door', - }), - 'context': , - 'entity_id': 'button.ingress_open_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/ring/snapshots/test_camera.ambr b/tests/components/ring/snapshots/test_camera.ambr deleted file mode 100644 index 4347f302c72..00000000000 --- a/tests/components/ring/snapshots/test_camera.ambr +++ /dev/null @@ -1,159 +0,0 @@ -# serializer version: 1 -# name: test_states[camera.front-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'camera', - 'entity_category': None, - 'entity_id': 'camera.front', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '765432', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[camera.front-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'access_token': '1caab5c3b3', - 'attribution': 'Data provided by Ring.com', - 'entity_picture': '/api/camera_proxy/camera.front?token=1caab5c3b3', - 'friendly_name': 'Front', - 'last_video_id': None, - 'supported_features': , - 'video_url': None, - }), - 'context': , - 'entity_id': 'camera.front', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'idle', - }) -# --- -# name: test_states[camera.front_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'camera', - 'entity_category': None, - 'entity_id': 'camera.front_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '987654', - 'unit_of_measurement': None, - }) -# 
--- -# name: test_states[camera.front_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'access_token': '1caab5c3b3', - 'attribution': 'Data provided by Ring.com', - 'entity_picture': '/api/camera_proxy/camera.front_door?token=1caab5c3b3', - 'friendly_name': 'Front Door', - 'last_video_id': None, - 'motion_detection': True, - 'supported_features': , - 'video_url': None, - }), - 'context': , - 'entity_id': 'camera.front_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'idle', - }) -# --- -# name: test_states[camera.internal-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'camera', - 'entity_category': None, - 'entity_id': 'camera.internal', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '345678', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[camera.internal-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'access_token': '1caab5c3b3', - 'attribution': 'Data provided by Ring.com', - 'entity_picture': '/api/camera_proxy/camera.internal?token=1caab5c3b3', - 'friendly_name': 'Internal', - 'last_video_id': None, - 'motion_detection': True, - 'supported_features': , - 'video_url': None, - }), - 'context': , - 'entity_id': 'camera.internal', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'idle', - }) -# --- diff --git a/tests/components/ring/snapshots/test_event.ambr b/tests/components/ring/snapshots/test_event.ambr deleted file mode 100644 index e97a01516bb..00000000000 --- a/tests/components/ring/snapshots/test_event.ambr +++ /dev/null @@ -1,337 +0,0 @@ -# serializer version: 1 -# name: test_states[event.front_door_ding-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'event_types': list([ - 'ding', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'event', - 'entity_category': None, - 'entity_id': 'event.front_door_ding', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Ding', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ding', - 'unique_id': '987654-ding', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[event.front_door_ding-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'doorbell', - 'event_type': None, - 'event_types': list([ - 'ding', - ]), - 'friendly_name': 'Front Door Ding', - }), - 'context': , - 'entity_id': 'event.front_door_ding', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[event.front_door_motion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'event_types': list([ - 'motion', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 
'event', - 'entity_category': None, - 'entity_id': 'event.front_door_motion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Motion', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'motion', - 'unique_id': '987654-motion', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[event.front_door_motion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'motion', - 'event_type': None, - 'event_types': list([ - 'motion', - ]), - 'friendly_name': 'Front Door Motion', - }), - 'context': , - 'entity_id': 'event.front_door_motion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[event.front_motion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'event_types': list([ - 'motion', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'event', - 'entity_category': None, - 'entity_id': 'event.front_motion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Motion', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'motion', - 'unique_id': '765432-motion', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[event.front_motion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'motion', - 'event_type': None, - 'event_types': list([ - 'motion', - ]), - 'friendly_name': 'Front Motion', - }), - 'context': , - 'entity_id': 'event.front_motion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[event.ingress_ding-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'event_types': list([ - 'ding', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'event', - 'entity_category': None, - 'entity_id': 'event.ingress_ding', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Ding', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ding', - 'unique_id': '185036587-ding', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[event.ingress_ding-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'doorbell', - 'event_type': None, - 'event_types': list([ - 'ding', - ]), - 'friendly_name': 'Ingress Ding', - }), - 'context': , - 'entity_id': 'event.ingress_ding', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[event.ingress_intercom_unlock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'event_types': list([ - 'intercom_unlock', - ]), - }), - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'event', - 'entity_category': None, - 'entity_id': 'event.ingress_intercom_unlock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Intercom unlock', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'intercom_unlock', - 'unique_id': '185036587-intercom_unlock', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[event.ingress_intercom_unlock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'button', - 'event_type': None, - 'event_types': list([ - 'intercom_unlock', - ]), - 'friendly_name': 'Ingress Intercom unlock', - }), - 'context': , - 'entity_id': 'event.ingress_intercom_unlock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[event.internal_motion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'event_types': list([ - 'motion', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'event', - 'entity_category': None, - 'entity_id': 'event.internal_motion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Motion', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'motion', - 'unique_id': '345678-motion', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[event.internal_motion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'motion', - 'event_type': None, - 'event_types': list([ - 'motion', - ]), - 'friendly_name': 'Internal Motion', - }), - 'context': , - 'entity_id': 'event.internal_motion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/ring/snapshots/test_light.ambr b/tests/components/ring/snapshots/test_light.ambr deleted file mode 100644 index 73874fda259..00000000000 --- a/tests/components/ring/snapshots/test_light.ambr +++ /dev/null @@ -1,113 +0,0 @@ -# serializer version: 1 -# name: test_states[light.front_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.front_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'light', - 'unique_id': '765432', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[light.front_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'color_mode': None, - 'friendly_name': 'Front Light', - 'supported_color_modes': list([ - 
, - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.front_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_states[light.internal_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.internal_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'light', - 'unique_id': '345678', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[light.internal_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'color_mode': , - 'friendly_name': 'Internal Light', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.internal_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/ring/snapshots/test_number.ambr b/tests/components/ring/snapshots/test_number.ambr deleted file mode 100644 index 0873319b837..00000000000 --- a/tests/components/ring/snapshots/test_number.ambr +++ /dev/null @@ -1,393 +0,0 @@ -# serializer version: 1 -# name: test_states[number.downstairs_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.downstairs_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '123456-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.downstairs_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Downstairs Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.downstairs_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_states[number.front_door_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.front_door_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'volume', - 'unique_id': '987654-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.front_door_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Door Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.front_door_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.0', - }) -# --- -# name: test_states[number.front_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.front_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '765432-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.front_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.front_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.ingress_doorbell_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 8, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_doorbell_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Doorbell volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doorbell_volume', - 'unique_id': '185036587-doorbell_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_doorbell_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Doorbell volume', - 'max': 8, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_doorbell_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8.0', - }) -# --- -# name: test_states[number.ingress_mic_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_mic_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Mic volume', - 'platform': 
'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mic_volume', - 'unique_id': '185036587-mic_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_mic_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Mic volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_mic_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.ingress_voice_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_voice_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Voice volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voice_volume', - 'unique_id': '185036587-voice_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_voice_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Voice volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_voice_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.internal_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.internal_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '345678-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.internal_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Internal Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.internal_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- diff --git a/tests/components/ring/snapshots/test_sensor.ambr b/tests/components/ring/snapshots/test_sensor.ambr deleted file mode 100644 index 9fd1ac7ba84..00000000000 --- a/tests/components/ring/snapshots/test_sensor.ambr +++ /dev/null @@ -1,1116 +0,0 @@ -# serializer version: 1 -# name: test_states[sensor.downstairs_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 
'entity_category': None, - 'entity_id': 'sensor.downstairs_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '123456-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.downstairs_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Downstairs Volume', - }), - 'context': , - 'entity_id': 'sensor.downstairs_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_states[sensor.downstairs_wifi_signal_category-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.downstairs_wifi_signal_category', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Wi-Fi signal category', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wifi_signal_category', - 'unique_id': '123456-wifi_signal_category', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.downstairs_wifi_signal_category-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Downstairs Wi-Fi signal category', - }), - 'context': , - 'entity_id': 'sensor.downstairs_wifi_signal_category', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[sensor.downstairs_wifi_signal_strength-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.downstairs_wifi_signal_strength', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Wi-Fi signal strength', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wifi_signal_strength', - 'unique_id': '123456-wifi_signal_strength', - 'unit_of_measurement': 'dBm', - }) -# --- -# name: test_states[sensor.downstairs_wifi_signal_strength-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'signal_strength', - 'friendly_name': 'Downstairs Wi-Fi signal strength', - 'unit_of_measurement': 'dBm', - }), - 'context': , - 'entity_id': 'sensor.downstairs_wifi_signal_strength', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[sensor.front_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 
'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.front_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '765432-battery', - 'unit_of_measurement': '%', - }) -# --- -# name: test_states[sensor.front_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'battery', - 'friendly_name': 'Front Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.front_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '80', - }) -# --- -# name: test_states[sensor.front_door_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.front_door_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '987654-battery', - 'unit_of_measurement': '%', - }) -# --- -# name: test_states[sensor.front_door_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'battery', - 'friendly_name': 'Front Door Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.front_door_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_states[sensor.front_door_last_activity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.front_door_last_activity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last activity', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_activity', - 'unique_id': '987654-last_activity', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.front_door_last_activity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'timestamp', - 'friendly_name': 'Front Door Last activity', - }), - 'context': , - 'entity_id': 'sensor.front_door_last_activity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[sensor.front_door_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 
'entity_id': 'sensor.front_door_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '765432-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.front_door_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Volume', - }), - 'context': , - 'entity_id': 'sensor.front_door_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11', - }) -# --- -# name: test_states[sensor.front_door_wifi_signal_category-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.front_door_wifi_signal_category', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Wi-Fi signal category', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wifi_signal_category', - 'unique_id': '987654-wifi_signal_category', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.front_door_wifi_signal_category-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Door Wi-Fi signal category', - }), - 'context': , - 'entity_id': 'sensor.front_door_wifi_signal_category', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[sensor.front_door_wifi_signal_strength-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.front_door_wifi_signal_strength', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Wi-Fi signal strength', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wifi_signal_strength', - 'unique_id': '987654-wifi_signal_strength', - 'unit_of_measurement': 'dBm', - }) -# --- -# name: test_states[sensor.front_door_wifi_signal_strength-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'signal_strength', - 'friendly_name': 'Front Door Wi-Fi signal strength', - 'unit_of_measurement': 'dBm', - }), - 'context': , - 'entity_id': 'sensor.front_door_wifi_signal_strength', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[sensor.front_last_activity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 
'entity_id': 'sensor.front_last_activity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last activity', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_activity', - 'unique_id': '765432-last_activity', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.front_last_activity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'timestamp', - 'friendly_name': 'Front Last activity', - }), - 'context': , - 'entity_id': 'sensor.front_last_activity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[sensor.front_wifi_signal_category-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.front_wifi_signal_category', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Wi-Fi signal category', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wifi_signal_category', - 'unique_id': '765432-wifi_signal_category', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.front_wifi_signal_category-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Wi-Fi signal category', - }), - 'context': , - 'entity_id': 'sensor.front_wifi_signal_category', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[sensor.front_wifi_signal_strength-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.front_wifi_signal_strength', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Wi-Fi signal strength', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wifi_signal_strength', - 'unique_id': '765432-wifi_signal_strength', - 'unit_of_measurement': 'dBm', - }) -# --- -# name: test_states[sensor.front_wifi_signal_strength-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'signal_strength', - 'friendly_name': 'Front Wi-Fi signal strength', - 'unit_of_measurement': 'dBm', - }), - 'context': , - 'entity_id': 'sensor.front_wifi_signal_strength', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[sensor.ingress_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 
'sensor', - 'entity_category': , - 'entity_id': 'sensor.ingress_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '185036587-battery', - 'unit_of_measurement': '%', - }) -# --- -# name: test_states[sensor.ingress_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'battery', - 'friendly_name': 'Ingress Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.ingress_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '52', - }) -# --- -# name: test_states[sensor.ingress_doorbell_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ingress_doorbell_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Doorbell volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doorbell_volume', - 'unique_id': '185036587-doorbell_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.ingress_doorbell_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Doorbell volume', - }), - 'context': , - 'entity_id': 'sensor.ingress_doorbell_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8', - }) -# --- -# name: test_states[sensor.ingress_last_activity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ingress_last_activity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last activity', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_activity', - 'unique_id': '185036587-last_activity', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.ingress_last_activity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'timestamp', - 'friendly_name': 'Ingress Last activity', - }), - 'context': , - 'entity_id': 'sensor.ingress_last_activity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[sensor.ingress_mic_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ingress_mic_volume', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Mic volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mic_volume', - 'unique_id': '185036587-mic_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.ingress_mic_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Mic volume', - }), - 'context': , - 'entity_id': 'sensor.ingress_mic_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11', - }) -# --- -# name: test_states[sensor.ingress_voice_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ingress_voice_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Voice volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voice_volume', - 'unique_id': '185036587-voice_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.ingress_voice_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Voice volume', - }), - 'context': , - 'entity_id': 'sensor.ingress_voice_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11', - }) -# --- -# name: test_states[sensor.ingress_wifi_signal_category-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.ingress_wifi_signal_category', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Wi-Fi signal category', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wifi_signal_category', - 'unique_id': '185036587-wifi_signal_category', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.ingress_wifi_signal_category-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Wi-Fi signal category', - }), - 'context': , - 'entity_id': 'sensor.ingress_wifi_signal_category', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[sensor.ingress_wifi_signal_strength-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.ingress_wifi_signal_strength', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 
'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Wi-Fi signal strength', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wifi_signal_strength', - 'unique_id': '185036587-wifi_signal_strength', - 'unit_of_measurement': 'dBm', - }) -# --- -# name: test_states[sensor.ingress_wifi_signal_strength-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'signal_strength', - 'friendly_name': 'Ingress Wi-Fi signal strength', - 'unit_of_measurement': 'dBm', - }), - 'context': , - 'entity_id': 'sensor.ingress_wifi_signal_strength', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[sensor.internal_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.internal_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '345678-battery', - 'unit_of_measurement': '%', - }) -# --- -# name: test_states[sensor.internal_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'battery', - 'friendly_name': 'Internal Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.internal_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '80', - }) -# --- -# name: test_states[sensor.internal_last_activity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.internal_last_activity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last activity', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_activity', - 'unique_id': '345678-last_activity', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.internal_last_activity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'timestamp', - 'friendly_name': 'Internal Last activity', - }), - 'context': , - 'entity_id': 'sensor.internal_last_activity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[sensor.internal_wifi_signal_category-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.internal_wifi_signal_category', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 
'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Wi-Fi signal category', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wifi_signal_category', - 'unique_id': '345678-wifi_signal_category', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.internal_wifi_signal_category-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Internal Wi-Fi signal category', - }), - 'context': , - 'entity_id': 'sensor.internal_wifi_signal_category', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_states[sensor.internal_wifi_signal_strength-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.internal_wifi_signal_strength', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Wi-Fi signal strength', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wifi_signal_strength', - 'unique_id': '345678-wifi_signal_strength', - 'unit_of_measurement': 'dBm', - }) -# --- -# name: test_states[sensor.internal_wifi_signal_strength-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'device_class': 'signal_strength', - 'friendly_name': 'Internal Wi-Fi signal strength', - 'unit_of_measurement': 'dBm', - }), - 'context': , - 'entity_id': 'sensor.internal_wifi_signal_strength', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/ring/snapshots/test_siren.ambr b/tests/components/ring/snapshots/test_siren.ambr deleted file mode 100644 index c49ab2cb30f..00000000000 --- a/tests/components/ring/snapshots/test_siren.ambr +++ /dev/null @@ -1,154 +0,0 @@ -# serializer version: 1 -# name: test_states[siren.downstairs_siren-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'available_tones': list([ - 'ding', - 'motion', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'siren', - 'entity_category': None, - 'entity_id': 'siren.downstairs_siren', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Siren', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'siren', - 'unique_id': '123456-siren', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[siren.downstairs_siren-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'available_tones': list([ - 'ding', - 'motion', - ]), - 'friendly_name': 'Downstairs Siren', - 'supported_features': , - }), - 'context': , - 'entity_id': 'siren.downstairs_siren', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: 
test_states[siren.front_siren-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'siren', - 'entity_category': None, - 'entity_id': 'siren.front_siren', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Siren', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'siren', - 'unique_id': '765432', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[siren.front_siren-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Siren', - 'supported_features': , - }), - 'context': , - 'entity_id': 'siren.front_siren', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_states[siren.internal_siren-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'siren', - 'entity_category': None, - 'entity_id': 'siren.internal_siren', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Siren', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'siren', - 'unique_id': '345678', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[siren.internal_siren-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Internal Siren', - 'supported_features': , - }), - 'context': , - 'entity_id': 'siren.internal_siren', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/ring/snapshots/test_switch.ambr b/tests/components/ring/snapshots/test_switch.ambr deleted file mode 100644 index 57c27cfedfa..00000000000 --- a/tests/components/ring/snapshots/test_switch.ambr +++ /dev/null @@ -1,283 +0,0 @@ -# serializer version: 1 -# name: test_states[switch.front_door_in_home_chime-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.front_door_in_home_chime', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'In-home chime', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'in_home_chime', - 'unique_id': '987654-in_home_chime', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[switch.front_door_in_home_chime-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Door In-home chime', - }), - 'context': , - 'entity_id': 'switch.front_door_in_home_chime', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - 
}) -# --- -# name: test_states[switch.front_door_motion_detection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.front_door_motion_detection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Motion detection', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'motion_detection', - 'unique_id': '987654-motion_detection', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[switch.front_door_motion_detection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Door Motion detection', - }), - 'context': , - 'entity_id': 'switch.front_door_motion_detection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_states[switch.front_motion_detection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.front_motion_detection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Motion detection', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'motion_detection', - 'unique_id': '765432-motion_detection', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[switch.front_motion_detection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Motion detection', - }), - 'context': , - 'entity_id': 'switch.front_motion_detection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_states[switch.front_siren-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.front_siren', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Siren', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'siren', - 'unique_id': '765432-siren', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[switch.front_siren-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Siren', - }), - 'context': , - 'entity_id': 'switch.front_siren', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_states[switch.internal_motion_detection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': 
, - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.internal_motion_detection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Motion detection', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'motion_detection', - 'unique_id': '345678-motion_detection', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[switch.internal_motion_detection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Internal Motion detection', - }), - 'context': , - 'entity_id': 'switch.internal_motion_detection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_states[switch.internal_siren-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.internal_siren', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Siren', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'siren', - 'unique_id': '345678-siren', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[switch.internal_siren-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Internal Siren', - }), - 'context': , - 'entity_id': 'switch.internal_siren', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/ring/test_binary_sensor.py b/tests/components/ring/test_binary_sensor.py index 81d7d6e6687..16bc6e872c1 100644 --- a/tests/components/ring/test_binary_sensor.py +++ b/tests/components/ring/test_binary_sensor.py @@ -1,243 +1,24 @@ """The tests for the Ring binary sensor platform.""" -import time -from unittest.mock import Mock, patch - -from freezegun.api import FrozenDateTimeFactory -import pytest -from ring_doorbell import Ring -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN -from homeassistant.components.ring.binary_sensor import RingEvent -from homeassistant.components.ring.const import DOMAIN -from homeassistant.components.ring.coordinator import RingEventListener -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er, issue_registry as ir -from homeassistant.setup import async_setup_component -from .common import MockConfigEntry, setup_automation, setup_platform -from .device_mocks import ( - FRONT_DEVICE_ID, - FRONT_DOOR_DEVICE_ID, - INGRESS_DEVICE_ID, - INTERNAL_DEVICE_ID, -) - -from tests.common import async_fire_time_changed, snapshot_platform +from .common import setup_platform -@pytest.fixture -def create_deprecated_binary_sensor_entities( - hass: HomeAssistant, 
- mock_config_entry: ConfigEntry, - entity_registry: er.EntityRegistry, -): - """Create the entity so it is not ignored by the deprecation check.""" - mock_config_entry.add_to_hass(hass) - - def create_entry(device_name, device_id, key): - unique_id = f"{device_id}-{key}" - - entity_registry.async_get_or_create( - domain=BINARY_SENSOR_DOMAIN, - platform=DOMAIN, - unique_id=unique_id, - suggested_object_id=f"{device_name}_{key}", - config_entry=mock_config_entry, - ) - - create_entry("front", FRONT_DEVICE_ID, "motion") - create_entry("front_door", FRONT_DOOR_DEVICE_ID, "motion") - create_entry("internal", INTERNAL_DEVICE_ID, "motion") - - create_entry("ingress", INGRESS_DEVICE_ID, "ding") - create_entry("front_door", FRONT_DOOR_DEVICE_ID, "ding") - - -async def test_states( - hass: HomeAssistant, - mock_ring_client: Mock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - create_deprecated_binary_sensor_entities, -) -> None: - """Test states.""" - await setup_platform(hass, Platform.BINARY_SENSOR) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.parametrize( - ("device_id", "device_name", "alert_kind", "device_class"), - [ - pytest.param( - FRONT_DOOR_DEVICE_ID, - "front_door", - "motion", - "motion", - id="front_door_motion", - ), - pytest.param( - FRONT_DOOR_DEVICE_ID, - "front_door", - "ding", - "occupancy", - id="front_door_ding", - ), - pytest.param( - INGRESS_DEVICE_ID, "ingress", "ding", "occupancy", id="ingress_ding" - ), - ], -) -async def test_binary_sensor( - hass: HomeAssistant, - mock_config_entry: ConfigEntry, - mock_ring_client: Ring, - mock_ring_event_listener_class: RingEventListener, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, - device_id: int, - device_name: str, - alert_kind: str, - device_class: str, -) -> None: +async def test_binary_sensor(hass: HomeAssistant, mock_ring_client) -> None: """Test the Ring binary sensors.""" - # Create the entity so it is not ignored by the deprecation check - mock_config_entry.add_to_hass(hass) + await setup_platform(hass, Platform.BINARY_SENSOR) - entity_id = f"binary_sensor.{device_name}_{alert_kind}" - unique_id = f"{device_id}-{alert_kind}" + motion_state = hass.states.get("binary_sensor.front_door_motion") + assert motion_state is not None + assert motion_state.state == "on" + assert motion_state.attributes["device_class"] == "motion" - entity_registry.async_get_or_create( - domain=BINARY_SENSOR_DOMAIN, - platform=DOMAIN, - unique_id=unique_id, - suggested_object_id=f"{device_name}_{alert_kind}", - config_entry=mock_config_entry, - ) - with patch("homeassistant.components.ring.PLATFORMS", [Platform.BINARY_SENSOR]): - assert await async_setup_component(hass, DOMAIN, {}) + front_ding_state = hass.states.get("binary_sensor.front_door_ding") + assert front_ding_state is not None + assert front_ding_state.state == "off" - on_event_cb = mock_ring_event_listener_class.return_value.add_notification_callback.call_args.args[ - 0 - ] - - # Default state is set to off - - state = hass.states.get(entity_id) - assert state is not None - assert state.state == STATE_OFF - assert state.attributes["device_class"] == device_class - - # A new alert sets to on - event = RingEvent( - 1234546, device_id, "Foo", "Bar", time.time(), 180, kind=alert_kind, state=None - ) - mock_ring_client.active_alerts.return_value = [event] - on_event_cb(event) - state = hass.states.get(entity_id) - assert state is not None - assert 
state.state == STATE_ON - - # Test that another event resets the expiry callback - freezer.tick(60) - async_fire_time_changed(hass) - await hass.async_block_till_done() - event = RingEvent( - 1234546, device_id, "Foo", "Bar", time.time(), 180, kind=alert_kind, state=None - ) - mock_ring_client.active_alerts.return_value = [event] - on_event_cb(event) - state = hass.states.get(entity_id) - assert state is not None - assert state.state == STATE_ON - - freezer.tick(120) - async_fire_time_changed(hass) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state is not None - assert state.state == STATE_ON - - # Test the second alert has expired - freezer.tick(60) - async_fire_time_changed(hass) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state is not None - assert state.state == STATE_OFF - - -async def test_binary_sensor_not_exists_with_deprecation( - hass: HomeAssistant, - mock_config_entry: ConfigEntry, - mock_ring_client: Ring, - entity_registry: er.EntityRegistry, -) -> None: - """Test the deprecated Ring binary sensors are deleted or raise issues.""" - mock_config_entry.add_to_hass(hass) - - entity_id = "binary_sensor.front_door_motion" - - assert not hass.states.get(entity_id) - with patch("homeassistant.components.ring.PLATFORMS", [Platform.BINARY_SENSOR]): - assert await async_setup_component(hass, DOMAIN, {}) - - assert not entity_registry.async_get(entity_id) - assert not er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id - ) - assert not hass.states.get(entity_id) - - -@pytest.mark.parametrize( - ("entity_disabled", "entity_has_automations"), - [ - pytest.param(False, False, id="without-automations"), - pytest.param(False, True, id="with-automations"), - pytest.param(True, False, id="disabled"), - ], -) -async def test_binary_sensor_exists_with_deprecation( - hass: HomeAssistant, - mock_config_entry: ConfigEntry, - mock_ring_client: Ring, - entity_registry: er.EntityRegistry, - issue_registry: ir.IssueRegistry, - entity_disabled: bool, - entity_has_automations: bool, -) -> None: - """Test the deprecated Ring binary sensors are deleted or raise issues.""" - mock_config_entry.add_to_hass(hass) - - entity_id = "binary_sensor.front_door_motion" - unique_id = f"{FRONT_DOOR_DEVICE_ID}-motion" - issue_id = f"deprecated_entity_{entity_id}_automation.test_automation" - - if entity_has_automations: - await setup_automation(hass, "test_automation", entity_id) - - entity = entity_registry.async_get_or_create( - domain=BINARY_SENSOR_DOMAIN, - platform=DOMAIN, - unique_id=unique_id, - suggested_object_id="front_door_motion", - config_entry=mock_config_entry, - disabled_by=er.RegistryEntryDisabler.USER if entity_disabled else None, - ) - assert entity.entity_id == entity_id - assert not hass.states.get(entity_id) - with patch("homeassistant.components.ring.PLATFORMS", [Platform.BINARY_SENSOR]): - assert await async_setup_component(hass, DOMAIN, {}) - - entity = entity_registry.async_get(entity_id) - # entity and state will be none if removed from registry - assert (entity is None) == entity_disabled - assert (hass.states.get(entity_id) is None) == entity_disabled - - assert ( - issue_registry.async_get_issue(DOMAIN, issue_id) is not None - ) == entity_has_automations + ingress_ding_state = hass.states.get("binary_sensor.ingress_ding") + assert ingress_ding_state is not None + assert ingress_ding_state.state == "off" diff --git a/tests/components/ring/test_button.py 
b/tests/components/ring/test_button.py index ada02f206f5..6fef3295159 100644 --- a/tests/components/ring/test_button.py +++ b/tests/components/ring/test_button.py @@ -1,29 +1,22 @@ """The tests for the Ring button platform.""" -from unittest.mock import Mock - -from syrupy.assertion import SnapshotAssertion - from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import MockConfigEntry, setup_platform - -from tests.common import snapshot_platform +from .common import setup_platform -async def test_states( +async def test_entity_registry( hass: HomeAssistant, - mock_ring_client: Mock, - mock_config_entry: MockConfigEntry, + mock_ring_client, entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, ) -> None: - """Test states.""" - mock_config_entry.add_to_hass(hass) + """Tests that the devices are registered in the entity registry.""" await setup_platform(hass, Platform.BUTTON) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + entry = entity_registry.async_get("button.ingress_open_door") + assert entry.unique_id == "185036587-open_door" async def test_button_opens_door( @@ -35,11 +28,11 @@ async def test_button_opens_door( await setup_platform(hass, Platform.BUTTON) mock_intercom = mock_ring_devices.get_device(185036587) - mock_intercom.async_open_door.assert_not_called() + mock_intercom.open_door.assert_not_called() await hass.services.async_call( "button", "press", {"entity_id": "button.ingress_open_door"}, blocking=True ) await hass.async_block_till_done(wait_background_tasks=True) - mock_intercom.async_open_door.assert_called_once() + mock_intercom.open_door.assert_called_once() diff --git a/tests/components/ring/test_camera.py b/tests/components/ring/test_camera.py index 94ddc335dac..20a9ed5f0c9 100644 --- a/tests/components/ring/test_camera.py +++ b/tests/components/ring/test_camera.py @@ -1,48 +1,32 @@ """The tests for the Ring switch platform.""" -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import PropertyMock -from aiohttp.test_utils import make_mocked_request -from freezegun.api import FrozenDateTimeFactory import pytest import ring_doorbell -from syrupy.assertion import SnapshotAssertion -from homeassistant.components import camera -from homeassistant.components.ring.camera import FORCE_REFRESH_INTERVAL -from homeassistant.components.ring.const import SCAN_INTERVAL from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from homeassistant.util.aiohttp import MockStreamReader -from .common import MockConfigEntry, setup_platform - -from tests.common import async_fire_time_changed, snapshot_platform - -SMALLEST_VALID_JPEG = ( - "ffd8ffe000104a46494600010101004800480000ffdb00430003020202020203020202030303030406040404040408060" - "6050609080a0a090809090a0c0f0c0a0b0e0b09090d110d0e0f101011100a0c12131210130f101010ffc9000b08000100" - "0101011100ffcc000600101005ffda0008010100003f00d2cf20ffd9" -) -SMALLEST_VALID_JPEG_BYTES = bytes.fromhex(SMALLEST_VALID_JPEG) +from .common import setup_platform -async def test_states( +async def test_entity_registry( hass: HomeAssistant, - mock_ring_client: Mock, - mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, + mock_ring_client, ) 
-> None: - """Test states.""" - mock_config_entry.add_to_hass(hass) - # Patch getrandbits so the access_token doesn't change on camera attributes - with patch("random.SystemRandom.getrandbits", return_value=123123123123): - await setup_platform(hass, Platform.CAMERA) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + """Tests that the devices are registered in the entity registry.""" + await setup_platform(hass, Platform.CAMERA) + + entry = entity_registry.async_get("camera.front") + assert entry.unique_id == "765432" + + entry = entity_registry.async_get("camera.internal") + assert entry.unique_id == "345678" @pytest.mark.parametrize( @@ -68,7 +52,7 @@ async def test_camera_motion_detection_state_reports_correctly( assert state.attributes.get("friendly_name") == friendly_name -async def test_camera_motion_detection_can_be_turned_on_and_off( +async def test_camera_motion_detection_can_be_turned_on( hass: HomeAssistant, mock_ring_client ) -> None: """Tests the siren turns on correctly.""" @@ -89,54 +73,24 @@ async def test_camera_motion_detection_can_be_turned_on_and_off( state = hass.states.get("camera.front") assert state.attributes.get("motion_detection") is True - await hass.services.async_call( - "camera", - "disable_motion_detection", - {"entity_id": "camera.front"}, - blocking=True, - ) - await hass.async_block_till_done() - - state = hass.states.get("camera.front") - assert state.attributes.get("motion_detection") is None - - -async def test_camera_motion_detection_not_supported( - hass: HomeAssistant, - mock_ring_client, - mock_ring_devices, - caplog: pytest.LogCaptureFixture, +async def test_updates_work( + hass: HomeAssistant, mock_ring_client, mock_ring_devices ) -> None: - """Tests the siren turns on correctly.""" - front_camera_mock = mock_ring_devices.get_device(765432) - has_capability = front_camera_mock.has_capability.side_effect - - def _has_capability(capability): - if capability == "motion_detection": - return False - return has_capability(capability) - - front_camera_mock.has_capability.side_effect = _has_capability - + """Tests the update service works correctly.""" await setup_platform(hass, Platform.CAMERA) + state = hass.states.get("camera.internal") + assert state.attributes.get("motion_detection") is True - state = hass.states.get("camera.front") - assert state.attributes.get("motion_detection") is None + internal_camera_mock = mock_ring_devices.get_device(345678) + internal_camera_mock.motion_detection = False - await hass.services.async_call( - "camera", - "enable_motion_detection", - {"entity_id": "camera.front"}, - blocking=True, - ) + await hass.services.async_call("ring", "update", {}, blocking=True) await hass.async_block_till_done() - state = hass.states.get("camera.front") - assert state.attributes.get("motion_detection") is None - assert ( - "Entity camera.front does not have motion detection capability" in caplog.text - ) + + state = hass.states.get("camera.internal") + assert state.attributes.get("motion_detection") is not True @pytest.mark.parametrize( @@ -162,7 +116,8 @@ async def test_motion_detection_errors_when_turned_on( assert not any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) front_camera_mock = mock_ring_devices.get_device(765432) - front_camera_mock.async_set_motion_detection.side_effect = exception_type + p = PropertyMock(side_effect=exception_type) + type(front_camera_mock).motion_detection = p with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -172,7 +127,7 @@ 
async def test_motion_detection_errors_when_turned_on( blocking=True, ) await hass.async_block_till_done() - front_camera_mock.async_set_motion_detection.assert_called_once() + p.assert_called_once() assert ( any( flow @@ -181,117 +136,3 @@ async def test_motion_detection_errors_when_turned_on( ) == reauth_expected ) - - -async def test_camera_handle_mjpeg_stream( - hass: HomeAssistant, - mock_ring_client, - mock_ring_devices, - freezer: FrozenDateTimeFactory, -) -> None: - """Test camera returns handle mjpeg stream when available.""" - await setup_platform(hass, Platform.CAMERA) - - front_camera_mock = mock_ring_devices.get_device(765432) - front_camera_mock.async_recording_url.return_value = None - - state = hass.states.get("camera.front") - assert state is not None - - mock_request = make_mocked_request("GET", "/", headers={"token": "x"}) - - # history not updated yet - front_camera_mock.async_history.assert_not_called() - front_camera_mock.async_recording_url.assert_not_called() - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") - assert stream is None - - # Video url will be none so no stream - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.async_history.assert_called_once() - front_camera_mock.async_recording_url.assert_called_once() - - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") - assert stream is None - - # Stop the history updating so we can update the values manually - front_camera_mock.async_history = AsyncMock() - front_camera_mock.last_history[0]["recording"]["status"] = "not ready" - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.async_recording_url.assert_called_once() - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") - assert stream is None - - # If the history id hasn't changed the camera will not check again for the video url - # until the FORCE_REFRESH_INTERVAL has passed - front_camera_mock.last_history[0]["recording"]["status"] = "ready" - front_camera_mock.async_recording_url = AsyncMock(return_value="http://dummy.url") - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.async_recording_url.assert_not_called() - - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") - assert stream is None - - freezer.tick(FORCE_REFRESH_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.async_recording_url.assert_called_once() - - # Now the stream should be returned - stream_reader = MockStreamReader(SMALLEST_VALID_JPEG_BYTES) - with patch("homeassistant.components.ring.camera.CameraMjpeg") as mock_camera: - mock_camera.return_value.get_reader = AsyncMock(return_value=stream_reader) - mock_camera.return_value.open_camera = AsyncMock() - mock_camera.return_value.close = AsyncMock() - - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") - assert stream is not None - # Check the stream has been read - assert not await stream_reader.read(-1) - - -async def test_camera_image( - hass: HomeAssistant, - mock_ring_client, - mock_ring_devices, - freezer: FrozenDateTimeFactory, -) -> None: - """Test camera will return still image when available.""" - await setup_platform(hass, 
Platform.CAMERA) - - front_camera_mock = mock_ring_devices.get_device(765432) - - state = hass.states.get("camera.front") - assert state is not None - - # history not updated yet - front_camera_mock.async_history.assert_not_called() - front_camera_mock.async_recording_url.assert_not_called() - with ( - patch( - "homeassistant.components.ring.camera.ffmpeg.async_get_image", - return_value=SMALLEST_VALID_JPEG_BYTES, - ), - pytest.raises(HomeAssistantError), - ): - image = await camera.async_get_image(hass, "camera.front") - - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - # history updated so image available - front_camera_mock.async_history.assert_called_once() - front_camera_mock.async_recording_url.assert_called_once() - - with patch( - "homeassistant.components.ring.camera.ffmpeg.async_get_image", - return_value=SMALLEST_VALID_JPEG_BYTES, - ): - image = await camera.async_get_image(hass, "camera.front") - assert image.content == SMALLEST_VALID_JPEG_BYTES diff --git a/tests/components/ring/test_config_flow.py b/tests/components/ring/test_config_flow.py index 409cdac55aa..2420bb9cc50 100644 --- a/tests/components/ring/test_config_flow.py +++ b/tests/components/ring/test_config_flow.py @@ -1,19 +1,15 @@ """Test the Ring config flow.""" -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, Mock import pytest import ring_doorbell from homeassistant import config_entries -from homeassistant.components import dhcp from homeassistant.components.ring import DOMAIN -from homeassistant.const import CONF_DEVICE_ID, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import device_registry as dr - -from .conftest import MOCK_HARDWARE_ID from tests.common import MockConfigEntry @@ -31,19 +27,17 @@ async def test_form( assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with patch("uuid.uuid4", return_value=MOCK_HARDWARE_ID): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "hello@home-assistant.io", "password": "test-password"}, - ) - await hass.async_block_till_done() + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": "hello@home-assistant.io", "password": "test-password"}, + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "hello@home-assistant.io" assert result2["data"] == { - CONF_DEVICE_ID: MOCK_HARDWARE_ID, - CONF_USERNAME: "hello@home-assistant.io", - CONF_TOKEN: {"access_token": "mock-token"}, + "username": "hello@home-assistant.io", + "token": {"access_token": "mock-token"}, } assert len(mock_setup_entry.mock_calls) == 1 @@ -63,7 +57,7 @@ async def test_form_error( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - mock_ring_auth.async_fetch_token.side_effect = error_type + mock_ring_auth.fetch_token.side_effect = error_type result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"username": "hello@home-assistant.io", "password": "test-password"}, @@ -85,38 +79,36 @@ async def test_form_2fa( assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - mock_ring_auth.async_fetch_token.side_effect = 
ring_doorbell.Requires2FAError - with patch("uuid.uuid4", return_value=MOCK_HARDWARE_ID): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "foo@bar.com", - CONF_PASSWORD: "fake-password", - }, - ) + mock_ring_auth.fetch_token.side_effect = ring_doorbell.Requires2FAError + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "foo@bar.com", + CONF_PASSWORD: "fake-password", + }, + ) await hass.async_block_till_done() - mock_ring_auth.async_fetch_token.assert_called_once_with( + mock_ring_auth.fetch_token.assert_called_once_with( "foo@bar.com", "fake-password", None ) assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "2fa" - mock_ring_auth.async_fetch_token.reset_mock(side_effect=True) - mock_ring_auth.async_fetch_token.return_value = "new-foobar" + mock_ring_auth.fetch_token.reset_mock(side_effect=True) + mock_ring_auth.fetch_token.return_value = "new-foobar" result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], user_input={"2fa": "123456"}, ) - mock_ring_auth.async_fetch_token.assert_called_once_with( + mock_ring_auth.fetch_token.assert_called_once_with( "foo@bar.com", "fake-password", "123456" ) assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "foo@bar.com" assert result3["data"] == { - CONF_DEVICE_ID: MOCK_HARDWARE_ID, - CONF_USERNAME: "foo@bar.com", - CONF_TOKEN: "new-foobar", + "username": "foo@bar.com", + "token": "new-foobar", } assert len(mock_setup_entry.mock_calls) == 1 @@ -136,7 +128,7 @@ async def test_reauth( [result] = flows assert result["step_id"] == "reauth_confirm" - mock_ring_auth.async_fetch_token.side_effect = ring_doorbell.Requires2FAError + mock_ring_auth.fetch_token.side_effect = ring_doorbell.Requires2FAError result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -144,27 +136,26 @@ async def test_reauth( }, ) - mock_ring_auth.async_fetch_token.assert_called_once_with( + mock_ring_auth.fetch_token.assert_called_once_with( "foo@bar.com", "other_fake_password", None ) assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "2fa" - mock_ring_auth.async_fetch_token.reset_mock(side_effect=True) - mock_ring_auth.async_fetch_token.return_value = "new-foobar" + mock_ring_auth.fetch_token.reset_mock(side_effect=True) + mock_ring_auth.fetch_token.return_value = "new-foobar" result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], user_input={"2fa": "123456"}, ) - mock_ring_auth.async_fetch_token.assert_called_once_with( + mock_ring_auth.fetch_token.assert_called_once_with( "foo@bar.com", "other_fake_password", "123456" ) assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "reauth_successful" assert mock_added_config_entry.data == { - CONF_DEVICE_ID: MOCK_HARDWARE_ID, - CONF_USERNAME: "foo@bar.com", - CONF_TOKEN: "new-foobar", + "username": "foo@bar.com", + "token": "new-foobar", } assert len(mock_setup_entry.mock_calls) == 1 @@ -194,7 +185,7 @@ async def test_reauth_error( [result] = flows assert result["step_id"] == "reauth_confirm" - mock_ring_auth.async_fetch_token.side_effect = error_type + mock_ring_auth.fetch_token.side_effect = error_type result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -203,15 +194,15 @@ async def test_reauth_error( ) await hass.async_block_till_done() - mock_ring_auth.async_fetch_token.assert_called_once_with( + 
mock_ring_auth.fetch_token.assert_called_once_with( "foo@bar.com", "error_fake_password", None ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": errors_msg} # Now test reauth can go on to succeed - mock_ring_auth.async_fetch_token.reset_mock(side_effect=True) - mock_ring_auth.async_fetch_token.return_value = "new-foobar" + mock_ring_auth.fetch_token.reset_mock(side_effect=True) + mock_ring_auth.fetch_token.return_value = "new-foobar" result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], user_input={ @@ -219,191 +210,13 @@ async def test_reauth_error( }, ) - mock_ring_auth.async_fetch_token.assert_called_once_with( + mock_ring_auth.fetch_token.assert_called_once_with( "foo@bar.com", "other_fake_password", None ) assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "reauth_successful" assert mock_added_config_entry.data == { - CONF_DEVICE_ID: MOCK_HARDWARE_ID, - CONF_USERNAME: "foo@bar.com", - CONF_TOKEN: "new-foobar", - } - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_account_configured( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_added_config_entry: Mock, -) -> None: - """Test that user cannot configure the same account twice.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "foo@bar.com", "password": "test-password"}, - ) - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "already_configured" - - -async def test_dhcp_discovery( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_ring_client: Mock, - device_registry: dr.DeviceRegistry, -) -> None: - """Test discovery by dhcp.""" - mac_address = "1234567890abcd" - hostname = "Ring-90abcd" - ip_address = "127.0.0.1" - username = "hello@home-assistant.io" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_DHCP}, - data=dhcp.DhcpServiceInfo( - ip=ip_address, macaddress=mac_address, hostname=hostname - ), - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - assert result["step_id"] == "user" - with patch("uuid.uuid4", return_value=MOCK_HARDWARE_ID): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": username, "password": "test-password"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "hello@home-assistant.io" - assert result["data"] == { - CONF_DEVICE_ID: MOCK_HARDWARE_ID, - CONF_USERNAME: username, - CONF_TOKEN: {"access_token": "mock-token"}, - } - - config_entry = hass.config_entries.async_entry_for_domain_unique_id( - DOMAIN, username - ) - assert config_entry - - # Create a device entry under the config entry just created - device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, mac_address)}, - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_DHCP}, - data=dhcp.DhcpServiceInfo( - ip=ip_address, macaddress=mac_address, hostname=hostname - ), - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_reconfigure( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_ring_client: Mock, - 
mock_added_config_entry: MockConfigEntry, -) -> None: - """Test the reconfigure config flow.""" - - assert mock_added_config_entry.data[CONF_DEVICE_ID] == MOCK_HARDWARE_ID - - result = await mock_added_config_entry.start_reconfigure_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" - - with patch("uuid.uuid4", return_value="new-hardware-id"): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"password": "test-password"}, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" - assert mock_added_config_entry.data[CONF_DEVICE_ID] == "new-hardware-id" - - -@pytest.mark.parametrize( - ("error_type", "errors_msg"), - [ - (ring_doorbell.AuthenticationError, "invalid_auth"), - (Exception, "unknown"), - ], - ids=["invalid-auth", "unknown-error"], -) -async def test_reconfigure_errors( - hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, - mock_ring_auth: Mock, - error_type, - errors_msg, -) -> None: - """Test errors during the reconfigure config flow.""" - result = await mock_added_config_entry.start_reconfigure_flow(hass) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" - - mock_ring_auth.async_fetch_token.side_effect = error_type - with patch("uuid.uuid4", return_value="new-hardware-id"): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PASSWORD: "error_fake_password", - }, - ) - await hass.async_block_till_done() - mock_ring_auth.async_fetch_token.assert_called_with( - "foo@bar.com", "error_fake_password", None - ) - mock_ring_auth.async_fetch_token.side_effect = ring_doorbell.Requires2FAError - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={ - CONF_PASSWORD: "other_fake_password", - }, - ) - - mock_ring_auth.async_fetch_token.assert_called_with( - "foo@bar.com", "other_fake_password", None - ) - assert result3["type"] is FlowResultType.FORM - assert result3["step_id"] == "2fa" - - # Now test reconfigure can go on to succeed - mock_ring_auth.async_fetch_token.reset_mock(side_effect=True) - mock_ring_auth.async_fetch_token.return_value = "new-foobar" - - result4 = await hass.config_entries.flow.async_configure( - result3["flow_id"], - user_input={"2fa": "123456"}, - ) - - mock_ring_auth.async_fetch_token.assert_called_with( - "foo@bar.com", "other_fake_password", "123456" - ) - - assert result4["type"] is FlowResultType.ABORT - assert result4["reason"] == "reconfigure_successful" - assert mock_added_config_entry.data == { - CONF_DEVICE_ID: "new-hardware-id", - CONF_USERNAME: "foo@bar.com", - CONF_TOKEN: "new-foobar", + "username": "foo@bar.com", + "token": "new-foobar", } assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/ring/test_event.py b/tests/components/ring/test_event.py deleted file mode 100644 index 5cd60382a97..00000000000 --- a/tests/components/ring/test_event.py +++ /dev/null @@ -1,98 +0,0 @@ -"""The tests for the Ring event platform.""" - -from datetime import datetime -import time -from unittest.mock import Mock - -from freezegun.api import FrozenDateTimeFactory -import pytest -from ring_doorbell import Ring -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.ring.binary_sensor import RingEvent -from 
homeassistant.components.ring.coordinator import RingEventListener -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .common import MockConfigEntry, setup_platform -from .device_mocks import FRONT_DOOR_DEVICE_ID, INGRESS_DEVICE_ID - -from tests.common import snapshot_platform - - -async def test_states( - hass: HomeAssistant, - mock_ring_client: Mock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test states.""" - mock_config_entry.add_to_hass(hass) - await setup_platform(hass, Platform.EVENT) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.parametrize( - ("device_id", "device_name", "alert_kind", "device_class"), - [ - pytest.param( - FRONT_DOOR_DEVICE_ID, - "front_door", - "motion", - "motion", - id="front_door_motion", - ), - pytest.param( - FRONT_DOOR_DEVICE_ID, "front_door", "ding", "doorbell", id="front_door_ding" - ), - pytest.param( - INGRESS_DEVICE_ID, "ingress", "ding", "doorbell", id="ingress_ding" - ), - pytest.param( - INGRESS_DEVICE_ID, - "ingress", - "intercom_unlock", - "button", - id="ingress_unlock", - ), - ], -) -async def test_event( - hass: HomeAssistant, - mock_ring_client: Ring, - mock_ring_event_listener_class: RingEventListener, - freezer: FrozenDateTimeFactory, - device_id: int, - device_name: str, - alert_kind: str, - device_class: str, -) -> None: - """Test the Ring event platforms.""" - - await setup_platform(hass, Platform.EVENT) - - start_time_str = "2024-09-04T15:32:53.892+00:00" - start_time = datetime.strptime(start_time_str, "%Y-%m-%dT%H:%M:%S.%f%z") - freezer.move_to(start_time) - on_event_cb = mock_ring_event_listener_class.return_value.add_notification_callback.call_args.args[ - 0 - ] - - # Default state is unknown - entity_id = f"event.{device_name}_{alert_kind}" - state = hass.states.get(entity_id) - assert state is not None - assert state.state == "unknown" - assert state.attributes["device_class"] == device_class - - # A new alert sets to on - event = RingEvent( - 1234546, device_id, "Foo", "Bar", time.time(), 180, kind=alert_kind, state=None - ) - mock_ring_client.active_alerts.return_value = [event] - on_event_cb(event) - state = hass.states.get(entity_id) - assert state is not None - assert state.state == start_time_str diff --git a/tests/components/ring/test_init.py b/tests/components/ring/test_init.py index 1b5ee68c659..d8529e874b9 100644 --- a/tests/components/ring/test_init.py +++ b/tests/components/ring/test_init.py @@ -1,27 +1,20 @@ """The tests for the Ring component.""" -from unittest.mock import AsyncMock, patch - from freezegun.api import FrozenDateTimeFactory import pytest -from ring_doorbell import AuthenticationError, Ring, RingError, RingTimeout +from ring_doorbell import AuthenticationError, RingError, RingTimeout from homeassistant.components import ring -from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.ring import DOMAIN -from homeassistant.components.ring.const import CONF_LISTEN_CREDENTIALS, SCAN_INTERVAL -from homeassistant.components.ring.coordinator import RingEventListener +from homeassistant.components.ring.const import SCAN_INTERVAL from homeassistant.config_entries import SOURCE_REAUTH, 
ConfigEntryState -from homeassistant.const import CONF_DEVICE_ID, CONF_TOKEN, CONF_USERNAME +from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.setup import async_setup_component -from .conftest import MOCK_HARDWARE_ID -from .device_mocks import FRONT_DOOR_DEVICE_ID - from tests.common import MockConfigEntry, async_fire_time_changed @@ -49,11 +42,11 @@ async def test_setup_entry_device_update( """Test devices are updating after setup entry.""" front_door_doorbell = mock_ring_devices.get_device(987654) - front_door_doorbell.async_history.assert_not_called() + front_door_doorbell.history.assert_not_called() freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_door_doorbell.async_history.assert_called_once() + front_door_doorbell.history.assert_called_once() async def test_auth_failed_on_setup( @@ -63,7 +56,7 @@ async def test_auth_failed_on_setup( ) -> None: """Test auth failure on setup entry.""" mock_config_entry.add_to_hass(hass) - mock_ring_client.async_update_data.side_effect = AuthenticationError + mock_ring_client.update_data.side_effect = AuthenticationError assert not any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -97,7 +90,7 @@ async def test_error_on_setup( """Test non-auth errors on setup entry.""" mock_config_entry.add_to_hass(hass) - mock_ring_client.async_update_data.side_effect = error_type + mock_ring_client.update_data.side_effect = error_type await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -120,7 +113,7 @@ async def test_auth_failure_on_global_update( await hass.async_block_till_done() assert not any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) - mock_ring_client.async_update_devices.side_effect = AuthenticationError + mock_ring_client.update_devices.side_effect = AuthenticationError freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -146,7 +139,7 @@ async def test_auth_failure_on_device_update( assert not any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) front_door_doorbell = mock_ring_devices.get_device(987654) - front_door_doorbell.async_history.side_effect = AuthenticationError + front_door_doorbell.history.side_effect = AuthenticationError freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -185,7 +178,7 @@ async def test_error_on_global_update( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_ring_client.async_update_devices.side_effect = error_type + mock_ring_client.update_devices.side_effect = error_type freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -193,7 +186,7 @@ async def test_error_on_global_update( assert log_msg in caplog.text - assert hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert mock_config_entry.entry_id in hass.data[DOMAIN] @pytest.mark.parametrize( @@ -226,14 +219,41 @@ async def test_error_on_device_update( await hass.async_block_till_done() front_door_doorbell = mock_ring_devices.get_device(765432) - front_door_doorbell.async_history.side_effect = error_type + front_door_doorbell.history.side_effect = error_type freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await 
hass.async_block_till_done(wait_background_tasks=True) assert log_msg in caplog.text - assert hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert mock_config_entry.entry_id in hass.data[DOMAIN] + + +async def test_issue_deprecated_service_ring_update( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + caplog: pytest.LogCaptureFixture, + mock_ring_client, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the issue is raised on deprecated service ring.update.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call(DOMAIN, "update", {}, blocking=True) + + issue = issue_registry.async_get_issue("ring", "deprecated_service_ring_update") + assert issue + assert issue.issue_domain == "ring" + assert issue.issue_id == "deprecated_service_ring_update" + assert issue.translation_key == "deprecated_service_ring_update" + + assert ( + "Detected use of service 'ring.update'. " + "This is deprecated and will stop working in Home Assistant 2024.10. " + "Use 'homeassistant.update_entity' instead which updates all ring entities" + ) in caplog.text @pytest.mark.parametrize( @@ -366,119 +386,3 @@ async def test_update_unique_id_no_update( assert entity_migrated assert entity_migrated.unique_id == correct_unique_id assert "Fixing non string unique id" not in caplog.text - - -async def test_token_updated( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_config_entry: MockConfigEntry, - mock_ring_client, - mock_ring_init_auth_class, -) -> None: - """Test that the token value is updated in the config entry. - - This simulates the api calling the callback. - """ - mock_config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert mock_ring_init_auth_class.call_count == 1 - token_updater = mock_ring_init_auth_class.call_args.args[2] - assert mock_config_entry.data[CONF_TOKEN] == {"access_token": "mock-token"} - - mock_ring_client.async_update_devices.side_effect = lambda: token_updater( - {"access_token": "new-mock-token"} - ) - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert mock_config_entry.data[CONF_TOKEN] == {"access_token": "new-mock-token"} - - -async def test_listen_token_updated( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_config_entry: MockConfigEntry, - mock_ring_client, - mock_ring_event_listener_class, -) -> None: - """Test that the listener token value is updated in the config entry. - - This simulates the api calling the callback. 
- """ - mock_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert mock_ring_event_listener_class.call_count == 1 - token_updater = mock_ring_event_listener_class.call_args.args[2] - - assert mock_config_entry.data.get(CONF_LISTEN_CREDENTIALS) is None - token_updater({"listen_access_token": "mock-token"}) - assert mock_config_entry.data.get(CONF_LISTEN_CREDENTIALS) == { - "listen_access_token": "mock-token" - } - - -async def test_no_listen_start( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - entity_registry: er.EntityRegistry, - mock_ring_event_listener_class: type[RingEventListener], - mock_ring_client: Ring, -) -> None: - """Test behaviour if listener doesn't start.""" - mock_entry = MockConfigEntry( - domain=DOMAIN, - version=1, - data={"username": "foo", "token": {}}, - ) - # Create a binary sensor entity so it is not ignored by the deprecation check - # and the listener will start - entity_registry.async_get_or_create( - domain=BINARY_SENSOR_DOMAIN, - platform=DOMAIN, - unique_id=f"{FRONT_DOOR_DEVICE_ID}-motion", - suggested_object_id=f"{FRONT_DOOR_DEVICE_ID}_motion", - config_entry=mock_entry, - ) - mock_ring_event_listener_class.do_not_start = True - - mock_ring_event_listener_class.return_value.started = False - - mock_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() - - assert "Ring event listener failed to start after 10 seconds" in [ - record.message for record in caplog.records if record.levelname == "WARNING" - ] - - -async def test_migrate_create_device_id( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test migration creates new device id created.""" - entry = MockConfigEntry( - title="Ring", - domain=DOMAIN, - data={ - CONF_USERNAME: "foo@bar.com", - "token": {"access_token": "mock-token"}, - }, - unique_id="foo@bar.com", - version=1, - minor_version=1, - ) - entry.add_to_hass(hass) - with patch("uuid.uuid4", return_value=MOCK_HARDWARE_ID): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert entry.minor_version == 2 - assert CONF_DEVICE_ID in entry.data - assert entry.data[CONF_DEVICE_ID] == MOCK_HARDWARE_ID - - assert "Migration to version 1.2 complete" in caplog.text diff --git a/tests/components/ring/test_light.py b/tests/components/ring/test_light.py index 0be314c3135..c2d21a22951 100644 --- a/tests/components/ring/test_light.py +++ b/tests/components/ring/test_light.py @@ -1,10 +1,9 @@ """The tests for the Ring light platform.""" -from unittest.mock import Mock +from unittest.mock import PropertyMock import pytest import ring_doorbell -from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.const import Platform @@ -12,22 +11,22 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from .common import MockConfigEntry, setup_platform - -from tests.common import snapshot_platform +from .common import setup_platform -async def test_states( +async def test_entity_registry( hass: HomeAssistant, - mock_ring_client: Mock, - mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, + mock_ring_client, ) -> None: - """Test states.""" - mock_config_entry.add_to_hass(hass) + """Tests that the devices are 
registered in the entity registry.""" await setup_platform(hass, Platform.LIGHT) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + entry = entity_registry.async_get("light.front_light") + assert entry.unique_id == "765432" + + entry = entity_registry.async_get("light.internal_light") + assert entry.unique_id == "345678" async def test_light_off_reports_correctly( @@ -68,6 +67,25 @@ async def test_light_can_be_turned_on(hass: HomeAssistant, mock_ring_client) -> assert state.state == "on" +async def test_updates_work( + hass: HomeAssistant, mock_ring_client, mock_ring_devices +) -> None: + """Tests the update service works correctly.""" + await setup_platform(hass, Platform.LIGHT) + state = hass.states.get("light.front_light") + assert state.state == "off" + + front_light_mock = mock_ring_devices.get_device(765432) + front_light_mock.lights = "on" + + await hass.services.async_call("ring", "update", {}, blocking=True) + + await hass.async_block_till_done() + + state = hass.states.get("light.front_light") + assert state.state == "on" + + @pytest.mark.parametrize( ("exception_type", "reauth_expected"), [ @@ -91,14 +109,15 @@ async def test_light_errors_when_turned_on( assert not any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) front_light_mock = mock_ring_devices.get_device(765432) - front_light_mock.async_set_lights.side_effect = exception_type + p = PropertyMock(side_effect=exception_type) + type(front_light_mock).lights = p with pytest.raises(HomeAssistantError): await hass.services.async_call( "light", "turn_on", {"entity_id": "light.front_light"}, blocking=True ) await hass.async_block_till_done() - front_light_mock.async_set_lights.assert_called_once() + p.assert_called_once() assert ( any( diff --git a/tests/components/ring/test_number.py b/tests/components/ring/test_number.py deleted file mode 100644 index aa484c6a7b2..00000000000 --- a/tests/components/ring/test_number.py +++ /dev/null @@ -1,95 +0,0 @@ -"""The tests for the Ring number platform.""" - -from unittest.mock import Mock - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.number import ( - ATTR_VALUE, - DOMAIN as NUMBER_DOMAIN, - SERVICE_SET_VALUE, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .common import MockConfigEntry, setup_platform - -from tests.common import snapshot_platform - - -@pytest.mark.parametrize( - ("entity_id", "unique_id"), - [ - ("number.downstairs_volume", "123456-volume"), - ("number.front_door_volume", "987654-volume"), - ("number.ingress_doorbell_volume", "185036587-doorbell_volume"), - ("number.ingress_mic_volume", "185036587-mic_volume"), - ("number.ingress_voice_volume", "185036587-voice_volume"), - ], -) -async def test_entity_registry( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_ring_client: Mock, - entity_id: str, - unique_id: str, -) -> None: - """Tests that the devices are registered in the entity registry.""" - await setup_platform(hass, Platform.NUMBER) - - entry = entity_registry.async_get(entity_id) - assert entry is not None and entry.unique_id == unique_id - - -async def test_states( - hass: HomeAssistant, - mock_ring_client: Mock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test states.""" - - mock_config_entry.add_to_hass(hass) - await setup_platform(hass, 
Platform.NUMBER) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.parametrize( - ("entity_id", "new_value"), - [ - ("number.downstairs_volume", "4.0"), - ("number.front_door_volume", "3.0"), - ("number.ingress_doorbell_volume", "7.0"), - ("number.ingress_mic_volume", "2.0"), - ("number.ingress_voice_volume", "5.0"), - ], -) -async def test_volume_can_be_changed( - hass: HomeAssistant, - mock_ring_client: Mock, - entity_id: str, - new_value: str, -) -> None: - """Tests the volume can be changed correctly.""" - await setup_platform(hass, Platform.NUMBER) - - state = hass.states.get(entity_id) - assert state is not None - old_value = state.state - - # otherwise this test would be pointless - assert old_value != new_value - - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: new_value}, - blocking=True, - ) - - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state is not None and state.state == new_value diff --git a/tests/components/ring/test_sensor.py b/tests/components/ring/test_sensor.py index 48f679c4524..1f05c120251 100644 --- a/tests/components/ring/test_sensor.py +++ b/tests/components/ring/test_sensor.py @@ -1,84 +1,52 @@ """The tests for the Ring sensor platform.""" import logging -from unittest.mock import Mock, patch from freezegun.api import FrozenDateTimeFactory import pytest -from ring_doorbell import Ring -from syrupy.assertion import SnapshotAssertion -from homeassistant.components.ring.const import DOMAIN, SCAN_INTERVAL -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.config_entries import ConfigEntry +from homeassistant.components.ring.const import SCAN_INTERVAL +from homeassistant.components.sensor import ATTR_STATE_CLASS, SensorStateClass from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.setup import async_setup_component -from .common import MockConfigEntry, setup_platform -from .device_mocks import ( - DOWNSTAIRS_DEVICE_ID, - FRONT_DEVICE_ID, - FRONT_DOOR_DEVICE_ID, - INGRESS_DEVICE_ID, - INTERNAL_DEVICE_ID, -) +from .common import setup_platform -from tests.common import async_fire_time_changed, snapshot_platform +from tests.common import async_fire_time_changed -@pytest.fixture -def create_deprecated_and_disabled_sensor_entities( - hass: HomeAssistant, - mock_config_entry: ConfigEntry, - entity_registry: er.EntityRegistry, -): - """Create the entity so it is not ignored by the deprecation check.""" - mock_config_entry.add_to_hass(hass) +async def test_sensor(hass: HomeAssistant, mock_ring_client) -> None: + """Test the Ring sensors.""" + await setup_platform(hass, "sensor") - def create_entry( - device_name, - description, - device_id, - ): - unique_id = f"{device_id}-{description}" - entity_registry.async_get_or_create( - domain=SENSOR_DOMAIN, - platform=DOMAIN, - unique_id=unique_id, - suggested_object_id=f"{device_name}_{description}", - config_entry=mock_config_entry, - ) + front_battery_state = hass.states.get("sensor.front_battery") + assert front_battery_state is not None + assert front_battery_state.state == "80" + assert ( + front_battery_state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT + ) - # Deprecated - create_entry("downstairs", "volume", DOWNSTAIRS_DEVICE_ID) - create_entry("front_door", "volume", FRONT_DEVICE_ID) - create_entry("ingress", 
"doorbell_volume", INGRESS_DEVICE_ID) - create_entry("ingress", "mic_volume", INGRESS_DEVICE_ID) - create_entry("ingress", "voice_volume", INGRESS_DEVICE_ID) + front_door_battery_state = hass.states.get("sensor.front_door_battery") + assert front_door_battery_state is not None + assert front_door_battery_state.state == "100" + assert ( + front_door_battery_state.attributes[ATTR_STATE_CLASS] + == SensorStateClass.MEASUREMENT + ) - # Disabled - for desc in ("wifi_signal_category", "wifi_signal_strength"): - create_entry("downstairs", desc, DOWNSTAIRS_DEVICE_ID) - create_entry("front", desc, FRONT_DEVICE_ID) - create_entry("ingress", desc, INGRESS_DEVICE_ID) - create_entry("front_door", desc, FRONT_DOOR_DEVICE_ID) - create_entry("internal", desc, INTERNAL_DEVICE_ID) + downstairs_volume_state = hass.states.get("sensor.downstairs_volume") + assert downstairs_volume_state is not None + assert downstairs_volume_state.state == "2" + ingress_mic_volume_state = hass.states.get("sensor.ingress_mic_volume") + assert ingress_mic_volume_state.state == "11" -async def test_states( - hass: HomeAssistant, - mock_ring_client: Mock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - create_deprecated_and_disabled_sensor_entities, -) -> None: - """Test states.""" - mock_config_entry.add_to_hass(hass) - await setup_platform(hass, Platform.SENSOR) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + ingress_doorbell_volume_state = hass.states.get("sensor.ingress_doorbell_volume") + assert ingress_doorbell_volume_state.state == "8" + + ingress_voice_volume_state = hass.states.get("sensor.ingress_voice_volume") + assert ingress_voice_volume_state.state == "11" @pytest.mark.parametrize( @@ -139,23 +107,13 @@ async def test_health_sensor( @pytest.mark.parametrize( - ("device_id", "device_name", "sensor_name", "expected_value"), + ("device_name", "sensor_name", "expected_value"), [ - ( - FRONT_DOOR_DEVICE_ID, - "front_door", - "last_motion", - "2017-03-05T15:03:40+00:00", - ), - (FRONT_DOOR_DEVICE_ID, "front_door", "last_ding", "2018-03-05T15:03:40+00:00"), - ( - FRONT_DOOR_DEVICE_ID, - "front_door", - "last_activity", - "2018-03-05T15:03:40+00:00", - ), - (FRONT_DEVICE_ID, "front", "last_motion", "2017-03-05T15:03:40+00:00"), - (INGRESS_DEVICE_ID, "ingress", "last_activity", "2024-02-02T11:21:24+00:00"), + ("front_door", "last_motion", "2017-03-05T15:03:40+00:00"), + ("front_door", "last_ding", "2018-03-05T15:03:40+00:00"), + ("front_door", "last_activity", "2018-03-05T15:03:40+00:00"), + ("front", "last_motion", "2017-03-05T15:03:40+00:00"), + ("ingress", "last_activity", "2024-02-02T11:21:24+00:00"), ], ids=[ "doorbell-motion", @@ -167,31 +125,14 @@ async def test_health_sensor( ) async def test_history_sensor( hass: HomeAssistant, - mock_ring_client: Ring, - mock_config_entry: ConfigEntry, - entity_registry: er.EntityRegistry, + mock_ring_client, freezer: FrozenDateTimeFactory, - device_id: int, - device_name: str, - sensor_name: str, - expected_value: str, + device_name, + sensor_name, + expected_value, ) -> None: """Test the Ring sensors.""" - # Create the entity so it is not ignored by the deprecation check - mock_config_entry.add_to_hass(hass) - - entity_id = f"sensor.{device_name}_{sensor_name}" - unique_id = f"{device_id}-{sensor_name}" - - entity_registry.async_get_or_create( - domain=SENSOR_DOMAIN, - platform=DOMAIN, - unique_id=unique_id, - suggested_object_id=f"{device_name}_{sensor_name}", - 
config_entry=mock_config_entry, - ) - with patch("homeassistant.components.ring.PLATFORMS", [Platform.SENSOR]): - assert await async_setup_component(hass, DOMAIN, {}) + await setup_platform(hass, "sensor") entity_id = f"sensor.{device_name}_{sensor_name}" sensor_state = hass.states.get(entity_id) diff --git a/tests/components/ring/test_siren.py b/tests/components/ring/test_siren.py index 6cfe8aecd57..695b54c3971 100644 --- a/tests/components/ring/test_siren.py +++ b/tests/components/ring/test_siren.py @@ -1,28 +1,15 @@ """The tests for the Ring button platform.""" -from unittest.mock import Mock - import pytest import ring_doorbell -from syrupy.assertion import SnapshotAssertion -from homeassistant.components.siren import DOMAIN as SIREN_DOMAIN from homeassistant.config_entries import SOURCE_REAUTH -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_OFF, - STATE_ON, - Platform, -) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from .common import MockConfigEntry, setup_platform - -from tests.common import snapshot_platform +from .common import setup_platform async def test_entity_registry( @@ -37,20 +24,6 @@ async def test_entity_registry( assert entry.unique_id == "123456-siren" -async def test_states( - hass: HomeAssistant, - mock_ring_client: Mock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test states.""" - - mock_config_entry.add_to_hass(hass) - await setup_platform(hass, Platform.SIREN) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - async def test_sirens_report_correctly(hass: HomeAssistant, mock_ring_client) -> None: """Tests that the initial state of a device that should be on is correct.""" await setup_platform(hass, Platform.SIREN) @@ -76,7 +49,7 @@ async def test_default_ding_chime_can_be_played( await hass.async_block_till_done() downstairs_chime_mock = mock_ring_devices.get_device(123456) - downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="ding") + downstairs_chime_mock.test_sound.assert_called_once_with(kind="ding") state = hass.states.get("siren.downstairs_siren") assert state.state == "unknown" @@ -98,7 +71,7 @@ async def test_turn_on_plays_default_chime( await hass.async_block_till_done() downstairs_chime_mock = mock_ring_devices.get_device(123456) - downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="ding") + downstairs_chime_mock.test_sound.assert_called_once_with(kind="ding") state = hass.states.get("siren.downstairs_siren") assert state.state == "unknown" @@ -122,7 +95,7 @@ async def test_explicit_ding_chime_can_be_played( await hass.async_block_till_done() downstairs_chime_mock = mock_ring_devices.get_device(123456) - downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="ding") + downstairs_chime_mock.test_sound.assert_called_once_with(kind="ding") state = hass.states.get("siren.downstairs_siren") assert state.state == "unknown" @@ -144,7 +117,7 @@ async def test_motion_chime_can_be_played( await hass.async_block_till_done() downstairs_chime_mock = mock_ring_devices.get_device(123456) - downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="motion") + downstairs_chime_mock.test_sound.assert_called_once_with(kind="motion") state = hass.states.get("siren.downstairs_siren") assert 
state.state == "unknown" @@ -173,7 +146,7 @@ async def test_siren_errors_when_turned_on( assert not any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) downstairs_chime_mock = mock_ring_devices.get_device(123456) - downstairs_chime_mock.async_test_sound.side_effect = exception_type + downstairs_chime_mock.test_sound.side_effect = exception_type with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -182,8 +155,7 @@ async def test_siren_errors_when_turned_on( {"entity_id": "siren.downstairs_siren", "tone": "motion"}, blocking=True, ) - downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="motion") - await hass.async_block_till_done() + downstairs_chime_mock.test_sound.assert_called_once_with(kind="motion") assert ( any( flow @@ -192,44 +164,3 @@ async def test_siren_errors_when_turned_on( ) == reauth_expected ) - - -async def test_camera_siren_on_off( - hass: HomeAssistant, mock_ring_client, mock_ring_devices -) -> None: - """Tests siren on a ring camera turns on and off.""" - await setup_platform(hass, Platform.SIREN) - - entity_id = "siren.front_siren" - - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - - await hass.services.async_call( - SIREN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state.state == STATE_ON - - downstairs_chime_mock = mock_ring_devices.get_device(765432) - downstairs_chime_mock.async_set_siren.assert_called_once_with(1) - - downstairs_chime_mock.async_set_siren.reset_mock() - - await hass.services.async_call( - SIREN_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - downstairs_chime_mock.async_set_siren.assert_called_once_with(0) - - assert state.state == STATE_OFF diff --git a/tests/components/ring/test_switch.py b/tests/components/ring/test_switch.py index 22b90253c23..405f20420b7 100644 --- a/tests/components/ring/test_switch.py +++ b/tests/components/ring/test_switch.py @@ -1,72 +1,37 @@ """The tests for the Ring switch platform.""" -from unittest.mock import Mock +from unittest.mock import PropertyMock import pytest import ring_doorbell -from syrupy.assertion import SnapshotAssertion -from homeassistant.components.ring.const import DOMAIN -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_OFF, - STATE_ON, - Platform, -) +from homeassistant.config_entries import SOURCE_REAUTH +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component -from .common import MockConfigEntry, setup_platform - -from tests.common import snapshot_platform +from .common import setup_platform -@pytest.fixture -def create_deprecated_siren_entity( +async def test_entity_registry( hass: HomeAssistant, - mock_config_entry: ConfigEntry, entity_registry: er.EntityRegistry, -): - """Create the entity so it is not ignored by the deprecation check.""" - mock_config_entry.add_to_hass(hass) - - def create_entry(device_name, device_id): - unique_id = f"{device_id}-siren" - - entity_registry.async_get_or_create( - 
domain=SWITCH_DOMAIN, - platform=DOMAIN, - unique_id=unique_id, - suggested_object_id=f"{device_name}_siren", - config_entry=mock_config_entry, - ) - - create_entry("front", 765432) - create_entry("internal", 345678) - - -async def test_states( - hass: HomeAssistant, - mock_ring_client: Mock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - create_deprecated_siren_entity, + mock_ring_client, ) -> None: - """Test states.""" - - mock_config_entry.add_to_hass(hass) + """Tests that the devices are registered in the entity registry.""" await setup_platform(hass, Platform.SWITCH) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + entry = entity_registry.async_get("switch.front_siren") + assert entry.unique_id == "765432-siren" + + entry = entity_registry.async_get("switch.internal_siren") + assert entry.unique_id == "345678-siren" async def test_siren_off_reports_correctly( - hass: HomeAssistant, mock_ring_client, create_deprecated_siren_entity + hass: HomeAssistant, mock_ring_client ) -> None: """Tests that the initial state of a device that should be off is correct.""" await setup_platform(hass, Platform.SWITCH) @@ -77,7 +42,7 @@ async def test_siren_off_reports_correctly( async def test_siren_on_reports_correctly( - hass: HomeAssistant, mock_ring_client, create_deprecated_siren_entity + hass: HomeAssistant, mock_ring_client ) -> None: """Tests that the initial state of a device that should be on is correct.""" await setup_platform(hass, Platform.SWITCH) @@ -87,46 +52,45 @@ async def test_siren_on_reports_correctly( assert state.attributes.get("friendly_name") == "Internal Siren" -@pytest.mark.parametrize( - ("entity_id"), - [ - ("switch.front_siren"), - ("switch.front_door_in_home_chime"), - ("switch.front_motion_detection"), - ], -) -async def test_switch_can_be_turned_on_and_off( - hass: HomeAssistant, - mock_ring_client, - create_deprecated_siren_entity, - entity_id, -) -> None: - """Tests the switch turns on and off correctly.""" +async def test_siren_can_be_turned_on(hass: HomeAssistant, mock_ring_client) -> None: + """Tests the siren turns on correctly.""" await setup_platform(hass, Platform.SWITCH) - assert hass.states.get(entity_id) + state = hass.states.get("switch.front_siren") + assert state.state == "off" await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, + "switch", "turn_on", {"entity_id": "switch.front_siren"}, blocking=True + ) + + await hass.async_block_till_done() + state = hass.states.get("switch.front_siren") + assert state.state == "on" + + +async def test_updates_work( + hass: HomeAssistant, mock_ring_client, mock_ring_devices +) -> None: + """Tests the update service works correctly.""" + await setup_platform(hass, Platform.SWITCH) + state = hass.states.get("switch.front_siren") + assert state.state == "off" + + front_siren_mock = mock_ring_devices.get_device(765432) + front_siren_mock.siren = 20 + + await async_setup_component(hass, "homeassistant", {}) + await hass.services.async_call( + "homeassistant", + "update_entity", + {ATTR_ENTITY_ID: ["switch.front_siren"]}, blocking=True, ) await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == STATE_ON - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == STATE_OFF + state 
= hass.states.get("switch.front_siren") + assert state.state == "on" @pytest.mark.parametrize( @@ -144,7 +108,6 @@ async def test_switch_errors_when_turned_on( mock_ring_devices, exception_type, reauth_expected, - create_deprecated_siren_entity, ) -> None: """Tests the switch turns on correctly.""" await setup_platform(hass, Platform.SWITCH) @@ -153,14 +116,15 @@ async def test_switch_errors_when_turned_on( assert not any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) front_siren_mock = mock_ring_devices.get_device(765432) - front_siren_mock.async_set_siren.side_effect = exception_type + p = PropertyMock(side_effect=exception_type) + type(front_siren_mock).siren = p with pytest.raises(HomeAssistantError): await hass.services.async_call( "switch", "turn_on", {"entity_id": "switch.front_siren"}, blocking=True ) await hass.async_block_till_done() - front_siren_mock.async_set_siren.assert_called_once() + p.assert_called_once() assert ( any( flow diff --git a/tests/components/risco/conftest.py b/tests/components/risco/conftest.py index 3961d85d694..ab3b64b245d 100644 --- a/tests/components/risco/conftest.py +++ b/tests/components/risco/conftest.py @@ -1,10 +1,7 @@ """Fixtures for Risco tests.""" -from collections.abc import AsyncGenerator -from typing import Any from unittest.mock import MagicMock, PropertyMock, patch -from pyrisco.cloud.event import Event import pytest from homeassistant.components.risco.const import DOMAIN, TYPE_LOCAL @@ -16,7 +13,6 @@ from homeassistant.const import ( CONF_TYPE, CONF_USERNAME, ) -from homeassistant.core import HomeAssistant from .util import TEST_SITE_NAME, TEST_SITE_UUID, system_mock, zone_mock @@ -120,19 +116,19 @@ def two_zone_local(): @pytest.fixture -def options() -> dict[str, Any]: +def options(): """Fixture for default (empty) options.""" return {} @pytest.fixture -def events() -> list[Event]: +def events(): """Fixture for default (empty) events.""" return [] @pytest.fixture -def cloud_config_entry(hass: HomeAssistant, options: dict[str, Any]) -> MockConfigEntry: +def cloud_config_entry(hass, options): """Fixture for a cloud config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -155,9 +151,7 @@ def login_with_error(exception): @pytest.fixture -async def setup_risco_cloud( - hass: HomeAssistant, cloud_config_entry: MockConfigEntry, events: list[Event] -) -> AsyncGenerator[MockConfigEntry]: +async def setup_risco_cloud(hass, cloud_config_entry, events): """Set up a Risco integration for testing.""" with ( patch( @@ -187,7 +181,7 @@ async def setup_risco_cloud( @pytest.fixture -def local_config_entry(hass: HomeAssistant, options: dict[str, Any]) -> MockConfigEntry: +def local_config_entry(hass, options): """Fixture for a local config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, data=TEST_LOCAL_CONFIG, options=options @@ -207,9 +201,7 @@ def connect_with_error(exception): @pytest.fixture -async def setup_risco_local( - hass: HomeAssistant, local_config_entry: MockConfigEntry -) -> AsyncGenerator[MockConfigEntry]: +async def setup_risco_local(hass, local_config_entry): """Set up a local Risco integration for testing.""" with ( patch( diff --git a/tests/components/risco/test_alarm_control_panel.py b/tests/components/risco/test_alarm_control_panel.py index 8caef1fbfc4..53d5b9573b6 100644 --- a/tests/components/risco/test_alarm_control_panel.py +++ b/tests/components/risco/test_alarm_control_panel.py @@ -1,7 +1,5 @@ """Tests for the Risco alarm control panel device.""" -from collections.abc import Callable -from typing 
import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch import pytest @@ -9,7 +7,6 @@ import pytest from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_DOMAIN, AlarmControlPanelEntityFeature, - AlarmControlPanelState, ) from homeassistant.components.risco import CannotConnectError, UnauthorizedError from homeassistant.components.risco.const import DOMAIN @@ -19,6 +16,13 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_CUSTOM_BYPASS, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMED, + STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -36,25 +40,25 @@ SECOND_LOCAL_ENTITY_ID = "alarm_control_panel.name_1" CODES_REQUIRED_OPTIONS = {"code_arm_required": True, "code_disarm_required": True} TEST_RISCO_TO_HA = { - "arm": AlarmControlPanelState.ARMED_AWAY, - "partial_arm": AlarmControlPanelState.ARMED_HOME, - "A": AlarmControlPanelState.ARMED_HOME, - "B": AlarmControlPanelState.ARMED_HOME, - "C": AlarmControlPanelState.ARMED_NIGHT, - "D": AlarmControlPanelState.ARMED_NIGHT, + "arm": STATE_ALARM_ARMED_AWAY, + "partial_arm": STATE_ALARM_ARMED_HOME, + "A": STATE_ALARM_ARMED_HOME, + "B": STATE_ALARM_ARMED_HOME, + "C": STATE_ALARM_ARMED_NIGHT, + "D": STATE_ALARM_ARMED_NIGHT, } TEST_FULL_RISCO_TO_HA = { **TEST_RISCO_TO_HA, - "D": AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + "D": STATE_ALARM_ARMED_CUSTOM_BYPASS, } TEST_HA_TO_RISCO = { - AlarmControlPanelState.ARMED_AWAY: "arm", - AlarmControlPanelState.ARMED_HOME: "partial_arm", - AlarmControlPanelState.ARMED_NIGHT: "C", + STATE_ALARM_ARMED_AWAY: "arm", + STATE_ALARM_ARMED_HOME: "partial_arm", + STATE_ALARM_ARMED_NIGHT: "C", } TEST_FULL_HA_TO_RISCO = { **TEST_HA_TO_RISCO, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS: "D", + STATE_ALARM_ARMED_CUSTOM_BYPASS: "D", } CUSTOM_MAPPING_OPTIONS = { "risco_states_to_ha": TEST_RISCO_TO_HA, @@ -176,13 +180,8 @@ async def test_cloud_setup( async def _check_cloud_state( - hass: HomeAssistant, - partitions: dict[int, Any], - property: str, - state: str, - entity_id: str, - partition_id: int, -) -> None: + hass, partitions, property, state, entity_id, partition_id +): with patch.object(partitions[partition_id], property, return_value=True): await async_update_entity(hass, entity_id) await hass.async_block_till_done() @@ -204,7 +203,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "triggered", - AlarmControlPanelState.TRIGGERED, + STATE_ALARM_TRIGGERED, entity_id, partition_id, ) @@ -212,7 +211,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "arming", - AlarmControlPanelState.ARMING, + STATE_ALARM_ARMING, entity_id, partition_id, ) @@ -220,7 +219,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "armed", - AlarmControlPanelState.ARMED_AWAY, + STATE_ALARM_ARMED_AWAY, entity_id, partition_id, ) @@ -228,7 +227,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "partially_armed", - AlarmControlPanelState.ARMED_HOME, + STATE_ALARM_ARMED_HOME, entity_id, partition_id, ) @@ -236,7 +235,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "disarmed", - AlarmControlPanelState.DISARMED, + STATE_ALARM_DISARMED, entity_id, partition_id, ) @@ -251,15 +250,13 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "partially_armed", - AlarmControlPanelState.ARMED_NIGHT, + STATE_ALARM_ARMED_NIGHT, entity_id, partition_id, ) -async def 
_call_alarm_service( - hass: HomeAssistant, service: str, entity_id: str, **kwargs: Any -) -> None: +async def _call_alarm_service(hass, service, entity_id, **kwargs): data = {"entity_id": entity_id, **kwargs} await hass.services.async_call( @@ -268,27 +265,16 @@ async def _call_alarm_service( async def _test_cloud_service_call( - hass: HomeAssistant, - service: str, - method: str, - entity_id: str, - partition_id: int, - *args: Any, - **kwargs: Any, -) -> None: + hass, service, method, entity_id, partition_id, *args, **kwargs +): with patch(f"homeassistant.components.risco.RiscoCloud.{method}") as set_mock: await _call_alarm_service(hass, service, entity_id, **kwargs) set_mock.assert_awaited_once_with(partition_id, *args) async def _test_cloud_no_service_call( - hass: HomeAssistant, - service: str, - method: str, - entity_id: str, - partition_id: int, - **kwargs: Any, -) -> None: + hass, service, method, entity_id, partition_id, **kwargs +): with patch(f"homeassistant.components.risco.RiscoCloud.{method}") as set_mock: await _call_alarm_service(hass, service, entity_id, **kwargs) set_mock.assert_not_awaited() @@ -545,14 +531,8 @@ async def test_local_setup( async def _check_local_state( - hass: HomeAssistant, - partitions: dict[int, Any], - property: str, - state: str, - entity_id: str, - partition_id: int, - callback: Callable, -) -> None: + hass, partitions, property, state, entity_id, partition_id, callback +): with patch.object(partitions[partition_id], property, return_value=True): await callback(partition_id, partitions[partition_id]) @@ -589,7 +569,7 @@ async def test_local_states( hass, two_part_local_alarm, "triggered", - AlarmControlPanelState.TRIGGERED, + STATE_ALARM_TRIGGERED, entity_id, partition_id, callback, @@ -598,7 +578,7 @@ async def test_local_states( hass, two_part_local_alarm, "arming", - AlarmControlPanelState.ARMING, + STATE_ALARM_ARMING, entity_id, partition_id, callback, @@ -607,7 +587,7 @@ async def test_local_states( hass, two_part_local_alarm, "armed", - AlarmControlPanelState.ARMED_AWAY, + STATE_ALARM_ARMED_AWAY, entity_id, partition_id, callback, @@ -616,7 +596,7 @@ async def test_local_states( hass, two_part_local_alarm, "partially_armed", - AlarmControlPanelState.ARMED_HOME, + STATE_ALARM_ARMED_HOME, entity_id, partition_id, callback, @@ -625,7 +605,7 @@ async def test_local_states( hass, two_part_local_alarm, "disarmed", - AlarmControlPanelState.DISARMED, + STATE_ALARM_DISARMED, entity_id, partition_id, callback, @@ -641,7 +621,7 @@ async def test_local_states( hass, two_part_local_alarm, "partially_armed", - AlarmControlPanelState.ARMED_NIGHT, + STATE_ALARM_ARMED_NIGHT, entity_id, partition_id, callback, @@ -649,27 +629,16 @@ async def test_local_states( async def _test_local_service_call( - hass: HomeAssistant, - service: str, - method: str, - entity_id: str, - partition: int, - *args: Any, - **kwargs: Any, -) -> None: + hass, service, method, entity_id, partition, *args, **kwargs +): with patch.object(partition, method, AsyncMock()) as set_mock: await _call_alarm_service(hass, service, entity_id, **kwargs) set_mock.assert_awaited_once_with(*args) async def _test_local_no_service_call( - hass: HomeAssistant, - service: str, - method: str, - entity_id: str, - partition: int, - **kwargs: Any, -) -> None: + hass, service, method, entity_id, partition, **kwargs +): with patch.object(partition, method, AsyncMock()) as set_mock: await _call_alarm_service(hass, service, entity_id, **kwargs) set_mock.assert_not_awaited() diff --git 
a/tests/components/risco/test_binary_sensor.py b/tests/components/risco/test_binary_sensor.py index 600cfa02c0e..b6ff29a0bce 100644 --- a/tests/components/risco/test_binary_sensor.py +++ b/tests/components/risco/test_binary_sensor.py @@ -1,8 +1,6 @@ """Tests for the Risco binary sensors.""" -from collections.abc import Callable -from typing import Any -from unittest.mock import MagicMock, PropertyMock, patch +from unittest.mock import PropertyMock, patch import pytest @@ -61,13 +59,7 @@ async def test_cloud_setup( assert device.manufacturer == "Risco" -async def _check_cloud_state( - hass: HomeAssistant, - zones: dict[int, Any], - triggered: bool, - entity_id: str, - zone_id: int, -) -> None: +async def _check_cloud_state(hass, zones, triggered, entity_id, zone_id): with patch.object( zones[zone_id], "triggered", @@ -138,14 +130,8 @@ async def test_local_setup( async def _check_local_state( - hass: HomeAssistant, - zones: dict[int, Any], - entity_property: str, - value: bool, - entity_id: str, - zone_id: int, - callback: Callable, -) -> None: + hass, zones, entity_property, value, entity_id, zone_id, callback +): with patch.object( zones[zone_id], entity_property, @@ -232,13 +218,7 @@ async def test_armed_local_states( ) -async def _check_system_state( - hass: HomeAssistant, - system: MagicMock, - entity_property: str, - value: bool, - callback: Callable, -) -> None: +async def _check_system_state(hass, system, entity_property, value, callback): with patch.object( system, entity_property, diff --git a/tests/components/risco/test_config_flow.py b/tests/components/risco/test_config_flow.py index cff5f80e6c4..9fade18ea96 100644 --- a/tests/components/risco/test_config_flow.py +++ b/tests/components/risco/test_config_flow.py @@ -154,12 +154,14 @@ async def test_form_cloud_already_exists(hass: HomeAssistant) -> None: assert result3["reason"] == "already_configured" -async def test_form_reauth( - hass: HomeAssistant, cloud_config_entry: MockConfigEntry -) -> None: +async def test_form_reauth(hass: HomeAssistant, cloud_config_entry) -> None: """Test reauthenticate.""" - result = await cloud_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + data=cloud_config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -192,11 +194,15 @@ async def test_form_reauth( async def test_form_reauth_with_new_username( - hass: HomeAssistant, cloud_config_entry: MockConfigEntry + hass: HomeAssistant, cloud_config_entry ) -> None: """Test reauthenticate with new username.""" - result = await cloud_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + data=cloud_config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/risco/test_sensor.py b/tests/components/risco/test_sensor.py index 6a3ac6f42e3..72444bdc9f2 100644 --- a/tests/components/risco/test_sensor.py +++ b/tests/components/risco/test_sensor.py @@ -136,7 +136,7 @@ async def test_error_on_login( assert not entity_registry.async_is_registered(entity_id) -def _check_state(hass: HomeAssistant, category: str, entity_id: str) -> None: +def _check_state(hass, category, entity_id): event_index = CATEGORIES_TO_EVENTS[category] event = TEST_EVENTS[event_index] state = hass.states.get(entity_id) @@ -160,7 +160,7 @@ def _check_state(hass: HomeAssistant, 
category: str, entity_id: str) -> None: @pytest.fixture -async def _set_utc_time_zone(hass: HomeAssistant) -> None: +async def _set_utc_time_zone(hass): await hass.config.async_set_time_zone("UTC") @@ -174,10 +174,11 @@ def save_mock(): @pytest.mark.parametrize("events", [TEST_EVENTS]) -@pytest.mark.usefixtures("two_zone_cloud", "_set_utc_time_zone") async def test_cloud_setup( hass: HomeAssistant, entity_registry: er.EntityRegistry, + two_zone_cloud, + _set_utc_time_zone, save_mock, setup_risco_cloud, ) -> None: @@ -206,9 +207,11 @@ async def test_cloud_setup( _check_state(hass, category, entity_id) -@pytest.mark.usefixtures("setup_risco_local", "_no_zones_and_partitions") async def test_local_setup( + hass: HomeAssistant, entity_registry: er.EntityRegistry, + setup_risco_local, + _no_zones_and_partitions, ) -> None: """Test entity setup.""" for entity_id in ENTITY_IDS.values(): diff --git a/tests/components/risco/test_switch.py b/tests/components/risco/test_switch.py index 54e7bc3ca0c..acf80462d54 100644 --- a/tests/components/risco/test_switch.py +++ b/tests/components/risco/test_switch.py @@ -1,7 +1,5 @@ """Tests for the Risco binary sensors.""" -from collections.abc import Callable -from typing import Any from unittest.mock import PropertyMock, patch import pytest @@ -42,13 +40,7 @@ async def test_cloud_setup( assert entity_registry.async_is_registered(SECOND_ENTITY_ID) -async def _check_cloud_state( - hass: HomeAssistant, - zones: dict[int, Any], - bypassed: bool, - entity_id: str, - zone_id: int, -) -> None: +async def _check_cloud_state(hass, zones, bypassed, entity_id, zone_id): with patch.object( zones[zone_id], "bypassed", @@ -125,14 +117,7 @@ async def test_local_setup( assert entity_registry.async_is_registered(SECOND_ENTITY_ID) -async def _check_local_state( - hass: HomeAssistant, - zones: dict[int, Any], - bypassed: bool, - entity_id: str, - zone_id: int, - callback: Callable, -) -> None: +async def _check_local_state(hass, zones, bypassed, entity_id, zone_id, callback): with patch.object( zones[zone_id], "bypassed", diff --git a/tests/components/rmvtransport/test_sensor.py b/tests/components/rmvtransport/test_sensor.py index 47728be438c..c17eaac2105 100644 --- a/tests/components/rmvtransport/test_sensor.py +++ b/tests/components/rmvtransport/test_sensor.py @@ -32,23 +32,6 @@ VALID_CONFIG_MISC = { } VALID_CONFIG_DEST = { - "sensor": { - "platform": "rmvtransport", - "next_departure": [ - { - "station": "3000010", - "destinations": [ - "Frankfurt (Main) Flughafen Regionalbahnhof", - "Frankfurt (Main) Stadion", - ], - "lines": [12, "S8"], - "time_offset": 15, - } - ], - } -} - -VALID_CONFIG_DEST_ONLY = { "sensor": { "platform": "rmvtransport", "next_departure": [ @@ -161,19 +144,6 @@ def get_departures_mock(): "info_long": None, "icon": "https://products/32_pic.png", }, - { - "product": "Bus", - "number": 12, - "trainId": "1234568", - "direction": "Frankfurt (Main) Hugo-Junkers-Straße/Schleife", - "departure_time": datetime.datetime(2018, 8, 6, 14, 30), - "minutes": 16, - "delay": 0, - "stops": ["Frankfurt (Main) Stadion"], - "info": None, - "info_long": None, - "icon": "https://products/32_pic.png", - }, ], } @@ -245,26 +215,6 @@ async def test_rmvtransport_dest_config(hass: HomeAssistant) -> None: assert await async_setup_component(hass, "sensor", VALID_CONFIG_DEST) await hass.async_block_till_done() - state = hass.states.get("sensor.frankfurt_main_hauptbahnhof") - assert state is not None - assert state.state == "16" - assert ( - state.attributes["direction"] == "Frankfurt 
(Main) Hugo-Junkers-Straße/Schleife" - ) - assert state.attributes["line"] == 12 - assert state.attributes["minutes"] == 16 - assert state.attributes["departure_time"] == datetime.datetime(2018, 8, 6, 14, 30) - - -async def test_rmvtransport_dest_only_config(hass: HomeAssistant) -> None: - """Test destination configuration.""" - with patch( - "RMVtransport.RMVtransport.get_departures", - return_value=get_departures_mock(), - ): - assert await async_setup_component(hass, "sensor", VALID_CONFIG_DEST_ONLY) - await hass.async_block_till_done() - state = hass.states.get("sensor.frankfurt_main_hauptbahnhof") assert state.state == "11" assert ( diff --git a/tests/components/roborock/conftest.py b/tests/components/roborock/conftest.py index 357c644e2fe..a7ebbf10af3 100644 --- a/tests/components/roborock/conftest.py +++ b/tests/components/roborock/conftest.py @@ -4,8 +4,8 @@ from copy import deepcopy from unittest.mock import patch import pytest -from roborock import RoborockCategory, RoomMapping -from roborock.code_mappings import DyadError, RoborockDyadStateCode, ZeoError, ZeoState +from roborock import RoomMapping +from roborock.code_mappings import DyadError, RoborockDyadStateCode from roborock.roborock_message import RoborockDyadDataProtocol, RoborockZeoProtocol from roborock.version_a01_apis import RoborockMqttClientA01 @@ -38,22 +38,14 @@ class A01Mock(RoborockMqttClientA01): def __init__(self, user_data, device_info, category) -> None: """Initialize the A01Mock.""" super().__init__(user_data, device_info, category) - if category == RoborockCategory.WET_DRY_VAC: - self.protocol_responses = { - RoborockDyadDataProtocol.STATUS: RoborockDyadStateCode.drying.name, - RoborockDyadDataProtocol.POWER: 100, - RoborockDyadDataProtocol.MESH_LEFT: 111, - RoborockDyadDataProtocol.BRUSH_LEFT: 222, - RoborockDyadDataProtocol.ERROR: DyadError.none.name, - RoborockDyadDataProtocol.TOTAL_RUN_TIME: 213, - } - elif category == RoborockCategory.WASHING_MACHINE: - self.protocol_responses: list[RoborockZeoProtocol] = { - RoborockZeoProtocol.STATE: ZeoState.drying.name, - RoborockZeoProtocol.COUNTDOWN: 0, - RoborockZeoProtocol.WASHING_LEFT: 253, - RoborockZeoProtocol.ERROR: ZeoError.none.name, - } + self.protocol_responses = { + RoborockDyadDataProtocol.STATUS: RoborockDyadStateCode.drying.name, + RoborockDyadDataProtocol.POWER: 100, + RoborockDyadDataProtocol.MESH_LEFT: 111, + RoborockDyadDataProtocol.BRUSH_LEFT: 222, + RoborockDyadDataProtocol.ERROR: DyadError.none.name, + RoborockDyadDataProtocol.TOTAL_RUN_TIME: 213, + } async def update_values( self, dyad_data_protocols: list[RoborockDyadDataProtocol | RoborockZeoProtocol] diff --git a/tests/components/roborock/snapshots/test_diagnostics.ambr b/tests/components/roborock/snapshots/test_diagnostics.ambr index 26ecb729312..4318b537a2c 100644 --- a/tests/components/roborock/snapshots/test_diagnostics.ambr +++ b/tests/components/roborock/snapshots/test_diagnostics.ambr @@ -102,7 +102,6 @@ 'id': '120', 'mode': 'ro', 'name': '错误代码', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -110,7 +109,6 @@ 'id': '121', 'mode': 'ro', 'name': '设备状态', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -118,7 +116,6 @@ 'id': '122', 'mode': 'ro', 'name': '设备电量', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -126,7 +123,6 @@ 'id': '123', 'mode': 'rw', 'name': '清扫模式', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -134,7 +130,6 @@ 'id': '124', 'mode': 'rw', 'name': '拖地模式', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ 
-142,7 +137,6 @@ 'id': '125', 'mode': 'rw', 'name': '主刷寿命', - 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -150,7 +144,6 @@ 'id': '126', 'mode': 'rw', 'name': '边刷寿命', - 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -158,7 +151,6 @@ 'id': '127', 'mode': 'rw', 'name': '滤网寿命', - 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -389,7 +381,6 @@ 'id': '120', 'mode': 'ro', 'name': '错误代码', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -397,7 +388,6 @@ 'id': '121', 'mode': 'ro', 'name': '设备状态', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -405,7 +395,6 @@ 'id': '122', 'mode': 'ro', 'name': '设备电量', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -413,7 +402,6 @@ 'id': '123', 'mode': 'rw', 'name': '清扫模式', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -421,7 +409,6 @@ 'id': '124', 'mode': 'rw', 'name': '拖地模式', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -429,7 +416,6 @@ 'id': '125', 'mode': 'rw', 'name': '主刷寿命', - 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -437,7 +423,6 @@ 'id': '126', 'mode': 'rw', 'name': '边刷寿命', - 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -445,7 +430,6 @@ 'id': '127', 'mode': 'rw', 'name': '滤网寿命', - 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -967,355 +951,6 @@ }), }), }), - '**REDACTED-3**': dict({ - 'api': dict({ - 'misc_info': dict({ - }), - }), - 'roborock_device_info': dict({ - 'device': dict({ - 'activeTime': 1699964128, - 'deviceStatus': dict({ - '10001': '{"f":"t"}', - '10005': '{"sn":"zeo_sn","ssid":"internet","timezone":"Europe/Berlin","posix_timezone":"CET-1CEST,M3.5.0,M10.5.0/3","ip":"192.111.11.11","mac":"b0:4a:00:00:00:00","rssi":-57,"oba":{"language":"en","name":"A.03.0403_CE","bom":"A.03.0403","location":"de","wifiplan":"EU","timezone":"CET-1CEST,M3.5.0,M10.5.0/3;Europe/Berlin","logserver":"awsde0","loglevel":"4","featureset":"0"}}', - '10007': '{"mqttOtaData":{"mqttOtaStatus":{"status":"IDLE"}}}', - '200': 1, - '201': 0, - '202': 1, - '203': 7, - '204': 1, - '205': 33, - '206': 0, - '207': 4, - '208': 2, - '209': 7, - '210': 1, - '211': 1, - '212': 1, - '213': 2, - '214': 2, - '217': 0, - '218': 227, - '219': 0, - '220': 0, - '221': 0, - '222': 347414, - '223': 0, - '224': 21, - '225': 0, - '226': 0, - '227': 1, - '232': 0, - }), - 'duid': '**REDACTED**', - 'f': False, - 'featureSet': '0', - 'fv': '01.00.94', - 'iconUrl': '', - 'localKey': '**REDACTED**', - 'name': 'Zeo One', - 'newFeatureSet': '40', - 'online': True, - 'productId': 'zeo_id', - 'pv': 'A01', - 'share': True, - 'shareTime': 1712763572, - 'silentOtaSwitch': False, - 'sn': 'zeo_sn', - 'timeZoneId': 'Europe/Berlin', - 'tuyaMigrated': False, - }), - 'product': dict({ - 'capability': 2, - 'category': 'roborock.wm', - 'id': 'zeo_id', - 'model': 'roborock.wm.a102', - 'name': 'Zeo One', - 'schema': list([ - dict({ - 'code': 'drying_status', - 'id': '134', - 'mode': 'ro', - 'name': '烘干状态', - 'type': 'RAW', - }), - dict({ - 'code': 'start', - 'id': '200', - 'mode': 'rw', - 'name': '启动', - 'type': 'BOOL', - }), - dict({ - 'code': 'pause', - 'id': '201', - 'mode': 'rw', - 'name': '暂停', - 'type': 'BOOL', - }), - dict({ - 'code': 'shutdown', - 'id': '202', - 'mode': 'rw', - 
'name': '关机', - 'type': 'BOOL', - }), - dict({ - 'code': 'status', - 'id': '203', - 'mode': 'ro', - 'name': '状态', - 'type': 'VALUE', - }), - dict({ - 'code': 'mode', - 'id': '204', - 'mode': 'rw', - 'name': '模式', - 'type': 'VALUE', - }), - dict({ - 'code': 'program', - 'id': '205', - 'mode': 'rw', - 'name': '程序', - 'type': 'VALUE', - }), - dict({ - 'code': 'child_lock', - 'id': '206', - 'mode': 'rw', - 'name': '童锁', - 'type': 'BOOL', - }), - dict({ - 'code': 'temp', - 'id': '207', - 'mode': 'rw', - 'name': '洗涤温度', - 'type': 'VALUE', - }), - dict({ - 'code': 'rinse_times', - 'id': '208', - 'mode': 'rw', - 'name': '漂洗次数', - 'type': 'VALUE', - }), - dict({ - 'code': 'spin_level', - 'id': '209', - 'mode': 'rw', - 'name': '滚筒转速', - 'type': 'VALUE', - }), - dict({ - 'code': 'drying_mode', - 'id': '210', - 'mode': 'rw', - 'name': '干燥度', - 'type': 'VALUE', - }), - dict({ - 'code': 'detergent_set', - 'id': '211', - 'mode': 'rw', - 'name': '自动投放-洗衣液', - 'type': 'BOOL', - }), - dict({ - 'code': 'softener_set', - 'id': '212', - 'mode': 'rw', - 'name': '自动投放-柔顺剂', - 'type': 'BOOL', - }), - dict({ - 'code': 'detergent_type', - 'id': '213', - 'mode': 'rw', - 'name': '洗衣液投放量', - 'type': 'VALUE', - }), - dict({ - 'code': 'softener_type', - 'id': '214', - 'mode': 'rw', - 'name': '柔顺剂投放量', - 'type': 'VALUE', - }), - dict({ - 'code': 'countdown', - 'id': '217', - 'mode': 'rw', - 'name': '预约时间', - 'type': 'VALUE', - }), - dict({ - 'code': 'washing_left', - 'id': '218', - 'mode': 'ro', - 'name': '洗衣剩余时间', - 'type': 'VALUE', - }), - dict({ - 'code': 'doorlock_state', - 'id': '219', - 'mode': 'ro', - 'name': '门锁状态', - 'type': 'BOOL', - }), - dict({ - 'code': 'error', - 'id': '220', - 'mode': 'ro', - 'name': '故障', - 'type': 'VALUE', - }), - dict({ - 'code': 'custom_param_save', - 'id': '221', - 'mode': 'rw', - 'name': '云程序设置', - 'type': 'VALUE', - }), - dict({ - 'code': 'custom_param_get', - 'id': '222', - 'mode': 'ro', - 'name': '云程序读取', - 'type': 'VALUE', - }), - dict({ - 'code': 'sound_set', - 'id': '223', - 'mode': 'rw', - 'name': '提示音', - 'type': 'BOOL', - }), - dict({ - 'code': 'times_after_clean', - 'id': '224', - 'mode': 'ro', - 'name': '距离上次筒自洁次数', - 'type': 'VALUE', - }), - dict({ - 'code': 'default_setting', - 'id': '225', - 'mode': 'rw', - 'name': '记忆洗衣偏好开关', - 'type': 'BOOL', - }), - dict({ - 'code': 'detergent_empty', - 'id': '226', - 'mode': 'ro', - 'name': '洗衣液用尽', - 'type': 'BOOL', - }), - dict({ - 'code': 'softener_empty', - 'id': '227', - 'mode': 'ro', - 'name': '柔顺剂用尽', - 'type': 'BOOL', - }), - dict({ - 'code': 'light_setting', - 'id': '229', - 'mode': 'rw', - 'name': '筒灯设定', - 'type': 'BOOL', - }), - dict({ - 'code': 'detergent_volume', - 'id': '230', - 'mode': 'rw', - 'name': '洗衣液投放量(单次)', - 'type': 'VALUE', - }), - dict({ - 'code': 'softener_volume', - 'id': '231', - 'mode': 'rw', - 'name': '柔顺剂投放量(单次)', - 'type': 'VALUE', - }), - dict({ - 'code': 'app_authorization', - 'id': '232', - 'mode': 'rw', - 'name': '远程控制授权', - 'type': 'VALUE', - }), - dict({ - 'code': 'id_query', - 'id': '10000', - 'mode': 'rw', - 'name': 'ID点查询', - 'type': 'STRING', - }), - dict({ - 'code': 'f_c', - 'id': '10001', - 'mode': 'ro', - 'name': '防串货', - 'type': 'STRING', - }), - dict({ - 'code': 'snd_state', - 'id': '10004', - 'mode': 'rw', - 'name': '语音包/OBA信息', - 'type': 'STRING', - }), - dict({ - 'code': 'product_info', - 'id': '10005', - 'mode': 'ro', - 'name': '产品信息', - 'type': 'STRING', - }), - dict({ - 'code': 'privacy_info', - 'id': '10006', - 'mode': 'rw', - 'name': '隐私协议', - 'type': 'STRING', - }), - dict({ 
- 'code': 'ota_nfo', - 'id': '10007', - 'mode': 'rw', - 'name': 'OTA info', - 'type': 'STRING', - }), - dict({ - 'code': 'washing_log', - 'id': '10008', - 'mode': 'ro', - 'name': '洗衣记录', - 'type': 'BOOL', - }), - dict({ - 'code': 'rpc_req', - 'id': '10101', - 'mode': 'wo', - 'name': 'rpc req', - 'type': 'STRING', - }), - dict({ - 'code': 'rpc_resp', - 'id': '10102', - 'mode': 'ro', - 'name': 'rpc resp', - 'type': 'STRING', - }), - ]), - }), - }), - }), }), }) # --- diff --git a/tests/components/roborock/test_button.py b/tests/components/roborock/test_button.py index 43ef043f79c..88cf5beab15 100644 --- a/tests/components/roborock/test_button.py +++ b/tests/components/roborock/test_button.py @@ -3,11 +3,9 @@ from unittest.mock import patch import pytest -import roborock from homeassistant.components.button import SERVICE_PRESS from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry @@ -18,7 +16,7 @@ from tests.common import MockConfigEntry ("button.roborock_s7_maxv_reset_sensor_consumable"), ("button.roborock_s7_maxv_reset_air_filter_consumable"), ("button.roborock_s7_maxv_reset_side_brush_consumable"), - ("button.roborock_s7_maxv_reset_main_brush_consumable"), + "button.roborock_s7_maxv_reset_main_brush_consumable", ], ) @pytest.mark.freeze_time("2023-10-30 08:50:00") @@ -43,37 +41,3 @@ async def test_update_success( ) assert mock_send_message.assert_called_once assert hass.states.get(entity_id).state == "2023-10-30T08:50:00+00:00" - - -@pytest.mark.parametrize( - ("entity_id"), - [ - ("button.roborock_s7_maxv_reset_air_filter_consumable"), - ], -) -@pytest.mark.freeze_time("2023-10-30 08:50:00") -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_update_failure( - hass: HomeAssistant, - bypass_api_fixture, - setup_entry: MockConfigEntry, - entity_id: str, -) -> None: - """Test failure while pressing the button entity.""" - # Ensure that the entity exist, as these test can pass even if there is no entity. 
-    assert hass.states.get(entity_id).state == "unknown"
-    with (
-        patch(
-            "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.send_message",
-            side_effect=roborock.exceptions.RoborockTimeout,
-        ) as mock_send_message,
-        pytest.raises(HomeAssistantError, match="Error while calling RESET_CONSUMABLE"),
-    ):
-        await hass.services.async_call(
-            "button",
-            SERVICE_PRESS,
-            blocking=True,
-            target={"entity_id": entity_id},
-        )
-    assert mock_send_message.assert_called_once
-    assert hass.states.get(entity_id).state == "2023-10-30T08:50:00+00:00"
diff --git a/tests/components/roborock/test_config_flow.py b/tests/components/roborock/test_config_flow.py
index 39d8117847c..a5a86e44372 100644
--- a/tests/components/roborock/test_config_flow.py
+++ b/tests/components/roborock/test_config_flow.py
@@ -4,7 +4,6 @@ from copy import deepcopy
 from unittest.mock import patch
 
 import pytest
-from roborock import RoborockTooFrequentCodeRequests
 from roborock.exceptions import (
     RoborockAccountDoesNotExist,
     RoborockException,
@@ -72,7 +71,6 @@ async def test_config_flow_success(
         (RoborockException(), {"base": "unknown_roborock"}),
         (RoborockAccountDoesNotExist(), {"base": "invalid_email"}),
         (RoborockInvalidEmail(), {"base": "invalid_email_format"}),
-        (RoborockTooFrequentCodeRequests(), {"base": "too_frequent_code_requests"}),
         (RoborockUrlException(), {"base": "unknown_url"}),
         (Exception(), {"base": "unknown"}),
     ],
diff --git a/tests/components/roborock/test_init.py b/tests/components/roborock/test_init.py
index cace9a8ed67..704f093d3fd 100644
--- a/tests/components/roborock/test_init.py
+++ b/tests/components/roborock/test_init.py
@@ -176,21 +176,3 @@ async def test_not_supported_protocol(
         await async_setup_component(hass, DOMAIN, {})
         await hass.async_block_till_done()
     assert "because its protocol version random" in caplog.text
-
-
-async def test_not_supported_a01_device(
-    hass: HomeAssistant,
-    bypass_api_fixture,
-    mock_roborock_entry: MockConfigEntry,
-    caplog: pytest.LogCaptureFixture,
-) -> None:
-    """Test that we output a message on incorrect category."""
-    home_data_copy = deepcopy(HOME_DATA)
-    home_data_copy.products[2].category = "random"
-    with patch(
-        "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2",
-        return_value=home_data_copy,
-    ):
-        await async_setup_component(hass, DOMAIN, {})
-        await hass.async_block_till_done()
-    assert "The device you added is not yet supported" in caplog.text
diff --git a/tests/components/roborock/test_number.py b/tests/components/roborock/test_number.py
index 7e87b49253e..3291dd2a7dc 100644
--- a/tests/components/roborock/test_number.py
+++ b/tests/components/roborock/test_number.py
@@ -3,11 +3,9 @@
 from unittest.mock import patch
 
 import pytest
-import roborock
 
 from homeassistant.components.number import ATTR_VALUE, SERVICE_SET_VALUE
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import HomeAssistantError
 
 from tests.common import MockConfigEntry
 
@@ -39,36 +37,3 @@ async def test_update_success(
             target={"entity_id": entity_id},
         )
     assert mock_send_message.assert_called_once
-
-
-@pytest.mark.parametrize(
-    ("entity_id", "value"),
-    [
-        ("number.roborock_s7_maxv_volume", 3.0),
-    ],
-)
-async def test_update_failed(
-    hass: HomeAssistant,
-    bypass_api_fixture,
-    setup_entry: MockConfigEntry,
-    entity_id: str,
-    value: float,
-) -> None:
-    """Test allowed changing values for number entities."""
-    # Ensure that the entity exist, as these test can pass even if there is no entity.
- assert hass.states.get(entity_id) is not None - with ( - patch( - "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.send_message", - side_effect=roborock.exceptions.RoborockTimeout, - ) as mock_send_message, - pytest.raises(HomeAssistantError, match="Failed to update Roborock options"), - ): - await hass.services.async_call( - "number", - SERVICE_SET_VALUE, - service_data={ATTR_VALUE: value}, - blocking=True, - target={"entity_id": entity_id}, - ) - assert mock_send_message.assert_called_once diff --git a/tests/components/roborock/test_select.py b/tests/components/roborock/test_select.py index 784150e24c7..c8626818749 100644 --- a/tests/components/roborock/test_select.py +++ b/tests/components/roborock/test_select.py @@ -1,18 +1,13 @@ """Test Roborock Select platform.""" -import copy from unittest.mock import patch import pytest from roborock.exceptions import RoborockException -from homeassistant.components.roborock import DOMAIN -from homeassistant.const import SERVICE_SELECT_OPTION, STATE_UNKNOWN +from homeassistant.const import SERVICE_SELECT_OPTION from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.setup import async_setup_component - -from .mock_data import PROP from tests.common import MockConfigEntry @@ -22,7 +17,6 @@ from tests.common import MockConfigEntry [ ("select.roborock_s7_maxv_mop_mode", "deep"), ("select.roborock_s7_maxv_mop_intensity", "mild"), - ("select.roborock_s7_maxv_selected_map", "Downstairs"), ], ) async def test_update_success( @@ -59,7 +53,7 @@ async def test_update_failure( "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.send_message", side_effect=RoborockException(), ), - pytest.raises(HomeAssistantError, match="Error while calling SET_MOP_MOD"), + pytest.raises(HomeAssistantError), ): await hass.services.async_call( "select", @@ -68,21 +62,3 @@ async def test_update_failure( blocking=True, target={"entity_id": "select.roborock_s7_maxv_mop_mode"}, ) - - -async def test_none_map_select( - hass: HomeAssistant, - bypass_api_fixture, - mock_roborock_entry: MockConfigEntry, -) -> None: - """Test that the select entity correctly handles not having a current map.""" - prop = copy.deepcopy(PROP) - # Set map status to None so that current map is never set - prop.status.map_status = None - with patch( - "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop", - return_value=prop, - ): - await async_setup_component(hass, DOMAIN, {}) - select_entity = hass.states.get("select.roborock_s7_maxv_selected_map") - assert select_entity.state == STATE_UNKNOWN diff --git a/tests/components/roborock/test_sensor.py b/tests/components/roborock/test_sensor.py index 908754f3b92..e608895ca43 100644 --- a/tests/components/roborock/test_sensor.py +++ b/tests/components/roborock/test_sensor.py @@ -21,7 +21,7 @@ from tests.common import MockConfigEntry async def test_sensors(hass: HomeAssistant, setup_entry: MockConfigEntry) -> None: """Test sensors and check test values are correctly set.""" - assert len(hass.states.async_all("sensor")) == 38 + assert len(hass.states.async_all("sensor")) == 34 assert hass.states.get("sensor.roborock_s7_maxv_main_brush_time_left").state == str( MAIN_BRUSH_REPLACE_TIME - 74382 ) @@ -60,10 +60,6 @@ async def test_sensors(hass: HomeAssistant, setup_entry: MockConfigEntry) -> Non assert hass.states.get("sensor.dyad_pro_roller_left").state == "222" assert hass.states.get("sensor.dyad_pro_error").state == "none" assert 
hass.states.get("sensor.dyad_pro_total_cleaning_time").state == "213" - assert hass.states.get("sensor.zeo_one_state").state == "drying" - assert hass.states.get("sensor.zeo_one_countdown").state == "0" - assert hass.states.get("sensor.zeo_one_washing_left").state == "253" - assert hass.states.get("sensor.zeo_one_error").state == "none" async def test_listener_update( diff --git a/tests/components/roborock/test_switch.py b/tests/components/roborock/test_switch.py index 5de3c208c1e..3afa72b319d 100644 --- a/tests/components/roborock/test_switch.py +++ b/tests/components/roborock/test_switch.py @@ -3,11 +3,9 @@ from unittest.mock import patch import pytest -import roborock from homeassistant.components.switch import SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry @@ -51,37 +49,3 @@ async def test_update_success( target={"entity_id": entity_id}, ) assert mock_send_message.assert_called_once - - -@pytest.mark.parametrize( - ("entity_id", "service"), - [ - ("switch.roborock_s7_maxv_status_indicator_light", SERVICE_TURN_ON), - ("switch.roborock_s7_maxv_status_indicator_light", SERVICE_TURN_OFF), - ], -) -async def test_update_failed( - hass: HomeAssistant, - bypass_api_fixture, - setup_entry: MockConfigEntry, - entity_id: str, - service: str, -) -> None: - """Test a failure while updating a switch.""" - # Ensure that the entity exist, as these test can pass even if there is no entity. - assert hass.states.get(entity_id) is not None - with ( - patch( - "homeassistant.components.roborock.coordinator.RoborockLocalClientV1._send_command", - side_effect=roborock.exceptions.RoborockTimeout, - ) as mock_send_message, - pytest.raises(HomeAssistantError, match="Failed to update Roborock options"), - ): - await hass.services.async_call( - "switch", - service, - service_data=None, - blocking=True, - target={"entity_id": entity_id}, - ) - assert mock_send_message.assert_called_once diff --git a/tests/components/roborock/test_time.py b/tests/components/roborock/test_time.py index 836a86bd114..ca6507f887b 100644 --- a/tests/components/roborock/test_time.py +++ b/tests/components/roborock/test_time.py @@ -4,11 +4,9 @@ from datetime import time from unittest.mock import patch import pytest -import roborock from homeassistant.components.time import SERVICE_SET_VALUE from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry @@ -40,35 +38,3 @@ async def test_update_success( target={"entity_id": entity_id}, ) assert mock_send_message.assert_called_once - - -@pytest.mark.parametrize( - ("entity_id"), - [ - ("time.roborock_s7_maxv_do_not_disturb_begin"), - ], -) -async def test_update_failure( - hass: HomeAssistant, - bypass_api_fixture, - setup_entry: MockConfigEntry, - entity_id: str, -) -> None: - """Test turning switch entities on and off.""" - # Ensure that the entity exist, as these test can pass even if there is no entity. 
- assert hass.states.get(entity_id) is not None - with ( - patch( - "homeassistant.components.roborock.coordinator.RoborockLocalClientV1._send_command", - side_effect=roborock.exceptions.RoborockTimeout, - ) as mock_send_message, - pytest.raises(HomeAssistantError, match="Failed to update Roborock options"), - ): - await hass.services.async_call( - "time", - SERVICE_SET_VALUE, - service_data={"time": time(hour=1, minute=1)}, - blocking=True, - target={"entity_id": entity_id}, - ) - assert mock_send_message.assert_called_once diff --git a/tests/components/roborock/test_vacuum.py b/tests/components/roborock/test_vacuum.py index 5080711d0f9..15a64cbecf3 100644 --- a/tests/components/roborock/test_vacuum.py +++ b/tests/components/roborock/test_vacuum.py @@ -24,7 +24,7 @@ from homeassistant.components.vacuum import ( from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from .mock_data import PROP @@ -38,17 +38,12 @@ DEVICE_ID = "abc123" async def test_registry_entries( hass: HomeAssistant, entity_registry: er.EntityRegistry, - device_registry: dr.DeviceRegistry, bypass_api_fixture, setup_entry: MockConfigEntry, ) -> None: """Tests devices are registered in the entity registry.""" - entity_entry = entity_registry.async_get(ENTITY_ID) - assert entity_entry.unique_id == DEVICE_ID - - device_entry = device_registry.async_get(entity_entry.device_id) - assert device_entry is not None - assert device_entry.model_id == "roborock.vacuum.a27" + entry = entity_registry.async_get(ENTITY_ID) + assert entry.unique_id == DEVICE_ID @pytest.mark.parametrize( diff --git a/tests/components/roku/conftest.py b/tests/components/roku/conftest.py index 7ac332a1a6c..160a1bf3127 100644 --- a/tests/components/roku/conftest.py +++ b/tests/components/roku/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Roku integration tests.""" -from collections.abc import Generator import json from unittest.mock import MagicMock, patch import pytest from rokuecp import Device as RokuDevice +from typing_extensions import Generator from homeassistant.components.roku.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/roku/test_config_flow.py b/tests/components/roku/test_config_flow.py index 7144c77cad9..3cf5627f342 100644 --- a/tests/components/roku/test_config_flow.py +++ b/tests/components/roku/test_config_flow.py @@ -6,7 +6,7 @@ from unittest.mock import MagicMock import pytest from rokuecp import RokuConnectionError -from homeassistant.components.roku.const import CONF_PLAY_MEDIA_APP_ID, DOMAIN +from homeassistant.components.roku.const import DOMAIN from homeassistant.config_entries import SOURCE_HOMEKIT, SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_NAME, CONF_SOURCE from homeassistant.core import HomeAssistant @@ -254,25 +254,3 @@ async def test_ssdp_discovery( assert result["data"] assert result["data"][CONF_HOST] == HOST assert result["data"][CONF_NAME] == UPNP_FRIENDLY_NAME - - -async def test_options_flow( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Test options config flow.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) - - assert result.get("type") is 
FlowResultType.FORM - assert result.get("step_id") == "init" - - result2 = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={CONF_PLAY_MEDIA_APP_ID: "782875"}, - ) - - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("data") == { - CONF_PLAY_MEDIA_APP_ID: "782875", - } diff --git a/tests/components/roku/test_media_player.py b/tests/components/roku/test_media_player.py index 5f8a41d16ac..9aff8f581d7 100644 --- a/tests/components/roku/test_media_player.py +++ b/tests/components/roku/test_media_player.py @@ -32,12 +32,12 @@ from homeassistant.components.roku.const import ( ATTR_FORMAT, ATTR_KEYWORD, ATTR_MEDIA_TYPE, - DEFAULT_PLAY_MEDIA_APP_ID, DOMAIN, SERVICE_SEARCH, ) from homeassistant.components.stream import FORMAT_CONTENT_TYPE, HLS_PROVIDER from homeassistant.components.websocket_api import TYPE_RESULT +from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, @@ -59,7 +59,6 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -496,7 +495,7 @@ async def test_services_play_media( blocking=True, ) - assert mock_roku.launch.call_count == 0 + assert mock_roku.play_on_roku.call_count == 0 await hass.services.async_call( MP_DOMAIN, @@ -510,7 +509,7 @@ async def test_services_play_media( blocking=True, ) - assert mock_roku.launch.call_count == 0 + assert mock_roku.play_on_roku.call_count == 0 @pytest.mark.parametrize( @@ -547,10 +546,9 @@ async def test_services_play_media_audio( }, blocking=True, ) - mock_roku.launch.assert_called_once_with( - DEFAULT_PLAY_MEDIA_APP_ID, + mock_roku.play_on_roku.assert_called_once_with( + content_id, { - "u": content_id, "t": "a", "songName": resolved_name, "songFormat": resolved_format, @@ -593,11 +591,9 @@ async def test_services_play_media_video( }, blocking=True, ) - mock_roku.launch.assert_called_once_with( - DEFAULT_PLAY_MEDIA_APP_ID, + mock_roku.play_on_roku.assert_called_once_with( + content_id, { - "u": content_id, - "t": "v", "videoName": resolved_name, "videoFormat": resolved_format, }, @@ -621,12 +617,10 @@ async def test_services_camera_play_stream( blocking=True, ) - assert mock_roku.launch.call_count == 1 - mock_roku.launch.assert_called_with( - DEFAULT_PLAY_MEDIA_APP_ID, + assert mock_roku.play_on_roku.call_count == 1 + mock_roku.play_on_roku.assert_called_with( + "https://awesome.tld/api/hls/api_token/master_playlist.m3u8", { - "u": "https://awesome.tld/api/hls/api_token/master_playlist.m3u8", - "t": "v", "videoName": "Camera Stream", "videoFormat": "hls", }, @@ -659,21 +653,14 @@ async def test_services_play_media_local_source( blocking=True, ) - assert mock_roku.launch.call_count == 1 - assert mock_roku.launch.call_args - call_args = mock_roku.launch.call_args.args - assert call_args[0] == DEFAULT_PLAY_MEDIA_APP_ID - assert "u" in call_args[1] - assert "/local/Epic%20Sax%20Guy%2010%20Hours.mp4?authSig=" in call_args[1]["u"] - assert "t" in call_args[1] - assert call_args[1]["t"] == "v" - assert "videoFormat" in call_args[1] - assert call_args[1]["videoFormat"] == "mp4" - assert "videoName" in call_args[1] - assert ( - call_args[1]["videoName"] - == "media-source://media_source/local/Epic Sax Guy 10 Hours.mp4" - ) + 
assert mock_roku.play_on_roku.call_count == 1 + assert mock_roku.play_on_roku.call_args + call_args = mock_roku.play_on_roku.call_args.args + assert "/local/Epic%20Sax%20Guy%2010%20Hours.mp4?authSig=" in call_args[0] + assert call_args[1] == { + "videoFormat": "mp4", + "videoName": "media-source://media_source/local/Epic Sax Guy 10 Hours.mp4", + } @pytest.mark.parametrize("mock_device", ["roku/rokutv-7820x.json"], indirect=True) diff --git a/tests/components/roomba/test_config_flow.py b/tests/components/roomba/test_config_flow.py index dedccc14249..e5f882afa36 100644 --- a/tests/components/roomba/test_config_flow.py +++ b/tests/components/roomba/test_config_flow.py @@ -8,12 +8,7 @@ from roombapy import RoombaConnectionError, RoombaInfo from homeassistant.components import dhcp, zeroconf from homeassistant.components.roomba import config_flow -from homeassistant.components.roomba.const import ( - CONF_BLID, - CONF_CONTINUOUS, - DEFAULT_DELAY, - DOMAIN, -) +from homeassistant.components.roomba.const import CONF_BLID, CONF_CONTINUOUS, DOMAIN from homeassistant.config_entries import ( SOURCE_DHCP, SOURCE_IGNORE, @@ -211,7 +206,7 @@ async def test_form_user_discovery_and_password_fetch(hass: HomeAssistant) -> No assert result3["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: DEFAULT_DELAY, + CONF_DELAY: 1, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -336,7 +331,7 @@ async def test_form_user_discovery_manual_and_auto_password_fetch( assert result4["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: DEFAULT_DELAY, + CONF_DELAY: 1, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -473,7 +468,7 @@ async def test_form_user_discovery_no_devices_found_and_auto_password_fetch( assert result3["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: DEFAULT_DELAY, + CONF_DELAY: 1, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -546,7 +541,7 @@ async def test_form_user_discovery_no_devices_found_and_password_fetch_fails( assert result4["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: DEFAULT_DELAY, + CONF_DELAY: 1, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -682,7 +677,7 @@ async def test_form_user_discovery_and_password_fetch_gets_connection_refused( assert result4["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: DEFAULT_DELAY, + CONF_DELAY: 1, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -743,7 +738,7 @@ async def test_dhcp_discovery_and_roomba_discovery_finds( assert result2["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: DEFAULT_DELAY, + CONF_DELAY: 1, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -821,7 +816,7 @@ async def test_dhcp_discovery_falls_back_to_manual( assert result4["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: DEFAULT_DELAY, + CONF_DELAY: 1, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -891,7 +886,7 @@ async def test_dhcp_discovery_no_devices_falls_back_to_manual( assert result3["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: DEFAULT_DELAY, + CONF_DELAY: 1, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -1060,43 +1055,6 @@ async def test_dhcp_discovery_partial_hostname(hass: HomeAssistant) -> None: assert current_flows[0]["flow_id"] == result2["flow_id"] -async def test_dhcp_discovery_when_user_flow_in_progress(hass: HomeAssistant) -> None: - """Test discovery flow when user flow is in progress.""" - - # Start a DHCP flow - with patch( - 
"homeassistant.components.roomba.config_flow.RoombaDiscovery", _mocked_discovery - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - # Start a user flow - unique ID not set - with patch( - "homeassistant.components.roomba.config_flow.RoombaDiscovery", _mocked_discovery - ): - result2 = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_DHCP}, - data=dhcp.DhcpServiceInfo( - ip=MOCK_IP, - macaddress="aabbccddeeff", - hostname="irobot-blidthatislonger", - ), - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "link" - - current_flows = hass.config_entries.flow.async_progress() - assert len(current_flows) == 2 - - async def test_options_flow( hass: HomeAssistant, ) -> None: @@ -1124,10 +1082,10 @@ async def test_options_flow( result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={CONF_CONTINUOUS: True, CONF_DELAY: DEFAULT_DELAY}, + user_input={CONF_CONTINUOUS: True, CONF_DELAY: 1}, ) await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == {CONF_CONTINUOUS: True, CONF_DELAY: DEFAULT_DELAY} - assert config_entry.options == {CONF_CONTINUOUS: True, CONF_DELAY: DEFAULT_DELAY} + assert result["data"] == {CONF_CONTINUOUS: True, CONF_DELAY: 1} + assert config_entry.options == {CONF_CONTINUOUS: True, CONF_DELAY: 1} diff --git a/tests/components/roon/test_config_flow.py b/tests/components/roon/test_config_flow.py index 9539a9c0f5b..9822c88fa48 100644 --- a/tests/components/roon/test_config_flow.py +++ b/tests/components/roon/test_config_flow.py @@ -48,7 +48,7 @@ class RoonApiMockException(RoonApiMock): @property def token(self): """Throw exception.""" - raise Exception # noqa: TRY002 + raise Exception # pylint: disable=broad-exception-raised class RoonDiscoveryMock: diff --git a/tests/components/rova/snapshots/test_init.ambr b/tests/components/rova/snapshots/test_init.ambr index 5e607e6a8df..ffb08ee082e 100644 --- a/tests/components/rova/snapshots/test_init.ambr +++ b/tests/components/rova/snapshots/test_init.ambr @@ -21,7 +21,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': '8381BE 13', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/rova/test_config_flow.py b/tests/components/rova/test_config_flow.py index 608f4ec105b..d9d1df3e188 100644 --- a/tests/components/rova/test_config_flow.py +++ b/tests/components/rova/test_config_flow.py @@ -11,7 +11,7 @@ from homeassistant.components.rova.const import ( CONF_ZIP_CODE, DOMAIN, ) -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -167,3 +167,104 @@ async def test_abort_if_api_throws_exception( CONF_HOUSE_NUMBER: HOUSE_NUMBER, CONF_HOUSE_NUMBER_SUFFIX: HOUSE_NUMBER_SUFFIX, } + + +async def test_import(hass: HomeAssistant, mock_rova: MagicMock) -> None: + """Test import flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_ZIP_CODE: ZIP_CODE, + CONF_HOUSE_NUMBER: HOUSE_NUMBER, + CONF_HOUSE_NUMBER_SUFFIX: HOUSE_NUMBER_SUFFIX, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + 
assert result["title"] == f"{ZIP_CODE} {HOUSE_NUMBER} {HOUSE_NUMBER_SUFFIX}" + assert result["data"] == { + CONF_ZIP_CODE: ZIP_CODE, + CONF_HOUSE_NUMBER: HOUSE_NUMBER, + CONF_HOUSE_NUMBER_SUFFIX: HOUSE_NUMBER_SUFFIX, + } + + +async def test_import_already_configured( + hass: HomeAssistant, mock_rova: MagicMock +) -> None: + """Test we abort import flow when entry is already configured.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=f"{ZIP_CODE}{HOUSE_NUMBER}{HOUSE_NUMBER_SUFFIX}", + data={ + CONF_ZIP_CODE: ZIP_CODE, + CONF_HOUSE_NUMBER: HOUSE_NUMBER, + CONF_HOUSE_NUMBER_SUFFIX: HOUSE_NUMBER_SUFFIX, + }, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_ZIP_CODE: ZIP_CODE, + CONF_HOUSE_NUMBER: HOUSE_NUMBER, + CONF_HOUSE_NUMBER_SUFFIX: HOUSE_NUMBER_SUFFIX, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_import_if_not_rova_area( + hass: HomeAssistant, mock_rova: MagicMock +) -> None: + """Test we abort if rova does not collect at the given address.""" + + # test with area where rova does not collect + mock_rova.return_value.is_rova_area.return_value = False + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_ZIP_CODE: ZIP_CODE, + CONF_HOUSE_NUMBER: HOUSE_NUMBER, + CONF_HOUSE_NUMBER_SUFFIX: HOUSE_NUMBER_SUFFIX, + }, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "invalid_rova_area" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (ConnectTimeout(), "cannot_connect"), + (HTTPError(), "cannot_connect"), + ], +) +async def test_import_connection_errors( + hass: HomeAssistant, exception: Exception, error: str, mock_rova: MagicMock +) -> None: + """Test import connection errors flow.""" + + # test with HTTPError + mock_rova.return_value.is_rova_area.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_ZIP_CODE: ZIP_CODE, + CONF_HOUSE_NUMBER: HOUSE_NUMBER, + CONF_HOUSE_NUMBER_SUFFIX: HOUSE_NUMBER_SUFFIX, + }, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == error diff --git a/tests/components/rpi_power/test_binary_sensor.py b/tests/components/rpi_power/test_binary_sensor.py index a5776a22fb0..1643df6c993 100644 --- a/tests/components/rpi_power/test_binary_sensor.py +++ b/tests/components/rpi_power/test_binary_sensor.py @@ -24,7 +24,7 @@ ENTITY_ID = "binary_sensor.rpi_power_status" MODULE = "homeassistant.components.rpi_power.binary_sensor.new_under_voltage" -async def _async_setup_component(hass: HomeAssistant, detected: bool) -> MagicMock: +async def _async_setup_component(hass, detected): mocked_under_voltage = MagicMock() type(mocked_under_voltage).get = MagicMock(return_value=detected) entry = MockConfigEntry(domain=DOMAIN) @@ -68,6 +68,6 @@ async def test_new_detected( assert state.state == STATE_OFF assert ( binary_sensor.__name__, - logging.DEBUG, + logging.INFO, DESCRIPTION_NORMALIZED, ) in caplog.record_tuples diff --git a/tests/components/rtsp_to_webrtc/conftest.py b/tests/components/rtsp_to_webrtc/conftest.py index 956825f6372..6e790b4ff00 100644 --- a/tests/components/rtsp_to_webrtc/conftest.py +++ b/tests/components/rtsp_to_webrtc/conftest.py @@ -2,12 +2,13 @@ from __future__ import annotations -from collections.abc import AsyncGenerator, 
Awaitable, Callable +from collections.abc import Awaitable, Callable from typing import Any from unittest.mock import patch import pytest import rtsp_to_webrtc +from typing_extensions import AsyncGenerator from homeassistant.components import camera from homeassistant.components.rtsp_to_webrtc import DOMAIN diff --git a/tests/components/rtsp_to_webrtc/test_config_flow.py b/tests/components/rtsp_to_webrtc/test_config_flow.py index d3afa80b0b4..504ede68ac7 100644 --- a/tests/components/rtsp_to_webrtc/test_config_flow.py +++ b/tests/components/rtsp_to_webrtc/test_config_flow.py @@ -7,11 +7,11 @@ from unittest.mock import patch import rtsp_to_webrtc from homeassistant import config_entries +from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.rtsp_to_webrtc import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .conftest import ComponentSetup @@ -25,7 +25,7 @@ async def test_web_full_flow(hass: HomeAssistant) -> None: ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "user" - assert result.get("data_schema").schema.get("server_url") is str + assert result.get("data_schema").schema.get("server_url") == str assert not result.get("errors") with ( patch("rtsp_to_webrtc.client.Client.heartbeat"), @@ -64,7 +64,7 @@ async def test_invalid_url(hass: HomeAssistant) -> None: ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "user" - assert result.get("data_schema").schema.get("server_url") is str + assert result.get("data_schema").schema.get("server_url") == str assert not result.get("errors") result = await hass.config_entries.flow.async_configure( result["flow_id"], {"server_url": "not-a-url"} diff --git a/tests/components/rtsp_to_webrtc/test_init.py b/tests/components/rtsp_to_webrtc/test_init.py index 85155855a09..3071c3d9d08 100644 --- a/tests/components/rtsp_to_webrtc/test_init.py +++ b/tests/components/rtsp_to_webrtc/test_init.py @@ -10,7 +10,7 @@ import aiohttp import pytest import rtsp_to_webrtc -from homeassistant.components.rtsp_to_webrtc import DOMAIN +from homeassistant.components.rtsp_to_webrtc import CONF_STUN_SERVER, DOMAIN from homeassistant.components.websocket_api import TYPE_RESULT from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -18,6 +18,7 @@ from homeassistant.setup import async_setup_component from .conftest import SERVER_URL, STREAM_SOURCE, ComponentSetup +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import WebSocketGenerator @@ -86,11 +87,12 @@ async def test_setup_communication_failure( assert entries[0].state is ConfigEntryState.SETUP_RETRY -@pytest.mark.usefixtures("mock_camera", "rtsp_to_webrtc_client") async def test_offer_for_stream_source( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + mock_camera: Any, + rtsp_to_webrtc_client: Any, setup_integration: ComponentSetup, ) -> None: """Test successful response from RTSPtoWebRTC server.""" @@ -102,33 +104,21 @@ async def test_offer_for_stream_source( ) client = await hass_ws_client(hass) - await client.send_json_auto_id( + await client.send_json( { - "type": "camera/webrtc/offer", + "id": 1, + "type": "camera/web_rtc_offer", "entity_id": "camera.demo_camera", 
"offer": OFFER_SDP, } ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - - # Answer - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "answer", - "answer": ANSWER_SDP, - } + assert response.get("id") == 1 + assert response.get("type") == TYPE_RESULT + assert response.get("success") + assert "result" in response + assert response["result"].get("answer") == ANSWER_SDP + assert "error" not in response # Validate request parameters were sent correctly assert len(aioclient_mock.mock_calls) == 1 @@ -138,11 +128,12 @@ async def test_offer_for_stream_source( } -@pytest.mark.usefixtures("mock_camera", "rtsp_to_webrtc_client") async def test_offer_failure( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, + mock_camera: Any, + rtsp_to_webrtc_client: Any, setup_integration: ComponentSetup, ) -> None: """Test a transient failure talking to RTSPtoWebRTC server.""" @@ -154,31 +145,86 @@ async def test_offer_failure( ) client = await hass_ws_client(hass) - await client.send_json_auto_id( + await client.send_json( { - "type": "camera/webrtc/offer", + "id": 2, + "type": "camera/web_rtc_offer", "entity_id": "camera.demo_camera", "offer": OFFER_SDP, } ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] + assert response.get("id") == 2 + assert response.get("type") == TYPE_RESULT + assert "success" in response + assert not response.get("success") + assert "error" in response + assert response["error"].get("code") == "web_rtc_offer_failed" + assert "message" in response["error"] + assert "RTSPtoWebRTC server communication failure" in response["error"]["message"] - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - # Answer +async def test_no_stun_server( + hass: HomeAssistant, + rtsp_to_webrtc_client: Any, + setup_integration: ComponentSetup, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test successful setup and unload.""" + await setup_integration() + + client = await hass_ws_client(hass) + await client.send_json( + { + "id": 2, + "type": "rtsp_to_webrtc/get_settings", + } + ) response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "error", - "code": "webrtc_offer_failed", - "message": "RTSPtoWebRTC server communication failure: ", - } + assert response.get("id") == 2 + assert response.get("type") == TYPE_RESULT + assert "result" in response + assert response["result"].get("stun_server") == "" + + +@pytest.mark.parametrize( + "config_entry_options", [{CONF_STUN_SERVER: "example.com:1234"}] +) +async def test_stun_server( + hass: HomeAssistant, + rtsp_to_webrtc_client: Any, + setup_integration: ComponentSetup, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test successful setup and unload.""" + await setup_integration() + + client = await hass_ws_client(hass) + await client.send_json( + { + "id": 3, 
+            "type": "rtsp_to_webrtc/get_settings",
+        }
+    )
+    response = await client.receive_json()
+    assert response.get("id") == 3
+    assert response.get("type") == TYPE_RESULT
+    assert "result" in response
+    assert response["result"].get("stun_server") == "example.com:1234"
+
+    # Simulate an options flow change, clearing the stun server and verify the change is reflected
+    hass.config_entries.async_update_entry(config_entry, options={})
+    await hass.async_block_till_done()
+
+    await client.send_json(
+        {
+            "id": 4,
+            "type": "rtsp_to_webrtc/get_settings",
+        }
+    )
+    response = await client.receive_json()
+    assert response.get("id") == 4
+    assert response.get("type") == TYPE_RESULT
+    assert "result" in response
+    assert response["result"].get("stun_server") == ""
diff --git a/tests/components/ruckus_unleashed/__init__.py b/tests/components/ruckus_unleashed/__init__.py
index b6c9c86953a..ccbf404cce0 100644
--- a/tests/components/ruckus_unleashed/__init__.py
+++ b/tests/components/ruckus_unleashed/__init__.py
@@ -1,4 +1,4 @@
-"""Tests for the Ruckus integration."""
+"""Tests for the Ruckus Unleashed integration."""
 
 from __future__ import annotations
 
@@ -78,7 +78,7 @@ DEFAULT_UNIQUEID = DEFAULT_SYSTEM_INFO[API_SYS_SYSINFO][API_SYS_SYSINFO_SERIAL]
 
 
 def mock_config_entry() -> MockConfigEntry:
-    """Return a Ruckus mock config entry."""
+    """Return a Ruckus Unleashed mock config entry."""
     return MockConfigEntry(
         domain=DOMAIN,
         title=DEFAULT_TITLE,
@@ -89,7 +89,7 @@ def mock_config_entry() -> MockConfigEntry:
 
 
 async def init_integration(hass: HomeAssistant) -> MockConfigEntry:
-    """Set up the Ruckus integration in Home Assistant."""
+    """Set up the Ruckus Unleashed integration in Home Assistant."""
     entry = mock_config_entry()
     entry.add_to_hass(hass)
     # Make device tied to other integration so device tracker entities get enabled
diff --git a/tests/components/ruckus_unleashed/test_config_flow.py b/tests/components/ruckus_unleashed/test_config_flow.py
index 61f689f3030..5bfe2d941d5 100644
--- a/tests/components/ruckus_unleashed/test_config_flow.py
+++ b/tests/components/ruckus_unleashed/test_config_flow.py
@@ -1,4 +1,4 @@
-"""Test the config flow."""
+"""Test the Ruckus Unleashed config flow."""
 
 from copy import deepcopy
 from datetime import timedelta
@@ -83,7 +83,15 @@ async def test_form_user_reauth(hass: HomeAssistant) -> None:
     entry = mock_config_entry()
     entry.add_to_hass(hass)
 
-    result = await entry.start_reauth_flow(hass)
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        context={
+            "source": config_entries.SOURCE_REAUTH,
+            "entry_id": entry.entry_id,
+            "unique_id": entry.unique_id,
+        },
+        data=entry.data,
+    )
 
     flows = hass.config_entries.flow.async_progress()
     assert len(flows) == 1
@@ -113,7 +121,15 @@ async def test_form_user_reauth_different_unique_id(hass: HomeAssistant) -> None
     entry = mock_config_entry()
     entry.add_to_hass(hass)
 
-    result = await entry.start_reauth_flow(hass)
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        context={
+            "source": config_entries.SOURCE_REAUTH,
+            "entry_id": entry.entry_id,
+            "unique_id": entry.unique_id,
+        },
+        data=entry.data,
+    )
 
     flows = hass.config_entries.flow.async_progress()
     assert len(flows) == 1
@@ -145,7 +161,15 @@ async def test_form_user_reauth_invalid_auth(hass: HomeAssistant) -> None:
     entry = mock_config_entry()
     entry.add_to_hass(hass)
 
-    result = await entry.start_reauth_flow(hass)
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        context={
+            "source": config_entries.SOURCE_REAUTH,
+            "entry_id": entry.entry_id,
+ "unique_id": entry.unique_id, + }, + data=entry.data, + ) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -177,7 +201,15 @@ async def test_form_user_reauth_cannot_connect(hass: HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=entry.data, + ) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -209,7 +241,15 @@ async def test_form_user_reauth_general_exception(hass: HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=entry.data, + ) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 diff --git a/tests/components/ruckus_unleashed/test_device_tracker.py b/tests/components/ruckus_unleashed/test_device_tracker.py index 460c64c9651..79d7c2dfda4 100644 --- a/tests/components/ruckus_unleashed/test_device_tracker.py +++ b/tests/components/ruckus_unleashed/test_device_tracker.py @@ -1,4 +1,4 @@ -"""The sensor tests for the Ruckus platform.""" +"""The sensor tests for the Ruckus Unleashed platform.""" from datetime import timedelta from unittest.mock import AsyncMock diff --git a/tests/components/ruckus_unleashed/test_init.py b/tests/components/ruckus_unleashed/test_init.py index a7514677f20..8147f040bde 100644 --- a/tests/components/ruckus_unleashed/test_init.py +++ b/tests/components/ruckus_unleashed/test_init.py @@ -1,4 +1,4 @@ -"""Test the Ruckus config flow.""" +"""Test the Ruckus Unleashed config flow.""" from unittest.mock import AsyncMock diff --git a/tests/components/russound_rio/__init__.py b/tests/components/russound_rio/__init__.py deleted file mode 100644 index d0e6d77f1ee..00000000000 --- a/tests/components/russound_rio/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for the Russound RIO integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py deleted file mode 100644 index 09cccd7d83f..00000000000 --- a/tests/components/russound_rio/conftest.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Test fixtures for Russound RIO integration.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, Mock, patch - -from aiorussound import Controller, RussoundTcpConnectionHandler, Source -from aiorussound.rio import ZoneControlSurface -from aiorussound.util import controller_device_str, zone_device_str -import pytest - -from homeassistant.components.russound_rio.const import DOMAIN -from homeassistant.core import HomeAssistant - -from .const import HARDWARE_MAC, HOST, MOCK_CONFIG, MODEL, PORT - -from tests.common import MockConfigEntry, load_json_object_fixture - - -@pytest.fixture -def mock_setup_entry(): - """Prevent setup.""" - with patch( - 
"homeassistant.components.russound_rio.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: - """Mock a Russound RIO config entry.""" - return MockConfigEntry( - domain=DOMAIN, data=MOCK_CONFIG, unique_id=HARDWARE_MAC, title=MODEL - ) - - -@pytest.fixture -def mock_russound_client() -> Generator[AsyncMock]: - """Mock the Russound RIO client.""" - with ( - patch( - "homeassistant.components.russound_rio.RussoundClient", autospec=True - ) as mock_client, - patch( - "homeassistant.components.russound_rio.config_flow.RussoundClient", - new=mock_client, - ), - ): - client = mock_client.return_value - zones = { - int(k): ZoneControlSurface.from_dict(v) - for k, v in load_json_object_fixture("get_zones.json", DOMAIN).items() - } - client.sources = { - int(k): Source.from_dict(v) - for k, v in load_json_object_fixture("get_sources.json", DOMAIN).items() - } - for k, v in zones.items(): - v.device_str = zone_device_str(1, k) - v.fetch_current_source = Mock( - side_effect=lambda current_source=v.current_source: client.sources.get( - int(current_source) - ) - ) - - client.controllers = { - 1: Controller( - 1, "MCA-C5", client, controller_device_str(1), HARDWARE_MAC, None, zones - ) - } - client.connection_handler = RussoundTcpConnectionHandler(HOST, PORT) - client.is_connected = Mock(return_value=True) - client.unregister_state_update_callbacks.return_value = True - yield client diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py deleted file mode 100644 index 3d2924693d2..00000000000 --- a/tests/components/russound_rio/const.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Constants for russound_rio tests.""" - -from collections import namedtuple - -from homeassistant.components.media_player import DOMAIN as MP_DOMAIN - -HOST = "127.0.0.1" -PORT = 9621 -MODEL = "MCA-C5" -HARDWARE_MAC = "00:11:22:33:44:55" - -MOCK_CONFIG = { - "host": HOST, - "port": PORT, -} - -_CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) # noqa: PYI024 -MOCK_CONTROLLERS = {1: _CONTROLLER(mac_address=HARDWARE_MAC, controller_type=MODEL)} - -DEVICE_NAME = "mca_c5" -NAME_ZONE_1 = "backyard" -ENTITY_ID_ZONE_1 = f"{MP_DOMAIN}.{DEVICE_NAME}_{NAME_ZONE_1}" diff --git a/tests/components/russound_rio/fixtures/get_sources.json b/tests/components/russound_rio/fixtures/get_sources.json deleted file mode 100644 index e39d702b8a1..00000000000 --- a/tests/components/russound_rio/fixtures/get_sources.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "1": { - "name": "Aux", - "type": "Miscellaneous Audio" - }, - "2": { - "name": "Spotify", - "type": "Russound Media Streamer" - } -} diff --git a/tests/components/russound_rio/fixtures/get_zones.json b/tests/components/russound_rio/fixtures/get_zones.json deleted file mode 100644 index 396310339b3..00000000000 --- a/tests/components/russound_rio/fixtures/get_zones.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "1": { - "name": "Backyard", - "volume": "10", - "status": "ON", - "enabled": "True", - "current_source": "1" - }, - "2": { - "name": "Kitchen", - "volume": "50", - "status": "OFF", - "enabled": "True", - "current_source": "2" - }, - "3": { - "name": "Bedroom", - "volume": "10", - "status": "OFF", - "enabled": "False" - } -} diff --git a/tests/components/russound_rio/snapshots/test_init.ambr b/tests/components/russound_rio/snapshots/test_init.ambr deleted file mode 100644 index fcd59dd06f7..00000000000 --- 
a/tests/components/russound_rio/snapshots/test_init.ambr +++ /dev/null @@ -1,37 +0,0 @@ -# serializer version: 1 -# name: test_device_info - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://127.0.0.1', - 'connections': set({ - tuple( - 'mac', - '00:11:22:33:44:55', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'russound_rio', - '00:11:22:33:44:55', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Russound', - 'model': 'MCA-C5', - 'model_id': None, - 'name': 'MCA-C5', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- diff --git a/tests/components/russound_rio/test_config_flow.py b/tests/components/russound_rio/test_config_flow.py deleted file mode 100644 index cf754852731..00000000000 --- a/tests/components/russound_rio/test_config_flow.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Test the Russound RIO config flow.""" - -from unittest.mock import AsyncMock - -from homeassistant.components.russound_rio.const import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .const import MOCK_CONFIG, MODEL - - -async def test_form( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock -) -> None: - """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_CONFIG, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == MODEL - assert result["data"] == MOCK_CONFIG - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_cannot_connect( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock -) -> None: - """Test we handle cannot connect error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - mock_russound_client.connect.side_effect = TimeoutError - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_CONFIG, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - # Recover with correct information - mock_russound_client.connect.side_effect = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_CONFIG, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == MODEL - assert result["data"] == MOCK_CONFIG - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_import( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock -) -> None: - """Test we import a config entry.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=MOCK_CONFIG, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == MODEL - assert result["data"] == MOCK_CONFIG - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_import_cannot_connect( - hass: HomeAssistant, mock_russound_client: AsyncMock -) -> None: - """Test we handle import cannot connect 
error.""" - mock_russound_client.connect.side_effect = TimeoutError - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "cannot_connect" diff --git a/tests/components/russound_rio/test_init.py b/tests/components/russound_rio/test_init.py deleted file mode 100644 index 6787ee37c79..00000000000 --- a/tests/components/russound_rio/test_init.py +++ /dev/null @@ -1,44 +0,0 @@ -"""Tests for the Russound RIO integration.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.components.russound_rio.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from . import setup_integration - -from tests.common import MockConfigEntry - - -async def test_config_entry_not_ready( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_russound_client: AsyncMock, -) -> None: - """Test the Cambridge Audio configuration entry not ready.""" - mock_russound_client.connect.side_effect = TimeoutError - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - mock_russound_client.connect = AsyncMock(return_value=True) - - -async def test_device_info( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_russound_client: AsyncMock, - mock_config_entry: MockConfigEntry, - device_registry: dr.DeviceRegistry, -) -> None: - """Test device registry integration.""" - await setup_integration(hass, mock_config_entry) - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} - ) - assert device_entry is not None - assert device_entry == snapshot diff --git a/tests/components/russound_rio/test_media_player.py b/tests/components/russound_rio/test_media_player.py deleted file mode 100644 index e720e2c7f65..00000000000 --- a/tests/components/russound_rio/test_media_player.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Tests for the Russound RIO media player.""" - -from unittest.mock import AsyncMock - -from aiorussound.models import CallbackType, PlayStatus -import pytest - -from homeassistant.const import ( - STATE_BUFFERING, - STATE_IDLE, - STATE_OFF, - STATE_ON, - STATE_PAUSED, - STATE_PLAYING, -) -from homeassistant.core import HomeAssistant - -from . 
import setup_integration -from .const import ENTITY_ID_ZONE_1 - -from tests.common import MockConfigEntry - - -async def mock_state_update(client: AsyncMock) -> None: - """Trigger a callback in the media player.""" - for callback in client.register_state_update_callbacks.call_args_list: - await callback[0][0](client, CallbackType.STATE) - - -@pytest.mark.parametrize( - ("zone_status", "source_play_status", "media_player_state"), - [ - (True, None, STATE_ON), - (True, PlayStatus.PLAYING, STATE_PLAYING), - (True, PlayStatus.PAUSED, STATE_PAUSED), - (True, PlayStatus.TRANSITIONING, STATE_BUFFERING), - (True, PlayStatus.STOPPED, STATE_IDLE), - (False, None, STATE_OFF), - (False, PlayStatus.STOPPED, STATE_OFF), - ], -) -async def test_entity_state( - hass: HomeAssistant, - mock_russound_client: AsyncMock, - mock_config_entry: MockConfigEntry, - zone_status: bool, - source_play_status: PlayStatus | None, - media_player_state: str, -) -> None: - """Test media player state.""" - await setup_integration(hass, mock_config_entry) - mock_russound_client.controllers[1].zones[1].status = zone_status - mock_russound_client.sources[1].play_status = source_play_status - await mock_state_update(mock_russound_client) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID_ZONE_1) - assert state.state == media_player_state diff --git a/tests/components/rympro/test_config_flow.py b/tests/components/rympro/test_config_flow.py index 7770889bdeb..e92b7c23357 100644 --- a/tests/components/rympro/test_config_flow.py +++ b/tests/components/rympro/test_config_flow.py @@ -160,10 +160,17 @@ async def test_form_already_exists(hass: HomeAssistant, config_entry) -> None: assert result2["reason"] == "already_configured" -async def test_form_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: +async def test_form_reauth(hass: HomeAssistant, config_entry) -> None: """Test reauthentication.""" - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -196,12 +203,17 @@ async def test_form_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) - assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_reauth_with_new_account( - hass: HomeAssistant, config_entry: MockConfigEntry -) -> None: +async def test_form_reauth_with_new_account(hass: HomeAssistant, config_entry) -> None: """Test reauthentication with new account.""" - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] is None diff --git a/tests/components/sabnzbd/conftest.py b/tests/components/sabnzbd/conftest.py index b5450e5134f..7d68d3108f0 100644 --- a/tests/components/sabnzbd/conftest.py +++ b/tests/components/sabnzbd/conftest.py @@ -1,9 +1,9 @@ """Configuration for Sabnzbd tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/samsungtv/conftest.py b/tests/components/samsungtv/conftest.py index ec12031ef96..8d38adad06d 100644 --- 
a/tests/components/samsungtv/conftest.py +++ b/tests/components/samsungtv/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable, Generator +from collections.abc import Awaitable, Callable from datetime import datetime from socket import AddressFamily # pylint: disable=no-name-in-module from typing import Any @@ -19,12 +19,16 @@ from samsungtvws.encrypted.remote import SamsungTVEncryptedWSAsyncRemote from samsungtvws.event import ED_INSTALLED_APP_EVENT from samsungtvws.exceptions import ResponseError from samsungtvws.remote import ChannelEmitCommand +from typing_extensions import Generator from homeassistant.components.samsungtv.const import WEBSOCKET_SSL_PORT +from homeassistant.core import HomeAssistant, ServiceCall import homeassistant.util.dt as dt_util from .const import SAMPLE_DEVICE_INFO_UE48JU6400, SAMPLE_DEVICE_INFO_WIFI +from tests.common import async_mock_service + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -36,7 +40,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture(autouse=True) -def silent_ssdp_scanner() -> Generator[None]: +async def silent_ssdp_scanner(hass): """Start SSDP component and get Scanner, prevent actual SSDP traffic.""" with ( patch("homeassistant.components.ssdp.Scanner._async_start_ssdp_listeners"), @@ -179,7 +183,7 @@ def rest_api_fixture_non_ssl_only() -> Mock: class MockSamsungTVAsyncRest: """Mock for a MockSamsungTVAsyncRest.""" - def __init__(self, host, session, port, timeout) -> None: + def __init__(self, host, session, port, timeout): """Mock a MockSamsungTVAsyncRest.""" self.port = port self.host = host @@ -296,3 +300,9 @@ def mac_address_fixture() -> Mock: """Patch getmac.get_mac_address.""" with patch("getmac.get_mac_address", return_value=None) as mac: yield mac + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") diff --git a/tests/components/samsungtv/snapshots/test_init.ambr b/tests/components/samsungtv/snapshots/test_init.ambr index 017a2bc3e60..42a3f4fb396 100644 --- a/tests/components/samsungtv/snapshots/test_init.ambr +++ b/tests/components/samsungtv/snapshots/test_init.ambr @@ -30,10 +30,8 @@ }), 'manufacturer': None, 'model': '82GXARRS', - 'model_id': None, 'name': 'fake', 'name_by_user': None, - 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -52,10 +50,6 @@ 'mac', 'aa:bb:cc:dd:ee:ff', ), - tuple( - 'mac', - 'none', - ), }), 'disabled_by': None, 'entry_type': None, @@ -72,10 +66,8 @@ }), 'manufacturer': None, 'model': '82GXARRS', - 'model_id': '82GXARRS', 'name': 'fake', 'name_by_user': None, - 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/samsungtv/test_config_flow.py b/tests/components/samsungtv/test_config_flow.py index 7e707376b6f..6c325ae3b04 100644 --- a/tests/components/samsungtv/test_config_flow.py +++ b/tests/components/samsungtv/test_config_flow.py @@ -22,7 +22,6 @@ from websockets.exceptions import ( from homeassistant import config_entries from homeassistant.components import dhcp, ssdp, zeroconf -from homeassistant.components.samsungtv.config_flow import SamsungTVConfigFlow from homeassistant.components.samsungtv.const import ( CONF_MANUFACTURER, CONF_SESSION_ID, @@ -57,7 +56,7 @@ from homeassistant.const import ( CONF_TOKEN, ) from homeassistant.core import HomeAssistant -from 
homeassistant.data_entry_flow import BaseServiceInfo, FlowResultType +from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component from .const import ( @@ -983,78 +982,6 @@ async def test_dhcp_wired(hass: HomeAssistant, rest_api: Mock) -> None: assert result["result"].unique_id == "be9554b9-c9fb-41f4-8920-22da015376a4" -@pytest.mark.usefixtures("remotews", "rest_api_non_ssl_only", "remoteencws_failing") -@pytest.mark.parametrize( - ("source1", "data1", "source2", "data2", "is_matching_result"), - [ - ( - config_entries.SOURCE_DHCP, - MOCK_DHCP_DATA, - config_entries.SOURCE_DHCP, - MOCK_DHCP_DATA, - True, - ), - ( - config_entries.SOURCE_DHCP, - MOCK_DHCP_DATA, - config_entries.SOURCE_ZEROCONF, - MOCK_ZEROCONF_DATA, - False, - ), - ( - config_entries.SOURCE_ZEROCONF, - MOCK_ZEROCONF_DATA, - config_entries.SOURCE_DHCP, - MOCK_DHCP_DATA, - False, - ), - ( - config_entries.SOURCE_ZEROCONF, - MOCK_ZEROCONF_DATA, - config_entries.SOURCE_ZEROCONF, - MOCK_ZEROCONF_DATA, - True, - ), - ], -) -async def test_dhcp_zeroconf_already_in_progress( - hass: HomeAssistant, - source1: str, - data1: BaseServiceInfo, - source2: str, - data2: BaseServiceInfo, - is_matching_result: bool, -) -> None: - """Test starting a flow from dhcp or zeroconf when already in progress.""" - # confirm to add the entry - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": source1}, data=data1 - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - - real_is_matching = SamsungTVConfigFlow.is_matching - return_values = [] - - def is_matching(self, other_flow) -> bool: - return_values.append(real_is_matching(self, other_flow)) - return return_values[-1] - - with patch.object( - SamsungTVConfigFlow, "is_matching", wraps=is_matching, autospec=True - ): - # confirm to add the entry - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": source2}, data=data2 - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == RESULT_ALREADY_IN_PROGRESS - # Ensure the is_matching method returned the expected value - assert return_values == [is_matching_result] - - @pytest.mark.usefixtures("remotews", "rest_api", "remoteencws_failing") async def test_zeroconf(hass: HomeAssistant) -> None: """Test starting a flow from zeroconf.""" @@ -1822,7 +1749,11 @@ async def test_form_reauth_legacy(hass: HomeAssistant) -> None: """Test reauthenticate legacy.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_OLD_ENTRY) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1842,7 +1773,11 @@ async def test_form_reauth_websocket(hass: HomeAssistant) -> None: entry.add_to_hass(hass) assert entry.state is ConfigEntryState.NOT_LOADED - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1863,7 +1798,11 @@ async def test_form_reauth_websocket_cannot_connect( """Test reauthenticate websocket when we cannot connect on the first attempt.""" 
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_ENTRYDATA_WS) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1891,7 +1830,11 @@ async def test_form_reauth_websocket_not_supported(hass: HomeAssistant) -> None: """Test reauthenticate websocket when the device is not supported.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_ENTRYDATA_WS) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1920,7 +1863,11 @@ async def test_form_reauth_encrypted(hass: HomeAssistant) -> None: entry.add_to_hass(hass) assert entry.state is ConfigEntryState.NOT_LOADED - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/samsungtv/test_device_trigger.py b/tests/components/samsungtv/test_device_trigger.py index fa6efd08076..e16ea718cbb 100644 --- a/tests/components/samsungtv/test_device_trigger.py +++ b/tests/components/samsungtv/test_device_trigger.py @@ -7,8 +7,7 @@ from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.device_automation.exceptions import ( InvalidDeviceAutomationConfig, ) -from homeassistant.components.samsungtv import device_trigger -from homeassistant.components.samsungtv.const import DOMAIN +from homeassistant.components.samsungtv import DOMAIN, device_trigger from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import HomeAssistantError @@ -46,9 +45,7 @@ async def test_get_triggers( @pytest.mark.usefixtures("remoteencws", "rest_api") async def test_if_fires_on_turn_on_request( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] ) -> None: """Test for turn_on and turn_off triggers firing.""" await setup_samsungtv_entry(hass, MOCK_ENTRYDATA_ENCRYPTED_WS) @@ -98,11 +95,11 @@ async def test_if_fires_on_turn_on_request( ) await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[1].data["some"] == device.id - assert service_calls[1].data["id"] == 0 - assert service_calls[2].data["some"] == entity_id - assert service_calls[2].data["id"] == 0 + assert len(calls) == 2 + assert calls[0].data["some"] == device.id + assert calls[0].data["id"] == 0 + assert calls[1].data["some"] == entity_id + assert calls[1].data["id"] == 0 @pytest.mark.usefixtures("remoteencws", "rest_api") diff --git a/tests/components/samsungtv/test_diagnostics.py b/tests/components/samsungtv/test_diagnostics.py index 0319d5dd8dd..7b20002ae5b 100644 --- a/tests/components/samsungtv/test_diagnostics.py +++ b/tests/components/samsungtv/test_diagnostics.py @@ -16,7 +16,6 @@ from .const import ( SAMPLE_DEVICE_INFO_WIFI, ) -from tests.common import 
ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -30,7 +29,6 @@ async def test_entry_diagnostics( assert await get_diagnostics_for_config_entry(hass, hass_client, config_entry) == { "entry": { - "created_at": ANY, "data": { "host": "fake_host", "ip_address": "test", @@ -42,11 +40,9 @@ async def test_entry_diagnostics( "token": REDACTED, }, "disabled_by": None, - "discovery_keys": {}, "domain": "samsungtv", "entry_id": "123456", "minor_version": 2, - "modified_at": ANY, "options": {}, "pref_disable_new_entities": False, "pref_disable_polling": False, @@ -69,7 +65,6 @@ async def test_entry_diagnostics_encrypted( assert await get_diagnostics_for_config_entry(hass, hass_client, config_entry) == { "entry": { - "created_at": ANY, "data": { "host": "fake_host", "ip_address": "test", @@ -82,11 +77,9 @@ async def test_entry_diagnostics_encrypted( "session_id": REDACTED, }, "disabled_by": None, - "discovery_keys": {}, "domain": "samsungtv", "entry_id": "123456", "minor_version": 2, - "modified_at": ANY, "options": {}, "pref_disable_new_entities": False, "pref_disable_polling": False, @@ -109,7 +102,6 @@ async def test_entry_diagnostics_encrypte_offline( assert await get_diagnostics_for_config_entry(hass, hass_client, config_entry) == { "entry": { - "created_at": ANY, "data": { "host": "fake_host", "ip_address": "test", @@ -121,11 +113,9 @@ async def test_entry_diagnostics_encrypte_offline( "session_id": REDACTED, }, "disabled_by": None, - "discovery_keys": {}, "domain": "samsungtv", "entry_id": "123456", "minor_version": 2, - "modified_at": ANY, "options": {}, "pref_disable_new_entities": False, "pref_disable_polling": False, diff --git a/tests/components/samsungtv/test_trigger.py b/tests/components/samsungtv/test_trigger.py index e1d26043bb0..6607c60b8e8 100644 --- a/tests/components/samsungtv/test_trigger.py +++ b/tests/components/samsungtv/test_trigger.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from homeassistant.components import automation -from homeassistant.components.samsungtv.const import DOMAIN +from homeassistant.components.samsungtv import DOMAIN from homeassistant.const import SERVICE_RELOAD, SERVICE_TURN_ON from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr @@ -21,7 +21,7 @@ from tests.common import MockEntity, MockEntityPlatform @pytest.mark.parametrize("entity_domain", ["media_player", "remote"]) async def test_turn_on_trigger_device_id( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_domain: str, ) -> None: @@ -60,14 +60,14 @@ async def test_turn_on_trigger_device_id( ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == device.id - assert service_calls[1].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["some"] == device.id + assert calls[0].data["id"] == 0 with patch("homeassistant.config.load_yaml_dict", return_value={}): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) - service_calls.clear() + calls.clear() # Ensure WOL backup is called when trigger not present with patch( @@ -78,14 +78,14 @@ async def test_turn_on_trigger_device_id( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 0 mock_send_magic_packet.assert_called() @pytest.mark.usefixtures("remoteencws", "rest_api") 
@pytest.mark.parametrize("entity_domain", ["media_player", "remote"]) async def test_turn_on_trigger_entity_id( - hass: HomeAssistant, service_calls: list[ServiceCall], entity_domain: str + hass: HomeAssistant, calls: list[ServiceCall], entity_domain: str ) -> None: """Test for turn_on triggers by entity_id firing.""" await setup_samsungtv_entry(hass, MOCK_ENTRYDATA_ENCRYPTED_WS) @@ -119,9 +119,9 @@ async def test_turn_on_trigger_entity_id( ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == entity_id - assert service_calls[1].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["some"] == entity_id + assert calls[0].data["id"] == 0 @pytest.mark.usefixtures("remoteencws", "rest_api") diff --git a/tests/components/sanix/conftest.py b/tests/components/sanix/conftest.py index 405cad8b60b..86eaa870770 100644 --- a/tests/components/sanix/conftest.py +++ b/tests/components/sanix/conftest.py @@ -1,6 +1,5 @@ """Sanix tests configuration.""" -from collections.abc import Generator from datetime import datetime from unittest.mock import AsyncMock, patch from zoneinfo import ZoneInfo @@ -17,6 +16,7 @@ from sanix import ( ATTR_API_TIME, ) from sanix.models import Measurement +from typing_extensions import Generator from homeassistant.components.sanix.const import CONF_SERIAL_NUMBER, DOMAIN from homeassistant.const import CONF_TOKEN diff --git a/tests/components/scene/common.py b/tests/components/scene/common.py index 39f86818744..e20da63c402 100644 --- a/tests/components/scene/common.py +++ b/tests/components/scene/common.py @@ -6,12 +6,11 @@ components. Instead call the service directly. from homeassistant.components.scene import DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, SERVICE_TURN_ON -from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass @bind_hass -def activate(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def activate(hass, entity_id=ENTITY_MATCH_ALL): """Activate a scene.""" data = {} diff --git a/tests/components/scene/test_init.py b/tests/components/scene/test_init.py index 3747610298d..5afdebda9da 100644 --- a/tests/components/scene/test_init.py +++ b/tests/components/scene/test_init.py @@ -222,7 +222,7 @@ async def test_restore_state_does_not_restore_unavailable( assert hass.states.get("scene.test").state == STATE_UNKNOWN -async def activate(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +async def activate(hass, entity_id=ENTITY_MATCH_ALL): """Activate a scene.""" data = {} @@ -241,9 +241,7 @@ async def test_services_registered(hass: HomeAssistant) -> None: assert hass.services.has_service("scene", "apply") -async def setup_lights( - hass: HomeAssistant, entities: list[MockLight] -) -> tuple[MockLight, MockLight]: +async def setup_lights(hass, entities): """Set up the light component.""" assert await async_setup_component( hass, light.DOMAIN, {light.DOMAIN: {"platform": "test"}} @@ -263,7 +261,7 @@ async def setup_lights( return light_1, light_2 -async def turn_off_lights(hass: HomeAssistant, entity_ids: list[str]) -> None: +async def turn_off_lights(hass, entity_ids): """Turn lights off.""" await hass.services.async_call( "light", diff --git a/tests/components/schedule/test_init.py b/tests/components/schedule/test_init.py index 18346122bfd..c43b2500ccb 100644 --- a/tests/components/schedule/test_init.py +++ b/tests/components/schedule/test_init.py @@ -12,7 +12,6 @@ import pytest from homeassistant.components.schedule 
import STORAGE_VERSION, STORAGE_VERSION_MINOR from homeassistant.components.schedule.const import ( ATTR_NEXT_EVENT, - CONF_DATA, CONF_FRIDAY, CONF_FROM, CONF_MONDAY, @@ -32,12 +31,11 @@ from homeassistant.const import ( CONF_ICON, CONF_ID, CONF_NAME, - EVENT_STATE_CHANGED, SERVICE_RELOAD, STATE_OFF, STATE_ON, ) -from homeassistant.core import Context, HomeAssistant +from homeassistant.core import EVENT_STATE_CHANGED, Context, HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -67,21 +65,13 @@ def schedule_setup( CONF_NAME: "from storage", CONF_ICON: "mdi:party-popper", CONF_FRIDAY: [ - { - CONF_FROM: "17:00:00", - CONF_TO: "23:59:59", - CONF_DATA: {"party_level": "epic"}, - }, + {CONF_FROM: "17:00:00", CONF_TO: "23:59:59"}, ], CONF_SATURDAY: [ {CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}, ], CONF_SUNDAY: [ - { - CONF_FROM: "00:00:00", - CONF_TO: "24:00:00", - CONF_DATA: {"entry": "VIPs only"}, - }, + {CONF_FROM: "00:00:00", CONF_TO: "24:00:00"}, ], } ] @@ -104,21 +94,9 @@ def schedule_setup( CONF_TUESDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}], CONF_WEDNESDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}], CONF_THURSDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}], - CONF_FRIDAY: [ - { - CONF_FROM: "00:00:00", - CONF_TO: "23:59:59", - CONF_DATA: {"party_level": "epic"}, - } - ], + CONF_FRIDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}], CONF_SATURDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}], - CONF_SUNDAY: [ - { - CONF_FROM: "00:00:00", - CONF_TO: "23:59:59", - CONF_DATA: {"entry": "VIPs only"}, - } - ], + CONF_SUNDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}], } } } @@ -578,13 +556,13 @@ async def test_ws_list( assert len(result) == 1 assert result["from_storage"][ATTR_NAME] == "from storage" assert result["from_storage"][CONF_FRIDAY] == [ - {CONF_FROM: "17:00:00", CONF_TO: "23:59:59", CONF_DATA: {"party_level": "epic"}} + {CONF_FROM: "17:00:00", CONF_TO: "23:59:59"} ] assert result["from_storage"][CONF_SATURDAY] == [ {CONF_FROM: "00:00:00", CONF_TO: "23:59:59"} ] assert result["from_storage"][CONF_SUNDAY] == [ - {CONF_FROM: "00:00:00", CONF_TO: "24:00:00", CONF_DATA: {"entry": "VIPs only"}} + {CONF_FROM: "00:00:00", CONF_TO: "24:00:00"} ] assert "from_yaml" not in result diff --git a/tests/components/schedule/test_recorder.py b/tests/components/schedule/test_recorder.py index 85aef3e1990..a7410472a44 100644 --- a/tests/components/schedule/test_recorder.py +++ b/tests/components/schedule/test_recorder.py @@ -4,7 +4,6 @@ from __future__ import annotations from datetime import timedelta -from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.recorder.history import get_significant_states @@ -19,11 +18,8 @@ from tests.components.recorder.common import async_wait_recording_done @pytest.mark.usefixtures("recorder_mock", "enable_custom_integrations") -async def test_exclude_attributes( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: +async def test_exclude_attributes(hass: HomeAssistant) -> None: """Test attributes to be excluded.""" - freezer.move_to("2024-08-02 06:30:00-07:00") # Before Friday event now = dt_util.utcnow() assert await async_setup_component( hass, @@ -37,13 +33,9 @@ async def test_exclude_attributes( "tuesday": [{"from": "2:00", "to": "3:00"}], "wednesday": [{"from": "3:00", "to": "4:00"}], "thursday": [{"from": "5:00", "to": "6:00"}], - "friday": [ - {"from": "7:00", "to": "8:00", "data": {"party_level": "epic"}} - ], 
+ "friday": [{"from": "7:00", "to": "8:00"}], "saturday": [{"from": "9:00", "to": "10:00"}], - "sunday": [ - {"from": "11:00", "to": "12:00", "data": {"entry": "VIPs only"}} - ], + "sunday": [{"from": "11:00", "to": "12:00"}], } } }, @@ -56,25 +48,8 @@ async def test_exclude_attributes( assert state.attributes[ATTR_ICON] assert state.attributes[ATTR_NEXT_EVENT] - # Move to during Friday event - freezer.move_to("2024-08-02 07:30:00-07:00") - async_fire_time_changed(hass, fire_all=True) await hass.async_block_till_done() - state = hass.states.get("schedule.test") - assert "entry" not in state.attributes - assert state.attributes["party_level"] == "epic" - - # Move to during Sunday event - freezer.move_to("2024-08-04 11:30:00-07:00") - async_fire_time_changed(hass, fire_all=True) - await hass.async_block_till_done() - state = hass.states.get("schedule.test") - assert "party_level" not in state.attributes - assert state.attributes["entry"] == "VIPs only" - - await hass.async_block_till_done() - freezer.tick(timedelta(minutes=5)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5)) await hass.async_block_till_done() await async_wait_recording_done(hass) @@ -88,5 +63,3 @@ async def test_exclude_attributes( assert ATTR_FRIENDLY_NAME in state.attributes assert ATTR_ICON in state.attributes assert ATTR_NEXT_EVENT not in state.attributes - assert "entry" not in state.attributes - assert "party_level" not in state.attributes diff --git a/tests/components/schlage/conftest.py b/tests/components/schlage/conftest.py index f774b8cfb89..dcb6bc52a7b 100644 --- a/tests/components/schlage/conftest.py +++ b/tests/components/schlage/conftest.py @@ -1,11 +1,10 @@ """Common fixtures for the Schlage tests.""" -from collections.abc import Generator -from typing import Any from unittest.mock import AsyncMock, Mock, create_autospec, patch from pyschlage.lock import Lock import pytest +from typing_extensions import Generator from homeassistant.components.schlage.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME @@ -71,28 +70,21 @@ def mock_pyschlage_auth() -> Mock: @pytest.fixture -def mock_lock(mock_lock_attrs: dict[str, Any]) -> Mock: +def mock_lock() -> Mock: """Mock Lock fixture.""" mock_lock = create_autospec(Lock) - mock_lock.configure_mock(**mock_lock_attrs) + mock_lock.configure_mock( + device_id="test", + name="Vault Door", + model_name="", + is_locked=False, + is_jammed=False, + battery_level=20, + firmware_version="1.0", + lock_and_leave_enabled=True, + beeper_enabled=True, + ) mock_lock.logs.return_value = [] mock_lock.last_changed_by.return_value = "thumbturn" mock_lock.keypad_disabled.return_value = False return mock_lock - - -@pytest.fixture -def mock_lock_attrs() -> dict[str, Any]: - """Attributes for a mock lock.""" - return { - "device_id": "test", - "name": "Vault Door", - "model_name": "", - "is_locked": False, - "is_jammed": False, - "battery_level": 20, - "auto_lock_time": 15, - "firmware_version": "1.0", - "lock_and_leave_enabled": True, - "beeper_enabled": True, - } diff --git a/tests/components/schlage/snapshots/test_init.ambr b/tests/components/schlage/snapshots/test_init.ambr deleted file mode 100644 index c7049443ab7..00000000000 --- a/tests/components/schlage/snapshots/test_init.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: test_lock_device_registry - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 
'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'schlage', - 'test', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Schlage', - 'model': '', - 'model_id': None, - 'name': 'Vault Door', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': '1.0', - 'via_device_id': None, - }) -# --- diff --git a/tests/components/schlage/test_binary_sensor.py b/tests/components/schlage/test_binary_sensor.py index 91bd996ba5b..97f11577b86 100644 --- a/tests/components/schlage/test_binary_sensor.py +++ b/tests/components/schlage/test_binary_sensor.py @@ -3,47 +3,37 @@ from datetime import timedelta from unittest.mock import Mock -from freezegun.api import FrozenDateTimeFactory from pyschlage.exceptions import UnknownError from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_ON from homeassistant.core import HomeAssistant +from homeassistant.util.dt import utcnow from tests.common import async_fire_time_changed async def test_keypad_disabled_binary_sensor( - hass: HomeAssistant, - mock_schlage: Mock, - mock_lock: Mock, - mock_added_config_entry: ConfigEntry, - freezer: FrozenDateTimeFactory, + hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry ) -> None: """Test the keypad_disabled binary_sensor.""" mock_lock.keypad_disabled.reset_mock() mock_lock.keypad_disabled.return_value = True # Make the coordinator refresh data. - freezer.tick(timedelta(seconds=30)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=31)) await hass.async_block_till_done(wait_background_tasks=True) keypad = hass.states.get("binary_sensor.vault_door_keypad_disabled") assert keypad is not None - assert keypad.state == STATE_ON + assert keypad.state == "on" assert keypad.attributes["device_class"] == BinarySensorDeviceClass.PROBLEM mock_lock.keypad_disabled.assert_called_once_with([]) async def test_keypad_disabled_binary_sensor_use_previous_logs_on_failure( - hass: HomeAssistant, - mock_schlage: Mock, - mock_lock: Mock, - mock_added_config_entry: ConfigEntry, - freezer: FrozenDateTimeFactory, + hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry ) -> None: """Test the keypad_disabled binary_sensor.""" mock_lock.keypad_disabled.reset_mock() @@ -52,13 +42,12 @@ async def test_keypad_disabled_binary_sensor_use_previous_logs_on_failure( mock_lock.logs.side_effect = UnknownError("Cannot load logs") # Make the coordinator refresh data. 
- freezer.tick(timedelta(seconds=30)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=31)) await hass.async_block_till_done(wait_background_tasks=True) keypad = hass.states.get("binary_sensor.vault_door_keypad_disabled") assert keypad is not None - assert keypad.state == STATE_ON + assert keypad.state == "on" assert keypad.attributes["device_class"] == BinarySensorDeviceClass.PROBLEM mock_lock.keypad_disabled.assert_called_once_with([]) diff --git a/tests/components/schlage/test_config_flow.py b/tests/components/schlage/test_config_flow.py index 7f4a40f9b53..15ef3858c0c 100644 --- a/tests/components/schlage/test_config_flow.py +++ b/tests/components/schlage/test_config_flow.py @@ -15,18 +15,8 @@ from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") -@pytest.mark.parametrize( - "username", - [ - "test-username", - "TEST-USERNAME", - ], -) async def test_form( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_pyschlage_auth: Mock, - username: str, + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_pyschlage_auth: Mock ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -38,7 +28,7 @@ async def test_form( result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "username": username, + "username": "test-username", "password": "test-password", }, ) diff --git a/tests/components/schlage/test_init.py b/tests/components/schlage/test_init.py index e40fc83a7ac..0fe7af1982b 100644 --- a/tests/components/schlage/test_init.py +++ b/tests/components/schlage/test_init.py @@ -1,21 +1,14 @@ """Tests for the Schlage integration.""" -from typing import Any -from unittest.mock import Mock, create_autospec, patch +from unittest.mock import Mock, patch -from freezegun.api import FrozenDateTimeFactory from pycognito.exceptions import WarrantException from pyschlage.exceptions import Error, NotAuthorizedError -from pyschlage.lock import Lock -from syrupy.assertion import SnapshotAssertion -from homeassistant.components.schlage.const import DOMAIN, UPDATE_INTERVAL -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -import homeassistant.helpers.device_registry as dr -from homeassistant.helpers.device_registry import DeviceRegistry -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry @patch( @@ -101,74 +94,3 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.NOT_LOADED - - -async def test_lock_device_registry( - hass: HomeAssistant, - device_registry: DeviceRegistry, - mock_added_config_entry: ConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test lock is added to device registry.""" - device = device_registry.async_get_device(identifiers={(DOMAIN, "test")}) - assert device == snapshot - - -async def test_auto_add_device( - hass: HomeAssistant, - device_registry: DeviceRegistry, - mock_added_config_entry: ConfigEntry, - mock_schlage: Mock, - mock_lock: Mock, - mock_lock_attrs: dict[str, Any], - freezer: FrozenDateTimeFactory, -) -> None: - """Test new devices are auto-added to the device registry.""" - device = device_registry.async_get_device(identifiers={(DOMAIN, "test")}) - assert device is not None - all_devices 
= dr.async_entries_for_config_entry( - device_registry, mock_added_config_entry.entry_id - ) - assert len(all_devices) == 1 - - mock_lock_attrs["device_id"] = "test2" - new_mock_lock = create_autospec(Lock) - new_mock_lock.configure_mock(**mock_lock_attrs) - mock_schlage.locks.return_value = [mock_lock, new_mock_lock] - - # Make the coordinator refresh data. - freezer.tick(UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - new_device = device_registry.async_get_device(identifiers={(DOMAIN, "test2")}) - assert new_device is not None - - all_devices = dr.async_entries_for_config_entry( - device_registry, mock_added_config_entry.entry_id - ) - assert len(all_devices) == 2 - - -async def test_auto_remove_device( - hass: HomeAssistant, - device_registry: DeviceRegistry, - mock_added_config_entry: ConfigEntry, - mock_schlage: Mock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test new devices are auto-added to the device registry.""" - assert device_registry.async_get_device(identifiers={(DOMAIN, "test")}) is not None - - mock_schlage.locks.return_value = [] - - # Make the coordinator refresh data. - freezer.tick(UPDATE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - assert device_registry.async_get_device(identifiers={(DOMAIN, "test")}) is None - all_devices = dr.async_entries_for_config_entry( - device_registry, mock_added_config_entry.entry_id - ) - assert len(all_devices) == 0 diff --git a/tests/components/schlage/test_lock.py b/tests/components/schlage/test_lock.py index 518c723d581..6c06f124693 100644 --- a/tests/components/schlage/test_lock.py +++ b/tests/components/schlage/test_lock.py @@ -3,38 +3,27 @@ from datetime import timedelta from unittest.mock import Mock -from freezegun.api import FrozenDateTimeFactory - -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ENTITY_ID, SERVICE_LOCK, SERVICE_UNLOCK from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.util.dt import utcnow from tests.common import async_fire_time_changed -async def test_lock_attributes( +async def test_lock_device_registry( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, mock_added_config_entry: ConfigEntry, - mock_schlage: Mock, - mock_lock: Mock, - freezer: FrozenDateTimeFactory, ) -> None: - """Test lock attributes.""" - lock = hass.states.get("lock.vault_door") - assert lock is not None - assert lock.state == LockState.UNLOCKED - assert lock.attributes["changed_by"] == "thumbturn" - - mock_lock.is_locked = False - mock_lock.is_jammed = True - # Make the coordinator refresh data. 
- freezer.tick(timedelta(seconds=30)) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - lock = hass.states.get("lock.vault_door") - assert lock is not None - assert lock.state == LockState.JAMMED + """Test lock is added to device registry.""" + device = device_registry.async_get_device(identifiers={("schlage", "test")}) + assert device.model == "" + assert device.sw_version == "1.0" + assert device.name == "Vault Door" + assert device.manufacturer == "Schlage" async def test_lock_services( @@ -63,20 +52,16 @@ async def test_lock_services( async def test_changed_by( - hass: HomeAssistant, - mock_lock: Mock, - mock_added_config_entry: ConfigEntry, - freezer: FrozenDateTimeFactory, + hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry ) -> None: """Test population of the changed_by attribute.""" mock_lock.last_changed_by.reset_mock() mock_lock.last_changed_by.return_value = "access code - foo" # Make the coordinator refresh data. - freezer.tick(timedelta(seconds=30)) - async_fire_time_changed(hass) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=31)) await hass.async_block_till_done(wait_background_tasks=True) - mock_lock.last_changed_by.assert_called_with() + mock_lock.last_changed_by.assert_called_once_with() lock_device = hass.states.get("lock.vault_door") assert lock_device is not None diff --git a/tests/components/schlage/test_select.py b/tests/components/schlage/test_select.py deleted file mode 100644 index c27fd4c8813..00000000000 --- a/tests/components/schlage/test_select.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Test Schlage select.""" - -from unittest.mock import Mock - -from homeassistant.components.select import ( - ATTR_OPTION, - DOMAIN as SELECT_DOMAIN, - SERVICE_SELECT_OPTION, -) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant - - -async def test_select( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry -) -> None: - """Test the auto-lock time select entity.""" - entity_id = "select.vault_door_auto_lock_time" - - select = hass.states.get(entity_id) - assert select is not None - assert select.state == "15" - - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "30"}, - blocking=True, - ) - mock_lock.set_auto_lock_time.assert_called_once_with(30) diff --git a/tests/components/schlage/test_sensor.py b/tests/components/schlage/test_sensor.py index 9fa90edecbb..2c0cabbb1e8 100644 --- a/tests/components/schlage/test_sensor.py +++ b/tests/components/schlage/test_sensor.py @@ -4,6 +4,20 @@ from homeassistant.components.sensor import SensorDeviceClass from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + + +async def test_sensor_device_registry( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_added_config_entry: ConfigEntry, +) -> None: + """Test sensor is added to device registry.""" + device = device_registry.async_get_device(identifiers={("schlage", "test")}) + assert device.model == "" + assert device.sw_version == "1.0" + assert device.name == "Vault Door" + assert device.manufacturer == "Schlage" async def test_battery_sensor( diff --git a/tests/components/schlage/test_switch.py b/tests/components/schlage/test_switch.py index 52b8da81670..f1cded3ce22 
100644
--- a/tests/components/schlage/test_switch.py
+++ b/tests/components/schlage/test_switch.py
@@ -6,6 +6,20 @@ from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import device_registry as dr
+
+
+async def test_switch_device_registry(
+    hass: HomeAssistant,
+    device_registry: dr.DeviceRegistry,
+    mock_added_config_entry: ConfigEntry,
+) -> None:
+    """Test switch is added to device registry."""
+    device = device_registry.async_get_device(identifiers={("schlage", "test")})
+    assert device.model == ""
+    assert device.sw_version == "1.0"
+    assert device.name == "Vault Door"
+    assert device.manufacturer == "Schlage"
 
 
 async def test_beeper_services(
diff --git a/tests/components/scrape/conftest.py b/tests/components/scrape/conftest.py
index 5b84f4fd44a..f6109dbc19a 100644
--- a/tests/components/scrape/conftest.py
+++ b/tests/components/scrape/conftest.py
@@ -2,12 +2,12 @@
 
 from __future__ import annotations
 
-from collections.abc import Generator
 from typing import Any
 from unittest.mock import AsyncMock, patch
 import uuid
 
 import pytest
+from typing_extensions import Generator
 
 from homeassistant.components.rest.data import DEFAULT_TIMEOUT
 from homeassistant.components.rest.schema import DEFAULT_METHOD, DEFAULT_VERIFY_SSL
diff --git a/tests/components/screenlogic/__init__.py b/tests/components/screenlogic/__init__.py
index 169c1f28900..9c8a21b1ba4 100644
--- a/tests/components/screenlogic/__init__.py
+++ b/tests/components/screenlogic/__init__.py
@@ -20,7 +20,7 @@ GATEWAY_IMPORT_PATH = "homeassistant.components.screenlogic.ScreenLogicGateway"
 GATEWAY_DISCOVERY_IMPORT_PATH = "homeassistant.components.screenlogic.coordinator.async_discover_gateways_by_unique_id"
 
 
-def num_key_string_to_int(data: dict) -> dict:
+def num_key_string_to_int(data: dict) -> None:
     """Convert all string number dict keys to integer.
 
     This needed for screenlogicpy's data dict format.
diff --git a/tests/components/screenlogic/snapshots/test_diagnostics.ambr b/tests/components/screenlogic/snapshots/test_diagnostics.ambr
index 237d3eab257..534c77223d6 100644
--- a/tests/components/screenlogic/snapshots/test_diagnostics.ambr
+++ b/tests/components/screenlogic/snapshots/test_diagnostics.ambr
@@ -7,8 +7,6 @@
         'port': 80,
       }),
       'disabled_by': None,
-      'discovery_keys': dict({
-      }),
       'domain': 'screenlogic',
       'entry_id': 'screenlogictest',
       'minor_version': 1,
diff --git a/tests/components/screenlogic/test_diagnostics.py b/tests/components/screenlogic/test_diagnostics.py
index 77e1ce58dad..c6d6ea60e87 100644
--- a/tests/components/screenlogic/test_diagnostics.py
+++ b/tests/components/screenlogic/test_diagnostics.py
@@ -4,7 +4,6 @@ from unittest.mock import DEFAULT, patch
 
 from screenlogicpy import ScreenLogicGateway
 from syrupy.assertion import SnapshotAssertion
-from syrupy.filters import props
 
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dr
@@ -57,4 +56,4 @@ async def test_diagnostics(
         hass, hass_client, mock_config_entry
     )
 
-    assert diag == snapshot(exclude=props("created_at", "modified_at"))
+    assert diag == snapshot
diff --git a/tests/components/screenlogic/test_services.py b/tests/components/screenlogic/test_services.py
index 8a414ba2596..d175ea27c84 100644
--- a/tests/components/screenlogic/test_services.py
+++ b/tests/components/screenlogic/test_services.py
@@ -1,12 +1,12 @@
 """Tests for ScreenLogic integration service calls."""
 
-from collections.abc import AsyncGenerator
 from typing import Any
 from unittest.mock import DEFAULT, AsyncMock, patch
 
 import pytest
 from screenlogicpy import ScreenLogicGateway
 from screenlogicpy.device_const.system import COLOR_MODE
+from typing_extensions import AsyncGenerator
 
 from homeassistant.components.screenlogic import DOMAIN
 from homeassistant.components.screenlogic.const import (
@@ -18,9 +18,11 @@ from homeassistant.components.screenlogic.const import (
     SERVICE_STOP_SUPER_CHLORINATION,
 )
 from homeassistant.config_entries import ConfigEntryState
+from homeassistant.const import ATTR_AREA_ID, ATTR_DEVICE_ID, ATTR_ENTITY_ID, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ServiceValidationError
 from homeassistant.helpers import device_registry as dr
+from homeassistant.util import slugify
 
 from .
import ( DATA_FULL_CHEM, @@ -100,6 +102,22 @@ async def setup_screenlogic_services_fixture( }, None, ), + ( + { + ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), + }, + { + ATTR_AREA_ID: MOCK_DEVICE_AREA, + }, + ), + ( + { + ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), + }, + { + ATTR_ENTITY_ID: f"{Platform.SENSOR}.{slugify(f'{MOCK_ADAPTER_NAME} Air Temperature')}", + }, + ), ], ) async def test_service_set_color_mode( @@ -130,6 +148,30 @@ async def test_service_set_color_mode( mocked_async_set_color_lights.assert_awaited_once() +async def test_service_set_color_mode_with_device( + hass: HomeAssistant, + service_fixture: dict[str, Any], +) -> None: + """Test set_color_mode service with a device target.""" + mocked_async_set_color_lights: AsyncMock = service_fixture["gateway"][ + "async_set_color_lights" + ] + + assert hass.services.has_service(DOMAIN, SERVICE_SET_COLOR_MODE) + + sl_device: dr.DeviceEntry = service_fixture["device"] + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_COLOR_MODE, + service_data={ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower()}, + blocking=True, + target={ATTR_DEVICE_ID: sl_device.id}, + ) + + mocked_async_set_color_lights.assert_awaited_once() + + @pytest.mark.parametrize( ("data", "target", "error_msg"), [ @@ -151,6 +193,36 @@ async def test_service_set_color_mode( f"Failed to call service '{SERVICE_SET_COLOR_MODE}'. Config entry " "'test' is not a screenlogic config", ), + ( + { + ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), + }, + { + ATTR_AREA_ID: "invalidareaid", + }, + f"Failed to call service '{SERVICE_SET_COLOR_MODE}'. Config entry for " + "target not found", + ), + ( + { + ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), + }, + { + ATTR_DEVICE_ID: "invaliddeviceid", + }, + f"Failed to call service '{SERVICE_SET_COLOR_MODE}'. Config entry for " + "target not found", + ), + ( + { + ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), + }, + { + ATTR_ENTITY_ID: "sensor.invalidentityid", + }, + f"Failed to call service '{SERVICE_SET_COLOR_MODE}'. 
Config entry for " + "target not found", + ), ], ) async def test_service_set_color_mode_error( diff --git a/tests/components/script/test_blueprint.py b/tests/components/script/test_blueprint.py index 7f03a89c548..b956aa588cb 100644 --- a/tests/components/script/test_blueprint.py +++ b/tests/components/script/test_blueprint.py @@ -9,11 +9,7 @@ from unittest.mock import patch import pytest from homeassistant.components import script -from homeassistant.components.blueprint import ( - BLUEPRINT_SCHEMA, - Blueprint, - DomainBlueprints, -) +from homeassistant.components.blueprint.models import Blueprint, DomainBlueprints from homeassistant.config_entries import ConfigEntryState from homeassistant.core import Context, HomeAssistant, callback from homeassistant.helpers import device_registry as dr, template @@ -37,10 +33,7 @@ def patch_blueprint(blueprint_path: str, data_path: str) -> Iterator[None]: return orig_load(self, path) return Blueprint( - yaml.load_yaml(data_path), - expected_domain=self.domain, - path=path, - schema=BLUEPRINT_SCHEMA, + yaml.load_yaml(data_path), expected_domain=self.domain, path=path ) with patch( @@ -81,7 +74,7 @@ async def test_confirmable_notification( "message": "Throw ring in mountain?", "confirm_action": [ { - "action": "homeassistant.turn_on", + "service": "homeassistant.turn_on", "target": {"entity_id": "mount.doom"}, } ], @@ -116,6 +109,7 @@ async def test_confirmable_notification( assert len(mock_call_action.mock_calls) == 1 _hass, config, variables, _context = mock_call_action.mock_calls[0][1] + template.attach(hass, config) rendered_config = template.render_complex(config, variables) assert rendered_config == { diff --git a/tests/components/script/test_init.py b/tests/components/script/test_init.py index a5eda3757a9..2352e9c64e6 100644 --- a/tests/components/script/test_init.py +++ b/tests/components/script/test_init.py @@ -3,7 +3,7 @@ import asyncio from datetime import timedelta from typing import Any -from unittest.mock import ANY, Mock, patch +from unittest.mock import Mock, patch import pytest @@ -29,8 +29,8 @@ from homeassistant.core import ( callback, split_entity_id, ) -from homeassistant.exceptions import ServiceNotFound, TemplateError -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.exceptions import ServiceNotFound +from homeassistant.helpers import device_registry as dr, entity_registry as er, template from homeassistant.helpers.event import async_track_state_change from homeassistant.helpers.script import ( SCRIPT_MODE_CHOICES, @@ -47,13 +47,11 @@ import homeassistant.util.dt as dt_util from tests.common import ( MockConfigEntry, - MockUser, async_fire_time_changed, async_mock_service, mock_restore_cache, ) from tests.components.logbook.common import MockRow, mock_humanify -from tests.components.repairs import get_repairs from tests.typing import WebSocketGenerator ENTITY_ID = "script.test" @@ -85,7 +83,7 @@ async def test_passing_variables(hass: HomeAssistant) -> None: "script": { "test": { "sequence": { - "action": "test.script", + "service": "test.script", "data_template": {"hello": "{{ greeting }}"}, } } @@ -115,14 +113,8 @@ async def test_passing_variables(hass: HomeAssistant) -> None: @pytest.mark.parametrize("toggle", [False, True]) -@pytest.mark.parametrize("action_schema_variations", ["action", "service"]) -async def test_turn_on_off_toggle( - hass: HomeAssistant, toggle: bool, action_schema_variations: str -) -> None: - """Verify turn_on, turn_off & toggle services. 
- - Ensures backward compatibility with the old service action schema is maintained. - """ +async def test_turn_on_off_toggle(hass: HomeAssistant, toggle) -> None: + """Verify turn_on, turn_off & toggle services.""" event = "test_event" event_mock = Mock() @@ -138,15 +130,9 @@ async def test_turn_on_off_toggle( async_track_state_change(hass, ENTITY_ID, state_listener, to_state="on") if toggle: - turn_off_step = { - action_schema_variations: "script.toggle", - "entity_id": ENTITY_ID, - } + turn_off_step = {"service": "script.toggle", "entity_id": ENTITY_ID} else: - turn_off_step = { - action_schema_variations: "script.turn_off", - "entity_id": ENTITY_ID, - } + turn_off_step = {"service": "script.turn_off", "entity_id": ENTITY_ID} assert await async_setup_component( hass, "script", @@ -177,7 +163,7 @@ async def test_turn_on_off_toggle( invalid_configs = [ {"test": {}}, {"test hello world": {"sequence": [{"event": "bla"}]}}, - {"test": {"sequence": {"event": "test_event", "action": "homeassistant.turn_on"}}}, + {"test": {"sequence": {"event": "test_event", "service": "homeassistant.turn_on"}}}, ] @@ -192,7 +178,7 @@ invalid_configs = [ "test": { "sequence": { "event": "test_event", - "action": "homeassistant.turn_on", + "service": "homeassistant.turn_on", } } }, @@ -247,7 +233,7 @@ async def test_bad_config_validation_critical( "good_script": { "alias": "good_script", "sequence": { - "action": "test.automation", + "service": "test.automation", "entity_id": "hello.world", }, }, @@ -266,14 +252,13 @@ async def test_bad_config_validation_critical( @pytest.mark.parametrize( - ("object_id", "broken_config", "problem", "details", "issue"), + ("object_id", "broken_config", "problem", "details"), [ ( "bad_script", {}, "could not be validated", "required key not provided @ data['sequence']", - "validation_failed_schema", ), ( "bad_script", @@ -285,22 +270,18 @@ async def test_bad_config_validation_critical( "state": "blah", }, }, - "failed to setup sequence", + "failed to setup actions", "Unknown entity registry entry abcdabcdabcdabcdabcdabcdabcdabcd.", - "validation_failed_sequence", ), ], ) async def test_bad_config_validation( hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, caplog: pytest.LogCaptureFixture, - hass_admin_user: MockUser, object_id, broken_config, problem, details, - issue, ) -> None: """Test bad script configuration which can be detected during validation.""" assert await async_setup_component( @@ -312,7 +293,7 @@ async def test_bad_config_validation( "good_script": { "alias": "good_script", "sequence": { - "action": "test.automation", + "service": "test.automation", "entity_id": "hello.world", }, }, @@ -320,22 +301,11 @@ async def test_bad_config_validation( }, ) - # Check we get the expected error message and issue + # Check we get the expected error message assert ( f"Script with alias 'bad_script' {problem} and has been disabled: {details}" in caplog.text ) - issues = await get_repairs(hass, hass_ws_client) - assert len(issues) == 1 - assert issues[0]["issue_id"] == f"script.bad_script_{issue}" - assert issues[0]["translation_key"] == issue - assert issues[0]["translation_placeholders"] == { - "edit": "/config/script/edit/bad_script", - "entity_id": "script.bad_script", - "error": ANY, - "name": "bad_script", - } - assert issues[0]["translation_placeholders"]["error"].startswith(details) # Make sure both scripts are setup assert set(hass.states.async_entity_ids("script")) == { @@ -345,31 +315,6 @@ async def test_bad_config_validation( # The script failing validation 
should be unavailable assert hass.states.get("script.bad_script").state == STATE_UNAVAILABLE - # Reloading the automation with fixed config should clear the issue - with patch( - "homeassistant.config.load_yaml_config_file", - autospec=True, - return_value={ - script.DOMAIN: { - object_id: { - "alias": "bad_script", - "sequence": { - "action": "test.automation", - "entity_id": "hello.world", - }, - }, - } - }, - ): - await hass.services.async_call( - script.DOMAIN, - SERVICE_RELOAD, - context=Context(user_id=hass_admin_user.id), - blocking=True, - ) - issues = await get_repairs(hass, hass_ws_client) - assert len(issues) == 0 - @pytest.mark.parametrize("running", ["no", "same", "different"]) async def test_reload_service(hass: HomeAssistant, running) -> None: @@ -442,7 +387,7 @@ async def test_reload_unchanged_does_not_stop( "sequence": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"action": "test.script"}, + {"service": "test.script"}, ], } } @@ -485,13 +430,13 @@ async def test_reload_unchanged_does_not_stop( [ { "test": { - "sequence": [{"action": "test.script"}], + "sequence": [{"service": "test.script"}], } }, # A script using templates { "test": { - "sequence": [{"action": "{{ 'test.script' }}"}], + "sequence": [{"service": "{{ 'test.script' }}"}], } }, # A script using blueprint @@ -678,7 +623,7 @@ async def test_logging_script_error( assert await async_setup_component( hass, "script", - {"script": {"hello": {"sequence": [{"action": "non.existing"}]}}}, + {"script": {"hello": {"sequence": [{"service": "non.existing"}]}}}, ) with pytest.raises(ServiceNotFound) as err: await hass.services.async_call("script", "hello", blocking=True) @@ -702,7 +647,7 @@ async def test_async_get_descriptions_script(hass: HomeAssistant) -> None: """Test async_set_service_schema for the script integration.""" script_config = { DOMAIN: { - "test1": {"sequence": [{"action": "homeassistant.restart"}]}, + "test1": {"sequence": [{"service": "homeassistant.restart"}]}, "test2": { "description": "test2", "fields": { @@ -711,7 +656,7 @@ async def test_async_get_descriptions_script(hass: HomeAssistant) -> None: "example": "param_example", } }, - "sequence": [{"action": "homeassistant.restart"}], + "sequence": [{"service": "homeassistant.restart"}], }, } } @@ -807,11 +752,11 @@ async def test_extraction_functions( "test1": { "sequence": [ { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.in_first"}, }, { @@ -821,15 +766,15 @@ async def test_extraction_functions( "device_id": device_in_both.id, }, { - "action": "test.test", + "service": "test.test", "target": {"area_id": "area-in-both"}, }, { - "action": "test.test", + "service": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "action": "test.test", + "service": "test.test", "target": {"label_id": "label-in-both"}, }, ] @@ -837,7 +782,7 @@ async def test_extraction_functions( "test2": { "sequence": [ { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -863,7 +808,7 @@ async def test_extraction_functions( "test3": { "sequence": [ { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -873,27 +818,27 @@ async def test_extraction_functions( }, {"scene": "scene.hello"}, { - "action": "test.test", + "service": "test.test", "target": {"area_id": "area-in-both"}, }, { - "action": 
"test.test", + "service": "test.test", "target": {"area_id": "area-in-last"}, }, { - "action": "test.test", + "service": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "action": "test.test", + "service": "test.test", "target": {"floor_id": "floor-in-last"}, }, { - "action": "test.test", + "service": "test.test", "target": {"label_id": "label-in-both"}, }, { - "action": "test.test", + "service": "test.test", "target": {"label_id": "label-in-last"}, }, ], @@ -1040,11 +985,11 @@ async def test_concurrent_script(hass: HomeAssistant, concurrently) -> None: """Test calling script concurrently or not.""" if concurrently: call_script_2 = { - "action": "script.turn_on", + "service": "script.turn_on", "data": {"entity_id": "script.script2"}, } else: - call_script_2 = {"action": "script.script2"} + call_script_2 = {"service": "script.script2"} assert await async_setup_component( hass, "script", @@ -1057,17 +1002,17 @@ async def test_concurrent_script(hass: HomeAssistant, concurrently) -> None: { "wait_template": "{{ is_state('input_boolean.test1', 'on') }}" }, - {"action": "test.script", "data": {"value": "script1"}}, + {"service": "test.script", "data": {"value": "script1"}}, ], }, "script2": { "mode": "parallel", "sequence": [ - {"action": "test.script", "data": {"value": "script2a"}}, + {"service": "test.script", "data": {"value": "script2a"}}, { "wait_template": "{{ is_state('input_boolean.test2', 'on') }}" }, - {"action": "test.script", "data": {"value": "script2b"}}, + {"service": "test.script", "data": {"value": "script2b"}}, ], }, } @@ -1138,7 +1083,7 @@ async def test_script_variables( }, "sequence": [ { - "action": "test.script", + "service": "test.script", "data": { "value": "{{ test_var }}", "templated_config_var": "{{ templated_config_var }}", @@ -1154,7 +1099,7 @@ async def test_script_variables( }, "sequence": [ { - "action": "test.script", + "service": "test.script", "data": { "value": "{{ test_var }}", }, @@ -1167,7 +1112,7 @@ async def test_script_variables( }, "sequence": [ { - "action": "test.script", + "service": "test.script", "data": { "value": "{{ test_var }}", }, @@ -1209,7 +1154,7 @@ async def test_script_variables( assert mock_calls[2].data["value"] == "from_service" assert "Error rendering variables" not in caplog.text - with pytest.raises(TemplateError): + with pytest.raises(template.TemplateError): await hass.services.async_call("script", "script3", blocking=True) assert "Error rendering variables" in caplog.text assert len(mock_calls) == 3 @@ -1233,7 +1178,7 @@ async def test_script_this_var_always( "script1": { "sequence": [ { - "action": "test.script", + "service": "test.script", "data": { "this_template": "{{this.entity_id}}", }, @@ -1318,8 +1263,8 @@ async def test_recursive_script( "script1": { "mode": script_mode, "sequence": [ - {"action": "script.script1"}, - {"action": "test.script"}, + {"service": "script.script1"}, + {"service": "test.script"}, ], }, } @@ -1368,26 +1313,26 @@ async def test_recursive_script_indirect( "script1": { "mode": script_mode, "sequence": [ - {"action": "script.script2"}, + {"service": "script.script2"}, ], }, "script2": { "mode": script_mode, "sequence": [ - {"action": "script.script3"}, + {"service": "script.script3"}, ], }, "script3": { "mode": script_mode, "sequence": [ - {"action": "script.script4"}, + {"service": "script.script4"}, ], }, "script4": { "mode": script_mode, "sequence": [ - {"action": "script.script1"}, - {"action": "test.script"}, + {"service": "script.script1"}, + {"service": "test.script"}, ], }, } 
@@ -1452,10 +1397,10 @@ async def test_recursive_script_turn_on( "condition": "template", "value_template": "{{ request == 'step_2' }}", }, - "sequence": {"action": "test.script_done"}, + "sequence": {"service": "test.script_done"}, }, "default": { - "action": "script.turn_on", + "service": "script.turn_on", "data": { "entity_id": "script.script1", "variables": {"request": "step_2"}, @@ -1463,7 +1408,7 @@ async def test_recursive_script_turn_on( }, }, { - "action": "script.turn_on", + "service": "script.turn_on", "data": {"entity_id": "script.script1"}, }, ], @@ -1525,7 +1470,7 @@ async def test_websocket_config( """Test config command.""" config = { "alias": "hello", - "sequence": [{"action": "light.turn_on"}], + "sequence": [{"service": "light.turn_on"}], } assert await async_setup_component( hass, @@ -1589,7 +1534,7 @@ async def test_script_service_changed_entity_id( "script": { "test": { "sequence": { - "action": "test.script", + "service": "test.script", "data_template": {"entity_id": "{{ this.entity_id }}"}, } } @@ -1618,7 +1563,9 @@ async def test_script_service_changed_entity_id( assert calls[1].data["entity_id"] == "script.custom_entity_id_2" -async def test_blueprint_script(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_blueprint_automation( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: """Test blueprint script.""" assert await async_setup_component( hass, @@ -1670,13 +1617,12 @@ async def test_blueprint_script(hass: HomeAssistant, calls: list[ServiceCall]) - "a_number": 5, }, "Blueprint 'Call service' generated invalid script", - "value should be a string for dictionary value @ data['sequence'][0]['action']", + "value should be a string for dictionary value @ data['sequence'][0]['service']", ), ], ) async def test_blueprint_script_bad_config( hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, caplog: pytest.LogCaptureFixture, blueprint_inputs, problem, @@ -1700,24 +1646,9 @@ async def test_blueprint_script_bad_config( assert problem in caplog.text assert details in caplog.text - issues = await get_repairs(hass, hass_ws_client) - assert len(issues) == 1 - issue = "validation_failed_blueprint" - assert issues[0]["issue_id"] == f"script.test_script_{issue}" - assert issues[0]["translation_key"] == issue - assert issues[0]["translation_placeholders"] == { - "edit": "/config/script/edit/test_script", - "entity_id": "script.test_script", - "error": ANY, - "name": "test_script", - } - assert issues[0]["translation_placeholders"]["error"].startswith(details) - async def test_blueprint_script_fails_substitution( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test blueprint script with bad inputs.""" with patch( @@ -1746,18 +1677,6 @@ async def test_blueprint_script_fails_substitution( in caplog.text ) - issues = await get_repairs(hass, hass_ws_client) - assert len(issues) == 1 - issue = "validation_failed_blueprint" - assert issues[0]["issue_id"] == f"script.test_script_{issue}" - assert issues[0]["translation_key"] == issue - assert issues[0]["translation_placeholders"] == { - "edit": "/config/script/edit/test_script", - "entity_id": "script.test_script", - "error": "No substitution found for input blah", - "name": "test_script", - } - @pytest.mark.parametrize("response", [{"value": 5}, '{"value": 5}']) async def test_responses(hass: HomeAssistant, response: Any) -> None: @@ -1851,10 +1770,10 @@ async def 
test_script_queued_mode(hass: HomeAssistant) -> None: "sequence": [ { "parallel": [ - {"action": "script.test_sub"}, - {"action": "script.test_sub"}, - {"action": "script.test_sub"}, - {"action": "script.test_sub"}, + {"service": "script.test_sub"}, + {"service": "script.test_sub"}, + {"service": "script.test_sub"}, + {"service": "script.test_sub"}, ] } ] @@ -1862,7 +1781,7 @@ async def test_script_queued_mode(hass: HomeAssistant) -> None: "test_sub": { "mode": "queued", "sequence": [ - {"action": "test.simulated_remote"}, + {"service": "test.simulated_remote"}, ], }, } diff --git a/tests/components/script/test_recorder.py b/tests/components/script/test_recorder.py index 6358093014a..ca915cede6f 100644 --- a/tests/components/script/test_recorder.py +++ b/tests/components/script/test_recorder.py @@ -52,7 +52,7 @@ async def test_exclude_attributes( "script": { "test": { "sequence": { - "action": "test.script", + "service": "test.script", "data_template": {"hello": "{{ greeting }}"}, } } diff --git a/tests/components/search/test_init.py b/tests/components/search/test_init.py index 2c00c3bf6f2..a817fbfc39e 100644 --- a/tests/components/search/test_init.py +++ b/tests/components/search/test_init.py @@ -250,7 +250,7 @@ async def test_search( { "id": "unique_id", "alias": "blueprint_automation_1", - "triggers": {"platform": "template", "value_template": "true"}, + "trigger": {"platform": "template", "value_template": "true"}, "use_blueprint": { "path": "test_event_service.yaml", "input": { @@ -262,7 +262,7 @@ async def test_search( }, { "alias": "blueprint_automation_2", - "triggers": {"platform": "template", "value_template": "true"}, + "trigger": {"platform": "template", "value_template": "true"}, "use_blueprint": { "path": "test_event_service.yaml", "input": { @@ -534,14 +534,12 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.ENTITY: {wled_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, - ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.AUTOMATION, "automation.wled_device") == { ItemType.AREA: {living_room_area.id}, ItemType.CONFIG_ENTRY: {wled_config_entry.entry_id}, ItemType.DEVICE: {wled_device.id}, ItemType.FLOOR: {first_floor.floor_id}, - ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.AUTOMATION, "automation.floor") == { ItemType.FLOOR: {first_floor.floor_id}, @@ -563,7 +561,6 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled_hue"}, - ItemType.INTEGRATION: {"hue", "wled"}, } assert search(ItemType.AUTOMATION, "automation.scene") == { ItemType.AREA: {bedroom_area.id, kitchen_area.id, living_room_area.id}, @@ -577,7 +574,6 @@ async def test_search( scene_wled_hue_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, - ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } assert search(ItemType.AUTOMATION, "automation.script") == { @@ -593,7 +589,6 @@ async def test_search( script_scene_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, - ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {script_scene_entity.entity_id}, } @@ -616,7 +611,6 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, - ItemType.INTEGRATION: {"hue"}, ItemType.SCENE: {"scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.device", 
"script.hue"}, } @@ -630,7 +624,6 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, - ItemType.INTEGRATION: {"wled"}, ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.wled"}, } @@ -646,7 +639,6 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, - ItemType.INTEGRATION: {"wled"}, ItemType.LABEL: {label_christmas.label_id}, ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.wled"}, @@ -660,7 +652,6 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, - ItemType.INTEGRATION: {"hue"}, ItemType.SCENE: {"scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.device", "script.hue"}, } @@ -673,7 +664,6 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, - ItemType.INTEGRATION: {"wled"}, ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.wled"}, } @@ -683,7 +673,6 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.FLOOR: {second_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, - ItemType.INTEGRATION: {"wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } assert search(ItemType.ENTITY, hue_segment_1_entity.entity_id) == { @@ -692,7 +681,6 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, - ItemType.INTEGRATION: {"hue"}, ItemType.LABEL: {label_energy.label_id}, ItemType.SCENE: {"scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.hue"}, @@ -703,7 +691,6 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, - ItemType.INTEGRATION: {"hue"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } assert not search(ItemType.ENTITY, "automation.wled") @@ -735,7 +722,6 @@ async def test_search( } assert search(ItemType.ENTITY, "light.wled_config_entry_source") == { ItemType.CONFIG_ENTRY: {wled_config_entry.entry_id}, - ItemType.INTEGRATION: {"wled"}, } assert not search(ItemType.FLOOR, "unknown") @@ -794,7 +780,6 @@ async def test_search( wled_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, - ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.GROUP, "group.hue") == { ItemType.AREA: {kitchen_area.id}, @@ -805,7 +790,6 @@ async def test_search( hue_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id}, - ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.GROUP, "group.wled_hue") == { ItemType.AREA: {bedroom_area.id, living_room_area.id, kitchen_area.id}, @@ -819,7 +803,6 @@ async def test_search( hue_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, - ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCRIPT: {"script.group"}, } @@ -858,7 +841,6 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.ENTITY: {wled_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, - ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.SCENE, "scene.scene_hue_seg_1") == { ItemType.AREA: 
{kitchen_area.id}, @@ -866,7 +848,6 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.ENTITY: {hue_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, - ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.SCENE, scene_wled_hue_entity.entity_id) == { ItemType.AREA: {bedroom_area.id, living_room_area.id, kitchen_area.id}, @@ -880,7 +861,6 @@ async def test_search( hue_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, - ItemType.INTEGRATION: {"hue", "wled"}, ItemType.LABEL: {label_other.label_id}, ItemType.SCRIPT: {script_scene_entity.entity_id}, } @@ -900,7 +880,6 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.ENTITY: {wled_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, - ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.SCRIPT, "script.hue") == { ItemType.AREA: {kitchen_area.id}, @@ -908,7 +887,6 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.ENTITY: {hue_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, - ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.SCRIPT, "script.script_with_templated_services") == {} assert search(ItemType.SCRIPT, "script.device") == { @@ -916,7 +894,6 @@ async def test_search( ItemType.CONFIG_ENTRY: {hue_config_entry.entry_id}, ItemType.DEVICE: {hue_device.id}, ItemType.FLOOR: {first_floor.floor_id}, - ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.SCRIPT, "script.floor") == { ItemType.FLOOR: {first_floor.floor_id}, @@ -938,7 +915,6 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled_hue"}, - ItemType.INTEGRATION: {"hue", "wled"}, } assert search(ItemType.SCRIPT, script_scene_entity.entity_id) == { ItemType.AREA: {bedroom_area.id, kitchen_area.id, living_room_area.id}, @@ -952,7 +928,6 @@ async def test_search( scene_wled_hue_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, - ItemType.INTEGRATION: {"hue", "wled"}, ItemType.LABEL: {label_other.label_id}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } @@ -969,7 +944,6 @@ async def test_search( script_scene_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, - ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {script_scene_entity.entity_id}, } @@ -1007,7 +981,6 @@ async def test_search( ), ItemType.CONFIG_ENTRY: [hue_config_entry.entry_id], ItemType.FLOOR: [first_floor.floor_id], - ItemType.INTEGRATION: ["hue"], ItemType.SCENE: unordered( ["scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id] ), diff --git a/tests/components/season/conftest.py b/tests/components/season/conftest.py index c7458b0a2e1..a45a2078d9b 100644 --- a/tests/components/season/conftest.py +++ b/tests/components/season/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import patch import pytest +from typing_extensions import Generator from homeassistant.components.season.const import DOMAIN, TYPE_ASTRONOMICAL from homeassistant.const import CONF_TYPE diff --git a/tests/components/season/test_sensor.py b/tests/components/season/test_sensor.py index 881192c95f0..ffc8e9f1a07 100644 --- a/tests/components/season/test_sensor.py +++ b/tests/components/season/test_sensor.py @@ -70,7 +70,6 @@ def idfn(val): """Provide IDs for pytest parametrize.""" if isinstance(val, (datetime)): return 
val.strftime("%Y%m%d") - return None @pytest.mark.parametrize(("type", "day", "expected"), NORTHERN_PARAMETERS, ids=idfn) diff --git a/tests/components/select/test_device_condition.py b/tests/components/select/test_device_condition.py index fc35757fa67..e60df688658 100644 --- a/tests/components/select/test_device_condition.py +++ b/tests/components/select/test_device_condition.py @@ -21,7 +21,17 @@ from homeassistant.helpers import ( ) from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") async def test_get_conditions( @@ -105,7 +115,7 @@ async def test_get_conditions_hidden_auxiliary( async def test_if_selected_option( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: @@ -171,7 +181,7 @@ async def test_if_selected_option( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set( entry.entity_id, "option1", {"options": ["option1", "option2"]} @@ -179,8 +189,8 @@ async def test_if_selected_option( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["result"] == "option1 - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["result"] == "option1 - event - test_event1" hass.states.async_set( entry.entity_id, "option2", {"options": ["option1", "option2"]} @@ -188,13 +198,13 @@ async def test_if_selected_option( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["result"] == "option2 - event - test_event2" + assert len(calls) == 2 + assert calls[1].data["result"] == "option2 - event - test_event2" async def test_if_selected_option_legacy( hass: HomeAssistant, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: @@ -242,8 +252,8 @@ async def test_if_selected_option_legacy( ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["result"] == "option1 - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["result"] == "option1 - event - test_event1" async def test_get_condition_capabilities( diff --git a/tests/components/select/test_device_trigger.py b/tests/components/select/test_device_trigger.py index dbb4e23d785..c7a55c56202 100644 --- a/tests/components/select/test_device_trigger.py +++ b/tests/components/select/test_device_trigger.py @@ -21,7 +21,17 @@ from homeassistant.helpers import ( ) from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", 
"automation") async def test_get_triggers( @@ -107,7 +117,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -200,27 +210,27 @@ async def test_if_fires_on_state_change( # Test triggering device trigger with a to state hass.states.async_set(entry.entity_id, "option2") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"to - device - {entry.entity_id} - option1 - option2 - None - 0" ) # Test triggering device trigger with a from state hass.states.async_set(entry.entity_id, "option3") await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[1].data["some"] + calls[1].data["some"] == f"from - device - {entry.entity_id} - option2 - option3 - None - 0" ) # Test triggering device trigger with both a from and to state hass.states.async_set(entry.entity_id, "option1") await hass.async_block_till_done() - assert len(service_calls) == 3 + assert len(calls) == 3 assert ( - service_calls[2].data["some"] + calls[2].data["some"] == f"from-to - device - {entry.entity_id} - option3 - option1 - None - 0" ) @@ -229,7 +239,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -279,9 +289,9 @@ async def test_if_fires_on_state_change_legacy( # Test triggering device trigger with a to state hass.states.async_set(entry.entity_id, "option2") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"to - device - {entry.entity_id} - option1 - option2 - None - 0" ) diff --git a/tests/components/sense/__init__.py b/tests/components/sense/__init__.py index d604bcba737..bf0a87737b9 100644 --- a/tests/components/sense/__init__.py +++ b/tests/components/sense/__init__.py @@ -1,23 +1 @@ """Tests for the Sense integration.""" - -from unittest.mock import patch - -from homeassistant.components.sense.const import DOMAIN -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry - - -async def setup_platform( - hass: HomeAssistant, config_entry: MockConfigEntry, platform: Platform -) -> MockConfigEntry: - """Set up the Sense platform.""" - config_entry.add_to_hass(hass) - - with patch("homeassistant.components.sense.PLATFORMS", [platform]): - assert await async_setup_component(hass, DOMAIN, {}) - await hass.async_block_till_done() - - return config_entry diff --git a/tests/components/sense/conftest.py b/tests/components/sense/conftest.py deleted file mode 100644 index 7cf1626f40e..00000000000 --- a/tests/components/sense/conftest.py +++ /dev/null @@ -1,84 +0,0 @@ -"""Common methods for Sense.""" - -from __future__ import annotations - -from collections.abc import Generator -import datetime -from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch - -import pytest -from 
sense_energy import Scale - -from homeassistant.components.sense.binary_sensor import SenseDevice -from homeassistant.components.sense.const import DOMAIN - -from .const import ( - DEVICE_1_DAY_ENERGY, - DEVICE_1_ID, - DEVICE_1_NAME, - DEVICE_1_POWER, - DEVICE_2_DAY_ENERGY, - DEVICE_2_ID, - DEVICE_2_NAME, - DEVICE_2_POWER, - MOCK_CONFIG, - MONITOR_ID, -) - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.sense.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def config_entry() -> MockConfigEntry: - """Mock sense config entry.""" - return MockConfigEntry( - domain=DOMAIN, - data=MOCK_CONFIG, - unique_id="test-email", - ) - - -@pytest.fixture -def mock_sense() -> Generator[MagicMock]: - """Mock an ASyncSenseable object with a split foundation.""" - with patch("homeassistant.components.sense.ASyncSenseable", autospec=True) as mock: - gateway = mock.return_value - gateway.sense_monitor_id = MONITOR_ID - gateway.get_monitor_data.return_value = None - gateway.update_realtime.return_value = None - gateway.fetch_devices.return_value = None - gateway.update_trend_data.return_value = None - - type(gateway).active_power = PropertyMock(return_value=100) - type(gateway).active_solar_power = PropertyMock(return_value=500) - type(gateway).active_voltage = PropertyMock(return_value=[120, 240]) - gateway.get_stat.return_value = 15 - gateway.trend_start.return_value = datetime.datetime.fromisoformat( - "2024-01-01 01:01:00+00:00" - ) - - device_1 = SenseDevice(DEVICE_1_ID) - device_1.name = DEVICE_1_NAME - device_1.icon = "car" - device_1.is_on = False - device_1.power_w = DEVICE_1_POWER - device_1.energy_kwh[Scale.DAY] = DEVICE_1_DAY_ENERGY - - device_2 = SenseDevice(DEVICE_2_ID) - device_2.name = DEVICE_2_NAME - device_2.icon = "stove" - device_2.is_on = False - device_2.power_w = DEVICE_2_POWER - device_2.energy_kwh[Scale.DAY] = DEVICE_2_DAY_ENERGY - type(gateway).devices = PropertyMock(return_value=[device_1, device_2]) - - yield gateway diff --git a/tests/components/sense/const.py b/tests/components/sense/const.py deleted file mode 100644 index d040c0bc38c..00000000000 --- a/tests/components/sense/const.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Cosntants for the Sense integration tests.""" - -MONITOR_ID = "456" - -MOCK_CONFIG = { - "timeout": 6, - "email": "test-email", - "password": "test-password", - "access_token": "ABC", - "user_id": "123", - "monitor_id": MONITOR_ID, - "device_id": "789", - "refresh_token": "XYZ", -} - - -DEVICE_1_NAME = "Car" -DEVICE_1_ID = "abc123" -DEVICE_1_ICON = "car-electric" -DEVICE_1_POWER = 100.0 -DEVICE_1_DAY_ENERGY = 500 - -DEVICE_2_NAME = "Oven" -DEVICE_2_ID = "def456" -DEVICE_2_ICON = "stove" -DEVICE_2_POWER = 50.0 -DEVICE_2_DAY_ENERGY = 42 - -MONITOR_ID = "12345" diff --git a/tests/components/sense/snapshots/test_binary_sensor.ambr b/tests/components/sense/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 339830b16d3..00000000000 --- a/tests/components/sense/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,99 +0,0 @@ -# serializer version: 1 -# name: test_binary_sensors[binary_sensor.car_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 
'binary_sensor.car_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:car-electric', - 'original_name': 'Power', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-abc123', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[binary_sensor.car_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'power', - 'friendly_name': 'Car Power', - 'icon': 'mdi:car-electric', - }), - 'context': , - 'entity_id': 'binary_sensor.car_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[binary_sensor.oven_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.oven_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:stove', - 'original_name': 'Power', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-def456', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[binary_sensor.oven_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'power', - 'friendly_name': 'Oven Power', - 'icon': 'mdi:stove', - }), - 'context': , - 'entity_id': 'binary_sensor.oven_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/sense/snapshots/test_sensor.ambr b/tests/components/sense/snapshots/test_sensor.ambr deleted file mode 100644 index 4a3507880a1..00000000000 --- a/tests/components/sense/snapshots/test_sensor.ambr +++ /dev/null @@ -1,2680 +0,0 @@ -# serializer version: 1 -# name: test_sensors[sensor.car_bill_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.car_bill_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:car-electric', - 'original_name': 'Bill energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'bill_energy', - 'unique_id': '12345-abc123-bill-energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.car_bill_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Car Bill energy', - 'icon': 'mdi:car-electric', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.car_bill_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 
'state': '0.0', - }) -# --- -# name: test_sensors[sensor.car_daily_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.car_daily_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:car-electric', - 'original_name': 'Daily energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_energy', - 'unique_id': '12345-abc123-daily-energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.car_daily_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Car Daily energy', - 'icon': 'mdi:car-electric', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.car_daily_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '500', - }) -# --- -# name: test_sensors[sensor.car_monthly_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.car_monthly_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:car-electric', - 'original_name': 'Monthly energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'monthly_energy', - 'unique_id': '12345-abc123-monthly-energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.car_monthly_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Car Monthly energy', - 'icon': 'mdi:car-electric', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.car_monthly_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.car_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.car_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:car-electric', - 'original_name': 'Power', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-abc123-usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.car_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by 
Sense.com', - 'device_class': 'power', - 'friendly_name': 'Car Power', - 'icon': 'mdi:car-electric', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.car_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100.0', - }) -# --- -# name: test_sensors[sensor.car_weekly_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.car_weekly_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:car-electric', - 'original_name': 'Weekly energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'weekly_energy', - 'unique_id': '12345-abc123-weekly-energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.car_weekly_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Car Weekly energy', - 'icon': 'mdi:car-electric', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.car_weekly_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.car_yearly_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.car_yearly_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:car-electric', - 'original_name': 'Yearly energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yearly_energy', - 'unique_id': '12345-abc123-yearly-energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.car_yearly_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Car Yearly energy', - 'icon': 'mdi:car-electric', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.car_yearly_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.oven_bill_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.oven_bill_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:stove', - 
'original_name': 'Bill energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'bill_energy', - 'unique_id': '12345-def456-bill-energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.oven_bill_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Oven Bill energy', - 'icon': 'mdi:stove', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.oven_bill_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.oven_daily_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.oven_daily_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:stove', - 'original_name': 'Daily energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_energy', - 'unique_id': '12345-def456-daily-energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.oven_daily_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Oven Daily energy', - 'icon': 'mdi:stove', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.oven_daily_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '42', - }) -# --- -# name: test_sensors[sensor.oven_monthly_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.oven_monthly_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:stove', - 'original_name': 'Monthly energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'monthly_energy', - 'unique_id': '12345-def456-monthly-energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.oven_monthly_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Oven Monthly energy', - 'icon': 'mdi:stove', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.oven_monthly_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.oven_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 
'sensor', - 'entity_category': None, - 'entity_id': 'sensor.oven_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:stove', - 'original_name': 'Power', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-def456-usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.oven_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'power', - 'friendly_name': 'Oven Power', - 'icon': 'mdi:stove', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.oven_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '50.0', - }) -# --- -# name: test_sensors[sensor.oven_weekly_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.oven_weekly_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:stove', - 'original_name': 'Weekly energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'weekly_energy', - 'unique_id': '12345-def456-weekly-energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.oven_weekly_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Oven Weekly energy', - 'icon': 'mdi:stove', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.oven_weekly_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.oven_yearly_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.oven_yearly_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:stove', - 'original_name': 'Yearly energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yearly_energy', - 'unique_id': '12345-def456-yearly-energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.oven_yearly_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Oven Yearly energy', - 'icon': 'mdi:stove', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.oven_yearly_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: 
test_sensors[sensor.sense_12345_bill_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_bill_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Bill Energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-bill-usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_bill_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Bill Energy', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_bill_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_bill_from_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_bill_from_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Bill From Grid', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-bill-from_grid', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_bill_from_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Bill From Grid', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_bill_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_bill_net_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_bill_net_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Bill Net Production', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-bill-net_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_bill_net_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 
'friendly_name': 'Sense 12345 Bill Net Production', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_bill_net_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_bill_net_production_percentage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_bill_net_production_percentage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Bill Net Production Percentage', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-bill-production_pct', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.sense_12345_bill_net_production_percentage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'friendly_name': 'Sense 12345 Bill Net Production Percentage', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.sense_12345_bill_net_production_percentage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_bill_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_bill_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Bill Production', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-bill-production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_bill_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Bill Production', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_bill_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_bill_solar_powered_percentage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_bill_solar_powered_percentage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Bill Solar Powered Percentage', - 'platform': 'sense', - 'previous_unique_id': 
None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-bill-solar_powered', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.sense_12345_bill_solar_powered_percentage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'friendly_name': 'Sense 12345 Bill Solar Powered Percentage', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.sense_12345_bill_solar_powered_percentage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_bill_to_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_bill_to_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Bill To Grid', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-bill-to_grid', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_bill_to_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Bill To Grid', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_bill_to_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_daily_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Daily Energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-daily-usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Daily Energy', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_daily_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_from_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_daily_from_grid', - 'has_entity_name': True, - 'hidden_by': None, 
- 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Daily From Grid', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-daily-from_grid', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_from_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Daily From Grid', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_daily_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_net_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_daily_net_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Daily Net Production', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-daily-net_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_net_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Daily Net Production', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_daily_net_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_net_production_percentage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_daily_net_production_percentage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Daily Net Production Percentage', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-daily-production_pct', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_net_production_percentage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'friendly_name': 'Sense 12345 Daily Net Production Percentage', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.sense_12345_daily_net_production_percentage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_production-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_daily_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Daily Production', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-daily-production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Daily Production', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_daily_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_solar_powered_percentage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_daily_solar_powered_percentage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Daily Solar Powered Percentage', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-daily-solar_powered', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_solar_powered_percentage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'friendly_name': 'Sense 12345 Daily Solar Powered Percentage', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.sense_12345_daily_solar_powered_percentage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_to_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_daily_to_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Daily To Grid', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-daily-to_grid', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_daily_to_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Daily To Grid', - 'last_reset': 
'2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_daily_to_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-active-usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'power', - 'friendly_name': 'Sense 12345 Energy', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_sensors[sensor.sense_12345_l1_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_l1_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'L1 Voltage', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-L1', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_l1_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'voltage', - 'friendly_name': 'Sense 12345 L1 Voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_l1_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '120', - }) -# --- -# name: test_sensors[sensor.sense_12345_l2_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_l2_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'L2 Voltage', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-L2', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_l2_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 
'device_class': 'voltage', - 'friendly_name': 'Sense 12345 L2 Voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_l2_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '240', - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_monthly_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Monthly Energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-monthly-usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Monthly Energy', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_monthly_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_from_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_monthly_from_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Monthly From Grid', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-monthly-from_grid', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_from_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Monthly From Grid', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_monthly_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_net_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_monthly_net_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Monthly Net Production', - 'platform': 'sense', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-monthly-net_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_net_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Monthly Net Production', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_monthly_net_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_net_production_percentage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_monthly_net_production_percentage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Monthly Net Production Percentage', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-monthly-production_pct', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_net_production_percentage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'friendly_name': 'Sense 12345 Monthly Net Production Percentage', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.sense_12345_monthly_net_production_percentage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_monthly_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Monthly Production', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-monthly-production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Monthly Production', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_monthly_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_solar_powered_percentage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 
'entity_category': None, - 'entity_id': 'sensor.sense_12345_monthly_solar_powered_percentage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Monthly Solar Powered Percentage', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-monthly-solar_powered', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_solar_powered_percentage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'friendly_name': 'Sense 12345 Monthly Solar Powered Percentage', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.sense_12345_monthly_solar_powered_percentage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_to_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_monthly_to_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Monthly To Grid', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-monthly-to_grid', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_monthly_to_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Monthly To Grid', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_monthly_to_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Production', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-active-production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'power', - 'friendly_name': 'Sense 12345 Production', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '500', - }) -# --- -# name: 
test_sensors[sensor.sense_12345_weekly_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_weekly_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Weekly Energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-weekly-usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_weekly_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Weekly Energy', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_weekly_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_weekly_from_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_weekly_from_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Weekly From Grid', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-weekly-from_grid', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_weekly_from_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Weekly From Grid', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_weekly_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_weekly_net_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_weekly_net_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Weekly Net Production', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-weekly-net_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_weekly_net_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 
'device_class': 'energy', - 'friendly_name': 'Sense 12345 Weekly Net Production', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_weekly_net_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_weekly_net_production_percentage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_weekly_net_production_percentage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Weekly Net Production Percentage', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-weekly-production_pct', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.sense_12345_weekly_net_production_percentage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'friendly_name': 'Sense 12345 Weekly Net Production Percentage', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.sense_12345_weekly_net_production_percentage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_weekly_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_weekly_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Weekly Production', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-weekly-production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_weekly_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Weekly Production', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_weekly_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_weekly_solar_powered_percentage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_weekly_solar_powered_percentage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Weekly Solar 
Powered Percentage', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-weekly-solar_powered', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.sense_12345_weekly_solar_powered_percentage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'friendly_name': 'Sense 12345 Weekly Solar Powered Percentage', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.sense_12345_weekly_solar_powered_percentage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_weekly_to_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_weekly_to_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Weekly To Grid', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-weekly-to_grid', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_weekly_to_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Weekly To Grid', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_weekly_to_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_yearly_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yearly Energy', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-yearly-usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Yearly Energy', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_yearly_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_from_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 
'entity_id': 'sensor.sense_12345_yearly_from_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yearly From Grid', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-yearly-from_grid', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_from_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Yearly From Grid', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_yearly_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_net_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_yearly_net_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yearly Net Production', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-yearly-net_production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_net_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Yearly Net Production', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_yearly_net_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_net_production_percentage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_yearly_net_production_percentage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Yearly Net Production Percentage', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-yearly-production_pct', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_net_production_percentage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'friendly_name': 'Sense 12345 Yearly Net Production Percentage', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.sense_12345_yearly_net_production_percentage', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_yearly_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yearly Production', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-yearly-production', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Yearly Production', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_yearly_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_solar_powered_percentage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_yearly_solar_powered_percentage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Yearly Solar Powered Percentage', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-yearly-solar_powered', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_solar_powered_percentage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Sense.com', - 'friendly_name': 'Sense 12345 Yearly Solar Powered Percentage', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.sense_12345_yearly_solar_powered_percentage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_to_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.sense_12345_yearly_to_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yearly To Grid', - 'platform': 'sense', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12345-yearly-to_grid', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.sense_12345_yearly_to_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'attribution': 'Data provided by Sense.com', - 'device_class': 'energy', - 'friendly_name': 'Sense 12345 Yearly To Grid', - 'last_reset': '2024-01-01T01:01:00+00:00', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.sense_12345_yearly_to_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- diff --git a/tests/components/sense/test_binary_sensor.py b/tests/components/sense/test_binary_sensor.py deleted file mode 100644 index ae91b7a9a21..00000000000 --- a/tests/components/sense/test_binary_sensor.py +++ /dev/null @@ -1,68 +0,0 @@ -"""The tests for Sense binary sensor platform.""" - -from datetime import timedelta -from unittest.mock import MagicMock - -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN -from homeassistant.components.sense.const import ACTIVE_UPDATE_RATE -from homeassistant.const import STATE_OFF, STATE_ON, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.util.dt import utcnow - -from . import setup_platform -from .const import DEVICE_1_NAME, DEVICE_2_NAME - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - - -async def test_binary_sensors( - hass: HomeAssistant, - mock_sense: MagicMock, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test Sensor.""" - await setup_platform(hass, config_entry, Platform.BINARY_SENSOR) - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -async def test_on_off_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_sense: MagicMock, - config_entry: MockConfigEntry, -) -> None: - """Test the Sense binary sensors.""" - await setup_platform(hass, config_entry, BINARY_SENSOR_DOMAIN) - device_1, device_2 = mock_sense.devices - - state = hass.states.get(f"binary_sensor.{DEVICE_1_NAME.lower()}_power") - assert state.state == STATE_OFF - - state = hass.states.get(f"binary_sensor.{DEVICE_2_NAME.lower()}_power") - assert state.state == STATE_OFF - - device_1.is_on = True - async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) - await hass.async_block_till_done() - - state = hass.states.get(f"binary_sensor.{DEVICE_1_NAME.lower()}_power") - assert state.state == STATE_ON - - state = hass.states.get(f"binary_sensor.{DEVICE_2_NAME.lower()}_power") - assert state.state == STATE_OFF - - device_1.is_on = False - device_2.is_on = True - async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) - await hass.async_block_till_done() - - state = hass.states.get(f"binary_sensor.{DEVICE_1_NAME.lower()}_power") - assert state.state == STATE_OFF - - state = hass.states.get(f"binary_sensor.{DEVICE_2_NAME.lower()}_power") - assert state.state == STATE_ON diff --git a/tests/components/sense/test_config_flow.py b/tests/components/sense/test_config_flow.py index acef82dd0ba..e564603ea87 100644 --- a/tests/components/sense/test_config_flow.py +++ b/tests/components/sense/test_config_flow.py @@ -16,10 +16,19 @@ from homeassistant.const import CONF_CODE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .const import MOCK_CONFIG - from tests.common import MockConfigEntry +MOCK_CONFIG = { + "timeout": 6, + "email": "test-email", + "password": "test-password", + 
"access_token": "ABC", + "user_id": "123", + "monitor_id": "456", + "device_id": "789", + "refresh_token": "XYZ", +} + @pytest.fixture(name="mock_sense") def mock_sense(): @@ -259,7 +268,9 @@ async def test_reauth_no_form(hass: HomeAssistant, mock_sense) -> None: "homeassistant.config_entries.ConfigEntries.async_reload", return_value=True, ): - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=MOCK_CONFIG + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" @@ -277,7 +288,9 @@ async def test_reauth_password(hass: HomeAssistant, mock_sense) -> None: mock_sense.return_value.authenticate.side_effect = SenseAuthenticationException # Reauth success without user input - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data + ) assert result["type"] is FlowResultType.FORM mock_sense.return_value.authenticate.side_effect = None diff --git a/tests/components/sense/test_sensor.py b/tests/components/sense/test_sensor.py deleted file mode 100644 index d43b422ec38..00000000000 --- a/tests/components/sense/test_sensor.py +++ /dev/null @@ -1,234 +0,0 @@ -"""The tests for Sense sensor platform.""" - -from datetime import timedelta -from unittest.mock import MagicMock, PropertyMock - -from freezegun.api import FrozenDateTimeFactory -import pytest -from sense_energy import Scale -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.sense.const import ACTIVE_UPDATE_RATE, TREND_UPDATE_RATE -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.util.dt import utcnow - -from . 
import setup_platform -from .const import ( - DEVICE_1_DAY_ENERGY, - DEVICE_1_NAME, - DEVICE_2_DAY_ENERGY, - DEVICE_2_NAME, - DEVICE_2_POWER, - MONITOR_ID, -) - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensors( - hass: HomeAssistant, - mock_sense: MagicMock, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test Sensor.""" - await setup_platform(hass, config_entry, Platform.SENSOR) - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -async def test_device_power_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_sense: MagicMock, - config_entry: MockConfigEntry, -) -> None: - """Test the Sense device power sensors.""" - device_1, device_2 = mock_sense.devices - device_1.power_w = 0 - device_2.power_w = 0 - await setup_platform(hass, config_entry, SENSOR_DOMAIN) - device_1, device_2 = mock_sense.devices - - state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_power") - assert state.state == "0" - - state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_power") - assert state.state == "0" - - device_2.power_w = DEVICE_2_POWER - async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) - await hass.async_block_till_done() - - state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_power") - assert state.state == "0" - - state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_power") - assert state.state == f"{DEVICE_2_POWER:.1f}" - - -async def test_device_energy_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_sense: MagicMock, - config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test the Sense device power sensors.""" - await setup_platform(hass, config_entry, SENSOR_DOMAIN) - device_1, device_2 = mock_sense.devices - - state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_daily_energy") - assert state.state == f"{DEVICE_1_DAY_ENERGY:.0f}" - - state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_daily_energy") - assert state.state == f"{DEVICE_2_DAY_ENERGY:.0f}" - - device_1.energy_kwh[Scale.DAY] = 0 - device_2.energy_kwh[Scale.DAY] = 0 - freezer.tick(timedelta(seconds=TREND_UPDATE_RATE)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_daily_energy") - assert state.state == "0" - - state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_daily_energy") - assert state.state == "0" - - device_2.energy_kwh[Scale.DAY] = DEVICE_1_DAY_ENERGY - freezer.tick(timedelta(seconds=TREND_UPDATE_RATE)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_daily_energy") - assert state.state == "0" - - state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_daily_energy") - assert state.state == f"{DEVICE_1_DAY_ENERGY:.0f}" - - -async def test_voltage_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_sense: MagicMock, - config_entry: MockConfigEntry, -) -> None: - """Test the Sense voltage sensors.""" - - type(mock_sense).active_voltage = PropertyMock(return_value=[120, 121]) - - await setup_platform(hass, config_entry, SENSOR_DOMAIN) - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_l1_voltage") - assert state.state == "120" - - state = 
hass.states.get(f"sensor.sense_{MONITOR_ID}_l2_voltage") - assert state.state == "121" - - type(mock_sense).active_voltage = PropertyMock(return_value=[122, 123]) - async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) - await hass.async_block_till_done() - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_l1_voltage") - assert state.state == "122" - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_l2_voltage") - assert state.state == "123" - - -async def test_active_power_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_sense: MagicMock, - config_entry: MockConfigEntry, -) -> None: - """Test the Sense power sensors.""" - - type(mock_sense).active_power = PropertyMock(return_value=400) - type(mock_sense).active_solar_power = PropertyMock(return_value=500) - - await setup_platform(hass, config_entry, SENSOR_DOMAIN) - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_energy") - assert state.state == "400" - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_production") - assert state.state == "500" - - type(mock_sense).active_power = PropertyMock(return_value=600) - type(mock_sense).active_solar_power = PropertyMock(return_value=700) - async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) - await hass.async_block_till_done() - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_energy") - assert state.state == "600" - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_production") - assert state.state == "700" - - -async def test_trend_energy_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_sense: MagicMock, - config_entry: MockConfigEntry, -) -> None: - """Test the Sense power sensors.""" - mock_sense.get_stat.side_effect = lambda sensor_type, variant: { - (Scale.DAY, "usage"): 100, - (Scale.DAY, "production"): 200, - (Scale.DAY, "from_grid"): 300, - (Scale.DAY, "to_grid"): 400, - (Scale.DAY, "net_production"): 500, - (Scale.DAY, "production_pct"): 600, - (Scale.DAY, "solar_powered"): 700, - }.get((sensor_type, variant), 0) - - await setup_platform(hass, config_entry, SENSOR_DOMAIN) - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_energy") - assert state.state == "100" - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_production") - assert state.state == "200" - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_from_grid") - assert state.state == "300" - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_to_grid") - assert state.state == "400" - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_net_production") - assert state.state == "500" - - mock_sense.get_stat.side_effect = lambda sensor_type, variant: { - (Scale.DAY, "usage"): 1000, - (Scale.DAY, "production"): 2000, - (Scale.DAY, "from_grid"): 3000, - (Scale.DAY, "to_grid"): 4000, - (Scale.DAY, "net_production"): 5000, - (Scale.DAY, "production_pct"): 6000, - (Scale.DAY, "solar_powered"): 7000, - }.get((sensor_type, variant), 0) - async_fire_time_changed(hass, utcnow() + timedelta(seconds=600)) - await hass.async_block_till_done() - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_energy") - assert state.state == "1000" - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_production") - assert state.state == "2000" - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_from_grid") - assert state.state == "3000" - - state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_to_grid") - assert state.state == "4000" - - state = 
hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_net_production") - assert state.state == "5000" diff --git a/tests/components/sensibo/snapshots/test_diagnostics.ambr b/tests/components/sensibo/snapshots/test_diagnostics.ambr index cc77318239e..c911a7629be 100644 --- a/tests/components/sensibo/snapshots/test_diagnostics.ambr +++ b/tests/components/sensibo/snapshots/test_diagnostics.ambr @@ -1,5 +1,246 @@ # serializer version: 1 # name: test_diagnostics + dict({ + 'modes': dict({ + 'auto': dict({ + 'fanLevels': list([ + 'quiet', + 'low', + 'medium', + ]), + 'horizontalSwing': list([ + 'stopped', + 'fixedLeft', + 'fixedCenterLeft', + ]), + 'light': list([ + 'on', + 'off', + ]), + 'swing': list([ + 'stopped', + 'fixedTop', + 'fixedMiddleTop', + ]), + 'temperatures': dict({ + 'C': dict({ + 'isNative': True, + 'values': list([ + 10, + 16, + 17, + 18, + 19, + 20, + ]), + }), + 'F': dict({ + 'isNative': False, + 'values': list([ + 64, + 66, + 68, + ]), + }), + }), + }), + 'cool': dict({ + 'fanLevels': list([ + 'quiet', + 'low', + 'medium', + ]), + 'horizontalSwing': list([ + 'stopped', + 'fixedLeft', + 'fixedCenterLeft', + ]), + 'light': list([ + 'on', + 'off', + ]), + 'swing': list([ + 'stopped', + 'fixedTop', + 'fixedMiddleTop', + ]), + 'temperatures': dict({ + 'C': dict({ + 'isNative': True, + 'values': list([ + 10, + 16, + 17, + 18, + 19, + 20, + ]), + }), + 'F': dict({ + 'isNative': False, + 'values': list([ + 64, + 66, + 68, + ]), + }), + }), + }), + 'dry': dict({ + 'horizontalSwing': list([ + 'stopped', + 'fixedLeft', + 'fixedCenterLeft', + ]), + 'light': list([ + 'on', + 'off', + ]), + 'swing': list([ + 'stopped', + 'fixedTop', + 'fixedMiddleTop', + ]), + 'temperatures': dict({ + 'C': dict({ + 'isNative': True, + 'values': list([ + 10, + 16, + 17, + 18, + 19, + 20, + ]), + }), + 'F': dict({ + 'isNative': False, + 'values': list([ + 64, + 66, + 68, + ]), + }), + }), + }), + 'fan': dict({ + 'fanLevels': list([ + 'quiet', + 'low', + 'medium', + ]), + 'horizontalSwing': list([ + 'stopped', + 'fixedLeft', + 'fixedCenterLeft', + ]), + 'light': list([ + 'on', + 'off', + ]), + 'swing': list([ + 'stopped', + 'fixedTop', + 'fixedMiddleTop', + ]), + 'temperatures': dict({ + }), + }), + 'heat': dict({ + 'fanLevels': list([ + 'quiet', + 'low', + 'medium', + ]), + 'horizontalSwing': list([ + 'stopped', + 'fixedLeft', + 'fixedCenterLeft', + ]), + 'light': list([ + 'on', + 'off', + ]), + 'swing': list([ + 'stopped', + 'fixedTop', + 'fixedMiddleTop', + ]), + 'temperatures': dict({ + 'C': dict({ + 'isNative': True, + 'values': list([ + 10, + 16, + 17, + 18, + 19, + 20, + ]), + }), + 'F': dict({ + 'isNative': False, + 'values': list([ + 63, + 64, + 66, + ]), + }), + }), + }), + }), + }) +# --- +# name: test_diagnostics.1 + dict({ + 'low': 'low', + 'medium': 'medium', + 'quiet': 'quiet', + }) +# --- +# name: test_diagnostics.2 + dict({ + 'fixedmiddletop': 'fixedMiddleTop', + 'fixedtop': 'fixedTop', + 'stopped': 'stopped', + }) +# --- +# name: test_diagnostics.3 + dict({ + 'fixedcenterleft': 'fixedCenterLeft', + 'fixedleft': 'fixedLeft', + 'stopped': 'stopped', + }) +# --- +# name: test_diagnostics.4 + dict({ + 'fanlevel': 'low', + 'horizontalswing': 'stopped', + 'light': 'on', + 'mode': 'heat', + 'on': True, + 'swing': 'stopped', + 'targettemperature': 21, + 'temperatureunit': 'c', + }) +# --- +# name: test_diagnostics.5 + dict({ + 'fanlevel': 'high', + 'horizontalswing': 'stopped', + 'light': 'on', + 'mode': 'cool', + 'on': True, + 'swing': 'stopped', + 'targettemperature': 21, + 'temperatureunit': 'c', + 
}) +# --- +# name: test_diagnostics.6 + dict({ + }) +# --- +# name: test_diagnostics[full_snapshot] dict({ 'AAZZAAZZ': dict({ 'ac_states': dict({ @@ -91,8 +332,7 @@ 'motion_sensors': dict({ }), 'name': 'Kitchen', - 'pm25': None, - 'pm25_pure': 1, + 'pm25': 1, 'pure_ac_integration': False, 'pure_boost_enabled': False, 'pure_conf': dict({ @@ -425,7 +665,6 @@ }), 'name': 'Hallway', 'pm25': None, - 'pm25_pure': None, 'pure_ac_integration': None, 'pure_boost_enabled': None, 'pure_conf': dict({ @@ -552,8 +791,7 @@ 'motion_sensors': dict({ }), 'name': 'Bedroom', - 'pm25': None, - 'pm25_pure': 1, + 'pm25': 1, 'pure_ac_integration': False, 'pure_boost_enabled': False, 'pure_conf': dict({ diff --git a/tests/components/sensibo/snapshots/test_sensor.ambr b/tests/components/sensibo/snapshots/test_sensor.ambr index cd8d510b6cc..d645bdbd383 100644 --- a/tests/components/sensibo/snapshots/test_sensor.ambr +++ b/tests/components/sensibo/snapshots/test_sensor.ambr @@ -1,13 +1,10 @@ # serializer version: 1 # name: test_sensor ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Kitchen Pure AQI', - 'options': list([ - 'good', - 'moderate', - 'bad', - ]), + 'device_class': 'pm25', + 'friendly_name': 'Kitchen PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', }) # --- # name: test_sensor.1 diff --git a/tests/components/sensibo/test_climate.py b/tests/components/sensibo/test_climate.py index b5a7be7bde0..6b4aedab828 100644 --- a/tests/components/sensibo/test_climate.py +++ b/tests/components/sensibo/test_climate.py @@ -400,10 +400,6 @@ async def test_climate_temperatures( "homeassistant.components.sensibo.util.SensiboClient.async_set_ac_state_property", return_value={"result": {"status": "Success"}}, ), - pytest.raises( - ServiceValidationError, - match="Provided temperature 24.0 is not valid. 
Accepted range is 10 to 20", - ), ): await hass.services.async_call( CLIMATE_DOMAIN, @@ -414,7 +410,7 @@ async def test_climate_temperatures( await hass.async_block_till_done() state2 = hass.states.get("climate.hallway") - assert state2.attributes["temperature"] == 19 + assert state2.attributes["temperature"] == 20 with ( patch( diff --git a/tests/components/sensibo/test_config_flow.py b/tests/components/sensibo/test_config_flow.py index d6edb1c7ae0..e994402b09f 100644 --- a/tests/components/sensibo/test_config_flow.py +++ b/tests/components/sensibo/test_config_flow.py @@ -192,7 +192,15 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -246,7 +254,15 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) with patch( "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", @@ -322,7 +338,15 @@ async def test_flow_reauth_no_username_or_device( ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -348,171 +372,3 @@ async def test_flow_reauth_no_username_or_device( assert result2["step_id"] == "reauth_confirm" assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": p_error} - - -async def test_reconfigure_flow(hass: HomeAssistant) -> None: - """Test a reconfigure flow.""" - entry = MockConfigEntry( - version=2, - domain=DOMAIN, - unique_id="username", - data={"api_key": "1234567890"}, - ) - entry.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - assert result["step_id"] == "reconfigure" - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with ( - patch( - "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", - return_value={"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]}, - ), - patch( - "homeassistant.components.sensibo.util.SensiboClient.async_get_me", - return_value={"result": {"username": "username"}}, - ) as mock_sensibo, - patch( - "homeassistant.components.sensibo.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_API_KEY: "1234567891"}, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" - assert entry.data == {"api_key": "1234567891"} - - assert len(mock_sensibo.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("sideeffect", "p_error"), - [ - (aiohttp.ClientConnectionError, "cannot_connect"), - (TimeoutError, "cannot_connect"), - 
(AuthenticationError, "invalid_auth"), - (SensiboError, "cannot_connect"), - ], -) -async def test_reconfigure_flow_error( - hass: HomeAssistant, sideeffect: Exception, p_error: str -) -> None: - """Test a reconfigure flow with error.""" - entry = MockConfigEntry( - version=2, - domain=DOMAIN, - unique_id="username", - data={"api_key": "1234567890"}, - ) - entry.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - - with patch( - "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", - side_effect=sideeffect, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_API_KEY: "1234567890"}, - ) - await hass.async_block_till_done() - - assert result2["step_id"] == "reconfigure" - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": p_error} - - with ( - patch( - "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", - return_value={"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]}, - ), - patch( - "homeassistant.components.sensibo.util.SensiboClient.async_get_me", - return_value={"result": {"username": "username"}}, - ), - patch( - "homeassistant.components.sensibo.async_setup_entry", - return_value=True, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_API_KEY: "1234567891"}, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" - assert entry.data == {"api_key": "1234567891"} - - -@pytest.mark.parametrize( - ("get_devices", "get_me", "p_error"), - [ - ( - {"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]}, - {"result": {}}, - "no_username", - ), - ( - {"result": []}, - {"result": {"username": "username"}}, - "no_devices", - ), - ( - {"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]}, - {"result": {"username": "username2"}}, - "incorrect_api_key", - ), - ], -) -async def test_flow_reconfigure_no_username_or_device( - hass: HomeAssistant, - get_devices: dict[str, Any], - get_me: dict[str, Any], - p_error: str, -) -> None: - """Test config flow get no username from api.""" - entry = MockConfigEntry( - version=2, - domain=DOMAIN, - unique_id="username", - data={"api_key": "1234567890"}, - ) - entry.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" - - with ( - patch( - "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", - return_value=get_devices, - ), - patch( - "homeassistant.components.sensibo.util.SensiboClient.async_get_me", - return_value=get_me, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_API_KEY: "1234567890", - }, - ) - await hass.async_block_till_done() - - assert result2["step_id"] == "reconfigure" - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": p_error} diff --git a/tests/components/sensibo/test_diagnostics.py b/tests/components/sensibo/test_diagnostics.py index 0dc1f2c25e9..1fe72cca0f3 100644 --- a/tests/components/sensibo/test_diagnostics.py +++ b/tests/components/sensibo/test_diagnostics.py @@ -3,7 +3,6 @@ from __future__ import annotations from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -11,6 +10,8 @@ from homeassistant.core import HomeAssistant from 
tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator +EXCLUDE_ATTRIBUTES = {"full_features"} + async def test_diagnostics( hass: HomeAssistant, @@ -23,6 +24,16 @@ async def test_diagnostics( diag = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert diag == snapshot( - exclude=props("full_features", "created_at", "modified_at"), - ) + assert diag["ABC999111"]["full_capabilities"] == snapshot + assert diag["ABC999111"]["fan_modes_translated"] == snapshot + assert diag["ABC999111"]["swing_modes_translated"] == snapshot + assert diag["ABC999111"]["horizontal_swing_modes_translated"] == snapshot + assert diag["ABC999111"]["smart_low_state"] == snapshot + assert diag["ABC999111"]["smart_high_state"] == snapshot + assert diag["ABC999111"]["pure_conf"] == snapshot + + def limit_attrs(prop, path): + exclude_attrs = EXCLUDE_ATTRIBUTES + return prop in exclude_attrs + + assert diag == snapshot(name="full_snapshot", exclude=limit_attrs) diff --git a/tests/components/sensibo/test_sensor.py b/tests/components/sensibo/test_sensor.py index 5fc761f178a..3c6fb584a6e 100644 --- a/tests/components/sensibo/test_sensor.py +++ b/tests/components/sensibo/test_sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from datetime import timedelta from unittest.mock import patch -from pysensibo.model import PureAQI, SensiboData +from pysensibo.model import SensiboData import pytest from syrupy.assertion import SnapshotAssertion @@ -27,17 +27,17 @@ async def test_sensor( """Test the Sensibo sensor.""" state1 = hass.states.get("sensor.hallway_motion_sensor_battery_voltage") - state2 = hass.states.get("sensor.kitchen_pure_aqi") + state2 = hass.states.get("sensor.kitchen_pm2_5") state3 = hass.states.get("sensor.kitchen_pure_sensitivity") state4 = hass.states.get("sensor.hallway_climate_react_low_temperature_threshold") assert state1.state == "3000" - assert state2.state == "good" + assert state2.state == "1" assert state3.state == "n" assert state4.state == "0.0" assert state2.attributes == snapshot assert state4.attributes == snapshot - monkeypatch.setattr(get_data.parsed["AAZZAAZZ"], "pm25_pure", PureAQI(2)) + monkeypatch.setattr(get_data.parsed["AAZZAAZZ"], "pm25", 2) with patch( "homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data", @@ -49,5 +49,5 @@ async def test_sensor( ) await hass.async_block_till_done() - state1 = hass.states.get("sensor.kitchen_pure_aqi") - assert state1.state == "moderate" + state1 = hass.states.get("sensor.kitchen_pm2_5") + assert state1.state == "2" diff --git a/tests/components/sensor/common.py b/tests/components/sensor/common.py index 458009b2690..53a93b73da3 100644 --- a/tests/components/sensor/common.py +++ b/tests/components/sensor/common.py @@ -10,11 +10,11 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, PERCENTAGE, + POWER_VOLT_AMPERE_REACTIVE, SIGNAL_STRENGTH_DECIBELS, UnitOfApparentPower, UnitOfFrequency, UnitOfPressure, - UnitOfReactivePower, UnitOfVolume, ) @@ -44,7 +44,7 @@ UNITS_OF_MEASUREMENT = { SensorDeviceClass.ENERGY: "kWh", # energy (Wh/kWh/MWh) SensorDeviceClass.FREQUENCY: UnitOfFrequency.GIGAHERTZ, # energy (Hz/kHz/MHz/GHz) SensorDeviceClass.POWER_FACTOR: PERCENTAGE, # power factor (no unit, min: -1.0, max: 1.0) - SensorDeviceClass.REACTIVE_POWER: UnitOfReactivePower.VOLT_AMPERE_REACTIVE, # reactive power (var) + SensorDeviceClass.REACTIVE_POWER: POWER_VOLT_AMPERE_REACTIVE, # reactive power (var) 
SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, # µg/m³ of vocs SensorDeviceClass.VOLTAGE: "V", # voltage (V) SensorDeviceClass.GAS: UnitOfVolume.CUBIC_METERS, # gas (m³) diff --git a/tests/components/sensor/test_device_condition.py b/tests/components/sensor/test_device_condition.py index a9781e0b800..3bc9a660e93 100644 --- a/tests/components/sensor/test_device_condition.py +++ b/tests/components/sensor/test_device_condition.py @@ -27,6 +27,7 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, setup_test_component_platform, ) @@ -36,6 +37,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.mark.parametrize( "device_class", [ @@ -51,6 +58,7 @@ def test_matches_device_classes(device_class: SensorDeviceClass) -> None: SensorDeviceClass.BATTERY: "CONF_IS_BATTERY_LEVEL", SensorDeviceClass.CO: "CONF_IS_CO", SensorDeviceClass.CO2: "CONF_IS_CO2", + SensorDeviceClass.CONDUCTIVITY: "CONF_IS_CONDUCTIVITY", SensorDeviceClass.ENERGY_STORAGE: "CONF_IS_ENERGY", SensorDeviceClass.VOLUME_STORAGE: "CONF_IS_VOLUME", }.get(device_class, f"CONF_IS_{device_class.value.upper()}") @@ -59,6 +67,7 @@ def test_matches_device_classes(device_class: SensorDeviceClass) -> None: # Ensure it has correct value constant_value = { SensorDeviceClass.BATTERY: "is_battery_level", + SensorDeviceClass.CONDUCTIVITY: "is_conductivity", SensorDeviceClass.ENERGY_STORAGE: "is_energy", SensorDeviceClass.VOLUME_STORAGE: "is_volume", }.get(device_class, f"is_{device_class.value}") @@ -461,6 +470,7 @@ async def test_if_state_not_above_below( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, ) -> None: """Test for bad value conditions.""" @@ -503,7 +513,7 @@ async def test_if_state_above( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for value conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -549,22 +559,22 @@ async def test_if_state_above( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 9) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 11) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -572,7 +582,7 @@ async def test_if_state_above_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for value conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -618,22 +628,22 @@ 
async def test_if_state_above_legacy( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 9) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 11) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -641,7 +651,7 @@ async def test_if_state_below( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for value conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -687,22 +697,22 @@ async def test_if_state_below( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 11) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 9) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -710,7 +720,7 @@ async def test_if_state_between( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for value conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -757,30 +767,30 @@ async def test_if_state_between( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 9) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 11) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "event - test_event1" hass.states.async_set(entry.entity_id, 21) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.states.async_set(entry.entity_id, 19) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "event - test_event1" + assert len(calls) == 2 + assert calls[1].data["some"] == "event - test_event1" diff --git a/tests/components/sensor/test_device_trigger.py b/tests/components/sensor/test_device_trigger.py index f50e92bc9df..87a6d9929c3 100644 --- 
a/tests/components/sensor/test_device_trigger.py +++ b/tests/components/sensor/test_device_trigger.py @@ -31,6 +31,7 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, setup_test_component_platform, ) @@ -40,6 +41,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.mark.parametrize( "device_class", [ @@ -55,6 +62,7 @@ def test_matches_device_classes(device_class: SensorDeviceClass) -> None: SensorDeviceClass.BATTERY: "CONF_BATTERY_LEVEL", SensorDeviceClass.CO: "CONF_CO", SensorDeviceClass.CO2: "CONF_CO2", + SensorDeviceClass.CONDUCTIVITY: "CONF_CONDUCTIVITY", SensorDeviceClass.ENERGY_STORAGE: "CONF_ENERGY", SensorDeviceClass.VOLUME_STORAGE: "CONF_VOLUME", }.get(device_class, f"CONF_{device_class.value.upper()}") @@ -63,6 +71,7 @@ def test_matches_device_classes(device_class: SensorDeviceClass) -> None: # Ensure it has correct value constant_value = { SensorDeviceClass.BATTERY: "battery_level", + SensorDeviceClass.CONDUCTIVITY: "conductivity", SensorDeviceClass.ENERGY_STORAGE: "energy", SensorDeviceClass.VOLUME_STORAGE: "volume", }.get(device_class, device_class.value) @@ -418,6 +427,7 @@ async def test_if_fires_not_on_above_below( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, ) -> None: """Test for value triggers firing.""" @@ -457,7 +467,7 @@ async def test_if_fires_on_state_above( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for value triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -503,18 +513,17 @@ async def test_if_fires_on_state_above( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 9) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] - == f"bat_low device - {entry.entity_id} - 9 - 11 - None" + calls[0].data["some"] == f"bat_low device - {entry.entity_id} - 9 - 11 - None" ) @@ -523,7 +532,7 @@ async def test_if_fires_on_state_below( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for value triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -569,18 +578,17 @@ async def test_if_fires_on_state_below( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 9) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] - == f"bat_low device - {entry.entity_id} - 11 - 9 - None" + calls[0].data["some"] == f"bat_low device - 
{entry.entity_id} - 11 - 9 - None" ) @@ -589,7 +597,7 @@ async def test_if_fires_on_state_between( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for value triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -636,30 +644,28 @@ async def test_if_fires_on_state_between( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 9) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] - == f"bat_low device - {entry.entity_id} - 9 - 11 - None" + calls[0].data["some"] == f"bat_low device - {entry.entity_id} - 9 - 11 - None" ) hass.states.async_set(entry.entity_id, 21) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 hass.states.async_set(entry.entity_id, 19) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[1].data["some"] - == f"bat_low device - {entry.entity_id} - 21 - 19 - None" + calls[1].data["some"] == f"bat_low device - {entry.entity_id} - 21 - 19 - None" ) @@ -668,7 +674,7 @@ async def test_if_fires_on_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for value triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -714,18 +720,17 @@ async def test_if_fires_on_state_legacy( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 9) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] - == f"bat_low device - {entry.entity_id} - 9 - 11 - None" + calls[0].data["some"] == f"bat_low device - {entry.entity_id} - 9 - 11 - None" ) @@ -734,7 +739,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -781,17 +786,17 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, 10) hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await hass.async_block_till_done() assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - 10 - 11 - 0:00:05" ) diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index 2504ea80d84..126e327f364 100644 --- a/tests/components/sensor/test_init.py 
+++ b/tests/components/sensor/test_init.py @@ -2,13 +2,13 @@ from __future__ import annotations -from collections.abc import Generator from datetime import UTC, date, datetime from decimal import Decimal from types import ModuleType from typing import Any import pytest +from typing_extensions import Generator from homeassistant.components import sensor from homeassistant.components.number import NumberDeviceClass @@ -418,7 +418,7 @@ async def test_restore_sensor_save_state( assert state["entity_id"] == entity0.entity_id extra_data = hass_storage[RESTORE_STATE_KEY]["data"][0]["extra_data"] assert extra_data == expected_extra_data - assert type(extra_data["native_value"]) is native_value_type + assert type(extra_data["native_value"]) == native_value_type @pytest.mark.parametrize( @@ -479,7 +479,7 @@ async def test_restore_sensor_restore_state( assert hass.states.get(entity0.entity_id) assert entity0.native_value == native_value - assert type(entity0.native_value) is native_value_type + assert type(entity0.native_value) == native_value_type assert entity0.native_unit_of_measurement == uom @@ -942,21 +942,7 @@ async def test_custom_unit_change( "1000000", "1093613", SensorDeviceClass.DISTANCE, - ), - # Volume Storage (subclass of Volume) - ( - US_CUSTOMARY_SYSTEM, - UnitOfVolume.LITERS, - UnitOfVolume.GALLONS, - UnitOfVolume.GALLONS, - UnitOfVolume.FLUID_OUNCES, - 1000, - "1000", - "264", - "264", - "33814", - SensorDeviceClass.VOLUME_STORAGE, - ), + ) ], ) async def test_unit_conversion_priority( diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 0e8c2a5e188..62cb66d2053 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -1,11 +1,10 @@ """The tests for sensor recorder platform.""" -from collections.abc import Iterable from datetime import datetime, timedelta import math from statistics import mean -from typing import Any, Literal -from unittest.mock import ANY, patch +from typing import Literal +from unittest.mock import patch from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory @@ -38,7 +37,6 @@ from homeassistant.components.recorder.util import get_instance, session_scope from homeassistant.components.sensor import ATTR_OPTIONS, DOMAIN, SensorDeviceClass from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant, State -from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM @@ -52,14 +50,9 @@ from tests.components.recorder.common import ( async_recorder_block_till_done, async_wait_recording_done, do_adhoc_statistics, - get_start_time, statistics_during_period, ) -from tests.typing import ( - MockHAClientWebSocket, - RecorderInstanceGenerator, - WebSocketGenerator, -) +from tests.typing import RecorderInstanceGenerator, WebSocketGenerator BATTERY_SENSOR_ATTRIBUTES = { "device_class": "battery", @@ -102,7 +95,7 @@ KW_SENSOR_ATTRIBUTES = { @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: """Set up recorder patches.""" @@ -112,24 +105,6 @@ def setup_recorder(recorder_mock: Recorder) -> Recorder: """Set up recorder.""" -@pytest.fixture(autouse=True) -def disable_mariadb_issue() -> None: - """Disable creating issue 
about outdated MariaDB version.""" - with patch( - "homeassistant.components.recorder.util._async_create_mariadb_range_index_regression_issue" - ): - yield - - -@pytest.fixture(autouse=True) -def disable_sqlite_issue() -> None: - """Disable creating issue about outdated SQLite version.""" - with patch( - "homeassistant.components.recorder.util._async_create_issue_deprecated_version" - ): - yield - - async def async_list_statistic_ids( hass: HomeAssistant, statistic_ids: set[str] | None = None, @@ -141,79 +116,6 @@ async def async_list_statistic_ids( ) -async def assert_statistic_ids( - hass: HomeAssistant, - expected_result: list[dict[str, Any]], -) -> None: - """Assert statistic ids.""" - with session_scope(hass=hass, read_only=True) as session: - db_states = list(session.query(StatisticsMeta)) - assert len(db_states) == len(expected_result) - for i, db_state in enumerate(db_states): - assert db_state.statistic_id == expected_result[i]["statistic_id"] - assert ( - db_state.unit_of_measurement - == expected_result[i]["unit_of_measurement"] - ) - - -def assert_issues( - hass: HomeAssistant, - expected_issues: dict[str, dict[str, Any]], -) -> None: - """Assert statistics issues.""" - issue_registry = ir.async_get(hass) - assert len(issue_registry.issues) == len(expected_issues) - for issue_id, expected_issue_data in expected_issues.items(): - expected_translation_placeholders = dict(expected_issue_data) - expected_translation_placeholders.pop("issue_type") - expected_issue = ir.IssueEntry( - active=True, - breaks_in_ha_version=None, - created=ANY, - data=expected_issue_data, - dismissed_version=None, - domain=DOMAIN, - is_fixable=False, - is_persistent=False, - issue_domain=None, - issue_id=issue_id, - learn_more_url=None, - severity=ir.IssueSeverity.WARNING, - translation_key=expected_issue_data["issue_type"], - translation_placeholders=expected_translation_placeholders, - ) - assert (DOMAIN, issue_id) in issue_registry.issues - assert issue_registry.issues[(DOMAIN, issue_id)] == expected_issue - - -async def assert_validation_result( - hass: HomeAssistant, - client: MockHAClientWebSocket, - expected_validation_result: dict[str, list[dict[str, Any]]], - expected_issues: Iterable[str], -) -> None: - """Assert statistics validation result.""" - await client.send_json_auto_id({"type": "recorder/validate_statistics"}) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_validation_result - await hass.async_block_till_done() - - # Check we get corresponding issues - await client.send_json_auto_id({"type": "recorder/update_statistics_issues"}) - response = await client.receive_json() - assert response["success"] - expected_issue_registry_issues = { - f"{issue['type']}_{statistic_id}": issue["data"] | {"issue_type": issue["type"]} - for statistic_id, issues in expected_validation_result.items() - for issue in issues - if issue["type"] in expected_issues - } - - assert_issues(hass, expected_issue_registry_issues) - - @pytest.mark.parametrize( ( "device_class", @@ -261,7 +163,7 @@ async def test_compile_hourly_statistics( max, ) -> None: """Test compiling hourly statistics.""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -345,7 +247,7 @@ async def test_compile_hourly_statistics_with_some_same_last_updated( If the last updated value is the same we will have a zero duration. 
""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -459,7 +361,7 @@ async def test_compile_hourly_statistics_with_all_same_last_updated( If the last updated value is the same we will have a zero duration. """ - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -565,7 +467,7 @@ async def test_compile_hourly_statistics_only_state_is_and_end_of_period( max, ) -> None: """Test compiling hourly statistics when the only state at end of period.""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -659,7 +561,7 @@ async def test_compile_hourly_statistics_purged_state_changes( unit_class, ) -> None: """Test compiling hourly statistics.""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -730,7 +632,7 @@ async def test_compile_hourly_statistics_wrong_unit( attributes, ) -> None: """Test compiling hourly statistics for sensor with unit not matching device class.""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -954,7 +856,7 @@ async def test_compile_hourly_sum_statistics_amount( factor, ) -> None: """Test compiling hourly statistics.""" - period0 = get_start_time(dt_util.utcnow()) + period0 = dt_util.utcnow() period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1138,7 +1040,7 @@ async def test_compile_hourly_sum_statistics_amount_reset_every_state_change( factor, ) -> None: """Test compiling hourly statistics.""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -1261,7 +1163,7 @@ async def test_compile_hourly_sum_statistics_amount_invalid_last_reset( factor, ) -> None: """Test compiling hourly statistics.""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -1361,7 +1263,7 @@ async def test_compile_hourly_sum_statistics_nan_inf_state( factor, ) -> None: """Test compiling hourly statistics with nan and inf states.""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -1496,7 +1398,7 @@ async def test_compile_hourly_sum_statistics_negative_state( offset, ) -> None: """Test compiling hourly statistics with negative states.""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() hass.data.pop(loader.DATA_CUSTOM_COMPONENTS) mocksensor = MockSensor(name="custom_sensor") @@ -1504,11 +1406,10 @@ 
async def test_compile_hourly_sum_statistics_negative_state( setup_test_component_platform(hass, DOMAIN, [mocksensor], built_in=False) await async_setup_component(hass, "homeassistant", {}) - with freeze_time(zero) as freezer: - await async_setup_component( - hass, "sensor", {"sensor": [{"platform": "demo"}, {"platform": "test"}]} - ) - await hass.async_block_till_done() + await async_setup_component( + hass, "sensor", {"sensor": [{"platform": "demo"}, {"platform": "test"}]} + ) + await hass.async_block_till_done() attributes = { "device_class": device_class, "state_class": state_class, @@ -1609,7 +1510,7 @@ async def test_compile_hourly_sum_statistics_total_no_reset( factor, ) -> None: """Test compiling hourly statistics.""" - period0 = get_start_time(dt_util.utcnow()) + period0 = dt_util.utcnow() period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1722,7 +1623,7 @@ async def test_compile_hourly_sum_statistics_total_increasing( factor, ) -> None: """Test compiling hourly statistics.""" - period0 = get_start_time(dt_util.utcnow()) + period0 = dt_util.utcnow() period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1835,7 +1736,7 @@ async def test_compile_hourly_sum_statistics_total_increasing_small_dip( factor, ) -> None: """Test small dips in sensor readings do not trigger a reset.""" - period0 = get_start_time(dt_util.utcnow()) + period0 = dt_util.utcnow() period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1937,7 +1838,7 @@ async def test_compile_hourly_energy_statistics_unsupported( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test compiling hourly statistics.""" - period0 = get_start_time(dt_util.utcnow()) + period0 = dt_util.utcnow() period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -2041,7 +1942,7 @@ async def test_compile_hourly_energy_statistics_multiple( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test compiling multiple hourly statistics.""" - period0 = get_start_time(dt_util.utcnow()) + period0 = dt_util.utcnow() period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -2255,7 +2156,7 @@ async def test_compile_hourly_statistics_unchanged( value, ) -> None: """Test compiling hourly statistics, with no changes during the hour.""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2298,7 +2199,7 @@ async def test_compile_hourly_statistics_partially_unavailable( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test compiling hourly statistics, with the sensor being partially unavailable.""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2367,7 +2268,7 @@ async def test_compile_hourly_statistics_unavailable( sensor.test1 is unavailable and should not have statistics 
generated sensor.test2 should have statistics generated """ - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2414,7 +2315,7 @@ async def test_compile_hourly_statistics_fails( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test compiling hourly statistics throws.""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2532,29 +2433,30 @@ async def test_list_statistic_ids( @pytest.mark.parametrize( - "energy_attributes", + "_attributes", [{**ENERGY_SENSOR_ATTRIBUTES, "last_reset": 0}, TEMPERATURE_SENSOR_ATTRIBUTES], ) async def test_list_statistic_ids_unsupported( hass: HomeAssistant, - energy_attributes: dict[str, Any], + caplog: pytest.LogCaptureFixture, + _attributes, ) -> None: """Test listing future statistic ids for unsupported sensor.""" await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) - attributes = dict(energy_attributes) + attributes = dict(_attributes) hass.states.async_set("sensor.test1", 0, attributes=attributes) if "last_reset" in attributes: attributes.pop("unit_of_measurement") hass.states.async_set("last_reset.test2", 0, attributes=attributes) - attributes = dict(energy_attributes) + attributes = dict(_attributes) if "unit_of_measurement" in attributes: attributes["unit_of_measurement"] = "invalid" hass.states.async_set("sensor.test3", 0, attributes=attributes) attributes.pop("unit_of_measurement") hass.states.async_set("sensor.test4", 0, attributes=attributes) - attributes = dict(energy_attributes) + attributes = dict(_attributes) attributes["state_class"] = "invalid" hass.states.async_set("sensor.test5", 0, attributes=attributes) attributes.pop("state_class") @@ -2590,7 +2492,7 @@ async def test_compile_hourly_statistics_changing_units_1( This tests the case where the recorder cannot convert between the units. """ - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2719,7 +2621,7 @@ async def test_compile_hourly_statistics_changing_units_2( This tests the behaviour when the sensor units are note supported by any unit converter. """ - zero = get_start_time(dt_util.utcnow()) - timedelta(seconds=30 * 5) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2798,7 +2700,7 @@ async def test_compile_hourly_statistics_changing_units_3( This tests the behaviour when the sensor units are note supported by any unit converter. """ - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2919,7 +2821,7 @@ async def test_compile_hourly_statistics_convert_units_1( This tests the case where the recorder can convert between the units. 
""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -3078,7 +2980,7 @@ async def test_compile_hourly_statistics_equivalent_units_1( max, ) -> None: """Test compiling hourly statistics where units change from one hour to the next.""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -3203,7 +3105,7 @@ async def test_compile_hourly_statistics_equivalent_units_2( max, ) -> None: """Test compiling hourly statistics where units change during an hour.""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -3227,7 +3129,7 @@ async def test_compile_hourly_statistics_equivalent_units_2( ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - do_adhoc_statistics(hass, start=zero + timedelta(seconds=30 * 10)) + do_adhoc_statistics(hass, start=zero + timedelta(seconds=30 * 5)) await async_wait_recording_done(hass) assert "The unit of sensor.test1 is changing" not in caplog.text assert "and matches the unit of already compiled statistics" not in caplog.text @@ -3249,9 +3151,9 @@ async def test_compile_hourly_statistics_equivalent_units_2( "sensor.test1": [ { "start": process_timestamp( - zero + timedelta(seconds=30 * 10) + zero + timedelta(seconds=30 * 5) ).timestamp(), - "end": process_timestamp(zero + timedelta(seconds=30 * 20)).timestamp(), + "end": process_timestamp(zero + timedelta(seconds=30 * 15)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), @@ -3296,7 +3198,7 @@ async def test_compile_hourly_statistics_changing_device_class_1( Device class is ignored, meaning changing device class should not influence the statistics. """ - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -3507,7 +3409,7 @@ async def test_compile_hourly_statistics_changing_device_class_2( Device class is ignored, meaning changing device class should not influence the statistics. 
""" - zero = get_start_time(dt_util.utcnow()) + zero = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -3645,7 +3547,7 @@ async def test_compile_hourly_statistics_changing_state_class( max, ) -> None: """Test compiling hourly statistics where state class changes.""" - period0 = get_start_time(dt_util.utcnow()) + period0 = dt_util.utcnow() period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period0 + timedelta(minutes=10) await async_setup_component(hass, "sensor", {}) @@ -4215,7 +4117,7 @@ async def async_record_states( one = zero + timedelta(seconds=1 * 5) two = one + timedelta(seconds=10 * 5) three = two + timedelta(seconds=40 * 5) - four = three + timedelta(seconds=9 * 5) + four = three + timedelta(seconds=10 * 5) states = {entity_id: []} freezer.move_to(one) @@ -4233,8 +4135,8 @@ async def async_record_states( @pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, @@ -4276,8 +4178,22 @@ async def test_validate_unit_change_convertible( The test also asserts that the sensor's device class is ignored. """ + msg_id = 1 - now = get_start_time(dt_util.utcnow()) + def next_id(): + nonlocal msg_id + msg_id += 1 + return msg_id + + async def assert_validation_result(client, expected_result): + await client.send_json( + {"id": next_id(), "type": "recorder/validate_statistics"} + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == expected_result + + now = dt_util.utcnow() hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4285,36 +4201,27 @@ async def test_validate_unit_change_convertible( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # No statistics, unit in state matching device class - empty response hass.states.async_set( - "sensor.test", - 10, - attributes={**attributes, "unit_of_measurement": unit}, - timestamp=now.timestamp(), + "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit} ) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # No statistics, unit in state not matching device class - empty response hass.states.async_set( - "sensor.test", - 11, - attributes={**attributes, "unit_of_measurement": "dogs"}, - timestamp=now.timestamp(), + "sensor.test", 11, attributes={**attributes, "unit_of_measurement": "dogs"} ) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Statistics has run, incompatible unit - expect error await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now) hass.states.async_set( - "sensor.test", - 12, - attributes={**attributes, "unit_of_measurement": "dogs"}, - timestamp=now.timestamp(), + "sensor.test", 12, attributes={**attributes, "unit_of_measurement": "dogs"} ) await async_recorder_block_till_done(hass) expected = { @@ -4330,57 
+4237,31 @@ async def test_validate_unit_change_convertible( } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) - - # Unavailable state - empty response - hass.states.async_set( - "sensor.test", - "unavailable", - attributes={**attributes, "unit_of_measurement": "dogs"}, - timestamp=now.timestamp(), - ) - await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) - - # Unknown state - empty response - hass.states.async_set( - "sensor.test", - "unknown", - attributes={**attributes, "unit_of_measurement": "dogs"}, - timestamp=now.timestamp(), - ) - await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, expected) # Valid state - empty response hass.states.async_set( - "sensor.test", - 13, - attributes={**attributes, "unit_of_measurement": unit}, - timestamp=now.timestamp(), + "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit} ) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Valid state, statistic runs again - empty response do_adhoc_statistics(hass, start=now + timedelta(hours=1)) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Valid state in compatible unit - empty response hass.states.async_set( - "sensor.test", - 13, - attributes={**attributes, "unit_of_measurement": unit2}, - timestamp=now.timestamp(), + "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit2} ) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Valid state, statistic runs again - empty response do_adhoc_statistics(hass, start=now + timedelta(hours=2)) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Remove the state - expect error about missing state hass.states.async_remove("sensor.test") @@ -4392,7 +4273,7 @@ async def test_validate_unit_change_convertible( } ], } - await assert_validation_result(hass, client, expected, {}) + await assert_validation_result(client, expected) @pytest.mark.parametrize( @@ -4411,7 +4292,22 @@ async def test_validate_statistics_unit_ignore_device_class( The test asserts that the sensor's device class is ignored. 
""" - now = get_start_time(dt_util.utcnow()) + msg_id = 1 + + def next_id(): + nonlocal msg_id + msg_id += 1 + return msg_id + + async def assert_validation_result(client, expected_result): + await client.send_json( + {"id": next_id(), "type": "recorder/validate_statistics"} + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == expected_result + + now = dt_util.utcnow() hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4419,34 +4315,29 @@ async def test_validate_statistics_unit_ignore_device_class( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # No statistics, no device class - empty response initial_attributes = {"state_class": "measurement", "unit_of_measurement": "dogs"} - hass.states.async_set( - "sensor.test", 10, attributes=initial_attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", 10, attributes=initial_attributes) await hass.async_block_till_done() - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Statistics has run, device class set not matching unit - empty response do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) hass.states.async_set( - "sensor.test", - 12, - attributes={**attributes, "unit_of_measurement": "dogs"}, - timestamp=now.timestamp(), + "sensor.test", 12, attributes={**attributes, "unit_of_measurement": "dogs"} ) await hass.async_block_till_done() - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) @pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, @@ -4493,10 +4384,24 @@ async def test_validate_statistics_unit_change_no_device_class( conversion, and the unit is then changed to a unit which can and cannot be converted to the original unit. 
""" + msg_id = 1 attributes = dict(attributes) attributes.pop("device_class") - now = get_start_time(dt_util.utcnow()) + def next_id(): + nonlocal msg_id + msg_id += 1 + return msg_id + + async def assert_validation_result(client, expected_result): + await client.send_json( + {"id": next_id(), "type": "recorder/validate_statistics"} + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == expected_result + + now = dt_util.utcnow() hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4504,36 +4409,27 @@ async def test_validate_statistics_unit_change_no_device_class( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # No statistics, sensor state set - empty response hass.states.async_set( - "sensor.test", - 10, - attributes={**attributes, "unit_of_measurement": unit}, - timestamp=now.timestamp(), + "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit} ) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # No statistics, sensor state set to an incompatible unit - empty response hass.states.async_set( - "sensor.test", - 11, - attributes={**attributes, "unit_of_measurement": "dogs"}, - timestamp=now.timestamp(), + "sensor.test", 11, attributes={**attributes, "unit_of_measurement": "dogs"} ) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Statistics has run, incompatible unit - expect error await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now) hass.states.async_set( - "sensor.test", - 12, - attributes={**attributes, "unit_of_measurement": "dogs"}, - timestamp=now.timestamp(), + "sensor.test", 12, attributes={**attributes, "unit_of_measurement": "dogs"} ) await async_recorder_block_till_done(hass) expected = { @@ -4549,57 +4445,31 @@ async def test_validate_statistics_unit_change_no_device_class( } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) - - # Unavailable state - empty response - hass.states.async_set( - "sensor.test", - "unavailable", - attributes={**attributes, "unit_of_measurement": "dogs"}, - timestamp=now.timestamp(), - ) - await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) - - # Unknown state - empty response - hass.states.async_set( - "sensor.test", - "unknown", - attributes={**attributes, "unit_of_measurement": "dogs"}, - timestamp=now.timestamp(), - ) - await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, expected) # Valid state - empty response hass.states.async_set( - "sensor.test", - 13, - attributes={**attributes, "unit_of_measurement": unit}, - timestamp=now.timestamp(), + "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit} ) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Valid state, statistic runs again - empty response do_adhoc_statistics(hass, start=now + timedelta(hours=1)) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Valid state in compatible unit - empty response 
hass.states.async_set( - "sensor.test", - 13, - attributes={**attributes, "unit_of_measurement": unit2}, - timestamp=now.timestamp(), + "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit2} ) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Valid state, statistic runs again - empty response do_adhoc_statistics(hass, start=now + timedelta(hours=2)) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Remove the state - expect error about missing state hass.states.async_remove("sensor.test") @@ -4611,7 +4481,7 @@ async def test_validate_statistics_unit_change_no_device_class( } ], } - await assert_validation_result(hass, client, expected, {}) + await assert_validation_result(client, expected) @pytest.mark.parametrize( @@ -4620,7 +4490,7 @@ async def test_validate_statistics_unit_change_no_device_class( (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W"), ], ) -async def test_validate_statistics_state_class_removed( +async def test_validate_statistics_unsupported_state_class( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, units, @@ -4628,7 +4498,22 @@ async def test_validate_statistics_state_class_removed( unit, ) -> None: """Test validate_statistics.""" - now = get_start_time(dt_util.utcnow()) + msg_id = 1 + + def next_id(): + nonlocal msg_id + msg_id += 1 + return msg_id + + async def assert_validation_result(client, expected_result): + await client.send_json( + {"id": next_id(), "type": "recorder/validate_statistics"} + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == expected_result + + now = dt_util.utcnow() hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4636,109 +4521,35 @@ async def test_validate_statistics_state_class_removed( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # No statistics, valid state - empty response - hass.states.async_set( - "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", 10, attributes=attributes) await hass.async_block_till_done() - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Statistics has run, empty response do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # State update with invalid state class, expect error _attributes = dict(attributes) _attributes.pop("state_class") - hass.states.async_set( - "sensor.test", 12, attributes=_attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", 12, attributes=_attributes) await hass.async_block_till_done() expected = { "sensor.test": [ { - "data": {"statistic_id": "sensor.test"}, - "type": "state_class_removed", + "data": { + "state_class": None, + "statistic_id": "sensor.test", + }, + "type": "unsupported_state_class", } ], } - await assert_validation_result(hass, client, expected, {"state_class_removed"}) - - # Unavailable state - empty response - hass.states.async_set( - "sensor.test", "unavailable", attributes=_attributes, timestamp=now.timestamp() - ) - await async_recorder_block_till_done(hass) - await 
assert_validation_result(hass, client, {}, {}) - - # Unknown state - empty response - hass.states.async_set( - "sensor.test", "unknown", attributes=_attributes, timestamp=now.timestamp() - ) - await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) - - -@pytest.mark.parametrize( - ("units", "attributes", "unit"), - [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W"), - ], -) -async def test_validate_statistics_state_class_removed_issue_cleaned_up( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - units, - attributes, - unit, -) -> None: - """Test validate_statistics.""" - now = get_start_time(dt_util.utcnow()) - - hass.config.units = units - await async_setup_component(hass, "sensor", {}) - await async_recorder_block_till_done(hass) - client = await hass_ws_client() - - # No statistics, no state - empty response - await assert_validation_result(hass, client, {}, {}) - - # No statistics, valid state - empty response - hass.states.async_set( - "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() - ) - await hass.async_block_till_done() - await assert_validation_result(hass, client, {}, {}) - - # Statistics has run, empty response - do_adhoc_statistics(hass, start=now) - await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) - - # State update with invalid state class, expect error - _attributes = dict(attributes) - _attributes.pop("state_class") - hass.states.async_set( - "sensor.test", 12, attributes=_attributes, timestamp=now.timestamp() - ) - await hass.async_block_till_done() - expected = { - "sensor.test": [ - { - "data": {"statistic_id": "sensor.test"}, - "type": "state_class_removed", - } - ], - } - await assert_validation_result(hass, client, expected, {"state_class_removed"}) - - # Remove the statistics - empty response - get_instance(hass).async_clear_statistics(["sensor.test"]) - await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, expected) @pytest.mark.parametrize( @@ -4755,7 +4566,22 @@ async def test_validate_statistics_sensor_no_longer_recorded( unit, ) -> None: """Test validate_statistics.""" - now = get_start_time(dt_util.utcnow()) + msg_id = 1 + + def next_id(): + nonlocal msg_id + msg_id += 1 + return msg_id + + async def assert_validation_result(client, expected_result): + await client.send_json( + {"id": next_id(), "type": "recorder/validate_statistics"} + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == expected_result + + now = dt_util.utcnow() hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4763,19 +4589,17 @@ async def test_validate_statistics_sensor_no_longer_recorded( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # No statistics, valid state - empty response - hass.states.async_set( - "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", 10, attributes=attributes) await hass.async_block_till_done() - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Statistics has run, empty response do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await 
assert_validation_result(client, {}) # Sensor no longer recorded, expect error expected = { @@ -4792,7 +4616,7 @@ async def test_validate_statistics_sensor_no_longer_recorded( "entity_filter", return_value=False, ): - await assert_validation_result(hass, client, expected, {}) + await assert_validation_result(client, expected) @pytest.mark.parametrize( @@ -4809,7 +4633,22 @@ async def test_validate_statistics_sensor_not_recorded( unit, ) -> None: """Test validate_statistics.""" - now = get_start_time(dt_util.utcnow()) + msg_id = 1 + + def next_id(): + nonlocal msg_id + msg_id += 1 + return msg_id + + async def assert_validation_result(client, expected_result): + await client.send_json( + {"id": next_id(), "type": "recorder/validate_statistics"} + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == expected_result + + now = dt_util.utcnow() hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4817,7 +4656,7 @@ async def test_validate_statistics_sensor_not_recorded( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Sensor not recorded, expect error expected = { @@ -4834,16 +4673,14 @@ async def test_validate_statistics_sensor_not_recorded( "entity_filter", return_value=False, ): - hass.states.async_set( - "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", 10, attributes=attributes) await hass.async_block_till_done() - await assert_validation_result(hass, client, expected, {}) + await assert_validation_result(client, expected) # Statistics has run, expect same error do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, expected, {}) + await assert_validation_result(client, expected) @pytest.mark.parametrize( @@ -4860,7 +4697,22 @@ async def test_validate_statistics_sensor_removed( unit, ) -> None: """Test validate_statistics.""" - now = get_start_time(dt_util.utcnow()) + msg_id = 1 + + def next_id(): + nonlocal msg_id + msg_id += 1 + return msg_id + + async def assert_validation_result(client, expected_result): + await client.send_json( + {"id": next_id(), "type": "recorder/validate_statistics"} + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == expected_result + + now = dt_util.utcnow() hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4868,19 +4720,17 @@ async def test_validate_statistics_sensor_removed( client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # No statistics, valid state - empty response - hass.states.async_set( - "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() - ) + hass.states.async_set("sensor.test", 10, attributes=attributes) await hass.async_block_till_done() - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Statistics has run, empty response do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Sensor removed, expect error hass.states.async_remove("sensor.test") @@ -4892,7 +4742,7 @@ async def test_validate_statistics_sensor_removed( } ], } - 
await assert_validation_result(hass, client, expected, {}) + await assert_validation_result(client, expected) @pytest.mark.parametrize( @@ -4910,63 +4760,77 @@ async def test_validate_statistics_unit_change_no_conversion( unit2, ) -> None: """Test validate_statistics.""" - now = get_start_time(dt_util.utcnow()) + msg_id = 1 + + def next_id(): + nonlocal msg_id + msg_id += 1 + return msg_id + + async def assert_validation_result(client, expected_result): + await client.send_json( + {"id": next_id(), "type": "recorder/validate_statistics"} + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == expected_result + + async def assert_statistic_ids(expected_result): + with session_scope(hass=hass, read_only=True) as session: + db_states = list(session.query(StatisticsMeta)) + assert len(db_states) == len(expected_result) + for i, db_state in enumerate(db_states): + assert db_state.statistic_id == expected_result[i]["statistic_id"] + assert ( + db_state.unit_of_measurement + == expected_result[i]["unit_of_measurement"] + ) + + now = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # No statistics, original unit - empty response hass.states.async_set( - "sensor.test", - 10, - attributes={**attributes, "unit_of_measurement": unit1}, - timestamp=now.timestamp(), + "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit1} ) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # No statistics, changed unit - empty response hass.states.async_set( - "sensor.test", - 11, - attributes={**attributes, "unit_of_measurement": unit2}, - timestamp=now.timestamp(), + "sensor.test", 11, attributes={**attributes, "unit_of_measurement": unit2} ) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Run statistics, no statistics will be generated because of conflicting units await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) - await assert_statistic_ids(hass, []) + await assert_statistic_ids([]) # No statistics, original unit - empty response hass.states.async_set( - "sensor.test", - 12, - attributes={**attributes, "unit_of_measurement": unit1}, - timestamp=now.timestamp(), + "sensor.test", 12, attributes={**attributes, "unit_of_measurement": unit1} ) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Run statistics one hour later, only the state with unit1 will be considered await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now + timedelta(hours=1)) await async_recorder_block_till_done(hass) await assert_statistic_ids( - hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] + [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Change unit - expect error hass.states.async_set( - "sensor.test", - 13, - attributes={**attributes, "unit_of_measurement": unit2}, - timestamp=now.timestamp(), + "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit2} ) await async_recorder_block_till_done(hass) expected = { @@ -4982,43 +4846,20 @@ async def 
test_validate_statistics_unit_change_no_conversion( } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) - - # Unavailable state - empty response - hass.states.async_set( - "sensor.test", - "unavailable", - attributes={**attributes, "unit_of_measurement": unit2}, - timestamp=now.timestamp(), - ) - await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) - - # Unknown state - empty response - hass.states.async_set( - "sensor.test", - "unknown", - attributes={**attributes, "unit_of_measurement": unit2}, - timestamp=now.timestamp(), - ) - await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, expected) # Original unit - empty response hass.states.async_set( - "sensor.test", - 14, - attributes={**attributes, "unit_of_measurement": unit1}, - timestamp=now.timestamp(), + "sensor.test", 14, attributes={**attributes, "unit_of_measurement": unit1} ) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Valid state, statistic runs again - empty response await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now + timedelta(hours=2)) await async_recorder_block_till_done(hass) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Remove the state - expect error hass.states.async_remove("sensor.test") @@ -5030,7 +4871,7 @@ async def test_validate_statistics_unit_change_no_conversion( } ], } - await assert_validation_result(hass, client, expected, {}) + await assert_validation_result(client, expected) @pytest.mark.parametrize( @@ -5053,49 +4894,69 @@ async def test_validate_statistics_unit_change_equivalent_units( This tests no validation issue is created when a sensor's unit changes to an equivalent unit. 
""" - now = get_start_time(dt_util.utcnow()) + msg_id = 1 + + def next_id(): + nonlocal msg_id + msg_id += 1 + return msg_id + + async def assert_validation_result(client, expected_result): + await client.send_json( + {"id": next_id(), "type": "recorder/validate_statistics"} + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == expected_result + + async def assert_statistic_ids(expected_result): + with session_scope(hass=hass, read_only=True) as session: + db_states = list(session.query(StatisticsMeta)) + assert len(db_states) == len(expected_result) + for i, db_state in enumerate(db_states): + assert db_state.statistic_id == expected_result[i]["statistic_id"] + assert ( + db_state.unit_of_measurement + == expected_result[i]["unit_of_measurement"] + ) + + now = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # No statistics, original unit - empty response hass.states.async_set( - "sensor.test", - 10, - attributes={**attributes, "unit_of_measurement": unit1}, - timestamp=now.timestamp(), + "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit1} ) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Run statistics await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) await assert_statistic_ids( - hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] + [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) # Units changed to an equivalent unit - empty response hass.states.async_set( - "sensor.test", - 12, - attributes={**attributes, "unit_of_measurement": unit2}, - timestamp=now.timestamp() + 1, + "sensor.test", 12, attributes={**attributes, "unit_of_measurement": unit2} ) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Run statistics one hour later, metadata will be updated await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now + timedelta(hours=1)) await async_recorder_block_till_done(hass) await assert_statistic_ids( - hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit2}] + [{"statistic_id": "sensor.test", "unit_of_measurement": unit2}] ) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) @pytest.mark.parametrize( @@ -5117,38 +4978,59 @@ async def test_validate_statistics_unit_change_equivalent_units_2( This tests a validation issue is created when a sensor's unit changes to an equivalent unit which is not known to the unit converters. 
""" - now = get_start_time(dt_util.utcnow()) + + msg_id = 1 + + def next_id(): + nonlocal msg_id + msg_id += 1 + return msg_id + + async def assert_validation_result(client, expected_result): + await client.send_json( + {"id": next_id(), "type": "recorder/validate_statistics"} + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == expected_result + + async def assert_statistic_ids(expected_result): + with session_scope(hass=hass, read_only=True) as session: + db_states = list(session.query(StatisticsMeta)) + assert len(db_states) == len(expected_result) + for i, db_state in enumerate(db_states): + assert db_state.statistic_id == expected_result[i]["statistic_id"] + assert ( + db_state.unit_of_measurement + == expected_result[i]["unit_of_measurement"] + ) + + now = dt_util.utcnow() await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) client = await hass_ws_client() # No statistics, no state - empty response - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # No statistics, original unit - empty response hass.states.async_set( - "sensor.test", - 10, - attributes={**attributes, "unit_of_measurement": unit1}, - timestamp=now.timestamp(), + "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit1} ) - await assert_validation_result(hass, client, {}, {}) + await assert_validation_result(client, {}) # Run statistics await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) await assert_statistic_ids( - hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] + [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) # Units changed to an equivalent unit which is not known by the unit converters hass.states.async_set( - "sensor.test", - 12, - attributes={**attributes, "unit_of_measurement": unit2}, - timestamp=now.timestamp(), + "sensor.test", 12, attributes={**attributes, "unit_of_measurement": unit2} ) expected = { "sensor.test": [ @@ -5163,22 +5045,37 @@ async def test_validate_statistics_unit_change_equivalent_units_2( } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(client, expected) # Run statistics one hour later, metadata will not be updated await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now + timedelta(hours=1)) await async_recorder_block_till_done(hass) await assert_statistic_ids( - hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] + [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(client, expected) async def test_validate_statistics_other_domain( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test sensor does not raise issues for statistics for other domains.""" + msg_id = 1 + + def next_id(): + nonlocal msg_id + msg_id += 1 + return msg_id + + async def assert_validation_result(client, expected_result): + await client.send_json( + {"id": next_id(), "type": "recorder/validate_statistics"} + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == expected_result + await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) client = await hass_ws_client() @@ -5205,67 +5102,7 @@ async def 
test_validate_statistics_other_domain( await async_recorder_block_till_done(hass) # We should not get complains about the missing number entity - await assert_validation_result(hass, client, {}, {}) - - -@pytest.mark.parametrize( - ("units", "attributes", "unit"), - [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W"), - ], -) -async def test_update_statistics_issues( - hass: HomeAssistant, - units, - attributes, - unit, -) -> None: - """Test update_statistics_issues.""" - - async def one_hour_stats(start: datetime) -> datetime: - """Generate 5-minute statistics for one hour.""" - for _ in range(12): - do_adhoc_statistics(hass, start=start) - await async_wait_recording_done(hass) - start += timedelta(minutes=5) - return start - - now = get_start_time(dt_util.utcnow()) - - hass.config.units = units - await async_setup_component(hass, "sensor", {}) - await async_recorder_block_till_done(hass) - - # No statistics, no state - no issues - now = await one_hour_stats(now) - assert_issues(hass, {}) - - # Statistics, valid state - no issues - hass.states.async_set( - "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() - ) - await hass.async_block_till_done() - now = await one_hour_stats(now) - assert_issues(hass, {}) - - # State update with invalid state class, statistics did not run again - _attributes = dict(attributes) - _attributes.pop("state_class") - hass.states.async_set( - "sensor.test", 12, attributes=_attributes, timestamp=now.timestamp() - ) - await hass.async_block_till_done() - assert_issues(hass, {}) - - # Let statistics run for one hour, expect issue - now = await one_hour_stats(now) - expected = { - "state_class_removed_sensor.test": { - "issue_type": "state_class_removed", - "statistic_id": "sensor.test", - } - } - assert_issues(hass, expected) + await assert_validation_result(client, {}) async def async_record_meter_states( @@ -5360,9 +5197,7 @@ async def async_record_meter_state( return states -async def async_record_states_partially_unavailable( - hass: HomeAssistant, zero: datetime, entity_id: str, attributes: dict[str, Any] -) -> tuple[datetime, dict[str, list[State]]]: +async def async_record_states_partially_unavailable(hass, zero, entity_id, attributes): """Record some test states. We inject a bunch of state updates temperature sensors. 
@@ -5376,7 +5211,7 @@ async def async_record_states_partially_unavailable( one = zero + timedelta(seconds=1 * 5) two = one + timedelta(seconds=15 * 5) three = two + timedelta(seconds=30 * 5) - four = three + timedelta(seconds=14 * 5) + four = three + timedelta(seconds=15 * 5) states = {entity_id: []} with freeze_time(one) as freezer: @@ -5430,51 +5265,3 @@ async def test_exclude_attributes(hass: HomeAssistant) -> None: assert len(states) == 1 assert ATTR_OPTIONS not in states[0].attributes assert ATTR_FRIENDLY_NAME in states[0].attributes - - -async def test_clean_up_repairs( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test cleaning up repairs.""" - await async_setup_component(hass, "sensor", {}) - issue_registry = ir.async_get(hass) - client = await hass_ws_client() - - # Create some issues - def create_issue(domain: str, issue_id: str, data: dict | None) -> None: - ir.async_create_issue( - hass, - domain, - issue_id, - data=data, - is_fixable=False, - severity=ir.IssueSeverity.WARNING, - translation_key="", - ) - - create_issue("test", "test_issue", None) - create_issue(DOMAIN, "test_issue_1", None) - create_issue(DOMAIN, "test_issue_2", {"issue_type": "another_issue"}) - create_issue(DOMAIN, "test_issue_3", {"issue_type": "state_class_removed"}) - create_issue(DOMAIN, "test_issue_4", {"issue_type": "units_changed"}) - - # Check the issues - assert set(issue_registry.issues) == { - ("test", "test_issue"), - ("sensor", "test_issue_1"), - ("sensor", "test_issue_2"), - ("sensor", "test_issue_3"), - ("sensor", "test_issue_4"), - } - - # Request update of issues - await client.send_json_auto_id({"type": "recorder/update_statistics_issues"}) - response = await client.receive_json() - assert response["success"] - - # Check the issues - assert set(issue_registry.issues) == { - ("test", "test_issue"), - ("sensor", "test_issue_1"), - ("sensor", "test_issue_2"), - } diff --git a/tests/components/sensor/test_recorder_missing_stats.py b/tests/components/sensor/test_recorder_missing_stats.py index 43e18b89e72..d770c459426 100644 --- a/tests/components/sensor/test_recorder_missing_stats.py +++ b/tests/components/sensor/test_recorder_missing_stats.py @@ -1,6 +1,7 @@ """The tests for sensor recorder platform can catch up.""" from datetime import datetime, timedelta +from pathlib import Path import threading from unittest.mock import patch @@ -16,15 +17,11 @@ from homeassistant.components.recorder.statistics import ( from homeassistant.components.recorder.util import session_scope from homeassistant.core import CoreState from homeassistant.helpers import recorder as recorder_helper -from homeassistant.setup import async_setup_component +from homeassistant.setup import setup_component import homeassistant.util.dt as dt_util -from tests.common import async_test_home_assistant -from tests.components.recorder.common import ( - async_wait_recording_done, - do_adhoc_statistics, -) -from tests.typing import RecorderInstanceGenerator +from tests.common import get_test_home_assistant +from tests.components.recorder.common import do_adhoc_statistics, wait_recording_done POWER_SENSOR_ATTRIBUTES = { "device_class": "energy", @@ -43,34 +40,37 @@ def disable_db_issue_creation(): @pytest.mark.timeout(25) -@pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.parametrize("enable_missing_statistics", [True]) -@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage -async def test_compile_missing_statistics( - async_test_recorder: 
RecorderInstanceGenerator, freezer: FrozenDateTimeFactory +def test_compile_missing_statistics( + freezer: FrozenDateTimeFactory, recorder_db_url: str, tmp_path: Path ) -> None: """Test compile missing statistics.""" + if recorder_db_url == "sqlite://": + # On-disk database because we need to stop and start hass + # and have it persist. + recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") + config = { + "db_url": recorder_db_url, + } three_days_ago = datetime(2021, 1, 1, 0, 0, 0, tzinfo=dt_util.UTC) start_time = three_days_ago + timedelta(days=3) freezer.move_to(three_days_ago) - async with ( - async_test_home_assistant(initial_state=CoreState.not_running) as hass, - async_test_recorder(hass, wait_recorder=False), - ): + with get_test_home_assistant() as hass: + hass.set_state(CoreState.not_running) recorder_helper.async_initialize_recorder(hass) - await async_setup_component(hass, "sensor", {}) + setup_component(hass, "sensor", {}) + setup_component(hass, "recorder", {"recorder": config}) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - await hass.async_start() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) + hass.start() + wait_recording_done(hass) + wait_recording_done(hass) - hass.states.async_set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) - await async_wait_recording_done(hass) + hass.states.set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) + wait_recording_done(hass) two_days_ago = three_days_ago + timedelta(days=1) freezer.move_to(two_days_ago) do_adhoc_statistics(hass, start=two_days_ago) - await async_wait_recording_done(hass) + wait_recording_done(hass) with session_scope(hass=hass, read_only=True) as session: latest = get_latest_short_term_statistics_with_session( hass, session, {"sensor.test1"}, {"state", "sum"} @@ -82,32 +82,29 @@ async def test_compile_missing_statistics( past_time = two_days_ago while past_time <= start_time: freezer.move_to(past_time) - hass.states.async_set("sensor.test1", str(count), POWER_SENSOR_ATTRIBUTES) + hass.states.set("sensor.test1", str(count), POWER_SENSOR_ATTRIBUTES) past_time += timedelta(minutes=5) count += 1 - await async_wait_recording_done(hass) + wait_recording_done(hass) states = get_significant_states( hass, three_days_ago, past_time, ["sensor.test1"] ) assert len(states["sensor.test1"]) == 577 - await hass.async_stop() - await hass.async_block_till_done() - + hass.stop() freezer.move_to(start_time) - async with ( - async_test_home_assistant(initial_state=CoreState.not_running) as hass, - async_test_recorder(hass, wait_recorder=False), - ): + with get_test_home_assistant() as hass: + hass.set_state(CoreState.not_running) recorder_helper.async_initialize_recorder(hass) - await async_setup_component(hass, "sensor", {}) - hass.states.async_set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) + setup_component(hass, "sensor", {}) + hass.states.set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) + setup_component(hass, "recorder", {"recorder": config}) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - await hass.async_start() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) + hass.start() + wait_recording_done(hass) + wait_recording_done(hass) with session_scope(hass=hass, read_only=True) as session: latest = get_latest_short_term_statistics_with_session( hass, session, {"sensor.test1"}, {"state", "sum", "max", "mean", "min"} @@ -131,4 +128,4 @@ async def test_compile_missing_statistics( assert 
len(stats["sensor.test1"]) == 48 # Make sure the last mean is 570.5 assert stats["sensor.test1"][-1]["mean"] == 570.5 - await hass.async_stop() + hass.stop() diff --git a/tests/components/sensor/test_websocket_api.py b/tests/components/sensor/test_websocket_api.py index b1dafa04c94..6f4eeb252e2 100644 --- a/tests/components/sensor/test_websocket_api.py +++ b/tests/components/sensor/test_websocket_api.py @@ -36,13 +36,11 @@ async def test_device_class_units( "ft/s", "in/d", "in/h", - "in/s", "km/h", "kn", "m/s", "mm/d", "mm/h", - "mm/s", "mph", ] } diff --git a/tests/components/sensoterra/__init__.py b/tests/components/sensoterra/__init__.py deleted file mode 100644 index f70fede6c09..00000000000 --- a/tests/components/sensoterra/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Sensoterra integration.""" diff --git a/tests/components/sensoterra/conftest.py b/tests/components/sensoterra/conftest.py deleted file mode 100644 index 0f6b7a3014b..00000000000 --- a/tests/components/sensoterra/conftest.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Common fixtures for the Sensoterra tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -import pytest - -from .const import API_TOKEN - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.sensoterra.async_setup_entry", - return_value=True, - ) as mock_entry: - yield mock_entry - - -@pytest.fixture -def mock_customer_api_client() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with ( - patch( - "homeassistant.components.sensoterra.config_flow.CustomerApi", - autospec=True, - ) as mock_client, - ): - mock = mock_client.return_value - mock.get_token.return_value = API_TOKEN - yield mock diff --git a/tests/components/sensoterra/const.py b/tests/components/sensoterra/const.py deleted file mode 100644 index cc80610645d..00000000000 --- a/tests/components/sensoterra/const.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Constants for the test Sensoterra integration.""" - -API_TOKEN = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE4NTYzMDQwMDAsInN1YiI6IjM5In0.yxdXXlc1DqopqDRHfAVzFrMqZJl6nKLpu1dV8alHvVY" -API_EMAIL = "test-email@example.com" -API_PASSWORD = "test-password" -HASS_UUID = "phony-unique-id" diff --git a/tests/components/sensoterra/test_config_flow.py b/tests/components/sensoterra/test_config_flow.py deleted file mode 100644 index 20921406883..00000000000 --- a/tests/components/sensoterra/test_config_flow.py +++ /dev/null @@ -1,124 +0,0 @@ -"""Test the Sensoterra config flow.""" - -from unittest.mock import AsyncMock - -from jwt import DecodeError -import pytest -from sensoterra.customerapi import InvalidAuth as StInvalidAuth, Timeout as StTimeout - -from homeassistant.components.sensoterra.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_TOKEN -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .const import API_EMAIL, API_PASSWORD, API_TOKEN, HASS_UUID - -from tests.common import MockConfigEntry - - -async def test_full_flow( - hass: HomeAssistant, - mock_customer_api_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test we can finish a config flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - 
hass.data["core.uuid"] = HASS_UUID - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: API_EMAIL, - CONF_PASSWORD: API_PASSWORD, - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == API_EMAIL - assert result["data"] == { - CONF_TOKEN: API_TOKEN, - CONF_EMAIL: API_EMAIL, - } - - assert len(mock_customer_api_client.mock_calls) == 1 - - -async def test_form_unique_id( - hass: HomeAssistant, mock_customer_api_client: AsyncMock -) -> None: - """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - hass.data["core.uuid"] = HASS_UUID - - entry = MockConfigEntry(unique_id="39", domain=DOMAIN) - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: API_EMAIL, - CONF_PASSWORD: API_PASSWORD, - }, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - assert len(mock_customer_api_client.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (StTimeout, "cannot_connect"), - (StInvalidAuth("Invalid credentials"), "invalid_auth"), - (DecodeError("Bad API token"), "invalid_access_token"), - ], -) -async def test_form_exceptions( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_customer_api_client: AsyncMock, - exception: Exception, - error: str, -) -> None: - """Test we handle config form exceptions.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - hass.data["core.uuid"] = HASS_UUID - - mock_customer_api_client.get_token.side_effect = exception - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: API_EMAIL, - CONF_PASSWORD: API_PASSWORD, - }, - ) - assert result["errors"] == {"base": error} - assert result["type"] is FlowResultType.FORM - - mock_customer_api_client.get_token.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: API_EMAIL, - CONF_PASSWORD: API_PASSWORD, - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == API_EMAIL - assert result["data"] == { - CONF_TOKEN: API_TOKEN, - CONF_EMAIL: API_EMAIL, - } - assert len(mock_customer_api_client.mock_calls) == 2 diff --git a/tests/components/sentry/conftest.py b/tests/components/sentry/conftest.py index 663f8ee6aa6..781250b2753 100644 --- a/tests/components/sentry/conftest.py +++ b/tests/components/sentry/conftest.py @@ -6,7 +6,7 @@ from typing import Any import pytest -from homeassistant.components.sentry.const import DOMAIN +from homeassistant.components.sentry import DOMAIN from tests.common import MockConfigEntry diff --git a/tests/components/seventeentrack/conftest.py b/tests/components/seventeentrack/conftest.py index 0d02a7ab5f1..1ab4eed11ee 100644 --- a/tests/components/seventeentrack/conftest.py +++ b/tests/components/seventeentrack/conftest.py @@ -1,10 +1,10 @@ """Configuration for 17Track tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch -from pyseventeentrack.package import Package +from py17track.package import Package import pytest +from typing_extensions import Generator from homeassistant.components.seventeentrack.const import ( CONF_SHOW_ARCHIVED, @@ -40,11 +40,6 @@ NEW_SUMMARY_DATA = { "Returned": 1, } -ARCHIVE_PACKAGE_NUMBER = "123" -CONFIG_ENTRY_ID_KEY = "config_entry_id" 
-PACKAGE_TRACKING_NUMBER_KEY = "package_tracking_number" -PACKAGE_STATE_KEY = "package_state" - VALID_CONFIG = { CONF_USERNAME: "test", CONF_PASSWORD: "test", diff --git a/tests/components/seventeentrack/snapshots/test_services.ambr b/tests/components/seventeentrack/snapshots/test_services.ambr index e172a2de594..185a1d44fe0 100644 --- a/tests/components/seventeentrack/snapshots/test_services.ambr +++ b/tests/components/seventeentrack/snapshots/test_services.ambr @@ -3,39 +3,27 @@ dict({ 'packages': list([ dict({ - 'destination_country': 'Belgium', 'friendly_name': 'friendly name 3', 'info_text': 'info text 1', 'location': 'location 1', - 'origin_country': 'Belgium', - 'package_type': 'Registered Parcel', 'status': 'Expired', - 'timestamp': '2020-08-10T10:32:00+00:00', - 'tracking_info_language': 'Unknown', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), 'tracking_number': '123', }), dict({ - 'destination_country': 'Belgium', 'friendly_name': 'friendly name 1', 'info_text': 'info text 1', 'location': 'location 1', - 'origin_country': 'Belgium', - 'package_type': 'Registered Parcel', 'status': 'In Transit', - 'timestamp': '2020-08-10T10:32:00+00:00', - 'tracking_info_language': 'Unknown', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), 'tracking_number': '456', }), dict({ - 'destination_country': 'Belgium', 'friendly_name': 'friendly name 2', 'info_text': 'info text 1', 'location': 'location 1', - 'origin_country': 'Belgium', - 'package_type': 'Registered Parcel', 'status': 'Delivered', - 'timestamp': '2020-08-10T10:32:00+00:00', - 'tracking_info_language': 'Unknown', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), 'tracking_number': '789', }), ]), @@ -45,56 +33,19 @@ dict({ 'packages': list([ dict({ - 'destination_country': 'Belgium', 'friendly_name': 'friendly name 1', 'info_text': 'info text 1', 'location': 'location 1', - 'origin_country': 'Belgium', - 'package_type': 'Registered Parcel', 'status': 'In Transit', - 'timestamp': '2020-08-10T10:32:00+00:00', - 'tracking_info_language': 'Unknown', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), 'tracking_number': '456', }), dict({ - 'destination_country': 'Belgium', 'friendly_name': 'friendly name 2', 'info_text': 'info text 1', 'location': 'location 1', - 'origin_country': 'Belgium', - 'package_type': 'Registered Parcel', 'status': 'Delivered', - 'timestamp': '2020-08-10T10:32:00+00:00', - 'tracking_info_language': 'Unknown', - 'tracking_number': '789', - }), - ]), - }) -# --- -# name: test_packages_with_none_timestamp - dict({ - 'packages': list([ - dict({ - 'destination_country': 'Belgium', - 'friendly_name': 'friendly name 1', - 'info_text': 'info text 1', - 'location': 'location 1', - 'origin_country': 'Belgium', - 'package_type': 'Registered Parcel', - 'status': 'In Transit', - 'tracking_info_language': 'Unknown', - 'tracking_number': '456', - }), - dict({ - 'destination_country': 'Belgium', - 'friendly_name': 'friendly name 2', - 'info_text': 'info text 1', - 'location': 'location 1', - 'origin_country': 'Belgium', - 'package_type': 'Registered Parcel', - 'status': 'Delivered', - 'timestamp': '2020-08-10T10:32:00+00:00', - 'tracking_info_language': 'Unknown', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), 'tracking_number': '789', }), ]), diff --git a/tests/components/seventeentrack/test_config_flow.py b/tests/components/seventeentrack/test_config_flow.py index 9ad592419c3..380146ed276 100644 --- a/tests/components/seventeentrack/test_config_flow.py +++ 
b/tests/components/seventeentrack/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from pyseventeentrack.errors import SeventeenTrackError +from py17track.errors import SeventeenTrackError import pytest from homeassistant import config_entries @@ -11,7 +11,7 @@ from homeassistant.components.seventeentrack.const import ( CONF_SHOW_ARCHIVED, CONF_SHOW_DELIVERED, ) -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -105,6 +105,55 @@ async def test_flow_fails( } +async def test_import_flow(hass: HomeAssistant, mock_seventeentrack: AsyncMock) -> None: + """Test the import configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=VALID_CONFIG_OLD, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "someemail@gmail.com" + assert result["data"][CONF_USERNAME] == "someemail@gmail.com" + assert result["data"][CONF_PASSWORD] == "edc3eee7330e4fdda04489e3fbc283d0" + + +@pytest.mark.parametrize( + ("return_value", "side_effect", "error"), + [ + ( + False, + None, + "invalid_auth", + ), + ( + True, + SeventeenTrackError(), + "cannot_connect", + ), + ], +) +async def test_import_flow_cannot_connect_error( + hass: HomeAssistant, + mock_seventeentrack: AsyncMock, + return_value, + side_effect, + error, +) -> None: + """Test the import configuration flow with error.""" + mock_seventeentrack.return_value.profile.login.return_value = return_value + mock_seventeentrack.return_value.profile.login.side_effect = side_effect + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=VALID_CONFIG_OLD, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == error + + async def test_option_flow(hass: HomeAssistant, mock_seventeentrack: AsyncMock) -> None: """Test option flow.""" entry = MockConfigEntry( @@ -132,3 +181,28 @@ async def test_option_flow(hass: HomeAssistant, mock_seventeentrack: AsyncMock) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"][CONF_SHOW_ARCHIVED] assert not result["data"][CONF_SHOW_DELIVERED] + + +async def test_import_flow_already_configured( + hass: HomeAssistant, mock_seventeentrack: AsyncMock +) -> None: + """Test the import configuration flow with error.""" + entry = MockConfigEntry( + domain=DOMAIN, + data=VALID_CONFIG, + unique_id=ACCOUNT_ID, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result_aborted = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + await hass.async_block_till_done() + + assert result_aborted["type"] is FlowResultType.ABORT + assert result_aborted["reason"] == "already_configured" diff --git a/tests/components/seventeentrack/test_repairs.py b/tests/components/seventeentrack/test_repairs.py deleted file mode 100644 index 44d1f078432..00000000000 --- a/tests/components/seventeentrack/test_repairs.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Tests for the seventeentrack repair flow.""" - -from unittest.mock import AsyncMock - -from freezegun.api import FrozenDateTimeFactory - -from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN -from 
homeassistant.components.seventeentrack import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from . import goto_future, init_integration -from .conftest import DEFAULT_SUMMARY_LENGTH, get_package - -from tests.common import MockConfigEntry -from tests.components.repairs import process_repair_fix_flow, start_repair_fix_flow -from tests.typing import ClientSessionGenerator - - -async def test_repair( - hass: HomeAssistant, - mock_seventeentrack: AsyncMock, - issue_registry: ir.IssueRegistry, - hass_client: ClientSessionGenerator, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Ensure everything starts correctly.""" - await init_integration(hass, mock_config_entry) # 2 - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH - assert len(issue_registry.issues) == 1 - - package = get_package() - mock_seventeentrack.return_value.profile.packages.return_value = [package] - await goto_future(hass, freezer) - - assert hass.states.get("sensor.17track_package_friendly_name_1") - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 - - assert "deprecated" not in mock_config_entry.data - - repair_issue = issue_registry.async_get_issue( - domain=DOMAIN, issue_id=f"deprecate_sensor_{mock_config_entry.entry_id}" - ) - - assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) - - client = await hass_client() - - data = await start_repair_fix_flow(client, DOMAIN, repair_issue.issue_id) - - flow_id = data["flow_id"] - assert data == { - "type": "form", - "flow_id": flow_id, - "handler": DOMAIN, - "step_id": "confirm", - "data_schema": [], - "errors": None, - "description_placeholders": None, - "last_step": None, - "preview": None, - } - - data = await process_repair_fix_flow(client, flow_id) - - flow_id = data["flow_id"] - assert data == { - "type": "create_entry", - "handler": DOMAIN, - "flow_id": flow_id, - "description": None, - "description_placeholders": None, - } - - assert mock_config_entry.data["deprecated"] - - repair_issue = issue_registry.async_get_issue( - domain=DOMAIN, issue_id="deprecate_sensor" - ) - - assert repair_issue is None - - await goto_future(hass, freezer) - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH diff --git a/tests/components/seventeentrack/test_sensor.py b/tests/components/seventeentrack/test_sensor.py index a631996b4eb..75cc6435073 100644 --- a/tests/components/seventeentrack/test_sensor.py +++ b/tests/components/seventeentrack/test_sensor.py @@ -5,9 +5,10 @@ from __future__ import annotations from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory -from pyseventeentrack.errors import SeventeenTrackError +from py17track.errors import SeventeenTrackError from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from . 
import goto_future, init_integration @@ -305,3 +306,15 @@ async def test_non_valid_platform_config( assert await async_setup_component(hass, "sensor", VALID_PLATFORM_CONFIG_FULL) await hass.async_block_till_done() assert len(hass.states.async_entity_ids()) == 0 + + +async def test_full_valid_platform_config( + hass: HomeAssistant, + mock_seventeentrack: AsyncMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Ensure everything starts correctly.""" + assert await async_setup_component(hass, "sensor", VALID_PLATFORM_CONFIG_FULL) + await hass.async_block_till_done() + assert len(hass.states.async_entity_ids()) == len(DEFAULT_SUMMARY.keys()) + assert len(issue_registry.issues) == 1 diff --git a/tests/components/seventeentrack/test_services.py b/tests/components/seventeentrack/test_services.py index bbd5644ad63..4347189a5c0 100644 --- a/tests/components/seventeentrack/test_services.py +++ b/tests/components/seventeentrack/test_services.py @@ -5,24 +5,14 @@ from unittest.mock import AsyncMock import pytest from syrupy import SnapshotAssertion -from homeassistant.components.seventeentrack import DOMAIN -from homeassistant.components.seventeentrack.const import ( - SERVICE_ARCHIVE_PACKAGE, - SERVICE_GET_PACKAGES, -) +from homeassistant.components.seventeentrack import DOMAIN, SERVICE_GET_PACKAGES from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr from . import init_integration -from .conftest import ( - ARCHIVE_PACKAGE_NUMBER, - CONFIG_ENTRY_ID_KEY, - PACKAGE_STATE_KEY, - PACKAGE_TRACKING_NUMBER_KEY, - get_package, -) +from .conftest import get_package from tests.common import MockConfigEntry @@ -40,8 +30,8 @@ async def test_get_packages_from_list( DOMAIN, SERVICE_GET_PACKAGES, { - CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, - PACKAGE_STATE_KEY: ["in_transit", "delivered"], + "config_entry_id": mock_config_entry.entry_id, + "package_state": ["in_transit", "delivered"], }, blocking=True, return_response=True, @@ -63,7 +53,7 @@ async def test_get_all_packages( DOMAIN, SERVICE_GET_PACKAGES, { - CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, + "config_entry_id": mock_config_entry.entry_id, }, blocking=True, return_response=True, @@ -86,7 +76,7 @@ async def test_service_called_with_unloaded_entry( DOMAIN, SERVICE_GET_PACKAGES, { - CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, + "config_entry_id": mock_config_entry.entry_id, }, blocking=True, return_response=True, @@ -120,58 +110,13 @@ async def test_service_called_with_non_17track_device( DOMAIN, SERVICE_GET_PACKAGES, { - CONFIG_ENTRY_ID_KEY: device_entry.id, + "config_entry_id": device_entry.id, }, blocking=True, return_response=True, ) -async def test_archive_package( - hass: HomeAssistant, - mock_seventeentrack: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Ensure service archives package.""" - await _mock_packages(mock_seventeentrack) - await init_integration(hass, mock_config_entry) - await hass.services.async_call( - DOMAIN, - SERVICE_ARCHIVE_PACKAGE, - { - CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, - PACKAGE_TRACKING_NUMBER_KEY: ARCHIVE_PACKAGE_NUMBER, - }, - blocking=True, - ) - mock_seventeentrack.return_value.profile.archive_package.assert_called_once_with( - ARCHIVE_PACKAGE_NUMBER - ) - - -async def test_packages_with_none_timestamp( - hass: HomeAssistant, - mock_seventeentrack: AsyncMock, - 
mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Ensure service returns all packages when non provided.""" - await _mock_invalid_packages(mock_seventeentrack) - await init_integration(hass, mock_config_entry) - service_response = await hass.services.async_call( - DOMAIN, - SERVICE_GET_PACKAGES, - { - CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, - }, - blocking=True, - return_response=True, - ) - - assert service_response == snapshot - - async def _mock_packages(mock_seventeentrack): package1 = get_package(status=10) package2 = get_package( @@ -189,19 +134,3 @@ async def _mock_packages(mock_seventeentrack): package2, package3, ] - - -async def _mock_invalid_packages(mock_seventeentrack): - package1 = get_package( - status=10, - timestamp=None, - ) - package2 = get_package( - tracking_number="789", - friendly_name="friendly name 2", - status=40, - ) - mock_seventeentrack.return_value.profile.packages.return_value = [ - package1, - package2, - ] diff --git a/tests/components/sfr_box/conftest.py b/tests/components/sfr_box/conftest.py index 7c1f8bbab5c..e86cd06650e 100644 --- a/tests/components/sfr_box/conftest.py +++ b/tests/components/sfr_box/conftest.py @@ -1,11 +1,11 @@ """Provide common SFR Box fixtures.""" -from collections.abc import Generator import json from unittest.mock import AsyncMock, patch import pytest from sfrbox_api.models import DslInfo, FtthInfo, SystemInfo, WanInfo +from typing_extensions import Generator from homeassistant.components.sfr_box.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntry diff --git a/tests/components/sfr_box/snapshots/test_binary_sensor.ambr b/tests/components/sfr_box/snapshots/test_binary_sensor.ambr index 15308fad91f..f14ec98a418 100644 --- a/tests/components/sfr_box/snapshots/test_binary_sensor.ambr +++ b/tests/components/sfr_box/snapshots/test_binary_sensor.ambr @@ -22,7 +22,6 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', - 'model_id': 'NB6VAC-FXC-r0', 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , @@ -150,7 +149,6 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', - 'model_id': 'NB6VAC-FXC-r0', 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/sfr_box/snapshots/test_button.ambr b/tests/components/sfr_box/snapshots/test_button.ambr index 67b2198fd2b..eee419bf373 100644 --- a/tests/components/sfr_box/snapshots/test_button.ambr +++ b/tests/components/sfr_box/snapshots/test_button.ambr @@ -22,7 +22,6 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', - 'model_id': 'NB6VAC-FXC-r0', 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/sfr_box/snapshots/test_sensor.ambr b/tests/components/sfr_box/snapshots/test_sensor.ambr index 7645a4ad8bf..649c94c89dc 100644 --- a/tests/components/sfr_box/snapshots/test_sensor.ambr +++ b/tests/components/sfr_box/snapshots/test_sensor.ambr @@ -22,7 +22,6 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', - 'model_id': 'NB6VAC-FXC-r0', 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/sfr_box/test_binary_sensor.py b/tests/components/sfr_box/test_binary_sensor.py index 6152f8e2721..8dba537f6cb 100644 --- a/tests/components/sfr_box/test_binary_sensor.py +++ b/tests/components/sfr_box/test_binary_sensor.py @@ -1,11 +1,11 @@ """Test the SFR Box binary sensors.""" -from collections.abc import Generator from unittest.mock import patch import pytest from sfrbox_api.models import 
SystemInfo from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/sfr_box/test_button.py b/tests/components/sfr_box/test_button.py index f555ccebbf9..4f20a2f34a3 100644 --- a/tests/components/sfr_box/test_button.py +++ b/tests/components/sfr_box/test_button.py @@ -1,11 +1,11 @@ """Test the SFR Box buttons.""" -from collections.abc import Generator from unittest.mock import patch import pytest from sfrbox_api.exceptions import SFRBoxError from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.config_entries import ConfigEntry diff --git a/tests/components/sfr_box/test_config_flow.py b/tests/components/sfr_box/test_config_flow.py index 6bf610de661..08c12e9817b 100644 --- a/tests/components/sfr_box/test_config_flow.py +++ b/tests/components/sfr_box/test_config_flow.py @@ -207,7 +207,15 @@ async def test_reauth(hass: HomeAssistant, config_entry_with_auth: ConfigEntry) """Test the start of the config flow.""" assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - result = await config_entry_with_auth.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry_with_auth.entry_id, + "unique_id": config_entry_with_auth.unique_id, + }, + data=config_entry_with_auth.data, + ) assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} diff --git a/tests/components/sfr_box/test_diagnostics.py b/tests/components/sfr_box/test_diagnostics.py index d31d97cbcf8..597631d12f1 100644 --- a/tests/components/sfr_box/test_diagnostics.py +++ b/tests/components/sfr_box/test_diagnostics.py @@ -1,11 +1,11 @@ """Test the SFR Box diagnostics.""" -from collections.abc import Generator from unittest.mock import patch import pytest from sfrbox_api.models import SystemInfo from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant diff --git a/tests/components/sfr_box/test_init.py b/tests/components/sfr_box/test_init.py index 19e15491be1..14688009c5c 100644 --- a/tests/components/sfr_box/test_init.py +++ b/tests/components/sfr_box/test_init.py @@ -1,10 +1,10 @@ """Test the SFR Box setup process.""" -from collections.abc import Generator from unittest.mock import patch import pytest from sfrbox_api.exceptions import SFRBoxAuthenticationError, SFRBoxError +from typing_extensions import Generator from homeassistant.components.sfr_box.const import DOMAIN from homeassistant.config_entries import ConfigEntry, ConfigEntryState diff --git a/tests/components/sfr_box/test_sensor.py b/tests/components/sfr_box/test_sensor.py index dd4a67b42f6..506e1ed8962 100644 --- a/tests/components/sfr_box/test_sensor.py +++ b/tests/components/sfr_box/test_sensor.py @@ -1,10 +1,10 @@ """Test the SFR Box sensors.""" -from collections.abc import Generator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/sharkiq/test_config_flow.py b/tests/components/sharkiq/test_config_flow.py index 
22a77678c0d..cf75bff1686 100644 --- a/tests/components/sharkiq/test_config_flow.py +++ b/tests/components/sharkiq/test_config_flow.py @@ -96,18 +96,18 @@ async def test_form_error(hass: HomeAssistant, exc: Exception, base_error: str) async def test_reauth_success(hass: HomeAssistant) -> None: """Test reauth flow.""" - mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) - mock_config.add_to_hass(hass) - - result = await mock_config.start_reauth_flow(hass) - with patch("sharkiq.AylaApi.async_sign_in", return_value=True): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=CONFIG + mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) + mock_config.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH, "unique_id": UNIQUE_ID}, + data=CONFIG, ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" @pytest.mark.parametrize( @@ -127,15 +127,13 @@ async def test_reauth( msg: str, ) -> None: """Test reauth failures.""" - mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) - mock_config.add_to_hass(hass) - - result = await mock_config.start_reauth_flow(hass) - with patch("sharkiq.AylaApi.async_sign_in", side_effect=side_effect): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=CONFIG + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH, "unique_id": UNIQUE_ID}, + data=CONFIG, ) + msg_value = result[msg_field] if msg_field == "errors": msg_value = msg_value.get("base") diff --git a/tests/components/sharkiq/test_vacuum.py b/tests/components/sharkiq/test_vacuum.py index 3748cfd6dc4..e5154008f56 100644 --- a/tests/components/sharkiq/test_vacuum.py +++ b/tests/components/sharkiq/test_vacuum.py @@ -141,7 +141,7 @@ class MockShark(SharkIqVacuum): @pytest.fixture(autouse=True) @patch("sharkiq.ayla_api.AylaApi", MockAyla) -async def setup_integration(hass: HomeAssistant) -> None: +async def setup_integration(hass): """Build the mock integration.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=TEST_USERNAME, data=CONFIG, entry_id=ENTRY_ID diff --git a/tests/components/shelly/__init__.py b/tests/components/shelly/__init__.py index 7de45eeee98..4631a17969e 100644 --- a/tests/components/shelly/__init__.py +++ b/tests/components/shelly/__init__.py @@ -23,7 +23,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, - DeviceEntry, DeviceRegistry, format_mac, ) @@ -112,7 +111,6 @@ def register_entity( unique_id: str, config_entry: ConfigEntry | None = None, capabilities: Mapping[str, Any] | None = None, - device_id: str | None = None, ) -> str: """Register enabled entity, return entity_id.""" entity_registry = er.async_get(hass) @@ -124,7 +122,6 @@ def register_entity( disabled_by=None, config_entry=config_entry, capabilities=capabilities, - device_id=device_id, ) return f"{domain}.{object_id}" @@ -148,11 +145,9 @@ def get_entity_state(hass: HomeAssistant, entity_id: str) -> str: return entity.state -def register_device( - device_registry: DeviceRegistry, config_entry: ConfigEntry -) -> DeviceEntry: +def register_device(device_registry: DeviceRegistry, 
config_entry: ConfigEntry) -> None: """Register Shelly device.""" - return device_registry.async_get_or_create( + device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(CONNECTION_NETWORK_MAC, format_mac(MOCK_MAC))}, ) diff --git a/tests/components/shelly/bluetooth/test_scanner.py b/tests/components/shelly/bluetooth/test_scanner.py index 1076691a768..c7bbb5cb708 100644 --- a/tests/components/shelly/bluetooth/test_scanner.py +++ b/tests/components/shelly/bluetooth/test_scanner.py @@ -12,9 +12,7 @@ from homeassistant.core import HomeAssistant from .. import init_integration, inject_rpc_device_event -async def test_scanner_v1( - hass: HomeAssistant, mock_rpc_device, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_scanner_v1(hass: HomeAssistant, mock_rpc_device, monkeypatch) -> None: """Test injecting data into the scanner v1.""" await init_integration( hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} @@ -52,9 +50,7 @@ async def test_scanner_v1( assert ble_device is None -async def test_scanner_v2( - hass: HomeAssistant, mock_rpc_device, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_scanner_v2(hass: HomeAssistant, mock_rpc_device, monkeypatch) -> None: """Test injecting data into the scanner v2.""" await init_integration( hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} @@ -97,7 +93,7 @@ async def test_scanner_v2( async def test_scanner_ignores_non_ble_events( - hass: HomeAssistant, mock_rpc_device, monkeypatch: pytest.MonkeyPatch + hass: HomeAssistant, mock_rpc_device, monkeypatch ) -> None: """Test injecting non ble data into the scanner.""" await init_integration( @@ -123,10 +119,7 @@ async def test_scanner_ignores_non_ble_events( async def test_scanner_ignores_wrong_version_and_logs( - hass: HomeAssistant, - mock_rpc_device, - monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mock_rpc_device, monkeypatch, caplog: pytest.LogCaptureFixture ) -> None: """Test injecting wrong version of ble data into the scanner.""" await init_integration( @@ -159,10 +152,7 @@ async def test_scanner_ignores_wrong_version_and_logs( async def test_scanner_warns_on_corrupt_event( - hass: HomeAssistant, - mock_rpc_device, - monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mock_rpc_device, monkeypatch, caplog: pytest.LogCaptureFixture ) -> None: """Test injecting garbage ble data into the scanner.""" await init_integration( diff --git a/tests/components/shelly/conftest.py b/tests/components/shelly/conftest.py index d453d25698c..a16cc62fbae 100644 --- a/tests/components/shelly/conftest.py +++ b/tests/components/shelly/conftest.py @@ -11,11 +11,11 @@ from homeassistant.components.shelly.const import ( EVENT_SHELLY_CLICK, REST_SENSORS_UPDATE_INTERVAL, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceCall from . 
import MOCK_MAC -from tests.common import async_capture_events +from tests.common import async_capture_events, async_mock_service MOCK_SETTINGS = { "name": "Test name", @@ -166,20 +166,8 @@ MOCK_BLOCKS = [ MOCK_CONFIG = { "input:0": {"id": 0, "name": "Test name input 0", "type": "button"}, - "input:1": { - "id": 1, - "type": "analog", - "enable": True, - "xpercent": {"expr": None, "unit": None}, - }, - "input:2": { - "id": 2, - "name": "Gas", - "type": "count", - "enable": True, - "xcounts": {"expr": None, "unit": None}, - "xfreq": {"expr": None, "unit": None}, - }, + "input:1": {"id": 1, "type": "analog", "enable": True}, + "input:2": {"id": 2, "name": "Gas", "type": "count", "enable": True}, "light:0": {"name": "test light_0"}, "light:1": {"name": "test light_1"}, "light:2": {"name": "test light_2"}, @@ -198,7 +186,6 @@ MOCK_CONFIG = { "device": {"name": "Test name"}, }, "wifi": {"sta": {"enable": True}, "sta1": {"enable": False}}, - "ws": {"enable": False, "server": None}, } MOCK_SHELLY_COAP = { @@ -226,9 +213,9 @@ MOCK_STATUS_COAP = { "update": { "status": "pending", "has_update": True, - "beta_version": "20231107-162609/v1.14.1-rc1-g0617c15", - "new_version": "20230913-111730/v1.14.0-gcb84623", - "old_version": "20230913-111730/v1.14.0-gcb84623", + "beta_version": "some_beta_version", + "new_version": "some_new_version", + "old_version": "some_old_version", }, "uptime": 5 * REST_SENSORS_UPDATE_INTERVAL, "wifi_sta": {"rssi": -64}, @@ -241,9 +228,7 @@ MOCK_STATUS_RPC = { "input:1": {"id": 1, "percent": 89, "xpercent": 8.9}, "input:2": { "id": 2, - "counts": {"total": 56174, "xtotal": 561.74}, - "freq": 208.00, - "xfreq": 6.11, + "counts": {"total": 56174, "xtotal": 561.74, "freq": 208.00, "xfreq": 6.11}, }, "light:0": {"output": True, "brightness": 53.0}, "light:1": {"output": True, "brightness": 53.0}, @@ -305,6 +290,12 @@ def mock_ws_server(): yield +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.fixture def events(hass: HomeAssistant): """Yield caught shelly_click events.""" @@ -366,7 +357,6 @@ def _mock_rpc_device(version: str | None = None): status=MOCK_STATUS_RPC, firmware_version="some fw string", initialized=True, - connected=True, ) type(device).name = PropertyMock(return_value="Test name") return device diff --git a/tests/components/shelly/test_binary_sensor.py b/tests/components/shelly/test_binary_sensor.py index fadfe28db3e..3bfbf350f7e 100644 --- a/tests/components/shelly/test_binary_sensor.py +++ b/tests/components/shelly/test_binary_sensor.py @@ -1,6 +1,5 @@ """Tests for Shelly binary sensor platform.""" -from copy import deepcopy from unittest.mock import Mock from aioshelly.const import MODEL_MOTION @@ -11,7 +10,6 @@ from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAI from homeassistant.components.shelly.const import UPDATE_PERIOD_MULTIPLIER from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry @@ -169,14 +167,9 @@ async def test_block_restored_sleeping_binary_sensor( ) -> None: """Test block restored sleeping binary sensor.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + 
register_device(device_registry, entry) entity_id = register_entity( - hass, - BINARY_SENSOR_DOMAIN, - "test_name_motion", - "sensor_0-motion", - entry, - device_id=device.id, + hass, BINARY_SENSOR_DOMAIN, "test_name_motion", "sensor_0-motion", entry ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) monkeypatch.setattr(mock_block_device, "initialized", False) @@ -201,14 +194,9 @@ async def test_block_restored_sleeping_binary_sensor_no_last_state( ) -> None: """Test block restored sleeping binary sensor missing last state.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( - hass, - BINARY_SENSOR_DOMAIN, - "test_name_motion", - "sensor_0-motion", - entry, - device_id=device.id, + hass, BINARY_SENSOR_DOMAIN, "test_name_motion", "sensor_0-motion", entry ) monkeypatch.setattr(mock_block_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) @@ -275,7 +263,6 @@ async def test_rpc_sleeping_binary_sensor( ) -> None: """Test RPC online sleeping binary sensor.""" entity_id = f"{BINARY_SENSOR_DOMAIN}.test_name_cloud" - monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) config_entry = await init_integration(hass, 2, sleep_period=1000) @@ -315,14 +302,9 @@ async def test_rpc_restored_sleeping_binary_sensor( ) -> None: """Test RPC restored binary sensor.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( - hass, - BINARY_SENSOR_DOMAIN, - "test_name_cloud", - "cloud-cloud", - entry, - device_id=device.id, + hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud-cloud", entry ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) @@ -349,14 +331,9 @@ async def test_rpc_restored_sleeping_binary_sensor_no_last_state( ) -> None: """Test RPC restored sleeping binary sensor missing last state.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( - hass, - BINARY_SENSOR_DOMAIN, - "test_name_cloud", - "cloud-cloud", - entry, - device_id=device.id, + hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud-cloud", entry ) monkeypatch.setattr(mock_rpc_device, "initialized", False) @@ -376,104 +353,3 @@ async def test_rpc_restored_sleeping_binary_sensor_no_last_state( await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_OFF - - -@pytest.mark.parametrize( - ("name", "entity_id"), - [ - ("Virtual binary sensor", "binary_sensor.test_name_virtual_binary_sensor"), - (None, "binary_sensor.test_name_boolean_203"), - ], -) -async def test_rpc_device_virtual_binary_sensor( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - name: str | None, - entity_id: str, -) -> None: - """Test a virtual binary sensor for RPC device.""" - config = deepcopy(mock_rpc_device.config) - config["boolean:203"] = { - "name": name, - "meta": {"ui": {"view": "label"}}, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["boolean:203"] = {"value": True} - monkeypatch.setattr(mock_rpc_device, "status", status) - - await 
init_integration(hass, 3) - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_ON - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == "123456789ABC-boolean:203-boolean" - - monkeypatch.setitem(mock_rpc_device.status["boolean:203"], "value", False) - mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_OFF - - -async def test_rpc_remove_virtual_binary_sensor_when_mode_toggle( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test if the virtual binary sensor will be removed if the mode has been changed to a toggle.""" - config = deepcopy(mock_rpc_device.config) - config["boolean:200"] = {"name": None, "meta": {"ui": {"view": "toggle"}}} - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["boolean:200"] = {"value": True} - monkeypatch.setattr(mock_rpc_device, "status", status) - - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - BINARY_SENSOR_DOMAIN, - "test_name_boolean_200", - "boolean:200-boolean", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry - - -async def test_rpc_remove_virtual_binary_sensor_when_orphaned( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, -) -> None: - """Check whether the virtual binary sensor will be removed if it has been removed from the device configuration.""" - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - BINARY_SENSOR_DOMAIN, - "test_name_boolean_200", - "boolean:200-boolean", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry diff --git a/tests/components/shelly/test_climate.py b/tests/components/shelly/test_climate.py index aeeeca30edd..fea46b1d2d1 100644 --- a/tests/components/shelly/test_climate.py +++ b/tests/components/shelly/test_climate.py @@ -13,6 +13,8 @@ from homeassistant.components.climate import ( ATTR_HVAC_ACTION, ATTR_HVAC_MODE, ATTR_PRESET_MODE, + ATTR_TARGET_TEMP_HIGH, + ATTR_TARGET_TEMP_LOW, DOMAIN as CLIMATE_DOMAIN, PRESET_NONE, SERVICE_SET_HVAC_MODE, @@ -136,6 +138,19 @@ async def test_climate_set_temperature( assert state.state == HVACMode.OFF assert state.attributes[ATTR_TEMPERATURE] == 4 + # Test set temperature without target temperature + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_TARGET_TEMP_LOW: 20, + ATTR_TARGET_TEMP_HIGH: 30, + }, + blocking=True, + ) + mock_block_device.http_request.assert_not_called() + # Test set temperature await hass.services.async_call( CLIMATE_DOMAIN, @@ -239,14 +254,13 @@ async def test_block_restored_climate( monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) monkeypatch.delattr(mock_block_device.blocks[EMETER_BLOCK_ID], "targetTemp") entry = await init_integration(hass, 1, 
sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, - device_id=device.id, ) attrs = {"current_temperature": 20.5, "temperature": 4.0} extra_data = {"last_target_temp": 22.0} @@ -307,14 +321,13 @@ async def test_block_restored_climate_us_customery( monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) monkeypatch.delattr(mock_block_device.blocks[EMETER_BLOCK_ID], "targetTemp") entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, - device_id=device.id, ) attrs = {"current_temperature": 67, "temperature": 39} extra_data = {"last_target_temp": 10.0} @@ -377,14 +390,13 @@ async def test_block_restored_climate_unavailable( monkeypatch.delattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "targetTemp") monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, - device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_UNAVAILABLE)]) @@ -405,14 +417,13 @@ async def test_block_restored_climate_set_preset_before_online( monkeypatch.delattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "targetTemp") monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, - device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, HVACMode.HEAT)]) @@ -507,14 +518,13 @@ async def test_block_restored_climate_auth_error( monkeypatch.delattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "targetTemp") monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, - device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, HVACMode.HEAT)]) @@ -594,25 +604,23 @@ async def test_rpc_climate_hvac_mode( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test climate hvac mode service.""" - entity_id = "climate.test_name_thermostat_0" - await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) + state = hass.states.get(ENTITY_ID) assert state.state == HVACMode.HEAT assert state.attributes[ATTR_TEMPERATURE] == 23 assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 12.3 assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING assert state.attributes[ATTR_CURRENT_HUMIDITY] == 44.4 - entry = entity_registry.async_get(entity_id) + entry = entity_registry.async_get(ENTITY_ID) assert entry assert entry.unique_id == "123456789ABC-thermostat:0" monkeypatch.setitem(mock_rpc_device.status["thermostat:0"], "output", False) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + state = 
hass.states.get(ENTITY_ID) assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE assert state.attributes[ATTR_CURRENT_HUMIDITY] == 44.4 @@ -620,7 +628,7 @@ async def test_rpc_climate_hvac_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_HVAC_MODE: HVACMode.OFF}, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF}, blocking=True, ) mock_rpc_device.mock_update() @@ -628,7 +636,7 @@ async def test_rpc_climate_hvac_mode( mock_rpc_device.call_rpc.assert_called_once_with( "Thermostat.SetConfig", {"config": {"id": 0, "enable": False}} ) - state = hass.states.get(entity_id) + state = hass.states.get(ENTITY_ID) assert state.state == HVACMode.OFF @@ -639,21 +647,20 @@ async def test_rpc_climate_without_humidity( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test climate entity without the humidity value.""" - entity_id = "climate.test_name_thermostat_0" new_status = deepcopy(mock_rpc_device.status) new_status.pop("humidity:0") monkeypatch.setattr(mock_rpc_device, "status", new_status) await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) + state = hass.states.get(ENTITY_ID) assert state.state == HVACMode.HEAT assert state.attributes[ATTR_TEMPERATURE] == 23 assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 12.3 assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING assert ATTR_CURRENT_HUMIDITY not in state.attributes - entry = entity_registry.async_get(entity_id) + entry = entity_registry.async_get(ENTITY_ID) assert entry assert entry.unique_id == "123456789ABC-thermostat:0" @@ -662,18 +669,29 @@ async def test_rpc_climate_set_temperature( hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch ) -> None: """Test climate set target temperature.""" - entity_id = "climate.test_name_thermostat_0" - await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) + state = hass.states.get(ENTITY_ID) assert state.attributes[ATTR_TEMPERATURE] == 23 + # test set temperature without target temperature + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_TARGET_TEMP_LOW: 20, + ATTR_TARGET_TEMP_HIGH: 30, + }, + blocking=True, + ) + mock_rpc_device.call_rpc.assert_not_called() + monkeypatch.setitem(mock_rpc_device.status["thermostat:0"], "target_C", 28) await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 28}, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 28}, blocking=True, ) mock_rpc_device.mock_update() @@ -681,7 +699,7 @@ async def test_rpc_climate_set_temperature( mock_rpc_device.call_rpc.assert_called_once_with( "Thermostat.SetConfig", {"config": {"id": 0, "target_C": 28}} ) - state = hass.states.get(entity_id) + state = hass.states.get(ENTITY_ID) assert state.attributes[ATTR_TEMPERATURE] == 28 @@ -689,14 +707,13 @@ async def test_rpc_climate_hvac_mode_cool( hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch ) -> None: """Test climate with hvac mode cooling.""" - entity_id = "climate.test_name_thermostat_0" new_config = deepcopy(mock_rpc_device.config) new_config["thermostat:0"]["type"] = "cooling" monkeypatch.setattr(mock_rpc_device, "config", new_config) await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) + state = hass.states.get(ENTITY_ID) assert state.state == HVACMode.COOL assert state.attributes[ATTR_HVAC_ACTION] == 
HVACAction.COOLING @@ -708,7 +725,7 @@ async def test_wall_display_thermostat_mode( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test Wall Display in thermostat mode.""" - climate_entity_id = "climate.test_name_thermostat_0" + climate_entity_id = "climate.test_name" switch_entity_id = "switch.test_switch_0" await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) @@ -735,7 +752,7 @@ async def test_wall_display_thermostat_mode_external_actuator( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test Wall Display in thermostat mode with an external actuator.""" - climate_entity_id = "climate.test_name_thermostat_0" + climate_entity_id = "climate.test_name" switch_entity_id = "switch.test_switch_0" new_status = deepcopy(mock_rpc_device.status) diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index 93b3a46910c..a26c6eac405 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -4,7 +4,7 @@ from dataclasses import replace from datetime import timedelta from ipaddress import ip_address from typing import Any -from unittest.mock import AsyncMock, Mock, call, patch +from unittest.mock import AsyncMock, Mock, patch from aioshelly.const import DEFAULT_HTTP_PORT, MODEL_1, MODEL_PLUS_2PM from aioshelly.exceptions import ( @@ -23,6 +23,7 @@ from homeassistant.components.shelly.const import ( BLEScannerMode, ) from homeassistant.components.shelly.coordinator import ENTRY_RELOAD_COOLDOWN +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -818,15 +819,20 @@ async def test_reauth_successful( domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": gen} ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.shelly.config_flow.get_info", return_value={"mac": "test-mac", "type": MODEL_1, "auth": True, "gen": gen}, ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, + data=entry.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input, @@ -852,9 +858,6 @@ async def test_reauth_unsuccessful( domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": gen} ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" with ( patch( @@ -870,6 +873,15 @@ async def test_reauth_unsuccessful( new=AsyncMock(side_effect=InvalidAuthError), ), ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, + data=entry.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input, @@ -885,14 +897,20 @@ async def test_reauth_get_info_error(hass: HomeAssistant) -> None: domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": 2} ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) - assert 
result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.shelly.config_flow.get_info", side_effect=DeviceConnectionError, ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, + data=entry.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"password": "test2 password"}, @@ -1096,7 +1114,6 @@ async def test_zeroconf_sleeping_device_not_triggers_refresh( caplog: pytest.LogCaptureFixture, ) -> None: """Test zeroconf discovery does not triggers refresh for sleeping device.""" - monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) entry = MockConfigEntry( domain="shelly", @@ -1135,182 +1152,6 @@ async def test_zeroconf_sleeping_device_not_triggers_refresh( assert "device did not update" not in caplog.text -async def test_zeroconf_sleeping_device_attempts_configure( - hass: HomeAssistant, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test zeroconf discovery configures a sleeping device outbound websocket.""" - monkeypatch.setattr(mock_rpc_device, "connected", False) - monkeypatch.setattr(mock_rpc_device, "initialized", False) - monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) - entry = MockConfigEntry( - domain="shelly", - unique_id="AABBCCDDEEFF", - data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1}, - ) - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - mock_rpc_device.mock_disconnected() - await hass.async_block_till_done() - - mock_rpc_device.mock_online() - await hass.async_block_till_done(wait_background_tasks=True) - - assert "online, resuming setup" in caplog.text - assert len(mock_rpc_device.initialize.mock_calls) == 1 - - with patch( - "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "AABBCCDDEEFF", "type": MODEL_1, "auth": False}, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - data=DISCOVERY_INFO, - context={"source": config_entries.SOURCE_ZEROCONF}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - assert mock_rpc_device.update_outbound_websocket.mock_calls == [] - - monkeypatch.setattr(mock_rpc_device, "connected", True) - monkeypatch.setattr(mock_rpc_device, "initialized", True) - mock_rpc_device.mock_initialized() - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(seconds=ENTRY_RELOAD_COOLDOWN) - ) - await hass.async_block_till_done() - assert "device did not update" not in caplog.text - - monkeypatch.setattr(mock_rpc_device, "connected", False) - mock_rpc_device.mock_disconnected() - assert mock_rpc_device.update_outbound_websocket.mock_calls == [ - call("ws://10.10.10.10:8123/api/shelly/ws") - ] - - -async def test_zeroconf_sleeping_device_attempts_configure_ws_disabled( - hass: HomeAssistant, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test zeroconf discovery configures a sleeping device outbound websocket when its disabled.""" - monkeypatch.setattr(mock_rpc_device, "connected", False) - monkeypatch.setattr(mock_rpc_device, "initialized", False) - 
monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) - monkeypatch.setitem( - mock_rpc_device.config, "ws", {"enable": False, "server": "ws://oldha"} - ) - entry = MockConfigEntry( - domain="shelly", - unique_id="AABBCCDDEEFF", - data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1}, - ) - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - mock_rpc_device.mock_disconnected() - await hass.async_block_till_done() - - mock_rpc_device.mock_online() - await hass.async_block_till_done(wait_background_tasks=True) - - assert "online, resuming setup" in caplog.text - assert len(mock_rpc_device.initialize.mock_calls) == 1 - - with patch( - "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "AABBCCDDEEFF", "type": MODEL_1, "auth": False}, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - data=DISCOVERY_INFO, - context={"source": config_entries.SOURCE_ZEROCONF}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - assert mock_rpc_device.update_outbound_websocket.mock_calls == [] - - monkeypatch.setattr(mock_rpc_device, "connected", True) - monkeypatch.setattr(mock_rpc_device, "initialized", True) - mock_rpc_device.mock_initialized() - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(seconds=ENTRY_RELOAD_COOLDOWN) - ) - await hass.async_block_till_done() - assert "device did not update" not in caplog.text - - monkeypatch.setattr(mock_rpc_device, "connected", False) - mock_rpc_device.mock_disconnected() - assert mock_rpc_device.update_outbound_websocket.mock_calls == [ - call("ws://10.10.10.10:8123/api/shelly/ws") - ] - - -async def test_zeroconf_sleeping_device_attempts_configure_no_url_available( - hass: HomeAssistant, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test zeroconf discovery for sleeping device with no hass url.""" - hass.config.internal_url = None - hass.config.external_url = None - hass.config.api = None - monkeypatch.setattr(mock_rpc_device, "connected", False) - monkeypatch.setattr(mock_rpc_device, "initialized", False) - monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) - entry = MockConfigEntry( - domain="shelly", - unique_id="AABBCCDDEEFF", - data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1}, - ) - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - mock_rpc_device.mock_disconnected() - await hass.async_block_till_done() - - mock_rpc_device.mock_online() - await hass.async_block_till_done(wait_background_tasks=True) - - assert "online, resuming setup" in caplog.text - assert len(mock_rpc_device.initialize.mock_calls) == 1 - - with patch( - "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "AABBCCDDEEFF", "type": MODEL_1, "auth": False}, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - data=DISCOVERY_INFO, - context={"source": config_entries.SOURCE_ZEROCONF}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - assert mock_rpc_device.update_outbound_websocket.mock_calls == [] - - monkeypatch.setattr(mock_rpc_device, "connected", True) - monkeypatch.setattr(mock_rpc_device, "initialized", True) - mock_rpc_device.mock_initialized() - async_fire_time_changed( - hass, dt_util.utcnow() 
+ timedelta(seconds=ENTRY_RELOAD_COOLDOWN) - ) - await hass.async_block_till_done() - assert "device did not update" not in caplog.text - - monkeypatch.setattr(mock_rpc_device, "connected", False) - mock_rpc_device.mock_disconnected() - # No url available so no attempt to configure the device - assert mock_rpc_device.update_outbound_websocket.mock_calls == [] - - async def test_sleeping_device_gen2_with_new_firmware( hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch ) -> None: @@ -1361,10 +1202,17 @@ async def test_reconfigure_successful( ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" with patch( "homeassistant.components.shelly.config_flow.get_info", @@ -1393,10 +1241,17 @@ async def test_reconfigure_unsuccessful( ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" with patch( "homeassistant.components.shelly.config_flow.get_info", @@ -1430,10 +1285,17 @@ async def test_reconfigure_with_exception( ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure" + assert result["step_id"] == "reconfigure_confirm" with patch("homeassistant.components.shelly.config_flow.get_info", side_effect=exc): result = await hass.config_entries.flow.async_configure( @@ -1442,22 +1304,3 @@ async def test_reconfigure_with_exception( ) assert result["errors"] == {"base": base_error} - - -async def test_zeroconf_rejects_ipv6(hass: HomeAssistant) -> None: - """Test zeroconf discovery rejects ipv6.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, - data=zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("fd00::b27c:63bb:cc85:4ea0"), - ip_addresses=[ip_address("fd00::b27c:63bb:cc85:4ea0")], - hostname="mock_hostname", - name="shelly1pm-12345", - port=None, - properties={zeroconf.ATTR_PROPERTIES_ID: "shelly1pm-12345"}, - type="mock_type", - ), - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "ipv6_not_supported" diff --git a/tests/components/shelly/test_coordinator.py b/tests/components/shelly/test_coordinator.py index 47c338e3fad..35123a2db91 100644 --- a/tests/components/shelly/test_coordinator.py +++ b/tests/components/shelly/test_coordinator.py @@ -16,7 +16,6 @@ from homeassistant.components.shelly.const import ( ATTR_DEVICE, ATTR_GENERATION, CONF_BLE_SCANNER_MODE, - CONF_SLEEP_PERIOD, DOMAIN, ENTRY_RELOAD_COOLDOWN, MAX_PUSH_UPDATE_FAILURES, @@ -546,7 +545,6 @@ async def test_rpc_update_entry_sleep_period( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC update entry sleep period.""" - monkeypatch.setattr(mock_rpc_device, "connected", False) 
monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 600) entry = await init_integration(hass, 2, sleep_period=600) register_entity( @@ -580,7 +578,6 @@ async def test_rpc_sleeping_device_no_periodic_updates( ) -> None: """Test RPC sleeping device no periodic updates.""" entity_id = f"{SENSOR_DOMAIN}.test_name_temperature" - monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) entry = await init_integration(hass, 2, sleep_period=1000) register_entity( @@ -612,7 +609,6 @@ async def test_rpc_sleeping_device_firmware_unsupported( issue_registry: ir.IssueRegistry, ) -> None: """Test RPC sleeping device firmware not supported.""" - monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setattr(mock_rpc_device, "firmware_supported", False) entry = await init_integration(hass, 2, sleep_period=3600) @@ -678,7 +674,7 @@ async def test_rpc_polling_auth_error( monkeypatch.setattr( mock_rpc_device, - "poll", + "update_status", AsyncMock( side_effect=InvalidAuthError, ), @@ -768,7 +764,7 @@ async def test_rpc_polling_connection_error( monkeypatch.setattr( mock_rpc_device, - "poll", + "update_status", AsyncMock( side_effect=DeviceConnectionError, ), @@ -855,27 +851,6 @@ async def test_rpc_runs_connected_events_when_initialized( assert call.script_list() in mock_rpc_device.mock_calls -async def test_rpc_sleeping_device_unload_ignore_ble_scanner( - hass: HomeAssistant, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test RPC sleeping device does not stop ble scanner on unload.""" - monkeypatch.setattr(mock_rpc_device, "connected", True) - entry = await init_integration(hass, 2, sleep_period=1000) - - # Make device online - mock_rpc_device.mock_online() - await hass.async_block_till_done(wait_background_tasks=True) - - # Unload - await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - # BLE script list is called during stop ble scanner - assert call.script_list() not in mock_rpc_device.mock_calls - - async def test_block_sleeping_device_connection_error( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -887,14 +862,9 @@ async def test_block_sleeping_device_connection_error( """Test block sleeping device connection error during initialize.""" sleep_period = 1000 entry = await init_integration(hass, 1, sleep_period=sleep_period, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( - hass, - BINARY_SENSOR_DOMAIN, - "test_name_motion", - "sensor_0-motion", - entry, - device_id=device.id, + hass, BINARY_SENSOR_DOMAIN, "test_name_motion", "sensor_0-motion", entry ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) monkeypatch.setattr(mock_block_device, "initialized", False) @@ -937,17 +907,11 @@ async def test_rpc_sleeping_device_connection_error( """Test RPC sleeping device connection error during initialize.""" sleep_period = 1000 entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( - hass, - BINARY_SENSOR_DOMAIN, - "test_name_cloud", - "cloud-cloud", - entry, - device_id=device.id, + hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud-cloud", entry ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) - monkeypatch.setattr(mock_rpc_device, "connected", False) 
monkeypatch.setattr(mock_rpc_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -975,44 +939,3 @@ async def test_rpc_sleeping_device_connection_error( assert "Sleeping device did not update" in caplog.text assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE - - -async def test_rpc_sleeping_device_late_setup( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test RPC sleeping device creates entities if they do not exist yet.""" - entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) - assert entry.data[CONF_SLEEP_PERIOD] == 1000 - register_device(device_registry, entry) - monkeypatch.setattr(mock_rpc_device, "connected", False) - monkeypatch.setattr(mock_rpc_device, "initialized", False) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - monkeypatch.setattr(mock_rpc_device, "initialized", True) - mock_rpc_device.mock_online() - await hass.async_block_till_done(wait_background_tasks=True) - monkeypatch.setattr(mock_rpc_device, "connected", True) - mock_rpc_device.mock_initialized() - await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.test_name_temperature") is not None - - -async def test_rpc_already_connected( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_rpc_device: Mock, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test RPC ignore connect event if already connected.""" - await init_integration(hass, 2) - - mock_rpc_device.mock_online() - await hass.async_block_till_done(wait_background_tasks=True) - - assert "already connected" in caplog.text - mock_rpc_device.initialize.assert_called_once() diff --git a/tests/components/shelly/test_cover.py b/tests/components/shelly/test_cover.py index 40a364fd435..cd5efb76cfe 100644 --- a/tests/components/shelly/test_cover.py +++ b/tests/components/shelly/test_cover.py @@ -1,25 +1,21 @@ """Tests for Shelly cover platform.""" -from copy import deepcopy from unittest.mock import Mock import pytest from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, - ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, - ATTR_TILT_POSITION, DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, - SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, - SERVICE_OPEN_COVER_TILT, SERVICE_SET_COVER_POSITION, - SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, - SERVICE_STOP_COVER_TILT, - CoverState, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -56,7 +52,7 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == CoverState.OPENING + assert hass.states.get(entity_id).state == STATE_OPENING await hass.services.async_call( COVER_DOMAIN, @@ -64,7 +60,7 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == CoverState.CLOSING + assert hass.states.get(entity_id).state == STATE_CLOSING await hass.services.async_call( COVER_DOMAIN, @@ -72,7 +68,7 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert hass.states.get(entity_id).state == STATE_CLOSED entry 
= entity_registry.async_get(entity_id) assert entry @@ -86,11 +82,11 @@ async def test_block_device_update( monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "rollerPos", 0) await init_integration(hass, 1) - assert hass.states.get("cover.test_name").state == CoverState.CLOSED + assert hass.states.get("cover.test_name").state == STATE_CLOSED monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "rollerPos", 100) mock_block_device.mock_update() - assert hass.states.get("cover.test_name").state == CoverState.OPEN + assert hass.states.get("cover.test_name").state == STATE_OPEN async def test_block_device_no_roller_blocks( @@ -131,7 +127,7 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == CoverState.OPENING + assert hass.states.get(entity_id).state == STATE_OPENING mutate_rpc_device_status( monkeypatch, mock_rpc_device, "cover:0", "state", "closing" @@ -143,7 +139,7 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == CoverState.CLOSING + assert hass.states.get(entity_id).state == STATE_CLOSING mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "closed") await hass.services.async_call( @@ -153,7 +149,7 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert hass.states.get(entity_id).state == STATE_CLOSED entry = entity_registry.async_get(entity_id) assert entry @@ -175,11 +171,11 @@ async def test_rpc_device_update( """Test RPC device update.""" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "closed") await init_integration(hass, 2) - assert hass.states.get("cover.test_cover_0").state == CoverState.CLOSED + assert hass.states.get("cover.test_cover_0").state == STATE_CLOSED mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "open") mock_rpc_device.mock_update() - assert hass.states.get("cover.test_cover_0").state == CoverState.OPEN + assert hass.states.get("cover.test_cover_0").state == STATE_OPEN async def test_rpc_device_no_position_control( @@ -190,73 +186,4 @@ async def test_rpc_device_no_position_control( monkeypatch, mock_rpc_device, "cover:0", "pos_control", False ) await init_integration(hass, 2) - assert hass.states.get("cover.test_cover_0").state == CoverState.OPEN - - -async def test_rpc_cover_tilt( - hass: HomeAssistant, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - entity_registry: EntityRegistry, -) -> None: - """Test RPC cover that supports tilt.""" - entity_id = "cover.test_cover_0" - - config = deepcopy(mock_rpc_device.config) - config["cover:0"]["slat"] = {"enable": True} - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["cover:0"]["slat_pos"] = 0 - monkeypatch.setattr(mock_rpc_device, "status", status) - - await init_integration(hass, 3) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == "123456789ABC-cover:0" - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_SET_COVER_TILT_POSITION, - {ATTR_ENTITY_ID: entity_id, ATTR_TILT_POSITION: 50}, - blocking=True, - ) - mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 50) - mock_rpc_device.mock_update() - - state = 
hass.states.get(entity_id) - assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER_TILT, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 100) - mock_rpc_device.mock_update() - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 - - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER_TILT, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_STOP_COVER_TILT, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 10) - mock_rpc_device.mock_update() - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 10 + assert hass.states.get("cover.test_cover_0").state == STATE_OPEN diff --git a/tests/components/shelly/test_device_trigger.py b/tests/components/shelly/test_device_trigger.py index fb68393304b..d47cca17460 100644 --- a/tests/components/shelly/test_device_trigger.py +++ b/tests/components/shelly/test_device_trigger.py @@ -178,7 +178,7 @@ async def test_get_triggers_for_invalid_device_id( async def test_if_fires_on_click_event_block_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_block_device: Mock, ) -> None: """Test for click_event trigger firing for block device.""" @@ -215,14 +215,14 @@ async def test_if_fires_on_click_event_block_device( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test_trigger_single_click" + assert len(calls) == 1 + assert calls[0].data["some"] == "test_trigger_single_click" async def test_if_fires_on_click_event_rpc_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_rpc_device: Mock, ) -> None: """Test for click_event trigger firing for rpc device.""" @@ -259,14 +259,14 @@ async def test_if_fires_on_click_event_rpc_device( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test_trigger_single_push" + assert len(calls) == 1 + assert calls[0].data["some"] == "test_trigger_single_push" async def test_validate_trigger_block_device_not_ready( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_block_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -304,14 +304,14 @@ async def test_validate_trigger_block_device_not_ready( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test_trigger_single_click" + assert len(calls) == 1 + assert calls[0].data["some"] == "test_trigger_single_click" async def test_validate_trigger_rpc_device_not_ready( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -349,8 +349,8 @@ async def test_validate_trigger_rpc_device_not_ready( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert 
len(service_calls) == 1 - assert service_calls[0].data["some"] == "test_trigger_single_push" + assert len(calls) == 1 + assert calls[0].data["some"] == "test_trigger_single_push" async def test_validate_trigger_invalid_triggers( @@ -391,7 +391,7 @@ async def test_validate_trigger_invalid_triggers( async def test_rpc_no_runtime_data( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -429,14 +429,14 @@ async def test_rpc_no_runtime_data( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test_trigger_single_push" + assert len(calls) == 1 + assert calls[0].data["some"] == "test_trigger_single_push" async def test_block_no_runtime_data( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_block_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -474,5 +474,5 @@ async def test_block_no_runtime_data( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test_trigger_single" + assert len(calls) == 1 + assert calls[0].data["some"] == "test_trigger_single" diff --git a/tests/components/shelly/test_diagnostics.py b/tests/components/shelly/test_diagnostics.py index f576524ba60..4fc8ea6ca8f 100644 --- a/tests/components/shelly/test_diagnostics.py +++ b/tests/components/shelly/test_diagnostics.py @@ -1,6 +1,5 @@ """Tests for Shelly diagnostics platform.""" -from copy import deepcopy from unittest.mock import ANY, Mock, PropertyMock from aioshelly.ble.const import BLE_SCAN_RESULT_EVENT @@ -45,7 +44,7 @@ async def test_block_config_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, hass_client, entry) assert result == { - "entry": entry_dict | {"discovery_keys": {}}, + "entry": entry_dict, "bluetooth": "not initialized", "device_info": { "name": "Test name", @@ -105,7 +104,7 @@ async def test_rpc_config_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, hass_client, entry) assert result == { - "entry": entry_dict | {"discovery_keys": {}}, + "entry": entry_dict, "bluetooth": { "scanner": { "connectable": False, @@ -152,7 +151,7 @@ async def test_rpc_config_entry_diagnostics( "model": MODEL_25, "sw_version": "some fw string", }, - "device_settings": {"ws_outbound_enabled": False}, + "device_settings": {}, "device_status": { "sys": { "available_updates": { @@ -165,30 +164,3 @@ async def test_rpc_config_entry_diagnostics( }, "last_error": "DeviceConnectionError()", } - - -@pytest.mark.parametrize( - ("ws_outbound_server", "ws_outbound_server_valid"), - [("ws://10.10.10.10:8123/api/shelly/ws", True), ("wrong_url", False)], -) -async def test_rpc_config_entry_diagnostics_ws_outbound( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - ws_outbound_server: str, - ws_outbound_server_valid: bool, -) -> None: - """Test config entry diagnostics for rpc device with websocket outbound.""" - config = deepcopy(mock_rpc_device.config) - config["ws"] = {"enable": True, "server": ws_outbound_server} - monkeypatch.setattr(mock_rpc_device, "config", config) - - entry = await init_integration(hass, 2, sleep_period=60) - - result = await get_diagnostics_for_config_entry(hass, 
hass_client, entry) - - assert ( - result["device_settings"]["ws_outbound_server_valid"] - == ws_outbound_server_valid - ) diff --git a/tests/components/shelly/test_init.py b/tests/components/shelly/test_init.py index b5516485501..998d56fc6cc 100644 --- a/tests/components/shelly/test_init.py +++ b/tests/components/shelly/test_init.py @@ -279,7 +279,6 @@ async def test_sleeping_rpc_device_online( caplog: pytest.LogCaptureFixture, ) -> None: """Test sleeping RPC device online.""" - monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", device_sleep) entry = await init_integration(hass, 2, sleep_period=entry_sleep) assert "will resume when device is online" in caplog.text @@ -298,7 +297,6 @@ async def test_sleeping_rpc_device_online_new_firmware( caplog: pytest.LogCaptureFixture, ) -> None: """Test sleeping device Gen2 with firmware 1.0.0 or later.""" - monkeypatch.setattr(mock_rpc_device, "connected", False) entry = await init_integration(hass, 2, sleep_period=None) assert "will resume when device is online" in caplog.text @@ -310,52 +308,6 @@ async def test_sleeping_rpc_device_online_new_firmware( assert entry.data["sleep_period"] == 1500 -async def test_sleeping_rpc_device_online_during_setup( - hass: HomeAssistant, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test sleeping device Gen2 woke up by user during setup.""" - monkeypatch.setattr(mock_rpc_device, "connected", False) - monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) - await init_integration(hass, 2, sleep_period=1000) - await hass.async_block_till_done(wait_background_tasks=True) - - assert "will resume when device is online" in caplog.text - assert "is online (source: setup)" in caplog.text - assert hass.states.get("sensor.test_name_temperature") is not None - - -async def test_sleeping_rpc_device_offline_during_setup( - hass: HomeAssistant, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test sleeping device Gen2 woke up by user during setup.""" - monkeypatch.setattr(mock_rpc_device, "connected", False) - monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) - monkeypatch.setattr( - mock_rpc_device, "initialize", AsyncMock(side_effect=DeviceConnectionError) - ) - - # Init integration, should fail since device is offline - await init_integration(hass, 2, sleep_period=1000) - await hass.async_block_till_done(wait_background_tasks=True) - - assert "will resume when device is online" in caplog.text - assert "is online (source: setup)" in caplog.text - assert hass.states.get("sensor.test_name_temperature") is None - - # Create an online event and verify that device is init successfully - monkeypatch.setattr(mock_rpc_device, "initialize", AsyncMock()) - mock_rpc_device.mock_online() - await hass.async_block_till_done(wait_background_tasks=True) - - assert hass.states.get("sensor.test_name_temperature") is not None - - @pytest.mark.parametrize( ("gen", "entity_id"), [ diff --git a/tests/components/shelly/test_light.py b/tests/components/shelly/test_light.py index 482821aa966..2c464a8c39c 100644 --- a/tests/components/shelly/test_light.py +++ b/tests/components/shelly/test_light.py @@ -1,6 +1,5 @@ """Tests for Shelly light platform.""" -from copy import deepcopy from unittest.mock import AsyncMock, Mock from aioshelly.const import ( @@ -16,13 +15,10 @@ import pytest from 
homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_BRIGHTNESS_PCT, ATTR_COLOR_MODE, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, - ATTR_MAX_COLOR_TEMP_KELVIN, - ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_SUPPORTED_COLOR_MODES, @@ -33,6 +29,7 @@ from homeassistant.components.light import ( ColorMode, LightEntityFeature, ) +from homeassistant.components.shelly.const import SHELLY_PLUS_RGBW_CHANNELS from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, @@ -40,21 +37,13 @@ from homeassistant.const import ( STATE_ON, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry -from . import ( - get_entity, - init_integration, - mutate_rpc_device_status, - register_device, - register_entity, -) +from . import get_entity, init_integration, mutate_rpc_device_status, register_entity from .conftest import mock_white_light_set_state RELAY_BLOCK_ID = 0 LIGHT_BLOCK_ID = 2 -SHELLY_PLUS_RGBW_CHANNELS = 4 async def test_block_device_rgbw_bulb( @@ -693,39 +682,21 @@ async def test_rpc_rgbw_device_light_mode_remove_others( hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry, - device_registry: DeviceRegistry, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test Shelly RPC RGBW device in light mode removes RGB/RGBW entities.""" + # register lights monkeypatch.delitem(mock_rpc_device.status, "rgb:0") monkeypatch.delitem(mock_rpc_device.status, "rgbw:0") - - # register rgb and rgbw lights - config_entry = await init_integration(hass, 2, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - register_entity( - hass, - LIGHT_DOMAIN, - "test_rgb_0", - "rgb:0", - config_entry, - device_id=device_entry.id, - ) - register_entity( - hass, - LIGHT_DOMAIN, - "test_rgbw_0", - "rgbw:0", - config_entry, - device_id=device_entry.id, - ) + register_entity(hass, LIGHT_DOMAIN, "test_rgb_0", "rgb:0") + register_entity(hass, LIGHT_DOMAIN, "test_rgbw_0", "rgbw:0") # verify RGB & RGBW entities created assert get_entity(hass, LIGHT_DOMAIN, "rgb:0") is not None assert get_entity(hass, LIGHT_DOMAIN, "rgbw:0") is not None - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + # init to remove RGB & RGBW + await init_integration(hass, 2) # verify we have 4 lights for i in range(SHELLY_PLUS_RGBW_CHANNELS): @@ -751,45 +722,27 @@ async def test_rpc_rgbw_device_rgb_w_modes_remove_others( hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry, - device_registry: DeviceRegistry, monkeypatch: pytest.MonkeyPatch, active_mode: str, removed_mode: str, ) -> None: """Test Shelly RPC RGBW device in RGB/W modes other lights.""" removed_key = f"{removed_mode}:0" - config_entry = await init_integration(hass, 2, skip_setup=True) - device_entry = register_device(device_registry, config_entry) # register lights for i in range(SHELLY_PLUS_RGBW_CHANNELS): monkeypatch.delitem(mock_rpc_device.status, f"light:{i}") entity_id = f"light.test_light_{i}" - register_entity( - hass, - LIGHT_DOMAIN, - entity_id, - f"light:{i}", - config_entry, - device_id=device_entry.id, - ) + register_entity(hass, LIGHT_DOMAIN, entity_id, f"light:{i}") monkeypatch.delitem(mock_rpc_device.status, f"{removed_mode}:0") - register_entity( - hass, - LIGHT_DOMAIN, - f"test_{removed_key}", - removed_key, - config_entry, - device_id=device_entry.id, - ) + register_entity(hass, 
LIGHT_DOMAIN, f"test_{removed_key}", removed_key) # verify lights entities created for i in range(SHELLY_PLUS_RGBW_CHANNELS): assert get_entity(hass, LIGHT_DOMAIN, f"light:{i}") is not None assert get_entity(hass, LIGHT_DOMAIN, removed_key) is not None - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await init_integration(hass, 2) # verify we have RGB/w light entity_id = f"light.test_{active_mode}_0" @@ -802,126 +755,3 @@ async def test_rpc_rgbw_device_rgb_w_modes_remove_others( for i in range(SHELLY_PLUS_RGBW_CHANNELS): assert get_entity(hass, LIGHT_DOMAIN, f"light:{i}") is None assert get_entity(hass, LIGHT_DOMAIN, removed_key) is None - - -async def test_rpc_cct_light( - hass: HomeAssistant, - mock_rpc_device: Mock, - entity_registry: EntityRegistry, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test RPC CCT light.""" - entity_id = f"{LIGHT_DOMAIN}.test_name_cct_light_0" - - config = deepcopy(mock_rpc_device.config) - config["cct:0"] = {"id": 0, "name": None, "ct_range": [3333, 5555]} - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["cct:0"] = {"id": 0, "output": False, "brightness": 77, "ct": 3666} - monkeypatch.setattr(mock_rpc_device, "status", status) - - await init_integration(hass, 2) - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == "123456789ABC-cct:0" - - # Turn off - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - mock_rpc_device.call_rpc.assert_called_once_with("CCT.Set", {"id": 0, "on": False}) - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - - # Turn on - mock_rpc_device.call_rpc.reset_mock() - mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cct:0", "output", True) - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - mock_rpc_device.mock_update() - mock_rpc_device.call_rpc.assert_called_once_with("CCT.Set", {"id": 0, "on": True}) - state = hass.states.get(entity_id) - assert state.state == STATE_ON - assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP - assert state.attributes[ATTR_BRIGHTNESS] == 196 # 77% of 255 - assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 3666 - assert state.attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 3333 - assert state.attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 5555 - - # Turn on, brightness = 88 - mock_rpc_device.call_rpc.reset_mock() - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS_PCT: 88}, - blocking=True, - ) - - mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cct:0", "brightness", 88) - mock_rpc_device.mock_update() - - mock_rpc_device.call_rpc.assert_called_once_with( - "CCT.Set", {"id": 0, "on": True, "brightness": 88} - ) - state = hass.states.get(entity_id) - assert state.state == STATE_ON - assert state.attributes[ATTR_BRIGHTNESS] == 224 # 88% of 255 - - # Turn on, color temp = 4444 K - mock_rpc_device.call_rpc.reset_mock() - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 4444}, - blocking=True, - ) - - mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cct:0", "ct", 4444) - - mock_rpc_device.mock_update() - - mock_rpc_device.call_rpc.assert_called_once_with( - "CCT.Set", {"id": 0, "on": True, "ct": 4444} - ) - state = 
hass.states.get(entity_id) - assert state.state == STATE_ON - assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 4444 - - -async def test_rpc_remove_cct_light( - hass: HomeAssistant, - mock_rpc_device: Mock, - device_registry: DeviceRegistry, -) -> None: - """Test Shelly RPC remove orphaned CCT light entity.""" - # register CCT light entity - config_entry = await init_integration(hass, 2, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - register_entity( - hass, - LIGHT_DOMAIN, - "cct_light_0", - "cct:0", - config_entry, - device_id=device_entry.id, - ) - - # verify CCT light entity created - assert get_entity(hass, LIGHT_DOMAIN, "cct:0") is not None - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - # there is no cct:0 in the status, so the CCT light entity should be removed - assert get_entity(hass, LIGHT_DOMAIN, "cct:0") is None diff --git a/tests/components/shelly/test_number.py b/tests/components/shelly/test_number.py index 6c1cc394b64..ff453b3251c 100644 --- a/tests/components/shelly/test_number.py +++ b/tests/components/shelly/test_number.py @@ -1,24 +1,18 @@ """Tests for Shelly number platform.""" -from copy import deepcopy from unittest.mock import AsyncMock, Mock from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError import pytest from homeassistant.components.number import ( - ATTR_MAX, - ATTR_MIN, - ATTR_MODE, - ATTR_STEP, ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, - NumberMode, ) from homeassistant.components.shelly.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceRegistry @@ -72,7 +66,7 @@ async def test_block_restored_number( ) -> None: """Test block restored number.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) capabilities = { "min": 0, "max": 100, @@ -86,7 +80,6 @@ async def test_block_restored_number( "device_0-valvePos", entry, capabilities, - device_id=device.id, ) extra_data = { "native_max_value": 100, @@ -119,7 +112,7 @@ async def test_block_restored_number_no_last_state( ) -> None: """Test block restored number missing last state.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) capabilities = { "min": 0, "max": 100, @@ -133,7 +126,6 @@ async def test_block_restored_number_no_last_state( "device_0-valvePos", entry, capabilities, - device_id=device.id, ) monkeypatch.setattr(mock_block_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) @@ -248,145 +240,3 @@ async def test_block_set_value_auth_error( assert "context" in flow assert flow["context"].get("source") == SOURCE_REAUTH assert flow["context"].get("entry_id") == entry.entry_id - - -@pytest.mark.parametrize( - ("name", "entity_id", "original_unit", "expected_unit", "view", "mode"), - [ - ( - "Virtual number", - "number.test_name_virtual_number", - "%", - "%", - "field", - NumberMode.BOX, - ), - (None, "number.test_name_number_203", "", None, "field", 
NumberMode.BOX), - ( - "Virtual slider", - "number.test_name_virtual_slider", - "Hz", - "Hz", - "slider", - NumberMode.SLIDER, - ), - ], -) -async def test_rpc_device_virtual_number( - hass: HomeAssistant, - entity_registry: EntityRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - name: str | None, - entity_id: str, - original_unit: str, - expected_unit: str | None, - view: str, - mode: NumberMode, -) -> None: - """Test a virtual number for RPC device.""" - config = deepcopy(mock_rpc_device.config) - config["number:203"] = { - "name": name, - "min": 0, - "max": 100, - "meta": {"ui": {"step": 0.1, "unit": original_unit, "view": view}}, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["number:203"] = {"value": 12.3} - monkeypatch.setattr(mock_rpc_device, "status", status) - - await init_integration(hass, 3) - - state = hass.states.get(entity_id) - assert state - assert state.state == "12.3" - assert state.attributes.get(ATTR_MIN) == 0 - assert state.attributes.get(ATTR_MAX) == 100 - assert state.attributes.get(ATTR_STEP) == 0.1 - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - assert state.attributes.get(ATTR_MODE) is mode - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == "123456789ABC-number:203-number" - - monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 78.9) - mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "78.9" - - monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 56.7}, - blocking=True, - ) - mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "56.7" - - -async def test_rpc_remove_virtual_number_when_mode_label( - hass: HomeAssistant, - entity_registry: EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test if the virtual number will be removed if the mode has been changed to a label.""" - config = deepcopy(mock_rpc_device.config) - config["number:200"] = { - "name": None, - "min": -1000, - "max": 1000, - "meta": {"ui": {"step": 1, "unit": "", "view": "label"}}, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["number:200"] = {"value": 123} - monkeypatch.setattr(mock_rpc_device, "status", status) - - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - NUMBER_DOMAIN, - "test_name_number_200", - "number:200-number", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry - - -async def test_rpc_remove_virtual_number_when_orphaned( - hass: HomeAssistant, - entity_registry: EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, -) -> None: - """Check whether the virtual number will be removed if it has been removed from the device configuration.""" - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - NUMBER_DOMAIN, - "test_name_number_200", - "number:200-number", - config_entry, - 
device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry diff --git a/tests/components/shelly/test_select.py b/tests/components/shelly/test_select.py deleted file mode 100644 index 0a6eb2a5843..00000000000 --- a/tests/components/shelly/test_select.py +++ /dev/null @@ -1,151 +0,0 @@ -"""Tests for Shelly select platform.""" - -from copy import deepcopy -from unittest.mock import Mock - -import pytest - -from homeassistant.components.select import ( - ATTR_OPTION, - ATTR_OPTIONS, - DOMAIN as SELECT_PLATFORM, - SERVICE_SELECT_OPTION, -) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceRegistry -from homeassistant.helpers.entity_registry import EntityRegistry - -from . import init_integration, register_device, register_entity - - -@pytest.mark.parametrize( - ("name", "entity_id", "value", "expected_state"), - [ - ("Virtual enum", "select.test_name_virtual_enum", "option 1", "Title 1"), - (None, "select.test_name_enum_203", None, STATE_UNKNOWN), - ], -) -async def test_rpc_device_virtual_enum( - hass: HomeAssistant, - entity_registry: EntityRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - name: str | None, - entity_id: str, - value: str | None, - expected_state: str, -) -> None: - """Test a virtual enum for RPC device.""" - config = deepcopy(mock_rpc_device.config) - config["enum:203"] = { - "name": name, - "options": ["option 1", "option 2", "option 3"], - "meta": { - "ui": { - "view": "dropdown", - "titles": {"option 1": "Title 1", "option 2": None}, - } - }, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["enum:203"] = {"value": value} - monkeypatch.setattr(mock_rpc_device, "status", status) - - await init_integration(hass, 3) - - state = hass.states.get(entity_id) - assert state - assert state.state == expected_state - assert state.attributes.get(ATTR_OPTIONS) == [ - "Title 1", - "option 2", - "option 3", - ] - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == "123456789ABC-enum:203-enum" - - monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "option 2") - mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "option 2" - - monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "option 1") - await hass.services.async_call( - SELECT_PLATFORM, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "Title 1"}, - blocking=True, - ) - # 'Title 1' corresponds to 'option 1' - assert mock_rpc_device.call_rpc.call_args[0][1] == {"id": 203, "value": "option 1"} - mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "Title 1" - - -async def test_rpc_remove_virtual_enum_when_mode_label( - hass: HomeAssistant, - entity_registry: EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test if the virtual enum will be removed if the mode has been changed to a label.""" - config = deepcopy(mock_rpc_device.config) - config["enum:200"] = { - "name": None, - "options": ["one", "two"], - "meta": { - "ui": {"view": "label", "titles": {"one": "Title 1", "two": "Title 2"}} - }, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = 
deepcopy(mock_rpc_device.status) - status["enum:200"] = {"value": "one"} - monkeypatch.setattr(mock_rpc_device, "status", status) - - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - SELECT_PLATFORM, - "test_name_enum_200", - "enum:200-enum", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry - - -async def test_rpc_remove_virtual_enum_when_orphaned( - hass: HomeAssistant, - entity_registry: EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, -) -> None: - """Check whether the virtual enum will be removed if it has been removed from the device configuration.""" - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - SELECT_PLATFORM, - "test_name_enum_200", - "enum:200-enum", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index 18c3d874c55..513bcd875e2 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -11,7 +11,6 @@ from homeassistant.components.homeassistant import ( SERVICE_UPDATE_ENTITY, ) from homeassistant.components.sensor import ( - ATTR_OPTIONS, ATTR_STATE_CLASS, DOMAIN as SENSOR_DOMAIN, SensorDeviceClass, @@ -25,12 +24,8 @@ from homeassistant.const import ( PERCENTAGE, STATE_UNAVAILABLE, STATE_UNKNOWN, - UnitOfElectricCurrent, - UnitOfElectricPotential, UnitOfEnergy, UnitOfFrequency, - UnitOfPower, - UnitOfTemperature, ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers.device_registry import DeviceRegistry @@ -47,7 +42,7 @@ from . 
import ( register_entity, ) -from tests.common import async_fire_time_changed, mock_restore_cache_with_extra_data +from tests.common import mock_restore_cache_with_extra_data RELAY_BLOCK_ID = 0 SENSOR_BLOCK_ID = 3 @@ -193,14 +188,9 @@ async def test_block_restored_sleeping_sensor( ) -> None: """Test block restored sleeping sensor.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( - hass, - SENSOR_DOMAIN, - "test_name_temperature", - "sensor_0-temp", - entry, - device_id=device.id, + hass, SENSOR_DOMAIN, "test_name_temperature", "sensor_0-temp", entry ) extra_data = {"native_value": "20.4", "native_unit_of_measurement": "°C"} @@ -231,14 +221,9 @@ async def test_block_restored_sleeping_sensor_no_last_state( ) -> None: """Test block restored sleeping sensor missing last state.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( - hass, - SENSOR_DOMAIN, - "test_name_temperature", - "sensor_0-temp", - entry, - device_id=device.id, + hass, SENSOR_DOMAIN, "test_name_temperature", "sensor_0-temp", entry ) monkeypatch.setattr(mock_block_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) @@ -303,14 +288,9 @@ async def test_block_not_matched_restored_sleeping_sensor( ) -> None: """Test block not matched to restored sleeping sensor.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( - hass, - SENSOR_DOMAIN, - "test_name_temperature", - "sensor_0-temp", - entry, - device_id=device.id, + hass, SENSOR_DOMAIN, "test_name_temperature", "sensor_0-temp", entry ) extra_data = {"native_value": "20.4", "native_unit_of_measurement": "°C"} @@ -469,7 +449,6 @@ async def test_rpc_sleeping_sensor( ) -> None: """Test RPC online sleeping sensor.""" entity_id = f"{SENSOR_DOMAIN}.test_name_temperature" - monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) entry = await init_integration(hass, 2, sleep_period=1000) @@ -504,14 +483,13 @@ async def test_rpc_restored_sleeping_sensor( ) -> None: """Test RPC restored sensor.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( hass, SENSOR_DOMAIN, "test_name_temperature", "temperature:0-temperature_0", entry, - device_id=device.id, ) extra_data = {"native_value": "21.0", "native_unit_of_measurement": "°C"} @@ -543,14 +521,13 @@ async def test_rpc_restored_sleeping_sensor_no_last_state( ) -> None: """Test RPC restored sensor missing last state.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( hass, SENSOR_DOMAIN, "test_name_temperature", "temperature:0-temperature_0", entry, - device_id=device.id, ) monkeypatch.setattr(mock_rpc_device, "initialized", False) @@ -623,7 +600,6 @@ async def test_rpc_sleeping_update_entity_service( await async_setup_component(hass, "homeassistant", {}) entity_id = f"{SENSOR_DOMAIN}.test_name_temperature" 
- monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) await init_integration(hass, 2, sleep_period=1000) @@ -706,41 +682,21 @@ async def test_block_sleeping_update_entity_service( ) -@pytest.mark.parametrize( - ("original_unit", "expected_unit"), - [ - ("m/s", "m/s"), - (None, None), - ("", None), - ], -) async def test_rpc_analog_input_sensors( - hass: HomeAssistant, - mock_rpc_device: Mock, - entity_registry: EntityRegistry, - monkeypatch: pytest.MonkeyPatch, - original_unit: str | None, - expected_unit: str | None, + hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry ) -> None: """Test RPC analog input xpercent sensor.""" - config = deepcopy(mock_rpc_device.config) - config["input:1"]["xpercent"] = {"expr": "x*0.2995", "unit": original_unit} - monkeypatch.setattr(mock_rpc_device, "config", config) - await init_integration(hass, 2) - entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog" + entity_id = f"{SENSOR_DOMAIN}.test_name_analog_input" assert hass.states.get(entity_id).state == "89" entry = entity_registry.async_get(entity_id) assert entry assert entry.unique_id == "123456789ABC-input:1-analoginput" - entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog_value" - state = hass.states.get(entity_id) - assert state - assert state.state == "8.9" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit + entity_id = f"{SENSOR_DOMAIN}.test_name_analog_value" + assert hass.states.get(entity_id).state == "8.9" entry = entity_registry.async_get(entity_id) assert entry @@ -757,10 +713,10 @@ async def test_rpc_disabled_analog_input_sensors( await init_integration(hass, 2) - entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog" + entity_id = f"{SENSOR_DOMAIN}.test_name_analog_input" assert hass.states.get(entity_id) is None - entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog_value" + entity_id = f"{SENSOR_DOMAIN}.test_name_analog_value" assert hass.states.get(entity_id) is None @@ -777,34 +733,20 @@ async def test_rpc_disabled_xpercent( ) await init_integration(hass, 2) - entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog" + entity_id = f"{SENSOR_DOMAIN}.test_name_analog_input" assert hass.states.get(entity_id).state == "89" - entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog_value" + entity_id = f"{SENSOR_DOMAIN}.test_name_analog_value" assert hass.states.get(entity_id) is None -@pytest.mark.parametrize( - ("original_unit", "expected_unit"), - [ - ("l/h", "l/h"), - (None, None), - ("", None), - ], -) async def test_rpc_pulse_counter_sensors( hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry, monkeypatch: pytest.MonkeyPatch, - original_unit: str | None, - expected_unit: str | None, ) -> None: """Test RPC counter sensor.""" - config = deepcopy(mock_rpc_device.config) - config["input:2"]["xcounts"] = {"expr": "x/10", "unit": original_unit} - monkeypatch.setattr(mock_rpc_device, "config", config) - await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter" @@ -818,10 +760,7 @@ async def test_rpc_pulse_counter_sensors( assert entry.unique_id == "123456789ABC-input:2-pulse_counter" entity_id = f"{SENSOR_DOMAIN}.gas_counter_value" - state = hass.states.get(entity_id) - assert state - assert state.state == "561.74" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit + assert hass.states.get(entity_id).state == "561.74" entry = entity_registry.async_get(entity_id) assert entry @@ -865,27 +804,12 
@@ async def test_rpc_disabled_xtotal_counter( assert hass.states.get(entity_id) is None -@pytest.mark.parametrize( - ("original_unit", "expected_unit"), - [ - ("W", "W"), - (None, None), - ("", None), - ], -) async def test_rpc_pulse_counter_frequency_sensors( hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry, - monkeypatch: pytest.MonkeyPatch, - original_unit: str | None, - expected_unit: str | None, ) -> None: """Test RPC counter sensor.""" - config = deepcopy(mock_rpc_device.config) - config["input:2"]["xfreq"] = {"expr": "x**2", "unit": original_unit} - monkeypatch.setattr(mock_rpc_device, "config", config) - await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency" @@ -899,487 +823,8 @@ async def test_rpc_pulse_counter_frequency_sensors( assert entry.unique_id == "123456789ABC-input:2-counter_frequency" entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency_value" - state = hass.states.get(entity_id) - assert state - assert state.state == "6.11" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit + assert hass.states.get(entity_id).state == "6.11" entry = entity_registry.async_get(entity_id) assert entry assert entry.unique_id == "123456789ABC-input:2-counter_frequency_value" - - -async def test_rpc_disabled_xfreq( - hass: HomeAssistant, - mock_rpc_device: Mock, - entity_registry: EntityRegistry, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test RPC input with the xfreq sensor disabled.""" - status = deepcopy(mock_rpc_device.status) - status["input:2"] = { - "id": 2, - "counts": {"total": 56174, "xtotal": 561.74}, - "freq": 208.00, - } - monkeypatch.setattr(mock_rpc_device, "status", status) - - await init_integration(hass, 2) - - entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency_value" - - state = hass.states.get(entity_id) - assert not state - - entry = entity_registry.async_get(entity_id) - assert not entry - - -@pytest.mark.parametrize( - ("name", "entity_id"), - [ - ("Virtual sensor", "sensor.test_name_virtual_sensor"), - (None, "sensor.test_name_text_203"), - ], -) -async def test_rpc_device_virtual_text_sensor( - hass: HomeAssistant, - entity_registry: EntityRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - name: str | None, - entity_id: str, -) -> None: - """Test a virtual text sensor for RPC device.""" - config = deepcopy(mock_rpc_device.config) - config["text:203"] = { - "name": name, - "meta": {"ui": {"view": "label"}}, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["text:203"] = {"value": "lorem ipsum"} - monkeypatch.setattr(mock_rpc_device, "status", status) - - await init_integration(hass, 3) - - state = hass.states.get(entity_id) - assert state - assert state.state == "lorem ipsum" - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == "123456789ABC-text:203-text" - - monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") - mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "dolor sit amet" - - -async def test_rpc_remove_text_virtual_sensor_when_mode_field( - hass: HomeAssistant, - entity_registry: EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test if the virtual text sensor will be removed if the mode has been changed to a field.""" - config = deepcopy(mock_rpc_device.config) - config["text:200"] = {"name": None, 
"meta": {"ui": {"view": "field"}}} - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["text:200"] = {"value": "lorem ipsum"} - monkeypatch.setattr(mock_rpc_device, "status", status) - - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - SENSOR_DOMAIN, - "test_name_text_200", - "text:200-text", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry - - -async def test_rpc_remove_text_virtual_sensor_when_orphaned( - hass: HomeAssistant, - entity_registry: EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, -) -> None: - """Check whether the virtual text sensor will be removed if it has been removed from the device configuration.""" - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - SENSOR_DOMAIN, - "test_name_text_200", - "text:200-text", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry - - -@pytest.mark.parametrize( - ("name", "entity_id", "original_unit", "expected_unit"), - [ - ("Virtual number sensor", "sensor.test_name_virtual_number_sensor", "W", "W"), - (None, "sensor.test_name_number_203", "", None), - ], -) -async def test_rpc_device_virtual_number_sensor( - hass: HomeAssistant, - entity_registry: EntityRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - name: str | None, - entity_id: str, - original_unit: str, - expected_unit: str | None, -) -> None: - """Test a virtual number sensor for RPC device.""" - config = deepcopy(mock_rpc_device.config) - config["number:203"] = { - "name": name, - "min": 0, - "max": 100, - "meta": {"ui": {"step": 0.1, "unit": original_unit, "view": "label"}}, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["number:203"] = {"value": 34.5} - monkeypatch.setattr(mock_rpc_device, "status", status) - - await init_integration(hass, 3) - - state = hass.states.get(entity_id) - assert state - assert state.state == "34.5" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == "123456789ABC-number:203-number" - - monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) - mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "56.7" - - -async def test_rpc_remove_number_virtual_sensor_when_mode_field( - hass: HomeAssistant, - entity_registry: EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test if the virtual number sensor will be removed if the mode has been changed to a field.""" - config = deepcopy(mock_rpc_device.config) - config["number:200"] = { - "name": None, - "min": 0, - "max": 100, - "meta": {"ui": {"step": 1, "unit": "", "view": "field"}}, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["number:200"] = {"value": 67.8} - 
monkeypatch.setattr(mock_rpc_device, "status", status) - - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - SENSOR_DOMAIN, - "test_name_number_200", - "number:200-number", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry - - -async def test_rpc_remove_number_virtual_sensor_when_orphaned( - hass: HomeAssistant, - entity_registry: EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, -) -> None: - """Check whether the virtual number sensor will be removed if it has been removed from the device configuration.""" - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - SENSOR_DOMAIN, - "test_name_number_200", - "number:200-number", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry - - -@pytest.mark.parametrize( - ("name", "entity_id", "value", "expected_state"), - [ - ( - "Virtual enum sensor", - "sensor.test_name_virtual_enum_sensor", - "one", - "Title 1", - ), - (None, "sensor.test_name_enum_203", None, STATE_UNKNOWN), - ], -) -async def test_rpc_device_virtual_enum_sensor( - hass: HomeAssistant, - entity_registry: EntityRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - name: str | None, - entity_id: str, - value: str | None, - expected_state: str, -) -> None: - """Test a virtual enum sensor for RPC device.""" - config = deepcopy(mock_rpc_device.config) - config["enum:203"] = { - "name": name, - "options": ["one", "two", "three"], - "meta": {"ui": {"view": "label", "titles": {"one": "Title 1", "two": None}}}, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["enum:203"] = {"value": value} - monkeypatch.setattr(mock_rpc_device, "status", status) - - await init_integration(hass, 3) - - state = hass.states.get(entity_id) - assert state - assert state.state == expected_state - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENUM - assert state.attributes.get(ATTR_OPTIONS) == ["Title 1", "two", "three"] - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == "123456789ABC-enum:203-enum" - - monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "two") - mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "two" - - -async def test_rpc_remove_enum_virtual_sensor_when_mode_dropdown( - hass: HomeAssistant, - entity_registry: EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test if the virtual enum sensor will be removed if the mode has been changed to a dropdown.""" - config = deepcopy(mock_rpc_device.config) - config["enum:200"] = { - "name": None, - "options": ["option 1", "option 2", "option 3"], - "meta": { - "ui": { - "view": "dropdown", - "titles": {"option 1": "Title 1", "option 2": None}, - } - }, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["enum:200"] = {"value": "option 2"} - 
monkeypatch.setattr(mock_rpc_device, "status", status) - - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - SENSOR_DOMAIN, - "test_name_enum_200", - "enum:200-enum", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry - - -async def test_rpc_remove_enum_virtual_sensor_when_orphaned( - hass: HomeAssistant, - entity_registry: EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, -) -> None: - """Check whether the virtual enum sensor will be removed if it has been removed from the device configuration.""" - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - SENSOR_DOMAIN, - "test_name_enum_200", - "enum:200-enum", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.parametrize("light_type", ["rgb", "rgbw"]) -async def test_rpc_rgbw_sensors( - hass: HomeAssistant, - entity_registry: EntityRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - light_type: str, -) -> None: - """Test sensors for RGB/RGBW light.""" - config = deepcopy(mock_rpc_device.config) - config[f"{light_type}:0"] = {"id": 0} - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status[f"{light_type}:0"] = { - "temperature": {"tC": 54.3, "tF": 129.7}, - "aenergy": {"total": 45.141}, - "apower": 12.2, - "current": 0.23, - "voltage": 12.4, - } - monkeypatch.setattr(mock_rpc_device, "status", status) - - await init_integration(hass, 2) - - entity_id = f"sensor.test_name_{light_type}_light_0_power" - - state = hass.states.get(entity_id) - assert state - assert state.state == "12.2" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPower.WATT - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == f"123456789ABC-{light_type}:0-power_{light_type}" - - entity_id = f"sensor.test_name_{light_type}_light_0_energy" - - state = hass.states.get(entity_id) - assert state - assert state.state == "0.045141" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == f"123456789ABC-{light_type}:0-energy_{light_type}" - - entity_id = f"sensor.test_name_{light_type}_light_0_current" - - state = hass.states.get(entity_id) - assert state - assert state.state == "0.23" - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfElectricCurrent.AMPERE - ) - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == f"123456789ABC-{light_type}:0-current_{light_type}" - - entity_id = f"sensor.test_name_{light_type}_light_0_voltage" - - state = hass.states.get(entity_id) - assert state - assert state.state == "12.4" - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfElectricPotential.VOLT - ) - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == 
f"123456789ABC-{light_type}:0-voltage_{light_type}" - - entity_id = f"sensor.test_name_{light_type}_light_0_device_temperature" - - state = hass.states.get(entity_id) - assert state - assert state.state == "54.3" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == f"123456789ABC-{light_type}:0-temperature_{light_type}" - - -async def test_rpc_device_sensor_goes_unavailable_on_disconnect( - hass: HomeAssistant, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - freezer: FrozenDateTimeFactory, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test RPC device with sensor goes unavailable on disconnect.""" - await init_integration(hass, 2) - temp_sensor_state = hass.states.get("sensor.test_name_temperature") - assert temp_sensor_state is not None - assert temp_sensor_state.state != STATE_UNAVAILABLE - monkeypatch.setattr(mock_rpc_device, "connected", False) - monkeypatch.setattr(mock_rpc_device, "initialized", False) - mock_rpc_device.mock_disconnected() - await hass.async_block_till_done() - temp_sensor_state = hass.states.get("sensor.test_name_temperature") - assert temp_sensor_state.state == STATE_UNAVAILABLE - - freezer.tick(60) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert "NotInitialized" not in caplog.text - - monkeypatch.setattr(mock_rpc_device, "connected", True) - monkeypatch.setattr(mock_rpc_device, "initialized", True) - mock_rpc_device.mock_initialized() - await hass.async_block_till_done() - temp_sensor_state = hass.states.get("sensor.test_name_temperature") - assert temp_sensor_state.state != STATE_UNAVAILABLE diff --git a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index 5c7933afd7e..637a92a7fbe 100644 --- a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -25,7 +25,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry @@ -118,14 +117,13 @@ async def test_block_restored_motion_switch( entry = await init_integration( hass, 1, sleep_period=1000, model=model, skip_setup=True ) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( hass, SWITCH_DOMAIN, "test_name_motion_detection", "sensor_0-motionActive", entry, - device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_OFF)]) @@ -155,14 +153,13 @@ async def test_block_restored_motion_switch_no_last_state( entry = await init_integration( hass, 1, sleep_period=1000, model=model, skip_setup=True ) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( hass, SWITCH_DOMAIN, "test_name_motion_detection", "sensor_0-motionActive", entry, - device_id=device.id, ) monkeypatch.setattr(mock_block_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) @@ -190,7 +187,7 @@ async def test_block_device_unique_ids( async def test_block_set_state_connection_error( - hass: HomeAssistant, mock_block_device, monkeypatch: pytest.MonkeyPatch + hass: HomeAssistant, mock_block_device, monkeypatch ) -> None: """Test block device set state connection error.""" 
monkeypatch.setattr( @@ -433,201 +430,3 @@ async def test_wall_display_relay_mode( entry = entity_registry.async_get(switch_entity_id) assert entry assert entry.unique_id == "123456789ABC-switch:0" - - -@pytest.mark.parametrize( - ("name", "entity_id"), - [ - ("Virtual switch", "switch.test_name_virtual_switch"), - (None, "switch.test_name_boolean_200"), - ], -) -async def test_rpc_device_virtual_switch( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - name: str | None, - entity_id: str, -) -> None: - """Test a virtual switch for RPC device.""" - config = deepcopy(mock_rpc_device.config) - config["boolean:200"] = { - "name": name, - "meta": {"ui": {"view": "toggle"}}, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["boolean:200"] = {"value": True} - monkeypatch.setattr(mock_rpc_device, "status", status) - - await init_integration(hass, 3) - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_ON - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == "123456789ABC-boolean:200-boolean" - - monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", False) - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_OFF - - monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", True) - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON - - -async def test_rpc_device_virtual_binary_sensor( - hass: HomeAssistant, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test that a switch entity has not been created for a virtual binary sensor.""" - config = deepcopy(mock_rpc_device.config) - config["boolean:200"] = {"name": None, "meta": {"ui": {"view": "label"}}} - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["boolean:200"] = {"value": True} - monkeypatch.setattr(mock_rpc_device, "status", status) - - entity_id = "switch.test_name_boolean_200" - - await init_integration(hass, 3) - - state = hass.states.get(entity_id) - assert not state - - -async def test_rpc_remove_virtual_switch_when_mode_label( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test if the virtual switch will be removed if the mode has been changed to a label.""" - config = deepcopy(mock_rpc_device.config) - config["boolean:200"] = {"name": None, "meta": {"ui": {"view": "label"}}} - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["boolean:200"] = {"value": True} - monkeypatch.setattr(mock_rpc_device, "status", status) - - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - SWITCH_DOMAIN, - "test_name_boolean_200", - "boolean:200-boolean", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = 
entity_registry.async_get(entity_id) - assert not entry - - -async def test_rpc_remove_virtual_switch_when_orphaned( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, -) -> None: - """Check whether the virtual switch will be removed if it has been removed from the device configuration.""" - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - SWITCH_DOMAIN, - "test_name_boolean_200", - "boolean:200-boolean", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_rpc_device_script_switch( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test a script switch for RPC device.""" - config = deepcopy(mock_rpc_device.config) - key = "script:1" - script_name = "aioshelly_ble_integration" - entity_id = f"switch.test_name_{script_name}" - config[key] = { - "id": 1, - "name": script_name, - "enable": False, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status[key] = { - "running": True, - } - monkeypatch.setattr(mock_rpc_device, "status", status) - - await init_integration(hass, 3) - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == f"123456789ABC-{key}-script" - - monkeypatch.setitem(mock_rpc_device.status[key], "running", False) - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mock_rpc_device.mock_update() - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - - monkeypatch.setitem(mock_rpc_device.status[key], "running", True) - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mock_rpc_device.mock_update() - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_ON diff --git a/tests/components/shelly/test_text.py b/tests/components/shelly/test_text.py deleted file mode 100644 index 19acb856f35..00000000000 --- a/tests/components/shelly/test_text.py +++ /dev/null @@ -1,129 +0,0 @@ -"""Tests for Shelly text platform.""" - -from copy import deepcopy -from unittest.mock import Mock - -import pytest - -from homeassistant.components.text import ( - ATTR_VALUE, - DOMAIN as TEXT_PLATFORM, - SERVICE_SET_VALUE, -) -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceRegistry -from homeassistant.helpers.entity_registry import EntityRegistry - -from . 
import init_integration, register_device, register_entity - - -@pytest.mark.parametrize( - ("name", "entity_id"), - [ - ("Virtual text", "text.test_name_virtual_text"), - (None, "text.test_name_text_203"), - ], -) -async def test_rpc_device_virtual_text( - hass: HomeAssistant, - entity_registry: EntityRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - name: str | None, - entity_id: str, -) -> None: - """Test a virtual text for RPC device.""" - config = deepcopy(mock_rpc_device.config) - config["text:203"] = { - "name": name, - "meta": {"ui": {"view": "field"}}, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["text:203"] = {"value": "lorem ipsum"} - monkeypatch.setattr(mock_rpc_device, "status", status) - - await init_integration(hass, 3) - - state = hass.states.get(entity_id) - assert state - assert state.state == "lorem ipsum" - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == "123456789ABC-text:203-text" - - monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") - mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "dolor sit amet" - - monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "sed do eiusmod") - await hass.services.async_call( - TEXT_PLATFORM, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: "sed do eiusmod"}, - blocking=True, - ) - mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "sed do eiusmod" - - -async def test_rpc_remove_virtual_text_when_mode_label( - hass: HomeAssistant, - entity_registry: EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test if the virtual text will be removed if the mode has been changed to a label.""" - config = deepcopy(mock_rpc_device.config) - config["text:200"] = {"name": None, "meta": {"ui": {"view": "label"}}} - monkeypatch.setattr(mock_rpc_device, "config", config) - - status = deepcopy(mock_rpc_device.status) - status["text:200"] = {"value": "lorem ipsum"} - monkeypatch.setattr(mock_rpc_device, "status", status) - - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - TEXT_PLATFORM, - "test_name_text_200", - "text:200-text", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry - - -async def test_rpc_remove_virtual_text_when_orphaned( - hass: HomeAssistant, - entity_registry: EntityRegistry, - device_registry: DeviceRegistry, - mock_rpc_device: Mock, -) -> None: - """Check whether the virtual text will be removed if it has been removed from the device configuration.""" - config_entry = await init_integration(hass, 3, skip_setup=True) - device_entry = register_device(device_registry, config_entry) - entity_id = register_entity( - hass, - TEXT_PLATFORM, - "test_name_text_200", - "text:200-text", - config_entry, - device_id=device_entry.id, - ) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - entry = entity_registry.async_get(entity_id) - assert not entry diff --git a/tests/components/shelly/test_update.py b/tests/components/shelly/test_update.py index cd4cdf877a5..8448c116815 100644 --- 
a/tests/components/shelly/test_update.py +++ b/tests/components/shelly/test_update.py @@ -16,7 +16,6 @@ from homeassistant.components.update import ( ATTR_INSTALLED_VERSION, ATTR_LATEST_VERSION, ATTR_RELEASE_URL, - ATTR_UPDATE_PERCENTAGE, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, UpdateEntityFeature, @@ -54,18 +53,17 @@ async def test_block_update( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test block device update entity.""" - entity_id = "update.test_name_firmware" - monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1.0.0") - monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2.0.0") + entity_id = "update.test_name_firmware_update" + monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1") + monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2") monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" - assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" + assert state.attributes[ATTR_INSTALLED_VERSION] == "1" + assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None supported_feat = state.attributes[ATTR_SUPPORTED_FEATURES] assert supported_feat == UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS @@ -79,21 +77,19 @@ async def test_block_update( state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" - assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" + assert state.attributes[ATTR_INSTALLED_VERSION] == "1" + assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is True - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_RELEASE_URL] == GEN1_RELEASE_URL - monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2.0.0") + monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2") await mock_rest_update(hass, freezer) state = hass.states.get(entity_id) assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == "2.0.0" - assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" + assert state.attributes[ATTR_INSTALLED_VERSION] == "2" + assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None entry = entity_registry.async_get(entity_id) assert entry @@ -109,31 +105,27 @@ async def test_block_beta_update( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test block device beta update entity.""" - entity_id = "update.test_name_beta_firmware" - monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1.0.0") - monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2.0.0") + entity_id = "update.test_name_beta_firmware_update" + monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1") + monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2") monkeypatch.setitem(mock_block_device.status["update"], "beta_version", "") monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) state = hass.states.get(entity_id) assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == 
"1.0.0" - assert state.attributes[ATTR_LATEST_VERSION] == "1.0.0" + assert state.attributes[ATTR_INSTALLED_VERSION] == "1" + assert state.attributes[ATTR_LATEST_VERSION] == "1" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - monkeypatch.setitem( - mock_block_device.status["update"], "beta_version", "2.0.0-beta" - ) + monkeypatch.setitem(mock_block_device.status["update"], "beta_version", "2b") await mock_rest_update(hass, freezer) state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" - assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" + assert state.attributes[ATTR_INSTALLED_VERSION] == "1" + assert state.attributes[ATTR_LATEST_VERSION] == "2b" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_RELEASE_URL] is None await hass.services.async_call( @@ -146,20 +138,18 @@ async def test_block_beta_update( state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" - assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" + assert state.attributes[ATTR_INSTALLED_VERSION] == "1" + assert state.attributes[ATTR_LATEST_VERSION] == "2b" assert state.attributes[ATTR_IN_PROGRESS] is True - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2.0.0-beta") + monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2b") await mock_rest_update(hass, freezer) state = hass.states.get(entity_id) assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == "2.0.0-beta" - assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" + assert state.attributes[ATTR_INSTALLED_VERSION] == "2b" + assert state.attributes[ATTR_LATEST_VERSION] == "2b" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None entry = entity_registry.async_get(entity_id) assert entry @@ -174,8 +164,8 @@ async def test_block_update_connection_error( caplog: pytest.LogCaptureFixture, ) -> None: """Test block device update connection error.""" - monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1.0.0") - monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2.0.0") + monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1") + monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2") monkeypatch.setattr( mock_block_device, "trigger_ota_update", @@ -187,7 +177,7 @@ async def test_block_update_connection_error( await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.test_name_firmware"}, + {ATTR_ENTITY_ID: "update.test_name_firmware_update"}, blocking=True, ) assert "Error starting OTA update" in str(excinfo.value) @@ -200,8 +190,8 @@ async def test_block_update_auth_error( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test block device update authentication error.""" - monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1.0.0") - monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2.0.0") + monkeypatch.setitem(mock_block_device.status["update"], "old_version", "1") + monkeypatch.setitem(mock_block_device.status["update"], "new_version", "2") monkeypatch.setattr( mock_block_device, "trigger_ota_update", @@ -214,7 +204,7 @@ async 
def test_block_update_auth_error( await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.test_name_firmware"}, + {ATTR_ENTITY_ID: "update.test_name_firmware_update"}, blocking=True, ) @@ -232,51 +222,6 @@ async def test_block_update_auth_error( assert flow["context"].get("entry_id") == entry.entry_id -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_block_version_compare( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_block_device: Mock, - entity_registry: EntityRegistry, - monkeypatch: pytest.MonkeyPatch, -) -> None: - """Test block device custom firmware version comparison.""" - - STABLE = "20230913-111730/v1.14.0-gcb84623" - BETA = "20231107-162609/v1.14.1-rc1-g0617c15" - - entity_id_beta = "update.test_name_beta_firmware" - entity_id_latest = "update.test_name_firmware" - monkeypatch.setitem(mock_block_device.status["update"], "old_version", STABLE) - monkeypatch.setitem(mock_block_device.status["update"], "new_version", "") - monkeypatch.setitem(mock_block_device.status["update"], "beta_version", BETA) - monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) - await init_integration(hass, 1) - - state = hass.states.get(entity_id_latest) - assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == STABLE - assert state.attributes[ATTR_LATEST_VERSION] == STABLE - state = hass.states.get(entity_id_beta) - assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == STABLE - assert state.attributes[ATTR_LATEST_VERSION] == BETA - - monkeypatch.setitem(mock_block_device.status["update"], "old_version", BETA) - monkeypatch.setitem(mock_block_device.status["update"], "new_version", STABLE) - monkeypatch.setitem(mock_block_device.status["update"], "beta_version", BETA) - await mock_rest_update(hass, freezer) - - state = hass.states.get(entity_id_latest) - assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == BETA - assert state.attributes[ATTR_LATEST_VERSION] == STABLE - state = hass.states.get(entity_id_beta) - assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == BETA - assert state.attributes[ATTR_LATEST_VERSION] == BETA - - async def test_rpc_update( hass: HomeAssistant, mock_rpc_device: Mock, @@ -284,7 +229,7 @@ async def test_rpc_update( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC device update entity.""" - entity_id = "update.test_name_firmware" + entity_id = "update.test_name_firmware_update" monkeypatch.setitem(mock_rpc_device.shelly, "ver", "1") monkeypatch.setitem( mock_rpc_device.status["sys"], @@ -300,7 +245,6 @@ async def test_rpc_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None supported_feat = state.attributes[ATTR_SUPPORTED_FEATURES] assert supported_feat == UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS @@ -318,7 +262,6 @@ async def test_rpc_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is True - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_RELEASE_URL] == GEN2_RELEASE_URL inject_rpc_device_event( @@ -336,9 +279,7 @@ async def test_rpc_update( }, ) - state = hass.states.get(entity_id) - assert 
state.attributes[ATTR_IN_PROGRESS] is True - assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 0 + assert hass.states.get(entity_id).attributes[ATTR_IN_PROGRESS] == 0 inject_rpc_device_event( monkeypatch, @@ -356,9 +297,7 @@ async def test_rpc_update( }, ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_IN_PROGRESS] is True - assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 50 + assert hass.states.get(entity_id).attributes[ATTR_IN_PROGRESS] == 50 inject_rpc_device_event( monkeypatch, @@ -382,7 +321,6 @@ async def test_rpc_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None entry = entity_registry.async_get(entity_id) assert entry @@ -396,7 +334,6 @@ async def test_rpc_sleeping_update( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC sleeping device update entity.""" - monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) monkeypatch.setitem(mock_rpc_device.shelly, "ver", "1") monkeypatch.setitem( @@ -406,7 +343,7 @@ async def test_rpc_sleeping_update( "stable": {"version": "2"}, }, ) - entity_id = f"{UPDATE_DOMAIN}.test_name_firmware" + entity_id = f"{UPDATE_DOMAIN}.test_name_firmware_update" await init_integration(hass, 2, sleep_period=1000) # Entity should be created when device is online @@ -421,7 +358,6 @@ async def test_rpc_sleeping_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) assert state.attributes[ATTR_RELEASE_URL] == GEN2_RELEASE_URL @@ -433,7 +369,6 @@ async def test_rpc_sleeping_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) entry = entity_registry.async_get(entity_id) @@ -449,14 +384,13 @@ async def test_rpc_restored_sleeping_update( ) -> None: """Test RPC restored update entity.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( hass, UPDATE_DOMAIN, - "test_name_firmware", + "test_name_firmware_update", "sys-fwupdate", entry, - device_id=device.id, ) attr = {ATTR_INSTALLED_VERSION: "1", ATTR_LATEST_VERSION: "2"} @@ -473,7 +407,6 @@ async def test_rpc_restored_sleeping_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) # Make device online @@ -490,7 +423,6 @@ async def test_rpc_restored_sleeping_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) @@ -510,14 +442,13 @@ async 
def test_rpc_restored_sleeping_update_no_last_state( }, ) entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - device = register_device(device_registry, entry) + register_device(device_registry, entry) entity_id = register_entity( hass, UPDATE_DOMAIN, - "test_name_firmware", + "test_name_firmware_update", "sys-fwupdate", entry, - device_id=device.id, ) monkeypatch.setattr(mock_rpc_device, "initialized", False) @@ -541,7 +472,6 @@ async def test_rpc_restored_sleeping_update_no_last_state( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) @@ -554,7 +484,7 @@ async def test_rpc_beta_update( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC device beta update entity.""" - entity_id = "update.test_name_beta_firmware" + entity_id = "update.test_name_beta_firmware_update" monkeypatch.setitem(mock_rpc_device.shelly, "ver", "1") monkeypatch.setitem( mock_rpc_device.status["sys"], @@ -571,7 +501,6 @@ async def test_rpc_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "1" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_RELEASE_URL] is None monkeypatch.setitem( @@ -589,7 +518,6 @@ async def test_rpc_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None await hass.services.async_call( UPDATE_DOMAIN, @@ -618,8 +546,7 @@ async def test_rpc_beta_update( assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" - assert state.attributes[ATTR_IN_PROGRESS] is True - assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 0 + assert state.attributes[ATTR_IN_PROGRESS] == 0 inject_rpc_device_event( monkeypatch, @@ -637,9 +564,7 @@ async def test_rpc_beta_update( }, ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_IN_PROGRESS] is True - assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 40 + assert hass.states.get(entity_id).attributes[ATTR_IN_PROGRESS] == 40 inject_rpc_device_event( monkeypatch, @@ -663,7 +588,6 @@ async def test_rpc_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2b" assert state.attributes[ATTR_LATEST_VERSION] == "2b" assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None entry = entity_registry.async_get(entity_id) assert entry @@ -705,7 +629,7 @@ async def test_rpc_update_errors( await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.test_name_firmware"}, + {ATTR_ENTITY_ID: "update.test_name_firmware_update"}, blocking=True, ) assert error in str(excinfo.value) @@ -740,7 +664,7 @@ async def test_rpc_update_auth_error( await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.test_name_firmware"}, + {ATTR_ENTITY_ID: "update.test_name_firmware_update"}, blocking=True, ) diff --git a/tests/components/shelly/test_utils.py b/tests/components/shelly/test_utils.py index 17bcd6e3d40..7c4ea8accae 100644 --- a/tests/components/shelly/test_utils.py +++ 
b/tests/components/shelly/test_utils.py @@ -23,7 +23,6 @@ from homeassistant.components.shelly.utils import ( get_block_device_sleep_period, get_block_input_triggers, get_device_uptime, - get_host, get_number_of_channels, get_release_url, get_rpc_channel_name, @@ -236,42 +235,7 @@ async def test_get_block_input_triggers( async def test_get_rpc_channel_name(mock_rpc_device: Mock) -> None: """Test get RPC channel name.""" assert get_rpc_channel_name(mock_rpc_device, "input:0") == "Test name input 0" - assert get_rpc_channel_name(mock_rpc_device, "input:3") == "Test name Input 3" - - -@pytest.mark.parametrize( - ("component", "expected"), - [ - ("cover", "Cover"), - ("input", "Input"), - ("light", "Light"), - ("rgb", "RGB light"), - ("rgbw", "RGBW light"), - ("switch", "Switch"), - ("thermostat", "Thermostat"), - ], -) -async def test_get_rpc_channel_name_multiple_components( - mock_rpc_device: Mock, - monkeypatch: pytest.MonkeyPatch, - component: str, - expected: str, -) -> None: - """Test get RPC channel name when there is more components of the same type.""" - config = { - f"{component}:0": {"name": None}, - f"{component}:1": {"name": None}, - } - monkeypatch.setattr(mock_rpc_device, "config", config) - - assert ( - get_rpc_channel_name(mock_rpc_device, f"{component}:0") - == f"Test name {expected} 0" - ) - assert ( - get_rpc_channel_name(mock_rpc_device, f"{component}:1") - == f"Test name {expected} 1" - ) + assert get_rpc_channel_name(mock_rpc_device, "input:3") == "Test name input_3" async def test_get_rpc_input_triggers( @@ -310,19 +274,3 @@ def test_get_release_url( result = get_release_url(gen, model, beta) assert result is expected - - -@pytest.mark.parametrize( - ("host", "expected"), - [ - ("shelly_device.local", "shelly_device.local"), - ("192.168.178.12", "192.168.178.12"), - ( - "2001:0db8:85a3:0000:0000:8a2e:0370:7334", - "[2001:0db8:85a3:0000:0000:8a2e:0370:7334]", - ), - ], -) -def test_get_host(host: str, expected: str) -> None: - """Test get_host function.""" - assert get_host(host) == expected diff --git a/tests/components/shelly/test_valve.py b/tests/components/shelly/test_valve.py index b35ce98b664..58b55e4f2dd 100644 --- a/tests/components/shelly/test_valve.py +++ b/tests/components/shelly/test_valve.py @@ -5,8 +5,16 @@ from unittest.mock import Mock from aioshelly.const import MODEL_GAS import pytest -from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN, ValveState -from homeassistant.const import ATTR_ENTITY_ID, SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE +from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_CLOSE_VALVE, + SERVICE_OPEN_VALVE, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -29,7 +37,7 @@ async def test_block_device_gas_valve( assert entry assert entry.unique_id == "123456789ABC-valve_0-valve" - assert hass.states.get(entity_id).state == ValveState.CLOSED + assert hass.states.get(entity_id).state == STATE_CLOSED await hass.services.async_call( VALVE_DOMAIN, @@ -40,7 +48,7 @@ async def test_block_device_gas_valve( state = hass.states.get(entity_id) assert state - assert state.state == ValveState.OPENING + assert state.state == STATE_OPENING monkeypatch.setattr(mock_block_device.blocks[GAS_VALVE_BLOCK_ID], "valve", "opened") mock_block_device.mock_update() @@ -48,7 +56,7 @@ async def test_block_device_gas_valve( state = hass.states.get(entity_id) 
assert state - assert state.state == ValveState.OPEN + assert state.state == STATE_OPEN await hass.services.async_call( VALVE_DOMAIN, @@ -59,7 +67,7 @@ async def test_block_device_gas_valve( state = hass.states.get(entity_id) assert state - assert state.state == ValveState.CLOSING + assert state.state == STATE_CLOSING monkeypatch.setattr(mock_block_device.blocks[GAS_VALVE_BLOCK_ID], "valve", "closed") mock_block_device.mock_update() @@ -67,4 +75,4 @@ async def test_block_device_gas_valve( state = hass.states.get(entity_id) assert state - assert state.state == ValveState.CLOSED + assert state.state == STATE_CLOSED diff --git a/tests/components/shopping_list/test_init.py b/tests/components/shopping_list/test_init.py index 276602f794e..4e758764e3d 100644 --- a/tests/components/shopping_list/test_init.py +++ b/tests/components/shopping_list/test_init.py @@ -32,10 +32,8 @@ async def test_add_item(hass: HomeAssistant, sl_setup) -> None: """Test adding an item intent.""" response = await intent.async_handle( - hass, "test", "HassShoppingListAddItem", {"item": {"value": " beer "}} + hass, "test", "HassShoppingListAddItem", {"item": {"value": "beer"}} ) - assert len(hass.data[DOMAIN].items) == 1 - assert hass.data[DOMAIN].items[0]["name"] == "beer" # name was trimmed # Response text is now handled by default conversation agent assert response.response_type == intent.IntentResponseType.ACTION_DONE diff --git a/tests/components/shopping_list/test_todo.py b/tests/components/shopping_list/test_todo.py index c54a6abfd6f..173544d0be2 100644 --- a/tests/components/shopping_list/test_todo.py +++ b/tests/components/shopping_list/test_todo.py @@ -1,18 +1,11 @@ """Test shopping list todo platform.""" -from collections.abc import Callable, Coroutine +from collections.abc import Awaitable, Callable from typing import Any import pytest -from homeassistant.components.todo import ( - ATTR_ITEM, - ATTR_RENAME, - ATTR_STATUS, - DOMAIN as TODO_DOMAIN, - TodoServices, -) -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.components.todo import DOMAIN as TODO_DOMAIN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -20,12 +13,11 @@ from tests.typing import WebSocketGenerator TEST_ENTITY = "todo.shopping_list" -type WsGetItemsType = Callable[[], Coroutine[Any, Any, list[dict[str, str]]]] -type WsMoveItemType = Callable[[str, str | None], Coroutine[Any, Any, dict[str, Any]]] - @pytest.fixture -async def ws_get_items(hass_ws_client: WebSocketGenerator) -> WsGetItemsType: +async def ws_get_items( + hass_ws_client: WebSocketGenerator, +) -> Callable[[], Awaitable[dict[str, str]]]: """Fixture to fetch items from the todo websocket.""" async def get() -> list[dict[str, str]]: @@ -45,7 +37,9 @@ async def ws_get_items(hass_ws_client: WebSocketGenerator) -> WsGetItemsType: @pytest.fixture -async def ws_move_item(hass_ws_client: WebSocketGenerator) -> WsMoveItemType: +async def ws_move_item( + hass_ws_client: WebSocketGenerator, +) -> Callable[[str, str | None], Awaitable[None]]: """Fixture to move an item in the todo list.""" async def move(uid: str, previous_uid: str | None) -> dict[str, Any]: @@ -68,7 +62,7 @@ async def test_get_items( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, sl_setup: None, - ws_get_items: WsGetItemsType, + ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test creating a shopping list item with the WS API and verifying with To-do API.""" client = await hass_ws_client(hass) @@ -99,16 +93,16 @@ async 
def test_get_items( async def test_add_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: WsGetItemsType, + ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test adding shopping_list item and listing it.""" await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, + "add_item", { - ATTR_ITEM: "soda", + "item": "soda", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -126,14 +120,14 @@ async def test_add_item( async def test_remove_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: WsGetItemsType, + ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test removing a todo item.""" await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "soda"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "add_item", + {"item": "soda"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) items = await ws_get_items() @@ -148,11 +142,11 @@ async def test_remove_item( await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, + "remove_item", { - ATTR_ITEM: [items[0]["uid"]], + "item": [items[0]["uid"]], }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -167,18 +161,18 @@ async def test_remove_item( async def test_bulk_remove( hass: HomeAssistant, sl_setup: None, - ws_get_items: WsGetItemsType, + ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test removing a todo item.""" for _i in range(5): await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, + "add_item", { - ATTR_ITEM: "soda", + "item": "soda", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -192,11 +186,11 @@ async def test_bulk_remove( await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, + "remove_item", { - ATTR_ITEM: uids, + "item": uids, }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -211,18 +205,18 @@ async def test_bulk_remove( async def test_update_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: WsGetItemsType, + ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test updating a todo item.""" # Create new item await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, + "add_item", { - ATTR_ITEM: "soda", + "item": "soda", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -240,12 +234,12 @@ async def test_update_item( # Mark item completed await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, + "update_item", { - ATTR_ITEM: "soda", - ATTR_STATUS: "completed", + "item": "soda", + "status": "completed", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -264,18 +258,18 @@ async def test_update_item( async def test_partial_update_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: WsGetItemsType, + ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test updating a todo item with partial information.""" # Create new item await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, + "add_item", { - ATTR_ITEM: "soda", + "item": "soda", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -293,12 +287,12 @@ async def test_partial_update_item( # Mark item completed without changing the summary await hass.services.async_call( TODO_DOMAIN, - 
TodoServices.UPDATE_ITEM, + "update_item", { - ATTR_ITEM: item["uid"], - ATTR_STATUS: "completed", + "item": item["uid"], + "status": "completed", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -316,12 +310,12 @@ async def test_partial_update_item( # Change the summary without changing the status await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, + "update_item", { - ATTR_ITEM: item["uid"], - ATTR_RENAME: "other summary", + "item": item["uid"], + "rename": "other summary", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -340,19 +334,19 @@ async def test_partial_update_item( async def test_update_invalid_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: WsGetItemsType, + ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test updating a todo item that does not exist.""" with pytest.raises(ServiceValidationError, match="Unable to find"): await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, + "update_item", { - ATTR_ITEM: "invalid-uid", - ATTR_RENAME: "Example task", + "item": "invalid-uid", + "rename": "Example task", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -386,8 +380,8 @@ async def test_update_invalid_item( async def test_move_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: WsGetItemsType, - ws_move_item: WsMoveItemType, + ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_move_item: Callable[[str, str | None], Awaitable[dict[str, Any]]], src_idx: int, dst_idx: int | None, expected_items: list[str], @@ -397,11 +391,11 @@ async def test_move_item( for i in range(1, 5): await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, + "add_item", { - ATTR_ITEM: f"item {i}", + "item": f"item {i}", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -428,16 +422,16 @@ async def test_move_item( async def test_move_invalid_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: WsGetItemsType, - ws_move_item: WsMoveItemType, + ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_move_item: Callable[[str, int | None], Awaitable[dict[str, Any]]], ) -> None: """Test moving an item that does not exist.""" await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "soda"}, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + "add_item", + {"item": "soda"}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -462,11 +456,11 @@ async def test_subscribe_item( # Create new item await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, + "add_item", { - ATTR_ITEM: "soda", + "item": "soda", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -497,12 +491,12 @@ async def test_subscribe_item( # Rename item item completed await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, + "update_item", { - ATTR_ITEM: "soda", - ATTR_RENAME: "milk", + "item": "soda", + "rename": "milk", }, - target={ATTR_ENTITY_ID: TEST_ENTITY}, + target={"entity_id": TEST_ENTITY}, blocking=True, ) diff --git a/tests/components/sia/test_config_flow.py b/tests/components/sia/test_config_flow.py index b0d83855a25..95de53d7fbe 100644 --- a/tests/components/sia/test_config_flow.py +++ b/tests/components/sia/test_config_flow.py @@ -1,6 +1,5 @@ """Test the sia config flow.""" -from collections.abc import Generator from unittest.mock 
import patch import pytest @@ -17,7 +16,6 @@ from homeassistant.components.sia.const import ( CONF_ZONES, DOMAIN, ) -from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_PORT, CONF_PROTOCOL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -107,7 +105,7 @@ ADDITIONAL_OPTIONS = { @pytest.fixture -async def flow_at_user_step(hass: HomeAssistant) -> ConfigFlowResult: +async def flow_at_user_step(hass): """Return a initialized flow.""" return await hass.config_entries.flow.async_init( DOMAIN, @@ -116,9 +114,7 @@ async def flow_at_user_step(hass: HomeAssistant) -> ConfigFlowResult: @pytest.fixture -async def entry_with_basic_config( - hass: HomeAssistant, flow_at_user_step: ConfigFlowResult -) -> ConfigFlowResult: +async def entry_with_basic_config(hass, flow_at_user_step): """Return a entry with a basic config.""" with patch("homeassistant.components.sia.async_setup_entry", return_value=True): return await hass.config_entries.flow.async_configure( @@ -127,9 +123,7 @@ async def entry_with_basic_config( @pytest.fixture -async def flow_at_add_account_step( - hass: HomeAssistant, flow_at_user_step: ConfigFlowResult -) -> ConfigFlowResult: +async def flow_at_add_account_step(hass, flow_at_user_step): """Return a initialized flow at the additional account step.""" return await hass.config_entries.flow.async_configure( flow_at_user_step["flow_id"], BASIC_CONFIG_ADDITIONAL @@ -137,9 +131,7 @@ async def flow_at_add_account_step( @pytest.fixture -async def entry_with_additional_account_config( - hass: HomeAssistant, flow_at_add_account_step: ConfigFlowResult -) -> ConfigFlowResult: +async def entry_with_additional_account_config(hass, flow_at_add_account_step): """Return a entry with a two account config.""" with patch("homeassistant.components.sia.async_setup_entry", return_value=True): return await hass.config_entries.flow.async_configure( @@ -147,7 +139,7 @@ async def entry_with_additional_account_config( ) -async def setup_sia(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: +async def setup_sia(hass: HomeAssistant, config_entry: MockConfigEntry): """Add mock config to HASS.""" assert await async_setup_component(hass, DOMAIN, {}) config_entry.add_to_hass(hass) @@ -155,21 +147,23 @@ async def setup_sia(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: await hass.async_block_till_done() -async def test_form_start_user(flow_at_user_step: ConfigFlowResult) -> None: +async def test_form_start_user(hass: HomeAssistant, flow_at_user_step) -> None: """Start the form and check if you get the right id and schema for the user step.""" assert flow_at_user_step["step_id"] == "user" assert flow_at_user_step["errors"] is None assert flow_at_user_step["data_schema"] == HUB_SCHEMA -async def test_form_start_account(flow_at_add_account_step: ConfigFlowResult) -> None: +async def test_form_start_account( + hass: HomeAssistant, flow_at_add_account_step +) -> None: """Start the form and check if you get the right id and schema for the additional account step.""" assert flow_at_add_account_step["step_id"] == "add_account" assert flow_at_add_account_step["errors"] is None assert flow_at_add_account_step["data_schema"] == ACCOUNT_SCHEMA -async def test_create(entry_with_basic_config: ConfigFlowResult) -> None: +async def test_create(hass: HomeAssistant, entry_with_basic_config) -> None: """Test we create a entry through the form.""" assert entry_with_basic_config["type"] is FlowResultType.CREATE_ENTRY 
assert ( @@ -181,7 +175,7 @@ async def test_create(entry_with_basic_config: ConfigFlowResult) -> None: async def test_create_additional_account( - entry_with_additional_account_config: ConfigFlowResult, + hass: HomeAssistant, entry_with_additional_account_config ) -> None: """Test we create a config with two accounts.""" assert entry_with_additional_account_config["type"] is FlowResultType.CREATE_ENTRY @@ -216,7 +210,7 @@ async def test_abort_form(hass: HomeAssistant) -> None: @pytest.fixture(autouse=True) -def mock_sia() -> Generator[None]: +def mock_sia(): """Mock SIAClient.""" with patch("homeassistant.components.sia.hub.SIAClient", autospec=True): yield diff --git a/tests/components/simplefin/__init__.py b/tests/components/simplefin/__init__.py deleted file mode 100644 index e4c7848ba9a..00000000000 --- a/tests/components/simplefin/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for SimpleFin.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/simplefin/conftest.py b/tests/components/simplefin/conftest.py deleted file mode 100644 index 328e16ccbd0..00000000000 --- a/tests/components/simplefin/conftest.py +++ /dev/null @@ -1,83 +0,0 @@ -"""Test fixtures for SimpleFIN.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -from simplefin4py import FinancialData -from simplefin4py.exceptions import SimpleFinInvalidClaimTokenError - -from homeassistant.components.simplefin import CONF_ACCESS_URL -from homeassistant.components.simplefin.const import DOMAIN - -from tests.common import MockConfigEntry, load_fixture - -MOCK_ACCESS_URL = "https://i:am@yomama.house.com" - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Mock setting up a config entry.""" - with patch( - "homeassistant.components.simplefin.async_setup_entry", return_value=True - ) as mock_setup: - yield mock_setup - - -@pytest.fixture -async def mock_config_entry() -> MockConfigEntry: - """Fixture for MockConfigEntry.""" - return MockConfigEntry( - domain=DOMAIN, - data={CONF_ACCESS_URL: MOCK_ACCESS_URL}, - version=1, - ) - - -@pytest.fixture -def mock_claim_setup_token() -> str: - """Fixture to mock the claim_setup_token method of SimpleFin.""" - with patch( - "homeassistant.components.simplefin.config_flow.SimpleFin.claim_setup_token", - ) as mock_claim_setup_token: - mock_claim_setup_token.return_value = "https://i:am@yomama.comma" - yield - - -@pytest.fixture -def mock_decode_claim_token_invalid_then_good() -> str: - """Fixture to mock the decode_claim_token method of SimpleFin.""" - return_values = [SimpleFinInvalidClaimTokenError, "valid_return_value"] - with patch( - "homeassistant.components.simplefin.config_flow.SimpleFin.decode_claim_token", - new_callable=lambda: MagicMock(side_effect=return_values), - ): - yield - - -@pytest.fixture -def mock_simplefin_client() -> Generator[AsyncMock]: - """Mock a SimpleFin client.""" - - with ( - patch( - "homeassistant.components.simplefin.SimpleFin", - autospec=True, - ) as mock_client, - patch( - "homeassistant.components.simplefin.config_flow.SimpleFin", - new=mock_client, - ), - ): - mock_client.claim_setup_token.return_value = MOCK_ACCESS_URL - 
client = mock_client.return_value - - fixture_data = load_fixture("fin_data.json", DOMAIN) - fin_data = FinancialData.from_json(fixture_data) - - assert fin_data.accounts != [] - client.fetch_data.return_value = fin_data - - client.access_url = MOCK_ACCESS_URL - - yield mock_client diff --git a/tests/components/simplefin/fixtures/fin_data.json b/tests/components/simplefin/fixtures/fin_data.json deleted file mode 100644 index bd35945c12b..00000000000 --- a/tests/components/simplefin/fixtures/fin_data.json +++ /dev/null @@ -1,173 +0,0 @@ -{ - "errors": [ - "Connection to Investments may need attention", - "Connection to The Bank of Go may need attention" - ], - "accounts": [ - { - "org": { - "domain": "www.newwealthfront.com", - "name": "The Bank of Go", - "sfin-url": "https://beta-bridge.simplefin.org/simplefin", - "url": "https://www.newwealthfront.com" - }, - "id": "ACT-1a2b3c4d-5e6f-7g8h-9i0j", - "name": "The Bank", - "currency": "USD", - "balance": "7777.77", - "available-balance": "7777.77", - "balance-date": 1705413843, - "transactions": [ - { - "id": "12394832938403", - "posted": 793090572, - "amount": "-1234.56", - "description": "Enchanted Bait Shop", - "payee": "Uncle Frank", - "memo": "Some memo", - "transacted_at": 793080572 - } - ], - "extra": { - "account-open-date": 978360153 - }, - "holdings": [] - }, - { - "org": { - "domain": "www.newfidelity.com", - "name": "Investments", - "sfin-url": "https://beta-bridge.simplefin.org/simplefin", - "url": "https://www.newfidelity.com" - }, - "id": "ACT-1k2l3m4n-5o6p-7q8r-9s0t", - "name": "My Checking", - "currency": "USD", - "balance": "12345.67", - "available-balance": "5432.10", - "balance-date": 1705413319, - "transactions": [], - "holdings": [] - }, - { - "org": { - "domain": "www.newhfcu.org", - "name": "The Bank of Go", - "sfin-url": "https://beta-bridge.simplefin.org/simplefin", - "url": "https://www.newhfcu.org/" - }, - "id": "ACT-2a3b4c5d-6e7f-8g9h-0i1j", - "name": "PRIME SAVINGS", - "currency": "EUR", - "balance": "9876.54", - "available-balance": "8765.43", - "balance-date": 1705428861, - "transactions": [], - "holdings": [] - }, - { - "org": { - "domain": "www.randombank2.com", - "name": "Random Bank", - "sfin-url": "https://beta-bridge.simplefin.org/simplefin", - "url": "https://www.randombank2.com/" - }, - "id": "ACT-3a4b5c6d-7e8f-9g0h-1i2j", - "name": "Costco Anywhere Visa® Card", - "currency": "USD", - "balance": "-532.69", - "available-balance": "4321.98", - "balance-date": 1705429002, - "transactions": [], - "holdings": [] - }, - { - "org": { - "domain": "www.newfidelity.com", - "name": "Investments", - "sfin-url": "https://beta-bridge.simplefin.org/simplefin", - "url": "https://www.newfidelity.com" - }, - "id": "ACT-4k5l6m7n-8o9p-1q2r-3s4t", - "name": "Dr Evil", - "currency": "USD", - "balance": "1000000.00", - "available-balance": "13579.24", - "balance-date": 1705413319, - "transactions": [], - "holdings": [ - { - "id": "HOL-62eb5bb6-4aed-4fe1-bdbe-f28e127e359b", - "created": 1705413320, - "currency": "", - "cost_basis": "10000.00", - "description": "Fantastic FID GROWTH CO K6", - "market_value": "15000.00", - "purchase_price": "0.00", - "shares": "200.00", - "symbol": "FGKFX" - } - ] - }, - { - "org": { - "domain": "www.newfidelity.com", - "name": "Investments", - "sfin-url": "https://beta-bridge.simplefin.org/simplefin", - "url": "https://www.newfidelity.com" - }, - "id": "ACT-5k6l7m8n-9o0p-1q2r-3s4t", - "name": "NerdCorp Series B", - "currency": "EUR", - "balance": "13579.24", - "available-balance": "9876.54", - 
"balance-date": 1705413319, - "transactions": [], - "holdings": [ - { - "id": "HOL-08f775cd-eedf-4ee5-9f53-241c8efa5bf3", - "created": 1705413321, - "currency": "", - "cost_basis": "7500.00", - "description": "Mythical FID GROWTH CO K6", - "market_value": "9876.54", - "purchase_price": "0.00", - "shares": "150.00", - "symbol": "FGKFX" - } - ] - }, - { - "org": { - "domain": "www.randombank2.com", - "name": "Mythical RandomSavings", - "sfin-url": "https://beta-bridge.simplefin.org/simplefin", - "url": "https://www.randombank2.com/" - }, - "id": "ACT-6a7b8c9d-0e1f-2g3h-4i5j", - "name": "Unicorn Pot", - "currency": "USD", - "balance": "10000.00", - "available-balance": "7500.00", - "balance-date": 1705429002, - "transactions": [], - "holdings": [] - }, - { - "org": { - "domain": "www.randombank2.com", - "name": "Mythical RandomSavings", - "sfin-url": "https://beta-bridge.simplefin.org/simplefin", - "url": "https://www.randombank2.com/" - }, - "id": "ACT-7a8b9c0d-1e2f-3g4h-5i6j", - "name": "Castle Mortgage", - "currency": "USD", - "balance": "7500.00", - "available-balance": "5000.00", - "balance-date": 1705429002, - "transactions": [], - "holdings": [] - } - ] -} diff --git a/tests/components/simplefin/snapshots/test_binary_sensor.ambr b/tests/components/simplefin/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 44fe2a10b78..00000000000 --- a/tests/components/simplefin/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,385 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[binary_sensor.investments_dr_evil_possible_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.investments_dr_evil_possible_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Possible error', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.investments_dr_evil_possible_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Investments Dr Evil Possible error', - }), - 'context': , - 'entity_id': 'binary_sensor.investments_dr_evil_possible_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_all_entities[binary_sensor.investments_my_checking_possible_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.investments_my_checking_possible_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Possible error', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 
'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.investments_my_checking_possible_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Investments My Checking Possible error', - }), - 'context': , - 'entity_id': 'binary_sensor.investments_my_checking_possible_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_possible_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_possible_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Possible error', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_possible_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Investments NerdCorp Series B Possible error', - }), - 'context': , - 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_possible_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_possible_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_possible_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Possible error', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_possible_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Mythical RandomSavings Castle Mortgage Possible error', - }), - 'context': , - 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_possible_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_possible_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_possible_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Possible error', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_possible_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Mythical RandomSavings Unicorn Pot Possible error', - }), - 'context': , - 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_possible_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Possible error', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Possible error', - }), - 'context': , - 'entity_id': 'binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_possible_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_possible_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Possible error', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_possible_error-state] - StateSnapshot({ - 
'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'The Bank of Go PRIME SAVINGS Possible error', - }), - 'context': , - 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_possible_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_possible_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_possible_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Possible error', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_possible_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'The Bank of Go The Bank Possible error', - }), - 'context': , - 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_possible_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/simplefin/snapshots/test_sensor.ambr b/tests/components/simplefin/snapshots/test_sensor.ambr deleted file mode 100644 index c7dced9300e..00000000000 --- a/tests/components/simplefin/snapshots/test_sensor.ambr +++ /dev/null @@ -1,809 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[sensor.investments_dr_evil_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.investments_dr_evil_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': , - 'original_name': 'Balance', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_balance', - 'unit_of_measurement': 'USD', - }) -# --- -# name: test_all_entities[sensor.investments_dr_evil_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'monetary', - 'friendly_name': 'Investments Dr Evil Balance', - 'icon': , - 'state_class': , - 'unit_of_measurement': 'USD', - }), - 'context': , - 'entity_id': 'sensor.investments_dr_evil_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1000000.00', - }) -# --- -# name: test_all_entities[sensor.investments_dr_evil_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 
'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.investments_dr_evil_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.investments_dr_evil_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'timestamp', - 'friendly_name': 'Investments Dr Evil Data age', - }), - 'context': , - 'entity_id': 'sensor.investments_dr_evil_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-01-16T13:55:19+00:00', - }) -# --- -# name: test_all_entities[sensor.investments_my_checking_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.investments_my_checking_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': , - 'original_name': 'Balance', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_balance', - 'unit_of_measurement': 'USD', - }) -# --- -# name: test_all_entities[sensor.investments_my_checking_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'monetary', - 'friendly_name': 'Investments My Checking Balance', - 'icon': , - 'state_class': , - 'unit_of_measurement': 'USD', - }), - 'context': , - 'entity_id': 'sensor.investments_my_checking_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '12345.67', - }) -# --- -# name: test_all_entities[sensor.investments_my_checking_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.investments_my_checking_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.investments_my_checking_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'timestamp', - 'friendly_name': 'Investments My Checking Data age', - }), - 'context': , - 'entity_id': 'sensor.investments_my_checking_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-01-16T13:55:19+00:00', - }) -# 
--- -# name: test_all_entities[sensor.investments_nerdcorp_series_b_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.investments_nerdcorp_series_b_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': , - 'original_name': 'Balance', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_balance', - 'unit_of_measurement': 'EUR', - }) -# --- -# name: test_all_entities[sensor.investments_nerdcorp_series_b_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'monetary', - 'friendly_name': 'Investments NerdCorp Series B Balance', - 'icon': , - 'state_class': , - 'unit_of_measurement': 'EUR', - }), - 'context': , - 'entity_id': 'sensor.investments_nerdcorp_series_b_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '13579.24', - }) -# --- -# name: test_all_entities[sensor.investments_nerdcorp_series_b_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.investments_nerdcorp_series_b_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.investments_nerdcorp_series_b_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'timestamp', - 'friendly_name': 'Investments NerdCorp Series B Data age', - }), - 'context': , - 'entity_id': 'sensor.investments_nerdcorp_series_b_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-01-16T13:55:19+00:00', - }) -# --- -# name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': , - 'original_name': 'Balance', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': 'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_balance', - 'unit_of_measurement': 'USD', - }) -# --- -# name: 
test_all_entities[sensor.mythical_randomsavings_castle_mortgage_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'monetary', - 'friendly_name': 'Mythical RandomSavings Castle Mortgage Balance', - 'icon': , - 'state_class': , - 'unit_of_measurement': 'USD', - }), - 'context': , - 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '7500.00', - }) -# --- -# name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': 'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'timestamp', - 'friendly_name': 'Mythical RandomSavings Castle Mortgage Data age', - }), - 'context': , - 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-01-16T18:16:42+00:00', - }) -# --- -# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': , - 'original_name': 'Balance', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_balance', - 'unit_of_measurement': 'USD', - }) -# --- -# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'monetary', - 'friendly_name': 'Mythical RandomSavings Unicorn Pot Balance', - 'icon': , - 'state_class': , - 'unit_of_measurement': 'USD', - }), - 'context': , - 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10000.00', - }) -# --- -# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 
'entity_category': , - 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'timestamp', - 'friendly_name': 'Mythical RandomSavings Unicorn Pot Data age', - }), - 'context': , - 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-01-16T18:16:42+00:00', - }) -# --- -# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.random_bank_costco_anywhere_visa_r_card_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': , - 'original_name': 'Balance', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_balance', - 'unit_of_measurement': 'USD', - }) -# --- -# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'monetary', - 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Balance', - 'icon': , - 'state_class': , - 'unit_of_measurement': 'USD', - }), - 'context': , - 'entity_id': 'sensor.random_bank_costco_anywhere_visa_r_card_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '-532.69', - }) -# --- -# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.random_bank_costco_anywhere_visa_r_card_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'timestamp', - 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Data age', - }), - 
'context': , - 'entity_id': 'sensor.random_bank_costco_anywhere_visa_r_card_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-01-16T18:16:42+00:00', - }) -# --- -# name: test_all_entities[sensor.the_bank_of_go_prime_savings_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.the_bank_of_go_prime_savings_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': , - 'original_name': 'Balance', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'balance', - 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_balance', - 'unit_of_measurement': 'EUR', - }) -# --- -# name: test_all_entities[sensor.the_bank_of_go_prime_savings_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'monetary', - 'friendly_name': 'The Bank of Go PRIME SAVINGS Balance', - 'icon': , - 'state_class': , - 'unit_of_measurement': 'EUR', - }), - 'context': , - 'entity_id': 'sensor.the_bank_of_go_prime_savings_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '9876.54', - }) -# --- -# name: test_all_entities[sensor.the_bank_of_go_prime_savings_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.the_bank_of_go_prime_savings_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.the_bank_of_go_prime_savings_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'timestamp', - 'friendly_name': 'The Bank of Go PRIME SAVINGS Data age', - }), - 'context': , - 'entity_id': 'sensor.the_bank_of_go_prime_savings_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-01-16T18:14:21+00:00', - }) -# --- -# name: test_all_entities[sensor.the_bank_of_go_the_bank_balance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.the_bank_of_go_the_bank_balance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': , - 'original_name': 'Balance', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'balance', - 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_balance', - 'unit_of_measurement': 'USD', - }) -# --- -# name: test_all_entities[sensor.the_bank_of_go_the_bank_balance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'monetary', - 'friendly_name': 'The Bank of Go The Bank Balance', - 'icon': , - 'state_class': , - 'unit_of_measurement': 'USD', - }), - 'context': , - 'entity_id': 'sensor.the_bank_of_go_the_bank_balance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '7777.77', - }) -# --- -# name: test_all_entities[sensor.the_bank_of_go_the_bank_data_age-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.the_bank_of_go_the_bank_data_age', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Data age', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'age', - 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_age', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.the_bank_of_go_the_bank_data_age-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'timestamp', - 'friendly_name': 'The Bank of Go The Bank Data age', - }), - 'context': , - 'entity_id': 'sensor.the_bank_of_go_the_bank_data_age', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-01-16T14:04:03+00:00', - }) -# --- diff --git a/tests/components/simplefin/test_binary_sensor.py b/tests/components/simplefin/test_binary_sensor.py deleted file mode 100644 index 40c6882153d..00000000000 --- a/tests/components/simplefin/test_binary_sensor.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Test SimpleFin Sensor with Snapshot data.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_simplefin_client: AsyncMock, -) -> None: - """Test all entities.""" - with patch( - "homeassistant.components.simplefin.PLATFORMS", [Platform.BINARY_SENSOR] - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/simplefin/test_config_flow.py b/tests/components/simplefin/test_config_flow.py deleted file mode 100644 index c83f2aed62e..00000000000 --- a/tests/components/simplefin/test_config_flow.py +++ /dev/null @@ -1,164 +0,0 @@ -"""Test config flow.""" - -from unittest.mock import AsyncMock - -import pytest -from simplefin4py.exceptions import ( - SimpleFinAuthError, - SimpleFinClaimError, - SimpleFinInvalidAccountURLError, - SimpleFinInvalidClaimTokenError, - SimpleFinPaymentRequiredError, -) - -from homeassistant.components.simplefin import CONF_ACCESS_URL -from homeassistant.components.simplefin.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import entity_registry as er - -from .conftest import MOCK_ACCESS_URL - -from tests.common import MockConfigEntry - - -async def test_successful_claim( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_simplefin_client: AsyncMock, -) -> None: - """Test successful token claim in config flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert not result["errors"] - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_ACCESS_URL: "donJulio"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "SimpleFIN" - assert result["data"] == {CONF_ACCESS_URL: MOCK_ACCESS_URL} - - -async def test_already_setup( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_simplefin_client: AsyncMock, -) -> None: - """Test all entities.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_ACCESS_URL: MOCK_ACCESS_URL}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_access_url( - hass: HomeAssistant, - mock_simplefin_client: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test standard config flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_ACCESS_URL: "http://user:password@string"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_ACCESS_URL] == "http://user:password@string" - assert result["title"] == "SimpleFIN" - - -@pytest.mark.parametrize( - ("side_effect", "error_key"), - [ - (SimpleFinInvalidAccountURLError, "url_error"), - (SimpleFinPaymentRequiredError, 
"payment_required"), - (SimpleFinAuthError, "invalid_auth"), - ], -) -async def test_access_url_errors( - hass: HomeAssistant, - mock_simplefin_client: AsyncMock, - side_effect: Exception, - error_key: str, -) -> None: - """Test the various errors we can get in access_url mode.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - - mock_simplefin_client.claim_setup_token.side_effect = side_effect - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_ACCESS_URL: "donJulio"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error_key} - - mock_simplefin_client.claim_setup_token.side_effect = None - - # Pass the entry creation - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_ACCESS_URL: "http://user:password@string"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == {CONF_ACCESS_URL: "http://user:password@string"} - assert result["title"] == "SimpleFIN" - - -@pytest.mark.parametrize( - ("side_effect", "error_key"), - [ - (SimpleFinInvalidClaimTokenError, "invalid_claim_token"), - (SimpleFinClaimError, "claim_error"), - ], -) -async def test_claim_token_errors( - hass: HomeAssistant, - mock_simplefin_client: AsyncMock, - side_effect: Exception, - error_key: str, -) -> None: - """Test config flow with various token claim errors.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - - mock_simplefin_client.claim_setup_token.side_effect = side_effect - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_ACCESS_URL: "donJulio"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error_key} - - mock_simplefin_client.claim_setup_token.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_ACCESS_URL: "donJulio"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == {CONF_ACCESS_URL: "https://i:am@yomama.house.com"} - assert result["title"] == "SimpleFIN" diff --git a/tests/components/simplefin/test_sensor.py b/tests/components/simplefin/test_sensor.py deleted file mode 100644 index 495f249d4e1..00000000000 --- a/tests/components/simplefin/test_sensor.py +++ /dev/null @@ -1,94 +0,0 @@ -"""Test SimpleFin Sensor with Snapshot data.""" - -from datetime import timedelta -from unittest.mock import AsyncMock, patch - -from freezegun.api import FrozenDateTimeFactory -import pytest -from simplefin4py.exceptions import SimpleFinAuthError, SimpleFinPaymentRequiredError -from syrupy import SnapshotAssertion - -from homeassistant.const import STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_simplefin_client: AsyncMock, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.simplefin.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.parametrize( - ("side_effect"), - [ - (SimpleFinAuthError), - (SimpleFinPaymentRequiredError), - ], -) -async def test_update_errors( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_simplefin_client: AsyncMock, - freezer: FrozenDateTimeFactory, - side_effect: Exception, -) -> None: - """Test connection error.""" - await setup_integration(hass, mock_config_entry) - - assert hass.states.get("sensor.the_bank_of_go_the_bank_balance").state == "7777.77" - assert hass.states.get("sensor.investments_my_checking_balance").state == "12345.67" - assert ( - hass.states.get("sensor.the_bank_of_go_prime_savings_balance").state - == "9876.54" - ) - assert ( - hass.states.get("sensor.random_bank_costco_anywhere_visa_r_card_balance").state - == "-532.69" - ) - assert hass.states.get("sensor.investments_dr_evil_balance").state == "1000000.00" - assert ( - hass.states.get("sensor.investments_nerdcorp_series_b_balance").state - == "13579.24" - ) - assert ( - hass.states.get("sensor.mythical_randomsavings_unicorn_pot_balance").state - == "10000.00" - ) - assert ( - hass.states.get("sensor.mythical_randomsavings_castle_mortgage_balance").state - == "7500.00" - ) - - mock_simplefin_client.return_value.fetch_data.side_effect = side_effect - freezer.tick(timedelta(days=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - sensors = [ - "sensor.the_bank_of_go_the_bank_balance", - "sensor.investments_my_checking_balance", - "sensor.the_bank_of_go_prime_savings_balance", - "sensor.random_bank_costco_anywhere_visa_r_card_balance", - "sensor.investments_dr_evil_balance", - "sensor.investments_nerdcorp_series_b_balance", - "sensor.mythical_randomsavings_unicorn_pot_balance", - "sensor.mythical_randomsavings_castle_mortgage_balance", - ] - - for sensor in sensors: - assert hass.states.get(sensor).state == STATE_UNAVAILABLE diff --git a/tests/components/simplisafe/conftest.py b/tests/components/simplisafe/conftest.py index 12ed845c7d2..aaf853863e5 100644 --- a/tests/components/simplisafe/conftest.py +++ b/tests/components/simplisafe/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for SimpliSafe.""" -from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, Mock, patch import pytest from simplipy.system.v3 import SystemV3 +from typing_extensions import AsyncGenerator from homeassistant.components.simplisafe.const import DOMAIN from homeassistant.const import CONF_CODE, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME diff --git a/tests/components/simplisafe/test_config_flow.py b/tests/components/simplisafe/test_config_flow.py index 9270fc43c30..dde7e37b891 100644 --- a/tests/components/simplisafe/test_config_flow.py +++ b/tests/components/simplisafe/test_config_flow.py @@ -8,13 +8,11 @@ from simplipy.errors import InvalidCredentialsError, SimplipyError from homeassistant.components.simplisafe import DOMAIN from 
homeassistant.components.simplisafe.config_flow import CONF_AUTH_CODE -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_CODE, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from tests.common import MockConfigEntry - VALID_AUTH_CODE = "code12345123451234512345123451234512345123451" @@ -92,11 +90,13 @@ async def test_options_flow(config_entry, hass: HomeAssistant) -> None: assert config_entry.options == {CONF_CODE: "4321"} -async def test_step_reauth( - config_entry: MockConfigEntry, hass: HomeAssistant, setup_simplisafe -) -> None: +async def test_step_reauth(config_entry, hass: HomeAssistant, setup_simplisafe) -> None: """Test the re-auth step.""" - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH}, + data={CONF_USERNAME: "12345", CONF_TOKEN: "token123"}, + ) assert result["step_id"] == "user" with ( @@ -118,10 +118,14 @@ async def test_step_reauth( @pytest.mark.parametrize("unique_id", ["some_other_id"]) async def test_step_reauth_wrong_account( - config_entry: MockConfigEntry, hass: HomeAssistant, setup_simplisafe + config_entry, hass: HomeAssistant, setup_simplisafe ) -> None: """Test the re-auth step where the wrong account is used during login.""" - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH}, + data={CONF_USERNAME: "12345", CONF_TOKEN: "token123"}, + ) assert result["step_id"] == "user" with ( diff --git a/tests/components/simplisafe/test_diagnostics.py b/tests/components/simplisafe/test_diagnostics.py index d5479f00b06..6948f98b159 100644 --- a/tests/components/simplisafe/test_diagnostics.py +++ b/tests/components/simplisafe/test_diagnostics.py @@ -3,7 +3,6 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.core import HomeAssistant -from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -29,9 +28,6 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, - "created_at": ANY, - "modified_at": ANY, - "discovery_keys": {}, }, "subscription_data": { "12345": { diff --git a/tests/components/simulated/test_sensor.py b/tests/components/simulated/test_sensor.py index b167147367a..d32eca8c66e 100644 --- a/tests/components/simulated/test_sensor.py +++ b/tests/components/simulated/test_sensor.py @@ -16,17 +16,13 @@ from homeassistant.components.simulated.sensor import ( DEFAULT_PHASE, DEFAULT_RELATIVE_TO_EPOCH, DEFAULT_SEED, - DOMAIN, ) from homeassistant.const import CONF_FRIENDLY_NAME from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component -async def test_simulated_sensor_default_config( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: +async def test_simulated_sensor_default_config(hass: HomeAssistant) -> None: """Test default config.""" config = {"sensor": {"platform": "simulated"}} assert await async_setup_component(hass, "sensor", config) @@ -44,7 +40,3 @@ async def test_simulated_sensor_default_config( assert state.attributes.get(CONF_FWHM) == DEFAULT_FWHM assert state.attributes.get(CONF_SEED) == 
DEFAULT_SEED
     assert state.attributes.get(CONF_RELATIVE_TO_EPOCH) == DEFAULT_RELATIVE_TO_EPOCH
-
-    issue = issue_registry.async_get_issue(DOMAIN, DOMAIN)
-    assert issue.issue_id == DOMAIN
-    assert issue.translation_key == "simulated_deprecation"
diff --git a/tests/components/siren/test_init.py b/tests/components/siren/test_init.py
index 475b32540b4..168300d0abe 100644
--- a/tests/components/siren/test_init.py
+++ b/tests/components/siren/test_init.py
@@ -27,7 +27,7 @@ class MockSirenEntity(SirenEntity):
         supported_features=0,
         available_tones_as_attr=None,
         available_tones_in_desc=None,
-    ) -> None:
+    ):
         """Initialize mock siren entity."""
         self._attr_supported_features = supported_features
         if available_tones_as_attr is not None:
diff --git a/tests/components/sky_remote/__init__.py b/tests/components/sky_remote/__init__.py
deleted file mode 100644
index 83d68330d5b..00000000000
--- a/tests/components/sky_remote/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-"""Tests for the Sky Remote component."""
-
-from homeassistant.core import HomeAssistant
-
-from tests.common import MockConfigEntry
-
-
-async def setup_mock_entry(hass: HomeAssistant, entry: MockConfigEntry):
-    """Initialize a mock config entry."""
-    entry.add_to_hass(hass)
-    await hass.config_entries.async_setup(entry.entry_id)
-
-    await hass.async_block_till_done()
diff --git a/tests/components/sky_remote/conftest.py b/tests/components/sky_remote/conftest.py
deleted file mode 100644
index d6c453d81f7..00000000000
--- a/tests/components/sky_remote/conftest.py
+++ /dev/null
@@ -1,47 +0,0 @@
-"""Test mocks and fixtures."""
-
-from collections.abc import Generator
-from unittest.mock import AsyncMock, MagicMock, patch
-
-import pytest
-
-from homeassistant.components.sky_remote.const import DEFAULT_PORT, DOMAIN
-from homeassistant.const import CONF_HOST, CONF_PORT
-
-from tests.common import MockConfigEntry
-
-SAMPLE_CONFIG = {CONF_HOST: "example.com", CONF_PORT: DEFAULT_PORT}
-
-
-@pytest.fixture
-def mock_config_entry() -> MockConfigEntry:
-    """Mock a config entry."""
-    return MockConfigEntry(domain=DOMAIN, data=SAMPLE_CONFIG)
-
-
-@pytest.fixture
-def mock_setup_entry() -> Generator[AsyncMock]:
-    """Stub out setup function."""
-    with patch(
-        "homeassistant.components.sky_remote.async_setup_entry",
-        return_value=True,
-    ) as mock_setup_entry:
-        yield mock_setup_entry
-
-
-@pytest.fixture
-def mock_remote_control(request: pytest.FixtureRequest) -> Generator[MagicMock]:
-    """Mock skyboxremote library."""
-    with (
-        patch(
-            "homeassistant.components.sky_remote.RemoteControl"
-        ) as mock_remote_control,
-        patch(
-            "homeassistant.components.sky_remote.config_flow.RemoteControl",
-            mock_remote_control,
-        ),
-    ):
-        mock_remote_control._instance_mock = MagicMock(host="example.com")
-        mock_remote_control._instance_mock.check_connectable = AsyncMock(True)
-        mock_remote_control.return_value = mock_remote_control._instance_mock
-        yield mock_remote_control
diff --git a/tests/components/sky_remote/test_config_flow.py b/tests/components/sky_remote/test_config_flow.py
deleted file mode 100644
index aaeda20788c..00000000000
--- a/tests/components/sky_remote/test_config_flow.py
+++ /dev/null
@@ -1,125 +0,0 @@
-"""Test the Sky Remote config flow."""
-
-from __future__ import annotations
-
-from unittest.mock import AsyncMock
-
-import pytest
-from skyboxremote import LEGACY_PORT, SkyBoxConnectionError
-
-from homeassistant.components.sky_remote.const import DOMAIN
-from homeassistant.config_entries import SOURCE_USER
-from homeassistant.const import CONF_HOST, CONF_PORT
-from homeassistant.core import HomeAssistant
-from homeassistant.data_entry_flow import FlowResultType
-
-from .conftest import SAMPLE_CONFIG
-
-
-async def test_user_flow(
-    hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_remote_control
-) -> None:
-    """Test we can setup an entry."""
-
-    result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_USER}
-    )
-    assert result["step_id"] == "user"
-    assert result["type"] is FlowResultType.FORM
-
-    result = await hass.config_entries.flow.async_configure(
-        result["flow_id"],
-        {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]},
-    )
-
-    assert result["type"] is FlowResultType.CREATE_ENTRY
-    assert result["data"] == SAMPLE_CONFIG
-
-    assert len(mock_setup_entry.mock_calls) == 1
-
-
-async def test_device_exists_abort(
-    hass: HomeAssistant, mock_config_entry, mock_remote_control
-) -> None:
-    """Test we abort flow if device already configured."""
-    mock_config_entry.add_to_hass(hass)
-
-    result = await hass.config_entries.flow.async_init(
-        DOMAIN,
-        context={"source": SOURCE_USER},
-        data={CONF_HOST: mock_config_entry.data[CONF_HOST]},
-    )
-
-    assert result["type"] is FlowResultType.ABORT
-    assert result["reason"] == "already_configured"
-
-
-@pytest.mark.parametrize("mock_remote_control", [LEGACY_PORT], indirect=True)
-async def test_user_flow_legacy_device(
-    hass: HomeAssistant,
-    mock_setup_entry: AsyncMock,
-    mock_remote_control,
-) -> None:
-    """Test we can setup an entry with a legacy port."""
-
-    result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_USER}
-    )
-    assert result["step_id"] == "user"
-    assert result["type"] is FlowResultType.FORM
-
-    async def mock_check_connectable():
-        if mock_remote_control.call_args[0][1] == LEGACY_PORT:
-            return True
-        raise SkyBoxConnectionError("Wrong port")
-
-    mock_remote_control._instance_mock.check_connectable = mock_check_connectable
-
-    result = await hass.config_entries.flow.async_configure(
-        result["flow_id"],
-        {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]},
-    )
-
-    assert result["type"] is FlowResultType.CREATE_ENTRY
-    assert result["data"] == {**SAMPLE_CONFIG, CONF_PORT: LEGACY_PORT}
-
-    assert len(mock_setup_entry.mock_calls) == 1
-
-
-@pytest.mark.parametrize("mock_remote_control", [6], indirect=True)
-async def test_user_flow_unconnectable(
-    hass: HomeAssistant,
-    mock_setup_entry: AsyncMock,
-    mock_remote_control,
-) -> None:
-    """Test we can setup an entry."""
-
-    result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_USER}
-    )
-    assert result["step_id"] == "user"
-    assert result["type"] is FlowResultType.FORM
-
-    mock_remote_control._instance_mock.check_connectable = AsyncMock(
-        side_effect=SkyBoxConnectionError("Example")
-    )
-
-    result = await hass.config_entries.flow.async_configure(
-        result["flow_id"],
-        {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]},
-    )
-
-    assert result["type"] is FlowResultType.FORM
-    assert result["errors"] == {"base": "cannot_connect"}
-
-    assert len(mock_setup_entry.mock_calls) == 0
-
-    mock_remote_control._instance_mock.check_connectable = AsyncMock(True)
-    result = await hass.config_entries.flow.async_configure(
-        result["flow_id"],
-        {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]},
-    )
-    assert result["type"] is FlowResultType.CREATE_ENTRY
-    assert result["data"] == SAMPLE_CONFIG
-
-    assert len(mock_setup_entry.mock_calls) == 1
diff --git a/tests/components/sky_remote/test_init.py b/tests/components/sky_remote/test_init.py
deleted file mode 100644
index fe316baa6bf..00000000000
--- a/tests/components/sky_remote/test_init.py
+++ /dev/null
@@ -1,59 +0,0 @@
-"""Tests for the Sky Remote component."""
-
-from unittest.mock import AsyncMock
-
-from skyboxremote import SkyBoxConnectionError
-
-from homeassistant.components.sky_remote.const import DEFAULT_PORT, DOMAIN
-from homeassistant.config_entries import ConfigEntryState
-from homeassistant.core import HomeAssistant
-from homeassistant.helpers import device_registry as dr
-
-from . import setup_mock_entry
-
-from tests.common import MockConfigEntry
-
-
-async def test_setup_entry(
-    hass: HomeAssistant,
-    mock_config_entry: MockConfigEntry,
-    mock_remote_control,
-    device_registry: dr.DeviceRegistry,
-) -> None:
-    """Test successful setup of entry."""
-    await setup_mock_entry(hass, mock_config_entry)
-
-    assert len(hass.config_entries.async_entries(DOMAIN)) == 1
-    mock_remote_control.assert_called_once_with("example.com", DEFAULT_PORT)
-    device_entry = device_registry.async_get_device(
-        identifiers={(DOMAIN, mock_config_entry.entry_id)}
-    )
-    assert device_entry is not None
-    assert device_entry.name == "example.com"
-
-
-async def test_setup_unconnectable_entry(
-    hass: HomeAssistant,
-    mock_config_entry: MockConfigEntry,
-    mock_remote_control,
-) -> None:
-    """Test unsuccessful setup of entry."""
-    mock_remote_control._instance_mock.check_connectable = AsyncMock(
-        side_effect=SkyBoxConnectionError()
-    )
-
-    await setup_mock_entry(hass, mock_config_entry)
-
-    assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY
-
-
-async def test_unload_entry(
-    hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_remote_control
-) -> None:
-    """Test unload an entry."""
-    await setup_mock_entry(hass, mock_config_entry)
-
-    assert mock_config_entry.state is ConfigEntryState.LOADED
-    assert await hass.config_entries.async_unload(mock_config_entry.entry_id)
-    await hass.async_block_till_done()
-    assert mock_config_entry.state is ConfigEntryState.NOT_LOADED
diff --git a/tests/components/sky_remote/test_remote.py b/tests/components/sky_remote/test_remote.py
deleted file mode 100644
index 301375bc039..00000000000
--- a/tests/components/sky_remote/test_remote.py
+++ /dev/null
@@ -1,46 +0,0 @@
-"""Test sky_remote remote."""
-
-import pytest
-
-from homeassistant.components.remote import (
-    ATTR_COMMAND,
-    DOMAIN as REMOTE_DOMAIN,
-    SERVICE_SEND_COMMAND,
-)
-from homeassistant.const import ATTR_ENTITY_ID
-from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ServiceValidationError
-
-from .
import setup_mock_entry - -ENTITY_ID = "remote.example_com" - - -async def test_send_command( - hass: HomeAssistant, mock_config_entry, mock_remote_control -) -> None: - """Test "send_command" method.""" - await setup_mock_entry(hass, mock_config_entry) - await hass.services.async_call( - REMOTE_DOMAIN, - SERVICE_SEND_COMMAND, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_COMMAND: ["sky"]}, - blocking=True, - ) - mock_remote_control._instance_mock.send_keys.assert_called_once_with(["sky"]) - - -async def test_send_invalid_command( - hass: HomeAssistant, mock_config_entry, mock_remote_control -) -> None: - """Test "send_command" method.""" - await setup_mock_entry(hass, mock_config_entry) - - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - REMOTE_DOMAIN, - SERVICE_SEND_COMMAND, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_COMMAND: ["apple"]}, - blocking=True, - ) - mock_remote_control._instance_mock.send_keys.assert_not_called() diff --git a/tests/components/skybell/test_config_flow.py b/tests/components/skybell/test_config_flow.py index f415fef077e..cb62f808efc 100644 --- a/tests/components/skybell/test_config_flow.py +++ b/tests/components/skybell/test_config_flow.py @@ -5,9 +5,10 @@ from unittest.mock import patch from aioskybell import exceptions import pytest +from homeassistant import config_entries from homeassistant.components.skybell.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_PASSWORD +from homeassistant.const import CONF_PASSWORD, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -103,7 +104,15 @@ async def test_step_reauth(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=DOMAIN, unique_id=USER_ID, data=CONF_DATA) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + CONF_SOURCE: config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -121,7 +130,15 @@ async def test_step_reauth_failed(hass: HomeAssistant, skybell_mock) -> None: entry = MockConfigEntry(domain=DOMAIN, unique_id=USER_ID, data=CONF_DATA) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + CONF_SOURCE: config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/sleepiq/conftest.py b/tests/components/sleepiq/conftest.py index a9456bd3cc6..fd07cc414e7 100644 --- a/tests/components/sleepiq/conftest.py +++ b/tests/components/sleepiq/conftest.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, create_autospec, patch from asyncsleepiq import ( @@ -18,6 +17,7 @@ from asyncsleepiq import ( SleepIQSleeper, ) import pytest +from typing_extensions import Generator from homeassistant.components.sleepiq import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME diff --git a/tests/components/sleepiq/test_binary_sensor.py b/tests/components/sleepiq/test_binary_sensor.py index 689834aba35..65654de74ac 100644 --- 
a/tests/components/sleepiq/test_binary_sensor.py +++ b/tests/components/sleepiq/test_binary_sensor.py @@ -1,9 +1,6 @@ """The tests for SleepIQ binary sensor platform.""" -from homeassistant.components.binary_sensor import ( - DOMAIN as BINARY_SENSOR_DOMAIN, - BinarySensorDeviceClass, -) +from homeassistant.components.binary_sensor import DOMAIN, BinarySensorDeviceClass from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, @@ -31,7 +28,7 @@ async def test_binary_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ binary sensors.""" - await setup_platform(hass, BINARY_SENSOR_DOMAIN) + await setup_platform(hass, DOMAIN) state = hass.states.get( f"binary_sensor.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_is_in_bed" diff --git a/tests/components/sleepiq/test_button.py b/tests/components/sleepiq/test_button.py index e1c4203c937..33ad4d72b46 100644 --- a/tests/components/sleepiq/test_button.py +++ b/tests/components/sleepiq/test_button.py @@ -1,6 +1,6 @@ """The tests for SleepIQ binary sensor platform.""" -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN +from homeassistant.components.button import DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -12,7 +12,7 @@ async def test_button_calibrate( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ calibrate button.""" - await setup_platform(hass, BUTTON_DOMAIN) + await setup_platform(hass, DOMAIN) state = hass.states.get(f"button.sleepnumber_{BED_NAME_LOWER}_calibrate") assert ( @@ -24,7 +24,7 @@ async def test_button_calibrate( assert entity.unique_id == f"{BED_ID}-calibrate" await hass.services.async_call( - BUTTON_DOMAIN, + DOMAIN, "press", {ATTR_ENTITY_ID: f"button.sleepnumber_{BED_NAME_LOWER}_calibrate"}, blocking=True, @@ -38,7 +38,7 @@ async def test_button_stop_pump( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ stop pump button.""" - await setup_platform(hass, BUTTON_DOMAIN) + await setup_platform(hass, DOMAIN) state = hass.states.get(f"button.sleepnumber_{BED_NAME_LOWER}_stop_pump") assert ( @@ -50,7 +50,7 @@ async def test_button_stop_pump( assert entity.unique_id == f"{BED_ID}-stop-pump" await hass.services.async_call( - BUTTON_DOMAIN, + DOMAIN, "press", {ATTR_ENTITY_ID: f"button.sleepnumber_{BED_NAME_LOWER}_stop_pump"}, blocking=True, diff --git a/tests/components/sleepiq/test_config_flow.py b/tests/components/sleepiq/test_config_flow.py index 26007d42e7d..af08f5aa9fe 100644 --- a/tests/components/sleepiq/test_config_flow.py +++ b/tests/components/sleepiq/test_config_flow.py @@ -101,7 +101,19 @@ async def test_reauth_password(hass: HomeAssistant) -> None: # set up initially entry = await setup_platform(hass) - result = await entry.start_reauth_flow(hass) + with patch( + "homeassistant.components.sleepiq.config_flow.AsyncSleepIQ.login", + side_effect=SleepIQLoginException, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=entry.data, + ) with patch( "homeassistant.components.sleepiq.config_flow.AsyncSleepIQ.login", diff --git a/tests/components/sleepiq/test_light.py b/tests/components/sleepiq/test_light.py index d1284dc3e41..9564bca7a99 100644 --- 
a/tests/components/sleepiq/test_light.py +++ b/tests/components/sleepiq/test_light.py @@ -1,6 +1,6 @@ """The tests for SleepIQ light platform.""" -from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.light import DOMAIN from homeassistant.components.sleepiq.coordinator import LONGER_UPDATE_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant @@ -16,7 +16,7 @@ async def test_setup( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test for successfully setting up the SleepIQ platform.""" - entry = await setup_platform(hass, LIGHT_DOMAIN) + entry = await setup_platform(hass, DOMAIN) assert len(entity_registry.entities) == 2 @@ -33,10 +33,10 @@ async def test_setup( async def test_light_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: """Test light change.""" - await setup_platform(hass, LIGHT_DOMAIN) + await setup_platform(hass, DOMAIN) await hass.services.async_call( - LIGHT_DOMAIN, + DOMAIN, "turn_on", {ATTR_ENTITY_ID: f"light.sleepnumber_{BED_NAME_LOWER}_light_1"}, blocking=True, @@ -45,7 +45,7 @@ async def test_light_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: mock_asyncsleepiq.beds[BED_ID].foundation.lights[0].turn_on.assert_called_once() await hass.services.async_call( - LIGHT_DOMAIN, + DOMAIN, "turn_off", {ATTR_ENTITY_ID: f"light.sleepnumber_{BED_NAME_LOWER}_light_1"}, blocking=True, @@ -56,7 +56,7 @@ async def test_light_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: async def test_switch_get_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: """Test light update.""" - await setup_platform(hass, LIGHT_DOMAIN) + await setup_platform(hass, DOMAIN) assert ( hass.states.get(f"light.sleepnumber_{BED_NAME_LOWER}_light_1").state diff --git a/tests/components/sleepiq/test_number.py b/tests/components/sleepiq/test_number.py index f0739aabc9d..52df2eb27aa 100644 --- a/tests/components/sleepiq/test_number.py +++ b/tests/components/sleepiq/test_number.py @@ -5,7 +5,7 @@ from homeassistant.components.number import ( ATTR_MIN, ATTR_STEP, ATTR_VALUE, - DOMAIN as NUMBER_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, ) from homeassistant.const import ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, ATTR_ICON @@ -30,7 +30,7 @@ async def test_firmness( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ firmness number values for a bed with two sides.""" - entry = await setup_platform(hass, NUMBER_DOMAIN) + entry = await setup_platform(hass, DOMAIN) state = hass.states.get( f"number.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_firmness" @@ -71,7 +71,7 @@ async def test_firmness( assert entry.unique_id == f"{SLEEPER_R_ID}_firmness" await hass.services.async_call( - NUMBER_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, { ATTR_ENTITY_ID: f"number.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_firmness", @@ -89,7 +89,7 @@ async def test_actuators( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ actuator position values for a bed with adjustable head and foot.""" - entry = await setup_platform(hass, NUMBER_DOMAIN) + entry = await setup_platform(hass, DOMAIN) state = hass.states.get(f"number.sleepnumber_{BED_NAME_LOWER}_right_head_position") assert state.state == "60.0" @@ -143,7 +143,7 @@ async def test_actuators( assert entry.unique_id == f"{BED_ID}_F" await hass.services.async_call( - NUMBER_DOMAIN, + DOMAIN, 
SERVICE_SET_VALUE, { ATTR_ENTITY_ID: f"number.sleepnumber_{BED_NAME_LOWER}_right_head_position", @@ -165,7 +165,7 @@ async def test_foot_warmer_timer( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ foot warmer number values for a bed with two sides.""" - entry = await setup_platform(hass, NUMBER_DOMAIN) + entry = await setup_platform(hass, DOMAIN) state = hass.states.get( f"number.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_foot_warming_timer" @@ -187,7 +187,7 @@ async def test_foot_warmer_timer( assert entry.unique_id == f"{BED_ID}_L_foot_warming_timer" await hass.services.async_call( - NUMBER_DOMAIN, + DOMAIN, SERVICE_SET_VALUE, { ATTR_ENTITY_ID: f"number.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_foot_warming_timer", diff --git a/tests/components/sleepiq/test_select.py b/tests/components/sleepiq/test_select.py index bbfb612e9cb..ef4c7fb6df0 100644 --- a/tests/components/sleepiq/test_select.py +++ b/tests/components/sleepiq/test_select.py @@ -4,10 +4,7 @@ from unittest.mock import MagicMock from asyncsleepiq import FootWarmingTemps -from homeassistant.components.select import ( - DOMAIN as SELECT_DOMAIN, - SERVICE_SELECT_OPTION, -) +from homeassistant.components.select import DOMAIN, SERVICE_SELECT_OPTION from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, @@ -40,7 +37,7 @@ async def test_split_foundation_preset( mock_asyncsleepiq: MagicMock, ) -> None: """Test the SleepIQ select entity for split foundation presets.""" - entry = await setup_platform(hass, SELECT_DOMAIN) + entry = await setup_platform(hass, DOMAIN) state = hass.states.get( f"select.sleepnumber_{BED_NAME_LOWER}_foundation_preset_right" @@ -75,7 +72,7 @@ async def test_split_foundation_preset( assert entry.unique_id == f"{BED_ID}_preset_L" await hass.services.async_call( - SELECT_DOMAIN, + DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: f"select.sleepnumber_{BED_NAME_LOWER}_foundation_preset_left", @@ -97,7 +94,7 @@ async def test_single_foundation_preset( mock_asyncsleepiq_single_foundation: MagicMock, ) -> None: """Test the SleepIQ select entity for single foundation presets.""" - entry = await setup_platform(hass, SELECT_DOMAIN) + entry = await setup_platform(hass, DOMAIN) state = hass.states.get(f"select.sleepnumber_{BED_NAME_LOWER}_foundation_preset") assert state.state == PRESET_R_STATE @@ -114,7 +111,7 @@ async def test_single_foundation_preset( assert entry.unique_id == f"{BED_ID}_preset" await hass.services.async_call( - SELECT_DOMAIN, + DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: f"select.sleepnumber_{BED_NAME_LOWER}_foundation_preset", @@ -138,7 +135,7 @@ async def test_foot_warmer( mock_asyncsleepiq: MagicMock, ) -> None: """Test the SleepIQ select entity for foot warmers.""" - entry = await setup_platform(hass, SELECT_DOMAIN) + entry = await setup_platform(hass, DOMAIN) state = hass.states.get( f"select.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_foot_warmer" @@ -157,7 +154,7 @@ async def test_foot_warmer( assert entry.unique_id == f"{SLEEPER_L_ID}_foot_warmer" await hass.services.async_call( - SELECT_DOMAIN, + DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: f"select.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_foot_warmer", @@ -188,7 +185,7 @@ async def test_foot_warmer( assert entry.unique_id == f"{SLEEPER_R_ID}_foot_warmer" await hass.services.async_call( - SELECT_DOMAIN, + DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: 
f"select.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_R_NAME_LOWER}_foot_warmer", diff --git a/tests/components/sleepiq/test_sensor.py b/tests/components/sleepiq/test_sensor.py index eb558850fb3..ae25958419c 100644 --- a/tests/components/sleepiq/test_sensor.py +++ b/tests/components/sleepiq/test_sensor.py @@ -1,6 +1,6 @@ """The tests for SleepIQ sensor platform.""" -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.sensor import DOMAIN from homeassistant.const import ATTR_FRIENDLY_NAME, ATTR_ICON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -22,7 +22,7 @@ async def test_sleepnumber_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ sleepnumber for a bed with two sides.""" - entry = await setup_platform(hass, SENSOR_DOMAIN) + entry = await setup_platform(hass, DOMAIN) state = hass.states.get( f"sensor.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_sleepnumber" @@ -61,7 +61,7 @@ async def test_pressure_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ pressure for a bed with two sides.""" - entry = await setup_platform(hass, SENSOR_DOMAIN) + entry = await setup_platform(hass, DOMAIN) state = hass.states.get( f"sensor.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_pressure" diff --git a/tests/components/sleepiq/test_switch.py b/tests/components/sleepiq/test_switch.py index 5dd3e77fd66..7c41b6b9d19 100644 --- a/tests/components/sleepiq/test_switch.py +++ b/tests/components/sleepiq/test_switch.py @@ -1,7 +1,7 @@ """The tests for SleepIQ switch platform.""" from homeassistant.components.sleepiq.coordinator import LONGER_UPDATE_INTERVAL -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.components.switch import DOMAIN from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -16,7 +16,7 @@ async def test_setup( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test for successfully setting up the SleepIQ platform.""" - entry = await setup_platform(hass, SWITCH_DOMAIN) + entry = await setup_platform(hass, DOMAIN) assert len(entity_registry.entities) == 1 @@ -28,10 +28,10 @@ async def test_setup( async def test_switch_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: """Test button press.""" - await setup_platform(hass, SWITCH_DOMAIN) + await setup_platform(hass, DOMAIN) await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, "turn_off", {ATTR_ENTITY_ID: f"switch.sleepnumber_{BED_NAME_LOWER}_pause_mode"}, blocking=True, @@ -40,7 +40,7 @@ async def test_switch_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None mock_asyncsleepiq.beds[BED_ID].set_pause_mode.assert_called_with(False) await hass.services.async_call( - SWITCH_DOMAIN, + DOMAIN, "turn_on", {ATTR_ENTITY_ID: f"switch.sleepnumber_{BED_NAME_LOWER}_pause_mode"}, blocking=True, @@ -51,7 +51,7 @@ async def test_switch_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None async def test_switch_get_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: """Test button press.""" - await setup_platform(hass, SWITCH_DOMAIN) + await setup_platform(hass, DOMAIN) assert ( hass.states.get(f"switch.sleepnumber_{BED_NAME_LOWER}_pause_mode").state diff --git a/tests/components/slimproto/conftest.py 
b/tests/components/slimproto/conftest.py index 1bb2d7f2628..ece30d3e5cf 100644 --- a/tests/components/slimproto/conftest.py +++ b/tests/components/slimproto/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.slimproto.const import DOMAIN diff --git a/tests/components/sma/__init__.py b/tests/components/sma/__init__.py index 80837c718a9..aefb99cf1b1 100644 --- a/tests/components/sma/__init__.py +++ b/tests/components/sma/__init__.py @@ -6,7 +6,7 @@ MOCK_DEVICE = { "manufacturer": "SMA", "name": "SMA Device Name", "type": "Sunny Boy 3.6", - "serial": 123456789, + "serial": "123456789", } MOCK_USER_INPUT = { diff --git a/tests/components/sma/conftest.py b/tests/components/sma/conftest.py index dd47a0f1055..a98eda673e4 100644 --- a/tests/components/sma/conftest.py +++ b/tests/components/sma/conftest.py @@ -9,7 +9,6 @@ import pytest from homeassistant import config_entries from homeassistant.components.sma.const import DOMAIN -from homeassistant.core import HomeAssistant from . import MOCK_DEVICE, MOCK_USER_INPUT @@ -17,22 +16,19 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry() -> MockConfigEntry: +def mock_config_entry(): """Return the default mocked config entry.""" return MockConfigEntry( domain=DOMAIN, title=MOCK_DEVICE["name"], - unique_id=str(MOCK_DEVICE["serial"]), + unique_id=MOCK_DEVICE["serial"], data=MOCK_USER_INPUT, source=config_entries.SOURCE_IMPORT, - minor_version=2, ) @pytest.fixture -async def init_integration( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> MockConfigEntry: +async def init_integration(hass, mock_config_entry): """Create a fake SMA Config Entry.""" mock_config_entry.add_to_hass(hass) diff --git a/tests/components/sma/test_init.py b/tests/components/sma/test_init.py deleted file mode 100644 index 0cc82f49a41..00000000000 --- a/tests/components/sma/test_init.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Test the sma init file.""" - -from homeassistant.components.sma.const import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT -from homeassistant.core import HomeAssistant - -from . 
import MOCK_DEVICE, MOCK_USER_INPUT, _patch_async_setup_entry - -from tests.common import MockConfigEntry - - -async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None: - """Test migrating a 1.1 config entry to 1.2.""" - with _patch_async_setup_entry(): - entry = MockConfigEntry( - domain=DOMAIN, - title=MOCK_DEVICE["name"], - unique_id=MOCK_DEVICE["serial"], # Not converted to str - data=MOCK_USER_INPUT, - source=SOURCE_IMPORT, - minor_version=1, - ) - entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(entry.entry_id) - assert entry.version == 1 - assert entry.minor_version == 2 - assert entry.unique_id == str(MOCK_DEVICE["serial"]) diff --git a/tests/components/smart_meter_texas/conftest.py b/tests/components/smart_meter_texas/conftest.py index 14ba6199c3d..d06571fe05e 100644 --- a/tests/components/smart_meter_texas/conftest.py +++ b/tests/components/smart_meter_texas/conftest.py @@ -2,7 +2,6 @@ from http import HTTPStatus import json -from typing import Any import pytest from smart_meter_texas.const import ( @@ -20,11 +19,9 @@ from homeassistant.components.homeassistant import ( ) from homeassistant.components.smart_meter_texas.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture -from tests.test_util.aiohttp import AiohttpClientMocker TEST_ENTITY_ID = "sensor.electric_meter_123456789" @@ -35,23 +32,14 @@ def load_smt_fixture(name): return json.loads(json_fixture) -async def setup_integration( - hass: HomeAssistant, - config_entry: MockConfigEntry, - aioclient_mock: AiohttpClientMocker, - **kwargs: Any, -) -> None: +async def setup_integration(hass, config_entry, aioclient_mock, **kwargs): """Initialize the Smart Meter Texas integration for testing.""" mock_connection(aioclient_mock, **kwargs) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() -async def refresh_data( - hass: HomeAssistant, - config_entry: MockConfigEntry, - aioclient_mock: AiohttpClientMocker, -) -> None: +async def refresh_data(hass, config_entry, aioclient_mock): """Request a DataUpdateCoordinator refresh.""" mock_connection(aioclient_mock) await async_setup_component(hass, HA_DOMAIN, {}) @@ -103,7 +91,7 @@ def mock_connection( @pytest.fixture(name="config_entry") -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: +def mock_config_entry(hass): """Return a mock config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/smartthings/conftest.py b/tests/components/smartthings/conftest.py index 71a36c7885a..17e2c781989 100644 --- a/tests/components/smartthings/conftest.py +++ b/tests/components/smartthings/conftest.py @@ -38,6 +38,7 @@ from homeassistant.components.smartthings.const import ( STORAGE_KEY, STORAGE_VERSION, ) +from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.const import ( CONF_ACCESS_TOKEN, @@ -46,7 +47,6 @@ from homeassistant.const import ( CONF_WEBHOOK_ID, ) from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -91,7 +91,7 @@ async def setup_component( await async_setup_component(hass, "smartthings", {}) -def _create_location() -> Mock: +def 
_create_location(): loc = Mock(Location) loc.name = "Test Location" loc.location_id = str(uuid4()) @@ -99,19 +99,19 @@ def _create_location() -> Mock: @pytest.fixture(name="location") -def location_fixture() -> Mock: +def location_fixture(): """Fixture for a single location.""" return _create_location() @pytest.fixture(name="locations") -def locations_fixture(location: Mock) -> list[Mock]: +def locations_fixture(location): """Fixture for 2 locations.""" return [location, _create_location()] @pytest.fixture(name="app") -async def app_fixture(hass: HomeAssistant, config_file: dict[str, str]) -> Mock: +async def app_fixture(hass, config_file): """Fixture for a single app.""" app = Mock(AppEntity) app.app_name = APP_NAME_PREFIX + str(uuid4()) @@ -133,7 +133,7 @@ async def app_fixture(hass: HomeAssistant, config_file: dict[str, str]) -> Mock: @pytest.fixture(name="app_oauth_client") -def app_oauth_client_fixture() -> Mock: +def app_oauth_client_fixture(): """Fixture for a single app's oauth.""" client = Mock(AppOAuthClient) client.client_id = str(uuid4()) @@ -150,7 +150,7 @@ def app_settings_fixture(app, config_file): return settings -def _create_installed_app(location_id: str, app_id: str) -> Mock: +def _create_installed_app(location_id, app_id): item = Mock(InstalledApp) item.installed_app_id = str(uuid4()) item.installed_app_status = InstalledAppStatus.AUTHORIZED @@ -161,7 +161,7 @@ def _create_installed_app(location_id: str, app_id: str) -> Mock: @pytest.fixture(name="installed_app") -def installed_app_fixture(location: Mock, app: Mock) -> Mock: +def installed_app_fixture(location, app): """Fixture for a single installed app.""" return _create_installed_app(location.location_id, app.app_id) @@ -222,7 +222,7 @@ def device_fixture(location): @pytest.fixture(name="config_entry") -def config_entry_fixture(installed_app: Mock, location: Mock) -> MockConfigEntry: +def config_entry_fixture(hass, installed_app, location): """Fixture representing a config entry.""" data = { CONF_ACCESS_TOKEN: str(uuid4()), diff --git a/tests/components/smartthings/test_climate.py b/tests/components/smartthings/test_climate.py index d39ee2d6bed..e4b8cb6d373 100644 --- a/tests/components/smartthings/test_climate.py +++ b/tests/components/smartthings/test_climate.py @@ -88,26 +88,6 @@ def basic_thermostat_fixture(device_factory): return device -@pytest.fixture(name="minimal_thermostat") -def minimal_thermostat_fixture(device_factory): - """Fixture returns a minimal thermostat without cooling.""" - device = device_factory( - "Minimal Thermostat", - capabilities=[ - Capability.temperature_measurement, - Capability.thermostat_heating_setpoint, - Capability.thermostat_mode, - ], - status={ - Attribute.heating_setpoint: 68, - Attribute.thermostat_mode: "off", - Attribute.supported_thermostat_modes: ["off", "heat"], - }, - ) - device.status.attributes[Attribute.temperature] = Status(70, "F", None) - return device - - @pytest.fixture(name="thermostat") def thermostat_fixture(device_factory): """Fixture returns a fully-featured thermostat.""" @@ -330,28 +310,6 @@ async def test_basic_thermostat_entity_state( assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 21.1 # celsius -async def test_minimal_thermostat_entity_state( - hass: HomeAssistant, minimal_thermostat -) -> None: - """Tests the state attributes properly match the thermostat type.""" - await setup_platform(hass, CLIMATE_DOMAIN, devices=[minimal_thermostat]) - state = hass.states.get("climate.minimal_thermostat") - assert state.state == HVACMode.OFF - assert ( - 
state.attributes[ATTR_SUPPORTED_FEATURES] - == ClimateEntityFeature.TARGET_TEMPERATURE_RANGE - | ClimateEntityFeature.TARGET_TEMPERATURE - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TURN_ON - ) - assert ATTR_HVAC_ACTION not in state.attributes - assert sorted(state.attributes[ATTR_HVAC_MODES]) == [ - HVACMode.HEAT, - HVACMode.OFF, - ] - assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 21.1 # celsius - - async def test_thermostat_entity_state(hass: HomeAssistant, thermostat) -> None: """Tests the state attributes properly match the thermostat type.""" await setup_platform(hass, CLIMATE_DOMAIN, devices=[thermostat]) diff --git a/tests/components/smartthings/test_config_flow.py b/tests/components/smartthings/test_config_flow.py index 3621e58bc3d..49444e47780 100644 --- a/tests/components/smartthings/test_config_flow.py +++ b/tests/components/smartthings/test_config_flow.py @@ -16,9 +16,9 @@ from homeassistant.components.smartthings.const import ( CONF_LOCATION_ID, DOMAIN, ) +from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry diff --git a/tests/components/smartthings/test_cover.py b/tests/components/smartthings/test_cover.py index 31443c12ab2..bb292b53ee8 100644 --- a/tests/components/smartthings/test_cover.py +++ b/tests/components/smartthings/test_cover.py @@ -13,7 +13,10 @@ from homeassistant.components.cover import ( SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, - CoverState, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, ) from homeassistant.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE from homeassistant.config_entries import ConfigEntryState @@ -84,7 +87,7 @@ async def test_open(hass: HomeAssistant, device_factory) -> None: for entity_id in entity_ids: state = hass.states.get(entity_id) assert state is not None - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING async def test_close(hass: HomeAssistant, device_factory) -> None: @@ -109,7 +112,7 @@ async def test_close(hass: HomeAssistant, device_factory) -> None: for entity_id in entity_ids: state = hass.states.get(entity_id) assert state is not None - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING async def test_set_cover_position_switch_level( @@ -133,7 +136,7 @@ async def test_set_cover_position_switch_level( state = hass.states.get("cover.shade") # Result of call does not update state - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING assert state.attributes[ATTR_BATTERY_LEVEL] == 95 assert state.attributes[ATTR_CURRENT_POSITION] == 10 # Ensure API called @@ -164,7 +167,7 @@ async def test_set_cover_position(hass: HomeAssistant, device_factory) -> None: state = hass.states.get("cover.shade") # Result of call does not update state - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING assert state.attributes[ATTR_BATTERY_LEVEL] == 95 assert state.attributes[ATTR_CURRENT_POSITION] == 10 # Ensure API called @@ -205,14 +208,14 @@ async def test_update_to_open_from_signal(hass: HomeAssistant, device_factory) - ) await setup_platform(hass, COVER_DOMAIN, devices=[device]) device.status.update_attribute_value(Attribute.door, "open") - assert 
hass.states.get("cover.garage").state == CoverState.OPENING + assert hass.states.get("cover.garage").state == STATE_OPENING # Act async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id]) # Assert await hass.async_block_till_done() state = hass.states.get("cover.garage") assert state is not None - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN async def test_update_to_closed_from_signal( @@ -225,14 +228,14 @@ async def test_update_to_closed_from_signal( ) await setup_platform(hass, COVER_DOMAIN, devices=[device]) device.status.update_attribute_value(Attribute.door, "closed") - assert hass.states.get("cover.garage").state == CoverState.CLOSING + assert hass.states.get("cover.garage").state == STATE_CLOSING # Act async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id]) # Assert await hass.async_block_till_done() state = hass.states.get("cover.garage") assert state is not None - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED async def test_unload_config_entry(hass: HomeAssistant, device_factory) -> None: diff --git a/tests/components/smartthings/test_fan.py b/tests/components/smartthings/test_fan.py index b78c453b402..043c022b225 100644 --- a/tests/components/smartthings/test_fan.py +++ b/tests/components/smartthings/test_fan.py @@ -39,12 +39,7 @@ async def test_entity_state(hass: HomeAssistant, device_factory) -> None: # Dimmer 1 state = hass.states.get("fan.fan_1") assert state.state == "on" - assert ( - state.attributes[ATTR_SUPPORTED_FEATURES] - == FanEntityFeature.SET_SPEED - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) + assert state.attributes[ATTR_SUPPORTED_FEATURES] == FanEntityFeature.SET_SPEED assert state.attributes[ATTR_PERCENTAGE] == 66 @@ -105,12 +100,7 @@ async def test_setup_mode_capability(hass: HomeAssistant, device_factory) -> Non # Assert state = hass.states.get("fan.fan_1") assert state is not None - assert ( - state.attributes[ATTR_SUPPORTED_FEATURES] - == FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) + assert state.attributes[ATTR_SUPPORTED_FEATURES] == FanEntityFeature.PRESET_MODE assert state.attributes[ATTR_PRESET_MODE] == "high" assert state.attributes[ATTR_PRESET_MODES] == ["high", "low", "medium"] @@ -132,12 +122,7 @@ async def test_setup_speed_capability(hass: HomeAssistant, device_factory) -> No # Assert state = hass.states.get("fan.fan_1") assert state is not None - assert ( - state.attributes[ATTR_SUPPORTED_FEATURES] - == FanEntityFeature.SET_SPEED - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) + assert state.attributes[ATTR_SUPPORTED_FEATURES] == FanEntityFeature.SET_SPEED assert state.attributes[ATTR_PERCENTAGE] == 66 @@ -166,10 +151,7 @@ async def test_setup_both_capabilities(hass: HomeAssistant, device_factory) -> N assert state is not None assert ( state.attributes[ATTR_SUPPORTED_FEATURES] - == FanEntityFeature.SET_SPEED - | FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON + == FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE ) assert state.attributes[ATTR_PERCENTAGE] == 66 assert state.attributes[ATTR_PRESET_MODE] == "high" diff --git a/tests/components/smartthings/test_init.py b/tests/components/smartthings/test_init.py index e518f84aecb..ae8a288e3a5 100644 --- a/tests/components/smartthings/test_init.py +++ b/tests/components/smartthings/test_init.py @@ -1,9 +1,6 @@ """Tests for the SmartThings component init module.""" -from 
collections.abc import Callable, Coroutine -from datetime import datetime, timedelta from http import HTTPStatus -from typing import Any from unittest.mock import Mock, patch from uuid import uuid4 @@ -23,8 +20,8 @@ from homeassistant.components.smartthings.const import ( PLATFORMS, SIGNAL_SMARTTHINGS_UPDATE, ) +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -422,11 +419,7 @@ async def test_broker_regenerates_token(hass: HomeAssistant, config_entry) -> No stored_action = None config_entry.add_to_hass(hass) - def async_track_time_interval( - hass: HomeAssistant, - action: Callable[[datetime], Coroutine[Any, Any, None] | None], - interval: timedelta, - ) -> None: + def async_track_time_interval(hass, action, interval): nonlocal stored_action stored_action = action diff --git a/tests/components/smarttub/conftest.py b/tests/components/smarttub/conftest.py index 06780f8fb1e..c05762a903d 100644 --- a/tests/components/smarttub/conftest.py +++ b/tests/components/smarttub/conftest.py @@ -1,6 +1,5 @@ """Common fixtures for smarttub tests.""" -from typing import Any from unittest.mock import create_autospec, patch import pytest @@ -8,20 +7,19 @@ import smarttub from homeassistant.components.smarttub.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD -from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @pytest.fixture -def config_data() -> dict[str, Any]: +def config_data(): """Provide configuration data for tests.""" return {CONF_EMAIL: "test-email", CONF_PASSWORD: "test-password"} @pytest.fixture -def config_entry(config_data: dict[str, Any]) -> MockConfigEntry: +def config_entry(config_data): """Create a mock config entry.""" return MockConfigEntry( domain=DOMAIN, @@ -31,7 +29,7 @@ def config_entry(config_data: dict[str, Any]) -> MockConfigEntry: @pytest.fixture -async def setup_component(hass: HomeAssistant) -> None: +async def setup_component(hass): """Set up the component.""" assert await async_setup_component(hass, DOMAIN, {}) is True @@ -164,7 +162,7 @@ def mock_api(account, spa): @pytest.fixture -async def setup_entry(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: +async def setup_entry(hass, config_entry): """Initialize the config entry.""" config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/smarttub/test_config_flow.py b/tests/components/smarttub/test_config_flow.py index 5832841641c..c625f217405 100644 --- a/tests/components/smarttub/test_config_flow.py +++ b/tests/components/smarttub/test_config_flow.py @@ -66,7 +66,15 @@ async def test_reauth_success(hass: HomeAssistant, smarttub_api, account) -> Non ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -99,7 +107,15 @@ async def test_reauth_wrong_account(hass: HomeAssistant, smarttub_api, account) # we try to reauth account #2, and the user 
successfully authenticates to account #1 account.id = mock_entry1.unique_id - result = await mock_entry2.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry2.unique_id, + "entry_id": mock_entry2.entry_id, + }, + data=mock_entry2.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/smarty/__init__.py b/tests/components/smarty/__init__.py deleted file mode 100644 index c5ae7f2d382..00000000000 --- a/tests/components/smarty/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for the Smarty integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Set up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/smarty/conftest.py b/tests/components/smarty/conftest.py deleted file mode 100644 index a9b518d88f4..00000000000 --- a/tests/components/smarty/conftest.py +++ /dev/null @@ -1,64 +0,0 @@ -"""Smarty tests configuration.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.smarty import DOMAIN -from homeassistant.const import CONF_HOST - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override integration setup.""" - with patch( - "homeassistant.components.smarty.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_smarty() -> Generator[AsyncMock]: - """Mock a Smarty client.""" - with ( - patch( - "homeassistant.components.smarty.coordinator.Smarty", - autospec=True, - ) as mock_client, - patch( - "homeassistant.components.smarty.config_flow.Smarty", - new=mock_client, - ), - ): - client = mock_client.return_value - client.update.return_value = True - client.fan_speed = 100 - client.warning = False - client.alarm = False - client.boost = False - client.enable_boost.return_value = True - client.disable_boost.return_value = True - client.supply_air_temperature = 20 - client.extract_air_temperature = 23 - client.outdoor_air_temperature = 24 - client.supply_fan_speed = 66 - client.extract_fan_speed = 100 - client.filter_timer = 31 - client.get_configuration_version.return_value = 111 - client.get_software_version.return_value = 127 - client.reset_filters_timer.return_value = True - yield client - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Return the default mocked config entry.""" - return MockConfigEntry( - domain=DOMAIN, - data={CONF_HOST: "192.168.0.2"}, - entry_id="01JAZ5DPW8C62D620DGYNG2R8H", - ) diff --git a/tests/components/smarty/snapshots/test_binary_sensor.ambr b/tests/components/smarty/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 2f943a25012..00000000000 --- a/tests/components/smarty/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,141 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[binary_sensor.mock_title_alarm-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 
'entity_category': None, - 'entity_id': 'binary_sensor.mock_title_alarm', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Alarm', - 'platform': 'smarty', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'alarm', - 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_alarm', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_alarm-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Mock Title Alarm', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_alarm', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_boost_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.mock_title_boost_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Boost state', - 'platform': 'smarty', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'boost_state', - 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_boost', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_boost_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Boost state', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_boost_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_warning-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.mock_title_warning', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Warning', - 'platform': 'smarty', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'warning', - 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_warning', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_warning-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Mock Title Warning', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_warning', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/smarty/snapshots/test_button.ambr b/tests/components/smarty/snapshots/test_button.ambr deleted file mode 100644 index 38849bd2b2e..00000000000 --- a/tests/components/smarty/snapshots/test_button.ambr +++ /dev/null @@ -1,47 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[button.mock_title_reset_filters_timer-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 
'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.mock_title_reset_filters_timer', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Reset filters timer', - 'platform': 'smarty', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reset_filters_timer', - 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_reset_filters_timer', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[button.mock_title_reset_filters_timer-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Reset filters timer', - }), - 'context': , - 'entity_id': 'button.mock_title_reset_filters_timer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/smarty/snapshots/test_fan.ambr b/tests/components/smarty/snapshots/test_fan.ambr deleted file mode 100644 index 8ca95beeb86..00000000000 --- a/tests/components/smarty/snapshots/test_fan.ambr +++ /dev/null @@ -1,54 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[fan.mock_title-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'preset_modes': None, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'fan', - 'entity_category': None, - 'entity_id': 'fan.mock_title', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'smarty', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'fan', - 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[fan.mock_title-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title', - 'percentage': 0, - 'percentage_step': 33.333333333333336, - 'preset_mode': None, - 'preset_modes': None, - 'supported_features': , - }), - 'context': , - 'entity_id': 'fan.mock_title', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/smarty/snapshots/test_init.ambr b/tests/components/smarty/snapshots/test_init.ambr deleted file mode 100644 index b25cdb9dc3a..00000000000 --- a/tests/components/smarty/snapshots/test_init.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: test_device - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': 111, - 'id': , - 'identifiers': set({ - tuple( - 'smarty', - '01JAZ5DPW8C62D620DGYNG2R8H', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Salda', - 'model': None, - 'model_id': None, - 'name': 'Mock Title', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': 127, - 'via_device_id': None, - }) -# --- diff --git a/tests/components/smarty/snapshots/test_sensor.ambr b/tests/components/smarty/snapshots/test_sensor.ambr deleted file mode 100644 index 
2f713db7f83..00000000000 --- a/tests/components/smarty/snapshots/test_sensor.ambr +++ /dev/null @@ -1,286 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[sensor.mock_title_extract_air_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_extract_air_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Extract air temperature', - 'platform': 'smarty', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'extract_air_temperature', - 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_extract_air_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.mock_title_extract_air_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Mock Title Extract air temperature', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_extract_air_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '23', - }) -# --- -# name: test_all_entities[sensor.mock_title_extract_fan_speed-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_extract_fan_speed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Extract fan speed', - 'platform': 'smarty', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'extract_fan_speed', - 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_extract_fan_speed', - 'unit_of_measurement': 'rpm', - }) -# --- -# name: test_all_entities[sensor.mock_title_extract_fan_speed-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Extract fan speed', - 'unit_of_measurement': 'rpm', - }), - 'context': , - 'entity_id': 'sensor.mock_title_extract_fan_speed', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_all_entities[sensor.mock_title_filter_days_left-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_filter_days_left', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Filter days left', - 'platform': 'smarty', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'filter_days_left', - 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_filter_days_left', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.mock_title_filter_days_left-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 
'timestamp', - 'friendly_name': 'Mock Title Filter days left', - }), - 'context': , - 'entity_id': 'sensor.mock_title_filter_days_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2023-11-21T01:00:00+00:00', - }) -# --- -# name: test_all_entities[sensor.mock_title_outdoor_air_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_outdoor_air_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outdoor air temperature', - 'platform': 'smarty', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'outdoor_air_temperature', - 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_outdoor_air_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.mock_title_outdoor_air_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Mock Title Outdoor air temperature', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_outdoor_air_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '24', - }) -# --- -# name: test_all_entities[sensor.mock_title_supply_air_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_supply_air_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Supply air temperature', - 'platform': 'smarty', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'supply_air_temperature', - 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_supply_air_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.mock_title_supply_air_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Mock Title Supply air temperature', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_supply_air_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- -# name: test_all_entities[sensor.mock_title_supply_fan_speed-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_supply_fan_speed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Supply fan speed', - 'platform': 'smarty', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'supply_fan_speed', - 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_supply_fan_speed', - 
'unit_of_measurement': 'rpm', - }) -# --- -# name: test_all_entities[sensor.mock_title_supply_fan_speed-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Supply fan speed', - 'unit_of_measurement': 'rpm', - }), - 'context': , - 'entity_id': 'sensor.mock_title_supply_fan_speed', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '66', - }) -# --- diff --git a/tests/components/smarty/snapshots/test_switch.ambr b/tests/components/smarty/snapshots/test_switch.ambr deleted file mode 100644 index be1da7c6961..00000000000 --- a/tests/components/smarty/snapshots/test_switch.ambr +++ /dev/null @@ -1,47 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[switch.mock_title_boost-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.mock_title_boost', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Boost', - 'platform': 'smarty', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'boost', - 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_boost', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[switch.mock_title_boost-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Boost', - }), - 'context': , - 'entity_id': 'switch.mock_title_boost', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/smarty/test_binary_sensor.py b/tests/components/smarty/test_binary_sensor.py deleted file mode 100644 index d28fb44e1ce..00000000000 --- a/tests/components/smarty/test_binary_sensor.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Tests for the Smarty binary sensor platform.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_smarty: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.smarty.PLATFORMS", [Platform.BINARY_SENSOR]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/smarty/test_button.py b/tests/components/smarty/test_button.py deleted file mode 100644 index 0a7b67f2be6..00000000000 --- a/tests/components/smarty/test_button.py +++ /dev/null @@ -1,45 +0,0 @@ -"""Tests for the Smarty button platform.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_smarty: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.smarty.PLATFORMS", [Platform.BUTTON]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_setting_value( - hass: HomeAssistant, - mock_smarty: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test setting value.""" - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - target={ATTR_ENTITY_ID: "button.mock_title_reset_filters_timer"}, - blocking=True, - ) - mock_smarty.reset_filters_timer.assert_called_once_with() diff --git a/tests/components/smarty/test_config_flow.py b/tests/components/smarty/test_config_flow.py deleted file mode 100644 index fad4f27ca1c..00000000000 --- a/tests/components/smarty/test_config_flow.py +++ /dev/null @@ -1,165 +0,0 @@ -"""Test the smarty config flow.""" - -from unittest.mock import AsyncMock - -from homeassistant.components.smarty.const import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_NAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_full_flow( - hass: HomeAssistant, mock_smarty: AsyncMock, mock_setup_entry: AsyncMock -) -> None: - """Test the full flow.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "192.168.0.2"}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "192.168.0.2" - assert result["data"] == {CONF_HOST: "192.168.0.2"} - - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_cannot_connect( - hass: HomeAssistant, mock_smarty: AsyncMock, mock_setup_entry: AsyncMock -) -> None: - """Test we handle cannot connect error.""" - - mock_smarty.update.return_value = False - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "192.168.0.2"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - mock_smarty.update.return_value = True - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "192.168.0.2"}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_unknown_error( - hass: HomeAssistant, mock_smarty: AsyncMock, mock_setup_entry: AsyncMock -) -> None: - """Test we handle unknown error.""" - - mock_smarty.update.side_effect = Exception - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - 
result["flow_id"], - {CONF_HOST: "192.168.0.2"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "unknown"} - - mock_smarty.update.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "192.168.0.2"}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_existing_entry( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Test we handle existing entry.""" - mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "192.168.0.2"}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_import_flow( - hass: HomeAssistant, mock_smarty: AsyncMock, mock_setup_entry: AsyncMock -) -> None: - """Test the import flow.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={CONF_HOST: "192.168.0.2", CONF_NAME: "Smarty"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Smarty" - assert result["data"] == {CONF_HOST: "192.168.0.2"} - - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_import_cannot_connect( - hass: HomeAssistant, mock_smarty: AsyncMock -) -> None: - """Test we handle cannot connect error.""" - - mock_smarty.update.return_value = False - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={CONF_HOST: "192.168.0.2", CONF_NAME: "Smarty"}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "cannot_connect" - - -async def test_import_unknown_error( - hass: HomeAssistant, mock_smarty: AsyncMock -) -> None: - """Test we handle unknown error.""" - - mock_smarty.update.side_effect = Exception - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={CONF_HOST: "192.168.0.2", CONF_NAME: "Smarty"}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "unknown" diff --git a/tests/components/smarty/test_fan.py b/tests/components/smarty/test_fan.py deleted file mode 100644 index 2c0135b7aa2..00000000000 --- a/tests/components/smarty/test_fan.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Tests for the Smarty fan platform.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_smarty: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.smarty.PLATFORMS", [Platform.FAN]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/smarty/test_init.py b/tests/components/smarty/test_init.py deleted file mode 100644 index 0366ea9eade..00000000000 --- a/tests/components/smarty/test_init.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Tests for the Smarty component.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.components.smarty import DOMAIN -from homeassistant.const import CONF_HOST, CONF_NAME -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.helpers import device_registry as dr, issue_registry as ir -from homeassistant.setup import async_setup_component - -from . import setup_integration - -from tests.common import MockConfigEntry - - -async def test_import_flow( - hass: HomeAssistant, - mock_smarty: AsyncMock, - issue_registry: ir.IssueRegistry, - mock_setup_entry: AsyncMock, -) -> None: - """Test import flow.""" - assert await async_setup_component( - hass, DOMAIN, {DOMAIN: {CONF_HOST: "192.168.0.2", CONF_NAME: "smarty"}} - ) - await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert (HOMEASSISTANT_DOMAIN, "deprecated_yaml_smarty") in issue_registry.issues - - -async def test_import_flow_already_exists( - hass: HomeAssistant, - mock_smarty: AsyncMock, - issue_registry: ir.IssueRegistry, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test import flow when entry already exists.""" - mock_config_entry.add_to_hass(hass) - assert await async_setup_component( - hass, DOMAIN, {DOMAIN: {CONF_HOST: "192.168.0.2", CONF_NAME: "smarty"}} - ) - await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert (HOMEASSISTANT_DOMAIN, "deprecated_yaml_smarty") in issue_registry.issues - - -async def test_import_flow_error( - hass: HomeAssistant, - mock_smarty: AsyncMock, - issue_registry: ir.IssueRegistry, - mock_setup_entry: AsyncMock, -) -> None: - """Test import flow when error occurs.""" - mock_smarty.update.return_value = False - assert await async_setup_component( - hass, DOMAIN, {DOMAIN: {CONF_HOST: "192.168.0.2", CONF_NAME: "smarty"}} - ) - await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(DOMAIN)) == 0 - assert ( - DOMAIN, - "deprecated_yaml_import_issue_cannot_connect", - ) in issue_registry.issues - - -async def test_device( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_smarty: AsyncMock, - mock_config_entry: MockConfigEntry, - device_registry: dr.DeviceRegistry, -) -> None: - """Test device.""" - await setup_integration(hass, mock_config_entry) - device = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.entry_id)} - ) - assert device - assert device == snapshot diff --git a/tests/components/smarty/test_sensor.py b/tests/components/smarty/test_sensor.py deleted file mode 100644 index a534a2ebb0f..00000000000 --- a/tests/components/smarty/test_sensor.py +++ /dev/null @@ -1,29 +0,0 
@@ -"""Tests for the Smarty sensor platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.freeze_time("2023-10-21") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_smarty: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.smarty.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/smarty/test_switch.py b/tests/components/smarty/test_switch.py deleted file mode 100644 index 1a6748e2d23..00000000000 --- a/tests/components/smarty/test_switch.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Tests for the Smarty switch platform.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_smarty: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.smarty.PLATFORMS", [Platform.SWITCH]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_setting_value( - hass: HomeAssistant, - mock_smarty: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test setting value.""" - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - target={ATTR_ENTITY_ID: "switch.mock_title_boost"}, - blocking=True, - ) - mock_smarty.enable_boost.assert_called_once_with() - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - target={ATTR_ENTITY_ID: "switch.mock_title_boost"}, - blocking=True, - ) - mock_smarty.disable_boost.assert_called_once_with() diff --git a/tests/components/smhi/common.py b/tests/components/smhi/common.py new file mode 100644 index 00000000000..7339ba76ac1 --- /dev/null +++ b/tests/components/smhi/common.py @@ -0,0 +1,11 @@ +"""Common test utilities.""" + +from unittest.mock import Mock + + +class AsyncMock(Mock): + """Implements Mock async.""" + + async def __call__(self, *args, **kwargs): + """Hack for async support for Mock.""" + return super().__call__(*args, **kwargs) diff --git a/tests/components/smhi/snapshots/test_weather.ambr b/tests/components/smhi/snapshots/test_weather.ambr index 2c0884d804d..0d2f6b3b3bf 100644 --- a/tests/components/smhi/snapshots/test_weather.ambr +++ b/tests/components/smhi/snapshots/test_weather.ambr @@ -6,12 +6,12 @@ dict({ 'cloud_coverage': 100, 'condition': 'clear-night', - 'datetime': '2023-08-08T00:00:00+00:00', + 'datetime': '2023-08-08T00:00:00', 
'humidity': 100, 'precipitation': 0.0, - 'pressure': 992.4, - 'temperature': 18.2, - 'templow': 18.2, + 'pressure': 992.0, + 'temperature': 18.0, + 'templow': 18.0, 'wind_bearing': 103, 'wind_gust_speed': 23.76, 'wind_speed': 9.72, @@ -19,12 +19,12 @@ dict({ 'cloud_coverage': 100, 'condition': 'clear-night', - 'datetime': '2023-08-08T01:00:00+00:00', + 'datetime': '2023-08-08T01:00:00', 'humidity': 100, 'precipitation': 0.0, - 'pressure': 992.4, - 'temperature': 17.5, - 'templow': 17.5, + 'pressure': 992.0, + 'temperature': 18.0, + 'templow': 18.0, 'wind_bearing': 104, 'wind_gust_speed': 27.36, 'wind_speed': 9.72, @@ -32,12 +32,12 @@ dict({ 'cloud_coverage': 100, 'condition': 'clear-night', - 'datetime': '2023-08-08T02:00:00+00:00', + 'datetime': '2023-08-08T02:00:00', 'humidity': 97, 'precipitation': 0.0, - 'pressure': 992.2, - 'temperature': 17.6, - 'templow': 17.6, + 'pressure': 992.0, + 'temperature': 18.0, + 'templow': 18.0, 'wind_bearing': 109, 'wind_gust_speed': 32.4, 'wind_speed': 12.96, @@ -45,12 +45,12 @@ dict({ 'cloud_coverage': 100, 'condition': 'sunny', - 'datetime': '2023-08-08T03:00:00+00:00', + 'datetime': '2023-08-08T03:00:00', 'humidity': 96, 'precipitation': 0.0, - 'pressure': 991.7, - 'temperature': 17.1, - 'templow': 17.1, + 'pressure': 991.0, + 'temperature': 17.0, + 'templow': 17.0, 'wind_bearing': 114, 'wind_gust_speed': 32.76, 'wind_speed': 10.08, @@ -66,10 +66,10 @@ 'friendly_name': 'test', 'humidity': 100, 'precipitation_unit': , - 'pressure': 992.4, + 'pressure': 992.0, 'pressure_unit': , 'supported_features': , - 'temperature': 18.4, + 'temperature': 18.0, 'temperature_unit': , 'thunder_probability': 37, 'visibility': 0.4, @@ -80,6 +80,142 @@ 'wind_speed_unit': , }) # --- +# name: test_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-07T12:00:00', + 'humidity': 96, + 'precipitation': 0.0, + 'pressure': 991.0, + 'temperature': 18.0, + 'templow': 15.0, + 'wind_bearing': 114, + 'wind_gust_speed': 32.76, + 'wind_speed': 10.08, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'rainy', + 'datetime': '2023-08-08T12:00:00', + 'humidity': 97, + 'precipitation': 10.6, + 'pressure': 984.0, + 'temperature': 15.0, + 'templow': 11.0, + 'wind_bearing': 183, + 'wind_gust_speed': 27.36, + 'wind_speed': 11.16, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'rainy', + 'datetime': '2023-08-09T12:00:00', + 'humidity': 95, + 'precipitation': 6.3, + 'pressure': 1001.0, + 'temperature': 12.0, + 'templow': 11.0, + 'wind_bearing': 166, + 'wind_gust_speed': 48.24, + 'wind_speed': 18.0, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-10T12:00:00', + 'humidity': 75, + 'precipitation': 4.8, + 'pressure': 1011.0, + 'temperature': 14.0, + 'templow': 10.0, + 'wind_bearing': 174, + 'wind_gust_speed': 29.16, + 'wind_speed': 11.16, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-11T12:00:00', + 'humidity': 69, + 'precipitation': 0.6, + 'pressure': 1015.0, + 'temperature': 18.0, + 'templow': 12.0, + 'wind_bearing': 197, + 'wind_gust_speed': 27.36, + 'wind_speed': 10.08, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-12T12:00:00', + 'humidity': 82, + 'precipitation': 0.0, + 'pressure': 1014.0, + 'temperature': 17.0, + 'templow': 12.0, + 'wind_bearing': 225, + 'wind_gust_speed': 28.08, + 'wind_speed': 8.64, + }), + dict({ + 'cloud_coverage': 75, + 'condition': 
'partlycloudy', + 'datetime': '2023-08-13T12:00:00', + 'humidity': 59, + 'precipitation': 0.0, + 'pressure': 1013.0, + 'temperature': 20.0, + 'templow': 14.0, + 'wind_bearing': 234, + 'wind_gust_speed': 35.64, + 'wind_speed': 14.76, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'partlycloudy', + 'datetime': '2023-08-14T12:00:00', + 'humidity': 56, + 'precipitation': 0.0, + 'pressure': 1015.0, + 'temperature': 21.0, + 'templow': 14.0, + 'wind_bearing': 216, + 'wind_gust_speed': 33.12, + 'wind_speed': 13.68, + }), + dict({ + 'cloud_coverage': 88, + 'condition': 'partlycloudy', + 'datetime': '2023-08-15T12:00:00', + 'humidity': 64, + 'precipitation': 3.6, + 'pressure': 1014.0, + 'temperature': 20.0, + 'templow': 14.0, + 'wind_bearing': 226, + 'wind_gust_speed': 33.12, + 'wind_speed': 13.68, + }), + dict({ + 'cloud_coverage': 75, + 'condition': 'partlycloudy', + 'datetime': '2023-08-16T12:00:00', + 'humidity': 61, + 'precipitation': 2.4, + 'pressure': 1014.0, + 'temperature': 20.0, + 'templow': 14.0, + 'wind_bearing': 233, + 'wind_gust_speed': 33.48, + 'wind_speed': 14.04, + }), + ]), + }) +# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.smhi_test': dict({ @@ -87,12 +223,12 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-07T12:00:00+00:00', + 'datetime': '2023-08-07T12:00:00', 'humidity': 96, 'precipitation': 0.0, - 'pressure': 991.7, - 'temperature': 18.4, - 'templow': 14.8, + 'pressure': 991.0, + 'temperature': 18.0, + 'templow': 15.0, 'wind_bearing': 114, 'wind_gust_speed': 32.76, 'wind_speed': 10.08, @@ -100,12 +236,12 @@ dict({ 'cloud_coverage': 100, 'condition': 'rainy', - 'datetime': '2023-08-08T12:00:00+00:00', + 'datetime': '2023-08-08T12:00:00', 'humidity': 97, 'precipitation': 10.6, - 'pressure': 984.1, - 'temperature': 14.8, - 'templow': 10.6, + 'pressure': 984.0, + 'temperature': 15.0, + 'templow': 11.0, 'wind_bearing': 183, 'wind_gust_speed': 27.36, 'wind_speed': 11.16, @@ -113,11 +249,11 @@ dict({ 'cloud_coverage': 100, 'condition': 'rainy', - 'datetime': '2023-08-09T12:00:00+00:00', + 'datetime': '2023-08-09T12:00:00', 'humidity': 95, 'precipitation': 6.3, - 'pressure': 1001.4, - 'temperature': 12.5, + 'pressure': 1001.0, + 'temperature': 12.0, 'templow': 11.0, 'wind_bearing': 166, 'wind_gust_speed': 48.24, @@ -126,12 +262,12 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-10T12:00:00+00:00', + 'datetime': '2023-08-10T12:00:00', 'humidity': 75, 'precipitation': 4.8, - 'pressure': 1011.1, - 'temperature': 13.9, - 'templow': 10.4, + 'pressure': 1011.0, + 'temperature': 14.0, + 'templow': 10.0, 'wind_bearing': 174, 'wind_gust_speed': 29.16, 'wind_speed': 11.16, @@ -139,12 +275,12 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-11T12:00:00+00:00', + 'datetime': '2023-08-11T12:00:00', 'humidity': 69, 'precipitation': 0.6, - 'pressure': 1015.3, - 'temperature': 17.6, - 'templow': 11.7, + 'pressure': 1015.0, + 'temperature': 18.0, + 'templow': 12.0, 'wind_bearing': 197, 'wind_gust_speed': 27.36, 'wind_speed': 10.08, @@ -152,12 +288,12 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-12T12:00:00+00:00', + 'datetime': '2023-08-12T12:00:00', 'humidity': 82, 'precipitation': 0.0, 'pressure': 1014.0, 'temperature': 17.0, - 'templow': 12.3, + 'templow': 12.0, 'wind_bearing': 225, 'wind_gust_speed': 28.08, 'wind_speed': 8.64, @@ -165,12 +301,12 @@ dict({ 'cloud_coverage': 75, 'condition': 'partlycloudy', - 'datetime': 
'2023-08-13T12:00:00+00:00', + 'datetime': '2023-08-13T12:00:00', 'humidity': 59, 'precipitation': 0.0, - 'pressure': 1013.6, + 'pressure': 1013.0, 'temperature': 20.0, - 'templow': 13.6, + 'templow': 14.0, 'wind_bearing': 234, 'wind_gust_speed': 35.64, 'wind_speed': 14.76, @@ -178,12 +314,12 @@ dict({ 'cloud_coverage': 100, 'condition': 'partlycloudy', - 'datetime': '2023-08-14T12:00:00+00:00', + 'datetime': '2023-08-14T12:00:00', 'humidity': 56, 'precipitation': 0.0, - 'pressure': 1015.3, - 'temperature': 20.8, - 'templow': 13.5, + 'pressure': 1015.0, + 'temperature': 21.0, + 'templow': 14.0, 'wind_bearing': 216, 'wind_gust_speed': 33.12, 'wind_speed': 13.68, @@ -191,12 +327,12 @@ dict({ 'cloud_coverage': 88, 'condition': 'partlycloudy', - 'datetime': '2023-08-15T12:00:00+00:00', + 'datetime': '2023-08-15T12:00:00', 'humidity': 64, 'precipitation': 3.6, - 'pressure': 1014.3, - 'temperature': 20.4, - 'templow': 14.3, + 'pressure': 1014.0, + 'temperature': 20.0, + 'templow': 14.0, 'wind_bearing': 226, 'wind_gust_speed': 33.12, 'wind_speed': 13.68, @@ -204,12 +340,12 @@ dict({ 'cloud_coverage': 75, 'condition': 'partlycloudy', - 'datetime': '2023-08-16T12:00:00+00:00', + 'datetime': '2023-08-16T12:00:00', 'humidity': 61, 'precipitation': 2.4, 'pressure': 1014.0, - 'temperature': 20.2, - 'templow': 13.8, + 'temperature': 20.0, + 'templow': 14.0, 'wind_bearing': 233, 'wind_gust_speed': 33.48, 'wind_speed': 14.04, @@ -222,12 +358,12 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-07T12:00:00+00:00', + 'datetime': '2023-08-07T12:00:00', 'humidity': 96, 'precipitation': 0.0, - 'pressure': 991.7, - 'temperature': 18.4, - 'templow': 14.8, + 'pressure': 991.0, + 'temperature': 18.0, + 'templow': 15.0, 'wind_bearing': 114, 'wind_gust_speed': 32.76, 'wind_speed': 10.08, @@ -237,12 +373,12 @@ dict({ 'cloud_coverage': 75, 'condition': 'partlycloudy', - 'datetime': '2023-08-13T12:00:00+00:00', + 'datetime': '2023-08-13T12:00:00', 'humidity': 59, 'precipitation': 0.0, - 'pressure': 1013.6, + 'pressure': 1013.0, 'temperature': 20.0, - 'templow': 13.6, + 'templow': 14.0, 'wind_bearing': 234, 'wind_gust_speed': 35.64, 'wind_speed': 14.76, @@ -252,12 +388,12 @@ dict({ 'cloud_coverage': 100, 'condition': 'fog', - 'datetime': '2023-08-07T09:00:00+00:00', + 'datetime': '2023-08-07T09:00:00', 'humidity': 100, 'precipitation': 0.0, - 'pressure': 992.4, - 'temperature': 18.2, - 'templow': 18.2, + 'pressure': 992.0, + 'temperature': 18.0, + 'templow': 18.0, 'wind_bearing': 103, 'wind_gust_speed': 23.76, 'wind_speed': 9.72, @@ -267,12 +403,12 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-07T15:00:00+00:00', + 'datetime': '2023-08-07T15:00:00', 'humidity': 89, 'precipitation': 0.0, - 'pressure': 991.7, - 'temperature': 16.2, - 'templow': 16.2, + 'pressure': 991.0, + 'temperature': 16.0, + 'templow': 16.0, 'wind_bearing': 108, 'wind_gust_speed': 31.68, 'wind_speed': 12.24, @@ -285,10 +421,10 @@ 'friendly_name': 'test', 'humidity': 100, 'precipitation_unit': , - 'pressure': 992.4, + 'pressure': 992.0, 'pressure_unit': , 'supported_features': , - 'temperature': 18.4, + 'temperature': 18.0, 'temperature_unit': , 'thunder_probability': 37, 'visibility': 0.4, diff --git a/tests/components/smhi/test_config_flow.py b/tests/components/smhi/test_config_flow.py index 4195d1e5d52..a771bcc1e1d 100644 --- a/tests/components/smhi/test_config_flow.py +++ b/tests/components/smhi/test_config_flow.py @@ -217,7 +217,13 @@ async def test_reconfigure_flow( name=entry.title, ) 
- result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM with patch( diff --git a/tests/components/smlight/__init__.py b/tests/components/smlight/__init__.py deleted file mode 100644 index e518e0573ba..00000000000 --- a/tests/components/smlight/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Tests for the SMLIGHT Zigbee adapter integration.""" - -from collections.abc import Callable -from unittest.mock import MagicMock - -from pysmlight.const import Events as SmEvents -from pysmlight.sse import MessageEvent - - -def get_mock_event_function( - mock: MagicMock, event: SmEvents -) -> Callable[[MessageEvent], None]: - """Extract event function from mock call_args.""" - return next( - ( - call_args[0][1] - for call_args in mock.sse.register_callback.call_args_list - if call_args[0][0] == event - ), - None, - ) diff --git a/tests/components/smlight/conftest.py b/tests/components/smlight/conftest.py deleted file mode 100644 index 665a55ba880..00000000000 --- a/tests/components/smlight/conftest.py +++ /dev/null @@ -1,122 +0,0 @@ -"""Common fixtures for the SMLIGHT Zigbee tests.""" - -from collections.abc import AsyncGenerator, Generator -from unittest.mock import AsyncMock, MagicMock, patch - -from pysmlight.sse import sseClient -from pysmlight.web import CmdWrapper, Firmware, Info, Sensors -import pytest - -from homeassistant.components.smlight import PLATFORMS -from homeassistant.components.smlight.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform -from homeassistant.core import HomeAssistant - -from tests.common import ( - MockConfigEntry, - load_json_array_fixture, - load_json_object_fixture, -) - -MOCK_HOST = "slzb-06.local" -MOCK_USERNAME = "test-user" -MOCK_PASSWORD = "test-pass" - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Return the default mocked config entry.""" - return MockConfigEntry( - domain=DOMAIN, - data={ - CONF_HOST: MOCK_HOST, - CONF_USERNAME: MOCK_USERNAME, - CONF_PASSWORD: MOCK_PASSWORD, - }, - unique_id="aa:bb:cc:dd:ee:ff", - ) - - -@pytest.fixture -def mock_config_entry_host() -> MockConfigEntry: - """Return the default mocked config entry, no credentials.""" - return MockConfigEntry( - domain=DOMAIN, - data={ - CONF_HOST: MOCK_HOST, - }, - unique_id="aa:bb:cc:dd:ee:ff", - ) - - -@pytest.fixture -def platforms() -> list[Platform]: - """Platforms, which should be loaded during the test.""" - return PLATFORMS - - -@pytest.fixture(autouse=True) -async def mock_patch_platforms(platforms: list[str]) -> AsyncGenerator[None]: - """Fixture to set up platforms for tests.""" - with patch(f"homeassistant.components.{DOMAIN}.PLATFORMS", platforms): - yield - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.smlight.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_smlight_client(request: pytest.FixtureRequest) -> Generator[MagicMock]: - """Mock the SMLIGHT API client.""" - with ( - patch("homeassistant.components.smlight.Api2", autospec=True) as smlight_mock, - patch("homeassistant.components.smlight.config_flow.Api2", new=smlight_mock), - ): - api = smlight_mock.return_value - api.host = MOCK_HOST - api.get_info.return_value = Info.from_dict( - 
load_json_object_fixture("info.json", DOMAIN) - ) - api.get_sensors.return_value = Sensors.from_dict( - load_json_object_fixture("sensors.json", DOMAIN) - ) - - def get_firmware_side_effect(*args, **kwargs) -> list[Firmware]: - """Return the firmware version.""" - fw_list = [] - if kwargs.get("mode") == "zigbee": - fw_list = load_json_array_fixture("zb_firmware.json", DOMAIN) - else: - fw_list = load_json_array_fixture("esp_firmware.json", DOMAIN) - - return [Firmware.from_dict(fw) for fw in fw_list] - - api.get_firmware_version.side_effect = get_firmware_side_effect - - api.check_auth_needed.return_value = False - api.authenticate.return_value = True - - api.cmds = AsyncMock(spec_set=CmdWrapper) - api.set_toggle = AsyncMock() - api.sse = MagicMock(spec_set=sseClient) - - yield api - - -async def setup_integration( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, -) -> MockConfigEntry: - """Set up the integration.""" - mock_config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - return mock_config_entry diff --git a/tests/components/smlight/fixtures/esp_firmware.json b/tests/components/smlight/fixtures/esp_firmware.json deleted file mode 100644 index 6ea0e1a8b44..00000000000 --- a/tests/components/smlight/fixtures/esp_firmware.json +++ /dev/null @@ -1,35 +0,0 @@ -[ - { - "mode": "ESP", - "type": null, - "notes": "CHANGELOG (Current 2.5.2 vs. Previous 2.3.6):\\r\\nFixed incorrect device type detection for some devices\\r\\nFixed web interface not working on some devices\\r\\nFixed disabled SSID/pass fields\\r\\n", - "rev": "20240830", - "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/core/slzb-06-v2.5.2-ota.bin", - "ver": "v2.5.2", - "dev": false, - "prod": true, - "baud": null - }, - { - "mode": "ESP", - "type": null, - "notes": "Read/write IEEE for CC chips\\r\\nDefault black theme\\r\\nAdd device mac to MDNS ZeroConf\\r\\nBreaking change! 
socket_uptime in /ha_sensors and /metrics now in seconds\\r\\nNew 5 languages\\r\\nAdd manual ZB OTA for 06M\\r\\nAdd warning modal for ZB manual OTA\\r\\nWireGuard can now use hostname instead of IP\\r\\nWiFi AP fixes and improvements\\r\\nImproved management of socket clients\\r\\nFix \"Disable web server when socket is connected\"\\r\\nFix events tag for log\\r\\nFix ZB maual OTA header text\\r\\nFix feedback page stack overflow\\r\\nFix sta drop in AP mode after scan start", - "rev": "20240815", - "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/core/slzb-06-v2.3.6-ota.bin", - "ver": "v2.3.6", - "dev": false, - "prod": true, - "baud": null - }, - { - "mode": "ESP", - "type": null, - "notes": "release of previous version", - "rev": "10112023", - "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/core/slzb-06-0.9.9-ota.bin", - "ver": "0.9.9", - "dev": false, - "prod": true, - "baud": null - } -] diff --git a/tests/components/smlight/fixtures/info.json b/tests/components/smlight/fixtures/info.json deleted file mode 100644 index e3defb4410e..00000000000 --- a/tests/components/smlight/fixtures/info.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "coord_mode": 0, - "device_ip": "192.168.1.161", - "fs_total": 3456, - "fw_channel": "dev", - "legacy_api": 0, - "hostname": "SLZB-06p7", - "MAC": "AA:BB:CC:DD:EE:FF", - "model": "SLZB-06p7", - "ram_total": 296, - "sw_version": "v2.3.6", - "wifi_mode": 0, - "zb_flash_size": 704, - "zb_channel": 0, - "zb_hw": "CC2652P7", - "zb_ram_size": 152, - "zb_version": "20240314", - "zb_type": 0 -} diff --git a/tests/components/smlight/fixtures/logs.txt b/tests/components/smlight/fixtures/logs.txt deleted file mode 100644 index f04dc881514..00000000000 --- a/tests/components/smlight/fixtures/logs.txt +++ /dev/null @@ -1 +0,0 @@ -[04:28:51] setup | Starting firmware: v2.3.6\n[04:28:52] ConfigHelper | LittleFS mounted\n[04:28:52] ConfigHelper | load config\n[04:28:52] ConfigHelper | config open: Ok\n[04:28:52] setup | Config loaded\n[04:28:52] setup | Reboot reason: 3\n[04:28:52] setup | Coordinator mode: LAN\n[04:28:52] setup | Device type: SLZB-06P10\n[04:28:52] setup | Radio mode: \"ZB COORD\" Radio FW version: 20240716 Radio FW CH: PROD\n[04:28:52] Network | init\n[04:28:52] L_Y,L_B | status: 1\n[04:28:54] Network | EVENT_ETH_START\n[04:28:54] Network | EVENT_ETH_CONNECTED\n[04:28:54] Network | [MDNS] Started\n[04:28:54] Network | EVENT_ETH_GOT_IP\n[04:28:54] Network | ETH MAC: AA:BB:CC:DD:EE:FF IPv4: 192.168.0.11 GW: 192.168.0.1 Speed: 100Mbps DNS1: 192.168.0.1 DNS2: 0.0.0.0\n[04:28:54] Network | fireNetworkUp\n[04:28:54] taskZB | Waiting for zbChk\n[04:28:54] Web | Webserver started \ No newline at end of file diff --git a/tests/components/smlight/fixtures/sensors.json b/tests/components/smlight/fixtures/sensors.json deleted file mode 100644 index ea1fb9c1899..00000000000 --- a/tests/components/smlight/fixtures/sensors.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "esp32_temp": 35.0, - "zb_temp": 32.7, - "uptime": 508125, - "socket_uptime": 127, - "ram_usage": 99, - "fs_used": 188, - "ethernet": true, - "wifi_connected": false, - "wifi_status": 255, - "disable_leds": false, - "night_mode": true, - "auto_zigbee": false, - "vpn_enabled": false, - "vpn_status": true -} diff --git a/tests/components/smlight/fixtures/zb_firmware.json b/tests/components/smlight/fixtures/zb_firmware.json deleted file mode 100644 index ca9d10f87ac..00000000000 --- a/tests/components/smlight/fixtures/zb_firmware.json +++ /dev/null @@ -1,46 +0,0 @@ -[ - { - "mode": "ZB", - 
"type": 0, - "notes": "SMLIGHT latest Coordinator release for CC2674P10 chips [16-Jul-2024]:
- +20dB TRANSMIT POWER SUPPORT;
- SDK 7.41 based (latest);
", - "rev": "20240716", - "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/zigbee/slzb06p10/znp-SLZB-06P10-20240716.bin", - "ver": "20240716", - "dev": false, - "prod": true, - "baud": 115200 - }, - { - "mode": "ZB", - "type": 1, - "notes": "SMLIGHT latest ROUTER release for CC2674P10 chips [16-Jul-2024]:
- SDK 7.41 based (latest);
Terms of use", - "rev": "20240716", - "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/zigbee/slzb06p10/zr-ZR_SLZB-06P10-20240716.bin", - "ver": "20240716", - "dev": false, - "prod": true, - "baud": 0 - }, - { - "mode": "ZB", - "type": 0, - "notes": "SMLIGHT Coordinator release for CC2674P10 chips [15-Mar-2024]:
- Engineering (dev) version, not recommended (INT);
- SDK 7.40 based (latest);
- Baudrate: 115200;
Terms of use", - "rev": "20240315", - "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/zigbee/slzb06p10/znp_LP_EM_CC2674P10_SM_tirtos7_ticlangNR.bin", - "ver": "20240315", - "dev": false, - "prod": false, - "baud": 115200 - }, - { - "mode": "ZB", - "type": 0, - "notes": "SMLIGHT Coordinator release for CC2674P10 chips [14-Mar-2024]:
- Factory flashed firmware (EXT);
- SDK 7.40 based (latest);
- Baudrate: 115200;
Terms of use", - "rev": "20240314", - "link": "https://smlight.tech/flasher/firmware/bin/slzb06x/zigbee/slzb06p10/znp_LP_EM_CC2674P10_SM_tirtos7_ticlangNP.bin", - "ver": "20240314", - "dev": false, - "prod": false, - "baud": 115200 - } -] diff --git a/tests/components/smlight/snapshots/test_binary_sensor.ambr b/tests/components/smlight/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 8becf5b2567..00000000000 --- a/tests/components/smlight/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,189 +0,0 @@ -# serializer version: 1 -# name: test_all_binary_sensors[binary_sensor.mock_title_ethernet-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_title_ethernet', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Ethernet', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ethernet', - 'unique_id': 'aa:bb:cc:dd:ee:ff_ethernet', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensors[binary_sensor.mock_title_ethernet-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Mock Title Ethernet', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_ethernet', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_all_binary_sensors[binary_sensor.mock_title_internet-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_title_internet', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Internet', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'internet', - 'unique_id': 'aa:bb:cc:dd:ee:ff_internet', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensors[binary_sensor.mock_title_internet-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Mock Title Internet', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_internet', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_all_binary_sensors[binary_sensor.mock_title_vpn-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_title_vpn', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VPN', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vpn', - 
'unique_id': 'aa:bb:cc:dd:ee:ff_vpn', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensors[binary_sensor.mock_title_vpn-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Mock Title VPN', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_vpn', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_all_binary_sensors[binary_sensor.mock_title_wi_fi-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_title_wi_fi', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Wi-Fi', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wifi', - 'unique_id': 'aa:bb:cc:dd:ee:ff_wifi', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_binary_sensors[binary_sensor.mock_title_wi_fi-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Mock Title Wi-Fi', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_wi_fi', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/smlight/snapshots/test_diagnostics.ambr b/tests/components/smlight/snapshots/test_diagnostics.ambr deleted file mode 100644 index 97177de1704..00000000000 --- a/tests/components/smlight/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,27 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'info': dict({ - 'MAC': 'AA:BB:CC:DD:EE:FF', - 'coord_mode': 0, - 'device_ip': '192.168.1.161', - 'fs_total': 3456, - 'fw_channel': 'dev', - 'hostname': 'SLZB-06p7', - 'legacy_api': 0, - 'model': 'SLZB-06p7', - 'ram_total': 296, - 'sw_version': 'v2.3.6', - 'wifi_mode': 0, - 'zb_channel': 0, - 'zb_flash_size': 704, - 'zb_hw': 'CC2652P7', - 'zb_ram_size': 152, - 'zb_type': 0, - 'zb_version': '20240314', - }), - 'log': list([ - '[04:28:51] setup | Starting firmware: v2.3.6\\n[04:28:52] ConfigHelper | LittleFS mounted\\n[04:28:52] ConfigHelper | load config\\n[04:28:52] ConfigHelper | config open: Ok\\n[04:28:52] setup | Config loaded\\n[04:28:52] setup | Reboot reason: 3\\n[04:28:52] setup | Coordinator mode: LAN\\n[04:28:52] setup | Device type: SLZB-06P10\\n[04:28:52] setup | Radio mode: \\"ZB COORD\\" Radio FW version: 20240716 Radio FW CH: PROD\\n[04:28:52] Network | init\\n[04:28:52] L_Y,L_B | status: 1\\n[04:28:54] Network | EVENT_ETH_START\\n[04:28:54] Network | EVENT_ETH_CONNECTED\\n[04:28:54] Network | [MDNS] Started\\n[04:28:54] Network | EVENT_ETH_GOT_IP\\n[04:28:54] Network | ETH MAC: AA:BB:CC:DD:EE:FF IPv4: 192.168.0.11 GW: 192.168.0.1 Speed: 100Mbps DNS1: 192.168.0.1 DNS2: 0.0.0.0\\n[04:28:54] Network | fireNetworkUp\\n[04:28:54] taskZB | Waiting for zbChk\\n[04:28:54] Web | Webserver started', - ]), - }) -# --- diff --git a/tests/components/smlight/snapshots/test_init.ambr b/tests/components/smlight/snapshots/test_init.ambr deleted file mode 100644 index 598166e537b..00000000000 --- a/tests/components/smlight/snapshots/test_init.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: 
test_device_info - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://slzb-06.local', - 'connections': set({ - tuple( - 'mac', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'SMLIGHT', - 'model': 'SLZB-06p7', - 'model_id': None, - 'name': 'Mock Title', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': 'core: v2.3.6 / zigbee: 20240314', - 'via_device_id': None, - }) -# --- diff --git a/tests/components/smlight/snapshots/test_sensor.ambr b/tests/components/smlight/snapshots/test_sensor.ambr deleted file mode 100644 index 262ecfe1544..00000000000 --- a/tests/components/smlight/snapshots/test_sensor.ambr +++ /dev/null @@ -1,471 +0,0 @@ -# serializer version: 1 -# name: test_sensors[sensor.mock_title_connection_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'eth', - 'wifi', - 'usb', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_connection_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Connection mode', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_mode', - 'unique_id': 'aa:bb:cc:dd:ee:ff_device_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.mock_title_connection_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Mock Title Connection mode', - 'options': list([ - 'eth', - 'wifi', - 'usb', - ]), - }), - 'context': , - 'entity_id': 'sensor.mock_title_connection_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'eth', - }) -# --- -# name: test_sensors[sensor.mock_title_core_chip_temp-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_core_chip_temp', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Core chip temp', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'core_temperature', - 'unique_id': 'aa:bb:cc:dd:ee:ff_core_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.mock_title_core_chip_temp-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Mock Title Core chip temp', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_core_chip_temp', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '35.0', - }) -# --- -# name: test_sensors[sensor.mock_title_core_uptime-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_core_uptime', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Core uptime', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'core_uptime', - 'unique_id': 'aa:bb:cc:dd:ee:ff_core_uptime', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.mock_title_core_uptime-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Core uptime', - }), - 'context': , - 'entity_id': 'sensor.mock_title_core_uptime', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-06-25T02:51:15+00:00', - }) -# --- -# name: test_sensors[sensor.mock_title_filesystem_usage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_filesystem_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Filesystem usage', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'fs_usage', - 'unique_id': 'aa:bb:cc:dd:ee:ff_fs_usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.mock_title_filesystem_usage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'Mock Title Filesystem usage', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_filesystem_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '188', - }) -# --- -# name: test_sensors[sensor.mock_title_firmware_channel-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'dev', - 'release', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_firmware_channel', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Firmware channel', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'firmware_channel', - 'unique_id': 'aa:bb:cc:dd:ee:ff_firmware_channel', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.mock_title_firmware_channel-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Mock Title Firmware channel', - 'options': list([ - 'dev', - 'release', - ]), - }), - 'context': , - 'entity_id': 'sensor.mock_title_firmware_channel', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'dev', - }) -# --- -# name: 
test_sensors[sensor.mock_title_ram_usage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_ram_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'RAM usage', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ram_usage', - 'unique_id': 'aa:bb:cc:dd:ee:ff_ram_usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.mock_title_ram_usage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': 'Mock Title RAM usage', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_ram_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '99', - }) -# --- -# name: test_sensors[sensor.mock_title_zigbee_chip_temp-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_zigbee_chip_temp', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Zigbee chip temp', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'zigbee_temperature', - 'unique_id': 'aa:bb:cc:dd:ee:ff_zigbee_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.mock_title_zigbee_chip_temp-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Mock Title Zigbee chip temp', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_title_zigbee_chip_temp', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '32.7', - }) -# --- -# name: test_sensors[sensor.mock_title_zigbee_type-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'coordinator', - 'router', - 'thread', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_zigbee_type', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Zigbee type', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'zigbee_type', - 'unique_id': 'aa:bb:cc:dd:ee:ff_zigbee_type', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.mock_title_zigbee_type-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Mock Title Zigbee type', - 'options': list([ - 'coordinator', - 'router', - 'thread', - ]), - }), - 'context': , - 'entity_id': 
'sensor.mock_title_zigbee_type', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'coordinator', - }) -# --- -# name: test_sensors[sensor.mock_title_zigbee_uptime-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_title_zigbee_uptime', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Zigbee uptime', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'socket_uptime', - 'unique_id': 'aa:bb:cc:dd:ee:ff_socket_uptime', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.mock_title_zigbee_uptime-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Zigbee uptime', - }), - 'context': , - 'entity_id': 'sensor.mock_title_zigbee_uptime', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-06-30T23:57:53+00:00', - }) -# --- diff --git a/tests/components/smlight/snapshots/test_switch.ambr b/tests/components/smlight/snapshots/test_switch.ambr deleted file mode 100644 index 733d002be0f..00000000000 --- a/tests/components/smlight/snapshots/test_switch.ambr +++ /dev/null @@ -1,189 +0,0 @@ -# serializer version: 1 -# name: test_switch_setup[switch.mock_title_auto_zigbee_update-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_title_auto_zigbee_update', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Auto Zigbee update', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'auto_zigbee_update', - 'unique_id': 'aa:bb:cc:dd:ee:ff-auto_zigbee_update', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[switch.mock_title_auto_zigbee_update-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Mock Title Auto Zigbee update', - }), - 'context': , - 'entity_id': 'switch.mock_title_auto_zigbee_update', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch_setup[switch.mock_title_disable_leds-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.mock_title_disable_leds', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Disable LEDs', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disable_led', - 'unique_id': 'aa:bb:cc:dd:ee:ff-disable_led', - 'unit_of_measurement': None, - }) -# --- -# 
name: test_switch_setup[switch.mock_title_disable_leds-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Mock Title Disable LEDs', - }), - 'context': , - 'entity_id': 'switch.mock_title_disable_leds', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch_setup[switch.mock_title_led_night_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.mock_title_led_night_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'LED night mode', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'night_mode', - 'unique_id': 'aa:bb:cc:dd:ee:ff-night_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[switch.mock_title_led_night_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Mock Title LED night mode', - }), - 'context': , - 'entity_id': 'switch.mock_title_led_night_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch_setup[switch.mock_title_vpn_enabled-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.mock_title_vpn_enabled', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VPN enabled', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vpn_enabled', - 'unique_id': 'aa:bb:cc:dd:ee:ff-vpn_enabled', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch_setup[switch.mock_title_vpn_enabled-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Mock Title VPN enabled', - }), - 'context': , - 'entity_id': 'switch.mock_title_vpn_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/smlight/snapshots/test_update.ambr b/tests/components/smlight/snapshots/test_update.ambr deleted file mode 100644 index ed0085dcdc8..00000000000 --- a/tests/components/smlight/snapshots/test_update.ambr +++ /dev/null @@ -1,119 +0,0 @@ -# serializer version: 1 -# name: test_update_setup[update.mock_title_core_firmware-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'update', - 'entity_category': , - 'entity_id': 'update.mock_title_core_firmware', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Core firmware', - 'platform': 'smlight', - 'previous_unique_id': None, - 
'supported_features': , - 'translation_key': 'core_update', - 'unique_id': 'aa:bb:cc:dd:ee:ff-core_update', - 'unit_of_measurement': None, - }) -# --- -# name: test_update_setup[update.mock_title_core_firmware-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'auto_update': False, - 'device_class': 'firmware', - 'display_precision': 0, - 'entity_picture': 'https://brands.home-assistant.io/_/smlight/icon.png', - 'friendly_name': 'Mock Title Core firmware', - 'in_progress': False, - 'installed_version': 'v2.3.6', - 'latest_version': 'v2.5.2', - 'release_summary': None, - 'release_url': None, - 'skipped_version': None, - 'supported_features': , - 'title': None, - 'update_percentage': None, - }), - 'context': , - 'entity_id': 'update.mock_title_core_firmware', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_update_setup[update.mock_title_zigbee_firmware-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'update', - 'entity_category': , - 'entity_id': 'update.mock_title_zigbee_firmware', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Zigbee firmware', - 'platform': 'smlight', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'zigbee_update', - 'unique_id': 'aa:bb:cc:dd:ee:ff-zigbee_update', - 'unit_of_measurement': None, - }) -# --- -# name: test_update_setup[update.mock_title_zigbee_firmware-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'auto_update': False, - 'device_class': 'firmware', - 'display_precision': 0, - 'entity_picture': 'https://brands.home-assistant.io/_/smlight/icon.png', - 'friendly_name': 'Mock Title Zigbee firmware', - 'in_progress': False, - 'installed_version': '20240314', - 'latest_version': '20240716', - 'release_summary': None, - 'release_url': None, - 'skipped_version': None, - 'supported_features': , - 'title': None, - 'update_percentage': None, - }), - 'context': , - 'entity_id': 'update.mock_title_zigbee_firmware', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/smlight/test_binary_sensor.py b/tests/components/smlight/test_binary_sensor.py deleted file mode 100644 index b1d72b66dcf..00000000000 --- a/tests/components/smlight/test_binary_sensor.py +++ /dev/null @@ -1,107 +0,0 @@ -"""Tests for the SMLIGHT binary sensor platform.""" - -from unittest.mock import MagicMock - -from freezegun.api import FrozenDateTimeFactory -from pysmlight.const import Events -from pysmlight.sse import MessageEvent -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.smlight.const import SCAN_INTERNET_INTERVAL -from homeassistant.const import STATE_ON, STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import get_mock_event_function -from .conftest import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - -pytestmark = [ - pytest.mark.usefixtures( - "mock_smlight_client", - ) -] - -MOCK_INET_STATE = MessageEvent( - type="EVENT_INET_STATE", - message="EVENT_INET_STATE", - data="ok", - origin="http://slzb-06.local", - last_event_id="", -) - - -@pytest.fixture -def platforms() -> list[Platform]: - """Platforms, which should be loaded during the test.""" - return [Platform.BINARY_SENSOR] - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_binary_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test the SMLIGHT binary sensors.""" - entry = await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) - - await hass.config_entries.async_unload(entry.entry_id) - - -async def test_disabled_by_default_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, -) -> None: - """Test wifi sensor is disabled by default .""" - await setup_integration(hass, mock_config_entry) - - for sensor in ("wi_fi", "vpn"): - assert not hass.states.get(f"binary_sensor.mock_title_{sensor}") - - assert ( - entry := entity_registry.async_get(f"binary_sensor.mock_title_{sensor}") - ) - assert entry.disabled - assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION - - -async def test_internet_sensor_event( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, -) -> None: - """Test internet sensor event.""" - await setup_integration(hass, mock_config_entry) - - state = hass.states.get("binary_sensor.mock_title_internet") - assert state is not None - assert state.state == STATE_UNKNOWN - - assert len(mock_smlight_client.get_param.mock_calls) == 1 - mock_smlight_client.get_param.assert_called_with("inetState") - - freezer.tick(SCAN_INTERNET_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert len(mock_smlight_client.get_param.mock_calls) == 2 - mock_smlight_client.get_param.assert_called_with("inetState") - - event_function = get_mock_event_function( - mock_smlight_client, Events.EVENT_INET_STATE - ) - - event_function(MOCK_INET_STATE) - await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.mock_title_internet") - assert state is not None - assert state.state == STATE_ON diff --git a/tests/components/smlight/test_button.py b/tests/components/smlight/test_button.py deleted file mode 100644 index 3721ee815e6..00000000000 --- a/tests/components/smlight/test_button.py +++ /dev/null @@ -1,117 +0,0 @@ -"""Tests for SMLIGHT SLZB-06 button entities.""" - -from unittest.mock import MagicMock - -from freezegun.api import FrozenDateTimeFactory -from pysmlight import Info -import pytest - -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.components.smlight.const import SCAN_INTERVAL -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .conftest import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed - - -@pytest.fixture -def platforms() -> Platform | list[Platform]: 
- """Platforms, which should be loaded during the test.""" - return [Platform.BUTTON] - - -MOCK_ROUTER = Info(MAC="AA:BB:CC:DD:EE:FF", zb_type=1) - - -@pytest.mark.parametrize( - ("entity_id", "method"), - [ - ("core_restart", "reboot"), - ("zigbee_flash_mode", "zb_bootloader"), - ("zigbee_restart", "zb_restart"), - ("reconnect_zigbee_router", "zb_router"), - ], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_buttons( - hass: HomeAssistant, - entity_id: str, - entity_registry: er.EntityRegistry, - method: str, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, -) -> None: - """Test creation of button entities.""" - mock_smlight_client.get_info.return_value = MOCK_ROUTER - await setup_integration(hass, mock_config_entry) - - state = hass.states.get(f"button.mock_title_{entity_id}") - assert state is not None - assert state.state == STATE_UNKNOWN - - entry = entity_registry.async_get(f"button.mock_title_{entity_id}") - assert entry is not None - assert entry.unique_id == f"aa:bb:cc:dd:ee:ff-{entity_id}" - - mock_method = getattr(mock_smlight_client.cmds, method) - - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: f"button.mock_title_{entity_id}"}, - blocking=True, - ) - - assert len(mock_method.mock_calls) == 1 - mock_method.assert_called_with() - - -@pytest.mark.parametrize("entity_id", ["zigbee_flash_mode", "reconnect_zigbee_router"]) -async def test_disabled_by_default_buttons( - hass: HomeAssistant, - entity_id: str, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, -) -> None: - """Test the disabled by default buttons.""" - mock_smlight_client.get_info.return_value = MOCK_ROUTER - await setup_integration(hass, mock_config_entry) - - assert not hass.states.get(f"button.mock_{entity_id}") - - assert (entry := entity_registry.async_get(f"button.mock_title_{entity_id}")) - assert entry.disabled - assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION - - -async def test_remove_router_reconnect( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, -) -> None: - """Test removal of orphaned router reconnect button.""" - save_mock = mock_smlight_client.get_info.return_value - mock_smlight_client.get_info.return_value = MOCK_ROUTER - mock_config_entry = await setup_integration(hass, mock_config_entry) - - entities = er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id - ) - assert len(entities) == 4 - assert entities[3].unique_id == "aa:bb:cc:dd:ee:ff-reconnect_zigbee_router" - - mock_smlight_client.get_info.return_value = save_mock - - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - - await hass.async_block_till_done() - - entity = entity_registry.async_get("button.mock_title_reconnect_zigbee_router") - assert entity is None diff --git a/tests/components/smlight/test_config_flow.py b/tests/components/smlight/test_config_flow.py deleted file mode 100644 index 2fd39f75704..00000000000 --- a/tests/components/smlight/test_config_flow.py +++ /dev/null @@ -1,494 +0,0 @@ -"""Test the SMLIGHT SLZB config flow.""" - -from ipaddress import ip_address -from unittest.mock import AsyncMock, MagicMock - -from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError -import pytest - -from homeassistant.components import zeroconf -from homeassistant.components.smlight.const import 
DOMAIN -from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .conftest import MOCK_HOST, MOCK_PASSWORD, MOCK_USERNAME - -from tests.common import MockConfigEntry - -DISCOVERY_INFO = zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("127.0.0.1"), - ip_addresses=[ip_address("127.0.0.1")], - hostname="slzb-06.local.", - name="mock_name", - port=6638, - properties={"mac": "AA:BB:CC:DD:EE:FF"}, - type="mock_type", -) - -DISCOVERY_INFO_LEGACY = zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("127.0.0.1"), - ip_addresses=[ip_address("127.0.0.1")], - hostname="slzb-06.local.", - name="mock_name", - port=6638, - properties={}, - type="mock_type", -) - - -@pytest.mark.usefixtures("mock_smlight_client") -async def test_user_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: - """Test the full manual user flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: MOCK_HOST, - }, - ) - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "SLZB-06p7" - assert result2["data"] == { - CONF_HOST: MOCK_HOST, - } - assert result2["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_zeroconf_flow( - hass: HomeAssistant, - mock_smlight_client: MagicMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test the zeroconf flow.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_ZEROCONF}, data=DISCOVERY_INFO - ) - - assert result["description_placeholders"] == {"host": MOCK_HOST} - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_discovery" - - progress = hass.config_entries.flow.async_progress() - assert len(progress) == 1 - assert progress[0]["flow_id"] == result["flow_id"] - assert progress[0]["context"]["confirm_only"] is True - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["context"]["source"] == "zeroconf" - assert result2["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" - assert result2["title"] == "slzb-06" - assert result2["data"] == { - CONF_HOST: MOCK_HOST, - } - - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 1 - - -async def test_zeroconf_flow_auth( - hass: HomeAssistant, - mock_smlight_client: MagicMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test the full zeroconf flow including authentication.""" - mock_smlight_client.check_auth_needed.return_value = True - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_ZEROCONF}, data=DISCOVERY_INFO - ) - - assert result["description_placeholders"] == {"host": MOCK_HOST} - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_discovery" - - progress = hass.config_entries.flow.async_progress() - assert len(progress) == 1 - assert progress[0]["flow_id"] == result["flow_id"] - assert progress[0]["context"]["confirm_only"] is True - - result2 = await 
hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "auth" - - progress2 = hass.config_entries.flow.async_progress() - assert len(progress2) == 1 - assert progress2[0]["flow_id"] == result["flow_id"] - - result3 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: MOCK_USERNAME, - CONF_PASSWORD: MOCK_PASSWORD, - }, - ) - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["context"]["source"] == "zeroconf" - assert result3["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" - assert result3["title"] == "slzb-06" - assert result3["data"] == { - CONF_USERNAME: MOCK_USERNAME, - CONF_PASSWORD: MOCK_PASSWORD, - CONF_HOST: MOCK_HOST, - } - - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 1 - - -@pytest.mark.usefixtures("mock_smlight_client") -async def test_user_device_exists_abort( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Test we abort user flow if device already configured.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={ - CONF_HOST: MOCK_HOST, - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -@pytest.mark.usefixtures("mock_smlight_client") -async def test_zeroconf_device_exists_abort( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Test we abort zeroconf flow if device already configured.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=DISCOVERY_INFO, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_user_invalid_auth( - hass: HomeAssistant, mock_smlight_client: MagicMock, mock_setup_entry: AsyncMock -) -> None: - """Test we handle invalid auth.""" - mock_smlight_client.check_auth_needed.return_value = True - mock_smlight_client.authenticate.side_effect = SmlightAuthError - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={ - CONF_HOST: MOCK_HOST, - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "auth" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test", - CONF_PASSWORD: "bad", - }, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "invalid_auth"} - assert result2["step_id"] == "auth" - - mock_smlight_client.authenticate.side_effect = None - - result3 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test", - CONF_PASSWORD: "good", - }, - ) - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "SLZB-06p7" - assert result3["data"] == { - CONF_HOST: MOCK_HOST, - CONF_USERNAME: "test", - CONF_PASSWORD: "good", - } - - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 1 - - -async def test_user_cannot_connect( - hass: HomeAssistant, mock_smlight_client: MagicMock, mock_setup_entry: AsyncMock -) -> None: - """Test we handle user cannot connect error.""" - mock_smlight_client.check_auth_needed.side_effect = SmlightConnectionError - - result = await 
hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "unknown.local", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - assert result["step_id"] == "user" - - mock_smlight_client.check_auth_needed.side_effect = None - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: MOCK_HOST, - }, - ) - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "SLZB-06p7" - - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 1 - - -async def test_auth_cannot_connect( - hass: HomeAssistant, mock_smlight_client: MagicMock -) -> None: - """Test we abort auth step on cannot connect error.""" - mock_smlight_client.check_auth_needed.return_value = True - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: MOCK_HOST, - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "auth" - - mock_smlight_client.check_auth_needed.side_effect = SmlightConnectionError - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: MOCK_USERNAME, - CONF_PASSWORD: MOCK_PASSWORD, - }, - ) - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "cannot_connect" - - -async def test_zeroconf_cannot_connect( - hass: HomeAssistant, mock_smlight_client: MagicMock -) -> None: - """Test we abort flow on zeroconf cannot connect error.""" - mock_smlight_client.check_auth_needed.side_effect = SmlightConnectionError - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=DISCOVERY_INFO, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_discovery" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {}, - ) - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "cannot_connect" - - -async def test_zeroconf_legacy_cannot_connect( - hass: HomeAssistant, mock_smlight_client: MagicMock -) -> None: - """Test we abort flow on zeroconf discovery unsupported firmware.""" - mock_smlight_client.get_info.side_effect = SmlightConnectionError - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=DISCOVERY_INFO_LEGACY, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "cannot_connect" - - -@pytest.mark.usefixtures("mock_smlight_client") -async def test_zeroconf_legacy_mac( - hass: HomeAssistant, mock_smlight_client: MagicMock, mock_setup_entry: AsyncMock -) -> None: - """Test we can get unique id MAC address for older firmwares.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=DISCOVERY_INFO_LEGACY, - ) - - assert result["description_placeholders"] == {"host": MOCK_HOST} - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["context"]["source"] == "zeroconf" - assert result2["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" - assert result2["title"] == "slzb-06" - assert 
result2["data"] == { - CONF_HOST: MOCK_HOST, - } - - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 2 - - -async def test_reauth_flow( - hass: HomeAssistant, - mock_smlight_client: MagicMock, - mock_config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, -) -> None: - """Test reauth flow completes successfully.""" - mock_smlight_client.check_auth_needed.return_value = True - mock_config_entry.add_to_hass(hass) - - result = await mock_config_entry.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: MOCK_USERNAME, - CONF_PASSWORD: MOCK_PASSWORD, - }, - ) - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reauth_successful" - assert mock_config_entry.data == { - CONF_USERNAME: MOCK_USERNAME, - CONF_PASSWORD: MOCK_PASSWORD, - CONF_HOST: MOCK_HOST, - } - - assert len(mock_smlight_client.authenticate.mock_calls) == 1 - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - - -async def test_reauth_auth_error( - hass: HomeAssistant, - mock_smlight_client: MagicMock, - mock_config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, -) -> None: - """Test reauth flow with authentication error.""" - mock_smlight_client.check_auth_needed.return_value = True - mock_smlight_client.authenticate.side_effect = SmlightAuthError - - mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: MOCK_USERNAME, - CONF_PASSWORD: "test-bad", - }, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth_confirm" - - mock_smlight_client.authenticate.side_effect = None - result3 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: MOCK_USERNAME, - CONF_PASSWORD: MOCK_PASSWORD, - }, - ) - - assert result3["type"] is FlowResultType.ABORT - assert result3["reason"] == "reauth_successful" - - assert mock_config_entry.data == { - CONF_USERNAME: MOCK_USERNAME, - CONF_PASSWORD: MOCK_PASSWORD, - CONF_HOST: MOCK_HOST, - } - - assert len(mock_smlight_client.authenticate.mock_calls) == 2 - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - - -async def test_reauth_connect_error( - hass: HomeAssistant, - mock_smlight_client: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test reauth flow with error.""" - mock_smlight_client.check_auth_needed.return_value = True - mock_smlight_client.authenticate.side_effect = SmlightConnectionError - - mock_config_entry.add_to_hass(hass) - - result = await mock_config_entry.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: MOCK_USERNAME, - CONF_PASSWORD: MOCK_PASSWORD, - }, - ) - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "cannot_connect" - assert len(mock_smlight_client.authenticate.mock_calls) == 1 diff --git a/tests/components/smlight/test_diagnostics.py b/tests/components/smlight/test_diagnostics.py deleted file mode 100644 index d0c756bfd87..00000000000 --- 
a/tests/components/smlight/test_diagnostics.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Test SMLIGHT diagnostics.""" - -from unittest.mock import MagicMock - -from syrupy import SnapshotAssertion - -from homeassistant.components.smlight.const import DOMAIN -from homeassistant.core import HomeAssistant - -from .conftest import setup_integration - -from tests.common import MockConfigEntry, load_fixture -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, - snapshot: SnapshotAssertion, -) -> None: - """Test config entry diagnostics.""" - mock_smlight_client.get.return_value = load_fixture("logs.txt", DOMAIN) - entry = await setup_integration(hass, mock_config_entry) - - result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - - assert result == snapshot diff --git a/tests/components/smlight/test_init.py b/tests/components/smlight/test_init.py deleted file mode 100644 index afc53932fb0..00000000000 --- a/tests/components/smlight/test_init.py +++ /dev/null @@ -1,144 +0,0 @@ -"Test SMLIGHT SLZB device integration initialization." - -from unittest.mock import MagicMock - -from freezegun.api import FrozenDateTimeFactory -from pysmlight import Info -from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError, SmlightError -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.smlight.const import DOMAIN, SCAN_INTERVAL -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_UNAVAILABLE -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.issue_registry import IssueRegistry - -from .conftest import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed - -pytestmark = [ - pytest.mark.usefixtures( - "mock_smlight_client", - ) -] - - -async def test_async_setup_entry( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Test async_setup_entry.""" - entry = await setup_integration(hass, mock_config_entry) - - assert entry.state is ConfigEntryState.LOADED - assert entry.unique_id == "aa:bb:cc:dd:ee:ff" - - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.NOT_LOADED - - -async def test_async_setup_auth_failed( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, -) -> None: - """Test async_setup_entry when authentication fails.""" - mock_smlight_client.check_auth_needed.return_value = True - mock_smlight_client.authenticate.side_effect = SmlightAuthError - entry = await setup_integration(hass, mock_config_entry) - - assert entry.state is ConfigEntryState.SETUP_ERROR - - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.NOT_LOADED - - -async def test_async_setup_missing_credentials( - hass: HomeAssistant, - mock_config_entry_host: MockConfigEntry, - mock_smlight_client: MagicMock, -) -> None: - """Test we trigger reauth when credentials are missing.""" - mock_smlight_client.check_auth_needed.return_value = True - - await setup_integration(hass, mock_config_entry_host) - - progress = 
hass.config_entries.flow.async_progress() - assert len(progress) == 1 - assert progress[0]["step_id"] == "reauth_confirm" - assert progress[0]["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" - - -@pytest.mark.parametrize("error", [SmlightConnectionError, SmlightAuthError]) -async def test_update_failed( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, - freezer: FrozenDateTimeFactory, - error: SmlightError, -) -> None: - """Test update failed due to error.""" - - await setup_integration(hass, mock_config_entry) - entity = hass.states.get("sensor.mock_title_core_chip_temp") - assert entity.state is not STATE_UNAVAILABLE - - mock_smlight_client.get_info.side_effect = error - - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - entity = hass.states.get("sensor.mock_title_core_chip_temp") - assert entity is not None - assert entity.state == STATE_UNAVAILABLE - - -async def test_device_info( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - device_registry: dr.DeviceRegistry, -) -> None: - """Test device registry information.""" - entry = await setup_integration(hass, mock_config_entry) - - device_entry = device_registry.async_get_device( - connections={(dr.CONNECTION_NETWORK_MAC, entry.unique_id)} - ) - assert device_entry is not None - assert device_entry == snapshot - - -async def test_device_legacy_firmware( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, - device_registry: dr.DeviceRegistry, - issue_registry: IssueRegistry, -) -> None: - """Test device setup for old firmware version that dont support required API.""" - LEGACY_VERSION = "v0.9.9" - mock_smlight_client.get_sensors.side_effect = SmlightError - mock_smlight_client.get_info.return_value = Info( - legacy_api=2, sw_version=LEGACY_VERSION, MAC="AA:BB:CC:DD:EE:FF" - ) - entry = await setup_integration(hass, mock_config_entry) - - assert entry.unique_id == "aa:bb:cc:dd:ee:ff" - - device_entry = device_registry.async_get_device( - connections={(dr.CONNECTION_NETWORK_MAC, entry.unique_id)} - ) - assert LEGACY_VERSION in device_entry.sw_version - - issue = issue_registry.async_get_issue( - domain=DOMAIN, issue_id="unsupported_firmware" - ) - assert issue is not None - assert issue.domain == DOMAIN - assert issue.issue_id == "unsupported_firmware" diff --git a/tests/components/smlight/test_sensor.py b/tests/components/smlight/test_sensor.py deleted file mode 100644 index f130d7ccf30..00000000000 --- a/tests/components/smlight/test_sensor.py +++ /dev/null @@ -1,75 +0,0 @@ -"""Tests for the SMLIGHT sensor platform.""" - -from unittest.mock import MagicMock - -from pysmlight import Sensors -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.const import STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er - -from .conftest import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - -pytestmark = [ - pytest.mark.usefixtures( - "mock_smlight_client", - ) -] - - -@pytest.fixture -def platforms() -> list[Platform]: - """Platforms, which should be loaded during the test.""" - return [Platform.SENSOR] - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.freeze_time("2024-07-01 00:00:00+00:00") -async def test_sensors( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, 
- entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test the SMLIGHT sensors.""" - entry = await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) - - -async def test_disabled_by_default_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the disabled by default SMLIGHT sensors.""" - await setup_integration(hass, mock_config_entry) - - for sensor in ("core_uptime", "filesystem_usage", "ram_usage", "zigbee_uptime"): - assert not hass.states.get(f"sensor.mock_title_{sensor}") - - assert (entry := entity_registry.async_get(f"sensor.mock_title_{sensor}")) - assert entry.disabled - assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_zigbee_uptime_disconnected( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, -) -> None: - """Test for uptime when zigbee socket is disconnected. - - In this case zigbee uptime state should be unknown. - """ - mock_smlight_client.get_sensors.return_value = Sensors(socket_uptime=0) - await setup_integration(hass, mock_config_entry) - - state = hass.states.get("sensor.mock_title_zigbee_uptime") - assert state.state == STATE_UNKNOWN diff --git a/tests/components/smlight/test_switch.py b/tests/components/smlight/test_switch.py deleted file mode 100644 index da02814a1c5..00000000000 --- a/tests/components/smlight/test_switch.py +++ /dev/null @@ -1,132 +0,0 @@ -"""Tests for the SMLIGHT switch platform.""" - -from collections.abc import Callable -from unittest.mock import MagicMock - -from pysmlight import SettingsEvent -from pysmlight.const import Settings -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.switch import ( - DOMAIN as SWITCH_DOMAIN, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, -) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .conftest import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - -pytestmark = [ - pytest.mark.usefixtures( - "mock_smlight_client", - ) -] - - -@pytest.fixture -def platforms() -> list[Platform]: - """Platforms, which should be loaded during the test.""" - return [Platform.SWITCH] - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_switch_setup( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test setup of SMLIGHT switches.""" - entry = await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) - - -async def test_disabled_by_default_switch( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, -) -> None: - """Test vpn enabled switch is disabled by default .""" - await setup_integration(hass, mock_config_entry) - for entity in ("vpn_enabled", "auto_zigbee_update"): - assert not hass.states.get(f"switch.mock_title_{entity}") - - assert (entry := entity_registry.async_get(f"switch.mock_title_{entity}")) - assert entry.disabled - assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION - - 
-@pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.parametrize( - ("entity", "setting"), - [ - ("disable_leds", Settings.DISABLE_LEDS), - ("led_night_mode", Settings.NIGHT_MODE), - ("auto_zigbee_update", Settings.ZB_AUTOUPDATE), - ("vpn_enabled", Settings.ENABLE_VPN), - ], -) -async def test_switches( - hass: HomeAssistant, - entity: str, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, - setting: Settings, -) -> None: - """Test the SMLIGHT switches.""" - await setup_integration(hass, mock_config_entry) - - _page, _toggle = setting.value - - entity_id = f"switch.mock_title_{entity}" - state = hass.states.get(entity_id) - assert state is not None - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - assert len(mock_smlight_client.set_toggle.mock_calls) == 1 - mock_smlight_client.set_toggle.assert_called_once_with(_page, _toggle, True) - - event_function: Callable[[SettingsEvent], None] = next( - ( - call_args[0][1] - for call_args in mock_smlight_client.sse.register_settings_cb.call_args_list - if setting == call_args[0][0] - ), - None, - ) - - async def _call_event_function(state: bool = True): - event_function(SettingsEvent(page=_page, origin="ha", setting={_toggle: state})) - await hass.async_block_till_done() - - await _call_event_function(state=True) - - state = hass.states.get(entity_id) - assert state.state == STATE_ON - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - assert len(mock_smlight_client.set_toggle.mock_calls) == 2 - mock_smlight_client.set_toggle.assert_called_with(_page, _toggle, False) - - await _call_event_function(state=False) - - state = hass.states.get(entity_id) - assert state.state == STATE_OFF diff --git a/tests/components/smlight/test_update.py b/tests/components/smlight/test_update.py deleted file mode 100644 index 0bb2e34d7ca..00000000000 --- a/tests/components/smlight/test_update.py +++ /dev/null @@ -1,313 +0,0 @@ -"""Tests for the SMLIGHT update platform.""" - -from datetime import timedelta -from unittest.mock import MagicMock, patch - -from freezegun.api import FrozenDateTimeFactory -from pysmlight import Firmware, Info -from pysmlight.const import Events as SmEvents -from pysmlight.sse import MessageEvent -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.smlight.const import SCAN_FIRMWARE_INTERVAL -from homeassistant.components.update import ( - ATTR_IN_PROGRESS, - ATTR_INSTALLED_VERSION, - ATTR_LATEST_VERSION, - ATTR_UPDATE_PERCENTAGE, - DOMAIN as PLATFORM, - SERVICE_INSTALL, -) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er - -from . 
import get_mock_event_function -from .conftest import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform -from tests.typing import WebSocketGenerator - -pytestmark = [ - pytest.mark.usefixtures( - "mock_smlight_client", - ) -] - -MOCK_FIRMWARE_DONE = MessageEvent( - type="FW_UPD_done", - message="FW_UPD_done", - data="", - origin="http://slzb-06p10.local", - last_event_id="", -) - -MOCK_FIRMWARE_PROGRESS = MessageEvent( - type="ZB_FW_prgs", - message="ZB_FW_prgs", - data="50", - origin="http://slzb-06p10.local", - last_event_id="", -) - -MOCK_FIRMWARE_FAIL = MessageEvent( - type="ZB_FW_err", - message="ZB_FW_err", - data="", - origin="http://slzb-06p10.local", - last_event_id="", -) - -MOCK_FIRMWARE_NOTES = [ - Firmware( - ver="v2.3.6", - mode="ESP", - notes=None, - ) -] - - -@pytest.fixture -def platforms() -> list[Platform]: - """Platforms, which should be loaded during the test.""" - return [Platform.UPDATE] - - -async def test_update_setup( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test setup of SMLIGHT switches.""" - entry = await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) - - await hass.config_entries.async_unload(entry.entry_id) - - -@patch("homeassistant.components.smlight.update.asyncio.sleep", return_value=None) -async def test_update_firmware( - mock_sleep: MagicMock, - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, -) -> None: - """Test firmware updates.""" - await setup_integration(hass, mock_config_entry) - entity_id = "update.mock_title_core_firmware" - state = hass.states.get(entity_id) - assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.3.6" - assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2" - - await hass.services.async_call( - PLATFORM, - SERVICE_INSTALL, - {ATTR_ENTITY_ID: entity_id}, - blocking=False, - ) - - assert len(mock_smlight_client.fw_update.mock_calls) == 1 - - event_function = get_mock_event_function(mock_smlight_client, SmEvents.ZB_FW_prgs) - - event_function(MOCK_FIRMWARE_PROGRESS) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_IN_PROGRESS] is True - assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 50 - - event_function = get_mock_event_function(mock_smlight_client, SmEvents.FW_UPD_done) - - event_function(MOCK_FIRMWARE_DONE) - - mock_smlight_client.get_info.return_value = Info( - sw_version="v2.5.2", - ) - - freezer.tick(timedelta(seconds=5)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.5.2" - assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2" - - -async def test_update_legacy_firmware_v2( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, -) -> None: - """Test firmware update for legacy v2 firmware.""" - mock_smlight_client.get_info.return_value = Info( - sw_version="v2.0.18", - legacy_api=1, - MAC="AA:BB:CC:DD:EE:FF", - ) - await setup_integration(hass, mock_config_entry) - entity_id = "update.mock_title_core_firmware" - state = hass.states.get(entity_id) - assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == 
"v2.0.18" - assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2" - - await hass.services.async_call( - PLATFORM, - SERVICE_INSTALL, - {ATTR_ENTITY_ID: entity_id}, - blocking=False, - ) - - assert len(mock_smlight_client.fw_update.mock_calls) == 1 - - event_function = get_mock_event_function(mock_smlight_client, SmEvents.ESP_UPD_done) - - event_function(MOCK_FIRMWARE_DONE) - - mock_smlight_client.get_info.return_value = Info( - sw_version="v2.5.2", - ) - - freezer.tick(SCAN_FIRMWARE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.5.2" - assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2" - - -async def test_update_firmware_failed( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, -) -> None: - """Test firmware updates.""" - await setup_integration(hass, mock_config_entry) - entity_id = "update.mock_title_core_firmware" - state = hass.states.get(entity_id) - assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.3.6" - assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2" - - await hass.services.async_call( - PLATFORM, - SERVICE_INSTALL, - {ATTR_ENTITY_ID: entity_id}, - blocking=False, - ) - - assert len(mock_smlight_client.fw_update.mock_calls) == 1 - - event_function = get_mock_event_function(mock_smlight_client, SmEvents.ZB_FW_err) - - async def _call_event_function(event: MessageEvent): - event_function(event) - - with pytest.raises(HomeAssistantError): - await _call_event_function(MOCK_FIRMWARE_FAIL) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - - -@patch("homeassistant.components.smlight.const.LOGGER.warning") -async def test_update_reboot_timeout( - mock_warning: MagicMock, - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, -) -> None: - """Test firmware updates.""" - await setup_integration(hass, mock_config_entry) - entity_id = "update.mock_title_core_firmware" - state = hass.states.get(entity_id) - assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.3.6" - assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2" - - with ( - patch( - "homeassistant.components.smlight.update.asyncio.timeout", - side_effect=TimeoutError, - ), - patch( - "homeassistant.components.smlight.update.asyncio.sleep", - return_value=None, - ), - ): - await hass.services.async_call( - PLATFORM, - SERVICE_INSTALL, - {ATTR_ENTITY_ID: entity_id}, - blocking=False, - ) - - assert len(mock_smlight_client.fw_update.mock_calls) == 1 - - event_function = get_mock_event_function( - mock_smlight_client, SmEvents.FW_UPD_done - ) - - event_function(MOCK_FIRMWARE_DONE) - - freezer.tick(timedelta(seconds=5)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - mock_warning.assert_called_once() - - -async def test_update_release_notes( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_config_entry: MockConfigEntry, - mock_smlight_client: MagicMock, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test firmware release notes.""" - await setup_integration(hass, mock_config_entry) - ws_client = await hass_ws_client(hass) - await hass.async_block_till_done() - entity_id = "update.mock_title_core_firmware" - - state = 
hass.states.get(entity_id) - assert state - assert state.state == STATE_ON - - await ws_client.send_json( - { - "id": 1, - "type": "update/release_notes", - "entity_id": entity_id, - } - ) - result = await ws_client.receive_json() - assert result["result"] is not None - - mock_smlight_client.get_firmware_version.side_effect = None - mock_smlight_client.get_firmware_version.return_value = MOCK_FIRMWARE_NOTES - - freezer.tick(SCAN_FIRMWARE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - await ws_client.send_json( - { - "id": 2, - "type": "update/release_notes", - "entity_id": entity_id, - } - ) - result = await ws_client.receive_json() - await hass.async_block_till_done() - assert result["result"] is None diff --git a/tests/components/snapcast/conftest.py b/tests/components/snapcast/conftest.py index bcc0ac5bc30..e5806ac5f40 100644 --- a/tests/components/snapcast/conftest.py +++ b/tests/components/snapcast/conftest.py @@ -1,9 +1,9 @@ """Test the snapcast config flow.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/snips/test_init.py b/tests/components/snips/test_init.py index 82dbf1cd281..89ee211b38f 100644 --- a/tests/components/snips/test_init.py +++ b/tests/components/snips/test_init.py @@ -6,10 +6,10 @@ import logging import pytest import voluptuous as vol +from homeassistant.bootstrap import async_setup_component from homeassistant.components import snips from homeassistant.core import HomeAssistant from homeassistant.helpers.intent import ServiceIntentHandler, async_register -from homeassistant.setup import async_setup_component from tests.common import async_fire_mqtt_message, async_mock_intent, async_mock_service from tests.typing import MqttMockHAClient diff --git a/tests/components/snmp/test_integer_sensor.py b/tests/components/snmp/test_integer_sensor.py index 8e7e0f166ef..dab2b080c97 100644 --- a/tests/components/snmp/test_integer_sensor.py +++ b/tests/components/snmp/test_integer_sensor.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from pysnmp.proto.rfc1902 import Integer32 +from pysnmp.hlapi import Integer32 import pytest from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN diff --git a/tests/components/snmp/test_negative_sensor.py b/tests/components/snmp/test_negative_sensor.py index 66a111b68d0..dba09ea75bd 100644 --- a/tests/components/snmp/test_negative_sensor.py +++ b/tests/components/snmp/test_negative_sensor.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from pysnmp.proto.rfc1902 import Integer32 +from pysnmp.hlapi import Integer32 import pytest from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN diff --git a/tests/components/snmp/test_switch.py b/tests/components/snmp/test_switch.py deleted file mode 100644 index fe1c3922ff0..00000000000 --- a/tests/components/snmp/test_switch.py +++ /dev/null @@ -1,67 +0,0 @@ -"""SNMP switch tests.""" - -from unittest.mock import patch - -from pysnmp.proto.rfc1902 import Integer32 -import pytest - -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -config = { - SWITCH_DOMAIN: { - "platform": "snmp", - "host": "192.168.1.32", - # ippower-mib::ippoweroutlet1.0 - "baseoid": "1.3.6.1.4.1.38107.1.3.1.0", - "payload_on": 1, - 
"payload_off": 0, - }, -} - - -async def test_snmp_integer_switch_off(hass: HomeAssistant) -> None: - """Test snmp switch returning int 0 for off.""" - - mock_data = Integer32(0) - with patch( - "homeassistant.components.snmp.switch.getCmd", - return_value=(None, None, None, [[mock_data]]), - ): - assert await async_setup_component(hass, SWITCH_DOMAIN, config) - await hass.async_block_till_done() - state = hass.states.get("switch.snmp") - assert state.state == STATE_OFF - - -async def test_snmp_integer_switch_on(hass: HomeAssistant) -> None: - """Test snmp switch returning int 1 for on.""" - - mock_data = Integer32(1) - with patch( - "homeassistant.components.snmp.switch.getCmd", - return_value=(None, None, None, [[mock_data]]), - ): - assert await async_setup_component(hass, SWITCH_DOMAIN, config) - await hass.async_block_till_done() - state = hass.states.get("switch.snmp") - assert state.state == STATE_ON - - -async def test_snmp_integer_switch_unknown( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test snmp switch returning int 3 (not a configured payload) for unknown.""" - - mock_data = Integer32(3) - with patch( - "homeassistant.components.snmp.switch.getCmd", - return_value=(None, None, None, [[mock_data]]), - ): - assert await async_setup_component(hass, SWITCH_DOMAIN, config) - await hass.async_block_till_done() - state = hass.states.get("switch.snmp") - assert state.state == STATE_UNKNOWN - assert "Invalid payload '3' received for entity" in caplog.text diff --git a/tests/components/snooz/__init__.py b/tests/components/snooz/__init__.py index f27ef91fe5a..c314fde5c90 100644 --- a/tests/components/snooz/__init__.py +++ b/tests/components/snooz/__init__.py @@ -6,7 +6,7 @@ from dataclasses import dataclass from unittest.mock import patch from pysnooz.commands import SnoozCommandData -from pysnooz.device import DisconnectionReason, SnoozConnectionStatus +from pysnooz.device import DisconnectionReason from pysnooz.testing import MockSnoozDevice as ParentMockSnoozDevice from homeassistant.components.snooz.const import DOMAIN @@ -70,31 +70,13 @@ class SnoozFixture: class MockSnoozDevice(ParentMockSnoozDevice): """Used for testing integration with Bleak. 
- Adjusted for https://github.com/AustinBrunkhorst/pysnooz/pull/19 + Adjusted for https://github.com/AustinBrunkhorst/pysnooz/issues/6 """ - async def async_disconnect(self) -> None: - """Disconnect from the device.""" - self._is_manually_disconnecting = True - try: - self._cancel_current_command() - if ( - self._reconnection_task is not None - and not self._reconnection_task.done() - ): - self._reconnection_task.cancel() - - if self._connection_task is not None and not self._connection_task.done(): - self._connection_task.cancel() - - if self._api is not None: - await self._api.async_disconnect() - - if self.connection_status != SnoozConnectionStatus.DISCONNECTED: - self._machine.device_disconnected(reason=DisconnectionReason.USER) - - finally: - self._is_manually_disconnecting = False + def _on_device_disconnected(self, e) -> None: + if self._is_manually_disconnecting: + e.kwargs.set("reason", DisconnectionReason.USER) + return super()._on_device_disconnected(e) async def create_mock_snooz( diff --git a/tests/components/snooz/test_fan.py b/tests/components/snooz/test_fan.py index 127895d7de7..ddc93a4ba1f 100644 --- a/tests/components/snooz/test_fan.py +++ b/tests/components/snooz/test_fan.py @@ -3,7 +3,7 @@ from __future__ import annotations from datetime import timedelta -from unittest.mock import Mock, patch +from unittest.mock import Mock from pysnooz.api import SnoozDeviceState, UnknownSnoozState from pysnooz.commands import SnoozCommandResult, SnoozCommandResultStatus @@ -32,8 +32,6 @@ from homeassistant.helpers import entity_registry as er from . import SnoozFixture, create_mock_snooz, create_mock_snooz_config_entry -from tests.components.bluetooth import generate_ble_device - async def test_turn_on(hass: HomeAssistant, snooz_fan_entity_id: str) -> None: """Test turning on the device.""" @@ -151,6 +149,8 @@ async def test_transition_off(hass: HomeAssistant, snooz_fan_entity_id: str) -> assert ATTR_ASSUMED_STATE not in state.attributes +# This test needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_push_events( hass: HomeAssistant, mock_connected_snooz: SnoozFixture, snooz_fan_entity_id: str ) -> None: @@ -174,10 +174,9 @@ async def test_push_events( state = hass.states.get(snooz_fan_entity_id) assert state.attributes[ATTR_ASSUMED_STATE] is True - # Don't attempt to reconnect - await mock_connected_snooz.device.async_disconnect() - +# This test needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_restore_state( hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: @@ -202,14 +201,7 @@ async def test_restore_state( assert state.state == STATE_UNAVAILABLE # reload entry - with ( - patch("homeassistant.components.snooz.SnoozDevice", return_value=device), - patch( - "homeassistant.components.snooz.async_ble_device_from_address", - return_value=generate_ble_device(device.address, device.name), - ), - ): - await hass.config_entries.async_setup(entry.entry_id) + await create_mock_snooz_config_entry(hass, device) # should match last known state state = hass.states.get(entity_id) @@ -234,14 +226,7 @@ async def test_restore_unknown_state( assert state.state == STATE_UNAVAILABLE # reload entry - with ( - patch("homeassistant.components.snooz.SnoozDevice", return_value=device), - patch( - "homeassistant.components.snooz.async_ble_device_from_address", - return_value=generate_ble_device(device.address, device.name), - ), - ): - await 
hass.config_entries.async_setup(entry.entry_id) + await create_mock_snooz_config_entry(hass, device) # should match last known state state = hass.states.get(entity_id) diff --git a/tests/components/snooz/test_init.py b/tests/components/snooz/test_init.py index edcd7913792..b1ab06fcc8e 100644 --- a/tests/components/snooz/test_init.py +++ b/tests/components/snooz/test_init.py @@ -2,11 +2,15 @@ from __future__ import annotations +import pytest + from homeassistant.core import HomeAssistant from . import SnoozFixture +# This test needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_removing_entry_cleans_up_connections( hass: HomeAssistant, mock_connected_snooz: SnoozFixture ) -> None: @@ -17,6 +21,8 @@ async def test_removing_entry_cleans_up_connections( assert not mock_connected_snooz.device.is_connected +# This test needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_reloading_entry_cleans_up_connections( hass: HomeAssistant, mock_connected_snooz: SnoozFixture ) -> None: diff --git a/tests/components/solarlog/__init__.py b/tests/components/solarlog/__init__.py index c2c0296d9e2..74b19bd297e 100644 --- a/tests/components/solarlog/__init__.py +++ b/tests/components/solarlog/__init__.py @@ -17,5 +17,3 @@ async def setup_platform( with patch("homeassistant.components.solarlog.PLATFORMS", platforms): await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - - return config_entry diff --git a/tests/components/solarlog/conftest.py b/tests/components/solarlog/conftest.py index 2d4b4e32522..08340487d99 100644 --- a/tests/components/solarlog/conftest.py +++ b/tests/components/solarlog/conftest.py @@ -1,33 +1,22 @@ """Test helpers.""" from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, patch import pytest -from solarlog_cli.solarlog_models import InverterData, SolarlogData -from homeassistant.components.solarlog.const import ( - CONF_HAS_PWD, - DOMAIN as SOLARLOG_DOMAIN, -) -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD +from homeassistant.components.solarlog.const import DOMAIN as SOLARLOG_DOMAIN +from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.core import HomeAssistant from .const import HOST, NAME -from tests.common import MockConfigEntry, load_json_object_fixture - -DEVICE_LIST = { - 0: InverterData(name="Inverter 1", enabled=True), - 1: InverterData(name="Inverter 2", enabled=True), -} -INVERTER_DATA = { - 0: InverterData( - name="Inverter 1", enabled=True, consumption_year=354687, current_power=5 - ), - 1: InverterData( - name="Inverter 2", enabled=True, consumption_year=354, current_power=6 - ), -} +from tests.common import ( + MockConfigEntry, + load_json_object_fixture, + mock_device_registry, + mock_registry, +) @pytest.fixture @@ -39,10 +28,9 @@ def mock_config_entry() -> MockConfigEntry: data={ CONF_HOST: HOST, CONF_NAME: NAME, - CONF_HAS_PWD: True, - CONF_PASSWORD: "pwd", + "extended_data": True, }, - minor_version=3, + minor_version=2, entry_id="ce5f5431554d101905d31797e1232da8", ) @@ -51,23 +39,11 @@ def mock_solarlog_connector(): """Build a fixture for the SolarLog API that connects successfully and returns one device.""" - data = SolarlogData.from_dict( - load_json_object_fixture("solarlog_data.json", SOLARLOG_DOMAIN) - ) - 
data.inverter_data = INVERTER_DATA - mock_solarlog_api = AsyncMock() - mock_solarlog_api.set_enabled_devices = MagicMock() - mock_solarlog_api.test_connection.return_value = True - mock_solarlog_api.test_extended_data_available.return_value = True - mock_solarlog_api.extended_data.return_value = True - mock_solarlog_api.update_data.return_value = data - mock_solarlog_api.update_device_list.return_value = DEVICE_LIST - mock_solarlog_api.update_inverter_data.return_value = INVERTER_DATA - mock_solarlog_api.device_name = {0: "Inverter 1", 1: "Inverter 2"}.get - mock_solarlog_api.device_enabled = {0: True, 1: True}.get - mock_solarlog_api.password.return_value = "pwd" - + mock_solarlog_api.test_connection = AsyncMock(return_value=True) + mock_solarlog_api.update_data.return_value = load_json_object_fixture( + "solarlog_data.json", SOLARLOG_DOMAIN + ) with ( patch( "homeassistant.components.solarlog.coordinator.SolarLogConnector", @@ -84,7 +60,7 @@ def mock_solarlog_connector(): @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: +def mock_setup_entry() -> Generator[AsyncMock, None, None]: """Override async_setup_entry.""" with patch( "homeassistant.components.solarlog.async_setup_entry", return_value=True @@ -100,3 +76,15 @@ def mock_test_connection(): return_value=True, ): yield + + +@pytest.fixture(name="device_reg") +def device_reg_fixture(hass: HomeAssistant): + """Return an empty, loaded, registry.""" + return mock_device_registry(hass) + + +@pytest.fixture(name="entity_reg") +def entity_reg_fixture(hass: HomeAssistant): + """Return an empty, loaded, registry.""" + return mock_registry(hass) diff --git a/tests/components/solarlog/fixtures/solarlog_data.json b/tests/components/solarlog/fixtures/solarlog_data.json index 339ab4a4dfc..4976f4fa8b7 100644 --- a/tests/components/solarlog/fixtures/solarlog_data.json +++ b/tests/components/solarlog/fixtures/solarlog_data.json @@ -17,9 +17,8 @@ "total_power": 120, "self_consumption_year": 545, "alternator_loss": 2, - "efficiency": 98.1, - "usage": 54.8, + "efficiency": 0.9804, + "usage": 0.5487, "power_available": 45.13, - "capacity": 85.5, - "last_updated": "2024-08-01T15:20:45Z" + "capacity": 0.85 } diff --git a/tests/components/solarlog/snapshots/test_diagnostics.ambr b/tests/components/solarlog/snapshots/test_diagnostics.ambr deleted file mode 100644 index 4b37ea63dce..00000000000 --- a/tests/components/solarlog/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,67 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'config_entry': dict({ - 'data': dict({ - 'has_password': True, - 'host': '**REDACTED**', - 'name': 'Solarlog test 1 2 3', - 'password': 'pwd', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'solarlog', - 'entry_id': 'ce5f5431554d101905d31797e1232da8', - 'minor_version': 3, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'solarlog', - 'unique_id': None, - 'version': 1, - }), - 'solarlog_data': dict({ - 'alternator_loss': 2.0, - 'capacity': 85.5, - 'consumption_ac': 54.87, - 'consumption_day': 5.31, - 'consumption_month': 758.0, - 'consumption_total': 354687.0, - 'consumption_year': 4587.0, - 'consumption_yesterday': 7.34, - 'efficiency': 98.1, - 'inverter_data': dict({ - '0': dict({ - 'consumption_year': 354687, - 'current_power': 5, - 'enabled': True, - 'name': 'Inverter 1', - }), - '1': dict({ - 'consumption_year': 354, - 'current_power': 6, - 'enabled': True, - 'name': 'Inverter 2', - 
}), - }), - 'last_updated': '2024-08-01T15:20:45+00:00', - 'power_ac': 100.0, - 'power_available': 45.13, - 'power_dc': 102.0, - 'production_year': None, - 'self_consumption_year': 545.0, - 'total_power': 120.0, - 'usage': 54.8, - 'voltage_ac': 100.0, - 'voltage_dc': 100.0, - 'yield_day': 4.21, - 'yield_month': 515.0, - 'yield_total': 56513.0, - 'yield_year': 1023.0, - 'yield_yesterday': 5.21, - }), - }) -# --- diff --git a/tests/components/solarlog/snapshots/test_sensor.ambr b/tests/components/solarlog/snapshots/test_sensor.ambr index 32be560fc62..5fb369bc3b6 100644 --- a/tests/components/solarlog/snapshots/test_sensor.ambr +++ b/tests/components/solarlog/snapshots/test_sensor.ambr @@ -1,220 +1,4 @@ # serializer version: 1 -# name: test_all_entities[sensor.inverter_1_consumption_year-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1_consumption_year', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption year', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_inverter_1_consumption_year', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.inverter_1_consumption_year-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Inverter 1 Consumption year', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_1_consumption_year', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '354.687', - }) -# --- -# name: test_all_entities[sensor.inverter_1_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_inverter_1_current_power', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.inverter_1_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Inverter 1 Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_1_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5', - }) -# --- -# name: test_all_entities[sensor.inverter_2_consumption_year-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_2_consumption_year', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption year', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_inverter_2_consumption_year', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.inverter_2_consumption_year-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Inverter 2 Consumption year', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_2_consumption_year', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.354', - }) -# --- -# name: test_all_entities[sensor.inverter_2_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_2_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_inverter_2_current_power', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.inverter_2_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Inverter 2 Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_2_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '6', - }) -# --- # name: test_all_entities[sensor.solarlog_alternator_loss-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -263,7 +47,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.0', + 'state': '2', }) # --- # name: test_all_entities[sensor.solarlog_capacity-entry] @@ -289,9 +73,6 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), }), 'original_device_class': , 'original_icon': None, @@ -317,7 +98,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '85.5', + 'state': '85.0', }) # --- # name: test_all_entities[sensor.solarlog_consumption_ac-entry] @@ -376,9 +157,7 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), + 'capabilities': None, 'config_entry_id': , 'device_class': None, 'device_id': , @@ -394,12 +173,6 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), 'original_device_class': , 'original_icon': None, @@ -417,7 +190,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'solarlog 
Consumption day', - 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -425,7 +197,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.00531', + 'state': '0.005', }) # --- # name: test_all_entities[sensor.solarlog_consumption_month-entry] @@ -433,9 +205,7 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), + 'capabilities': None, 'config_entry_id': , 'device_class': None, 'device_id': , @@ -451,12 +221,6 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), 'original_device_class': , 'original_icon': None, @@ -474,7 +238,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'solarlog Consumption month', - 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -508,12 +271,6 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), 'original_device_class': , 'original_icon': None, @@ -547,9 +304,7 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), + 'capabilities': None, 'config_entry_id': , 'device_class': None, 'device_id': , @@ -565,12 +320,6 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), 'original_device_class': , 'original_icon': None, @@ -588,7 +337,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'solarlog Consumption year', - 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -620,12 +368,6 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), 'original_device_class': , 'original_icon': None, @@ -650,7 +392,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.00734', + 'state': '0.007', }) # --- # name: test_all_entities[sensor.solarlog_efficiency-entry] @@ -676,9 +418,6 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), }), 'original_device_class': , 'original_icon': None, @@ -704,7 +443,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '98.1', + 'state': '98.0', }) # --- # name: test_all_entities[sensor.solarlog_installed_peak_power-entry] @@ -712,9 +451,7 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), + 'capabilities': None, 'config_entry_id': , 'device_class': None, 'device_id': , @@ -747,7 +484,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'solarlog Installed peak power', - 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -755,7 +491,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '120.0', + 'state': '120', }) # --- # name: test_all_entities[sensor.solarlog_last_update-entry] @@ -802,7 +538,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2024-08-01T15:20:45+00:00', + 'state': 'unknown', }) # --- # name: test_all_entities[sensor.solarlog_power_ac-entry] @@ -853,7 +589,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100.0', + 'state': '100', }) # --- # name: test_all_entities[sensor.solarlog_power_available-entry] @@ -955,7 +691,7 @@ 'last_changed': , 'last_reported': , 
'last_updated': , - 'state': '102.0', + 'state': '102', }) # --- # name: test_all_entities[sensor.solarlog_self_consumption_year-entry] @@ -1006,7 +742,1098 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '545.0', + 'state': '545', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_alternator_loss-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_alternator_loss', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Alternator loss', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'alternator_loss', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_alternator_loss', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_alternator_loss-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'solarlog_test_1_2_3 Alternator loss', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_alternator_loss', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Capacity', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'capacity', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_capacity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'solarlog_test_1_2_3 Capacity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '85.0', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_ac-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_ac', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumption AC', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_ac', 
+ 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_ac-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'solarlog_test_1_2_3 Consumption AC', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_ac', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '54.87', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumption day', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_day', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_day', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'solarlog_test_1_2_3 Consumption day', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.005', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumption month', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_month', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_month', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'solarlog_test_1_2_3 Consumption month', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.758', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumption total', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_total', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'solarlog_test_1_2_3 Consumption total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '354.687', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumption year', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_year', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'solarlog_test_1_2_3 Consumption year', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.587', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_yesterday-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_yesterday', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumption yesterday', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_yesterday', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_yesterday', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_yesterday-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'solarlog_test_1_2_3 Consumption yesterday', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_yesterday', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.007', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_efficiency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , 
+ }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_efficiency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Efficiency', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'efficiency', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_efficiency', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_efficiency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'solarlog_test_1_2_3 Efficiency', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_efficiency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '98.0', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_installed_peak_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_installed_peak_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Installed peak power', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_power', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_total_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_installed_peak_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'solarlog_test_1_2_3 Installed peak power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_installed_peak_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '120', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_last_update-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_last_update', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last update', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_update', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_last_updated', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_last_update-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'solarlog_test_1_2_3 Last update', + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_last_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: 
test_all_entities[sensor.solarlog_test_1_2_3_power_ac-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_power_ac', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power AC', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_ac', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_power_ac-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'solarlog_test_1_2_3 Power AC', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_power_ac', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_power_available-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_power_available', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power available', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_available', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_available', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_power_available-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'solarlog_test_1_2_3 Power available', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_power_available', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45.13', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_power_dc-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_power_dc', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power DC', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_dc', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_power_dc-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'solarlog_test_1_2_3 Power DC', + 
'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_power_dc', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '102', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Usage', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'usage', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_usage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'solarlog_test_1_2_3 Usage', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '54.9', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_ac-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_ac', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage AC', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_voltage_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_ac-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'solarlog_test_1_2_3 Voltage AC', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_ac', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_dc-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_dc', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage DC', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: 
test_all_entities[sensor.solarlog_test_1_2_3_voltage_dc-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'solarlog_test_1_2_3 Voltage DC', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_dc', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_yield_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yield day', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yield_day', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_day', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'solarlog_test_1_2_3 Yield day', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_yield_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.004', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_yield_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yield month', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yield_month', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_month', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'solarlog_test_1_2_3 Yield month', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_yield_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.515', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_yield_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yield total', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'yield_total', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'solarlog_test_1_2_3 Yield total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_yield_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '56.513', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_yield_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yield year', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yield_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_year', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'solarlog_test_1_2_3 Yield year', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_yield_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.023', + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_yesterday-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarlog_test_1_2_3_yield_yesterday', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yield yesterday', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yield_yesterday', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_yesterday', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_yesterday-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'solarlog_test_1_2_3 Yield yesterday', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarlog_test_1_2_3_yield_yesterday', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.005', }) # --- # name: test_all_entities[sensor.solarlog_usage-entry] @@ -1032,9 +1859,6 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), }), 'original_device_class': , 'original_icon': None, @@ -1060,7 +1884,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '54.8', + 'state': '54.9', }) # --- # name: test_all_entities[sensor.solarlog_voltage_ac-entry] @@ -1111,7 +1935,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100.0', + 'state': '100', }) # --- # name: 
test_all_entities[sensor.solarlog_voltage_dc-entry] @@ -1162,7 +1986,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100.0', + 'state': '100', }) # --- # name: test_all_entities[sensor.solarlog_yield_day-entry] @@ -1170,9 +1994,7 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), + 'capabilities': None, 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1188,12 +2010,6 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), 'original_device_class': , 'original_icon': None, @@ -1211,7 +2027,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'solarlog Yield day', - 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -1219,7 +2034,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.00421', + 'state': '0.004', }) # --- # name: test_all_entities[sensor.solarlog_yield_month-entry] @@ -1227,9 +2042,7 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), + 'capabilities': None, 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1245,12 +2058,6 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), 'original_device_class': , 'original_icon': None, @@ -1268,7 +2075,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'solarlog Yield month', - 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -1302,12 +2108,6 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), 'original_device_class': , 'original_icon': None, @@ -1341,9 +2141,7 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), + 'capabilities': None, 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1359,9 +2157,6 @@ }), 'name': None, 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), 'original_device_class': , 'original_icon': None, @@ -1379,7 +2174,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'solarlog Yield year', - 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -1387,7 +2181,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1.0230', + 'state': '1.023', }) # --- # name: test_all_entities[sensor.solarlog_yield_yesterday-entry] @@ -1411,12 +2205,6 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), 'original_device_class': , 'original_icon': None, @@ -1441,6 +2229,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.00521', + 'state': '0.005', }) # --- diff --git a/tests/components/solarlog/test_config_flow.py b/tests/components/solarlog/test_config_flow.py index 8a34407ff54..34da13cdf8f 100644 --- a/tests/components/solarlog/test_config_flow.py +++ b/tests/components/solarlog/test_config_flow.py @@ -1,18 +1,14 @@ """Test the solarlog config flow.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch import pytest -from solarlog_cli.solarlog_exceptions import ( - SolarLogAuthenticationError, - SolarLogConnectionError, - SolarLogError, -) +from 
solarlog_cli.solarlog_exceptions import SolarLogConnectionError, SolarLogError +from homeassistant import config_entries from homeassistant.components.solarlog import config_flow -from homeassistant.components.solarlog.const import CONF_HAS_PWD, DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD +from homeassistant.components.solarlog.const import DEFAULT_HOST, DOMAIN +from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -21,30 +17,35 @@ from .const import HOST, NAME from tests.common import MockConfigEntry -@pytest.mark.usefixtures("test_connect") async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: HOST, CONF_NAME: NAME, CONF_HAS_PWD: False}, - ) - await hass.async_block_till_done() + with ( + patch( + "homeassistant.components.solarlog.config_flow.SolarLogConfigFlow._test_connection", + return_value=True, + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST, CONF_NAME: NAME, "extended_data": False}, + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "solarlog_test_1_2_3" assert result2["data"][CONF_HOST] == "http://1.1.1.1" - assert result2["data"][CONF_HAS_PWD] is False + assert result2["data"]["extended_data"] is False assert len(mock_setup_entry.mock_calls) == 1 -def init_config_flow(hass: HomeAssistant) -> config_flow.SolarLogConfigFlow: +def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarLogConfigFlow() flow.hass = hass @@ -59,14 +60,14 @@ async def test_user( ) -> None: """Test user config.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} # tests with all provided result = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: HOST, CONF_NAME: NAME, CONF_HAS_PWD: False} + result["flow_id"], {CONF_HOST: HOST, CONF_NAME: NAME, "extended_data": False} ) await hass.async_block_till_done() @@ -77,23 +78,16 @@ async def test_user( @pytest.mark.parametrize( - ("exception1", "error1", "exception2", "error2"), + ("exception", "error"), [ - ( - SolarLogConnectionError, - {CONF_HOST: "cannot_connect"}, - SolarLogAuthenticationError, - {CONF_HOST: "password_error"}, - ), - (SolarLogError, {CONF_HOST: "unknown"}, SolarLogError, {CONF_HOST: "unknown"}), + (SolarLogConnectionError, {CONF_HOST: "cannot_connect"}), + (SolarLogError, {CONF_HOST: "unknown"}), ], ) async def test_form_exceptions( hass: HomeAssistant, - exception1: Exception, - error1: dict[str, str], - exception2: Exception, - error2: dict[str, str], + exception: Exception, + error: dict[str, str], mock_solarlog_connector: AsyncMock, ) -> None: """Test we can handle Form exceptions.""" @@ -103,95 +97,101 @@ async def test_form_exceptions( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - 
mock_solarlog_connector.test_connection.side_effect = exception1 + mock_solarlog_connector.test_connection.side_effect = exception # tests with connection error result = await flow.async_step_user( - {CONF_NAME: NAME, CONF_HOST: HOST, CONF_HAS_PWD: False} + {CONF_NAME: NAME, CONF_HOST: HOST, "extended_data": False} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - assert result["errors"] == error1 + assert result["errors"] == error - # tests with password error mock_solarlog_connector.test_connection.side_effect = None - mock_solarlog_connector.test_extended_data_available.side_effect = exception2 + # tests with all provided result = await flow.async_step_user( - {CONF_NAME: NAME, CONF_HOST: HOST, CONF_HAS_PWD: True} - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "password" - - result = await flow.async_step_password({CONF_PASSWORD: "pwd"}) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "password" - assert result["errors"] == error2 - - mock_solarlog_connector.test_extended_data_available.side_effect = None - - # tests with all provided (no password) - result = await flow.async_step_user( - {CONF_NAME: NAME, CONF_HOST: HOST, CONF_HAS_PWD: False} + {CONF_NAME: NAME, CONF_HOST: HOST, "extended_data": False} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "solarlog_test_1_2_3" assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_HAS_PWD] is False + assert result["data"]["extended_data"] is False - # tests with all provided (password) - result = await flow.async_step_password({CONF_PASSWORD: "pwd"}) + +async def test_import(hass: HomeAssistant, test_connect) -> None: + """Test import step.""" + flow = init_config_flow(hass) + + # import with only host + result = await flow.async_step_import({CONF_HOST: HOST}) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "solarlog" + assert result["data"][CONF_HOST] == HOST + + # import with only name + result = await flow.async_step_import({CONF_NAME: NAME}) await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "solarlog_test_1_2_3" - assert result["data"][CONF_PASSWORD] == "pwd" + assert result["data"][CONF_HOST] == DEFAULT_HOST + + # import with host and name + result = await flow.async_step_import({CONF_HOST: HOST, CONF_NAME: NAME}) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "solarlog_test_1_2_3" + assert result["data"][CONF_HOST] == HOST -async def test_abort_if_already_setup(hass: HomeAssistant, test_connect: None) -> None: +async def test_abort_if_already_setup(hass: HomeAssistant, test_connect) -> None: """Test we abort if the device is already setup.""" + flow = init_config_flow(hass) + MockConfigEntry( + domain="solarlog", data={CONF_NAME: NAME, CONF_HOST: HOST} + ).add_to_hass(hass) - MockConfigEntry(domain=DOMAIN, data={CONF_NAME: NAME, CONF_HOST: HOST}).add_to_hass( - hass - ) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result = await 
hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: HOST, CONF_NAME: "solarlog_test_7_8_9", CONF_HAS_PWD: False}, + # Should fail, same HOST different NAME (default) + result = await flow.async_step_import( + {CONF_HOST: HOST, CONF_NAME: "solarlog_test_7_8_9", "extended_data": False} ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + # Should fail, same HOST and NAME + result = await flow.async_step_user({CONF_HOST: HOST, CONF_NAME: NAME}) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {CONF_HOST: "already_configured"} + + # SHOULD pass, diff HOST (without http://), different NAME + result = await flow.async_step_import( + {CONF_HOST: "2.2.2.2", CONF_NAME: "solarlog_test_7_8_9", "extended_data": False} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "solarlog_test_7_8_9" + assert result["data"][CONF_HOST] == "http://2.2.2.2" + + # SHOULD pass, diff HOST, same NAME + result = await flow.async_step_import( + {CONF_HOST: "http://2.2.2.2", CONF_NAME: NAME, "extended_data": False} + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "solarlog_test_1_2_3" + assert result["data"][CONF_HOST] == "http://2.2.2.2" + -@pytest.mark.parametrize( - ("has_password", "password"), - [ - (True, "pwd"), - (False, ""), - ], -) async def test_reconfigure_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_solarlog_connector: AsyncMock, - has_password: bool, - password: str, + hass: HomeAssistant, mock_setup_entry: AsyncMock ) -> None: """Test config flow options.""" entry = MockConfigEntry( @@ -199,87 +199,26 @@ async def test_reconfigure_flow( title="solarlog_test_1_2_3", data={ CONF_HOST: HOST, - CONF_HAS_PWD: False, + "extended_data": False, }, - minor_version=3, ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" - # test with all data provided result = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HAS_PWD: True, CONF_PASSWORD: password} + result["flow_id"], {"extended_data": True} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" assert len(mock_setup_entry.mock_calls) == 1 - - entry = hass.config_entries.async_get_entry(entry.entry_id) - assert entry - assert entry.title == "solarlog_test_1_2_3" - assert entry.data[CONF_HAS_PWD] == has_password - assert entry.data[CONF_PASSWORD] == password - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (SolarLogAuthenticationError, {CONF_HOST: "password_error"}), - (SolarLogError, {CONF_HOST: "unknown"}), - ], -) -async def test_reauth( - hass: HomeAssistant, - exception: Exception, - error: dict[str, str], - mock_solarlog_connector: AsyncMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test reauth-flow works.""" - - entry = MockConfigEntry( - domain=DOMAIN, - title="solarlog_test_1_2_3", - data={ - CONF_HOST: HOST, - CONF_HAS_PWD: True, - CONF_PASSWORD: "pwd", - }, - minor_version=3, - ) - entry.add_to_hass(hass) - - result = await entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == 
"reauth_confirm" - - mock_solarlog_connector.test_extended_data_available.side_effect = exception - - # tests with connection error - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_PASSWORD: "other_pwd"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == error - - mock_solarlog_connector.test_extended_data_available.side_effect = None - - # tests with all information provided - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_PASSWORD: "other_pwd"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - assert entry.data[CONF_PASSWORD] == "other_pwd" diff --git a/tests/components/solarlog/test_diagnostics.py b/tests/components/solarlog/test_diagnostics.py deleted file mode 100644 index bc0b020462d..00000000000 --- a/tests/components/solarlog/test_diagnostics.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Test Solarlog diagnostics.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion -from syrupy.filters import props - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from . import setup_platform - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_config_entry: MockConfigEntry, - mock_solarlog_connector: AsyncMock, - snapshot: SnapshotAssertion, -) -> None: - """Test config entry diagnostics.""" - await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) - - result = await get_diagnostics_for_config_entry( - hass, hass_client, mock_config_entry - ) - - assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/solarlog/test_init.py b/tests/components/solarlog/test_init.py index b4ef270e78b..f9f00ef601b 100644 --- a/tests/components/solarlog/test_init.py +++ b/tests/components/solarlog/test_init.py @@ -2,19 +2,12 @@ from unittest.mock import AsyncMock -import pytest -from solarlog_cli.solarlog_exceptions import ( - SolarLogAuthenticationError, - SolarLogConnectionError, - SolarLogError, - SolarLogUpdateError, -) +from solarlog_cli.solarlog_exceptions import SolarLogConnectionError -from homeassistant.components.solarlog.const import CONF_HAS_PWD, DOMAIN +from homeassistant.components.solarlog.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry @@ -39,103 +32,25 @@ async def test_load_unload( assert mock_config_entry.state is ConfigEntryState.NOT_LOADED -@pytest.mark.parametrize( - ("exception", "error"), - [ - (SolarLogAuthenticationError, ConfigEntryState.SETUP_ERROR), - (SolarLogUpdateError, ConfigEntryState.SETUP_RETRY), - ], -) -async def test_setup_error( +async def test_raise_config_entry_not_ready_when_offline( hass: HomeAssistant, - exception: SolarLogError, - error: str, mock_config_entry: MockConfigEntry, mock_solarlog_connector: AsyncMock, ) -> None: - """Test errors in 
setting up coordinator (i.e. login error).""" + """Config entry state is SETUP_RETRY when Solarlog is offline.""" - mock_solarlog_connector.login.side_effect = exception + mock_solarlog_connector.update_data.side_effect = SolarLogConnectionError await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) await hass.async_block_till_done() - assert mock_config_entry.state == error - - if error == ConfigEntryState.SETUP_RETRY: - assert len(hass.config_entries.flow.async_progress()) == 0 - - -@pytest.mark.parametrize( - ("login_side_effect", "login_return_value", "entry_state"), - [ - (SolarLogAuthenticationError, False, ConfigEntryState.SETUP_ERROR), - (ConfigEntryNotReady, False, ConfigEntryState.SETUP_RETRY), - (None, False, ConfigEntryState.SETUP_ERROR), - (None, True, ConfigEntryState.SETUP_RETRY), - ], -) -async def test_auth_error_during_first_refresh( - hass: HomeAssistant, - login_side_effect: Exception | None, - login_return_value: bool, - entry_state: str, - mock_config_entry: MockConfigEntry, - mock_solarlog_connector: AsyncMock, -) -> None: - """Test the correct exceptions are thrown for auth error during first refresh.""" - - mock_solarlog_connector.password.return_value = "" - mock_solarlog_connector.update_data.side_effect = SolarLogAuthenticationError - - mock_solarlog_connector.login.return_value = login_return_value - mock_solarlog_connector.login.side_effect = login_side_effect - - await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) - await hass.async_block_till_done() - - assert mock_config_entry.state == entry_state - - -@pytest.mark.parametrize( - ("exception"), - [ - (SolarLogConnectionError), - (SolarLogUpdateError), - ], -) -async def test_other_exceptions_during_first_refresh( - hass: HomeAssistant, - exception: SolarLogError, - mock_config_entry: MockConfigEntry, - mock_solarlog_connector: AsyncMock, -) -> None: - """Test the correct exceptions are thrown during first refresh.""" - - mock_solarlog_connector.update_data.side_effect = exception - - await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) - await hass.async_block_till_done() - - assert mock_config_entry.state == ConfigEntryState.SETUP_RETRY + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY assert len(hass.config_entries.flow.async_progress()) == 0 -@pytest.mark.parametrize( - ("minor_version", "suffix"), - [ - (1, "time"), - (2, "last_updated"), - ], -) async def test_migrate_config_entry( - hass: HomeAssistant, - minor_version: int, - suffix: str, - device_registry: DeviceRegistry, - entity_registry: EntityRegistry, - mock_solarlog_connector: AsyncMock, + hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry ) -> None: """Test successful migration of entry data.""" entry = MockConfigEntry( @@ -145,38 +60,36 @@ async def test_migrate_config_entry( CONF_HOST: HOST, }, version=1, - minor_version=minor_version, + minor_version=1, ) entry.add_to_hass(hass) - device = device_registry.async_get_or_create( + device = device_reg.async_get_or_create( config_entry_id=entry.entry_id, identifiers={(DOMAIN, entry.entry_id)}, manufacturer="Solar-Log", name="solarlog", ) - uid = f"{entry.entry_id}_{suffix}" - - sensor_entity = entity_registry.async_get_or_create( + sensor_entity = entity_reg.async_get_or_create( config_entry=entry, platform=DOMAIN, domain=Platform.SENSOR, - unique_id=uid, + unique_id=f"{entry.entry_id}_time", device_id=device.id, ) assert entry.version == 1 - assert entry.minor_version == minor_version - assert sensor_entity.unique_id == 
f"{entry.entry_id}_{suffix}" + assert entry.minor_version == 1 + assert sensor_entity.unique_id == f"{entry.entry_id}_time" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - entity_migrated = entity_registry.async_get(sensor_entity.entity_id) + entity_migrated = entity_reg.async_get(sensor_entity.entity_id) assert entity_migrated assert entity_migrated.unique_id == f"{entry.entry_id}_last_updated" assert entry.version == 1 - assert entry.minor_version == 3 + assert entry.minor_version == 2 assert entry.data[CONF_HOST] == HOST - assert entry.data[CONF_HAS_PWD] is False + assert entry.data["extended_data"] is False diff --git a/tests/components/solarlog/test_sensor.py b/tests/components/solarlog/test_sensor.py index 77aa0308cda..bc90e8b25c0 100644 --- a/tests/components/solarlog/test_sensor.py +++ b/tests/components/solarlog/test_sensor.py @@ -9,13 +9,11 @@ from solarlog_cli.solarlog_exceptions import ( SolarLogConnectionError, SolarLogUpdateError, ) -from solarlog_cli.solarlog_models import InverterData from syrupy import SnapshotAssertion from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceRegistry -from homeassistant.helpers.entity_registry import EntityRegistry +from homeassistant.helpers import entity_registry as er from . import setup_platform @@ -27,7 +25,7 @@ async def test_all_entities( snapshot: SnapshotAssertion, mock_solarlog_connector: AsyncMock, mock_config_entry: MockConfigEntry, - entity_registry: EntityRegistry, + entity_registry: er.EntityRegistry, ) -> None: """Test all entities.""" @@ -35,49 +33,6 @@ async def test_all_entities( await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) -async def test_add_remove_entities( - hass: HomeAssistant, - mock_solarlog_connector: AsyncMock, - mock_config_entry: MockConfigEntry, - device_registry: DeviceRegistry, - entity_registry: EntityRegistry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test if entities are added and old are removed.""" - await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) - - assert hass.states.get("sensor.inverter_1_consumption_year").state == "354.687" - - # test no changes (coordinator.py line 114) - freezer.tick(delta=timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - mock_solarlog_connector.update_device_list.return_value = { - 0: InverterData(name="Inv 1", enabled=True), - 2: InverterData(name="Inverter 3", enabled=True), - } - mock_solarlog_connector.update_inverter_data.return_value = { - 0: InverterData( - name="Inv 1", enabled=True, consumption_year=354687, current_power=5 - ), - 2: InverterData( - name="Inverter 3", enabled=True, consumption_year=454, current_power=7 - ), - } - mock_solarlog_connector.device_name = {0: "Inv 1", 2: "Inverter 3"}.get - mock_solarlog_connector.device_enabled = {0: True, 2: True}.get - - freezer.tick(delta=timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get("sensor.inverter_1_consumption_year") is None - assert hass.states.get("sensor.inv_1_consumption_year").state == "354.687" - assert hass.states.get("sensor.inverter_2_consumption_year") is None - assert hass.states.get("sensor.inverter_3_consumption_year").state == "0.454" - - @pytest.mark.parametrize( "exception", [ diff --git a/tests/components/soma/test_config_flow.py b/tests/components/soma/test_config_flow.py 
index 67109e37c6d..8b8548bfe3e 100644 --- a/tests/components/soma/test_config_flow.py +++ b/tests/components/soma/test_config_flow.py @@ -5,8 +5,7 @@ from unittest.mock import patch from api.soma_api import SomaApi from requests import RequestException -from homeassistant.components.soma import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.components.soma import DOMAIN, config_flow from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -18,66 +17,57 @@ MOCK_PORT = 3000 async def test_form(hass: HomeAssistant) -> None: """Test user form showing.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) + flow = config_flow.SomaFlowHandler() + flow.hass = hass + result = await flow.async_step_user() assert result["type"] is FlowResultType.FORM async def test_import_abort(hass: HomeAssistant) -> None: """Test configuration from YAML aborting with existing entity.""" + flow = config_flow.SomaFlowHandler() + flow.hass = hass MockConfigEntry(domain=DOMAIN).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT} - ) + result = await flow.async_step_import() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_setup" async def test_import_create(hass: HomeAssistant) -> None: """Test configuration from YAML.""" + flow = config_flow.SomaFlowHandler() + flow.hass = hass with patch.object(SomaApi, "list_devices", return_value={"result": "success"}): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={"host": MOCK_HOST, "port": MOCK_PORT}, - ) + result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) assert result["type"] is FlowResultType.CREATE_ENTRY async def test_error_status(hass: HomeAssistant) -> None: """Test Connect successfully returning error status.""" + flow = config_flow.SomaFlowHandler() + flow.hass = hass with patch.object(SomaApi, "list_devices", return_value={"result": "error"}): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={"host": MOCK_HOST, "port": MOCK_PORT}, - ) + result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "result_error" async def test_key_error(hass: HomeAssistant) -> None: """Test Connect returning empty string.""" - + flow = config_flow.SomaFlowHandler() + flow.hass = hass with patch.object(SomaApi, "list_devices", return_value={}): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={"host": MOCK_HOST, "port": MOCK_PORT}, - ) + result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "connection_error" async def test_exception(hass: HomeAssistant) -> None: """Test if RequestException fires when no connection can be made.""" + flow = config_flow.SomaFlowHandler() + flow.hass = hass with patch.object(SomaApi, "list_devices", side_effect=RequestException()): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={"host": MOCK_HOST, "port": MOCK_PORT}, - ) + result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) assert result["type"] is FlowResultType.ABORT assert result["reason"] == 
"connection_error" @@ -85,10 +75,8 @@ async def test_exception(hass: HomeAssistant) -> None: async def test_full_flow(hass: HomeAssistant) -> None: """Check classic use case.""" hass.data[DOMAIN] = {} + flow = config_flow.SomaFlowHandler() + flow.hass = hass with patch.object(SomaApi, "list_devices", return_value={"result": "success"}): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={"host": MOCK_HOST, "port": MOCK_PORT}, - ) + result = await flow.async_step_user({"host": MOCK_HOST, "port": MOCK_PORT}) assert result["type"] is FlowResultType.CREATE_ENTRY diff --git a/tests/components/sonarr/__init__.py b/tests/components/sonarr/__init__.py index 660102ed082..b6050808a34 100644 --- a/tests/components/sonarr/__init__.py +++ b/tests/components/sonarr/__init__.py @@ -5,6 +5,6 @@ from homeassistant.const import CONF_API_KEY, CONF_URL MOCK_REAUTH_INPUT = {CONF_API_KEY: "test-api-key-reauth"} MOCK_USER_INPUT = { - CONF_URL: "http://192.168.1.189:8989/", + CONF_URL: "http://192.168.1.189:8989", CONF_API_KEY: "MOCK_API_KEY", } diff --git a/tests/components/sonarr/conftest.py b/tests/components/sonarr/conftest.py index de7a3f781d7..739880a99aa 100644 --- a/tests/components/sonarr/conftest.py +++ b/tests/components/sonarr/conftest.py @@ -1,6 +1,5 @@ """Fixtures for Sonarr integration tests.""" -from collections.abc import Generator import json from unittest.mock import MagicMock, patch @@ -14,6 +13,7 @@ from aiopyarr import ( SystemStatus, ) import pytest +from typing_extensions import Generator from homeassistant.components.sonarr.const import ( CONF_BASE_PATH, diff --git a/tests/components/sonarr/test_config_flow.py b/tests/components/sonarr/test_config_flow.py index efbfbd749b3..6bd14e8b581 100644 --- a/tests/components/sonarr/test_config_flow.py +++ b/tests/components/sonarr/test_config_flow.py @@ -11,7 +11,7 @@ from homeassistant.components.sonarr.const import ( DEFAULT_WANTED_MAX_ITEMS, DOMAIN, ) -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -50,34 +50,6 @@ async def test_cannot_connect( assert result["errors"] == {"base": "cannot_connect"} -async def test_url_rewrite( - hass: HomeAssistant, - mock_sonarr_config_flow: MagicMock, - mock_setup_entry: None, -) -> None: - """Test the full manual user flow from start to finish.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={CONF_SOURCE: SOURCE_USER}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - user_input = MOCK_USER_INPUT.copy() - user_input[CONF_URL] = "https://192.168.1.189" - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=user_input, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "192.168.1.189" - - assert result["data"] - assert result["data"][CONF_URL] == "https://192.168.1.189:443/" - - async def test_invalid_auth( hass: HomeAssistant, mock_sonarr_config_flow: MagicMock ) -> None: @@ -124,7 +96,15 @@ async def test_full_reauth_flow_implementation( """Test the manual reauth flow from start to finish.""" entry = init_integration - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + 
CONF_SOURCE: SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -173,7 +153,7 @@ async def test_full_user_flow_implementation( assert result["title"] == "192.168.1.189" assert result["data"] - assert result["data"][CONF_URL] == "http://192.168.1.189:8989/" + assert result["data"][CONF_URL] == "http://192.168.1.189:8989" async def test_full_user_flow_advanced_options( @@ -203,7 +183,7 @@ async def test_full_user_flow_advanced_options( assert result["title"] == "192.168.1.189" assert result["data"] - assert result["data"][CONF_URL] == "http://192.168.1.189:8989/" + assert result["data"][CONF_URL] == "http://192.168.1.189:8989" assert result["data"][CONF_VERIFY_SSL] diff --git a/tests/components/songpal/test_config_flow.py b/tests/components/songpal/test_config_flow.py index 5215e9b3c0e..8f503360702 100644 --- a/tests/components/songpal/test_config_flow.py +++ b/tests/components/songpal/test_config_flow.py @@ -6,12 +6,7 @@ from unittest.mock import patch from homeassistant.components import ssdp from homeassistant.components.songpal.const import CONF_ENDPOINT, DOMAIN -from homeassistant.config_entries import ( - SOURCE_IMPORT, - SOURCE_SSDP, - SOURCE_USER, - ConfigFlowResult, -) +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -47,7 +42,7 @@ SSDP_DATA = ssdp.SsdpServiceInfo( ) -def _flow_next(hass: HomeAssistant, flow_id: str) -> ConfigFlowResult: +def _flow_next(hass, flow_id): return next( flow for flow in hass.config_entries.flow.async_progress() @@ -148,7 +143,7 @@ async def test_flow_import_without_name(hass: HomeAssistant) -> None: mocked_device.get_interface_information.assert_called_once() -def _create_mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: +def _create_mock_config_entry(hass): MockConfigEntry( domain=DOMAIN, unique_id="uuid:0000", diff --git a/tests/components/songpal/test_media_player.py b/tests/components/songpal/test_media_player.py index 2baea6cb5c9..8f56170b839 100644 --- a/tests/components/songpal/test_media_player.py +++ b/tests/components/songpal/test_media_player.py @@ -2,7 +2,6 @@ from datetime import timedelta import logging -from typing import Any from unittest.mock import AsyncMock, MagicMock, call, patch import pytest @@ -55,12 +54,12 @@ SUPPORT_SONGPAL = ( ) -def _get_attributes(hass: HomeAssistant) -> dict[str, Any]: +def _get_attributes(hass): state = hass.states.get(ENTITY_ID) return state.as_dict()["attributes"] -async def _call(hass: HomeAssistant, service: str, **argv: Any) -> None: +async def _call(hass, service, **argv): await hass.services.async_call( media_player.DOMAIN, service, diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 04b35e2c021..51dd2b9047c 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -1,21 +1,15 @@ """Configuration for Sonos tests.""" import asyncio -from collections.abc import Callable, Coroutine, Generator +from collections.abc import Callable from copy import copy from ipaddress import ip_address -from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from soco import SoCo from soco.alarms import Alarms -from soco.data_structures import ( - DidlFavorite, - 
DidlMusicTrack, - DidlPlaylistContainer, - SearchResult, -) +from soco.data_structures import DidlFavorite, SearchResult from soco.events_base import Event as SonosEvent from homeassistant.components import ssdp, zeroconf @@ -23,7 +17,6 @@ from homeassistant.components.media_player import DOMAIN as MP_DOMAIN from homeassistant.components.sonos import DOMAIN from homeassistant.const import CONF_HOSTS from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture, load_json_value_fixture @@ -85,7 +78,7 @@ class SonosMockService: class SonosMockEvent: """Mock a sonos Event used in callbacks.""" - def __init__(self, soco, service, variables) -> None: + def __init__(self, soco, service, variables): """Initialize the instance.""" self.sid = f"{soco.uid}_sub0000000001" self.seq = "0" @@ -126,9 +119,7 @@ async def async_autosetup_sonos(async_setup_sonos): @pytest.fixture -def async_setup_sonos( - hass: HomeAssistant, config_entry: MockConfigEntry, fire_zgs_event -) -> Callable[[], Coroutine[Any, Any, None]]: +def async_setup_sonos(hass, config_entry, fire_zgs_event): """Return a coroutine to set up a Sonos integration instance on demand.""" async def _wrapper(): @@ -144,7 +135,7 @@ def async_setup_sonos( @pytest.fixture(name="config_entry") -def config_entry_fixture() -> MockConfigEntry: +def config_entry_fixture(): """Create a mock Sonos config entry.""" return MockConfigEntry(domain=DOMAIN, title="Sonos") @@ -189,8 +180,6 @@ class SoCoMockFactory: current_track_info_empty, battery_info, alarm_clock, - sonos_playlists: SearchResult, - sonos_queue: list[DidlMusicTrack], ) -> None: """Initialize the mock factory.""" self.mock_list: dict[str, MockSoCo] = {} @@ -199,8 +188,6 @@ class SoCoMockFactory: self.current_track_info = current_track_info_empty self.battery_info = battery_info self.alarm_clock = alarm_clock - self.sonos_playlists = sonos_playlists - self.sonos_queue = sonos_queue def cache_mock( self, mock_soco: MockSoCo, ip_address: str, name: str = "Zone A" @@ -213,8 +200,6 @@ class SoCoMockFactory: mock_soco.music_library = self.music_library mock_soco.get_current_track_info.return_value = self.current_track_info mock_soco.music_source_from_uri = SoCo.music_source_from_uri - mock_soco.get_sonos_playlists.return_value = self.sonos_playlists - mock_soco.get_queue.return_value = self.sonos_queue my_speaker_info = self.speaker_info.copy() my_speaker_info["zone_name"] = name my_speaker_info["uid"] = mock_soco.uid @@ -263,39 +248,13 @@ def soco_sharelink(): yield mock_instance -@pytest.fixture(name="sonos_websocket") -def sonos_websocket(): - """Fixture to mock SonosWebSocket.""" - with patch( - "homeassistant.components.sonos.speaker.SonosWebsocket" - ) as mock_sonos_ws: - mock_instance = AsyncMock() - mock_instance.play_clip = AsyncMock() - mock_instance.play_clip.return_value = [{"success": 1}, {}] - mock_sonos_ws.return_value = mock_instance - yield mock_instance - - @pytest.fixture(name="soco_factory") def soco_factory( - music_library, - speaker_info, - current_track_info_empty, - battery_info, - alarm_clock, - sonos_playlists: SearchResult, - sonos_websocket, - sonos_queue: list[DidlMusicTrack], + music_library, speaker_info, current_track_info_empty, battery_info, alarm_clock ): """Create factory for instantiating SoCo mocks.""" factory = SoCoMockFactory( - music_library, - speaker_info, - current_track_info_empty, - battery_info, - alarm_clock, - sonos_playlists, - sonos_queue=sonos_queue, + 
music_library, speaker_info, current_track_info_empty, battery_info, alarm_clock ) with ( patch("homeassistant.components.sonos.SoCo", new=factory.get_mock), @@ -312,7 +271,7 @@ def soco_fixture(soco_factory): @pytest.fixture(autouse=True) -def silent_ssdp_scanner() -> Generator[None]: +async def silent_ssdp_scanner(hass): """Start SSDP component and get Scanner, prevent actual SSDP traffic.""" with ( patch("homeassistant.components.ssdp.Scanner._async_start_ssdp_listeners"), @@ -332,13 +291,7 @@ def silent_ssdp_scanner() -> Generator[None]: def discover_fixture(soco): """Create a mock soco discover fixture.""" - def do_callback( - hass: HomeAssistant, - callback: Callable[ - [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None - ], - match_dict: dict[str, str] | None = None, - ) -> MagicMock: + def do_callback(hass, callback, *args, **kwargs): callback( ssdp.SsdpServiceInfo( ssdp_location=f"http://{soco.ip_address}/", @@ -372,21 +325,6 @@ def sonos_favorites_fixture() -> SearchResult: return SearchResult(favorite_list, "favorites", 3, 3, 1) -@pytest.fixture(name="sonos_playlists") -def sonos_playlists_fixture() -> SearchResult: - """Create sonos playlist fixture.""" - playlists = load_json_value_fixture("sonos_playlists.json", "sonos") - playlists_list = [DidlPlaylistContainer.from_dict(pl) for pl in playlists] - return SearchResult(playlists_list, "sonos_playlists", 1, 1, 0) - - -@pytest.fixture(name="sonos_queue") -def sonos_queue() -> list[DidlMusicTrack]: - """Create sonos queue fixture.""" - queue = load_json_value_fixture("sonos_queue.json", "sonos") - return [DidlMusicTrack.from_dict(track) for track in queue] - - class MockMusicServiceItem: """Mocks a Soco MusicServiceItem.""" @@ -515,7 +453,6 @@ def mock_get_music_library_information( "object.container.album.musicAlbum", ) ] - return [] @pytest.fixture(name="music_library_browse_categories") @@ -711,9 +648,7 @@ def zgs_discovery_fixture(): @pytest.fixture(name="fire_zgs_event") -def zgs_event_fixture( - hass: HomeAssistant, soco: SoCo, zgs_discovery: str -) -> Callable[[], Coroutine[Any, Any, None]]: +def zgs_event_fixture(hass: HomeAssistant, soco: SoCo, zgs_discovery: str): """Create alarm_event fixture.""" variables = {"ZoneGroupState": zgs_discovery} @@ -725,26 +660,3 @@ def zgs_event_fixture( await hass.async_block_till_done(wait_background_tasks=True) return _wrapper - - -@pytest.fixture(name="sonos_setup_two_speakers") -async def sonos_setup_two_speakers( - hass: HomeAssistant, soco_factory: SoCoMockFactory -) -> list[MockSoCo]: - """Set up home assistant with two Sonos Speakers.""" - soco_lr = soco_factory.cache_mock(MockSoCo(), "10.10.10.1", "Living Room") - soco_br = soco_factory.cache_mock(MockSoCo(), "10.10.10.2", "Bedroom") - await async_setup_component( - hass, - DOMAIN, - { - DOMAIN: { - "media_player": { - "interface_addr": "127.0.0.1", - "hosts": ["10.10.10.1", "10.10.10.2"], - } - } - }, - ) - await hass.async_block_till_done() - return [soco_lr, soco_br] diff --git a/tests/components/sonos/fixtures/av_transport.json b/tests/components/sonos/fixtures/av_transport.json deleted file mode 100644 index 743ac61e3ff..00000000000 --- a/tests/components/sonos/fixtures/av_transport.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "transport_state": "PLAYING", - "current_play_mode": "NORMAL", - "current_crossfade_mode": "0", - "number_of_tracks": "1", - "current_track": "1", - "current_section": "0", - "current_track_uri": "x-rincon:RINCON_test_10.10.10.2", - "current_track_duration": "", - 
"current_track_meta_data": "", - "next_track_uri": "", - "next_track_meta_data": "", - "enqueued_transport_uri": "", - "enqueued_transport_uri_meta_data": "", - "playback_storage_medium": "NETWORK", - "av_transport_uri": "x-rincon:RINCON_test_10.10.10.2", - "av_transport_uri_meta_data": "", - "next_av_transport_uri": "", - "next_av_transport_uri_meta_data": "", - "current_transport_actions": "Stop, Play", - "current_valid_play_modes": "CROSSFADE", - "direct_control_client_id": "", - "direct_control_is_suspended": "0", - "direct_control_account_id": "", - "transport_status": "OK", - "sleep_timer_generation": "0", - "alarm_running": "0", - "snooze_running": "0", - "restart_pending": "0", - "transport_play_speed": "NOT_IMPLEMENTED", - "current_media_duration": "NOT_IMPLEMENTED", - "record_storage_medium": "NOT_IMPLEMENTED", - "possible_playback_storage_media": "NONE, NETWORK", - "possible_record_storage_media": "NOT_IMPLEMENTED", - "record_medium_write_status": "NOT_IMPLEMENTED", - "current_record_quality_mode": "NOT_IMPLEMENTED", - "possible_record_quality_modes": "NOT_IMPLEMENTED" -} diff --git a/tests/components/sonos/fixtures/sonos_playlists.json b/tests/components/sonos/fixtures/sonos_playlists.json deleted file mode 100644 index f0731467697..00000000000 --- a/tests/components/sonos/fixtures/sonos_playlists.json +++ /dev/null @@ -1,13 +0,0 @@ -[ - { - "title": "sample playlist", - "parent_id": "SQ:", - "item_id": "SQ:0", - "resources": [ - { - "uri": "file:///jffs/settings/savedqueues.rsq#0", - "protocol_info": "file:*:audio/mpegurl:*" - } - ] - } -] diff --git a/tests/components/sonos/fixtures/sonos_queue.json b/tests/components/sonos/fixtures/sonos_queue.json deleted file mode 100644 index 50689a00e1d..00000000000 --- a/tests/components/sonos/fixtures/sonos_queue.json +++ /dev/null @@ -1,30 +0,0 @@ -[ - { - "title": "Something", - "album": "Abbey Road", - "creator": "The Beatles", - "item_id": "Q:0/1", - "parent_id": "Q:0", - "original_track_number": 3, - "resources": [ - { - "uri": "x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/03%20Something.mp3", - "protocol_info": "file:*:audio/mpegurl:*" - } - ] - }, - { - "title": "Come Together", - "album": "Abbey Road", - "creator": "The Beatles", - "item_id": "Q:0/2", - "parent_id": "Q:0", - "original_track_number": 1, - "resources": [ - { - "uri": "x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/01%20Come%20Together.mp3", - "protocol_info": "file:*:audio/mpegurl:*" - } - ] - } -] diff --git a/tests/components/sonos/fixtures/zgs_group.xml b/tests/components/sonos/fixtures/zgs_group.xml deleted file mode 100644 index 58f40be0049..00000000000 --- a/tests/components/sonos/fixtures/zgs_group.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - diff --git a/tests/components/sonos/fixtures/zgs_two_single.xml b/tests/components/sonos/fixtures/zgs_two_single.xml deleted file mode 100644 index 18c3c9231c6..00000000000 --- a/tests/components/sonos/fixtures/zgs_two_single.xml +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - - - diff --git a/tests/components/sonos/snapshots/test_media_player.ambr b/tests/components/sonos/snapshots/test_media_player.ambr deleted file mode 100644 index f382d341de6..00000000000 --- a/tests/components/sonos/snapshots/test_media_player.ambr +++ /dev/null @@ -1,76 +0,0 @@ -# serializer version: 1 -# name: test_entity_basic[media_player.zone_a-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - }), - 'config_entry_id': , - 'device_class': None, - 
'device_id': , - 'disabled_by': None, - 'domain': 'media_player', - 'entity_category': None, - 'entity_id': 'media_player.zone_a', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'sonos', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'RINCON_test', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_basic[media_player.zone_a-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'speaker', - 'friendly_name': 'Zone A', - 'group_members': list([ - 'media_player.zone_a', - ]), - 'is_volume_muted': False, - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'supported_features': , - 'volume_level': 0.19, - }), - 'context': , - 'entity_id': 'media_player.zone_a', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'idle', - }) -# --- -# name: test_media_get_queue - dict({ - 'media_player.zone_a': list([ - dict({ - 'media_album_name': 'Abbey Road', - 'media_artist': 'The Beatles', - 'media_content_id': 'x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/03%20Something.mp3', - 'media_title': 'Something', - }), - dict({ - 'media_album_name': 'Abbey Road', - 'media_artist': 'The Beatles', - 'media_content_id': 'x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/01%20Come%20Together.mp3', - 'media_title': 'Come Together', - }), - ]), - }) -# --- diff --git a/tests/components/sonos/test_init.py b/tests/components/sonos/test_init.py index 36a6571f3b0..85ab8f4dd5a 100644 --- a/tests/components/sonos/test_init.py +++ b/tests/components/sonos/test_init.py @@ -138,7 +138,7 @@ async def test_async_poll_manual_hosts_warnings( await manager.async_poll_manual_hosts() assert len(caplog.messages) == 1 record = caplog.records[0] - assert record.levelname == "WARNING" + assert record.levelname == "INFO" assert "Connection reestablished to Sonos device" in record.message assert mock_async_call_later.call_count == 3 diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index 63b2c8889ec..ab9b598bb04 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -1,66 +1,29 @@ """Tests for the Sonos Media Player platform.""" +import logging from typing import Any -from unittest.mock import patch import pytest -from soco.data_structures import SearchResult -from sonos_websocket.exception import SonosWebsocketError -from syrupy import SnapshotAssertion from homeassistant.components.media_player import ( - ATTR_INPUT_SOURCE, - ATTR_MEDIA_ANNOUNCE, - ATTR_MEDIA_CONTENT_ID, - ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_ENQUEUE, - ATTR_MEDIA_EXTRA, - ATTR_MEDIA_REPEAT, - ATTR_MEDIA_SHUFFLE, - ATTR_MEDIA_VOLUME_LEVEL, DOMAIN as MP_DOMAIN, - SERVICE_CLEAR_PLAYLIST, SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOURCE, MediaPlayerEnqueue, - RepeatMode, -) -from homeassistant.components.sonos.const import ( - DOMAIN as SONOS_DOMAIN, - SOURCE_LINEIN, - SOURCE_TV, -) -from homeassistant.components.sonos.media_player import ( - LONG_SERVICE_TIMEOUT, - SERVICE_GET_QUEUE, - SERVICE_RESTORE, - SERVICE_SNAPSHOT, - VOLUME_INCREMENT, -) -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_MEDIA_NEXT_TRACK, - SERVICE_MEDIA_PAUSE, - SERVICE_MEDIA_PLAY, - SERVICE_MEDIA_PREVIOUS_TRACK, - SERVICE_MEDIA_STOP, - SERVICE_REPEAT_SET, - 
SERVICE_SHUFFLE_SET, - SERVICE_VOLUME_DOWN, - SERVICE_VOLUME_SET, - SERVICE_VOLUME_UP, ) +from homeassistant.components.sonos.const import SOURCE_LINEIN, SOURCE_TV +from homeassistant.components.sonos.media_player import LONG_SERVICE_TIMEOUT +from homeassistant.const import STATE_IDLE from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import entity_registry as er +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, CONNECTION_UPNP, DeviceRegistry, ) -from homeassistant.setup import async_setup_component -from .conftest import MockMusicServiceItem, MockSoCo, SoCoMockFactory, SonosMockEvent +from .conftest import MockMusicServiceItem, SoCoMockFactory async def test_device_registry( @@ -72,7 +35,6 @@ async def test_device_registry( ) assert reg_device is not None assert reg_device.model == "Model Name" - assert reg_device.model_id == "S12" assert reg_device.sw_version == "13.1" assert reg_device.connections == { (CONNECTION_NETWORK_MAC, "00:11:22:33:44:55"), @@ -99,18 +61,15 @@ async def test_device_registry_not_portable( async def test_entity_basic( - hass: HomeAssistant, - async_autosetup_sonos, - discover, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, + hass: HomeAssistant, async_autosetup_sonos, discover ) -> None: """Test basic state and attributes.""" - entity_id = "media_player.zone_a" - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - state = hass.states.get(entity_entry.entity_id) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + state = hass.states.get("media_player.zone_a") + assert state.state == STATE_IDLE + attributes = state.attributes + assert attributes["friendly_name"] == "Zone A" + assert attributes["is_volume_muted"] is False + assert attributes["volume_level"] == 0.19 @pytest.mark.parametrize( @@ -198,9 +157,9 @@ async def test_play_media_library( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: media_content_type, - ATTR_MEDIA_CONTENT_ID: media_content_id, + "entity_id": "media_player.zone_a", + "media_content_type": media_content_type, + "media_content_id": media_content_id, ATTR_MEDIA_ENQUEUE: enqueue, }, blocking=True, @@ -233,45 +192,6 @@ async def test_play_media_library( ) -@pytest.mark.parametrize( - ("media_content_type", "media_content_id", "message"), - [ - ( - "artist", - "A:ALBUM/UnknowAlbum", - "Could not find media in library: A:ALBUM/UnknowAlbum", - ), - ( - "UnknownContent", - "A:ALBUM/UnknowAlbum", - "Sonos does not support media content type: UnknownContent", - ), - ], -) -async def test_play_media_library_content_error( - hass: HomeAssistant, - async_autosetup_sonos, - media_content_type, - media_content_id, - message, -) -> None: - """Test playing local library errors on content and content type.""" - with pytest.raises( - ServiceValidationError, - match=message, - ): - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: media_content_type, - ATTR_MEDIA_CONTENT_ID: media_content_id, - }, - blocking=True, - ) - - _track_url = "S://192.168.42.100/music/iTunes/The%20Beatles/A%20Hard%20Day%2fs%I%20Should%20Have%20Known%20Better.mp3" @@ -286,9 +206,9 @@ async def test_play_media_lib_track_play( MP_DOMAIN, 
SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "track", - ATTR_MEDIA_CONTENT_ID: _track_url, + "entity_id": "media_player.zone_a", + "media_content_type": "track", + "media_content_id": _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.PLAY, }, blocking=True, @@ -315,9 +235,9 @@ async def test_play_media_lib_track_next( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "track", - ATTR_MEDIA_CONTENT_ID: _track_url, + "entity_id": "media_player.zone_a", + "media_content_type": "track", + "media_content_id": _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.NEXT, }, blocking=True, @@ -343,9 +263,9 @@ async def test_play_media_lib_track_replace( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "track", - ATTR_MEDIA_CONTENT_ID: _track_url, + "entity_id": "media_player.zone_a", + "media_content_type": "track", + "media_content_id": _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.REPLACE, }, blocking=True, @@ -366,9 +286,9 @@ async def test_play_media_lib_track_add( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "track", - ATTR_MEDIA_CONTENT_ID: _track_url, + "entity_id": "media_player.zone_a", + "media_content_type": "track", + "media_content_id": _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, }, blocking=True, @@ -396,9 +316,9 @@ async def test_play_media_share_link_add( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "playlist", - ATTR_MEDIA_CONTENT_ID: _share_link, + "entity_id": "media_player.zone_a", + "media_content_type": "playlist", + "media_content_id": _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, }, blocking=True, @@ -424,9 +344,9 @@ async def test_play_media_share_link_next( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "playlist", - ATTR_MEDIA_CONTENT_ID: _share_link, + "entity_id": "media_player.zone_a", + "media_content_type": "playlist", + "media_content_id": _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.NEXT, }, blocking=True, @@ -456,9 +376,9 @@ async def test_play_media_share_link_play( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "playlist", - ATTR_MEDIA_CONTENT_ID: _share_link, + "entity_id": "media_player.zone_a", + "media_content_type": "playlist", + "media_content_id": _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.PLAY, }, blocking=True, @@ -490,9 +410,9 @@ async def test_play_media_share_link_replace( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "playlist", - ATTR_MEDIA_CONTENT_ID: _share_link, + "entity_id": "media_player.zone_a", + "media_content_type": "playlist", + "media_content_id": _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.REPLACE, }, blocking=True, @@ -555,9 +475,9 @@ async def test_play_media_music_library_playlist( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "playlist", - ATTR_MEDIA_CONTENT_ID: media_content_id, + "entity_id": "media_player.zone_a", + "media_content_type": "playlist", + "media_content_id": media_content_id, }, blocking=True, ) @@ -579,68 +499,21 @@ async def test_play_media_music_library_playlist_dne( soco_mock = soco_factory.mock_list.get("192.168.42.2") soco_mock.music_library.get_playlists.return_value = _mock_playlists - with 
pytest.raises( - ServiceValidationError, - match=f"Could not find Sonos playlist: {media_content_id}", - ): + with caplog.at_level(logging.ERROR): + caplog.clear() await hass.services.async_call( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "playlist", - ATTR_MEDIA_CONTENT_ID: media_content_id, + "entity_id": "media_player.zone_a", + "media_content_type": "playlist", + "media_content_id": media_content_id, }, blocking=True, ) assert soco_mock.play_uri.call_count == 0 - - -async def test_play_sonos_playlist( - hass: HomeAssistant, - async_autosetup_sonos, - soco: MockSoCo, - sonos_playlists: SearchResult, -) -> None: - """Test that sonos playlists can be played.""" - - # Test a successful call - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "playlist", - ATTR_MEDIA_CONTENT_ID: "sample playlist", - }, - blocking=True, - ) - assert soco.clear_queue.call_count == 1 - assert soco.add_to_queue.call_count == 1 - soco.add_to_queue.asset_called_with( - sonos_playlists[0], timeout=LONG_SERVICE_TIMEOUT - ) - - # Test playing a non-existent playlist - soco.clear_queue.reset_mock() - soco.add_to_queue.reset_mock() - media_content_id: str = "bad playlist" - with pytest.raises( - ServiceValidationError, - match=f"Could not find Sonos playlist: {media_content_id}", - ): - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "playlist", - ATTR_MEDIA_CONTENT_ID: media_content_id, - }, - blocking=True, - ) - assert soco.clear_queue.call_count == 0 - assert soco.add_to_queue.call_count == 0 + assert media_content_id in caplog.text + assert "playlist" in caplog.text @pytest.mark.parametrize( @@ -673,8 +546,8 @@ async def test_select_source_line_in_tv( MP_DOMAIN, SERVICE_SELECT_SOURCE, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_INPUT_SOURCE: source, + "entity_id": "media_player.zone_a", + "source": source, }, blocking=True, ) @@ -716,8 +589,8 @@ async def test_select_source_play_uri( MP_DOMAIN, SERVICE_SELECT_SOURCE, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_INPUT_SOURCE: source, + "entity_id": "media_player.zone_a", + "source": source, }, blocking=True, ) @@ -756,8 +629,8 @@ async def test_select_source_play_queue( MP_DOMAIN, SERVICE_SELECT_SOURCE, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_INPUT_SOURCE: source, + "entity_id": "media_player.zone_a", + "source": source, }, blocking=True, ) @@ -785,8 +658,8 @@ async def test_select_source_error( MP_DOMAIN, SERVICE_SELECT_SOURCE, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_INPUT_SOURCE: "invalid_source", + "entity_id": "media_player.zone_a", + "source": "invalid_source", }, blocking=True, ) @@ -794,147 +667,6 @@ async def test_select_source_error( assert "Could not find a Sonos favorite" in str(sve.value) -async def test_shuffle_set( - hass: HomeAssistant, - soco: MockSoCo, - async_autosetup_sonos, -) -> None: - """Test the set shuffle method.""" - assert soco.play_mode == "NORMAL" - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_SHUFFLE_SET, - { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_SHUFFLE: True, - }, - blocking=True, - ) - assert soco.play_mode == "SHUFFLE_NOREPEAT" - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_SHUFFLE_SET, - { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_SHUFFLE: False, - }, - blocking=True, - ) - assert soco.play_mode == "NORMAL" - - 
-async def test_shuffle_get( - hass: HomeAssistant, - soco: MockSoCo, - async_autosetup_sonos, - no_media_event: SonosMockEvent, -) -> None: - """Test the get shuffle attribute by simulating a Sonos Event.""" - subscription = soco.avTransport.subscribe.return_value - sub_callback = subscription.callback - - state = hass.states.get("media_player.zone_a") - assert state.attributes[ATTR_MEDIA_SHUFFLE] is False - - no_media_event.variables["current_play_mode"] = "SHUFFLE_NOREPEAT" - sub_callback(no_media_event) - await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get("media_player.zone_a") - assert state.attributes[ATTR_MEDIA_SHUFFLE] is True - - # The integration keeps a copy of the last event to check for - # changes, so we create a new event. - no_media_event = SonosMockEvent( - soco, soco.avTransport, no_media_event.variables.copy() - ) - no_media_event.variables["current_play_mode"] = "NORMAL" - sub_callback(no_media_event) - await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get("media_player.zone_a") - assert state.attributes[ATTR_MEDIA_SHUFFLE] is False - - -async def test_repeat_set( - hass: HomeAssistant, - soco: MockSoCo, - async_autosetup_sonos, -) -> None: - """Test the set repeat method.""" - assert soco.play_mode == "NORMAL" - await hass.services.async_call( - MP_DOMAIN, - SERVICE_REPEAT_SET, - { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_REPEAT: RepeatMode.ALL, - }, - blocking=True, - ) - assert soco.play_mode == "REPEAT_ALL" - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_REPEAT_SET, - { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_REPEAT: RepeatMode.ONE, - }, - blocking=True, - ) - assert soco.play_mode == "REPEAT_ONE" - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_REPEAT_SET, - { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_REPEAT: RepeatMode.OFF, - }, - blocking=True, - ) - assert soco.play_mode == "NORMAL" - - -async def test_repeat_get( - hass: HomeAssistant, - soco: MockSoCo, - async_autosetup_sonos, - no_media_event: SonosMockEvent, -) -> None: - """Test the get repeat attribute by simulating a Sonos Event.""" - subscription = soco.avTransport.subscribe.return_value - sub_callback = subscription.callback - - state = hass.states.get("media_player.zone_a") - assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.OFF - - no_media_event.variables["current_play_mode"] = "REPEAT_ALL" - sub_callback(no_media_event) - await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get("media_player.zone_a") - assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.ALL - - no_media_event = SonosMockEvent( - soco, soco.avTransport, no_media_event.variables.copy() - ) - no_media_event.variables["current_play_mode"] = "REPEAT_ONE" - sub_callback(no_media_event) - await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get("media_player.zone_a") - assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.ONE - - no_media_event = SonosMockEvent( - soco, soco.avTransport, no_media_event.variables.copy() - ) - no_media_event.variables["current_play_mode"] = "NORMAL" - sub_callback(no_media_event) - await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get("media_player.zone_a") - assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.OFF - - async def test_play_media_favorite_item_id( hass: HomeAssistant, soco_factory: SoCoMockFactory, @@ -946,9 +678,9 @@ async def 
test_play_media_favorite_item_id( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "favorite_item_id", - ATTR_MEDIA_CONTENT_ID: "FV:2/4", + "entity_id": "media_player.zone_a", + "media_content_type": "favorite_item_id", + "media_content_id": "FV:2/4", }, blocking=True, ) @@ -968,240 +700,10 @@ async def test_play_media_favorite_item_id( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "favorite_item_id", - ATTR_MEDIA_CONTENT_ID: "UNKNOWN_ID", + "entity_id": "media_player.zone_a", + "media_content_type": "favorite_item_id", + "media_content_id": "UNKNOWN_ID", }, blocking=True, ) assert "UNKNOWN_ID" in str(sve.value) - - -async def _setup_hass(hass: HomeAssistant): - await async_setup_component( - hass, - SONOS_DOMAIN, - { - "sonos": { - "media_player": { - "interface_addr": "127.0.0.1", - "hosts": ["10.10.10.1", "10.10.10.2"], - } - } - }, - ) - await hass.async_block_till_done() - - -async def test_service_snapshot_restore( - hass: HomeAssistant, - soco_factory: SoCoMockFactory, -) -> None: - """Test the snapshot and restore services.""" - soco_factory.cache_mock(MockSoCo(), "10.10.10.1", "Living Room") - soco_factory.cache_mock(MockSoCo(), "10.10.10.2", "Bedroom") - await _setup_hass(hass) - with patch( - "homeassistant.components.sonos.speaker.Snapshot.snapshot" - ) as mock_snapshot: - await hass.services.async_call( - SONOS_DOMAIN, - SERVICE_SNAPSHOT, - { - ATTR_ENTITY_ID: ["media_player.living_room", "media_player.bedroom"], - }, - blocking=True, - ) - assert mock_snapshot.call_count == 2 - - with patch( - "homeassistant.components.sonos.speaker.Snapshot.restore" - ) as mock_restore: - await hass.services.async_call( - SONOS_DOMAIN, - SERVICE_RESTORE, - { - ATTR_ENTITY_ID: ["media_player.living_room", "media_player.bedroom"], - }, - blocking=True, - ) - assert mock_restore.call_count == 2 - - -async def test_volume( - hass: HomeAssistant, - soco: MockSoCo, - async_autosetup_sonos, -) -> None: - """Test the media player volume services.""" - initial_volume = soco.volume - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_VOLUME_UP, - { - ATTR_ENTITY_ID: "media_player.zone_a", - }, - blocking=True, - ) - assert soco.volume == initial_volume + VOLUME_INCREMENT - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_VOLUME_DOWN, - { - ATTR_ENTITY_ID: "media_player.zone_a", - }, - blocking=True, - ) - assert soco.volume == initial_volume - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_VOLUME_SET, - {ATTR_ENTITY_ID: "media_player.zone_a", ATTR_MEDIA_VOLUME_LEVEL: 0.30}, - blocking=True, - ) - # SoCo uses 0..100 for its range. 
- assert soco.volume == 30 - - -@pytest.mark.parametrize( - ("service", "client_call"), - [ - (SERVICE_MEDIA_PLAY, "play"), - (SERVICE_MEDIA_PAUSE, "pause"), - (SERVICE_MEDIA_STOP, "stop"), - (SERVICE_MEDIA_NEXT_TRACK, "next"), - (SERVICE_MEDIA_PREVIOUS_TRACK, "previous"), - (SERVICE_CLEAR_PLAYLIST, "clear_queue"), - ], -) -async def test_media_transport( - hass: HomeAssistant, - soco: MockSoCo, - async_autosetup_sonos, - service: str, - client_call: str, -) -> None: - """Test the media player transport services.""" - await hass.services.async_call( - MP_DOMAIN, - service, - { - ATTR_ENTITY_ID: "media_player.zone_a", - }, - blocking=True, - ) - assert getattr(soco, client_call).call_count == 1 - - -async def test_play_media_announce( - hass: HomeAssistant, - soco: MockSoCo, - async_autosetup_sonos, - sonos_websocket, -) -> None: - """Test playing media with the announce.""" - content_id: str = "http://10.0.0.1:8123/local/sounds/doorbell.mp3" - volume: float = 0.30 - - # Test the success path - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "music", - ATTR_MEDIA_CONTENT_ID: content_id, - ATTR_MEDIA_ANNOUNCE: True, - ATTR_MEDIA_EXTRA: {"volume": volume}, - }, - blocking=True, - ) - assert sonos_websocket.play_clip.call_count == 1 - sonos_websocket.play_clip.assert_called_with(content_id, volume=volume) - - # Test receiving a websocket exception - sonos_websocket.play_clip.reset_mock() - sonos_websocket.play_clip.side_effect = SonosWebsocketError("Error Message") - with pytest.raises( - HomeAssistantError, match="Error when calling Sonos websocket: Error Message" - ): - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "music", - ATTR_MEDIA_CONTENT_ID: content_id, - ATTR_MEDIA_ANNOUNCE: True, - }, - blocking=True, - ) - assert sonos_websocket.play_clip.call_count == 1 - sonos_websocket.play_clip.assert_called_with(content_id, volume=None) - - # Test receiving a non success result - sonos_websocket.play_clip.reset_mock() - sonos_websocket.play_clip.side_effect = None - retval = {"success": 0} - sonos_websocket.play_clip.return_value = [retval, {}] - with pytest.raises( - HomeAssistantError, match=f"Announcing clip {content_id} failed {retval}" - ): - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "music", - ATTR_MEDIA_CONTENT_ID: content_id, - ATTR_MEDIA_ANNOUNCE: True, - }, - blocking=True, - ) - assert sonos_websocket.play_clip.call_count == 1 - - # Test speakers that do not support announce. 
This - # will result in playing the clip directly via play_uri - sonos_websocket.play_clip.reset_mock() - sonos_websocket.play_clip.side_effect = None - retval = {"success": 0, "type": "globalError"} - sonos_websocket.play_clip.return_value = [retval, {}] - - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.zone_a", - ATTR_MEDIA_CONTENT_TYPE: "music", - ATTR_MEDIA_CONTENT_ID: content_id, - ATTR_MEDIA_ANNOUNCE: True, - }, - blocking=True, - ) - assert sonos_websocket.play_clip.call_count == 1 - soco.play_uri.assert_called_with(content_id, force_radio=False) - - -async def test_media_get_queue( - hass: HomeAssistant, - soco: MockSoCo, - async_autosetup_sonos, - soco_factory, - snapshot: SnapshotAssertion, -) -> None: - """Test getting the media queue.""" - soco_mock = soco_factory.mock_list.get("192.168.42.2") - result = await hass.services.async_call( - SONOS_DOMAIN, - SERVICE_GET_QUEUE, - { - ATTR_ENTITY_ID: "media_player.zone_a", - }, - blocking=True, - return_response=True, - ) - soco_mock.get_queue.assert_called_with(max_items=0) - assert result == snapshot diff --git a/tests/components/sonos/test_plex_playback.py b/tests/components/sonos/test_plex_playback.py index 01a66f640d5..428e970697e 100644 --- a/tests/components/sonos/test_plex_playback.py +++ b/tests/components/sonos/test_plex_playback.py @@ -8,24 +8,17 @@ import pytest from homeassistant.components.media_player import ( ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, - ATTR_MEDIA_ENQUEUE, DOMAIN as MP_DOMAIN, SERVICE_PLAY_MEDIA, - MediaPlayerEnqueue, MediaType, ) from homeassistant.components.plex import DOMAIN as PLEX_DOMAIN, PLEX_URI_SCHEME -from homeassistant.components.sonos.media_player import LONG_SERVICE_TIMEOUT from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from .conftest import MockSoCo - -async def test_plex_play_media( - hass: HomeAssistant, soco: MockSoCo, async_autosetup_sonos -) -> None: +async def test_plex_play_media(hass: HomeAssistant, async_autosetup_sonos) -> None: """Test playing media via the Plex integration.""" mock_plex_server = Mock() mock_lookup = mock_plex_server.lookup_media @@ -62,9 +55,6 @@ async def test_plex_play_media( assert not mock_shuffle.called assert mock_lookup.mock_calls[0][1][0] == MediaType.MUSIC assert mock_lookup.mock_calls[0][2] == json.loads(media_content_id) - assert soco.clear_queue.call_count == 1 - assert soco.play_from_queue.call_count == 1 - soco.play_from_queue.assert_called_with(0) # Test handling shuffle in payload mock_lookup.reset_mock() @@ -140,41 +130,3 @@ async def test_plex_play_media( assert mock_shuffle.called assert mock_lookup.mock_calls[0][1][0] == PLEX_DOMAIN assert mock_lookup.mock_calls[0][2] == {"plex_key": plex_item_key} - - mock_add_to_queue.reset_mock() - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: media_player, - ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, - ATTR_MEDIA_CONTENT_ID: f"{PLEX_URI_SCHEME}{media_content_id}", - ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, - }, - blocking=True, - ) - assert mock_add_to_queue.call_count == 1 - mock_add_to_queue.assert_called_with( - mock_lookup(), timeout=LONG_SERVICE_TIMEOUT - ) - - soco.play_from_queue.reset_mock() - mock_add_to_queue.reset_mock() - mock_add_to_queue.return_value = 9 - await hass.services.async_call( - MP_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: media_player, - ATTR_MEDIA_CONTENT_TYPE: 
MediaType.MUSIC, - ATTR_MEDIA_CONTENT_ID: f"{PLEX_URI_SCHEME}{media_content_id}", - ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.PLAY, - }, - blocking=True, - ) - assert mock_add_to_queue.call_count == 1 - mock_add_to_queue.assert_called_with( - mock_lookup(), position=1, timeout=LONG_SERVICE_TIMEOUT - ) - assert soco.play_from_queue.call_count == 1 - soco.play_from_queue.assert_called_with(mock_add_to_queue.return_value - 1) diff --git a/tests/components/sonos/test_speaker.py b/tests/components/sonos/test_speaker.py index 40d126c64f2..2c4357060be 100644 --- a/tests/components/sonos/test_speaker.py +++ b/tests/components/sonos/test_speaker.py @@ -4,18 +4,11 @@ from unittest.mock import patch import pytest -from homeassistant.components.media_player import ( - DOMAIN as MP_DOMAIN, - SERVICE_MEDIA_PLAY, -) -from homeassistant.components.sonos import DOMAIN from homeassistant.components.sonos.const import DATA_SONOS, SCAN_INTERVAL from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util -from .conftest import MockSoCo, SonosMockEvent - -from tests.common import async_fire_time_changed, load_fixture, load_json_value_fixture +from tests.common import async_fire_time_changed async def test_fallback_to_polling( @@ -74,140 +67,3 @@ async def test_subscription_creation_fails( await hass.async_block_till_done() assert speaker._subscriptions - - -def _create_zgs_sonos_event( - fixture_file: str, soco_1: MockSoCo, soco_2: MockSoCo, create_uui_ds: bool = True -) -> SonosMockEvent: - """Create a Sonos Event for zone group state, with the option of creating the uui_ds_in_group.""" - zgs = load_fixture(fixture_file, DOMAIN) - variables = {} - variables["ZoneGroupState"] = zgs - # Sonos does not always send this variable with zgs events - if create_uui_ds: - variables["zone_player_uui_ds_in_group"] = f"{soco_1.uid},{soco_2.uid}" - event = SonosMockEvent(soco_1, soco_1.zoneGroupTopology, variables) - if create_uui_ds: - event.zone_player_uui_ds_in_group = f"{soco_1.uid},{soco_2.uid}" - return event - - -def _create_avtransport_sonos_event( - fixture_file: str, soco: MockSoCo -) -> SonosMockEvent: - """Create a Sonos Event for an AVTransport update.""" - variables = load_json_value_fixture(fixture_file, DOMAIN) - return SonosMockEvent(soco, soco.avTransport, variables) - - -async def _media_play(hass: HomeAssistant, entity: str) -> None: - """Call media play service.""" - await hass.services.async_call( - MP_DOMAIN, - SERVICE_MEDIA_PLAY, - { - "entity_id": entity, - }, - blocking=True, - ) - - -async def test_zgs_event_group_speakers( - hass: HomeAssistant, sonos_setup_two_speakers: list[MockSoCo] -) -> None: - """Tests grouping and ungrouping two speakers.""" - # When Sonos speakers are grouped; one of the speakers is the coordinator and is in charge - # of playback across both speakers. Hence, service calls to play or pause on media_players - # that are part of the group are routed to the coordinator. 
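The comment above summarizes the rule the grouping tests assert: commands sent to any member of a Sonos group are executed by the group's coordinator. A self-contained sketch of that routing rule (the class and attribute names are illustrative stand-ins, not the integration's speaker objects):

class Speaker:
    """Toy model of a Sonos zone: ungrouped speakers coordinate themselves."""

    def __init__(self, name: str) -> None:
        self.name = name
        self.coordinator: "Speaker" = self
        self.play_calls = 0

    def play(self) -> None:
        # Playback commands are always delegated to the current coordinator.
        self.coordinator.play_calls += 1


living_room = Speaker("living_room")
bedroom = Speaker("bedroom")

bedroom.coordinator = living_room  # group the bedroom under the living room
bedroom.play()
living_room.play()

assert living_room.play_calls == 2  # both calls routed to the coordinator
assert bedroom.play_calls == 0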
- soco_lr = sonos_setup_two_speakers[0] - soco_br = sonos_setup_two_speakers[1] - - # Test 1 - Initial state - speakers are not grouped - state = hass.states.get("media_player.living_room") - assert state.attributes["group_members"] == ["media_player.living_room"] - state = hass.states.get("media_player.bedroom") - assert state.attributes["group_members"] == ["media_player.bedroom"] - # Each speaker is its own coordinator and calls should route to their SoCos - await _media_play(hass, "media_player.living_room") - assert soco_lr.play.call_count == 1 - await _media_play(hass, "media_player.bedroom") - assert soco_br.play.call_count == 1 - - soco_lr.play.reset_mock() - soco_br.play.reset_mock() - - # Test 2 - Group the speakers, living room is the coordinator - event = _create_zgs_sonos_event( - "zgs_group.xml", soco_lr, soco_br, create_uui_ds=True - ) - soco_lr.zoneGroupTopology.subscribe.return_value._callback(event) - soco_br.zoneGroupTopology.subscribe.return_value._callback(event) - await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get("media_player.living_room") - assert state.attributes["group_members"] == [ - "media_player.living_room", - "media_player.bedroom", - ] - state = hass.states.get("media_player.bedroom") - assert state.attributes["group_members"] == [ - "media_player.living_room", - "media_player.bedroom", - ] - # Play calls should route to the living room SoCo - await _media_play(hass, "media_player.living_room") - await _media_play(hass, "media_player.bedroom") - assert soco_lr.play.call_count == 2 - assert soco_br.play.call_count == 0 - - soco_lr.play.reset_mock() - soco_br.play.reset_mock() - - # Test 3 - Ungroup the speakers - event = _create_zgs_sonos_event( - "zgs_two_single.xml", soco_lr, soco_br, create_uui_ds=False - ) - soco_lr.zoneGroupTopology.subscribe.return_value._callback(event) - soco_br.zoneGroupTopology.subscribe.return_value._callback(event) - await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get("media_player.living_room") - assert state.attributes["group_members"] == ["media_player.living_room"] - state = hass.states.get("media_player.bedroom") - assert state.attributes["group_members"] == ["media_player.bedroom"] - # Calls should route to each speakers Soco - await _media_play(hass, "media_player.living_room") - assert soco_lr.play.call_count == 1 - await _media_play(hass, "media_player.bedroom") - assert soco_br.play.call_count == 1 - - -async def test_zgs_avtransport_group_speakers( - hass: HomeAssistant, sonos_setup_two_speakers: list[MockSoCo] -) -> None: - """Test processing avtransport and zgs events to change group membership.""" - soco_lr = sonos_setup_two_speakers[0] - soco_br = sonos_setup_two_speakers[1] - - # Test 1 - Send a transport event changing the coordinator - # for the living room speaker to the bedroom speaker. 
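The helpers above fabricate UPnP events and hand them directly to the callback registered on the mocked subscription, which is how every push update in these tests is simulated. A stripped-down illustration of that pattern, assuming nothing beyond unittest.mock (FakeEvent and the handler below are stand-ins, not the real SonosMockEvent or integration code):

from unittest.mock import Mock


class FakeEvent:
    """Minimal stand-in for the SonosMockEvent objects built by the helpers."""

    def __init__(self, service: Mock, variables: dict[str, str]) -> None:
        self.service = service
        self.variables = variables


received: list[dict[str, str]] = []

soco = Mock()
subscription = soco.zoneGroupTopology.subscribe.return_value
# Pretend the integration registered its handler on the subscription ...
subscription._callback = lambda event: received.append(event.variables)

# ... and simulate the speaker pushing a zone-group-state change.
event = FakeEvent(soco.zoneGroupTopology, {"ZoneGroupState": "<ZoneGroupState/>"})
subscription._callback(event)

assert received == [{"ZoneGroupState": "<ZoneGroupState/>"}]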
- event = _create_avtransport_sonos_event("av_transport.json", soco_lr) - soco_lr.avTransport.subscribe.return_value._callback(event) - await hass.async_block_till_done(wait_background_tasks=True) - # Call should route to the new coodinator which is the bedroom - await _media_play(hass, "media_player.living_room") - assert soco_lr.play.call_count == 0 - assert soco_br.play.call_count == 1 - - soco_lr.play.reset_mock() - soco_br.play.reset_mock() - - # Test 2- Send a zgs event to return living room to its own coordinator - event = _create_zgs_sonos_event( - "zgs_two_single.xml", soco_lr, soco_br, create_uui_ds=False - ) - soco_lr.zoneGroupTopology.subscribe.return_value._callback(event) - soco_br.zoneGroupTopology.subscribe.return_value._callback(event) - await hass.async_block_till_done(wait_background_tasks=True) - # Call should route to the living room - await _media_play(hass, "media_player.living_room") - assert soco_lr.play.call_count == 1 - assert soco_br.play.call_count == 0 diff --git a/tests/components/spc/conftest.py b/tests/components/spc/conftest.py deleted file mode 100644 index 1ccda31e314..00000000000 --- a/tests/components/spc/conftest.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Tests for Vanderbilt SPC component.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -import pyspcwebgw -import pytest - - -@pytest.fixture -def mock_client() -> Generator[AsyncMock]: - """Mock the SPC client.""" - - with patch( - "homeassistant.components.spc.SpcWebGateway", autospec=True - ) as mock_client: - client = mock_client.return_value - client.async_load_parameters.return_value = True - mock_area = AsyncMock(spec=pyspcwebgw.area.Area) - mock_area.id = "1" - mock_area.mode = pyspcwebgw.const.AreaMode.FULL_SET - mock_area.last_changed_by = "Sven" - mock_area.name = "House" - mock_area.verified_alarm = False - client.areas = {"1": mock_area} - yield mock_client diff --git a/tests/components/spc/test_alarm_control_panel.py b/tests/components/spc/test_alarm_control_panel.py deleted file mode 100644 index 12fb885b92b..00000000000 --- a/tests/components/spc/test_alarm_control_panel.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Tests for Vanderbilt SPC component.""" - -from unittest.mock import AsyncMock - -from pyspcwebgw.const import AreaMode - -from homeassistant.components.alarm_control_panel import AlarmControlPanelState -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - - -async def test_update_alarm_device(hass: HomeAssistant, mock_client: AsyncMock) -> None: - """Test that alarm panel state changes on incoming websocket data.""" - - config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}} - assert await async_setup_component(hass, "spc", config) is True - - await hass.async_block_till_done() - - entity_id = "alarm_control_panel.house" - - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY - assert hass.states.get(entity_id).attributes["changed_by"] == "Sven" - - mock_area = mock_client.return_value.areas["1"] - - mock_area.mode = AreaMode.UNSET - mock_area.last_changed_by = "Anna" - - await mock_client.call_args_list[0][1]["async_callback"](mock_area) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED - assert hass.states.get(entity_id).attributes["changed_by"] == "Anna" diff --git a/tests/components/spc/test_init.py b/tests/components/spc/test_init.py index dc407dc2c5b..3dfea94a4bd 100644 --- 
a/tests/components/spc/test_init.py +++ b/tests/components/spc/test_init.py @@ -1,22 +1,73 @@ """Tests for Vanderbilt SPC component.""" -from unittest.mock import AsyncMock +from unittest.mock import Mock, PropertyMock, patch +import pyspcwebgw +from pyspcwebgw.const import AreaMode + +from homeassistant.bootstrap import async_setup_component +from homeassistant.components.spc import DATA_API +from homeassistant.const import STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component -async def test_valid_device_config(hass: HomeAssistant, mock_client: AsyncMock) -> None: +async def test_valid_device_config(hass: HomeAssistant, monkeypatch) -> None: """Test valid device config.""" config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}} - assert await async_setup_component(hass, "spc", config) is True + with patch( + "homeassistant.components.spc.SpcWebGateway.async_load_parameters", + return_value=True, + ): + assert await async_setup_component(hass, "spc", config) is True -async def test_invalid_device_config( - hass: HomeAssistant, mock_client: AsyncMock -) -> None: +async def test_invalid_device_config(hass: HomeAssistant, monkeypatch) -> None: """Test valid device config.""" config = {"spc": {"api_url": "http://localhost/"}} - assert await async_setup_component(hass, "spc", config) is False + with patch( + "homeassistant.components.spc.SpcWebGateway.async_load_parameters", + return_value=True, + ): + assert await async_setup_component(hass, "spc", config) is False + + +async def test_update_alarm_device(hass: HomeAssistant) -> None: + """Test that alarm panel state changes on incoming websocket data.""" + + config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}} + + area_mock = Mock( + spec=pyspcwebgw.area.Area, + id="1", + mode=AreaMode.FULL_SET, + last_changed_by="Sven", + ) + area_mock.name = "House" + area_mock.verified_alarm = False + + with patch( + "homeassistant.components.spc.SpcWebGateway.areas", new_callable=PropertyMock + ) as mock_areas: + mock_areas.return_value = {"1": area_mock} + with patch( + "homeassistant.components.spc.SpcWebGateway.async_load_parameters", + return_value=True, + ): + assert await async_setup_component(hass, "spc", config) is True + + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.house" + + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).attributes["changed_by"] == "Sven" + + area_mock.mode = AreaMode.UNSET + area_mock.last_changed_by = "Anna" + await hass.data[DATA_API]._async_callback(area_mock) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).attributes["changed_by"] == "Anna" diff --git a/tests/components/spider/__init__.py b/tests/components/spider/__init__.py index 4d9139a501e..d145f4efc09 100644 --- a/tests/components/spider/__init__.py +++ b/tests/components/spider/__init__.py @@ -1 +1 @@ -"""Tests for the Spider integration.""" +"""Tests for the Spider component.""" diff --git a/tests/components/spider/test_config_flow.py b/tests/components/spider/test_config_flow.py new file mode 100644 index 00000000000..69f97130f8c --- /dev/null +++ b/tests/components/spider/test_config_flow.py @@ -0,0 +1,112 @@ +"""Tests for the Spider config flow.""" + +from unittest.mock import Mock, patch + +import pytest + +from homeassistant import config_entries 
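The SPC test_init.py changes above swap between a conftest-fixture mock and direct patches of SpcWebGateway; the patch-based variant leans on new_callable=PropertyMock to stub the read-only areas property. A minimal, self-contained sketch of that PropertyMock pattern (the Gateway class below is an illustrative stand-in, not pyspcwebgw):

from unittest.mock import PropertyMock, patch


class Gateway:
    """Illustrative stand-in for a client whose data normally comes from the network."""

    @property
    def areas(self) -> dict[str, str]:
        raise RuntimeError("would require a real SPC web gateway")


# Patching with new_callable=PropertyMock swaps the property on the class, so any
# instance the code under test creates sees the canned data instead.
with patch.object(Gateway, "areas", new_callable=PropertyMock) as mock_areas:
    mock_areas.return_value = {"1": "House"}
    assert Gateway().areas == {"1": "House"}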
+from homeassistant.components.spider.const import DOMAIN +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + +USERNAME = "spider-username" +PASSWORD = "spider-password" + +SPIDER_USER_DATA = { + CONF_USERNAME: USERNAME, + CONF_PASSWORD: PASSWORD, +} + + +@pytest.fixture(name="spider") +def spider_fixture() -> Mock: + """Patch libraries.""" + with patch("homeassistant.components.spider.config_flow.SpiderApi") as spider: + yield spider + + +async def test_user(hass: HomeAssistant, spider) -> None: + """Test user config.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + with ( + patch( + "homeassistant.components.spider.async_setup", return_value=True + ) as mock_setup, + patch( + "homeassistant.components.spider.async_setup_entry", return_value=True + ) as mock_setup_entry, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=SPIDER_USER_DATA + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DOMAIN + assert result["data"][CONF_USERNAME] == USERNAME + assert result["data"][CONF_PASSWORD] == PASSWORD + assert not result["result"].unique_id + + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import(hass: HomeAssistant, spider) -> None: + """Test import step.""" + + with ( + patch( + "homeassistant.components.spider.async_setup", + return_value=True, + ) as mock_setup, + patch( + "homeassistant.components.spider.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data=SPIDER_USER_DATA, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DOMAIN + assert result["data"][CONF_USERNAME] == USERNAME + assert result["data"][CONF_PASSWORD] == PASSWORD + assert not result["result"].unique_id + + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_abort_if_already_setup(hass: HomeAssistant, spider) -> None: + """Test we abort if Spider is already setup.""" + MockConfigEntry(domain=DOMAIN, data=SPIDER_USER_DATA).add_to_hass(hass) + + # Should fail, config exist (import) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER}, data=SPIDER_USER_DATA + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" + + # Should fail, config exist (flow) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=SPIDER_USER_DATA + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" diff --git a/tests/components/spider/test_init.py b/tests/components/spider/test_init.py deleted file mode 100644 index 6d1d87cfa6a..00000000000 --- a/tests/components/spider/test_init.py +++ /dev/null @@ -1,50 +0,0 @@ -"""Tests for the Spider integration.""" - -from homeassistant.components.spider import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from 
homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir - -from tests.common import MockConfigEntry - - -async def test_spider_repair_issue( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test the Spider configuration entry loading/unloading handles the repair.""" - config_entry_1 = MockConfigEntry( - title="Example 1", - domain=DOMAIN, - ) - config_entry_1.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry_1.entry_id) - await hass.async_block_till_done() - assert config_entry_1.state is ConfigEntryState.LOADED - - # Add a second one - config_entry_2 = MockConfigEntry( - title="Example 2", - domain=DOMAIN, - ) - config_entry_2.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry_2.entry_id) - await hass.async_block_till_done() - - assert config_entry_2.state is ConfigEntryState.LOADED - assert issue_registry.async_get_issue(DOMAIN, DOMAIN) - - # Remove the first one - await hass.config_entries.async_remove(config_entry_1.entry_id) - await hass.async_block_till_done() - - assert config_entry_1.state is ConfigEntryState.NOT_LOADED - assert config_entry_2.state is ConfigEntryState.LOADED - assert issue_registry.async_get_issue(DOMAIN, DOMAIN) - - # Remove the second one - await hass.config_entries.async_remove(config_entry_2.entry_id) - await hass.async_block_till_done() - - assert config_entry_1.state is ConfigEntryState.NOT_LOADED - assert config_entry_2.state is ConfigEntryState.NOT_LOADED - assert issue_registry.async_get_issue(DOMAIN, DOMAIN) is None diff --git a/tests/components/spotify/__init__.py b/tests/components/spotify/__init__.py index 4730530b4f3..51e3404d3ad 100644 --- a/tests/components/spotify/__init__.py +++ b/tests/components/spotify/__init__.py @@ -1,13 +1 @@ -"""Tests for the Spotify component.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Set up the component.""" - config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() +"""Tests for the Spotify integration.""" diff --git a/tests/components/spotify/conftest.py b/tests/components/spotify/conftest.py deleted file mode 100644 index d3fc418f1cd..00000000000 --- a/tests/components/spotify/conftest.py +++ /dev/null @@ -1,166 +0,0 @@ -"""Common test fixtures.""" - -from collections.abc import Generator -import time -from unittest.mock import AsyncMock, patch - -import pytest -from spotifyaio.models import ( - Album, - Artist, - ArtistResponse, - AudioFeatures, - CategoriesResponse, - Category, - CategoryPlaylistResponse, - Devices, - FeaturedPlaylistResponse, - NewReleasesResponse, - NewReleasesResponseInner, - PlaybackState, - PlayedTrackResponse, - Playlist, - PlaylistResponse, - SavedAlbumResponse, - SavedShowResponse, - SavedTrackResponse, - Show, - ShowEpisodesResponse, - TopArtistsResponse, - TopTracksResponse, - UserProfile, -) - -from homeassistant.components.application_credentials import ( - ClientCredential, - async_import_client_credential, -) -from homeassistant.components.spotify.const import DOMAIN, SPOTIFY_SCOPES -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry, load_fixture - -SCOPES = " ".join(SPOTIFY_SCOPES) - - -@pytest.fixture(name="expires_at") -def mock_expires_at() -> int: - 
"""Fixture to set the oauth token expiration time.""" - return time.time() + 3600 - - -@pytest.fixture -def mock_config_entry(expires_at: int) -> MockConfigEntry: - """Create Spotify entry in Home Assistant.""" - return MockConfigEntry( - domain=DOMAIN, - title="spotify_1", - unique_id="1112264111", - data={ - "auth_implementation": DOMAIN, - "token": { - "access_token": "mock-access-token", - "refresh_token": "mock-refresh-token", - "expires_at": expires_at, - "scope": SCOPES, - }, - "id": "1112264111", - "name": "spotify_account_1", - }, - entry_id="01J5TX5A0FF6G5V0QJX6HBC94T", - ) - - -@pytest.fixture -async def setup_credentials(hass: HomeAssistant) -> None: - """Fixture to setup credentials.""" - assert await async_setup_component(hass, "application_credentials", {}) - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential("CLIENT_ID", "CLIENT_SECRET"), - DOMAIN, - ) - - -@pytest.fixture(autouse=True) -async def patch_sleep() -> Generator[AsyncMock]: - """Fixture to setup credentials.""" - with patch("homeassistant.components.spotify.media_player.AFTER_REQUEST_SLEEP", 0): - yield - - -@pytest.fixture -def mock_spotify() -> Generator[AsyncMock]: - """Mock the Spotify API.""" - with ( - patch( - "homeassistant.components.spotify.SpotifyClient", autospec=True - ) as spotify_mock, - patch( - "homeassistant.components.spotify.config_flow.SpotifyClient", - new=spotify_mock, - ), - ): - client = spotify_mock.return_value - # All these fixtures can be retrieved using the Web API client at - # https://developer.spotify.com/documentation/web-api - for fixture, method, obj in ( - ( - "current_user_playlist.json", - "get_playlists_for_current_user", - PlaylistResponse, - ), - ("saved_albums.json", "get_saved_albums", SavedAlbumResponse), - ("saved_tracks.json", "get_saved_tracks", SavedTrackResponse), - ("saved_shows.json", "get_saved_shows", SavedShowResponse), - ( - "recently_played_tracks.json", - "get_recently_played_tracks", - PlayedTrackResponse, - ), - ("top_artists.json", "get_top_artists", TopArtistsResponse), - ("top_tracks.json", "get_top_tracks", TopTracksResponse), - ("show_episodes.json", "get_show_episodes", ShowEpisodesResponse), - ("artist_albums.json", "get_artist_albums", NewReleasesResponseInner), - ): - getattr(client, method).return_value = obj.from_json( - load_fixture(fixture, DOMAIN) - ).items - for fixture, method, obj in ( - ( - "playback.json", - "get_playback", - PlaybackState, - ), - ("current_user.json", "get_current_user", UserProfile), - ("category.json", "get_category", Category), - ("playlist.json", "get_playlist", Playlist), - ("album.json", "get_album", Album), - ("artist.json", "get_artist", Artist), - ("show.json", "get_show", Show), - ("audio_features.json", "get_audio_features", AudioFeatures), - ): - getattr(client, method).return_value = obj.from_json( - load_fixture(fixture, DOMAIN) - ) - client.get_followed_artists.return_value = ArtistResponse.from_json( - load_fixture("followed_artists.json", DOMAIN) - ).artists.items - client.get_featured_playlists.return_value = FeaturedPlaylistResponse.from_json( - load_fixture("featured_playlists.json", DOMAIN) - ).playlists.items - client.get_categories.return_value = CategoriesResponse.from_json( - load_fixture("categories.json", DOMAIN) - ).categories.items - client.get_category_playlists.return_value = CategoryPlaylistResponse.from_json( - load_fixture("category_playlists.json", DOMAIN) - ).playlists.items - client.get_new_releases.return_value = NewReleasesResponse.from_json( - 
load_fixture("new_releases.json", DOMAIN) - ).albums.items - client.get_devices.return_value = Devices.from_json( - load_fixture("devices.json", DOMAIN) - ).devices - yield spotify_mock diff --git a/tests/components/spotify/fixtures/album.json b/tests/components/spotify/fixtures/album.json deleted file mode 100644 index d7240298e9f..00000000000 --- a/tests/components/spotify/fixtures/album.json +++ /dev/null @@ -1,128 +0,0 @@ -{ - "album_type": "album", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/3jULn43a6xfzqleyeFjPIq" - }, - "href": "https://api.spotify.com/v1/artists/3jULn43a6xfzqleyeFjPIq", - "id": "3jULn43a6xfzqleyeFjPIq", - "name": "Area 11", - "type": "artist", - "uri": "spotify:artist:3jULn43a6xfzqleyeFjPIq" - } - ], - "available_markets": [], - "copyrights": [ - { - "text": "2020 Smihilism Records", - "type": "C" - }, - { - "text": "2020 Smihilism Records", - "type": "P" - } - ], - "external_ids": { - "upc": "195916707034" - }, - "external_urls": { - "spotify": "https://open.spotify.com/album/3IqzqH6ShrRtie9Yd2ODyG" - }, - "genres": [], - "href": "https://api.spotify.com/v1/albums/3IqzqH6ShrRtie9Yd2ODyG", - "id": "3IqzqH6ShrRtie9Yd2ODyG", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab67616d0000b273a61a28c2f084761f8833bce6", - "width": 640 - }, - { - "height": 300, - "url": "https://i.scdn.co/image/ab67616d00001e02a61a28c2f084761f8833bce6", - "width": 300 - }, - { - "height": 64, - "url": "https://i.scdn.co/image/ab67616d00004851a61a28c2f084761f8833bce6", - "width": 64 - } - ], - "label": "Smihilism Records", - "name": "SINGLARITY", - "popularity": 29, - "release_date": "2020-12-18", - "release_date_precision": "day", - "total_tracks": 11, - "tracks": { - "href": "https://api.spotify.com/v1/albums/3IqzqH6ShrRtie9Yd2ODyG/tracks?offset=0&limit=50&locale=en-US,en;q=0.5", - "items": [ - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/3jULn43a6xfzqleyeFjPIq" - }, - "href": "https://api.spotify.com/v1/artists/3jULn43a6xfzqleyeFjPIq", - "id": "3jULn43a6xfzqleyeFjPIq", - "name": "Area 11", - "type": "artist", - "uri": "spotify:artist:3jULn43a6xfzqleyeFjPIq" - } - ], - "available_markets": [], - "disc_number": 1, - "duration_ms": 260372, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/6akJGriy4njdP8fZTPGjwz" - }, - "href": "https://api.spotify.com/v1/tracks/6akJGriy4njdP8fZTPGjwz", - "id": "6akJGriy4njdP8fZTPGjwz", - "is_local": false, - "name": "All Your Friends", - "preview_url": "https://p.scdn.co/mp3-preview/484344e579edfdb8e8f872d73299aff2c3d0369d?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 1, - "type": "track", - "uri": "spotify:track:6akJGriy4njdP8fZTPGjwz" - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/3jULn43a6xfzqleyeFjPIq" - }, - "href": "https://api.spotify.com/v1/artists/3jULn43a6xfzqleyeFjPIq", - "id": "3jULn43a6xfzqleyeFjPIq", - "name": "Area 11", - "type": "artist", - "uri": "spotify:artist:3jULn43a6xfzqleyeFjPIq" - } - ], - "available_markets": [], - "disc_number": 1, - "duration_ms": 206613, - "explicit": true, - "external_urls": { - "spotify": "https://open.spotify.com/track/7N02bJK1amhplZ8yAapRS5" - }, - "href": "https://api.spotify.com/v1/tracks/7N02bJK1amhplZ8yAapRS5", - "id": "7N02bJK1amhplZ8yAapRS5", - "is_local": false, - "name": "New Magiks", - "preview_url": 
"https://p.scdn.co/mp3-preview/b59a5a73ed2e9a61be471822993e91210d5f255a?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 2, - "type": "track", - "uri": "spotify:track:7N02bJK1amhplZ8yAapRS5" - } - ], - "limit": 50, - "next": null, - "offset": 0, - "previous": null, - "total": 11 - }, - "type": "album", - "uri": "spotify:album:3IqzqH6ShrRtie9Yd2ODyG" -} diff --git a/tests/components/spotify/fixtures/artist.json b/tests/components/spotify/fixtures/artist.json deleted file mode 100644 index e60429fa030..00000000000 --- a/tests/components/spotify/fixtures/artist.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "external_urls": { - "spotify": "https://open.spotify.com/artist/0TnOYISbd1XYRBk9myaseg" - }, - "followers": { - "href": null, - "total": 10817055 - }, - "genres": ["dance pop", "miami hip hop", "pop"], - "href": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg?locale=en-US%2Cen%3Bq%3D0.5", - "id": "0TnOYISbd1XYRBk9myaseg", - "images": [ - { - "url": "https://i.scdn.co/image/ab6761610000e5ebee07b5820dd91d15d397e29c", - "height": 640, - "width": 640 - }, - { - "url": "https://i.scdn.co/image/ab67616100005174ee07b5820dd91d15d397e29c", - "height": 320, - "width": 320 - }, - { - "url": "https://i.scdn.co/image/ab6761610000f178ee07b5820dd91d15d397e29c", - "height": 160, - "width": 160 - } - ], - "name": "Pitbull", - "popularity": 85, - "type": "artist", - "uri": "spotify:artist:0TnOYISbd1XYRBk9myaseg" -} diff --git a/tests/components/spotify/fixtures/artist_albums.json b/tests/components/spotify/fixtures/artist_albums.json deleted file mode 100644 index 2cc66d1ac0b..00000000000 --- a/tests/components/spotify/fixtures/artist_albums.json +++ /dev/null @@ -1,472 +0,0 @@ -{ - "href": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg/albums?offset=0&limit=20&locale=en-US,en;q%3D0.5&include_groups=album,single,compilation,appears_on", - "limit": 20, - "next": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg/albums?offset=20&limit=20&locale=en-US,en;q%3D0.5&include_groups=album,single,compilation,appears_on", - "offset": 0, - "previous": null, - "total": 903, - "items": [ - { - "album_type": "album", - "total_tracks": 7, - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", 
- "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/56jg3KJcYmfL7RzYmG2O1Q" - }, - "href": "https://api.spotify.com/v1/albums/56jg3KJcYmfL7RzYmG2O1Q", - "id": "56jg3KJcYmfL7RzYmG2O1Q", - "images": [ - { - "url": "https://i.scdn.co/image/ab67616d0000b273a0bac1996f26274685db1520", - "height": 640, - "width": 640 - }, - { - "url": "https://i.scdn.co/image/ab67616d00001e02a0bac1996f26274685db1520", - "height": 300, - "width": 300 - }, - { - "url": "https://i.scdn.co/image/ab67616d00004851a0bac1996f26274685db1520", - "height": 64, - "width": 64 - } - ], - "name": "Trackhouse (Daytona 500 Edition)", - "release_date": "2024-02-16", - "release_date_precision": "day", - "type": "album", - "uri": "spotify:album:56jg3KJcYmfL7RzYmG2O1Q", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/0TnOYISbd1XYRBk9myaseg" - }, - "href": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg", - "id": "0TnOYISbd1XYRBk9myaseg", - "name": "Pitbull", - "type": "artist", - "uri": "spotify:artist:0TnOYISbd1XYRBk9myaseg" - } - ], - "album_group": "album" - }, - { - "album_type": "album", - "total_tracks": 14, - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/1l86t4bTNT2j1X0ZBCIv6R" - }, - "href": "https://api.spotify.com/v1/albums/1l86t4bTNT2j1X0ZBCIv6R", - "id": "1l86t4bTNT2j1X0ZBCIv6R", - "images": [ - { - "url": "https://i.scdn.co/image/ab67616d0000b27333a4ba8f73271a749c5d953d", - "height": 640, - "width": 640 - }, - { - "url": "https://i.scdn.co/image/ab67616d00001e0233a4ba8f73271a749c5d953d", - "height": 300, - "width": 300 - }, - { - "url": "https://i.scdn.co/image/ab67616d0000485133a4ba8f73271a749c5d953d", - "height": 64, - "width": 64 - } - ], - "name": "Trackhouse", - "release_date": "2023-10-06", - "release_date_precision": "day", - "type": "album", - "uri": "spotify:album:1l86t4bTNT2j1X0ZBCIv6R", - "artists": [ - { - 
"external_urls": { - "spotify": "https://open.spotify.com/artist/0TnOYISbd1XYRBk9myaseg" - }, - "href": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg", - "id": "0TnOYISbd1XYRBk9myaseg", - "name": "Pitbull", - "type": "artist", - "uri": "spotify:artist:0TnOYISbd1XYRBk9myaseg" - } - ], - "album_group": "album" - } - ] -} diff --git a/tests/components/spotify/fixtures/audio_features.json b/tests/components/spotify/fixtures/audio_features.json deleted file mode 100644 index 52dfee060f7..00000000000 --- a/tests/components/spotify/fixtures/audio_features.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "danceability": 0.696, - "energy": 0.905, - "key": 3, - "loudness": -2.743, - "mode": 1, - "speechiness": 0.103, - "acousticness": 0.011, - "instrumentalness": 0.000905, - "liveness": 0.302, - "valence": 0.625, - "tempo": 114.944, - "type": "audio_features", - "id": "11dFghVXANMlKmJXsNCbNl", - "uri": "spotify:track:11dFghVXANMlKmJXsNCbNl", - "track_href": "https://api.spotify.com/v1/tracks/11dFghVXANMlKmJXsNCbNl", - "analysis_url": "https://api.spotify.com/v1/audio-analysis/11dFghVXANMlKmJXsNCbNl", - "duration_ms": 207960, - "time_signature": 4 -} diff --git a/tests/components/spotify/fixtures/categories.json b/tests/components/spotify/fixtures/categories.json deleted file mode 100644 index ed873c95c30..00000000000 --- a/tests/components/spotify/fixtures/categories.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "categories": { - "href": "https://api.spotify.com/v1/browse/categories?offset=0&limit=20&locale=en-US,en;q%3D0.5", - "items": [ - { - "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAt0tbjZptfcdMSKl3", - "id": "0JQ5DAt0tbjZptfcdMSKl3", - "icons": [ - { - "height": 274, - "url": "https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg", - "width": 274 - } - ], - "name": "Made For You" - }, - { - "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFz6FAsUtgAab", - "id": "0JQ5DAqbMKFz6FAsUtgAab", - "icons": [ - { - "height": 274, - "url": "https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg", - "width": 274 - } - ], - "name": "New Releases" - } - ], - "limit": 20, - "next": "https://api.spotify.com/v1/browse/categories?offset=20&limit=20&locale=en-US,en;q%3D0.5", - "offset": 0, - "previous": null, - "total": 56 - } -} diff --git a/tests/components/spotify/fixtures/category.json b/tests/components/spotify/fixtures/category.json deleted file mode 100644 index d60605cf94f..00000000000 --- a/tests/components/spotify/fixtures/category.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFRY5ok2pxXJ0", - "id": "0JQ5DAqbMKFRY5ok2pxXJ0", - "icons": [ - { - "height": 274, - "url": "https://t.scdn.co/media/original/dinner_1b6506abba0ba52c54e6d695c8571078_274x274.jpg", - "width": 274 - } - ], - "name": "Cooking & Dining" -} diff --git a/tests/components/spotify/fixtures/category_playlists.json b/tests/components/spotify/fixtures/category_playlists.json deleted file mode 100644 index c2262708d5a..00000000000 --- a/tests/components/spotify/fixtures/category_playlists.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "playlists": { - "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFRY5ok2pxXJ0/playlists?country=NL&offset=0&limit=20", - "items": [ - { - "collaborative": false, - "description": "Lekker eten en lang natafelen? 
Daar hoort muziek bij.", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/37i9dQZF1DX7yhuKT9G4qk" - }, - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX7yhuKT9G4qk", - "id": "37i9dQZF1DX7yhuKT9G4qk", - "images": [ - { - "height": null, - "url": "https://i.scdn.co/image/ab67706f0000000343319faa9428405f3312b588", - "width": null - } - ], - "name": "eten met vrienden", - "owner": { - "display_name": "Spotify", - "external_urls": { - "spotify": "https://open.spotify.com/user/spotify" - }, - "href": "https://api.spotify.com/v1/users/spotify", - "id": "spotify", - "type": "user", - "uri": "spotify:user:spotify" - }, - "primary_color": null, - "public": null, - "snapshot_id": "MTcwMTY5Njk3NywwMDAwMDAwMDkyY2JjZDA1MjA2YTBmNzMxMmFlNGI0YzRhMjg0ZWZl", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX7yhuKT9G4qk/tracks", - "total": 313 - }, - "type": "playlist", - "uri": "spotify:playlist:37i9dQZF1DX7yhuKT9G4qk" - }, - { - "collaborative": false, - "description": "From new retro to classic country blues, honky tonk, rockabilly, and more.", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/37i9dQZF1DXbvE0SE0Cczh" - }, - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DXbvE0SE0Cczh", - "id": "37i9dQZF1DXbvE0SE0Cczh", - "images": [ - { - "height": null, - "url": "https://i.scdn.co/image/ab67706f00000003b93c270883619dde61725fc8", - "width": null - } - ], - "name": "Jukebox Joint", - "owner": { - "display_name": "Spotify", - "external_urls": { - "spotify": "https://open.spotify.com/user/spotify" - }, - "href": "https://api.spotify.com/v1/users/spotify", - "id": "spotify", - "type": "user", - "uri": "spotify:user:spotify" - }, - "primary_color": null, - "public": null, - "snapshot_id": "MTY4NjkxODgwMiwwMDAwMDAwMGUwNWRkNjY5N2UzM2Q4NzI4NzRiZmNhMGVmMzAyZTA5", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DXbvE0SE0Cczh/tracks", - "total": 60 - }, - "type": "playlist", - "uri": "spotify:playlist:37i9dQZF1DXbvE0SE0Cczh" - } - ], - "limit": 20, - "next": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFRY5ok2pxXJ0/playlists?country=NL&offset=20&limit=20", - "offset": 0, - "previous": null, - "total": 46 - } -} diff --git a/tests/components/spotify/fixtures/current_user.json b/tests/components/spotify/fixtures/current_user.json deleted file mode 100644 index a4f95b6c33e..00000000000 --- a/tests/components/spotify/fixtures/current_user.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "display_name": "Henk", - "external_urls": { - "spotify": "https://open.spotify.com/user/1112264111" - }, - "href": "https://api.spotify.com/v1/users/1112264111", - "id": "1112264111", - "images": [ - { - "url": "https://i.scdn.co/image/ab67757000003b8246569a64d252247acb1491bc", - "height": 64, - "width": 64 - }, - { - "url": "https://i.scdn.co/image/ab6775700000ee8546569a64d252247acb1491bc", - "height": 300, - "width": 300 - } - ], - "type": "user", - "uri": "spotify:user:1112264111", - "followers": { - "href": null, - "total": 21 - }, - "country": "NL", - "product": "premium", - "explicit_content": { - "filter_enabled": false, - "filter_locked": false - }, - "email": "henk@outlook.com" -} diff --git a/tests/components/spotify/fixtures/current_user_playlist.json b/tests/components/spotify/fixtures/current_user_playlist.json deleted file mode 100644 index c9d306504db..00000000000 --- a/tests/components/spotify/fixtures/current_user_playlist.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "href": 
"https://api.spotify.com/v1/users/1112264111/playlists?offset=0&limit=20", - "items": [ - { - "collaborative": false, - "description": "", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/4WkWJ0EjHEFASDevhM8oPw" - }, - "href": "https://api.spotify.com/v1/playlists/4WkWJ0EjHEFASDevhM8oPw", - "id": "4WkWJ0EjHEFASDevhM8oPw", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab67616d0000b273d061f5bfae8d38558f3698c1", - "width": 640 - } - ], - "name": "Hyper", - "owner": { - "display_name": "Henk", - "external_urls": { - "spotify": "https://open.spotify.com/user/1112264111" - }, - "href": "https://api.spotify.com/v1/users/1112264111", - "id": "1112264111", - "type": "user", - "uri": "spotify:user:1112264111" - }, - "primary_color": null, - "public": true, - "snapshot_id": "Myw2ZjkyN2Q1ZWEwMjU1YWJjM2EwOWQ5YzA2ZDJjYjIzNTEzNzVmYmVl", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/4WkWJ0EjHEFASDevhM8oPw/tracks", - "total": 1 - }, - "type": "playlist", - "uri": "spotify:playlist:4WkWJ0EjHEFASDevhM8oPw" - }, - { - "collaborative": false, - "description": "", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/1RHirWgH1weMsBLi4KOK9d" - }, - "href": "https://api.spotify.com/v1/playlists/1RHirWgH1weMsBLi4KOK9d", - "id": "1RHirWgH1weMsBLi4KOK9d", - "images": [ - { - "height": 640, - "url": "https://mosaic.scdn.co/640/ab67616d0000b2732f3e58dd611d177973cb3a8cab67616d0000b27345cab965cb4639a4e669564aab67616d0000b2739e83c93811be6abfad8649d6ab67616d0000b273e4c03429788f0aff263a5fc6", - "width": 640 - }, - { - "height": 300, - "url": "https://mosaic.scdn.co/300/ab67616d0000b2732f3e58dd611d177973cb3a8cab67616d0000b27345cab965cb4639a4e669564aab67616d0000b2739e83c93811be6abfad8649d6ab67616d0000b273e4c03429788f0aff263a5fc6", - "width": 300 - }, - { - "height": 60, - "url": "https://mosaic.scdn.co/60/ab67616d0000b2732f3e58dd611d177973cb3a8cab67616d0000b27345cab965cb4639a4e669564aab67616d0000b2739e83c93811be6abfad8649d6ab67616d0000b273e4c03429788f0aff263a5fc6", - "width": 60 - } - ], - "name": "Ain’t got shit on me", - "owner": { - "display_name": "Rens Boeser", - "external_urls": { - "spotify": "https://open.spotify.com/user/317g2sbpe3ccycu45fes6lfr5lpe" - }, - "href": "https://api.spotify.com/v1/users/317g2sbpe3ccycu45fes6lfr5lpe", - "id": "317g2sbpe3ccycu45fes6lfr5lpe", - "type": "user", - "uri": "spotify:user:317g2sbpe3ccycu45fes6lfr5lpe" - }, - "primary_color": null, - "public": false, - "snapshot_id": "MjksMTdlMGU4ZGIxZWY5NWRkNjVkMzQ1YzUxYjk3YWZkMDdhNzRjNWE0Zg==", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/1RHirWgH1weMsBLi4KOK9d/tracks", - "total": 28 - }, - "type": "playlist", - "uri": "spotify:playlist:1RHirWgH1weMsBLi4KOK9d" - } - ], - "limit": 18, - "next": "https://api.spotify.com/v1/users/1112264111/playlists?offset=18&limit=20", - "offset": 0, - "previous": null, - "total": 101 -} diff --git a/tests/components/spotify/fixtures/devices.json b/tests/components/spotify/fixtures/devices.json deleted file mode 100644 index 2dd8dfd7c3b..00000000000 --- a/tests/components/spotify/fixtures/devices.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "devices": [ - { - "id": "21dac6b0e0a1f181870fdc9749b2656466557666", - "is_active": false, - "is_private_session": false, - "is_restricted": false, - "name": "DESKTOP-BKC5SIK", - "supports_volume": true, - "type": "Computer", - "volume_percent": 69 - } - ] -} diff --git a/tests/components/spotify/fixtures/featured_playlists.json b/tests/components/spotify/fixtures/featured_playlists.json 
deleted file mode 100644 index 5e6e53a7ee1..00000000000 --- a/tests/components/spotify/fixtures/featured_playlists.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "message": "Popular Playlists", - "playlists": { - "href": "https://api.spotify.com/v1/browse/featured-playlists?country=NL×tamp=2023-12-18T18%3A35%3A35&offset=0&limit=20", - "items": [ - { - "collaborative": false, - "description": "De ideale playlist voor het fijne kerstgevoel bij de boom!", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/37i9dQZF1DX4dopZ9vOp1t" - }, - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX4dopZ9vOp1t", - "id": "37i9dQZF1DX4dopZ9vOp1t", - "images": [ - { - "height": null, - "url": "https://i.scdn.co/image/ab67706f000000037d14c267b8ee5fea2246a8fe", - "width": null - } - ], - "name": "Kerst Hits 2023", - "owner": { - "display_name": "Spotify", - "external_urls": { - "spotify": "https://open.spotify.com/user/spotify" - }, - "href": "https://api.spotify.com/v1/users/spotify", - "id": "spotify", - "type": "user", - "uri": "spotify:user:spotify" - }, - "primary_color": null, - "public": null, - "snapshot_id": "MTcwMjU2ODI4MSwwMDAwMDAwMDE1ZGRiNzI3OGY4OGU2MzA1MWNkZGMyNTdmNDUwMTc1", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX4dopZ9vOp1t/tracks", - "total": 298 - }, - "type": "playlist", - "uri": "spotify:playlist:37i9dQZF1DX4dopZ9vOp1t" - }, - { - "collaborative": false, - "description": "De 50 populairste hits van Nederland. Cover: Jack Harlow", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/37i9dQZF1DWSBi5svWQ9Nk" - }, - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DWSBi5svWQ9Nk", - "id": "37i9dQZF1DWSBi5svWQ9Nk", - "images": [ - { - "height": null, - "url": "https://i.scdn.co/image/ab67706f00000003f7b99051789611a49101c1cf", - "width": null - } - ], - "name": "Top Hits NL", - "owner": { - "display_name": "Spotify", - "external_urls": { - "spotify": "https://open.spotify.com/user/spotify" - }, - "href": "https://api.spotify.com/v1/users/spotify", - "id": "spotify", - "type": "user", - "uri": "spotify:user:spotify" - }, - "primary_color": null, - "public": null, - "snapshot_id": "MTcwMjU5NDgwMCwwMDAwMDAwMDU4NWY2MTE4NmU4NmIwMDdlMGE4ZGRkOTZkN2U2MzAx", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DWSBi5svWQ9Nk/tracks", - "total": 50 - }, - "type": "playlist", - "uri": "spotify:playlist:37i9dQZF1DWSBi5svWQ9Nk" - } - ], - "limit": 20, - "next": "https://api.spotify.com/v1/browse/featured-playlists?country=NL×tamp=2023-12-18T18%3A35%3A35&offset=20&limit=20", - "offset": 0, - "previous": null, - "total": 24 - } -} diff --git a/tests/components/spotify/fixtures/followed_artists.json b/tests/components/spotify/fixtures/followed_artists.json deleted file mode 100644 index 4e03ed8291b..00000000000 --- a/tests/components/spotify/fixtures/followed_artists.json +++ /dev/null @@ -1,87 +0,0 @@ -{ - "artists": { - "items": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/0lLY20XpZ9yDobkbHI7u1y" - }, - "followers": { - "href": null, - "total": 349437 - }, - "genres": [ - "brostep", - "complextro", - "danish electronic", - "edm", - "electro house", - "glitch", - "speedrun" - ], - "href": "https://api.spotify.com/v1/artists/0lLY20XpZ9yDobkbHI7u1y", - "id": "0lLY20XpZ9yDobkbHI7u1y", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab6761610000e5eb0fb1220e7e3ace47ebad023e", - "width": 640 - }, - { - "height": 320, - "url": 
"https://i.scdn.co/image/ab676161000051740fb1220e7e3ace47ebad023e", - "width": 320 - }, - { - "height": 160, - "url": "https://i.scdn.co/image/ab6761610000f1780fb1220e7e3ace47ebad023e", - "width": 160 - } - ], - "name": "Pegboard Nerds", - "popularity": 52, - "type": "artist", - "uri": "spotify:artist:0lLY20XpZ9yDobkbHI7u1y" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/0p4nmQO2msCgU4IF37Wi3j" - }, - "followers": { - "href": null, - "total": 11296082 - }, - "genres": ["canadian pop", "candy pop", "dance pop", "pop"], - "href": "https://api.spotify.com/v1/artists/0p4nmQO2msCgU4IF37Wi3j", - "id": "0p4nmQO2msCgU4IF37Wi3j", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab6761610000e5eb5c3349ddba6b8e064c1bab16", - "width": 640 - }, - { - "height": 320, - "url": "https://i.scdn.co/image/ab676161000051745c3349ddba6b8e064c1bab16", - "width": 320 - }, - { - "height": 160, - "url": "https://i.scdn.co/image/ab6761610000f1785c3349ddba6b8e064c1bab16", - "width": 160 - } - ], - "name": "Avril Lavigne", - "popularity": 78, - "type": "artist", - "uri": "spotify:artist:0p4nmQO2msCgU4IF37Wi3j" - } - ], - "next": "https://api.spotify.com/v1/me/following?type=artist&limit=20&locale=en-US,en;q=0.5&after=2NZMqINcyfepvLxQJdzcZk", - "total": 74, - "cursors": { - "after": "2NZMqINcyfepvLxQJdzcZk" - }, - "limit": 20, - "href": "https://api.spotify.com/v1/me/following?type=artist&limit=20&locale=en-US,en;q=0.5" - } -} diff --git a/tests/components/spotify/fixtures/new_releases.json b/tests/components/spotify/fixtures/new_releases.json deleted file mode 100644 index b6948ef79a5..00000000000 --- a/tests/components/spotify/fixtures/new_releases.json +++ /dev/null @@ -1,469 +0,0 @@ -{ - "albums": { - "href": "https://api.spotify.com/v1/browse/new-releases?offset=0&limit=20&locale=en-US,en;q%3D0.5", - "items": [ - { - "album_type": "album", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4gzpq5DPGxSnKTe4SA8HAU" - }, - "href": "https://api.spotify.com/v1/artists/4gzpq5DPGxSnKTe4SA8HAU", - "id": "4gzpq5DPGxSnKTe4SA8HAU", - "name": "Coldplay", - "type": "artist", - "uri": "spotify:artist:4gzpq5DPGxSnKTe4SA8HAU" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - 
"RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/5SGtrmYbIo0Dsg4kJ4qjM6" - }, - "href": "https://api.spotify.com/v1/albums/5SGtrmYbIo0Dsg4kJ4qjM6", - "id": "5SGtrmYbIo0Dsg4kJ4qjM6", - "images": [ - { - "height": 300, - "url": "https://i.scdn.co/image/ab67616d00001e0209ba52a5116e0c3e8461f58b", - "width": 300 - }, - { - "height": 64, - "url": "https://i.scdn.co/image/ab67616d0000485109ba52a5116e0c3e8461f58b", - "width": 64 - }, - { - "height": 640, - "url": "https://i.scdn.co/image/ab67616d0000b27309ba52a5116e0c3e8461f58b", - "width": 640 - } - ], - "name": "Moon Music", - "release_date": "2024-10-04", - "release_date_precision": "day", - "total_tracks": 10, - "type": "album", - "uri": "spotify:album:5SGtrmYbIo0Dsg4kJ4qjM6" - }, - { - "album_type": "album", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4U9nsRTH2mr9L4UXEWqG5e" - }, - "href": "https://api.spotify.com/v1/artists/4U9nsRTH2mr9L4UXEWqG5e", - "id": "4U9nsRTH2mr9L4UXEWqG5e", - "name": "Bente", - "type": "artist", - "uri": "spotify:artist:4U9nsRTH2mr9L4UXEWqG5e" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/713lZ7AF55fEFSQgcttj9y" - }, - "href": "https://api.spotify.com/v1/albums/713lZ7AF55fEFSQgcttj9y", - "id": "713lZ7AF55fEFSQgcttj9y", - "images": [ - { - "height": 300, - "url": "https://i.scdn.co/image/ab67616d00001e02ab9953b1d18f8233f6b26027", - "width": 300 - }, - { - "height": 64, - "url": "https://i.scdn.co/image/ab67616d00004851ab9953b1d18f8233f6b26027", - "width": 64 - }, - { - "height": 640, - "url": "https://i.scdn.co/image/ab67616d0000b273ab9953b1d18f8233f6b26027", - "width": 640 - } - ], - "name": "drift", - "release_date": "2024-10-03", - "release_date_precision": "day", - "total_tracks": 14, - "type": "album", - "uri": "spotify:album:713lZ7AF55fEFSQgcttj9y" - } - ], - "limit": 20, - "next": 
"https://api.spotify.com/v1/browse/new-releases?offset=20&limit=20&locale=en-US,en;q%3D0.5", - "offset": 0, - "previous": null, - "total": 100 - } -} diff --git a/tests/components/spotify/fixtures/playback.json b/tests/components/spotify/fixtures/playback.json deleted file mode 100644 index d0bf8e0478a..00000000000 --- a/tests/components/spotify/fixtures/playback.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "device": { - "id": "a19f7a03a25aff3e43f457a328a8ba67a8c44789", - "is_active": true, - "is_private_session": false, - "is_restricted": false, - "name": "Master Bathroom Speaker", - "type": "Speaker", - "volume_percent": 25 - }, - "shuffle_state": false, - "repeat_state": "off", - "timestamp": 1689639030791, - "context": { - "external_urls": { - "spotify": "https://open.spotify.com/playlist/2r35vbe6hHl6yDSMfjKgmm" - }, - "href": "https://api.spotify.com/v1/playlists/2r35vbe6hHl6yDSMfjKgmm", - "type": "playlist", - "uri": "spotify:user:rushofficial:playlist:2r35vbe6hHl6yDSMfjKgmm" - }, - "progress_ms": 249367, - "item": { - "album": { - "album_type": "album", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/2Hkut4rAAyrQxRdof7FVJq" - }, - "href": "https://api.spotify.com/v1/artists/2Hkut4rAAyrQxRdof7FVJq", - "id": "2Hkut4rAAyrQxRdof7FVJq", - "name": "Rush", - "type": "artist", - "uri": "spotify:artist:2Hkut4rAAyrQxRdof7FVJq" - } - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/3nUNxSh2szhmN7iifAKv5i" - }, - "href": "https://api.spotify.com/v1/albums/3nUNxSh2szhmN7iifAKv5i", - "id": "3nUNxSh2szhmN7iifAKv5i", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab67616d0000b27306c0d7ebcabad0c39b566983", - "width": 640 - }, - { - "height": 300, - "url": "https://i.scdn.co/image/ab67616d00001e0206c0d7ebcabad0c39b566983", - "width": 300 - }, - { - "height": 64, - "url": "https://i.scdn.co/image/ab67616d0000485106c0d7ebcabad0c39b566983", - "width": 64 - } - ], - "name": "Permanent Waves", - "release_date": "1980-01-01", - "release_date_precision": "day", - "total_tracks": 6, - "type": "album", - "uri": "spotify:album:3nUNxSh2szhmN7iifAKv5i" - }, - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/2Hkut4rAAyrQxRdof7FVJq" - }, - "href": "https://api.spotify.com/v1/artists/2Hkut4rAAyrQxRdof7FVJq", - "id": "2Hkut4rAAyrQxRdof7FVJq", - "name": "Rush", - "type": "artist", - "uri": "spotify:artist:2Hkut4rAAyrQxRdof7FVJq" - } - ], - "disc_number": 1, - "duration_ms": 296466, - "explicit": false, - "external_ids": { - "isrc": "USMR18070028" - }, - "external_urls": { - "spotify": "https://open.spotify.com/track/4e9hUiLsN4mx61ARosFi7p" - }, - "href": "https://api.spotify.com/v1/tracks/4e9hUiLsN4mx61ARosFi7p", - "id": "4e9hUiLsN4mx61ARosFi7p", - "is_local": false, - "name": "The Spirit Of Radio", - "popularity": 68, - "preview_url": "https://p.scdn.co/mp3-preview/75cc52f458b2416f33f15c499783c51119ba9a93?cid=20bbc62823a3412ba5267ea5398e52d0", - "track_number": 1, - "type": "track", - "uri": "spotify:track:4e9hUiLsN4mx61ARosFi7p" - }, - "currently_playing_type": "track", - "actions": { - "disallows": { - "skipping_prev": true, - "toggling_repeat_track": true - } - }, - "is_playing": true -} diff --git a/tests/components/spotify/fixtures/playback_episode.json b/tests/components/spotify/fixtures/playback_episode.json deleted file mode 100644 index 6a9de50a534..00000000000 --- a/tests/components/spotify/fixtures/playback_episode.json +++ /dev/null @@ -1,110 +0,0 @@ -{ - "device": { - "id": null, - 
"is_active": true, - "is_private_session": false, - "is_restricted": true, - "name": "Sonos Roam SL", - "supports_volume": true, - "type": "Speaker", - "volume_percent": 46 - }, - "shuffle_state": false, - "smart_shuffle": false, - "repeat_state": "off", - "timestamp": 1728219605131, - "context": { - "external_urls": { - "spotify": "https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD" - }, - "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD", - "type": "show", - "uri": "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD" - }, - "progress_ms": 5410, - "item": { - "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/06lRxUmh8UNVTByuyxLYqh/clip_132296_192296.mp3", - "description": "Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", - "duration_ms": 3690161, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/episode/3o0RYoo5iOMKSmEbunsbvW" - }, - "href": "https://api.spotify.com/v1/episodes/3o0RYoo5iOMKSmEbunsbvW", - "html_description": "
Patreon: https://www.patreon.com/safetythird Merch: https://safetythird.shop YouTube: https://www.youtube.com/@safetythird/ Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", - "id": "3o0RYoo5iOMKSmEbunsbvW", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", - "width": 640 - }, - { - "height": 300, - "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", - "width": 300 - }, - { - "height": 64, - "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", - "width": 64 - } - ], - "is_externally_hosted": false, - "is_playable": true, - "language": "en-US", - "languages": ["en-US"], - "name": "My Squirrel Has Brain Damage - Safety Third 119", - "release_date": "2024-07-26", - "release_date_precision": "day", - "resume_point": { - "fully_played": false, - "resume_position_ms": 0 - }, - "show": { - "copyrights": [], - "description": "Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube \"Scientists\". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.", - "explicit": true, - "external_urls": { - "spotify": "https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD" - }, - "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD", - "html_description": "
Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube "Scientists". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.
", - "id": "1Y9ExMgMxoBVrgrfU7u0nD", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8b", - "width": 640 - }, - { - "height": 300, - "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", - "width": 300 - }, - { - "height": 64, - "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", - "width": 64 - } - ], - "is_externally_hosted": false, - "languages": ["en-US"], - "media_type": "audio", - "name": "Safety Third", - "publisher": "Safety Third ", - "total_episodes": 120, - "type": "show", - "uri": "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD" - }, - "type": "episode", - "uri": "spotify:episode:3o0RYoo5iOMKSmEbunsbvW" - }, - "currently_playing_type": "episode", - "actions": { - "disallows": { - "resuming": true - } - }, - "is_playing": true -} diff --git a/tests/components/spotify/fixtures/playlist.json b/tests/components/spotify/fixtures/playlist.json deleted file mode 100644 index 36c28cc814b..00000000000 --- a/tests/components/spotify/fixtures/playlist.json +++ /dev/null @@ -1,520 +0,0 @@ -{ - "collaborative": false, - "external_urls": { - "spotify": "https://open.spotify.com/playlist/3cEYpjA9oz9GiPac4AsH4n" - }, - "followers": { - "href": null, - "total": 562 - }, - "href": "https://api.spotify.com/v1/playlists/3cEYpjA9oz9GiPac4AsH4n?locale=en-US%2Cen%3Bq%3D0.5", - "id": "3cEYpjA9oz9GiPac4AsH4n", - "images": [ - { - "url": "https://i.scdn.co/image/ab67706c0000da848d0ce13d55f634e290f744ba", - "height": null, - "width": null - } - ], - "primary_color": null, - "name": "Spotify Web API Testing playlist", - "description": "A playlist for testing pourposes", - "type": "playlist", - "uri": "spotify:playlist:3cEYpjA9oz9GiPac4AsH4n", - "owner": { - "href": "https://api.spotify.com/v1/users/jmperezperez", - "id": "jmperezperez", - "type": "user", - "uri": "spotify:user:jmperezperez", - "display_name": "JMPerez²", - "external_urls": { - "spotify": "https://open.spotify.com/user/jmperezperez" - } - }, - "public": true, - "snapshot_id": "MTgsZWFmNmZiNTIzYTg4ODM0OGQzZWQzOGI4NTdkNTJlMjU0OWFkYTUxMA==", - "tracks": { - "limit": 100, - "next": null, - "offset": 0, - "previous": null, - "href": "https://api.spotify.com/v1/playlists/3cEYpjA9oz9GiPac4AsH4n/tracks?offset=0&limit=100&locale=en-US%2Cen%3Bq%3D0.5", - "total": 5, - "items": [ - { - "added_at": "2015-01-15T12:39:22Z", - "primary_color": null, - "video_thumbnail": { - "url": null - }, - "is_local": false, - "added_by": { - "external_urls": { - "spotify": "https://open.spotify.com/user/jmperezperez" - }, - "id": "jmperezperez", - "type": "user", - "uri": "spotify:user:jmperezperez", - "href": "https://api.spotify.com/v1/users/jmperezperez" - }, - "track": { - "preview_url": "https://p.scdn.co/mp3-preview/04599a1fe12ffac01d2bcb08340f84c0dd2cc335?cid=c7c59b798aab4892ac040a25f7dd1575", - "explicit": false, - "type": "track", - "episode": false, - "track": true, - "album": { - "type": "album", - "album_type": "compilation", - "href": "https://api.spotify.com/v1/albums/2pANdqPvxInB0YvcDiw4ko", - "id": "2pANdqPvxInB0YvcDiw4ko", - "images": [ - { - "url": "https://i.scdn.co/image/ab67616d0000b273ce6d0eef0c1ce77e5f95bbbc", - "width": 640, - "height": 640 - }, - { - "url": "https://i.scdn.co/image/ab67616d00001e02ce6d0eef0c1ce77e5f95bbbc", - "width": 300, - "height": 300 - }, - { - "url": "https://i.scdn.co/image/ab67616d00004851ce6d0eef0c1ce77e5f95bbbc", - "width": 64, - "height": 64 - } - ], - "name": "Progressive Psy Trance Picks Vol.8", - "release_date": 
"2012-04-02", - "release_date_precision": "day", - "uri": "spotify:album:2pANdqPvxInB0YvcDiw4ko", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/0LyfQWJT6nXafLPZqxe9Of" - }, - "href": "https://api.spotify.com/v1/artists/0LyfQWJT6nXafLPZqxe9Of", - "id": "0LyfQWJT6nXafLPZqxe9Of", - "name": "Various Artists", - "type": "artist", - "uri": "spotify:artist:0LyfQWJT6nXafLPZqxe9Of" - } - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/2pANdqPvxInB0YvcDiw4ko" - }, - "total_tracks": 20 - }, - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/6eSdhw46riw2OUHgMwR8B5" - }, - "href": "https://api.spotify.com/v1/artists/6eSdhw46riw2OUHgMwR8B5", - "id": "6eSdhw46riw2OUHgMwR8B5", - "name": "Odiseo", - "type": "artist", - "uri": "spotify:artist:6eSdhw46riw2OUHgMwR8B5" - } - ], - "disc_number": 1, - "track_number": 10, - "duration_ms": 376000, - "external_ids": { - "isrc": "DEKC41200989" - }, - "external_urls": { - "spotify": "https://open.spotify.com/track/4rzfv0JLZfVhOhbSQ8o5jZ" - }, - "href": "https://api.spotify.com/v1/tracks/4rzfv0JLZfVhOhbSQ8o5jZ", - "id": "4rzfv0JLZfVhOhbSQ8o5jZ", - "name": "Api", - "popularity": 2, - "uri": "spotify:track:4rzfv0JLZfVhOhbSQ8o5jZ", - "is_local": false - } - }, - { - "added_at": "2015-01-15T12:40:03Z", - "primary_color": null, - "video_thumbnail": { - "url": null - }, - "is_local": false, - "added_by": { - "external_urls": { - "spotify": "https://open.spotify.com/user/jmperezperez" - }, - "id": "jmperezperez", - "type": "user", - "uri": "spotify:user:jmperezperez", - "href": "https://api.spotify.com/v1/users/jmperezperez" - }, - "track": { - "preview_url": "https://p.scdn.co/mp3-preview/d61fbb7016904624373008ea056d45e6df891071?cid=c7c59b798aab4892ac040a25f7dd1575", - "available_markets": [], - "explicit": false, - "type": "track", - "episode": false, - "track": true, - "album": { - "available_markets": [], - "type": "album", - "album_type": "compilation", - "href": "https://api.spotify.com/v1/albums/6nlfkk5GoXRL1nktlATNsy", - "id": "6nlfkk5GoXRL1nktlATNsy", - "images": [ - { - "url": "https://i.scdn.co/image/ab67616d0000b273aa2ff29970d9a63a49dfaeb2", - "width": 640, - "height": 640 - }, - { - "url": "https://i.scdn.co/image/ab67616d00001e02aa2ff29970d9a63a49dfaeb2", - "width": 300, - "height": 300 - }, - { - "url": "https://i.scdn.co/image/ab67616d00004851aa2ff29970d9a63a49dfaeb2", - "width": 64, - "height": 64 - } - ], - "name": "Wellness & Dreaming Source", - "release_date": "2015-01-09", - "release_date_precision": "day", - "uri": "spotify:album:6nlfkk5GoXRL1nktlATNsy", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/0LyfQWJT6nXafLPZqxe9Of" - }, - "href": "https://api.spotify.com/v1/artists/0LyfQWJT6nXafLPZqxe9Of", - "id": "0LyfQWJT6nXafLPZqxe9Of", - "name": "Various Artists", - "type": "artist", - "uri": "spotify:artist:0LyfQWJT6nXafLPZqxe9Of" - } - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/6nlfkk5GoXRL1nktlATNsy" - }, - "total_tracks": 25 - }, - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/5VQE4WOzPu9h3HnGLuBoA6" - }, - "href": "https://api.spotify.com/v1/artists/5VQE4WOzPu9h3HnGLuBoA6", - "id": "5VQE4WOzPu9h3HnGLuBoA6", - "name": "Vlasta Marek", - "type": "artist", - "uri": "spotify:artist:5VQE4WOzPu9h3HnGLuBoA6" - } - ], - "disc_number": 1, - "track_number": 21, - "duration_ms": 730066, - "external_ids": { - "isrc": "FR2X41475057" - }, - "external_urls": { - "spotify": 
"https://open.spotify.com/track/5o3jMYOSbaVz3tkgwhELSV" - }, - "href": "https://api.spotify.com/v1/tracks/5o3jMYOSbaVz3tkgwhELSV", - "id": "5o3jMYOSbaVz3tkgwhELSV", - "name": "Is", - "popularity": 0, - "uri": "spotify:track:5o3jMYOSbaVz3tkgwhELSV", - "is_local": false - } - }, - { - "added_at": "2015-01-15T12:22:30Z", - "primary_color": null, - "video_thumbnail": { - "url": null - }, - "is_local": false, - "added_by": { - "external_urls": { - "spotify": "https://open.spotify.com/user/jmperezperez" - }, - "id": "jmperezperez", - "type": "user", - "uri": "spotify:user:jmperezperez", - "href": "https://api.spotify.com/v1/users/jmperezperez" - }, - "track": { - "preview_url": "https://p.scdn.co/mp3-preview/cc680ec0f5fd5ff21f0cd11ac47e10d3cbb92190?cid=c7c59b798aab4892ac040a25f7dd1575", - "explicit": false, - "type": "track", - "episode": false, - "track": true, - "album": { - "type": "album", - "album_type": "album", - "href": "https://api.spotify.com/v1/albums/4hnqM0JK4CM1phwfq1Ldyz", - "id": "4hnqM0JK4CM1phwfq1Ldyz", - "images": [ - { - "url": "https://i.scdn.co/image/ab67616d0000b273ee0d0dce888c6c8a70db6e8b", - "width": 640, - "height": 640 - }, - { - "url": "https://i.scdn.co/image/ab67616d00001e02ee0d0dce888c6c8a70db6e8b", - "width": 300, - "height": 300 - }, - { - "url": "https://i.scdn.co/image/ab67616d00004851ee0d0dce888c6c8a70db6e8b", - "width": 64, - "height": 64 - } - ], - "name": "This Is Happening", - "release_date": "2010-05-17", - "release_date_precision": "day", - "uri": "spotify:album:4hnqM0JK4CM1phwfq1Ldyz", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/066X20Nz7iquqkkCW6Jxy6" - }, - "href": "https://api.spotify.com/v1/artists/066X20Nz7iquqkkCW6Jxy6", - "id": "066X20Nz7iquqkkCW6Jxy6", - "name": "LCD Soundsystem", - "type": "artist", - "uri": "spotify:artist:066X20Nz7iquqkkCW6Jxy6" - } - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/4hnqM0JK4CM1phwfq1Ldyz" - }, - "total_tracks": 9 - }, - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/066X20Nz7iquqkkCW6Jxy6" - }, - "href": "https://api.spotify.com/v1/artists/066X20Nz7iquqkkCW6Jxy6", - "id": "066X20Nz7iquqkkCW6Jxy6", - "name": "LCD Soundsystem", - "type": "artist", - "uri": "spotify:artist:066X20Nz7iquqkkCW6Jxy6" - } - ], - "disc_number": 1, - "track_number": 4, - "duration_ms": 401440, - "external_ids": { - "isrc": "US4GE1000022" - }, - "external_urls": { - "spotify": "https://open.spotify.com/track/4Cy0NHJ8Gh0xMdwyM9RkQm" - }, - "href": "https://api.spotify.com/v1/tracks/4Cy0NHJ8Gh0xMdwyM9RkQm", - "id": "4Cy0NHJ8Gh0xMdwyM9RkQm", - "name": "All I Want", - "popularity": 45, - "uri": "spotify:track:4Cy0NHJ8Gh0xMdwyM9RkQm", - "is_local": false - } - }, - { - "added_at": "2015-01-15T12:40:35Z", - "primary_color": null, - "video_thumbnail": { - "url": null - }, - "is_local": false, - "added_by": { - "external_urls": { - "spotify": "https://open.spotify.com/user/jmperezperez" - }, - "id": "jmperezperez", - "type": "user", - "uri": "spotify:user:jmperezperez", - "href": "https://api.spotify.com/v1/users/jmperezperez" - }, - "track": { - "preview_url": "https://p.scdn.co/mp3-preview/d6ecf1f98d0b1fdc8c535de8e2010d0d8b8d040b?cid=c7c59b798aab4892ac040a25f7dd1575", - "explicit": false, - "type": "track", - "episode": false, - "track": true, - "album": { - "type": "album", - "album_type": "album", - "href": "https://api.spotify.com/v1/albums/2usKFntxa98WHMcyW6xJBz", - "id": "2usKFntxa98WHMcyW6xJBz", - "images": [ - { - "url": 
"https://i.scdn.co/image/ab67616d0000b2738b7447ac3daa1da18811cf7b", - "width": 640, - "height": 640 - }, - { - "url": "https://i.scdn.co/image/ab67616d00001e028b7447ac3daa1da18811cf7b", - "width": 300, - "height": 300 - }, - { - "url": "https://i.scdn.co/image/ab67616d000048518b7447ac3daa1da18811cf7b", - "width": 64, - "height": 64 - } - ], - "name": "Glenn Horiuchi Trio / Gelenn Horiuchi Quartet: Mercy / Jump Start / Endpoints / Curl Out / Earthworks / Mind Probe / Null Set / Another Space (A)", - "release_date": "2011-04-01", - "release_date_precision": "day", - "uri": "spotify:album:2usKFntxa98WHMcyW6xJBz", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/272ArH9SUAlslQqsSgPJA2" - }, - "href": "https://api.spotify.com/v1/artists/272ArH9SUAlslQqsSgPJA2", - "id": "272ArH9SUAlslQqsSgPJA2", - "name": "Glenn Horiuchi Trio", - "type": "artist", - "uri": "spotify:artist:272ArH9SUAlslQqsSgPJA2" - } - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/2usKFntxa98WHMcyW6xJBz" - }, - "total_tracks": 8 - }, - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/272ArH9SUAlslQqsSgPJA2" - }, - "href": "https://api.spotify.com/v1/artists/272ArH9SUAlslQqsSgPJA2", - "id": "272ArH9SUAlslQqsSgPJA2", - "name": "Glenn Horiuchi Trio", - "type": "artist", - "uri": "spotify:artist:272ArH9SUAlslQqsSgPJA2" - } - ], - "disc_number": 1, - "track_number": 2, - "duration_ms": 358760, - "external_ids": { - "isrc": "USB8U1025969" - }, - "external_urls": { - "spotify": "https://open.spotify.com/track/6hvFrZNocdt2FcKGCSY5NI" - }, - "href": "https://api.spotify.com/v1/tracks/6hvFrZNocdt2FcKGCSY5NI", - "id": "6hvFrZNocdt2FcKGCSY5NI", - "name": "Endpoints", - "popularity": 0, - "uri": "spotify:track:6hvFrZNocdt2FcKGCSY5NI", - "is_local": false - } - }, - { - "added_at": "2015-01-15T12:41:10Z", - "primary_color": null, - "video_thumbnail": { - "url": null - }, - "is_local": false, - "added_by": { - "external_urls": { - "spotify": "https://open.spotify.com/user/jmperezperez" - }, - "id": "jmperezperez", - "type": "user", - "uri": "spotify:user:jmperezperez", - "href": "https://api.spotify.com/v1/users/jmperezperez" - }, - "track": { - "preview_url": "https://p.scdn.co/mp3-preview/47b974e463b1e862c7b3c18fa2ceedc513f2106b?cid=c7c59b798aab4892ac040a25f7dd1575", - "available_markets": [], - "explicit": false, - "type": "track", - "episode": false, - "track": true, - "album": { - "available_markets": [], - "type": "album", - "album_type": "album", - "href": "https://api.spotify.com/v1/albums/0ivM6kSawaug0j3tZVusG2", - "id": "0ivM6kSawaug0j3tZVusG2", - "images": [ - { - "url": "https://i.scdn.co/image/ab67616d0000b27304e57d181ff062f8339d6c71", - "width": 640, - "height": 640 - }, - { - "url": "https://i.scdn.co/image/ab67616d00001e0204e57d181ff062f8339d6c71", - "width": 300, - "height": 300 - }, - { - "url": "https://i.scdn.co/image/ab67616d0000485104e57d181ff062f8339d6c71", - "width": 64, - "height": 64 - } - ], - "name": "All The Best (Spanish Version)", - "release_date": "2007-01-01", - "release_date_precision": "day", - "uri": "spotify:album:0ivM6kSawaug0j3tZVusG2", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/2KftmGt9sk1yLjsAoloC3M" - }, - "href": "https://api.spotify.com/v1/artists/2KftmGt9sk1yLjsAoloC3M", - "id": "2KftmGt9sk1yLjsAoloC3M", - "name": "Zucchero", - "type": "artist", - "uri": "spotify:artist:2KftmGt9sk1yLjsAoloC3M" - } - ], - "external_urls": { - "spotify": 
"https://open.spotify.com/album/0ivM6kSawaug0j3tZVusG2" - }, - "total_tracks": 18 - }, - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/2KftmGt9sk1yLjsAoloC3M" - }, - "href": "https://api.spotify.com/v1/artists/2KftmGt9sk1yLjsAoloC3M", - "id": "2KftmGt9sk1yLjsAoloC3M", - "name": "Zucchero", - "type": "artist", - "uri": "spotify:artist:2KftmGt9sk1yLjsAoloC3M" - } - ], - "disc_number": 1, - "track_number": 18, - "duration_ms": 176093, - "external_ids": { - "isrc": "ITUM70701043" - }, - "external_urls": { - "spotify": "https://open.spotify.com/track/2E2znCPaS8anQe21GLxcvJ" - }, - "href": "https://api.spotify.com/v1/tracks/2E2znCPaS8anQe21GLxcvJ", - "id": "2E2znCPaS8anQe21GLxcvJ", - "name": "You Are So Beautiful", - "popularity": 0, - "uri": "spotify:track:2E2znCPaS8anQe21GLxcvJ", - "is_local": false - } - } - ] - } -} diff --git a/tests/components/spotify/fixtures/recently_played_tracks.json b/tests/components/spotify/fixtures/recently_played_tracks.json deleted file mode 100644 index f000d76a52f..00000000000 --- a/tests/components/spotify/fixtures/recently_played_tracks.json +++ /dev/null @@ -1,964 +0,0 @@ -{ - "items": [ - { - "track": { - "album": { - "album_type": "single", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/6emHCSoB4tJxTVXakbrpPz" - }, - "href": "https://api.spotify.com/v1/artists/6emHCSoB4tJxTVXakbrpPz", - "id": "6emHCSoB4tJxTVXakbrpPz", - "name": "Karen O", - "type": "artist", - "uri": "spotify:artist:6emHCSoB4tJxTVXakbrpPz" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/2dBj3prW7gP9bCCOIQeDUf" - }, - "href": "https://api.spotify.com/v1/artists/2dBj3prW7gP9bCCOIQeDUf", - "id": "2dBj3prW7gP9bCCOIQeDUf", - "name": "Danger Mouse", - "type": "artist", - "uri": "spotify:artist:2dBj3prW7gP9bCCOIQeDUf" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/6Ab1VSoMD5fvlagOW2QDOJ" - }, - "href": "https://api.spotify.com/v1/albums/6Ab1VSoMD5fvlagOW2QDOJ", - "id": 
"6Ab1VSoMD5fvlagOW2QDOJ", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab67616d0000b273cdac047e7894fb56a0dfdcde", - "width": 640 - }, - { - "height": 300, - "url": "https://i.scdn.co/image/ab67616d00001e02cdac047e7894fb56a0dfdcde", - "width": 300 - }, - { - "height": 64, - "url": "https://i.scdn.co/image/ab67616d00004851cdac047e7894fb56a0dfdcde", - "width": 64 - } - ], - "name": "Super Breath", - "release_date": "2024-07-24", - "release_date_precision": "day", - "total_tracks": 1, - "type": "album", - "uri": "spotify:album:6Ab1VSoMD5fvlagOW2QDOJ" - }, - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/6emHCSoB4tJxTVXakbrpPz" - }, - "href": "https://api.spotify.com/v1/artists/6emHCSoB4tJxTVXakbrpPz", - "id": "6emHCSoB4tJxTVXakbrpPz", - "name": "Karen O", - "type": "artist", - "uri": "spotify:artist:6emHCSoB4tJxTVXakbrpPz" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/2dBj3prW7gP9bCCOIQeDUf" - }, - "href": "https://api.spotify.com/v1/artists/2dBj3prW7gP9bCCOIQeDUf", - "id": "2dBj3prW7gP9bCCOIQeDUf", - "name": "Danger Mouse", - "type": "artist", - "uri": "spotify:artist:2dBj3prW7gP9bCCOIQeDUf" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 211800, - "explicit": false, - "external_ids": { - "isrc": "QMB622409101" - }, - "external_urls": { - "spotify": "https://open.spotify.com/track/71dMjqJ8UJV700zYs5YZCh" - }, - "href": "https://api.spotify.com/v1/tracks/71dMjqJ8UJV700zYs5YZCh", - "id": "71dMjqJ8UJV700zYs5YZCh", - "is_local": false, - "name": "Super Breath", - "popularity": 58, - "preview_url": "https://p.scdn.co/mp3-preview/f1ee3ade75c6eb5cb227ed8c96de8674d8ce581f?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 1, - "type": "track", - "uri": "spotify:track:71dMjqJ8UJV700zYs5YZCh" - }, - "played_at": "2024-10-06T18:09:18.556Z", - "context": null - }, - { - "track": { - "album": { - "album_type": "single", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/6emHCSoB4tJxTVXakbrpPz" - }, 
- "href": "https://api.spotify.com/v1/artists/6emHCSoB4tJxTVXakbrpPz", - "id": "6emHCSoB4tJxTVXakbrpPz", - "name": "Karen O", - "type": "artist", - "uri": "spotify:artist:6emHCSoB4tJxTVXakbrpPz" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/2dBj3prW7gP9bCCOIQeDUf" - }, - "href": "https://api.spotify.com/v1/artists/2dBj3prW7gP9bCCOIQeDUf", - "id": "2dBj3prW7gP9bCCOIQeDUf", - "name": "Danger Mouse", - "type": "artist", - "uri": "spotify:artist:2dBj3prW7gP9bCCOIQeDUf" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/6Ab1VSoMD5fvlagOW2QDOJ" - }, - "href": "https://api.spotify.com/v1/albums/6Ab1VSoMD5fvlagOW2QDOJ", - "id": "6Ab1VSoMD5fvlagOW2QDOJ", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab67616d0000b273cdac047e7894fb56a0dfdcde", - "width": 640 - }, - { - "height": 300, - "url": "https://i.scdn.co/image/ab67616d00001e02cdac047e7894fb56a0dfdcde", - "width": 300 - }, - { - "height": 64, - "url": "https://i.scdn.co/image/ab67616d00004851cdac047e7894fb56a0dfdcde", - "width": 64 - } - ], - "name": "Super Breath", - "release_date": "2024-07-24", - "release_date_precision": "day", - "total_tracks": 1, - "type": "album", - "uri": "spotify:album:6Ab1VSoMD5fvlagOW2QDOJ" - }, - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/6emHCSoB4tJxTVXakbrpPz" - }, - "href": "https://api.spotify.com/v1/artists/6emHCSoB4tJxTVXakbrpPz", - "id": "6emHCSoB4tJxTVXakbrpPz", - "name": "Karen O", - "type": "artist", - "uri": "spotify:artist:6emHCSoB4tJxTVXakbrpPz" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/2dBj3prW7gP9bCCOIQeDUf" - }, - "href": "https://api.spotify.com/v1/artists/2dBj3prW7gP9bCCOIQeDUf", - "id": "2dBj3prW7gP9bCCOIQeDUf", - "name": "Danger Mouse", - "type": "artist", - "uri": "spotify:artist:2dBj3prW7gP9bCCOIQeDUf" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - 
"EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 211800, - "explicit": false, - "external_ids": { - "isrc": "QMB622409101" - }, - "external_urls": { - "spotify": "https://open.spotify.com/track/71dMjqJ8UJV700zYs5YZCh" - }, - "href": "https://api.spotify.com/v1/tracks/71dMjqJ8UJV700zYs5YZCh", - "id": "71dMjqJ8UJV700zYs5YZCh", - "is_local": false, - "name": "Super Breath", - "popularity": 58, - "preview_url": "https://p.scdn.co/mp3-preview/f1ee3ade75c6eb5cb227ed8c96de8674d8ce581f?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 1, - "type": "track", - "uri": "spotify:track:71dMjqJ8UJV700zYs5YZCh" - }, - "played_at": "2024-10-06T18:05:33.902Z", - "context": { - "type": "album", - "href": "https://api.spotify.com/v1/albums/57MSBg5pBQZH5bfLVDmeuP", - "external_urls": { - "spotify": "https://open.spotify.com/album/57MSBg5pBQZH5bfLVDmeuP" - }, - "uri": "spotify:album:57MSBg5pBQZH5bfLVDmeuP" - } - } - ], - "next": "https://api.spotify.com/v1/me/player/recently-played?before=1728234176022", - "cursors": { - "after": "1728238158556", - "before": "1728234176022" - }, - "limit": 20, - "href": "https://api.spotify.com/v1/me/player/recently-played" -} diff --git a/tests/components/spotify/fixtures/saved_albums.json b/tests/components/spotify/fixtures/saved_albums.json deleted file mode 100644 index 0d58ecb89ea..00000000000 --- a/tests/components/spotify/fixtures/saved_albums.json +++ /dev/null @@ -1,7637 +0,0 @@ -{ - "href": "https://api.spotify.com/v1/me/albums?offset=0&limit=20&locale=en-US,en;q%3D0.5", - "items": [ - { - "added_at": "2024-09-19T22:00:00Z", - "album": { - "album_type": "album", - "total_tracks": 12, - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - 
"QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/57MSBg5pBQZH5bfLVDmeuP" - }, - "href": "https://api.spotify.com/v1/albums/57MSBg5pBQZH5bfLVDmeuP?locale=en-US%2Cen%3Bq%3D0.5", - "id": "57MSBg5pBQZH5bfLVDmeuP", - "images": [ - { - "url": "https://i.scdn.co/image/ab67616d0000b2733126a95bb7ed4146a80c7fc6", - "height": 640, - "width": 640 - }, - { - "url": "https://i.scdn.co/image/ab67616d00001e023126a95bb7ed4146a80c7fc6", - "height": 300, - "width": 300 - }, - { - "url": "https://i.scdn.co/image/ab67616d000048513126a95bb7ed4146a80c7fc6", - "height": 64, - "width": 64 - } - ], - "name": "In Waves", - "release_date": "2024-09-20", - "release_date_precision": "day", - "type": "album", - "uri": "spotify:album:57MSBg5pBQZH5bfLVDmeuP", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" - }, - "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", - "id": "7A0awCXkE1FtSU8B0qwOJQ", - "name": "Jamie xx", - "type": "artist", - "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" - } - ], - "tracks": { - "href": "https://api.spotify.com/v1/albums/57MSBg5pBQZH5bfLVDmeuP/tracks?offset=0&limit=50&locale=en-US,en;q%3D0.5", - "limit": 50, - "next": null, - "offset": 0, - "previous": null, - "total": 12, - "items": [ - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" - }, - "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", - "id": "7A0awCXkE1FtSU8B0qwOJQ", - "name": "Jamie xx", - "type": "artist", - "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - 
"LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 135835, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/7uLBdV19ad7kAjU2oB1l6p" - }, - "href": "https://api.spotify.com/v1/tracks/7uLBdV19ad7kAjU2oB1l6p", - "id": "7uLBdV19ad7kAjU2oB1l6p", - "name": "Wanna", - "preview_url": "https://p.scdn.co/mp3-preview/fc112f83fe770b09e4c1bd586e5b9c144e384bd7?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 1, - "type": "track", - "uri": "spotify:track:7uLBdV19ad7kAjU2oB1l6p", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" - }, - "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", - "id": "7A0awCXkE1FtSU8B0qwOJQ", - "name": "Jamie xx", - "type": "artist", - "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 240580, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/3pjX4hC8adabkXGu3X9GTC" - }, - "href": "https://api.spotify.com/v1/tracks/3pjX4hC8adabkXGu3X9GTC", - "id": "3pjX4hC8adabkXGu3X9GTC", - "name": "Treat Each Other Right", - "preview_url": "https://p.scdn.co/mp3-preview/a518fdb34284daa9a2298fd5491d6cede24a3e01?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 2, - "type": "track", - "uri": "spotify:track:3pjX4hC8adabkXGu3X9GTC", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" - }, - "href": 
"https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", - "id": "7A0awCXkE1FtSU8B0qwOJQ", - "name": "Jamie xx", - "type": "artist", - "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/3X2DdnmoANw8Rg8luHyZQb" - }, - "href": "https://api.spotify.com/v1/artists/3X2DdnmoANw8Rg8luHyZQb", - "id": "3X2DdnmoANw8Rg8luHyZQb", - "name": "Romy", - "type": "artist", - "uri": "spotify:artist:3X2DdnmoANw8Rg8luHyZQb" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4KDu9uqzqseVCpQXMa8Pvm" - }, - "href": "https://api.spotify.com/v1/artists/4KDu9uqzqseVCpQXMa8Pvm", - "id": "4KDu9uqzqseVCpQXMa8Pvm", - "name": "Oliver Sim", - "type": "artist", - "uri": "spotify:artist:4KDu9uqzqseVCpQXMa8Pvm" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/3iOvXCl6edW5Um0fXEBRXy" - }, - "href": "https://api.spotify.com/v1/artists/3iOvXCl6edW5Um0fXEBRXy", - "id": "3iOvXCl6edW5Um0fXEBRXy", - "name": "The xx", - "type": "artist", - "uri": "spotify:artist:3iOvXCl6edW5Um0fXEBRXy" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 208334, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/4gBniy3TwR9o2JDBx48TlD" - }, - "href": "https://api.spotify.com/v1/tracks/4gBniy3TwR9o2JDBx48TlD", - "id": "4gBniy3TwR9o2JDBx48TlD", - "name": "Waited All Night", - "preview_url": "https://p.scdn.co/mp3-preview/b7820ac10349ca374242240f69887c073a4980f2?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 3, - "type": "track", - "uri": "spotify:track:4gBniy3TwR9o2JDBx48TlD", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" - }, - "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", - "id": "7A0awCXkE1FtSU8B0qwOJQ", - "name": "Jamie xx", - "type": "artist", - "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/0XfQBWgzisaS9ltDV9bXAS" 
- }, - "href": "https://api.spotify.com/v1/artists/0XfQBWgzisaS9ltDV9bXAS", - "id": "0XfQBWgzisaS9ltDV9bXAS", - "name": "Honey Dijon", - "type": "artist", - "uri": "spotify:artist:0XfQBWgzisaS9ltDV9bXAS" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 222315, - "explicit": true, - "external_urls": { - "spotify": "https://open.spotify.com/track/79gWc6dZ1dXH7rC67DTunz" - }, - "href": "https://api.spotify.com/v1/tracks/79gWc6dZ1dXH7rC67DTunz", - "id": "79gWc6dZ1dXH7rC67DTunz", - "name": "Baddy On The Floor", - "preview_url": "https://p.scdn.co/mp3-preview/c260664dd5adc2290fce52cb51aa8667e39c2118?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 4, - "type": "track", - "uri": "spotify:track:79gWc6dZ1dXH7rC67DTunz", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" - }, - "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", - "id": "7A0awCXkE1FtSU8B0qwOJQ", - "name": "Jamie xx", - "type": "artist", - "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/0fEfMW5bypHZ0A8eLnhwj5" - }, - "href": "https://api.spotify.com/v1/artists/0fEfMW5bypHZ0A8eLnhwj5", - "id": "0fEfMW5bypHZ0A8eLnhwj5", - "name": "Kelsey Lu", - "type": "artist", - "uri": "spotify:artist:0fEfMW5bypHZ0A8eLnhwj5" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/0FNfiTQCR5o3ounOlWzm1d" - }, - "href": "https://api.spotify.com/v1/artists/0FNfiTQCR5o3ounOlWzm1d", - "id": "0FNfiTQCR5o3ounOlWzm1d", - "name": "John Glacier", - "type": "artist", - "uri": "spotify:artist:0FNfiTQCR5o3ounOlWzm1d" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/1R84VlXnFFULOsWWV8IrCQ" - }, - "href": "https://api.spotify.com/v1/artists/1R84VlXnFFULOsWWV8IrCQ", - "id": "1R84VlXnFFULOsWWV8IrCQ", - "name": "Panda Bear", - "type": "artist", - "uri": "spotify:artist:1R84VlXnFFULOsWWV8IrCQ" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - 
"BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 212339, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/1gRMKwvMvp6LcQVMpMXQg2" - }, - "href": "https://api.spotify.com/v1/tracks/1gRMKwvMvp6LcQVMpMXQg2", - "id": "1gRMKwvMvp6LcQVMpMXQg2", - "name": "Dafodil", - "preview_url": "https://p.scdn.co/mp3-preview/173fad98e5e51a6cfb02b3cb394ab46c70d44303?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 5, - "type": "track", - "uri": "spotify:track:1gRMKwvMvp6LcQVMpMXQg2", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" - }, - "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", - "id": "7A0awCXkE1FtSU8B0qwOJQ", - "name": "Jamie xx", - "type": "artist", - "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - 
"TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 205638, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/27D9YN3uHPD3PTXvzNtbto" - }, - "href": "https://api.spotify.com/v1/tracks/27D9YN3uHPD3PTXvzNtbto", - "id": "27D9YN3uHPD3PTXvzNtbto", - "name": "Still Summer", - "preview_url": "https://p.scdn.co/mp3-preview/e959ae6394e9d19e00cd474ed2b76bb43b6063d9?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 6, - "type": "track", - "uri": "spotify:track:27D9YN3uHPD3PTXvzNtbto", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" - }, - "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", - "id": "7A0awCXkE1FtSU8B0qwOJQ", - "name": "Jamie xx", - "type": "artist", - "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/6UE7nl9mha6s8z0wFQFIZ2" - }, - "href": "https://api.spotify.com/v1/artists/6UE7nl9mha6s8z0wFQFIZ2", - "id": "6UE7nl9mha6s8z0wFQFIZ2", - "name": "Robyn", - "type": "artist", - "uri": "spotify:artist:6UE7nl9mha6s8z0wFQFIZ2" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 202648, - "explicit": true, - "external_urls": { - "spotify": "https://open.spotify.com/track/0pMj03SiaZ9bkFlXQWNhtZ" - }, - "href": "https://api.spotify.com/v1/tracks/0pMj03SiaZ9bkFlXQWNhtZ", - "id": "0pMj03SiaZ9bkFlXQWNhtZ", - "name": "Life", - "preview_url": "https://p.scdn.co/mp3-preview/261bc3bd3192ef4158b1ca42e95262113241a326?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 7, - "type": "track", - "uri": "spotify:track:0pMj03SiaZ9bkFlXQWNhtZ", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": 
"https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" - }, - "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", - "id": "7A0awCXkE1FtSU8B0qwOJQ", - "name": "Jamie xx", - "type": "artist", - "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 222365, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/7gb0pekqHQYTGo6NWLBvT5" - }, - "href": "https://api.spotify.com/v1/tracks/7gb0pekqHQYTGo6NWLBvT5", - "id": "7gb0pekqHQYTGo6NWLBvT5", - "name": "The Feeling I Get From You", - "preview_url": "https://p.scdn.co/mp3-preview/da24fadc4bca20394435e53f5d61e8f6c36f9614?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 8, - "type": "track", - "uri": "spotify:track:7gb0pekqHQYTGo6NWLBvT5", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" - }, - "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", - "id": "7A0awCXkE1FtSU8B0qwOJQ", - "name": "Jamie xx", - "type": "artist", - "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - 
"FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 376918, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/6pOzbdJKEr4hvXkX7VkfY6" - }, - "href": "https://api.spotify.com/v1/tracks/6pOzbdJKEr4hvXkX7VkfY6", - "id": "6pOzbdJKEr4hvXkX7VkfY6", - "name": "Breather", - "preview_url": "https://p.scdn.co/mp3-preview/dc7cd612c205968f5d6cb32696305656ae7ad888?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 9, - "type": "track", - "uri": "spotify:track:6pOzbdJKEr4hvXkX7VkfY6", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" - }, - "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", - "id": "7A0awCXkE1FtSU8B0qwOJQ", - "name": "Jamie xx", - "type": "artist", - "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/3C8RpaI3Go0yFF9whvKoED" - }, - "href": "https://api.spotify.com/v1/artists/3C8RpaI3Go0yFF9whvKoED", - "id": "3C8RpaI3Go0yFF9whvKoED", - "name": "The Avalanches", - "type": "artist", - "uri": "spotify:artist:3C8RpaI3Go0yFF9whvKoED" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 254142, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/3cfgisz6DhZmooQk08P4Eu" - }, - "href": "https://api.spotify.com/v1/tracks/3cfgisz6DhZmooQk08P4Eu", - "id": "3cfgisz6DhZmooQk08P4Eu", - "name": "All You 
Children", - "preview_url": "https://p.scdn.co/mp3-preview/ff3fc064f340e47347d4677332daf6da8155ae38?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 10, - "type": "track", - "uri": "spotify:track:3cfgisz6DhZmooQk08P4Eu", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" - }, - "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", - "id": "7A0awCXkE1FtSU8B0qwOJQ", - "name": "Jamie xx", - "type": "artist", - "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 71680, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/1wpcJ6TCrKpH6KdBmrp9yN" - }, - "href": "https://api.spotify.com/v1/tracks/1wpcJ6TCrKpH6KdBmrp9yN", - "id": "1wpcJ6TCrKpH6KdBmrp9yN", - "name": "Every Single Weekend - Interlude", - "preview_url": "https://p.scdn.co/mp3-preview/2c46e4cea66da846807b70c7974d19b7837eba52?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 11, - "type": "track", - "uri": "spotify:track:1wpcJ6TCrKpH6KdBmrp9yN", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" - }, - "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", - "id": "7A0awCXkE1FtSU8B0qwOJQ", - "name": "Jamie xx", - "type": "artist", - "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/2Q4FR4Ss0mh6EvbiQBHEOU" - }, - "href": "https://api.spotify.com/v1/artists/2Q4FR4Ss0mh6EvbiQBHEOU", - "id": "2Q4FR4Ss0mh6EvbiQBHEOU", - "name": "Oona Doherty", - "type": "artist", - "uri": "spotify:artist:2Q4FR4Ss0mh6EvbiQBHEOU" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - 
"MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 337414, - "explicit": true, - "external_urls": { - "spotify": "https://open.spotify.com/track/08Jhu8OZ6gCIGWQn6vP3uI" - }, - "href": "https://api.spotify.com/v1/tracks/08Jhu8OZ6gCIGWQn6vP3uI", - "id": "08Jhu8OZ6gCIGWQn6vP3uI", - "name": "Falling Together", - "preview_url": "https://p.scdn.co/mp3-preview/2fa5fc5e733495719170f672a07b172bf678a89f?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 12, - "type": "track", - "uri": "spotify:track:08Jhu8OZ6gCIGWQn6vP3uI", - "is_local": false - } - ] - }, - "copyrights": [ - { - "text": "2024 Young", - "type": "C" - }, - { - "text": "2024 Young", - "type": "P" - } - ], - "external_ids": { - "upc": "889030035653" - }, - "genres": [], - "label": "Young", - "popularity": 73 - } - }, - { - "added_at": "2024-09-05T22:00:00Z", - "album": { - "album_type": "album", - "total_tracks": 20, - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - 
"IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/3DQueEd1Ft9PHWgovDzPKh" - }, - "href": "https://api.spotify.com/v1/albums/3DQueEd1Ft9PHWgovDzPKh?locale=en-US%2Cen%3Bq%3D0.5", - "id": "3DQueEd1Ft9PHWgovDzPKh", - "images": [ - { - "url": "https://i.scdn.co/image/ab67616d0000b2736b8a4828e057b7dc1c4a4d39", - "height": 640, - "width": 640 - }, - { - "url": "https://i.scdn.co/image/ab67616d00001e026b8a4828e057b7dc1c4a4d39", - "height": 300, - "width": 300 - }, - { - "url": "https://i.scdn.co/image/ab67616d000048516b8a4828e057b7dc1c4a4d39", - "height": 64, - "width": 64 - } - ], - "name": "ten days", - "release_date": "2024-09-06", - "release_date_precision": "day", - "type": "album", - "uri": "spotify:album:3DQueEd1Ft9PHWgovDzPKh", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - } - ], - "tracks": { - "href": "https://api.spotify.com/v1/albums/3DQueEd1Ft9PHWgovDzPKh/tracks?offset=0&limit=50&locale=en-US,en;q%3D0.5", - "limit": 50, - "next": null, - "offset": 0, - "previous": null, - "total": 20, - "items": [ - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 30857, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/00nDbqJkHBGUFdim9M0xGc" - }, - "href": "https://api.spotify.com/v1/tracks/00nDbqJkHBGUFdim9M0xGc", - "id": "00nDbqJkHBGUFdim9M0xGc", - "name": ".one", - "preview_url": 
"https://p.scdn.co/mp3-preview/52224422e178fa35baa9ffbf097372b7031fbecf?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 1, - "type": "track", - "uri": "spotify:track:00nDbqJkHBGUFdim9M0xGc", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/6l7R1jntPahGxwJt7Tky8h" - }, - "href": "https://api.spotify.com/v1/artists/6l7R1jntPahGxwJt7Tky8h", - "id": "6l7R1jntPahGxwJt7Tky8h", - "name": "Obongjayar", - "type": "artist", - "uri": "spotify:artist:6l7R1jntPahGxwJt7Tky8h" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 220653, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/1rf4SX7dduNbrNnOmupLzi" - }, - "href": "https://api.spotify.com/v1/tracks/1rf4SX7dduNbrNnOmupLzi", - "id": "1rf4SX7dduNbrNnOmupLzi", - "name": "adore u", - "preview_url": "https://p.scdn.co/mp3-preview/49ddf22bfe3925899cbb9ecf5d5157525becdcb4?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 2, - "type": "track", - "uri": "spotify:track:1rf4SX7dduNbrNnOmupLzi", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - 
"PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 10670, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/0lt9clHEwYyheuC9rik9UH" - }, - "href": "https://api.spotify.com/v1/tracks/0lt9clHEwYyheuC9rik9UH", - "id": "0lt9clHEwYyheuC9rik9UH", - "name": ".two", - "preview_url": "https://p.scdn.co/mp3-preview/59a26651d9742fa1856469cf1c0f8c7c55819525?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 3, - "type": "track", - "uri": "spotify:track:0lt9clHEwYyheuC9rik9UH", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/6Ja6zFB5d7XRihhfMo6KzY" - }, - "href": "https://api.spotify.com/v1/artists/6Ja6zFB5d7XRihhfMo6KzY", - "id": "6Ja6zFB5d7XRihhfMo6KzY", - "name": "Jozzy", - "type": "artist", - "uri": "spotify:artist:6Ja6zFB5d7XRihhfMo6KzY" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7IrBqZo6diq3hV3GpUhrs2" - }, - "href": "https://api.spotify.com/v1/artists/7IrBqZo6diq3hV3GpUhrs2", - "id": "7IrBqZo6diq3hV3GpUhrs2", - "name": "Jim Legxacy", - "type": "artist", - "uri": "spotify:artist:7IrBqZo6diq3hV3GpUhrs2" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - 
"GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 181545, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/6twB0uYXJYW9t5GHfYaQ3i" - }, - "href": "https://api.spotify.com/v1/tracks/6twB0uYXJYW9t5GHfYaQ3i", - "id": "6twB0uYXJYW9t5GHfYaQ3i", - "name": "ten", - "preview_url": "https://p.scdn.co/mp3-preview/99fc4c0f25e64d30af9e619ea820bed60aa2b1c6?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 4, - "type": "track", - "uri": "spotify:track:6twB0uYXJYW9t5GHfYaQ3i", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 15034, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/6G7TRmzTt9tnrM0QqSVpJW" - }, - "href": "https://api.spotify.com/v1/tracks/6G7TRmzTt9tnrM0QqSVpJW", - "id": "6G7TRmzTt9tnrM0QqSVpJW", - "name": ".three", - "preview_url": "https://p.scdn.co/mp3-preview/7aeb75b213d74995df23a41d86494834bc801d78?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 5, - "type": "track", - "uri": "spotify:track:6G7TRmzTt9tnrM0QqSVpJW", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" 
- }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/2WoVwexZuODvclzULjPQtm" - }, - "href": "https://api.spotify.com/v1/artists/2WoVwexZuODvclzULjPQtm", - "id": "2WoVwexZuODvclzULjPQtm", - "name": "Sampha", - "type": "artist", - "uri": "spotify:artist:2WoVwexZuODvclzULjPQtm" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 214469, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/4IHblO52meh2jwqES1BA7X" - }, - "href": "https://api.spotify.com/v1/tracks/4IHblO52meh2jwqES1BA7X", - "id": "4IHblO52meh2jwqES1BA7X", - "name": "fear less", - "preview_url": "https://p.scdn.co/mp3-preview/c0952ae5c7423cc08ca7a53f0f182a6f20586cde?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 6, - "type": "track", - "uri": "spotify:track:4IHblO52meh2jwqES1BA7X", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - 
"AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 9856, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/1wU9pfdw6ht8HKfxz6wMNq" - }, - "href": "https://api.spotify.com/v1/tracks/1wU9pfdw6ht8HKfxz6wMNq", - "id": "1wU9pfdw6ht8HKfxz6wMNq", - "name": ".four", - "preview_url": "https://p.scdn.co/mp3-preview/a4a6f591cb0cf93a7d57df33ad70ac1d8b7db349?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 7, - "type": "track", - "uri": "spotify:track:1wU9pfdw6ht8HKfxz6wMNq", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4PLsMEk2DCRVlVL2a9aZAv" - }, - "href": "https://api.spotify.com/v1/artists/4PLsMEk2DCRVlVL2a9aZAv", - "id": "4PLsMEk2DCRVlVL2a9aZAv", - "name": "SOAK", - "type": "artist", - "uri": "spotify:artist:4PLsMEk2DCRVlVL2a9aZAv" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 260997, - "explicit": false, - "external_urls": { 
- "spotify": "https://open.spotify.com/track/2D9a9CXeo3HFtVeaNlzp4a" - }, - "href": "https://api.spotify.com/v1/tracks/2D9a9CXeo3HFtVeaNlzp4a", - "id": "2D9a9CXeo3HFtVeaNlzp4a", - "name": "just stand there", - "preview_url": "https://p.scdn.co/mp3-preview/06a95f2285831e3f4848718f5c8c2f7deeafaf80?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 8, - "type": "track", - "uri": "spotify:track:2D9a9CXeo3HFtVeaNlzp4a", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 15254, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/3vTHKAYJy0hY1OkVv1qLNM" - }, - "href": "https://api.spotify.com/v1/tracks/3vTHKAYJy0hY1OkVv1qLNM", - "id": "3vTHKAYJy0hY1OkVv1qLNM", - "name": ".five", - "preview_url": "https://p.scdn.co/mp3-preview/29846c63d0cf33c05ee69ea92d412a2f473e1604?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 9, - "type": "track", - "uri": "spotify:track:3vTHKAYJy0hY1OkVv1qLNM", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/3jK9MiCrA42lLAdMGUZpwa" - }, - "href": "https://api.spotify.com/v1/artists/3jK9MiCrA42lLAdMGUZpwa", - "id": "3jK9MiCrA42lLAdMGUZpwa", - "name": "Anderson .Paak", - "type": "artist", - "uri": "spotify:artist:3jK9MiCrA42lLAdMGUZpwa" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/6UtYvUtXnmg5EtllDFlWp8" - }, - "href": 
"https://api.spotify.com/v1/artists/6UtYvUtXnmg5EtllDFlWp8", - "id": "6UtYvUtXnmg5EtllDFlWp8", - "name": "CHIKA", - "type": "artist", - "uri": "spotify:artist:6UtYvUtXnmg5EtllDFlWp8" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 224073, - "explicit": true, - "external_urls": { - "spotify": "https://open.spotify.com/track/1qfJ6OvxrspQTmcvdIEoX6" - }, - "href": "https://api.spotify.com/v1/tracks/1qfJ6OvxrspQTmcvdIEoX6", - "id": "1qfJ6OvxrspQTmcvdIEoX6", - "name": "places to be", - "preview_url": "https://p.scdn.co/mp3-preview/5c1c520365bbd3c9e2e84be42d9d70b0ec71ed01?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 10, - "type": "track", - "uri": "spotify:track:1qfJ6OvxrspQTmcvdIEoX6", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", 
- "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 28836, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/13H2XgH3k8SEptaoD5qeLG" - }, - "href": "https://api.spotify.com/v1/tracks/13H2XgH3k8SEptaoD5qeLG", - "id": "13H2XgH3k8SEptaoD5qeLG", - "name": ".six", - "preview_url": "https://p.scdn.co/mp3-preview/e630a09889f8e86bca24bcb54a6448e8c969936f?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 11, - "type": "track", - "uri": "spotify:track:13H2XgH3k8SEptaoD5qeLG", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/59MDSNIYoOY0WRYuodzJPD" - }, - "href": "https://api.spotify.com/v1/artists/59MDSNIYoOY0WRYuodzJPD", - "id": "59MDSNIYoOY0WRYuodzJPD", - "name": "Duskus", - "type": "artist", - "uri": "spotify:artist:59MDSNIYoOY0WRYuodzJPD" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7Eu1txygG6nJttLHbZdQOh" - }, - "href": "https://api.spotify.com/v1/artists/7Eu1txygG6nJttLHbZdQOh", - "id": "7Eu1txygG6nJttLHbZdQOh", - "name": "Four Tet", - "type": "artist", - "uri": "spotify:artist:7Eu1txygG6nJttLHbZdQOh" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/3pK4EcflBpG1Kpmjk5LK2R" - }, - "href": "https://api.spotify.com/v1/artists/3pK4EcflBpG1Kpmjk5LK2R", - "id": "3pK4EcflBpG1Kpmjk5LK2R", - "name": "Joy Anonymous", - "type": "artist", - "uri": "spotify:artist:3pK4EcflBpG1Kpmjk5LK2R" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/5he5w2lnU9x7JFhnwcekXX" - }, - "href": "https://api.spotify.com/v1/artists/5he5w2lnU9x7JFhnwcekXX", - "id": "5he5w2lnU9x7JFhnwcekXX", - "name": "Skrillex", - "type": "artist", - "uri": "spotify:artist:5he5w2lnU9x7JFhnwcekXX" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - 
"MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 453068, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/3i9QKRl5Ql3pgUfNdYBVTc" - }, - "href": "https://api.spotify.com/v1/tracks/3i9QKRl5Ql3pgUfNdYBVTc", - "id": "3i9QKRl5Ql3pgUfNdYBVTc", - "name": "glow", - "preview_url": "https://p.scdn.co/mp3-preview/4ddd31cf8fe9f76b8aa72e2a1b5d51ccc9e00e5a?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 12, - "type": "track", - "uri": "spotify:track:3i9QKRl5Ql3pgUfNdYBVTc", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 31749, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/2OLH9ukOFDVBMuVUuy2sFW" - }, - "href": "https://api.spotify.com/v1/tracks/2OLH9ukOFDVBMuVUuy2sFW", - "id": "2OLH9ukOFDVBMuVUuy2sFW", - "name": ".seven", - "preview_url": "https://p.scdn.co/mp3-preview/cc0e8af8b91eff643b65fefdbc6b32fe2a7ad7db?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 13, - "type": "track", - "uri": "spotify:track:2OLH9ukOFDVBMuVUuy2sFW", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": 
"https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 220656, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/3DzWFxyzsAVblVNndiU9CW" - }, - "href": "https://api.spotify.com/v1/tracks/3DzWFxyzsAVblVNndiU9CW", - "id": "3DzWFxyzsAVblVNndiU9CW", - "name": "i saw you", - "preview_url": "https://p.scdn.co/mp3-preview/e2b23e98a35b1ccbce037d34c2c38c49b2371142?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 14, - "type": "track", - "uri": "spotify:track:3DzWFxyzsAVblVNndiU9CW", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - 
"MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 15037, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/1aTcAf7K1ym8lBcuu8nmJA" - }, - "href": "https://api.spotify.com/v1/tracks/1aTcAf7K1ym8lBcuu8nmJA", - "id": "1aTcAf7K1ym8lBcuu8nmJA", - "name": ".eight", - "preview_url": "https://p.scdn.co/mp3-preview/d2910a98ace82ead87c06aad442b0f8104263feb?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 15, - "type": "track", - "uri": "spotify:track:1aTcAf7K1ym8lBcuu8nmJA", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/5s6TJEuHTr9GR894wc6VfP" - }, - "href": "https://api.spotify.com/v1/artists/5s6TJEuHTr9GR894wc6VfP", - "id": "5s6TJEuHTr9GR894wc6VfP", - "name": "Emmylou Harris", - "type": "artist", - "uri": "spotify:artist:5s6TJEuHTr9GR894wc6VfP" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 200737, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/4S05mkyTtAiWy5l4umch0X" - }, - "href": "https://api.spotify.com/v1/tracks/4S05mkyTtAiWy5l4umch0X", - "id": "4S05mkyTtAiWy5l4umch0X", - "name": "where will i be", - "preview_url": 
"https://p.scdn.co/mp3-preview/c8b398eaced8e21a97b1460480ab58a2c44364dd?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 16, - "type": "track", - "uri": "spotify:track:4S05mkyTtAiWy5l4umch0X", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 19060, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/5aNwAqN5Gk5oZIwW5KfhXN" - }, - "href": "https://api.spotify.com/v1/tracks/5aNwAqN5Gk5oZIwW5KfhXN", - "id": "5aNwAqN5Gk5oZIwW5KfhXN", - "name": ".nine", - "preview_url": "https://p.scdn.co/mp3-preview/d444f5f0921bee7a12beff1649a3cf295a822c76?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 17, - "type": "track", - "uri": "spotify:track:5aNwAqN5Gk5oZIwW5KfhXN", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/3pK4EcflBpG1Kpmjk5LK2R" - }, - "href": "https://api.spotify.com/v1/artists/3pK4EcflBpG1Kpmjk5LK2R", - "id": "3pK4EcflBpG1Kpmjk5LK2R", - "name": "Joy Anonymous", - "type": "artist", - "uri": "spotify:artist:3pK4EcflBpG1Kpmjk5LK2R" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - 
"PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 344068, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/4A8tKYA7gwZzQ4jVwIv1sv" - }, - "href": "https://api.spotify.com/v1/tracks/4A8tKYA7gwZzQ4jVwIv1sv", - "id": "4A8tKYA7gwZzQ4jVwIv1sv", - "name": "peace u need", - "preview_url": "https://p.scdn.co/mp3-preview/d333ce79ff70629051c9db4c5850b2b22288df71?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 18, - "type": "track", - "uri": "spotify:track:4A8tKYA7gwZzQ4jVwIv1sv", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - 
"duration_ms": 29540, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/2feEZkLf7dZUueeVBNsdor" - }, - "href": "https://api.spotify.com/v1/tracks/2feEZkLf7dZUueeVBNsdor", - "id": "2feEZkLf7dZUueeVBNsdor", - "name": ".ten", - "preview_url": "https://p.scdn.co/mp3-preview/72d66fa681d50abf590a9cca9553b112fa03c1ee?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 19, - "type": "track", - "uri": "spotify:track:2feEZkLf7dZUueeVBNsdor", - "is_local": false - }, - { - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" - }, - "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", - "id": "4oLeXFyACqeem2VImYeBFe", - "name": "Fred again..", - "type": "artist", - "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/3IunaFjvNKj98JW89JYv9u" - }, - "href": "https://api.spotify.com/v1/artists/3IunaFjvNKj98JW89JYv9u", - "id": "3IunaFjvNKj98JW89JYv9u", - "name": "The Japanese House", - "type": "artist", - "uri": "spotify:artist:3IunaFjvNKj98JW89JYv9u" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/6M98IZJK2tx6x2YVyHua9K" - }, - "href": "https://api.spotify.com/v1/artists/6M98IZJK2tx6x2YVyHua9K", - "id": "6M98IZJK2tx6x2YVyHua9K", - "name": "Scott Hardkiss", - "type": "artist", - "uri": "spotify:artist:6M98IZJK2tx6x2YVyHua9K" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 314007, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/track/61pyjiweMDS1h930OgS0XO" - }, - "href": "https://api.spotify.com/v1/tracks/61pyjiweMDS1h930OgS0XO", - "id": "61pyjiweMDS1h930OgS0XO", - "name": "backseat", - "preview_url": "https://p.scdn.co/mp3-preview/f14667711679c1f2c09e356ed12f1a1fad7464ac?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 20, - "type": "track", - "uri": "spotify:track:61pyjiweMDS1h930OgS0XO", - "is_local": false - } - ] - }, - "copyrights": [ - { - "text": "Under exclusive licence to Warner 
Music UK Limited. An Atlantic Records UK., © 2024 Fred Gibson", - "type": "C" - }, - { - "text": "Under exclusive licence to Warner Music UK Limited. An Atlantic Records UK., ℗ 2024 Fred Gibson", - "type": "P" - } - ], - "external_ids": { - "upc": "5021732457110" - }, - "genres": [], - "label": "Atlantic Records UK", - "popularity": 80 - } - } - ], - "limit": 20, - "next": "https://api.spotify.com/v1/me/albums?offset=20&limit=20&locale=en-US,en;q%3D0.5", - "offset": 0, - "previous": null, - "total": 34 -} diff --git a/tests/components/spotify/fixtures/saved_shows.json b/tests/components/spotify/fixtures/saved_shows.json deleted file mode 100644 index acfd5a1b465..00000000000 --- a/tests/components/spotify/fixtures/saved_shows.json +++ /dev/null @@ -1,462 +0,0 @@ -{ - "href": "https://api.spotify.com/v1/me/shows?offset=0&limit=20&locale=en-US,en;q%3D0.5", - "items": [ - { - "added_at": "2023-08-10T08:17:09Z", - "show": { - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "copyrights": [], - "description": "We’ll all giggle along at naughty jokes, your dating horror stories and give questionable recommendations on movies, food and relationships. This podcast is hot, fun garbage and we (Toni Lodge and Ryan Jon here in Melbourne, Australia) would love you to climb aboard and be our friends. Hosted on Acast. See acast.com/privacy for more information.", - "explicit": true, - "external_urls": { - "spotify": "https://open.spotify.com/show/5OzkclFjD6iAjtAuo7aIYt" - }, - "href": "https://api.spotify.com/v1/shows/5OzkclFjD6iAjtAuo7aIYt", - "html_description": "We’ll all giggle along at naughty jokes, your dating horror stories and give questionable recommendations on movies, food and relationships. This podcast is hot, fun garbage and we (Toni Lodge and Ryan Jon here in Melbourne, Australia) would love you to climb aboard and be our friends.
Hosted on Acast. See acast.com/privacy for more information.
", - "id": "5OzkclFjD6iAjtAuo7aIYt", - "images": [ - { - "height": 64, - "url": "https://i.scdn.co/image/ab6765630000f68db5f65a943ef4f707bf79949b", - "width": 64 - }, - { - "height": 300, - "url": "https://i.scdn.co/image/ab67656300005f1fb5f65a943ef4f707bf79949b", - "width": 300 - }, - { - "height": 640, - "url": "https://i.scdn.co/image/ab6765630000ba8ab5f65a943ef4f707bf79949b", - "width": 640 - } - ], - "is_externally_hosted": false, - "languages": ["en"], - "media_type": "audio", - "name": "Toni and Ryan", - "publisher": "Toni Lodge and Ryan Jon", - "total_episodes": 741, - "type": "show", - "uri": "spotify:show:5OzkclFjD6iAjtAuo7aIYt" - } - }, - { - "added_at": "2022-09-15T23:48:23Z", - "show": { - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "copyrights": [], - "description": "Welcome to BLAST Push To Talk, Counter-Strike like you’ve never heard it before.Join our host Moses and our field reporters Scrawny and Launders as they interview pro players, share their hot takes on the latest and greatest news in the CS world courtesy of EPOS.", - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/show/6XYRres0KZtnTqKcLavWR2" - }, - "href": "https://api.spotify.com/v1/shows/6XYRres0KZtnTqKcLavWR2", - "html_description": "Welcome to BLAST Push To Talk, Counter-Strike like you’ve never heard it before.
Join our host Moses and our field reporters Scrawny and Launders as they interview pro players, share their hot takes on the latest and greatest news in the CS world courtesy of EPOS.", - "id": "6XYRres0KZtnTqKcLavWR2", - "images": [ - { - "height": 64, - "url": "https://i.scdn.co/image/ab6765630000f68d5fccb05c5685c081d5c2ad9c", - "width": 64 - }, - { - "height": 300, - "url": "https://i.scdn.co/image/ab67656300005f1f5fccb05c5685c081d5c2ad9c", - "width": 300 - }, - { - "height": 640, - "url": "https://i.scdn.co/image/ab6765630000ba8a5fccb05c5685c081d5c2ad9c", - "width": 640 - } - ], - "is_externally_hosted": false, - "languages": ["en"], - "media_type": "audio", - "name": "BLAST Push To Talk", - "publisher": "BLAST Premier", - "total_episodes": 19, - "type": "show", - "uri": "spotify:show:6XYRres0KZtnTqKcLavWR2" - } - } - ], - "limit": 20, - "next": null, - "offset": 0, - "previous": null, - "total": 10 -} diff --git a/tests/components/spotify/fixtures/saved_tracks.json b/tests/components/spotify/fixtures/saved_tracks.json deleted file mode 100644 index e80d5b39dcd..00000000000 --- a/tests/components/spotify/fixtures/saved_tracks.json +++ /dev/null @@ -1,978 +0,0 @@ -{ - "href": "https://api.spotify.com/v1/me/tracks?offset=0&limit=20&locale=en-US,en;q%3D0.5", - "items": [ - { - "added_at": "2024-10-06T11:35:02Z", - "track": { - "album": { - "album_type": "single", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7zrkALJ9ayRjzysp4QYoEg" - }, - "href": "https://api.spotify.com/v1/artists/7zrkALJ9ayRjzysp4QYoEg", - "id": "7zrkALJ9ayRjzysp4QYoEg", - "name": "Maribou State", - "type": "artist", - "uri": "spotify:artist:7zrkALJ9ayRjzysp4QYoEg" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/5vssQp6TyMHsx4mihKVAsC" - }, - "href": "https://api.spotify.com/v1/artists/5vssQp6TyMHsx4mihKVAsC", - "id": "5vssQp6TyMHsx4mihKVAsC", - "name": "Holly Walker", - "type": "artist", - "uri": "spotify:artist:5vssQp6TyMHsx4mihKVAsC" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "external_urls": { - 
"spotify": "https://open.spotify.com/album/3BYf1IG8EqDbhzdpljcFWY" - }, - "href": "https://api.spotify.com/v1/albums/3BYf1IG8EqDbhzdpljcFWY", - "id": "3BYf1IG8EqDbhzdpljcFWY", - "images": [ - { - "height": 640, - "width": 640, - "url": "https://i.scdn.co/image/ab67616d0000b273ac9dd449e38e5e8952fd22ad" - }, - { - "height": 300, - "width": 300, - "url": "https://i.scdn.co/image/ab67616d00001e02ac9dd449e38e5e8952fd22ad" - }, - { - "height": 64, - "width": 64, - "url": "https://i.scdn.co/image/ab67616d00004851ac9dd449e38e5e8952fd22ad" - } - ], - "is_playable": true, - "name": "Otherside", - "release_date": "2024-10-02", - "release_date_precision": "day", - "total_tracks": 2, - "type": "album", - "uri": "spotify:album:3BYf1IG8EqDbhzdpljcFWY" - }, - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/7zrkALJ9ayRjzysp4QYoEg" - }, - "href": "https://api.spotify.com/v1/artists/7zrkALJ9ayRjzysp4QYoEg", - "id": "7zrkALJ9ayRjzysp4QYoEg", - "name": "Maribou State", - "type": "artist", - "uri": "spotify:artist:7zrkALJ9ayRjzysp4QYoEg" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/5vssQp6TyMHsx4mihKVAsC" - }, - "href": "https://api.spotify.com/v1/artists/5vssQp6TyMHsx4mihKVAsC", - "id": "5vssQp6TyMHsx4mihKVAsC", - "name": "Holly Walker", - "type": "artist", - "uri": "spotify:artist:5vssQp6TyMHsx4mihKVAsC" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 233211, - "explicit": false, - "external_ids": { - "isrc": "GBCFB2300767" - }, - "external_urls": { - "spotify": "https://open.spotify.com/track/2pj2A25YQK4uMxhZheNx7R" - }, - "href": "https://api.spotify.com/v1/tracks/2pj2A25YQK4uMxhZheNx7R", - "id": "2pj2A25YQK4uMxhZheNx7R", - "is_local": false, - "is_playable": true, - "name": "Otherside", - "popularity": 47, - "preview_url": "https://p.scdn.co/mp3-preview/f18011c5d9a973f85ed8dce6d698e6043efdcf60?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 1, - "type": "track", - "uri": "spotify:track:2pj2A25YQK4uMxhZheNx7R" - } - }, - { - "added_at": 
"2024-10-06T07:37:53Z", - "track": { - "album": { - "album_type": "single", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/0HHa7ZJZxUQlg5l2mB0N0f" - }, - "href": "https://api.spotify.com/v1/artists/0HHa7ZJZxUQlg5l2mB0N0f", - "id": "0HHa7ZJZxUQlg5l2mB0N0f", - "name": "Marlon Hoffstadt", - "type": "artist", - "uri": "spotify:artist:0HHa7ZJZxUQlg5l2mB0N0f" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/68sTQgQtPe9e4Bb7OtoqET" - }, - "href": "https://api.spotify.com/v1/artists/68sTQgQtPe9e4Bb7OtoqET", - "id": "68sTQgQtPe9e4Bb7OtoqET", - "name": "Crybaby", - "type": "artist", - "uri": "spotify:artist:68sTQgQtPe9e4Bb7OtoqET" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4lBSzo2LS8asEzoePv6VLM" - }, - "href": "https://api.spotify.com/v1/artists/4lBSzo2LS8asEzoePv6VLM", - "id": "4lBSzo2LS8asEzoePv6VLM", - "name": "DJ Daddy Trance", - "type": "artist", - "uri": "spotify:artist:4lBSzo2LS8asEzoePv6VLM" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/1ElP3WFqq5sgMcc3ScIR4l" - }, - "href": "https://api.spotify.com/v1/albums/1ElP3WFqq5sgMcc3ScIR4l", - "id": "1ElP3WFqq5sgMcc3ScIR4l", - "images": [ - { - "height": 640, - "width": 640, - "url": "https://i.scdn.co/image/ab67616d0000b2733d710ab088ff797e80cc5aed" - }, - { - "height": 300, - "width": 300, - "url": "https://i.scdn.co/image/ab67616d00001e023d710ab088ff797e80cc5aed" - }, - { - "height": 64, - "width": 64, - "url": "https://i.scdn.co/image/ab67616d000048513d710ab088ff797e80cc5aed" - } - ], - "is_playable": true, - "name": "I Think I Need A DJ", - "release_date": "2024-09-20", - "release_date_precision": "day", - "total_tracks": 1, - "type": "album", - "uri": "spotify:album:1ElP3WFqq5sgMcc3ScIR4l" - }, - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/0HHa7ZJZxUQlg5l2mB0N0f" - }, - "href": "https://api.spotify.com/v1/artists/0HHa7ZJZxUQlg5l2mB0N0f", - "id": "0HHa7ZJZxUQlg5l2mB0N0f", - "name": "Marlon 
Hoffstadt", - "type": "artist", - "uri": "spotify:artist:0HHa7ZJZxUQlg5l2mB0N0f" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/68sTQgQtPe9e4Bb7OtoqET" - }, - "href": "https://api.spotify.com/v1/artists/68sTQgQtPe9e4Bb7OtoqET", - "id": "68sTQgQtPe9e4Bb7OtoqET", - "name": "Crybaby", - "type": "artist", - "uri": "spotify:artist:68sTQgQtPe9e4Bb7OtoqET" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4lBSzo2LS8asEzoePv6VLM" - }, - "href": "https://api.spotify.com/v1/artists/4lBSzo2LS8asEzoePv6VLM", - "id": "4lBSzo2LS8asEzoePv6VLM", - "name": "DJ Daddy Trance", - "type": "artist", - "uri": "spotify:artist:4lBSzo2LS8asEzoePv6VLM" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 155000, - "explicit": false, - "external_ids": { - "isrc": "DEKF22400978" - }, - "external_urls": { - "spotify": "https://open.spotify.com/track/2lKOI1nwP5qZtZC7TGQVY8" - }, - "href": "https://api.spotify.com/v1/tracks/2lKOI1nwP5qZtZC7TGQVY8", - "id": "2lKOI1nwP5qZtZC7TGQVY8", - "is_local": false, - "is_playable": true, - "name": "I Think I Need A DJ", - "popularity": 53, - "preview_url": "https://p.scdn.co/mp3-preview/ad1c9d47d0f5ed500118e9dfc2558bd77612cae3?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 1, - "type": "track", - "uri": "spotify:track:2lKOI1nwP5qZtZC7TGQVY8" - } - } - ], - "limit": 2, - "next": "https://api.spotify.com/v1/me/tracks?offset=20&limit=20&locale=en-US,en;q%3D0.5", - "offset": 0, - "previous": null, - "total": 4816 -} diff --git a/tests/components/spotify/fixtures/show.json b/tests/components/spotify/fixtures/show.json deleted file mode 100644 index d9a89b2cc8d..00000000000 --- a/tests/components/spotify/fixtures/show.json +++ /dev/null @@ -1,317 +0,0 @@ -{ - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - 
"MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "BY", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "copyrights": [], - "description": "Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube \"Scientists\". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.", - "html_description": "
Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube "Scientists". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.
", - "explicit": true, - "external_urls": { - "spotify": "https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD" - }, - "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD?locale=en-US%2Cen%3Bq%3D0.5", - "id": "1Y9ExMgMxoBVrgrfU7u0nD", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", - "width": 640 - }, - { - "height": 300, - "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", - "width": 300 - }, - { - "height": 64, - "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", - "width": 64 - } - ], - "is_externally_hosted": false, - "languages": ["en-US"], - "media_type": "audio", - "name": "Safety Third", - "publisher": "Safety Third ", - "type": "show", - "uri": "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD", - "total_episodes": 120, - "episodes": { - "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD/episodes?offset=0&limit=50&locale=en-US,en;q%3D0.5", - "limit": 50, - "next": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD/episodes?offset=50&limit=50&locale=en-US,en;q%3D0.5", - "offset": 0, - "previous": null, - "total": 120, - "items": [ - { - "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/06lRxUmh8UNVTByuyxLYqh/clip_132296_192296.mp3", - "description": "Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", - "html_description": "
Patreon: https://www.patreon.com/safetythird
Merch: https://safetythird.shop
YouTube: https://www.youtube.com/@safetythird/
Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", - "duration_ms": 3690161, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/episode/3o0RYoo5iOMKSmEbunsbvW" - }, - "href": "https://api.spotify.com/v1/episodes/3o0RYoo5iOMKSmEbunsbvW", - "id": "3o0RYoo5iOMKSmEbunsbvW", - "images": [ - { - "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", - "height": 640, - "width": 640 - }, - { - "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", - "height": 300, - "width": 300 - }, - { - "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", - "height": 64, - "width": 64 - } - ], - "is_externally_hosted": true, - "is_playable": true, - "language": "en-US", - "languages": ["en-US"], - "name": "My Squirrel Has Brain Damage - Safety Third 119", - "release_date": "2024-07-26", - "release_date_precision": "day", - "resume_point": { - "fully_played": false, - "resume_position_ms": 0 - }, - "type": "episode", - "uri": "spotify:episode:3o0RYoo5iOMKSmEbunsbvW" - }, - { - "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/6msRFio3561me28DofTad7/clip_570865_630865.mp3", - "description": "Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", - "html_description": "
Patreon: https://www.patreon.com/safetythird
Merch: https://safetythird.shop
YouTube: https://www.youtube.com/@safetythird/
Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", - "duration_ms": 5690591, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/episode/7CbsFHQq8ljztiUSGw46Fj" - }, - "href": "https://api.spotify.com/v1/episodes/7CbsFHQq8ljztiUSGw46Fj", - "id": "7CbsFHQq8ljztiUSGw46Fj", - "images": [ - { - "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", - "height": 640, - "width": 640 - }, - { - "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", - "height": 300, - "width": 300 - }, - { - "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", - "height": 64, - "width": 64 - } - ], - "is_externally_hosted": true, - "is_playable": true, - "language": "en-US", - "languages": ["en-US"], - "name": "Math Haters vs Math Nerd - Safety Third 118", - "release_date": "2024-07-18", - "release_date_precision": "day", - "resume_point": { - "fully_played": false, - "resume_position_ms": 0 - }, - "type": "episode", - "uri": "spotify:episode:7CbsFHQq8ljztiUSGw46Fj" - } - ] - } -} diff --git a/tests/components/spotify/fixtures/show_episodes.json b/tests/components/spotify/fixtures/show_episodes.json deleted file mode 100644 index 0189fb10c11..00000000000 --- a/tests/components/spotify/fixtures/show_episodes.json +++ /dev/null @@ -1,94 +0,0 @@ -{ - "href": "https://api.spotify.com/v1/shows/0e30iIgSffe6xJhFKe35Db/episodes?offset=0&limit=20&locale=en-US,en;q%3D0.5", - "items": [ - { - "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/2O4OLlf7wsvLzCeUbNB3UK/clip_1204000_1256300.mp3", - "description": "The Great War of 2077 and how the Fallout world diverged from our own.Sponsors: Patreon: Become a patron! https://patreon.com/falloutlorecastBuy cool stuff and support the show!Fallout 76: https://amzn.to/3h99B3UFallout Cookbook: https://amzn.to/3aGjeodFallout Boardgame: https://amzn.to/2EgmBq3The Art of Fallout 4: https://amzn.to/3gfQST3Get a REAL Nuca-Cola Quantum! https://amzn.to/322O3zGFallout Funco Pop Figures: https://amzn.to/3gcYsOcLinks: Live Shows every Monday Night and game streams: twitch.tv/robotsradioFallout Hub Podcast w/ Tom & others: https://anchor.fm/the-fallout-hubTalk Fallout and join the Robots Radio fam: Discord: discord.gg/JXKfVhMStay plugged in on Twitter: twitter.com/falloutlorecastRobots Radio Youtube: youtube.com/c/r0b0tsSend me a note! Email: falloutlorecast@gmail.com www.robotsradio.netOur Sponsors:* Check out Bandai Namco: unknown9.com/FALLOUTLOREAdvertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", - "duration_ms": 2117616, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/episode/3ssmxnilHYaKhwRWoBGMbU" - }, - "href": "https://api.spotify.com/v1/episodes/3ssmxnilHYaKhwRWoBGMbU", - "html_description": "
The Great War of 2077 and how the Fallout world diverged from our own.
Sponsors: Patreon: Become a patron! https://patreon.com/falloutlorecast
Buy cool stuff and support the show!
Fallout 76: https://amzn.to/3h99B3U
Fallout Cookbook: https://amzn.to/3aGjeod
Fallout Boardgame: https://amzn.to/2EgmBq3
The Art of Fallout 4: https://amzn.to/3gfQST3
Get a REAL Nuca-Cola Quantum! https://amzn.to/322O3zG
Fallout Funco Pop Figures: https://amzn.to/3gcYsOc
Links: Live Shows every Monday Night and game streams: twitch.tv/robotsradio
Fallout Hub Podcast w/ Tom & others: https://anchor.fm/the-fallout-hub
Talk Fallout and join the Robots Radio fam: Discord: discord.gg/JXKfVhM
Stay plugged in on Twitter: twitter.com/falloutlorecast
Robots Radio Youtube: youtube.com/c/r0b0ts
Send me a note! Email: falloutlorecast@gmail.com www.robotsradio.net
Our Sponsors:
* Check out Bandai Namco: unknown9.com/FALLOUTLORE
Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", - "id": "3ssmxnilHYaKhwRWoBGMbU", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab6765630000ba8af44e9ef63c2d6fb44cb0c9bf", - "width": 640 - }, - { - "height": 300, - "url": "https://i.scdn.co/image/ab67656300005f1ff44e9ef63c2d6fb44cb0c9bf", - "width": 300 - }, - { - "height": 64, - "url": "https://i.scdn.co/image/ab6765630000f68df44e9ef63c2d6fb44cb0c9bf", - "width": 64 - } - ], - "is_externally_hosted": false, - "is_playable": true, - "language": "en-US", - "languages": ["en-US"], - "name": "The Great War - Fallout Lorecast EP 1", - "release_date": "2019-01-09", - "release_date_precision": "day", - "resume_point": { - "fully_played": false, - "resume_position_ms": 0 - }, - "type": "episode", - "uri": "spotify:episode:3ssmxnilHYaKhwRWoBGMbU" - }, - { - "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/0PGDORXTYiO2Til9131l6X/clip_310950_371500.mp3", - "description": "Support the show to keep it going, plus get great rewards at patreon.com/falloutlorecast Sponsors: Patreon: Become a patron! https://patreon.com/falloutlorecast Audiobooks.com - Get 3 FREE Audiobooks! https://www.dpbolvw.net/click-100173810-11099382?sid=flore Gamefly - Want 2 months of rentals for the price of 1 at Gamefly? https://www.dpbolvw.net/click-100173810-10495782?sid=flore Loot Crate - 15% off Loot Crate. Click the link and use coupon code: ROBOTSRADIO https://www.dpbolvw.net/click-100173810-13902093?sid=flore GreenMan Gaming - Get awesome discounts on games. https://www.dpbolvw.net/click-100173810-13764551?sid=flore NordVPN - Stay Safe on the Internet and get 68% off. https://www.dpbolvw.net/click-100173810-12814552?sid=flore Buy cool stuff and support the show! Fallout 76: https://amzn.to/3h99B3U Fallout Cookbook: https://amzn.to/3aGjeod Fallout Boardgame: https://amzn.to/2EgmBq3 The Art of Fallout 4: https://amzn.to/3gfQST3 Get a REAL Nuca-Cola Quantum! https://amzn.to/322O3zG Fallout Funco Pop Figures: https://amzn.to/3gcYsOc Links: Live Shows every Monday Night and game streams: twitch.tv/robotsradio Fallout Hub Podcast w/ Tom & others: https://anchor.fm/the-fallout-hub Talk Fallout and join the Robots Radio fam: Discord: discord.gg/JXKfVhM Stay plugged in on Twitter: twitter.com/falloutlorecast Robots Radio Youtube: youtube.com/c/r0b0ts Send me a note! Email: falloutlorecast@gmail.com www.robotsradio.netOur Sponsors:* Check out Bandai Namco: unknown9.com/FALLOUTLOREAdvertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", - "duration_ms": 2376881, - "explicit": false, - "external_urls": { - "spotify": "https://open.spotify.com/episode/1bbj9aqeeZ3UMUlcWN0S03" - }, - "href": "https://api.spotify.com/v1/episodes/1bbj9aqeeZ3UMUlcWN0S03", - "html_description": "
Support the show to keep it going, plus get great rewards at patreon.com/falloutlorecast Sponsors: Patreon: Become a patron! https://patreon.com/falloutlorecast Audiobooks.com - Get 3 FREE Audiobooks! https://www.dpbolvw.net/click-100173810-11099382?sid=flore Gamefly - Want 2 months of rentals for the price of 1 at Gamefly? https://www.dpbolvw.net/click-100173810-10495782?sid=flore Loot Crate - 15% off Loot Crate. Click the link and use coupon code: ROBOTSRADIO https://www.dpbolvw.net/click-100173810-13902093?sid=flore GreenMan Gaming - Get awesome discounts on games. https://www.dpbolvw.net/click-100173810-13764551?sid=flore NordVPN - Stay Safe on the Internet and get 68% off. https://www.dpbolvw.net/click-100173810-12814552?sid=flore Buy cool stuff and support the show! Fallout 76: https://amzn.to/3h99B3U Fallout Cookbook: https://amzn.to/3aGjeod Fallout Boardgame: https://amzn.to/2EgmBq3 The Art of Fallout 4: https://amzn.to/3gfQST3 Get a REAL Nuca-Cola Quantum! https://amzn.to/322O3zG Fallout Funco Pop Figures: https://amzn.to/3gcYsOc Links: Live Shows every Monday Night and game streams: twitch.tv/robotsradio Fallout Hub Podcast w/ Tom & others: https://anchor.fm/the-fallout-hub Talk Fallout and join the Robots Radio fam: Discord: discord.gg/JXKfVhM Stay plugged in on Twitter: twitter.com/falloutlorecast Robots Radio Youtube: youtube.com/c/r0b0ts Send me a note! Email: falloutlorecast@gmail.com www.robotsradio.net
Our Sponsors:
* Check out Bandai Namco: unknown9.com/FALLOUTLORE
Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", - "id": "1bbj9aqeeZ3UMUlcWN0S03", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab6765630000ba8a655b54a66471089d27dbb03f", - "width": 640 - }, - { - "height": 300, - "url": "https://i.scdn.co/image/ab67656300005f1f655b54a66471089d27dbb03f", - "width": 300 - }, - { - "height": 64, - "url": "https://i.scdn.co/image/ab6765630000f68d655b54a66471089d27dbb03f", - "width": 64 - } - ], - "is_externally_hosted": false, - "is_playable": true, - "language": "en-US", - "languages": ["en-US"], - "name": "Who Dropped the First Bomb?", - "release_date": "2019-01-15", - "release_date_precision": "day", - "resume_point": { - "fully_played": false, - "resume_position_ms": 0 - }, - "type": "episode", - "uri": "spotify:episode:1bbj9aqeeZ3UMUlcWN0S03" - } - ], - "limit": 20, - "next": "https://api.spotify.com/v1/shows/0e30iIgSffe6xJhFKe35Db/episodes?offset=20&limit=20&locale=en-US,en;q%3D0.5", - "offset": 0, - "previous": null, - "total": 323 -} diff --git a/tests/components/spotify/fixtures/top_artists.json b/tests/components/spotify/fixtures/top_artists.json deleted file mode 100644 index cd39d57e4ee..00000000000 --- a/tests/components/spotify/fixtures/top_artists.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "items": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/74Yus6IHfa3tWZzXXAYtS2" - }, - "followers": { - "href": null, - "total": 488 - }, - "genres": [], - "href": "https://api.spotify.com/v1/artists/74Yus6IHfa3tWZzXXAYtS2", - "id": "74Yus6IHfa3tWZzXXAYtS2", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab6761610000e5ebf749f53f8bb5ffccf6105ce3", - "width": 640 - }, - { - "height": 320, - "url": "https://i.scdn.co/image/ab67616100005174f749f53f8bb5ffccf6105ce3", - "width": 320 - }, - { - "height": 160, - "url": "https://i.scdn.co/image/ab6761610000f178f749f53f8bb5ffccf6105ce3", - "width": 160 - } - ], - "name": "Onkruid", - "popularity": 7, - "type": "artist", - "uri": "spotify:artist:74Yus6IHfa3tWZzXXAYtS2" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/6s5ubAp65wXoTZefE01RNR" - }, - "followers": { - "href": null, - "total": 805497 - }, - "genres": [], - "href": "https://api.spotify.com/v1/artists/6s5ubAp65wXoTZefE01RNR", - "id": "6s5ubAp65wXoTZefE01RNR", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab6761610000e5eb8e750249623067fe3c557cf0", - "width": 640 - }, - { - "height": 320, - "url": "https://i.scdn.co/image/ab676161000051748e750249623067fe3c557cf0", - "width": 320 - }, - { - "height": 160, - "url": "https://i.scdn.co/image/ab6761610000f1788e750249623067fe3c557cf0", - "width": 160 - } - ], - "name": "Joost", - "popularity": 69, - "type": "artist", - "uri": "spotify:artist:6s5ubAp65wXoTZefE01RNR" - } - ], - "total": 192, - "limit": 20, - "offset": 0, - "href": "https://api.spotify.com/v1/me/top/artists?locale=en-US,en;q%3D0.5", - "next": "https://api.spotify.com/v1/me/top/artists?offset=20&limit=20&locale=en-US,en;q%3D0.5", - "previous": null -} diff --git a/tests/components/spotify/fixtures/top_tracks.json b/tests/components/spotify/fixtures/top_tracks.json deleted file mode 100644 index 9b99b5974f3..00000000000 --- a/tests/components/spotify/fixtures/top_tracks.json +++ /dev/null @@ -1,922 +0,0 @@ -{ - "items": [ - { - "album": { - "album_type": "SINGLE", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/0PCCGZ0wGLizHt2KZ7hhA2" - }, - "href": 
"https://api.spotify.com/v1/artists/0PCCGZ0wGLizHt2KZ7hhA2", - "id": "0PCCGZ0wGLizHt2KZ7hhA2", - "name": "Artemas", - "type": "artist", - "uri": "spotify:artist:0PCCGZ0wGLizHt2KZ7hhA2" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "external_urls": { - "spotify": "https://open.spotify.com/album/45Qix7gFNajr6IofEIhhE4" - }, - "href": "https://api.spotify.com/v1/albums/45Qix7gFNajr6IofEIhhE4", - "id": "45Qix7gFNajr6IofEIhhE4", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab67616d0000b273c88e6a4447087f41eb388b14", - "width": 640 - }, - { - "height": 300, - "url": "https://i.scdn.co/image/ab67616d00001e02c88e6a4447087f41eb388b14", - "width": 300 - }, - { - "height": 64, - "url": "https://i.scdn.co/image/ab67616d00004851c88e6a4447087f41eb388b14", - "width": 64 - } - ], - "name": "i like the way you kiss me (burnt)", - "release_date": "2024-03-26", - "release_date_precision": "day", - "total_tracks": 2, - "type": "album", - "uri": "spotify:album:45Qix7gFNajr6IofEIhhE4" - }, - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/0PCCGZ0wGLizHt2KZ7hhA2" - }, - "href": "https://api.spotify.com/v1/artists/0PCCGZ0wGLizHt2KZ7hhA2", - "id": "0PCCGZ0wGLizHt2KZ7hhA2", - "name": "Artemas", - "type": "artist", - "uri": "spotify:artist:0PCCGZ0wGLizHt2KZ7hhA2" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - 
"LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "PR", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 142514, - "explicit": false, - "external_ids": { - "isrc": "QZJ842400387" - }, - "external_urls": { - "spotify": "https://open.spotify.com/track/3oRoMXsP2NRzm51lldj1RO" - }, - "href": "https://api.spotify.com/v1/tracks/3oRoMXsP2NRzm51lldj1RO", - "id": "3oRoMXsP2NRzm51lldj1RO", - "is_local": false, - "name": "i like the way you kiss me", - "popularity": 51, - "preview_url": "https://p.scdn.co/mp3-preview/6ce9233edb212fe7cf02273f4369d2c60c28e887?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 2, - "type": "track", - "uri": "spotify:track:3oRoMXsP2NRzm51lldj1RO" - }, - { - "album": { - "album_type": "SINGLE", - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4YLtscXsxbVgi031ovDDdh" - }, - "href": "https://api.spotify.com/v1/artists/4YLtscXsxbVgi031ovDDdh", - "id": "4YLtscXsxbVgi031ovDDdh", - "name": "Chris Stapleton", - "type": "artist", - "uri": "spotify:artist:4YLtscXsxbVgi031ovDDdh" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/6M2wZ9GZgrQXHCFfjv46we" - }, - "href": "https://api.spotify.com/v1/artists/6M2wZ9GZgrQXHCFfjv46we", - "id": "6M2wZ9GZgrQXHCFfjv46we", - "name": "Dua Lipa", - "type": "artist", - "uri": "spotify:artist:6M2wZ9GZgrQXHCFfjv46we" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "external_urls": { - "spotify": 
"https://open.spotify.com/album/3pjMBXbDLg2oGL7HtVxWgY" - }, - "href": "https://api.spotify.com/v1/albums/3pjMBXbDLg2oGL7HtVxWgY", - "id": "3pjMBXbDLg2oGL7HtVxWgY", - "images": [ - { - "height": 640, - "url": "https://i.scdn.co/image/ab67616d0000b27386f028311a5a746aa46b412f", - "width": 640 - }, - { - "height": 300, - "url": "https://i.scdn.co/image/ab67616d00001e0286f028311a5a746aa46b412f", - "width": 300 - }, - { - "height": 64, - "url": "https://i.scdn.co/image/ab67616d0000485186f028311a5a746aa46b412f", - "width": 64 - } - ], - "name": "Think I'm In Love With You (With Dua Lipa) (Live From The 59th ACM Awards)", - "release_date": "2024-05-01", - "release_date_precision": "day", - "total_tracks": 1, - "type": "album", - "uri": "spotify:album:3pjMBXbDLg2oGL7HtVxWgY" - }, - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/4YLtscXsxbVgi031ovDDdh" - }, - "href": "https://api.spotify.com/v1/artists/4YLtscXsxbVgi031ovDDdh", - "id": "4YLtscXsxbVgi031ovDDdh", - "name": "Chris Stapleton", - "type": "artist", - "uri": "spotify:artist:4YLtscXsxbVgi031ovDDdh" - }, - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/6M2wZ9GZgrQXHCFfjv46we" - }, - "href": "https://api.spotify.com/v1/artists/6M2wZ9GZgrQXHCFfjv46we", - "id": "6M2wZ9GZgrQXHCFfjv46we", - "name": "Dua Lipa", - "type": "artist", - "uri": "spotify:artist:6M2wZ9GZgrQXHCFfjv46we" - } - ], - "available_markets": [ - "AR", - "AU", - "AT", - "BE", - "BO", - "BR", - "BG", - "CA", - "CL", - "CO", - "CR", - "CY", - "CZ", - "DK", - "DO", - "DE", - "EC", - "EE", - "SV", - "FI", - "FR", - "GR", - "GT", - "HN", - "HK", - "HU", - "IS", - "IE", - "IT", - "LV", - "LT", - "LU", - "MY", - "MT", - "MX", - "NL", - "NZ", - "NI", - "NO", - "PA", - "PY", - "PE", - "PH", - "PL", - "PT", - "SG", - "SK", - "ES", - "SE", - "CH", - "TW", - "TR", - "UY", - "US", - "GB", - "AD", - "LI", - "MC", - "ID", - "JP", - "TH", - "VN", - "RO", - "IL", - "ZA", - "SA", - "AE", - "BH", - "QA", - "OM", - "KW", - "EG", - "MA", - "DZ", - "TN", - "LB", - "JO", - "PS", - "IN", - "KZ", - "MD", - "UA", - "AL", - "BA", - "HR", - "ME", - "MK", - "RS", - "SI", - "KR", - "BD", - "PK", - "LK", - "GH", - "KE", - "NG", - "TZ", - "UG", - "AG", - "AM", - "BS", - "BB", - "BZ", - "BT", - "BW", - "BF", - "CV", - "CW", - "DM", - "FJ", - "GM", - "GE", - "GD", - "GW", - "GY", - "HT", - "JM", - "KI", - "LS", - "LR", - "MW", - "MV", - "ML", - "MH", - "FM", - "NA", - "NR", - "NE", - "PW", - "PG", - "WS", - "SM", - "ST", - "SN", - "SC", - "SL", - "SB", - "KN", - "LC", - "VC", - "SR", - "TL", - "TO", - "TT", - "TV", - "VU", - "AZ", - "BN", - "BI", - "KH", - "CM", - "TD", - "KM", - "GQ", - "SZ", - "GA", - "GN", - "KG", - "LA", - "MO", - "MR", - "MN", - "NP", - "RW", - "TG", - "UZ", - "ZW", - "BJ", - "MG", - "MU", - "MZ", - "AO", - "CI", - "DJ", - "ZM", - "CD", - "CG", - "IQ", - "LY", - "TJ", - "VE", - "ET", - "XK" - ], - "disc_number": 1, - "duration_ms": 277066, - "explicit": false, - "external_ids": { - "isrc": "USUG12403278" - }, - "external_urls": { - "spotify": "https://open.spotify.com/track/69zgu5rlAie3IPZOEXLxyS" - }, - "href": "https://api.spotify.com/v1/tracks/69zgu5rlAie3IPZOEXLxyS", - "id": "69zgu5rlAie3IPZOEXLxyS", - "is_local": false, - "name": "Think I'm In Love With You (With Dua Lipa) (Live From The 59th ACM Awards)", - "popularity": 60, - "preview_url": "https://p.scdn.co/mp3-preview/c4fa0377538248e0a3c7e92bcf5a58be2f32b342?cid=cfe923b2d660439caf2b557b21f31221", - "track_number": 1, - "type": "track", - "uri": 
"spotify:track:69zgu5rlAie3IPZOEXLxyS" - } - ], - "total": 2951, - "limit": 20, - "offset": 0, - "href": "https://api.spotify.com/v1/me/top/tracks?locale=en-US,en;q%3D0.5", - "next": "https://api.spotify.com/v1/me/top/tracks?offset=20&limit=20&locale=en-US,en;q%3D0.5", - "previous": null -} diff --git a/tests/components/spotify/snapshots/test_diagnostics.ambr b/tests/components/spotify/snapshots/test_diagnostics.ambr deleted file mode 100644 index 161b6025ff3..00000000000 --- a/tests/components/spotify/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,432 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics_polling_instance - dict({ - 'devices': list([ - dict({ - 'device_id': '21dac6b0e0a1f181870fdc9749b2656466557666', - 'device_type': 'Computer', - 'is_active': False, - 'is_private_session': False, - 'is_restricted': False, - 'name': 'DESKTOP-BKC5SIK', - 'supports_volume': True, - 'volume_percent': 69, - }), - ]), - 'playback': dict({ - 'audio_features': dict({ - 'acousticness': 0.011, - 'danceability': 0.696, - 'energy': 0.905, - 'instrumentalness': 0.000905, - 'key': 3, - 'liveness': 0.302, - 'loudness': -2.743, - 'mode': 1, - 'speechiness': 0.103, - 'tempo': 114.944, - 'time_signature': 4, - 'valence': 0.625, - }), - 'current_playback': dict({ - 'context': dict({ - 'context_type': 'playlist', - 'external_urls': dict({ - 'spotify': 'https://open.spotify.com/playlist/2r35vbe6hHl6yDSMfjKgmm', - }), - 'href': 'https://api.spotify.com/v1/playlists/2r35vbe6hHl6yDSMfjKgmm', - 'uri': 'spotify:user:rushofficial:playlist:2r35vbe6hHl6yDSMfjKgmm', - }), - 'currently_playing_type': 'track', - 'device': dict({ - 'device_id': 'a19f7a03a25aff3e43f457a328a8ba67a8c44789', - 'device_type': 'Speaker', - 'is_active': True, - 'is_private_session': False, - 'is_restricted': False, - 'name': 'Master Bathroom Speaker', - 'supports_volume': True, - 'volume_percent': 25, - }), - 'is_playing': True, - 'item': dict({ - 'album': dict({ - 'album_id': '3nUNxSh2szhmN7iifAKv5i', - 'album_type': 'album', - 'artists': list([ - dict({ - 'artist_id': '2Hkut4rAAyrQxRdof7FVJq', - 'name': 'Rush', - 'uri': 'spotify:artist:2Hkut4rAAyrQxRdof7FVJq', - }), - ]), - 'images': list([ - dict({ - 'height': 640, - 'url': 'https://i.scdn.co/image/ab67616d0000b27306c0d7ebcabad0c39b566983', - 'width': 640, - }), - dict({ - 'height': 300, - 'url': 'https://i.scdn.co/image/ab67616d00001e0206c0d7ebcabad0c39b566983', - 'width': 300, - }), - dict({ - 'height': 64, - 'url': 'https://i.scdn.co/image/ab67616d0000485106c0d7ebcabad0c39b566983', - 'width': 64, - }), - ]), - 'name': 'Permanent Waves', - 'release_date': '1980-01-01', - 'release_date_precision': 'day', - 'total_tracks': 6, - 'uri': 'spotify:album:3nUNxSh2szhmN7iifAKv5i', - }), - 'artists': list([ - dict({ - 'artist_id': '2Hkut4rAAyrQxRdof7FVJq', - 'name': 'Rush', - 'uri': 'spotify:artist:2Hkut4rAAyrQxRdof7FVJq', - }), - ]), - 'disc_number': 1, - 'duration_ms': 296466, - 'explicit': False, - 'external_urls': dict({ - 'spotify': 'https://open.spotify.com/track/4e9hUiLsN4mx61ARosFi7p', - }), - 'href': 'https://api.spotify.com/v1/tracks/4e9hUiLsN4mx61ARosFi7p', - 'is_local': False, - 'name': 'The Spirit Of Radio', - 'track_id': '4e9hUiLsN4mx61ARosFi7p', - 'track_number': 1, - 'type': 'track', - 'uri': 'spotify:track:4e9hUiLsN4mx61ARosFi7p', - }), - 'progress_ms': 249367, - 'repeat_mode': 'off', - 'shuffle': False, - }), - 'dj_playlist': False, - 'playlist': dict({ - 'collaborative': False, - 'description': 'A playlist for testing pourposes', - 'external_urls': dict({ - 'spotify': 
'https://open.spotify.com/playlist/3cEYpjA9oz9GiPac4AsH4n', - }), - 'images': list([ - dict({ - 'height': None, - 'url': 'https://i.scdn.co/image/ab67706c0000da848d0ce13d55f634e290f744ba', - 'width': None, - }), - ]), - 'name': 'Spotify Web API Testing playlist', - 'object_type': 'playlist', - 'owner': dict({ - 'display_name': 'JMPerez²', - 'external_urls': dict({ - 'spotify': 'https://open.spotify.com/user/jmperezperez', - }), - 'href': 'https://api.spotify.com/v1/users/jmperezperez', - 'object_type': 'user', - 'owner_id': 'jmperezperez', - 'uri': 'spotify:user:jmperezperez', - }), - 'playlist_id': '3cEYpjA9oz9GiPac4AsH4n', - 'public': True, - 'tracks': dict({ - 'items': list([ - dict({ - 'track': dict({ - 'album': dict({ - 'album_id': '2pANdqPvxInB0YvcDiw4ko', - 'album_type': 'compilation', - 'artists': list([ - dict({ - 'artist_id': '0LyfQWJT6nXafLPZqxe9Of', - 'name': 'Various Artists', - 'uri': 'spotify:artist:0LyfQWJT6nXafLPZqxe9Of', - }), - ]), - 'images': list([ - dict({ - 'height': 640, - 'url': 'https://i.scdn.co/image/ab67616d0000b273ce6d0eef0c1ce77e5f95bbbc', - 'width': 640, - }), - dict({ - 'height': 300, - 'url': 'https://i.scdn.co/image/ab67616d00001e02ce6d0eef0c1ce77e5f95bbbc', - 'width': 300, - }), - dict({ - 'height': 64, - 'url': 'https://i.scdn.co/image/ab67616d00004851ce6d0eef0c1ce77e5f95bbbc', - 'width': 64, - }), - ]), - 'name': 'Progressive Psy Trance Picks Vol.8', - 'release_date': '2012-04-02', - 'release_date_precision': 'day', - 'total_tracks': 20, - 'uri': 'spotify:album:2pANdqPvxInB0YvcDiw4ko', - }), - 'artists': list([ - dict({ - 'artist_id': '6eSdhw46riw2OUHgMwR8B5', - 'name': 'Odiseo', - 'uri': 'spotify:artist:6eSdhw46riw2OUHgMwR8B5', - }), - ]), - 'disc_number': 1, - 'duration_ms': 376000, - 'explicit': False, - 'external_urls': dict({ - 'spotify': 'https://open.spotify.com/track/4rzfv0JLZfVhOhbSQ8o5jZ', - }), - 'href': 'https://api.spotify.com/v1/tracks/4rzfv0JLZfVhOhbSQ8o5jZ', - 'is_local': False, - 'name': 'Api', - 'track_id': '4rzfv0JLZfVhOhbSQ8o5jZ', - 'track_number': 10, - 'type': 'track', - 'uri': 'spotify:track:4rzfv0JLZfVhOhbSQ8o5jZ', - }), - }), - dict({ - 'track': dict({ - 'album': dict({ - 'album_id': '6nlfkk5GoXRL1nktlATNsy', - 'album_type': 'compilation', - 'artists': list([ - dict({ - 'artist_id': '0LyfQWJT6nXafLPZqxe9Of', - 'name': 'Various Artists', - 'uri': 'spotify:artist:0LyfQWJT6nXafLPZqxe9Of', - }), - ]), - 'images': list([ - dict({ - 'height': 640, - 'url': 'https://i.scdn.co/image/ab67616d0000b273aa2ff29970d9a63a49dfaeb2', - 'width': 640, - }), - dict({ - 'height': 300, - 'url': 'https://i.scdn.co/image/ab67616d00001e02aa2ff29970d9a63a49dfaeb2', - 'width': 300, - }), - dict({ - 'height': 64, - 'url': 'https://i.scdn.co/image/ab67616d00004851aa2ff29970d9a63a49dfaeb2', - 'width': 64, - }), - ]), - 'name': 'Wellness & Dreaming Source', - 'release_date': '2015-01-09', - 'release_date_precision': 'day', - 'total_tracks': 25, - 'uri': 'spotify:album:6nlfkk5GoXRL1nktlATNsy', - }), - 'artists': list([ - dict({ - 'artist_id': '5VQE4WOzPu9h3HnGLuBoA6', - 'name': 'Vlasta Marek', - 'uri': 'spotify:artist:5VQE4WOzPu9h3HnGLuBoA6', - }), - ]), - 'disc_number': 1, - 'duration_ms': 730066, - 'explicit': False, - 'external_urls': dict({ - 'spotify': 'https://open.spotify.com/track/5o3jMYOSbaVz3tkgwhELSV', - }), - 'href': 'https://api.spotify.com/v1/tracks/5o3jMYOSbaVz3tkgwhELSV', - 'is_local': False, - 'name': 'Is', - 'track_id': '5o3jMYOSbaVz3tkgwhELSV', - 'track_number': 21, - 'type': 'track', - 'uri': 'spotify:track:5o3jMYOSbaVz3tkgwhELSV', - }), - 
}), - dict({ - 'track': dict({ - 'album': dict({ - 'album_id': '4hnqM0JK4CM1phwfq1Ldyz', - 'album_type': 'album', - 'artists': list([ - dict({ - 'artist_id': '066X20Nz7iquqkkCW6Jxy6', - 'name': 'LCD Soundsystem', - 'uri': 'spotify:artist:066X20Nz7iquqkkCW6Jxy6', - }), - ]), - 'images': list([ - dict({ - 'height': 640, - 'url': 'https://i.scdn.co/image/ab67616d0000b273ee0d0dce888c6c8a70db6e8b', - 'width': 640, - }), - dict({ - 'height': 300, - 'url': 'https://i.scdn.co/image/ab67616d00001e02ee0d0dce888c6c8a70db6e8b', - 'width': 300, - }), - dict({ - 'height': 64, - 'url': 'https://i.scdn.co/image/ab67616d00004851ee0d0dce888c6c8a70db6e8b', - 'width': 64, - }), - ]), - 'name': 'This Is Happening', - 'release_date': '2010-05-17', - 'release_date_precision': 'day', - 'total_tracks': 9, - 'uri': 'spotify:album:4hnqM0JK4CM1phwfq1Ldyz', - }), - 'artists': list([ - dict({ - 'artist_id': '066X20Nz7iquqkkCW6Jxy6', - 'name': 'LCD Soundsystem', - 'uri': 'spotify:artist:066X20Nz7iquqkkCW6Jxy6', - }), - ]), - 'disc_number': 1, - 'duration_ms': 401440, - 'explicit': False, - 'external_urls': dict({ - 'spotify': 'https://open.spotify.com/track/4Cy0NHJ8Gh0xMdwyM9RkQm', - }), - 'href': 'https://api.spotify.com/v1/tracks/4Cy0NHJ8Gh0xMdwyM9RkQm', - 'is_local': False, - 'name': 'All I Want', - 'track_id': '4Cy0NHJ8Gh0xMdwyM9RkQm', - 'track_number': 4, - 'type': 'track', - 'uri': 'spotify:track:4Cy0NHJ8Gh0xMdwyM9RkQm', - }), - }), - dict({ - 'track': dict({ - 'album': dict({ - 'album_id': '2usKFntxa98WHMcyW6xJBz', - 'album_type': 'album', - 'artists': list([ - dict({ - 'artist_id': '272ArH9SUAlslQqsSgPJA2', - 'name': 'Glenn Horiuchi Trio', - 'uri': 'spotify:artist:272ArH9SUAlslQqsSgPJA2', - }), - ]), - 'images': list([ - dict({ - 'height': 640, - 'url': 'https://i.scdn.co/image/ab67616d0000b2738b7447ac3daa1da18811cf7b', - 'width': 640, - }), - dict({ - 'height': 300, - 'url': 'https://i.scdn.co/image/ab67616d00001e028b7447ac3daa1da18811cf7b', - 'width': 300, - }), - dict({ - 'height': 64, - 'url': 'https://i.scdn.co/image/ab67616d000048518b7447ac3daa1da18811cf7b', - 'width': 64, - }), - ]), - 'name': 'Glenn Horiuchi Trio / Gelenn Horiuchi Quartet: Mercy / Jump Start / Endpoints / Curl Out / Earthworks / Mind Probe / Null Set / Another Space (A)', - 'release_date': '2011-04-01', - 'release_date_precision': 'day', - 'total_tracks': 8, - 'uri': 'spotify:album:2usKFntxa98WHMcyW6xJBz', - }), - 'artists': list([ - dict({ - 'artist_id': '272ArH9SUAlslQqsSgPJA2', - 'name': 'Glenn Horiuchi Trio', - 'uri': 'spotify:artist:272ArH9SUAlslQqsSgPJA2', - }), - ]), - 'disc_number': 1, - 'duration_ms': 358760, - 'explicit': False, - 'external_urls': dict({ - 'spotify': 'https://open.spotify.com/track/6hvFrZNocdt2FcKGCSY5NI', - }), - 'href': 'https://api.spotify.com/v1/tracks/6hvFrZNocdt2FcKGCSY5NI', - 'is_local': False, - 'name': 'Endpoints', - 'track_id': '6hvFrZNocdt2FcKGCSY5NI', - 'track_number': 2, - 'type': 'track', - 'uri': 'spotify:track:6hvFrZNocdt2FcKGCSY5NI', - }), - }), - dict({ - 'track': dict({ - 'album': dict({ - 'album_id': '0ivM6kSawaug0j3tZVusG2', - 'album_type': 'album', - 'artists': list([ - dict({ - 'artist_id': '2KftmGt9sk1yLjsAoloC3M', - 'name': 'Zucchero', - 'uri': 'spotify:artist:2KftmGt9sk1yLjsAoloC3M', - }), - ]), - 'images': list([ - dict({ - 'height': 640, - 'url': 'https://i.scdn.co/image/ab67616d0000b27304e57d181ff062f8339d6c71', - 'width': 640, - }), - dict({ - 'height': 300, - 'url': 'https://i.scdn.co/image/ab67616d00001e0204e57d181ff062f8339d6c71', - 'width': 300, - }), - dict({ - 'height': 64, - 
'url': 'https://i.scdn.co/image/ab67616d0000485104e57d181ff062f8339d6c71', - 'width': 64, - }), - ]), - 'name': 'All The Best (Spanish Version)', - 'release_date': '2007-01-01', - 'release_date_precision': 'day', - 'total_tracks': 18, - 'uri': 'spotify:album:0ivM6kSawaug0j3tZVusG2', - }), - 'artists': list([ - dict({ - 'artist_id': '2KftmGt9sk1yLjsAoloC3M', - 'name': 'Zucchero', - 'uri': 'spotify:artist:2KftmGt9sk1yLjsAoloC3M', - }), - ]), - 'disc_number': 1, - 'duration_ms': 176093, - 'explicit': False, - 'external_urls': dict({ - 'spotify': 'https://open.spotify.com/track/2E2znCPaS8anQe21GLxcvJ', - }), - 'href': 'https://api.spotify.com/v1/tracks/2E2znCPaS8anQe21GLxcvJ', - 'is_local': False, - 'name': 'You Are So Beautiful', - 'track_id': '2E2znCPaS8anQe21GLxcvJ', - 'track_number': 18, - 'type': 'track', - 'uri': 'spotify:track:2E2znCPaS8anQe21GLxcvJ', - }), - }), - ]), - }), - 'uri': 'spotify:playlist:3cEYpjA9oz9GiPac4AsH4n', - }), - }), - }) -# --- diff --git a/tests/components/spotify/snapshots/test_media_browser.ambr b/tests/components/spotify/snapshots/test_media_browser.ambr deleted file mode 100644 index e1ff42cb7c8..00000000000 --- a/tests/components/spotify/snapshots/test_media_browser.ambr +++ /dev/null @@ -1,821 +0,0 @@ -# serializer version: 1 -# name: test_browse_media_categories - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_playlists', - 'media_content_type': 'spotify://current_user_playlists', - 'thumbnail': None, - 'title': 'Playlists', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_followed_artists', - 'media_content_type': 'spotify://current_user_followed_artists', - 'thumbnail': None, - 'title': 'Artists', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_albums', - 'media_content_type': 'spotify://current_user_saved_albums', - 'thumbnail': None, - 'title': 'Albums', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_tracks', - 'media_content_type': 'spotify://current_user_saved_tracks', - 'thumbnail': None, - 'title': 'Tracks', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_shows', - 'media_content_type': 'spotify://current_user_saved_shows', - 'thumbnail': None, - 'title': 'Podcasts', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_recently_played', - 'media_content_type': 'spotify://current_user_recently_played', - 'thumbnail': None, - 'title': 'Recently played', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_top_artists', - 'media_content_type': 'spotify://current_user_top_artists', - 'thumbnail': None, - 'title': 'Top Artists', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 
'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_top_tracks', - 'media_content_type': 'spotify://current_user_top_tracks', - 'thumbnail': None, - 'title': 'Top Tracks', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/categories', - 'media_content_type': 'spotify://categories', - 'thumbnail': None, - 'title': 'Categories', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/featured_playlists', - 'media_content_type': 'spotify://featured_playlists', - 'thumbnail': None, - 'title': 'Featured Playlists', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/new_releases', - 'media_content_type': 'spotify://new_releases', - 'thumbnail': None, - 'title': 'New Releases', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/library', - 'media_content_type': 'spotify://library', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Media Library', - }) -# --- -# name: test_browse_media_playlists[01J5TX5A0FF6G5V0QJX6HBC94T] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:4WkWJ0EjHEFASDevhM8oPw', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273d061f5bfae8d38558f3698c1', - 'title': 'Hyper', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:1RHirWgH1weMsBLi4KOK9d', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://mosaic.scdn.co/640/ab67616d0000b2732f3e58dd611d177973cb3a8cab67616d0000b27345cab965cb4639a4e669564aab67616d0000b2739e83c93811be6abfad8649d6ab67616d0000b273e4c03429788f0aff263a5fc6', - 'title': 'Ain’t got shit on me', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_playlists', - 'media_content_type': 'spotify://current_user_playlists', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Playlists', - }) -# --- -# name: test_browse_media_playlists[32oesphrnacjcf7vw5bf6odx3] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://32oesphrnacjcf7vw5bf6odx3/spotify:playlist:4WkWJ0EjHEFASDevhM8oPw', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273d061f5bfae8d38558f3698c1', - 'title': 'Hyper', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://32oesphrnacjcf7vw5bf6odx3/spotify:playlist:1RHirWgH1weMsBLi4KOK9d', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://mosaic.scdn.co/640/ab67616d0000b2732f3e58dd611d177973cb3a8cab67616d0000b27345cab965cb4639a4e669564aab67616d0000b2739e83c93811be6abfad8649d6ab67616d0000b273e4c03429788f0aff263a5fc6', - 'title': 
'Ain’t got shit on me', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://32oesphrnacjcf7vw5bf6odx3/current_user_playlists', - 'media_content_type': 'spotify://current_user_playlists', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Playlists', - }) -# --- -# name: test_browse_media_root - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01J5TX5A0FF6G5V0QJX6HBC94T', - 'media_content_type': 'spotify://library', - 'thumbnail': 'https://brands.home-assistant.io/_/spotify/logo.png', - 'title': 'spotify_1', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://32oesphrnacjcf7vw5bf6odx3', - 'media_content_type': 'spotify://library', - 'thumbnail': 'https://brands.home-assistant.io/_/spotify/logo.png', - 'title': 'spotify_2', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://', - 'media_content_type': 'spotify', - 'not_shown': 0, - 'thumbnail': 'https://brands.home-assistant.io/_/spotify/logo.png', - 'title': 'Spotify', - }) -# --- -# name: test_browsing[album-spotify:album:3IqzqH6ShrRtie9Yd2ODyG] - dict({ - 'can_expand': True, - 'can_play': True, - 'children': list([ - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:6akJGriy4njdP8fZTPGjwz', - 'media_content_type': 'spotify://track', - 'thumbnail': None, - 'title': 'All Your Friends', - }), - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:7N02bJK1amhplZ8yAapRS5', - 'media_content_type': 'spotify://track', - 'thumbnail': None, - 'title': 'New Magiks', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:3IqzqH6ShrRtie9Yd2ODyG', - 'media_content_type': 'spotify://album', - 'not_shown': 0, - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273a61a28c2f084761f8833bce6', - 'title': 'SINGLARITY', - }) -# --- -# name: test_browsing[artist-spotify:artist:0TnOYISbd1XYRBk9myaseg] - dict({ - 'can_expand': True, - 'can_play': True, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:56jg3KJcYmfL7RzYmG2O1Q', - 'media_content_type': 'spotify://album', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273a0bac1996f26274685db1520', - 'title': 'Trackhouse (Daytona 500 Edition)', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:1l86t4bTNT2j1X0ZBCIv6R', - 'media_content_type': 'spotify://album', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b27333a4ba8f73271a749c5d953d', - 'title': 'Trackhouse', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:0TnOYISbd1XYRBk9myaseg', - 'media_content_type': 'spotify://artist', - 'not_shown': 0, - 'thumbnail': 'https://i.scdn.co/image/ab6761610000e5ebee07b5820dd91d15d397e29c', - 'title': 'Pitbull', - }) -# --- -# 
name: test_browsing[categories-categories] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/0JQ5DAt0tbjZptfcdMSKl3', - 'media_content_type': 'spotify://category_playlists', - 'thumbnail': 'https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg', - 'title': 'Made For You', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/0JQ5DAqbMKFz6FAsUtgAab', - 'media_content_type': 'spotify://category_playlists', - 'thumbnail': 'https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg', - 'title': 'New Releases', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/categories', - 'media_content_type': 'spotify://categories', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Categories', - }) -# --- -# name: test_browsing[category_playlists-dinner] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DX7yhuKT9G4qk', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67706f0000000343319faa9428405f3312b588', - 'title': 'eten met vrienden', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DXbvE0SE0Cczh', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67706f00000003b93c270883619dde61725fc8', - 'title': 'Jukebox Joint', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/dinner', - 'media_content_type': 'spotify://category_playlists', - 'not_shown': 0, - 'thumbnail': 'https://t.scdn.co/media/original/dinner_1b6506abba0ba52c54e6d695c8571078_274x274.jpg', - 'title': 'Cooking & Dining', - }) -# --- -# name: test_browsing[current_user_followed_artists-current_user_followed_artists] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:0lLY20XpZ9yDobkbHI7u1y', - 'media_content_type': 'spotify://artist', - 'thumbnail': 'https://i.scdn.co/image/ab6761610000e5eb0fb1220e7e3ace47ebad023e', - 'title': 'Pegboard Nerds', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:0p4nmQO2msCgU4IF37Wi3j', - 'media_content_type': 'spotify://artist', - 'thumbnail': 'https://i.scdn.co/image/ab6761610000e5eb5c3349ddba6b8e064c1bab16', - 'title': 'Avril Lavigne', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_followed_artists', - 'media_content_type': 'spotify://current_user_followed_artists', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Artists', - }) -# --- -# name: test_browsing[current_user_playlists-current_user_playlists] - dict({ - 'can_expand': True, - 'can_play': False, - 
'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:4WkWJ0EjHEFASDevhM8oPw', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273d061f5bfae8d38558f3698c1', - 'title': 'Hyper', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:1RHirWgH1weMsBLi4KOK9d', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://mosaic.scdn.co/640/ab67616d0000b2732f3e58dd611d177973cb3a8cab67616d0000b27345cab965cb4639a4e669564aab67616d0000b2739e83c93811be6abfad8649d6ab67616d0000b273e4c03429788f0aff263a5fc6', - 'title': 'Ain’t got shit on me', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_playlists', - 'media_content_type': 'spotify://current_user_playlists', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Playlists', - }) -# --- -# name: test_browsing[current_user_recently_played-current_user_recently_played] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:71dMjqJ8UJV700zYs5YZCh', - 'media_content_type': 'spotify://track', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273cdac047e7894fb56a0dfdcde', - 'title': 'Super Breath', - }), - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:71dMjqJ8UJV700zYs5YZCh', - 'media_content_type': 'spotify://track', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273cdac047e7894fb56a0dfdcde', - 'title': 'Super Breath', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_recently_played', - 'media_content_type': 'spotify://current_user_recently_played', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Recently played', - }) -# --- -# name: test_browsing[current_user_saved_albums-current_user_saved_albums] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:57MSBg5pBQZH5bfLVDmeuP', - 'media_content_type': 'spotify://album', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b2733126a95bb7ed4146a80c7fc6', - 'title': 'In Waves', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:3DQueEd1Ft9PHWgovDzPKh', - 'media_content_type': 'spotify://album', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b2736b8a4828e057b7dc1c4a4d39', - 'title': 'ten days', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_albums', - 'media_content_type': 'spotify://current_user_saved_albums', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Albums', - }) -# --- -# name: test_browsing[current_user_saved_shows-current_user_saved_shows] - dict({ - 'can_expand': True, - 
'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:show:5OzkclFjD6iAjtAuo7aIYt', - 'media_content_type': 'spotify://show', - 'thumbnail': 'https://i.scdn.co/image/ab6765630000f68db5f65a943ef4f707bf79949b', - 'title': 'Toni and Ryan', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:show:6XYRres0KZtnTqKcLavWR2', - 'media_content_type': 'spotify://show', - 'thumbnail': 'https://i.scdn.co/image/ab6765630000f68d5fccb05c5685c081d5c2ad9c', - 'title': 'BLAST Push To Talk', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_shows', - 'media_content_type': 'spotify://current_user_saved_shows', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Podcasts', - }) -# --- -# name: test_browsing[current_user_saved_tracks-current_user_saved_tracks] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:2pj2A25YQK4uMxhZheNx7R', - 'media_content_type': 'spotify://track', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273ac9dd449e38e5e8952fd22ad', - 'title': 'Otherside', - }), - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:2lKOI1nwP5qZtZC7TGQVY8', - 'media_content_type': 'spotify://track', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b2733d710ab088ff797e80cc5aed', - 'title': 'I Think I Need A DJ', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_tracks', - 'media_content_type': 'spotify://current_user_saved_tracks', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Tracks', - }) -# --- -# name: test_browsing[current_user_top_artists-current_user_top_artists] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:74Yus6IHfa3tWZzXXAYtS2', - 'media_content_type': 'spotify://artist', - 'thumbnail': 'https://i.scdn.co/image/ab6761610000e5ebf749f53f8bb5ffccf6105ce3', - 'title': 'Onkruid', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:6s5ubAp65wXoTZefE01RNR', - 'media_content_type': 'spotify://artist', - 'thumbnail': 'https://i.scdn.co/image/ab6761610000e5eb8e750249623067fe3c557cf0', - 'title': 'Joost', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_top_artists', - 'media_content_type': 'spotify://current_user_top_artists', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Top Artists', - }) -# --- -# name: test_browsing[current_user_top_tracks-current_user_top_tracks] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 
'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:3oRoMXsP2NRzm51lldj1RO', - 'media_content_type': 'spotify://track', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273c88e6a4447087f41eb388b14', - 'title': 'i like the way you kiss me', - }), - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:69zgu5rlAie3IPZOEXLxyS', - 'media_content_type': 'spotify://track', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b27386f028311a5a746aa46b412f', - 'title': "Think I'm In Love With You (With Dua Lipa) (Live From The 59th ACM Awards)", - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_top_tracks', - 'media_content_type': 'spotify://current_user_top_tracks', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Top Tracks', - }) -# --- -# name: test_browsing[featured_playlists-featured_playlists] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DX4dopZ9vOp1t', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67706f000000037d14c267b8ee5fea2246a8fe', - 'title': 'Kerst Hits 2023', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DWSBi5svWQ9Nk', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67706f00000003f7b99051789611a49101c1cf', - 'title': 'Top Hits NL', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/featured_playlists', - 'media_content_type': 'spotify://featured_playlists', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Featured Playlists', - }) -# --- -# name: test_browsing[new_releases-new_releases] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:5SGtrmYbIo0Dsg4kJ4qjM6', - 'media_content_type': 'spotify://album', - 'thumbnail': 'https://i.scdn.co/image/ab67616d00001e0209ba52a5116e0c3e8461f58b', - 'title': 'Moon Music', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:713lZ7AF55fEFSQgcttj9y', - 'media_content_type': 'spotify://album', - 'thumbnail': 'https://i.scdn.co/image/ab67616d00001e02ab9953b1d18f8233f6b26027', - 'title': 'drift', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/new_releases', - 'media_content_type': 'spotify://new_releases', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'New Releases', - }) -# --- -# name: test_browsing[playlist-spotify:playlist:3cEYpjA9oz9GiPac4AsH4n] - dict({ - 'can_expand': True, - 'can_play': True, - 'children': list([ - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:4rzfv0JLZfVhOhbSQ8o5jZ', - 
'media_content_type': 'spotify://track', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273ce6d0eef0c1ce77e5f95bbbc', - 'title': 'Api', - }), - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:5o3jMYOSbaVz3tkgwhELSV', - 'media_content_type': 'spotify://track', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273aa2ff29970d9a63a49dfaeb2', - 'title': 'Is', - }), - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:4Cy0NHJ8Gh0xMdwyM9RkQm', - 'media_content_type': 'spotify://track', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273ee0d0dce888c6c8a70db6e8b', - 'title': 'All I Want', - }), - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:6hvFrZNocdt2FcKGCSY5NI', - 'media_content_type': 'spotify://track', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b2738b7447ac3daa1da18811cf7b', - 'title': 'Endpoints', - }), - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:2E2znCPaS8anQe21GLxcvJ', - 'media_content_type': 'spotify://track', - 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b27304e57d181ff062f8339d6c71', - 'title': 'You Are So Beautiful', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:3cEYpjA9oz9GiPac4AsH4n', - 'media_content_type': 'spotify://playlist', - 'not_shown': 0, - 'thumbnail': 'https://i.scdn.co/image/ab67706c0000da848d0ce13d55f634e290f744ba', - 'title': 'Spotify Web API Testing playlist', - }) -# --- -# name: test_browsing[show-spotify:show:1Y9ExMgMxoBVrgrfU7u0nD] - dict({ - 'can_expand': True, - 'can_play': True, - 'children': list([ - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:episode:3ssmxnilHYaKhwRWoBGMbU', - 'media_content_type': 'spotify://episode', - 'thumbnail': 'https://i.scdn.co/image/ab6765630000ba8af44e9ef63c2d6fb44cb0c9bf', - 'title': 'The Great War - Fallout Lorecast EP 1', - }), - dict({ - 'can_expand': False, - 'can_play': True, - 'children_media_class': None, - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:episode:1bbj9aqeeZ3UMUlcWN0S03', - 'media_content_type': 'spotify://episode', - 'thumbnail': 'https://i.scdn.co/image/ab6765630000ba8a655b54a66471089d27dbb03f', - 'title': 'Who Dropped the First Bomb?', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:show:1Y9ExMgMxoBVrgrfU7u0nD', - 'media_content_type': 'spotify://show', - 'not_shown': 0, - 'thumbnail': 'https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a', - 'title': 'Safety Third', - }) -# --- diff --git a/tests/components/spotify/snapshots/test_media_player.ambr b/tests/components/spotify/snapshots/test_media_player.ambr deleted file mode 100644 index 9692d59cfd1..00000000000 --- a/tests/components/spotify/snapshots/test_media_player.ambr +++ /dev/null @@ -1,137 +0,0 @@ -# serializer version: 1 -# name: test_entities[media_player.spotify_spotify_1-entry] 
- EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'source_list': list([ - 'DESKTOP-BKC5SIK', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'media_player', - 'entity_category': None, - 'entity_id': 'media_player.spotify_spotify_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'spotify', - 'unique_id': '1112264111', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[media_player.spotify_spotify_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': '/api/media_player_proxy/media_player.spotify_spotify_1?token=mock-token&cache=7bb89748322acb6c', - 'friendly_name': 'Spotify spotify_1', - 'media_album_name': 'Permanent Waves', - 'media_artist': 'Rush', - 'media_content_id': 'spotify:track:4e9hUiLsN4mx61ARosFi7p', - 'media_content_type': , - 'media_duration': 296, - 'media_playlist': 'Spotify Web API Testing playlist', - 'media_position': 249, - 'media_position_updated_at': HAFakeDatetime(2023, 10, 21, 0, 0, tzinfo=datetime.timezone.utc), - 'media_title': 'The Spirit Of Radio', - 'media_track': 1, - 'repeat': , - 'shuffle': False, - 'source': 'Master Bathroom Speaker', - 'source_list': list([ - 'DESKTOP-BKC5SIK', - ]), - 'supported_features': , - 'volume_level': 0.25, - }), - 'context': , - 'entity_id': 'media_player.spotify_spotify_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_podcast[media_player.spotify_spotify_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'source_list': list([ - 'DESKTOP-BKC5SIK', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'media_player', - 'entity_category': None, - 'entity_id': 'media_player.spotify_spotify_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'spotify', - 'unique_id': '1112264111', - 'unit_of_measurement': None, - }) -# --- -# name: test_podcast[media_player.spotify_spotify_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': '/api/media_player_proxy/media_player.spotify_spotify_1?token=mock-token&cache=cf1e6e1e830f08d3', - 'friendly_name': 'Spotify spotify_1', - 'media_album_name': 'Safety Third', - 'media_artist': 'Safety Third ', - 'media_content_id': 'spotify:episode:3o0RYoo5iOMKSmEbunsbvW', - 'media_content_type': , - 'media_duration': 3690, - 'media_position': 5, - 'media_position_updated_at': HAFakeDatetime(2023, 10, 21, 0, 0, tzinfo=datetime.timezone.utc), - 'media_title': 'My Squirrel Has Brain Damage - Safety Third 119', - 'repeat': , - 'shuffle': False, - 'source': 'Sonos Roam SL', - 'source_list': list([ - 'DESKTOP-BKC5SIK', - ]), - 'supported_features': , - 'volume_level': 0.46, - }), - 'context': , - 'entity_id': 'media_player.spotify_spotify_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 
'playing', - }) -# --- diff --git a/tests/components/spotify/snapshots/test_sensor.ambr b/tests/components/spotify/snapshots/test_sensor.ambr deleted file mode 100644 index ce77dda479f..00000000000 --- a/tests/components/spotify/snapshots/test_sensor.ambr +++ /dev/null @@ -1,595 +0,0 @@ -# serializer version: 1 -# name: test_entities[sensor.spotify_spotify_1_song_acousticness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_acousticness', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song acousticness', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'acousticness', - 'unique_id': '1112264111_acousticness', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_acousticness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song acousticness', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_acousticness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.1', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_danceability-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_danceability', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song danceability', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'danceability', - 'unique_id': '1112264111_danceability', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_danceability-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song danceability', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_danceability', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '69.6', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song energy', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 
0, - 'translation_key': 'energy', - 'unique_id': '1112264111_energy', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song energy', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '90.5', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_instrumentalness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_instrumentalness', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song instrumentalness', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'instrumentalness', - 'unique_id': '1112264111_instrumentalness', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_instrumentalness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song instrumentalness', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_instrumentalness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0905', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_key-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'C', - 'C♯/D♭', - 'D', - 'D♯/E♭', - 'E', - 'F', - 'F♯/G♭', - 'G', - 'G♯/A♭', - 'A', - 'A♯/B♭', - 'B', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_key', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Song key', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'key', - 'unique_id': '1112264111_key', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_key-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Spotify spotify_1 Song key', - 'options': list([ - 'C', - 'C♯/D♭', - 'D', - 'D♯/E♭', - 'E', - 'F', - 'F♯/G♭', - 'G', - 'G♯/A♭', - 'A', - 'A♯/B♭', - 'B', - ]), - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_key', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'D♯/E♭', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_liveness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_liveness', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song liveness', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'liveness', - 'unique_id': '1112264111_liveness', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_liveness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song liveness', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_liveness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '30.2', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'major', - 'minor', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Song mode', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': '1112264111_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Spotify spotify_1 Song mode', - 'options': list([ - 'major', - 'minor', - ]), - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'major', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_speechiness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_speechiness', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song speechiness', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'speechiness', - 'unique_id': '1112264111_speechiness', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_speechiness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song speechiness', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_speechiness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.3', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_tempo-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_tempo', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song tempo', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'song_tempo', - 'unique_id': '1112264111_bpm', - 'unit_of_measurement': 'bpm', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_tempo-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song tempo', - 'unit_of_measurement': 'bpm', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_tempo', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '114.944', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_time_signature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - '3/4', - '4/4', - '5/4', - '6/4', - '7/4', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_time_signature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Song time signature', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'time_signature', - 'unique_id': '1112264111_time_signature', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_time_signature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Spotify spotify_1 Song time signature', - 'options': list([ - '3/4', - '4/4', - '5/4', - '6/4', - '7/4', - ]), - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_time_signature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4/4', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_valence-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_valence', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song valence', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'valence', - 'unique_id': '1112264111_valence', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_valence-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song valence', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_valence', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '62.5', - }) -# --- diff 
--git a/tests/components/spotify/test_config_flow.py b/tests/components/spotify/test_config_flow.py index cb942a63568..6040fcd84f2 100644 --- a/tests/components/spotify/test_config_flow.py +++ b/tests/components/spotify/test_config_flow.py @@ -2,17 +2,22 @@ from http import HTTPStatus from ipaddress import ip_address -from unittest.mock import MagicMock, patch +from unittest.mock import patch import pytest -from spotifyaio import SpotifyConnectionError +from spotipy import SpotifyException from homeassistant.components import zeroconf +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) from homeassistant.components.spotify.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, SOURCE_ZEROCONF from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -29,6 +34,19 @@ BLANK_ZEROCONF_INFO = zeroconf.ZeroconfServiceInfo( ) +@pytest.fixture +async def component_setup(hass: HomeAssistant) -> None: + """Fixture for setting up the integration.""" + result = await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + await async_import_client_credential( + hass, DOMAIN, ClientCredential("client", "secret"), "cred" + ) + + assert result + + async def test_abort_if_no_configuration(hass: HomeAssistant) -> None: """Check flow aborts when no configuration is present.""" result = await hass.config_entries.flow.async_init( @@ -59,12 +77,11 @@ async def test_zeroconf_abort_if_existing_entry(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("current_request_with_host") -@pytest.mark.usefixtures("setup_credentials") async def test_full_flow( hass: HomeAssistant, + component_setup, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_spotify: MagicMock, ) -> None: """Check a full flow.""" result = await hass.config_entries.flow.async_init( @@ -82,7 +99,7 @@ async def test_full_flow( assert result["type"] is FlowResultType.EXTERNAL_STEP assert result["url"] == ( "https://accounts.spotify.com/authorize" - "?response_type=code&client_id=CLIENT_ID" + "?response_type=code&client_id=client" "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}" "&scope=user-modify-playback-state,user-read-playback-state,user-read-private," @@ -95,7 +112,6 @@ async def test_full_flow( assert resp.status == HTTPStatus.OK assert resp.headers["content-type"] == "text/html; charset=utf-8" - aioclient_mock.clear_requests() aioclient_mock.post( "https://accounts.spotify.com/api/token", json={ @@ -108,31 +124,31 @@ async def test_full_flow( with ( patch("homeassistant.components.spotify.async_setup_entry", return_value=True), + patch("homeassistant.components.spotify.config_flow.Spotify") as spotify_mock, ): + spotify_mock.return_value.current_user.return_value = { + "id": "fake_id", + "display_name": "frenck", + } result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert len(hass.config_entries.async_entries(DOMAIN)) == 1, result - - assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"]["auth_implementation"] == "cred" 
result["data"]["token"].pop("expires_at") - assert result["data"]["name"] == "Henk" + assert result["data"]["name"] == "frenck" assert result["data"]["token"] == { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", "type": "Bearer", "expires_in": 60, } - assert result["result"].unique_id == "1112264111" @pytest.mark.usefixtures("current_request_with_host") -@pytest.mark.usefixtures("setup_credentials") async def test_abort_if_spotify_error( hass: HomeAssistant, + component_setup, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_spotify: MagicMock, ) -> None: """Check Spotify errors causes flow to abort.""" result = await hass.config_entries.flow.async_init( @@ -159,84 +175,46 @@ async def test_abort_if_spotify_error( }, ) - mock_spotify.return_value.get_current_user.side_effect = SpotifyConnectionError - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) + with patch( + "homeassistant.components.spotify.config_flow.Spotify.current_user", + side_effect=SpotifyException(400, -1, "message"), + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "connection_error" @pytest.mark.usefixtures("current_request_with_host") -@pytest.mark.usefixtures("setup_credentials") async def test_reauthentication( hass: HomeAssistant, + component_setup, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, ) -> None: """Test Spotify reauthentication.""" - mock_config_entry.add_to_hass(hass) - - result = await mock_config_entry.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": "https://example.com/auth/external/callback", - }, + old_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=123, + version=1, + data={"id": "frenck", "auth_implementation": "cred"}, ) - client = await hass_client_no_auth() - await client.get(f"/auth/external/callback?code=abcd&state={state}") + old_entry.add_to_hass(hass) - aioclient_mock.post( - "https://accounts.spotify.com/api/token", - json={ - "refresh_token": "new-refresh-token", - "access_token": "new-access-token", - "type": "Bearer", - "expires_in": 60, + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, }, + data=old_entry.data, ) - with ( - patch("homeassistant.components.spotify.async_setup_entry", return_value=True), - ): - result = await hass.config_entries.flow.async_configure(result["flow_id"]) + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - mock_config_entry.data["token"].pop("expires_at") - assert mock_config_entry.data["token"] == { - "refresh_token": "new-refresh-token", - "access_token": "new-access-token", - "type": "Bearer", - "expires_in": 60, - } - - -@pytest.mark.usefixtures("current_request_with_host") -@pytest.mark.usefixtures("setup_credentials") -async def test_reauth_account_mismatch( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: 
-    mock_spotify: MagicMock,
-    mock_config_entry: MockConfigEntry,
-) -> None:
-    """Test Spotify reauthentication with different account."""
-    mock_config_entry.add_to_hass(hass)
-
-    result = await mock_config_entry.start_reauth_flow(hass)
-
-    result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
+    result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {})

     state = config_entry_oauth2_flow._encode_jwt(
         hass,
@@ -258,10 +236,85 @@ async def test_reauth_account_mismatch(
         },
     )

-    mock_spotify.return_value.get_current_user.return_value.user_id = (
-        "different_user_id"
+    with (
+        patch("homeassistant.components.spotify.async_setup_entry", return_value=True),
+        patch("homeassistant.components.spotify.config_flow.Spotify") as spotify_mock,
+    ):
+        spotify_mock.return_value.current_user.return_value = {"id": "frenck"}
+        result = await hass.config_entries.flow.async_configure(result["flow_id"])
+
+    assert result["data"]["auth_implementation"] == "cred"
+    result["data"]["token"].pop("expires_at")
+    assert result["data"]["token"] == {
+        "refresh_token": "mock-refresh-token",
+        "access_token": "mock-access-token",
+        "type": "Bearer",
+        "expires_in": 60,
+    }
+
+
+@pytest.mark.usefixtures("current_request_with_host")
+async def test_reauth_account_mismatch(
+    hass: HomeAssistant,
+    component_setup,
+    hass_client_no_auth: ClientSessionGenerator,
+    aioclient_mock: AiohttpClientMocker,
+) -> None:
+    """Test Spotify reauthentication with different account."""
+    old_entry = MockConfigEntry(
+        domain=DOMAIN,
+        unique_id=123,
+        version=1,
+        data={"id": "frenck", "auth_implementation": "cred"},
     )
-    result = await hass.config_entries.flow.async_configure(result["flow_id"])
+    old_entry.add_to_hass(hass)
+
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        context={
+            "source": SOURCE_REAUTH,
+            "unique_id": old_entry.unique_id,
+            "entry_id": old_entry.entry_id,
+        },
+        data=old_entry.data,
+    )
+
+    flows = hass.config_entries.flow.async_progress()
+    result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {})
+
+    state = config_entry_oauth2_flow._encode_jwt(
+        hass,
+        {
+            "flow_id": result["flow_id"],
+            "redirect_uri": "https://example.com/auth/external/callback",
+        },
+    )
+    client = await hass_client_no_auth()
+    await client.get(f"/auth/external/callback?code=abcd&state={state}")
+
+    aioclient_mock.post(
+        "https://accounts.spotify.com/api/token",
+        json={
+            "refresh_token": "mock-refresh-token",
+            "access_token": "mock-access-token",
+            "type": "Bearer",
+            "expires_in": 60,
+        },
+    )
+
+    with patch("homeassistant.components.spotify.config_flow.Spotify") as spotify_mock:
+        spotify_mock.return_value.current_user.return_value = {"id": "fake_id"}
+        result = await hass.config_entries.flow.async_configure(result["flow_id"])

     assert result["type"] is FlowResultType.ABORT
     assert result["reason"] == "reauth_account_mismatch"
+
+
+async def test_abort_if_no_reauth_entry(hass: HomeAssistant) -> None:
+    """Check flow aborts when no entry is known when entering reauth confirmation."""
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": "reauth_confirm"}
+    )
+
+    assert result.get("type") is FlowResultType.ABORT
+    assert result.get("reason") == "reauth_account_mismatch"
diff --git a/tests/components/spotify/test_diagnostics.py b/tests/components/spotify/test_diagnostics.py
deleted file mode 100644
index 6744ca11a00..00000000000
--- a/tests/components/spotify/test_diagnostics.py
+++ /dev/null
@@ -1,31 +0,0
@@ -"""Tests for the diagnostics data provided by the Spotify integration.""" - -from unittest.mock import AsyncMock - -import pytest -from syrupy import SnapshotAssertion -from syrupy.filters import props - -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -@pytest.mark.usefixtures("setup_credentials") -async def test_diagnostics_polling_instance( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_spotify: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test diagnostics.""" - await setup_integration(hass, mock_config_entry) - - assert await get_diagnostics_for_config_entry( - hass, hass_client, mock_config_entry - ) == snapshot(exclude=props("position_updated_at")) diff --git a/tests/components/spotify/test_init.py b/tests/components/spotify/test_init.py deleted file mode 100644 index 21129d20c07..00000000000 --- a/tests/components/spotify/test_init.py +++ /dev/null @@ -1,50 +0,0 @@ -"""Tests for the Spotify initialization.""" - -from unittest.mock import MagicMock - -import pytest -from spotifyaio import SpotifyConnectionError - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry - - -@pytest.mark.usefixtures("setup_credentials") -async def test_setup( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify setup.""" - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED - - -@pytest.mark.usefixtures("setup_credentials") -@pytest.mark.parametrize( - "method", - [ - "get_current_user", - "get_devices", - ], -) -async def test_setup_with_required_calls_failing( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - method: str, -) -> None: - """Test the Spotify setup with required calls failing.""" - getattr(mock_spotify.return_value, method).side_effect = SpotifyConnectionError - mock_config_entry.add_to_hass(hass) - - assert not await hass.config_entries.async_setup(mock_config_entry.entry_id) diff --git a/tests/components/spotify/test_media_browser.py b/tests/components/spotify/test_media_browser.py deleted file mode 100644 index dcacc23bbee..00000000000 --- a/tests/components/spotify/test_media_browser.py +++ /dev/null @@ -1,180 +0,0 @@ -"""Test the media browser interface.""" - -from unittest.mock import MagicMock - -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.media_player import BrowseError -from homeassistant.components.spotify import DOMAIN -from homeassistant.components.spotify.browse_media import async_browse_media -from homeassistant.const import CONF_ID -from homeassistant.core import HomeAssistant - -from . 
import setup_integration -from .conftest import SCOPES - -from tests.common import MockConfigEntry - - -@pytest.mark.usefixtures("setup_credentials") -async def test_browse_media_root( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - expires_at: int, -) -> None: - """Test browsing the root.""" - await setup_integration(hass, mock_config_entry) - # We add a second config entry to test that lowercase entry_ids also work - config_entry = MockConfigEntry( - domain=DOMAIN, - title="spotify_2", - unique_id="second_fake_id", - data={ - CONF_ID: "second_fake_id", - "name": "spotify_account_2", - "auth_implementation": DOMAIN, - "token": { - "access_token": "mock-access-token", - "refresh_token": "mock-refresh-token", - "expires_at": expires_at, - "scope": SCOPES, - }, - }, - entry_id="32oesphrnacjcf7vw5bf6odx3", - ) - await setup_integration(hass, config_entry) - response = await async_browse_media(hass, None, None) - assert response.as_dict() == snapshot - - -@pytest.mark.usefixtures("setup_credentials") -async def test_browse_media_categories( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test browsing categories.""" - await setup_integration(hass, mock_config_entry) - response = await async_browse_media( - hass, "spotify://library", f"spotify://{mock_config_entry.entry_id}" - ) - assert response.as_dict() == snapshot - - -@pytest.mark.parametrize( - ("config_entry_id"), [("01J5TX5A0FF6G5V0QJX6HBC94T"), ("32oesphrnacjcf7vw5bf6odx3")] -) -@pytest.mark.usefixtures("setup_credentials") -async def test_browse_media_playlists( - hass: HomeAssistant, - config_entry_id: str, - mock_spotify: MagicMock, - snapshot: SnapshotAssertion, - expires_at: int, -) -> None: - """Test browsing playlists for the two config entries.""" - mock_config_entry = MockConfigEntry( - domain=DOMAIN, - title="Spotify", - unique_id="1112264649", - data={ - "auth_implementation": DOMAIN, - "token": { - "access_token": "mock-access-token", - "refresh_token": "mock-refresh-token", - "expires_at": expires_at, - "scope": SCOPES, - }, - }, - entry_id=config_entry_id, - ) - await setup_integration(hass, mock_config_entry) - response = await async_browse_media( - hass, - "spotify://current_user_playlists", - f"spotify://{config_entry_id}/current_user_playlists", - ) - assert response.as_dict() == snapshot - - -@pytest.mark.parametrize( - ("media_content_type", "media_content_id"), - [ - ("current_user_playlists", "current_user_playlists"), - ("current_user_followed_artists", "current_user_followed_artists"), - ("current_user_saved_albums", "current_user_saved_albums"), - ("current_user_saved_tracks", "current_user_saved_tracks"), - ("current_user_saved_shows", "current_user_saved_shows"), - ("current_user_recently_played", "current_user_recently_played"), - ("current_user_top_artists", "current_user_top_artists"), - ("current_user_top_tracks", "current_user_top_tracks"), - ("featured_playlists", "featured_playlists"), - ("categories", "categories"), - ("category_playlists", "dinner"), - ("new_releases", "new_releases"), - ("playlist", "spotify:playlist:3cEYpjA9oz9GiPac4AsH4n"), - ("album", "spotify:album:3IqzqH6ShrRtie9Yd2ODyG"), - ("artist", "spotify:artist:0TnOYISbd1XYRBk9myaseg"), - ("show", "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD"), - ], -) -@pytest.mark.usefixtures("setup_credentials") -async def test_browsing( - hass: HomeAssistant, - mock_spotify: MagicMock, - snapshot: 
SnapshotAssertion, - mock_config_entry: MockConfigEntry, - media_content_type: str, - media_content_id: str, -) -> None: - """Test browsing playlists for the two config entries.""" - await setup_integration(hass, mock_config_entry) - response = await async_browse_media( - hass, - f"spotify://{media_content_type}", - f"spotify://{mock_config_entry.entry_id}/{media_content_id}", - ) - assert response.as_dict() == snapshot - - -@pytest.mark.parametrize( - ("media_content_id"), - [ - "artist", - None, - ], -) -@pytest.mark.usefixtures("setup_credentials") -async def test_invalid_spotify_url( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - media_content_id: str | None, -) -> None: - """Test browsing with an invalid Spotify URL.""" - await setup_integration(hass, mock_config_entry) - with pytest.raises(BrowseError, match="Invalid Spotify URL specified"): - await async_browse_media( - hass, - "spotify://artist", - media_content_id, - ) - - -@pytest.mark.usefixtures("setup_credentials") -async def test_browsing_not_loaded_entry( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test browsing with an unloaded config entry.""" - with pytest.raises(BrowseError, match="Invalid Spotify account specified"): - await async_browse_media( - hass, - "spotify://artist", - f"spotify://{mock_config_entry.entry_id}/spotify:artist:0TnOYISbd1XYRBk9myaseg", - ) diff --git a/tests/components/spotify/test_media_player.py b/tests/components/spotify/test_media_player.py deleted file mode 100644 index b03424f8459..00000000000 --- a/tests/components/spotify/test_media_player.py +++ /dev/null @@ -1,550 +0,0 @@ -"""Tests for the Spotify media player platform.""" - -from datetime import timedelta -from unittest.mock import MagicMock, patch - -from freezegun.api import FrozenDateTimeFactory -import pytest -from spotifyaio import ( - PlaybackState, - ProductType, - RepeatMode as SpotifyRepeatMode, - SpotifyConnectionError, -) -from syrupy import SnapshotAssertion - -from homeassistant.components.media_player import ( - ATTR_INPUT_SOURCE, - ATTR_INPUT_SOURCE_LIST, - ATTR_MEDIA_CONTENT_ID, - ATTR_MEDIA_CONTENT_TYPE, - ATTR_MEDIA_ENQUEUE, - ATTR_MEDIA_REPEAT, - ATTR_MEDIA_SEEK_POSITION, - ATTR_MEDIA_SHUFFLE, - ATTR_MEDIA_VOLUME_LEVEL, - DOMAIN as MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - SERVICE_SELECT_SOURCE, - MediaPlayerEnqueue, - MediaPlayerEntityFeature, - MediaPlayerState, - MediaType, - RepeatMode, -) -from homeassistant.components.spotify import DOMAIN -from homeassistant.const import ( - ATTR_ENTITY_ID, - ATTR_ENTITY_PICTURE, - SERVICE_MEDIA_NEXT_TRACK, - SERVICE_MEDIA_PAUSE, - SERVICE_MEDIA_PLAY, - SERVICE_MEDIA_PREVIOUS_TRACK, - SERVICE_MEDIA_SEEK, - SERVICE_REPEAT_SET, - SERVICE_SHUFFLE_SET, - SERVICE_VOLUME_SET, - STATE_UNAVAILABLE, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_fixture, - snapshot_platform, -) - - -@pytest.mark.usefixtures("setup_credentials") -async def test_entities( - hass: HomeAssistant, - mock_spotify: MagicMock, - freezer: FrozenDateTimeFactory, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Spotify entities.""" - freezer.move_to("2023-10-21") - with ( - patch("secrets.token_hex", return_value="mock-token"), - patch("homeassistant.components.spotify.PLATFORMS", [Platform.MEDIA_PLAYER]), - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform( - hass, entity_registry, snapshot, mock_config_entry.entry_id - ) - - -@pytest.mark.usefixtures("setup_credentials") -async def test_podcast( - hass: HomeAssistant, - mock_spotify: MagicMock, - freezer: FrozenDateTimeFactory, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Spotify entities while listening a podcast.""" - freezer.move_to("2023-10-21") - mock_spotify.return_value.get_playback.return_value = PlaybackState.from_json( - load_fixture("playback_episode.json", DOMAIN) - ) - with ( - patch("secrets.token_hex", return_value="mock-token"), - patch("homeassistant.components.spotify.PLATFORMS", [Platform.MEDIA_PLAYER]), - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform( - hass, entity_registry, snapshot, mock_config_entry.entry_id - ) - - -@pytest.mark.usefixtures("setup_credentials") -async def test_free_account( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify entities with a free account.""" - mock_spotify.return_value.get_current_user.return_value.product = ProductType.FREE - await setup_integration(hass, mock_config_entry) - state = hass.states.get("media_player.spotify_spotify_1") - assert state - assert state.attributes["supported_features"] == 0 - - -@pytest.mark.usefixtures("setup_credentials") -async def test_restricted_device( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify entities with a restricted device.""" - mock_spotify.return_value.get_playback.return_value.device.is_restricted = True - await setup_integration(hass, mock_config_entry) - state = hass.states.get("media_player.spotify_spotify_1") - assert state - assert ( - state.attributes["supported_features"] == MediaPlayerEntityFeature.SELECT_SOURCE - ) - - -@pytest.mark.usefixtures("setup_credentials") -async def test_spotify_dj_list( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify entities with a Spotify DJ playlist.""" - mock_spotify.return_value.get_playback.return_value.context.uri = ( - "spotify:playlist:37i9dQZF1EYkqdzj48dyYq" - ) - await setup_integration(hass, mock_config_entry) - state = hass.states.get("media_player.spotify_spotify_1") - assert state - assert state.attributes["media_playlist"] == "DJ" - - -@pytest.mark.usefixtures("setup_credentials") -async def test_fetching_playlist_does_not_fail( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test failing fetching playlist does not fail update.""" - mock_spotify.return_value.get_playlist.side_effect = SpotifyConnectionError - await setup_integration(hass, 
mock_config_entry) - state = hass.states.get("media_player.spotify_spotify_1") - assert state - assert "media_playlist" not in state.attributes - - -@pytest.mark.usefixtures("setup_credentials") -async def test_idle( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify entities in idle state.""" - mock_spotify.return_value.get_playback.return_value = {} - await setup_integration(hass, mock_config_entry) - state = hass.states.get("media_player.spotify_spotify_1") - assert state - assert state.state == MediaPlayerState.IDLE - assert ( - state.attributes["supported_features"] == MediaPlayerEntityFeature.SELECT_SOURCE - ) - - -@pytest.mark.usefixtures("setup_credentials") -@pytest.mark.parametrize( - ("service", "method"), - [ - (SERVICE_MEDIA_PLAY, "start_playback"), - (SERVICE_MEDIA_PAUSE, "pause_playback"), - (SERVICE_MEDIA_PREVIOUS_TRACK, "previous_track"), - (SERVICE_MEDIA_NEXT_TRACK, "next_track"), - ], -) -async def test_simple_actions( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - service: str, - method: str, -) -> None: - """Test the Spotify media player.""" - await setup_integration(hass, mock_config_entry) - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - service, - {ATTR_ENTITY_ID: "media_player.spotify_spotify_1"}, - blocking=True, - ) - getattr(mock_spotify.return_value, method).assert_called_once_with() - - -@pytest.mark.usefixtures("setup_credentials") -async def test_repeat_mode( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify media player repeat mode.""" - await setup_integration(hass, mock_config_entry) - for mode, spotify_mode in ( - (RepeatMode.ALL, SpotifyRepeatMode.CONTEXT), - (RepeatMode.ONE, SpotifyRepeatMode.TRACK), - (RepeatMode.OFF, SpotifyRepeatMode.OFF), - ): - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_REPEAT_SET, - {ATTR_ENTITY_ID: "media_player.spotify_spotify_1", ATTR_MEDIA_REPEAT: mode}, - blocking=True, - ) - mock_spotify.return_value.set_repeat.assert_called_once_with(spotify_mode) - mock_spotify.return_value.set_repeat.reset_mock() - - -@pytest.mark.usefixtures("setup_credentials") -async def test_shuffle( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify media player shuffle.""" - await setup_integration(hass, mock_config_entry) - for shuffle in (True, False): - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_SHUFFLE_SET, - { - ATTR_ENTITY_ID: "media_player.spotify_spotify_1", - ATTR_MEDIA_SHUFFLE: shuffle, - }, - blocking=True, - ) - mock_spotify.return_value.set_shuffle.assert_called_once_with(state=shuffle) - mock_spotify.return_value.set_shuffle.reset_mock() - - -@pytest.mark.usefixtures("setup_credentials") -async def test_volume_level( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify media player volume level.""" - await setup_integration(hass, mock_config_entry) - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_SET, - { - ATTR_ENTITY_ID: "media_player.spotify_spotify_1", - ATTR_MEDIA_VOLUME_LEVEL: 0.5, - }, - blocking=True, - ) - mock_spotify.return_value.set_volume.assert_called_with(50) - - -@pytest.mark.usefixtures("setup_credentials") -async def test_seek( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> 
None: - """Test the Spotify media player seeking.""" - await setup_integration(hass, mock_config_entry) - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_SEEK, - { - ATTR_ENTITY_ID: "media_player.spotify_spotify_1", - ATTR_MEDIA_SEEK_POSITION: 100, - }, - blocking=True, - ) - mock_spotify.return_value.seek_track.assert_called_with(100000) - - -@pytest.mark.usefixtures("setup_credentials") -@pytest.mark.parametrize( - ("media_type", "media_id"), - [ - ("spotify://track", "spotify:track:3oRoMXsP2NRzm51lldj1RO"), - ("spotify://episode", "spotify:episode:3oRoMXsP2NRzm51lldj1RO"), - (MediaType.MUSIC, "spotify:track:3oRoMXsP2NRzm51lldj1RO"), - ], -) -async def test_play_media_in_queue( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - media_type: str, - media_id: str, -) -> None: - """Test the Spotify media player play media.""" - await setup_integration(hass, mock_config_entry) - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.spotify_spotify_1", - ATTR_MEDIA_CONTENT_TYPE: media_type, - ATTR_MEDIA_CONTENT_ID: media_id, - ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, - }, - blocking=True, - ) - mock_spotify.return_value.add_to_queue.assert_called_with(media_id, None) - - -@pytest.mark.usefixtures("setup_credentials") -@pytest.mark.parametrize( - ("media_type", "media_id", "called_with"), - [ - ( - "spotify://artist", - "spotify:artist:74Yus6IHfa3tWZzXXAYtS2", - {"context_uri": "spotify:artist:74Yus6IHfa3tWZzXXAYtS2"}, - ), - ( - "spotify://playlist", - "spotify:playlist:74Yus6IHfa3tWZzXXAYtS2", - {"context_uri": "spotify:playlist:74Yus6IHfa3tWZzXXAYtS2"}, - ), - ( - "spotify://album", - "spotify:album:74Yus6IHfa3tWZzXXAYtS2", - {"context_uri": "spotify:album:74Yus6IHfa3tWZzXXAYtS2"}, - ), - ( - "spotify://show", - "spotify:show:74Yus6IHfa3tWZzXXAYtS2", - {"context_uri": "spotify:show:74Yus6IHfa3tWZzXXAYtS2"}, - ), - ( - MediaType.MUSIC, - "spotify:track:3oRoMXsP2NRzm51lldj1RO", - {"uris": ["spotify:track:3oRoMXsP2NRzm51lldj1RO"]}, - ), - ( - "spotify://track", - "spotify:track:3oRoMXsP2NRzm51lldj1RO", - {"uris": ["spotify:track:3oRoMXsP2NRzm51lldj1RO"]}, - ), - ( - "spotify://episode", - "spotify:episode:3oRoMXsP2NRzm51lldj1RO", - {"uris": ["spotify:episode:3oRoMXsP2NRzm51lldj1RO"]}, - ), - ], -) -async def test_play_media( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - media_type: str, - media_id: str, - called_with: dict, -) -> None: - """Test the Spotify media player play media.""" - await setup_integration(hass, mock_config_entry) - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.spotify_spotify_1", - ATTR_MEDIA_CONTENT_TYPE: media_type, - ATTR_MEDIA_CONTENT_ID: media_id, - }, - blocking=True, - ) - mock_spotify.return_value.start_playback.assert_called_with(**called_with) - - -@pytest.mark.usefixtures("setup_credentials") -async def test_add_unsupported_media_to_queue( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify media player add unsupported media to queue.""" - await setup_integration(hass, mock_config_entry) - with pytest.raises( - ValueError, match="Media type playlist is not supported when enqueue is ADD" - ): - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.spotify_spotify_1", - ATTR_MEDIA_CONTENT_TYPE: 
"spotify://playlist", - ATTR_MEDIA_CONTENT_ID: "spotify:playlist:74Yus6IHfa3tWZzXXAYtS2", - ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, - }, - blocking=True, - ) - - -@pytest.mark.usefixtures("setup_credentials") -async def test_play_unsupported_media( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify media player play media.""" - await setup_integration(hass, mock_config_entry) - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.spotify_spotify_1", - ATTR_MEDIA_CONTENT_TYPE: MediaType.COMPOSER, - ATTR_MEDIA_CONTENT_ID: "spotify:track:3oRoMXsP2NRzm51lldj1RO", - }, - blocking=True, - ) - assert mock_spotify.return_value.start_playback.call_count == 0 - assert mock_spotify.return_value.add_to_queue.call_count == 0 - - -@pytest.mark.usefixtures("setup_credentials") -async def test_select_source( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify media player source select.""" - await setup_integration(hass, mock_config_entry) - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_SELECT_SOURCE, - { - ATTR_ENTITY_ID: "media_player.spotify_spotify_1", - ATTR_INPUT_SOURCE: "DESKTOP-BKC5SIK", - }, - blocking=True, - ) - mock_spotify.return_value.transfer_playback.assert_called_with( - "21dac6b0e0a1f181870fdc9749b2656466557666" - ) - - -@pytest.mark.usefixtures("setup_credentials") -async def test_source_devices( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test the Spotify media player available source devices.""" - await setup_integration(hass, mock_config_entry) - state = hass.states.get("media_player.spotify_spotify_1") - - assert state.attributes[ATTR_INPUT_SOURCE_LIST] == ["DESKTOP-BKC5SIK"] - - mock_spotify.return_value.get_devices.side_effect = SpotifyConnectionError - freezer.tick(timedelta(minutes=5)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get("media_player.spotify_spotify_1") - assert state - assert state.state != STATE_UNAVAILABLE - assert state.attributes[ATTR_INPUT_SOURCE_LIST] == ["DESKTOP-BKC5SIK"] - - -@pytest.mark.usefixtures("setup_credentials") -async def test_paused_playback( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify media player with paused playback.""" - mock_spotify.return_value.get_playback.return_value.is_playing = False - await setup_integration(hass, mock_config_entry) - state = hass.states.get("media_player.spotify_spotify_1") - assert state - assert state.state == MediaPlayerState.PAUSED - - -@pytest.mark.usefixtures("setup_credentials") -async def test_fallback_show_image( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify media player with a fallback image.""" - playback = PlaybackState.from_json(load_fixture("playback_episode.json", DOMAIN)) - playback.item.images = [] - mock_spotify.return_value.get_playback.return_value = playback - with patch("secrets.token_hex", return_value="mock-token"): - await setup_integration(hass, mock_config_entry) - state = hass.states.get("media_player.spotify_spotify_1") - assert state - assert ( - state.attributes[ATTR_ENTITY_PICTURE] - == 
"/api/media_player_proxy/media_player.spotify_spotify_1?token=mock-token&cache=16ff384dbae94fea" - ) - - -@pytest.mark.usefixtures("setup_credentials") -async def test_no_episode_images( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify media player with no episode images.""" - playback = PlaybackState.from_json(load_fixture("playback_episode.json", DOMAIN)) - playback.item.images = [] - playback.item.show.images = [] - mock_spotify.return_value.get_playback.return_value = playback - await setup_integration(hass, mock_config_entry) - state = hass.states.get("media_player.spotify_spotify_1") - assert state - assert ATTR_ENTITY_PICTURE not in state.attributes - - -@pytest.mark.usefixtures("setup_credentials") -async def test_no_album_images( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the Spotify media player with no album images.""" - mock_spotify.return_value.get_playback.return_value.item.album.images = [] - await setup_integration(hass, mock_config_entry) - state = hass.states.get("media_player.spotify_spotify_1") - assert state - assert ATTR_ENTITY_PICTURE not in state.attributes diff --git a/tests/components/spotify/test_sensor.py b/tests/components/spotify/test_sensor.py deleted file mode 100644 index 11ce361034a..00000000000 --- a/tests/components/spotify/test_sensor.py +++ /dev/null @@ -1,66 +0,0 @@ -"""Tests for the Spotify sensor platform.""" - -from unittest.mock import MagicMock, patch - -import pytest -from spotifyaio import PlaybackState -from syrupy import SnapshotAssertion - -from homeassistant.components.spotify import DOMAIN -from homeassistant.const import STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, load_fixture, snapshot_platform - - -@pytest.mark.usefixtures("setup_credentials") -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_entities( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Spotify entities.""" - with patch("homeassistant.components.spotify.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.usefixtures("setup_credentials") -async def test_audio_features_unavailable( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Spotify entities.""" - mock_spotify.return_value.get_audio_features.return_value = None - - await setup_integration(hass, mock_config_entry) - - assert hass.states.get("sensor.spotify_spotify_1_song_tempo").state == STATE_UNKNOWN - - -@pytest.mark.usefixtures("setup_credentials") -async def test_audio_features_unknown_during_podcast( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Spotify audio features sensor during a podcast.""" - mock_spotify.return_value.get_playback.return_value = PlaybackState.from_json( - load_fixture("playback_episode.json", DOMAIN) - ) - - await setup_integration(hass, mock_config_entry) - - assert hass.states.get("sensor.spotify_spotify_1_song_tempo").state == STATE_UNKNOWN diff --git a/tests/components/squeezebox/conftest.py b/tests/components/squeezebox/conftest.py deleted file mode 100644 index 2dc0cabeaa6..00000000000 --- a/tests/components/squeezebox/conftest.py +++ /dev/null @@ -1,289 +0,0 @@ -"""Setup the squeezebox tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest - -from homeassistant.components.media_player import MediaType -from homeassistant.components.squeezebox import const -from homeassistant.components.squeezebox.browse_media import ( - MEDIA_TYPE_TO_SQUEEZEBOX, - SQUEEZEBOX_ID_BY_TYPE, -) -from homeassistant.components.squeezebox.const import ( - STATUS_QUERY_LIBRARYNAME, - STATUS_QUERY_MAC, - STATUS_QUERY_UUID, - STATUS_QUERY_VERSION, - STATUS_SENSOR_INFO_TOTAL_ALBUMS, - STATUS_SENSOR_INFO_TOTAL_ARTISTS, - STATUS_SENSOR_INFO_TOTAL_DURATION, - STATUS_SENSOR_INFO_TOTAL_GENRES, - STATUS_SENSOR_INFO_TOTAL_SONGS, - STATUS_SENSOR_LASTSCAN, - STATUS_SENSOR_OTHER_PLAYER_COUNT, - STATUS_SENSOR_PLAYER_COUNT, - STATUS_SENSOR_RESCAN, -) -from homeassistant.const import CONF_HOST, CONF_PORT, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import format_mac - -# from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry - -TEST_HOST = "1.2.3.4" -TEST_PORT = "9000" -TEST_USE_HTTPS = False -SERVER_UUIDS = [ - "12345678-1234-1234-1234-123456789012", - "87654321-4321-4321-4321-210987654321", -] -TEST_MAC = ["aa:bb:cc:dd:ee:ff", "ff:ee:dd:cc:bb:aa"] -TEST_PLAYER_NAME = "Test Player" -TEST_SERVER_NAME = "Test Server" -FAKE_VALID_ITEM_ID = "1234" -FAKE_INVALID_ITEM_ID = "4321" - -FAKE_IP = "42.42.42.42" -FAKE_MAC = "deadbeefdead" -FAKE_UUID = 
"deadbeefdeadbeefbeefdeafbeef42" -FAKE_PORT = 9000 -FAKE_VERSION = "42.0" - -FAKE_QUERY_RESPONSE = { - STATUS_QUERY_UUID: FAKE_UUID, - STATUS_QUERY_MAC: FAKE_MAC, - STATUS_QUERY_VERSION: FAKE_VERSION, - STATUS_SENSOR_RESCAN: 1, - STATUS_SENSOR_LASTSCAN: 0, - STATUS_QUERY_LIBRARYNAME: "FakeLib", - STATUS_SENSOR_INFO_TOTAL_ALBUMS: 4, - STATUS_SENSOR_INFO_TOTAL_ARTISTS: 2, - STATUS_SENSOR_INFO_TOTAL_DURATION: 500, - STATUS_SENSOR_INFO_TOTAL_GENRES: 1, - STATUS_SENSOR_INFO_TOTAL_SONGS: 42, - STATUS_SENSOR_PLAYER_COUNT: 10, - STATUS_SENSOR_OTHER_PLAYER_COUNT: 0, - "players_loop": [ - { - "isplaying": 0, - "name": "SqueezeLite-HA-Addon", - "seq_no": 0, - "modelname": "SqueezeLite-HA-Addon", - "playerindex": "status", - "model": "squeezelite", - "uuid": FAKE_UUID, - "canpoweroff": 1, - "ip": "192.168.78.86:57700", - "displaytype": "none", - "playerid": "f9:23:cd:37:c5:ff", - "power": 0, - "isplayer": 1, - "connected": 1, - "firmware": "v2.0.0-1488", - } - ], - "count": 1, -} - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.squeezebox.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def config_entry(hass: HomeAssistant) -> MockConfigEntry: - """Add the squeezebox mock config entry to hass.""" - config_entry = MockConfigEntry( - domain=const.DOMAIN, - unique_id=SERVER_UUIDS[0], - data={ - CONF_HOST: TEST_HOST, - CONF_PORT: TEST_PORT, - const.CONF_HTTPS: TEST_USE_HTTPS, - }, - ) - config_entry.add_to_hass(hass) - return config_entry - - -async def mock_async_browse( - media_type: MediaType, limit: int, browse_id: tuple | None = None -) -> dict | None: - """Mock the async_browse method of pysqueezebox.Player.""" - child_types = { - "favorites": "favorites", - "new music": "album", - "albums": "album", - "album": "track", - "genres": "genre", - "genre": "album", - "artists": "artist", - "artist": "album", - "titles": "title", - "title": "title", - "playlists": "playlist", - "playlist": "title", - } - fake_items = [ - { - "title": "Fake Item 1", - "id": FAKE_VALID_ITEM_ID, - "hasitems": False, - "item_type": child_types[media_type], - "artwork_track_id": "b35bb9e9", - "url": "file:///var/lib/squeezeboxserver/music/track_1.mp3", - }, - { - "title": "Fake Item 2", - "id": FAKE_VALID_ITEM_ID + "_2", - "hasitems": media_type == "favorites", - "item_type": child_types[media_type], - "image_url": "http://lms.internal:9000/html/images/favorites.png", - "url": "file:///var/lib/squeezeboxserver/music/track_2.mp3", - }, - { - "title": "Fake Item 3", - "id": FAKE_VALID_ITEM_ID + "_3", - "hasitems": media_type == "favorites", - "album_id": FAKE_VALID_ITEM_ID if media_type == "favorites" else None, - "url": "file:///var/lib/squeezeboxserver/music/track_3.mp3", - }, - ] - - if browse_id: - search_type, search_id = browse_id - if search_id: - if search_type == "playlist_id": - return ( - { - "title": "Fake Item 1", - "items": fake_items, - } - if search_id == FAKE_VALID_ITEM_ID - else None - ) - if search_type in SQUEEZEBOX_ID_BY_TYPE.values(): - for item in fake_items: - if item["id"] == search_id: - return { - "title": item["title"], - "items": [item], - } - return None - if search_type in SQUEEZEBOX_ID_BY_TYPE.values(): - return { - "title": search_type, - "items": fake_items, - } - return None - if media_type in MEDIA_TYPE_TO_SQUEEZEBOX.values(): - return { - "title": media_type, - "items": fake_items, - } - return None - - -@pytest.fixture -def player() 
-> MagicMock: - """Return a mock player.""" - return mock_pysqueezebox_player() - - -@pytest.fixture -def player_factory() -> MagicMock: - """Return a factory for creating mock players.""" - return mock_pysqueezebox_player - - -def mock_pysqueezebox_player(uuid: str) -> MagicMock: - """Mock a Lyrion Media Server player.""" - with patch( - "homeassistant.components.squeezebox.Player", autospec=True - ) as mock_player: - mock_player.async_browse = AsyncMock(side_effect=mock_async_browse) - mock_player.generate_image_url_from_track_id = MagicMock( - return_value="http://lms.internal:9000/html/images/favorites.png" - ) - mock_player.name = TEST_PLAYER_NAME - mock_player.player_id = uuid - mock_player.mode = "stop" - mock_player.playlist = None - mock_player.album = None - mock_player.artist = None - mock_player.remote_title = None - mock_player.title = None - mock_player.image_url = None - mock_player.model = "SqueezeLite" - - return mock_player - - -@pytest.fixture -def lms_factory(player_factory: MagicMock) -> MagicMock: - """Return a factory for creating mock Lyrion Media Servers with arbitrary number of players.""" - return lambda player_count, uuid: mock_pysqueezebox_server( - player_factory, player_count, uuid - ) - - -@pytest.fixture -def lms(player_factory: MagicMock) -> MagicMock: - """Mock a Lyrion Media Server with one mock player attached.""" - return mock_pysqueezebox_server(player_factory, 1, uuid=TEST_MAC[0]) - - -def mock_pysqueezebox_server( - player_factory: MagicMock, player_count: int, uuid: str -) -> MagicMock: - """Create a mock Lyrion Media Server with the given number of mock players attached.""" - with patch("homeassistant.components.squeezebox.Server", autospec=True) as mock_lms: - players = [player_factory(TEST_MAC[index]) for index in range(player_count)] - mock_lms.async_get_players = AsyncMock(return_value=players) - - mock_lms.uuid = uuid - mock_lms.name = TEST_SERVER_NAME - mock_lms.async_query = AsyncMock(return_value={"uuid": format_mac(uuid)}) - mock_lms.async_status = AsyncMock(return_value={"uuid": format_mac(uuid)}) - return mock_lms - - -async def configure_squeezebox_media_player_platform( - hass: HomeAssistant, - config_entry: MockConfigEntry, - lms: MagicMock, -) -> None: - """Configure a squeezebox config entry with appropriate mocks for media_player.""" - with ( - patch("homeassistant.components.squeezebox.PLATFORMS", [Platform.MEDIA_PLAYER]), - patch("homeassistant.components.squeezebox.Server", return_value=lms), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - - -@pytest.fixture -async def configured_player( - hass: HomeAssistant, config_entry: MockConfigEntry, lms: MagicMock -) -> MagicMock: - """Fixture mocking calls to pysqueezebox Player from a configured squeezebox.""" - await configure_squeezebox_media_player_platform(hass, config_entry, lms) - return (await lms.async_get_players())[0] - - -@pytest.fixture -async def configured_players( - hass: HomeAssistant, config_entry: MockConfigEntry, lms_factory: MagicMock -) -> list[MagicMock]: - """Fixture mocking calls to two pysqueezebox Players from a configured squeezebox.""" - lms = lms_factory(2, uuid=SERVER_UUIDS[0]) - await configure_squeezebox_media_player_platform(hass, config_entry, lms) - return await lms.async_get_players() diff --git a/tests/components/squeezebox/snapshots/test_media_player.ambr b/tests/components/squeezebox/snapshots/test_media_player.ambr deleted file mode 100644 index 
ddd5b9868a1..00000000000 --- a/tests/components/squeezebox/snapshots/test_media_player.ambr +++ /dev/null @@ -1,99 +0,0 @@ -# serializer version: 1 -# name: test_device_registry - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - tuple( - 'mac', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'squeezebox', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Ralph Irving', - 'model': 'SqueezeLite', - 'model_id': None, - 'name': 'Test Player', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': , - }) -# --- -# name: test_entity_registry[media_player.test_player-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'media_player', - 'entity_category': None, - 'entity_id': 'media_player.test_player', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'squeezebox', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'aa:bb:cc:dd:ee:ff', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_registry[media_player.test_player-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Player', - 'group_members': list([ - ]), - 'is_volume_muted': True, - 'media_album_name': 'None', - 'media_artist': 'None', - 'media_channel': 'None', - 'media_duration': 1, - 'media_position': 1, - 'media_title': 'None', - 'query_result': dict({ - }), - 'repeat': , - 'shuffle': False, - 'supported_features': , - 'volume_level': 0.01, - }), - 'context': , - 'entity_id': 'media_player.test_player', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'idle', - }) -# --- diff --git a/tests/components/squeezebox/test_binary_sensor.py b/tests/components/squeezebox/test_binary_sensor.py deleted file mode 100644 index 71cb5ceb105..00000000000 --- a/tests/components/squeezebox/test_binary_sensor.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Test squeezebox binary sensors.""" - -from copy import deepcopy -from unittest.mock import patch - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from .conftest import FAKE_QUERY_RESPONSE - -from tests.common import MockConfigEntry - - -async def test_binary_sensor( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test binary sensor states and attributes.""" - with ( - patch( - "homeassistant.components.squeezebox.PLATFORMS", - [Platform.BINARY_SENSOR], - ), - patch( - "homeassistant.components.squeezebox.Server.async_query", - return_value=deepcopy(FAKE_QUERY_RESPONSE), - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - - state = hass.states.get("binary_sensor.fakelib_needs_restart") - - assert state is not None - assert state.state == "off" diff --git a/tests/components/squeezebox/test_init.py b/tests/components/squeezebox/test_init.py deleted file mode 100644 index 9074f57cdcb..00000000000 --- 
a/tests/components/squeezebox/test_init.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Test squeezebox initialization.""" - -from unittest.mock import patch - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def test_init_api_fail( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test init fail due to API fail.""" - - # Setup component to fail... - with ( - patch( - "homeassistant.components.squeezebox.Server.async_query", - return_value=False, - ), - ): - assert not await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/squeezebox/test_media_browser.py b/tests/components/squeezebox/test_media_browser.py deleted file mode 100644 index c03c1b6344d..00000000000 --- a/tests/components/squeezebox/test_media_browser.py +++ /dev/null @@ -1,216 +0,0 @@ -"""Test the media browser interface.""" - -from unittest.mock import MagicMock, patch - -import pytest - -from homeassistant.components.media_player import ( - ATTR_MEDIA_CONTENT_ID, - ATTR_MEDIA_CONTENT_TYPE, - DOMAIN as MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - BrowseError, - MediaType, -) -from homeassistant.components.squeezebox.browse_media import ( - LIBRARY, - MEDIA_TYPE_TO_SQUEEZEBOX, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry -from tests.typing import WebSocketGenerator - - -@pytest.fixture(autouse=True) -async def setup_integration( - hass: HomeAssistant, config_entry: MockConfigEntry, lms: MagicMock -) -> None: - """Fixture for setting up the component.""" - with ( - patch("homeassistant.components.squeezebox.Server", return_value=lms), - patch( - "homeassistant.components.squeezebox.PLATFORMS", - [Platform.MEDIA_PLAYER], - ), - patch( - "homeassistant.components.squeezebox.media_player.start_server_discovery" - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - - -async def test_async_browse_media_root( - hass: HomeAssistant, - config_entry: MockConfigEntry, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test the async_browse_media function at the root level.""" - - client = await hass_ws_client() - await client.send_json( - { - "id": 1, - "type": "media_player/browse_media", - "entity_id": "media_player.test_player", - "media_content_id": "", - "media_content_type": "library", - } - ) - response = await client.receive_json() - assert response["success"] - result = response["result"] - for idx, item in enumerate(result["children"]): - assert item["title"] == LIBRARY[idx] - - -async def test_async_browse_media_with_subitems( - hass: HomeAssistant, - config_entry: MockConfigEntry, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test each category with subitems.""" - for category in ( - "Favorites", - "Artists", - "Albums", - "Playlists", - "Genres", - "New Music", - ): - with patch( - "homeassistant.components.squeezebox.browse_media.is_internal_request", - return_value=False, - ): - client = await hass_ws_client() - await client.send_json( - { - "id": 1, - "type": "media_player/browse_media", - "entity_id": "media_player.test_player", - "media_content_id": "", - "media_content_type": category, - } - ) - response = await client.receive_json() - assert response["success"] - category_level = response["result"] - assert category_level["title"] == MEDIA_TYPE_TO_SQUEEZEBOX[category] - assert category_level["children"][0]["title"] == 
"Fake Item 1" - - # Look up a subitem - search_type = category_level["children"][0]["media_content_type"] - search_id = category_level["children"][0]["media_content_id"] - await client.send_json( - { - "id": 2, - "type": "media_player/browse_media", - "entity_id": "media_player.test_player", - "media_content_id": search_id, - "media_content_type": search_type, - } - ) - response = await client.receive_json() - assert response["success"] - search = response["result"] - assert search["title"] == "Fake Item 1" - - -async def test_async_browse_tracks( - hass: HomeAssistant, - config_entry: MockConfigEntry, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test tracks (no subitems).""" - with patch( - "homeassistant.components.squeezebox.browse_media.is_internal_request", - return_value=True, - ): - client = await hass_ws_client() - await client.send_json( - { - "id": 1, - "type": "media_player/browse_media", - "entity_id": "media_player.test_player", - "media_content_id": "", - "media_content_type": "Tracks", - } - ) - response = await client.receive_json() - assert response["success"] - tracks = response["result"] - assert tracks["title"] == "titles" - assert len(tracks["children"]) == 3 - - -async def test_async_browse_error( - hass: HomeAssistant, - config_entry: MockConfigEntry, - hass_ws_client: WebSocketGenerator, -) -> None: - """Search for a non-existent item and assert error.""" - client = await hass_ws_client() - await client.send_json( - { - "id": 1, - "type": "media_player/browse_media", - "entity_id": "media_player.test_player", - "media_content_id": "0", - "media_content_type": MediaType.ALBUM, - } - ) - response = await client.receive_json() - assert not response["success"] - - -async def test_play_browse_item( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test play browse item.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_CONTENT_ID: "1234", - ATTR_MEDIA_CONTENT_TYPE: "album", - }, - ) - - -async def test_play_browse_item_nonexistent( - hass: HomeAssistant, config_entry: MockConfigEntry -) -> None: - """Test trying to play an item that doesn't exist.""" - with pytest.raises(BrowseError): - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_CONTENT_ID: "0", - ATTR_MEDIA_CONTENT_TYPE: "album", - }, - blocking=True, - ) - - -async def test_play_browse_item_bad_category( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test trying to play an item whose category doesn't exist.""" - with pytest.raises(BrowseError): - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_CONTENT_ID: "1234", - ATTR_MEDIA_CONTENT_TYPE: "bad_category", - }, - blocking=True, - ) diff --git a/tests/components/squeezebox/test_media_player.py b/tests/components/squeezebox/test_media_player.py deleted file mode 100644 index 080a2161b4d..00000000000 --- a/tests/components/squeezebox/test_media_player.py +++ /dev/null @@ -1,816 +0,0 @@ -"""Tests for the squeezebox media player component.""" - -from datetime import timedelta -import json -from unittest.mock import AsyncMock, MagicMock, patch - -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.media_player import ( - ATTR_GROUP_MEMBERS, - 
ATTR_MEDIA_CONTENT_ID, - ATTR_MEDIA_CONTENT_TYPE, - ATTR_MEDIA_ENQUEUE, - ATTR_MEDIA_POSITION, - ATTR_MEDIA_POSITION_UPDATED_AT, - ATTR_MEDIA_REPEAT, - ATTR_MEDIA_SEEK_POSITION, - ATTR_MEDIA_SHUFFLE, - ATTR_MEDIA_VOLUME_LEVEL, - ATTR_MEDIA_VOLUME_MUTED, - DOMAIN as MEDIA_PLAYER_DOMAIN, - SERVICE_CLEAR_PLAYLIST, - SERVICE_JOIN, - SERVICE_PLAY_MEDIA, - SERVICE_UNJOIN, - MediaPlayerEnqueue, - MediaPlayerState, - MediaType, - RepeatMode, -) -from homeassistant.components.squeezebox.const import ( - DISCOVERY_INTERVAL, - DOMAIN, - PLAYER_UPDATE_INTERVAL, - SENSOR_UPDATE_INTERVAL, -) -from homeassistant.components.squeezebox.media_player import ( - ATTR_PARAMETERS, - SERVICE_CALL_METHOD, - SERVICE_CALL_QUERY, -) -from homeassistant.const import ( - ATTR_COMMAND, - ATTR_ENTITY_ID, - SERVICE_MEDIA_NEXT_TRACK, - SERVICE_MEDIA_PAUSE, - SERVICE_MEDIA_PLAY, - SERVICE_MEDIA_PLAY_PAUSE, - SERVICE_MEDIA_PREVIOUS_TRACK, - SERVICE_MEDIA_SEEK, - SERVICE_MEDIA_STOP, - SERVICE_REPEAT_SET, - SERVICE_SHUFFLE_SET, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - SERVICE_VOLUME_DOWN, - SERVICE_VOLUME_MUTE, - SERVICE_VOLUME_SET, - SERVICE_VOLUME_UP, - STATE_UNAVAILABLE, - STATE_UNKNOWN, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers.device_registry import DeviceRegistry -from homeassistant.helpers.entity_registry import EntityRegistry -from homeassistant.util.dt import utcnow - -from .conftest import FAKE_VALID_ITEM_ID, TEST_MAC - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - - -async def test_device_registry( - hass: HomeAssistant, - device_registry: DeviceRegistry, - configured_player: MagicMock, - snapshot: SnapshotAssertion, -) -> None: - """Test squeezebox device registered in the device registry.""" - reg_device = device_registry.async_get_device(identifiers={(DOMAIN, TEST_MAC[0])}) - assert reg_device is not None - assert reg_device == snapshot - - -async def test_entity_registry( - hass: HomeAssistant, - entity_registry: EntityRegistry, - configured_player: MagicMock, - snapshot: SnapshotAssertion, - config_entry: MockConfigEntry, -) -> None: - """Test squeezebox media_player entity registered in the entity registry.""" - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -async def test_squeezebox_player_rediscovery( - hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory -) -> None: - """Test rediscovery of a squeezebox player.""" - - assert hass.states.get("media_player.test_player").state == MediaPlayerState.IDLE - - # Make the player appear unavailable - configured_player.connected = False - freezer.tick(timedelta(seconds=PLAYER_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert hass.states.get("media_player.test_player").state == STATE_UNAVAILABLE - - # Make the player available again - configured_player.connected = True - freezer.tick(timedelta(seconds=DISCOVERY_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - freezer.tick(timedelta(seconds=PLAYER_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert hass.states.get("media_player.test_player").state == MediaPlayerState.IDLE - - -async def test_squeezebox_turn_on( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test turn on service call.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_TURN_ON, - 
{ATTR_ENTITY_ID: "media_player.test_player"}, - blocking=True, - ) - configured_player.async_set_power.assert_called_once_with(True) - - -async def test_squeezebox_turn_off( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test turn off service call.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "media_player.test_player"}, - blocking=True, - ) - configured_player.async_set_power.assert_called_once_with(False) - - -async def test_squeezebox_state( - hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory -) -> None: - """Test determining the MediaPlayerState.""" - - configured_player.power = True - configured_player.mode = "stop" - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert hass.states.get("media_player.test_player").state == MediaPlayerState.IDLE - - configured_player.mode = "play" - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert hass.states.get("media_player.test_player").state == MediaPlayerState.PLAYING - - configured_player.mode = "pause" - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert hass.states.get("media_player.test_player").state == MediaPlayerState.PAUSED - - configured_player.power = False - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert hass.states.get("media_player.test_player").state == MediaPlayerState.OFF - - -async def test_squeezebox_volume_up( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test volume up service call.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_UP, - {ATTR_ENTITY_ID: "media_player.test_player"}, - blocking=True, - ) - configured_player.async_set_volume.assert_called_once_with("+5") - - -async def test_squeezebox_volume_down( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test volume down service call.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_DOWN, - {ATTR_ENTITY_ID: "media_player.test_player"}, - blocking=True, - ) - configured_player.async_set_volume.assert_called_once_with("-5") - - -async def test_squeezebox_volume_set( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test volume set service call.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_SET, - {ATTR_ENTITY_ID: "media_player.test_player", ATTR_MEDIA_VOLUME_LEVEL: 0.5}, - blocking=True, - ) - configured_player.async_set_volume.assert_called_once_with("50") - - -async def test_squeezebox_volume_property( - hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory -) -> None: - """Test volume property.""" - - configured_player.volume = 50 - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert ( - hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_VOLUME_LEVEL] - == 0.5 - ) - - configured_player.volume = None - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert ( - ATTR_MEDIA_VOLUME_LEVEL - not in hass.states.get("media_player.test_player").attributes - ) - - -async def test_squeezebox_mute( - 
hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test mute service call.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_MUTE, - {ATTR_ENTITY_ID: "media_player.test_player", ATTR_MEDIA_VOLUME_MUTED: True}, - blocking=True, - ) - configured_player.async_set_muting.assert_called_once_with(True) - - -async def test_squeezebox_unmute( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test unmute service call.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_MUTE, - {ATTR_ENTITY_ID: "media_player.test_player", ATTR_MEDIA_VOLUME_MUTED: False}, - blocking=True, - ) - configured_player.async_set_muting.assert_called_once_with(False) - - -async def test_squeezebox_mute_property( - hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory -) -> None: - """Test the mute property.""" - - configured_player.muting = True - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert ( - hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_VOLUME_MUTED] - is True - ) - - configured_player.muting = False - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert ( - hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_VOLUME_MUTED] - is False - ) - - -async def test_squeezebox_repeat_mode( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test set repeat mode service call.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_REPEAT_SET, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_REPEAT: RepeatMode.ALL, - }, - blocking=True, - ) - configured_player.async_set_repeat.assert_called_once_with("playlist") - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_REPEAT_SET, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_REPEAT: RepeatMode.ONE, - }, - blocking=True, - ) - configured_player.async_set_repeat.assert_called_with("song") - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_REPEAT_SET, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_REPEAT: RepeatMode.OFF, - }, - blocking=True, - ) - configured_player.async_set_repeat.assert_called_with("none") - - -async def test_squeezebox_repeat_mode_property( - hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory -) -> None: - """Test the repeat mode property.""" - configured_player.repeat = "playlist" - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert ( - hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_REPEAT] - == RepeatMode.ALL - ) - - configured_player.repeat = "song" - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert ( - hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_REPEAT] - == RepeatMode.ONE - ) - - configured_player.repeat = "none" - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert ( - hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_REPEAT] - == RepeatMode.OFF - ) - - -async def test_squeezebox_shuffle( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test set shuffle service call.""" - await 
hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_SHUFFLE_SET, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_SHUFFLE: True, - }, - blocking=True, - ) - configured_player.async_set_shuffle.assert_called_once_with("song") - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_SHUFFLE_SET, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_SHUFFLE: False, - }, - blocking=True, - ) - configured_player.async_set_shuffle.assert_called_with("none") - assert ( - hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_SHUFFLE] - is False - ) - - -async def test_squeezebox_shuffle_property( - hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory -) -> None: - """Test the shuffle property.""" - - configured_player.shuffle = "song" - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert ( - hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_SHUFFLE] - is True - ) - - configured_player.shuffle = "none" - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert ( - hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_SHUFFLE] - is False - ) - - -async def test_squeezebox_play( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test play service call.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_PLAY, - {ATTR_ENTITY_ID: "media_player.test_player"}, - blocking=True, - ) - configured_player.async_play.assert_called_once() - - -async def test_squeezebox_play_pause( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test play/pause service call.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_PLAY_PAUSE, - {ATTR_ENTITY_ID: "media_player.test_player"}, - blocking=True, - ) - configured_player.async_toggle_pause.assert_called_once() - - -async def test_squeezebox_pause( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test pause service call.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_PAUSE, - {ATTR_ENTITY_ID: "media_player.test_player"}, - blocking=True, - ) - configured_player.async_pause.assert_called_once() - - -async def test_squeezebox_seek( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test seek service call.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_CONTENT_ID: FAKE_VALID_ITEM_ID, - ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, - }, - blocking=True, - ) - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_SEEK, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_SEEK_POSITION: 100, - }, - blocking=True, - ) - configured_player.async_time.assert_called_once_with(100) - - -async def test_squeezebox_stop( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test stop service call.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_STOP, - {ATTR_ENTITY_ID: "media_player.test_player"}, - blocking=True, - ) - configured_player.async_stop.assert_called_once() - - -async def test_squeezebox_load_playlist( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test load a playlist.""" - # load a playlist by number - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, 
- { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_CONTENT_ID: FAKE_VALID_ITEM_ID, - ATTR_MEDIA_CONTENT_TYPE: MediaType.PLAYLIST, - }, - blocking=True, - ) - assert configured_player.async_load_playlist.call_count == 1 - - # load a list of urls - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_CONTENT_ID: json.dumps( - { - "urls": [ - {"url": FAKE_VALID_ITEM_ID}, - {"url": FAKE_VALID_ITEM_ID + "_2"}, - ], - "index": "0", - } - ), - ATTR_MEDIA_CONTENT_TYPE: MediaType.PLAYLIST, - }, - blocking=True, - ) - assert configured_player.async_load_playlist.call_count == 2 - - # clear the playlist - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_CLEAR_PLAYLIST, - {ATTR_ENTITY_ID: "media_player.test_player"}, - blocking=True, - ) - configured_player.async_clear_playlist.assert_called_once() - - -async def test_squeezebox_enqueue( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test the various enqueue service calls.""" - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_CONTENT_ID: FAKE_VALID_ITEM_ID, - ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, - ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, - }, - blocking=True, - ) - configured_player.async_load_url.assert_called_once_with(FAKE_VALID_ITEM_ID, "add") - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_CONTENT_ID: FAKE_VALID_ITEM_ID, - ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, - ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.NEXT, - }, - blocking=True, - ) - configured_player.async_load_url.assert_called_with(FAKE_VALID_ITEM_ID, "insert") - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_CONTENT_ID: FAKE_VALID_ITEM_ID, - ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, - ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.PLAY, - }, - blocking=True, - ) - configured_player.async_load_url.assert_called_with(FAKE_VALID_ITEM_ID, "play_now") - - -async def test_squeezebox_skip_tracks( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test track skipping service calls.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_MEDIA_CONTENT_ID: FAKE_VALID_ITEM_ID, - ATTR_MEDIA_CONTENT_TYPE: MediaType.PLAYLIST, - }, - blocking=True, - ) - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_NEXT_TRACK, - {ATTR_ENTITY_ID: "media_player.test_player"}, - blocking=True, - ) - configured_player.async_index.assert_called_once_with("+1") - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_PREVIOUS_TRACK, - {ATTR_ENTITY_ID: "media_player.test_player"}, - blocking=True, - ) - configured_player.async_index.assert_called_with("-1") - - -async def test_squeezebox_call_query( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test query service call.""" - await hass.services.async_call( - DOMAIN, - SERVICE_CALL_QUERY, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_COMMAND: "test_command", - ATTR_PARAMETERS: ["param1", "param2"], - }, - blocking=True, - ) - configured_player.async_query.assert_called_once_with( - "test_command", "param1", "param2" - ) - - -async def test_squeezebox_call_method( - hass: HomeAssistant, 
configured_player: MagicMock -) -> None: - """Test method call service call.""" - await hass.services.async_call( - DOMAIN, - SERVICE_CALL_METHOD, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_COMMAND: "test_command", - ATTR_PARAMETERS: ["param1", "param2"], - }, - blocking=True, - ) - configured_player.async_query.assert_called_once_with( - "test_command", "param1", "param2" - ) - - -async def test_squeezebox_invalid_state( - hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory -) -> None: - """Test handling an unexpected state from pysqueezebox.""" - configured_player.mode = "invalid" - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert hass.states.get("media_player.test_player").state == STATE_UNKNOWN - - -async def test_squeezebox_server_discovery( - hass: HomeAssistant, - lms: MagicMock, - lms_factory: MagicMock, - config_entry: MockConfigEntry, -) -> None: - """Test discovery of a squeezebox server.""" - - async def mock_async_discover(callback): - """Mock the async_discover function of pysqueezebox.""" - return callback(lms_factory(2)) - - with ( - patch( - "homeassistant.components.squeezebox.Server", - return_value=lms, - ), - patch( - "homeassistant.components.squeezebox.media_player.async_discover", - mock_async_discover, - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - # how do we check that a config flow started? - - -async def test_squeezebox_join(hass: HomeAssistant, configured_players: list) -> None: - """Test joining a squeezebox player.""" - - # join a valid player - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_JOIN, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_GROUP_MEMBERS: ["media_player.test_player_2"], - }, - blocking=True, - ) - configured_players[0].async_sync.assert_called_once_with( - configured_players[1].player_id - ) - - # try to join an invalid player - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_JOIN, - { - ATTR_ENTITY_ID: "media_player.test_player", - ATTR_GROUP_MEMBERS: ["media_player.invalid"], - }, - blocking=True, - ) - - -async def test_squeezebox_unjoin( - hass: HomeAssistant, configured_player: MagicMock -) -> None: - """Test unjoining a squeezebox player.""" - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_UNJOIN, - {ATTR_ENTITY_ID: "media_player.test_player"}, - blocking=True, - ) - configured_player.async_unsync.assert_called_once() - - -async def test_squeezebox_media_content_properties( - hass: HomeAssistant, - configured_player: MagicMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test media_content_id and media_content_type properties.""" - playlist_urls = [ - {"url": "test_title"}, - {"url": "test_title_2"}, - ] - configured_player.current_index = 0 - configured_player.playlist = playlist_urls - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert hass.states.get("media_player.test_player").attributes[ - ATTR_MEDIA_CONTENT_ID - ] == json.dumps({"index": 0, "urls": playlist_urls}) - assert ( - hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_CONTENT_TYPE] - == MediaType.PLAYLIST - ) - - configured_player.url = "test_url" - configured_player.playlist = [{"url": "test_url"}] - 
    freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL))
-    async_fire_time_changed(hass)
-    await hass.async_block_till_done()
-    assert (
-        hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_CONTENT_ID]
-        == "test_url"
-    )
-    assert (
-        hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_CONTENT_TYPE]
-        == MediaType.MUSIC
-    )
-
-    configured_player.playlist = None
-    configured_player.url = None
-    freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL))
-    async_fire_time_changed(hass)
-    await hass.async_block_till_done()
-    assert (
-        ATTR_MEDIA_CONTENT_ID
-        not in hass.states.get("media_player.test_player").attributes
-    )
-    assert (
-        ATTR_MEDIA_CONTENT_TYPE
-        not in hass.states.get("media_player.test_player").attributes
-    )
-
-
-async def test_squeezebox_media_position_property(
-    hass: HomeAssistant, configured_player: MagicMock, freezer: FrozenDateTimeFactory
-) -> None:
-    """Test media_position property."""
-    configured_player.time = 100
-    configured_player.async_update = AsyncMock(
-        side_effect=lambda: setattr(configured_player, "time", 105)
-    )
-    last_update = utcnow()
-    freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL))
-    async_fire_time_changed(hass)
-    await hass.async_block_till_done()
-    assert (
-        hass.states.get("media_player.test_player").attributes[ATTR_MEDIA_POSITION]
-        == 105
-    )
-    assert (
-        (
-            hass.states.get("media_player.test_player").attributes[
-                ATTR_MEDIA_POSITION_UPDATED_AT
-            ]
-        )
-        > last_update
-    )
diff --git a/tests/components/squeezebox/test_sensor.py b/tests/components/squeezebox/test_sensor.py
deleted file mode 100644
index c262c2a0e7c..00000000000
--- a/tests/components/squeezebox/test_sensor.py
+++ /dev/null
@@ -1,34 +0,0 @@
-"""Test squeezebox sensors."""
-
-from copy import deepcopy
-from unittest.mock import patch
-
-from homeassistant.const import Platform
-from homeassistant.core import HomeAssistant
-
-from .conftest import FAKE_QUERY_RESPONSE
-
-from tests.common import MockConfigEntry
-
-
-async def test_sensor(hass: HomeAssistant, config_entry: MockConfigEntry) -> None:
-    """Test sensor states and attributes."""
-
-    # Setup component
-    with (
-        patch(
-            "homeassistant.components.squeezebox.PLATFORMS",
-            [Platform.SENSOR],
-        ),
-        patch(
-            "homeassistant.components.squeezebox.Server.async_query",
-            return_value=deepcopy(FAKE_QUERY_RESPONSE),
-        ),
-    ):
-        await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done(wait_background_tasks=True)
-
-    state = hass.states.get("sensor.fakelib_player_count")
-
-    assert state is not None
-    assert state.state == "10"
diff --git a/tests/components/srp_energy/conftest.py b/tests/components/srp_energy/conftest.py
index b612bc9f3f3..45eb726443f 100644
--- a/tests/components/srp_energy/conftest.py
+++ b/tests/components/srp_energy/conftest.py
@@ -2,12 +2,12 @@
 
 from __future__ import annotations
 
-from collections.abc import Generator
 import datetime as dt
 from unittest.mock import MagicMock, patch
 
 from freezegun.api import FrozenDateTimeFactory
 import pytest
+from typing_extensions import Generator
 
 from homeassistant.components.srp_energy.const import DOMAIN, PHOENIX_TIME_ZONE
 from homeassistant.const import CONF_ID
diff --git a/tests/components/srp_energy/test_sensor.py b/tests/components/srp_energy/test_sensor.py
index 025d9fe49ca..7369d07f77a 100644
--- a/tests/components/srp_energy/test_sensor.py
+++ b/tests/components/srp_energy/test_sensor.py
@@ -1,5 +1,6 @@
 """Tests for the srp_energy sensor platform."""
 
+import time
 from unittest.mock import patch
 
 from requests.models import HTTPError
@@ -79,7 +80,7 @@ async def test_srp_entity_timeout(
     ):
         client = srp_energy_mock.return_value
         client.validate.return_value = True
-        client.usage = lambda _, __, ___: None
+        client.usage = lambda _, __, ___: time.sleep(1)
 
         mock_config_entry.add_to_hass(hass)
         await hass.config_entries.async_setup(mock_config_entry.entry_id)
diff --git a/tests/components/ssdp/conftest.py b/tests/components/ssdp/conftest.py
index ac0ac7298a8..8b06163cd95 100644
--- a/tests/components/ssdp/conftest.py
+++ b/tests/components/ssdp/conftest.py
@@ -1,14 +1,11 @@
 """Configuration for SSDP tests."""
 
-from collections.abc import Generator
 from unittest.mock import AsyncMock, patch
 
 from async_upnp_client.server import UpnpServer
 from async_upnp_client.ssdp_listener import SsdpListener
 import pytest
 
-from homeassistant.core import HomeAssistant
-
 
 @pytest.fixture(autouse=True)
 async def silent_ssdp_listener():
@@ -35,7 +32,7 @@ async def disabled_upnp_server():
 
 
 @pytest.fixture
-def mock_flow_init(hass: HomeAssistant) -> Generator[AsyncMock]:
+def mock_flow_init(hass):
     """Mock hass.config_entries.flow.async_init."""
     with patch.object(
         hass.config_entries.flow, "async_init", return_value=AsyncMock()
diff --git a/tests/components/ssdp/test_init.py b/tests/components/ssdp/test_init.py
index 7dc0f0095d4..d10496500d2 100644
--- a/tests/components/ssdp/test_init.py
+++ b/tests/components/ssdp/test_init.py
@@ -18,16 +18,10 @@ from homeassistant.const import (
     MATCH_ALL,
 )
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.discovery_flow import DiscoveryKey
 from homeassistant.setup import async_setup_component
 import homeassistant.util.dt as dt_util
 
-from tests.common import (
-    MockConfigEntry,
-    MockModule,
-    async_fire_time_changed,
-    mock_integration,
-)
+from tests.common import async_fire_time_changed
 from tests.test_util.aiohttp import AiohttpClientMocker
 
 
@@ -71,8 +65,7 @@ async def test_ssdp_flow_dispatched_on_st(
     assert len(mock_flow_init.mock_calls) == 1
     assert mock_flow_init.mock_calls[0][1][0] == "mock-domain"
     assert mock_flow_init.mock_calls[0][2]["context"] == {
-        "discovery_key": DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1),
-        "source": config_entries.SOURCE_SSDP,
+        "source": config_entries.SOURCE_SSDP
     }
     mock_call_data: ssdp.SsdpServiceInfo = mock_flow_init.mock_calls[0][2]["data"]
     assert mock_call_data.ssdp_st == "mock-st"
@@ -115,8 +108,7 @@ async def test_ssdp_flow_dispatched_on_manufacturer_url(
     assert len(mock_flow_init.mock_calls) == 1
     assert mock_flow_init.mock_calls[0][1][0] == "mock-domain"
     assert mock_flow_init.mock_calls[0][2]["context"] == {
-        "discovery_key": DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1),
-        "source": config_entries.SOURCE_SSDP,
+        "source": config_entries.SOURCE_SSDP
     }
     mock_call_data: ssdp.SsdpServiceInfo = mock_flow_init.mock_calls[0][2]["data"]
     assert mock_call_data.ssdp_st == "mock-st"
@@ -171,8 +163,7 @@ async def test_scan_match_upnp_devicedesc_manufacturer(
     assert len(mock_flow_init.mock_calls) == 1
     assert mock_flow_init.mock_calls[0][1][0] == "mock-domain"
     assert mock_flow_init.mock_calls[0][2]["context"] == {
-        "discovery_key": DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1),
-        "source": config_entries.SOURCE_SSDP,
+        "source": config_entries.SOURCE_SSDP
     }
 
 
@@ -217,8 +208,7 @@ async def test_scan_match_upnp_devicedesc_devicetype(
     assert len(mock_flow_init.mock_calls) == 1
     assert mock_flow_init.mock_calls[0][1][0] == "mock-domain"
     assert
mock_flow_init.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1), - "source": config_entries.SOURCE_SSDP, + "source": config_entries.SOURCE_SSDP } @@ -349,14 +339,7 @@ async def test_flow_start_only_alive( await hass.async_block_till_done(wait_background_tasks=True) mock_flow_init.assert_awaited_once_with( - "mock-domain", - context={ - "discovery_key": DiscoveryKey( - domain="ssdp", key="uuid:mock-udn", version=1 - ), - "source": config_entries.SOURCE_SSDP, - }, - data=ANY, + "mock-domain", context={"source": config_entries.SOURCE_SSDP}, data=ANY ) # ssdp:alive advertisement should start a flow @@ -373,14 +356,7 @@ async def test_flow_start_only_alive( ssdp_listener._on_alive(mock_ssdp_advertisement) await hass.async_block_till_done() mock_flow_init.assert_awaited_once_with( - "mock-domain", - context={ - "discovery_key": DiscoveryKey( - domain="ssdp", key="uuid:mock-udn", version=1 - ), - "source": config_entries.SOURCE_SSDP, - }, - data=ANY, + "mock-domain", context={"source": config_entries.SOURCE_SSDP}, data=ANY ) # ssdp:byebye advertisement should not start a flow @@ -396,14 +372,7 @@ async def test_flow_start_only_alive( ssdp_listener._on_update(mock_ssdp_advertisement) await hass.async_block_till_done() mock_flow_init.assert_awaited_once_with( - "mock-domain", - context={ - "discovery_key": DiscoveryKey( - domain="ssdp", key="uuid:mock-udn", version=1 - ), - "source": config_entries.SOURCE_SSDP, - }, - data=ANY, + "mock-domain", context={"source": config_entries.SOURCE_SSDP}, data=ANY ) @@ -855,14 +824,7 @@ async def test_flow_dismiss_on_byebye( await hass.async_block_till_done(wait_background_tasks=True) mock_flow_init.assert_awaited_once_with( - "mock-domain", - context={ - "discovery_key": DiscoveryKey( - domain="ssdp", key="uuid:mock-udn", version=1 - ), - "source": config_entries.SOURCE_SSDP, - }, - data=ANY, + "mock-domain", context={"source": config_entries.SOURCE_SSDP}, data=ANY ) # ssdp:alive advertisement should start a flow @@ -879,14 +841,7 @@ async def test_flow_dismiss_on_byebye( ssdp_listener._on_alive(mock_ssdp_advertisement) await hass.async_block_till_done(wait_background_tasks=True) mock_flow_init.assert_awaited_once_with( - "mock-domain", - context={ - "discovery_key": DiscoveryKey( - domain="ssdp", key="uuid:mock-udn", version=1 - ), - "source": config_entries.SOURCE_SSDP, - }, - data=ANY, + "mock-domain", context={"source": config_entries.SOURCE_SSDP}, data=ANY ) mock_ssdp_advertisement["nts"] = "ssdp:byebye" @@ -904,193 +859,3 @@ async def test_flow_dismiss_on_byebye( assert len(mock_async_progress_by_init_data_type.mock_calls) == 1 assert mock_async_abort.mock_calls[0][1][0] == "mock_flow_id" - - -@patch( - "homeassistant.components.ssdp.async_get_ssdp", - return_value={"mock-domain": [{"st": "mock-st"}]}, -) -@pytest.mark.parametrize( - ( - "entry_domain", - "entry_discovery_keys", - ), - [ - # Matching discovery key - ( - "mock-domain", - {"ssdp": (DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1),)}, - ), - # Matching discovery key - ( - "mock-domain", - { - "ssdp": (DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1),), - "other": (DiscoveryKey(domain="other", key="blah", version=1),), - }, - ), - # Matching discovery key, other domain - # Note: Rediscovery is not currently restricted to the domain of the removed - # entry. Such a check can be added if needed. 
- ( - "comp", - {"ssdp": (DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1),)}, - ), - ], -) -@pytest.mark.parametrize( - "entry_source", - [ - config_entries.SOURCE_IGNORE, - config_entries.SOURCE_SSDP, - config_entries.SOURCE_USER, - ], -) -async def test_ssdp_rediscover( - mock_get_ssdp, - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - mock_flow_init, - entry_domain: str, - entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], - entry_source: str, -) -> None: - """Test we reinitiate flows when an ignored config entry is removed.""" - entry = MockConfigEntry( - domain=entry_domain, - discovery_keys=entry_discovery_keys, - unique_id="mock-unique-id", - state=config_entries.ConfigEntryState.LOADED, - source=entry_source, - ) - entry.add_to_hass(hass) - - mock_ssdp_search_response = _ssdp_headers( - { - "st": "mock-st", - "location": "http://1.1.1.1", - "usn": "uuid:mock-udn::mock-st", - "server": "mock-server", - "ext": "", - "_source": "search", - } - ) - aioclient_mock.get( - "http://1.1.1.1", - text=""" - - - Paulus - Paulus - - - """, - ) - ssdp_listener = await init_ssdp_component(hass) - ssdp_listener._on_search(mock_ssdp_search_response) - await hass.async_block_till_done() - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - await hass.async_block_till_done() - - expected_context = { - "discovery_key": DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1), - "source": config_entries.SOURCE_SSDP, - } - assert len(mock_flow_init.mock_calls) == 1 - assert mock_flow_init.mock_calls[0][1][0] == "mock-domain" - assert mock_flow_init.mock_calls[0][2]["context"] == expected_context - mock_call_data: ssdp.SsdpServiceInfo = mock_flow_init.mock_calls[0][2]["data"] - assert mock_call_data.ssdp_st == "mock-st" - assert mock_call_data.ssdp_location == "http://1.1.1.1" - - await hass.config_entries.async_remove(entry.entry_id) - await hass.async_block_till_done() - - assert len(mock_flow_init.mock_calls) == 2 - assert mock_flow_init.mock_calls[1][1][0] == "mock-domain" - assert mock_flow_init.mock_calls[1][2]["context"] == expected_context - assert ( - mock_flow_init.mock_calls[1][2]["data"] - == mock_flow_init.mock_calls[0][2]["data"] - ) - - -@patch( - "homeassistant.components.ssdp.async_get_ssdp", - return_value={"mock-domain": [{"st": "mock-st"}]}, -) -@pytest.mark.parametrize( - ( - "entry_domain", - "entry_discovery_keys", - "entry_source", - "entry_unique_id", - ), - [ - # Discovery key from other domain - ( - "mock-domain", - {"dhcp": (DiscoveryKey(domain="dhcp", key="uuid:mock-udn", version=1),)}, - config_entries.SOURCE_IGNORE, - "mock-unique-id", - ), - # Discovery key from the future - ( - "mock-domain", - {"ssdp": (DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=2),)}, - config_entries.SOURCE_IGNORE, - "mock-unique-id", - ), - ], -) -async def test_ssdp_rediscover_no_match( - mock_get_ssdp, - hass: HomeAssistant, - mock_flow_init, - entry_domain: str, - entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], - entry_source: str, - entry_unique_id: str, -) -> None: - """Test we don't reinitiate flows when a non matching config entry is removed.""" - mock_integration(hass, MockModule(entry_domain)) - entry = MockConfigEntry( - domain=entry_domain, - discovery_keys=entry_discovery_keys, - unique_id=entry_unique_id, - state=config_entries.ConfigEntryState.LOADED, - source=entry_source, - ) - entry.add_to_hass(hass) - - mock_ssdp_search_response = _ssdp_headers( - { - "st": "mock-st", - "location": "http://1.1.1.1", - "usn": 
"uuid:mock-udn::mock-st", - "server": "mock-server", - "ext": "", - "_source": "search", - } - ) - ssdp_listener = await init_ssdp_component(hass) - ssdp_listener._on_search(mock_ssdp_search_response) - await hass.async_block_till_done() - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - await hass.async_block_till_done() - - expected_context = { - "discovery_key": DiscoveryKey(domain="ssdp", key="uuid:mock-udn", version=1), - "source": config_entries.SOURCE_SSDP, - } - assert len(mock_flow_init.mock_calls) == 1 - assert mock_flow_init.mock_calls[0][1][0] == "mock-domain" - assert mock_flow_init.mock_calls[0][2]["context"] == expected_context - mock_call_data: ssdp.SsdpServiceInfo = mock_flow_init.mock_calls[0][2]["data"] - assert mock_call_data.ssdp_st == "mock-st" - assert mock_call_data.ssdp_location == "http://1.1.1.1" - - await hass.config_entries.async_remove(entry.entry_id) - await hass.async_block_till_done() - - assert len(mock_flow_init.mock_calls) == 1 diff --git a/tests/components/startca/test_sensor.py b/tests/components/startca/test_sensor.py index be5524eb650..b0d43af1cae 100644 --- a/tests/components/startca/test_sensor.py +++ b/tests/components/startca/test_sensor.py @@ -2,11 +2,11 @@ from http import HTTPStatus +from homeassistant.bootstrap import async_setup_component from homeassistant.components.startca.sensor import StartcaData from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, UnitOfInformation from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.setup import async_setup_component from tests.test_util.aiohttp import AiohttpClientMocker diff --git a/tests/components/statistics/snapshots/test_config_flow.ambr b/tests/components/statistics/snapshots/test_config_flow.ambr deleted file mode 100644 index 5f79c56dec7..00000000000 --- a/tests/components/statistics/snapshots/test_config_flow.ambr +++ /dev/null @@ -1,48 +0,0 @@ -# serializer version: 1 -# name: test_config_flow_preview_success[missing_size_and_age] - dict({ - 'attributes': dict({ - 'friendly_name': 'Statistical characteristic', - 'icon': 'mdi:calculator', - }), - 'state': 'unavailable', - }) -# --- -# name: test_config_flow_preview_success[success] - dict({ - 'attributes': dict({ - 'buffer_usage_ratio': 0.1, - 'friendly_name': 'Statistical characteristic', - 'icon': 'mdi:calculator', - 'source_value_valid': True, - 'state_class': 'measurement', - }), - 'state': '16.0', - }) -# --- -# name: test_options_flow_preview - dict({ - 'attributes': dict({ - 'age_coverage_ratio': 0.0, - 'buffer_usage_ratio': 0.05, - 'friendly_name': 'Statistical characteristic', - 'icon': 'mdi:calculator', - 'source_value_valid': True, - 'state_class': 'measurement', - }), - 'state': '16.0', - }) -# --- -# name: test_options_flow_preview[updated] - dict({ - 'attributes': dict({ - 'age_coverage_ratio': 0.0, - 'buffer_usage_ratio': 0.1, - 'friendly_name': 'Statistical characteristic', - 'icon': 'mdi:calculator', - 'source_value_valid': True, - 'state_class': 'measurement', - }), - 'state': '20.0', - }) -# --- diff --git a/tests/components/statistics/test_config_flow.py b/tests/components/statistics/test_config_flow.py index 77ccba5ba4c..7c9ed5bed47 100644 --- a/tests/components/statistics/test_config_flow.py +++ b/tests/components/statistics/test_config_flow.py @@ -4,11 +4,7 @@ from __future__ import annotations from unittest.mock import AsyncMock -import pytest -from syrupy import SnapshotAssertion - from homeassistant import 
config_entries -from homeassistant.components.recorder import Recorder from homeassistant.components.statistics import DOMAIN from homeassistant.components.statistics.sensor import ( CONF_KEEP_LAST_SAMPLE, @@ -20,14 +16,12 @@ from homeassistant.components.statistics.sensor import ( DEFAULT_NAME, STAT_AVERAGE_LINEAR, STAT_COUNT, - STAT_VALUE_MAX, ) from homeassistant.const import CONF_ENTITY_ID, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry -from tests.typing import WebSocketGenerator async def test_form_sensor(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: @@ -277,204 +271,3 @@ async def test_entry_already_exist( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize( - "user_input", - [ - ( - { - CONF_SAMPLES_MAX_BUFFER_SIZE: 10.0, - CONF_KEEP_LAST_SAMPLE: False, - CONF_PERCENTILE: 50, - CONF_PRECISION: 2, - } - ), - ( - { - CONF_KEEP_LAST_SAMPLE: False, - CONF_PERCENTILE: 50, - CONF_PRECISION: 2, - } - ), - ], - ids=("success", "missing_size_and_age"), -) -async def test_config_flow_preview_success( - recorder_mock: Recorder, - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - user_input: str, - snapshot: SnapshotAssertion, -) -> None: - """Test the config flow preview.""" - client = await hass_ws_client(hass) - - # add state for the tests - hass.states.async_set("sensor.test_monitored", "16") - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] == FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] is None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_ENTITY_ID: "sensor.test_monitored", - }, - ) - await hass.async_block_till_done() - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_STATE_CHARACTERISTIC: STAT_VALUE_MAX, - }, - ) - await hass.async_block_till_done() - assert result["type"] == FlowResultType.FORM - assert result["step_id"] == "options" - assert result["errors"] is None - assert result["preview"] == "statistics" - - await client.send_json_auto_id( - { - "type": "statistics/start_preview", - "flow_id": result["flow_id"], - "flow_type": "config_flow", - "user_input": user_input, - } - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] is None - - msg = await client.receive_json() - assert msg["event"] == snapshot - assert len(hass.states.async_all()) == 1 - - -async def test_options_flow_preview( - recorder_mock: Recorder, - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test the options flow preview.""" - client = await hass_ws_client(hass) - - # add state for the tests - hass.states.async_set("sensor.test_monitored", "16") - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - CONF_NAME: DEFAULT_NAME, - CONF_ENTITY_ID: "sensor.test_monitored", - CONF_STATE_CHARACTERISTIC: STAT_VALUE_MAX, - CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, - CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, - CONF_KEEP_LAST_SAMPLE: False, - CONF_PERCENTILE: 50.0, - CONF_PRECISION: 2.0, - }, - title=DEFAULT_NAME, - ) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await 
hass.async_block_till_done() - - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] == FlowResultType.FORM - assert result["errors"] is None - assert result["preview"] == "statistics" - - await client.send_json_auto_id( - { - "type": "statistics/start_preview", - "flow_id": result["flow_id"], - "flow_type": "options_flow", - "user_input": { - CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, - CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, - CONF_KEEP_LAST_SAMPLE: False, - CONF_PERCENTILE: 50.0, - CONF_PRECISION: 2.0, - }, - } - ) - - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] is None - - msg = await client.receive_json() - assert msg["event"] == snapshot - assert len(hass.states.async_all()) == 2 - - # add state for the tests - hass.states.async_set("sensor.test_monitored", "20") - await hass.async_block_till_done() - - msg = await client.receive_json() - assert msg["event"] == snapshot(name="updated") - - -async def test_options_flow_sensor_preview_config_entry_removed( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test the option flow preview where the config entry is removed.""" - client = await hass_ws_client(hass) - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - CONF_NAME: DEFAULT_NAME, - CONF_ENTITY_ID: "sensor.test_monitored", - CONF_STATE_CHARACTERISTIC: STAT_AVERAGE_LINEAR, - CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, - CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, - CONF_KEEP_LAST_SAMPLE: False, - CONF_PERCENTILE: 50.0, - CONF_PRECISION: 2.0, - }, - title=DEFAULT_NAME, - ) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] == FlowResultType.FORM - assert result["errors"] is None - assert result["preview"] == "statistics" - - await hass.config_entries.async_remove(config_entry.entry_id) - - await client.send_json_auto_id( - { - "type": "statistics/start_preview", - "flow_id": result["flow_id"], - "flow_type": "options_flow", - "user_input": { - CONF_SAMPLES_MAX_BUFFER_SIZE: 25.0, - CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, - CONF_KEEP_LAST_SAMPLE: False, - CONF_PERCENTILE: 50.0, - CONF_PRECISION: 2.0, - }, - } - ) - msg = await client.receive_json() - assert not msg["success"] - assert msg["error"] == { - "code": "home_assistant_error", - "message": "Config entry not found", - } diff --git a/tests/components/statistics/test_init.py b/tests/components/statistics/test_init.py index 64829ea7d66..6cb943c0687 100644 --- a/tests/components/statistics/test_init.py +++ b/tests/components/statistics/test_init.py @@ -2,10 +2,8 @@ from __future__ import annotations -from homeassistant.components.statistics import DOMAIN as STATISTICS_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry @@ -17,93 +15,3 @@ async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) assert await hass.config_entries.async_unload(loaded_entry.entry_id) await hass.async_block_till_done() assert loaded_entry.state is ConfigEntryState.NOT_LOADED - - -async def test_device_cleaning( - hass: HomeAssistant, - device_registry: 
dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test the cleaning of devices linked to the helper Statistics.""" - - # Source entity device config entry - source_config_entry = MockConfigEntry() - source_config_entry.add_to_hass(hass) - - # Device entry of the source entity - source_device1_entry = device_registry.async_get_or_create( - config_entry_id=source_config_entry.entry_id, - identifiers={("sensor", "identifier_test1")}, - connections={("mac", "30:31:32:33:34:01")}, - ) - - # Source entity registry - source_entity = entity_registry.async_get_or_create( - "sensor", - "test", - "source", - config_entry=source_config_entry, - device_id=source_device1_entry.id, - ) - await hass.async_block_till_done() - assert entity_registry.async_get("sensor.test_source") is not None - - # Configure the configuration entry for Statistics - statistics_config_entry = MockConfigEntry( - data={}, - domain=STATISTICS_DOMAIN, - options={ - "name": "Statistics", - "entity_id": "sensor.test_source", - "state_characteristic": "mean", - "keep_last_sample": False, - "percentile": 50.0, - "precision": 2.0, - "sampling_size": 20.0, - }, - title="Statistics", - ) - statistics_config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(statistics_config_entry.entry_id) - await hass.async_block_till_done() - - # Confirm the link between the source entity device and the statistics sensor - statistics_entity = entity_registry.async_get("sensor.statistics") - assert statistics_entity is not None - assert statistics_entity.device_id == source_entity.device_id - - # Device entry incorrectly linked to Statistics config entry - device_registry.async_get_or_create( - config_entry_id=statistics_config_entry.entry_id, - identifiers={("sensor", "identifier_test2")}, - connections={("mac", "30:31:32:33:34:02")}, - ) - device_registry.async_get_or_create( - config_entry_id=statistics_config_entry.entry_id, - identifiers={("sensor", "identifier_test3")}, - connections={("mac", "30:31:32:33:34:03")}, - ) - await hass.async_block_till_done() - - # Before reloading the config entry, two devices are expected to be linked - devices_before_reload = device_registry.devices.get_devices_for_config_entry_id( - statistics_config_entry.entry_id - ) - assert len(devices_before_reload) == 3 - - # Config entry reload - await hass.config_entries.async_reload(statistics_config_entry.entry_id) - await hass.async_block_till_done() - - # Confirm the link between the source entity device and the statistics sensor - statistics_entity = entity_registry.async_get("sensor.statistics") - assert statistics_entity is not None - assert statistics_entity.device_id == source_entity.device_id - - # After reloading the config entry, only one linked device is expected - devices_after_reload = device_registry.devices.get_devices_for_config_entry_id( - statistics_config_entry.entry_id - ) - assert len(devices_after_reload) == 1 - - assert devices_after_reload[0].id == source_device1_entry.id diff --git a/tests/components/statistics/test_sensor.py b/tests/components/statistics/test_sensor.py index 7e2bc1cb16b..269c17e34b9 100644 --- a/tests/components/statistics/test_sensor.py +++ b/tests/components/statistics/test_sensor.py @@ -2,11 +2,9 @@ from __future__ import annotations -from asyncio import Event as AsyncioEvent from collections.abc import Sequence from datetime import datetime, timedelta import statistics -from threading import Event from typing import Any from unittest.mock import patch @@ -14,7 +12,7 @@ from 
freezegun import freeze_time import pytest from homeassistant import config as hass_config -from homeassistant.components.recorder import Recorder, history +from homeassistant.components.recorder import Recorder from homeassistant.components.sensor import ( ATTR_STATE_CLASS, SensorDeviceClass, @@ -43,7 +41,7 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -52,7 +50,6 @@ from tests.components.recorder.common import async_wait_recording_done VALUES_BINARY = ["on", "off", "on", "off", "on", "off", "on", "off", "on"] VALUES_NUMERIC = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] -VALUES_NUMERIC_LINEAR = [1, 2, 3, 4, 5, 6, 7, 8, 9] async def test_unique_id( @@ -250,15 +247,8 @@ async def test_sensor_defaults_binary(hass: HomeAssistant) -> None: assert "age_coverage_ratio" not in state.attributes -async def test_sensor_state_reported(hass: HomeAssistant) -> None: - """Test the behavior of the sensor with a sequence of identical values. - - Forced updates no longer make a difference, since the statistics are now reacting not - only to state change events but also to state report events (EVENT_STATE_REPORTED). - This means repeating values will be added to the buffer repeatedly in both cases. - This fixes problems with time based averages and some other functions that behave - differently when repeating values are reported. - """ +async def test_sensor_source_with_force_update(hass: HomeAssistant) -> None: + """Test the behavior of the sensor when the source sensor force-updates with same value.""" repeating_values = [18, 0, 0, 0, 0, 0, 0, 0, 9] assert await async_setup_component( hass, @@ -301,9 +291,9 @@ async def test_sensor_state_reported(hass: HomeAssistant) -> None: state_normal = hass.states.get("sensor.test_normal") state_force = hass.states.get("sensor.test_force") assert state_normal and state_force - assert state_normal.state == str(round(sum(repeating_values) / 9, 2)) + assert state_normal.state == str(round(sum(repeating_values) / 3, 2)) assert state_force.state == str(round(sum(repeating_values) / 9, 2)) - assert state_normal.attributes.get("buffer_usage_ratio") == round(9 / 20, 2) + assert state_normal.attributes.get("buffer_usage_ratio") == round(3 / 20, 2) assert state_force.attributes.get("buffer_usage_ratio") == round(9 / 20, 2) @@ -1023,7 +1013,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "average_linear", "value_0": STATE_UNKNOWN, - "value_1": 6.0, + "value_1": STATE_UNKNOWN, "value_9": 10.68, "unit": "°C", }, @@ -1031,7 +1021,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "average_step", "value_0": STATE_UNKNOWN, - "value_1": 6.0, + "value_1": STATE_UNKNOWN, "value_9": 11.36, "unit": "°C", }, @@ -1123,7 +1113,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "distance_95_percent_of_values", "value_0": STATE_UNKNOWN, - "value_1": 0.0, + "value_1": STATE_UNKNOWN, "value_9": float(round(2 * 1.96 * statistics.stdev(VALUES_NUMERIC), 2)), "unit": "°C", }, @@ -1131,7 +1121,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": 
"distance_99_percent_of_values", "value_0": STATE_UNKNOWN, - "value_1": 0.0, + "value_1": STATE_UNKNOWN, "value_9": float(round(2 * 2.58 * statistics.stdev(VALUES_NUMERIC), 2)), "unit": "°C", }, @@ -1171,7 +1161,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "noisiness", "value_0": STATE_UNKNOWN, - "value_1": 0.0, + "value_1": STATE_UNKNOWN, "value_9": float(round(sum([3, 4.8, 10.2, 1.2, 5.4, 2.5, 7.3, 8]) / 8, 2)), "unit": "°C", }, @@ -1179,7 +1169,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "percentile", "value_0": STATE_UNKNOWN, - "value_1": 6.0, + "value_1": STATE_UNKNOWN, "value_9": 9.2, "unit": "°C", }, @@ -1187,7 +1177,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "standard_deviation", "value_0": STATE_UNKNOWN, - "value_1": 0.0, + "value_1": STATE_UNKNOWN, "value_9": float(round(statistics.stdev(VALUES_NUMERIC), 2)), "unit": "°C", }, @@ -1203,7 +1193,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "sum_differences", "value_0": STATE_UNKNOWN, - "value_1": 0.0, + "value_1": STATE_UNKNOWN, "value_9": float( sum( [ @@ -1224,7 +1214,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "sum_differences_nonnegative", "value_0": STATE_UNKNOWN, - "value_1": 0.0, + "value_1": STATE_UNKNOWN, "value_9": float( sum( [ @@ -1269,7 +1259,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "variance", "value_0": STATE_UNKNOWN, - "value_1": 0.0, + "value_1": STATE_UNKNOWN, "value_9": float(round(statistics.variance(VALUES_NUMERIC), 2)), "unit": "°C²", }, @@ -1277,7 +1267,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "binary_sensor", "name": "average_step", "value_0": STATE_UNKNOWN, - "value_1": 100.0, + "value_1": STATE_UNKNOWN, "value_9": 50.0, "unit": "%", }, @@ -1664,371 +1654,3 @@ async def test_reload(recorder_mock: Recorder, hass: HomeAssistant) -> None: assert hass.states.get("sensor.test") is None assert hass.states.get("sensor.cputest") - - -async def test_device_id( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device_registry: dr.DeviceRegistry, -) -> None: - """Test for source entity device for Statistics.""" - source_config_entry = MockConfigEntry() - source_config_entry.add_to_hass(hass) - source_device_entry = device_registry.async_get_or_create( - config_entry_id=source_config_entry.entry_id, - identifiers={("sensor", "identifier_test")}, - connections={("mac", "30:31:32:33:34:35")}, - ) - source_entity = entity_registry.async_get_or_create( - "sensor", - "test", - "source", - config_entry=source_config_entry, - device_id=source_device_entry.id, - ) - await hass.async_block_till_done() - assert entity_registry.async_get("sensor.test_source") is not None - - statistics_config_entry = MockConfigEntry( - data={}, - domain=STATISTICS_DOMAIN, - options={ - "name": "Statistics", - "entity_id": "sensor.test_source", - "state_characteristic": "mean", - "keep_last_sample": False, - "percentile": 50.0, - "precision": 2.0, - "sampling_size": 20.0, - }, - title="Statistics", - ) - statistics_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(statistics_config_entry.entry_id) - await hass.async_block_till_done() - - 
statistics_entity = entity_registry.async_get("sensor.statistics") - assert statistics_entity is not None - assert statistics_entity.device_id == source_entity.device_id - - -async def test_update_before_load(recorder_mock: Recorder, hass: HomeAssistant) -> None: - """Verify that updates happening before reloading from the database are handled correctly.""" - - current_time = dt_util.utcnow() - - # enable and pre-fill the recorder - await hass.async_block_till_done() - await async_wait_recording_done(hass) - - with ( - freeze_time(current_time) as freezer, - ): - for value in VALUES_NUMERIC_LINEAR: - hass.states.async_set( - "sensor.test_monitored", - str(value), - {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, - ) - await hass.async_block_till_done() - current_time += timedelta(seconds=1) - freezer.move_to(current_time) - - await async_wait_recording_done(hass) - - # some synchronisation is needed to prevent that loading from the database finishes too soon - # we want this to take long enough to be able to try to add a value BEFORE loading is done - state_changes_during_period_called_evt = AsyncioEvent() - state_changes_during_period_stall_evt = Event() - real_state_changes_during_period = history.state_changes_during_period - - def mock_state_changes_during_period(*args, **kwargs): - states = real_state_changes_during_period(*args, **kwargs) - hass.loop.call_soon_threadsafe(state_changes_during_period_called_evt.set) - state_changes_during_period_stall_evt.wait() - return states - - # create the statistics component, get filled from database - with patch( - "homeassistant.components.statistics.sensor.history.state_changes_during_period", - mock_state_changes_during_period, - ): - assert await async_setup_component( - hass, - "sensor", - { - "sensor": [ - { - "platform": "statistics", - "name": "test", - "entity_id": "sensor.test_monitored", - "state_characteristic": "average_step", - "max_age": {"seconds": 10}, - }, - ] - }, - ) - # adding this value is going to be ignored, since loading from the database hasn't finished yet - # if this value would be added before loading from the database is done - # it would mess up the order of the internal queue which is supposed to be sorted by time - await state_changes_during_period_called_evt.wait() - hass.states.async_set( - "sensor.test_monitored", - "10", - {ATTR_UNIT_OF_MEASUREMENT: DEGREE}, - ) - state_changes_during_period_stall_evt.set() - await hass.async_block_till_done() - - # we will end up with a buffer of [1 .. 9] (10 wasn't added) - # so the computed average_step is 1+2+3+4+5+6+7+8/8 = 4.5 - assert float(hass.states.get("sensor.test").state) == pytest.approx(4.5) - - -async def test_average_linear_unevenly_timed(hass: HomeAssistant) -> None: - """Test the average_linear state characteristic with unevenly distributed values. - - This also implicitly tests the correct timing of repeating values. 
- """ - values_and_times = [[5.0, 2], [10.0, 1], [10.0, 1], [10.0, 2], [5.0, 1]] - - current_time = dt_util.utcnow() - - with ( - freeze_time(current_time) as freezer, - ): - assert await async_setup_component( - hass, - "sensor", - { - "sensor": [ - { - "platform": "statistics", - "name": "test_sensor_average_linear", - "entity_id": "sensor.test_monitored", - "state_characteristic": "average_linear", - "max_age": {"seconds": 10}, - }, - ] - }, - ) - await hass.async_block_till_done() - - for value_and_time in values_and_times: - hass.states.async_set( - "sensor.test_monitored", - str(value_and_time[0]), - {ATTR_UNIT_OF_MEASUREMENT: DEGREE}, - ) - current_time += timedelta(seconds=value_and_time[1]) - freezer.move_to(current_time) - - await hass.async_block_till_done() - - state = hass.states.get("sensor.test_sensor_average_linear") - assert state is not None - assert state.state == "8.33", ( - "value mismatch for characteristic 'sensor/average_linear' - " - f"assert {state.state} == 8.33" - ) - - -async def test_sensor_unit_gets_removed(hass: HomeAssistant) -> None: - """Test when input lose its unit of measurement.""" - assert await async_setup_component( - hass, - "sensor", - { - "sensor": [ - { - "platform": "statistics", - "name": "test", - "entity_id": "sensor.test_monitored", - "state_characteristic": "mean", - "sampling_size": 10, - }, - ] - }, - ) - await hass.async_block_till_done() - - input_attributes = { - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE, - ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS, - } - - for value in VALUES_NUMERIC: - hass.states.async_set( - "sensor.test_monitored", - str(value), - input_attributes, - ) - await hass.async_block_till_done() - - state = hass.states.get("sensor.test") - assert state is not None - assert state.state == str(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)) - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - hass.states.async_set( - "sensor.test_monitored", - str(VALUES_NUMERIC[0]), - { - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE, - }, - ) - await hass.async_block_till_done() - - state = hass.states.get("sensor.test") - assert state is not None - assert state.state == "11.39" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - # Temperature device class is not valid with no unit of measurement - assert state.attributes.get(ATTR_DEVICE_CLASS) is None - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - for value in VALUES_NUMERIC: - hass.states.async_set( - "sensor.test_monitored", - str(value), - input_attributes, - ) - await hass.async_block_till_done() - - state = hass.states.get("sensor.test") - assert state is not None - assert state.state == "11.39" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - -async def test_sensor_device_class_gets_removed(hass: HomeAssistant) -> None: - """Test when device class gets removed.""" - assert await async_setup_component( - hass, - "sensor", - { - "sensor": [ - { - "platform": "statistics", - "name": "test", - "entity_id": 
"sensor.test_monitored", - "state_characteristic": "mean", - "sampling_size": 10, - }, - ] - }, - ) - await hass.async_block_till_done() - - input_attributes = { - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE, - ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS, - } - - for value in VALUES_NUMERIC: - hass.states.async_set( - "sensor.test_monitored", - str(value), - input_attributes, - ) - await hass.async_block_till_done() - - state = hass.states.get("sensor.test") - assert state is not None - assert state.state == str(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)) - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - hass.states.async_set( - "sensor.test_monitored", - str(VALUES_NUMERIC[0]), - { - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS, - }, - ) - await hass.async_block_till_done() - - state = hass.states.get("sensor.test") - assert state is not None - assert state.state == "11.39" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - assert state.attributes.get(ATTR_DEVICE_CLASS) is None - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - for value in VALUES_NUMERIC: - hass.states.async_set( - "sensor.test_monitored", - str(value), - input_attributes, - ) - await hass.async_block_till_done() - - state = hass.states.get("sensor.test") - assert state is not None - assert state.state == "11.39" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - -async def test_not_valid_device_class(hass: HomeAssistant) -> None: - """Test when not valid device class.""" - assert await async_setup_component( - hass, - "sensor", - { - "sensor": [ - { - "platform": "statistics", - "name": "test", - "entity_id": "sensor.test_monitored", - "state_characteristic": "mean", - "sampling_size": 10, - }, - ] - }, - ) - await hass.async_block_till_done() - - for value in VALUES_NUMERIC: - hass.states.async_set( - "sensor.test_monitored", - str(value), - { - ATTR_DEVICE_CLASS: SensorDeviceClass.DATE, - }, - ) - await hass.async_block_till_done() - - state = hass.states.get("sensor.test") - assert state is not None - assert state.state == str(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)) - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_DEVICE_CLASS) is None - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - hass.states.async_set( - "sensor.test_monitored", - str(10), - { - ATTR_DEVICE_CLASS: "not_exist", - }, - ) - await hass.async_block_till_done() - - state = hass.states.get("sensor.test") - assert state is not None - assert state.state == "10.69" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_DEVICE_CLASS) is None - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT diff --git a/tests/components/steam_online/test_config_flow.py b/tests/components/steam_online/test_config_flow.py index 140a8309ff9..a5bce80d890 100644 --- a/tests/components/steam_online/test_config_flow.py +++ 
b/tests/components/steam_online/test_config_flow.py @@ -5,8 +5,8 @@ from unittest.mock import patch import steam from homeassistant.components.steam_online.const import CONF_ACCOUNTS, DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_API_KEY +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.const import CONF_API_KEY, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import entity_registry as er @@ -111,10 +111,18 @@ async def test_flow_user_already_configured(hass: HomeAssistant) -> None: async def test_flow_reauth(hass: HomeAssistant) -> None: """Test reauth step.""" entry = create_entry(hass) - result = await entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" with patch_interface(): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + CONF_SOURCE: SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=CONF_DATA, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={}, diff --git a/tests/components/stream/conftest.py b/tests/components/stream/conftest.py index 39e4de13fed..3cf3de54940 100644 --- a/tests/components/stream/conftest.py +++ b/tests/components/stream/conftest.py @@ -13,14 +13,13 @@ so that it can inspect the output. from __future__ import annotations import asyncio -from collections.abc import Generator import logging import threading -from typing import Any from unittest.mock import Mock, patch from aiohttp import web import pytest +from typing_extensions import Generator from homeassistant.components.stream.core import StreamOutput from homeassistant.components.stream.worker import StreamState @@ -33,7 +32,7 @@ TEST_TIMEOUT = 7.0 # Lower than 9s home assistant timeout class WorkerSync: """Test fixture that intercepts stream worker calls to StreamOutput.""" - def __init__(self) -> None: + def __init__(self): """Initialize WorkerSync.""" self._event = None self._original = StreamState.discontinuity @@ -61,7 +60,7 @@ class WorkerSync: @pytest.fixture -def stream_worker_sync() -> Generator[WorkerSync]: +def stream_worker_sync(hass): """Patch StreamOutput to allow test to synchronize worker stream end.""" sync = WorkerSync() with patch( @@ -75,7 +74,7 @@ def stream_worker_sync() -> Generator[WorkerSync]: class HLSSync: """Test fixture that intercepts stream worker calls to StreamOutput.""" - def __init__(self) -> None: + def __init__(self): """Initialize HLSSync.""" self._request_event = asyncio.Event() self._original_recv = StreamOutput.recv @@ -92,7 +91,7 @@ class HLSSync: self.check_requests_ready() class SyncResponse(web.Response): - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) on_resp() diff --git a/tests/components/stream/test_hls.py b/tests/components/stream/test_hls.py index babd7c0b748..ce66848a2b1 100644 --- a/tests/components/stream/test_hls.py +++ b/tests/components/stream/test_hls.py @@ -54,7 +54,7 @@ async def setup_component(hass: HomeAssistant) -> None: class HlsClient: """Test fixture for fetching the hls stream.""" - def __init__(self, http_client, parsed_url) -> None: + def __init__(self, http_client, parsed_url): """Initialize 
HlsClient.""" self.http_client = http_client self.parsed_url = parsed_url diff --git a/tests/components/stream/test_recorder.py b/tests/components/stream/test_recorder.py index 8e079cded45..515f3fff82d 100644 --- a/tests/components/stream/test_recorder.py +++ b/tests/components/stream/test_recorder.py @@ -35,7 +35,7 @@ from tests.common import async_fire_time_changed @pytest.fixture(autouse=True) -async def stream_component(hass: HomeAssistant) -> None: +async def stream_component(hass): """Set up the component before each test.""" await async_setup_component(hass, "stream", {"stream": {}}) @@ -305,5 +305,7 @@ async def test_record_stream_rotate(hass: HomeAssistant, filename, h264_video) - # Assert assert os.path.exists(filename) - data = await hass.async_add_executor_job(Path(filename).read_bytes) - assert_mp4_has_transform_matrix(data, stream.dynamic_stream_settings.orientation) + with open(filename, "rb") as rotated_mp4: + assert_mp4_has_transform_matrix( + rotated_mp4.read(), stream.dynamic_stream_settings.orientation + ) diff --git a/tests/components/stream/test_worker.py b/tests/components/stream/test_worker.py index 73c51087ef1..2cb90c5ee9a 100644 --- a/tests/components/stream/test_worker.py +++ b/tests/components/stream/test_worker.py @@ -83,7 +83,7 @@ def filename(tmp_path: Path) -> str: @pytest.fixture(autouse=True) -def mock_stream_settings(hass: HomeAssistant) -> None: +def mock_stream_settings(hass): """Set the stream settings data in hass before each test.""" hass.data[DOMAIN] = { ATTR_SETTINGS: StreamSettings( @@ -100,7 +100,7 @@ def mock_stream_settings(hass: HomeAssistant) -> None: class FakeAvInputStream: """A fake pyav Stream.""" - def __init__(self, name, time_base) -> None: + def __init__(self, name, time_base): """Initialize the stream.""" self.name = name self.time_base = time_base @@ -142,7 +142,7 @@ class PacketSequence: exercise corner cases. 
""" - def __init__(self, num_packets) -> None: + def __init__(self, num_packets): """Initialize the sequence with the number of packets it provides.""" self.packet = 0 self.num_packets = num_packets @@ -160,7 +160,7 @@ class PacketSequence: class FakePacket(bytearray): # Be a bytearray so that memoryview works - def __init__(self) -> None: + def __init__(self): super().__init__(3) time_base = VIDEO_TIME_BASE @@ -181,7 +181,7 @@ class PacketSequence: class FakePyAvContainer: """A fake container returned by mock av.open for a stream.""" - def __init__(self, video_stream, audio_stream) -> None: + def __init__(self, video_stream, audio_stream): """Initialize the fake container.""" # Tests can override this to trigger different worker behavior self.packets = PacketSequence(0) @@ -209,7 +209,7 @@ class FakePyAvContainer: class FakePyAvBuffer: """Holds outputs of the decoded stream for tests to assert on results.""" - def __init__(self) -> None: + def __init__(self): """Initialize the FakePyAvBuffer.""" self.segments = [] self.audio_packets = [] @@ -220,7 +220,7 @@ class FakePyAvBuffer: """Create an output buffer that captures packets for test to examine.""" class FakeAvOutputStream: - def __init__(self, capture_packets) -> None: + def __init__(self, capture_packets): self.capture_packets = capture_packets self.type = "ignored-type" @@ -266,7 +266,7 @@ class FakePyAvBuffer: class MockPyAv: """Mocks out av.open.""" - def __init__(self, video=True, audio=False) -> None: + def __init__(self, video=True, audio=False): """Initialize the MockPyAv.""" video_stream = VIDEO_STREAM if video else None audio_stream = AUDIO_STREAM if audio else None @@ -283,12 +283,7 @@ class MockPyAv: return self.container -def run_worker( - hass: HomeAssistant, - stream: Stream, - stream_source: str, - stream_settings: StreamSettings | None = None, -) -> None: +def run_worker(hass, stream, stream_source, stream_settings=None): """Run the stream worker under test.""" stream_state = StreamState(hass, stream.outputs, stream._diagnostics) stream_worker( @@ -301,12 +296,7 @@ def run_worker( ) -async def async_decode_stream( - hass: HomeAssistant, - packets: PacketSequence, - py_av: MockPyAv | None = None, - stream_settings: StreamSettings | None = None, -) -> FakePyAvBuffer: +async def async_decode_stream(hass, packets, py_av=None, stream_settings=None): """Start a stream worker that decodes incoming stream packets into output segments.""" stream = Stream( hass, @@ -782,15 +772,12 @@ async def test_worker_log( with patch("av.open") as av_open: # pylint: disable-next=c-extension-no-member - av_open.side_effect = av.error.InvalidDataError( - code=-2, message="Invalid data", filename=stream_url - ) + av_open.side_effect = av.error.InvalidDataError(-2, "error") with pytest.raises(StreamWorkerError) as err: run_worker(hass, stream, stream_url) await hass.async_block_till_done() assert ( - str(err.value) - == f"Error opening stream (ERRORTYPE_-2, Invalid data, {redacted_url})" + str(err.value) == f"Error opening stream (ERRORTYPE_-2, error) {redacted_url}" ) assert stream_url not in caplog.text diff --git a/tests/components/streamlabswater/conftest.py b/tests/components/streamlabswater/conftest.py index 1bbdd3e9a08..5a53c7204fa 100644 --- a/tests/components/streamlabswater/conftest.py +++ b/tests/components/streamlabswater/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the StreamLabs tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from streamlabswater.streamlabswater 
import StreamlabsClient +from typing_extensions import Generator from homeassistant.components.streamlabswater import DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/stt/common.py b/tests/components/stt/common.py index f964fca6b67..e6c36c5b350 100644 --- a/tests/components/stt/common.py +++ b/tests/components/stt/common.py @@ -2,22 +2,11 @@ from __future__ import annotations -from collections.abc import AsyncIterable, Callable, Coroutine +from collections.abc import Callable, Coroutine from pathlib import Path from typing import Any -from homeassistant.components.stt import ( - AudioBitRates, - AudioChannels, - AudioCodecs, - AudioFormats, - AudioSampleRates, - Provider, - SpeechMetadata, - SpeechResult, - SpeechResultState, - SpeechToTextEntity, -) +from homeassistant.components.stt import Provider from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -25,80 +14,6 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from tests.common import MockPlatform, mock_platform -TEST_DOMAIN = "test" - - -class BaseProvider: - """Mock STT provider.""" - - fail_process_audio = False - - def __init__( - self, *, supported_languages: list[str] | None = None, text: str = "test_result" - ) -> None: - """Init test provider.""" - self._supported_languages = supported_languages or ["de", "de-CH", "en"] - self.calls: list[tuple[SpeechMetadata, AsyncIterable[bytes]]] = [] - self.received: list[bytes] = [] - self.text = text - - @property - def supported_languages(self) -> list[str]: - """Return a list of supported languages.""" - return self._supported_languages - - @property - def supported_formats(self) -> list[AudioFormats]: - """Return a list of supported formats.""" - return [AudioFormats.WAV, AudioFormats.OGG] - - @property - def supported_codecs(self) -> list[AudioCodecs]: - """Return a list of supported codecs.""" - return [AudioCodecs.PCM, AudioCodecs.OPUS] - - @property - def supported_bit_rates(self) -> list[AudioBitRates]: - """Return a list of supported bitrates.""" - return [AudioBitRates.BITRATE_16] - - @property - def supported_sample_rates(self) -> list[AudioSampleRates]: - """Return a list of supported samplerates.""" - return [AudioSampleRates.SAMPLERATE_16000] - - @property - def supported_channels(self) -> list[AudioChannels]: - """Return a list of supported channels.""" - return [AudioChannels.CHANNEL_MONO] - - async def async_process_audio_stream( - self, metadata: SpeechMetadata, stream: AsyncIterable[bytes] - ) -> SpeechResult: - """Process an audio stream.""" - self.calls.append((metadata, stream)) - async for data in stream: - if not data: - break - self.received.append(data) - if self.fail_process_audio: - return SpeechResult(None, SpeechResultState.ERROR) - - return SpeechResult(self.text, SpeechResultState.SUCCESS) - - -class MockSTTProvider(BaseProvider, Provider): - """Mock provider.""" - - url_path = TEST_DOMAIN - - -class MockSTTProviderEntity(BaseProvider, SpeechToTextEntity): - """Mock provider entity.""" - - url_path = "stt.test" - _attr_name = "test" - class MockSTTPlatform(MockPlatform): """Help to set up test stt service.""" diff --git a/tests/components/stt/test_init.py b/tests/components/stt/test_init.py index 92225123995..d28d9c308a7 100644 --- a/tests/components/stt/test_init.py +++ b/tests/components/stt/test_init.py @@ -1,15 +1,25 @@ """Test STT component setup.""" -from collections.abc import 
Generator, Iterable -from contextlib import ExitStack +from collections.abc import AsyncIterable from http import HTTPStatus from pathlib import Path from unittest.mock import AsyncMock import pytest +from typing_extensions import Generator from homeassistant.components.stt import ( DOMAIN, + AudioBitRates, + AudioChannels, + AudioCodecs, + AudioFormats, + AudioSampleRates, + Provider, + SpeechMetadata, + SpeechResult, + SpeechResultState, + SpeechToTextEntity, async_default_engine, async_get_provider, async_get_speech_to_text_engine, @@ -19,13 +29,7 @@ from homeassistant.core import HomeAssistant, State from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.setup import async_setup_component -from .common import ( - TEST_DOMAIN, - MockSTTProvider, - MockSTTProviderEntity, - mock_stt_entity_platform, - mock_stt_platform, -) +from .common import mock_stt_entity_platform, mock_stt_platform from tests.common import ( MockConfigEntry, @@ -37,40 +41,102 @@ from tests.common import ( ) from tests.typing import ClientSessionGenerator, WebSocketGenerator +TEST_DOMAIN = "test" + + +class BaseProvider: + """Mock provider.""" + + fail_process_audio = False + + def __init__(self) -> None: + """Init test provider.""" + self.calls: list[tuple[SpeechMetadata, AsyncIterable[bytes]]] = [] + + @property + def supported_languages(self) -> list[str]: + """Return a list of supported languages.""" + return ["de", "de-CH", "en"] + + @property + def supported_formats(self) -> list[AudioFormats]: + """Return a list of supported formats.""" + return [AudioFormats.WAV, AudioFormats.OGG] + + @property + def supported_codecs(self) -> list[AudioCodecs]: + """Return a list of supported codecs.""" + return [AudioCodecs.PCM, AudioCodecs.OPUS] + + @property + def supported_bit_rates(self) -> list[AudioBitRates]: + """Return a list of supported bitrates.""" + return [AudioBitRates.BITRATE_16] + + @property + def supported_sample_rates(self) -> list[AudioSampleRates]: + """Return a list of supported samplerates.""" + return [AudioSampleRates.SAMPLERATE_16000] + + @property + def supported_channels(self) -> list[AudioChannels]: + """Return a list of supported channels.""" + return [AudioChannels.CHANNEL_MONO] + + async def async_process_audio_stream( + self, metadata: SpeechMetadata, stream: AsyncIterable[bytes] + ) -> SpeechResult: + """Process an audio stream.""" + self.calls.append((metadata, stream)) + if self.fail_process_audio: + return SpeechResult(None, SpeechResultState.ERROR) + + return SpeechResult("test_result", SpeechResultState.SUCCESS) + + +class MockProvider(BaseProvider, Provider): + """Mock provider.""" + + url_path = TEST_DOMAIN + + +class MockProviderEntity(BaseProvider, SpeechToTextEntity): + """Mock provider entity.""" + + url_path = "stt.test" + _attr_name = "test" + @pytest.fixture -def mock_provider() -> MockSTTProvider: +def mock_provider() -> MockProvider: """Test provider fixture.""" - return MockSTTProvider() + return MockProvider() @pytest.fixture -def mock_provider_entity() -> MockSTTProviderEntity: +def mock_provider_entity() -> MockProviderEntity: """Test provider entity fixture.""" - return MockSTTProviderEntity() + return MockProviderEntity() class STTFlow(ConfigFlow): """Test flow.""" -@pytest.fixture(name="config_flow_test_domains") -def config_flow_test_domain_fixture() -> Iterable[str]: +@pytest.fixture(name="config_flow_test_domain") +def config_flow_test_domain_fixture() -> str: """Test domain fixture.""" - return (TEST_DOMAIN,) + return TEST_DOMAIN 
@pytest.fixture(autouse=True) def config_flow_fixture( - hass: HomeAssistant, config_flow_test_domains: Iterable[str] + hass: HomeAssistant, config_flow_test_domain: str ) -> Generator[None]: """Mock config flow.""" - for domain in config_flow_test_domains: - mock_platform(hass, f"{domain}.config_flow") + mock_platform(hass, f"{config_flow_test_domain}.config_flow") - with ExitStack() as stack: - for domain in config_flow_test_domains: - stack.enter_context(mock_config_flow(domain, STTFlow)) + with mock_config_flow(config_flow_test_domain, STTFlow): yield @@ -79,14 +145,14 @@ async def setup_fixture( hass: HomeAssistant, tmp_path: Path, request: pytest.FixtureRequest, -) -> MockSTTProvider | MockSTTProviderEntity: +) -> MockProvider | MockProviderEntity: """Set up the test environment.""" - provider: MockSTTProvider | MockSTTProviderEntity + provider: MockProvider | MockProviderEntity if request.param == "mock_setup": - provider = MockSTTProvider() + provider = MockProvider() await mock_setup(hass, tmp_path, provider) elif request.param == "mock_config_entry_setup": - provider = MockSTTProviderEntity() + provider = MockProviderEntity() await mock_config_entry_setup(hass, tmp_path, provider) else: raise RuntimeError("Invalid setup fixture") @@ -97,7 +163,7 @@ async def setup_fixture( async def mock_setup( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockSTTProvider, + mock_provider: MockProvider, ) -> None: """Set up a test provider.""" mock_stt_platform( @@ -113,7 +179,7 @@ async def mock_setup( async def mock_config_entry_setup( hass: HomeAssistant, tmp_path: Path, - mock_provider_entity: MockSTTProviderEntity, + mock_provider_entity: MockProviderEntity, test_domain: str = TEST_DOMAIN, ) -> MockConfigEntry: """Set up a test provider via config entry.""" @@ -165,7 +231,7 @@ async def mock_config_entry_setup( async def test_get_provider_info( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup: MockSTTProvider | MockSTTProviderEntity, + setup: MockProvider | MockProviderEntity, ) -> None: """Test engine that doesn't exist.""" client = await hass_client() @@ -187,7 +253,7 @@ async def test_get_provider_info( async def test_non_existing_provider( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup: MockSTTProvider | MockSTTProviderEntity, + setup: MockProvider | MockProviderEntity, ) -> None: """Test streaming to engine that doesn't exist.""" client = await hass_client() @@ -213,7 +279,7 @@ async def test_non_existing_provider( async def test_stream_audio( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup: MockSTTProvider | MockSTTProviderEntity, + setup: MockProvider | MockProviderEntity, ) -> None: """Test streaming audio and getting response.""" client = await hass_client() @@ -274,7 +340,7 @@ async def test_metadata_errors( header: str | None, status: int, error: str, - setup: MockSTTProvider | MockSTTProviderEntity, + setup: MockProvider | MockProviderEntity, ) -> None: """Test metadata errors.""" client = await hass_client() @@ -290,7 +356,7 @@ async def test_metadata_errors( async def test_get_provider( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockSTTProvider, + mock_provider: MockProvider, ) -> None: """Test we can get STT providers.""" await mock_setup(hass, tmp_path, mock_provider) @@ -301,7 +367,7 @@ async def test_get_provider( async def test_config_entry_unload( - hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockSTTProviderEntity + hass: HomeAssistant, tmp_path: Path, mock_provider_entity: 
MockProviderEntity ) -> None: """Test we can unload config entry.""" config_entry = await mock_config_entry_setup(hass, tmp_path, mock_provider_entity) @@ -313,7 +379,7 @@ async def test_config_entry_unload( async def test_restore_state( hass: HomeAssistant, tmp_path: Path, - mock_provider_entity: MockSTTProviderEntity, + mock_provider_entity: MockProviderEntity, ) -> None: """Test we restore state in the integration.""" entity_id = f"{DOMAIN}.{TEST_DOMAIN}" @@ -330,19 +396,15 @@ async def test_restore_state( @pytest.mark.parametrize( - ("setup", "engine_id", "extra_data"), - [ - ("mock_setup", "test", {"name": "test"}), - ("mock_config_entry_setup", "stt.test", {}), - ], + ("setup", "engine_id"), + [("mock_setup", "test"), ("mock_config_entry_setup", "stt.test")], indirect=["setup"], ) async def test_ws_list_engines( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - setup: MockSTTProvider | MockSTTProviderEntity, + setup: MockProvider | MockProviderEntity, engine_id: str, - extra_data: dict[str, str], ) -> None: """Test listing speech-to-text engines.""" client = await hass_ws_client() @@ -354,7 +416,6 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["de", "de-CH", "en"]} - | extra_data ] } @@ -363,7 +424,7 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": []} | extra_data] + "providers": [{"engine_id": engine_id, "supported_languages": []}] } await client.send_json_auto_id({"type": "stt/engine/list", "language": "en"}) @@ -371,9 +432,7 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [ - {"engine_id": engine_id, "supported_languages": ["en"]} | extra_data - ] + "providers": [{"engine_id": engine_id, "supported_languages": ["en"]}] } await client.send_json_auto_id({"type": "stt/engine/list", "language": "en-UK"}) @@ -381,9 +440,7 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [ - {"engine_id": engine_id, "supported_languages": ["en"]} | extra_data - ] + "providers": [{"engine_id": engine_id, "supported_languages": ["en"]}] } await client.send_json_auto_id({"type": "stt/engine/list", "language": "de"}) @@ -391,10 +448,7 @@ async def test_ws_list_engines( assert msg["type"] == "result" assert msg["success"] assert msg["result"] == { - "providers": [ - {"engine_id": engine_id, "supported_languages": ["de", "de-CH"]} - | extra_data - ] + "providers": [{"engine_id": engine_id, "supported_languages": ["de", "de-CH"]}] } await client.send_json_auto_id( @@ -404,10 +458,7 @@ async def test_ws_list_engines( assert msg["type"] == "result" assert msg["success"] assert msg["result"] == { - "providers": [ - {"engine_id": engine_id, "supported_languages": ["de-CH", "de"]} - | extra_data - ] + "providers": [{"engine_id": engine_id, "supported_languages": ["de-CH", "de"]}] } @@ -422,7 +473,7 @@ async def test_default_engine_none(hass: HomeAssistant, tmp_path: Path) -> None: async def test_default_engine( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockSTTProvider, + mock_provider: MockProvider, ) -> None: """Test async_default_engine.""" mock_stt_platform( @@ -438,7 +489,7 @@ async def test_default_engine( async def test_default_engine_entity( - hass: HomeAssistant, tmp_path: Path, mock_provider_entity: 
MockSTTProviderEntity + hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockProviderEntity ) -> None: """Test async_default_engine.""" await mock_config_entry_setup(hass, tmp_path, mock_provider_entity) @@ -446,25 +497,21 @@ async def test_default_engine_entity( assert async_default_engine(hass) == f"{DOMAIN}.{TEST_DOMAIN}" -@pytest.mark.parametrize("config_flow_test_domains", [("new_test",)]) -async def test_default_engine_prefer_entity( +@pytest.mark.parametrize("config_flow_test_domain", ["new_test"]) +async def test_default_engine_prefer_provider( hass: HomeAssistant, tmp_path: Path, - mock_provider_entity: MockSTTProviderEntity, - mock_provider: MockSTTProvider, - config_flow_test_domains: str, + mock_provider_entity: MockProviderEntity, + mock_provider: MockProvider, + config_flow_test_domain: str, ) -> None: - """Test async_default_engine. - - In this tests there's an entity and a legacy provider. - The test asserts async_default_engine returns the entity. - """ + """Test async_default_engine.""" mock_provider_entity.url_path = "stt.new_test" mock_provider_entity._attr_name = "New test" await mock_setup(hass, tmp_path, mock_provider) await mock_config_entry_setup( - hass, tmp_path, mock_provider_entity, test_domain=config_flow_test_domains[0] + hass, tmp_path, mock_provider_entity, test_domain=config_flow_test_domain ) await hass.async_block_till_done() @@ -474,53 +521,11 @@ async def test_default_engine_prefer_entity( provider_engine = async_get_speech_to_text_engine(hass, "test") assert provider_engine is not None assert provider_engine.name == "test" - assert async_default_engine(hass) == "stt.new_test" - - -@pytest.mark.parametrize( - "config_flow_test_domains", - [ - # Test different setup order to ensure the default is not influenced - # by setup order. - ("cloud", "new_test"), - ("new_test", "cloud"), - ], -) -async def test_default_engine_prefer_cloud_entity( - hass: HomeAssistant, - tmp_path: Path, - mock_provider: MockSTTProvider, - config_flow_test_domains: str, -) -> None: - """Test async_default_engine. - - In this tests there's an entity from domain cloud, an entity from domain new_test - and a legacy provider. - The test asserts async_default_engine returns the entity from domain cloud. 
- """ - await mock_setup(hass, tmp_path, mock_provider) - for domain in config_flow_test_domains: - entity = MockSTTProviderEntity() - entity.url_path = f"stt.{domain}" - entity._attr_name = f"{domain} STT entity" - await mock_config_entry_setup(hass, tmp_path, entity, test_domain=domain) - await hass.async_block_till_done() - - for domain in config_flow_test_domains: - entity_engine = async_get_speech_to_text_engine( - hass, f"stt.{domain}_stt_entity" - ) - assert entity_engine is not None - assert entity_engine.name == f"{domain} STT entity" - - provider_engine = async_get_speech_to_text_engine(hass, "test") - assert provider_engine is not None - assert provider_engine.name == "test" - assert async_default_engine(hass) == "stt.cloud_stt_entity" + assert async_default_engine(hass) == "test" async def test_get_engine_legacy( - hass: HomeAssistant, tmp_path: Path, mock_provider: MockSTTProvider + hass: HomeAssistant, tmp_path: Path, mock_provider: MockProvider ) -> None: """Test async_get_speech_to_text_engine.""" mock_stt_platform( @@ -545,7 +550,7 @@ async def test_get_engine_legacy( async def test_get_engine_entity( - hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockSTTProviderEntity + hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockProviderEntity ) -> None: """Test async_get_speech_to_text_engine.""" await mock_config_entry_setup(hass, tmp_path, mock_provider_entity) diff --git a/tests/components/stt/test_legacy.py b/tests/components/stt/test_legacy.py index 20fa86b4d20..04068b012f1 100644 --- a/tests/components/stt/test_legacy.py +++ b/tests/components/stt/test_legacy.py @@ -41,7 +41,7 @@ async def test_platform_setup_with_error( discovery_info: DiscoveryInfoType | None = None, ) -> Provider: """Raise exception during platform setup.""" - raise Exception("Setup error") # noqa: TRY002 + raise Exception("Setup error") # pylint: disable=broad-exception-raised mock_stt_platform(hass, tmp_path, "bad_stt", async_get_engine=async_get_engine) diff --git a/tests/components/subaru/conftest.py b/tests/components/subaru/conftest.py index e18ea8fd398..f769eba252c 100644 --- a/tests/components/subaru/conftest.py +++ b/tests/components/subaru/conftest.py @@ -100,7 +100,7 @@ TEST_DEVICE_NAME = "test_vehicle_2" TEST_ENTITY_ID = f"sensor.{TEST_DEVICE_NAME}_odometer" -def advance_time_to_next_fetch(hass: HomeAssistant) -> None: +def advance_time_to_next_fetch(hass): """Fast forward time to next fetch.""" future = dt_util.utcnow() + timedelta(seconds=FETCH_INTERVAL + 30) async_fire_time_changed(hass, future) @@ -181,7 +181,7 @@ async def setup_subaru_config_entry( @pytest.fixture -async def subaru_config_entry(hass: HomeAssistant) -> MockConfigEntry: +async def subaru_config_entry(hass): """Create a Subaru config entry prior to setup.""" await async_setup_component(hass, HA_DOMAIN, {}) config_entry = MockConfigEntry(**TEST_CONFIG_ENTRY) @@ -190,9 +190,7 @@ async def subaru_config_entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture -async def ev_entry( - hass: HomeAssistant, subaru_config_entry: MockConfigEntry -) -> MockConfigEntry: +async def ev_entry(hass, subaru_config_entry): """Create a Subaru entry representing an EV vehicle with full STARLINK subscription.""" await setup_subaru_config_entry(hass, subaru_config_entry) assert DOMAIN in hass.config_entries.async_domains() diff --git a/tests/components/subaru/test_config_flow.py b/tests/components/subaru/test_config_flow.py index 6abc544c92a..9bddeeee051 100644 --- a/tests/components/subaru/test_config_flow.py 
+++ b/tests/components/subaru/test_config_flow.py @@ -10,7 +10,6 @@ from subarulink.exceptions import InvalidCredentials, InvalidPIN, SubaruExceptio from homeassistant import config_entries from homeassistant.components.subaru import config_flow from homeassistant.components.subaru.const import CONF_UPDATE_ENABLED, DOMAIN -from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_DEVICE_ID, CONF_PIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -390,7 +389,7 @@ async def test_option_flow(hass: HomeAssistant, options_form) -> None: @pytest.fixture -async def user_form(hass: HomeAssistant) -> ConfigFlowResult: +async def user_form(hass): """Return initial form for Subaru config flow.""" return await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -398,9 +397,7 @@ async def user_form(hass: HomeAssistant) -> ConfigFlowResult: @pytest.fixture -async def two_factor_start_form( - hass: HomeAssistant, user_form: ConfigFlowResult -) -> ConfigFlowResult: +async def two_factor_start_form(hass, user_form): """Return two factor form for Subaru config flow.""" with ( patch(MOCK_API_CONNECT, return_value=True), @@ -413,9 +410,7 @@ async def two_factor_start_form( @pytest.fixture -async def two_factor_verify_form( - hass: HomeAssistant, two_factor_start_form: ConfigFlowResult -) -> ConfigFlowResult: +async def two_factor_verify_form(hass, two_factor_start_form): """Return two factor form for Subaru config flow.""" with ( patch( @@ -432,9 +427,7 @@ async def two_factor_verify_form( @pytest.fixture -async def pin_form( - hass: HomeAssistant, two_factor_verify_form: ConfigFlowResult -) -> ConfigFlowResult: +async def pin_form(hass, two_factor_verify_form): """Return PIN input form for Subaru config flow.""" with ( patch( @@ -450,7 +443,7 @@ async def pin_form( @pytest.fixture -async def options_form(hass: HomeAssistant) -> ConfigFlowResult: +async def options_form(hass): """Return options form for Subaru config flow.""" entry = MockConfigEntry(domain=DOMAIN, data={}, options=None) entry.add_to_hass(hass) diff --git a/tests/components/suez_water/__init__.py b/tests/components/suez_water/__init__.py index a90df738454..4605e06344a 100644 --- a/tests/components/suez_water/__init__.py +++ b/tests/components/suez_water/__init__.py @@ -1,15 +1 @@ """Tests for the Suez Water integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Init suez water integration.""" - mock_config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/suez_water/conftest.py b/tests/components/suez_water/conftest.py index f634a053c65..51ade6009dc 100644 --- a/tests/components/suez_water/conftest.py +++ b/tests/components/suez_water/conftest.py @@ -1,32 +1,9 @@ """Common fixtures for the Suez Water tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch -from pysuez import AggregatedData, PriceResult -from pysuez.const import ATTRIBUTION import pytest - -from homeassistant.components.suez_water.const import DOMAIN - -from tests.common import MockConfigEntry - -MOCK_DATA = { - "username": "test-username", - "password": "test-password", - "counter_id": "test-counter", -} - - -@pytest.fixture 
-def mock_config_entry() -> MockConfigEntry: - """Create mock config_entry needed by suez_water integration.""" - return MockConfigEntry( - unique_id=MOCK_DATA["username"], - domain=DOMAIN, - title="Suez mock device", - data=MOCK_DATA, - ) +from typing_extensions import Generator @pytest.fixture @@ -36,45 +13,3 @@ def mock_setup_entry() -> Generator[AsyncMock]: "homeassistant.components.suez_water.async_setup_entry", return_value=True ) as mock_setup_entry: yield mock_setup_entry - - -@pytest.fixture(name="suez_client") -def mock_suez_client() -> Generator[AsyncMock]: - """Create mock for suez_water external api.""" - with ( - patch( - "homeassistant.components.suez_water.coordinator.SuezClient", autospec=True - ) as mock_client, - patch( - "homeassistant.components.suez_water.config_flow.SuezClient", - new=mock_client, - ), - ): - suez_client = mock_client.return_value - suez_client.check_credentials.return_value = True - - result = AggregatedData( - value=160, - current_month={ - "2024-01-01": 130, - "2024-01-02": 145, - }, - previous_month={ - "2024-12-01": 154, - "2024-12-02": 166, - }, - current_year=1500, - previous_year=1000, - attribution=ATTRIBUTION, - highest_monthly_consumption=2558, - history={ - "2024-01-01": 130, - "2024-01-02": 145, - "2024-12-01": 154, - "2024-12-02": 166, - }, - ) - - suez_client.fetch_aggregated_data.return_value = result - suez_client.get_price.return_value = PriceResult("4.74") - yield suez_client diff --git a/tests/components/suez_water/snapshots/test_sensor.ambr b/tests/components/suez_water/snapshots/test_sensor.ambr deleted file mode 100644 index da0ed3df7dd..00000000000 --- a/tests/components/suez_water/snapshots/test_sensor.ambr +++ /dev/null @@ -1,116 +0,0 @@ -# serializer version: 1 -# name: test_sensors_valid_state[sensor.suez_mock_device_water_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.suez_mock_device_water_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Water price', - 'platform': 'suez_water', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'water_price', - 'unique_id': 'test-counter_water_price', - 'unit_of_measurement': '€', - }) -# --- -# name: test_sensors_valid_state[sensor.suez_mock_device_water_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by toutsurmoneau.fr', - 'device_class': 'monetary', - 'friendly_name': 'Suez mock device Water price', - 'unit_of_measurement': '€', - }), - 'context': , - 'entity_id': 'sensor.suez_mock_device_water_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4.74', - }) -# --- -# name: test_sensors_valid_state[sensor.suez_mock_device_water_usage_yesterday-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.suez_mock_device_water_usage_yesterday', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': , - 'original_icon': None, - 'original_name': 'Water usage yesterday', - 'platform': 'suez_water', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'water_usage_yesterday', - 'unique_id': 'test-counter_water_usage_yesterday', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors_valid_state[sensor.suez_mock_device_water_usage_yesterday-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by toutsurmoneau.fr', - 'device_class': 'water', - 'friendly_name': 'Suez mock device Water usage yesterday', - 'highest_monthly_consumption': 2558, - 'history': dict({ - '2024-01-01': 130, - '2024-01-02': 145, - '2024-12-01': 154, - '2024-12-02': 166, - }), - 'last_year_overall': 1000, - 'previous_month_consumption': dict({ - '2024-12-01': 154, - '2024-12-02': 166, - }), - 'this_month_consumption': dict({ - '2024-01-01': 130, - '2024-01-02': 145, - }), - 'this_year_overall': 1500, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.suez_mock_device_water_usage_yesterday', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '160', - }) -# --- diff --git a/tests/components/suez_water/test_config_flow.py b/tests/components/suez_water/test_config_flow.py index 6779b4c7d02..3170a6779f0 100644 --- a/tests/components/suez_water/test_config_flow.py +++ b/tests/components/suez_water/test_config_flow.py @@ -1,23 +1,25 @@ """Test the Suez Water config flow.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch -from pysuez.exception import PySuezError +from pysuez.client import PySuezError import pytest from homeassistant import config_entries -from homeassistant.components.suez_water.const import CONF_COUNTER_ID, DOMAIN +from homeassistant.components.suez_water.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .conftest import MOCK_DATA - from tests.common import MockConfigEntry +MOCK_DATA = { + "username": "test-username", + "password": "test-password", + "counter_id": "test-counter", +} -async def test_form( - hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock -) -> None: + +async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -25,11 +27,12 @@ async def test_form( assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) - await hass.async_block_till_done() + with patch("homeassistant.components.suez_water.config_flow.SuezClient"): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" @@ -39,28 +42,37 @@ async def test_form( async def test_form_invalid_auth( - hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock ) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - suez_client.check_credentials.return_value = False - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + with ( + patch( + 
"homeassistant.components.suez_water.config_flow.SuezClient.__init__", + return_value=None, + ), + patch( + "homeassistant.components.suez_water.config_flow.SuezClient.check_credentials", + return_value=False, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_auth"} - suez_client.check_credentials.return_value = True - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) - await hass.async_block_till_done() + with patch("homeassistant.components.suez_water.config_flow.SuezClient"): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" @@ -96,71 +108,34 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: ("exception", "error"), [(PySuezError, "cannot_connect"), (Exception, "unknown")] ) async def test_form_error( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - exception: Exception, - suez_client: AsyncMock, - error: str, + hass: HomeAssistant, mock_setup_entry: AsyncMock, exception: Exception, error: str ) -> None: """Test we handle errors.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - suez_client.check_credentials.side_effect = exception - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + with patch( + "homeassistant.components.suez_water.config_flow.SuezClient", + side_effect=exception, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": error} - suez_client.check_credentials.return_value = True - suez_client.check_credentials.side_effect = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + with patch( + "homeassistant.components.suez_water.config_flow.SuezClient", + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" assert result["data"] == MOCK_DATA assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_auto_counter( - hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock -) -> None: - """Test form set counter if not set by user.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - partial_form = {**MOCK_DATA} - partial_form.pop(CONF_COUNTER_ID) - suez_client.find_counter.side_effect = PySuezError("test counter not found") - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - partial_form, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "counter_not_found"} - - suez_client.find_counter.side_effect = None - suez_client.find_counter.return_value = MOCK_DATA[CONF_COUNTER_ID] - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - partial_form, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "test-username" - assert 
result["result"].unique_id == "test-username" - assert result["data"] == MOCK_DATA - assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/suez_water/test_init.py b/tests/components/suez_water/test_init.py deleted file mode 100644 index 78d086af38f..00000000000 --- a/tests/components/suez_water/test_init.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Test Suez_water integration initialization.""" - -from unittest.mock import AsyncMock - -from homeassistant.components.suez_water.coordinator import PySuezError -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from . import setup_integration - -from tests.common import MockConfigEntry - - -async def test_initialization_invalid_credentials( - hass: HomeAssistant, - suez_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test that suez_water can't be loaded with invalid credentials.""" - - suez_client.check_credentials.return_value = False - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR - - -async def test_initialization_setup_api_error( - hass: HomeAssistant, - suez_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test that suez_water needs to retry loading if api failed to connect.""" - - suez_client.check_credentials.side_effect = PySuezError("Test failure") - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/suez_water/test_sensor.py b/tests/components/suez_water/test_sensor.py deleted file mode 100644 index cb578432f62..00000000000 --- a/tests/components/suez_water/test_sensor.py +++ /dev/null @@ -1,67 +0,0 @@ -"""Test Suez_water sensor platform.""" - -from unittest.mock import AsyncMock, patch - -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.suez_water.const import DATA_REFRESH_INTERVAL -from homeassistant.components.suez_water.coordinator import PySuezError -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - - -async def test_sensors_valid_state( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - suez_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test that suez_water sensor is loaded and in a valid state.""" - with patch("homeassistant.components.suez_water.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.LOADED - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.parametrize("method", [("fetch_aggregated_data"), ("get_price")]) -async def test_sensors_failed_update( - hass: HomeAssistant, - suez_client: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - method: str, -) -> None: - """Test that suez_water sensor reflect failure when api fails.""" - - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.LOADED - - entity_ids = await hass.async_add_executor_job(hass.states.entity_ids) - assert len(entity_ids) == 2 - - for entity in entity_ids: - state = hass.states.get(entity) - assert entity - assert state.state != STATE_UNAVAILABLE - - getattr(suez_client, method).side_effect = PySuezError("Should fail to update") - - freezer.tick(DATA_REFRESH_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(True) - - for entity in entity_ids: - state = hass.states.get(entity) - assert entity - assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/sun/test_trigger.py b/tests/components/sun/test_trigger.py index 303ca3b80cd..fc1af35faea 100644 --- a/tests/components/sun/test_trigger.py +++ b/tests/components/sun/test_trigger.py @@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed, mock_component +from tests.common import async_fire_time_changed, async_mock_service, mock_component @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -26,8 +26,14 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.fixture(autouse=True) -def setup_comp(hass: HomeAssistant) -> None: +def setup_comp(hass): """Initialize components.""" mock_component(hass, "group") hass.loop.run_until_complete( @@ -35,9 +41,7 @@ def setup_comp(hass: HomeAssistant) -> None: ) -async def test_sunset_trigger( - hass: HomeAssistant, service_calls: list[ServiceCall] -) -> None: +async def test_sunset_trigger(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test the sunset trigger.""" now = datetime(2015, 9, 15, 23, tzinfo=dt_util.UTC) trigger_time = datetime(2015, 9, 16, 2, tzinfo=dt_util.UTC) @@ -63,11 +67,10 @@ async def test_sunset_trigger( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 1 async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 0 with freeze_time(now): await hass.services.async_call( @@ -76,17 +79,14 @@ async def test_sunset_trigger( 
{ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[2].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["id"] == 0 -async def test_sunrise_trigger( - hass: HomeAssistant, service_calls: list[ServiceCall] -) -> None: +async def test_sunrise_trigger(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test the sunrise trigger.""" now = datetime(2015, 9, 13, 23, tzinfo=dt_util.UTC) trigger_time = datetime(2015, 9, 16, 14, tzinfo=dt_util.UTC) @@ -105,11 +105,11 @@ async def test_sunrise_trigger( async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_sunset_trigger_with_offset( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test the sunset trigger with offset.""" now = datetime(2015, 9, 15, 23, tzinfo=dt_util.UTC) @@ -142,12 +142,12 @@ async def test_sunset_trigger_with_offset( async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "sun - sunset - 0:30:00" + assert len(calls) == 1 + assert calls[0].data["some"] == "sun - sunset - 0:30:00" async def test_sunrise_trigger_with_offset( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test the sunrise trigger with offset.""" now = datetime(2015, 9, 13, 23, tzinfo=dt_util.UTC) @@ -171,4 +171,4 @@ async def test_sunrise_trigger_with_offset( async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 diff --git a/tests/components/sunweg/test_config_flow.py b/tests/components/sunweg/test_config_flow.py index 8103003d7fb..80b6a946749 100644 --- a/tests/components/sunweg/test_config_flow.py +++ b/tests/components/sunweg/test_config_flow.py @@ -69,7 +69,14 @@ async def test_reauth(hass: HomeAssistant, plant_fixture, inverter_fixture) -> N assert entries[0].data[CONF_USERNAME] == SUNWEG_MOCK_ENTRY.data[CONF_USERNAME] assert entries[0].data[CONF_PASSWORD] == SUNWEG_MOCK_ENTRY.data[CONF_PASSWORD] - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/sunweg/test_init.py b/tests/components/sunweg/test_init.py index 6cbe38a128b..41edda38a5a 100644 --- a/tests/components/sunweg/test_init.py +++ b/tests/components/sunweg/test_init.py @@ -7,7 +7,7 @@ from sunweg.api import APIHelper, SunWegApiError from homeassistant.components.sunweg import SunWEGData from homeassistant.components.sunweg.const import DOMAIN, DeviceType -from homeassistant.components.sunweg.sensor.sensor_entity_description import ( +from homeassistant.components.sunweg.sensor_types.sensor_entity_description import ( SunWEGSensorEntityDescription, ) from homeassistant.config_entries import ConfigEntryState diff --git a/tests/components/surepetcare/conftest.py b/tests/components/surepetcare/conftest.py index 5dcc5dfdadc..9ae1bfe310a 100644 --- a/tests/components/surepetcare/conftest.py +++ 
b/tests/components/surepetcare/conftest.py @@ -17,7 +17,6 @@ from tests.common import MockConfigEntry async def _mock_call(method, resource): if method == "GET" and resource == MESTART_RESOURCE: return {"data": MOCK_API_DATA} - return None @pytest.fixture diff --git a/tests/components/surepetcare/test_config_flow.py b/tests/components/surepetcare/test_config_flow.py index 1140a2c54ef..c3c13195aca 100644 --- a/tests/components/surepetcare/test_config_flow.py +++ b/tests/components/surepetcare/test_config_flow.py @@ -6,7 +6,6 @@ from surepy.exceptions import SurePetcareAuthenticationError, SurePetcareError from homeassistant import config_entries from homeassistant.components.surepetcare.const import DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -25,7 +24,7 @@ async def test_form(hass: HomeAssistant, surepetcare: NonCallableMagicMock) -> N DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - assert not result["errors"] + assert result["errors"] is None with patch( "homeassistant.components.surepetcare.async_setup_entry", @@ -147,44 +146,42 @@ async def test_flow_entry_already_exists( assert result["reason"] == "already_configured" -async def test_reauthentication( - hass: HomeAssistant, surepetcare: NonCallableMagicMock -) -> None: +async def test_reauthentication(hass: HomeAssistant) -> None: """Test surepetcare reauthentication.""" old_entry = MockConfigEntry( domain="surepetcare", - data={ - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_TOKEN: "token", - }, + data=INPUT_DATA, unique_id="test-username", ) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} assert result["step_id"] == "reauth_confirm" - surepetcare.get_token.return_value = "token2" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"password": "test-password2"}, - ) - await hass.async_block_till_done() + with patch( + "homeassistant.components.surepetcare.config_flow.surepy.client.SureAPIClient.get_token", + return_value={"token": "token"}, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"password": "test-password"}, + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "reauth_successful" - assert old_entry.data == { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password2", - CONF_TOKEN: "token2", - } - async def test_reauthentication_failure(hass: HomeAssistant) -> None: """Test surepetcare reauthentication failure.""" @@ -195,7 +192,15 @@ async def test_reauthentication_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -225,7 +230,15 @@ async def 
test_reauthentication_cannot_connect(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -255,7 +268,15 @@ async def test_reauthentication_unknown_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/swiss_public_transport/__init__.py b/tests/components/swiss_public_transport/__init__.py index 98262324b11..3859a630c31 100644 --- a/tests/components/swiss_public_transport/__init__.py +++ b/tests/components/swiss_public_transport/__init__.py @@ -1,13 +1 @@ """Tests for the swiss_public_transport integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/swiss_public_transport/conftest.py b/tests/components/swiss_public_transport/conftest.py index 88bd233765b..c139b99e54d 100644 --- a/tests/components/swiss_public_transport/conftest.py +++ b/tests/components/swiss_public_transport/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the swiss_public_transport tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/swiss_public_transport/fixtures/connections.json b/tests/components/swiss_public_transport/fixtures/connections.json deleted file mode 100644 index 7e61206c366..00000000000 --- a/tests/components/swiss_public_transport/fixtures/connections.json +++ /dev/null @@ -1,146 +0,0 @@ -[ - { - "departure": "2024-01-06T18:03:00+0100", - "number": 0, - "platform": 0, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "2024-01-06T18:04:00+0100", - "number": 1, - "platform": 1, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": null - }, - { - "departure": "2024-01-06T18:05:00+0100", - "number": 2, - "platform": 2, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "2024-01-06T18:06:00+0100", - "number": 3, - "platform": 3, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "2024-01-06T18:07:00+0100", - "number": 4, - "platform": 4, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "2024-01-06T18:08:00+0100", - "number": 5, - "platform": 5, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "2024-01-06T18:09:00+0100", - "number": 6, - "platform": 6, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "2024-01-06T18:10:00+0100", - 
"number": 7, - "platform": 7, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "2024-01-06T18:11:00+0100", - "number": 8, - "platform": 8, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "2024-01-06T18:12:00+0100", - "number": 9, - "platform": 9, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "2024-01-06T18:13:00+0100", - "number": 10, - "platform": 10, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "invalid", - "number": 11, - "platform": 11, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "2024-01-06T18:15:00+0100", - "number": 12, - "platform": 12, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "2024-01-06T18:16:00+0100", - "number": 13, - "platform": 13, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "2024-01-06T18:17:00+0100", - "number": 14, - "platform": 14, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - }, - { - "departure": "2024-01-06T18:18:00+0100", - "number": 15, - "platform": 15, - "transfers": 0, - "duration": "10", - "delay": 0, - "line": "T10" - } -] diff --git a/tests/components/swiss_public_transport/test_init.py b/tests/components/swiss_public_transport/test_init.py index 9ad4a8d50b0..47360f93cf2 100644 --- a/tests/components/swiss_public_transport/test_init.py +++ b/tests/components/swiss_public_transport/test_init.py @@ -1,4 +1,4 @@ -"""Test the swiss_public_transport integration.""" +"""Test the swiss_public_transport config flow.""" from unittest.mock import AsyncMock, patch @@ -36,7 +36,6 @@ CONNECTIONS = [ "transfers": 0, "duration": "10", "delay": 0, - "line": "T10", }, { "departure": "2024-01-06T18:04:00+0100", @@ -45,7 +44,6 @@ CONNECTIONS = [ "transfers": 0, "duration": "10", "delay": 0, - "line": "T10", }, { "departure": "2024-01-06T18:05:00+0100", @@ -54,7 +52,6 @@ CONNECTIONS = [ "transfers": 0, "duration": "10", "delay": 0, - "line": "T10", }, ] diff --git a/tests/components/swiss_public_transport/test_service.py b/tests/components/swiss_public_transport/test_service.py deleted file mode 100644 index 4009327e77d..00000000000 --- a/tests/components/swiss_public_transport/test_service.py +++ /dev/null @@ -1,224 +0,0 @@ -"""Test the swiss_public_transport service.""" - -import json -import logging -from unittest.mock import AsyncMock, patch - -from opendata_transport.exceptions import ( - OpendataTransportConnectionError, - OpendataTransportError, -) -import pytest -from voluptuous import error as vol_er - -from homeassistant.components.swiss_public_transport.const import ( - ATTR_CONFIG_ENTRY_ID, - ATTR_LIMIT, - CONF_DESTINATION, - CONF_START, - CONNECTIONS_COUNT, - CONNECTIONS_MAX, - DOMAIN, - SERVICE_FETCH_CONNECTIONS, -) -from homeassistant.components.swiss_public_transport.helper import unique_id_from_config -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError - -from . 
import setup_integration - -from tests.common import MockConfigEntry, load_fixture - -_LOGGER = logging.getLogger(__name__) - -MOCK_DATA_STEP_BASE = { - CONF_START: "test_start", - CONF_DESTINATION: "test_destination", -} - - -@pytest.mark.parametrize( - ("data", "config_data"), - [ - ({ATTR_LIMIT: 1}, MOCK_DATA_STEP_BASE), - ({ATTR_LIMIT: 2}, MOCK_DATA_STEP_BASE), - ({ATTR_LIMIT: 3}, MOCK_DATA_STEP_BASE), - ({ATTR_LIMIT: CONNECTIONS_MAX}, MOCK_DATA_STEP_BASE), - ({}, MOCK_DATA_STEP_BASE), - ], -) -async def test_service_call_fetch_connections_success( - hass: HomeAssistant, - data: dict, - config_data, -) -> None: - """Test the fetch_connections service.""" - - unique_id = unique_id_from_config(config_data) - - config_entry = MockConfigEntry( - domain=DOMAIN, - data=config_data, - title=f"Service test call with data={data}", - unique_id=unique_id, - entry_id=f"entry_{unique_id}", - ) - - with patch( - "homeassistant.components.swiss_public_transport.OpendataTransport", - return_value=AsyncMock(), - ) as mock: - mock().connections = json.loads(load_fixture("connections.json", DOMAIN))[ - 0 : data.get(ATTR_LIMIT, CONNECTIONS_COUNT) + 2 - ] - - await setup_integration(hass, config_entry) - - data[ATTR_CONFIG_ENTRY_ID] = config_entry.entry_id - assert hass.services.has_service(DOMAIN, SERVICE_FETCH_CONNECTIONS) - response = await hass.services.async_call( - domain=DOMAIN, - service=SERVICE_FETCH_CONNECTIONS, - service_data=data, - blocking=True, - return_response=True, - ) - await hass.async_block_till_done() - assert response["connections"] is not None - assert len(response["connections"]) == data.get(ATTR_LIMIT, CONNECTIONS_COUNT) - - -@pytest.mark.parametrize( - ("limit", "config_data", "expected_result", "raise_error"), - [ - (-1, MOCK_DATA_STEP_BASE, pytest.raises(vol_er.MultipleInvalid), None), - (0, MOCK_DATA_STEP_BASE, pytest.raises(vol_er.MultipleInvalid), None), - ( - CONNECTIONS_MAX + 1, - MOCK_DATA_STEP_BASE, - pytest.raises(vol_er.MultipleInvalid), - None, - ), - ( - 1, - MOCK_DATA_STEP_BASE, - pytest.raises(HomeAssistantError), - OpendataTransportConnectionError(), - ), - ( - 2, - MOCK_DATA_STEP_BASE, - pytest.raises(HomeAssistantError), - OpendataTransportError(), - ), - ], -) -async def test_service_call_fetch_connections_error( - hass: HomeAssistant, - limit, - config_data, - expected_result, - raise_error, -) -> None: - """Test service call with standard error.""" - - unique_id = unique_id_from_config(config_data) - - config_entry = MockConfigEntry( - domain=DOMAIN, - data=config_data, - title=f"Service test call with limit={limit} and error={raise_error}", - unique_id=unique_id, - entry_id=f"entry_{unique_id}", - ) - - with patch( - "homeassistant.components.swiss_public_transport.OpendataTransport", - return_value=AsyncMock(), - ) as mock: - mock().connections = json.loads(load_fixture("connections.json", DOMAIN)) - - await setup_integration(hass, config_entry) - - assert hass.services.has_service(DOMAIN, SERVICE_FETCH_CONNECTIONS) - mock().async_get_data.side_effect = raise_error - with expected_result: - await hass.services.async_call( - domain=DOMAIN, - service=SERVICE_FETCH_CONNECTIONS, - service_data={ - ATTR_CONFIG_ENTRY_ID: config_entry.entry_id, - ATTR_LIMIT: limit, - }, - blocking=True, - return_response=True, - ) - - -async def test_service_call_load_unload( - hass: HomeAssistant, -) -> None: - """Test service call with integration error.""" - - unique_id = unique_id_from_config(MOCK_DATA_STEP_BASE) - - config_entry = MockConfigEntry( - domain=DOMAIN, - 
data=MOCK_DATA_STEP_BASE, - title="Service test call for unloaded entry", - unique_id=unique_id, - entry_id=f"entry_{unique_id}", - ) - - bad_entry_id = "bad_entry_id" - - with patch( - "homeassistant.components.swiss_public_transport.OpendataTransport", - return_value=AsyncMock(), - ) as mock: - mock().connections = json.loads(load_fixture("connections.json", DOMAIN)) - - await setup_integration(hass, config_entry) - - assert hass.services.has_service(DOMAIN, SERVICE_FETCH_CONNECTIONS) - response = await hass.services.async_call( - domain=DOMAIN, - service=SERVICE_FETCH_CONNECTIONS, - service_data={ - ATTR_CONFIG_ENTRY_ID: config_entry.entry_id, - }, - blocking=True, - return_response=True, - ) - await hass.async_block_till_done() - assert response["connections"] is not None - - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - with pytest.raises( - ServiceValidationError, match=f"{config_entry.title} is not loaded" - ): - await hass.services.async_call( - domain=DOMAIN, - service=SERVICE_FETCH_CONNECTIONS, - service_data={ - ATTR_CONFIG_ENTRY_ID: config_entry.entry_id, - }, - blocking=True, - return_response=True, - ) - - with pytest.raises( - ServiceValidationError, - match=f'Swiss public transport integration instance "{bad_entry_id}" not found', - ): - await hass.services.async_call( - domain=DOMAIN, - service=SERVICE_FETCH_CONNECTIONS, - service_data={ - ATTR_CONFIG_ENTRY_ID: bad_entry_id, - }, - blocking=True, - return_response=True, - ) diff --git a/tests/components/switch/common.py b/tests/components/switch/common.py index 96c79fb7d55..e9764d59d7c 100644 --- a/tests/components/switch/common.py +++ b/tests/components/switch/common.py @@ -15,31 +15,28 @@ from homeassistant.const import ( STATE_OFF, STATE_ON, ) -from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass @bind_hass -def turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def turn_on(hass, entity_id=ENTITY_MATCH_ALL): """Turn all or specified switch on.""" hass.add_job(async_turn_on, hass, entity_id) -async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): """Turn all or specified switch on.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) @bind_hass -def turn_off(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def turn_off(hass, entity_id=ENTITY_MATCH_ALL): """Turn all or specified switch off.""" hass.add_job(async_turn_off, hass, entity_id) -async def async_turn_off( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL): """Turn all or specified switch off.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) diff --git a/tests/components/switch/test_device_action.py b/tests/components/switch/test_device_action.py index 9751721cbc7..0b41ce7992d 100644 --- a/tests/components/switch/test_device_action.py +++ b/tests/components/switch/test_device_action.py @@ -7,7 +7,7 @@ from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.switch import DOMAIN from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant +from homeassistant.core 
import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component @@ -24,6 +24,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_actions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -109,6 +115,7 @@ async def test_action( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -183,6 +190,7 @@ async def test_action_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) diff --git a/tests/components/switch/test_device_condition.py b/tests/components/switch/test_device_condition.py index 7c4f434b0a4..2ba2c6adb5c 100644 --- a/tests/components/switch/test_device_condition.py +++ b/tests/components/switch/test_device_condition.py @@ -20,6 +20,7 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, ) @@ -28,6 +29,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -176,7 +183,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -242,20 +249,20 @@ async def test_if_state( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_on event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "is_off event - test_event2" + assert len(calls) == 2 + assert calls[1].data["some"] == "is_off event - test_event2" @pytest.mark.usefixtures("enable_custom_integrations") @@ -263,7 +270,7 @@ async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -308,12 +315,12 @@ async def 
test_if_state_legacy( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_on event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_on event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -321,7 +328,7 @@ async def test_if_fires_on_for_condition( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for firing if condition is on with delay.""" point1 = dt_util.utcnow() @@ -370,26 +377,26 @@ async def test_if_fires_on_for_condition( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 # Time travel 10 secs into the future freezer.move_to(point2) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 # Time travel 20 secs into the future freezer.move_to(point3) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_off event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_off event - test_event1" diff --git a/tests/components/switch/test_device_trigger.py b/tests/components/switch/test_device_trigger.py index 08e6ab6d0f6..092b7a964bb 100644 --- a/tests/components/switch/test_device_trigger.py +++ b/tests/components/switch/test_device_trigger.py @@ -20,6 +20,7 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, ) @@ -28,6 +29,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -174,7 +181,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -261,20 +268,20 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { + assert len(calls) == 2 + assert {calls[0].data["some"], calls[1].data["some"]} == { f"turn_off device - {entry.entity_id} - on - off - None", f"turn_on_or_off device - {entry.entity_id} - on - off - None", } hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert 
len(service_calls) == 4 - assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { + assert len(calls) == 4 + assert {calls[2].data["some"], calls[3].data["some"]} == { f"turn_on device - {entry.entity_id} - off - on - None", f"turn_on_or_off device - {entry.entity_id} - off - on - None", } @@ -285,7 +292,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -330,13 +337,13 @@ async def test_if_fires_on_state_change_legacy( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - None" ) @@ -346,7 +353,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -392,16 +399,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await hass.async_block_till_done() assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/switch_as_x/__init__.py b/tests/components/switch_as_x/__init__.py index 2addb832462..de6f1bac790 100644 --- a/tests/components/switch_as_x/__init__.py +++ b/tests/components/switch_as_x/__init__.py @@ -1,7 +1,14 @@ """The tests for Switch as X platforms.""" -from homeassistant.components.lock import LockState -from homeassistant.const import STATE_CLOSED, STATE_OFF, STATE_ON, STATE_OPEN, Platform +from homeassistant.const import ( + STATE_CLOSED, + STATE_LOCKED, + STATE_OFF, + STATE_ON, + STATE_OPEN, + STATE_UNLOCKED, + Platform, +) PLATFORMS_TO_TEST = ( Platform.COVER, @@ -17,7 +24,7 @@ STATE_MAP = { Platform.COVER: {STATE_ON: STATE_OPEN, STATE_OFF: STATE_CLOSED}, Platform.FAN: {STATE_ON: STATE_ON, STATE_OFF: STATE_OFF}, Platform.LIGHT: {STATE_ON: STATE_ON, STATE_OFF: STATE_OFF}, - Platform.LOCK: {STATE_ON: LockState.UNLOCKED, STATE_OFF: LockState.LOCKED}, + Platform.LOCK: {STATE_ON: STATE_UNLOCKED, STATE_OFF: STATE_LOCKED}, Platform.SIREN: {STATE_ON: STATE_ON, STATE_OFF: STATE_OFF}, Platform.VALVE: {STATE_ON: STATE_OPEN, STATE_OFF: STATE_CLOSED}, }, @@ -25,7 +32,7 @@ STATE_MAP = { Platform.COVER: {STATE_ON: STATE_CLOSED, STATE_OFF: STATE_OPEN}, Platform.FAN: {STATE_ON: STATE_ON, STATE_OFF: STATE_OFF}, Platform.LIGHT: {STATE_ON: STATE_ON, STATE_OFF: STATE_OFF}, - Platform.LOCK: {STATE_ON: LockState.LOCKED, STATE_OFF: LockState.UNLOCKED}, + Platform.LOCK: {STATE_ON: STATE_LOCKED, STATE_OFF: STATE_UNLOCKED}, Platform.SIREN: 
{STATE_ON: STATE_ON, STATE_OFF: STATE_OFF}, Platform.VALVE: {STATE_ON: STATE_CLOSED, STATE_OFF: STATE_OPEN}, }, diff --git a/tests/components/switch_as_x/conftest.py b/tests/components/switch_as_x/conftest.py index f8328f38b54..88a86892d2d 100644 --- a/tests/components/switch_as_x/conftest.py +++ b/tests/components/switch_as_x/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/switch_as_x/test_cover.py b/tests/components/switch_as_x/test_cover.py index acb382a635a..78a76c20beb 100644 --- a/tests/components/switch_as_x/test_cover.py +++ b/tests/components/switch_as_x/test_cover.py @@ -1,6 +1,6 @@ """Tests for the Switch as X Cover platform.""" -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, CoverState +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.components.switch_as_x.config_flow import SwitchAsXConfigFlowHandler from homeassistant.components.switch_as_x.const import ( @@ -15,8 +15,10 @@ from homeassistant.const import ( SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, + STATE_CLOSED, STATE_OFF, STATE_ON, + STATE_OPEN, Platform, ) from homeassistant.core import HomeAssistant @@ -69,7 +71,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN + assert hass.states.get("cover.decorative_lights").state == STATE_OPEN await hass.services.async_call( COVER_DOMAIN, @@ -79,7 +81,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED + assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED await hass.services.async_call( COVER_DOMAIN, @@ -89,7 +91,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN + assert hass.states.get("cover.decorative_lights").state == STATE_OPEN await hass.services.async_call( COVER_DOMAIN, @@ -99,7 +101,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED + assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -109,7 +111,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN + assert hass.states.get("cover.decorative_lights").state == STATE_OPEN await hass.services.async_call( SWITCH_DOMAIN, @@ -119,7 +121,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED + assert hass.states.get("cover.decorative_lights").state == 
STATE_CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -129,7 +131,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN + assert hass.states.get("cover.decorative_lights").state == STATE_OPEN async def test_service_calls_inverted(hass: HomeAssistant) -> None: @@ -152,7 +154,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED + assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED await hass.services.async_call( COVER_DOMAIN, @@ -162,7 +164,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN + assert hass.states.get("cover.decorative_lights").state == STATE_OPEN await hass.services.async_call( COVER_DOMAIN, @@ -172,7 +174,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN + assert hass.states.get("cover.decorative_lights").state == STATE_OPEN await hass.services.async_call( COVER_DOMAIN, @@ -182,7 +184,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED + assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -192,7 +194,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED + assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -202,7 +204,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN + assert hass.states.get("cover.decorative_lights").state == STATE_OPEN await hass.services.async_call( SWITCH_DOMAIN, @@ -212,4 +214,4 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED + assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED diff --git a/tests/components/switch_as_x/test_fan.py b/tests/components/switch_as_x/test_fan.py index a33490dab45..fd4296bd616 100644 --- a/tests/components/switch_as_x/test_fan.py +++ b/tests/components/switch_as_x/test_fan.py @@ -44,7 +44,7 @@ async def test_default_state(hass: HomeAssistant) -> None: state = hass.states.get("fan.wind_machine") assert state is not None assert state.state == "unavailable" - assert state.attributes["supported_features"] == 48 + assert state.attributes["supported_features"] == 0 async def test_service_calls(hass: HomeAssistant) -> None: diff --git a/tests/components/switch_as_x/test_init.py 
b/tests/components/switch_as_x/test_init.py index cd80fab69bc..e250cacb7ac 100644 --- a/tests/components/switch_as_x/test_init.py +++ b/tests/components/switch_as_x/test_init.py @@ -7,7 +7,6 @@ from unittest.mock import patch import pytest from homeassistant.components.homeassistant import exposed_entities -from homeassistant.components.lock import LockState from homeassistant.components.switch_as_x.config_flow import SwitchAsXConfigFlowHandler from homeassistant.components.switch_as_x.const import ( CONF_INVERT, @@ -18,9 +17,11 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( CONF_ENTITY_ID, STATE_CLOSED, + STATE_LOCKED, STATE_OFF, STATE_ON, STATE_OPEN, + STATE_UNLOCKED, EntityCategory, Platform, ) @@ -73,7 +74,7 @@ async def test_config_entry_unregistered_uuid( (Platform.COVER, STATE_OPEN, STATE_CLOSED), (Platform.FAN, STATE_ON, STATE_OFF), (Platform.LIGHT, STATE_ON, STATE_OFF), - (Platform.LOCK, LockState.UNLOCKED, LockState.LOCKED), + (Platform.LOCK, STATE_UNLOCKED, STATE_LOCKED), (Platform.SIREN, STATE_ON, STATE_OFF), (Platform.VALVE, STATE_OPEN, STATE_CLOSED), ], diff --git a/tests/components/switch_as_x/test_lock.py b/tests/components/switch_as_x/test_lock.py index c2a0806778d..f7d61cf6895 100644 --- a/tests/components/switch_as_x/test_lock.py +++ b/tests/components/switch_as_x/test_lock.py @@ -1,6 +1,6 @@ """Tests for the Switch as X Lock platform.""" -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.components.switch_as_x.config_flow import SwitchAsXConfigFlowHandler from homeassistant.components.switch_as_x.const import ( @@ -15,8 +15,10 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, SERVICE_UNLOCK, + STATE_LOCKED, STATE_OFF, STATE_ON, + STATE_UNLOCKED, Platform, ) from homeassistant.core import HomeAssistant @@ -68,7 +70,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("lock.decorative_lights").state == LockState.UNLOCKED + assert hass.states.get("lock.decorative_lights").state == STATE_UNLOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -78,7 +80,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("lock.decorative_lights").state == LockState.LOCKED + assert hass.states.get("lock.decorative_lights").state == STATE_LOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -88,7 +90,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("lock.decorative_lights").state == LockState.UNLOCKED + assert hass.states.get("lock.decorative_lights").state == STATE_UNLOCKED await hass.services.async_call( SWITCH_DOMAIN, @@ -98,7 +100,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("lock.decorative_lights").state == LockState.LOCKED + assert hass.states.get("lock.decorative_lights").state == STATE_LOCKED await hass.services.async_call( SWITCH_DOMAIN, @@ -108,7 +110,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state 
== STATE_ON - assert hass.states.get("lock.decorative_lights").state == LockState.UNLOCKED + assert hass.states.get("lock.decorative_lights").state == STATE_UNLOCKED await hass.services.async_call( SWITCH_DOMAIN, @@ -118,7 +120,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("lock.decorative_lights").state == LockState.LOCKED + assert hass.states.get("lock.decorative_lights").state == STATE_LOCKED async def test_service_calls_inverted(hass: HomeAssistant) -> None: @@ -141,7 +143,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("lock.decorative_lights").state == LockState.LOCKED + assert hass.states.get("lock.decorative_lights").state == STATE_LOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -151,7 +153,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("lock.decorative_lights").state == LockState.LOCKED + assert hass.states.get("lock.decorative_lights").state == STATE_LOCKED await hass.services.async_call( LOCK_DOMAIN, @@ -161,7 +163,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("lock.decorative_lights").state == LockState.UNLOCKED + assert hass.states.get("lock.decorative_lights").state == STATE_UNLOCKED await hass.services.async_call( SWITCH_DOMAIN, @@ -171,7 +173,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("lock.decorative_lights").state == LockState.UNLOCKED + assert hass.states.get("lock.decorative_lights").state == STATE_UNLOCKED await hass.services.async_call( SWITCH_DOMAIN, @@ -181,7 +183,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("lock.decorative_lights").state == LockState.LOCKED + assert hass.states.get("lock.decorative_lights").state == STATE_LOCKED await hass.services.async_call( SWITCH_DOMAIN, @@ -191,4 +193,4 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("lock.decorative_lights").state == LockState.UNLOCKED + assert hass.states.get("lock.decorative_lights").state == STATE_UNLOCKED diff --git a/tests/components/switch_as_x/test_valve.py b/tests/components/switch_as_x/test_valve.py index 6f6ef719ae1..854f693404f 100644 --- a/tests/components/switch_as_x/test_valve.py +++ b/tests/components/switch_as_x/test_valve.py @@ -7,7 +7,7 @@ from homeassistant.components.switch_as_x.const import ( CONF_TARGET_DOMAIN, DOMAIN, ) -from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN, ValveState +from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN from homeassistant.const import ( CONF_ENTITY_ID, SERVICE_CLOSE_VALVE, @@ -15,8 +15,10 @@ from homeassistant.const import ( SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, + STATE_CLOSED, STATE_OFF, STATE_ON, + STATE_OPEN, Platform, ) from homeassistant.core import HomeAssistant @@ -69,7 +71,7 @@ async def test_service_calls(hass: HomeAssistant) -> 
None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN + assert hass.states.get("valve.decorative_lights").state == STATE_OPEN await hass.services.async_call( VALVE_DOMAIN, @@ -79,7 +81,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("valve.decorative_lights").state == ValveState.CLOSED + assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED await hass.services.async_call( VALVE_DOMAIN, @@ -89,7 +91,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN + assert hass.states.get("valve.decorative_lights").state == STATE_OPEN await hass.services.async_call( VALVE_DOMAIN, @@ -99,7 +101,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("valve.decorative_lights").state == ValveState.CLOSED + assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -109,7 +111,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN + assert hass.states.get("valve.decorative_lights").state == STATE_OPEN await hass.services.async_call( SWITCH_DOMAIN, @@ -119,7 +121,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("valve.decorative_lights").state == ValveState.CLOSED + assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -129,7 +131,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN + assert hass.states.get("valve.decorative_lights").state == STATE_OPEN async def test_service_calls_inverted(hass: HomeAssistant) -> None: @@ -152,7 +154,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("valve.decorative_lights").state == ValveState.CLOSED + assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED await hass.services.async_call( VALVE_DOMAIN, @@ -162,7 +164,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN + assert hass.states.get("valve.decorative_lights").state == STATE_OPEN await hass.services.async_call( VALVE_DOMAIN, @@ -172,7 +174,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN + assert hass.states.get("valve.decorative_lights").state == STATE_OPEN await hass.services.async_call( VALVE_DOMAIN, @@ -182,7 +184,7 @@ async def 
test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("valve.decorative_lights").state == ValveState.CLOSED + assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -192,7 +194,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("valve.decorative_lights").state == ValveState.CLOSED + assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -202,7 +204,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("valve.decorative_lights").state == ValveState.OPEN + assert hass.states.get("valve.decorative_lights").state == STATE_OPEN await hass.services.async_call( SWITCH_DOMAIN, @@ -212,4 +214,4 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("valve.decorative_lights").state == ValveState.CLOSED + assert hass.states.get("valve.decorative_lights").state == STATE_CLOSED diff --git a/tests/components/switchbot/__init__.py b/tests/components/switchbot/__init__.py index bd3985ff062..b2a8445546e 100644 --- a/tests/components/switchbot/__init__.py +++ b/tests/components/switchbot/__init__.py @@ -205,28 +205,3 @@ NOT_SWITCHBOT_INFO = BluetoothServiceInfoBleak( connectable=True, tx_power=-127, ) - - -WOMETERTHPC_SERVICE_INFO = BluetoothServiceInfoBleak( - name="WoTHPc", - manufacturer_data={ - 2409: b"\xb0\xe9\xfeT2\x15\xb7\xe4\x07\x9b\xa4\x007\x02\xd5\x00" - }, - service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"5\x00d"}, - service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], - address="AA:BB:CC:DD:EE:AA", - rssi=-60, - source="local", - advertisement=generate_advertisement_data( - local_name="WoTHPc", - manufacturer_data={ - 2409: b"\xb0\xe9\xfeT2\x15\xb7\xe4\x07\x9b\xa4\x007\x02\xd5\x00" - }, - service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"5\x00d"}, - service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], - ), - device=generate_ble_device("AA:BB:CC:DD:EE:AA", "WoTHPc"), - time=0, - connectable=True, - tx_power=-127, -) diff --git a/tests/components/switchbot/test_config_flow.py b/tests/components/switchbot/test_config_flow.py index b0fba2a5f18..182e9457f22 100644 --- a/tests/components/switchbot/test_config_flow.py +++ b/tests/components/switchbot/test_config_flow.py @@ -7,7 +7,6 @@ from switchbot import SwitchbotAccountConnectionError, SwitchbotAuthenticationEr from homeassistant.components.switchbot.const import ( CONF_ENCRYPTION_KEY, CONF_KEY_ID, - CONF_LOCK_NIGHTLATCH, CONF_RETRY_COUNT, ) from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER @@ -783,65 +782,3 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 assert entry.options[CONF_RETRY_COUNT] == 6 - - -async def test_options_flow_lock_pro(hass: HomeAssistant) -> None: - """Test updating options.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", - CONF_NAME: "test-name", - CONF_PASSWORD: "test-password", - CONF_SENSOR_TYPE: "lock_pro", - }, - options={CONF_RETRY_COUNT: 10}, - unique_id="aabbccddeeff", - ) - entry.add_to_hass(hass) - - # 
Test Force night_latch should be disabled by default. - with patch_async_setup_entry() as mock_setup_entry: - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init(entry.entry_id) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - assert result["errors"] is None - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - CONF_RETRY_COUNT: 3, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_LOCK_NIGHTLATCH] is False - - assert len(mock_setup_entry.mock_calls) == 1 - - # Test Set force night_latch to be enabled. - - with patch_async_setup_entry() as mock_setup_entry: - result = await hass.config_entries.options.async_init(entry.entry_id) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - assert result["errors"] is None - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - CONF_LOCK_NIGHTLATCH: True, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_LOCK_NIGHTLATCH] is True - - assert len(mock_setup_entry.mock_calls) == 0 - - assert entry.options[CONF_LOCK_NIGHTLATCH] is True diff --git a/tests/components/switchbot/test_sensor.py b/tests/components/switchbot/test_sensor.py index 3adeaef936c..030a477596c 100644 --- a/tests/components/switchbot/test_sensor.py +++ b/tests/components/switchbot/test_sensor.py @@ -15,7 +15,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from . import WOHAND_SERVICE_INFO, WOMETERTHPC_SERVICE_INFO +from . 
import WOHAND_SERVICE_INFO from tests.common import MockConfigEntry from tests.components.bluetooth import inject_bluetooth_service_info @@ -59,49 +59,3 @@ async def test_sensors(hass: HomeAssistant) -> None: assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_co2_sensor(hass: HomeAssistant) -> None: - """Test setting up creates the co2 sensor for a WoTHPc.""" - await async_setup_component(hass, DOMAIN, {}) - inject_bluetooth_service_info(hass, WOMETERTHPC_SERVICE_INFO) - - entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_ADDRESS: "AA:BB:CC:DD:EE:AA", - CONF_NAME: "test-name", - CONF_PASSWORD: "test-password", - CONF_SENSOR_TYPE: "hygrometer_co2", - }, - unique_id="aabbccddeeaa", - ) - entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert len(hass.states.async_all("sensor")) == 5 - - battery_sensor = hass.states.get("sensor.test_name_battery") - battery_sensor_attrs = battery_sensor.attributes - assert battery_sensor.state == "100" - assert battery_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Battery" - assert battery_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" - assert battery_sensor_attrs[ATTR_STATE_CLASS] == "measurement" - - rssi_sensor = hass.states.get("sensor.test_name_bluetooth_signal") - rssi_sensor_attrs = rssi_sensor.attributes - assert rssi_sensor.state == "-60" - assert rssi_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Bluetooth signal" - assert rssi_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "dBm" - - co2_sensor = hass.states.get("sensor.test_name_carbon_dioxide") - co2_sensor_attrs = co2_sensor.attributes - assert co2_sensor.state == "725" - assert co2_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Carbon dioxide" - assert co2_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "ppm" - - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/switchbot_cloud/conftest.py b/tests/components/switchbot_cloud/conftest.py index 09c953da06b..ed233ff2de9 100644 --- a/tests/components/switchbot_cloud/conftest.py +++ b/tests/components/switchbot_cloud/conftest.py @@ -1,11 +1,9 @@ """Common fixtures for the SwitchBot via API tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest - -from homeassistant.components.switchbot_cloud import SwitchBotAPI +from typing_extensions import Generator @pytest.fixture @@ -16,17 +14,3 @@ def mock_setup_entry() -> Generator[AsyncMock]: return_value=True, ) as mock_setup_entry: yield mock_setup_entry - - -@pytest.fixture -def mock_list_devices(): - """Mock list_devices.""" - with patch.object(SwitchBotAPI, "list_devices") as mock_list_devices: - yield mock_list_devices - - -@pytest.fixture -def mock_get_status(): - """Mock get_status.""" - with patch.object(SwitchBotAPI, "get_status") as mock_get_status: - yield mock_get_status diff --git a/tests/components/switchbot_cloud/test_init.py b/tests/components/switchbot_cloud/test_init.py index 43431ae04c0..25ea370efe5 100644 --- a/tests/components/switchbot_cloud/test_init.py +++ b/tests/components/switchbot_cloud/test_init.py @@ -50,18 +50,6 @@ async def test_setup_entry_success( remoteType="DIY Plug", hubDeviceId="test-hub-id", ), - Remote( - deviceId="meter-pro-1", - deviceName="meter-pro-name-1", - deviceType="MeterPro(CO2)", - hubDeviceId="test-hub-id", - ), - Remote( - 
deviceId="hub2-1", - deviceName="hub2-name-1", - deviceType="Hub 2", - hubDeviceId="test-hub-id", - ), ] mock_get_status.return_value = {"power": PowerState.ON.value} entry = configure_integration(hass) diff --git a/tests/components/switchbot_cloud/test_lock.py b/tests/components/switchbot_cloud/test_lock.py deleted file mode 100644 index a09d7241794..00000000000 --- a/tests/components/switchbot_cloud/test_lock.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Test for the switchbot_cloud lock.""" - -from unittest.mock import patch - -from switchbot_api import Device - -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState -from homeassistant.components.switchbot_cloud import SwitchBotAPI -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, SERVICE_LOCK, SERVICE_UNLOCK -from homeassistant.core import HomeAssistant - -from . import configure_integration - - -async def test_lock(hass: HomeAssistant, mock_list_devices, mock_get_status) -> None: - """Test locking and unlocking.""" - mock_list_devices.return_value = [ - Device( - deviceId="lock-id-1", - deviceName="lock-1", - deviceType="Smart Lock", - hubDeviceId="test-hub-id", - ), - ] - - mock_get_status.return_value = {"lockState": "locked"} - - entry = configure_integration(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert entry.state is ConfigEntryState.LOADED - - lock_id = "lock.lock_1" - assert hass.states.get(lock_id).state == LockState.LOCKED - - with patch.object(SwitchBotAPI, "send_command"): - await hass.services.async_call( - LOCK_DOMAIN, SERVICE_UNLOCK, {ATTR_ENTITY_ID: lock_id}, blocking=True - ) - assert hass.states.get(lock_id).state == LockState.UNLOCKED - - with patch.object(SwitchBotAPI, "send_command"): - await hass.services.async_call( - LOCK_DOMAIN, SERVICE_LOCK, {ATTR_ENTITY_ID: lock_id}, blocking=True - ) - assert hass.states.get(lock_id).state == LockState.LOCKED diff --git a/tests/components/switcher_kis/__init__.py b/tests/components/switcher_kis/__init__.py index b9b44eb6d72..3f08afcbc9f 100644 --- a/tests/components/switcher_kis/__init__.py +++ b/tests/components/switcher_kis/__init__.py @@ -1,23 +1,14 @@ """Test cases and object for the Switcher integration tests.""" from homeassistant.components.switcher_kis.const import DOMAIN -from homeassistant.const import CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry -async def init_integration( - hass: HomeAssistant, username: str | None = None, token: str | None = None -) -> MockConfigEntry: +async def init_integration(hass: HomeAssistant) -> MockConfigEntry: """Set up the Switcher integration in Home Assistant.""" - data = {} - if username is not None: - data[CONF_USERNAME] = username - if token is not None: - data[CONF_TOKEN] = token - - entry = MockConfigEntry(domain=DOMAIN, data=data, unique_id=DOMAIN) + entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/switcher_kis/conftest.py b/tests/components/switcher_kis/conftest.py index 2cf123af2b0..8ff395fcab3 100644 --- a/tests/components/switcher_kis/conftest.py +++ b/tests/components/switcher_kis/conftest.py @@ -1,9 +1,9 @@ """Common fixtures and objects for the Switcher integration tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest +from 
typing_extensions import Generator @pytest.fixture diff --git a/tests/components/switcher_kis/consts.py b/tests/components/switcher_kis/consts.py index fe77ee0236b..3c5f3ff241e 100644 --- a/tests/components/switcher_kis/consts.py +++ b/tests/components/switcher_kis/consts.py @@ -4,11 +4,8 @@ from aioswitcher.device import ( DeviceState, DeviceType, ShutterDirection, - SwitcherDualShutterSingleLight, - SwitcherLight, SwitcherPowerPlug, SwitcherShutter, - SwitcherSingleShutterDualLight, SwitcherThermostat, SwitcherWaterHeater, ThermostatFanLevel, @@ -22,29 +19,14 @@ DUMMY_DEVICE_ID1 = "a123bc" DUMMY_DEVICE_ID2 = "cafe12" DUMMY_DEVICE_ID3 = "bada77" DUMMY_DEVICE_ID4 = "bbd164" -DUMMY_DEVICE_ID5 = "bcdb64" -DUMMY_DEVICE_ID6 = "bcdc64" -DUMMY_DEVICE_ID7 = "bcdd64" -DUMMY_DEVICE_ID8 = "bcde64" -DUMMY_DEVICE_ID9 = "bcdf64" DUMMY_DEVICE_KEY1 = "18" DUMMY_DEVICE_KEY2 = "01" DUMMY_DEVICE_KEY3 = "12" DUMMY_DEVICE_KEY4 = "07" -DUMMY_DEVICE_KEY5 = "15" -DUMMY_DEVICE_KEY6 = "16" -DUMMY_DEVICE_KEY7 = "17" -DUMMY_DEVICE_KEY8 = "18" -DUMMY_DEVICE_KEY9 = "19" DUMMY_DEVICE_NAME1 = "Plug 23BC" DUMMY_DEVICE_NAME2 = "Heater FE12" DUMMY_DEVICE_NAME3 = "Breeze AB39" DUMMY_DEVICE_NAME4 = "Runner DD77" -DUMMY_DEVICE_NAME5 = "RunnerS11 6CF5" -DUMMY_DEVICE_NAME6 = "RunnerS12 A9BE" -DUMMY_DEVICE_NAME7 = "Light 36BB" -DUMMY_DEVICE_NAME8 = "Light 36CB" -DUMMY_DEVICE_NAME9 = "Light 36DB" DUMMY_DEVICE_PASSWORD = "12345678" DUMMY_ELECTRIC_CURRENT1 = 0.5 DUMMY_ELECTRIC_CURRENT2 = 12.8 @@ -52,29 +34,10 @@ DUMMY_IP_ADDRESS1 = "192.168.100.157" DUMMY_IP_ADDRESS2 = "192.168.100.158" DUMMY_IP_ADDRESS3 = "192.168.100.159" DUMMY_IP_ADDRESS4 = "192.168.100.160" -DUMMY_IP_ADDRESS5 = "192.168.100.161" -DUMMY_IP_ADDRESS6 = "192.168.100.162" -DUMMY_IP_ADDRESS7 = "192.168.100.163" -DUMMY_IP_ADDRESS8 = "192.168.100.164" -DUMMY_IP_ADDRESS9 = "192.168.100.165" DUMMY_MAC_ADDRESS1 = "A1:B2:C3:45:67:D8" DUMMY_MAC_ADDRESS2 = "A1:B2:C3:45:67:D9" DUMMY_MAC_ADDRESS3 = "A1:B2:C3:45:67:DA" DUMMY_MAC_ADDRESS4 = "A1:B2:C3:45:67:DB" -DUMMY_MAC_ADDRESS5 = "A1:B2:C3:45:67:DC" -DUMMY_MAC_ADDRESS6 = "A1:B2:C3:45:67:DD" -DUMMY_MAC_ADDRESS7 = "A1:B2:C3:45:67:DE" -DUMMY_MAC_ADDRESS8 = "A1:B2:C3:45:67:DF" -DUMMY_MAC_ADDRESS9 = "A1:B2:C3:45:67:DG" -DUMMY_TOKEN_NEEDED1 = False -DUMMY_TOKEN_NEEDED2 = False -DUMMY_TOKEN_NEEDED3 = False -DUMMY_TOKEN_NEEDED4 = False -DUMMY_TOKEN_NEEDED5 = True -DUMMY_TOKEN_NEEDED6 = True -DUMMY_TOKEN_NEEDED7 = True -DUMMY_TOKEN_NEEDED8 = True -DUMMY_TOKEN_NEEDED9 = True DUMMY_PHONE_ID = "1234" DUMMY_POWER_CONSUMPTION1 = 100 DUMMY_POWER_CONSUMPTION2 = 2780 @@ -86,15 +49,8 @@ DUMMY_TARGET_TEMPERATURE = 23 DUMMY_FAN_LEVEL = ThermostatFanLevel.LOW DUMMY_SWING = ThermostatSwing.OFF DUMMY_REMOTE_ID = "ELEC7001" -DUMMY_POSITION = [54] -DUMMY_POSITION_2 = [54, 54] -DUMMY_DIRECTION = [ShutterDirection.SHUTTER_STOP] -DUMMY_DIRECTION_2 = [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_STOP] -DUMMY_USERNAME = "email" -DUMMY_TOKEN = "zvVvd7JxtN7CgvkD1Psujw==" -DUMMY_LIGHT = [DeviceState.ON] -DUMMY_LIGHT_2 = [DeviceState.ON, DeviceState.ON] -DUMMY_LIGHT_3 = [DeviceState.ON, DeviceState.ON, DeviceState.ON] +DUMMY_POSITION = 54 +DUMMY_DIRECTION = ShutterDirection.SHUTTER_STOP DUMMY_PLUG_DEVICE = SwitcherPowerPlug( DeviceType.POWER_PLUG, @@ -104,7 +60,6 @@ DUMMY_PLUG_DEVICE = SwitcherPowerPlug( DUMMY_IP_ADDRESS1, DUMMY_MAC_ADDRESS1, DUMMY_DEVICE_NAME1, - DUMMY_TOKEN_NEEDED1, DUMMY_POWER_CONSUMPTION1, DUMMY_ELECTRIC_CURRENT1, ) @@ -117,7 +72,6 @@ DUMMY_WATER_HEATER_DEVICE = SwitcherWaterHeater( DUMMY_IP_ADDRESS2, DUMMY_MAC_ADDRESS2, 
     DUMMY_DEVICE_NAME2,
-    DUMMY_TOKEN_NEEDED2,
     DUMMY_POWER_CONSUMPTION2,
     DUMMY_ELECTRIC_CURRENT2,
     DUMMY_REMAINING_TIME,
@@ -132,39 +86,10 @@ DUMMY_SHUTTER_DEVICE = SwitcherShutter(
     DUMMY_IP_ADDRESS4,
     DUMMY_MAC_ADDRESS4,
     DUMMY_DEVICE_NAME4,
-    DUMMY_TOKEN_NEEDED4,
     DUMMY_POSITION,
     DUMMY_DIRECTION,
 )
 
-DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE = SwitcherSingleShutterDualLight(
-    DeviceType.RUNNER_S11,
-    DeviceState.ON,
-    DUMMY_DEVICE_ID5,
-    DUMMY_DEVICE_KEY5,
-    DUMMY_IP_ADDRESS5,
-    DUMMY_MAC_ADDRESS5,
-    DUMMY_DEVICE_NAME5,
-    DUMMY_TOKEN_NEEDED5,
-    DUMMY_POSITION,
-    DUMMY_DIRECTION,
-    DUMMY_LIGHT_2,
-)
-
-DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE = SwitcherDualShutterSingleLight(
-    DeviceType.RUNNER_S12,
-    DeviceState.ON,
-    DUMMY_DEVICE_ID6,
-    DUMMY_DEVICE_KEY6,
-    DUMMY_IP_ADDRESS6,
-    DUMMY_MAC_ADDRESS6,
-    DUMMY_DEVICE_NAME6,
-    DUMMY_TOKEN_NEEDED6,
-    DUMMY_POSITION_2,
-    DUMMY_DIRECTION_2,
-    DUMMY_LIGHT,
-)
-
 DUMMY_THERMOSTAT_DEVICE = SwitcherThermostat(
     DeviceType.BREEZE,
     DeviceState.ON,
@@ -173,7 +98,6 @@ DUMMY_THERMOSTAT_DEVICE = SwitcherThermostat(
     DUMMY_IP_ADDRESS3,
     DUMMY_MAC_ADDRESS3,
     DUMMY_DEVICE_NAME3,
-    DUMMY_TOKEN_NEEDED3,
     DUMMY_THERMOSTAT_MODE,
     DUMMY_TEMPERATURE,
     DUMMY_TARGET_TEMPERATURE,
@@ -182,40 +106,4 @@ DUMMY_THERMOSTAT_DEVICE = SwitcherThermostat(
     DUMMY_REMOTE_ID,
 )
 
-DUMMY_LIGHT_DEVICE = SwitcherLight(
-    DeviceType.LIGHT_SL01,
-    DeviceState.ON,
-    DUMMY_DEVICE_ID7,
-    DUMMY_DEVICE_KEY7,
-    DUMMY_IP_ADDRESS7,
-    DUMMY_MAC_ADDRESS7,
-    DUMMY_DEVICE_NAME7,
-    DUMMY_TOKEN_NEEDED7,
-    DUMMY_LIGHT,
-)
-
-DUMMY_DUAL_LIGHT_DEVICE = SwitcherLight(
-    DeviceType.LIGHT_SL02,
-    DeviceState.ON,
-    DUMMY_DEVICE_ID8,
-    DUMMY_DEVICE_KEY8,
-    DUMMY_IP_ADDRESS8,
-    DUMMY_MAC_ADDRESS8,
-    DUMMY_DEVICE_NAME8,
-    DUMMY_TOKEN_NEEDED8,
-    DUMMY_LIGHT_2,
-)
-
-DUMMY_TRIPLE_LIGHT_DEVICE = SwitcherLight(
-    DeviceType.LIGHT_SL03,
-    DeviceState.ON,
-    DUMMY_DEVICE_ID9,
-    DUMMY_DEVICE_KEY9,
-    DUMMY_IP_ADDRESS9,
-    DUMMY_MAC_ADDRESS9,
-    DUMMY_DEVICE_NAME9,
-    DUMMY_TOKEN_NEEDED9,
-    DUMMY_LIGHT_3,
-)
-
 DUMMY_SWITCHER_DEVICES = [DUMMY_PLUG_DEVICE, DUMMY_WATER_HEATER_DEVICE]
diff --git a/tests/components/switcher_kis/test_button.py b/tests/components/switcher_kis/test_button.py
index d0604487370..264c163e111 100644
--- a/tests/components/switcher_kis/test_button.py
+++ b/tests/components/switcher_kis/test_button.py
@@ -63,12 +63,7 @@ async def test_assume_button(
 )
 @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True)
 async def test_swing_button(
-    hass: HomeAssistant,
-    entity,
-    swing,
-    mock_bridge,
-    mock_api,
-    monkeypatch: pytest.MonkeyPatch,
+    hass: HomeAssistant, entity, swing, mock_bridge, mock_api, monkeypatch
 ) -> None:
     """Test vertical swing on/off button."""
     monkeypatch.setattr(DEVICE, "remote_id", "ELEC7022")
@@ -93,7 +88,7 @@ async def test_swing_button(
 
 @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True)
 async def test_control_device_fail(
-    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch
+    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch
 ) -> None:
     """Test control device fail."""
     await init_integration(hass)
diff --git a/tests/components/switcher_kis/test_climate.py b/tests/components/switcher_kis/test_climate.py
index c9f7abf34dc..759f7f1bd98 100644
--- a/tests/components/switcher_kis/test_climate.py
+++ b/tests/components/switcher_kis/test_climate.py
@@ -37,7 +37,7 @@ ENTITY_ID = f"{CLIMATE_DOMAIN}.{slugify(DEVICE.name)}"
 
 @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True)
 async def test_climate_hvac_mode(
-    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch
+    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch
 ) -> None:
     """Test climate hvac mode service."""
     await init_integration(hass)
@@ -92,16 +92,12 @@ async def test_climate_hvac_mode(
 
 @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True)
 async def test_climate_temperature(
-    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch
+    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch
 ) -> None:
     """Test climate temperature service."""
     await init_integration(hass)
     assert mock_bridge
 
-    monkeypatch.setattr(DEVICE, "mode", ThermostatMode.HEAT)
-    mock_bridge.mock_callbacks([DEVICE])
-    await hass.async_block_till_done()
-
     # Test initial target temperature
     state = hass.states.get(ENTITY_ID)
     assert state.attributes["temperature"] == 23
@@ -130,7 +126,7 @@ async def test_climate_temperature(
     with patch(
         "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device",
     ) as mock_control_device:
-        with pytest.raises(ServiceValidationError):
+        with pytest.raises(ValueError):
             await hass.services.async_call(
                 CLIMATE_DOMAIN,
                 SERVICE_SET_TEMPERATURE,
@@ -148,7 +144,7 @@ async def test_climate_temperature(
 
 @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True)
 async def test_climate_fan_level(
-    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch
+    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch
 ) -> None:
     """Test climate fan level service."""
     await init_integration(hass)
@@ -183,7 +179,7 @@
 
 @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True)
 async def test_climate_swing(
-    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch
+    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch
 ) -> None:
     """Test climate swing service."""
     await init_integration(hass)
@@ -238,7 +234,9 @@
 
 
 @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True)
-async def test_control_device_fail(hass: HomeAssistant, mock_bridge, mock_api) -> None:
+async def test_control_device_fail(
+    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch
+) -> None:
     """Test control device fail."""
     await init_integration(hass)
     assert mock_bridge
@@ -297,7 +295,7 @@ async def test_control_device_fail(hass: HomeAssistant, mock_bridge, mock_api) -> None:
 
 @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True)
 async def test_bad_update_discard(
-    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch
+    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch
 ) -> None:
     """Test that a bad update from device is discarded."""
     await init_integration(hass)
@@ -320,7 +318,7 @@
 
 @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True)
 async def test_climate_control_errors(
-    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch
+    hass: HomeAssistant, mock_bridge, mock_api, monkeypatch
 ) -> None:
     """Test control with settings not supported by device."""
     await init_integration(hass)
diff --git a/tests/components/switcher_kis/test_config_flow.py b/tests/components/switcher_kis/test_config_flow.py
index 48cc0beacb8..e42b8ac484d 100644
--- a/tests/components/switcher_kis/test_config_flow.py
+++ b/tests/components/switcher_kis/test_config_flow.py
@@ -6,18 +6,10 @@
 import pytest
 
 from homeassistant import config_entries
 from homeassistant.components.switcher_kis.const import DOMAIN
-from homeassistant.const import
CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .consts import ( - DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE, - DUMMY_PLUG_DEVICE, - DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE, - DUMMY_TOKEN, - DUMMY_USERNAME, - DUMMY_WATER_HEATER_DEVICE, -) +from .consts import DUMMY_PLUG_DEVICE, DUMMY_WATER_HEATER_DEVICE from tests.common import MockConfigEntry @@ -51,98 +43,13 @@ async def test_user_setup( assert mock_bridge.is_running is False assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Switcher" - assert result2["result"].data == {CONF_USERNAME: None, CONF_TOKEN: None} + assert result2["result"].data == {} await hass.async_block_till_done() assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.parametrize( - "mock_bridge", - [ - [ - DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE, - DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE, - ] - ], - indirect=True, -) -async def test_user_setup_found_token_device_valid_token( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_bridge -) -> None: - """Test we can finish a config flow with token device found.""" - with patch("homeassistant.components.switcher_kis.utils.DISCOVERY_TIME_SEC", 0): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - - result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - - assert mock_bridge.is_running is False - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "credentials" - - with patch( - "homeassistant.components.switcher_kis.config_flow.validate_token", - return_value=True, - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - {CONF_USERNAME: DUMMY_USERNAME, CONF_TOKEN: DUMMY_TOKEN}, - ) - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "Switcher" - assert result3["result"].data == { - CONF_USERNAME: DUMMY_USERNAME, - CONF_TOKEN: DUMMY_TOKEN, - } - - -@pytest.mark.parametrize( - "mock_bridge", - [ - [ - DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE, - DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE, - ] - ], - indirect=True, -) -async def test_user_setup_found_token_device_invalid_token( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_bridge -) -> None: - """Test we can finish a config flow with token device found.""" - with patch("homeassistant.components.switcher_kis.utils.DISCOVERY_TIME_SEC", 0): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - - result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "credentials" - - with patch( - "homeassistant.components.switcher_kis.config_flow.validate_token", - return_value=False, - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - {CONF_USERNAME: DUMMY_USERNAME, CONF_TOKEN: DUMMY_TOKEN}, - ) - - assert result3["type"] is FlowResultType.FORM - assert result3["errors"] == {"base": "invalid_auth"} - - async def test_user_setup_abort_no_devices_found( hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_bridge ) -> None: @@ -177,62 +84,3 @@ async def test_single_instance(hass: HomeAssistant) -> None: assert 
result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" - - -@pytest.mark.parametrize( - ("user_input"), - [ - ({CONF_USERNAME: DUMMY_USERNAME, CONF_TOKEN: DUMMY_TOKEN}), - ], -) -async def test_reauth_successful( - hass: HomeAssistant, - user_input: dict[str, str], -) -> None: - """Test starting a reauthentication flow.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={CONF_USERNAME: DUMMY_USERNAME, CONF_TOKEN: DUMMY_TOKEN}, - ) - entry.add_to_hass(hass) - - result = await entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - with patch( - "homeassistant.components.switcher_kis.config_flow.validate_token", - return_value=True, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=user_input, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - - -async def test_reauth_invalid_auth(hass: HomeAssistant) -> None: - """Test reauthentication flow with invalid credentials.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={CONF_USERNAME: DUMMY_USERNAME, CONF_TOKEN: DUMMY_TOKEN}, - ) - entry.add_to_hass(hass) - - result = await entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - with patch( - "homeassistant.components.switcher_kis.config_flow.validate_token", - return_value=False, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_USERNAME: "invalid_user", CONF_TOKEN: "invalid_token"}, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "invalid_auth"} diff --git a/tests/components/switcher_kis/test_cover.py b/tests/components/switcher_kis/test_cover.py index d26fff8754c..07f349d1a72 100644 --- a/tests/components/switcher_kis/test_cover.py +++ b/tests/components/switcher_kis/test_cover.py @@ -14,7 +14,10 @@ from homeassistant.components.cover import ( SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, - CoverState, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, ) from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -22,96 +25,20 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.util import slugify from . 
import init_integration -from .consts import ( - DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE as DEVICE3, - DUMMY_SHUTTER_DEVICE as DEVICE, - DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE as DEVICE2, - DUMMY_TOKEN as TOKEN, - DUMMY_USERNAME as USERNAME, -) +from .consts import DUMMY_SHUTTER_DEVICE as DEVICE ENTITY_ID = f"{COVER_DOMAIN}.{slugify(DEVICE.name)}" -ENTITY_ID2 = f"{COVER_DOMAIN}.{slugify(DEVICE2.name)}" -ENTITY_ID3 = f"{COVER_DOMAIN}.{slugify(DEVICE3.name)}_cover_1" -ENTITY_ID3_2 = f"{COVER_DOMAIN}.{slugify(DEVICE3.name)}_cover_2" -@pytest.mark.parametrize( - ( - "device", - "entity_id", - "cover_id", - "position_open", - "position_close", - "direction_open", - "direction_close", - "direction_stop", - ), - [ - ( - DEVICE, - ENTITY_ID, - 0, - [77], - [0], - [ShutterDirection.SHUTTER_UP], - [ShutterDirection.SHUTTER_DOWN], - [ShutterDirection.SHUTTER_STOP], - ), - ( - DEVICE2, - ENTITY_ID2, - 0, - [77], - [0], - [ShutterDirection.SHUTTER_UP], - [ShutterDirection.SHUTTER_DOWN], - [ShutterDirection.SHUTTER_STOP], - ), - ( - DEVICE3, - ENTITY_ID3, - 0, - [77, 0], - [0, 0], - [ShutterDirection.SHUTTER_UP, ShutterDirection.SHUTTER_STOP], - [ShutterDirection.SHUTTER_DOWN, ShutterDirection.SHUTTER_STOP], - [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_STOP], - ), - ( - DEVICE3, - ENTITY_ID3_2, - 1, - [0, 77], - [0, 0], - [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_UP], - [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_DOWN], - [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_STOP], - ), - ], -) -@pytest.mark.parametrize("mock_bridge", [[DEVICE, DEVICE2, DEVICE3]], indirect=True) -async def test_cover( - hass: HomeAssistant, - mock_bridge, - mock_api, - monkeypatch: pytest.MonkeyPatch, - device, - entity_id: str, - cover_id: int, - position_open: list[int], - position_close: list[int], - direction_open: list[ShutterDirection], - direction_close: list[ShutterDirection], - direction_stop: list[ShutterDirection], -) -> None: +@pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) +async def test_cover(hass: HomeAssistant, mock_bridge, mock_api, monkeypatch) -> None: """Test cover services.""" - await init_integration(hass, USERNAME, TOKEN) + await init_integration(hass) assert mock_bridge # Test initial state - open - state = hass.states.get(entity_id) - assert state.state == CoverState.OPEN + state = hass.states.get(ENTITY_ID) + assert state.state == STATE_OPEN # Test set position with patch( @@ -120,18 +47,18 @@ async def test_cover( await hass.services.async_call( COVER_DOMAIN, SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 77}, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_POSITION: 77}, blocking=True, ) - monkeypatch.setattr(device, "position", position_open) - mock_bridge.mock_callbacks([device]) + monkeypatch.setattr(DEVICE, "position", 77) + mock_bridge.mock_callbacks([DEVICE]) await hass.async_block_till_done() assert mock_api.call_count == 2 - mock_control_device.assert_called_once_with(77, cover_id) - state = hass.states.get(entity_id) - assert state.state == CoverState.OPEN + mock_control_device.assert_called_once_with(77) + state = hass.states.get(ENTITY_ID) + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 77 # Test open @@ -141,18 +68,18 @@ async def test_cover( await hass.services.async_call( COVER_DOMAIN, SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: entity_id}, + {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, ) - monkeypatch.setattr(device, "direction", direction_open) - 
mock_bridge.mock_callbacks([device]) + monkeypatch.setattr(DEVICE, "direction", ShutterDirection.SHUTTER_UP) + mock_bridge.mock_callbacks([DEVICE]) await hass.async_block_till_done() assert mock_api.call_count == 4 - mock_control_device.assert_called_once_with(100, cover_id) - state = hass.states.get(entity_id) - assert state.state == CoverState.OPENING + mock_control_device.assert_called_once_with(100) + state = hass.states.get(ENTITY_ID) + assert state.state == STATE_OPENING # Test close with patch( @@ -161,74 +88,58 @@ async def test_cover( await hass.services.async_call( COVER_DOMAIN, SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: entity_id}, + {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, ) - monkeypatch.setattr(device, "direction", direction_close) - mock_bridge.mock_callbacks([device]) + monkeypatch.setattr(DEVICE, "direction", ShutterDirection.SHUTTER_DOWN) + mock_bridge.mock_callbacks([DEVICE]) await hass.async_block_till_done() assert mock_api.call_count == 6 - mock_control_device.assert_called_once_with(0, cover_id) - state = hass.states.get(entity_id) - assert state.state == CoverState.CLOSING + mock_control_device.assert_called_once_with(0) + state = hass.states.get(ENTITY_ID) + assert state.state == STATE_CLOSING # Test stop with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.stop_shutter" + "homeassistant.components.switcher_kis.cover.SwitcherType2Api.stop" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: entity_id}, + {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, ) - monkeypatch.setattr(device, "direction", direction_stop) - mock_bridge.mock_callbacks([device]) + monkeypatch.setattr(DEVICE, "direction", ShutterDirection.SHUTTER_STOP) + mock_bridge.mock_callbacks([DEVICE]) await hass.async_block_till_done() assert mock_api.call_count == 8 - mock_control_device.assert_called_once_with(cover_id) - state = hass.states.get(entity_id) - assert state.state == CoverState.OPEN + mock_control_device.assert_called_once() + state = hass.states.get(ENTITY_ID) + assert state.state == STATE_OPEN # Test closed on position == 0 - monkeypatch.setattr(device, "position", position_close) - mock_bridge.mock_callbacks([device]) + monkeypatch.setattr(DEVICE, "position", 0) + mock_bridge.mock_callbacks([DEVICE]) await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == CoverState.CLOSED + state = hass.states.get(ENTITY_ID) + assert state.state == STATE_CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 -@pytest.mark.parametrize( - ("device", "entity_id", "cover_id"), - [ - (DEVICE, ENTITY_ID, 0), - (DEVICE2, ENTITY_ID2, 0), - (DEVICE3, ENTITY_ID3, 0), - (DEVICE3, ENTITY_ID3_2, 1), - ], -) -@pytest.mark.parametrize("mock_bridge", [[DEVICE, DEVICE2, DEVICE3]], indirect=True) -async def test_cover_control_fail( - hass: HomeAssistant, - mock_bridge, - mock_api, - device, - entity_id: str, - cover_id: int, -) -> None: +@pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) +async def test_cover_control_fail(hass: HomeAssistant, mock_bridge, mock_api) -> None: """Test cover control fail.""" - await init_integration(hass, USERNAME, TOKEN) + await init_integration(hass) assert mock_bridge # Test initial state - open - state = hass.states.get(entity_id) - assert state.state == CoverState.OPEN + state = hass.states.get(ENTITY_ID) + assert state.state == STATE_OPEN # Test exception during set position with patch( @@ -239,21 +150,21 @@ async def test_cover_control_fail( await 
hass.services.async_call( COVER_DOMAIN, SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 44}, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_POSITION: 44}, blocking=True, ) assert mock_api.call_count == 2 - mock_control_device.assert_called_once_with(44, cover_id) - state = hass.states.get(entity_id) + mock_control_device.assert_called_once_with(44) + state = hass.states.get(ENTITY_ID) assert state.state == STATE_UNAVAILABLE # Make device available again - mock_bridge.mock_callbacks([device]) + mock_bridge.mock_callbacks([DEVICE]) await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == CoverState.OPEN + state = hass.states.get(ENTITY_ID) + assert state.state == STATE_OPEN # Test error response during set position with patch( @@ -264,22 +175,11 @@ async def test_cover_control_fail( await hass.services.async_call( COVER_DOMAIN, SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 27}, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_POSITION: 27}, blocking=True, ) assert mock_api.call_count == 4 - mock_control_device.assert_called_once_with(27, cover_id) - state = hass.states.get(entity_id) + mock_control_device.assert_called_once_with(27) + state = hass.states.get(ENTITY_ID) assert state.state == STATE_UNAVAILABLE - - -@pytest.mark.parametrize("mock_bridge", [[DEVICE2, DEVICE3]], indirect=True) -async def test_cover2_no_token( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch -) -> None: - """Test cover with token needed without token specified.""" - await init_integration(hass) - assert mock_bridge - - assert mock_api.call_count == 0 diff --git a/tests/components/switcher_kis/test_diagnostics.py b/tests/components/switcher_kis/test_diagnostics.py index 53572085f9b..f49ab99ba6c 100644 --- a/tests/components/switcher_kis/test_diagnostics.py +++ b/tests/components/switcher_kis/test_diagnostics.py @@ -1,23 +1,17 @@ """Tests for the diagnostics data provided by Switcher.""" -import pytest - from homeassistant.components.diagnostics import REDACTED from homeassistant.core import HomeAssistant from . 
import init_integration from .consts import DUMMY_WATER_HEATER_DEVICE -from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_bridge, - monkeypatch: pytest.MonkeyPatch, + hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge, monkeypatch ) -> None: """Test diagnostics.""" entry = await init_integration(hass) @@ -40,7 +34,7 @@ async def test_diagnostics( "__type": "", "repr": ( ", False)>" + "1, )>" ), }, "electric_current": 12.8, @@ -50,7 +44,6 @@ async def test_diagnostics( "name": "Heater FE12", "power_consumption": 2780, "remaining_time": "01:29:32", - "token_needed": False, } ], "entry": { @@ -66,8 +59,5 @@ async def test_diagnostics( "source": "user", "unique_id": "switcher_kis", "disabled_by": None, - "created_at": ANY, - "modified_at": ANY, - "discovery_keys": {}, }, } diff --git a/tests/components/switcher_kis/test_light.py b/tests/components/switcher_kis/test_light.py deleted file mode 100644 index 60c851bf6a9..00000000000 --- a/tests/components/switcher_kis/test_light.py +++ /dev/null @@ -1,195 +0,0 @@ -"""Test the Switcher light platform.""" - -from unittest.mock import patch - -from aioswitcher.api import SwitcherBaseResponse -from aioswitcher.device import DeviceState -import pytest - -from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_OFF, - STATE_ON, - STATE_UNAVAILABLE, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.util import slugify - -from . 
import init_integration -from .consts import ( - DUMMY_DUAL_LIGHT_DEVICE as DEVICE4, - DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE as DEVICE2, - DUMMY_LIGHT_DEVICE as DEVICE3, - DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE as DEVICE, - DUMMY_TOKEN as TOKEN, - DUMMY_TRIPLE_LIGHT_DEVICE as DEVICE5, - DUMMY_USERNAME as USERNAME, -) - -ENTITY_ID = f"{LIGHT_DOMAIN}.{slugify(DEVICE.name)}_light_1" -ENTITY_ID_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE.name)}_light_2" -ENTITY_ID2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE2.name)}" -ENTITY_ID3 = f"{LIGHT_DOMAIN}.{slugify(DEVICE3.name)}" -ENTITY_ID4 = f"{LIGHT_DOMAIN}.{slugify(DEVICE4.name)}_light_1" -ENTITY_ID4_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE4.name)}_light_2" -ENTITY_ID5 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_1" -ENTITY_ID5_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_2" -ENTITY_ID5_3 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_3" - - -@pytest.mark.parametrize( - ("device", "entity_id", "light_id", "device_state"), - [ - (DEVICE, ENTITY_ID, 0, [DeviceState.OFF, DeviceState.ON]), - (DEVICE, ENTITY_ID_2, 1, [DeviceState.ON, DeviceState.OFF]), - (DEVICE2, ENTITY_ID2, 0, [DeviceState.OFF]), - (DEVICE3, ENTITY_ID3, 0, [DeviceState.OFF]), - (DEVICE4, ENTITY_ID4, 0, [DeviceState.OFF, DeviceState.ON]), - (DEVICE4, ENTITY_ID4_2, 1, [DeviceState.ON, DeviceState.OFF]), - (DEVICE5, ENTITY_ID5, 0, [DeviceState.OFF, DeviceState.ON, DeviceState.ON]), - (DEVICE5, ENTITY_ID5_2, 1, [DeviceState.ON, DeviceState.OFF, DeviceState.ON]), - (DEVICE5, ENTITY_ID5_3, 2, [DeviceState.ON, DeviceState.ON, DeviceState.OFF]), - ], -) -@pytest.mark.parametrize( - "mock_bridge", [[DEVICE, DEVICE2, DEVICE3, DEVICE4, DEVICE5]], indirect=True -) -async def test_light( - hass: HomeAssistant, - mock_bridge, - mock_api, - monkeypatch: pytest.MonkeyPatch, - device, - entity_id: str, - light_id: int, - device_state: list[DeviceState], -) -> None: - """Test the light.""" - await init_integration(hass, USERNAME, TOKEN) - assert mock_bridge - - # Test initial state - light on - state = hass.states.get(entity_id) - assert state.state == STATE_ON - - # Test state change on --> off for light - monkeypatch.setattr(device, "light", device_state) - mock_bridge.mock_callbacks([device]) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - - # Test turning on light - with patch( - "homeassistant.components.switcher_kis.light.SwitcherType2Api.set_light", - ) as mock_set_light: - await hass.services.async_call( - LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - - assert mock_api.call_count == 2 - mock_set_light.assert_called_once_with(DeviceState.ON, light_id) - state = hass.states.get(entity_id) - assert state.state == STATE_ON - - # Test turning off light - with patch( - "homeassistant.components.switcher_kis.light.SwitcherType2Api.set_light" - ) as mock_set_light: - await hass.services.async_call( - LIGHT_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - - assert mock_api.call_count == 4 - mock_set_light.assert_called_once_with(DeviceState.OFF, light_id) - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - - -@pytest.mark.parametrize( - ("device", "entity_id", "light_id", "device_state"), - [ - (DEVICE, ENTITY_ID, 0, [DeviceState.OFF, DeviceState.ON]), - (DEVICE, ENTITY_ID_2, 1, [DeviceState.ON, DeviceState.OFF]), - (DEVICE2, ENTITY_ID2, 0, [DeviceState.OFF]), - (DEVICE3, ENTITY_ID3, 0, [DeviceState.OFF]), - (DEVICE4, ENTITY_ID4, 0, [DeviceState.OFF, 
DeviceState.ON]), - (DEVICE4, ENTITY_ID4_2, 1, [DeviceState.ON, DeviceState.OFF]), - (DEVICE5, ENTITY_ID5, 0, [DeviceState.OFF, DeviceState.ON, DeviceState.ON]), - (DEVICE5, ENTITY_ID5_2, 1, [DeviceState.ON, DeviceState.OFF, DeviceState.ON]), - (DEVICE5, ENTITY_ID5_3, 2, [DeviceState.ON, DeviceState.ON, DeviceState.OFF]), - ], -) -@pytest.mark.parametrize( - "mock_bridge", [[DEVICE, DEVICE2, DEVICE3, DEVICE4, DEVICE5]], indirect=True -) -async def test_light_control_fail( - hass: HomeAssistant, - mock_bridge, - mock_api, - monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, - device, - entity_id: str, - light_id: int, - device_state: list[DeviceState], -) -> None: - """Test light control fail.""" - await init_integration(hass, USERNAME, TOKEN) - assert mock_bridge - - # Test initial state - light off - monkeypatch.setattr(device, "light", device_state) - mock_bridge.mock_callbacks([device]) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - - # Test exception during turn on - with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_light", - side_effect=RuntimeError("fake error"), - ) as mock_control_device: - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - assert mock_api.call_count == 2 - mock_control_device.assert_called_once_with(DeviceState.ON, light_id) - state = hass.states.get(entity_id) - assert state.state == STATE_UNAVAILABLE - - # Make device available again - mock_bridge.mock_callbacks([device]) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - - # Test error response during turn on - with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_light", - return_value=SwitcherBaseResponse(None), - ) as mock_control_device: - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - assert mock_api.call_count == 4 - mock_control_device.assert_called_once_with(DeviceState.ON, light_id) - state = hass.states.get(entity_id) - assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/switcher_kis/test_sensor.py b/tests/components/switcher_kis/test_sensor.py index 8ccc33f2d37..1be2efed987 100644 --- a/tests/components/switcher_kis/test_sensor.py +++ b/tests/components/switcher_kis/test_sensor.py @@ -74,9 +74,7 @@ async def test_sensor_disabled( @pytest.mark.parametrize("mock_bridge", [[DUMMY_WATER_HEATER_DEVICE]], indirect=True) -async def test_sensor_update( - hass: HomeAssistant, mock_bridge, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_sensor_update(hass: HomeAssistant, mock_bridge, monkeypatch) -> None: """Test sensor update.""" await init_integration(hass) assert mock_bridge diff --git a/tests/components/switcher_kis/test_services.py b/tests/components/switcher_kis/test_services.py index 26c54ee53ed..039daec4c97 100644 --- a/tests/components/switcher_kis/test_services.py +++ b/tests/components/switcher_kis/test_services.py @@ -30,7 +30,7 @@ from .consts import ( @pytest.mark.parametrize("mock_bridge", [[DUMMY_WATER_HEATER_DEVICE]], indirect=True) async def test_turn_on_with_timer_service( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch ) -> None: """Test the turn on 
with timer service.""" await init_integration(hass) diff --git a/tests/components/switcher_kis/test_switch.py b/tests/components/switcher_kis/test_switch.py index f14a8f5b1ca..058546ac2ae 100644 --- a/tests/components/switcher_kis/test_switch.py +++ b/tests/components/switcher_kis/test_switch.py @@ -23,9 +23,7 @@ from .consts import DUMMY_PLUG_DEVICE, DUMMY_WATER_HEATER_DEVICE @pytest.mark.parametrize("mock_bridge", [[DUMMY_WATER_HEATER_DEVICE]], indirect=True) -async def test_switch( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch -) -> None: +async def test_switch(hass: HomeAssistant, mock_bridge, mock_api, monkeypatch) -> None: """Test the switch.""" await init_integration(hass) assert mock_bridge @@ -77,7 +75,7 @@ async def test_switch_control_fail( hass: HomeAssistant, mock_bridge, mock_api, - monkeypatch: pytest.MonkeyPatch, + monkeypatch, caplog: pytest.LogCaptureFixture, ) -> None: """Test switch control fail.""" diff --git a/tests/components/synology_dsm/conftest.py b/tests/components/synology_dsm/conftest.py index 0e8f79ffd40..2f05d0187be 100644 --- a/tests/components/synology_dsm/conftest.py +++ b/tests/components/synology_dsm/conftest.py @@ -1,9 +1,9 @@ """Configure Synology DSM tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/synology_dsm/test_config_flow.py b/tests/components/synology_dsm/test_config_flow.py index e5494b7179f..1574526a701 100644 --- a/tests/components/synology_dsm/test_config_flow.py +++ b/tests/components/synology_dsm/test_config_flow.py @@ -21,7 +21,12 @@ from homeassistant.components.synology_dsm.const import ( DEFAULT_SNAPSHOT_QUALITY, DOMAIN, ) -from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_SSDP, + SOURCE_USER, + SOURCE_ZEROCONF, +) from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -292,7 +297,24 @@ async def test_reauth(hass: HomeAssistant, service: MagicMock) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + with patch( + "homeassistant.config_entries.ConfigEntries.async_reload", + return_value=True, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + "title_placeholders": {"name": entry.title}, + }, + data={ + CONF_HOST: HOST, + CONF_USERNAME: USERNAME, + CONF_PASSWORD: PASSWORD, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/synology_dsm/test_media_source.py b/tests/components/synology_dsm/test_media_source.py index 0c7ab6bc1cc..433a4b15c23 100644 --- a/tests/components/synology_dsm/test_media_source.py +++ b/tests/components/synology_dsm/test_media_source.py @@ -4,7 +4,6 @@ from pathlib import Path import tempfile from unittest.mock import AsyncMock, MagicMock, patch -from aiohttp import web import pytest from synology_dsm.api.photos import SynoPhotosAlbum, SynoPhotosItem from synology_dsm.exceptions import SynologyDSMException @@ -31,7 +30,7 @@ from homeassistant.const import ( CONF_USERNAME, ) from homeassistant.core import HomeAssistant -from homeassistant.util.aiohttp import MockRequest +from homeassistant.util.aiohttp import MockRequest, web from 
.consts import HOST, MACS, PASSWORD, PORT, USE_SSL, USERNAME @@ -48,15 +47,11 @@ def dsm_with_photos() -> MagicMock: dsm.surveillance_station.update = AsyncMock(return_value=True) dsm.upgrade.update = AsyncMock(return_value=True) - dsm.photos.get_albums = AsyncMock( - return_value=[SynoPhotosAlbum(1, "Album 1", 10, "")] - ) + dsm.photos.get_albums = AsyncMock(return_value=[SynoPhotosAlbum(1, "Album 1", 10)]) dsm.photos.get_items_from_album = AsyncMock( return_value=[ - SynoPhotosItem( - 10, "", "filename.jpg", 12345, "10_1298753", "sm", False, "" - ), - SynoPhotosItem(10, "", "filename.jpg", 12345, "10_1298753", "sm", True, ""), + SynoPhotosItem(10, "", "filename.jpg", 12345, "10_1298753", "sm", False), + SynoPhotosItem(10, "", "filename.jpg", 12345, "10_1298753", "sm", True), ] ) dsm.photos.get_item_thumbnail_url = AsyncMock( @@ -100,22 +95,17 @@ async def test_resolve_media_bad_identifier( [ ( "ABC012345/10/27643_876876/filename.jpg", - "/synology_dsm/ABC012345/27643_876876/filename.jpg/", + "/synology_dsm/ABC012345/27643_876876/filename.jpg", "image/jpeg", ), ( "ABC012345/12/12631_47189/filename.png", - "/synology_dsm/ABC012345/12631_47189/filename.png/", + "/synology_dsm/ABC012345/12631_47189/filename.png", "image/png", ), ( "ABC012345/12/12631_47189/filename.png_shared", - "/synology_dsm/ABC012345/12631_47189/filename.png_shared/", - "image/png", - ), - ( - "ABC012345/12_dmypass/12631_47189/filename.png", - "/synology_dsm/ABC012345/12631_47189/filename.png/dmypass", + "/synology_dsm/ABC012345/12631_47189/filename.png_shared", "image/png", ), ], @@ -259,7 +249,7 @@ async def test_browse_media_get_albums( assert result.children[0].identifier == "mocked_syno_dsm_entry/0" assert result.children[0].title == "All images" assert isinstance(result.children[1], BrowseMedia) - assert result.children[1].identifier == "mocked_syno_dsm_entry/1_" + assert result.children[1].identifier == "mocked_syno_dsm_entry/1" assert result.children[1].title == "Album 1" @@ -391,7 +381,7 @@ async def test_browse_media_get_items( assert len(result.children) == 2 item = result.children[0] assert isinstance(item, BrowseMedia) - assert item.identifier == "mocked_syno_dsm_entry/1_/10_1298753/filename.jpg" + assert item.identifier == "mocked_syno_dsm_entry/1/10_1298753/filename.jpg" assert item.title == "filename.jpg" assert item.media_class == MediaClass.IMAGE assert item.media_content_type == "image/jpeg" @@ -400,7 +390,7 @@ async def test_browse_media_get_items( assert item.thumbnail == "http://my.thumbnail.url" item = result.children[1] assert isinstance(item, BrowseMedia) - assert item.identifier == "mocked_syno_dsm_entry/1_/10_1298753/filename.jpg_shared" + assert item.identifier == "mocked_syno_dsm_entry/1/10_1298753/filename.jpg_shared" assert item.title == "filename.jpg" assert item.media_class == MediaClass.IMAGE assert item.media_content_type == "image/jpeg" @@ -444,24 +434,24 @@ async def test_media_view( assert await hass.config_entries.async_setup(entry.entry_id) with pytest.raises(web.HTTPNotFound): - await view.get(request, "", "10_1298753/filename/") + await view.get(request, "", "10_1298753/filename") # exception in download_item() dsm_with_photos.photos.download_item = AsyncMock( side_effect=SynologyDSMException("", None) ) with pytest.raises(web.HTTPNotFound): - await view.get(request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg/") + await view.get(request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg") # success dsm_with_photos.photos.download_item = AsyncMock(return_value=b"xxxx") with 
patch.object(tempfile, "tempdir", tmp_path): result = await view.get( - request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg/" + request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg" ) assert isinstance(result, web.Response) with patch.object(tempfile, "tempdir", tmp_path): result = await view.get( - request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg_shared/" + request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg_shared" ) assert isinstance(result, web.Response) diff --git a/tests/components/system_bridge/__init__.py b/tests/components/system_bridge/__init__.py index 0606ce8e258..edbe5469705 100644 --- a/tests/components/system_bridge/__init__.py +++ b/tests/components/system_bridge/__init__.py @@ -1,52 +1,38 @@ """Tests for the System Bridge integration.""" from collections.abc import Awaitable, Callable +from dataclasses import asdict from ipaddress import ip_address from typing import Any -from systembridgemodels.fixtures.modules.battery import FIXTURE_BATTERY -from systembridgemodels.fixtures.modules.cpu import FIXTURE_CPU -from systembridgemodels.fixtures.modules.disks import FIXTURE_DISKS -from systembridgemodels.fixtures.modules.displays import FIXTURE_DISPLAYS -from systembridgemodels.fixtures.modules.gpus import FIXTURE_GPUS -from systembridgemodels.fixtures.modules.media import FIXTURE_MEDIA -from systembridgemodels.fixtures.modules.memory import FIXTURE_MEMORY -from systembridgemodels.fixtures.modules.processes import FIXTURE_PROCESSES -from systembridgemodels.fixtures.modules.system import FIXTURE_SYSTEM -from systembridgemodels.modules import Module, ModulesData +from systembridgeconnector.const import TYPE_DATA_UPDATE +from systembridgemodels.const import MODEL_SYSTEM +from systembridgemodels.modules import System +from systembridgemodels.response import Response from homeassistant.components import zeroconf from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN -from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry +FIXTURE_MAC_ADDRESS = "aa:bb:cc:dd:ee:ff" +FIXTURE_UUID = "e91bf575-56f3-4c83-8f42-70ac17adcd33" -FIXTURE_TITLE = "TestSystem" - -FIXTURE_REQUEST_ID = "test" - -FIXTURE_MAC_ADDRESS = FIXTURE_SYSTEM.mac_address -FIXTURE_UUID = FIXTURE_SYSTEM.uuid - -FIXTURE_AUTH_INPUT = { - CONF_TOKEN: "abc-123-def-456-ghi", -} +FIXTURE_AUTH_INPUT = {CONF_TOKEN: "abc-123-def-456-ghi"} FIXTURE_USER_INPUT = { CONF_TOKEN: "abc-123-def-456-ghi", - CONF_HOST: "127.0.0.1", + CONF_HOST: "test-bridge", CONF_PORT: "9170", } FIXTURE_ZEROCONF_INPUT = { CONF_TOKEN: "abc-123-def-456-ghi", - CONF_HOST: FIXTURE_USER_INPUT[CONF_HOST], + CONF_HOST: "1.1.1.1", CONF_PORT: "9170", } FIXTURE_ZEROCONF = zeroconf.ZeroconfServiceInfo( - ip_address=ip_address(FIXTURE_USER_INPUT[CONF_HOST]), - ip_addresses=[ip_address(FIXTURE_USER_INPUT[CONF_HOST])], + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1")], port=9170, hostname="test-bridge.local.", type="_system-bridge._tcp.local.", @@ -55,7 +41,7 @@ FIXTURE_ZEROCONF = zeroconf.ZeroconfServiceInfo( "address": "http://test-bridge:9170", "fqdn": "test-bridge", "host": "test-bridge", - "ip": FIXTURE_USER_INPUT[CONF_HOST], + "ip": "1.1.1.1", "mac": FIXTURE_MAC_ADDRESS, "port": "9170", "uuid": FIXTURE_UUID, @@ -63,8 +49,8 @@ FIXTURE_ZEROCONF = zeroconf.ZeroconfServiceInfo( ) FIXTURE_ZEROCONF_BAD = zeroconf.ZeroconfServiceInfo( - ip_address=ip_address(FIXTURE_USER_INPUT[CONF_HOST]), - ip_addresses=[ip_address(FIXTURE_USER_INPUT[CONF_HOST])], + ip_address=ip_address("1.1.1.1"), 
+ ip_addresses=[ip_address("1.1.1.1")], port=9170, hostname="test-bridge.local.", type="_system-bridge._tcp.local.", @@ -74,37 +60,57 @@ FIXTURE_ZEROCONF_BAD = zeroconf.ZeroconfServiceInfo( }, ) -FIXTURE_DATA_RESPONSE = ModulesData( - system=FIXTURE_SYSTEM, + +FIXTURE_SYSTEM = System( + boot_time=1, + fqdn="", + hostname="1.1.1.1", + ip_address_4="1.1.1.1", + mac_address=FIXTURE_MAC_ADDRESS, + platform="", + platform_version="", + uptime=1, + uuid=FIXTURE_UUID, + version="", + version_latest="", + version_newer_available=False, + users=[], +) + +FIXTURE_DATA_RESPONSE = Response( + id="1234", + type=TYPE_DATA_UPDATE, + subtype=None, + message="Data received", + module=MODEL_SYSTEM, + data=asdict(FIXTURE_SYSTEM), +) + +FIXTURE_DATA_RESPONSE_BAD = Response( + id="1234", + type=TYPE_DATA_UPDATE, + subtype=None, + message="Data received", + module=MODEL_SYSTEM, + data={}, +) + +FIXTURE_DATA_RESPONSE_BAD = Response( + id="1234", + type=TYPE_DATA_UPDATE, + subtype=None, + message="Data received", + module=MODEL_SYSTEM, + data={}, ) -async def setup_integration( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> bool: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - setup_result = await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return setup_result - - async def mock_data_listener( + self, callback: Callable[[str, Any], Awaitable[None]] | None = None, _: bool = False, ): """Mock websocket data listener.""" if callback is not None: # Simulate data received from the websocket - await callback(Module.BATTERY, FIXTURE_BATTERY) - await callback(Module.CPU, FIXTURE_CPU) - await callback(Module.DISKS, FIXTURE_DISKS) - await callback(Module.DISPLAYS, FIXTURE_DISPLAYS) - await callback(Module.GPUS, FIXTURE_GPUS) - await callback(Module.MEDIA, FIXTURE_MEDIA) - await callback(Module.MEMORY, FIXTURE_MEMORY) - await callback(Module.PROCESSES, FIXTURE_PROCESSES) - await callback(Module.SYSTEM, FIXTURE_SYSTEM) + await callback(MODEL_SYSTEM, FIXTURE_SYSTEM) diff --git a/tests/components/system_bridge/conftest.py b/tests/components/system_bridge/conftest.py deleted file mode 100644 index 2f1f87485e7..00000000000 --- a/tests/components/system_bridge/conftest.py +++ /dev/null @@ -1,195 +0,0 @@ -"""Fixtures for System Bridge integration tests.""" - -from __future__ import annotations - -from collections.abc import Generator -from typing import Final -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -from systembridgeconnector.const import EventKey, EventType -from systembridgemodels.fixtures.modules.battery import FIXTURE_BATTERY -from systembridgemodels.fixtures.modules.cpu import FIXTURE_CPU -from systembridgemodels.fixtures.modules.disks import FIXTURE_DISKS -from systembridgemodels.fixtures.modules.displays import FIXTURE_DISPLAYS -from systembridgemodels.fixtures.modules.gpus import FIXTURE_GPUS -from systembridgemodels.fixtures.modules.media import FIXTURE_MEDIA -from systembridgemodels.fixtures.modules.memory import FIXTURE_MEMORY -from systembridgemodels.fixtures.modules.networks import FIXTURE_NETWORKS -from systembridgemodels.fixtures.modules.processes import FIXTURE_PROCESSES -from systembridgemodels.fixtures.modules.sensors import FIXTURE_SENSORS -from systembridgemodels.fixtures.modules.system import FIXTURE_SYSTEM -from systembridgemodels.media_directories import MediaDirectory -from systembridgemodels.media_files import MediaFile, MediaFiles -from systembridgemodels.modules import 
Module, ModulesData, RegisterDataListener -from systembridgemodels.response import Response - -from homeassistant.components.system_bridge.config_flow import SystemBridgeConfigFlow -from homeassistant.components.system_bridge.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN -from homeassistant.core import HomeAssistant - -from . import ( - FIXTURE_REQUEST_ID, - FIXTURE_TITLE, - FIXTURE_USER_INPUT, - FIXTURE_UUID, - mock_data_listener, - setup_integration, -) - -from tests.common import MockConfigEntry - -REGISTER_MODULES: Final[list[Module]] = [ - Module.SYSTEM, -] - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock ConfigEntry.""" - return MockConfigEntry( - title=FIXTURE_TITLE, - domain=DOMAIN, - unique_id=FIXTURE_UUID, - version=SystemBridgeConfigFlow.VERSION, - minor_version=SystemBridgeConfigFlow.MINOR_VERSION, - data={ - CONF_HOST: FIXTURE_USER_INPUT[CONF_HOST], - CONF_PORT: FIXTURE_USER_INPUT[CONF_PORT], - CONF_TOKEN: FIXTURE_USER_INPUT[CONF_TOKEN], - }, - ) - - -@pytest.fixture(autouse=True) -def mock_setup_notify_platform() -> Generator[AsyncMock]: - """Mock notify platform setup.""" - with patch( - "homeassistant.helpers.discovery.async_load_platform", - ) as mock_setup_notify_platform: - yield mock_setup_notify_platform - - -@pytest.fixture -def mock_version() -> Generator[AsyncMock]: - """Return a mocked Version class.""" - with patch( - "homeassistant.components.system_bridge.Version", - autospec=True, - ) as mock_version: - version = mock_version.return_value - version.check_supported.return_value = True - - yield version - - -@pytest.fixture -def mock_websocket_client( - register_data_listener_model: RegisterDataListener = RegisterDataListener( - modules=REGISTER_MODULES, - ), -) -> Generator[MagicMock]: - """Return a mocked WebSocketClient client.""" - - with ( - patch( - "homeassistant.components.system_bridge.coordinator.WebSocketClient", - autospec=True, - ) as mock_websocket_client, - patch( - "homeassistant.components.system_bridge.config_flow.WebSocketClient", - new=mock_websocket_client, - ), - ): - websocket_client = mock_websocket_client.return_value - websocket_client.connected = False - websocket_client.get_data.return_value = ModulesData( - battery=FIXTURE_BATTERY, - cpu=FIXTURE_CPU, - disks=FIXTURE_DISKS, - displays=FIXTURE_DISPLAYS, - gpus=FIXTURE_GPUS, - media=FIXTURE_MEDIA, - memory=FIXTURE_MEMORY, - networks=FIXTURE_NETWORKS, - processes=FIXTURE_PROCESSES, - sensors=FIXTURE_SENSORS, - system=FIXTURE_SYSTEM, - ) - websocket_client.register_data_listener.return_value = Response( - id=FIXTURE_REQUEST_ID, - type=EventType.DATA_LISTENER_REGISTERED, - message="Data listener registered", - data={EventKey.MODULES: register_data_listener_model.modules}, - ) - # Trigger callback when listener is registered - websocket_client.listen.side_effect = mock_data_listener - - websocket_client.get_directories.return_value = [ - MediaDirectory( - key="documents", - path="/home/user/documents", - ) - ] - websocket_client.get_files.return_value = MediaFiles( - files=[ - MediaFile( - name="testsubdirectory", - path="testsubdirectory", - fullpath="/home/user/documents/testsubdirectory", - size=100, - last_accessed=1630000000, - created=1630000000, - modified=1630000000, - is_directory=True, - is_file=False, - is_link=False, - ), - MediaFile( - name="testfile.txt", - path="testfile.txt", - fullpath="/home/user/documents/testfile.txt", - size=100, - 
last_accessed=1630000000, - created=1630000000, - modified=1630000000, - is_directory=False, - is_file=True, - is_link=False, - mime_type="text/plain", - ), - MediaFile( - name="testfile.jpg", - path="testfile.jpg", - fullpath="/home/user/documents/testimage.jpg", - size=100, - last_accessed=1630000000, - created=1630000000, - modified=1630000000, - is_directory=False, - is_file=True, - is_link=False, - mime_type="image/jpeg", - ), - ], - path="", - ) - - yield websocket_client - - -@pytest.fixture -async def init_integration( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_version: MagicMock, - mock_websocket_client: MagicMock, -) -> MockConfigEntry: - """Initialize the System Bridge integration.""" - assert await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.LOADED - - return mock_config_entry diff --git a/tests/components/system_bridge/snapshots/test_media_source.ambr b/tests/components/system_bridge/snapshots/test_media_source.ambr deleted file mode 100644 index 53e0e8416e9..00000000000 --- a/tests/components/system_bridge/snapshots/test_media_source.ambr +++ /dev/null @@ -1,61 +0,0 @@ -# serializer version: 1 -# name: test_directory[system_bridge_media_source_directory] - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_type': '', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'TestSystem - documents', - }) -# --- -# name: test_entry[system_bridge_media_source_entry] - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_type': '', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'TestSystem', - }) -# --- -# name: test_file[system_bridge_media_source_file_image] - dict({ - 'mime_type': 'image/jpeg', - 'url': 'http://127.0.0.1:9170/api/media/file/data?token=abc-123-def-456-ghi&base=documents&path=testimage.jpg', - }) -# --- -# name: test_file[system_bridge_media_source_file_text] - dict({ - 'mime_type': 'text/plain', - 'url': 'http://127.0.0.1:9170/api/media/file/data?token=abc-123-def-456-ghi&base=documents&path=testfile.txt', - }) -# --- -# name: test_root[system_bridge_media_source_root] - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_type': '', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'System Bridge', - }) -# --- -# name: test_subdirectory[system_bridge_media_source_subdirectory] - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_type': '', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'TestSystem - documents/testsubdirectory', - }) -# --- diff --git a/tests/components/system_bridge/test_config_flow.py b/tests/components/system_bridge/test_config_flow.py index ada44de2d12..16a6f5d0f56 100644 --- a/tests/components/system_bridge/test_config_flow.py +++ b/tests/components/system_bridge/test_config_flow.py @@ -69,7 +69,7 @@ async def test_user_flow(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "127.0.0.1" + assert result2["title"] == "test-bridge" assert result2["data"] == FIXTURE_USER_INPUT assert len(mock_setup_entry.mock_calls) == 1 @@ -259,12 +259,9 @@ async def test_form_unknown_error(hass: HomeAssistant) -> None: async def test_reauth_authorization_error(hass: HomeAssistant) -> None: """Test we show user form on authorization error.""" - 
mock_config = MockConfigEntry( - domain=DOMAIN, unique_id=FIXTURE_UUID, data=FIXTURE_USER_INPUT + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "reauth"}, data=FIXTURE_USER_INPUT ) - mock_config.add_to_hass(hass) - - result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "authenticate" @@ -294,12 +291,9 @@ async def test_reauth_authorization_error(hass: HomeAssistant) -> None: async def test_reauth_connection_error(hass: HomeAssistant) -> None: """Test we show user form on connection error.""" - mock_config = MockConfigEntry( - domain=DOMAIN, unique_id=FIXTURE_UUID, data=FIXTURE_USER_INPUT + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "reauth"}, data=FIXTURE_USER_INPUT ) - mock_config.add_to_hass(hass) - - result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "authenticate" @@ -342,12 +336,9 @@ async def test_reauth_connection_error(hass: HomeAssistant) -> None: async def test_reauth_connection_closed_error(hass: HomeAssistant) -> None: """Test we show user form on connection error.""" - mock_config = MockConfigEntry( - domain=DOMAIN, unique_id=FIXTURE_UUID, data=FIXTURE_USER_INPUT + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "reauth"}, data=FIXTURE_USER_INPUT ) - mock_config.add_to_hass(hass) - - result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "authenticate" @@ -382,7 +373,9 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "reauth"}, data=FIXTURE_USER_INPUT + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "authenticate" @@ -448,7 +441,7 @@ async def test_zeroconf_flow(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "127.0.0.1" + assert result2["title"] == "1.1.1.1" assert result2["data"] == FIXTURE_ZEROCONF_INPUT assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/system_bridge/test_media_source.py b/tests/components/system_bridge/test_media_source.py deleted file mode 100644 index 58ee4ebe05c..00000000000 --- a/tests/components/system_bridge/test_media_source.py +++ /dev/null @@ -1,148 +0,0 @@ -"""Test the System Bridge integration.""" - -import pytest -from syrupy.assertion import SnapshotAssertion -from syrupy.filters import paths - -from homeassistant.components.media_player import BrowseError -from homeassistant.components.media_source import ( - DOMAIN as MEDIA_SOURCE_DOMAIN, - URI_SCHEME, - async_browse_media, - async_resolve_media, -) -from homeassistant.components.system_bridge.const import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry - - -@pytest.fixture(autouse=True) -async def setup_component(hass: HomeAssistant) -> None: - """Set up component.""" - assert await async_setup_component( - hass, - MEDIA_SOURCE_DOMAIN, - {}, - ) - - -async def test_root( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - init_integration: MockConfigEntry, -) -> None: - """Test root media browsing.""" - browse_media_root = await 
async_browse_media( - hass, - f"{URI_SCHEME}{DOMAIN}", - ) - - assert browse_media_root.as_dict() == snapshot( - name=f"{DOMAIN}_media_source_root", - exclude=paths("children", "media_content_id"), - ) - - -async def test_entry( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - init_integration: MockConfigEntry, -) -> None: - """Test browsing entry.""" - browse_media_entry = await async_browse_media( - hass, - f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}", - ) - - assert browse_media_entry.as_dict() == snapshot( - name=f"{DOMAIN}_media_source_entry", - exclude=paths("children", "media_content_id"), - ) - - -async def test_directory( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - init_integration: MockConfigEntry, -) -> None: - """Test browsing directory.""" - browse_media_directory = await async_browse_media( - hass, - f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}~~documents", - ) - - assert browse_media_directory.as_dict() == snapshot( - name=f"{DOMAIN}_media_source_directory", - exclude=paths("children", "media_content_id"), - ) - - -async def test_subdirectory( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - init_integration: MockConfigEntry, -) -> None: - """Test browsing directory.""" - browse_media_directory = await async_browse_media( - hass, - f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}~~documents/testsubdirectory", - ) - - assert browse_media_directory.as_dict() == snapshot( - name=f"{DOMAIN}_media_source_subdirectory", - exclude=paths("children", "media_content_id"), - ) - - -async def test_file( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - init_integration: MockConfigEntry, -) -> None: - """Test browsing file.""" - resolve_media_file = await async_resolve_media( - hass, - f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}~~documents/testfile.txt~~text/plain", - None, - ) - - assert resolve_media_file == snapshot( - name=f"{DOMAIN}_media_source_file_text", - ) - - resolve_media_file = await async_resolve_media( - hass, - f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}~~documents/testimage.jpg~~image/jpeg", - None, - ) - - assert resolve_media_file == snapshot( - name=f"{DOMAIN}_media_source_file_image", - ) - - -async def test_bad_entry( - hass: HomeAssistant, - init_integration: MockConfigEntry, -) -> None: - """Test invalid entry raises BrowseError.""" - with pytest.raises(BrowseError): - await async_browse_media( - hass, - f"{URI_SCHEME}{DOMAIN}/badentryid", - ) - - with pytest.raises(BrowseError): - await async_browse_media( - hass, - f"{URI_SCHEME}{DOMAIN}/badentryid~~baddirectory", - ) - - with pytest.raises(ValueError): - await async_resolve_media( - hass, - f"{URI_SCHEME}{DOMAIN}/badentryid~~baddirectory/badfile.txt~~text/plain", - None, - ) diff --git a/tests/components/system_health/test_init.py b/tests/components/system_health/test_init.py index 2237edc9647..e51ab8fab99 100644 --- a/tests/components/system_health/test_init.py +++ b/tests/components/system_health/test_init.py @@ -1,6 +1,5 @@ """Tests for the system health component init.""" -from typing import Any from unittest.mock import AsyncMock, Mock, patch from aiohttp.client_exceptions import ClientError @@ -15,9 +14,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import WebSocketGenerator -async def gather_system_health_info( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> dict[str, Any]: +async def gather_system_health_info(hass, hass_ws_client): """Gather all info.""" client = await hass_ws_client(hass) @@ 
-75,7 +72,7 @@ async def test_info_endpoint_register_callback( ) -> None: """Test that the info endpoint allows registering callbacks.""" - async def mock_info(hass: HomeAssistant) -> dict[str, Any]: + async def mock_info(hass): return {"storage": "YAML"} async_register_info(hass, "lovelace", mock_info) @@ -95,7 +92,7 @@ async def test_info_endpoint_register_callback_timeout( ) -> None: """Test that the info endpoint timing out.""" - async def mock_info(hass: HomeAssistant) -> dict[str, Any]: + async def mock_info(hass): raise TimeoutError async_register_info(hass, "lovelace", mock_info) @@ -112,8 +109,8 @@ async def test_info_endpoint_register_callback_exc( ) -> None: """Test that the info endpoint requires auth.""" - async def mock_info(hass: HomeAssistant) -> dict[str, Any]: - raise Exception("TEST ERROR") # noqa: TRY002 + async def mock_info(hass): + raise Exception("TEST ERROR") # pylint: disable=broad-exception-raised async_register_info(hass, "lovelace", mock_info) assert await async_setup_component(hass, "system_health", {}) diff --git a/tests/components/system_log/test_init.py b/tests/components/system_log/test_init.py index a81a92681f2..918d995fab9 100644 --- a/tests/components/system_log/test_init.py +++ b/tests/components/system_log/test_init.py @@ -10,10 +10,10 @@ import traceback from typing import Any from unittest.mock import MagicMock, patch +from homeassistant.bootstrap import async_setup_component from homeassistant.components import system_log from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.typing import ConfigType -from homeassistant.setup import async_setup_component from tests.common import async_capture_events from tests.typing import WebSocketGenerator @@ -36,7 +36,7 @@ async def get_error_log(hass_ws_client): def _generate_and_log_exception(exception, log): try: - raise Exception(exception) # noqa: TRY002, TRY301 + raise Exception(exception) # pylint: disable=broad-exception-raised except Exception: _LOGGER.exception(log) @@ -371,9 +371,7 @@ def get_frame(path: str, previous_frame: MagicMock | None) -> MagicMock: ) -async def async_log_error_from_test_path( - hass: HomeAssistant, path: str, watcher: WatchLogErrorHandler -) -> None: +async def async_log_error_from_test_path(hass, path, watcher): """Log error while mocking the path.""" call_path = "internal_path.py" main_frame = get_frame("main_path/main.py", None) @@ -463,7 +461,7 @@ async def test__figure_out_source(hass: HomeAssistant) -> None: in a test because the test is not a component. 
""" try: - raise ValueError("test") # noqa: TRY301 + raise ValueError("test") except ValueError as ex: exc_info = (type(ex), ex, ex.__traceback__) mock_record = MagicMock( @@ -488,7 +486,7 @@ async def test__figure_out_source(hass: HomeAssistant) -> None: async def test_formatting_exception(hass: HomeAssistant) -> None: """Test that exceptions are formatted correctly.""" try: - raise ValueError("test") # noqa: TRY301 + raise ValueError("test") except ValueError as ex: exc_info = (type(ex), ex, ex.__traceback__) mock_record = MagicMock( diff --git a/tests/components/systemmonitor/conftest.py b/tests/components/systemmonitor/conftest.py index 5f0a7a5c76d..e16debdf263 100644 --- a/tests/components/systemmonitor/conftest.py +++ b/tests/components/systemmonitor/conftest.py @@ -2,13 +2,13 @@ from __future__ import annotations -from collections.abc import Generator import socket from unittest.mock import AsyncMock, Mock, NonCallableMock, patch from psutil import NoSuchProcess, Process from psutil._common import sdiskpart, sdiskusage, shwtemp, snetio, snicaddr, sswap import pytest +from typing_extensions import Generator from homeassistant.components.systemmonitor.const import DOMAIN from homeassistant.components.systemmonitor.coordinator import VirtualMemory @@ -174,11 +174,11 @@ def mock_psutil(mock_process: list[MockProcess]) -> Generator: "cpu0-thermal": [shwtemp("cpu0-thermal", 50.0, 60.0, 70.0)] } mock_psutil.disk_partitions.return_value = [ - sdiskpart("test", "/", "ext4", ""), - sdiskpart("test2", "/media/share", "ext4", ""), - sdiskpart("test3", "/incorrect", "", ""), - sdiskpart("hosts", "/etc/hosts", "bind", ""), - sdiskpart("proc", "/proc/run", "proc", ""), + sdiskpart("test", "/", "ext4", "", 1, 1), + sdiskpart("test2", "/media/share", "ext4", "", 1, 1), + sdiskpart("test3", "/incorrect", "", "", 1, 1), + sdiskpart("hosts", "/etc/hosts", "bind", "", 1, 1), + sdiskpart("proc", "/proc/run", "proc", "", 1, 1), ] mock_psutil.boot_time.return_value = 1708786800.0 mock_psutil.NoSuchProcess = NoSuchProcess diff --git a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr index 75d942fc601..b50e051c816 100644 --- a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr +++ b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr @@ -34,65 +34,8 @@ 'data': dict({ }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'systemmonitor', - 'minor_version': 3, - 'options': dict({ - 'binary_sensor': dict({ - 'process': list([ - 'python3', - 'pip', - ]), - }), - 'resources': list([ - 'disk_use_percent_/', - 'disk_use_percent_/home/notexist/', - 'memory_free_', - 'network_out_eth0', - 'process_python3', - ]), - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'System Monitor', - 'unique_id': None, - 'version': 1, - }), - }) -# --- -# name: test_diagnostics_missing_items[test_diagnostics_missing_items] - dict({ - 'coordinators': dict({ - 'data': dict({ - 'addresses': None, - 'boot_time': '2024-02-24 15:00:00+00:00', - 'cpu_percent': '10.0', - 'disk_usage': dict({ - '/': 'sdiskusage(total=536870912000, used=322122547200, free=214748364800, percent=60.0)', - '/home/notexist/': 'sdiskusage(total=536870912000, used=322122547200, free=214748364800, percent=60.0)', - '/media/share': 'sdiskusage(total=536870912000, used=322122547200, free=214748364800, percent=60.0)', - }), - 'io_counters': None, - 'load': '(1, 2, 3)', - 'memory': 
'VirtualMemory(total=104857600, available=41943040, percent=40.0, used=62914560, free=31457280)', - 'processes': "[tests.components.systemmonitor.conftest.MockProcess(pid=1, name='python3', status='sleeping', started='2024-02-23 15:00:00'), tests.components.systemmonitor.conftest.MockProcess(pid=1, name='pip', status='sleeping', started='2024-02-23 15:00:00')]", - 'swap': 'sswap(total=104857600, used=62914560, free=41943040, percent=60.0, sin=1, sout=1)', - 'temperatures': dict({ - 'cpu0-thermal': "[shwtemp(label='cpu0-thermal', current=50.0, high=60.0, critical=70.0)]", - }), - }), - 'last_update_success': True, - }), - 'entry': dict({ - 'data': dict({ - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'systemmonitor', - 'minor_version': 3, + 'minor_version': 2, 'options': dict({ 'binary_sensor': dict({ 'process': list([ diff --git a/tests/components/systemmonitor/snapshots/test_repairs.ambr b/tests/components/systemmonitor/snapshots/test_repairs.ambr new file mode 100644 index 00000000000..dc659918b5f --- /dev/null +++ b/tests/components/systemmonitor/snapshots/test_repairs.ambr @@ -0,0 +1,73 @@ +# serializer version: 1 +# name: test_migrate_process_sensor[after_migration] + list([ + ConfigEntrySnapshot({ + 'data': dict({ + }), + 'disabled_by': None, + 'domain': 'systemmonitor', + 'entry_id': , + 'minor_version': 2, + 'options': dict({ + 'binary_sensor': dict({ + 'process': list([ + 'python3', + 'pip', + ]), + }), + 'resources': list([ + 'disk_use_percent_/', + 'disk_use_percent_/home/notexist/', + 'memory_free_', + 'network_out_eth0', + 'process_python3', + ]), + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'System Monitor', + 'unique_id': None, + 'version': 1, + }), + ]) +# --- +# name: test_migrate_process_sensor[before_migration] + list([ + ConfigEntrySnapshot({ + 'data': dict({ + }), + 'disabled_by': None, + 'domain': 'systemmonitor', + 'entry_id': , + 'minor_version': 2, + 'options': dict({ + 'binary_sensor': dict({ + 'process': list([ + 'python3', + 'pip', + ]), + }), + 'resources': list([ + 'disk_use_percent_/', + 'disk_use_percent_/home/notexist/', + 'memory_free_', + 'network_out_eth0', + 'process_python3', + ]), + 'sensor': dict({ + 'process': list([ + 'python3', + 'pip', + ]), + }), + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'System Monitor', + 'unique_id': None, + 'version': 1, + }), + ]) +# --- diff --git a/tests/components/systemmonitor/snapshots/test_sensor.ambr b/tests/components/systemmonitor/snapshots/test_sensor.ambr index 1ee9067a528..3fe9ae7e809 100644 --- a/tests/components/systemmonitor/snapshots/test_sensor.ambr +++ b/tests/components/systemmonitor/snapshots/test_sensor.ambr @@ -300,6 +300,24 @@ # name: test_sensor[System Monitor Packets out eth1 - state] '150' # --- +# name: test_sensor[System Monitor Process pip - attributes] + ReadOnlyDict({ + 'friendly_name': 'System Monitor Process pip', + 'icon': 'mdi:cpu-64-bit', + }) +# --- +# name: test_sensor[System Monitor Process pip - state] + 'on' +# --- +# name: test_sensor[System Monitor Process python3 - attributes] + ReadOnlyDict({ + 'friendly_name': 'System Monitor Process python3', + 'icon': 'mdi:cpu-64-bit', + }) +# --- +# name: test_sensor[System Monitor Process python3 - state] + 'on' +# --- # name: test_sensor[System Monitor Processor temperature - attributes] ReadOnlyDict({ 'device_class': 'temperature', diff --git 
a/tests/components/systemmonitor/test_diagnostics.py b/tests/components/systemmonitor/test_diagnostics.py index 26e421e6574..78128aad5f4 100644 --- a/tests/components/systemmonitor/test_diagnostics.py +++ b/tests/components/systemmonitor/test_diagnostics.py @@ -2,7 +2,6 @@ from unittest.mock import Mock -from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion from syrupy.filters import props @@ -24,27 +23,4 @@ async def test_diagnostics( """Test diagnostics.""" assert await get_diagnostics_for_config_entry( hass, hass_client, mock_added_config_entry - ) == snapshot(exclude=props("last_update", "entry_id", "created_at", "modified_at")) - - -async def test_diagnostics_missing_items( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_psutil: Mock, - mock_os: Mock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - freezer: FrozenDateTimeFactory, -) -> None: - """Test diagnostics.""" - mock_psutil.net_if_addrs.return_value = None - mock_psutil.net_io_counters.return_value = None - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - assert await get_diagnostics_for_config_entry( - hass, hass_client, mock_config_entry - ) == snapshot( - exclude=props("last_update", "entry_id", "created_at", "modified_at"), - name="test_diagnostics_missing_items", - ) + ) == snapshot(exclude=props("last_update", "entry_id")) diff --git a/tests/components/systemmonitor/test_init.py b/tests/components/systemmonitor/test_init.py index 6c1e4e6316c..97f4a41b96c 100644 --- a/tests/components/systemmonitor/test_init.py +++ b/tests/components/systemmonitor/test_init.py @@ -95,49 +95,9 @@ async def test_migrate_process_sensor_to_binary_sensors( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() + process_sensor = hass.states.get("sensor.system_monitor_process_python3") + assert process_sensor is not None + assert process_sensor.state == STATE_ON process_sensor = hass.states.get("binary_sensor.system_monitor_process_python3") assert process_sensor is not None assert process_sensor.state == STATE_ON - - assert mock_config_entry.minor_version == 3 - assert mock_config_entry.options == { - "binary_sensor": {"process": ["python3", "pip"]}, - "resources": [ - "disk_use_percent_/", - "disk_use_percent_/home/notexist/", - "memory_free_", - "network_out_eth0", - "process_python3", - ], - } - - -async def test_migration_from_future_version( - hass: HomeAssistant, - mock_psutil: Mock, - mock_os: Mock, - freezer: FrozenDateTimeFactory, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test migration from future version.""" - mock_config_entry = MockConfigEntry( - title="System Monitor", - domain=DOMAIN, - version=2, - data={}, - options={ - "sensor": {"process": ["python3", "pip"]}, - "resources": [ - "disk_use_percent_/", - "disk_use_percent_/home/notexist/", - "memory_free_", - "network_out_eth0", - "process_python3", - ], - }, - ) - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.MIGRATION_ERROR diff --git a/tests/components/systemmonitor/test_repairs.py b/tests/components/systemmonitor/test_repairs.py new file mode 100644 index 00000000000..6c1ff9dfd16 --- /dev/null +++ b/tests/components/systemmonitor/test_repairs.py @@ -0,0 +1,199 @@ +"""Test repairs for System Monitor.""" + +from __future__ import annotations 
+ +from http import HTTPStatus +from unittest.mock import Mock + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) +from homeassistant.components.systemmonitor.const import DOMAIN +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component + +from tests.common import ANY, MockConfigEntry +from tests.typing import ClientSessionGenerator, WebSocketGenerator + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_migrate_process_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_psutil: Mock, + mock_os: Mock, + hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test migrating process sensor to binary sensor.""" + mock_config_entry = MockConfigEntry( + title="System Monitor", + domain=DOMAIN, + data={}, + options={ + "binary_sensor": {"process": ["python3", "pip"]}, + "sensor": {"process": ["python3", "pip"]}, + "resources": [ + "disk_use_percent_/", + "disk_use_percent_/home/notexist/", + "memory_free_", + "network_out_eth0", + "process_python3", + ], + }, + ) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert hass.config_entries.async_entries(DOMAIN) == snapshot( + name="before_migration" + ) + + assert await async_setup_component(hass, "repairs", {}) + await hass.async_block_till_done() + + entity = "sensor.system_monitor_process_python3" + state = hass.states.get(entity) + assert state + + assert entity_registry.async_get(entity) + + ws_client = await hass_ws_client(hass) + client = await hass_client() + + await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + + assert msg["success"] + assert len(msg["result"]["issues"]) > 0 + issue = None + for i in msg["result"]["issues"]: + if i["issue_id"] == "process_sensor": + issue = i + assert issue is not None + + url = RepairsFlowIndexView.url + resp = await client.post( + url, json={"handler": DOMAIN, "issue_id": "process_sensor"} + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data["step_id"] == "migrate_process_sensor" + + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url, json={}) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + # Cannot use identity `is` check here as the value is parsed from JSON + assert data["type"] == FlowResultType.CREATE_ENTRY.value + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.system_monitor_process_python3") + assert state + + await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + + assert msg["success"] + issue = None + for i in msg["result"]["issues"]: + if i["issue_id"] == "migrate_process_sensor": + issue = i + assert not issue + + entity = "sensor.system_monitor_process_python3" + state = hass.states.get(entity) + assert not state + + assert not entity_registry.async_get(entity) + + assert hass.config_entries.async_entries(DOMAIN) == snapshot(name="after_migration") + + 
+@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_other_fixable_issues( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, + mock_added_config_entry: ConfigEntry, +) -> None: + """Test fixing other issues.""" + assert await async_setup_component(hass, "repairs", {}) + await hass.async_block_till_done() + + ws_client = await hass_ws_client(hass) + client = await hass_client() + + await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + + assert msg["success"] + + issue = { + "breaks_in_ha_version": "2022.9.0dev0", + "domain": DOMAIN, + "issue_id": "issue_1", + "is_fixable": True, + "learn_more_url": "", + "severity": "error", + "translation_key": "issue_1", + } + ir.async_create_issue( + hass, + issue["domain"], + issue["issue_id"], + breaks_in_ha_version=issue["breaks_in_ha_version"], + is_fixable=issue["is_fixable"], + is_persistent=False, + learn_more_url=None, + severity=issue["severity"], + translation_key=issue["translation_key"], + ) + + await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + + assert msg["success"] + results = msg["result"]["issues"] + assert { + "breaks_in_ha_version": "2022.9.0dev0", + "created": ANY, + "dismissed_version": None, + "domain": DOMAIN, + "is_fixable": True, + "issue_domain": None, + "issue_id": "issue_1", + "learn_more_url": None, + "severity": "error", + "translation_key": "issue_1", + "translation_placeholders": None, + "ignored": False, + } in results + + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "issue_1"}) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data["step_id"] == "confirm" + + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + # Cannot use identity `is` check here as the value is parsed from JSON + assert data["type"] == FlowResultType.CREATE_ENTRY.value + await hass.async_block_till_done() diff --git a/tests/components/systemmonitor/test_sensor.py b/tests/components/systemmonitor/test_sensor.py index 6d22c5354a4..ce15083da67 100644 --- a/tests/components/systemmonitor/test_sensor.py +++ b/tests/components/systemmonitor/test_sensor.py @@ -14,10 +14,12 @@ from homeassistant.components.systemmonitor.const import DOMAIN from homeassistant.components.systemmonitor.coordinator import VirtualMemory from homeassistant.components.systemmonitor.sensor import get_cpu_icon from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from .conftest import MockProcess + from tests.common import MockConfigEntry, async_fire_time_changed @@ -36,6 +38,7 @@ async def test_sensor( data={}, options={ "binary_sensor": {"process": ["python3", "pip"]}, + "sensor": {"process": ["python3", "pip"]}, "resources": [ "disk_use_percent_/", "disk_use_percent_/home/notexist/", @@ -59,6 +62,10 @@ async def test_sensor( "friendly_name": "System Monitor Memory free", } + process_sensor = hass.states.get("sensor.system_monitor_process_python3") + assert process_sensor is not None + assert process_sensor.state == STATE_ON + for 
entity in er.async_entries_for_config_entry( entity_registry, mock_config_entry.entry_id ): @@ -147,6 +154,7 @@ async def test_sensor_updating( data={}, options={ "binary_sensor": {"process": ["python3", "pip"]}, + "sensor": {"process": ["python3", "pip"]}, "resources": [ "disk_use_percent_/", "disk_use_percent_/home/notexist/", @@ -164,6 +172,10 @@ async def test_sensor_updating( assert memory_sensor is not None assert memory_sensor.state == "40.0" + process_sensor = hass.states.get("sensor.system_monitor_process_python3") + assert process_sensor is not None + assert process_sensor.state == STATE_ON + mock_psutil.virtual_memory.side_effect = Exception("Failed to update") freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) @@ -190,6 +202,53 @@ async def test_sensor_updating( assert memory_sensor.state == "25.0" +async def test_sensor_process_fails( + hass: HomeAssistant, + mock_psutil: Mock, + mock_os: Mock, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test process not exist failure.""" + mock_config_entry = MockConfigEntry( + title="System Monitor", + domain=DOMAIN, + data={}, + options={ + "binary_sensor": {"process": ["python3", "pip"]}, + "sensor": {"process": ["python3", "pip"]}, + "resources": [ + "disk_use_percent_/", + "disk_use_percent_/home/notexist/", + "memory_free_", + "network_out_eth0", + "process_python3", + ], + }, + ) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + process_sensor = hass.states.get("sensor.system_monitor_process_python3") + assert process_sensor is not None + assert process_sensor.state == STATE_ON + + _process = MockProcess("python3", True) + + mock_psutil.process_iter.return_value = [_process] + + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + process_sensor = hass.states.get("sensor.system_monitor_process_python3") + assert process_sensor is not None + assert process_sensor.state == STATE_OFF + + assert "Failed to load process with ID: 1, old name: python3" in caplog.text + + @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_network_sensors( freezer: FrozenDateTimeFactory, diff --git a/tests/components/systemmonitor/test_util.py b/tests/components/systemmonitor/test_util.py index 582707f3574..b35c7b2e96c 100644 --- a/tests/components/systemmonitor/test_util.py +++ b/tests/components/systemmonitor/test_util.py @@ -50,19 +50,21 @@ async def test_disk_util( """Test the disk failures.""" mock_psutil.psutil.disk_partitions.return_value = [ - sdiskpart("test", "/", "ext4", ""), # Should be ok - sdiskpart("test2", "/media/share", "ext4", ""), # Should be ok - sdiskpart("test3", "/incorrect", "", ""), # Should be skipped as no type + sdiskpart("test", "/", "ext4", "", 1, 1), # Should be ok + sdiskpart("test2", "/media/share", "ext4", "", 1, 1), # Should be ok + sdiskpart("test3", "/incorrect", "", "", 1, 1), # Should be skipped as no type sdiskpart( - "proc", "/proc/run", "proc", "" + "proc", "/proc/run", "proc", "", 1, 1 ), # Should be skipped as in skipped disk types sdiskpart( "test4", "/tmpfs/", # noqa: S108 "tmpfs", "", + 1, + 1, ), # Should be skipped as in skipped disk types - sdiskpart("test5", "E:", "cd", "cdrom"), # Should be skipped as cdrom + sdiskpart("test5", "E:", "cd", "cdrom", 1, 1), # Should be skipped as cdrom ] mock_config_entry.add_to_hass(hass) diff --git 
a/tests/components/tado/fixtures/home.json b/tests/components/tado/fixtures/home.json deleted file mode 100644 index 3431c1c2471..00000000000 --- a/tests/components/tado/fixtures/home.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "id": 1, - "name": "My Home", - "dateTimeZone": "Europe/Berlin", - "dateCreated": "2019-03-24T16:16:19.541Z", - "temperatureUnit": "CELSIUS", - "partner": null, - "simpleSmartScheduleEnabled": true, - "awayRadiusInMeters": 100.0, - "installationCompleted": true, - "incidentDetection": { "supported": true, "enabled": true }, - "generation": "PRE_LINE_X", - "zonesCount": 7, - "language": "de-DE", - "skills": ["AUTO_ASSIST"], - "christmasModeEnabled": true, - "showAutoAssistReminders": true, - "contactDetails": { - "name": "Max Mustermann", - "email": "max@example.com", - "phone": "+493023125431" - }, - "address": { - "addressLine1": "Musterstrasse 123", - "addressLine2": null, - "zipCode": "12345", - "city": "Berlin", - "state": null, - "country": "DEU" - }, - "geolocation": { "latitude": 52.0, "longitude": 13.0 }, - "consentGrantSkippable": true, - "enabledFeatures": [ - "EIQ_SETTINGS_AS_WEBVIEW", - "HIDE_BOILER_REPAIR_SERVICE", - "INTERCOM_ENABLED", - "MORE_AS_WEBVIEW", - "OWD_SETTINGS_AS_WEBVIEW", - "SETTINGS_OVERVIEW_AS_WEBVIEW" - ], - "isAirComfortEligible": true, - "isBalanceAcEligible": false, - "isEnergyIqEligible": true, - "isHeatSourceInstalled": false, - "isHeatPumpInstalled": false, - "supportsFlowTemperatureOptimization": false -} diff --git a/tests/components/tado/fixtures/smartac4.with_fanlevel.json b/tests/components/tado/fixtures/smartac4.with_fanlevel.json deleted file mode 100644 index ea1f9cbd8e5..00000000000 --- a/tests/components/tado/fixtures/smartac4.with_fanlevel.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "tadoMode": "HOME", - "geolocationOverride": false, - "geolocationOverrideDisableTime": null, - "preparation": null, - "setting": { - "type": "AIR_CONDITIONING", - "power": "ON", - "mode": "HEAT", - "temperature": { - "celsius": 25.0, - "fahrenheit": 77.0 - }, - "fanLevel": "LEVEL3", - "verticalSwing": "ON", - "horizontalSwing": "ON" - }, - "overlayType": "MANUAL", - "overlay": { - "type": "MANUAL", - "setting": { - "type": "AIR_CONDITIONING", - "power": "ON", - "mode": "HEAT", - "temperature": { - "celsius": 25.0, - "fahrenheit": 77.0 - }, - "fanLevel": "LEVEL3", - "verticalSwing": "ON" - }, - "termination": { - "type": "MANUAL", - "typeSkillBasedApp": "MANUAL", - "projectedExpiry": null - } - }, - "openWindow": null, - "nextScheduleChange": { - "start": "2024-07-01T05: 45: 00Z", - "setting": { - "type": "AIR_CONDITIONING", - "power": "ON", - "mode": "HEAT", - "temperature": { - "celsius": 24.0, - "fahrenheit": 75.2 - }, - "fanLevel": "LEVEL3", - "verticalSwing": "ON", - "horizontalSwing": "ON" - } - }, - "nextTimeBlock": { - "start": "2024-07-01T05: 45: 00.000Z" - }, - "link": { - "state": "ONLINE" - }, - "runningOfflineSchedule": false, - "activityDataPoints": { - "acPower": { - "timestamp": "2022-07-13T18: 06: 58.183Z", - "type": "POWER", - "value": "ON" - } - }, - "sensorDataPoints": { - "insideTemperature": { - "celsius": 24.3, - "fahrenheit": 75.74, - "timestamp": "2024-06-28T22: 23: 15.679Z", - "type": "TEMPERATURE", - "precision": { - "celsius": 0.1, - "fahrenheit": 0.1 - } - }, - "humidity": { - "type": "PERCENTAGE", - "percentage": 70.9, - "timestamp": "2024-06-28T22: 23: 15.679Z" - } - }, - "terminationCondition": { - "type": "MANUAL" - } -} diff --git a/tests/components/tado/fixtures/zone_states.json 
b/tests/components/tado/fixtures/zone_states.json index df1a99a80f3..64d457f3b50 100644 --- a/tests/components/tado/fixtures/zone_states.json +++ b/tests/components/tado/fixtures/zone_states.json @@ -287,79 +287,6 @@ "timestamp": "2020-03-28T02:09:27.830Z" } } - }, - "6": { - "tadoMode": "HOME", - "geolocationOverride": false, - "geolocationOverrideDisableTime": null, - "preparation": null, - "setting": { - "type": "AIR_CONDITIONING", - "power": "OFF" - }, - "overlayType": "MANUAL", - "overlay": { - "type": "MANUAL", - "setting": { - "type": "AIR_CONDITIONING", - "power": "OFF" - }, - "termination": { - "type": "MANUAL", - "typeSkillBasedApp": "MANUAL", - "projectedExpiry": null - } - }, - "openWindow": null, - "nextScheduleChange": { - "start": "2024-07-01T05: 45: 00Z", - "setting": { - "type": "AIR_CONDITIONING", - "power": "ON", - "mode": "HEAT", - "temperature": { - "celsius": 24.0, - "fahrenheit": 75.2 - }, - "fanLevel": "LEVEL3", - "verticalSwing": "ON", - "horizontalSwing": "ON" - } - }, - "nextTimeBlock": { - "start": "2024-07-01T05: 45: 00.000Z" - }, - "link": { - "state": "ONLINE" - }, - "runningOfflineSchedule": false, - "activityDataPoints": { - "acPower": { - "timestamp": "2022-07-13T18: 06: 58.183Z", - "type": "POWER", - "value": "OFF" - } - }, - "sensorDataPoints": { - "insideTemperature": { - "celsius": 24.21, - "fahrenheit": 75.58, - "timestamp": "2024-06-28T21: 43: 51.067Z", - "type": "TEMPERATURE", - "precision": { - "celsius": 0.1, - "fahrenheit": 0.1 - } - }, - "humidity": { - "type": "PERCENTAGE", - "percentage": 71.4, - "timestamp": "2024-06-28T21: 43: 51.067Z" - } - }, - "terminationCondition": { - "type": "MANUAL" - } } } } diff --git a/tests/components/tado/fixtures/zone_with_fanlevel_horizontal_vertical_swing.json b/tests/components/tado/fixtures/zone_with_fanlevel_horizontal_vertical_swing.json deleted file mode 100644 index 51ba70b4065..00000000000 --- a/tests/components/tado/fixtures/zone_with_fanlevel_horizontal_vertical_swing.json +++ /dev/null @@ -1,130 +0,0 @@ -{ - "type": "AIR_CONDITIONING", - "COOL": { - "temperatures": { - "celsius": { - "min": 16, - "max": 31, - "step": 1.0 - }, - "fahrenheit": { - "min": 61, - "max": 88, - "step": 1.0 - } - }, - "fanLevel": ["LEVEL3", "LEVEL2", "AUTO", "LEVEL1", "LEVEL4", "LEVEL5"], - "verticalSwing": ["MID_UP", "MID_DOWN", "ON", "OFF", "UP", "MID", "DOWN"], - "horizontalSwing": ["OFF", "ON"], - "light": ["ON", "OFF"] - }, - "FAN": { - "temperatures": { - "celsius": { - "min": 16, - "max": 31, - "step": 1.0 - }, - "fahrenheit": { - "min": 61, - "max": 88, - "step": 1.0 - } - }, - "fanLevel": ["LEVEL3", "LEVEL2", "AUTO", "LEVEL1", "LEVEL4", "LEVEL5"], - "verticalSwing": ["MID_UP", "MID_DOWN", "ON", "OFF", "UP", "MID", "DOWN"], - "horizontalSwing": ["OFF", "ON"], - "light": ["ON", "OFF"] - }, - "AUTO": { - "fanLevel": ["LEVEL3", "LEVEL2", "AUTO", "LEVEL1", "LEVEL4", "LEVEL5"], - "verticalSwing": ["MID_UP", "MID_DOWN", "ON", "OFF", "UP", "MID", "DOWN"], - "horizontalSwing": ["OFF", "ON"], - "light": ["ON", "OFF"] - }, - "HEAT": { - "temperatures": { - "celsius": { - "min": 16, - "max": 31, - "step": 1.0 - }, - "fahrenheit": { - "min": 61, - "max": 88, - "step": 1.0 - } - }, - "fanLevel": ["LEVEL3", "LEVEL2", "AUTO", "LEVEL1", "LEVEL4", "LEVEL5"], - "verticalSwing": ["MID_UP", "MID_DOWN", "ON", "OFF", "UP", "MID", "DOWN"], - "horizontalSwing": ["OFF", "ON"], - "light": ["ON", "OFF"] - }, - "DRY": { - "temperatures": { - "celsius": { - "min": 16, - "max": 31, - "step": 1.0 - }, - "fahrenheit": { - "min": 61, - "max": 88, - 
"step": 1.0 - } - }, - "verticalSwing": ["MID_UP", "MID_DOWN", "ON", "OFF", "UP", "MID", "DOWN"], - "horizontalSwing": ["OFF", "ON"], - "light": ["ON", "OFF"] - }, - "initialStates": { - "mode": "COOL", - "modes": { - "COOL": { - "temperature": { - "celsius": 24, - "fahrenheit": 75 - }, - "fanLevel": "LEVEL3", - "verticalSwing": "OFF", - "horizontalSwing": "OFF", - "light": "ON" - }, - "HEAT": { - "temperature": { - "celsius": 24, - "fahrenheit": 75 - }, - "fanLevel": "LEVEL3", - "verticalSwing": "OFF", - "horizontalSwing": "OFF", - "light": "ON" - }, - "DRY": { - "temperature": { - "celsius": 24, - "fahrenheit": 75 - }, - "verticalSwing": "OFF", - "horizontalSwing": "OFF", - "light": "ON" - }, - "FAN": { - "temperature": { - "celsius": 24, - "fahrenheit": 75 - }, - "fanLevel": "LEVEL3", - "verticalSwing": "OFF", - "horizontalSwing": "OFF", - "light": "ON" - }, - "AUTO": { - "fanLevel": "LEVEL3", - "verticalSwing": "OFF", - "horizontalSwing": "OFF", - "light": "ON" - } - } - } -} diff --git a/tests/components/tado/fixtures/zones.json b/tests/components/tado/fixtures/zones.json index e1d2ec759ba..5ef7374a660 100644 --- a/tests/components/tado/fixtures/zones.json +++ b/tests/components/tado/fixtures/zones.json @@ -178,45 +178,5 @@ "deviceTypes": ["WR02"], "reportAvailable": false, "type": "AIR_CONDITIONING" - }, - { - "id": 6, - "name": "Air Conditioning with fanlevel", - "type": "AIR_CONDITIONING", - "dateCreated": "2022-07-13T18: 06: 58.183Z", - "deviceTypes": ["WR02"], - "devices": [ - { - "deviceType": "WR02", - "serialNo": "WR5", - "shortSerialNo": "WR5", - "currentFwVersion": "118.7", - "connectionState": { - "value": true, - "timestamp": "2024-06-28T21: 04: 23.463Z" - }, - "characteristics": { - "capabilities": ["INSIDE_TEMPERATURE_MEASUREMENT", "IDENTIFY"] - }, - "accessPointWiFi": { - "ssid": "tado8480" - }, - "commandTableUploadState": "FINISHED", - "duties": ["ZONE_UI", "ZONE_DRIVER", "ZONE_LEADER"] - } - ], - "reportAvailable": false, - "showScheduleSetup": false, - "supportsDazzle": true, - "dazzleEnabled": true, - "dazzleMode": { - "supported": true, - "enabled": true - }, - "openWindowDetection": { - "supported": true, - "enabled": true, - "timeoutInSeconds": 900 - } } ] diff --git a/tests/components/tado/test_climate.py b/tests/components/tado/test_climate.py index 5a43c728b6e..98fd2d753a4 100644 --- a/tests/components/tado/test_climate.py +++ b/tests/components/tado/test_climate.py @@ -89,35 +89,3 @@ async def test_smartac_with_swing(hass: HomeAssistant) -> None: # Only test for a subset of attributes in case # HA changes the implementation and a new one appears assert all(item in state.attributes.items() for item in expected_attributes.items()) - - -async def test_smartac_with_fanlevel_vertical_and_horizontal_swing( - hass: HomeAssistant, -) -> None: - """Test creation of smart ac with swing climate.""" - - await async_init_integration(hass) - - state = hass.states.get("climate.air_conditioning_with_fanlevel") - assert state.state == "heat" - - expected_attributes = { - "current_humidity": 70.9, - "current_temperature": 24.3, - "fan_mode": "high", - "fan_modes": ["high", "medium", "auto", "low"], - "friendly_name": "Air Conditioning with fanlevel", - "hvac_action": "heating", - "hvac_modes": ["off", "auto", "heat", "cool", "heat_cool", "dry", "fan_only"], - "max_temp": 31.0, - "min_temp": 16.0, - "preset_mode": "auto", - "preset_modes": ["away", "home", "auto"], - "swing_modes": ["vertical", "horizontal", "both", "off"], - "supported_features": 441, - "target_temp_step": 
1.0, - "temperature": 25.0, - } - # Only test for a subset of attributes in case - # HA changes the implementation and a new one appears - assert all(item in state.attributes.items() for item in expected_attributes.items()) diff --git a/tests/components/tado/test_config_flow.py b/tests/components/tado/test_config_flow.py index 63b17dad13e..4f5f4180fb5 100644 --- a/tests/components/tado/test_config_flow.py +++ b/tests/components/tado/test_config_flow.py @@ -295,7 +295,13 @@ async def test_reconfigure_flow( ) entry.add_to_hass(hass) - result = await entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM diff --git a/tests/components/tado/util.py b/tests/components/tado/util.py index a76858ab98e..dd7c108c984 100644 --- a/tests/components/tado/util.py +++ b/tests/components/tado/util.py @@ -20,7 +20,6 @@ async def async_init_integration( mobile_devices_fixture = "tado/mobile_devices.json" me_fixture = "tado/me.json" weather_fixture = "tado/weather.json" - home_fixture = "tado/home.json" home_state_fixture = "tado/home_state.json" zones_fixture = "tado/zones.json" zone_states_fixture = "tado/zone_states.json" @@ -28,12 +27,6 @@ async def async_init_integration( # WR1 Device device_wr1_fixture = "tado/device_wr1.json" - # Smart AC with fanLevel, Vertical and Horizontal swings - zone_6_state_fixture = "tado/smartac4.with_fanlevel.json" - zone_6_capabilities_fixture = ( - "tado/zone_with_fanlevel_horizontal_vertical_swing.json" - ) - # Smart AC with Swing zone_5_state_fixture = "tado/smartac3.with_swing.json" zone_5_capabilities_fixture = "tado/zone_with_swing_capabilities.json" @@ -66,10 +59,6 @@ async def async_init_integration( "https://my.tado.com/api/v2/me", text=load_fixture(me_fixture), ) - m.get( - "https://my.tado.com/api/v2/homes/1/", - text=load_fixture(home_fixture), - ) m.get( "https://my.tado.com/api/v2/homes/1/weather", text=load_fixture(weather_fixture), @@ -106,10 +95,6 @@ async def async_init_integration( "https://my.tado.com/api/v2/homes/1/zoneStates", text=load_fixture(zone_states_fixture), ) - m.get( - "https://my.tado.com/api/v2/homes/1/zones/6/capabilities", - text=load_fixture(zone_6_capabilities_fixture), - ) m.get( "https://my.tado.com/api/v2/homes/1/zones/5/capabilities", text=load_fixture(zone_5_capabilities_fixture), @@ -150,14 +135,6 @@ async def async_init_integration( "https://my.tado.com/api/v2/homes/1/zones/5/defaultOverlay", text=load_fixture(zone_def_overlay), ) - m.get( - "https://my.tado.com/api/v2/homes/1/zones/6/defaultOverlay", - text=load_fixture(zone_def_overlay), - ) - m.get( - "https://my.tado.com/api/v2/homes/1/zones/6/state", - text=load_fixture(zone_6_state_fixture), - ) m.get( "https://my.tado.com/api/v2/homes/1/zones/5/state", text=load_fixture(zone_5_state_fixture), diff --git a/tests/components/tag/test_init.py b/tests/components/tag/test_init.py index 5c1e80c2d8b..6f309391d2b 100644 --- a/tests/components/tag/test_init.py +++ b/tests/components/tag/test_init.py @@ -294,10 +294,6 @@ async def test_entity_created_and_removed( assert item["id"] == "1234567890" assert item["name"] == "Kitchen tag" - await hass.async_block_till_done() - er_entity = entity_registry.async_get("tag.kitchen_tag") - assert er_entity.name == "Kitchen tag" - entity = hass.states.get("tag.kitchen_tag") assert entity assert entity.state == STATE_UNKNOWN diff --git 
a/tests/components/tag/test_trigger.py b/tests/components/tag/test_trigger.py index 5c7e515d322..60d45abb7b9 100644 --- a/tests/components/tag/test_trigger.py +++ b/tests/components/tag/test_trigger.py @@ -11,6 +11,8 @@ from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component +from tests.common import async_mock_service + @pytest.fixture(autouse=True, name="stub_blueprint_populate") def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: @@ -37,8 +39,14 @@ def tag_setup(hass: HomeAssistant, hass_storage: dict[str, Any]): return _storage +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_triggers( - hass: HomeAssistant, tag_setup, service_calls: list[ServiceCall] + hass: HomeAssistant, tag_setup, calls: list[ServiceCall] ) -> None: """Test tag triggers.""" assert await tag_setup() @@ -67,9 +75,9 @@ async def test_triggers( await async_scan_tag(hass, "abc123", None) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["message"] == "service called" - assert service_calls[0].data["id"] == 0 + assert len(calls) == 1 + assert calls[0].data["message"] == "service called" + assert calls[0].data["id"] == 0 await hass.services.async_call( automation.DOMAIN, @@ -77,16 +85,15 @@ async def test_triggers( {ATTR_ENTITY_ID: "automation.test"}, blocking=True, ) - assert len(service_calls) == 2 await async_scan_tag(hass, "abc123", None) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 1 async def test_exception_bad_trigger( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture ) -> None: """Test for exception on event triggers firing.""" @@ -110,7 +117,7 @@ async def test_exception_bad_trigger( async def test_multiple_tags_and_devices_trigger( - hass: HomeAssistant, tag_setup, service_calls: list[ServiceCall] + hass: HomeAssistant, tag_setup, calls: list[ServiceCall] ) -> None: """Test multiple tags and devices triggers.""" assert await tag_setup() @@ -151,8 +158,8 @@ async def test_multiple_tags_and_devices_trigger( await async_scan_tag(hass, "def456", device_id="jkl0123") await hass.async_block_till_done() - assert len(service_calls) == 4 - assert service_calls[0].data["message"] == "service called" - assert service_calls[1].data["message"] == "service called" - assert service_calls[2].data["message"] == "service called" - assert service_calls[3].data["message"] == "service called" + assert len(calls) == 4 + assert calls[0].data["message"] == "service called" + assert calls[1].data["message"] == "service called" + assert calls[2].data["message"] == "service called" + assert calls[3].data["message"] == "service called" diff --git a/tests/components/tailscale/conftest.py b/tests/components/tailscale/conftest.py index 5514678f530..cb7419daf89 100644 --- a/tests/components/tailscale/conftest.py +++ b/tests/components/tailscale/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest from tailscale.models import Devices +from typing_extensions import Generator from homeassistant.components.tailscale.const import CONF_TAILNET, DOMAIN from homeassistant.const 
import CONF_API_KEY diff --git a/tests/components/tailscale/test_config_flow.py b/tests/components/tailscale/test_config_flow.py index 3a67f46a496..86daa40d8dc 100644 --- a/tests/components/tailscale/test_config_flow.py +++ b/tests/components/tailscale/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, MagicMock from tailscale import TailscaleAuthenticationError, TailscaleConnectionError from homeassistant.components.tailscale.const import CONF_TAILNET, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -128,7 +128,15 @@ async def test_reauth_flow( """Test the reauthentication configuration flow.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -162,7 +170,15 @@ async def test_reauth_with_authentication_error( """ mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -206,7 +222,15 @@ async def test_reauth_api_error( """Test API error during reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" diff --git a/tests/components/tailwind/conftest.py b/tests/components/tailwind/conftest.py index ea87c120308..f23463548bc 100644 --- a/tests/components/tailwind/conftest.py +++ b/tests/components/tailwind/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations -from collections.abc import Generator -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from gotailwind import TailwindDeviceStatus import pytest +from typing_extensions import Generator from homeassistant.components.tailwind.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_TOKEN @@ -36,7 +36,7 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture -def mock_setup_entry() -> Generator[None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Mock setting up a config entry.""" with patch( "homeassistant.components.tailwind.async_setup_entry", return_value=True diff --git a/tests/components/tailwind/snapshots/test_binary_sensor.ambr b/tests/components/tailwind/snapshots/test_binary_sensor.ambr index 064b391c43a..20a3282db55 100644 --- a/tests/components/tailwind/snapshots/test_binary_sensor.ambr +++ b/tests/components/tailwind/snapshots/test_binary_sensor.ambr @@ -68,7 +68,6 @@ }), 'manufacturer': 
'Tailwind', 'model': 'iQ3', - 'model_id': None, 'name': 'Door 1', 'name_by_user': None, 'primary_config_entry': , @@ -147,7 +146,6 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', - 'model_id': None, 'name': 'Door 2', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tailwind/snapshots/test_button.ambr b/tests/components/tailwind/snapshots/test_button.ambr index 17b656ec5fd..3ddbbb3f81d 100644 --- a/tests/components/tailwind/snapshots/test_button.ambr +++ b/tests/components/tailwind/snapshots/test_button.ambr @@ -72,7 +72,6 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', - 'model_id': None, 'name': 'Tailwind iQ3', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tailwind/snapshots/test_config_flow.ambr b/tests/components/tailwind/snapshots/test_config_flow.ambr index 09bf25cb96e..5c01f35e09c 100644 --- a/tests/components/tailwind/snapshots/test_config_flow.ambr +++ b/tests/components/tailwind/snapshots/test_config_flow.ambr @@ -22,8 +22,6 @@ 'token': '987654', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'tailwind', 'entry_id': , 'minor_version': 1, @@ -68,8 +66,6 @@ 'token': '987654', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'tailwind', 'entry_id': , 'minor_version': 1, diff --git a/tests/components/tailwind/snapshots/test_cover.ambr b/tests/components/tailwind/snapshots/test_cover.ambr index b69bd9e6410..4ac6d6adc7d 100644 --- a/tests/components/tailwind/snapshots/test_cover.ambr +++ b/tests/components/tailwind/snapshots/test_cover.ambr @@ -69,7 +69,6 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', - 'model_id': None, 'name': 'Door 1', 'name_by_user': None, 'primary_config_entry': , @@ -149,7 +148,6 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', - 'model_id': None, 'name': 'Door 2', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tailwind/snapshots/test_number.ambr b/tests/components/tailwind/snapshots/test_number.ambr index 3e2e0577ad5..b4e73f4b2aa 100644 --- a/tests/components/tailwind/snapshots/test_number.ambr +++ b/tests/components/tailwind/snapshots/test_number.ambr @@ -81,7 +81,6 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', - 'model_id': None, 'name': 'Tailwind iQ3', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tailwind/test_config_flow.py b/tests/components/tailwind/test_config_flow.py index d2d15172718..f70ab6e27ff 100644 --- a/tests/components/tailwind/test_config_flow.py +++ b/tests/components/tailwind/test_config_flow.py @@ -14,7 +14,12 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import zeroconf from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.components.tailwind.const import DOMAIN -from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.config_entries import ( + SOURCE_DHCP, + SOURCE_REAUTH, + SOURCE_USER, + SOURCE_ZEROCONF, +) from homeassistant.const import CONF_HOST, CONF_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -306,7 +311,15 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) assert mock_config_entry.data[CONF_TOKEN] == "123456" - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + 
data=mock_config_entry.data, + ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -341,7 +354,15 @@ async def test_reauth_flow_errors( mock_config_entry.add_to_hass(hass) mock_tailwind.status.side_effect = side_effect - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/tailwind/test_cover.py b/tests/components/tailwind/test_cover.py index a658f842885..8ccb8947624 100644 --- a/tests/components/tailwind/test_cover.py +++ b/tests/components/tailwind/test_cover.py @@ -3,7 +3,6 @@ from unittest.mock import ANY, MagicMock from gotailwind import ( - TailwindDoorAlreadyInStateError, TailwindDoorDisabledError, TailwindDoorLockedOutError, TailwindDoorOperationCommand, @@ -182,28 +181,3 @@ async def test_cover_operations( ) assert excinfo.value.translation_domain == DOMAIN assert excinfo.value.translation_key == "communication_error" - - # Test door already in state - mock_tailwind.operate.side_effect = TailwindDoorAlreadyInStateError( - "Door is already in the requested state" - ) - - # This call should not raise an exception - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - { - ATTR_ENTITY_ID: "cover.door_1", - }, - blocking=True, - ) - - # This call should not raise an exception - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - { - ATTR_ENTITY_ID: "cover.door_1", - }, - blocking=True, - ) diff --git a/tests/components/tami4/conftest.py b/tests/components/tami4/conftest.py index 2b4acac0b3f..84b96c04735 100644 --- a/tests/components/tami4/conftest.py +++ b/tests/components/tami4/conftest.py @@ -1,6 +1,5 @@ """Common fixutres with default mocks as well as common test helper methods.""" -from collections.abc import Generator from datetime import datetime from unittest.mock import AsyncMock, MagicMock, patch @@ -8,6 +7,7 @@ import pytest from Tami4EdgeAPI.device import Device from Tami4EdgeAPI.device_metadata import DeviceMetadata from Tami4EdgeAPI.water_quality import UV, Filter, WaterQuality +from typing_extensions import Generator from homeassistant.components.tami4.const import CONF_REFRESH_TOKEN, DOMAIN from homeassistant.core import HomeAssistant @@ -60,31 +60,6 @@ def mock__get_devices_metadata(request: pytest.FixtureRequest) -> Generator[None yield -@pytest.fixture -def mock__get_devices_metadata_no_name( - request: pytest.FixtureRequest, -) -> Generator[None]: - """Fixture to mock _get_devices which makes a call to the API.""" - - side_effect = getattr(request, "param", None) - - device_metadata = DeviceMetadata( - id=1, - name=None, - connected=True, - psn="psn", - type="type", - device_firmware="v1.1", - ) - - with patch( - "Tami4EdgeAPI.Tami4EdgeAPI.Tami4EdgeAPI._get_devices_metadata", - return_value=[device_metadata], - side_effect=side_effect, - ): - yield - - @pytest.fixture def mock_get_device( request: pytest.FixtureRequest, diff --git a/tests/components/tami4/test_config_flow.py b/tests/components/tami4/test_config_flow.py index 4dfc27bba94..4210c391d70 100644 --- a/tests/components/tami4/test_config_flow.py +++ b/tests/components/tami4/test_config_flow.py @@ -120,39 +120,6 @@ async def test_step_otp_valid( assert "refresh_token" in result["data"] 
-@pytest.mark.usefixtures( - "mock_setup_entry", - "mock_request_otp", - "mock_submit_otp", - "mock__get_devices_metadata_no_name", -) -async def test_step_otp_valid_device_no_name(hass: HomeAssistant) -> None: - """Test user step with valid phone number.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={CONF_PHONE: "+972555555555"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "otp" - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"otp": "123456"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Tami4" - assert "refresh_token" in result["data"] - - @pytest.mark.parametrize( ("mock_submit_otp", "expected_error"), [ diff --git a/tests/components/tankerkoenig/conftest.py b/tests/components/tankerkoenig/conftest.py index 1517c3d2060..8f2e2c2fb53 100644 --- a/tests/components/tankerkoenig/conftest.py +++ b/tests/components/tankerkoenig/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Tankerkoenig integration tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.tankerkoenig import DOMAIN from homeassistant.const import CONF_SHOW_ON_MAP diff --git a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr index 3180c7c0b1d..f52cb3a88a5 100644 --- a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr +++ b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr @@ -26,8 +26,6 @@ ]), }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'tankerkoenig', 'entry_id': '8036b4412f2fae6bb9dbab7fe8e37f87', 'minor_version': 1, diff --git a/tests/components/tankerkoenig/test_config_flow.py b/tests/components/tankerkoenig/test_config_flow.py index bb1e943bbb9..022b49fd3f8 100644 --- a/tests/components/tankerkoenig/test_config_flow.py +++ b/tests/components/tankerkoenig/test_config_flow.py @@ -9,7 +9,7 @@ from homeassistant.components.tankerkoenig.const import ( CONF_STATIONS, DOMAIN, ) -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, @@ -162,10 +162,6 @@ async def test_user_no_stations(hass: HomeAssistant) -> None: async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test starting a flow by user to re-auth.""" config_entry.add_to_hass(hass) - # re-auth initialized - result = await config_entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" with ( patch( @@ -175,6 +171,15 @@ async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> Non "homeassistant.components.tankerkoenig.config_flow.Tankerkoenig.nearby_stations", ) as mock_nearby_stations, ): + # re-auth initialized + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": config_entry.entry_id}, + data=config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == 
"reauth_confirm" + # re-auth unsuccessful mock_nearby_stations.side_effect = TankerkoenigInvalidKeyError("Booom!") result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/tankerkoenig/test_diagnostics.py b/tests/components/tankerkoenig/test_diagnostics.py index e7b479a0c32..441268659f3 100644 --- a/tests/components/tankerkoenig/test_diagnostics.py +++ b/tests/components/tankerkoenig/test_diagnostics.py @@ -4,7 +4,6 @@ from __future__ import annotations import pytest from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -22,4 +21,4 @@ async def test_entry_diagnostics( ) -> None: """Test config entry diagnostics.""" result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot diff --git a/tests/components/tasmota/conftest.py b/tests/components/tasmota/conftest.py index e6bb8c61994..07ca8b31825 100644 --- a/tests/components/tasmota/conftest.py +++ b/tests/components/tasmota/conftest.py @@ -10,12 +10,35 @@ from homeassistant.components.tasmota.const import ( DEFAULT_PREFIX, DOMAIN, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceCall -from tests.common import MockConfigEntry +from tests.common import ( + MockConfigEntry, + async_mock_service, + mock_device_registry, + mock_registry, +) from tests.components.light.conftest import mock_light_profiles # noqa: F401 +@pytest.fixture +def device_reg(hass): + """Return an empty, loaded, registry.""" + return mock_device_registry(hass) + + +@pytest.fixture +def entity_reg(hass): + """Return an empty, loaded, registry.""" + return mock_registry(hass) + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.fixture(autouse=True) def disable_debounce(): """Set MQTT debounce timer to zero.""" @@ -37,7 +60,7 @@ def disable_status_sensor(status_sensor_disabled): yield -async def setup_tasmota_helper(hass: HomeAssistant) -> None: +async def setup_tasmota_helper(hass): """Set up Tasmota.""" hass.config.components.add("tasmota") @@ -56,6 +79,6 @@ async def setup_tasmota_helper(hass: HomeAssistant) -> None: @pytest.fixture -async def setup_tasmota(hass: HomeAssistant) -> None: +async def setup_tasmota(hass): """Set up Tasmota.""" await setup_tasmota_helper(hass) diff --git a/tests/components/tasmota/snapshots/test_sensor.ambr b/tests/components/tasmota/snapshots/test_sensor.ambr index be011e595b9..c5d70487749 100644 --- a/tests/components/tasmota/snapshots/test_sensor.ambr +++ b/tests/components/tasmota/snapshots/test_sensor.ambr @@ -280,102 +280,6 @@ 'unit_of_measurement': , }) # --- -# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].10 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Tasmota ENERGY ExportTariff 0', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.tasmota_energy_exporttariff_0', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5.6', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].11 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Tasmota ENERGY ExportTariff 1', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , 
- 'entity_id': 'sensor.tasmota_energy_exporttariff_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '7.8', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].12 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Tasmota ENERGY TotalTariff 0', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.tasmota_energy_totaltariff_0', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5.6', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].13 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Tasmota ENERGY TotalTariff 1', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.tasmota_energy_totaltariff_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '7.8', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].14 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Tasmota ENERGY ExportTariff 0', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.tasmota_energy_exporttariff_0', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.2', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].15 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Tasmota ENERGY ExportTariff 1', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.tasmota_energy_exporttariff_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.4', - }) -# --- # name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].2 StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -428,108 +332,6 @@ }) # --- # name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].4 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Tasmota ENERGY ExportTariff 0', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.tasmota_energy_exporttariff_0', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].5 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.tasmota_energy_exporttariff_0', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'ENERGY ExportTariff 0', - 'platform': 'tasmota', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000049A3BC_sensor_sensor_ENERGY_ExportTariff_0', - 'unit_of_measurement': , - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].6 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Tasmota ENERGY ExportTariff 1', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 
'entity_id': 'sensor.tasmota_energy_exporttariff_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].7 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.tasmota_energy_exporttariff_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'ENERGY ExportTariff 1', - 'platform': 'tasmota', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000049A3BC_sensor_sensor_ENERGY_ExportTariff_1', - 'unit_of_measurement': , - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].8 StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -545,7 +347,7 @@ 'state': '1.2', }) # --- -# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].9 +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].5 StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -561,6 +363,38 @@ 'state': '3.4', }) # --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].6 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY TotalTariff 0', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_totaltariff_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.6', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].7 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY TotalTariff 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_totaltariff_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.8', + }) +# --- # name: test_controlling_state_via_mqtt[sensor_config3-entity_ids3-messages3] StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -1712,301 +1546,3 @@ 'state': '2300', }) # --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Tasmota SENSOR1 Unknown', - }), - 'context': , - 'entity_id': 'sensor.tasmota_sensor1_unknown', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].1 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.tasmota_sensor1_unknown', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'SENSOR1 Unknown', - 'platform': 'tasmota', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 
'unique_id': '00000049A3BC_sensor_sensor_SENSOR1_Unknown', - 'unit_of_measurement': None, - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].10 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Tasmota SENSOR3 Unknown', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.tasmota_sensor3_unknown', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20.5', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].11 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Tasmota SENSOR4 Unknown', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.tasmota_sensor4_unknown', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20.5', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].12 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Tasmota SENSOR1 Unknown', - }), - 'context': , - 'entity_id': 'sensor.tasmota_sensor1_unknown', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].13 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Tasmota SENSOR2 Unknown', - }), - 'context': , - 'entity_id': 'sensor.tasmota_sensor2_unknown', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].14 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Tasmota SENSOR3 Unknown', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.tasmota_sensor3_unknown', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].15 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Tasmota SENSOR4 Unknown', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.tasmota_sensor4_unknown', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].2 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Tasmota SENSOR2 Unknown', - }), - 'context': , - 'entity_id': 'sensor.tasmota_sensor2_unknown', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].3 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.tasmota_sensor2_unknown', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'SENSOR2 Unknown', - 'platform': 'tasmota', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000049A3BC_sensor_sensor_SENSOR2_Unknown', - 'unit_of_measurement': None, - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].4 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Tasmota SENSOR3 Unknown', - 
'state_class': , - }), - 'context': , - 'entity_id': 'sensor.tasmota_sensor3_unknown', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].5 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.tasmota_sensor3_unknown', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'SENSOR3 Unknown', - 'platform': 'tasmota', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000049A3BC_sensor_sensor_SENSOR3_Unknown', - 'unit_of_measurement': None, - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].6 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Tasmota SENSOR4 Unknown', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.tasmota_sensor4_unknown', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].7 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.tasmota_sensor4_unknown', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'SENSOR4 Unknown', - 'platform': 'tasmota', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000049A3BC_sensor_sensor_SENSOR4_Unknown', - 'unit_of_measurement': None, - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].8 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Tasmota SENSOR1 Unknown', - }), - 'context': , - 'entity_id': 'sensor.tasmota_sensor1_unknown', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20.5', - }) -# --- -# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].9 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Tasmota SENSOR2 Unknown', - }), - 'context': , - 'entity_id': 'sensor.tasmota_sensor2_unknown', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20.5', - }) -# --- diff --git a/tests/components/tasmota/test_common.py b/tests/components/tasmota/test_common.py index 4d2c821fff4..f3d85f019f3 100644 --- a/tests/components/tasmota/test_common.py +++ b/tests/components/tasmota/test_common.py @@ -2,8 +2,7 @@ import copy import json -from typing import Any -from unittest.mock import ANY, AsyncMock +from unittest.mock import ANY from hatasmota.const import ( CONF_DEEP_SLEEP, @@ -20,7 +19,6 @@ from hatasmota.utils import ( get_topic_tele_state, get_topic_tele_will, ) -import pytest from homeassistant.components.tasmota.const import DEFAULT_PREFIX, DOMAIN from homeassistant.const import STATE_UNAVAILABLE @@ 
-28,7 +26,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import async_fire_mqtt_message -from tests.typing import MqttMockHAClient, MqttMockPahoClient, WebSocketGenerator +from tests.typing import WebSocketGenerator DEFAULT_CONFIG = { "ip": "192.168.15.10", @@ -127,14 +125,14 @@ async def remove_device( async def help_test_availability_when_connection_lost( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock: MqttMockHAClient, - domain: str, - config: dict[str, Any], - sensor_config: dict[str, Any] | None = None, - object_id: str = "tasmota_test", -) -> None: + hass, + mqtt_client_mock, + mqtt_mock, + domain, + config, + sensor_config=None, + object_id="tasmota_test", +): """Test availability after MQTT disconnection. This is a test helper for the TasmotaAvailability mixin. @@ -193,14 +191,14 @@ async def help_test_availability_when_connection_lost( async def help_test_deep_sleep_availability_when_connection_lost( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock: MqttMockHAClient, - domain: str, - config: dict[str, Any], - sensor_config: dict[str, Any] | None = None, - object_id: str = "tasmota_test", -) -> None: + hass, + mqtt_client_mock, + mqtt_mock, + domain, + config, + sensor_config=None, + object_id="tasmota_test", +): """Test availability after MQTT disconnection when deep sleep is enabled. This is a test helper for the TasmotaAvailability mixin. @@ -263,13 +261,13 @@ async def help_test_deep_sleep_availability_when_connection_lost( async def help_test_availability( - hass: HomeAssistant, - mqtt_mock: MqttMockHAClient, - domain: str, - config: dict[str, Any], - sensor_config: dict[str, Any] | None = None, - object_id: str = "tasmota_test", -) -> None: + hass, + mqtt_mock, + domain, + config, + sensor_config=None, + object_id="tasmota_test", +): """Test availability. This is a test helper for the TasmotaAvailability mixin. @@ -311,13 +309,13 @@ async def help_test_availability( async def help_test_deep_sleep_availability( - hass: HomeAssistant, - mqtt_mock: MqttMockHAClient, - domain: str, - config: dict[str, Any], - sensor_config: dict[str, Any] | None = None, - object_id: str = "tasmota_test", -) -> None: + hass, + mqtt_mock, + domain, + config, + sensor_config=None, + object_id="tasmota_test", +): """Test availability when deep sleep is enabled. This is a test helper for the TasmotaAvailability mixin. @@ -360,13 +358,13 @@ async def help_test_deep_sleep_availability( async def help_test_availability_discovery_update( - hass: HomeAssistant, - mqtt_mock: MqttMockHAClient, - domain: str, - config: dict[str, Any], - sensor_config: dict[str, Any] | None = None, - object_id: str = "tasmota_test", -) -> None: + hass, + mqtt_mock, + domain, + config, + sensor_config=None, + object_id="tasmota_test", +): """Test update of discovered TasmotaAvailability. This is a test helper for the TasmotaAvailability mixin. @@ -436,15 +434,15 @@ async def help_test_availability_discovery_update( async def help_test_availability_poll_state( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock: MqttMockHAClient, - domain: str, - config: dict[str, Any], - poll_topic: str, - poll_payload: str, - sensor_config: dict[str, Any] | None = None, -) -> None: + hass, + mqtt_client_mock, + mqtt_mock, + domain, + config, + poll_topic, + poll_payload, + sensor_config=None, +): """Test polling of state when device is available. 
This is a test helper for the TasmotaAvailability mixin. @@ -505,17 +503,17 @@ async def help_test_availability_poll_state( async def help_test_discovery_removal( - hass: HomeAssistant, - mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - domain: str, - config1: dict[str, Any], - config2: dict[str, Any], - sensor_config1: dict[str, Any] | None = None, - sensor_config2: dict[str, Any] | None = None, - object_id: str = "tasmota_test", - name: str = "Tasmota Test", -) -> None: + hass, + mqtt_mock, + caplog, + domain, + config1, + config2, + sensor_config1=None, + sensor_config2=None, + object_id="tasmota_test", + name="Tasmota Test", +): """Test removal of discovered entity.""" device_reg = dr.async_get(hass) entity_reg = er.async_get(hass) @@ -571,16 +569,16 @@ async def help_test_discovery_removal( async def help_test_discovery_update_unchanged( - hass: HomeAssistant, - mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - domain: str, - config: dict[str, Any], - discovery_update: AsyncMock, - sensor_config: dict[str, Any] | None = None, - object_id: str = "tasmota_test", - name: str = "Tasmota Test", -) -> None: + hass, + mqtt_mock, + caplog, + domain, + config, + discovery_update, + sensor_config=None, + object_id="tasmota_test", + name="Tasmota Test", +): """Test update of discovered component with and without changes. This is a test helper for the MqttDiscoveryUpdate mixin. @@ -625,13 +623,8 @@ async def help_test_discovery_update_unchanged( async def help_test_discovery_device_remove( - hass: HomeAssistant, - mqtt_mock: MqttMockHAClient, - domain: str, - unique_id: str, - config: dict[str, Any], - sensor_config: dict[str, Any] | None = None, -) -> None: + hass, mqtt_mock, domain, unique_id, config, sensor_config=None +): """Test domain entity is removed when device is removed.""" device_reg = dr.async_get(hass) entity_reg = er.async_get(hass) @@ -666,14 +659,14 @@ async def help_test_discovery_device_remove( async def help_test_entity_id_update_subscriptions( - hass: HomeAssistant, - mqtt_mock: MqttMockHAClient, - domain: str, - config: dict[str, Any], - topics: list[str] | None = None, - sensor_config: dict[str, Any] | None = None, - object_id: str = "tasmota_test", -) -> None: + hass, + mqtt_mock, + domain, + config, + topics=None, + sensor_config=None, + object_id="tasmota_test", +): """Test MQTT subscriptions are managed when entity_id is updated.""" entity_reg = er.async_get(hass) @@ -718,13 +711,8 @@ async def help_test_entity_id_update_subscriptions( async def help_test_entity_id_update_discovery_update( - hass: HomeAssistant, - mqtt_mock: MqttMockHAClient, - domain: str, - config: dict[str, Any], - sensor_config: dict[str, Any] | None = None, - object_id: str = "tasmota_test", -) -> None: + hass, mqtt_mock, domain, config, sensor_config=None, object_id="tasmota_test" +): """Test MQTT discovery update after entity_id is updated.""" entity_reg = er.async_get(hass) diff --git a/tests/components/tasmota/test_cover.py b/tests/components/tasmota/test_cover.py index 70bf33d0105..7da3cdbd1ec 100644 --- a/tests/components/tasmota/test_cover.py +++ b/tests/components/tasmota/test_cover.py @@ -2,7 +2,6 @@ import copy import json -from typing import Any from unittest.mock import patch from hatasmota.utils import ( @@ -465,9 +464,7 @@ async def test_controlling_state_via_mqtt_inverted( assert state.attributes["current_position"] == 0 -async def call_service( - hass: HomeAssistant, entity_id: str, service: str, **kwargs: Any -) -> None: +async def 
call_service(hass, entity_id, service, **kwargs): """Call a fan service.""" await hass.services.async_call( cover.DOMAIN, diff --git a/tests/components/tasmota/test_device_trigger.py b/tests/components/tasmota/test_device_trigger.py index bb474358006..450ad678ff6 100644 --- a/tests/components/tasmota/test_device_trigger.py +++ b/tests/components/tasmota/test_device_trigger.py @@ -30,7 +30,8 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: async def test_get_triggers_btn( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, + device_reg, + entity_reg, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -45,7 +46,7 @@ async def test_get_triggers_btn( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) expected_triggers = [ @@ -76,7 +77,8 @@ async def test_get_triggers_btn( async def test_get_triggers_swc( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, + device_reg, + entity_reg, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -88,7 +90,7 @@ async def test_get_triggers_swc( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) expected_triggers = [ @@ -110,7 +112,8 @@ async def test_get_triggers_swc( async def test_get_unknown_triggers( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, + device_reg, + entity_reg, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -123,7 +126,7 @@ async def test_get_unknown_triggers( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -158,7 +161,8 @@ async def test_get_unknown_triggers( async def test_get_non_existing_triggers( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, + device_reg, + entity_reg, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -171,7 +175,7 @@ async def test_get_non_existing_triggers( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) triggers = await async_get_device_automations( @@ -183,7 +187,8 @@ async def test_get_non_existing_triggers( @pytest.mark.no_fail_on_log_exception async def test_discover_bad_triggers( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, + device_reg, + entity_reg, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -202,7 +207,7 @@ async def test_discover_bad_triggers( ) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) triggers = await async_get_device_automations( @@ -238,7 +243,7 @@ async def test_discover_bad_triggers( ) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) triggers = await 
async_get_device_automations( @@ -269,7 +274,8 @@ async def test_discover_bad_triggers( async def test_update_remove_triggers( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, + device_reg, + entity_reg, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -290,7 +296,7 @@ async def test_update_remove_triggers( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -345,8 +351,8 @@ async def test_update_remove_triggers( async def test_if_fires_on_mqtt_message_btn( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + device_reg, + calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -360,7 +366,7 @@ async def test_if_fires_on_mqtt_message_btn( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -406,22 +412,22 @@ async def test_if_fires_on_mqtt_message_btn( hass, "tasmota_49A3BC/stat/RESULT", '{"Button1":{"Action":"SINGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "short_press_1" + assert len(calls) == 1 + assert calls[0].data["some"] == "short_press_1" # Fake button 3 single press. async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/RESULT", '{"Button3":{"Action":"SINGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "short_press_3" + assert len(calls) == 2 + assert calls[1].data["some"] == "short_press_3" async def test_if_fires_on_mqtt_message_swc( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + device_reg, + calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -436,7 +442,7 @@ async def test_if_fires_on_mqtt_message_swc( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -496,30 +502,30 @@ async def test_if_fires_on_mqtt_message_swc( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "short_press_1" + assert len(calls) == 1 + assert calls[0].data["some"] == "short_press_1" # Fake switch 2 short press. async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch2":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "short_press_2" + assert len(calls) == 2 + assert calls[1].data["some"] == "short_press_2" # Fake switch 3 long press. 
async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/RESULT", '{"custom_switch":{"Action":"HOLD"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[2].data["some"] == "long_press_3" + assert len(calls) == 3 + assert calls[2].data["some"] == "long_press_3" async def test_if_fires_on_mqtt_message_late_discover( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + device_reg, + calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -538,7 +544,7 @@ async def test_if_fires_on_mqtt_message_late_discover( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -587,22 +593,22 @@ async def test_if_fires_on_mqtt_message_late_discover( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "short_press" + assert len(calls) == 1 + assert calls[0].data["some"] == "short_press" # Fake long press. async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/RESULT", '{"custom_switch":{"Action":"HOLD"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "double_press" + assert len(calls) == 2 + assert calls[1].data["some"] == "double_press" async def test_if_fires_on_mqtt_message_after_update( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + device_reg, + calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -618,7 +624,7 @@ async def test_if_fires_on_mqtt_message_after_update( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -650,7 +656,7 @@ async def test_if_fires_on_mqtt_message_after_update( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Update the trigger with different topic async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config2)) @@ -660,13 +666,13 @@ async def test_if_fires_on_mqtt_message_after_update( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async_fire_mqtt_message( hass, "tasmota_49A3BC/status/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 # Update the trigger with same topic async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config2)) @@ -676,20 +682,17 @@ async def test_if_fires_on_mqtt_message_after_update( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 async_fire_mqtt_message( hass, "tasmota_49A3BC/status/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 3 + assert len(calls) == 3 async def test_no_resubscribe_same_topic( - hass: 
HomeAssistant, - device_registry: dr.DeviceRegistry, - mqtt_mock: MqttMockHAClient, - setup_tasmota, + hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota ) -> None: """Test subscription to topics without change.""" # Discover a device with device trigger @@ -702,7 +705,7 @@ async def test_no_resubscribe_same_topic( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -738,8 +741,8 @@ async def test_no_resubscribe_same_topic( async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + device_reg, + calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -754,7 +757,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -786,7 +789,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Remove the trigger config["swc"][0] = -1 @@ -797,7 +800,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Rediscover the trigger config["swc"][0] = 0 @@ -808,14 +811,14 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 async def test_not_fires_on_mqtt_message_after_remove_from_registry( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + device_reg, + calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -831,7 +834,7 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -863,7 +866,7 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 # Remove the device await remove_device(hass, hass_ws_client, device_entry.id) @@ -873,14 +876,11 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_attach_remove( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mqtt_mock: MqttMockHAClient, - setup_tasmota, + hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, 
setup_tasmota ) -> None: """Test attach and removal of trigger.""" # Discover a device with device trigger @@ -893,14 +893,14 @@ async def test_attach_remove( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) - service_calls = [] + calls = [] def callback(trigger, context): - service_calls.append(trigger["trigger"]["description"]) + calls.append(trigger["trigger"]["description"]) remove = await async_initialize_triggers( hass, @@ -925,8 +925,8 @@ async def test_attach_remove( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0] == "event 'tasmota_event'" + assert len(calls) == 1 + assert calls[0] == "event 'tasmota_event'" # Remove the trigger remove() @@ -937,14 +937,11 @@ async def test_attach_remove( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_attach_remove_late( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mqtt_mock: MqttMockHAClient, - setup_tasmota, + hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota ) -> None: """Test attach and removal of trigger.""" # Discover a device without device triggers @@ -959,14 +956,14 @@ async def test_attach_remove_late( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) - service_calls = [] + calls = [] def callback(trigger, context): - service_calls.append(trigger["trigger"]["description"]) + calls.append(trigger["trigger"]["description"]) remove = await async_initialize_triggers( hass, @@ -991,7 +988,7 @@ async def test_attach_remove_late( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config2)) await hass.async_block_till_done() @@ -1001,8 +998,8 @@ async def test_attach_remove_late( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0] == "event 'tasmota_event'" + assert len(calls) == 1 + assert calls[0] == "event 'tasmota_event'" # Remove the trigger remove() @@ -1013,14 +1010,11 @@ async def test_attach_remove_late( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_attach_remove_late2( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mqtt_mock: MqttMockHAClient, - setup_tasmota, + hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota ) -> None: """Test attach and removal of trigger.""" # Discover a device without device triggers @@ -1035,14 +1029,14 @@ async def test_attach_remove_late2( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = 
device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) - service_calls = [] + calls = [] def callback(trigger, context): - service_calls.append(trigger["trigger"]["description"]) + calls.append(trigger["trigger"]["description"]) remove = await async_initialize_triggers( hass, @@ -1074,14 +1068,11 @@ async def test_attach_remove_late2( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_attach_remove_unknown1( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mqtt_mock: MqttMockHAClient, - setup_tasmota, + hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota ) -> None: """Test attach and removal of unknown trigger.""" # Discover a device without device triggers @@ -1092,7 +1083,7 @@ async def test_attach_remove_unknown1( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -1122,7 +1113,7 @@ async def test_attach_remove_unknown1( async def test_attach_unknown_remove_device_from_registry( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - device_registry: dr.DeviceRegistry, + device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -1145,7 +1136,7 @@ async def test_attach_unknown_remove_device_from_registry( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -1173,10 +1164,7 @@ async def test_attach_unknown_remove_device_from_registry( async def test_attach_remove_config_entry( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mqtt_mock: MqttMockHAClient, - setup_tasmota, + hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota ) -> None: """Test trigger cleanup when removing a Tasmota config entry.""" # Discover a device with device trigger @@ -1189,14 +1177,14 @@ async def test_attach_remove_config_entry( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) - service_calls = [] + calls = [] def callback(trigger, context): - service_calls.append(trigger["trigger"]["description"]) + calls.append(trigger["trigger"]["description"]) await async_initialize_triggers( hass, @@ -1221,8 +1209,8 @@ async def test_attach_remove_config_entry( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0] == "event 'tasmota_event'" + assert len(calls) == 1 + assert calls[0] == "event 'tasmota_event'" # Remove the Tasmota config entry config_entries = hass.config_entries.async_entries("tasmota") @@ -1234,4 +1222,4 @@ async def test_attach_remove_config_entry( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 diff --git a/tests/components/tasmota/test_discovery.py b/tests/components/tasmota/test_discovery.py index 
35ea79f7749..91832f1f2f0 100644 --- a/tests/components/tasmota/test_discovery.py +++ b/tests/components/tasmota/test_discovery.py @@ -124,8 +124,9 @@ async def test_invalid_mac( async def test_correct_config_discovery( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, + caplog: pytest.LogCaptureFixture, + device_reg, + entity_reg, setup_tasmota, ) -> None: """Test receiving valid discovery message.""" @@ -141,11 +142,11 @@ async def test_correct_config_discovery( await hass.async_block_till_done() # Verify device and registry entries are created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None - entity_entry = entity_registry.async_get("switch.tasmota_test") + entity_entry = entity_reg.async_get("switch.tasmota_test") assert entity_entry is not None state = hass.states.get("switch.tasmota_test") @@ -158,7 +159,9 @@ async def test_correct_config_discovery( async def test_device_discover( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, + device_reg, + entity_reg, setup_tasmota, ) -> None: """Test setting up a device.""" @@ -173,7 +176,7 @@ async def test_device_discover( await hass.async_block_till_done() # Verify device and registry entries are created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -187,7 +190,9 @@ async def test_device_discover( async def test_device_discover_deprecated( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, + device_reg, + entity_reg, setup_tasmota, ) -> None: """Test setting up a device with deprecated discovery message.""" @@ -202,7 +207,7 @@ async def test_device_discover_deprecated( await hass.async_block_till_done() # Verify device and registry entries are created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -215,7 +220,9 @@ async def test_device_discover_deprecated( async def test_device_update( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, + device_reg, + entity_reg, setup_tasmota, ) -> None: """Test updating a device.""" @@ -233,7 +240,7 @@ async def test_device_update( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -251,7 +258,7 @@ async def test_device_update( await hass.async_block_till_done() # Verify device entry is updated - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -263,7 +270,9 @@ async def test_device_update( async def test_device_remove( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, + device_reg, + entity_reg, setup_tasmota, ) -> None: """Test removing a discovered device.""" @@ -278,7 +287,7 @@ async def test_device_remove( await 
hass.async_block_till_done() # Verify device entry is created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -291,7 +300,7 @@ async def test_device_remove( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -300,7 +309,9 @@ async def test_device_remove( async def test_device_remove_multiple_config_entries_1( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, + device_reg, + entity_reg, setup_tasmota, ) -> None: """Test removing a discovered device.""" @@ -310,7 +321,7 @@ async def test_device_remove_multiple_config_entries_1( mock_entry = MockConfigEntry(domain="test") mock_entry.add_to_hass(hass) - device_registry.async_get_or_create( + device_reg.async_get_or_create( config_entry_id=mock_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) @@ -325,7 +336,7 @@ async def test_device_remove_multiple_config_entries_1( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -339,7 +350,7 @@ async def test_device_remove_multiple_config_entries_1( await hass.async_block_till_done() # Verify device entry is not removed - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -349,7 +360,9 @@ async def test_device_remove_multiple_config_entries_1( async def test_device_remove_multiple_config_entries_2( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, + device_reg, + entity_reg, setup_tasmota, ) -> None: """Test removing a discovered device.""" @@ -359,12 +372,12 @@ async def test_device_remove_multiple_config_entries_2( mock_entry = MockConfigEntry(domain="test") mock_entry.add_to_hass(hass) - device_registry.async_get_or_create( + device_reg.async_get_or_create( config_entry_id=mock_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) - other_device_entry = device_registry.async_get_or_create( + other_device_entry = device_reg.async_get_or_create( config_entry_id=mock_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "other_device")}, ) @@ -379,7 +392,7 @@ async def test_device_remove_multiple_config_entries_2( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -387,13 +400,13 @@ async def test_device_remove_multiple_config_entries_2( assert other_device_entry.id != device_entry.id # Remove other config entry from the device - device_registry.async_update_device( + device_reg.async_update_device( device_entry.id, remove_config_entry_id=mock_entry.entry_id ) await hass.async_block_till_done() # Verify device entry is not removed - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry 
is not None @@ -401,7 +414,7 @@ async def test_device_remove_multiple_config_entries_2( mqtt_mock.async_publish.assert_not_called() # Remove other config entry from the other device - Tasmota should not do any cleanup - device_registry.async_update_device( + device_reg.async_update_device( other_device_entry.id, remove_config_entry_id=mock_entry.entry_id ) await hass.async_block_till_done() @@ -412,7 +425,8 @@ async def test_device_remove_stale( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, - device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, + device_reg, setup_tasmota, ) -> None: """Test removing a stale (undiscovered) device does not throw.""" @@ -422,13 +436,13 @@ async def test_device_remove_stale( config_entry = hass.config_entries.async_entries("tasmota")[0] # Create a device - device_registry.async_get_or_create( + device_reg.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) # Verify device entry was created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -437,7 +451,7 @@ async def test_device_remove_stale( await remove_device(hass, hass_ws_client, device_entry.id) # Verify device entry is removed - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -446,7 +460,9 @@ async def test_device_remove_stale( async def test_device_rediscover( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, + device_reg, + entity_reg, setup_tasmota, ) -> None: """Test removing a device.""" @@ -461,7 +477,7 @@ async def test_device_rediscover( await hass.async_block_till_done() # Verify device entry is created - device_entry1 = device_registry.async_get_device( + device_entry1 = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry1 is not None @@ -474,7 +490,7 @@ async def test_device_rediscover( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -487,7 +503,7 @@ async def test_device_rediscover( await hass.async_block_till_done() # Verify device entry is created, and id is reused - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -560,8 +576,9 @@ async def test_entity_duplicate_removal( async def test_same_topic( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, + caplog: pytest.LogCaptureFixture, + device_reg, + entity_reg, setup_tasmota, issue_registry: ir.IssueRegistry, ) -> None: @@ -588,7 +605,7 @@ async def test_same_topic( # Verify device registry entries are created for both devices for config in configs[0:2]: - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, config["mac"])} ) assert device_entry is not None @@ -599,14 +616,14 @@ async def test_same_topic( assert device_entry.sw_version == config["sw"] # Verify entities are created 
only for the first device - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[0]["mac"])} ) - assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 1 - device_entry = device_registry.async_get_device( + assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 1 + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[1]["mac"])} ) - assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 0 + assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 0 # Verify a repairs issue was created issue_id = "topic_duplicated_tasmota_49A3BC/cmnd/" @@ -622,7 +639,7 @@ async def test_same_topic( await hass.async_block_till_done() # Verify device registry entries was created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[2]["mac"])} ) assert device_entry is not None @@ -633,10 +650,10 @@ async def test_same_topic( assert device_entry.sw_version == configs[2]["sw"] # Verify no entities were created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[2]["mac"])} ) - assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 0 + assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 0 # Verify the repairs issue has been updated issue = issue_registry.async_get_issue("tasmota", issue_id) @@ -652,10 +669,10 @@ async def test_same_topic( await hass.async_block_till_done() # Verify entities are created also for the third device - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[2]["mac"])} ) - assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 1 + assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 1 # Verify the repairs issue has been updated issue = issue_registry.async_get_issue("tasmota", issue_id) @@ -671,10 +688,10 @@ async def test_same_topic( await hass.async_block_till_done() # Verify entities are created also for the second device - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[1]["mac"])} ) - assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 1 + assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 1 # Verify the repairs issue has been removed assert issue_registry.async_get_issue("tasmota", issue_id) is None @@ -683,8 +700,9 @@ async def test_same_topic( async def test_topic_no_prefix( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, + caplog: pytest.LogCaptureFixture, + device_reg, + entity_reg, setup_tasmota, issue_registry: ir.IssueRegistry, ) -> None: @@ -701,7 +719,7 @@ async def test_topic_no_prefix( await hass.async_block_till_done() # Verify device registry entry is created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, config["mac"])} ) assert device_entry is not None @@ -712,10 +730,10 @@ async def test_topic_no_prefix( assert device_entry.sw_version == 
config["sw"] # Verify entities are not created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, config["mac"])} ) - assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 0 + assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 0 # Verify a repairs issue was created issue_id = "topic_no_prefix_00000049A3BC" @@ -731,10 +749,10 @@ async def test_topic_no_prefix( await hass.async_block_till_done() # Verify entities are created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, config["mac"])} ) - assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 1 + assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 1 # Verify the repairs issue has been removed assert ("tasmota", issue_id) not in issue_registry.issues diff --git a/tests/components/tasmota/test_fan.py b/tests/components/tasmota/test_fan.py index 49d1d36ce20..654b8c955d2 100644 --- a/tests/components/tasmota/test_fan.py +++ b/tests/components/tasmota/test_fan.py @@ -61,12 +61,7 @@ async def test_controlling_state_via_mqtt( state = hass.states.get("fan.tasmota") assert state.state == STATE_OFF assert state.attributes["percentage"] is None - assert ( - state.attributes["supported_features"] - == fan.FanEntityFeature.SET_SPEED - | fan.FanEntityFeature.TURN_OFF - | fan.FanEntityFeature.TURN_ON - ) + assert state.attributes["supported_features"] == fan.FanEntityFeature.SET_SPEED assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"FanSpeed":1}') diff --git a/tests/components/tasmota/test_init.py b/tests/components/tasmota/test_init.py index 2765ed724ea..0123421d5ae 100644 --- a/tests/components/tasmota/test_init.py +++ b/tests/components/tasmota/test_init.py @@ -4,8 +4,9 @@ import copy import json from unittest.mock import call +import pytest + from homeassistant.components.tasmota.const import DEFAULT_PREFIX, DOMAIN -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -25,7 +26,9 @@ async def test_device_remove( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, - device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, + device_reg, + entity_reg, setup_tasmota, ) -> None: """Test removing a discovered device through device registry.""" @@ -41,7 +44,7 @@ async def test_device_remove( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -50,7 +53,7 @@ async def test_device_remove( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -67,7 +70,7 @@ async def test_device_remove( async def test_device_remove_non_tasmota_device( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, + device_reg, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, setup_tasmota, @@ -75,9 +78,7 @@ async def 
test_device_remove_non_tasmota_device( """Test removing a non Tasmota device through device registry.""" assert await async_setup_component(hass, "config", {}) - async def async_remove_config_entry_device( - hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry - ) -> bool: + async def async_remove_config_entry_device(hass, config_entry, device_entry): return True mock_integration( @@ -91,7 +92,7 @@ async def test_device_remove_non_tasmota_device( config_entry.add_to_hass(hass) mac = "12:34:56:AB:CD:EF" - device_entry = device_registry.async_get_or_create( + device_entry = device_reg.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) @@ -101,7 +102,7 @@ async def test_device_remove_non_tasmota_device( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -112,7 +113,7 @@ async def test_device_remove_non_tasmota_device( async def test_device_remove_stale_tasmota_device( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, + device_reg, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, setup_tasmota, @@ -122,7 +123,7 @@ async def test_device_remove_stale_tasmota_device( config_entry = hass.config_entries.async_entries("tasmota")[0] mac = "12:34:56:AB:CD:EF" - device_entry = device_registry.async_get_or_create( + device_entry = device_reg.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) @@ -132,7 +133,7 @@ async def test_device_remove_stale_tasmota_device( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -143,7 +144,8 @@ async def test_device_remove_stale_tasmota_device( async def test_tasmota_ws_remove_discovered_device( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, + device_reg, + entity_reg, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, setup_tasmota, @@ -157,7 +159,7 @@ async def test_tasmota_ws_remove_discovered_device( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -168,7 +170,7 @@ async def test_tasmota_ws_remove_discovered_device( ) # Verify device entry is cleared - device_entry = device_registry.async_get_device( + device_entry = device_reg.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None diff --git a/tests/components/tasmota/test_light.py b/tests/components/tasmota/test_light.py index f5802c509bf..c4c3f0ec8dc 100644 --- a/tests/components/tasmota/test_light.py +++ b/tests/components/tasmota/test_light.py @@ -2,7 +2,6 @@ import copy import json -from typing import Any from unittest.mock import patch from hatasmota.const import CONF_MAC @@ -1479,13 +1478,7 @@ async def test_relay_as_light( assert state is not None -async def _test_split_light( - hass: HomeAssistant, - mqtt_mock: MqttMockHAClient, - config: dict[str, Any], - num_lights: int, - num_switches: int, -) -> None: +async def _test_split_light(hass, mqtt_mock, config, num_lights, num_switches): """Test 
multi-channel light split to single-channel dimmers.""" mac = config["mac"] @@ -1560,12 +1553,7 @@ async def test_split_light2( await _test_split_light(hass, mqtt_mock, config, 5, 2) -async def _test_unlinked_light( - hass: HomeAssistant, - mqtt_mock: MqttMockHAClient, - config: dict[str, Any], - num_switches: int, -) -> None: +async def _test_unlinked_light(hass, mqtt_mock, config, num_switches): """Test rgbww light split to rgb+ww.""" mac = config["mac"] num_lights = 2 diff --git a/tests/components/tasmota/test_sensor.py b/tests/components/tasmota/test_sensor.py index 78235f7ebf5..c01485d12a7 100644 --- a/tests/components/tasmota/test_sensor.py +++ b/tests/components/tasmota/test_sensor.py @@ -50,17 +50,6 @@ BAD_LIST_SENSOR_CONFIG_3 = { } } -# This configuration has sensors which type we can't guess -DEFAULT_SENSOR_CONFIG_UNKNOWN = { - "sn": { - "Time": "2020-09-25T12:47:15", - "SENSOR1": {"Unknown": None}, - "SENSOR2": {"Unknown": "123"}, - "SENSOR3": {"Unknown": 123}, - "SENSOR4": {"Unknown": 123.0}, - } -} - # This configuration has some sensors where values are lists # Home Assistant maps this to one sensor for each list item LIST_SENSOR_CONFIG = { @@ -209,12 +198,10 @@ TEMPERATURE_SENSOR_CONFIG = { [ "sensor.tasmota_energy_totaltariff_0", "sensor.tasmota_energy_totaltariff_1", - "sensor.tasmota_energy_exporttariff_0", - "sensor.tasmota_energy_exporttariff_1", ], ( - '{"ENERGY":{"ExportTariff":[5.6,7.8],"TotalTariff":[1.2,3.4]}}', - '{"StatusSNS":{"ENERGY":{"ExportTariff":[1.2,3.4],"TotalTariff":[5.6,7.8]}}}', + '{"ENERGY":{"TotalTariff":[1.2,3.4]}}', + '{"StatusSNS":{"ENERGY":{"TotalTariff":[5.6,7.8]}}}', ), ), ( @@ -292,20 +279,6 @@ TEMPERATURE_SENSOR_CONFIG = { ), ), ), - # Test we automatically set state class to measurement on unknown numerical sensors - ( - DEFAULT_SENSOR_CONFIG_UNKNOWN, - [ - "sensor.tasmota_sensor1_unknown", - "sensor.tasmota_sensor2_unknown", - "sensor.tasmota_sensor3_unknown", - "sensor.tasmota_sensor4_unknown", - ], - ( - '{"SENSOR1":{"Unknown":20.5},"SENSOR2":{"Unknown":20.5},"SENSOR3":{"Unknown":20.5},"SENSOR4":{"Unknown":20.5}}', - '{"StatusSNS":{"SENSOR1":{"Unknown":20},"SENSOR2":{"Unknown":20},"SENSOR3":{"Unknown":20},"SENSOR4":{"Unknown":20}}}', - ), - ), ], ) async def test_controlling_state_via_mqtt( diff --git a/tests/components/tautulli/test_config_flow.py b/tests/components/tautulli/test_config_flow.py index 722fd0a7616..ca563cfad77 100644 --- a/tests/components/tautulli/test_config_flow.py +++ b/tests/components/tautulli/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch from pytautulli import exceptions from homeassistant.components.tautulli.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -156,7 +156,15 @@ async def test_flow_reauth( """Test reauth flow.""" with patch("homeassistant.components.tautulli.PLATFORMS", []): entry = await setup_integration(hass, aioclient_mock) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + CONF_SOURCE: SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=CONF_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -185,7 
+193,14 @@ async def test_flow_reauth_error( """Test reauth flow with invalid authentication.""" with patch("homeassistant.components.tautulli.PLATFORMS", []): entry = await setup_integration(hass, aioclient_mock) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + ) with patch_config_flow_tautulli(AsyncMock()) as tautullimock: tautullimock.side_effect = exceptions.PyTautulliAuthenticationException result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/tcp/test_binary_sensor.py b/tests/components/tcp/test_binary_sensor.py index c84a36016ad..05aa2a471db 100644 --- a/tests/components/tcp/test_binary_sensor.py +++ b/tests/components/tcp/test_binary_sensor.py @@ -23,9 +23,9 @@ TEST_ENTITY = "binary_sensor.test_name" def mock_socket_fixture(): """Mock the socket.""" with ( - patch("homeassistant.components.tcp.entity.socket.socket") as mock_socket, + patch("homeassistant.components.tcp.common.socket.socket") as mock_socket, patch( - "homeassistant.components.tcp.entity.select.select", + "homeassistant.components.tcp.common.select.select", return_value=(True, False, False), ), ): diff --git a/tests/components/tcp/test_sensor.py b/tests/components/tcp/test_sensor.py index 27003df46cd..04fbb2c667e 100644 --- a/tests/components/tcp/test_sensor.py +++ b/tests/components/tcp/test_sensor.py @@ -43,7 +43,7 @@ socket_test_value = "123" @pytest.fixture(name="mock_socket") def mock_socket_fixture(mock_select): """Mock socket.""" - with patch("homeassistant.components.tcp.entity.socket.socket") as mock_socket: + with patch("homeassistant.components.tcp.common.socket.socket") as mock_socket: socket_instance = mock_socket.return_value.__enter__.return_value socket_instance.recv.return_value = socket_test_value.encode() yield socket_instance @@ -53,7 +53,7 @@ def mock_socket_fixture(mock_select): def mock_select_fixture(): """Mock select.""" with patch( - "homeassistant.components.tcp.entity.select.select", + "homeassistant.components.tcp.common.select.select", return_value=(True, False, False), ) as mock_select: yield mock_select @@ -63,7 +63,7 @@ def mock_select_fixture(): def mock_ssl_context_fixture(): """Mock select.""" with patch( - "homeassistant.components.tcp.entity.ssl.create_default_context", + "homeassistant.components.tcp.common.ssl.create_default_context", ) as mock_ssl_context: mock_ssl_context.return_value.wrap_socket.return_value.recv.return_value = ( socket_test_value + "567" diff --git a/tests/components/technove/conftest.py b/tests/components/technove/conftest.py index a81575f1edf..be34ebfefa5 100644 --- a/tests/components/technove/conftest.py +++ b/tests/components/technove/conftest.py @@ -1,10 +1,10 @@ """Fixtures for TechnoVE integration tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest from technove import Station as TechnoVEStation +from typing_extensions import Generator from homeassistant.components.technove.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/technove/fixtures/station_charging.json b/tests/components/technove/fixtures/station_charging.json index 4f50bf1a645..ea98dc0b071 100644 --- a/tests/components/technove/fixtures/station_charging.json +++ b/tests/components/technove/fixtures/station_charging.json @@ -6,12 +6,12 @@ "current": 23.75, "network_ssid": 
"Connecting...", "id": "AA:AA:AA:AA:AA:BB", - "auto_charge": false, + "auto_charge": true, "highChargePeriodActive": false, "normalPeriodActive": false, "maxChargePourcentage": 0.9, "isBatteryProtected": false, - "inSharingMode": false, + "inSharingMode": true, "energySession": 12.34, "energyTotal": 1234, "version": "1.82", diff --git a/tests/components/technove/snapshots/test_binary_sensor.ambr b/tests/components/technove/snapshots/test_binary_sensor.ambr index cc2dcf4a04a..140526b9391 100644 --- a/tests/components/technove/snapshots/test_binary_sensor.ambr +++ b/tests/components/technove/snapshots/test_binary_sensor.ambr @@ -181,7 +181,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'off', + 'state': 'on', }) # --- # name: test_sensors[binary_sensor.technove_station_static_ip-entry] diff --git a/tests/components/technove/snapshots/test_diagnostics.ambr b/tests/components/technove/snapshots/test_diagnostics.ambr deleted file mode 100644 index 175e8f2022a..00000000000 --- a/tests/components/technove/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,36 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics - dict({ - 'auto_charge': False, - 'conflict_in_sharing_config': False, - 'current': 23.75, - 'energy_session': 12.34, - 'energy_total': 1234, - 'high_charge_period_active': False, - 'in_sharing_mode': False, - 'is_battery_protected': False, - 'is_session_active': True, - 'is_static_ip': False, - 'is_up_to_date': True, - 'last_charge': ''' - 1701072080,0,17.39 - - ''', - 'mac_address': '**REDACTED**', - 'max_charge_percentage': 0.9, - 'max_current': 24, - 'max_station_current': 32, - 'name': 'TechnoVE Station', - 'network_ssid': 'Connecting...', - 'normal_period_active': False, - 'rssi': -82, - 'status': dict({ - '__type': "", - 'repr': "", - }), - 'time': 1701000000, - 'version': '1.82', - 'voltage_in': 238, - 'voltage_out': 238, - }) -# --- diff --git a/tests/components/technove/snapshots/test_number.ambr b/tests/components/technove/snapshots/test_number.ambr deleted file mode 100644 index 622c04d542a..00000000000 --- a/tests/components/technove/snapshots/test_number.ambr +++ /dev/null @@ -1,57 +0,0 @@ -# serializer version: 1 -# name: test_numbers[number.technove_station_maximum_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 32, - 'min': 8, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.technove_station_maximum_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Maximum current', - 'platform': 'technove', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'max_current', - 'unique_id': 'AA:AA:AA:AA:AA:BB_max_current', - 'unit_of_measurement': None, - }) -# --- -# name: test_numbers[number.technove_station_maximum_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'TechnoVE Station Maximum current', - 'max': 32, - 'min': 8, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.technove_station_maximum_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '24', - }) -# --- diff --git a/tests/components/technove/snapshots/test_switch.ambr 
b/tests/components/technove/snapshots/test_switch.ambr index 6febc8c768c..1a707971fc8 100644 --- a/tests/components/technove/snapshots/test_switch.ambr +++ b/tests/components/technove/snapshots/test_switch.ambr @@ -42,52 +42,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switches[switch.technove_station_charging_enabled-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.technove_station_charging_enabled', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Charging Enabled', - 'platform': 'technove', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'session_active', - 'unique_id': 'AA:AA:AA:AA:AA:BB_session_active', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches[switch.technove_station_charging_enabled-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'TechnoVE Station Charging Enabled', - }), - 'context': , - 'entity_id': 'switch.technove_station_charging_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , 'state': 'on', }) # --- diff --git a/tests/components/technove/test_binary_sensor.py b/tests/components/technove/test_binary_sensor.py index 0a90093779e..0ee4f3f3db7 100644 --- a/tests/components/technove/test_binary_sensor.py +++ b/tests/components/technove/test_binary_sensor.py @@ -8,7 +8,7 @@ import pytest from syrupy import SnapshotAssertion from technove import TechnoVEError -from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -43,10 +43,7 @@ async def test_sensors( @pytest.mark.parametrize( "entity_id", - [ - "binary_sensor.technove_station_static_ip", - "binary_sensor.technove_station_charging", - ], + ["binary_sensor.technove_station_static_ip"], ) @pytest.mark.usefixtures("init_integration") async def test_disabled_by_default_binary_sensors( @@ -67,9 +64,9 @@ async def test_binary_sensor_update_failure( freezer: FrozenDateTimeFactory, ) -> None: """Test coordinator update failure.""" - entity_id = "binary_sensor.technove_station_power_sharing_mode" + entity_id = "binary_sensor.technove_station_charging" - assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).state == STATE_ON mock_technove.update.side_effect = TechnoVEError("Test error") freezer.tick(timedelta(minutes=5, seconds=1)) diff --git a/tests/components/technove/test_diagnostics.py b/tests/components/technove/test_diagnostics.py deleted file mode 100644 index 878b084c0c3..00000000000 --- a/tests/components/technove/test_diagnostics.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Tests for TechnoVE diagnostics.""" - -from syrupy.assertion import SnapshotAssertion - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - init_integration: 
MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, init_integration) - == snapshot - ) diff --git a/tests/components/technove/test_number.py b/tests/components/technove/test_number.py deleted file mode 100644 index c9f39cd9200..00000000000 --- a/tests/components/technove/test_number.py +++ /dev/null @@ -1,201 +0,0 @@ -"""Tests for the TechnoVE number platform.""" - -from unittest.mock import MagicMock - -import pytest -from syrupy.assertion import SnapshotAssertion -from technove import TechnoVEConnectionError, TechnoVEError - -from homeassistant.components.number import ( - ATTR_VALUE, - DOMAIN as NUMBER_DOMAIN, - SERVICE_SET_VALUE, -) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import entity_registry as er - -from . import setup_with_selected_platforms - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default", "mock_technove") -async def test_numbers( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test the creation and values of the TechnoVE numbers.""" - await setup_with_selected_platforms(hass, mock_config_entry, [Platform.NUMBER]) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.parametrize( - ("entity_id", "method", "called_with_value"), - [ - ( - "number.technove_station_maximum_current", - "set_max_current", - {"max_current": 10}, - ), - ], -) -@pytest.mark.usefixtures("init_integration") -async def test_number_expected_value( - hass: HomeAssistant, - mock_technove: MagicMock, - entity_id: str, - method: str, - called_with_value: dict[str, bool | int], -) -> None: - """Test set value services with valid values.""" - state = hass.states.get(entity_id) - method_mock = getattr(mock_technove, method) - - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: state.entity_id, ATTR_VALUE: called_with_value["max_current"]}, - blocking=True, - ) - - assert method_mock.call_count == 1 - method_mock.assert_called_with(**called_with_value) - - -@pytest.mark.parametrize( - ("entity_id", "value"), - [ - ( - "number.technove_station_maximum_current", - 1, - ), - ( - "number.technove_station_maximum_current", - 1000, - ), - ], -) -@pytest.mark.usefixtures("init_integration") -async def test_number_out_of_bound( - hass: HomeAssistant, - entity_id: str, - value: float, -) -> None: - """Test set value services with out of bound values.""" - state = hass.states.get(entity_id) - - with pytest.raises(ServiceValidationError, match="is outside valid range"): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: state.entity_id, ATTR_VALUE: value}, - blocking=True, - ) - - assert (state := hass.states.get(state.entity_id)) - assert state.state != STATE_UNAVAILABLE - - -@pytest.mark.usefixtures("init_integration") -async def test_set_max_current_sharing_mode( - hass: HomeAssistant, - mock_technove: MagicMock, -) -> None: - """Test failure to set the max current when the station is in sharing mode.""" - entity_id = "number.technove_station_maximum_current" - state = hass.states.get(entity_id) - - # Enable power sharing mode - 
device = mock_technove.update.return_value - device.info.in_sharing_mode = True - - with pytest.raises( - ServiceValidationError, - match="power sharing mode is enabled", - ): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: entity_id, - ATTR_VALUE: 10, - }, - blocking=True, - ) - - assert (state := hass.states.get(state.entity_id)) - assert state.state != STATE_UNAVAILABLE - - -@pytest.mark.parametrize( - ("entity_id", "method"), - [ - ( - "number.technove_station_maximum_current", - "set_max_current", - ), - ], -) -@pytest.mark.usefixtures("init_integration") -async def test_invalid_response( - hass: HomeAssistant, - mock_technove: MagicMock, - entity_id: str, - method: str, -) -> None: - """Test invalid response, not becoming unavailable.""" - state = hass.states.get(entity_id) - method_mock = getattr(mock_technove, method) - - method_mock.side_effect = TechnoVEError - with pytest.raises(HomeAssistantError, match="Invalid response from TechnoVE API"): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: state.entity_id, ATTR_VALUE: 10}, - blocking=True, - ) - - assert method_mock.call_count == 1 - assert (state := hass.states.get(state.entity_id)) - assert state.state != STATE_UNAVAILABLE - - -@pytest.mark.parametrize( - ("entity_id", "method"), - [ - ( - "number.technove_station_maximum_current", - "set_max_current", - ), - ], -) -@pytest.mark.usefixtures("init_integration") -async def test_connection_error( - hass: HomeAssistant, - mock_technove: MagicMock, - entity_id: str, - method: str, -) -> None: - """Test connection error, leading to becoming unavailable.""" - state = hass.states.get(entity_id) - method_mock = getattr(mock_technove, method) - - method_mock.side_effect = TechnoVEConnectionError - with pytest.raises( - HomeAssistantError, match="Error communicating with TechnoVE API" - ): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: state.entity_id, ATTR_VALUE: 10}, - blocking=True, - ) - - assert method_mock.call_count == 1 - assert (state := hass.states.get(state.entity_id)) - assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/technove/test_switch.py b/tests/components/technove/test_switch.py index dc0293b6443..b1a66607f66 100644 --- a/tests/components/technove/test_switch.py +++ b/tests/components/technove/test_switch.py @@ -15,7 +15,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . 
import setup_with_selected_platforms @@ -53,12 +53,6 @@ async def test_switches( {"enabled": True}, {"enabled": False}, ), - ( - "switch.technove_station_charging_enabled", - "set_charging_enabled", - {"enabled": True}, - {"enabled": False}, - ), ], ) @pytest.mark.usefixtures("init_integration") @@ -102,10 +96,6 @@ async def test_switch_on_off( "switch.technove_station_auto_charge", "set_auto_charge", ), - ( - "switch.technove_station_charging_enabled", - "set_charging_enabled", - ), ], ) @pytest.mark.usefixtures("init_integration") @@ -140,10 +130,6 @@ async def test_invalid_response( "switch.technove_station_auto_charge", "set_auto_charge", ), - ( - "switch.technove_station_charging_enabled", - "set_charging_enabled", - ), ], ) @pytest.mark.usefixtures("init_integration") @@ -171,31 +157,3 @@ async def test_connection_error( assert method_mock.call_count == 1 assert (state := hass.states.get(state.entity_id)) assert state.state == STATE_UNAVAILABLE - - -@pytest.mark.usefixtures("init_integration") -async def test_disable_charging_auto_charge( - hass: HomeAssistant, - mock_technove: MagicMock, -) -> None: - """Test failure to disable charging when the station is in auto charge mode.""" - entity_id = "switch.technove_station_charging_enabled" - state = hass.states.get(entity_id) - - # Enable auto-charge mode - device = mock_technove.update.return_value - device.info.auto_charge = True - - with pytest.raises( - ServiceValidationError, - match="auto-charge is enabled", - ): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - assert (state := hass.states.get(state.entity_id)) - assert state.state != STATE_UNAVAILABLE diff --git a/tests/components/tedee/conftest.py b/tests/components/tedee/conftest.py index 8e028cb5300..295e34fd541 100644 --- a/tests/components/tedee/conftest.py +++ b/tests/components/tedee/conftest.py @@ -2,13 +2,13 @@ from __future__ import annotations -from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch -from aiotedee.bridge import TedeeBridge -from aiotedee.lock import TedeeLock +from pytedee_async.bridge import TedeeBridge +from pytedee_async.lock import TedeeLock import pytest +from typing_extensions import Generator from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID diff --git a/tests/components/tedee/snapshots/test_binary_sensor.ambr b/tests/components/tedee/snapshots/test_binary_sensor.ambr index 385e4ac9bc1..8c9dca1bd12 100644 --- a/tests/components/tedee/snapshots/test_binary_sensor.ambr +++ b/tests/components/tedee/snapshots/test_binary_sensor.ambr @@ -32,39 +32,6 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[entry-lock_uncalibrated] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.lock_1a2b_lock_uncalibrated', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Lock uncalibrated', - 'platform': 'tedee', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'uncalibrated', - 'unique_id': '12345-uncalibrated', - 
'unit_of_measurement': None, - }) -# --- # name: test_binary_sensors[entry-pullspring_enabled] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -145,20 +112,6 @@ 'state': 'off', }) # --- -# name: test_binary_sensors[state-lock_uncalibrated] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Lock-1A2B Lock uncalibrated', - }), - 'context': , - 'entity_id': 'binary_sensor.lock_1a2b_lock_uncalibrated', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_binary_sensors[state-pullspring_enabled] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/tedee/snapshots/test_init.ambr b/tests/components/tedee/snapshots/test_init.ambr index 20d6bfcdc2a..c91fb3ca484 100644 --- a/tests/components/tedee/snapshots/test_init.ambr +++ b/tests/components/tedee/snapshots/test_init.ambr @@ -21,7 +21,6 @@ }), 'manufacturer': 'Tedee', 'model': 'Bridge', - 'model_id': None, 'name': 'Bridge-AB1C', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tedee/snapshots/test_lock.ambr b/tests/components/tedee/snapshots/test_lock.ambr index 3eba6f3f0af..8fa8ab7668d 100644 --- a/tests/components/tedee/snapshots/test_lock.ambr +++ b/tests/components/tedee/snapshots/test_lock.ambr @@ -68,7 +68,6 @@ }), 'manufacturer': 'Tedee', 'model': 'Tedee PRO', - 'model_id': 'Tedee PRO', 'name': 'Lock-1A2B', 'name_by_user': None, 'primary_config_entry': , @@ -147,7 +146,6 @@ }), 'manufacturer': 'Tedee', 'model': 'Tedee GO', - 'model_id': 'Tedee GO', 'name': 'Lock-2C3D', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tedee/test_binary_sensor.py b/tests/components/tedee/test_binary_sensor.py index dfe70e7a2ea..ee8c318d2dd 100644 --- a/tests/components/tedee/test_binary_sensor.py +++ b/tests/components/tedee/test_binary_sensor.py @@ -3,8 +3,8 @@ from datetime import timedelta from unittest.mock import MagicMock -from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory +from pytedee_async import TedeeLock import pytest from syrupy import SnapshotAssertion @@ -15,17 +15,20 @@ from tests.common import async_fire_time_changed pytestmark = pytest.mark.usefixtures("init_integration") -BINARY_SENSORS = ("charging", "semi_locked", "pullspring_enabled", "lock_uncalibrated") +BINARY_SENSORS = ( + "charging", + "semi_locked", + "pullspring_enabled", +) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_binary_sensors( hass: HomeAssistant, mock_tedee: MagicMock, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, ) -> None: - """Test tedee binary sensor.""" + """Test tedee battery charging sensor.""" for key in BINARY_SENSORS: state = hass.states.get(f"binary_sensor.lock_1a2b_{key}") assert state @@ -36,7 +39,6 @@ async def test_binary_sensors( assert entry == snapshot(name=f"entry-{key}") -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_new_binary_sensors( hass: HomeAssistant, mock_tedee: MagicMock, diff --git a/tests/components/tedee/test_config_flow.py b/tests/components/tedee/test_config_flow.py index 825e01aca70..588e63f693b 100644 --- a/tests/components/tedee/test_config_flow.py +++ b/tests/components/tedee/test_config_flow.py @@ -2,16 +2,15 @@ from unittest.mock import MagicMock, patch -from aiotedee import ( +from pytedee_async import ( TedeeClientException, TedeeDataUpdateException, TedeeLocalAuthException, ) -from aiotedee.bridge import TedeeBridge import pytest from 
homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN -from homeassistant.config_entries import SOURCE_USER, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -123,7 +122,18 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) - reauth_result = await mock_config_entry.start_reauth_flow(hass) + reauth_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data={ + CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, + CONF_HOST: "192.168.1.42", + }, + ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], @@ -133,55 +143,3 @@ async def test_reauth_flow( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" - - -async def __do_reconfigure_flow( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> ConfigFlowResult: - """Initialize a reconfigure flow.""" - mock_config_entry.add_to_hass(hass) - - reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass) - - assert reconfigure_result["type"] is FlowResultType.FORM - assert reconfigure_result["step_id"] == "reconfigure" - - return await hass.config_entries.flow.async_configure( - reconfigure_result["flow_id"], - {CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, CONF_HOST: "192.168.1.43"}, - ) - - -async def test_reconfigure_flow( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock -) -> None: - """Test that the reconfigure flow works.""" - - result = await __do_reconfigure_flow(hass, mock_config_entry) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - - entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id) - assert entry - assert entry.title == "My Tedee" - assert entry.data == { - CONF_HOST: "192.168.1.43", - CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, - CONF_WEBHOOK_ID: WEBHOOK_ID, - } - - -async def test_reconfigure_unique_id_mismatch( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock -) -> None: - """Ensure reconfigure flow aborts when the bride changes.""" - - mock_tedee.get_local_bridge.return_value = TedeeBridge( - 0, "1111-1111", "Bridge-R2D2" - ) - - result = await __do_reconfigure_flow(hass, mock_config_entry) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "unique_id_mismatch" diff --git a/tests/components/tedee/test_init.py b/tests/components/tedee/test_init.py index 63701bb1788..d4ac1c9d290 100644 --- a/tests/components/tedee/test_init.py +++ b/tests/components/tedee/test_init.py @@ -5,7 +5,7 @@ from typing import Any from unittest.mock import MagicMock, patch from urllib.parse import urlparse -from aiotedee.exception import ( +from pytedee_async.exception import ( TedeeAuthException, TedeeClientException, TedeeWebhookException, diff --git a/tests/components/tedee/test_lock.py b/tests/components/tedee/test_lock.py index 45eae6e22d9..ffc4a8c30d6 100644 --- a/tests/components/tedee/test_lock.py +++ b/tests/components/tedee/test_lock.py @@ -4,13 +4,13 @@ from datetime import timedelta from unittest.mock import MagicMock from urllib.parse import urlparse -from aiotedee import TedeeLock, TedeeLockState -from 
aiotedee.exception import ( +from freezegun.api import FrozenDateTimeFactory +from pytedee_async import TedeeLock, TedeeLockState +from pytedee_async.exception import ( TedeeClientException, TedeeDataUpdateException, TedeeLocalAuthException, ) -from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion @@ -19,10 +19,13 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_OPEN, SERVICE_UNLOCK, - LockState, + STATE_LOCKED, + STATE_LOCKING, + STATE_UNLOCKED, + STATE_UNLOCKING, ) from homeassistant.components.webhook import async_generate_url -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -72,7 +75,7 @@ async def test_lock( mock_tedee.lock.assert_called_once_with(12345) state = hass.states.get("lock.lock_1a2b") assert state - assert state.state == LockState.LOCKING + assert state.state == STATE_LOCKING await hass.services.async_call( LOCK_DOMAIN, @@ -87,7 +90,7 @@ async def test_lock( mock_tedee.unlock.assert_called_once_with(12345) state = hass.states.get("lock.lock_1a2b") assert state - assert state.state == LockState.UNLOCKING + assert state.state == STATE_UNLOCKING await hass.services.async_call( LOCK_DOMAIN, @@ -102,7 +105,7 @@ async def test_lock( mock_tedee.open.assert_called_once_with(12345) state = hass.states.get("lock.lock_1a2b") assert state - assert state.state == LockState.UNLOCKING + assert state.state == STATE_UNLOCKING async def test_lock_without_pullspring( @@ -152,7 +155,7 @@ async def test_lock_errors( ) -> None: """Test event errors.""" mock_tedee.lock.side_effect = TedeeClientException("Boom") - with pytest.raises(HomeAssistantError) as exc_info: + with pytest.raises(HomeAssistantError, match="Failed to lock the door. Lock 12345"): await hass.services.async_call( LOCK_DOMAIN, SERVICE_LOCK, @@ -161,10 +164,11 @@ async def test_lock_errors( }, blocking=True, ) - assert exc_info.value.translation_key == "lock_failed" mock_tedee.unlock.side_effect = TedeeClientException("Boom") - with pytest.raises(HomeAssistantError) as exc_info: + with pytest.raises( + HomeAssistantError, match="Failed to unlock the door. Lock 12345" + ): await hass.services.async_call( LOCK_DOMAIN, SERVICE_UNLOCK, @@ -173,10 +177,11 @@ async def test_lock_errors( }, blocking=True, ) - assert exc_info.value.translation_key == "unlock_failed" mock_tedee.open.side_effect = TedeeClientException("Boom") - with pytest.raises(HomeAssistantError) as exc_info: + with pytest.raises( + HomeAssistantError, match="Failed to unlatch the door. 
Lock 12345" + ): await hass.services.async_call( LOCK_DOMAIN, SERVICE_OPEN, @@ -185,7 +190,6 @@ async def test_lock_errors( }, blocking=True, ) - assert exc_info.value.translation_key == "open_failed" @pytest.mark.parametrize( @@ -272,31 +276,21 @@ async def test_new_lock( assert state -@pytest.mark.parametrize( - ("lib_state", "expected_state"), - [ - (TedeeLockState.LOCKED, LockState.LOCKED), - (TedeeLockState.HALF_OPEN, STATE_UNKNOWN), - (TedeeLockState.UNKNOWN, STATE_UNKNOWN), - (TedeeLockState.UNCALIBRATED, STATE_UNAVAILABLE), - ], -) async def test_webhook_update( hass: HomeAssistant, mock_tedee: MagicMock, hass_client_no_auth: ClientSessionGenerator, - lib_state: TedeeLockState, - expected_state: str, ) -> None: """Test updated data set through webhook.""" state = hass.states.get("lock.lock_1a2b") assert state - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED - webhook_data = {"dummystate": lib_state.value} - # is updated in the lib, so mock and assert below - mock_tedee.locks_dict[12345].state = lib_state + webhook_data = {"dummystate": 6} + mock_tedee.locks_dict[ + 12345 + ].state = TedeeLockState.LOCKED # is updated in the lib, so mock and assert in L296 client = await hass_client_no_auth() webhook_url = async_generate_url(hass, WEBHOOK_ID) @@ -308,4 +302,4 @@ async def test_webhook_update( state = hass.states.get("lock.lock_1a2b") assert state - assert state.state == expected_state + assert state.state == STATE_LOCKED diff --git a/tests/components/tedee/test_sensor.py b/tests/components/tedee/test_sensor.py index ddbcd5086af..72fbd9cbe8d 100644 --- a/tests/components/tedee/test_sensor.py +++ b/tests/components/tedee/test_sensor.py @@ -3,8 +3,8 @@ from datetime import timedelta from unittest.mock import MagicMock -from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory +from pytedee_async import TedeeLock import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/telegram_bot/conftest.py b/tests/components/telegram_bot/conftest.py index 93137c3815e..6ea5d1446dd 100644 --- a/tests/components/telegram_bot/conftest.py +++ b/tests/components/telegram_bot/conftest.py @@ -1,12 +1,10 @@ """Tests for the telegram_bot integration.""" -from collections.abc import AsyncGenerator, Generator from datetime import datetime -from typing import Any from unittest.mock import patch import pytest -from telegram import Bot, Chat, Message, User +from telegram import Chat, Message, User from telegram.constants import ChatType from homeassistant.components.telegram_bot import ( @@ -20,12 +18,11 @@ from homeassistant.const import ( CONF_URL, EVENT_HOMEASSISTANT_START, ) -from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @pytest.fixture -def config_webhooks() -> dict[str, Any]: +def config_webhooks(): """Fixture for a webhooks platform configuration.""" return { DOMAIN: [ @@ -46,7 +43,7 @@ def config_webhooks() -> dict[str, Any]: @pytest.fixture -def config_polling() -> dict[str, Any]: +def config_polling(): """Fixture for a polling platform configuration.""" return { DOMAIN: [ @@ -65,7 +62,7 @@ def config_polling() -> dict[str, Any]: @pytest.fixture -def mock_register_webhook() -> Generator[None]: +def mock_register_webhook(): """Mock calls made by telegram_bot when (de)registering webhook.""" with ( patch( @@ -81,7 +78,7 @@ def mock_register_webhook() -> Generator[None]: @pytest.fixture -def mock_external_calls() -> Generator[None]: +def mock_external_calls(): """Mock calls that 
make calls to the live Telegram API.""" test_user = User(123456, "Testbot", True) message = Message( @@ -89,29 +86,30 @@ def mock_external_calls() -> Generator[None]: date=datetime.now(), chat=Chat(id=123456, type=ChatType.PRIVATE), ) - - class BotMock(Bot): - """Mock bot class.""" - - __slots__ = () - - def __init__(self, *args: Any, **kwargs: Any) -> None: - """Initialize BotMock instance.""" - super().__init__(*args, **kwargs) - self._bot_user = test_user - with ( - patch("homeassistant.components.telegram_bot.Bot", BotMock), - patch.object(BotMock, "get_me", return_value=test_user), - patch.object(BotMock, "bot", test_user), - patch.object(BotMock, "send_message", return_value=message), + patch( + "telegram.Bot.get_me", + return_value=test_user, + ), + patch( + "telegram.Bot._bot_user", + test_user, + ), + patch( + "telegram.Bot.bot", + test_user, + ), + patch( + "telegram.Bot.send_message", + return_value=message, + ), patch("telegram.ext.Updater._bootstrap"), ): yield @pytest.fixture -def mock_generate_secret_token() -> Generator[str]: +def mock_generate_secret_token(): """Mock secret token generated for webhook.""" mock_secret_token = "DEADBEEF12345678DEADBEEF87654321" with patch( @@ -219,12 +217,12 @@ def update_callback_query(): @pytest.fixture async def webhook_platform( - hass: HomeAssistant, - config_webhooks: dict[str, Any], - mock_register_webhook: None, - mock_external_calls: None, - mock_generate_secret_token: str, -) -> AsyncGenerator[None]: + hass, + config_webhooks, + mock_register_webhook, + mock_external_calls, + mock_generate_secret_token, +): """Fixture for setting up the webhooks platform using appropriate config and mocks.""" await async_setup_component( hass, @@ -237,9 +235,7 @@ async def webhook_platform( @pytest.fixture -async def polling_platform( - hass: HomeAssistant, config_polling: dict[str, Any], mock_external_calls: None -) -> None: +async def polling_platform(hass, config_polling, mock_external_calls): """Fixture for setting up the polling platform using appropriate config and mocks.""" await async_setup_component( hass, diff --git a/tests/components/telegram_bot/test_telegram_bot.py b/tests/components/telegram_bot/test_telegram_bot.py index bdf6ba72fcc..aad758827ca 100644 --- a/tests/components/telegram_bot/test_telegram_bot.py +++ b/tests/components/telegram_bot/test_telegram_bot.py @@ -1,11 +1,8 @@ """Tests for the telegram_bot component.""" -from typing import Any -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, patch -import pytest from telegram import Update -from telegram.error import NetworkError, RetryAfter, TelegramError, TimedOut from homeassistant.components.telegram_bot import ( ATTR_MESSAGE, @@ -14,7 +11,6 @@ from homeassistant.components.telegram_bot import ( SERVICE_SEND_MESSAGE, ) from homeassistant.components.telegram_bot.webhooks import TELEGRAM_WEBHOOK_URL -from homeassistant.const import EVENT_HOMEASSISTANT_START from homeassistant.core import Context, HomeAssistant from homeassistant.setup import async_setup_component @@ -192,103 +188,6 @@ async def test_polling_platform_message_text_update( assert isinstance(events[0].context, Context) -@pytest.mark.parametrize( - ("error", "log_message"), - [ - ( - TelegramError("Telegram error"), - 'caused error: "Telegram error"', - ), - (NetworkError("Network error"), ""), - (RetryAfter(42), ""), - (TimedOut("TimedOut error"), ""), - ], -) -async def test_polling_platform_add_error_handler( - hass: HomeAssistant, - config_polling: dict[str, Any], - 
update_message_text: dict[str, Any], - caplog: pytest.LogCaptureFixture, - error: Exception, - log_message: str, -) -> None: - """Test polling add error handler.""" - with patch( - "homeassistant.components.telegram_bot.polling.ApplicationBuilder" - ) as application_builder_class: - await async_setup_component( - hass, - DOMAIN, - config_polling, - ) - await hass.async_block_till_done() - - application = ( - application_builder_class.return_value.bot.return_value.build.return_value - ) - application.updater.stop = AsyncMock() - application.stop = AsyncMock() - application.shutdown = AsyncMock() - process_error = application.add_error_handler.call_args[0][0] - application.bot.defaults.tzinfo = None - update = Update.de_json(update_message_text, application.bot) - - await process_error(update, MagicMock(error=error)) - - assert log_message in caplog.text - - -@pytest.mark.parametrize( - ("error", "log_message"), - [ - ( - TelegramError("Telegram error"), - "TelegramError: Telegram error", - ), - (NetworkError("Network error"), ""), - (RetryAfter(42), ""), - (TimedOut("TimedOut error"), ""), - ], -) -async def test_polling_platform_start_polling_error_callback( - hass: HomeAssistant, - config_polling: dict[str, Any], - caplog: pytest.LogCaptureFixture, - error: Exception, - log_message: str, -) -> None: - """Test polling add error handler.""" - with patch( - "homeassistant.components.telegram_bot.polling.ApplicationBuilder" - ) as application_builder_class: - await async_setup_component( - hass, - DOMAIN, - config_polling, - ) - await hass.async_block_till_done() - - application = ( - application_builder_class.return_value.bot.return_value.build.return_value - ) - application.initialize = AsyncMock() - application.updater.start_polling = AsyncMock() - application.start = AsyncMock() - application.updater.stop = AsyncMock() - application.stop = AsyncMock() - application.shutdown = AsyncMock() - - hass.bus.async_fire(EVENT_HOMEASSISTANT_START) - await hass.async_block_till_done() - error_callback = application.updater.start_polling.call_args.kwargs[ - "error_callback" - ] - - error_callback(error) - - assert log_message in caplog.text - - async def test_webhook_endpoint_unauthorized_update_doesnt_generate_telegram_text_event( hass: HomeAssistant, webhook_platform, diff --git a/tests/components/tellduslive/test_config_flow.py b/tests/components/tellduslive/test_config_flow.py index abce2858bf3..c575e7fb5c1 100644 --- a/tests/components/tellduslive/test_config_flow.py +++ b/tests/components/tellduslive/test_config_flow.py @@ -20,9 +20,7 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry -def init_config_flow( - hass: HomeAssistant, side_effect: type[Exception] | None = None -) -> config_flow.FlowHandler: +def init_config_flow(hass, side_effect=None): """Init a configuration flow.""" flow = config_flow.FlowHandler() flow.hass = hass diff --git a/tests/components/template/conftest.py b/tests/components/template/conftest.py index bdca84ba071..b400d443be7 100644 --- a/tests/components/template/conftest.py +++ b/tests/components/template/conftest.py @@ -3,7 +3,6 @@ import pytest from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component from tests.common import assert_setup_component, async_mock_service @@ -17,8 +16,8 @@ def calls(hass: HomeAssistant) -> list[ServiceCall]: @pytest.fixture async def start_ha( - hass: HomeAssistant, count: int, 
domain: str, config: ConfigType -) -> None: + hass: HomeAssistant, count, domain, config, caplog: pytest.LogCaptureFixture +): """Do setup of integration.""" with assert_setup_component(count, domain): assert await async_setup_component( @@ -36,8 +35,3 @@ async def start_ha( async def caplog_setup_text(caplog: pytest.LogCaptureFixture) -> str: """Return setup log of integration.""" return caplog.text - - -@pytest.fixture(autouse=True, name="stub_blueprint_populate") -def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: - """Stub copying the blueprints to the config folder.""" diff --git a/tests/components/template/snapshots/test_alarm_control_panel.ambr b/tests/components/template/snapshots/test_alarm_control_panel.ambr deleted file mode 100644 index 9772c31220e..00000000000 --- a/tests/components/template/snapshots/test_alarm_control_panel.ambr +++ /dev/null @@ -1,18 +0,0 @@ -# serializer version: 1 -# name: test_setup_config_entry - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'changed_by': None, - 'code_arm_required': True, - 'code_format': , - 'friendly_name': 'My template', - 'supported_features': , - }), - 'context': , - 'entity_id': 'alarm_control_panel.my_template', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'armed_away', - }) -# --- diff --git a/tests/components/template/snapshots/test_button.ambr b/tests/components/template/snapshots/test_button.ambr deleted file mode 100644 index 3d96ad66050..00000000000 --- a/tests/components/template/snapshots/test_button.ambr +++ /dev/null @@ -1,28 +0,0 @@ -# serializer version: 1 -# name: test_setup_config_entry[config_entry_extra_options0] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'My template', - }), - 'context': , - 'entity_id': 'button.my_template', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_setup_config_entry[config_entry_extra_options1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'update', - 'friendly_name': 'My template', - }), - 'context': , - 'entity_id': 'button.my_template', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/template/snapshots/test_number.ambr b/tests/components/template/snapshots/test_number.ambr deleted file mode 100644 index d6f5b1e338d..00000000000 --- a/tests/components/template/snapshots/test_number.ambr +++ /dev/null @@ -1,18 +0,0 @@ -# serializer version: 1 -# name: test_setup_config_entry - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'My template', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 0.1, - }), - 'context': , - 'entity_id': 'number.my_template', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.0', - }) -# --- \ No newline at end of file diff --git a/tests/components/template/snapshots/test_select.ambr b/tests/components/template/snapshots/test_select.ambr deleted file mode 100644 index e2142394cba..00000000000 --- a/tests/components/template/snapshots/test_select.ambr +++ /dev/null @@ -1,19 +0,0 @@ -# serializer version: 1 -# name: test_setup_config_entry - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'My template', - 'options': Wrapper([ - 'off', - 'on', - 'auto', - ]), - }), - 'context': , - 'entity_id': 'select.my_template', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/template/snapshots/test_switch.ambr 
b/tests/components/template/snapshots/test_switch.ambr deleted file mode 100644 index c240a9436a0..00000000000 --- a/tests/components/template/snapshots/test_switch.ambr +++ /dev/null @@ -1,14 +0,0 @@ -# serializer version: 1 -# name: test_setup_config_entry - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'My template', - }), - 'context': , - 'entity_id': 'switch.my_template', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/template/snapshots/test_weather.ambr b/tests/components/template/snapshots/test_weather.ambr index bdda5b44e94..9b0cf2b9471 100644 --- a/tests/components/template/snapshots/test_weather.ambr +++ b/tests/components/template/snapshots/test_weather.ambr @@ -1,4 +1,87 @@ # serializer version: 1 +# name: test_forecasts[config0-1-weather-forecast] + dict({ + 'weather.forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 14.2, + }), + ]), + }), + }) +# --- +# name: test_forecasts[config0-1-weather-forecast].1 + dict({ + 'weather.forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 14.2, + }), + ]), + }), + }) +# --- +# name: test_forecasts[config0-1-weather-forecast].2 + dict({ + 'weather.forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'fog', + 'datetime': '2023-02-17T14:00:00+00:00', + 'is_daytime': True, + 'temperature': 14.2, + }), + ]), + }), + }) +# --- +# name: test_forecasts[config0-1-weather-forecast].3 + dict({ + 'weather.forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 16.9, + }), + ]), + }), + }) +# --- +# name: test_forecasts[config0-1-weather-get_forecast] + dict({ + 'forecast': list([ + ]), + }) +# --- +# name: test_forecasts[config0-1-weather-get_forecast].1 + dict({ + 'forecast': list([ + ]), + }) +# --- +# name: test_forecasts[config0-1-weather-get_forecast].2 + dict({ + 'forecast': list([ + dict({ + 'condition': 'fog', + 'datetime': '2023-02-17T14:00:00+00:00', + 'is_daytime': True, + 'temperature': 14.2, + }), + ]), + }) +# --- +# name: test_forecasts[config0-1-weather-get_forecast].3 + dict({ + 'forecast': list([ + ]), + }) +# --- # name: test_forecasts[config0-1-weather-get_forecasts] dict({ 'weather.forecast': dict({ @@ -37,6 +120,51 @@ }), }) # --- +# name: test_forecasts[config0-1-weather] + dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 14.2, + }), + ]), + }) +# --- +# name: test_forecasts[config0-1-weather].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 14.2, + }), + ]), + }) +# --- +# name: test_forecasts[config0-1-weather].2 + dict({ + 'forecast': list([ + dict({ + 'condition': 'fog', + 'datetime': '2023-02-17T14:00:00+00:00', + 'is_daytime': True, + 'temperature': 14.2, + }), + ]), + }) +# --- +# name: test_forecasts[config0-1-weather].3 + dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 16.9, + }), + ]), + }) +# --- # name: test_restore_weather_save_state dict({ 'last_apparent_temperature': None, @@ -52,6 +180,92 @@ 'last_wind_speed': None, }) # --- +# name: test_trigger_weather_services[config0-1-template-forecast] + dict({ + 'weather.test': dict({ + 'forecast': list([ + dict({ + 
'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }), + }) +# --- +# name: test_trigger_weather_services[config0-1-template-forecast].1 + dict({ + 'weather.test': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }), + }) +# --- +# name: test_trigger_weather_services[config0-1-template-forecast].2 + dict({ + 'weather.test': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'is_daytime': True, + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }), + }) +# --- +# name: test_trigger_weather_services[config0-1-template-get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }) +# --- +# name: test_trigger_weather_services[config0-1-template-get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }) +# --- +# name: test_trigger_weather_services[config0-1-template-get_forecast].2 + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'is_daytime': True, + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }) +# --- # name: test_trigger_weather_services[config0-1-template-get_forecasts] dict({ 'weather.test': dict({ @@ -98,3 +312,43 @@ }), }) # --- +# name: test_trigger_weather_services[config0-1-template] + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }) +# --- +# name: test_trigger_weather_services[config0-1-template].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }) +# --- +# name: test_trigger_weather_services[config0-1-template].2 + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'is_daytime': True, + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }) +# --- diff --git a/tests/components/template/test_alarm_control_panel.py b/tests/components/template/test_alarm_control_panel.py index 4b259fabac2..6a2a95a64eb 100644 --- a/tests/components/template/test_alarm_control_panel.py +++ b/tests/components/template/test_alarm_control_panel.py @@ -1,26 +1,24 @@ """The tests for the Template alarm control panel platform.""" import pytest -from syrupy.assertion import SnapshotAssertion -from homeassistant.components import template -from homeassistant.components.alarm_control_panel import ( - DOMAIN as ALARM_DOMAIN, - AlarmControlPanelState, -) +from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN from homeassistant.const import ( ATTR_DOMAIN, ATTR_ENTITY_ID, ATTR_SERVICE_DATA, EVENT_CALL_SERVICE, - STATE_UNAVAILABLE, - STATE_UNKNOWN, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_CUSTOM_BYPASS, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMED_VACATION, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMED, + 
STATE_ALARM_PENDING, + STATE_ALARM_TRIGGERED, ) -from homeassistant.core import Event, HomeAssistant, State, callback -from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry, assert_setup_component, mock_restore_cache +from homeassistant.core import Event, HomeAssistant, callback TEMPLATE_NAME = "alarm_control_panel.test_template_panel" PANEL_NAME = "alarm_control_panel.test" @@ -102,20 +100,19 @@ TEMPLATE_ALARM_CONFIG = { }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_state_text(hass: HomeAssistant) -> None: +async def test_template_state_text(hass: HomeAssistant, start_ha) -> None: """Test the state text of a template.""" for set_state in ( - AlarmControlPanelState.ARMED_HOME, - AlarmControlPanelState.ARMED_AWAY, - AlarmControlPanelState.ARMED_NIGHT, - AlarmControlPanelState.ARMED_VACATION, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - AlarmControlPanelState.ARMING, - AlarmControlPanelState.DISARMED, - AlarmControlPanelState.PENDING, - AlarmControlPanelState.TRIGGERED, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMED_VACATION, + STATE_ALARM_ARMED_CUSTOM_BYPASS, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMED, + STATE_ALARM_PENDING, + STATE_ALARM_TRIGGERED, ): hass.states.async_set(PANEL_NAME, set_state) await hass.async_block_till_done() @@ -128,41 +125,6 @@ async def test_template_state_text(hass: HomeAssistant) -> None: assert state.state == "unknown" -async def test_setup_config_entry( - hass: HomeAssistant, snapshot: SnapshotAssertion -) -> None: - """Test the config flow.""" - value_template = "{{ states('alarm_control_panel.one') }}" - - hass.states.async_set("alarm_control_panel.one", "armed_away", {}) - - template_config_entry = MockConfigEntry( - data={}, - domain=template.DOMAIN, - options={ - "name": "My template", - "value_template": value_template, - "template_type": "alarm_control_panel", - "code_arm_required": True, - "code_format": "number", - }, - title="My template", - ) - template_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(template_config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("alarm_control_panel.my_template") - assert state is not None - assert state == snapshot - - hass.states.async_set("alarm_control_panel.one", "disarmed", {}) - await hass.async_block_till_done() - state = hass.states.get("alarm_control_panel.my_template") - assert state.state == AlarmControlPanelState.DISARMED - - @pytest.mark.parametrize(("count", "domain"), [(1, "alarm_control_panel")]) @pytest.mark.parametrize( "config", @@ -175,8 +137,7 @@ async def test_setup_config_entry( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_optimistic_states(hass: HomeAssistant) -> None: +async def test_optimistic_states(hass: HomeAssistant, start_ha) -> None: """Test the optimistic state.""" state = hass.states.get(TEMPLATE_NAME) @@ -184,13 +145,13 @@ async def test_optimistic_states(hass: HomeAssistant) -> None: assert state.state == "unknown" for service, set_state in ( - ("alarm_arm_away", AlarmControlPanelState.ARMED_AWAY), - ("alarm_arm_home", AlarmControlPanelState.ARMED_HOME), - ("alarm_arm_night", AlarmControlPanelState.ARMED_NIGHT), - ("alarm_arm_vacation", AlarmControlPanelState.ARMED_VACATION), - ("alarm_arm_custom_bypass", AlarmControlPanelState.ARMED_CUSTOM_BYPASS), - ("alarm_disarm", 
AlarmControlPanelState.DISARMED), - ("alarm_trigger", AlarmControlPanelState.TRIGGERED), + ("alarm_arm_away", STATE_ALARM_ARMED_AWAY), + ("alarm_arm_home", STATE_ALARM_ARMED_HOME), + ("alarm_arm_night", STATE_ALARM_ARMED_NIGHT), + ("alarm_arm_vacation", STATE_ALARM_ARMED_VACATION), + ("alarm_arm_custom_bypass", STATE_ALARM_ARMED_CUSTOM_BYPASS), + ("alarm_disarm", STATE_ALARM_DISARMED), + ("alarm_trigger", STATE_ALARM_TRIGGERED), ): await hass.services.async_call( ALARM_DOMAIN, @@ -266,9 +227,8 @@ async def test_optimistic_states(hass: HomeAssistant) -> None: ), ], ) -@pytest.mark.usefixtures("start_ha") async def test_template_syntax_error( - hass: HomeAssistant, msg, caplog_setup_text + hass: HomeAssistant, msg, start_ha, caplog_setup_text ) -> None: """Test templating syntax error.""" assert len(hass.states.async_all("alarm_control_panel")) == 0 @@ -284,7 +244,7 @@ async def test_template_syntax_error( "platform": "template", "panels": { "test_template_panel": { - "name": '{{ "Template Alarm Panel" }}', + "name": "Template Alarm Panel", "value_template": "disarmed", **OPTIMISTIC_TEMPLATE_ALARM_CONFIG, } @@ -293,8 +253,7 @@ async def test_template_syntax_error( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_name(hass: HomeAssistant) -> None: +async def test_name(hass: HomeAssistant, start_ha) -> None: """Test the accessibility of the name attribute.""" state = hass.states.get(TEMPLATE_NAME) assert state is not None @@ -325,9 +284,8 @@ async def test_name(hass: HomeAssistant) -> None: "alarm_trigger", ], ) -@pytest.mark.usefixtures("start_ha") async def test_actions( - hass: HomeAssistant, service, call_service_events: list[Event] + hass: HomeAssistant, service, start_ha, call_service_events: list[Event] ) -> None: """Test alarm actions.""" await hass.services.async_call( @@ -363,8 +321,7 @@ async def test_actions( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_unique_id(hass: HomeAssistant) -> None: +async def test_unique_id(hass: HomeAssistant, start_ha) -> None: """Test unique_id option only creates one alarm control panel per id.""" assert len(hass.states.async_all()) == 1 @@ -436,130 +393,10 @@ async def test_unique_id(hass: HomeAssistant) -> None: ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_code_config(hass: HomeAssistant, code_format, code_arm_required) -> None: +async def test_code_config( + hass: HomeAssistant, code_format, code_arm_required, start_ha +) -> None: """Test configuration options related to alarm code.""" state = hass.states.get(TEMPLATE_NAME) assert state.attributes.get("code_format") == code_format assert state.attributes.get("code_arm_required") == code_arm_required - - -@pytest.mark.parametrize(("count", "domain"), [(1, "alarm_control_panel")]) -@pytest.mark.parametrize( - "config", - [ - { - "alarm_control_panel": { - "platform": "template", - "panels": {"test_template_panel": TEMPLATE_ALARM_CONFIG}, - } - }, - ], -) -@pytest.mark.parametrize( - ("restored_state", "initial_state"), - [ - ( - AlarmControlPanelState.ARMED_AWAY, - AlarmControlPanelState.ARMED_AWAY, - ), - ( - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - ), - ( - AlarmControlPanelState.ARMED_HOME, - AlarmControlPanelState.ARMED_HOME, - ), - ( - AlarmControlPanelState.ARMED_NIGHT, - AlarmControlPanelState.ARMED_NIGHT, - ), - ( - AlarmControlPanelState.ARMED_VACATION, - AlarmControlPanelState.ARMED_VACATION, - ), - (AlarmControlPanelState.ARMING, AlarmControlPanelState.ARMING), - 
(AlarmControlPanelState.DISARMED, AlarmControlPanelState.DISARMED), - (AlarmControlPanelState.PENDING, AlarmControlPanelState.PENDING), - ( - AlarmControlPanelState.TRIGGERED, - AlarmControlPanelState.TRIGGERED, - ), - (STATE_UNAVAILABLE, STATE_UNKNOWN), - (STATE_UNKNOWN, STATE_UNKNOWN), - ("faulty_state", STATE_UNKNOWN), - ], -) -async def test_restore_state( - hass: HomeAssistant, - count, - domain, - config, - restored_state, - initial_state, -) -> None: - """Test restoring template alarm control panel.""" - - fake_state = State( - "alarm_control_panel.test_template_panel", - restored_state, - {}, - ) - mock_restore_cache(hass, (fake_state,)) - with assert_setup_component(count, domain): - assert await async_setup_component( - hass, - domain, - config, - ) - - await hass.async_block_till_done() - - await hass.async_start() - await hass.async_block_till_done() - - state = hass.states.get("alarm_control_panel.test_template_panel") - assert state.state == initial_state - - -async def test_device_id( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test for device for button template.""" - - device_config_entry = MockConfigEntry() - device_config_entry.add_to_hass(hass) - device_entry = device_registry.async_get_or_create( - config_entry_id=device_config_entry.entry_id, - identifiers={("test", "identifier_test")}, - connections={("mac", "30:31:32:33:34:35")}, - ) - await hass.async_block_till_done() - assert device_entry is not None - assert device_entry.id is not None - - template_config_entry = MockConfigEntry( - data={}, - domain=template.DOMAIN, - options={ - "name": "My template", - "value_template": "disarmed", - "template_type": "alarm_control_panel", - "code_arm_required": True, - "code_format": "number", - "device_id": device_entry.id, - }, - title="My template", - ) - - template_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(template_config_entry.entry_id) - await hass.async_block_till_done() - - template_entity = entity_registry.async_get("alarm_control_panel.my_template") - assert template_entity is not None - assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_binary_sensor.py b/tests/components/template/test_binary_sensor.py index 3e3a629b4be..50cad5be9e1 100644 --- a/tests/components/template/test_binary_sensor.py +++ b/tests/components/template/test_binary_sensor.py @@ -1,6 +1,5 @@ """The tests for the Template Binary sensor platform.""" -from copy import deepcopy from datetime import UTC, datetime, timedelta import logging from unittest.mock import patch @@ -33,6 +32,9 @@ from tests.common import ( mock_restore_cache_with_extra_data, ) +ON = "on" +OFF = "off" + @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( @@ -69,13 +71,14 @@ from tests.common import ( ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_setup_minimal(hass: HomeAssistant, entity_id, name, attributes) -> None: +async def test_setup_minimal( + hass: HomeAssistant, start_ha, entity_id, name, attributes +) -> None: """Test the setup.""" state = hass.states.get(entity_id) assert state is not None assert state.name == name - assert state.state == STATE_ON + assert state.state == ON assert state.attributes == attributes @@ -114,13 +117,12 @@ async def test_setup_minimal(hass: HomeAssistant, entity_id, name, attributes) - ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_setup(hass: HomeAssistant, entity_id) -> None: +async def 
test_setup(hass: HomeAssistant, start_ha, entity_id) -> None: """Test the setup.""" state = hass.states.get(entity_id) assert state is not None assert state.name == "virtual thingy" - assert state.state == STATE_ON + assert state.state == ON assert state.attributes["device_class"] == "motion" @@ -231,8 +233,7 @@ async def test_setup_config_entry( ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_setup_invalid_sensors(hass: HomeAssistant, count) -> None: +async def test_setup_invalid_sensors(hass: HomeAssistant, count, start_ha) -> None: """Test setup with no sensors.""" assert len(hass.states.async_entity_ids("binary_sensor")) == count @@ -250,7 +251,7 @@ async def test_setup_invalid_sensors(hass: HomeAssistant, count) -> None: "value_template": "{{ states.sensor.xyz.state }}", "icon_template": "{% if " "states.binary_sensor.test_state.state == " - "'on' %}" + "'Works' %}" "mdi:check" "{% endif %}", }, @@ -267,7 +268,7 @@ async def test_setup_invalid_sensors(hass: HomeAssistant, count) -> None: "state": "{{ states.sensor.xyz.state }}", "icon": "{% if " "states.binary_sensor.test_state.state == " - "'on' %}" + "'Works' %}" "mdi:check" "{% endif %}", }, @@ -278,13 +279,12 @@ async def test_setup_invalid_sensors(hass: HomeAssistant, count) -> None: ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_icon_template(hass: HomeAssistant, entity_id) -> None: +async def test_icon_template(hass: HomeAssistant, start_ha, entity_id) -> None: """Test icon template.""" state = hass.states.get(entity_id) assert state.attributes.get("icon") == "" - hass.states.async_set("binary_sensor.test_state", STATE_ON) + hass.states.async_set("binary_sensor.test_state", "Works") await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.attributes["icon"] == "mdi:check" @@ -303,7 +303,7 @@ async def test_icon_template(hass: HomeAssistant, entity_id) -> None: "value_template": "{{ states.sensor.xyz.state }}", "entity_picture_template": "{% if " "states.binary_sensor.test_state.state == " - "'on' %}" + "'Works' %}" "/local/sensor.png" "{% endif %}", }, @@ -320,7 +320,7 @@ async def test_icon_template(hass: HomeAssistant, entity_id) -> None: "state": "{{ states.sensor.xyz.state }}", "picture": "{% if " "states.binary_sensor.test_state.state == " - "'on' %}" + "'Works' %}" "/local/sensor.png" "{% endif %}", }, @@ -331,13 +331,14 @@ async def test_icon_template(hass: HomeAssistant, entity_id) -> None: ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_entity_picture_template(hass: HomeAssistant, entity_id) -> None: +async def test_entity_picture_template( + hass: HomeAssistant, start_ha, entity_id +) -> None: """Test entity_picture template.""" state = hass.states.get(entity_id) assert state.attributes.get("entity_picture") == "" - hass.states.async_set("binary_sensor.test_state", STATE_ON) + hass.states.async_set("binary_sensor.test_state", "Works") await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.attributes["entity_picture"] == "/local/sensor.png" @@ -380,8 +381,7 @@ async def test_entity_picture_template(hass: HomeAssistant, entity_id) -> None: ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_attribute_templates(hass: HomeAssistant, entity_id) -> None: +async def test_attribute_templates(hass: HomeAssistant, start_ha, entity_id) -> None: """Test attribute_templates template.""" state = hass.states.get(entity_id) assert state.attributes.get("test_attribute") == "It ." 
@@ -425,8 +425,7 @@ async def setup_mock(): }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_match_all(hass: HomeAssistant, setup_mock) -> None: +async def test_match_all(hass: HomeAssistant, setup_mock, start_ha) -> None: """Test template that is rerendered on any state lifecycle.""" init_calls = len(setup_mock.mock_calls) @@ -453,17 +452,16 @@ async def test_match_all(hass: HomeAssistant, setup_mock) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_event(hass: HomeAssistant) -> None: +async def test_event(hass: HomeAssistant, start_ha) -> None: """Test the event.""" state = hass.states.get("binary_sensor.test") - assert state.state == STATE_OFF + assert state.state == OFF - hass.states.async_set("sensor.test_state", STATE_ON) + hass.states.async_set("sensor.test_state", ON) await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == STATE_ON + assert state.state == ON @pytest.mark.parametrize( @@ -564,46 +562,45 @@ async def test_event(hass: HomeAssistant) -> None: ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_delay_on_off(hass: HomeAssistant) -> None: +async def test_template_delay_on_off(hass: HomeAssistant, start_ha) -> None: """Test binary sensor template delay on.""" # Ensure the initial state is not on - assert hass.states.get("binary_sensor.test_on").state != STATE_ON - assert hass.states.get("binary_sensor.test_off").state != STATE_ON + assert hass.states.get("binary_sensor.test_on").state != ON + assert hass.states.get("binary_sensor.test_off").state != ON hass.states.async_set("input_number.delay", 5) - hass.states.async_set("sensor.test_state", STATE_ON) + hass.states.async_set("sensor.test_state", ON) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == STATE_OFF - assert hass.states.get("binary_sensor.test_off").state == STATE_ON + assert hass.states.get("binary_sensor.test_on").state == OFF + assert hass.states.get("binary_sensor.test_off").state == ON future = dt_util.utcnow() + timedelta(seconds=5) async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == STATE_ON - assert hass.states.get("binary_sensor.test_off").state == STATE_ON + assert hass.states.get("binary_sensor.test_on").state == ON + assert hass.states.get("binary_sensor.test_off").state == ON # check with time changes - hass.states.async_set("sensor.test_state", STATE_OFF) + hass.states.async_set("sensor.test_state", OFF) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == STATE_OFF - assert hass.states.get("binary_sensor.test_off").state == STATE_ON + assert hass.states.get("binary_sensor.test_on").state == OFF + assert hass.states.get("binary_sensor.test_off").state == ON - hass.states.async_set("sensor.test_state", STATE_ON) + hass.states.async_set("sensor.test_state", ON) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == STATE_OFF - assert hass.states.get("binary_sensor.test_off").state == STATE_ON + assert hass.states.get("binary_sensor.test_on").state == OFF + assert hass.states.get("binary_sensor.test_off").state == ON - hass.states.async_set("sensor.test_state", STATE_OFF) + hass.states.async_set("sensor.test_state", OFF) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == STATE_OFF - assert hass.states.get("binary_sensor.test_off").state == STATE_ON + assert 
hass.states.get("binary_sensor.test_on").state == OFF + assert hass.states.get("binary_sensor.test_off").state == ON future = dt_util.utcnow() + timedelta(seconds=5) async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == STATE_OFF - assert hass.states.get("binary_sensor.test_off").state == STATE_OFF + assert hass.states.get("binary_sensor.test_on").state == OFF + assert hass.states.get("binary_sensor.test_off").state == OFF @pytest.mark.parametrize("count", [1]) @@ -643,9 +640,8 @@ async def test_template_delay_on_off(hass: HomeAssistant) -> None: ), ], ) -@pytest.mark.usefixtures("start_ha") async def test_available_without_availability_template( - hass: HomeAssistant, entity_id + hass: HomeAssistant, start_ha, entity_id ) -> None: """Ensure availability is true without an availability_template.""" state = hass.states.get(entity_id) @@ -693,8 +689,7 @@ async def test_available_without_availability_template( ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_availability_template(hass: HomeAssistant, entity_id) -> None: +async def test_availability_template(hass: HomeAssistant, start_ha, entity_id) -> None: """Test availability template.""" hass.states.async_set("sensor.test_state", STATE_OFF) await hass.async_block_till_done() @@ -729,12 +724,11 @@ async def test_availability_template(hass: HomeAssistant, entity_id) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_invalid_attribute_template( - hass: HomeAssistant, caplog_setup_text + hass: HomeAssistant, start_ha, caplog_setup_text ) -> None: """Test that errors are logged if rendering template fails.""" - hass.states.async_set("binary_sensor.test_sensor", STATE_ON) + hass.states.async_set("binary_sensor.test_sensor", "true") assert len(hass.states.async_all()) == 2 assert ("test_attribute") in caplog_setup_text assert ("TemplateError") in caplog_setup_text @@ -757,9 +751,8 @@ async def test_invalid_attribute_template( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, caplog_setup_text + hass: HomeAssistant, start_ha, caplog_setup_text ) -> None: """Test that an invalid availability keeps the device available.""" @@ -799,7 +792,7 @@ async def test_no_update_template_match_all( }, ) await hass.async_block_till_done() - hass.states.async_set("binary_sensor.test_sensor", STATE_ON) + hass.states.async_set("binary_sensor.test_sensor", "true") assert len(hass.states.async_all()) == 5 assert hass.states.get("binary_sensor.all_state").state == STATE_UNKNOWN @@ -810,29 +803,29 @@ async def test_no_update_template_match_all( hass.bus.async_fire(EVENT_HOMEASSISTANT_START) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.all_state").state == STATE_ON - assert hass.states.get("binary_sensor.all_icon").state == STATE_ON - assert hass.states.get("binary_sensor.all_entity_picture").state == STATE_ON - assert hass.states.get("binary_sensor.all_attribute").state == STATE_ON + assert hass.states.get("binary_sensor.all_state").state == ON + assert hass.states.get("binary_sensor.all_icon").state == ON + assert hass.states.get("binary_sensor.all_entity_picture").state == ON + assert hass.states.get("binary_sensor.all_attribute").state == ON - hass.states.async_set("binary_sensor.test_sensor", STATE_OFF) + hass.states.async_set("binary_sensor.test_sensor", "false") await hass.async_block_till_done() - assert 
hass.states.get("binary_sensor.all_state").state == STATE_ON + assert hass.states.get("binary_sensor.all_state").state == ON # Will now process because we have one valid template - assert hass.states.get("binary_sensor.all_icon").state == STATE_OFF - assert hass.states.get("binary_sensor.all_entity_picture").state == STATE_OFF - assert hass.states.get("binary_sensor.all_attribute").state == STATE_OFF + assert hass.states.get("binary_sensor.all_icon").state == OFF + assert hass.states.get("binary_sensor.all_entity_picture").state == OFF + assert hass.states.get("binary_sensor.all_attribute").state == OFF await async_update_entity(hass, "binary_sensor.all_state") await async_update_entity(hass, "binary_sensor.all_icon") await async_update_entity(hass, "binary_sensor.all_entity_picture") await async_update_entity(hass, "binary_sensor.all_attribute") - assert hass.states.get("binary_sensor.all_state").state == STATE_ON - assert hass.states.get("binary_sensor.all_icon").state == STATE_OFF - assert hass.states.get("binary_sensor.all_entity_picture").state == STATE_OFF - assert hass.states.get("binary_sensor.all_attribute").state == STATE_OFF + assert hass.states.get("binary_sensor.all_state").state == ON + assert hass.states.get("binary_sensor.all_icon").state == OFF + assert hass.states.get("binary_sensor.all_entity_picture").state == OFF + assert hass.states.get("binary_sensor.all_attribute").state == OFF @pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @@ -845,7 +838,7 @@ async def test_no_update_template_match_all( "binary_sensor": { "name": "top-level", "unique_id": "sensor-id", - "state": STATE_ON, + "state": ON, }, }, "binary_sensor": { @@ -864,9 +857,8 @@ async def test_no_update_template_match_all( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry ) -> None: """Test unique_id option only creates one binary sensor per id.""" assert len(hass.states.async_all()) == 2 @@ -900,9 +892,8 @@ async def test_unique_id( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_template_validation_error( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, start_ha ) -> None: """Test binary sensor template delay on.""" caplog.set_level(logging.ERROR) @@ -965,8 +956,9 @@ async def test_template_validation_error( ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_availability_icon_picture(hass: HomeAssistant, entity_id) -> None: +async def test_availability_icon_picture( + hass: HomeAssistant, start_ha, entity_id +) -> None: """Test name, icon and picture templates are rendered at setup.""" state = hass.states.get(entity_id) assert state.state == "unavailable" @@ -1003,32 +995,20 @@ async def test_availability_icon_picture(hass: HomeAssistant, entity_id) -> None ], ) @pytest.mark.parametrize( - ("extra_config", "source_state", "restored_state", "initial_state"), + ("extra_config", "restored_state", "initial_state"), [ - ({}, STATE_OFF, STATE_ON, STATE_OFF), - ({}, STATE_OFF, STATE_OFF, STATE_OFF), - ({}, STATE_OFF, STATE_UNAVAILABLE, STATE_OFF), - ({}, STATE_OFF, STATE_UNKNOWN, STATE_OFF), - ({"delay_off": 5}, STATE_OFF, STATE_ON, STATE_ON), - ({"delay_off": 5}, STATE_OFF, STATE_OFF, STATE_OFF), - ({"delay_off": 5}, STATE_OFF, STATE_UNAVAILABLE, STATE_UNKNOWN), - ({"delay_off": 5}, STATE_OFF, STATE_UNKNOWN, STATE_UNKNOWN), - ({"delay_on": 5}, STATE_OFF, STATE_ON, 
STATE_OFF), - ({"delay_on": 5}, STATE_OFF, STATE_OFF, STATE_OFF), - ({"delay_on": 5}, STATE_OFF, STATE_UNAVAILABLE, STATE_OFF), - ({"delay_on": 5}, STATE_OFF, STATE_UNKNOWN, STATE_OFF), - ({}, STATE_ON, STATE_ON, STATE_ON), - ({}, STATE_ON, STATE_OFF, STATE_ON), - ({}, STATE_ON, STATE_UNAVAILABLE, STATE_ON), - ({}, STATE_ON, STATE_UNKNOWN, STATE_ON), - ({"delay_off": 5}, STATE_ON, STATE_ON, STATE_ON), - ({"delay_off": 5}, STATE_ON, STATE_OFF, STATE_ON), - ({"delay_off": 5}, STATE_ON, STATE_UNAVAILABLE, STATE_ON), - ({"delay_off": 5}, STATE_ON, STATE_UNKNOWN, STATE_ON), - ({"delay_on": 5}, STATE_ON, STATE_ON, STATE_ON), - ({"delay_on": 5}, STATE_ON, STATE_OFF, STATE_OFF), - ({"delay_on": 5}, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN), - ({"delay_on": 5}, STATE_ON, STATE_UNKNOWN, STATE_UNKNOWN), + ({}, ON, OFF), + ({}, OFF, OFF), + ({}, STATE_UNAVAILABLE, OFF), + ({}, STATE_UNKNOWN, OFF), + ({"delay_off": 5}, ON, ON), + ({"delay_off": 5}, OFF, OFF), + ({"delay_off": 5}, STATE_UNAVAILABLE, STATE_UNKNOWN), + ({"delay_off": 5}, STATE_UNKNOWN, STATE_UNKNOWN), + ({"delay_on": 5}, ON, ON), + ({"delay_on": 5}, OFF, OFF), + ({"delay_on": 5}, STATE_UNAVAILABLE, STATE_UNKNOWN), + ({"delay_on": 5}, STATE_UNKNOWN, STATE_UNKNOWN), ], ) async def test_restore_state( @@ -1037,20 +1017,18 @@ async def test_restore_state( domain, config, extra_config, - source_state, restored_state, initial_state, ) -> None: """Test restoring template binary sensor.""" - hass.states.async_set("sensor.test_state", source_state) fake_state = State( "binary_sensor.test", restored_state, {}, ) mock_restore_cache(hass, (fake_state,)) - config = deepcopy(config) + config = dict(config) config["template"]["binary_sensor"].update(**extra_config) with assert_setup_component(count, domain): assert await async_setup_component( @@ -1123,9 +1101,8 @@ async def test_restore_state( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_trigger_entity( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry ) -> None: """Test trigger entity works.""" await hass.async_block_till_done() @@ -1142,7 +1119,7 @@ async def test_trigger_entity( await hass.async_block_till_done() state = hass.states.get("binary_sensor.hello_name") - assert state.state == STATE_ON + assert state.state == ON assert state.attributes.get("device_class") == "battery" assert state.attributes.get("icon") == "mdi:pirate" assert state.attributes.get("entity_picture") == "/local/dogs.png" @@ -1160,7 +1137,7 @@ async def test_trigger_entity( ) state = hass.states.get("binary_sensor.via_list") - assert state.state == STATE_ON + assert state.state == ON assert state.attributes.get("device_class") == "battery" assert state.attributes.get("icon") == "mdi:pirate" assert state.attributes.get("entity_picture") == "/local/dogs.png" @@ -1172,7 +1149,7 @@ async def test_trigger_entity( hass.bus.async_fire("test_event", {"beer": 2, "uno_mas": "si"}) await hass.async_block_till_done() state = hass.states.get("binary_sensor.via_list") - assert state.state == STATE_ON + assert state.state == ON assert state.attributes.get("another") == "si" @@ -1194,8 +1171,9 @@ async def test_trigger_entity( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_with_trigger_templated_delay_on(hass: HomeAssistant) -> None: +async def test_template_with_trigger_templated_delay_on( + hass: HomeAssistant, start_ha +) -> None: """Test binary sensor template with template delay on.""" state = 
hass.states.get("binary_sensor.test") assert state.state == STATE_UNKNOWN @@ -1214,7 +1192,7 @@ async def test_template_with_trigger_templated_delay_on(hass: HomeAssistant) -> await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == STATE_ON + assert state.state == ON # Now wait for the auto-off future = dt_util.utcnow() + timedelta(seconds=2) @@ -1222,7 +1200,7 @@ async def test_template_with_trigger_templated_delay_on(hass: HomeAssistant) -> await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == STATE_OFF + assert state.state == OFF @pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @@ -1250,8 +1228,8 @@ async def test_template_with_trigger_templated_delay_on(hass: HomeAssistant) -> @pytest.mark.parametrize( ("restored_state", "initial_state", "initial_attributes"), [ - (STATE_ON, STATE_ON, ["entity_picture", "icon", "plus_one"]), - (STATE_OFF, STATE_OFF, ["entity_picture", "icon", "plus_one"]), + (ON, ON, ["entity_picture", "icon", "plus_one"]), + (OFF, OFF, ["entity_picture", "icon", "plus_one"]), (STATE_UNAVAILABLE, STATE_UNKNOWN, []), (STATE_UNKNOWN, STATE_UNKNOWN, []), ], @@ -1306,7 +1284,7 @@ async def test_trigger_entity_restore_state( await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == STATE_ON + assert state.state == ON assert state.attributes["icon"] == "mdi:pirate" assert state.attributes["entity_picture"] == "/local/dogs.png" assert state.attributes["plus_one"] == 3 @@ -1330,7 +1308,7 @@ async def test_trigger_entity_restore_state( }, ], ) -@pytest.mark.parametrize("restored_state", [STATE_ON, STATE_OFF]) +@pytest.mark.parametrize("restored_state", [ON, OFF]) async def test_trigger_entity_restore_state_auto_off( hass: HomeAssistant, count, @@ -1374,7 +1352,7 @@ async def test_trigger_entity_restore_state_auto_off( await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == STATE_OFF + assert state.state == OFF @pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @@ -1402,7 +1380,7 @@ async def test_trigger_entity_restore_state_auto_off_expired( freezer.move_to("2022-02-02 12:02:00+00:00") fake_state = State( "binary_sensor.test", - STATE_ON, + ON, {}, ) fake_extra_data = { @@ -1424,7 +1402,7 @@ async def test_trigger_entity_restore_state_auto_off_expired( await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == STATE_OFF + assert state.state == OFF async def test_device_id( diff --git a/tests/components/template/test_blueprint.py b/tests/components/template/test_blueprint.py deleted file mode 100644 index 1df9e738b06..00000000000 --- a/tests/components/template/test_blueprint.py +++ /dev/null @@ -1,242 +0,0 @@ -"""Test blueprints.""" - -from collections.abc import Iterator -import contextlib -from os import PathLike -import pathlib -from unittest.mock import MagicMock, patch - -import pytest - -from homeassistant.components import template -from homeassistant.components.blueprint import ( - BLUEPRINT_SCHEMA, - Blueprint, - BlueprintInUse, - DomainBlueprints, -) -from homeassistant.components.template import DOMAIN, SERVICE_RELOAD -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import device_registry as dr -from homeassistant.setup import async_setup_component -from homeassistant.util import yaml - -from tests.common import async_mock_service - -BUILTIN_BLUEPRINT_FOLDER = 
pathlib.Path(template.__file__).parent / "blueprints" - - -@contextlib.contextmanager -def patch_blueprint( - blueprint_path: str, data_path: str | PathLike[str] -) -> Iterator[None]: - """Patch blueprint loading from a different source.""" - orig_load = DomainBlueprints._load_blueprint - - @callback - def mock_load_blueprint(self, path): - if path != blueprint_path: - pytest.fail(f"Unexpected blueprint {path}") - return orig_load(self, path) - - return Blueprint( - yaml.load_yaml(data_path), - expected_domain=self.domain, - path=path, - schema=BLUEPRINT_SCHEMA, - ) - - with patch( - "homeassistant.components.blueprint.models.DomainBlueprints._load_blueprint", - mock_load_blueprint, - ): - yield - - -@contextlib.contextmanager -def patch_invalid_blueprint() -> Iterator[None]: - """Patch blueprint returning an invalid one.""" - - @callback - def mock_load_blueprint(self, path): - return Blueprint( - { - "blueprint": { - "domain": "template", - "name": "Invalid template blueprint", - }, - "binary_sensor": {}, - "sensor": {}, - }, - expected_domain=self.domain, - path=path, - schema=BLUEPRINT_SCHEMA, - ) - - with patch( - "homeassistant.components.blueprint.models.DomainBlueprints._load_blueprint", - mock_load_blueprint, - ): - yield - - -async def test_inverted_binary_sensor( - hass: HomeAssistant, device_registry: dr.DeviceRegistry -) -> None: - """Test inverted binary sensor blueprint.""" - hass.states.async_set("binary_sensor.foo", "on", {"friendly_name": "Foo"}) - hass.states.async_set("binary_sensor.bar", "off", {"friendly_name": "Bar"}) - - with patch_blueprint( - "inverted_binary_sensor.yaml", - BUILTIN_BLUEPRINT_FOLDER / "inverted_binary_sensor.yaml", - ): - assert await async_setup_component( - hass, - "template", - { - "template": [ - { - "use_blueprint": { - "path": "inverted_binary_sensor.yaml", - "input": {"reference_entity": "binary_sensor.foo"}, - }, - "name": "Inverted foo", - }, - { - "use_blueprint": { - "path": "inverted_binary_sensor.yaml", - "input": {"reference_entity": "binary_sensor.bar"}, - }, - "name": "Inverted bar", - }, - ] - }, - ) - - hass.states.async_set("binary_sensor.foo", "off", {"friendly_name": "Foo"}) - hass.states.async_set("binary_sensor.bar", "on", {"friendly_name": "Bar"}) - await hass.async_block_till_done() - - assert hass.states.get("binary_sensor.foo").state == "off" - assert hass.states.get("binary_sensor.bar").state == "on" - - inverted_foo = hass.states.get("binary_sensor.inverted_foo") - assert inverted_foo - assert inverted_foo.state == "on" - - inverted_bar = hass.states.get("binary_sensor.inverted_bar") - assert inverted_bar - assert inverted_bar.state == "off" - - foo_template = template.helpers.blueprint_in_template(hass, "binary_sensor.foo") - inverted_foo_template = template.helpers.blueprint_in_template( - hass, "binary_sensor.inverted_foo" - ) - assert foo_template is None - assert inverted_foo_template == "inverted_binary_sensor.yaml" - - inverted_binary_sensor_blueprint_entity_ids = ( - template.helpers.templates_with_blueprint(hass, "inverted_binary_sensor.yaml") - ) - assert len(inverted_binary_sensor_blueprint_entity_ids) == 2 - - assert len(template.helpers.templates_with_blueprint(hass, "dummy.yaml")) == 0 - - with pytest.raises(BlueprintInUse): - await template.async_get_blueprints(hass).async_remove_blueprint( - "inverted_binary_sensor.yaml" - ) - - -async def test_domain_blueprint(hass: HomeAssistant) -> None: - """Test DomainBlueprint services.""" - reload_handler_calls = async_mock_service(hass, DOMAIN, SERVICE_RELOAD) - 
mock_create_file = MagicMock() - mock_create_file.return_value = True - - with patch( - "homeassistant.components.blueprint.models.DomainBlueprints._create_file", - mock_create_file, - ): - await template.async_get_blueprints(hass).async_add_blueprint( - Blueprint( - { - "blueprint": { - "domain": DOMAIN, - "name": "Test", - }, - }, - expected_domain="template", - path="xxx", - schema=BLUEPRINT_SCHEMA, - ), - "xxx", - True, - ) - assert len(reload_handler_calls) == 1 - - -async def test_invalid_blueprint( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test an invalid blueprint definition.""" - - with patch_invalid_blueprint(): - assert await async_setup_component( - hass, - "template", - { - "template": [ - { - "use_blueprint": { - "path": "invalid.yaml", - }, - "name": "Invalid blueprint instance", - }, - ] - }, - ) - - assert "more than one platform defined per blueprint" in caplog.text - assert await template.async_get_blueprints(hass).async_get_blueprints() == {} - - -async def test_no_blueprint(hass: HomeAssistant) -> None: - """Test templates without blueprints.""" - with patch_blueprint( - "inverted_binary_sensor.yaml", - BUILTIN_BLUEPRINT_FOLDER / "inverted_binary_sensor.yaml", - ): - assert await async_setup_component( - hass, - "template", - { - "template": [ - {"binary_sensor": {"name": "test entity", "state": "off"}}, - { - "use_blueprint": { - "path": "inverted_binary_sensor.yaml", - "input": {"reference_entity": "binary_sensor.foo"}, - }, - "name": "inverted entity", - }, - ] - }, - ) - - hass.states.async_set("binary_sensor.foo", "off", {"friendly_name": "Foo"}) - await hass.async_block_till_done() - - assert ( - len( - template.helpers.templates_with_blueprint( - hass, "inverted_binary_sensor.yaml" - ) - ) - == 1 - ) - assert ( - template.helpers.blueprint_in_template(hass, "binary_sensor.test_entity") - is None - ) diff --git a/tests/components/template/test_button.py b/tests/components/template/test_button.py index b201385240c..c861c7874d4 100644 --- a/tests/components/template/test_button.py +++ b/tests/components/template/test_button.py @@ -1,15 +1,11 @@ """The tests for the Template button platform.""" import datetime as dt -from typing import Any from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy.assertion import SnapshotAssertion from homeassistant import setup from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.components.template import DOMAIN from homeassistant.components.template.button import DEFAULT_NAME from homeassistant.const import ( CONF_DEVICE_CLASS, @@ -19,58 +15,14 @@ from homeassistant.const import ( STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, assert_setup_component +from tests.common import assert_setup_component _TEST_BUTTON = "button.template_button" _TEST_OPTIONS_BUTTON = "button.test" -@pytest.mark.parametrize( - "config_entry_extra_options", - [ - {}, - { - "device_class": "update", - }, - ], -) -async def test_setup_config_entry( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - config_entry_extra_options: dict[str, str], -) -> None: - """Test the config flow.""" - - template_config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - "name": "My template", - "template_type": "button", - "press": [ - { - "service": 
"input_boolean.toggle", - "metadata": {}, - "data": {}, - "target": {"entity_id": "input_boolean.test"}, - } - ], - } - | config_entry_extra_options, - title="My template", - ) - template_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(template_config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("button.my_template") - assert state is not None - assert state == snapshot - - async def test_missing_optional_config(hass: HomeAssistant) -> None: """Test: missing optional template is ok.""" with assert_setup_component(1, "template"): @@ -233,11 +185,11 @@ async def test_unique_id(hass: HomeAssistant) -> None: def _verify( - hass: HomeAssistant, - expected_value: str, - attributes: dict[str, Any] | None = None, - entity_id: str = _TEST_BUTTON, -) -> None: + hass, + expected_value, + attributes=None, + entity_id=_TEST_BUTTON, +): """Verify button's state.""" attributes = attributes or {} if CONF_FRIENDLY_NAME not in attributes: @@ -245,49 +197,3 @@ def _verify( state = hass.states.get(entity_id) assert state.state == expected_value assert state.attributes == attributes - - -async def test_device_id( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test for device for button template.""" - - device_config_entry = MockConfigEntry() - device_config_entry.add_to_hass(hass) - device_entry = device_registry.async_get_or_create( - config_entry_id=device_config_entry.entry_id, - identifiers={("test", "identifier_test")}, - connections={("mac", "30:31:32:33:34:35")}, - ) - await hass.async_block_till_done() - assert device_entry is not None - assert device_entry.id is not None - - template_config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - "name": "My template", - "template_type": "button", - "device_id": device_entry.id, - "press": [ - { - "service": "input_boolean.toggle", - "metadata": {}, - "data": {}, - "target": {"entity_id": "input_boolean.test"}, - } - ], - }, - title="My template", - ) - template_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(template_config_entry.entry_id) - await hass.async_block_till_done() - - template_entity = entity_registry.async_get("button.my_template") - assert template_entity is not None - assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index a3e53aab9e1..f277b918661 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -29,21 +29,9 @@ from tests.typing import WebSocketGenerator "extra_attrs", ), [ - ( - "alarm_control_panel", - {"value_template": "{{ states('alarm_control_panel.one') }}"}, - "armed_away", - {"one": "armed_away", "two": "disarmed"}, - {}, - {}, - {"code_arm_required": True, "code_format": "number"}, - {}, - ), ( "binary_sensor", - { - "state": "{{ states('binary_sensor.one') == 'on' or states('binary_sensor.two') == 'on' }}" - }, + "{{ states('binary_sensor.one') == 'on' or states('binary_sensor.two') == 'on' }}", "on", {"one": "on", "two": "off"}, {}, @@ -53,9 +41,7 @@ from tests.typing import WebSocketGenerator ), ( "sensor", - { - "state": "{{ float(states('sensor.one')) + float(states('sensor.two')) }}" - }, + "{{ float(states('sensor.one')) + float(states('sensor.two')) }}", "50.0", {"one": "30.0", "two": "20.0"}, {}, @@ -63,107 +49,18 @@ from tests.typing import WebSocketGenerator {}, {}, ), 
- ( - "button", - {}, - "unknown", - {"one": "30.0", "two": "20.0"}, - {}, - { - "device_class": "restart", - "press": [ - { - "action": "input_boolean.toggle", - "target": {"entity_id": "input_boolean.test"}, - "data": {}, - } - ], - }, - { - "device_class": "restart", - "press": [ - { - "action": "input_boolean.toggle", - "target": {"entity_id": "input_boolean.test"}, - "data": {}, - } - ], - }, - {}, - ), - ( - "image", - {"url": "{{ states('sensor.one') }}"}, - "2024-07-09T00:00:00+00:00", - {"one": "http://www.test.com", "two": ""}, - {}, - {"verify_ssl": True}, - {"verify_ssl": True}, - {}, - ), - ( - "number", - {"state": "{{ states('number.one') }}"}, - "30.0", - {"one": "30.0", "two": "20.0"}, - {}, - { - "min": "0", - "max": "100", - "step": "0.1", - "unit_of_measurement": "cm", - "set_value": { - "action": "input_number.set_value", - "target": {"entity_id": "input_number.test"}, - "data": {"value": "{{ value }}"}, - }, - }, - { - "min": 0, - "max": 100, - "step": 0.1, - "unit_of_measurement": "cm", - "set_value": { - "action": "input_number.set_value", - "target": {"entity_id": "input_number.test"}, - "data": {"value": "{{ value }}"}, - }, - }, - {}, - ), - ( - "select", - {"state": "{{ states('select.one') }}"}, - "on", - {"one": "on", "two": "off"}, - {}, - {"options": "{{ ['off', 'on', 'auto'] }}"}, - {"options": "{{ ['off', 'on', 'auto'] }}"}, - {}, - ), - ( - "switch", - {"value_template": "{{ states('switch.one') }}"}, - "on", - {"one": "on", "two": "off"}, - {}, - {}, - {}, - {}, - ), ], ) -@pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") async def test_config_flow( hass: HomeAssistant, - template_type: str, - state_template: dict[str, Any], - template_state: str, - input_states: dict[str, Any], - input_attributes: dict[str, Any], - extra_input: dict[str, Any], - extra_options: dict[str, Any], - extra_attrs: dict[str, Any], + template_type, + state_template, + template_state, + input_states, + input_attributes, + extra_input, + extra_options, + extra_attrs, ) -> None: """Test the config flow.""" input_entities = ["one", "two"] @@ -194,7 +91,7 @@ async def test_config_flow( result["flow_id"], { "name": "My template", - **state_template, + "state": state_template, **extra_input, }, ) @@ -205,8 +102,8 @@ async def test_config_flow( assert result["data"] == {} assert result["options"] == { "name": "My template", + "state": state_template, "template_type": template_type, - **state_template, **extra_options, } assert len(mock_setup_entry.mock_calls) == 1 @@ -215,8 +112,8 @@ async def test_config_flow( assert config_entry.data == {} assert config_entry.options == { "name": "My template", + "state": state_template, "template_type": template_type, - **state_template, **extra_options, } @@ -230,76 +127,22 @@ async def test_config_flow( ( "template_type", "state_template", - "extra_input", - "extra_options", ), [ ( "sensor", - {"state": "{{ 15 }}"}, - {}, - {}, + "{{ 15 }}", ), ( "binary_sensor", - {"state": "{{ false }}"}, - {}, - {}, - ), - ( - "switch", - {"value_template": "{{ false }}"}, - {}, - {}, - ), - ( - "button", - {}, - {}, - {}, - ), - ( - "image", - { - "url": "{{ states('sensor.one') }}", - }, - {"verify_ssl": True}, - {"verify_ssl": True}, - ), - ( - "number", - {"state": "{{ states('number.one') }}"}, - { - "min": "0", - "max": "100", - "step": "0.1", - }, - { - "min": 0, - "max": 100, - "step": 0.1, - }, - ), - ( - "alarm_control_panel", - {"value_template": "{{ states('alarm_control_panel.one') }}"}, - {"code_arm_required": True, "code_format": "number"}, - 
{"code_arm_required": True, "code_format": "number"}, - ), - ( - "select", - {"state": "{{ states('select.one') }}"}, - {"options": "{{ ['off', 'on', 'auto'] }}"}, - {"options": "{{ ['off', 'on', 'auto'] }}"}, + "{{ false }}", ), ], ) async def test_config_flow_device( hass: HomeAssistant, template_type: str, - state_template: dict[str, Any], - extra_input: dict[str, Any], - extra_options: dict[str, Any], + state_template: str, device_registry: dr.DeviceRegistry, ) -> None: """Test remove the device registry configuration entry when the device changes.""" @@ -337,9 +180,8 @@ async def test_config_flow_device( result["flow_id"], { "name": "My template", + "state": state_template, "device_id": device_id, - **state_template, - **extra_input, }, ) await hass.async_block_till_done() @@ -349,10 +191,9 @@ async def test_config_flow_device( assert result["data"] == {} assert result["options"] == { "name": "My template", + "state": state_template, "template_type": template_type, "device_id": device_id, - **state_template, - **extra_options, } assert len(mock_setup_entry.mock_calls) == 1 @@ -360,10 +201,9 @@ async def test_config_flow_device( assert config_entry.data == {} assert config_entry.options == { "name": "My template", + "state": state_template, "template_type": template_type, "device_id": device_id, - **state_template, - **extra_options, } @@ -374,8 +214,8 @@ def get_suggested(schema, key): if k.description is None or "suggested_value" not in k.description: return None return k.description["suggested_value"] - # If the desired key is missing from the schema, return None - return None + # Wanted key absent from schema + raise KeyError("Wanted key absent from schema") @pytest.mark.parametrize( @@ -387,154 +227,37 @@ def get_suggested(schema, key): "input_states", "extra_options", "options_options", - "key_template", ), [ ( "binary_sensor", - { - "state": "{{ states('binary_sensor.one') == 'on' or states('binary_sensor.two') == 'on' }}" - }, - { - "state": "{{ states('binary_sensor.one') == 'on' and states('binary_sensor.two') == 'on' }}" - }, + "{{ states('binary_sensor.one') == 'on' or states('binary_sensor.two') == 'on' }}", + "{{ states('binary_sensor.one') == 'on' and states('binary_sensor.two') == 'on' }}", ["on", "off"], {"one": "on", "two": "off"}, {}, {}, - "state", ), ( "sensor", - { - "state": "{{ float(states('sensor.one')) + float(states('sensor.two')) }}" - }, - { - "state": "{{ float(states('sensor.one')) - float(states('sensor.two')) }}" - }, + "{{ float(states('sensor.one')) + float(states('sensor.two')) }}", + "{{ float(states('sensor.one')) - float(states('sensor.two')) }}", ["50.0", "10.0"], {"one": "30.0", "two": "20.0"}, {}, {}, - "state", - ), - ( - "button", - {}, - {}, - ["unknown", "unknown"], - {"one": "30.0", "two": "20.0"}, - { - "device_class": "restart", - "press": [ - { - "action": "input_boolean.toggle", - "target": {"entity_id": "input_boolean.test"}, - "data": {}, - } - ], - }, - { - "press": [ - { - "action": "input_boolean.toggle", - "target": {"entity_id": "input_boolean.test"}, - "data": {}, - } - ], - }, - "state", - ), - ( - "image", - { - "url": "{{ states('sensor.one') }}", - }, - { - "url": "{{ states('sensor.two') }}", - }, - ["2024-07-09T00:00:00+00:00", "2024-07-09T00:00:00+00:00"], - {"one": "http://www.test.com", "two": "http://www.test2.com"}, - {"verify_ssl": True}, - { - "url": "{{ states('sensor.two') }}", - "verify_ssl": True, - }, - "url", - ), - ( - "number", - {"state": "{{ states('number.one') }}"}, - {"state": "{{ states('number.two') 
}}"}, - ["30.0", "20.0"], - {"one": "30.0", "two": "20.0"}, - { - "min": 0, - "max": 100, - "step": 0.1, - "unit_of_measurement": "cm", - "set_value": { - "action": "input_number.set_value", - "target": {"entity_id": "input_number.test"}, - "data": {"value": "{{ value }}"}, - }, - }, - { - "min": 0, - "max": 100, - "step": 0.1, - "unit_of_measurement": "cm", - "set_value": { - "action": "input_number.set_value", - "target": {"entity_id": "input_number.test"}, - "data": {"value": "{{ value }}"}, - }, - }, - "state", - ), - ( - "alarm_control_panel", - {"value_template": "{{ states('alarm_control_panel.one') }}"}, - {"value_template": "{{ states('alarm_control_panel.two') }}"}, - ["armed_away", "disarmed"], - {"one": "armed_away", "two": "disarmed"}, - {"code_arm_required": True, "code_format": "number"}, - {"code_arm_required": True, "code_format": "number"}, - "value_template", - ), - ( - "select", - {"state": "{{ states('select.one') }}"}, - {"state": "{{ states('select.two') }}"}, - ["on", "off"], - {"one": "on", "two": "off"}, - {"options": "{{ ['off', 'on', 'auto'] }}"}, - {"options": "{{ ['off', 'on', 'auto'] }}"}, - "state", - ), - ( - "switch", - {"value_template": "{{ states('switch.one') }}"}, - {"value_template": "{{ states('switch.two') }}"}, - ["on", "off"], - {"one": "on", "two": "off"}, - {}, - {}, - "value_template", ), ], ) -@pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") async def test_options( hass: HomeAssistant, - template_type: str, - old_state_template: dict[str, Any], - new_state_template: dict[str, Any], - template_state: list[str], - input_states: dict[str, Any], - extra_options: dict[str, Any], - options_options: dict[str, Any], - key_template: str, + template_type, + old_state_template, + new_state_template, + template_state, + input_states, + extra_options, + options_options, ) -> None: """Test reconfiguring.""" input_entities = ["one", "two"] @@ -549,8 +272,8 @@ async def test_options( domain=DOMAIN, options={ "name": "My template", + "state": old_state_template, "template_type": template_type, - **old_state_template, **extra_options, }, title="My template", @@ -568,30 +291,25 @@ async def test_options( result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == template_type - assert get_suggested( - result["data_schema"].schema, key_template - ) == old_state_template.get(key_template) + assert get_suggested(result["data_schema"].schema, "state") == old_state_template assert "name" not in result["data_schema"].schema result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={ - **new_state_template, - **options_options, - }, + user_input={"state": new_state_template, **options_options}, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "name": "My template", + "state": new_state_template, "template_type": template_type, - **new_state_template, **extra_options, } assert config_entry.data == {} assert config_entry.options == { "name": "My template", + "state": new_state_template, "template_type": template_type, - **new_state_template, **extra_options, } assert config_entry.title == "My template" @@ -616,7 +334,7 @@ async def test_options( assert result["step_id"] == template_type assert get_suggested(result["data_schema"].schema, "name") is None - assert get_suggested(result["data_schema"].schema, key_template) is None + assert get_suggested(result["data_schema"].schema, "state") is None 
@pytest.mark.parametrize( @@ -656,7 +374,7 @@ async def test_config_flow_preview( template_type: str, state_template: str, extra_user_input: dict[str, Any], - input_states: dict[str, Any], + input_states: list[str], template_states: str, extra_attributes: list[dict[str, Any]], listeners: list[list[str]], @@ -794,7 +512,7 @@ EARLY_END_ERROR = "invalid template (TemplateSyntaxError: unexpected 'end of tem ), "unit_of_measurement": ( "'None' is not a valid unit for device class 'energy'; " - "expected one of 'cal', 'Gcal', 'GJ', 'GWh', 'J', 'kcal', 'kJ', 'kWh', 'Mcal', 'MJ', 'MWh', 'TWh', 'Wh'" + "expected one of 'GJ', 'kWh', 'MJ', 'MWh', 'Wh'" ), }, ), @@ -806,7 +524,7 @@ async def test_config_flow_preview_bad_input( template_type: str, state_template: str, extra_user_input: dict[str, str], - error: dict[str, str], + error: str, ) -> None: """Test the config flow preview.""" client = await hass_ws_client(hass) @@ -1118,7 +836,7 @@ async def test_option_flow_preview( new_state_template: str, extra_config_flow_data: dict[str, Any], extra_user_input: dict[str, Any], - input_states: dict[str, Any], + input_states: list[str], template_state: str, extra_attributes: dict[str, Any], listeners: list[str], @@ -1225,77 +943,22 @@ async def test_option_flow_sensor_preview_config_entry_removed( ( "template_type", "state_template", - "extra_input", - "extra_options", ), [ ( "sensor", - {"state": "{{ 15 }}"}, - {}, - {}, + "{{ 15 }}", ), ( "binary_sensor", - {"state": "{{ false }}"}, - {}, - {}, - ), - ( - "button", - {}, - {}, - {}, - ), - ( - "image", - { - "url": "{{ states('sensor.one') }}", - "verify_ssl": True, - }, - {}, - {}, - ), - ( - "number", - {"state": "{{ states('number.one') }}"}, - { - "min": 0, - "max": 100, - "step": 0.1, - }, - { - "min": 0, - "max": 100, - "step": 0.1, - }, - ), - ( - "alarm_control_panel", - {"value_template": "{{ states('alarm_control_panel.one') }}"}, - {"code_arm_required": True, "code_format": "number"}, - {"code_arm_required": True, "code_format": "number"}, - ), - ( - "select", - {"state": "{{ states('select.one') }}"}, - {"options": "{{ ['off', 'on', 'auto'] }}"}, - {"options": "{{ ['off', 'on', 'auto'] }}"}, - ), - ( - "switch", - {"value_template": "{{ false }}"}, - {}, - {}, + "{{ false }}", ), ], ) async def test_options_flow_change_device( hass: HomeAssistant, template_type: str, - state_template: dict[str, Any], - extra_input: dict[str, Any], - extra_options: dict[str, Any], + state_template: str, device_registry: dr.DeviceRegistry, ) -> None: """Test remove the device registry configuration entry when the device changes.""" @@ -1329,12 +992,11 @@ async def test_options_flow_change_device( domain=DOMAIN, options={ "template_type": template_type, - "name": "My template", + "name": "Test", + "state": state_template, "device_id": device_id1, - **state_template, - **extra_options, }, - title="Template", + title="Sensor template", ) template_config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(template_config_entry.entry_id) @@ -1349,26 +1011,23 @@ async def test_options_flow_change_device( result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ + "state": state_template, "device_id": device_id2, - **state_template, - **extra_input, }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "template_type": template_type, - "name": "My template", + "name": "Test", + "state": state_template, "device_id": device_id2, - **state_template, - **extra_input, } assert 
template_config_entry.data == {} assert template_config_entry.options == { "template_type": template_type, - "name": "My template", + "name": "Test", + "state": state_template, "device_id": device_id2, - **state_template, - **extra_options, } # Remove link with device @@ -1380,23 +1039,20 @@ async def test_options_flow_change_device( result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ - **state_template, - **extra_input, + "state": state_template, }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "template_type": template_type, - "name": "My template", - **state_template, - **extra_input, + "name": "Test", + "state": state_template, } assert template_config_entry.data == {} assert template_config_entry.options == { "template_type": template_type, - "name": "My template", - **state_template, - **extra_options, + "name": "Test", + "state": state_template, } # Change to link to device 1 @@ -1408,24 +1064,21 @@ async def test_options_flow_change_device( result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ + "state": state_template, "device_id": device_id1, - **state_template, - **extra_input, }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "template_type": template_type, - "name": "My template", + "name": "Test", + "state": state_template, "device_id": device_id1, - **state_template, - **extra_input, } assert template_config_entry.data == {} assert template_config_entry.options == { "template_type": template_type, - "name": "My template", + "name": "Test", + "state": state_template, "device_id": device_id1, - **state_template, - **extra_options, } diff --git a/tests/components/template/test_cover.py b/tests/components/template/test_cover.py index c49db59c2ee..2674b9697ed 100644 --- a/tests/components/template/test_cover.py +++ b/tests/components/template/test_cover.py @@ -5,12 +5,7 @@ from typing import Any import pytest from homeassistant import setup -from homeassistant.components.cover import ( - ATTR_POSITION, - ATTR_TILT_POSITION, - DOMAIN as COVER_DOMAIN, - CoverState, -) +from homeassistant.components.cover import ATTR_POSITION, ATTR_TILT_POSITION, DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, @@ -22,8 +17,12 @@ from homeassistant.const import ( SERVICE_STOP_COVER, SERVICE_TOGGLE, SERVICE_TOGGLE_COVER_TILT, + STATE_CLOSED, + STATE_CLOSING, STATE_OFF, STATE_ON, + STATE_OPEN, + STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -52,13 +51,13 @@ OPEN_CLOSE_COVER_CONFIG = { } -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( ("config", "states"), [ ( { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -69,24 +68,10 @@ OPEN_CLOSE_COVER_CONFIG = { } }, [ - ("cover.test_state", CoverState.OPEN, CoverState.OPEN, {}, -1, ""), - ("cover.test_state", CoverState.CLOSED, CoverState.CLOSED, {}, -1, ""), - ( - "cover.test_state", - CoverState.OPENING, - CoverState.OPENING, - {}, - -1, - "", - ), - ( - "cover.test_state", - CoverState.CLOSING, - CoverState.CLOSING, - {}, - -1, - "", - ), + ("cover.test_state", STATE_OPEN, STATE_OPEN, {}, -1, ""), + ("cover.test_state", STATE_CLOSED, STATE_CLOSED, {}, -1, ""), + ("cover.test_state", STATE_OPENING, STATE_OPENING, {}, -1, ""), + ("cover.test_state", STATE_CLOSING, STATE_CLOSING, {}, -1, ""), ( "cover.test_state", "dog", @@ 
-95,7 +80,7 @@ OPEN_CLOSE_COVER_CONFIG = { -1, "Received invalid cover is_on state: dog", ), - ("cover.test_state", CoverState.OPEN, CoverState.OPEN, {}, -1, ""), + ("cover.test_state", STATE_OPEN, STATE_OPEN, {}, -1, ""), ( "cover.test_state", "cat", @@ -104,7 +89,7 @@ OPEN_CLOSE_COVER_CONFIG = { -1, "Received invalid cover is_on state: cat", ), - ("cover.test_state", CoverState.CLOSED, CoverState.CLOSED, {}, -1, ""), + ("cover.test_state", STATE_CLOSED, STATE_CLOSED, {}, -1, ""), ( "cover.test_state", "bear", @@ -117,7 +102,7 @@ OPEN_CLOSE_COVER_CONFIG = { ), ( { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -131,45 +116,17 @@ OPEN_CLOSE_COVER_CONFIG = { } }, [ - ("cover.test_state", CoverState.OPEN, STATE_UNKNOWN, {}, -1, ""), - ("cover.test_state", CoverState.CLOSED, STATE_UNKNOWN, {}, -1, ""), - ( - "cover.test_state", - CoverState.OPENING, - CoverState.OPENING, - {}, - -1, - "", - ), - ( - "cover.test_state", - CoverState.CLOSING, - CoverState.CLOSING, - {}, - -1, - "", - ), - ( - "cover.test", - CoverState.CLOSED, - CoverState.CLOSING, - {"position": 0}, - 0, - "", - ), - ("cover.test_state", CoverState.OPEN, CoverState.CLOSED, {}, -1, ""), - ( - "cover.test", - CoverState.CLOSED, - CoverState.OPEN, - {"position": 10}, - 10, - "", - ), + ("cover.test_state", STATE_OPEN, STATE_UNKNOWN, {}, -1, ""), + ("cover.test_state", STATE_CLOSED, STATE_UNKNOWN, {}, -1, ""), + ("cover.test_state", STATE_OPENING, STATE_OPENING, {}, -1, ""), + ("cover.test_state", STATE_CLOSING, STATE_CLOSING, {}, -1, ""), + ("cover.test", STATE_CLOSED, STATE_CLOSING, {"position": 0}, 0, ""), + ("cover.test_state", STATE_OPEN, STATE_CLOSED, {}, -1, ""), + ("cover.test", STATE_CLOSED, STATE_OPEN, {"position": 10}, 10, ""), ( "cover.test_state", "dog", - CoverState.OPEN, + STATE_OPEN, {}, -1, "Received invalid cover is_on state: dog", @@ -178,9 +135,8 @@ OPEN_CLOSE_COVER_CONFIG = { ), ], ) -@pytest.mark.usefixtures("start_ha") async def test_template_state_text( - hass: HomeAssistant, states, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, states, start_ha, caplog: pytest.LogCaptureFixture ) -> None: """Test the state text of a template.""" state = hass.states.get("cover.test_template_cover") @@ -196,13 +152,13 @@ async def test_template_state_text( assert text in caplog.text -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( ("config", "entity", "set_state", "test_state", "attr"), [ ( { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -222,7 +178,7 @@ async def test_template_state_text( ), ( { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -242,13 +198,13 @@ async def test_template_state_text( ), ], ) -@pytest.mark.usefixtures("start_ha") async def test_template_state_text_ignored_if_none_or_empty( hass: HomeAssistant, entity: str, set_state: str, test_state: str, attr: dict[str, Any], + start_ha, caplog: pytest.LogCaptureFixture, ) -> None: """Test ignoring an empty state text of a template.""" @@ -262,12 +218,12 @@ async def test_template_state_text_ignored_if_none_or_empty( assert "ERROR" not in caplog.text -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { 
@@ -279,19 +235,18 @@ async def test_template_state_text_ignored_if_none_or_empty( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_state_boolean(hass: HomeAssistant) -> None: +async def test_template_state_boolean(hass: HomeAssistant, start_ha) -> None: """Test the value_template attribute.""" state = hass.states.get("cover.test_template_cover") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -305,18 +260,17 @@ async def test_template_state_boolean(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_template_position( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, start_ha, caplog: pytest.LogCaptureFixture ) -> None: """Test the position_template attribute.""" - hass.states.async_set("cover.test", CoverState.OPEN) + hass.states.async_set("cover.test", STATE_OPEN) attrs = {} for set_state, pos, test_state in ( - (CoverState.CLOSED, 42, CoverState.OPEN), - (CoverState.OPEN, 0.0, CoverState.CLOSED), - (CoverState.CLOSED, None, STATE_UNKNOWN), + (STATE_CLOSED, 42, STATE_OPEN), + (STATE_OPEN, 0.0, STATE_CLOSED), + (STATE_CLOSED, None, STATE_UNKNOWN), ): attrs["position"] = pos hass.states.async_set("cover.test", set_state, attributes=attrs) @@ -327,12 +281,12 @@ async def test_template_position( assert "ValueError" not in caplog.text -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -344,20 +298,19 @@ async def test_template_position( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_not_optimistic(hass: HomeAssistant) -> None: +async def test_template_not_optimistic(hass: HomeAssistant, start_ha) -> None: """Test the is_closed attribute.""" state = hass.states.get("cover.test_template_cover") assert state.state == STATE_UNKNOWN -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( ("config", "tilt_position"), [ ( { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -372,7 +325,7 @@ async def test_template_not_optimistic(hass: HomeAssistant) -> None: ), ( { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -387,19 +340,20 @@ async def test_template_not_optimistic(hass: HomeAssistant) -> None: ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_tilt(hass: HomeAssistant, tilt_position: float | None) -> None: +async def test_template_tilt( + hass: HomeAssistant, tilt_position: float | None, start_ha +) -> None: """Test the tilt_template attribute.""" state = hass.states.get("cover.test_template_cover") assert state.attributes.get("current_tilt_position") == tilt_position -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -411,7 +365,7 @@ async def test_template_tilt(hass: 
HomeAssistant, tilt_position: float | None) - } }, { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -430,26 +384,25 @@ async def test_template_tilt(hass: HomeAssistant, tilt_position: float | None) - }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_out_of_bounds(hass: HomeAssistant) -> None: +async def test_template_out_of_bounds(hass: HomeAssistant, start_ha) -> None: """Test template out-of-bounds condition.""" state = hass.states.get("cover.test_template_cover") assert state.attributes.get("current_tilt_position") is None assert state.attributes.get("current_position") is None -@pytest.mark.parametrize(("count", "domain"), [(0, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(0, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": {"test_template_cover": {"value_template": "{{ 1 == 1 }}"}}, } }, { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -467,21 +420,20 @@ async def test_template_out_of_bounds(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_template_open_or_position( - hass: HomeAssistant, caplog_setup_text + hass: HomeAssistant, start_ha, caplog_setup_text ) -> None: """Test that at least one of open_cover or set_position is used.""" assert hass.states.async_all("cover") == [] assert "Invalid config for 'cover' from integration 'template'" in caplog_setup_text -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -493,14 +445,15 @@ async def test_template_open_or_position( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_open_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_open_action( + hass: HomeAssistant, start_ha, calls: list[ServiceCall] +) -> None: """Test the open_cover command.""" state = hass.states.get("cover.test_template_cover") - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() @@ -509,12 +462,12 @@ async def test_open_action(hass: HomeAssistant, calls: list[ServiceCall]) -> Non assert calls[0].data["caller"] == "cover.test_template_cover" -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -533,19 +486,20 @@ async def test_open_action(hass: HomeAssistant, calls: list[ServiceCall]) -> Non }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_close_stop_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_close_stop_action( + hass: HomeAssistant, start_ha, calls: list[ServiceCall] +) -> None: """Test the close-cover and stop_cover commands.""" state = hass.states.get("cover.test_template_cover") - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN await hass.services.async_call( - COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, 
SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() await hass.services.async_call( - COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() @@ -563,8 +517,9 @@ async def test_close_stop_action(hass: HomeAssistant, calls: list[ServiceCall]) {"input_number": {"test": {"min": "0", "max": "100", "initial": "42"}}}, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_set_position(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_set_position( + hass: HomeAssistant, start_ha, calls: list[ServiceCall] +) -> None: """Test the set_position command.""" with assert_setup_component(1, "cover"): assert await setup.async_setup_component( @@ -599,7 +554,7 @@ async def test_set_position(hass: HomeAssistant, calls: list[ServiceCall]) -> No assert state.state == STATE_UNKNOWN await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -610,7 +565,7 @@ async def test_set_position(hass: HomeAssistant, calls: list[ServiceCall]) -> No assert calls[-1].data["position"] == 100 await hass.services.async_call( - COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -621,7 +576,7 @@ async def test_set_position(hass: HomeAssistant, calls: list[ServiceCall]) -> No assert calls[-1].data["position"] == 0 await hass.services.async_call( - COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -632,7 +587,7 @@ async def test_set_position(hass: HomeAssistant, calls: list[ServiceCall]) -> No assert calls[-1].data["position"] == 100 await hass.services.async_call( - COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -643,7 +598,7 @@ async def test_set_position(hass: HomeAssistant, calls: list[ServiceCall]) -> No assert calls[-1].data["position"] == 0 await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_POSITION: 25}, blocking=True, @@ -657,12 +612,12 @@ async def test_set_position(hass: HomeAssistant, calls: list[ServiceCall]) -> No assert calls[-1].data["position"] == 25 -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -693,17 +648,17 @@ async def test_set_position(hass: HomeAssistant, calls: list[ServiceCall]) -> No (SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, 0), ], ) -@pytest.mark.usefixtures("start_ha") async def test_set_tilt_position( hass: HomeAssistant, service, attr, + start_ha, calls: list[ServiceCall], tilt_position, ) -> 
None: """Test the set_tilt_position command.""" await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, service, attr, blocking=True, @@ -716,12 +671,12 @@ async def test_set_tilt_position( assert calls[-1].data["tilt_position"] == tilt_position -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -732,16 +687,15 @@ async def test_set_tilt_position( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_set_position_optimistic( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, start_ha, calls: list[ServiceCall] ) -> None: """Test optimistic position mode.""" state = hass.states.get("cover.test_template_cover") assert state.attributes.get("current_position") is None await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_POSITION: 42}, blocking=True, @@ -751,25 +705,25 @@ async def test_set_position_optimistic( assert state.attributes.get("current_position") == 42.0 for service, test_state in ( - (SERVICE_CLOSE_COVER, CoverState.CLOSED), - (SERVICE_OPEN_COVER, CoverState.OPEN), - (SERVICE_TOGGLE, CoverState.CLOSED), - (SERVICE_TOGGLE, CoverState.OPEN), + (SERVICE_CLOSE_COVER, STATE_CLOSED), + (SERVICE_OPEN_COVER, STATE_OPEN), + (SERVICE_TOGGLE, STATE_CLOSED), + (SERVICE_TOGGLE, STATE_OPEN), ): await hass.services.async_call( - COVER_DOMAIN, service, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, service, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") assert state.state == test_state -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -782,16 +736,15 @@ async def test_set_position_optimistic( }, ], ) -@pytest.mark.usefixtures("calls", "start_ha") async def test_set_tilt_position_optimistic( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, start_ha, calls: list[ServiceCall] ) -> None: """Test the optimistic tilt_position mode.""" state = hass.states.get("cover.test_template_cover") assert state.attributes.get("current_tilt_position") is None await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_TILT_POSITION: 42}, blocking=True, @@ -807,19 +760,19 @@ async def test_set_tilt_position_optimistic( (SERVICE_TOGGLE_COVER_TILT, 100.0), ): await hass.services.async_call( - COVER_DOMAIN, service, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + DOMAIN, service, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") assert state.attributes.get("current_tilt_position") == pos -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -834,13 +787,12 @@ async def test_set_tilt_position_optimistic( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_icon_template(hass: HomeAssistant) -> None: +async def 
test_icon_template(hass: HomeAssistant, start_ha) -> None: """Test icon template.""" state = hass.states.get("cover.test_template_cover") assert state.attributes.get("icon") == "" - state = hass.states.async_set("cover.test_state", CoverState.OPEN) + state = hass.states.async_set("cover.test_state", STATE_OPEN) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -848,12 +800,12 @@ async def test_icon_template(hass: HomeAssistant) -> None: assert state.attributes["icon"] == "mdi:check" -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -870,13 +822,12 @@ async def test_icon_template(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_entity_picture_template(hass: HomeAssistant) -> None: +async def test_entity_picture_template(hass: HomeAssistant, start_ha) -> None: """Test icon template.""" state = hass.states.get("cover.test_template_cover") assert state.attributes.get("entity_picture") == "" - state = hass.states.async_set("cover.test_state", CoverState.OPEN) + state = hass.states.async_set("cover.test_state", STATE_OPEN) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -884,12 +835,12 @@ async def test_entity_picture_template(hass: HomeAssistant) -> None: assert state.attributes["entity_picture"] == "/local/cover.png" -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -904,8 +855,7 @@ async def test_entity_picture_template(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_availability_template(hass: HomeAssistant) -> None: +async def test_availability_template(hass: HomeAssistant, start_ha) -> None: """Test availability template.""" hass.states.async_set("availability_state.state", STATE_OFF) await hass.async_block_till_done() @@ -918,12 +868,12 @@ async def test_availability_template(hass: HomeAssistant) -> None: assert hass.states.get("cover.test_template_cover").state != STATE_UNAVAILABLE -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -935,19 +885,20 @@ async def test_availability_template(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_availability_without_availability_template(hass: HomeAssistant) -> None: +async def test_availability_without_availability_template( + hass: HomeAssistant, start_ha +) -> None: """Test that component is available if there is no.""" state = hass.states.get("cover.test_template_cover") assert state.state != STATE_UNAVAILABLE -@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - COVER_DOMAIN: { + DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -960,21 +911,20 @@ async def test_availability_without_availability_template(hass: HomeAssistant) - }, ], ) 
-@pytest.mark.usefixtures("start_ha")
 async def test_invalid_availability_template_keeps_component_available(
-    hass: HomeAssistant, caplog_setup_text
+    hass: HomeAssistant, start_ha, caplog_setup_text
 ) -> None:
     """Test that an invalid availability keeps the device available."""
     assert hass.states.get("cover.test_template_cover") != STATE_UNAVAILABLE
     assert "UndefinedError: 'x' is undefined" in caplog_setup_text
 
 
-@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)])
+@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)])
 @pytest.mark.parametrize(
     "config",
     [
         {
-            COVER_DOMAIN: {
+            DOMAIN: {
                 "platform": "template",
                 "covers": {
                     "test_template_cover": {
@@ -987,19 +937,18 @@ async def test_invalid_availability_template_keeps_component_available(
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
-async def test_device_class(hass: HomeAssistant) -> None:
+async def test_device_class(hass: HomeAssistant, start_ha) -> None:
     """Test device class."""
     state = hass.states.get("cover.test_template_cover")
     assert state.attributes.get("device_class") == "door"
 
 
-@pytest.mark.parametrize(("count", "domain"), [(0, COVER_DOMAIN)])
+@pytest.mark.parametrize(("count", "domain"), [(0, DOMAIN)])
 @pytest.mark.parametrize(
     "config",
     [
         {
-            COVER_DOMAIN: {
+            DOMAIN: {
                 "platform": "template",
                 "covers": {
                     "test_template_cover": {
@@ -1012,19 +961,18 @@ async def test_device_class(hass: HomeAssistant) -> None:
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
-async def test_invalid_device_class(hass: HomeAssistant) -> None:
+async def test_invalid_device_class(hass: HomeAssistant, start_ha) -> None:
     """Test device class."""
     state = hass.states.get("cover.test_template_cover")
     assert not state
 
 
-@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)])
+@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)])
 @pytest.mark.parametrize(
     "config",
     [
         {
-            COVER_DOMAIN: {
+            DOMAIN: {
                 "platform": "template",
                 "covers": {
                     "test_template_cover_01": {
@@ -1042,18 +990,17 @@ async def test_invalid_device_class(hass: HomeAssistant) -> None:
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
-async def test_unique_id(hass: HomeAssistant) -> None:
+async def test_unique_id(hass: HomeAssistant, start_ha) -> None:
     """Test unique_id option only creates one cover per id."""
     assert len(hass.states.async_all()) == 1
 
 
-@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)])
+@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)])
 @pytest.mark.parametrize(
     "config",
     [
         {
-            COVER_DOMAIN: {
+            DOMAIN: {
                 "platform": "template",
                 "covers": {
                     "garage_door": {
@@ -1068,8 +1015,7 @@ async def test_unique_id(hass: HomeAssistant) -> None:
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
-async def test_state_gets_lowercased(hass: HomeAssistant) -> None:
+async def test_state_gets_lowercased(hass: HomeAssistant, start_ha) -> None:
     """Test True/False is lowercased."""
 
     hass.states.async_set("binary_sensor.garage_door_sensor", "off")
@@ -1077,18 +1023,18 @@ async def test_state_gets_lowercased(hass: HomeAssistant) -> None:
     assert len(hass.states.async_all()) == 2
-    assert hass.states.get("cover.garage_door").state == CoverState.OPEN
+    assert hass.states.get("cover.garage_door").state == STATE_OPEN
 
     hass.states.async_set("binary_sensor.garage_door_sensor", "on")
     await hass.async_block_till_done()
 
-    assert hass.states.get("cover.garage_door").state == CoverState.CLOSED
+    assert hass.states.get("cover.garage_door").state == STATE_CLOSED
 
 
-@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)])
+@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)])
 @pytest.mark.parametrize(
     "config",
     [
         {
-            COVER_DOMAIN: {
+            DOMAIN: {
                 "platform": "template",
                 "covers": {
                     "office": {
@@ -1115,9 +1061,8 @@ async def test_state_gets_lowercased(hass: HomeAssistant) -> None:
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
 async def test_self_referencing_icon_with_no_template_is_not_a_loop(
-    hass: HomeAssistant, caplog: pytest.LogCaptureFixture
+    hass: HomeAssistant, start_ha, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test a self referencing icon with no value template is not a loop."""
     assert len(hass.states.async_all()) == 1
diff --git a/tests/components/template/test_fan.py b/tests/components/template/test_fan.py
index e92bc82f5ae..82ad4ede91c 100644
--- a/tests/components/template/test_fan.py
+++ b/tests/components/template/test_fan.py
@@ -11,7 +11,7 @@ from homeassistant.components.fan import (
     ATTR_PRESET_MODE,
     DIRECTION_FORWARD,
     DIRECTION_REVERSE,
-    DOMAIN as FAN_DOMAIN,
+    DOMAIN,
     FanEntityFeature,
     NotValidPresetModeError,
 )
@@ -36,12 +36,12 @@ _OSC_INPUT = "input_select.osc"
 _DIRECTION_INPUT_SELECT = "input_select.direction"
 
 
-@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)])
+@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)])
 @pytest.mark.parametrize(
     "config",
     [
         {
-            FAN_DOMAIN: {
+            DOMAIN: {
                 "platform": "template",
                 "fans": {
                     "test_fan": {
@@ -54,18 +54,17 @@ _DIRECTION_INPUT_SELECT = "input_select.direction"
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
-async def test_missing_optional_config(hass: HomeAssistant) -> None:
+async def test_missing_optional_config(hass: HomeAssistant, start_ha) -> None:
     """Test: missing optional template is ok."""
     _verify(hass, STATE_ON, None, None, None, None)
 
 
-@pytest.mark.parametrize(("count", "domain"), [(0, FAN_DOMAIN)])
+@pytest.mark.parametrize(("count", "domain"), [(0, DOMAIN)])
 @pytest.mark.parametrize(
     "config",
     [
         {
-            FAN_DOMAIN: {
+            DOMAIN: {
                 "platform": "template",
                 "fans": {
                     "platform": "template",
@@ -79,7 +78,7 @@ async def test_missing_optional_config(hass: HomeAssistant) -> None:
             }
         },
         {
-            FAN_DOMAIN: {
+            DOMAIN: {
                 "platform": "template",
                 "fans": {
                     "platform": "template",
@@ -93,7 +92,7 @@ async def test_missing_optional_config(hass: HomeAssistant) -> None:
             }
         },
         {
-            FAN_DOMAIN: {
+            DOMAIN: {
                 "platform": "template",
                 "fans": {
                     "platform": "template",
@@ -108,18 +107,17 @@ async def test_missing_optional_config(hass: HomeAssistant) -> None:
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
-async def test_wrong_template_config(hass: HomeAssistant) -> None:
+async def test_wrong_template_config(hass: HomeAssistant, start_ha) -> None:
     """Test: missing 'value_template' will fail."""
     assert hass.states.async_all("fan") == []
 
 
-@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)])
+@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)])
 @pytest.mark.parametrize(
     "config",
     [
         {
-            FAN_DOMAIN: {
+            DOMAIN: {
                 "platform": "template",
                 "fans": {
                     "test_fan": {
@@ -151,8 +149,7 @@ async def test_wrong_template_config(hass: HomeAssistant) -> None:
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
-async def test_templates_with_entities(hass: HomeAssistant) -> None:
+async def test_templates_with_entities(hass: HomeAssistant, start_ha) -> None:
     """Test templates with values from other entities."""
     _verify(hass, STATE_OFF, 0, None, None, None)
 
@@ -176,13 +173,13 @@ async def test_templates_with_entities(hass: HomeAssistant) -> None:
     _verify(hass, STATE_OFF, 0, True, DIRECTION_FORWARD, None)
 
 
-@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)])
+@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)])
 @pytest.mark.parametrize(
     ("config", "entity", "tests"),
     [
         (
             {
-                FAN_DOMAIN: {
+                DOMAIN: {
                     "platform": "template",
                     "fans": {
                         "test_fan": {
@@ -206,7 +203,7 @@ async def test_templates_with_entities(hass: HomeAssistant) -> None:
         ),
         (
             {
-                FAN_DOMAIN: {
+                DOMAIN: {
                     "platform": "template",
                     "fans": {
                         "test_fan": {
@@ -232,8 +229,9 @@ async def test_templates_with_entities(hass: HomeAssistant) -> None:
         ),
     ],
 )
-@pytest.mark.usefixtures("start_ha")
-async def test_templates_with_entities2(hass: HomeAssistant, entity, tests) -> None:
+async def test_templates_with_entities2(
+    hass: HomeAssistant, entity, tests, start_ha
+) -> None:
     """Test templates with values from other entities."""
     for set_percentage, test_percentage, test_type in tests:
         hass.states.async_set(entity, set_percentage)
@@ -241,12 +239,12 @@ async def test_templates_with_entities2(hass: HomeAssistant, entity, tests) -> N
     _verify(hass, STATE_ON, test_percentage, None, None, test_type)
 
 
-@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)])
+@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)])
 @pytest.mark.parametrize(
     "config",
     [
         {
-            FAN_DOMAIN: {
+            DOMAIN: {
                 "platform": "template",
                 "fans": {
                     "test_fan": {
@@ -264,8 +262,9 @@ async def test_templates_with_entities2(hass: HomeAssistant, entity, tests) -> N
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
-async def test_availability_template_with_entities(hass: HomeAssistant) -> None:
+async def test_availability_template_with_entities(
+    hass: HomeAssistant, start_ha
+) -> None:
     """Test availability templates with values from other entities."""
     for state, test_assert in ((STATE_ON, True), (STATE_OFF, False)):
         hass.states.async_set(_STATE_AVAILABILITY_BOOLEAN, state)
@@ -273,13 +272,13 @@ async def test_availability_template_with_entities(hass: HomeAssistant) -> None:
         assert (hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE) == test_assert
 
 
-@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)])
+@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)])
 @pytest.mark.parametrize(
     ("config", "states"),
     [
         (
             {
-                FAN_DOMAIN: {
+                DOMAIN: {
                     "platform": "template",
                     "fans": {
                         "test_fan": {
@@ -294,7 +293,7 @@ async def test_availability_template_with_entities(hass: HomeAssistant) -> None:
         ),
         (
             {
-                FAN_DOMAIN: {
+                DOMAIN: {
                     "platform": "template",
                     "fans": {
                         "test_fan": {
@@ -312,7 +311,7 @@ async def test_availability_template_with_entities(hass: HomeAssistant) -> None:
         ),
         (
             {
-                FAN_DOMAIN: {
+                DOMAIN: {
                     "platform": "template",
                     "fans": {
                         "test_fan": {
@@ -330,7 +329,7 @@ async def test_availability_template_with_entities(hass: HomeAssistant) -> None:
         ),
         (
             {
-                FAN_DOMAIN: {
+                DOMAIN: {
                     "platform": "template",
                     "fans": {
                         "test_fan": {
@@ -348,18 +347,19 @@ async def test_availability_template_with_entities(hass: HomeAssistant) -> None:
         ),
     ],
 )
-@pytest.mark.usefixtures("start_ha")
-async def test_template_with_unavailable_entities(hass: HomeAssistant, states) -> None:
+async def test_template_with_unavailable_entities(
+    hass: HomeAssistant, states, start_ha
+) -> None:
     """Test unavailability with value_template."""
     _verify(hass, states[0], states[1], states[2], states[3], None)
 
 
-@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)])
+@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)])
 @pytest.mark.parametrize(
     "config",
     [
         {
-            FAN_DOMAIN: {
+            DOMAIN: {
                 "platform": "template",
                 "fans": {
                     "test_fan": {
@@ -378,9 +378,8 @@ async def
test_template_with_unavailable_entities(hass: HomeAssistant, states) - }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, caplog_setup_text + hass: HomeAssistant, start_ha, caplog_setup_text ) -> None: """Test that an invalid availability keeps the device available.""" assert hass.states.get("fan.test_fan").state != STATE_UNAVAILABLE @@ -700,13 +699,13 @@ async def test_set_invalid_osc(hass: HomeAssistant, calls: list[ServiceCall]) -> def _verify( - hass: HomeAssistant, - expected_state: str, - expected_percentage: int | None, - expected_oscillating: bool | None, - expected_direction: str | None, - expected_preset_mode: str | None, -) -> None: + hass, + expected_state, + expected_percentage, + expected_oscillating, + expected_direction, + expected_preset_mode, +): """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes @@ -717,7 +716,7 @@ def _verify( assert attributes.get(ATTR_PRESET_MODE) == expected_preset_mode -async def _register_fan_sources(hass: HomeAssistant) -> None: +async def _register_fan_sources(hass): with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} @@ -761,11 +760,8 @@ async def _register_fan_sources(hass: HomeAssistant) -> None: async def _register_components( - hass: HomeAssistant, - speed_list: list[str] | None = None, - preset_modes: list[str] | None = None, - speed_count: int | None = None, -) -> None: + hass, speed_list=None, preset_modes=None, speed_count=None +): """Register basic components for testing.""" await _register_fan_sources(hass) @@ -904,12 +900,12 @@ async def _register_components( await hass.async_block_till_done() -@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - FAN_DOMAIN: { + DOMAIN: { "platform": "template", "fans": { "test_template_fan_01": { @@ -941,8 +937,7 @@ async def _register_components( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_unique_id(hass: HomeAssistant) -> None: +async def test_unique_id(hass: HomeAssistant, start_ha) -> None: """Test unique_id option only creates one fan per id.""" assert len(hass.states.async_all()) == 1 @@ -1026,12 +1021,12 @@ async def test_implemented_percentage( assert attributes.get("supported_features") & FanEntityFeature.SET_SPEED -@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) @pytest.mark.parametrize( "config", [ { - FAN_DOMAIN: { + DOMAIN: { "platform": "template", "fans": { "mechanical_ventilation": { @@ -1084,8 +1079,7 @@ async def test_implemented_percentage( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_implemented_preset_mode(hass: HomeAssistant) -> None: +async def test_implemented_preset_mode(hass: HomeAssistant, start_ha) -> None: """Test a fan that implements preset_mode.""" assert len(hass.states.async_all()) == 1 diff --git a/tests/components/template/test_image.py b/tests/components/template/test_image.py index 101b475956a..bda9e2530ca 100644 --- a/tests/components/template/test_image.py +++ b/tests/components/template/test_image.py @@ -8,7 +8,6 @@ import httpx from PIL import Image import pytest import respx -from syrupy.assertion import SnapshotAssertion from homeassistant import setup from homeassistant.components.input_text 
import ( @@ -16,13 +15,12 @@ from homeassistant.components.input_text import ( DOMAIN as INPUT_TEXT_DOMAIN, SERVICE_SET_VALUE as INPUT_TEXT_SERVICE_SET_VALUE, ) -from homeassistant.components.template import DOMAIN from homeassistant.const import ATTR_ENTITY_PICTURE, CONF_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import MockConfigEntry, assert_setup_component +from tests.common import assert_setup_component from tests.typing import ClientSessionGenerator _DEFAULT = object() @@ -76,39 +74,6 @@ async def _assert_state( assert body == expected_image -@respx.mock -@pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") -async def test_setup_config_entry( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - imgbytes_jpg, -) -> None: - """Test the config flow.""" - - respx.get("http://example.com").respond( - stream=imgbytes_jpg, content_type="image/jpeg" - ) - - template_config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - "name": "My template", - "template_type": "image", - "url": "http://example.com", - }, - title="My template", - ) - template_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(template_config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("image.my_template") - assert state is not None - assert state.state == "2024-07-09T00:00:00+00:00" - - @respx.mock @pytest.mark.freeze_time("2023-04-01 00:00:00+00:00") async def test_platform_config( @@ -538,47 +503,3 @@ async def test_trigger_image_custom_entity_picture( imgbytes_jpg, expected_entity_picture="http://example2.com", ) - - -@respx.mock -async def test_device_id( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test for device for image template.""" - - device_config_entry = MockConfigEntry() - device_config_entry.add_to_hass(hass) - device_entry = device_registry.async_get_or_create( - config_entry_id=device_config_entry.entry_id, - identifiers={("test", "identifier_test")}, - connections={("mac", "30:31:32:33:34:35")}, - ) - await hass.async_block_till_done() - assert device_entry is not None - assert device_entry.id is not None - - respx.get("http://example.com").respond( - stream=imgbytes_jpg, content_type="image/jpeg" - ) - - template_config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - "name": "My template", - "template_type": "image", - "url": "http://example.com", - "device_id": device_entry.id, - }, - title="My template", - ) - template_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(template_config_entry.entry_id) - await hass.async_block_till_done() - - template_entity = entity_registry.async_get("image.my_template") - assert template_entity is not None - assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_init.py b/tests/components/template/test_init.py index cab940d4c66..d13fd9035b0 100644 --- a/tests/components/template/test_init.py +++ b/tests/components/template/test_init.py @@ -1,4 +1,4 @@ -"""Test for Template helper.""" +"""The test for the Template sensor platform.""" from datetime import timedelta from unittest.mock import patch @@ -7,9 +7,9 @@ import pytest from homeassistant import config from homeassistant.components.template import 
DOMAIN -from homeassistant.const import SERVICE_RELOAD from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.reload import SERVICE_RELOAD from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -51,8 +51,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed, get_fixture_p }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_reloadable(hass: HomeAssistant) -> None: +async def test_reloadable(hass: HomeAssistant, start_ha) -> None: """Test that we can reload.""" hass.states.async_set("sensor.test_sensor", "mytest") await hass.async_block_till_done() @@ -103,8 +102,7 @@ async def test_reloadable(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_reloadable_can_remove(hass: HomeAssistant) -> None: +async def test_reloadable_can_remove(hass: HomeAssistant, start_ha) -> None: """Test that we can reload and remove all template sensors.""" hass.states.async_set("sensor.test_sensor", "mytest") await hass.async_block_till_done() @@ -134,8 +132,9 @@ async def test_reloadable_can_remove(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_reloadable_stops_on_invalid_config(hass: HomeAssistant) -> None: +async def test_reloadable_stops_on_invalid_config( + hass: HomeAssistant, start_ha +) -> None: """Test we stop the reload if configuration.yaml is completely broken.""" hass.states.async_set("sensor.test_sensor", "mytest") await hass.async_block_till_done() @@ -163,8 +162,9 @@ async def test_reloadable_stops_on_invalid_config(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_reloadable_handles_partial_valid_config(hass: HomeAssistant) -> None: +async def test_reloadable_handles_partial_valid_config( + hass: HomeAssistant, start_ha +) -> None: """Test we can still setup valid sensors when configuration.yaml has a broken entry.""" hass.states.async_set("sensor.test_sensor", "mytest") await hass.async_block_till_done() @@ -195,8 +195,7 @@ async def test_reloadable_handles_partial_valid_config(hass: HomeAssistant) -> N }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_reloadable_multiple_platforms(hass: HomeAssistant) -> None: +async def test_reloadable_multiple_platforms(hass: HomeAssistant, start_ha) -> None: """Test that we can reload.""" hass.states.async_set("sensor.test_sensor", "mytest") await async_setup_component( @@ -240,9 +239,8 @@ async def test_reloadable_multiple_platforms(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_reload_sensors_that_reference_other_template_sensors( - hass: HomeAssistant, + hass: HomeAssistant, start_ha ) -> None: """Test that we can reload sensor that reference other template sensors.""" await async_yaml_patch_helper(hass, "ref_configuration.yaml") @@ -260,7 +258,7 @@ async def test_reload_sensors_that_reference_other_template_sensors( assert hass.states.get("sensor.test3").state == "2" -async def async_yaml_patch_helper(hass: HomeAssistant, filename: str) -> None: +async def async_yaml_patch_helper(hass, filename): """Help update configuration.yaml.""" yaml_path = get_fixture_path(filename, "template") with patch.object(config, "YAML_CONFIG_FILE", yaml_path): @@ -273,113 +271,13 @@ async def async_yaml_patch_helper(hass: HomeAssistant, filename: str) -> None: await hass.async_block_till_done() -@pytest.mark.parametrize( - ( - "config_entry_options", 
- "config_user_input", - ), - [ - ( - { - "name": "My template", - "state": "{{10}}", - "template_type": "sensor", - }, - { - "state": "{{12}}", - }, - ), - ( - { - "template_type": "binary_sensor", - "name": "My template", - "state": "{{1 == 1}}", - }, - { - "state": "{{1 == 2}}", - }, - ), - ( - { - "template_type": "image", - "name": "My template", - "url": "http://example.com", - }, - { - "url": "http://example.com", - }, - ), - ( - { - "template_type": "button", - "name": "My template", - }, - {}, - ), - ( - { - "template_type": "number", - "name": "My template", - "state": "{{ 10 }}", - "min": 0, - "max": 100, - "step": 0.1, - "set_value": { - "action": "input_number.set_value", - "target": {"entity_id": "input_number.test"}, - "data": {"value": "{{ value }}"}, - }, - }, - { - "state": "{{ 11 }}", - "min": 0, - "max": 100, - "step": 0.1, - "set_value": { - "action": "input_number.set_value", - "target": {"entity_id": "input_number.test"}, - "data": {"value": "{{ value }}"}, - }, - }, - ), - ( - { - "template_type": "select", - "name": "My template", - "state": "{{ 'on' }}", - "options": "{{ ['off', 'on', 'auto'] }}", - }, - { - "state": "{{ 'on' }}", - "options": "{{ ['off', 'on', 'auto'] }}", - }, - ), - ( - { - "template_type": "switch", - "name": "My template", - "value_template": "{{ true }}", - }, - { - "value_template": "{{ true }}", - }, - ), - ], -) async def test_change_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - config_entry_options: dict[str, str], - config_user_input: dict[str, str], ) -> None: - """Test the link between the device and the config entry. + """Test remove the device registry configuration entry when the device changes.""" - Test, for each platform, that the device was linked to the - config entry and the link was removed when the device is - changed in the integration options. 
- """ - - # Configure devices registry + # Configure a device registry entry_device1 = MockConfigEntry() entry_device1.add_to_hass(hass) device1 = device_registry.async_get_or_create( @@ -402,94 +300,60 @@ async def test_change_device( device_id2 = device2.id assert device_id2 is not None - # Setup the config entry - template_config_entry = MockConfigEntry( + # Setup the config entry (binary_sensor) + sensor_config_entry = MockConfigEntry( data={}, domain=DOMAIN, - options=config_entry_options | {"device_id": device_id1}, - title="Template", + options={ + "template_type": "binary_sensor", + "name": "Teste", + "state": "{{15}}", + "device_id": device_id1, + }, + title="Binary sensor template", ) - template_config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(template_config_entry.entry_id) + sensor_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(sensor_config_entry.entry_id) await hass.async_block_till_done() - # Confirm that the config entry has been added to the device 1 registry (current) + # Confirm that the configuration entry has been added to the device 1 registry (current) current_device = device_registry.async_get(device_id=device_id1) - assert template_config_entry.entry_id in current_device.config_entries + assert sensor_config_entry.entry_id in current_device.config_entries - # Change config options to use device 2 and reload the integration - result = await hass.config_entries.options.async_init( - template_config_entry.entry_id - ) + # Change configuration options to use device 2 and reload the integration + result = await hass.config_entries.options.async_init(sensor_config_entry.entry_id) result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input=config_user_input | {"device_id": device_id2}, + user_input={ + "state": "{{15}}", + "device_id": device_id2, + }, ) await hass.async_block_till_done() - # Confirm that the config entry has been removed from the device 1 registry + # Confirm that the configuration entry has been removed from the device 1 registry (previous) previous_device = device_registry.async_get(device_id=device_id1) - assert template_config_entry.entry_id not in previous_device.config_entries + assert sensor_config_entry.entry_id not in previous_device.config_entries - # Confirm that the config entry has been added to the device 2 registry (current) + # Confirm that the configuration entry has been added to the device 2 registry (current) current_device = device_registry.async_get(device_id=device_id2) - assert template_config_entry.entry_id in current_device.config_entries + assert sensor_config_entry.entry_id in current_device.config_entries - # Change the config options to remove the device and reload the integration - result = await hass.config_entries.options.async_init( - template_config_entry.entry_id - ) + result = await hass.config_entries.options.async_init(sensor_config_entry.entry_id) result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input=config_user_input, + user_input={ + "state": "{{15}}", + }, ) await hass.async_block_till_done() - # Confirm that the config entry has been removed from the device 2 registry + # Confirm that the configuration entry has been removed from the device 2 registry (previous) previous_device = device_registry.async_get(device_id=device_id2) - assert template_config_entry.entry_id not in previous_device.config_entries + assert sensor_config_entry.entry_id not in previous_device.config_entries - # 
Confirm that there is no device with the helper config entry + # Confirm that there is no device with the helper configuration entry assert ( - dr.async_entries_for_config_entry( - device_registry, template_config_entry.entry_id - ) + dr.async_entries_for_config_entry(device_registry, sensor_config_entry.entry_id) == [] ) - - -async def test_fail_non_numerical_number_settings( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test that non numerical number options causes config entry setup to fail. - - Support for non numerical max, min and step was added in HA Core 2024.9.0 and - removed in HA Core 2024.9.1. - """ - - options = { - "template_type": "number", - "name": "My template", - "state": "{{ 10 }}", - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", - "set_value": { - "action": "input_number.set_value", - "target": {"entity_id": "input_number.test"}, - "data": {"value": "{{ value }}"}, - }, - } - # Setup the config entry - template_config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options=options, - title="Template", - ) - template_config_entry.add_to_hass(hass) - assert not await hass.config_entries.async_setup(template_config_entry.entry_id) - assert ( - "The 'My template' number template needs to be reconfigured, " - "max must be a number, got '{{ 100 }}'" in caplog.text - ) diff --git a/tests/components/template/test_light.py b/tests/components/template/test_light.py index 065a1488dc9..ad97146d0fb 100644 --- a/tests/components/template/test_light.py +++ b/tests/components/template/test_light.py @@ -1,7 +1,5 @@ """The tests for the Template light platform.""" -from typing import Any - import pytest from homeassistant.components import light @@ -154,9 +152,7 @@ OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG = { } -async def async_setup_light( - hass: HomeAssistant, count: int, light_config: dict[str, Any] -) -> None: +async def async_setup_light(hass, count, light_config): """Do setup of light integration.""" config = {"light": {"platform": "template", "lights": light_config}} @@ -173,9 +169,7 @@ async def async_setup_light( @pytest.fixture -async def setup_light( - hass: HomeAssistant, count: int, light_config: dict[str, Any] -) -> None: +async def setup_light(hass, count, light_config): """Do setup of light integration.""" await async_setup_light(hass, count, light_config) diff --git a/tests/components/template/test_lock.py b/tests/components/template/test_lock.py index 186a84d5365..f4e81cbfd63 100644 --- a/tests/components/template/test_lock.py +++ b/tests/components/template/test_lock.py @@ -4,7 +4,6 @@ import pytest from homeassistant import setup from homeassistant.components import lock -from homeassistant.components.lock import LockState from homeassistant.const import ( ATTR_CODE, ATTR_ENTITY_ID, @@ -66,20 +65,19 @@ OPTIMISTIC_CODED_LOCK_CONFIG = { }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_state(hass: HomeAssistant) -> None: +async def test_template_state(hass: HomeAssistant, start_ha) -> None: """Test template.""" hass.states.async_set("switch.test_state", STATE_ON) await hass.async_block_till_done() state = hass.states.get("lock.test_template_lock") - assert state.state == LockState.LOCKED + assert state.state == lock.STATE_LOCKED hass.states.async_set("switch.test_state", STATE_OFF) await hass.async_block_till_done() state = hass.states.get("lock.test_template_lock") - assert state.state == LockState.UNLOCKED + assert state.state == lock.STATE_UNLOCKED @pytest.mark.parametrize(("count", "domain"), [(1, 
lock.DOMAIN)]) @@ -94,11 +92,10 @@ async def test_template_state(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_state_boolean_on(hass: HomeAssistant) -> None: +async def test_template_state_boolean_on(hass: HomeAssistant, start_ha) -> None: """Test the setting of the state with boolean on.""" state = hass.states.get("lock.template_lock") - assert state.state == LockState.LOCKED + assert state.state == lock.STATE_LOCKED @pytest.mark.parametrize(("count", "domain"), [(1, lock.DOMAIN)]) @@ -113,11 +110,10 @@ async def test_template_state_boolean_on(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_state_boolean_off(hass: HomeAssistant) -> None: +async def test_template_state_boolean_off(hass: HomeAssistant, start_ha) -> None: """Test the setting of the state with off.""" state = hass.states.get("lock.template_lock") - assert state.state == LockState.UNLOCKED + assert state.state == lock.STATE_UNLOCKED @pytest.mark.parametrize(("count", "domain"), [(0, lock.DOMAIN)]) @@ -184,8 +180,7 @@ async def test_template_state_boolean_off(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_syntax_error(hass: HomeAssistant) -> None: +async def test_template_syntax_error(hass: HomeAssistant, start_ha) -> None: """Test templating syntax errors don't create entities.""" assert hass.states.async_all("lock") == [] @@ -202,16 +197,15 @@ async def test_template_syntax_error(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_static(hass: HomeAssistant) -> None: +async def test_template_static(hass: HomeAssistant, start_ha) -> None: """Test that we allow static templates.""" state = hass.states.get("lock.template_lock") - assert state.state == LockState.UNLOCKED + assert state.state == lock.STATE_UNLOCKED - hass.states.async_set("lock.template_lock", LockState.LOCKED) + hass.states.async_set("lock.template_lock", lock.STATE_LOCKED) await hass.async_block_till_done() state = hass.states.get("lock.template_lock") - assert state.state == LockState.LOCKED + assert state.state == lock.STATE_LOCKED @pytest.mark.parametrize(("count", "domain"), [(1, lock.DOMAIN)]) @@ -226,15 +220,16 @@ async def test_template_static(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_lock_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_lock_action( + hass: HomeAssistant, start_ha, calls: list[ServiceCall] +) -> None: """Test lock action.""" await setup.async_setup_component(hass, "switch", {}) hass.states.async_set("switch.test_state", STATE_OFF) await hass.async_block_till_done() state = hass.states.get("lock.template_lock") - assert state.state == LockState.UNLOCKED + assert state.state == lock.STATE_UNLOCKED await hass.services.async_call( lock.DOMAIN, @@ -260,15 +255,16 @@ async def test_lock_action(hass: HomeAssistant, calls: list[ServiceCall]) -> Non }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_unlock_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_unlock_action( + hass: HomeAssistant, start_ha, calls: list[ServiceCall] +) -> None: """Test unlock action.""" await setup.async_setup_component(hass, "switch", {}) hass.states.async_set("switch.test_state", STATE_ON) await hass.async_block_till_done() state = hass.states.get("lock.template_lock") - assert state.state == LockState.LOCKED + assert state.state == 
lock.STATE_LOCKED await hass.services.async_call( lock.DOMAIN, @@ -295,9 +291,8 @@ async def test_unlock_action(hass: HomeAssistant, calls: list[ServiceCall]) -> N }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_lock_action_with_code( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, start_ha, calls: list[ServiceCall] ) -> None: """Test lock action with defined code format and supplied lock code.""" await setup.async_setup_component(hass, "switch", {}) @@ -305,7 +300,7 @@ async def test_lock_action_with_code( await hass.async_block_till_done() state = hass.states.get("lock.template_lock") - assert state.state == LockState.UNLOCKED + assert state.state == lock.STATE_UNLOCKED await hass.services.async_call( lock.DOMAIN, @@ -333,9 +328,8 @@ async def test_lock_action_with_code( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_unlock_action_with_code( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, start_ha, calls: list[ServiceCall] ) -> None: """Test unlock action with code format and supplied unlock code.""" await setup.async_setup_component(hass, "switch", {}) @@ -343,7 +337,7 @@ async def test_unlock_action_with_code( await hass.async_block_till_done() state = hass.states.get("lock.template_lock") - assert state.state == LockState.LOCKED + assert state.state == lock.STATE_LOCKED await hass.services.async_call( lock.DOMAIN, @@ -378,9 +372,8 @@ async def test_unlock_action_with_code( lock.SERVICE_UNLOCK, ], ) -@pytest.mark.usefixtures("start_ha") async def test_lock_actions_fail_with_invalid_code( - hass: HomeAssistant, calls: list[ServiceCall], test_action + hass: HomeAssistant, start_ha, calls: list[ServiceCall], test_action ) -> None: """Test invalid lock codes.""" await hass.services.async_call( @@ -411,9 +404,8 @@ async def test_lock_actions_fail_with_invalid_code( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_lock_actions_dont_execute_with_code_template_rendering_error( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, start_ha, calls: list[ServiceCall] ) -> None: """Test lock code format rendering fails block lock/unlock actions.""" await hass.services.async_call( @@ -445,9 +437,8 @@ async def test_lock_actions_dont_execute_with_code_template_rendering_error( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_actions_with_none_as_codeformat_ignores_code( - hass: HomeAssistant, action, calls: list[ServiceCall] + hass: HomeAssistant, action, start_ha, calls: list[ServiceCall] ) -> None: """Test lock actions with supplied lock code.""" await setup.async_setup_component(hass, "switch", {}) @@ -455,7 +446,7 @@ async def test_actions_with_none_as_codeformat_ignores_code( await hass.async_block_till_done() state = hass.states.get("lock.template_lock") - assert state.state == LockState.UNLOCKED + assert state.state == lock.STATE_UNLOCKED await hass.services.async_call( lock.DOMAIN, @@ -484,9 +475,8 @@ async def test_actions_with_none_as_codeformat_ignores_code( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_actions_with_invalid_regexp_as_codeformat_never_execute( - hass: HomeAssistant, action, calls: list[ServiceCall] + hass: HomeAssistant, action, start_ha, calls: list[ServiceCall] ) -> None: """Test lock actions don't execute with invalid regexp.""" await setup.async_setup_component(hass, "switch", {}) @@ -494,7 +484,7 @@ async def test_actions_with_invalid_regexp_as_codeformat_never_execute( await hass.async_block_till_done() state = 
hass.states.get("lock.template_lock") - assert state.state == LockState.UNLOCKED + assert state.state == lock.STATE_UNLOCKED await hass.services.async_call( lock.DOMAIN, @@ -529,10 +519,9 @@ async def test_actions_with_invalid_regexp_as_codeformat_never_execute( ], ) @pytest.mark.parametrize( - "test_state", [LockState.UNLOCKING, LockState.LOCKING, LockState.JAMMED] + "test_state", [lock.STATE_UNLOCKING, lock.STATE_LOCKING, lock.STATE_JAMMED] ) -@pytest.mark.usefixtures("start_ha") -async def test_lock_state(hass: HomeAssistant, test_state) -> None: +async def test_lock_state(hass: HomeAssistant, test_state, start_ha) -> None: """Test value template.""" hass.states.async_set("input_select.test_state", test_state) await hass.async_block_till_done() @@ -554,8 +543,7 @@ async def test_lock_state(hass: HomeAssistant, test_state) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_available_template_with_entities(hass: HomeAssistant) -> None: +async def test_available_template_with_entities(hass: HomeAssistant, start_ha) -> None: """Test availability templates with values from other entities.""" # When template returns true.. hass.states.async_set("availability_state.state", STATE_ON) @@ -585,9 +573,8 @@ async def test_available_template_with_entities(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, caplog_setup_text + hass: HomeAssistant, start_ha, caplog_setup_text ) -> None: """Test that an invalid availability keeps the device available.""" assert hass.states.get("lock.template_lock").state != STATE_UNAVAILABLE @@ -608,8 +595,7 @@ async def test_invalid_availability_template_keeps_component_available( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_unique_id(hass: HomeAssistant) -> None: +async def test_unique_id(hass: HomeAssistant, start_ha) -> None: """Test unique_id option only creates one lock per id.""" await setup.async_setup_component( hass, diff --git a/tests/helpers/test_trigger_template_entity.py b/tests/components/template/test_manual_trigger_entity.py similarity index 100% rename from tests/helpers/test_trigger_template_entity.py rename to tests/components/template/test_manual_trigger_entity.py diff --git a/tests/components/template/test_number.py b/tests/components/template/test_number.py index ec96245b4d0..bf04151fd36 100644 --- a/tests/components/template/test_number.py +++ b/tests/components/template/test_number.py @@ -1,7 +1,5 @@ """The tests for the Template number platform.""" -from syrupy.assertion import SnapshotAssertion - from homeassistant import setup from homeassistant.components.input_number import ( ATTR_VALUE as INPUT_NUMBER_ATTR_VALUE, @@ -16,17 +14,11 @@ from homeassistant.components.number import ( DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE as NUMBER_SERVICE_SET_VALUE, ) -from homeassistant.components.template import DOMAIN -from homeassistant.const import ( - ATTR_ICON, - CONF_ENTITY_ID, - CONF_UNIT_OF_MEASUREMENT, - STATE_UNKNOWN, -) +from homeassistant.const import ATTR_ICON, CONF_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import Context, HomeAssistant, ServiceCall -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, assert_setup_component, async_capture_events +from tests.common import assert_setup_component, async_capture_events _TEST_NUMBER = "number.template_number" # 
Represent for number's value @@ -50,40 +42,6 @@ _VALUE_INPUT_NUMBER_CONFIG = { } -async def test_setup_config_entry( - hass: HomeAssistant, - snapshot: SnapshotAssertion, -) -> None: - """Test the config flow.""" - - template_config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - "name": "My template", - "template_type": "number", - "state": "{{ 10 }}", - "min": 0, - "max": 100, - "step": 0.1, - "set_value": { - "action": "input_number.set_value", - "target": {"entity_id": "input_number.test"}, - "data": {"value": "{{ value }}"}, - }, - }, - title="My template", - ) - template_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(template_config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("number.my_template") - assert state is not None - assert state == snapshot - - async def test_missing_optional_config(hass: HomeAssistant) -> None: """Test: missing optional template is ok.""" with assert_setup_component(1, "template"): @@ -105,7 +63,7 @@ async def test_missing_optional_config(hass: HomeAssistant) -> None: await hass.async_start() await hass.async_block_till_done() - _verify(hass, 4, 1, 0.0, 100.0, None) + _verify(hass, 4, 1, 0.0, 100.0) async def test_missing_required_keys(hass: HomeAssistant) -> None: @@ -157,7 +115,6 @@ async def test_all_optional_config(hass: HomeAssistant) -> None: "min": "{{ 3 }}", "max": "{{ 5 }}", "step": "{{ 1 }}", - "unit_of_measurement": "beer", } } }, @@ -167,7 +124,7 @@ async def test_all_optional_config(hass: HomeAssistant) -> None: await hass.async_start() await hass.async_block_till_done() - _verify(hass, 4, 1, 3, 5, "beer") + _verify(hass, 4, 1, 3, 5) async def test_templates_with_entities( @@ -255,7 +212,7 @@ async def test_templates_with_entities( assert entry assert entry.unique_id == "b-a" - _verify(hass, 4, 1, 3, 5, None) + _verify(hass, 4, 1, 3, 5) await hass.services.async_call( INPUT_NUMBER_DOMAIN, @@ -264,7 +221,7 @@ async def test_templates_with_entities( blocking=True, ) await hass.async_block_till_done() - _verify(hass, 5, 1, 3, 5, None) + _verify(hass, 5, 1, 3, 5) await hass.services.async_call( INPUT_NUMBER_DOMAIN, @@ -273,7 +230,7 @@ async def test_templates_with_entities( blocking=True, ) await hass.async_block_till_done() - _verify(hass, 5, 2, 3, 5, None) + _verify(hass, 5, 2, 3, 5) await hass.services.async_call( INPUT_NUMBER_DOMAIN, @@ -282,7 +239,7 @@ async def test_templates_with_entities( blocking=True, ) await hass.async_block_till_done() - _verify(hass, 5, 2, 2, 5, None) + _verify(hass, 5, 2, 2, 5) await hass.services.async_call( INPUT_NUMBER_DOMAIN, @@ -291,7 +248,7 @@ async def test_templates_with_entities( blocking=True, ) await hass.async_block_till_done() - _verify(hass, 5, 2, 2, 6, None) + _verify(hass, 5, 2, 2, 6) await hass.services.async_call( NUMBER_DOMAIN, @@ -299,7 +256,7 @@ async def test_templates_with_entities( {CONF_ENTITY_ID: _TEST_NUMBER, NUMBER_ATTR_VALUE: 2}, blocking=True, ) - _verify(hass, 2, 2, 2, 6, None) + _verify(hass, 2, 2, 2, 6) # Check this variable can be used in set_value script assert len(calls) == 1 @@ -329,7 +286,6 @@ async def test_trigger_number(hass: HomeAssistant) -> None: "min": "{{ trigger.event.data.min_beers }}", "max": "{{ trigger.event.data.max_beers }}", "step": "{{ trigger.event.data.step }}", - "unit_of_measurement": "beer", "set_value": {"event": "test_number_event"}, "optimistic": True, }, @@ -349,17 +305,11 @@ async def test_trigger_number(hass: HomeAssistant) -> None: assert state.attributes["min"] 
== 0.0 assert state.attributes["max"] == 100.0 assert state.attributes["step"] == 1.0 - assert state.attributes["unit_of_measurement"] == "beer" context = Context() hass.bus.async_fire( "test_event", - { - "beers_drank": 3, - "min_beers": 1.0, - "max_beers": 5.0, - "step": 0.5, - }, + {"beers_drank": 3, "min_beers": 1.0, "max_beers": 5.0, "step": 0.5}, context=context, ) await hass.async_block_till_done() @@ -382,13 +332,12 @@ async def test_trigger_number(hass: HomeAssistant) -> None: def _verify( - hass: HomeAssistant, - expected_value: int, - expected_step: int, - expected_minimum: int, - expected_maximum: int, - expected_unit_of_measurement: str | None, -) -> None: + hass, + expected_value, + expected_step, + expected_minimum, + expected_maximum, +): """Verify number's state.""" state = hass.states.get(_TEST_NUMBER) attributes = state.attributes @@ -396,7 +345,6 @@ def _verify( assert attributes.get(ATTR_STEP) == float(expected_step) assert attributes.get(ATTR_MAX) == float(expected_maximum) assert attributes.get(ATTR_MIN) == float(expected_minimum) - assert attributes.get(CONF_UNIT_OF_MEASUREMENT) == expected_unit_of_measurement async def test_icon_template(hass: HomeAssistant) -> None: @@ -512,50 +460,3 @@ async def test_icon_template_with_trigger(hass: HomeAssistant) -> None: state = hass.states.get(_TEST_NUMBER) assert float(state.state) == 51 assert state.attributes[ATTR_ICON] == "mdi:greater" - - -async def test_device_id( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test for device for number template.""" - - device_config_entry = MockConfigEntry() - device_config_entry.add_to_hass(hass) - device_entry = device_registry.async_get_or_create( - config_entry_id=device_config_entry.entry_id, - identifiers={("test", "identifier_test")}, - connections={("mac", "30:31:32:33:34:35")}, - ) - await hass.async_block_till_done() - assert device_entry is not None - assert device_entry.id is not None - - template_config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - "name": "My template", - "template_type": "number", - "state": "{{ 10 }}", - "min": 0, - "max": 100, - "step": 0.1, - "set_value": { - "action": "input_number.set_value", - "target": {"entity_id": "input_number.test"}, - "data": {"value": "{{ value }}"}, - }, - "device_id": device_entry.id, - }, - title="My template", - ) - template_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(template_config_entry.entry_id) - await hass.async_block_till_done() - - template_entity = entity_registry.async_get("number.my_template") - assert template_entity is not None - assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_select.py b/tests/components/template/test_select.py index 5b4723a3034..4106abdd469 100644 --- a/tests/components/template/test_select.py +++ b/tests/components/template/test_select.py @@ -1,7 +1,5 @@ """The tests for the Template select platform.""" -from syrupy.assertion import SnapshotAssertion - from homeassistant import setup from homeassistant.components.input_select import ( ATTR_OPTION as INPUT_SELECT_ATTR_OPTION, @@ -16,45 +14,17 @@ from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION as SELECT_SERVICE_SELECT_OPTION, ) -from homeassistant.components.template import DOMAIN from homeassistant.const import ATTR_ICON, CONF_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import Context, HomeAssistant, ServiceCall -from 
homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, assert_setup_component, async_capture_events +from tests.common import assert_setup_component, async_capture_events _TEST_SELECT = "select.template_select" # Represent for select's current_option _OPTION_INPUT_SELECT = "input_select.option" -async def test_setup_config_entry( - hass: HomeAssistant, - snapshot: SnapshotAssertion, -) -> None: - """Test the config flow.""" - - template_config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - "name": "My template", - "template_type": "select", - "state": "{{ 'on' }}", - "options": "{{ ['off', 'on', 'auto'] }}", - }, - title="My template", - ) - template_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(template_config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("select.my_template") - assert state is not None - assert state == snapshot - - async def test_missing_optional_config(hass: HomeAssistant) -> None: """Test: missing optional template is ok.""" with assert_setup_component(1, "template"): @@ -318,12 +288,7 @@ async def test_trigger_select(hass: HomeAssistant) -> None: assert events[0].event_type == "test_number_event" -def _verify( - hass: HomeAssistant, - expected_current_option: str, - expected_options: list[str], - entity_name: str = _TEST_SELECT, -) -> None: +def _verify(hass, expected_current_option, expected_options, entity_name=_TEST_SELECT): """Verify select's state.""" state = hass.states.get(entity_name) attributes = state.attributes @@ -463,43 +428,3 @@ async def test_template_icon_with_trigger(hass: HomeAssistant) -> None: state = hass.states.get(_TEST_SELECT) assert state.state == "a" assert state.attributes[ATTR_ICON] == "mdi:greater" - - -async def test_device_id( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test for device for select template.""" - - device_config_entry = MockConfigEntry() - device_config_entry.add_to_hass(hass) - device_entry = device_registry.async_get_or_create( - config_entry_id=device_config_entry.entry_id, - identifiers={("test", "identifier_test")}, - connections={("mac", "30:31:32:33:34:35")}, - ) - await hass.async_block_till_done() - assert device_entry is not None - assert device_entry.id is not None - - template_config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={ - "name": "My template", - "template_type": "select", - "state": "{{ 'on' }}", - "options": "{{ ['off', 'on', 'auto'] }}", - "device_id": device_entry.id, - }, - title="My template", - ) - template_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(template_config_entry.entry_id) - await hass.async_block_till_done() - - template_entity = entity_registry.async_get("select.my_template") - assert template_entity is not None - assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_sensor.py b/tests/components/template/test_sensor.py index 929a890ab38..37d6d120491 100644 --- a/tests/components/template/test_sensor.py +++ b/tests/components/template/test_sensor.py @@ -12,7 +12,6 @@ from homeassistant.components import sensor, template from homeassistant.components.template.sensor import TriggerSensorEntity from homeassistant.const import ( ATTR_ENTITY_PICTURE, - ATTR_FRIENDLY_NAME, ATTR_ICON, EVENT_COMPONENT_LOADED, 
EVENT_HOMEASSISTANT_START, @@ -24,9 +23,7 @@ from homeassistant.const import ( from homeassistant.core import Context, CoreState, HomeAssistant, State, callback from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity -from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.template import Template -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import ATTR_COMPONENT, async_setup_component import homeassistant.util.dt as dt_util @@ -107,8 +104,7 @@ async def test_setup_config_entry( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_legacy(hass: HomeAssistant) -> None: +async def test_template_legacy(hass: HomeAssistant, start_ha) -> None: """Test template.""" assert hass.states.get(TEST_NAME).state == "It ." @@ -137,8 +133,7 @@ async def test_template_legacy(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_icon_template(hass: HomeAssistant) -> None: +async def test_icon_template(hass: HomeAssistant, start_ha) -> None: """Test icon template.""" assert hass.states.get(TEST_NAME).attributes.get("icon") == "" @@ -167,8 +162,7 @@ async def test_icon_template(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_entity_picture_template(hass: HomeAssistant) -> None: +async def test_entity_picture_template(hass: HomeAssistant, start_ha) -> None: """Test entity_picture template.""" assert hass.states.get(TEST_NAME).attributes.get("entity_picture") == "" @@ -247,8 +241,9 @@ async def test_entity_picture_template(hass: HomeAssistant) -> None: ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_friendly_name_template(hass: HomeAssistant, attribute, expected) -> None: +async def test_friendly_name_template( + hass: HomeAssistant, attribute, expected, start_ha +) -> None: """Test friendly_name template with an unknown value_template.""" assert hass.states.get(TEST_NAME).attributes.get(attribute) == expected[0] @@ -317,8 +312,7 @@ async def test_friendly_name_template(hass: HomeAssistant, attribute, expected) }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_syntax_error(hass: HomeAssistant) -> None: +async def test_template_syntax_error(hass: HomeAssistant, start_ha) -> None: """Test setup with invalid device_class.""" assert hass.states.async_all("sensor") == [] @@ -340,8 +334,7 @@ async def test_template_syntax_error(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_attribute_missing(hass: HomeAssistant) -> None: +async def test_template_attribute_missing(hass: HomeAssistant, start_ha) -> None: """Test missing attribute template.""" assert hass.states.get(TEST_NAME).state == STATE_UNAVAILABLE @@ -367,8 +360,7 @@ async def test_template_attribute_missing(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_setup_valid_device_class(hass: HomeAssistant) -> None: +async def test_setup_valid_device_class(hass: HomeAssistant, start_ha) -> None: """Test setup with valid device_class.""" hass.states.async_set("sensor.test_sensor", "75") await hass.async_block_till_done() @@ -382,7 +374,7 @@ async def test_creating_sensor_loads_group(hass: HomeAssistant) -> None: order = [] after_dep_event = Event() - async def async_setup_group(hass: HomeAssistant, config: ConfigType) -> bool: + async def 
async_setup_group(hass, config): # Make sure group takes longer to load, so that it won't # be loaded first by chance await after_dep_event.wait() @@ -391,11 +383,8 @@ async def test_creating_sensor_loads_group(hass: HomeAssistant) -> None: return True async def async_setup_template( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, - ) -> bool: + hass, config, async_add_entities, discovery_info=None + ): order.append("sensor.template") return True @@ -440,8 +429,7 @@ async def test_creating_sensor_loads_group(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_available_template_with_entities(hass: HomeAssistant) -> None: +async def test_available_template_with_entities(hass: HomeAssistant, start_ha) -> None: """Test availability tempalates with values from other entities.""" hass.states.async_set("sensor.availability_sensor", STATE_OFF) @@ -479,9 +467,8 @@ async def test_available_template_with_entities(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_invalid_attribute_template( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, caplog_setup_text + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, start_ha, caplog_setup_text ) -> None: """Test that errors are logged if rendering template fails.""" hass.states.async_set("sensor.test_sensor", "startup") @@ -516,9 +503,8 @@ async def test_invalid_attribute_template( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, caplog_setup_text + hass: HomeAssistant, start_ha, caplog_setup_text ) -> None: """Test that an invalid availability keeps the device available.""" assert hass.states.get("sensor.my_sensor").state != STATE_UNAVAILABLE @@ -634,9 +620,8 @@ async def test_no_template_match_all( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry ) -> None: """Test unique_id option only creates one sensor per id.""" assert len(hass.states.async_all()) == 2 @@ -671,8 +656,7 @@ async def test_unique_id( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_sun_renders_once_per_sensor(hass: HomeAssistant) -> None: +async def test_sun_renders_once_per_sensor(hass: HomeAssistant, start_ha) -> None: """Test sun change renders the template only once per sensor.""" now = dt_util.utcnow() @@ -741,8 +725,7 @@ async def test_sun_renders_once_per_sensor(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_this_variable(hass: HomeAssistant) -> None: +async def test_this_variable(hass: HomeAssistant, start_ha) -> None: """Test template.""" assert hass.states.get(TEST_NAME).state == "It: " + TEST_NAME @@ -887,9 +870,8 @@ async def test_this_variable_early_hass_running( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_self_referencing_sensor_loop( - hass: HomeAssistant, caplog_setup_text + hass: HomeAssistant, start_ha, caplog_setup_text ) -> None: """Test a self referencing sensor does not loop forever.""" assert len(hass.states.async_all()) == 1 @@ -918,9 +900,8 @@ async def test_self_referencing_sensor_loop( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_self_referencing_sensor_with_icon_loop( - hass: HomeAssistant, caplog_setup_text + hass: HomeAssistant, start_ha, 
caplog_setup_text
 ) -> None:
     """Test a self referencing sensor loops forever with a valid self referencing icon."""
     assert len(hass.states.async_all()) == 1
@@ -954,9 +935,8 @@ async def test_self_referencing_sensor_with_icon_loop(
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
 async def test_self_referencing_sensor_with_icon_and_picture_entity_loop(
-    hass: HomeAssistant, caplog_setup_text
+    hass: HomeAssistant, start_ha, caplog_setup_text
 ) -> None:
     """Test a self referencing sensor loops forever with a valid self referencing icon."""
     assert len(hass.states.async_all()) == 1
@@ -984,16 +964,14 @@ async def test_self_referencing_sensor_with_icon_and_picture_entity_loop(
                     "test": {
                         "value_template": "{{ 1 }}",
                         "entity_picture_template": "{{ ((states.sensor.test.attributes['entity_picture'] or 0) | int) + 1 }}",
-                        "friendly_name_template": "{{ ((states.sensor.test.attributes['friendly_name'] or 0) | int) + 1 }}",
                     },
                 },
             }
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
 async def test_self_referencing_entity_picture_loop(
-    hass: HomeAssistant, caplog_setup_text
+    hass: HomeAssistant, start_ha, caplog_setup_text
 ) -> None:
     """Test a self referencing sensor does not loop forever with a looping self referencing entity picture."""
     assert len(hass.states.async_all()) == 1
@@ -1009,8 +987,7 @@ async def test_self_referencing_entity_picture_loop(
     state = hass.states.get("sensor.test")
     assert int(state.state) == 1
-    assert state.attributes[ATTR_ENTITY_PICTURE] == "3"
-    assert state.attributes[ATTR_FRIENDLY_NAME] == "3"
+    assert state.attributes[ATTR_ENTITY_PICTURE] == 2

     await hass.async_block_till_done()

     assert int(state.state) == 1
@@ -1110,8 +1087,7 @@ async def test_self_referencing_icon_with_no_loop(
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
-async def test_duplicate_templates(hass: HomeAssistant) -> None:
+async def test_duplicate_templates(hass: HomeAssistant, start_ha) -> None:
     """Test template entity where the value and friendly name are the same template."""
     hass.states.async_set("sensor.test_state", "Abc")
     await hass.async_block_till_done()
@@ -1180,9 +1156,8 @@ async def test_duplicate_templates(hass: HomeAssistant) -> None:
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
 async def test_trigger_entity(
-    hass: HomeAssistant, entity_registry: er.EntityRegistry
+    hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry
 ) -> None:
     """Test trigger entity works."""
     state = hass.states.get("sensor.hello_name")
@@ -1227,127 +1202,6 @@ async def test_trigger_entity(
     assert state.context is context
-
-@pytest.mark.parametrize(("count", "domain"), [(1, template.DOMAIN)])
-@pytest.mark.parametrize(
-    "config",
-    [
-        {
-            "template": [
-                {
-                    "unique_id": "listening-test-event",
-                    "trigger": {"platform": "event", "event_type": "test_event"},
-                    "condition": [
-                        {
-                            "condition": "template",
-                            "value_template": "{{ trigger.event.data.beer >= 42 }}",
-                        }
-                    ],
-                    "sensor": [
-                        {
-                            "name": "Enough Name",
-                            "unique_id": "enough-id",
-                            "state": "You had enough Beer.",
-                        }
-                    ],
-                },
-            ],
-        },
-    ],
-)
-@pytest.mark.usefixtures("start_ha")
-async def test_trigger_conditional_entity(hass: HomeAssistant) -> None:
-    """Test conditional trigger entity works."""
-    state = hass.states.get("sensor.enough_name")
-    assert state is not None
-    assert state.state == STATE_UNKNOWN
-
-    hass.bus.async_fire("test_event", {"beer": 2})
-    await hass.async_block_till_done()
-
-    state = hass.states.get("sensor.enough_name")
-    assert state.state == STATE_UNKNOWN
-
-    hass.bus.async_fire("test_event", {"beer": 42})
-    await
hass.async_block_till_done() - - state = hass.states.get("sensor.enough_name") - assert state.state == "You had enough Beer." - - -@pytest.mark.parametrize(("count", "domain"), [(1, template.DOMAIN)]) -@pytest.mark.parametrize( - "config", - [ - { - "template": [ - { - "unique_id": "listening-test-event", - "trigger": {"platform": "event", "event_type": "test_event"}, - "condition": [ - { - "condition": "template", - "value_template": "{{ trigger.event.data.beer / 0 == 'narf' }}", - } - ], - "sensor": [ - { - "name": "Enough Name", - "unique_id": "enough-id", - "state": "You had enough Beer.", - } - ], - }, - ], - }, - ], -) -@pytest.mark.usefixtures("start_ha") -async def test_trigger_conditional_entity_evaluation_error( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test trigger entity is not updated when condition evaluation fails.""" - hass.bus.async_fire("test_event", {"beer": 1}) - await hass.async_block_till_done() - - state = hass.states.get("sensor.enough_name") - assert state is not None - assert state.state == STATE_UNKNOWN - - assert "Error evaluating condition in 'template entity'" in caplog.text - - -@pytest.mark.parametrize(("count", "domain"), [(0, template.DOMAIN)]) -@pytest.mark.parametrize( - "config", - [ - { - "template": [ - { - "unique_id": "listening-test-event", - "trigger": {"platform": "event", "event_type": "test_event"}, - "condition": [ - {"condition": "template", "value_template": "{{ invalid"} - ], - "sensor": [ - { - "name": "Will Not Exist Name", - "state": "Unimportant", - } - ], - }, - ], - }, - ], -) -@pytest.mark.usefixtures("start_ha") -async def test_trigger_conditional_entity_invalid_condition( - hass: HomeAssistant, -) -> None: - """Test trigger entity is not created when condition is invalid.""" - state = hass.states.get("sensor.will_not_exist_name") - assert state is None - - @pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @pytest.mark.parametrize( "config", @@ -1373,8 +1227,9 @@ async def test_trigger_conditional_entity_invalid_condition( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_trigger_entity_runs_once(hass: HomeAssistant) -> None: +async def test_trigger_entity_runs_once( + hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry +) -> None: """Test trigger entity handles a trigger once.""" state = hass.states.get("sensor.hello_name") assert state is not None @@ -1407,9 +1262,8 @@ async def test_trigger_entity_runs_once(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_trigger_entity_render_error( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry ) -> None: """Test trigger entity handles render error.""" state = hass.states.get("sensor.hello") @@ -1445,9 +1299,8 @@ async def test_trigger_entity_render_error( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_trigger_not_allowed_platform_config( - hass: HomeAssistant, caplog_setup_text + hass: HomeAssistant, start_ha, caplog_setup_text ) -> None: """Test we throw a helpful warning if a trigger is configured in platform config.""" state = hass.states.get(TEST_NAME) @@ -1475,8 +1328,7 @@ async def test_trigger_not_allowed_platform_config( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_config_top_level(hass: HomeAssistant) -> None: +async def test_config_top_level(hass: HomeAssistant, start_ha) -> None: """Test unique_id option only creates one sensor per id.""" assert 
len(hass.states.async_all()) == 1 state = hass.states.get("sensor.top_level") @@ -2022,8 +1874,9 @@ async def test_trigger_entity_restore_state( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_trigger_action(hass: HomeAssistant) -> None: +async def test_trigger_action( + hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry +) -> None: """Test trigger entity with an action works.""" event = "test_event2" context = Context() @@ -2045,53 +1898,6 @@ async def test_trigger_action(hass: HomeAssistant) -> None: assert events[0].context.parent_id == context.id -@pytest.mark.parametrize(("count", "domain"), [(1, template.DOMAIN)]) -@pytest.mark.parametrize( - "config", - [ - { - "template": [ - { - "unique_id": "listening-test-event", - "trigger": {"platform": "event", "event_type": "test_event"}, - "condition": [ - { - "condition": "template", - "value_template": "{{ trigger.event.data.beer >= 42 }}", - } - ], - "action": [ - {"event": "test_event_by_action"}, - ], - "sensor": [ - { - "name": "Not That Important", - "state": "Really not.", - } - ], - }, - ], - }, - ], -) -@pytest.mark.usefixtures("start_ha") -async def test_trigger_conditional_action(hass: HomeAssistant) -> None: - """Test conditional trigger entity with an action works.""" - - event = "test_event_by_action" - events = async_capture_events(hass, event) - - hass.bus.async_fire("test_event", {"beer": 1}) - await hass.async_block_till_done() - - assert len(events) == 0 - - hass.bus.async_fire("test_event", {"beer": 42}) - await hass.async_block_till_done() - - assert len(events) == 1 - - async def test_device_id( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/template/test_switch.py b/tests/components/template/test_switch.py index 2fc0f29acaf..68cca990ef1 100644 --- a/tests/components/template/test_switch.py +++ b/tests/components/template/test_switch.py @@ -1,10 +1,8 @@ """The tests for the Template switch platform.""" import pytest -from syrupy.assertion import SnapshotAssertion from homeassistant import setup -from homeassistant.components import template from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -15,15 +13,9 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import CoreState, HomeAssistant, ServiceCall, State -from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - assert_setup_component, - mock_component, - mock_restore_cache, -) +from tests.common import assert_setup_component, mock_component, mock_restore_cache OPTIMISTIC_SWITCH_CONFIG = { "turn_on": { @@ -43,38 +35,6 @@ OPTIMISTIC_SWITCH_CONFIG = { } -async def test_setup_config_entry( - hass: HomeAssistant, - snapshot: SnapshotAssertion, -) -> None: - """Test the config flow.""" - - hass.states.async_set( - "switch.one", - "on", - {}, - ) - - template_config_entry = MockConfigEntry( - data={}, - domain=template.DOMAIN, - options={ - "name": "My template", - "value_template": "{{ states('switch.one') }}", - "template_type": SWITCH_DOMAIN, - }, - title="My template", - ) - template_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(template_config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("switch.my_template") - assert state is not None - assert state == snapshot - - async def test_template_state_text(hass: 
HomeAssistant) -> None: """Test the state text of a template.""" with assert_setup_component(1, "switch"): @@ -695,42 +655,3 @@ async def test_unique_id(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(hass.states.async_all("switch")) == 1 - - -async def test_device_id( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test for device for Template.""" - - device_config_entry = MockConfigEntry() - device_config_entry.add_to_hass(hass) - device_entry = device_registry.async_get_or_create( - config_entry_id=device_config_entry.entry_id, - identifiers={("test", "identifier_test")}, - connections={("mac", "30:31:32:33:34:35")}, - ) - await hass.async_block_till_done() - assert device_entry is not None - assert device_entry.id is not None - - template_config_entry = MockConfigEntry( - data={}, - domain=template.DOMAIN, - options={ - "name": "My template", - "value_template": "{{ true }}", - "template_type": "switch", - "device_id": device_entry.id, - }, - title="My template", - ) - template_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(template_config_entry.entry_id) - await hass.async_block_till_done() - - template_entity = entity_registry.async_get("switch.my_template") - assert template_entity is not None - assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_template_entity.py b/tests/components/template/test_template_entity.py index c09a09750fe..dcceea95181 100644 --- a/tests/components/template/test_template_entity.py +++ b/tests/components/template/test_template_entity.py @@ -11,14 +11,14 @@ async def test_template_entity_requires_hass_set(hass: HomeAssistant) -> None: """Test template entity requires hass to be set before accepting templates.""" entity = template_entity.TemplateEntity(hass) - with pytest.raises(ValueError, match="^hass cannot be None"): + with pytest.raises(AssertionError): entity.add_template_attribute("_hello", template.Template("Hello")) entity.hass = object() - with pytest.raises(ValueError, match="^template.hass cannot be None"): - entity.add_template_attribute("_hello", template.Template("Hello", None)) + entity.add_template_attribute("_hello", template.Template("Hello", None)) tpl_with_hass = template.Template("Hello", entity.hass) entity.add_template_attribute("_hello", tpl_with_hass) - assert len(entity._template_attrs.get(tpl_with_hass, [])) == 1 + # Because hass is set in `add_template_attribute`, both templates match `tpl_with_hass` + assert len(entity._template_attrs.get(tpl_with_hass, [])) == 2 diff --git a/tests/components/template/test_trigger.py b/tests/components/template/test_trigger.py index a131f5f606b..98b03be3c64 100644 --- a/tests/components/template/test_trigger.py +++ b/tests/components/template/test_trigger.py @@ -48,9 +48,8 @@ def setup_comp(hass: HomeAssistant, calls: list[ServiceCall]) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_if_fires_on_change_bool( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, start_ha, calls: list[ServiceCall] ) -> None: """Test for firing on boolean change.""" assert len(calls) == 0 @@ -272,9 +271,8 @@ async def test_if_fires_on_change_bool( ), ], ) -@pytest.mark.usefixtures("start_ha") async def test_general( - hass: HomeAssistant, call_setup, calls: list[ServiceCall] + hass: HomeAssistant, call_setup, start_ha, calls: list[ServiceCall] ) -> None: """Test for firing on change.""" assert len(calls) == 
0
@@ -310,9 +308,8 @@ async def test_general(
         ),
     ],
 )
-@pytest.mark.usefixtures("start_ha")
 async def test_if_not_fires_because_fail(
-    hass: HomeAssistant, call_setup, calls: list[ServiceCall]
+    hass: HomeAssistant, call_setup, start_ha, calls: list[ServiceCall]
 ) -> None:
     """Test for not firing after TemplateError."""
     assert len(calls) == 0
@@ -349,9 +346,8 @@ async def test_if_not_fires_because_fail(
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
 async def test_if_fires_on_change_with_template_advanced(
-    hass: HomeAssistant, calls: list[ServiceCall]
+    hass: HomeAssistant, start_ha, calls: list[ServiceCall]
 ) -> None:
     """Test for firing on change with template advanced."""
     context = Context()
@@ -382,8 +378,9 @@ async def test_if_fires_on_change_with_template_advanced(
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
-async def test_if_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None:
+async def test_if_action(
+    hass: HomeAssistant, start_ha, calls: list[ServiceCall]
+) -> None:
     """Test for firing if action."""
     # Condition is not true yet
     hass.bus.async_fire("test_event")
@@ -413,9 +410,8 @@ async def test_if_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None:
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
 async def test_if_fires_on_change_with_bad_template(
-    hass: HomeAssistant, calls: list[ServiceCall]
+    hass: HomeAssistant, start_ha, calls: list[ServiceCall]
 ) -> None:
     """Test for firing on change with bad template."""
     assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE
@@ -451,9 +447,8 @@ async def test_if_fires_on_change_with_bad_template(
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
 async def test_wait_template_with_trigger(
-    hass: HomeAssistant, calls: list[ServiceCall]
+    hass: HomeAssistant, start_ha, calls: list[ServiceCall]
 ) -> None:
     """Test using wait template with 'trigger.entity_id'."""
     await hass.async_block_till_done()
@@ -524,9 +519,8 @@ async def test_if_fires_on_change_with_for(
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
 async def test_if_fires_on_change_with_for_advanced(
-    hass: HomeAssistant, calls: list[ServiceCall]
+    hass: HomeAssistant, start_ha, calls: list[ServiceCall]
 ) -> None:
     """Test for firing on change with for advanced."""
     context = Context()
@@ -569,9 +563,8 @@ async def test_if_fires_on_change_with_for_advanced(
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
 async def test_if_fires_on_change_with_for_0_advanced(
-    hass: HomeAssistant, calls: list[ServiceCall]
+    hass: HomeAssistant, start_ha, calls: list[ServiceCall]
 ) -> None:
     """Test for firing on change with for: 0 advanced."""
     context = Context()
@@ -611,9 +604,8 @@ async def test_if_fires_on_change_with_for_0_advanced(
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
 async def test_if_fires_on_change_with_for_2(
-    hass: HomeAssistant, calls: list[ServiceCall]
+    hass: HomeAssistant, start_ha, calls: list[ServiceCall]
 ) -> None:
     """Test for firing on change with for."""
     context = Context()
@@ -643,9 +635,8 @@ async def test_if_fires_on_change_with_for_2(
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
 async def test_if_not_fires_on_change_with_for(
-    hass: HomeAssistant, calls: list[ServiceCall]
+    hass: HomeAssistant, start_ha, calls: list[ServiceCall]
 ) -> None:
     """Test for firing on change with for."""
     hass.states.async_set("test.entity", "world")
@@ -678,9 +669,8 @@ async def test_if_not_fires_on_change_with_for(
         },
     ],
 )
-@pytest.mark.usefixtures("start_ha")
 async def test_if_not_fires_when_turned_off_with_for(
-    hass: HomeAssistant, calls:
list[ServiceCall] + hass: HomeAssistant, start_ha, calls: list[ServiceCall] ) -> None: """Test for firing on change with for.""" hass.states.async_set("test.entity", "world") @@ -717,9 +707,8 @@ async def test_if_not_fires_when_turned_off_with_for( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_if_fires_on_change_with_for_template_1( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, start_ha, calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", "world") @@ -746,9 +735,8 @@ async def test_if_fires_on_change_with_for_template_1( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_if_fires_on_change_with_for_template_2( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, start_ha, calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", "world") @@ -775,9 +763,8 @@ async def test_if_fires_on_change_with_for_template_2( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_if_fires_on_change_with_for_template_3( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, start_ha, calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", "world") @@ -804,9 +791,8 @@ async def test_if_fires_on_change_with_for_template_3( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_invalid_for_template_1( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, start_ha, calls: list[ServiceCall] ) -> None: """Test for invalid for template.""" with mock.patch.object(template_trigger, "_LOGGER") as mock_logger: diff --git a/tests/components/template/test_vacuum.py b/tests/components/template/test_vacuum.py index ff428c5d4b4..8b1d082a62b 100644 --- a/tests/components/template/test_vacuum.py +++ b/tests/components/template/test_vacuum.py @@ -94,8 +94,9 @@ _BATTERY_LEVEL_INPUT_NUMBER = "input_number.battery_level" ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_valid_configs(hass: HomeAssistant, count, parm1, parm2) -> None: +async def test_valid_configs( + hass: HomeAssistant, count, parm1, parm2, start_ha +) -> None: """Test: configs.""" assert len(hass.states.async_all("vacuum")) == count _verify(hass, parm1, parm2) @@ -117,8 +118,7 @@ async def test_valid_configs(hass: HomeAssistant, count, parm1, parm2) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_invalid_configs(hass: HomeAssistant, count) -> None: +async def test_invalid_configs(hass: HomeAssistant, count, start_ha) -> None: """Test: configs.""" assert len(hass.states.async_all("vacuum")) == count @@ -144,8 +144,7 @@ async def test_invalid_configs(hass: HomeAssistant, count) -> None: ) ], ) -@pytest.mark.usefixtures("start_ha") -async def test_templates_with_entities(hass: HomeAssistant) -> None: +async def test_templates_with_entities(hass: HomeAssistant, start_ha) -> None: """Test templates with values from other entities.""" _verify(hass, STATE_UNKNOWN, None) @@ -175,8 +174,7 @@ async def test_templates_with_entities(hass: HomeAssistant) -> None: ) ], ) -@pytest.mark.usefixtures("start_ha") -async def test_available_template_with_entities(hass: HomeAssistant) -> None: +async def test_available_template_with_entities(hass: HomeAssistant, start_ha) -> None: """Test availability templates with values from other entities.""" # When template returns true.. 
@@ -214,9 +212,8 @@ async def test_available_template_with_entities(hass: HomeAssistant) -> None: ) ], ) -@pytest.mark.usefixtures("start_ha") async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, caplog_setup_text + hass: HomeAssistant, start_ha, caplog_setup_text ) -> None: """Test that an invalid availability keeps the device available.""" assert hass.states.get("vacuum.test_template_vacuum") != STATE_UNAVAILABLE @@ -246,8 +243,7 @@ async def test_invalid_availability_template_keeps_component_available( ) ], ) -@pytest.mark.usefixtures("start_ha") -async def test_attribute_templates(hass: HomeAssistant) -> None: +async def test_attribute_templates(hass: HomeAssistant, start_ha) -> None: """Test attribute_templates template.""" state = hass.states.get("vacuum.test_template_vacuum") assert state.attributes["test_attribute"] == "It ." @@ -282,9 +278,8 @@ async def test_attribute_templates(hass: HomeAssistant) -> None: ) ], ) -@pytest.mark.usefixtures("start_ha") async def test_invalid_attribute_template( - hass: HomeAssistant, caplog_setup_text + hass: HomeAssistant, start_ha, caplog_setup_text ) -> None: """Test that errors are logged if rendering template fails.""" assert len(hass.states.async_all("vacuum")) == 1 @@ -318,8 +313,7 @@ async def test_invalid_attribute_template( ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_unique_id(hass: HomeAssistant) -> None: +async def test_unique_id(hass: HomeAssistant, start_ha) -> None: """Test unique_id option only creates one vacuum per id.""" assert len(hass.states.async_all("vacuum")) == 1 @@ -490,9 +484,7 @@ async def test_set_invalid_fan_speed( assert hass.states.get(_FAN_SPEED_INPUT_SELECT).state == "high" -def _verify( - hass: HomeAssistant, expected_state: str, expected_battery_level: int -) -> None: +def _verify(hass, expected_state, expected_battery_level): """Verify vacuum's state and speed.""" state = hass.states.get(_TEST_VACUUM) attributes = state.attributes @@ -500,7 +492,7 @@ def _verify( assert attributes.get(ATTR_BATTERY_LEVEL) == expected_battery_level -async def _register_basic_vacuum(hass: HomeAssistant) -> None: +async def _register_basic_vacuum(hass): """Register basic vacuum with only required options for testing.""" with assert_setup_component(1, "input_select"): assert await setup.async_setup_component( @@ -536,7 +528,7 @@ async def _register_basic_vacuum(hass: HomeAssistant) -> None: await hass.async_block_till_done() -async def _register_components(hass: HomeAssistant) -> None: +async def _register_components(hass): """Register basic components for testing.""" with assert_setup_component(2, "input_boolean"): assert await setup.async_setup_component( diff --git a/tests/components/template/test_weather.py b/tests/components/template/test_weather.py index 081028b6f5b..fd7694cfbed 100644 --- a/tests/components/template/test_weather.py +++ b/tests/components/template/test_weather.py @@ -23,6 +23,7 @@ from homeassistant.components.weather import ( ) from homeassistant.const import ATTR_ATTRIBUTION, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import Context, HomeAssistant, State +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.restore_state import STORAGE_KEY as RESTORE_STATE_KEY from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -64,8 +65,7 @@ ATTR_FORECAST = "forecast" }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_state_text(hass: HomeAssistant) -> 
None: +async def test_template_state_text(hass: HomeAssistant, start_ha) -> None: """Test the state text of a template.""" for attr, v_attr, value in ( ( @@ -117,9 +117,8 @@ async def test_template_state_text(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_forecasts( - hass: HomeAssistant, snapshot: SnapshotAssertion, service: str + hass: HomeAssistant, start_ha, snapshot: SnapshotAssertion, service: str ) -> None: """Test forecast service.""" for attr, _v_attr, value in ( @@ -242,9 +241,9 @@ async def test_forecasts( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_forecast_invalid( hass: HomeAssistant, + start_ha, caplog: pytest.LogCaptureFixture, service: str, expected: dict[str, Any], @@ -324,9 +323,9 @@ async def test_forecast_invalid( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_forecast_invalid_is_daytime_missing_in_twice_daily( hass: HomeAssistant, + start_ha, caplog: pytest.LogCaptureFixture, service: str, expected: dict[str, Any], @@ -392,9 +391,9 @@ async def test_forecast_invalid_is_daytime_missing_in_twice_daily( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_forecast_invalid_datetime_missing( hass: HomeAssistant, + start_ha, caplog: pytest.LogCaptureFixture, service: str, expected: dict[str, Any], @@ -459,9 +458,8 @@ async def test_forecast_invalid_datetime_missing( }, ], ) -@pytest.mark.usefixtures("start_ha") async def test_forecast_format_error( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, service: str + hass: HomeAssistant, start_ha, caplog: pytest.LogCaptureFixture, service: str ) -> None: """Test forecast service invalid on incorrect format.""" for attr, _v_attr, value in ( @@ -651,8 +649,9 @@ async def test_trigger_entity_restore_state( }, ], ) -@pytest.mark.usefixtures("start_ha") -async def test_trigger_action(hass: HomeAssistant) -> None: +async def test_trigger_action( + hass: HomeAssistant, start_ha, entity_registry: er.EntityRegistry +) -> None: """Test trigger entity with an action works.""" state = hass.states.get("weather.hello_name") assert state is not None @@ -721,10 +720,11 @@ async def test_trigger_action(hass: HomeAssistant) -> None: }, ], ) -@pytest.mark.usefixtures("start_ha") @pytest.mark.freeze_time("2023-10-19 13:50:05") async def test_trigger_weather_services( hass: HomeAssistant, + start_ha, + entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, service: str, ) -> None: diff --git a/tests/components/tesla_fleet/__init__.py b/tests/components/tesla_fleet/__init__.py deleted file mode 100644 index 78159402bff..00000000000 --- a/tests/components/tesla_fleet/__init__.py +++ /dev/null @@ -1,74 +0,0 @@ -"""Tests for the Tesla Fleet integration.""" - -from unittest.mock import patch - -from syrupy import SnapshotAssertion - -from homeassistant.components.application_credentials import ( - ClientCredential, - async_import_client_credential, -) -from homeassistant.components.tesla_fleet.const import CLIENT_ID, DOMAIN -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry - - -async def setup_platform( - hass: HomeAssistant, - config_entry: MockConfigEntry, - platforms: list[Platform] | None = None, -) -> None: - """Set up the Tesla Fleet platform.""" - - assert await async_setup_component(hass, "application_credentials", {}) - await async_import_client_credential( 
- hass, - DOMAIN, - ClientCredential(CLIENT_ID, "", "Home Assistant"), - DOMAIN, - ) - - config_entry.add_to_hass(hass) - - if platforms is None: - await hass.config_entries.async_setup(config_entry.entry_id) - else: - with patch("homeassistant.components.tesla_fleet.PLATFORMS", platforms): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - -def assert_entities( - hass: HomeAssistant, - entry_id: str, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test that all entities match their snapshot.""" - - entity_entries = er.async_entries_for_config_entry(entity_registry, entry_id) - - assert entity_entries - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") - - -def assert_entities_alt( - hass: HomeAssistant, - entry_id: str, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test that all entities match their alt snapshot.""" - entity_entries = er.async_entries_for_config_entry(entity_registry, entry_id) - - assert entity_entries - for entity_entry in entity_entries: - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-statealt") diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py deleted file mode 100644 index 0dc5d87984f..00000000000 --- a/tests/components/tesla_fleet/conftest.py +++ /dev/null @@ -1,179 +0,0 @@ -"""Fixtures for Tessie.""" - -from __future__ import annotations - -from collections.abc import Generator -from copy import deepcopy -import time -from unittest.mock import AsyncMock, patch - -import jwt -import pytest -from tesla_fleet_api.const import Scope - -from homeassistant.components.tesla_fleet.const import DOMAIN, SCOPES - -from .const import ( - COMMAND_OK, - LIVE_STATUS, - PRODUCTS, - SITE_INFO, - VEHICLE_DATA, - VEHICLE_ONLINE, -) - -from tests.common import MockConfigEntry - -UID = "abc-123" - - -@pytest.fixture(name="expires_at") -def mock_expires_at() -> int: - """Fixture to set the oauth token expiration time.""" - return time.time() + 3600 - - -def create_config_entry(expires_at: int, scopes: list[Scope]) -> MockConfigEntry: - """Create Tesla Fleet entry in Home Assistant.""" - access_token = jwt.encode( - { - "sub": UID, - "aud": [], - "scp": scopes, - "ou_code": "NA", - }, - key="", - algorithm="none", - ) - - return MockConfigEntry( - domain=DOMAIN, - title=UID, - unique_id=UID, - data={ - "auth_implementation": DOMAIN, - "token": { - "status": 0, - "userid": UID, - "access_token": access_token, - "refresh_token": "mock-refresh-token", - "expires_at": expires_at, - "scope": ",".join(scopes), - }, - }, - ) - - -@pytest.fixture -def normal_config_entry(expires_at: int) -> MockConfigEntry: - """Create Tesla Fleet entry in Home Assistant.""" - return create_config_entry(expires_at, SCOPES) - - -@pytest.fixture -def noscope_config_entry(expires_at: int) -> MockConfigEntry: - """Create Tesla Fleet entry in Home Assistant without scopes.""" - return create_config_entry(expires_at, [Scope.OPENID, Scope.OFFLINE_ACCESS]) - - -@pytest.fixture -def readonly_config_entry(expires_at: int) -> MockConfigEntry: - """Create Tesla Fleet entry in Home Assistant without scopes.""" - return create_config_entry( - expires_at, - [ - Scope.OPENID, - Scope.OFFLINE_ACCESS, 
- Scope.VEHICLE_DEVICE_DATA, - Scope.ENERGY_DEVICE_DATA, - ], - ) - - -@pytest.fixture(autouse=True) -def mock_products() -> Generator[AsyncMock]: - """Mock Tesla Fleet Api products method.""" - with patch( - "homeassistant.components.tesla_fleet.TeslaFleetApi.products", - return_value=PRODUCTS, - ) as mock_products: - yield mock_products - - -@pytest.fixture(autouse=True) -def mock_vehicle_state() -> Generator[AsyncMock]: - """Mock Tesla Fleet API Vehicle Specific vehicle method.""" - with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.vehicle", - return_value=VEHICLE_ONLINE, - ) as mock_vehicle: - yield mock_vehicle - - -@pytest.fixture(autouse=True) -def mock_vehicle_data() -> Generator[AsyncMock]: - """Mock Tesla Fleet API Vehicle Specific vehicle_data method.""" - with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.vehicle_data", - return_value=VEHICLE_DATA, - ) as mock_vehicle_data: - yield mock_vehicle_data - - -@pytest.fixture(autouse=True) -def mock_wake_up() -> Generator[AsyncMock]: - """Mock Tesla Fleet API Vehicle Specific wake_up method.""" - with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.wake_up", - return_value=VEHICLE_ONLINE, - ) as mock_wake_up: - yield mock_wake_up - - -@pytest.fixture(autouse=True) -def mock_live_status() -> Generator[AsyncMock]: - """Mock Tesla Fleet API Energy Specific live_status method.""" - with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.live_status", - side_effect=lambda: deepcopy(LIVE_STATUS), - ) as mock_live_status: - yield mock_live_status - - -@pytest.fixture(autouse=True) -def mock_site_info() -> Generator[AsyncMock]: - """Mock Tesla Fleet API Energy Specific site_info method.""" - with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.site_info", - side_effect=lambda: deepcopy(SITE_INFO), - ) as mock_live_status: - yield mock_live_status - - -@pytest.fixture -def mock_find_server() -> Generator[AsyncMock]: - """Mock Tesla Fleet find server method.""" - with patch( - "homeassistant.components.tesla_fleet.TeslaFleetApi.find_server", - ) as mock_find_server: - yield mock_find_server - - -@pytest.fixture -def mock_request(): - """Mock all Tesla Fleet API requests.""" - with patch( - "homeassistant.components.tesla_fleet.TeslaFleetApi._request", - return_value=COMMAND_OK, - ) as mock_request: - yield mock_request - - -@pytest.fixture(autouse=True) -def mock_signed_command() -> Generator[AsyncMock]: - """Mock Tesla Fleet Api signed_command method.""" - with patch( - "homeassistant.components.tesla_fleet.VehicleSigned.signed_command", - return_value=COMMAND_OK, - ) as mock_signed_command: - yield mock_signed_command diff --git a/tests/components/tesla_fleet/const.py b/tests/components/tesla_fleet/const.py deleted file mode 100644 index 76b4ae20092..00000000000 --- a/tests/components/tesla_fleet/const.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Constants for the Tesla Fleet tests.""" - -from homeassistant.components.tesla_fleet.const import DOMAIN, TeslaFleetState - -from tests.common import load_json_object_fixture - -VEHICLE_ONLINE = {"response": {"state": TeslaFleetState.ONLINE}, "error": None} -VEHICLE_ASLEEP = {"response": {"state": TeslaFleetState.ASLEEP}, "error": None} - -PRODUCTS = load_json_object_fixture("products.json", DOMAIN) -VEHICLE_DATA = load_json_object_fixture("vehicle_data.json", DOMAIN) -VEHICLE_DATA_ALT = load_json_object_fixture("vehicle_data_alt.json", DOMAIN) -LIVE_STATUS = load_json_object_fixture("live_status.json", DOMAIN) -SITE_INFO = 
load_json_object_fixture("site_info.json", DOMAIN) - -COMMAND_OK = {"response": {"result": True, "reason": ""}} -COMMAND_REASON = {"response": {"result": False, "reason": "already closed"}} -COMMAND_IGNORED_REASON = {"response": {"result": False, "reason": "already_set"}} -COMMAND_NOREASON = {"response": {"result": False}} # Unexpected -COMMAND_ERROR = { - "response": None, - "error": "vehicle unavailable: vehicle is offline or asleep", - "error_description": "", -} -COMMAND_NOERROR = {"answer": 42} -COMMAND_ERRORS = (COMMAND_REASON, COMMAND_NOREASON, COMMAND_ERROR, COMMAND_NOERROR) - -RESPONSE_OK = {"response": {}, "error": None} diff --git a/tests/components/tesla_fleet/fixtures/live_status.json b/tests/components/tesla_fleet/fixtures/live_status.json deleted file mode 100644 index 486f9f4fadd..00000000000 --- a/tests/components/tesla_fleet/fixtures/live_status.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "response": { - "solar_power": 1185, - "energy_left": 38896.47368421053, - "total_pack_energy": 40727, - "percentage_charged": 95.50537403739663, - "backup_capable": true, - "battery_power": 5060, - "load_power": 6245, - "grid_status": "Active", - "grid_services_active": false, - "grid_power": 0, - "grid_services_power": 0, - "generator_power": 0, - "island_status": "on_grid", - "storm_mode_active": false, - "timestamp": "2024-01-01T00:00:00+00:00", - "wall_connectors": [ - { - "din": "abd-123", - "wall_connector_state": 2, - "wall_connector_fault_state": 2, - "wall_connector_power": 0 - }, - { - "din": "bcd-234", - "wall_connector_state": 2, - "wall_connector_fault_state": 2, - "wall_connector_power": 0 - } - ] - } -} diff --git a/tests/components/tesla_fleet/fixtures/products.json b/tests/components/tesla_fleet/fixtures/products.json deleted file mode 100644 index 8da921a33f4..00000000000 --- a/tests/components/tesla_fleet/fixtures/products.json +++ /dev/null @@ -1,131 +0,0 @@ -{ - "response": [ - { - "id": 1234, - "user_id": 1234, - "vehicle_id": 1234, - "vin": "LRWXF7EK4KC700000", - "color": null, - "access_type": "OWNER", - "display_name": "Test", - "option_codes": null, - "cached_data": null, - "granular_access": { "hide_private": false }, - "tokens": ["abc", "def"], - "state": "asleep", - "in_service": false, - "id_s": "1234", - "calendar_enabled": true, - "api_version": 71, - "backseat_token": null, - "backseat_token_updated_at": null, - "ble_autopair_enrolled": false, - "vehicle_config": { - "aux_park_lamps": "Eu", - "badge_version": 1, - "can_accept_navigation_requests": true, - "can_actuate_trunks": true, - "car_special_type": "base", - "car_type": "model3", - "charge_port_type": "CCS", - "cop_user_set_temp_supported": false, - "dashcam_clip_save_supported": true, - "default_charge_to_max": false, - "driver_assist": "TeslaAP3", - "ece_restrictions": false, - "efficiency_package": "M32021", - "eu_vehicle": true, - "exterior_color": "DeepBlue", - "exterior_trim": "Black", - "exterior_trim_override": "", - "has_air_suspension": false, - "has_ludicrous_mode": false, - "has_seat_cooling": false, - "headlamp_type": "Global", - "interior_trim_type": "White2", - "key_version": 2, - "motorized_charge_port": true, - "paint_color_override": "0,9,25,0.7,0.04", - "performance_package": "Base", - "plg": true, - "pws": true, - "rear_drive_unit": "PM216MOSFET", - "rear_seat_heaters": 1, - "rear_seat_type": 0, - "rhd": true, - "roof_color": "RoofColorGlass", - "seat_type": null, - "spoiler_type": "None", - "sun_roof_installed": null, - "supports_qr_pairing": false, - "third_row_seats": "None", - 
"timestamp": 1705701487912, - "trim_badging": "74d", - "use_range_badging": true, - "utc_offset": 36000, - "webcam_selfie_supported": true, - "webcam_supported": true, - "wheel_type": "Pinwheel18CapKit" - }, - "command_signing": "allowed", - "release_notes_supported": true - }, - { - "energy_site_id": 123456, - "resource_type": "battery", - "site_name": "Energy Site", - "id": "ABC123", - "gateway_id": "ABC123", - "asset_site_id": "c0ffee", - "warp_site_number": "GA123456", - "energy_left": 23286.105263157893, - "total_pack_energy": 40804, - "percentage_charged": 57.068192488868476, - "battery_type": "ac_powerwall", - "backup_capable": true, - "battery_power": 14990, - "go_off_grid_test_banner_enabled": null, - "storm_mode_enabled": true, - "powerwall_onboarding_settings_set": true, - "powerwall_tesla_electric_interested_in": null, - "vpp_tour_enabled": null, - "sync_grid_alert_enabled": true, - "breaker_alert_enabled": true, - "components": { - "battery": true, - "battery_type": "ac_powerwall", - "solar": true, - "solar_type": "pv_panel", - "grid": true, - "load_meter": true, - "market_type": "residential", - "wall_connectors": [ - { - "device_id": "abc-123", - "din": "123-abc", - "is_active": true - }, - { - "device_id": "bcd-234", - "din": "234-bcd", - "is_active": true - } - ] - }, - "features": { - "rate_plan_manager_no_pricing_constraint": true - } - }, - { - "energy_site_id": 98765, - "components": { - "battery": false, - "solar": false, - "grid": false, - "load_meter": false, - "market_type": "residential" - } - } - ], - "count": 3 -} diff --git a/tests/components/tesla_fleet/fixtures/site_info.json b/tests/components/tesla_fleet/fixtures/site_info.json deleted file mode 100644 index 60958bbabbb..00000000000 --- a/tests/components/tesla_fleet/fixtures/site_info.json +++ /dev/null @@ -1,127 +0,0 @@ -{ - "response": { - "id": "1233-abcd", - "site_name": "Site", - "backup_reserve_percent": 0, - "default_real_mode": "self_consumption", - "installation_date": "2022-01-01T00:00:00+00:00", - "user_settings": { - "go_off_grid_test_banner_enabled": false, - "storm_mode_enabled": true, - "powerwall_onboarding_settings_set": true, - "powerwall_tesla_electric_interested_in": false, - "vpp_tour_enabled": true, - "sync_grid_alert_enabled": true, - "breaker_alert_enabled": false - }, - "components": { - "solar": true, - "solar_type": "pv_panel", - "battery": true, - "grid": true, - "backup": true, - "gateway": "teg", - "load_meter": true, - "tou_capable": true, - "storm_mode_capable": true, - "flex_energy_request_capable": false, - "car_charging_data_supported": false, - "off_grid_vehicle_charging_reserve_supported": true, - "vehicle_charging_performance_view_enabled": false, - "vehicle_charging_solar_offset_view_enabled": false, - "battery_solar_offset_view_enabled": true, - "solar_value_enabled": true, - "energy_value_header": "Energy Value", - "energy_value_subheader": "Estimated Value", - "energy_service_self_scheduling_enabled": true, - "show_grid_import_battery_source_cards": true, - "set_islanding_mode_enabled": true, - "wifi_commissioning_enabled": true, - "backup_time_remaining_enabled": true, - "battery_type": "ac_powerwall", - "configurable": true, - "grid_services_enabled": false, - "gateways": [ - { - "device_id": "gateway-id", - "din": "gateway-din", - "serial_number": "CN00000000J50D", - "part_number": "1152100-14-J", - "part_type": 10, - "part_name": "Tesla Backup Gateway 2", - "is_active": true, - "site_id": "1234-abcd", - "firmware_version": "24.4.0 0fe780c9", - 
"updated_datetime": "2024-05-14T00:00:00.000Z" - } - ], - "batteries": [ - { - "device_id": "battery-1-id", - "din": "battery-1-din", - "serial_number": "TG000000001DA5", - "part_number": "3012170-10-B", - "part_type": 2, - "part_name": "Powerwall 2", - "nameplate_max_charge_power": 5000, - "nameplate_max_discharge_power": 5000, - "nameplate_energy": 13500 - }, - { - "device_id": "battery-2-id", - "din": "battery-2-din", - "serial_number": "TG000000002DA5", - "part_number": "3012170-05-C", - "part_type": 2, - "part_name": "Powerwall 2", - "nameplate_max_charge_power": 5000, - "nameplate_max_discharge_power": 5000, - "nameplate_energy": 13500 - } - ], - "wall_connectors": [ - { - "device_id": "123abc", - "din": "abd-123", - "part_name": "Gen 3 Wall Connector", - "is_active": true - }, - { - "device_id": "234bcd", - "din": "bcd-234", - "part_name": "Gen 3 Wall Connector", - "is_active": true - } - ], - "disallow_charge_from_grid_with_solar_installed": true, - "customer_preferred_export_rule": "pv_only", - "net_meter_mode": "battery_ok", - "system_alerts_enabled": true - }, - "version": "23.44.0 eb113390", - "battery_count": 2, - "tou_settings": { - "optimization_strategy": "economics", - "schedule": [ - { - "target": "off_peak", - "week_days": [1, 0], - "start_seconds": 0, - "end_seconds": 3600 - }, - { - "target": "peak", - "week_days": [1, 0], - "start_seconds": 3600, - "end_seconds": 0 - } - ] - }, - "nameplate_power": 15000, - "nameplate_energy": 40500, - "installation_time_zone": "", - "max_site_meter_power_ac": 1000000000, - "min_site_meter_power_ac": -1000000000, - "vpp_backup_reserve_percent": 0 - } -} diff --git a/tests/components/tesla_fleet/fixtures/vehicle_data.json b/tests/components/tesla_fleet/fixtures/vehicle_data.json deleted file mode 100644 index d99bc8de5a8..00000000000 --- a/tests/components/tesla_fleet/fixtures/vehicle_data.json +++ /dev/null @@ -1,283 +0,0 @@ -{ - "response": { - "id": 1234, - "user_id": 1234, - "vehicle_id": 1234, - "vin": "LRWXF7EK4KC700000", - "color": null, - "access_type": "OWNER", - "granular_access": { - "hide_private": false - }, - "tokens": ["abc", "def"], - "state": "online", - "in_service": false, - "id_s": "1234", - "calendar_enabled": true, - "api_version": 71, - "backseat_token": null, - "backseat_token_updated_at": null, - "ble_autopair_enrolled": false, - "charge_state": { - "battery_heater_on": false, - "battery_level": 77, - "battery_range": 266.87, - "charge_amps": 16, - "charge_current_request": 16, - "charge_current_request_max": 16, - "charge_enable_request": true, - "charge_energy_added": 0, - "charge_limit_soc": 80, - "charge_limit_soc_max": 100, - "charge_limit_soc_min": 50, - "charge_limit_soc_std": 80, - "charge_miles_added_ideal": 0, - "charge_miles_added_rated": 0, - "charge_port_cold_weather_mode": false, - "charge_port_color": "", - "charge_port_door_open": true, - "charge_port_latch": "Engaged", - "charge_rate": 0, - "charger_actual_current": 0, - "charger_phases": null, - "charger_pilot_current": 16, - "charger_power": 0, - "charger_voltage": 2, - "charging_state": "Stopped", - "conn_charge_cable": "IEC", - "est_battery_range": 275.04, - "fast_charger_brand": "", - "fast_charger_present": false, - "fast_charger_type": "ACSingleWireCAN", - "ideal_battery_range": 266.87, - "max_range_charge_counter": 0, - "minutes_to_full_charge": 0, - "not_enough_power_to_heat": null, - "off_peak_charging_enabled": false, - "off_peak_charging_times": "all_week", - "off_peak_hours_end_time": 900, - "preconditioning_enabled": false, - 
"preconditioning_times": "all_week", - "scheduled_charging_mode": "Off", - "scheduled_charging_pending": false, - "scheduled_charging_start_time": null, - "scheduled_charging_start_time_app": 600, - "scheduled_departure_time": 1704837600, - "scheduled_departure_time_minutes": 480, - "supercharger_session_trip_planner": false, - "time_to_full_charge": 0, - "timestamp": 1705707520649, - "trip_charging": false, - "usable_battery_level": 77, - "user_charge_enable_request": null - }, - "climate_state": { - "allow_cabin_overheat_protection": true, - "auto_seat_climate_left": true, - "auto_seat_climate_right": true, - "auto_steering_wheel_heat": false, - "battery_heater": false, - "battery_heater_no_power": null, - "cabin_overheat_protection": "On", - "cabin_overheat_protection_actively_cooling": false, - "climate_keeper_mode": "keep", - "cop_activation_temperature": "High", - "defrost_mode": 0, - "driver_temp_setting": 22, - "fan_status": 0, - "hvac_auto_request": "On", - "inside_temp": 29.8, - "is_auto_conditioning_on": false, - "is_climate_on": true, - "is_front_defroster_on": false, - "is_preconditioning": false, - "is_rear_defroster_on": false, - "left_temp_direction": 251, - "max_avail_temp": 28, - "min_avail_temp": 15, - "outside_temp": 30, - "passenger_temp_setting": 22, - "remote_heater_control_enabled": false, - "right_temp_direction": 251, - "seat_heater_left": 0, - "seat_heater_rear_center": 0, - "seat_heater_rear_left": 0, - "seat_heater_rear_right": 0, - "seat_heater_right": 0, - "side_mirror_heaters": false, - "steering_wheel_heat_level": 0, - "steering_wheel_heater": false, - "supports_fan_only_cabin_overheat_protection": true, - "timestamp": 1705707520649, - "wiper_blade_heater": false - }, - "drive_state": { - "active_route_destination": "Home", - "active_route_latitude": 30.2226265, - "active_route_longitude": -97.6236871, - "active_route_miles_to_arrival": 0.039491, - "active_route_minutes_to_arrival": 0.103577, - "active_route_traffic_minutes_delay": 0, - "gps_as_of": 1701129612, - "heading": 185, - "latitude": -30.222626, - "longitude": -97.6236871, - "native_latitude": -30.222626, - "native_location_supported": 1, - "native_longitude": -97.6236871, - "native_type": "wgs", - "power": -7, - "shift_state": null, - "speed": null, - "timestamp": 1705707520649 - }, - "gui_settings": { - "gui_24_hour_time": false, - "gui_charge_rate_units": "kW", - "gui_distance_units": "km/hr", - "gui_range_display": "Rated", - "gui_temperature_units": "C", - "gui_tirepressure_units": "Psi", - "show_range_units": false, - "timestamp": 1705707520649 - }, - "vehicle_config": { - "aux_park_lamps": "Eu", - "badge_version": 1, - "can_accept_navigation_requests": true, - "can_actuate_trunks": true, - "car_special_type": "base", - "car_type": "model3", - "charge_port_type": "CCS", - "cop_user_set_temp_supported": true, - "dashcam_clip_save_supported": true, - "default_charge_to_max": false, - "driver_assist": "TeslaAP3", - "ece_restrictions": false, - "efficiency_package": "M32021", - "eu_vehicle": true, - "exterior_color": "DeepBlue", - "exterior_trim": "Black", - "exterior_trim_override": "", - "has_air_suspension": false, - "has_ludicrous_mode": false, - "has_seat_cooling": false, - "headlamp_type": "Global", - "interior_trim_type": "White2", - "key_version": 2, - "motorized_charge_port": true, - "paint_color_override": "0,9,25,0.7,0.04", - "performance_package": "Base", - "plg": true, - "pws": true, - "rear_drive_unit": "PM216MOSFET", - "rear_seat_heaters": 1, - "rear_seat_type": 0, - "rhd": true, - 
"roof_color": "RoofColorGlass", - "seat_type": null, - "spoiler_type": "None", - "sun_roof_installed": true, - "supports_qr_pairing": false, - "third_row_seats": "None", - "timestamp": 1705707520649, - "trim_badging": "74d", - "use_range_badging": true, - "utc_offset": 36000, - "webcam_selfie_supported": true, - "webcam_supported": true, - "wheel_type": "Pinwheel18CapKit" - }, - "vehicle_state": { - "api_version": 71, - "autopark_state_v2": "unavailable", - "calendar_supported": true, - "car_version": "2023.44.30.8 06f534d46010", - "center_display_state": 0, - "dashcam_clip_save_available": true, - "dashcam_state": "Recording", - "df": 0, - "dr": 0, - "fd_window": 0, - "feature_bitmask": "fbdffbff,187f", - "fp_window": 0, - "ft": 0, - "is_user_present": false, - "locked": false, - "media_info": { - "a2dp_source_name": "Pixel 8 Pro", - "audio_volume": 1.6667, - "audio_volume_increment": 0.333333, - "audio_volume_max": 10.333333, - "media_playback_status": "Playing", - "now_playing_album": "Elon Musk", - "now_playing_artist": "Walter Isaacson", - "now_playing_duration": 651000, - "now_playing_elapsed": 1000, - "now_playing_source": "Audible", - "now_playing_station": "Elon Musk", - "now_playing_title": "Chapter 51: Cybertruck: Tesla, 2018–2019" - }, - "media_state": { - "remote_control_enabled": true - }, - "notifications_supported": true, - "odometer": 6481.019282, - "parsed_calendar_supported": true, - "pf": 0, - "pr": 0, - "rd_window": 0, - "remote_start": false, - "remote_start_enabled": true, - "remote_start_supported": true, - "rp_window": 0, - "rt": 0, - "santa_mode": 0, - "sentry_mode": false, - "sentry_mode_available": true, - "service_mode": false, - "service_mode_plus": false, - "software_update": { - "download_perc": 100, - "expected_duration_sec": 2700, - "install_perc": 1, - "status": "available", - "version": "2024.12.0.0" - }, - "speed_limit_mode": { - "active": false, - "current_limit_mph": 69, - "max_limit_mph": 120, - "min_limit_mph": 50, - "pin_code_set": true - }, - "sun_roof_state": "open", - "vehicle_state_sun_roof_percent_open": 20, - "timestamp": 1705707520649, - "tpms_hard_warning_fl": false, - "tpms_hard_warning_fr": false, - "tpms_hard_warning_rl": false, - "tpms_hard_warning_rr": false, - "tpms_last_seen_pressure_time_fl": 1705700812, - "tpms_last_seen_pressure_time_fr": 1705700793, - "tpms_last_seen_pressure_time_rl": 1705700794, - "tpms_last_seen_pressure_time_rr": 1705700823, - "tpms_pressure_fl": 2.775, - "tpms_pressure_fr": 2.8, - "tpms_pressure_rl": 2.775, - "tpms_pressure_rr": 2.775, - "tpms_rcp_front_value": 2.9, - "tpms_rcp_rear_value": 2.9, - "tpms_soft_warning_fl": false, - "tpms_soft_warning_fr": false, - "tpms_soft_warning_rl": false, - "tpms_soft_warning_rr": false, - "valet_mode": false, - "valet_pin_needed": false, - "vehicle_name": "Test", - "vehicle_self_test_progress": 0, - "vehicle_self_test_requested": false, - "webcam_available": true - } - } -} diff --git a/tests/components/tesla_fleet/fixtures/vehicle_data_alt.json b/tests/components/tesla_fleet/fixtures/vehicle_data_alt.json deleted file mode 100644 index 76416982eba..00000000000 --- a/tests/components/tesla_fleet/fixtures/vehicle_data_alt.json +++ /dev/null @@ -1,279 +0,0 @@ -{ - "response": { - "id": 1234, - "user_id": 1234, - "vehicle_id": 1234, - "vin": "LRWXF7EK4KC700000", - "color": null, - "access_type": "OWNER", - "granular_access": { - "hide_private": false - }, - "tokens": ["abc", "def"], - "state": "online", - "in_service": false, - "id_s": "1234", - "calendar_enabled": true, - 
"api_version": 71, - "backseat_token": null, - "backseat_token_updated_at": null, - "ble_autopair_enrolled": false, - "charge_state": { - "battery_heater_on": true, - "battery_level": 77, - "battery_range": 266.87, - "charge_amps": 16, - "charge_current_request": 16, - "charge_current_request_max": 16, - "charge_enable_request": true, - "charge_energy_added": 0, - "charge_limit_soc": 80, - "charge_limit_soc_max": 100, - "charge_limit_soc_min": 50, - "charge_limit_soc_std": 80, - "charge_miles_added_ideal": 0, - "charge_miles_added_rated": 0, - "charge_port_cold_weather_mode": false, - "charge_port_color": "", - "charge_port_door_open": true, - "charge_port_latch": "Engaged", - "charge_rate": 0, - "charger_actual_current": 0, - "charger_phases": null, - "charger_pilot_current": 16, - "charger_power": 0, - "charger_voltage": 2, - "charging_state": "Stopped", - "conn_charge_cable": "IEC", - "est_battery_range": 275.04, - "fast_charger_brand": "", - "fast_charger_present": false, - "fast_charger_type": "ACSingleWireCAN", - "ideal_battery_range": 266.87, - "max_range_charge_counter": 0, - "minutes_to_full_charge": "bad value", - "not_enough_power_to_heat": null, - "off_peak_charging_enabled": false, - "off_peak_charging_times": "all_week", - "off_peak_hours_end_time": 900, - "preconditioning_enabled": false, - "preconditioning_times": "all_week", - "scheduled_charging_mode": "Off", - "scheduled_charging_pending": false, - "scheduled_charging_start_time": null, - "scheduled_charging_start_time_app": 600, - "scheduled_departure_time": 1704837600, - "scheduled_departure_time_minutes": 480, - "supercharger_session_trip_planner": false, - "time_to_full_charge": null, - "timestamp": null, - "trip_charging": false, - "usable_battery_level": 77, - "user_charge_enable_request": true - }, - "climate_state": { - "allow_cabin_overheat_protection": true, - "auto_seat_climate_left": false, - "auto_seat_climate_right": false, - "auto_steering_wheel_heat": false, - "battery_heater": true, - "battery_heater_no_power": null, - "cabin_overheat_protection": "Off", - "cabin_overheat_protection_actively_cooling": false, - "climate_keeper_mode": "off", - "cop_activation_temperature": "Low", - "defrost_mode": 0, - "driver_temp_setting": 22, - "fan_status": 0, - "hvac_auto_request": "On", - "inside_temp": 29.8, - "is_auto_conditioning_on": false, - "is_climate_on": false, - "is_front_defroster_on": false, - "is_preconditioning": false, - "is_rear_defroster_on": false, - "left_temp_direction": 251, - "max_avail_temp": 28, - "min_avail_temp": 15, - "outside_temp": 30, - "passenger_temp_setting": 22, - "remote_heater_control_enabled": false, - "right_temp_direction": 251, - "seat_heater_left": 0, - "seat_heater_rear_center": 0, - "seat_heater_rear_left": 0, - "seat_heater_rear_right": 0, - "seat_heater_right": 0, - "side_mirror_heaters": false, - "steering_wheel_heat_level": 0, - "steering_wheel_heater": false, - "supports_fan_only_cabin_overheat_protection": true, - "timestamp": 1705707520649, - "wiper_blade_heater": false - }, - "drive_state": { - "active_route_latitude": 30.2226265, - "active_route_longitude": -97.6236871, - "active_route_miles_to_arrival": 0, - "active_route_minutes_to_arrival": 0, - "active_route_traffic_minutes_delay": 0, - "gps_as_of": 1701129612, - "heading": 185, - "latitude": -30.222626, - "longitude": -97.6236871, - "native_latitude": -30.222626, - "native_location_supported": 1, - "native_longitude": -97.6236871, - "native_type": "wgs", - "power": -7, - "shift_state": null, - "speed": null, - 
"timestamp": 1705707520649 - }, - "gui_settings": { - "gui_24_hour_time": false, - "gui_charge_rate_units": "kW", - "gui_distance_units": "km/hr", - "gui_range_display": "Rated", - "gui_temperature_units": "C", - "gui_tirepressure_units": "Psi", - "show_range_units": false, - "timestamp": 1705707520649 - }, - "vehicle_config": { - "aux_park_lamps": "Eu", - "badge_version": 1, - "can_accept_navigation_requests": true, - "can_actuate_trunks": true, - "car_special_type": "base", - "car_type": "model3", - "charge_port_type": "CCS", - "cop_user_set_temp_supported": false, - "dashcam_clip_save_supported": true, - "default_charge_to_max": false, - "driver_assist": "TeslaAP3", - "ece_restrictions": false, - "efficiency_package": "M32021", - "eu_vehicle": true, - "exterior_color": "DeepBlue", - "exterior_trim": "Black", - "exterior_trim_override": "", - "has_air_suspension": false, - "has_ludicrous_mode": false, - "has_seat_cooling": false, - "headlamp_type": "Global", - "interior_trim_type": "White2", - "key_version": 2, - "motorized_charge_port": true, - "paint_color_override": "0,9,25,0.7,0.04", - "performance_package": "Base", - "plg": true, - "pws": true, - "rear_drive_unit": "PM216MOSFET", - "rear_seat_heaters": 1, - "rear_seat_type": 0, - "rhd": true, - "roof_color": "RoofColorGlass", - "seat_type": null, - "spoiler_type": "None", - "sun_roof_installed": null, - "supports_qr_pairing": false, - "third_row_seats": "None", - "timestamp": 1705707520649, - "trim_badging": "74d", - "use_range_badging": true, - "utc_offset": 36000, - "webcam_selfie_supported": true, - "webcam_supported": true, - "wheel_type": "Pinwheel18CapKit" - }, - "vehicle_state": { - "api_version": 71, - "autopark_state_v2": "unavailable", - "calendar_supported": true, - "car_version": "2023.44.30.8 06f534d46010", - "center_display_state": 0, - "dashcam_clip_save_available": true, - "dashcam_state": "Recording", - "df": 0, - "dr": 0, - "fd_window": 1, - "feature_bitmask": "fbdffbff,187f", - "fp_window": 1, - "ft": 1, - "is_user_present": true, - "locked": false, - "media_info": { - "audio_volume": 2.6667, - "audio_volume_increment": 0.333333, - "audio_volume_max": 10.333333, - "media_playback_status": "Stopped", - "now_playing_album": "", - "now_playing_artist": "", - "now_playing_duration": 0, - "now_playing_elapsed": 0, - "now_playing_source": "Spotify", - "now_playing_station": "", - "now_playing_title": "" - }, - "media_state": { - "remote_control_enabled": true - }, - "notifications_supported": true, - "odometer": 6481.019282, - "parsed_calendar_supported": true, - "pf": 0, - "pr": 0, - "rd_window": 1, - "remote_start": false, - "remote_start_enabled": true, - "remote_start_supported": true, - "rp_window": 1, - "rt": 1, - "santa_mode": 0, - "sentry_mode": false, - "sentry_mode_available": true, - "service_mode": false, - "service_mode_plus": false, - "software_update": { - "download_perc": 0, - "expected_duration_sec": 2700, - "install_perc": 1, - "status": "", - "version": " " - }, - "speed_limit_mode": { - "active": false, - "current_limit_mph": 69, - "max_limit_mph": 120, - "min_limit_mph": 50, - "pin_code_set": true - }, - "timestamp": 1705707520649, - "tpms_hard_warning_fl": false, - "tpms_hard_warning_fr": false, - "tpms_hard_warning_rl": false, - "tpms_hard_warning_rr": false, - "tpms_last_seen_pressure_time_fl": 1705700812, - "tpms_last_seen_pressure_time_fr": 1705700793, - "tpms_last_seen_pressure_time_rl": 1705700794, - "tpms_last_seen_pressure_time_rr": 1705700823, - "tpms_pressure_fl": 2.775, - 
"tpms_pressure_fr": 2.8, - "tpms_pressure_rl": 2.775, - "tpms_pressure_rr": 2.775, - "tpms_rcp_front_value": 2.9, - "tpms_rcp_rear_value": 2.9, - "tpms_soft_warning_fl": false, - "tpms_soft_warning_fr": false, - "tpms_soft_warning_rl": false, - "tpms_soft_warning_rr": false, - "valet_mode": false, - "valet_pin_needed": false, - "vehicle_name": "Test", - "vehicle_self_test_progress": 0, - "vehicle_self_test_requested": false, - "webcam_available": true - } - } -} diff --git a/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr b/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr deleted file mode 100644 index 479d647e1c7..00000000000 --- a/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr +++ /dev/null @@ -1,1630 +0,0 @@ -# serializer version: 1 -# name: test_binary_sensor[binary_sensor.energy_site_backup_capable-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.energy_site_backup_capable', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Backup capable', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'backup_capable', - 'unique_id': '123456-backup_capable', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.energy_site_backup_capable-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Backup capable', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_backup_capable', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[binary_sensor.energy_site_grid_services_active-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.energy_site_grid_services_active', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Grid services active', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'grid_services_active', - 'unique_id': '123456-grid_services_active', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.energy_site_grid_services_active-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Grid services active', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_grid_services_active', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.energy_site_grid_services_enabled-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 
'binary_sensor.energy_site_grid_services_enabled', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Grid services enabled', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'components_grid_services_enabled', - 'unique_id': '123456-components_grid_services_enabled', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.energy_site_grid_services_enabled-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Grid services enabled', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.energy_site_storm_watch_active-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.energy_site_storm_watch_active', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Storm watch active', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storm_mode_active', - 'unique_id': '123456-storm_mode_active', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.energy_site_storm_watch_active-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Storm watch active', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_storm_watch_active', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_battery_heater-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_battery_heater', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery heater', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_battery_heater_on', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_heater_on', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_battery_heater-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'heat', - 'friendly_name': 'Test Battery heater', - }), - 'context': , - 'entity_id': 'binary_sensor.test_battery_heater', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_cabin_overheat_protection_actively_cooling-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_cabin_overheat_protection_actively_cooling', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Cabin overheat protection actively cooling', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_cabin_overheat_protection_actively_cooling', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection_actively_cooling', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_cabin_overheat_protection_actively_cooling-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'heat', - 'friendly_name': 'Test Cabin overheat protection actively cooling', - }), - 'context': , - 'entity_id': 'binary_sensor.test_cabin_overheat_protection_actively_cooling', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_charge_cable-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_charge_cable', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charge cable', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_conn_charge_cable', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_conn_charge_cable', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_charge_cable-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Test Charge cable', - }), - 'context': , - 'entity_id': 'binary_sensor.test_charge_cable', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_charger_has_multiple_phases-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.test_charger_has_multiple_phases', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Charger has multiple phases', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_charger_phases', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_phases', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_charger_has_multiple_phases-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Charger has multiple phases', - }), - 'context': , - 'entity_id': 'binary_sensor.test_charger_has_multiple_phases', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 
'state': 'unavailable', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_dashcam-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_dashcam', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Dashcam', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_dashcam_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_dashcam_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_dashcam-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Test Dashcam', - }), - 'context': , - 'entity_id': 'binary_sensor.test_dashcam', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_front_driver_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_front_driver_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front driver door', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_df', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_df', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_front_driver_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Front driver door', - }), - 'context': , - 'entity_id': 'binary_sensor.test_front_driver_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_front_driver_window-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_front_driver_window', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front driver window', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_fd_window', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_fd_window', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_front_driver_window-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Front driver window', - }), - 'context': , - 'entity_id': 'binary_sensor.test_front_driver_window', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 
'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_front_passenger_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_front_passenger_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front passenger door', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_pf', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_pf', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_front_passenger_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Front passenger door', - }), - 'context': , - 'entity_id': 'binary_sensor.test_front_passenger_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_front_passenger_window-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_front_passenger_window', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Front passenger window', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_fp_window', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_fp_window', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_front_passenger_window-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Front passenger window', - }), - 'context': , - 'entity_id': 'binary_sensor.test_front_passenger_window', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_preconditioning-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_preconditioning', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Preconditioning', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_is_preconditioning', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_is_preconditioning', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_preconditioning-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Preconditioning', - }), - 'context': , - 'entity_id': 'binary_sensor.test_preconditioning', - 'last_changed': 
, - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_preconditioning_enabled-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_preconditioning_enabled', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Preconditioning enabled', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_preconditioning_enabled', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_preconditioning_enabled', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_preconditioning_enabled-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Preconditioning enabled', - }), - 'context': , - 'entity_id': 'binary_sensor.test_preconditioning_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_rear_driver_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_rear_driver_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear driver door', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_dr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_dr', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_rear_driver_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Rear driver door', - }), - 'context': , - 'entity_id': 'binary_sensor.test_rear_driver_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_rear_driver_window-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_rear_driver_window', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear driver window', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_rd_window', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rd_window', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_rear_driver_window-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Rear driver window', - }), - 'context': , - 
'entity_id': 'binary_sensor.test_rear_driver_window', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_rear_passenger_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_rear_passenger_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear passenger door', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_pr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_pr', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_rear_passenger_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Rear passenger door', - }), - 'context': , - 'entity_id': 'binary_sensor.test_rear_passenger_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_rear_passenger_window-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_rear_passenger_window', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rear passenger window', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_rp_window', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rp_window', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_rear_passenger_window-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Rear passenger window', - }), - 'context': , - 'entity_id': 'binary_sensor.test_rear_passenger_window', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_scheduled_charging_pending-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_scheduled_charging_pending', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Scheduled charging pending', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_scheduled_charging_pending', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_scheduled_charging_pending', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_scheduled_charging_pending-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Scheduled charging pending', - }), - 'context': , - 'entity_id': 'binary_sensor.test_scheduled_charging_pending', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.test_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Status', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'state', - 'unique_id': 'LRWXF7EK4KC700000-state', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Test Status', - }), - 'context': , - 'entity_id': 'binary_sensor.test_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_front_left-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_left', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Tire pressure warning front left', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_tpms_soft_warning_fl', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_fl', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_front_left-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Test Tire pressure warning front left', - }), - 'context': , - 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_front_right-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_right', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Tire pressure warning front right', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_tpms_soft_warning_fr', - 'unique_id': 
'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_fr', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_front_right-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Test Tire pressure warning front right', - }), - 'context': , - 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_rear_left-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_left', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Tire pressure warning rear left', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_tpms_soft_warning_rl', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_rl', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_rear_left-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Test Tire pressure warning rear left', - }), - 'context': , - 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_rear_right-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_right', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Tire pressure warning rear right', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_tpms_soft_warning_rr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_rr', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_rear_right-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Test Tire pressure warning rear right', - }), - 'context': , - 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_trip_charging-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_trip_charging', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': 
, - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Trip charging', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_trip_charging', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_trip_charging', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_trip_charging-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Trip charging', - }), - 'context': , - 'entity_id': 'binary_sensor.test_trip_charging', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.test_user_present-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.test_user_present', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'User present', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_is_user_present', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_is_user_present', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.test_user_present-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'presence', - 'friendly_name': 'Test User present', - }), - 'context': , - 'entity_id': 'binary_sensor.test_user_present', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.energy_site_backup_capable-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Backup capable', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_backup_capable', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.energy_site_grid_services_active-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Grid services active', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_grid_services_active', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.energy_site_grid_services_enabled-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Grid services enabled', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.energy_site_storm_watch_active-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Storm watch active', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_storm_watch_active', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_battery_heater-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'heat', - 
'friendly_name': 'Test Battery heater', - }), - 'context': , - 'entity_id': 'binary_sensor.test_battery_heater', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_cabin_overheat_protection_actively_cooling-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'heat', - 'friendly_name': 'Test Cabin overheat protection actively cooling', - }), - 'context': , - 'entity_id': 'binary_sensor.test_cabin_overheat_protection_actively_cooling', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_charge_cable-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Test Charge cable', - }), - 'context': , - 'entity_id': 'binary_sensor.test_charge_cable', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_charger_has_multiple_phases-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Charger has multiple phases', - }), - 'context': , - 'entity_id': 'binary_sensor.test_charger_has_multiple_phases', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_dashcam-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Test Dashcam', - }), - 'context': , - 'entity_id': 'binary_sensor.test_dashcam', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_front_driver_door-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Front driver door', - }), - 'context': , - 'entity_id': 'binary_sensor.test_front_driver_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_front_driver_window-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Front driver window', - }), - 'context': , - 'entity_id': 'binary_sensor.test_front_driver_window', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_front_passenger_door-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Front passenger door', - }), - 'context': , - 'entity_id': 'binary_sensor.test_front_passenger_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_front_passenger_window-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Front passenger window', - }), - 'context': , - 'entity_id': 'binary_sensor.test_front_passenger_window', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_preconditioning-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Preconditioning', - }), - 'context': , - 'entity_id': 'binary_sensor.test_preconditioning', - 'last_changed': , - 'last_reported': , - 'last_updated': , 
- 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_preconditioning_enabled-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Preconditioning enabled', - }), - 'context': , - 'entity_id': 'binary_sensor.test_preconditioning_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_rear_driver_door-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Rear driver door', - }), - 'context': , - 'entity_id': 'binary_sensor.test_rear_driver_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_rear_driver_window-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Rear driver window', - }), - 'context': , - 'entity_id': 'binary_sensor.test_rear_driver_window', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_rear_passenger_door-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Rear passenger door', - }), - 'context': , - 'entity_id': 'binary_sensor.test_rear_passenger_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_rear_passenger_window-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Rear passenger window', - }), - 'context': , - 'entity_id': 'binary_sensor.test_rear_passenger_window', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_scheduled_charging_pending-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Scheduled charging pending', - }), - 'context': , - 'entity_id': 'binary_sensor.test_scheduled_charging_pending', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_status-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Test Status', - }), - 'context': , - 'entity_id': 'binary_sensor.test_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_tire_pressure_warning_front_left-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Test Tire pressure warning front left', - }), - 'context': , - 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_tire_pressure_warning_front_right-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Test Tire pressure warning front right', - }), - 'context': , - 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_tire_pressure_warning_rear_left-statealt] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Test Tire pressure warning rear left', - }), - 'context': , - 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_tire_pressure_warning_rear_right-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Test Tire pressure warning rear right', - }), - 'context': , - 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_trip_charging-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Trip charging', - }), - 'context': , - 'entity_id': 'binary_sensor.test_trip_charging', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor_refresh[binary_sensor.test_user_present-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'presence', - 'friendly_name': 'Test User present', - }), - 'context': , - 'entity_id': 'binary_sensor.test_user_present', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/tesla_fleet/snapshots/test_button.ambr b/tests/components/tesla_fleet/snapshots/test_button.ambr deleted file mode 100644 index 8b5270d4852..00000000000 --- a/tests/components/tesla_fleet/snapshots/test_button.ambr +++ /dev/null @@ -1,277 +0,0 @@ -# serializer version: 1 -# name: test_button[button.test_flash_lights-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_flash_lights', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Flash lights', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'flash_lights', - 'unique_id': 'LRWXF7EK4KC700000-flash_lights', - 'unit_of_measurement': None, - }) -# --- -# name: test_button[button.test_flash_lights-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Flash lights', - }), - 'context': , - 'entity_id': 'button.test_flash_lights', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_button[button.test_homelink-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_homelink', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Homelink', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'homelink', - 'unique_id': 'LRWXF7EK4KC700000-homelink', - 'unit_of_measurement': None, 
- }) -# --- -# name: test_button[button.test_homelink-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Homelink', - }), - 'context': , - 'entity_id': 'button.test_homelink', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_button[button.test_honk_horn-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_honk_horn', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Honk horn', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'honk', - 'unique_id': 'LRWXF7EK4KC700000-honk', - 'unit_of_measurement': None, - }) -# --- -# name: test_button[button.test_honk_horn-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Honk horn', - }), - 'context': , - 'entity_id': 'button.test_honk_horn', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_button[button.test_keyless_driving-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_keyless_driving', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Keyless driving', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'enable_keyless_driving', - 'unique_id': 'LRWXF7EK4KC700000-enable_keyless_driving', - 'unit_of_measurement': None, - }) -# --- -# name: test_button[button.test_keyless_driving-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Keyless driving', - }), - 'context': , - 'entity_id': 'button.test_keyless_driving', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_button[button.test_play_fart-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_play_fart', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Play fart', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'boombox', - 'unique_id': 'LRWXF7EK4KC700000-boombox', - 'unit_of_measurement': None, - }) -# --- -# name: test_button[button.test_play_fart-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Play fart', - }), - 'context': , - 'entity_id': 'button.test_play_fart', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: 
test_button[button.test_wake-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.test_wake', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Wake', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wake', - 'unique_id': 'LRWXF7EK4KC700000-wake', - 'unit_of_measurement': None, - }) -# --- -# name: test_button[button.test_wake-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Wake', - }), - 'context': , - 'entity_id': 'button.test_wake', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/tesla_fleet/snapshots/test_climate.ambr b/tests/components/tesla_fleet/snapshots/test_climate.ambr deleted file mode 100644 index 696f8c37f08..00000000000 --- a/tests/components/tesla_fleet/snapshots/test_climate.ambr +++ /dev/null @@ -1,422 +0,0 @@ -# serializer version: 1 -# name: test_climate[climate.test_cabin_overheat_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 40, - 'min_temp': 30, - 'target_temp_step': 5, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.test_cabin_overheat_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cabin overheat protection', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'climate_state_cabin_overheat_protection', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate[climate.test_cabin_overheat_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 30, - 'friendly_name': 'Test Cabin overheat protection', - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 40, - 'min_temp': 30, - 'supported_features': , - 'target_temp_step': 5, - 'temperature': 40, - }), - 'context': , - 'entity_id': 'climate.test_cabin_overheat_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'cool', - }) -# --- -# name: test_climate[climate.test_climate-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 15.0, - 'preset_modes': list([ - 'off', - 'keep', - 'dog', - 'camp', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.test_climate', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Climate', - 'platform': 'tesla_fleet', - 
'previous_unique_id': None, - 'supported_features': , - 'translation_key': , - 'unique_id': 'LRWXF7EK4KC700000-driver_temp', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate[climate.test_climate-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 30.0, - 'friendly_name': 'Test Climate', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 15.0, - 'preset_mode': 'keep', - 'preset_modes': list([ - 'off', - 'keep', - 'dog', - 'camp', - ]), - 'supported_features': , - 'temperature': 22.0, - }), - 'context': , - 'entity_id': 'climate.test_climate', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat_cool', - }) -# --- -# name: test_climate_alt[climate.test_cabin_overheat_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 40, - 'min_temp': 30, - 'target_temp_step': 5, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.test_cabin_overheat_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cabin overheat protection', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'climate_state_cabin_overheat_protection', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate_alt[climate.test_cabin_overheat_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 30, - 'friendly_name': 'Test Cabin overheat protection', - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 40, - 'min_temp': 30, - 'supported_features': , - 'target_temp_step': 5, - }), - 'context': , - 'entity_id': 'climate.test_cabin_overheat_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_climate_alt[climate.test_climate-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 15.0, - 'preset_modes': list([ - 'off', - 'keep', - 'dog', - 'camp', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.test_climate', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Climate', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': , - 'unique_id': 'LRWXF7EK4KC700000-driver_temp', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate_alt[climate.test_climate-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 30.0, - 'friendly_name': 'Test Climate', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 15.0, - 'preset_mode': 'off', - 'preset_modes': list([ - 'off', - 'keep', - 'dog', - 'camp', - ]), - 'supported_features': , - 'temperature': 22.0, - }), - 'context': , - 'entity_id': 
'climate.test_climate', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_climate_offline[climate.test_cabin_overheat_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 40, - 'min_temp': 30, - 'target_temp_step': 5, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.test_cabin_overheat_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cabin overheat protection', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'climate_state_cabin_overheat_protection', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate_offline[climate.test_cabin_overheat_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'Test Cabin overheat protection', - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': 40, - 'min_temp': 30, - 'supported_features': , - 'target_temp_step': 5, - }), - 'context': , - 'entity_id': 'climate.test_cabin_overheat_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_climate_offline[climate.test_climate-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 15.0, - 'preset_modes': list([ - 'off', - 'keep', - 'dog', - 'camp', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.test_climate', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Climate', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': , - 'unique_id': 'LRWXF7EK4KC700000-driver_temp', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate_offline[climate.test_climate-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'Test Climate', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 28.0, - 'min_temp': 15.0, - 'preset_mode': None, - 'preset_modes': list([ - 'off', - 'keep', - 'dog', - 'camp', - ]), - 'supported_features': , - 'temperature': None, - }), - 'context': , - 'entity_id': 'climate.test_climate', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/tesla_fleet/snapshots/test_cover.ambr b/tests/components/tesla_fleet/snapshots/test_cover.ambr deleted file mode 100644 index dbdb003d802..00000000000 --- a/tests/components/tesla_fleet/snapshots/test_cover.ambr +++ /dev/null @@ -1,721 +0,0 @@ -# serializer version: 1 -# name: test_cover[cover.test_charge_port_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , 
- 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_charge_port_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charge port door', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'charge_state_charge_port_door_open', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover[cover.test_charge_port_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Charge port door', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_charge_port_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_cover[cover.test_frunk-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_frunk', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Frunk', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_ft', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover[cover.test_frunk-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Frunk', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_frunk', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- -# name: test_cover[cover.test_sunroof-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_sunroof', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sunroof', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover[cover.test_sunroof-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Sunroof', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_sunroof', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_cover[cover.test_trunk-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_trunk', - 'has_entity_name': 
True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Trunk', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_rt', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rt', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover[cover.test_trunk-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Trunk', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_trunk', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- -# name: test_cover[cover.test_windows-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_windows', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Windows', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'windows', - 'unique_id': 'LRWXF7EK4KC700000-windows', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover[cover.test_windows-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Windows', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_windows', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- -# name: test_cover_alt[cover.test_charge_port_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_charge_port_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charge port door', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'charge_state_charge_port_door_open', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_alt[cover.test_charge_port_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Charge port door', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_charge_port_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_cover_alt[cover.test_frunk-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_frunk', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 
'original_name': 'Frunk', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_ft', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_alt[cover.test_frunk-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Frunk', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_frunk', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_cover_alt[cover.test_sunroof-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_sunroof', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sunroof', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_alt[cover.test_sunroof-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Sunroof', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_sunroof', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_cover_alt[cover.test_trunk-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_trunk', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Trunk', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_rt', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rt', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_alt[cover.test_trunk-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Trunk', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_trunk', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_cover_alt[cover.test_windows-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_windows', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Windows', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'windows', - 'unique_id': 'LRWXF7EK4KC700000-windows', - 'unit_of_measurement': None, - }) -# --- 
-# name: test_cover_alt[cover.test_windows-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Windows', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_windows', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_cover_readonly[cover.test_charge_port_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_charge_port_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charge port door', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_charge_port_door_open', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_readonly[cover.test_charge_port_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Charge port door', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_charge_port_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_cover_readonly[cover.test_frunk-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_frunk', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Frunk', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_ft', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_readonly[cover.test_frunk-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Frunk', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_frunk', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- -# name: test_cover_readonly[cover.test_sunroof-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_sunroof', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sunroof', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_readonly[cover.test_sunroof-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'device_class': 'window', - 'friendly_name': 'Test Sunroof', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_sunroof', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_cover_readonly[cover.test_trunk-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_trunk', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Trunk', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_rt', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rt', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_readonly[cover.test_trunk-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test Trunk', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_trunk', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- -# name: test_cover_readonly[cover.test_windows-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_windows', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Windows', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'windows', - 'unique_id': 'LRWXF7EK4KC700000-windows', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_readonly[cover.test_windows-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Windows', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_windows', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- diff --git a/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr b/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr deleted file mode 100644 index 02ad4b01002..00000000000 --- a/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr +++ /dev/null @@ -1,101 +0,0 @@ -# serializer version: 1 -# name: test_device_tracker[device_tracker.test_location-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'device_tracker', - 'entity_category': , - 'entity_id': 'device_tracker.test_location', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Location', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'location', - 'unique_id': 'LRWXF7EK4KC700000-location', - 'unit_of_measurement': None, - }) -# 
--- -# name: test_device_tracker[device_tracker.test_location-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Location', - 'gps_accuracy': 0, - 'latitude': -30.222626, - 'longitude': -97.6236871, - 'source_type': , - }), - 'context': , - 'entity_id': 'device_tracker.test_location', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'not_home', - }) -# --- -# name: test_device_tracker[device_tracker.test_route-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'device_tracker', - 'entity_category': , - 'entity_id': 'device_tracker.test_route', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Route', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'route', - 'unique_id': 'LRWXF7EK4KC700000-route', - 'unit_of_measurement': None, - }) -# --- -# name: test_device_tracker[device_tracker.test_route-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Route', - 'gps_accuracy': 0, - 'latitude': 30.2226265, - 'longitude': -97.6236871, - 'source_type': , - }), - 'context': , - 'entity_id': 'device_tracker.test_route', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'home', - }) -# --- diff --git a/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr b/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr deleted file mode 100644 index eb8c57910a4..00000000000 --- a/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,437 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics - dict({ - 'energysites': list([ - dict({ - 'info': dict({ - 'backup_reserve_percent': 0, - 'battery_count': 2, - 'components_backup': True, - 'components_backup_time_remaining_enabled': True, - 'components_batteries': list([ - dict({ - 'device_id': 'battery-1-id', - 'din': 'battery-1-din', - 'nameplate_energy': 13500, - 'nameplate_max_charge_power': 5000, - 'nameplate_max_discharge_power': 5000, - 'part_name': 'Powerwall 2', - 'part_number': '3012170-10-B', - 'part_type': 2, - 'serial_number': '**REDACTED**', - }), - dict({ - 'device_id': 'battery-2-id', - 'din': 'battery-2-din', - 'nameplate_energy': 13500, - 'nameplate_max_charge_power': 5000, - 'nameplate_max_discharge_power': 5000, - 'part_name': 'Powerwall 2', - 'part_number': '3012170-05-C', - 'part_type': 2, - 'serial_number': '**REDACTED**', - }), - ]), - 'components_battery': True, - 'components_battery_solar_offset_view_enabled': True, - 'components_battery_type': 'ac_powerwall', - 'components_car_charging_data_supported': False, - 'components_configurable': True, - 'components_customer_preferred_export_rule': 'pv_only', - 'components_disallow_charge_from_grid_with_solar_installed': True, - 'components_energy_service_self_scheduling_enabled': True, - 'components_energy_value_header': 'Energy Value', - 'components_energy_value_subheader': 'Estimated Value', - 'components_flex_energy_request_capable': False, - 'components_gateway': 'teg', - 'components_gateways': list([ - dict({ - 'device_id': 'gateway-id', - 'din': 'gateway-din', - 'firmware_version': '24.4.0 0fe780c9', - 'is_active': True, - 'part_name': 'Tesla Backup Gateway 2', - 
'part_number': '1152100-14-J', - 'part_type': 10, - 'serial_number': '**REDACTED**', - 'site_id': '1234-abcd', - 'updated_datetime': '2024-05-14T00:00:00.000Z', - }), - ]), - 'components_grid': True, - 'components_grid_services_enabled': False, - 'components_load_meter': True, - 'components_net_meter_mode': 'battery_ok', - 'components_off_grid_vehicle_charging_reserve_supported': True, - 'components_set_islanding_mode_enabled': True, - 'components_show_grid_import_battery_source_cards': True, - 'components_solar': True, - 'components_solar_type': 'pv_panel', - 'components_solar_value_enabled': True, - 'components_storm_mode_capable': True, - 'components_system_alerts_enabled': True, - 'components_tou_capable': True, - 'components_vehicle_charging_performance_view_enabled': False, - 'components_vehicle_charging_solar_offset_view_enabled': False, - 'components_wall_connectors': list([ - dict({ - 'device_id': '123abc', - 'din': 'abd-123', - 'is_active': True, - 'part_name': 'Gen 3 Wall Connector', - }), - dict({ - 'device_id': '234bcd', - 'din': 'bcd-234', - 'is_active': True, - 'part_name': 'Gen 3 Wall Connector', - }), - ]), - 'components_wifi_commissioning_enabled': True, - 'default_real_mode': 'self_consumption', - 'id': '1233-abcd', - 'installation_date': '**REDACTED**', - 'installation_time_zone': '', - 'max_site_meter_power_ac': 1000000000, - 'min_site_meter_power_ac': -1000000000, - 'nameplate_energy': 40500, - 'nameplate_power': 15000, - 'site_name': 'Site', - 'tou_settings_optimization_strategy': 'economics', - 'tou_settings_schedule': list([ - dict({ - 'end_seconds': 3600, - 'start_seconds': 0, - 'target': 'off_peak', - 'week_days': list([ - 1, - 0, - ]), - }), - dict({ - 'end_seconds': 0, - 'start_seconds': 3600, - 'target': 'peak', - 'week_days': list([ - 1, - 0, - ]), - }), - ]), - 'user_settings_breaker_alert_enabled': False, - 'user_settings_go_off_grid_test_banner_enabled': False, - 'user_settings_powerwall_onboarding_settings_set': True, - 'user_settings_powerwall_tesla_electric_interested_in': False, - 'user_settings_storm_mode_enabled': True, - 'user_settings_sync_grid_alert_enabled': True, - 'user_settings_vpp_tour_enabled': True, - 'version': '23.44.0 eb113390', - 'vpp_backup_reserve_percent': 0, - }), - 'live': dict({ - 'backup_capable': True, - 'battery_power': 5060, - 'energy_left': 38896.47368421053, - 'generator_power': 0, - 'grid_power': 0, - 'grid_services_active': False, - 'grid_services_power': 0, - 'grid_status': 'Active', - 'island_status': 'on_grid', - 'load_power': 6245, - 'percentage_charged': 95.50537403739663, - 'solar_power': 1185, - 'storm_mode_active': False, - 'timestamp': '2024-01-01T00:00:00+00:00', - 'total_pack_energy': 40727, - 'wall_connectors': dict({ - 'abd-123': dict({ - 'din': 'abd-123', - 'wall_connector_fault_state': 2, - 'wall_connector_power': 0, - 'wall_connector_state': 2, - }), - 'bcd-234': dict({ - 'din': 'bcd-234', - 'wall_connector_fault_state': 2, - 'wall_connector_power': 0, - 'wall_connector_state': 2, - }), - }), - }), - }), - ]), - 'scopes': list([ - 'openid', - 'offline_access', - 'vehicle_device_data', - 'vehicle_cmds', - 'vehicle_charging_cmds', - 'energy_device_data', - 'energy_cmds', - ]), - 'vehicles': list([ - dict({ - 'data': dict({ - 'access_type': 'OWNER', - 'api_version': 71, - 'backseat_token': None, - 'backseat_token_updated_at': None, - 'ble_autopair_enrolled': False, - 'calendar_enabled': True, - 'charge_state_battery_heater_on': False, - 'charge_state_battery_level': 77, - 'charge_state_battery_range': 266.87, 
- 'charge_state_charge_amps': 16, - 'charge_state_charge_current_request': 16, - 'charge_state_charge_current_request_max': 16, - 'charge_state_charge_enable_request': True, - 'charge_state_charge_energy_added': 0, - 'charge_state_charge_limit_soc': 80, - 'charge_state_charge_limit_soc_max': 100, - 'charge_state_charge_limit_soc_min': 50, - 'charge_state_charge_limit_soc_std': 80, - 'charge_state_charge_miles_added_ideal': 0, - 'charge_state_charge_miles_added_rated': 0, - 'charge_state_charge_port_cold_weather_mode': False, - 'charge_state_charge_port_color': '', - 'charge_state_charge_port_door_open': True, - 'charge_state_charge_port_latch': 'Engaged', - 'charge_state_charge_rate': 0, - 'charge_state_charger_actual_current': 0, - 'charge_state_charger_phases': None, - 'charge_state_charger_pilot_current': 16, - 'charge_state_charger_power': 0, - 'charge_state_charger_voltage': 2, - 'charge_state_charging_state': 'Stopped', - 'charge_state_conn_charge_cable': 'IEC', - 'charge_state_est_battery_range': 275.04, - 'charge_state_fast_charger_brand': '', - 'charge_state_fast_charger_present': False, - 'charge_state_fast_charger_type': 'ACSingleWireCAN', - 'charge_state_ideal_battery_range': 266.87, - 'charge_state_max_range_charge_counter': 0, - 'charge_state_minutes_to_full_charge': 0, - 'charge_state_not_enough_power_to_heat': None, - 'charge_state_off_peak_charging_enabled': False, - 'charge_state_off_peak_charging_times': 'all_week', - 'charge_state_off_peak_hours_end_time': 900, - 'charge_state_preconditioning_enabled': False, - 'charge_state_preconditioning_times': 'all_week', - 'charge_state_scheduled_charging_mode': 'Off', - 'charge_state_scheduled_charging_pending': False, - 'charge_state_scheduled_charging_start_time': None, - 'charge_state_scheduled_charging_start_time_app': 600, - 'charge_state_scheduled_departure_time': 1704837600, - 'charge_state_scheduled_departure_time_minutes': 480, - 'charge_state_supercharger_session_trip_planner': False, - 'charge_state_time_to_full_charge': 0, - 'charge_state_timestamp': 1705707520649, - 'charge_state_trip_charging': False, - 'charge_state_usable_battery_level': 77, - 'charge_state_user_charge_enable_request': None, - 'climate_state_allow_cabin_overheat_protection': True, - 'climate_state_auto_seat_climate_left': True, - 'climate_state_auto_seat_climate_right': True, - 'climate_state_auto_steering_wheel_heat': False, - 'climate_state_battery_heater': False, - 'climate_state_battery_heater_no_power': None, - 'climate_state_cabin_overheat_protection': 'On', - 'climate_state_cabin_overheat_protection_actively_cooling': False, - 'climate_state_climate_keeper_mode': 'keep', - 'climate_state_cop_activation_temperature': 'High', - 'climate_state_defrost_mode': 0, - 'climate_state_driver_temp_setting': 22, - 'climate_state_fan_status': 0, - 'climate_state_hvac_auto_request': 'On', - 'climate_state_inside_temp': 29.8, - 'climate_state_is_auto_conditioning_on': False, - 'climate_state_is_climate_on': True, - 'climate_state_is_front_defroster_on': False, - 'climate_state_is_preconditioning': False, - 'climate_state_is_rear_defroster_on': False, - 'climate_state_left_temp_direction': 251, - 'climate_state_max_avail_temp': 28, - 'climate_state_min_avail_temp': 15, - 'climate_state_outside_temp': 30, - 'climate_state_passenger_temp_setting': 22, - 'climate_state_remote_heater_control_enabled': False, - 'climate_state_right_temp_direction': 251, - 'climate_state_seat_heater_left': 0, - 'climate_state_seat_heater_rear_center': 0, - 
'climate_state_seat_heater_rear_left': 0, - 'climate_state_seat_heater_rear_right': 0, - 'climate_state_seat_heater_right': 0, - 'climate_state_side_mirror_heaters': False, - 'climate_state_steering_wheel_heat_level': 0, - 'climate_state_steering_wheel_heater': False, - 'climate_state_supports_fan_only_cabin_overheat_protection': True, - 'climate_state_timestamp': 1705707520649, - 'climate_state_wiper_blade_heater': False, - 'color': None, - 'drive_state_active_route_destination': 'Home', - 'drive_state_active_route_latitude': '**REDACTED**', - 'drive_state_active_route_longitude': '**REDACTED**', - 'drive_state_active_route_miles_to_arrival': 0.039491, - 'drive_state_active_route_minutes_to_arrival': 0.103577, - 'drive_state_active_route_traffic_minutes_delay': 0, - 'drive_state_gps_as_of': 1701129612, - 'drive_state_heading': 185, - 'drive_state_latitude': '**REDACTED**', - 'drive_state_longitude': '**REDACTED**', - 'drive_state_native_latitude': '**REDACTED**', - 'drive_state_native_location_supported': 1, - 'drive_state_native_longitude': '**REDACTED**', - 'drive_state_native_type': 'wgs', - 'drive_state_power': -7, - 'drive_state_shift_state': None, - 'drive_state_speed': None, - 'drive_state_timestamp': 1705707520649, - 'granular_access_hide_private': False, - 'gui_settings_gui_24_hour_time': False, - 'gui_settings_gui_charge_rate_units': 'kW', - 'gui_settings_gui_distance_units': 'km/hr', - 'gui_settings_gui_range_display': 'Rated', - 'gui_settings_gui_temperature_units': 'C', - 'gui_settings_gui_tirepressure_units': 'Psi', - 'gui_settings_show_range_units': False, - 'gui_settings_timestamp': 1705707520649, - 'id': '**REDACTED**', - 'id_s': '**REDACTED**', - 'in_service': False, - 'state': 'online', - 'tokens': '**REDACTED**', - 'user_id': '**REDACTED**', - 'vehicle_config_aux_park_lamps': 'Eu', - 'vehicle_config_badge_version': 1, - 'vehicle_config_can_accept_navigation_requests': True, - 'vehicle_config_can_actuate_trunks': True, - 'vehicle_config_car_special_type': 'base', - 'vehicle_config_car_type': 'model3', - 'vehicle_config_charge_port_type': 'CCS', - 'vehicle_config_cop_user_set_temp_supported': True, - 'vehicle_config_dashcam_clip_save_supported': True, - 'vehicle_config_default_charge_to_max': False, - 'vehicle_config_driver_assist': 'TeslaAP3', - 'vehicle_config_ece_restrictions': False, - 'vehicle_config_efficiency_package': 'M32021', - 'vehicle_config_eu_vehicle': True, - 'vehicle_config_exterior_color': 'DeepBlue', - 'vehicle_config_exterior_trim': 'Black', - 'vehicle_config_exterior_trim_override': '', - 'vehicle_config_has_air_suspension': False, - 'vehicle_config_has_ludicrous_mode': False, - 'vehicle_config_has_seat_cooling': False, - 'vehicle_config_headlamp_type': 'Global', - 'vehicle_config_interior_trim_type': 'White2', - 'vehicle_config_key_version': 2, - 'vehicle_config_motorized_charge_port': True, - 'vehicle_config_paint_color_override': '0,9,25,0.7,0.04', - 'vehicle_config_performance_package': 'Base', - 'vehicle_config_plg': True, - 'vehicle_config_pws': True, - 'vehicle_config_rear_drive_unit': 'PM216MOSFET', - 'vehicle_config_rear_seat_heaters': 1, - 'vehicle_config_rear_seat_type': 0, - 'vehicle_config_rhd': True, - 'vehicle_config_roof_color': 'RoofColorGlass', - 'vehicle_config_seat_type': None, - 'vehicle_config_spoiler_type': 'None', - 'vehicle_config_sun_roof_installed': True, - 'vehicle_config_supports_qr_pairing': False, - 'vehicle_config_third_row_seats': 'None', - 'vehicle_config_timestamp': 1705707520649, - 'vehicle_config_trim_badging': 
'74d', - 'vehicle_config_use_range_badging': True, - 'vehicle_config_utc_offset': 36000, - 'vehicle_config_webcam_selfie_supported': True, - 'vehicle_config_webcam_supported': True, - 'vehicle_config_wheel_type': 'Pinwheel18CapKit', - 'vehicle_id': '**REDACTED**', - 'vehicle_state_api_version': 71, - 'vehicle_state_autopark_state_v2': 'unavailable', - 'vehicle_state_calendar_supported': True, - 'vehicle_state_car_version': '2023.44.30.8 06f534d46010', - 'vehicle_state_center_display_state': 0, - 'vehicle_state_dashcam_clip_save_available': True, - 'vehicle_state_dashcam_state': 'Recording', - 'vehicle_state_df': 0, - 'vehicle_state_dr': 0, - 'vehicle_state_fd_window': 0, - 'vehicle_state_feature_bitmask': 'fbdffbff,187f', - 'vehicle_state_fp_window': 0, - 'vehicle_state_ft': 0, - 'vehicle_state_is_user_present': False, - 'vehicle_state_locked': False, - 'vehicle_state_media_info_a2dp_source_name': 'Pixel 8 Pro', - 'vehicle_state_media_info_audio_volume': 1.6667, - 'vehicle_state_media_info_audio_volume_increment': 0.333333, - 'vehicle_state_media_info_audio_volume_max': 10.333333, - 'vehicle_state_media_info_media_playback_status': 'Playing', - 'vehicle_state_media_info_now_playing_album': 'Elon Musk', - 'vehicle_state_media_info_now_playing_artist': 'Walter Isaacson', - 'vehicle_state_media_info_now_playing_duration': 651000, - 'vehicle_state_media_info_now_playing_elapsed': 1000, - 'vehicle_state_media_info_now_playing_source': 'Audible', - 'vehicle_state_media_info_now_playing_station': 'Elon Musk', - 'vehicle_state_media_info_now_playing_title': 'Chapter 51: Cybertruck: Tesla, 2018–2019', - 'vehicle_state_media_state_remote_control_enabled': True, - 'vehicle_state_notifications_supported': True, - 'vehicle_state_odometer': 6481.019282, - 'vehicle_state_parsed_calendar_supported': True, - 'vehicle_state_pf': 0, - 'vehicle_state_pr': 0, - 'vehicle_state_rd_window': 0, - 'vehicle_state_remote_start': False, - 'vehicle_state_remote_start_enabled': True, - 'vehicle_state_remote_start_supported': True, - 'vehicle_state_rp_window': 0, - 'vehicle_state_rt': 0, - 'vehicle_state_santa_mode': 0, - 'vehicle_state_sentry_mode': False, - 'vehicle_state_sentry_mode_available': True, - 'vehicle_state_service_mode': False, - 'vehicle_state_service_mode_plus': False, - 'vehicle_state_software_update_download_perc': 100, - 'vehicle_state_software_update_expected_duration_sec': 2700, - 'vehicle_state_software_update_install_perc': 1, - 'vehicle_state_software_update_status': 'available', - 'vehicle_state_software_update_version': '2024.12.0.0', - 'vehicle_state_speed_limit_mode_active': False, - 'vehicle_state_speed_limit_mode_current_limit_mph': 69, - 'vehicle_state_speed_limit_mode_max_limit_mph': 120, - 'vehicle_state_speed_limit_mode_min_limit_mph': 50, - 'vehicle_state_speed_limit_mode_pin_code_set': True, - 'vehicle_state_sun_roof_state': 'open', - 'vehicle_state_timestamp': 1705707520649, - 'vehicle_state_tpms_hard_warning_fl': False, - 'vehicle_state_tpms_hard_warning_fr': False, - 'vehicle_state_tpms_hard_warning_rl': False, - 'vehicle_state_tpms_hard_warning_rr': False, - 'vehicle_state_tpms_last_seen_pressure_time_fl': 1705700812, - 'vehicle_state_tpms_last_seen_pressure_time_fr': 1705700793, - 'vehicle_state_tpms_last_seen_pressure_time_rl': 1705700794, - 'vehicle_state_tpms_last_seen_pressure_time_rr': 1705700823, - 'vehicle_state_tpms_pressure_fl': 2.775, - 'vehicle_state_tpms_pressure_fr': 2.8, - 'vehicle_state_tpms_pressure_rl': 2.775, - 'vehicle_state_tpms_pressure_rr': 2.775, - 
'vehicle_state_tpms_rcp_front_value': 2.9, - 'vehicle_state_tpms_rcp_rear_value': 2.9, - 'vehicle_state_tpms_soft_warning_fl': False, - 'vehicle_state_tpms_soft_warning_fr': False, - 'vehicle_state_tpms_soft_warning_rl': False, - 'vehicle_state_tpms_soft_warning_rr': False, - 'vehicle_state_valet_mode': False, - 'vehicle_state_valet_pin_needed': False, - 'vehicle_state_vehicle_name': 'Test', - 'vehicle_state_vehicle_self_test_progress': 0, - 'vehicle_state_vehicle_self_test_requested': False, - 'vehicle_state_vehicle_state_sun_roof_percent_open': 20, - 'vehicle_state_webcam_available': True, - 'vin': '**REDACTED**', - }), - }), - ]), - }) -# --- diff --git a/tests/components/tesla_fleet/snapshots/test_init.ambr b/tests/components/tesla_fleet/snapshots/test_init.ambr deleted file mode 100644 index e9828db9f1b..00000000000 --- a/tests/components/tesla_fleet/snapshots/test_init.ambr +++ /dev/null @@ -1,129 +0,0 @@ -# serializer version: 1 -# name: test_devices[{('tesla_fleet', '123456')}] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'tesla_fleet', - '123456', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Tesla', - 'model': 'Powerwall 2, Tesla Backup Gateway 2', - 'model_id': None, - 'name': 'Energy Site', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': '123456', - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- -# name: test_devices[{('tesla_fleet', 'LRWXF7EK4KC700000')}] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'tesla_fleet', - 'LRWXF7EK4KC700000', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Tesla', - 'model': 'Model X', - 'model_id': None, - 'name': 'Test', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': 'LRWXF7EK4KC700000', - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- -# name: test_devices[{('tesla_fleet', 'abd-123')}] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'tesla_fleet', - 'abd-123', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Tesla', - 'model': 'Gen 3 Wall Connector', - 'model_id': None, - 'name': 'Wall Connector', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': '123', - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': , - }) -# --- -# name: test_devices[{('tesla_fleet', 'bcd-234')}] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'tesla_fleet', - 'bcd-234', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Tesla', - 'model': 'Gen 3 Wall Connector', - 'model_id': None, - 'name': 'Wall Connector', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': '234', - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': , - }) 
-# --- diff --git a/tests/components/tesla_fleet/snapshots/test_lock.ambr b/tests/components/tesla_fleet/snapshots/test_lock.ambr deleted file mode 100644 index 3384bb0eb97..00000000000 --- a/tests/components/tesla_fleet/snapshots/test_lock.ambr +++ /dev/null @@ -1,95 +0,0 @@ -# serializer version: 1 -# name: test_lock[lock.test_charge_cable_lock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'lock', - 'entity_category': None, - 'entity_id': 'lock.test_charge_cable_lock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Charge cable lock', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_charge_port_latch', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_latch', - 'unit_of_measurement': None, - }) -# --- -# name: test_lock[lock.test_charge_cable_lock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Charge cable lock', - 'supported_features': , - }), - 'context': , - 'entity_id': 'lock.test_charge_cable_lock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'locked', - }) -# --- -# name: test_lock[lock.test_lock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'lock', - 'entity_category': None, - 'entity_id': 'lock.test_lock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Lock', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_locked', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_locked', - 'unit_of_measurement': None, - }) -# --- -# name: test_lock[lock.test_lock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Lock', - 'supported_features': , - }), - 'context': , - 'entity_id': 'lock.test_lock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unlocked', - }) -# --- diff --git a/tests/components/tesla_fleet/snapshots/test_media_player.ambr b/tests/components/tesla_fleet/snapshots/test_media_player.ambr deleted file mode 100644 index cc3018364a5..00000000000 --- a/tests/components/tesla_fleet/snapshots/test_media_player.ambr +++ /dev/null @@ -1,136 +0,0 @@ -# serializer version: 1 -# name: test_media_player[media_player.test_media_player-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'media_player', - 'entity_category': None, - 'entity_id': 'media_player.test_media_player', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Media player', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'media', - 
'unique_id': 'LRWXF7EK4KC700000-media', - 'unit_of_measurement': None, - }) -# --- -# name: test_media_player[media_player.test_media_player-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'speaker', - 'friendly_name': 'Test Media player', - 'media_album_name': 'Elon Musk', - 'media_artist': 'Walter Isaacson', - 'media_duration': 651.0, - 'media_playlist': 'Elon Musk', - 'media_position': 1.0, - 'media_title': 'Chapter 51: Cybertruck: Tesla, 2018–2019', - 'source': 'Audible', - 'supported_features': , - 'volume_level': 0.16129355359011466, - }), - 'context': , - 'entity_id': 'media_player.test_media_player', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_media_player_alt[media_player.test_media_player-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'speaker', - 'friendly_name': 'Test Media player', - 'media_album_name': '', - 'media_artist': '', - 'media_playlist': '', - 'media_title': '', - 'source': 'Spotify', - 'supported_features': , - 'volume_level': 0.25806775026025003, - }), - 'context': , - 'entity_id': 'media_player.test_media_player', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'idle', - }) -# --- -# name: test_media_player_noscope[media_player.test_media_player-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'media_player', - 'entity_category': None, - 'entity_id': 'media_player.test_media_player', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Media player', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'media', - 'unique_id': 'LRWXF7EK4KC700000-media', - 'unit_of_measurement': None, - }) -# --- -# name: test_media_player_noscope[media_player.test_media_player-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'speaker', - 'friendly_name': 'Test Media player', - 'media_album_name': 'Elon Musk', - 'media_artist': 'Walter Isaacson', - 'media_duration': 651.0, - 'media_playlist': 'Elon Musk', - 'media_position': 1.0, - 'media_title': 'Chapter 51: Cybertruck: Tesla, 2018–2019', - 'source': 'Audible', - 'supported_features': , - 'volume_level': 0.16129355359011466, - }), - 'context': , - 'entity_id': 'media_player.test_media_player', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- diff --git a/tests/components/tesla_fleet/snapshots/test_number.ambr b/tests/components/tesla_fleet/snapshots/test_number.ambr deleted file mode 100644 index 00dd67015fe..00000000000 --- a/tests/components/tesla_fleet/snapshots/test_number.ambr +++ /dev/null @@ -1,231 +0,0 @@ -# serializer version: 1 -# name: test_number[number.energy_site_backup_reserve-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.energy_site_backup_reserve', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': 
None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-alert', - 'original_name': 'Backup reserve', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'backup_reserve_percent', - 'unique_id': '123456-backup_reserve_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_number[number.energy_site_backup_reserve-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Energy Site Backup reserve', - 'icon': 'mdi:battery-alert', - 'max': 100, - 'min': 0, - 'mode': , - 'step': 1, - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'number.energy_site_backup_reserve', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_number[number.energy_site_off_grid_reserve-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.energy_site_off_grid_reserve', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-unknown', - 'original_name': 'Off grid reserve', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'off_grid_vehicle_charging_reserve_percent', - 'unique_id': '123456-off_grid_vehicle_charging_reserve_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_number[number.energy_site_off_grid_reserve-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Energy Site Off grid reserve', - 'icon': 'mdi:battery-unknown', - 'max': 100, - 'min': 0, - 'mode': , - 'step': 1, - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'number.energy_site_off_grid_reserve', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_number[number.test_charge_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 16, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.test_charge_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charge current', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_charge_current_request', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_current_request', - 'unit_of_measurement': , - }) -# --- -# name: test_number[number.test_charge_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Test Charge current', - 'max': 16, - 'min': 0, - 'mode': , - 'step': 1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.test_charge_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '16', - }) -# --- -# name: 
test_number[number.test_charge_limit-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100, - 'min': 50, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.test_charge_limit', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charge limit', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_charge_limit_soc', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_limit_soc', - 'unit_of_measurement': '%', - }) -# --- -# name: test_number[number.test_charge_limit-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Test Charge limit', - 'max': 100, - 'min': 50, - 'mode': , - 'step': 1, - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'number.test_charge_limit', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '80', - }) -# --- diff --git a/tests/components/tesla_fleet/snapshots/test_select.ambr b/tests/components/tesla_fleet/snapshots/test_select.ambr deleted file mode 100644 index f29ce841113..00000000000 --- a/tests/components/tesla_fleet/snapshots/test_select.ambr +++ /dev/null @@ -1,585 +0,0 @@ -# serializer version: 1 -# name: test_select[select.energy_site_allow_export-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.energy_site_allow_export', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allow export', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'components_customer_preferred_export_rule', - 'unique_id': '123456-components_customer_preferred_export_rule', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.energy_site_allow_export-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Allow export', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'select.energy_site_allow_export', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'pv_only', - }) -# --- -# name: test_select[select.energy_site_operation_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.energy_site_operation_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Operation mode', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'default_real_mode', - 'unique_id': '123456-default_real_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.energy_site_operation_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Operation mode', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'select.energy_site_operation_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'self_consumption', - }) -# --- -# name: test_select[select.test_seat_heater_front_left-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_seat_heater_front_left', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Seat heater front left', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_seat_heater_left', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_left', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_seat_heater_front_left-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Seat heater front left', - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'context': , - 'entity_id': 'select.test_seat_heater_front_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_select[select.test_seat_heater_front_right-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_seat_heater_front_right', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Seat heater front right', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_seat_heater_right', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_right', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_seat_heater_front_right-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Seat heater front right', - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'context': , - 'entity_id': 'select.test_seat_heater_front_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_select[select.test_seat_heater_rear_center-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_seat_heater_rear_center', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Seat heater rear center', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_seat_heater_rear_center', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_rear_center', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_seat_heater_rear_center-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Seat heater rear center', - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'context': , - 'entity_id': 'select.test_seat_heater_rear_center', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_select[select.test_seat_heater_rear_left-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_seat_heater_rear_left', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Seat heater rear left', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_seat_heater_rear_left', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_rear_left', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_seat_heater_rear_left-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Seat heater rear left', - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'context': , - 'entity_id': 'select.test_seat_heater_rear_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_select[select.test_seat_heater_rear_right-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_seat_heater_rear_right', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Seat heater rear right', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_seat_heater_rear_right', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_rear_right', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_seat_heater_rear_right-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Seat heater rear right', - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'context': , - 'entity_id': 'select.test_seat_heater_rear_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: 
test_select[select.test_seat_heater_third_row_left-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_seat_heater_third_row_left', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Seat heater third row left', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_seat_heater_third_row_left', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_third_row_left', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_seat_heater_third_row_left-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Seat heater third row left', - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'context': , - 'entity_id': 'select.test_seat_heater_third_row_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_select[select.test_seat_heater_third_row_right-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_seat_heater_third_row_right', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Seat heater third row right', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_seat_heater_third_row_right', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_seat_heater_third_row_right', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_seat_heater_third_row_right-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Seat heater third row right', - 'options': list([ - 'off', - 'low', - 'medium', - 'high', - ]), - }), - 'context': , - 'entity_id': 'select.test_seat_heater_third_row_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_select[select.test_steering_wheel_heater-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'off', - 'low', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_steering_wheel_heater', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Steering wheel heater', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_steering_wheel_heat_level', - 'unique_id': 
'LRWXF7EK4KC700000-climate_state_steering_wheel_heat_level', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_steering_wheel_heater-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Steering wheel heater', - 'options': list([ - 'off', - 'low', - 'high', - ]), - }), - 'context': , - 'entity_id': 'select.test_steering_wheel_heater', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/tesla_fleet/snapshots/test_sensor.ambr b/tests/components/tesla_fleet/snapshots/test_sensor.ambr deleted file mode 100644 index 2c3780749ca..00000000000 --- a/tests/components/tesla_fleet/snapshots/test_sensor.ambr +++ /dev/null @@ -1,3446 +0,0 @@ -# serializer version: 1 -# name: test_sensors[sensor.energy_site_battery_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_battery_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery power', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_power', - 'unique_id': '123456-battery_power', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_battery_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Energy Site Battery power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_battery_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5.06', - }) -# --- -# name: test_sensors[sensor.energy_site_battery_power-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Energy Site Battery power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_battery_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5.06', - }) -# --- -# name: test_sensors[sensor.energy_site_energy_left-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.energy_site_energy_left', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy left', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'energy_left', - 'unique_id': '123456-energy_left', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_energy_left-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'device_class': 'energy_storage', - 'friendly_name': 'Energy Site Energy left', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_energy_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '38.8964736842105', - }) -# --- -# name: test_sensors[sensor.energy_site_energy_left-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy_storage', - 'friendly_name': 'Energy Site Energy left', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_energy_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '38.8964736842105', - }) -# --- -# name: test_sensors[sensor.energy_site_generator_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_generator_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Generator power', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'generator_power', - 'unique_id': '123456-generator_power', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_generator_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Energy Site Generator power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_generator_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_generator_power-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Energy Site Generator power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_generator_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_grid_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Grid power', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'grid_power', - 'unique_id': '123456-grid_power', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_grid_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Energy 
Site Grid power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_power-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Energy Site Grid power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_services_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_grid_services_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Grid services power', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'grid_services_power', - 'unique_id': '123456-grid_services_power', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_grid_services_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Energy Site Grid services power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_services_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_services_power-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Energy Site Grid services power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_services_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'island_status_unknown', - 'on_grid', - 'off_grid', - 'off_grid_unintentional', - 'off_grid_intentional', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_grid_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Grid Status', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'island_status', - 'unique_id': '123456-island_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.energy_site_grid_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Energy Site Grid Status', - 'options': list([ - 
'island_status_unknown', - 'on_grid', - 'off_grid', - 'off_grid_unintentional', - 'off_grid_intentional', - ]), - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on_grid', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_status-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Energy Site Grid Status', - 'options': list([ - 'island_status_unknown', - 'on_grid', - 'off_grid', - 'off_grid_unintentional', - 'off_grid_intentional', - ]), - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on_grid', - }) -# --- -# name: test_sensors[sensor.energy_site_load_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_load_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Load power', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'load_power', - 'unique_id': '123456-load_power', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_load_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Energy Site Load power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_load_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '6.245', - }) -# --- -# name: test_sensors[sensor.energy_site_load_power-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Energy Site Load power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_load_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '6.245', - }) -# --- -# name: test_sensors[sensor.energy_site_percentage_charged-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_percentage_charged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Percentage charged', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'percentage_charged', - 'unique_id': '123456-percentage_charged', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.energy_site_percentage_charged-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Energy Site Percentage 
charged', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.energy_site_percentage_charged', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '95.5053740373966', - }) -# --- -# name: test_sensors[sensor.energy_site_percentage_charged-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Energy Site Percentage charged', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.energy_site_percentage_charged', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '95.5053740373966', - }) -# --- -# name: test_sensors[sensor.energy_site_solar_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_solar_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Solar power', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'solar_power', - 'unique_id': '123456-solar_power', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_solar_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Energy Site Solar power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_solar_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.185', - }) -# --- -# name: test_sensors[sensor.energy_site_solar_power-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Energy Site Solar power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_solar_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.185', - }) -# --- -# name: test_sensors[sensor.energy_site_total_pack_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.energy_site_total_pack_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Total pack energy', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_pack_energy', - 'unique_id': '123456-total_pack_energy', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_total_pack_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy_storage', - 'friendly_name': 'Energy Site Total pack energy', - 
'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_total_pack_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40.727', - }) -# --- -# name: test_sensors[sensor.energy_site_total_pack_energy-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy_storage', - 'friendly_name': 'Energy Site Total pack energy', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_total_pack_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40.727', - }) -# --- -# name: test_sensors[sensor.energy_site_version-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_version', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'version', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'version', - 'unique_id': '123456-version', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.energy_site_version-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site version', - }), - 'context': , - 'entity_id': 'sensor.energy_site_version', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '23.44.0 eb113390', - }) -# --- -# name: test_sensors[sensor.energy_site_version-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site version', - }), - 'context': , - 'entity_id': 'sensor.energy_site_version', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '23.44.0 eb113390', - }) -# --- -# name: test_sensors[sensor.energy_site_vpp_backup_reserve-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.energy_site_vpp_backup_reserve', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VPP backup reserve', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vpp_backup_reserve_percent', - 'unique_id': '123456-vpp_backup_reserve_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.energy_site_vpp_backup_reserve-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Energy Site VPP backup reserve', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.energy_site_vpp_backup_reserve', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.energy_site_vpp_backup_reserve-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Energy Site VPP backup reserve', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.energy_site_vpp_backup_reserve', 
- 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_battery_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_battery_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery level', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_battery_level', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_level', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.test_battery_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Test Battery level', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_battery_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '77', - }) -# --- -# name: test_sensors[sensor.test_battery_level-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Test Battery level', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_battery_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '77', - }) -# --- -# name: test_sensors[sensor.test_battery_range-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_battery_range', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery range', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_battery_range', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_range', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_battery_range-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'distance', - 'friendly_name': 'Test Battery range', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_battery_range', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '429.48563328', - }) -# --- -# name: test_sensors[sensor.test_battery_range-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'distance', - 'friendly_name': 'Test Battery range', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_battery_range', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '429.48563328', - }) -# --- -# name: test_sensors[sensor.test_charge_cable-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_charge_cable', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Charge cable', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_conn_charge_cable', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_conn_charge_cable', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.test_charge_cable-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Charge cable', - }), - 'context': , - 'entity_id': 'sensor.test_charge_cable', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'IEC', - }) -# --- -# name: test_sensors[sensor.test_charge_cable-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Charge cable', - }), - 'context': , - 'entity_id': 'sensor.test_charge_cable', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'IEC', - }) -# --- -# name: test_sensors[sensor.test_charge_energy_added-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_charge_energy_added', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charge energy added', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_charge_energy_added', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_energy_added', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_charge_energy_added-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Test Charge energy added', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_charge_energy_added', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_charge_energy_added-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Test Charge energy added', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_charge_energy_added', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_charge_rate-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_charge_rate', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 
'original_device_class': , - 'original_icon': None, - 'original_name': 'Charge rate', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_charge_rate', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_rate', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_charge_rate-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'speed', - 'friendly_name': 'Test Charge rate', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_charge_rate', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_charge_rate-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'speed', - 'friendly_name': 'Test Charge rate', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_charge_rate', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_charger_current-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_charger_current', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charger current', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_charger_actual_current', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_actual_current', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_charger_current-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Test Charger current', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_charger_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_charger_current-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'current', - 'friendly_name': 'Test Charger current', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_charger_current', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_charger_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_charger_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charger power', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_charger_power', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_power', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_charger_power-state] 
- StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Test Charger power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_charger_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_charger_power-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Test Charger power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_charger_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_charger_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_charger_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charger voltage', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_charger_voltage', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_voltage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_charger_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Test Charger voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_charger_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_sensors[sensor.test_charger_voltage-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'Test Charger voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_charger_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_sensors[sensor.test_charging-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'starting', - 'charging', - 'stopped', - 'complete', - 'disconnected', - 'no_power', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_charging', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charging', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_charging_state', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charging_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.test_charging-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Test Charging', - 'options': list([ - 'starting', - 'charging', - 'stopped', - 'complete', - 'disconnected', - 'no_power', - ]), - }), - 'context': , - 'entity_id': 'sensor.test_charging', - 
'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'stopped', - }) -# --- -# name: test_sensors[sensor.test_charging-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Test Charging', - 'options': list([ - 'starting', - 'charging', - 'stopped', - 'complete', - 'disconnected', - 'no_power', - ]), - }), - 'context': , - 'entity_id': 'sensor.test_charging', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'stopped', - }) -# --- -# name: test_sensors[sensor.test_distance_to_arrival-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_distance_to_arrival', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Distance to arrival', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'drive_state_active_route_miles_to_arrival', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_miles_to_arrival', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_distance_to_arrival-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'distance', - 'friendly_name': 'Test Distance to arrival', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_distance_to_arrival', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.063555', - }) -# --- -# name: test_sensors[sensor.test_distance_to_arrival-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'distance', - 'friendly_name': 'Test Distance to arrival', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_distance_to_arrival', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_driver_temperature_setting-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_driver_temperature_setting', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Driver temperature setting', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_driver_temp_setting', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_driver_temp_setting', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_driver_temperature_setting-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Driver temperature setting', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_driver_temperature_setting', - 'last_changed': 
, - 'last_reported': , - 'last_updated': , - 'state': '22', - }) -# --- -# name: test_sensors[sensor.test_driver_temperature_setting-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Driver temperature setting', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_driver_temperature_setting', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '22', - }) -# --- -# name: test_sensors[sensor.test_estimate_battery_range-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_estimate_battery_range', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Estimate battery range', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_est_battery_range', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_est_battery_range', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_estimate_battery_range-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'distance', - 'friendly_name': 'Test Estimate battery range', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_estimate_battery_range', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '442.63397376', - }) -# --- -# name: test_sensors[sensor.test_estimate_battery_range-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'distance', - 'friendly_name': 'Test Estimate battery range', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_estimate_battery_range', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '442.63397376', - }) -# --- -# name: test_sensors[sensor.test_fast_charger_type-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_fast_charger_type', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Fast charger type', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_fast_charger_type', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_fast_charger_type', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.test_fast_charger_type-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Fast charger type', - }), - 'context': , - 'entity_id': 'sensor.test_fast_charger_type', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'ACSingleWireCAN', - }) -# --- -# name: test_sensors[sensor.test_fast_charger_type-statealt] - StateSnapshot({ - 
'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Fast charger type', - }), - 'context': , - 'entity_id': 'sensor.test_fast_charger_type', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'ACSingleWireCAN', - }) -# --- -# name: test_sensors[sensor.test_ideal_battery_range-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_ideal_battery_range', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Ideal battery range', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_ideal_battery_range', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_ideal_battery_range', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_ideal_battery_range-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'distance', - 'friendly_name': 'Test Ideal battery range', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_ideal_battery_range', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '429.48563328', - }) -# --- -# name: test_sensors[sensor.test_ideal_battery_range-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'distance', - 'friendly_name': 'Test Ideal battery range', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_ideal_battery_range', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '429.48563328', - }) -# --- -# name: test_sensors[sensor.test_inside_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_inside_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Inside temperature', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_inside_temp', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_inside_temp', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_inside_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Inside temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_inside_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '29.8', - }) -# --- -# name: test_sensors[sensor.test_inside_temperature-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Inside temperature', - 
'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_inside_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '29.8', - }) -# --- -# name: test_sensors[sensor.test_odometer-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_odometer', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Odometer', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_odometer', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_odometer', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_odometer-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'distance', - 'friendly_name': 'Test Odometer', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_odometer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10430.189495371', - }) -# --- -# name: test_sensors[sensor.test_odometer-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'distance', - 'friendly_name': 'Test Odometer', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_odometer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10430.189495371', - }) -# --- -# name: test_sensors[sensor.test_outside_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_outside_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outside temperature', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_outside_temp', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_outside_temp', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_outside_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Outside temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_outside_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '30', - }) -# --- -# name: test_sensors[sensor.test_outside_temperature-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Outside temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_outside_temperature', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': '30', - }) -# --- -# name: test_sensors[sensor.test_passenger_temperature_setting-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_passenger_temperature_setting', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Passenger temperature setting', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_passenger_temp_setting', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_passenger_temp_setting', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_passenger_temperature_setting-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Passenger temperature setting', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_passenger_temperature_setting', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '22', - }) -# --- -# name: test_sensors[sensor.test_passenger_temperature_setting-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Passenger temperature setting', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_passenger_temperature_setting', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '22', - }) -# --- -# name: test_sensors[sensor.test_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'drive_state_power', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_power', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Test Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '-7', - }) -# --- -# name: test_sensors[sensor.test_power-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Test Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '-7', - }) -# --- -# name: test_sensors[sensor.test_shift_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'p', - 'd', - 'r', - 'n', - ]), - 
}), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_shift_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Shift state', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'drive_state_shift_state', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_shift_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.test_shift_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Test Shift state', - 'options': list([ - 'p', - 'd', - 'r', - 'n', - ]), - }), - 'context': , - 'entity_id': 'sensor.test_shift_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'p', - }) -# --- -# name: test_sensors[sensor.test_shift_state-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Test Shift state', - 'options': list([ - 'p', - 'd', - 'r', - 'n', - ]), - }), - 'context': , - 'entity_id': 'sensor.test_shift_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'p', - }) -# --- -# name: test_sensors[sensor.test_speed-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_speed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Speed', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'drive_state_speed', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_speed', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_speed-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'speed', - 'friendly_name': 'Test Speed', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_speed', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_speed-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'speed', - 'friendly_name': 'Test Speed', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_speed', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_state_of_charge_at_arrival-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_state_of_charge_at_arrival', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 
'State of charge at arrival', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'drive_state_active_route_energy_at_arrival', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_energy_at_arrival', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensors[sensor.test_state_of_charge_at_arrival-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Test State of charge at arrival', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_state_of_charge_at_arrival', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors[sensor.test_state_of_charge_at_arrival-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Test State of charge at arrival', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_state_of_charge_at_arrival', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors[sensor.test_time_to_arrival-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_time_to_arrival', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Time to arrival', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'drive_state_active_route_minutes_to_arrival', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_minutes_to_arrival', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.test_time_to_arrival-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Test Time to arrival', - }), - 'context': , - 'entity_id': 'sensor.test_time_to_arrival', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-01-01T00:00:06+00:00', - }) -# --- -# name: test_sensors[sensor.test_time_to_arrival-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Test Time to arrival', - }), - 'context': , - 'entity_id': 'sensor.test_time_to_arrival', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_sensors[sensor.test_time_to_full_charge-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_time_to_full_charge', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Time to full charge', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_minutes_to_full_charge', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_minutes_to_full_charge', - 'unit_of_measurement': None, - }) -# --- -# 
name: test_sensors[sensor.test_time_to_full_charge-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Test Time to full charge', - }), - 'context': , - 'entity_id': 'sensor.test_time_to_full_charge', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_sensors[sensor.test_time_to_full_charge-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Test Time to full charge', - }), - 'context': , - 'entity_id': 'sensor.test_time_to_full_charge', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_sensors[sensor.test_tire_pressure_front_left-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_tire_pressure_front_left', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Tire pressure front left', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_tpms_pressure_fl', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_fl', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_tire_pressure_front_left-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'Test Tire pressure front left', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_tire_pressure_front_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40.2479739314961', - }) -# --- -# name: test_sensors[sensor.test_tire_pressure_front_left-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'Test Tire pressure front left', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_tire_pressure_front_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40.2479739314961', - }) -# --- -# name: test_sensors[sensor.test_tire_pressure_front_right-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_tire_pressure_front_right', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Tire pressure front right', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_tpms_pressure_fr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_fr', - 'unit_of_measurement': , - 
}) -# --- -# name: test_sensors[sensor.test_tire_pressure_front_right-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'Test Tire pressure front right', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_tire_pressure_front_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40.6105682912393', - }) -# --- -# name: test_sensors[sensor.test_tire_pressure_front_right-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'Test Tire pressure front right', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_tire_pressure_front_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40.6105682912393', - }) -# --- -# name: test_sensors[sensor.test_tire_pressure_rear_left-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_tire_pressure_rear_left', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Tire pressure rear left', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_tpms_pressure_rl', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_rl', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_tire_pressure_rear_left-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'Test Tire pressure rear left', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_tire_pressure_rear_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40.2479739314961', - }) -# --- -# name: test_sensors[sensor.test_tire_pressure_rear_left-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'Test Tire pressure rear left', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_tire_pressure_rear_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40.2479739314961', - }) -# --- -# name: test_sensors[sensor.test_tire_pressure_rear_right-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_tire_pressure_rear_right', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Tire pressure rear right', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'vehicle_state_tpms_pressure_rr', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_rr', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_tire_pressure_rear_right-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'Test Tire pressure rear right', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_tire_pressure_rear_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40.2479739314961', - }) -# --- -# name: test_sensors[sensor.test_tire_pressure_rear_right-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'pressure', - 'friendly_name': 'Test Tire pressure rear right', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_tire_pressure_rear_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40.2479739314961', - }) -# --- -# name: test_sensors[sensor.test_traffic_delay-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_traffic_delay', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Traffic delay', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'drive_state_active_route_traffic_minutes_delay', - 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_traffic_minutes_delay', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.test_traffic_delay-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'Test Traffic delay', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_traffic_delay', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_traffic_delay-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'Test Traffic delay', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_traffic_delay', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[sensor.test_usable_battery_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_usable_battery_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Usable battery level', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_usable_battery_level', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_usable_battery_level', - 'unit_of_measurement': '%', - }) -# --- -# name: 
test_sensors[sensor.test_usable_battery_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Test Usable battery level', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_usable_battery_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '77', - }) -# --- -# name: test_sensors[sensor.test_usable_battery_level-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Test Usable battery level', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_usable_battery_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '77', - }) -# --- -# name: test_sensors[sensor.wall_connector_fault_state_code-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.wall_connector_fault_state_code', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Fault state code', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wall_connector_fault_state', - 'unique_id': '123456-abd-123-wall_connector_fault_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.wall_connector_fault_state_code-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wall Connector Fault state code', - }), - 'context': , - 'entity_id': 'sensor.wall_connector_fault_state_code', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_sensors[sensor.wall_connector_fault_state_code-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wall Connector Fault state code', - }), - 'context': , - 'entity_id': 'sensor.wall_connector_fault_state_code', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_sensors[sensor.wall_connector_fault_state_code_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.wall_connector_fault_state_code_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Fault state code', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wall_connector_fault_state', - 'unique_id': '123456-bcd-234-wall_connector_fault_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.wall_connector_fault_state_code_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wall Connector Fault state code', - }), - 'context': , - 'entity_id': 'sensor.wall_connector_fault_state_code_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: 
test_sensors[sensor.wall_connector_fault_state_code_2-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wall Connector Fault state code', - }), - 'context': , - 'entity_id': 'sensor.wall_connector_fault_state_code_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_sensors[sensor.wall_connector_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.wall_connector_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wall_connector_power', - 'unique_id': '123456-abd-123-wall_connector_power', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.wall_connector_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Wall Connector Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.wall_connector_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.wall_connector_power-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Wall Connector Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.wall_connector_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.wall_connector_power_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.wall_connector_power_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wall_connector_power', - 'unique_id': '123456-bcd-234-wall_connector_power', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.wall_connector_power_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Wall Connector Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.wall_connector_power_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.wall_connector_power_2-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Wall Connector Power', - 
'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.wall_connector_power_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.wall_connector_state_code-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.wall_connector_state_code', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'State code', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wall_connector_state', - 'unique_id': '123456-abd-123-wall_connector_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.wall_connector_state_code-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wall Connector State code', - }), - 'context': , - 'entity_id': 'sensor.wall_connector_state_code', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_sensors[sensor.wall_connector_state_code-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wall Connector State code', - }), - 'context': , - 'entity_id': 'sensor.wall_connector_state_code', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_sensors[sensor.wall_connector_state_code_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.wall_connector_state_code_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'State code', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wall_connector_state', - 'unique_id': '123456-bcd-234-wall_connector_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.wall_connector_state_code_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wall Connector State code', - }), - 'context': , - 'entity_id': 'sensor.wall_connector_state_code_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_sensors[sensor.wall_connector_state_code_2-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wall Connector State code', - }), - 'context': , - 'entity_id': 'sensor.wall_connector_state_code_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_sensors[sensor.wall_connector_vehicle-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.wall_connector_vehicle', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Vehicle', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vin', - 'unique_id': '123456-abd-123-vin', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.wall_connector_vehicle-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wall Connector Vehicle', - }), - 'context': , - 'entity_id': 'sensor.wall_connector_vehicle', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors[sensor.wall_connector_vehicle-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wall Connector Vehicle', - }), - 'context': , - 'entity_id': 'sensor.wall_connector_vehicle', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors[sensor.wall_connector_vehicle_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.wall_connector_vehicle_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Vehicle', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vin', - 'unique_id': '123456-bcd-234-vin', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.wall_connector_vehicle_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wall Connector Vehicle', - }), - 'context': , - 'entity_id': 'sensor.wall_connector_vehicle_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors[sensor.wall_connector_vehicle_2-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Wall Connector Vehicle', - }), - 'context': , - 'entity_id': 'sensor.wall_connector_vehicle_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/tesla_fleet/snapshots/test_switch.ambr b/tests/components/tesla_fleet/snapshots/test_switch.ambr deleted file mode 100644 index 2d69a7d314a..00000000000 --- a/tests/components/tesla_fleet/snapshots/test_switch.ambr +++ /dev/null @@ -1,489 +0,0 @@ -# serializer version: 1 -# name: test_switch[switch.energy_site_allow_charging_from_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.energy_site_allow_charging_from_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Allow charging from grid', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'components_disallow_charge_from_grid_with_solar_installed', - 'unique_id': '123456-components_disallow_charge_from_grid_with_solar_installed', - 
'unit_of_measurement': None, - }) -# --- -# name: test_switch[switch.energy_site_allow_charging_from_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Energy Site Allow charging from grid', - }), - 'context': , - 'entity_id': 'switch.energy_site_allow_charging_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch[switch.energy_site_storm_watch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.energy_site_storm_watch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Storm watch', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'user_settings_storm_mode_enabled', - 'unique_id': '123456-user_settings_storm_mode_enabled', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[switch.energy_site_storm_watch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Energy Site Storm watch', - }), - 'context': , - 'entity_id': 'switch.energy_site_storm_watch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[switch.test_auto_seat_climate_left-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.test_auto_seat_climate_left', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Auto seat climate left', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_auto_seat_climate_left', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_auto_seat_climate_left', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[switch.test_auto_seat_climate_left-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Test Auto seat climate left', - }), - 'context': , - 'entity_id': 'switch.test_auto_seat_climate_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[switch.test_auto_seat_climate_right-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.test_auto_seat_climate_right', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Auto seat climate right', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_auto_seat_climate_right', - 'unique_id': 
'LRWXF7EK4KC700000-climate_state_auto_seat_climate_right', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[switch.test_auto_seat_climate_right-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Test Auto seat climate right', - }), - 'context': , - 'entity_id': 'switch.test_auto_seat_climate_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[switch.test_auto_steering_wheel_heater-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.test_auto_steering_wheel_heater', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Auto steering wheel heater', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_auto_steering_wheel_heat', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_auto_steering_wheel_heat', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[switch.test_auto_steering_wheel_heater-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Test Auto steering wheel heater', - }), - 'context': , - 'entity_id': 'switch.test_auto_steering_wheel_heater', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch[switch.test_charge-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.test_charge', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Charge', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_user_charge_enable_request', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_user_charge_enable_request', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[switch.test_charge-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Test Charge', - }), - 'context': , - 'entity_id': 'switch.test_charge', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[switch.test_defrost-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.test_defrost', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Defrost', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_defrost_mode', - 'unique_id': 
'LRWXF7EK4KC700000-climate_state_defrost_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[switch.test_defrost-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Test Defrost', - }), - 'context': , - 'entity_id': 'switch.test_defrost', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch[switch.test_sentry_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.test_sentry_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sentry mode', - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_sentry_mode', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sentry_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[switch.test_sentry_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Test Sentry mode', - }), - 'context': , - 'entity_id': 'switch.test_sentry_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch_alt[switch.energy_site_allow_charging_from_grid-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Energy Site Allow charging from grid', - }), - 'context': , - 'entity_id': 'switch.energy_site_allow_charging_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch_alt[switch.energy_site_storm_watch-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Energy Site Storm watch', - }), - 'context': , - 'entity_id': 'switch.energy_site_storm_watch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch_alt[switch.test_auto_seat_climate_left-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Test Auto seat climate left', - }), - 'context': , - 'entity_id': 'switch.test_auto_seat_climate_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch_alt[switch.test_auto_seat_climate_right-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Test Auto seat climate right', - }), - 'context': , - 'entity_id': 'switch.test_auto_seat_climate_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch_alt[switch.test_auto_steering_wheel_heater-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Test Auto steering wheel heater', - }), - 'context': , - 'entity_id': 'switch.test_auto_steering_wheel_heater', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch_alt[switch.test_charge-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Test Charge', - }), - 'context': , - 
-    'entity_id': 'switch.test_charge',
-    'last_changed': ,
-    'last_reported': ,
-    'last_updated': ,
-    'state': 'on',
-  })
-# ---
-# name: test_switch_alt[switch.test_defrost-statealt]
-  StateSnapshot({
-    'attributes': ReadOnlyDict({
-      'device_class': 'switch',
-      'friendly_name': 'Test Defrost',
-    }),
-    'context': ,
-    'entity_id': 'switch.test_defrost',
-    'last_changed': ,
-    'last_reported': ,
-    'last_updated': ,
-    'state': 'off',
-  })
-# ---
-# name: test_switch_alt[switch.test_sentry_mode-statealt]
-  StateSnapshot({
-    'attributes': ReadOnlyDict({
-      'device_class': 'switch',
-      'friendly_name': 'Test Sentry mode',
-    }),
-    'context': ,
-    'entity_id': 'switch.test_sentry_mode',
-    'last_changed': ,
-    'last_reported': ,
-    'last_updated': ,
-    'state': 'off',
-  })
-# ---
diff --git a/tests/components/tesla_fleet/test_binary_sensors.py b/tests/components/tesla_fleet/test_binary_sensors.py
deleted file mode 100644
index a759e5ced70..00000000000
--- a/tests/components/tesla_fleet/test_binary_sensors.py
+++ /dev/null
@@ -1,66 +0,0 @@
-"""Test the Tesla Fleet binary sensor platform."""
-
-from unittest.mock import AsyncMock
-
-from freezegun.api import FrozenDateTimeFactory
-import pytest
-from syrupy.assertion import SnapshotAssertion
-from tesla_fleet_api.exceptions import VehicleOffline
-
-from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL
-from homeassistant.const import STATE_UNKNOWN, Platform
-from homeassistant.core import HomeAssistant
-from homeassistant.helpers import entity_registry as er
-
-from . import assert_entities, assert_entities_alt, setup_platform
-from .const import VEHICLE_DATA_ALT
-
-from tests.common import MockConfigEntry, async_fire_time_changed
-
-
-@pytest.mark.usefixtures("entity_registry_enabled_by_default")
-async def test_binary_sensor(
-    hass: HomeAssistant,
-    snapshot: SnapshotAssertion,
-    entity_registry: er.EntityRegistry,
-    normal_config_entry: MockConfigEntry,
-) -> None:
-    """Tests that the binary sensor entities are correct."""
-
-    await setup_platform(hass, normal_config_entry, [Platform.BINARY_SENSOR])
-    assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot)
-
-
-@pytest.mark.usefixtures("entity_registry_enabled_by_default")
-async def test_binary_sensor_refresh(
-    hass: HomeAssistant,
-    snapshot: SnapshotAssertion,
-    entity_registry: er.EntityRegistry,
-    mock_vehicle_data: AsyncMock,
-    freezer: FrozenDateTimeFactory,
-    normal_config_entry: MockConfigEntry,
-) -> None:
-    """Tests that the binary sensor entities are correct."""
-
-    await setup_platform(hass, normal_config_entry, [Platform.BINARY_SENSOR])
-
-    # Refresh
-    mock_vehicle_data.return_value = VEHICLE_DATA_ALT
-    freezer.tick(VEHICLE_INTERVAL)
-    async_fire_time_changed(hass)
-    await hass.async_block_till_done()
-
-    assert_entities_alt(hass, normal_config_entry.entry_id, entity_registry, snapshot)
-
-
-async def test_binary_sensor_offline(
-    hass: HomeAssistant,
-    mock_vehicle_data: AsyncMock,
-    normal_config_entry: MockConfigEntry,
-) -> None:
-    """Tests that the binary sensor entities are correct when offline."""
-
-    mock_vehicle_data.side_effect = VehicleOffline
-    await setup_platform(hass, normal_config_entry, [Platform.BINARY_SENSOR])
-    state = hass.states.get("binary_sensor.test_status")
-    assert state.state == STATE_UNKNOWN
diff --git a/tests/components/tesla_fleet/test_button.py b/tests/components/tesla_fleet/test_button.py
deleted file mode 100644
index ef1cfd90357..00000000000
--- a/tests/components/tesla_fleet/test_button.py
+++ /dev/null
@@ -1,99 +0,0 @@
-"""Test the Tesla Fleet button platform."""
-
-from copy import deepcopy
-from unittest.mock import AsyncMock, patch
-
-import pytest
-from syrupy import SnapshotAssertion
-from tesla_fleet_api.exceptions import NotOnWhitelistFault
-
-from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS
-from homeassistant.const import ATTR_ENTITY_ID, Platform
-from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import entity_registry as er
-
-from . import assert_entities, setup_platform
-from .const import COMMAND_OK
-
-from tests.common import MockConfigEntry
-
-
-@pytest.mark.usefixtures("entity_registry_enabled_by_default")
-async def test_button(
-    hass: HomeAssistant,
-    snapshot: SnapshotAssertion,
-    normal_config_entry: MockConfigEntry,
-    entity_registry: er.EntityRegistry,
-) -> None:
-    """Tests that the button entities are correct."""
-
-    await setup_platform(hass, normal_config_entry, [Platform.BUTTON])
-    assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot)
-
-    await hass.services.async_call(
-        BUTTON_DOMAIN,
-        SERVICE_PRESS,
-        {ATTR_ENTITY_ID: ["button.test_wake"]},
-        blocking=True,
-    )
-
-
-@pytest.mark.parametrize(
-    ("name", "func"),
-    [
-        ("flash_lights", "flash_lights"),
-        ("honk_horn", "honk_horn"),
-        ("keyless_driving", "remote_start_drive"),
-        ("play_fart", "remote_boombox"),
-        ("homelink", "trigger_homelink"),
-    ],
-)
-async def test_press(
-    hass: HomeAssistant, normal_config_entry: MockConfigEntry, name: str, func: str
-) -> None:
-    """Test pressing the API buttons."""
-    await setup_platform(hass, normal_config_entry, [Platform.BUTTON])
-
-    with patch(
-        f"homeassistant.components.tesla_fleet.VehicleSpecific.{func}",
-        return_value=COMMAND_OK,
-    ) as command:
-        await hass.services.async_call(
-            BUTTON_DOMAIN,
-            SERVICE_PRESS,
-            {ATTR_ENTITY_ID: [f"button.test_{name}"]},
-            blocking=True,
-        )
-        command.assert_called_once()
-
-
-async def test_press_signing_error(
-    hass: HomeAssistant, normal_config_entry: MockConfigEntry, mock_products: AsyncMock
-) -> None:
-    """Test pressing a button with a signing error."""
-    # Enable Signing
-    new_product = deepcopy(mock_products.return_value)
-    new_product["response"][0]["command_signing"] = "required"
-    mock_products.return_value = new_product
-
-    with (
-        patch("homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key"),
-    ):
-        await setup_platform(hass, normal_config_entry, [Platform.BUTTON])
-
-    with (
-        patch("homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key"),
-        patch(
-            "homeassistant.components.tesla_fleet.VehicleSigned.flash_lights",
-            side_effect=NotOnWhitelistFault,
-        ),
-        pytest.raises(HomeAssistantError) as error,
-    ):
-        await hass.services.async_call(
-            BUTTON_DOMAIN,
-            SERVICE_PRESS,
-            {ATTR_ENTITY_ID: ["button.test_flash_lights"]},
-            blocking=True,
-        )
-    assert error.from_exception(NotOnWhitelistFault)
diff --git a/tests/components/tesla_fleet/test_climate.py b/tests/components/tesla_fleet/test_climate.py
deleted file mode 100644
index b8cb7f1269b..00000000000
--- a/tests/components/tesla_fleet/test_climate.py
+++ /dev/null
@@ -1,451 +0,0 @@
-"""Test the Tesla Fleet climate platform."""
-
-from unittest.mock import AsyncMock, patch
-
-from freezegun.api import FrozenDateTimeFactory
-import pytest
-from syrupy.assertion import SnapshotAssertion
-from tesla_fleet_api.exceptions import InvalidCommand, VehicleOffline
-
-from homeassistant.components.climate import (
-    ATTR_HVAC_MODE,
-    ATTR_PRESET_MODE,
-    ATTR_TARGET_TEMP_HIGH,
-    ATTR_TARGET_TEMP_LOW,
-    ATTR_TEMPERATURE,
-    DOMAIN as CLIMATE_DOMAIN,
-    SERVICE_SET_HVAC_MODE,
-    SERVICE_SET_PRESET_MODE,
-    SERVICE_SET_TEMPERATURE,
-    SERVICE_TURN_OFF,
-    SERVICE_TURN_ON,
-    HVACMode,
-)
-from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL
-from homeassistant.const import ATTR_ENTITY_ID, Platform
-from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
-from homeassistant.helpers import entity_registry as er
-
-from . import assert_entities, setup_platform
-from .const import (
-    COMMAND_ERRORS,
-    COMMAND_IGNORED_REASON,
-    VEHICLE_ASLEEP,
-    VEHICLE_DATA_ALT,
-    VEHICLE_ONLINE,
-)
-
-from tests.common import MockConfigEntry, async_fire_time_changed
-
-
-@pytest.mark.usefixtures("entity_registry_enabled_by_default")
-async def test_climate(
-    hass: HomeAssistant,
-    snapshot: SnapshotAssertion,
-    entity_registry: er.EntityRegistry,
-    normal_config_entry: MockConfigEntry,
-) -> None:
-    """Tests that the climate entities are correct."""
-
-    await setup_platform(hass, normal_config_entry, [Platform.CLIMATE])
-    assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot)
-
-
-async def test_climate_services(
-    hass: HomeAssistant,
-    snapshot: SnapshotAssertion,
-    entity_registry: er.EntityRegistry,
-    normal_config_entry: MockConfigEntry,
-    mock_request: AsyncMock,
-) -> None:
-    """Tests that the climate services work."""
-
-    await setup_platform(hass, normal_config_entry, [Platform.CLIMATE])
-    entity_id = "climate.test_climate"
-
-    # Turn On and Set Temp
-    await hass.services.async_call(
-        CLIMATE_DOMAIN,
-        SERVICE_SET_TEMPERATURE,
-        {
-            ATTR_ENTITY_ID: [entity_id],
-            ATTR_TEMPERATURE: 20,
-            ATTR_HVAC_MODE: HVACMode.HEAT_COOL,
-        },
-        blocking=True,
-    )
-    state = hass.states.get(entity_id)
-    assert state.attributes[ATTR_TEMPERATURE] == 20
-    assert state.state == HVACMode.HEAT_COOL
-
-    # Set Temp
-    await hass.services.async_call(
-        CLIMATE_DOMAIN,
-        SERVICE_SET_TEMPERATURE,
-        {
-            ATTR_ENTITY_ID: [entity_id],
-            ATTR_TEMPERATURE: 21,
-        },
-        blocking=True,
-    )
-    state = hass.states.get(entity_id)
-    assert state.attributes[ATTR_TEMPERATURE] == 21
-
-    # Set Preset
-    await hass.services.async_call(
-        CLIMATE_DOMAIN,
-        SERVICE_SET_PRESET_MODE,
-        {ATTR_ENTITY_ID: [entity_id], ATTR_PRESET_MODE: "keep"},
-        blocking=True,
-    )
-    state = hass.states.get(entity_id)
-    assert state.attributes[ATTR_PRESET_MODE] == "keep"
-
-    # Set Preset
-    await hass.services.async_call(
-        CLIMATE_DOMAIN,
-        SERVICE_SET_PRESET_MODE,
-        {ATTR_ENTITY_ID: [entity_id], ATTR_PRESET_MODE: "off"},
-        blocking=True,
-    )
-    state = hass.states.get(entity_id)
-    assert state.attributes[ATTR_PRESET_MODE] == "off"
-
-    # Turn Off
-    await hass.services.async_call(
-        CLIMATE_DOMAIN,
-        SERVICE_SET_HVAC_MODE,
-        {ATTR_ENTITY_ID: [entity_id], ATTR_HVAC_MODE: HVACMode.OFF},
-        blocking=True,
-    )
-    state = hass.states.get(entity_id)
-    assert state.state == HVACMode.OFF
-
-
-@pytest.mark.usefixtures("entity_registry_enabled_by_default")
-async def test_climate_overheat_protection_services(
-    hass: HomeAssistant,
-    snapshot: SnapshotAssertion,
-    entity_registry: er.EntityRegistry,
-    normal_config_entry: MockConfigEntry,
-    mock_request: AsyncMock,
-) -> None:
-    """Tests that the climate overheat protection services work."""
-
-    await setup_platform(hass, normal_config_entry, [Platform.CLIMATE])
-    entity_id = "climate.test_cabin_overheat_protection"
-
-    # Turn On
and Set Low - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: [entity_id], - ATTR_TEMPERATURE: 30, - ATTR_HVAC_MODE: HVACMode.FAN_ONLY, - }, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_TEMPERATURE] == 30 - assert state.state == HVACMode.FAN_ONLY - - # Set Temp Medium - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: [entity_id], - ATTR_TEMPERATURE: 35, - }, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_TEMPERATURE] == 35 - - # Set Temp High - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: [entity_id], - ATTR_TEMPERATURE: 40, - }, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_TEMPERATURE] == 40 - - # Turn Off - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == HVACMode.OFF - - # Turn On - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == HVACMode.COOL - - # Call set temp with invalid temperature - with pytest.raises( - ServiceValidationError, - match="Cabin overheat protection does not support that temperature", - ): - # Invalid Temp - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 34}, - blocking=True, - ) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_climate_alt( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - mock_vehicle_data: AsyncMock, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the climate entity is correct.""" - - mock_vehicle_data.return_value = VEHICLE_DATA_ALT - await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) - assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_climate_offline( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - mock_vehicle_data: AsyncMock, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the climate entity is correct.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) - assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - -async def test_invalid_error( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests service error is handled.""" - - await setup_platform(hass, normal_config_entry, platforms=[Platform.CLIMATE]) - entity_id = "climate.test_climate" - - with ( - patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", - side_effect=InvalidCommand, - ) as mock_on, - pytest.raises( - HomeAssistantError, - match="Command failed: The data request or command is unknown.", - ), - ): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - mock_on.assert_called_once() - - -@pytest.mark.parametrize("response", COMMAND_ERRORS) -async def test_errors( - hass: HomeAssistant, response: str, normal_config_entry: 
MockConfigEntry -) -> None: - """Tests service reason is handled.""" - - await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) - entity_id = "climate.test_climate" - - with ( - patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", - return_value=response, - ) as mock_on, - pytest.raises(HomeAssistantError), - ): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - mock_on.assert_called_once() - - -async def test_ignored_error( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests ignored error is handled.""" - - await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) - entity_id = "climate.test_climate" - with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", - return_value=COMMAND_IGNORED_REASON, - ) as mock_on: - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - mock_on.assert_called_once() - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_asleep_or_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - mock_wake_up: AsyncMock, - mock_vehicle_state: AsyncMock, - freezer: FrozenDateTimeFactory, - normal_config_entry: MockConfigEntry, - mock_request: AsyncMock, -) -> None: - """Tests asleep is handled.""" - - await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) - entity_id = "climate.test_climate" - mock_vehicle_data.assert_called_once() - - # Put the vehicle asleep - mock_vehicle_data.reset_mock() - mock_vehicle_data.side_effect = VehicleOffline - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - mock_vehicle_data.assert_called_once() - mock_wake_up.reset_mock() - - # Run a command but fail trying to wake up the vehicle - mock_wake_up.side_effect = InvalidCommand - with pytest.raises( - HomeAssistantError, match="The data request or command is unknown."
- ): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - mock_wake_up.assert_called_once() - - mock_wake_up.side_effect = None - mock_wake_up.reset_mock() - - # Run a command but timeout trying to wake up the vehicle - mock_wake_up.return_value = VEHICLE_ASLEEP - mock_vehicle_state.return_value = VEHICLE_ASLEEP - with ( - patch("homeassistant.components.tesla_fleet.helpers.asyncio.sleep"), - pytest.raises(HomeAssistantError, match="Could not wake up vehicle"), - ): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - mock_wake_up.assert_called_once() - mock_vehicle_state.assert_called() - - mock_wake_up.reset_mock() - mock_vehicle_state.reset_mock() - mock_wake_up.return_value = VEHICLE_ONLINE - mock_vehicle_state.return_value = VEHICLE_ONLINE - - # Run a command and wake up the vehicle immediately - await hass.services.async_call( - CLIMATE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: [entity_id]}, blocking=True - ) - await hass.async_block_till_done() - mock_wake_up.assert_called_once() - - -async def test_climate_noscope( - hass: HomeAssistant, - readonly_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Tests with no command scopes.""" - await setup_platform(hass, readonly_config_entry, [Platform.CLIMATE]) - entity_id = "climate.test_climate" - - with pytest.raises( - ServiceValidationError, match="Climate mode off is not supported" - ): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_HVAC_MODE, - {ATTR_ENTITY_ID: [entity_id], ATTR_HVAC_MODE: HVACMode.OFF}, - blocking=True, - ) - - with pytest.raises( - HomeAssistantError, - match="Entity climate.test_climate does not support this service.", - ): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 20}, - blocking=True, - ) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.parametrize( - ("entity_id", "low", "high"), - [ - ("climate.test_climate", 16, 28), - ("climate.test_cabin_overheat_protection", 30, 40), - ], -) -async def test_climate_notemp( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - entity_id: str, - high: int, - low: int, -) -> None: - """Tests that set temp fails without a temp attribute.""" - - await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) - - with pytest.raises( - ServiceValidationError, - match="Set temperature action was used with the target temperature low/high parameter but the entity does not support it", - ): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_TEMPERATURE, - { - ATTR_ENTITY_ID: [entity_id], - ATTR_TARGET_TEMP_HIGH: high, - ATTR_TARGET_TEMP_LOW: low, - }, - blocking=True, - ) diff --git a/tests/components/tesla_fleet/test_config_flow.py b/tests/components/tesla_fleet/test_config_flow.py deleted file mode 100644 index b49e090cd5d..00000000000 --- a/tests/components/tesla_fleet/test_config_flow.py +++ /dev/null @@ -1,293 +0,0 @@ -"""Test the Tesla Fleet config flow.""" - -from unittest.mock import patch -from urllib.parse import parse_qs, urlparse - -import pytest - -from homeassistant.components.application_credentials import ( - ClientCredential, - async_import_client_credential, -) -from homeassistant.components.tesla_fleet.const import ( - AUTHORIZE_URL, - CLIENT_ID, - DOMAIN, - SCOPES, - TOKEN_URL, -) -from homeassistant.config_entries import 
SOURCE_USER -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import config_entry_oauth2_flow -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker -from tests.typing import ClientSessionGenerator - -REDIRECT = "https://example.com/auth/external/callback" -UNIQUE_ID = "uid" - - -@pytest.fixture -async def access_token(hass: HomeAssistant) -> str: - """Return a valid access token.""" - return config_entry_oauth2_flow._encode_jwt( - hass, - { - "sub": UNIQUE_ID, - "aud": [], - "scp": [ - "vehicle_device_data", - "vehicle_cmds", - "vehicle_charging_cmds", - "energy_device_data", - "energy_cmds", - "offline_access", - "openid", - ], - "ou_code": "NA", - }, - ) - - -@pytest.mark.usefixtures("current_request_with_host") -async def test_full_flow( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - access_token: str, -) -> None: - """Check full flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": REDIRECT, - }, - ) - - assert result["type"] is FlowResultType.EXTERNAL_STEP - - assert result["url"].startswith(AUTHORIZE_URL) - parsed_url = urlparse(result["url"]) - parsed_query = parse_qs(parsed_url.query) - assert parsed_query["response_type"][0] == "code" - assert parsed_query["client_id"][0] == CLIENT_ID - assert parsed_query["redirect_uri"][0] == REDIRECT - assert parsed_query["state"][0] == state - assert parsed_query["scope"][0] == " ".join(SCOPES) - assert parsed_query["code_challenge"][0] is not None - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.clear_requests() - aioclient_mock.post( - TOKEN_URL, - json={ - "refresh_token": "mock-refresh-token", - "access_token": access_token, - "type": "Bearer", - "expires_in": 60, - }, - ) - with patch( - "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True - ) as mock_setup: - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup.mock_calls) == 1 - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == UNIQUE_ID - assert "result" in result - assert result["result"].unique_id == UNIQUE_ID - assert "token" in result["result"].data - assert result["result"].data["token"]["access_token"] == access_token - assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" - - -@pytest.mark.usefixtures("current_request_with_host") -async def test_full_flow_user_cred( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - access_token: str, -) -> None: - """Check full flow.""" - - # Create user application credential - assert await async_setup_component(hass, "application_credentials", {}) - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential("user_client_id", "user_client_secret"), - "user_cred", - ) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - assert result["type"] 
is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"implementation": "user_cred"} - ) - assert result["type"] is FlowResultType.EXTERNAL_STEP - - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": REDIRECT, - }, - ) - - assert result["url"].startswith(AUTHORIZE_URL) - parsed_url = urlparse(result["url"]) - parsed_query = parse_qs(parsed_url.query) - assert parsed_query["response_type"][0] == "code" - assert parsed_query["client_id"][0] == "user_client_id" - assert parsed_query["redirect_uri"][0] == REDIRECT - assert parsed_query["state"][0] == state - assert parsed_query["scope"][0] == " ".join(SCOPES) - assert "code_challenge" not in parsed_query # Ensure not a PKCE flow - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.clear_requests() - aioclient_mock.post( - TOKEN_URL, - json={ - "refresh_token": "mock-refresh-token", - "access_token": access_token, - "type": "Bearer", - "expires_in": 60, - }, - ) - with patch( - "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True - ) as mock_setup: - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup.mock_calls) == 1 - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == UNIQUE_ID - assert "result" in result - assert result["result"].unique_id == UNIQUE_ID - assert "token" in result["result"].data - assert result["result"].data["token"]["access_token"] == access_token - assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" - - -@pytest.mark.usefixtures("current_request_with_host") -async def test_reauthentication( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - access_token: str, -) -> None: - """Test Tesla Fleet reauthentication.""" - old_entry = MockConfigEntry( - domain=DOMAIN, - unique_id=UNIQUE_ID, - version=1, - data={}, - ) - old_entry.add_to_hass(hass) - - result = await old_entry.start_reauth_flow(hass) - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - - result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) - - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": REDIRECT, - }, - ) - client = await hass_client_no_auth() - await client.get(f"/auth/external/callback?code=abcd&state={state}") - - aioclient_mock.post( - TOKEN_URL, - json={ - "refresh_token": "mock-refresh-token", - "access_token": access_token, - "type": "Bearer", - "expires_in": 60, - }, - ) - - with patch( - "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True - ): - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert result - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - - -@pytest.mark.usefixtures("current_request_with_host") -async def test_reauth_account_mismatch( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - access_token: str, -) -> None: - """Test Tesla Fleet reauthentication with different account.""" - old_entry = MockConfigEntry(domain=DOMAIN, 
unique_id="baduid", version=1, data={}) - old_entry.add_to_hass(hass) - - result = await old_entry.start_reauth_flow(hass) - - flows = hass.config_entries.flow.async_progress() - result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) - - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": REDIRECT, - }, - ) - client = await hass_client_no_auth() - await client.get(f"/auth/external/callback?code=abcd&state={state}") - - aioclient_mock.post( - TOKEN_URL, - json={ - "refresh_token": "mock-refresh-token", - "access_token": access_token, - "type": "Bearer", - "expires_in": 60, - }, - ) - - with patch( - "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True - ): - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_account_mismatch" diff --git a/tests/components/tesla_fleet/test_cover.py b/tests/components/tesla_fleet/test_cover.py deleted file mode 100644 index ac5307b2fdd..00000000000 --- a/tests/components/tesla_fleet/test_cover.py +++ /dev/null @@ -1,235 +0,0 @@ -"""Test the Teslemetry cover platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline - -from homeassistant.components.cover import ( - DOMAIN as COVER_DOMAIN, - SERVICE_CLOSE_COVER, - SERVICE_OPEN_COVER, - SERVICE_STOP_COVER, - CoverState, -) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import assert_entities, setup_platform -from .const import COMMAND_OK, VEHICLE_DATA_ALT - -from tests.common import MockConfigEntry - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_cover( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the cover entities are correct.""" - - await setup_platform(hass, normal_config_entry, [Platform.COVER]) - assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_cover_alt( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - mock_vehicle_data: AsyncMock, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the cover entities are correct with alternate values.""" - - mock_vehicle_data.return_value = VEHICLE_DATA_ALT - await setup_platform(hass, normal_config_entry, [Platform.COVER]) - assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_cover_readonly( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - readonly_config_entry: MockConfigEntry, -) -> None: - """Tests that the cover entities are correct without scopes.""" - - await setup_platform(hass, readonly_config_entry, [Platform.COVER]) - assert_entities(hass, readonly_config_entry.entry_id, entity_registry, snapshot) - - -async def test_cover_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the cover entities are correct when offline.""" - - mock_vehicle_data.side_effect 
= VehicleOffline - await setup_platform(hass, normal_config_entry, [Platform.COVER]) - state = hass.states.get("cover.test_windows") - assert state.state == STATE_UNKNOWN - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_cover_services( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the cover entities are correct.""" - - await setup_platform(hass, normal_config_entry, [Platform.COVER]) - - # Vent Windows - entity_id = "cover.test_windows" - with patch( - "homeassistant.components.teslemetry.VehicleSpecific.window_control", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - call.assert_called_once() - state = hass.states.get(entity_id) - assert state - assert state.state == CoverState.OPEN - - call.reset_mock() - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: ["cover.test_windows"]}, - blocking=True, - ) - call.assert_called_once() - state = hass.states.get(entity_id) - assert state - assert state.state == CoverState.CLOSED - - # Charge Port Door - entity_id = "cover.test_charge_port_door" - with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_open", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - call.assert_called_once() - state = hass.states.get(entity_id) - assert state - assert state.state == CoverState.OPEN - - with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_close", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - call.assert_called_once() - state = hass.states.get(entity_id) - assert state - assert state.state == CoverState.CLOSED - - # Frunk - entity_id = "cover.test_frunk" - with patch( - "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - call.assert_called_once() - state = hass.states.get(entity_id) - assert state - assert state.state == CoverState.OPEN - - # Trunk - entity_id = "cover.test_trunk" - with patch( - "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - call.assert_called_once() - state = hass.states.get(entity_id) - assert state - assert state.state == CoverState.OPEN - - call.reset_mock() - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - call.assert_called_once() - state = hass.states.get(entity_id) - assert state - assert state.state == CoverState.CLOSED - - # Sunroof - entity_id = "cover.test_sunroof" - with patch( - "homeassistant.components.teslemetry.VehicleSpecific.sun_roof_control", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - call.assert_called_once() - state = hass.states.get(entity_id) - assert state - assert state.state == CoverState.OPEN - - call.reset_mock() 
- await hass.services.async_call( - COVER_DOMAIN, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - call.assert_called_once() - state = hass.states.get(entity_id) - assert state - assert state.state == CoverState.OPEN - - call.reset_mock() - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - call.assert_called_once() - state = hass.states.get(entity_id) - assert state - assert state.state == CoverState.CLOSED diff --git a/tests/components/tesla_fleet/test_device_tracker.py b/tests/components/tesla_fleet/test_device_tracker.py deleted file mode 100644 index e6f483d7953..00000000000 --- a/tests/components/tesla_fleet/test_device_tracker.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Test the Tesla Fleet device tracker platform.""" - -from unittest.mock import AsyncMock - -from syrupy.assertion import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline - -from homeassistant.const import STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import assert_entities, setup_platform - -from tests.common import MockConfigEntry - - -async def test_device_tracker( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the device tracker entities are correct.""" - - await setup_platform(hass, normal_config_entry, [Platform.DEVICE_TRACKER]) - assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - -async def test_device_tracker_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the device tracker entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, normal_config_entry, [Platform.DEVICE_TRACKER]) - state = hass.states.get("device_tracker.test_location") - assert state.state == STATE_UNKNOWN diff --git a/tests/components/tesla_fleet/test_diagnostics.py b/tests/components/tesla_fleet/test_diagnostics.py deleted file mode 100644 index e0ef24097bb..00000000000 --- a/tests/components/tesla_fleet/test_diagnostics.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Test the Tesla Fleet Diagnostics.""" - -from syrupy.assertion import SnapshotAssertion - -from homeassistant.core import HomeAssistant - -from . 
import setup_platform - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, - normal_config_entry: MockConfigEntry, -) -> None: - """Test diagnostics.""" - - await setup_platform(hass, normal_config_entry) - - diag = await get_diagnostics_for_config_entry( - hass, hass_client, normal_config_entry - ) - assert diag == snapshot diff --git a/tests/components/tesla_fleet/test_init.py b/tests/components/tesla_fleet/test_init.py deleted file mode 100644 index 7c17f986663..00000000000 --- a/tests/components/tesla_fleet/test_init.py +++ /dev/null @@ -1,426 +0,0 @@ -"""Test the Tesla Fleet init.""" - -from copy import deepcopy -from unittest.mock import AsyncMock, patch - -from aiohttp import RequestInfo -from aiohttp.client_exceptions import ClientResponseError -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy.assertion import SnapshotAssertion -from tesla_fleet_api.exceptions import ( - InvalidRegion, - InvalidToken, - LibraryError, - LoginRequired, - OAuthExpired, - RateLimited, - TeslaFleetError, - VehicleOffline, -) - -from homeassistant.components.tesla_fleet.const import AUTHORIZE_URL -from homeassistant.components.tesla_fleet.coordinator import ( - ENERGY_INTERVAL, - ENERGY_INTERVAL_SECONDS, - VEHICLE_INTERVAL, - VEHICLE_INTERVAL_SECONDS, - VEHICLE_WAIT, -) -from homeassistant.components.tesla_fleet.models import TeslaFleetData -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from . 
import setup_platform -from .const import VEHICLE_ASLEEP, VEHICLE_DATA_ALT - -from tests.common import MockConfigEntry, async_fire_time_changed - -ERRORS = [ - (InvalidToken, ConfigEntryState.SETUP_ERROR), - (OAuthExpired, ConfigEntryState.SETUP_ERROR), - (LoginRequired, ConfigEntryState.SETUP_ERROR), - (TeslaFleetError, ConfigEntryState.SETUP_RETRY), -] - - -async def test_load_unload( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, -) -> None: - """Test load and unload.""" - - await setup_platform(hass, normal_config_entry) - - assert normal_config_entry.state is ConfigEntryState.LOADED - assert isinstance(normal_config_entry.runtime_data, TeslaFleetData) - assert await hass.config_entries.async_unload(normal_config_entry.entry_id) - await hass.async_block_till_done() - assert normal_config_entry.state is ConfigEntryState.NOT_LOADED - assert not hasattr(normal_config_entry, "runtime_data") - - -@pytest.mark.parametrize(("side_effect", "state"), ERRORS) -async def test_init_error( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_products: AsyncMock, - side_effect: TeslaFleetError, - state: ConfigEntryState, -) -> None: - """Test init with errors.""" - - mock_products.side_effect = side_effect - await setup_platform(hass, normal_config_entry) - assert normal_config_entry.state is state - - -async def test_oauth_refresh_expired( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_products: AsyncMock, -) -> None: - """Test init with expired Oauth token.""" - - # Patch the token refresh to raise an error - with patch( - "homeassistant.components.tesla_fleet.OAuth2Session.async_ensure_token_valid", - side_effect=ClientResponseError( - RequestInfo(AUTHORIZE_URL, "POST", {}, AUTHORIZE_URL), None, status=401 - ), - ) as mock_async_ensure_token_valid: - # Trigger an unmocked function call - mock_products.side_effect = InvalidRegion - await setup_platform(hass, normal_config_entry) - - mock_async_ensure_token_valid.assert_called_once() - assert normal_config_entry.state is ConfigEntryState.SETUP_ERROR - - -async def test_oauth_refresh_error( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_products: AsyncMock, -) -> None: - """Test init with Oauth refresh failure.""" - - # Patch the token refresh to raise an error - with patch( - "homeassistant.components.tesla_fleet.OAuth2Session.async_ensure_token_valid", - side_effect=ClientResponseError( - RequestInfo(AUTHORIZE_URL, "POST", {}, AUTHORIZE_URL), None, status=400 - ), - ) as mock_async_ensure_token_valid: - # Trigger an unmocked function call - mock_products.side_effect = InvalidRegion - await setup_platform(hass, normal_config_entry) - - mock_async_ensure_token_valid.assert_called_once() - assert normal_config_entry.state is ConfigEntryState.SETUP_RETRY - - -# Test devices -async def test_devices( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test device registry.""" - await setup_platform(hass, normal_config_entry) - devices = dr.async_entries_for_config_entry( - device_registry, normal_config_entry.entry_id - ) - - for device in devices: - assert device == snapshot(name=f"{device.identifiers}") - - -# Vehicle Coordinator -async def test_vehicle_refresh_offline( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_vehicle_state: AsyncMock, - mock_vehicle_data: AsyncMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test coordinator refresh with an 
error.""" - await setup_platform(hass, normal_config_entry) - assert normal_config_entry.state is ConfigEntryState.LOADED - - mock_vehicle_state.assert_called_once() - mock_vehicle_data.assert_called_once() - mock_vehicle_state.reset_mock() - mock_vehicle_data.reset_mock() - - # Then the vehicle goes offline - mock_vehicle_data.side_effect = VehicleOffline - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - mock_vehicle_state.assert_not_called() - mock_vehicle_data.assert_called_once() - mock_vehicle_data.reset_mock() - - # And stays offline - mock_vehicle_state.return_value = VEHICLE_ASLEEP - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - mock_vehicle_state.assert_called_once() - mock_vehicle_data.assert_not_called() - - -@pytest.mark.parametrize(("side_effect"), ERRORS) -async def test_vehicle_refresh_error( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_vehicle_data: AsyncMock, - side_effect: TeslaFleetError, - freezer: FrozenDateTimeFactory, -) -> None: - """Test coordinator refresh makes entity unavailable.""" - - await setup_platform(hass, normal_config_entry) - - mock_vehicle_data.side_effect = side_effect - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert (state := hass.states.get("sensor.test_battery_level")) - assert state.state == "unavailable" - - -async def test_vehicle_refresh_ratelimited( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_vehicle_data: AsyncMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test coordinator refresh handles 429.""" - - mock_vehicle_data.side_effect = RateLimited( - {"after": VEHICLE_INTERVAL_SECONDS + 10} - ) - await setup_platform(hass, normal_config_entry) - - assert (state := hass.states.get("sensor.test_battery_level")) - assert state.state == "unknown" - assert mock_vehicle_data.call_count == 1 - - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # Should not call for another 10 seconds - assert mock_vehicle_data.call_count == 1 - - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert mock_vehicle_data.call_count == 2 - - -async def test_vehicle_sleep( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_vehicle_data: AsyncMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test coordinator refresh with an error.""" - await setup_platform(hass, normal_config_entry) - assert mock_vehicle_data.call_count == 1 - - freezer.tick(VEHICLE_WAIT + VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # Let vehicle sleep, no updates for 15 minutes - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 2 - - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # No polling, call_count should not increase - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 2 - - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # No polling, call_count should not increase - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 2 - - freezer.tick(VEHICLE_WAIT) - async_fire_time_changed(hass) - # Vehicle didn't sleep, go back to normal - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 3 - - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # Regular polling - await 
hass.async_block_till_done() - assert mock_vehicle_data.call_count == 4 - - mock_vehicle_data.return_value = VEHICLE_DATA_ALT - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - # Vehicle active - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 5 - - freezer.tick(VEHICLE_WAIT) - async_fire_time_changed(hass) - # Don't let sleep when active - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 6 - - freezer.tick(VEHICLE_WAIT) - async_fire_time_changed(hass) - # Don't let sleep when active - await hass.async_block_till_done() - assert mock_vehicle_data.call_count == 7 - - -# Test Energy Live Coordinator -@pytest.mark.parametrize(("side_effect", "state"), ERRORS) -async def test_energy_live_refresh_error( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_live_status: AsyncMock, - side_effect: TeslaFleetError, - state: ConfigEntryState, -) -> None: - """Test coordinator refresh with an error.""" - mock_live_status.side_effect = side_effect - await setup_platform(hass, normal_config_entry) - assert normal_config_entry.state is state - - -# Test Energy Site Coordinator -@pytest.mark.parametrize(("side_effect", "state"), ERRORS) -async def test_energy_site_refresh_error( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_site_info: AsyncMock, - side_effect: TeslaFleetError, - state: ConfigEntryState, -) -> None: - """Test coordinator refresh with an error.""" - mock_site_info.side_effect = side_effect - await setup_platform(hass, normal_config_entry) - assert normal_config_entry.state is state - - -async def test_energy_live_refresh_ratelimited( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_live_status, - freezer: FrozenDateTimeFactory, -) -> None: - """Test coordinator refresh handles 429.""" - - await setup_platform(hass, normal_config_entry) - - mock_live_status.side_effect = RateLimited({"after": ENERGY_INTERVAL_SECONDS + 10}) - freezer.tick(ENERGY_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert mock_live_status.call_count == 2 - - freezer.tick(ENERGY_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # Should not call for another 10 seconds - assert mock_live_status.call_count == 2 - - freezer.tick(ENERGY_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert mock_live_status.call_count == 3 - - -async def test_energy_info_refresh_ratelimited( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_site_info: AsyncMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test coordinator refresh handles 429.""" - - await setup_platform(hass, normal_config_entry) - - mock_site_info.side_effect = RateLimited({"after": ENERGY_INTERVAL_SECONDS + 10}) - freezer.tick(ENERGY_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert mock_site_info.call_count == 2 - - freezer.tick(ENERGY_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # Should not call for another 10 seconds - assert mock_site_info.call_count == 2 - - freezer.tick(ENERGY_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert mock_site_info.call_count == 3 - - -async def test_init_region_issue( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_products: AsyncMock, - mock_find_server: AsyncMock, -) -> None: - """Test init with region issue.""" - -
mock_products.side_effect = InvalidRegion - await setup_platform(hass, normal_config_entry) - mock_find_server.assert_called_once() - assert normal_config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_init_region_issue_failed( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_products: AsyncMock, - mock_find_server: AsyncMock, -) -> None: - """Test init with unresolvable region issue.""" - - mock_products.side_effect = InvalidRegion - mock_find_server.side_effect = LibraryError - await setup_platform(hass, normal_config_entry) - mock_find_server.assert_called_once() - assert normal_config_entry.state is ConfigEntryState.SETUP_ERROR - - -async def test_signing( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, - mock_products: AsyncMock, -) -> None: - """Tests when a vehicle requires signing.""" - - # Make the vehicle require command signing - products = deepcopy(mock_products.return_value) - products["response"][0]["command_signing"] = "required" - mock_products.return_value = products - - with patch( - "homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key" - ) as mock_get_private_key: - await setup_platform(hass, normal_config_entry) - mock_get_private_key.assert_called_once() diff --git a/tests/components/tesla_fleet/test_lock.py b/tests/components/tesla_fleet/test_lock.py deleted file mode 100644 index 00b77aefcaf..00000000000 --- a/tests/components/tesla_fleet/test_lock.py +++ /dev/null @@ -1,111 +0,0 @@ -"""Test the Tesla Fleet lock platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline - -from homeassistant.components.lock import ( - DOMAIN as LOCK_DOMAIN, - SERVICE_LOCK, - SERVICE_UNLOCK, - LockState, -) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er - -from . 
import assert_entities, setup_platform -from .const import COMMAND_OK - -from tests.common import MockConfigEntry - - -async def test_lock( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the lock entities are correct.""" - - await setup_platform(hass, normal_config_entry, [Platform.LOCK]) - assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - -async def test_lock_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the lock entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, normal_config_entry, [Platform.LOCK]) - state = hass.states.get("lock.test_lock") - assert state.state == STATE_UNKNOWN - - -async def test_lock_services( - hass: HomeAssistant, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the lock services work.""" - - await setup_platform(hass, normal_config_entry, [Platform.LOCK]) - - entity_id = "lock.test_lock" - - with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.door_lock", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - LOCK_DOMAIN, - SERVICE_LOCK, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == LockState.LOCKED - call.assert_called_once() - - with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.door_unlock", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - LOCK_DOMAIN, - SERVICE_UNLOCK, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == LockState.UNLOCKED - call.assert_called_once() - - entity_id = "lock.test_charge_cable_lock" - - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - LOCK_DOMAIN, - SERVICE_LOCK, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.charge_port_door_open", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - LOCK_DOMAIN, - SERVICE_UNLOCK, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == LockState.UNLOCKED - call.assert_called_once() diff --git a/tests/components/tesla_fleet/test_media_player.py b/tests/components/tesla_fleet/test_media_player.py deleted file mode 100644 index 4c833e7499f..00000000000 --- a/tests/components/tesla_fleet/test_media_player.py +++ /dev/null @@ -1,157 +0,0 @@ -"""Test the Tesla Fleet media player platform.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline - -from homeassistant.components.media_player import ( - ATTR_MEDIA_VOLUME_LEVEL, - DOMAIN as MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_NEXT_TRACK, - SERVICE_MEDIA_PAUSE, - SERVICE_MEDIA_PLAY, - SERVICE_MEDIA_PREVIOUS_TRACK, - SERVICE_VOLUME_SET, - MediaPlayerState, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import assert_entities, assert_entities_alt, setup_platform -from .const import COMMAND_OK, VEHICLE_DATA_ALT - -from tests.common import MockConfigEntry - - -async def test_media_player( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the media player entities are correct.""" - - await setup_platform(hass, normal_config_entry, [Platform.MEDIA_PLAYER]) - assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - -async def test_media_player_alt( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - mock_vehicle_data: AsyncMock, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the media player entities are correct.""" - - mock_vehicle_data.return_value = VEHICLE_DATA_ALT - await setup_platform(hass, normal_config_entry, [Platform.MEDIA_PLAYER]) - assert_entities_alt(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - -async def test_media_player_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the media player entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, normal_config_entry, [Platform.MEDIA_PLAYER]) - state = hass.states.get("media_player.test_media_player") - assert state.state == MediaPlayerState.OFF - - -async def test_media_player_noscope( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - readonly_config_entry: MockConfigEntry, -) -> None: - """Tests that the media player entities are correct without required scope.""" - - await setup_platform(hass, readonly_config_entry, [Platform.MEDIA_PLAYER]) - assert_entities(hass, readonly_config_entry.entry_id, entity_registry, snapshot) - - -async def test_media_player_services( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the media player services work.""" - - await setup_platform(hass, normal_config_entry, [Platform.MEDIA_PLAYER]) - - entity_id = "media_player.test_media_player" - - with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.adjust_volume", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_SET, - {ATTR_ENTITY_ID: entity_id, ATTR_MEDIA_VOLUME_LEVEL: 0.5}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.5 - call.assert_called_once() - - with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_toggle_playback", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_PAUSE, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == MediaPlayerState.PAUSED - call.assert_called_once() - - # This test will fail without the previous call to pause playback - with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_toggle_playback", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_PLAY, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == MediaPlayerState.PLAYING - call.assert_called_once() - - with patch( - 
"homeassistant.components.tesla_fleet.VehicleSpecific.media_next_track", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_NEXT_TRACK, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - state = hass.states.get(entity_id) - call.assert_called_once() - - with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_prev_track", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_MEDIA_PREVIOUS_TRACK, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - state = hass.states.get(entity_id) - call.assert_called_once() diff --git a/tests/components/tesla_fleet/test_number.py b/tests/components/tesla_fleet/test_number.py deleted file mode 100644 index 8551a99ee29..00000000000 --- a/tests/components/tesla_fleet/test_number.py +++ /dev/null @@ -1,119 +0,0 @@ -"""Test the Tesla Fleet number platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline - -from homeassistant.components.number import ( - ATTR_VALUE, - DOMAIN as NUMBER_DOMAIN, - SERVICE_SET_VALUE, -) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import assert_entities, setup_platform -from .const import COMMAND_OK, VEHICLE_DATA_ALT - -from tests.common import MockConfigEntry - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_number( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the number entities are correct.""" - - await setup_platform(hass, normal_config_entry, [Platform.NUMBER]) - assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - -async def test_number_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the number entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, normal_config_entry, [Platform.NUMBER]) - state = hass.states.get("number.test_charge_current") - assert state.state == STATE_UNKNOWN - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_number_services( - hass: HomeAssistant, mock_vehicle_data, normal_config_entry: MockConfigEntry -) -> None: - """Tests that the number services work.""" - mock_vehicle_data.return_value = VEHICLE_DATA_ALT - await setup_platform(hass, normal_config_entry, [Platform.NUMBER]) - - entity_id = "number.test_charge_current" - with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.set_charging_amps", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 16}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == "16" - call.assert_called_once() - - entity_id = "number.test_charge_limit" - with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.set_charge_limit", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 60}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == "60" - 
call.assert_called_once() - - entity_id = "number.energy_site_backup_reserve" - with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.backup", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: entity_id, - ATTR_VALUE: 80, - }, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == "80" - call.assert_called_once() - - entity_id = "number.energy_site_off_grid_reserve" - with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.off_grid_vehicle_charging_reserve", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 88}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == "88" - call.assert_called_once() diff --git a/tests/components/tesla_fleet/test_select.py b/tests/components/tesla_fleet/test_select.py deleted file mode 100644 index 902b28ddb7a..00000000000 --- a/tests/components/tesla_fleet/test_select.py +++ /dev/null @@ -1,136 +0,0 @@ -"""Test the Tesla Fleet select platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion -from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode -from tesla_fleet_api.exceptions import VehicleOffline - -from homeassistant.components.select import ( - ATTR_OPTION, - DOMAIN as SELECT_DOMAIN, - SERVICE_SELECT_OPTION, -) -from homeassistant.components.tesla_fleet.select import LOW -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import assert_entities, setup_platform -from .const import COMMAND_OK, VEHICLE_DATA_ALT - -from tests.common import MockConfigEntry - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_select( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the select entities are correct.""" - - await setup_platform(hass, normal_config_entry, [Platform.SELECT]) - assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - -async def test_select_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the select entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, normal_config_entry, [Platform.SELECT]) - state = hass.states.get("select.test_seat_heater_front_left") - assert state.state == STATE_UNKNOWN - - -async def test_select_services( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the select services work.""" - mock_vehicle_data.return_value = VEHICLE_DATA_ALT - await setup_platform(hass, normal_config_entry, [Platform.SELECT]) - - entity_id = "select.test_seat_heater_front_left" - with ( - patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.remote_seat_heater_request", - return_value=COMMAND_OK, - ) as remote_seat_heater_request, - patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", - return_value=COMMAND_OK, - ) as auto_conditioning_start, - ): - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, 
ATTR_OPTION: LOW}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == LOW - auto_conditioning_start.assert_called_once() - remote_seat_heater_request.assert_called_once() - - entity_id = "select.test_steering_wheel_heater" - with ( - patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.remote_steering_wheel_heat_level_request", - return_value=COMMAND_OK, - ) as remote_steering_wheel_heat_level_request, - patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", - return_value=COMMAND_OK, - ) as auto_conditioning_start, - ): - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: LOW}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == LOW - auto_conditioning_start.assert_called_once() - remote_steering_wheel_heat_level_request.assert_called_once() - - entity_id = "select.energy_site_operation_mode" - with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.operation", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: entity_id, - ATTR_OPTION: EnergyOperationMode.AUTONOMOUS.value, - }, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == EnergyOperationMode.AUTONOMOUS.value - call.assert_called_once() - - entity_id = "select.energy_site_allow_export" - with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.grid_import_export", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: EnergyExportMode.BATTERY_OK.value}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == EnergyExportMode.BATTERY_OK.value - call.assert_called_once() diff --git a/tests/components/tesla_fleet/test_sensor.py b/tests/components/tesla_fleet/test_sensor.py deleted file mode 100644 index 5faebbc47e2..00000000000 --- a/tests/components/tesla_fleet/test_sensor.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Test the Tesla Fleet sensor platform.""" - -from unittest.mock import AsyncMock, patch - -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy.assertion import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline - -from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL -from homeassistant.const import STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import assert_entities, assert_entities_alt, setup_platform -from .const import VEHICLE_DATA_ALT - -from tests.common import MockConfigEntry, async_fire_time_changed - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensors( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - normal_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, - mock_vehicle_data: AsyncMock, -) -> None: - """Tests that the sensor entities are correct.""" - - freezer.move_to("2024-01-01 00:00:00+00:00") - - await setup_platform(hass, normal_config_entry, [Platform.SENSOR]) - - assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - # Coordinator refresh - mock_vehicle_data.return_value = VEHICLE_DATA_ALT - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert_entities_alt(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - -@pytest.mark.parametrize( - ("entity_id", "initial", "restored"), - [ - ("sensor.test_battery_level", "77", "77"), - ("sensor.test_outside_temperature", "30", "30"), - ("sensor.test_time_to_arrival", "2024-01-01T00:00:06+00:00", STATE_UNAVAILABLE), - ], -) -async def test_sensors_restore( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - normal_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, - mock_vehicle_data: AsyncMock, - entity_id: str, - initial: str, - restored: str, -) -> None: - """Test if the sensor should restore it's state or not when vehicle is offline.""" - - freezer.move_to("2024-01-01 00:00:00+00:00") - - await setup_platform(hass, normal_config_entry, [Platform.SENSOR]) - - assert hass.states.get(entity_id).state == initial - - mock_vehicle_data.side_effect = VehicleOffline - - with patch("homeassistant.components.tesla_fleet.PLATFORMS", [Platform.SENSOR]): - assert await hass.config_entries.async_reload(normal_config_entry.entry_id) - - assert hass.states.get(entity_id).state == restored diff --git a/tests/components/tesla_fleet/test_switch.py b/tests/components/tesla_fleet/test_switch.py deleted file mode 100644 index fba4fc05cc4..00000000000 --- a/tests/components/tesla_fleet/test_switch.py +++ /dev/null @@ -1,167 +0,0 @@ -"""Test the tesla_fleet switch platform.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from syrupy import SnapshotAssertion -from tesla_fleet_api.exceptions import VehicleOffline - -from homeassistant.components.switch import ( - DOMAIN as SWITCH_DOMAIN, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, -) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_OFF, - STATE_ON, - STATE_UNKNOWN, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er - -from . 
import assert_entities, assert_entities_alt, setup_platform -from .const import COMMAND_OK, VEHICLE_DATA_ALT - -from tests.common import MockConfigEntry - - -async def test_switch( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the switch entities are correct.""" - - await setup_platform(hass, normal_config_entry, [Platform.SWITCH]) - assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - -async def test_switch_alt( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - mock_vehicle_data: AsyncMock, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the switch entities are correct.""" - - mock_vehicle_data.return_value = VEHICLE_DATA_ALT - await setup_platform(hass, normal_config_entry, [Platform.SWITCH]) - assert_entities_alt(hass, normal_config_entry.entry_id, entity_registry, snapshot) - - -async def test_switch_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the switch entities are correct when offline.""" - - mock_vehicle_data.side_effect = VehicleOffline - await setup_platform(hass, normal_config_entry, [Platform.SWITCH]) - state = hass.states.get("switch.test_auto_seat_climate_left") - assert state.state == STATE_UNKNOWN - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.parametrize( - ("name", "on", "off"), - [ - ("test_charge", "VehicleSpecific.charge_start", "VehicleSpecific.charge_stop"), - ( - "test_auto_seat_climate_left", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", - ), - ( - "test_auto_seat_climate_right", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", - ), - ( - "test_auto_steering_wheel_heater", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", - ), - ( - "test_defrost", - "VehicleSpecific.set_preconditioning_max", - "VehicleSpecific.set_preconditioning_max", - ), - ( - "energy_site_storm_watch", - "EnergySpecific.storm_mode", - "EnergySpecific.storm_mode", - ), - ( - "energy_site_allow_charging_from_grid", - "EnergySpecific.grid_import_export", - "EnergySpecific.grid_import_export", - ), - ( - "test_sentry_mode", - "VehicleSpecific.set_sentry_mode", - "VehicleSpecific.set_sentry_mode", - ), - ], -) -async def test_switch_services( - hass: HomeAssistant, - name: str, - on: str, - off: str, - normal_config_entry: MockConfigEntry, -) -> None: - """Tests that the switch service calls work.""" - - await setup_platform(hass, normal_config_entry, [Platform.SWITCH]) - - entity_id = f"switch.{name}" - with patch( - f"homeassistant.components.tesla_fleet.{on}", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == STATE_ON - call.assert_called_once() - - with patch( - f"homeassistant.components.tesla_fleet.{off}", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - call.assert_called_once() - - -async def 
test_switch_no_scope( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - readonly_config_entry: MockConfigEntry, -) -> None: - """Tests that the switch entities are correct.""" - - await setup_platform(hass, readonly_config_entry, [Platform.SWITCH]) - with pytest.raises(ServiceValidationError, match="Missing vehicle commands scope"): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_auto_steering_wheel_heater"}, - blocking=True, - ) diff --git a/tests/components/teslemetry/__init__.py b/tests/components/teslemetry/__init__.py index b6b9df7eb4b..c4fbdaf3fbd 100644 --- a/tests/components/teslemetry/__init__.py +++ b/tests/components/teslemetry/__init__.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from homeassistant.components.teslemetry.const import DOMAIN from homeassistant.const import Platform diff --git a/tests/components/teslemetry/conftest.py b/tests/components/teslemetry/conftest.py index 256428aa703..410eaa62b69 100644 --- a/tests/components/teslemetry/conftest.py +++ b/tests/components/teslemetry/conftest.py @@ -1,16 +1,14 @@ -"""Fixtures for Teslemetry.""" +"""Fixtures for Tessie.""" from __future__ import annotations -from collections.abc import Generator from copy import deepcopy -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest from .const import ( COMMAND_OK, - ENERGY_HISTORY, LIVE_STATUS, METADATA, PRODUCTS, @@ -39,7 +37,7 @@ def mock_products(): @pytest.fixture(autouse=True) -def mock_vehicle_data() -> Generator[AsyncMock]: +def mock_vehicle_data(): """Mock Tesla Fleet API Vehicle Specific vehicle_data method.""" with patch( "homeassistant.components.teslemetry.VehicleSpecific.vehicle_data", @@ -59,7 +57,7 @@ def mock_wake_up(): @pytest.fixture(autouse=True) -def mock_vehicle() -> Generator[AsyncMock]: +def mock_vehicle(): """Mock Tesla Fleet API Vehicle Specific vehicle method.""" with patch( "homeassistant.components.teslemetry.VehicleSpecific.vehicle", @@ -96,22 +94,3 @@ def mock_site_info(): side_effect=lambda: deepcopy(SITE_INFO), ) as mock_live_status: yield mock_live_status - - -@pytest.fixture(autouse=True) -def mock_energy_history(): - """Mock Teslemetry Energy Specific site_info method.""" - with patch( - "homeassistant.components.teslemetry.EnergySpecific.energy_history", - return_value=ENERGY_HISTORY, - ) as mock_live_status: - yield mock_live_status - - -@pytest.fixture(autouse=True) -def mock_listen(): - """Mock Teslemetry Stream listen method.""" - with patch( - "homeassistant.components.teslemetry.TeslemetryStream.listen", - ) as mock_listen: - yield mock_listen diff --git a/tests/components/teslemetry/const.py b/tests/components/teslemetry/const.py index e459379ccf7..6a3a657a1b1 100644 --- a/tests/components/teslemetry/const.py +++ b/tests/components/teslemetry/const.py @@ -15,7 +15,6 @@ VEHICLE_DATA = load_json_object_fixture("vehicle_data.json", DOMAIN) VEHICLE_DATA_ALT = load_json_object_fixture("vehicle_data_alt.json", DOMAIN) LIVE_STATUS = load_json_object_fixture("live_status.json", DOMAIN) SITE_INFO = load_json_object_fixture("site_info.json", DOMAIN) -ENERGY_HISTORY = load_json_object_fixture("energy_history.json", DOMAIN) COMMAND_OK = {"response": {"result": True, "reason": ""}} COMMAND_REASON = {"response": {"result": False, "reason": "already closed"}} diff --git a/tests/components/teslemetry/fixtures/energy_history.json 
b/tests/components/teslemetry/fixtures/energy_history.json deleted file mode 100644 index 2b787beafac..00000000000 --- a/tests/components/teslemetry/fixtures/energy_history.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "response": { - "serial_number": "xxxxxx", - "period": "day", - "installation_time_zone": "Australia/Brisbane", - "time_series": [ - { - "timestamp": "2024-09-18T00:00:00+10:00", - "solar_energy_exported": 0, - "generator_energy_exported": 0, - "grid_energy_imported": 0, - "grid_services_energy_imported": 0, - "grid_services_energy_exported": 0, - "grid_energy_exported_from_solar": 0, - "grid_energy_exported_from_generator": 0, - "grid_energy_exported_from_battery": 0, - "battery_energy_exported": 36, - "battery_energy_imported_from_grid": 0, - "battery_energy_imported_from_solar": 0, - "battery_energy_imported_from_generator": 0, - "consumer_energy_imported_from_grid": 0, - "consumer_energy_imported_from_solar": 0, - "consumer_energy_imported_from_battery": 36, - "consumer_energy_imported_from_generator": 0, - "raw_timestamp": "2024-09-18T00:00:00+10:00", - "total_home_usage": 36, - "total_battery_discharge": 36 - }, - { - "timestamp": "2024-09-18T08:45:00+10:00", - "solar_energy_exported": 724, - "generator_energy_exported": 0, - "grid_energy_imported": 0, - "grid_services_energy_imported": 0, - "grid_services_energy_exported": 0, - "grid_energy_exported_from_solar": 2, - "grid_energy_exported_from_generator": 0, - "grid_energy_exported_from_battery": 0, - "battery_energy_exported": 0, - "battery_energy_imported_from_grid": 0, - "battery_energy_imported_from_solar": 684, - "battery_energy_imported_from_generator": 0, - "consumer_energy_imported_from_grid": 0, - "consumer_energy_imported_from_solar": 38, - "consumer_energy_imported_from_battery": 0, - "consumer_energy_imported_from_generator": 0, - "raw_timestamp": "2024-09-18T08:45:00+10:00", - "total_home_usage": 38, - "total_solar_generation": 724, - "total_battery_charge": 684, - "total_grid_energy_exported": 2 - } - ] - } -} diff --git a/tests/components/teslemetry/fixtures/products.json b/tests/components/teslemetry/fixtures/products.json index 8da921a33f4..e1b76e4cefb 100644 --- a/tests/components/teslemetry/fixtures/products.json +++ b/tests/components/teslemetry/fixtures/products.json @@ -115,17 +115,7 @@ "features": { "rate_plan_manager_no_pricing_constraint": true } - }, - { - "energy_site_id": 98765, - "components": { - "battery": false, - "solar": false, - "grid": false, - "load_meter": false, - "market_type": "residential" - } } ], - "count": 3 + "count": 2 } diff --git a/tests/components/teslemetry/fixtures/vehicle_data.json b/tests/components/teslemetry/fixtures/vehicle_data.json index d99bc8de5a8..6c787df4897 100644 --- a/tests/components/teslemetry/fixtures/vehicle_data.json +++ b/tests/components/teslemetry/fixtures/vehicle_data.json @@ -112,7 +112,6 @@ "wiper_blade_heater": false }, "drive_state": { - "active_route_destination": "Home", "active_route_latitude": 30.2226265, "active_route_longitude": -97.6236871, "active_route_miles_to_arrival": 0.039491, @@ -177,7 +176,7 @@ "roof_color": "RoofColorGlass", "seat_type": null, "spoiler_type": "None", - "sun_roof_installed": true, + "sun_roof_installed": null, "supports_qr_pairing": false, "third_row_seats": "None", "timestamp": 1705707520649, @@ -251,8 +250,6 @@ "min_limit_mph": 50, "pin_code_set": true }, - "sun_roof_state": "open", - "vehicle_state_sun_roof_percent_open": 20, "timestamp": 1705707520649, "tpms_hard_warning_fl": false, "tpms_hard_warning_fr": 
false, diff --git a/tests/components/teslemetry/snapshots/test_binary_sensors.ambr b/tests/components/teslemetry/snapshots/test_binary_sensors.ambr index 383db58b336..6f35fe9da25 100644 --- a/tests/components/teslemetry/snapshots/test_binary_sensors.ambr +++ b/tests/components/teslemetry/snapshots/test_binary_sensors.ambr @@ -137,52 +137,6 @@ 'state': 'off', }) # --- -# name: test_binary_sensor[binary_sensor.energy_site_storm_watch_active-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.energy_site_storm_watch_active', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Storm watch active', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storm_mode_active', - 'unique_id': '123456-storm_mode_active', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.energy_site_storm_watch_active-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Storm watch active', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_storm_watch_active', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_binary_sensor[binary_sensor.test_battery_heater-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1298,19 +1252,6 @@ 'state': 'off', }) # --- -# name: test_binary_sensor_refresh[binary_sensor.energy_site_storm_watch_active-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Storm watch active', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_storm_watch_active', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_binary_sensor_refresh[binary_sensor.test_battery_heater-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/teslemetry/snapshots/test_climate.ambr b/tests/components/teslemetry/snapshots/test_climate.ambr index 9d5e3827ffc..b65796fe10e 100644 --- a/tests/components/teslemetry/snapshots/test_climate.ambr +++ b/tests/components/teslemetry/snapshots/test_climate.ambr @@ -1,10 +1,4 @@ # serializer version: 1 -# name: test_asleep_or_offline[HomeAssistantError] - 'Timed out trying to wake up vehicle' -# --- -# name: test_asleep_or_offline[InvalidCommand] - 'Failed to wake up vehicle: The data request or command is unknown.' 
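The long runs of -entry and -state blocks being deleted from these .ambr files are produced by the shared assert_entities helper that the test modules import from their package __init__. That helper is not shown in this diff; the sketch below is an assumed reconstruction of the usual pattern (one registry snapshot and one state snapshot per entity of the config entry), not the verbatim implementation:

from syrupy import SnapshotAssertion

from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er


def assert_entities(
    hass: HomeAssistant,
    entry_id: str,
    entity_registry: er.EntityRegistry,
    snapshot: SnapshotAssertion,
) -> None:
    """Compare every entity of a config entry against the stored snapshots."""
    entity_entries = er.async_entries_for_config_entry(entity_registry, entry_id)
    assert entity_entries

    for entity_entry in entity_entries:
        # One "<entity_id>-entry" and one "<entity_id>-state" snapshot per entity,
        # matching the names seen in the .ambr blocks above.
        assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry")
        assert hass.states.get(entity_entry.entity_id) == snapshot(
            name=f"{entity_entry.entity_id}-state"
        )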
-# --- # name: test_climate[climate.test_cabin_overheat_protection-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -286,85 +280,6 @@ 'state': 'off', }) # --- -# name: test_climate_noscope[climate.test_cabin_overheat_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - ]), - 'max_temp': 40, - 'min_temp': 30, - 'target_temp_step': 5, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.test_cabin_overheat_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cabin overheat protection', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_cabin_overheat_protection', - 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_climate_noscope[climate.test_climate-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - ]), - 'max_temp': 28.0, - 'min_temp': 15.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.test_climate', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Climate', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'LRWXF7EK4KC700000-driver_temp', - 'unit_of_measurement': None, - }) -# --- # name: test_climate_offline[climate.test_cabin_overheat_protection-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -505,6 +420,3 @@ 'state': 'unknown', }) # --- -# name: test_invalid_error[error] - 'Command returned exception: The data request or command is unknown.' 
-# --- diff --git a/tests/components/teslemetry/snapshots/test_cover.ambr b/tests/components/teslemetry/snapshots/test_cover.ambr index 7ffb9c4a1f9..7689a08a373 100644 --- a/tests/components/teslemetry/snapshots/test_cover.ambr +++ b/tests/components/teslemetry/snapshots/test_cover.ambr @@ -95,54 +95,6 @@ 'state': 'closed', }) # --- -# name: test_cover[cover.test_sunroof-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_sunroof', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sunroof', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover[cover.test_sunroof-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Sunroof', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_sunroof', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- # name: test_cover[cover.test_trunk-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -335,54 +287,6 @@ 'state': 'open', }) # --- -# name: test_cover_alt[cover.test_sunroof-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_sunroof', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sunroof', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_alt[cover.test_sunroof-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Sunroof', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_sunroof', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_cover_alt[cover.test_trunk-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -575,54 +479,6 @@ 'state': 'closed', }) # --- -# name: test_cover_noscope[cover.test_sunroof-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_sunroof', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sunroof', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_noscope[cover.test_sunroof-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Sunroof', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_sunroof', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- # name: test_cover_noscope[cover.test_trunk-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/teslemetry/snapshots/test_device_tracker.ambr b/tests/components/teslemetry/snapshots/test_device_tracker.ambr index 6c18cdf75c6..9859d9db360 100644 --- a/tests/components/teslemetry/snapshots/test_device_tracker.ambr +++ b/tests/components/teslemetry/snapshots/test_device_tracker.ambr @@ -96,6 +96,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'home', + 'state': 'not_home', }) # --- diff --git a/tests/components/teslemetry/snapshots/test_diagnostics.ambr b/tests/components/teslemetry/snapshots/test_diagnostics.ambr index 3b96d6f70c0..4a942daa508 100644 --- a/tests/components/teslemetry/snapshots/test_diagnostics.ambr +++ b/tests/components/teslemetry/snapshots/test_diagnostics.ambr @@ -270,7 +270,6 @@ 'climate_state_timestamp': 1705707520649, 'climate_state_wiper_blade_heater': False, 'color': None, - 'drive_state_active_route_destination': 'Home', 'drive_state_active_route_latitude': '**REDACTED**', 'drive_state_active_route_longitude': '**REDACTED**', 'drive_state_active_route_miles_to_arrival': 0.039491, @@ -338,7 +337,7 @@ 'vehicle_config_roof_color': 'RoofColorGlass', 'vehicle_config_seat_type': None, 'vehicle_config_spoiler_type': 'None', - 'vehicle_config_sun_roof_installed': True, + 'vehicle_config_sun_roof_installed': None, 'vehicle_config_supports_qr_pairing': False, 'vehicle_config_third_row_seats': 'None', 'vehicle_config_timestamp': 1705707520649, @@ -403,7 +402,6 @@ 'vehicle_state_speed_limit_mode_max_limit_mph': 120, 'vehicle_state_speed_limit_mode_min_limit_mph': 50, 'vehicle_state_speed_limit_mode_pin_code_set': True, - 'vehicle_state_sun_roof_state': 'open', 'vehicle_state_timestamp': 1705707520649, 'vehicle_state_tpms_hard_warning_fl': False, 'vehicle_state_tpms_hard_warning_fr': False, @@ -428,7 +426,6 @@ 'vehicle_state_vehicle_name': 'Test', 'vehicle_state_vehicle_self_test_progress': 0, 'vehicle_state_vehicle_self_test_requested': False, - 'vehicle_state_vehicle_state_sun_roof_percent_open': 20, 'vehicle_state_webcam_available': True, 'vin': '**REDACTED**', }), diff --git a/tests/components/teslemetry/snapshots/test_init.ambr b/tests/components/teslemetry/snapshots/test_init.ambr index e07f075b7d8..e5dd23ada6e 100644 --- a/tests/components/teslemetry/snapshots/test_init.ambr +++ b/tests/components/teslemetry/snapshots/test_init.ambr @@ -21,7 +21,6 @@ }), 'manufacturer': 'Tesla', 'model': 'Powerwall 2, Tesla Backup Gateway 2', - 'model_id': None, 'name': 'Energy Site', 'name_by_user': None, 'primary_config_entry': , @@ -53,7 +52,6 @@ }), 'manufacturer': 'Tesla', 'model': 'Model X', - 'model_id': None, 'name': 'Test', 'name_by_user': None, 'primary_config_entry': , @@ -85,7 +83,6 @@ }), 'manufacturer': 'Tesla', 'model': 'Gen 3 Wall Connector', - 'model_id': None, 'name': 'Wall Connector', 'name_by_user': None, 'primary_config_entry': , @@ -117,7 +114,6 @@ }), 'manufacturer': 'Tesla', 'model': 'Gen 3 Wall Connector', - 'model_id': None, 'name': 
'Wall Connector', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/teslemetry/snapshots/test_sensor.ambr b/tests/components/teslemetry/snapshots/test_sensor.ambr index 96cebc2b01f..0b664e78626 100644 --- a/tests/components/teslemetry/snapshots/test_sensor.ambr +++ b/tests/components/teslemetry/snapshots/test_sensor.ambr @@ -1,442 +1,4 @@ # serializer version: 1 -# name: test_sensors[sensor.energy_site_battery_charged-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_battery_charged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery charged', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_battery_charge', - 'unique_id': '123456-total_battery_charge', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_battery_charged-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Battery charged', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_battery_charged', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.684', - }) -# --- -# name: test_sensors[sensor.energy_site_battery_charged-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Battery charged', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_battery_charged', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.684', - }) -# --- -# name: test_sensors[sensor.energy_site_battery_discharged-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_battery_discharged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery discharged', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_battery_discharge', - 'unique_id': '123456-total_battery_discharge', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_battery_discharged-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Battery discharged', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_battery_discharged', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.036', - }) -# --- -# name: 
test_sensors[sensor.energy_site_battery_discharged-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Battery discharged', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_battery_discharged', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.036', - }) -# --- -# name: test_sensors[sensor.energy_site_battery_exported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_battery_exported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery exported', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_energy_exported', - 'unique_id': '123456-battery_energy_exported', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_battery_exported-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Battery exported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_battery_exported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.036', - }) -# --- -# name: test_sensors[sensor.energy_site_battery_exported-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Battery exported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_battery_exported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.036', - }) -# --- -# name: test_sensors[sensor.energy_site_battery_imported_from_generator-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_battery_imported_from_generator', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery imported from generator', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_energy_imported_from_generator', - 'unique_id': '123456-battery_energy_imported_from_generator', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_battery_imported_from_generator-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Battery imported from generator', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 
'sensor.energy_site_battery_imported_from_generator', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_battery_imported_from_generator-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Battery imported from generator', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_battery_imported_from_generator', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_battery_imported_from_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_battery_imported_from_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery imported from grid', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_energy_imported_from_grid', - 'unique_id': '123456-battery_energy_imported_from_grid', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_battery_imported_from_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Battery imported from grid', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_battery_imported_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_battery_imported_from_grid-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Battery imported from grid', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_battery_imported_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_battery_imported_from_solar-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_battery_imported_from_solar', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery imported from solar', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_energy_imported_from_solar', - 'unique_id': '123456-battery_energy_imported_from_solar', - 'unit_of_measurement': , - }) -# --- -# name: 
test_sensors[sensor.energy_site_battery_imported_from_solar-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Battery imported from solar', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_battery_imported_from_solar', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.684', - }) -# --- -# name: test_sensors[sensor.energy_site_battery_imported_from_solar-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Battery imported from solar', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_battery_imported_from_solar', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.684', - }) -# --- # name: test_sensors[sensor.energy_site_battery_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -510,298 +72,6 @@ 'state': '5.06', }) # --- -# name: test_sensors[sensor.energy_site_consumer_imported_from_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_consumer_imported_from_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumer imported from battery', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumer_energy_imported_from_battery', - 'unique_id': '123456-consumer_energy_imported_from_battery', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_consumer_imported_from_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Consumer imported from battery', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_consumer_imported_from_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.036', - }) -# --- -# name: test_sensors[sensor.energy_site_consumer_imported_from_battery-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Consumer imported from battery', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_consumer_imported_from_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.036', - }) -# --- -# name: test_sensors[sensor.energy_site_consumer_imported_from_generator-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_consumer_imported_from_generator', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 
'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumer imported from generator', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumer_energy_imported_from_generator', - 'unique_id': '123456-consumer_energy_imported_from_generator', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_consumer_imported_from_generator-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Consumer imported from generator', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_consumer_imported_from_generator', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_consumer_imported_from_generator-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Consumer imported from generator', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_consumer_imported_from_generator', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_consumer_imported_from_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_consumer_imported_from_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumer imported from grid', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumer_energy_imported_from_grid', - 'unique_id': '123456-consumer_energy_imported_from_grid', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_consumer_imported_from_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Consumer imported from grid', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_consumer_imported_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_consumer_imported_from_grid-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Consumer imported from grid', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_consumer_imported_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_consumer_imported_from_solar-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , 
- 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_consumer_imported_from_solar', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumer imported from solar', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumer_energy_imported_from_solar', - 'unique_id': '123456-consumer_energy_imported_from_solar', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_consumer_imported_from_solar-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Consumer imported from solar', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_consumer_imported_from_solar', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.038', - }) -# --- -# name: test_sensors[sensor.energy_site_consumer_imported_from_solar-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Consumer imported from solar', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_consumer_imported_from_solar', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.038', - }) -# --- # name: test_sensors[sensor.energy_site_energy_left-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -875,79 +145,6 @@ 'state': '38.8964736842105', }) # --- -# name: test_sensors[sensor.energy_site_generator_exported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_generator_exported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Generator exported', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'generator_energy_exported', - 'unique_id': '123456-generator_energy_exported', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_generator_exported-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Generator exported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_generator_exported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_generator_exported-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Generator exported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_generator_exported', - 'last_changed': , - 
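For the energy-history sensors being dropped here, the snapshot states are the Wh totals from the removed energy_history.json fixture rendered with a kWh display unit, which is why values such as '0.684', '0.038' and '0.036' recur. A small illustrative check using the fixture's own numbers (assumed mapping, for orientation only):

# Wh totals copied from the deleted energy_history.json fixture.
FIXTURE_WH = {
    "battery_energy_imported_from_solar": 684,
    "consumer_energy_imported_from_solar": 38,
    "battery_energy_exported": 36,
}

# The deleted sensor snapshots show these same quantities divided by 1000.
for key, watt_hours in FIXTURE_WH.items():
    print(f"{key}: {watt_hours} Wh -> {watt_hours / 1000} kWh")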
'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- # name: test_sensors[sensor.energy_site_generator_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1021,371 +218,6 @@ 'state': '0.0', }) # --- -# name: test_sensors[sensor.energy_site_grid_exported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_grid_exported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Grid exported', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_grid_energy_exported', - 'unique_id': '123456-total_grid_energy_exported', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_grid_exported-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid exported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_exported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.002', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_exported-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid exported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_exported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.002', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_exported_from_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_grid_exported_from_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Grid exported from battery', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'grid_energy_exported_from_battery', - 'unique_id': '123456-grid_energy_exported_from_battery', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_grid_exported_from_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid exported from battery', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_exported_from_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_exported_from_battery-statealt] - StateSnapshot({ - 'attributes': 
ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid exported from battery', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_exported_from_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_exported_from_generator-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_grid_exported_from_generator', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Grid exported from generator', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'grid_energy_exported_from_generator', - 'unique_id': '123456-grid_energy_exported_from_generator', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_grid_exported_from_generator-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid exported from generator', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_exported_from_generator', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_exported_from_generator-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid exported from generator', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_exported_from_generator', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_exported_from_solar-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_grid_exported_from_solar', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Grid exported from solar', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'grid_energy_exported_from_solar', - 'unique_id': '123456-grid_energy_exported_from_solar', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_grid_exported_from_solar-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid exported from solar', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 
'sensor.energy_site_grid_exported_from_solar', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.002', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_exported_from_solar-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid exported from solar', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_exported_from_solar', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.002', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_imported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_grid_imported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Grid imported', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'grid_energy_imported', - 'unique_id': '123456-grid_energy_imported', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_grid_imported-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid imported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_imported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_imported-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid imported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_imported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- # name: test_sensors[sensor.energy_site_grid_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1459,152 +291,6 @@ 'state': '0.0', }) # --- -# name: test_sensors[sensor.energy_site_grid_services_exported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_grid_services_exported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Grid services exported', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'grid_services_energy_exported', - 'unique_id': '123456-grid_services_energy_exported', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_grid_services_exported-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid services exported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_services_exported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_services_exported-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid services exported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_services_exported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_services_imported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_grid_services_imported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Grid services imported', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'grid_services_energy_imported', - 'unique_id': '123456-grid_services_energy_imported', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_grid_services_imported-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid services imported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_services_imported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_sensors[sensor.energy_site_grid_services_imported-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Grid services imported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_grid_services_imported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- # name: test_sensors[sensor.energy_site_grid_services_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1678,162 +364,6 @@ 'state': '0.0', }) # --- -# name: test_sensors[sensor.energy_site_home_usage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_home_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Home usage', - 'platform': 'teslemetry', - 
'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_home_usage', - 'unique_id': '123456-total_home_usage', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_home_usage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Home usage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_home_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.074', - }) -# --- -# name: test_sensors[sensor.energy_site_home_usage-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Home usage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_home_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.074', - }) -# --- -# name: test_sensors[sensor.energy_site_island_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'on_grid', - 'off_grid', - 'off_grid_intentional', - 'off_grid_unintentional', - 'island_status_unknown', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_island_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Island status', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'island_status', - 'unique_id': '123456-island_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.energy_site_island_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Energy Site Island status', - 'options': list([ - 'on_grid', - 'off_grid', - 'off_grid_intentional', - 'off_grid_unintentional', - 'island_status_unknown', - ]), - }), - 'context': , - 'entity_id': 'sensor.energy_site_island_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on_grid', - }) -# --- -# name: test_sensors[sensor.energy_site_island_status-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Energy Site Island status', - 'options': list([ - 'on_grid', - 'off_grid', - 'off_grid_intentional', - 'off_grid_unintentional', - 'island_status_unknown', - ]), - }), - 'context': , - 'entity_id': 'sensor.energy_site_island_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on_grid', - }) -# --- # name: test_sensors[sensor.energy_site_load_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1907,6 +437,67 @@ 'state': '6.245', }) # --- +# name: test_sensors[sensor.energy_site_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': None, + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'island_status', + 'unique_id': '123456-island_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.energy_site_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Energy Site None', + }), + 'context': , + 'entity_id': 'sensor.energy_site_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on_grid', + }) +# --- +# name: test_sensors[sensor.energy_site_none-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Energy Site None', + }), + 'context': , + 'entity_id': 'sensor.energy_site_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on_grid', + }) +# --- # name: test_sensors[sensor.energy_site_percentage_charged-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1977,152 +568,6 @@ 'state': '95.5053740373966', }) # --- -# name: test_sensors[sensor.energy_site_solar_exported-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_solar_exported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Solar exported', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'solar_energy_exported', - 'unique_id': '123456-solar_energy_exported', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_solar_exported-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Solar exported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_solar_exported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.724', - }) -# --- -# name: test_sensors[sensor.energy_site_solar_exported-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Solar exported', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_solar_exported', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.724', - }) -# --- -# name: test_sensors[sensor.energy_site_solar_generated-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_solar_generated', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Solar 
generated', - 'platform': 'teslemetry', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_solar_generation', - 'unique_id': '123456-total_solar_generation', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[sensor.energy_site_solar_generated-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Solar generated', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_solar_generated', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.724', - }) -# --- -# name: test_sensors[sensor.energy_site_solar_generated-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Energy Site Solar generated', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.energy_site_solar_generated', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.724', - }) -# --- # name: test_sensors[sensor.energy_site_solar_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/teslemetry/snapshots/test_update.ambr b/tests/components/teslemetry/snapshots/test_update.ambr index a1213f3d94b..19dac161516 100644 --- a/tests/components/teslemetry/snapshots/test_update.ambr +++ b/tests/components/teslemetry/snapshots/test_update.ambr @@ -36,7 +36,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'auto_update': False, - 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', 'friendly_name': 'Test Update', 'in_progress': False, @@ -47,7 +46,6 @@ 'skipped_version': None, 'supported_features': , 'title': None, - 'update_percentage': None, }), 'context': , 'entity_id': 'update.test_update', @@ -94,7 +92,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'auto_update': False, - 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', 'friendly_name': 'Test Update', 'in_progress': False, @@ -105,7 +102,6 @@ 'skipped_version': None, 'supported_features': , 'title': None, - 'update_percentage': None, }), 'context': , 'entity_id': 'update.test_update', diff --git a/tests/components/teslemetry/test_binary_sensors.py b/tests/components/teslemetry/test_binary_sensors.py index 95fccde5f25..a7a8c03c174 100644 --- a/tests/components/teslemetry/test_binary_sensors.py +++ b/tests/components/teslemetry/test_binary_sensors.py @@ -1,10 +1,8 @@ """Test the Teslemetry binary sensor platform.""" -from unittest.mock import AsyncMock - from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL @@ -35,7 +33,7 @@ async def test_binary_sensor_refresh( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, freezer: FrozenDateTimeFactory, ) -> None: """Tests that the binary sensor entities are correct.""" @@ -53,7 +51,7 @@ async def test_binary_sensor_refresh( async def test_binary_sensor_offline( hass: HomeAssistant, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the binary sensor entities are correct when offline.""" diff --git a/tests/components/teslemetry/test_button.py b/tests/components/teslemetry/test_button.py index 
04edf668765..a10e3efdff2 100644 --- a/tests/components/teslemetry/test_button.py +++ b/tests/components/teslemetry/test_button.py @@ -3,7 +3,7 @@ from unittest.mock import patch import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, Platform diff --git a/tests/components/teslemetry/test_climate.py b/tests/components/teslemetry/test_climate.py index 55f99caa13c..250413396c1 100644 --- a/tests/components/teslemetry/test_climate.py +++ b/tests/components/teslemetry/test_climate.py @@ -1,15 +1,17 @@ """Test the Teslemetry climate platform.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from tesla_fleet_api.exceptions import InvalidCommand, VehicleOffline from homeassistant.components.climate import ( ATTR_HVAC_MODE, ATTR_PRESET_MODE, + ATTR_TARGET_TEMP_HIGH, + ATTR_TARGET_TEMP_LOW, ATTR_TEMPERATURE, DOMAIN as CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, @@ -173,6 +175,17 @@ async def test_climate( state = hass.states.get(entity_id) assert state.state == HVACMode.COOL + # Set Temp do nothing + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: [entity_id], + ATTR_TARGET_TEMP_HIGH: 30, + ATTR_TARGET_TEMP_LOW: 30, + }, + blocking=True, + ) state = hass.states.get(entity_id) assert state.attributes[ATTR_TEMPERATURE] == 40 assert state.state == HVACMode.COOL @@ -186,7 +199,7 @@ async def test_climate( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 34}, + {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 25}, blocking=True, ) @@ -196,7 +209,7 @@ async def test_climate_alt( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the climate entity is correct.""" @@ -210,7 +223,7 @@ async def test_climate_offline( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the climate entity is correct.""" @@ -219,7 +232,7 @@ async def test_climate_offline( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -async def test_invalid_error(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: +async def test_invalid_error(hass: HomeAssistant) -> None: """Tests service error is handled.""" await setup_platform(hass, platforms=[Platform.CLIMATE]) @@ -239,7 +252,10 @@ async def test_invalid_error(hass: HomeAssistant, snapshot: SnapshotAssertion) - blocking=True, ) mock_on.assert_called_once() - assert str(error.value) == snapshot(name="error") + assert ( + str(error.value) + == "Teslemetry command failed, The data request or command is unknown." 
+ ) @pytest.mark.parametrize("response", COMMAND_ERRORS) @@ -288,11 +304,10 @@ async def test_ignored_error( @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_asleep_or_offline( hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - mock_wake_up: AsyncMock, - mock_vehicle: AsyncMock, + mock_vehicle_data, + mock_wake_up, + mock_vehicle, freezer: FrozenDateTimeFactory, - snapshot: SnapshotAssertion, ) -> None: """Tests asleep is handled.""" @@ -318,7 +333,7 @@ async def test_asleep_or_offline( {ATTR_ENTITY_ID: [entity_id]}, blocking=True, ) - assert str(error.value) == snapshot(name="InvalidCommand") + assert str(error.value) == "The data request or command is unknown." mock_wake_up.assert_called_once() mock_wake_up.side_effect = None @@ -337,7 +352,7 @@ async def test_asleep_or_offline( {ATTR_ENTITY_ID: [entity_id]}, blocking=True, ) - assert str(error.value) == snapshot(name="HomeAssistantError") + assert str(error.value) == "Could not wake up vehicle" mock_wake_up.assert_called_once() mock_vehicle.assert_called() @@ -356,21 +371,12 @@ async def test_asleep_or_offline( async def test_climate_noscope( hass: HomeAssistant, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, - mock_metadata: AsyncMock, + mock_metadata, ) -> None: """Tests that the climate entity is correct.""" mock_metadata.return_value = METADATA_NOSCOPE - entry = await setup_platform(hass, [Platform.CLIMATE]) - - entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - - assert entity_entries - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - + await setup_platform(hass, [Platform.CLIMATE]) entity_id = "climate.test_climate" with pytest.raises(ServiceValidationError): diff --git a/tests/components/teslemetry/test_config_flow.py b/tests/components/teslemetry/test_config_flow.py index aeee3a620d4..fa35142dc07 100644 --- a/tests/components/teslemetry/test_config_flow.py +++ b/tests/components/teslemetry/test_config_flow.py @@ -1,6 +1,6 @@ """Test the Teslemetry config flow.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch from aiohttp import ClientConnectionError import pytest @@ -60,10 +60,7 @@ async def test_form( ], ) async def test_form_errors( - hass: HomeAssistant, - side_effect: TeslaFleetError, - error: dict[str, str], - mock_metadata: AsyncMock, + hass: HomeAssistant, side_effect, error, mock_metadata ) -> None: """Test errors are handled.""" @@ -89,7 +86,7 @@ async def test_form_errors( assert result3["type"] is FlowResultType.CREATE_ENTRY -async def test_reauth(hass: HomeAssistant, mock_metadata: AsyncMock) -> None: +async def test_reauth(hass: HomeAssistant, mock_metadata) -> None: """Test reauth flow.""" mock_entry = MockConfigEntry( @@ -97,7 +94,14 @@ async def test_reauth(hass: HomeAssistant, mock_metadata: AsyncMock) -> None: ) mock_entry.add_to_hass(hass) - result1 = await mock_entry.start_reauth_flow(hass) + result1 = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_entry.entry_id, + }, + data=BAD_CONFIG, + ) assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "reauth_confirm" @@ -130,10 +134,7 @@ async def test_reauth(hass: HomeAssistant, mock_metadata: AsyncMock) -> None: ], ) async def test_reauth_errors( - hass: HomeAssistant, - mock_metadata: AsyncMock, - side_effect: TeslaFleetError, - error: dict[str, str], + hass: HomeAssistant, 
mock_metadata, side_effect, error ) -> None: """Test reauth flows that fail.""" @@ -143,7 +144,15 @@ async def test_reauth_errors( ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=BAD_CONFIG, + ) mock_metadata.side_effect = side_effect result2 = await hass.config_entries.flow.async_configure( @@ -184,7 +193,7 @@ async def test_unique_id_abort( assert result2["type"] is FlowResultType.ABORT -async def test_migrate_from_1_1(hass: HomeAssistant, mock_metadata: AsyncMock) -> None: +async def test_migrate_from_1_1(hass: HomeAssistant, mock_metadata) -> None: """Test config migration.""" mock_entry = MockConfigEntry( @@ -205,9 +214,7 @@ async def test_migrate_from_1_1(hass: HomeAssistant, mock_metadata: AsyncMock) - assert entry.unique_id == METADATA["uid"] -async def test_migrate_error_from_1_1( - hass: HomeAssistant, mock_metadata: AsyncMock -) -> None: +async def test_migrate_error_from_1_1(hass: HomeAssistant, mock_metadata) -> None: """Test config migration handles errors.""" mock_metadata.side_effect = TeslaFleetError @@ -228,9 +235,7 @@ async def test_migrate_error_from_1_1( assert entry.state is ConfigEntryState.MIGRATION_ERROR -async def test_migrate_error_from_future( - hass: HomeAssistant, mock_metadata: AsyncMock -) -> None: +async def test_migrate_error_from_future(hass: HomeAssistant, mock_metadata) -> None: """Test a future version isn't migrated.""" mock_metadata.side_effect = TeslaFleetError diff --git a/tests/components/teslemetry/test_cover.py b/tests/components/teslemetry/test_cover.py index 5801a356ac5..5f99a5d9c79 100644 --- a/tests/components/teslemetry/test_cover.py +++ b/tests/components/teslemetry/test_cover.py @@ -1,19 +1,22 @@ """Test the Teslemetry cover platform.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch -import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.cover import ( DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, - SERVICE_STOP_COVER, - CoverState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_CLOSED, + STATE_OPEN, + STATE_UNKNOWN, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -21,7 +24,6 @@ from . 
import assert_entities, setup_platform from .const import COMMAND_OK, METADATA_NOSCOPE, VEHICLE_DATA_ALT -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -33,26 +35,24 @@ async def test_cover( assert_entities(hass, entry.entry_id, entity_registry, snapshot) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover_alt( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: - """Tests that the cover entities are correct with alternate values.""" + """Tests that the cover entities are correct without scopes.""" mock_vehicle_data.return_value = VEHICLE_DATA_ALT entry = await setup_platform(hass, [Platform.COVER]) assert_entities(hass, entry.entry_id, entity_registry, snapshot) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover_noscope( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_metadata: AsyncMock, + mock_metadata, ) -> None: """Tests that the cover entities are correct without scopes.""" @@ -63,7 +63,7 @@ async def test_cover_noscope( async def test_cover_offline( hass: HomeAssistant, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the cover entities are correct when offline.""" @@ -73,7 +73,6 @@ async def test_cover_offline( assert state.state == STATE_UNKNOWN -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover_services( hass: HomeAssistant, ) -> None: @@ -96,7 +95,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state is STATE_OPEN call.reset_mock() await hass.services.async_call( @@ -108,7 +107,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSED + assert state.state is STATE_CLOSED # Charge Port Door entity_id = "cover.test_charge_port_door" @@ -125,7 +124,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state is STATE_OPEN with patch( "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_close", @@ -140,7 +139,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSED + assert state.state is STATE_CLOSED # Frunk entity_id = "cover.test_frunk" @@ -157,7 +156,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state is STATE_OPEN # Trunk entity_id = "cover.test_trunk" @@ -174,7 +173,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state is STATE_OPEN call.reset_mock() await hass.services.async_call( @@ -186,45 +185,4 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state == CoverState.CLOSED - - # Sunroof - entity_id = "cover.test_sunroof" - with patch( - "homeassistant.components.teslemetry.VehicleSpecific.sun_roof_control", - return_value=COMMAND_OK, - ) as call: - await hass.services.async_call( - 
COVER_DOMAIN, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - call.assert_called_once() - state = hass.states.get(entity_id) - assert state - assert state.state == CoverState.OPEN - - call.reset_mock() - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - call.assert_called_once() - state = hass.states.get(entity_id) - assert state - assert state.state == CoverState.OPEN - - call.reset_mock() - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - call.assert_called_once() - state = hass.states.get(entity_id) - assert state - assert state.state == CoverState.CLOSED + assert state.state is STATE_CLOSED diff --git a/tests/components/teslemetry/test_device_tracker.py b/tests/components/teslemetry/test_device_tracker.py index a3fcd428c66..55deaefdab5 100644 --- a/tests/components/teslemetry/test_device_tracker.py +++ b/tests/components/teslemetry/test_device_tracker.py @@ -1,6 +1,6 @@ """Test the Teslemetry device tracker platform.""" -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.const import STATE_UNKNOWN, Platform diff --git a/tests/components/teslemetry/test_init.py b/tests/components/teslemetry/test_init.py index 2a33e1def66..31b4202b521 100644 --- a/tests/components/teslemetry/test_init.py +++ b/tests/components/teslemetry/test_init.py @@ -1,10 +1,8 @@ """Test the Teslemetry init.""" -from unittest.mock import AsyncMock - from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from tesla_fleet_api.exceptions import ( InvalidToken, SubscriptionRequired, @@ -18,12 +16,12 @@ from homeassistant.components.teslemetry.coordinator import ( ) from homeassistant.components.teslemetry.models import TeslemetryData from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from . 
import setup_platform -from .const import VEHICLE_DATA_ALT, WAKE_UP_ASLEEP +from .const import VEHICLE_DATA_ALT from tests.common import async_fire_time_changed @@ -48,10 +46,7 @@ async def test_load_unload(hass: HomeAssistant) -> None: @pytest.mark.parametrize(("side_effect", "state"), ERRORS) async def test_init_error( - hass: HomeAssistant, - mock_products: AsyncMock, - side_effect: TeslaFleetError, - state: ConfigEntryState, + hass: HomeAssistant, mock_products, side_effect, state ) -> None: """Test init with errors.""" @@ -73,23 +68,8 @@ async def test_devices( # Vehicle Coordinator -async def test_vehicle_refresh_asleep( - hass: HomeAssistant, - mock_vehicle: AsyncMock, - mock_vehicle_data: AsyncMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test coordinator refresh with an error.""" - - mock_vehicle.return_value = WAKE_UP_ASLEEP - entry = await setup_platform(hass, [Platform.CLIMATE]) - assert entry.state is ConfigEntryState.LOADED - mock_vehicle.assert_called_once() - mock_vehicle_data.assert_not_called() - - async def test_vehicle_refresh_offline( - hass: HomeAssistant, mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory + hass: HomeAssistant, mock_vehicle_data, freezer: FrozenDateTimeFactory ) -> None: """Test coordinator refresh with an error.""" entry = await setup_platform(hass, [Platform.CLIMATE]) @@ -106,10 +86,7 @@ async def test_vehicle_refresh_offline( @pytest.mark.parametrize(("side_effect", "state"), ERRORS) async def test_vehicle_refresh_error( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - side_effect: TeslaFleetError, - state: ConfigEntryState, + hass: HomeAssistant, mock_vehicle_data, side_effect, state ) -> None: """Test coordinator refresh with an error.""" mock_vehicle_data.side_effect = side_effect @@ -118,7 +95,7 @@ async def test_vehicle_refresh_error( async def test_vehicle_sleep( - hass: HomeAssistant, mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory + hass: HomeAssistant, mock_vehicle_data, freezer: FrozenDateTimeFactory ) -> None: """Test coordinator refresh with an error.""" await setup_platform(hass, [Platform.CLIMATE]) @@ -177,10 +154,7 @@ async def test_vehicle_sleep( # Test Energy Live Coordinator @pytest.mark.parametrize(("side_effect", "state"), ERRORS) async def test_energy_live_refresh_error( - hass: HomeAssistant, - mock_live_status: AsyncMock, - side_effect: TeslaFleetError, - state: ConfigEntryState, + hass: HomeAssistant, mock_live_status, side_effect, state ) -> None: """Test coordinator refresh with an error.""" mock_live_status.side_effect = side_effect @@ -191,70 +165,9 @@ async def test_energy_live_refresh_error( # Test Energy Site Coordinator @pytest.mark.parametrize(("side_effect", "state"), ERRORS) async def test_energy_site_refresh_error( - hass: HomeAssistant, - mock_site_info: AsyncMock, - side_effect: TeslaFleetError, - state: ConfigEntryState, + hass: HomeAssistant, mock_site_info, side_effect, state ) -> None: """Test coordinator refresh with an error.""" mock_site_info.side_effect = side_effect entry = await setup_platform(hass) assert entry.state is state - - -# Test Energy History Coordinator -@pytest.mark.parametrize(("side_effect", "state"), ERRORS) -async def test_energy_history_refresh_error( - hass: HomeAssistant, - mock_energy_history: AsyncMock, - side_effect: TeslaFleetError, - state: ConfigEntryState, -) -> None: - """Test coordinator refresh with an error.""" - mock_energy_history.side_effect = side_effect - entry = await setup_platform(hass) - assert entry.state is state - - 
-async def test_vehicle_stream( - hass: HomeAssistant, - mock_listen: AsyncMock, - snapshot: SnapshotAssertion, -) -> None: - """Test vehicle stream events.""" - - entry = await setup_platform(hass, [Platform.BINARY_SENSOR]) - mock_listen.assert_called_once() - - state = hass.states.get("binary_sensor.test_status") - assert state.state == STATE_ON - - state = hass.states.get("binary_sensor.test_user_present") - assert state.state == STATE_OFF - - runtime_data: TeslemetryData = entry.runtime_data - for listener, _ in runtime_data.vehicles[0].stream._listeners.values(): - listener( - { - "vin": VEHICLE_DATA_ALT["response"]["vin"], - "vehicle_data": VEHICLE_DATA_ALT["response"], - "createdAt": "2024-10-04T10:45:17.537Z", - } - ) - await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.test_user_present") - assert state.state == STATE_ON - - for listener, _ in runtime_data.vehicles[0].stream._listeners.values(): - listener( - { - "vin": VEHICLE_DATA_ALT["response"]["vin"], - "state": "offline", - "createdAt": "2024-10-04T10:45:17.537Z", - } - ) - await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.test_status") - assert state.state == STATE_OFF diff --git a/tests/components/teslemetry/test_lock.py b/tests/components/teslemetry/test_lock.py index b1460e870f0..a50e97fe6ad 100644 --- a/tests/components/teslemetry/test_lock.py +++ b/tests/components/teslemetry/test_lock.py @@ -1,18 +1,23 @@ """Test the Teslemetry lock platform.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, - LockState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_LOCKED, + STATE_UNKNOWN, + STATE_UNLOCKED, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er @@ -34,7 +39,7 @@ async def test_lock( async def test_lock_offline( hass: HomeAssistant, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the lock entities are correct when offline.""" @@ -64,7 +69,7 @@ async def test_lock_services( blocking=True, ) state = hass.states.get(entity_id) - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKED call.assert_called_once() with patch( @@ -78,7 +83,7 @@ async def test_lock_services( blocking=True, ) state = hass.states.get(entity_id) - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED call.assert_called_once() entity_id = "lock.test_charge_cable_lock" @@ -102,5 +107,5 @@ async def test_lock_services( blocking=True, ) state = hass.states.get(entity_id) - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED call.assert_called_once() diff --git a/tests/components/teslemetry/test_media_player.py b/tests/components/teslemetry/test_media_player.py index 0d30750d10d..8544c11a625 100644 --- a/tests/components/teslemetry/test_media_player.py +++ b/tests/components/teslemetry/test_media_player.py @@ -1,8 +1,8 @@ """Test the Teslemetry media player platform.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch -from syrupy.assertion import SnapshotAssertion +from syrupy import 
SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.media_player import ( @@ -38,7 +38,7 @@ async def test_media_player_alt( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the media player entities are correct.""" @@ -49,7 +49,7 @@ async def test_media_player_alt( async def test_media_player_offline( hass: HomeAssistant, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the media player entities are correct when offline.""" @@ -63,7 +63,7 @@ async def test_media_player_noscope( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_metadata: AsyncMock, + mock_metadata, ) -> None: """Tests that the media player entities are correct without required scope.""" diff --git a/tests/components/teslemetry/test_number.py b/tests/components/teslemetry/test_number.py index 5df948b475c..728d37c4d7c 100644 --- a/tests/components/teslemetry/test_number.py +++ b/tests/components/teslemetry/test_number.py @@ -1,9 +1,9 @@ """Test the Teslemetry number platform.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.number import ( @@ -33,7 +33,7 @@ async def test_number( async def test_number_offline( hass: HomeAssistant, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the number entities are correct when offline.""" @@ -44,9 +44,7 @@ async def test_number_offline( @pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_number_services( - hass: HomeAssistant, mock_vehicle_data: AsyncMock -) -> None: +async def test_number_services(hass: HomeAssistant, mock_vehicle_data) -> None: """Tests that the number services work.""" mock_vehicle_data.return_value = VEHICLE_DATA_ALT await setup_platform(hass, [Platform.NUMBER]) diff --git a/tests/components/teslemetry/test_select.py b/tests/components/teslemetry/test_select.py index caf0b9c1deb..3b1c8c436bf 100644 --- a/tests/components/teslemetry/test_select.py +++ b/tests/components/teslemetry/test_select.py @@ -1,9 +1,9 @@ """Test the Teslemetry select platform.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode from tesla_fleet_api.exceptions import VehicleOffline @@ -35,7 +35,7 @@ async def test_select( async def test_select_offline( hass: HomeAssistant, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the select entities are correct when offline.""" diff --git a/tests/components/teslemetry/test_sensor.py b/tests/components/teslemetry/test_sensor.py index f0b472a7183..c5bdd15d712 100644 --- a/tests/components/teslemetry/test_sensor.py +++ b/tests/components/teslemetry/test_sensor.py @@ -1,10 +1,8 @@ """Test the Teslemetry sensor platform.""" -from unittest.mock import AsyncMock - from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL from homeassistant.const import Platform @@ -23,7 
+21,7 @@ async def test_sensors( snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, freezer: FrozenDateTimeFactory, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the sensor entities are correct.""" diff --git a/tests/components/teslemetry/test_switch.py b/tests/components/teslemetry/test_switch.py index dae3ce6fbf8..47a2843eb8f 100644 --- a/tests/components/teslemetry/test_switch.py +++ b/tests/components/teslemetry/test_switch.py @@ -1,9 +1,9 @@ """Test the Teslemetry switch platform.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.switch import ( @@ -40,7 +40,7 @@ async def test_switch_alt( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the switch entities are correct.""" @@ -51,7 +51,7 @@ async def test_switch_alt( async def test_switch_offline( hass: HomeAssistant, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the switch entities are correct when offline.""" diff --git a/tests/components/teslemetry/test_update.py b/tests/components/teslemetry/test_update.py index f02f09cd19a..62bbcc94516 100644 --- a/tests/components/teslemetry/test_update.py +++ b/tests/components/teslemetry/test_update.py @@ -1,10 +1,10 @@ """Test the Teslemetry update platform.""" import copy -from unittest.mock import AsyncMock, patch +from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL @@ -35,7 +35,7 @@ async def test_update_alt( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the update entities are correct.""" @@ -46,7 +46,7 @@ async def test_update_alt( async def test_update_offline( hass: HomeAssistant, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, ) -> None: """Tests that the update entities are correct when offline.""" @@ -58,7 +58,7 @@ async def test_update_offline( async def test_update_services( hass: HomeAssistant, - mock_vehicle_data: AsyncMock, + mock_vehicle_data, freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, ) -> None: diff --git a/tests/components/tessie/common.py b/tests/components/tessie/common.py index 37a38fffaa4..3d24c6b233a 100644 --- a/tests/components/tessie/common.py +++ b/tests/components/tessie/common.py @@ -54,17 +54,6 @@ LIVE_STATUS = load_json_object_fixture("live_status.json", DOMAIN) SITE_INFO = load_json_object_fixture("site_info.json", DOMAIN) RESPONSE_OK = {"response": {}, "error": None} COMMAND_OK = {"response": {"result": True, "reason": ""}} -SCOPES = [ - "user_data", - "vehicle_device_data", - "vehicle_cmds", - "vehicle_charging_cmds", - "energy_device_data", - "energy_cmds", - "offline_access", - "openid", -] -NO_SCOPES = ["user_data", "offline_access", "openid"] async def setup_platform( diff --git a/tests/components/tessie/conftest.py b/tests/components/tessie/conftest.py index e0aba73af17..79cc9aa44c6 100644 --- a/tests/components/tessie/conftest.py +++ b/tests/components/tessie/conftest.py @@ 
-11,7 +11,6 @@ from .common import ( COMMAND_OK, LIVE_STATUS, PRODUCTS, - SCOPES, SITE_INFO, TEST_STATE_OF_ALL_VEHICLES, TEST_VEHICLE_STATE_ONLINE, @@ -52,16 +51,6 @@ def mock_get_state_of_all_vehicles(): # Fleet API -@pytest.fixture(autouse=True) -def mock_scopes(): - """Mock scopes function.""" - with patch( - "homeassistant.components.tessie.Tessie.scopes", - return_value=SCOPES, - ) as mock_scopes: - yield mock_scopes - - @pytest.fixture(autouse=True) def mock_products(): """Mock Tesla Fleet Api products method.""" diff --git a/tests/components/tessie/fixtures/online.json b/tests/components/tessie/fixtures/online.json index 38b904cdffb..ed49b4bfd75 100644 --- a/tests/components/tessie/fixtures/online.json +++ b/tests/components/tessie/fixtures/online.json @@ -98,8 +98,6 @@ "passenger_temp_setting": 22.5, "remote_heater_control_enabled": false, "right_temp_direction": 234, - "seat_fan_front_left": 0, - "seat_fan_front_right": 0, "seat_heater_left": 0, "seat_heater_rear_center": 0, "seat_heater_rear_left": 0, @@ -159,7 +157,7 @@ "exterior_trim_override": "", "has_air_suspension": false, "has_ludicrous_mode": false, - "has_seat_cooling": true, + "has_seat_cooling": false, "headlamp_type": "Global", "interior_trim_type": "White2", "key_version": 2, @@ -175,7 +173,7 @@ "roof_color": "RoofColorGlass", "seat_type": null, "spoiler_type": "None", - "sun_roof_installed": true, + "sun_roof_installed": null, "supports_qr_pairing": false, "third_row_seats": "None", "timestamp": 1701139037461, diff --git a/tests/components/tessie/fixtures/products.json b/tests/components/tessie/fixtures/products.json index 8da921a33f4..e1b76e4cefb 100644 --- a/tests/components/tessie/fixtures/products.json +++ b/tests/components/tessie/fixtures/products.json @@ -115,17 +115,7 @@ "features": { "rate_plan_manager_no_pricing_constraint": true } - }, - { - "energy_site_id": 98765, - "components": { - "battery": false, - "solar": false, - "grid": false, - "load_meter": false, - "market_type": "residential" - } } ], - "count": 3 + "count": 2 } diff --git a/tests/components/tessie/fixtures/vehicles.json b/tests/components/tessie/fixtures/vehicles.json index 622b31bae69..359e23f9cdd 100644 --- a/tests/components/tessie/fixtures/vehicles.json +++ b/tests/components/tessie/fixtures/vehicles.json @@ -111,8 +111,6 @@ "passenger_temp_setting": 22.5, "remote_heater_control_enabled": false, "right_temp_direction": 234, - "seat_fan_front_left": 0, - "seat_fan_front_right": 0, "seat_heater_left": 0, "seat_heater_rear_center": 0, "seat_heater_rear_left": 0, @@ -176,7 +174,7 @@ "exterior_trim_override": "", "has_air_suspension": false, "has_ludicrous_mode": false, - "has_seat_cooling": true, + "has_seat_cooling": false, "headlamp_type": "Global", "interior_trim_type": "White2", "key_version": 2, @@ -192,7 +190,7 @@ "roof_color": "RoofColorGlass", "seat_type": null, "spoiler_type": "None", - "sun_roof_installed": true, + "sun_roof_installed": null, "supports_qr_pairing": false, "third_row_seats": "None", "timestamp": 1701139037461, diff --git a/tests/components/tessie/snapshots/test_binary_sensors.ambr b/tests/components/tessie/snapshots/test_binary_sensors.ambr index 6c0da044df2..7bc191de6ed 100644 --- a/tests/components/tessie/snapshots/test_binary_sensors.ambr +++ b/tests/components/tessie/snapshots/test_binary_sensors.ambr @@ -1,188 +1,4 @@ # serializer version: 1 -# name: test_binary_sensors[binary_sensor.energy_site_backup_capable-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': 
None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.energy_site_backup_capable', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Backup capable', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'backup_capable', - 'unique_id': '123456-backup_capable', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[binary_sensor.energy_site_backup_capable-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Backup capable', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_backup_capable', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[binary_sensor.energy_site_grid_services_active-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.energy_site_grid_services_active', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Grid services active', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'grid_services_active', - 'unique_id': '123456-grid_services_active', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[binary_sensor.energy_site_grid_services_active-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Grid services active', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_grid_services_active', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[binary_sensor.energy_site_grid_services_enabled-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Grid services enabled', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'components_grid_services_enabled', - 'unique_id': '123456-components_grid_services_enabled', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[binary_sensor.energy_site_grid_services_enabled-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Grid services enabled', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[binary_sensor.energy_site_storm_watch_active-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.energy_site_storm_watch_active', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Storm watch active', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storm_mode_active', - 'unique_id': '123456-storm_mode_active', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[binary_sensor.energy_site_storm_watch_active-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Storm watch active', - }), - 'context': , - 'entity_id': 'binary_sensor.energy_site_storm_watch_active', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_binary_sensors[binary_sensor.test_auto_seat_climate_left-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_cover.ambr b/tests/components/tessie/snapshots/test_cover.ambr index 6338758afb7..ff04c528244 100644 --- a/tests/components/tessie/snapshots/test_cover.ambr +++ b/tests/components/tessie/snapshots/test_cover.ambr @@ -95,54 +95,6 @@ 'state': 'closed', }) # --- -# name: test_covers[cover.test_sunroof-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_sunroof', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sunroof', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'VINVINVIN-vehicle_state_sun_roof_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_covers[cover.test_sunroof-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test Sunroof', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_sunroof', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- # name: test_covers[cover.test_trunk-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_diagnostics.ambr b/tests/components/tessie/snapshots/test_diagnostics.ambr deleted file mode 100644 index 8eef7cbd549..00000000000 --- a/tests/components/tessie/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,428 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics - dict({ - 'energysites': list([ - dict({ - 'info': dict({ - 'backup_reserve_percent': 0, - 'battery_count': 2, - 'components_backup': True, - 'components_backup_time_remaining_enabled': True, - 'components_batteries': list([ - dict({ - 'device_id': 'battery-1-id', - 'din': 'battery-1-din', - 'nameplate_energy': 13500, - 'nameplate_max_charge_power': 5000, - 'nameplate_max_discharge_power': 5000, - 'part_name': 'Powerwall 2', - 'part_number': '3012170-10-B', - 'part_type': 
2, - 'serial_number': '**REDACTED**', - }), - dict({ - 'device_id': 'battery-2-id', - 'din': 'battery-2-din', - 'nameplate_energy': 13500, - 'nameplate_max_charge_power': 5000, - 'nameplate_max_discharge_power': 5000, - 'part_name': 'Powerwall 2', - 'part_number': '3012170-05-C', - 'part_type': 2, - 'serial_number': '**REDACTED**', - }), - ]), - 'components_battery': True, - 'components_battery_solar_offset_view_enabled': True, - 'components_battery_type': 'ac_powerwall', - 'components_car_charging_data_supported': False, - 'components_configurable': True, - 'components_customer_preferred_export_rule': 'pv_only', - 'components_disallow_charge_from_grid_with_solar_installed': True, - 'components_energy_service_self_scheduling_enabled': True, - 'components_energy_value_header': 'Energy Value', - 'components_energy_value_subheader': 'Estimated Value', - 'components_flex_energy_request_capable': False, - 'components_gateway': 'teg', - 'components_gateways': list([ - dict({ - 'device_id': 'gateway-id', - 'din': 'gateway-din', - 'firmware_version': '24.4.0 0fe780c9', - 'is_active': True, - 'part_name': 'Tesla Backup Gateway 2', - 'part_number': '1152100-14-J', - 'part_type': 10, - 'serial_number': '**REDACTED**', - 'site_id': '1234-abcd', - 'updated_datetime': '2024-05-14T00:00:00.000Z', - }), - ]), - 'components_grid': True, - 'components_grid_services_enabled': False, - 'components_load_meter': True, - 'components_net_meter_mode': 'battery_ok', - 'components_off_grid_vehicle_charging_reserve_supported': True, - 'components_set_islanding_mode_enabled': True, - 'components_show_grid_import_battery_source_cards': True, - 'components_solar': True, - 'components_solar_type': 'pv_panel', - 'components_solar_value_enabled': True, - 'components_storm_mode_capable': True, - 'components_system_alerts_enabled': True, - 'components_tou_capable': True, - 'components_vehicle_charging_performance_view_enabled': False, - 'components_vehicle_charging_solar_offset_view_enabled': False, - 'components_wall_connectors': list([ - dict({ - 'device_id': '123abc', - 'din': 'abc123', - 'is_active': True, - }), - dict({ - 'device_id': '234bcd', - 'din': 'bcd234', - 'is_active': True, - }), - ]), - 'components_wifi_commissioning_enabled': True, - 'default_real_mode': 'self_consumption', - 'id': '1233-abcd', - 'installation_date': '**REDACTED**', - 'installation_time_zone': '', - 'max_site_meter_power_ac': 1000000000, - 'min_site_meter_power_ac': -1000000000, - 'nameplate_energy': 40500, - 'nameplate_power': 15000, - 'site_name': 'Site', - 'tou_settings_optimization_strategy': 'economics', - 'tou_settings_schedule': list([ - dict({ - 'end_seconds': 3600, - 'start_seconds': 0, - 'target': 'off_peak', - 'week_days': list([ - 1, - 0, - ]), - }), - dict({ - 'end_seconds': 0, - 'start_seconds': 3600, - 'target': 'peak', - 'week_days': list([ - 1, - 0, - ]), - }), - ]), - 'user_settings_breaker_alert_enabled': False, - 'user_settings_go_off_grid_test_banner_enabled': False, - 'user_settings_powerwall_onboarding_settings_set': True, - 'user_settings_powerwall_tesla_electric_interested_in': False, - 'user_settings_storm_mode_enabled': True, - 'user_settings_sync_grid_alert_enabled': True, - 'user_settings_vpp_tour_enabled': True, - 'version': '23.44.0 eb113390', - 'vpp_backup_reserve_percent': 0, - }), - 'live': dict({ - 'backup_capable': True, - 'battery_power': 5060, - 'energy_left': 38896.47368421053, - 'generator_power': 0, - 'grid_power': 0, - 'grid_services_active': False, - 'grid_services_power': 0, - 'grid_status': 
'Active', - 'island_status': 'on_grid', - 'load_power': 6245, - 'percentage_charged': 95.50537403739663, - 'solar_power': 1185, - 'storm_mode_active': False, - 'timestamp': '2024-01-01T00:00:00+00:00', - 'total_pack_energy': 40727, - 'wall_connectors': dict({ - 'abd-123': dict({ - 'din': 'abd-123', - 'wall_connector_fault_state': 2, - 'wall_connector_power': 0, - 'wall_connector_state': 2, - }), - 'bcd-234': dict({ - 'din': 'bcd-234', - 'wall_connector_fault_state': 2, - 'wall_connector_power': 0, - 'wall_connector_state': 2, - }), - }), - }), - }), - ]), - 'vehicles': list([ - dict({ - 'data': dict({ - 'access_type': 'OWNER', - 'api_version': 67, - 'backseat_token': None, - 'backseat_token_updated_at': None, - 'ble_autopair_enrolled': False, - 'calendar_enabled': True, - 'charge_state_battery_heater_on': False, - 'charge_state_battery_level': 75, - 'charge_state_battery_range': 263.68, - 'charge_state_charge_amps': 32, - 'charge_state_charge_current_request': 32, - 'charge_state_charge_current_request_max': 32, - 'charge_state_charge_enable_request': True, - 'charge_state_charge_energy_added': 18.47, - 'charge_state_charge_limit_soc': 80, - 'charge_state_charge_limit_soc_max': 100, - 'charge_state_charge_limit_soc_min': 50, - 'charge_state_charge_limit_soc_std': 80, - 'charge_state_charge_miles_added_ideal': 84, - 'charge_state_charge_miles_added_rated': 84, - 'charge_state_charge_port_cold_weather_mode': False, - 'charge_state_charge_port_color': '', - 'charge_state_charge_port_door_open': True, - 'charge_state_charge_port_latch': 'Engaged', - 'charge_state_charge_rate': 30.6, - 'charge_state_charger_actual_current': 32, - 'charge_state_charger_phases': 1, - 'charge_state_charger_pilot_current': 32, - 'charge_state_charger_power': 7, - 'charge_state_charger_voltage': 224, - 'charge_state_charging_state': 'Charging', - 'charge_state_conn_charge_cable': 'IEC', - 'charge_state_est_battery_range': 324.73, - 'charge_state_fast_charger_brand': '', - 'charge_state_fast_charger_present': False, - 'charge_state_fast_charger_type': 'ACSingleWireCAN', - 'charge_state_ideal_battery_range': 263.68, - 'charge_state_max_range_charge_counter': 0, - 'charge_state_minutes_to_full_charge': 0, - 'charge_state_not_enough_power_to_heat': None, - 'charge_state_off_peak_charging_enabled': False, - 'charge_state_off_peak_charging_times': 'all_week', - 'charge_state_off_peak_hours_end_time': 900, - 'charge_state_preconditioning_enabled': False, - 'charge_state_preconditioning_times': 'all_week', - 'charge_state_scheduled_charging_mode': 'StartAt', - 'charge_state_scheduled_charging_pending': False, - 'charge_state_scheduled_charging_start_time': 1701216000, - 'charge_state_scheduled_charging_start_time_app': 600, - 'charge_state_scheduled_charging_start_time_minutes': 600, - 'charge_state_scheduled_departure_time': 1694899800, - 'charge_state_scheduled_departure_time_minutes': 450, - 'charge_state_supercharger_session_trip_planner': False, - 'charge_state_time_to_full_charge': 0, - 'charge_state_timestamp': 1701139037461, - 'charge_state_trip_charging': False, - 'charge_state_usable_battery_level': 75, - 'charge_state_user_charge_enable_request': None, - 'climate_state_allow_cabin_overheat_protection': True, - 'climate_state_auto_seat_climate_left': True, - 'climate_state_auto_seat_climate_right': True, - 'climate_state_auto_steering_wheel_heat': True, - 'climate_state_battery_heater': False, - 'climate_state_battery_heater_no_power': None, - 'climate_state_cabin_overheat_protection': 'On', - 
'climate_state_cabin_overheat_protection_actively_cooling': False, - 'climate_state_climate_keeper_mode': 'off', - 'climate_state_cop_activation_temperature': 'High', - 'climate_state_defrost_mode': 0, - 'climate_state_driver_temp_setting': 22.5, - 'climate_state_fan_status': 0, - 'climate_state_hvac_auto_request': 'On', - 'climate_state_inside_temp': 30.4, - 'climate_state_is_auto_conditioning_on': False, - 'climate_state_is_climate_on': False, - 'climate_state_is_front_defroster_on': False, - 'climate_state_is_preconditioning': False, - 'climate_state_is_rear_defroster_on': False, - 'climate_state_left_temp_direction': 234, - 'climate_state_max_avail_temp': 28, - 'climate_state_min_avail_temp': 15, - 'climate_state_outside_temp': 30.5, - 'climate_state_passenger_temp_setting': 22.5, - 'climate_state_remote_heater_control_enabled': False, - 'climate_state_right_temp_direction': 234, - 'climate_state_seat_fan_front_left': 0, - 'climate_state_seat_fan_front_right': 0, - 'climate_state_seat_heater_left': 0, - 'climate_state_seat_heater_rear_center': 0, - 'climate_state_seat_heater_rear_left': 0, - 'climate_state_seat_heater_rear_right': 0, - 'climate_state_seat_heater_right': 0, - 'climate_state_side_mirror_heaters': False, - 'climate_state_steering_wheel_heat_level': 0, - 'climate_state_steering_wheel_heater': False, - 'climate_state_supports_fan_only_cabin_overheat_protection': True, - 'climate_state_timestamp': 1701139037461, - 'climate_state_wiper_blade_heater': False, - 'color': None, - 'display_name': 'Test', - 'drive_state_active_route_destination': 'Giga Texas', - 'drive_state_active_route_energy_at_arrival': 65, - 'drive_state_active_route_latitude': '**REDACTED**', - 'drive_state_active_route_longitude': '**REDACTED**', - 'drive_state_active_route_miles_to_arrival': 46.707353, - 'drive_state_active_route_minutes_to_arrival': 59.2, - 'drive_state_active_route_traffic_minutes_delay': 0, - 'drive_state_gps_as_of': 1701129612, - 'drive_state_heading': 185, - 'drive_state_latitude': '**REDACTED**', - 'drive_state_longitude': '**REDACTED**', - 'drive_state_native_latitude': '**REDACTED**', - 'drive_state_native_location_supported': 1, - 'drive_state_native_longitude': '**REDACTED**', - 'drive_state_native_type': 'wgs', - 'drive_state_power': -7, - 'drive_state_shift_state': None, - 'drive_state_speed': None, - 'drive_state_timestamp': 1701139037461, - 'granular_access_hide_private': False, - 'gui_settings_gui_24_hour_time': False, - 'gui_settings_gui_charge_rate_units': 'kW', - 'gui_settings_gui_distance_units': 'km/hr', - 'gui_settings_gui_range_display': 'Rated', - 'gui_settings_gui_temperature_units': 'C', - 'gui_settings_gui_tirepressure_units': 'Psi', - 'gui_settings_show_range_units': False, - 'gui_settings_timestamp': 1701139037461, - 'id': '**REDACTED**', - 'id_s': '**REDACTED**', - 'in_service': False, - 'state': 'online', - 'tokens': '**REDACTED**', - 'user_id': '**REDACTED**', - 'vehicle_config_aux_park_lamps': 'Eu', - 'vehicle_config_badge_version': 1, - 'vehicle_config_can_accept_navigation_requests': True, - 'vehicle_config_can_actuate_trunks': True, - 'vehicle_config_car_special_type': 'base', - 'vehicle_config_car_type': 'model3', - 'vehicle_config_charge_port_type': 'CCS', - 'vehicle_config_cop_user_set_temp_supported': False, - 'vehicle_config_dashcam_clip_save_supported': True, - 'vehicle_config_default_charge_to_max': False, - 'vehicle_config_driver_assist': 'TeslaAP3', - 'vehicle_config_ece_restrictions': False, - 'vehicle_config_efficiency_package': 'M32021', - 
'vehicle_config_eu_vehicle': True, - 'vehicle_config_exterior_color': 'DeepBlue', - 'vehicle_config_exterior_trim': 'Black', - 'vehicle_config_exterior_trim_override': '', - 'vehicle_config_has_air_suspension': False, - 'vehicle_config_has_ludicrous_mode': False, - 'vehicle_config_has_seat_cooling': True, - 'vehicle_config_headlamp_type': 'Global', - 'vehicle_config_interior_trim_type': 'White2', - 'vehicle_config_key_version': 2, - 'vehicle_config_motorized_charge_port': True, - 'vehicle_config_paint_color_override': '0,9,25,0.7,0.04', - 'vehicle_config_performance_package': 'Base', - 'vehicle_config_plg': True, - 'vehicle_config_pws': False, - 'vehicle_config_rear_drive_unit': 'PM216MOSFET', - 'vehicle_config_rear_seat_heaters': 1, - 'vehicle_config_rear_seat_type': 0, - 'vehicle_config_rhd': True, - 'vehicle_config_roof_color': 'RoofColorGlass', - 'vehicle_config_seat_type': None, - 'vehicle_config_spoiler_type': 'None', - 'vehicle_config_sun_roof_installed': True, - 'vehicle_config_supports_qr_pairing': False, - 'vehicle_config_third_row_seats': 'None', - 'vehicle_config_timestamp': 1701139037461, - 'vehicle_config_trim_badging': '74d', - 'vehicle_config_use_range_badging': True, - 'vehicle_config_utc_offset': 36000, - 'vehicle_config_webcam_selfie_supported': True, - 'vehicle_config_webcam_supported': True, - 'vehicle_config_wheel_type': 'Pinwheel18CapKit', - 'vehicle_id': '**REDACTED**', - 'vehicle_state_api_version': 67, - 'vehicle_state_autopark_state_v2': 'unavailable', - 'vehicle_state_calendar_supported': True, - 'vehicle_state_car_version': '2023.38.6 c1f85ddb415f', - 'vehicle_state_center_display_state': 0, - 'vehicle_state_dashcam_clip_save_available': True, - 'vehicle_state_dashcam_state': 'Recording', - 'vehicle_state_df': 0, - 'vehicle_state_dr': 0, - 'vehicle_state_fd_window': 0, - 'vehicle_state_feature_bitmask': 'fbdffbff,7f', - 'vehicle_state_fp_window': 0, - 'vehicle_state_ft': 0, - 'vehicle_state_is_user_present': False, - 'vehicle_state_locked': True, - 'vehicle_state_media_info_audio_volume': 2.3333, - 'vehicle_state_media_info_audio_volume_increment': 0.333333, - 'vehicle_state_media_info_audio_volume_max': 10.333333, - 'vehicle_state_media_info_media_playback_status': 'Stopped', - 'vehicle_state_media_info_now_playing_album': '', - 'vehicle_state_media_info_now_playing_artist': '', - 'vehicle_state_media_info_now_playing_duration': 0, - 'vehicle_state_media_info_now_playing_elapsed': 0, - 'vehicle_state_media_info_now_playing_source': '', - 'vehicle_state_media_info_now_playing_station': '', - 'vehicle_state_media_info_now_playing_title': '', - 'vehicle_state_media_state_remote_control_enabled': False, - 'vehicle_state_notifications_supported': True, - 'vehicle_state_odometer': 5454.495383, - 'vehicle_state_parsed_calendar_supported': True, - 'vehicle_state_pf': 0, - 'vehicle_state_pr': 0, - 'vehicle_state_rd_window': 0, - 'vehicle_state_remote_start': False, - 'vehicle_state_remote_start_enabled': True, - 'vehicle_state_remote_start_supported': True, - 'vehicle_state_rp_window': 0, - 'vehicle_state_rt': 0, - 'vehicle_state_santa_mode': 0, - 'vehicle_state_sentry_mode': False, - 'vehicle_state_sentry_mode_available': True, - 'vehicle_state_service_mode': False, - 'vehicle_state_service_mode_plus': False, - 'vehicle_state_software_update_download_perc': 100, - 'vehicle_state_software_update_expected_duration_sec': 2700, - 'vehicle_state_software_update_install_perc': 1, - 'vehicle_state_software_update_status': 'available', - 
'vehicle_state_software_update_version': '2023.44.30.4', - 'vehicle_state_speed_limit_mode_active': False, - 'vehicle_state_speed_limit_mode_current_limit_mph': 74.564543, - 'vehicle_state_speed_limit_mode_max_limit_mph': 120, - 'vehicle_state_speed_limit_mode_min_limit_mph': 50, - 'vehicle_state_speed_limit_mode_pin_code_set': True, - 'vehicle_state_timestamp': 1701139037461, - 'vehicle_state_tpms_hard_warning_fl': False, - 'vehicle_state_tpms_hard_warning_fr': False, - 'vehicle_state_tpms_hard_warning_rl': False, - 'vehicle_state_tpms_hard_warning_rr': False, - 'vehicle_state_tpms_last_seen_pressure_time_fl': 1701062077, - 'vehicle_state_tpms_last_seen_pressure_time_fr': 1701062047, - 'vehicle_state_tpms_last_seen_pressure_time_rl': 1701062077, - 'vehicle_state_tpms_last_seen_pressure_time_rr': 1701062047, - 'vehicle_state_tpms_pressure_fl': 2.975, - 'vehicle_state_tpms_pressure_fr': 2.975, - 'vehicle_state_tpms_pressure_rl': 2.95, - 'vehicle_state_tpms_pressure_rr': 2.95, - 'vehicle_state_tpms_rcp_front_value': 2.9, - 'vehicle_state_tpms_rcp_rear_value': 2.9, - 'vehicle_state_tpms_soft_warning_fl': False, - 'vehicle_state_tpms_soft_warning_fr': False, - 'vehicle_state_tpms_soft_warning_rl': False, - 'vehicle_state_tpms_soft_warning_rr': False, - 'vehicle_state_valet_mode': False, - 'vehicle_state_valet_pin_needed': False, - 'vehicle_state_vehicle_name': 'Test', - 'vehicle_state_vehicle_self_test_progress': 0, - 'vehicle_state_vehicle_self_test_requested': False, - 'vehicle_state_webcam_available': True, - 'vin': '**REDACTED**', - }), - }), - ]), - }) -# --- diff --git a/tests/components/tessie/snapshots/test_number.ambr b/tests/components/tessie/snapshots/test_number.ambr index 6e641bdf5b7..c91fb74adeb 100644 --- a/tests/components/tessie/snapshots/test_number.ambr +++ b/tests/components/tessie/snapshots/test_number.ambr @@ -1,120 +1,4 @@ # serializer version: 1 -# name: test_numbers[number.energy_site_backup_reserve-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.energy_site_backup_reserve', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-alert', - 'original_name': 'Backup reserve', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'backup_reserve_percent', - 'unique_id': '123456-backup_reserve_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_numbers[number.energy_site_backup_reserve-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Energy Site Backup reserve', - 'icon': 'mdi:battery-alert', - 'max': 100, - 'min': 0, - 'mode': , - 'step': 1, - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'number.energy_site_backup_reserve', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_numbers[number.energy_site_off_grid_reserve-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 
'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.energy_site_off_grid_reserve', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:battery-unknown', - 'original_name': 'Off grid reserve', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'off_grid_vehicle_charging_reserve_percent', - 'unique_id': '123456-off_grid_vehicle_charging_reserve_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_numbers[number.energy_site_off_grid_reserve-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Energy Site Off grid reserve', - 'icon': 'mdi:battery-unknown', - 'max': 100, - 'min': 0, - 'mode': , - 'step': 1, - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'number.energy_site_off_grid_reserve', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_numbers[number.test_charge_current-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_select.ambr b/tests/components/tessie/snapshots/test_select.ambr index acc1946aab5..fc076aabf14 100644 --- a/tests/components/tessie/snapshots/test_select.ambr +++ b/tests/components/tessie/snapshots/test_select.ambr @@ -1,236 +1,4 @@ # serializer version: 1 -# name: test_select[select.energy_site_allow_export-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.energy_site_allow_export', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allow export', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'components_customer_preferred_export_rule', - 'unique_id': '123456-components_customer_preferred_export_rule', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.energy_site_allow_export-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Allow export', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'select.energy_site_allow_export', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'pv_only', - }) -# --- -# name: test_select[select.energy_site_operation_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.energy_site_operation_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Operation mode', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'default_real_mode', - 'unique_id': '123456-default_real_mode', - 
'unit_of_measurement': None, - }) -# --- -# name: test_select[select.energy_site_operation_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Operation mode', - 'options': list([ - , - , - , - ]), - }), - 'context': , - 'entity_id': 'select.energy_site_operation_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'self_consumption', - }) -# --- -# name: test_select[select.test_seat_cooler_left-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_seat_cooler_left', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Seat cooler left', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_seat_fan_front_left', - 'unique_id': 'VINVINVIN-climate_state_seat_fan_front_left', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_seat_cooler_left-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Seat cooler left', - 'options': list([ - , - , - , - , - ]), - }), - 'context': , - 'entity_id': 'select.test_seat_cooler_left', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_select[select.test_seat_cooler_right-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.test_seat_cooler_right', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Seat cooler right', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'climate_state_seat_fan_front_right', - 'unique_id': 'VINVINVIN-climate_state_seat_fan_front_right', - 'unit_of_measurement': None, - }) -# --- -# name: test_select[select.test_seat_cooler_right-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Seat cooler right', - 'options': list([ - , - , - , - , - ]), - }), - 'context': , - 'entity_id': 'select.test_seat_cooler_right', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_select[select.test_seat_heater_left-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_sensor.ambr b/tests/components/tessie/snapshots/test_sensor.ambr index 0a5ff4603aa..afe229feba0 100644 --- a/tests/components/tessie/snapshots/test_sensor.ambr +++ b/tests/components/tessie/snapshots/test_sensor.ambr @@ -2120,7 +2120,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.0', + 'state': 'unknown', }) # --- # name: test_sensors[sensor.wall_connector_power_2-entry] @@ -2177,7 +2177,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.0', + 'state': 'unknown', }) # --- # 
name: test_sensors[sensor.wall_connector_state-entry] @@ -2249,7 +2249,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'disconnected', + 'state': 'unknown', }) # --- # name: test_sensors[sensor.wall_connector_state_2-entry] @@ -2321,7 +2321,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'disconnected', + 'state': 'unknown', }) # --- # name: test_sensors[sensor.wall_connector_vehicle-entry] diff --git a/tests/components/tessie/snapshots/test_switch.ambr b/tests/components/tessie/snapshots/test_switch.ambr index 3b7a3623de8..db06e028198 100644 --- a/tests/components/tessie/snapshots/test_switch.ambr +++ b/tests/components/tessie/snapshots/test_switch.ambr @@ -1,96 +1,4 @@ # serializer version: 1 -# name: test_switches[switch.energy_site_allow_charging_from_grid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.energy_site_allow_charging_from_grid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allow charging from grid', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'components_disallow_charge_from_grid_with_solar_installed', - 'unique_id': '123456-components_disallow_charge_from_grid_with_solar_installed', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches[switch.energy_site_allow_charging_from_grid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Allow charging from grid', - }), - 'context': , - 'entity_id': 'switch.energy_site_allow_charging_from_grid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switches[switch.energy_site_storm_watch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.energy_site_storm_watch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Storm watch', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'user_settings_storm_mode_enabled', - 'unique_id': '123456-user_settings_storm_mode_enabled', - 'unit_of_measurement': None, - }) -# --- -# name: test_switches[switch.energy_site_storm_watch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Energy Site Storm watch', - }), - 'context': , - 'entity_id': 'switch.energy_site_storm_watch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_switches[switch.test_charge-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_update.ambr b/tests/components/tessie/snapshots/test_update.ambr index 1728c13b0ad..622cf69c7f0 100644 --- a/tests/components/tessie/snapshots/test_update.ambr +++ b/tests/components/tessie/snapshots/test_update.ambr @@ -36,7 +36,6 @@ StateSnapshot({ 
'attributes': ReadOnlyDict({ 'auto_update': False, - 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/tessie/icon.png', 'friendly_name': 'Test Update', 'in_progress': False, @@ -47,7 +46,6 @@ 'skipped_version': None, 'supported_features': , 'title': None, - 'update_percentage': None, }), 'context': , 'entity_id': 'update.test_update', diff --git a/tests/components/tessie/test_config_flow.py b/tests/components/tessie/test_config_flow.py index d51d467002d..f3dc98e6e18 100644 --- a/tests/components/tessie/test_config_flow.py +++ b/tests/components/tessie/test_config_flow.py @@ -67,33 +67,6 @@ async def test_form( assert result2["data"] == TEST_CONFIG -async def test_abort( - hass: HomeAssistant, - mock_config_flow_get_state_of_all_vehicles, - mock_async_setup_entry, -) -> None: - """Test a duplicate entry aborts.""" - - mock_entry = MockConfigEntry( - domain=DOMAIN, - data=TEST_CONFIG, - ) - mock_entry.add_to_hass(hass) - - result1 = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - result2 = await hass.config_entries.flow.async_configure( - result1["flow_id"], - TEST_CONFIG, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "already_configured" - - @pytest.mark.parametrize( ("side_effect", "error"), [ @@ -143,7 +116,14 @@ async def test_reauth( ) mock_entry.add_to_hass(hass) - result1 = await mock_entry.start_reauth_flow(hass) + result1 = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_entry.entry_id, + }, + data=TEST_CONFIG, + ) assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "reauth_confirm" @@ -187,7 +167,15 @@ async def test_reauth_errors( ) mock_entry.add_to_hass(hass) - result1 = await mock_entry.start_reauth_flow(hass) + result1 = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=TEST_CONFIG, + ) result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], diff --git a/tests/components/tessie/test_cover.py b/tests/components/tessie/test_cover.py index 451d1758e56..b731add10f8 100644 --- a/tests/components/tessie/test_cover.py +++ b/tests/components/tessie/test_cover.py @@ -9,7 +9,8 @@ from homeassistant.components.cover import ( DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, - CoverState, + STATE_CLOSED, + STATE_OPEN, ) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant @@ -41,7 +42,6 @@ async def test_covers( ("cover.test_charge_port_door", "open_unlock_charge_port", "close_charge_port"), ("cover.test_frunk", "open_front_trunk", False), ("cover.test_trunk", "open_close_rear_trunk", "open_close_rear_trunk"), - ("cover.test_sunroof", "vent_sunroof", "close_sunroof"), ): # Test open windows if openfunc: @@ -56,7 +56,7 @@ async def test_covers( blocking=True, ) mock_open.assert_called_once() - assert hass.states.get(entity_id).state == CoverState.OPEN + assert hass.states.get(entity_id).state == STATE_OPEN # Test close windows if closefunc: @@ -71,7 +71,7 @@ async def test_covers( blocking=True, ) mock_close.assert_called_once() - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert hass.states.get(entity_id).state == STATE_CLOSED async def test_errors(hass: HomeAssistant) -> None: diff 
--git a/tests/components/tessie/test_diagnostics.py b/tests/components/tessie/test_diagnostics.py deleted file mode 100644 index 5f60c1a06ca..00000000000 --- a/tests/components/tessie/test_diagnostics.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Test the Tessie Diagnostics.""" - -from syrupy.assertion import SnapshotAssertion - -from homeassistant.core import HomeAssistant - -from .common import setup_platform - -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test diagnostics.""" - - entry = await setup_platform(hass) - - diag = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert diag == snapshot diff --git a/tests/components/tessie/test_init.py b/tests/components/tessie/test_init.py index 921ef93b1ae..e37512ea8c4 100644 --- a/tests/components/tessie/test_init.py +++ b/tests/components/tessie/test_init.py @@ -50,21 +50,11 @@ async def test_connection_failure( assert entry.state is ConfigEntryState.SETUP_RETRY -async def test_products_error(hass: HomeAssistant) -> None: - """Test init with a fleet error on products.""" +async def test_fleet_error(hass: HomeAssistant) -> None: + """Test init with a fleet error.""" with patch( "homeassistant.components.tessie.Tessie.products", side_effect=TeslaFleetError ): entry = await setup_platform(hass) assert entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_scopes_error(hass: HomeAssistant) -> None: - """Test init with a fleet error on scopes.""" - - with patch( - "homeassistant.components.tessie.Tessie.scopes", side_effect=TeslaFleetError - ): - entry = await setup_platform(hass) - assert entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/tessie/test_lock.py b/tests/components/tessie/test_lock.py index 1208bb17d55..cfb6168b399 100644 --- a/tests/components/tessie/test_lock.py +++ b/tests/components/tessie/test_lock.py @@ -6,17 +6,17 @@ import pytest from syrupy import SnapshotAssertion from homeassistant.components.lock import ( + ATTR_CODE, DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, - LockState, ) -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.const import ATTR_ENTITY_ID, STATE_LOCKED, STATE_UNLOCKED, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir -from .common import assert_entities, setup_platform +from .common import DOMAIN, assert_entities, setup_platform async def test_locks( @@ -24,6 +24,17 @@ async def test_locks( ) -> None: """Tests that the lock entity is correct.""" + # Create the deprecated speed limit lock entity + entity_registry.async_get_or_create( + LOCK_DOMAIN, + DOMAIN, + "VINVINVIN-vehicle_state_speed_limit_mode_active", + original_name="Charge cable lock", + has_entity_name=True, + translation_key="vehicle_state_speed_limit_mode_active", + disabled_by=er.RegistryEntryDisabler.INTEGRATION, + ) + entry = await setup_platform(hass, [Platform.LOCK]) assert_entities(hass, entry.entry_id, entity_registry, snapshot) @@ -38,7 +49,7 @@ async def test_locks( blocking=True, ) mock_run.assert_called_once() - assert hass.states.get(entity_id).state == LockState.LOCKED + assert hass.states.get(entity_id).state == STATE_LOCKED with 
patch("homeassistant.components.tessie.lock.unlock") as mock_run: await hass.services.async_call( @@ -48,7 +59,7 @@ async def test_locks( blocking=True, ) mock_run.assert_called_once() - assert hass.states.get(entity_id).state == LockState.UNLOCKED + assert hass.states.get(entity_id).state == STATE_UNLOCKED # Test charge cable lock set value functions entity_id = "lock.test_charge_cable_lock" @@ -69,5 +80,67 @@ async def test_locks( {ATTR_ENTITY_ID: [entity_id]}, blocking=True, ) - assert hass.states.get(entity_id).state == LockState.UNLOCKED + assert hass.states.get(entity_id).state == STATE_UNLOCKED mock_run.assert_called_once() + + +async def test_speed_limit_lock( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, +) -> None: + """Tests that the deprecated speed limit lock entity is correct.""" + # Create the deprecated speed limit lock entity + entity = entity_registry.async_get_or_create( + LOCK_DOMAIN, + DOMAIN, + "VINVINVIN-vehicle_state_speed_limit_mode_active", + original_name="Charge cable lock", + has_entity_name=True, + translation_key="vehicle_state_speed_limit_mode_active", + ) + + with patch( + "homeassistant.components.tessie.lock.automations_with_entity", + return_value=["item"], + ): + await setup_platform(hass, [Platform.LOCK]) + assert issue_registry.async_get_issue( + DOMAIN, f"deprecated_speed_limit_{entity.entity_id}_item" + ) + + # Test lock set value functions + with patch( + "homeassistant.components.tessie.lock.enable_speed_limit" + ) as mock_enable_speed_limit: + await hass.services.async_call( + LOCK_DOMAIN, + SERVICE_LOCK, + {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "1234"}, + blocking=True, + ) + assert hass.states.get(entity.entity_id).state == STATE_LOCKED + mock_enable_speed_limit.assert_called_once() + # Assert issue has been raised in the issue register + assert issue_registry.async_get_issue(DOMAIN, "deprecated_speed_limit_locked") + + with patch( + "homeassistant.components.tessie.lock.disable_speed_limit" + ) as mock_disable_speed_limit: + await hass.services.async_call( + LOCK_DOMAIN, + SERVICE_UNLOCK, + {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "1234"}, + blocking=True, + ) + assert hass.states.get(entity.entity_id).state == STATE_UNLOCKED + mock_disable_speed_limit.assert_called_once() + assert issue_registry.async_get_issue(DOMAIN, "deprecated_speed_limit_unlocked") + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + LOCK_DOMAIN, + SERVICE_UNLOCK, + {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "abc"}, + blocking=True, + ) diff --git a/tests/components/tessie/test_number.py b/tests/components/tessie/test_number.py index 0fb13779183..8a3d1a649c7 100644 --- a/tests/components/tessie/test_number.py +++ b/tests/components/tessie/test_number.py @@ -4,16 +4,12 @@ from unittest.mock import patch from syrupy import SnapshotAssertion -from homeassistant.components.number import ( - ATTR_VALUE, - DOMAIN as NUMBER_DOMAIN, - SERVICE_SET_VALUE, -) +from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import TEST_RESPONSE, assert_entities, setup_platform +from .common import assert_entities, setup_platform async def test_numbers( @@ -33,7 +29,7 @@ async def test_numbers( await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: [entity_id], 
ATTR_VALUE: 16}, + {ATTR_ENTITY_ID: [entity_id], "value": 16}, blocking=True, ) mock_set_charging_amps.assert_called_once() @@ -46,7 +42,7 @@ async def test_numbers( await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: [entity_id], ATTR_VALUE: 80}, + {ATTR_ENTITY_ID: [entity_id], "value": 80}, blocking=True, ) mock_set_charge_limit.assert_called_once() @@ -59,41 +55,8 @@ async def test_numbers( await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: [entity_id], ATTR_VALUE: 60}, + {ATTR_ENTITY_ID: [entity_id], "value": 60}, blocking=True, ) mock_set_speed_limit.assert_called_once() assert hass.states.get(entity_id).state == "60.0" - - entity_id = "number.energy_site_backup_reserve" - with patch( - "homeassistant.components.teslemetry.EnergySpecific.backup", - return_value=TEST_RESPONSE, - ) as call: - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: entity_id, - ATTR_VALUE: 80, - }, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == "80" - call.assert_called_once() - - entity_id = "number.energy_site_off_grid_reserve" - with patch( - "homeassistant.components.teslemetry.EnergySpecific.off_grid_vehicle_charging_reserve", - return_value=TEST_RESPONSE, - ) as call: - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 88}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == "88" - call.assert_called_once() diff --git a/tests/components/tessie/test_select.py b/tests/components/tessie/test_select.py index c78923fbf5b..f9526bf0a47 100644 --- a/tests/components/tessie/test_select.py +++ b/tests/components/tessie/test_select.py @@ -4,17 +4,12 @@ from unittest.mock import patch import pytest from syrupy import SnapshotAssertion -from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode -from tesla_fleet_api.exceptions import UnsupportedVehicle from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) -from homeassistant.components.tessie.const import ( - TessieSeatCoolerOptions, - TessieSeatHeaterOptions, -) +from homeassistant.components.tessie.const import TessieSeatHeaterOptions from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -32,8 +27,9 @@ async def test_select( assert_entities(hass, entry.entry_id, entity_registry, snapshot) - # Test changing select entity_id = "select.test_seat_heater_left" + + # Test changing select with patch( "homeassistant.components.tessie.select.set_seat_heat", return_value=TEST_RESPONSE, @@ -49,64 +45,14 @@ async def test_select( assert mock_set.call_args[1]["level"] == 1 assert hass.states.get(entity_id) == snapshot(name=SERVICE_SELECT_OPTION) - # Test site operation mode - entity_id = "select.energy_site_operation_mode" - with patch( - "homeassistant.components.teslemetry.EnergySpecific.operation", - return_value=TEST_RESPONSE, - ) as call: - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: entity_id, - ATTR_OPTION: EnergyOperationMode.AUTONOMOUS.value, - }, - blocking=True, - ) - assert (state := hass.states.get(entity_id)) - assert state.state == EnergyOperationMode.AUTONOMOUS.value - call.assert_called_once() - - # Test site export mode - entity_id = "select.energy_site_allow_export" - with patch( - 
"homeassistant.components.teslemetry.EnergySpecific.grid_import_export", - return_value=TEST_RESPONSE, - ) as call: - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: EnergyExportMode.BATTERY_OK.value}, - blocking=True, - ) - assert (state := hass.states.get(entity_id)) - assert state.state == EnergyExportMode.BATTERY_OK.value - call.assert_called_once() - - # Test changing select - entity_id = "select.test_seat_cooler_left" - with patch( - "homeassistant.components.tessie.select.set_seat_cool", - return_value=TEST_RESPONSE, - ) as mock_set: - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: [entity_id], ATTR_OPTION: TessieSeatCoolerOptions.LOW}, - blocking=True, - ) - mock_set.assert_called_once() - assert mock_set.call_args[1]["seat"] == "front_left" - assert mock_set.call_args[1]["level"] == 1 - async def test_errors(hass: HomeAssistant) -> None: """Tests unknown error is handled.""" await setup_platform(hass, [Platform.SELECT]) + entity_id = "select.test_seat_heater_left" - # Test changing vehicle select with unknown error + # Test setting cover open with unknown error with ( patch( "homeassistant.components.tessie.select.set_seat_heat", @@ -117,31 +63,8 @@ async def test_errors(hass: HomeAssistant) -> None: await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: ["select.test_seat_heater_left"], - ATTR_OPTION: TessieSeatHeaterOptions.LOW, - }, + {ATTR_ENTITY_ID: [entity_id], ATTR_OPTION: TessieSeatHeaterOptions.LOW}, blocking=True, ) mock_set.assert_called_once() assert error.value.__cause__ == ERROR_UNKNOWN - - # Test changing energy select with unknown error - with ( - patch( - "homeassistant.components.tessie.EnergySpecific.operation", - side_effect=UnsupportedVehicle, - ) as mock_set, - pytest.raises(HomeAssistantError) as error, - ): - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: ["select.energy_site_operation_mode"], - ATTR_OPTION: EnergyOperationMode.AUTONOMOUS.value, - }, - blocking=True, - ) - mock_set.assert_called_once() - assert isinstance(error.value.__cause__, UnsupportedVehicle) diff --git a/tests/components/tessie/test_switch.py b/tests/components/tessie/test_switch.py index 499e529b2e8..907be29ddcc 100644 --- a/tests/components/tessie/test_switch.py +++ b/tests/components/tessie/test_switch.py @@ -2,7 +2,6 @@ from unittest.mock import patch -import pytest from syrupy import SnapshotAssertion from homeassistant.components.switch import ( @@ -10,11 +9,11 @@ from homeassistant.components.switch import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import RESPONSE_OK, assert_entities, setup_platform +from .common import assert_entities, setup_platform async def test_switches( @@ -53,56 +52,3 @@ async def test_switches( mock_stop_charging.assert_called_once() assert hass.states.get(entity_id) == snapshot(name=SERVICE_TURN_OFF) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.parametrize( - ("name", "on", "off"), - [ - ( - "energy_site_storm_watch", - "EnergySpecific.storm_mode", - "EnergySpecific.storm_mode", - ), - ( - "energy_site_allow_charging_from_grid", - "EnergySpecific.grid_import_export", - 
"EnergySpecific.grid_import_export", - ), - ], -) -async def test_switch_services( - hass: HomeAssistant, name: str, on: str, off: str -) -> None: - """Tests that the switch service calls work.""" - - await setup_platform(hass, [Platform.SWITCH]) - - entity_id = f"switch.{name}" - with patch( - f"homeassistant.components.teslemetry.{on}", - return_value=RESPONSE_OK, - ) as call: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == STATE_ON - call.assert_called_once() - - with patch( - f"homeassistant.components.teslemetry.{off}", - return_value=RESPONSE_OK, - ) as call: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - call.assert_called_once() diff --git a/tests/components/thethingsnetwork/test_config_flow.py b/tests/components/thethingsnetwork/test_config_flow.py index 99c4a080e17..107d84e099b 100644 --- a/tests/components/thethingsnetwork/test_config_flow.py +++ b/tests/components/thethingsnetwork/test_config_flow.py @@ -4,7 +4,7 @@ import pytest from ttn_client import TTNAuthError from homeassistant.components.thethingsnetwork.const import CONF_APP_ID, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -12,8 +12,6 @@ from homeassistant.data_entry_flow import FlowResultType from . import init_integration from .conftest import API_KEY, APP_ID, HOST -from tests.common import MockConfigEntry - USER_DATA = {CONF_HOST: HOST, CONF_APP_ID: APP_ID, CONF_API_KEY: API_KEY} @@ -94,13 +92,21 @@ async def test_duplicate_entry( async def test_step_reauth( - hass: HomeAssistant, mock_ttnclient, mock_config_entry: MockConfigEntry + hass: HomeAssistant, mock_ttnclient, mock_config_entry ) -> None: """Test that the reauth step works.""" await init_integration(hass, mock_config_entry) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": APP_ID, + "entry_id": mock_config_entry.entry_id, + }, + data=USER_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert not result["errors"] diff --git a/tests/components/thethingsnetwork/test_init.py b/tests/components/thethingsnetwork/test_init.py index e39c764d5f9..1e0b64c933d 100644 --- a/tests/components/thethingsnetwork/test_init.py +++ b/tests/components/thethingsnetwork/test_init.py @@ -4,6 +4,22 @@ import pytest from ttn_client import TTNAuthError from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.setup import async_setup_component + +from .conftest import DOMAIN + + +async def test_error_configuration( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test issue is logged when deprecated configuration is used.""" + await async_setup_component( + hass, DOMAIN, {DOMAIN: {"app_id": "123", "access_key": "42"}} + ) + await hass.async_block_till_done() + assert issue_registry.async_get_issue(DOMAIN, "manual_migration") @pytest.mark.parametrize(("exception_class"), [TTNAuthError, Exception]) 
diff --git a/tests/components/thread/test_discovery.py b/tests/components/thread/test_discovery.py index 3cf195ad40e..d9895aa72b2 100644 --- a/tests/components/thread/test_discovery.py +++ b/tests/components/thread/test_discovery.py @@ -74,7 +74,6 @@ async def test_discover_routers( assert discovered[-1] == ( "aeeb2f594b570bbf", discovery.ThreadRouterDiscoveryData( - instance_name="HomeAssistant OpenThreadBorderRouter #0BBF", addresses=["192.168.0.115"], border_agent_id="230c6a1ac57f6f4be262acf32e5ef52c", brand="homeassistant", @@ -102,7 +101,6 @@ async def test_discover_routers( assert discovered[-1] == ( "f6a99b425a67abed", discovery.ThreadRouterDiscoveryData( - instance_name="Google-Nest-Hub-#ABED", addresses=["192.168.0.124"], border_agent_id="bc3740c3e963aa8735bebecd7cc503c7", brand="google", @@ -182,7 +180,6 @@ async def test_discover_routers_unconfigured( router_discovered_removed.assert_called_once_with( "aeeb2f594b570bbf", discovery.ThreadRouterDiscoveryData( - instance_name="HomeAssistant OpenThreadBorderRouter #0BBF", addresses=["192.168.0.115"], border_agent_id="230c6a1ac57f6f4be262acf32e5ef52c", brand="homeassistant", @@ -229,7 +226,6 @@ async def test_discover_routers_bad_or_missing_optional_data( router_discovered_removed.assert_called_once_with( "aeeb2f594b570bbf", discovery.ThreadRouterDiscoveryData( - instance_name="HomeAssistant OpenThreadBorderRouter #0BBF", addresses=["192.168.0.115"], border_agent_id="230c6a1ac57f6f4be262acf32e5ef52c", brand=None, diff --git a/tests/components/thread/test_websocket_api.py b/tests/components/thread/test_websocket_api.py index fb429acc3e0..f3390a9d8b8 100644 --- a/tests/components/thread/test_websocket_api.py +++ b/tests/components/thread/test_websocket_api.py @@ -353,7 +353,6 @@ async def test_discover_routers( assert msg == { "event": { "data": { - "instance_name": "HomeAssistant OpenThreadBorderRouter #0BBF", "addresses": ["192.168.0.115"], "border_agent_id": "230c6a1ac57f6f4be262acf32e5ef52c", "brand": "homeassistant", @@ -389,7 +388,6 @@ async def test_discover_routers( "brand": "google", "extended_address": "f6a99b425a67abed", "extended_pan_id": "9e75e256f61409a3", - "instance_name": "Google-Nest-Hub-#ABED", "model_name": "Google Nest Hub", "network_name": "NEST-PAN-E1AF", "server": "2d99f293-cd8e-2770-8dd2-6675de9fa000.local.", diff --git a/tests/components/threshold/test_binary_sensor.py b/tests/components/threshold/test_binary_sensor.py index 259009c6319..53a8446c210 100644 --- a/tests/components/threshold/test_binary_sensor.py +++ b/tests/components/threshold/test_binary_sensor.py @@ -2,36 +2,11 @@ import pytest -from homeassistant.components.threshold.const import ( - ATTR_HYSTERESIS, - ATTR_LOWER, - ATTR_POSITION, - ATTR_SENSOR_VALUE, - ATTR_TYPE, - ATTR_UPPER, - CONF_HYSTERESIS, - CONF_LOWER, - CONF_UPPER, - DOMAIN, - POSITION_ABOVE, - POSITION_BELOW, - POSITION_IN_RANGE, - POSITION_UNKNOWN, - TYPE_LOWER, - TYPE_RANGE, - TYPE_UPPER, -) +from homeassistant.components.threshold.const import DOMAIN from homeassistant.const import ( - ATTR_ENTITY_ID, ATTR_UNIT_OF_MEASUREMENT, - CONF_ENTITY_ID, - CONF_NAME, - CONF_PLATFORM, - STATE_OFF, - STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN, - Platform, UnitOfTemperature, ) from homeassistant.core import HomeAssistant @@ -41,378 +16,461 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -@pytest.mark.parametrize( - ("vals", "expected_position", "expected_state"), - [ - ([15], POSITION_BELOW, STATE_OFF), # at threshold - ([15, 16], 
POSITION_ABOVE, STATE_ON), - ([15, 16, 14], POSITION_BELOW, STATE_OFF), - ([15, 16, 14, 15], POSITION_BELOW, STATE_OFF), # below -> threshold - ([15, 16, 14, 15, "cat"], POSITION_UNKNOWN, STATE_UNKNOWN), - ([15, 16, 14, 15, "cat", 15], POSITION_BELOW, STATE_OFF), - ([15, None], POSITION_UNKNOWN, STATE_UNKNOWN), - ], -) -async def test_sensor_upper( - hass: HomeAssistant, - vals: list[float | str | None], - expected_position: str, - expected_state: str, -) -> None: +async def test_sensor_upper(hass: HomeAssistant) -> None: """Test if source is above threshold.""" config = { - Platform.BINARY_SENSOR: { - CONF_PLATFORM: "threshold", - CONF_UPPER: "15", - CONF_ENTITY_ID: "sensor.test_monitored", + "binary_sensor": { + "platform": "threshold", + "upper": "15", + "entity_id": "sensor.test_monitored", } } - assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) + assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + # Set the monitored sensor's state to the threshold + hass.states.async_set("sensor.test_monitored", 15) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" - assert state.attributes[ATTR_UPPER] == float( - config[Platform.BINARY_SENSOR][CONF_UPPER] + assert state.attributes["position"] == "below" + assert state.state == "off" + + hass.states.async_set( + "sensor.test_monitored", + 16, + {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, ) - assert state.attributes[ATTR_HYSTERESIS] == 0.0 - assert state.attributes[ATTR_TYPE] == TYPE_UPPER - - for val in vals: - hass.states.async_set("sensor.test_monitored", val) - await hass.async_block_till_done() + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_POSITION] == expected_position - assert state.state == expected_state + assert state.attributes["entity_id"] == "sensor.test_monitored" + assert state.attributes["sensor_value"] == 16 + assert state.attributes["position"] == "above" + assert state.attributes["upper"] == float(config["binary_sensor"]["upper"]) + assert state.attributes["hysteresis"] == 0.0 + assert state.attributes["type"] == "upper" + assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", 14) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "below" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", 15) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "below" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", "cat") + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "unknown" + assert state.state == "unknown" + + hass.states.async_set("sensor.test_monitored", 15) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "below" + assert state.state == "off" -@pytest.mark.parametrize( - ("vals", "expected_position", "expected_state"), - [ - ([15], POSITION_ABOVE, STATE_OFF), # at threshold - ([15, 16], POSITION_ABOVE, STATE_OFF), - ([15, 16, 14], POSITION_BELOW, STATE_ON), - ([15, 16, 14, 15], POSITION_BELOW, STATE_ON), - ([15, 16, 14, 15, "cat"], POSITION_UNKNOWN, STATE_UNKNOWN), - ([15, 16, 
14, 15, "cat", 15], POSITION_ABOVE, STATE_OFF), - ([15, None], POSITION_UNKNOWN, STATE_UNKNOWN), - ], -) -async def test_sensor_lower( - hass: HomeAssistant, - vals: list[float | str | None], - expected_position: str, - expected_state: str, -) -> None: +async def test_sensor_lower(hass: HomeAssistant) -> None: """Test if source is below threshold.""" config = { - Platform.BINARY_SENSOR: { - CONF_PLATFORM: "threshold", - CONF_LOWER: "15", - CONF_ENTITY_ID: "sensor.test_monitored", + "binary_sensor": { + "platform": "threshold", + "lower": "15", + "entity_id": "sensor.test_monitored", } } - assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) + assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + # Set the monitored sensor's state to the threshold + hass.states.async_set("sensor.test_monitored", 15) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" - assert state.attributes[ATTR_LOWER] == float( - config[Platform.BINARY_SENSOR][CONF_LOWER] - ) - assert state.attributes[ATTR_HYSTERESIS] == 0.0 - assert state.attributes[ATTR_TYPE] == TYPE_LOWER + assert state.attributes["position"] == "above" + assert state.state == "off" - for val in vals: - hass.states.async_set("sensor.test_monitored", val) - await hass.async_block_till_done() + hass.states.async_set("sensor.test_monitored", 16) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_POSITION] == expected_position - assert state.state == expected_state + assert state.attributes["position"] == "above" + assert state.attributes["lower"] == float(config["binary_sensor"]["lower"]) + assert state.attributes["hysteresis"] == 0.0 + assert state.attributes["type"] == "lower" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", 14) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "below" + assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", 15) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "below" + assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", "cat") + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "unknown" + assert state.state == "unknown" + + hass.states.async_set("sensor.test_monitored", 15) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "above" + assert state.state == "off" -@pytest.mark.parametrize( - ("vals", "expected_position", "expected_state"), - [ - ([17.5], POSITION_BELOW, STATE_OFF), # threshold + hysteresis - ([17.5, 12.5], POSITION_BELOW, STATE_OFF), # threshold - hysteresis - ([17.5, 12.5, 20], POSITION_ABOVE, STATE_ON), - ([17.5, 12.5, 20, 13], POSITION_ABOVE, STATE_ON), - ([17.5, 12.5, 20, 13, 12], POSITION_BELOW, STATE_OFF), - ([17.5, 12.5, 20, 13, 12, 17], POSITION_BELOW, STATE_OFF), - ([17.5, 12.5, 20, 13, 12, 17, 18], POSITION_ABOVE, STATE_ON), - ([17.5, 12.5, 20, 13, 12, 17, 18, "cat"], POSITION_UNKNOWN, STATE_UNKNOWN), - ([17.5, 12.5, 20, 13, 12, 17, 18, "cat", 18], POSITION_ABOVE, STATE_ON), - ([18, None], POSITION_UNKNOWN, STATE_UNKNOWN), - # below within -> above - ([14, 
17.6], POSITION_ABOVE, STATE_ON), - # above within -> below - ([16, 12.4], POSITION_BELOW, STATE_OFF), - # below within -> above within - ([14, 16], POSITION_BELOW, STATE_OFF), - # above within -> below within - ([16, 14], POSITION_BELOW, STATE_OFF), - # above -> above within -> below within - ([20, 16, 14], POSITION_ABOVE, STATE_ON), - # below -> below within -> above within - ([10, 14, 16], POSITION_BELOW, STATE_OFF), - ], -) -async def test_sensor_upper_hysteresis( - hass: HomeAssistant, - vals: list[float | str | None], - expected_position: str, - expected_state: str, -) -> None: +async def test_sensor_upper_hysteresis(hass: HomeAssistant) -> None: """Test if source is above threshold using hysteresis.""" config = { - Platform.BINARY_SENSOR: { - CONF_PLATFORM: "threshold", - CONF_UPPER: "15", - CONF_HYSTERESIS: "2.5", - CONF_ENTITY_ID: "sensor.test_monitored", + "binary_sensor": { + "platform": "threshold", + "upper": "15", + "hysteresis": "2.5", + "entity_id": "sensor.test_monitored", } } - assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) + assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + # Set the monitored sensor's state to the threshold + hysteresis + hass.states.async_set("sensor.test_monitored", 17.5) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" - assert state.attributes[ATTR_UPPER] == float( - config[Platform.BINARY_SENSOR][CONF_UPPER] - ) - assert state.attributes[ATTR_HYSTERESIS] == 2.5 - assert state.attributes[ATTR_TYPE] == TYPE_UPPER + assert state.attributes["position"] == "below" + assert state.state == "off" - for val in vals: - hass.states.async_set("sensor.test_monitored", val) - await hass.async_block_till_done() + # Set the monitored sensor's state to the threshold - hysteresis + hass.states.async_set("sensor.test_monitored", 12.5) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_POSITION] == expected_position - assert state.state == expected_state + assert state.attributes["position"] == "below" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", 20) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "above" + assert state.attributes["upper"] == float(config["binary_sensor"]["upper"]) + assert state.attributes["hysteresis"] == 2.5 + assert state.attributes["type"] == "upper" + assert state.attributes["position"] == "above" + assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", 13) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "above" + assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", 12) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "below" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", 17) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "below" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", 18) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "above" + 
assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", "cat") + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "unknown" + assert state.state == "unknown" + + hass.states.async_set("sensor.test_monitored", 18) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "above" + assert state.state == "on" -@pytest.mark.parametrize( - ("vals", "expected_position", "expected_state"), - [ - ([17.5], POSITION_ABOVE, STATE_OFF), # threshold + hysteresis - ([17.5, 12.5], POSITION_ABOVE, STATE_OFF), # threshold - hysteresis - ([17.5, 12.5, 20], POSITION_ABOVE, STATE_OFF), - ([17.5, 12.5, 20, 13], POSITION_ABOVE, STATE_OFF), - ([17.5, 12.5, 20, 13, 12], POSITION_BELOW, STATE_ON), - ([17.5, 12.5, 20, 13, 12, 17], POSITION_BELOW, STATE_ON), - ([17.5, 12.5, 20, 13, 12, 17, 18], POSITION_ABOVE, STATE_OFF), - ([17.5, 12.5, 20, 13, 12, 17, 18, "cat"], POSITION_UNKNOWN, STATE_UNKNOWN), - ([17.5, 12.5, 20, 13, 12, 17, 18, "cat", 18], POSITION_ABOVE, STATE_OFF), - ([18, None], POSITION_UNKNOWN, STATE_UNKNOWN), - # below within -> above - ([14, 17.6], POSITION_ABOVE, STATE_OFF), - # above within -> below - ([16, 12.4], POSITION_BELOW, STATE_ON), - # below within -> above within - ([14, 16], POSITION_ABOVE, STATE_OFF), - # above within -> below within - ([16, 14], POSITION_ABOVE, STATE_OFF), - # above -> above within -> below within - ([20, 16, 14], POSITION_ABOVE, STATE_OFF), - # below -> below within -> above within - ([10, 14, 16], POSITION_BELOW, STATE_ON), - ], -) -async def test_sensor_lower_hysteresis( - hass: HomeAssistant, - vals: list[float | str | None], - expected_position: str, - expected_state: str, -) -> None: +async def test_sensor_lower_hysteresis(hass: HomeAssistant) -> None: """Test if source is below threshold using hysteresis.""" config = { - Platform.BINARY_SENSOR: { - CONF_PLATFORM: "threshold", - CONF_LOWER: "15", - CONF_HYSTERESIS: "2.5", - CONF_ENTITY_ID: "sensor.test_monitored", + "binary_sensor": { + "platform": "threshold", + "lower": "15", + "hysteresis": "2.5", + "entity_id": "sensor.test_monitored", } } - assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) + assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + # Set the monitored sensor's state to the threshold + hysteresis + hass.states.async_set("sensor.test_monitored", 17.5) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" - assert state.attributes[ATTR_LOWER] == float( - config[Platform.BINARY_SENSOR][CONF_LOWER] - ) - assert state.attributes[ATTR_HYSTERESIS] == 2.5 - assert state.attributes[ATTR_TYPE] == TYPE_LOWER + assert state.attributes["position"] == "above" + assert state.state == "off" - for val in vals: - hass.states.async_set("sensor.test_monitored", val) - await hass.async_block_till_done() + # Set the monitored sensor's state to the threshold - hysteresis + hass.states.async_set("sensor.test_monitored", 12.5) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_POSITION] == expected_position - assert state.state == expected_state + assert state.attributes["position"] == "above" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", 20) + await hass.async_block_till_done() 
+ state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "above" + assert state.attributes["lower"] == float(config["binary_sensor"]["lower"]) + assert state.attributes["hysteresis"] == 2.5 + assert state.attributes["type"] == "lower" + assert state.attributes["position"] == "above" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", 13) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "above" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", 12) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "below" + assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", 17) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "below" + assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", 18) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "above" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", "cat") + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "unknown" + assert state.state == "unknown" + + hass.states.async_set("sensor.test_monitored", 18) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "above" + assert state.state == "off" -@pytest.mark.parametrize( - ("vals", "expected_position", "expected_state"), - [ - ([10], POSITION_IN_RANGE, STATE_ON), # at lower threshold - ([10, 20], POSITION_IN_RANGE, STATE_ON), # lower threshold -> upper threshold - ([10, 20, 16], POSITION_IN_RANGE, STATE_ON), - ([10, 20, 16, 9], POSITION_BELOW, STATE_OFF), - ([10, 20, 16, 9, 21], POSITION_ABOVE, STATE_OFF), - ([10, 20, 16, 9, 21, "cat"], POSITION_UNKNOWN, STATE_UNKNOWN), - ([10, 20, 16, 9, 21, "cat", 21], POSITION_ABOVE, STATE_OFF), - ([21, None], POSITION_UNKNOWN, STATE_UNKNOWN), - # upper threshold -> lower threshold - ([20, 10], POSITION_IN_RANGE, STATE_ON), - # in-range -> upper threshold - ([15, 20], POSITION_IN_RANGE, STATE_ON), - # in-range -> lower threshold - ([15, 10], POSITION_IN_RANGE, STATE_ON), - # below -> above - ([5, 25], POSITION_ABOVE, STATE_OFF), - # above -> below - ([25, 5], POSITION_BELOW, STATE_OFF), - # in-range -> above - ([15, 25], POSITION_ABOVE, STATE_OFF), - # in-range -> below - ([15, 5], POSITION_BELOW, STATE_OFF), - ], -) -async def test_sensor_in_range_no_hysteresis( - hass: HomeAssistant, - vals: list[float | str | None], - expected_position: str, - expected_state: str, -) -> None: +async def test_sensor_in_range_no_hysteresis(hass: HomeAssistant) -> None: """Test if source is within the range.""" config = { - Platform.BINARY_SENSOR: { - CONF_PLATFORM: "threshold", - CONF_LOWER: "10", - CONF_UPPER: "20", - CONF_ENTITY_ID: "sensor.test_monitored", + "binary_sensor": { + "platform": "threshold", + "lower": "10", + "upper": "20", + "entity_id": "sensor.test_monitored", } } - assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) + assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + # Set the monitored sensor's state to the lower threshold + 
hass.states.async_set("sensor.test_monitored", 10) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" - assert state.attributes[ATTR_LOWER] == float( - config[Platform.BINARY_SENSOR][CONF_LOWER] - ) - assert state.attributes[ATTR_UPPER] == float( - config[Platform.BINARY_SENSOR][CONF_UPPER] - ) - assert state.attributes[ATTR_HYSTERESIS] == 0.0 - assert state.attributes[ATTR_TYPE] == TYPE_RANGE + assert state.attributes["position"] == "in_range" + assert state.state == "on" - for val in vals: - hass.states.async_set("sensor.test_monitored", val) - await hass.async_block_till_done() + # Set the monitored sensor's state to the upper threshold + hass.states.async_set("sensor.test_monitored", 20) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_POSITION] == expected_position - assert state.state == expected_state + assert state.attributes["position"] == "in_range" + assert state.state == "on" + + hass.states.async_set( + "sensor.test_monitored", + 16, + {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["entity_id"] == "sensor.test_monitored" + assert state.attributes["sensor_value"] == 16 + assert state.attributes["position"] == "in_range" + assert state.attributes["lower"] == float(config["binary_sensor"]["lower"]) + assert state.attributes["upper"] == float(config["binary_sensor"]["upper"]) + assert state.attributes["hysteresis"] == 0.0 + assert state.attributes["type"] == "range" + assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", 9) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "below" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", 21) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "above" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", "cat") + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "unknown" + assert state.state == "unknown" + + hass.states.async_set("sensor.test_monitored", 21) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "above" + assert state.state == "off" -@pytest.mark.parametrize( - ("vals", "expected_position", "expected_state"), - [ - ([12], POSITION_IN_RANGE, STATE_ON), # lower threshold + hysteresis - ([12, 22], POSITION_IN_RANGE, STATE_ON), # upper threshold + hysteresis - ([12, 22, 18], POSITION_IN_RANGE, STATE_ON), # upper threshold - hysteresis - ([12, 22, 18, 16], POSITION_IN_RANGE, STATE_ON), - ([12, 22, 18, 16, 8], POSITION_IN_RANGE, STATE_ON), - ([12, 22, 18, 16, 8, 7], POSITION_BELOW, STATE_OFF), - ([12, 22, 18, 16, 8, 7, 12], POSITION_BELOW, STATE_OFF), - ([12, 22, 18, 16, 8, 7, 12, 13], POSITION_IN_RANGE, STATE_ON), - ([12, 22, 18, 16, 8, 7, 12, 13, 22], POSITION_IN_RANGE, STATE_ON), - ([12, 22, 18, 16, 8, 7, 12, 13, 22, 23], POSITION_ABOVE, STATE_OFF), - ([12, 22, 18, 16, 8, 7, 12, 13, 22, 23, 18], POSITION_ABOVE, STATE_OFF), - ([12, 22, 18, 16, 8, 7, 12, 13, 22, 23, 18, 17], POSITION_IN_RANGE, STATE_ON), - ( - [12, 22, 18, 16, 8, 7, 12, 
13, 22, 23, 18, 17, "cat"], - POSITION_UNKNOWN, - STATE_UNKNOWN, - ), - ( - [12, 22, 18, 16, 8, 7, 12, 13, 22, 23, 18, 17, "cat", 17], - POSITION_IN_RANGE, - STATE_ON, - ), - ([17, None], POSITION_UNKNOWN, STATE_UNKNOWN), - # upper threshold -> lower threshold - ([20, 10], POSITION_IN_RANGE, STATE_ON), - # in-range -> upper threshold - ([15, 20], POSITION_IN_RANGE, STATE_ON), - # in-range -> lower threshold - ([15, 10], POSITION_IN_RANGE, STATE_ON), - # below -> above - ([5, 25], POSITION_ABOVE, STATE_OFF), - # above -> below - ([25, 5], POSITION_BELOW, STATE_OFF), - # in-range -> above - ([15, 25], POSITION_ABOVE, STATE_OFF), - # in-range -> below - ([15, 5], POSITION_BELOW, STATE_OFF), - # below -> lower threshold - ([5, 10], POSITION_BELOW, STATE_OFF), - # below -> in-range -> lower threshold - ([5, 15, 10], POSITION_IN_RANGE, STATE_ON), - # above -> upper threshold - ([25, 20], POSITION_ABOVE, STATE_OFF), - # above -> in-range -> upper threshold - ([25, 15, 20], POSITION_IN_RANGE, STATE_ON), - ([15, 22.1], POSITION_ABOVE, STATE_OFF), # in-range -> above hysteresis edge - ([15, 7.9], POSITION_BELOW, STATE_OFF), # in-range -> below hysteresis edge - ([7, 11.9], POSITION_BELOW, STATE_OFF), - ([23, 18.1], POSITION_ABOVE, STATE_OFF), - ], -) -async def test_sensor_in_range_with_hysteresis( - hass: HomeAssistant, - vals: list[float | str | None], - expected_position: str, - expected_state: str, -) -> None: +async def test_sensor_in_range_with_hysteresis(hass: HomeAssistant) -> None: """Test if source is within the range.""" config = { - Platform.BINARY_SENSOR: { - CONF_PLATFORM: "threshold", - CONF_LOWER: "10", - CONF_UPPER: "20", - CONF_HYSTERESIS: "2", - CONF_ENTITY_ID: "sensor.test_monitored", + "binary_sensor": { + "platform": "threshold", + "lower": "10", + "upper": "20", + "hysteresis": "2", + "entity_id": "sensor.test_monitored", } } - assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) + assert await async_setup_component(hass, "binary_sensor", config) + await hass.async_block_till_done() + + # Set the monitored sensor's state to the lower threshold - hysteresis + hass.states.async_set("sensor.test_monitored", 8) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "in_range" + assert state.state == "on" + + # Set the monitored sensor's state to the lower threshold + hysteresis + hass.states.async_set("sensor.test_monitored", 12) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "in_range" + assert state.state == "on" + + # Set the monitored sensor's state to the upper threshold + hysteresis + hass.states.async_set("sensor.test_monitored", 22) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "in_range" + assert state.state == "on" + + # Set the monitored sensor's state to the upper threshold - hysteresis + hass.states.async_set("sensor.test_monitored", 18) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "in_range" + assert state.state == "on" + + hass.states.async_set( + "sensor.test_monitored", + 16, + {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" - assert 
state.attributes[ATTR_LOWER] == float( - config[Platform.BINARY_SENSOR][CONF_LOWER] - ) - assert state.attributes[ATTR_UPPER] == float( - config[Platform.BINARY_SENSOR][CONF_UPPER] - ) - assert state.attributes[ATTR_HYSTERESIS] == 2.0 - assert state.attributes[ATTR_TYPE] == TYPE_RANGE - for val in vals: - hass.states.async_set("sensor.test_monitored", val) - await hass.async_block_till_done() + assert state.attributes["entity_id"] == "sensor.test_monitored" + assert state.attributes["sensor_value"] == 16 + assert state.attributes["position"] == "in_range" + assert state.attributes["lower"] == float(config["binary_sensor"]["lower"]) + assert state.attributes["upper"] == float(config["binary_sensor"]["upper"]) + assert state.attributes["hysteresis"] == float( + config["binary_sensor"]["hysteresis"] + ) + assert state.attributes["type"] == "range" + assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", 8) + await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_POSITION] == expected_position - assert state.state == expected_state + assert state.attributes["position"] == "in_range" + assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", 7) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "below" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", 12) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "below" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", 13) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "in_range" + assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", 22) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "in_range" + assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", 23) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "above" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", 18) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "above" + assert state.state == "off" + + hass.states.async_set("sensor.test_monitored", 17) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "in_range" + assert state.state == "on" + + hass.states.async_set("sensor.test_monitored", "cat") + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "unknown" + assert state.state == "unknown" + + hass.states.async_set("sensor.test_monitored", 17) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.threshold") + assert state.attributes["position"] == "in_range" + assert state.state == "on" async def test_sensor_in_range_unknown_state( @@ -420,15 +478,15 @@ async def test_sensor_in_range_unknown_state( ) -> None: """Test if source is within the range.""" config = { - Platform.BINARY_SENSOR: { - CONF_PLATFORM: "threshold", - CONF_LOWER: "10", - CONF_UPPER: "20", - CONF_ENTITY_ID: 
"sensor.test_monitored", + "binary_sensor": { + "platform": "threshold", + "lower": "10", + "upper": "20", + "entity_id": "sensor.test_monitored", } } - assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) + assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() hass.states.async_set( @@ -440,30 +498,26 @@ async def test_sensor_in_range_unknown_state( state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_ENTITY_ID] == "sensor.test_monitored" - assert state.attributes[ATTR_SENSOR_VALUE] == 16 - assert state.attributes[ATTR_POSITION] == POSITION_IN_RANGE - assert state.attributes[ATTR_LOWER] == float( - config[Platform.BINARY_SENSOR][CONF_LOWER] - ) - assert state.attributes[ATTR_UPPER] == float( - config[Platform.BINARY_SENSOR][CONF_UPPER] - ) - assert state.attributes[ATTR_HYSTERESIS] == 0.0 - assert state.attributes[ATTR_TYPE] == TYPE_RANGE - assert state.state == STATE_ON + assert state.attributes["entity_id"] == "sensor.test_monitored" + assert state.attributes["sensor_value"] == 16 + assert state.attributes["position"] == "in_range" + assert state.attributes["lower"] == float(config["binary_sensor"]["lower"]) + assert state.attributes["upper"] == float(config["binary_sensor"]["upper"]) + assert state.attributes["hysteresis"] == 0.0 + assert state.attributes["type"] == "range" + assert state.state == "on" hass.states.async_set("sensor.test_monitored", STATE_UNKNOWN) await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_POSITION] == POSITION_UNKNOWN - assert state.state == STATE_UNKNOWN + assert state.attributes["position"] == "unknown" + assert state.state == "unknown" hass.states.async_set("sensor.test_monitored", STATE_UNAVAILABLE) await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_POSITION] == POSITION_UNKNOWN - assert state.state == STATE_UNKNOWN + assert state.attributes["position"] == "unknown" + assert state.state == "unknown" assert "State is not numerical" not in caplog.text @@ -471,57 +525,53 @@ async def test_sensor_in_range_unknown_state( async def test_sensor_lower_zero_threshold(hass: HomeAssistant) -> None: """Test if a lower threshold of zero is set.""" config = { - Platform.BINARY_SENSOR: { - CONF_PLATFORM: "threshold", - CONF_LOWER: "0", - CONF_ENTITY_ID: "sensor.test_monitored", + "binary_sensor": { + "platform": "threshold", + "lower": "0", + "entity_id": "sensor.test_monitored", } } - assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) + assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() hass.states.async_set("sensor.test_monitored", 16) await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_TYPE] == TYPE_LOWER - assert state.attributes[ATTR_LOWER] == float( - config[Platform.BINARY_SENSOR][CONF_LOWER] - ) - assert state.state == STATE_OFF + assert state.attributes["type"] == "lower" + assert state.attributes["lower"] == float(config["binary_sensor"]["lower"]) + assert state.state == "off" hass.states.async_set("sensor.test_monitored", -3) await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.state == STATE_ON + assert state.state == "on" async def test_sensor_upper_zero_threshold(hass: HomeAssistant) -> None: """Test if an upper threshold of zero is set.""" config = { - 
Platform.BINARY_SENSOR: { - CONF_PLATFORM: "threshold", - CONF_UPPER: "0", - CONF_ENTITY_ID: "sensor.test_monitored", + "binary_sensor": { + "platform": "threshold", + "upper": "0", + "entity_id": "sensor.test_monitored", } } - assert await async_setup_component(hass, Platform.BINARY_SENSOR, config) + assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() hass.states.async_set("sensor.test_monitored", -10) await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.attributes[ATTR_TYPE] == TYPE_UPPER - assert state.attributes[ATTR_UPPER] == float( - config[Platform.BINARY_SENSOR][CONF_UPPER] - ) - assert state.state == STATE_OFF + assert state.attributes["type"] == "upper" + assert state.attributes["upper"] == float(config["binary_sensor"]["upper"]) + assert state.state == "off" hass.states.async_set("sensor.test_monitored", 2) await hass.async_block_till_done() state = hass.states.get("binary_sensor.threshold") - assert state.state == STATE_ON + assert state.state == "on" async def test_sensor_no_lower_upper( @@ -529,16 +579,16 @@ async def test_sensor_no_lower_upper( ) -> None: """Test if no lower or upper has been provided.""" config = { - Platform.BINARY_SENSOR: { - CONF_PLATFORM: "threshold", - CONF_ENTITY_ID: "sensor.test_monitored", + "binary_sensor": { + "platform": "threshold", + "entity_id": "sensor.test_monitored", } } - await async_setup_component(hass, Platform.BINARY_SENSOR, config) + await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() - assert "Lower or Upper thresholds are not provided" in caplog.text + assert "Lower or Upper thresholds not provided" in caplog.text async def test_device_id( @@ -568,11 +618,11 @@ async def test_device_id( data={}, domain=DOMAIN, options={ - CONF_ENTITY_ID: "sensor.test_source", - CONF_HYSTERESIS: 0.0, - CONF_LOWER: -2.0, - CONF_NAME: "Threshold", - CONF_UPPER: None, + "entity_id": "sensor.test_source", + "hysteresis": 0.0, + "lower": -2.0, + "name": "Threshold", + "upper": None, }, title="Threshold", ) diff --git a/tests/components/tibber/conftest.py b/tests/components/tibber/conftest.py index 441a9d0b888..fc6596444c5 100644 --- a/tests/components/tibber/conftest.py +++ b/tests/components/tibber/conftest.py @@ -5,7 +5,6 @@ from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch import pytest -from homeassistant.components.recorder import Recorder from homeassistant.components.tibber.const import DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant @@ -27,8 +26,8 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture async def mock_tibber_setup( - recorder_mock: Recorder, config_entry: MockConfigEntry, hass: HomeAssistant -) -> AsyncGenerator[MagicMock]: + config_entry: MockConfigEntry, hass: HomeAssistant +) -> AsyncGenerator[None, MagicMock]: """Mock tibber entry setup.""" unique_user_id = "unique_user_id" title = "title" diff --git a/tests/components/tibber/test_config_flow.py b/tests/components/tibber/test_config_flow.py index 0c12c4a247b..28b590a29d2 100644 --- a/tests/components/tibber/test_config_flow.py +++ b/tests/components/tibber/test_config_flow.py @@ -5,11 +5,7 @@ from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from aiohttp import ClientError import pytest -from tibber import ( - FatalHttpExceptionError, - InvalidLoginError, - RetryableHttpExceptionError, -) +from tibber import FatalHttpException, 
InvalidLogin, RetryableHttpException
 from homeassistant import config_entries
 from homeassistant.components.recorder import Recorder
@@ -70,9 +66,9 @@ async def test_create_entry(recorder_mock: Recorder, hass: HomeAssistant) -> Non
     [
         (TimeoutError, ERR_TIMEOUT),
         (ClientError, ERR_CLIENT),
-        (InvalidLoginError(401), ERR_TOKEN),
-        (RetryableHttpExceptionError(503), ERR_CLIENT),
-        (FatalHttpExceptionError(404), ERR_CLIENT),
+        (InvalidLogin(401), ERR_TOKEN),
+        (RetryableHttpException(503), ERR_CLIENT),
+        (FatalHttpException(404), ERR_CLIENT),
     ],
 )
 async def test_create_entry_exceptions(
diff --git a/tests/components/tibber/test_diagnostics.py b/tests/components/tibber/test_diagnostics.py
index 16c735596d0..34ecb63dfec 100644
--- a/tests/components/tibber/test_diagnostics.py
+++ b/tests/components/tibber/test_diagnostics.py
@@ -19,9 +19,12 @@ async def test_entry_diagnostics(
     config_entry,
 ) -> None:
     """Test config entry diagnostics."""
-    with patch(
-        "tibber.Tibber.update_info",
-        return_value=None,
+    with (
+        patch(
+            "tibber.Tibber.update_info",
+            return_value=None,
+        ),
+        patch("homeassistant.components.tibber.discovery.async_load_platform"),
     ):
         assert await async_setup_component(hass, "tibber", {})
diff --git a/tests/components/tibber/test_notify.py b/tests/components/tibber/test_notify.py
index 9b731e78bf6..69af92c4d5d 100644
--- a/tests/components/tibber/test_notify.py
+++ b/tests/components/tibber/test_notify.py
@@ -6,6 +6,7 @@ from unittest.mock import MagicMock
 import pytest
 from homeassistant.components.recorder import Recorder
+from homeassistant.components.tibber import DOMAIN
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
@@ -18,8 +19,18 @@ async def test_notification_services(
     notify_state = hass.states.get("notify.tibber")
     assert notify_state is not None
+    # Assert legacy notify service has been added
+    assert hass.services.has_service("notify", DOMAIN)
+
+    # Test legacy notify service
+    service = "tibber"
+    service_data = {"message": "The message", "title": "A title"}
+    await hass.services.async_call("notify", service, service_data, blocking=True)
     calls: MagicMock = mock_tibber_setup.send_notification
+    calls.assert_called_once_with(message="The message", title="A title")
+    calls.reset_mock()
+
     # Test notify entity service
     service = "send_message"
     service_data = {
@@ -33,6 +44,15 @@ async def test_notification_services(
     calls.side_effect = TimeoutError
+    with pytest.raises(HomeAssistantError):
+        # Test legacy notify service
+        await hass.services.async_call(
+            "notify",
+            service="tibber",
+            service_data={"message": "The message", "title": "A title"},
+            blocking=True,
+        )
+
     with pytest.raises(HomeAssistantError):
         # Test notify entity service
         await hass.services.async_call(
diff --git a/tests/components/tibber/test_repairs.py b/tests/components/tibber/test_repairs.py
new file mode 100644
index 00000000000..89e85e5f8e1
--- /dev/null
+++ b/tests/components/tibber/test_repairs.py
@@ -0,0 +1,66 @@
+"""Test repairs for the Tibber integration."""
+
+from http import HTTPStatus
+from unittest.mock import MagicMock
+
+from homeassistant.components.recorder import Recorder
+from homeassistant.components.repairs.websocket_api import (
+    RepairsFlowIndexView,
+    RepairsFlowResourceView,
+)
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import issue_registry as ir
+
+from tests.typing import ClientSessionGenerator
+
+
+async def test_repair_flow(
+    recorder_mock: Recorder,
+    hass: HomeAssistant,
+    issue_registry: ir.IssueRegistry,
+    mock_tibber_setup: MagicMock,
+    hass_client: ClientSessionGenerator,
+) -> None:
+    """Test the repair flow for the legacy Tibber notify service."""
+
+    # Test legacy notify service
+    service = "tibber"
+    service_data = {"message": "The message", "title": "A title"}
+    await hass.services.async_call("notify", service, service_data, blocking=True)
+    calls: MagicMock = mock_tibber_setup.send_notification
+
+    calls.assert_called_once_with(message="The message", title="A title")
+    calls.reset_mock()
+
+    http_client = await hass_client()
+    # Assert the issue is present
+    assert issue_registry.async_get_issue(
+        domain="notify",
+        issue_id=f"migrate_notify_tibber_{service}",
+    )
+    assert len(issue_registry.issues) == 1
+
+    url = RepairsFlowIndexView.url
+    resp = await http_client.post(
+        url, json={"handler": "notify", "issue_id": f"migrate_notify_tibber_{service}"}
+    )
+    assert resp.status == HTTPStatus.OK
+    data = await resp.json()
+
+    flow_id = data["flow_id"]
+    assert data["step_id"] == "confirm"
+
+    # Simulate the user confirming the repair flow
+    url = RepairsFlowResourceView.url.format(flow_id=flow_id)
+    resp = await http_client.post(url)
+    assert resp.status == HTTPStatus.OK
+    data = await resp.json()
+    assert data["type"] == "create_entry"
+    await hass.async_block_till_done()
+
+    # Assert the issue is no longer present
+    assert not issue_registry.async_get_issue(
+        domain="notify",
+        issue_id=f"migrate_notify_tibber_{service}",
+    )
+    assert len(issue_registry.issues) == 0
diff --git a/tests/components/tibber/test_services.py b/tests/components/tibber/test_services.py
index dc6f5d2789d..fe437e421d7 100644
--- a/tests/components/tibber/test_services.py
+++ b/tests/components/tibber/test_services.py
@@ -1,159 +1,163 @@
 """Test service for Tibber integration."""
+import asyncio
 import datetime as dt
 from unittest.mock import MagicMock
-from freezegun.api import FrozenDateTimeFactory
 import pytest
 from homeassistant.components.tibber.const import DOMAIN
-from homeassistant.components.tibber.services import PRICE_SERVICE_NAME
-from homeassistant.core import HomeAssistant
+from homeassistant.components.tibber.services import PRICE_SERVICE_NAME, __get_prices
+from homeassistant.core import ServiceCall
 from homeassistant.exceptions import ServiceValidationError
-START_TIME = dt.datetime.fromtimestamp(1615766400).replace(tzinfo=dt.UTC)
-
 def generate_mock_home_data():
     """Create mock data from the tibber connection."""
-    tomorrow = START_TIME + dt.timedelta(days=1)
+    today = remove_microseconds(dt.datetime.now())
+    tomorrow = remove_microseconds(today + dt.timedelta(days=1))
     mock_homes = [
         MagicMock(
             name="first_home",
-            price_total={
-                START_TIME.isoformat(): 0.36914,
-                (START_TIME + dt.timedelta(hours=1)).isoformat(): 0.36914,
-                tomorrow.isoformat(): 0.46914,
-                (tomorrow + dt.timedelta(hours=1)).isoformat(): 0.46914,
-            },
-            price_level={
-                START_TIME.isoformat(): "VERY_EXPENSIVE",
-                (START_TIME + dt.timedelta(hours=1)).isoformat(): "VERY_EXPENSIVE",
-                tomorrow.isoformat(): "VERY_EXPENSIVE",
-                (tomorrow + dt.timedelta(hours=1)).isoformat(): "VERY_EXPENSIVE",
+            info={
+                "viewer": {
+                    "home": {
+                        "currentSubscription": {
+                            "priceInfo": {
+                                "today": [
+                                    {
+                                        "startsAt": today.isoformat(),
+                                        "total": 0.46914,
+                                        "level": "VERY_EXPENSIVE",
+                                    },
+                                    {
+                                        "startsAt": (
+                                            today + dt.timedelta(hours=1)
+                                        ).isoformat(),
+                                        "total": 0.46914,
+                                        "level": "VERY_EXPENSIVE",
+                                    },
+                                ],
+                                "tomorrow": [
+                                    {
+                                        "startsAt": tomorrow.isoformat(),
+                                        "total": 0.46914,
+                                        "level": "VERY_EXPENSIVE",
+                                    },
+                                    {
+                                        "startsAt": (
tomorrow + dt.timedelta(hours=1) + ).isoformat(), + "total": 0.46914, + "level": "VERY_EXPENSIVE", + }, + ], + } + } + } + } }, ), MagicMock( name="second_home", - price_total={ - START_TIME.isoformat(): 0.36914, - (START_TIME + dt.timedelta(hours=1)).isoformat(): 0.36914, - tomorrow.isoformat(): 0.46914, - (tomorrow + dt.timedelta(hours=1)).isoformat(): 0.46914, - }, - price_level={ - START_TIME.isoformat(): "VERY_EXPENSIVE", - (START_TIME + dt.timedelta(hours=1)).isoformat(): "VERY_EXPENSIVE", - tomorrow.isoformat(): "VERY_EXPENSIVE", - (tomorrow + dt.timedelta(hours=1)).isoformat(): "VERY_EXPENSIVE", + info={ + "viewer": { + "home": { + "currentSubscription": { + "priceInfo": { + "today": [ + { + "startsAt": today.isoformat(), + "total": 0.46914, + "level": "VERY_EXPENSIVE", + }, + { + "startsAt": ( + today + dt.timedelta(hours=1) + ).isoformat(), + "total": 0.46914, + "level": "VERY_EXPENSIVE", + }, + ], + "tomorrow": [ + { + "startsAt": tomorrow.isoformat(), + "total": 0.46914, + "level": "VERY_EXPENSIVE", + }, + { + "startsAt": ( + tomorrow + dt.timedelta(hours=1) + ).isoformat(), + "total": 0.46914, + "level": "VERY_EXPENSIVE", + }, + ], + } + } + } + } }, ), ] - # set name again, as the name is special in mock objects - # see documentation: https://docs.python.org/3/library/unittest.mock.html#mock-names-and-the-name-attribute mock_homes[0].name = "first_home" mock_homes[1].name = "second_home" return mock_homes -@pytest.mark.parametrize( - "data", - [ - {}, - {"start": START_TIME.isoformat()}, - { - "start": START_TIME.isoformat(), - "end": (START_TIME + dt.timedelta(days=1)).isoformat(), - }, - ], -) -async def test_get_prices( - mock_tibber_setup: MagicMock, - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - data, -) -> None: - """Test get_prices with mock data.""" - freezer.move_to(START_TIME) - mock_tibber_setup.get_homes.return_value = generate_mock_home_data() - - result = await hass.services.async_call( - DOMAIN, PRICE_SERVICE_NAME, data, blocking=True, return_response=True - ) - await hass.async_block_till_done() - - assert result == { - "prices": { - "first_home": [ - { - "start_time": START_TIME.isoformat(), - "price": 0.36914, - "level": "VERY_EXPENSIVE", - }, - { - "start_time": (START_TIME + dt.timedelta(hours=1)).isoformat(), - "price": 0.36914, - "level": "VERY_EXPENSIVE", - }, - ], - "second_home": [ - { - "start_time": START_TIME.isoformat(), - "price": 0.36914, - "level": "VERY_EXPENSIVE", - }, - { - "start_time": (START_TIME + dt.timedelta(hours=1)).isoformat(), - "price": 0.36914, - "level": "VERY_EXPENSIVE", - }, - ], - } - } +def create_mock_tibber_connection(): + """Create a mock tibber connection.""" + tibber_connection = MagicMock() + tibber_connection.get_homes.return_value = generate_mock_home_data() + return tibber_connection -async def test_get_prices_start_tomorrow( - mock_tibber_setup: MagicMock, - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, -) -> None: - """Test get_prices with start date tomorrow.""" - freezer.move_to(START_TIME) - tomorrow = START_TIME + dt.timedelta(days=1) +def create_mock_hass(): + """Create a mock hass object.""" + mock_hass = MagicMock + mock_hass.data = {"tibber": create_mock_tibber_connection()} + return mock_hass - mock_tibber_setup.get_homes.return_value = generate_mock_home_data() - result = await hass.services.async_call( +def remove_microseconds(dt): + """Remove microseconds from a datetime object.""" + return dt.replace(microsecond=0) + + +async def test_get_prices(): + """Test __get_prices with mock 
data.""" + today = remove_microseconds(dt.datetime.now()) + tomorrow = remove_microseconds(dt.datetime.now() + dt.timedelta(days=1)) + call = ServiceCall( DOMAIN, PRICE_SERVICE_NAME, - {"start": tomorrow.isoformat()}, - blocking=True, - return_response=True, + {"start": today.date().isoformat(), "end": tomorrow.date().isoformat()}, ) - await hass.async_block_till_done() + + result = await __get_prices(call, hass=create_mock_hass()) assert result == { "prices": { "first_home": [ { - "start_time": tomorrow.isoformat(), + "start_time": today, "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": (tomorrow + dt.timedelta(hours=1)).isoformat(), + "start_time": today + dt.timedelta(hours=1), "price": 0.46914, "level": "VERY_EXPENSIVE", }, ], "second_home": [ { - "start_time": tomorrow.isoformat(), + "start_time": today, "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": (tomorrow + dt.timedelta(hours=1)).isoformat(), + "start_time": today + dt.timedelta(hours=1), "price": 0.46914, "level": "VERY_EXPENSIVE", }, @@ -162,58 +166,36 @@ async def test_get_prices_start_tomorrow( } -@pytest.mark.parametrize( - "start_time", - [ - START_TIME.isoformat(), - (START_TIME + dt.timedelta(hours=4)) - .replace(tzinfo=dt.timezone(dt.timedelta(hours=4))) - .isoformat(), - ], -) -async def test_get_prices_with_timezones( - mock_tibber_setup: MagicMock, - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - start_time: str, -) -> None: - """Test get_prices with timezone and without.""" - freezer.move_to(START_TIME) +async def test_get_prices_no_input(): + """Test __get_prices with no input.""" + today = remove_microseconds(dt.datetime.now()) + call = ServiceCall(DOMAIN, PRICE_SERVICE_NAME, {}) - mock_tibber_setup.get_homes.return_value = generate_mock_home_data() - - result = await hass.services.async_call( - DOMAIN, - PRICE_SERVICE_NAME, - {"start": start_time}, - blocking=True, - return_response=True, - ) - await hass.async_block_till_done() + result = await __get_prices(call, hass=create_mock_hass()) assert result == { "prices": { "first_home": [ { - "start_time": START_TIME.isoformat(), - "price": 0.36914, + "start_time": today, + "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": (START_TIME + dt.timedelta(hours=1)).isoformat(), - "price": 0.36914, + "start_time": today + dt.timedelta(hours=1), + "price": 0.46914, "level": "VERY_EXPENSIVE", }, ], "second_home": [ { - "start_time": START_TIME.isoformat(), - "price": 0.36914, + "start_time": today, + "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": (START_TIME + dt.timedelta(hours=1)).isoformat(), - "price": 0.36914, + "start_time": today + dt.timedelta(hours=1), + "price": 0.46914, "level": "VERY_EXPENSIVE", }, ], @@ -221,56 +203,52 @@ async def test_get_prices_with_timezones( } -@pytest.mark.parametrize( - "start_time", - [ - (START_TIME + dt.timedelta(hours=2)).isoformat(), - (START_TIME + dt.timedelta(hours=2)) - .astimezone(tz=dt.timezone(dt.timedelta(hours=5))) - .isoformat(), - (START_TIME + dt.timedelta(hours=2)) - .astimezone(tz=dt.timezone(dt.timedelta(hours=8))) - .isoformat(), - (START_TIME + dt.timedelta(hours=2)) - .astimezone(tz=dt.timezone(dt.timedelta(hours=-8))) - .isoformat(), - ], -) -async def test_get_prices_with_wrong_timezones( - mock_tibber_setup: MagicMock, - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - start_time: str, -) -> None: - """Test get_prices with incorrect time and/or timezone. 
We expect an empty list.""" - freezer.move_to(START_TIME) - tomorrow = START_TIME + dt.timedelta(days=1) - - mock_tibber_setup.get_homes.return_value = generate_mock_home_data() - - result = await hass.services.async_call( - DOMAIN, - PRICE_SERVICE_NAME, - {"start": start_time, "end": tomorrow.isoformat()}, - blocking=True, - return_response=True, +async def test_get_prices_start_tomorrow(): + """Test __get_prices with start date tomorrow.""" + tomorrow = remove_microseconds(dt.datetime.now() + dt.timedelta(days=1)) + call = ServiceCall( + DOMAIN, PRICE_SERVICE_NAME, {"start": tomorrow.date().isoformat()} ) - await hass.async_block_till_done() - assert result == {"prices": {"first_home": [], "second_home": []}} + result = await __get_prices(call, hass=create_mock_hass()) + + assert result == { + "prices": { + "first_home": [ + { + "start_time": tomorrow, + "price": 0.46914, + "level": "VERY_EXPENSIVE", + }, + { + "start_time": tomorrow + dt.timedelta(hours=1), + "price": 0.46914, + "level": "VERY_EXPENSIVE", + }, + ], + "second_home": [ + { + "start_time": tomorrow, + "price": 0.46914, + "level": "VERY_EXPENSIVE", + }, + { + "start_time": tomorrow + dt.timedelta(hours=1), + "price": 0.46914, + "level": "VERY_EXPENSIVE", + }, + ], + } + } -async def test_get_prices_invalid_input( - mock_tibber_setup: MagicMock, - hass: HomeAssistant, -) -> None: - """Test get_prices with invalid input.""" +async def test_get_prices_invalid_input(): + """Test __get_prices with invalid input.""" - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - DOMAIN, - PRICE_SERVICE_NAME, - {"start": "test"}, - blocking=True, - return_response=True, - ) + call = ServiceCall(DOMAIN, PRICE_SERVICE_NAME, {"start": "test"}) + task = asyncio.create_task(__get_prices(call, hass=create_mock_hass())) + + with pytest.raises(ServiceValidationError) as excinfo: + await task + + assert "Invalid datetime provided." 
in str(excinfo.value) diff --git a/tests/components/tile/conftest.py b/tests/components/tile/conftest.py index 01a711d9261..e3b55c49ae7 100644 --- a/tests/components/tile/conftest.py +++ b/tests/components/tile/conftest.py @@ -1,8 +1,6 @@ """Define test fixtures for Tile.""" -from collections.abc import Generator import json -from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest @@ -10,7 +8,6 @@ from pytile.tile import Tile from homeassistant.components.tile.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -19,7 +16,7 @@ TEST_USERNAME = "user@host.com" @pytest.fixture(name="api") -def api_fixture(data_tile_details: dict[str, Any]) -> Mock: +def api_fixture(hass, data_tile_details): """Define a pytile API object.""" tile = Tile(None, data_tile_details) tile.async_update = AsyncMock() @@ -32,9 +29,7 @@ def api_fixture(data_tile_details: dict[str, Any]) -> Mock: @pytest.fixture(name="config_entry") -def config_entry_fixture( - hass: HomeAssistant, config: dict[str, Any] -) -> MockConfigEntry: +def config_entry_fixture(hass, config): """Define a config entry fixture.""" entry = MockConfigEntry(domain=DOMAIN, unique_id=config[CONF_USERNAME], data=config) entry.add_to_hass(hass) @@ -42,7 +37,7 @@ def config_entry_fixture( @pytest.fixture(name="config") -def config_fixture() -> dict[str, Any]: +def config_fixture(): """Define a config entry data fixture.""" return { CONF_USERNAME: TEST_USERNAME, @@ -57,7 +52,7 @@ def data_tile_details_fixture(): @pytest.fixture(name="mock_pytile") -def mock_pytile_fixture(api: Mock) -> Generator[None]: +async def mock_pytile_fixture(api): """Define a fixture to patch pytile.""" with ( patch( @@ -69,9 +64,7 @@ def mock_pytile_fixture(api: Mock) -> Generator[None]: @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture( - hass: HomeAssistant, config_entry: MockConfigEntry, mock_pytile: None -) -> None: +async def setup_config_entry_fixture(hass, config_entry, mock_pytile): """Define a fixture to set up tile.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/tile/test_config_flow.py b/tests/components/tile/test_config_flow.py index 849be41d560..87fe976ca3f 100644 --- a/tests/components/tile/test_config_flow.py +++ b/tests/components/tile/test_config_flow.py @@ -6,15 +6,13 @@ import pytest from pytile.errors import InvalidAuthError, TileError from homeassistant.components.tile import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_PASSWORD, TEST_USERNAME -from tests.common import MockConfigEntry - @pytest.mark.parametrize( ("mock_login_response", "errors"), @@ -79,10 +77,12 @@ async def test_import_entry(hass: HomeAssistant, config, mock_pytile) -> None: async def test_step_reauth( - hass: HomeAssistant, config, config_entry: MockConfigEntry, setup_config_entry + hass: HomeAssistant, config, config_entry, setup_config_entry ) -> None: """Test that the reauth step works.""" - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, 
context={"source": SOURCE_REAUTH}, data=config + ) assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) diff --git a/tests/components/time_date/conftest.py b/tests/components/time_date/conftest.py index 7841b6d0b83..4bcaa887b6f 100644 --- a/tests/components/time_date/conftest.py +++ b/tests/components/time_date/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Time & Date integration tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/tod/test_binary_sensor.py b/tests/components/tod/test_binary_sensor.py index b4b6b13d8e3..c4b28b527cb 100644 --- a/tests/components/tod/test_binary_sensor.py +++ b/tests/components/tod/test_binary_sensor.py @@ -1,6 +1,6 @@ """Test Times of the Day Binary Sensor.""" -from datetime import datetime, timedelta, tzinfo +from datetime import datetime, timedelta from freezegun.api import FrozenDateTimeFactory import pytest @@ -16,13 +16,13 @@ from tests.common import assert_setup_component, async_fire_time_changed @pytest.fixture -def hass_time_zone() -> str: +def hass_time_zone(): """Return default hass timezone.""" return "US/Pacific" @pytest.fixture(autouse=True) -async def setup_fixture(hass: HomeAssistant, hass_time_zone: str) -> None: +async def setup_fixture(hass, hass_time_zone): """Set up things to be run when tests are started.""" hass.config.latitude = 50.27583 hass.config.longitude = 18.98583 @@ -30,7 +30,7 @@ async def setup_fixture(hass: HomeAssistant, hass_time_zone: str) -> None: @pytest.fixture -def hass_tz_info(hass: HomeAssistant) -> tzinfo | None: +def hass_tz_info(hass): """Return timezone info for the hass timezone.""" return dt_util.get_time_zone(hass.config.time_zone) diff --git a/tests/components/todo/__init__.py b/tests/components/todo/__init__.py index 0138e561fad..dfee74599cd 100644 --- a/tests/components/todo/__init__.py +++ b/tests/components/todo/__init__.py @@ -1,63 +1 @@ """Tests for the To-do integration.""" - -from homeassistant.components.todo import DOMAIN, TodoItem, TodoListEntity -from homeassistant.config_entries import ConfigEntry, ConfigFlow -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from tests.common import MockConfigEntry, MockPlatform, mock_platform - -TEST_DOMAIN = "test" - - -class MockFlow(ConfigFlow): - """Test flow.""" - - -class MockTodoListEntity(TodoListEntity): - """Test todo list entity.""" - - def __init__(self, items: list[TodoItem] | None = None) -> None: - """Initialize entity.""" - self._attr_todo_items = items or [] - - @property - def items(self) -> list[TodoItem]: - """Return the items in the To-do list.""" - return self._attr_todo_items - - async def async_create_todo_item(self, item: TodoItem) -> None: - """Add an item to the To-do list.""" - self._attr_todo_items.append(item) - - async def async_delete_todo_items(self, uids: list[str]) -> None: - """Delete an item in the To-do list.""" - self._attr_todo_items = [item for item in self.items if item.uid not in uids] - - -async def create_mock_platform( - hass: HomeAssistant, - entities: list[TodoListEntity], -) -> MockConfigEntry: - """Create a todo platform with the specified entities.""" - - async def async_setup_entry_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test event platform via config 
entry.""" - async_add_entities(entities) - - mock_platform( - hass, - f"{TEST_DOMAIN}.{DOMAIN}", - MockPlatform(async_setup_entry=async_setup_entry_platform), - ) - - config_entry = MockConfigEntry(domain=TEST_DOMAIN) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry diff --git a/tests/components/todo/conftest.py b/tests/components/todo/conftest.py deleted file mode 100644 index bcee60e1d96..00000000000 --- a/tests/components/todo/conftest.py +++ /dev/null @@ -1,92 +0,0 @@ -"""Fixtures for the todo component tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock - -import pytest - -from homeassistant.components.todo import ( - DOMAIN, - TodoItem, - TodoItemStatus, - TodoListEntity, - TodoListEntityFeature, -) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from . import TEST_DOMAIN, MockFlow, MockTodoListEntity - -from tests.common import MockModule, mock_config_flow, mock_integration, mock_platform - - -@pytest.fixture(autouse=True) -def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: - """Mock config flow.""" - mock_platform(hass, f"{TEST_DOMAIN}.config_flow") - - with mock_config_flow(TEST_DOMAIN, MockFlow): - yield - - -@pytest.fixture(autouse=True) -def mock_setup_integration(hass: HomeAssistant) -> None: - """Fixture to set up a mock integration.""" - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_unload_entry_init( - hass: HomeAssistant, - config_entry: ConfigEntry, - ) -> bool: - await hass.config_entries.async_unload_platforms(config_entry, [Platform.TODO]) - return True - - mock_platform(hass, f"{TEST_DOMAIN}.config_flow") - mock_integration( - hass, - MockModule( - TEST_DOMAIN, - async_setup_entry=async_setup_entry_init, - async_unload_entry=async_unload_entry_init, - ), - ) - - -@pytest.fixture(autouse=True) -async def set_time_zone(hass: HomeAssistant) -> None: - """Set the time zone for the tests that keesp UTC-6 all year round.""" - await hass.config.async_set_time_zone("America/Regina") - - -@pytest.fixture(name="test_entity_items") -def mock_test_entity_items() -> list[TodoItem]: - """Fixture that creates the items returned by the test entity.""" - return [ - TodoItem(summary="Item #1", uid="1", status=TodoItemStatus.NEEDS_ACTION), - TodoItem(summary="Item #2", uid="2", status=TodoItemStatus.COMPLETED), - ] - - -@pytest.fixture(name="test_entity") -def mock_test_entity(test_entity_items: list[TodoItem]) -> TodoListEntity: - """Fixture that creates a test TodoList entity with mock service calls.""" - entity1 = MockTodoListEntity(test_entity_items) - entity1.entity_id = "todo.entity1" - entity1._attr_supported_features = ( - TodoListEntityFeature.CREATE_TODO_ITEM - | TodoListEntityFeature.UPDATE_TODO_ITEM - | TodoListEntityFeature.DELETE_TODO_ITEM - | TodoListEntityFeature.MOVE_TODO_ITEM - ) - entity1.async_create_todo_item = AsyncMock(wraps=entity1.async_create_todo_item) - entity1.async_update_todo_item = AsyncMock() - entity1.async_delete_todo_items = AsyncMock(wraps=entity1.async_delete_todo_items) - entity1.async_move_todo_item = AsyncMock() - return entity1 diff --git a/tests/components/todo/test_init.py 
b/tests/components/todo/test_init.py index fd052a7f8a3..5999b4b9fbe 100644 --- a/tests/components/todo/test_init.py +++ b/tests/components/todo/test_init.py @@ -2,39 +2,42 @@ import datetime from typing import Any +from unittest.mock import AsyncMock import zoneinfo import pytest +from typing_extensions import Generator import voluptuous as vol from homeassistant.components import conversation from homeassistant.components.homeassistant.exposed_entities import async_expose_entity from homeassistant.components.todo import ( - ATTR_DESCRIPTION, - ATTR_DUE_DATE, - ATTR_DUE_DATETIME, - ATTR_ITEM, - ATTR_RENAME, - ATTR_STATUS, DOMAIN, TodoItem, TodoItemStatus, TodoListEntity, TodoListEntityFeature, - TodoServices, intent as todo_intent, ) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES +from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import intent +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.setup import async_setup_component -from . import MockTodoListEntity, create_mock_platform - +from tests.common import ( + MockConfigEntry, + MockModule, + MockPlatform, + mock_config_flow, + mock_integration, + mock_platform, +) from tests.typing import WebSocketGenerator +TEST_DOMAIN = "test" ITEM_1 = { "uid": "1", "summary": "Item #1", @@ -49,6 +52,130 @@ TEST_TIMEZONE = zoneinfo.ZoneInfo("America/Regina") TEST_OFFSET = "-06:00" +class MockFlow(ConfigFlow): + """Test flow.""" + + +class MockTodoListEntity(TodoListEntity): + """Test todo list entity.""" + + def __init__(self, items: list[TodoItem] | None = None) -> None: + """Initialize entity.""" + self._attr_todo_items = items or [] + + @property + def items(self) -> list[TodoItem]: + """Return the items in the To-do list.""" + return self._attr_todo_items + + async def async_create_todo_item(self, item: TodoItem) -> None: + """Add an item to the To-do list.""" + self._attr_todo_items.append(item) + + async def async_delete_todo_items(self, uids: list[str]) -> None: + """Delete an item in the To-do list.""" + self._attr_todo_items = [item for item in self.items if item.uid not in uids] + + +@pytest.fixture(autouse=True) +def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: + """Mock config flow.""" + mock_platform(hass, f"{TEST_DOMAIN}.config_flow") + + with mock_config_flow(TEST_DOMAIN, MockFlow): + yield + + +@pytest.fixture(autouse=True) +def mock_setup_integration(hass: HomeAssistant) -> None: + """Fixture to set up a mock integration.""" + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_unload_entry_init( + hass: HomeAssistant, + config_entry: ConfigEntry, + ) -> bool: + await hass.config_entries.async_unload_platforms(config_entry, [Platform.TODO]) + return True + + mock_platform(hass, f"{TEST_DOMAIN}.config_flow") + mock_integration( + hass, + MockModule( + TEST_DOMAIN, + async_setup_entry=async_setup_entry_init, + async_unload_entry=async_unload_entry_init, + ), + ) + + +@pytest.fixture(autouse=True) +async def set_time_zone(hass: HomeAssistant) -> None: + """Set the time zone for the 
tests that keesp UTC-6 all year round.""" + await hass.config.async_set_time_zone("America/Regina") + + +async def create_mock_platform( + hass: HomeAssistant, + entities: list[TodoListEntity], +) -> MockConfigEntry: + """Create a todo platform with the specified entities.""" + + async def async_setup_entry_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test event platform via config entry.""" + async_add_entities(entities) + + mock_platform( + hass, + f"{TEST_DOMAIN}.{DOMAIN}", + MockPlatform(async_setup_entry=async_setup_entry_platform), + ) + + config_entry = MockConfigEntry(domain=TEST_DOMAIN) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry + + +@pytest.fixture(name="test_entity_items") +def mock_test_entity_items() -> list[TodoItem]: + """Fixture that creates the items returned by the test entity.""" + return [ + TodoItem(summary="Item #1", uid="1", status=TodoItemStatus.NEEDS_ACTION), + TodoItem(summary="Item #2", uid="2", status=TodoItemStatus.COMPLETED), + ] + + +@pytest.fixture(name="test_entity") +def mock_test_entity(test_entity_items: list[TodoItem]) -> TodoListEntity: + """Fixture that creates a test TodoList entity with mock service calls.""" + entity1 = MockTodoListEntity(test_entity_items) + entity1.entity_id = "todo.entity1" + entity1._attr_supported_features = ( + TodoListEntityFeature.CREATE_TODO_ITEM + | TodoListEntityFeature.UPDATE_TODO_ITEM + | TodoListEntityFeature.DELETE_TODO_ITEM + | TodoListEntityFeature.MOVE_TODO_ITEM + ) + entity1.async_create_todo_item = AsyncMock(wraps=entity1.async_create_todo_item) + entity1.async_update_todo_item = AsyncMock() + entity1.async_delete_todo_items = AsyncMock(wraps=entity1.async_delete_todo_items) + entity1.async_move_todo_item = AsyncMock() + return entity1 + + async def test_unload_entry( hass: HomeAssistant, test_entity: TodoListEntity, @@ -103,11 +230,11 @@ async def test_list_todo_items( [ ({}, [ITEM_1, ITEM_2]), ( - {ATTR_STATUS: [TodoItemStatus.COMPLETED, TodoItemStatus.NEEDS_ACTION]}, + {"status": [TodoItemStatus.COMPLETED, TodoItemStatus.NEEDS_ACTION]}, [ITEM_1, ITEM_2], ), - ({ATTR_STATUS: [TodoItemStatus.NEEDS_ACTION]}, [ITEM_1]), - ({ATTR_STATUS: [TodoItemStatus.COMPLETED]}, [ITEM_2]), + ({"status": [TodoItemStatus.NEEDS_ACTION]}, [ITEM_1]), + ({"status": [TodoItemStatus.COMPLETED]}, [ITEM_2]), ], ) async def test_get_items_service( @@ -124,13 +251,13 @@ async def test_get_items_service( state = hass.states.get("todo.entity1") assert state assert state.state == "1" - assert state.attributes == {ATTR_SUPPORTED_FEATURES: 15} + assert state.attributes == {"supported_features": 15} result = await hass.services.async_call( DOMAIN, - TodoServices.GET_ITEMS, + "get_items", service_data, - target={ATTR_ENTITY_ID: "todo.entity1"}, + target={"entity_id": "todo.entity1"}, blocking=True, return_response=True, ) @@ -170,9 +297,9 @@ async def test_add_item_service( await hass.services.async_call( DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "New item"}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "add_item", + {"item": "New item"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -197,9 +324,9 @@ async def test_add_item_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "New item"}, - target={ATTR_ENTITY_ID: 
"todo.entity1"}, + "add_item", + {"item": "New item"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -208,21 +335,21 @@ async def test_add_item_service_raises( ("item_data", "expected_exception", "expected_error"), [ ({}, vol.Invalid, "required key not provided"), - ({ATTR_ITEM: ""}, vol.Invalid, "length of value must be at least 1"), + ({"item": ""}, vol.Invalid, "length of value must be at least 1"), ( - {ATTR_ITEM: "Submit forms", ATTR_DESCRIPTION: "Submit tax forms"}, + {"item": "Submit forms", "description": "Submit tax forms"}, ServiceValidationError, "does not support setting field: description", ), ( - {ATTR_ITEM: "Submit forms", ATTR_DUE_DATE: "2023-11-17"}, + {"item": "Submit forms", "due_date": "2023-11-17"}, ServiceValidationError, "does not support setting field: due_date", ), ( { - ATTR_ITEM: "Submit forms", - ATTR_DUE_DATETIME: f"2023-11-17T17:00:00{TEST_OFFSET}", + "item": "Submit forms", + "due_datetime": f"2023-11-17T17:00:00{TEST_OFFSET}", }, ServiceValidationError, "does not support setting field: due_datetime", @@ -243,9 +370,9 @@ async def test_add_item_service_invalid_input( with pytest.raises(expected_exception) as exc: await hass.services.async_call( DOMAIN, - TodoServices.ADD_ITEM, + "add_item", item_data, - target={ATTR_ENTITY_ID: "todo.entity1"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -257,7 +384,7 @@ async def test_add_item_service_invalid_input( [ ( TodoListEntityFeature.SET_DUE_DATE_ON_ITEM, - {ATTR_ITEM: "New item", ATTR_DUE_DATE: "2023-11-13"}, + {"item": "New item", "due_date": "2023-11-13"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -266,10 +393,7 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, - { - ATTR_ITEM: "New item", - ATTR_DUE_DATETIME: f"2023-11-13T17:00:00{TEST_OFFSET}", - }, + {"item": "New item", "due_datetime": f"2023-11-13T17:00:00{TEST_OFFSET}"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -278,7 +402,7 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, - {ATTR_ITEM: "New item", ATTR_DUE_DATETIME: "2023-11-13T17:00:00+00:00"}, + {"item": "New item", "due_datetime": "2023-11-13T17:00:00+00:00"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -287,7 +411,7 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, - {ATTR_ITEM: "New item", ATTR_DUE_DATETIME: "2023-11-13"}, + {"item": "New item", "due_datetime": "2023-11-13"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -296,7 +420,7 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DESCRIPTION_ON_ITEM, - {ATTR_ITEM: "New item", ATTR_DESCRIPTION: "Submit revised draft"}, + {"item": "New item", "description": "Submit revised draft"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -319,9 +443,9 @@ async def test_add_item_service_extended_fields( await hass.services.async_call( DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "New item", **item_data}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "add_item", + {"item": "New item", **item_data}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -341,9 +465,9 @@ async def test_update_todo_item_service_by_id( await hass.services.async_call( DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "1", ATTR_RENAME: "Updated item", ATTR_STATUS: "completed"}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "update_item", + 
{"item": "1", "rename": "Updated item", "status": "completed"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -366,9 +490,9 @@ async def test_update_todo_item_service_by_id_status_only( await hass.services.async_call( DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "1", ATTR_STATUS: "completed"}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "update_item", + {"item": "1", "status": "completed"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -391,9 +515,9 @@ async def test_update_todo_item_service_by_id_rename( await hass.services.async_call( DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "1", "rename": "Updated item"}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "update_item", + {"item": "1", "rename": "Updated item"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -416,9 +540,9 @@ async def test_update_todo_item_service_raises( await hass.services.async_call( DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "1", "rename": "Updated item", "status": "completed"}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "update_item", + {"item": "1", "rename": "Updated item", "status": "completed"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -426,9 +550,9 @@ async def test_update_todo_item_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "1", "rename": "Updated item", "status": "completed"}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "update_item", + {"item": "1", "rename": "Updated item", "status": "completed"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -443,9 +567,9 @@ async def test_update_todo_item_service_by_summary( await hass.services.async_call( DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "Item #1", "rename": "Something else", "status": "completed"}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "update_item", + {"item": "Item #1", "rename": "Something else", "status": "completed"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -468,9 +592,9 @@ async def test_update_todo_item_service_by_summary_only_status( await hass.services.async_call( DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "Item #1", "rename": "Something else"}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "update_item", + {"item": "Item #1", "rename": "Something else"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -494,9 +618,9 @@ async def test_update_todo_item_service_by_summary_not_found( with pytest.raises(ServiceValidationError, match="Unable to find"): await hass.services.async_call( DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "Item #7", "status": "completed"}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "update_item", + {"item": "Item #7", "status": "completed"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -528,7 +652,7 @@ async def test_update_item_service_invalid_input( DOMAIN, "update_item", item_data, - target={ATTR_ENTITY_ID: "todo.entity1"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -553,9 +677,9 @@ async def test_update_todo_item_field_unsupported( with pytest.raises(ServiceValidationError, match="does not support"): await hass.services.async_call( DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "1", **update_data}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "update_item", + {"item": "1", **update_data}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -609,9 +733,9 @@ async def test_update_todo_item_extended_fields( await 
hass.services.async_call( DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "1", **update_data}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "update_item", + {"item": "1", **update_data}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -699,9 +823,9 @@ async def test_update_todo_item_extended_fields_overwrite_existing_values( await hass.services.async_call( DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "1", **update_data}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "update_item", + {"item": "1", **update_data}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -721,9 +845,9 @@ async def test_remove_todo_item_service_by_id( await hass.services.async_call( DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: ["1", "2"]}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "remove_item", + {"item": ["1", "2"]}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -744,9 +868,9 @@ async def test_remove_todo_item_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: ["1", "2"]}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "remove_item", + {"item": ["1", "2"]}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -764,9 +888,9 @@ async def test_remove_todo_item_service_invalid_input( ): await hass.services.async_call( DOMAIN, - TodoServices.REMOVE_ITEM, + "remove_item", {}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -781,9 +905,9 @@ async def test_remove_todo_item_service_by_summary( await hass.services.async_call( DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: ["Item #1"]}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "remove_item", + {"item": ["Item #1"]}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -803,9 +927,9 @@ async def test_remove_todo_item_service_by_summary_not_found( with pytest.raises(ServiceValidationError, match="Unable to find"): await hass.services.async_call( DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: ["Item #7"]}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "remove_item", + {"item": ["Item #7"]}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -911,26 +1035,26 @@ async def test_move_todo_item_service_invalid_input( ("service_name", "payload"), [ ( - TodoServices.ADD_ITEM, + "add_item", { - ATTR_ITEM: "New item", + "item": "New item", }, ), ( - TodoServices.REMOVE_ITEM, + "remove_item", { - ATTR_ITEM: ["1"], + "item": ["1"], }, ), ( - TodoServices.UPDATE_ITEM, + "update_item", { - ATTR_ITEM: "1", - ATTR_RENAME: "Updated item", + "item": "1", + "rename": "Updated item", }, ), ( - TodoServices.REMOVE_COMPLETED_ITEMS, + "remove_completed_items", None, ), ], @@ -954,7 +1078,7 @@ async def test_unsupported_service( DOMAIN, service_name, payload, - target={ATTR_ENTITY_ID: "todo.entity1"}, + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -1007,17 +1131,14 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {ATTR_ITEM: {"value": " beer "}, "name": {"value": "list 1"}}, + {"item": {"value": "beer"}, "name": {"value": "list 1"}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE - assert response.success_results[0].name == "list 1" - assert response.success_results[0].type == intent.IntentResponseTargetType.ENTITY - assert response.success_results[0].id == entity1.entity_id assert len(entity1.items) == 1 assert len(entity2.items) == 0 - assert 
entity1.items[0].summary == "beer" # summary is trimmed + assert entity1.items[0].summary == "beer" assert entity1.items[0].status == TodoItemStatus.NEEDS_ACTION entity1.items.clear() @@ -1026,7 +1147,7 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {ATTR_ITEM: {"value": "cheese"}, "name": {"value": "List 2"}}, + {"item": {"value": "cheese"}, "name": {"value": "List 2"}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE @@ -1041,7 +1162,7 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {ATTR_ITEM: {"value": "wine"}, "name": {"value": "lIST 2"}}, + {"item": {"value": "wine"}, "name": {"value": "lIST 2"}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE @@ -1103,8 +1224,8 @@ async def test_remove_completed_items_service( await hass.services.async_call( DOMAIN, - TodoServices.REMOVE_COMPLETED_ITEMS, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "remove_completed_items", + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -1117,8 +1238,8 @@ async def test_remove_completed_items_service( # calling service multiple times will not call the entity method await hass.services.async_call( DOMAIN, - TodoServices.REMOVE_COMPLETED_ITEMS, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "remove_completed_items", + target={"entity_id": "todo.entity1"}, blocking=True, ) test_entity.async_delete_todo_items.assert_not_called() @@ -1136,8 +1257,8 @@ async def test_remove_completed_items_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - TodoServices.REMOVE_COMPLETED_ITEMS, - target={ATTR_ENTITY_ID: "todo.entity1"}, + "remove_completed_items", + target={"entity_id": "todo.entity1"}, blocking=True, ) @@ -1302,7 +1423,7 @@ async def test_list_todo_items_extended_fields( DOMAIN, "get_items", {}, - target={ATTR_ENTITY_ID: "todo.entity1"}, + target={"entity_id": "todo.entity1"}, blocking=True, return_response=True, ) diff --git a/tests/components/todoist/conftest.py b/tests/components/todoist/conftest.py index 4b2bfea2e30..386385a0ddb 100644 --- a/tests/components/todoist/conftest.py +++ b/tests/components/todoist/conftest.py @@ -1,13 +1,13 @@ """Common fixtures for the todoist tests.""" -from collections.abc import Generator from http import HTTPStatus from unittest.mock import AsyncMock, patch import pytest from requests.exceptions import HTTPError from requests.models import Response -from todoist_api_python.models import Collaborator, Due, Label, Project, Section, Task +from todoist_api_python.models import Collaborator, Due, Label, Project, Task +from typing_extensions import Generator from homeassistant.components.todoist import DOMAIN from homeassistant.const import CONF_TOKEN, Platform @@ -18,7 +18,6 @@ from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry PROJECT_ID = "project-id-1" -SECTION_ID = "section-id-1" SUMMARY = "A task" TOKEN = "some-token" TODAY = dt_util.now().strftime("%Y-%m-%d") @@ -99,14 +98,6 @@ def mock_api(tasks: list[Task]) -> AsyncMock: view_style="list", ) ] - api.get_sections.return_value = [ - Section( - id=SECTION_ID, - project_id=PROJECT_ID, - name="Section Name", - order=1, - ) - ] api.get_labels.return_value = [ Label(id="1", name="Label1", color="1", order=1, is_favorite=False) ] diff --git a/tests/components/todoist/test_calendar.py b/tests/components/todoist/test_calendar.py index 
071a14a70ae..d8123af3231 100644 --- a/tests/components/todoist/test_calendar.py +++ b/tests/components/todoist/test_calendar.py @@ -18,17 +18,15 @@ from homeassistant.components.todoist.const import ( DOMAIN, LABELS, PROJECT_NAME, - SECTION_NAME, SERVICE_NEW_TASK, ) from homeassistant.const import CONF_TOKEN, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from homeassistant.util import dt as dt_util -from .conftest import PROJECT_ID, SECTION_ID, SUMMARY +from .conftest import PROJECT_ID, SUMMARY from tests.typing import ClientSessionGenerator @@ -271,51 +269,6 @@ async def test_create_task_service_call(hass: HomeAssistant, api: AsyncMock) -> ) -async def test_create_task_service_call_raises( - hass: HomeAssistant, api: AsyncMock -) -> None: - """Test adding an item to an invalid project raises an error.""" - - with pytest.raises(ServiceValidationError, match="project_invalid"): - await hass.services.async_call( - DOMAIN, - SERVICE_NEW_TASK, - { - ASSIGNEE: "user", - CONTENT: "task", - LABELS: ["Label1"], - PROJECT_NAME: "Missing Project", - }, - blocking=True, - ) - - -async def test_create_task_service_call_with_section( - hass: HomeAssistant, api: AsyncMock -) -> None: - """Test api is called correctly when section is included.""" - await hass.services.async_call( - DOMAIN, - SERVICE_NEW_TASK, - { - ASSIGNEE: "user", - CONTENT: "task", - LABELS: ["Label1"], - PROJECT_NAME: "Name", - SECTION_NAME: "Section Name", - }, - ) - await hass.async_block_till_done() - - api.add_task.assert_called_with( - "task", - project_id=PROJECT_ID, - section_id=SECTION_ID, - labels=["Label1"], - assignee_id="1", - ) - - @pytest.mark.parametrize( ("due"), [ diff --git a/tests/components/todoist/test_todo.py b/tests/components/todoist/test_todo.py index 1c2da67fb02..2aabfcc5755 100644 --- a/tests/components/todoist/test_todo.py +++ b/tests/components/todoist/test_todo.py @@ -6,17 +6,8 @@ from unittest.mock import AsyncMock import pytest from todoist_api_python.models import Due, Task -from homeassistant.components.todo import ( - ATTR_DESCRIPTION, - ATTR_DUE_DATE, - ATTR_DUE_DATETIME, - ATTR_ITEM, - ATTR_RENAME, - ATTR_STATUS, - DOMAIN as TODO_DOMAIN, - TodoServices, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.components.todo import DOMAIN as TODO_DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity @@ -95,7 +86,7 @@ async def test_todo_item_state( ), ( [], - {ATTR_DUE_DATE: "2023-11-18"}, + {"due_date": "2023-11-18"}, [ make_api_task( id="task-id-1", @@ -114,7 +105,7 @@ async def test_todo_item_state( ), ( [], - {ATTR_DUE_DATETIME: "2023-11-18T06:30:00"}, + {"due_datetime": "2023-11-18T06:30:00"}, [ make_api_task( id="task-id-1", @@ -141,7 +132,7 @@ async def test_todo_item_state( ), ( [], - {ATTR_DESCRIPTION: "6-pack"}, + {"description": "6-pack"}, [ make_api_task( id="task-id-1", @@ -182,9 +173,9 @@ async def test_add_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "Soda", **item_data}, - target={ATTR_ENTITY_ID: "todo.name"}, + "add_item", + {"item": "Soda", **item_data}, + target={"entity_id": "todo.name"}, blocking=True, ) @@ -199,9 +190,9 @@ async def test_add_todo_list_item( result = await hass.services.async_call( 
TODO_DOMAIN, - TodoServices.GET_ITEMS, + "get_items", {}, - target={ATTR_ENTITY_ID: "todo.name"}, + target={"entity_id": "todo.name"}, blocking=True, return_response=True, ) @@ -232,9 +223,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "task-id-1", ATTR_STATUS: "completed"}, - target={ATTR_ENTITY_ID: "todo.name"}, + "update_item", + {"item": "task-id-1", "status": "completed"}, + target={"entity_id": "todo.name"}, blocking=True, ) assert api.close_task.called @@ -255,9 +246,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "task-id-1", ATTR_STATUS: "needs_action"}, - target={ATTR_ENTITY_ID: "todo.name"}, + "update_item", + {"item": "task-id-1", "status": "needs_action"}, + target={"entity_id": "todo.name"}, blocking=True, ) assert api.reopen_task.called @@ -283,7 +274,7 @@ async def test_update_todo_item_status( description="desc", ) ], - {ATTR_RENAME: "Milk"}, + {"rename": "Milk"}, [ make_api_task( id="task-id-1", @@ -307,7 +298,7 @@ async def test_update_todo_item_status( ), ( [make_api_task(id="task-id-1", content="Soda", is_completed=False)], - {ATTR_DUE_DATE: "2023-11-18"}, + {"due_date": "2023-11-18"}, [ make_api_task( id="task-id-1", @@ -331,7 +322,7 @@ async def test_update_todo_item_status( ), ( [make_api_task(id="task-id-1", content="Soda", is_completed=False)], - {ATTR_DUE_DATETIME: "2023-11-18T06:30:00"}, + {"due_datetime": "2023-11-18T06:30:00"}, [ make_api_task( id="task-id-1", @@ -360,7 +351,7 @@ async def test_update_todo_item_status( ), ( [make_api_task(id="task-id-1", content="Soda", is_completed=False)], - {ATTR_DESCRIPTION: "6-pack"}, + {"description": "6-pack"}, [ make_api_task( id="task-id-1", @@ -391,7 +382,7 @@ async def test_update_todo_item_status( is_completed=False, ) ], - {ATTR_DESCRIPTION: None}, + {"description": None}, [ make_api_task( id="task-id-1", @@ -424,7 +415,7 @@ async def test_update_todo_item_status( due=Due(date="2024-01-01", is_recurring=True, string="every day"), ) ], - {ATTR_DUE_DATE: "2024-02-01"}, + {"due_date": "2024-02-01"}, [ make_api_task( id="task-id-1", @@ -481,9 +472,9 @@ async def test_update_todo_items( await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "task-id-1", **update_data}, - target={ATTR_ENTITY_ID: "todo.name"}, + "update_item", + {"item": "task-id-1", **update_data}, + target={"entity_id": "todo.name"}, blocking=True, ) assert api.update_task.called @@ -493,9 +484,9 @@ async def test_update_todo_items( result = await hass.services.async_call( TODO_DOMAIN, - TodoServices.GET_ITEMS, + "get_items", {}, - target={ATTR_ENTITY_ID: "todo.name"}, + target={"entity_id": "todo.name"}, blocking=True, return_response=True, ) @@ -528,9 +519,9 @@ async def test_remove_todo_item( await hass.services.async_call( TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: ["task-id-1", "task-id-2"]}, - target={ATTR_ENTITY_ID: "todo.name"}, + "remove_item", + {"item": ["task-id-1", "task-id-2"]}, + target={"entity_id": "todo.name"}, blocking=True, ) assert api.delete_task.call_count == 2 @@ -584,9 +575,9 @@ async def test_subscribe( ] await hass.services.async_call( TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "Cheese", ATTR_RENAME: "Wine"}, - target={ATTR_ENTITY_ID: "todo.name"}, + "update_item", + {"item": "Cheese", "rename": "Wine"}, + target={"entity_id": "todo.name"}, blocking=True, ) diff --git 
a/tests/components/tolo/test_config_flow.py b/tests/components/tolo/test_config_flow.py index 73382944cf0..9dcca4b704f 100644 --- a/tests/components/tolo/test_config_flow.py +++ b/tests/components/tolo/test_config_flow.py @@ -31,7 +31,7 @@ def coordinator_toloclient() -> Mock: Throw exception to abort entry setup and prevent socket IO. Only testing config flow. """ with patch( - "homeassistant.components.tolo.coordinator.ToloClient", side_effect=Exception + "homeassistant.components.tolo.ToloClient", side_effect=Exception ) as toloclient: yield toloclient diff --git a/tests/components/tomato/test_device_tracker.py b/tests/components/tomato/test_device_tracker.py index f50d999548f..099a2c2b40a 100644 --- a/tests/components/tomato/test_device_tracker.py +++ b/tests/components/tomato/test_device_tracker.py @@ -7,7 +7,7 @@ import requests import requests_mock import voluptuous as vol -from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN +from homeassistant.components.device_tracker import DOMAIN import homeassistant.components.tomato.device_tracker as tomato from homeassistant.const import ( CONF_HOST, @@ -25,7 +25,7 @@ def mock_session_response(*args, **kwargs): """Mock data generation for session response.""" class MockSessionResponse: - def __init__(self, text, status_code) -> None: + def __init__(self, text, status_code): self.text = text self.status_code = status_code @@ -68,9 +68,9 @@ def mock_session_send(): def test_config_missing_optional_params(hass: HomeAssistant, mock_session_send) -> None: """Test the setup without optional parameters.""" config = { - DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( + DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "password", @@ -94,9 +94,9 @@ def test_config_missing_optional_params(hass: HomeAssistant, mock_session_send) def test_config_default_nonssl_port(hass: HomeAssistant, mock_session_send) -> None: """Test the setup without a default port set without ssl enabled.""" config = { - DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( + DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "password", @@ -113,9 +113,9 @@ def test_config_default_nonssl_port(hass: HomeAssistant, mock_session_send) -> N def test_config_default_ssl_port(hass: HomeAssistant, mock_session_send) -> None: """Test the setup without a default port set with ssl enabled.""" config = { - DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( + DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_SSL: True, CONF_USERNAME: "foo", @@ -135,9 +135,9 @@ def test_config_verify_ssl_but_no_ssl_enabled( ) -> None: """Test the setup with a string with ssl_verify but ssl not enabled.""" config = { - DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( + DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: False, @@ -169,9 +169,9 @@ def test_config_valid_verify_ssl_path(hass: HomeAssistant, mock_session_send) -> Representing the absolute path to a CA certificate bundle. 
""" config = { - DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( + DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, @@ -200,9 +200,9 @@ def test_config_valid_verify_ssl_path(hass: HomeAssistant, mock_session_send) -> def test_config_valid_verify_ssl_bool(hass: HomeAssistant, mock_session_send) -> None: """Test the setup with a bool for ssl_verify.""" config = { - DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( + DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, @@ -233,7 +233,7 @@ def test_config_errors() -> None: with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, # No Host, CONF_PORT: 1234, CONF_SSL: True, @@ -246,7 +246,7 @@ def test_config_errors() -> None: with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: -123456789, # Bad Port CONF_SSL: True, @@ -259,7 +259,7 @@ def test_config_errors() -> None: with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, @@ -272,7 +272,7 @@ def test_config_errors() -> None: with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, @@ -285,7 +285,7 @@ def test_config_errors() -> None: with pytest.raises(vol.Invalid): tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_PORT: 1234, CONF_SSL: True, @@ -301,9 +301,9 @@ def test_config_errors() -> None: def test_config_bad_credentials(hass: HomeAssistant, mock_exception_logger) -> None: """Test the setup with bad credentials.""" config = { - DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( + DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "i_am", CONF_PASSWORD: "an_imposter", @@ -324,9 +324,9 @@ def test_config_bad_credentials(hass: HomeAssistant, mock_exception_logger) -> N def test_bad_response(hass: HomeAssistant, mock_exception_logger) -> None: """Test the setup with bad response from router.""" config = { - DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( + DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", @@ -347,9 +347,9 @@ def test_bad_response(hass: HomeAssistant, mock_exception_logger) -> None: def test_scan_devices(hass: HomeAssistant, mock_exception_logger) -> None: """Test scanning for new devices.""" config = { - DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( + DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", @@ -366,9 +366,9 @@ def test_scan_devices(hass: HomeAssistant, mock_exception_logger) -> None: def test_bad_connection(hass: HomeAssistant, mock_exception_logger) -> None: """Test the router with a connection error.""" config = { - DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( 
+ DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", @@ -394,9 +394,9 @@ def test_bad_connection(hass: HomeAssistant, mock_exception_logger) -> None: def test_router_timeout(hass: HomeAssistant, mock_exception_logger) -> None: """Test the router with a timeout error.""" config = { - DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( + DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", @@ -422,9 +422,9 @@ def test_router_timeout(hass: HomeAssistant, mock_exception_logger) -> None: def test_get_device_name(hass: HomeAssistant, mock_exception_logger) -> None: """Test getting device names.""" config = { - DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( + DOMAIN: tomato.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", CONF_USERNAME: "foo", CONF_PASSWORD: "bar", diff --git a/tests/components/tomorrowio/snapshots/test_weather.ambr b/tests/components/tomorrowio/snapshots/test_weather.ambr index 6278b50b7f7..fe65925e4c7 100644 --- a/tests/components/tomorrowio/snapshots/test_weather.ambr +++ b/tests/components/tomorrowio/snapshots/test_weather.ambr @@ -735,6 +735,1126 @@ }), ]) # --- +# name: test_v4_forecast_service + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T11:00:00+00:00', + 'dew_point': 12.8, + 'humidity': 58, + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.9, + 'templow': 26.1, + 'wind_bearing': 239.6, + 'wind_speed': 34.16, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 49.4, + 'templow': 26.3, + 'wind_bearing': 262.82, + 'wind_speed': 26.06, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-09T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 67.0, + 'templow': 31.5, + 'wind_bearing': 229.3, + 'wind_speed': 25.38, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-10T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 65.3, + 'templow': 37.3, + 'wind_bearing': 149.91, + 'wind_speed': 38.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-11T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 66.2, + 'templow': 48.3, + 'wind_bearing': 210.45, + 'wind_speed': 56.48, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-03-12T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 67.9, + 'templow': 53.8, + 'wind_bearing': 217.98, + 'wind_speed': 44.28, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-13T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 54.5, + 'templow': 42.9, + 'wind_bearing': 58.79, + 'wind_speed': 34.99, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-14T10:00:00+00:00', + 'precipitation': 0.94, + 'precipitation_probability': 95, + 'temperature': 42.9, + 'templow': 33.4, + 'wind_bearing': 70.25, + 'wind_speed': 58.5, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-15T10:00:00+00:00', + 'precipitation': 0.06, + 'precipitation_probability': 55, + 'temperature': 43.7, + 'templow': 29.4, + 
'wind_bearing': 84.47, + 'wind_speed': 57.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-16T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 43.0, + 'templow': 29.1, + 'wind_bearing': 103.85, + 'wind_speed': 24.16, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-17T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 52.4, + 'templow': 34.3, + 'wind_bearing': 145.41, + 'wind_speed': 26.17, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-18T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 54.1, + 'templow': 41.3, + 'wind_bearing': 62.99, + 'wind_speed': 23.69, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-03-19T10:00:00+00:00', + 'precipitation': 0.12, + 'precipitation_probability': 55, + 'temperature': 48.9, + 'templow': 39.4, + 'wind_bearing': 68.54, + 'wind_speed': 50.08, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-20T10:00:00+00:00', + 'precipitation': 0.05, + 'precipitation_probability': 33, + 'temperature': 40.1, + 'templow': 35.1, + 'wind_bearing': 56.98, + 'wind_speed': 62.46, + }), + ]), + }) +# --- +# name: test_v4_forecast_service.1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T17:48:00+00:00', + 'dew_point': 12.8, + 'humidity': 58, + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.1, + 'wind_bearing': 315.14, + 'wind_speed': 33.59, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T18:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.8, + 'wind_bearing': 321.71, + 'wind_speed': 31.82, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T19:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.8, + 'wind_bearing': 323.38, + 'wind_speed': 32.04, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T20:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.3, + 'wind_bearing': 318.43, + 'wind_speed': 33.73, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T21:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.6, + 'wind_bearing': 320.9, + 'wind_speed': 28.98, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T22:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 41.9, + 'wind_bearing': 322.11, + 'wind_speed': 15.7, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T23:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 38.9, + 'wind_bearing': 295.94, + 'wind_speed': 17.78, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-08T00:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 36.2, + 'wind_bearing': 11.94, + 'wind_speed': 20.12, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-08T01:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 34.3, + 'wind_bearing': 13.68, + 'wind_speed': 20.05, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T02:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 32.9, + 'wind_bearing': 14.93, + 'wind_speed': 19.48, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T03:48:00+00:00', + 
'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 31.9, + 'wind_bearing': 26.07, + 'wind_speed': 16.6, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T04:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 29.2, + 'wind_bearing': 51.27, + 'wind_speed': 9.32, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T05:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 27.4, + 'wind_bearing': 343.25, + 'wind_speed': 11.92, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T06:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.7, + 'wind_bearing': 341.46, + 'wind_speed': 15.37, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T07:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.4, + 'wind_bearing': 322.34, + 'wind_speed': 12.71, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T08:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.1, + 'wind_bearing': 294.69, + 'wind_speed': 13.14, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T09:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 30.1, + 'wind_bearing': 325.32, + 'wind_speed': 11.52, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T10:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 31.0, + 'wind_bearing': 322.27, + 'wind_speed': 10.22, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T11:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 27.2, + 'wind_bearing': 310.14, + 'wind_speed': 20.12, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T12:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 29.2, + 'wind_bearing': 324.8, + 'wind_speed': 25.38, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-03-08T13:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 33.2, + 'wind_bearing': 335.16, + 'wind_speed': 23.26, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T14:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 37.0, + 'wind_bearing': 324.49, + 'wind_speed': 21.17, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T15:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 40.0, + 'wind_bearing': 310.68, + 'wind_speed': 19.98, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-03-08T16:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 42.4, + 'wind_bearing': 304.18, + 'wind_speed': 19.66, + }), + ]), + }) +# --- +# name: test_v4_forecast_service[forecast] + dict({ + 'weather.tomorrow_io_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T11:00:00+00:00', + 'dew_point': 12.8, + 'humidity': 58, + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.9, + 'templow': 26.1, + 'wind_bearing': 239.6, + 'wind_speed': 34.16, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 49.4, + 'templow': 26.3, + 'wind_bearing': 
262.82, + 'wind_speed': 26.06, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-09T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 67.0, + 'templow': 31.5, + 'wind_bearing': 229.3, + 'wind_speed': 25.38, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-10T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 65.3, + 'templow': 37.3, + 'wind_bearing': 149.91, + 'wind_speed': 38.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-11T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 66.2, + 'templow': 48.3, + 'wind_bearing': 210.45, + 'wind_speed': 56.48, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-03-12T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 67.9, + 'templow': 53.8, + 'wind_bearing': 217.98, + 'wind_speed': 44.28, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-13T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 54.5, + 'templow': 42.9, + 'wind_bearing': 58.79, + 'wind_speed': 34.99, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-14T10:00:00+00:00', + 'precipitation': 0.94, + 'precipitation_probability': 95, + 'temperature': 42.9, + 'templow': 33.4, + 'wind_bearing': 70.25, + 'wind_speed': 58.5, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-15T10:00:00+00:00', + 'precipitation': 0.06, + 'precipitation_probability': 55, + 'temperature': 43.7, + 'templow': 29.4, + 'wind_bearing': 84.47, + 'wind_speed': 57.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-16T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 43.0, + 'templow': 29.1, + 'wind_bearing': 103.85, + 'wind_speed': 24.16, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-17T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 52.4, + 'templow': 34.3, + 'wind_bearing': 145.41, + 'wind_speed': 26.17, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-18T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 54.1, + 'templow': 41.3, + 'wind_bearing': 62.99, + 'wind_speed': 23.69, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-03-19T10:00:00+00:00', + 'precipitation': 0.12, + 'precipitation_probability': 55, + 'temperature': 48.9, + 'templow': 39.4, + 'wind_bearing': 68.54, + 'wind_speed': 50.08, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-20T10:00:00+00:00', + 'precipitation': 0.05, + 'precipitation_probability': 33, + 'temperature': 40.1, + 'templow': 35.1, + 'wind_bearing': 56.98, + 'wind_speed': 62.46, + }), + ]), + }), + }) +# --- +# name: test_v4_forecast_service[forecast].1 + dict({ + 'weather.tomorrow_io_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T17:48:00+00:00', + 'dew_point': 12.8, + 'humidity': 58, + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.1, + 'wind_bearing': 315.14, + 'wind_speed': 33.59, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T18:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.8, + 'wind_bearing': 321.71, + 'wind_speed': 31.82, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T19:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 
'temperature': 45.8, + 'wind_bearing': 323.38, + 'wind_speed': 32.04, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T20:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.3, + 'wind_bearing': 318.43, + 'wind_speed': 33.73, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T21:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.6, + 'wind_bearing': 320.9, + 'wind_speed': 28.98, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T22:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 41.9, + 'wind_bearing': 322.11, + 'wind_speed': 15.7, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T23:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 38.9, + 'wind_bearing': 295.94, + 'wind_speed': 17.78, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-08T00:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 36.2, + 'wind_bearing': 11.94, + 'wind_speed': 20.12, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-08T01:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 34.3, + 'wind_bearing': 13.68, + 'wind_speed': 20.05, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T02:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 32.9, + 'wind_bearing': 14.93, + 'wind_speed': 19.48, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T03:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 31.9, + 'wind_bearing': 26.07, + 'wind_speed': 16.6, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T04:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 29.2, + 'wind_bearing': 51.27, + 'wind_speed': 9.32, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T05:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 27.4, + 'wind_bearing': 343.25, + 'wind_speed': 11.92, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T06:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.7, + 'wind_bearing': 341.46, + 'wind_speed': 15.37, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T07:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.4, + 'wind_bearing': 322.34, + 'wind_speed': 12.71, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T08:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.1, + 'wind_bearing': 294.69, + 'wind_speed': 13.14, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T09:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 30.1, + 'wind_bearing': 325.32, + 'wind_speed': 11.52, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T10:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 31.0, + 'wind_bearing': 322.27, + 'wind_speed': 10.22, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T11:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 27.2, + 'wind_bearing': 310.14, + 'wind_speed': 20.12, + }), + dict({ + 'condition': 
'clear-night', + 'datetime': '2021-03-08T12:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 29.2, + 'wind_bearing': 324.8, + 'wind_speed': 25.38, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-03-08T13:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 33.2, + 'wind_bearing': 335.16, + 'wind_speed': 23.26, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T14:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 37.0, + 'wind_bearing': 324.49, + 'wind_speed': 21.17, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T15:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 40.0, + 'wind_bearing': 310.68, + 'wind_speed': 19.98, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-03-08T16:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 42.4, + 'wind_bearing': 304.18, + 'wind_speed': 19.66, + }), + ]), + }), + }) +# --- +# name: test_v4_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T11:00:00+00:00', + 'dew_point': 12.8, + 'humidity': 58, + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.9, + 'templow': 26.1, + 'wind_bearing': 239.6, + 'wind_speed': 34.16, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 49.4, + 'templow': 26.3, + 'wind_bearing': 262.82, + 'wind_speed': 26.06, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-09T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 67.0, + 'templow': 31.5, + 'wind_bearing': 229.3, + 'wind_speed': 25.38, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-10T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 65.3, + 'templow': 37.3, + 'wind_bearing': 149.91, + 'wind_speed': 38.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-11T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 66.2, + 'templow': 48.3, + 'wind_bearing': 210.45, + 'wind_speed': 56.48, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-03-12T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 67.9, + 'templow': 53.8, + 'wind_bearing': 217.98, + 'wind_speed': 44.28, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-13T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 54.5, + 'templow': 42.9, + 'wind_bearing': 58.79, + 'wind_speed': 34.99, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-14T10:00:00+00:00', + 'precipitation': 0.94, + 'precipitation_probability': 95, + 'temperature': 42.9, + 'templow': 33.4, + 'wind_bearing': 70.25, + 'wind_speed': 58.5, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-15T10:00:00+00:00', + 'precipitation': 0.06, + 'precipitation_probability': 55, + 'temperature': 43.7, + 'templow': 29.4, + 'wind_bearing': 84.47, + 'wind_speed': 57.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-16T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 43.0, + 'templow': 29.1, + 'wind_bearing': 103.85, + 'wind_speed': 24.16, + }), + dict({ + 'condition': 'cloudy', + 
'datetime': '2021-03-17T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 52.4, + 'templow': 34.3, + 'wind_bearing': 145.41, + 'wind_speed': 26.17, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-18T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 54.1, + 'templow': 41.3, + 'wind_bearing': 62.99, + 'wind_speed': 23.69, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-03-19T10:00:00+00:00', + 'precipitation': 0.12, + 'precipitation_probability': 55, + 'temperature': 48.9, + 'templow': 39.4, + 'wind_bearing': 68.54, + 'wind_speed': 50.08, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-20T10:00:00+00:00', + 'precipitation': 0.05, + 'precipitation_probability': 33, + 'temperature': 40.1, + 'templow': 35.1, + 'wind_bearing': 56.98, + 'wind_speed': 62.46, + }), + ]), + }) +# --- +# name: test_v4_forecast_service[get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T17:48:00+00:00', + 'dew_point': 12.8, + 'humidity': 58, + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.1, + 'wind_bearing': 315.14, + 'wind_speed': 33.59, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T18:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.8, + 'wind_bearing': 321.71, + 'wind_speed': 31.82, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T19:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.8, + 'wind_bearing': 323.38, + 'wind_speed': 32.04, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T20:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.3, + 'wind_bearing': 318.43, + 'wind_speed': 33.73, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T21:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.6, + 'wind_bearing': 320.9, + 'wind_speed': 28.98, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T22:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 41.9, + 'wind_bearing': 322.11, + 'wind_speed': 15.7, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T23:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 38.9, + 'wind_bearing': 295.94, + 'wind_speed': 17.78, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-08T00:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 36.2, + 'wind_bearing': 11.94, + 'wind_speed': 20.12, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-08T01:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 34.3, + 'wind_bearing': 13.68, + 'wind_speed': 20.05, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T02:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 32.9, + 'wind_bearing': 14.93, + 'wind_speed': 19.48, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T03:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 31.9, + 'wind_bearing': 26.07, + 'wind_speed': 16.6, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T04:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 29.2, + 'wind_bearing': 51.27, 
+ 'wind_speed': 9.32, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T05:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 27.4, + 'wind_bearing': 343.25, + 'wind_speed': 11.92, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T06:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.7, + 'wind_bearing': 341.46, + 'wind_speed': 15.37, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T07:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.4, + 'wind_bearing': 322.34, + 'wind_speed': 12.71, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T08:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.1, + 'wind_bearing': 294.69, + 'wind_speed': 13.14, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T09:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 30.1, + 'wind_bearing': 325.32, + 'wind_speed': 11.52, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T10:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 31.0, + 'wind_bearing': 322.27, + 'wind_speed': 10.22, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T11:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 27.2, + 'wind_bearing': 310.14, + 'wind_speed': 20.12, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T12:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 29.2, + 'wind_bearing': 324.8, + 'wind_speed': 25.38, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-03-08T13:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 33.2, + 'wind_bearing': 335.16, + 'wind_speed': 23.26, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T14:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 37.0, + 'wind_bearing': 324.49, + 'wind_speed': 21.17, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T15:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 40.0, + 'wind_bearing': 310.68, + 'wind_speed': 19.98, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-03-08T16:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 42.4, + 'wind_bearing': 304.18, + 'wind_speed': 19.66, + }), + ]), + }) +# --- # name: test_v4_forecast_service[get_forecasts] dict({ 'weather.tomorrow_io_daily': dict({ diff --git a/tests/components/toon/test_config_flow.py b/tests/components/toon/test_config_flow.py index 7855379db5b..588924b416f 100644 --- a/tests/components/toon/test_config_flow.py +++ b/tests/components/toon/test_config_flow.py @@ -6,11 +6,11 @@ from unittest.mock import patch import pytest from toonapi import Agreement, ToonError -from homeassistant.components.toon.const import CONF_AGREEMENT, DOMAIN +from homeassistant.components.toon.const import CONF_AGREEMENT, CONF_MIGRATE, DOMAIN +from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config 
from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.setup import async_setup_component @@ -20,7 +20,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator -async def setup_component(hass: HomeAssistant) -> None: +async def setup_component(hass): """Set up Toon component.""" await async_process_ha_core_config( hass, @@ -249,10 +249,6 @@ async def test_agreement_already_set_up( assert result3["reason"] == "already_configured" -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.toon.config.abort.connection_error"], -) @pytest.mark.usefixtures("current_request_with_host") async def test_toon_abort( hass: HomeAssistant, @@ -328,8 +324,7 @@ async def test_import_migration( flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 - flow = hass.config_entries.flow._progress[flows[0]["flow_id"]] - assert flow.migrate_entry == old_entry.entry_id + assert flows[0]["context"][CONF_MIGRATE] == old_entry.entry_id state = config_entry_oauth2_flow._encode_jwt( hass, diff --git a/tests/components/totalconnect/common.py b/tests/components/totalconnect/common.py index 828cad71e07..1ceb893112c 100644 --- a/tests/components/totalconnect/common.py +++ b/tests/components/totalconnect/common.py @@ -1,23 +1,16 @@ """Common methods used across tests for TotalConnect.""" -from typing import Any from unittest.mock import patch from total_connect_client import ArmingState, ResultCode, ZoneStatus, ZoneType -from homeassistant.components.totalconnect.const import ( - AUTO_BYPASS, - CODE_REQUIRED, - CONF_USERCODES, - DOMAIN, -) +from homeassistant.components.totalconnect.const import CONF_USERCODES, DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -LOCATION_ID = 123456 +LOCATION_ID = "123456" DEVICE_INFO_BASIC_1 = { "DeviceID": "987654", @@ -347,7 +340,7 @@ RESPONSE_ZONE_BYPASS_FAILURE = { USERNAME = "username@me.com" PASSWORD = "password" -USERCODES = {LOCATION_ID: "7890"} +USERCODES = {123456: "7890"} CONFIG_DATA = { CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD, @@ -355,9 +348,6 @@ CONFIG_DATA = { } CONFIG_DATA_NO_USERCODES = {CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD} -OPTIONS_DATA = {AUTO_BYPASS: False, CODE_REQUIRED: False} -OPTIONS_DATA_CODE_REQUIRED = {AUTO_BYPASS: False, CODE_REQUIRED: True} - PARTITION_DETAILS_1 = { "PartitionID": 1, "ArmingState": ArmingState.DISARMED.value, @@ -404,19 +394,10 @@ TOTALCONNECT_REQUEST = ( ) -async def setup_platform( - hass: HomeAssistant, platform: Any, code_required: bool = False -) -> MockConfigEntry: +async def setup_platform(hass, platform): """Set up the TotalConnect platform.""" # first set up a config entry and add it to hass - if code_required: - mock_entry = MockConfigEntry( - domain=DOMAIN, data=CONFIG_DATA, options=OPTIONS_DATA_CODE_REQUIRED - ) - else: - mock_entry = MockConfigEntry( - domain=DOMAIN, data=CONFIG_DATA, options=OPTIONS_DATA - ) + mock_entry = MockConfigEntry(domain=DOMAIN, data=CONFIG_DATA) mock_entry.add_to_hass(hass) responses = [ @@ -441,10 +422,10 @@ async def setup_platform( return mock_entry -async def init_integration(hass: HomeAssistant) -> MockConfigEntry: +async def init_integration(hass): """Set up the TotalConnect integration.""" # first set up a config entry and add it to 
hass - mock_entry = MockConfigEntry(domain=DOMAIN, data=CONFIG_DATA, options=OPTIONS_DATA) + mock_entry = MockConfigEntry(domain=DOMAIN, data=CONFIG_DATA) mock_entry.add_to_hass(hass) responses = [ diff --git a/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr b/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr index ef7cb386b33..0b8b8bb79ac 100644 --- a/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr +++ b/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr @@ -41,7 +41,7 @@ 'code_format': None, 'cover_tampered': False, 'friendly_name': 'test', - 'location_id': 123456, + 'location_id': '123456', 'location_name': 'test', 'low_battery': False, 'partition': 1, @@ -99,7 +99,7 @@ 'code_format': None, 'cover_tampered': False, 'friendly_name': 'test Partition 2', - 'location_id': 123456, + 'location_id': '123456', 'location_name': 'test partition 2', 'low_battery': False, 'partition': 2, diff --git a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr index 1eccff1dfc3..54089c6f192 100644 --- a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr +++ b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr @@ -37,7 +37,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'smoke', 'friendly_name': 'Fire', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '2', }), @@ -87,7 +87,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Fire Battery', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '2', }), @@ -137,7 +137,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Fire Tamper', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '2', }), @@ -187,7 +187,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'gas', 'friendly_name': 'Gas', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '3', }), @@ -237,7 +237,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Gas Battery', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '3', }), @@ -287,7 +287,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Gas Tamper', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '3', }), @@ -337,7 +337,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'safety', 'friendly_name': 'Medical', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '5', }), @@ -387,7 +387,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'motion', 'friendly_name': 'Motion', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '4', }), @@ -437,7 +437,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Motion Battery', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '4', }), @@ -487,7 +487,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Motion Tamper', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '4', }), @@ -537,7 +537,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'door', 'friendly_name': 'Security', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '1', }), @@ -587,7 +587,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Security Battery', - 
'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '1', }), @@ -637,7 +637,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Security Tamper', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '1', }), @@ -687,7 +687,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'problem', 'friendly_name': 'Temperature', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': 7, }), @@ -737,7 +737,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Temperature Battery', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': 7, }), @@ -787,7 +787,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Temperature Tamper', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': 7, }), @@ -837,7 +837,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'test Battery', - 'location_id': 123456, + 'location_id': '123456', }), 'context': , 'entity_id': 'binary_sensor.test_battery', @@ -847,101 +847,6 @@ 'state': 'off', }) # --- -# name: test_entity_registry[binary_sensor.test_carbon_monoxide-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.test_carbon_monoxide', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Carbon monoxide', - 'platform': 'totalconnect', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '123456_carbon_monoxide', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_registry[binary_sensor.test_carbon_monoxide-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'carbon_monoxide', - 'friendly_name': 'test Carbon monoxide', - 'location_id': 123456, - }), - 'context': , - 'entity_id': 'binary_sensor.test_carbon_monoxide', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_entity_registry[binary_sensor.test_police_emergency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.test_police_emergency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Police emergency', - 'platform': 'totalconnect', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'police', - 'unique_id': '123456_police', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_registry[binary_sensor.test_police_emergency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'test Police emergency', - 'location_id': 123456, - }), - 'context': , - 'entity_id': 'binary_sensor.test_police_emergency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: 
test_entity_registry[binary_sensor.test_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -980,7 +885,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'test Power', - 'location_id': 123456, + 'location_id': '123456', }), 'context': , 'entity_id': 'binary_sensor.test_power', @@ -990,54 +895,6 @@ 'state': 'off', }) # --- -# name: test_entity_registry[binary_sensor.test_smoke-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.test_smoke', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Smoke', - 'platform': 'totalconnect', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '123456_smoke', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_registry[binary_sensor.test_smoke-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'smoke', - 'friendly_name': 'test Smoke', - 'location_id': 123456, - }), - 'context': , - 'entity_id': 'binary_sensor.test_smoke', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_entity_registry[binary_sensor.test_tamper-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1076,7 +933,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'test Tamper', - 'location_id': 123456, + 'location_id': '123456', }), 'context': , 'entity_id': 'binary_sensor.test_tamper', @@ -1124,7 +981,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'door', 'friendly_name': 'Unknown', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '6', }), @@ -1174,7 +1031,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Unknown Battery', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '6', }), @@ -1224,7 +1081,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Unknown Tamper', - 'location_id': 123456, + 'location_id': '123456', 'partition': '1', 'zone_id': '6', }), diff --git a/tests/components/totalconnect/test_alarm_control_panel.py b/tests/components/totalconnect/test_alarm_control_panel.py index bc76f7243ca..a4f8333e8a8 100644 --- a/tests/components/totalconnect/test_alarm_control_panel.py +++ b/tests/components/totalconnect/test_alarm_control_panel.py @@ -3,7 +3,6 @@ from datetime import timedelta from unittest.mock import patch -from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion from total_connect_client.exceptions import ( @@ -12,10 +11,7 @@ from total_connect_client.exceptions import ( TotalConnectError, ) -from homeassistant.components.alarm_control_panel import ( - DOMAIN as ALARM_DOMAIN, - AlarmControlPanelState, -) +from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN from homeassistant.components.totalconnect.alarm_control_panel import ( SERVICE_ALARM_ARM_AWAY_INSTANT, SERVICE_ALARM_ARM_HOME_INSTANT, @@ -29,15 +25,23 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_CUSTOM_BYPASS, + STATE_ALARM_ARMED_HOME, + 
STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMED, + STATE_ALARM_DISARMING, + STATE_ALARM_TRIGGERED, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_component import async_update_entity +from homeassistant.util import dt as dt_util from .common import ( - LOCATION_ID, RESPONSE_ARM_FAILURE, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_AWAY, @@ -56,7 +60,6 @@ from .common import ( RESPONSE_UNKNOWN, RESPONSE_USER_CODE_INVALID, TOTALCONNECT_REQUEST, - USERCODES, setup_platform, ) @@ -86,17 +89,15 @@ async def test_attributes( assert mock_request.call_count == 1 -async def test_arm_home_success( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: +async def test_arm_home_success(hass: HomeAssistant) -> None: """Test arm home method success.""" responses = [RESPONSE_DISARMED, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_STAY] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED - assert hass.states.get(ENTITY_ID_2).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID_2).state == STATE_ALARM_DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -104,13 +105,12 @@ async def test_arm_home_success( ) assert mock_request.call_count == 2 - freezer.tick(DELAY) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + DELAY) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_HOME + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_HOME # second partition should not be armed - assert hass.states.get(ENTITY_ID_2).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID_2).state == STATE_ALARM_DISARMED async def test_arm_home_failure(hass: HomeAssistant) -> None: @@ -120,7 +120,7 @@ async def test_arm_home_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -128,34 +128,32 @@ async def test_arm_home_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_ARM_HOME, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "Failed to arm home test" - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert f"{err.value}" == "TotalConnect failed to arm home test." 
+ assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 2 - # config entry usercode is invalid + # usercode is invalid with pytest.raises(HomeAssistantError) as err: await hass.services.async_call( ALARM_DOMAIN, SERVICE_ALARM_ARM_HOME, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "Usercode is invalid, did not arm home" - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert f"{err.value}" == "TotalConnect usercode is invalid. Did not arm home" + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 -async def test_arm_home_instant_success( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: +async def test_arm_home_instant_success(hass: HomeAssistant) -> None: """Test arm home instant method success.""" responses = [RESPONSE_DISARMED, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_STAY] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED - assert hass.states.get(ENTITY_ID_2).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID_2).state == STATE_ALARM_DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -163,11 +161,10 @@ async def test_arm_home_instant_success( ) assert mock_request.call_count == 2 - freezer.tick(DELAY) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + DELAY) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_HOME + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_HOME async def test_arm_home_instant_failure(hass: HomeAssistant) -> None: @@ -177,7 +174,7 @@ async def test_arm_home_instant_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -185,8 +182,8 @@ async def test_arm_home_instant_failure(hass: HomeAssistant) -> None: DOMAIN, SERVICE_ALARM_ARM_HOME_INSTANT, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "Failed to arm home instant test" - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert f"{err.value}" == "TotalConnect failed to arm home instant test." + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 2 # usercode is invalid @@ -195,24 +192,25 @@ async def test_arm_home_instant_failure(hass: HomeAssistant) -> None: DOMAIN, SERVICE_ALARM_ARM_HOME_INSTANT, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "Usercode is invalid, did not arm home instant" - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert ( + f"{err.value}" + == "TotalConnect usercode is invalid. 
Did not arm home instant" + ) + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 -async def test_arm_away_instant_success( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: +async def test_arm_away_instant_success(hass: HomeAssistant) -> None: """Test arm home instant method success.""" responses = [RESPONSE_DISARMED, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_AWAY] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED - assert hass.states.get(ENTITY_ID_2).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID_2).state == STATE_ALARM_DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -220,11 +218,10 @@ async def test_arm_away_instant_success( ) assert mock_request.call_count == 2 - freezer.tick(DELAY) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + DELAY) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY async def test_arm_away_instant_failure(hass: HomeAssistant) -> None: @@ -234,7 +231,7 @@ async def test_arm_away_instant_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -242,8 +239,8 @@ async def test_arm_away_instant_failure(hass: HomeAssistant) -> None: DOMAIN, SERVICE_ALARM_ARM_AWAY_INSTANT, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "Failed to arm away instant test" - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert f"{err.value}" == "TotalConnect failed to arm away instant test." + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 2 # usercode is invalid @@ -252,23 +249,24 @@ async def test_arm_away_instant_failure(hass: HomeAssistant) -> None: DOMAIN, SERVICE_ALARM_ARM_AWAY_INSTANT, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "Usercode is invalid, did not arm away instant" - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert ( + f"{err.value}" + == "TotalConnect usercode is invalid. 
Did not arm away instant" + ) + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 -async def test_arm_away_success( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: +async def test_arm_away_success(hass: HomeAssistant) -> None: """Test arm away method success.""" responses = [RESPONSE_DISARMED, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_AWAY] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -276,11 +274,10 @@ async def test_arm_away_success( ) assert mock_request.call_count == 2 - freezer.tick(DELAY) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + DELAY) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY async def test_arm_away_failure(hass: HomeAssistant) -> None: @@ -290,7 +287,7 @@ async def test_arm_away_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -298,8 +295,8 @@ async def test_arm_away_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_ARM_AWAY, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "Failed to arm away test" - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert f"{err.value}" == "TotalConnect failed to arm away test." + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 2 # usercode is invalid @@ -308,23 +305,21 @@ async def test_arm_away_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_ARM_AWAY, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "Usercode is invalid, did not arm away" - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert f"{err.value}" == "TotalConnect usercode is invalid. 
Did not arm away" + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 -async def test_disarm_success( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: +async def test_disarm_success(hass: HomeAssistant) -> None: """Test disarm method success.""" responses = [RESPONSE_ARMED_AWAY, RESPONSE_DISARM_SUCCESS, RESPONSE_DISARMED] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY assert mock_request.call_count == 1 await hass.services.async_call( @@ -332,11 +327,10 @@ async def test_disarm_success( ) assert mock_request.call_count == 2 - freezer.tick(DELAY) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + DELAY) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED async def test_disarm_failure(hass: HomeAssistant) -> None: @@ -350,7 +344,7 @@ async def test_disarm_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -358,8 +352,8 @@ async def test_disarm_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_DISARM, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "Failed to disarm test" - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY + assert f"{err.value}" == "TotalConnect failed to disarm test." + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY assert mock_request.call_count == 2 # usercode is invalid @@ -368,61 +362,21 @@ async def test_disarm_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_DISARM, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "Usercode is invalid, did not disarm" - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY + assert f"{err.value}" == "TotalConnect usercode is invalid. 
Did not disarm" + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 -async def test_disarm_code_required( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: - """Test disarm with code.""" - responses = [RESPONSE_ARMED_AWAY, RESPONSE_DISARM_SUCCESS, RESPONSE_DISARMED] - await setup_platform(hass, ALARM_DOMAIN, code_required=True) - with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: - await async_update_entity(hass, ENTITY_ID) - await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY - assert mock_request.call_count == 1 - - # runtime user entered code is bad - DATA_WITH_CODE = DATA.copy() - DATA_WITH_CODE["code"] = "666" - with pytest.raises(ServiceValidationError, match="Incorrect code entered"): - await hass.services.async_call( - ALARM_DOMAIN, SERVICE_ALARM_DISARM, DATA_WITH_CODE, blocking=True - ) - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY - # code check means the call to total_connect never happens - assert mock_request.call_count == 1 - - # runtime user entered code that is in config - DATA_WITH_CODE["code"] = USERCODES[LOCATION_ID] - await hass.services.async_call( - ALARM_DOMAIN, SERVICE_ALARM_DISARM, DATA_WITH_CODE, blocking=True - ) - await hass.async_block_till_done() - assert mock_request.call_count == 2 - - freezer.tick(DELAY) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED - - -async def test_arm_night_success( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: +async def test_arm_night_success(hass: HomeAssistant) -> None: """Test arm night method success.""" responses = [RESPONSE_DISARMED, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_NIGHT] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -430,11 +384,10 @@ async def test_arm_night_success( ) assert mock_request.call_count == 2 - freezer.tick(DELAY) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + DELAY) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_NIGHT + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_NIGHT async def test_arm_night_failure(hass: HomeAssistant) -> None: @@ -444,7 +397,7 @@ async def test_arm_night_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -452,8 +405,8 @@ async def test_arm_night_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_ARM_NIGHT, DATA, blocking=True ) await hass.async_block_till_done() - 
assert f"{err.value}" == "Failed to arm night test" - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert f"{err.value}" == "TotalConnect failed to arm night test." + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 2 # usercode is invalid @@ -462,21 +415,21 @@ async def test_arm_night_failure(hass: HomeAssistant) -> None: ALARM_DOMAIN, SERVICE_ALARM_ARM_NIGHT, DATA, blocking=True ) await hass.async_block_till_done() - assert f"{err.value}" == "Usercode is invalid, did not arm night" - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert f"{err.value}" == "TotalConnect usercode is invalid. Did not arm night" + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 -async def test_arming(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> None: +async def test_arming(hass: HomeAssistant) -> None: """Test arming.""" responses = [RESPONSE_DISARMED, RESPONSE_SUCCESS, RESPONSE_ARMING] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -484,21 +437,20 @@ async def test_arming(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> No ) assert mock_request.call_count == 2 - freezer.tick(DELAY) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + DELAY) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMING + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMING -async def test_disarming(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> None: +async def test_disarming(hass: HomeAssistant) -> None: """Test disarming.""" responses = [RESPONSE_ARMED_AWAY, RESPONSE_SUCCESS, RESPONSE_DISARMING] await setup_platform(hass, ALARM_DOMAIN) with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY assert mock_request.call_count == 1 await hass.services.async_call( @@ -506,11 +458,10 @@ async def test_disarming(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> ) assert mock_request.call_count == 2 - freezer.tick(DELAY) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + DELAY) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMING + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMING async def test_triggered_fire(hass: HomeAssistant) -> None: @@ -521,7 +472,7 @@ async def test_triggered_fire(hass: HomeAssistant) -> None: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_TRIGGERED assert state.attributes.get("triggered_source") == 
"Fire/Smoke" assert mock_request.call_count == 1 @@ -534,7 +485,7 @@ async def test_triggered_police(hass: HomeAssistant) -> None: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_TRIGGERED assert state.attributes.get("triggered_source") == "Police/Medical" assert mock_request.call_count == 1 @@ -547,7 +498,7 @@ async def test_triggered_carbon_monoxide(hass: HomeAssistant) -> None: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) - assert state.state == AlarmControlPanelState.TRIGGERED + assert state.state == STATE_ALARM_TRIGGERED assert state.attributes.get("triggered_source") == "Carbon Monoxide" assert mock_request.call_count == 1 @@ -559,10 +510,7 @@ async def test_armed_custom(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert ( - hass.states.get(ENTITY_ID).state - == AlarmControlPanelState.ARMED_CUSTOM_BYPASS - ) + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_CUSTOM_BYPASS assert mock_request.call_count == 1 @@ -577,9 +525,7 @@ async def test_unknown(hass: HomeAssistant) -> None: assert mock_request.call_count == 1 -async def test_other_update_failures( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: +async def test_other_update_failures(hass: HomeAssistant) -> None: """Test other failures seen during updates.""" responses = [ RESPONSE_DISARMED, @@ -594,40 +540,35 @@ async def test_other_update_failures( # first things work as planned await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 1 # then an error: ServiceUnavailable --> UpdateFailed - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE assert mock_request.call_count == 2 # works again - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL * 2) await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 3 # then an error: TotalConnectError --> UpdateFailed - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL * 3) await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE assert mock_request.call_count == 4 # works again - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL * 4) await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED assert mock_request.call_count == 5 # unknown TotalConnect status via ValueError - freezer.tick(SCAN_INTERVAL) - 
async_fire_time_changed(hass) + async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL * 5) await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE assert mock_request.call_count == 6 diff --git a/tests/components/totalconnect/test_config_flow.py b/tests/components/totalconnect/test_config_flow.py index 86419bff817..98de748faea 100644 --- a/tests/components/totalconnect/test_config_flow.py +++ b/tests/components/totalconnect/test_config_flow.py @@ -6,11 +6,10 @@ from total_connect_client.exceptions import AuthenticationError from homeassistant.components.totalconnect.const import ( AUTO_BYPASS, - CODE_REQUIRED, CONF_USERCODES, DOMAIN, ) -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -142,7 +141,9 @@ async def test_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_REAUTH}, data=entry.data + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -239,11 +240,11 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert result["step_id"] == "init" result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={AUTO_BYPASS: True, CODE_REQUIRED: False} + result["flow_id"], user_input={AUTO_BYPASS: True} ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert config_entry.options == {AUTO_BYPASS: True, CODE_REQUIRED: False} + assert config_entry.options == {AUTO_BYPASS: True} await hass.async_block_till_done() assert await hass.config_entries.async_unload(config_entry.entry_id) diff --git a/tests/components/touchline_sl/__init__.py b/tests/components/touchline_sl/__init__.py deleted file mode 100644 index c22e9d329db..00000000000 --- a/tests/components/touchline_sl/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Roth Touchline SL integration.""" diff --git a/tests/components/touchline_sl/conftest.py b/tests/components/touchline_sl/conftest.py deleted file mode 100644 index 4edeb048f5b..00000000000 --- a/tests/components/touchline_sl/conftest.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Common fixtures for the Roth Touchline SL tests.""" - -from collections.abc import Generator -from typing import NamedTuple -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.touchline_sl.const import DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME - -from tests.common import MockConfigEntry - - -class FakeModule(NamedTuple): - """Fake Module used for unit testing only.""" - - name: str - id: str - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.touchline_sl.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_touchlinesl_client() -> Generator[AsyncMock]: - """Mock a pytouchlinesl client.""" - with ( - patch( - "homeassistant.components.touchline_sl.TouchlineSL", - autospec=True, - ) as mock_client, - patch( - "homeassistant.components.touchline_sl.config_flow.TouchlineSL", - new=mock_client, - ), - ): - client = mock_client.return_value - 
client.user_id.return_value = 12345 - client.modules.return_value = [FakeModule(name="Foobar", id="deadbeef")] - yield client - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title="TouchlineSL", - data={ - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - unique_id="12345", - ) diff --git a/tests/components/touchline_sl/test_config_flow.py b/tests/components/touchline_sl/test_config_flow.py deleted file mode 100644 index 992fa2bdb3e..00000000000 --- a/tests/components/touchline_sl/test_config_flow.py +++ /dev/null @@ -1,113 +0,0 @@ -"""Test the Roth Touchline SL config flow.""" - -from unittest.mock import AsyncMock - -import pytest -from pytouchlinesl.client import RothAPIError - -from homeassistant.components.touchline_sl.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - -RESULT_UNIQUE_ID = "12345" - -CONFIG_DATA = { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", -} - - -async def test_config_flow_success( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_touchlinesl_client: AsyncMock -) -> None: - """Test the happy path where the provided username/password result in a new entry.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], CONFIG_DATA - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "test-username" - assert result["data"] == CONFIG_DATA - assert result["result"].unique_id == RESULT_UNIQUE_ID - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("exception", "error_base"), - [ - (RothAPIError(status=401), "invalid_auth"), - (RothAPIError(status=502), "cannot_connect"), - (Exception, "unknown"), - ], -) -async def test_config_flow_failure_api_exceptions( - hass: HomeAssistant, - exception: Exception, - error_base: str, - mock_setup_entry: AsyncMock, - mock_touchlinesl_client: AsyncMock, -) -> None: - """Test for invalid credentials or API connection errors, and that the form can recover.""" - mock_touchlinesl_client.user_id.side_effect = exception - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], CONFIG_DATA - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error_base} - - # "Fix" the problem, and try again. 
- mock_touchlinesl_client.user_id.side_effect = None - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], CONFIG_DATA - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "test-username" - assert result["data"] == CONFIG_DATA - assert result["result"].unique_id == RESULT_UNIQUE_ID - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_config_flow_failure_adding_non_unique_account( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_touchlinesl_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test that the config flow fails when user tries to add duplicate accounts.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], CONFIG_DATA - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index 75eab8eeb73..d12858017cc 100644 --- a/tests/components/tplink/__init__.py +++ b/tests/components/tplink/__init__.py @@ -18,18 +18,14 @@ from kasa import ( ) from kasa.interfaces import Fan, Light, LightEffect, LightState from kasa.protocol import BaseProtocol -from kasa.smart.modules.alarm import Alarm from syrupy import SnapshotAssertion -from homeassistant.components.automation import DOMAIN as AUTOMATION_DOMAIN from homeassistant.components.tplink import ( - CONF_AES_KEYS, CONF_ALIAS, - CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, + CONF_DEVICE_CONFIG, CONF_HOST, CONF_MODEL, - CONF_USES_HTTP, Credentials, ) from homeassistant.components.tplink.const import DOMAIN @@ -43,7 +39,7 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_json_value_fixture -ColorTempRange = namedtuple("ColorTempRange", ["min", "max"]) # noqa: PYI024 +ColorTempRange = namedtuple("ColorTempRange", ["min", "max"]) MODULE = "homeassistant.components.tplink" MODULE_CONFIG_FLOW = "homeassistant.components.tplink.config_flow" @@ -58,61 +54,54 @@ DHCP_FORMATTED_MAC_ADDRESS = MAC_ADDRESS.replace(":", "") MAC_ADDRESS2 = "11:22:33:44:55:66" DEFAULT_ENTRY_TITLE = f"{ALIAS} {MODEL}" CREDENTIALS_HASH_LEGACY = "" -CONN_PARAMS_LEGACY = DeviceConnectionParameters( - DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Xor -) DEVICE_CONFIG_LEGACY = DeviceConfig(IP_ADDRESS) DEVICE_CONFIG_DICT_LEGACY = DEVICE_CONFIG_LEGACY.to_dict(exclude_credentials=True) CREDENTIALS = Credentials("foo", "bar") -CREDENTIALS_HASH_AES = "AES/abcdefghijklmnopqrstuvabcdefghijklmnopqrstuv==" -CREDENTIALS_HASH_KLAP = "KLAP/abcdefghijklmnopqrstuv==" -CONN_PARAMS_KLAP = DeviceConnectionParameters( - DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Klap -) -DEVICE_CONFIG_KLAP = DeviceConfig( +CREDENTIALS_HASH_AUTH = "abcdefghijklmnopqrstuv==" +DEVICE_CONFIG_AUTH = DeviceConfig( IP_ADDRESS, credentials=CREDENTIALS, - connection_type=CONN_PARAMS_KLAP, + connection_type=DeviceConnectionParameters( + DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Klap + ), uses_http=True, ) -CONN_PARAMS_AES = DeviceConnectionParameters( - DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Aes -) -AES_KEYS = {"private": "foo", "public": "bar"} -DEVICE_CONFIG_AES = DeviceConfig( +DEVICE_CONFIG_AUTH2 = 
DeviceConfig( IP_ADDRESS2, credentials=CREDENTIALS, - connection_type=CONN_PARAMS_AES, + connection_type=DeviceConnectionParameters( + DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Klap + ), uses_http=True, - aes_keys=AES_KEYS, ) -DEVICE_CONFIG_DICT_KLAP = DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True) -DEVICE_CONFIG_DICT_AES = DEVICE_CONFIG_AES.to_dict(exclude_credentials=True) +DEVICE_CONFIG_DICT_AUTH = DEVICE_CONFIG_AUTH.to_dict(exclude_credentials=True) +DEVICE_CONFIG_DICT_AUTH2 = DEVICE_CONFIG_AUTH2.to_dict(exclude_credentials=True) + CREATE_ENTRY_DATA_LEGACY = { CONF_HOST: IP_ADDRESS, CONF_ALIAS: ALIAS, CONF_MODEL: MODEL, - CONF_CONNECTION_PARAMETERS: CONN_PARAMS_LEGACY.to_dict(), - CONF_USES_HTTP: False, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_LEGACY, } -CREATE_ENTRY_DATA_KLAP = { +CREATE_ENTRY_DATA_AUTH = { CONF_HOST: IP_ADDRESS, CONF_ALIAS: ALIAS, CONF_MODEL: MODEL, - CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_KLAP, - CONF_CONNECTION_PARAMETERS: CONN_PARAMS_KLAP.to_dict(), - CONF_USES_HTTP: True, + CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, } -CREATE_ENTRY_DATA_AES = { +CREATE_ENTRY_DATA_AUTH2 = { CONF_HOST: IP_ADDRESS2, CONF_ALIAS: ALIAS, CONF_MODEL: MODEL, - CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_AES, - CONF_CONNECTION_PARAMETERS: CONN_PARAMS_AES.to_dict(), - CONF_USES_HTTP: True, - CONF_AES_KEYS: AES_KEYS, + CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH2, } +NEW_CONNECTION_TYPE = DeviceConnectionParameters( + DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Aes +) +NEW_CONNECTION_TYPE_DICT = NEW_CONNECTION_TYPE.to_dict() def _load_feature_fixtures(): @@ -168,18 +157,12 @@ async def snapshot_platform( ), "Please limit the loaded platforms to 1 platform." 
translations = await async_get_translations(hass, "en", "entity", [DOMAIN]) - unique_device_classes = [] for entity_entry in entity_entries: if entity_entry.translation_key: key = f"component.{DOMAIN}.entity.{entity_entry.domain}.{entity_entry.translation_key}.name" - single_device_class_translation = False - if key not in translations and entity_entry.original_device_class: - if entity_entry.original_device_class not in unique_device_classes: - single_device_class_translation = True - unique_device_classes.append(entity_entry.original_device_class) assert ( - (key in translations) or single_device_class_translation - ), f"No translation or non unique device_class for entity {entity_entry.unique_id}, expected {key}" + key in translations + ), f"No translation for entity {entity_entry.unique_id}, expected {key}" assert entity_entry == snapshot( name=f"{entity_entry.entity_id}-entry" ), f"entity entry snapshot failed for {entity_entry.entity_id}" @@ -191,21 +174,6 @@ async def snapshot_platform( ), f"state snapshot failed for {entity_entry.entity_id}" -async def setup_automation(hass: HomeAssistant, alias: str, entity_id: str) -> None: - """Set up an automation for tests.""" - assert await async_setup_component( - hass, - AUTOMATION_DOMAIN, - { - AUTOMATION_DOMAIN: { - "alias": alias, - "trigger": {"platform": "state", "entity_id": entity_id, "to": "on"}, - "action": {"action": "notify.notify", "metadata": {}, "data": {}}, - } - }, - ) - - def _mock_protocol() -> BaseProtocol: protocol = MagicMock(spec=BaseProtocol) protocol.close = AsyncMock() @@ -219,7 +187,7 @@ def _mocked_device( device_id=DEVICE_ID, alias=ALIAS, model=MODEL, - ip_address: str | None = None, + ip_address=IP_ADDRESS, modules: list[str] | None = None, children: list[Device] | None = None, features: list[str | Feature] | None = None, @@ -234,21 +202,15 @@ def _mocked_device( device.mac = mac device.alias = alias device.model = model + device.host = ip_address device.device_id = device_id device.hw_info = {"sw_ver": "1.0.0", "hw_ver": "1.0.0"} device.modules = {} device.features = {} - if not ip_address: - ip_address = IP_ADDRESS - else: - device_config.host = ip_address - device.host = ip_address - if modules: device.modules = { - module_name: MODULE_TO_MOCK_GEN[module_name](device) - for module_name in modules + module_name: MODULE_TO_MOCK_GEN[module_name]() for module_name in modules } if features: @@ -336,7 +298,7 @@ def _mocked_feature( return feature -def _mocked_light_module(device) -> Light: +def _mocked_light_module() -> Light: light = MagicMock(spec=Light, name="Mocked light module") light.update = AsyncMock() light.brightness = 50 @@ -352,73 +314,32 @@ def _mocked_light_module(device) -> Light: light.hsv = (10, 30, 5) light.valid_temperature_range = ColorTempRange(min=4000, max=9000) light.hw_info = {"sw_ver": "1.0.0", "hw_ver": "1.0.0"} - - async def _set_state(state, *_, **__): - light.state = state - - light.set_state = AsyncMock(wraps=_set_state) - - async def _set_brightness(brightness, *_, **__): - light.state.brightness = brightness - light.state.light_on = brightness > 0 - - light.set_brightness = AsyncMock(wraps=_set_brightness) - - async def _set_hsv(h, s, v, *_, **__): - light.state.hue = h - light.state.saturation = s - light.state.brightness = v - light.state.light_on = True - - light.set_hsv = AsyncMock(wraps=_set_hsv) - - async def _set_color_temp(temp, *_, **__): - light.state.color_temp = temp - light.state.light_on = True - - light.set_color_temp = AsyncMock(wraps=_set_color_temp) + 
light.set_state = AsyncMock() + light.set_brightness = AsyncMock() + light.set_hsv = AsyncMock() + light.set_color_temp = AsyncMock() light.protocol = _mock_protocol() return light -def _mocked_light_effect_module(device) -> LightEffect: +def _mocked_light_effect_module() -> LightEffect: effect = MagicMock(spec=LightEffect, name="Mocked light effect") effect.has_effects = True effect.has_custom_effects = True effect.effect = "Effect1" effect.effect_list = ["Off", "Effect1", "Effect2"] - - async def _set_effect(effect_name, *_, **__): - assert ( - effect_name in effect.effect_list - ), f"set_effect '{effect_name}' not in {effect.effect_list}" - assert device.modules[ - Module.Light - ], "Need a light module to test set_effect method" - device.modules[Module.Light].state.light_on = True - effect.effect = effect_name - - effect.set_effect = AsyncMock(wraps=_set_effect) + effect.set_effect = AsyncMock() effect.set_custom_effect = AsyncMock() return effect -def _mocked_fan_module(effect) -> Fan: +def _mocked_fan_module() -> Fan: fan = MagicMock(auto_spec=Fan, name="Mocked fan") fan.fan_speed_level = 0 fan.set_fan_speed_level = AsyncMock() return fan -def _mocked_alarm_module(device): - alarm = MagicMock(auto_spec=Alarm, name="Mocked alarm") - alarm.active = False - alarm.play = AsyncMock() - alarm.stop = AsyncMock() - - return alarm - - def _mocked_strip_children(features=None, alias=None) -> list[Device]: plug0 = _mocked_device( alias="Plug0" if alias is None else alias, @@ -485,15 +406,14 @@ MODULE_TO_MOCK_GEN = { Module.Light: _mocked_light_module, Module.LightEffect: _mocked_light_effect_module, Module.Fan: _mocked_fan_module, - Module.Alarm: _mocked_alarm_module, } -def _patch_discovery(device=None, no_device=False, ip_address=IP_ADDRESS): +def _patch_discovery(device=None, no_device=False): async def _discovery(*args, **kwargs): if no_device: return {} - return {ip_address: device if device else _mocked_device()} + return {IP_ADDRESS: _mocked_device()} return patch("homeassistant.components.tplink.Discover.discover", new=_discovery) diff --git a/tests/components/tplink/conftest.py b/tests/components/tplink/conftest.py index 25a4bd20270..f8d933de71e 100644 --- a/tests/components/tplink/conftest.py +++ b/tests/components/tplink/conftest.py @@ -1,20 +1,18 @@ """tplink conftest.""" -from collections.abc import Generator +import copy from unittest.mock import DEFAULT, AsyncMock, patch -from kasa import DeviceConfig import pytest +from typing_extensions import Generator from homeassistant.components.tplink import DOMAIN from homeassistant.core import HomeAssistant from . import ( CREATE_ENTRY_DATA_LEGACY, - CREDENTIALS_HASH_AES, - CREDENTIALS_HASH_KLAP, - DEVICE_CONFIG_AES, - DEVICE_CONFIG_KLAP, + CREDENTIALS_HASH_AUTH, + DEVICE_CONFIG_AUTH, IP_ADDRESS, IP_ADDRESS2, MAC_ADDRESS, @@ -22,7 +20,7 @@ from . 
import ( _mocked_device, ) -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, mock_device_registry, mock_registry @pytest.fixture @@ -32,23 +30,21 @@ def mock_discovery(): "homeassistant.components.tplink.Discover", discover=DEFAULT, discover_single=DEFAULT, - try_connect_all=DEFAULT, ) as mock_discovery: device = _mocked_device( - device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), - credentials_hash=CREDENTIALS_HASH_KLAP, - alias="My Bulb", + device_config=copy.deepcopy(DEVICE_CONFIG_AUTH), + credentials_hash=CREDENTIALS_HASH_AUTH, + alias=None, ) devices = { "127.0.0.1": _mocked_device( - device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), - credentials_hash=CREDENTIALS_HASH_KLAP, + device_config=copy.deepcopy(DEVICE_CONFIG_AUTH), + credentials_hash=CREDENTIALS_HASH_AUTH, alias=None, ) } mock_discovery["discover"].return_value = devices mock_discovery["discover_single"].return_value = device - mock_discovery["try_connect_all"].return_value = device mock_discovery["mock_device"] = device yield mock_discovery @@ -59,15 +55,12 @@ def mock_connect(): with patch("homeassistant.components.tplink.Device.connect") as mock_connect: devices = { IP_ADDRESS: _mocked_device( - device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), - credentials_hash=CREDENTIALS_HASH_KLAP, - ip_address=IP_ADDRESS, + device_config=DEVICE_CONFIG_AUTH, credentials_hash=CREDENTIALS_HASH_AUTH ), IP_ADDRESS2: _mocked_device( - device_config=DeviceConfig.from_dict(DEVICE_CONFIG_AES.to_dict()), - credentials_hash=CREDENTIALS_HASH_AES, + device_config=DEVICE_CONFIG_AUTH, + credentials_hash=CREDENTIALS_HASH_AUTH, mac=MAC_ADDRESS2, - ip_address=IP_ADDRESS2, ), } @@ -79,6 +72,18 @@ def mock_connect(): yield {"connect": mock_connect, "mock_devices": devices} +@pytest.fixture(name="device_reg") +def device_reg_fixture(hass): + """Return an empty, loaded, registry.""" + return mock_device_registry(hass) + + +@pytest.fixture(name="entity_reg") +def entity_reg_fixture(hass): + """Return an empty, loaded, registry.""" + return mock_registry(hass) + + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" diff --git a/tests/components/tplink/fixtures/features.json b/tests/components/tplink/fixtures/features.json index f60132fd2c2..daf86a74643 100644 --- a/tests/components/tplink/fixtures/features.json +++ b/tests/components/tplink/fixtures/features.json @@ -34,16 +34,6 @@ "type": "Switch", "category": "Config" }, - "child_lock": { - "value": true, - "type": "Switch", - "category": "Config" - }, - "pir_enabled": { - "value": true, - "type": "Switch", - "category": "Config" - }, "current_consumption": { "value": 5.23, "type": "Sensor", @@ -83,7 +73,7 @@ "value": 121.1, "type": "Sensor", "category": "Primary", - "unit": "V", + "unit": "v", "precision_hint": 1 }, "device_id": { @@ -160,11 +150,6 @@ "type": "Sensor", "category": "Debug" }, - "check_latest_firmware": { - "value": "", - "type": "Action", - "category": "Info" - }, "thermostat_mode": { "value": "off", "type": "Sensor", @@ -210,21 +195,6 @@ "type": "BinarySensor", "category": "Primary" }, - "motion_detected": { - "value": false, - "type": "BinarySensor", - "category": "Primary" - }, - "alarm": { - "value": false, - "type": "BinarySensor", - "category": "Info" - }, - "reboot": { - "value": "", - "type": "Action", - "category": "Debug" - }, "test_alarm": { "value": "", "type": "Action", @@ -313,10 +283,5 @@ "type": "Choice", "category": "Config", "choices": 
["low", "normal", "high"] - }, - "water_alert_timestamp": { - "type": "Sensor", - "category": "Info", - "value": "2024-06-24 10:03:11.046643+01:00" } } diff --git a/tests/components/tplink/snapshots/test_binary_sensor.ambr b/tests/components/tplink/snapshots/test_binary_sensor.ambr index 4a1cfe5b411..b45494d1001 100644 --- a/tests/components/tplink/snapshots/test_binary_sensor.ambr +++ b/tests/components/tplink/snapshots/test_binary_sensor.ambr @@ -206,53 +206,6 @@ 'state': 'off', }) # --- -# name: test_states[binary_sensor.my_device_motion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.my_device_motion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Motion', - 'platform': 'tplink', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'motion_detected', - 'unique_id': '123456789ABCDEFGH_motion_detected', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[binary_sensor.my_device_motion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'my_device Motion', - }), - 'context': , - 'entity_id': 'binary_sensor.my_device_motion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_states[binary_sensor.my_device_overheated-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -333,6 +286,53 @@ 'unit_of_measurement': None, }) # --- +# name: test_states[binary_sensor.my_device_update-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_device_update', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Update', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'update_available', + 'unique_id': '123456789ABCDEFGH_update_available', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[binary_sensor.my_device_update-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'update', + 'friendly_name': 'my_device Update', + }), + 'context': , + 'entity_id': 'binary_sensor.my_device_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_states[my_device-entry] DeviceRegistryEntrySnapshot({ 'area_id': None, @@ -359,7 +359,6 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', - 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tplink/snapshots/test_button.ambr b/tests/components/tplink/snapshots/test_button.ambr index bb75f4642e1..0167256877d 100644 --- a/tests/components/tplink/snapshots/test_button.ambr +++ b/tests/components/tplink/snapshots/test_button.ambr @@ -1,37 +1,4 @@ # serializer version: 1 -# name: test_states[button.my_device_restart-entry] - EntityRegistryEntrySnapshot({ - 
'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.my_device_restart', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restart', - 'platform': 'tplink', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reboot', - 'unique_id': '123456789ABCDEFGH_reboot', - 'unit_of_measurement': None, - }) -# --- # name: test_states[button.my_device_stop_alarm-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -150,7 +117,6 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', - 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tplink/snapshots/test_climate.ambr b/tests/components/tplink/snapshots/test_climate.ambr index 8236f332046..4bdfe52b9b1 100644 --- a/tests/components/tplink/snapshots/test_climate.ambr +++ b/tests/components/tplink/snapshots/test_climate.ambr @@ -42,7 +42,7 @@ # name: test_states[climate.thermostat-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'current_temperature': 20.2, + 'current_temperature': 20, 'friendly_name': 'thermostat', 'hvac_action': , 'hvac_modes': list([ @@ -52,7 +52,7 @@ 'max_temp': 65536, 'min_temp': None, 'supported_features': , - 'temperature': 22.2, + 'temperature': 22, }), 'context': , 'entity_id': 'climate.thermostat', @@ -84,7 +84,6 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', - 'model_id': None, 'name': 'thermostat', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tplink/snapshots/test_fan.ambr b/tests/components/tplink/snapshots/test_fan.ambr index 1a7392dc63a..0a51909affe 100644 --- a/tests/components/tplink/snapshots/test_fan.ambr +++ b/tests/components/tplink/snapshots/test_fan.ambr @@ -28,7 +28,7 @@ 'original_name': None, 'platform': 'tplink', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '123456789ABCDEFGH', 'unit_of_measurement': None, @@ -42,7 +42,7 @@ 'percentage_step': 25.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.my_device', @@ -81,7 +81,7 @@ 'original_name': 'my_fan_0', 'platform': 'tplink', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '123456789ABCDEFGH00', 'unit_of_measurement': None, @@ -95,7 +95,7 @@ 'percentage_step': 25.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.my_device_my_fan_0', @@ -134,7 +134,7 @@ 'original_name': 'my_fan_1', 'platform': 'tplink', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '123456789ABCDEFGH01', 'unit_of_measurement': None, @@ -148,7 +148,7 @@ 'percentage_step': 25.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.my_device_my_fan_1', @@ -184,7 +184,6 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', - 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tplink/snapshots/test_number.ambr 
b/tests/components/tplink/snapshots/test_number.ambr index 977d2098fb9..8cda0a728b3 100644 --- a/tests/components/tplink/snapshots/test_number.ambr +++ b/tests/components/tplink/snapshots/test_number.ambr @@ -25,7 +25,6 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', - 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , @@ -43,7 +42,7 @@ 'capabilities': dict({ 'max': 65536, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'config_entry_id': , @@ -79,7 +78,7 @@ 'friendly_name': 'my_device Smooth off', 'max': 65536, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'context': , @@ -98,7 +97,7 @@ 'capabilities': dict({ 'max': 65536, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'config_entry_id': , @@ -134,7 +133,7 @@ 'friendly_name': 'my_device Smooth on', 'max': 65536, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'context': , @@ -153,7 +152,7 @@ 'capabilities': dict({ 'max': 65536, 'min': -10, - 'mode': , + 'mode': , 'step': 1.0, }), 'config_entry_id': , @@ -189,7 +188,7 @@ 'friendly_name': 'my_device Temperature offset', 'max': 65536, 'min': -10, - 'mode': , + 'mode': , 'step': 1.0, }), 'context': , @@ -208,7 +207,7 @@ 'capabilities': dict({ 'max': 65536, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'config_entry_id': , @@ -244,7 +243,7 @@ 'friendly_name': 'my_device Turn off in', 'max': 65536, 'min': 0, - 'mode': , + 'mode': , 'step': 1.0, }), 'context': , diff --git a/tests/components/tplink/snapshots/test_select.ambr b/tests/components/tplink/snapshots/test_select.ambr index c851979f34c..555b0eb74d1 100644 --- a/tests/components/tplink/snapshots/test_select.ambr +++ b/tests/components/tplink/snapshots/test_select.ambr @@ -25,7 +25,6 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', - 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tplink/snapshots/test_sensor.ambr b/tests/components/tplink/snapshots/test_sensor.ambr index 739f02e51f0..46fe897500f 100644 --- a/tests/components/tplink/snapshots/test_sensor.ambr +++ b/tests/components/tplink/snapshots/test_sensor.ambr @@ -25,7 +25,6 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', - 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , @@ -358,53 +357,6 @@ 'state': '12', }) # --- -# name: test_states[sensor.my_device_last_water_leak_alert-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.my_device_last_water_leak_alert', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last water leak alert', - 'platform': 'tplink', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'water_alert_timestamp', - 'unique_id': '123456789ABCDEFGH_water_alert_timestamp', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[sensor.my_device_last_water_leak_alert-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'my_device Last water leak alert', - }), - 'context': , - 'entity_id': 'sensor.my_device_last_water_leak_alert', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-06-24T09:03:11+00:00', - }) -# --- # 
name: test_states[sensor.my_device_on_since-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -593,9 +545,7 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), + 'capabilities': None, 'config_entry_id': , 'device_class': None, 'device_id': , @@ -820,7 +770,7 @@ 'supported_features': 0, 'translation_key': 'voltage', 'unique_id': '123456789ABCDEFGH_voltage', - 'unit_of_measurement': 'V', + 'unit_of_measurement': 'v', }) # --- # name: test_states[sensor.my_device_voltage-state] @@ -829,7 +779,7 @@ 'device_class': 'voltage', 'friendly_name': 'my_device Voltage', 'state_class': , - 'unit_of_measurement': 'V', + 'unit_of_measurement': 'v', }), 'context': , 'entity_id': 'sensor.my_device_voltage', diff --git a/tests/components/tplink/snapshots/test_siren.ambr b/tests/components/tplink/snapshots/test_siren.ambr deleted file mode 100644 index b144288bd1c..00000000000 --- a/tests/components/tplink/snapshots/test_siren.ambr +++ /dev/null @@ -1,84 +0,0 @@ -# serializer version: 1 -# name: test_states[hub-entry] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - tuple( - 'mac', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '1.0.0', - 'id': , - 'identifiers': set({ - tuple( - 'tplink', - '123456789ABCDEFGH', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'TP-Link', - 'model': 'HS100', - 'model_id': None, - 'name': 'hub', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': '1.0.0', - 'via_device_id': None, - }) -# --- -# name: test_states[siren.hub-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'siren', - 'entity_category': None, - 'entity_id': 'siren.hub', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'tplink', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '123456789ABCDEFGH', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[siren.hub-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'hub', - 'supported_features': , - }), - 'context': , - 'entity_id': 'siren.hub', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/tplink/snapshots/test_switch.ambr b/tests/components/tplink/snapshots/test_switch.ambr index 36c630474c8..65eead6ddf4 100644 --- a/tests/components/tplink/snapshots/test_switch.ambr +++ b/tests/components/tplink/snapshots/test_switch.ambr @@ -25,7 +25,6 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', - 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , @@ -173,52 +172,6 @@ 'state': 'on', }) # --- -# name: test_states[switch.my_device_child_lock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.my_device_child_lock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': 
, - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Child lock', - 'platform': 'tplink', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'child_lock', - 'unique_id': '123456789ABCDEFGH_child_lock', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[switch.my_device_child_lock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my_device Child lock', - }), - 'context': , - 'entity_id': 'switch.my_device_child_lock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_states[switch.my_device_fan_sleep_mode-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -311,52 +264,6 @@ 'state': 'on', }) # --- -# name: test_states[switch.my_device_motion_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.my_device_motion_sensor', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Motion sensor', - 'platform': 'tplink', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'pir_enabled', - 'unique_id': '123456789ABCDEFGH_pir_enabled', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[switch.my_device_motion_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'my_device Motion sensor', - }), - 'context': , - 'entity_id': 'switch.my_device_motion_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_states[switch.my_device_smooth_transitions-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tplink/test_button.py b/tests/components/tplink/test_button.py index a3eb8950336..143a882a6cb 100644 --- a/tests/components/tplink/test_button.py +++ b/tests/components/tplink/test_button.py @@ -11,11 +11,7 @@ from homeassistant.components.tplink.const import DOMAIN from homeassistant.components.tplink.entity import EXCLUDED_FEATURES from homeassistant.const import ATTR_ENTITY_ID, CONF_HOST, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import ( - device_registry as dr, - entity_registry as er, - issue_registry as ir, -) +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from . import ( @@ -26,7 +22,6 @@ from . import ( _mocked_strip_children, _patch_connect, _patch_discovery, - setup_automation, setup_platform_for_device, snapshot_platform, ) @@ -34,53 +29,6 @@ from . 
import ( from tests.common import MockConfigEntry -@pytest.fixture -def create_deprecated_button_entities( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -): - """Create the entity so it is not ignored by the deprecation check.""" - mock_config_entry.add_to_hass(hass) - - def create_entry(device_name, device_id, key): - unique_id = f"{device_id}_{key}" - - entity_registry.async_get_or_create( - domain=BUTTON_DOMAIN, - platform=DOMAIN, - unique_id=unique_id, - suggested_object_id=f"{device_name}_{key}", - config_entry=mock_config_entry, - ) - - create_entry("my_device", "123456789ABCDEFGH", "stop_alarm") - create_entry("my_device", "123456789ABCDEFGH", "test_alarm") - - -@pytest.fixture -def create_deprecated_child_button_entities( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -): - """Create the entity so it is not ignored by the deprecation check.""" - - def create_entry(device_name, key): - for plug_id in range(2): - unique_id = f"PLUG{plug_id}DEVICEID_{key}" - entity_registry.async_get_or_create( - domain=BUTTON_DOMAIN, - platform=DOMAIN, - unique_id=unique_id, - suggested_object_id=f"my_device_plug{plug_id}_{key}", - config_entry=mock_config_entry, - ) - - create_entry("my_device", "stop_alarm") - create_entry("my_device", "test_alarm") - - @pytest.fixture def mocked_feature_button() -> Feature: """Return mocked tplink binary sensor feature.""" @@ -99,7 +47,6 @@ async def test_states( entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion, - create_deprecated_button_entities, ) -> None: """Test a sensor unique ids.""" features = {description.key for description in BUTTON_DESCRIPTIONS} @@ -119,17 +66,21 @@ async def test_button( hass: HomeAssistant, entity_registry: er.EntityRegistry, mocked_feature_button: Feature, - create_deprecated_button_entities, ) -> None: """Test a sensor unique ids.""" mocked_feature = mocked_feature_button - plug = _mocked_device(alias="my_device", features=[mocked_feature]) + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + + plug = _mocked_device(alias="my_plug", features=[mocked_feature]) with _patch_discovery(device=plug), _patch_connect(device=plug): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() # The entity_id is based on standard name from core. 
- entity_id = "button.my_device_test_alarm" + entity_id = "button.my_plug_test_alarm" entity = entity_registry.async_get(entity_id) assert entity assert entity.unique_id == f"{DEVICE_ID}_{mocked_feature.id}" @@ -140,13 +91,15 @@ async def test_button_children( entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, mocked_feature_button: Feature, - create_deprecated_button_entities, - create_deprecated_child_button_entities, ) -> None: """Test a sensor unique ids.""" mocked_feature = mocked_feature_button + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) plug = _mocked_device( - alias="my_device", + alias="my_plug", features=[mocked_feature], children=_mocked_strip_children(features=[mocked_feature]), ) @@ -154,13 +107,13 @@ async def test_button_children( await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "button.my_device_test_alarm" + entity_id = "button.my_plug_test_alarm" entity = entity_registry.async_get(entity_id) assert entity device = device_registry.async_get(entity.device_id) for plug_id in range(2): - child_entity_id = f"button.my_device_plug{plug_id}_test_alarm" + child_entity_id = f"button.my_plug_plug{plug_id}_test_alarm" child_entity = entity_registry.async_get(child_entity_id) assert child_entity assert child_entity.unique_id == f"PLUG{plug_id}DEVICEID_{mocked_feature.id}" @@ -174,16 +127,19 @@ async def test_button_press( hass: HomeAssistant, entity_registry: er.EntityRegistry, mocked_feature_button: Feature, - create_deprecated_button_entities, ) -> None: """Test a number entity limits and setting values.""" mocked_feature = mocked_feature_button - plug = _mocked_device(alias="my_device", features=[mocked_feature]) + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + plug = _mocked_device(alias="my_plug", features=[mocked_feature]) with _patch_discovery(device=plug), _patch_connect(device=plug): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "button.my_device_test_alarm" + entity_id = "button.my_plug_test_alarm" entity = entity_registry.async_get(entity_id) assert entity assert entity.unique_id == f"{DEVICE_ID}_test_alarm" @@ -195,84 +151,3 @@ async def test_button_press( blocking=True, ) mocked_feature.set_value.assert_called_with(True) - - -async def test_button_not_exists_with_deprecation( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mocked_feature_button: Feature, -) -> None: - """Test deprecated buttons are not created if they don't previously exist.""" - config_entry = MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS - ) - config_entry.add_to_hass(hass) - entity_id = "button.my_device_test_alarm" - - assert not hass.states.get(entity_id) - mocked_feature = mocked_feature_button - dev = _mocked_device(alias="my_device", features=[mocked_feature]) - with _patch_discovery(device=dev), _patch_connect(device=dev): - await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) - await hass.async_block_till_done() - - assert not entity_registry.async_get(entity_id) - assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) - assert not hass.states.get(entity_id) - - 
-@pytest.mark.parametrize( - ("entity_disabled", "entity_has_automations"), - [ - pytest.param(False, False, id="without-automations"), - pytest.param(False, True, id="with-automations"), - pytest.param(True, False, id="disabled"), - ], -) -async def test_button_exists_with_deprecation( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - issue_registry: ir.IssueRegistry, - mocked_feature_button: Feature, - entity_disabled: bool, - entity_has_automations: bool, -) -> None: - """Test the deprecated buttons are deleted or raise issues.""" - config_entry = MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS - ) - config_entry.add_to_hass(hass) - - object_id = "my_device_test_alarm" - entity_id = f"button.{object_id}" - unique_id = f"{DEVICE_ID}_test_alarm" - issue_id = f"deprecated_entity_{entity_id}_automation.test_automation" - - if entity_has_automations: - await setup_automation(hass, "test_automation", entity_id) - - entity = entity_registry.async_get_or_create( - domain=BUTTON_DOMAIN, - platform=DOMAIN, - unique_id=unique_id, - suggested_object_id=object_id, - config_entry=config_entry, - disabled_by=er.RegistryEntryDisabler.USER if entity_disabled else None, - ) - assert entity.entity_id == entity_id - assert not hass.states.get(entity_id) - - mocked_feature = mocked_feature_button - dev = _mocked_device(alias="my_device", features=[mocked_feature]) - with _patch_discovery(device=dev), _patch_connect(device=dev): - await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) - await hass.async_block_till_done() - - entity = entity_registry.async_get(entity_id) - # entity and state will be none if removed from registry - assert (entity is None) == entity_disabled - assert (hass.states.get(entity_id) is None) == entity_disabled - - assert ( - issue_registry.async_get_issue(DOMAIN, issue_id) is not None - ) == entity_has_automations diff --git a/tests/components/tplink/test_climate.py b/tests/components/tplink/test_climate.py index 3a54048e1d6..a80a74a5697 100644 --- a/tests/components/tplink/test_climate.py +++ b/tests/components/tplink/test_climate.py @@ -45,11 +45,11 @@ async def mocked_hub(hass: HomeAssistant) -> Device: features = [ _mocked_feature( - "temperature", value=20.2, category=Feature.Category.Primary, unit="celsius" + "temperature", value=20, category=Feature.Category.Primary, unit="celsius" ), _mocked_feature( "target_temperature", - value=22.2, + value=22, type_=Feature.Type.Number, category=Feature.Category.Primary, unit="celsius", @@ -94,8 +94,8 @@ async def test_climate( state = hass.states.get(ENTITY_ID) assert state.attributes[ATTR_HVAC_ACTION] is HVACAction.HEATING - assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20.2 - assert state.attributes[ATTR_TEMPERATURE] == 22.2 + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20 + assert state.attributes[ATTR_TEMPERATURE] == 22 async def test_states( @@ -120,13 +120,12 @@ async def test_set_temperature( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mocked_hub: Device ) -> None: """Test that set_temperature service calls the setter.""" - mocked_thermostat = mocked_hub.children[0] - mocked_thermostat.features["target_temperature"].minimum_value = 0 - await setup_platform_for_device( hass, mock_config_entry, Platform.CLIMATE, mocked_hub ) + mocked_thermostat = mocked_hub.children[0] + await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, diff --git a/tests/components/tplink/test_config_flow.py 
b/tests/components/tplink/test_config_flow.py index 2697696c667..e9ae7957520 100644 --- a/tests/components/tplink/test_config_flow.py +++ b/tests/components/tplink/test_config_flow.py @@ -1,8 +1,6 @@ """Test the tplink config flow.""" -from contextlib import contextmanager -import logging -from unittest.mock import ANY, AsyncMock, patch +from unittest.mock import AsyncMock, patch from kasa import TimeoutError import pytest @@ -13,13 +11,11 @@ from homeassistant.components.tplink import ( DOMAIN, AuthenticationError, Credentials, - Device, DeviceConfig, KasaException, ) -from homeassistant.components.tplink.config_flow import TPLinkConfigFlow from homeassistant.components.tplink.const import ( - CONF_CONNECTION_PARAMETERS, + CONF_CONNECTION_TYPE, CONF_CREDENTIALS_HASH, CONF_DEVICE_CONFIG, ) @@ -30,33 +26,26 @@ from homeassistant.const import ( CONF_HOST, CONF_MAC, CONF_PASSWORD, - CONF_PORT, CONF_USERNAME, ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from . import ( - AES_KEYS, ALIAS, - CONN_PARAMS_AES, - CONN_PARAMS_KLAP, - CONN_PARAMS_LEGACY, - CREATE_ENTRY_DATA_AES, - CREATE_ENTRY_DATA_KLAP, + CREATE_ENTRY_DATA_AUTH, + CREATE_ENTRY_DATA_AUTH2, CREATE_ENTRY_DATA_LEGACY, - CREDENTIALS_HASH_AES, - CREDENTIALS_HASH_KLAP, + CREDENTIALS_HASH_AUTH, DEFAULT_ENTRY_TITLE, - DEVICE_CONFIG_AES, - DEVICE_CONFIG_DICT_KLAP, - DEVICE_CONFIG_KLAP, - DEVICE_CONFIG_LEGACY, + DEVICE_CONFIG_DICT_AUTH, + DEVICE_CONFIG_DICT_LEGACY, DHCP_FORMATTED_MAC_ADDRESS, IP_ADDRESS, MAC_ADDRESS, MAC_ADDRESS2, MODULE, + NEW_CONNECTION_TYPE_DICT, _mocked_device, _patch_connect, _patch_discovery, @@ -66,44 +55,9 @@ from . import ( from tests.common import MockConfigEntry -@contextmanager -def override_side_effect(mock: AsyncMock, effect): - """Temporarily override a mock side effect and replace afterwards.""" - try: - default_side_effect = mock.side_effect - mock.side_effect = effect - yield mock - finally: - mock.side_effect = default_side_effect - - -@pytest.mark.parametrize( - ("device_config", "expected_entry_data", "credentials_hash"), - [ - pytest.param( - DEVICE_CONFIG_KLAP, CREATE_ENTRY_DATA_KLAP, CREDENTIALS_HASH_KLAP, id="KLAP" - ), - pytest.param( - DEVICE_CONFIG_AES, CREATE_ENTRY_DATA_AES, CREDENTIALS_HASH_AES, id="AES" - ), - pytest.param(DEVICE_CONFIG_LEGACY, CREATE_ENTRY_DATA_LEGACY, None, id="Legacy"), - ], -) -async def test_discovery( - hass: HomeAssistant, device_config, expected_entry_data, credentials_hash -) -> None: +async def test_discovery(hass: HomeAssistant) -> None: """Test setting up discovery.""" - ip_address = device_config.host - device = _mocked_device( - device_config=device_config, - credentials_hash=credentials_hash, - ip_address=ip_address, - ) - with ( - _patch_discovery(device, ip_address=ip_address), - _patch_single_discovery(device), - _patch_connect(device), - ): + with _patch_discovery(), _patch_single_discovery(), _patch_connect(): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -133,9 +87,9 @@ async def test_discovery( assert not result2["errors"] with ( - _patch_discovery(device, ip_address=ip_address), - _patch_single_discovery(device), - _patch_connect(device), + _patch_discovery(), + _patch_single_discovery(), + _patch_connect(), patch(f"{MODULE}.async_setup", return_value=True) as mock_setup, patch(f"{MODULE}.async_setup_entry", return_value=True) as mock_setup_entry, ): @@ -147,7 +101,7 @@ async def test_discovery( assert result3["type"] is 
FlowResultType.CREATE_ENTRY assert result3["title"] == DEFAULT_ENTRY_TITLE - assert result3["data"] == expected_entry_data + assert result3["data"] == CREATE_ENTRY_DATA_LEGACY mock_setup.assert_called_once() mock_setup_entry.assert_called_once() @@ -172,25 +126,24 @@ async def test_discovery_auth( ) -> None: """Test authenticated discovery.""" - mock_device = mock_connect["mock_devices"][IP_ADDRESS] - assert mock_device.config == DEVICE_CONFIG_KLAP + mock_discovery["mock_device"].update.side_effect = AuthenticationError - with override_side_effect(mock_connect["connect"], AuthenticationError): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE: mock_device, - }, - ) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + }, + ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "discovery_auth_confirm" assert not result["errors"] + mock_discovery["mock_device"].update.reset_mock(side_effect=True) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -201,7 +154,7 @@ async def test_discovery_auth( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == DEFAULT_ENTRY_TITLE - assert result2["data"] == CREATE_ENTRY_DATA_KLAP + assert result2["data"] == CREATE_ENTRY_DATA_AUTH assert result2["context"]["unique_id"] == MAC_ADDRESS @@ -215,43 +168,40 @@ async def test_discovery_auth( ) async def test_discovery_auth_errors( hass: HomeAssistant, + mock_discovery: AsyncMock, mock_connect: AsyncMock, mock_init, error_type, errors_msg, error_placement, ) -> None: - """Test handling of discovery authentication errors. + """Test handling of discovery authentication errors.""" + mock_discovery["mock_device"].update.side_effect = AuthenticationError + default_connect_side_effect = mock_connect["connect"].side_effect + mock_connect["connect"].side_effect = error_type - Tests for errors received during credential - entry during discovery_auth_confirm. 
- """ - mock_device = mock_connect["mock_devices"][IP_ADDRESS] - - with override_side_effect(mock_connect["connect"], AuthenticationError): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE: mock_device, - }, - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + }, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "discovery_auth_confirm" assert not result["errors"] - with override_side_effect(mock_connect["connect"], error_type): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {error_placement: errors_msg} @@ -259,6 +209,7 @@ async def test_discovery_auth_errors( await hass.async_block_till_done() + mock_connect["connect"].side_effect = default_connect_side_effect result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], { @@ -267,35 +218,35 @@ async def test_discovery_auth_errors( }, ) assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["data"] == CREATE_ENTRY_DATA_KLAP + assert result3["data"] == CREATE_ENTRY_DATA_AUTH assert result3["context"]["unique_id"] == MAC_ADDRESS async def test_discovery_new_credentials( hass: HomeAssistant, + mock_discovery: AsyncMock, mock_connect: AsyncMock, mock_init, ) -> None: """Test setting up discovery with new credentials.""" - mock_device = mock_connect["mock_devices"][IP_ADDRESS] + mock_discovery["mock_device"].update.side_effect = AuthenticationError - with override_side_effect(mock_connect["connect"], AuthenticationError): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE: mock_device, - }, - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + }, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "discovery_auth_confirm" assert not result["errors"] - assert mock_connect["connect"].call_count == 1 + assert mock_connect["connect"].call_count == 0 with patch( "homeassistant.components.tplink.config_flow.get_credentials", @@ -305,7 +256,7 @@ async def test_discovery_new_credentials( result["flow_id"], ) - assert mock_connect["connect"].call_count == 2 + assert mock_connect["connect"].call_count == 1 assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "discovery_confirm" @@ -316,60 +267,54 @@ async def test_discovery_new_credentials( {}, ) assert result3["type"] is 
FlowResultType.CREATE_ENTRY - assert result3["data"] == CREATE_ENTRY_DATA_KLAP + assert result3["data"] == CREATE_ENTRY_DATA_AUTH assert result3["context"]["unique_id"] == MAC_ADDRESS async def test_discovery_new_credentials_invalid( hass: HomeAssistant, + mock_discovery: AsyncMock, mock_connect: AsyncMock, mock_init, ) -> None: """Test setting up discovery with new invalid credentials.""" - mock_device = mock_connect["mock_devices"][IP_ADDRESS] + mock_discovery["mock_device"].update.side_effect = AuthenticationError + default_connect_side_effect = mock_connect["connect"].side_effect - with ( - patch("homeassistant.components.tplink.Discover.discover", return_value={}), - patch( - "homeassistant.components.tplink.config_flow.get_credentials", - return_value=None, - ), - override_side_effect(mock_connect["connect"], AuthenticationError), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE: mock_device, - }, - ) - await hass.async_block_till_done() + mock_connect["connect"].side_effect = AuthenticationError + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + }, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "discovery_auth_confirm" assert not result["errors"] - assert mock_connect["connect"].call_count == 1 + assert mock_connect["connect"].call_count == 0 - with ( - patch( - "homeassistant.components.tplink.config_flow.get_credentials", - return_value=Credentials("fake_user", "fake_pass"), - ), - override_side_effect(mock_connect["connect"], AuthenticationError), + with patch( + "homeassistant.components.tplink.config_flow.get_credentials", + return_value=Credentials("fake_user", "fake_pass"), ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], ) - assert mock_connect["connect"].call_count == 2 + assert mock_connect["connect"].call_count == 1 assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "discovery_auth_confirm" await hass.async_block_till_done() + mock_connect["connect"].side_effect = default_connect_side_effect result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], { @@ -378,7 +323,7 @@ async def test_discovery_new_credentials_invalid( }, ) assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["data"] == CREATE_ENTRY_DATA_KLAP + assert result3["data"] == CREATE_ENTRY_DATA_AUTH assert result3["context"]["unique_id"] == MAC_ADDRESS @@ -598,7 +543,7 @@ async def test_manual_auth( await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == DEFAULT_ENTRY_TITLE - assert result3["data"] == CREATE_ENTRY_DATA_KLAP + assert result3["data"] == CREATE_ENTRY_DATA_AUTH assert result3["context"]["unique_id"] == MAC_ADDRESS @@ -628,84 +573,18 @@ async def test_manual_auth_errors( assert not result["errors"] mock_discovery["mock_device"].update.side_effect = AuthenticationError - - with override_side_effect(mock_connect["connect"], error_type): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: IP_ADDRESS} - ) - assert result2["type"] is FlowResultType.FORM 
- assert result2["step_id"] == "user_auth_confirm" - assert not result2["errors"] - - await hass.async_block_till_done() - with override_side_effect(mock_connect["connect"], error_type): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.FORM - assert result3["step_id"] == "user_auth_confirm" - assert result3["errors"] == {error_placement: errors_msg} - assert result3["description_placeholders"]["error"] == str(error_type) - - result4 = await hass.config_entries.flow.async_configure( - result3["flow_id"], - { - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) - assert result4["type"] is FlowResultType.CREATE_ENTRY - assert result4["data"] == CREATE_ENTRY_DATA_KLAP - assert result4["context"]["unique_id"] == MAC_ADDRESS - - await hass.async_block_till_done() - - -@pytest.mark.parametrize( - ("host_str", "host", "port"), - [ - (f"{IP_ADDRESS}:1234", IP_ADDRESS, 1234), - ("[2001:db8:0::1]:4321", "2001:db8:0::1", 4321), - ], -) -async def test_manual_port_override( - hass: HomeAssistant, - mock_connect: AsyncMock, - mock_discovery: AsyncMock, - host_str, - host, - port, -) -> None: - """Test manually setup.""" - mock_discovery["mock_device"].config.port_override = port - mock_discovery["mock_device"].host = host - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert not result["errors"] - - # side_effects to cause auth confirm as the port override usually only - # works with direct connections. 
- mock_discovery["discover_single"].side_effect = TimeoutError - mock_connect["connect"].side_effect = AuthenticationError + default_connect_side_effect = mock_connect["connect"].side_effect + mock_connect["connect"].side_effect = error_type result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: host_str} + result["flow_id"], user_input={CONF_HOST: IP_ADDRESS} ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "user_auth_confirm" assert not result2["errors"] - creds = Credentials("fake_username", "fake_password") + await hass.async_block_till_done() + result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], user_input={ @@ -714,44 +593,25 @@ async def test_manual_port_override( }, ) await hass.async_block_till_done() - mock_discovery["try_connect_all"].assert_called_once_with( - host, credentials=creds, port=port, http_client=ANY - ) - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == DEFAULT_ENTRY_TITLE - assert result3["data"] == { - **CREATE_ENTRY_DATA_KLAP, - CONF_PORT: port, - CONF_HOST: host, - } - assert result3["context"]["unique_id"] == MAC_ADDRESS + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "user_auth_confirm" + assert result3["errors"] == {error_placement: errors_msg} + assert result3["description_placeholders"]["error"] == str(error_type) - -async def test_manual_port_override_invalid( - hass: HomeAssistant, mock_connect: AsyncMock, mock_discovery: AsyncMock -) -> None: - """Test manually setup.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + mock_connect["connect"].side_effect = default_connect_side_effect + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + { + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert not result["errors"] + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["data"] == CREATE_ENTRY_DATA_AUTH + assert result4["context"]["unique_id"] == MAC_ADDRESS - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: f"{IP_ADDRESS}:foo"} - ) await hass.async_block_till_done() - mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=None, port=None - ) - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == DEFAULT_ENTRY_TITLE - assert result2["data"] == CREATE_ENTRY_DATA_KLAP - assert result2["context"]["unique_id"] == MAC_ADDRESS - async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: """Test we get the form with discovery and abort for dhcp source when we get both.""" @@ -764,26 +624,14 @@ async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE: _mocked_device(device_config=DEVICE_CONFIG_LEGACY), + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_LEGACY, }, ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["errors"] is None - real_is_matching = TPLinkConfigFlow.is_matching - return_values = [] - - def is_matching(self, other_flow) -> bool: - return_values.append(real_is_matching(self, other_flow)) - return return_values[-1] - - with ( - _patch_discovery(), - _patch_single_discovery(), - 
_patch_connect(), - patch.object(TPLinkConfigFlow, "is_matching", wraps=is_matching, autospec=True), - ): + with _patch_discovery(), _patch_single_discovery(), _patch_connect(): result2 = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, @@ -794,8 +642,6 @@ async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_in_progress" - # Ensure the is_matching method returned True - assert return_values == [True] with _patch_discovery(), _patch_single_discovery(), _patch_connect(): result3 = await hass.config_entries.flow.async_init( @@ -841,7 +687,7 @@ async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE: _mocked_device(device_config=DEVICE_CONFIG_LEGACY), + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_LEGACY, }, ), ], @@ -895,7 +741,7 @@ async def test_discovered_by_dhcp_or_discovery( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE: _mocked_device(device_config=DEVICE_CONFIG_LEGACY), + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_LEGACY, }, ), ], @@ -925,11 +771,9 @@ async def test_integration_discovery_with_ip_change( mock_connect: AsyncMock, ) -> None: """Test reauth flow.""" + mock_connect["connect"].side_effect = KasaException() mock_config_entry.add_to_hass(hass) - with ( - patch("homeassistant.components.tplink.Discover.discover", return_value={}), - override_side_effect(mock_connect["connect"], KasaException()), - ): + with patch("homeassistant.components.tplink.Discover.discover", return_value={}): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -937,57 +781,39 @@ async def test_integration_discovery_with_ip_change( flows = hass.config_entries.flow.async_progress() assert len(flows) == 0 - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] - == CONN_PARAMS_LEGACY.to_dict() - ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_LEGACY + assert mock_config_entry.data[CONF_DEVICE_CONFIG].get(CONF_HOST) == "127.0.0.1" - mocked_device = _mocked_device(device_config=DEVICE_CONFIG_KLAP) - with override_side_effect(mock_connect["connect"], lambda *_, **__: mocked_device): - discovery_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: "127.0.0.2", - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE: mocked_device, - }, - ) + discovery_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: "127.0.0.2", + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + }, + ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() - ) + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" - config = DeviceConfig.from_dict(DEVICE_CONFIG_DICT_KLAP) + config = DeviceConfig.from_dict(DEVICE_CONFIG_DICT_AUTH) - # Do a reload here and check that the - # new config 
is picked up in setup_entry mock_connect["connect"].reset_mock(side_effect=True) bulb = _mocked_device( device_config=config, mac=mock_config_entry.unique_id, ) - - with ( - patch( - "homeassistant.components.tplink.async_create_clientsession", - return_value="Foo", - ), - override_side_effect(mock_connect["connect"], lambda *_, **__: bulb), - ): - await hass.config_entries.async_reload(mock_config_entry.entry_id) - await hass.async_block_till_done() + mock_connect["connect"].return_value = bulb + await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.LOADED # Check that init set the new host correctly before calling connect assert config.host == "127.0.0.1" config.host = "127.0.0.2" - config.uses_http = False # Not passed in to new config class - config.http_client = "Foo" mock_connect["connect"].assert_awaited_once_with(config=config) @@ -1001,17 +827,16 @@ async def test_integration_discovery_with_connection_change( And that connection_hash is removed as it will be invalid. """ + mock_connect["connect"].side_effect = KasaException() + mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, - data=CREATE_ENTRY_DATA_AES, - unique_id=MAC_ADDRESS2, + data=CREATE_ENTRY_DATA_AUTH, + unique_id=MAC_ADDRESS, ) mock_config_entry.add_to_hass(hass) - with ( - patch("homeassistant.components.tplink.Discover.discover", return_value={}), - override_side_effect(mock_connect["connect"], KasaException()), - ): + with patch("homeassistant.components.tplink.Discover.discover", return_value={}): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done(wait_background_tasks=True) @@ -1024,58 +849,42 @@ async def test_integration_discovery_with_connection_change( ) == 0 ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_AES.to_dict() - ) - assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_AES + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH + assert mock_config_entry.data[CONF_DEVICE_CONFIG].get(CONF_HOST) == "127.0.0.1" + assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_AUTH - mock_connect["connect"].reset_mock() NEW_DEVICE_CONFIG = { - **DEVICE_CONFIG_DICT_KLAP, - "connection_type": CONN_PARAMS_KLAP.to_dict(), - CONF_HOST: "127.0.0.2", + **DEVICE_CONFIG_DICT_AUTH, + CONF_CONNECTION_TYPE: NEW_CONNECTION_TYPE_DICT, } config = DeviceConfig.from_dict(NEW_DEVICE_CONFIG) # Reset the connect mock so when the config flow reloads the entry it succeeds - + mock_connect["connect"].reset_mock(side_effect=True) bulb = _mocked_device( device_config=config, mac=mock_config_entry.unique_id, ) + mock_connect["connect"].return_value = bulb - with ( - patch( - "homeassistant.components.tplink.async_create_clientsession", - return_value="Foo", - ), - override_side_effect(mock_connect["connect"], lambda *_, **__: bulb), - ): - discovery_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: "127.0.0.2", - CONF_MAC: MAC_ADDRESS2, - CONF_ALIAS: ALIAS, - CONF_DEVICE: bulb, - }, - ) + discovery_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: "127.0.0.1", + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE_CONFIG: 
NEW_DEVICE_CONFIG, + }, + ) await hass.async_block_till_done(wait_background_tasks=True) assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() - ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" - assert CREDENTIALS_HASH_AES not in mock_config_entry.data + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == NEW_DEVICE_CONFIG + assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" + assert CREDENTIALS_HASH_AUTH not in mock_config_entry.data assert mock_config_entry.state is ConfigEntryState.LOADED - config.host = "127.0.0.2" - config.uses_http = False # Not passed in to new config class - config.http_client = "Foo" - config.aes_keys = AES_KEYS mock_connect["connect"].assert_awaited_once_with(config=config) @@ -1086,18 +895,17 @@ async def test_dhcp_discovery_with_ip_change( mock_connect: AsyncMock, ) -> None: """Test dhcp discovery with an IP change.""" + mock_connect["connect"].side_effect = KasaException() mock_config_entry.add_to_hass(hass) - with ( - patch("homeassistant.components.tplink.Discover.discover", return_value={}), - override_side_effect(mock_connect["connect"], KasaException()), - ): + with patch("homeassistant.components.tplink.Discover.discover", return_value={}): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY flows = hass.config_entries.flow.async_progress() assert len(flows) == 0 - assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_LEGACY + assert mock_config_entry.data[CONF_DEVICE_CONFIG].get(CONF_HOST) == "127.0.0.1" discovery_result = await hass.config_entries.flow.async_init( DOMAIN, @@ -1111,30 +919,6 @@ async def test_dhcp_discovery_with_ip_change( assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" -async def test_dhcp_discovery_discover_fail( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_discovery: AsyncMock, - mock_connect: AsyncMock, -) -> None: - """Test dhcp discovery source cannot discover_single.""" - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 0 - assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" - - with override_side_effect(mock_discovery["discover_single"], TimeoutError): - discovery_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_DHCP}, - data=dhcp.DhcpServiceInfo( - ip="127.0.0.2", macaddress=DHCP_FORMATTED_MAC_ADDRESS, hostname=ALIAS - ), - ) - assert discovery_result["type"] is FlowResultType.ABORT - assert discovery_result["reason"] == "cannot_connect" - - async def test_reauth( hass: HomeAssistant, mock_added_config_entry: MockConfigEntry, @@ -1160,7 +944,7 @@ async def test_reauth( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + "127.0.0.1", credentials=credentials ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT @@ -1169,152 +953,6 @@ async def test_reauth( await hass.async_block_till_done() -async def test_reauth_try_connect_all( - hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, - mock_discovery: AsyncMock, - mock_connect: AsyncMock, -) -> None: - """Test reauth 
flow.""" - mock_added_config_entry.async_start_reauth(hass) - await hass.async_block_till_done() - - assert mock_added_config_entry.state is ConfigEntryState.LOADED - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - [result] = flows - assert result["step_id"] == "reauth_confirm" - - with override_side_effect(mock_discovery["discover_single"], TimeoutError): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) - credentials = Credentials("fake_username", "fake_password") - mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None - ) - mock_discovery["try_connect_all"].assert_called_once() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reauth_successful" - - await hass.async_block_till_done() - - -async def test_reauth_try_connect_all_fail( - hass: HomeAssistant, - mock_added_config_entry: MockConfigEntry, - mock_discovery: AsyncMock, - mock_connect: AsyncMock, -) -> None: - """Test reauth flow.""" - mock_added_config_entry.async_start_reauth(hass) - await hass.async_block_till_done() - - assert mock_added_config_entry.state is ConfigEntryState.LOADED - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - [result] = flows - assert result["step_id"] == "reauth_confirm" - - with ( - override_side_effect(mock_discovery["discover_single"], TimeoutError), - override_side_effect(mock_discovery["try_connect_all"], lambda *_, **__: None), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) - credentials = Credentials("fake_username", "fake_password") - mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None - ) - mock_discovery["try_connect_all"].assert_called_once() - assert result2["errors"] == {"base": "cannot_connect"} - - -async def test_reauth_update_with_encryption_change( - hass: HomeAssistant, - mock_discovery: AsyncMock, - mock_connect: AsyncMock, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test reauth flow.""" - - mock_config_entry = MockConfigEntry( - title="TPLink", - domain=DOMAIN, - data={**CREATE_ENTRY_DATA_AES}, - unique_id=MAC_ADDRESS2, - ) - mock_config_entry.add_to_hass(hass) - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_AES.to_dict() - ) - assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_AES - - with ( - patch("homeassistant.components.tplink.Discover.discover", return_value={}), - override_side_effect(mock_connect["connect"], AuthenticationError()), - ): - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR - - caplog.set_level(logging.DEBUG) - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - [result] = flows - assert result["step_id"] == "reauth_confirm" - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_AES.to_dict() - ) - assert CONF_CREDENTIALS_HASH not in mock_config_entry.data - - new_config = DeviceConfig( - "127.0.0.2", - credentials=None, - connection_type=Device.ConnectionParameters( - Device.Family.SmartTapoPlug, Device.EncryptionType.Klap - ), - uses_http=True, - ) - mock_discovery["mock_device"].host = 
"127.0.0.2" - mock_discovery["mock_device"].config = new_config - mock_discovery["mock_device"].credentials_hash = None - mock_connect["mock_devices"]["127.0.0.2"].config = new_config - mock_connect["mock_devices"]["127.0.0.2"].credentials_hash = CREDENTIALS_HASH_KLAP - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert "Connection type changed for 127.0.0.2" in caplog.text - credentials = Credentials("fake_username", "fake_password") - mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.2", credentials=credentials, port=None - ) - mock_discovery["mock_device"].update.assert_called_once_with() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reauth_successful" - assert mock_config_entry.state is ConfigEntryState.LOADED - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() - ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" - assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_KLAP - - async def test_reauth_update_from_discovery( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -1322,11 +960,9 @@ async def test_reauth_update_from_discovery( mock_connect: AsyncMock, ) -> None: """Test reauth flow.""" + mock_connect["connect"].side_effect = AuthenticationError mock_config_entry.add_to_hass(hass) - with ( - patch("homeassistant.components.tplink.Discover.discover", return_value={}), - override_side_effect(mock_connect["connect"], AuthenticationError()), - ): + with patch("homeassistant.components.tplink.Discover.discover", return_value={}): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -1336,32 +972,22 @@ async def test_reauth_update_from_discovery( assert len(flows) == 1 [result] = flows assert result["step_id"] == "reauth_confirm" - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] - == CONN_PARAMS_LEGACY.to_dict() - ) + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_LEGACY - device = _mocked_device( - device_config=DEVICE_CONFIG_KLAP, - mac=mock_config_entry.unique_id, + discovery_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + }, ) - with override_side_effect(mock_connect["connect"], lambda *_, **__: device): - discovery_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE: device, - }, - ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() - ) + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH async def test_reauth_update_from_discovery_with_ip_change( @@ -1371,11 +997,9 @@ async def test_reauth_update_from_discovery_with_ip_change( mock_connect: AsyncMock, ) -> None: """Test reauth flow.""" + mock_connect["connect"].side_effect = AuthenticationError() mock_config_entry.add_to_hass(hass) 
- with ( - patch("homeassistant.components.tplink.Discover.discover", return_value={}), - override_side_effect(mock_connect["connect"], AuthenticationError()), - ): + with patch("homeassistant.components.tplink.Discover.discover", return_value={}): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR @@ -1384,32 +1008,22 @@ async def test_reauth_update_from_discovery_with_ip_change( assert len(flows) == 1 [result] = flows assert result["step_id"] == "reauth_confirm" - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] - == CONN_PARAMS_LEGACY.to_dict() - ) + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_LEGACY - device = _mocked_device( - device_config=DEVICE_CONFIG_KLAP, - mac=mock_config_entry.unique_id, + discovery_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: "127.0.0.2", + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + }, ) - with override_side_effect(mock_connect["connect"], lambda *_, **__: device): - discovery_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: "127.0.0.2", - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE: device, - }, - ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() - ) + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" @@ -1420,49 +1034,39 @@ async def test_reauth_no_update_if_config_and_ip_the_same( mock_connect: AsyncMock, ) -> None: """Test reauth discovery does not update when the host and config are the same.""" + mock_connect["connect"].side_effect = AuthenticationError() mock_config_entry.add_to_hass(hass) - hass.config_entries.async_update_entry( mock_config_entry, data={ **mock_config_entry.data, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, }, ) - with override_side_effect(mock_connect["connect"], AuthenticationError()): - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 [result] = flows assert result["step_id"] == "reauth_confirm" - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() - ) + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH - device = _mocked_device( - device_config=DEVICE_CONFIG_KLAP, - mac=mock_config_entry.unique_id, + discovery_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS, + CONF_MAC: MAC_ADDRESS, + CONF_ALIAS: ALIAS, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + }, ) - with override_side_effect(mock_connect["connect"], lambda *_, **__: device): - discovery_result = await hass.config_entries.flow.async_init( - DOMAIN, - 
context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_MAC: MAC_ADDRESS, - CONF_ALIAS: ALIAS, - CONF_DEVICE: device, - }, - ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() - ) + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS @@ -1504,7 +1108,7 @@ async def test_reauth_errors( credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + "127.0.0.1", credentials=credentials ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.FORM @@ -1522,7 +1126,7 @@ async def test_reauth_errors( ) mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + "127.0.0.1", credentials=credentials ) mock_discovery["mock_device"].update.assert_called_once_with() @@ -1560,15 +1164,17 @@ async def test_pick_device_errors( assert result2["step_id"] == "pick_device" assert not result2["errors"] - with override_side_effect(mock_connect["connect"], error_type): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - {CONF_DEVICE: MAC_ADDRESS}, - ) - await hass.async_block_till_done() + default_connect_side_effect = mock_connect["connect"].side_effect + mock_connect["connect"].side_effect = error_type + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + {CONF_DEVICE: MAC_ADDRESS}, + ) + await hass.async_block_till_done() assert result3["type"] == expected_flow if expected_flow != FlowResultType.ABORT: + mock_connect["connect"].side_effect = default_connect_side_effect result4 = await hass.config_entries.flow.async_configure( result3["flow_id"], user_input={ @@ -1580,7 +1186,7 @@ async def test_pick_device_errors( assert result4["context"]["unique_id"] == MAC_ADDRESS -async def test_discovery_timeout_try_connect_all( +async def test_discovery_timeout_connect( hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, @@ -1606,111 +1212,30 @@ async def test_discovery_timeout_try_connect_all( assert mock_connect["connect"].call_count == 1 -async def test_discovery_timeout_try_connect_all_needs_creds( - hass: HomeAssistant, - mock_discovery: AsyncMock, - mock_connect: AsyncMock, - mock_init, -) -> None: - """Test discovery tries legacy connect on timeout.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - mock_discovery["discover_single"].side_effect = TimeoutError - await hass.async_block_till_done() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert not result["errors"] - assert mock_connect["connect"].call_count == 0 - - with override_side_effect(mock_connect["connect"], KasaException): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: IP_ADDRESS} - ) - await hass.async_block_till_done() - assert result2["step_id"] == "user_auth_confirm" - assert result2["type"] is FlowResultType.FORM - - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) - 
await hass.async_block_till_done() - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["context"]["unique_id"] == MAC_ADDRESS - assert mock_connect["connect"].call_count == 1 - - -async def test_discovery_timeout_try_connect_all_fail( - hass: HomeAssistant, - mock_discovery: AsyncMock, - mock_connect: AsyncMock, - mock_init, -) -> None: - """Test discovery tries legacy connect on timeout.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - mock_discovery["discover_single"].side_effect = TimeoutError - await hass.async_block_till_done() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert not result["errors"] - assert mock_connect["connect"].call_count == 0 - - with override_side_effect(mock_connect["connect"], KasaException): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: IP_ADDRESS} - ) - await hass.async_block_till_done() - assert result2["step_id"] == "user_auth_confirm" - assert result2["type"] is FlowResultType.FORM - - with override_side_effect(mock_discovery["try_connect_all"], lambda *_, **__: None): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) - await hass.async_block_till_done() - assert result3["errors"] == {"base": "cannot_connect"} - assert mock_connect["connect"].call_count == 1 - - async def test_reauth_update_other_flows( hass: HomeAssistant, + mock_config_entry: MockConfigEntry, mock_discovery: AsyncMock, mock_connect: AsyncMock, ) -> None: """Test reauth updates other reauth flows.""" - mock_config_entry = MockConfigEntry( - title="TPLink", - domain=DOMAIN, - data={**CREATE_ENTRY_DATA_KLAP}, - unique_id=MAC_ADDRESS, - ) mock_config_entry2 = MockConfigEntry( title="TPLink", domain=DOMAIN, - data={**CREATE_ENTRY_DATA_AES}, + data={**CREATE_ENTRY_DATA_AUTH2}, unique_id=MAC_ADDRESS2, ) + default_side_effect = mock_connect["connect"].side_effect + mock_connect["connect"].side_effect = AuthenticationError() mock_config_entry.add_to_hass(hass) mock_config_entry2.add_to_hass(hass) - with ( - patch("homeassistant.components.tplink.Discover.discover", return_value={}), - override_side_effect(mock_connect["connect"], AuthenticationError()), - ): + with patch("homeassistant.components.tplink.Discover.discover", return_value={}): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry2.state is ConfigEntryState.SETUP_ERROR assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + mock_connect["connect"].side_effect = default_side_effect await hass.async_block_till_done() @@ -1719,9 +1244,7 @@ async def test_reauth_update_other_flows( flows_by_entry_id = {flow["context"]["entry_id"]: flow for flow in flows} result = flows_by_entry_id[mock_config_entry.entry_id] assert result["step_id"] == "reauth_confirm" - assert ( - mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() - ) + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_LEGACY result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -1731,7 +1254,7 @@ async def test_reauth_update_other_flows( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + 
"127.0.0.1", credentials=credentials ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT diff --git a/tests/components/tplink/test_init.py b/tests/components/tplink/test_init.py index dd01c381adf..c5c5e2ce6db 100644 --- a/tests/components/tplink/test_init.py +++ b/tests/components/tplink/test_init.py @@ -4,7 +4,6 @@ from __future__ import annotations import copy from datetime import timedelta -from typing import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from freezegun.api import FrozenDateTimeFactory @@ -14,18 +13,14 @@ import pytest from homeassistant import setup from homeassistant.components import tplink from homeassistant.components.tplink.const import ( - CONF_AES_KEYS, - CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, CONF_DEVICE_CONFIG, DOMAIN, ) from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ( - CONF_ALIAS, CONF_AUTHENTICATION, CONF_HOST, - CONF_MODEL, CONF_PASSWORD, CONF_USERNAME, STATE_ON, @@ -38,20 +33,13 @@ from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from . import ( - ALIAS, - CREATE_ENTRY_DATA_AES, - CREATE_ENTRY_DATA_KLAP, + CREATE_ENTRY_DATA_AUTH, CREATE_ENTRY_DATA_LEGACY, - CREDENTIALS_HASH_AES, - CREDENTIALS_HASH_KLAP, - DEVICE_CONFIG_AES, - DEVICE_CONFIG_KLAP, - DEVICE_CONFIG_LEGACY, + DEVICE_CONFIG_AUTH, DEVICE_ID, DEVICE_ID_MAC, IP_ADDRESS, MAC_ADDRESS, - MODEL, _mocked_device, _patch_connect, _patch_discovery, @@ -119,7 +107,7 @@ async def test_config_entry_retry(hass: HomeAssistant) -> None: async def test_dimmer_switch_unique_id_fix_original_entity_still_exists( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_reg: er.EntityRegistry ) -> None: """Test no migration happens if the original entity id still exists.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=MAC_ADDRESS) @@ -127,14 +115,14 @@ async def test_dimmer_switch_unique_id_fix_original_entity_still_exists( dimmer = _mocked_device(alias="My dimmer", modules=[Module.Light]) rollout_unique_id = MAC_ADDRESS.replace(":", "").upper() original_unique_id = tplink.legacy_device_id(dimmer) - original_dimmer_entity_reg = entity_registry.async_get_or_create( + original_dimmer_entity_reg = entity_reg.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", unique_id=original_unique_id, original_name="Original dimmer", ) - rollout_dimmer_entity_reg = entity_registry.async_get_or_create( + rollout_dimmer_entity_reg = entity_reg.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -150,7 +138,7 @@ async def test_dimmer_switch_unique_id_fix_original_entity_still_exists( await setup.async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done(wait_background_tasks=True) - migrated_dimmer_entity_reg = entity_registry.async_get_or_create( + migrated_dimmer_entity_reg = entity_reg.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -190,7 +178,7 @@ async def test_config_entry_device_config( mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, - data={**CREATE_ENTRY_DATA_KLAP}, + data={**CREATE_ENTRY_DATA_AUTH}, unique_id=MAC_ADDRESS, ) mock_config_entry.add_to_hass(hass) @@ -209,7 +197,7 @@ async def test_config_entry_with_stored_credentials( mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, - data={**CREATE_ENTRY_DATA_KLAP}, + 
data={**CREATE_ENTRY_DATA_AUTH}, unique_id=MAC_ADDRESS, ) auth = { @@ -219,29 +207,24 @@ async def test_config_entry_with_stored_credentials( hass.data.setdefault(DOMAIN, {})[CONF_AUTHENTICATION] = auth mock_config_entry.add_to_hass(hass) - with patch( - "homeassistant.components.tplink.async_create_clientsession", return_value="Foo" - ): - await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.LOADED - config = DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()) - config.uses_http = False - config.http_client = "Foo" + config = DEVICE_CONFIG_AUTH assert config.credentials != stored_credentials config.credentials = stored_credentials mock_connect["connect"].assert_called_once_with(config=config) -async def test_config_entry_conn_params_invalid( +async def test_config_entry_device_config_invalid( hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, caplog: pytest.LogCaptureFixture, ) -> None: """Test that an invalid device config logs an error and loads the config entry.""" - entry_data = copy.deepcopy(CREATE_ENTRY_DATA_KLAP) - entry_data[CONF_CONNECTION_PARAMETERS] = {"foo": "bar"} + entry_data = copy.deepcopy(CREATE_ENTRY_DATA_AUTH) + entry_data[CONF_DEVICE_CONFIG] = {"foo": "bar"} mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, @@ -254,7 +237,7 @@ async def test_config_entry_conn_params_invalid( assert mock_config_entry.state is ConfigEntryState.LOADED assert ( - f"Invalid connection parameters dict for {IP_ADDRESS}: {entry_data.get(CONF_CONNECTION_PARAMETERS)}" + f"Invalid connection type dict for {IP_ADDRESS}: {entry_data.get(CONF_DEVICE_CONFIG)}" in caplog.text ) @@ -280,7 +263,7 @@ async def test_config_entry_errors( mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, - data={**CREATE_ENTRY_DATA_KLAP}, + data={**CREATE_ENTRY_DATA_AUTH}, unique_id=MAC_ADDRESS, ) mock_config_entry.add_to_hass(hass) @@ -512,9 +495,8 @@ async def test_unlink_devices( } assert device_entries[0].identifiers == set(test_identifiers) - with patch("homeassistant.components.tplink.CONF_CONFIG_ENTRY_MINOR_VERSION", 3): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() device_entries = dr.async_entries_for_config_entry(device_registry, entry.entry_id) @@ -522,7 +504,7 @@ async def test_unlink_devices( assert device_entries[0].identifiers == set(expected_identifiers) assert entry.version == 1 - assert entry.minor_version == 3 + assert entry.minor_version == 4 assert update_msg in caplog.text assert "Migration to version 1.3 complete" in caplog.text @@ -538,11 +520,11 @@ async def test_move_credentials_hash( from the device. 
""" device_config = { - **DEVICE_CONFIG_KLAP.to_dict( + **DEVICE_CONFIG_AUTH.to_dict( exclude_credentials=True, credentials_hash="theHash" ) } - entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} + entry_data = {**CREATE_ENTRY_DATA_AUTH, CONF_DEVICE_CONFIG: device_config} entry = MockConfigEntry( title="TPLink", @@ -563,7 +545,6 @@ async def test_move_credentials_hash( with ( patch("homeassistant.components.tplink.Device.connect", new=_connect), patch("homeassistant.components.tplink.PLATFORMS", []), - patch("homeassistant.components.tplink.CONF_CONFIG_ENTRY_MINOR_VERSION", 4), ): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -586,11 +567,11 @@ async def test_move_credentials_hash_auth_error( in async_setup_entry. """ device_config = { - **DEVICE_CONFIG_KLAP.to_dict( + **DEVICE_CONFIG_AUTH.to_dict( exclude_credentials=True, credentials_hash="theHash" ) } - entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} + entry_data = {**CREATE_ENTRY_DATA_AUTH, CONF_DEVICE_CONFIG: device_config} entry = MockConfigEntry( title="TPLink", @@ -608,7 +589,6 @@ async def test_move_credentials_hash_auth_error( side_effect=AuthenticationError, ), patch("homeassistant.components.tplink.PLATFORMS", []), - patch("homeassistant.components.tplink.CONF_CONFIG_ENTRY_MINOR_VERSION", 4), ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -630,11 +610,11 @@ async def test_move_credentials_hash_other_error( at the end of the test. """ device_config = { - **DEVICE_CONFIG_KLAP.to_dict( + **DEVICE_CONFIG_AUTH.to_dict( exclude_credentials=True, credentials_hash="theHash" ) } - entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} + entry_data = {**CREATE_ENTRY_DATA_AUTH, CONF_DEVICE_CONFIG: device_config} entry = MockConfigEntry( title="TPLink", @@ -651,7 +631,6 @@ async def test_move_credentials_hash_other_error( "homeassistant.components.tplink.Device.connect", side_effect=KasaException ), patch("homeassistant.components.tplink.PLATFORMS", []), - patch("homeassistant.components.tplink.CONF_CONFIG_ENTRY_MINOR_VERSION", 4), ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -668,8 +647,10 @@ async def test_credentials_hash( hass: HomeAssistant, ) -> None: """Test credentials_hash used to call connect.""" + device_config = {**DEVICE_CONFIG_AUTH.to_dict(exclude_credentials=True)} entry_data = { - **CREATE_ENTRY_DATA_KLAP, + **CREATE_ENTRY_DATA_AUTH, + CONF_DEVICE_CONFIG: device_config, CONF_CREDENTIALS_HASH: "theHash", } @@ -693,7 +674,9 @@ async def test_credentials_hash( await hass.async_block_till_done() assert entry.state is ConfigEntryState.LOADED + assert CONF_CREDENTIALS_HASH not in entry.data[CONF_DEVICE_CONFIG] assert CONF_CREDENTIALS_HASH in entry.data + assert entry.data[CONF_DEVICE_CONFIG] == device_config assert entry.data[CONF_CREDENTIALS_HASH] == "theHash" @@ -701,8 +684,10 @@ async def test_credentials_hash_auth_error( hass: HomeAssistant, ) -> None: """Test credentials_hash is deleted after an auth failure.""" + device_config = {**DEVICE_CONFIG_AUTH.to_dict(exclude_credentials=True)} entry_data = { - **CREATE_ENTRY_DATA_KLAP, + **CREATE_ENTRY_DATA_AUTH, + CONF_DEVICE_CONFIG: device_config, CONF_CREDENTIALS_HASH: "theHash", } @@ -715,10 +700,6 @@ async def test_credentials_hash_auth_error( with ( patch("homeassistant.components.tplink.PLATFORMS", []), - patch( - "homeassistant.components.tplink.async_create_clientsession", - 
return_value="Foo", - ), patch( "homeassistant.components.tplink.Device.connect", side_effect=AuthenticationError, @@ -729,78 +710,8 @@ async def test_credentials_hash_auth_error( await hass.async_block_till_done() expected_config = DeviceConfig.from_dict( - DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True, credentials_hash="theHash") + DEVICE_CONFIG_AUTH.to_dict(exclude_credentials=True, credentials_hash="theHash") ) - expected_config.uses_http = False - expected_config.http_client = "Foo" connect_mock.assert_called_with(config=expected_config) assert entry.state is ConfigEntryState.SETUP_ERROR assert CONF_CREDENTIALS_HASH not in entry.data - - -@pytest.mark.parametrize( - ("device_config", "expected_entry_data", "credentials_hash"), - [ - pytest.param( - DEVICE_CONFIG_KLAP, CREATE_ENTRY_DATA_KLAP, CREDENTIALS_HASH_KLAP, id="KLAP" - ), - pytest.param( - DEVICE_CONFIG_AES, CREATE_ENTRY_DATA_AES, CREDENTIALS_HASH_AES, id="AES" - ), - pytest.param(DEVICE_CONFIG_LEGACY, CREATE_ENTRY_DATA_LEGACY, None, id="Legacy"), - ], -) -async def test_migrate_remove_device_config( - hass: HomeAssistant, - mock_connect: AsyncMock, - caplog: pytest.LogCaptureFixture, - device_config: DeviceConfig, - expected_entry_data: dict[str, Any], - credentials_hash: str, -) -> None: - """Test credentials hash moved to parent. - - As async_setup_entry will succeed the hash on the parent is updated - from the device. - """ - OLD_CREATE_ENTRY_DATA = { - CONF_HOST: expected_entry_data[CONF_HOST], - CONF_ALIAS: ALIAS, - CONF_MODEL: MODEL, - CONF_DEVICE_CONFIG: device_config.to_dict(exclude_credentials=True), - } - - entry = MockConfigEntry( - title="TPLink", - domain=DOMAIN, - data=OLD_CREATE_ENTRY_DATA, - entry_id="123456", - unique_id=MAC_ADDRESS, - version=1, - minor_version=4, - ) - entry.add_to_hass(hass) - - async def _connect(config): - config.credentials_hash = credentials_hash - config.aes_keys = expected_entry_data.get(CONF_AES_KEYS) - return _mocked_device(device_config=config, credentials_hash=credentials_hash) - - with ( - patch("homeassistant.components.tplink.Device.connect", new=_connect), - patch("homeassistant.components.tplink.PLATFORMS", []), - patch( - "homeassistant.components.tplink.async_create_clientsession", - return_value="Foo", - ), - patch("homeassistant.components.tplink.CONF_CONFIG_ENTRY_MINOR_VERSION", 5), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert entry.minor_version == 5 - assert entry.state is ConfigEntryState.LOADED - assert CONF_DEVICE_CONFIG not in entry.data - assert entry.data == expected_entry_data - - assert "Migration to version 1.5 complete" in caplog.text diff --git a/tests/components/tplink/test_light.py b/tests/components/tplink/test_light.py index 6998d8fbcc7..c2f40f47e3d 100644 --- a/tests/components/tplink/test_light.py +++ b/tests/components/tplink/test_light.py @@ -5,7 +5,6 @@ from __future__ import annotations from datetime import timedelta from unittest.mock import MagicMock, PropertyMock -from freezegun.api import FrozenDateTimeFactory from kasa import ( AuthenticationError, DeviceType, @@ -37,13 +36,7 @@ from homeassistant.components.light import ( ) from homeassistant.components.tplink.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_HOST, - STATE_OFF, - STATE_ON, - STATE_UNKNOWN, -) +from homeassistant.const import ATTR_ENTITY_ID, CONF_HOST, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from 
homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er @@ -147,17 +140,13 @@ async def test_color_light( assert state.state == "on" attributes = state.attributes assert attributes[ATTR_BRIGHTNESS] == 128 + assert attributes[ATTR_COLOR_MODE] == "hs" assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] - # If effect is active, only the brightness can be controlled - if attributes.get(ATTR_EFFECT) is not None: - assert attributes[ATTR_COLOR_MODE] == "brightness" - else: - assert attributes[ATTR_COLOR_MODE] == "hs" - assert attributes[ATTR_MIN_MIREDS] == 111 - assert attributes[ATTR_MAX_MIREDS] == 250 - assert attributes[ATTR_HS_COLOR] == (10, 30) - assert attributes[ATTR_RGB_COLOR] == (255, 191, 178) - assert attributes[ATTR_XY_COLOR] == (0.42, 0.336) + assert attributes[ATTR_MIN_MIREDS] == 111 + assert attributes[ATTR_MAX_MIREDS] == 250 + assert attributes[ATTR_HS_COLOR] == (10, 30) + assert attributes[ATTR_RGB_COLOR] == (255, 191, 178) + assert attributes[ATTR_XY_COLOR] == (0.42, 0.336) await hass.services.async_call( LIGHT_DOMAIN, "turn_off", BASE_PAYLOAD, blocking=True @@ -505,9 +494,7 @@ async def test_dimmer_turn_on_fix(hass: HomeAssistant) -> None: light.set_state.reset_mock() -async def test_smart_strip_effects( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: +async def test_smart_strip_effects(hass: HomeAssistant) -> None: """Test smart strip effects.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS @@ -535,16 +522,16 @@ async def test_smart_strip_effects( assert state.attributes[ATTR_EFFECT_LIST] == ["Off", "Effect1", "Effect2"] # Ensure setting color temp when an effect - # is in progress calls set_effect to clear the effect + # is in progress calls set_hsv to clear the effect await hass.services.async_call( LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 4000}, blocking=True, ) - light_effect.set_effect.assert_called_once_with(LightEffect.LIGHT_EFFECTS_OFF) + light.set_hsv.assert_called_once_with(0, 0, None) light.set_color_temp.assert_called_once_with(4000, brightness=None, transition=None) - light_effect.set_effect.reset_mock() + light.set_hsv.reset_mock() light.set_color_temp.reset_mock() await hass.services.async_call( @@ -557,40 +544,6 @@ async def test_smart_strip_effects( "Effect2", brightness=None, transition=None ) light_effect.set_effect.reset_mock() - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == STATE_ON - assert state.attributes[ATTR_EFFECT] == "Effect2" - - # Test setting light effect off - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "off"}, - blocking=True, - ) - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == STATE_ON - assert state.attributes[ATTR_EFFECT] == "off" - light.set_state.assert_not_called() - - # Test setting light effect to invalid value - caplog.clear() - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "Effect3"}, - blocking=True, - ) - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == 
STATE_ON - assert state.attributes[ATTR_EFFECT] == "off" - assert "Invalid effect Effect3 for" in caplog.text light_effect.effect = LightEffect.LIGHT_EFFECTS_OFF async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) @@ -963,82 +916,3 @@ async def test_light_child( assert child_entity assert child_entity.unique_id == f"{DEVICE_ID}0{light_id}" assert child_entity.device_id == entity.device_id - - -async def test_scene_effect_light( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, -) -> None: - """Test activating a scene works with effects. - - i.e. doesn't try to set the effect to 'off' - """ - already_migrated_config_entry = MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS - ) - already_migrated_config_entry.add_to_hass(hass) - device = _mocked_device( - modules=[Module.Light, Module.LightEffect], alias="my_light" - ) - light_effect = device.modules[Module.LightEffect] - light_effect.effect = LightEffect.LIGHT_EFFECTS_OFF - - with _patch_discovery(device=device), _patch_connect(device=device): - assert await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) - assert await async_setup_component(hass, "scene", {}) - await hass.async_block_till_done() - - entity_id = "light.my_light" - - await hass.services.async_call( - LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - await hass.async_block_till_done() - freezer.tick(5) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state.state is STATE_ON - assert state.attributes["effect"] is EFFECT_OFF - - await hass.services.async_call( - "scene", - "create", - {"scene_id": "effect_off_scene", "snapshot_entities": [entity_id]}, - blocking=True, - ) - await hass.async_block_till_done() - scene_state = hass.states.get("scene.effect_off_scene") - assert scene_state.state is STATE_UNKNOWN - - await hass.services.async_call( - LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - await hass.async_block_till_done() - freezer.tick(5) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state.state is STATE_OFF - - await hass.services.async_call( - "scene", - "turn_on", - { - "entity_id": "scene.effect_off_scene", - }, - blocking=True, - ) - await hass.async_block_till_done() - scene_state = hass.states.get("scene.effect_off_scene") - assert scene_state.state is not STATE_UNKNOWN - - freezer.tick(5) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state.state is STATE_ON - assert state.attributes["effect"] is EFFECT_OFF diff --git a/tests/components/tplink/test_siren.py b/tests/components/tplink/test_siren.py deleted file mode 100644 index 8c3328558b0..00000000000 --- a/tests/components/tplink/test_siren.py +++ /dev/null @@ -1,76 +0,0 @@ -"""Tests for siren platform.""" - -from __future__ import annotations - -from kasa import Device, Module -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.siren import ( - DOMAIN as SIREN_DOMAIN, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er - -from . 
import _mocked_device, setup_platform_for_device, snapshot_platform - -from tests.common import MockConfigEntry - -ENTITY_ID = "siren.hub" - - -@pytest.fixture -async def mocked_hub(hass: HomeAssistant) -> Device: - """Return mocked tplink hub with an alarm module.""" - - return _mocked_device( - alias="hub", - modules=[Module.Alarm], - device_type=Device.Type.Hub, - ) - - -async def test_states( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, - mocked_hub: Device, -) -> None: - """Snapshot test.""" - await setup_platform_for_device(hass, mock_config_entry, Platform.SIREN, mocked_hub) - - await snapshot_platform( - hass, entity_registry, device_registry, snapshot, mock_config_entry.entry_id - ) - - -async def test_turn_on_and_off( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mocked_hub: Device -) -> None: - """Test that turn_on and turn_off services work as expected.""" - await setup_platform_for_device(hass, mock_config_entry, Platform.SIREN, mocked_hub) - - alarm_module = mocked_hub.modules[Module.Alarm] - - await hass.services.async_call( - SIREN_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: [ENTITY_ID]}, - blocking=True, - ) - - alarm_module.stop.assert_called() - - await hass.services.async_call( - SIREN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: [ENTITY_ID]}, - blocking=True, - ) - - alarm_module.play.assert_called() diff --git a/tests/components/tplink_omada/conftest.py b/tests/components/tplink_omada/conftest.py index b9bdb5ef94a..c29fcb633e4 100644 --- a/tests/components/tplink_omada/conftest.py +++ b/tests/components/tplink_omada/conftest.py @@ -1,6 +1,6 @@ """Test fixtures for TP-Link Omada integration.""" -from collections.abc import AsyncIterable, Generator +from collections.abc import AsyncIterable import json from unittest.mock import AsyncMock, MagicMock, patch @@ -17,6 +17,7 @@ from tplink_omada_client.devices import ( OmadaSwitch, OmadaSwitchPortDetails, ) +from typing_extensions import Generator from homeassistant.components.tplink_omada.config_flow import CONF_SITE from homeassistant.components.tplink_omada.const import DOMAIN @@ -129,7 +130,6 @@ def _get_mock_client(mac: str) -> OmadaNetworkClient: if c["wireless"]: return OmadaWirelessClient(c) return OmadaWiredClient(c) - raise ValueError(f"Client with MAC {mac} not found in mock data") @pytest.fixture @@ -163,10 +163,21 @@ def mock_omada_clients_only_client( @pytest.fixture async def init_integration( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, mock_omada_client: MagicMock, ) -> MockConfigEntry: """Set up the TP-Link Omada integration for testing.""" + mock_config_entry = MockConfigEntry( + title="Test Omada Controller", + domain=DOMAIN, + data={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "mocked-password", + CONF_USERNAME: "mocked-user", + CONF_VERIFY_SSL: False, + CONF_SITE: "Default", + }, + unique_id="12345", + ) mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) diff --git a/tests/components/tplink_omada/snapshots/test_sensor.ambr b/tests/components/tplink_omada/snapshots/test_sensor.ambr deleted file mode 100644 index 6c332eb9696..00000000000 --- a/tests/components/tplink_omada/snapshots/test_sensor.ambr +++ /dev/null @@ -1,333 +0,0 @@ -# serializer version: 1 -# name: test_entities[sensor.test_poe_switch_cpu_usage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_poe_switch_cpu_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CPU usage', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cpu_usage', - 'unique_id': '54-AF-97-00-00-01_cpu_usage', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.test_poe_switch_cpu_usage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch CPU usage', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_poe_switch_cpu_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10', - }) -# --- -# name: test_entities[sensor.test_poe_switch_device_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'disconnected', - 'connected', - 'pending', - 'heartbeat_missed', - 'isolated', - 'adopt_failed', - 'managed_externally', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_poe_switch_device_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Device status', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_status', - 'unique_id': '54-AF-97-00-00-01_device_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[sensor.test_poe_switch_device_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Test PoE Switch Device status', - 'options': list([ - 'disconnected', - 'connected', - 'pending', - 'heartbeat_missed', - 'isolated', - 'adopt_failed', - 'managed_externally', - ]), - }), - 'context': , - 'entity_id': 'sensor.test_poe_switch_device_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'connected', - }) -# --- -# name: test_entities[sensor.test_poe_switch_memory_usage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_poe_switch_memory_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Memory usage', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mem_usage', - 'unique_id': '54-AF-97-00-00-01_mem_usage', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.test_poe_switch_memory_usage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Memory usage', - 'state_class': , - 'unit_of_measurement': '%', - 
}), - 'context': , - 'entity_id': 'sensor.test_poe_switch_memory_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- -# name: test_entities[sensor.test_router_cpu_usage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_router_cpu_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CPU usage', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cpu_usage', - 'unique_id': 'AA-BB-CC-DD-EE-FF_cpu_usage', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.test_router_cpu_usage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Router CPU usage', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_router_cpu_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '16', - }) -# --- -# name: test_entities[sensor.test_router_device_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'disconnected', - 'connected', - 'pending', - 'heartbeat_missed', - 'isolated', - 'adopt_failed', - 'managed_externally', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_router_device_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Device status', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_status', - 'unique_id': 'AA-BB-CC-DD-EE-FF_device_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[sensor.test_router_device_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Test Router Device status', - 'options': list([ - 'disconnected', - 'connected', - 'pending', - 'heartbeat_missed', - 'isolated', - 'adopt_failed', - 'managed_externally', - ]), - }), - 'context': , - 'entity_id': 'sensor.test_router_device_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'connected', - }) -# --- -# name: test_entities[sensor.test_router_memory_usage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.test_router_memory_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Memory usage', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mem_usage', - 'unique_id': 'AA-BB-CC-DD-EE-FF_mem_usage', - 
'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.test_router_memory_usage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Router Memory usage', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.test_router_memory_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '47', - }) -# --- diff --git a/tests/components/tplink_omada/snapshots/test_switch.ambr b/tests/components/tplink_omada/snapshots/test_switch.ambr index a13d386e721..282d2a4a6a5 100644 --- a/tests/components/tplink_omada/snapshots/test_switch.ambr +++ b/tests/components/tplink_omada/snapshots/test_switch.ambr @@ -25,6 +25,19 @@ 'state': 'on', }) # --- +# name: test_gateway_disappear_disables_switches + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Router Port 4 Internet Connected', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.test_router_port_4_internet_connected', + 'last_changed': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_gateway_port_change_disables_switch_entities StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -97,6 +110,144 @@ 'unit_of_measurement': None, }) # --- +# name: test_poe_switches.10 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test PoE Switch Port 6 PoE', + }), + 'context': , + 'entity_id': 'switch.test_poe_switch_port_6_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_poe_switches.11 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.test_poe_switch_port_6_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Port 6 PoE', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'poe_control', + 'unique_id': '54-AF-97-00-00-01_000000000000000000000006_poe', + 'unit_of_measurement': None, + }) +# --- +# name: test_poe_switches.12 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test PoE Switch Port 7 PoE', + }), + 'context': , + 'entity_id': 'switch.test_poe_switch_port_7_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_poe_switches.13 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.test_poe_switch_port_7_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Port 7 PoE', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'poe_control', + 'unique_id': '54-AF-97-00-00-01_000000000000000000000007_poe', + 'unit_of_measurement': None, + }) +# --- +# name: test_poe_switches.14 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test PoE Switch Port 8 PoE', + }), + 'context': , + 'entity_id': 
'switch.test_poe_switch_port_8_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_poe_switches.15 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.test_poe_switch_port_8_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Port 8 PoE', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'poe_control', + 'unique_id': '54-AF-97-00-00-01_000000000000000000000008_poe', + 'unit_of_measurement': None, + }) +# --- # name: test_poe_switches.2 StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -143,3 +294,141 @@ 'unit_of_measurement': None, }) # --- +# name: test_poe_switches.4 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test PoE Switch Port 3 PoE', + }), + 'context': , + 'entity_id': 'switch.test_poe_switch_port_3_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_poe_switches.5 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.test_poe_switch_port_3_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Port 3 PoE', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'poe_control', + 'unique_id': '54-AF-97-00-00-01_000000000000000000000003_poe', + 'unit_of_measurement': None, + }) +# --- +# name: test_poe_switches.6 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test PoE Switch Port 4 PoE', + }), + 'context': , + 'entity_id': 'switch.test_poe_switch_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_poe_switches.7 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.test_poe_switch_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Port 4 PoE', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'poe_control', + 'unique_id': '54-AF-97-00-00-01_000000000000000000000004_poe', + 'unit_of_measurement': None, + }) +# --- +# name: test_poe_switches.8 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test PoE Switch Port 5 PoE', + }), + 'context': , + 'entity_id': 'switch.test_poe_switch_port_5_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_poe_switches.9 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': 
None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.test_poe_switch_port_5_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Port 5 PoE', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'poe_control', + 'unique_id': '54-AF-97-00-00-01_000000000000000000000005_poe', + 'unit_of_measurement': None, + }) +# --- diff --git a/tests/components/tplink_omada/test_config_flow.py b/tests/components/tplink_omada/test_config_flow.py index 28ef0da170f..08606fe126c 100644 --- a/tests/components/tplink_omada/test_config_flow.py +++ b/tests/components/tplink_omada/test_config_flow.py @@ -251,7 +251,14 @@ async def test_async_step_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -291,7 +298,14 @@ async def test_async_step_reauth_invalid_auth(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/tplink_omada/test_init.py b/tests/components/tplink_omada/test_init.py deleted file mode 100644 index 762168df9d6..00000000000 --- a/tests/components/tplink_omada/test_init.py +++ /dev/null @@ -1,47 +0,0 @@ -"""Tests for TP-Link Omada integration init.""" - -from unittest.mock import MagicMock - -from homeassistant.components.tplink_omada.const import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from tests.common import MockConfigEntry - -MOCK_ENTRY_DATA = { - "host": "https://fake.omada.host", - "verify_ssl": True, - "site": "SiteId", - "username": "test-username", - "password": "test-password", -} - - -async def test_missing_devices_removed_at_startup( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_omada_client: MagicMock, -) -> None: - """Test missing devices are removed at startup.""" - mock_config_entry = MockConfigEntry( - title="Test Omada Controller", - domain=DOMAIN, - data=dict(MOCK_ENTRY_DATA), - unique_id="12345", - ) - mock_config_entry.add_to_hass(hass) - - device_entry = device_registry.async_get_or_create( - config_entry_id=mock_config_entry.entry_id, - identifiers={(DOMAIN, "AA:BB:CC:DD:EE:FF")}, - manufacturer="TPLink", - name="Old Device", - model="Some old model", - ) - - assert device_registry.async_get(device_entry.id) == device_entry - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert device_registry.async_get(device_entry.id) is None diff --git a/tests/components/tplink_omada/test_sensor.py b/tests/components/tplink_omada/test_sensor.py deleted file mode 100644 index 
54df7c5bcad..00000000000 --- a/tests/components/tplink_omada/test_sensor.py +++ /dev/null @@ -1,117 +0,0 @@ -"""Tests for TP-Link Omada sensor entities.""" - -from datetime import timedelta -import json -from unittest.mock import MagicMock, patch - -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy.assertion import SnapshotAssertion -from tplink_omada_client.definitions import DeviceStatus, DeviceStatusCategory -from tplink_omada_client.devices import OmadaGatewayPortStatus, OmadaListDevice - -from homeassistant.components.tplink_omada.const import DOMAIN -from homeassistant.components.tplink_omada.coordinator import POLL_DEVICES -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_fixture, - snapshot_platform, -) - -POLL_INTERVAL = timedelta(seconds=POLL_DEVICES) - - -@pytest.fixture -async def init_integration( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_omada_client: MagicMock, -) -> MockConfigEntry: - """Set up the TP-Link Omada integration for testing.""" - mock_config_entry.add_to_hass(hass) - - with patch("homeassistant.components.tplink_omada.PLATFORMS", ["sensor"]): - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - return mock_config_entry - - -async def test_entities( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - init_integration: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test the creation of the TP-Link Omada sensor entities.""" - await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id) - - -async def test_device_specific_status( - hass: HomeAssistant, - init_integration: MockConfigEntry, - mock_omada_site_client: MagicMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test a connection status is reported from known detailed status.""" - entity_id = "sensor.test_poe_switch_device_status" - entity = hass.states.get(entity_id) - assert entity is not None - assert entity.state == "connected" - - _set_test_device_status( - mock_omada_site_client, - DeviceStatus.ADOPT_FAILED.value, - DeviceStatusCategory.CONNECTED.value, - ) - - freezer.tick(POLL_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - entity = hass.states.get(entity_id) - assert entity.state == "adopt_failed" - - -async def test_device_category_status( - hass: HomeAssistant, - init_integration: MockConfigEntry, - mock_omada_site_client: MagicMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test a connection status is reported, with fallback to status category.""" - entity_id = "sensor.test_poe_switch_device_status" - entity = hass.states.get(entity_id) - assert entity is not None - assert entity.state == "connected" - - _set_test_device_status( - mock_omada_site_client, - DeviceStatus.PENDING_WIRELESS, - DeviceStatusCategory.PENDING.value, - ) - - freezer.tick(POLL_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - entity = hass.states.get(entity_id) - assert entity.state == "pending" - - -def _set_test_device_status( - mock_omada_site_client: MagicMock, - status: int, - status_category: int, -) -> OmadaGatewayPortStatus: - devices_data = json.loads(load_fixture("devices.json", DOMAIN)) - devices_data[1]["status"] = status - devices_data[1]["statusCategory"] = status_category - devices = [OmadaListDevice(d) for d in devices_data] - - 
mock_omada_site_client.get_devices.reset_mock() - mock_omada_site_client.get_devices.return_value = devices diff --git a/tests/components/tplink_omada/test_switch.py b/tests/components/tplink_omada/test_switch.py index abce87714a9..7d83140cc95 100644 --- a/tests/components/tplink_omada/test_switch.py +++ b/tests/components/tplink_omada/test_switch.py @@ -19,7 +19,7 @@ from tplink_omada_client.exceptions import InvalidDevice from homeassistant.components import switch from homeassistant.components.tplink_omada.coordinator import POLL_GATEWAY from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant, ServiceResponse +from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.util.dt import utcnow @@ -336,7 +336,7 @@ def _get_updated_gateway_port_status( return OmadaGatewayPortStatus(gateway_data["portStats"][port]) -def call_service(hass: HomeAssistant, service: str, entity_id: str) -> ServiceResponse: +def call_service(hass: HomeAssistant, service: str, entity_id: str): """Call any service on entity.""" return hass.services.async_call( switch.DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True diff --git a/tests/components/traccar/test_init.py b/tests/components/traccar/test_init.py index fb90262a084..feacbb7b13f 100644 --- a/tests/components/traccar/test_init.py +++ b/tests/components/traccar/test_init.py @@ -11,9 +11,9 @@ from homeassistant.components import traccar, zone from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN from homeassistant.components.device_tracker.legacy import Device from homeassistant.components.traccar import DOMAIN, TRACKER_UPDATE +from homeassistant.config import async_process_ha_core_config from homeassistant.const import STATE_HOME, STATE_NOT_HOME from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import DATA_DISPATCHER @@ -45,7 +45,7 @@ async def traccar_client( @pytest.fixture(autouse=True) -async def setup_zones(hass: HomeAssistant) -> None: +async def setup_zones(hass): """Set up Zone config in HA.""" assert await async_setup_component( hass, @@ -63,7 +63,7 @@ async def setup_zones(hass: HomeAssistant) -> None: @pytest.fixture(name="webhook_id") -async def webhook_id_fixture(hass: HomeAssistant, client: TestClient) -> str: +async def webhook_id_fixture(hass, client): """Initialize the Traccar component and get the webhook_id.""" await async_process_ha_core_config( hass, @@ -121,14 +121,18 @@ async def test_enter_and_exit( req = await client.post(url, params=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['id']}").state + state_name = hass.states.get( + "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["id"]) + ).state assert state_name == STATE_HOME # Enter Home again req = await client.post(url, params=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['id']}").state + state_name = hass.states.get( + "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["id"]) + ).state assert state_name == STATE_HOME data["lon"] = 0 @@ -138,7 +142,9 @@ async def test_enter_and_exit( req = await client.post(url, params=data) await 
hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['id']}").state + state_name = hass.states.get( + "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["id"]) + ).state assert state_name == STATE_NOT_HOME assert len(device_registry.devices) == 1 @@ -165,7 +171,7 @@ async def test_enter_with_attrs(hass: HomeAssistant, client, webhook_id) -> None req = await client.post(url, params=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['id']}") + state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["id"])) assert state.state == STATE_NOT_HOME assert state.attributes["gps_accuracy"] == 10.5 assert state.attributes["battery_level"] == 10.0 @@ -188,7 +194,7 @@ async def test_enter_with_attrs(hass: HomeAssistant, client, webhook_id) -> None req = await client.post(url, params=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['id']}") + state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["id"])) assert state.state == STATE_HOME assert state.attributes["gps_accuracy"] == 123 assert state.attributes["battery_level"] == 23 @@ -208,7 +214,7 @@ async def test_two_devices(hass: HomeAssistant, client, webhook_id) -> None: await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data_device_1['id']}") + state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_1["id"])) assert state.state == "not_home" # Enter Home @@ -220,9 +226,9 @@ async def test_two_devices(hass: HomeAssistant, client, webhook_id) -> None: await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data_device_2['id']}") + state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_2["id"])) assert state.state == "home" - state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data_device_1['id']}") + state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_1["id"])) assert state.state == "not_home" @@ -238,7 +244,9 @@ async def test_load_unload_entry(hass: HomeAssistant, client, webhook_id) -> Non req = await client.post(url, params=data) await hass.async_block_till_done() assert req.status == HTTPStatus.OK - state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['id']}").state + state_name = hass.states.get( + "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["id"]) + ).state assert state_name == STATE_HOME assert len(hass.data[DATA_DISPATCHER][TRACKER_UPDATE]) == 1 diff --git a/tests/components/traccar_server/conftest.py b/tests/components/traccar_server/conftest.py index 0013b3249bd..6a8e428e7a2 100644 --- a/tests/components/traccar_server/conftest.py +++ b/tests/components/traccar_server/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Traccar Server tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from pytraccar import ApiClient, SubscriptionStatus +from typing_extensions import Generator from homeassistant.components.traccar_server.const import ( CONF_CUSTOM_ATTRIBUTES, diff --git a/tests/components/traccar_server/test_config_flow.py b/tests/components/traccar_server/test_config_flow.py index 0418e4a5a72..5da6f592957 100644 --- a/tests/components/traccar_server/test_config_flow.py +++ b/tests/components/traccar_server/test_config_flow.py @@ 
-1,18 +1,21 @@ """Test the Traccar Server config flow.""" -from collections.abc import Generator +from typing import Any from unittest.mock import AsyncMock import pytest from pytraccar import TraccarException +from typing_extensions import Generator from homeassistant import config_entries +from homeassistant.components.traccar.device_tracker import PLATFORM_SCHEMA from homeassistant.components.traccar_server.const import ( CONF_CUSTOM_ATTRIBUTES, CONF_EVENTS, CONF_MAX_ACCURACY, CONF_SKIP_ACCURACY_FILTER_FOR, DOMAIN, + EVENTS, ) from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( @@ -150,6 +153,127 @@ async def test_options( } +@pytest.mark.parametrize( + ("imported", "data", "options"), + [ + ( + { + CONF_HOST: "1.1.1.1", + CONF_PORT: 443, + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + { + CONF_HOST: "1.1.1.1", + CONF_PORT: "443", + CONF_VERIFY_SSL: True, + CONF_SSL: False, + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + { + CONF_EVENTS: [], + CONF_CUSTOM_ATTRIBUTES: [], + CONF_SKIP_ACCURACY_FILTER_FOR: [], + CONF_MAX_ACCURACY: 0, + }, + ), + ( + { + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_SSL: True, + "event": ["device_online", "device_offline"], + }, + { + CONF_HOST: "1.1.1.1", + CONF_PORT: "8082", + CONF_VERIFY_SSL: True, + CONF_SSL: True, + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + { + CONF_EVENTS: ["device_online", "device_offline"], + CONF_CUSTOM_ATTRIBUTES: [], + CONF_SKIP_ACCURACY_FILTER_FOR: [], + CONF_MAX_ACCURACY: 0, + }, + ), + ( + { + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_SSL: True, + "event": ["device_online", "device_offline", "all_events"], + }, + { + CONF_HOST: "1.1.1.1", + CONF_PORT: "8082", + CONF_VERIFY_SSL: True, + CONF_SSL: True, + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + { + CONF_EVENTS: list(EVENTS.values()), + CONF_CUSTOM_ATTRIBUTES: [], + CONF_SKIP_ACCURACY_FILTER_FOR: [], + CONF_MAX_ACCURACY: 0, + }, + ), + ], +) +async def test_import_from_yaml( + hass: HomeAssistant, + imported: dict[str, Any], + data: dict[str, Any], + options: dict[str, Any], + mock_traccar_api_client: Generator[AsyncMock], +) -> None: + """Test importing configuration from YAML.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data=PLATFORM_SCHEMA({"platform": "traccar", **imported}), + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == f"{data[CONF_HOST]}:{data[CONF_PORT]}" + assert result["data"] == data + assert result["options"] == options + assert result["result"].state is ConfigEntryState.LOADED + + +async def test_abort_import_already_configured(hass: HomeAssistant) -> None: + """Test abort for existing server while importing.""" + + config_entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_HOST: "1.1.1.1", CONF_PORT: "8082"}, + ) + + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data=PLATFORM_SCHEMA( + { + "platform": "traccar", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_HOST: "1.1.1.1", + CONF_PORT: "8082", + } + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + async def test_abort_already_configured( hass: 
HomeAssistant, mock_config_entry: MockConfigEntry, diff --git a/tests/components/traccar_server/test_diagnostics.py b/tests/components/traccar_server/test_diagnostics.py index 738fea1a45d..15d74ef9ef5 100644 --- a/tests/components/traccar_server/test_diagnostics.py +++ b/tests/components/traccar_server/test_diagnostics.py @@ -1,9 +1,9 @@ """Test Traccar Server diagnostics.""" -from collections.abc import Generator from unittest.mock import AsyncMock from syrupy import SnapshotAssertion +from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er diff --git a/tests/components/trace/test_websocket_api.py b/tests/components/trace/test_websocket_api.py index 43664c6e7ce..92ba2c67020 100644 --- a/tests/components/trace/test_websocket_api.py +++ b/tests/components/trace/test_websocket_api.py @@ -9,11 +9,11 @@ from unittest.mock import patch import pytest from pytest_unordered import unordered +from homeassistant.bootstrap import async_setup_component from homeassistant.components.trace.const import DEFAULT_STORED_TRACES from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import Context, CoreState, HomeAssistant, callback from homeassistant.helpers.typing import UNDEFINED -from homeassistant.setup import async_setup_component from homeassistant.util.uuid import random_uuid_hex from tests.common import load_fixture @@ -39,15 +39,11 @@ def _find_traces(traces, trace_type, item_id): async def _setup_automation_or_script( - hass: HomeAssistant, - domain: str, - configs: list[dict[str, Any]], - script_config: dict[str, Any] | None = None, - stored_traces: int | None = None, -) -> None: + hass, domain, configs, script_config=None, stored_traces=None +): """Set up automations or scripts from automation config.""" if domain == "script": - configs = {config["id"]: {"sequence": config["actions"]} for config in configs} + configs = {config["id"]: {"sequence": config["action"]} for config in configs} if script_config: if domain == "automation": @@ -70,13 +66,7 @@ async def _setup_automation_or_script( assert await async_setup_component(hass, domain, {domain: configs}) -async def _run_automation_or_script( - hass: HomeAssistant, - domain: str, - config: dict[str, Any], - event: str, - context: dict[str, Any] | None = None, -) -> None: +async def _run_automation_or_script(hass, domain, config, event, context=None): if domain == "automation": hass.bus.async_fire(event, context=context) else: @@ -85,7 +75,7 @@ async def _run_automation_or_script( def _assert_raw_config(domain, config, trace): if domain == "script": - config = {"sequence": config["actions"]} + config = {"sequence": config["action"]} assert trace["config"] == config @@ -152,20 +142,20 @@ async def test_get_trace( sun_config = { "id": "sun", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": {"service": "test.automation"}, + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": {"service": "test.automation"}, } moon_config = { "id": "moon", - "triggers": [ + "trigger": [ {"platform": "event", "event_type": "test_event2"}, {"platform": "event", "event_type": "test_event3"}, ], - "conditions": { + "condition": { "condition": "template", "value_template": "{{ trigger.event.event_type=='test_event2' }}", }, - "actions": {"event": "another_event"}, + "action": {"event": "another_event"}, } sun_action = { @@ -217,7 +207,7 @@ async def test_get_trace( _assert_raw_config(domain, 
sun_config, trace) assert trace["blueprint_inputs"] is None assert trace["context"] - assert trace["error"] == "Action test.automation not found" + assert trace["error"] == "Service test.automation not found" assert trace["state"] == "stopped" assert trace["script_execution"] == "error" assert trace["item_id"] == "sun" @@ -551,13 +541,13 @@ async def test_trace_overflow( sun_config = { "id": "sun", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": {"event": "some_event"}, + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": {"event": "some_event"}, } moon_config = { "id": "moon", - "triggers": {"platform": "event", "event_type": "test_event2"}, - "actions": {"event": "another_event"}, + "trigger": {"platform": "event", "event_type": "test_event2"}, + "action": {"event": "another_event"}, } await _setup_automation_or_script( hass, domain, [sun_config, moon_config], stored_traces=stored_traces @@ -632,13 +622,13 @@ async def test_restore_traces_overflow( hass_storage["trace.saved_traces"] = saved_traces sun_config = { "id": "sun", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": {"event": "some_event"}, + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": {"event": "some_event"}, } moon_config = { "id": "moon", - "triggers": {"platform": "event", "event_type": "test_event2"}, - "actions": {"event": "another_event"}, + "trigger": {"platform": "event", "event_type": "test_event2"}, + "action": {"event": "another_event"}, } await _setup_automation_or_script(hass, domain, [sun_config, moon_config]) await hass.async_start() @@ -713,13 +703,13 @@ async def test_restore_traces_late_overflow( hass_storage["trace.saved_traces"] = saved_traces sun_config = { "id": "sun", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": {"event": "some_event"}, + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": {"event": "some_event"}, } moon_config = { "id": "moon", - "triggers": {"platform": "event", "event_type": "test_event2"}, - "actions": {"event": "another_event"}, + "trigger": {"platform": "event", "event_type": "test_event2"}, + "action": {"event": "another_event"}, } await _setup_automation_or_script(hass, domain, [sun_config, moon_config]) await hass.async_start() @@ -765,8 +755,8 @@ async def test_trace_no_traces( sun_config = { "id": "sun", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": {"event": "some_event"}, + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": {"event": "some_event"}, } await _setup_automation_or_script(hass, domain, [sun_config], stored_traces=0) @@ -832,20 +822,20 @@ async def test_list_traces( sun_config = { "id": "sun", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": {"service": "test.automation"}, + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": {"service": "test.automation"}, } moon_config = { "id": "moon", - "triggers": [ + "trigger": [ {"platform": "event", "event_type": "test_event2"}, {"platform": "event", "event_type": "test_event3"}, ], - "conditions": { + "condition": { "condition": "template", "value_template": "{{ trigger.event.event_type=='test_event2' }}", }, - "actions": {"event": "another_event"}, + "action": {"event": "another_event"}, } await _setup_automation_or_script(hass, domain, [sun_config, moon_config]) @@ -909,7 +899,7 @@ async def test_list_traces( assert len(_find_traces(response["result"], 
domain, "sun")) == 1 trace = _find_traces(response["result"], domain, "sun")[0] assert trace["last_step"] == last_step[0].format(prefix=prefix) - assert trace["error"] == "Action test.automation not found" + assert trace["error"] == "Service test.automation not found" assert trace["state"] == "stopped" assert trace["script_execution"] == script_execution[0] assert trace["timestamp"] @@ -965,8 +955,8 @@ async def test_nested_traces( sun_config = { "id": "sun", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": {"service": "script.moon"}, + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": {"service": "script.moon"}, } moon_config = {"moon": {"sequence": {"event": "another_event"}}} await _setup_automation_or_script(hass, domain, [sun_config], moon_config) @@ -1036,8 +1026,8 @@ async def test_breakpoints( sun_config = { "id": "sun", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": [ + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": [ {"event": "event0"}, {"event": "event1"}, {"event": "event2"}, @@ -1206,8 +1196,8 @@ async def test_breakpoints_2( sun_config = { "id": "sun", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": [ + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": [ {"event": "event0"}, {"event": "event1"}, {"event": "event2"}, @@ -1311,8 +1301,8 @@ async def test_breakpoints_3( sun_config = { "id": "sun", - "triggers": {"platform": "event", "event_type": "test_event"}, - "actions": [ + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": [ {"event": "event0"}, {"event": "event1"}, {"event": "event2"}, @@ -1649,7 +1639,7 @@ async def test_trace_blueprint_automation( assert trace["config"]["id"] == "sun" assert trace["blueprint_inputs"] == sun_config assert trace["context"] - assert trace["error"] == "Action test.automation not found" + assert trace["error"] == "Service test.automation not found" assert trace["state"] == "stopped" assert trace["script_execution"] == "error" assert trace["item_id"] == "sun" diff --git a/tests/components/tractive/conftest.py b/tests/components/tractive/conftest.py index 88c68a4b62f..9a17a557c49 100644 --- a/tests/components/tractive/conftest.py +++ b/tests/components/tractive/conftest.py @@ -1,16 +1,15 @@ """Common fixtures for the Tractive tests.""" -from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch from aiotractive.trackable_object import TrackableObject from aiotractive.tracker import Tracker import pytest +from typing_extensions import Generator from homeassistant.components.tractive.const import DOMAIN, SERVER_UNAVAILABLE from homeassistant.const import CONF_EMAIL, CONF_PASSWORD -from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_send from tests.common import MockConfigEntry, load_json_object_fixture @@ -77,7 +76,7 @@ def mock_tractive_client() -> Generator[AsyncMock]: } entry.runtime_data.client._send_switch_update(event) - def send_server_unavailable_event(hass: HomeAssistant) -> None: + def send_server_unavailable_event(hass): """Send server unavailable event.""" async_dispatcher_send(hass, f"{SERVER_UNAVAILABLE}-12345") diff --git a/tests/components/tractive/snapshots/test_binary_sensor.ambr b/tests/components/tractive/snapshots/test_binary_sensor.ambr index 4b610e927d5..c6d50fb0fbb 100644 --- 
a/tests/components/tractive/snapshots/test_binary_sensor.ambr +++ b/tests/components/tractive/snapshots/test_binary_sensor.ambr @@ -46,3 +46,50 @@ 'state': 'on', }) # --- +# name: test_sensor[binary_sensor.test_pet_tracker_battery_charging-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_pet_tracker_battery_charging', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tracker battery charging', + 'platform': 'tractive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tracker_battery_charging', + 'unique_id': 'pet_id_123_battery_charging', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[binary_sensor.test_pet_tracker_battery_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery_charging', + 'friendly_name': 'Test Pet Tracker battery charging', + }), + 'context': , + 'entity_id': 'binary_sensor.test_pet_tracker_battery_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/tractive/snapshots/test_device_tracker.ambr b/tests/components/tractive/snapshots/test_device_tracker.ambr index 4e7c5bfe173..3a145a48b5a 100644 --- a/tests/components/tractive/snapshots/test_device_tracker.ambr +++ b/tests/components/tractive/snapshots/test_device_tracker.ambr @@ -50,3 +50,54 @@ 'state': 'not_home', }) # --- +# name: test_sensor[device_tracker.test_pet_tracker-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'device_tracker', + 'entity_category': , + 'entity_id': 'device_tracker.test_pet_tracker', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tracker', + 'platform': 'tractive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tracker', + 'unique_id': 'pet_id_123', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[device_tracker.test_pet_tracker-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'battery_level': 88, + 'friendly_name': 'Test Pet Tracker', + 'gps_accuracy': 99, + 'latitude': 22.333, + 'longitude': 44.555, + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.test_pet_tracker', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- diff --git a/tests/components/tractive/snapshots/test_diagnostics.ambr b/tests/components/tractive/snapshots/test_diagnostics.ambr index 11427a84801..a66247749b7 100644 --- a/tests/components/tractive/snapshots/test_diagnostics.ambr +++ b/tests/components/tractive/snapshots/test_diagnostics.ambr @@ -7,8 +7,6 @@ 'password': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'tractive', 'entry_id': '3bd2acb0e4f0476d40865546d0d91921', 'minor_version': 1, diff --git a/tests/components/tractive/snapshots/test_switch.ambr 
b/tests/components/tractive/snapshots/test_switch.ambr index 08e0c984d0c..ea9ea9d9e48 100644 --- a/tests/components/tractive/snapshots/test_switch.ambr +++ b/tests/components/tractive/snapshots/test_switch.ambr @@ -1,4 +1,142 @@ # serializer version: 1 +# name: test_sensor[switch.test_pet_live_tracking-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.test_pet_live_tracking', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Live tracking', + 'platform': 'tractive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'live_tracking', + 'unique_id': 'pet_id_123_live_tracking', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.test_pet_live_tracking-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Pet Live tracking', + }), + 'context': , + 'entity_id': 'switch.test_pet_live_tracking', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.test_pet_tracker_buzzer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.test_pet_tracker_buzzer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tracker buzzer', + 'platform': 'tractive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tracker_buzzer', + 'unique_id': 'pet_id_123_buzzer', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.test_pet_tracker_buzzer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Pet Tracker buzzer', + }), + 'context': , + 'entity_id': 'switch.test_pet_tracker_buzzer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.test_pet_tracker_led-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.test_pet_tracker_led', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tracker LED', + 'platform': 'tractive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tracker_led', + 'unique_id': 'pet_id_123_led', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.test_pet_tracker_led-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Pet Tracker LED', + }), + 'context': , + 'entity_id': 'switch.test_pet_tracker_led', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: 
test_switch[switch.test_pet_live_tracking-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tractive/test_config_flow.py b/tests/components/tractive/test_config_flow.py index 691bf671afd..5cedb51e5af 100644 --- a/tests/components/tractive/test_config_flow.py +++ b/tests/components/tractive/test_config_flow.py @@ -110,7 +110,15 @@ async def test_reauthentication(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -143,7 +151,15 @@ async def test_reauthentication_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -173,7 +189,15 @@ async def test_reauthentication_unknown_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -203,7 +227,15 @@ async def test_reauthentication_failure_no_existing_entry(hass: HomeAssistant) - ) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/tractive/test_diagnostics.py b/tests/components/tractive/test_diagnostics.py index ce07b4d6e2a..cc4fcdeba15 100644 --- a/tests/components/tractive/test_diagnostics.py +++ b/tests/components/tractive/test_diagnostics.py @@ -3,7 +3,6 @@ from unittest.mock import AsyncMock from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -28,4 +27,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot diff --git a/tests/components/tradfri/conftest.py b/tests/components/tradfri/conftest.py index 4b0b742850b..08afe77b4a3 100644 --- a/tests/components/tradfri/conftest.py +++ b/tests/components/tradfri/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Callable, Generator +from collections.abc import Callable import json from typing import Any from unittest.mock import AsyncMock, MagicMock, patch @@ -12,6 +12,7 @@ from pytradfri.command import Command from pytradfri.const import ATTR_FIRMWARE_VERSION, ATTR_GATEWAY_ID from pytradfri.device import Device from pytradfri.gateway import Gateway +from typing_extensions import Generator from homeassistant.components.tradfri.const 
import DOMAIN diff --git a/tests/components/tradfri/test_config_flow.py b/tests/components/tradfri/test_config_flow.py index 5c06851782c..af2fdc22d2a 100644 --- a/tests/components/tradfri/test_config_flow.py +++ b/tests/components/tradfri/test_config_flow.py @@ -86,10 +86,6 @@ async def test_user_connection_timeout( assert result["errors"] == {"base": "timeout"} -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.tradfri.config.error.invalid_security_code"], -) async def test_user_connection_bad_key( hass: HomeAssistant, mock_auth, mock_entry_setup ) -> None: diff --git a/tests/components/tradfri/test_cover.py b/tests/components/tradfri/test_cover.py index 59f3f8a956a..5aa4e75728d 100644 --- a/tests/components/tradfri/test_cover.py +++ b/tests/components/tradfri/test_cover.py @@ -8,12 +8,8 @@ import pytest from pytradfri.const import ATTR_REACHABLE_STATE from pytradfri.device import Device -from homeassistant.components.cover import ( - ATTR_CURRENT_POSITION, - DOMAIN as COVER_DOMAIN, - CoverState, -) -from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.components.cover import ATTR_CURRENT_POSITION, DOMAIN as COVER_DOMAIN +from homeassistant.const import STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from .common import CommandStore, setup_integration @@ -31,7 +27,7 @@ async def test_cover_available( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 60 assert state.attributes["model"] == "FYRTUR block-out roller blind" @@ -48,11 +44,11 @@ async def test_cover_available( @pytest.mark.parametrize( ("service", "service_data", "expected_state", "expected_position"), [ - ("set_cover_position", {"position": 100}, CoverState.OPEN, 100), - ("set_cover_position", {"position": 0}, CoverState.CLOSED, 0), - ("open_cover", {}, CoverState.OPEN, 100), - ("close_cover", {}, CoverState.CLOSED, 0), - ("stop_cover", {}, CoverState.OPEN, 60), + ("set_cover_position", {"position": 100}, STATE_OPEN, 100), + ("set_cover_position", {"position": 0}, STATE_CLOSED, 0), + ("open_cover", {}, STATE_OPEN, 100), + ("close_cover", {}, STATE_CLOSED, 0), + ("stop_cover", {}, STATE_OPEN, 60), ], ) async def test_cover_services( @@ -70,7 +66,7 @@ async def test_cover_services( state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 60 await hass.services.async_call( diff --git a/tests/components/tradfri/test_fan.py b/tests/components/tradfri/test_fan.py index 4f72e4709e9..2abe03d629a 100644 --- a/tests/components/tradfri/test_fan.py +++ b/tests/components/tradfri/test_fan.py @@ -52,7 +52,7 @@ async def test_fan_available( assert state.attributes[ATTR_PERCENTAGE_STEP] == pytest.approx(2.040816) assert state.attributes[ATTR_PRESET_MODES] == ["Auto"] assert state.attributes[ATTR_PRESET_MODE] is None - assert state.attributes[ATTR_SUPPORTED_FEATURES] == 57 + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 9 await command_store.trigger_observe_callback( hass, device, {ATTR_REACHABLE_STATE: 0} @@ -172,7 +172,7 @@ async def test_services( assert state.attributes[ATTR_PERCENTAGE_STEP] == pytest.approx(2.040816) assert state.attributes[ATTR_PRESET_MODES] == ["Auto"] assert state.attributes[ATTR_PRESET_MODE] is None - assert state.attributes[ATTR_SUPPORTED_FEATURES] == 57 + assert 
state.attributes[ATTR_SUPPORTED_FEATURES] == 9 await hass.services.async_call( FAN_DOMAIN, diff --git a/tests/components/trafikverket_camera/conftest.py b/tests/components/trafikverket_camera/conftest.py index cef85af2228..61eebb623b2 100644 --- a/tests/components/trafikverket_camera/conftest.py +++ b/tests/components/trafikverket_camera/conftest.py @@ -6,7 +6,7 @@ from datetime import datetime from unittest.mock import patch import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket.trafikverket_camera import CameraInfo from homeassistant.components.trafikverket_camera.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -21,9 +21,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(name="load_int") async def load_integration_from_entry( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - get_camera: CameraInfoModel, + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, get_camera: CameraInfo ) -> MockConfigEntry: """Set up the Trafikverket Camera integration in Home Assistant.""" aioclient_mock.get( @@ -53,10 +51,10 @@ async def load_integration_from_entry( @pytest.fixture(name="get_camera") -def fixture_get_camera() -> CameraInfoModel: +def fixture_get_camera() -> CameraInfo: """Construct Camera Mock.""" - return CameraInfoModel( + return CameraInfo( camera_name="Test Camera", camera_id="1234", active=True, @@ -74,10 +72,10 @@ def fixture_get_camera() -> CameraInfoModel: @pytest.fixture(name="get_camera2") -def fixture_get_camera2() -> CameraInfoModel: +def fixture_get_camera2() -> CameraInfo: """Construct Camera Mock 2.""" - return CameraInfoModel( + return CameraInfo( camera_name="Test Camera2", camera_id="5678", active=True, @@ -95,11 +93,11 @@ def fixture_get_camera2() -> CameraInfoModel: @pytest.fixture(name="get_cameras") -def fixture_get_cameras() -> CameraInfoModel: +def fixture_get_cameras() -> CameraInfo: """Construct Camera Mock with multiple cameras.""" return [ - CameraInfoModel( + CameraInfo( camera_name="Test Camera", camera_id="1234", active=True, @@ -114,7 +112,7 @@ def fixture_get_cameras() -> CameraInfoModel: status="Running", camera_type="Road", ), - CameraInfoModel( + CameraInfo( camera_name="Test Camera2", camera_id="5678", active=True, @@ -133,10 +131,10 @@ def fixture_get_cameras() -> CameraInfoModel: @pytest.fixture(name="get_camera_no_location") -def fixture_get_camera_no_location() -> CameraInfoModel: +def fixture_get_camera_no_location() -> CameraInfo: """Construct Camera Mock.""" - return CameraInfoModel( + return CameraInfo( camera_name="Test Camera", camera_id="1234", active=True, diff --git a/tests/components/trafikverket_camera/test_binary_sensor.py b/tests/components/trafikverket_camera/test_binary_sensor.py index 6750c05772b..6c694f76233 100644 --- a/tests/components/trafikverket_camera/test_binary_sensor.py +++ b/tests/components/trafikverket_camera/test_binary_sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket.trafikverket_camera import CameraInfo from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_ON @@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant async def test_sensor( hass: HomeAssistant, load_int: ConfigEntry, - get_camera: CameraInfoModel, + get_camera: CameraInfo, ) -> None: """Test the Trafikverket Camera binary sensor.""" diff --git a/tests/components/trafikverket_camera/test_camera.py 
b/tests/components/trafikverket_camera/test_camera.py index 51d4563c19b..1bf742b5f08 100644 --- a/tests/components/trafikverket_camera/test_camera.py +++ b/tests/components/trafikverket_camera/test_camera.py @@ -7,7 +7,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket.trafikverket_camera import CameraInfo from homeassistant.components.camera import async_get_image from homeassistant.config_entries import ConfigEntry @@ -24,7 +24,7 @@ async def test_camera( freezer: FrozenDateTimeFactory, monkeypatch: pytest.MonkeyPatch, aioclient_mock: AiohttpClientMocker, - get_camera: CameraInfoModel, + get_camera: CameraInfo, ) -> None: """Test the Trafikverket Camera sensor.""" state1 = hass.states.get("camera.test_camera") diff --git a/tests/components/trafikverket_camera/test_config_flow.py b/tests/components/trafikverket_camera/test_config_flow.py index 48162a17e2c..8162db076fa 100644 --- a/tests/components/trafikverket_camera/test_config_flow.py +++ b/tests/components/trafikverket_camera/test_config_flow.py @@ -6,7 +6,7 @@ from unittest.mock import patch import pytest from pytrafikverket.exceptions import InvalidAuthentication, NoCameraFound, UnknownError -from pytrafikverket.models import CameraInfoModel +from pytrafikverket.trafikverket_camera import CameraInfo from homeassistant import config_entries from homeassistant.components.trafikverket_camera.const import DOMAIN @@ -17,7 +17,7 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry -async def test_form(hass: HomeAssistant, get_camera: CameraInfoModel) -> None: +async def test_form(hass: HomeAssistant, get_camera: CameraInfo) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -56,9 +56,7 @@ async def test_form(hass: HomeAssistant, get_camera: CameraInfoModel) -> None: async def test_form_multiple_cameras( - hass: HomeAssistant, - get_cameras: list[CameraInfoModel], - get_camera2: CameraInfoModel, + hass: HomeAssistant, get_cameras: list[CameraInfo], get_camera2: CameraInfo ) -> None: """Test we get the form with multiple cameras.""" @@ -110,7 +108,7 @@ async def test_form_multiple_cameras( async def test_form_no_location_data( - hass: HomeAssistant, get_camera_no_location: CameraInfoModel + hass: HomeAssistant, get_camera_no_location: CameraInfo ) -> None: """Test we get the form.""" @@ -208,7 +206,15 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -272,7 +278,15 @@ async def test_reauth_flow_error( entry.add_to_hass(hass) await hass.async_block_till_done() - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) with patch( "homeassistant.components.trafikverket_camera.config_flow.TrafikverketCamera.async_get_cameras", @@ -309,150 +323,3 @@ async def test_reauth_flow_error( "api_key": "1234567891", "id": "1234", } - - -async def 
test_reconfigure_flow( - hass: HomeAssistant, - get_cameras: list[CameraInfoModel], - get_camera2: CameraInfoModel, -) -> None: - """Test a reconfigure flow.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_API_KEY: "1234567890", - CONF_ID: "1234", - }, - unique_id="1234", - version=3, - ) - entry.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - assert result["step_id"] == "reconfigure" - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with patch( - "homeassistant.components.trafikverket_camera.config_flow.TrafikverketCamera.async_get_cameras", - return_value=get_cameras, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_API_KEY: "1234567890", - CONF_LOCATION: "Test loc", - }, - ) - await hass.async_block_till_done() - - with ( - patch( - "homeassistant.components.trafikverket_camera.config_flow.TrafikverketCamera.async_get_cameras", - return_value=[get_camera2], - ), - patch( - "homeassistant.components.trafikverket_camera.async_setup_entry", - return_value=True, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_ID: "5678", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - assert entry.data == { - "api_key": "1234567890", - "id": "5678", - } - - -@pytest.mark.parametrize( - ("side_effect", "error_key", "p_error"), - [ - ( - InvalidAuthentication, - "base", - "invalid_auth", - ), - ( - NoCameraFound, - "location", - "invalid_location", - ), - ( - UnknownError, - "base", - "cannot_connect", - ), - ], -) -async def test_reconfigure_flow_error( - hass: HomeAssistant, - get_camera: CameraInfoModel, - side_effect: Exception, - error_key: str, - p_error: str, -) -> None: - """Test a reauthentication flow with error.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_API_KEY: "1234567890", - CONF_ID: "1234", - }, - unique_id="1234", - version=3, - ) - entry.add_to_hass(hass) - await hass.async_block_till_done() - - result = await entry.start_reconfigure_flow(hass) - - with patch( - "homeassistant.components.trafikverket_camera.config_flow.TrafikverketCamera.async_get_cameras", - side_effect=side_effect, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_API_KEY: "1234567890", - CONF_LOCATION: "Test loc", - }, - ) - await hass.async_block_till_done() - - assert result2["step_id"] == "reconfigure" - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {error_key: p_error} - - with ( - patch( - "homeassistant.components.trafikverket_camera.config_flow.TrafikverketCamera.async_get_cameras", - return_value=[get_camera], - ), - patch( - "homeassistant.components.trafikverket_camera.async_setup_entry", - return_value=True, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_API_KEY: "1234567891", - CONF_LOCATION: "Test loc", - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" - assert entry.data == { - CONF_ID: "1234", - CONF_API_KEY: "1234567891", - } diff --git a/tests/components/trafikverket_camera/test_coordinator.py b/tests/components/trafikverket_camera/test_coordinator.py index f50ab56724e..3f37ad05575 100644 --- a/tests/components/trafikverket_camera/test_coordinator.py +++ 
b/tests/components/trafikverket_camera/test_coordinator.py @@ -11,9 +11,9 @@ from pytrafikverket.exceptions import ( NoCameraFound, UnknownError, ) -from pytrafikverket.models import CameraInfoModel from homeassistant.components.trafikverket_camera.const import DOMAIN +from homeassistant.components.trafikverket_camera.coordinator import CameraData from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed @@ -28,7 +28,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def test_coordinator( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - get_camera: CameraInfoModel, + get_camera: CameraData, ) -> None: """Test the Trafikverket Camera coordinator.""" aioclient_mock.get( @@ -86,7 +86,7 @@ async def test_coordinator( async def test_coordinator_failed_update( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - get_camera: CameraInfoModel, + get_camera: CameraData, sideeffect: str, p_error: Exception, entry_state: str, @@ -123,7 +123,7 @@ async def test_coordinator_failed_update( async def test_coordinator_failed_get_image( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - get_camera: CameraInfoModel, + get_camera: CameraData, ) -> None: """Test the Trafikverket Camera coordinator.""" aioclient_mock.get( diff --git a/tests/components/trafikverket_camera/test_init.py b/tests/components/trafikverket_camera/test_init.py index aaa4c3cfed7..f21d36fda27 100644 --- a/tests/components/trafikverket_camera/test_init.py +++ b/tests/components/trafikverket_camera/test_init.py @@ -7,7 +7,7 @@ from unittest.mock import patch import pytest from pytrafikverket.exceptions import UnknownError -from pytrafikverket.models import CameraInfoModel +from pytrafikverket.trafikverket_camera import CameraInfo from homeassistant.components.trafikverket_camera import async_migrate_entry from homeassistant.components.trafikverket_camera.const import DOMAIN @@ -23,7 +23,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def test_setup_entry( hass: HomeAssistant, - get_camera: CameraInfoModel, + get_camera: CameraInfo, aioclient_mock: AiohttpClientMocker, ) -> None: """Test setup entry.""" @@ -55,7 +55,7 @@ async def test_setup_entry( async def test_unload_entry( hass: HomeAssistant, - get_camera: CameraInfoModel, + get_camera: CameraInfo, aioclient_mock: AiohttpClientMocker, ) -> None: """Test unload an entry.""" @@ -89,7 +89,7 @@ async def test_unload_entry( async def test_migrate_entry( hass: HomeAssistant, - get_camera: CameraInfoModel, + get_camera: CameraInfo, aioclient_mock: AiohttpClientMocker, ) -> None: """Test migrate entry to version 2.""" @@ -136,7 +136,7 @@ async def test_migrate_entry( ) async def test_migrate_entry_fails_with_error( hass: HomeAssistant, - get_camera: CameraInfoModel, + get_camera: CameraInfo, aioclient_mock: AiohttpClientMocker, version: int, unique_id: str, @@ -205,7 +205,7 @@ async def test_migrate_entry_fails_no_id( ) entry.add_to_hass(hass) - _camera = CameraInfoModel( + _camera = CameraInfo( camera_name="Test_camera", camera_id=None, active=True, @@ -236,7 +236,7 @@ async def test_migrate_entry_fails_no_id( async def test_no_migration_needed( hass: HomeAssistant, - get_camera: CameraInfoModel, + get_camera: CameraInfo, aioclient_mock: AiohttpClientMocker, ) -> None: """Test migrate entry fails, camera returns no id.""" diff --git a/tests/components/trafikverket_camera/test_recorder.py 
b/tests/components/trafikverket_camera/test_recorder.py index d9778ab851a..23ebd3f2189 100644 --- a/tests/components/trafikverket_camera/test_recorder.py +++ b/tests/components/trafikverket_camera/test_recorder.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket.trafikverket_camera import CameraInfo from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states @@ -22,7 +22,7 @@ async def test_exclude_attributes( load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, aioclient_mock: AiohttpClientMocker, - get_camera: CameraInfoModel, + get_camera: CameraInfo, ) -> None: """Test camera has description and location excluded from recording.""" state1 = hass.states.get("camera.test_camera") diff --git a/tests/components/trafikverket_camera/test_sensor.py b/tests/components/trafikverket_camera/test_sensor.py index 0f4ef02a850..18ccbe56070 100644 --- a/tests/components/trafikverket_camera/test_sensor.py +++ b/tests/components/trafikverket_camera/test_sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket.trafikverket_camera import CameraInfo from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant async def test_sensor( hass: HomeAssistant, load_int: ConfigEntry, - get_camera: CameraInfoModel, + get_camera: CameraInfo, ) -> None: """Test the Trafikverket Camera sensor.""" diff --git a/tests/components/trafikverket_ferry/conftest.py b/tests/components/trafikverket_ferry/conftest.py index 99f3ad10636..3491b8474af 100644 --- a/tests/components/trafikverket_ferry/conftest.py +++ b/tests/components/trafikverket_ferry/conftest.py @@ -6,7 +6,7 @@ from datetime import datetime, timedelta from unittest.mock import patch import pytest -from pytrafikverket.models import FerryStopModel +from pytrafikverket.trafikverket_ferry import FerryStop from homeassistant.components.trafikverket_ferry.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -20,7 +20,7 @@ from tests.common import MockConfigEntry @pytest.fixture(name="load_int") async def load_integration_from_entry( - hass: HomeAssistant, get_ferries: list[FerryStopModel] + hass: HomeAssistant, get_ferries: list[FerryStop] ) -> MockConfigEntry: """Set up the Trafikverket Ferry integration in Home Assistant.""" config_entry = MockConfigEntry( @@ -44,51 +44,40 @@ async def load_integration_from_entry( @pytest.fixture(name="get_ferries") -def fixture_get_ferries() -> list[FerryStopModel]: +def fixture_get_ferries() -> list[FerryStop]: """Construct FerryStop Mock.""" - depart1 = FerryStopModel( - ferry_stop_id="13", - ferry_stop_name="Harbor1lane", - short_name="Harle", - deleted=False, - departure_time=datetime( - dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC - ), - other_information=[""], - deviation_id="0", - modified_time=datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), - from_harbor_name="Harbor 1", - to_harbor_name="Harbor 2", - type_name="Turnaround", + depart1 = FerryStop( + "13", + False, + datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC), + [""], + "0", + datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), + "Harbor 1", + "Harbor 2", ) - depart2 = FerryStopModel( - ferry_stop_id="14", - ferry_stop_name="Harbor1lane", - 
short_name="Harle", - deleted=False, - departure_time=datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC) + depart2 = FerryStop( + "14", + False, + datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC) + timedelta(minutes=15), - other_information=[""], - deviation_id="0", - modified_time=datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), - from_harbor_name="Harbor 1", - to_harbor_name="Harbor 2", - type_name="Turnaround", + [""], + "0", + datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), + "Harbor 1", + "Harbor 2", ) - depart3 = FerryStopModel( - ferry_stop_id="15", - ferry_stop_name="Harbor1lane", - short_name="Harle", - deleted=False, - departure_time=datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC) + depart3 = FerryStop( + "15", + False, + datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC) + timedelta(minutes=30), - other_information=[""], - deviation_id="0", - modified_time=datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), - from_harbor_name="Harbor 1", - to_harbor_name="Harbor 2", - type_name="Turnaround", + [""], + "0", + datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), + "Harbor 1", + "Harbor 2", ) return [depart1, depart2, depart3] diff --git a/tests/components/trafikverket_ferry/test_config_flow.py b/tests/components/trafikverket_ferry/test_config_flow.py index 5671d9d3fb7..1c170a917cc 100644 --- a/tests/components/trafikverket_ferry/test_config_flow.py +++ b/tests/components/trafikverket_ferry/test_config_flow.py @@ -62,7 +62,9 @@ async def test_form(hass: HomeAssistant) -> None: "weekday": ["mon", "fri"], } assert len(mock_setup_entry.mock_calls) == 1 - assert result2["result"].unique_id == "eker\u00f6-slagsta-10:00-['mon', 'fri']" + assert result2["result"].unique_id == "{}-{}-{}-{}".format( + "eker\u00f6", "slagsta", "10:00", "['mon', 'fri']" + ) @pytest.mark.parametrize( @@ -126,7 +128,15 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -193,7 +203,15 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) with patch( "homeassistant.components.trafikverket_ferry.config_flow.TrafikverketFerry.async_get_next_ferry_stop", diff --git a/tests/components/trafikverket_ferry/test_coordinator.py b/tests/components/trafikverket_ferry/test_coordinator.py index ae9a8fc3626..ef6329bfd82 100644 --- a/tests/components/trafikverket_ferry/test_coordinator.py +++ b/tests/components/trafikverket_ferry/test_coordinator.py @@ -8,7 +8,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest from pytrafikverket.exceptions import InvalidAuthentication, NoFerryFound -from pytrafikverket.models import FerryStopModel +from pytrafikverket.trafikverket_ferry import FerryStop from homeassistant.components.trafikverket_ferry.const import DOMAIN from 
homeassistant.components.trafikverket_ferry.coordinator import next_departuredate @@ -27,7 +27,7 @@ async def test_coordinator( hass: HomeAssistant, freezer: FrozenDateTimeFactory, monkeypatch: pytest.MonkeyPatch, - get_ferries: list[FerryStopModel], + get_ferries: list[FerryStop], ) -> None: """Test the Trafikverket Ferry coordinator.""" entry = MockConfigEntry( diff --git a/tests/components/trafikverket_ferry/test_init.py b/tests/components/trafikverket_ferry/test_init.py index 827711363ff..22ada7e0f40 100644 --- a/tests/components/trafikverket_ferry/test_init.py +++ b/tests/components/trafikverket_ferry/test_init.py @@ -4,7 +4,7 @@ from __future__ import annotations from unittest.mock import patch -from pytrafikverket.models import FerryStopModel +from pytrafikverket.trafikverket_ferry import FerryStop from homeassistant.components.trafikverket_ferry.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntryState @@ -15,9 +15,7 @@ from . import ENTRY_CONFIG from tests.common import MockConfigEntry -async def test_setup_entry( - hass: HomeAssistant, get_ferries: list[FerryStopModel] -) -> None: +async def test_setup_entry(hass: HomeAssistant, get_ferries: list[FerryStop]) -> None: """Test setup entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -39,9 +37,7 @@ async def test_setup_entry( assert len(mock_tvt_ferry.mock_calls) == 1 -async def test_unload_entry( - hass: HomeAssistant, get_ferries: list[FerryStopModel] -) -> None: +async def test_unload_entry(hass: HomeAssistant, get_ferries: list[FerryStop]) -> None: """Test unload an entry.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/trafikverket_ferry/test_sensor.py b/tests/components/trafikverket_ferry/test_sensor.py index bc5510b0b1d..fc8fa557714 100644 --- a/tests/components/trafikverket_ferry/test_sensor.py +++ b/tests/components/trafikverket_ferry/test_sensor.py @@ -6,7 +6,7 @@ from datetime import timedelta from unittest.mock import patch import pytest -from pytrafikverket.models import FerryStopModel +from pytrafikverket.trafikverket_ferry import FerryStop from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -19,7 +19,7 @@ async def test_sensor( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, - get_ferries: list[FerryStopModel], + get_ferries: list[FerryStop], ) -> None: """Test the Trafikverket Ferry sensor.""" state1 = hass.states.get("sensor.harbor1_departure_from") diff --git a/tests/components/trafikverket_train/conftest.py b/tests/components/trafikverket_train/conftest.py index 14671d27252..7221d96bae2 100644 --- a/tests/components/trafikverket_train/conftest.py +++ b/tests/components/trafikverket_train/conftest.py @@ -6,7 +6,7 @@ from datetime import datetime, timedelta from unittest.mock import patch import pytest -from pytrafikverket.models import TrainStopModel +from pytrafikverket.trafikverket_train import TrainStop from homeassistant.components.trafikverket_train.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -21,8 +21,8 @@ from tests.common import MockConfigEntry @pytest.fixture(name="load_int") async def load_integration_from_entry( hass: HomeAssistant, - get_trains: list[TrainStopModel], - get_train_stop: TrainStopModel, + get_trains: list[TrainStop], + get_train_stop: TrainStop, ) -> MockConfigEntry: """Set up the Trafikverket Train integration in Home Assistant.""" @@ -38,7 +38,7 @@ async def load_integration_from_entry( return_value=get_train_stop, ), 
patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", ), ): await hass.config_entries.async_setup(config_entry_id) @@ -69,11 +69,11 @@ async def load_integration_from_entry( @pytest.fixture(name="get_trains") -def fixture_get_trains() -> list[TrainStopModel]: +def fixture_get_trains() -> list[TrainStop]: """Construct TrainStop Mock.""" - depart1 = TrainStopModel( - train_stop_id=13, + depart1 = TrainStop( + id=13, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), estimated_time_at_location=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), @@ -83,8 +83,8 @@ def fixture_get_trains() -> list[TrainStopModel]: modified_time=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), product_description=["Regionaltåg"], ) - depart2 = TrainStopModel( - train_stop_id=14, + depart2 = TrainStop( + id=14, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC) + timedelta(minutes=15), @@ -95,8 +95,8 @@ def fixture_get_trains() -> list[TrainStopModel]: modified_time=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), product_description=["Regionaltåg"], ) - depart3 = TrainStopModel( - train_stop_id=15, + depart3 = TrainStop( + id=15, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC) + timedelta(minutes=30), @@ -112,11 +112,11 @@ def fixture_get_trains() -> list[TrainStopModel]: @pytest.fixture(name="get_trains_next") -def fixture_get_trains_next() -> list[TrainStopModel]: +def fixture_get_trains_next() -> list[TrainStop]: """Construct TrainStop Mock.""" - depart1 = TrainStopModel( - train_stop_id=13, + depart1 = TrainStop( + id=13, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 17, 0, tzinfo=dt_util.UTC), estimated_time_at_location=datetime(2023, 5, 1, 17, 0, tzinfo=dt_util.UTC), @@ -126,8 +126,8 @@ def fixture_get_trains_next() -> list[TrainStopModel]: modified_time=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), product_description=["Regionaltåg"], ) - depart2 = TrainStopModel( - train_stop_id=14, + depart2 = TrainStop( + id=14, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 17, 0, tzinfo=dt_util.UTC) + timedelta(minutes=15), @@ -138,8 +138,8 @@ def fixture_get_trains_next() -> list[TrainStopModel]: modified_time=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), product_description=["Regionaltåg"], ) - depart3 = TrainStopModel( - train_stop_id=15, + depart3 = TrainStop( + id=15, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 17, 0, tzinfo=dt_util.UTC) + timedelta(minutes=30), @@ -155,11 +155,11 @@ def fixture_get_trains_next() -> list[TrainStopModel]: @pytest.fixture(name="get_train_stop") -def fixture_get_train_stop() -> TrainStopModel: +def fixture_get_train_stop() -> TrainStop: """Construct TrainStop Mock.""" - return TrainStopModel( - train_stop_id=13, + return TrainStop( + id=13, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 11, 0, tzinfo=dt_util.UTC), estimated_time_at_location=None, diff --git a/tests/components/trafikverket_train/test_config_flow.py b/tests/components/trafikverket_train/test_config_flow.py index 9fe02994f05..a6ba82a85bc 100644 --- a/tests/components/trafikverket_train/test_config_flow.py +++ b/tests/components/trafikverket_train/test_config_flow.py @@ -12,7 +12,7 @@ from pytrafikverket.exceptions import ( NoTrainStationFound, UnknownError, ) -from 
pytrafikverket.models import TrainStopModel +from pytrafikverket.trafikverket_train import TrainStop from homeassistant import config_entries from homeassistant.components.trafikverket_train.const import ( @@ -73,7 +73,9 @@ async def test_form(hass: HomeAssistant) -> None: } assert result["options"] == {"filter_product": None} assert len(mock_setup_entry.mock_calls) == 1 - assert result["result"].unique_id == "stockholmc-uppsalac-10:00-['mon', 'fri']" + assert result["result"].unique_id == "{}-{}-{}-{}".format( + "stockholmc", "uppsalac", "10:00", "['mon', 'fri']" + ) async def test_form_entry_already_exist(hass: HomeAssistant) -> None: @@ -244,7 +246,15 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -318,7 +328,15 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) with ( patch( @@ -400,7 +418,15 @@ async def test_reauth_flow_error_departures( ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) with ( patch( @@ -453,8 +479,8 @@ async def test_reauth_flow_error_departures( async def test_options_flow( hass: HomeAssistant, - get_trains: list[TrainStopModel], - get_train_stop: TrainStopModel, + get_trains: list[TrainStop], + get_train_stop: TrainStop, ) -> None: """Test a reauthentication flow.""" entry = MockConfigEntry( @@ -473,7 +499,7 @@ async def test_options_flow( with ( patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", diff --git a/tests/components/trafikverket_train/test_init.py b/tests/components/trafikverket_train/test_init.py index c8fea174e83..329d8d716d0 100644 --- a/tests/components/trafikverket_train/test_init.py +++ b/tests/components/trafikverket_train/test_init.py @@ -5,7 +5,7 @@ from __future__ import annotations from unittest.mock import patch from pytrafikverket.exceptions import InvalidAuthentication, NoTrainStationFound -from pytrafikverket.models import TrainStopModel +from pytrafikverket.trafikverket_train import TrainStop from syrupy.assertion import SnapshotAssertion from homeassistant.components.trafikverket_train.const import DOMAIN @@ -18,9 +18,7 @@ from . 
import ENTRY_CONFIG, OPTIONS_CONFIG from tests.common import MockConfigEntry -async def test_unload_entry( - hass: HomeAssistant, get_trains: list[TrainStopModel] -) -> None: +async def test_unload_entry(hass: HomeAssistant, get_trains: list[TrainStop]) -> None: """Test unload an entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -34,7 +32,7 @@ async def test_unload_entry( with ( patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", @@ -54,7 +52,7 @@ async def test_unload_entry( async def test_auth_failed( hass: HomeAssistant, - get_trains: list[TrainStopModel], + get_trains: list[TrainStop], snapshot: SnapshotAssertion, ) -> None: """Test authentication failed.""" @@ -69,7 +67,7 @@ async def test_auth_failed( entry.add_to_hass(hass) with patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", side_effect=InvalidAuthentication, ): await hass.config_entries.async_setup(entry.entry_id) @@ -84,7 +82,7 @@ async def test_auth_failed( async def test_no_stations( hass: HomeAssistant, - get_trains: list[TrainStopModel], + get_trains: list[TrainStop], snapshot: SnapshotAssertion, ) -> None: """Test stations are missing.""" @@ -99,7 +97,7 @@ async def test_no_stations( entry.add_to_hass(hass) with patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", side_effect=NoTrainStationFound, ): await hass.config_entries.async_setup(entry.entry_id) @@ -110,7 +108,7 @@ async def test_no_stations( async def test_migrate_entity_unique_id( hass: HomeAssistant, - get_trains: list[TrainStopModel], + get_trains: list[TrainStop], snapshot: SnapshotAssertion, entity_registry: EntityRegistry, ) -> None: @@ -135,7 +133,7 @@ async def test_migrate_entity_unique_id( with ( patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", diff --git a/tests/components/trafikverket_train/test_sensor.py b/tests/components/trafikverket_train/test_sensor.py index f4da3526cb2..f21561dd287 100644 --- a/tests/components/trafikverket_train/test_sensor.py +++ b/tests/components/trafikverket_train/test_sensor.py @@ -8,7 +8,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest from pytrafikverket.exceptions import InvalidAuthentication, NoTrainAnnouncementFound -from pytrafikverket.models import TrainStopModel +from pytrafikverket.trafikverket_train import TrainStop from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry @@ -23,8 +23,8 @@ async def test_sensor_next( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStopModel], - get_train_stop: TrainStopModel, + get_trains_next: list[TrainStop], + get_train_stop: TrainStop, snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket 
Train sensor.""" @@ -70,7 +70,7 @@ async def test_sensor_single_stop( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStopModel], + get_trains_next: list[TrainStop], snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor.""" @@ -86,7 +86,7 @@ async def test_sensor_update_auth_failure( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStopModel], + get_trains_next: list[TrainStop], snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor with authentication update failure.""" @@ -119,7 +119,7 @@ async def test_sensor_update_failure( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStopModel], + get_trains_next: list[TrainStop], snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor with update failure.""" @@ -149,7 +149,7 @@ async def test_sensor_update_failure_no_state( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStopModel], + get_trains_next: list[TrainStop], snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor with update failure from empty state.""" diff --git a/tests/components/trafikverket_weatherstation/test_config_flow.py b/tests/components/trafikverket_weatherstation/test_config_flow.py index f8a0f636718..771336301ff 100644 --- a/tests/components/trafikverket_weatherstation/test_config_flow.py +++ b/tests/components/trafikverket_weatherstation/test_config_flow.py @@ -116,7 +116,14 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -175,7 +182,14 @@ async def test_reauth_flow_fails( ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -192,111 +206,3 @@ async def test_reauth_flow_fails( assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": base_error} - - -async def test_reconfigure_flow(hass: HomeAssistant) -> None: - """Test a reconfigure flow.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_API_KEY: "1234567890", - CONF_STATION: "Vallby", - }, - ) - entry.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - assert result["step_id"] == "reconfigure" - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with ( - patch( - "homeassistant.components.trafikverket_weatherstation.config_flow.TrafikverketWeather.async_get_weather", - ), - patch( - "homeassistant.components.trafikverket_weatherstation.async_setup_entry", - return_value=True, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_API_KEY: "1234567891", CONF_STATION: "Vallby_new"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert 
result["reason"] == "reconfigure_successful" - assert entry.data == {"api_key": "1234567891", "station": "Vallby_new"} - - -@pytest.mark.parametrize( - ("side_effect", "base_error"), - [ - ( - InvalidAuthentication, - "invalid_auth", - ), - ( - NoWeatherStationFound, - "invalid_station", - ), - ( - MultipleWeatherStationsFound, - "more_stations", - ), - ( - Exception, - "cannot_connect", - ), - ], -) -async def test_reconfigure_flow_fails( - hass: HomeAssistant, side_effect: Exception, base_error: str -) -> None: - """Test a reauthentication flow.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_API_KEY: "1234567890", - CONF_STATION: "Vallby", - }, - ) - entry.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - assert result["step_id"] == "reconfigure" - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with patch( - "homeassistant.components.trafikverket_weatherstation.config_flow.TrafikverketWeather.async_get_weather", - side_effect=side_effect(), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_API_KEY: "1234567891", CONF_STATION: "Vallby_new"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": base_error} - - with ( - patch( - "homeassistant.components.trafikverket_weatherstation.config_flow.TrafikverketWeather.async_get_weather", - ), - patch( - "homeassistant.components.trafikverket_weatherstation.async_setup_entry", - return_value=True, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_API_KEY: "1234567891", CONF_STATION: "Vallby_new"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - assert entry.data == {"api_key": "1234567891", "station": "Vallby_new"} diff --git a/tests/components/transmission/test_config_flow.py b/tests/components/transmission/test_config_flow.py index b724a91f7a1..e6c523bf1f6 100644 --- a/tests/components/transmission/test_config_flow.py +++ b/tests/components/transmission/test_config_flow.py @@ -160,14 +160,18 @@ async def test_reauth_success(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=transmission.DOMAIN, data=MOCK_CONFIG_DATA) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + transmission.DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=MOCK_CONFIG_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == { - "username": "user", - "name": "Mock Title", - } + assert result["description_placeholders"] == {"username": "user"} with patch( "homeassistant.components.transmission.async_setup_entry", @@ -193,14 +197,18 @@ async def test_reauth_failed(hass: HomeAssistant, mock_api: MagicMock) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + transmission.DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=MOCK_CONFIG_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == { - "username": "user", - "name": "Mock Title", - } + assert result["description_placeholders"] == 
{"username": "user"} mock_api.side_effect = TransmissionAuthError() result2 = await hass.config_entries.flow.async_configure( @@ -224,14 +232,18 @@ async def test_reauth_failed_connection_error( ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + transmission.DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=MOCK_CONFIG_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == { - "username": "user", - "name": "Mock Title", - } + assert result["description_placeholders"] == {"username": "user"} mock_api.side_effect = TransmissionConnectError() result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/triggercmd/__init__.py b/tests/components/triggercmd/__init__.py deleted file mode 100644 index 90562a67386..00000000000 --- a/tests/components/triggercmd/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the triggercmd integration.""" diff --git a/tests/components/triggercmd/conftest.py b/tests/components/triggercmd/conftest.py deleted file mode 100644 index 5e2ac250d61..00000000000 --- a/tests/components/triggercmd/conftest.py +++ /dev/null @@ -1,15 +0,0 @@ -"""triggercmd conftest.""" - -from unittest.mock import patch - -import pytest - - -@pytest.fixture -def mock_async_setup_entry(): - """Mock async_setup_entry.""" - with patch( - "homeassistant.components.triggercmd.async_setup_entry", - return_value=True, - ) as mock_async_setup_entry: - yield mock_async_setup_entry diff --git a/tests/components/triggercmd/test_config_flow.py b/tests/components/triggercmd/test_config_flow.py deleted file mode 100644 index f12fcfef768..00000000000 --- a/tests/components/triggercmd/test_config_flow.py +++ /dev/null @@ -1,161 +0,0 @@ -"""Define tests for the triggercmd config flow.""" - -from unittest.mock import patch - -import pytest -from triggercmd import TRIGGERcmdConnectionError - -from homeassistant.components.triggercmd.const import CONF_TOKEN, DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - -invalid_token_with_length_100_or_more = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjEyMzQ1Njc4OTBxd2VydHl1aW9wYXNkZiIsImlhdCI6MTcxOTg4MTU4M30.E4T2S4RQfuI2ww74sUkkT-wyTGrV5_VDkgUdae5yo4E" -invalid_token_id = "1234567890qwertyuiopasdf" -invalid_token_with_length_100_or_more_and_no_id = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJub2lkIjoiMTIzNDU2Nzg5MHF3ZXJ0eXVpb3Bhc2RmIiwiaWF0IjoxNzE5ODgxNTgzfQ.MaJLNWPGCE51Zibhbq-Yz7h3GkUxLurR2eoM2frnO6Y" - - -async def test_full_flow( - hass: HomeAssistant, -) -> None: - """Test config flow happy path.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - assert result["errors"] == {} - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - with ( - patch( - "homeassistant.components.triggercmd.client.async_connection_test", - return_value=200, - ), - patch( - "homeassistant.components.triggercmd.ha.Hub", - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_TOKEN: invalid_token_with_length_100_or_more}, - ) - - assert result["data"] == {CONF_TOKEN: invalid_token_with_length_100_or_more} - assert result["result"].unique_id == 
invalid_token_id - assert result["type"] is FlowResultType.CREATE_ENTRY - - -@pytest.mark.parametrize( - ("test_input", "expected"), - [ - (invalid_token_with_length_100_or_more_and_no_id, {"base": "unknown"}), - ("not-a-token", {CONF_TOKEN: "invalid_token"}), - ], -) -async def test_config_flow_user_invalid_token( - hass: HomeAssistant, - test_input: str, - expected: dict, -) -> None: - """Test the initial step of the config flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - with ( - patch( - "homeassistant.components.triggercmd.client.async_connection_test", - return_value=200, - ), - patch( - "homeassistant.components.triggercmd.ha.Hub", - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_TOKEN: test_input}, - ) - - assert result["errors"] == expected - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_TOKEN: invalid_token_with_length_100_or_more}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_config_flow_entry_already_configured(hass: HomeAssistant) -> None: - """Test user input for config_entry that already exists.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - MockConfigEntry( - domain=DOMAIN, - data={CONF_TOKEN: invalid_token_with_length_100_or_more}, - unique_id=invalid_token_id, - ).add_to_hass(hass) - - with ( - patch( - "homeassistant.components.triggercmd.client.async_connection_test", - return_value=200, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_TOKEN: invalid_token_with_length_100_or_more}, - ) - - assert result["reason"] == "already_configured" - assert result["type"] is FlowResultType.ABORT - - -async def test_config_flow_connection_error(hass: HomeAssistant) -> None: - """Test a connection error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - - with ( - patch( - "homeassistant.components.triggercmd.client.async_connection_test", - side_effect=TRIGGERcmdConnectionError, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_TOKEN: invalid_token_with_length_100_or_more}, - ) - - assert result["errors"] == { - "base": "cannot_connect", - } - assert result["type"] is FlowResultType.FORM - - with ( - patch( - "homeassistant.components.triggercmd.client.async_connection_test", - return_value=200, - ), - patch( - "homeassistant.components.triggercmd.ha.Hub", - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_TOKEN: invalid_token_with_length_100_or_more}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY diff --git a/tests/components/tts/common.py b/tests/components/tts/common.py index b1eae12d694..b99e6400273 100644 --- a/tests/components/tts/common.py +++ b/tests/components/tts/common.py @@ -2,13 +2,13 @@ from __future__ import annotations -from collections.abc import Generator from http import HTTPStatus from pathlib import Path from typing import Any from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator import voluptuous as vol from homeassistant.components import media_source @@ -130,8 +130,6 @@ class BaseProvider: def __init__(self, lang: str) -> None: """Initialize test provider.""" 
self._lang = lang - self._supported_languages = SUPPORT_LANGUAGES - self._supported_options = ["voice", "age"] @property def default_language(self) -> str: @@ -141,7 +139,7 @@ class BaseProvider: @property def supported_languages(self) -> list[str]: """Return list of supported languages.""" - return self._supported_languages + return SUPPORT_LANGUAGES @callback def async_get_supported_voices(self, language: str) -> list[Voice] | None: @@ -156,7 +154,7 @@ class BaseProvider: @property def supported_options(self) -> list[str]: """Return list of supported options like voice, emotions.""" - return self._supported_options + return ["voice", "age"] def get_tts_audio( self, message: str, language: str, options: dict[str, Any] @@ -165,7 +163,7 @@ class BaseProvider: return ("mp3", b"") -class MockTTSProvider(BaseProvider, Provider): +class MockProvider(BaseProvider, Provider): """Test speech API provider.""" def __init__(self, lang: str) -> None: @@ -177,7 +175,10 @@ class MockTTSProvider(BaseProvider, Provider): class MockTTSEntity(BaseProvider, TextToSpeechEntity): """Test speech API provider.""" - _attr_name = "Test" + @property + def name(self) -> str: + """Return the name of the entity.""" + return "Test" class MockTTS(MockPlatform): @@ -187,7 +188,7 @@ class MockTTS(MockPlatform): {vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES)} ) - def __init__(self, provider: MockTTSProvider, **kwargs: Any) -> None: + def __init__(self, provider: MockProvider, **kwargs: Any) -> None: """Initialize.""" super().__init__(**kwargs) self._provider = provider @@ -204,7 +205,7 @@ class MockTTS(MockPlatform): async def mock_setup( hass: HomeAssistant, - mock_provider: MockTTSProvider, + mock_provider: MockProvider, ) -> None: """Set up a test provider.""" mock_integration(hass, MockModule(domain=TEST_DOMAIN)) @@ -217,9 +218,7 @@ async def mock_setup( async def mock_config_entry_setup( - hass: HomeAssistant, - tts_entity: MockTTSEntity, - test_domain: str = TEST_DOMAIN, + hass: HomeAssistant, tts_entity: MockTTSEntity ) -> MockConfigEntry: """Set up a test tts platform via config entry.""" @@ -240,7 +239,7 @@ async def mock_config_entry_setup( mock_integration( hass, MockModule( - test_domain, + TEST_DOMAIN, async_setup_entry=async_setup_entry_init, async_unload_entry=async_unload_entry_init, ), @@ -255,9 +254,9 @@ async def mock_config_entry_setup( async_add_entities([tts_entity]) loaded_platform = MockPlatform(async_setup_entry=async_setup_entry_platform) - mock_platform(hass, f"{test_domain}.{TTS_DOMAIN}", loaded_platform) + mock_platform(hass, f"{TEST_DOMAIN}.{TTS_DOMAIN}", loaded_platform) - config_entry = MockConfigEntry(domain=test_domain) + config_entry = MockConfigEntry(domain=TEST_DOMAIN) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/tts/conftest.py b/tests/components/tts/conftest.py index ddef3ee0c28..b8abb086260 100644 --- a/tests/components/tts/conftest.py +++ b/tests/components/tts/conftest.py @@ -3,23 +3,22 @@ From http://doc.pytest.org/en/latest/example/simple.html#making-test-result-information-available-in-fixtures """ -from collections.abc import Generator, Iterable -from contextlib import ExitStack from pathlib import Path from unittest.mock import MagicMock import pytest +from typing_extensions import Generator +from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigFlow from homeassistant.core 
import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from .common import ( DEFAULT_LANG, TEST_DOMAIN, + MockProvider, MockTTS, MockTTSEntity, - MockTTSProvider, mock_config_entry_setup, mock_setup, ) @@ -67,9 +66,9 @@ async def mock_tts(hass: HomeAssistant, mock_provider) -> None: @pytest.fixture -def mock_provider() -> MockTTSProvider: +def mock_provider() -> MockProvider: """Test TTS provider.""" - return MockTTSProvider(DEFAULT_LANG) + return MockProvider(DEFAULT_LANG) @pytest.fixture @@ -82,23 +81,12 @@ class TTSFlow(ConfigFlow): """Test flow.""" -@pytest.fixture(name="config_flow_test_domains") -def config_flow_test_domain_fixture() -> Iterable[str]: - """Test domain fixture.""" - return (TEST_DOMAIN,) - - @pytest.fixture(autouse=True) -def config_flow_fixture( - hass: HomeAssistant, config_flow_test_domains: Iterable[str] -) -> Generator[None]: +def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: """Mock config flow.""" - for domain in config_flow_test_domains: - mock_platform(hass, f"{domain}.config_flow") + mock_platform(hass, f"{TEST_DOMAIN}.config_flow") - with ExitStack() as stack: - for domain in config_flow_test_domains: - stack.enter_context(mock_config_flow(domain, TTSFlow)) + with mock_config_flow(TEST_DOMAIN, TTSFlow): yield @@ -106,7 +94,7 @@ def config_flow_fixture( async def setup_fixture( hass: HomeAssistant, request: pytest.FixtureRequest, - mock_provider: MockTTSProvider, + mock_provider: MockProvider, mock_tts_entity: MockTTSEntity, ) -> None: """Set up the test environment.""" diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index 2ab6dc16629..e0354170b06 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -30,22 +30,15 @@ from .common import ( DEFAULT_LANG, SUPPORT_LANGUAGES, TEST_DOMAIN, - MockTTS, + MockProvider, MockTTSEntity, - MockTTSProvider, get_media_source_url, mock_config_entry_setup, mock_setup, retrieve_media, ) -from tests.common import ( - MockModule, - async_mock_service, - mock_integration, - mock_platform, - mock_restore_cache, -) +from tests.common import async_mock_service, mock_restore_cache from tests.typing import ClientSessionGenerator, WebSocketGenerator ORIG_WRITE_TAGS = tts.SpeechManager.write_tags @@ -54,8 +47,15 @@ ORIG_WRITE_TAGS = tts.SpeechManager.write_tags class DefaultEntity(tts.TextToSpeechEntity): """Test entity.""" - _attr_supported_languages = SUPPORT_LANGUAGES - _attr_default_language = DEFAULT_LANG + @property + def supported_languages(self) -> list[str]: + """Return a list of supported languages.""" + return SUPPORT_LANGUAGES + + @property + def default_language(self) -> str: + """Return the default language.""" + return DEFAULT_LANG async def test_default_entity_attributes() -> None: @@ -220,7 +220,7 @@ async def test_service( @pytest.mark.parametrize( ("mock_provider", "mock_tts_entity"), - [(MockTTSProvider("de_DE"), MockTTSEntity("de_DE"))], + [(MockProvider("de_DE"), MockTTSEntity("de_DE"))], ) @pytest.mark.parametrize( ("setup", "tts_service", "service_data", "expected_url_suffix"), @@ -281,7 +281,7 @@ async def test_service_default_language( @pytest.mark.parametrize( ("mock_provider", "mock_tts_entity"), - [(MockTTSProvider("en_US"), MockTTSEntity("en_US"))], + [(MockProvider("en_US"), MockTTSEntity("en_US"))], ) @pytest.mark.parametrize( ("setup", "tts_service", "service_data", "expected_url_suffix"), @@ -511,7 +511,7 @@ async def test_service_options( ).is_file() -class 
MockProviderWithDefaults(MockTTSProvider): +class MockProviderWithDefaults(MockProvider): """Mock provider with default options.""" @property @@ -523,7 +523,10 @@ class MockProviderWithDefaults(MockTTSProvider): class MockEntityWithDefaults(MockTTSEntity): """Mock entity with default options.""" - _attr_default_options = {"voice": "alex"} + @property + def default_options(self): + """Return a mapping with the default options.""" + return {"voice": "alex"} @pytest.mark.parametrize( @@ -854,7 +857,7 @@ async def test_service_receive_voice( @pytest.mark.parametrize( ("mock_provider", "mock_tts_entity"), - [(MockTTSProvider("de_DE"), MockTTSEntity("de_DE"))], + [(MockProvider("de_DE"), MockTTSEntity("de_DE"))], ) @pytest.mark.parametrize( ("setup", "tts_service", "service_data", "expected_url_suffix"), @@ -1015,7 +1018,7 @@ async def test_service_without_cache( ).is_file() -class MockProviderBoom(MockTTSProvider): +class MockProviderBoom(MockProvider): """Mock provider that blows up.""" def get_tts_audio( @@ -1023,7 +1026,7 @@ class MockProviderBoom(MockTTSProvider): ) -> tts.TtsAudioType: """Load TTS dat.""" # This should not be called, data should be fetched from cache - raise Exception("Boom!") # noqa: TRY002 + raise Exception("Boom!") # pylint: disable=broad-exception-raised class MockEntityBoom(MockTTSEntity): @@ -1034,14 +1037,14 @@ class MockEntityBoom(MockTTSEntity): ) -> tts.TtsAudioType: """Load TTS dat.""" # This should not be called, data should be fetched from cache - raise Exception("Boom!") # noqa: TRY002 + raise Exception("Boom!") # pylint: disable=broad-exception-raised @pytest.mark.parametrize("mock_provider", [MockProviderBoom(DEFAULT_LANG)]) async def test_setup_legacy_cache_dir( hass: HomeAssistant, mock_tts_cache_dir: Path, - mock_provider: MockTTSProvider, + mock_provider: MockProvider, ) -> None: """Set up a TTS platform with cache and call service without cache.""" calls = async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) @@ -1051,7 +1054,9 @@ async def test_setup_legacy_cache_dir( mock_tts_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_test.mp3" ) - await hass.async_add_executor_job(Path(cache_file).write_bytes, tts_data) + with open(cache_file, "wb") as voice_file: + voice_file.write(tts_data) + await mock_setup(hass, mock_provider) await hass.services.async_call( @@ -1085,7 +1090,9 @@ async def test_setup_cache_dir( "42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_tts.test.mp3" ) - await hass.async_add_executor_job(Path(cache_file).write_bytes, tts_data) + with open(cache_file, "wb") as voice_file: + voice_file.write(tts_data) + await mock_config_entry_setup(hass, mock_tts_entity) await hass.services.async_call( @@ -1106,7 +1113,7 @@ async def test_setup_cache_dir( await hass.async_block_till_done() -class MockProviderEmpty(MockTTSProvider): +class MockProviderEmpty(MockProvider): """Mock provider with empty get_tts_audio.""" def get_tts_audio( @@ -1178,7 +1185,7 @@ async def test_service_get_tts_error( async def test_load_cache_legacy_retrieve_without_mem_cache( hass: HomeAssistant, - mock_provider: MockTTSProvider, + mock_provider: MockProvider, mock_tts_cache_dir: Path, hass_client: ClientSessionGenerator, ) -> None: @@ -1188,7 +1195,9 @@ async def test_load_cache_legacy_retrieve_without_mem_cache( mock_tts_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_en_-_test.mp3" ) - await hass.async_add_executor_job(Path(cache_file).write_bytes, tts_data) + with open(cache_file, "wb") as voice_file: + voice_file.write(tts_data) + await 
mock_setup(hass, mock_provider) client = await hass_client() @@ -1212,7 +1221,9 @@ async def test_load_cache_retrieve_without_mem_cache( "42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_tts.test.mp3" ) - await hass.async_add_executor_job(Path(cache_file).write_bytes, tts_data) + with open(cache_file, "wb") as voice_file: + voice_file.write(tts_data) + await mock_config_entry_setup(hass, mock_tts_entity) client = await hass_client() @@ -1318,16 +1329,10 @@ async def test_tags_with_wave() -> None: @pytest.mark.parametrize( ("engine", "language", "options", "cache", "result_query"), [ - (None, None, None, None, "&tts_options=null"), - (None, "de_DE", None, None, "&language=de_DE&tts_options=null"), - ( - None, - "de_DE", - {"voice": "henk"}, - None, - "&language=de_DE&tts_options=%7B%22voice%22:%22henk%22%7D", - ), - (None, "de_DE", None, True, "&cache=true&language=de_DE&tts_options=null"), + (None, None, None, None, ""), + (None, "de_DE", None, None, "language=de_DE"), + (None, "de_DE", {"voice": "henk"}, None, "language=de_DE&voice=henk"), + (None, "de_DE", None, True, "cache=true&language=de_DE"), ], ) async def test_generate_media_source_id( @@ -1349,9 +1354,8 @@ async def test_generate_media_source_id( _, _, engine_query = media_source_id.rpartition("/") engine, _, query = engine_query.partition("?") assert engine == result_engine - query_prefix = "message=msg" - assert query.startswith(query_prefix) - assert query[len(query_prefix) :] == result_query + assert query.startswith("message=msg") + assert query[12:] == result_query @pytest.mark.parametrize( @@ -1403,6 +1407,9 @@ def test_resolve_engine(hass: HomeAssistant, setup: str, engine_id: str) -> None ): assert tts.async_resolve_engine(hass, None) is None + with patch.dict(hass.data[tts.DATA_TTS_MANAGER].providers, {"cloud": object()}): + assert tts.async_resolve_engine(hass, None) == "cloud" + @pytest.mark.parametrize( ("setup", "engine_id"), @@ -1433,7 +1440,7 @@ async def test_legacy_fetching_in_async( """Test async fetching of data for a legacy provider.""" tts_audio: asyncio.Future[bytes] = asyncio.Future() - class ProviderWithAsyncFetching(MockTTSProvider): + class ProviderWithAsyncFetching(MockProvider): """Provider that supports audio output option.""" @property @@ -1572,19 +1579,15 @@ async def test_fetching_in_async( @pytest.mark.parametrize( - ("setup", "engine_id", "extra_data"), + ("setup", "engine_id"), [ - ("mock_setup", "test", {"name": "Test"}), - ("mock_config_entry_setup", "tts.test", {}), + ("mock_setup", "test"), + ("mock_config_entry_setup", "tts.test"), ], indirect=["setup"], ) async def test_ws_list_engines( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - setup: str, - engine_id: str, - extra_data: dict[str, str], + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, setup: str, engine_id: str ) -> None: """Test listing tts engines and supported languages.""" client = await hass_ws_client() @@ -1599,7 +1602,6 @@ async def test_ws_list_engines( "engine_id": engine_id, "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], } - | extra_data ] } @@ -1608,7 +1610,7 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": []} | extra_data] + "providers": [{"engine_id": engine_id, "supported_languages": []}] } await client.send_json_auto_id({"type": "tts/engine/list", "language": "en"}) @@ -1618,7 +1620,6 @@ async def test_ws_list_engines( assert msg["result"] == { 
"providers": [ {"engine_id": engine_id, "supported_languages": ["en_US", "en_GB"]} - | extra_data ] } @@ -1629,7 +1630,6 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["en_GB", "en_US"]} - | extra_data ] } @@ -1640,7 +1640,6 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["de_DE", "de_CH"]} - | extra_data ] } @@ -1653,74 +1652,20 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["de_CH", "de_DE"]} - | extra_data - ] - } - - -async def test_ws_list_engines_deprecated( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - mock_tts_entity: MockTTSEntity, -) -> None: - """Test listing tts engines. - - This test asserts the deprecated flag is set on a legacy engine whose integration - also provides tts entities. - """ - - mock_provider = MockTTSProvider(DEFAULT_LANG) - mock_provider_2 = MockTTSProvider(DEFAULT_LANG) - mock_integration(hass, MockModule(domain="test")) - mock_platform(hass, "test.tts", MockTTS(mock_provider)) - mock_integration(hass, MockModule(domain="test_2")) - mock_platform(hass, "test_2.tts", MockTTS(mock_provider_2)) - await async_setup_component( - hass, "tts", {"tts": [{"platform": "test"}, {"platform": "test_2"}]} - ) - await mock_config_entry_setup(hass, mock_tts_entity) - - client = await hass_ws_client() - - await client.send_json_auto_id({"type": "tts/engine/list"}) - - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == { - "providers": [ - { - "engine_id": "tts.test", - "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], - }, - { - "deprecated": True, - "engine_id": "test", - "name": "Test", - "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], - }, - { - "engine_id": "test_2", - "name": "Test", - "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], - }, ] } @pytest.mark.parametrize( - ("setup", "engine_id", "extra_data"), + ("setup", "engine_id"), [ - ("mock_setup", "test", {"name": "Test"}), - ("mock_config_entry_setup", "tts.test", {}), + ("mock_setup", "test"), + ("mock_config_entry_setup", "tts.test"), ], indirect=["setup"], ) async def test_ws_get_engine( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - setup: str, - engine_id: str, - extra_data: dict[str, str], + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, setup: str, engine_id: str ) -> None: """Test getting an tts engine.""" client = await hass_ws_client() @@ -1734,7 +1679,6 @@ async def test_ws_get_engine( "engine_id": engine_id, "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], } - | extra_data } @@ -1822,151 +1766,3 @@ async def test_async_convert_audio_error(hass: HomeAssistant) -> None: with pytest.raises(RuntimeError): # Simulate a bad WAV file await tts.async_convert_audio(hass, "wav", bytes(0), "mp3") - - -async def test_ttsentity_subclass_properties( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test for errors when subclasses of the TextToSpeechEntity are missing required properties.""" - - class TestClass1(tts.TextToSpeechEntity): - _attr_default_language = DEFAULT_LANG - _attr_supported_languages = SUPPORT_LANGUAGES - - await mock_config_entry_setup(hass, TestClass1()) - - class TestClass2(tts.TextToSpeechEntity): - @property - def default_language(self) -> str: - return DEFAULT_LANG - - @property - def supported_languages(self) -> 
list[str]: - return SUPPORT_LANGUAGES - - await mock_config_entry_setup(hass, TestClass2()) - - assert all(record.exc_info is None for record in caplog.records) - - caplog.clear() - - class TestClass3(tts.TextToSpeechEntity): - _attr_default_language = DEFAULT_LANG - - await mock_config_entry_setup(hass, TestClass3()) - - assert ( - "TTS entities must either set the '_attr_supported_languages' attribute or override the 'supported_languages' property" - in [ - str(record.exc_info[1]) - for record in caplog.records - if record.exc_info is not None - ] - ) - caplog.clear() - - class TestClass4(tts.TextToSpeechEntity): - _attr_supported_languages = SUPPORT_LANGUAGES - - await mock_config_entry_setup(hass, TestClass4()) - - assert ( - "TTS entities must either set the '_attr_default_language' attribute or override the 'default_language' property" - in [ - str(record.exc_info[1]) - for record in caplog.records - if record.exc_info is not None - ] - ) - caplog.clear() - - class TestClass5(tts.TextToSpeechEntity): - @property - def default_language(self) -> str: - return DEFAULT_LANG - - await mock_config_entry_setup(hass, TestClass5()) - - assert ( - "TTS entities must either set the '_attr_supported_languages' attribute or override the 'supported_languages' property" - in [ - str(record.exc_info[1]) - for record in caplog.records - if record.exc_info is not None - ] - ) - caplog.clear() - - class TestClass6(tts.TextToSpeechEntity): - @property - def supported_languages(self) -> list[str]: - return SUPPORT_LANGUAGES - - await mock_config_entry_setup(hass, TestClass6()) - - assert ( - "TTS entities must either set the '_attr_default_language' attribute or override the 'default_language' property" - in [ - str(record.exc_info[1]) - for record in caplog.records - if record.exc_info is not None - ] - ) - - -async def test_default_engine_prefer_entity( - hass: HomeAssistant, - mock_tts_entity: MockTTSEntity, - mock_provider: MockTTSProvider, -) -> None: - """Test async_default_engine. - - In this tests there's an entity and a legacy provider. - The test asserts async_default_engine returns the entity. - """ - mock_tts_entity._attr_name = "New test" - - await mock_setup(hass, mock_provider) - await mock_config_entry_setup(hass, mock_tts_entity) - await hass.async_block_till_done() - - entity_engine = tts.async_resolve_engine(hass, "tts.new_test") - assert entity_engine == "tts.new_test" - provider_engine = tts.async_resolve_engine(hass, "test") - assert provider_engine == "test" - assert tts.async_default_engine(hass) == "tts.new_test" - - -@pytest.mark.parametrize( - "config_flow_test_domains", - [ - # Test different setup order to ensure the default is not influenced - # by setup order. - ("cloud", "new_test"), - ("new_test", "cloud"), - ], -) -async def test_default_engine_prefer_cloud_entity( - hass: HomeAssistant, - mock_provider: MockTTSProvider, - config_flow_test_domains: str, -) -> None: - """Test async_default_engine. - - In this tests there's an entity from domain cloud, an entity from domain new_test - and a legacy provider. - The test asserts async_default_engine returns the entity from domain cloud. 
- """ - await mock_setup(hass, mock_provider) - for domain in config_flow_test_domains: - entity = MockTTSEntity(DEFAULT_LANG) - entity._attr_name = f"{domain} TTS entity" - await mock_config_entry_setup(hass, entity, test_domain=domain) - await hass.async_block_till_done() - - for domain in config_flow_test_domains: - entity_engine = tts.async_resolve_engine(hass, f"tts.{domain}_tts_entity") - assert entity_engine == f"tts.{domain}_tts_entity" - provider_engine = tts.async_resolve_engine(hass, "test") - assert provider_engine == "test" - assert tts.async_default_engine(hass) == "tts.cloud_tts_entity" diff --git a/tests/components/tts/test_legacy.py b/tests/components/tts/test_legacy.py index 22e8ac35f16..05bb6dec10f 100644 --- a/tests/components/tts/test_legacy.py +++ b/tests/components/tts/test_legacy.py @@ -17,7 +17,7 @@ from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import async_setup_component -from .common import SUPPORT_LANGUAGES, MockTTS, MockTTSProvider +from .common import SUPPORT_LANGUAGES, MockProvider, MockTTS from tests.common import ( MockModule, @@ -75,9 +75,7 @@ async def test_invalid_platform( async def test_platform_setup_without_provider( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mock_provider: MockTTSProvider, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_provider: MockProvider ) -> None: """Test platform setup without provider returned.""" @@ -111,7 +109,7 @@ async def test_platform_setup_without_provider( async def test_platform_setup_with_error( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - mock_provider: MockTTSProvider, + mock_provider: MockProvider, ) -> None: """Test platform setup with an error during setup.""" @@ -125,7 +123,7 @@ async def test_platform_setup_with_error( discovery_info: DiscoveryInfoType | None = None, ) -> Provider: """Raise exception during platform setup.""" - raise Exception("Setup error") # noqa: TRY002 + raise Exception("Setup error") # pylint: disable=broad-exception-raised mock_integration(hass, MockModule(domain="bad_tts")) mock_platform(hass, "bad_tts.tts", BadPlatform(mock_provider)) diff --git a/tests/components/tts/test_media_source.py b/tests/components/tts/test_media_source.py index d90923b02ab..4c10d8f0b08 100644 --- a/tests/components/tts/test_media_source.py +++ b/tests/components/tts/test_media_source.py @@ -1,25 +1,19 @@ """Tests for TTS media source.""" from http import HTTPStatus -import re from unittest.mock import MagicMock import pytest from homeassistant.components import media_source -from homeassistant.components.media_player import BrowseError -from homeassistant.components.tts.media_source import ( - MediaSourceOptions, - generate_media_source_id, - media_source_id_to_kwargs, -) +from homeassistant.components.media_player.errors import BrowseError from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from .common import ( DEFAULT_LANG, + MockProvider, MockTTSEntity, - MockTTSProvider, mock_config_entry_setup, mock_setup, retrieve_media, @@ -34,7 +28,7 @@ class MSEntity(MockTTSEntity): get_tts_audio = MagicMock(return_value=("mp3", b"")) -class MSProvider(MockTTSProvider): +class MSProvider(MockProvider): """Test speech API provider.""" get_tts_audio = MagicMock(return_value=("mp3", b"")) @@ -98,24 +92,14 @@ async def test_browsing(hass: HomeAssistant, setup: str) -> None: await 
media_source.async_browse_media(hass, "media-source://tts/non-existing") -@pytest.mark.parametrize( - ("mock_provider", "extra_options"), - [ - (MSProvider(DEFAULT_LANG), "&tts_options=%7B%22voice%22%3A%22Paulus%22%7D"), - (MSProvider(DEFAULT_LANG), "&voice=Paulus"), - ], -) +@pytest.mark.parametrize("mock_provider", [MSProvider(DEFAULT_LANG)]) async def test_legacy_resolving( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_provider: MSProvider, - extra_options: str, + hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_provider: MSProvider ) -> None: """Test resolving legacy provider.""" await mock_setup(hass, mock_provider) mock_get_tts_audio = mock_provider.get_tts_audio - mock_get_tts_audio.reset_mock() media_id = "media-source://tts/test?message=Hello%20World" media = await media_source.async_resolve_media(hass, media_id, None) assert media.url.startswith("/api/tts_proxy/") @@ -130,9 +114,7 @@ async def test_legacy_resolving( # Pass language and options mock_get_tts_audio.reset_mock() - media_id = ( - f"media-source://tts/test?message=Bye%20World&language=de_DE{extra_options}" - ) + media_id = "media-source://tts/test?message=Bye%20World&language=de_DE&voice=Paulus" media = await media_source.async_resolve_media(hass, media_id, None) assert media.url.startswith("/api/tts_proxy/") assert media.mime_type == "audio/mpeg" @@ -145,24 +127,14 @@ async def test_legacy_resolving( assert mock_get_tts_audio.mock_calls[0][2]["options"] == {"voice": "Paulus"} -@pytest.mark.parametrize( - ("mock_tts_entity", "extra_options"), - [ - (MSEntity(DEFAULT_LANG), "&tts_options=%7B%22voice%22%3A%22Paulus%22%7D"), - (MSEntity(DEFAULT_LANG), "&voice=Paulus"), - ], -) +@pytest.mark.parametrize("mock_tts_entity", [MSEntity(DEFAULT_LANG)]) async def test_resolving( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_tts_entity: MSEntity, - extra_options: str, + hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts_entity: MSEntity ) -> None: """Test resolving entity.""" await mock_config_entry_setup(hass, mock_tts_entity) mock_get_tts_audio = mock_tts_entity.get_tts_audio - mock_get_tts_audio.reset_mock() media_id = "media-source://tts/tts.test?message=Hello%20World" media = await media_source.async_resolve_media(hass, media_id, None) assert media.url.startswith("/api/tts_proxy/") @@ -178,7 +150,7 @@ async def test_resolving( # Pass language and options mock_get_tts_audio.reset_mock() media_id = ( - f"media-source://tts/tts.test?message=Bye%20World&language=de_DE{extra_options}" + "media-source://tts/tts.test?message=Bye%20World&language=de_DE&voice=Paulus" ) media = await media_source.async_resolve_media(hass, media_id, None) assert media.url.startswith("/api/tts_proxy/") @@ -197,108 +169,29 @@ async def test_resolving( [(MSProvider(DEFAULT_LANG), MSEntity(DEFAULT_LANG))], ) @pytest.mark.parametrize( - ("setup", "engine"), + "setup", [ - ("mock_setup", "test"), - ("mock_config_entry_setup", "tts.test"), + "mock_setup", + "mock_config_entry_setup", ], indirect=["setup"], ) -async def test_resolving_errors(hass: HomeAssistant, setup: str, engine: str) -> None: +async def test_resolving_errors(hass: HomeAssistant, setup: str) -> None: """Test resolving.""" # No message added with pytest.raises(media_source.Unresolvable): await media_source.async_resolve_media(hass, "media-source://tts/test", None) # Non-existing provider - with pytest.raises( - media_source.Unresolvable, match="Provider non-existing not found" - ): + with 
pytest.raises(media_source.Unresolvable): await media_source.async_resolve_media( hass, "media-source://tts/non-existing?message=bla", None ) - # Non-JSON tts options - with pytest.raises( - media_source.Unresolvable, - match="Invalid TTS options: Expecting property name enclosed in double quotes", - ): - await media_source.async_resolve_media( - hass, - f"media-source://tts/{engine}?message=bla&tts_options=%7Binvalid json", - None, - ) - # Non-existing option - with pytest.raises( - media_source.Unresolvable, - match=re.escape("Invalid options found: ['non_existing_option']"), - ): + with pytest.raises(media_source.Unresolvable): await media_source.async_resolve_media( hass, - f"media-source://tts/{engine}?message=bla&tts_options=%7B%22non_existing_option%22%3A%22bla%22%7D", + "media-source://tts/non-existing?message=bla&non_existing_option=bla", None, ) - - -@pytest.mark.parametrize( - ("setup", "result_engine"), - [ - ("mock_setup", "test"), - ("mock_config_entry_setup", "tts.test"), - ], - indirect=["setup"], -) -async def test_generate_media_source_id_and_media_source_id_to_kwargs( - hass: HomeAssistant, - setup: str, - result_engine: str, -) -> None: - """Test media_source_id and media_source_id_to_kwargs.""" - kwargs: MediaSourceOptions = { - "engine": None, - "message": "hello", - "language": "en_US", - "options": {"age": 5}, - "cache": True, - } - media_source_id = generate_media_source_id(hass, **kwargs) - assert media_source_id_to_kwargs(media_source_id) == { - "engine": result_engine, - "message": "hello", - "language": "en_US", - "options": {"age": 5}, - "cache": True, - } - - kwargs = { - "engine": None, - "message": "hello", - "language": "en_US", - "options": {"age": [5, 6]}, - "cache": True, - } - media_source_id = generate_media_source_id(hass, **kwargs) - assert media_source_id_to_kwargs(media_source_id) == { - "engine": result_engine, - "message": "hello", - "language": "en_US", - "options": {"age": [5, 6]}, - "cache": True, - } - - kwargs = { - "engine": None, - "message": "hello", - "language": "en_US", - "options": {"age": {"k1": [5, 6], "k2": "v2"}}, - "cache": True, - } - media_source_id = generate_media_source_id(hass, **kwargs) - assert media_source_id_to_kwargs(media_source_id) == { - "engine": result_engine, - "message": "hello", - "language": "en_US", - "options": {"age": {"k1": [5, 6], "k2": "v2"}}, - "cache": True, - } diff --git a/tests/components/tts/test_notify.py b/tests/components/tts/test_notify.py index 00cdae2934f..07ba2f2f3f5 100644 --- a/tests/components/tts/test_notify.py +++ b/tests/components/tts/test_notify.py @@ -9,8 +9,8 @@ from homeassistant.components.media_player import ( DOMAIN as DOMAIN_MP, SERVICE_PLAY_MEDIA, ) +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from .common import MockTTSEntity, mock_config_entry_setup diff --git a/tests/components/tuya/conftest.py b/tests/components/tuya/conftest.py index 4fffb3ae389..981e12ecceb 100644 --- a/tests/components/tuya/conftest.py +++ b/tests/components/tuya/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from collections.abc import Generator -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.tuya.const import CONF_APP_TYPE, CONF_USER_CODE, DOMAIN @@ -35,7 +35,7 @@ 
def mock_config_entry() -> MockConfigEntry: @pytest.fixture -def mock_setup_entry() -> Generator[None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Mock setting up a config entry.""" with patch("homeassistant.components.tuya.async_setup_entry", return_value=True): yield diff --git a/tests/components/tuya/snapshots/test_config_flow.ambr b/tests/components/tuya/snapshots/test_config_flow.ambr index a5a68a12a22..416a656c238 100644 --- a/tests/components/tuya/snapshots/test_config_flow.ambr +++ b/tests/components/tuya/snapshots/test_config_flow.ambr @@ -14,8 +14,6 @@ 'user_code': '12345', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'tuya', 'entry_id': , 'minor_version': 1, @@ -44,8 +42,6 @@ 'user_code': '12345', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'tuya', 'entry_id': , 'minor_version': 1, @@ -97,8 +93,6 @@ 'user_code': '12345', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'tuya', 'entry_id': , 'minor_version': 1, diff --git a/tests/components/tuya/test_config_flow.py b/tests/components/tuya/test_config_flow.py index 247aec02cd1..6e971262bc8 100644 --- a/tests/components/tuya/test_config_flow.py +++ b/tests/components/tuya/test_config_flow.py @@ -8,7 +8,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.tuya.const import CONF_APP_TYPE, CONF_USER_CODE, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -145,7 +145,15 @@ async def test_reauth_flow( """Test the reauthentication configuration flow.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "scan" @@ -177,7 +185,15 @@ async def test_reauth_flow_migration( assert CONF_APP_TYPE in mock_old_config_entry.data assert CONF_USER_CODE not in mock_old_config_entry.data - result = await mock_old_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_old_config_entry.unique_id, + "entry_id": mock_old_config_entry.entry_id, + }, + data=mock_old_config_entry.data, + ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_user_code" @@ -213,7 +229,15 @@ async def test_reauth_flow_failed_qr_code( """Test an error occurring while retrieving the QR code.""" mock_old_config_entry.add_to_hass(hass) - result = await mock_old_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": mock_old_config_entry.unique_id, + "entry_id": mock_old_config_entry.entry_id, + }, + data=mock_old_config_entry.data, + ) # Something went wrong getting the QR code (like an invalid user code) mock_tuya_login_control.qr_code.return_value["success"] = False diff --git a/tests/components/twentemilieu/conftest.py b/tests/components/twentemilieu/conftest.py index 7ecf1657ce9..7b157572824 100644 --- a/tests/components/twentemilieu/conftest.py +++ b/tests/components/twentemilieu/conftest.py @@ 
-2,12 +2,12 @@ from __future__ import annotations -from collections.abc import Generator from datetime import date from unittest.mock import MagicMock, patch import pytest from twentemilieu import WasteType +from typing_extensions import Generator from homeassistant.components.twentemilieu.const import ( CONF_HOUSE_LETTER, diff --git a/tests/components/twentemilieu/snapshots/test_calendar.ambr b/tests/components/twentemilieu/snapshots/test_calendar.ambr index 1df4beb4232..e6de21fdca1 100644 --- a/tests/components/twentemilieu/snapshots/test_calendar.ambr +++ b/tests/components/twentemilieu/snapshots/test_calendar.ambr @@ -99,7 +99,6 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, - 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/twentemilieu/snapshots/test_config_flow.ambr b/tests/components/twentemilieu/snapshots/test_config_flow.ambr index a98119e81c9..00b96062052 100644 --- a/tests/components/twentemilieu/snapshots/test_config_flow.ambr +++ b/tests/components/twentemilieu/snapshots/test_config_flow.ambr @@ -26,8 +26,6 @@ 'post_code': '1234AB', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'twentemilieu', 'entry_id': , 'minor_version': 1, @@ -72,8 +70,6 @@ 'post_code': '1234AB', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'twentemilieu', 'entry_id': , 'minor_version': 1, diff --git a/tests/components/twentemilieu/snapshots/test_sensor.ambr b/tests/components/twentemilieu/snapshots/test_sensor.ambr index 86ffc171082..22dcb0331cd 100644 --- a/tests/components/twentemilieu/snapshots/test_sensor.ambr +++ b/tests/components/twentemilieu/snapshots/test_sensor.ambr @@ -68,7 +68,6 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, - 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, 'primary_config_entry': , @@ -147,7 +146,6 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, - 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, 'primary_config_entry': , @@ -226,7 +224,6 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, - 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, 'primary_config_entry': , @@ -305,7 +302,6 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, - 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, 'primary_config_entry': , @@ -384,7 +380,6 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, - 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/twilio/test_init.py b/tests/components/twilio/test_init.py index 9c07bd6f3d8..8efa1c24742 100644 --- a/tests/components/twilio/test_init.py +++ b/tests/components/twilio/test_init.py @@ -2,8 +2,8 @@ from homeassistant import config_entries from homeassistant.components import twilio +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, callback -from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from tests.typing import ClientSessionGenerator diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr index 28ec98cf572..0601159ca4c 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -27,8 +27,6 @@ 'name': 'twinkly_test_device_name', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 
'domain': 'twinkly', 'entry_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', 'minor_version': 1, diff --git a/tests/components/twinkly/test_diagnostics.py b/tests/components/twinkly/test_diagnostics.py index f9cf0bc562c..5cb9fc1fe9e 100644 --- a/tests/components/twinkly/test_diagnostics.py +++ b/tests/components/twinkly/test_diagnostics.py @@ -3,7 +3,6 @@ from collections.abc import Awaitable, Callable from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -27,6 +26,4 @@ async def test_diagnostics( await setup_integration() entry = hass.config_entries.async_entries(DOMAIN)[0] - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( - exclude=props("created_at", "modified_at") - ) + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot diff --git a/tests/components/twitch/__init__.py b/tests/components/twitch/__init__.py index 2d70aaf9649..0238bbdadba 100644 --- a/tests/components/twitch/__init__.py +++ b/tests/components/twitch/__init__.py @@ -1,9 +1,10 @@ """Tests for the Twitch component.""" -from collections.abc import AsyncGenerator, AsyncIterator +from collections.abc import AsyncIterator from typing import Any, Generic, TypeVar from twitchAPI.object.base import TwitchObject +from typing_extensions import AsyncGenerator from homeassistant.components.twitch import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/twitch/conftest.py b/tests/components/twitch/conftest.py index 07732de1b0c..6c243a8dbbf 100644 --- a/tests/components/twitch/conftest.py +++ b/tests/components/twitch/conftest.py @@ -1,11 +1,11 @@ """Configure tests for the Twitch integration.""" -from collections.abc import Generator import time from unittest.mock import AsyncMock, patch import pytest from twitchAPI.object.api import FollowedChannel, Stream, TwitchUser, UserSubscription +from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, @@ -111,8 +111,8 @@ def twitch_mock() -> Generator[AsyncMock]: mock_client.return_value.get_followed_channels.return_value = TwitchIterObject( "get_followed_channels.json", FollowedChannel ) - mock_client.return_value.get_followed_streams.return_value = get_generator( - "get_followed_streams.json", Stream + mock_client.return_value.get_streams.return_value = get_generator( + "get_streams.json", Stream ) mock_client.return_value.check_user_subscription.return_value = ( UserSubscription( diff --git a/tests/components/twitch/fixtures/check_user_subscription.json b/tests/components/twitch/fixtures/check_user_subscription.json index 5e710b72699..b1b2a3d852a 100644 --- a/tests/components/twitch/fixtures/check_user_subscription.json +++ b/tests/components/twitch/fixtures/check_user_subscription.json @@ -1,4 +1,3 @@ { - "is_gift": true, - "tier": "2000" + "is_gift": true } diff --git a/tests/components/twitch/fixtures/check_user_subscription_2.json b/tests/components/twitch/fixtures/check_user_subscription_2.json index 38a1f063f96..94d56c5ee12 100644 --- a/tests/components/twitch/fixtures/check_user_subscription_2.json +++ b/tests/components/twitch/fixtures/check_user_subscription_2.json @@ -1,4 +1,3 @@ { - "is_gift": false, - "tier": "1000" + "is_gift": false } diff --git a/tests/components/twitch/fixtures/get_followed_channels.json b/tests/components/twitch/fixtures/get_followed_channels.json index 990fac390e9..4add7cc0a98 100644 --- 
a/tests/components/twitch/fixtures/get_followed_channels.json +++ b/tests/components/twitch/fixtures/get_followed_channels.json @@ -1,11 +1,9 @@ [ { - "broadcaster_id": 123, "broadcaster_login": "internetofthings", "followed_at": "2023-08-01" }, { - "broadcaster_id": 456, "broadcaster_login": "homeassistant", "followed_at": "2023-08-01" } diff --git a/tests/components/twitch/fixtures/get_followed_streams.json b/tests/components/twitch/fixtures/get_followed_streams.json deleted file mode 100644 index e02c594c4cc..00000000000 --- a/tests/components/twitch/fixtures/get_followed_streams.json +++ /dev/null @@ -1,10 +0,0 @@ -[ - { - "user_id": 123, - "game_name": "Good game", - "title": "Title", - "thumbnail_url": "stream-medium.png", - "started_at": "2021-03-10T03:18:11Z", - "viewer_count": 42 - } -] diff --git a/tests/components/twitch/fixtures/get_streams.json b/tests/components/twitch/fixtures/get_streams.json new file mode 100644 index 00000000000..3714d97aaef --- /dev/null +++ b/tests/components/twitch/fixtures/get_streams.json @@ -0,0 +1,7 @@ +[ + { + "game_name": "Good game", + "title": "Title", + "thumbnail_url": "stream-medium.png" + } +] diff --git a/tests/components/twitch/test_config_flow.py b/tests/components/twitch/test_config_flow.py index fc53b17551c..6935943a4d3 100644 --- a/tests/components/twitch/test_config_flow.py +++ b/tests/components/twitch/test_config_flow.py @@ -10,7 +10,7 @@ from homeassistant.components.twitch.const import ( DOMAIN, OAUTH2_AUTHORIZE, ) -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -109,7 +109,14 @@ async def test_reauth( ) -> None: """Check reauth flow.""" await setup_integration(hass, config_entry) - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -177,7 +184,14 @@ async def test_reauth_wrong_account( twitch_mock.return_value.get_users = lambda *args, **kwargs: get_generator( "get_users_2.json", TwitchUser ) - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/twitch/test_sensor.py b/tests/components/twitch/test_sensor.py index 613c0919c49..e5cddf8e192 100644 --- a/tests/components/twitch/test_sensor.py +++ b/tests/components/twitch/test_sensor.py @@ -3,7 +3,6 @@ from datetime import datetime from unittest.mock import AsyncMock -from dateutil.tz import tzutc from twitchAPI.object.api import FollowedChannel, Stream, UserSubscription from twitchAPI.type import TwitchResourceNotFound @@ -21,8 +20,8 @@ async def test_offline( hass: HomeAssistant, twitch_mock: AsyncMock, config_entry: MockConfigEntry ) -> None: """Test offline state.""" - twitch_mock.return_value.get_followed_streams.return_value = ( - get_generator_from_data([], Stream) + twitch_mock.return_value.get_streams.return_value = get_generator_from_data( + [], Stream ) await 
setup_integration(hass, config_entry) @@ -42,10 +41,6 @@ async def test_streaming( assert sensor_state.attributes["entity_picture"] == "stream-medium.png" assert sensor_state.attributes["game"] == "Good game" assert sensor_state.attributes["title"] == "Title" - assert sensor_state.attributes["started_at"] == datetime( - year=2021, month=3, day=10, hour=3, minute=18, second=11, tzinfo=tzutc() - ) - assert sensor_state.attributes["viewers"] == 42 async def test_oauth_without_sub_and_follow( @@ -80,7 +75,6 @@ async def test_oauth_with_sub( sensor_state = hass.states.get(ENTITY_ID) assert sensor_state.attributes["subscribed"] is True assert sensor_state.attributes["subscription_is_gifted"] is False - assert sensor_state.attributes["subscription_tier"] == 1 assert sensor_state.attributes["following"] is False diff --git a/tests/components/ukraine_alarm/test_config_flow.py b/tests/components/ukraine_alarm/test_config_flow.py index de9bdd618de..58b5dde2bac 100644 --- a/tests/components/ukraine_alarm/test_config_flow.py +++ b/tests/components/ukraine_alarm/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Ukraine Alarm config flow.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch from aiohttp import ClientConnectionError, ClientError, ClientResponseError, RequestInfo import pytest +from typing_extensions import Generator from yarl import URL from homeassistant import config_entries diff --git a/tests/components/unifi/conftest.py b/tests/components/unifi/conftest.py index 798b613b18d..4a7d86eea38 100644 --- a/tests/components/unifi/conftest.py +++ b/tests/components/unifi/conftest.py @@ -3,19 +3,21 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Coroutine, Generator +from collections.abc import Callable from datetime import timedelta from types import MappingProxyType -from typing import Any, Protocol +from typing import Any from unittest.mock import AsyncMock, patch from aiounifi.models.message import MessageKey import orjson import pytest +from typing_extensions import Generator from homeassistant.components.unifi import STORAGE_KEY, STORAGE_VERSION from homeassistant.components.unifi.const import CONF_SITE_ID, DOMAIN as UNIFI_DOMAIN from homeassistant.components.unifi.hub.websocket import RETRY_TIMER +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -51,20 +53,6 @@ CONTROLLER_HOST = { "uptime": 1562600160, } -type ConfigEntryFactoryType = Callable[[], Coroutine[Any, Any, MockConfigEntry]] - - -class WebsocketMessageMock(Protocol): - """Fixture to mock websocket message.""" - - def __call__( - self, - *, - message: MessageKey | None = None, - data: list[dict[str, Any]] | dict[str, Any] | None = None, - ) -> None: - """Send websocket message.""" - @pytest.fixture(autouse=True, name="mock_discovery") def fixture_discovery(): @@ -109,7 +97,7 @@ def fixture_config_entry( hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any], config_entry_options: MappingProxyType[str, Any], -) -> MockConfigEntry: +) -> ConfigEntry: """Define a config entry fixture.""" config_entry = MockConfigEntry( domain=UNIFI_DOMAIN, @@ -173,7 +161,6 @@ def fixture_request( dpi_app_payload: list[dict[str, Any]], dpi_group_payload: list[dict[str, Any]], port_forward_payload: list[dict[str, Any]], - traffic_rule_payload: list[dict[str, Any]], site_payload: list[dict[str, Any]], system_information_payload: list[dict[str, Any]], wlan_payload: list[dict[str, Any]], @@ -184,16 
+171,9 @@ def fixture_request( url = f"https://{host}:{DEFAULT_PORT}" def mock_get_request(path: str, payload: list[dict[str, Any]]) -> None: - # APIV2 request respoonses have `meta` and `data` automatically appended - json = {} - if path.startswith("/v2"): - json = payload - else: - json = {"meta": {"rc": "OK"}, "data": payload} - aioclient_mock.get( f"{url}{path}", - json=json, + json={"meta": {"rc": "OK"}, "data": payload}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -203,7 +183,6 @@ def fixture_request( json={"data": "login successful", "meta": {"rc": "ok"}}, headers={"content-type": CONTENT_TYPE_JSON}, ) - mock_get_request("/api/self/sites", site_payload) mock_get_request(f"/api/s/{site_id}/stat/sta", client_payload) mock_get_request(f"/api/s/{site_id}/rest/user", clients_all_payload) @@ -213,7 +192,6 @@ def fixture_request( mock_get_request(f"/api/s/{site_id}/rest/portforward", port_forward_payload) mock_get_request(f"/api/s/{site_id}/stat/sysinfo", system_information_payload) mock_get_request(f"/api/s/{site_id}/rest/wlanconf", wlan_payload) - mock_get_request(f"/v2/api/site/{site_id}/trafficrules", traffic_rule_payload) return __mock_requests @@ -285,12 +263,6 @@ def fixture_system_information_data() -> list[dict[str, Any]]: ] -@pytest.fixture(name="traffic_rule_payload") -def traffic_rule_payload_data() -> list[dict[str, Any]]: - """Traffic rule data.""" - return [] - - @pytest.fixture(name="wlan_payload") def fixture_wlan_data() -> list[dict[str, Any]]: """WLAN data.""" @@ -308,12 +280,12 @@ def fixture_default_requests( @pytest.fixture(name="config_entry_factory") async def fixture_config_entry_factory( hass: HomeAssistant, - config_entry: MockConfigEntry, + config_entry: ConfigEntry, mock_requests: Callable[[str, str], None], -) -> ConfigEntryFactoryType: +) -> Callable[[], ConfigEntry]: """Fixture factory that can set up UniFi network integration.""" - async def __mock_setup_config_entry() -> MockConfigEntry: + async def __mock_setup_config_entry() -> ConfigEntry: mock_requests(config_entry.data[CONF_HOST], config_entry.data[CONF_SITE_ID]) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -324,8 +296,8 @@ async def fixture_config_entry_factory( @pytest.fixture(name="config_entry_setup") async def fixture_config_entry_setup( - config_entry_factory: ConfigEntryFactoryType, -) -> MockConfigEntry: + hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry] +) -> ConfigEntry: """Fixture providing a set up instance of UniFi network integration.""" return await config_entry_factory() @@ -395,15 +367,13 @@ def fixture_aiounifi_websocket_state( @pytest.fixture(name="mock_websocket_message") -def fixture_aiounifi_websocket_message( - _mock_websocket: AsyncMock, -) -> WebsocketMessageMock: +def fixture_aiounifi_websocket_message(_mock_websocket: AsyncMock): """No real websocket allowed.""" def make_websocket_call( *, message: MessageKey | None = None, - data: list[dict[str, Any]] | dict[str, Any] | None = None, + data: list[dict] | dict | None = None, ) -> None: """Generate a websocket call.""" message_handler = _mock_websocket.call_args[0][0] diff --git a/tests/components/unifi/snapshots/test_button.ambr b/tests/components/unifi/snapshots/test_button.ambr deleted file mode 100644 index 3729bd31cf0..00000000000 --- a/tests/components/unifi/snapshots/test_button.ambr +++ /dev/null @@ -1,142 +0,0 @@ -# serializer version: 1 -# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.ssid_1_regenerate_password-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.ssid_1_regenerate_password', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Regenerate Password', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wlan_regenerate_password', - 'unique_id': 'regenerate_password-012345678910111213141516', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.ssid_1_regenerate_password-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'update', - 'friendly_name': 'SSID 1 Regenerate Password', - }), - 'context': , - 'entity_id': 'button.ssid_1_regenerate_password', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.switch_port_1_power_cycle-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.switch_port_1_power_cycle', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 1 Power Cycle', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'power_cycle-00:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.switch_port_1_power_cycle-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'switch Port 1 Power Cycle', - }), - 'context': , - 'entity_id': 'button.switch_port_1_power_cycle', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.switch_restart-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.switch_restart', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restart', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'device_restart-00:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.switch_restart-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 
'friendly_name': 'switch Restart', - }), - 'context': , - 'entity_id': 'button.switch_restart', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/unifi/snapshots/test_device_tracker.ambr b/tests/components/unifi/snapshots/test_device_tracker.ambr deleted file mode 100644 index 3debd512050..00000000000 --- a/tests/components/unifi/snapshots/test_device_tracker.ambr +++ /dev/null @@ -1,149 +0,0 @@ -# serializer version: 1 -# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.switch_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'device_tracker', - 'entity_category': , - 'entity_id': 'device_tracker.switch_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Switch 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.switch_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Switch 1', - 'ip': '10.0.1.1', - 'mac': '00:00:00:00:01:01', - 'source_type': , - }), - 'context': , - 'entity_id': 'device_tracker.switch_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'home', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.wd_client_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'device_tracker', - 'entity_category': , - 'entity_id': 'device_tracker.wd_client_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'wd_client_1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'site_id-00:00:00:00:00:02', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.wd_client_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'wd_client_1', - 'host_name': 'wd_client_1', - 'mac': '00:00:00:00:00:02', - 'source_type': , - }), - 'context': , - 'entity_id': 'device_tracker.wd_client_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'not_home', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.ws_client_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'device_tracker', - 'entity_category': , - 'entity_id': 'device_tracker.ws_client_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'ws_client_1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'site_id-00:00:00:00:00:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.ws_client_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'ws_client_1', - 'host_name': 'ws_client_1', - 'ip': '10.0.0.1', - 'mac': '00:00:00:00:00:01', - 'source_type': , - }), - 'context': , - 'entity_id': 'device_tracker.ws_client_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'not_home', - }) -# --- diff --git a/tests/components/unifi/snapshots/test_diagnostics.ambr b/tests/components/unifi/snapshots/test_diagnostics.ambr index 4ba90a00113..fb7415c59ab 100644 --- a/tests/components/unifi/snapshots/test_diagnostics.ambr +++ b/tests/components/unifi/snapshots/test_diagnostics.ambr @@ -27,8 +27,6 @@ 'verify_ssl': False, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'unifi', 'entry_id': '1', 'minor_version': 1, diff --git a/tests/components/unifi/snapshots/test_image.ambr b/tests/components/unifi/snapshots/test_image.ambr index 32e1a5ff622..83d76688ea3 100644 --- a/tests/components/unifi/snapshots/test_image.ambr +++ b/tests/components/unifi/snapshots/test_image.ambr @@ -1,51 +1,9 @@ # serializer version: 1 -# name: test_entity_and_device_data[site_payload0-wlan_payload0][image.ssid_1_qr_code-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'image', - 'entity_category': , - 'entity_id': 'image.ssid_1_qr_code', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'QR Code', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wlan_qr_code', - 'unique_id': 'qr_code-012345678910111213141516', - 'unit_of_measurement': None, - }) +# name: test_wlan_qr_code + b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x84\x00\x00\x00\x84\x01\x00\x00\x00\x00y?\xbe\n\x00\x00\x00\xcaIDATx\xda\xedV[\n\xc30\x0c\x13\xbb\x80\xef\x7fK\xdd\xc0\x93\x94\xfd\xac\x1fcL\xfbl(\xc4\x04*\xacG\xdcb/\x8b\xb8O\xdeO\x00\xccP\x95\x8b\xe5\x03\xd7\xf5\xcd\x89pF\xcf\x8c \\48\x08\nS\x948\x03p\xfe\x80C\xa8\x9d\x16\xc7P\xabvJ}\xe2\xd7\x84[\xe5W\xfc7\xbbS\xfd\xde\xcfB\xf115\xa2\xe3%\x99\xad\x93\xa0:\xbf6\xbeS\xec\x1a^\xb4\xed\xfb\xb2\xab\xd1\x99\xc9\xcdAjx\x89\x0e\xc5\xea\xf4T\xf9\xee\xe40m58\xb6<\x1b\xab~\xf4\xban\xd7:\xceu\x9e\x05\xc4I\xa6\xbb\xfb%q<7:\xbf\xa2\x90wo\xf5, - 'entity_id': 'image.ssid_1_qr_code', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-01-01T01:01:00+00:00', - }) +# name: test_wlan_qr_code.1 + b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x94\x00\x00\x00\x94\x01\x00\x00\x00\x00]G=y\x00\x00\x00\xfdIDATx\xda\xedV1\x8e\x041\x0cB\xf7\x01\xff\xff\x97\xfc\xc0\x0bd\xb6\xda\xe6\xeeB\xb9V\xa4dR \xc7`<\xd8\x8f \xbew\x7f\xb9\x030\x98!\xb5\xe9\xb8\xfc\xc1g\xfc\xf6Nx\xa3%\x9c\x84\xbf\xae\xf1\x84\xb5 
\xe796\xf0\\\npjx~1[xZ\\\xbfy+\xf5\xc3\x9b\x8c\xe9\xf0\xeb\xd0k]\xbe\xa3\xa1\xeb\xfaI\x850\xa2Ex\x9f\x1f-\xeb\xe46!\xba\xc0G\x18\xde\xb0|\x8f\x07e8\xca\xd0\xc0,\xd4/\xed&PA\x1a\xf5\xbe~R2m\x07\x8fa\\\xe3\x9d\xc4DnG\x7f\xb0F&\xc4L\xa3~J\xcciy\xdfF\xff\x9a`i\xda$w\xfcom\xcc\x02Kw\x14\xf4\xc2\xd3fn\xba-\xf0A&A\xe2\x0c\x92\x8e\xbfL<\xcb.\xd8\xf1?0~o\xc14\xfcy\xdc\xc48\xa6\xd0\x98\x1f\x99\xbd\xfb\xd0\xd3\x98o\xd1tFR\x07\x8f\xe95lo\xbeE\x88`\x8f\xdf\x8c`lE\x7f\xdf\xff\xc4\x7f\xde\xbd\x00\xfc\xb3\x80\x95k\x06#\x19\x00\x00\x00\x00IEND\xaeB`\x82' # --- # name: test_wlan_qr_code[wlan_payload0] b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x84\x00\x00\x00\x84\x01\x00\x00\x00\x00y?\xbe\n\x00\x00\x00\xcaIDATx\xda\xedV[\n\xc30\x0c\x13\xbb\x80\xef\x7fK\xdd\xc0\x93\x94\xfd\xac\x1fcL\xfbl(\xc4\x04*\xacG\xdcb/\x8b\xb8O\xdeO\x00\xccP\x95\x8b\xe5\x03\xd7\xf5\xcd\x89pF\xcf\x8c \\48\x08\nS\x948\x03p\xfe\x80C\xa8\x9d\x16\xc7P\xabvJ}\xe2\xd7\x84[\xe5W\xfc7\xbbS\xfd\xde\xcfB\xf115\xa2\xe3%\x99\xad\x93\xa0:\xbf6\xbeS\xec\x1a^\xb4\xed\xfb\xb2\xab\xd1\x99\xc9\xcdAjx\x89\x0e\xc5\xea\xf4T\xf9\xee\xe40m58\xb6<\x1b\xab~\xf4\xban\xd7:\xceu\x9e\x05\xc4I\xa6\xbb\xfb%q<7:\xbf\xa2\x90wo\xf5, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.device_clients', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Clients', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_clients', - 'unique_id': 'device_clients-20:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_clients-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device Clients', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.device_clients', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.device_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'State', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_state', - 'unique_id': 'device_state-20:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Device State', - 'options': list([ - 'Disconnected', - 
'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', - ]), - }), - 'context': , - 'entity_id': 'sensor.device_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Connected', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.device_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'device_temperature-20:00:00:00:01:01', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Device Temperature', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.device_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '30', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_uptime-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.device_uptime', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Uptime', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'device_uptime-20:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_uptime-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Device Uptime', - }), - 'context': , - 'entity_id': 'sensor.device_uptime', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-01-01T01:00:00+00:00', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_budget-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_budget', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 
'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'AC Power Budget', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'ac_power_budget-01:02:03:04:05:ff', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_budget-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Dummy USP-PDU-Pro AC Power Budget', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_budget', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1875.000', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_consumption-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'AC Power Consumption', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'ac_power_conumption-01:02:03:04:05:ff', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Dummy USP-PDU-Pro AC Power Consumption', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '201.683', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_clients-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_clients', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Clients', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_clients', - 'unique_id': 'device_clients-01:02:03:04:05:ff', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_clients-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Dummy USP-PDU-Pro Clients', - 'state_class': , - }), - 'context': , - 'entity_id': 
'sensor.dummy_usp_pdu_pro_clients', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_cpu_utilization-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_cpu_utilization', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CPU utilization', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_cpu_utilization', - 'unique_id': 'cpu_utilization-01:02:03:04:05:ff', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_cpu_utilization-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Dummy USP-PDU-Pro CPU utilization', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_cpu_utilization', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.4', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_memory_utilization-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_memory_utilization', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Memory utilization', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_memory_utilization', - 'unique_id': 'memory_utilization-01:02:03:04:05:ff', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_memory_utilization-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Dummy USP-PDU-Pro Memory utilization', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_memory_utilization', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '28.9', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_outlet_2_outlet_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_outlet_2_outlet_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': 
None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 2 Outlet Power', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet_power-01:02:03:04:05:ff_2', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_outlet_2_outlet_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2 Outlet Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_outlet_2_outlet_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '73.827', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'State', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_state', - 'unique_id': 'device_state-01:02:03:04:05:ff', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Dummy USP-PDU-Pro State', - 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', - ]), - }), - 'context': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Connected', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_uptime-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_uptime', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Uptime', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'device_uptime-01:02:03:04:05:ff', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_uptime-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Dummy USP-PDU-Pro Uptime', - }), - 'context': , - 'entity_id': 'sensor.dummy_usp_pdu_pro_uptime', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2020-12-18T05:36:58+00:00', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_clients-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_clients', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Clients', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_clients', - 'unique_id': 'device_clients-10:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_clients-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'mock-name Clients', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_clients', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan2_latency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_cloudflare_wan2_latency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Cloudflare WAN2 latency', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'cloudflare_wan2_latency-10:00:00:00:01:01', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan2_latency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'mock-name Cloudflare WAN2 latency', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_cloudflare_wan2_latency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan_latency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 
'entity_category': , - 'entity_id': 'sensor.mock_name_cloudflare_wan_latency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Cloudflare WAN latency', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'cloudflare_wan_latency-10:00:00:00:01:01', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan_latency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'mock-name Cloudflare WAN latency', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_cloudflare_wan_latency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '30', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan2_latency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_google_wan2_latency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Google WAN2 latency', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'google_wan2_latency-10:00:00:00:01:01', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan2_latency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'mock-name Google WAN2 latency', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_google_wan2_latency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan_latency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_google_wan_latency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Google WAN latency', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'google_wan_latency-10:00:00:00:01:01', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan_latency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 
'friendly_name': 'mock-name Google WAN latency', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_google_wan_latency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '53', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan2_latency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_microsoft_wan2_latency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Microsoft WAN2 latency', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'microsoft_wan2_latency-10:00:00:00:01:01', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan2_latency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'mock-name Microsoft WAN2 latency', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_microsoft_wan2_latency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan_latency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_microsoft_wan_latency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Microsoft WAN latency', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'microsoft_wan_latency-10:00:00:00:01:01', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan_latency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'duration', - 'friendly_name': 'mock-name Microsoft WAN latency', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_microsoft_wan_latency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '56', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_poe_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_port_1_poe_power', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 1 PoE Power', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe_power-10:00:00:00:01:01_1', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_poe_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'mock-name Port 1 PoE Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_port_1_poe_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.56', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_rx-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_port_1_rx', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 1 RX', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'port_bandwidth_rx', - 'unique_id': 'port_rx-10:00:00:00:01:01_1', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_rx-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'mock-name Port 1 RX', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_port_1_rx', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.00000', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_tx-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_port_1_tx', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 1 TX', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'port_bandwidth_tx', - 'unique_id': 'port_tx-10:00:00:00:01:01_1', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_tx-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'mock-name Port 1 TX', - 'state_class': , - 
'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_port_1_tx', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.00000', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_poe_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_port_2_poe_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 2 PoE Power', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe_power-10:00:00:00:01:01_2', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_poe_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'mock-name Port 2 PoE Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_port_2_poe_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.56', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_rx-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_port_2_rx', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 2 RX', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'port_bandwidth_rx', - 'unique_id': 'port_rx-10:00:00:00:01:01_2', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_rx-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'mock-name Port 2 RX', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_port_2_rx', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.00000', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_tx-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_port_2_tx', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 
'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 2 TX', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'port_bandwidth_tx', - 'unique_id': 'port_tx-10:00:00:00:01:01_2', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_tx-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'mock-name Port 2 TX', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_port_2_tx', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.00000', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_rx-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_port_3_rx', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 3 RX', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'port_bandwidth_rx', - 'unique_id': 'port_rx-10:00:00:00:01:01_3', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_rx-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'mock-name Port 3 RX', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_port_3_rx', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.00000', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_tx-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_port_3_tx', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 3 TX', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'port_bandwidth_tx', - 'unique_id': 'port_tx-10:00:00:00:01:01_3', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_tx-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'mock-name Port 3 TX', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 
'sensor.mock_name_port_3_tx', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.00000', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_poe_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_port_4_poe_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 4 PoE Power', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe_power-10:00:00:00:01:01_4', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_poe_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'mock-name Port 4 PoE Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_port_4_poe_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.00', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_rx-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_port_4_rx', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 4 RX', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'port_bandwidth_rx', - 'unique_id': 'port_rx-10:00:00:00:01:01_4', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_rx-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'mock-name Port 4 RX', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_port_4_rx', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.00000', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_tx-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_port_4_tx', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), 
- }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 4 TX', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'port_bandwidth_tx', - 'unique_id': 'port_tx-10:00:00:00:01:01_4', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_tx-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'mock-name Port 4 TX', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.mock_name_port_4_tx', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.00000', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_state-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_state', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'State', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_state', - 'unique_id': 'device_state-10:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_state-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'mock-name State', - 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', - ]), - }), - 'context': , - 'entity_id': 'sensor.mock_name_state', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Connected', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_uptime-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_name_uptime', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Uptime', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'device_uptime-10:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_uptime-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 
'timestamp', - 'friendly_name': 'mock-name Uptime', - }), - 'context': , - 'entity_id': 'sensor.mock_name_uptime', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.ssid_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.ssid_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wlan_clients', - 'unique_id': 'wlan_clients-012345678910111213141516', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.ssid_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'SSID 1', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.ssid_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_rx-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.wired_client_rx', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'RX', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'client_bandwidth_rx', - 'unique_id': 'rx-00:00:00:00:00:01', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_rx-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'Wired client RX', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.wired_client_rx', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1234.0', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_tx-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.wired_client_tx', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'TX', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'client_bandwidth_tx', - 'unique_id': 
'tx-00:00:00:00:00:01', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_tx-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'Wired client TX', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.wired_client_tx', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5678.0', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_uptime-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.wired_client_uptime', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Uptime', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'uptime-00:00:00:00:00:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_uptime-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Wired client Uptime', - }), - 'context': , - 'entity_id': 'sensor.wired_client_uptime', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2020-09-14T14:41:45+00:00', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_rx-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.wireless_client_rx', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'RX', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'client_bandwidth_rx', - 'unique_id': 'rx-00:00:00:00:00:02', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_rx-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'Wireless client RX', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.wireless_client_rx', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2345.0', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_tx-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 
'sensor.wireless_client_tx', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'TX', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'client_bandwidth_tx', - 'unique_id': 'tx-00:00:00:00:00:02', - 'unit_of_measurement': , - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_tx-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_rate', - 'friendly_name': 'Wireless client TX', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.wireless_client_tx', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '6789.0', - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_uptime-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.wireless_client_uptime', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Uptime', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'uptime-00:00:00:00:00:02', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_uptime-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Wireless client Uptime', - }), - 'context': , - 'entity_id': 'sensor.wireless_client_uptime', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-01-01T01:00:00+00:00', - }) +# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-config_entry_options0] + 'data_rate' +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-config_entry_options0].1 + 'data_rate' +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-config_entry_options0].2 + +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-config_entry_options0].3 + +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-config_entry_options0].4 + '1234.0' +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0] + 'rx-00:00:00:00:00:01' +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].1 + +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].2 + 'data_rate' +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].3 + 'Wired client RX' +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].4 + +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].5 + +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].6 + '1234.0' +# --- +# name: 
test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0] + 'uptime-00:00:00:00:00:01' +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].1 + +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].2 + 'timestamp' +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].3 + 'Wired client Uptime' +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].4 + None +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].5 + None +# --- +# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].6 + '2020-09-14T14:41:45+00:00' +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0] + 'rx-00:00:00:00:00:01' +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].1 + +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].2 + 'data_rate' +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].3 + 'Wired client RX' +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].4 + +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].5 + +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].6 + '1234.0' +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0] + 'data_rate' +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].1 + 'data_rate' +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].2 + +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].3 + +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].4 + '5678.0' +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0] + 'tx-00:00:00:00:00:01' +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].1 + +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].2 + 'data_rate' +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].3 + 'Wired client TX' +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].4 + +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].5 + +# --- +# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].6 + '5678.0' +# --- +# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0] + 'tx-00:00:00:00:00:01' +# --- +# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].1 + +# --- +# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].2 + 'data_rate' +# --- +# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].3 + 'Wired client TX' +# --- +# name: 
test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].4 + +# --- +# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].5 + +# --- +# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].6 + '5678.0' +# --- +# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0] + 'uptime-00:00:00:00:00:01' +# --- +# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].1 + +# --- +# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].2 + 'timestamp' +# --- +# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].3 + 'Wired client Uptime' +# --- +# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].4 + None +# --- +# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].5 + None +# --- +# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].6 + '2020-09-14T14:41:45+00:00' +# --- +# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0] + 'rx-00:00:00:00:00:02' +# --- +# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].1 + +# --- +# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].2 + 'data_rate' +# --- +# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].3 + 'Wireless client RX' +# --- +# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].4 + +# --- +# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].5 + +# --- +# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].6 + '2345.0' +# --- +# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0] + 'rx-00:00:00:00:00:01' +# --- +# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].1 + +# --- +# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].2 + 'data_rate' +# --- +# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].3 + 'Wireless client RX' +# --- +# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].4 + +# --- +# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].5 + +# --- +# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].6 + '2345.0' +# --- +# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0] + 'tx-00:00:00:00:00:02' +# --- +# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].1 + +# --- +# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].2 + 'data_rate' +# --- +# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].3 + 'Wireless client TX' +# --- +# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].4 + +# --- +# name: 
test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].5 + +# --- +# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].6 + '6789.0' +# --- +# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0] + 'tx-00:00:00:00:00:01' +# --- +# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].1 + +# --- +# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].2 + 'data_rate' +# --- +# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].3 + 'Wireless client TX' +# --- +# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].4 + +# --- +# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].5 + +# --- +# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].6 + '6789.0' +# --- +# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0] + 'uptime-00:00:00:00:00:01' +# --- +# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].1 + +# --- +# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].2 + 'timestamp' +# --- +# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].3 + 'Wireless client Uptime' +# --- +# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].4 + None +# --- +# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].5 + None +# --- +# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].6 + '2021-01-01T01:00:00+00:00' # --- diff --git a/tests/components/unifi/snapshots/test_switch.ambr b/tests/components/unifi/snapshots/test_switch.ambr deleted file mode 100644 index 45e6188a3f4..00000000000 --- a/tests/components/unifi/snapshots/test_switch.ambr +++ /dev/null @@ -1,517 +0,0 @@ -# serializer version: 1 -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.block_client_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_client', - 'unique_id': 'block-00:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Block Client 1', - }), - 'context': , - 'entity_id': 
'switch.block_client_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.block_media_streaming', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Media Streaming', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dpi_restriction', - 'unique_id': '5f976f4ae3c58f018ec7dff6', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Block Media Streaming', - }), - 'context': , - 'entity_id': 'switch.block_media_streaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 2', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 
'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'USB Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 1 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_port_control', - 'unique_id': 'poe-10:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 1 PoE', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 2 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_port_control', - 'unique_id': 'poe-10:00:00:00:01:01_2', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 2 PoE', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 4 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_port_control', - 'unique_id': 'poe-10:00:00:00:01:01_4', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 4 PoE', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.plug_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Plug Outlet 1', - }), - 'context': , - 'entity_id': 'switch.plug_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.ssid_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wlan_control', - 'unique_id': 'wlan-012345678910111213141516', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'SSID 1', - }), - 'context': , - 'entity_id': 'switch.ssid_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.unifi_network_plex', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'plex', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'port_forward_control', - 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'UniFi Network plex', - }), - 'context': , - 'entity_id': 'switch.unifi_network_plex', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_test_traffic_rule-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.unifi_network_test_traffic_rule', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 
'original_icon': None, - 'original_name': 'Test Traffic Rule', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'traffic_rule_control', - 'unique_id': 'traffic_rule-6452cd9b859d5b11aa002ea1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_test_traffic_rule-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'UniFi Network Test Traffic Rule', - }), - 'context': , - 'entity_id': 'switch.unifi_network_test_traffic_rule', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/unifi/snapshots/test_update.ambr b/tests/components/unifi/snapshots/test_update.ambr deleted file mode 100644 index 405cb9d52a6..00000000000 --- a/tests/components/unifi/snapshots/test_update.ambr +++ /dev/null @@ -1,237 +0,0 @@ -# serializer version: 1 -# name: test_entity_and_device_data[site_payload0-device_payload0][update.device_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'update', - 'entity_category': , - 'entity_id': 'update.device_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'device_update-00:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][update.device_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'auto_update': False, - 'device_class': 'firmware', - 'display_precision': 0, - 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', - 'friendly_name': 'Device 1', - 'in_progress': False, - 'installed_version': '4.0.42.10433', - 'latest_version': '4.3.17.11279', - 'release_summary': None, - 'release_url': None, - 'skipped_version': None, - 'supported_features': , - 'title': None, - 'update_percentage': None, - }), - 'context': , - 'entity_id': 'update.device_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][update.device_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'update', - 'entity_category': , - 'entity_id': 'update.device_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'device_update-00:00:00:00:01:02', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][update.device_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'auto_update': False, - 
'device_class': 'firmware', - 'display_precision': 0, - 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', - 'friendly_name': 'Device 2', - 'in_progress': False, - 'installed_version': '4.0.42.10433', - 'latest_version': '4.0.42.10433', - 'release_summary': None, - 'release_url': None, - 'skipped_version': None, - 'supported_features': , - 'title': None, - 'update_percentage': None, - }), - 'context': , - 'entity_id': 'update.device_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_entity_and_device_data[site_payload1-device_payload0][update.device_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'update', - 'entity_category': , - 'entity_id': 'update.device_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'device_update-00:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload1-device_payload0][update.device_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'auto_update': False, - 'device_class': 'firmware', - 'display_precision': 0, - 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', - 'friendly_name': 'Device 1', - 'in_progress': False, - 'installed_version': '4.0.42.10433', - 'latest_version': '4.3.17.11279', - 'release_summary': None, - 'release_url': None, - 'skipped_version': None, - 'supported_features': , - 'title': None, - 'update_percentage': None, - }), - 'context': , - 'entity_id': 'update.device_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload1-device_payload0][update.device_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'update', - 'entity_category': , - 'entity_id': 'update.device_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'device_update-00:00:00:00:01:02', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload1-device_payload0][update.device_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'auto_update': False, - 'device_class': 'firmware', - 'display_precision': 0, - 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', - 'friendly_name': 'Device 2', - 'in_progress': False, - 'installed_version': '4.0.42.10433', - 'latest_version': '4.0.42.10433', - 'release_summary': None, - 'release_url': None, - 'skipped_version': None, - 'supported_features': , - 'title': None, - 'update_percentage': None, - }), - 'context': , - 'entity_id': 'update.device_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git 
a/tests/components/unifi/test_button.py b/tests/components/unifi/test_button.py index fc3aeccea9f..b58d01e7724 100644 --- a/tests/components/unifi/test_button.py +++ b/tests/components/unifi/test_button.py @@ -1,35 +1,27 @@ """UniFi Network button platform tests.""" -from copy import deepcopy from datetime import timedelta from typing import Any from unittest.mock import patch -from aiounifi.models.message import MessageKey import pytest -from syrupy import SnapshotAssertion -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, ButtonDeviceClass from homeassistant.components.unifi.const import CONF_SITE_ID -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY, ConfigEntry from homeassistant.const import ( + ATTR_DEVICE_CLASS, CONF_HOST, CONTENT_TYPE_JSON, STATE_UNAVAILABLE, - Platform, + EntityCategory, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler import homeassistant.util.dt as dt_util -from .conftest import ( - ConfigEntryFactoryType, - WebsocketMessageMock, - WebsocketStateManager, -) - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +from tests.common import async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker RANDOM_TOKEN = "random_token" @@ -127,44 +119,33 @@ WLAN_REGENERATE_PASSWORD = [ ] -@pytest.mark.parametrize("device_payload", [DEVICE_RESTART + DEVICE_POWER_CYCLE_POE]) -@pytest.mark.parametrize("wlan_payload", [WLAN_REGENERATE_PASSWORD]) -@pytest.mark.parametrize( - "site_payload", - [ - [{"desc": "Site name", "name": "site_id", "role": "admin", "_id": "1"}], - [{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}], - ], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_entity_and_device_data( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - site_payload: dict[str, Any], - snapshot: SnapshotAssertion, -) -> None: - """Validate entity and device data with and without admin rights.""" - with patch("homeassistant.components.unifi.PLATFORMS", [Platform.BUTTON]): - config_entry = await config_entry_factory() - if site_payload[0]["role"] == "admin": - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - else: - assert len(hass.states.async_entity_ids(BUTTON_DOMAIN)) == 0 - - async def _test_button_entity( hass: HomeAssistant, + entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_state: WebsocketStateManager, - config_entry: MockConfigEntry, + mock_websocket_state, + config_entry: ConfigEntry, + entity_count: int, entity_id: str, + unique_id: str, + device_class: ButtonDeviceClass, request_method: str, request_path: str, request_data: dict[str, Any], call: dict[str, str], ) -> None: """Test button entity.""" + assert len(hass.states.async_entity_ids(BUTTON_DOMAIN)) == entity_count + + ent_reg_entry = entity_registry.async_get(entity_id) + assert ent_reg_entry.unique_id == unique_id + assert ent_reg_entry.entity_category is EntityCategory.CONFIG + + # Validate state object + button = hass.states.get(entity_id) + assert button is not None + assert button.attributes.get(ATTR_DEVICE_CLASS) == device_class + # Send and validate device command 
aioclient_mock.clear_requests() aioclient_mock.request( @@ -194,7 +175,10 @@ async def _test_button_entity( @pytest.mark.parametrize( ( "device_payload", + "entity_count", "entity_id", + "unique_id", + "device_class", "request_method", "request_path", "call", @@ -202,7 +186,10 @@ async def _test_button_entity( [ ( DEVICE_RESTART, + 1, "button.switch_restart", + "device_restart-00:00:00:00:01:01", + ButtonDeviceClass.RESTART, "post", "/cmd/devmgr", { @@ -213,7 +200,10 @@ async def _test_button_entity( ), ( DEVICE_POWER_CYCLE_POE, + 2, "button.switch_port_1_power_cycle", + "power_cycle-00:00:00:00:01:01_1", + ButtonDeviceClass.RESTART, "post", "/cmd/devmgr", { @@ -226,10 +216,14 @@ async def _test_button_entity( ) async def test_device_button_entities( hass: HomeAssistant, + entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, - mock_websocket_state: WebsocketStateManager, + config_entry_setup: ConfigEntry, + mock_websocket_state, + entity_count: int, entity_id: str, + unique_id: str, + device_class: ButtonDeviceClass, request_method: str, request_path: str, call: dict[str, str], @@ -237,10 +231,14 @@ async def test_device_button_entities( """Test button entities based on device sources.""" await _test_button_entity( hass, + entity_registry, aioclient_mock, mock_websocket_state, config_entry_setup, + entity_count, entity_id, + unique_id, + device_class, request_method, request_path, {}, @@ -251,7 +249,10 @@ async def test_device_button_entities( @pytest.mark.parametrize( ( "wlan_payload", + "entity_count", "entity_id", + "unique_id", + "device_class", "request_method", "request_path", "request_data", @@ -260,7 +261,10 @@ async def test_device_button_entities( [ ( WLAN_REGENERATE_PASSWORD, + 1, "button.ssid_1_regenerate_password", + "regenerate_password-012345678910111213141516", + ButtonDeviceClass.UPDATE, "put", f"/rest/wlanconf/{WLAN_REGENERATE_PASSWORD[0]["_id"]}", { @@ -275,9 +279,12 @@ async def test_wlan_button_entities( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, - mock_websocket_state: WebsocketStateManager, + config_entry_setup: ConfigEntry, + mock_websocket_state, + entity_count: int, entity_id: str, + unique_id: str, + device_class: ButtonDeviceClass, request_method: str, request_path: str, request_data: dict[str, Any], @@ -299,42 +306,16 @@ async def test_wlan_button_entities( await _test_button_entity( hass, + entity_registry, aioclient_mock, mock_websocket_state, config_entry_setup, + entity_count, entity_id, + unique_id, + device_class, request_method, request_path, request_data, call, ) - - -@pytest.mark.parametrize("device_payload", [DEVICE_POWER_CYCLE_POE]) -@pytest.mark.usefixtures("config_entry_setup") -async def test_power_cycle_availability( - hass: HomeAssistant, - mock_websocket_message: WebsocketMessageMock, - device_payload: dict[str, Any], -) -> None: - """Verify that disabling PoE marks entity as unavailable.""" - entity_id = "button.switch_port_1_power_cycle" - - assert hass.states.get(entity_id).state != STATE_UNAVAILABLE - - # PoE disabled - - device_1 = deepcopy(device_payload[0]) - device_1["port_table"][0]["poe_enable"] = False - mock_websocket_message(message=MessageKey.DEVICE, data=device_1) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # PoE enabled - device_1 = deepcopy(device_payload[0]) - device_1["port_table"][0]["poe_enable"] = True - 
mock_websocket_message(message=MessageKey.DEVICE, data=device_1) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state != STATE_UNAVAILABLE diff --git a/tests/components/unifi/test_config_flow.py b/tests/components/unifi/test_config_flow.py index 71b196550da..7b37437cd1d 100644 --- a/tests/components/unifi/test_config_flow.py +++ b/tests/components/unifi/test_config_flow.py @@ -24,6 +24,7 @@ from homeassistant.components.unifi.const import ( CONF_TRACK_WIRED_CLIENTS, DOMAIN as UNIFI_DOMAIN, ) +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -34,9 +35,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .conftest import ConfigEntryFactoryType - from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker CLIENTS = [{"mac": "00:00:00:00:00:01"}] @@ -136,7 +136,9 @@ async def test_flow_works(hass: HomeAssistant, mock_discovery) -> None: } -async def test_flow_works_negative_discovery(hass: HomeAssistant) -> None: +async def test_flow_works_negative_discovery( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: """Test config flow with a negative outcome of async_discovery_unifi.""" result = await hass.config_entries.flow.async_init( UNIFI_DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -296,12 +298,20 @@ async def test_flow_fails_hub_unavailable(hass: HomeAssistant) -> None: async def test_reauth_flow_update_configuration( - hass: HomeAssistant, config_entry_setup: MockConfigEntry + hass: HomeAssistant, config_entry_setup: ConfigEntry ) -> None: """Verify reauth flow can update hub configuration.""" config_entry = config_entry_setup - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + UNIFI_DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": config_entry.unique_id, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -328,42 +338,12 @@ async def test_reauth_flow_update_configuration( assert config_entry.data[CONF_PASSWORD] == "new_pass" -async def test_reauth_flow_update_configuration_on_not_loaded_entry( - hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType -) -> None: - """Verify reauth flow can update hub configuration on a not loaded entry.""" - with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.RequestError): - config_entry = await config_entry_factory() - - result = await config_entry.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_HOST: "1.2.3.4", - CONF_USERNAME: "new_name", - CONF_PASSWORD: "new_pass", - CONF_PORT: 1234, - CONF_VERIFY_SSL: True, - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - assert config_entry.data[CONF_HOST] == "1.2.3.4" - assert config_entry.data[CONF_USERNAME] == "new_name" - assert config_entry.data[CONF_PASSWORD] == "new_pass" - - @pytest.mark.parametrize("client_payload", [CLIENTS]) @pytest.mark.parametrize("device_payload", [DEVICES]) @pytest.mark.parametrize("wlan_payload", [WLANS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) async def 
test_advanced_option_flow( - hass: HomeAssistant, config_entry_setup: MockConfigEntry + hass: HomeAssistant, config_entry_setup: ConfigEntry ) -> None: """Test advanced config flow options.""" config_entry = config_entry_setup @@ -447,7 +427,7 @@ async def test_advanced_option_flow( @pytest.mark.parametrize("client_payload", [CLIENTS]) async def test_simple_option_flow( - hass: HomeAssistant, config_entry_setup: MockConfigEntry + hass: HomeAssistant, config_entry_setup: ConfigEntry ) -> None: """Test simple config flow options.""" config_entry = config_entry_setup @@ -516,8 +496,9 @@ async def test_form_ssdp(hass: HomeAssistant) -> None: } -@pytest.mark.usefixtures("config_entry") -async def test_form_ssdp_aborts_if_host_already_exists(hass: HomeAssistant) -> None: +async def test_form_ssdp_aborts_if_host_already_exists( + hass: HomeAssistant, config_entry: ConfigEntry +) -> None: """Test we abort if the host is already configured.""" result = await hass.config_entries.flow.async_init( UNIFI_DOMAIN, @@ -537,8 +518,9 @@ async def test_form_ssdp_aborts_if_host_already_exists(hass: HomeAssistant) -> N assert result["reason"] == "already_configured" -@pytest.mark.usefixtures("config_entry") -async def test_form_ssdp_aborts_if_serial_already_exists(hass: HomeAssistant) -> None: +async def test_form_ssdp_aborts_if_serial_already_exists( + hass: HomeAssistant, config_entry: ConfigEntry +) -> None: """Test we abort if the serial is already configured.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/unifi/test_device_tracker.py b/tests/components/unifi/test_device_tracker.py index c653370656d..984fe50753f 100644 --- a/tests/components/unifi/test_device_tracker.py +++ b/tests/components/unifi/test_device_tracker.py @@ -1,20 +1,20 @@ """The tests for the UniFi Network device tracker platform.""" +from collections.abc import Callable from datetime import timedelta from types import MappingProxyType from typing import Any -from unittest.mock import patch from aiounifi.models.event import EventKey from aiounifi.models.message import MessageKey from freezegun.api import FrozenDateTimeFactory, freeze_time import pytest -from syrupy import SnapshotAssertion from homeassistant.components.device_tracker import DOMAIN as TRACKER_DOMAIN from homeassistant.components.unifi.const import ( CONF_BLOCK_CLIENT, CONF_CLIENT_SOURCE, + CONF_DETECTION_TIME, CONF_IGNORE_WIRED_BUG, CONF_SSID_FILTER, CONF_TRACK_CLIENTS, @@ -23,18 +23,13 @@ from homeassistant.components.unifi.const import ( DEFAULT_DETECTION_TIME, DOMAIN as UNIFI_DOMAIN, ) -from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNAVAILABLE, Platform +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util -from .conftest import ( - ConfigEntryFactoryType, - WebsocketMessageMock, - WebsocketStateManager, -) - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +from tests.common import async_fire_time_changed WIRED_CLIENT_1 = { "hostname": "wd_client_1", @@ -90,25 +85,6 @@ SWITCH_1 = { } -@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT_1, WIRELESS_CLIENT_1]]) -@pytest.mark.parametrize("device_payload", [[SWITCH_1]]) -@pytest.mark.parametrize( - "site_payload", - [[{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}]], -) 
-@pytest.mark.usefixtures("mock_device_registry") -async def test_entity_and_device_data( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - snapshot: SnapshotAssertion, -) -> None: - """Validate entity and device data with and without admin rights.""" - with patch("homeassistant.components.unifi.PLATFORMS", [Platform.DEVICE_TRACKER]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - @pytest.mark.parametrize( "client_payload", [[WIRELESS_CLIENT_1, WIRED_BUG_CLIENT, UNSEEN_CLIENT]] ) @@ -116,8 +92,8 @@ async def test_entity_and_device_data( @pytest.mark.usefixtures("mock_device_registry") async def test_client_state_update( hass: HomeAssistant, - mock_websocket_message: WebsocketMessageMock, - config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message, + config_entry_factory: Callable[[], ConfigEntry], client_payload: list[dict[str, Any]], ) -> None: """Verify tracking of wireless clients.""" @@ -169,7 +145,7 @@ async def test_client_state_update( async def test_client_state_from_event_source( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - mock_websocket_message: WebsocketMessageMock, + mock_websocket_message, client_payload: list[dict[str, Any]], ) -> None: """Verify update state of client based on event source.""" @@ -237,40 +213,67 @@ async def test_client_state_from_event_source( assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME -@pytest.mark.parametrize("device_payload", [[SWITCH_1]]) -@pytest.mark.usefixtures("mock_device_registry") @pytest.mark.parametrize( - ("state", "interval", "expected"), + "device_payload", [ - # Start home, new signal but still home, heartbeat timer triggers away - (1, 20, (STATE_HOME, STATE_HOME, STATE_NOT_HOME)), - # Start away, new signal but still home, heartbeat time do not trigger - (0, 40, (STATE_NOT_HOME, STATE_HOME, STATE_HOME)), + [ + { + "board_rev": 3, + "device_id": "mock-id", + "has_fan": True, + "fan_level": 0, + "ip": "10.0.1.1", + "last_seen": 1562600145, + "mac": "00:00:00:00:01:01", + "model": "US16P150", + "name": "Device 1", + "next_interval": 20, + "overheating": True, + "state": 1, + "type": "usw", + "upgradable": True, + "version": "4.0.42.10433", + }, + { + "board_rev": 3, + "device_id": "mock-id", + "has_fan": True, + "ip": "10.0.1.2", + "mac": "00:00:00:00:01:02", + "model": "US16P150", + "name": "Device 2", + "next_interval": 20, + "state": 0, + "type": "usw", + "version": "4.0.42.10433", + }, + ] ], ) -async def test_tracked_device_state_change( +@pytest.mark.usefixtures("config_entry_setup") +@pytest.mark.usefixtures("mock_device_registry") +async def test_tracked_devices( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - config_entry_factory: ConfigEntryFactoryType, - mock_websocket_message: WebsocketMessageMock, + mock_websocket_message, device_payload: list[dict[str, Any]], - state: int, - interval: int, - expected: list[str], ) -> None: """Test the update_items function with some devices.""" - device_payload[0] = device_payload[0] | {"state": state} - await config_entry_factory() - assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 1 - assert hass.states.get("device_tracker.switch_1").state == expected[0] + assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 + assert hass.states.get("device_tracker.device_1").state == STATE_HOME + assert hass.states.get("device_tracker.device_2").state == STATE_NOT_HOME # State 
change signalling work - switch_1 = device_payload[0] | {"state": 1, "next_interval": interval} - mock_websocket_message(message=MessageKey.DEVICE, data=[switch_1]) + device_1 = device_payload[0] + device_1["next_interval"] = 20 + device_2 = device_payload[1] + device_2["state"] = 1 + device_2["next_interval"] = 50 + mock_websocket_message(message=MessageKey.DEVICE, data=[device_1, device_2]) await hass.async_block_till_done() - # Too little time has passed - assert hass.states.get("device_tracker.switch_1").state == expected[1] + assert hass.states.get("device_tracker.device_1").state == STATE_HOME + assert hass.states.get("device_tracker.device_2").state == STATE_HOME # Change of time can mark device not_home outside of expected reporting interval new_time = dt_util.utcnow() + timedelta(seconds=90) @@ -278,24 +281,23 @@ async def test_tracked_device_state_change( async_fire_time_changed(hass, new_time) await hass.async_block_till_done() - # Heartbeat to update state is interval + 60 seconds - assert hass.states.get("device_tracker.switch_1").state == expected[2] + assert hass.states.get("device_tracker.device_1").state == STATE_NOT_HOME + assert hass.states.get("device_tracker.device_2").state == STATE_HOME # Disabled device is unavailable - switch_1["disabled"] = True - mock_websocket_message(message=MessageKey.DEVICE, data=switch_1) + device_1["disabled"] = True + mock_websocket_message(message=MessageKey.DEVICE, data=device_1) await hass.async_block_till_done() - assert hass.states.get("device_tracker.switch_1").state == STATE_UNAVAILABLE + assert hass.states.get("device_tracker.device_1").state == STATE_UNAVAILABLE + assert hass.states.get("device_tracker.device_2").state == STATE_HOME @pytest.mark.parametrize("client_payload", [[WIRELESS_CLIENT_1, WIRED_CLIENT_1]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("mock_device_registry") async def test_remove_clients( - hass: HomeAssistant, - mock_websocket_message: WebsocketMessageMock, - client_payload: list[dict[str, Any]], + hass: HomeAssistant, mock_websocket_message, client_payload: list[dict[str, Any]] ) -> None: """Test the remove_items function with some clients.""" assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 @@ -311,35 +313,68 @@ async def test_remove_clients( assert hass.states.get("device_tracker.wd_client_1") -@pytest.mark.parametrize("client_payload", [[WIRELESS_CLIENT_1]]) -@pytest.mark.parametrize("device_payload", [[SWITCH_1]]) +@pytest.mark.parametrize( + "client_payload", + [ + [ + { + "essid": "ssid", + "hostname": "client", + "is_wired": False, + "last_seen": 1562600145, + "mac": "00:00:00:00:00:01", + } + ] + ], +) +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 3, + "device_id": "mock-id", + "has_fan": True, + "fan_level": 0, + "ip": "10.0.1.1", + "last_seen": 1562600145, + "mac": "00:00:00:00:01:01", + "model": "US16P150", + "name": "Device", + "next_interval": 20, + "overheating": True, + "state": 1, + "type": "usw", + "upgradable": True, + "version": "4.0.42.10433", + } + ] + ], +) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("mock_device_registry") -async def test_hub_state_change( - hass: HomeAssistant, - mock_websocket_state: WebsocketStateManager, -) -> None: +async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> None: """Verify entities state reflect on hub connection becoming unavailable.""" assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 - assert 
hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME - assert hass.states.get("device_tracker.switch_1").state == STATE_HOME + assert hass.states.get("device_tracker.client").state == STATE_NOT_HOME + assert hass.states.get("device_tracker.device").state == STATE_HOME # Controller unavailable await mock_websocket_state.disconnect() - assert hass.states.get("device_tracker.ws_client_1").state == STATE_UNAVAILABLE - assert hass.states.get("device_tracker.switch_1").state == STATE_UNAVAILABLE + assert hass.states.get("device_tracker.client").state == STATE_UNAVAILABLE + assert hass.states.get("device_tracker.device").state == STATE_UNAVAILABLE # Controller available await mock_websocket_state.reconnect() - assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME - assert hass.states.get("device_tracker.switch_1").state == STATE_HOME + assert hass.states.get("device_tracker.client").state == STATE_NOT_HOME + assert hass.states.get("device_tracker.device").state == STATE_HOME @pytest.mark.usefixtures("mock_device_registry") async def test_option_ssid_filter( hass: HomeAssistant, mock_websocket_message, - config_entry_factory: ConfigEntryFactoryType, + config_entry_factory: Callable[[], ConfigEntry], client_payload: list[dict[str, Any]], ) -> None: """Test the SSID filter works. @@ -348,7 +383,13 @@ async def test_option_ssid_filter( Client on SSID2 will be removed on change of options. """ client_payload += [ - WIRELESS_CLIENT_1 | {"last_seen": dt_util.as_timestamp(dt_util.utcnow())}, + { + "essid": "ssid", + "hostname": "client", + "is_wired": False, + "last_seen": dt_util.as_timestamp(dt_util.utcnow()), + "mac": "00:00:00:00:00:01", + }, { "essid": "ssid2", "hostname": "client_on_ssid2", @@ -360,7 +401,7 @@ async def test_option_ssid_filter( config_entry = await config_entry_factory() assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 - assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME + assert hass.states.get("device_tracker.client").state == STATE_HOME assert hass.states.get("device_tracker.client_on_ssid2").state == STATE_NOT_HOME # Setting SSID filter will remove clients outside of filter @@ -370,29 +411,33 @@ async def test_option_ssid_filter( await hass.async_block_till_done() # Not affected by SSID filter - assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME + assert hass.states.get("device_tracker.client").state == STATE_HOME # Removed due to SSID filter assert not hass.states.get("device_tracker.client_on_ssid2") # Roams to SSID outside of filter - ws_client_1 = client_payload[0] | {"essid": "other_ssid"} - mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) + client = client_payload[0] + client["essid"] = "other_ssid" + mock_websocket_message(message=MessageKey.CLIENT, data=client) # Data update while SSID filter is in effect shouldn't create the client - client_on_ssid2 = client_payload[1] | { - "last_seen": dt_util.as_timestamp(dt_util.utcnow()) - } + client_on_ssid2 = client_payload[1] + client_on_ssid2["last_seen"] = dt_util.as_timestamp(dt_util.utcnow()) mock_websocket_message(message=MessageKey.CLIENT, data=client_on_ssid2) await hass.async_block_till_done() - new_time = dt_util.utcnow() + timedelta(seconds=(DEFAULT_DETECTION_TIME + 1)) + new_time = dt_util.utcnow() + timedelta( + seconds=( + config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME) + 1 + ) + ) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await 
hass.async_block_till_done() # SSID filter marks client as away - assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME + assert hass.states.get("device_tracker.client").state == STATE_NOT_HOME # SSID still outside of filter assert not hass.states.get("device_tracker.client_on_ssid2") @@ -401,23 +446,25 @@ async def test_option_ssid_filter( hass.config_entries.async_update_entry(config_entry, options={CONF_SSID_FILTER: []}) await hass.async_block_till_done() - ws_client_1["last_seen"] += 1 + client["last_seen"] += 1 client_on_ssid2["last_seen"] += 1 - mock_websocket_message( - message=MessageKey.CLIENT, data=[ws_client_1, client_on_ssid2] - ) + mock_websocket_message(message=MessageKey.CLIENT, data=[client, client_on_ssid2]) await hass.async_block_till_done() - assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME + assert hass.states.get("device_tracker.client").state == STATE_HOME assert hass.states.get("device_tracker.client_on_ssid2").state == STATE_HOME # Time pass to mark client as away - new_time += timedelta(seconds=(DEFAULT_DETECTION_TIME + 1)) + new_time += timedelta( + seconds=( + config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME) + 1 + ) + ) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() - assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME + assert hass.states.get("device_tracker.client").state == STATE_NOT_HOME client_on_ssid2["last_seen"] += 1 mock_websocket_message(message=MessageKey.CLIENT, data=client_on_ssid2) @@ -431,7 +478,9 @@ async def test_option_ssid_filter( mock_websocket_message(message=MessageKey.CLIENT, data=client_on_ssid2) await hass.async_block_till_done() - new_time += timedelta(seconds=DEFAULT_DETECTION_TIME) + new_time += timedelta( + seconds=(config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME)) + ) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() @@ -443,7 +492,7 @@ async def test_option_ssid_filter( async def test_wireless_client_go_wired_issue( hass: HomeAssistant, mock_websocket_message, - config_entry_factory: ConfigEntryFactoryType, + config_entry_factory: Callable[[], ConfigEntry], client_payload: list[dict[str, Any]], ) -> None: """Test the solution to catch wireless device go wired UniFi issue. @@ -451,51 +500,64 @@ async def test_wireless_client_go_wired_issue( UniFi Network has a known issue that when a wireless device goes away it sometimes gets marked as wired. 
""" client_payload.append( - WIRELESS_CLIENT_1 | {"last_seen": dt_util.as_timestamp(dt_util.utcnow())} + { + "essid": "ssid", + "hostname": "client", + "ip": "10.0.0.1", + "is_wired": False, + "last_seen": dt_util.as_timestamp(dt_util.utcnow()), + "mac": "00:00:00:00:00:01", + } ) - await config_entry_factory() + config_entry = await config_entry_factory() assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 1 # Client is wireless - assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME + client_state = hass.states.get("device_tracker.client") + assert client_state.state == STATE_HOME # Trigger wired bug - ws_client_1 = client_payload[0] | { - "last_seen": dt_util.as_timestamp(dt_util.utcnow()), - "is_wired": True, - } - mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) + client = client_payload[0] + client["last_seen"] = dt_util.as_timestamp(dt_util.utcnow()) + client["is_wired"] = True + mock_websocket_message(message=MessageKey.CLIENT, data=client) await hass.async_block_till_done() # Wired bug fix keeps client marked as wireless - assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME + client_state = hass.states.get("device_tracker.client") + assert client_state.state == STATE_HOME # Pass time - new_time = dt_util.utcnow() + timedelta(seconds=DEFAULT_DETECTION_TIME) + new_time = dt_util.utcnow() + timedelta( + seconds=(config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME)) + ) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() # Marked as home according to the timer - assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME + client_state = hass.states.get("device_tracker.client") + assert client_state.state == STATE_NOT_HOME # Try to mark client as connected - ws_client_1["last_seen"] += 1 - mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) + client["last_seen"] += 1 + mock_websocket_message(message=MessageKey.CLIENT, data=client) await hass.async_block_till_done() # Make sure it don't go online again until wired bug disappears - assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME + client_state = hass.states.get("device_tracker.client") + assert client_state.state == STATE_NOT_HOME # Make client wireless - ws_client_1["last_seen"] += 1 - ws_client_1["is_wired"] = False - mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) + client["last_seen"] += 1 + client["is_wired"] = False + mock_websocket_message(message=MessageKey.CLIENT, data=client) await hass.async_block_till_done() # Client is no longer affected by wired bug and can be marked online - assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME + client_state = hass.states.get("device_tracker.client") + assert client_state.state == STATE_HOME @pytest.mark.parametrize("config_entry_options", [{CONF_IGNORE_WIRED_BUG: True}]) @@ -503,54 +565,69 @@ async def test_wireless_client_go_wired_issue( async def test_option_ignore_wired_bug( hass: HomeAssistant, mock_websocket_message, - config_entry_factory: ConfigEntryFactoryType, + config_entry_factory: Callable[[], ConfigEntry], client_payload: list[dict[str, Any]], ) -> None: """Test option to ignore wired bug.""" client_payload.append( - WIRELESS_CLIENT_1 | {"last_seen": dt_util.as_timestamp(dt_util.utcnow())} + { + "ap_mac": "00:00:00:00:02:01", + "essid": "ssid", + "hostname": "client", + "ip": "10.0.0.1", + "is_wired": False, + "last_seen": 
dt_util.as_timestamp(dt_util.utcnow()), + "mac": "00:00:00:00:00:01", + } ) - await config_entry_factory() + config_entry = await config_entry_factory() assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 1 # Client is wireless - assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME + client_state = hass.states.get("device_tracker.client") + assert client_state.state == STATE_HOME # Trigger wired bug - ws_client_1 = client_payload[0] - ws_client_1["is_wired"] = True - mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) + client = client_payload[0] + client["is_wired"] = True + mock_websocket_message(message=MessageKey.CLIENT, data=client) await hass.async_block_till_done() # Wired bug in effect - assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME + client_state = hass.states.get("device_tracker.client") + assert client_state.state == STATE_HOME - # Pass time - new_time = dt_util.utcnow() + timedelta(seconds=DEFAULT_DETECTION_TIME) + # pass time + new_time = dt_util.utcnow() + timedelta( + seconds=config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME) + ) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() # Timer marks client as away - assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME + client_state = hass.states.get("device_tracker.client") + assert client_state.state == STATE_NOT_HOME # Mark client as connected again - ws_client_1["last_seen"] += 1 - mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) + client["last_seen"] += 1 + mock_websocket_message(message=MessageKey.CLIENT, data=client) await hass.async_block_till_done() # Ignoring wired bug allows client to go home again even while affected - assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME + client_state = hass.states.get("device_tracker.client") + assert client_state.state == STATE_HOME # Make client wireless - ws_client_1["last_seen"] += 1 - ws_client_1["is_wired"] = False - mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) + client["last_seen"] += 1 + client["is_wired"] = False + mock_websocket_message(message=MessageKey.CLIENT, data=client) await hass.async_block_till_done() # Client is wireless and still connected - assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME + client_state = hass.states.get("device_tracker.client") + assert client_state.state == STATE_HOME @pytest.mark.parametrize( @@ -580,8 +657,8 @@ async def test_option_ignore_wired_bug( async def test_restoring_client( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry: MockConfigEntry, - config_entry_factory: ConfigEntryFactoryType, + config_entry: ConfigEntry, + config_entry_factory: Callable[[], ConfigEntry], client_payload: list[dict[str, Any]], clients_all_payload: list[dict[str, Any]], ) -> None: @@ -654,10 +731,10 @@ async def test_restoring_client( @pytest.mark.usefixtures("mock_device_registry") async def test_config_entry_options_track( hass: HomeAssistant, - config_entry_setup: MockConfigEntry, + config_entry_setup: ConfigEntry, config_entry_options: MappingProxyType[str, Any], counts: tuple[int], - expected: tuple[tuple[bool | None, ...], ...], + expected: dict[tuple[bool | None]], ) -> None: """Test the different config entry options. 
diff --git a/tests/components/unifi/test_diagnostics.py b/tests/components/unifi/test_diagnostics.py index 80359a9c75c..fcaba59cbad 100644 --- a/tests/components/unifi/test_diagnostics.py +++ b/tests/components/unifi/test_diagnostics.py @@ -2,16 +2,15 @@ import pytest from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props from homeassistant.components.unifi.const import ( CONF_ALLOW_BANDWIDTH_SENSORS, CONF_ALLOW_UPTIME_SENSORS, CONF_BLOCK_CLIENT, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -122,10 +121,11 @@ DPI_GROUP_DATA = [ async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - config_entry_setup: MockConfigEntry, + config_entry_setup: ConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert await get_diagnostics_for_config_entry( - hass, hass_client, config_entry_setup - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, config_entry_setup) + == snapshot + ) diff --git a/tests/components/unifi/test_hub.py b/tests/components/unifi/test_hub.py index af134c7449b..0d75a83c5f5 100644 --- a/tests/components/unifi/test_hub.py +++ b/tests/components/unifi/test_hub.py @@ -1,5 +1,6 @@ """Test UniFi Network.""" +from collections.abc import Callable from http import HTTPStatus from types import MappingProxyType from typing import Any @@ -11,21 +12,18 @@ import pytest from homeassistant.components.unifi.const import DOMAIN as UNIFI_DOMAIN from homeassistant.components.unifi.errors import AuthenticationRequired, CannotConnect from homeassistant.components.unifi.hub import get_unifi_api -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr import homeassistant.util.dt as dt_util -from .conftest import ConfigEntryFactoryType, WebsocketStateManager - -from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker async def test_hub_setup( device_registry: dr.DeviceRegistry, - config_entry_factory: ConfigEntryFactoryType, + config_entry_factory: Callable[[], ConfigEntry], ) -> None: """Successful setup.""" with patch( @@ -56,7 +54,7 @@ async def test_hub_setup( async def test_reset_after_successful_setup( - hass: HomeAssistant, config_entry_setup: MockConfigEntry + hass: HomeAssistant, config_entry_setup: ConfigEntry ) -> None: """Calling reset when the entry has been setup.""" assert config_entry_setup.state is ConfigEntryState.LOADED @@ -66,7 +64,7 @@ async def test_reset_after_successful_setup( async def test_reset_fails( - hass: HomeAssistant, config_entry_setup: MockConfigEntry + hass: HomeAssistant, config_entry_setup: ConfigEntry ) -> None: """Calling reset when the entry has been setup can return false.""" assert config_entry_setup.state is ConfigEntryState.LOADED @@ -82,8 +80,8 @@ async def test_reset_fails( @pytest.mark.usefixtures("mock_device_registry") async def test_connection_state_signalling( hass: HomeAssistant, - config_entry_factory: ConfigEntryFactoryType, - mock_websocket_state: WebsocketStateManager, + 
mock_websocket_state, + config_entry_factory: Callable[[], ConfigEntry], client_payload: list[dict[str, Any]], ) -> None: """Verify connection statesignalling and connection state are working.""" @@ -112,8 +110,8 @@ async def test_connection_state_signalling( async def test_reconnect_mechanism( aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, - mock_websocket_state: WebsocketStateManager, + mock_websocket_state, + config_entry_setup: ConfigEntry, ) -> None: """Verify reconnect prints only on first reconnection try.""" aioclient_mock.clear_requests() @@ -142,10 +140,7 @@ async def test_reconnect_mechanism( ], ) @pytest.mark.usefixtures("config_entry_setup") -async def test_reconnect_mechanism_exceptions( - mock_websocket_state: WebsocketStateManager, - exception: Exception, -) -> None: +async def test_reconnect_mechanism_exceptions(mock_websocket_state, exception) -> None: """Verify async_reconnect calls expected methods.""" with ( patch("aiounifi.Controller.login", side_effect=exception), @@ -175,8 +170,8 @@ async def test_reconnect_mechanism_exceptions( ) async def test_get_unifi_api_fails_to_connect( hass: HomeAssistant, - side_effect: Exception, - raised_exception: Exception, + side_effect, + raised_exception, config_entry_data: MappingProxyType[str, Any], ) -> None: """Check that get_unifi_api can handle UniFi Network being unavailable.""" diff --git a/tests/components/unifi/test_image.py b/tests/components/unifi/test_image.py index dc37d7cb8b7..75d2f02900d 100644 --- a/tests/components/unifi/test_image.py +++ b/tests/components/unifi/test_image.py @@ -3,41 +3,22 @@ from copy import deepcopy from datetime import timedelta from http import HTTPStatus -from typing import Any -from unittest.mock import patch from aiounifi.models.message import MessageKey import pytest -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from homeassistant.components.image import DOMAIN as IMAGE_DOMAIN from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY -from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_UNAVAILABLE, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler from homeassistant.util import dt as dt_util -from .conftest import ( - ConfigEntryFactoryType, - WebsocketMessageMock, - WebsocketStateManager, -) - -from tests.common import async_fire_time_changed, snapshot_platform +from tests.common import async_fire_time_changed from tests.typing import ClientSessionGenerator - -@pytest.fixture(autouse=True) -def mock_getrandbits(): - """Mock image access token which normally is randomized.""" - with patch( - "homeassistant.components.image.SystemRandom.getrandbits", - return_value=1, - ): - yield - - WLAN = { "_id": "012345678910111213141516", "bc_filter_enabled": False, @@ -75,32 +56,6 @@ WLAN = { } -@pytest.mark.parametrize("wlan_payload", [[WLAN]]) -@pytest.mark.parametrize( - "site_payload", - [ - [{"desc": "Site name", "name": "site_id", "role": "admin", "_id": "1"}], - [{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}], - ], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.freeze_time("2021-01-01 01:01:00") -async def test_entity_and_device_data( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - site_payload: dict[str, Any], - 
snapshot: SnapshotAssertion, -) -> None: - """Validate entity and device data with and without admin rights.""" - with patch("homeassistant.components.unifi.PLATFORMS", [Platform.IMAGE]): - config_entry = await config_entry_factory() - if site_payload[0]["role"] == "admin": - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - else: - assert len(hass.states.async_entity_ids(IMAGE_DOMAIN)) == 0 - - @pytest.mark.parametrize("wlan_payload", [[WLAN]]) @pytest.mark.usefixtures("config_entry_setup") async def test_wlan_qr_code( @@ -108,13 +63,16 @@ async def test_wlan_qr_code( entity_registry: er.EntityRegistry, hass_client: ClientSessionGenerator, snapshot: SnapshotAssertion, - mock_websocket_message: WebsocketMessageMock, + mock_websocket_message, + mock_websocket_state, ) -> None: """Test the update_clients function when no clients are found.""" assert len(hass.states.async_entity_ids(IMAGE_DOMAIN)) == 0 ent_reg_entry = entity_registry.async_get("image.ssid_1_qr_code") + assert ent_reg_entry.unique_id == "qr_code-012345678910111213141516" assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC # Enable entity entity_registry.async_update_entity( @@ -126,6 +84,10 @@ async def test_wlan_qr_code( ) await hass.async_block_till_done() + # Validate state object + image_state_1 = hass.states.get("image.ssid_1_qr_code") + assert image_state_1.name == "SSID 1 QR Code" + # Validate image client = await hass_client() resp = await client.get("/api/image_proxy/image.ssid_1_qr_code") @@ -134,8 +96,8 @@ async def test_wlan_qr_code( assert body == snapshot # Update state object - same password - no change to state - image_state_1 = hass.states.get("image.ssid_1_qr_code") mock_websocket_message(message=MessageKey.WLAN_CONF_UPDATED, data=WLAN) + await hass.async_block_till_done() image_state_2 = hass.states.get("image.ssid_1_qr_code") assert image_state_1.state == image_state_2.state @@ -143,6 +105,7 @@ async def test_wlan_qr_code( data = deepcopy(WLAN) data["x_passphrase"] = "new password" mock_websocket_message(message=MessageKey.WLAN_CONF_UPDATED, data=data) + await hass.async_block_till_done() image_state_3 = hass.states.get("image.ssid_1_qr_code") assert image_state_1.state != image_state_3.state @@ -153,41 +116,25 @@ async def test_wlan_qr_code( body = await resp.read() assert body == snapshot + # Availability signalling -@pytest.mark.parametrize("wlan_payload", [[WLAN]]) -@pytest.mark.usefixtures("config_entry_setup") -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_hub_state_change( - hass: HomeAssistant, mock_websocket_state: WebsocketStateManager -) -> None: - """Verify entities state reflect on hub becoming unavailable.""" - assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE - - # Controller unavailable + # Controller disconnects await mock_websocket_state.disconnect() assert hass.states.get("image.ssid_1_qr_code").state == STATE_UNAVAILABLE - # Controller available + # Controller reconnects await mock_websocket_state.reconnect() assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE - -@pytest.mark.parametrize("wlan_payload", [[WLAN]]) -@pytest.mark.usefixtures("config_entry_setup") -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_source_availability( - hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock -) -> None: - """Verify entities state reflect on source becoming 
unavailable.""" - assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE - # WLAN gets disabled wlan_1 = deepcopy(WLAN) wlan_1["enabled"] = False mock_websocket_message(message=MessageKey.WLAN_CONF_UPDATED, data=wlan_1) + await hass.async_block_till_done() assert hass.states.get("image.ssid_1_qr_code").state == STATE_UNAVAILABLE # WLAN gets re-enabled wlan_1["enabled"] = True mock_websocket_message(message=MessageKey.WLAN_CONF_UPDATED, data=wlan_1) + await hass.async_block_till_done() assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE diff --git a/tests/components/unifi/test_init.py b/tests/components/unifi/test_init.py index 68f80555cd6..7cd203ab8fd 100644 --- a/tests/components/unifi/test_init.py +++ b/tests/components/unifi/test_init.py @@ -1,5 +1,6 @@ """Test UniFi Network integration setup process.""" +from collections.abc import Callable from typing import Any from unittest.mock import patch @@ -12,25 +13,29 @@ from homeassistant.components.unifi.const import ( CONF_ALLOW_UPTIME_SENSORS, CONF_TRACK_CLIENTS, CONF_TRACK_DEVICES, + DOMAIN as UNIFI_DOMAIN, ) from homeassistant.components.unifi.errors import AuthenticationRequired, CannotConnect -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from .conftest import ( - DEFAULT_CONFIG_ENTRY_ID, - ConfigEntryFactoryType, - WebsocketMessageMock, -) +from .conftest import DEFAULT_CONFIG_ENTRY_ID from tests.common import flush_store +from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import WebSocketGenerator +async def test_setup_with_no_config(hass: HomeAssistant) -> None: + """Test that we do not discover anything or try to set up a hub.""" + assert await async_setup_component(hass, UNIFI_DOMAIN, {}) is True + assert UNIFI_DOMAIN not in hass.data + + async def test_setup_entry_fails_config_entry_not_ready( - hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType + hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry] ) -> None: """Failed authentication trigger a reauthentication flow.""" with patch( @@ -43,7 +48,7 @@ async def test_setup_entry_fails_config_entry_not_ready( async def test_setup_entry_fails_trigger_reauth_flow( - hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType + hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry] ) -> None: """Failed authentication trigger a reauthentication flow.""" with ( @@ -81,7 +86,7 @@ async def test_setup_entry_fails_trigger_reauth_flow( async def test_wireless_clients( hass: HomeAssistant, hass_storage: dict[str, Any], - config_entry_factory: ConfigEntryFactoryType, + config_entry_factory: Callable[[], ConfigEntry], ) -> None: """Verify wireless clients class.""" hass_storage[unifi.STORAGE_KEY] = { @@ -165,11 +170,13 @@ async def test_wireless_clients( ) async def test_remove_config_entry_device( hass: HomeAssistant, + hass_storage: dict[str, Any], + aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, - config_entry_factory: ConfigEntryFactoryType, + config_entry_factory: Callable[[], ConfigEntry], client_payload: list[dict[str, Any]], device_payload: list[dict[str, Any]], - mock_websocket_message: WebsocketMessageMock, + mock_websocket_message, hass_ws_client: WebSocketGenerator, ) -> None: """Verify removing a device 
manually.""" diff --git a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py index 3c94d12018d..960a5d3e529 100644 --- a/tests/components/unifi/test_sensor.py +++ b/tests/components/unifi/test_sensor.py @@ -1,5 +1,6 @@ """UniFi Network sensor platform tests.""" +from collections.abc import Callable from copy import deepcopy from datetime import datetime, timedelta from types import MappingProxyType @@ -10,12 +11,14 @@ from aiounifi.models.device import DeviceState from aiounifi.models.message import MessageKey from freezegun.api import FrozenDateTimeFactory, freeze_time import pytest -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from homeassistant.components.sensor import ( + ATTR_STATE_CLASS, DOMAIN as SENSOR_DOMAIN, SCAN_INTERVAL, SensorDeviceClass, + SensorStateClass, ) from homeassistant.components.unifi.const import ( CONF_ALLOW_BANDWIDTH_SENSORS, @@ -26,44 +29,20 @@ from homeassistant.components.unifi.const import ( DEFAULT_DETECTION_TIME, DEVICE_STATES, ) -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY, ConfigEntry from homeassistant.const import ( ATTR_DEVICE_CLASS, + ATTR_FRIENDLY_NAME, + ATTR_UNIT_OF_MEASUREMENT, STATE_UNAVAILABLE, EntityCategory, - Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler import homeassistant.util.dt as dt_util -from .conftest import ( - ConfigEntryFactoryType, - WebsocketMessageMock, - WebsocketStateManager, -) - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - -WIRED_CLIENT = { - "hostname": "Wired client", - "is_wired": True, - "mac": "00:00:00:00:00:01", - "oui": "Producer", - "wired-rx_bytes-r": 1234000000, - "wired-tx_bytes-r": 5678000000, - "uptime": 1600094505, -} -WIRELESS_CLIENT = { - "is_wired": False, - "mac": "00:00:00:00:00:02", - "name": "Wireless client", - "oui": "Producer", - "rx_bytes-r": 2345000000.0, - "tx_bytes-r": 6789000000.0, - "uptime": 60, -} +from tests.common import async_fire_time_changed DEVICE_1 = { "board_rev": 2, @@ -337,114 +316,6 @@ PDU_OUTLETS_UPDATE_DATA = [ ] -@pytest.mark.parametrize( - "config_entry_options", - [ - { - CONF_ALLOW_BANDWIDTH_SENSORS: True, - CONF_ALLOW_UPTIME_SENSORS: True, - } - ], -) -@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT, WIRELESS_CLIENT]]) -@pytest.mark.parametrize( - "device_payload", - [ - [ - DEVICE_1, - PDU_DEVICE_1, - { # Temperature - "board_rev": 3, - "device_id": "mock-id", - "general_temperature": 30, - "has_fan": True, - "has_temperature": True, - "fan_level": 0, - "ip": "10.0.1.1", - "last_seen": 1562600145, - "mac": "20:00:00:00:01:01", - "model": "US16P150", - "name": "Device", - "next_interval": 20, - "overheating": True, - "state": 1, - "type": "usw", - "upgradable": True, - "uptime": 60, - "version": "4.0.42.10433", - }, - { # Latency monitors - "board_rev": 2, - "device_id": "mock-id", - "ip": "10.0.1.1", - "mac": "10:00:00:00:01:01", - "last_seen": 1562600145, - "model": "US16P150", - "name": "mock-name", - "port_overrides": [], - "uptime_stats": { - "WAN": { - "availability": 100.0, - "latency_average": 39, - "monitors": [ - { - "availability": 100.0, - "latency_average": 56, - "target": "www.microsoft.com", - "type": "icmp", - }, - { - "availability": 100.0, - "latency_average": 53, - "target": "google.com", - "type": "icmp", - }, - 
{ - "availability": 100.0, - "latency_average": 30, - "target": "1.1.1.1", - "type": "icmp", - }, - ], - }, - "WAN2": { - "monitors": [ - { - "availability": 0.0, - "target": "www.microsoft.com", - "type": "icmp", - }, - { - "availability": 0.0, - "target": "google.com", - "type": "icmp", - }, - {"availability": 0.0, "target": "1.1.1.1", "type": "icmp"}, - ], - }, - }, - "state": 1, - "type": "usw", - "version": "4.0.42.10433", - }, - ] - ], -) -@pytest.mark.parametrize("wlan_payload", [[WLAN]]) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.freeze_time("2021-01-01 01:01:00") -async def test_entity_and_device_data( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory, - snapshot: SnapshotAssertion, -) -> None: - """Validate entity and device data.""" - with patch("homeassistant.components.unifi.PLATFORMS", [Platform.SENSOR]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - @pytest.mark.parametrize( "config_entry_options", [{CONF_ALLOW_BANDWIDTH_SENSORS: True, CONF_ALLOW_UPTIME_SENSORS: True}], @@ -466,17 +337,64 @@ async def test_no_clients(hass: HomeAssistant) -> None: } ], ) -@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT, WIRELESS_CLIENT]]) +@pytest.mark.parametrize( + "client_payload", + [ + [ + { + "hostname": "Wired client", + "is_wired": True, + "mac": "00:00:00:00:00:01", + "oui": "Producer", + "wired-rx_bytes-r": 1234000000, + "wired-tx_bytes-r": 5678000000, + }, + { + "is_wired": False, + "mac": "00:00:00:00:00:02", + "name": "Wireless client", + "oui": "Producer", + "rx_bytes-r": 2345000000.0, + "tx_bytes-r": 6789000000.0, + }, + ] + ], +) async def test_bandwidth_sensors( hass: HomeAssistant, - mock_websocket_message: WebsocketMessageMock, + mock_websocket_message, config_entry_options: MappingProxyType[str, Any], - config_entry_setup: MockConfigEntry, + config_entry_setup: ConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify that bandwidth sensors are working as expected.""" + assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 4 + + # Verify sensor attributes and state + + wrx_sensor = hass.states.get("sensor.wired_client_rx") + assert wrx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert wrx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert wrx_sensor.state == "1234.0" + + wtx_sensor = hass.states.get("sensor.wired_client_tx") + assert wtx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert wtx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert wtx_sensor.state == "5678.0" + + wlrx_sensor = hass.states.get("sensor.wireless_client_rx") + assert wlrx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert wlrx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert wlrx_sensor.state == "2345.0" + + wltx_sensor = hass.states.get("sensor.wireless_client_tx") + assert wltx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert wltx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert wltx_sensor.state == "6789.0" + # Verify state update - wireless_client = deepcopy(client_payload[1]) + wireless_client = client_payload[1] wireless_client["rx_bytes-r"] = 3456000000 wireless_client["tx_bytes-r"] = 7891000000 @@ -541,17 +459,138 @@ async def 
test_bandwidth_sensors( assert hass.states.get("sensor.wired_client_tx") +@pytest.mark.parametrize( + "config_entry_options", + [ + { + CONF_ALLOW_BANDWIDTH_SENSORS: False, + CONF_ALLOW_UPTIME_SENSORS: True, + CONF_TRACK_CLIENTS: False, + CONF_TRACK_DEVICES: False, + } + ], +) +@pytest.mark.parametrize( + "client_payload", + [ + [ + { + "mac": "00:00:00:00:00:01", + "name": "client1", + "oui": "Producer", + "uptime": 0, + } + ] + ], +) +@pytest.mark.parametrize( + ("initial_uptime", "event_uptime", "new_uptime"), + [ + # Uptime listed in epoch time should never change + (1609462800, 1609462800, 1612141200), + # Uptime counted in seconds increases with every event + (60, 64, 60), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_uptime_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + mock_websocket_message, + config_entry_options: MappingProxyType[str, Any], + config_entry_factory: Callable[[], ConfigEntry], + client_payload: list[dict[str, Any]], + initial_uptime, + event_uptime, + new_uptime, +) -> None: + """Verify that uptime sensors are working as expected.""" + uptime_client = client_payload[0] + uptime_client["uptime"] = initial_uptime + freezer.move_to(datetime(2021, 1, 1, 1, 1, 0, tzinfo=dt_util.UTC)) + config_entry = await config_entry_factory() + + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 1 + assert hass.states.get("sensor.client1_uptime").state == "2021-01-01T01:00:00+00:00" + assert ( + entity_registry.async_get("sensor.client1_uptime").entity_category + is EntityCategory.DIAGNOSTIC + ) + + # Verify normal new event doesn't change uptime + # 4 seconds has passed + uptime_client["uptime"] = event_uptime + now = datetime(2021, 1, 1, 1, 1, 4, tzinfo=dt_util.UTC) + with patch("homeassistant.util.dt.now", return_value=now): + mock_websocket_message(message=MessageKey.CLIENT, data=uptime_client) + await hass.async_block_till_done() + + assert hass.states.get("sensor.client1_uptime").state == "2021-01-01T01:00:00+00:00" + + # Verify new event change uptime + # 1 month has passed + uptime_client["uptime"] = new_uptime + now = datetime(2021, 2, 1, 1, 1, 0, tzinfo=dt_util.UTC) + with patch("homeassistant.util.dt.now", return_value=now): + mock_websocket_message(message=MessageKey.CLIENT, data=uptime_client) + await hass.async_block_till_done() + + assert hass.states.get("sensor.client1_uptime").state == "2021-02-01T01:00:00+00:00" + + # Disable option + options = deepcopy(config_entry_options) + options[CONF_ALLOW_UPTIME_SENSORS] = False + hass.config_entries.async_update_entry(config_entry, options=options) + await hass.async_block_till_done() + + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 0 + assert hass.states.get("sensor.client1_uptime") is None + + # Enable option + options = deepcopy(config_entry_options) + options[CONF_ALLOW_UPTIME_SENSORS] = True + with patch("homeassistant.util.dt.now", return_value=now): + hass.config_entries.async_update_entry(config_entry, options=options) + await hass.async_block_till_done() + + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 1 + assert hass.states.get("sensor.client1_uptime") + + @pytest.mark.parametrize( "config_entry_options", [{CONF_ALLOW_BANDWIDTH_SENSORS: True, CONF_ALLOW_UPTIME_SENSORS: True}], ) -@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT, WIRELESS_CLIENT]]) +@pytest.mark.parametrize( + "client_payload", + [ + [ + { + "hostname": "Wired client", + "is_wired": True, + "mac": 
"00:00:00:00:00:01", + "oui": "Producer", + "wired-rx_bytes": 1234000000, + "wired-tx_bytes": 5678000000, + "uptime": 1600094505, + }, + { + "is_wired": False, + "mac": "00:00:00:00:00:02", + "name": "Wireless client", + "oui": "Producer", + "rx_bytes": 2345000000, + "tx_bytes": 6789000000, + "uptime": 60, + }, + ] + ], +) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_remove_sensors( - hass: HomeAssistant, - mock_websocket_message: WebsocketMessageMock, - client_payload: list[dict[str, Any]], + hass: HomeAssistant, mock_websocket_message, client_payload: list[dict[str, Any]] ) -> None: """Verify removing of clients work as expected.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 6 @@ -580,14 +619,15 @@ async def test_remove_sensors( async def test_poe_port_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message: WebsocketMessageMock, - mock_websocket_state: WebsocketStateManager, + mock_websocket_message, + mock_websocket_state, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 ent_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_poe_power") assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC # Enable entity entity_registry.async_update_entity( @@ -652,9 +692,10 @@ async def test_poe_port_switches( @pytest.mark.parametrize("wlan_payload", [[WLAN]]) async def test_wlan_client_sensors( hass: HomeAssistant, - config_entry_factory: ConfigEntryFactoryType, - mock_websocket_message: WebsocketMessageMock, - mock_websocket_state: WebsocketStateManager, + entity_registry: er.EntityRegistry, + mock_websocket_message, + mock_websocket_state, + config_entry_factory: Callable[[], ConfigEntry], client_payload: list[dict[str, Any]], ) -> None: """Verify that WLAN client sensors are working as expected.""" @@ -684,8 +725,14 @@ async def test_wlan_client_sensors( assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 1 + ent_reg_entry = entity_registry.async_get("sensor.ssid_1") + assert ent_reg_entry.unique_id == "wlan_clients-012345678910111213141516" + assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC + # Validate state object - assert hass.states.get("sensor.ssid_1").state == "1" + ssid_1 = hass.states.get("sensor.ssid_1") + assert ssid_1 is not None + assert ssid_1.state == "1" # Verify state update - increasing number wireless_client_1 = client_payload[0] @@ -754,6 +801,7 @@ async def test_wlan_client_sensors( @pytest.mark.parametrize( ( "entity_id", + "expected_unique_id", "expected_value", "changed_data", "expected_update_value", @@ -761,18 +809,21 @@ async def test_wlan_client_sensors( [ ( "dummy_usp_pdu_pro_outlet_2_outlet_power", + "outlet_power-01:02:03:04:05:ff_2", "73.827", {"outlet_table": PDU_OUTLETS_UPDATE_DATA}, "123.45", ), ( "dummy_usp_pdu_pro_ac_power_budget", + "ac_power_budget-01:02:03:04:05:ff", "1875.000", None, None, ), ( "dummy_usp_pdu_pro_ac_power_consumption", + "ac_power_conumption-01:02:03:04:05:ff", "201.683", {"outlet_ac_power_consumption": "456.78"}, "456.78", @@ -783,18 +834,26 @@ async def test_wlan_client_sensors( @pytest.mark.usefixtures("config_entry_setup") async def test_outlet_power_readings( hass: HomeAssistant, - mock_websocket_message: WebsocketMessageMock, + entity_registry: er.EntityRegistry, + mock_websocket_message, device_payload: 
list[dict[str, Any]], entity_id: str, - expected_value: str, - changed_data: dict[str, Any] | None, - expected_update_value: str | None, + expected_unique_id: str, + expected_value: any, + changed_data: dict | None, + expected_update_value: any, ) -> None: """Test the outlet power reporting on PDU devices.""" assert len(hass.states.async_all()) == 13 assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 7 - assert hass.states.get(f"sensor.{entity_id}").state == expected_value + ent_reg_entry = entity_registry.async_get(f"sensor.{entity_id}") + assert ent_reg_entry.unique_id == expected_unique_id + assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC + + sensor_data = hass.states.get(f"sensor.{entity_id}") + assert sensor_data.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.POWER + assert sensor_data.state == expected_value if changed_data is not None: updated_device_data = deepcopy(device_payload[0]) @@ -803,7 +862,73 @@ async def test_outlet_power_readings( mock_websocket_message(message=MessageKey.DEVICE, data=updated_device_data) await hass.async_block_till_done() - assert hass.states.get(f"sensor.{entity_id}").state == expected_update_value + sensor_data = hass.states.get(f"sensor.{entity_id}") + assert sensor_data.state == expected_update_value + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 3, + "device_id": "mock-id", + "has_fan": True, + "fan_level": 0, + "ip": "10.0.1.1", + "last_seen": 1562600145, + "mac": "00:00:00:00:01:01", + "model": "US16P150", + "name": "Device", + "next_interval": 20, + "overheating": True, + "state": 1, + "type": "usw", + "upgradable": True, + "uptime": 60, + "version": "4.0.42.10433", + } + ] + ], +) +async def test_device_uptime( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_websocket_message, + config_entry_factory: Callable[[], ConfigEntry], + device_payload: list[dict[str, Any]], +) -> None: + """Verify that uptime sensors are working as expected.""" + now = datetime(2021, 1, 1, 1, 1, 0, tzinfo=dt_util.UTC) + with patch("homeassistant.util.dt.now", return_value=now): + await config_entry_factory() + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + assert hass.states.get("sensor.device_uptime").state == "2021-01-01T01:00:00+00:00" + + assert ( + entity_registry.async_get("sensor.device_uptime").entity_category + is EntityCategory.DIAGNOSTIC + ) + + # Verify normal new event doesn't change uptime + # 4 seconds has passed + device = device_payload[0] + device["uptime"] = 64 + now = datetime(2021, 1, 1, 1, 1, 4, tzinfo=dt_util.UTC) + with patch("homeassistant.util.dt.now", return_value=now): + mock_websocket_message(message=MessageKey.DEVICE, data=device) + + assert hass.states.get("sensor.device_uptime").state == "2021-01-01T01:00:00+00:00" + + # Verify new event change uptime + # 1 month has passed + + device["uptime"] = 60 + now = datetime(2021, 2, 1, 1, 1, 0, tzinfo=dt_util.UTC) + with patch("homeassistant.util.dt.now", return_value=now): + mock_websocket_message(message=MessageKey.DEVICE, data=device) + + assert hass.states.get("sensor.device_uptime").state == "2021-02-01T01:00:00+00:00" @pytest.mark.parametrize( @@ -836,12 +961,17 @@ async def test_outlet_power_readings( @pytest.mark.usefixtures("config_entry_setup") async def test_device_temperature( hass: HomeAssistant, - mock_websocket_message: WebsocketMessageMock, + entity_registry: er.EntityRegistry, + mock_websocket_message, device_payload: list[dict[str, Any]], ) -> None: """Verify that temperature 
sensors are working as expected.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 assert hass.states.get("sensor.device_temperature").state == "30" + assert ( + entity_registry.async_get("sensor.device_temperature").entity_category + is EntityCategory.DIAGNOSTIC + ) # Verify new event change temperature device = device_payload[0] @@ -881,11 +1011,15 @@ async def test_device_temperature( async def test_device_state( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message: WebsocketMessageMock, + mock_websocket_message, device_payload: list[dict[str, Any]], ) -> None: """Verify that state sensors are working as expected.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 + assert ( + entity_registry.async_get("sensor.device_state").entity_category + is EntityCategory.DIAGNOSTIC + ) device = device_payload[0] for i in list(map(int, DeviceState)): @@ -913,7 +1047,8 @@ async def test_device_state( @pytest.mark.usefixtures("config_entry_setup") async def test_device_system_stats( hass: HomeAssistant, - mock_websocket_message: WebsocketMessageMock, + entity_registry: er.EntityRegistry, + mock_websocket_message, device_payload: list[dict[str, Any]], ) -> None: """Verify that device stats sensors are working as expected.""" @@ -923,6 +1058,16 @@ async def test_device_system_stats( assert hass.states.get("sensor.device_cpu_utilization").state == "5.8" assert hass.states.get("sensor.device_memory_utilization").state == "31.1" + assert ( + entity_registry.async_get("sensor.device_cpu_utilization").entity_category + is EntityCategory.DIAGNOSTIC + ) + + assert ( + entity_registry.async_get("sensor.device_memory_utilization").entity_category + is EntityCategory.DIAGNOSTIC + ) + # Verify new event change system-stats device = device_payload[0] device["system-stats"] = {"cpu": 7.7, "mem": 33.3, "uptime": 7316} @@ -998,9 +1143,9 @@ async def test_device_system_stats( async def test_bandwidth_port_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_setup: MockConfigEntry, + mock_websocket_message, + config_entry_setup: ConfigEntry, config_entry_options: MappingProxyType[str, Any], - mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that port bandwidth sensors are working as expected.""" @@ -1009,9 +1154,11 @@ async def test_bandwidth_port_sensors( p1rx_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_rx") assert p1rx_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + assert p1rx_reg_entry.entity_category is EntityCategory.DIAGNOSTIC p1tx_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_tx") assert p1tx_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + assert p1tx_reg_entry.entity_category is EntityCategory.DIAGNOSTIC # Enable entity entity_registry.async_update_entity( @@ -1038,11 +1185,26 @@ async def test_bandwidth_port_sensors( assert len(hass.states.async_all()) == 9 assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 6 - # Verify sensor state - assert hass.states.get("sensor.mock_name_port_1_rx").state == "0.00921" - assert hass.states.get("sensor.mock_name_port_1_tx").state == "0.04089" - assert hass.states.get("sensor.mock_name_port_2_rx").state == "0.01229" - assert hass.states.get("sensor.mock_name_port_2_tx").state == "0.02892" + # Verify sensor attributes and state + p1rx_sensor = hass.states.get("sensor.mock_name_port_1_rx") + assert p1rx_sensor.attributes.get(ATTR_DEVICE_CLASS) == 
SensorDeviceClass.DATA_RATE + assert p1rx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert p1rx_sensor.state == "0.00921" + + p1tx_sensor = hass.states.get("sensor.mock_name_port_1_tx") + assert p1tx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert p1tx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert p1tx_sensor.state == "0.04089" + + p2rx_sensor = hass.states.get("sensor.mock_name_port_2_rx") + assert p2rx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert p2rx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert p2rx_sensor.state == "0.01229" + + p2tx_sensor = hass.states.get("sensor.mock_name_port_2_tx") + assert p2tx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert p2tx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert p2tx_sensor.state == "0.02892" # Verify state update device_1 = device_payload[0] @@ -1098,9 +1260,9 @@ async def test_bandwidth_port_sensors( async def test_device_client_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - mock_websocket_message: WebsocketMessageMock, - client_payload: dict[str, Any], + config_entry_factory, + mock_websocket_message, + client_payload, ) -> None: """Verify that WLAN client sensors are working as expected.""" client_payload += [ @@ -1136,9 +1298,13 @@ async def test_device_client_sensors( ent_reg_entry = entity_registry.async_get("sensor.wired_device_clients") assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC + assert ent_reg_entry.unique_id == "device_clients-01:00:00:00:00:00" ent_reg_entry = entity_registry.async_get("sensor.wireless_device_clients") assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC + assert ent_reg_entry.unique_id == "device_clients-02:00:00:00:00:00" # Enable entity entity_registry.async_update_entity( @@ -1175,578 +1341,67 @@ async def test_device_client_sensors( assert hass.states.get("sensor.wireless_device_clients").state == "0" -async def _test_uptime_entity( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_websocket_message: WebsocketMessageMock, - config_entry_factory: ConfigEntryFactoryType, - payload: dict[str, Any], - entity_id: str, - message_key: MessageKey, - initial_uptime: int, - event_uptime: int, - small_variation_uptime: int, - new_uptime: int, -) -> None: - """Verify that uptime entities are working as expected.""" - payload["uptime"] = initial_uptime - freezer.move_to(datetime(2021, 1, 1, 1, 1, 0, tzinfo=dt_util.UTC)) - config_entry = await config_entry_factory() - - assert hass.states.get(entity_id).state == "2021-01-01T01:00:00+00:00" - - # Verify normal new event doesn't change uptime - # 4 minutes have passed - - payload["uptime"] = event_uptime - now = datetime(2021, 1, 1, 1, 4, 0, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - mock_websocket_message(message=message_key, data=payload) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == "2021-01-01T01:00:00+00:00" - - # Verify small variation of uptime (<120 seconds) is ignored - # 15 seconds variation after 8 minutes - - payload["uptime"] = small_variation_uptime - now = datetime(2021, 1, 1, 1, 8, 15, 
tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - mock_websocket_message(message=message_key, data=payload) - - assert hass.states.get(entity_id).state == "2021-01-01T01:00:00+00:00" - - # Verify new event change uptime - # 1 month has passed - - payload["uptime"] = new_uptime - now = datetime(2021, 2, 1, 1, 1, 0, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - mock_websocket_message(message=message_key, data=payload) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == "2021-02-01T01:00:00+00:00" - - return config_entry +WIRED_CLIENT = { + "hostname": "Wired client", + "is_wired": True, + "mac": "00:00:00:00:00:01", + "oui": "Producer", + "wired-rx_bytes-r": 1234000000, + "wired-tx_bytes-r": 5678000000, + "uptime": 1600094505, +} +WIRELESS_CLIENT = { + "is_wired": False, + "mac": "00:00:00:00:00:01", + "name": "Wireless client", + "oui": "Producer", + "rx_bytes-r": 2345000000.0, + "tx_bytes-r": 6789000000.0, + "uptime": 60, +} -@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_UPTIME_SENSORS: True}]) -@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT]]) @pytest.mark.parametrize( - ("initial_uptime", "event_uptime", "small_variation_uptime", "new_uptime"), + "config_entry_options", [ - # Uptime listed in epoch time should never change - (1609462800, 1609462800, 1609462800, 1612141200), - # Uptime counted in seconds increases with every event - (60, 240, 480, 60), + { + CONF_ALLOW_BANDWIDTH_SENSORS: True, + CONF_ALLOW_UPTIME_SENSORS: True, + CONF_TRACK_CLIENTS: False, + CONF_TRACK_DEVICES: False, + } ], ) +@pytest.mark.parametrize( + ("client_payload", "entity_id", "unique_id_prefix"), + [ + ([WIRED_CLIENT], "sensor.wired_client_rx", "rx-"), + ([WIRED_CLIENT], "sensor.wired_client_tx", "tx-"), + ([WIRED_CLIENT], "sensor.wired_client_uptime", "uptime-"), + ([WIRELESS_CLIENT], "sensor.wireless_client_rx", "rx-"), + ([WIRELESS_CLIENT], "sensor.wireless_client_tx", "tx-"), + ([WIRELESS_CLIENT], "sensor.wireless_client_uptime", "uptime-"), + ], +) +@pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_client_uptime( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - config_entry_options: MappingProxyType[str, Any], - config_entry_factory: ConfigEntryFactoryType, - mock_websocket_message: WebsocketMessageMock, - client_payload: list[dict[str, Any]], - initial_uptime, - event_uptime, - small_variation_uptime, - new_uptime, -) -> None: - """Verify that client uptime sensors are working as expected.""" - config_entry = await _test_uptime_entity( - hass, - freezer, - mock_websocket_message, - config_entry_factory, - payload=client_payload[0], - entity_id="sensor.wired_client_uptime", - message_key=MessageKey.CLIENT, - initial_uptime=initial_uptime, - event_uptime=event_uptime, - small_variation_uptime=small_variation_uptime, - new_uptime=new_uptime, - ) - - # Disable option - options = deepcopy(config_entry_options) - options[CONF_ALLOW_UPTIME_SENSORS] = False - hass.config_entries.async_update_entry(config_entry, options=options) - await hass.async_block_till_done() - - assert hass.states.get("sensor.wired_client_uptime") is None - - # Enable option - options = deepcopy(config_entry_options) - options[CONF_ALLOW_UPTIME_SENSORS] = True - hass.config_entries.async_update_entry(config_entry, options=options) - await hass.async_block_till_done() - - assert hass.states.get("sensor.wired_client_uptime") - - 
-@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) -async def test_device_uptime( +@pytest.mark.freeze_time("2021-01-01 01:01:00") +async def test_sensor_sources( hass: HomeAssistant, entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, - config_entry_factory: ConfigEntryFactoryType, - mock_websocket_message: WebsocketMessageMock, - device_payload: list[dict[str, Any]], + snapshot: SnapshotAssertion, + entity_id: str, + unique_id_prefix: str, ) -> None: - """Verify that device uptime sensors are working as expected.""" - await _test_uptime_entity( - hass, - freezer, - mock_websocket_message, - config_entry_factory, - payload=device_payload[0], - entity_id="sensor.mock_name_uptime", - message_key=MessageKey.DEVICE, - initial_uptime=60, - event_uptime=240, - small_variation_uptime=480, - new_uptime=60, - ) + """Test sensor sources and the entity description.""" + ent_reg_entry = entity_registry.async_get(entity_id) + assert ent_reg_entry.unique_id.startswith(unique_id_prefix) + assert ent_reg_entry.unique_id == snapshot + assert ent_reg_entry.entity_category == snapshot - -@pytest.mark.parametrize( - "device_payload", - [ - [ - { - "board_rev": 2, - "device_id": "mock-id", - "ip": "10.0.1.1", - "mac": "10:00:00:00:01:01", - "last_seen": 1562600145, - "model": "US16P150", - "name": "mock-name", - "port_overrides": [], - "uptime_stats": { - "WAN": { - "availability": 100.0, - "latency_average": 39, - "monitors": [ - { - "availability": 100.0, - "latency_average": 56, - "target": "www.microsoft.com", - "type": "icmp", - }, - { - "availability": 100.0, - "latency_average": 53, - "target": "google.com", - "type": "icmp", - }, - { - "availability": 100.0, - "latency_average": 30, - "target": "1.1.1.1", - "type": "icmp", - }, - ], - }, - "WAN2": { - "monitors": [ - { - "availability": 0.0, - "target": "www.microsoft.com", - "type": "icmp", - }, - { - "availability": 0.0, - "target": "google.com", - "type": "icmp", - }, - {"availability": 0.0, "target": "1.1.1.1", "type": "icmp"}, - ], - }, - }, - "state": 1, - "type": "usw", - "version": "4.0.42.10433", - } - ] - ], -) -@pytest.mark.parametrize( - ("monitor_id", "state", "updated_state", "index_to_update"), - [ - # Microsoft - ("microsoft_wan", "56", "20", 0), - # Google - ("google_wan", "53", "90", 1), - # Cloudflare - ("cloudflare_wan", "30", "80", 2), - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_wan_monitor_latency( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_websocket_message: WebsocketMessageMock, - device_payload: list[dict[str, Any]], - monitor_id: str, - state: str, - updated_state: str, - index_to_update: int, -) -> None: - """Verify that wan latency sensors are working as expected.""" - entity_id = f"sensor.mock_name_{monitor_id}_latency" - - assert len(hass.states.async_all()) == 6 - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 - - latency_entry = entity_registry.async_get(entity_id) - assert latency_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - - # Enable entity - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) - - await hass.async_block_till_done() - - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), - ) - await hass.async_block_till_done() - - assert len(hass.states.async_all()) == 7 - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 - - # Verify sensor state - assert hass.states.get(entity_id).state == state - - # Verify state update - 
device = device_payload[0] - device["uptime_stats"]["WAN"]["monitors"][index_to_update]["latency_average"] = ( - updated_state - ) - - mock_websocket_message(message=MessageKey.DEVICE, data=device) - - assert hass.states.get(entity_id).state == updated_state - - -@pytest.mark.parametrize( - "device_payload", - [ - [ - { - "board_rev": 2, - "device_id": "mock-id", - "ip": "10.0.1.1", - "mac": "10:00:00:00:01:01", - "last_seen": 1562600145, - "model": "US16P150", - "name": "mock-name", - "port_overrides": [], - "uptime_stats": { - "WAN": { - "monitors": [ - { - "availability": 100.0, - "latency_average": 30, - "target": "1.2.3.4", - "type": "icmp", - }, - ], - }, - "WAN2": { - "monitors": [ - { - "availability": 0.0, - "target": "www.microsoft.com", - "type": "icmp", - }, - { - "availability": 0.0, - "target": "google.com", - "type": "icmp", - }, - {"availability": 0.0, "target": "1.1.1.1", "type": "icmp"}, - ], - }, - }, - "state": 1, - "type": "usw", - "version": "4.0.42.10433", - } - ] - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_wan_monitor_latency_with_no_entries( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, -) -> None: - """Verify that wan latency sensors is not created if there is no data.""" - - assert len(hass.states.async_all()) == 6 - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 - - latency_entry = entity_registry.async_get("sensor.mock_name_google_wan_latency") - assert latency_entry is None - - -@pytest.mark.parametrize( - "device_payload", - [ - [ - { - "board_rev": 2, - "device_id": "mock-id", - "ip": "10.0.1.1", - "mac": "10:00:00:00:01:01", - "last_seen": 1562600145, - "model": "US16P150", - "name": "mock-name", - "port_overrides": [], - "state": 1, - "type": "usw", - "version": "4.0.42.10433", - } - ] - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_wan_monitor_latency_with_no_uptime( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, -) -> None: - """Verify that wan latency sensors is not created if there is no data.""" - - assert len(hass.states.async_all()) == 6 - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 - - latency_entry = entity_registry.async_get("sensor.mock_name_google_wan_latency") - assert latency_entry is None - - -@pytest.mark.parametrize( - "device_payload", - [ - [ - { - "board_rev": 3, - "device_id": "mock-id", - "has_fan": True, - "fan_level": 0, - "ip": "10.0.1.1", - "last_seen": 1562600145, - "mac": "00:00:00:00:01:01", - "model": "US16P150", - "name": "Device", - "next_interval": 20, - "overheating": True, - "state": 1, - "type": "usw", - "upgradable": True, - "uptime": 60, - "version": "4.0.42.10433", - "temperatures": [ - {"name": "CPU", "type": "cpu", "value": 66.0}, - {"name": "Local", "type": "board", "value": 48.75}, - {"name": "PHY", "type": "board", "value": 50.25}, - ], - } - ] - ], -) -@pytest.mark.parametrize( - ("temperature_id", "state", "updated_state", "index_to_update"), - [ - ("device_cpu", "66.0", "20", 0), - ("device_local", "48.75", "90.64", 1), - ("device_phy", "50.25", "80", 2), - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_device_temperatures( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_websocket_message, - device_payload: list[dict[str, Any]], - temperature_id: str, - state: str, - updated_state: str, - index_to_update: int, -) -> None: - """Verify that device temperatures sensors are working as expected.""" - - entity_id = f"sensor.device_{temperature_id}_temperature" - - 
assert len(hass.states.async_all()) == 6 - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 - - temperature_entity = entity_registry.async_get(entity_id) - assert temperature_entity.disabled_by == RegistryEntryDisabler.INTEGRATION - - # Enable entity - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) - - await hass.async_block_till_done() - - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), - ) - await hass.async_block_till_done() - - assert len(hass.states.async_all()) == 7 - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 - - # Verify sensor state - assert hass.states.get(entity_id).state == state - - # # Verify state update - device = device_payload[0] - device["temperatures"][index_to_update]["value"] = updated_state - - mock_websocket_message(message=MessageKey.DEVICE, data=device) - - assert hass.states.get(entity_id).state == updated_state - - -@pytest.mark.parametrize( - "device_payload", - [ - [ - { - "board_rev": 2, - "device_id": "mock-id", - "ip": "10.0.1.1", - "mac": "10:00:00:00:01:01", - "last_seen": 1562600145, - "model": "US16P150", - "name": "mock-name", - "port_overrides": [], - "state": 1, - "type": "usw", - "version": "4.0.42.10433", - } - ] - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_device_with_no_temperature( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, -) -> None: - """Verify that device temperature sensors is not created if there is no data.""" - - assert len(hass.states.async_all()) == 6 - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 - - temperature_entity = entity_registry.async_get( - "sensor.device_device_cpu_temperature" - ) - - assert temperature_entity is None - - -@pytest.mark.parametrize( - "device_payload", - [ - [ - { - "board_rev": 2, - "device_id": "mock-id", - "ip": "10.0.1.1", - "mac": "10:00:00:00:01:01", - "last_seen": 1562600145, - "model": "US16P150", - "name": "mock-name", - "port_overrides": [], - "state": 1, - "type": "usw", - "version": "4.0.42.10433", - "temperatures": [ - {"name": "MEM", "type": "mem", "value": 66.0}, - ], - } - ] - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_device_with_no_matching_temperatures( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, -) -> None: - """Verify that device temperature sensors is not created if there is no matching data.""" - - assert len(hass.states.async_all()) == 6 - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 - - temperature_entity = entity_registry.async_get( - "sensor.device_device_cpu_temperature" - ) - - assert temperature_entity is None - - -@pytest.mark.parametrize( - "device_payload", - [ - [ - { - "board_rev": 3, - "device_id": "device-with-uplink", - "ip": "10.0.1.1", - "last_seen": 1562600145, - "mac": "00:00:00:00:01:01", - "model": "US16P150", - "name": "Device", - "next_interval": 20, - "state": 1, - "type": "usw", - "upgradable": True, - "uptime": 60, - "version": "4.0.42.10433", - "uplink": { - "uplink_mac": "00:00:00:00:00:02", - "port_idx": 1, - }, - }, - { - "board_rev": 3, - "device_id": "device-without-uplink", - "ip": "10.0.1.2", - "last_seen": 1562600145, - "mac": "00:00:00:00:01:02", - "model": "US16P150", - "name": "Other Device", - "next_interval": 20, - "state": 1, - "type": "usw", - "upgradable": True, - "uptime": 60, - "version": "4.0.42.10433", - "uplink": {}, - }, - ], - ], -) -@pytest.mark.usefixtures("config_entry_setup") -async def 
test_device_uplink( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_websocket_message, - device_payload: list[dict[str, Any]], -) -> None: - """Verify that uplink sensors are working as expected.""" - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 5 - assert hass.states.get("sensor.device_uplink_mac").state == "00:00:00:00:00:02" - assert ( - entity_registry.async_get("sensor.device_uplink_mac").entity_category - is EntityCategory.DIAGNOSTIC - ) - - # Verify new event change temperature - device = device_payload[0] - device["uplink"]["uplink_mac"] = "00:00:00:00:00:03" - mock_websocket_message(message=MessageKey.DEVICE, data=device) - assert hass.states.get("sensor.device_uplink_mac").state == "00:00:00:00:00:03" + state = hass.states.get(entity_id) + assert state.attributes.get(ATTR_DEVICE_CLASS) == snapshot + assert state.attributes.get(ATTR_FRIENDLY_NAME) == snapshot + assert state.attributes.get(ATTR_STATE_CLASS) == snapshot + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == snapshot + assert state.state == snapshot diff --git a/tests/components/unifi/test_services.py b/tests/components/unifi/test_services.py index a7968a92e22..e3b03bc868d 100644 --- a/tests/components/unifi/test_services.py +++ b/tests/components/unifi/test_services.py @@ -10,11 +10,11 @@ from homeassistant.components.unifi.services import ( SERVICE_RECONNECT_CLIENT, SERVICE_REMOVE_CLIENTS, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_DEVICE_ID, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -25,7 +25,7 @@ async def test_reconnect_client( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, + config_entry_setup: ConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify call to reconnect client is performed as expected.""" @@ -69,7 +69,7 @@ async def test_reconnect_device_without_mac( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, + config_entry_setup: ConfigEntry, ) -> None: """Verify no call is made if device does not have a known mac.""" aioclient_mock.clear_requests() @@ -95,7 +95,7 @@ async def test_reconnect_client_hub_unavailable( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, + config_entry_setup: ConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify no call is made if hub is unavailable.""" @@ -127,7 +127,7 @@ async def test_reconnect_client_unknown_mac( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, + config_entry_setup: ConfigEntry, ) -> None: """Verify no call is made if trying to reconnect a mac unknown to hub.""" aioclient_mock.clear_requests() @@ -152,7 +152,7 @@ async def test_reconnect_wired_client( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, + config_entry_setup: ConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify no call is made if client is wired.""" @@ -204,7 +204,7 @@ async def test_reconnect_wired_client( async def test_remove_clients( hass: HomeAssistant, aioclient_mock: 
AiohttpClientMocker, - config_entry_setup: MockConfigEntry, + config_entry_setup: ConfigEntry, ) -> None: """Verify removing different variations of clients work.""" aioclient_mock.clear_requests() @@ -288,8 +288,8 @@ async def test_services_handle_unloaded_config_entry( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, - config_entry_setup: MockConfigEntry, - clients_all_payload: dict[str, Any], + config_entry_setup: ConfigEntry, + clients_all_payload, ) -> None: """Verify no call is made if config entry is unloaded.""" await hass.config_entries.async_unload(config_entry_setup.entry_id) diff --git a/tests/components/unifi/test_switch.py b/tests/components/unifi/test_switch.py index ef93afa7e3e..b0ae8bde445 100644 --- a/tests/components/unifi/test_switch.py +++ b/tests/components/unifi/test_switch.py @@ -1,18 +1,18 @@ """UniFi Network switch platform tests.""" +from collections.abc import Callable from copy import deepcopy from datetime import timedelta from typing import Any -from unittest.mock import patch from aiounifi.models.message import MessageKey import pytest -from syrupy import SnapshotAssertion from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_OFF, SERVICE_TURN_ON, + SwitchDeviceClass, ) from homeassistant.components.unifi.const import ( CONF_BLOCK_CLIENT, @@ -22,28 +22,24 @@ from homeassistant.components.unifi.const import ( CONF_TRACK_DEVICES, DOMAIN as UNIFI_DOMAIN, ) -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY, ConfigEntry from homeassistant.const import ( + ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, CONF_HOST, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, - Platform, + EntityCategory, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler from homeassistant.util import dt as dt_util -from .conftest import ( - CONTROLLER_HOST, - ConfigEntryFactoryType, - WebsocketMessageMock, - WebsocketStateManager, -) +from .conftest import CONTROLLER_HOST -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +from tests.common import async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker CLIENT_1 = { @@ -778,65 +774,6 @@ PORT_FORWARD_PLEX = { "src": "any", } -TRAFFIC_RULE = { - "_id": "6452cd9b859d5b11aa002ea1", - "action": "BLOCK", - "app_category_ids": [], - "app_ids": [], - "bandwidth_limit": { - "download_limit_kbps": 1024, - "enabled": False, - "upload_limit_kbps": 1024, - }, - "description": "Test Traffic Rule", - "name": "Test Traffic Rule", - "domains": [], - "enabled": True, - "ip_addresses": [], - "ip_ranges": [], - "matching_target": "INTERNET", - "network_ids": [], - "regions": [], - "schedule": { - "date_end": "2023-05-10", - "date_start": "2023-05-03", - "mode": "ALWAYS", - "repeat_on_days": [], - "time_all_day": False, - "time_range_end": "12:00", - "time_range_start": "09:00", - }, - "target_devices": [{"client_mac": CLIENT_1["mac"], "type": "CLIENT"}], -} - - -@pytest.mark.parametrize( - "config_entry_options", [{CONF_BLOCK_CLIENT: [BLOCKED["mac"]]}] -) -@pytest.mark.parametrize("client_payload", [[BLOCKED]]) -@pytest.mark.parametrize("device_payload", [[DEVICE_1, OUTLET_UP1, PDU_DEVICE_1]]) -@pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) -@pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) 
-@pytest.mark.parametrize("port_forward_payload", [[PORT_FORWARD_PLEX]]) -@pytest.mark.parametrize(("traffic_rule_payload"), [([TRAFFIC_RULE])]) -@pytest.mark.parametrize("wlan_payload", [[WLAN]]) -@pytest.mark.parametrize( - "site_payload", - [[{"desc": "Site name", "name": "site_id", "role": "admin", "_id": "1"}]], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_entity_and_device_data( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - site_payload: dict[str, Any], - snapshot: SnapshotAssertion, -) -> None: - """Validate entity and device data with and without admin rights.""" - with patch("homeassistant.components.unifi.PLATFORMS", [Platform.SWITCH]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - @pytest.mark.parametrize("client_payload", [[CONTROLLER_HOST]]) @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @@ -847,6 +784,18 @@ async def test_hub_not_client(hass: HomeAssistant) -> None: assert hass.states.get("switch.cloud_key") is None +@pytest.mark.parametrize("client_payload", [[CLIENT_1]]) +@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) +@pytest.mark.parametrize( + "site_payload", + [[{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}]], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_not_admin(hass: HomeAssistant) -> None: + """Test that switch platform only work on an admin account.""" + assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0 + + @pytest.mark.parametrize( "config_entry_options", [ @@ -857,17 +806,41 @@ async def test_hub_not_client(hass: HomeAssistant) -> None: } ], ) +@pytest.mark.parametrize("client_payload", [[CLIENT_4]]) @pytest.mark.parametrize("clients_all_payload", [[BLOCKED, UNBLOCKED, CLIENT_1]]) @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) +@pytest.mark.usefixtures("config_entry_setup") async def test_switches( hass: HomeAssistant, + entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, + config_entry_setup: ConfigEntry, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 3 + switch_4 = hass.states.get("switch.poe_client_4") + assert switch_4 is None + + blocked = hass.states.get("switch.block_client_1") + assert blocked is not None + assert blocked.state == "off" + + unblocked = hass.states.get("switch.block_client_2") + assert unblocked is not None + assert unblocked.state == "on" + + dpi_switch = hass.states.get("switch.block_media_streaming") + assert dpi_switch is not None + assert dpi_switch.state == "on" + assert dpi_switch.attributes["icon"] == "mdi:network" + + for entry_id in ("switch.block_client_1", "switch.block_media_streaming"): + assert ( + entity_registry.async_get(entry_id).entity_category is EntityCategory.CONFIG + ) + # Block and unblock client aioclient_mock.clear_requests() aioclient_mock.post( @@ -926,9 +899,7 @@ async def test_switches( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.usefixtures("config_entry_setup") -async def test_remove_switches( - hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock -) -> None: +async def test_remove_switches(hass: HomeAssistant, mock_websocket_message) -> None: 
"""Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 @@ -965,8 +936,8 @@ async def test_remove_switches( async def test_block_switches( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - mock_websocket_message: WebsocketMessageMock, - config_entry_setup: MockConfigEntry, + mock_websocket_message, + config_entry_setup: ConfigEntry, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 @@ -1025,13 +996,14 @@ async def test_block_switches( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.usefixtures("config_entry_setup") -async def test_dpi_switches( - hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock -) -> None: +async def test_dpi_switches(hass: HomeAssistant, mock_websocket_message) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - assert hass.states.get("switch.block_media_streaming").state == STATE_ON + dpi_switch = hass.states.get("switch.block_media_streaming") + assert dpi_switch is not None + assert dpi_switch.state == STATE_ON + assert dpi_switch.attributes["icon"] == "mdi:network" mock_websocket_message(data=DPI_APP_DISABLED_EVENT) await hass.async_block_till_done() @@ -1050,7 +1022,7 @@ async def test_dpi_switches( @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.usefixtures("config_entry_setup") async def test_dpi_switches_add_second_app( - hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock + hass: HomeAssistant, mock_websocket_message ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1100,60 +1072,6 @@ async def test_dpi_switches_add_second_app( assert hass.states.get("switch.block_media_streaming").state == STATE_ON -@pytest.mark.parametrize(("traffic_rule_payload"), [([TRAFFIC_RULE])]) -async def test_traffic_rules( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, - traffic_rule_payload: list[dict[str, Any]], -) -> None: - """Test control of UniFi traffic rules.""" - assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - - # Validate state object - assert hass.states.get("switch.unifi_network_test_traffic_rule").state == STATE_ON - - traffic_rule = deepcopy(traffic_rule_payload[0]) - - # Disable traffic rule - aioclient_mock.put( - f"https://{config_entry_setup.data[CONF_HOST]}:1234" - f"/v2/api/site/{config_entry_setup.data[CONF_SITE_ID]}" - f"/trafficrules/{traffic_rule['_id']}", - ) - - call_count = aioclient_mock.call_count - - await hass.services.async_call( - SWITCH_DOMAIN, - "turn_off", - {"entity_id": "switch.unifi_network_test_traffic_rule"}, - blocking=True, - ) - # Updating the value for traffic rules will make another call to retrieve the values - assert aioclient_mock.call_count == call_count + 2 - expected_disable_call = deepcopy(traffic_rule) - expected_disable_call["enabled"] = False - - assert aioclient_mock.mock_calls[call_count][2] == expected_disable_call - - call_count = aioclient_mock.call_count - - # Enable traffic rule - await hass.services.async_call( - SWITCH_DOMAIN, - "turn_on", - {"entity_id": "switch.unifi_network_test_traffic_rule"}, - blocking=True, - ) - - expected_enable_call = deepcopy(traffic_rule) - expected_enable_call["enabled"] = True - - assert 
aioclient_mock.call_count == call_count + 2 - assert aioclient_mock.mock_calls[call_count][2] == expected_enable_call - - @pytest.mark.parametrize( ("device_payload", "entity_id", "outlet_index", "expected_switches"), [ @@ -1165,8 +1083,8 @@ async def test_traffic_rules( async def test_outlet_switches( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - mock_websocket_message: WebsocketMessageMock, - config_entry_setup: MockConfigEntry, + mock_websocket_message, + config_entry_setup: ConfigEntry, device_payload: list[dict[str, Any]], entity_id: str, outlet_index: int, @@ -1176,7 +1094,10 @@ async def test_outlet_switches( assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == expected_switches # Validate state object - assert hass.states.get(f"switch.{entity_id}").state == STATE_ON + switch_1 = hass.states.get(f"switch.{entity_id}") + assert switch_1 is not None + assert switch_1.state == STATE_ON + assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.OUTLET # Update state object device_1 = deepcopy(device_payload[0]) @@ -1235,6 +1156,15 @@ async def test_outlet_switches( await hass.async_block_till_done() assert hass.states.get(f"switch.{entity_id}").state == STATE_OFF + # Unload config entry + await hass.config_entries.async_unload(config_entry_setup.entry_id) + assert hass.states.get(f"switch.{entity_id}").state == STATE_UNAVAILABLE + + # Remove config entry + await hass.config_entries.async_remove(config_entry_setup.entry_id) + await hass.async_block_till_done() + assert hass.states.get(f"switch.{entity_id}") is None + @pytest.mark.parametrize( "config_entry_options", @@ -1249,7 +1179,7 @@ async def test_outlet_switches( ) @pytest.mark.usefixtures("config_entry_setup") async def test_new_client_discovered_on_block_control( - hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock + hass: HomeAssistant, mock_websocket_message ) -> None: """Test if 2nd update has a new client.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0 @@ -1267,9 +1197,7 @@ async def test_new_client_discovered_on_block_control( ) @pytest.mark.parametrize("clients_all_payload", [[BLOCKED, UNBLOCKED]]) async def test_option_block_clients( - hass: HomeAssistant, - config_entry_setup: MockConfigEntry, - clients_all_payload: list[dict[str, Any]], + hass: HomeAssistant, config_entry_setup: ConfigEntry, clients_all_payload ) -> None: """Test the changes to option reflects accordingly.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1317,7 +1245,7 @@ async def test_option_block_clients( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) async def test_option_remove_switches( - hass: HomeAssistant, config_entry_setup: MockConfigEntry + hass: HomeAssistant, config_entry_setup: ConfigEntry ) -> None: """Test removal of DPI switch when options updated.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1335,8 +1263,8 @@ async def test_poe_port_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, - mock_websocket_message: WebsocketMessageMock, + mock_websocket_message, + config_entry_setup: ConfigEntry, device_payload: list[dict[str, Any]], ) -> None: """Test PoE port entities work.""" @@ -1344,6 +1272,7 @@ async def test_poe_port_switches( ent_reg_entry = entity_registry.async_get("switch.mock_name_port_1_poe") assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + 
assert ent_reg_entry.entity_category is EntityCategory.CONFIG # Enable entity entity_registry.async_update_entity( @@ -1360,7 +1289,10 @@ async def test_poe_port_switches( await hass.async_block_till_done() # Validate state object - assert hass.states.get("switch.mock_name_port_1_poe").state == STATE_ON + switch_1 = hass.states.get("switch.mock_name_port_1_poe") + assert switch_1 is not None + assert switch_1.state == STATE_ON + assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.OUTLET # Update state object device_1 = deepcopy(device_payload[0]) @@ -1428,16 +1360,24 @@ async def test_poe_port_switches( @pytest.mark.parametrize("wlan_payload", [[WLAN]]) async def test_wlan_switches( hass: HomeAssistant, + entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, - mock_websocket_message: WebsocketMessageMock, + mock_websocket_message, + config_entry_setup: ConfigEntry, wlan_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi WLAN availability.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 + ent_reg_entry = entity_registry.async_get("switch.ssid_1") + assert ent_reg_entry.unique_id == "wlan-012345678910111213141516" + assert ent_reg_entry.entity_category is EntityCategory.CONFIG + # Validate state object - assert hass.states.get("switch.ssid_1").state == STATE_ON + switch_1 = hass.states.get("switch.ssid_1") + assert switch_1 is not None + assert switch_1.state == STATE_ON + assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.SWITCH # Update state object wlan = deepcopy(wlan_payload[0]) @@ -1476,16 +1416,24 @@ async def test_wlan_switches( @pytest.mark.parametrize("port_forward_payload", [[PORT_FORWARD_PLEX]]) async def test_port_forwarding_switches( hass: HomeAssistant, + entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, - mock_websocket_message: WebsocketMessageMock, + mock_websocket_message, + config_entry_setup: ConfigEntry, port_forward_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi port forwarding.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 + ent_reg_entry = entity_registry.async_get("switch.unifi_network_plex") + assert ent_reg_entry.unique_id == "port_forward-5a32aa4ee4b0412345678911" + assert ent_reg_entry.entity_category is EntityCategory.CONFIG + # Validate state object - assert hass.states.get("switch.unifi_network_plex").state == STATE_ON + switch_1 = hass.states.get("switch.unifi_network_plex") + assert switch_1 is not None + assert switch_1.state == STATE_ON + assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.SWITCH # Update state object data = port_forward_payload[0].copy() @@ -1569,9 +1517,9 @@ async def test_port_forwarding_switches( async def test_updating_unique_id( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - config_entry: MockConfigEntry, - device_payload: list[dict[str, Any]], + config_entry_factory: Callable[[], ConfigEntry], + config_entry: ConfigEntry, + device_payload, ) -> None: """Verify outlet control and poe control unique ID update works.""" entity_registry.async_get_or_create( @@ -1604,13 +1552,10 @@ async def test_updating_unique_id( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.parametrize("port_forward_payload", [[PORT_FORWARD_PLEX]]) 
-@pytest.mark.parametrize(("traffic_rule_payload"), [([TRAFFIC_RULE])]) @pytest.mark.parametrize("wlan_payload", [[WLAN]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_hub_state_change( - hass: HomeAssistant, mock_websocket_state: WebsocketStateManager -) -> None: +async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> None: """Verify entities state reflect on hub connection becoming unavailable.""" entity_ids = ( "switch.block_client_2", @@ -1618,7 +1563,6 @@ async def test_hub_state_change( "switch.plug_outlet_1", "switch.block_media_streaming", "switch.unifi_network_plex", - "switch.unifi_network_test_traffic_rule", "switch.ssid_1", ) for entity_id in entity_ids: diff --git a/tests/components/unifi/test_update.py b/tests/components/unifi/test_update.py index 7bf4b9aec9d..3b1de6c4456 100644 --- a/tests/components/unifi/test_update.py +++ b/tests/components/unifi/test_update.py @@ -1,11 +1,9 @@ """The tests for the UniFi Network update platform.""" from copy import deepcopy -from unittest.mock import patch from aiounifi.models.message import MessageKey import pytest -from syrupy import SnapshotAssertion from yarl import URL from homeassistant.components.unifi.const import CONF_SITE_ID @@ -15,28 +13,23 @@ from homeassistant.components.update import ( ATTR_LATEST_VERSION, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, + UpdateDeviceClass, + UpdateEntityFeature, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( + ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, + ATTR_SUPPORTED_FEATURES, CONF_HOST, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, - Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from .conftest import ( - ConfigEntryFactoryType, - WebsocketMessageMock, - WebsocketStateManager, -) - -from tests.common import MockConfigEntry, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -# Device with new firmware available DEVICE_1 = { "board_rev": 3, "device_id": "mock-id", @@ -53,7 +46,6 @@ DEVICE_1 = { "upgrade_to_firmware": "4.3.17.11279", } -# Device without new firmware available DEVICE_2 = { "board_rev": 3, "device_id": "mock-id", @@ -69,40 +61,43 @@ DEVICE_2 = { @pytest.mark.parametrize("device_payload", [[DEVICE_1, DEVICE_2]]) -@pytest.mark.parametrize( - "site_payload", - [ - [{"desc": "Site name", "name": "site_id", "role": "admin", "_id": "1"}], - [{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}], - ], -) -async def test_entity_and_device_data( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry_factory: ConfigEntryFactoryType, - snapshot: SnapshotAssertion, -) -> None: - """Validate entity and device data with and without admin rights.""" - with patch("homeassistant.components.unifi.PLATFORMS", [Platform.UPDATE]): - config_entry = await config_entry_factory() - await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - - -@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @pytest.mark.usefixtures("config_entry_setup") -async def test_device_updates( - hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock -) -> None: +async def test_device_updates(hass: HomeAssistant, mock_websocket_message) -> None: """Test the update_items function with some devices.""" + assert len(hass.states.async_entity_ids(UPDATE_DOMAIN)) == 2 + + # Device with new firmware available + device_1_state = 
hass.states.get("update.device_1") assert device_1_state.state == STATE_ON + assert device_1_state.attributes[ATTR_INSTALLED_VERSION] == "4.0.42.10433" + assert device_1_state.attributes[ATTR_LATEST_VERSION] == "4.3.17.11279" assert device_1_state.attributes[ATTR_IN_PROGRESS] is False + assert device_1_state.attributes[ATTR_DEVICE_CLASS] == UpdateDeviceClass.FIRMWARE + assert ( + device_1_state.attributes[ATTR_SUPPORTED_FEATURES] + == UpdateEntityFeature.PROGRESS | UpdateEntityFeature.INSTALL + ) + + # Device without new firmware available + + device_2_state = hass.states.get("update.device_2") + assert device_2_state.state == STATE_OFF + assert device_2_state.attributes[ATTR_INSTALLED_VERSION] == "4.0.42.10433" + assert device_2_state.attributes[ATTR_LATEST_VERSION] == "4.0.42.10433" + assert device_2_state.attributes[ATTR_IN_PROGRESS] is False + assert device_2_state.attributes[ATTR_DEVICE_CLASS] == UpdateDeviceClass.FIRMWARE + assert ( + device_2_state.attributes[ATTR_SUPPORTED_FEATURES] + == UpdateEntityFeature.PROGRESS | UpdateEntityFeature.INSTALL + ) # Simulate start of update device_1 = deepcopy(DEVICE_1) device_1["state"] = 4 mock_websocket_message(message=MessageKey.DEVICE, data=device_1) + await hass.async_block_till_done() device_1_state = hass.states.get("update.device_1") assert device_1_state.state == STATE_ON @@ -117,6 +112,7 @@ async def test_device_updates( device_1["upgradable"] = False del device_1["upgrade_to_firmware"] mock_websocket_message(message=MessageKey.DEVICE, data=device_1) + await hass.async_block_till_done() device_1_state = hass.states.get("update.device_1") assert device_1_state.state == STATE_OFF @@ -125,13 +121,30 @@ async def test_device_updates( assert device_1_state.attributes[ATTR_IN_PROGRESS] is False +@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) +@pytest.mark.parametrize( + "site_payload", + [[{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}]], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_not_admin(hass: HomeAssistant) -> None: + """Test that the INSTALL feature is not available on a non-admin account.""" + assert len(hass.states.async_entity_ids(UPDATE_DOMAIN)) == 1 + device_state = hass.states.get("update.device_1") + assert device_state.state == STATE_ON + assert ( + device_state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature.PROGRESS + ) + + @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) async def test_install( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - config_entry_setup: MockConfigEntry, + config_entry_setup: ConfigEntry, ) -> None: """Test the device update install call.""" + assert len(hass.states.async_entity_ids(UPDATE_DOMAIN)) == 1 device_state = hass.states.get("update.device_1") assert device_state.state == STATE_ON @@ -161,10 +174,9 @@ async def test_install( @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @pytest.mark.usefixtures("config_entry_setup") -async def test_hub_state_change( - hass: HomeAssistant, mock_websocket_state: WebsocketStateManager -) -> None: +async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> None: """Verify entities state reflect on hub becoming unavailable.""" + assert len(hass.states.async_entity_ids(UPDATE_DOMAIN)) == 1 assert hass.states.get("update.device_1").state == STATE_ON # Controller unavailable diff --git a/tests/components/unifiprotect/test_binary_sensor.py b/tests/components/unifiprotect/test_binary_sensor.py index 31669aa62bb..af8ce015955 100644 --- 
a/tests/components/unifiprotect/test_binary_sensor.py +++ b/tests/components/unifiprotect/test_binary_sensor.py @@ -575,149 +575,3 @@ async def test_binary_sensor_package_detected( ufp.ws_msg(mock_msg) await hass.async_block_till_done() assert len(state_changes) == 2 - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_binary_sensor_person_detected( - hass: HomeAssistant, - ufp: MockUFPFixture, - doorbell: Camera, - unadopted_camera: Camera, - fixed_now: datetime, -) -> None: - """Test binary_sensor person detected detection entity.""" - - await init_entry(hass, ufp, [doorbell, unadopted_camera]) - assert_entity_counts(hass, Platform.BINARY_SENSOR, 15, 15) - - doorbell.smart_detect_settings.object_types.append(SmartDetectObjectType.PERSON) - - _, entity_id = ids_from_device_description( - Platform.BINARY_SENSOR, doorbell, EVENT_SENSORS[3] - ) - - events = async_capture_events(hass, EVENT_STATE_CHANGED) - - event = Event( - model=ModelType.EVENT, - id="test_event_id", - type=EventType.SMART_DETECT, - start=fixed_now - timedelta(seconds=1), - end=None, - score=50, - smart_detect_types=[], - smart_detect_event_ids=[], - camera_id=doorbell.id, - api=ufp.api, - ) - - new_camera = doorbell.copy() - new_camera.is_smart_detected = True - - ufp.api.bootstrap.cameras = {new_camera.id: new_camera} - ufp.api.bootstrap.events = {event.id: event} - - mock_msg = Mock() - mock_msg.changed_data = {} - mock_msg.new_obj = event - ufp.ws_msg(mock_msg) - - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - - event = Event( - model=ModelType.EVENT, - id="test_event_id", - type=EventType.SMART_DETECT, - start=fixed_now - timedelta(seconds=1), - end=fixed_now + timedelta(seconds=1), - score=65, - smart_detect_types=[SmartDetectObjectType.PERSON], - smart_detect_event_ids=[], - camera_id=doorbell.id, - api=ufp.api, - ) - - new_camera = doorbell.copy() - new_camera.is_smart_detected = True - new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PERSON] = event.id - - ufp.api.bootstrap.cameras = {new_camera.id: new_camera} - ufp.api.bootstrap.events = {event.id: event} - - mock_msg = Mock() - mock_msg.changed_data = {} - mock_msg.new_obj = event - ufp.ws_msg(mock_msg) - - await hass.async_block_till_done() - - entity_events = [event for event in events if event.data["entity_id"] == entity_id] - assert len(entity_events) == 3 - assert entity_events[0].data["new_state"].state == STATE_OFF - assert entity_events[1].data["new_state"].state == STATE_ON - assert entity_events[2].data["new_state"].state == STATE_OFF - - # Event is already seen and has end, should now be off - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - - # Now send an event that has an end right away - event = Event( - model=ModelType.EVENT, - id="new_event_id", - type=EventType.SMART_DETECT, - start=fixed_now - timedelta(seconds=1), - end=fixed_now + timedelta(seconds=1), - score=80, - smart_detect_types=[SmartDetectObjectType.PERSON], - smart_detect_event_ids=[], - camera_id=doorbell.id, - api=ufp.api, - ) - - new_camera = doorbell.copy() - new_camera.is_smart_detected = True - new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PERSON] = event.id - - ufp.api.bootstrap.cameras = {new_camera.id: new_camera} - ufp.api.bootstrap.events = {event.id: event} - - mock_msg = Mock() - mock_msg.changed_data = {} - mock_msg.new_obj = event - - state_changes: list[HAEvent[EventStateChangedData]] = 
async_capture_events( - hass, EVENT_STATE_CHANGED - ) - ufp.ws_msg(mock_msg) - - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - - assert len(state_changes) == 2 - - on_event = state_changes[0] - state = on_event.data["new_state"] - assert state - assert state.state == STATE_ON - assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION - assert state.attributes[ATTR_EVENT_SCORE] == 80 - - off_event = state_changes[1] - state = off_event.data["new_state"] - assert state - assert state.state == STATE_OFF - assert ATTR_EVENT_SCORE not in state.attributes - - # replay and ensure ignored - ufp.ws_msg(mock_msg) - await hass.async_block_till_done() - assert len(state_changes) == 2 diff --git a/tests/components/unifiprotect/test_camera.py b/tests/components/unifiprotect/test_camera.py index 379f443923a..9fedb67fea4 100644 --- a/tests/components/unifiprotect/test_camera.py +++ b/tests/components/unifiprotect/test_camera.py @@ -4,22 +4,16 @@ from __future__ import annotations from unittest.mock import AsyncMock, Mock -import pytest from uiprotect.api import DEVICE_UPDATE_INTERVAL from uiprotect.data import Camera as ProtectCamera, CameraChannel, StateType from uiprotect.exceptions import NvrError from uiprotect.websocket import WebsocketState from homeassistant.components.camera import ( + STATE_IDLE, CameraEntityFeature, - CameraState, - CameraWebRTCProvider, - RTCIceCandidate, - StreamType, - WebRTCSendMessage, async_get_image, async_get_stream_source, - async_register_webrtc_provider, ) from homeassistant.components.unifiprotect.const import ( ATTR_BITRATE, @@ -28,7 +22,6 @@ from homeassistant.components.unifiprotect.const import ( ATTR_HEIGHT, ATTR_WIDTH, DEFAULT_ATTRIBUTION, - DOMAIN, ) from homeassistant.components.unifiprotect.utils import get_camera_base_name from homeassistant.const import ( @@ -38,12 +31,11 @@ from homeassistant.const import ( STATE_UNAVAILABLE, Platform, ) -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from .utils import ( - Camera, MockUFPFixture, adopt_devices, assert_entity_counts, @@ -54,45 +46,6 @@ from .utils import ( ) -class MockWebRTCProvider(CameraWebRTCProvider): - """WebRTC provider.""" - - @property - def domain(self) -> str: - """Return the integration domain of the provider.""" - return DOMAIN - - @callback - def async_is_supported(self, stream_source: str) -> bool: - """Return if this provider is supports the Camera as source.""" - return True - - async def async_handle_async_webrtc_offer( - self, - camera: Camera, - offer_sdp: str, - session_id: str, - send_message: WebRTCSendMessage, - ) -> None: - """Handle the WebRTC offer and return the answer via the provided callback.""" - - async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate - ) -> None: - """Handle the WebRTC candidate.""" - - @callback - def async_close_session(self, session_id: str) -> None: - """Close the session.""" - - -@pytest.fixture -async def web_rtc_provider(hass: HomeAssistant) -> None: - """Fixture to enable WebRTC provider for camera entities.""" - await async_setup_component(hass, "camera", {}) - async_register_webrtc_provider(hass, MockWebRTCProvider()) - - def validate_default_camera_entity( hass: HomeAssistant, 
camera_obj: ProtectCamera, @@ -113,14 +66,6 @@ def validate_default_camera_entity( assert entity.disabled is False assert entity.unique_id == unique_id - device_registry = dr.async_get(hass) - device = device_registry.async_get(entity.device_id) - assert device - assert device.manufacturer == "Ubiquiti" - assert device.name == camera_obj.name - assert device.model == camera_obj.market_name or camera_obj.type - assert device.model_id == camera_obj.type - return entity_id @@ -196,7 +141,7 @@ async def validate_rtsps_camera_state( """Validate a camera's state.""" channel = camera_obj.channels[channel_id] - assert await async_get_stream_source(hass, entity_id) == channel.rtsps_no_srtp_url + assert await async_get_stream_source(hass, entity_id) == channel.rtsps_url validate_common_camera_state(hass, channel, entity_id, features) @@ -330,26 +275,6 @@ async def test_basic_setup( await validate_no_stream_camera_state(hass, doorbell, 3, entity_id, features=0) -@pytest.mark.usefixtures("web_rtc_provider") -async def test_webrtc_support( - hass: HomeAssistant, - ufp: MockUFPFixture, - camera_all: ProtectCamera, -) -> None: - """Test webrtc support is available.""" - camera_high_only = camera_all.copy() - camera_high_only.channels = [c.copy() for c in camera_all.channels] - camera_high_only.name = "Test Camera 1" - camera_high_only.channels[0].is_rtsp_enabled = True - camera_high_only.channels[1].is_rtsp_enabled = False - camera_high_only.channels[2].is_rtsp_enabled = False - await init_entry(hass, ufp, [camera_high_only]) - entity_id = validate_default_camera_entity(hass, camera_high_only, 0) - state = hass.states.get(entity_id) - assert state - assert StreamType.WEB_RTC in state.attributes["frontend_stream_type"] - - async def test_adopt( hass: HomeAssistant, ufp: MockUFPFixture, camera: ProtectCamera ) -> None: @@ -498,7 +423,7 @@ async def test_camera_websocket_disconnected( entity_id = "camera.test_camera_high_resolution_channel" state = hass.states.get(entity_id) - assert state and state.state == CameraState.IDLE + assert state and state.state == STATE_IDLE # websocket disconnects ufp.ws_state_subscription(WebsocketState.DISCONNECTED) @@ -512,7 +437,7 @@ async def test_camera_websocket_disconnected( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state and state.state == CameraState.IDLE + assert state and state.state == STATE_IDLE async def test_camera_ws_update( diff --git a/tests/components/unifiprotect/test_config_flow.py b/tests/components/unifiprotect/test_config_flow.py index 8bfdc004092..5d02e1cf098 100644 --- a/tests/components/unifiprotect/test_config_flow.py +++ b/tests/components/unifiprotect/test_config_flow.py @@ -224,7 +224,13 @@ async def test_form_reauth_auth( ) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_config.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert not result["errors"] flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) diff --git a/tests/components/unifiprotect/test_event.py b/tests/components/unifiprotect/test_event.py deleted file mode 100644 index 9d1a701fe39..00000000000 --- a/tests/components/unifiprotect/test_event.py +++ /dev/null @@ -1,154 +0,0 @@ -"""Test the UniFi Protect event platform.""" - -from __future__ import annotations - -from datetime import datetime, timedelta -from unittest.mock import Mock - -from 
uiprotect.data import Camera, Event, EventType, ModelType, SmartDetectObjectType - -from homeassistant.components.unifiprotect.const import ( - ATTR_EVENT_ID, - DEFAULT_ATTRIBUTION, -) -from homeassistant.components.unifiprotect.event import EVENT_DESCRIPTIONS -from homeassistant.const import ATTR_ATTRIBUTION, Platform -from homeassistant.core import Event as HAEvent, HomeAssistant, callback -from homeassistant.helpers.event import async_track_state_change_event - -from .utils import ( - MockUFPFixture, - adopt_devices, - assert_entity_counts, - ids_from_device_description, - init_entry, - remove_entities, -) - - -async def test_camera_remove( - hass: HomeAssistant, ufp: MockUFPFixture, doorbell: Camera, unadopted_camera: Camera -) -> None: - """Test removing and re-adding a camera device.""" - - ufp.api.bootstrap.nvr.system_info.ustorage = None - await init_entry(hass, ufp, [doorbell, unadopted_camera]) - assert_entity_counts(hass, Platform.EVENT, 1, 1) - await remove_entities(hass, ufp, [doorbell, unadopted_camera]) - assert_entity_counts(hass, Platform.EVENT, 0, 0) - await adopt_devices(hass, ufp, [doorbell, unadopted_camera]) - assert_entity_counts(hass, Platform.EVENT, 1, 1) - - -async def test_doorbell_ring( - hass: HomeAssistant, - ufp: MockUFPFixture, - doorbell: Camera, - unadopted_camera: Camera, - fixed_now: datetime, -) -> None: - """Test a doorbell ring event.""" - - await init_entry(hass, ufp, [doorbell, unadopted_camera]) - assert_entity_counts(hass, Platform.EVENT, 1, 1) - events: list[HAEvent] = [] - - @callback - def _capture_event(event: HAEvent) -> None: - events.append(event) - - _, entity_id = ids_from_device_description( - Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[0] - ) - - unsub = async_track_state_change_event(hass, entity_id, _capture_event) - event = Event( - model=ModelType.EVENT, - id="test_event_id", - type=EventType.RING, - start=fixed_now - timedelta(seconds=1), - end=None, - score=100, - smart_detect_types=[], - smart_detect_event_ids=[], - camera_id=doorbell.id, - api=ufp.api, - ) - - new_camera = doorbell.copy() - new_camera.last_ring_event_id = "test_event_id" - ufp.api.bootstrap.cameras = {new_camera.id: new_camera} - ufp.api.bootstrap.events = {event.id: event} - - mock_msg = Mock() - mock_msg.changed_data = {} - mock_msg.new_obj = event - ufp.ws_msg(mock_msg) - - await hass.async_block_till_done() - - assert len(events) == 1 - state = events[0].data["new_state"] - assert state - timestamp = state.state - assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION - assert state.attributes[ATTR_EVENT_ID] == "test_event_id" - - event = Event( - model=ModelType.EVENT, - id="test_event_id", - type=EventType.RING, - start=fixed_now - timedelta(seconds=1), - end=fixed_now + timedelta(seconds=1), - score=50, - smart_detect_types=[], - smart_detect_event_ids=[], - camera_id=doorbell.id, - api=ufp.api, - ) - - new_camera = doorbell.copy() - ufp.api.bootstrap.cameras = {new_camera.id: new_camera} - ufp.api.bootstrap.events = {event.id: event} - - mock_msg = Mock() - mock_msg.changed_data = {} - mock_msg.new_obj = event - ufp.ws_msg(mock_msg) - - await hass.async_block_till_done() - - # Event is already seen and has end, should now be off - state = hass.states.get(entity_id) - assert state - assert state.state == timestamp - - # Now send an event that has an end right away - event = Event( - model=ModelType.EVENT, - id="new_event_id", - type=EventType.RING, - start=fixed_now - timedelta(seconds=1), - end=fixed_now + timedelta(seconds=1), - score=80, - 
smart_detect_types=[SmartDetectObjectType.PACKAGE], - smart_detect_event_ids=[], - camera_id=doorbell.id, - api=ufp.api, - ) - - new_camera = doorbell.copy() - ufp.api.bootstrap.cameras = {new_camera.id: new_camera} - ufp.api.bootstrap.events = {event.id: event} - - mock_msg = Mock() - mock_msg.changed_data = {} - mock_msg.new_obj = event - - ufp.ws_msg(mock_msg) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state - assert state.state == timestamp - unsub() diff --git a/tests/components/unifiprotect/test_lock.py b/tests/components/unifiprotect/test_lock.py index 8b37b1c5928..62a1cb9ff46 100644 --- a/tests/components/unifiprotect/test_lock.py +++ b/tests/components/unifiprotect/test_lock.py @@ -6,12 +6,16 @@ from unittest.mock import AsyncMock, Mock from uiprotect.data import Doorlock, LockStatusType -from homeassistant.components.lock import LockState from homeassistant.components.unifiprotect.const import DEFAULT_ATTRIBUTION from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_ENTITY_ID, + STATE_JAMMED, + STATE_LOCKED, + STATE_LOCKING, STATE_UNAVAILABLE, + STATE_UNLOCKED, + STATE_UNLOCKING, Platform, ) from homeassistant.core import HomeAssistant @@ -60,7 +64,7 @@ async def test_lock_setup( state = hass.states.get(entity_id) assert state - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION @@ -88,7 +92,7 @@ async def test_lock_locked( state = hass.states.get("lock.test_lock_lock") assert state - assert state.state == LockState.LOCKED + assert state.state == STATE_LOCKED async def test_lock_unlocking( @@ -115,7 +119,7 @@ async def test_lock_unlocking( state = hass.states.get("lock.test_lock_lock") assert state - assert state.state == LockState.UNLOCKING + assert state.state == STATE_UNLOCKING async def test_lock_locking( @@ -142,7 +146,7 @@ async def test_lock_locking( state = hass.states.get("lock.test_lock_lock") assert state - assert state.state == LockState.LOCKING + assert state.state == STATE_LOCKING async def test_lock_jammed( @@ -169,7 +173,7 @@ async def test_lock_jammed( state = hass.states.get("lock.test_lock_lock") assert state - assert state.state == LockState.JAMMED + assert state.state == STATE_JAMMED async def test_lock_unavailable( diff --git a/tests/components/unifiprotect/test_media_source.py b/tests/components/unifiprotect/test_media_source.py index 18944460ca5..60cd3150884 100644 --- a/tests/components/unifiprotect/test_media_source.py +++ b/tests/components/unifiprotect/test_media_source.py @@ -669,7 +669,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.RING, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=[], @@ -683,7 +683,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.MOTION, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=[], @@ -697,7 +697,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["person"], @@ -706,7 +706,7 @@ async def test_browse_media_recent_truncated( metadata={ "detected_thumbnails": [ { - "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), + "clock_best_wall": 
datetime(1000, 1, 1, 0, 0, 0), "type": "person", "cropped_id": "event_id", } @@ -720,7 +720,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "person"], @@ -734,7 +734,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -748,7 +748,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -758,7 +758,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", } @@ -772,7 +772,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -782,7 +782,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -802,7 +802,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -812,7 +812,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -823,7 +823,7 @@ async def test_browse_media_recent_truncated( }, }, { - "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), "type": "person", "cropped_id": "event_id", }, @@ -837,7 +837,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle"], @@ -846,7 +846,7 @@ async def test_browse_media_recent_truncated( metadata={ "detected_thumbnails": [ { - "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -870,7 +870,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_AUDIO_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["alrmSpeak"], diff --git a/tests/components/unifiprotect/test_migrate.py 
b/tests/components/unifiprotect/test_migrate.py index 4bfc29a142b..4e1bf8bd418 100644 --- a/tests/components/unifiprotect/test_migrate.py +++ b/tests/components/unifiprotect/test_migrate.py @@ -7,6 +7,9 @@ from unittest.mock import patch from uiprotect.data import Camera from homeassistant.components.automation import DOMAIN as AUTOMATION_DOMAIN +from homeassistant.components.repairs.issue_handler import ( + async_process_repairs_platforms, +) from homeassistant.components.script import DOMAIN as SCRIPT_DOMAIN from homeassistant.components.unifiprotect.const import DOMAIN from homeassistant.const import SERVICE_RELOAD, Platform @@ -16,7 +19,6 @@ from homeassistant.setup import async_setup_component from .utils import MockUFPFixture, init_entry -from tests.components.repairs import async_process_repairs_platforms from tests.typing import WebSocketGenerator diff --git a/tests/components/unifiprotect/test_repairs.py b/tests/components/unifiprotect/test_repairs.py index adb9555e6ea..bdfcd6ff475 100644 --- a/tests/components/unifiprotect/test_repairs.py +++ b/tests/components/unifiprotect/test_repairs.py @@ -3,21 +3,24 @@ from __future__ import annotations from copy import copy, deepcopy +from http import HTTPStatus from unittest.mock import AsyncMock, Mock from uiprotect.data import Camera, CloudAccount, ModelType, Version +from homeassistant.components.repairs.issue_handler import ( + async_process_repairs_platforms, +) +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) from homeassistant.components.unifiprotect.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.core import HomeAssistant from .utils import MockUFPFixture, init_entry -from tests.components.repairs import ( - async_process_repairs_platforms, - process_repair_fix_flow, - start_repair_fix_flow, -) from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -49,7 +52,12 @@ async def test_ea_warning_ignore( issue = i assert issue is not None - data = await start_repair_fix_flow(client, DOMAIN, "ea_channel_warning") + url = RepairsFlowIndexView.url + resp = await client.post( + url, json={"handler": DOMAIN, "issue_id": "ea_channel_warning"} + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -58,7 +66,10 @@ async def test_ea_warning_ignore( } assert data["step_id"] == "start" - data = await process_repair_fix_flow(client, flow_id) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -67,7 +78,10 @@ async def test_ea_warning_ignore( } assert data["step_id"] == "confirm" - data = await process_repair_fix_flow(client, flow_id) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" @@ -100,7 +114,12 @@ async def test_ea_warning_fix( issue = i assert issue is not None - data = await start_repair_fix_flow(client, DOMAIN, "ea_channel_warning") + url = RepairsFlowIndexView.url + resp = await client.post( + url, json={"handler": DOMAIN, "issue_id": "ea_channel_warning"} + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -120,7 
+139,10 @@ async def test_ea_warning_fix( ufp.ws_msg(mock_msg) await hass.async_block_till_done() - data = await process_repair_fix_flow(client, flow_id) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" @@ -154,12 +176,18 @@ async def test_cloud_user_fix( issue = i assert issue is not None - data = await start_repair_fix_flow(client, DOMAIN, "cloud_user") + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "cloud_user"}) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["step_id"] == "confirm" - data = await process_repair_fix_flow(client, flow_id) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -200,17 +228,26 @@ async def test_rtsp_read_only_ignore( issue = i assert issue is not None - data = await start_repair_fix_flow(client, DOMAIN, issue_id) + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["step_id"] == "start" - data = await process_repair_fix_flow(client, flow_id) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["step_id"] == "confirm" - data = await process_repair_fix_flow(client, flow_id) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" @@ -250,12 +287,18 @@ async def test_rtsp_read_only_fix( issue = i assert issue is not None - data = await start_repair_fix_flow(client, DOMAIN, issue_id) + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["step_id"] == "start" - data = await process_repair_fix_flow(client, flow_id) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" @@ -294,12 +337,18 @@ async def test_rtsp_writable_fix( issue = i assert issue is not None - data = await start_repair_fix_flow(client, DOMAIN, issue_id) + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["step_id"] == "start" - data = await process_repair_fix_flow(client, flow_id) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" @@ -349,12 +398,18 @@ async def test_rtsp_writable_fix_when_not_setup( await hass.config_entries.async_unload(ufp.entry.entry_id) await hass.async_block_till_done() - data = await start_repair_fix_flow(client, DOMAIN, issue_id) + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": 
DOMAIN, "issue_id": issue_id}) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["step_id"] == "start" - data = await process_repair_fix_flow(client, flow_id) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" diff --git a/tests/components/unifiprotect/test_switch.py b/tests/components/unifiprotect/test_switch.py index 9e0e9efa0ce..6e5c83ef237 100644 --- a/tests/components/unifiprotect/test_switch.py +++ b/tests/components/unifiprotect/test_switch.py @@ -35,16 +35,15 @@ CAMERA_SWITCHES_BASIC = [ for d in CAMERA_SWITCHES if ( not d.name.startswith("Detections:") - and d.name - not in {"SSH enabled", "Color night vision", "Tracking: person", "HDR mode"} + and d.name != "SSH enabled" + and d.name != "Color night vision" + and d.name != "Tracking: person" + and d.name != "HDR mode" ) - or d.name - in { - "Detections: motion", - "Detections: person", - "Detections: vehicle", - "Detections: animal", - } + or d.name == "Detections: motion" + or d.name == "Detections: person" + or d.name == "Detections: vehicle" + or d.name == "Detections: animal" ] CAMERA_SWITCHES_NO_EXTRA = [ d diff --git a/tests/components/unifiprotect/utils.py b/tests/components/unifiprotect/utils.py index 25a9ddcbb92..21c01f77c5f 100644 --- a/tests/components/unifiprotect/utils.py +++ b/tests/components/unifiprotect/utils.py @@ -5,6 +5,7 @@ from __future__ import annotations from collections.abc import Callable, Sequence from dataclasses import dataclass from datetime import timedelta +from typing import Any from unittest.mock import Mock from uiprotect import ProtectApiClient @@ -40,11 +41,11 @@ class MockUFPFixture: ws_subscription: Callable[[WSSubscriptionMessage], None] | None = None ws_state_subscription: Callable[[WebsocketState], None] | None = None - def ws_msg(self, msg: WSSubscriptionMessage) -> None: + def ws_msg(self, msg: WSSubscriptionMessage) -> Any: """Emit WS message for testing.""" if self.ws_subscription is not None: - self.ws_subscription(msg) + return self.ws_subscription(msg) def reset_objects(bootstrap: Bootstrap): diff --git a/tests/components/universal/test_media_player.py b/tests/components/universal/test_media_player.py index 5ebfd2c13ad..814fa34a125 100644 --- a/tests/components/universal/test_media_player.py +++ b/tests/components/universal/test_media_player.py @@ -8,11 +8,8 @@ from voluptuous.error import MultipleInvalid from homeassistant import config as hass_config from homeassistant.components import input_number, input_select, media_player, switch -from homeassistant.components.media_player import ( - BrowseMedia, - MediaClass, - MediaPlayerEntityFeature, -) +from homeassistant.components.media_player import MediaClass, MediaPlayerEntityFeature +from homeassistant.components.media_player.browse_media import BrowseMedia import homeassistant.components.universal.media_player as universal from homeassistant.const import ( SERVICE_RELOAD, @@ -58,7 +55,7 @@ def validate_config(config): class MockMediaPlayer(media_player.MediaPlayerEntity): """Mock media player for testing.""" - def __init__(self, hass: HomeAssistant, name: str) -> None: + def __init__(self, hass, name): """Initialize the media player.""" self.hass = hass self._name = name @@ -223,7 +220,7 @@ class MockMediaPlayer(media_player.MediaPlayerEntity): @pytest.fixture -async def mock_states(hass: HomeAssistant) -> Mock: +async def 
mock_states(hass): """Set mock states used in tests.""" result = Mock() @@ -328,10 +325,10 @@ async def test_config_bad_children(hass: HomeAssistant) -> None: config_bad_children = {"name": "test", "children": {}, "platform": "universal"} config_no_children = validate_config(config_no_children) - assert config_no_children["children"] == [] + assert [] == config_no_children["children"] config_bad_children = validate_config(config_bad_children) - assert config_bad_children["children"] == [] + assert [] == config_bad_children["children"] async def test_config_bad_commands(hass: HomeAssistant) -> None: @@ -339,7 +336,7 @@ async def test_config_bad_commands(hass: HomeAssistant) -> None: config = {"name": "test", "platform": "universal"} config = validate_config(config) - assert config["commands"] == {} + assert {} == config["commands"] async def test_config_bad_attributes(hass: HomeAssistant) -> None: @@ -347,7 +344,7 @@ async def test_config_bad_attributes(hass: HomeAssistant) -> None: config = {"name": "test", "platform": "universal"} config = validate_config(config) - assert config["attributes"] == {} + assert {} == config["attributes"] async def test_config_bad_key(hass: HomeAssistant) -> None: @@ -1283,7 +1280,6 @@ async def test_master_state_with_template(hass: HomeAssistant) -> None: context = Context() hass.states.async_set("input_boolean.test", STATE_ON, context=context) await hass.async_block_till_done() - await hass.async_block_till_done() assert hass.states.get("media_player.tv").state == STATE_OFF assert events[0].context == context diff --git a/tests/components/upb/test_config_flow.py b/tests/components/upb/test_config_flow.py index 59a4e97d22b..d5d6d70bb68 100644 --- a/tests/components/upb/test_config_flow.py +++ b/tests/components/upb/test_config_flow.py @@ -1,11 +1,10 @@ """Test the UPB Control config flow.""" from asyncio import TimeoutError -from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch +from unittest.mock import MagicMock, PropertyMock, patch from homeassistant import config_entries from homeassistant.components.upb.const import DOMAIN -from homeassistant.config_entries import ConfigFlowResult from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -16,20 +15,17 @@ def mocked_upb(sync_complete=True, config_ok=True): def _upb_lib_connect(callback): callback() - upb_mock = AsyncMock() + upb_mock = MagicMock() type(upb_mock).network_id = PropertyMock(return_value="42") type(upb_mock).config_ok = PropertyMock(return_value=config_ok) - type(upb_mock).disconnect = MagicMock() if sync_complete: - upb_mock.async_connect.side_effect = _upb_lib_connect + upb_mock.connect.side_effect = _upb_lib_connect return patch( "homeassistant.components.upb.config_flow.upb_lib.UpbPim", return_value=upb_mock ) -async def valid_tcp_flow( - hass: HomeAssistant, sync_complete: bool = True, config_ok: bool = True -) -> ConfigFlowResult: +async def valid_tcp_flow(hass, sync_complete=True, config_ok=True): """Get result dict that are standard for most tests.""" with ( @@ -114,3 +110,42 @@ async def test_form_user_with_already_configured(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" await hass.async_block_till_done() + + +async def test_form_import(hass: HomeAssistant) -> None: + """Test we get the form with import source.""" + + with ( + mocked_upb(), + patch( + "homeassistant.components.upb.async_setup_entry", return_value=True + ) as mock_setup_entry, + 
): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={"host": "tcp://42.4.2.42", "file_path": "upb.upe"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "UPB" + + assert result["data"] == {"host": "tcp://42.4.2.42", "file_path": "upb.upe"} + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_junk_input(hass: HomeAssistant) -> None: + """Test we get the form with import source.""" + + with mocked_upb(): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={"foo": "goo", "goo": "foo"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + + await hass.async_block_till_done() diff --git a/tests/components/update/common.py b/tests/components/update/common.py index 465812e6a3a..70b69498f66 100644 --- a/tests/components/update/common.py +++ b/tests/components/update/common.py @@ -5,16 +5,48 @@ from typing import Any from homeassistant.components.update import UpdateEntity +from tests.common import MockEntity + _LOGGER = logging.getLogger(__name__) -class MockUpdateEntity(UpdateEntity): +class MockUpdateEntity(MockEntity, UpdateEntity): """Mock UpdateEntity class.""" - def __init__(self, **values: Any) -> None: - """Initialize an entity.""" - for key, val in values.items(): - setattr(self, f"_attr_{key}", val) + @property + def auto_update(self) -> bool: + """Indicate if the device or service has auto update enabled.""" + return self._handle("auto_update") + + @property + def installed_version(self) -> str | None: + """Version currently installed and in use.""" + return self._handle("installed_version") + + @property + def in_progress(self) -> bool | int | None: + """Update installation progress.""" + return self._handle("in_progress") + + @property + def latest_version(self) -> str | None: + """Latest version available for install.""" + return self._handle("latest_version") + + @property + def release_summary(self) -> str | None: + """Summary of the release notes or changelog.""" + return self._handle("release_summary") + + @property + def release_url(self) -> str | None: + """URL to the full release notes of the latest version available.""" + return self._handle("release_url") + + @property + def title(self) -> str | None: + """Title of the software.""" + return self._handle("title") def install(self, version: str | None, backup: bool, **kwargs: Any) -> None: """Install an update.""" @@ -22,10 +54,10 @@ class MockUpdateEntity(UpdateEntity): _LOGGER.info("Creating backup before installing update") if version is not None: - self._attr_installed_version = version + self._values["installed_version"] = version _LOGGER.info("Installed update with version: %s", version) else: - self._attr_installed_version = self.latest_version + self._values["installed_version"] = self.latest_version _LOGGER.info("Installed latest update") def release_notes(self) -> str | None: diff --git a/tests/components/update/conftest.py b/tests/components/update/conftest.py index eae5cc318da..759f243e8db 100644 --- a/tests/components/update/conftest.py +++ b/tests/components/update/conftest.py @@ -51,24 +51,12 @@ def mock_update_entities() -> list[MockUpdateEntity]: ), MockUpdateEntity( name="Update Already in Progress", - unique_id="update_already_in_progress", + unique_id="update_already_in_progres", installed_version="1.0.0", 
latest_version="1.0.1", - in_progress=True, + in_progress=50, supported_features=UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS, - update_percentage=50, - ), - MockUpdateEntity( - name="Update Already in Progress Float", - unique_id="update_already_in_progress_float", - installed_version="1.0.0", - latest_version="1.0.1", - in_progress=True, - supported_features=UpdateEntityFeature.INSTALL - | UpdateEntityFeature.PROGRESS, - update_percentage=0.25, - display_precision=2, ), MockUpdateEntity( name="Update No Install", diff --git a/tests/components/update/test_device_trigger.py b/tests/components/update/test_device_trigger.py index 202b3d32509..fa9af863f56 100644 --- a/tests/components/update/test_device_trigger.py +++ b/tests/components/update/test_device_trigger.py @@ -21,6 +21,7 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, setup_test_component_platform, ) @@ -30,6 +31,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -175,7 +182,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_update_entities: list[MockUpdateEntity], ) -> None: """Test for turn_on and turn_off triggers firing.""" @@ -246,21 +253,21 @@ async def test_if_fires_on_state_change( state = hass.states.get("update.update_available") assert state assert state.state == STATE_ON - assert not service_calls + assert not calls hass.states.async_set("update.update_available", STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == "no_update device - update.update_available - on - off - None" ) hass.states.async_set("update.update_available", STATE_ON) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[1].data["some"] + calls[1].data["some"] == "update_available device - update.update_available - off - on - None" ) @@ -269,7 +276,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], mock_update_entities: list[MockUpdateEntity], ) -> None: """Test for turn_on and turn_off triggers firing.""" @@ -319,13 +326,13 @@ async def test_if_fires_on_state_change_legacy( state = hass.states.get("update.update_available") assert state assert state.state == STATE_ON - assert not service_calls + assert not calls hass.states.async_set("update.update_available", STATE_OFF) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == "no_update device - update.update_available - on - off - None" ) @@ -334,7 +341,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: 
list[ServiceCall], mock_update_entities: list[MockUpdateEntity], ) -> None: """Test for triggers firing with delay.""" @@ -385,16 +392,16 @@ async def test_if_fires_on_state_change_with_for( state = hass.states.get("update.update_available") assert state assert state.state == STATE_ON - assert not service_calls + assert not calls hass.states.async_set("update.update_available", STATE_OFF) await hass.async_block_till_done() - assert not service_calls + assert not calls async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await hass.async_block_till_done() assert ( - service_calls[0].data["some"] + calls[0].data["some"] == "turn_off device - update.update_available - on - off - 0:00:05" ) diff --git a/tests/components/update/test_init.py b/tests/components/update/test_init.py index a35f7bb0f12..b37abc2263a 100644 --- a/tests/components/update/test_init.py +++ b/tests/components/update/test_init.py @@ -1,10 +1,9 @@ """The tests for the Update component.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch -from awesomeversion import AwesomeVersion, AwesomeVersionStrategy import pytest +from typing_extensions import Generator from homeassistant.components.update import ( ATTR_BACKUP, @@ -18,7 +17,6 @@ from homeassistant.components.update import ( ) from homeassistant.components.update.const import ( ATTR_AUTO_UPDATE, - ATTR_DISPLAY_PRECISION, ATTR_IN_PROGRESS, ATTR_INSTALLED_VERSION, ATTR_LATEST_VERSION, @@ -26,15 +24,11 @@ from homeassistant.components.update.const import ( ATTR_RELEASE_URL, ATTR_SKIPPED_VERSION, ATTR_TITLE, - ATTR_UPDATE_PERCENTAGE, UpdateEntityFeature, ) from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import ( ATTR_ENTITY_ID, - ATTR_ENTITY_PICTURE, - ATTR_FRIENDLY_NAME, - ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, @@ -93,7 +87,6 @@ async def test_update(hass: HomeAssistant) -> None: assert update.state == STATE_ON assert update.state_attributes == { ATTR_AUTO_UPDATE: False, - ATTR_DISPLAY_PRECISION: 0, ATTR_INSTALLED_VERSION: "1.0.0", ATTR_IN_PROGRESS: False, ATTR_LATEST_VERSION: "1.0.1", @@ -101,7 +94,6 @@ async def test_update(hass: HomeAssistant) -> None: ATTR_RELEASE_URL: "https://example.com", ATTR_SKIPPED_VERSION: None, ATTR_TITLE: "Title", - ATTR_UPDATE_PERCENTAGE: None, } # Test no update available @@ -548,20 +540,10 @@ async def test_entity_with_backup_support( assert "Installed update with version: 0.9.8" in caplog.text -@pytest.mark.parametrize( - ("entity_id", "expected_display_precision", "expected_update_percentage"), - [ - ("update.update_already_in_progress", 0, 50), - ("update.update_already_in_progress_float", 2, 0.25), - ], -) async def test_entity_already_in_progress( hass: HomeAssistant, mock_update_entities: list[MockUpdateEntity], caplog: pytest.LogCaptureFixture, - entity_id: str, - expected_display_precision: int, - expected_update_percentage: float, ) -> None: """Test update install already in progress.""" setup_test_component_platform(hass, DOMAIN, mock_update_entities) @@ -569,14 +551,12 @@ async def test_entity_already_in_progress( assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) await hass.async_block_till_done() - state = hass.states.get(entity_id) + state = hass.states.get("update.update_already_in_progress") assert state assert state.state == STATE_ON - assert state.attributes[ATTR_DISPLAY_PRECISION] == 
expected_display_precision assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "1.0.1" - assert state.attributes[ATTR_IN_PROGRESS] is True - assert state.attributes[ATTR_UPDATE_PERCENTAGE] == expected_update_percentage + assert state.attributes[ATTR_IN_PROGRESS] == 50 with pytest.raises( HomeAssistantError, @@ -585,20 +565,10 @@ async def test_entity_already_in_progress( await hass.services.async_call( DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: entity_id}, + {ATTR_ENTITY_ID: "update.update_already_in_progress"}, blocking=True, ) - # Check update percentage is suppressed when in_progress is False - entity = next( - entity for entity in mock_update_entities if entity.entity_id == entity_id - ) - entity._attr_in_progress = False - entity.async_write_ha_state() - state = hass.states.get(entity_id) - assert state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - async def test_entity_without_progress_support( hass: HomeAssistant, @@ -986,125 +956,3 @@ async def test_deprecated_supported_features_ints_with_service_call( }, blocking=True, ) - - -async def test_custom_version_is_newer(hass: HomeAssistant) -> None: - """Test UpdateEntity with overridden version_is_newer method.""" - - class MockUpdateEntity(UpdateEntity): - def version_is_newer(self, latest_version: str, installed_version: str) -> bool: - """Return True if latest_version is newer than installed_version.""" - return AwesomeVersion( - latest_version, - find_first_match=True, - ensure_strategy=[AwesomeVersionStrategy.SEMVER], - ) > AwesomeVersion( - installed_version, - find_first_match=True, - ensure_strategy=[AwesomeVersionStrategy.SEMVER], - ) - - update = MockUpdateEntity() - update.hass = hass - update.platform = MockEntityPlatform(hass) - - STABLE = "20230913-111730/v1.14.0-gcb84623" - BETA = "20231107-162609/v1.14.1-rc1-g0617c15" - - # Set current installed version to STABLE - update._attr_installed_version = STABLE - update._attr_latest_version = BETA - - assert update.installed_version == STABLE - assert update.latest_version == BETA - assert update.state == STATE_ON - - # Set current installed version to BETA - update._attr_installed_version = BETA - update._attr_latest_version = STABLE - - assert update.installed_version == BETA - assert update.latest_version == STABLE - assert update.state == STATE_OFF - - -@pytest.mark.parametrize( - ("supported_features", "extra_expected_attributes"), - [ - ( - 0, - [ - {}, - {}, - {}, - {}, - {}, - {}, - {}, - ], - ), - ( - UpdateEntityFeature.PROGRESS, - [ - {ATTR_IN_PROGRESS: False}, - {ATTR_IN_PROGRESS: False}, - {ATTR_IN_PROGRESS: True, ATTR_UPDATE_PERCENTAGE: 0}, - {ATTR_IN_PROGRESS: True}, - {ATTR_IN_PROGRESS: True, ATTR_UPDATE_PERCENTAGE: 1}, - {ATTR_IN_PROGRESS: True, ATTR_UPDATE_PERCENTAGE: 10}, - {ATTR_IN_PROGRESS: True, ATTR_UPDATE_PERCENTAGE: 100}, - ], - ), - ], -) -async def test_update_percentage_backwards_compatibility( - hass: HomeAssistant, - supported_features: UpdateEntityFeature, - extra_expected_attributes: list[dict], -) -> None: - """Test deriving update percentage from deprecated in_progress.""" - update = MockUpdateEntity() - - update._attr_installed_version = "1.0.0" - update._attr_latest_version = "1.0.1" - update._attr_name = "legacy" - update._attr_release_summary = "Summary" - update._attr_release_url = "https://example.com" - update._attr_supported_features = supported_features - update._attr_title = "Title" - - setup_test_component_platform(hass, 
DOMAIN, [update]) - assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) - await hass.async_block_till_done() - - expected_attributes = { - ATTR_AUTO_UPDATE: False, - ATTR_DISPLAY_PRECISION: 0, - ATTR_ENTITY_PICTURE: "https://brands.home-assistant.io/_/test/icon.png", - ATTR_FRIENDLY_NAME: "legacy", - ATTR_INSTALLED_VERSION: "1.0.0", - ATTR_IN_PROGRESS: False, - ATTR_LATEST_VERSION: "1.0.1", - ATTR_RELEASE_SUMMARY: "Summary", - ATTR_RELEASE_URL: "https://example.com", - ATTR_SKIPPED_VERSION: None, - ATTR_SUPPORTED_FEATURES: supported_features, - ATTR_TITLE: "Title", - ATTR_UPDATE_PERCENTAGE: None, - } - - state = hass.states.get("update.legacy") - assert state is not None - assert state.state == STATE_ON - assert state.attributes == expected_attributes | extra_expected_attributes[0] - - in_progress_list = [False, 0, True, 1, 10, 100] - - for i, in_progress in enumerate(in_progress_list): - update._attr_in_progress = in_progress - update.async_write_ha_state() - state = hass.states.get("update.legacy") - assert state.state == STATE_ON - assert ( - state.attributes == expected_attributes | extra_expected_attributes[i + 1] - ) diff --git a/tests/components/update/test_recorder.py b/tests/components/update/test_recorder.py index 68e5f93a757..0bd209ce1c2 100644 --- a/tests/components/update/test_recorder.py +++ b/tests/components/update/test_recorder.py @@ -7,11 +7,9 @@ from datetime import timedelta from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states from homeassistant.components.update.const import ( - ATTR_DISPLAY_PRECISION, ATTR_IN_PROGRESS, ATTR_INSTALLED_VERSION, ATTR_RELEASE_SUMMARY, - ATTR_UPDATE_PERCENTAGE, DOMAIN, ) from homeassistant.const import ATTR_ENTITY_PICTURE, CONF_PLATFORM @@ -36,9 +34,7 @@ async def test_exclude_attributes( assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) await hass.async_block_till_done() state = hass.states.get("update.update_already_in_progress") - assert state.attributes[ATTR_DISPLAY_PRECISION] == 0 - assert state.attributes[ATTR_IN_PROGRESS] is True - assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 50 + assert state.attributes[ATTR_IN_PROGRESS] == 50 assert ( state.attributes[ATTR_ENTITY_PICTURE] == "https://brands.home-assistant.io/_/test/icon.png" @@ -56,9 +52,7 @@ async def test_exclude_attributes( assert len(states) >= 1 for entity_states in states.values(): for state in entity_states: - assert ATTR_DISPLAY_PRECISION not in state.attributes assert ATTR_ENTITY_PICTURE not in state.attributes assert ATTR_IN_PROGRESS not in state.attributes assert ATTR_RELEASE_SUMMARY not in state.attributes assert ATTR_INSTALLED_VERSION in state.attributes - assert ATTR_UPDATE_PERCENTAGE not in state.attributes diff --git a/tests/components/upnp/conftest.py b/tests/components/upnp/conftest.py index 4bee5c0e589..0bfcd062ac0 100644 --- a/tests/components/upnp/conftest.py +++ b/tests/components/upnp/conftest.py @@ -2,15 +2,11 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine, Generator import copy from datetime import datetime -import socket -from typing import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, create_autospec, patch from urllib.parse import urlparse -from async_upnp_client.aiohttp import AiohttpNotifyServer from async_upnp_client.client import UpnpDevice from async_upnp_client.profiles.igd import IgdDevice, IgdState import pytest @@ -102,24 +98,9 @@ def 
mock_igd_device(mock_async_create_device) -> IgdDevice: port_mapping_number_of_entries=0, ) - mock_igd_device.async_subscribe_services = AsyncMock() - - mock_notify_server = create_autospec(AiohttpNotifyServer) - mock_notify_server.event_handler = MagicMock() - - with ( - patch( - "homeassistant.components.upnp.device.async_get_local_ip", - return_value=(socket.AF_INET, "127.0.0.1"), - ), - patch( - "homeassistant.components.upnp.device.IgdDevice.__new__", - return_value=mock_igd_device, - ), - patch( - "homeassistant.components.upnp.device.AiohttpNotifyServer.__new__", - return_value=mock_notify_server, - ), + with patch( + "homeassistant.components.upnp.device.IgdDevice.__new__", + return_value=mock_igd_device, ): yield mock_igd_device @@ -155,7 +136,7 @@ def mock_setup_entry(): @pytest.fixture(autouse=True) -def silent_ssdp_scanner() -> Generator[None]: +async def silent_ssdp_scanner(hass): """Start SSDP component and get Scanner, prevent actual SSDP traffic.""" with ( patch("homeassistant.components.ssdp.Scanner._async_start_ssdp_listeners"), @@ -176,13 +157,7 @@ async def ssdp_instant_discovery(): """Instant discovery.""" # Set up device discovery callback. - async def register_callback( - hass: HomeAssistant, - callback: Callable[ - [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None - ], - match_dict: dict[str, str] | None = None, - ) -> MagicMock: + async def register_callback(hass, callback, match_dict): """Immediately do callback.""" await callback(TEST_DISCOVERY, ssdp.SsdpChange.ALIVE) return MagicMock() @@ -209,13 +184,7 @@ async def ssdp_instant_discovery_multi_location(): test_discovery.ssdp_all_locations = {TEST_LOCATION6, TEST_LOCATION} # Set up device discovery callback. - async def register_callback( - hass: HomeAssistant, - callback: Callable[ - [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None - ], - match_dict: dict[str, str] | None = None, - ) -> MagicMock: + async def register_callback(hass, callback, match_dict): """Immediately do callback.""" await callback(test_discovery, ssdp.SsdpChange.ALIVE) return MagicMock() @@ -238,13 +207,7 @@ async def ssdp_no_discovery(): """No discovery.""" # Set up device discovery callback. 
- async def register_callback( - hass: HomeAssistant, - callback: Callable[ - [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None - ], - match_dict: dict[str, str] | None = None, - ) -> MagicMock: + async def register_callback(hass, callback, match_dict): """Don't do callback.""" return MagicMock() @@ -267,7 +230,7 @@ async def mock_config_entry( ssdp_instant_discovery, mock_igd_device: IgdDevice, mock_mac_address_from_host, -) -> MockConfigEntry: +): """Create an initialized integration.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/upnp/test_config_flow.py b/tests/components/upnp/test_config_flow.py index 8799e0faab3..b8a08d3f592 100644 --- a/tests/components/upnp/test_config_flow.py +++ b/tests/components/upnp/test_config_flow.py @@ -9,7 +9,6 @@ import pytest from homeassistant import config_entries from homeassistant.components import ssdp from homeassistant.components.upnp.const import ( - CONFIG_ENTRY_FORCE_POLL, CONFIG_ENTRY_HOST, CONFIG_ENTRY_LOCATION, CONFIG_ENTRY_MAC_ADDRESS, @@ -474,28 +473,3 @@ async def test_flow_ssdp_with_mismatched_udn(hass: HomeAssistant) -> None: CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, CONFIG_ENTRY_HOST: TEST_HOST, } - - -async def test_options_flow( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Test that the options flow works.""" - result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - user_input = { - CONFIG_ENTRY_FORCE_POLL: True, - } - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { - CONFIG_ENTRY_FORCE_POLL: True, - } - assert mock_config_entry.options == { - CONFIG_ENTRY_FORCE_POLL: True, - } diff --git a/tests/components/upnp/test_init.py b/tests/components/upnp/test_init.py index ff74ca87b12..4b5e375f8e0 100644 --- a/tests/components/upnp/test_init.py +++ b/tests/components/upnp/test_init.py @@ -2,18 +2,13 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine import copy -from typing import Any from unittest.mock import AsyncMock, MagicMock, patch -from async_upnp_client.exceptions import UpnpCommunicationError -from async_upnp_client.profiles.igd import IgdDevice import pytest from homeassistant.components import ssdp from homeassistant.components.upnp.const import ( - CONFIG_ENTRY_FORCE_POLL, CONFIG_ENTRY_LOCATION, CONFIG_ENTRY_MAC_ADDRESS, CONFIG_ENTRY_ORIGINAL_UDN, @@ -36,9 +31,7 @@ from tests.common import MockConfigEntry @pytest.mark.usefixtures("ssdp_instant_discovery", "mock_mac_address_from_host") -async def test_async_setup_entry_default( - hass: HomeAssistant, mock_igd_device: IgdDevice -) -> None: +async def test_async_setup_entry_default(hass: HomeAssistant) -> None: """Test async_setup_entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -50,17 +43,12 @@ async def test_async_setup_entry_default( CONFIG_ENTRY_LOCATION: TEST_LOCATION, CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, }, - options={ - CONFIG_ENTRY_FORCE_POLL: False, - }, ) # Load config_entry. 
entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) is True - mock_igd_device.async_subscribe_services.assert_called() - @pytest.mark.usefixtures("ssdp_instant_discovery", "mock_no_mac_address_from_host") async def test_async_setup_entry_default_no_mac_address(hass: HomeAssistant) -> None: @@ -75,9 +63,6 @@ async def test_async_setup_entry_default_no_mac_address(hass: HomeAssistant) -> CONFIG_ENTRY_LOCATION: TEST_LOCATION, CONFIG_ENTRY_MAC_ADDRESS: None, }, - options={ - CONFIG_ENTRY_FORCE_POLL: False, - }, ) # Load config_entry. @@ -106,9 +91,6 @@ async def test_async_setup_entry_multi_location( CONFIG_ENTRY_LOCATION: TEST_LOCATION, CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, }, - options={ - CONFIG_ENTRY_FORCE_POLL: False, - }, ) # Load config_entry. @@ -137,19 +119,10 @@ async def test_async_setup_udn_mismatch( CONFIG_ENTRY_LOCATION: TEST_LOCATION, CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, }, - options={ - CONFIG_ENTRY_FORCE_POLL: False, - }, ) # Set up device discovery callback. - async def register_callback( - hass: HomeAssistant, - callback: Callable[ - [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None - ], - match_dict: dict[str, str] | None = None, - ) -> MagicMock: + async def register_callback(hass, callback, match_dict): """Immediately do callback.""" await callback(test_discovery, ssdp.SsdpChange.ALIVE) return MagicMock() @@ -170,78 +143,3 @@ async def test_async_setup_udn_mismatch( # Ensure that the IPv4 location is used. mock_async_create_device.assert_called_once_with(TEST_LOCATION) - - -@pytest.mark.usefixtures( - "ssdp_instant_discovery", - "mock_get_source_ip", - "mock_mac_address_from_host", -) -async def test_async_setup_entry_force_poll( - hass: HomeAssistant, mock_igd_device: IgdDevice -) -> None: - """Test async_setup_entry with forced polling.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id=TEST_USN, - data={ - CONFIG_ENTRY_ST: TEST_ST, - CONFIG_ENTRY_UDN: TEST_UDN, - CONFIG_ENTRY_ORIGINAL_UDN: TEST_UDN, - CONFIG_ENTRY_LOCATION: TEST_LOCATION, - CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, - }, - options={ - CONFIG_ENTRY_FORCE_POLL: True, - }, - ) - - # Load config_entry. - entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(entry.entry_id) is True - - mock_igd_device.async_subscribe_services.assert_not_called() - - # Ensure that the device is forced to poll. - mock_igd_device.async_get_traffic_and_status_data.assert_called_with( - None, force_poll=True - ) - - -@pytest.mark.usefixtures( - "ssdp_instant_discovery", - "mock_get_source_ip", - "mock_mac_address_from_host", -) -async def test_async_setup_entry_force_poll_subscribe_error( - hass: HomeAssistant, mock_igd_device: IgdDevice -) -> None: - """Test async_setup_entry where subscribing fails.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id=TEST_USN, - data={ - CONFIG_ENTRY_ST: TEST_ST, - CONFIG_ENTRY_UDN: TEST_UDN, - CONFIG_ENTRY_ORIGINAL_UDN: TEST_UDN, - CONFIG_ENTRY_LOCATION: TEST_LOCATION, - CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, - }, - options={ - CONFIG_ENTRY_FORCE_POLL: False, - }, - ) - - # Subscribing partially succeeds, but not completely. - # Unsubscribing will fail for the subscribed services afterwards. - mock_igd_device.async_subscribe_services.side_effect = UpnpCommunicationError - mock_igd_device.async_unsubscribe_services.side_effect = UpnpCommunicationError - - # Load config_entry, should still be able to load, falling back to polling/the old functionality. 
- entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(entry.entry_id) is True - - # Ensure that the device is forced to poll. - mock_igd_device.async_get_traffic_and_status_data.assert_called_with( - None, force_poll=True - ) diff --git a/tests/components/uptime/conftest.py b/tests/components/uptime/conftest.py index 008172dc35a..2fe96b91b63 100644 --- a/tests/components/uptime/conftest.py +++ b/tests/components/uptime/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import patch import pytest +from typing_extensions import Generator from homeassistant.components.uptime.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/uptime/snapshots/test_config_flow.ambr b/tests/components/uptime/snapshots/test_config_flow.ambr index 38312667375..3e5b492f871 100644 --- a/tests/components/uptime/snapshots/test_config_flow.ambr +++ b/tests/components/uptime/snapshots/test_config_flow.ambr @@ -17,8 +17,6 @@ 'data': dict({ }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'uptime', 'entry_id': , 'minor_version': 1, diff --git a/tests/components/uptime/snapshots/test_sensor.ambr b/tests/components/uptime/snapshots/test_sensor.ambr index 561e4b83320..92baf939eb3 100644 --- a/tests/components/uptime/snapshots/test_sensor.ambr +++ b/tests/components/uptime/snapshots/test_sensor.ambr @@ -61,7 +61,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'Uptime', 'name_by_user': None, 'primary_config_entry': , @@ -71,3 +70,25 @@ 'via_device_id': None, }) # --- +# name: test_uptime_sensor.3 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'is_new': False, + 'manufacturer': None, + 'model': None, + 'name': 'Uptime', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/uptimerobot/test_config_flow.py b/tests/components/uptimerobot/test_config_flow.py index 3ba5ad696a6..1cf0a358a87 100644 --- a/tests/components/uptimerobot/test_config_flow.py +++ b/tests/components/uptimerobot/test_config_flow.py @@ -168,7 +168,15 @@ async def test_reauthentication( old_entry = MockConfigEntry(**MOCK_UPTIMEROBOT_CONFIG_ENTRY_DATA) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -201,7 +209,15 @@ async def test_reauthentication_failure( old_entry = MockConfigEntry(**MOCK_UPTIMEROBOT_CONFIG_ENTRY_DATA) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -237,7 +253,15 @@ async def test_reauthentication_failure_no_existing_entry( ) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await 
hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -270,7 +294,15 @@ async def test_reauthentication_failure_account_not_matching( old_entry = MockConfigEntry(**MOCK_UPTIMEROBOT_CONFIG_ENTRY_DATA) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] is None diff --git a/tests/components/utility_meter/snapshots/test_diagnostics.ambr b/tests/components/utility_meter/snapshots/test_diagnostics.ambr index 6cdf121d7e3..28841854766 100644 --- a/tests/components/utility_meter/snapshots/test_diagnostics.ambr +++ b/tests/components/utility_meter/snapshots/test_diagnostics.ambr @@ -5,8 +5,6 @@ 'data': dict({ }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'utility_meter', 'minor_version': 1, 'options': dict({ @@ -41,17 +39,7 @@ 'status': 'collecting', 'tariff': 'tariff0', }), - 'last_sensor_data': dict({ - 'last_period': '0', - 'last_reset': '2024-04-05T00:00:00+00:00', - 'last_valid_state': 3, - 'native_unit_of_measurement': 'kWh', - 'native_value': dict({ - '__type': "", - 'decimal_str': '3', - }), - 'status': 'collecting', - }), + 'last_sensor_data': None, 'name': 'Energy Bill tariff0', 'period': 'monthly', 'source': 'sensor.input1', @@ -67,17 +55,7 @@ 'status': 'paused', 'tariff': 'tariff1', }), - 'last_sensor_data': dict({ - 'last_period': '0', - 'last_reset': '2024-04-05T00:00:00+00:00', - 'last_valid_state': 7, - 'native_unit_of_measurement': 'kWh', - 'native_value': dict({ - '__type': "", - 'decimal_str': '7', - }), - 'status': 'paused', - }), + 'last_sensor_data': None, 'name': 'Energy Bill tariff1', 'period': 'monthly', 'source': 'sensor.input1', diff --git a/tests/components/utility_meter/test_config_flow.py b/tests/components/utility_meter/test_config_flow.py index 612bfaa88d7..560566d7c49 100644 --- a/tests/components/utility_meter/test_config_flow.py +++ b/tests/components/utility_meter/test_config_flow.py @@ -72,10 +72,6 @@ async def test_config_flow(hass: HomeAssistant, platform) -> None: assert config_entry.title == "Electricity meter" -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.utility_meter.config.error.tariffs_not_unique"], -) async def test_tariffs(hass: HomeAssistant) -> None: """Test tariffs.""" input_sensor_entity_id = "sensor.input" diff --git a/tests/components/utility_meter/test_diagnostics.py b/tests/components/utility_meter/test_diagnostics.py index 8be5f949940..cefd17fc7e4 100644 --- a/tests/components/utility_meter/test_diagnostics.py +++ b/tests/components/utility_meter/test_diagnostics.py @@ -4,7 +4,6 @@ from aiohttp.test_utils import TestClient from freezegun import freeze_time import pytest from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.auth.models import Credentials from homeassistant.components.utility_meter.const import DOMAIN @@ -46,6 +45,11 @@ def _get_test_client_generator( return auth_client +def limit_diagnostic_attrs(prop, path) -> bool: + """Mark attributes to exclude from diagnostic snapshot.""" + 
return prop in {"entry_id"} + + @freeze_time("2024-04-06 00:00:00+00:00") @pytest.mark.usefixtures("socket_enabled") async def test_diagnostics( @@ -91,17 +95,7 @@ async def test_diagnostics( ATTR_LAST_RESET: last_reset, }, ), - { - "native_value": { - "__type": "", - "decimal_str": "3", - }, - "native_unit_of_measurement": "kWh", - "last_reset": last_reset, - "last_period": "0", - "last_valid_state": 3, - "status": "collecting", - }, + {}, ), ( State( @@ -111,17 +105,7 @@ async def test_diagnostics( ATTR_LAST_RESET: last_reset, }, ), - { - "native_value": { - "__type": "", - "decimal_str": "7", - }, - "native_unit_of_measurement": "kWh", - "last_reset": last_reset, - "last_period": "0", - "last_valid_state": 7, - "status": "paused", - }, + {}, ), ], ) @@ -141,4 +125,4 @@ async def test_diagnostics( hass, _get_test_client_generator(hass, aiohttp_client, new_token), config_entry ) - assert diag == snapshot(exclude=props("entry_id", "created_at", "modified_at")) + assert diag == snapshot(exclude=limit_diagnostic_attrs) diff --git a/tests/components/utility_meter/test_select.py b/tests/components/utility_meter/test_select.py index 1f54f3b500a..61f6cbe75b9 100644 --- a/tests/components/utility_meter/test_select.py +++ b/tests/components/utility_meter/test_select.py @@ -3,72 +3,10 @@ from homeassistant.components.utility_meter.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -async def test_select_entity_name_config_entry( - hass: HomeAssistant, -) -> None: - """Test for Utility Meter select platform.""" - - config_entry_config = { - "cycle": "none", - "delta_values": False, - "name": "Energy bill", - "net_consumption": False, - "offset": 0, - "periodically_resetting": True, - "source": "sensor.energy", - "tariffs": ["peak", "offpeak"], - } - - source_config_entry = MockConfigEntry() - source_config_entry.add_to_hass(hass) - utility_meter_config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options=config_entry_config, - title=config_entry_config["name"], - ) - - utility_meter_config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(utility_meter_config_entry.entry_id) - - await hass.async_block_till_done() - - state = hass.states.get("select.energy_bill") - assert state is not None - assert state.attributes.get("friendly_name") == "Energy bill" - - -async def test_select_entity_name_yaml( - hass: HomeAssistant, -) -> None: - """Test for Utility Meter select platform.""" - - yaml_config = { - "utility_meter": { - "energy_bill": { - "name": "Energy bill", - "source": "sensor.energy", - "tariffs": ["peak", "offpeak"], - "unique_id": "1234abcd", - } - } - } - - assert await async_setup_component(hass, DOMAIN, yaml_config) - - await hass.async_block_till_done() - - state = hass.states.get("select.energy_bill") - assert state is not None - assert state.attributes.get("friendly_name") == "Energy bill" - - async def test_device_id( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/utility_meter/test_sensor.py b/tests/components/utility_meter/test_sensor.py index 0ab78739f7f..745bf0ce012 100644 --- a/tests/components/utility_meter/test_sensor.py +++ b/tests/components/utility_meter/test_sensor.py @@ -26,6 +26,7 @@ from homeassistant.components.utility_meter.const import ( ) from homeassistant.components.utility_meter.sensor import ( 
ATTR_LAST_RESET, + ATTR_LAST_VALID_STATE, ATTR_STATUS, COLLECTING, PAUSED, @@ -759,6 +760,64 @@ async def test_restore_state( "status": "paused", }, ), + # sensor.energy_bill_tariff2 has missing keys and falls back to + # saved state + ( + State( + "sensor.energy_bill_tariff2", + "2.1", + attributes={ + ATTR_STATUS: PAUSED, + ATTR_LAST_RESET: last_reset_1, + ATTR_LAST_VALID_STATE: None, + ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, + }, + ), + { + "native_value": { + "__type": "", + "decimal_str": "2.2", + }, + "native_unit_of_measurement": "kWh", + "last_valid_state": "None", + }, + ), + # sensor.energy_bill_tariff3 has invalid data and falls back to + # saved state + ( + State( + "sensor.energy_bill_tariff3", + "3.1", + attributes={ + ATTR_STATUS: COLLECTING, + ATTR_LAST_RESET: last_reset_1, + ATTR_LAST_VALID_STATE: None, + ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, + }, + ), + { + "native_value": { + "__type": "", + "decimal_str": "3f", # Invalid + }, + "native_unit_of_measurement": "kWh", + "last_valid_state": "None", + }, + ), + # No extra saved data, fall back to saved state + ( + State( + "sensor.energy_bill_tariff4", + "error", + attributes={ + ATTR_STATUS: COLLECTING, + ATTR_LAST_RESET: last_reset_1, + ATTR_LAST_VALID_STATE: None, + ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, + }, + ), + {}, + ), ], ) @@ -793,6 +852,25 @@ async def test_restore_state( assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY + state = hass.states.get("sensor.energy_bill_tariff2") + assert state.state == "2.1" + assert state.attributes.get("status") == PAUSED + assert state.attributes.get("last_reset") == last_reset_1 + assert state.attributes.get("last_valid_state") == "None" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY + + state = hass.states.get("sensor.energy_bill_tariff3") + assert state.state == "3.1" + assert state.attributes.get("status") == COLLECTING + assert state.attributes.get("last_reset") == last_reset_1 + assert state.attributes.get("last_valid_state") == "None" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY + + state = hass.states.get("sensor.energy_bill_tariff4") + assert state.state == STATE_UNKNOWN + # utility_meter is loaded, now set sensors according to utility_meter: hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -804,7 +882,12 @@ async def test_restore_state( state = hass.states.get("sensor.energy_bill_tariff0") assert state.attributes.get("status") == COLLECTING - for entity_id in ("sensor.energy_bill_tariff1",): + for entity_id in ( + "sensor.energy_bill_tariff1", + "sensor.energy_bill_tariff2", + "sensor.energy_bill_tariff3", + "sensor.energy_bill_tariff4", + ): state = hass.states.get(entity_id) assert state.attributes.get("status") == PAUSED @@ -856,18 +939,7 @@ async def test_service_reset_no_tariffs( ATTR_LAST_RESET: last_reset, }, ), - { - "native_value": { - "__type": "", - "decimal_str": "3", - }, - "native_unit_of_measurement": "kWh", - "last_reset": last_reset, - "last_period": "0", - "last_valid_state": None, - "status": "collecting", - "input_device_class": "energy", - }, + {}, ), ], ) @@ -973,33 +1045,21 @@ async def test_service_reset_no_tariffs_correct_with_multi( State( 
"sensor.energy_bill", "3", - ), - { - "native_value": { - "__type": "", - "decimal_str": "3", + attributes={ + ATTR_LAST_RESET: last_reset, }, - "native_unit_of_measurement": "kWh", - "last_reset": last_reset, - "last_period": "0", - "status": "collecting", - }, + ), + {}, ), ( State( "sensor.water_bill", "6", - ), - { - "native_value": { - "__type": "", - "decimal_str": "6", + attributes={ + ATTR_LAST_RESET: last_reset, }, - "native_unit_of_measurement": "kWh", - "last_reset": last_reset, - "last_period": "0", - "status": "collecting", - }, + ), + {}, ), ], ) @@ -1744,26 +1804,6 @@ async def test_self_reset_hourly_dst(hass: HomeAssistant) -> None: ) -async def test_self_reset_hourly_dst2(hass: HomeAssistant) -> None: - """Test weekly reset of meter in DST change conditions.""" - - hass.config.time_zone = "Europe/Berlin" - dt_util.set_default_time_zone(dt_util.get_time_zone(hass.config.time_zone)) - await _test_self_reset( - hass, gen_config("daily"), "2024-10-26T23:59:00.000000+02:00" - ) - - state = hass.states.get("sensor.energy_bill") - last_reset = dt_util.parse_datetime("2024-10-27T00:00:00.000000+02:00") - assert ( - dt_util.as_local(dt_util.parse_datetime(state.attributes.get("last_reset"))) - == last_reset - ) - - next_reset = dt_util.parse_datetime("2024-10-28T00:00:00.000000+01:00").isoformat() - assert state.attributes.get("next_reset") == next_reset - - async def test_self_reset_daily(hass: HomeAssistant) -> None: """Test daily reset of meter.""" await _test_self_reset( diff --git a/tests/components/uvc/test_camera.py b/tests/components/uvc/test_camera.py index 43216e354c7..5ce8baf9919 100644 --- a/tests/components/uvc/test_camera.py +++ b/tests/components/uvc/test_camera.py @@ -4,14 +4,15 @@ from datetime import UTC, datetime, timedelta from unittest.mock import call, patch import pytest +import requests from uvcclient import camera, nvr from homeassistant.components.camera import ( DEFAULT_CONTENT_TYPE, SERVICE_DISABLE_MOTION, SERVICE_ENABLE_MOTION, + STATE_RECORDING, CameraEntityFeature, - CameraState, async_get_image, async_get_stream_source, ) @@ -45,7 +46,6 @@ def mock_remote_fixture(camera_info): ] mock_remote.return_value.index.return_value = mock_cameras mock_remote.return_value.server_version = (3, 2, 0) - mock_remote.return_value.camera_identifier = "id" yield mock_remote @@ -205,7 +205,6 @@ async def test_setup_partial_config_v31x( """Test the setup with a v3.1.x server.""" config = {"platform": "uvc", "nvr": "foo", "key": "secret"} mock_remote.return_value.server_version = (3, 1, 3) - mock_remote.return_value.camera_identifier = "uuid" assert await async_setup_component(hass, "camera", {"camera": config}) await hass.async_block_till_done() @@ -261,6 +260,7 @@ async def test_setup_incomplete_config( [ (nvr.NotAuthorized, 0), (nvr.NvrError, 2), + (requests.exceptions.ConnectionError, 2), ], ) async def test_setup_nvr_errors_during_indexing( @@ -293,6 +293,7 @@ async def test_setup_nvr_errors_during_indexing( [ (nvr.NotAuthorized, 0), (nvr.NvrError, 2), + (requests.exceptions.ConnectionError, 2), ], ) async def test_setup_nvr_errors_during_initialization( @@ -336,7 +337,7 @@ async def test_properties(hass: HomeAssistant, mock_remote) -> None: assert state assert state.name == "Front" - assert state.state == CameraState.RECORDING + assert state.state == STATE_RECORDING assert state.attributes["brand"] == "Ubiquiti" assert state.attributes["model_name"] == "UVC" assert state.attributes["supported_features"] == CameraEntityFeature.STREAM @@ -354,7 +355,7 @@ async def 
test_motion_recording_mode_properties( state = hass.states.get("camera.front") assert state - assert state.state == CameraState.RECORDING + assert state.state == STATE_RECORDING mock_remote.return_value.get_camera.return_value["recordingSettings"][ "fullTimeRecordEnabled" @@ -369,7 +370,7 @@ async def test_motion_recording_mode_properties( state = hass.states.get("camera.front") assert state - assert state.state != CameraState.RECORDING + assert state.state != STATE_RECORDING assert state.attributes["last_recording_start_time"] == datetime( 2021, 1, 8, 1, 56, 32, 367000, tzinfo=UTC ) @@ -382,7 +383,7 @@ async def test_motion_recording_mode_properties( state = hass.states.get("camera.front") assert state - assert state.state != CameraState.RECORDING + assert state.state != STATE_RECORDING mock_remote.return_value.get_camera.return_value["recordingIndicator"] = ( "MOTION_INPROGRESS" @@ -394,7 +395,7 @@ async def test_motion_recording_mode_properties( state = hass.states.get("camera.front") assert state - assert state.state == CameraState.RECORDING + assert state.state == STATE_RECORDING mock_remote.return_value.get_camera.return_value["recordingIndicator"] = ( "MOTION_FINISHED" @@ -406,7 +407,7 @@ async def test_motion_recording_mode_properties( state = hass.states.get("camera.front") assert state - assert state.state == CameraState.RECORDING + assert state.state == STATE_RECORDING async def test_stream(hass: HomeAssistant, mock_remote) -> None: diff --git a/tests/components/v2c/conftest.py b/tests/components/v2c/conftest.py index 5c7db8bbab3..1803298be28 100644 --- a/tests/components/v2c/conftest.py +++ b/tests/components/v2c/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the V2C tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from pytrydan.models.trydan import TrydanData +from typing_extensions import Generator from homeassistant.components.v2c.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/v2c/snapshots/test_diagnostics.ambr b/tests/components/v2c/snapshots/test_diagnostics.ambr index 96567b80c54..a4f6cad4cc8 100644 --- a/tests/components/v2c/snapshots/test_diagnostics.ambr +++ b/tests/components/v2c/snapshots/test_diagnostics.ambr @@ -6,8 +6,6 @@ 'host': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'v2c', 'entry_id': 'da58ee91f38c2406c2a36d0a1a7f8569', 'minor_version': 1, @@ -20,7 +18,7 @@ 'unique_id': 'ABC123', 'version': 1, }), - 'data': "TrydanData(ID='ABC123', charge_state=, ready_state=, charge_power=1500.27, voltage_installation=None, charge_energy=1.8, slave_error=, charge_time=4355, house_power=0.0, fv_power=0.0, battery_power=0.0, paused=, locked=, timer=, intensity=6, dynamic=, min_intensity=6, max_intensity=16, pause_dynamic=, dynamic_power_mode=, contracted_power=4600, firmware_version='2.1.7', SSID=None, IP=None, signal_status=None)", + 'data': "TrydanData(ID='ABC123', charge_state=, ready_state=, charge_power=1500.27, charge_energy=1.8, slave_error=, charge_time=4355, house_power=0.0, fv_power=0.0, battery_power=0.0, paused=, locked=, timer=, intensity=6, dynamic=, min_intensity=6, max_intensity=16, pause_dynamic=, dynamic_power_mode=, contracted_power=4600, firmware_version='2.1.7')", 'host_status': 200, 'raw_data': 
'{"ID":"ABC123","ChargeState":2,"ReadyState":0,"ChargePower":1500.27,"ChargeEnergy":1.8,"SlaveError":4,"ChargeTime":4355,"HousePower":0.0,"FVPower":0.0,"BatteryPower":0.0,"Paused":0,"Locked":0,"Timer":0,"Intensity":6,"Dynamic":0,"MinIntensity":6,"MaxIntensity":16,"PauseDynamic":0,"FirmwareVersion":"2.1.7","DynamicPowerMode":2,"ContractedPower":4600}', }) diff --git a/tests/components/v2c/snapshots/test_sensor.ambr b/tests/components/v2c/snapshots/test_sensor.ambr index 7b9ae4a9ff3..cc8077333cb 100644 --- a/tests/components/v2c/snapshots/test_sensor.ambr +++ b/tests/components/v2c/snapshots/test_sensor.ambr @@ -126,7 +126,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': None, + 'original_icon': 'mdi:ev-station', 'original_name': 'Charge power', 'platform': 'v2c', 'previous_unique_id': None, @@ -141,6 +141,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'EVSE 1.1.1.1 Charge power', + 'icon': 'mdi:ev-station', 'state_class': , 'unit_of_measurement': , }), @@ -254,103 +255,6 @@ 'state': '0.0', }) # --- -# name: test_sensor[sensor.evse_1_1_1_1_installation_voltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.evse_1_1_1_1_installation_voltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Installation voltage', - 'platform': 'v2c', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voltage_installation', - 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_voltage_installation', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.evse_1_1_1_1_installation_voltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'EVSE 1.1.1.1 Installation voltage', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.evse_1_1_1_1_installation_voltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensor[sensor.evse_1_1_1_1_ip_address-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.evse_1_1_1_1_ip_address', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IP address', - 'platform': 'v2c', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ip_address', - 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_ip_address', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.evse_1_1_1_1_ip_address-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'EVSE 1.1.1.1 IP address', - }), - 'context': , - 'entity_id': 'sensor.evse_1_1_1_1_ip_address', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensor[sensor.evse_1_1_1_1_meter_error-entry] EntityRegistryEntrySnapshot({ 
'aliases': set({ @@ -400,7 +304,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': , + 'entity_category': None, 'entity_id': 'sensor.evse_1_1_1_1_meter_error', 'has_entity_name': True, 'hidden_by': None, @@ -524,98 +428,3 @@ 'state': '0.0', }) # --- -# name: test_sensor[sensor.evse_1_1_1_1_signal_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.evse_1_1_1_1_signal_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Signal status', - 'platform': 'v2c', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'signal_status', - 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_signal_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.evse_1_1_1_1_signal_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'EVSE 1.1.1.1 Signal status', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.evse_1_1_1_1_signal_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensor[sensor.evse_1_1_1_1_ssid-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.evse_1_1_1_1_ssid', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'SSID', - 'platform': 'v2c', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ssid', - 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_ssid', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.evse_1_1_1_1_ssid-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'EVSE 1.1.1.1 SSID', - }), - 'context': , - 'entity_id': 'sensor.evse_1_1_1_1_ssid', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/v2c/test_diagnostics.py b/tests/components/v2c/test_diagnostics.py index eafbd68e6fc..770b00e988b 100644 --- a/tests/components/v2c/test_diagnostics.py +++ b/tests/components/v2c/test_diagnostics.py @@ -3,7 +3,6 @@ from unittest.mock import AsyncMock from syrupy import SnapshotAssertion -from syrupy.filters import props from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -25,6 +24,7 @@ async def test_entry_diagnostics( await init_integration(hass, mock_config_entry) - assert await get_diagnostics_for_config_entry( - hass, hass_client, mock_config_entry - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot() + ) diff --git a/tests/components/v2c/test_sensor.py b/tests/components/v2c/test_sensor.py index 430f91647dd..9e7e3800767 100644 --- a/tests/components/v2c/test_sensor.py +++ b/tests/components/v2c/test_sensor.py @@ -28,7 +28,7 @@ 
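The diagnostics hunks above trade syrupy's props(...) filter for either a bare snapshot() call or a custom exclusion callable (limit_diagnostic_attrs in the utility_meter hunk earlier). A hedged sketch of the callable form; the excluded attribute names here are illustrative:

from syrupy import SnapshotAssertion


def exclude_volatile_attrs(prop, path) -> bool:
    """Return True for snapshot properties that should be dropped."""
    # prop is the key being serialized, path is its location in the structure.
    return prop in {"entry_id", "created_at", "modified_at"}


def test_snapshot_with_exclusion(snapshot: SnapshotAssertion) -> None:
    """Sketch: compare data against a snapshot while dropping volatile keys."""
    data = {"entry_id": "abc123", "value": 42}
    assert data == snapshot(exclude=exclude_volatile_attrs)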
async def test_sensor( await init_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - assert _METER_ERROR_OPTIONS == [ + assert [ "no_error", "communication", "reading", @@ -64,4 +64,4 @@ async def test_sensor( "tcp_head_mismatch", "empty_message", "undefined_error", - ] + ] == _METER_ERROR_OPTIONS diff --git a/tests/components/vacuum/common.py b/tests/components/vacuum/common.py index 6228c1d2f74..0e46ebf5e44 100644 --- a/tests/components/vacuum/common.py +++ b/tests/components/vacuum/common.py @@ -4,8 +4,6 @@ All containing methods are legacy helpers that should not be used by new components. Instead call the service directly. """ -from typing import Any - from homeassistant.components.vacuum import ( ATTR_FAN_SPEED, ATTR_PARAMS, @@ -28,149 +26,136 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass @bind_hass -def turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def turn_on(hass, entity_id=ENTITY_MATCH_ALL): """Turn all or specified vacuum on.""" hass.add_job(async_turn_on, hass, entity_id) -async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): """Turn all or specified vacuum on.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) @bind_hass -def turn_off(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def turn_off(hass, entity_id=ENTITY_MATCH_ALL): """Turn all or specified vacuum off.""" hass.add_job(async_turn_off, hass, entity_id) -async def async_turn_off( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL): """Turn all or specified vacuum off.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) @bind_hass -def toggle(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def toggle(hass, entity_id=ENTITY_MATCH_ALL): """Toggle all or specified vacuum.""" hass.add_job(async_toggle, hass, entity_id) -async def async_toggle(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +async def async_toggle(hass, entity_id=ENTITY_MATCH_ALL): """Toggle all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TOGGLE, data, blocking=True) @bind_hass -def locate(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def locate(hass, entity_id=ENTITY_MATCH_ALL): """Locate all or specified vacuum.""" hass.add_job(async_locate, hass, entity_id) -async def async_locate(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +async def async_locate(hass, entity_id=ENTITY_MATCH_ALL): """Locate all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_LOCATE, data, blocking=True) @bind_hass -def clean_spot(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def clean_spot(hass, entity_id=ENTITY_MATCH_ALL): """Tell all or specified vacuum to perform a spot clean-up.""" hass.add_job(async_clean_spot, hass, entity_id) -async def async_clean_spot( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def 
async_clean_spot(hass, entity_id=ENTITY_MATCH_ALL): """Tell all or specified vacuum to perform a spot clean-up.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_CLEAN_SPOT, data, blocking=True) @bind_hass -def return_to_base(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def return_to_base(hass, entity_id=ENTITY_MATCH_ALL): """Tell all or specified vacuum to return to base.""" hass.add_job(async_return_to_base, hass, entity_id) -async def async_return_to_base( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_return_to_base(hass, entity_id=ENTITY_MATCH_ALL): """Tell all or specified vacuum to return to base.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_RETURN_TO_BASE, data, blocking=True) @bind_hass -def start_pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def start_pause(hass, entity_id=ENTITY_MATCH_ALL): """Tell all or specified vacuum to start or pause the current task.""" hass.add_job(async_start_pause, hass, entity_id) -async def async_start_pause( - hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_start_pause(hass, entity_id=ENTITY_MATCH_ALL): """Tell all or specified vacuum to start or pause the current task.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_START_PAUSE, data, blocking=True) @bind_hass -def start(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def start(hass, entity_id=ENTITY_MATCH_ALL): """Tell all or specified vacuum to start or resume the current task.""" hass.add_job(async_start, hass, entity_id) -async def async_start(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +async def async_start(hass, entity_id=ENTITY_MATCH_ALL): """Tell all or specified vacuum to start or resume the current task.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_START, data, blocking=True) @bind_hass -def pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def pause(hass, entity_id=ENTITY_MATCH_ALL): """Tell all or the specified vacuum to pause the current task.""" hass.add_job(async_pause, hass, entity_id) -async def async_pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +async def async_pause(hass, entity_id=ENTITY_MATCH_ALL): """Tell all or the specified vacuum to pause the current task.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_PAUSE, data, blocking=True) @bind_hass -def stop(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +def stop(hass, entity_id=ENTITY_MATCH_ALL): """Stop all or specified vacuum.""" hass.add_job(async_stop, hass, entity_id) -async def async_stop(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: +async def async_stop(hass, entity_id=ENTITY_MATCH_ALL): """Stop all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_STOP, data, blocking=True) @bind_hass -def set_fan_speed( - hass: HomeAssistant, fan_speed: str, entity_id: str = ENTITY_MATCH_ALL -) -> None: +def set_fan_speed(hass, fan_speed, entity_id=ENTITY_MATCH_ALL): """Set fan speed for all or specified vacuum.""" hass.add_job(async_set_fan_speed, hass, fan_speed, entity_id) -async def async_set_fan_speed( - hass: 
HomeAssistant, fan_speed: str, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_set_fan_speed(hass, fan_speed, entity_id=ENTITY_MATCH_ALL): """Set fan speed for all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} data[ATTR_FAN_SPEED] = fan_speed @@ -178,22 +163,12 @@ async def async_set_fan_speed( @bind_hass -def send_command( - hass: HomeAssistant, - command: str, - params: dict[str, Any] | list[Any] | None = None, - entity_id: str = ENTITY_MATCH_ALL, -) -> None: +def send_command(hass, command, params=None, entity_id=ENTITY_MATCH_ALL): """Send command to all or specified vacuum.""" hass.add_job(async_send_command, hass, command, params, entity_id) -async def async_send_command( - hass: HomeAssistant, - command: str, - params: dict[str, Any] | list[Any] | None = None, - entity_id: str = ENTITY_MATCH_ALL, -) -> None: +async def async_send_command(hass, command, params=None, entity_id=ENTITY_MATCH_ALL): """Send command to all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} data[ATTR_COMMAND] = command diff --git a/tests/components/vacuum/conftest.py b/tests/components/vacuum/conftest.py index d298260c575..5167c868f9f 100644 --- a/tests/components/vacuum/conftest.py +++ b/tests/components/vacuum/conftest.py @@ -1,8 +1,7 @@ """Fixtures for Vacuum platform tests.""" -from collections.abc import Generator - import pytest +from typing_extensions import Generator from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant diff --git a/tests/components/vacuum/test_device_condition.py b/tests/components/vacuum/test_device_condition.py index 9a2a67f7141..5cc222a1833 100644 --- a/tests/components/vacuum/test_device_condition.py +++ b/tests/components/vacuum/test_device_condition.py @@ -17,7 +17,11 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -25,6 +29,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -109,7 +119,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -171,30 +181,30 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_docked - event - test_event2" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_docked - event - test_event2" hass.states.async_set(entry.entity_id, STATE_CLEANING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert 
service_calls[1].data["some"] == "is_cleaning - event - test_event1" + assert len(calls) == 2 + assert calls[1].data["some"] == "is_cleaning - event - test_event1" # Returning means it's still cleaning hass.states.async_set(entry.entity_id, STATE_RETURNING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[2].data["some"] == "is_cleaning - event - test_event1" + assert len(calls) == 3 + assert calls[2].data["some"] == "is_cleaning - event - test_event1" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -237,5 +247,5 @@ async def test_if_state_legacy( ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "is_cleaning - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "is_cleaning - event - test_event1" diff --git a/tests/components/vacuum/test_device_trigger.py b/tests/components/vacuum/test_device_trigger.py index c186bd4d9eb..56e351a6446 100644 --- a/tests/components/vacuum/test_device_trigger.py +++ b/tests/components/vacuum/test_device_trigger.py @@ -20,6 +20,7 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, + async_mock_service, ) @@ -28,6 +29,12 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -175,7 +182,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -240,18 +247,18 @@ async def test_if_fires_on_state_change( # Fake that the entity is cleaning hass.states.async_set(entry.entity_id, STATE_CLEANING) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"cleaning - device - {entry.entity_id} - docked - cleaning" ) # Fake that the entity is docked hass.states.async_set(entry.entity_id, STATE_DOCKED) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[1].data["some"] + calls[1].data["some"] == f"docked - device - {entry.entity_id} - cleaning - docked" ) @@ -260,7 +267,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -306,9 +313,9 @@ async def test_if_fires_on_state_change_legacy( # Fake that the entity is cleaning hass.states.async_set(entry.entity_id, STATE_CLEANING) 
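The vacuum device-trigger and device-condition hunks around this point swap the shared service_calls fixture for a local calls fixture built on async_mock_service. A minimal sketch of how that helper records calls; the "test"/"automation" names match the fixture in the hunks, while the payload asserted here is illustrative:

import pytest

from homeassistant.core import HomeAssistant, ServiceCall

from tests.common import async_mock_service


@pytest.fixture
def calls(hass: HomeAssistant) -> list[ServiceCall]:
    """Track calls to a mock service."""
    return async_mock_service(hass, "test", "automation")


async def test_mock_service_records_calls(
    hass: HomeAssistant, calls: list[ServiceCall]
) -> None:
    """Sketch: every call to the mocked service lands in the returned list."""
    await hass.services.async_call(
        "test", "automation", {"some": "value"}, blocking=True
    )
    assert len(calls) == 1
    assert calls[0].data["some"] == "value"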
await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"cleaning - device - {entry.entity_id} - docked - cleaning" ) @@ -317,7 +324,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -363,16 +370,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 hass.states.async_set(entry.entity_id, STATE_CLEANING) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 await hass.async_block_till_done() assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"turn_off device - {entry.entity_id} - docked - cleaning - 0:00:05" ) diff --git a/tests/components/vacuum/test_init.py b/tests/components/vacuum/test_init.py index d03f1d28b58..efd2a63f0f7 100644 --- a/tests/components/vacuum/test_init.py +++ b/tests/components/vacuum/test_init.py @@ -2,13 +2,10 @@ from __future__ import annotations -from enum import Enum -from types import ModuleType from typing import Any import pytest -from homeassistant.components import vacuum from homeassistant.components.vacuum import ( DOMAIN, SERVICE_CLEAN_SPOT, @@ -33,45 +30,11 @@ from . import MockVacuum, help_async_setup_entry_init, help_async_unload_entry from tests.common import ( MockConfigEntry, MockModule, - help_test_all, - import_and_test_deprecated_constant_enum, mock_integration, setup_test_component_platform, ) -def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: - return [(enum_field, constant_prefix) for enum_field in enum if enum_field] - - -@pytest.mark.parametrize( - "module", - [vacuum], -) -def test_all(module: ModuleType) -> None: - """Test module.__all__ is correctly set.""" - help_test_all(module) - - -@pytest.mark.parametrize( - ("enum", "constant_prefix"), _create_tuples(vacuum.VacuumEntityFeature, "SUPPORT_") -) -@pytest.mark.parametrize( - "module", - [vacuum], -) -def test_deprecated_constants( - caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, - module: ModuleType, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, module, enum, constant_prefix, "2025.10" - ) - - @pytest.mark.parametrize( ("service", "expected_state"), [ diff --git a/tests/components/vallox/conftest.py b/tests/components/vallox/conftest.py index b6529409300..a6ea95944b3 100644 --- a/tests/components/vallox/conftest.py +++ b/tests/components/vallox/conftest.py @@ -5,6 +5,7 @@ from unittest.mock import AsyncMock, patch import pytest from vallox_websocket_api import MetricData +from homeassistant import config_entries from homeassistant.components.vallox.const import DOMAIN from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_NAME @@ -78,7 +79,13 @@ async def init_reconfigure_flow( hass: HomeAssistant, mock_entry, setup_vallox_entry ) -> tuple[MockConfigEntry, ConfigFlowResult]: """Initialize a config entry and a reconfigure flow for 
it.""" - result = await mock_entry.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": mock_entry.entry_id, + }, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" diff --git a/tests/components/vallox/test_init.py b/tests/components/vallox/test_init.py index 4fbde7e0357..58e46acd689 100644 --- a/tests/components/vallox/test_init.py +++ b/tests/components/vallox/test_init.py @@ -4,11 +4,7 @@ import pytest from vallox_websocket_api import Profile from homeassistant.components.vallox import ( - ATTR_DURATION, - ATTR_PROFILE, ATTR_PROFILE_FAN_SPEED, - I18N_KEY_TO_VALLOX_PROFILE, - SERVICE_SET_PROFILE, SERVICE_SET_PROFILE_FAN_SPEED_AWAY, SERVICE_SET_PROFILE_FAN_SPEED_BOOST, SERVICE_SET_PROFILE_FAN_SPEED_HOME, @@ -16,7 +12,7 @@ from homeassistant.components.vallox import ( from homeassistant.components.vallox.const import DOMAIN from homeassistant.core import HomeAssistant -from .conftest import patch_set_fan_speed, patch_set_profile +from .conftest import patch_set_fan_speed from tests.common import MockConfigEntry @@ -51,45 +47,3 @@ async def test_create_service( # Assert set_fan_speed.assert_called_once_with(profile, 30) - - -@pytest.mark.parametrize( - ("profile", "duration"), - [ - ("home", None), - ("home", 15), - ("away", None), - ("away", 15), - ("boost", None), - ("boost", 15), - ("fireplace", None), - ("fireplace", 15), - ("extra", None), - ("extra", 15), - ], -) -async def test_set_profile_service( - hass: HomeAssistant, mock_entry: MockConfigEntry, profile: str, duration: int | None -) -> None: - """Test service for setting profile and duration.""" - # Act - await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() - - with patch_set_profile() as set_profile: - service_data = {ATTR_PROFILE: profile} | ( - {ATTR_DURATION: duration} if duration is not None else {} - ) - - await hass.services.async_call( - DOMAIN, - SERVICE_SET_PROFILE, - service_data=service_data, - ) - - await hass.async_block_till_done() - - # Assert - set_profile.assert_called_once_with( - I18N_KEY_TO_VALLOX_PROFILE[profile], duration - ) diff --git a/tests/components/valve/test_init.py b/tests/components/valve/test_init.py index d8eb38a3b9b..3ef3b1ff4b0 100644 --- a/tests/components/valve/test_init.py +++ b/tests/components/valve/test_init.py @@ -1,9 +1,8 @@ """The tests for Valve.""" -from collections.abc import Generator - import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant.components.valve import ( DOMAIN, @@ -11,13 +10,16 @@ from homeassistant.components.valve import ( ValveEntity, ValveEntityDescription, ValveEntityFeature, - ValveState, ) from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_SET_VALVE_POSITION, SERVICE_TOGGLE, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, STATE_UNAVAILABLE, Platform, ) @@ -329,9 +331,7 @@ async def test_supported_features(hass: HomeAssistant) -> None: assert valve.supported_features is None -def call_service( - hass: HomeAssistant, service: str, ent: ValveEntity, position: int | None = None -): +def call_service(hass, service, ent, position=None): """Call any service on entity.""" params = {ATTR_ENTITY_ID: ent.entity_id} if position is not None: @@ -344,21 +344,21 @@ def set_valve_position(ent, position) -> None: 
ent._values["current_valve_position"] = position -def is_open(hass: HomeAssistant, ent: ValveEntity) -> bool: +def is_open(hass, ent): """Return if the valve is open based on the statemachine.""" - return hass.states.is_state(ent.entity_id, ValveState.OPEN) + return hass.states.is_state(ent.entity_id, STATE_OPEN) -def is_opening(hass: HomeAssistant, ent: ValveEntity) -> bool: +def is_opening(hass, ent): """Return if the valve is opening based on the statemachine.""" - return hass.states.is_state(ent.entity_id, ValveState.OPENING) + return hass.states.is_state(ent.entity_id, STATE_OPENING) -def is_closed(hass: HomeAssistant, ent: ValveEntity) -> bool: +def is_closed(hass, ent): """Return if the valve is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, ValveState.CLOSED) + return hass.states.is_state(ent.entity_id, STATE_CLOSED) -def is_closing(hass: HomeAssistant, ent: ValveEntity) -> bool: +def is_closing(hass, ent): """Return if the valve is closing based on the statemachine.""" - return hass.states.is_state(ent.entity_id, ValveState.CLOSING) + return hass.states.is_state(ent.entity_id, STATE_CLOSING) diff --git a/tests/components/valve/test_intent.py b/tests/components/valve/test_intent.py index 4f29017b4c1..a8f4054602b 100644 --- a/tests/components/valve/test_intent.py +++ b/tests/components/valve/test_intent.py @@ -6,8 +6,8 @@ from homeassistant.components.valve import ( SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE, SERVICE_SET_VALVE_POSITION, - ValveState, ) +from homeassistant.const import STATE_CLOSED, STATE_OPEN from homeassistant.core import HomeAssistant from homeassistant.helpers import intent from homeassistant.setup import async_setup_component @@ -20,7 +20,7 @@ async def test_open_valve_intent(hass: HomeAssistant) -> None: assert await async_setup_component(hass, "intent", {}) entity_id = f"{DOMAIN}.test_valve" - hass.states.async_set(entity_id, ValveState.CLOSED) + hass.states.async_set(entity_id, STATE_CLOSED) calls = async_mock_service(hass, DOMAIN, SERVICE_OPEN_VALVE) response = await intent.async_handle( @@ -41,7 +41,7 @@ async def test_close_valve_intent(hass: HomeAssistant) -> None: assert await async_setup_component(hass, "intent", {}) entity_id = f"{DOMAIN}.test_valve" - hass.states.async_set(entity_id, ValveState.OPEN) + hass.states.async_set(entity_id, STATE_OPEN) calls = async_mock_service(hass, DOMAIN, SERVICE_CLOSE_VALVE) response = await intent.async_handle( @@ -63,7 +63,7 @@ async def test_set_valve_position(hass: HomeAssistant) -> None: entity_id = f"{DOMAIN}.test_valve" hass.states.async_set( - entity_id, ValveState.CLOSED, attributes={ATTR_CURRENT_POSITION: 0} + entity_id, STATE_CLOSED, attributes={ATTR_CURRENT_POSITION: 0} ) calls = async_mock_service(hass, DOMAIN, SERVICE_SET_VALVE_POSITION) diff --git a/tests/components/velbus/conftest.py b/tests/components/velbus/conftest.py index 402acb821be..3d59ad615c6 100644 --- a/tests/components/velbus/conftest.py +++ b/tests/components/velbus/conftest.py @@ -1,9 +1,9 @@ """Fixtures for the Velbus tests.""" -from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.velbus.const import DOMAIN from homeassistant.config_entries import ConfigEntry diff --git a/tests/components/velbus/test_config_flow.py b/tests/components/velbus/test_config_flow.py index 432fcea10db..59effcae706 100644 --- a/tests/components/velbus/test_config_flow.py +++ b/tests/components/velbus/test_config_flow.py
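Several conftest hunks in this stretch of the diff (uptime, v2c, vacuum, velbus, velux, verisure) move the Generator import from collections.abc to typing_extensions; the fixture shape itself does not change. A hedged sketch of such a patch-based fixture, where the patch target is a placeholder rather than a real module path:

from unittest.mock import AsyncMock, patch

import pytest
from typing_extensions import Generator


@pytest.fixture
def mock_client() -> Generator[AsyncMock]:
    """Yield a patched client for the duration of a test."""
    # "example.integration.Client" is a placeholder patch target.
    with patch("example.integration.Client", autospec=True) as client_class:
        yield client_class.return_value

The single-argument Generator[AsyncMock] form relies on default type arguments from typing_extensions, which is presumably why these hunks prefer it over the collections.abc import at this point.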
@@ -1,10 +1,10 @@ """Tests for the Velbus config flow.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest import serial.tools.list_ports +from typing_extensions import Generator from velbusaio.exceptions import VelbusConnectionFailed from homeassistant.components import usb diff --git a/tests/components/velux/conftest.py b/tests/components/velux/conftest.py index 512b2a007ed..692216827b2 100644 --- a/tests/components/velux/conftest.py +++ b/tests/components/velux/conftest.py @@ -1,9 +1,9 @@ """Configuration for Velux tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/velux/test_config_flow.py b/tests/components/velux/test_config_flow.py index 5f7932d358a..8021ad52810 100644 --- a/tests/components/velux/test_config_flow.py +++ b/tests/components/velux/test_config_flow.py @@ -10,7 +10,7 @@ import pytest from pyvlx import PyVLXException from homeassistant.components.velux import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -69,8 +69,22 @@ async def test_user_errors( assert result["errors"] == {"base": error_name} -async def test_flow_duplicate_entry(hass: HomeAssistant) -> None: - """Test initialized flow with a duplicate entry.""" +async def test_import_valid_config(hass: HomeAssistant) -> None: + """Test import initialized flow with valid config.""" + with patch(PYVLX_CONFIG_FLOW_CLASS_PATH, autospec=True): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=DUMMY_DATA, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DUMMY_DATA[CONF_HOST] + assert result["data"] == DUMMY_DATA + + +@pytest.mark.parametrize("flow_source", [SOURCE_IMPORT, SOURCE_USER]) +async def test_flow_duplicate_entry(hass: HomeAssistant, flow_source: str) -> None: + """Test import initialized flow with a duplicate entry.""" with patch(PYVLX_CONFIG_FLOW_CLASS_PATH, autospec=True): conf_entry: MockConfigEntry = MockConfigEntry( domain=DOMAIN, title=DUMMY_DATA[CONF_HOST], data=DUMMY_DATA @@ -80,8 +94,26 @@ async def test_flow_duplicate_entry(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": SOURCE_USER}, + context={"source": flow_source}, data=DUMMY_DATA, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize(("error", "error_name"), error_types_to_test) +async def test_import_errors( + hass: HomeAssistant, error: Exception, error_name: str +) -> None: + """Test import initialized flow with exceptions.""" + with patch( + PYVLX_CONFIG_FLOW_CONNECT_FUNCTION_PATH, + side_effect=error, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=DUMMY_DATA, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == error_name diff --git a/tests/components/venstar/__init__.py b/tests/components/venstar/__init__.py index 6a40212b793..116a3be0925 100644 --- a/tests/components/venstar/__init__.py +++ b/tests/components/venstar/__init__.py @@ -15,7 +15,7 @@ class VenstarColorTouchMock: pin=None, 
proto="http", SSLCert=False, - ) -> None: + ): """Initialize the Venstar library.""" self.status = {} self.model = "COLORTOUCH" diff --git a/tests/components/venstar/util.py b/tests/components/venstar/util.py index 44b3efe0720..369d3332135 100644 --- a/tests/components/venstar/util.py +++ b/tests/components/venstar/util.py @@ -2,7 +2,7 @@ import requests_mock -from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN +from homeassistant.components.climate import DOMAIN from homeassistant.const import CONF_HOST, CONF_PLATFORM from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -15,7 +15,7 @@ TEST_MODELS = ["t2k", "colortouch"] def mock_venstar_devices(f): """Decorate function to mock a Venstar Colortouch and T2000 thermostat API.""" - async def wrapper(hass: HomeAssistant) -> None: + async def wrapper(hass): # Mock thermostats are: # Venstar T2000, FW 4.38 # Venstar "colortouch" T7850, FW 5.1 @@ -37,7 +37,7 @@ def mock_venstar_devices(f): f"http://venstar-{model}.localdomain/query/alerts", text=load_fixture(f"venstar/{model}_alerts.json"), ) - await f(hass) + return await f(hass) return wrapper @@ -54,7 +54,7 @@ async def async_init_integration( } for model in TEST_MODELS ] - config = {CLIMATE_DOMAIN: platform_config} + config = {DOMAIN: platform_config} - await async_setup_component(hass, CLIMATE_DOMAIN, config) + await async_setup_component(hass, DOMAIN, config) await hass.async_block_till_done() diff --git a/tests/components/vera/common.py b/tests/components/vera/common.py index c5e3a5d4931..5e0fac6c84a 100644 --- a/tests/components/vera/common.py +++ b/tests/components/vera/common.py @@ -83,7 +83,7 @@ def new_simple_controller_config( class ComponentFactory: """Factory class.""" - def __init__(self, vera_controller_class_mock) -> None: + def __init__(self, vera_controller_class_mock): """Initialize the factory.""" self.vera_controller_class_mock = vera_controller_class_mock diff --git a/tests/components/vera/test_config_flow.py b/tests/components/vera/test_config_flow.py index 9572645f6d2..057945450e3 100644 --- a/tests/components/vera/test_config_flow.py +++ b/tests/components/vera/test_config_flow.py @@ -5,11 +5,7 @@ from unittest.mock import MagicMock, patch from requests.exceptions import RequestException from homeassistant import config_entries -from homeassistant.components.vera.const import ( - CONF_CONTROLLER, - CONF_LEGACY_UNIQUE_ID, - DOMAIN, -) +from homeassistant.components.vera import CONF_CONTROLLER, CONF_LEGACY_UNIQUE_ID, DOMAIN from homeassistant.const import CONF_EXCLUDE, CONF_LIGHTS, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType diff --git a/tests/components/vera/test_lock.py b/tests/components/vera/test_lock.py index d24a0e1265f..4139a494e1f 100644 --- a/tests/components/vera/test_lock.py +++ b/tests/components/vera/test_lock.py @@ -4,7 +4,7 @@ from unittest.mock import MagicMock import pyvera as pv -from homeassistant.components.lock import LockState +from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import HomeAssistant from .common import ComponentFactory, new_simple_controller_config @@ -29,7 +29,7 @@ async def test_lock( ) update_callback = component_data.controller_data[0].update_callback - assert hass.states.get(entity_id).state == LockState.UNLOCKED + assert hass.states.get(entity_id).state == STATE_UNLOCKED await hass.services.async_call( "lock", @@ -41,7 +41,7 @@ async def test_lock( 
vera_device.is_locked.return_value = True update_callback(vera_device) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == LockState.LOCKED + assert hass.states.get(entity_id).state == STATE_LOCKED await hass.services.async_call( "lock", @@ -53,4 +53,4 @@ async def test_lock( vera_device.is_locked.return_value = False update_callback(vera_device) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == LockState.UNLOCKED + assert hass.states.get(entity_id).state == STATE_UNLOCKED diff --git a/tests/components/verisure/conftest.py b/tests/components/verisure/conftest.py index 5aafcda2bb3..03086ac2ead 100644 --- a/tests/components/verisure/conftest.py +++ b/tests/components/verisure/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.verisure.const import CONF_GIID, DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/verisure/test_config_flow.py b/tests/components/verisure/test_config_flow.py index e6dd11669d1..cf478b093c0 100644 --- a/tests/components/verisure/test_config_flow.py +++ b/tests/components/verisure/test_config_flow.py @@ -352,7 +352,15 @@ async def test_reauth_flow( """Test a reauthentication flow.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result.get("step_id") == "reauth_confirm" assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} @@ -387,7 +395,15 @@ async def test_reauth_flow_with_mfa( """Test a reauthentication flow.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) assert result.get("step_id") == "reauth_confirm" assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} @@ -450,7 +466,15 @@ async def test_reauth_flow_errors( """Test a reauthentication flow.""" mock_config_entry.add_to_hass(hass) - result = await mock_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) mock_verisure_config_flow.login.side_effect = side_effect result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/vesync/snapshots/test_diagnostics.ambr b/tests/components/vesync/snapshots/test_diagnostics.ambr index 54ed8acf2d7..fcb2cc7b286 100644 --- a/tests/components/vesync/snapshots/test_diagnostics.ambr +++ b/tests/components/vesync/snapshots/test_diagnostics.ambr @@ -38,7 +38,13 @@ 'setDisplay', 'setLevel', ]), - '_config_dict': dict({ + 'cid': 'abcdefghabcdefghabcdefghabcdefgh', + 'config': dict({ + 'auto_target_humidity': 60, + 'automatic_stop': True, + 'display': True, + }), + 'config_dict': dict({ 'features': list([ 
'warm_mist', 'nightlight', @@ -65,7 +71,6 @@ 'LUH-A602S-WEUR', 'LUH-A602S-WEU', 'LUH-A602S-WJP', - 'LUH-A602S-WUSC', ]), 'module': 'VeSyncHumid200300S', 'warm_mist_levels': list([ @@ -75,16 +80,6 @@ 3, ]), }), - '_features': list([ - 'warm_mist', - 'nightlight', - ]), - 'cid': 'abcdefghabcdefghabcdefghabcdefgh', - 'config': dict({ - 'auto_target_humidity': 60, - 'automatic_stop': True, - 'display': True, - }), 'config_module': 'WFON_AHM_LUH-A602S-WUS_US', 'connection_status': 'online', 'connection_type': 'WiFi+BTOnboarding+BTNotify', @@ -110,6 +105,10 @@ 'device_type': 'LUH-A602S-WUS', 'enabled': False, 'extension': None, + 'features': list([ + 'warm_mist', + 'nightlight', + ]), 'mac_id': '**REDACTED**', 'manager': '**REDACTED**', 'mist_levels': list([ @@ -204,7 +203,7 @@ 'auto', 'sleep', ]), - 'supported_features': 57, + 'supported_features': 9, }), 'entity_id': 'fan.fan', 'last_changed': str, diff --git a/tests/components/vesync/snapshots/test_fan.ambr b/tests/components/vesync/snapshots/test_fan.ambr index 60af4ae3d5b..a9210447f1e 100644 --- a/tests/components/vesync/snapshots/test_fan.ambr +++ b/tests/components/vesync/snapshots/test_fan.ambr @@ -22,7 +22,6 @@ }), 'manufacturer': 'VeSync', 'model': 'LV-PUR131S', - 'model_id': None, 'name': 'Air Purifier 131s', 'name_by_user': None, 'primary_config_entry': , @@ -66,8 +65,8 @@ 'original_name': None, 'platform': 'vesync', 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vesync', + 'supported_features': , + 'translation_key': None, 'unique_id': 'air-purifier', 'unit_of_measurement': None, }), @@ -81,7 +80,7 @@ 'auto', 'sleep', ]), - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.air_purifier_131s', @@ -114,7 +113,6 @@ }), 'manufacturer': 'VeSync', 'model': 'Core200S', - 'model_id': None, 'name': 'Air Purifier 200s', 'name_by_user': None, 'primary_config_entry': , @@ -157,8 +155,8 @@ 'original_name': None, 'platform': 'vesync', 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vesync', + 'supported_features': , + 'translation_key': None, 'unique_id': 'asd_sdfKIHG7IJHGwJGJ7GJ_ag5h3G55', 'unit_of_measurement': None, }), @@ -178,7 +176,7 @@ 'sleep', ]), 'screen_status': True, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.air_purifier_200s', @@ -211,7 +209,6 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C401S-WJP', - 'model_id': None, 'name': 'Air Purifier 400s', 'name_by_user': None, 'primary_config_entry': , @@ -255,8 +252,8 @@ 'original_name': None, 'platform': 'vesync', 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vesync', + 'supported_features': , + 'translation_key': None, 'unique_id': '400s-purifier', 'unit_of_measurement': None, }), @@ -277,7 +274,7 @@ 'sleep', ]), 'screen_status': True, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.air_purifier_400s', @@ -310,7 +307,6 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C601S-WUS', - 'model_id': None, 'name': 'Air Purifier 600s', 'name_by_user': None, 'primary_config_entry': , @@ -354,8 +350,8 @@ 'original_name': None, 'platform': 'vesync', 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vesync', + 'supported_features': , + 'translation_key': None, 'unique_id': '600s-purifier', 'unit_of_measurement': None, }), @@ -376,7 +372,7 @@ 'sleep', ]), 'screen_status': True, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 
'fan.air_purifier_600s', @@ -409,7 +405,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100', - 'model_id': None, 'name': 'Dimmable Light', 'name_by_user': None, 'primary_config_entry': , @@ -447,7 +442,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWD16', - 'model_id': None, 'name': 'Dimmer Switch', 'name_by_user': None, 'primary_config_entry': , @@ -501,7 +495,6 @@ }), 'manufacturer': 'VeSync', 'model': 'wifi-switch-1.3', - 'model_id': None, 'name': 'Outlet', 'name_by_user': None, 'primary_config_entry': , @@ -539,7 +532,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100CW', - 'model_id': None, 'name': 'Temperature Light', 'name_by_user': None, 'primary_config_entry': , @@ -577,7 +569,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWL01', - 'model_id': None, 'name': 'Wall Switch', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/vesync/snapshots/test_light.ambr b/tests/components/vesync/snapshots/test_light.ambr index 36694ae3ef6..c2c9854fa9f 100644 --- a/tests/components/vesync/snapshots/test_light.ambr +++ b/tests/components/vesync/snapshots/test_light.ambr @@ -22,7 +22,6 @@ }), 'manufacturer': 'VeSync', 'model': 'LV-PUR131S', - 'model_id': None, 'name': 'Air Purifier 131s', 'name_by_user': None, 'primary_config_entry': , @@ -60,7 +59,6 @@ }), 'manufacturer': 'VeSync', 'model': 'Core200S', - 'model_id': None, 'name': 'Air Purifier 200s', 'name_by_user': None, 'primary_config_entry': , @@ -98,7 +96,6 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C401S-WJP', - 'model_id': None, 'name': 'Air Purifier 400s', 'name_by_user': None, 'primary_config_entry': , @@ -136,7 +133,6 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C601S-WUS', - 'model_id': None, 'name': 'Air Purifier 600s', 'name_by_user': None, 'primary_config_entry': , @@ -174,7 +170,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100', - 'model_id': None, 'name': 'Dimmable Light', 'name_by_user': None, 'primary_config_entry': , @@ -264,7 +259,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWD16', - 'model_id': None, 'name': 'Dimmer Switch', 'name_by_user': None, 'primary_config_entry': , @@ -372,7 +366,6 @@ }), 'manufacturer': 'VeSync', 'model': 'wifi-switch-1.3', - 'model_id': None, 'name': 'Outlet', 'name_by_user': None, 'primary_config_entry': , @@ -410,7 +403,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100CW', - 'model_id': None, 'name': 'Temperature Light', 'name_by_user': None, 'primary_config_entry': , @@ -515,7 +507,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWL01', - 'model_id': None, 'name': 'Wall Switch', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/vesync/snapshots/test_sensor.ambr b/tests/components/vesync/snapshots/test_sensor.ambr index 11d931e023a..97013b4e9ce 100644 --- a/tests/components/vesync/snapshots/test_sensor.ambr +++ b/tests/components/vesync/snapshots/test_sensor.ambr @@ -22,7 +22,6 @@ }), 'manufacturer': 'VeSync', 'model': 'LV-PUR131S', - 'model_id': None, 'name': 'Air Purifier 131s', 'name_by_user': None, 'primary_config_entry': , @@ -152,7 +151,6 @@ }), 'manufacturer': 'VeSync', 'model': 'Core200S', - 'model_id': None, 'name': 'Air Purifier 200s', 'name_by_user': None, 'primary_config_entry': , @@ -238,7 +236,6 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C401S-WJP', - 'model_id': None, 'name': 'Air Purifier 400s', 'name_by_user': None, 'primary_config_entry': , @@ -417,7 +414,6 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C601S-WUS', - 'model_id': None, 'name': 'Air Purifier 600s', 'name_by_user': None, 'primary_config_entry': , 
@@ -596,7 +592,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100', - 'model_id': None, 'name': 'Dimmable Light', 'name_by_user': None, 'primary_config_entry': , @@ -634,7 +629,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWD16', - 'model_id': None, 'name': 'Dimmer Switch', 'name_by_user': None, 'primary_config_entry': , @@ -688,7 +682,6 @@ }), 'manufacturer': 'VeSync', 'model': 'wifi-switch-1.3', - 'model_id': None, 'name': 'Outlet', 'name_by_user': None, 'primary_config_entry': , @@ -1020,7 +1013,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100CW', - 'model_id': None, 'name': 'Temperature Light', 'name_by_user': None, 'primary_config_entry': , @@ -1058,7 +1050,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWL01', - 'model_id': None, 'name': 'Wall Switch', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/vesync/snapshots/test_switch.ambr b/tests/components/vesync/snapshots/test_switch.ambr index 4b271ee55d9..86b3b0ff5cd 100644 --- a/tests/components/vesync/snapshots/test_switch.ambr +++ b/tests/components/vesync/snapshots/test_switch.ambr @@ -22,7 +22,6 @@ }), 'manufacturer': 'VeSync', 'model': 'LV-PUR131S', - 'model_id': None, 'name': 'Air Purifier 131s', 'name_by_user': None, 'primary_config_entry': , @@ -60,7 +59,6 @@ }), 'manufacturer': 'VeSync', 'model': 'Core200S', - 'model_id': None, 'name': 'Air Purifier 200s', 'name_by_user': None, 'primary_config_entry': , @@ -98,7 +96,6 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C401S-WJP', - 'model_id': None, 'name': 'Air Purifier 400s', 'name_by_user': None, 'primary_config_entry': , @@ -136,7 +133,6 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C601S-WUS', - 'model_id': None, 'name': 'Air Purifier 600s', 'name_by_user': None, 'primary_config_entry': , @@ -174,7 +170,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100', - 'model_id': None, 'name': 'Dimmable Light', 'name_by_user': None, 'primary_config_entry': , @@ -212,7 +207,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWD16', - 'model_id': None, 'name': 'Dimmer Switch', 'name_by_user': None, 'primary_config_entry': , @@ -266,7 +260,6 @@ }), 'manufacturer': 'VeSync', 'model': 'wifi-switch-1.3', - 'model_id': None, 'name': 'Outlet', 'name_by_user': None, 'primary_config_entry': , @@ -348,7 +341,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100CW', - 'model_id': None, 'name': 'Temperature Light', 'name_by_user': None, 'primary_config_entry': , @@ -386,7 +378,6 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWL01', - 'model_id': None, 'name': 'Wall Switch', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/vicare/__init__.py b/tests/components/vicare/__init__.py index c2a1ab49e5c..329a3b04d58 100644 --- a/tests/components/vicare/__init__.py +++ b/tests/components/vicare/__init__.py @@ -6,9 +6,6 @@ from typing import Final from homeassistant.components.vicare.const import CONF_HEATING_TYPE from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry MODULE = "homeassistant.components.vicare" @@ -20,11 +17,3 @@ ENTRY_CONFIG: Final[dict[str, str]] = { } MOCK_MAC = "B874241B7B9" - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/vicare/conftest.py 
b/tests/components/vicare/conftest.py index aadf85e7081..6899839a0e1 100644 --- a/tests/components/vicare/conftest.py +++ b/tests/components/vicare/conftest.py @@ -2,18 +2,18 @@ from __future__ import annotations -from collections.abc import AsyncGenerator, Generator from dataclasses import dataclass from unittest.mock import AsyncMock, Mock, patch import pytest from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareService import ViCareDeviceAccessor, readFeature +from typing_extensions import AsyncGenerator, Generator from homeassistant.components.vicare.const import DOMAIN from homeassistant.core import HomeAssistant -from . import ENTRY_CONFIG, MODULE, setup_integration +from . import ENTRY_CONFIG, MODULE from tests.common import MockConfigEntry, load_json_object_fixture @@ -40,7 +40,7 @@ class MockPyViCare: ), f"deviceId{idx}", f"model{idx}", - "online", + f"online{idx}", ) ) @@ -87,25 +87,10 @@ async def mock_vicare_gas_boiler( f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures), ): - await setup_integration(hass, mock_config_entry) + mock_config_entry.add_to_hass(hass) - yield mock_config_entry - - -@pytest.fixture -async def mock_vicare_room_sensors( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> AsyncGenerator[MockConfigEntry]: - """Return a mocked ViCare API representing multiple room sensor devices.""" - fixtures: list[Fixture] = [ - Fixture({"type:climateSensor"}, "vicare/RoomSensor1.json"), - Fixture({"type:climateSensor"}, "vicare/RoomSensor2.json"), - ] - with patch( - f"{MODULE}.vicare_login", - return_value=MockPyViCare(fixtures), - ): - await setup_integration(hass, mock_config_entry) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() yield mock_config_entry diff --git a/tests/components/vicare/fixtures/RoomSensor1.json b/tests/components/vicare/fixtures/RoomSensor1.json deleted file mode 100644 index b970e54a48c..00000000000 --- a/tests/components/vicare/fixtures/RoomSensor1.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "data": [ - { - "apiVersion": 1, - "commands": {}, - "deviceId": "zigbee-d87a3bfffe5d844a", - "feature": "device.messages.errors.raw", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "entries": { - "type": "array", - "value": [] - } - }, - "timestamp": "2024-03-01T04:40:59.911Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-d87a3bfffe5d844a/features/device.messages.errors.raw" - }, - { - "apiVersion": 1, - "commands": { - "setName": { - "isExecutable": true, - "name": "setName", - "params": { - "name": { - "constraints": { - "maxLength": 40, - "minLength": 1, - "regEx": "^[\\p{L}0-9]+( [\\p{L}0-9]+)*$" - }, - "required": true, - "type": "string" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-d87a3bfffe5d844a/features/device.name/commands/setName" - } - }, - "deviceId": "zigbee-d87a3bfffe5d844a", - "feature": "device.name", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "name": { - "type": "string", - "value": "Office" - } - }, - "timestamp": "2024-03-01T04:40:59.911Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-d87a3bfffe5d844a/features/device.name" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "zigbee-d87a3bfffe5d844a", - 
"feature": "device.sensors.humidity", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "status": { - "type": "string", - "value": "connected" - }, - "value": { - "type": "number", - "unit": "percent", - "value": 53 - } - }, - "timestamp": "2024-03-02T07:51:07.303Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-d87a3bfffe5d844a/features/device.sensors.humidity" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "zigbee-d87a3bfffe5d844a", - "feature": "device.sensors.temperature", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "status": { - "type": "string", - "value": "connected" - }, - "value": { - "type": "number", - "unit": "celsius", - "value": 17.5 - } - }, - "timestamp": "2024-03-02T07:52:42.043Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-d87a3bfffe5d844a/features/device.sensors.temperature" - } - ] -} diff --git a/tests/components/vicare/fixtures/RoomSensor2.json b/tests/components/vicare/fixtures/RoomSensor2.json deleted file mode 100644 index 81a1d935700..00000000000 --- a/tests/components/vicare/fixtures/RoomSensor2.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "data": [ - { - "apiVersion": 1, - "commands": {}, - "deviceId": "zigbee-5cc7c1fffea33a3b", - "feature": "device.messages.errors.raw", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "entries": { - "type": "array", - "value": [] - } - }, - "timestamp": "2024-03-01T04:40:59.911Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-5cc7c1fffea33a3b/features/device.messages.errors.raw" - }, - { - "apiVersion": 1, - "commands": { - "setName": { - "isExecutable": true, - "name": "setName", - "params": { - "name": { - "constraints": { - "maxLength": 40, - "minLength": 1, - "regEx": "^[\\p{L}0-9]+( [\\p{L}0-9]+)*$" - }, - "required": true, - "type": "string" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-5cc7c1fffea33a3b/features/device.name/commands/setName" - } - }, - "deviceId": "zigbee-5cc7c1fffea33a3b", - "feature": "device.name", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "name": { - "type": "string", - "value": "" - } - }, - "timestamp": "2024-03-01T04:40:59.911Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-5cc7c1fffea33a3b/features/device.name" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "zigbee-5cc7c1fffea33a3b", - "feature": "device.sensors.humidity", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "status": { - "type": "string", - "value": "connected" - }, - "value": { - "type": "number", - "unit": "percent", - "value": 52 - } - }, - "timestamp": "2024-03-02T07:42:06.922Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-5cc7c1fffea33a3b/features/device.sensors.humidity" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "zigbee-5cc7c1fffea33a3b", - "feature": "device.sensors.temperature", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "status": { - "type": "string", - "value": "connected" - 
}, - "value": { - "type": "number", - "unit": "celsius", - "value": 16.9 - } - }, - "timestamp": "2024-03-02T07:24:48.056Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/zigbee-5cc7c1fffea33a3b/features/device.sensors.temperature" - } - ] -} diff --git a/tests/components/vicare/fixtures/ViAir300F.json b/tests/components/vicare/fixtures/ViAir300F.json deleted file mode 100644 index 090c7a81ddf..00000000000 --- a/tests/components/vicare/fixtures/ViAir300F.json +++ /dev/null @@ -1,882 +0,0 @@ -{ - "data": [ - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "device.productIdentification", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "product": { - "type": "object", - "value": { - "busAddress": 1, - "busType": "CanExternal", - "productFamily": "B_00028_VA330", - "viessmannIdentificationNumber": "################" - } - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.productIdentification" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "device.messages.errors.raw", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "entries": { - "type": "array", - "value": [] - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.messages.errors.raw" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "device.serial", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "value": { - "type": "string", - "value": "deviceSerialViAir300F" - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": true - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.levels.levelFour", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "volumeFlow": { - "type": "number", - "unit": "cubicMeter/hour", - "value": 234 - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.levels.levelFour" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.levels.levelOne", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "volumeFlow": { - "type": "number", - "unit": "cubicMeter/hour", - "value": 54 - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.levels.levelOne" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.levels.levelThree", - 
"gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "volumeFlow": { - "type": "number", - "unit": "cubicMeter/hour", - "value": 180 - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.levels.levelThree" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.levels.levelTwo", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "volumeFlow": { - "type": "number", - "unit": "cubicMeter/hour", - "value": 125 - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.levels.levelTwo" - }, - { - "apiVersion": 1, - "commands": { - "setMode": { - "isExecutable": true, - "name": "setMode", - "params": { - "mode": { - "constraints": { - "enum": [ - "permanent", - "ventilation", - "sensorOverride", - "sensorDriven" - ] - }, - "required": true, - "type": "string" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.active/commands/setMode" - }, - "setModeContinuousSensorOverride": { - "isExecutable": "true", - "name": "setModeContinuousSensorOverride", - "params": {}, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.active/commands/setModeContinuousSensorOverride" - } - }, - "deviceId": "0", - "feature": "ventilation.operating.modes.active", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "value": { - "type": "string", - "value": "permanent" - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.active" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.operating.modes.filterChange", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": false - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.filterChange" - }, - { - "apiVersion": 1, - "commands": { - "setLevel": { - "isExecutable": true, - "name": "setLevel", - "params": { - "level": { - "constraints": { - "enum": ["levelOne", "levelTwo", "levelThree", "levelFour"] - }, - "required": true, - "type": "string" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.permanent/commands/setLevel" - } - }, - "deviceId": "0", - "feature": "ventilation.operating.modes.permanent", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": true - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.permanent" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": 
"ventilation.operating.modes.sensorDriven", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": false - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.sensorDriven" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.operating.modes.sensorOverride", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": false - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.sensorOverride" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.operating.modes.ventilation", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": false - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.ventilation" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.operating.programs.active", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "value": { - "type": "string", - "value": "levelOne" - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.active" - }, - { - "apiVersion": 1, - "commands": { - "activate": { - "isExecutable": true, - "name": "activate", - "params": { - "timeout": { - "constraints": { - "max": 1440, - "min": 1, - "stepping": 1 - }, - "required": false, - "type": "number" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour/commands/activate" - }, - "deactivate": { - "isExecutable": true, - "name": "deactivate", - "params": {}, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour/commands/deactivate" - }, - "setDefaultRuntime": { - "isExecutable": true, - "name": "setDefaultRuntime", - "params": { - "defaultRuntime": { - "constraints": { - "max": 1440, - "min": 1, - "stepping": 1 - }, - "required": true, - "type": "number" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour/commands/setDefaultRuntime" - }, - "setTimeout": { - "isExecutable": true, - "name": "setTimeout", - "params": { - "timeout": { - "constraints": { - "max": 1440, - "min": 1, - "stepping": 1 - }, - "required": true, - "type": "number" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour/commands/setTimeout" - } - }, - "deviceId": "0", - "feature": "ventilation.operating.programs.forcedLevelFour", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - 
"properties": { - "active": { - "type": "boolean", - "value": false - }, - "defaultRuntime": { - "type": "number", - "unit": "minutes", - "value": 30 - }, - "isActiveWritable": { - "type": "boolean", - "value": true - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.operating.programs.levelFour", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": false - }, - "volumeFlow": { - "type": "number", - "unit": "cubicMeter/hour", - "value": 234 - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.levelFour" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.operating.programs.levelOne", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": true - }, - "volumeFlow": { - "type": "number", - "unit": "cubicMeter/hour", - "value": 54 - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.levelOne" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.operating.programs.levelThree", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": false - }, - "volumeFlow": { - "type": "number", - "unit": "cubicMeter/hour", - "value": 180 - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.levelThree" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.operating.programs.levelTwo", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": false - }, - "volumeFlow": { - "type": "number", - "unit": "cubicMeter/hour", - "value": 125 - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.levelTwo" - }, - { - "apiVersion": 1, - "commands": { - "activate": { - "isExecutable": true, - "name": "activate", - "params": { - "timeout": { - "constraints": { - "max": 1440, - "min": 1, - "stepping": 1 - }, - "required": false, - "type": "number" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent/commands/activate" - }, - "deactivate": { - "isExecutable": true, - "name": "deactivate", - "params": {}, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent/commands/deactivate" - }, - "setDefaultRuntime": { - "isExecutable": true, - "name": "setDefaultRuntime", - "params": { - "defaultRuntime": { - "constraints": { - "max": 1440, - "min": 
1, - "stepping": 1 - }, - "required": true, - "type": "number" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent/commands/setDefaultRuntime" - }, - "setTimeout": { - "isExecutable": true, - "name": "setTimeout", - "params": { - "timeout": { - "constraints": { - "max": 1440, - "min": 1, - "stepping": 1 - }, - "required": true, - "type": "number" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent/commands/setTimeout" - } - }, - "deviceId": "0", - "feature": "ventilation.operating.programs.silent", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": false - }, - "defaultRuntime": { - "type": "number", - "unit": "minutes", - "value": 30 - }, - "isActiveWritable": { - "type": "boolean", - "value": true - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.operating.programs.standby", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": false - }, - "volumeFlow": { - "type": "number", - "unit": "cubicMeter/hour", - "value": 0 - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.standby" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "ventilation.operating.state", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "demand": { - "type": "string", - "value": "ventilation" - }, - "level": { - "type": "string", - "value": "levelOne" - }, - "reason": { - "type": "string", - "value": "permanent" - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.state" - }, - { - "apiVersion": 1, - "commands": { - "activate": { - "isExecutable": true, - "name": "activate", - "params": { - "timeout": { - "constraints": { - "max": 1440, - "min": 1, - "stepping": 1 - }, - "required": false, - "type": "number" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour/commands/activate" - }, - "deactivate": { - "isExecutable": true, - "name": "deactivate", - "params": {}, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour/commands/deactivate" - }, - "setDefaultRuntime": { - "isExecutable": true, - "name": "setDefaultRuntime", - "params": { - "defaultRuntime": { - "constraints": { - "max": 1440, - "min": 1, - "stepping": 1 - }, - "required": true, - "type": "number" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour/commands/setDefaultRuntime" - }, - "setTimeout": { - "isExecutable": true, - "name": 
"setTimeout", - "params": { - "timeout": { - "constraints": { - "max": 1440, - "min": 1, - "stepping": 1 - }, - "required": true, - "type": "number" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour/commands/setTimeout" - } - }, - "deviceId": "0", - "feature": "ventilation.quickmodes.forcedLevelFour", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": false - }, - "defaultRuntime": { - "type": "number", - "unit": "minutes", - "value": 30 - }, - "isActiveWritable": { - "type": "boolean", - "value": true - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour" - }, - { - "apiVersion": 1, - "commands": { - "activate": { - "isExecutable": true, - "name": "activate", - "params": { - "timeout": { - "constraints": { - "max": 1440, - "min": 1, - "stepping": 1 - }, - "required": false, - "type": "number" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent/commands/activate" - }, - "deactivate": { - "isExecutable": true, - "name": "deactivate", - "params": {}, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent/commands/deactivate" - }, - "setDefaultRuntime": { - "isExecutable": true, - "name": "setDefaultRuntime", - "params": { - "defaultRuntime": { - "constraints": { - "max": 1440, - "min": 1, - "stepping": 1 - }, - "required": true, - "type": "number" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent/commands/setDefaultRuntime" - }, - "setTimeout": { - "isExecutable": true, - "name": "setTimeout", - "params": { - "timeout": { - "constraints": { - "max": 1440, - "min": 1, - "stepping": 1 - }, - "required": true, - "type": "number" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent/commands/setTimeout" - } - }, - "deviceId": "0", - "feature": "ventilation.quickmodes.silent", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": false - }, - "defaultRuntime": { - "type": "number", - "unit": "minutes", - "value": 30 - }, - "isActiveWritable": { - "type": "boolean", - "value": true - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent" - }, - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "heating.boiler.serial", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "value": { - "type": "string", - "value": "################" - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.boiler.serial" - }, - { - "apiVersion": 1, - "commands": { - "setSchedule": { - "isExecutable": true, - "name": "setSchedule", - "params": { - 
"newSchedule": { - "constraints": { - "defaultMode": "levelOne", - "maxEntries": 4, - "modes": ["levelTwo", "levelThree"], - "overlapAllowed": false, - "resolution": 10 - }, - "required": true, - "type": "Schedule" - } - }, - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.schedule/commands/setSchedule" - } - }, - "deviceId": "0", - "feature": "ventilation.schedule", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "active": { - "type": "boolean", - "value": false - }, - "entries": { - "type": "Schedule", - "value": { - "fri": [ - { - "end": "22:00", - "mode": "levelTwo", - "position": 0, - "start": "06:00" - } - ], - "mon": [ - { - "end": "22:00", - "mode": "levelTwo", - "position": 0, - "start": "06:00" - } - ], - "sat": [ - { - "end": "22:00", - "mode": "levelTwo", - "position": 0, - "start": "06:00" - } - ], - "sun": [ - { - "end": "22:00", - "mode": "levelTwo", - "position": 0, - "start": "06:00" - } - ], - "thu": [ - { - "end": "22:00", - "mode": "levelTwo", - "position": 0, - "start": "06:00" - } - ], - "tue": [ - { - "end": "22:00", - "mode": "levelTwo", - "position": 0, - "start": "06:00" - } - ], - "wed": [ - { - "end": "22:00", - "mode": "levelTwo", - "position": 0, - "start": "06:00" - } - ] - } - } - }, - "timestamp": "2024-03-20T01:29:35.549Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.schedule" - } - ] -} diff --git a/tests/components/vicare/fixtures/Vitodens300W.json b/tests/components/vicare/fixtures/Vitodens300W.json index d183146e94d..4cf67ebe0f7 100644 --- a/tests/components/vicare/fixtures/Vitodens300W.json +++ b/tests/components/vicare/fixtures/Vitodens300W.json @@ -1,22 +1,5 @@ { "data": [ - { - "apiVersion": 1, - "commands": {}, - "deviceId": "0", - "feature": "device.serial", - "gatewayId": "################", - "isEnabled": true, - "isReady": true, - "properties": { - "value": { - "type": "string", - "value": "deviceSerialVitodens300W" - } - }, - "timestamp": "2024-07-30T20:03:40.073Z", - "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial" - }, { "properties": {}, "commands": {}, diff --git a/tests/components/vicare/fixtures/dummy-device-no-serial.json b/tests/components/vicare/fixtures/dummy-device-no-serial.json deleted file mode 100644 index 268c73f0e37..00000000000 --- a/tests/components/vicare/fixtures/dummy-device-no-serial.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "data": [] -} diff --git a/tests/components/vicare/snapshots/test_binary_sensor.ambr b/tests/components/vicare/snapshots/test_binary_sensor.ambr index f3e4d4e1c84..7454f914435 100644 --- a/tests/components/vicare/snapshots/test_binary_sensor.ambr +++ b/tests/components/vicare/snapshots/test_binary_sensor.ambr @@ -1,378 +1,4 @@ # serializer version: 1 -# name: test_all_entities[binary_sensor.model0_burner-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.model0_burner', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 
'Burner', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'burner', - 'unique_id': 'gateway0_deviceSerialVitodens300W-burner_active-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.model0_burner-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'model0 Burner', - }), - 'context': , - 'entity_id': 'binary_sensor.model0_burner', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[binary_sensor.model0_circulation_pump-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.model0_circulation_pump', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Circulation pump', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'circulation_pump', - 'unique_id': 'gateway0_deviceSerialVitodens300W-circulationpump_active-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.model0_circulation_pump-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'model0 Circulation pump', - }), - 'context': , - 'entity_id': 'binary_sensor.model0_circulation_pump', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[binary_sensor.model0_circulation_pump_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.model0_circulation_pump_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Circulation pump', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'circulation_pump', - 'unique_id': 'gateway0_deviceSerialVitodens300W-circulationpump_active-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.model0_circulation_pump_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'model0 Circulation pump', - }), - 'context': , - 'entity_id': 'binary_sensor.model0_circulation_pump_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[binary_sensor.model0_dhw_charging-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.model0_dhw_charging', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 
'original_icon': None, - 'original_name': 'DHW charging', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'domestic_hot_water_charging', - 'unique_id': 'gateway0_deviceSerialVitodens300W-charging_active', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.model0_dhw_charging-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'model0 DHW charging', - }), - 'context': , - 'entity_id': 'binary_sensor.model0_dhw_charging', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[binary_sensor.model0_dhw_circulation_pump-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.model0_dhw_circulation_pump', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'DHW circulation pump', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'domestic_hot_water_circulation_pump', - 'unique_id': 'gateway0_deviceSerialVitodens300W-dhw_circulationpump_active', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.model0_dhw_circulation_pump-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'model0 DHW circulation pump', - }), - 'context': , - 'entity_id': 'binary_sensor.model0_dhw_circulation_pump', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[binary_sensor.model0_dhw_pump-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.model0_dhw_pump', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'DHW pump', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'domestic_hot_water_pump', - 'unique_id': 'gateway0_deviceSerialVitodens300W-dhw_pump_active', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.model0_dhw_pump-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'model0 DHW pump', - }), - 'context': , - 'entity_id': 'binary_sensor.model0_dhw_pump', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[binary_sensor.model0_frost_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.model0_frost_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': 
set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Frost protection', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'frost_protection', - 'unique_id': 'gateway0_deviceSerialVitodens300W-frost_protection_active-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.model0_frost_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 Frost protection', - }), - 'context': , - 'entity_id': 'binary_sensor.model0_frost_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[binary_sensor.model0_frost_protection_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.model0_frost_protection_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Frost protection', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'frost_protection', - 'unique_id': 'gateway0_deviceSerialVitodens300W-frost_protection_active-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.model0_frost_protection_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 Frost protection', - }), - 'context': , - 'entity_id': 'binary_sensor.model0_frost_protection_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_binary_sensors[burner] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/vicare/snapshots/test_button.ambr b/tests/components/vicare/snapshots/test_button.ambr deleted file mode 100644 index 9fadc6a983f..00000000000 --- a/tests/components/vicare/snapshots/test_button.ambr +++ /dev/null @@ -1,47 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[button.model0_activate_one_time_charge-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.model0_activate_one_time_charge', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Activate one-time charge', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'activate_onetimecharge', - 'unique_id': 'gateway0_deviceSerialVitodens300W-activate_onetimecharge', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[button.model0_activate_one_time_charge-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 Activate one-time charge', - }), - 'context': , - 'entity_id': 'button.model0_activate_one_time_charge', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git 
a/tests/components/vicare/snapshots/test_climate.ambr b/tests/components/vicare/snapshots/test_climate.ambr deleted file mode 100644 index aea0ea879c2..00000000000 --- a/tests/components/vicare/snapshots/test_climate.ambr +++ /dev/null @@ -1,167 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[climate.model0_heating-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - ]), - 'max_temp': 37, - 'min_temp': 3, - 'preset_modes': list([ - 'comfort', - 'eco', - 'home', - 'sleep', - ]), - 'target_temp_step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.model0_heating', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Heating', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'heating', - 'unique_id': 'gateway0_deviceSerialVitodens300W-heating-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[climate.model0_heating-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'model0 Heating', - 'hvac_action': , - 'hvac_modes': list([ - ]), - 'max_temp': 37, - 'min_temp': 3, - 'preset_mode': None, - 'preset_modes': list([ - 'comfort', - 'eco', - 'home', - 'sleep', - ]), - 'supported_features': , - 'target_temp_step': 1, - 'temperature': None, - 'vicare_programs': list([ - 'comfort', - 'eco', - 'external', - 'holiday', - 'normal', - 'reduced', - 'standby', - ]), - }), - 'context': , - 'entity_id': 'climate.model0_heating', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_all_entities[climate.model0_heating_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - ]), - 'max_temp': 37, - 'min_temp': 3, - 'preset_modes': list([ - 'comfort', - 'eco', - 'home', - 'sleep', - ]), - 'target_temp_step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.model0_heating_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Heating', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'heating', - 'unique_id': 'gateway0_deviceSerialVitodens300W-heating-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[climate.model0_heating_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'model0 Heating', - 'hvac_action': , - 'hvac_modes': list([ - ]), - 'max_temp': 37, - 'min_temp': 3, - 'preset_mode': None, - 'preset_modes': list([ - 'comfort', - 'eco', - 'home', - 'sleep', - ]), - 'supported_features': , - 'target_temp_step': 1, - 'temperature': None, - 'vicare_programs': list([ - 'comfort', - 'eco', - 'external', - 'holiday', - 'normal', - 'reduced', - 'standby', - ]), - }), - 'context': , - 'entity_id': 'climate.model0_heating_2', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/vicare/snapshots/test_diagnostics.ambr b/tests/components/vicare/snapshots/test_diagnostics.ambr index ae9b05389c7..dfc29d46cc2 100644 --- a/tests/components/vicare/snapshots/test_diagnostics.ambr +++ b/tests/components/vicare/snapshots/test_diagnostics.ambr @@ -4,24 +4,6 @@ 'data': list([ dict({ 'data': list([ - dict({ - 'apiVersion': 1, - 'commands': dict({ - }), - 'deviceId': '0', - 'feature': 'device.serial', - 'gatewayId': '################', - 'isEnabled': True, - 'isReady': True, - 'properties': dict({ - 'value': dict({ - 'type': 'string', - 'value': 'deviceSerialVitodens300W', - }), - }), - 'timestamp': '2024-07-30T20:03:40.073Z', - 'uri': 'https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial', - }), dict({ 'apiVersion': 1, 'commands': dict({ @@ -4721,8 +4703,6 @@ 'username': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'vicare', 'entry_id': '1234', 'minor_version': 1, diff --git a/tests/components/vicare/snapshots/test_fan.ambr b/tests/components/vicare/snapshots/test_fan.ambr deleted file mode 100644 index 8ec4bc41d8d..00000000000 --- a/tests/components/vicare/snapshots/test_fan.ambr +++ /dev/null @@ -1,64 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[fan.model0_ventilation-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'preset_modes': list([ - , - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'fan', - 'entity_category': None, - 'entity_id': 'fan.model0_ventilation', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Ventilation', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'ventilation', - 'unique_id': 'gateway0_deviceSerialViAir300F-ventilation', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[fan.model0_ventilation-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 Ventilation', - 'percentage': 0, - 'percentage_step': 25.0, - 'preset_mode': None, - 'preset_modes': list([ - , - , - , - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'fan.model0_ventilation', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/vicare/snapshots/test_number.ambr b/tests/components/vicare/snapshots/test_number.ambr deleted file mode 100644 index 5a030fc0213..00000000000 --- a/tests/components/vicare/snapshots/test_number.ambr +++ /dev/null @@ -1,624 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[number.model0_comfort_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.model0_comfort_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 
'Comfort temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'comfort_temperature', - 'unique_id': 'gateway0_deviceSerialVitodens300W-comfort_temperature-0', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[number.model0_comfort_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 Comfort temperature', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.model0_comfort_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[number.model0_comfort_temperature_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.model0_comfort_temperature_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Comfort temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'comfort_temperature', - 'unique_id': 'gateway0_deviceSerialVitodens300W-comfort_temperature-1', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[number.model0_comfort_temperature_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 Comfort temperature', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.model0_comfort_temperature_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[number.model0_heating_curve_shift-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 40, - 'min': -13, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.model0_heating_curve_shift', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Heating curve shift', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'heating_curve_shift', - 'unique_id': 'gateway0_deviceSerialVitodens300W-heating curve shift-0', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[number.model0_heating_curve_shift-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 Heating curve shift', - 'max': 40, - 'min': -13, - 'mode': , - 'step': 1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.model0_heating_curve_shift', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[number.model0_heating_curve_shift_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': dict({ - 'max': 40, - 'min': -13, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.model0_heating_curve_shift_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Heating curve shift', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'heating_curve_shift', - 'unique_id': 'gateway0_deviceSerialVitodens300W-heating curve shift-1', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[number.model0_heating_curve_shift_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 Heating curve shift', - 'max': 40, - 'min': -13, - 'mode': , - 'step': 1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.model0_heating_curve_shift_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[number.model0_heating_curve_slope-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 3.5, - 'min': 0.2, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.model0_heating_curve_slope', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Heating curve slope', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'heating_curve_slope', - 'unique_id': 'gateway0_deviceSerialVitodens300W-heating curve slope-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[number.model0_heating_curve_slope-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 Heating curve slope', - 'max': 3.5, - 'min': 0.2, - 'mode': , - 'step': 0.1, - }), - 'context': , - 'entity_id': 'number.model0_heating_curve_slope', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[number.model0_heating_curve_slope_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 3.5, - 'min': 0.2, - 'mode': , - 'step': 0.1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.model0_heating_curve_slope_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Heating curve slope', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'heating_curve_slope', - 'unique_id': 'gateway0_deviceSerialVitodens300W-heating curve slope-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[number.model0_heating_curve_slope_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 Heating curve slope', - 
'max': 3.5, - 'min': 0.2, - 'mode': , - 'step': 0.1, - }), - 'context': , - 'entity_id': 'number.model0_heating_curve_slope_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[number.model0_normal_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.model0_normal_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Normal temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'normal_temperature', - 'unique_id': 'gateway0_deviceSerialVitodens300W-normal_temperature-0', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[number.model0_normal_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 Normal temperature', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.model0_normal_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[number.model0_normal_temperature_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.model0_normal_temperature_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Normal temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'normal_temperature', - 'unique_id': 'gateway0_deviceSerialVitodens300W-normal_temperature-1', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[number.model0_normal_temperature_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 Normal temperature', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.model0_normal_temperature_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[number.model0_reduced_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.model0_reduced_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Reduced 
temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reduced_temperature', - 'unique_id': 'gateway0_deviceSerialVitodens300W-reduced_temperature-0', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[number.model0_reduced_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 Reduced temperature', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.model0_reduced_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[number.model0_reduced_temperature_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.model0_reduced_temperature_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Reduced temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reduced_temperature', - 'unique_id': 'gateway0_deviceSerialVitodens300W-reduced_temperature-1', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[number.model0_reduced_temperature_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 Reduced temperature', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1.0, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.model0_reduced_temperature_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_all_entities[number.model0_dhw_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.model0_dhw_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'DHW temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dhw_temperature', - 'unique_id': 'gateway0_deviceSerialVitodens300W-dhw_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[number.model0_dhw_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 DHW temperature', - 'max': 100.0, - 'min': 0.0, - 'mode': , - 'step': 1, - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'number.model0_dhw_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- diff --git a/tests/components/vicare/snapshots/test_sensor.ambr b/tests/components/vicare/snapshots/test_sensor.ambr deleted file mode 100644 index 793f3e87611..00000000000 --- 
a/tests/components/vicare/snapshots/test_sensor.ambr +++ /dev/null @@ -1,1256 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[sensor.model0_boiler_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_boiler_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Boiler temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'boiler_temperature', - 'unique_id': 'gateway0_deviceSerialVitodens300W-boiler_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.model0_boiler_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 Boiler temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.model0_boiler_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '63', - }) -# --- -# name: test_all_entities[sensor.model0_burner_hours-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.model0_burner_hours', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Burner hours', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'burner_hours', - 'unique_id': 'gateway0_deviceSerialVitodens300W-burner_hours-0', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.model0_burner_hours-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 Burner hours', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.model0_burner_hours', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '18726.3', - }) -# --- -# name: test_all_entities[sensor.model0_burner_modulation-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_burner_modulation', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Burner modulation', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'burner_modulation', - 'unique_id': 'gateway0_deviceSerialVitodens300W-burner_modulation-0', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.model0_burner_modulation-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 Burner 
modulation', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.model0_burner_modulation', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.model0_burner_starts-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.model0_burner_starts', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Burner starts', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'burner_starts', - 'unique_id': 'gateway0_deviceSerialVitodens300W-burner_starts-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.model0_burner_starts-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 Burner starts', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.model0_burner_starts', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '14315', - }) -# --- -# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_month-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_dhw_gas_consumption_this_month', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DHW gas consumption this month', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'hotwater_gas_consumption_heating_this_month', - 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_gas_consumption_heating_this_month', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_month-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 DHW gas consumption this month', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.model0_dhw_gas_consumption_this_month', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '805', - }) -# --- -# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_week-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_dhw_gas_consumption_this_week', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DHW gas consumption this week', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'hotwater_gas_consumption_heating_this_week', - 'unique_id': 
'gateway0_deviceSerialVitodens300W-hotwater_gas_consumption_heating_this_week', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_week-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 DHW gas consumption this week', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.model0_dhw_gas_consumption_this_week', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '84', - }) -# --- -# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_year-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_dhw_gas_consumption_this_year', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DHW gas consumption this year', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'hotwater_gas_consumption_heating_this_year', - 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_gas_consumption_heating_this_year', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_year-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 DHW gas consumption this year', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.model0_dhw_gas_consumption_this_year', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8203', - }) -# --- -# name: test_all_entities[sensor.model0_dhw_gas_consumption_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_dhw_gas_consumption_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DHW gas consumption today', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'hotwater_gas_consumption_today', - 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_gas_consumption_today', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.model0_dhw_gas_consumption_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 DHW gas consumption today', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.model0_dhw_gas_consumption_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '22', - }) -# --- -# name: test_all_entities[sensor.model0_dhw_max_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_dhw_max_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': 
set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'DHW max temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'hotwater_max_temperature', - 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_max_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.model0_dhw_max_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 DHW max temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.model0_dhw_max_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '60', - }) -# --- -# name: test_all_entities[sensor.model0_dhw_min_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_dhw_min_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'DHW min temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'hotwater_min_temperature', - 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_min_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.model0_dhw_min_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 DHW min temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.model0_dhw_min_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10', - }) -# --- -# name: test_all_entities[sensor.model0_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power consumption this month', - 'unique_id': 'gateway0_deviceSerialVitodens300W-power consumption this month', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.model0_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'model0 Energy', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.model0_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '7.843', - }) -# --- -# name: test_all_entities[sensor.model0_electricity_consumption_this_year-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_electricity_consumption_this_year', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Electricity consumption this year', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_consumption_this_year', - 'unique_id': 'gateway0_deviceSerialVitodens300W-power consumption this year', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.model0_electricity_consumption_this_year-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'model0 Electricity consumption this year', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.model0_electricity_consumption_this_year', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '207.106', - }) -# --- -# name: test_all_entities[sensor.model0_electricity_consumption_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_electricity_consumption_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Electricity consumption today', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_consumption_today', - 'unique_id': 'gateway0_deviceSerialVitodens300W-power consumption today', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.model0_electricity_consumption_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'model0 Electricity consumption today', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.model0_electricity_consumption_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.219', - }) -# --- -# name: test_all_entities[sensor.model0_heating_gas_consumption_this_month-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_heating_gas_consumption_this_month', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Heating gas consumption this month', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'gas_consumption_heating_this_month', - 'unique_id': 'gateway0_deviceSerialVitodens300W-gas_consumption_heating_this_month', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.model0_heating_gas_consumption_this_month-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 Heating gas consumption this month', - 
'state_class': , - }), - 'context': , - 'entity_id': 'sensor.model0_heating_gas_consumption_this_month', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.model0_heating_gas_consumption_this_week-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_heating_gas_consumption_this_week', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Heating gas consumption this week', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'gas_consumption_heating_this_week', - 'unique_id': 'gateway0_deviceSerialVitodens300W-gas_consumption_heating_this_week', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.model0_heating_gas_consumption_this_week-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 Heating gas consumption this week', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.model0_heating_gas_consumption_this_week', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.model0_heating_gas_consumption_this_year-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_heating_gas_consumption_this_year', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Heating gas consumption this year', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'gas_consumption_heating_this_year', - 'unique_id': 'gateway0_deviceSerialVitodens300W-gas_consumption_heating_this_year', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.model0_heating_gas_consumption_this_year-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 Heating gas consumption this year', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.model0_heating_gas_consumption_this_year', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '30946', - }) -# --- -# name: test_all_entities[sensor.model0_heating_gas_consumption_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_heating_gas_consumption_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Heating gas consumption today', - 'platform': 'vicare', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'gas_consumption_heating_today', - 'unique_id': 'gateway0_deviceSerialVitodens300W-gas_consumption_heating_today', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.model0_heating_gas_consumption_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'model0 Heating gas consumption today', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.model0_heating_gas_consumption_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.model0_outside_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_outside_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outside temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'outside_temperature', - 'unique_id': 'gateway0_deviceSerialVitodens300W-outside_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.model0_outside_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 Outside temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.model0_outside_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20.8', - }) -# --- -# name: test_all_entities[sensor.model0_electricity_consumption_this_week-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_electricity_consumption_this_week', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Electricity consumption this week', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_consumption_this_week', - 'unique_id': 'gateway0_deviceSerialVitodens300W-power consumption this week', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.model0_electricity_consumption_this_week-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'model0 Electricity consumption this week', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.model0_electricity_consumption_this_week', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.829', - }) -# --- -# name: test_all_entities[sensor.model0_supply_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.model0_supply_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Supply temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'supply_temperature', - 'unique_id': 'gateway0_deviceSerialVitodens300W-supply_temperature-0', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.model0_supply_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 Supply temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.model0_supply_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '63', - }) -# --- -# name: test_all_entities[sensor.model0_supply_temperature_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_supply_temperature_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Supply temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'supply_temperature', - 'unique_id': 'gateway0_deviceSerialVitodens300W-supply_temperature-1', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.model0_supply_temperature_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 Supply temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.model0_supply_temperature_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '25.5', - }) -# --- -# name: test_room_sensors[sensor.model0_humidity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_humidity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Humidity', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'gateway0_zigbee_d87a3bfffe5d844a-room_humidity', - 'unit_of_measurement': '%', - }) -# --- -# name: test_room_sensors[sensor.model0_humidity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'humidity', - 'friendly_name': 'model0 Humidity', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.model0_humidity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '53', - }) -# --- -# name: test_room_sensors[sensor.model0_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 
'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model0_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'gateway0_zigbee_d87a3bfffe5d844a-room_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_room_sensors[sensor.model0_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model0 Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.model0_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17.5', - }) -# --- -# name: test_room_sensors[sensor.model1_humidity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model1_humidity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Humidity', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'gateway1_zigbee_5cc7c1fffea33a3b-room_humidity', - 'unit_of_measurement': '%', - }) -# --- -# name: test_room_sensors[sensor.model1_humidity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'humidity', - 'friendly_name': 'model1 Humidity', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.model1_humidity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '52', - }) -# --- -# name: test_room_sensors[sensor.model1_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.model1_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'gateway1_zigbee_5cc7c1fffea33a3b-room_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_room_sensors[sensor.model1_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'model1 Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.model1_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '16.9', - }) -# --- diff --git a/tests/components/vicare/snapshots/test_water_heater.ambr b/tests/components/vicare/snapshots/test_water_heater.ambr deleted file mode 100644 index 
bca04b1bbfa..00000000000 --- a/tests/components/vicare/snapshots/test_water_heater.ambr +++ /dev/null @@ -1,113 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[water_heater.model0_domestic_hot_water-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_temp': 60, - 'min_temp': 10, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'water_heater', - 'entity_category': None, - 'entity_id': 'water_heater.model0_domestic_hot_water', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Domestic hot water', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'domestic_hot_water', - 'unique_id': 'gateway0_deviceSerialVitodens300W-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[water_heater.model0_domestic_hot_water-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'model0 Domestic hot water', - 'max_temp': 60, - 'min_temp': 10, - 'supported_features': , - 'target_temp_high': None, - 'target_temp_low': None, - 'temperature': None, - }), - 'context': , - 'entity_id': 'water_heater.model0_domestic_hot_water', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_all_entities[water_heater.model0_domestic_hot_water_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_temp': 60, - 'min_temp': 10, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'water_heater', - 'entity_category': None, - 'entity_id': 'water_heater.model0_domestic_hot_water_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Domestic hot water', - 'platform': 'vicare', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'domestic_hot_water', - 'unique_id': 'gateway0_deviceSerialVitodens300W-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[water_heater.model0_domestic_hot_water_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'model0 Domestic hot water', - 'max_temp': 60, - 'min_temp': 10, - 'supported_features': , - 'target_temp_high': None, - 'target_temp_low': None, - 'temperature': None, - }), - 'context': , - 'entity_id': 'water_heater.model0_domestic_hot_water_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/vicare/test_binary_sensor.py b/tests/components/vicare/test_binary_sensor.py index b9b8a57a59b..79ce91642af 100644 --- a/tests/components/vicare/test_binary_sensor.py +++ b/tests/components/vicare/test_binary_sensor.py @@ -1,18 +1,11 @@ """Test ViCare binary sensors.""" -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import MODULE, setup_integration -from .conftest import Fixture, MockPyViCare - -from tests.common import MockConfigEntry, snapshot_platform @pytest.mark.parametrize( @@ -31,21 +24,3 @@ async def test_binary_sensors( ) -> None: """Test the ViCare binary sensor.""" assert hass.states.get(f"binary_sensor.model0_{entity_id}") == snapshot - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] - with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), - patch(f"{MODULE}.PLATFORMS", [Platform.BINARY_SENSOR]), - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_button.py b/tests/components/vicare/test_button.py deleted file mode 100644 index c024af41d78..00000000000 --- a/tests/components/vicare/test_button.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Test ViCare button entity.""" - -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import MODULE, setup_integration -from .conftest import Fixture, MockPyViCare - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] - with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), - patch(f"{MODULE}.PLATFORMS", [Platform.BUTTON]), - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_climate.py b/tests/components/vicare/test_climate.py deleted file mode 100644 index 44df87276e7..00000000000 --- a/tests/components/vicare/test_climate.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Test ViCare climate entity.""" - -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import MODULE, setup_integration -from .conftest import Fixture, MockPyViCare - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] - with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), - patch(f"{MODULE}.PLATFORMS", [Platform.CLIMATE]), - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_config_flow.py b/tests/components/vicare/test_config_flow.py index a522cf75d5d..b823bb72dc9 100644 --- a/tests/components/vicare/test_config_flow.py +++ b/tests/components/vicare/test_config_flow.py @@ -11,7 +11,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import dhcp from homeassistant.components.vicare.const import DOMAIN -from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -104,7 +104,11 @@ async def test_step_reauth(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> ) config_entry.add_to_hass(hass) - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": config_entry.entry_id}, + data=VALID_CONFIG, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/vicare/test_diagnostics.py b/tests/components/vicare/test_diagnostics.py index 6adf4fe0edc..815b39545a9 100644 --- a/tests/components/vicare/test_diagnostics.py +++ b/tests/components/vicare/test_diagnostics.py @@ -3,7 +3,6 @@ from unittest.mock import MagicMock from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -22,4 +21,4 @@ async def test_diagnostics( hass, hass_client, mock_vicare_gas_boiler ) - assert diag == snapshot(exclude=props("created_at", "modified_at")) + assert diag == snapshot diff --git a/tests/components/vicare/test_fan.py b/tests/components/vicare/test_fan.py deleted file mode 100644 index ba5db6e42c7..00000000000 --- a/tests/components/vicare/test_fan.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Test ViCare fan.""" - -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import MODULE, setup_integration -from .conftest import Fixture, MockPyViCare - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - fixtures: list[Fixture] = [Fixture({"type:ventilation"}, "vicare/ViAir300F.json")] - with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), - patch(f"{MODULE}.PLATFORMS", [Platform.FAN]), - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_init.py b/tests/components/vicare/test_init.py deleted file mode 100644 index 62bec7f50c5..00000000000 --- a/tests/components/vicare/test_init.py +++ /dev/null @@ -1,107 +0,0 @@ -"""Test ViCare migration.""" - -from unittest.mock import patch - -from homeassistant.components.vicare.const import DOMAIN -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er - -from . import MODULE -from .conftest import Fixture, MockPyViCare - -from tests.common import MockConfigEntry - - -# Device migration test can be removed in 2025.4.0 -async def test_device_and_entity_migration( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, -) -> None: - """Test that the device registry is updated correctly.""" - fixtures: list[Fixture] = [ - Fixture({"type:boiler"}, "vicare/Vitodens300W.json"), - Fixture({"type:boiler"}, "vicare/dummy-device-no-serial.json"), - ] - with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), - patch(f"{MODULE}.PLATFORMS", [Platform.CLIMATE]), - ): - mock_config_entry.add_to_hass(hass) - - # device with serial data point - device0 = device_registry.async_get_or_create( - config_entry_id=mock_config_entry.entry_id, - identifiers={ - (DOMAIN, "gateway0"), - }, - model="model0", - ) - entry0 = entity_registry.async_get_or_create( - domain=Platform.CLIMATE, - platform=DOMAIN, - config_entry=mock_config_entry, - unique_id="gateway0-0", - translation_key="heating", - device_id=device0.id, - ) - entry1 = entity_registry.async_get_or_create( - domain=Platform.CLIMATE, - platform=DOMAIN, - config_entry=mock_config_entry, - unique_id="gateway0_deviceSerialVitodens300W-heating-1", - translation_key="heating", - device_id=device0.id, - ) - # device without serial data point - device1 = device_registry.async_get_or_create( - config_entry_id=mock_config_entry.entry_id, - identifiers={ - (DOMAIN, "gateway1"), - }, - model="model1", - ) - entry2 = entity_registry.async_get_or_create( - domain=Platform.CLIMATE, - platform=DOMAIN, - config_entry=mock_config_entry, - unique_id="gateway1-0", - translation_key="heating", - device_id=device1.id, - ) - # device is not provided by api - device2 = device_registry.async_get_or_create( - config_entry_id=mock_config_entry.entry_id, - identifiers={ - (DOMAIN, "gateway2"), - }, - model="model2", - ) - entry3 = entity_registry.async_get_or_create( - domain=Platform.CLIMATE, - platform=DOMAIN, - config_entry=mock_config_entry, - unique_id="gateway2-0", - translation_key="heating", - device_id=device2.id, - ) - - await 
hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.async_block_till_done() - - assert ( - entity_registry.async_get(entry0.entity_id).unique_id - == "gateway0_deviceSerialVitodens300W-heating-0" - ) - assert ( - entity_registry.async_get(entry1.entity_id).unique_id - == "gateway0_deviceSerialVitodens300W-heating-1" - ) - assert ( - entity_registry.async_get(entry2.entity_id).unique_id - == "gateway1_deviceId1-heating-0" - ) - assert entity_registry.async_get(entry3.entity_id).unique_id == "gateway2-0" diff --git a/tests/components/vicare/test_number.py b/tests/components/vicare/test_number.py deleted file mode 100644 index c3aa66a86f6..00000000000 --- a/tests/components/vicare/test_number.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Test ViCare number entity.""" - -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import MODULE, setup_integration -from .conftest import Fixture, MockPyViCare - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] - with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), - patch(f"{MODULE}.PLATFORMS", [Platform.NUMBER]), - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_sensor.py b/tests/components/vicare/test_sensor.py deleted file mode 100644 index 06c8b963680..00000000000 --- a/tests/components/vicare/test_sensor.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Test ViCare sensor entity.""" - -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import MODULE, setup_integration -from .conftest import Fixture, MockPyViCare - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - fixtures: list[Fixture] = [ - Fixture({"type:boiler"}, "vicare/Vitodens300W.json"), - ] - with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), - patch(f"{MODULE}.PLATFORMS", [Platform.SENSOR]), - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_room_sensors( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - fixtures: list[Fixture] = [ - Fixture({"type:climateSensor"}, "vicare/RoomSensor1.json"), - Fixture({"type:climateSensor"}, "vicare/RoomSensor2.json"), - ] - with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), - patch(f"{MODULE}.PLATFORMS", [Platform.SENSOR]), - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_types.py b/tests/components/vicare/test_types.py deleted file mode 100644 index c411213f13e..00000000000 --- a/tests/components/vicare/test_types.py +++ /dev/null @@ -1,99 +0,0 @@ -"""Test ViCare diagnostics.""" - -import pytest - -from homeassistant.components.climate import PRESET_COMFORT, PRESET_SLEEP -from homeassistant.components.vicare.fan import VentilationMode -from homeassistant.components.vicare.types import HeatingProgram - - -@pytest.mark.parametrize( - ("vicare_program", "expected_result"), - [ - ("", None), - (None, None), - ("anything", None), - (HeatingProgram.COMFORT, PRESET_COMFORT), - (HeatingProgram.COMFORT_HEATING, PRESET_COMFORT), - ], -) -async def test_heating_program_to_ha_preset( - vicare_program: str | None, - expected_result: str | None, -) -> None: - """Testing ViCare HeatingProgram to HA Preset.""" - - assert HeatingProgram.to_ha_preset(vicare_program) == expected_result - - -@pytest.mark.parametrize( - ("ha_preset", "expected_result"), - [ - ("", None), - (None, None), - ("anything", None), - (PRESET_SLEEP, HeatingProgram.REDUCED), - ], -) -async def test_ha_preset_to_heating_program( - ha_preset: str | None, - expected_result: str | None, -) -> None: - """Testing HA Preset to ViCare HeatingProgram.""" - - supported_programs = [ - HeatingProgram.COMFORT, - HeatingProgram.ECO, - HeatingProgram.NORMAL, - HeatingProgram.REDUCED, - ] - assert ( - HeatingProgram.from_ha_preset(ha_preset, supported_programs) == expected_result - ) - - -async def test_ha_preset_to_heating_program_error() -> None: - """Testing HA Preset to ViCare HeatingProgram.""" - - supported_programs = [ - "test", - ] - assert ( - HeatingProgram.from_ha_preset(HeatingProgram.NORMAL, supported_programs) is None - ) - - -@pytest.mark.parametrize( - ("vicare_mode", "expected_result"), - [ - ("", None), - (None, None), - ("anything", None), - ("sensorOverride", VentilationMode.SENSOR_OVERRIDE), - ], -) -async def test_ventilation_mode_to_ha_mode( - vicare_mode: str | None, - expected_result: str | None, -) -> 
None: - """Testing ViCare mode to VentilationMode.""" - - assert VentilationMode.from_vicare_mode(vicare_mode) == expected_result - - -@pytest.mark.parametrize( - ("ha_mode", "expected_result"), - [ - ("", None), - (None, None), - ("anything", None), - (VentilationMode.SENSOR_OVERRIDE, "sensorOverride"), - ], -) -async def test_ha_mode_to_ventilation_mode( - ha_mode: str | None, - expected_result: str | None, -) -> None: - """Testing VentilationMode to ViCare mode.""" - - assert VentilationMode.to_vicare_mode(ha_mode) == expected_result diff --git a/tests/components/vicare/test_water_heater.py b/tests/components/vicare/test_water_heater.py deleted file mode 100644 index fbb5863cf7a..00000000000 --- a/tests/components/vicare/test_water_heater.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Test ViCare water heater entity.""" - -from unittest.mock import patch - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import MODULE, setup_integration -from .conftest import Fixture, MockPyViCare - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] - with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), - patch(f"{MODULE}.PLATFORMS", [Platform.WATER_HEATER]), - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vilfo/conftest.py b/tests/components/vilfo/conftest.py index fbc48da28b3..11b620b82e0 100644 --- a/tests/components/vilfo/conftest.py +++ b/tests/components/vilfo/conftest.py @@ -1,9 +1,9 @@ """Vilfo tests conftest.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.vilfo import DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST diff --git a/tests/components/vilfo/test_config_flow.py b/tests/components/vilfo/test_config_flow.py index 24739f509e4..c4fdb2fe22c 100644 --- a/tests/components/vilfo/test_config_flow.py +++ b/tests/components/vilfo/test_config_flow.py @@ -150,10 +150,6 @@ async def test_form_exceptions( assert result["type"] is FlowResultType.CREATE_ENTRY -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.vilfo.config.error.wrong_host"], -) async def test_form_wrong_host( hass: HomeAssistant, mock_is_valid_host: AsyncMock, diff --git a/tests/components/vizio/conftest.py b/tests/components/vizio/conftest.py index 923509dea2c..b06ce2e1eb7 100644 --- a/tests/components/vizio/conftest.py +++ b/tests/components/vizio/conftest.py @@ -1,6 +1,5 @@ """Configure py.test.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest @@ -30,19 +29,19 @@ from .const import ( class MockInput: """Mock Vizio device input.""" - def __init__(self, name) -> None: + def __init__(self, name): """Initialize mock Vizio device input.""" self.meta_name = name self.name = name -def get_mock_inputs(input_list) -> list[MockInput]: +def 
get_mock_inputs(input_list): """Return list of MockInput.""" return [MockInput(device_input) for device_input in input_list] @pytest.fixture(name="vizio_get_unique_id", autouse=True) -def vizio_get_unique_id_fixture() -> Generator[None]: +def vizio_get_unique_id_fixture(): """Mock get vizio unique ID.""" with patch( "homeassistant.components.vizio.config_flow.VizioAsync.get_unique_id", @@ -52,7 +51,7 @@ def vizio_get_unique_id_fixture() -> Generator[None]: @pytest.fixture(name="vizio_data_coordinator_update", autouse=True) -def vizio_data_coordinator_update_fixture() -> Generator[None]: +def vizio_data_coordinator_update_fixture(): """Mock get data coordinator update.""" with patch( "homeassistant.components.vizio.coordinator.gen_apps_list_from_url", @@ -62,7 +61,7 @@ def vizio_data_coordinator_update_fixture() -> Generator[None]: @pytest.fixture(name="vizio_data_coordinator_update_failure") -def vizio_data_coordinator_update_failure_fixture() -> Generator[None]: +def vizio_data_coordinator_update_failure_fixture(): """Mock get data coordinator update failure.""" with patch( "homeassistant.components.vizio.coordinator.gen_apps_list_from_url", @@ -72,7 +71,7 @@ def vizio_data_coordinator_update_failure_fixture() -> Generator[None]: @pytest.fixture(name="vizio_no_unique_id") -def vizio_no_unique_id_fixture() -> Generator[None]: +def vizio_no_unique_id_fixture(): """Mock no vizio unique ID returrned.""" with patch( "homeassistant.components.vizio.config_flow.VizioAsync.get_unique_id", @@ -82,7 +81,7 @@ def vizio_no_unique_id_fixture() -> Generator[None]: @pytest.fixture(name="vizio_connect") -def vizio_connect_fixture() -> Generator[None]: +def vizio_connect_fixture(): """Mock valid vizio device and entry setup.""" with patch( "homeassistant.components.vizio.config_flow.VizioAsync.validate_ha_config", @@ -92,7 +91,7 @@ def vizio_connect_fixture() -> Generator[None]: @pytest.fixture(name="vizio_complete_pairing") -def vizio_complete_pairing_fixture() -> Generator[None]: +def vizio_complete_pairing_fixture(): """Mock complete vizio pairing workflow.""" with ( patch( @@ -108,7 +107,7 @@ def vizio_complete_pairing_fixture() -> Generator[None]: @pytest.fixture(name="vizio_start_pairing_failure") -def vizio_start_pairing_failure_fixture() -> Generator[None]: +def vizio_start_pairing_failure_fixture(): """Mock vizio start pairing failure.""" with patch( "homeassistant.components.vizio.config_flow.VizioAsync.start_pair", @@ -118,7 +117,7 @@ def vizio_start_pairing_failure_fixture() -> Generator[None]: @pytest.fixture(name="vizio_invalid_pin_failure") -def vizio_invalid_pin_failure_fixture() -> Generator[None]: +def vizio_invalid_pin_failure_fixture(): """Mock vizio failure due to invalid pin.""" with ( patch( @@ -134,14 +133,14 @@ def vizio_invalid_pin_failure_fixture() -> Generator[None]: @pytest.fixture(name="vizio_bypass_setup") -def vizio_bypass_setup_fixture() -> Generator[None]: +def vizio_bypass_setup_fixture(): """Mock component setup.""" with patch("homeassistant.components.vizio.async_setup_entry", return_value=True): yield @pytest.fixture(name="vizio_bypass_update") -def vizio_bypass_update_fixture() -> Generator[None]: +def vizio_bypass_update_fixture(): """Mock component update.""" with ( patch( @@ -154,7 +153,7 @@ def vizio_bypass_update_fixture() -> Generator[None]: @pytest.fixture(name="vizio_guess_device_type") -def vizio_guess_device_type_fixture() -> Generator[None]: +def vizio_guess_device_type_fixture(): """Mock vizio async_guess_device_type function.""" with patch( 
"homeassistant.components.vizio.config_flow.async_guess_device_type", @@ -164,7 +163,7 @@ def vizio_guess_device_type_fixture() -> Generator[None]: @pytest.fixture(name="vizio_cant_connect") -def vizio_cant_connect_fixture() -> Generator[None]: +def vizio_cant_connect_fixture(): """Mock vizio device can't connect with valid auth.""" with ( patch( @@ -180,7 +179,7 @@ def vizio_cant_connect_fixture() -> Generator[None]: @pytest.fixture(name="vizio_update") -def vizio_update_fixture() -> Generator[None]: +def vizio_update_fixture(): """Mock valid updates to vizio device.""" with ( patch( @@ -224,7 +223,7 @@ def vizio_update_fixture() -> Generator[None]: @pytest.fixture(name="vizio_update_with_apps") -def vizio_update_with_apps_fixture(vizio_update: None) -> Generator[None]: +def vizio_update_with_apps_fixture(vizio_update: pytest.fixture): """Mock valid updates to vizio device that supports apps.""" with ( patch( @@ -244,7 +243,7 @@ def vizio_update_with_apps_fixture(vizio_update: None) -> Generator[None]: @pytest.fixture(name="vizio_update_with_apps_on_input") -def vizio_update_with_apps_on_input_fixture(vizio_update: None) -> Generator[None]: +def vizio_update_with_apps_on_input_fixture(vizio_update: pytest.fixture): """Mock valid updates to vizio device that supports apps but is on a TV input.""" with ( patch( @@ -264,7 +263,7 @@ def vizio_update_with_apps_on_input_fixture(vizio_update: None) -> Generator[Non @pytest.fixture(name="vizio_hostname_check") -def vizio_hostname_check() -> Generator[None]: +def vizio_hostname_check(): """Mock vizio hostname resolution.""" with patch( "homeassistant.components.vizio.config_flow.socket.gethostbyname", diff --git a/tests/components/vizio/test_config_flow.py b/tests/components/vizio/test_config_flow.py index 42d4394ca80..712dd2a31b5 100644 --- a/tests/components/vizio/test_config_flow.py +++ b/tests/components/vizio/test_config_flow.py @@ -57,8 +57,11 @@ from .const import ( from tests.common import MockConfigEntry -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_user_flow_minimum_fields(hass: HomeAssistant) -> None: +async def test_user_flow_minimum_fields( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, +) -> None: """Test user config flow with minimum fields.""" # test form shows result = await hass.config_entries.flow.async_init( @@ -78,8 +81,11 @@ async def test_user_flow_minimum_fields(hass: HomeAssistant) -> None: assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.SPEAKER -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_user_flow_all_fields(hass: HomeAssistant) -> None: +async def test_user_flow_all_fields( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, +) -> None: """Test user config flow with all fields.""" # test form shows result = await hass.config_entries.flow.async_init( @@ -102,8 +108,11 @@ async def test_user_flow_all_fields(hass: HomeAssistant) -> None: assert CONF_APPS not in result["data"] -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_speaker_options_flow(hass: HomeAssistant) -> None: +async def test_speaker_options_flow( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_update: pytest.fixture, +) -> None: """Test options config flow for speaker.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_SPEAKER_CONFIG @@ -127,8 +136,11 @@ async def 
test_speaker_options_flow(hass: HomeAssistant) -> None: assert CONF_APPS not in result["data"] -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_tv_options_flow_no_apps(hass: HomeAssistant) -> None: +async def test_tv_options_flow_no_apps( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_update: pytest.fixture, +) -> None: """Test options config flow for TV without providing apps option.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_USER_VALID_TV_CONFIG @@ -155,8 +167,11 @@ async def test_tv_options_flow_no_apps(hass: HomeAssistant) -> None: assert CONF_APPS not in result["data"] -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_tv_options_flow_with_apps(hass: HomeAssistant) -> None: +async def test_tv_options_flow_with_apps( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_update: pytest.fixture, +) -> None: """Test options config flow for TV with providing apps option.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_USER_VALID_TV_CONFIG @@ -184,8 +199,11 @@ async def test_tv_options_flow_with_apps(hass: HomeAssistant) -> None: assert result["data"][CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]} -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_tv_options_flow_start_with_volume(hass: HomeAssistant) -> None: +async def test_tv_options_flow_start_with_volume( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_update: pytest.fixture, +) -> None: """Test options config flow for TV with providing apps option after providing volume step in initial config.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_USER_VALID_TV_CONFIG @@ -223,8 +241,11 @@ async def test_tv_options_flow_start_with_volume(hass: HomeAssistant) -> None: assert result["data"][CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]} -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_user_host_already_configured(hass: HomeAssistant) -> None: +async def test_user_host_already_configured( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, +) -> None: """Test host is already configured during user setup.""" entry = MockConfigEntry( domain=DOMAIN, @@ -244,8 +265,11 @@ async def test_user_host_already_configured(hass: HomeAssistant) -> None: assert result["errors"] == {CONF_HOST: "existing_config_entry_found"} -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_user_serial_number_already_exists(hass: HomeAssistant) -> None: +async def test_user_serial_number_already_exists( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, +) -> None: """Test serial_number is already configured with different host and name during user setup.""" # Set up new entry MockConfigEntry( @@ -265,8 +289,9 @@ async def test_user_serial_number_already_exists(hass: HomeAssistant) -> None: assert result["errors"] == {CONF_HOST: "existing_config_entry_found"} -@pytest.mark.usefixtures("vizio_no_unique_id") -async def test_user_error_on_could_not_connect(hass: HomeAssistant) -> None: +async def test_user_error_on_could_not_connect( + hass: HomeAssistant, vizio_no_unique_id: pytest.fixture +) -> None: """Test with could_not_connect during user setup due to no connectivity.""" result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_USER_VALID_TV_CONFIG @@ -276,9 +301,8 @@ async def test_user_error_on_could_not_connect(hass: HomeAssistant) -> None: assert result["errors"] == {CONF_HOST: "cannot_connect"} -@pytest.mark.usefixtures("vizio_cant_connect") async def test_user_error_on_could_not_connect_invalid_token( - hass: HomeAssistant, + hass: HomeAssistant, vizio_cant_connect: pytest.fixture ) -> None: """Test with could_not_connect during user setup due to invalid token.""" result = await hass.config_entries.flow.async_init( @@ -289,10 +313,12 @@ async def test_user_error_on_could_not_connect_invalid_token( assert result["errors"] == {"base": "cannot_connect"} -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_complete_pairing" -) -async def test_user_tv_pairing_no_apps(hass: HomeAssistant) -> None: +async def test_user_tv_pairing_no_apps( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, + vizio_complete_pairing: pytest.fixture, +) -> None: """Test pairing config flow when access token not provided for tv during user entry and no apps configured.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_TV_CONFIG_NO_TOKEN @@ -318,10 +344,12 @@ async def test_user_tv_pairing_no_apps(hass: HomeAssistant) -> None: assert CONF_APPS not in result["data"] -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_start_pairing_failure" -) -async def test_user_start_pairing_failure(hass: HomeAssistant) -> None: +async def test_user_start_pairing_failure( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, + vizio_start_pairing_failure: pytest.fixture, +) -> None: """Test failure to start pairing from user config flow.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_TV_CONFIG_NO_TOKEN @@ -332,10 +360,12 @@ async def test_user_start_pairing_failure(hass: HomeAssistant) -> None: assert result["errors"] == {"base": "cannot_connect"} -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_invalid_pin_failure" -) -async def test_user_invalid_pin(hass: HomeAssistant) -> None: +async def test_user_invalid_pin( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, + vizio_invalid_pin_failure: pytest.fixture, +) -> None: """Test failure to complete pairing from user config flow.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_TV_CONFIG_NO_TOKEN @@ -353,8 +383,11 @@ async def test_user_invalid_pin(hass: HomeAssistant) -> None: assert result["errors"] == {CONF_PIN: "complete_pairing_failed"} -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_user_ignore(hass: HomeAssistant) -> None: +async def test_user_ignore( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, +) -> None: """Test user config flow doesn't throw an error when there's an existing ignored source.""" entry = MockConfigEntry( domain=DOMAIN, @@ -370,8 +403,11 @@ async def test_user_ignore(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_flow_minimum_fields(hass: HomeAssistant) -> None: +async def test_import_flow_minimum_fields( + hass: HomeAssistant, + 
vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, +) -> None: """Test import config flow with minimum fields.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -389,8 +425,11 @@ async def test_import_flow_minimum_fields(hass: HomeAssistant) -> None: assert result["data"][CONF_VOLUME_STEP] == DEFAULT_VOLUME_STEP -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_flow_all_fields(hass: HomeAssistant) -> None: +async def test_import_flow_all_fields( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, +) -> None: """Test import config flow with all fields.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -407,8 +446,11 @@ async def test_import_flow_all_fields(hass: HomeAssistant) -> None: assert result["data"][CONF_VOLUME_STEP] == VOLUME_STEP -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_entity_already_configured(hass: HomeAssistant) -> None: +async def test_import_entity_already_configured( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, +) -> None: """Test entity is already configured during import setup.""" entry = MockConfigEntry( domain=DOMAIN, @@ -426,8 +468,11 @@ async def test_import_entity_already_configured(hass: HomeAssistant) -> None: assert result["reason"] == "already_configured_device" -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_import_flow_update_options(hass: HomeAssistant) -> None: +async def test_import_flow_update_options( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_update: pytest.fixture, +) -> None: """Test import config flow with updated options.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -454,8 +499,11 @@ async def test_import_flow_update_options(hass: HomeAssistant) -> None: assert config_entry.options[CONF_VOLUME_STEP] == VOLUME_STEP + 1 -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_import_flow_update_name_and_apps(hass: HomeAssistant) -> None: +async def test_import_flow_update_name_and_apps( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_update: pytest.fixture, +) -> None: """Test import config flow with updated name and apps.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -485,8 +533,11 @@ async def test_import_flow_update_name_and_apps(hass: HomeAssistant) -> None: assert config_entry.options[CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]} -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_import_flow_update_remove_apps(hass: HomeAssistant) -> None: +async def test_import_flow_update_remove_apps( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_update: pytest.fixture, +) -> None: """Test import config flow with removed apps.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -515,10 +566,12 @@ async def test_import_flow_update_remove_apps(hass: HomeAssistant) -> None: assert CONF_APPS not in config_entry.options -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_complete_pairing" -) -async def test_import_needs_pairing(hass: HomeAssistant) -> None: +async def test_import_needs_pairing( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, + vizio_complete_pairing: pytest.fixture, +) -> None: """Test pairing config flow when access token not provided for tv 
during import.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_TV_CONFIG_NO_TOKEN @@ -550,10 +603,12 @@ async def test_import_needs_pairing(hass: HomeAssistant) -> None: assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.TV -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_complete_pairing" -) -async def test_import_with_apps_needs_pairing(hass: HomeAssistant) -> None: +async def test_import_with_apps_needs_pairing( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, + vizio_complete_pairing: pytest.fixture, +) -> None: """Test pairing config flow when access token not provided for tv but apps are included during import.""" import_config = MOCK_TV_CONFIG_NO_TOKEN.copy() import_config[CONF_APPS] = {CONF_INCLUDE: [CURRENT_APP]} @@ -591,8 +646,11 @@ async def test_import_with_apps_needs_pairing(hass: HomeAssistant) -> None: assert result["data"][CONF_APPS][CONF_INCLUDE] == [CURRENT_APP] -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") -async def test_import_flow_additional_configs(hass: HomeAssistant) -> None: +async def test_import_flow_additional_configs( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_update: pytest.fixture, +) -> None: """Test import config flow with additional configs defined in CONF_APPS.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -608,9 +666,10 @@ async def test_import_flow_additional_configs(hass: HomeAssistant) -> None: assert CONF_APPS not in config_entry.options -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") async def test_import_error( hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test that error is logged when import config has an error.""" @@ -641,8 +700,11 @@ async def test_import_error( assert len(vizio_log_list) == 1 -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") -async def test_import_ignore(hass: HomeAssistant) -> None: +async def test_import_ignore( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, +) -> None: """Test import config flow doesn't throw an error when there's an existing ignored source.""" entry = MockConfigEntry( domain=DOMAIN, @@ -661,10 +723,12 @@ async def test_import_ignore(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" -) -async def test_zeroconf_flow(hass: HomeAssistant) -> None: +async def test_zeroconf_flow( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, + vizio_guess_device_type: pytest.fixture, +) -> None: """Test zeroconf config flow.""" discovery_info = dataclasses.replace(MOCK_ZEROCONF_SERVICE_INFO) result = await hass.config_entries.flow.async_init( @@ -696,10 +760,12 @@ async def test_zeroconf_flow(hass: HomeAssistant) -> None: assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.SPEAKER -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" -) -async def test_zeroconf_flow_already_configured(hass: HomeAssistant) -> None: +async def test_zeroconf_flow_already_configured( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, + vizio_guess_device_type: pytest.fixture, +) -> None: """Test 
entity is already configured during zeroconf setup.""" entry = MockConfigEntry( domain=DOMAIN, @@ -720,10 +786,12 @@ async def test_zeroconf_flow_already_configured(hass: HomeAssistant) -> None: assert result["reason"] == "already_configured" -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" -) -async def test_zeroconf_flow_with_port_in_host(hass: HomeAssistant) -> None: +async def test_zeroconf_flow_with_port_in_host( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, + vizio_guess_device_type: pytest.fixture, +) -> None: """Test entity is already configured during zeroconf setup when port is in host.""" entry = MockConfigEntry( domain=DOMAIN, @@ -746,10 +814,12 @@ async def test_zeroconf_flow_with_port_in_host(hass: HomeAssistant) -> None: assert result["reason"] == "already_configured" -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" -) -async def test_zeroconf_dupe_fail(hass: HomeAssistant) -> None: +async def test_zeroconf_dupe_fail( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, + vizio_guess_device_type: pytest.fixture, +) -> None: """Test zeroconf config flow when device gets discovered multiple times.""" discovery_info = dataclasses.replace(MOCK_ZEROCONF_SERVICE_INFO) result = await hass.config_entries.flow.async_init( @@ -770,10 +840,12 @@ async def test_zeroconf_dupe_fail(hass: HomeAssistant) -> None: assert result["reason"] == "already_in_progress" -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" -) -async def test_zeroconf_ignore(hass: HomeAssistant) -> None: +async def test_zeroconf_ignore( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, + vizio_guess_device_type: pytest.fixture, +) -> None: """Test zeroconf discovery doesn't throw an error when there's an existing ignored source.""" entry = MockConfigEntry( domain=DOMAIN, @@ -791,8 +863,11 @@ async def test_zeroconf_ignore(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM -@pytest.mark.usefixtures("vizio_guess_device_type", "vizio_no_unique_id") -async def test_zeroconf_no_unique_id(hass: HomeAssistant) -> None: +async def test_zeroconf_no_unique_id( + hass: HomeAssistant, + vizio_guess_device_type: pytest.fixture, + vizio_no_unique_id: pytest.fixture, +) -> None: """Test zeroconf discovery aborts when unique_id is None.""" discovery_info = dataclasses.replace(MOCK_ZEROCONF_SERVICE_INFO) @@ -804,10 +879,12 @@ async def test_zeroconf_no_unique_id(hass: HomeAssistant) -> None: assert result["reason"] == "cannot_connect" -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" -) -async def test_zeroconf_abort_when_ignored(hass: HomeAssistant) -> None: +async def test_zeroconf_abort_when_ignored( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, + vizio_guess_device_type: pytest.fixture, +) -> None: """Test zeroconf discovery aborts when the same host has been ignored.""" entry = MockConfigEntry( domain=DOMAIN, @@ -827,13 +904,13 @@ async def test_zeroconf_abort_when_ignored(hass: HomeAssistant) -> None: assert result["reason"] == "already_configured" -@pytest.mark.usefixtures( - "vizio_connect", - "vizio_bypass_setup", - "vizio_hostname_check", - "vizio_guess_device_type", -) -async def test_zeroconf_flow_already_configured_hostname(hass: HomeAssistant) -> 
None: +async def test_zeroconf_flow_already_configured_hostname( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, + vizio_hostname_check: pytest.fixture, + vizio_guess_device_type: pytest.fixture, +) -> None: """Test entity is already configured during zeroconf setup when existing entry uses hostname.""" config = MOCK_SPEAKER_CONFIG.copy() config[CONF_HOST] = "hostname" @@ -856,8 +933,12 @@ async def test_zeroconf_flow_already_configured_hostname(hass: HomeAssistant) -> assert result["reason"] == "already_configured" -@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup", "vizio_hostname_check") -async def test_import_flow_already_configured_hostname(hass: HomeAssistant) -> None: +async def test_import_flow_already_configured_hostname( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_setup: pytest.fixture, + vizio_hostname_check: pytest.fixture, +) -> None: """Test entity is already configured during import setup when existing entry uses hostname.""" config = MOCK_SPEAKER_CONFIG.copy() config[CONF_HOST] = "hostname" diff --git a/tests/components/vizio/test_init.py b/tests/components/vizio/test_init.py index c2b19377809..eba5af437b1 100644 --- a/tests/components/vizio/test_init.py +++ b/tests/components/vizio/test_init.py @@ -15,8 +15,11 @@ from .const import MOCK_SPEAKER_CONFIG, MOCK_USER_VALID_TV_CONFIG, UNIQUE_ID from tests.common import MockConfigEntry, async_fire_time_changed -@pytest.mark.usefixtures("vizio_connect", "vizio_update") -async def test_setup_component(hass: HomeAssistant) -> None: +async def test_setup_component( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, +) -> None: """Test component setup.""" assert await async_setup_component( hass, DOMAIN, {DOMAIN: MOCK_USER_VALID_TV_CONFIG} @@ -25,8 +28,11 @@ async def test_setup_component(hass: HomeAssistant) -> None: assert len(hass.states.async_entity_ids(Platform.MEDIA_PLAYER)) == 1 -@pytest.mark.usefixtures("vizio_connect", "vizio_update") -async def test_tv_load_and_unload(hass: HomeAssistant) -> None: +async def test_tv_load_and_unload( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, +) -> None: """Test loading and unloading TV entry.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID @@ -46,8 +52,11 @@ async def test_tv_load_and_unload(hass: HomeAssistant) -> None: assert DOMAIN not in hass.data -@pytest.mark.usefixtures("vizio_connect", "vizio_update") -async def test_speaker_load_and_unload(hass: HomeAssistant) -> None: +async def test_speaker_load_and_unload( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, +) -> None: """Test loading and unloading speaker entry.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_SPEAKER_CONFIG, unique_id=UNIQUE_ID @@ -67,11 +76,11 @@ async def test_speaker_load_and_unload(hass: HomeAssistant) -> None: assert DOMAIN not in hass.data -@pytest.mark.usefixtures( - "vizio_connect", "vizio_bypass_update", "vizio_data_coordinator_update_failure" -) async def test_coordinator_update_failure( hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_bypass_update: pytest.fixture, + vizio_data_coordinator_update_failure: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test coordinator update failure after 10 days.""" diff --git a/tests/components/vizio/test_media_player.py b/tests/components/vizio/test_media_player.py index 
12e19077c8e..52a5732706d 100644 --- a/tests/components/vizio/test_media_player.py +++ b/tests/components/vizio/test_media_player.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import AsyncIterator from contextlib import asynccontextmanager from datetime import timedelta from typing import Any @@ -130,7 +129,7 @@ def _get_attr_and_assert_base_attr( @asynccontextmanager async def _cm_for_test_setup_without_apps( all_settings: dict[str, Any], vizio_power_state: bool | None -) -> AsyncIterator[None]: +) -> None: """Context manager to setup test for Vizio devices without including app specific patches.""" with ( patch( @@ -212,7 +211,7 @@ async def _test_setup_speaker( @asynccontextmanager async def _cm_for_test_setup_tv_with_apps( hass: HomeAssistant, device_config: dict[str, Any], app_config: dict[str, Any] -) -> AsyncIterator[None]: +) -> None: """Context manager to setup test for Vizio TV with support for apps.""" config_entry = MockConfigEntry( domain=DOMAIN, data=vol.Schema(VIZIO_SCHEMA)(device_config), unique_id=UNIQUE_ID @@ -281,46 +280,63 @@ async def _test_service( assert service_call.call_args == call(*args, **kwargs) -@pytest.mark.usefixtures("vizio_connect", "vizio_update") -async def test_speaker_on(hass: HomeAssistant) -> None: +async def test_speaker_on( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, +) -> None: """Test Vizio Speaker entity setup when on.""" await _test_setup_speaker(hass, True) -@pytest.mark.usefixtures("vizio_connect", "vizio_update") -async def test_speaker_off(hass: HomeAssistant) -> None: +async def test_speaker_off( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, +) -> None: """Test Vizio Speaker entity setup when off.""" await _test_setup_speaker(hass, False) -@pytest.mark.usefixtures("vizio_connect", "vizio_update") async def test_speaker_unavailable( hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, ) -> None: """Test Vizio Speaker entity setup when unavailable.""" await _test_setup_speaker(hass, None) -@pytest.mark.usefixtures("vizio_connect", "vizio_update") -async def test_init_tv_on(hass: HomeAssistant) -> None: +async def test_init_tv_on( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, +) -> None: """Test Vizio TV entity setup when on.""" await _test_setup_tv(hass, True) -@pytest.mark.usefixtures("vizio_connect", "vizio_update") -async def test_init_tv_off(hass: HomeAssistant) -> None: +async def test_init_tv_off( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, +) -> None: """Test Vizio TV entity setup when off.""" await _test_setup_tv(hass, False) -@pytest.mark.usefixtures("vizio_connect", "vizio_update") -async def test_init_tv_unavailable(hass: HomeAssistant) -> None: +async def test_init_tv_unavailable( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, +) -> None: """Test Vizio TV entity setup when unavailable.""" await _test_setup_tv(hass, None) -@pytest.mark.usefixtures("vizio_cant_connect") -async def test_setup_unavailable_speaker(hass: HomeAssistant) -> None: +async def test_setup_unavailable_speaker( + hass: HomeAssistant, vizio_cant_connect: pytest.fixture +) -> None: """Test speaker entity sets up as unavailable.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_SPEAKER_CONFIG, unique_id=UNIQUE_ID @@ -330,8 +346,9 @@ async def test_setup_unavailable_speaker(hass: 
HomeAssistant) -> None: assert hass.states.get("media_player.vizio").state == STATE_UNAVAILABLE -@pytest.mark.usefixtures("vizio_cant_connect") -async def test_setup_unavailable_tv(hass: HomeAssistant) -> None: +async def test_setup_unavailable_tv( + hass: HomeAssistant, vizio_cant_connect: pytest.fixture +) -> None: """Test TV entity sets up as unavailable.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID @@ -341,8 +358,11 @@ async def test_setup_unavailable_tv(hass: HomeAssistant) -> None: assert hass.states.get("media_player.vizio").state == STATE_UNAVAILABLE -@pytest.mark.usefixtures("vizio_connect", "vizio_update") -async def test_services(hass: HomeAssistant) -> None: +async def test_services( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, +) -> None: """Test all Vizio media player entity services.""" await _test_setup_tv(hass, True) @@ -429,8 +449,11 @@ async def test_services(hass: HomeAssistant) -> None: await _test_service(hass, MP_DOMAIN, "pause", SERVICE_MEDIA_PAUSE, None) -@pytest.mark.usefixtures("vizio_connect", "vizio_update") -async def test_options_update(hass: HomeAssistant) -> None: +async def test_options_update( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, +) -> None: """Test when config entry update event fires.""" await _test_setup_speaker(hass, True) config_entry = hass.config_entries.async_entries(DOMAIN)[0] @@ -453,7 +476,7 @@ async def _test_update_availability_switch( hass: HomeAssistant, initial_power_state: bool | None, final_power_state: bool | None, - caplog: pytest.LogCaptureFixture, + caplog: pytest.fixture, ) -> None: now = dt_util.utcnow() future_interval = timedelta(minutes=1) @@ -493,27 +516,30 @@ async def _test_update_availability_switch( assert len(vizio_log_list) == 1 -@pytest.mark.usefixtures("vizio_connect", "vizio_update") async def test_update_unavailable_to_available( hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device becomes available after being unavailable.""" await _test_update_availability_switch(hass, None, True, caplog) -@pytest.mark.usefixtures("vizio_connect", "vizio_update") async def test_update_available_to_unavailable( hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device becomes unavailable after being available.""" await _test_update_availability_switch(hass, True, None, caplog) -@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_apps( hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps.""" @@ -538,9 +564,10 @@ async def test_setup_with_apps( ) -@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_apps_include( hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps and apps["include"] in config.""" @@ -555,9 +582,10 @@ async def test_setup_with_apps_include( assert "app_id" not in attr -@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_apps_exclude( hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update_with_apps: pytest.fixture, caplog: 
pytest.LogCaptureFixture, ) -> None: """Test device setup with apps and apps["exclude"] in config.""" @@ -572,9 +600,10 @@ async def test_setup_with_apps_exclude( assert "app_id" not in attr -@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_apps_additional_apps_config( hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps and apps["additional_configs"] in config.""" @@ -650,9 +679,10 @@ def test_invalid_apps_config(hass: HomeAssistant) -> None: vol.Schema(vol.All(VIZIO_SCHEMA, validate_apps))(MOCK_SPEAKER_APPS_FAILURE) -@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_unknown_app_config( hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps where app config returned is unknown.""" @@ -666,9 +696,10 @@ async def test_setup_with_unknown_app_config( assert attr["app_id"] == UNKNOWN_APP_CONFIG -@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_no_running_app( hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps where no app is running.""" @@ -682,8 +713,11 @@ async def test_setup_with_no_running_app( assert "app_name" not in attr -@pytest.mark.usefixtures("vizio_connect", "vizio_update") -async def test_setup_tv_without_mute(hass: HomeAssistant) -> None: +async def test_setup_tv_without_mute( + hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update: pytest.fixture, +) -> None: """Test Vizio TV entity setup when mute property isn't returned by Vizio API.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -703,9 +737,10 @@ async def test_setup_tv_without_mute(hass: HomeAssistant) -> None: assert "is_volume_muted" not in attr -@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_apps_update( hass: HomeAssistant, + vizio_connect: pytest.fixture, + vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps where no app is running.""" @@ -737,8 +772,9 @@ async def test_apps_update( assert len(apps) == len(APP_LIST) -@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps_on_input") -async def test_vizio_update_with_apps_on_input(hass: HomeAssistant) -> None: +async def test_vizio_update_with_apps_on_input( + hass: HomeAssistant, vizio_connect, vizio_update_with_apps_on_input +) -> None: """Test a vizio TV with apps that is on a TV input.""" config_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/vlc_telnet/test_config_flow.py b/tests/components/vlc_telnet/test_config_flow.py index a4b559bbe1b..54edafab14a 100644 --- a/tests/components/vlc_telnet/test_config_flow.py +++ b/tests/components/vlc_telnet/test_config_flow.py @@ -9,10 +9,10 @@ from aiovlc.exceptions import AuthError, ConnectError import pytest from homeassistant import config_entries +from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.vlc_telnet.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry @@ -153,7 
+153,15 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=DOMAIN, data=entry_data) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=entry_data, + ) with ( patch("homeassistant.components.vlc_telnet.config_flow.Client.connect"), @@ -201,7 +209,15 @@ async def test_reauth_errors( entry = MockConfigEntry(domain=DOMAIN, data=entry_data) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "unique_id": entry.unique_id, + }, + data=entry_data, + ) with ( patch( diff --git a/tests/components/vodafone_station/const.py b/tests/components/vodafone_station/const.py index 9adf32b339d..1b3d36def03 100644 --- a/tests/components/vodafone_station/const.py +++ b/tests/components/vodafone_station/const.py @@ -1,7 +1,5 @@ """Common stuff for Vodafone Station tests.""" -from aiovodafone.api import VodafoneStationDevice - from homeassistant.components.vodafone_station.const import DOMAIN from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PASSWORD, CONF_USERNAME @@ -18,98 +16,3 @@ MOCK_CONFIG = { } MOCK_USER_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][0] - - -DEVICE_DATA_QUERY = { - "xx:xx:xx:xx:xx:xx": VodafoneStationDevice( - connected=True, - connection_type="wifi", - ip_address="192.168.1.10", - name="WifiDevice0", - mac="xx:xx:xx:xx:xx:xx", - type="laptop", - wifi="2.4G", - ) -} - -SENSOR_DATA_QUERY = { - "sys_serial_number": "M123456789", - "sys_firmware_version": "XF6_4.0.05.04", - "sys_bootloader_version": "0220", - "sys_hardware_version": "RHG3006 v1", - "omci_software_version": "\t\t1.0.0.1_41032\t\t\n", - "sys_uptime": "12:16:41", - "sys_cpu_usage": "97%", - "sys_reboot_cause": "Web Reboot", - "sys_memory_usage": "51.94%", - "sys_wireless_driver_version": "17.10.188.75;17.10.188.75", - "sys_wireless_driver_version_5g": "17.10.188.75;17.10.188.75", - "vf_internet_key_online_since": "", - "vf_internet_key_ip_addr": "0.0.0.0", - "vf_internet_key_system": "0.0.0.0", - "vf_internet_key_mode": "Auto", - "sys_voip_version": "v02.01.00_01.13a\n", - "sys_date_time": "20.10.2024 | 03:44 pm", - "sys_build_time": "Sun Jun 23 17:55:49 CST 2024\n", - "sys_model_name": "RHG3006", - "inter_ip_address": "1.1.1.1", - "inter_gateway": "1.1.1.2", - "inter_primary_dns": "1.1.1.3", - "inter_secondary_dns": "1.1.1.4", - "inter_firewall": "601036", - "inter_wan_ip_address": "1.1.1.1", - "inter_ipv6_link_local_address": "", - "inter_ipv6_link_global_address": "", - "inter_ipv6_gateway": "", - "inter_ipv6_prefix_delegation": "", - "inter_ipv6_dns_address1": "", - "inter_ipv6_dns_address2": "", - "lan_ip_network": "192.168.0.1/24", - "lan_default_gateway": "192.168.0.1", - "lan_subnet_address_subnet1": "", - "lan_mac_address": "11:22:33:44:55:66", - "lan_dhcp_server": "601036", - "lan_dhcpv6_server": "601036", - "lan_router_advertisement": "601036", - "lan_ipv6_default_gateway": "fe80::1", - "lan_port1_switch_mode": "1301722", - "lan_port2_switch_mode": "1301722", - "lan_port3_switch_mode": "1301722", - "lan_port4_switch_mode": "1301722", - "lan_port1_switch_speed": "10", - "lan_port2_switch_speed": "100", - "lan_port3_switch_speed": "1000", - "lan_port4_switch_speed": "1000", - 
"lan_port1_switch_status": "1301724", - "lan_port2_switch_status": "1301724", - "lan_port3_switch_status": "1301724", - "lan_port4_switch_status": "1301724", - "wifi_status": "601036", - "wifi_name": "Wifi-Main-Network", - "wifi_mac_address": "AA:BB:CC:DD:EE:FF", - "wifi_security": "401027", - "wifi_channel": "8", - "wifi_bandwidth": "573", - "guest_wifi_status": "601037", - "guest_wifi_name": "Wifi-Guest", - "guest_wifi_mac_addr": "AA:BB:CC:DD:EE:GG", - "guest_wifi_security": "401027", - "guest_wifi_channel": "N/A", - "guest_wifi_ip": "192.168.2.1", - "guest_wifi_subnet_addr": "255.255.255.0", - "guest_wifi_dhcp_server": "192.168.2.1", - "wifi_status_5g": "601036", - "wifi_name_5g": "Wifi-Main-Network", - "wifi_mac_address_5g": "AA:BB:CC:DD:EE:HH", - "wifi_security_5g": "401027", - "wifi_channel_5g": "36", - "wifi_bandwidth_5g": "4803", - "guest_wifi_status_5g": "601037", - "guest_wifi_name_5g": "Wifi-Guest", - "guest_wifi_mac_addr_5g": "AA:BB:CC:DD:EE:II", - "guest_wifi_channel_5g": "N/A", - "guest_wifi_security_5g": "401027", - "guest_wifi_ip_5g": "192.168.2.1", - "guest_wifi_subnet_addr_5g": "255.255.255.0", - "guest_wifi_dhcp_server_5g": "192.168.2.1", -} diff --git a/tests/components/vodafone_station/snapshots/test_diagnostics.ambr b/tests/components/vodafone_station/snapshots/test_diagnostics.ambr deleted file mode 100644 index c258b14dc2d..00000000000 --- a/tests/components/vodafone_station/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,43 +0,0 @@ -# serializer version: 1 -# name: test_entry_diagnostics - dict({ - 'device_info': dict({ - 'client_devices': list([ - dict({ - 'connected': True, - 'connection_type': 'wifi', - 'hostname': 'WifiDevice0', - 'type': 'laptop', - }), - ]), - 'last_exception': None, - 'last_update success': True, - 'sys_cpu_usage': '97', - 'sys_firmware_version': 'XF6_4.0.05.04', - 'sys_hardware_version': 'RHG3006 v1', - 'sys_memory_usage': '51.94', - 'sys_model_name': 'RHG3006', - 'sys_reboot_cause': 'Web Reboot', - }), - 'entry': dict({ - 'data': dict({ - 'host': 'fake_host', - 'password': '**REDACTED**', - 'username': '**REDACTED**', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'vodafone_station', - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': None, - 'version': 1, - }), - }) -# --- diff --git a/tests/components/vodafone_station/test_config_flow.py b/tests/components/vodafone_station/test_config_flow.py index 3a54f250871..0492d32070f 100644 --- a/tests/components/vodafone_station/test_config_flow.py +++ b/tests/components/vodafone_station/test_config_flow.py @@ -7,7 +7,7 @@ import pytest from homeassistant.components.device_tracker import CONF_CONSIDER_HOME from homeassistant.components.vodafone_station.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -124,9 +124,6 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" with ( patch( @@ -139,6 +136,15 @@ async def 
             "homeassistant.components.vodafone_station.async_setup_entry",
         ),
     ):
+        result = await hass.config_entries.flow.async_init(
+            DOMAIN,
+            context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id},
+            data=mock_config.data,
+        )
+
+        assert result["type"] is FlowResultType.FORM
+        assert result["step_id"] == "reauth_confirm"
+
         result = await hass.config_entries.flow.async_configure(
             result["flow_id"],
             user_input={
@@ -166,10 +172,6 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) ->
     mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA)
     mock_config.add_to_hass(hass)
 
-    result = await mock_config.start_reauth_flow(hass)
-    assert result["type"] is FlowResultType.FORM
-    assert result["step_id"] == "reauth_confirm"
-
     with (
         patch(
             "homeassistant.components.vodafone_station.config_flow.VodafoneStationSercommApi.login",
@@ -182,6 +184,15 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) ->
             "homeassistant.components.vodafone_station.async_setup_entry",
         ),
     ):
+        result = await hass.config_entries.flow.async_init(
+            DOMAIN,
+            context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id},
+            data=mock_config.data,
+        )
+
+        assert result["type"] is FlowResultType.FORM
+        assert result["step_id"] == "reauth_confirm"
+
         result = await hass.config_entries.flow.async_configure(
             result["flow_id"],
             user_input={
diff --git a/tests/components/vodafone_station/test_diagnostics.py b/tests/components/vodafone_station/test_diagnostics.py
deleted file mode 100644
index 02918d81912..00000000000
--- a/tests/components/vodafone_station/test_diagnostics.py
+++ /dev/null
@@ -1,51 +0,0 @@
-"""Tests for Vodafone Station diagnostics platform."""
-
-from __future__ import annotations
-
-from unittest.mock import patch
-
-from syrupy import SnapshotAssertion
-from syrupy.filters import props
-
-from homeassistant.components.vodafone_station.const import DOMAIN
-from homeassistant.config_entries import ConfigEntryState
-from homeassistant.core import HomeAssistant
-
-from .const import DEVICE_DATA_QUERY, MOCK_USER_DATA, SENSOR_DATA_QUERY
-
-from tests.common import MockConfigEntry
-from tests.components.diagnostics import get_diagnostics_for_config_entry
-from tests.typing import ClientSessionGenerator
-
-
-async def test_entry_diagnostics(
-    hass: HomeAssistant,
-    hass_client: ClientSessionGenerator,
-    snapshot: SnapshotAssertion,
-) -> None:
-    """Test config entry diagnostics."""
-    entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA)
-    entry.add_to_hass(hass)
-
-    with (
-        patch("aiovodafone.api.VodafoneStationSercommApi.login"),
-        patch(
-            "aiovodafone.api.VodafoneStationSercommApi.get_devices_data",
-            return_value=DEVICE_DATA_QUERY,
-        ),
-        patch(
-            "aiovodafone.api.VodafoneStationSercommApi.get_sensor_data",
-            return_value=SENSOR_DATA_QUERY,
-        ),
-    ):
-        await hass.config_entries.async_setup(entry.entry_id)
-        await hass.async_block_till_done()
-
-    assert entry.state == ConfigEntryState.LOADED
-    assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot(
-        exclude=props(
-            "entry_id",
-            "created_at",
-            "modified_at",
-        )
-    )
diff --git a/tests/components/voicerss/test_tts.py b/tests/components/voicerss/test_tts.py
index 776c0ac153a..1a2ad002586 100644
--- a/tests/components/voicerss/test_tts.py
+++ b/tests/components/voicerss/test_tts.py
@@ -36,8 +36,9 @@ def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None:
 
 
 @pytest.fixture(autouse=True)
-def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None:
+def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path:
     """Mock the TTS cache dir with empty dir."""
+    return mock_tts_cache_dir
 
 
 async def test_setup_component(hass: HomeAssistant) -> None:
diff --git a/tests/components/voip/conftest.py b/tests/components/voip/conftest.py
index cbca8997797..b039a49e0f0 100644
--- a/tests/components/voip/conftest.py
+++ b/tests/components/voip/conftest.py
@@ -14,9 +14,6 @@ from homeassistant.core import HomeAssistant
 from homeassistant.setup import async_setup_component
 
 from tests.common import MockConfigEntry
-from tests.components.tts.conftest import (
-    mock_tts_cache_dir_fixture_autouse,  # noqa: F401
-)
 
 
 @pytest.fixture(autouse=True)
diff --git a/tests/components/voip/snapshots/test_voip.ambr b/tests/components/voip/snapshots/test_voip.ambr
deleted file mode 100644
index 3cc64400419..00000000000
--- a/tests/components/voip/snapshots/test_voip.ambr
+++ /dev/null
@@ -1,7 +0,0 @@
-# serializer version: 1
-# name: test_calls_not_allowed
-  b'...'  (large binary audio snapshot payload omitted)
e\xff#\xff{\x00:\x02&\x03\xc6\x01\xee\xff\x10\xfe\xc8\xfc\x9f\xfc\xe9\xfc;\xfd\x8a\xfd\x89\xfd\xe2\xfde\xfc\xf1\xfc\xd8\xfc\xed\xfa\xb1\xfbn\xfe\xdc\xffM\x00\xed\x00\x8e\x00\xfe\xff\x05\xff\xf4\x00\x11\x03\xe7\x03\xe0\x03\xa8\x037\x03\x92\x01b\x00\xb1\x01\x8f\x03\xcc\x03\xa7\x04^\x044\x03\xc0\x01i\x00\xd3\xff\x92\x02\x94\x03E\x06\x1c\x06|\x03P\x03L\x00m\x00\x1c\x03i\x06\x04\x06s\x05W\x02l\x00\xce\x01\xdd\x02H\x03\x95\x01\x1a\xffA\xfd\xdd\xfe\xe4\x00\xf9\x00\xa0\xff\xce\xfc\xd1\xfb\xba\xfb+\xfb\x9c\xfbe\xfc\xe5\xfc\xf5\xfc\\\xfc\x04\xfa\xa8\xf7\x1c\xf7\xe3\xf8\xf9\xfb\xdf\xfd\xe1\xfd\xa2\xfb\xa4\xf7\x05\xf7\xe1\xf8\xc7\xfaL\xfd\x92\xfez\xfe\xb8\xfch\xfa\xc7\xf9\xa5\xf92\xfb\xf7\xfdh\x00V\x01\xae\xfe\xd6\xfb\xc0\xfa4\xfc:\x00\x95\x02^\x03G\x02D\xff\xef\xfd_\xfd\xaa\xff\xd2\x01I\x02\x8b\x04\x9d\x03o\x00\x0b\xfe4\xfd\x10\xfe1\x01\x90\x04;\x06\xfb\x03\xe5\xfe\xba\xfb8\xfbi\xfe\x0f\x04\xfd\x06\xc6\x04\xcb\x00N\xfe\xae\xfd\xa4\xff\x02\x01\xbc\x01O\x02\xb7\x01\n\x01+\xffL\xfes\x00\xc8\x02\x81\x00\x19\xfe\xc6\xfd_\xfe\x92\x01\x8e\x02Z\x00\x9b\xfdB\xfb\xa5\xfb\xa3\xfd\xd2\xfe\xf0\xfe~\xfd\xfd\xfa\xb7\xf9\x80\xf9\xcd\xf9\x8a\xfa\xa9\xfbq\xfb\xac\xfaZ\xfa\t\xfbO\xfcP\xfdP\xfe\x91\x01e\x06\xb0\n\x16\x0e\xdd\x10R\x12\xa3\x13g\x15 \x17\xdf\x18&\x1as\x1a<\x19R\x17\x8c\x15q\x14\xf1\x13\x92\x13\xd0\x11\xa7\x0e@\x0b\xee\x07\xf2\x04\x06\x02\x16\xff\xa7\xfb\xbe\xf8A\xf6\x90\xf3\x10\xf1\x12\xef\xe2\xed_\xedm\xed\xa6\xed\x99\xed*\xed\xfa\xec)\xed\xd1\xed\x04\xef\x11\xf0\xf7\xf0\xc8\xf1\xbd\xf2!\xf4\xe6\xf5\xd6\xf7\xe0\xf9{\xfb\x81\xfc4\xfd\xc8\xfdB\xfe\xaa\xfe\xd2\xfe\x95\xfey\xfeb\xfes\xfe\xcf\xfe\xe6\xfe\x18\xffx\xff\xc6\xff/\x00\x81\x00.\x00\xe4\xff\xa2\xff|\xff\xec\xff\xdd\xff\x89\xff\xa6\xff\x17\x00 \x00\xd0\x00{\x01[\x01\x83\x01\xc6\x01\x14\x02\x97\x02"\x03J\x03\x9a\x03\x13\x04q\x04%\x05F\x06\x10\x07\xd6\x07\xbd\x08\x1a\t\xe3\t5\n\x9a\n\xfa\n\xbe\n\xa9\n\x87\n\x1e\nt\t\xb8\x08\xba\x07P\x06\xa3\x05\xd0\x04\xa5\x03V\x02h\x00I\xfe\xce\xfc\xcb\xfb<\xfa\x03\xf9v\xf7\x1a\xf6\xfb\xf4\xfc\xf46\xf5\x14\xf5\\\xf5\xbe\xf5\x07\xf6z\xf6\x1a\xf7\x19\xf7\xb1\xf7\x9f\xf8q\xf9S\xfa\x98\xfaa\xfa\xd4\xfaz\xfc\xb1\xfd\xac\xfe\xae\xfeH\xfe\x84\xfe\x91\xff\xee\x00/\x01(\x00\r\xff\x8b\xfe\x0f\xff\xd4\x00)\x01\x99\xff_\xfeD\xffI\x00\xc9\x00U\x00\xca\xfe[\xfe\xa7\x00\xc9\x02\x89\x03]\x02*\x00K\x01\xc8\x02\xc6\x02\xe7\x01`\x01\x85\x01\x10\x02\xf6\x01y\x000\xff\xee\xfe\xe8\xffo\x00P\x00\xde\xff\x04\xff\xe8\xfe\xa0\xff\x86\x00R\x02\x1a\x05\xbf\x07\x19\n\xcc\x0bs\r\xe6\x0fl\x12%\x14\x8e\x14\x15\x14\n\x13v\x12\xcd\x12\x14\x13\xd3\x126\x11\x01\x0f\x18\x0e\xef\r\x18\r\x15\x0bC\x08O\x059\x02\r\xff\xcf\xfb\xac\xf8r\xf5=\xf2\xe0\xef\x06\xee&\xec1\xea\xfc\xe8s\xe9\x01\xeb\xbc\xebQ\xeb"\xebB\xec\xb0\xeeT\xf1\'\xf3)\xf4\x06\xf5S\xf6\xbf\xf8\xed\xfbw\xfe\xd1\xff\xa1\x00\xff\x01\xce\x03\x07\x05\x1e\x05\xde\x04"\x05=\x05\x89\x04$\x03\xa3\x01\x9b\x00\xfa\xfft\xff\xb5\xfe\xbd\xfd\x99\xfc\xd1\xfb\xab\xfb\xbd\xfb\x8d\xfb\xec\xfa\\\xfa\\\xfa\x81\xfa^\xfam\xfa\xab\xfa\x05\xfb\xfb\xfbm\xfc\xae\xfc\xaf\xfd\xc0\xfe\xc5\xff\'\x01\xdd\x01\xca\x01\x82\x02m\x03\xc2\x04\xb9\x053\x06\xb9\x06\xae\x07\xc0\x08\x9b\tE\n.\n\xb3\n`\x0b2\x0cO\x0c\xaa\x0b\xfa\tG\t6\t\xb4\x08\xb9\x07^\x05\xab\x03\x86\x03_\x03\x01\x02\x1f\xff|\xfco\xfb\x86\xfb\x80\xfbU\xf9\xa3\xf6\xb7\xf4\xfe\xf4\x9c\xf5\xe6\xf44\xf4\x82\xf4\x17\xf5\x81\xf5\x1d\xf6V\xf6m\xf7*\xf9e\xfa\x06\xfb\xc2\xfb\xc3\xfb\xbc\xfc\xcb\xfeB\x01\\\x02\xcd\x00n\xffj\x00,\x04\xfb\x05\x9b\x04\xe9\x01%\x00\x8f\x00\xfc\x01U\x03\xb3\x02\xcf\x00?\xfe+\xfc(\xfc\xf7\xfd\xf5\xfe\xf3\xfd\xd5\xfb`\xfa\xfb\xf9\x96\xfa^\xfbs\xfbN\xfbJ\
xfbD\xfb\xb3\xfb;\xfc\x0c\xfd\x80\xfeH\xff\x16\xffz\xfeG\xfeh\xff\x82\x01\xeb\x02\xa7\x02b\x02U\x04\x9d\t\xc8\x0f\x1e\x13\x17\x136\x12[\x14V\x19\xc3\x1c\xef\x1b\xb7\x18\xbf\x16\x9c\x17T\x19G\x19y\x17\x0c\x15@\x13[\x13_\x143\x13\x19\x0f\x16\n\xeb\x06\xca\x057\x03\xf2\xfd\xb5\xf88\xf5\xdd\xf2\xfb\xef\x81\xec\xf1\xe9\xfc\xe8\xd2\xe8\xa0\xe8\xa7\xe8\xd3\xe8\xd9\xe8>\xe9k\xea\xfb\xeb\x03\xed%\xed\x11\xee\xc3\xf0N\xf3T\xf4\xb2\xf4F\xf6\x80\xf9K\xfc$\xfdT\xfd\x15\xfe(\xff\xf8\xff\x99\x00\xe1\x00m\x00<\xff\xa8\xfe%\xff\xb0\xffR\xffh\xfeu\xfe5\xff|\xff\xf3\xfex\xfe\xb0\xfe\x05\xff\xe7\xfek\xfeG\xfeU\xfeL\xfe\x87\xfe\n\xff\xc5\xff-\x00?\x00\x02\x01\xe1\x012\x02\x80\x02{\x02\xe6\x02\xeb\x03"\x04\x19\x04\x92\x04h\x05\x16\x06\x9e\x06\xb7\x06\xdf\x06\xce\x077\x08E\x08\x17\x08r\x07\xa8\x06\x94\x06%\x07~\x07!\x07\xa3\x05\xf2\x04\x80\x053\x06\x0c\x06b\x05!\x04\xf6\x02\x91\x01C\x00\xc3\xff\xa1\xff;\xff\x1e\xfe%\xfc\xdc\xf9\x18\xf8\x8f\xf7\xe9\xf7O\xf85\xf8\x89\xf7\x06\xf7e\xf6\xf7\xf5\x04\xf5Y\xf4\x0b\xf6\x1c\xf9\xee\xfa*\xfa\xe0\xf7\xba\xf6;\xf9\x14\xfdg\xff;\xffI\xfe\x06\xfe\x99\xffy\x01\xa1\x02\x8b\x02\xc5\x00N\x00F\x01/\x03v\x03\xc4\x01\xc3\xffW\xffB\x00\x90\x00+\x00\x7f\xff,\xff\x14\xff\x91\xfe\xc6\xfcR\xfcK\xfdb\xfem\xffS\xff\xb9\xfd\xf4\xfc\x9c\xfd\xee\xfe\x15\x00\xee\xff\xe0\xfe\xff\xfe\xfc\xff\xa1\x01\x1c\x02\x8b\x00\x8e\xff5\x00\x05\x02\xc6\x02\x8e\x01\x1f\x00w\x00\xb4\x02J\x05\xdc\x07\x19\nw\x0b\xe4\x0bh\x0c\x1c\x0e\xb9\x10\xc8\x11\x8c\x10\xa5\x0f\x18\x10W\x11\xb6\x11\x0e\x11%\x11\x06\x12\xcd\x11\xaa\x11\xb0\x11f\x10\x9d\r\xc1\t\x9a\x07"\x07\xb1\x04e\xff&\xfb\x11\xfa0\xfa\xfd\xf7\xcb\xf3\x9f\xf0\xdc\xef*\xf0\x08\xf0\xc7\xefv\xef,\xee(\xedS\xee\x86\xf0R\xf1\x1c\xf0\xf2\xef\xdb\xf27\xf5h\xf4\xa5\xf2\xe2\xf3X\xf8\xa8\xfb(\xfb\x91\xf9\x96\xf9\x92\xfa\x86\xfb\xb9\xfcv\xfe*\xff\x82\xfd.\xfc\n\xfd\xcb\xfe\xfd\xfe\x96\xfd\x81\xfd\xe1\xfet\xff\x8a\xfe!\xfeP\xff\x95\x00D\x00!\x00\x8f\x00\xb9\x00\x98\x00\x00\x00\xd3\x00\xe1\x01\xf1\x00\xb7\xff)\x01\xa3\x02)\x03\x87\x02\xe0\x00\xa1\x01\xcc\x041\x05\x87\x04\xad\x04\xfc\x04O\x05\x1b\x06M\x06\x9c\x06c\x07\x1b\x07\xfc\x06\xcd\x079\x07d\x06\x12\x07s\x08\x10\n\xba\t\xd9\x06_\x05\xb6\x05\xaa\x05\xbb\x05\x05\x05\xa5\x03Y\x03\xb1\x01@\x01\xa0\x00\xd8\xfe\x11\xfe\x93\xfe\xc6\xff\x17\x00\xae\xfd\xa1\xfa\xa9\xf9^\xfb\x17\xfe]\xfe\x95\xfcc\xfa\xf3\xfay\xfc:\xfd\xcf\xfb\xb6\xf9\x8f\xf91\xfb\xbd\xfc\x86\xfc\xa2\xfa}\xf9\xfa\xf9\xc1\xfb\xd6\xfc\x9c\xfbS\xfa\xab\xf9W\xfap\xfb"\xfbp\xfa\xdd\xf9&\xfa\xfa\xfa\xfe\xfa\xa1\xfa\xda\xfa\xe3\xfa-\xfb\x91\xfbJ\xfb\x00\xfbl\xfa\xee\xf96\xfa\xd1\xfa\xf4\xfa\x1c\xfb\xd7\xfaM\xfb\xe3\xfb#\xfc\xe4\xfc\xd8\xfc\x9b\xfc:\xfd\xbb\xfd0\xfeK\xfe\x08\xfew\xff\xdc\x00\xb0\x00\xf8\xff,\x00Y\x01-\x03\xc2\x030\x03\xda\x02+\x02Q\x032\x07\x85\n\x95\x0b\xd8\x0fL\x1d\x0e/X7;2\\-\x9b4\xc5?\xe4>\xc4/\xaa 
\x05\x1c\x0c\x1b\n\x14\x00\x07w\xfa\xbe\xf2\x12\xefk\xed6\xebT\xe4\x18\xdc\xb8\xd8\x00\xdb\xe4\xdc\xef\xd8S\xd4_\xd6\x83\xdc\xc5\xe0\x97\xe3\xb4\xea\xcc\xf5`\xfe<\x02^\x07\x9b\x10A\x18\xac\x18\x96\x14\xb8\x12\xa7\x13l\x13\x10\x0f\x1f\x08\xc2\x01\xac\xfdT\xfb\xe4\xf8\x99\xf4z\xee~\xe8\n\xe4\xca\xe1I\xe0r\xdd\xc4\xd9?\xd8P\xda\xe9\xde(\xe4\x89\xe9X\xef\x14\xf6\xee\xfd\xfc\x06\x83\x0f\xf9\x14i\x17t\x19\xa9\x1c3\x1fU\x1e\xf7\x19Y\x15\xbd\x12;\x11G\x0eK\ts\x04\x00\x01"\xfe\x91\xfa\xa2\xf6\xc1\xf3\x9e\xf1\x14\xef\xce\xec\xb8\xecF\xefS\xf2o\xf4\x19\xf7\x17\xfc\x9b\x02\x95\x08\xf6\x0c\x07\x10\x8e\x12\x7f\x15x\x18\xfd\x19\x9b\x18]\x15\xde\x13\x82\x13\xf9\x11W\rV\x07\xe1\x03\xe9\x00\x8d\xfcN\xf7]\xf2\x7f\xef\n\xecq\xe8\x0f\xe7T\xe7B\xe9\xf3\xe9\xf3\xea\xf5\xef\xb8\xf7{\x00T\x06T\t\xc3\r\x05\x14\xfe\x1a|\x1d\xdb\x1a\xe3\x17\x18\x18\xcd\x19\x7f\x17h\x10\xb5\tu\x06\xde\x04\x0e\x012\xfa~\xf4\x12\xf1m\xee\xff\xeaC\xe7\xad\xe5|\xe5\n\xe5\xe1\xe45\xe6\xf6\xe9\xbb\xee\x01\xf2{\xf4\xa2\xf7\xfd\xfb\x94\x00g\x03\xb5\x03\xd9\x032\x05\x89\x07P\t\xd2\x08\xd3\x07\xf9\x07\xa3\x08\x93\x08\x11\x07\xa3\x04=\x02\xbf\xff\xfd\xfc^\xfa\xd4\xf7\xd8\xf5\xb3\xf3+\xf2\x1f\xf2U\xf3\xc4\xf4s\xf5]\xf5\x0e\xf6\xad\xf8|\xfb4\xfdz\xfd\xbb\xfe\xbc\x01\t\x05T\x077\t\x83\x0b\x17\r\xae\r\xa3\x0e7\x103\x0e\x0e\nO\x0bS\x17\xc1&\x10,\x8d\'\xba&o1\xbf<\xbe:\xd7,\x91\x1f\x1b\x1b\xe7\x17\x87\r/\xff\x91\xf46\xf0\xc8\xeb\xc8\xe3\x90\xde\xfd\xe0E\xe5\x92\xe2\xb1\xdb9\xdbP\xe4\xf5\xeb\xe1\xe9\'\xe4g\xe6\xeb\xf0Q\xfa>\xfdG\xfe\xc5\x02\xf2\t\xf1\x0f\xd7\x133\x16\x84\x15U\x11\x07\x0c\x7f\x08/\x05}\xfe\xa5\xf4\xdc\xeb\xb8\xe6\xa8\xe4\x04\xe3\xa2\xe0[\xde\xfe\xdd_\xe0\xde\xe3\t\xe7W\xe9\x8d\xebm\xee\x81\xf2\xef\xf6\x14\xfbM\xff \x04\x84\x08.\x0c\xc7\x10\x9c\x16\x96\x1a\xa4\x1a\xf9\x18\xf4\x18\n\x1a"\x18\xeb\x11^\n\xe2\x04V\x01A\xfd\xa7\xf7X\xf2T\xef\xa7\xeeP\xef8\xf09\xf1\xbc\xf2\xfd\xf4\x96\xf7E\xfa\xff\xfc\xb2\xff\xda\x01\xa5\x030\x06\xb8\t0\r\xd4\x0f\xe3\x11\t\x14=\x16\xb9\x17\x18\x18\x06\x17F\x14\xab\x10\xfc\x0c\x17\t<\x04\\\xfe\x87\xf8\xea\xf3j\xf0c\xed\x00\xeb^\xe9\xfc\xe8\xf5\xe9\xe8\xebZ\xee\xd2\xf0\xf8\xf3\xd0\xf7\x1b\xfc\x98\xff\xa1\x02\xd5\x07x\x0e\xa3\x13X\x15\xc3\x15\x88\x18\x19\x1b\x07\x1a\xd3\x15i\x11p\x0e{\n\xb2\x04x\xfe\xae\xf8*\xf4\x8e\xf0\xa0\xedc\xeb\x08\xea\xe2\xe9\xad\xea\x15\xec\xdb\xed\t\xf0\xe4\xf2u\xf5\xef\xf7\xa1\xfa\xc4\xfd`\x01\x1f\x04\xda\x05n\x07@\t\x1c\x0b\xc2\x0b\xc0\ni\ty\x08V\x07n\x05\x85\x02\xdc\xff\x14\xfe)\xfcW\xfa\xba\xf8R\xf7\xc5\xf6 
\xf6\x94\xf5\xe4\xf53\xf6\xee\xf6\xa4\xf7!\xf8t\xf9\xe1\xfa\x8b\xfc6\xfe/\xff\xbd\x00\x86\x027\x04\xa8\x05\x86\x06"\x07\xa0\x07\xb7\x07~\x07\x07\x07\xc2\x05;\x04p\x02\xe5\x00\x87\xff\xaa\xfd-\xfb\xbb\xf9\xc1\xf9\xbd\xf9\xd6\xf7\xf5\xf4s\xf5\xb1\xfb\xc2\x02b\x05\xa0\x07\x04\x11a!\xef,\xef,\xa7)G.S8h:\xa2/\xc7!"\x1bB\x19\xb5\x12\x1c\x05E\xf7\x0c\xef\xe2\xeb_\xe9\xce\xe4P\xdf\x8e\xdc\xd2\xddw\xe0f\xe17\xe1\xbe\xe2\x80\xe6d\xe9\x06\xeb\x7f\xee\x98\xf5_\xfc\x0c\xff6\x00\x87\x05\x95\x0eA\x14\xbb\x12\x14\x0fZ\x0f\xad\x11\n\x10|\x087\x00N\xfb9\xf8\x03\xf4&\xee\x03\xe9\xb4\xe6A\xe6\xc9\xe5\x82\xe5\x9c\xe60\xe9\x8d\xebX\xed\x95\xef\xf3\xf2\x1b\xf7\xf2\xfah\xfdj\xff\xf5\x02f\x08\x03\r\xc2\x0e\x9d\x0f\xff\x11+\x15]\x16{\x14D\x11\x9c\x0eN\x0c\xcd\x08\xb6\x03\x87\xfe\xa6\xfa\xe2\xf7\x8b\xf5\xaf\xf3\xee\xf2\xa2\xf3O\xf5;\xf7v\xf9p\xfc\xdb\xff\xe7\x02\x07\x05\xd4\x06\x19\t[\x0b\xdc\x0c~\r\xd3\r\x84\x0eo\x0f\xf8\x0f\x96\x0fM\x0e\xd4\x0cy\x0b\xb3\t\xb9\x06\xb0\x02\x95\xfe$\xfb\xdf\xf7L\xf4\xe4\xf0E\xee\xd8\xece\xec\xa9\xec\xbd\xed\xb4\xef\x83\xf2y\xf5\x7f\xf8\xd8\xfb<\xff<\x02(\x04\x9e\x05\x15\x07c\x08L\t5\t\r\tT\t\xb8\t\xb8\t\xf1\x08\xa8\x08/\t5\t\x1b\x08\x13\x06\x18\x04\xb1\x02\xbf\x00\xd8\xfd\xad\xfa\xf4\xf7e\xf6.\xf5\xda\xf3\xfb\xf2\xec\xf2 \xf4\x0e\xf6\xc5\xf7\x86\xf9\x81\xfb\xcd\xfd<\x00\x07\x02:\x03r\x04z\x05*\x067\x06\xa3\x05,\x05\x9f\x04\xd4\x03\xd2\x02\xb9\x01\xdc\x00\xe4\xff\xe3\xfe\xc8\xfd\x03\xfd}\xfc\x03\xfc~\xfb\xea\xfa\xcd\xfa\x1d\xfb\x81\xfb\xa5\xfb\xb4\xfb\xef\xfbh\xfc\xdb\xfc\t\xfd\xe4\xfc\xf4\xfc\x17\xfd8\xfdI\xfd\'\xfd8\xfdr\xfd\x9a\xfd\x0c\xfef\xfe\xa7\xfe\xfc\xfe\x1c\xff|\xff\xbe\xff\x83\xffN\xffC\xffD\xffU\xff+\xff^\xff)\x00\xdb\x00\xec\x00/\x01B\x02\xb1\x03\xf1\x03\xe9\x02I\x04\xdf\tb\x10s\x13\x18\x14\x8c\x17X\x1fv%\x82%\x8d"m!\x07"\xc1\x1f\x0e\x19q\x11/\x0bx\x05R\xff\xdd\xf8\x88\xf3\t\xef\\\xeb=\xe9z\xe8\x00\xe8l\xe7\x00\xe8\xa1\xe9\xcc\xea5\xebA\xec\xcb\xee\x15\xf1\x16\xf2X\xf3\xae\xf6\xe9\xfa\xdc\xfd\x9b\xff\x82\x02\xfc\x06\x83\nm\x0b%\x0b\xab\x0b(\x0c\x97\n\xec\x067\x03%\x00\xdd\xfc\xcd\xf8\x93\xf4r\xf1\x87\xefI\xeeN\xed\n\xed\xd9\xedv\xef:\xf1\x0e\xf3\x00\xf5%\xf7\x99\xf95\xfci\xfe\xfb\xff\xb6\x01M\x04\xfb\x06\xb2\x08\x07\n\xdb\x0b\x1e\x0e\xc6\x0f;\x10\xdf\x0fn\x0f\xc5\x0e=\r\x82\n\x1c\x07\xe2\x03\x13\x01G\xfee\xfb\xf3\xf8\xb1\xf7\x95\xf7\xe1\xf7G\xf8N\xf9L\xfb\x9e\xfdo\xff\xab\x00\xf8\x01b\x03_\x04\xad\x04\xa9\x04\xab\x04\xe7\x04J\x05u\x05b\x05;\x05h\x05\xcb\x05\xb6\x05\xef\x04\xbf\x03\xa0\x02J\x01e\xff\x13\xfd\xbf\xfa\xc6\xf8+\xf7\xd8\xf5\xd8\xf4q\xf4\xea\xf4\xe0\xf5\x0f\xf7\xa7\xf8\xb1\xfa\x04\xfd\xf9\xfe\x9a\x00,\x02\xba\x03\x0e\x05\xbd\x05\'\x06\x9d\x06)\x07{\x07\x82\x07\x06\x08\x0c\t\xe1\t\xdf\t6\t\x8e\x08\xf7\x07\xd1\x06\xb1\x04\xeb\x01\x0b\xffd\xfc\xd3\xf9O\xf7\xf7\xf46\xf3Z\xf2`\xf2\xed\xf2\xcf\xf3 
\xf5\t\xf7b\xf9\xb6\xfb\xd2\xfd\xcf\xff\xb6\x01k\x03\xb4\x04\x89\x05\x18\x06q\x06\x83\x06T\x06\xd7\x053\x05r\x04d\x03/\x02\xfa\x00\xa4\xffJ\xfe\xb0\xfc\xe2\xfaV\xf9\xee\xf7\xc3\xf6\xc0\xf5\xc0\xf4\x1e\xf4\xe5\xf3\xf3\xf3]\xf4\x00\xf5\xd6\xf5\x00\xf7F\xf8\xc3\xf9k\xfb\r\xfd\xb6\xfeP\x00\'\x02\xe5\x03Z\x05j\x06\x9b\x07\xc0\x08\x82\t\x9e\tQ\tn\t.\tN\x08\xb9\x06G\x05\x98\x04p\x03j\x01\xaf\xff\x04\xff\xe0\xfet\xfdC\xfb\x9d\xfb\x8e\xff\x07\x04\xa5\x05>\x06?\n\xac\x11q\x17\xe2\x18\xff\x18\x8b\x1b\x05\x1f\x91\x1f\x8b\x1c\xc5\x18\xff\x15\xa9\x12\xc4\r\x08\x08\xd2\x02A\xfe\xe5\xf9\xd7\xf5Q\xf2>\xef\xeb\xec\xca\xeb\xf9\xea\x97\xe9=\xe8^\xe8\xc4\xe9k\xea\xfc\xe9X\xea\xe9\xec\x10\xf0\xd5\xf1\xec\xf2\xb2\xf5C\xfaO\xfe\x9f\x00\x85\x02\x81\x05\xa3\x08%\n\xf5\tx\t3\t2\x08\xf5\x05\x15\x03\\\x00\x05\xfe\xab\xfb\x19\xf9\xcf\xf6A\xf5|\xf4\xfc\xf3\xae\xf3\xb1\xf3L\xf4\x99\xf5.\xf7\x83\xf8\x96\xf9 \xfb\x99\xfd\xf2\xffV\x01\x84\x02p\x04\xf0\x06\xe3\x08\xf9\t\xc6\n\xea\x0b\n\ru\r\xec\x0c\xfe\x0b#\x0b:\n\xc3\x08\xa2\x06`\x04\xba\x02z\x01\xd0\xff\xc1\xfdh\xfcV\xfc\x84\xfc\xf6\xfbI\xfb\xc6\xfb\x10\xfd\xec\xfd\xeb\xfd\xfb\xfd\xbc\xfe\xce\xffp\x00l\x00R\x00\x95\x00@\x01\xb0\x01u\x01)\x01d\x01\xf4\x01\xed\x01 \x01\x8c\x00\x8b\x00_\x00\x80\xffT\xfe\xa7\xfd\x84\xfd@\xfd\x93\xfc\xfc\xfb\x1b\xfc\xd4\xfca\xfd\x97\xfd\x06\xfe\x06\xff-\x00\xf7\x00z\x01\x18\x02\xe5\x02\x8c\x03\xde\x03\xe7\x03\x10\x04H\x04[\x04A\x04\x0b\x04\xea\x03\xb4\x03g\x03\r\x03\xa7\x020\x02\xac\x01\x11\x01\x8b\x00.\x00\xd6\xff^\xff\xe6\xfe\xe3\xfe\x1e\xffB\xff\x10\xff\xf7\xfeO\xff\xa0\xff\x9a\xffA\xff\x02\xff\xf7\xfe\xe8\xfe\x93\xfe\x0f\xfe\xc1\xfd\xb0\xfd\x9a\xfdM\xfd\x08\xfd\x12\xfd6\xfdD\xfd"\xfd\x0f\xfd7\xfdX\xfd!\xfd\xb7\xfc\xac\xfc\xe6\xfc\xf0\xfc\xba\xfc\x95\xfc\xc5\xfc&\xfdi\xfd\x9f\xfd\x10\xfe\xb4\xfeG\xff\xa3\xff\x0e\x00\xc7\x00\x84\x01\xdd\x01\xf0\x018\x02\x99\x02\xba\x02}\x024\x02)\x029\x02\x08\x02\x9c\x01Y\x012\x01\x06\x01\xc7\x00\x8e\x00e\x007\x00\x00\x00\xc7\xff\x9c\xffw\xffa\xffW\xff@\xff0\xffC\xffw\xff\x8b\xffv\xffw\xff\x9d\xff\xcd\xff\xb9\xff\x8a\xffu\xff\x9c\xff\xab\xff{\xff4\xff\x0b\xffB\xff`\xffj\xffb\xffb\xff\x98\xff\x96\xffx\xff\x7f\xff\x81\xffd\xff\x02\xffh\xfe\x1e\xfe+\xfe\x08\xfeU\xfd\xab\xfc&\xfd\xde\xfe\x0c\x01\xd6\x02<\x04F\x06^\t\xc2\x0c\xb5\x0e\xd2\x0e\xf1\x0eI\x10\x8b\x11w\x10S\r\xf2\nx\n\x9a\to\x06\x96\x02\xee\x00\xf4\x00\xc4\xff\x11\xfdI\xfb\x97\xfb\x07\xfc\xbd\xfa\xc1\xf8/\xf8\xcd\xf8\xb3\xf8R\xf7\xe9\xf5\xde\xf5\xc5\xf6\x1c\xf7\x87\xf6Z\xf6\x8b\xf7x\xf9\x98\xfa\xc3\xfa\x8f\xfbf\xfd\x0c\xfft\xff/\xff|\xff9\x00C\x00N\xffI\xfe\xfb\xfd\xe1\xfdM\xfdf\xfc\xda\xfb\xef\xfb0\xfc\'\xfc\xfc\xfb.\xfc\xd3\xfco\xfd\x8f\xfd\x8b\xfd\xf2\xfd\x9d\xfe\xec\xfe\xc8\xfe\xc5\xfe?\xff\xc9\xff\x0b\x00*\x00u\x00\x1e\x01\xcd\x01S\x02\xb3\x02\x17\x03\xa7\x030\x04Y\x04;\x04\x1c\x04@\x04Q\x04\xe9\x036\x03\xc9\x02\xd3\x02\xcb\x02W\x02\xd8\x01\xda\x01!\x02:\x02\x11\x02\x04\x023\x02X\x02P\x02$\x02\xf9\x01\xec\x01\xee\x01\xca\x01\\\x01\xe3\x00\xc8\x00\xe3\x00\x9e\x00\xf6\xffo\xff_\xfff\xff\xf5\xfeI\xfe\xee\xfd\xf7\xfd\xeb\xfdx\xfd\x08\xfd\x02\xfd1\xfd\x1e\xfd\xd1\xfc\xc4\xfc$\xfd\x8a\xfd\xa6\xfd\xc1\xfd$\xfe\xba\xfe,\xff\x83\xff\xe0\xffF\x00\x96\x00\xd2\x00\x0e\x015\x018\x01,\x012\x01A\x01B\x016\x010\x01H\x01l\x01\x89\x01\x8e\x01}\x01w\x01\x85\x01x\x01\'\x01\xad\x00n\x00C\x00\xea\xffo\xff!\xff*\xffM\xffG\xffY\xff\xac\xff+\x00\xa7\x00\xfc\x00Z\x01\xdd\x01b\x02\xd0\x02\xf4\x02\xcf\x02\xbc\x02\xdb\x02\xc0\x02,\x02w\x01\x18\x01\xf8\x00|\x00\x80\xff\xcd\xfe\x96\xfeO\xfe\x87\xfd\xa5\xfcN\xfcB\xfc\xfc\xfb\x88\xfbQ\xfb\xa6\xfb\x0e\xfc\x1f\xfc*\xfc\x9e\xfcl\xfd\x0
5\xfej\xfe\x13\xffB\x00\x85\x01a\x02\xfd\x02\xc6\x03\xd3\x04d\x05Y\x05\x13\x05\xf5\x04\xd8\x049\x04=\x03T\x02\xbd\x01\x1a\x019\x00Z\xff\xc4\xfe\x8e\xfeN\xfe\xd8\xfdo\xfdo\xfd\xac\xfd\xb5\xfd}\xfdj\xfd\xa7\xfd\xe9\xfd\xe5\xfd\xb8\xfd\xca\xfd\x15\xfe@\xfe1\xfe%\xfeF\xfes\xfek\xfeB\xfe4\xfe1\xfe$\xfe\xec\xfd\xae\xfd\x98\xfd\x80\xfdt\xfdO\xfd*\xfdO\xfdV\xfdp\xfdc\xfdf\xfd\xa9\xfd\xb6\xfd\xc8\xfd\xd0\xfd\xea\xfd:\xfe^\xfem\xfe\x91\xfe\xba\xfe\xec\xfe\xfd\xfe\xf9\xfe\n\xff)\xffH\xffT\xff[\xffV\xffm\xff\x91\xffz\xffg\xff\xa9\xff(\x00\xaf\x00\xf9\x00\xbb\x01Q\x03\t\x05e\x06\x8f\x07\'\t\x1e\x0bf\x0c\xfe\x0c\xa8\r\x83\x0e\xe8\x0e^\x0e\xeb\r\t\x0e\xd7\r\xdc\x0c\xc7\x0b\x9f\x0b\x96\x0b}\n\xd5\x08\xfa\x07\xa0\x07R\x06\xc1\x03\x7f\x01b\x00D\xff\x00\xfdR\xfa\xdc\xf8\x88\xf8\xd4\xf7I\xf6,\xf5{\xf5Z\xf6k\xf6\xd5\xf5\xc5\xf5\x9d\xf6U\xf7\x1f\xf7\x90\xf6\x98\xf6\t\xf76\xf7\xe1\xf6\x8e\xf6\xc6\xf6O\xf7\xc4\xf7\xf1\xf7/\xf8\xd5\xf8\xc2\xf9\x86\xfa\xfe\xfak\xfb+\xfc\x14\xfd\xa9\xfd\xea\xfdE\xfe\xe3\xfek\xff\x9f\xff\xc0\xff%\x00\x9d\x00\xd7\x00\xe6\x00\x1d\x01\x8b\x01\xe6\x013\x02{\x02\xdc\x02P\x03\xba\x03\x00\x043\x04j\x04\xa0\x04\xb6\x04\xac\x04\x95\x04\x82\x04\x80\x04o\x04J\x04\'\x04\x1b\x04$\x042\x043\x046\x04?\x048\x04\x13\x04\xd5\x03\x91\x03F\x03\xe9\x02\x83\x02\x02\x02w\x01\xf0\x00y\x00\xfb\xffh\xff\xdf\xfeo\xfe\x16\xfe\xaa\xfd1\xfd\xc4\xfc}\xfcN\xfc\n\xfc\xc6\xfb\xb5\xfb\xc6\xfb\xc8\xfb\xb8\xfb\xe4\xfb;\xfcw\xfc\xa0\xfc\xef\xfc[\xfd\xa8\xfd\xbc\xfd\xef\xfd>\xfel\xfeo\xfeo\xfe\xa5\xfe\xd4\xfe\xdd\xfe\xf7\xfeK\xff\xb0\xff\x05\x00_\x00\xcb\x00M\x01\xb4\x01\x15\x02{\x02\xc6\x02\xf0\x02(\x03p\x03s\x034\x03\x18\x03;\x03A\x03\xfc\x02\xd3\x02 \x03\x9b\x03\xb3\x03d\x03G\x03\xa2\x03\xf4\x03\xb5\x037\x03,\x03q\x03F\x03\x9c\x02\x1d\x022\x02B\x02\x99\x01\xbe\x00.\x00\xeb\xffS\xffB\xfea\xfd\xd4\xfcg\xfc\xbe\xfb\xe9\xfao\xfa;\xfa\x1b\xfa\xe8\xf9\xa8\xf9\xa8\xf9\xd4\xf9\x01\xfa\x16\xfa\x1b\xfa]\xfa\xc5\xfa\x18\xfbJ\xfbw\xfb\xe6\xfbk\xfc\xc2\xfc\x02\xfdX\xfd\xe2\xfdi\xfe\xc0\xfe\x08\xffs\xff\x03\x00\x84\x00\xce\x00\x17\x01\x8d\x01\x01\x02E\x02E\x02\\\x02\xbb\x02\xe3\x02\xcb\x02\x80\x02w\x02\xab\x02\x88\x024\x02\xeb\x01\xe8\x01\x05\x02\xbc\x01S\x016\x01>\x01#\x01\xb8\x00Q\x00@\x00@\x00\x0e\x00\xc1\xff\x89\xfft\xffx\xffa\xff\x1e\xff\x0c\xff7\xffm\xffy\xffN\xffu\xff\xf0\xff>\x00`\x00\xcd\x00\xe1\x01"\x03\xdb\x03i\x04\x80\x05\xce\x06w\x07v\x07\xd5\x07\xbc\x08\xfd\x08L\x08\xc4\x07N\x08\xbc\x08\xf8\x07\xef\x06\x12\x07\xa8\x07\xf5\x06%\x05\x18\x04*\x04\x85\x03H\x01\x0e\xffK\xfe\t\xfe\xa5\xfc\x80\xfa\x85\xf9\xff\xf9=\xfaS\xf9n\xf8\xcd\xf8\xbc\xf9\xb1\xf9\xd3\xf8\x91\xf8C\xf9\xac\xf9*\xf9\x8d\xf8\xdd\xf8\x98\xf9\xda\xf9\xd4\xf94\xfa\x04\xfb\xb3\xfb\x10\xfcw\xfc\x12\xfd\x8c\xfd\xd4\xfd\x16\xfeT\xfer\xfe\x8f\xfe\xc0\xfe\xfd\xfe\x1e\xff8\xff\x85\xff\xe5\xff#\x00>\x00t\x00\xc1\x00\xd9\x00\xbf\x00\x9b\x00\xa4\x00\xb5\x00\x8d\x00@\x00&\x00M\x00q\x00P\x00F\x00\x97\x00\xed\x00\x0c\x01\t\x01=\x01\x9f\x01\xd1\x01\xcd\x01\xe2\x01"\x02c\x02y\x02\x81\x02\xad\x02\xeb\x02\x14\x03\x19\x03\x17\x03&\x03&\x03\x01\x03\xb7\x02o\x025\x02\xe7\x01\x87\x01#\x01\xd7\x00\x95\x00K\x00\xf2\xff\xb2\xff\x93\xffp\xff<\xff\x03\xff\xd3\xfe\xaa\xfeu\xfeH\xfe0\xfe\x18\xfe\x0c\xfe"\xfeF\xfeb\xfey\xfe\xb9\xfe\x07\xff2\xffV\xff\x92\xff\xcd\xff\xd0\xff\xb8\xff\xdb\xff%\x00)\x00\x01\x00+\x00\x98\x00\xcd\x00\x9c\x00\xb8\x00v\x01\x0c\x02\xd3\x01j\x01\xaf\x01H\x02$\x02`\x01\x17\x01u\x01{\x01\xa2\x00\xce\xff\xce\xff\x12\x00\xa4\xff\xc2\xfeb\xfe\x9e\xfe\xae\xfe=\xfe\xcf\xfd\xf0\xfd9\xfe(\xfe\xde\xfd\xe3\xfdI\xfel\xfej\xfe\x92\xfe\xda\xfe\x02\xff\x05\xff:\xffv\xff\x89\xff\x86\
xff\xa3\xff\xc5\xff\xab\xff\x8a\xff\x89\xff\x99\xff\x97\xffv\xffY\xffV\xffK\xff7\xff3\xff?\xff?\xff\x1b\xff\x1c\xffB\xff9\xff\xff\xfe\xe6\xfe\'\xffT\xff"\xff\xd9\xfe\xe4\xfe(\xff \xff\xcb\xfe\xa2\xfe\xdd\xfe\t\xff\xd6\xfe\x98\xfe\xb4\xfe\x08\xff\n\xff\xe8\xfe\xec\xfe2\xffr\xff}\xff\xa8\xff\xee\xff*\x00R\x00\x92\x00\xff\x00V\x01p\x01\xc8\x01\x91\x02K\x03\xc2\x03E\x04U\x05\x8e\x06\x0e\x07\xfd\x06\x86\x07\xbd\x08K\t\xa7\x08\x18\x08\xb1\x08|\t\x08\t\x00\x08\xf2\x07\xb8\x08\xac\x08`\x07N\x06c\x06\\\x06\x0c\x05\x14\x03\xc3\x01?\x01U\x00\xb2\xfe\t\xfd\x16\xfc\xa9\xfb\xf9\xfa\xdf\xf9\xe8\xf8\x96\xf8\x96\xf83\xf8]\xf7\xbf\xf6\xa7\xf6\xb4\xf6d\xf6\xd6\xf5\xaa\xf5\xe6\xf5C\xf6p\xf6\x90\xf6\xff\xf6\xb8\xf7|\xf8\x03\xf9d\xf9\xe6\xf9\x92\xfa-\xfb\x82\xfb\xb6\xfb.\xfc\xdb\xfcg\xfd\xc2\xfd!\xfe\xc0\xfe\x82\xff\x1b\x00\x91\x00\x01\x01\x85\x01\x05\x02O\x02~\x02\xb6\x02\x04\x03E\x03P\x03R\x03\x82\x03\xd3\x03\xfd\x03\x06\x04"\x04f\x04\xa1\x04\xbd\x04\xdb\x04\xff\x04\x1e\x05!\x05 \x05#\x05\x16\x05\xfd\x04\xf0\x04\xe7\x04\xc8\x04\x90\x04l\x04g\x04Q\x04\x08\x04\xba\x03\x84\x03V\x03\xf5\x02\\\x02\xd3\x01o\x01\xfb\x00K\x00\x86\xff\xfa\xfe\x9d\xfe&\xfex\xfd\xda\xfc\x96\xfcW\xfc\xe8\xfbt\xfb;\xfb+\xfb\xe0\xfa\x8d\xfa\x97\xfa\xe1\xfa\xfa\xfa\xdc\xfa#\xfb\xc4\xfb \xfc\x0f\xfca\xfc\x82\xfd\x9b\xfe\xde\xfe\xde\xfe\x80\xff\x85\x00\xf4\x00\xe5\x00$\x01\xd5\x01t\x02\x86\x02_\x02\x9f\x02!\x03t\x03b\x03*\x03L\x03\x83\x03\x98\x03t\x03+\x03\x01\x03\xfe\x02\xe8\x02\xa9\x02[\x02\x08\x02\x0b\x02(\x02\xea\x01y\x01&\x01\x1e\x01\n\x01\x9c\x00"\x00\xe7\xff\xaa\xff?\xff\xc8\xfep\xfe5\xfe\xec\xfd\x9f\xfd_\xfd\x16\xfd\xb0\xfcV\xfc1\xfc+\xfc\xfa\xfb\xab\xfb\x9d\xfb\xc6\xfb\xcc\xfb\x98\xfb\x88\xfb\xdc\xfbC\xfct\xfcv\xfc\xa7\xfc\x1e\xfd\x8c\xfd\xc0\xfd\xe2\xfd9\xfe\xad\xfe\xf8\xfe\x0c\xff\x1f\xffc\xff\xaf\xff\xc7\xff\xb8\xff\xbd\xff\xfa\xff3\x00.\x00\x11\x00&\x00o\x00\xad\x00\x9e\x00\x94\x00\xe4\x00O\x01|\x01Q\x01r\x01\xfa\x01V\x02J\x02 \x02j\x02\xfa\x02\x1f\x03\xd3\x02\xc2\x02&\x03\x7f\x03Y\x03\x15\x03=\x03\xab\x03\xd4\x03\xbe\x03\xf2\x03n\x04\xc0\x04\xd8\x04\xf9\x04a\x05\x9a\x05x\x05u\x05\xb0\x05\xd2\x05\x91\x05;\x05F\x05w\x05\x1e\x05c\x04\xed\x03\xc4\x03b\x03\x81\x02o\x01\x9f\x00\x01\x00,\xff\t\xfe\xe3\xfc\x1e\xfc\xa0\xfb\xf4\xfa\x07\xfa@\xf9\xe7\xf8\xb8\xf8h\xf8\xea\xf7\xa5\xf7\xb6\xf7\xc7\xf7\xc3\xf7\xb7\xf7\xe1\xf7@\xf8\x94\xf8\xdb\xf84\xf9\xa8\xf9;\xfa\xda\xfak\xfb\xeb\xfbq\xfc\x13\xfd\xb7\xfd9\xfe\x9a\xfe\x13\xff\xa0\xff\xfd\xff3\x00w\x00\xd2\x00\x15\x01&\x011\x01\\\x01\x88\x01\x9d\x01\xa6\x01\xc3\x01\xe5\x01\xeb\x01\xdf\x01\xce\x01\xd1\x01\xd6\x01\xbc\x01\x96\x01\x87\x01\x98\x01\xaa\x01\x9e\x01\x9d\x01\xc1\x01\xe2\x01\xed\x01\xed\x01\x0b\x02A\x02[\x02I\x02@\x02S\x02l\x02j\x02V\x02\\\x02p\x02n\x02N\x025\x023\x02 \x02\xec\x01\xaa\x01{\x01H\x01\x01\x01\xb2\x00n\x00)\x00\xe0\xff\x9b\xfff\xff9\xff\xfd\xfe\xcb\xfe\xb1\xfe\x9e\xfe|\xfeW\xfeQ\xfe_\xfe`\xfeU\xfem\xfe\xa7\xfe\xd3\xfe\xd6\xfe\xe8\xfe9\xff\x8d\xff\xa3\xff\xa4\xff\xd7\xff2\x00\\\x00Y\x00~\x00\xd9\x00\x1f\x01\x1f\x01\x18\x01?\x01n\x01d\x01E\x01@\x01;\x01\x1b\x01\xe9\x00\xcf\x00\xaf\x00v\x00Q\x00D\x00\x1d\x00\xde\xff\xb5\xff\xb2\xff\xad\xff\x7f\xffU\xff]\xffv\xffo\xffH\xffC\xffo\xff\x99\xff\xa1\xff\x9f\xff\xbb\xff\xeb\xff\x05\x00\x08\x00\r\x00\'\x00D\x00D\x00,\x00\x1b\x00\x17\x00\x1b\x00\x06\x00\xd3\xff\xb1\xff\xa7\xff\xa5\xff\x94\xffw\xffh\xfft\xffz\xffa\xff<\xff4\xffP\xff_\xffW\xffR\xffw\xff\xa5\xff\xc0\xff\xd6\xff\xf1\xff!\x00Q\x00y\x00\x8c\x00\x96\x00\xae\x00\xd5\x00\xe1\x00\xc9\x00\xc2\x00\xdc\x00\xfb\x00\xf8\x00\xe4\x00\xf4\x00 
\x01:\x01%\x01\x0f\x01\x1f\x012\x01\x1b\x01\xe6\x00\xbe\x00\xa9\x00\x9f\x00x\x00?\x00\x18\x00\x02\x00\xe2\xff\xa9\xffl\xff3\xff\xf5\xfe\xb9\xfet\xfe\x1e\xfe\xc2\xfdp\xfd"\xfd\xdb\xfc\x97\xfce\xfcA\xfc4\xfc9\xfc7\xfc5\xfcM\xfc\x89\xfc\xb8\xfc\xd7\xfc\x04\xfdM\xfd\xa0\xfd\xe1\xfd!\xfe\x80\xfe\xf5\xfec\xff\xb6\xff\x17\x00\x96\x00\x06\x01R\x01\x99\x01\xfb\x01J\x02p\x02\x8b\x02\xa3\x02\xad\x02\xa2\x02\x8d\x02\x84\x02o\x02F\x02\'\x02\n\x02\xd1\x01\x86\x01:\x01\xf5\x00\xaf\x00N\x00\xed\xff\xbc\xff\x8c\xffK\xff\x04\xff\xd7\xfe\xc9\xfe\xa9\xfe\x82\xfeo\xfev\xfe\x84\xfe\x8f\xfe\xa3\xfe\xbe\xfe\xe9\xfe5\xff\x90\xff\xe4\xff>\x00\xbc\x00R\x01\xdc\x01Z\x02\xf0\x02\x97\x03\x1e\x04\x85\x04\xeb\x04]\x05\xb6\x05\xdb\x05\xea\x05\x05\x06\x15\x06\xfb\x05\xbc\x05~\x05K\x05\xe3\x04L\x04\xb6\x032\x03\x9e\x02\xe0\x01\x1f\x01r\x00\xd1\xff$\xffr\xfe\xdd\xfdd\xfd\xf5\xfc\x9e\xfcJ\xfc\x00\xfc\xcd\xfb\xa5\xfb\x8e\xfbv\xfbY\xfbQ\xfbb\xfb}\xfb\x89\xfb\x93\xfb\xad\xfb\xdf\xfb\x0e\xfc*\xfcL\xfct\xfc\xa4\xfc\xd0\xfc\xfa\xfc#\xfdV\xfd\x89\xfd\xb2\xfd\xd8\xfd\xff\xfd+\xfeY\xfe\x7f\xfe\x9f\xfe\xcb\xfe\xf6\xfe&\xffR\xfft\xff\x9c\xff\xbf\xff\xe1\xff\x01\x00+\x00W\x00t\x00\x92\x00\xb0\x00\xd7\x00\xfc\x00\x1d\x01F\x01n\x01\x97\x01\xbd\x01\xe1\x01\x0b\x02,\x02J\x02i\x02\x8d\x02\x9e\x02\xa4\x02\xa6\x02\xa9\x02\xa4\x02\x92\x02w\x02h\x02U\x024\x02\x16\x02\xf5\x01\xd1\x01\xa6\x01w\x01H\x01\x0b\x01\xd9\x00\xb4\x00z\x00*\x00\xe9\xff\xc6\xff\xb1\xffr\xff$\xff\x1b\xff5\xff2\xff\xf9\xfe\xf0\xfeQ\xff\xb2\xff\xba\xff\xa0\xff\xc7\xff\x13\x00!\x00\xfb\xff\xef\xff\x00\x00\x0b\x00\xd2\xff|\xffP\xffB\xff\'\xff\xd9\xfe\x8a\xfex\xfey\xfed\xfe:\xfe&\xfe9\xfeO\xfeN\xfeD\xfeN\xfef\xfe\x8f\xfe\xb8\xfe\xd7\xfe\xfc\xfe+\xffo\xff\xa5\xff\xcc\xff\xed\xff\x1f\x00O\x00d\x00s\x00~\x00\x92\x00\xa2\x00\x9c\x00\x89\x00\x86\x00\x80\x00o\x00N\x006\x002\x00\x1a\x00\xfc\xff\xde\xff\xcb\xff\xb8\xff\x9d\xff\x83\xffu\xffx\xffu\xffn\xffk\xffr\xff\x84\xff\x86\xff\x8b\xff\x89\xff\x93\xff\x9e\xff\x9d\xff\x99\xff\x9a\xff\x9e\xff\xa1\xff\xa2\xff\xa7\xff\xb7\xff\xcc\xff\xd7\xff\xdf\xff\xed\xff\x07\x00.\x00T\x00s\x00\x9a\x00\xbf\x00\xde\x00\xf7\x00\x15\x01:\x01W\x01k\x01t\x01\x83\x01\x85\x01u\x01[\x01H\x01=\x01(\x01\x12\x01\xfb\x00\xe2\x00\xc2\x00\xaa\x00\xa9\x00\xc6\x00\xee\x00\x18\x01@\x01s\x01\xb6\x01\xff\x01I\x02\x9a\x02\x00\x03l\x03\xb5\x03\xd9\x03\xf5\x03\x1b\x04;\x04/\x04\x05\x04\xd9\x03\xa2\x03]\x03\xf2\x02|\x02\x12\x02\xa2\x01$\x01\x96\x00\x07\x00\x82\xff\xfd\xfem\xfe\xe1\xfd\\\xfd\xd9\xfcZ\xfc\xdd\xfbw\xfb\x1f\xfb\xc8\xfa\x7f\xfaH\xfa.\xfa\x1f\xfa\x1d\xfa0\xfaK\xfak\xfa\x92\xfa\xbf\xfa\xfc\xfa8\xfbs\xfb\xb5\xfb\xfa\xfbD\xfc\x8c\xfc\xd7\xfc%\xfdp\xfd\xc1\xfd\x0f\xfeZ\xfe\xa5\xfe\xed\xfe>\xff\x90\xff\xe4\xff,\x00|\x00\xca\x00\x0e\x01N\x01\x89\x01\xc7\x01\x04\x02@\x02n\x02\x8f\x02\xb3\x02\xd7\x02\xee\x02\xf7\x02\xfd\x02\x01\x03\x02\x03\xf6\x02\xe6\x02\xd7\x02\xc7\x02\xaf\x02\x96\x02x\x02[\x02C\x02$\x02\x03\x02\xe2\x01\xc7\x01\xab\x01\x86\x01c\x01H\x016\x01\x1f\x01\x07\x01\xf7\x00\xf1\x00\xe8\x00\xd5\x00\xc0\x00\xb3\x00\xa9\x00\x91\x00q\x00W\x00<\x00\x1f\x00\xf8\xff\xd3\xff\xb3\xff\x93\xffm\xffJ\xff.\xff\x13\xff\xf6\xfe\xd8\xfe\xbd\xfe\x9c\xfe\x7f\xfec\xfeJ\xfe5\xfe\x1d\xfe\r\xfe\x01\xfe\xfc\xfd\xf2\xfd\xf2\xfd\xff\xfd\x17\xfe4\xfeO\xfew\xfe\xa1\xfe\xca\xfe\xf1\xfe\x16\xffE\xfft\xff\x9f\xff\xc2\xff\xe4\xff\x0b\x007\x00\\\x00z\x00\x93\x00\xaf\x00\xc8\x00\xd4\x00\xda\x00\xe3\x00\xeb\x00\xec\x00\xde\x00\xd9\x00\xd5\x00\xcf\x00\xc2\x00\xaf\x00\x99\x00\x88\x00~\x00m\x00]\x00L\x00>\x003\x00&\x00\x1a\x00\x0b\x00\x07\x00\x06\x00\x04\x00\x01\x00\xfc\xff\x01\x00\x00\x00\xff\xff\x02\
x00\x06\x00\r\x00\x15\x00\x17\x00\x1b\x00)\x00,\x000\x00/\x001\x00>\x00K\x00K\x00>\x00B\x00G\x00E\x00>\x004\x00-\x00-\x00+\x00\x1b\x00\r\x00\x02\x00\xfa\xff\xee\xff\xdc\xff\xd1\xff\xc3\xff\xb6\xff\xa9\xff\xa9\xff\xb6\xff\xc6\xff\xd7\xff\xea\xff\r\x009\x00b\x00\x95\x00\xd7\x00&\x01m\x01\xaa\x01\xe8\x01(\x02^\x02\x86\x02\xaf\x02\xcf\x02\xdf\x02\xda\x02\xca\x02\xbc\x02\x9e\x02d\x02"\x02\xe0\x01\x98\x01@\x01\xd6\x00i\x00\xfe\xff\x9b\xff0\xff\xbb\xfeQ\xfe\xee\xfd\x9f\xfdM\xfd\xfa\xfc\xb6\xfc\x86\xfce\xfcH\xfc0\xfc+\xfc9\xfcK\xfc`\xfc|\xfc\xa6\xfc\xd2\xfc\xfd\xfc.\xfda\xfd\x9a\xfd\xcb\xfd\xfb\xfd-\xfe\\\xfe\x95\xfe\xc2\xfe\xeb\xfe\x14\xff9\xffh\xff\x8c\xff\xb5\xff\xd3\xff\xf4\xff\x1f\x00D\x00d\x00\x87\x00\xb2\x00\xdf\x00\n\x01+\x01I\x01m\x01\x8f\x01\x9e\x01\xa7\x01\xb5\x01\xc2\x01\xc4\x01\xb6\x01\xb1\x01\xae\x01\xa6\x01\x99\x01\x86\x01p\x01\\\x01P\x01;\x01 \x01\x12\x01\x0c\x01\x04\x01\xf7\x00\xe8\x00\xdb\x00\xd7\x00\xcc\x00\xba\x00\xa3\x00\x8c\x00\x85\x00p\x00E\x00\x16\x00\xef\xff\xc1\xff\x8b\xff[\xff5\xff\x12\xff\xe5\xfe\xb6\xfe\x86\xfe`\xfeB\xfe4\xfe\x1c\xfe\x0f\xfe\x0c\xfe\x0f\xfe\x1d\xfe$\xfe.\xfe>\xfeN\xfei\xfe\x83\xfe\xa7\xfe\xd2\xfe\xf1\xfe\x1c\xffP\xff\x93\xff\xc5\xff\xf6\xff(\x00_\x00\x90\x00\xb4\x00\xee\x00(\x01J\x01[\x01m\x01\x80\x01u\x01z\x01\x8b\x01\x97\x01\xaa\x01\xc6\x01\xb8\x01\x86\x01J\x01/\x01%\x01\x14\x01\x0f\x01\xf3\x00\xc9\x00\xae\x00\x8e\x00N\x00\x0e\x00\xe2\xff\xdf\xff\xf9\xff\x06\x00\xf2\xff\xd8\xff\xca\xff\xbc\xff\xc8\xff\xd4\xff\xda\xff\xd5\xff\xc3\xff\xa4\xffb\xff"\xff\x10\xff\x19\xff\x1d\xff&\xffA\xff\x81\xff\xde\xff\\\x00\xf7\x00\xb4\x01\x90\x02`\x03\x17\x04\xc4\x04i\x05\xfb\x05a\x06\x89\x06\x95\x06\x8e\x06d\x06\x0b\x06\x96\x05\x12\x05y\x04\xca\x03\x14\x03d\x02\xbb\x01\x1a\x01k\x00\xaa\xff\xeb\xfe6\xfe~\xfd\xbb\xfc\x01\xfcb\xfb\xda\xfa\\\xfa\xf2\xf9\xbb\xf9\xb6\xf9\xc7\xf9\xf1\xf9E\xfa\xc1\xfaC\xfb\xca\xfbV\xfc\xe1\xfc]\xfd\xca\xfd"\xfeX\xfep\xfe}\xfe\x8d\xfe\x86\xfen\xfeN\xfe3\xfe 
\xfe\x04\xfe\xeb\xfd\xdf\xfd\xd6\xfd\xce\xfd\xca\xfd\xc9\xfd\xc4\xfd\xd3\xfd\xd8\xfd\xd3\xfd\xe4\xfd\xfa\xfd\x1b\xfe@\xfey\xfe\xc1\xfe\x12\xffl\xff\xcd\xff1\x00\x90\x00\xf1\x00N\x01\xa6\x01\xef\x01#\x02D\x02l\x02\x84\x02\x84\x02{\x02}\x02\x85\x02\x86\x02\x85\x02\x9b\x02\xda\x02\x1b\x039\x03M\x03\x82\x03\xbc\x03\xd4\x03\xc8\x03\xaf\x03p\x03\x1b\x03\xc9\x02~\x02\x03\x02e\x01\xcf\x00@\x00\xb3\xff&\xff\xb4\xfeO\xfe\xf9\xfd\xaa\xfdA\xfd\xe9\xfc\xb3\xfc\x98\xfcV\xfc\x03\xfc\xe9\xfb\xe4\xfb\xb7\xfb\x85\xfb\x8c\xfb\xd3\xfb\xf7\xfb!\xfc\x8f\xfc\xff\xfc3\xfdI\xfd\x8b\xfd\xe0\xfd\xec\xfd\xe9\xfd;\xfez\xfe]\xfer\xfe\xc2\xfe\xe1\xfe\t\xffX\xff~\xff\x81\xff\x9f\xff\xba\xff\x9a\xff\xc4\xff"\x009\x00,\x00\x88\x00\x07\x01\x15\x01\r\x01b\x01\xea\x01*\x02A\x02\x8b\x02\xe4\x02\x13\x03a\x03\xab\x03\x9b\x03m\x03I\x03I\x03L\x03B\x03^\x03h\x03\xdd\x02j\x02\x8e\x02\xb7\x02\x87\x02P\x02\xa0\x02P\x03\xfd\x03\xcf\x04"\x06\xdb\x07z\t\x90\n&\x0b\xed\x0b\xec\x0c}\r#\r\x81\x0c\x0c\x0cx\x0bC\n\xef\x08\x0b\x08B\x07\xe5\x05\xfb\x03\r\x026\x00R\xfe\\\xfcD\xfa\xfb\xf7\xde\xf57\xf4\xf0\xf2\xca\xf1%\xf1#\xf14\xf1\x11\xf1*\xf1\xe4\xf1\xc9\xf2\x89\xf3K\xf48\xf5\x0f\xf6\x1b\xf7\xb2\xf86\xfal\xfb\xdf\xfc\xc4\xfe:\x00\xe3\x00\xb8\x01\x07\x03\xd0\x03}\x03\xff\x02\x0f\x03\x13\x03s\x02\xc9\x01\x89\x01Y\x01\xb4\x00\xd7\xffG\xff\xe2\xfe6\xfeR\xfdx\xfc\xbd\xfb\x06\xfb\x8b\xfas\xfa\x96\xfa\xe7\xfaQ\xfb\xee\xfb\xb4\xfc\x9a\xfd\xb6\xfe\xc6\xff\xa3\x00F\x01\xe9\x01\xa2\x02r\x03_\x04*\x05\xaf\x05"\x06\x9a\x06\x0b\x07B\x079\x07\x1d\x07\xc1\x060\x06z\x05\xc1\x04;\x04\xbd\x033\x03\x84\x028\x02\xb9\x02\\\x03R\x03\xde\x02\x97\x02\x80\x02\xce\x01\xfd\x00\x8b\x00\x07\x00\x04\xff\xf4\xfdx\xfdT\xfd\x04\xfd\xc6\xfc\x9c\xfc*\xfc\xaa\xfb\x81\xfb\xac\xfb\x9a\xfb_\xfbs\xfb\x9d\xfb\x83\xfbY\xfb\xb7\xfbt\xfc\xd3\xfc\xa0\xfcl\xfc\x85\xfc\xb2\xfc\x8e\xfcm\xfc\x87\xfc\xbb\xfc\xb1\xfc\x97\xfc\x0f\xfd\xd0\xfdM\xfe;\xfe\x0e\xfe\xdc\xfdz\xfd\x14\xfd\xf0\xfc\xf3\xfc\xd2\xfc\x86\xfcV\xfcF\xfch\xfc\xb4\xfc\n\xfd9\xfd\x11\xfd\x1b\xfd5\xfd\x19\xfd\xd0\xfc]\xfd\x1a\xff\xb5\x00-\x01\x82\x01\xb0\x02\xe9\x03\xd9\x03h\x03\n\x04\xf9\x04\xe0\x04u\x04]\x05\x1b\x07\xfd\x07\x8d\x07\x03\x07\t\x073\x07\xcc\x07\xbb\t8\rc\x11\xff\x13y\x14|\x14Z\x15\x00\x16\x81\x14\xdb\x12C\x13\xd4\x13\xbd\x11\x00\x0f>\x0f\xf1\x0f\x93\x0c\x96\x06\xa0\x02=\x00\xce\xfb\x96\xf6X\xf4\x9a\xf3#\xf1\xc1\xed\xbe\xeca\xed\xae\xec\xed\xea\x8b\xe9\xe9\xe8\x8a\xe8\xe0\xe8\x1a\xea.\xecd\xef\xf2\xf2\x94\xf5\xc7\xf7\xe4\xfa,\xfe}\xff\xb5\xff8\x01s\x03n\x04\xd0\x04\xb1\x06\xed\x08\x1b\t\xee\x07C\x07\xbf\x06\xbd\x04\xd0\x01\x90\xff\x15\xfej\xfc\x94\xfaY\xf9\xa0\xf8\xce\xf7\xa1\xf6\\\xf5O\xf4\x82\xf3\x1a\xf3Q\xf3\xeb\xf3,\xf5\x07\xf7F\xf9W\xfb\xfc\xfc\xe5\xfe\xca\x00k\x02\x99\x03\x08\x05\xfe\x06\xea\x08Q\n\xe2\x0bY\ro\x0eJ\x0eK\x0e\x93\x0e\x19\x0e\xfb\x0c.\x0c\x10\x0b\x8b\tq\t\x8d\x0c\xc9\x0e\x87\x0c\x8c\x08\x83\x06\x80\x05n\x02\x17\x00\xc7\xff9\xffQ\xfc\x1e\xfa\xbb\xfa\xc8\xfb^\xfa7\xf7d\xf4\xad\xf2\x16\xf2\xfe\xf2\xd6\xf4[\xf6\xbf\xf6\xab\xf6+\xf7\xd4\xf7\xb0\xf8:\xf9\\\xf9\\\xf9\xc9\xf9\x93\xfb\x00\xfe\xbd\xff\xad\x00N\x00h\xff\xad\xfe)\xffm\x00\x9e\x00/\x00\xdd\xff\x95\xff.\xff\xea\xfeo\xff\x1e\xff\x84\xfd\x0f\xfc\xab\xfb\xd2\xfbT\xfb2\xfbS\xfbr\xfb\x93\xfa1\xfa$\xfb\xb2\xfc6\xfd&\xfc%\xfc\x1d\xfd\xc3\xfe\xb1\xff\xee\x00]\x03\xe8\x04\xb6\x05\x86\x06\xd1\x07d\x08\x98\x08K\t/\n=\ng\x0b5\x11\xbb\x19\xdf\x1d\x92\x1a\x96\x15\x9d\x15\xe5\x17\x1c\x17\xd1\x15|\x19^\x1d\xf9\x19!\x13\xba\x12\xe0\x15\xde\x10\xc8\x05\x18\xff\xf8\xfe\x03\xfd\xb4\xf8\x15\xf9b\xfbn\xf7V\xee>\xe9\x02\xe9\xec\xe7\x8c\xe5\xed\xe45\xe6\x86\xe7m\xe9\x86
\xec\xc8\xee1\xefK\xef\x06\xef\x1e\xf0\x86\xf3v\xf9\x13\xfe+\x00)\x022\x04O\x05\x1a\x05\xc6\x046\x05R\x05\x04\x05\xf1\x04\xfd\x05\xaf\x07\xc7\x06"\x03I\xff9\xfdu\xfbD\xf8\xb7\xf6\xea\xf6X\xf6X\xf4\x8a\xf3\xb5\xf4\xdd\xf4\x14\xf3\x80\xf1\x8e\xf1G\xf2\xaa\xf3\xbe\xf6\xe7\xf9T\xfc_\xfdC\xffn\x00\xde\x01\x8a\x03\xb4\x054\x07\x9f\x08\x9a\n\xce\x0cZ\x0eX\x0fl\x0fN\x0e%\x0eX\x0e\xfc\x0eN\x0e\x1d\x0eG\r\n\r\xfc\r!\x0f\x89\r1\t\xf9\x05K\x03\x0e\x01Y\xffe\xff\xaa\xfe\xeb\xfb$\xf9?\xf7(\xf6N\xf4z\xf2\xe2\xf0\t\xf0}\xf1\xab\xf2\xa9\xf3e\xf4\xd7\xf4\xc6\xf4\xa9\xf3\xe3\xf4\xa5\xf6\x8a\xf8}\xf9i\xfa\x16\xfc\x0e\xfdW\xfe\xde\xfe\xaf\xff@\x00L\x00\xcc\x00\x06\x02\xf0\x03~\x05F\x05\xcb\x03!\x03\x19\x02E\x01\xed\x01b\x03\xb0\x01\xb7\xff\xa4\xff\x92\xfeS\xfdA\xfe\x15\xfeB\xfb\x1b\xfb\xbb\xfc\x83\xfd\xdd\xfc\x06\x00O\x02\x95\xff\x02\xff\xc8\x03F\x05M\x04?\x05\xe6\x07\xc3\x07\xde\x06l\x0bI\x0c[\n\x0e\n+\x0c\r\x0b\x1f\n\x18\x0bi\n\r\n\xb4\t\xf1\n\x8b\t\t\nJ\x0bE\x0cn\x0b\xa4\t\x1b\n\xe1\to\t\x0c\tw\x08\xcc\x08\x1f\x08X\x06i\x05^\x04l\x02\xec\xffI\xfe\x9f\xfdN\xfcg\xfb\xdd\xfa\xa3\xf9\x84\xf8w\xf7\x85\xf6\x15\xf5\xd3\xf4\x8f\xf4N\xf4\xcc\xf4V\xf5M\xf6f\xf5\xce\xf5\xa3\xf6R\xf7\xb6\xf7^\xf8\xdc\xf9\xf1\xfa\xd1\xfbY\xfc~\xfd\x15\xfe4\xfe@\xfea\xfe\x83\xfe\x91\xfe\xd8\xfe\x03\xff!\xffP\xfe\xa4\xfd\xee\xfcg\xfc4\xfc;\xfb\xbc\xfa\xad\xfa#\xfa\x87\xfa\xf6\xf9\x14\xfa\xfc\xf9\xbe\xf9\x1c\xfaj\xfa\x8c\xfb\x18\xfc\xb1\xfc\xae\xfd\x13\xfeR\x00\x9d\x01{\x03@\x06\x98\x06\xff\x07Z\x075\t2\tw\n\xf0\n\x04\n\x87\n\xa7\x078\x08\x13\x06\xf9\x04\xfd\x02\x06\x01\xb4\xff\x8a\xfe\xbe\xfe\x05\xfd\xb7\xfd\xb6\xfbI\xfa\x87\xf9\xb0\xf9\xe2\xf9\xbb\xf9P\xfa\xb7\xfa\xe4\xf9\xf7\xfa\xfe\xfaP\xfc\xdc\xfc\xd0\xfb\x01\xfe\xfc\xfc\xe0\xfc\n\x00\xde\x00\x1b\xfej\xff\xd6\x01\x92\xff\xeb\x00?\x02:\x02\x12\xff\xf0\x00\xcc\x03\x8d\xff\xd2\xff\x06\x04\x80\x03\x0c\xfd\xcd\xff~\x01D\xffk\x01\xef\x03\x1f\x00\x0b\xffe\x02\xfe\x01\x85\xffc\x02\xbc\x03\xdc\x00\xd7\x01\x84\x02\x97\x02\x92\x01\x0b\x04\xb4\x00\xd4\x00\xeb\x013\x01\xa1\x01f\x02\xf7\x01\x9b\xfe\x89\x00\xee\x00\xab\x00\xc5\x01\xd9\x01L\x00?\x02G\x00\x07\x02\xbd\x02:\x01\xff\x011\x01\x92\x03G\x01\x0c\x031\x04G\x01f\x03\xe5\x018\x02a\x02f\x00\xe4\x01,\x00\xe0\x00]\x00\xa2\xff\x04\xfe5\xffC\xff\x1a\xfd\xfa\xfd\xfd\xfcc\xfe0\xfd\xa5\xfd\r\xfe,\xfd\xa1\xfd\x86\xfe\x92\xfe\xd0\xfen\x00.\x00j\xffr\x00\xf8\x01j\x00[\x02\x85\x02\x9d\x01\xbd\x02~\x03\xea\x01\xe5\x01\x99\x03 \x01$\x01S\x02\x96\x01\x9a\x00&\x00\xeb\x00D\x00\xed\xfe\x99\xff\x08\xfe\xf4\xfe"\xfe\xf8\xfd\xe5\xfdn\xfdh\xfd\xe2\xfc\xdf\xfe\x8f\xfbY\xfd\xc4\xfd\x85\xfb\xd7\xfc\x82\xfd\x96\xfc\xf1\xfcE\xfe\x1f\xfd(\xfd\x05\xfe\xce\xfd\xf1\xfdM\xfe\xd9\xfex\xff\x0e\xffZ\xff\x1e\xff\x12\x010\xff\x8a\xff\xda\x00 \x001\x00E\x00\xcd\x00\xcf\xff\x07\x01\xf8\x00\xf5\x00\xa4\x00\xc7\xff\x9c\x00\xe4\xff\xfc\xfe\xbc\x00A\x00k\xff=\xff\xa8\x00D\xffP\xff\xe5\xff:\xfe6\x00\xca\xff\xd9\xff\xa0\x00\xe7\x00X\x00%\x00\xad\x00\x10\x01{\x00\xba\x01\x9e\x01\xea\x00\xb6\x01\xaa\x01\xb3\x01\xa0\x00\x03\x01\t\x01\xc1\x00V\x00\xb0\x02\xb2\xff\xb5\xff\xfd\x01\xe0\xfe\xab\xff@\x00\xfa\xff\xd0\xfe\xd4\xff!\x00\xeb\xfe\xcb\xff\x9c\xff?\xff\xa8\xfe3\x00i\xffJ\xff\xf1\xff\x8d\xff\x88\xff{\xff\xa0\x00>\xff-\x00\x0b\x00\x82\xff=\x00\xdf\x00X\x00\xbd\xffp\x00\x1b\x01[\x00\x9b\x01H\x01\xd3\x00\x86\x01\x82\x00\xca\x01q\x01v\x00\x10\x01\xca\x00\xa8\x00R\x01\xba\x00i\x01m\x00\xea\xff\x9e\x00f\x00\xef\x00L\xff\x9c\xff5\xff\xc2\xfe\x1b\x01\x19\xfe\x98\xff\xdb\xff\xd5\xfdR\xff\xd9\xfe6\xff&\xfe\x9f\x00\xd8\xfe\xdb\xfe 
\x00I\xff\xc6\xffo\xff\x83\x00\x07\x00@\x00\xc9\xff\xb3\x00\x97\x00\x87\x00\x89\x01\x9a\x00\xde\x00\x8c\xff/\x02\x92\x00L\x00\x17\x01H\x00\x98\x00x\xffi\x01\x10\x00\x91\xff\xdc\xff\xcc\xff\x9f\xff\x96\xff\x84\xff>\xff\x90\xfe\xd0\xff#\xff0\xff\x80\xff\x0c\xff;\xff\xc2\xfe\xd7\xffp\xfe\xc6\xff\xc2\xfe\xcb\xffF\xffn\xff\xf3\xff\xa7\xff|\xff]\xff\xc3\xffy\xff\x1a\x00\x99\xffq\x00\x7f\xff_\x00_\x00\xa7\xff\xff\xffu\x00\xce\xff\xdb\xff\xdc\xff\xbd\xff\x9c\xff~\xff\xb1\xff\x18\xff\xc4\xffX\xfe\x1f\xff%\xff\x17\xffM\xff^\xff\xda\xfeY\xff^\xff\x80\xff%\xff\x9e\xff\xe7\xff\x00\xffm\x009\x00+\x00X\x00\xda\x00\xb8\x00\xf1\x00\x9c\x00]\x01\xe1\x00\xa3\x01\x19\x01\xd8\x01P\x01\xfa\x00\\\x02d\x00\xb7\x01\x94\x00\x0c\x01g\x00 \x00\x13\x00Q\x00\xc6\xff}\xff\xca\xff\x0f\xff7\xff\x1b\xff#\xff\xd9\xfe\x04\xff\xdc\xfe?\xff\xa9\xfel\xff*\xff\x81\xff\xb3\xfex\x00\xc7\xfeS\xff#\x01X\xffe\x00)\x00-\x01\x05\x00W\x01D\x00<\x01\x9d\x00\xec\x002\x01\xbc\x00\x96\x01\xe2\x00n\x01\xb1\x00\xe4\x00\x8d\x00\x08\x01/\x00\xc6\x00\x89\x00_\x005\x00\x12\x00\x0f\x00\x8a\xff\xda\xffy\xff\x89\xff[\xffY\xff\xb6\xffq\xff\x16\xff\xa0\xff\x08\xffI\xffo\xff\xa0\xff\x85\xff\xad\xff\xc2\xff\x00\x00\x08\x00\x1d\x00\x92\x00`\x00\xb1\x00\xb5\x00?\x016\x01\xfb\x001\x01\x06\x01\x1e\x01;\x018\x01c\x01\xbb\x00\xd7\x00\xd7\x00s\x00\xed\xff3\x00\'\x00)\xff\xe2\xff1\xff\x17\xff\x0b\xff\xb4\xfe\xe9\xfe2\xfe\xc8\xfe\xb1\xfe$\xfe\xd3\xfe\x9f\xfe\xa9\xfe\xe9\xfe\xec\xfe\r\xffy\xffO\xffs\xff\xdb\xffM\x00N\x00j\x00\xa9\x00\xa1\x00B\x01\xa2\x00\xbd\x00_\x01\xeb\x00\x15\x01P\x01\xcf\x00\x9c\x00\xdd\x00\xdb\xffh\x00\xdc\xffr\xff\x06\x00\\\xff\x1b\xff\x93\xff\xc8\xfe`\xfe\x9e\xfeT\xfe\xbf\xfe\xf5\xfd\xd9\xfe\x84\xfe\n\xfe\x1d\xff\xc1\xfe\xae\xfe\xda\xfeQ\xff\xf2\xfej\xff\xb9\xff\x1a\x00e\x00\x0f\x00\x9a\x00\x10\x01\x85\x00\xeb\x00\'\x01\xe1\x00z\x010\x01\xa6\x01\xb9\x00\x8e\x01_\x01\xda\x00\xfc\x00\xc7\x00\xb2\x00O\x00\xcd\x00\'\x00<\x00P\x00d\xff\xd2\xff\xb8\xff&\xff`\xff\x1a\xff\xf8\xfe\x15\xff\x08\xff\xaa\xfe\xdf\xfe\xea\xfef\xffy\xfe%\xff\x92\xfea\xffo\xff\x92\xfe\x00\x00\x14\xffx\x00\xda\xffZ\x00[\x00\xac\xff\xf3\xff\x02\x00\x00\x01*\x00`\x00\xa5\x00\x9c\x00\x06\x00\x83\x00\xd8\xff\xbc\xff\x17\x00\n\x00R\x00W\xff\x99\x00\x88\xff\x88\xff\xa2\xff{\xff\xdb\xff\xdb\xffk\x00\x86\xff\x1b\x00\xd6\xff$\x00\xea\xff\xfc\xff\xfe\xff\xa1\x00\xa3\x00&\x00K\x01|\x00\x94\x00\x94\x00\xb4\x00\xd1\x00\xce\x00\x05\x01L\x01\xfa\x00\xa1\x00\xee\x00\xdf\x00h\x00\x8f\x00\xa6\x00\xc7\x00\xd3\x00\x80\xff\x92\x00\x16\x00\xd1\xff\xb7\xffA\x00\x0c\xff\x0c\xff\xb3\xff\x0e\xff\x81\xff\xc9\xfeN\xff\xc6\xfex\xff\xa4\xfez\xff\xf3\xfe\xf5\xfe\x8f\xff\xa2\xfe\xbc\xff+\xff\xdf\xff\xce\xff\xdc\xff\x16\x00\xb6\xff\xd2\xff\r\x014\x00\xe5\x00\xea\x00\xac\x00\xa5\x01\xb2\x00\x8a\x01m\x01\x04\x01\xa1\x01\xce\x01\xcf\x00\x83\x02\x10\x02e\x000\x01#\x01\xd5\x00\xa3\x00\x87\x00\x03\x00\x13\x00l\xff\'\xffX\xff\x9a\xfe\xa6\xfeg\xfe\x0f\xfek\xfe9\xfe{\xfeV\xfe\x8f\xfeT\xfe\xa2\xfe\x13\xff\xf1\xfe\x03\xff\x01\xff\xa1\xff\xed\xff\x02\x00}\x00Z\x00@\x00\xaa\x00\xcc\x00D\x01V\x01p\x01\xd7\x00\xb4\x01\x13\x01H\x01\xa4\x01\xa1\x00\xce\x00$\x01\x8a\x00\xa9\x00\n\x01\xae\xff\x10\xff8\x01\x18\xffF\xfe\xe9\x00\x91\xfe>\xff\xcf\xfeG\xff\xa8\xff\xe0\xfe\xa2\xfeu\xffo\xfeG\xff\x0b\x00\x1d\xff\xec\xffi\xff\xd5\xff\xaa\xff\xd6\xff*\x01\xde\xff\xcb\xff\'\x01\x0e\x00\xad\x00\xed\x00\x10\x01\x00\x00\xbf\x00\xf5\x00\xf9\xff@\x01\xc1\xff\xb8\x00\x00\x00\x7f\xffM\x00\x83\xff`\xff\xe4\xff=\xff\xb2\xfe\x1b\x00W\xfe\xc3\xfe~\xff\xcc\xfen\xfe\\\xff\x1b\xff\xa2\xfe\x03\xff\x15\xffJ\xff\xe3\xfe\xa7\x00\xa2\xfe\x00\x00\xcf\xff@
\xff8\x00\x12\x00b\x00(\x00\x19\x01\xb5\xff~\x00\xde\x00\xf8\x00d\x00:\x01N\x00+\x01S\x01Q\x01\x12\x01\x9f\x00k\x01\x06\x00\xbd\x00#\x01<\x001\x00\x95\x00\xe7\xff\n\x01\x04\x00\x19\xff\xbc\xff\xb5\xff\x81\xff\xa4\xff5\x00P\xff&\x00\r\x00e\xfe\x98\x00M\xfe\xc8\xffl\x000\xffe\x005\xffL\x00u\xff6\x00O\xff\xc4\xff\x13\x00\x11\x00\xd8\xff\x8d\xff$\x00\xda\xff:\x00]\xfeK\x00\x1b\x00\xc3\xff\xcd\xff\xb4\xff\x17\xff+\xff_\x01\xaa\xffe\xff\xb0\xff\x1e\x00V\xff\xa4\x00\xc4\x00\x12\xffi\x00}\xffc\x00\xa9\xff(\xff\xe4\xff\x00\x00[\x00{\xffT\x00\x95\xfet\xff\xd6\xff<\x00\xb4\x00s\xff\x13\x01#\xfe\xce\x01R\xffZ\x00\xcd\x01E\xff\x05\x01\x83\xff\xc1\x01\xce\xff\xb8\x01\xc9\x008\xff\xed\x00\xc4\x00\xdb\xfe\xe1\x008\xff\x12\x01\xd8\x00t\xff\xa2\x01\xcc\xfch\x01y\x00S\xfe\r\x00\xa4\x00\xfa\xff\xe6\xff\x11\x01=\xfd\\\xfe\x8f\x00s\xff\x85\x00\xeb\x00\xcc\xff-\xff\x96\xff@\x00\xfc\xfe\xb5\x019\xfe\xa8\x02\xa8\x00\xc6\xff\xa0\x00\xb0\xfa\xd6\x01i\x01g\x01\xd6\x00M\xff\x18\xff>\xfe\xb2\x00\xfd\xfe2\x01\x13\x00\x0e\xfe\xbc\x00\x15\xfd\xcf\x034\xfe \xfa\x92\x04X\xfd\xaa\xfe\x89\x00=\x01\x8b\xfd\xb7\x00\x86\x01\xef\xfd\xce\x01*\xff\xb1\xff3\x02\x96\x00\x95\x01\x0e\x01\x90\xffL\x02\xba\x009\xffM\x01\xc3\x01d\x03%\xff\x92\xfe\x92\x04\x18\xfd#\x05i\x00\x8f\xfb\xb3\x03\xf9\xfbi\x05L\xfd\x01\x02\x95\x00*\xf9\xb4\x02u\x01(\xff\xd8\xfe\x9d\xfd\xfc\xffq\x02D\xfc\x0c\x00E\x01\x1a\x03e\xfbA\xffA\x02g\xfd\x92\x03\x8f\x00\x81\xfd\xae\x00\xdf\xfe=\xfec\x02\x81\x00\x95\xffr\xfc\xc7\xfe\xa4\x02t\xff\xde\xfd\x90\xfe\x0e\x01\x1c\xfdv\xfe\x9a\x04,\xfe\xdc\x00\x9e\xfb\xeb\x01\x83\xfe\x16\xff\\\x04\x88\xfb\xdc\x03\x1c\x00\x08\x00X\xfdf\x01\x1e\x00P\xfe\xef\xfeE\x05\xfd\xff@\xfe\t\x01\x96\xfc\xe9\x00\x1b\xfe~\x01g\xff-\xfe\xc1\x01\x08\x02\xde\xfc\x17\x03\xbf\xfe\x07\x00\x03\x01\xae\xfd\x1c\x02t\xfe]\x02\x19\x02\xd6\xff\xaa\xff\xb2\xfe\xf6\xff$\x01\x80\xff\x9b\xffS\xfc\x1c\xff\xad\x00\xa5\xfdL\x02\xb6\xffb\xfdZ\x02\x94\xfe\xc5\xfd\x7f\xfd\xaa\xf7\x9c\x00\xac\x10\x02\x03\x97\x00#\xfe\xf9\xfb\xa6\xffy\x02\x1f\x03N\xff\xd0\x06Y\xfd\x8e\x01\xda\x0bn\x047\xf5&\xf7\xff\xfa1\xfd\x8b\x03}\x00\xda\xfeR\xffC\xfc\t\xf8i\xfc\x7f\x02\xe5\xfb$\xfe\xb6\xffL\x03u\x03\xeb\xfe\xa4\x05\x9e\xfdP\x00\xae\x03 \x02\xd0\x03\xbb\x07\x14\x02\xa9\xff\xc4\x07a\x01\x84\xfb?\x02\x10\x06\x11\x00\xa8\xfd\xaa\x002\xfcM\xfc\xf3\x02\x08\xfds\xfb\xc8\xfa\xed\xfc\xb8\xfb\xf2\x03\x04\x00b\xf9\xf3\xfe\xf7\xf9\x8c\x00`\x00n\x00\x85\x01\xfb\x02\xf2\xfc0\xfeU\xfe\xca\xff\xc1\x04z\xfe\x1f\x01\x9a\xfd\xa5\xfdZ\xffB\x01\r\x01X\xfc\xa0\xff\x10\x00)\xfdU\x01\x8d\x00\xae\xff\xb3\x00\xec\xfe\xf0\xfe\xc7\x00$\x04"\x01\x16\x01\xfb\xffA\x01?\x03s\x02A\x01C\x01\xcc\x01\x89\x01]\xff\x15\x01\xe1\x03%\x01\x1e\xff|\xff\x88\x00\x14\xff\x1e\x01\x9c\x01\xea\xff\xbc\x00\xc2\x00\xdb\xfe\x90\xff\xba\x01\x1a\x01\xe9\x00\x8e\xff\x9b\x00\xea\x01~\x01\xb9\x00\x82\xff\xe1\xffk\x00\xd1\x00\x93\x01\xcc\x01\x9e\x01Z\xff\\\x00v\x00i\x01\x9e\x00\xdb\xff \x01:\x00\xbd\x01w\x01\r\x00\xd4\xfe=\xfe\xeb\xfe \xfe\x14\xfe\x96\xff\xd4\xffS\x00I\xff\xac\xfb\x9b\xfb\x90\xfc\x1d\xfe<\xfd\xd0\xfe\xb9\x00\x11\x00,\xff\x03\xfdu\xfcr\xfb=\xfd\xa5\xfdg\x001\x03m\xfd\xe4\xfb\xf2\xff[\xff\x03\xf9]\xf9\xb4\xfa\x1d\xfd6\xfe[\xfc\x0e\xfe\xcb\xfc\xa7\xf9\xcc\xf6}\xf9\xff\xfc\xa6\xf9\xa0\xf9\xf6\xfbu\xfe\xfd\x00\x89\xfe*\xfbY\xf8\xb0\xfa\xdd\xfd\xb5\xff\xf2\xffl\x00.\xffp\xfc\xbc\xf9\xa0\xf9\x8b\x00\xfa\x07\xfa\x15<\x17\x96\x10\xd9\x0c\x9f\x0f \x18\xc4\x17\xa7\x17\xd7\x1c\xcf!\xef \x92\x1e\xc4\x1f\x19\x1e\x92\x13\\\ti\x07$\x0c\x03\x0c\xdd\x07\x86\x03\xb1\xfd\xc0\xf7\xa3\xf0\x9c\xec\xb4\xeb\xe4\xe8~\xe6 
\xe5\n\xe7)\xea\x82\xe9\xac\xe61\xe5\xc9\xe6\xc2\xe8Q\xec:\xf3\xae\xfa~\xfd\xf3\xfb\x8d\xfc*\x00\xf4\x02\xf8\x03]\x04\x05\x08v\n+\x0bd\x0b\xdc\x0b\x0e\x0bj\x04e\xff=\xfd\xac\xfd\xe5\xfe\x83\xfdx\xfbI\xf9\x86\xf3\xf4\xee7\xee\xee\xeeM\xf1\xa4\xf0{\xf0\x86\xf2\xc9\xf4`\xf6q\xf7u\xfa\xc1\xfcK\xff\x04\x023\x06\xd9\x0b\x14\x0e \x0f \x0f\x06\x10\xe3\x11\xfe\x12\x19\x13\xcd\x12\xb9\x12\x92\x10\xa8\x0eG\r\x83\x0b\x90\x08j\x04\x14\x01\x00\x00\xe2\xfe\xa6\xfcg\xfa\xf5\xf7\x92\xf4\x99\xf1\xc2\xf0w\xf08\xf1=\xf2m\xf1\xee\xef\xa2\xef\x82\xef\x81\xf0]\xf2\xfe\xf4\xdb\xf6\t\xf7\x8a\xf8\xab\xf9\xc8\xf9\xae\xfa\x04\xfb\xaf\xfb \xfd\xb1\xfd\xb2\xfe\x88\xfeb\xfe4\xfc9\xf9D\xf8d\xf7\xcc\xf6\x17\xf7\xed\xf8\xf1\xf7\x1b\xf6i\xf5\xc0\xf5\xda\xf4\xd4\xf1\xe7\xf7\xa0\x0c+$"-\x15%\x15\x1b\xe1\x1a\xc4\x1fM%F1DDWN\xa5E\xce5%-_*\xaa \xa4\x14&\x14o\x1a\'\x1bh\x10\xf4\x05O\xfd\x94\xeeI\xdc\x18\xd1\x04\xd4`\xdcy\xe1\x05\xe0i\xdc\x15\xd9b\xd2P\xccn\xcd\x9e\xd7Z\xe4\x04\xed\xa1\xf5\x8f\xfe>\x03\n\xffO\xf9_\xfc\xf8\x05\xf7\x0e\x82\x14#\x19\xac\x1cT\x1a\x8b\x11\x14\n\x0c\x07\xe8\x05\xd8\x02\xc4\xff\x18\x00\xc6\x00\xdc\xfb{\xf2\x19\xea{\xe4\x8f\xdf)\xddJ\xe0\x16\xe8@\xed\xa7\xeb\x99\xe8E\xe6x\xe6\xd9\xe7\xd0\xed8\xf83\x02\xb8\x07$\t9\n\xeb\n\x9c\x0b7\r\x02\x11>\x18\x91\x1d\xfc\x1f\xe5!\xd0 \xdd\x1a\xd3\x11\xed\x0e\x9e\x16\xfd\x1e\xf7\x1e\xad\x19\x9d\x12>\nz\x00\x8c\xfc\x85\x00+\x05d\x03\xfa\xfc\xc4\xf7\x9d\xf2\\\xec\x08\xe9W\xea\xaf\xec\xa7\xec\x16\xebF\xec\xdd\xedx\xed\xea\xeaW\xe8\xbb\xe7\x0c\xea^\xef\x13\xf5\xf0\xf8\xa1\xf8s\xf5-\xf3V\xf2f\xf5\xb5\xfb0\x00\xdd\x02\xd0\x02\x12\x01\xcd\xfdg\xfb\x96\xfb\x93\xfer\x01\r\x02\xea\x02\x9b\x01)\x00\xd3\xfeq\xfe@\xfe\xe0\xfc\x98\xfc\xf0\xfc\x05\x00)\x05\x9d\x0c\xb0\x11\xa6\x10A\x0e}\x0c\xcd\r|\x12\xa2\x1al&\x7f-;,P&4!\x94\x1e~\x1dA \xb8%\xe1(\xe8$L\x1c\xb7\x14\xab\r9\x06S\x00\x9f\xfe\xa3\xff\x98\xfd\x82\xf8\xaf\xf3$\xee|\xe6p\xde"\xdc\x13\xe0\xa7\xe4N\xe6o\xe6:\xe6\x00\xe3\xbb\xde&\xde{\xe4\xff\xec\x97\xf2\xb8\xf5K\xf7\xc9\xf6\xd6\xf4\x80\xf4*\xf8\x9d\xfd\xca\x014\x04I\x05$\x04Z\x01\xca\xfeu\xfe\xce\xff\x82\x01\xfe\x02W\x03\xf0\x01]\xff\xd0\xfc7\xfb\xdf\xf9D\xfab\xfb\xc8\xfc\x0c\xfd\x85\xfc\xef\xfb)\xfb\xcc\xfaB\xfb\x94\xfdM\x00w\x02\n\x03\x02\x03g\x03X\x04\x9a\x06r\tK\x0c,\r\'\x0c,\nP\t.\x0b\x84\r\xc1\x0fx\x10<\x0e\xa3\n\xea\x05\x16\x04\x87\x04\xd0\x04{\x05\xd8\x04\xe6\x02\xb3\xfdf\xf9\xd3\xf7\x8f\xf7c\xf7\x15\xf82\xfau\xf9\xc3\xf5E\xf3\x9f\xf3\xa1\xf4\xcb\xf4\x03\xf60\xf8\x90\xf8\x19\xf7-\xf6\x88\xf6.\xf7\x85\xf7\x04\xf9\x02\xfb~\xfb\xb4\xfa\x02\xfat\xfa\xff\xfa\xb0\xfb\xee\xfc\x83\xfe\x08\xff\xd5\xfe\xba\xfe#\xffQ\x007\x01+\x02\xe3\x02]\x03t\x04U\x05D\x06\x92\x07\x86\x08\xcd\x08\xd6\x07\xbf\x07c\t\xb9\np\x0bU\x0b\xdc\nn\n&\tK\x086\x08\xba\x08\xa0\x08\x05\x08\x04\x07\xce\x05\x1f\x05?\x04X\x04\xdb\x04i\x05\x84\x05\xaa\x04(\x04\x13\x04P\x04\xad\x04\xb7\x05\xb1\x06\xb5\x06\xf2\x05\xe0\x04\xc8\x04\xdd\x04\xa6\x04\xd6\x04\xab\x04\xd0\x03]\x02\xc5\x00\xf4\xff\x08\xff\xd6\xfd\x96\xfc\x85\xfbe\xfa\xb8\xf8\xfb\xf6\xb7\xf5;\xf5\xe0\xf4w\xf4!\xf4\xf8\xf3\x14\xf4K\xf4\x9a\xf4i\xf5\x99\xf6\xb5\xf7\xc3\xf8\xe9\xf9m\xfb\xc7\xfc\xb9\xfd\xbd\xfe\x14\x00X\x01!\x02\x98\x02\x17\x03\x9a\x03\xe6\x03\xf8\x03$\x04D\x04\xe8\x03U\x03\xed\x02m\x02\xcf\x01L\x01\x16\x01\x8c\x00\xbc\xff1\xff\xdd\xfe?\xfe\x98\xfd~\xfd\xa1\xfd{\xfdG\xfdO\xfdS\xfd8\xfdC\xfd\xaf\xfd!\xfem\xfe\x94\xfe\xa1\xfe\xb2\xfe\xce\xfe\x04\xff.\xffB\xff7\xff\x06\xff\xd1\xfe\xc4\xfe\xb5\xfe\xa1\xfee\xfe\x19\xfe\xbc\xfdw\xfd\x91\xfd\xfd\xfd\x1f\xfe\xee\xfd\xe2\xfd\xf4\xfd\xee\xfd\x18\xfe\x9e\xfe+\xffK\xff%\xffS\xff\xa6\xff\xe8\xff+\x00\x87\x00\x
ca\x00\xda\x00\xfd\x00O\x01\xb6\x01\xee\x01\xe7\x01\xf3\x01!\x02K\x02[\x02i\x02}\x02q\x02I\x02\x0b\x02\x04\x02\xfd\x01\xe2\x01\xca\x01\x98\x01|\x01l\x01T\x01S\x01n\x01\xa4\x01\xa0\x01\x98\x01\x96\x01\xc5\x01\xf9\x01*\x02e\x02\x83\x02\x80\x02G\x021\x020\x02(\x02\x05\x02\xbc\x01~\x01@\x01\xed\x00\x91\x00B\x00\xe3\xffv\xff\xe5\xfem\xfe1\xfe\x13\xfe\xe3\xfd\x8c\xfd&\xfd\xc8\xfc\xa9\xfc\xbe\xfc\xfc\xfc=\xfdr\xfd\x90\xfd\x97\xfd\xaa\xfd\xf7\xfd\x82\xfe*\xff\xc3\xff:\x00\x99\x00\xe6\x004\x01\x98\x01\x18\x02\x8e\x02\xe7\x02.\x03n\x03\x87\x03z\x03B\x03\x10\x03\xd7\x02\xb7\x02\x8c\x02T\x02%\x02\xdb\x01Y\x01\xa4\x00\xfe\xff\xa3\xff\x80\xffn\xffF\xff\xe3\xfe_\xfe\xe1\xfd\x93\xfdw\xfd\x83\xfd\x8b\xfd{\xfd\x82\xfd\x8d\xfd\x9d\xfd\xaa\xfd\xb8\xfd\xd1\xfd\xfb\xfd.\xfe\x94\xfe\x17\xffa\xffh\xffA\xff$\xffX\xff\xbc\xff\x1c\x006\x00\t\x00\xc0\xff\x9c\xffv\xff`\xffY\xff4\xff\xfb\xfe\xcb\xfe\x98\xfeg\xfeQ\xfeF\xfe\x03\xfe\xaf\xfd\xc4\xfd\x15\xfe[\xfe\x83\xfe\x8c\xfe\xab\xfe\xae\xfe\xb7\xfe\x11\xff\xaf\xffG\x00\xad\x00\xd3\x00\xf3\x00C\x01\x89\x01\xce\x01A\x02\x90\x02\xa9\x02\xa4\x02\xc6\x02\xf6\x02\n\x03\x00\x03\xd8\x02\xc2\x02\xc3\x02\xc1\x02\xc7\x02\xbf\x02\x88\x02K\x02(\x02\x01\x02\x05\x02\xed\x01\xa9\x01h\x018\x01\x1c\x01\xf4\x00\x90\x00>\x00\xf5\xff\x93\xff*\xff\xf4\xfe\xeb\xfe\xc7\xfe\x89\xfe&\xfe\xed\xfd\xc0\xfd\x8c\xfd\x83\xfd\x9b\xfd\xcc\xfd\xdb\xfd\xbb\xfd\xd1\xfd\x0f\xfe+\xfe7\xfeJ\xfe\x92\xfe\xf3\xfeL\xff\x90\xff\xe5\xff\x15\x00/\x00U\x00\xa2\x00\x13\x01w\x01\xc2\x01\xe8\x01\xf8\x01\x03\x02\x16\x02:\x02V\x02g\x02d\x02L\x02\'\x02\x00\x02\xd5\x01\xaf\x01w\x012\x01\x01\x01\xe9\x00\xcc\x00\xa9\x00}\x00L\x00\x12\x00\xd8\xff\xae\xff\xaa\xff\xa8\xff\x9e\xff\x7f\xffF\xff\x19\xff\x0f\xff\t\xff\xf2\xfe\xd3\xfe\xb5\xfe\x89\xfeV\xfe2\xfe:\xfe6\xfe\x11\xfe\xec\xfd\xe4\xfd\xdf\xfd\xc7\xfd\xbf\xfd\xda\xfd\xf3\xfd\xfb\xfd\x08\xfe;\xfeu\xfev\xfet\xfe\x92\xfe\xc4\xfe\xe8\xfe\x04\xff,\xffL\xffq\xffw\xffu\xff\x8b\xff\xaf\xff\xd4\xff\xfd\xff\x1e\x00O\x00w\x00\x92\x00\xaa\x00\xdc\x00\xf6\x00\xf0\x00\x00\x01F\x01\x92\x01\xba\x01\xd8\x01\xfc\x01\x0e\x02\x04\x02\x0e\x02F\x02o\x02j\x02Q\x02Q\x02\\\x02Q\x02=\x02"\x02\xef\x01\xa8\x01k\x01K\x01/\x01\x04\x01\xc7\x00\x81\x00/\x00\xe7\xff\xac\xff|\xffF\xff\x10\xff\xd3\xfe\x97\xfeX\xfe!\xfe\xed\xfd\xbd\xfd\x89\xfdT\xfd?\xfd/\xfd\x1f\xfd\x19\xfd\x03\xfd\xef\xfc\xe0\xfc\xe2\xfc\xf6\xfc!\xfd?\xfdb\xfd\x86\xfd\x99\xfd\xbb\xfd\xf1\xfd.\xfet\xfe\xb0\xfe\xfa\xfeA\xff\x85\xff\xc5\xff\x15\x00h\x00\xbd\x00\x1b\x01\x87\x01\xfb\x01^\x02\xb5\x02\x01\x03K\x03\x99\x03\xe5\x031\x04r\x04\x97\x04\xa1\x04\x97\x04\x84\x04{\x04`\x046\x04\xfb\x03\xa9\x03L\x03\xe7\x02v\x02\x05\x02\x93\x01\x1d\x01\xa0\x00!\x00\xa6\xff)\xff\xb0\xfe6\xfe\xc0\xfdY\xfd\x05\xfd\xca\xfc\x8f\xfcN\xfc\x13\xfc\xf1\xfb\xe0\xfb\xe4\xfb\xfd\xfb"\xfcA\xfcV\xfcw\xfc\xbe\xfc\x16\xfdk\xfd\xbb\xfd\x03\xfeJ\xfe\x90\xfe\xde\xfe;\xff\xa0\xff\x01\x00Q\x00\x94\x00\xcf\x00\r\x01A\x01c\x01\x87\x01\xb6\x01\xda\x01\xf7\x01\x0c\x02\x13\x02\x06\x02\xe6\x01\xc7\x01\xb8\x01\xbb\x01\xc7\x01\xc8\x01\xcb\x01\xb4\x01\x94\x01~\x01\x82\x01\x8f\x01\x90\x01\x8f\x01\x8b\x01\x86\x01\x85\x01\x91\x01\x9c\x01\x9f\x01\x92\x01z\x01q\x01g\x01_\x01Y\x01S\x015\x01\xfc\x00\xd4\x00\xab\x00|\x00H\x00\x14\x00\xe5\xff\xab\xff\x85\xffV\xff\x17\xff\xcf\xfe\x86\xfe@\xfe\x04\xfe\xde\xfd\xc4\xfd\x9c\xfdb\xfd"\xfd\xed\xfc\xc6\xfc\xb2\xfc\xb1\xfc\xc0\xfc\xc8\xfc\xd0\xfc\xe7\xfc\x04\xfd!\xfdG\xfd|\xfd\xcb\xfd\x1e\xfex\xfe\xdc\xfe,\xffn\xff\xb4\xff\t\x00q\x00\xd4\x00<\x01\x8a\x01\xc6\x01\xf4\x01\x1e\x02R\x02\x82\x02\xae\x02\xd0\x02\xe2\x02\xf1\x02\xf5\x02\xf9\x02\xed\x02\xd3\x02\xab\
x02\x8a\x02v\x02m\x02S\x02&\x02\xee\x01\xb0\x01y\x01=\x01\x17\x01\xed\x00\xb5\x00p\x00*\x00\xf4\xff\xb9\xff\x7f\xffA\xff\t\xff\xcb\xfe\x8c\xfeW\xfe$\xfe\xf4\xfd\xc3\xfd\x9a\xfd\x81\xfdg\xfdI\xfd3\xfd\'\xfd\x1d\xfd!\xfd0\xfdN\xfdd\xfdq\xfd\x85\xfd\xa2\xfd\xc8\xfd\xf2\xfd\x1e\xfeS\xfe\x88\xfe\xb8\xfe\xe0\xfe\r\xff8\xffm\xff\x9f\xff\xd8\xff\x18\x00P\x00\x7f\x00\xa7\x00\xd2\x00\xfb\x00\x18\x014\x01^\x01\x90\x01\xbd\x01\xd3\x01\xd8\x01\xdd\x01\xe0\x01\xe8\x01\xfc\x01\x15\x02\x1d\x02\x0c\x02\x04\x02\xff\x01\xfb\x01\xe4\x01\xd2\x01\xcf\x01\xbb\x01\xb2\x01\xa5\x01\x90\x01q\x01A\x01\x18\x01\x03\x01\xf4\x00\xde\x00\xc6\x00\x9a\x00^\x00 \x00\xe6\xff\xc4\xff\xa4\xff\x89\xffa\xff.\xff\xfd\xfe\xbb\xfev\xfe;\xfe\x05\xfe\xe3\xfd\xc5\xfd\xaa\xfd\x94\xfds\xfdM\xfd"\xfd\x11\xfd!\xfd;\xfdY\xfdn\xfd\x84\xfd\x9b\xfd\xbc\xfd\xea\xfd*\xfeu\xfe\xc2\xfe\x12\xffY\xff\x9b\xff\xe2\xff\x1e\x00h\x00\xc0\x00\x0b\x01R\x01\x92\x01\xc8\x01\xe5\x01\x07\x02!\x02D\x02\x81\x02\x94\x02\xa3\x02\xa7\x02\x92\x02y\x02l\x02V\x02F\x02:\x02,\x02\x11\x02\xe2\x01\xb2\x01~\x01S\x015\x01\x1b\x01\xf0\x00\xca\x00\x94\x00a\x00;\x00\x0e\x00\xf3\xff\xd0\xff\xaa\xff\x8a\xff]\xff1\xff\x03\xff\xe0\xfe\xc5\xfe\xae\xfe\x9a\xfe\x7f\xfee\xfeF\xfe9\xfe,\xfe\x1c\xfe \xfe$\xfe.\xfe:\xfe;\xfeC\xfeR\xfef\xfe{\xfe\x98\xfe\xbd\xfe\xdb\xfe\x01\xff \xff0\xffM\xff\x88\xff\xc6\xff\xdf\xff\xed\xff\x1e\x00W\x00\x84\x00\xad\x00\xce\x00\x00\x01(\x01=\x01M\x01R\x01Y\x01v\x01r\x01r\x01\x86\x01\x99\x01\x9f\x01\x98\x01\xb2\x01\xd8\x01\xe0\x01\xda\x01\xce\x01\x95\x01G\x01.\x01K\x01l\x01\xad\x01\xe3\x01\x96\x01\xdf\x00\xfd\xff\xd9\xff\x03\x00:\x00\x82\x00\x7f\x002\x00\x8b\xff\xdc\xfe\xa1\xfe\xc4\xfe\xe2\xfe\x0c\xff\xf1\xfe\xb6\xfen\xfe#\xfeL\xfe\x88\xfeK\xfeQ\xfe\xdf\xfe\xc2\xfe\xb0\xfe;\xfe\xcb\xfd\x04\xfe\xa8\xfd\xae\xfd\xfd\xfd\x19\xfe\x15\xfe\x8f\xfeJ\xff\x81\xff\xab\xff\xdf\xff7\x00\xe5\xff\x9e\xff\xfd\xfe\x16\xfe\xd4\xfc\xfc\x00\xd9\x0e\xbc\x14P\x05\xc9\xf5\x93\xf8\xdd\xfa\xe7\xf8\x7f\xfe\xaf\t6\t\xbf\x00#\xfd\x86\xfa\xec\xf6S\xf5\xba\xfd$\x064\tY\x08\x98\x02\x1d\xfe\x9d\xfc\xe2\xfb\xcf\x00\xea\x05\xda\x08\xc4\x04n\x01\xf0\xff8\xfe\x13\xff\x00\x00L\x03\xc7\x02\x9a\x00$\xfd\x9b\xfc\xf8\xfc\x95\xfd:\xfe\x97\x00\x86\x00\x7f\xfeJ\xfb4\xfc\x1f\xfb\x0b\xfb\xf8\xfe\xde\xff5\x02\xf2\xfe\x03\xfe\x8b\xfa\x04\xfd\xa0\xff\x81\x05\x8e\x04d\x03f\x02\xaa\xfb\xe4\xfcT\xff`\x07\x16\x07\xf8\x05f\x03\xf3\xfe9\xfc\xe4\xfb\x92\xffK\x02@\x02*\x002\xfc\xf3\xfa\xc3\xf9\xa8\xfb\xf9\xfe\xb0\x00\xfd\xff_\xfe\xf2\xfba\xff\x98\xfb\xd3\xf4%\x06)\x19E\x17H\x06-\xffH\xfb\x0c\xf7\xd7\xfc\xf4\x0bV\x14O\r\xbb\x02\x1a\xf8\x11\xf1\xe0\xf0\xab\xfb\x01\x03\x1e\x03\xcb\x01x\xff0\xf9\x8e\xf3>\xf5+\xfb\x88\xfes\x02\x95\x08\xdf\x02\x15\xf8\xa8\xf7\x9a\xfb\xd4\xfeQ\x04D\x0b\x01\t\x11\xfe\x8a\xf9\xfc\xfaI\x03\xaf\x06a\tk\t\xda\x00k\xfa\xb9\xf8\x9d\xfb\xcd\xfd\\\x02Z\x08\xb2\x04\xa0\xfas\xf59\xf4;\xf9?\xfd\xc8\x01\x9d\x03)\x00U\xfd\xb6\xfbx\xfc\x05\xfe\xae\x03\x1b\x04F\x04\x84\x03V\xffi\xfe\xe3\xff[\x04\xf2\x02\xb0\x01\xbe\x04\x83\x05\x18\x01Y\xfe@\xfe\xd7\xfe5\x02\x8d\x05\xd5\x06\x18\x02\xd9\x00\x05\xfe\xf1\xfe\xd1\x03g\x03g\x02\xa2\xfdL\xfc3\xfb/\xff\xe0\x00\xa1\x03\xec\x02\x18\xfa6\xfb&\xfe\xed\xff\x07\x02\x0f\x03 
\x01\xb7\x00\xdb\x01\x07\xffy\xfbw\xff\xe1\x01_\x00\xcf\xffx\x02\xcb\x02!\xfbm\xf9h\xfem\xfer\xfc\xee\xfa\x9b\xfdY\x01\xf1\xfd\xea\xfd\xd2\xfd\xd9\xfb\xaf\xfc\x1f\xffA\xff\xeb\xff\xbf\x03a\x03\x9e\x02\x86\x02I\x01!\x01\xee\xff\xba\xff\xb9\x02#\x03v\x01\xd1\x02\xa0\x07\x1c\x04\xdd\xfd\x1c\xfe\x04\xff\xa1\x02\xbd\x03$\x02\xd6\x03\xaa\x00\x08\xff\xdc\xfe=\x009\x00s\xfe\xeb\xfep\xfec\xff<\xff\xa9\xff\x99\xfc\x80\xf9J\xfa\xab\xfd{\xfe\xce\xfew\x01\xc6\x00h\xfe\x07\xfa\x07\xfa\x1f\xff\xc6\x03\x0f\x06-\x016\xfe\x1c\x00\xe1\x02\x14\x02\xb9\xff"\xfe\xb0\x07D\x05\xb2\xf7\xc9\xfa\xe4\x06\xd9\x07y\xfb7\xfe\xa0\x00&\xfa{\xfb\xfb\x00\xf8\x00\x84\xfco\x01E\x06\xfe\x02\xe2\x00\x12\x00\xe0\xfd\x8c\xfe\xbb\x07\x9f\x0bm\x01\xc8\xfcl\xff \xfb\x9f\xf68\xf9V\x03\x89\x06\xe5\x02\x03\xfd\xa8\xf7,\xf7\xf0\xf8j\xfe\xd4\x05m\nd\x06\xfc\x00s\x01\xb4\xfe\x03\xfb6\x03\xfc\n\xd8\x0eA\n\xc1\x04\xa7\xfa\xed\xf2\xb8\xf8"\x02\xa9\x06m\xfc\xb2\xfe\\\xf9\x10\xf4\x0e\xf7\xce\xf7\xbd\xf9G\xf7\xe4\x00Q\x07\x19\x01\xaa\xfe\xb3\xfea\xfe\xbc\xfa\xc9\xfcj\x08\xa6\x0b`\x07\xf5\x03\x04\x04[\xfde\xfb\xa0\xfe\x0e\x003\x044\x08\xd2\x08\xd5\x00\x9d\xfd^\xfa\xbf\xfb\xbb\xfd\xdd\xfbo\x00\xb1\xfd\xd3\xffS\x01\x89\xf8v\xf6\x07\xf7k\xfbe\x00\xd9\x02W\x08P\x03\xd2\xfa\xf5\xfdh\x00\xc4\x00\xe9\x05\xb5\x12\x93\x0e\xbd\xfc:\xfc\xda\x02\xd3\x06r\x03\xbf\x06-\t}\xff\x01\xfc\x9a\xfe\xfc\x00&\xfb\xf6\xfb\xe2\xff\x17\xfdM\xfa\x1e\xf8v\xfc\xa1\xfe\xe0\xff~\xfdd\xf8\r\xfb\xe8\xff\xd9\x03j\x01u\xfd\x1a\x01f\x055\x03 \x03\xa6\x03I\x04^\xfe\xe0\xfd\xd9\x03\xde\x03\xd6\x01W\x03V\x03s\xfd\x12\xf5\xef\xf8\xc6\x00\xec\xffT\xfd\x98\xfcK\x00W\xfaE\xfaY\x00\xc8\xfeg\xfc\xda\xffp\x04\xd1\xfd)\xfc\xc5\x03\xe2\x04G\x02\x98\x03\xa3\x03\x1d\x00&\xff{\x04\xb9\x04\x1a\x00h\x00\x8b\x03\xe6\x04d\x02\x8a\xfd\n\xfd\xe9\xfc\xac\xfd\x14\xfft\xfeC\x00P\xff\'\xfd%\xf9;\xfc\x92\x00A\xfd\xd3\xfc\xe3\x01\xdb\x05\x00\x01R\xffU\xfe\xdd\x02\xd8\t\xb3\x05\xaa\x02\x00\x01j\xfe\xb3\x01\x91\x04\x8a\x07(\t\x8c\x02\xfc\xfb/\xfbW\xfc\xc6\xfbY\xfc\xeb\xfe\xfa\xfe\x93\xff\xb4\xfe\xa9\xf9\xcd\xf8u\xfb\xa9\xfba\xfaC\xfd\x9b\x01\xb0\x00\x06\xfc\xd0\xfb\x1d\x01/\xff\x11\xfc 
\xfd\xe8\x02\xc0\x02\xb6\x01\xfa\x04\xed\x02\xb9\xfe\x17\xfeg\x06\x12\x08\xda\x05>\x05J\x03~\xff\xa1\xff\xba\x03=\x01\xed\xfd\xa9\xfe2\x02\x0f\x00s\xfa\x13\xfb\xd1\xfci\xff\x8b\xfb\xea\xfb<\xff2\x00\t\x08<\x00l\xfb\x14\xfeJ\x00\xd2\x02\xb5\x05\x9f\nh\x04%\xfeD\xfa\x9c\xf8\x84\xfc;\x04\x8e\x07\x9c\x06&\x05\xe1\xfe\xfe\xf6\x0b\xf6\xed\xfcY\x01\xd4\x00\x12\x01\xcf\x03}\xff;\xf9\x11\xfa\xfa\xfb\x0e\xfc[\xfag\xfc\xc3\x01\xc3\x05&\x04[\xfeO\xfb\x12\xfd\xdb\x01\xc6\x04\xab\x06\n\x049\x01+\x02\xb3\x02\r\x05\x05\x07\x19\x05\x94\xfd\xa5\xf8a\xfb:\xfe\xd9\xfd\xa9\xfdi\xfe\x84\xfd\x08\xfa\x7f\xfc\xdd\xfe\t\xfe\xdb\xfd\xfd\xfdx\x01:\x03m\x05\xf9\x01#\xff\x8d\x00x\x02\x18\x05\xd2\x04\x89\x06\xe6\x04\x86\x00W\xfd\xf5\xfc\xaa\x01\x13\x05\xea\x07^\x06H\x01\xdf\xfb\x97\xfb\x87\xfd\xff\xf9\xca\xfay\xfeE\x02\xa7\x00\x17\xfd.\xfd\x05\xfai\xf6\x07\xf6\xba\xfc\\\x042\x05\x87\x02\xe2\xfd\xb6\xf9\xb3\xfbH\xff\xe2\x03\xe0\x04\xaa\x03\xe2\x01\xb2\x00\xd4\x02U\x01\xd9\x00\n\x00"\xff-\x01\xe0\x03\xab\x02\xc7\xfe\xbd\xfd\xc5\xfb\xa7\xfa\x91\xfd\x16\x00\x93\x00+\x00m\x00u\xff\x80\xfd\xa2\xfb\x9b\xf9\xc2\xfc?\x00q\x03\xdf\x04m\x04\x06\x03\x82\xfe\xe4\xf9L\xfb;\x01\x8d\x03\xb3\x02C\x02\xd9\x02O\x02E\xff\xf1\xfd\xc0\xfe\xe8\xff\x7f\x00f\xfe\xe8\xfff\x02\xfe\x00m\xff\x00\xff\xf0\xfd(\xfd>\xfa"\xfa\x02\xfc\x05\xfc\xf8\xff\xc0\x01"\x02\x83\x01\x19\xff;\xfd\x0f\xfbQ\xfb\x1a\x02\xda\x06\xdc\x08\xa4\x08\xdb\x05*\x02\x86\xfc\xe1\xfaX\xff\xfd\x04?\x07\x9a\t\x0c\x06\x1e\x01\xb9\xff|\xfeL\xff;\x00\xf8\x04T\x08\x19\t\xf4\x08&\x06:\x03\xa8\x00\x17\x00^\x04\xd7\x08\xf0\n\xa9\tI\x08\xfa\x04R\x00\xee\xfe9\x00\x9d\x04\x8a\x04\xa6\x04c\x05`\x03\x0b\x02\xc0\xff}\xfd\xf3\xfcy\xfe\xe5\xfe\xa1\xfeJ\xff`\xff\xf9\xfc\xf2\xf9\xa8\xf8O\xf6l\xf4\xda\xf6\xbc\xf9\xd5\xfaP\xfb\x80\xf9*\xf7\x14\xf6E\xf7\x0b\xf8\x95\xf7\xc3\xf8\xf2\xf9\xb1\xfa\x14\xf9\x89\xf8Y\xf9\xc3\xf8\x9d\xf9\x8e\xf9\x98\xf8\xb4\xf7\xf5\xf8\x11\xf9i\xf9\xa6\xfb\xa3\xfaZ\xf9\x1d\xf8\x81\xf6\x04\xf5\xe4\xf2\xe3\xf2\xf6\xf32\xf5\xd7\xf7\x1a\xf9U\xf8]\xf9\xf4\xf9\x94\xf78\xf8\xdc\xfc\x8b\x03,\x0b\x15\x0b\x8f\x08K\x07\xf7\x02\x11\x04\xb9\x06\xb4\x08J\r*\x0c\x92\t\xee\x07j\x08\x82\t\xac\x07\xc0\x07N\x05\'\x04\xbd\x03\xa5\x03\x8f\x07\xf9\x04J\x04\t\x07\xb5\x01\x81\xfb1\x00\x92\x15\xd7)\xd5,k$Q\x1bc\x18\x8a\x17\x81\x1a5&01\xa12k)\xe3\x1fx\x18\x91\x11=\x07{\xfe\x96\xfb\x14\xfd\x1b\x00\x06\x01`\x00y\xf4M\xed\xe3\xe3\x12\xd8\xbc\xdaI\xe6\xe2\xf0-\xf2r\xf1\x99\xee\x18\xedc\xe9\x0c\xe8\xd8\xef\x84\xf5A\xfaL\xfd_\xfdM\x04=\x07\xb3\x01Z\xff\xb0\xf9\x01\xf6\x1d\xfaL\xff)\xfe\xdf\xfcN\xfav\xf2\x1b\xe9\x95\xe4\xd9\xe8\xb9\xeb\xbc\xec\xba\xed\xcb\xea\xf0\xe7\xde\xe6k\xe7\xd9\xe8;\xec\xab\xf17\xf5\xf8\xf7\xe0\xf9\x1a\xfb!\xfa\xe4\xf8\x9f\xf9\xec\xfb\xcf\xfeN\x00\xd1\x00G\x01\x11\xff8\xf9\t\xf5\xa5\xf3\xc1\xf3\xaf\xf7\xa2\xfb\xb5\xfc\xf9\xfc\x82\xfb\xcc\xf8\xec\xf5\xa3\xf5\xfa\xfa\xf5\x02U\x05\xcd\x03\x19\x07\xae\r\x06\x13\x1e\x11R\n\xda\x07\xa3\x069\x0bs\x105\x15\xd7\x1a\x14\x16\x90\x0e\x1a\x07\xec\x01z\xfd\xd8\xfb\xeb\x0e\xb9,\x9e>\x1d=\xaa-\xa0"\xaf 
\xfc\x1bv\x1d\xa1,\xac;@?\x0c8\xb0*h\x1b\x88\x0e\x03\xfd\xf0\xf1\x16\xf3\x9f\xf8\x0b\x03{\x04\x99\xfd\x89\xf2\x10\xe2\x16\xd3\xd2\xcc\xda\xd0\xe4\xdf\xaa\xef/\xf4\xcc\xf5\xe2\xf6\xb3\xf1\\\xec\x88\xe9\xb7\xeb\xf3\xf3\xa1\xfb\x00\x03\xb9\nF\x13\x14\x11\xad\x04\xcf\xf9\x86\xf0\xec\xee\xa1\xf4-\xfb\xc0\xffI\x01\xee\xfd>\xf4\xca\xe9f\xe4\xb5\xe4\xc9\xe6\x13\xe8\xc5\xee~\xf4R\xf7\xbf\xf7\xe3\xf2\xea\xec^\xe8\x95\xe8\x9d\xec\x11\xf5s\xfea\x03j\x03\xb0\x01j\xfd\xde\xf8\t\xf7\xdc\xf8C\xfd\xd9\x02\xca\x06\x17\x04\x17\xfe\xe0\xf7\x8e\xef\xf5\xe9\xcb\xe9P\xec\xf8\xf0k\xf3\xc9\xf6\x80\xf7 \xf4Q\xf2\xa5\xf0n\xf4\x8c\xfb\xa4\x01 \x07~\x08L\x0c\xf3\x0e\xf6\x08@\x03\xec\x04\x81\x07\xd3\x0c\xe2\x10\x91\x10l\x11\xdb\x0b\xa5\x05\x02\x04p\xfe<\xfb\xa0\x0c\xf5.\xefK\x04T\x8dA-*\x94"\'!S\'\x9c9\xebH\xcfJ@;\x90%\xb4\x16\x95\x06\xda\xf8\xea\xed\xfb\xe5*\xe9p\xf0\xdb\xf4S\xf11\xecO\xe0Y\xd0L\xc8Y\xca\xd2\xd8\xb4\xedE\xfb\xda\x01\xce\x05\xc5\x01:\xfb\xb2\xf6\xe5\xf6y\xfa\xab\x00.\x07I\x0e\xb8\x11;\x10T\x0cq\xfeB\xf4\xca\xef\xc2\xe6\x9c\xe3\xdd\xe7K\xee\x95\xf4]\xf5\x8b\xf0\xb4\xe6\x80\xdeW\xdd\x0f\xe2i\xe9\xcd\xf2Z\xfd\xec\x01"\x02\x14\x02\xc3\x01g\x00\x14\xfdn\xfa\xad\xfc\xee\x014\x08o\x0b\t\t!\x05\xd6\xfe\x04\xf8\xb4\xf3\xf2\xf3,\xf9B\xfcW\xfc\x86\xf9\xa8\xf5\xef\xf4*\xf2b\xee<\xeb\xdf\xe9\x1b\xeb)\xed\xb7\xf3\xea\xfa\xfe\xfe\x83\x01\xfb\xfdp\xf8\x88\xf7\xed\xf8\x95\xfc\x94\x028\x057\x06\xe0\x05\xeb\x03\x90\x01\xf7\x00\x98\xfc\xfa\xf5\xa7\xf5\xcd\xf5d\xf2\xdc\xf1\xce\xfd\xef \xb8O\xe6f\x83^\xa6I\xd96F.\x913\x9b?!Pv[\xceR\xb1>\x0c*\x19\x12\x17\xfc\x94\xe5\x19\xcf\xfe\xc6\xbb\xcb\x13\xd4~\xdc\xbd\xe1\xd7\xdfZ\xd8\x9d\xcb\xf5\xc2A\xc8%\xd9\xc1\xee\x0c\x04\x13\x14\x95\x1e\xb4%\xd7\'\x02"\xba\x17\xcb\x0ca\x05\xfc\x04]\x07b\x0e`\x14\x0e\x10&\x07O\xf5\xde\xdeP\xcd\x18\xc0\xea\xbe\xf8\xc6\xd9\xd1C\xdej\xe6*\xe8\xe9\xe5\xa8\xe5\xff\xe6\x1b\xe8\\\xee\x8a\xfa\xef\x08s\x18\x05(\xe9.\x00*2!J\x16e\n\x9e\x04\xb4\x01"\x00^\x02\x01\x03\xf3\xfd\xa7\xf6&\xf1"\xeb0\xe6\xa7\xe2 \xe2I\xe7\xb9\xee\xf0\xf31\xf7\xec\xf6\x1e\xf5\xf8\xf4\xc0\xf14\xf3\x98\xf9\xf3\xfc\xf2\x02\x8e\x06\xc6\x05\xfa\x06U\x04\xed\x00\xc3\xfe\xe5\xf93\xfc9\xfe\x9e\xfe \x02\xda\xfe\xb2\xf9\x90\xf5\x10\xef\x86\xedA\xef\x88\xee\x1a\xf0\xf9\xf2\x84\xed\x07\xe9=\xf6\xa2\x15\xb1@e[\xa1W\x08K>B\xffB\xbaH\x0fL\xd8P\xa0Q\xf3Jk>\xea-\x0b\x1f`\r\x1d\xf3,\xd7\xa2\xc4\x95\xc0\xdc\xc7p\xd0\xfb\xd5D\xd9\x89\xd9\xb2\xd7\x97\xd6)\xd9\x12\xe2\xe1\xed\xfa\xfa\xd5\x08p\x19\x97)\x913\x8f3\x9a(\xcf\x1a/\rk\x03;\xfeH\xfc\xcf\x00f\x00\xc1\xfa!\xf2z\xe1a\xd1\x95\xc4\xa3\xbc;\xbe\xc7\xc7Q\xd5\xeb\xe2\xba\xee\xd4\xf5c\xf9l\xfa(\xfbp\xfe\xc3\x03\xf9\n\x0e\x15\xb2 \x06)M*\xfb#p\x18\xbe\nk\xfe\x1e\xf7\n\xf3\xba\xf0\x95\xf1\x1d\xf1r\xee\xc9\xee\xec\xedX\xec\xa4\xebF\xe9M\xe9\x85\xed\x0e\xf5\x9a\xfbu\x01W\x05I\x04\xfb\x02\xf7\xff@\xfc\xa1\xfb\x1d\xfbM\xfc\xe4\xfe\x0e\x01\xf1\x02\xf0\x01\xc4\xff\xc4\xf9X\xf4\xdc\xf0p\xedl\xf06\xf4\x85\xf5\xc9\xf8\xf5\xf5e\xf0\xa9\xefX\xebZ\xeb*\xf2\x89\xf0<\xec\xed\xf3l\x0cC6\xf5]\xf5f{ZKL\xa2?i<\x8e@\xccDrL!NDBu/\xb7\x1b\x0c\n?\xf7\x9e\xdc\xb3\xc3\xc7\xb7\x00\xba/\xc5\xd6\xd2\xba\xdd\xa7\xe3\xe1\xe2:\xdeh\xdb\x18\xe0W\xec\xaf\xfb\xc8\t\xf6\x16\xb4%\xe8116T/?!\xb9\x12\xe2\x06\xa3\xfdk\xfc\x10\x02\x9d\x04.\x02/\xf5\xde\xe3n\xd5D\xcaw\xc6\x00\xc6?\xc9\xb6\xd2 \xdd\x89\xea\xc6\xf6L\xfe\n\x02d\x00c\xfeY\xff.\x04\xc8\r\xb3\x18\x9f 
\xa1!Z\x1dg\x15/\nO\x00\x7f\xf8o\xf0\xef\xea\xf9\xe9,\xeb8\xee\xb4\xf2\xb1\xf4\x81\xf3k\xf1\x1b\xee\xa2\xece\xf0F\xf7a\xfe\x8d\x05\xbf\x08)\t\xdf\x07W\x05\x99\x03\x87\x00\xd7\xfdL\xfb\xb0\xfa\xc5\xfc@\xff\xd3\x01\xac\x01/\xfd\xf1\xf7\xd6\xf1\xb9\xebA\xe9Q\xe9.\xea\xc6\xec\x9a\xefH\xf1\x1d\xf2\x94\xf1\xce\xee[\xec\xa9\xeb\x93\xea\x08\xea\xd1\xec\x8f\xf8\xe4\x18\xf1C(d(n\xeccWS$I\xc8D\xdeB\x92E\xecJhM\xd1G\xe76\x11"\x96\x0e\xbd\xf5\x01\xd9\xfa\xbd\xc9\xacz\xadg\xba\xee\xca\r\xd7\x86\xdd\xda\xdd\xc6\xdbH\xd9c\xdar\xe4\xa1\xf4D\x06\x01\x17H%\xb61\xd09\xe980/?\x1f\xe8\x0f\x18\x04\xc3\xfc\xa2\xf9\xec\xf9\x17\xfd\x9a\xfb\xa5\xf4)\xe8\xe0\xd89\xcc$\xc2\xf0\xbe\x9a\xc2s\xcb\x08\xdb:\xeb#\xf8\xb2\xff\xa1\x02\xb1\x02&\x01.\x01i\x03\x0c\tM\x13\x8b\x1e\xcd$]#\x87\x1c\xc8\x12q\x07\xd0\xfd\x8e\xf5/\xefq\xedx\xef\n\xf1\xbd\xf1\xf7\xf2o\xf2\xc0\xf0\x1d\xef\x12\xec\xc0\xeb\x9f\xf0\xb8\xf6\xcd\xfeY\x05\xb7\x07\xc1\t#\x08\x12\x05G\x03\\\x01\xc1\x00d\x01P\x02\xfd\x02o\x03\xf2\x02\xbb\xff\xb3\xfa\xa6\xf6\x84\xf1\xbe\xed\x82\xec7\xebZ\xec\t\xee\xe9\xedQ\xedp\xecR\xe9\xb9\xe8\xe7\xecz\xefR\xf1p\xf0Q\xec4\xf3\x1e\x0c\xfe0\xf1V\x95iif\x9dY\xf6NxK\x9bL\x9cK\xe2H\xe8G\\Bs6l&\x0e\x12\xcf\xfd\xb3\xe9\x8b\xd2$\xbd\xa6\xb1\xcf\xb2Z\xbe\x83\xcc\x01\xd5\xdd\xd6\x92\xd6/\xd5\xe1\xd6T\xde+\xea\x1d\xf9\xec\x08\xd2\x15\x85 g*\xe40\xed2\xcf-\x9d"\xc7\x15\xcb\n\x0e\x04\x97\x01\xf1\x02\xa8\x02E\xfe\xed\xf4\xb3\xe5\x9e\xd6I\xccM\xc7\xdd\xc79\xcb\x82\xd0\x00\xd8\x98\xe1O\xebH\xf4\xb8\xfa\xf6\xfc.\xfd\x1a\xfd\x1f\xfes\x03\xf3\x0cx\x17\x1d\x1f\xce #\x1d|\x16F\x0f}\x083\x02\xba\xfc\x15\xf8p\xf4\xce\xf2\xf9\xf3\r\xf7c\xf9\'\xf9\x01\xf5(\xef\xab\xea\xf4\xe9\x1c\xee\xeb\xf4\xf7\xfa\xe2\xff\xe0\x01b\x02.\x02f\x01\x90\x01i\x01\x0c\x01p\x01*\x02\xce\x04R\x07L\x08[\x07\x92\x01\xee\xfa\x19\xf5?\xf0=\xee\x9a\xec\x8e\xeb\xb2\xec\n\xed\x0b\xecS\xeb\x9e\xea\xc5\xea\xea\xe9\xb4\xe6\xbe\xe6I\xf2\x94\x0c\xa80zP\xd1]\xf2Y\xdfN+E.B{B\x7fC\xb2G/K\x85IH>\x92)*\x13M\x00T\xf0z\xdf\r\xcd[\xbeS\xb9\xcc\xbe\xfd\xc9A\xd4\x13\xd8\xa6\xd6\x8c\xd4s\xd4`\xd8R\xe0i\xebI\xfaK\n\xc3\x178"x(\xd8+=-\x91*\xde"\x12\x18\xea\rq\x08\x90\x084\tT\x06+\xffB\xf4\x92\xe81\xddd\xd2{\xcab\xc7\xd4\xc8\x02\xcez\xd4S\xda\xf7\xe0\x17\xe9`\xf0\x89\xf5S\xf7\xd8\xf7N\xfb[\x02\xa9\x0bD\x15I\x1d\xa7"\x81$\xaa!\xfd\x1a\xca\x12\xa3\x0b\xf0\x06\xa5\x03\x85\x00,\xfd\x04\xfa\xf9\xf7\xef\xf5\x16\xf3\x03\xf0\xa4\xec\xa1\xea\x06\xea\xcd\xe9\x7f\xeb)\xefS\xf3Q\xf9\x92\xfe\xba\x01q\x04Z\x05z\x05p\x07\xc3\x07\x01\t"\x0b\xa3\x0b`\r\x9f\x0c\x1d\t\x9e\x03l\xfc\x1b\xf6\xa3\xf1\xb5\xefB\xee\xe7\xecO\xec\xab\xe9q\xe7X\xe6\xf4\xe3\xd3\xe3~\xe4<\xe5\xed\xecO\xfe\xf0\x16\x9c1^E\xaaM\xe5M\xdbH\xa9A`=4=9A\xc2G\x1fK\x10EU7o%\xa9\x12\xa6\x02\x82\xf1\xd4\xe0\x82\xd4\x97\xcd\xa9\xcc\x87\xce\xf9\xcf$\xd1\xae\xd2\xb4\xd3j\xd4\xb4\xd4\xdc\xd5%\xdbc\xe5\xbc\xf2\xb6\x00\xa2\x0c\x0f\x17\xf3\x1f\xac%!\'\t$\x0b\x1f\xc5\x1a\x9d\x17/\x15\xb6\x12\x95\x0f\xbb\x0b\xf3\x05T\xfd\xe1\xf2k\xe8\xb4\xdf}\xd9\xad\xd4\xeb\xd0\xae\xce3\xcf\xa7\xd2\xf7\xd77\xde,\xe4\x99\xe9d\xee\xe6\xf1\x17\xf5\xa3\xf9b\xffE\x07\xee\x0f\x9b\x16\xf1\x1bL\x1f\xf6\x1f)\x1f\xbd\x1b4\x16\xad\x10\r\x0b\x86\x06\xfd\x03\xaa\x01\t\xff\x10\xfc\x99\xf70\xf24\xedY\xe9\xc3\xe7:\xe8*\xea\xef\xecz\xf0=\xf4q\xf7\x8a\xfa\xde\xfd{\x01\x82\x05b\t|\x0b\\\x0c\x0b\x0c\x96\n"\t\xdd\x06Q\x04x\x01\xe4\xfd\x17\xfa\xe2\xf5a\xf2T\xef\x1f\xed\xf8\xeb\xb5\xea\xe8\xeaR\xeb\x90\xeb\'\xec\xed\xeb\x1c\xee\x0b\xf5\x93\x02\xe3\x16\xed+2<[D\xdbD\xe5A\x16=\x9f8\xb16e7\x9d;\xa9?\xb0>\xff6K)\x87\x19\xed\nO\xfd\x00\xefg\xe1\xe3\xd6\xe7\xd0J\xd0X\xd2\xcb\xd4X\xd7\xa8\xd8M\xd8w\xd6w\xd3F\xd2\xdd\
xd5]\xdf\xc7\xed\x87\xfdZ\x0bh\x15=\x1b\xb1\x1d\xf3\x1c\xdf\x1a\xfc\x18\x96\x17c\x17\x10\x17P\x15G\x12\xc9\r\xa2\x08h\x034\xfc\xf7\xf2\xf8\xe8\xc0\xdf\x0b\xd9u\xd5\xd2\xd3\x80\xd4\x89\xd7\x12\xdcg\xe1\xd1\xe5L\xe8$\xe9\x0f\xea\xf3\xeb\xff\xef\xae\xf66\xff.\tE\x13,\x1b}\x1f\xb8\x1f\xb4\x1c\xfb\x18\xbe\x15_\x13\xd2\x11A\x10\xa7\x0e\x9a\x0c}\t\x8a\x05\xe4\x00m\xfb\x1b\xf6@\xf1\x10\xedB\xea&\xe9\r\xea\xdd\xec\xca\xf0\x83\xf44\xf7n\xf8\xf6\xf8o\xf9\x8e\xfa\xa9\xfc\xfa\xfe`\x010\x03\x81\x03}\x03\x92\x02g\x01\x8c\x00\x0c\xff\xd8\xfdZ\xfc\x82\xfa\x1b\xf9\xbf\xf7\xdd\xf6\xcc\xf6\xdd\xf6s\xf6@\xf5#\xf3\xd4\xf2p\xf7c\x02\xc9\x11\x99!Q-F3\xad4t2\x8c.a+\xd1*L.\xfe4;;\xcd\xf63\xf5\x01\xf4\x88\xf2[\xf1\x00\xf1T\xf1\xaf\xf2\xf6\xf44\xf7>\xf9\x91\xfa\xf0\xfa\x00\xfb\xc6\xfa_\xfa\x89\xfa\xc1\xfaZ\xfb\xab\xfc\xe7\xfdR\xff\xee\xff\xa3\xff\xa7\xfe\xd8\xfc\xf9\xfa\t\xf9/\xf8\xc3\xf9\x07\xffo\x08Z\x14\xba\x1f$(\xe3+R+\x9a(}%\xfa#\xe9$\x03(\xdd,11\xc92\x130M(d\x1d\xd0\x11\xca\x07\xac\x00\xde\xfb\x19\xf9d\xf7\xee\xf5\x9b\xf3\x11\xf0\x8b\xeb\xb5\xe6\xf7\xe2\x93\xe0<\xdf\x8e\xde\xfc\xdd\x1f\xde\xb3\xdf\xf2\xe2\xd7\xe7\x9c\xedx\xf3\xc2\xf8p\xfc&\xfe\xf4\xfd\xdc\xfc1\xfc\t\xfd\xff\xffw\x04H\t\xf5\x0cK\x0e\x8b\x0c\xda\x07.\x01r\xfa\x84\xf5<\xf3S\xf3\x89\xf4\xdd\xf5\xef\xf5\xb3\xf4@\xf2\xf4\xee\x0c\xec\x1b\xea\x8c\xe9\xcb\xea<\xed\x90\xf0/\xf4\xc8\xf7\xb3\xfb\xd0\xff\xe3\x03[\x07\n\n\xad\x0br\x0c\x9a\x0c~\x0c\xc1\x0c\xad\rq\x0f\xaf\x11~\x13\xb5\x13\xd6\x11\x19\x0e<\t\x04\x04O\xff\xd1\xfb9\xfa8\xfa\x18\xfb\xeb\xfb\xc3\xfbc\xfa\x05\xf8\xed\xf4\x18\xf2\xea\xef\x0e\xef\xea\xef\x0f\xf2\xde\xf45\xf7v\xf8\x9d\xf8\xc7\xf7\xb7\xf6\xbd\xf5u\xf5\x06\xf6H\xf7P\xf9p\xfbB\xfdX\xfeQ\xfep\xfd\x19\xfc\x94\xfa\xa0\xf9/\xfa\xa8\xfcl\x02\x16\x0b>\x15\xe8\x1ex%\x11(\x9b\'X%{#0#\x7f$\xf3\'\x0e,\xc5/c1\xff.\x01)\r Y\x16\xf7\r/\x076\x02\x89\xfei\xfb\xfe\xf8\n\xf6T\xf2\xd6\xed\xb5\xe8\x87\xe4O\xe1\x19\xdf\xbb\xdd\x80\xdci\xdc\xb2\xdd\xad\xe0,\xe5\x11\xea\xf5\xeee\xf3\xb6\xf6\xe9\xf8\xe3\xf9q\xfak\xfb\x8a\xfd\x1e\x01\x84\x05\xee\t@\r\xac\x0e\xbd\r\x87\n\xb5\x05\xa6\x00\x99\xfcd\xfa\xed\xf95\xfa\xab\xfa-\xfa\xbc\xf8g\xf6`\xf3\x90\xf0M\xee*\xeds\xed\xc9\xee5\xf1\xef\xf3\xe9\xf6:\xfa\x8f\xfd\xfc\x00\xae\x03\xc0\x05\xfa\x06{\x07\xca\x07\xdd\x07r\x08\xc1\t\xa5\x0b\x14\x0e%\x10\xd2\x10\xed\x0fW\r\xa9\t\x99\x05\xca\x01\xe1\xfe\x80\xfdd\xfd\x17\xfe\xf7\xfe\x19\xff\x17\xfe!\xfcC\xf9+\xf6\x94\xf3\xc8\xf13\xf1\xd7\xf1\xf7\xf2S\xf4I\xf5\x88\xf50\xf5~\xf4\x8e\xf3\x17\xf3\xc8\xf2\xe7\xf2\x99\xf3v\xf4,\xf6\x01\xf8\xa0\xf9\x0c\xfb\x90\xfb\xd5\xfb\xcd\xfb\x7f\xfb,\xfc\x93\xfe5\x04\x81\rM\x18\xaa"\xdc)\xe1,\xfe,\x0f+\xff(\x02(\xb1(\t,z0+4\xf34\xbd0\xa9(j\x1e?\x14\x01\x0c\x07\x05R\xff\xcb\xfa"\xf7\xd4\xf3\xef\xefm\xeb\xc2\xe6\xb8\xe2\x97\xdf\xe1\xdc\x88\xdar\xd8/\xd7\xae\xd7\xfd\xd9\x0f\xde\x84\xe3\xe2\xe9\x94\xf0F\xf6X\xfas\xfc^\xfdM\xfe\xfc\xff\xf7\x02\xfd\x06o\x0b\x93\x0fm\x12\x02\x13\xfd\x10\x8d\x0c$\x07^\x02\x08\xff\x06\xfdz\xfb\x13\xfa@\xf8$\xf6\xd0\xf3:\xf1\xd0\xee\xb8\xec,\xeb\xae\xea\x18\xebn\xecj\xee\r\xf1\x91\xf4\xe1\xf8~\xfdx\x01u\x045\x06\x0e\x07U\x07k\x07\xfb\x07U\t\x93\x0bO\x0e\xcc\x10\x1f\x12\xcf\x11\xe8\x0f\xcb\x0c\xd9\x08\xe3\x04{\x01W\xff_\xfe5\xfe`\xfe;\xfe\x8f\xfd=\xfc\x0c\xfaP\xf7b\xf4\xf8\xf1\xb4\xf0\x9d\xf0S\xf1t\xf2f\xf3\x00\xf4\xe9\xf3A\xf3"\xf2*\xf1\xd7\xf0]\xf1\xd5\xf2\x7f\xf4\x9d\xf5.\xf6U\xf6\xb8\xf6d\xf8\x99\xfa0\xfdu\xffK\x00}\x00n\x00j\x01W\x05\xc9\x0c\xa3\x17i$T/"6\x1a7K3\xc3.\xc3+\xc8,\x900\x9a4c7T6=1\xd7(\xa7\x1d\x89\x12\x93\x08d\x006\xfaE\xf4Z\xee\x0e\xe8G\xe2B\xde\xbb\xdb\\\xda-\xd9;\xd7 
\xd5\xe2\xd2\xc8\xd1\x04\xd3M\xd7I\xdf[\xe9\\\xf3 \xfb\xd8\xff\xb1\x02\xdd\x04G\x07\xf8\t\xb7\x0c\xe9\x0fb\x13u\x16\xf1\x17\x1b\x17\x88\x14\x12\x11C\r\x07\t\xed\x03\x0c\xfe-\xf8\x05\xf3T\xef\x1a\xed\xd2\xeb;\xeb\xb6\xea\xfe\xe9\xd3\xe8,\xe7\xdf\xe5}\xe5\xf4\xe6\x85\xea\x85\xefg\xf5\x06\xfb\x0b\x00o\x04\xa2\x07\xe2\t\xe6\n\x82\x0b\xac\x0c5\x0ey\x10\x90\x12\xd9\x137\x14>\x13\x89\x11\x0f\x0f\xef\x0b\xd3\x08r\x05b\x02\x92\xff\x04\xfd]\xfb3\xfa\xba\xf9\x81\xf9\xb1\xf8{\xf7\xba\xf5\xc5\xf3H\xf2)\xf1.\xf1\x18\xf2\x97\xf3 \xf5\xd7\xf5\x82\xf5\x9f\xf4\xa3\xf3|\xf3M\xf44\xf5F\xf6\xcb\xf6\xb0\xf6\x9c\xf6i\xf6\x9f\xf6\xc2\xf7\x7f\xf9x\xfc.\xff\xc0\x00e\x01\xb0\x00\x1e\x00\x06\x00\x14\x01\x01\x06D\x0fo\x1cn*6429%9\xa26m4\xff1<1\xdc0\xd30\xb00F.u*\xdd#\xf0\x1a\xf4\x0f\xce\x02\xfd\xf5\x96\xeao\xe2\xce\xdd\x9a\xdbk\xdb[\xdbW\xda\xc5\xd7o\xd4\xd9\xd1b\xd1{\xd3\xcf\xd7\x81\xddI\xe4\xf3\xebI\xf4\xaa\xfd\x9f\x06a\x0e\xea\x13\x94\x16\xe4\x16\xb0\x15\xb8\x13\xe7\x12\xab\x13\xaf\x15\xe0\x17\x12\x18W\x15/\x0f\x9b\x06/\xfd?\xf4\xb6\xecC\xe7\xd4\xe3$\xe2z\xe1\x81\xe1\xc1\xe1\xbf\xe1.\xe1Q\xe0\xba\xdf*\xe0\x8e\xe2Z\xe7m\xee\t\xf7\xaf\xff\x1e\x07\xa5\x0c%\x10&\x12.\x13\xbe\x13+\x14\xa8\x14\xfe\x14?\x15\x1e\x15\xa6\x14w\x13K\x11\xcb\r\x02\tO\x03\xe4\xfd[\xf9\x90\xf6\xcc\xf5\n\xf6\xf3\xf6\x1a\xf7\x81\xf6\xb9\xf5\xc4\xf4<\xf4\xdc\xf3\xfe\xf3\xd5\xf4\xe2\xf5w\xf7\xc5\xf8\xe0\xf9\xcf\xfa\xb3\xfb\xb6\xfc7\xfds\xfc\x9c\xfa\x80\xf8O\xf6\xff\xf4\x03\xf4\xc5\xf3[\xf5\xf3\xf6g\xf8T\xf8\xb3\xf6\xa1\xf6\xd5\xf6\xba\xf8\xcc\xfa\xc0\xfbs\xfd\x96\xfd\x9b\xff\x9a\x05\xa2\x10\xce R0p;\xd4?\xe7=m9N4b1\xcd0\xe81c4\xab3\xf1/p(\x00\x1e\x14\x13\xf1\x05 \xf8\xca\xeb\xf3\xe1$\xdc\x8d\xd9\x1e\xd8\x94\xd8G\xd9\xf5\xd8\xc4\xd7f\xd5\xcd\xd3\xf5\xd4F\xd8;\xde1\xe6q\xef\x8a\xfaR\x05\xe4\x0e\\\x15\xa3\x18\xc5\x19\xa8\x19\x9a\x19\x86\x19\xa5\x19\x0f\x1a\x05\x1a\xe1\x18\xef\x15\x00\x11\xb8\n,\x03\x8e\xfa+\xf1:\xe8\x02\xe1\xb9\xdc\xf9\xda\x10\xdb\xb4\xdb\x1d\xdc\xdc\xdb\xde\xda\xd5\xd9\x04\xdaO\xdcO\xe1T\xe8t\xf0\xa2\xf8S\x00G\x08\xad\x0f\xba\x15\x04\x19\\\x19G\x184\x17\x0b\x17S\x18\xa7\x19r\x1aM\x19\x0f\x16\xb3\x10h\n\xbc\x04\xfb\xff\xd4\xfc\xa0\xf9\xe3\xf6\xfd\xf4p\xf4S\xf5\x92\xf6$\xf7\x9e\xf6\xa8\xf5\xd6\xf4\x02\xf5\xe8\xf5\x90\xf7\xe1\xf9\x8a\xfc\x1d\xff\xfd\x00e\x01\xd2\x00\xa6\xff%\xfe\x93\xfc\x1c\xfa\xe0\xf76\xf6\x1e\xf6\xe6\xf6}\xf7\xde\xf6&\xf5?\xf3[\xf1\xce\xf0R\xf0\xb5\xf18\xf4\xfd\xf6\xa9\xfa\xfa\xfc\xef\xff[\x02\x7f\x039\x04\xdf\x02x\x04n\x0c\xc5\x1a\x85-%:\xdf>\xe8;\x0c6&3\xf91;2\xbe1\x9f03.k)c!\xd3\x17\xdf\x0e<\x06e\xfcQ\xef\xf8\xe1\xfd\xd8$\xd7+\xdaf\xdd\xbc\xdd\xbd\xdb\xbb\xd9\x02\xd9\xac\xd9\xe1\xdbc\xe0c\xe7\x86\xf0\x7f\xf9\x85\x01/\tk\x10\xff\x16\x99\x1a\xf9\x19\xc0\x16\x9e\x13F\x13\x94\x15\xbc\x17\x93\x17N\x14\xa5\x0eY\x07]\xfe\x82\xf4\x13\xeb\xd6\xe33\xdf\xc9\xdbo\xd91\xd8\xfb\xd8\x05\xdb;\xdc\x1b\xdbw\xd8\x02\xd7[\xd9\x02\xe0L\xe9:\xf3\x90\xfc\xe0\x04\x9d\x0b^\x10;\x13,\x15|\x16\x14\x18\xfc\x18(\x1a\x8c\x1b0\x1d0\x1e:\x1c8\x17\x05\x10\xbb\x08\xe2\x02W\xfex\xfb\xf2\xf9\x94\xf9\xb0\xf9\x85\xf8x\xf6\xce\xf3\x88\xf1\'\xf0Y\xef4\xf0\xc7\xf2&\xf7\xce\xfbK\xff\x9c\x00\xd0\x00\x1a\x005\xff\xc0\xfeC\xfe\x86\xff\x8b\x00\x00\x01D\x00\xd5\xfd\\\xfb 
\xf8\xeb\xf4\xe2\xf1\x9e\xee\x88\xedZ\xed\xc5\xee\xc0\xf0\x83\xf0,\xf1\xaf\xf18\xf3\x99\xf5\xfe\xf6\x84\xf9I\xfc\x8b\x00\xbf\x04\xed\x07!\x0b\x93\r?\x14\xe3\x1fo-5:F>3:\xa93c.\xab.U1\xd01\xba0>-\xe8\'\xc9!]\x18\x82\r\xdc\x01\xe9\xf5\xc7\xebK\xe3<\xdf\x89\xdfb\xe2\t\xe5-\xe4a\xe0&\xdc\xa9\xd9>\xdb\x06\xe0\xc2\xe6\x8d\xee\xe3\xf6\x84\xffG\x07\xa5\r\n\x11<\x12g\x11\xed\x0f_\x0f\xeb\x0f\xc1\x115\x14G\x15\x16\x134\r\x89\x04N\xfb\xe7\xf2\xfa\xeaQ\xe4\xb4\xdf\xb8\xdd\xbb\xde\x9a\xdf\x83\xdf\xf4\xdd\xe2\xdb\xaa\xda1\xda\xf1\xda=\xde5\xe4S\xed\x06\xf7\\\xffW\x06A\x0b\xe5\x0f}\x12z\x13P\x14\xa0\x15\x81\x18\x89\x1b\x9a\x1d\x80\x1e\x82\x1c\x01\x19E\x13\xa0\x0cT\x06\xdf\x00\xc6\xfd\xba\xfb\xc8\xfa?\xfa\x92\xf9\x07\xf8\xd4\xf5N\xf3\t\xf1\xd4\xf0\x10\xf2\xd3\xf4w\xf8\xe7\xfb+\xff9\x01C\x02\xa1\x02(\x02\x99\x01\x9f\x00\x94\xff\xee\xfe\xd5\xfe\x87\xfeo\xfd$\xfb\xa7\xf7U\xf4\xa2\xf0\xfa\xed[\xec\x0b\xeb\xb3\xeb\x13\xec\xa4\xedb\xef+\xf0\x83\xf2\xcf\xf3\xbe\xf6:\xfa\xca\xfd\xa0\x02l\x06\x11\t\xfc\x08\x12\x08\x80\n\x1c\x12\xff\x1f\xa7-\xb25\xbf6\xb12\xd6.\x08,\xeb*8+\xb5+\xcc+V*\xb2&\x06!\x1a\x19\xc6\x0e\x82\x02\x8a\xf62\xed\xce\xe8\xf6\xe8\xb1\xea\xbd\xec\x05\xecm\xe8\x8d\xe3E\xdf\xfb\xdda\xdf\xc7\xe2\n\xe8,\xef\xd7\xf7h\x00d\x06\xa1\x08f\x07\xd5\x05\xc4\x044\x05\x96\x07,\x0b\xe1\x0fx\x12\xe8\x10/\x0c\x19\x05\xd6\xfd\xce\xf6#\xf0\xa5\xeb\xc3\xe9Q\xea\x08\xeb\x0c\xeb\x90\xe9L\xe7%\xe4\xeb\xe0N\xde\x8d\xde\xb5\xe2\xff\xe8\xab\xef\xbb\xf5<\xfb\x97\x00\xf5\x03\xff\x04&\x05\xd6\x05\xc9\x08\xe5\x0b)\x10\xcf\x13\xa4\x17N\x19\n\x17\x98\x12\xe8\x0c\x1c\t\x17\x06"\x03i\x01,\x00\xec\x00\x19\x01\x0e\xff\x1e\xfc"\xf8L\xf5\x9c\xf3\xd7\xf3\xba\xf5\x84\xf9M\xfd\x94\xff\x10\x01A\x01L\x01\xa2\x00\x9b\xffz\xfe0\xfe{\xff\xec\x00\x10\x02\x17\x01f\xfdB\xf9A\xf5\xd9\xf2\xde\xf1\xed\xf0\xb8\xef\x08\xeeq\xed:\xee\x0f\xf0\x1c\xf2\xc3\xf1z\xf1X\xf2h\xf4\x1f\xf9+\xfd\xf7\xff\x98\x02\x0f\x04\xdc\x05\x9d\x08\xf0\x07\x8d\x06\xff\x07\xcb\x0fp\x1f=->2P.\x85\'\x0e&\xb2(0+\xc4*\xd4(^)\x1b)\x89&\xad!\xe9\x19\x1b\x10S\x03\xa9\xf6\xf9\xef\x13\xf1.\xf5\xad\xf6\x15\xf3\x80\xed\x19\xeaB\xe7\x03\xe4\xbb\xe0b\xdfs\xe2\xc0\xe8u\xf0\xdd\xf7\x93\xfd\x98\x00\xfd\xfe\x0c\xfb\x17\xf8v\xfa\x8c\x00\xf7\x05\xcf\x08\xb1\t\xb9\x0b\x9a\x0c!\t\x13\x03\x91\xfc\xfb\xf7\xa5\xf5\xba\xf3I\xf3\xcf\xf4\xe7\xf6>\xf6\xe7\xf1O\xecH\xe9\x98\xe8O\xe8\xf7\xe7\x9e\xe9<\xee\xbb\xf4\xc6\xf9\x99\xfb\xe3\xfc.\xfd\xc0\xfd,\xfe~\xff\xd1\x03\xc9\x08\x84\x0c\x1b\x0e\x8c\x0e\xeb\x0fB\x0f~\x0c\xdf\x07\xe9\x04\xbe\x04\xe5\x05\x7f\x06\x19\x05\xf0\x03\x16\x02\xe5\xff\x0c\xfd[\xfb\xe3\xfb\xb3\xfc;\xfd\x7f\xfd\xa2\xfe\xd2\x00g\x025\x02\x0c\x00\x9e\xfef\xfe$\xff\xe3\xff\'\xff\x7f\xfe\xb6\xfcI\xfb\x82\xf9\xfa\xf6\x9e\xf5\x02\xf4n\xf3\x93\xf1\xb6\xef\xa1\xef\x1e\xf0\xa5\xf1\xce\xf0\xfd\xef\xc1\xf0G\xf2\xae\xf5\xc6\xf7\xe3\xf9-\xfc\x15\xfd\x97\xfe\x17\x00\x99\x02\xb5\x05\x1a\x06\x88\x04\xe2\x02\xde\x07u\x14k"\x80*\xf4(\x0b$o"\x86%\xe3**,*+\xdb*\xf8*\xd4+%)\xda"\x94\x19Q\rR\x02\xb1\xfcX\xfc\xae\xfd\xda\xfb{\xf5\xb1\xee\x00\xea\x1a\xe7-\xe5\x95\xe1 \xde\xed\xdd*\xe1\xb9\xe7\x8b\xef\xea\xf4\x01\xf6\x06\xf3a\xf0\x81\xf2\xff\xf8\x0c\x002\x05\xf5\x07\xf7\x08\x9e\n 
\x0c\x96\x0c\xa3\n8\x04g\xfe\xca\xfb.\xfdR\x01\xeb\x01\xd8\xfe5\xf9A\xf3-\xef^\xec\x00\xea\xc4\xe8U\xe8\xad\xe8\x86\xeb\xba\xefE\xf3\x07\xf5\xfd\xf2/\xf0\xe1\xefE\xf3f\xfa\xc1\xff\x1f\x04\xa3\x06n\t\x83\x0b\n\x0cR\x0b;\t\x8f\x07\x0e\x07\xb9\x08\xc1\x0b\xf3\r\xb2\rj\n\xfc\x04\x8f\x01\xe1\xfft\xffq\xfe\xc1\xfdQ\xfe\xdb\xff}\x01\xc1\x00\xb7\xfe;\xfcn\xfa\xee\xf9F\xfaS\xfb\x06\xfdx\xfd\xd2\xfck\xfb\xe1\xf9\xc8\xf8\x14\xf7Z\xf5[\xf3\xe2\xf2\xd4\xf3\xac\xf4\x8a\xf4i\xf3T\xf2\xcd\xf2\xde\xf2\x7f\xf3\xdf\xf4\x03\xf6\x1a\xf9\xf8\xf9\xe1\xfa\x18\xfd\x99\xfe\x9c\x01\x00\x02\x18\x02\xe2\x03\x94\x03\xbb\x02\x10\x03\x86\t[\x19\x96%\xe9\'\xc6#j!\x00&u*\xcd)\x9f(\x8e+80a2W/%)\x12!Q\x16\xeb\t3\x01\xbf\xfe\xec\xffH\xfe\xc9\xf7?\xf1\x13\xed\x8b\xe9\x01\xe3g\xdbi\xd7\xb2\xd8\xfd\xdd#\xe4\xdf\xe9B\xef\xa6\xf1\xe3\xf0\'\xeea\xeeC\xf4\x80\xfbE\x02\x1b\x06/\nK\x0f\xda\x11\x02\x11\x1b\r\xa1\x08\x0c\x06H\x04N\x04[\x05\x86\x06\xe2\x05\xc3\x00=\xfa\n\xf4|\xf0\x84\xedH\xea\xac\xe7O\xe7I\xe9-\xec(\xeec\xee\x17\xee\x1b\xed6\xedh\xee\xad\xf2$\xf9Q\xffA\x03[\x06\x14\t\x92\x0b\xf9\x0bM\n\x90\t\x88\n\xd2\x0c[\x0f\x02\x11\xe2\x10.\x0fj\x0bz\x07\x00\x05\xa0\x03\xd9\x02&\x02\xcf\x00\xbb\x00\xe5\x00k\x00\xe2\xfe\x16\xfb2\xf8I\xf7(\xf8T\xfa\x90\xfb\xed\xfb%\xfb"\xf9 \xf7\xbc\xf5\xec\xf4\r\xf5\x80\xf5\xd1\xf6\x9b\xf7m\xf7\x90\xf69\xf5\x0f\xf4\x1f\xf3N\xf3\x82\xf5\xd7\xf7s\xf8\x95\xf8\xa9\xf8\x1c\xfbM\xfd\xcf\xfd\x8d\xfd\xa4\xfc\x02\xff\xcb\x01\x05\x04i\x05\xca\x04\xf7\x05\xad\ni\x13\x12\x1f\xa9%v$\x9d\x1f\x93\x1e\xb9${+\xed,\x1b+O+M-\xcc,E&\x0b\x1d\xb1\x14;\rR\x064\x01D\x00\x0c\x00\x9b\xfb4\xf3\x9b\xeaD\xe5\x82\xe2o\xdf\x00\xdd^\xdc+\xdfU\xe4l\xe9\xd5\xec\xe4\xed\xb7\xed\x0c\xed\xc3\xee{\xf3\x0e\xfb\xbe\x02\x1d\x08\xf6\tk\n\x88\x0b\x11\x0cu\x0b\xd7\x08\xd2\x06\xad\x07#\tI\t)\x07$\x03\x98\xfe\xca\xf8R\xf3;\xefE\xed\x0b\xed:\xec\x00\xebW\xea\xa3\xea}\xeb\xd7\xeaS\xe9\xa3\xe9E\xec\xb7\xf0+\xf5p\xf9\xb3\xfd\xc8\x00\xe6\x01\xe5\x02G\x057\x08\xa8\t\x17\nm\n\xb0\x0c\xf0\x0et\x0f\xa1\x0e\xac\x0b\xbd\tK\x083\x07\xd0\x06\xf4\x05{\x05V\x04~\x02\x1d\x01\xa1\xff\xc5\xfed\xfda\xfb@\xfa*\xfa\x06\xfb\xfe\xfa\x9f\xf9C\xf8\xac\xf6I\xf6\x9b\xf69\xf6K\xf6s\xf5\xeb\xf4\x92\xf5\xd4\xf4:\xf4\xc4\xf39\xf3\xe3\xf4\xeb\xf5/\xf7n\xf8\xf1\xf7\x9b\xf8~\xf9\xd8\xfaB\xfd\xea\xfe>\x01\x11\x03\t\x02\x15\x00r\xff\xed\x02~\t\x95\x10m\x15%\x19\x07\x1ds\x1fg \x11\x1f_\x1e1!\xb8&\x83++,\xd4(&$\xe9\x1f;\x1b\xa5\x14\xd7\r\xc6\x08\xe4\x066\x06\xac\x03\xa7\xff\xfc\xf9i\xf3\xb0\xec$\xe7\xd5\xe4\t\xe55\xe6E\xe7^\xe8M\xea\xe2\xeb_\xec\xdb\xeb\xbf\xeb\xb3\xedJ\xf1\x9d\xf6\xf8\xfc\xa6\x02\xc3\x05Y\x05\xac\x03\xad\x030\x05?\x06\xd2\x05#\x05\xc5\x05(\x07)\x07\xe5\x04\xbd\x00\x82\xfcp\xf9\xa5\xf7\xb6\xf6w\xf5P\xf4\xe1\xf3\xe1\xf3\x8c\xf3G\xf2|\xf0c\xef\xfd\xee\xd2\xefQ\xf2\xca\xf5\xe5\xf9\x17\xfc\xa2\xfc0\xfcG\xfc\xc3\xfd\xcb\xff\xb3\x01\x88\x03\r\x05\xcf\x06\x0b\x08 \x08X\x077\x06\x95\x05\xa9\x05\xc0\x06\xfc\x07\xf5\x08\xb4\x08\x8c\x07;\x06\x95\x05\x10\x05G\x04\x93\x03\x9a\x03x\x04\xff\x04\x82\x04\x84\x02[\x00\xe2\xfd\xac\xfby\xfaa\xf9\xd3\xf8M\xf8p\xf7#\xf7?\xf6\x87\xf4\xc4\xf2x\xf1\xdd\xf1v\xf3\r\xf5a\xf6[\xf7\x19\xf8\x1f\xf9\xd4\xfaf\xfcu\xfdV\xfes\xff\xd9\x01\xa5\x047\x06\xf2\x06\x1e\x06\xb6\x04\xe0\x03\x8f\x04\xf3\x07\xdf\x0b\xdf\rK\r(\x0cn\r\x84\x0f\xbb\x10\x87\x10\xc7\x0f\x00\x110\x13\x1a\x15\x00\x161\x15\xd2\x13\x0f\x12\x0f\x10\xc8\x0e\xa3\rn\x0c\xb7\n\xb8\x08\xf8\x07 
\x07\x11\x05*\x01\xe0\xfc\x96\xfa\xbf\xf9>\xf9\t\xf9\xc7\xf8\xa1\xf8r\xf7\xcd\xf54\xf5\xfc\xf4\x81\xf4.\xf3\x80\xf2\x84\xf3I\xf5\x80\xf6\xa8\xf6\x92\xf6w\xf6\x0b\xf6\x92\xf5\r\xf6\x9b\xf7\x1e\xf9\xb0\xf9\xb3\xf9?\xfa\x1e\xfb\x8e\xfb\x7f\xfb\xac\xfb\x1a\xfcx\xfc\x01\xfd\xcb\xfd\x8d\xfe[\xfe[\xfd\xd7\xfc\x01\xfd0\xfd\xe8\xfca\xfc\x9f\xfc\xca\xfcf\xfc\x06\xfcc\xfcp\xfd#\xfe1\xfeC\xfe\x93\xfe\x05\xff?\xff\xd1\xff\xe3\x00\xb4\x01@\x02b\x02\xf4\x02\x12\x04\xc1\x04\xf9\x04:\x05\xb0\x05\x18\x06J\x06J\x06n\x06a\x06\xde\x05(\x05\x98\x04\xf7\x03\xed\x02\x97\x01N\x00!\xff\xd9\xfdu\xfc\x1f\xfb\xf0\xf9\xce\xf8\x92\xf7M\xf6e\xf5\x9e\xf4\xe8\xf3q\xf3R\xf3\xaa\xf3\x17\xf4s\xf4\xfb\xf4\xda\xf5\xe3\xf6\xf3\xf7\xdd\xf8\xcf\xf9\xba\xfa\xa4\xfb\x9b\xfc\xb5\xfd\xc7\xfe\x9b\xff\xd1\xff\x96\xff=\xffO\xff$\x00L\x01\xa0\x02\xe3\x039\x05\xe2\x06\x96\x08[\n~\x0c\x0f\x0f\xc8\x11\x89\x14\x1b\x17\xba\x19!\x1c\x82\x1d#\x1e\xc9\x1eC\x1f\xcc\x1e)\x1d^\x1b\x05\x1au\x18\xe1\x15\xa7\x12\xf8\x0f|\rb\n\xeb\x06(\x045\x02\xcf\xff\xec\xfc:\xfar\xf8\xae\xf6E\xf4<\xf2\xf9\xf0\x1b\xf0\xcc\xeet\xed\x19\xed_\xedk\xed\x19\xed.\xed\x08\xee\xe8\xeea\xef\xea\xef\xd1\xf0\x07\xf2\x17\xf3\xc6\xf3\xa4\xf4\xba\xf5\xdf\xf6\xff\xf7\x05\xf9\xef\xf9z\xfa\xa8\xfa\xab\xfa\xf6\xfa\x98\xfb\x14\xfcz\xfc\x97\xfc\x81\xfc\x8d\xfc\xaf\xfc1\xfd\xfe\xfd\x9e\xfe\x11\xffz\xff0\x00p\x01\x89\x02\r\x03*\x03p\x03\xff\x03\x9c\x04\x18\x05{\x05\xaf\x05\x89\x05$\x05\xdd\x04\xd0\x04\xa0\x04;\x04\xe8\x03\xbd\x03\x91\x03%\x03\x9e\x02d\x02V\x02\xfb\x01m\x01\x10\x01\xd9\x00\x91\x00\xf9\xffm\xff\x19\xff\x95\xfe\xcf\xfd&\xfd\x9e\xfc\xfd\xfb\x1e\xfbL\xfa\xdb\xf9x\xf9\xdd\xf8E\xf8\x16\xf8\x13\xf8\xf8\xf7\xa9\xf7c\xf7\x95\xf7\xee\xf7\x0f\xf8\x1e\xf8D\xf8r\xf8\xac\xf8\x01\xf9R\xf9\x96\xf9\xbd\xf9\xcf\xf9\xa0\xfaL\xfc\x0b\xfe\xe5\xff\xa3\x01\x87\x03#\x06\x15\tm\x0c\x1a\x10d\x13\x1a\x16K\x18l\x1a\xc2\x1c!\x1f\x92 \xc1 . 
^\x1f\x84\x1e8\x1d\x1a\x1b\x9a\x18\xdc\x15\xe4\x12\xd9\x0f\xd4\x0c\xf8\t\xc4\x06/\x03\x03\x00\xc5\xfd\x11\xfc\xfc\xf9\xcc\xf7\x08\xf6\xc3\xf4\x8c\xf3\x19\xf2\xfc\xf0H\xf0\xa3\xef"\xefH\xef\xbf\xef\xfb\xef\xd1\xef\x98\xef\x08\xf0\xa6\xf0$\xf1\xa9\xf1\x18\xf2\x89\xf2\x02\xf3\xc5\xf3\xd8\xf4\xc0\xf59\xf6u\xf6\xda\xf6[\xf7\xe2\xf7T\xf8\xac\xf8\x0c\xf9x\xf9\x02\xfa\x9d\xfa\x1c\xfb\x81\xfb\xd5\xfbK\xfc\xf4\xfc\xe8\xfd\xf9\xfe\xe4\xff\x9a\x00:\x01\xfb\x01\xcd\x02\x97\x03I\x04\xc8\x042\x05\x82\x05\xb0\x05\xdc\x05\xd8\x05\xb2\x05p\x05/\x05\x00\x05\xa1\x04\x18\x04\xa2\x03N\x03\r\x03\xc5\x02\x88\x02P\x02\x02\x02\x95\x011\x01\xdb\x00[\x00\xc1\xff\'\xff\xab\xfe \xfek\xfd\xa4\xfc\xfd\xfb^\xfb\xac\xfa\xfc\xf9l\xf9\xf8\xf8\x87\xf8U\xf8D\xf8D\xf8X\xf8^\xf8s\xf8\xba\xf8\xd9\xf8\x03\xf9U\xf9x\xf9\xb5\xf9\x11\xfal\xfa\xe9\xfaU\xfb\x94\xfb\x08\xfc\x95\xfcT\xfdz\xfe\xf5\xff\x05\x02c\x04\xc6\x06`\t\xd7\x0b\x1c\x0eq\x10\xf4\x12\xce\x15\x95\x18\x84\x1a\xdd\x1b\xe6\x1c\xa5\x1d\n\x1e\xde\x1d^\x1dt\x1c\xb6\x1a\x99\x18\x89\x16\xbc\x14\xaa\x12\xdd\x0f\xd2\x0c\xce\t\x1a\x07\x9d\x046\x02?\x00;\xfe2\xfcB\xfa\xa4\xf8u\xf7\x1f\xf6\xcf\xf4z\xf3p\xf2\xa0\xf1\xba\xf0#\xf0\xb4\xefU\xef\x1d\xef\xe4\xee\xe7\xee\xf3\xee\xfb\xee0\xef\x9b\xef\x1b\xf0\xa4\xf0_\xf1S\xf2)\xf3\x02\xf4\xe3\xf4\xbb\xf5q\xf6\xe5\xf6\x96\xf7x\xf84\xf9\xc6\xf9k\xfa5\xfb\xcc\xfb3\xfc\xc2\xfcr\xfd\x0e\xfe\xa8\xfe\x82\xff\xbc\x00\xac\x01>\x02\xc5\x02T\x03\xf5\x03U\x04\xc7\x04d\x05\xbd\x05\xd6\x05\xca\x05\xf5\x05\x10\x06\xd1\x05r\x05D\x050\x05\xd8\x04X\x04\x1a\x04\xf7\x03\xac\x03=\x03\xea\x02\xac\x025\x02\xad\x018\x01\xe9\x00y\x00\xde\xff@\xff\xaa\xfe/\xfe\x93\xfd\xf9\xfcg\xfc\xd9\xfbY\xfb\xea\xfa\x8d\xfa\x1d\xfa\xbf\xf9t\xf99\xf9"\xf9.\xf9R\xf9i\xf9O\xf9=\xf9f\xf9\x91\xf9\x88\xf9\x9c\xf9\xe7\xf96\xfaM\xfaB\xfa\x7f\xfa)\xfb\xcb\xfb\x04\xfc\xa5\xfc\xe1\xfd(\xff\x8e\x00/\x02A\x04\xc2\x06\xf6\x080\x0b\xe5\r\\\x10\\\x12!\x14\x1c\x16\x0c\x18\x99\x19\xa0\x1aS\x1b\xd0\x1b\x95\x1b\xd6\x1a\x13\x1a\x01\x19\xae\x17\xc6\x15\xb8\x13\xe6\x11\xbf\x0f|\r\x05\x0b\xa7\x08w\x06\xfd\x03\x9c\x01j\xffp\xfd\x82\xfbz\xf9\xbd\xf7A\xf6\xc9\xf4W\xf3\xf5\xf1\xde\xf0\n\xf03\xef\x88\xee\x08\xee\x8f\xedC\xed,\xed]\xed\xbb\xed\xd4\xed\xf6\xedb\xee\x0c\xef\xe1\xef\xa6\xf0o\xf1F\xf2#\xf3\r\xf4\x18\xf5&\xf6\t\xf7\xcb\xf7\x89\xf8u\xf9\xa3\xfa\xa2\xfb_\xfc\xfc\xfc\xa5\xfdj\xfe\x1c\xff\xd8\xff\x94\x008\x01\xb4\x01&\x02\xc7\x02h\x03\xd3\x03\x19\x04X\x04\xb3\x04\x00\x05(\x057\x05C\x05M\x05]\x05`\x05R\x058\x05\r\x05\xd6\x04\xd0\x04\xde\x04\xcc\x04\x93\x04I\x04\x1f\x04\xdf\x03z\x03?\x03 
\x03\xd2\x02H\x02\xea\x01\xb6\x012\x01X\x00s\xff\xe4\xfe_\xfe\xb0\xfd\xf4\xfca\xfc\xc5\xfb\x0c\xfbz\xfa\x11\xfa\xbd\xf96\xf9\x92\xf81\xf8+\xf8=\xf8G\xf8\\\xf8\x88\xf8\xc7\xf8\x0b\xf9C\xf9\xb4\xf9S\xfa\xdb\xfae\xfb\xf6\xfb\xb8\xfc\x8c\xfdA\xfe\x11\xff\x0e\x00"\x01\x16\x02\xfd\x02\x1a\x04H\x05r\x06\xd8\x07\x80\t\x17\x0bs\x0c\xb3\r\x1d\x0f\x85\x10\xac\x11\xc1\x12\xe1\x13\xbd\x14:\x15\x88\x15\xe1\x15!\x16\xe0\x15#\x15M\x14o\x13Z\x12\xe6\x106\x0f\x8d\r\xd3\x0b\xf6\t\x17\x08?\x06G\x04\x1d\x02\t\x00:\xfe\x9d\xfc\xe1\xfa"\xf9\xac\xf7j\xf6$\xf5\xf5\xf3\xf1\xf2\x1d\xf2L\xf1o\xf0\xe7\xef\x9e\xeff\xef2\xef*\xefw\xef\xca\xef\t\xf0_\xf0\xe4\xf0\x91\xf1"\xf2\xc1\xf2\x8e\xf3^\xf4)\xf5\xe1\xf5\xba\xf6\x9c\xf7R\xf8\xf7\xf8\xa8\xf9p\xfa\'\xfb\xc8\xfbk\xfc\x0f\xfd\xa0\xfd&\xfe\xb5\xfea\xff\xf9\xff\x80\x00\x0e\x01\xc2\x01\x8b\x02.\x03\xb9\x03@\x04\xc0\x041\x05\xa0\x05\'\x06\x96\x06\xca\x06\xe7\x06\x1a\x07Z\x07^\x07%\x07\xe5\x06\xa4\x06T\x06\xf2\x05y\x05\xff\x04h\x04\xcc\x03:\x03\xab\x02\n\x02>\x01]\x00\x96\xff\xf0\xfeU\xfe\xba\xfd\x1d\xfd\x82\xfc\xf7\xfb\x8c\xfb\x1f\xfb\x95\xfa$\xfa\xbb\xf9S\xf9\x10\xf9\r\xf99\xf9?\xf9\x14\xf9\x04\xf9H\xf9\xb8\xf9\r\xfaW\xfa\xc7\xfa[\xfb\xc6\xfb/\xfc\xea\xfc\xd9\xfd\xa8\xfe\x16\xff\xae\xff\xa3\x00\x96\x01g\x02,\x03\xfc\x03\xe0\x04\xa0\x05J\x06,\x07\xf0\x07k\x08\xd9\x08L\t\xc8\tM\n\xb7\n\x15\x0b[\x0b\x88\x0b\xdb\x0b@\x0ch\x0cX\x0cC\x0c=\x0c\x1b\x0c\xc9\x0b\\\x0b\xf2\n\x87\n\xeb\t5\t|\x08\xcd\x07\xf6\x06\xf6\x05\xe8\x04\xe8\x03\xf4\x02\xd8\x01\xaf\x00\x9b\xff\x99\xfe\x9b\xfd\x84\xfc\x85\xfb\x91\xfam\xf9M\xf8U\xf7}\xf6\xac\xf5\xb6\xf4\xf7\xf3o\xf3\xeb\xf2z\xf2#\xf2\xea\xf1\xc7\xf1\xa8\xf1\xb9\xf1\xec\xf18\xf2\xa5\xf21\xf3\xef\xf3\xaa\xf4d\xf5<\xf6K\xf7e\xf8j\xf9r\xfaz\xfb~\xfcn\xfdS\xfeL\xff=\x00\x16\x01\xe2\x01\xb7\x02\x87\x031\x04\xb2\x043\x05\xb1\x05\x19\x06h\x06\x9a\x06\xbc\x06\xde\x06\xe0\x06\xce\x06\xbd\x06\xb7\x06\x9d\x06T\x06\x0e\x06\xc8\x05m\x05\xf2\x04x\x04\x00\x04u\x03\xd9\x02P\x02\xe4\x01b\x01\xb1\x00\xef\xffE\xff\xaf\xfe\x15\xfeO\xfd\x90\xfc\xee\xfbg\xfb\xe2\xfa\x7f\xfaM\xfa(\xfa\xd4\xf9x\xf9l\xf9\x91\xf9\xaa\xf9\xbb\xf9\r\xfa{\xfa\xc3\xfa\xf0\xfaa\xfb\x15\xfc\x9b\xfc\x11\xfd\x98\xfd3\xfe\xc8\xfec\xff\x19\x00\xce\x00^\x01\xca\x01R\x02\x0b\x03\x9a\x03\xef\x03\\\x04\xfc\x04|\x05\xcd\x05\x13\x06t\x06\xc1\x06\xdf\x06\t\x07R\x07\xa8\x07\xec\x07\x14\x08;\x08W\x08:\x08\xfa\x07\xc4\x07\x9e\x07\x85\x07g\x07\x18\x07\xc7\x06\x87\x06\x1f\x06\xad\x05A\x05\xf1\x04\x90\x04\x13\x04\x8b\x03\xf9\x02y\x02\xfa\x01\x8b\x01A\x01\xf9\x00\x93\x00\x1c\x00\x95\xff\x0c\xff\x81\xfe\xf4\xfdq\xfd\x01\xfd\x9a\xfc\x10\xfc{\xfb\xe2\xfaJ\xfa\xc1\xf9^\xf90\xf9\x14\xf9\xf5\xf8\xbd\xf8\x7f\xf8]\xf8C\xf8Y\xf8\x8c\xf8\xc8\xf8\x08\xf9K\xf9\xb8\xf9/\xfa\xa7\xfa\x1d\xfb\xa1\xfb>\xfc\xdf\xfcs\xfd\n\xfe\xa4\xfe\'\xff\xaf\xff:\x00\xc0\x00P\x01\xc9\x014\x02\x98\x02\xf6\x02b\x03\xb1\x03\xe6\x03\xf6\x03\xe4\x03\xe6\x03\xf1\x03\xf7\x03\xfa\x03\xe4\x03\xb7\x03U\x03\xea\x02\x89\x02\x1c\x02\xc1\x01`\x01\xf4\x00\x8e\x00\x1a\x00\xa6\xff>\xff\xc9\xfed\xfe\xfb\xfd\xa1\xfd/\xfd\xb7\xfci\xfc\x1b\xfc\xdc\xfb\xbb\xfbd\xfb-\xfb\x00\xfb\xc3\xfa\xcd\xfa\xa4\xfa\x98\xfa\xbc\xfa\xd9\xfa\xfe\xfaD\xfb\x8e\xfb\xdf\xfbN\xfc\xa1\xfc 
\xfd\x94\xfd3\xfe\xaa\xfe"\xff\xb5\xffn\x00\x13\x01\xdd\x01U\x02\xf8\x02\x95\x03\xec\x03`\x04\x87\x04\xd3\x04\xed\x04\xef\x04\xfd\x04"\x05$\x05~\x05\x7f\x05\xca\x05\xb4\x05`\x05q\x05\x11\x053\x05\x86\x04\xed\x04|\x04\xe1\x03u\x04\xb3\x03o\x030\x03\x19\x03\xb7\x02Y\x02c\x02\x8f\x02\x9d\x02\xf4\x01\xe9\x01\xd1\x01\'\x01\xcd\x00\r\x00\xe7\xffj\xff\xf3\xfe\xc5\xfe"\xfey\xfbH\xfa\x8d\x01\x9c\x0fV\x15\x8a\x03\x89\xee\x1c\xe8\xd7\xee\\\xf7\x91\x03|\x05\x86\xfd\n\xf3\xe9\xe7\xc7\xeee\xf3=\xf8\xde\xf8\x1e\xf6\x00\xf6\x80\xf6\xe9\xf8\xbd\xfe\n\x04\x1f\x02W\xfb\x17\xf8B\xfcI\x028\r+\r\xcf\x04u\xff\x7f\x00\x93\x06\xa5\x0c\xaf\n\xa1\x02\xbb\x005\x05\n\x0b5\x0b\xe4\x06"\x02\x90\xff\xbf\x04\xb3\x06\x0c\xfff\x04\xde\x02\xa8\xff\x01\x01 \x03\x17\x07\xd3\xff\xed\xfd\xc0\xfe\x97\xfc\xd3\xfe\xbf\x03a\x04\xed\x01\x92\xfc\x02\xfbt\xfe\xfd\x00\xd3\xfe\x80\xfdO\xffj\xfeE\xfc^\xfe\xab\xfcH\xfe\xf4\xfd\x13\xf8\x97\xf7\xa8\xf9\xea\x034\xfcN\xfa\x88\xfbQ\xf6c\xf8S\xfc+\x02\x1d\xf9\xac\xfc3\xfeZ\xfau\xfb\xe8\xfeP\x02\xe3\x00\xd3\xff\xa1\xfc(\xfdf\xfd\xe7\x01t\x07\xe8\x04?\x02\xda\x01\xc2\x00\x8e\x01\xac\x05-\x05\x00\x03\x1c\x04\xae\x07\xb2\x04 \x03\x87\x04f\x04\xce\x06\xa1\x06F\x03\xc3\x01\xb5\x02\x93\x03\xba\x05\xbc\x04\xeb\x04\xb9\x01\xd1\xfe`\x00\xa4\x04\x00\x03\xc0\xffx\xff\x9a\x01\x00\x00w\x00\xa4\x02u\x00\xcd\xfd"\xfd\xda\xfdZ\xfeA\x02\\\x001\xfes\xfb\xfa\xfa\xe1\xff0\x01\x9b\xfd\xdb\xfc_\xfdB\xfc<\xfc\x9e\xfd\xad\xfeG\xfd\xf5\xfb\xb4\xfa\xcb\xfb\x0e\xfdd\xfc\x0c\xfd\xdd\xfd\x8f\xfco\xfb-\xfb\xae\xfc\xc3\xfe\xe5\xfeg\xff\xdc\xfe\x0e\xfe\x80\xfc\x11\x00\xf1\x03\xce\x01\x0f\x03=\x02\xc1\xfe\xbc\xff\xb6\x04\x00\x05\xbd\x025\x03a\x01\xfe\x00\xd1\x05+\x05\xca\x01s\x01\x06\x05\xb6\x03$\x02\xad\x019\x02\x9e\x02\xf0\x01\x84\x01C\xff\xab\xfe\xa2\xfe,\x02z\xff\x0c\xfd\x99\xfb\x85\xfc\xc5\xfe\xb4\xfd\x81\xfc\xd9\xfcn\xfe_\xfd\x88\xfb\x8f\xfc2\xfe\xfc\xfd\xd2\xfb\xbd\xfa\x9c\xfe\xc0\xfb\xa0\xfcC\xfe\xa0\xfc\xfc\xfb?\xfc\xc8\xfe?\xfdw\xfc|\xfd\x8d\xfd!\xff\xe9\xff8\x00V\xfe\xce\xfdO\xff\x0e\xff\x15\x00\xb3\xff\'\x02\xd6\x01\xc4\x00\xc5\x01_\x03F\x01\x94\x00h\x02\xbc\x02\xe1\x04W\x04\x03\x04\x87\x02R\x04v\x04\x0e\x04U\x07n\x05q\x02\x18\x03\xe4\x03\x14\x03\xeb\x03\x8b\x06^\x04\xa4\x02\t\x02F\x02\x92\x00|\x02\'\x03\xc7\xff\x16\x017\x01m\x01\xeb\xff\xc2\xffr\xfd\xf6\xfd\x0c\x00h\xfe$\xff\x92\xffw\xfc\xe0\xfa\x16\xfe+\xfc2\xfbC\xfd\x7f\xfbw\xfb\xa0\xfc?\xfc\xd0\xfb/\xfbY\xfb\xdc\xf9\x05\xfc>\xfdk\xfd\\\xfd\x8a\xfc\x18\xfe\xa8\xfd"\xfe\x06\xff\x19\x00!\xfd\xfa\xfe\xce\x00\xae\x00\x82\x01S\xff\xfa\xff,\x01;\x02X\x03\xf2\x03\xf0\x01\xc2\x01\xf9\x01E\x02\xdf\x03e\x05\xb2\x04\x81\x02\xc6\x03\xdf\x02\xab\x03^\x03\xc2\x03\x19\x02\xa6\x02"\x04\x1c\x02\x84\x02\x95\x01n\x02\x96\x00X\x00:\x00\xf3\x00\xb7\xff\xbe\xfe\xac\xff\xa1\xfe\x90\xfe3\xfe:\xff|\xfe\xe3\xfe4\xfe\x15\xfe\xa5\xfe%\xfe\x82\xfc\x07\xfd\x00\xfe\xbb\xfe\xbf\xfd0\xfc\x1d\xfe\xf9\xfd\xb4\xfd\x8c\xfc\x91\xfd\xe6\xfb\x02\xfc\x86\xfd\n\xfd\xea\xfeb\xfd\xa8\xfd\xd0\xfc\x8f\xfe\x04\x00-\x00r\xfdG\xfd\x15\x01v\xfe{\x00\xff\x00s\x01\xb2\x00W\x01\x7f\x025\x02\x85\x019\x00b\x04\xd1\x03T\x03\xe7\x02\x9c\x02 
\x04\xc4\x03\r\x04\xcf\x04j\x04$\x02e\x02\xed\x04\xa9\x03Y\x03\xfc\x04\xe7\x03{\x02\xeb\x01\x9d\x02\xc9\x00c\x02R\x02\x9e\x02\xbd\xffr\x01\xc4\x01\x9f\x00\xda\xfe\xeb\xfcu\xff9\xfc\xca\xfe5\xfeG\xff\xdf\xfd\xe8\xfb\xb0\xfb}\xfbI\xfb\x8b\xfd\xd5\xfa\xad\xfa\x11\xfe\x94\xfb\x0c\xfd\x99\xfdG\xfd\x98\xfa\xec\xfa\x00\xfd\x86\xfe\xc3\xfb\x1e\xff0\xfeX\xfeZ\xfe\x95\xfd\xf4\xfe\x9f\xfe{\x01\xfc\xfe\x0b\x01-\x00\xe3\x00\xf5\x00e\x01b\x02"\x02\x9a\x01\xd7\x01\x82\x01I\x04l\x04.\x01\x91\x03e\x03\xb0\x02\xc9\x03\x9a\x03I\x02\xda\x02;\x01\xc6\x03\xb7\x02\x08\x01\xf1\x02\x8f\x01\xfb\xff\xc4\x00\xe2\x01\xb6\xff"\x00(\x00\x01\x01d\x00c\x00\xba\xffW\xfe\xcc\xfe\xb3\xfe\xaf\x00O\x00\xa8\xfe]\xfe\xb4\xff\xed\xfe$\xfe[\xfe\x93\xfe\xe5\xfe\xdf\xfeF\xff\xe0\xfd\x96\xfdN\xfe\xf3\xfdQ\xfc\xd0\xfdN\xfeQ\xfe\xd8\xfe\xbf\xfb\x17\xfb\x03\xfd\xef\xfc\x81\xfc\xae\xfdM\xfch\xfdd\xfee\xfd\x9f\xfb<\xfd^\xfe\xf9\xfe\xd3\xff%\xfe[\xff\xce\xfe\xb2\x00J\x01\xa6\x00s\x00\xc4\x01\xae\x01\xad\x00H\x03\x8a\x03\x9c\x02w\x02\xbf\x02\x81\x02\xfd\x05:\x033\x01\xcf\x03\x8e\x03\xee\x03\xba\x02\x1e\x03F\x02<\x02p\x03\xc4\x02m\x01\x15\x01\xb7\x01<\x01~\x00_\x00}\x00(\xff\xc2\x00d\xff\xeb\xfd4\x00\\\xfe:\xfd,\xfe\x0e\xfe\x0b\xfe\xc9\xfdX\xfe\xf5\xfc%\xfd\xa9\xfd\xb5\xfc\x1b\xfe=\xfe*\xfd\x85\xfdG\xfet\xfd\x8e\xfe4\xfe\x0b\xfe;\xfd\xb2\xff\xfb\xff[\xff\xe6\x00\xd1\xfe\x05\xff\xa7\xff\xd9\x00\'\x01\xe4\x00P\x00\xf1\x01p\x01t\x01\xbc\x01\x10\x01\xd9\x011\x01;\x02\x9c\x02\x1e\x022\x02s\x01\x86\x01\xab\x013\x01N\x02\xf6\x01\xc1\x00\x94\x01\xe2\x01\x84\x01\xf9\x01#\x01\xf1\x00H\x01\x06\x01\x86\x00\xac\x01F\x02\x01\x01I\x00\x18\x01\xb0\xff5\x00\xd1\x01\xe8\xffH\x01F\xff\xb5\xff\xd7\xffq\xff\xf2\xfe0\xffR\xffD\xfe\x08\xffS\xfd\x0e\xfe\xdd\xfd\xa0\xfdY\xfeD\xfd\xdf\xfc\xae\xfd\xa2\xfd\xaf\xfd>\xfd\xea\xfc\x95\xfc\x95\xfd\xc6\xfd\x8b\xfe\x92\xfe\xc2\xfdb\xfeD\xfd\xb5\xfe\xe0\xfd\x1b\x00\xe6\xfe\x89\xff\xa3\xff~\xff\xf9\x01\x16\x00\'\x01\x0e\x01\x84\x01$\x00J\x02\xed\x01>\x03\xcf\x03\xc8\x02\xfa\x020\x028\x03\xfb\x02\xb2\x02\xc4\x02P\x04j\x03\xa3\x03w\x02\xb6\x03\x85\x01N\x02\xa2\x02\x0c\x03_\x017\x00\xf3\x00t\x00L\x02\xaa\xff\xce\x00\x94\xfe\xc0\xfe]\xfe&\xff\xd8\xfe\x11\xfe\xb2\xfd\'\xfd\xea\xfdr\xfc\xea\xfc\xe3\xfc\x95\xfc\xc8\xfd\x14\xfcT\xfc\x87\xfc\x92\xfc\x9d\xfc\xc7\xfdS\xfe\xdd\xfc<\xffR\xfd\r\xfeJ\xfe2\xffo\xfeG\xff\xc0\xffN\xff\xf2\x007\xff~\x01\x03\x00F\x00\xa5\x00\x7f\x01\x85\x01\xfa\x00\x82\x02\xe2\x00A\x02~\x02\xa2\x01\x8b\x00$\x01J\x02\xe0\x017\x03;\x01\x8a\x00\xc0\x00\x11\x01V\x01c\x02\xff\x01\x05\x00\xd1\x00\xf8\x00Z\x00\xa1\x01_\x01\x06\x00\xb8\x00\x16\x00c\x00\x81\x00{\x00\xbe\xffP\x01V\xff\x95\xff\xf7\xfe\x97\x00\xd8\xfe\xc2\xfe\\\x01<\xfe\xa3\xfeS\xfd\'\xff\xdb\xfe\x0f\xffO\xfdv\xfd\xc8\xfd\xb7\xfc\xd0\xfd\xad\xfd\x84\xfd\xef\xfct\xfd\xe8\xfcX\xfe\x05\xfd\x03\xfd\xbd\xfe+\xfd\xf3\xfe\xd7\xfd\x9f\xffc\xfe\x0c\xff\x15\x011\xfe\xbd\x00\x95\xff~\x01^\x00F\x01\xc2\x00\xf4\x01l\x02[\x00\t\x03\xb2\x01\x06\x03\t\x02\xb8\x02\xdc\x01{\x02\x95\x02|\x02Y\x02<\x02i\x02\t\x02B\x02.\x01e\x02v\x01B\x01\x9a\x01=\x00\xbd\x01\x9d\x00\xcc\x00\x0f\x00\xcd\xfev\x01\xf8\xfe\x8b\xff\xa9\xff"\xff\xb5\xfe\xad\xff\x1d\xfe\xf2\xfdT\xff\n\xfe8\xff>\xfe\xed\xfd\x8f\xfd\x12\xfe[\xfe\xd1\xff\xd7\xfe\xd1\xff\xfc\xfd\x00\xfe2\xfen\xfe\xa5\xffR\xffc\x00Q\xfe\xf5\x00\xad\xfd\x95\x00\x9e\x00\xfa\xff\x17\x00\xaf\xfe(\x01\xf8\x00\x87\x02\x1d\x00w\x02\xcb\x00\x1d\x02\xe6\x02I\x01\x05\x03\x99\x01\xb3\x02\xcf\x01\xe3\x02"\x02?\x02\xe6\x01=\x01\xfb\x01\x7f\x00O\x01u\x00:\x00n\x00\x08\x01T\xff\x98\xff\x16\xff\xbc\xffm\xff\x89\xffV\xff\xd9\xfd\xbb\xff9\xff0\xfe\xc2\xffO\xf
e\xa9\xfe\xc5\x00_\xfd\xa7\xff\x02\xfe\xdb\xff<\xfe\x93\xff\x1c\xff\x1b\xff\xdd\xfer\xfe\x8d\xff/\xfe\x1e\x00U\xfd[\xfff\xfd\xa1\xff\x15\xff\x8c\xfe\xb0\xfe\x13\xfew\xfe\x87\xfeW\xff\x07\xff\x15\x00\xc3\xfe\xa5\xff\xfd\xfe\x1f\x00\xae\xff\xa3\xff\xc2\x00Q\x00g\xff\xed\x00\xcf\xff\xac\x01b\x01\xa1\xff<\x01\xd2\x00\xd2\x01S\x00\xee\x01\x91\x00\x0b\x02\xa8\x02\x96\x01\xcf\x01\x88\xff\xe1\x00y\x01G\x01g\x03\xa5\x01c\x00\x19\x01\xeb\x01\xc6\x00E\x02\xcc\x00\x08\x01a\x01\x89\x00\x1c\x01\xad\xffd\x02\x8b\xff\x81\xff\xc3\xff\x86\xfe\xd9\xffy\xff!\xff\x13\xff\x12\xfer\xfe\x19\xff@\xfe\xa6\xfe&\xfd\xe0\xfd9\xffp\xfe=\xfe\xce\xfd\xde\xff\x86\xfe;\xfe\x85\xfft\xfd?\xffq\xff\xa6\x00\x18\xff\xce\xff\xa6\xff~\xfe9\x01u\x00\x83\xff\xbc\x00\xce\x00\xda\xff\x01\x00\x9e\x00\xa1\x00I\x00\x96\x015\x00\x96\x018\xff\x15\x02\x03\x00\xc8\x00\xcf\x01E\xff\xfe\x01\x95\x00\xb7\x01\xfe\xffO\x01\'\x01\xe4\x00\xec\xfe\x15\x01\xed\xff\x9c\xff\xc8\x00\xc8\xff*\x01\xde\xfe*\x00\xc6\xfe\xd0\xffl\xffP\xfe/\x01\xbb\xfe\xaf\xff\xa3\xfe\xf6\xfe2\xff?\xff_\x00\xc4\xfd\xd2\xff\xeb\xfe\xea\xff\x7f\xfe\'\xff\n\xff\xb0\xfe\xc0\xffm\xfe\xf7\xffr\xff\x89\xff\xd9\xfe\xdc\xfe\xab\xfe\x1c\xff\xe8\xff\xf7\xfe7\xfe\xdc\xff8\xff;\x00\xbe\xff\x94\xfe\x94\xff\xc5\xff{\xff\xc6\xff*\x00\xc4\xff\xb8\xff\x8c\x01\xea\xff#\xff\xe6\x00K\x01\xe3\x00Z\x00\xd0\xff\xf8\x01,\x00\xb9\x011\x02b\x00q\x02\x1b\xff\x08\x03$\x00\xb4\x01&\x01\x8c\x00\xee\x01a\x00\x9c\x03_\x01\xf5\xff]\xff\xdc\xff\xc0\xffN\x01\xd5\x00\x93\x00\x10\x00t\xff\x95\xff\xae\x00S\xff\x1c\xff\xa6\xfe\xa9\xfeR\x00g\x00u\xff\x08\xffQ\x00\xaa\xfeD\xff`\xfea\x01\x88\xff\xd6\xffU\xff\xf2\xfek\x00/\xff\xcd\xff\xc4\xffq\x00\x9b\xff\x93\x00\xac\xff\x00\xffR\xff`\x00\xf4\x00N\x00\xef\xff\xbd\x00\r\x00w\xff\x91\x00\xc1\xff\xe5\xff#\x01\x15\x00\xc6\x00\x08\x00\t\x00\xd0\x00F\xffd\x00\x83\xff\xde\x01\x8f\xff\xd4\x00\xa2\x00\x7f\x00\xea\x00\x08\x01\xe2\x01\xe0\xff\xc8\x01\xce\xfe\xa7\x01\xdc\x00/\x01\x1c\x01-\x00Z\x01\xa2\xff4\x00\xcb\xff\xb0\xff\xfe\xfe\xa2\x01F\xff\x81\xff\x9a\xff\xf2\xfe~\xff5\xff\x1b\x00\xf5\xfe\x13\xff\x96\xfe\x14\xfe\xa0\xff\xe2\xffx\xfe\x17\xfe\x94\xfd\xf6\xff\xf6\xff\xba\x00:\xff6\xff\xaa\xff.\xfey\x00\xe8\xfd\x04\xff9\x00\x86\xff.\x02\xba\xfe6\x00(\xff`\x00t\xfe\xe7\xff2\x01*\xff\xe3\x01\x0e\x00\x19\x01\xc9\xfe\x9d\x00\xeb\xfe\x00\xff\xce\x00W\x01\x9d\x01\xdc\xfea\x00\xdd\xff\xd3\xfe\xa6\xff\xe1\x00\xe7\x00\xbe\xff|\x01\x11\x00\\\x00@\x01\xc1\xfe\x94\xff\xb6\xff\x07\x01\x81\xff+\x01\x05\x00\xa1\x00v\xff\x90\xff&\xff\xf7\xff}\x01i\xfe\xe9\xffn\x00`\x00\xde\xfe\xdb\x00Y\xfeh\x00\xac\xff\xd0\xff\xda\xffR\xfe]\xff\xb0\x00\xc9\x00\x90\xff.\xff\xcf\xfe\xbf\x00\xd3\xff\xcc\xff>\xfe3\x00\xde\xffJ\x00\x8e\x00\x8f\x006\xff\xd2\xff\x98\xfe\xea\xfe\x95\x00i\xff7\x01\xc9\xfe:\x02\xbc\xff\x19\xff\xba\xfe$\xff\xb3\x00\n\x00\xd8\x01S\x00K\x00\xbe\x00\x12\xff\xa8\x00\xc8\xff\x91\xfe\xd1\x01L\xff\x9d\x00\xc2\x00\xd2\x01\xb1\x00\n\x00\xf7\xff\xdc\x00\x10\x01\xe6\xfd\xd3\x00W\x00\xa7\x00{\x02\x08\x01}\xfe|\xff\xef\xffI\xff\xb1\xff_\x00\xab\xfe\xdd\xff\x8c\x00\xde\xfe\x8f\xffh\xffI\xff|\xff\xea\xfe\\\x00C\xffG\x00~\x00\xb0\xff\x04\xffJ\xffR\x00\xee\xff\xca\xff\xc0\xfd\xe9\x00\xe3\xffN\xff\xb6\xfew\xff\xf7\xffd\x01\xab\x00t\x00<\x00\xbb\xfe\xaa\xff\x7f\xfeK\xff\x14\x01L\x01m\x01\x10\x01a\xff~\xff.\xffT\x00\x96\x00\xcf\x00G\x00m\x00\x88\xff2\x00R\x00e\x00\x83\x01\xe7\xff\xef\x00}\x00"\x02\xce\xff]\xfe\x10\x01\r\x00\x8a\xff\x0e\x00w\x00\x8b\x00\x07\x02\xd3\x00\xd0\xfe\x8b\xfe\x8a\xff\x80\x00q\x00\xf9\x00\xc9\xff\xcb\xfe\xcf\xfe.\x00\xbe\x01T\xff\xbb\xfc\x80\x00\xca\x00\xa5\xff\'\x00\xc5\xff\x8a\xfd_\
xfc\xd7\xff"\x022\x02Q\x00\x95\xff\xf7\xff\x8b\x00\x80\xfe\xbd\xfe\xaa\xff\xbb\xff\xd9\xfe\xbb\xff\xf1\x01<\xff\x03\x01%\x01\xd3\xff\xe5\xff\x85\xff\x88\x01`\xff\x15\xff\x06\x00\x13\x00<\xffV\x01\xe0\x02\xa4\x01\xca\xff\xd4\xff\x03\x000\xfe%\xff\x12\xff\xea\x01\xca\xff\x94\x00\xb2\x00\xd6\xff\xab\x01\x0e\xff\x16\x00x\xff\xa2\x00X\x029\x00\x81\x00*\x01W\x00\x81\xff\xcc\x008\xfe{\xfd\xae\xff\xcd\x02<\x04n\x00\x12\xff*\xfe\xff\xfd\xe5\xffz\x02\x9a\xff\xa4\xfdW\xff\x9c\x01\xc6\x01J\x00\x0b\x00J\xff2\xfd\x00\xfe\x18\x00`\xffk\xff\xaf\xff\x8a\x00\xf5\xff#\x01\xfe\xff\xac\xfe\x99\xff\xe3\xfe\r\x00H\xffn\x01\xeb\x01G\x00\xe2\xff\xc9\xfe\xf2\xfe\x00\xff\x91\x00\xb5\x01\xda\x01\x9f\x01\xd5\x01\xb3\xffN\xfe\xec\xfe\xc1\xffA\x01i\x00\xf7\x00\x00\x01h\x00\x88\x00\xd4\xfei\xfe\x8b\xfe\x86\xff\xf6\x00\xc4\x00\xa9\xff\xf4\x00\xd0\x00a\xfe\xcb\xfe\x1d\x00v\xfe\xe5\xfd\x06\x00C\x00D\x01\xfc\xff\x8c\xfe\x14\xfe\xe3\xfd\\\xfeX\xfdB\xfe(\xff\xb3\xfeh\xff\x97\xff\x83\xfe?\xfd\xed\xfc\x8d\xffO\xfe\x06\xfe\x9c\xfd\x82\xfd\xfd\xff\xec\xffB\xff\xb3\xfd\xb3\xfdy\xfem\xfe\xc1\xff\xee\xfe8\xff\x14\xff\x80\xff1\xff7\xfe\xe5\xfd$\xfc\x1b\xff\xa5\x004\xff\x08\xfe\xce\xfd\x81\xfec\xfe\xce\xfeC\xfe\xc2\xffo\xff\xe7\xff\xb5\x00H\x00G\x00\xb5\xff\xc0\xfe\xe6\xfe\x89\xff7\x00\xb0\xff\xb0\xff\xc5\xff\x83\xfe\x9a\xfeQ\xff\xbb\xfeX\xff\x1a\x00\xf7\x00o\x02\x1c\x04s\x05\x86\x07\xbd\t\xac\x0bH\x0e\xa9\x10\xef\x12U\x14O\x15\xd5\x15\xaf\x15\x1f\x15\x0b\x14h\x12\x14\x10\x10\r8\nG\x07z\x03\xe9\xff\x01\xfdt\xfa\xa5\xf7\x9b\xf5\xdb\xf3\xf8\xf1B\xf1\xc6\xf0$\xf0\x05\xf0,\xf0g\xf0\xd0\xf0\xbf\xf1\\\xf2\x91\xf2\xa7\xf3\t\xf5\x01\xf6\xe2\xf7\x1e\xfa\x92\xfb%\xfd!\xff\xec\x003\x02_\x03\xbe\x03\xc9\x031\x04\x13\x04\xaa\x03\x1c\x03\x06\x02\xd3\x00\xa7\xff\xc0\xfe\xeb\xfd\xa0\xfc\xfe\xfb\xa3\xfb]\xfb7\xfb{\xfb&\xfc9\xfc}\xfcD\xfd\r\xfe\x17\xff\xc6\xff+\x00O\x01t\x02&\x037\x03`\x03\xb9\x034\x04\x01\x04\xb3\x03\x92\x03T\x03\xb7\x02\xc7\x01k\x00\xf0\xfff\xff\xff\xfe\x15\xff\x91\xfe.\xfe6\xfd\xdc\xfb4\xfb\x0c\xfd\xd4\xfeI\xff\x9c\xfd\xcc\xfc\xc8\xfd%\xff\x81\xff8\xfeb\xfd\x9c\xfe\x87\xfff\xfe\x03\xfeO\xfe4\xfe\xb9\xfc\xf8\xfb\xcc\xfbv\xfbT\xfb\xd4\xf9\xe8\xf8Z\xf8\xdc\xf8@\xf8\xe2\xf7\x95\xf7\xe9\xf7\xf1\xf7#\xf8\xd4\xf8\x0f\xf9\x10\xfa\xde\xfa\n\xfcf\xfd$\xff\x16\x01L\x04\xb9\x07\r\x0bR\x0f\xa3\x13\xa1\x18@\x1e"#$\'0*\xb7,I.\x9f.\x82-\x0f+d\'\xb1!R\x1b\xb1\x14\xae\r\xb6\x06^\xff\x05\xf8\xf9\xf1\x02\xed\xdd\xe8r\xe5\x99\xe2\x9a\xe0\xb8\xdf\x9c\xdf\xd8\xdf\x96\xe0\xff\xe1t\xe3\xc3\xe4\x87\xe6\xfb\xe8\x05\xec\xe1\xeej\xf1_\xf47\xf8h\xfcg\x00:\x04\xc2\x07\x00\x0b\xcb\r\xc4\x0f\xdf\x10"\x11{\x10\xd5\x0e;\x0c\xe9\x08|\x05\xa4\x01`\xfd\xf9\xf8\x18\xf5\xef\xf1S\xefW\xed\xec\xebw\xeb\xc9\xeb\xb5\xec\xe3\xedy\xef\xd1\xf17\xf4\'\xf6/\xf8\x9e\xfa6\xfd\t\x00N\x02\x11\x04|\x06)\tD\x0b\xa4\r\xa3\x0f 
\x11c\x12Y\x13\xd3\x13\xac\x13\x1b\x13\xbe\x11\x91\x0f\x14\r\x8e\n\xcb\x07\xc1\x04\xfd\x01F\xff\x83\xfc\xac\xfa=\xf9\xb8\xf7\xc8\xf6_\xf6!\xf6\xd1\xf5\xde\xf5\xf1\xf5\xe0\xf5)\xf6Z\xf6I\xf64\xf6V\xf6M\xf6\x11\xf6&\xf6Z\xf6\x8f\xf6\xb4\xf6\x89\xf6\xa6\xf6\xe4\xf6N\xf7\xef\xf74\xf8\xa5\xf84\xf9\xe4\xf9]\xfa\xe9\xfa\xf4\xfb\x97\xfc\xc3\xfc\t\xfd^\xfd\xd7\xfd\xfb\xfd"\xfe\x04\xfe\xab\xfd\xfa\xfd\x0f\xfe\x9a\xfe$\x002\x02\xd6\x04{\x085\r\x83\x12\x7f\x18u\x1f2&\x19,\xad1V6)::\x11\xc6\x14[\x17\xbc\x18\xf2\x18\n\x18\xbf\x15\x00\x12)\r\xc7\x07\xd9\x01Q\xfbW\xf4\xcc\xedJ\xe8\xc9\xe3M\xe0\xde\xdd\xbb\xdc\x11\xdd\xdc\xde\xe1\xe1\xca\xe5k\xea\x93\xef\n\xf5X\xfa\x97\xff\xb4\x04p\t\x8a\r\r\x11\xe1\x13]\x16\xa2\x18b\x1a\\\x1b\xd7\x1b\x16\x1c\xd3\x1b\xfa\x1a\xb7\x19\xe0\x17n\x15Z\x12\x93\x0ea\n!\x06\xb1\x01V\xfd#\xf9"\xf5\xc3\xf1E\xef\xa6\xed\xfe\xec\xdc\xecR\xedm\xee\x16\xf0\x1c\xf2V\xf4\xb5\xf6\xcf\xf8|\xfa\xe3\xfb\xf5\xfc\xc5\xfdl\xfe\x86\xfe\'\xfe\xa8\xfd\x07\xfdi\xfc\xb0\xfb\xc7\xfa\xe8\xf9\xf3\xf8\xf6\xf7-\xf7\x8b\xf6\xe2\xf5N\xf5\xcc\xf4E\xf4M\xf4\xaa\xf4E\xf5\xf6\xf5\xe0\xf6\xec\xf7\x0e\xf9\x88\xfa\x05\xfc`\xfd\xd6\xfe\xd3\xff\xbb\x00\xa3\x01\x8a\x02 \x04_\x05Z\x067\x08~\x0b\xf3\x10i\x17\x15\x1d\x87"a(\x02/\xe45\xea:\x96=\xaa>N>\xe9; 7O0\x12(\xa4\x1e\xb0\x13\xbf\x07\x96\xfc\xe4\xf2E\xea\xe9\xe1 \xda\xac\xd4\x15\xd2[\xd1m\xd1\x15\xd2\xfe\xd3\x03\xd7c\xda\xef\xdd\xa4\xe1\x97\xe5^\xe9u\xec`\xef\x12\xf3\xc4\xf7\xb7\xfc\xd3\x00>\x04X\x08C\r\x01\x12\x84\x15\xae\x17\xbe\x18\xb2\x18\xff\x16\x9b\x13\xd8\x0e/\ts\x02\xb2\xfaV\xf2\xb1\xeam\xe4\x1b\xdf\xa3\xdaI\xd7\xd4\xd5\xa9\xd6\x8a\xd9\xb5\xdd\xc1\xe2\xab\xe8J\xef`\xf6V\xfd\xe1\x03\xfd\tH\x0f~\x13\xaa\x16\xf8\x18\xfd\x1aX\x1c\xd8\x1cy\x1c\xad\x1b\xd4\x1a\xf2\x19\x8f\x18\xb8\x16\x89\x14\xf7\x11\x08\x0f\xb5\x0bT\x08\xc0\x04\xcd\x00\xad\xfc\xb8\xf84\xf5l\xf2y\xf0,\xef\x9e\xee\xcd\xee\xde\xef\xbe\xf1.\xf4\xc5\xf6\x93\xf9Z\xfc\xf0\xfe(\x01\xcb\x02\xce\x038\x04\xf5\x03\x12\x03\xb2\x01\x12\x00;\xfe\x10\xfc\xc0\xf9\xb5\xf7\x02\xf6q\xf4\xe6\xf2\x94\xf1\xb9\xf01\xf0\xea\xef\xee\xef3\xf0\xd2\xf0\x98\xf1U\xf2n\xf3 \xf59\xf7\'\xf9\xdf\xfa\xea\xfcZ\xff\xa0\x01\xf2\x02\xb6\x03\\\x04\xb3\x04a\x04\xf4\x02Q\x01V\x00\x1a\xff\xcd\xfd\x1a\xfd\xee\xfe\xa4\x03 \t\xbf\x0eq\x15\xf6\x1e\xb7*!5\xd4<\xa0B\x0fH\x11L\xf7K\xc7G\x00A\x868\xc2-* \xad\x11t\x04Q\xf8C\xec\xa4\xe0p\xd7\r\xd2y\xcf\x0b\xceB\xcd\x12\xce\xd2\xd0\x8c\xd4\x03\xd8\t\xdbo\xde\x16\xe27\xe5\xc0\xe7\xab\xea\xf0\xee$\xf42\xf9\x12\xfe\xfa\x036\x0b\x94\x12\xb5\x18?\x1d\x9a \xb9"\xca"K 
%\x1b3\x14\xc0\x0b:\x02%\xf8D\xeeR\xe5\x83\xdd\x1b\xd7\xbc\xd2\xc5\xd0Y\xd1\xf7\xd3\xfd\xd7!\xddL\xe3p\xea\n\xf2\x85\xf9K\x00\xeb\x05\xb8\n\x0b\x0f\x1d\x13\xae\x16\\\x19\x02\x1b\x1d\x1c\xe6\x1c\xcc\x1dn\x1e{\x1e\x81\x1d\\\x1bQ\x18\xcc\x14\x0e\x11\xa2\x0c\'\x07\xf8\x00\xd7\xfaJ\xf5\x9d\xf0$\xed\xe9\xea\xbf\xe9\x87\xe9\x99\xeaN\xedp\xf15\xf6\xea\xfa\t\xff\xba\x02\x16\x06+\t\x85\x0b\x83\x0c\x02\x0cr\nj\x08g\x06P\x04\x01\x025\xffA\xfc\x8c\xf9m\xf7\xe7\xf5\x9b\xf4\x1f\xf3I\xf1]\xef\xee\xed\x1b\xed\xb7\xec\x8d\xec\xb3\xec\x04\xed\xf0\xed\xc2\xef^\xf2\xea\xf5\x80\xf9\x9c\xfc\x87\xff&\x02\xde\x04\x1e\x07\xfc\x07\x08\x08\xab\x07s\x06\xc5\x04\xa2\x02~\x00\x03\xff4\xfdq\xfa\x8e\xf7`\xf5V\xf4\xc2\xf3\x85\xf2Y\xf1\x9c\xf1\xa8\xf3X\xf8\n\x01\xb7\r\x0b\x1c\xac(\xf52\'>=K\x89V\xd6[\x1d[\rW\xe5P\xe6F\xf08\\)\xda\x19\xa4\t\x7f\xf8\xe8\xe8\xaf\xddo\xd6\xf4\xd0\x92\xcbx\xc7\xd8\xc5,\xc6]\xc7\xa3\xc8\xc0\xc9\x00\xcb\x88\xcc7\xcf&\xd4\x9d\xdb1\xe5\x82\xef3\xfa\x9c\x05\t\x12\xb5\x1e\xea)t2\x017\x957\xbc4\xfa.\xbe&k\x1c@\x10m\x03q\xf6R\xeaK\xe0\xed\xd8/\xd4\x10\xd1\xf7\xcei\xce\xa2\xcfu\xd2\xef\xd5\x98\xd9\xa2\xdd=\xe2\x88\xe7\x82\xed\xa1\xf4\r\xfd\xeb\x05\x85\x0e\xef\x16*\x1f\x98&\xe5,H1;3{2D/\x14*\x18#]\x1aa\x10\xad\x05\x90\xfb\xe8\xf2\xdc\xebm\xe6\xdc\xe2:\xe1G\xe1\xd0\xe2\xa1\xe5n\xe9\xbd\xed\xa8\xf1.\xf5\xe9\xf8\xca\xfda\x03j\x08T\x0c&\x10\xa5\x146\x19\xa8\x1c\xac\x1em\x1f\xd5\x1eM\x1c\xb6\x17\x04\x12\xcc\x0b\xe2\x04D\xfdB\xf5\xe3\xed\xfb\xe7\xdc\xe3\xf8\xe05\xdf\xc1\xde\xf9\xdf]\xe2\x8d\xe5o\xe9\xb4\xed\x0e\xf2\xd6\xf5_\xf9o\xfd\x9e\x01\x7f\x05\xfd\x07n\tt\x0b\xbb\rR\x0fP\x0f\xdd\r\x07\x0c\xb5\t\t\x06.\x01\x95\xfc0\xf8a\xf3\xec\xed\x07\xe9\xa3\xe6|\xe6\x17\xe6j\xe5\xbe\xe5&\xe8"\xec\x8c\xef\x9f\xf2\xc5\xf6$\xfbA\xff\x96\x05U\x12\x97%\x898\xabDdK\xefR,\\\x7fax^WUmK\x8c?X/b\x1d\n\x0f\\\x04\xa4\xf8\xef\xe9\xb2\xdc\xc6\xd5\xf9\xd2\xc8\xceh\xc8h\xc3\x10\xc11\xc0\xf7\xbf\xad\xc1\x9a\xc7J\xd01\xd9g\xe2\x0b\xee\xd1\xfdP\x0e \x1b\xd3#<*\xe7.\x061\xab0O-\xd4&\x1d\x1e\xd4\x14\xb8\x0b\x07\x03\xaf\xfa\x90\xf3\xb3\xec\x8e\xe5h\xde\xa5\xd8\x9b\xd4\xa8\xd0F\xcc$\xc8]\xc6\xd3\xc7\n\xcc^\xd2x\xdb\xca\xe6[\xf2=\xfdt\x08#\x14*\x1e\xae$\xfb\'\x87)\x9f)\x8c(\x17&\x9e"T\x1e\x97\x19\xcd\x14\xcc\x0fr\n\x88\x04\xc7\xfd\xed\xf6\x99\xf0\xb9\xeaY\xe5\x1b\xe1\xfd\xde\xc2\xde\xe8\xdfv\xe2X\xe7\t\xee\xfd\xf3\xd5\xf8\x02\x00<\r\x1a\x1c\xb5$\xb4%\xdb%\xe8)\xab-p+\xc7$\xf3\x1eY\x1an\x13=\nl\x03\x1c\x01\x97\xfeh\xf7\xaa\xed\x9e\xe62\xe4h\xe2\x01\xde_\xd9g\xd8\xbb\xdb1\xdf\xff\xe1\xc0\xe7\xaa\xf1\xd9\xfb\x14\x01&\x03\xf1\x07=\x0fO\x14^\x13\x07\x10\xf9\x0e\xf5\x0el\x0c\xef\x07\x12\x05-\x04\x9e\x01\x0b\xfb\xdb\xf3\xbb\xefS\xed}\xe8\x89\xe0\x91\xd9\xfd\xd6A\xd8\xcb\xd98\xdc0\xe1L\xe8\xe0\xef\xf1\xf5\xad\xfby\x01\xd7\x05\xef\x08\x18\n\xa8\n\xb8\x0b\xb6\x0eI\x14\x93\x1aO%\x116\x08H\x90S\xbeV\xd6WVX\x86R\xf6C(4\xa2(\xb6\x1d)\x0f>\x01D\xfbx\xfa(\xf6\x1e\xed\x90\xe3l\xda\xac\xd1\x84\xc8\x97\xc1\xa1\xbf\xa1\xc1\xf9\xc6\xe0\xce\xda\xd8\x14\xe6i\xf5n\x02\xdb\n\x9c\x0f$\x12\xe7\x132\x15\x98\x15\x06\x16\xe9\x16\xef\x18\x1e\x1b\xc3\x1aM\x18K\x15+\x10N\x06-\xf8\x16\xea\xe8\xde\xcd\xd5\x17\xce\n\xcak\xcb\x92\xcf\xa9\xd3\xea\xd6W\xdb\xb9\xe0\x11\xe51\xe8\x9e\xeb\xac\xf0\xe7\xf6\xcf\xfe\x9a\x08\xcb\x13\x0f\x1e\xe2%\xc1*\xc7,w+\xa0\'\x94"\xdb\x1c\xa8\x16U\x10\x95\nd\x06\xa8\x03K\x01\xb0\xfd\x9d\xf8-\xf3\x9a\xeeE\xea\x9a\xe6\x03\xe4\x92\xe3\x17\xe5)\xe8s\xedz\xf5P\xfeP\x06\xa7\x0b\xf5\x0e4\x11_\x13\t\x16_\x17U\x19\x02\x1e/%\xb5*\xc7+5) 
$\xb0\x1b/\x10\r\x04/\xf9\xc2\xef]\xe7\xc2\xe0\xa8\xdc\x99\xdb\xa1\xdb\xac\xdcK\xddu\xdd\x0b\xde\xb9\xde\xb4\xe1\xd3\xe6\x0e\xef"\xf9\xb9\x02M\x0bB\x12\x8f\x17[\x1a\x90\x1a\x1b\x19\x1e\x16\xda\x11\xa2\x0c~\x08\t\x06\xf8\x03\xc1\x00\xbf\xfb\x84\xf5\x87\xee\xd9\xe7k\xe2R\xdf\x1d\xde\xd8\xdd\xf9\xdeA\xe1\x96\xe5m\xeb^\xf0l\xf4r\xf7\x03\xfb\x87\xfe\x03\x02\x94\x06\xa2\n\xc2\rZ\x0f\x0b\x10=\x11\xd5\x12\xbe\x13~\x13\x11\x10I\x0c\x02\n\x8e\x08Q\rf\x1e\xd55\xf9B\xc9;,+_$\x07&\x0c#d\x1b\xd8\x17\xe1\x19\x8a\x18\xd6\x0e\x11\t|\r\xef\r\xc7\xff\xb0\xe9g\xda\'\xd6\xa9\xd6\x83\xd8#\xdf\x9f\xe6\x90\xe8\x04\xe5\x88\xe1\x92\xe3\xfb\xe9\x8a\xed\x0e\xec\x9f\xeaX\xee\x97\xf7\xfa\x02\x04\x0eg\x163\x18\x06\x13\x9b\x0c\r\t\xca\x07\x86\x07\xa2\x06\xc4\x03\xa5\xfe#\xf9\xe3\xf7l\xf9\xc4\xf8\x9b\xf3\xc5\xeb:\xe4\x15\xdfR\xde/\xe2\xe2\xe89\xefX\xf3L\xf5k\xf7z\xfb\x93\x00\x98\x04\x07\x07\xd8\x08\xd5\n\xd0\rS\x12\xe6\x17\x89\x1b\xaf\x1a\x1e\x15\xdf\r}\x07\xde\x025\xffU\xfc#\xfa\x1c\xf8\x8b\xf5\xf0\xf3\x0b\xf4@\xf4)\xf2I\xee\xe5\xeb\x1a\xec\xbb\xee\xc3\xf3\xe7\xfa\x08\x03\x88\x08\xb2\n\xc3\n\xa4\x0b\x18\x0e\xf8\x10\xe4\x12\xe1\x13\xf0\x15\xfb\x18\x1d\x1au\x19\xd9\x17\x0b\x16\xe1\x10\xe0\x07\x85\xff\'\xfaI\xf7\xcf\xf4\xc9\xf2^\xf1\xa7\xef\xd1\xec\xc2\xe9\x1c\xe7\xd7\xe6\\\xe8\x05\xebd\xee\xc9\xf3\xd2\xfaU\x01c\x05\xaf\x07\xce\t6\x0bL\x0b\x8e\n\x80\nh\x0bG\x0c\xfb\x0b\xa0\nW\x08\xf3\x04C\x00\xcd\xfa\xa9\xf6\xb3\xf4=\xf4j\xf4\xa4\xf3_\xf3\xd7\xf3T\xf4\xc3\xf4\xc3\xf3K\xf3\x10\xf4\x91\xf6\xb1\xf9\x9d\xfc!\xff\x8d\xff\x18\xff \xfdQ\xfc\x10\xfd\xd4\xfcn\xfc8\xfb`\xfb\xba\xfbD\xfae\xf7\x98\xf5\xb2\xf5\xf7\xf5V\xf5\xd1\xf5w\xf88\xfc\xb1\x03y\x15D.\x01=\x9c7\xc6\'\x7f#(+\x821\x0f3\xe17uB\xe0@s.\xb7\x1bR\x167\x13x\x03\x02\xf0\x83\xe8\'\xecX\xec\x12\xe6\x9e\xe2\xa5\xde\xb9\xd4:\xc6\xfe\xbeo\xc6-\xd6,\xe4t\xebJ\xf0v\xf5=\xf9G\xfb\x16\xff0\x08\xbc\x10.\x16\xe2\x1b\x81"?\'\x9b&3!\x1f\x18:\r\xc1\x047\x02*\x03o\x02\xac\xfdP\xf5!\xeb\x9d\xe0\xd1\xd8k\xd5\xd4\xd5\xb2\xd7\x97\xd9F\xdc\x92\xe0\xc9\xe5\x05\xe9k\xea\xc8\xec\xe6\xf1,\xf8g\xff\x1c\x08\x1d\x11\x1b\x16/\x16h\x14;\x13\x8e\x12F\x12\x8f\x12)\x13}\x12I\x0f"\n\x88\x04\x16\x00\xca\xfc~\xf9R\xf6\xd6\xf4O\xf5\x82\xf5{\xf5\xbb\xf5\x04\xf7\x9c\xf7\xaf\xf7\x18\xf9\xa1\xfc\xcb\x00y\x04\xf6\x06e\t\'\x0b\xc4\x0c\xfd\r\xda\r\xb9\x0c\x8c\x0c}\x10\xc2\x15\x80\x18d\x16\x92\x11\xcc\x0b\xb9\x05\xc0\x01_\x01\xce\x02\x13\x02\x94\xfd\x0b\xf8\xda\xf3\xfb\xf0\x18\xf0w\xf0\x19\xf1C\xf1D\xf1n\xf2L\xf4P\xf6j\xf8$\xfa\x04\xfb\'\xfc\x88\xfe\xc7\x01\xe8\x03\x10\x05\x9f\x05:\x05\xe9\x03]\x02#\x02\xb4\x02\x9a\x02\x0b\x01\xa2\xfe@\xfc\x17\xfan\xf8(\xf7\r\xf6R\xf4\xe6\xf1:\xf0\t\xf0\xce\xf0(\xf1p\xf1\xea\xf18\xf2\xb3\xf1\x9f\xf1\xb3\xf3c\xf7P\xfa\xe8\xfb0\xfd\xda\xfe&\x00\xeb\xff\x0b\x00\xdd\x00u\x03]\x06\x12\x08\x9b\x08\xef\x06\xfb\x057\x058\x04\xd7\x07\xf7\x16\x801e?\'3\x0b\x1a\x1e\x10,\x1a\x92$\xad+R7MA\x1d5\x8d\x16\xeb\x04I\t\x9a\x0e\xcf\x05-\xfdi\xff\x89\x01\xbb\xf6\xda\xe7^\xe2\xae\xe2z\xdc\xa8\xd2S\xd2\x0b\xe0\x08\xee-\xee\x15\xe6\xd8\xe1z\xe3S\xe5;\xead\xf8X\x086\rq\t\\\x08\xee\x0cn\x0e\xdd\x0c<\x0f\xaf\x14q\x16\xd2\x11\xca\x0e\xb1\x0e\xe2\x0b\xae\x01@\xf6x\xf1\x80\xf1)\xf1U\xefT\xee+\xec?\xe4\xab\xda\xf6\xd7O\xdd]\xe4\xc4\xe8\xa5\xecq\xf0\xb7\xf1\x92\xf0\xa2\xf1y\xf7;\xff\xef\x05S\x0b\xfb\x0fY\x13\x81\x13x\x12\xa1\x12?\x14\xb7\x15@\x16\x0f\x17\xf5\x16\xc8\x13#\x0e>\t]\x06\x9f\x03(\x01:\x00,\x00\xaf\xfdH\xf9\xfa\xf5"\xf5\x88\xf4\x8c\xf4\x81\xf6\xfb\xf9\xc4\xfb\xa9\xfb\xb9\xfc\x05\xff\xa2\xfe6\xfe\x8a\x01Z\nZ\x0f\xa8\r\xff\ni\t\xd5\x07\xa3\x04\x1b\x08r\x0fw\x11J\x0b\xac\x03\xcd\xff~\xfc\xbc\xf9c\xfcM\x01\xff
\xfe,\x0c8\xf82\xfco\x04\x80\xfcv\x03\xfb\x04v\xf4u\x03\xad\x0e2\xe9\xb5\x02\xbb\x11!\xf2\x90\x05O\xff\x00\xfd|\x00\x02\x06,\xf2\xe0\x0bb\x00|\xf1\xb2\x16\xe2\xed\xf5\x03\x0b\x02\xd0\xfdg\xfb\xfc\x0b\n\xfaK\xf8M\x0f\xe7\xec\x1a\x12\xa5\xf4M\xff\xbb\x02\xe0\xfb\x08\x0b\xae\xf4M\x108\xec\xa4\x08\xde\x00\r\xf3<\x17`\xee\xf0\x03=\x07\xd4\xf87\xfe/\x07`\xfa\xe1\xff\xa7\x06\xc5\xef\xd8\x11\xbe\xfc\x1d\xef\xcf\x16\xe2\xf1\x08\xfdQ\x15\x93\xe5\xa7\r}\xfe\x99\x00r\xf8\x19\x03\xd9\t2\xfbX\xfd\xb5\xfa\x8e\x16\xe7\xde{\x10o\r{\xed\t\x02\xa8\x0c\xdf\xf4\xc6\x02-\x03\x9d\xf31\x15.\xedG\x04\xc3\rv\xeb\x85\n\xac\x03\xee\xf3]\r\x99\xf7\xcb\x01(\xfe\x99\xffH\x03\xcd\x04t\xf2\xed\n4\xfd\x88\xfaw\x0c\xda\xe9U\x159\xf6_\nN\xf4\x9d\x01(\x05\xdd\xf7A\x04\x9a\xfa3\x0eH\xf2q\x00T\x0c\x17\xf6[\xf6P\x15\xbc\xf8\x83\xef=\x16r\xef\x17\x04r\x0b\x02\xf7\xd5\x01{\xf8d\x11n\xee\xc7\x04\x1c\x04\xca\xfb\x80\x01\xa5\xfe\x0b\t\xfe\xf7\xbb\x03\x90\xff"\xf6\x95\x07t\xfd-\x046\x01\x9d\xf9\x85\x0b\r\xf6u\x02\xab\xf9\xf9\x0bP\xfc\x8e\xf6\xe6\x15\xba\xef]\xfd\xe7\x10\x8d\xed\xdd\x01Q\x0b\xf3\xfe\x97\xeeW\x19u\xf2\xf7\xf9\xf9\x11\x06\xe7g\x12\x11\xff1\xf9y\x04\xdb\x06\x12\xf0z\x10Q\xf5\xf8\xfd\xee\n\xee\xf4\xcb\x04\xd5\x05&\xfc\x19\xfbM\x07\xbe\xf60\x0b\xd8\xf3\x1f\x02\xf6\x0b\xaf\xf2t\x08\xcf\x02\xd7\xf6\x16\xfc\xf9\r\x0e\xec\x08\x02\x1c\x19\xdb\xe6C\x0bD\x00\xa9\xf8\x8e\x04f\x03\xc9\xf36\x0b\x17\x04\xa3\xf6w\n\xb8\xf6\x8b\x0b\'\xf3\xda\x02,\x10\x9e\xe8\x80\x0cl\xff\xb1\xf5Z\r\r\xf6.\n\xc0\xf1\x02\x02\x90\x06\x12\xfc\xc0\x02\t\xff\x12\x043\xfav\xf9N\x06\xdd\xff\x9b\xfeY\x06\x1c\xf4\xda\x08:\xfc\xac\x00T\x04\x7f\xf7\xa3\x06_\xffQ\xfc\xe8\x08\xb5\xf7\x1b\x03\x0b\x02{\xfb\xdc\x07p\xff\x83\xf44\t\xfe\xfb4\x01I\x05\xbc\xfc\x9f\x07\xfc\xecS\x0bs\x02\x89\xfa\xcd\xf7_\x12-\xef\xf8\x0c@\x03\xbb\xe7\xf3\x18n\xef\x9f\x017\x03F\n\xf9\xf1\xe7\x05\xf9\x04\xed\xf2F\t\x98\xf8\xde\x04\xa4\x01\x06\xfa\xeb\x02\x80\xff2\x00u\x05I\xf8\x95\xfbU\x07\xd6\xfc\xa9\xf9\xbc\x12\xb2\xf2\xd6\xf5\x0e\x0fT\x02x\xfd\xfd\xf8\x1a\x08\xaa\xf6\x0e\x03\xb9\x01\xfc\xfdG\x0f\xa8\xf1\x0f\x02j\x01\x0f\xfe\xd7\xfd\xb2\x04\x99\x02\x95\xf2\x08\x11G\xfa\xa7\xfdQ\x01\x7f\xfe\xb3\x04*\xf8<\x06\xa8\xfb;\x07d\xf8\x97\x06P\xfd\x84\xfc\x8e\x06I\xf5\xa8\x0c^\xf4\xc1\x07\x07\xfe\xfc\x00\xb5\xfe\x87\xff\xa8\xfe\xed\xfb\xee\x0e;\xf1V\x05J\x00\xb5\xfa\x08\n!\xf7\x1d\xfd\xd1\x0e\xf8\xf5\xb3\xfe\xad\xfc\xe6\x06\xa3\xfe\xa7\xfb1\n9\xf5\\\x03R\x03\x1e\xf9\xa0\x07\xf1\xfeP\xf4\xdc\x07\xf0\x06}\xfc\xf2\xf9\xac\x08v\xf9\xed\xfe\xca\x02|\xfd\x07\t\xe2\xf1\x84\x05\x11\ns\xf3>\t\xda\xf4W\x05\'\x01s\xf7\xe5\x0e\x12\xf9\x10\xfe!\xffn\n\xbb\xf1\x95\x08|\x01Q\xf6Z\t\x03\xf73\n]\x02\x97\xf0\x1d\x0e\xaa\x02,\xf15\x0fT\xf5\x89\x01\xf5\x06\x11\xf77\x03Q\x0bP\xf3\xb0\x02E\x00\xdd\x01;\xfes\xfaY\x06\xcb\x02\x9f\xfaT\xfa\x85\x10\x06\xed\x12\n\xca\xfd\xb0\xfb\xc0\t\xf0\xf5+\x017\x03o\xfa\x80\x07\xff\xf9\r\x02\xa0\t\xdc\xeaD\x11w\xf3/\x07V\x03\x19\x01\x80\xf6\x02\x08\xd2\x06\xb2\xe9\x90\x1b\x92\xf3\xd2\xfb\xcd\x10\xbc\xee\x93\xfe^\x11R\xf4\xa7\xfe\xdd\n\xe6\xf2r\xff.\r\x7f\xee\xe7\x04\x1c\x01d\xf5\xb0\n\x08\x014\xff\xf4\xf7\xb9\r\xbb\xf5P\xfb\xab\r\x81\xfd\xc5\xfc\xcd\x06\xe1\xfc(\xff\xbc\x08A\xef\xaf\tW\x07\x0e\xf3x\x06\x12\x01\x99\xfeN\xfb\r\x04\xab\xfb!\xfd\x81\x0b?\xf8b\xfd%\x08\xff\xfd\xb2\xf5\xb1\t0\xfd\x81\xf0X\x11\xac\xfc\x9a\xf99\x10~\xf2\xe5\xfc\xa1\x07^\xfe,\xfe\xa4\x06\x80\x00\xd1\xf4\x07\rR\xfc\xd1\xfb\\\x04N\xfep\xfe\xce\x04\x1f\xff\x17\xfc\xf0\x020\x03\x0f\xfe3\xfc\x86\x03\x10\xfc\xc9\x03v\xfd\xda\x01\x16\x04e\xf8z\t\x07\xf8\x8c\x00[\x01\xc0\xfa\xd1\x03\x07\x03\xea\x00\xe4\x02\x07\x01\xb7\xf2\xd2
\xfe\x81\x08\xa9\xff\xa3\xf8\x05\nz\xfe\xaa\xf6s\x07\xd3\x01\xb6\xf5\xd5\x05\x12\xfd\x9e\xf4\x16\x10\x13\xff}\xfd\xe5\x04%\xf7F\x00\x9a\xff\x02\x00%\x06|\xfaI\x01\x92\x06/\xf9\xdd\x056\xff\xf2\xf3\xc0\n\x9c\xfch\xfe\x11\t\xe8\xfd\xc9\xf8\xa2\x00^\x06\xa0\xf7\x18\x05\xf9\x01R\xf78\x02\x1f\x03\xdd\x01\r\xfb\xc8\x03\x83\xfc\xd2\xfb\x13\t\x08\xf9\x8b\x02T\x00\x80\xfa,\x05\xe0\xff\xd7\x01\xee\xff\xe8\xfe\xf4\xfaY\x00.\x02J\x017\x00\xe4\x00A\x00,\xfc\x12\xfe\x9e\x03o\xfd\xff\xfe\x98\xff\x12\x05\xd2\x04\xc4\xf9\x12\x00w\xfd\xdd\x037\xfc\xc7\xfd\xe3\x04^\x07\x13\xfa\xdd\xfe?\x04\x0c\xfb)\x02\xa1\x00?\xfc\x87\xfc\xd7\x03\xd6\xfd\x1d\x05\xeb\xfbs\xfe\x10\x00\xb2\xf8n\x02n\xfe\xa8\x01\x16\xfe\xf4\x00\x98\x02~\xfe\xe3\xfc4\x03\xb0\x00?\xfe\xb8\x02A\x00T\x00i\x04\xd7\x00\x17\xfd\x0c\x00@\x03\x00\xfd\xb7\x00\xc4\x02#\xfcQ\xfe\x16\xfd)\x01Y\xfeW\x00\x14\xff8\xff\xf7\xfcN\x02\xe2\x02e\xfb2\x04V\xff\xca\x02:\x03\x16\x03\x93\x02l\xfd\xb6\xff\xaf\x01n\x03\xf0\x02\xd1\xffx\xfe\xe7\xfc \x00\x94\x02\xe6\xfa\xfa\xfe\xaf\xfd(\xfd\x9b\xff\x03\xfe\x91\xfd\n\xfd8\x00\xe5\x00b\xff-\x02\x96\xfe\xce\xf9]\x04\xa1\x02\x84\x00\x04\x06`\x02\xf7\xfeF\x01\x85\x00\x06\x00\x10\x03.\x03;\xfe!\x00\x8d\x020\x00\xdf\xfe\xde\xfd\xcb\xff\x08\xff"\xfc\x19\x01J\x01Q\xfd\x11\x00l\xffk\xfd!\xff\xf6\x00\xec\xfe\xb6\xff\xda\x04\xf0\xff|\xfe*\x02\xf2\x00\xcb\xff\x17\x03b\x01\xdc\x01\xb8\x00\xed\xff\xce\x004\x00\xc0\x01\xd1\x005\x01\xab\xfe\xb0\x00\xae\xff\x98\xfe\x88\x00\xa7\xfeJ\xff\xd5\x01\x99\x00\x86\x00\x97\x00\r\xfcF\xfd\xfe\xff\xb4\x00\xff\x01]\x02|\xff\xcd\xfe\x87\x00\xfb\xff\xa3\xfeW\xfe\xcf\xff\xc8\x01\xd3\x01\xbc\x02\xe5\xff:\xfe8\xfdc\xfc.\x00\xfc\xff\x02\x00\x8d\x00\x1c\xff\xd9\xff\x1e\x00r\xfd\x8b\xfc\xf1\xfc\xf5\xfc8\x00\x1c\x02\xfe\x00\xfc\xfd\xf4\xfd^\xfd\xf2\xfb\xc3\xfd\x9c\xfe+\xfe\x00\xffW\xff\xcc\xff\x8b\xfd\x94\xfc\x93\xfb\xc6\xfb\xf1\xfco\xfdM\x01\xef\xff\xfa\xfc\xe3\xfe\xee\xfd$\xfdO\xfe\xf0\xfc\xec\xfc\x13\xfd1\xffU\xff\xf5\xfe\x0c\xfe\x7f\xfd\x14\xfd@\xfc\xf6\xfa\xfe\xfb+\xffB\x02{\x07\x1f\x0b\xe1\nB\x08n\n\xdc\x0c\x03\x11c\x12\x19\x14\xd9\x17\x9d\x18\xef\x19f\x19\x9c\x16\xc7\x11\x9e\r\x11\x0b\x95\x0c\xb3\r\xb7\t\xc4\x049\x01\x99\xfb\xf8\xf6\xb5\xf4X\xf1w\xeeN\xeb.\xed\x86\xed_\xee\x11\xee\x93\xea%\xeb\xe0\xea\xec\xed\x16\xf2Z\xf7\xed\xfaA\xfd*\xff\xc6\x025\x06\xa9\x05\xf3\x07\xe4\x06\xa0\tb\x0c\xa7\x0c\x90\x0c\x16\x07\x07\x05m\x01\xf8\xfe+\xfek\xfax\xf6\x0f\xf4/\xf3\xd6\xf1\x0f\xf1\xbc\xed\xa0\xeaU\xea\x07\xea|\xed\xdd\xefM\xf0\x84\xf0x\xf1;\xf3\x7f\xf5\xde\xf8\xe1\xf7\xa3\xf7\x96\xfbd\xfe\x03\x00\xda\x02q\x01F\xfeK\x00`\x00\x06\x02M\x03\xbd\xff:\xfe\x0f\xff\x05\x00\x11\xff\xbd\xfbS\xf9)\xf8\xcd\xfaG\xfdR\xfc\x9d\xfb\xfd\xf8\r\xf9\xbb\xffc\x04\xe2\x048\x03b\x03\xf1\xff\xa3\x016\nY\x15\xd8#\xda(\xd9*\xc8+\xe2,\xad.I+\xac)Q-24\xa68~3\xd2(\xf7\x1b\x8d\x0f\xb3\x08(\x03(\xfe4\xf8E\xf3\xe9\xf1\xe1\xf03\xec\xf2\xe1J\xd7G\xd4\xab\xd6x\xddb\xe5~\xe9\xc9\xeb\xb9\xed]\xf0t\xf3\xd4\xf7\xbd\xf8\x9d\xfb\x9e\x02\xc1\n\xb3\x12\x11\x15\xf0\x11_\x0cc\x07F\x05\xe1\x04\x9e\x03\xbf\x00\x1d\xfe\xb8\xfa\xc2\xf7\xcc\xf4%\xed\x10\xe6\xef\xdf\xee\xdd\xa2\xe0\xe3\xe3\x18\xe6W\xe7\xa8\xe7"\xe9]\xec\x8b\xf0\r\xf5\xf3\xf7\x16\xfd\x82\x04\x0b\rR\x14\x0e\x17f\x16\xc7\x15\x15\x16\x99\x17\xc9\x19C\x19h\x15\xbc\x12\x91\x0f\xed\x0b0\x07\x18\x01\x19\xfb\xed\xf6\x9d\xf6\xd5\xf5R\xf5R\xf3\x0e\xf1+\xef\x9c\xf0\xf4\xf3\x10\xf5}\xf6\xd9\xf8\xf5\xfa\xb7\xfez\x02\x1c\x01\xe0\x00\x99\x00\xa4\x00\x9f\x04\x96\x05\x18\x03S\x01\xab\xfd\xa7\xfc\x86\xfe\xa9\xfa\xfb\xf6\xb1\xf3\xaa\xf1 
\xf2M\xf3\xb1\xf1\xff\xee\x06\xeeN\xeb\xdf\xed;\xf2\xea\xf3~\xf6p\xf4U\xf7\xc1\xfb\xde\xff\x19\x03\xae\xff#\x04+\t\xc3\x16\x15-\xb76\xdd8\x1f1\xa1+\xa71p7\x8c6\xd0344\xd73p/\xdb$\xeb\x16\xc9\x04\x9c\xf4\xd0\xed\n\xef\xcc\xf1\x01\xee\xb2\xe5\x8f\xde\xde\xdb\xeb\xd9\xe6\xd7h\xd7Q\xdaC\xe1#\xed"\xfaS\x02\xcf\x03\xc8\x00Y\x00\x04\x04\x11\x0c*\x14\x8a\x18\\\x1a\x0b\x1b\x94\x18y\x13&\x0c^\x00\xec\xf7\xe5\xf4\xb6\xf3"\xf4\xc9\xf0@\xe9\x7f\xdf\xe7\xd6\xde\xd4.\xd3\x80\xd3\xac\xd6>\xdc\xdd\xe4\x1a\xebb\xef#\xf1m\xf1r\xf5\x07\xfd0\x08\x7f\x12,\x18\x06\x1ai\x1a\xd1\x1af\x1a\x8e\x18@\x14\xf9\x11\x00\x13&\x16:\x14\xae\x0c\xe1\x02q\xfa\xb3\xf6s\xf5\x84\xf65\xf5\xe1\xf4M\xf4\x0f\xf4\x0e\xf8\x1c\xf7\xce\xf5\xd6\xf7\xb8\xf9\xb8\x02\xaa\tl\x0c\r\rU\n\xc5\t\xc9\tP\n\x87\x08\x98\x05\xc6\x03\x0c\x02\x17\x01\xe2\xfdj\xf6\xf5\xee.\xeaD\xe7[\xe7h\xe6\xd1\xe4\xa9\xe4\x15\xe4\xee\xe6\x9f\xe81\xe6Q\xe6\x13\xe7\x7f\xec\xb5\xf5\x8b\xfa;\xfc\xe9\xffA\x03\x9f\x06\x94\x0bE\x08\xc7\x05\x18\n;\x0f\xb7\x16\x1c\x199\x19\xe7\x1e\xdb+\x025\x193r*\xeb"\xd6!^#\xbe\'\xbd,R,\xc9"\xe6\x15c\r\xa8\x07\x9c\xfe\xa3\xf3\xb0\xef\xeb\xf12\xf6\xe4\xf7\xc6\xf6\xff\xf1\xb7\xe9\x19\xe3\xcc\xe4\x97\xee\\\xf8\x1b\xfe)\x03H\x07\xb9\x08\x05\x07\xd4\x03\xdc\x01\xb7\x01\xc8\x04\x07\nw\x0f\x8c\x10\xb5\nS\xffM\xf4\x9b\xef\xe1\xeb~\xea\xcf\xea\x1d\xeb\x06\xed\x87\xea\xb7\xe7\xb3\xe3\xb7\xde\xa2\xddN\xe0\xc5\xe8\xce\xf2\xc6\xfaS\xfe\x0f\x00`\x00&\x00l\x00\x95\x02.\x06\x00\x0b\x00\x11\xcc\x12O\x13j\x0e\x92\x06\xea\x01\xdb\xffK\x01\xbf\x02\xd3\x04\x8f\x04s\x02c\x00\x82\xfc\xf0\xfa\xfc\xf95\xfa\x13\xfd\x86\x00y\x05.\x08{\x07\x14\x06\xcd\x04V\x05\x89\x08f\n\x05\rA\x0e\x83\x0e.\x0e\x1d\x0c(\x08\x8d\x03\x10\xff)\xfcS\xfc\x89\xfb\x7f\xf8Y\xf3~\xee\xeb\xea\x8d\xe8X\xe7\xf1\xe4n\xe4W\xe7\x7f\xe9\x06\xed\x05\xef\xf1\xed~\xefF\xf0\xe2\xf2T\xf8\xa7\xfa\xf2\xfd7\x01|\x03T\x05\xb6\x03\x91\x01\xbf\xfe\xc4\xfe\xd0\xfek\x02/\x04H\x03\x84\x02\x00\xfe\xea\xfaV\xf5f\xf55\xffL\x0b:\x19\x86&\xd30\x003\x86+\xa1\x1e-\x1a\xfd#81\xb6:s:t2\xf6%;\x17\xe2\x08.\xfd\x88\xf3U\xed\xa6\xed\x1c\xf2)\xfa\xc9\xf7\x14\xec\x07\xe0J\xd9\xde\xda\xb6\xe1\x05\xec=\xf7\x11\x01\xf0\x07\x10\n\xb7\tE\x07\x05\x02\xb6\x00\xf6\x04\x87\x0c\xf5\x13q\x15\xeb\x10\xee\x07g\xfc\xd8\xf3\xe1\xedL\xe7M\xe5\xbe\xe6\xa1\xe7m\xea)\xe9\xe7\xe3U\xdd\xed\xd8\xe1\xda\xa5\xe1m\xea\xf7\xf1\x7f\xf9\x18\xff\xd4\x02I\x04=\x04%\x03\x11\x05 
\t\xa6\x0e\x00\x14\xc0\x14\x0c\x12\xbf\r\x02\n\x0b\x04\xda\xfe:\xfc\x1e\xfft\x05\xfb\x07i\x06\r\x01\xf5\xfcn\xfb\xe9\xfa\xfb\xfc<\x01*\x05\xf3\x07r\n\xec\t\xf9\x065\x03{\x00\xf5\x02\x8e\x06g\n\xf7\n\xf6\t\xcb\x08L\x05\xde\x02\x0b\xffB\xfc\xbb\xfc\xa4\xfc\x9f\xfd\x1f\xfd\xcf\xf8\xcc\xf2\xf8\xee\x9f\xec{\xeb\x8f\xecp\xec\xbe\xec{\xee\x11\xf0\xee\xf0\x8f\xf0r\xf0\x15\xf1\xd4\xf3\xfd\xf7\x98\xfb\x0b\xfd\xea\xfc\x98\xfc!\xfd\xe6\xfdi\xfd}\xfb\xa0\xf9>\xf8\x9b\xf9-\xf9\xd4\xf6\xfa\xf3\x00\xf4\xb8\xf9\xc3\xfb\x19\xfd\xf3\xfb\xcf\xf9k\xfeu\x03Q\x0c4\x1c\xcb+\x954\x817\xd6638\x179\xc16\xe16\x819\x819\x8c5\xb9.\xe4$\xfb\x17P\x06\xad\xf8\xdf\xf2O\xefZ\xed\x95\xea\n\xe9)\xe7\x96\xe2\x98\xdd\xef\xdcy\xdf\x00\xe4\xd2\xeby\xf5\xbd\xfe\xad\x04a\x05\xfc\x04\xc1\x04\x06\x05=\x07H\n\xef\x0e\xb9\x10\xb3\x0e\xde\x08\x94\x01.\xfbo\xf3\xaa\xedW\xeb\xae\xebR\xeb\x03\xe9@\xe6\xba\xe2\xfd\xde\xf9\xdb\x9f\xdc\x0b\xe1\xe1\xe5\xd5\xeb\xca\xf0~\xf4\\\xf7t\xf8\xbc\xf9\xc3\xfc\xc4\x00f\x06\xaa\x0b#\x0e\xfb\x0f)\r\xd5\tz\tg\t\x97\nw\n\xd2\x07\xfa\x06\xb4\x07\xbb\x07\xf8\x07\x8f\x05i\x02\x02\x03\x0b\x05l\x07s\t\xf4\x07j\x07\x14\x08o\x08\x86\x08\xde\x06T\x05\x81\x05\xad\x05\xec\x06e\t\x93\tC\x06\xff\x03x\x02\x1e\x02\x03\x014\xfe/\xfd\xf2\xfd\xdd\xfc\xb5\xf9\xed\xf5s\xf0\\\xec\xd3\xeaH\xe9M\xe9\xf3\xe9%\xe8\x86\xe9\xd9\xe9\x9f\xe8Y\xe9\xfa\xe7\xb3\xe9\x1e\xef\xa3\xf2\xb9\xf6\xbf\xf9\xad\xf9\x14\xfa\xe8\xf8.\xfa\x87\xfc,\xfe\x0e\x00\xfc\x01R\x01\x08\x01j\x00\x18\xfd:\xfeu\xff\x8b\x01\xfa\x05\xba\x06\x02\x06\xa4\x06\x15\x07]\tS\x0bK\x0c\xe0\x0f+\x14\x80\x17]\x1b\x10!\x8f(\xda0\x8401*o(\xcc*\xf2,I+\xdf\'l&Z"f\x1a\x90\x12w\x0by\x04S\xfb\xea\xf52\xf6\xbc\xf6\xb8\xf3p\xecG\xe82\xe7\xd5\xe5\xba\xe5u\xe7\xe3\xe9\xf2\xec\x9b\xee\xbe\xf1\xca\xf5\xec\xf5t\xf4\x06\xf5\xfe\xf7\xff\xfbI\xfe\x9d\xfe\xd0\xfd\t\xfcI\xfaA\xf7\xaf\xf4\xde\xf3\xf6\xf1f\xef\xfe\xee\xa0\xef.\xeeN\xeb>\xe9\xd2\xe8\xb8\xe8Q\xea\x0b\xed\x15\xf0\xd5\xf2c\xf4\xe6\xf6\x1e\xf9\x1b\xfb|\xfd\xef\xffD\x035\x07\xcb\nD\x0e5\x0f\x90\x0f\xaf\x10@\x10}\x104\x11\xd3\x10\xbf\x10m\x11\xb4\x13\x05\x15\\\x10Q\x08_\x04\xc2\x03}\x04c\x050\x03\xcd\x00(\xffD\xfd\xa3\xfc\x9a\xfb\xb7\xf83\xf8\xd0\xf9[\xfd\x93\x01{\x03\xf4\x00A\xfd\xfc\xfb\xf9\xfb\x9d\xfcv\xfc#\xfcZ\xfc\xcf\xfb\xc7\xfa \xf87\xf4\'\xf0>\xee(\xee\x18\xef\xc7\xf0\x1a\xf1\xa0\xf0-\xf0R\xef\xb5\xefK\xf1\xb5\xf2Z\xf5\x00\xf8\xc2\xfb\xf1\xfeB\xfe\x15\xfd(\xfd\xab\xfd\xa4\xff\xc8\x01Z\x02\x95\x05.\x07\r\x06\x8e\x07\x9c\x06E\x04\xc4\x04T\x05C\to\x0b\x06\n\xc4\n\xe5\t\xfc\x07\xd4\x05\x06\x05\xec\x06*\x08\xab\n\x8b\r\x11\x0e\x0b\r\x91\x0cl\x0f)\x14\x14\x18\xa4\x1ac\x1f\x96%\xf2&\x9f%A$\xcb"\xdc"\x0b!\x91\x1f^\x1f2\x1a\xbc\x13\x87\x0e\xbb\x08a\x02\xdf\xfb\xae\xf5\xe6\xf1\x13\xef\xda\xeb\x1a\xe9\xbe\xe5\xb3\xe1G\xdf\x96\xde\xae\xdf\xb5\xe1\xfb\xe2\x11\xe4\xee\xe5v\xe8\x1f\xeb=\xed\xb0\xefM\xf2\t\xf5O\xf8n\xfba\xfe&\xff\xa7\xfe\xa4\xfe\xa8\xffg\x00p\xff\xc5\xfe\xed\xfe\x1c\xfe\x0f\xfcd\xfa\x10\xf9\'\xf7 
\xf5\xcd\xf4\xe1\xf5\x9a\xf6p\xf6\xbc\xf6\xca\xf7\x88\xf8y\xf9j\xfbA\xfe\xb4\x00R\x03@\x060\t#\x0b\x1e\x0cE\re\x0f\xd2\x0f\xec\x0f\x7f\x10\xf7\x103\x11X\x0fQ\x0c!\nQ\x08\xfc\x05\xc0\x031\x01k\xff\xc4\xfd\x16\xfc\xfd\xfa\x88\xf9\xe3\xf6:\xf5\xbc\xf5}\xf6\x11\xf7\x80\xf7\xcf\xf7\\\xf8\xde\xf7\x04\xf8j\xf8\x9c\xf7x\xf7\xd5\xf7\x07\xf9u\xf9\xe2\xf8\xe0\xf7\xe4\xf5\x15\xf5\xbb\xf4-\xf5\xcb\xf4\xb2\xf5\x89\xf5\xd9\xf5u\xf7\xbe\xf7<\xf8\xa6\xf7\xb6\xf8\x05\xfd\x00\x01\x97\x00\x15\x01\xc4\x03\x15\x05\xf2\x04\xb9\x04Q\t\x0c\n\xfd\x08\xce\n\xac\x0b-\n\xc3\t-\n-\x08\xfc\x08\x19\x0b\xbd\n\x89\x08\x18\x08\x0c\x08\xb7\x07f\x07\x8d\x08\xb5\x07\xa1\x04\xef\x05\x91\x08<\x08@\x06.\x07\xf3\x07p\x05I\x04\xc9\x04\xab\x04\xd6\x042\x04\xbf\x02d\x04|\x07?\x08&\x07\xeb\x06i\n%\x0c\xb8\x0b\xed\x0bw\x0c2\x0c\x04\x0c\xa3\x0c\xd9\x0b\xb4\t\x95\x06@\x04\xda\x01\xc7\xff\x8e\xfe\xb6\xfcw\xfa\xab\xf7\x1b\xf6U\xf5\xfa\xf3\xff\xf16\xf0k\xf0\xfa\xf1\xa2\xf1\x80\xf1>\xf2>\xf2\xc1\xf1\xa3\xf21\xf5D\xf6S\xf6\xc1\xf7\x1b\xfa\xba\xfb\xe4\xfc\xf1\xfd\xf3\xfe.\xff2\x00(\x02\xbd\x03\xb9\x038\x03\xaa\x02M\x02\x13\x03\x90\x02@\x01O\x00\x14\x00\x84\xff\x1e\xfe\xc2\xfd\xa7\xfcy\xfa\xba\xf9\x8a\xfa\x1b\xfb\x02\xfa\xcb\xf9\xc1\xfa\x8a\xfbr\xfb\xf7\xfbI\xfd#\xff-\xff\x9d\xff\x1f\x02\x04\x04q\x03\x8f\x03"\x04\xae\x03\xb5\x04_\x03\xcb\x01\x9a\x03;\x03\x92\x01E\xfek\xfev\xfe\x82\xfa\xdf\xfb\x93\xfc\xdb\xf9\xee\xf9\xe6\xfa\xf4\xf8\xaa\xfc\x10\xfdP\xf9h\xfe\xc1\xff\x9f\xfd\xd2\x00\r\x03\xc8\x01\xc5\x02L\x03\x18\x07\x8e\x07=\x03\x82\xfeb\x05_\x05\xe4\xff(\x08\xf6\x01\x17\xfe\x8d\x02\xe6\x02\xc1\x03\xd5\x00D\xfc\x83\x00\xca\x03\x84\x01\x9e\x05\x97\x02r\xfd\x89\x02L\x04a\x02y\x03\xc3\x049\x001\x01\xd4\x04\\\x04\xda\x04\x1c\x01\x1d\x00\t\x00\xf4\x00\xa6\x02\xac\x02\xd0\xfeY\xfa\x95\x00\x9a\xff\xfa\xfa\xd8\x01\x05\x00#\xf8\x9b\xf8>\xff\xb2\x01\xd5\xfd\xca\xfc\xde\xfd\x80\xfe6\x00)\x01\xb5\x01\xf9\x00:\xfe"\x02\xc6\x05\xce\x01\xc6\x00\xbb\x01\x89\x02\x04\x01\x81\x03\xdd\x04G\x01\x9e\x00\xb0\x00\xff\x00\xa7\x03\xba\x040\xfeF\xff\xa9\x03\xe5\x02+\x00T\x00\xc7\x02j\xfd]\x01\xfe\x05_\x01f\xffH\x02\xec\x034\x00\xa4\x01\x10\x03\xac\x003\xff\x0e\x02<\x01\xcc\xfd[\x00\x12\xff\x1e\xfd\x87\xfb\xe4\xfbv\xff0\xfe\x86\xfa\x8e\xfb\x8e\xfb\x16\xf9\xa4\xfc$\xfdU\xfc}\xfc\xdb\xfb\xc1\xfc\xb0\xfe^\x00\xe7\xfcE\xfe\xf6\xfeH\xffo\x01Q\x01\xcc\xffW\xff\xfe\xfeN\x00\x8c\xffi\x01\x0e\x01\x7f\xfb>\xfd\xe9\x01\x1e\x01\xf9\xfcz\xfe\x16\xff\x8f\xfe\x1e\xff\xb8\xffP\x00\x93\xfe\xd7\xfc%\xff\x16\x01\xc2\xfe\x8a\xff0\xfe\x89\xff\xe6\xfd\xe1\xff?\x01W\x00Q\x02\xca\xfe\xfd\xfew\x02]\x04f\xff\xa4\xfd\x0b\x00\x9c\x01\xa2\x03%\x00\x9d\xfe\xbf\x00\xba\x03\xee\xfc\xea\xfd\xfa\x01\x89\xfdL\x01\x0c\xfe\x10\xff\x07\x01\xf8\xfd\x07\xfd\x89\xfcO\xff\xef\xff\x01\x008\xfat\xfe\x0e\x01\x9f\xfc\xf6\xfba\xfe\xf3\xff\x98\xf8T\xffR\x02i\xfe\xac\xfc8\xf8\xe0\x03\xad\x01\xf0\xfa>\x00\xc0\x032\xfe\x9a\xff\x87\x04\xc7\x02\xfd\xfc\x1c\x00\x1b\x08\xb5\x01\x94\x00\xce\x07\x0e\x04\xbb\xfe\xb2\x03\xae\x08)\x03D\xfc\xee\x06\xdf\t\xe4\xfb\xef\x03\x98\x08\xb0\xfd\xdf\xff\xc0\x07\x11\x03\xd7\xfd\xd9\x01\xaa\x05\xe5\x03\xfc\xfe\x8b\x05J\x02\xd1\xfd\xbb\x01\xa6\x01\xc2\x05\x97\xfe\xd2\xfd\x9a\x01 
\x03\xbc\xffD\xf8|\xfeV\x03\xce\xfc\x83\xfa\x1f\xfe\xde\xfd\xd1\xfcJ\xfa3\xfe\xcf\xfcL\xf9\x0f\xfe4\xfc\xaa\xfc\x08\xfc\x90\xff\x90\xfc\xab\xfd\xe4\xfd\xfb\xfb\xdf\xfd\xa8\xfc\xc8\x02\x10\xfe\xab\xffg\x01\x02\xfe\x00\xfbY\x02\xfd\x02\x12\xff\xd7\xff\xc9\x03\x83\xff\x8b\xfc\x19\x05\x8e\x05v\xff\xbf\xfec\x071\x00\xa9\xff\x1e\x039\x06S\xff\xf7\xfd@\x036\x067\x05~\xfe\xe8\x02}\x01b\x02\xe6\x02\x9e\t\xd4\x03\x89\xff\xb1\x06\xf5\x05\x11\x00&\x03\x84\x06\x00\x00\xe1\xfe\x92\x01\x9e\x05f\xffV\xfb\x0c\xfb\x96\x00\x05\xfd\x1b\xfbn\x02\x82\xfe|\xf3\xf1\xff\x86\x02\xef\xf62\x00?\xfb\x12\xf7W\x00\xaa\xfb\xd1\x00\x89\xfd\xc6\xf6\xb6\xfe\xcd\xff;\xfb!\xfc\x81\x03\x14\xfa\x94\xfb\x08\xff\xe6\xfcQ\x01\xc0\x019\xfbH\xfb(\x03 \xfd\x9e\xffl\xfe\xf8\x01\xf4\xfe\xd4\xfdZ\xfe\xb2\x00\xfb\x03\xc7\xfb\x1c\x01\x18\x03-\xfe\x92\xff\x83\x03\xbb\x03\x00\x01\x04\xfe\x02\x01\xc2\x08!\x04\x86\xfd\x10\x01\x8b\x08\xe7\x02H\xff\xd5\x00d\tU\x04\xfe\xfb[\x05f\x02\x8f\x00\x16\x03\xbd\x00\x06\x02\xa8\x01\xbd\xf8\x86\x05\xd2\x03\xd2\xf8g\xfe\x1f\x06\x9f\xf9\xeb\xf9:\x07O\x00\xed\xf1\x9b\xfb`\ny\xf9\xd3\xfan\x00q\xff\x1e\xfb7\xfcG\xff\xcc\x00W\xf38\xfe\x1f\x08\x03\xfaD\xfb\xdb\xfd\x0b\x00\xaf\xfc\xa0\xfc\x83\x00\x16\xfd\xa1\xfa\x04\x05(\xffD\xfc\xd1\xff\x9d\x00\xe9\xfc\x93\xf8\xf9\x05\xf0\x03\x06\xf9\xc4\x00Q\x00\xea\x04\xf4\xfft\xfcb\x03\n\x00\xc7\xffX\x04\x9b\n\xc3\x01v\xfe\xfe\x01Y\x06G\x01 \x02\xa5\td\x05\xbd\xfe6\xff\'\x0c\x87\x00\xa3\xfaZ\x04Y\x03\xdf\xff\r\x02\xd8\x03\xa7\x00W\x01\xdb\xff\xdd\xfc\x86\xfe\xb8\x05I\xff\xaf\xfd\x03\xfd[\x03\xb2\x02\xb9\xfd\xa7\xfa\xf0\xf9\x05\x06\xcc\x02\xec\xf8\xb1\xfb=\x08\xc2\xfb\xf0\xf3\x0e\x03\x95\x05f\xf6Y\xf7N\x04\xbe\xfe\xe6\xfc\xd0\xfc\xb1\xff\x93\xfe\xb9\xf7\x10\xfd\x90\x01\x88\xff\x9b\xff\x03\xff\xd9\xfc\x08\xfc\xcf\x01:\x03T\xfeX\xfc}\xfdz\x06;\x03O\xfb\xf8\x01\xdb\x04T\xff\x8d\xff\x83\x06\r\x02\xa5\xff\xdc\xfeJ\x08\x1e\x03\xbe\xfc\x14\x055\x00\xbc\x00\xab\x02K\x05\xea\xfd\x82\x00G\x04%\x00\x11\x01\xeb\xff\xd8\x03\xee\xfdD\xff\\\x05\x8c\x00\xcb\xfd\xcc\x02\\\x00\x19\xfa\x98\x07\x86\x006\xff\x0c\xfe\xb4\xf7]\x0bS\x03\r\xf4\x9c\x00\x99\r]\xf6)\xf3\x18\x08J\x08U\xf6\x96\xf7\xf5\x01!\x00\xf8\x01\xb2\xf8\xe9\xfb\xf8\xfax\xfa\x86\x04\xb4\xff\x14\xf9\xd4\xfc 
\x02\xc9\xfb0\xfe\xae\xfe\x7f\x01L\xfe\x85\xff\xb6\x01\x91\x00\x12\xfde\xfc\x0e\x07g\xfe2\xf8\xb5\x03\xa2\t\xfc\xfb\xe2\xf7\xfc\x04_\x02K\xf7\xad\x04\xe2\n}\xfc\xd7\xf8W\x07\x06\x05\xab\xfb\x9a\xfe\xc6\x07&\x05\xb8\xfd:\x05\xf2\x03\xd1\xff\x07\x00k\x05Q\xfe\xe5\x04\xfd\x061\xf9\x07\xf9\xe7\x07Y\x0c_\xf3\x82\xfa\xef\x08\xb6\xfa\xba\xf8\xa8\x03\xf9\x07\x1a\xf8\xc8\xf5\xa5\x06d\x03v\xf8\x95\xfdC\x02\xcd\xfc\x97\x02\xdf\xfd\xce\xfc\x06\x05\x94\xfc\xf4\xfd\x1c\xfe\xdd\xff\n\x01\xde\x00\xa4\xfb(\xfe\xe3\xfc\xea\x00R\x04}\xf4\x03\xf9{\x01?\t\x95\xf62\xf5.\n\x1d\x03{\xf2\xac\xfc\xff\r\xa6\x02\x05\xf5k\x01\x9f\x0e\xfe\xfa{\xf9x\x08\xf5\x08N\xfd\xb6\xfc*\x08e\x07G\xfd+\x03\x19\xff\xe0\xfeG\x05\xf0\x02\x86\xfei\x04\xac\x03\xbc\xf6l\x01\x81\x07`\xfe\xa9\xfa_\x05\x8c\x00\xfa\xf6~\xfe\xe3\x07\x9b\xfdl\xfbl\x03\xa8\xff\xdb\xf9Y\x02\xff\x01\x80\x00\x06\x00\xeb\xfc\x19\x08\xae\xff)\xfb"\x01\x8f\x04m\xfc\x99\x02\x01\xfd\x8c\xfc\xb9\x03\xb6\xf9\x0c\xfc\xc6\x029\xfd@\xf6\xda\xfe\xa4\xfc\xbb\xf9C\x00\xaa\x00o\xfa\x1a\xfcJ\xff\xd4\xfch\xfb@\x08\xb8\xff\x8e\xf3\'\x03\x18\x074\x00z\xf9\xfc\xff]\xff\xb1\x02A\x03\x9d\x01\xcc\x034\xfd\xc7\xff\xa5\x04\x84\t\xef\xfe3\xfb\xe6\x06)\x07\x83\x05c\xfa\xc4\x05\xc7\x05|\xf8\x98\x04o\t\x1a\xff\x9d\xff\xd6\x01(\x02\xda\xfe[\xfd\x8a\x05>\xfe\xf9\xf9\x8f\x03"\x05\xe7\xf7T\xfc\xd6\xfe\xa0\xfb\xb0\xfc\r\x08\xe9\x00\xbf\xf4\xb9\xfe\xd2\xfeY\x01\xcf\xfd\xc8\x03\n\x02-\xf3F\x02\xc4\x0e\xa5\xfb\xc2\xf53\x02G\x07\xbb\xfa\x90\xfe\x9f\x0e\x05\x04\x91\xf2\xe2\xf7\x8b\x04P\x11`\x01V\xf3\x14\xf8\x86\x06:\x07=\xf7\xb5\x03;\x00\xb1\xf7\\\xf6\x95\xfd<\r\xde\x06\xc6\xf2\xb2\xf5\x83\x03\x91\x04\xa1\xfa\xe4\x01\xe1\x02U\xf8(\x03\x9b\x08\x16\x02\x0e\xfd\xf3\xff\xa6\xfaK\x04\xdf\x07X\xfe\xf0\xfeG\x05\xde\x00\x85\xfd\x03\x06^\xfd\x0f\xfd\x87\x03\x12\x02\xc7\x03\xb8\x02\xa0\xfcn\xfa\x80\x04k\x03\xc0\xfbQ\xfdP\x08\xd7\x00l\xf6\xc7\x00M\x01r\xfc|\x01]\xfd-\xfdV\x03\xd7\xfbL\xfcl\xf9\x0b\x02\x81\x0cm\xf3B\xf2 \x0b\x93\x07\xac\xfdj\xf2\xe6\x00\xa6\n\x1a\xfa\xf8\xf9d\x05(\x0b\xf8\xfc\xb3\xf0p\x00\x15\x0eo\xfd\xc2\xf2S\x02\xc1\x0e\x0c\xfb\x84\xf4D\x06Y\x08L\xfe\x01\xf5\xa7\xffh\x0fn\x080\xfa\xda\xf6u\x00n\x0c@\x03h\xfa\xf5\x01|\x06\x01\x00\xdf\xfb\xec\x03L\x05\xcb\xfd\x1b\xf7\x94\xfd\xbc\x0eA\x04\xf9\xf4\x8d\xfb/\xfdY\xff-\x03}\xfd}\x00\x03\xfe\x0f\xfc\xbf\x02\x8e\x02\x9b\xf9[\xfc+\x02g\x01\xd0\x073\x00\xc9\xfd\xa2\xf7\xc0\xf9+\tg\x04\x16\x02\xbc\xffw\xf8P\xfea\x07\xdc\xfe\x8e\xfcz\xfc"\x00\xbc\x02\xb1\xffI\x05U\x00\x8b\xf5m\xf9-\x07\n\x05\xe0\x01\xf9\xfa\xdc\xfa\x81\xfb6\xff\xd5\x05\x1e\x028\x02\xdd\xf8\x90\xfbU\x02[\x03\xbd\x00W\x01`\x00\x1e\x00%\x02d\x04a\x04m\xfcC\xfc*\xfe\xad\x00+\x08\x8f\x04\xc5\xfb\xb7\xfc\xd3\xfd\xb9\x00@\x00d\xf94\xfe5\x05\x9a\xff]\xff\x1e\x04\xca\x02\xed\xf8\xff\xf4\x11\x03`\r\x0b\x04\xeb\xfc\xbb\xfb\x15\xfd\xac\x00\xf2\xfe:\x04\xb2\x00\xa1\xf9\xb1\xfe~\x011\x04[\x04\x96\xfc%\xf3<\xfb\x8a\x0b\x10\x07\xd6\xffV\xfd\xa9\xf9\x96\xfby\x00\x16\x06s\x04\x86\xfd\x87\xfb\xa1\xfe\xae\x01\xa2\x04D\x01o\xfb\xed\xfd\x9c\x03\xa8\x02"\x05\xdd\x01\xdc\xf9\x82\xfcD\x01\xed\x04\x15\x01?\x00\xfc\xffl\x00\xa9\x01O\xff\x07\x00\xf2\xff\xa8\xfe\xdf\x00\x93\x00\xd3\x01\x87\x03k\xfc\xb8\xfa)\x00\xc3\x01\x18\x02_\x01\xd4\xfd\xd7\xfc.\xfd<\xff\xb8\x00\xff\x05 
\xfd\xe5\xf7\xb2\xff\xf3\x04)\x03\xa7\xf9\x1d\xfdL\x00\xe7\x01\xb9\x01?\x03\xb7\xfdD\xfc\x94\xffj\x00:\x05:\x04\xd4\x00\xc2\xf85\xfd;\x031\x03\xbf\x03[\x00B\xfd|\xfd\x8c\x00\xd1\x01\xcf\x00\xe2\xfd\x8b\xfdV\xffB\x03*\x06{\xfeg\xfa)\x00\x9f\x01\xd3\xfee\x01\x92\x03\xb6\x02\xf8\x00\xcb\xfdp\xfe7\x00\xd4\xff2\xff"\x04\xdf\x025\xfd\xe6\xfd\xe9\x00J\x01\x8f\xfb{\xfd!\x02$\x03\xb5\x02\xc1\xfd_\xfb\n\xfe\xfb\xfe6\x00g\x02\x81\x02\x8f\xfe\x94\xfcx\xfe\n\x00K\xfe<\xfd\x19\x00+\x03w\x02\xd7\xfe\xeb\xfb\x8d\xfc\x83\xfe*\xff\xd8\x00V\x02\xdc\x02}\xfdO\xfa\xba\xfe\xbf\x01(\x00\x9b\x00\xbb\x01\xb0\x00\x03\x00\x91\xfe4\xfd\xd8\xff\xf7\x01R\x00 \x02\xd2\x02v\x00(\xfd\xa5\xfcf\x00\x19\x02\xad\x01\x94\x00\x00\x01\xa1\x01r\xfe\x17\xfe\xfe\xfe\xb2\xff\xb3\x01\x12\x01h\x02<\x03\x19\x00P\xfc\xc6\xfb\xad\x01\x8a\x05q\x03\xb2\xff\x86\x00\xb8\xff\xe1\xfd5\xff\xe2\x01\x16\x02g\xff\xef\xfe]\x016\x02\xdf\xfdZ\xfcE\xfe\x19\x00\x95\x01\xd7\x00e\xff\xc0\xfd\xc2\xfb\xbf\xfe\xed\x00&\x00\x03\x01\x8d\xfe/\xfd\xbd\xfd\x9f\x00\xf3\xff\xb7\xff\x03\x02/\xffq\xfc\xa8\xfd\x97\xfe\xbf\xfd\xe3\xfew\xff\xf2\xffM\xfc\x98\xfb\x85\xfd\x02\xfd3\xffr\xff\xcc\x01\xd1\x02T\x03\xd5\x01L\x02\xf5\x04\x85\x07{\n^\n\x0c\x0c\xc0\n3\n,\x0b%\x0b\xd8\x0bI\x0b.\n\x02\x0b\xfe\x07}\x05\xeb\x03\x9e\x00\\\x00\x9d\xff\x0c\xfe@\xfb\xe5\xf9\x83\xf6d\xf4T\xf4\xa7\xf4Z\xf4\xa0\xf3G\xf4\xba\xf2\xee\xf2w\xf4\xa9\xf6\xdf\xf8\x85\xf9<\xfb.\xfc-\xfe\xb8\xff!\x00S\x03\xe4\x04\x0c\x05\xc1\x051\x07\xf4\x06U\x05\xf9\x04p\x04"\x050\x04X\x02\x12\x01\xc8\xfd5\xfcw\xfb\t\xfa\xf5\xf8\xcf\xf7\xf3\xf6\xc0\xf4\x0f\xf5V\xf4\xbe\xf4C\xf4\n\xf5\x95\xf6\x89\xf64\xf8r\xf7\x1f\xf9\xf9\xfae\xfcs\xfe\xef\xfe\x96\x00\xb2\x00\x9b\x00B\x01\xdd\x02\xff\x04\xe6\x03a\x03\xc4\x02\xbe\x02\x1e\x03\xbd\x02\x1e\x03\xfe\x01z\x01\xa3\x01=\x00\xca\xff\xaa\xff\x11\x01\xac\x01\xd3\xff\x1a\xff\xef\xfd\xfa\xfeg\xfd\xfc\xff\xd4\xff\xbd\xfb\x9c\xfd\x9d\xfdh\x00\x11\x01\xc0\xff\x84\xffk\xfcO\xfd\xd6\x08B\x14m\x16~\x0f\t\t\xd1\x0e|\x182\x1f\x0f \xaf\x1f\x98!\x8a\x1f\xf3\x1e}\x1e\x92\x1a\x8c\x16\xf6\x11v\x16\xb1\x18\x1b\x11>\x06\xd1\xfb\x05\xfa\xba\xf9\x9d\xf8\xa9\xf7\xe5\xf1\x9b\xea\xdc\xe4\xbb\xe4\xaa\xe5\xd2\xe5B\xe4\x83\xe5\x90\xe7\xd6\xe8\x97\xea\xa8\xe9+\xeb\x80\xeeL\xf4<\xfaO\xfd\x84\xfe\xc5\xfb\x9e\xfb\xd6\xffw\x04\xa0\x07\x85\x07t\x06\xba\x04<\x03\x12\x03A\x02X\x01\xff\xff\x0f\x004\xff\x97\xfdE\xfa\xba\xf6\xe3\xf4|\xf5\xa4\xf7\xef\xf8\xe0\xf7>\xf52\xf3{\xf3{\xf6\xbc\xf8\xdf\xfa\t\xfcD\xfc\x96\xfd\xae\xfe\xb4\xff\x00\x01\xc3\x02\x98\x05k\x07\xa2\x08i\x08\x86\x07!\x07\xc0\x07}\t\xb1\n\xbf\nl\t\x18\x07\xa3\x05\x90\x05\xbf\x05[\x05\r\x04\x84\x04\xf1\x03\xa2\x01T\x00\t\xff\xa7\xfe\x0c\x01\xa0\x02\x92\x02A\x005\xfd+\xfe\xef\xfe\xac\x01g\x03\xea\x03\xf2\x00\x11\xffK\x02n\x01[\x02\xf7\x00\xa3\x01P\x03\xf0\x00\xda\x04\x90\x03~\xfe\xf5\xfb^\xfc\xf9\x02\xa9\x01\t\xff\x19\xfd\xea\xf9\xdd\xf8\xdb\xf7\x14\xfb\x13\xfc!\xf8\x08\xf5#\xf5\x14\xf70\xf6&\xf5\xbc\xf58\xf6\xef\xf5\x91\xf67\xf8\x99\xf8\x11\xf7D\xf7W\xfa\xd3\xfc]\xfc6\xfb\xdd\xfb\x1d\xfc\x9a\xfc\xb3\xfe\x9f\x00X\x00\xbc\xfe\xae\xfd*\xff@\xff\xd4\xfe\x9f\xfe\xca\xfd\xcd\xfd\xdf\xfd\xf5\xfc\x1a\xfc~\xfb\xf3\xfbh\xfc0\xfb 
\xfc\x95\xfb\x9d\xfey\x08\x19\x10s\r.\x04\xd9\x03"\x13\x8c\x1f\x96%M&\xe2!*\x1d\xf0\x19\xfe#\x99.\x04/\x07&\xa1\x1d\'\x1b\x07\x18\x8c\x15\xc5\x12\xb8\x0e]\x08\xf6\x02\xf5\xfe.\xf8\xfc\xef\xa6\xean\xe9\x85\xebo\xeb\x9d\xe8\xdf\xe1\x1d\xdd6\xdeL\xe4\xe7\xea\xb1\xee\x89\xef\x11\xedZ\xec\x81\xf0\xcf\xf8\xd0\xff\x1f\x02\xb3\x02\xee\x02\xb9\x04\xd2\x04J\x06\xf0\x08\xa0\t4\tD\x07*\x06r\x03\xcc\xfd\x91\xfb3\xfc\xdc\xfc\xdf\xf9^\xf5\xc5\xf1o\xee\x16\xec\xac\xec\x9a\xef*\xf0\x94\xedq\xeb\xd4\xec\xd8\xee\xd6\xf0 \xf4\x88\xf7\x17\xfa\x87\xfa\xd2\xfc\x00\x007\x02(\x04>\x07E\x0b\x9a\x0cF\x0c\xee\x0b\xc2\x0c\xae\r\x18\x0ed\x0f\xb0\x0f5\x0eO\x0bh\t%\t\x97\x08*\x08\xa2\x06\x07\x06\xf0\x03U\x01\x94\xff<\xff\xa3\xffW\xff\xff\xfd\xc1\xfd\xf2\xfc\xa8\xfb\x84\xfb\x93\xfbU\xfd\x93\xfd\x9c\xfd\x12\xfd(\xfd\xc8\xfc\xce\xfdw\xff\xd9\xff\xd4\x00\xa6\xff\x83\x00\xfd\x00\x91\x01\x06\x02|\x03\x16\x04;\x03\xcc\x02\xc4\x02\xee\x03\xae\x03B\x04\x01\x04\xba\x02\x9c\x01O\x00\x05\x01\x02\x01[\x00b\x00\x08\xfe\xee\xfc\xf5\xfa\xf9\xfcm\xfe\xf9\xfe\x17\xffo\xfcM\xfc\xb9\xfa\xe2\xfd\xab\xff\xc2\x00\xde\x00\x84\xfeF\xfe\xd4\xfc>\xfd\t\xfeR\xffq\xff\xc1\xfd\xd0\xfa!\xf9\'\xf9\xca\xf9{\xfaa\xf9\x01\xf9\x03\xf8\\\xf6C\xf6y\xf6\xeb\xf7\xf4\xf7\x07\xf9`\xfa\xd3\xf9\xcf\xf8\xbc\xf8Q\xfd\x0c\xffP\x01\x08\x03\xd4\x02\x80\x02\x03\x01\x01\x07\xe2\nN\x0b\x99\x0c\xf0\r\x06\x0e\xc9\x07\x8b\x06E\x0c\x91\x12\x86\x14\xfe\x10\x92\x0c\xa9\x04\xc6\x01V\x08V\x11\xbe\x12u\t\xee\x00\xe7\xfej\x01R\x06i\tC\x08\xee\x03\xdd\xfe\xa0\xffY\x02\xc2\x03)\x04\x03\x03:\x05\xf1\x05\xa5\x05V\x04\xd4\x01Y\x02\xf7\x04]\x07T\tt\x05\x15\x00\xd9\xfb\xbc\xfa\xf0\xffX\x00d\xfd\xf6\xf6\xbc\xf1s\xf1\xf1\xef\x9a\xf1d\xf2\xad\xef\x1e\xec\x96\xe9\xf9\xeb\n\xefc\xee\x06\xef\xae\xf1\t\xf3\xab\xf2\xb0\xf3\x86\xf7\x03\xfbT\xfb\xe4\xfby\xff\x87\x00\t\x00\x1e\x00\xb0\x02\xc4\x04\x08\x03\x84\x02n\x03\x8c\x03\x82\x01\xc4\x00\xd2\x01B\x02j\x00\x95\xfe\xc1\x00\xf8\xfeG\xfd\xac\xfe,\x02-\x03\xee\xfe\xa1\xfdS\x020\x05\x13\x05\x0b\x05\xf2\x05\x9f\x06\x1d\x04c\x06\r\t]\x08\xa2\x05\'\x03\xe3\x04\xcc\x04\xbb\x02\xe0\xff<\xff\xbc\xfeI\xfd`\xfc\t\xfd\xe9\xfb\xb7\xf9J\xf9\xe0\xfb\x85\xfce\xfb!\xfe\x1e\x01\xfc\x01\xb0\xfe\xe5\xff\xe1\x05O\x08\x9b\x07\xbf\x06\xef\x07\xef\x06\xed\x05)\tZ\x0b\xda\x06\xad\xff\xc3\xfe_\x03\x1f\x04y\x00_\xfb\xd1\xf7\xb3\xf5\xc0\xf5\x03\xf9E\xf9!\xf5\xd6\xf0_\xf2\xdc\xf5U\xf6+\xf6\x03\xf7\x19\xf9\xbd\xfac\xfe\xde\x01\xae\x00l\xff\x18\x02\xff\t/\x0ey\x0e\xb2\x0e\xf5\x0b9\t\xdc\t\xba\x10R\x16\x14\x12\x98\x0b\xf1\x06\x8d\x04\xd0\x04-\x06O\t\xc8\x05\xfb\xfc\xe4\xf6\x01\xf8\xcc\xfc\xe6\xfd\x14\xfc\xe9\xf9K\xf8\x1f\xf7\x10\xfa?\xff\x0b\x02(\xff7\xfd\x94\xff\xc4\x02W\x04\xfd\x04\xd6\x05u\x03\xdc\x00d\x02\x04\x06&\x06\xce\x02\xdd\x00E\x00\xd9\xff\x84\x00\x1b\x01,\x00T\xfd\x87\xfc\x93\xfe^\xfee\xfd\xba\xfcS\xfc.\xfc\x9c\xfbH\xfe$\xff\x98\xfcL\xfa\xfc\xfaR\xfd\x87\xfd\xa2\xfd\xe8\xfd8\xfdV\xfa`\xfa`\xfdK\xfe\xd5\xfc\x9c\xfa\x11\xfb&\xfb\xce\xfa\x01\xfb\xdd\xfb=\xfc\xb5\xfa\xf2\xfaH\xfc\x1a\xfd\xad\xfc\xe8\xfc\xd8\xfe\x9a\xff\x7f\x00\x8d\x00\xbc\x01\x9d\x02\x84\x02\xcd\x03:\x05\x13\x06\xbf\x05\x14\x05v\x05\xba\x05\xc4\x05\r\x06\xee\x05H\x04\x7f\x02o\x012\x02%\x026\x00f\xfe\xeb\xfc\x06\xfd~\xfcU\xfc\x93\xfc\xc4\xfbI\xfb\x87\xfb\x10\xfee\xfeJ\xfe\xcd\xff\x9a\x02\x99\x03\xb5\x02\x7f\x03)\x06\xfb\x07\xe4\x07\xd2\x08\x92\x08\x91\x07\xc2\x05F\x06\x94\x07h\x06\x85\x03P\x01\xdb\xffK\xfe\x8b\xfd\x0c\xfcC\xfbI\xf9;\xf7\x9d\xf6K\xf6f\xf6\x83\xf5\x99\xf5\xf6\xf5\xf4\xf57\xf6\x8e\xf6\xb2\xf7{\xf8\x10\xf9\x01\xfa\xe0\xfa\xc1\xfb0\xfcx\xfdy\xfe\x81\xff 
\x00\xa8\x00O\x01N\x01\x03\x02\xda\x02E\x035\x03\xe5\x02\xd5\x02\xd3\x02+\x03\xf4\x02\xe4\x02\x84\x02R\x01\x82\x01\x98\x01\x13\x020\x01e\x00\xf6\xff\x08\x00\x00\x00\x04\x01\x14\x01"\x00\xcd\xff`\xffK\x01\x8b\x01\x9f\x02\x12\x02\x83\x01\x02\x01\x00\x014\x02\xa4\x02\x9e\x02y\x01\x85\x00\xa5\xff\xb1\xff\xf6\xfd\x94\xfd\x05\xfd\xe7\xfc\x92\xfc,\xfc\xcb\xfd\x03\xfdo\xfd]\xff\xf8\x02\x17\x05\x88\x06N\tx\x0c\x14\r2\x0e\xc3\x12&\x17\xdc\x17\xa2\x15\xdf\x15w\x15\x08\x148\x13\x7f\x14e\x13F\x0c\xee\x05\x87\x02_\x01i\xfeD\xfc\x04\xfa\xbc\xf4L\xed\xf6\xe9\xd4\xeb\x89\xed\xfa\xecf\xeb\xaf\xea_\xe9V\xe9\x02\xedd\xf2\xdf\xf5:\xf6\xd6\xf6m\xf89\xfb\x82\xfe[\x02\xdf\x04\xca\x04\xd0\x03\xfe\x03\x7f\x052\x06R\x06\xa1\x05\x9c\x03\x90\x01\x08\x00\xea\xff&\xff`\xfd\x97\xfb?\xfa\x1c\xf9\x88\xf8\xd2\xf8\xd3\xf8\xe5\xf7\xe9\xf6s\xf7\x03\xf9\'\xfa\x1d\xfb\xee\xfb%\xfc\x8b\xfc\xda\xfd\x1a\x00\xef\x01\xa3\x02\xc3\x02\xe3\x02\xff\x02\xea\x03o\x05/\x06\r\x06.\x05\x80\x04/\x04O\x04\xc2\x04\xba\x04\xc8\x03\xc0\x02%\x02\xba\x01\x85\x01\x96\x01\x8d\x01(\x01\xaa\x00\xa0\x00\xfb\x00\xb6\x00\xd1\x007\x01\xd8\x01*\x02"\x02\x1a\x02\x04\x02\xf6\x01#\x02b\x02\x84\x02\xe7\x01\x0e\x01+\x00\xe6\xff\xb0\xff1\xff\xfe\xfe=\xfe\x81\xfdI\xfdQ\xfd4\xfd\xc2\xfds\xfe\xab\xfe\x18\xff\xc2\xff3\x01\xd0\x01e\x02\x86\x03H\x04\x04\x05\x00\x05<\x05\xae\x05;\x05\xc8\x04]\x04h\x03K\x025\x01\x9e\x00\xc6\xffn\xfe\r\xfd\xc3\xfb\r\xfb\x98\xfa9\xfa\'\xfaZ\xf9\xba\xf8\x9b\xf8\x18\xf9\xf6\xf9S\xfaV\xfa\x88\xfa\xe4\xfa\xc6\xfb\xcf\xfc\x87\xfd=\xfe\x97\xfe\x04\xff\xe2\xffZ\x00/\x01\x89\x01\xd8\x01\xf8\x01\xd0\x01\x1e\x02=\x02\xeb\x01\xb1\x01f\x01\xfd\x00j\x00\xee\xff\xa8\xff\xa8\xff-\xffu\xfeK\xfe\xe0\xfd\xb7\xfd\x05\xfe{\xfe\x7f\xfe"\xfe\x0e\xfe\xad\xfe7\xffF\xff9\x00\xb6\x00\xa5\x00\x7f\x00\xf9\x00\xe3\x01\x0e\x02<\x02\xd1\x02\x13\x03\xc2\x02\xac\x02\n\x03\xef\x02\xd0\x02\xae\x02\x1c\x02A\x01\x9f\x00\xb0\x00\xf7\xff"\xff\xdb\xfe\xa1\xfe\x91\xfd\t\xfd(\xfe\xaa\xff\x83\x00\xcc\x01\xaa\x03~\x04\x86\x04\xc8\x05.\nS\r\x96\x0e\xbf\x0e\xd7\x0ev\x0e\x0f\x0e\xb1\x0fU\x11\'\x10n\x0c\x00\t\xba\x060\x05\xd7\x03\xa2\x02\xa1\xff6\xfb\x98\xf7\xb9\xf5K\xf5\xc1\xf4\xdd\xf3\xb7\xf22\xf1H\xf0\xa2\xf0\xec\xf1\xbf\xf3\x91\xf4\xee\xf4\x81\xf5\x9f\xf6~\xf8\x1d\xfa\x85\xfb\x8a\xfc\xe2\xfc\x9a\xfd\x9e\xfe\xb3\xff\x9b\x00\x8b\x007\x00\x04\x00\x1a\x00\xac\x00|\x00\n\x00i\xff\xbc\xfe^\xfe7\xfeE\xfe\xc5\xfd\xe2\xfcB\xfc*\xfcD\xfcV\xfcl\xfcz\xfcW\xfcx\xfc\x01\xfd\xd5\xfd\x81\xfe\xe3\xfeS\xff\x06\x00\xa8\x00)\x01\xbc\x01g\x02\xc5\x02\xdb\x02#\x03}\x03\xa0\x03\xa6\x03\xd1\x03\xf8\x03\xfd\x03\xd1\x03\xc0\x03\xb5\x03\xbc\x03\xe2\x03\xf8\x03\xf0\x03\xbd\x03\x8e\x03l\x03\x7f\x03\x8e\x03v\x03-\x03\xce\x02p\x02A\x02\x0f\x02\xcc\x01k\x01\xf4\x00\x8e\x00\x1d\x00\xd8\xff\x9d\xff;\xff\xe8\xfe\xb8\xfe\x86\xfeV\xfe=\xfe*\xfe\x1c\xfe\x1a\xfe3\xfe\\\xfeo\xfex\xfee\xfe]\xfe\x93\xfe\xc6\xfe\x06\xff(\xffM\xff~\xff\x94\xff\xc4\xffG\x00\xea\x001\x01\'\x01n\x01\xdc\x01v\x02\xef\x02\x89\x03\xa0\x03\x03\x03\x81\x02\x94\x02\xef\x02\xa1\x02\xe9\x01R\x01\xba\x00\xcd\xff\x1e\xff\n\xff\xc7\xfe\xfc\xfd1\xfd\x0c\xfd$\xfd\xd2\xfc\xae\xfc\r\xfdA\xfd\x02\xfd\n\xfd\x87\xfd\xdf\xfd\xe1\xfd\xda\xfd=\xfe\x85\xfe\\\xfel\xfe\xbf\xfe\xd2\xfe\xa6\xfe\xb2\xfe\x14\xffM\xffK\xff9\xff\xa4\xff\xc4\xffo\xff\x81\xff\xca\xff\xe1\xff\x8d\xffa\xfft\xff:\xff\xdf\xfe\xc1\xfe\xd4\xfe\xdb\xfe\xa5\xfe\x81\xfe\x8e\xfe\xd6\xfe\x1d\xffZ\xff\xad\xff\xed\xff\x1f\x00R\x00\x8f\x00\xd6\x00\xf8\x00\x12\x01\x1d\x01\xfd\x00\t\x01\xe1\x00\xc4\x00\xb1\x00\x86\x00\x89\x00l\x00W\x00\\\x00d\x00\x7f\x00k\x00k\x00}\x00F\x005\x006\x002\x00\xfd\xff\xa1\xff\x8b\xf
fp\xff\xd8\xfez\xfe\xc2\xfen\xff:\x003\x01t\x02T\x03!\x04^\x05c\x07\x8e\tJ\x0b\x91\x0cH\r\x96\r\xe2\r\x1f\x0eV\x0e\x06\x0e\xb0\x0c\xa8\nh\x08\x8a\x06\xc1\x04\x95\x021\x00\xa8\xfd\x1f\xfb\x9b\xf8\xd0\xf6\xfb\xf55\xf5\xfa\xf3\xec\xf2z\xf2\xc4\xf23\xf3\xdf\xf3\x05\xf5\xf6\xf5\x8c\xf6W\xf7\xc5\xf8d\xfaS\xfb\xfc\xfb\x0b\xfd\t\xfe\x97\xfe\x0e\xff\xbc\xffI\x00)\x00\xf7\xff]\x00\xbc\x00x\x00\xfe\xff\xdc\xff\xdc\xffu\xff\x15\xff\x11\xff\xef\xfe\x1f\xfeb\xfdp\xfd\x9e\xfd^\xfd\x0c\xfd\x10\xfd8\xfd\x1a\xfd6\xfd\xe3\xfd\x81\xfe\xa5\xfe\xbf\xfeS\xff\xf7\xffY\x00\xdd\x00k\x01\xb3\x01\xc9\x01\n\x02k\x02\xbe\x02\xcf\x02\xd7\x02\xf1\x02\xeb\x02\xde\x02\xe0\x02\xf8\x02\x08\x03\xf5\x02\xe7\x02\xf4\x02\xe1\x02\xca\x02\xe0\x02\xfa\x02\xed\x02\xcb\x02\xcc\x02\xaf\x02g\x02%\x02\xf2\x01\x9f\x010\x01\xc2\x00Y\x00\xda\xffM\xff\xd3\xfey\xfe\x13\xfe\xac\xfdg\xfd;\xfd\x1c\xfd\x06\xfd"\xfdu\xfd\xb9\xfd\x04\xfek\xfe\xeb\xfeg\xff\xe0\xffe\x00\xe7\x00W\x01\xad\x01\xfe\x01J\x02l\x02i\x02\\\x02Q\x02+\x02\xeb\x01\x96\x013\x01\xc7\x00o\x00\x1e\x00\xcb\xffn\xff@\xff+\xff\xe6\xfe\xd5\xfe(\xff\x98\xff\xa6\xff\xa3\xff\t\x00r\x00\xbb\x00D\x01o\x02)\x03\xd4\x02\x99\x02\n\x03P\x03\xf9\x02\xc9\x02\xf7\x02z\x028\x01e\x00m\x00\xf9\xff\xd7\xfe\x0f\xfe\xd1\xfdC\xfd]\xfc\x12\xfct\xfcC\xfc\xa0\xfb\x97\xfb\x1c\xfcU\xfcD\xfc\x8f\xfc8\xfdt\xfdU\xfd\xbd\xfdq\xfe\x7f\xfeA\xfe\x82\xfe\x02\xff-\xff\'\xffb\xff\xd3\xff\xe3\xff\xd4\xff=\x00\xd5\x00\x16\x01\x16\x01<\x01\x85\x01\xa3\x01\xa9\x01\xc5\x01\xc4\x01\x8e\x015\x01\xfe\x00\xdc\x00\xb1\x00u\x00*\x00\xed\xff\xb7\xff\xa6\xff\xac\xff\xb7\xff\xcd\xff\xd5\xff\x00\x00<\x00v\x00\xaf\x00\xd0\x00\xe8\x00\xee\x00\xfe\x00\x06\x01\xd8\x00\x83\x005\x00\xe3\xff\x84\xff\x13\xff\xaa\xfeE\xfe\xb6\xfd%\xfd\xc6\xfc\x93\xfcS\xfc\xfb\xfb\xcf\xfb\xc6\xfb\xb8\xfb\xca\xfbI\xfcO\xfdj\xfew\xff\xb8\x003\x02\xbb\x03E\x05=\x07q\t<\x0bc\x0cF\r\x1b\x0e\xc4\x0e\xed\x0e\x08\x0f\xd8\x0e\xe4\r.\x0c?\n\xaa\x08\xf1\x06\xcf\x04\x7f\x02/\x00\xd7\xfdi\xfb\x86\xf9]\xf8U\xf7\x13\xf6\xff\xf4|\xf4w\xf4\x97\xf4\n\xf5\xd4\xf5\x8f\xf6\x18\xf7\xd7\xf7\xfb\xf8A\xfa.\xfb\xf8\xfb\xef\xfc\xb6\xfdD\xfe\xcb\xfeW\xff\xb7\xff\xaa\xff\xa3\xff\xdc\xff\xfa\xff\xc3\xff\x82\xffl\xffW\xff\x08\xff\xc9\xfe\xc9\xfe\xa9\xfe4\xfe\xda\xfd\xcf\xfd\xc0\xfd\x9a\xfd\x87\xfd\x91\xfd\xa4\xfd\x9e\xfd\xba\xfd\x18\xfev\xfe\xbf\xfe\x07\xff[\xff\xb3\xff\xf2\xffJ\x00\xa0\x00\xd1\x00\xfd\x00+\x01M\x01g\x01v\x01\x90\x01\xa3\x01\xa1\x01\xa5\x01\xbb\x01\xd0\x01\xe4\x01\xfe\x01(\x02R\x02e\x02\x86\x02\xc7\x02\xe9\x02\xfb\x02\x19\x035\x03&\x03\xf5\x02\xcf\x02\xb9\x02m\x02\x03\x02\xac\x01P\x01\xd0\x00C\x00\xce\xff]\xff\xd6\xfeU\xfe\xff\xfd\xba\xfdw\xfd;\xfd*\xfd9\xfd?\xfd]\xfd\x9e\xfd\xed\xfd3\xfe|\xfe\xe1\xfeM\xff\x98\xff\xea\xffW\x00\xc4\x00\xff\x006\x01\x85\x01\xca\x01\xe4\x01\xf8\x01\x17\x02 
\x02\xfe\x01\xf0\x01\xff\x01\xf3\x01\xb6\x01|\x01a\x01A\x01\t\x01\xd5\x00\xb4\x00\x87\x00/\x00\xef\xff\xd8\xff\xc2\xff\x94\xfff\xff^\xffW\xffB\xff1\xffH\xffk\xffi\xffm\xff\x94\xff\xc7\xff\xe3\xff\x05\x002\x00R\x00Y\x00d\x00\x7f\x00\x8c\x00|\x00t\x00m\x00X\x00A\x008\x00\'\x00\xff\xff\xcc\xff\xc5\xff\xb8\xff\x8f\xff\x80\xff\x8f\xff\x82\xffS\xffO\xffm\xffp\xff^\xffh\xff\x95\xff\x96\xff\x89\xff\xa5\xff\xdd\xff\xf1\xff\xe8\xff\xfd\xff5\x00Q\x00Y\x00q\x00\xa7\x00\xc8\x00\xc6\x00\xe8\x00%\x01M\x01V\x01S\x01x\x01\x91\x01\xa1\x01\xc0\x01\xfc\x01\xf5\x01\xb2\x01\x92\x01\x8f\x01h\x01#\x01\xe4\x00\xb2\x00.\x00\xa1\xffD\xff\r\xff\xa7\xfe)\xfe\xd2\xfd\x93\xfd5\xfd\xd7\xfc\xbb\xfc\xca\xfc\xad\xfc|\xfc\x85\xfc\xbb\xfc\xcb\xfc\xc5\xfc\xfe\xfco\xfd\xb5\xfd\xe1\xfdU\xfe\n\xfff\xff\x81\xff\xe1\xffe\x00\xa8\x00\xb8\x00\x03\x01j\x01b\x01\x1f\x01\xfc\x00\xf9\x00\xce\x00\x94\x00u\x00X\x00\t\x00\xa6\xffk\xffd\xff[\xffC\xff\x1f\xff\xf4\xfe\xd4\xfe\xd4\xfe\xe7\xfe\t\xff$\xff.\xff#\xff4\xffd\xff\xa0\xff\xdb\xff\xf4\xff\r\x00,\x00B\x00}\x00\xbd\x00\xfa\x00\x1c\x011\x01M\x01~\x01\xad\x01\xc6\x01\xdb\x01\xf3\x01\xf7\x01\xdb\x01\xdb\x01\xf9\x01\xef\x01\xb5\x01u\x01Q\x01\x07\x01\xaa\x00\x80\x00f\x00\xf9\xffk\xff.\xff"\xff\xf8\xfe\xd2\xfe\x0f\xff]\xffJ\xffN\xff\xce\xfft\x00\xe7\x00y\x01A\x02\xe6\x020\x03\xb3\x03\x8a\x04\x1b\x05J\x05\x8e\x05\xee\x05\n\x06\xe3\x05\xc7\x05\x96\x05\x0b\x054\x04y\x03\xd0\x02\xf8\x01\xdb\x00\xbd\xff\x9c\xfek\xfd`\xfc\x93\xfb\x00\xfbb\xfa\xa9\xf9%\xf9\xdd\xf8\xda\xf8\x10\xf9r\xf9\xd7\xf9.\xfa\xa0\xfa6\xfb\xf7\xfb\xc7\xfc\x8b\xfd=\xfe\xb1\xfe\x1a\xff\x97\xff\x15\x00}\x00\xc9\x00\xfb\x00\x03\x01\xe9\x00\xe7\x00\xf2\x00\xe9\x00\xbd\x00\x83\x00N\x00\x0e\x00\xde\xff\xc7\xff\xb2\xff\x8e\xffW\xffD\xff;\xff7\xff@\xffR\xffR\xffE\xffM\xffY\xffS\xffJ\xffB\xff@\xff7\xff/\xffA\xffd\xff]\xffL\xffT\xffx\xff\x94\xff\xa7\xff\xcd\xff\xf6\xff\xff\xff\xfb\xff#\x00p\x00\x98\x00\x9b\x00\xaa\x00\xc4\x00\xbf\x00\xc2\x00\xdd\x00\x06\x01\x0c\x01\xfc\x00\xfd\x00\x18\x01)\x016\x01M\x01V\x01Z\x01c\x01x\x01\x83\x01\x83\x01\x84\x01n\x01N\x01<\x01&\x01\x02\x01\xd5\x00\xa1\x00i\x00,\x00\xf9\xff\xd9\xff\xbe\xff\x9a\xffx\xff_\xffX\xffW\xff_\xff{\xff\x96\xff\xaa\xff\xbe\xff\xde\xff\x08\x000\x00P\x00q\x00\x88\x00\x94\x00\x9d\x00\xa0\x00\xad\x00\xa3\x00~\x00Y\x009\x00\x1c\x00\x00\x00\xde\xff\xb9\xff\x93\xffy\xffk\xff[\xffT\xffL\xff:\xff3\xff+\xff4\xff@\xffH\xffR\xffQ\xffU\xfff\xff{\xff\x95\xff\xab\xff\xbd\xff\xcf\xff\xde\xff\xf4\xff\x19\x004\x00A\x00R\x00n\x00\x89\x00\x9d\x00\xb2\x00\xce\x00\xde\x00\xe2\x00\xef\x00\x04\x01\x0f\x01\x19\x01\x15\x01\x12\x01\n\x01\xfe\x00\xed\x00\xdd\x00\xc3\x00\x9d\x00v\x00Q\x000\x00\xfe\xff\xca\xff\x98\xffl\xffM\xff#\xff\x01\xff\xdc\xfe\xbe\xfe\xa3\xfe\x9b\xfe\x9d\xfe\x9d\xfe\xa6\xfe\xb3\xfe\xc8\xfe\xe3\xfe\x05\xff&\xffC\xffj\xff\x8e\xff\xa7\xff\xc0\xff\xd4\xff\xed\xff\x00\x00\x01\x00\x1b\x00\x1e\x00,\x004\x00)\x00G\x00i\x00\x8b\x00\xa1\x00\x8c\x00\x9c\x00\x92\x00\xd3\x00P\x01\xe8\x01A\x027\x02[\x02j\x02f\x026\x02/\x021\x02\xb7\x01\'\x01\xac\x00G\x00\xd6\xff=\xff\xb6\xfe0\xfe\x99\xfd\x0e\xfd\x8a\xfcF\xfc\x13\xfc\xe3\xfb\xb2\xfb\x9a\xfb\xc4\xfb\xfe\xfb4\xfco\xfc\xd5\xfc9\xfdz\xfd\xdf\xfdF\xfe\xa7\xfe\xee\xfe\x0c\xffU\xff\x9b\xff\xc7\xff\xeb\xff\xfa\xff\x06\x00\xe5\xff\xbe\xff\xb8\xff\x9e\xff}\xff8\xff\xf4\xfe\xdc\xfe\xb6\xfe\x95\xfe\x88\xfe\x95\xfe\x9c\xfe\x91\xfe\xa9\xfe\xde\xfe\x1c\xffD\xff\x8d\xff\xfa\xff>\x00u\x00\xc1\x009\x01\x91\x01\xc9\x01\x1d\x02{\x02\x90\x02\x90\x02\x9e\x02\xb9\x02\xa6\x02|\x02\x8a\x02\x99\x02\x8f\x02j\x02p\x02\x8b\x02\xa8\x02\xde\x02t\x03g\x04G\x05\xfb\x05\xb4\x06\xaa\x07^\x08\
xd0\x08o\t\x0c\n9\n\x0f\n\xd3\t\xa5\t\x0c\t:\x08H\x07.\x06\xd4\x047\x03\xb4\x010\x00\xa8\xfe/\xfd\xc3\xfb\x83\xfa\x8c\xf9\xb4\xf8\x18\xf8\xa0\xf7_\xf7T\xf7L\xf7y\xf7\xce\xf7;\xf8\xbc\xf8O\xf9\x0e\xfa\xca\xfa\x98\xfbg\xfc\xfa\xfc\x84\xfd\xf1\xfdF\xfe\x89\xfe\xa9\xfe\xc3\xfe\xd1\xfe\xbf\xfe\xa3\xfey\xfeH\xfe\x18\xfe\xcb\xfd\x83\xfdA\xfd\x0f\xfd\xd8\xfc\xb7\xfc\xc3\xfc\xd3\xfc\xf8\xfc0\xfdv\xfd\xc6\xfd-\xfe\x9e\xfe\x06\xffk\xff\xce\xff(\x00\x93\x00\xfb\x00J\x01\x9d\x01\xef\x01\x1d\x026\x02E\x02M\x02B\x020\x02\x17\x02\xf9\x01\xda\x01\xc9\x01\xac\x01\x96\x01{\x01[\x01S\x01M\x01F\x01:\x01;\x01B\x01G\x01T\x01[\x01`\x01X\x01P\x01N\x01:\x01\x14\x01\xeb\x00\xbf\x00\x8b\x00N\x00\x0e\x00\xd3\xff\x98\xffQ\xff\x0e\xff\xd2\xfe\xa5\xfer\xfeO\xfe6\xfe&\xfe\x1d\xfe"\xfe5\xfeM\xfel\xfe\x95\xfe\xc1\xfe\xf3\xfe-\xffe\xff\x98\xff\xd3\xff\t\x00I\x00z\x00\xa2\x00\xbb\x00\xcf\x00\xe8\x00\xf1\x00\xf9\x00\x03\x01\xfa\x00\xf4\x00\xe8\x00\xe2\x00\xd6\x00\xbf\x00\xa5\x00\x8b\x00t\x00a\x00J\x00;\x00(\x00\x19\x00\x16\x00\x17\x00 \x00%\x00)\x00<\x00F\x00Y\x00m\x00t\x00\x87\x00\xa0\x00\xa5\x00\xbb\x00\xb0\x00\xb6\x00\xb8\x00\xb0\x00\xb2\x00\x97\x00\x93\x00\x95\x00\x87\x00\x84\x00f\x00?\x00\x1c\x00\xd9\xff\xab\xff\x88\xffg\xffw\xff6\xff\xfd\xfe\x03\xff\xfc\xfe\xef\xfe$\xffk\xff\xa1\xff\x9c\xff\xb8\xff6\x00\xc2\x00\xe8\x01K\x04\xa4\x05\xf7\x05\xd9\x05`\x05\xae\x04\x83\x02v\x01\xe1\x00o\xffH\xfeU\xfd\xe9\xfcH\xfc\xf0\xfa\xcf\xf9d\xf8\xc0\xf6\x15\xf6\x87\xf5\xb6\xf5y\xf6\xf5\xf7\x8f\xf9)\xfa\xb7\xfb\xad\xfd\xa4\xfe\x7f\xfe\xf5\xfe-\x00y\x00\x19\x01\x10\x02\x07\x03q\x03\x15\x03\xab\x03\x14\x04\xa8\x03I\x03\xe3\x01\x11\x01\xa7\x00\xef\xff\xb1\xff\x82\xff\xbe\xffA\xff\xfc\xfe_\xff}\xff\xfa\xfe\x8d\xfe\x88\xfe\xa2\xfe\xf7\xfe\x07\x00\x00\x01\x9f\x01#\x02\xa4\x02\xe0\x03u\x04n\x04m\x04\xd2\x04C\x05\x19\x05}\x05\xcf\x06\xb6\x06\x08\x06\xb8\x05\xa3\x05\xfe\x05\x98\x05\xbe\x05\xd2\x05\xc8\x05\x9a\x05T\x05u\x05#\x05R\x04n\x03\xd6\x02\x96\x020\x02\xb6\x01;\x01\xc2\x00/\x00A\xffi\xfe\xac\xfd\r\xfdJ\xfc\x9b\xfbH\xfbo\xfb\x9e\xfb\x8f\xfbt\xfbZ\xfb3\xfb\x0e\xfb\xe2\xfa\xbf\xfa\xda\xfa\x02\xfbf\xfb\xe8\xfb\xc1\xfcj\xfd\xa4\xfd\xb3\xfd\xb5\xfd\xcc\xfd\xaa\xfd\x9a\xfd\xbd\xfd\x08\xfeP\xfe\x95\xfe\xdf\xfe \xff\x04\xff\x9b\xfe\'\xfe\xcb\xfd\x90\xfdD\xfdT\xfd\xb5\xfd\x19\xfeg\xfe\xc1\xfe8\xffb\xffh\xff\x83\xff\xac\xff\xec\xff0\x00\xb6\x000\x01\x84\x01\xdc\x01 \x02^\x02@\x02\xf9\x01\xb2\x01x\x01J\x01A\x01A\x01\\\x01Q\x016\x01:\x01\xfd\x00\xd3\x00\x8f\x00L\x00\x15\x00\xe8\xff\x00\x00,\x00K\x00g\x00`\x00S\x00M\x00>\x00\x0c\x00\xe5\xff\xcf\xff\xbc\xff\xc4\xff\xdf\xff\x12\x00/\x00,\x00;\x00B\x006\x00=\x00+\x00\x1f\x00\x04\x00\x11\x00E\x00M\x00|\x00\xa0\x00\x8a\x00\x99\x00t\x00U\x00,\x00\xf7\xff\xe9\xff\xb5\xff\xb9\xff\xb0\xff\xb3\xff\xc0\xff\xae\xff\x8e\xffX\xff*\xff\x08\xff\xed\xfe\xb6\xfe\x9d\xfe\xa8\xfe\xae\xfe\x06\xffT\xffw\xff\xae\xff\x9a\xff\xc2\xff\xfc\xff$\x00Y\x00g\x00\x91\x00\xbe\x00 
\x01\x8a\x01\x9f\x01\xab\x01G\x01\xd2\x00\xa8\x00S\x00A\x00\x03\x00\xa5\xff\xaa\xff|\xffe\xffy\xffa\xffA\xff\x19\xff\x01\xff!\xfft\xffm\xff\xbc\xffT\x00\xca\x007\x04\x8a\x06p\x07\xfd\x07\xa2\x06\xf5\x05\xbd\x03\xe5\x02\xc5\x02\x03\x02b\x02\xe3\x01\xb7\x01\x8a\x01Q\x00\xdb\xfd\xeb\xfa\r\xf9\x85\xf7\xf9\xf6\xbd\xf7\xa6\xf8\x1e\xfa\x14\xfb\xda\xfb4\xfc\xf0\xfc\xa4\xfci\xfaV\xfb\x1a\xfc4\xfc\xbd\xfe\x00\x00\x19\x01\xfe\x01H\x02\x8c\x02\xa3\x01\xe3\x00\x95\x00\xa3\xff\x8d\xff\xb0\x00,\x01\xb7\x01d\x02\xd1\x02"\x02\xaa\x00\x94\xff\xed\xfeX\xfd\x1a\xfde\xfd\xc5\xfd\xf6\xfd\x15\xff\xc2\xffg\xff\xeb\xff\x8d\xff\x18\xff\x84\xfeg\xff\xe7\xfe\x82\x00Y\x03\xa1\x01\x8e\x03I\x05\xb3\x03\xe8\x02%\x03~\x03\r\x01\x8a\x00\xd0\x03C\x01\x94\xff&\x04l\x02\x90\xff\xa4\x01\xbd\x01\xe4\xfeq\xff\x81\x01\x89\xff\xd3\xff\xc0\x01\xaf\x02\xc2\x01@\x02\xf5\x02\x18\x01\xb4\x00\x84\x02\xf4\x02\x06\x003\x01\xf9\x02\x1e\x005\x00e\x02\xbf\x00\xe2\xfe\xc1\xff\xfe\xff\x8b\xfe9\xff\x9e\x00\x9a\xfe\xb7\xff\x18\x01\x92\xfe\xd8\xff\xce\x01\xa3\xff\xc9\xfd\x83\x00\xd3\x00\x05\xff\xfd\x00\x04\x02*\x00g\x00\xe1\x01\xe9\xff\x1f\xff\x05\x01\xad\xff8\xfdY\x01\x82\x01\x0c\xfd\x9f\xff\xd5\x01\x98\xfe^\xfc\xcf\xff\x83\xfe\xdd\xfaV\xffI\x00\x95\xfb\xfe\xfc]\x00k\xfea\xfd\xbc\xff9\xfe\xb0\xfcZ\xff\xb9\xff\xde\xfd\x92\xff\xa0\x00\x13\xff\xf9\xfe\x1e\x00,\x00\xb0\xfeZ\xffM\xff+\xfe\x17\xff\xcd\xfe\xb0\xff\x11\x00\x03\xffm\xffb\xff\xf1\xfey\xfe\xb8\xfe2\x00\xe2\x00G\xff\x89\x00\xa9\x01\xb8\xff1\x00\xcc\x00j\x00\x9a\x00q\x01\x8d\x00\xfd\x00\x85\x02\x7f\x01\xdd\xff\xab\x00\x00\x00\xbb\xfe\xe0\x00l\x00m\xffD\x00\xe6\x00a\xff4\xfd\xe4\xfeu\xff\x0f\xfc\xc7\xfd\xd9\x02\x96\xfe:\xfdM\x03\xf1\x00\x8b\xfb>\x007\x02)\xfdI\xff\x1c\x05N\x01y\xfeU\x03\xbd\x03\x86\x01\x08\xff\xe8\x02\xff\xfc2\xff\xde\x00\xe7\xff\xe6\xfd\xfe\xfe\x91\x02C\xfc\r\xfeV\x00\xca\xfd?\xfa\x00\xffn\x00\xde\xfb)\xff\xdc\x01\x0b\xfe\xb1\x01e\x01\xb5\xfd\xe0\x00\xaf\x02\xa1\xfd\x9d\xfd\x02\x04\xfb\xffq\xfc\t\x01\xab\x060\xfd(\xff\xaf\x06\x15\xffT\xfc\xbb\x04,\x02\xe9\xfa\n\x03\xb8\x03F\xfd\x9c\xff\xe4\x05\xc4\x00\x08\xff(\x01\x91\x00u\xfft\xfbz\x033\x03\xf7\xfb\xbe\x02\n\x04\x96\x01\xf2\xfe\xe1\x00\x7f\x03\xc8\xfd\x15\xfe(\x01\x96\x020\x01\x19\x02\xe4\x00_\x01\x1e\x01\xd9\xfcS\xfe\xbc\x03\xd6\xfd\xfb\xfc\x9b\x012\x00<\x00}\xfc\xe0\x02\xea\xff\xf2\xfbE\xfe0\x01\xb8\x00C\xfcy\x01\xd0\x00~\xfe\xdc\x01\xc0\x00\r\x01`\xfe_\xfes\x03\x08\xfbT\x01\xc6\x04x\xfb\xc1\xfe\x1c\x05\xbc\xfc=\xfe\xb5\x00\xa1\xfd\xd4\xfd[\xfd\x7f\x01\x85\xfc\x08\x01\xe9\x00\x98\xfc\x05\x00\xdc\x02\x03\xfbR\xfc;\x05\x1e\xff\xa2\xfcF\x02\x17\x03y\xfe]\x01\xbc\x01$\x00\xeb\xfc\x18\x01r\xfd\xcb\x00\xa9\x04\xa4\xfe.\xfeb\x03f\x034\xf9o\x04\xf0\x00\xe8\xfa\x14\xff\x14\x02\xb8\x01\xf9\xfeM\xfd\xde\x03\xf6\x00\xd2\xfa%\x02\x8a\x01\xe0\xfc\xf3\xfc\x82\x03E\x03\x0c\xfen\x02V\x03y\xfd\xcf\xff\x9c\x03\xf2\xfet\x00\xbe\x03\xe6\xfdW\x02\x81\x03\x98\xfa\xe3\x00\xb4\x027\xfe\xfa\xfd\xbf\x03K\x01\x9a\xfa6\x03y\x00\xbc\xfc\x9c\x00p\x01)\xff\xeb\xfe\x07\x03\xa4\xfe?\xfed\x02\xd8\xfa`\x00\xcc\x05\x00\xfb\x9e\x00\x16\x07T\xf9\xf4\xfc\x05\x08\x0e\xfc\xac\xf9\xbe\x07O\xfe\xff\xfb\xfc\x04\xf2\x01\xee\xfc\xe1\xfe\xd9\x03\xdb\xfa\xa1\xfa\xbc\x04"\x07w\xf8\xc3\x01\xe6\x07\x1a\xf8\xec\xfd\xaf\x04o\xfdw\xfb@\x04\x86\x02A\xfc\xf0\x00\xe1\x04\xf0\xfbP\xfc\x96\x01\'\x02\x95\xfb\xb9\x00]\x03\x85\xfe\xa3\x00\x86\xfe\x91\xfd^\x00p\x02\xe5\xfa\xea\x01\xd3\x01:\xfd\x16\x03\xb9\xfe\xda\xfb\x8e\x02z\x00\xc2\xfb\xbf\xfe\x1f\x07w\x01e\xf8\x89\x03E\x05\x96\xf8\xbb\xfe\x04\x08]\xfaz\xfd\xd0\x07W\xfeG\xfe\x18\x05\xa8\xfc\x11\x00\x15\x00\xd7\xfeL\x00\xce\x00{\x01\x
a0\xff\x88\x002\x01_\x01\x85\xf9\x98\xff\xd9\x00\xed\xfb|\x01\x9d\x04\xef\xfe\x93\xfc0\x03g\xfe\x96\xfcG\x00\xf7\xff\xe0\xff1\x006\x05\xb2\x00\x1d\xfb\xa1\x02F\x00\x07\xfc_\xff\xc1\x01d\x02+\xff\xff\xffc\x02\x03\xffP\xfd.\xff5\x04l\xfdU\xfa\xb4\x05\xd1\x01X\xfcn\x01\xd8\x03\xac\xfd\x96\xfa|\x06\xe6\xfd\xbf\xf7t\x05\xdd\x01\x90\xfd\xaa\x02l\x03\xdf\xfa\xaa\xfe\xc0\x01\xd1\xfc%\xff\x01\x01\xf7\xfe\x08\x01=\x01\xbd\x01\xa5\xff\x96\xfcQ\x03\xbc\xfe\xeb\xfc6\x00f\x01\xb3\x03N\xfe%\x00\xfc\x06\x83\xfc\x15\xf9[\x07\x11\x01n\xf7\xd7\x03\xe7\x07\xca\xfa\xa5\xfc\x17\x07\xbc\x01\xaf\xf6\xaa\x04\xa7\x02a\xf5(\x05\x99\x04[\xf9\xaf\x03\xb8\x00\xe7\xfb\xab\x02\x82\x02\x11\xfc\xc7\xfd\xd3\x05\x16\xfd)\xffu\x00\xcf\x00\xe4\x03\xd4\xfc\xce\xfex\x02&\x00h\xfe1\xff\xc5\x01c\xff^\xfek\x032\xff\xcf\xfa\xa4\x06;\x01\xbc\xf6\x85\x04\x08\x02\xab\xfc%\xfe\x07\x04\xec\xff\xe4\xfa\xf5\x03s\x00\x1a\xfd\x87\x00\xcd\x01\x94\xfcF\xff\x1d\x02\x12\x011\xfe\x89\x02%\x00\x9f\x00\xc4\xfc\xe5\xfen\x06\xce\xfc)\xfe\xac\x07\xa2\xfe6\xf9\x02\x07q\x00u\xfa%\x01\xa9\x05\xee\xf9\x98\x00\x88\tp\xf7\xde\xfb:\x08\xeb\xfe&\xf7\x9e\x06^\x05d\xf9\x11\xfe\xcb\x07\xa4\xff\xe3\xf7q\x06I\x00v\xf9\xa2\x02j\x04w\xfc\xcc\xfek\x04\x1e\xfe:\xfb\xa5\x04>\x01\x83\xf9B\x02P\x02c\x01_\xfcA\x03\x8f\x02 \xf9\xf7\xff|\x03/\xfd\xca\xfd(\x07G\xfb\xba\xff\x97\x04e\x00d\xf9\x85\xfe\x86\x06^\xfb\xef\xff\xd1\x05\xd1\x00\xb1\xfa\x81\xffy\x04\x06\xfa\x83\xfd\x97\x04\xae\xfe\x99\x01\xd2\x02;\xfb\x13\x02T\x00\xf8\xf9L\x04\xdb\xfe$\xfc\xbc\x05\xdd\x03\xa7\xf6\xf4\x05\xc7\x05d\xf3\xfa\x01\x0e\x06\xf9\xf9\xa1\xfe\x98\x07:\x02\xb0\xfb_\x04\xb7\xfek\xf86\x032\x03\xac\xfei\xfeH\x05a\xfd\x10\xfeB\x05}\xf8\xe6\x01X\x025\xf7\xd3\x06v\x02\x9c\xfbc\xff\xab\x05/\xf9\xaf\xfa~\x08s\x03\xa5\xf5\xcc\x01\xc9\n\xbe\xf4\xf6\xfdM\t.\xfc\x1d\xf8\xb2\x07\xda\x01\xee\xf7L\x03\xc2\x04\xd0\xfcl\xfa\xea\x04\xff\x04q\xf5\xf1\x05\xd8\x03\x04\xf8\xba\x01\x97\x08s\xf8\xec\xfa\x03\x11\xa1\xf5\xa9\xf8Y\rc\x00\x81\xf1\xf0\x08\xd1\x07\xa2\xf6\xd4\xff`\x06?\x02>\xf5Q\x03~\x04\xd0\xfc\x8d\xfd%\x03\xfd\x05j\xf8\xcb\x02\xb2\x02\xbd\xf8\xd6\x01\x1d\x02\x15\x01@\xfc\xe5\x03~\x01D\xfa\xe2\x02}\x04%\xf6n\xff_\n\xb3\xf7\x86\xfc\xd3\x0b(\xfb\xbe\xf7r\n\x84\x01y\xf3+\x05\xf7\x07v\xf1\xb4\x01\xf8\x0fH\xf6\xa5\xf7\x0c\x0e\xca\xfe/\xf46\x06R\x01;\xfb\x8d\xff\xc2\x05\xb2\x03\xe8\xf9D\xff\x91\x06u\xf9b\xf9c\x0c\x97\xfc\xdf\xf5P\n\xce\x06s\xf6\x17\xfd\xe6\n\xe9\xfc\xf5\xf6\x98\x05\x96\x05\x90\xf6\xbc\x00\xb7\x0eU\xf5O\xfd\xbf\x0b\xf2\xf8\x0e\xfd\xd3\x034\x00B\xffM\xffT\x05\xa7\xfe\r\xfb\x8b\x05>\x03\x01\xf7|\xfe\x99\t+\xfb\x81\xfbc\t\xe5\xfd\x97\xf9S\x04x\x03Q\xfa\xd3\xfc\x12\t-\xfd\x9d\xf8\xd4\x03\xff\x07\xa3\xf9`\xfb\xf6\x07-\x01\xda\xf4\x9d\x05\x15\x06q\xf7\\\x02\xea\x05)\xfc(\xf7\x1a\x07\xc7\x05\x1c\xf6\xdd\xff\xf8\n\xb8\xf4\'\xfen\x08\x90\xfc\xb1\xf8{\x01\xf8\t\xca\xf7\x7f\x00r\x07E\xfb\xea\xf8\xc7\x04\xe0\x06!\xf9\x05\x03\x17\x06:\xfb\xb0\xfa\xc8\x06\xe9\x01\xcc\xf8\xd7\x02\xe6\x05\xd5\xfa\x13\x00\xe0\x05{\xf9J\xff\x9a\x04\x0e\xff\xf2\xf8\xf1\x06\xd4\x04\xdc\xf7\x87\x01\xce\x04\x9b\xfa^\xfdS\x05 
.\xc0-\x91.Q+\x8b$\x9d\x1e\x11\x1a\xca\x18\r\x16]\x0fJ\x05\x9a\xfd\xc5\xf9~\xf7x\xf4\x08\xef{\xe8F\xe0\xb7\xdd\xce\xde\x0c\xdd\x1a\xd9\xa9\xd6\x15\xd8\xd1\xd9\xd9\xd9\xd1\xdb\xce\xda\xcf\xdb\x8b\xe0:\xe52\xecf\xed\xe7\xebg\xee,\xf4\xb8\xf9\xc0\xfbp\xfd\x81\xfe\xa9\xff\x02\x01,\x03\x97\x03\x83\x01\x1e\xff\x9b\xfe\xf8\xff\x05\x00\xf0\xfb\xc5\xf6u\xf4\xfa\xf3J\xf3\x00\xf15\xee\xe3\xea\xb8\xe8\xc8\xe8\xe4\xe8\xe8\xe7\xb9\xe61\xe6(\xe6\x17\xe8\x1d\xea4\xea\x9e\xea\x14\xec\x0f\xef\xe1\xf1L\xf4\x87\xf7H\xf7\x99\xf8%\xfeY\x01\xf5\x03\xdb\x06\x9a\x07\xcc\t{\n\x04\r\x1e\x11G\x11\xc4\x12\xc6\x13\xf5\x15\xb0\x16U\x14\x99\x13\x89\x18W \xe8"\xeb!\xbf\x1e\x00\x1f\xd8!\x06&\xad+\xeb.\x94-\xba)\x98)3*\xea)\xbb\'\x7f&\xd5&}$\xc6\x1f\xb8\x1a%\x15\x19\x0f\xcc\x0be\n\x8d\x08\xd6\x02J\xfa\r\xf4\xd9\xef8\xeb\x81\xe8\xb3\xe7*\xe5\xdf\xdf\\\xdcS\xdbQ\xdb\xa8\xd8%\xd8N\xdd8\xe0\xa9\xdf\xe7\xe0\xb2\xe1\xea\xe2\x83\xe7\xff\xec\xc8\xf2O\xf5 \xf5\xa5\xf5\xdc\xf8\xf4\xfc\xfe\xfe[\x01Y\x03\xf1\x02\x8b\x00G\x01\xf8\x01\x0b\x00\xef\xfc\xe1\xfc\x86\xfdz\xfbi\xf7{\xf3\x98\xf2\x85\xf0\xe8\xed\xbe\xeez\xef\xf6\xea\xb9\xe9\xaa\xea\x94\xe8R\xe8\xee\xec\xc8\xeb\xda\xe9a\xf0m\xf3`\xf2\xa4\xf1>\xf4t\xf87\xfc\x17\xff\x9c\x04\xd8\x04c\x04X\x07\xe1\t\x13\x0c\x14\x10u\x12\xff\x0f\xd4\x11D\x14\xad\x11\xcc\x10D\x11\xcb\x11\\\x12[\x14\xc7\x15\x81\x11\xb2\x10\xcf\x10-\x12\xc3\x17\xd1\x1b\x84\x1c\x8e\x19\x9e\x1b\xb0\x1b\x8c\x1d@#\x1b$I#\n"_"\xca!\x80\x1f\xd0\x1d\x8f\x1c\r\x1a\x08\x18E\x15\xc2\x11B\x0c\xf3\x05\x13\x02\xba\x00\x93\xfdH\xf9\xeb\xf4B\xee\xf4\xea\x86\xe7\xc8\xe5C\xe4\x06\xe2\xe0\xe0D\xe0(\xdf\x89\xdf\xea\xde\xe8\xde"\xe3\xe6\xe4\x88\xe8\x05\xecc\xeb\xcc\xedy\xf0\xd5\xf1\x82\xf7\x88\xfb\xb5\xfah\xfd\xc2\xff\xb2\xff\x9d\x00F\x00c\xff\x7f\x00n\x006\x00:\xff\xd0\xfc:\xf9\xf1\xf6\x1f\xf6\x8f\xf4\x89\xf3\x82\xf3\x8d\xf1:\xee\x06\xefR\xec\xbf\xea/\xef\x81\xec@\xec\xc9\xf49\xf0n\xef\x90\xf5\xf1\xf2\x19\xf9\x01\xfb\xdd\xfa;\xfe\xb4\x05j\xff\x98\x03X\x0b/\x06\xac\t\xc0\t\xe7\x0f\x0b\rg\x08K\x11\xbe\x11\x90\x08\x1f\x11\xaf\x11:\x06\x13\x0fu\x0e\x83\n\xd4\r}\n<\x07\xb1\n\xc1\x0cN\x08\x1c\tL\t\xed\x08F\x0b\xbc\x0c\x0c\x0e^\r/\r\xe5\x0ed\x0f\x81\x12\xa7\x13\xee\x13\xbb\x14\xc5\x11\xa4\x13\xf1\x16U\x12\x87\x0f\xc1\x11\xfd\x11y\r\x05\x0f.\x0f_\x08D\x04\xcf\x037\x031\x01\x13\xff\xa6\xfd\xd5\xfbe\xf6\x8b\xf4i\xf4\x18\xf2X\xf0\xe8\xef.\xefv\xee\xea\xee~\xec\xdc\xeb\xcc\xed\x0f\xedG\xeb_\xee\x11\xf2\x89\xf0\x1f\xf0i\xf2Z\xf2f\xf1\x18\xf4\xb7\xf5\x9c\xf7\x1b\xf7\xd8\xf5j\xf6R\xfa\xfd\xf8\xc4\xf7\xda\xf6)\xf8\x1d\xfc\xa3\xfa\x1f\xf7\x11\xf9?\xf80\xf8\xfd\xf6\x8e\xf7.\xfb\x05\xf9F\xf6O\xf7\xb6\xfb\x8b\xf65\xf5c\xfa\xc1\xf5\x1c\xfe\x03\xfa\xe4\xf8\xfe\xfa\x1d\xfdv\xf9\x9d\xf7N\x04\xe1\xfe\x8d\xfc\x8c\xfe\xfa\x03\xaf\xff\x84\x04\x8b\x05\x82\x01\x7f\t\x83\x05\xed\x02z\x0fQ\t\xc3\x05\xc3\x0e\x97\x0f|\t\xa6\x0cp\x0e\xe3\n\xc3\r\xdf\x0b\xde\x12\x8c\x0f3\n\x07\x0f\x03\x08\x87\x06\xd5\r\x1f\rJ\x08`\t\x1d\x0b\xb8\x06\xec\xff\xd2\x06\xd9\x08Y\xff8\x04\xaf\x0c]\x05u\x03\xd7\x07}\x03\x15\x06\x90\x07\xf3\x05R\rW\x0b\xaf\x064\x0c\xbc\x0fQ\x08}\x03\x89\x0bw\x0bs\x04t\n_\x0bc\x05)\x02\x05\xff\x17\xfd\xd8\x00\xd6\xfd\x15\xfb~\xfc\xf8\xf8s\xf4S\xf5|\xf6\t\xf1\xf6\xf1u\xf4\xab\xf4\xb9\xf5 
\xf4\xbb\xf3<\xf5\x9c\xf5\xa2\xf6\xcf\xfa\xd3\xfck\xf84\xfd\x05\xfdJ\xfb\x8c\xff\x87\xfd\xe0\xfd\xd8\x01\xf6\xf6o\x00X\xff\x83\xf8j\xf80\xfbb\xfa\xa1\xf1\xb7\xf87\xf8_\xf2\xc8\xee\x82\xf8\xdb\xf3\xcf\xec\xbc\xf2\xab\xf5C\xf4-\xee\x12\xf5\xe2\xf3/\xf4F\xf4\xed\xf8%\xf5\x8e\xf6^\xff\x03\xf9\xad\xf9\xf3\x04\x82\x01N\xf7\x99\x03C\t\x1c\x04i\x03W\rw\x0c\x1c\x02\xb5\x02\x9a\x0c\x9b\t6\t\xc4\x0b\xd8\x08\n\x0b&\tS\x05j\x04\xcd\n\x1b\x08S\x01\x8f\r\xdd\x0b\xf4\x01\xdd\x02O\x08e\x03\x80\x00\x80\x07Z\ne\x01\xca\x04m\t\x8f\xfe\x8d\x00\x13\t\xc8\x03\x8f\x00x\x0ci\r\xde\x001\x03\xcb\t\xc9\x08j\x06\xf3\x0c\x8f\x0e\x81\t@\t\xdc\x0e\x04\x08\xc8\x05x\r\xea\x0b<\x08\x85\x0c9\x0b\xb4\x05|\x06s\x04\xd3\xff\xb4\x02\x95\x07\x1b\x01[\xfd\xa3\x00E\xfd&\xf4\t\xf9\x84\xfc\xeb\xf6\x88\xf4B\xf7\x94\xf7O\xf3\xdc\xf2W\xf8\x93\xf4\xc6\xec_\xf29\xf7e\xf8\xb1\xf5]\xf5)\xf7N\xef\x90\xf0\xd3\xfa1\xf9\xc9\xef\xec\xf8<\xfd?\xf4\xe7\xee\xb3\xf7\x83\xf7X\xee\'\xf4h\xfbJ\xf3\xf5\xfa7\xf6\xde\xefA\xee\x7f\xf6\x97\xf9\xc0\xf3\x16\xfe\x04\xfaA\xf9B\xf4\x8b\x00X\xf7\x8d\xf8\x8e\x06O\xfc\x98\xfe\xbc\x06\xad\rS\xff\xff\xf7\x18\t\x9f\n\x11\xfd\x92\x07\xf1\x18\x03\n=\xf6\xb5\x13Q\n\xeb\xf7\xf4\x0e*\x10g\x02O\x07\xa0\rj\n\x85\x03\xb0\x03\x1e\t \x06\xd5\x06\xf4\x0e\x1c\x10W\x02\xd9\x05\x96\t \r\xc1\x02\x8d\t\xc4\x10\x8e\x04z\x05F\x0c~\x05,\x00_\x08>\x02\xf2\x02\xfd\x05V\x04\x9d\x00\x04\x02`\x02i\xfb\xbd\x03E\x00I\x00;\x05q\xfc\xe2\xfa\xab\x04\x17\x02\xc9\xf9j\x05Z\xff\xf8\xfa\xa8\xfek\x04\x9b\x01\xaa\xf9,\xfe\x0f\xfd\xfb\xfd|\xfd\xa0\x03\x16\xfe\xaf\xf3I\xf9h\x04w\xf9K\xf2\xc0\x06\xb6\xf7\xbb\xf3\x13\xf7c\x03p\xfaF\xefW\xfe5\xfe\xf9\xf0\x89\xf6L\t\xa9\xf9+\xf2\x08\xf5\xf7\x0b\xe9\xfc\'\xf1\x05\x0c\x16\x04\x01\xec?\x08\x06\x13Q\xfa\x1b\xfc\n\x05\xa9\x0e(\xf8v\x00\x13\x12\x8b\x02\xc1\xf8\xec\x04\x00\x049\x03\xcb\xfd\x0f\xf9\xbd\x07\xfa\xf9W\x04\xc9\xfcc\xfaP\x01\xb1\xfc\xc1\xf4\xf6\xfd\x11\nb\x00\x0f\xfa\x85\xfbC\x07\xea\x06\x93\xfbc\xf5\x87\x0f\r\x031\xf9\xd4\x0c\x93\x14J\xf8\xa3\xf4\xd6\x0c6\x06\xf4\x01\xf7\x03\x15\x0bR\xff\xfa\x04h\x02P\x07\xfd\xfeX\xf4f\x08e\x00\xf7\xfd\xaf\x05\x98\x042\xeeV\xf9{\ne\xf0K\xf8\x81\x03\x8e\xfb\x15\xf2\xa7\x00\x1f\x00y\xfa\x0b\xef?\xf9\x08\x03\xcc\xee\x02\xfeS\x08\x02\xfa}\xe8\xee\x05\xeb\xfa\xc8\xed\xcf\x03_\xf9\x03\xf98\x01\xac\xfd\xfe\xf9\'\x01V\xf7\xd5\xf7\xf0\x0c\x0e\xfe\xc9\xfa\x1c\x07\xc2\x07\xe4\xfdw\xf8\x7f\x0f>\th\xf5\xbb\x018\x1b\x7f\x01s\xf4\x8b\x10{\x0f\xf9\xf7\x1b\x03\xf4\x16\xa0\xfct\x01\x9b\x0cI\t\x86\xfa"\t9\t\xf9\xfa\xff\x06k\x0b\xc0\xfa\xeb\xfa\xed\r<\x04\xd6\xfc\xae\xf5\xcf\x0e\x1b\x02\xad\xf25\x04\x06\x0bN\xf8\xcf\xf9\xa0\x06h\xfa+\xfe\n\xfe\xf2\x08N\xf9\xe5\x01\x86\xff\x9b\xfa7\xfdH\xff\xa7\ti\xf9L\xfb*\x06\xb9\x06\x9b\xf5\xd2\xfcC\x0e\x07\xf7\x17\xf6\xaa\x11\xe2\t\xed\xf5\x91\x00\x0e\n-\xfb\xf4\xfd\xb7\nx\x03=\xfb\xc1\x05\xaf\x068\xfe\x87\x03[\x03\x1c\xf7[\xfa\xf0\x0b\xc1\x00L\xf9\x15\x01\x06\x03\xc3\xf3?\xf42\x03\x93\x02E\xf0\x82\xf2\xe0\x07\x1c\xfb\x7f\xf0`\xf9\xf2\xfeL\xf1\xdc\xf2[\x02-\x04n\xf4\xd8\xf8h\x01\x86\xf9?\xf9\x85\xfcP\x04\xc3\xfa]\xf98\x05a\xfen\xf2~\x02\x06\x00\x81\xf2\x81\xfc\xf9\xff\xe9\xfa\xf3\xf9\xd9\xfc\x95\xfb\xd1\xf7 
\xf4\x8d\x04\xe3\x01\x8e\xf2&\xf8\n\x0eG\xfbF\xec\xeb\t\xf7\x10(\xf9n\xf1\x9f\x11L\x0c\x89\xf5W\x04A\x0f6\x07\xd4\xf3.\x0e\xf1\x17S\xf1\xb9\xfdW\x1eX\xfe\x05\xedq\x19}\x08\xce\xfa\xa9\x02a\nJ\x00m\x04I\x06.\x06\xaa\xfdm\xfb\x90\x0ft\x06\x8c\x00\xf7\x01\xe2\n\x93\xfc\x13\x00\xa5\x0fz\xffa\xfa\xf8\x0f\xcb\x05\xda\xf5\xd6\x07V\n\xda\xfb(\xf8\x1f\x08\xb8\x02\x91\xfa\xad\x03?\xfeI\x00T\xfb\x9e\xfe\xc9\xff\xc2\xff\x90\x01\x8c\xfe\x0c\xfch\x07\xda\x04\x9c\xf8\n\x01J\nh\x018\x04f\nj\x05\x00\x02\xc4\x08X\x0c\xbb\xfe\xae\x07x\x05N\t\xb9\x06{\x03J\x0cQ\xff\xce\xf8\x9f\x05\xbc\x07\xc8\xf8C\x02\xb3\x00\r\xfa\x8b\xf7\x93\xfe\xba\x01\xaa\xf5\xca\xf1A\xfa6\xfd\xaf\xfd\xa5\xf7D\xf9\x03\xfdG\xf1\xbd\xfb\x81\xfe\xcf\xf8\x13\xfdy\xfc\xa0\xf5\xac\x00&\x03\x05\xf3t\xf6\x12\x00\xaa\xf7\xa0\xf8\xd3\xffa\x00\xe4\xf6!\xf4M\xf4X\xfaC\xfe \xf3x\xf9Z\xf9\x10\xfad\xfb\xf5\xf0~\xfb\xa5\x00\xfc\xf0Z\xf7i\x00,\x01\xa9\xff\xa4\xf1t\xfc\xd1\x0cg\xfc\xe0\xeer\t\xb5\x0e\x98\xf6\x0e\xfb\x80\x0e`\x05h\xfc\xe4\xff\xc8\x08\xdd\xfe\xbf\xff\x1e\x05\xbf\x051\x05\xa6\xfb\r\x0bS\xfb\xb9\xfe\xb1\x06h\xfeC\xfa\x97\x04\xe5\r\x14\xfc\xcf\xfa\x87\x03b\x03M\x00;\xfb]\x07\x15\x04\x14\x05%\x02\x17\x02\xb1\r\xde\x02\xc2\xfbW\x03\xf9\x10\x14\x03K\x00d\t\xe1\x0b4\xff\xf9\x00\xfb\x0b\xc0\xff\x8e\x03_\x02\'\x01\xdb\x08T\x03z\xffv\x050\x02\xae\x00\x91\x01\x0b\x00\x15\x00M\x05K\x02\x1e\x05S\x03\xf9\x00\x05\x02y\x01}\x01\xb5\x03L\x078\x01B\x06\t\x08u\x03\x1e\x03<\x03T\x06\x0c\x04\x1a\x03Y\x07\xe1\x06\xa8\x02\r\x03\xa4\x01\xc8\x01*\x02n\xfd\xdf\x01\x9f\x02\x83\xff%\xfea\xfc\\\xfe\xf7\xfc\x8a\xfb\xb4\xfav\xfc\x81\xfa#\xfb\x0e\xfd\xbc\xf8\xf8\xf8\x9f\xf5\xd7\xf8\xfe\xfa\x04\xf7V\xf9\xae\xf8{\xf4\xad\xf8m\xf8H\xf5O\xf7\x15\xf5\xd7\xf6\x1f\xfb\xa3\xf7s\xf9s\xf5\xa7\xf3\xcb\xf63\xf8\x96\xfb\x1c\xf9\xc0\xf7\xd6\xfa\xc8\xf6\n\xf3\x08\xff^\xfa\x88\xf4\xbc\xfc\xca\xf9\xa4\xfcP\xfc~\xfc?\xfa\xa5\xfa\x06\xf9\x8b\x00\x12\xff\x06\x00\xee\x03\x90\xfa\xe3\x00g\x03}\x01\xeb\xfd\x05\x02c\x00R\x04\xa1\t\xf6\x05\xe0\x01C\x04\xd0\x01\x93\x05\x11\x05e\x06\xb1\x07\xcd\x05\xae\x0c\xbe\x03?\x06\x18\x08v\x05\x8b\x04\x12\x07k\x06\x80\x035\ri\x07\xe8\x02\xfa\x03\x8a\x05\\\x04i\x04D\x05\x88\x04"\x03r\x03\x9a\x07\x15\x03\xaf\x01\xa7\x03\x8b\x02\x05\x03\xb3\x03\xe1\x05\xe0\x03\xd0\x04m\x06\x14\x04(\x05-\x04\x08\x05\x85\x07\x97\x07Y\x05\xd9\x07\xa7\x06\xd7\x05\xcb\x07o\x05\xca\x05\xdf\x03\xb0\x04+\x05C\x05\xfa\x03\x0e\x02\xf7\x00\xb5\x00\xef\x00\xdd\xff5\x00\xec\xfe8\xfd\x1b\xfd\xe7\xff\xfe\xfc,\xfb>\xfd~\xfb\xc4\xf98\xfc\xd0\xfcu\xfa\x89\xfa\xe0\xfb\xc8\xfa\x12\xfaT\xfc\xd9\xfaz\xfa\xfb\xfa\x99\xfc\t\xfbV\xfaT\xfc\xe1\xfc\x15\xfc\x9e\xfa3\xfb)\xfbu\xfc\x9d\xfb\xe0\xfa\xf2\xfa_\xfbq\xfa\xdb\xfb\xa1\xfa\xf4\xf9\x94\xfaP\xfa\xa4\xf9F\xfb\x9a\xfb\x11\xf9m\xfa?\xfb\x99\xf9\x15\xfa\xe3\xfb\xe6\xf7\xa4\xfbA\xfc\xce\xf9i\xfb\xe9\xfc\xc2\xfa\x95\xfaO\xfd\xbf\xfcf\xfb?\xfe\xfb\xff~\xfdL\xff\xcf\xff\x8c\xfev\xff\xcf\x01<\x01\xdb\x02\x9b\x02\x12\x02\xd7\x03 
\x04S\x04I\x04\xd8\x03\x1f\x04\xc4\x05\x8d\x06\xda\x057\x04\x8b\x052\x05T\x04\x9a\x03\x83\x05@\x05e\x04\xde\x03b\x03\x93\x02F\x02\xf9\x03\x10\x02\x98\x02\xaa\x02\n\x01\xe9\xff\xf9\x01c\x02\x92\x00\x15\x01\xfe\x00v\x00\xef\xff&\x01\xbf\x00?\x00_\x00\xf0\xff\xea\x00\xed\x00\x8e\x01\xd5\x00t\xff/\x02\xbe\x00X\x00\x17\x02\x05\x03\x1d\x02M\x01\xaf\x01\xe7\x02.\x03\x01\x03\xc1\x03O\x03\x8c\x03;\x03\x9f\x03\xe3\x04\x13\x05\x96\x03\x1f\x04\xb6\x03P\x03~\x03\x98\x03\xcc\x02X\x02\x17\x02\xdc\x00\x17\x01\xa0\x00\xb2\xff\r\xff\x16\xff\xe2\xfd\xad\xfd\xc1\xfd\xd3\xfc`\xfcN\xfcq\xfc;\xfba\xfb>\xfbB\xfb$\xfb\x16\xfb\x03\xfa\xe0\xfa\x13\xfbf\xfb\xef\xfbE\xfa\xfe\xf95\xfa\xad\xfbv\xfb\x9f\xfb\\\xfc%\xfb\x89\xfb\x99\xfbe\xfb\xcd\xfc\xb3\xfc.\xfc\xf0\xfc\x9d\xfc\x97\xfbH\xfdM\xfeP\xfdO\xfb\xe1\xfdK\xfe(\xfd|\xfe\x14\xfep\xfd^\xfdu\xff\xd7\xfe\xa5\xfe\x83\xffS\xff\xf1\xfd\x0f\x01@\x01\n\xff\xcd\x00\xd9\x01\xa5\x00\xc4\x02\x8d\x03\x81\x00\x9a\x02\xe7\x031\x02F\x03\xf1\x04\x89\x03\xbe\x02\xbd\x03\xb0\x04W\x035\x02\xa6\x03\x8b\x04)\x03\xed\x02(\x03\x9f\x02\xfa\x00\x8a\x02\xaa\x02\xc7\x00\xcb\x02\xa4\x01\x9f\xffL\x00\xb9\x00\xd4\x00\xe1\xff\xcc\xfe\xe0\x00\xd2\xff\x80\xff\x9b\x01T\x00\xad\x00\xe3\xffp\x00u\x01\xf5\x02m\x014\x01\x0c\x025\x03\x98\x03\x19\x02\x95\x03\x08\x05\xad\x03\xf1\x01>\x04\xac\x05J\x04r\x03W\x04\xd1\x03\xcb\x03\xb2\x03r\x03r\x03J\x03\xb8\x02\xb7\x01\x99\x02W\x02V\x01:\x00\xa7\x00{\x00\x18\x00Y\x00I\xff{\xff\x05\xff\xb1\xfe\xf2\xfd#\xfeK\xfe\x84\xfe~\xfd\xd7\xfd\xed\xfd\xb7\xfd&\xfdx\xfdN\xfdd\xfc\x81\xfd\xbd\xfc\x0c\xfd\xb1\xfc\x1c\xfd \xfd^\xfd\xb6\xfc\xc0\xfb\x8d\xfb\xbf\xfdT\xfdF\xfc(\xfe\xb5\xfc\x13\xfd\x03\xfcd\xfc\xb6\xfd\xde\xfc)\xfc\xbb\xfc\x85\xfd\xb2\xfbW\xfd\xa0\xfc@\xfc\xe6\xfby\xfc@\xfe\x9b\xfc\xae\xfd_\xfd=\xfd\x10\xfd\xea\xfef\xffM\xfej\x003\xffl\xff\xdc\xff(\x00y\x00\x06\x01\xae\x01\xc7\x00<\x01X\x01\x1c\x02\x0c\x02\xcf\x01\xc1\x01*\x02\xfb\x01 
\x02k\x02n\x01\xbd\x01~\x028\x01\x1d\x01\xe5\x01^\x015\x01\xfb\x00;\x01R\x00\xb1\xff\xd6\xffw\x01!\x01x\xfe;\x00\xd1\x00\xb2\xff\xc7\xff\x8f\xff\x88\xff\xda\xfe\xc0\x00k\x00\x16\x00\xc4\xff5\xff\x03\x001\x00\xf3\xff\xb8\x00\xdc\x00\x93\xff\xae\xff\xd6\x00\xb5\x01\x1a\x01\xe5\x00\x05\x00\x99\x00\xd9\x012\x02\xbd\x01\xdc\x01j\x01\xb9\x01S\x02\x83\x02w\x02g\x02\xe8\x028\x02d\x02H\x03\xe5\x02\xbe\x02x\x02{\x02B\x02\x95\x02\xe7\x02|\x02t\x02\xb7\x01Y\x01\'\x01\xc7\x01\xb8\x01\x89\x00\x14\x00\xde\x00\x12\x00\xad\xffe\x00\x0c\x00R\xff\x06\xff\xa7\xff\xba\xfe\x9a\xfe\xda\xfe\xe6\xfe0\xfe\x06\xfey\xfeq\xfe;\xfdK\xfe\xe7\xfd\x86\xfc\xd9\xfdi\xfd\xaa\xfd\x90\xfdW\xfcp\xfd\xee\xfc\xb2\xfc6\xfd\xbd\xfd6\xfc\xea\xfc[\xfd\xb0\xfci\xfdc\xfdf\xfc\x1c\xfe\x07\xff\xae\xfc\x17\xff`\xfd\x9b\xfc.\xff\xe7\xfe\x04\xff\xa9\xff+\x00\x91\xfd"\xfe$\x00\xd9\xfe\t\xff\x0e\x00]\x00\x84\xfe,\x00\xcd\x00v\x006\xfe$\x00\x9a\x01\x9e\xfd\x98\x01X\x01\x8b\x00\xf9\xff\xb0\x00p\x00/\x00\t\x01\xa4\x00\x17\x01M\x00\x92\x02u\x00\x8e\x00\xe8\x00\x8b\x01\x03\x00]\x01\x85\x01e\xff\xa6\x01=\x00g\x00\xa1\x00H\x01\xe8\x003\x00\xa8\x00\xaf\xff\xc2\x00\xd6\x00"\x00\x8f\x01%\x01#\x01b\xff\xe4\x01\xdf\x01\xa6\xff\xd4\x01\xa5\x00\xe0\x01V\x02\xd0\x01&\x00T\x02\xb5\x00\xd5\x01\xd9\x029\x02\xdf\x02v\x01\xfe\x01a\x01\xf4\x03e\x01\x03\x04d\x03\x97\x01\xba\x02.\x03i\x02\xf6\x02\xc0\x03|\x02\x95\x02\xe4\x001\x03\xc7\x01\x15\x02#\x02x\x00\xee\x01\xe8\xffS\x00\xd7\x01L\xff\r\x00\xe3\xff:\x00\xc6\xffN\xff\xd3\xff\x8c\xfe\x13\xff\x1f\xfeT\xfe\xce\xff\xd8\xfe\xe6\xfd\x8e\xfd\xaa\xfc"\xfe\x06\xfd"\xfe{\xfd\x14\xfe\xa8\xfb\xa6\xfcr\xfd\x9a\xfdE\xfd\xb2\xfc\x00\xfe>\xfc@\xfdI\xfc6\xfe\xb6\xfd\x8a\xfb|\xfdJ\xfe\xd3\xfc0\xfd\x03\xffV\xfd\xcb\xfb\x17\xfd"\xfe\xbf\xfe\xea\xfc[\x00k\xfbp\xfc=\xfeG\xfec\xff\\\xfd\xfe\x00q\xf9\xe6\x00^\xfeF\x01\x00\x00W\xfc\x93\x00\xc2\xfb\xea\x02-\x01\x8a\xff\x14\x00\x9e\x00\x8a\xff\xf7\xff\xc8\xfeG\x02;\x01\xd8\x003\x02\xaa\xfd\x8d\xffY\x02@\x03\xf8\x023\x00=\xfd\xce\xff\xe5\x01\xb8\x02\x81\x03\\\x02\xc8\xfd\xa9\xfd}\x02\x83\x02\x85\x00\xd2\x00|\xfec\xff\x0b\x02\xae\xff\xc9\x00z\x00\xb9\x00\x07\x00@\xfe\xc6\x00J\x03\xf9\xffj\x00\x10\x02\x19\x00\x98\x01\x8a\x01\x07\x01\xe4\x03\xd8\x00\x89\x01C\x02\xb2\x021\x04\x84\x02\x05\x04(\x01v\x01{\x04\xa3\x03j\x03\x1f\x04x\x01\xf7\x03\xff\x03y\x03R\x02M\x02B\x02\xa9\x01x\x03\xe3\x02}\x02\xde\x00S\x01$\x00c\x01\x8f\x01\xa4\x00n\x00\x92\xfe?\xfe%\x00\xb2\x00\xfd\xfe\x0e\x00\x00\xfd4\xfd1\xff\x98\xfe}\xfd\x19\x00\xc6\xfd\xe0\xfcG\xfdt\xfc\xe5\xfe9\xfc\xbd\xfd\xc4\xfe\x87\xfc\x99\xff\xb6\xfe\xa6\xfa\xb9\xfbc\xfcV\xfc\x9e\xfd\x8c\xfd\x8c\xfc\x86\xfd-\xfa\x16\xfc\x8e\xfc<\xfd/\xfc\xe2\xfbp\xfc\xaa\xfc|\xfd\xbb\xfai\xfe<\xfc\x8a\xfd\xcf\xfd+\xfd<\xfd\x03\x02\x9c\xfc\xf3\xf6j\x031\x00\x12\xfc\x12\x03\x9e\x02\xf9\xfaH\xfe\x9f\x02\xf1\x00\xbc\xfew\xfc\xab\x02\'\x05\x8e\x05t\x026\xfd\x9c\xff\xeb\xfd\xe5\x02B\x02\xda\x02&\x03\x1f\x05\xdf\x04\x15\xfc\xce\xff\xff\x00!\xffl\xff\xc6\x07\x11\x03\xaa\xfd\x99\x03\x9d\x00\x8b\xff\xed\xffx\x01\\\x03\xa4\x03m\xfe\xbb\xfe\x1b\x02p\x03c\x04_\x00W\x02N\x049\xfez\x00j\x05\xfe\x03\xaf\x02\xab\x04#\x06\xa5\x01\xbc\x00\xb6\x066\x06;\x03\x18\x04\x9c\x02\xc3\x02\xd6\x04\xb0\x07\xc1\x043\x02\xad\x03\x0b\x01\xc0\xffl\x02\x96\x03\xce\x01d\x02\xe1\x02\x84\xfdO\xfe\xc6\x01\x97\xffe\xfe\x8a\xff\xe6\xfd\x01\xfd\x8d\xff\xb4\xff\xc0\x01\x82\x00\x17\xfb\xe0\xf9|\xff\xcf\x00\'\xff\xcb\x00I\x01\xbd\xfe\xb1\xfav\xfc\xa2\xfe\x1e\xff$\xff\x99\xfd\xb8\xfd\x8a\xfdY\xff\x99\xff3\xfd\xf8\xf9\xd4\xfb\xad\xfd\xd4\xf9\xda\xfb`\x02\x08\x00\xc9\xf7_\xf9\x99\xfb\xa0\xfa\xe6\xfc(\xfe\xd4\xf8s\xf
a\xef\xfc\xeb\xfa@\xfd\xe8\xfb\xdb\xf8V\xf7\xb0\xffh\xfeD\xf8F\xfc\x02\xfe\x90\xfao\xfcM\xfc&\xf9`\xff\x9c\x01\x07\xfdI\xfb\xb6\xfcg\x00n\x03`\x00\xfe\xfa:\xfd \x01_\x02\x8c\xff\xe5\x00\xe5\x02B\x00\x86\x03\x00\x03\xd3\xfcP\xfd\xb6\xff\xa4\x03r\t\xa5\x05f\xfd|\xfc\xfd\xfc\xb0\x00\x85\x039\x01&\xff\x8e\xff\xd5\xff \xfek\x00\x80\x01\x8f\xff\xd1\xfe+\xff\x9f\x03\xaf\x05-\x06\'\t\x0f\x0b\x1b\x0b\xaa\t\xec\x08\xef\x07C\x0c\xd3\x11\xcb\x11x\x11\x08\x12\x9e\x0f$\r;\x0e\x98\r$\x0c\x06\t\x18\x07\xb2\x07@\x08\xfb\x06\xcf\x05\xd4\x02\xf4\xfeF\xfa.\xf8n\xf9\x9e\xf9x\xf8;\xf7\xe7\xf7\x8a\xf5S\xf6U\xf5\xeb\xf4"\xf7\xe1\xf3\x1a\xf3\x12\xf5\xf8\xf9y\xfc\xbd\xfbr\xfb\xe8\xf9\xdf\xf8\'\xf96\xfb\xe1\xfd\x96\xff\xdb\xfc\t\xfe\xe4\xfd\x80\xfcs\xff%\x00\r\xfe\xd8\xfb\xab\xfb\xfb\xfb\xd7\xfe\xc9\xfdp\xfe)\x00(\xfe}\xfb\xf7\xf9N\xff\x06\x00\x18\xfc\x03\xfe\xb8\xffd\xfdA\xfd\xb9\x00*\x01\x1c\xfc\x94\xfa\x93\xfb\x8a\xfb \xfe\xea\x010\xfe\x95\xfcD\xfd\x18\xf9b\xf9:\xfa9\xfc\x98\xfb\r\xf9\x10\xfa\xff\x00u\x01\xd3\xfc\xb2\xf9\xf5\xf4\xdc\xf4\x17\xfc\xbd\x01\xe4\x01.\x00\xc3\xfe\xff\xfd)\xfb\x9c\xf9\x8e\xfc\xe6\xfb\x8f\xfbW\xfc\xb0\xf9\xad\xfd\r\x03\x84\xffc\xfc\xb7\xf8\x8d\xf0\xff\xeb\xfb\xf1G\x01\x13\x12\x0c\x1c\xe0\x1a|\x12H\x0eu\x0fX\x13g\x1c\xb5)\xa44\xf85\xed482D.\r)5\x1f%\x18\xd0\x15\xf0\x17\t\x1c\xfb\x19/\x11r\x041\xf68\xe8\'\xe0\xa5\xdf\x99\xe2u\xe47\xe3\x89\xe1\xdb\xde\xa3\xda\x1b\xd7\x9e\xd4\xb3\xd4\x90\xda\xb4\xe4T\xef\xbf\xf8>\xffW\xffu\xfa<\xfa-\xfd\xe5\x03\xe0\r\x00\x13\xa4\x16s\x18[\x17\x96\x14v\x0f\x13\t\x9e\x04\xde\x03\xc8\x04\xf1\x07!\x08\xfe\x04\xac\xfdX\xf2D\xea\xc7\xe7\xc1\xe7C\xe9\xd0\xec\x9c\xee\xbf\xee\xfd\xed\xf9\xed\x93\xecw\xeb@\xed\x05\xf1`\xf7D\x00A\x07u\x07\xcd\x04\xe0\x019\xfeM\xfe\xb2\x01\x9b\x05\x86\x08\x19\x07"\x03\xba\xfeK\xfa\x9d\xf6\xa1\xf4\x8a\xf0h\xf0\x96\xf2\x98\xf3\x97\xf6N\xf7\xd6\xf2\xfa\xebK\xe7\xea\xe5\x89\xea\xd8\xf2*\xf9\x98\xf9+\xf7\x19\xf5{\xf7\xd3\xf8@\xf8\xde\xf5F\xeep\xeb\xc1\xf3$\x15\\A\xf5S"D\x9b"\x03\x13\x7f!?r\xe9ZqL\x11Ha?\xdf&\xef\x0cc\x071\x10\xd0\x13\xf0\x07\x84\xee\x9e\xd2\xd4\xbb\x86\xad\x91\xad\x8e\xbbR\xcb\x11\xd0&\xc7\x9f\xbe_\xc0F\xc9\x8b\xd0\x87\xd7u\xe5g\xf6|\x08h\x12\x7f\x19*\x1e\xc1\x1c\xad\x1d\xbe\x1f\x0e"8&\xbd"\xff\x18\xe0\x12\x8b\x0f\x97\t}\xfd\x9e\xec\x90\xde\x9f\xd6\xbd\xcf\xbf\xcd\xe0\xcf\xf2\xd0\xea\xcc\xa8\xc6\x0f\xc5\xf3\xca\x14\xd6Z\xdcb\xe0L\xe7w\xf1\x88\xfe\x0c\n\xf2\x12\xd9\x18?\x19g\x15]\x14\x8b\x18\x00 
\xd7#\xf1\x1e\xa9\x16\x0c\x0fd\x07Q\xff\x1a\xf7s\xf0\x95\xed\x96\xec\xc7\xe9\xcb\xe6\xf7\xe1\x1a\xdc"\xd6\xe3\xd2)\xd6f\xdfd\xe8\xf1\xee\xb0\xf0\x02\xf2\x90\xf6\x98\xfa\t\x02q\x08\xe0\x0e*\x13\x1a\x15\xb9\x17\xca\x180\x19\xe1\x12\xd1\x0b\x00\ta\x03\xa0\xfd\x96\xfe\xaa\x14;>\xa9Y\x81PC.9\x17\x03\x1d\xa44\xfcL\x88[\x9ba\xfdV\xc8=e\'5\x1c\xa3\x17\xc3\x0bk\xfb\x1f\xf7\xfd\x01\xc4\x0bi\x01\xff\xe2\xf2\xc1\x95\xaf=\xb1\xee\xc1`\xdaN\xed\xfb\xf0\xd4\xe3\xba\xd4\x9f\xd2*\xdc\xa5\xeb\xbb\xf8\xab\x06)\x16\x00$[)\x81"8\x12`\x03\xa3\xffp\x03\x93\x0e`\x17[\x10L\x01\x07\xed\x89\xdaP\xd4\xca\xd3\xfe\xd2\xa7\xd0\xe3\xd0\x85\xd7\xd5\xe0X\xe4I\xe1\xd8\xdc}\xd9\x1d\xdf\xb9\xf1\xc5\x08\xd5\x19W\x1c;\x13?\x0b\xbf\t\x8a\x0f\xb3\x17\xe3\x1a\xf4\x18\x88\x13\xee\x0c\xcf\x05T\xfe\xdf\xf6\xde\xed\x16\xe6\xfa\xe1Z\xe4\xea\xe9Y\xe9\xcf\xe2G\xd9\x07\xd4\xb2\xd5\xc6\xdb`\xe5\x81\xee\xdc\xf4L\xf8\x81\xf9\xf9\xfc@\x01H\x03\x8c\x05=\t~\x11\xff\x1a\x90\x1f\xa8\x1d\xa5\x14\x85\x08\x92\xfej\xfa\x00\xfe\xf7\x04\xc0\x03\xb2\xf5p\xe4\xf1\xe6\xa0\x0c\xda>\xceS\x03=\xbb\x1b\xa7\x14\xaa,\xd4M\xef^\xbdbs^qN]=\xba0j\'\xcd\x1bT\x05\xb6\xf6j\xfc\x8a\t\xf5\x07\xfa\xee\xf8\xcc\xf6\xb7\'\xb5\xaf\xbf\xcd\xcfa\xde\xbc\xe4k\xe2h\xdbe\xda\xbc\xe3X\xf0\x17\xf9B\xfe\xe6\x08\x96\x1a\xf2*\x06,\x0f!j\x11\xa9\x03V\x03\x02\x08U\r1\x0e0\x04=\xf9|\xebR\xdd\xc5\xd38\xcb\x93\xcc\x1b\xd4b\xdb\xab\xe3\xcd\xe7\x0b\xe7,\xe2J\xde\xa2\xe3\x91\xf3g\x05w\x11q\x17e\x17g\x12\x8f\r\'\x0b,\x0b\xa6\x0c\x87\r\xca\x0c@\x0bW\x08\xa0\xff\x16\xf1a\xe2\xd0\xda\xf6\xdb)\xe2\xd4\xe8\x1c\xeb\xc0\xe7\xb7\xe0\x8f\xd8\xf9\xd6\xe8\xdd\xbe\xe9\xfd\xf4C\xfc\xba\xfex\x02!\x05\xe6\x03\xad\x04\xa4\x05\xdf\t+\x11X\x14\xcf\x14\x9c\x11$\t8\xff\x01\xf9\x18\xf8\x16\xf9\n\xf9"\xf2\x92\xe3\x8b\xd78\xdb\xf1\xfd\xdb1\x95RtO\xfc8\xcf,~8SRYb\xe4h^o\xbcq\xb3i@Xb=S\x1c\\\xfd\x80\xe9\xb8\xeaY\xfag\x01f\xefJ\xcf\xd9\xb2Q\xa9\xce\xae\xaa\xb8d\xc6j\xd7{\xe5\xce\xed\xd7\xf2\xb1\xf6\xe6\xf8!\xf6\xf5\xf5\x93\x04\xfb\x1f\x047w=\xf6-\xfd\x15\x1a\x06b\xfd_\xff\x94\xff\xef\xf8\x1e\xf4\xaf\xeb\x94\xe3\xa4\xdc\x19\xd13\xc6\x18\xbf\xe4\xc0\xae\xcd\'\xdeY\xec\xab\xf2Y\xf1\xa1\xee\x9d\xefr\xf7\xa9\x03\x04\x11\xef\x1b\xa7\x1f\x9e\x1d\x9c\x19\xad\x14\xe6\x0er\n@\t\xf3\x08\x84\x07\xf4\x02\x1e\xf9Z\xee\x89\xe5r\xe0\x1e\xdf\xb2\xde\x90\xe0D\xe1@\xe1\xc4\xe0 \xe0\x8d\xe2\xb5\xe7~\xed\xa2\xf4\x82\xfc\xd4\x03~\x08D\x07\x03\x04\xb7\x03\xe4\x07\x8c\x0f\xb5\x14\xba\x14I\x10r\x079\xfe\x16\xf8\xd2\xf5\xc5\xf4\x9f\xf1\xfb\xeb\xc4\xe9\x8f\xec\xa0\xe9\xdf\xdb\x07\xce\x96\xd4\xc4\xfcE2\x95T\x00Z\x94I\xa1:\x85=1M{c\x14uxyRp\x98\\\xf5E\xc50n\x1a\xf2\xfeB\xe7\x1a\xdf\x85\xe3\xcf\xe9V\xe9\x03\xdbC\xc5<\xb2F\xab\\\xb6\x11\xce]\xe7\x1b\xf8\xb9\xfc\x95\xfba\xfb\xea\x00\xe3\x08\x15\r\xc2\x12\xa6\x1b\xe2%\x11,2(\x10\x1c\xdb\x08\x84\xf3\x1d\xe5\xc7\xe1m\xe6W\xed{\xed\xeb\xe2\xb7\xd49\xc8)\xc4(\xc8\xfb\xce-\xd7\x00\xe0%\xeb9\xf6L\xff\x9e\x03\xbe\x02\xc1\x00,\x00\xcc\x05\xce\x13\xcc \x9f#\xed\x1do\x14\xc2\r6\x0b \x08]\x033\xfe\xc5\xfa\xce\xf6\xe6\xf0v\xe9!\xe3#\xde\xa9\xd8>\xd6\xb7\xd8h\xdf\xb4\xe6\xd1\xe8\xd2\xe7\xa1\xe8*\xee\xb4\xf6\xcf\xfd\x07\x04j\t\xac\rU\x11n\x11\xf7\x0e 
\r\x9c\n\xd0\x08\xb1\x08D\x07\'\x03\x80\xfc]\xf5|\xf0;\xee@\xea\xbe\xe4\xc4\xe2\xd7\xe1\xd3\xdd\x82\xdd8\xec\x93\x10\x98;qR\xefQ\x82FYB\x89K\x0c\\Fk\xddsQu\xafl\xddW\xb8;\xb6\x1d\xfd\x03Q\xf2<\xe8\xd6\xe7]\xeb\xb9\xe8O\xddG\xcdF\xbd\xe0\xb5\xea\xb9\xf3\xc6Z\xda\xe1\xee<\xff\xc9\x08K\t\xd8\x04\x9e\x00\x9c\x00]\x06\xcf\x10H\x1e\xb8\'M(p\x1bu\x07\x0c\xf57\xe7\x82\xe1\xf5\xdf|\xe1\xa3\xe5Y\xe8\x89\xe8\xe6\xe1(\xd8\xee\xcf^\xcbY\xd0\xf5\xdb]\xeb\x1e\xfaJ\x00&\xff\xd4\xfc\xf1\xfb\x99\xfd\xd0\x00j\x04r\x0c\x9a\x17\xb4\x1e\x80\x1b\t\x10\x1e\x03w\xfa\x88\xf8\x95\xfaW\xfd\x92\xff\x9c\xff\x98\xf9\xa2\xee\x9f\xe3\xf3\xdc\xd0\xdbX\xde-\xe4\xca\xeda\xf7N\xfc\x9f\xf9\xb6\xf16\xec\x9f\xedP\xf5&\x01\xb6\x0bS\x12\xed\x13[\x0f\xc9\x07\xf6\xff\xac\xfb\x9c\xfc\xfb\xfe4\x01>\x01\xd2\xfe\xa2\xfb\x01\xf7t\xf2s\xec6\xe7\xff\xe5s\xe8B\xeew\xeb\xf5\xe6\xa2\xf3\xe4\x145>\xa1T>R\xbcG\x9eC$K\'V\xc3^\xc3c&d\x94`\xa2Q\x8f8\x11\x1b\x98\xff6\xea\xf3\xdaE\xd7z\xde\'\xe7\xde\xe6(\xdb\xb9\xca\xf3\xbfx\xbf\xd0\xc7i\xd7\xb5\xeb\xe8\x00!\x11l\x16\\\x13\xf6\x0b\x10\x06v\x04\x8f\x06\x02\x10\xab\x1a\x9d!\xf1\x1e\xea\x0f\xfe\xfdl\xed\x85\xe3\xc1\xe0\x91\xde\x16\xdfS\xe1\xab\xe3\x11\xe4`\xe0\x14\xd9\x81\xd1P\xd0\x84\xd6\xa9\xe2U\xef\x86\xf8\x96\xfdZ\xfe*\xfer\xff\x92\x03\x1c\t\x91\x0e\x85\x13\xb9\x15\x05\x14\xdf\x0e\xad\x07\x7f\xff\x1e\xf9\xfe\xf6\xe8\xf7\xab\xfb\x97\xfe\xbc\xfb\x08\xf4\xc5\xe9\xbf\xe1\xb8\xde\x01\xe1\xc7\xe7\xf1\xeeI\xf5S\xf9\x94\xf9\xda\xf8\xd4\xf6f\xf4\xbc\xf4\x82\xf7;\xff\xb4\t\xfb\x10\x86\x13\xc4\x0eM\x05*\xfeM\xfa\xa0\xfa\x0c\xfd\xb9\xfe%\xff|\xfc\xc0\xf8p\xf2\xdf\xeb\xcc\xe7\xdf\xe6\xe0\xed\x08\xf8\x1b\xfd\x8c\xf8\x80\xee\xbf\xecV\xff\xfe%9M,_\xd4Y\x03L7G\x90NyU\x1dU\xe2OkIqF~>\x82*\xcc\x10\x0b\xf5\xf4\xde\xce\xd3\x0e\xd3\x9a\xdb\xd9\xe5B\xe9\x01\xe3{\xd7b\xd0\t\xd3\xc7\xdd\x92\xeb\xaa\xf8Y\x03\x8f\x0co\x13s\x15l\x12\x89\x0b\xf3\x03O\xffJ\x01\x1f\t\xfd\x120\x16X\x0f\xbc\x00\xda\xedU\xe2n\xde[\xdf\xbe\xe4.\xe8)\xea)\xe9\xb3\xe3\xb6\xdeR\xdb<\xda>\xdc\xd7\xdfS\xe6\x9e\xf0\x1e\xfc\xf5\x05Q\n\x9f\x07\xd4\x01\x0f\xffy\x02J\n\x90\x11\xf4\x15\x1a\x15\x1b\x0f\x02\x07\x16\xfe\xfe\xf6\x02\xf3~\xf0\xb9\xef\xc3\xefk\xf0W\xf0\xab\xee\xff\xea\xcb\xe6\xfa\xe5.\xe8\x98\xee\xff\xf5\x16\xfdS\x02\xa1\x02\x1f\x01\x07\xfe^\xfbm\xfcT\xff\x15\x05\x19\x0bm\x0e\xa0\x0e\r\t\x08\x00W\xf7\xf6\xf0\x0f\xef\xca\xf0H\xf4\\\xf8\xb4\xfa\xe3\xfa\xff\xf7\xc1\xf2~\xed\xf1\xeb}\xf0\xb5\xf5\xc6\xf9\x10\xff\xfe\t\x8f!\xa7<\xaeP\x8bY\x97SKI\x9e@\xec<\xdeB\x8aK\xa1Q\xc4M\x19<\xb7#.\x0b\xf3\xf7\xd0\xea\x15\xe0\x0c\xd9\xc7\xd8\x86\xde\x8a\xe5\xed\xe8H\xe5+\xdeV\xd8\xa2\xd6\xf0\xda\xe7\xe5\xfd\xf55\x08\xd1\x15\xd1\x19,\x15;\rR\x08\xdf\x07\xd4\x08\xff\x08"\t[\nk\x0c\x10\x0bR\x03\n\xf7e\xe9\xef\xde[\xd8\'\xd6\x80\xd9\'\xe0\\\xe7\xce\xe9\xdc\xe4\x7f\xdd\x03\xd9\xe8\xdac\xe1\x97\xe8\xe7\xef\x98\xf8\xef\x02=\x0c\xcd\x10>\x0fQ\n|\x05\x1a\x03V\x03M\x06\x93\x0b/\x10\xa4\x10\x84\n)\xffQ\xf3\xfc\xebf\xea7\xed\xf3\xf1\x94\xf4\x84\xf5\xda\xf4\xec\xf1\xac\xee\x82\xeb\x81\xe9P\xeb\x06\xf0\x1b\xf7\x91\xff[\x05\xc9\x07\x1d\x06\x90\x00\x9a\xfc\xcd\xfb\x0f\x00\xb3\x07\xf6\r^\x11\x8a\x0eT\x07Y\xfe\xca\xf4\xbc\xf0\xc4\xf0\x9a\xf37\xf7\xbc\xf6\xbd\xf5\x8f\xf4%\xf2\xea\xee(\xec\x86\xec\xed\xee\xa6\xf1I\xf4}\xfc\xaf\x11z/\x1eMp^\xd4\\nP\xb1D\xc2?\xafB\x95H6M{N\xf7F\x905\xb8\x1d\xfe\x05\xbd\xf3Y\xe6\x17\xdc;\xd5\xd0\xd3\xc6\xd8\x1c\xe0\xd4\xe4\xdc\xe3Q\xde\xa5\xd8.\xd68\xd9\xb5\xe2\x92\xf2G\x03\x00\x0fh\x121\x0f\xfe\x0b\xe9\x0b\xe7\x0c\xd3\x0c\xe6\n1\t\xbc\n\xb7\r\xaf\x0el\x0bF\x03\xf2\xf8\xed\xed\xf7\xe3\x7f\xdd\xec\xdc\x05\xe2\xea\xe8?\xec)\xea@\xe3[\xdc\x98\xd9\x82\xda\xac\xd
f\xf5\xe6@\xef\xa4\xf7\xd8\xfd\x82\x01\xf3\x03\x0c\x06\x89\x085\n"\t\xae\x06\xc9\x05\x86\x07\x9f\x0b\xda\x0e\xbd\r\x0f\t\x80\x01\xaa\xf9\xaa\xf3V\xef\xef\xeeu\xf1u\xf4I\xf6O\xf5\x8c\xf2\xb6\xf0z\xeff\xf0\xfb\xf2\x85\xf60\xfc^\x02$\x07\xca\t\xe4\x08E\x06\xc7\x04\xba\x04\xf4\x06\x1e\t\xa8\t\x17\x08!\x04z\xfe>\xf8\x07\xf4\xe2\xf1]\xf1\xaf\xf2\x0c\xf3\xe7\xf1b\xef\x9b\xea\xdc\xe9!\xeb\xae\xeb&\xea\xc6\xe6\x8c\xed\xa1\x026$\xf6G&]\x99`wUUE\x17<\xe4:\xccB\xa3NDW\xcdW\xbcJ|2k\x15h\xfa\xf4\xe6\xf2\xdc\x89\xd9T\xda\xf9\xdbD\xdd\xcb\xde,\xdfW\xdd~\xd8\xdb\xd1\xae\xcd\xfe\xcf}\xd9\x9d\xe8\x85\xf9\xc6\tG\x16\x1e\x1cN\x19\x0e\x0f*\x03\xe8\xfc\xf7\x00\x9c\x0c#\x19\x87\x1f\x15\x1d\xc3\x13\xc9\x05m\xf6\xab\xe8=\xdf\xbf\xdc\xe0\xdf\xb3\xe4\xe3\xe6\x17\xe4<\xdf\xe4\xdb%\xdb\xd7\xdb\x1e\xdd\x85\xde\xe0\xe0\x95\xe5\x83\xec\r\xf6\xd5\x00\x00\x0b\x9e\x11\x99\x12\x86\x0eN\x08I\x03\x94\x026\x07\xef\x0e\xba\x15\x80\x17\xec\x12&\t\xb0\xfd4\xf4\xb0\xee\xb3\xed4\xefh\xf1\n\xf3w\xf2A\xf0q\xee\x14\xee\xa9\xef\x82\xf2\xae\xf5K\xf9\xca\xfd\xe9\x02e\x08h\rY\x10\x9a\x11\xed\x10\xf3\r\x91\n\xb0\x06\x1b\x043\x03\x06\x02\xe7\xff\x7f\xfb\xb1\xf5.\xf0\xa3\xeb\t\xe9\x88\xe7\xa3\xe7\x03\xe8\xf6\xe6\xd4\xe3\x1b\xdev\xd8\xf1\xd7\xb5\xe1\xd5\xf8\xba\x18L8yOPXGUmJ\x11@\xe8=\xceEOU\x05b[dJY\x02D\x8f+\x0b\x14\xfd\xffc\xee.\xe0@\xd8\xd5\xd59\xd7\x8d\xd8O\xd8\xe9\xd6\x9f\xd4P\xd0\x0f\xca\xf8\xc4\xd5\xc6P\xd4w\xeaz\x01\xfa\x10\xe6\x16l\x16\xbc\x12\xac\x0e3\x0b\x8a\n/\x0f\x8e\x17\t [#\xce\x1e\x02\x153\t\x10\xfdZ\xf1\xc2\xe5\xdb\xdbO\xd6\xf6\xd5\x02\xd9\'\xdck\xdd#\xdd\x92\xdb\r\xd94\xd6e\xd57\xd9\x0e\xe2\xa9\xee&\xfb-\x05w\x0c\xe2\x11\xe1\x15\x06\x18\x9c\x17\x0c\x15\xe1\x11F\x10]\x11\x10\x14]\x16\xf1\x15\x0e\x12$\n\xed\xfe\xc4\xf2\xda\xe8\x8f\xe4_\xe6^\xec\xb9\xf2\xf9\xf5\xaa\xf4\x95\xf0.\xec_\xea\x0b\xedD\xf4\t\xff}\t\x14\x11-\x14-\x13\x17\x11\x12\x0f\xad\x0eL\x0f\xe3\x0e\xab\r\x0f\n\xcc\x04\x00\xffx\xf8\x8b\xf3k\xef\xec\xeb"\xe8\xc6\xe28\xde#\xdb{\xda\xab\xdc\xbd\xde\xb7\xe2.\xe5\xf9\xe3\xd5\xdf.\xdaN\xde\xe9\xf0\x0f\x11&7\x01U\\dgd|Y\x00M\x01D:C\xbeLFZ/e\x7fdfT\xdd8Q\x19\r\xff\xba\xed\x84\xe3\xe1\xddT\xda\xde\xd8,\xd9\xc2\xd9\x9f\xd9g\xd8~\xd6\x01\xd5\x9a\xd3\xe6\xd2\x10\xd6\xad\xdf\x1a\xf1\x04\x06D\x17p\x1f\xfb\x1d}\x16\x0e\x0ek\x08\xcc\x06\x1a\t\x01\x0e$\x13p\x157\x12\x9c\x08\xac\xfa;\xec\x03\xe0\x83\xd7\xc1\xd2@\xd1\xca\xd2\x9e\xd6\x07\xdb\xbb\xdd\xc8\xdd!\xdc{\xda\xdb\xda\xed\xdd\xff\xe3\x0c\xed\xfe\xf7A\x03\x82\x0c\x80\x12\x95\x15P\x173\x19S\x1b\x12\x1c\xfb\x1a\xa3\x18&\x16Q\x14J\x12\xeb\x0e\x9f\t\xf5\x02?\xfb\xcb\xf2\xc5\xea7\xe5*\xe4,\xe7L\xec\x89\xf0P\xf2\xc2\xf1\xbd\xf0\x0b\xf1:\xf3(\xf8\xb3\xff\xd5\x08]\x11\xf9\x16[\x18\x11\x16\xea\x11P\x0e\xf5\x0b\x86\n\xf1\x08\xe6\x05\xec\x01P\xfc\xfa\xf5^\xef\x04\xe9\x14\xe5\x91\xe2J\xe0\xbb\xddC\xda\x1d\xd9=\xd9\x9b\xda\xed\xdb\xbb\xdcl\xdeB\xde\x8e\xdc\xb6\xdc\xb7\xe5+\xfd\xb7\x1fSBSZ\xf5b\xb4`\x1dY\x02R\xf1NIQ\xa6Y\xa5b\x1bfb^NK:1d\x17\xb5\x02\xfc\xf3\x17\xea\xce\xe1\xfa\xdad\xd6O\xd4\xd3\xd4\x19\xd6H\xd7\xc6\xd7\x02\xd7}\xd5\x9d\xd4\x86\xd7\xc2\xe0o\xf0\xb5\x02\xda\x11l\x19\xfe\x18\x86\x13\x8b\rm\n\x9b\ne\x0c \x0eM\x0eC\x0cU\x07b\xff\x85\xf5\xcb\xeb\x80\xe3_\xdd\x1b\xd9&\xd6\xcf\xd4J\xd5\xa5\xd7O\xda\x8e\xdc\xea\xdd\x19\xdf\xc0\xe0\x08\xe3\xf4\xe6\xae\xec~\xf4\xb4\xfd\xce\x06\xb9\x0e\x13\x15\xf8\x19&\x1eM!0"\xa0 
-\x1d\xb2\x19\xa2\x17h\x16r\x14;\x10\x89\t\xb8\x00\x80\xf6{\xec\xf5\xe4\x9b\xe2+\xe5\x8a\xea\xef\xefb\xf2I\xf2\xd1\xf1\x7f\xf2\x9c\xf5\xd8\xfa\xb7\x01\xe9\t\xaf\x10[\x15\xd0\x166\x15[\x13/\x11\xc3\x0f\xf9\r"\n\xb0\x05\xd1\xff$\xfa\xd8\xf4\x92\xeec\xe8+\xe2k\xdd\x12\xda2\xd7t\xd5\x18\xd4\xcb\xd3\x14\xd5?\xd7\x7f\xdc\xe1\xe2\xd9\xe8\xa1\xed\xfa\xef\xce\xf23\xf6#\xfa\x80\x01\xb5\x0f\x95\'SF\xc2`\xe7o\xaep\x1eh\x8b^\x80V2S\xe7R?SqR\xcbK`>\xb9*\xf3\x12!\xfc*\xe8R\xd9\xab\xcf\xfc\xc9\xab\xc8"\xcbV\xd0A\xd6\x03\xda\xb3\xda\x1b\xdav\xda\xf0\xdeJ\xe7\x19\xf2p\xfd\xed\x07p\x10l\x16\x16\x19n\x18\x91\x15\xf2\x11~\x0e\xca\t\xbc\x02\xee\xf9\xc8\xf2\xc6\xef\xf3\xef\xdb\xefX\xec\xb5\xe5B\xde\xf8\xd7\xad\xd38\xd1S\xd1\xa0\xd4\xd7\xda\xe2\xe1\x86\xe7%\xeb\x1d\xee\xa1\xf1\xa8\xf54\xfa\xfe\xfe\xe2\x04\x9e\x0c\x0b\x16Z\x1f\x11&N(Y&?!\xfc\x19\x87\x11\x02\tf\x02\x1e\xff\x13\xfe\xd0\xfc\xf7\xf8\x0f\xf3\xb6\xec\x7f\xe7\x99\xe4\x8d\xe4\x00\xe8:\xee\xb4\xf6\xe9\xff"\x07\x96\x0b\xa6\ro\x0e\xfb\x0e\xc2\x0f\xf4\x10}\x12\xb5\x13\xa1\x14\xff\x13.\x11\xa4\x0b\xc3\x03\xaa\xfb\xb3\xf3R\xed\xfa\xe8\xf1\xe5\xec\xe4\xf4\xe3\xe2\xe13\xde\xca\xd8\xcd\xd4\xb9\xd2\xe3\xd3\x19\xd8n\xdd(\xe4\xce\xea\xbb\xf0\xaf\xf5\x94\xf8%\xfb\xeb\xfdX\x01\x03\x06G\t\x03\n\xa6\x06\x90\x01N\x01g\x0c\xda#uA\xf1Y\xe9e\x03d\xffXNL5C9@(BvF\x87H\xe8C\xc76\xbd#{\x10\x87\x00\x85\xf2\xad\xe3e\xd45\xc9e\xc6\xb1\xcc\xa9\xd7\xa9\xe1W\xe7\xa4\xe7a\xe4(\xdfa\xdb\xb8\xdc\xbb\xe5<\xf5\xec\x05b\x11%\x15r\x13\x81\x10v\x0e\x9d\x0bj\x06\x8a\xffz\xf9V\xf6\x17\xf6u\xf7O\xf9e\xf97\xf6\xf2\xeex\xe4\x95\xda\x9b\xd4\x1f\xd5\x9e\xdb#\xe5\x07\xee\xa6\xf3\xde\xf4\xce\xf2\xe4\xef\x8c\xee\x1b\xf0\xd7\xf3\xd7\xf8\xe8\xfdv\x03\xd3\t\x1b\x10h\x15\xde\x17.\x17\xc4\x13T\x0e\xfb\x08\x06\x05\n\x04\x9b\x05B\x07\xcc\x07Q\x05[\x00\xb0\xfa$\xf5\xd4\xf1\xd3\xefY\xf0f\xf3c\xf7\xb1\xfc_\x01\x89\x05\x03\t\'\x0b\x9f\x0c\xcf\x0cD\x0cH\x0c\x9f\x0c-\r\xc5\x0c?\n\t\x06\xcb\x00\x1c\xfb\xd9\xf5\xad\xf0\xe5\xeb\x1a\xe8\x81\xe5\x11\xe41\xe3\xa0\xe2\x93\xe21\xe3\x9b\xe4\x16\xe6\xeb\xe7\xb3\xea&\xee/\xf3\xc5\xf7Z\xfb\t\xfe\xac\xff(\x01X\x02\xd7\x03\xdf\x06j\n\x08\r\xf9\r\x84\x0c\x18\nj\x05\xbd\xfe\xc5\xf8a\xfa\xea\x08?$;B\x85V7Z\xb8NF?\xa64y2\xea5\xba9|;X9S2\x05\'\xdb\x16{\x05\r\xf4a\xe3\xe9\xd4t\xc9?\xc6\xe9\xcb=\xd8\x0f\xe5@\xeb-\xe99\xe2R\xdb`\xdae\xe0\xf5\xeb\xa4\xfaD\t\x89\x15\x9c\x1d\xb7 \x06\x1f\xa3\x1a\xbb\x14.\x0e4\x07\xe2\x003\xfd\xd3\xfc\xb8\xfek\xff\xa6\xfbL\xf2\x1d\xe6\x0c\xdb\xce\xd3\x19\xd1\x8b\xd2\x0f\xd7|\xdd\xae\xe3i\xe8\x05\xebu\xec\x88\xedx\xee*\xef\xc9\xef%\xf2~\xf7>\x00\'\n\xee\x11/\x15+\x14\xd0\x10\xaa\r\xf4\x0b\xe3\x0b\x10\rD\x0ey\x0e\xd1\x0c\xa7\t\x17\x06\x9d\x02\\\xffL\xfb\xa3\xf6\xd2\xf2F\xf1\xc7\xf2\xa7\xf6s\xfb$\x00v\x03\xcd\x04\x0b\x05\x01\x05\xe8\x05=\x08\x9f\n\xa3\x0c}\r\xcc\x0c\xc1\n\x06\x07\x15\x02J\xfc\xfc\xf6\xa9\xf2\x89\xef\xe6\xed\x8e\xec\xa9\xeb7\xeb\x10\xeb)\xeb"\xeb\xe9\xea\xfd\xeb,\xee\xff\xf0\xff\xf3\x96\xf6,\xf9\xfb\xfbd\xfd*\xfeD\xff\x14\x01\x1a\x04\x06\x06\x83\x06\xbc\x05L\x03\xa3\x00\x16\xfeP\xfbR\xfa\xee\xf6b\xf0\xb5\xea\x9d\xe9\xfc\xf48\n\x0f 
?0<5\xfc2a0\x151o5\xf79X=\xf9=\xa3=\x15;\x8a4o,Q!\x9a\x13\x10\x04%\xf4%\xea\xb1\xe8z\xed_\xf4\xe0\xf6\xfd\xf3\x0b\xee\x11\xe8T\xe5!\xe6H\xe9\xc7\xed`\xf2\xb5\xf7D\xfeb\x05\xd2\x0b\xa0\x0e\x16\r\xa8\x08\x05\x04\xb5\x01\x1c\x02\xcc\x03\xd1\x04\xbe\x03\xe5\xffi\xfat\xf4\xe4\xee\xef\xe9Z\xe4\x1e\xde6\xd9^\xd7<\xd9D\xdd\xc8\xe0;\xe2\xf5\xe1q\xe1\xbe\xe2>\xe6t\xeb\xa3\xf1\xb8\xf7\xcd\xfc\xbf\x00\xbc\x03\xe6\x06\xb9\n\xd0\rb\x0f\xf8\x0eq\x0eP\x0f_\x11<\x13\xdf\x12\x12\x10+\x0c\x0e\x08\xcb\x04Y\x01_\xfeA\xfc\x9a\xfa&\xfa\x12\xfa\x01\xfb3\xfd`\xff&\x01\xf1\x01i\x02\x13\x04\xb8\x06\xb1\t5\x0cH\rc\ry\x0c[\nE\x07\x7f\x03\xb5\xff\x8e\xfc\xe7\xf9\xd2\xf7\xc6\xf6\xc4\xf5\x91\xf4\x98\xf2\xc7\xef\xa1\xed\xfd\xebq\xeb\xbe\xeb\x1f\xec,\xed\x83\xef,\xf2\xe1\xf4N\xf6w\xf6\xa3\xf6W\xf7\xae\xf8A\xfa\xbc\xfb`\xfc\xf6\xfb\x13\xfa\xd8\xf6\xfe\xf3\xcf\xf31\xf6u\xfa8\xfe\x8e\xff\xc0\xff\xdf\xfeT\xfeI\xfe\x1b\xfe\xe5\xfd!\xfd2\xfd\x84\x00\xc4\t%\x18\x07(!4O:\x9e;\xf4:\xbc;\xdc=\xe0@rC\xb7DeD\xe9@\x84:\n1s%\x1f\x19\xfc\x0b(\x01\xb7\xf9\xaa\xf5G\xf4\xab\xf1t\xed\xf5\xe72\xe2\xa3\xdd\x1d\xdb\xd4\xda)\xdc\xae\xdeo\xe2\x0b\xe7e\xec\xe1\xf1>\xf6\xaa\xf9\x87\xfbk\xfcY\xfd\x8d\xfe\xd6\xff(\x01\xfb\x00l\xff\xb3\xfc-\xf9\x82\xf5\xa8\xf1\x8c\xed\x08\xea\xac\xe7U\xe6\xa5\xe5\x8d\xe4\x0f\xe3\\\xe1^\xe0z\xe0\xc5\xe1\xb7\xe3\xd9\xe5P\xe8t\xebJ\xef>\xf4U\xfaP\x01\xdc\x08e\x0f\xed\x14\xe1\x18\x87\x1b\xc8\x1c\x01\x1c\xd6\x19\x9c\x16\x80\x13p\x10U\r$\x0b\x87\x08i\x05\xb4\x01\x95\xfd\x8f\xfa\x07\xf9\xbe\xf8W\xf9\x93\xfa\xc7\xfb\xd3\xfc\xba\xfd\x01\xfe;\xfe=\xfe\xa6\xfdo\xfd|\xfdD\xfe\xf8\xff\x00\x01\xa1\x01\xfe\x00\xcd\xff\xcd\xfd\xef\xfb.\xfb\xfd\xf9\x97\xf8\x07\xf7s\xf5z\xf4\xb7\xf3a\xf2\x86\xf1\xb3\xf19\xf0\x8e\xed\xcd\xec\x01\xee"\xf1k\xf46\xf6\xde\xf7-\xfal\xfc\xf9\xfd!\x00#\x02\xb9\x02\x0f\x03^\x057\x08\x18\x0c\xb5\x10\xbd\x12\xff\x12\xbd\x11:\x0f\xfb\r\x86\r\x07\r\x85\rE\r\xcd\x0b\x15\n;\x08k\x06\x9c\x05<\x05\xcd\x04\x15\x04\xdd\x02g\x02K\x03\xf4\x05\xab\t\xdc\r\xff\x11I\x16r\x19\x1f\x1b\x0c\x1c\x81\x1c\xe6\x1c\xba\x1d5\x1f\xb6 
\xdf!\xd6\x1f\x05\x1cS\x17:\x11\xc0\x0cc\t\xcd\x06\x02\x05o\x02\x90\xfe\xb9\xf9w\xf5\x98\xf1\xa0\xedO\xea\x0f\xe8\x06\xe7\xb7\xe6V\xe6Q\xe5\xab\xe4\x9f\xe4\xd7\xe4l\xe5V\xe6_\xe7\x89\xe9K\xec\xc4\xeeh\xf1(\xf3\x94\xf4;\xf55\xf5\x03\xf6\x95\xf7\xdd\xf8\x96\xf9\x90\xfa\x0f\xfb\xe0\xfb\xfe\xfcq\xfd\xf0\xfd\x06\xfe\xca\xfd\xad\xfdH\xfe\x81\xff\x99\x00\x16\x01\xdb\x00.\x00\x1e\x00Y\x01\xb0\x01|\x03@\x05\xec\x04C\x05{\x06\xec\x05]\x06\x19\x06\xda\x04\n\x05\x82\x05\xbb\x05>\x04\x92\x02\x8e\x02\r\x02\xfa\xff\xbb\xfep\xfd)\xfc\x0f\xfcR\xfb\xb9\xf8&\xf8\xdd\xfa*\xfb$\xf9\xf3\xf8,\xfa\x15\xfd\x00\xfe4\xfe\xe0\xfb\x8c\xfa/\xfc\xd5\xffA\x01Q\xffx\xfdp\xfd\x1c\xfe\x92\xfd\x93\xfeh\xfc\x92\xfa\xa8\xfb\xa2\xf9!\xfa\xaf\xfcQ\xfbN\xfd\x8d\xfe\x10\xfc\x0f\xfd)\xfd\x81\xfc\x12\xff[\x02\xc0\x04\xc2\x04G\x045\x05\xab\x07\xd6\x07\xa6\x06\xc3\x07\xc3\x07\x96\t\xb5\t\xcc\nH\x0b\xf4\x0b\xd7\r\'\r\t\x10\xca\x0c:\n\xbe\x0b\x0c\n\x0f\n\x8c\nM\x08\xee\x07\xf7\t\xc3\x07\x08\t\x7f\x07#\x02C\x02\x85\x02\xfa\x03!\x059\x08t\x04\x16\x01j\xfe\xac\xfe\x87\x02\xa9\x01\'\xff\xa7\xfci\xfb\xe7\xfb`\xff_\xfb>\xfc\x8b\xfb;\xfaa\xf9G\xf51\xf8\x14\xfa\x89\xfb\x13\xfa\xf7\xf8\xc1\xf9\xf1\xf9\x8d\xf9(\xfaS\xfa\xb6\xfa8\xfap\xfb\x96\xfd\x94\xfe\xed\xff5\xfe\xa8\xfc\xfc\xfc[\xfe;\x00\xe2\x00\x05\x01)\x02w\x01\xb9\x00\xbf\xff\x8b\x00\x1b\x01D\x02\x1b\x02Z\x04\xa2\x03\xe2\x02V\x04\xf7\x03*\x05C\x07\xe1\x04\x13\x01o\x06D\x07\x93\x01\xd9\x01l\x05\xbc\x03\x87\xfd\xe9\x00p\xfe\xd4\xfaV\xfe?\xfe\xe3\xfb\xb8\xfa3\xfd\xf5\xf75\xf6\xef\xf5\xb7\xf7\x8e\xf6\xfd\xf5x\xfb2\xf6(\xf3\x94\xf6B\xf64\xf5~\xf55\xf9\xc6\xf7B\xf6W\xfb\xfb\xf9\xc1\xfaZ\xfb\'\xfa\xda\xf7\xd2\xf7:\xf72\xfat\xfb\xa6\xfa\x0f\xf9\x1a\xfd\xb3\xfaI\xfa/\x00\xf4\xfd\xda\xff\xe9\xfd\xb5\x01V\x04z\x06\xdd\x05\x16\x06\xc9\x05\xfa\x07\x85\x0b\x1f\nq\x08\xd7\t\xf7\r\x9f\x0cL\n\x83\t\xe7\n&\x0c\xba\x06}\x04>\n\x8c\n\x15\x07\xc0\x068\x08&\x04\x06\x03\x95\x05\x1a\x05\xee\x04\xdc\x02\x84\x01\x06\x02\xec\x06\x0c\x05\x99\x02Q\x00\xd4\x00>\x000\xff\xed\xfe+\xfa%\xfd\x1d\xfd\xeb\xfb\xb7\xfb\x97\xfc\xb4\xf9$\xfdU\xfb\x08\xfay\xfb\x01\xf8\x16\xfbr\xf9\x18\xf8\xb1\xf7\x94\xfb\x81\xfc\x1d\xf5I\xf9\xb1\xfau\xfet\xfb\x9b\xf8\xcb\xff\xaf\xfd\x92\xfeU\xffh\x00\xca\x01\xec\x02~\x00\xc7\x02t\x06\xa9\x03\x1d\x03t\x04\x9d\x02H\x02%\x06\xcb\x0c.\x06\x99\x04F\x02\x1e\xff!\x08\x91\x0cY\x06\x06\x04A\x07\xd3\x03\x15\x06~\x03N\n\xf7\x02\x83\x03&\x08\x9d\x06\xae\x03R\xfd\xfa\x06W\x00\xdb\xfd\xdd\xfe\xb2\x00\xcf\xfd\x06\xfdT\xfe\xd0\xf8+\xfa\x15\xfb\xa7\xf6\x0c\xf7\xeb\xf8\xd8\xf7\xf5\xf5@\xf7\x12\xf9\x0f\xf6%\xf9\xa0\xf5\xf7\xf8b\xfbd\xf8\xe5\xfc)\x00f\xfd\x9f\xfd6\x00v\xfc\x92\x01\xc0\x03\xf3\xfd\xc1\xff!\xfe\xeb\xfdz\x02\x91\x02\xca\xfe\xf1\xfe\x01\x00\xe8\xff6\x06\xa4\x06\x9e\x02~\x01\xbb\x06W\x084\x06\xb2\x03\xee\x04\xb3\x065\x04q\x07\xc3\x046\x03q\tK\x05\xe3\x01\xce\x01\xb9\x00\xc2\x03\x9c\x03g\xff\x90\xffp\xff\xba\x02M\x01\x0c\x01\xbe\x02\xcb\xfeE\xfd\x8d\x00N\x00T\xfc\xe6\xfd\xc7\xfd\x14\xfe\xf8\xff~\xfe\x8e\xfet\xfdP\xfeX\xfa\x13\xf9\xa6\xfdZ\xfeB\xfe\xc2\xf87\xfb`\xfaE\x00\x85\xf8\xc2\xf6]\xfb\xdd\xf8\x14\xfc\xba\xf9\xec\xfa\xa0\xfd\xea\xfa|\xf9\x99\xff\xc9\xfc`\xfc\xa1\xf3\x19\xfd\t\x00\xac\x03\x85\x04.\xff\xbf\xfft\xfe\xe9\x06q\x00\xbc\x030\x06$\x06\xa9\x04\x9c\x07\xb6\tg\x05.\x05N\x03\xd8\x00\x92\x05\xa6\x04\xa5\x02\x87\x07\x80\x04\xcf\x02\xf6\xfe`\x04\xcd\x05\x0f\x00\xfc\x03\xc3\x00\xeb\xff\x8b\x04\xb1\x08&\x00\x03\xfdX\xfd#\xfe\xaa\x01\xe1\xfd\x19\x03\xc6\x00\xe4\xfc\xda\xfb\xdf\xf2\xf8\xf8=\xfd:\xfa\x96\xfdt\xf9 
\xf9!\xf8p\xf8)\xf8\xa7\xf8\xa8\xf8\xf5\xf9`\xfc\x94\xfd\xd6\xfb\xd5\xfb\x81\xfc\x96\xfe\x8f\x02z\x01\xf0\x00*\x00\x12\x05\x89\xfa@\x02#\x02\x12\x01\xb8\x02\x97\x02O\x02}\xff\xcd\x01\x82\xf8>\x038\x02v\x03H\xffQ\x00\x8a\x04\x99\x04A\x02#\x01m\x042\x03s\x03\xeb\x03\x1e\x08\t\x05\xed\x02_\x01S\x06\x0c\x07\x9f\x04\x7f\x07F\x04\xf9\xff.\x03H\x06\x85\x05J\x06\xc2\x01\x16\xfe\xac\xfe4\x01\xb2\x01\x10\x00\xf1\xff8\xff\xb2\xfe\xee\xfd\x83\xfbO\xfb\'\xfb:\xfd\x83\xfc\x8b\xf9\x1d\xfbN\xffe\xfc\x13\xf9\x96\xfc<\xfb\xb3\xf9_\xf9\x82\xf9I\xfcH\x014\xfbm\xfa\xad\xfa\xcf\xfd\x9a\xfeK\xfb\xa4\xfe\x82\xfd.\xfe \xfb7\x00\x1c\x02\x00\x02\xd4\xfeE\xff\x91\x03`\xffr\x03R\x02/\x03\x1e\x06\xa4\x02\xb0\x05\x04\x08\xf5\x05\x0f\x005\x05\x9e\x08\x91\x05w\x06i\x05\x03\x03"\x07%\t\xf6\x05R\x04(\x02v\x00\x07\x02*\x03\xb9\x02\xf6\x01\x14\xfd\xf2\x00+\x00\\\xffT\xff\xd0\xfa\xd7\xf9!\xfc\xea\xfd\xa7\xfe\x93\xfc\xa4\xf9L\xfd\x88\x00P\xfa1\xfc\xf2\x02a\xf9|\xfa\xba\xfb\x95\xfa\xd8\xfdI\xfe\xf0\xfd\xbd\xfa5\xfaj\xfe\xe0\xfe\n\xfb\xfa\xf9\x9d\xfb\xbe\xfd\x9c\xff:\x02r\xfcd\xfc\xd7\x00\x16\xff\xc8\xff@\x01\xf6\xfe\t\x01\xdb\x01\x0c\x03\xde\x02?\x00\xc2\x04v\xfe(\x02e\x05\xdb\x01\xf7\xfe\x92\x03\x98\x030\x02U\x05\x01\x04\xa7\x02u\x02\xfc\x01\xca\x03\xd9\x07\xa6\x01\x82\x03\xce\x02\x05\x036\xff.\x04W\x02R\x01\n\x05M\x01Q\x03\xeb\xfeW\x03\xb9\x00\xbf\x01\x9c\x00u\xfdW\xff\xa5\x03a\x00\xab\xfd\xf5\xfb\xa1\xfb\xc6\xfdl\xfc\xf0\xfb\x82\xfcG\x00X\xfb\xae\xfaF\xfa\xa9\xfb\'\xfa!\xfb|\xfc\xe7\xfa\xe0\xfa\xc3\xfca\xfd\xb1\xfbE\xfb\xb1\xf9\x88\xfd\x00\xfe\xa4\xfd\xbf\x00\xaa\xff\xf1\xfd\x82\xff\x12\x01\xe6\xfd\xd9\x01`\x04`\xfe\x89\xffg\x01D\x04=\x02!\x05E\x05\xfa\x02Q\xff\x7f\xffN\x06\xc4\x07\xc7\x08W\x012\x04\xad\x00\xf1\x03\x11\x04H\x03\xc2\x02\x85\x02\x06\x03\xae\xfe\x00\x07\xb3\x00\x1e\xfc\xa9\xfd\x12\x01r\x02G\x02\x91\xfe\xf8\xfc\xa4\xfd\xea\xfc\xf1\x01\x95\xff\xf6\xfc\x17\xfd\x8d\xfcd\xff\xdc\xfcC\xfdS\xfc\xe3\xfc\xf2\xfb\x97\xfc\xb8\xfcG\xfe~\xfe\x16\xfd\xe4\x00s\xf8\xb2\xfab\xff\x16\xffE\x00c\x00!\xffR\xfa\xc4\xfb\xd3\x00_\xfe/\xffT\xfe.\xfd\x93\xffW\x00\xac\xfb2\xfd\x94\x01\x84\xfc\xab\x01}\x01\x9a\x01\xe4\xff>\x01\x8b\xff.\x03`\x03\xeb\x00\x8f\x03\xfb\x04^\x046\x03\x13\x05\xcf\x02\xd2\x03\x04\x02\xdf\x06U\x03\x93\x04\xa2\x01a\x01\xb5\x04\xc4\x04{\x02\xe8\x02#\x03\xd6\xfd2\x00 
\x04I\x040\xff0\x02\xf2\xff\xf6\xfb\xec\xff\x1a\xff\x97\x01\xcd\x03\xd6\xfb\x00\xf91\xfeY\x01:\xfe"\xfb\xf9\xfaJ\xfdH\xfd[\xffo\xfc\xd6\xf9\xae\xfa\xa5\xfbq\xfe~\xfc\xcc\xfc\xa7\xfb\x9e\xfc\xca\xfe\x85\x03\x81\xfe0\xfb\x11\x00\x8a\x01\xc1\xff\xaf\xff\x9b\xfe\x8e\x02\xa0\xff\xf2\xff\xb7\x03\xc5\xff\xea\x00\xdf\x01\xa6\x00\xed\xfc\xad\x02\xdd\xfe\xb7\x04x\x04\xa6\x00\xe7\x00W\xffv\x03*\x05Y\x06\xcc\xff#\x01-\x04<\x01\xda\x00G\x02r\x06\xca\x019\xffa\x00R\x01\xb3\x04m\x008\x00\xba\xff\xd5\x01\xd7\x00m\xfe\xef\x00\x9a\x02\xc2\xff\x8f\xffv\xff\xcb\xfdK\xfd\xb0\xfe\xac\x01\xd7\xff\xaf\xff\xd2\xfa\xa2\xfd\xe1\xfd5\xff,\xfc\xc6\xfd\x16\xfe\xf0\xfe\xa4\x004\xff\x0c\xffw\xfa,\x01\x7f\xfc\x9b\xfc&\xff*\x00\x9d\xfe/\xff\x9d\xfeQ\xfd\x8d\xfe\x9d\xff\xb1\xfc\xfc\xfc\xd2\x00\xb8\x00\xae\x02\xb8\xfe\xea\xfe\xba\x01\xb9\x02\x8a\xff\x1b\xff\x93\xff\r\x04\xa4\x02P\x02\xb0\x01\xc1\x00\xe9\x00\xdc\xff\xbe\x04\xde\x021\x01%\x01\xaf\x01\x97\x01(\x03\xe8\x02\xa2\x00\xab\x00\xa7\xff\xe7\xfdB\x02&\x00\xa1\x00\x05\x02m\xff\xfd\x01\x9f\x00\xfb\xfe\x00\x01\xe3\xfe\n\xfe4\x00\xc4\x01\xfa\x01\xd7\xff\xd7\xfe4\xff,\xfe\xae\xfc|\xfe\xe6\xfe\xa1\xffO\xfee\xfd|\xfe9\xfc\xe5\xfdV\xffT\xfeP\xfcL\xfc\xce\xfeX\xfd\x85\xfeW\xffE\xff\x14\x01\xe6\xfc8\x00\xd0\xfd\x15\xff\xdd\x04\x01\x00\xd8\x00\xa9\xfe7\xff_\x00\xec\x02\x97\x00\xab\xfe\x18\x04j\x00\xa5\xfd.\x01\x8f\x01\xf8\xff\xba\xfd\xda\x00>\x02\x18\x00\xa7\x02\xba\x01\xb3\xff{\xffj\xffS\x007\x00i\x01F\x03\r\x00\xdc\xfd\x17\xff\x1c\xff\x9a\x01W\x02\xa0\xfe/\x01\x0c\x01+\xfd\xd2\xff\x14\x00\xc3\xfe\xc1\xfft\x02\x10\x05\xd6\xfe\x98\xff=\xfe\x80\xfc!\x01\x99\x00\xf2\xff\xb0\x01>\x01-\x00\xb1\xfe\xe1\xfc\xb8\x01y\xfe\x80\xfd\xc8\xfe0\xff\xea\x00x\xfff\xff\xa8\x00~\xfe\x1e\xfd\x84\xfc\xe8\xfd\xd0\x01\xa7\xff\xd6\xfe`\xff\xb2\xfd\xfe\xff*\x00\x00\x00\x1f\x00`\x00\xb2\xff2\x01\xc4\x01\x85\x00A\x01]\x00\xac\x01\x9f\xffA\x02\xb1\x00\xcc\x01"\x03\x86\x00\xbe\xff\xc0\x01$\x01Z\xff:\xff+\x03$\x01\xb6\x03\t\x02\xa0\xfc\xb0\x01s\xfe\x11\x03<\x00\x1f\xffS\xff!\x01\x02\x03\xba\xffg\xfe\xb4\xfe\xa9\xff\xae\xfdN\xfd\xb7\xffx\x03G\xff\xc8\xfe\xf7\xff$\xffY\xff\x1b\xfe\xc0\x00\x9e\xfe\x7f\x00i\x00d\xff\xf6\x00\xd2\xfd\x86\x00\xd3\xfd\xb8\x00\x0f\xff\xdf\xfe\xc5\xfdf\x03N\x00\xdb\xf9\x06\x03\x19\xfe\xb2\xff\x17\x04l\xfdc\xfd\x98\x03w\x00\x04\x00\xdb\x01q\xffZ\x01\xcc\xff\x8a\x01\xa4\t\x94\xfe\xb6\xfb\r\x00<\x05\xab\x02\xbb\x01H\x04\x0e\xfdP\xfc<\xfd\xa7\xff\xa3\x02\xc1\xfe>\xfd\xfc\xfe\x8c\xfb\xaf\x01r\x03\xef\x018\xfcv\x00\x80\xfc\xd0\xff\xfe\x06\xcf\xff\x19\x01\xc7\xff4\x03n\x025\x00=\xfc5\xfd\xd2\xfb\x1a\x015\x00\x14\xfee\xfe\x10\xff\xb9\x01\x0c\x01M\x01O\xfd\xc0\xfb%\xfd\x9a\xfe5\xfe\xd9\xfe\xf7\xff\x0f\x00V\xff\x85\x00\xb3\xfb\x1e\xfd^\x02e\x02"\x00X\xfaC\xffb\x02s\x01\xca\x021\xffw\xff\xc3\x00B\x02\x01\x01_\x00\xe8\x01\xdf\xff\xb8\xffP\x03\x9f\x03\x9d\x02\xd9\xfc\x1d\xfd\xeb\x01\x1a\x05\x88\x00\x1b\x03N\x07$\xfc\x02\x01\x90\xfd\x97\xfd\x0e\x03N\x04\xc2\x038\xfeu\x00T\x05!\x02\xca\xfb-\xfe\xe5\xff!\xfd\x93\xfe\xf1\x035\x00\xe2\xfc\xcb\xfd\x0f\x017\xfd{\xfa\xb6\xfd\x18\xfe\xe7\xff\x82\xfe\x8b\xfe\xb2\xfc8\xfd\xd7\x00\xbb\x00\x96\xfe%\xfd\xd9\xfa\xe8\xfd?\x01\xbd\xff\xd6\x02\xcf\x00\xb0\xfb\x8f\xff\x8e\x00\xc7\xfd\xbe\xfe-\x00\x9d\xff{\x00~\x04[\x00\x03\xff\xdc\x023\x00\x00\x00-\x01{\x01\xf0\xff\t\xfe\x14\x04\xda\x04\xeb\x010\x01\xa3\xfdd\xfc\x15\xffJ\x01\xc3\xffP\x01[\x03\x91\x03\x1c\x02q\x01\xeb\xfc\xba\xf9\xa6\xfcI\xff\xbe\x01\xc6\xfe$\xff`\x05\xc3\x02\xfb\xff#\x03\xd2\xfb\xf3\xf9\x1b\x00\\\xfck\x01\xd6\x06\x8b\x029\xffS\x01b\xfd\x15\xfe6\x00\xd5\xfc\t\x00O\xffT\xfc\x8d\xfed\xff\xd7\xfct\x00a\xf
f\x94\x01C\x00\xcc\xf92\xfb\x9f\xfd\xf7\xfe\xa4\xff\xef\x00\xc7\xfd\xf4\xfe\x87\x01~\x01\x17\xfc\x08\xfd\x9b\xffX\xfd\xca\xfe\xcb\xff \x00h\x02\x7f\x04\xcd\x02!\x01i\xfcq\xfe.\x01\xf6\xfe\x16\x02 \x04\xb7\x02x\x05P\x05E\x03\xd9\x01y\x01\xea\xfe\xd5\x00x\x02\xf1\x01\xb9\x03\x1c\x01p\x01R\xff\xb5\x02\xea\x05r\x00(\xfeg\x01\x92\x02J\x00\x1e\xfc\xd9\xfce\xfc\x1f\xff\xe0\x01\x19\x02 \x03z\x012\xffM\xfc\xee\xfa\xce\xfb\xbe\xfd\x0f\xfe\xe9\x00\xd1\x01\xb1\x01\xec\x00\x0f\x01R\x00}\xfc\xca\xf9\x85\xfb\x95\x00\x9c\x00\xec\x00\xea\xfdf\xff1\x00\xec\x00\x04\x02\xe3\xfd"\xfe\xe7\xfez\x03\xe7\x03\x14\x01%\x00\xbf\xfcr\xfc\x99\xff\xe7\xffd\xfek\xff!\x02;\x02\xf6\xff\xb3\xff6\x005\x01\xc1\xfd\xdb\xfd\xec\xff\xe9\x01m\x02\xf3\x03\x96\x05\x0b\x03\\\xffS\xfd\x15\xff\xce\xfeM\xfe\xa3\x000\x03\xad\x01I\x02\n\x01|\x00\xf2\x01\x1b\xff\xc4\xff?\xff\xb0\x00\xe3\x00\x1d\xfed\xfe\x8d\x00\xf1\xffE\xfe\x8e\x00\xc4\xfd\xa6\xfd\x95\xff/\x01\xf8\x01u\xff\x07\xfeY\xfc:\xfd\xb6\xff\x87\x03i\x00r\xfc\xf6\xfb\xa6\xfd\xc4\x00m\x01\x81\xff\xfe\xfcB\xf9\xa2\xfb\x86\x01\x1f\x02:\x017\xff\xda\xfd\xc4\xfe\x03\x04c\x03]\xff\xd5\xffM\xff\xee\xfe\x1f\x017\x03-\x02\x8e\x00\xb3\xfe*\xfe\x03\xff\x9b\x00\x0f\x01<\x00\xb4\xfe\xce\xfe\\\xff\x02\x01;\x01S\x00\x9e\x01\x82\x01V\xffC\x00\x11\x021\x01\x16\x01\xff\xffW\xffU\xff\x81\x02B\x02\xe6\xfe\x9a\xfe3\xff\x86\xfes\xfeP\x00z\x00T\xffX\xff\xc0\x000\x00\x8e\x01\xa5\x01\xd7\xff=\xfec\xff\xd3\xffi\xffd\xff\xed\xfd>\xfe\xd3\xfe\n\xff~\xfeU\xfe(\xfe\xf1\xfe\xd6\xfey\xfe\xa6\xff\xa9\xff\r\x00^\x00\xed\x00P\x01q\x01_\x01\xdf\xff\xa5\xff\x83\xff5\x00\xbe\x00\xa7\x00L\x01\x1c\x01\r\x00(\xffh\x00A\x01\x7f\x01\x08\x01\xa9\xff\x8d\x00c\x01\x00\x01\x9e\x00\xf6\xff_\x008\x01P\x01\x11\x010\x01\xec\x00!\x00\x82\x00\x1c\x01V\x017\x01\x13\x01\xc1\x00\x9c\x00\xbc\x00\x03\x01\xa7\x006\x00O\x00\\\x00@\x00h\xff\xdd\xff\xf8\xff/\xffK\xfe\x15\xfe\xb9\xfe\x14\xffM\xff-\xff9\xfeC\xfe\x85\xff\xba\xffc\xffF\xff\xde\xfe)\xff\xa3\xff\xcf\xff5\x00\xcf\xff\xaa\xfe7\xfd\x1b\xfd\x02\xfd\x10\xfd\'\xfd\xf8\xfc\xde\xfd\x19\xfe\x11\xfeW\xfdN\xfd\xe2\xfe\xc3\xfe[\xfd\xb5\xfc!\xfdv\xfe\xbc\xfea\xfe*\xfe#\xfe\x17\xff\t\xff\xbd\xfe\xfe\xff\x05\x00m\xfe\xb2\xfd\xb7\xfdm\xfe\x9c\xfe\xdc\xfdO\xfd\xc2\xfd\xc8\xfe\xf8\xfe\xbb\xff\x80\x00\xfe\xff\x1b\x009\x00C\x00\xac\xff\xcc\xfe\xfb\xfc6\xfa\xcc\xf8\x95\xf7\x05\xf7\xb3\xf9M\x01`\x0c;\x17Y"\x13,\x8c2u5\xb73\x080\x06*|"Z\x1a\xf8\x11\x94\t\x9b\x01p\xfb\n\xf7\x1f\xf3H\xf0\x0c\xed\xdb\xe9\xf7\xe7\xeb\xe5\xf4\xe56\xe6\xcc\xe6\xc0\xe7W\xe9\xa0\xed\x15\xf3\xaf\xf8J\xfe\xff\x02\xd5\x05;\x08\xb8\t\xd4\tM\x08\x17\x05\x12\x00q\xfb\x96\xf7\xcf\xf4c\xf3|\xf18\xf0\x1d\xf0V\xf0\xa4\xf1\xad\xf2x\xf3U\xf4\xc0\xf4\x87\xf5W\xf6\xa8\xf7&\xf8\x9c\xf8\xb9\xf9\x0b\xfb!\xfd3\xfe\r\xff\xe6\x00\xa1\x02\x9c\x05\xa6\x07\xc0\x08\xfe\t\x87\t\x1c\t\x96\x08e\x08\xd1\n%\x0fQ\x12Z\x15t\x16\x9c\x16\xd8\x15\x0f\x12\xe7\x0bx\x03\xa1\xfb\x9e\xf4&\xef\x86\xea\xb4\xe7\x02\xe6f\xe5&\xe8\xa5\xec\xa2\xf1\xa1\xf6\xe3\xfaT\xfe\xa6\x01\xa4\x04d\x06\xc4\x06\xc9\x05\x9e\x03<\x01H\xfe\x97\xfc\x81\xfa\xcc\xf7\x15\xf6O\xf4\xff\xf2d\xf3\xa2\xf3\xef\xf2\xb3\xf3\xb9\xf4r\xf7\xa5\xfa\x89\xfdi\xff\xec\xff\x15\xffO\xfd\x9c\xfb\x1f\xfa4\xf9%\xf8\xe9\xf7\x9e\xf7\x81\xf8J\xfb\xa5\xff@\x04U\x08m\t\x81\t\xf3\t\x18\n\x9d\t\x18\x05x\x01j\xffW\xfc:\xf93\xf6\x00\xf6n\xf7Q\xfa=\x05V!\x8bJ"i\x0bp;gHd\x0fk\x88gOR\xe40A\x12\xc9\xfa\xda\xe6/\xd7 
\x8f\x1e\x1d\x1d\xa7\x1b\x93\x1a;\x1a^\x19\xeb\x16!\x13n\x0f\x0f\r4\x0b\x86\t\x1f\x08\x05\x07\x86\x05\n\x03/\x00\x83\xfd\x88\xfb8\xfa\x0c\xfa\x15\xfbu\xfc\xe6\xfc\xc5\xfb\xc1\xf9\xe9\xf7\xde\xf6\xd9\xf6\xde\xf7\xf1\xf9\xd2\xfc\xa5\xff3\x01\xd1\x00\xdb\xfe\xfd\xfb5\xf9T\xf7\x0c\xf7\t\xf8:\xf9\xa9\xf9\xcc\xf8\xc8\xf6\xf8\xf3\x03\xf1\x17\xef\xc3\xee\x1d\xf0\x0f\xf2\x10\xf4\xb1\xf5\x88\xf6R\xf6C\xf5V\xf4\x0b\xf4\xa4\xf44\xf6@\xf8/\xfa\x93\xfb\x1b\xfcO\xfc\xc3\xfc\x85\xfd\x08\xff\xf8\x00\xe3\x02\x14\x04$\x04\xfb\x026\x01\xe7\xff\xd8\xff\xf0\x00Q\x02\x07\x03\xb3\x02\x8e\x01\xe0\xffl\xfe\xc4\xfd-\xfe\x94\xffY\x01\x11\x03\xb0\x045\x05|\x04\xee\x021\x01@\x00U\x00\r\x01g\x02h\x03-\x03*\x02\x03\x01/\x00s\xff\xc8\xfe\x8f\xfe\xf5\xfe\x88\xff]\x00\xdf\x01!\x04\x06\x07\xe3\t\xa6\x0c\xef\x0ev\x10a\x117\x12\xfe\x120\x13\xd9\x12U\x12F\x12\xa8\x12\x13\x13\xa6\x12\xf0\x10$\x0e\xa4\n\xe2\x06\xf0\x02\xfe\xfem\xfbM\xf8\xb4\xf5\x8d\xf3\xd4\xf0\xc3\xed$\xebN\xe9U\xe8\x02\xe8\x17\xe8\xf8\xe8_\xea^\xec\x80\xee\xb4\xf0\x00\xf3\xda\xf5\x94\xf9\x7f\xfd\x0f\x01\x9d\x030\x05w\x06\x0f\x08\x1e\nv\x0c\xd8\x0ea\x11\xe7\x13\xd3\x15,\x16\x9c\x14,\x11\x9f\x0c\xe8\x07\xb9\x03|\x00>\xfe\xf3\xfc%\xfc\x08\xfb\x18\xf9\xf0\xf5\xe9\xf1\x01\xee\xf7\xeaF\xe91\xe9\x8a\xea\xcd\xecx\xef\x84\xf1\x02\xf3-\xf4\x18\xf57\xf6\x96\xf7c\xf9\xa5\xfb\xe1\xfd\xf4\xffh\x01K\x02\xcc\x02%\x03\x91\x03\xfb\x03\xfb\x03:\x03\xa5\x01a\xff\xfe\xfc\xc0\xfa\x13\xf9_\xf8q\xf8\xfa\xf81\xf9\xaf\xf8,\xf7)\xf5\x10\xf3}\xf1\'\xf1k\xf2\n\xf5Q\xf8v\xfb\xca\xfd\xf4\xfeX\xff-\x00\xcf\x02\x0c\x08\x9d\x0f\xb4\x18,"\xe4*\xab1t5\xe45f4\x802T1\x061\x041\xc80S/\x15,\xb3&\x97\x1fe\x17\xcc\x0e\xbd\x06\x83\xff\xf0\xf8\xbf\xf2\xee\xec\xf3\xe7\xe6\xe3\xe1\xe0\x7f\xdeP\xdc\x06\xda!\xd8\xcd\xd6T\xd6\xbc\xd6K\xd8\xaa\xdb\xc9\xe0\xe3\xe6\xe6\xec\xec\xf1\xa8\xf5g\xf8\x8c\xfaB\xfc\xfe\xfd\x12\x00\xd3\x02W\x06\xcd\t\x82\x0c\xbf\r\x80\r\xb6\x0b\xd3\x08G\x05\xa3\x01\xc9\xfe\x08\xfdA\xfc\xee\xfbA\xfb\x9d\xf9\xd8\xf6P\xf3\x83\xefl\xec\xe0\xea\x1d\xeb$\xed\xb5\xef\xa2\xf1"\xf2\xc9\xf1\xa8\xf1%\xf3\xb8\xf6\xc5\xfb\x01\x01"\x05\x98\x07q\x08\xb9\x08B\t2\x0bG\x0e\xc5\x11\xd9\x14$\x16s\x15\xf7\x12?\x0f\xbb\x0b:\t\x92\x07\x92\x06-\x05@\x03\x04\x01\xfd\xfd@\xfaA\xf6?\xf2\xfa\xee\x81\xec\x00\xeb\xfe\xea\xf4\xeb:\xed\xbf\xed\xfe\xecG\xebs\xe9@\xe9\xcb\xeb0\xf1.\xf7o\xfb\x1f\xfd&\xfc\xd1\xfan\xfai\xfc\xc9\x00\xe7\x05p\n\xbf\x0c\xc0\x0c\xac\x0bK\x0c1\x12Q\x1e7.\x91\xf5Q\xf0+\xec\x14\xe9\xd6\xe6c\xe6\x95\xe8\xd0\xec\t\xf2\x91\xf64\xfa\xa8\xfbj\xfaM\xf7\xb3\xf4\xc5\xf4o\xf8\xaa\xfd"\x02b\x02\x16\xfe\xb3\xf6P\xee\xf8\xe7\xc9\xe4w\xe5\x88\xe8%\xebc\xeb(\xe9\xf9\xe4\x9a\xe0\x10\xde\xda\xdf\xf9\xe5\xf7\xed\xdd\xf4\x05\xf9\xef\xfa\x82\xfbT\xfd\x18\x04\x07\x15\xb8/\x89M\xafc\xa2j\xbacrW\xf1P\x05U2a2o*xLu\xbdd\xddH%)\x84\x0e`\xfdQ\xf4\xc9\xef\x9d\xebp\xe4,\xd9\x19\xcaJ\xb9*\xaaB\xa0\x19\x9d\xb6\xa1\x19\xad\xd2\xbd\xb4\xd07\xdfX\xe5Z\xe4\xbe\xe1Q\xe6\xc8\xf4\xe7\x0b\x06&\xe9:5E\xcaB:7\r)\xe3\x1f@\x1ew"\xea%|"\x0f\x17\x1d\x06#\xf4\xe1\xe2\xe4\xd3\xd9\xc80\xc3a\xc1_\xc06\xbf\xb2\xbe\x7f\xbf\xdd\xc0r\xc1\xa6\xc1\xda\xc5\xbe\xd0\x91\xe2\xe0\xf5\xb6\x04-\x0e4\x12G\x14a\x18,\x1f\x88)@4\xda:\xac:\xa23\xc5*Q#\xa3\x1e}\x1bU\x17 
\x12\xd7\n\x9d\x01\x8b\xf9\xe6\xf2+\xefr\xed\xa3\xeb\x14\xe9\xa9\xe6\xb1\xe6\xb6\xea\x8e\xf0K\xf5\x8c\xf8d\xfa\x8b\xfb\xc5\xfc\\\xfeg\x02<\x066\x08\xbe\x06i\x02k\xfd\xa5\xf9\x9e\xf7\xb7\xf5\xcf\xf2\x7f\xee\xac\xe8?\xe3\xb3\xdf\xe6\xdd\x88\xde\x92\xdf\xe0\xdf\x18\xe0\xf8\xdf\x08\xe0\xec\xe2\x08\xe7\xb8\xed\xe2\xf4J\xf9\x8d\xfdT\x01\xaa\x06\xa5\x0bu\x0f\xae\x14\xa1\x1b\x89%\xb04JJ\xa8`\x9cg\x15\\\x01I\x8a>}D1Q\r^pd_\\\x8eDP#@\x07V\xfa\x9c\xf8\x9b\xfa\xb5\xf8?\xef[\xe1\xbc\xd2*\xc6z\xbc\xbe\xb8o\xba\x94\xc0\x85\xc6\xc6\xcc\x85\xd6,\xe2\xbd\xe9\xe3\xe9\xee\xe8\x00\xeeM\xfc\x1a\x0e\x98\x1d\x81(L.7-\x85%\xb2\x1c|\x19>\x1d\xe5 \x97\x1f\xf5\x16W\n\xfd\xfb\x13\xee>\xe3\x7f\xdb\x81\xd5w\xcf\x9c\xc8\xc4\xc3\x08\xc2\xd5\xc2\xa3\xc3<\xc3\xaf\xc3\x16\xc5\xb7\xcaa\xd6\x01\xe6\x99\xf3\x12\xfb\xe9\xfe\xf3\x01\xed\x06\xe4\x0f\x01\x1cM(\xdc.\x1c/\n,\xe0(\xb8&#&\x00&\xb2#\xc1\x1e\xdc\x184\x13\x9e\r\x04\x07q\xff\xa5\xf9]\xf4\x15\xf2`\xf2\x04\xf4\x07\xf4&\xf1\x85\xec\xcf\xeau\xed\x87\xf1\xc3\xf65\xfa\xea\xfb2\xfc\x99\xfa\xd2\xf9L\xfa\xf0\xfa\x1d\xfd\xf1\xfd\x11\xfeP\xfc\xd5\xf8\xb3\xf5\xff\xf0O\xed\xd6\xea3\xeb\xd0\xec\xc1\xedg\xec8\xe9\xba\xe6\xeb\xe4\xe2\xe5\xbc\xe9 \xed\x8f\xf3\x95\xf7\xe6\xf8\\\xf9Q\xf4\x8c\xf3\xd2\xf7\xc3\xff\xf6\x0b.\x12\x9d\x11\x9e\x0c\x9f\x0e\xeb \xc9=\xaeP\x9bM:A\xf79\x89=IH\xfaS\xd6_5c\xd8RK:I(b#<&:!\xb2\x15c\x08e\xfd}\xf4\xa8\xeb\xc9\xe08\xd7\x17\xd1\x89\xccW\xccO\xcf"\xd5I\xd9_\xd4\xe6\xcb\xa4\xcaX\xd6>\xe9J\xf7S\xfd\xab\xfe8\xfe\x83\xfd\x89\x015\x0br\x17`\x1e(\x1d\r\x17)\x10$\x0b\x95\x08"\x07\xe2\x03\xb7\xfd\x16\xf5\xcd\xed\x16\xe7\x1c\xe1\x03\xd9\x94\xd1\x95\xcc\x86\xcb\xfc\xcc\x9a\xcf1\xd2\xfd\xd2\xc2\xd0\xac\xcf\x91\xd5\x89\xe0,\xef\xa7\xfa}\xff\x10\x00\x8c\x02\xad\n\xa0\x16s\x1fp& +\x9c)\x06&{%d)\\,G)\xec"\x99\x1d\xff\x18s\x15N\x11F\x0c\xaa\x05\x04\x00U\xfb\x0f\xf9\x90\xf7\x84\xf5;\xf3\xd5\xef\x00\xee-\xee\x08\xef\xb0\xefr\xefL\xf0\x98\xf1\xfe\xf2\x90\xf3\xba\xf33\xf5H\xf5\x00\xf5\x17\xf4)\xf4\xb0\xf5\x02\xf5V\xf2\x01\xef\xbc\xed\xa9\xed\x13\xee&\xedA\xea\x9a\xe7_\xe4\xb5\xe5|\xea\x9f\xed\x12\xf0\x00\xed\xbe\xeb\x0e\xecp\xed\xea\xf0\xfa\xf1t\xf3\x97\xf3\xd7\xf9\xc8\n\x13!\xcc0\xda+\x08!<"z4\xf4L\\]\xc3f\x04g6[kJ\x08D[MaV\xa5R\'Ce1i"4\x13\x1c\x07/\xffG\xf8\x0c\xf0D\xe4\x91\xd9X\xd1c\xca\x97\xc2\xd2\xbb\xb2\xba_\xc0\xff\xc9\xf8\xcf\x92\xd0j\xcf\xe1\xd1z\xda+\xe7S\xf4\x1e\x01?\n\xf1\rF\x0e\xf9\x0e\xf2\x13=\x1c\x05"\x06#\xd1\x1f\x9c\x191\x12)\t6\x03\xc5\xfe\xf3\xfa>\xf5\x0f\xed-\xe2\xaa\xd6\xe3\xcdD\xca\xf9\xca\xf0\xcc\x7f\xcf\x99\xce\xe7\xc9\xf6\xc6\x9d\xcaD\xd5\x10\xe1\xaf\xeb\xc8\xf5N\xfb*\xff\xa3\x02p\x0b\xdd\x15\x03\x1f\xa6\'X-\x840\x11/a-I-\xba,\xd3,(,$*^&Z\x1e\xf1\x15\xc8\r\xa7\x08\xb4\x04.\x01h\xfdU\xf8=\xf2Y\xebl\xe7I\xe6\x15\xe7\xd7\xe7\xdc\xe6\x00\xe7\x8a\xe6+\xe5K\xe5\xf0\xe5\xc6\xe9?\xed\x10\xee&\xee\xc4\xee\x85\xf0R\xf1\xa1\xf0\xbf\xf0$\xf2M\xf3Q\xf3/\xf3\x85\xf3b\xf2Y\xef\xca\xef3\xf1\x1a\xf3\xa5\xf2\xe6\xec\xc8\xeb\xaa\xec0\xf2l\xf6z\xf7]\xfb\xc2\x02\xfa\x0b\x84\x12\\\x1b,+\x84<_DR@^=\x87D\xd6P&[sa;c\x8c]!O)>]6\x076\xd85\xfc/\xac"\x8c\x13\x03\x03\xa7\xf2\x05\xe6\xfd\xdel\xdb\x0f\xd7\xca\xd0\xef\xc8\xfd\xc2\xd1\xbe\x03\xbb\xb5\xba\xd2\xbe\xd5\xc7\x98\xd3\xfe\xd9\xc9\xdc\x1c\xde\x0f\xe2W\xeb\x1f\xf7`\x05T\x11y\x18\xfa\x17\xa9\x14\xd1\x13\xd4\x16\xaf\x1dZ"\xc3"\x9d\x1d\xe3\x12\xb0\x07c\xfe\xeb\xf8\xd2\xf6\xc6\xf4u\xef\xc5\xe6\xa8\xddy\xd4\x19\xcd\xa8\xc8(\xc9(\xce\xae\xd1\xef\xd3\xb6\xd5z\xd6!\xd7\x06\xda\xf9\xe3\r\xf3=\x02~\t\t\n\xdd\x08\xea\x0c\'\x18\xd6#K,\xd4/\x8c.\xca*\x81\'\xc9\'a+\xc5+\x0b*d%\xb0\x1fO\x17\xc1\x0e\x0f\x08\xa4\x04\xb9\x02\xe4\xffV\xfbs\xf3\xbc\xea\
xbe\xe2\xe1\xdf\xbd\xe0\x0e\xe4.\xe6\xea\xe4\x89\xe0\xd5\xdb\x0b\xdbn\xdf\xda\xe5s\xeb\x9b\xed\x12\xed\xb2\xeb\\\xeau\xed\r\xf1\x0b\xf7\x8a\xfbY\xfc\xbb\xfay\xf8\r\xf6M\xf6\x1c\xfa\x81\xfe=\x04\xfc\xff\xd4\xf7\x0e\xf2e\xf1\x7f\xf8u\xfd\xe5\xfe\x02\xfc\x0b\xfdC\t/\x1a\x81#\xc9\x1d\xa7\x18\xc8\x1f\x1d/\xd4A:M\xbcQVM[A\xe8<\xffB!M6P2H[9\xdc,\x86#x\x1c\xcc\x17q\x10\\\x07\xfc\xfa8\xed\xf1\xe2\xa2\xdb\xa2\xd6\\\xd1\xe5\xca\xa0\xc63\xc5/\xc6!\xc5\x95\xc3\xf3\xc4\xe5\xca\x93\xd4\x7f\xdc\x03\xe3\xa7\xe7@\xeb\xda\xef\xd2\xf7\xde\x02u\r\x04\x13M\x13%\x12\x15\x12\x10\x15_\x18b\x19p\x17K\x13U\r\x11\x07\xb1\x004\xfdE\xfa^\xf5\xdd\xee\xc2\xe8\xa7\xe3\xad\xdd\xb0\xd9j\xd8}\xd9\xe6\xd9\xd9\xd8\x1b\xd9&\xdb\xec\xdc\xfb\xe0L\xe6\x95\xeeA\xf6s\xfa#\xfeM\x02\x98\x08\xa1\x0f\xe3\x15K\x1b|\x1f\xd0!/#\x9e#\x92$\xb7%\xf7%\x1e%\xcb"\x80\x1f{\x1b\xcf\x16Q\x13A\x0f*\x0b&\x05\x95\xfe]\xfa\xdd\xf5;\xf24\xedY\xe9\xb2\xe6G\xe4\xe5\xe3$\xe3=\xe3N\xe1\xb9\xe1\x83\xe3\x07\xe6\xcc\xe8J\xe9o\xea\xb0\xeb\xd7\xee\xfd\xf2?\xf6F\xf7\x8c\xf8\xf3\xf9\xe3\xfa\xed\xfc\xa6\xffj\x018\x01\xa7\xfe\xde\xff\x11\x01c\x01\x8d\x01_\x01\xcc\x02l\xffT\xff\x05\x01\xee\x08[\x14\x92\x1b\xc2\x1d \x17\x8f\x14Y\x1bs*\xf98\x17=\xf8:\x813I/H0p67>!=\xee5\xf3)| \xd6\x1b\xd9\x19\xe5\x19\x97\x14\xd1\x0b\xa6\x00a\xf6@\xef\xd5\xe9n\xe8\'\xe7\xa3\xe4B\xdfQ\xd9.\xd5\xe4\xd3\xbc\xd5\xdf\xd9\x1a\xdf\xac\xe2U\xe3\x9c\xe2\x98\xe2\x13\xe6G\xec\x00\xf4\x1e\xfaK\xfc\x0e\xfc%\xfa\x15\xfa9\xfc\x16\x01\xaa\x05[\x06J\x02\x8d\xfd\xeb\xfa$\xfa\xd7\xfa1\xfb\xb5\xfa\x8d\xf6\x9a\xf1U\xf0\x7f\xf0\xd6\xef\xeb\xec\x88\xebt\xeeM\xf0!\xf1\x9f\xef@\xee$\xee\x1a\xf1c\xf7\xee\xfc\xbe\xfe\x0e\xfd\r\xfc\xd6\xfdQ\x02\xc9\x08^\x0e\x8f\x10\xa6\x0e\x1c\x0c\xe4\x0ct\x10\xbb\x14\xb1\x16\xed\x16\x04\x15\xa1\x11\xc3\x0e\xb4\r\x19\x0e\x05\x0e\xe9\x0b\xd0\x08`\x04\x1c\x00\xe3\xfc\xd6\xfa\x86\xf9\xa3\xf7\xdb\xf5\x85\xf3\xd6\xef(\xedE\xecl\xec\xe2\xed\'\xed\x12\xed\xf0\xeb\xf6\xea\x17\xeb|\xebN\xed$\xef\xae\xf0c\xf2\xe3\xf2b\xf2\xd2\xf1f\xf1b\xf6\xe9\xfb\x82\xff\x91\xfe\xaa\xf9\xc4\xf9\xe5\xfc\x0b\x04\xaf\x08\xff\x06\xd6\x02/\x01e\x07 \x10.\x15\xfd\x13\xc2\x117\x13y\x1b\xb0%\xcf*N)\xda%\x90\'N-[3\x8b675\x851\x0c-\x9c*\xa7*\x0e*\r([#\xbb\x1c\xaa\x15\x01\x10\xb2\r\xd9\n\xee\x05\x08\xffr\xf8+\xf4\xe5\xef\x0c\xed\xe2\xe9\x0b\xe7T\xe3\xdc\xdf\x15\xdf\x8a\xdf\xa2\xe0\x94\xdf\xd4\xde&\xdf\xf5\xe0\xbf\xe3\x81\xe6\xe6\xe8\xdd\xe9\xd0\xe9\x01\xea\xcd\xeb2\xefj\xf2\x02\xf4c\xf3\x0b\xf2\xe3\xf1\x0f\xf2V\xf3\xa1\xf4\xb0\xf4\x0e\xf4\xe5\xf2\xaa\xf2\xdf\xf2\x0f\xf2S\xf1\r\xf2\xab\xf3+\xf51\xf5P\xf5\x0e\xf6\xd2\xf6\xfd\xf8\t\xfb\x90\xfd\x9b\xff\xdb\x00i\x02*\x04\x8e\x05\x00\x08,\nl\x0c\'\x0e\xc0\x0e\x82\x0f\xf6\x0f\x8b\x10\xe5\x10.\x12\x86\x12m\x12Q\x11u\x0fU\x0eU\ra\x0c\x93\x0b\xe1\t\x82\x06\x88\x04h\x03X\x02\xf7\x00\xa0\xfe\xb8\xfb\xa9\xf9\x9f\xf8\xe5\xf8\x8f\xf7\xd4\xf5\xd6\xf5\xc6\xf5W\xf5\xf2\xf3B\xf0\t\xef\xc7\xf1u\xf5\xfc\xf6\x06\xf57\xf2\x00\xf18\xf1T\xf3\xf7\xf5R\xf6c\xf4\x08\xf4\x1c\xf8\x92\xfd\xb6\xfe\xe4\xf91\xf7\xb4\xf7\xba\xfa#\x00K\x04#\x06S\x05\xf6\x05\xd6\x06x\x07E\x07 \x06\xb7\n\x1b\x14\xbc\x17\xa0\x132\x0b~\t\x83\x11\xc7\x1a3 s\x1b\x08\x11\xc5\x0bx\x10\xbc\x1eO*\r(\xf7\x1b\xa0\x0f\x96\r,\x16\xf3\x1f\x16%\x8d"9\x1a\xb5\x11\x0f\x0e\x8a\x0f\xc8\x11\xdc\x12\x02\x11\x88\x0bK\x06d\x01y\xff_\xff\xcc\xfd\x0e\xfa\xc9\xf5\n\xf3 
\xf1\xbf\xee\xef\xea\x92\xe8X\xe7\xcc\xe6\xdb\xe60\xe4\xb8\xe0\xab\xde\xc9\xde\xe1\xe0\x90\xe2c\xe3Z\xe3\x8b\xe2\x88\xe0\x95\xdf\xcf\xe2O\xe8-\xec\xff\xec\x96\xed\x12\xed\\\xee8\xefE\xf0\x05\xf6]\xfc\xa2\xff\xe8\xfe\x8e\xfc\x92\xfd\xe4\x00\x90\x013\x03\xb9\x07\xec\x0bJ\x0c*\nS\x07\xcc\x06\xfa\nm\x0f\xef\x11\xf2\x0e\xc6\n_\n\xa0\x0b\xa4\x0eH\x0f\x82\r\xf7\x07\xb2\x08\xf9\n\x1b\x0b\xeb\x06&\x04\x01\x05Q\x05\xc9\x03\xde\x02h\x01\xbb\x00@\x01_\xfe<\xfb\x89\xf5\xb0\xf9H\x00\xab\x01\x11\xf9\x16\xf2u\xf1 \xf3\x0e\xfb\xf5\xfe\xa5\xfbD\xf0N\xe78\xf0\x7f\xfa\x19\xfc|\xf9\xce\xf8\x9a\xf3^\xec&\xf1R\xfaU\xf5\x83\xf8\xc3\x01\xbd\xfe\xe1\xf6\n\xef\xc8\xf5\xa9\xfe\xdf\x08\xad\t8\x01B\xf7\xf7\xf9\x0e\x0bC\x0e\xd0\x05\x82\x01f\x03\x1c\x0f\xa8\x1e\x9f\x13s\xfc\x1a\xf7\xe6\x05\x9e\x1c\\#I\x1d1\x0eC\xfa\x06\x00y\r\x7f\x12\xc9\x15\x98\x1aW\x11y\x0b\xeb\n\xb5\x01R\x03{\t\r\x16y\x19\x18\x0f\x1c\x08>\x00\x08\x01\xe2\n\x1a\n\xa3\nr\rC\r\xa4\x07\x11\xfd\x0b\xf9\xc5\xff9\x01a\x05\xea\x06\x89\xfb\x95\xf8\xf5\xf6\x9d\xf4L\xf1\x13\xef\xc1\xf1N\xf5d\xfb\xb0\xf3\xb1\xea\xe8\xddJ\xe3\xd5\xf1|\xf5F\xf8\x9a\xef\xaf\xe7\xfa\xe33\xe8;\xf3\xf1\xf8&\xfe\xf7\x00\xad\xf2\xd1\xe9X\xed:\xfbE\t&\n\xc4\x06\x8f\xfel\xfd\xa3\x00\xc4\xfd?\n\x8b\x15\xcb\r\x8b\x06\xe2\x04=\x0f\x15\x11_\x01g\x02\xe0\x0e/\x14\x8d\x17\x10\x10B\xfe\xd7\xf7\x8e\x04\x1b\x15&\x10\x07\nW\x06m\xff<\xfc9\xff\xfd\x03\xe2\x02\xb1\xfcA\xfb\x0c\xf6u\xfa\xe9\x03\xd4\xfd\x12\xef8\xe5#\xec\x82\xf7\xa9\x02c\xf9\x8c\xf0Z\xe4U\xe1\x12\xfb\xeb\x06\x17\xfdH\xf2q\xe8\x8f\xe1\xef\xf2\xda\x05m\x08\xa2\x07\xa2\x06\x12\xef\x10\xdd@\xf3*\x0f\xce\x0f\x98\x0b\xdb\x02\xd6\xf6z\xf6\x85\xfe\xe5\x10F\x0ek\x04"\x02\x10\x05-\x04r\x03\xe6\r\x0f\x13}\x07@\xfa\xeb\xf93\t7\x16\xde\x15\xf1\x07\xf6\xfeU\xf7\x1f\xf4[\x13\xb3\x1c\xae\x15\xa7\x00G\xf1\xa8\xf8\xe1\x04\xe5\x183\x19L\t\xeb\xfa\xaf\xf4\x9f\x01`\x07%\n\x99\x0fO\t\xbb\x082\x042\xf4Z\xf3y\x082\x12Q\x0b\xf7\xfa\xd2\xf6\x87\xfb&\xfc\xe8\x05"\x07\xb3\xec\xae\xf2M\x03z\xf8\xf3\xfa\r\xf5\xc7\xf1m\xf8m\xf9\x9d\xf3\x7f\xedR\xeeq\xf0w\x02\x92\xfd\xec\xf0l\xeb,\xef[\x00\x8e\xf9\x1f\xf4~\xf8\xe0\xf1)\xfc$\x10n\x0b\x90\xf3v\xef\'\xfeS\t\xb6\x11\xc9\x06\x8e\xfc\xf9\x072\x11\xb0\x18H\x04\x07\xf3\x80\x00\x8e\x12\xc9#\x92\x14\xa3\xf3\x87\xfb\xd3\x14\x83\x0cp\x0c\xe7\t\xa3\xfc\xbd\x00\xb5\x05\xaf\x0e\xad\x03\x17\xfbJ\xf8\xb7\x08;\x0f\xd9\x005\xe8\xe0\xe1\xf7\x04\xa7\x12\xe4\xff\xfe\xfaW\xe8\'\xe5\xc9\x03\xbb\xf9/\xf6\x8b\xf5\xa2\xf2\xcb\xf3\xaf\xf7\x0b\xfez\xf9\xa1\xf5\x0f\xf6\x10\xf0|\xe2J\xff\x1b\x17h\x16\xbc\xf5\xc5\xd0\xa8\xdc\xde\x07\xf4$\x15\x12\x8b\t%\xf4?\xdb\xd1\xe6\xa6\x06\xd7/\xe4#\xa3\xff\x89\xdb\xf8\xd36\x11\xae;\n W\xf0\x18\xdb\x16\xf3k\x1dr*\x93\x18E\xfb\xff\xe1L\xeb\x7f\x1a\xd7&P\x12\t\x02\x87\xf2\x05\xfb)\x05\\\r\x9c\x1b3\x06\x0c\xf2K\xf9\xec\xfcd\x15\xe3\x12n\x01\x06\xfc\xc3\xf1c\xfb\x12\x03N\x07\x1b\x06\x1b\x04\xdf\x05\xf1\xf8\x8b\xf5\x99\xe1\xef\xe9\'%\xd8\'\xaf\xf6\xc7\xe0\x93\xe3\xcf\xef\xdc\x07\x9e\x04}\x01}\x04\x04\xebA\xf6_\xf8\xc3\xef3\xf0\x00\xfe\x8c\t\x10\x046\xf2}\xe1\xe4\xef\xb1\x08\xa0\x07\xf7\xfc \xfb\xe4\xee@\x06h\nc\x03\'\xf0\xd6\xf1p\x0f>\x11y\x10\xd9\x08_\x00c\xfd\xcb\x02\xe4\x07H\x07g\x08\x93!\xd7\x1d\xcd\x02L\xeb\x16\xedm\n\xfa\x1eb&\xe1\r\xc0\xfbO\xf5u\xf1k\xfa\xce\x08\xbf\x16\x1d\tx\x02\xbb\x03\xf5\x00\xc7\xf6\x0b\xe7\x7f\xec\xa2\xfaC\x08\x16\x19d\x0b\xc4\xf5\xe7\xe7\x93\xd1\xf6\xe9 
\x0b\xbb\tG\x05\xfe\xf3\t\xfb\x10\xfd\x9b\xf5z\xe4x\xda\xd6\xfc4\x0f@\x1a\xb9\x07\xd7\xe4$\xe3/\xed\x0e\x08\xab\x061\xf6\x08\x07\xe4\x0cI\xf9\x1b\xf3\xca\xf9\x17\xfb\xf5\t\x0b\x0c\xb2\n\x05\x104\xfd4\xf8\xab\xfb\xf4\xfb\xac\x0c>\x19S\rv\x17\r\x0b\xae\xf2\xbb\xff<\xf8\x0b\x05\xe1\x16\xf7\x1d\xdb\x18\x9b\x03\x8f\xfc\xa5\xf29\xf4\xde\x0b\x96\x12\x02\x13\xdd\rr\x08\xfb\xf8\xf4\xe69\xfe\xd1\x05\x04\x00\xda\x0f#\t\xa6\xf4\xb5\xf2\xc3\xf8]\x04\x81\xee\xd5\xf2i\r\xb8\xf8\xff\xf9\xb8\x00J\nF\xdd\xcb\xcf\x10\x00X\x17v\x1b\x0b\xf0\x9c\xe5\x96\xe1Q\xea\x15\xfe\'\x0f\xc8\x16\xc1\x05o\xd5w\xd3\xb0\xfa\xb6\x0e\xc5\x1c\xe2\x11\x12\xf3\x1a\xd9\xa4\xee\x0b\x02\x14\x0c\x99\x1c5\x10\xef\xe8\xf8\xdb;\t\x0f$v\x16\xb6\xfb*\xf0\xfc\xf8\xe0\t\x19\x1c\xfb\x11~\xf6v\xfd2\x0b\xff\t\xf5\x0f\x03\xffW\xff\'\x0e\xdb\x10\xd4\x02\x00\xf6\x99\xfe\'\x08K\x0f-\x06\x89\x10\x81\x05\xa9\xe7\x86\xf9\xe4\xfee\xfe;\x17|\x11|\x03!\xee\xbb\xd7g\xf6\xd7\x154\x15*\x01?\xe8-\xec%\xfa\x97\x05\xfa\xf5\xaf\xf4\x95\xf9Z\xff\x9b\xf6$\xf7\xa5\x03}\xf0\x1e\xf8G\xf4\xe7\xefH\xfan\x0e?\nq\xf2f\xf4\x15\xe0K\xf1[$g\n8\xf7s\xffi\xef\x01\xf3\x94\nk\x12\x00\x00W\xfb\xae\xfcp\x03q\x11\xfc\x0c"\xfb\xa9\xf0}\xf04\x14\xa7)Q\x1c\x1d\xf4K\xebQ\xee\xcb\xefw"\x83@\xcf\x16^\xddF\xed\x07\xff>\x04\xb3\x16\x12\x1dV\x02\xb7\xea\xfb\x0b!\x10{\xfaH\t1\x06\xc0\xdc\x7f\xf5s \xc2!\xd8\xf9\xc8\xe7\x03\xf2\xc5\xe2\xcb\n@\x07\xea\xf8J\x10\x0e\x00\xb1\xf54\xd0\x02\xe4\x81\x0c9\x18R\x13\xc7\xebN\xdf\xf2\xe07\xf6\xb1\x079\x1b\xe8\xff\x98\xf1O\xf4\xe9\xe3a\xeeB\x0b\xa5\x130\x12\xb3\xfc\xf9\xda\xbf\xf3\xd2\x03\xf9\r\xe3\x0e=\x14\x1e\xf5t\xec~\xef\xd6\xff\xbd5T\x19\x90\xfb|\xdff\xd9\x17\r\x18*\xf07\x83\x14P\xd8\xbc\xc8\xcc\xef\xaf/\x1d"\x85\x12\xf1\x17\xd6\xe8l\xe8\r\xe8\xcd\xfd\xf80[\x19\xad\x081\xf4]\xdf\xb0\xf2g\x0c1!d\x0b\n\xed\xa6\xf1\xaf\xfb\xd7\xff\xbc\x13t\xf8d\xe2\x90\xfd\xd2\x12V\x04\x96\xe1\x01\xfaN\x17h\xf0\xb3\xe8\xdd\xf9U\xfe\xf9\xfdR\xf1!\x17P%\xcf\xdf\xca\xb7B\xee\xf4\x0f\xcd)\x01-`\xf1\xe9\xd3u\xd98\xf7\x89\t\x93\x10\n\x1b8\x14f\xe6\xcf\xe3d\xed\x06\x04p\'\x14\x13\x1c\xf7H\xdb_\xeb\xf2!\xd6+J\x1e_\xe9\xba\xc6H\xf2\x04\x11\xb8;\xb95\xdb\xffa\xc7\x9c\xd5\xdb\x13\xdb-\xb3#n\xfaa\xf6\xa8\xfc\x0f\x07\x84\xfaV\x00\xea\x15\xde\x02\x8f\xf4\xf8\xffY\xf8l\x0f\xbc\x0f\xb3\xf8\xba\xe6\x9e\xed*&\x14\xfc.\xe4\xcf\x0c\x14\x02\xc8\xf1\xdb\xf5\x84\xe8y\x13e\x0b\x8b\xe5\x89\x11\x05\xf9\xd9\xe9\xbe\xf5\xeb\xf2\xfa\xfd3\x19\xe7\n\xec\xf4x\xec\x00\xf4\xeb\x00a\xf2\xd4\xf3\xb0\x14\xc5\x1e\xff\x07\xb2\xe3V\xdb\xa9\xf1[\x13l*\x97\xf2`\xec@\xf8\x1d\x06\xf97\xe5\x01\x00\xc5\xea\xd4\xe4\t\xc0=\x08A\x1f\xf09\xcb\r\xf0\xbc\xff\xf2\x04\x18\x1f\xd2\r@\x08H\t\xad\xe9\xeb\xf3u\x0b\x1c\x0f;\x01\x98\x0e&\xfb\x80\xffh\xf5\xff\xff\x89!\xe3\xfd\x99\xf8\x04\xf2\x07\xe5B\x0b\x1c!\xf0\x1c\'\xf87\xbf\xd0\xe5\x06\r\xac#\x91\x1c\x10\xee\x07\xecA\xe7\x1f\xec\xa7\x12\xcd\x07\xac\xfa+\x06\xd0\xe2\xa2\xe9f#\xdd\n\xcd\xfaW\x05\xba\xcb\xb5\xdc\xb8 m# \x10\xf4\xfd\'\xe2\x08\xf4E\t\xb3\xf6\x9e\x05\x9d\x05\xc0\x02\x14\nI\x03!\x06\xa3\x00\x03\xe3\xa7\xf8)\x19Z\x16\xca\xf92\xf0\xfe\x02\x9f\tb\x06\xd2\xff.\xf8\xbe\xfb\x91\xfao\x0f\x17\x0eD\x03(\x07M\xea\xd3\x08p\xf0\xd1\xef+%\r\x01\xd4\x00\xbe\x19n\xfec\xe0\x87\xe2\xe3\xff\xa1\x1b\xb2!_\x065\xef 
\xf0[\xe6\xbf\xf1r\x1b#\x1b\xb8\xff\x80\xfdC\xe2\xa5\xe8\xed\x04H\x12A\x10\xa6\xf6\xb5\xf3t\xf5v\xf4\xec\x05[\x03e\t\xa8\x14\xa2\xd8\x1f\xdd\xbb\x1ct\x0fZ\x00*\x0f#\xea\x19\xe5w\xf8\xc2\x10\xa0!\xbd\x02#\xfa\x16\xed8\xe2\xdc\x03\x84$p\x06f\xff8\x04\'\xf5\x1c\xff\x82\xf1\xd5\xf7^\x14\xaf\x19\xd7\x19j\xe4\xe4\xd7\x18\xfb\x8d#\xd7\x13\x91\t\xc3\xee\x96\xdfI\x0c\xf3\x17z\x10\xa8\xf3y\xe7\xec\xf2\xa3\x1c\\\x02\x85\xfb\x9b\x17\x82\xf7H\xeb\x94\xfa\x90\xff\x00\xf6\xef\xfb\xbc"\xa6!\xe4\xe0f\xe0\x1b\xee\xc5\x03\xf5\x11.\nJ\x01\x10\xfe>\xfa\x85\xfb-\xfc\x00\xe2w\x00\xa8%\x8c\n\xe1\xe2\x84\xf4r\x0bR\n\xb1\x08\xc4\xeac\xd8\xe9\x02\xb2&)\'p\x07\xa6\xc7J\xd0\x83\x18\xc4&\r\x01:\xfe\x0c\xfay\x03\xbd\xfe"\xf6\xc8\x04\x9d\xf2\x85\x00\x0b\x1e\xfb\xf9h\xf4w\x1e3\t\xdf\xd4\x10\xe7\xf5\t\xda\x1ae,\x9c\x04Y\xe0\xce\xd7\xfa\xfdZ3(\x1a\x0b\xe1\xee\xe7\x94\x03A\x12\x0f\x10}\x00\xbe\x04\x90\xe9\xc2\xf4o\x0c|\xf9\xe4\xfb\x8c\x14\xfa\x06\x85\xfe\x01\xef(\xf2\n\xff\xe1\xfd\xdf\x15\x0c\xf9\xd1\xef\xd1\xee\xe1\n\x051\x90\xe3P\xd0\xea\x0c\x9f\x17\x9a\x05$\xf65\xef\xed\xf2E\x12\xe6\x1a\xac\xf3\x9b\xf0\xa4\xf9\x89\x0b\xdc\x08B\xdf\x81\x04%#"\xfb7\xf6@\xfd\xf7\xeel\x07\xd9\x04\xef\t\xc1\x1aL\xe8\xba\xdb2\x04\xb7\x18\x98\x16\x87\xff}\xe3%\xec\x18\t\x16\x1cz\xff4\xf4\xbe\x04\xcb\xf7!\xfaw\x0b\x1e\x02r\xff\x8a\xf7v\xff?\r\x7f\xf7\x01\xfc\xc2\r\xdf\t)\xfc"\xe6\xfb\xee\xc1\x193\x1a\x9c\xffA\xfc\xc9\xebJ\xde\x8d\x16\x8b4^\xfd\xc9\xde\xf9\xe2^\xfdU\x1e\xbe\x13-\x05\x0e\t\x9f\xdf\xc3\xc7\x0f\x0f\xe5/P\x17\xb6\x01\xee\xdb\xdb\xe8g\xfb\x1d\x02\xdb\x15:\x1c\x14\xf1!\xdd\xc5\xf6\xb9\x11\xff\x1e\xae\xefo\xe2\x1a\xf8\xbf\x12\x81\n9\xf6~\x0f\xa7\x05\xf1\xfa\x92\xdd\x95\xe4\x0c e&\xa4\x0e\t\xf0\xc8\xea\x91\xea\x8f\xf9\x90\x1aQ \x81\x04\xc9\xe6\xd9\xea\xf1\x01v\nD\x14d\x17\xf4\xee\xed\xdf\x18\xec<\r 2?\x0e\xcf\xf1\x00\xea\'\xdf\x04\xf5\xf0\x1d\\/\xee\x0b\xa6\xdf\x89\xd5L\xfe\x1c\x18k\x0b+\x0b-\xf2"\xfb\xcc\xfd \xef\xdb\x13\xe2\x03 
\xf4&\xff\x98\x04\x19\xfas\xec\x98\x0e\x8c#\x01\xf0\x1b\xd4B\xff\x05\x14\xaa\nQ\x05\x7f\xf9\xa4\xfe\xb1\xec\xcf\xfa0\x13\xaa\xf4K\xfdd\x12\xb0\x12\xf2\xfa\x88\xd9\xfc\xe4\xf5\x0f\xc5*\x9a\x15\xc1\xfd\x0c\xe0g\xd9{\xfd\xa3\x1d~*\xee\xfdE\xea\xf2\xf0\x13\xec\xb1\n\x94\x1e\x92\t\x1c\xf9.\xfa\x1a\xdey\xf7\xb3"\xf4\x1c!\n3\xe6q\xdar\xf3M\x13\xf7\x1a\xf2%\xfa\x00<\xcc^\xe8\xf8\xfdU\x10\x19&\x91\r\xc4\xfd\xda\xe3\xac\xe1\xf2\xfa\x8b\t<"\xe4\x16\t\x00~\xd8\xa0\xe3\xfd\x06\x83\x17\xfd\x1b\xc0\xf1\xc2\xdb\xa2\xfb\xe5\x172\x14\xee\xfc\x7f\xf4X\xea(\xf2\x07\x07>\x04\x9e\x0b\xb3\x18\x1d\x00\xeb\xe4n\xf3#\xf3\x97\x02\xd9\x0b\xe2\r\xd8\x11Y\xf9\xbd\xf1\xe2\xf2\xf4\xf7\xfe\x08\x9a\x10\xf5\x02\xb6\x03i\xf7W\xf0\xab\xfc\x1c\to\x13\xd4\rD\xf5d\xe4+\xfb\xec\x07\x9f\x06\x82\r\x13\x08\x06\xf7\x03\xeb\xc6\xf5\x89\x19m\x1d\x7f\xfc\x99\xdd\x11\xeb\xa4\xfe\xa1\x1e\'\x1d\x8d\x07\xd9\xf6e\xde+\xef\xda\x04\xd6\x0b\xa5\x11\xc4\x12c\xf1\xab\xedN\xef\xcf\xf8\xd6\x167\x14(\xfaA\xe6\x88\xf4\xc5\x0e\xc0\x02\x10\xf9\x05\x13\xb3\xfd\xad\xe2Z\xef\x8d\t\xb8\x11\x94\x12X\n\xb3\xef\xb9\xd8e\xf2]\x1a\xfe\x13^\x04K\x01z\xf6\x8a\xed\xa5\x00\xa8\x06k\nD\x0b\x15\xf6`\xed\x98\x07J\x06+\rY\xff\x8e\xeb\x11\x0c\xd2\xfe\xf6\xf6"\x10\x9a\x06j\xefa\xfd\x8c\t\xe2\x05\xc0\x04v\xfc,\xf9^\xfaT\xfb\x8a\x108\n\x91\xf7v\x06\x87\xf9E\xf9\x98\x03b\xfb\x81\x02\xa0\x0b\x0c\x00\xea\xf2n\x01\xc3\x0f\x01\x04\xae\xe8\xc7\xea\xc8\x05>\x19\xe9\x0e\x18\x00\xa8\xea;\xe3\xdd\x04\xe2\x10\x1e\x0b\x9e\x02c\xef%\xf0\xd3\x08\xb4\n\xad\x03\x16\xf6\xb8\xfe\x14\xf4\xc5\xec\xad\x15\x10\x1a\xca\x08L\xee\xdf\xde\xc1\xf3i\n#\x17*\x1d\xb7\x00\x8e\xdb?\xee\xda\x04\x88\r$\x0f\xe7\x06`\xf3O\xf2s\xfd)\r\xb3\x19\xa3\xf9\xcf\xe24\xf2\x17\x05\xc6\x16\x17\x14(\x08\xbb\xf4\x82\xe2\xe9\xf2\xe6\x04\xc3\x13\x8f\x11\xcd\x08~\xf9\xd7\xea6\xf6\x98\x06\xc3\x0b\xc4\x0eD\xfd\xf1\xee\xc0\xfb[\x07Q\x06\xfb\x08\xa1\x06\x9c\xf2\xfb\xefu\xff\xd9\x05<\x08\x1f\x0b\xfd\x04=\xf9\xed\xe8i\xf5\xc7\x0c\xbf\x0c\x13\x05\x7f\xfb\x93\xf7<\xf6+\x00\x0c\x04\xed\xfe\x07\xfe\xfa\xf95\x00\x9c\x01\x04\xfe\x10\x06.\xfd\xeb\xf6\xa3\xf9\x94\xf6\x17\x03\xb4\x0f/\x0bn\xfc\x0b\xf1\xeb\xf1\x80\x05x\x11\xbe\x01v\xf8\x1c\xfe\x16\x02\xe5\x04\xb0\x04`\x05\xa3\xfd\x01\xf4D\xf7"\x0bF\t@\x08n\x07g\xf9\xb1\xf7\x7f\xf3+\xfc+\x08\x8b\x0e\xd5\x10\x98\xf8\x03\xf4\xcf\xfe\x1e\xf6\xf2\xff\x07\x13\xa9\x03\x1a\xf8\xc3\xf6\xac\xfe\x92\r\xb2\x02\xdf\x02&\x01G\xe86\xef\xe4\x0cE\x17B\x0b\xf5\xfc\xfc\xf0~\xf2\xe4\xfa)\x02\xbe\x0f\xb6\x02\xba\xf7\x8d\xfb*\x004\x07,\xfdx\xf2,\xfd\x8c\x04\xb8\x00\xc1\xfef\xfes\xff4\xff\xbe\xfcq\x00,\x01\xb8\x013\xfd.\xfb\r\x02\xc1\x02\xe6\x00\xbb\x00\x8f\x01"\xfe\xbb\xfd\x82\xfbG\x01\x91\x06P\x02\xec\xfc\x13\xfb\x0c\x05\x9a\x03}\x00\x10\x00\xc7\xffa\xfbB\x00\xdb\t>\x03A\xff\xf6\xffH\xff4\xfd\x11\xff\x91\x03\xc5\x06\xd7\x02\xce\xfd\xf0\xfcj\xfc\xf2\x01D\t#\x04w\xf9\x13\xf9\xe7\x03H\x04\xe9\xfb\x08\xfe\x18\x02\xbd\xff8\xfa\x97\x003\x01\xe9\xffe\x01\xf3\xfc\xb9\xfc\xd5\xf9p\xf7\'\x05o\x0e\xb4\x02\xd5\xf6\x13\xf5e\xfc`\x00R\x06\x05\t/\xff\xfb\xf7,\xf8\x82\xfe\xb9\nU\x05\xec\xfbx\xfcx\xfc2\xfc\xa2\x01\xf0\tT\x03M\xfd\x96\xf7\xbf\xfc\x9b\x04\xdf\x02\xfe\x03\xaa\x00\x06\xffr\x01\xf4\xfd\x93\xfb\x8f\x022\x04\xe9\x00\xcc\x01\x97\x02\xe9\xfc1\xfc\xce\x02\xd9\x03k\xff\xe4\xfb6\x01\xa7\x00~\x00H\x02e\xfe{\x02\xb9\xff\xc0\xf9m\x00\xf5\x02\xcf\x00\xf0\xff\xdb\xfe\x81\xfeA\x019\x01\x00\x00\x13\xfe\xc8\xfa\x8c\x00\xab\x02\xd7\xfd\xe9\x00*\x02\xad\xfe\x87\xfdU\xfc\xfe\xfd/\x01\x04\xff\xf0\xfd\xc2\x02\xaa\x00\x9e\xfda\xffy\xff\x15\xfe,\xfe6\x01\xbc\xff\xd1\xfd\x9c\x01Z\x03w\xff\xc3\x00e\xff\xf1\xfa\x08\x017\x02\xa9\xff+\x00,\xfe
 \xffk\x03\x00\x03\xcd\xff\xd6\xfa\xc7\xf8Q\x00\x03\x05\t\x04&\x00\xac\xfd\xaf\xfbP\xfdw\x03&\x02\xb5\xfe\xf1\xfb\x97\xfe\xd1\x03\x99\x05\x89\x01\x81\xfb\xdd\xfc\x95\xff\xb6\x00T\x03$\x02\xb6\x02[\x04\x0e\xfd\xc3\xf9\xab\xfdz\x02\xd1\x06\x82\x04F\xfd\xad\xf8\xcf\xfb\xed\x01\xdc\x06\xf1\x04\x1a\xfd&\xf7_\xf7\xe7\xff\xcc\x07\xaf\x06\x8c\xff-\xf8\x87\xf6`\xfc\xb0\x05r\x06Q\x00\x8c\xf8;\xf4\xfa\xfas\x02\xa3\x05 \x003\xf9\x1c\xf7\xb5\xf8\xd9\xff\x7f\x07\xd9\x05\xfe\xfd\xdb\xfbQ\x01B\x08\x16\x0e\xd3\r\xa6\t_\x07r\x08\xd3\x0e\xeb\x15\x01\x15\xcb\x108\x0c\xf4\n\xc1\x0e\xaf\x11r\x0f?\x07\x0b\x03$\x04\xbf\x04^\x04\xa5\xffV\xf9\xc0\xf5l\xf3\xc6\xf3\xb9\xf5\xf3\xf3\xa4\xf0\xa2\xeeV\xee\xa9\xee\xc0\xef\xaf\xf1\xaf\xf3s\xf3\xc5\xf3\x8d\xf6^\xf9h\xfd\xc2\xfe\xb6\xfd+\xfe\xf0\xfe\x8a\x02\xdb\x08\x87\n\x96\x05\xf2\x00\x87\x01\xc3\x06\xde\x08#\x07d\x04\x9a\xfd\xd5\xf9\x8c\xfd\x7f\x05v\x06\xc7\xfb~\xf1\x87\xef\xcc\xf6i\xfe\x08\xff\x06\xf9\xd6\xf0`\xf0\xec\xf4\xbd\xf9\x83\xfe\xf8\xfa\x84\xf6\x93\xf6\xa3\xfcb\x00\xc5\xfd\xab\xfd\xd0\xfdy\xfe\xc2\xff\xc8\xff\x92\x00\xfb\x02\xa6\x02z\x04\xa1\xffy\xfa\xc4\xfb=\xff\xe7\x05\x02\x06\xe0\x00\xda\xfc*\xfc]\xfcp\xff\xf4\xff\xf5\x00_\x01\x00\x00/\xff\xca\xff\xd8\x00\xc3\x00\xde\xffd\xfd\xc7\xfc(\x00\n\x046\x04K\x03U\x01u\xff\xba\x01\xf9\x02\x87\x02f\x05\x8a\x0b\xd7\x11\xc1\x14\xae\x11\xa0\r\xf5\x0e_\x14Q\x1a\x11\x1e\xf0\x1dP\x1c\x1c\x1c\x86\x1c\xca\x1b\xaf\x17\x95\x12M\x10@\x0f\x1f\rA\n\xe5\x06\xbc\x02\x80\xfe7\xf9b\xf3/\xefm\xee\x82\xeeX\xed\xe0\xeb\xd9\xea\x97\xea\xe1\xea/\xec\x07\xed\x8a\xebY\xea\t\xed\xab\xf3\xf0\xf8j\xfaK\xf8\x1e\xf6a\xf7\x1a\xfa\x06\xfdh\xfeM\xfd\x0f\xfb\x1e\xfbl\xfd\x91\xfe\x07\xfd\xfd\xf9"\xf8>\xf7\x90\xf7\xb9\xf8$\xf9\xab\xf8\n\xf7w\xf6\xbd\xf7\xd9\xf8\x01\xf9\xac\xf8\xfb\xf81\xfa\xa6\xfb\x90\xfd\xa2\xff\xb6\xff\xd7\xfe\xc7\xff\xe6\x00\xf0\x01\x17\x03\xe9\x03\x81\x04S\x045\x04\'\x05\xf2\x05\xcf\x05\xef\x04T\x04>\x05-\x06X\x06r\x06\x1c\x06L\x05\x80\x058\x06\xc1\x06_\x07\xb8\x06<\x06l\x07\x0e\x08\xd8\x07\x91\x07\xcf\x06B\x06%\x06\x1c\x06\x11\x06\x15\x06h\x04.\x03\xf3\x02\xbd\x014\x01\x7f\x00\x0b\xff\xed\xfd\x1c\xfe\xd8\xfd\xb9\xfd\xc0\xfd\xbf\xfd\x8d\xfc[\xfc\x9f\xfcF\xfd\x03\xfe\xe6\xfd\x1f\xff\x06\x00\xdf\xffV\xff\xa2\xff\xe6\xff8\xff#\xff\xd4\xff\xcf\xff\\\xff\xbb\xfeO\xfem\xfd\xbd\xfc\x7f\xfc\xb5\xfb\x8e\xfb:\xfb\xe1\xfa\r\xfb\x88\xfa-\xfa\xb1\xf9\xfc\xf8]\xf9\x89\xf9\xc6\xf9\x97\xf9\xa3\xf9>\xfaE\xfa_\xfa\x04\xfas\xfa\x05\xfb\x90\xfb!\xfc/\xfc\xd3\xfc*\xfd\xd3\xfcA\xfd4\xfe\x82\xfe]\xfe\xb2\xfe\x0b\xff9\xff\x9b\xff\xa8\xff,\x00E\x00G\x00\xda\xff\xc0\xff\xe1\xff\x0c\x00\xcb\xff\xb8\xff\xa4\xff\xf6\xfeh\xff\xcf\xff\x92\x00\xf5\xff\xb0\xff:\x01\'\x04\x8b\x07<\n\x85\x0b}\x0c4\x0f\xc0\x12\xf9\x16<\x1bj\x1f\x03 \xaf\x1e\xdd\x1e\xce!4$\xbb"\x15 \xa9\x1dk\x1bt\x18\xe4\x13\x88\x10\xd9\r\x82\t\x07\x04\\\xfe&\xfb\xc3\xf9Q\xf6\xe6\xf1\x87\xee\xb9\xec\xd3\xea\x80\xe9Q\xe9\xc3\xe9\x1d\xea\xe6\xe8\xc1\xe7\x97\xe8L\xea\x85\xeb 
\xec\xb3\xec\xbd\xee6\xf0F\xf0\xeb\xf0\xec\xf1`\xf3<\xf4\xd1\xf4;\xf6\xc4\xf7\xbb\xf7|\xf7^\xf9\x07\xfby\xfb&\xfb\x84\xfb\x84\xfc\x9b\xfci\xfc\xfe\xfd\xf4\xfeB\xfe\x89\xfdM\xfd\xdb\xfdF\xfe\xc2\xfd\xb5\xfd\x88\xfe\x80\xfe\xf7\xfd\xff\xfd\xe0\xfep\xffK\xff\x14\xff\x19\x00\x05\x01m\x012\x02+\x03\xd1\x03\x07\x04h\x04]\x05\xb6\x06\xbc\x07a\x08\xd8\x08W\t\xee\t\xc1\nb\x0b\xaf\x0b\xdc\x0b\x98\x0b\xb4\x0b\xd4\x0bW\x0b\xde\n\n\n\xb5\x08\xa4\x07\xc5\x06\x8b\x058\x04n\x02N\x00\x10\xffI\xfe+\xfd\x0b\xfcV\xfa\xa9\xf8)\xf8\x9d\xf7Z\xf7\xb5\xf7\xb5\xf7i\xf7\x9a\xf7>\xf8\xf3\xf8\xdd\xf9\xa4\xfa[\xfb!\xfd\x97\xfe\xd3\xff\x1c\x01j\x028\x03\xa9\x03\xab\x04\xdc\x05\xc3\x06\xb6\x06&\x06$\x06M\x06\x8b\x05.\x04q\x03\xc7\x02\x1e\x01I\xff\xfe\xfe\xdc\xfe\x97\xfd\x9b\xfb\x98\xfa0\xfal\xf9\xb6\xf8\xd2\xf8k\xf9u\xf9\xb2\xf8H\xf8&\xf9\x81\xf9y\xf9\xaf\xf9 \xfa\xd7\xfay\xfbw\xfb\xbc\xfb\xa1\xfc\x84\xfc1\xfcl\xfc\x9c\xfd4\xfe\xe6\xfd\xe7\xfd\x07\xfe\x16\xfe\xe8\xfd\x87\xfdT\xfd\xa4\xfd\x7f\xfd\x85\xfd\x03\xfd\xab\xfc\x85\xfc\xef\xfb\x07\xfc\x81\xfc\xcb\xfc\xe3\xfc$\xfd\r\xfd\x85\xfdN\xfd\xf5\xfdA\xff\x17\x00\x08\x01\xff\x01\x8c\x02\x99\x03/\x05\xe4\x05\xbc\x06\x04\t\xde\x0c\x81\x10z\x12.\x14\x83\x17L\x1aY\x1b_\x1cu\x1f\xb0#\x1b%W#\t"\x87"A!\x18\x1d\xde\x19\x90\x19\xff\x16\x99\x0f\x9b\x08\x90\x06Z\x05i\x00\x8f\xf9\x80\xf5\x88\xf3\xaf\xef|\xea\x98\xe8\x0e\xeac\xea \xe7v\xe4F\xe5&\xe7\x1f\xe7c\xe6\xcf\xe7\'\xea\xd6\xea\r\xeb\xac\xecD\xefH\xf0\xc3\xef\xf4\xf0\x8e\xf3\x02\xf5\x87\xf5k\xf6\xdf\xf7\xca\xf8O\xf9\xc8\xfaj\xfc\xa2\xfc\xf9\xfb\xa3\xfc\xef\xfd\x7f\xfeP\xfe\xcc\xfeF\xffL\xfe\xf8\xfd\'\xff\xab\xff\xcc\xfe\xcd\xfd$\xfe6\xff\x82\xfe\x8f\xfe\xff\xffS\x00g\xffE\xffn\x00\xe9\x01\xdd\x01\xfb\x01d\x03\x16\x04z\x044\x05\x81\x06\xcb\x07\x04\x08\xf2\x07\x00\t)\n\x8b\n\xb0\n\xfb\n[\x0b?\x0b\xde\n\xd2\n\xe3\nG\nH\t\x8b\x08\x0e\x08\xa1\x07z\x06[\x05d\x04H\x03\xbd\x01\x91\x00\xf3\xff\xf5\xfeY\xfd\x18\xfc\x82\xfb\xb4\xfal\xf9\xaf\xf8[\xf8\x99\xf7p\xf7e\xf7\xcc\xf6\xa7\xf65\xf7\xab\xf7K\xf8j\xf9\xef\xf9\x82\xfa\x88\xfb\x83\xfc\xe6\xfdI\xffm\x00\x89\x01\x95\x02=\x03\x9d\x03M\x04\x1a\x05\x12\x05\xeb\x04\xb4\x04\xa0\x04a\x04s\x03\x8d\x02\xf1\x01c\x01X\x006\xff\x06\xff\x8c\xfe\x8e\xfd\xd2\xfc\xc9\xfc\xb5\xfcJ\xfc\xf1\xfb\xf7\xfb"\xfcZ\xfc\x88\xfc\xfc\xfcr\xfdu\xfd9\xfd\x8f\xfd6\xfe\x82\xfe\xdb\xfe.\xff,\xff\x0f\xff\x0f\xff1\xff\x9b\xff\x08\x00w\xff"\xffW\xff?\xff\xd3\xfe\xaa\xfe+\xfe\xca\xfdD\xfd\x8c\xfc0\xfc\x02\xfcx\xfb\xc7\xfae\xfa{\xfaK\xfaM\xf96\xf9\xdf\xf9o\xfab\xfaj\xfa\xcb\xfa\xef\xfa\x0b\xfbh\xfb\xf2\xfb\x8c\xfc/\xfd\x95\xfdI\xfe\x1a\xff\x1c\x00\xb1\x00\x8c\x01\x99\x03@\x06\xee\x07\x02\nf\x0e6\x13\x9c\x15Z\x16i\x18Y\x1dL!\xf4!V"\xb2$Y&\x16$\xef \xb4 \xa5 \xff\x1b6\x15\x13\x12\x04\x11\xb6\x0c\x14\x05_\xffj\xfd3\xfa\xb3\xf3\xdf\xee?\xeeh\xed\xfd\xe8\xed\xe4)\xe5j\xe7a\xe6\xab\xe3~\xe4\xe6\xe7\xf6\xe8\x16\xe8A\xe9\xfc\xec2\xef\xd2\xee\xc0\xef\xb6\xf2\xbd\xf4\xd3\xf4C\xf5M\xf7\xbe\xf8\xe6\xf8e\xf9\xb7\xfah\xfbK\xfbw\xfb 
\xfc\xb3\xfc\x0e\xfdo\xfdy\xfd\x1a\xfd\x1e\xfd\xde\xfd4\xfe\x98\xfdS\xfd\xe1\xfd\xf6\xfd\x17\xfdB\xfd\xab\xfe1\xffN\xfeJ\xfe\xf2\xff\x0b\x01\xc8\x00J\x01\x18\x03?\x048\x04\xf6\x04\xff\x06{\x08u\x08\xdc\x08\x83\n\xb0\x0b\xd4\x0b\xff\x0b\x07\r\xbe\r^\r#\r\xa0\r\xd4\r\x0e\r\x01\x0c\x83\x0b[\x0bh\n\xe3\x08\xc0\x07\xcb\x06;\x05D\x03\xae\x01w\x00\xfa\xfe\xf9\xfcU\xfb8\xfa\x08\xf9\x89\xf7T\xf6\xcd\xf5[\xf5\x80\xf4%\xf4\x81\xf4\xc5\xf4\xcf\xf46\xf5\t\xf6\xe1\xf6\xd0\xf7\xa1\xf8~\xf9\x9e\xfa\xc2\xfb\xf1\xfc>\xfe\xd4\xff\xc4\x00\x84\x01\xba\x02\xa0\x03\xa7\x04\x8a\x05e\x06H\x07\x97\x07\x85\x07\x8a\x07\x03\x08\xe4\x07<\x07\xbf\x062\x06\xbf\x05\xd1\x04\xc2\x03\xd2\x02\xfc\x01\x1f\x01+\x00p\xff\t\xff4\xfej\xfd\x08\xfd\x00\xfd\xd3\xfcn\xfc<\xfcV\xfcx\xfcz\xfc\xb7\xfc-\xfdv\xfdw\xfdm\xfd\xd0\xfd_\xfer\xfe\x81\xfe\xc7\xfe\xe7\xfe\xe7\xfe\xa4\xfe\x89\xfe\xa2\xfet\xfe\xa5\xfd\xf0\xfc\xb9\xfco\xfc\xa8\xfb\xdb\xfa4\xfa\x9e\xf9\xe1\xf8(\xf8\xe5\xf7\xc3\xf7o\xf7.\xf7K\xf7\xb1\xf7\xe7\xf7\x16\xf8\x9d\xf8\x81\xf9s\xfa\x1d\xfb\xeb\xfb\xd0\xfco\xfd=\xfe,\xff\xe2\xff\xb9\x00{\x01\xeb\x01Y\x02\x94\x02\xf6\x02\xab\x03\x1e\x04x\x04"\x05\xf9\x05,\x07\x99\x08\x1c\n\xf2\x0b{\r\x0b\x0f\xda\x11S\x15\x99\x17\x81\x18\x15\x1a\xaf\x1c\x0f\x1e\xbf\x1d6\x1e\xc3\x1f\x15\x1fp\x1b\xa1\x18\xfa\x17#\x16\x04\x11\x02\x0c\x8d\t\x86\x06\x80\x00\xa0\xfaV\xf8\xb5\xf6\xec\xf1X\xec\x10\xea\x02\xea\xf7\xe7\xe9\xe4d\xe4\x1f\xe6G\xe6\xbd\xe4\x04\xe5\xbb\xe7\xdd\xe9\x02\xea\x82\xea\xfc\xec\xa4\xef\xdf\xf0k\xf1\x19\xf3u\xf5\xd6\xf6B\xf7O\xf8|\xfaO\xfcX\xfc6\xfc\xca\xfd\xf8\xff\x84\x00\xe0\xffy\x00\x08\x02)\x02\x0b\x01M\x01\t\x035\x03s\x01\xa8\x00\xe5\x01\x87\x029\x01:\x00\xfe\x00\x92\x01\x83\x00\xbf\xff\xcc\x00\xfd\x01g\x01{\x00.\x01\x89\x02\xc3\x02F\x02\x02\x03\x8a\x04\xf3\x04\xb9\x04n\x05\xd3\x06o\x07\x14\x07T\x07h\x08\xe3\x08\x8b\x08p\x08\xef\x08\x17\tl\x08\xf3\x07\xe2\x07m\x075\x06\x05\x05r\x04\xd9\x03\xa1\x02S\x01J\x001\xff\xc7\xfd\x85\xfc\xbb\xfb\r\xfb\x15\xfa\x1a\xf9\xa3\xf8T\xf8\xf1\xf7\xbe\xf7\xf0\xf7.\xf81\xf8\x86\xf8?\xf9\xfe\xf9\xac\xfar\xfbT\xfc!\xfd\xd8\xfd\xa8\xfe|\xff1\x00\xac\x00H\x01\xde\x016\x02^\x02\x91\x02\x07\x03[\x03g\x03\x87\x03\x9d\x03\xaa\x03n\x03n\x03@\x04N\x05\\\x05\xd7\x040\x05+\x06f\x06\xef\x05T\x06U\x07-\x07\xcf\x05I\x05\x06\x06\xad\x05\xb4\x03\\\x02p\x02\x06\x02"\x00\xa9\xfe\xa0\xfe0\xfex\xfc.\xfb[\xfb\x83\xfb\x91\xfa~\xf9\x95\xf9\xdf\xf9\x84\xf9\x1e\xf9W\xf9\x8f\xf9)\xf9\xc8\xf8\xfe\xf8\x81\xf9\xb1\xf9\x91\xf9\xa9\xf9\xe3\xf9:\xfa\xa4\xfa\x08\xfbl\xfb\xe0\xfbT\xfc\xb9\xfc/\xfd\xe8\xfd\x85\xfe\xc2\xfe\xf2\xfeN\xff\xc8\xff\x05\x00#\x00T\x00|\x00\x89\x00\xa1\x00\xd1\x00\xec\x00\xf4\x00\xd9\x00\xcf\x00\xff\x00#\x01%\x01&\x01\xf1\x00\xc7\x00\xbf\x00\x8f\x00i\x00N\x00(\x00\xf8\xff\xe1\xff!\x00Y\x00E\x000\x00\xc3\x00\xe7\x01*\x03\x99\x04c\x06Y\x08\x0c\nM\x0b\xea\x0cZ\x0f\xcd\x11>\x13$\x14C\x154\x16\x13\x16S\x15\xeb\x14h\x14\x81\x12\xa4\x0fF\rR\x0bl\x08\x93\x04&\x01r\xfeh\xfb\xe4\xf7\x17\xf53\xf3\'\xf1\xdb\xeeD\xed\xe0\xec\x9a\xec\x00\xec\xeb\xeb\xa3\xec~\xed-\xeeR\xef\xff\xf0p\xf2\x87\xf3\xb7\xf4R\xf6\xfd\xf7<\xf9^\xfa\x92\xfb|\xfc_\xfdY\xfe~\xffK\x00\x95\x00\xe8\x00[\x01\xb8\x01\xc9\x01\xe8\x014\x02\x1e\x02\xa9\x01L\x01/\x01\xde\x00\x08\x00G\xff\xf1\xfe\x9b\xfe\x0f\xfer\xfd\x10\xfd\xa7\xfc\x00\xfc\xb4\xfb\xe3\xfb\x10\xfc\xf5\xfb\xd4\xfb\x1f\xfc\x81\xfc\xb2\xfc\x08\xfd\xaa\xfdX\xfe\xdb\xfeT\xff\'\x00\x13\x01\xbb\x01\\\x02&\x03\x0b\x04\xec\x04\xb3\x05w\x061\x07\xb9\x07,\x08\x90\x08\xe5\x08\r\t\x03\t\xe4\x08\x9e\x08&\x08y\x07\xa3\x06\xa8\x05\xa3\x04\x9f\x03\x9b\x02}\x01T\x00+\xff\x12\xfe\x13\xfd;\xfc\x83\xfb\xe8\xfaa\xfa\x15
\xfa\x04\xfa\x10\xfa:\xfa\xa0\xfa0\xfb\xce\xfbh\xfc\x01\xfd\xcb\xfd\x97\xfec\xffW\x007\x01\xea\x01^\x02\xa7\x02\t\x03t\x03\xb8\x03\xd8\x03\xdb\x03\xc9\x03\x8e\x03/\x03\xc3\x02H\x02\xa2\x01\xf8\x00f\x00\xee\xffz\xff\xe1\xfe(\xfe\x7f\xfd\xf7\xfc\xa3\xfcg\xfcC\xfcN\xfcL\xfcH\xfc\\\xfc\x9f\xfc\x13\xfd|\xfd\xdd\xfdv\xfe3\xff\xc7\xffH\x00\xe6\x00\x97\x01#\x02\x96\x02(\x03\xc0\x03*\x04a\x04\x90\x04\xd5\x04\xf8\x04\xd3\x04\xa9\x04\xa0\x04\x87\x047\x04\xf0\x03\xa3\x03/\x03\x9b\x02\x1c\x02\xbc\x01[\x01\r\x01\xa2\x00\x0f\x00\xa5\xffb\xff\x1a\xff\xb6\xfeg\xfeT\xfeh\xfeN\xfe\x10\xfe\xd2\xfd\xb3\xfd\xc1\xfd\xc9\xfd\xc6\xfd\xb6\xfd\xb5\xfd\xb1\xfd\xc5\xfd\xe2\xfd\xf2\xfd\xd6\xfd\xa9\xfd\xde\xfdU\xfe\xca\xfe\xf0\xfe\x1f\xff\x9d\xff\'\x00\x87\x00J\x01\xe6\x01\xbe\x00C\xff\x97\x01<\x07\x14\t\xbc\x03~\xfei\xffn\x03\xf0\x04\x80\x04\xed\x03\xe0\x00H\xfbJ\xf9V\xfd)\x00#\xfc\x88\xf6#\xf6\xdc\xf8 \xf9\x9e\xf6\xed\xf4\xc5\xf4\xb8\xf5\xb3\xf7\x0b\xfa\xc8\xfa\xdf\xf8*\xf7\xae\xf8\x08\xfd\x01\x01\x9c\x01\xd3\xff\xe3\xfeW\x006\x03\x01\x05X\x05\x0e\x05q\x04\x16\x04B\x04\xb0\x04\x14\x04\xe8\x01!\x00\x14\x00\xf6\x00\x7f\x00t\xfe\x0b\xfc\x9a\xfa\xc7\xf9\x10\xf9i\xf9V\xfca\x00\x0e\x01\xa1\xfd<\xfb\xe4\xfc\xd1\x01:\x08\t\x0f$\x13c\x0f^\x08\r\x08=\x10\xe0\x17M\x18\xda\x15\x90\x14\x97\x11A\x0c\x08\nk\x0cr\r\xe6\t\xed\x06\xaf\x05\x90\x01-\xf9\xed\xf3\x92\xf6\xfa\xfad\xfb\xe8\xf7X\xf3r\xee\xfd\xea\xf3\xech\xf3\xec\xf7\\\xf7j\xf4\x84\xf2j\xf2\x1e\xf4\xa4\xf8\x0c\xfe\xe0\x00\x88\x00e\xff+\xff\xc8\xff\xba\x01,\x05l\x08$\t\xce\x07\xc7\x05\x81\x03\x0e\x02\xd5\x02\xa9\x05\x05\x07\xda\x04O\x00\x10\xfc\xb9\xf9\xc7\xf9\xfe\xfbG\xfdc\xfb\x89\xf7\xb9\xf4/\xf4\x8d\xf4u\xf5\xee\xf6\x03\xf8\xdb\xf7\xe2\xf6\xa8\xf6\x85\xf7,\xf9\x84\xfb"\xfe\xcb\xff\xde\xff\x8d\xff9\x00^\x02\xb5\x04^\x06P\x07\xa4\x07y\x07\x18\x072\x07\xc2\x07\x8b\x08\xcf\x08j\x08Z\x07\xaf\x051\x04v\x03\x93\x03\xb6\x03\x0f\x03\xa8\x01\x17\x00\xe0\xfe?\xfe\x1a\xfes\xfe\xcd\xfe\xa0\xfe\xf1\xfd\\\xfdi\xfd\xc0\xfdC\xfe\x0c\xff\x06\x00\x93\x00z\x00<\x00d\x00\xdf\x00\x96\x01X\x02\xf0\x02\x02\x03]\x02l\x01\x02\x01M\x01\x02\x02O\x02\xdc\x01\xd7\x00\x94\xff\x08\xffn\xff\x0e\x00h\xff<\xfe\xab\xfe\x15\x00y\xff\xbf\xfcG\xfbB\xfd\x1a\x00\xca\x00\x1f\xff\xa9\xfc<\xfb1\xfc\t\xff\xa4\x00n\xffn\xfd\xf8\xfc\x84\xfd\xb6\xfd \xfe 
\xff\x92\xff-\xff\xd8\xfe\xd7\xfe\xb2\xfe\xb2\xfe\xd7\xffN\x01\xa4\x01\x16\x01\xb0\x00\xc9\x00\n\x01\x9c\x01\xac\x02\x92\x03\x89\x03\xfb\x02\x8d\x02{\x02\xf2\x02\xd7\x03\x8e\x04F\x04l\x03\xc0\x02\x9c\x02\xac\x02\xf3\x02&\x03\xb5\x02\xae\x01\xce\x00\x93\x00\x8e\x00,\x00\xbe\xffe\xff\xf9\xfei\xfe\x00\xfe\xcb\xfdz\xfdI\xfd~\xfd\xc2\xfdX\xfd\xaf\xfc\xe9\xfc\xb1\xfdv\xfes\xfe\x1a\xfe\x05\xfek\xfeb\xff*\x00;\x00\xe8\xff\x05\x00d\x00\xa5\x00\xb8\x00\x15\x01Q\x01\x01\x01\xc1\x00\xa7\x00\x86\x00*\x00\xf4\xff+\x00;\x00\xc5\xff\xfe\xfei\xfe3\xfe7\xfeZ\xfeh\xfe\x13\xfe}\xfdH\xfdq\xfd\xc8\xfd\n\xfe]\xfe\xc3\xfe\xfb\xfe\x03\xff,\xff\x90\xff#\x00\xb4\x00\x11\x01/\x01\x0e\x01%\x01s\x01\xc7\x01\x08\x02\x16\x02\x02\x02\xaa\x01U\x01K\x01T\x01Q\x01-\x01\xec\x00\x85\x00\xf8\xff\xb9\xff\xeb\xff\x0f\x00\xe5\xff\xa2\xff}\xffc\xff5\xffK\xff\x85\xff\xbc\xff\xcc\xff\xbf\xff\xb3\xff\x9a\xff\xa0\xff\xca\xff\x0f\x003\x00$\x00\xf6\xff\xc6\xff\xba\xff\xc2\xff\xcb\xff\xb3\xff\x97\xffz\xffP\xff.\xff\x15\xff\x13\xff&\xff%\xff:\xff;\xffL\xff_\xff\x97\xff\xe6\xff\x1c\x004\x00F\x00o\x00\xcc\x00\x17\x01)\x01\x19\x01\x0f\x01\x19\x014\x01U\x01r\x01J\x01\xec\x00\xb5\x00\xbd\x00\xcf\x00\x9d\x00U\x00\x01\x00\xb7\xffy\xffh\xffk\xff<\xff\xec\xfe\xaf\xfe\x9a\xfe\x9b\xfe\xab\xfe\xaf\xfe\xc9\xfe\xd2\xfe\xf5\xfe-\xff\\\xff\x83\xff\xbf\xff,\x00\x90\x00\xcb\x00\xe8\x00\r\x01I\x01\x8a\x01\xba\x01\xc8\x01\xa5\x01m\x01j\x01q\x01E\x01\xe2\x00|\x005\x00\xe0\xff\xac\xff\xa5\xffh\xff\xf2\xfeK\xfe\x0f\xfe&\xfe?\xfe\x1a\xfe\x01\xfe5\xfe\xb0\xfen\xfe!\xfeN\xfe&\xfe3\xfe\xc3\xff@\x04\x12\x05\xf6\xff\x86\xfbt\xfe\xae\x05\xb9\x07\xd4\x05\xa6\x03\x88\x01\xb8\xfe\x17\x00\x82\x06\x12\t\xb5\x03$\xfe\x16\xff\xa1\x01\xcc\x00W\xff>\x00S\x00\xd5\xfd\xc6\xfc\x04\xfe\xd4\xfdh\xfb\x17\xfb*\xfe\xbc\xff\x87\xfd\xe9\xfa%\xfb\xf2\xfc0\xfeN\xff&\x00\x8b\xff\x84\xfd/\xfdh\xff\xba\x019\x02\x84\x01\x14\x01\xb1\x00H\x00\xf1\x00I\x02\xbf\x02\xa0\x01\x86\x00\x89\x00o\x00\xbd\xff)\xff\'\xff>\xff\xc5\xfe+\xfeQ\xfds\xfc\x0e\xfc\x94\xfc`\xfdC\xfdz\xfc\x8e\xfb\x07\xfbP\xfb\\\xfcP\xfd,\xfd?\xfc\xbc\xfb\x0c\xfc\x93\xfc\xc4\xfc\x1c\xfd\xa6\xfe\xa4\x00w\x01O\x00W\xff\xc6\x00\xb2\x04\x83\x08\xcc\t\xe9\x08\x9a\x075\x08\xca\n\xd0\r[\x0f\x9a\x0e\xf4\x0c\xa2\x0b0\x0b*\x0b\xa8\n\x97\t\x06\x08\xf5\x05n\x03\x98\x00d\xfe\\\xfd\xdf\xfc\xfe\xfb\xee\xf9:\xf7F\xf5\xda\xf4\xbd\xf5\xe9\xf6y\xf7$\xf7g\xf6j\xf6\xf6\xf7g\xfav\xfc\xb8\xfd_\xfe\xdf\xfe*\xff\xfb\xff\xa0\x01/\x03\xc1\x031\x03m\x02\xba\x01)\x01\x0c\x01\xfe\x00\xa0\x00p\xff\xd6\xfde\xfcU\xfb\xf1\xfa\xfd\xfa\xe9\xfaO\xfas\xf9\xcb\xf8\xac\xf8\r\xf9\x1e\xfa6\xfb\xc6\xfb\xd9\xfb\xe5\xfb\xa3\xfc\xff\xfd\xa8\xff\x0c\x01\x94\x01\x99\x01\xde\x01\xbf\x02\xb0\x03]\x04\xdf\x044\x05\x08\x05\xae\x04\x84\x04\xab\x04\xc9\x04\xa5\x04\x85\x04\x17\x04t\x03\xe1\x02\x88\x02p\x02_\x024\x02\xc8\x01\x1a\x01\x93\x00l\x00\x81\x00\x85\x00]\x00\r\x00\xb1\xffq\xff[\xffO\xff\x13\xff\xec\xfe\xf7\xfe\xe9\xfe\x99\xfe=\xfe\x10\xfe\x0e\xfe\x1a\xfe6\xfeM\xfe=\xfe\x1e\xfe-\xfe\x80\xfe\xe2\xfe\x15\xff6\xffm\xff\xcb\xff"\x00]\x00|\x00\xa7\x00\xce\x00\xf6\x00\x1d\x015\x01/\x01\x0b\x01\xe4\x00\xda\x00\xdc\x00\xac\x00\\\x00\x1a\x00\x02\x00\xea\xff\xc5\xff\x93\xffi\xff0\xff$\xff9\xff^\xff`\xff5\xffB\xff}\xff\xad\xff\xcf\xff\x02\x00.\x00\\\x00\x80\x00\xca\x00\xfb\x00\xf3\x00\xf1\x00\x14\x01c\x01s\x01a\x01S\x01>\x011\x01)\x013\x01#\x01\xd3\x00\x8f\x00\x99\x00\xa1\x00{\x00)\x00\x07\x00\xff\xff\xd6\xff\xb5\xff\xbb\xff\xab\xff\x89\xff6\xff\x0c\xff1\xffW\xff\x05\xff\xdb\xfe`\xff\x11\x00\x97\xff\x04\xff\xb3\xffT\xff<\xfe\x9e\xff\xd0\x04\xd9\x05B\xff\x9e\xfa\xaf\xfe\x81\x05\xe8\x05\x80\x03O
\x02\x08\x00c\xfco\xfe\xcf\x05,\x074\x00J\xfb-\xfeM\x01\x0c\x00\xd8\xfe\xea\xffo\xff\x03\xfdU\xfd\x84\xff\xfc\xfeR\xfc\xa6\xfc\x87\xff9\x00\xef\xfd_\xfc \xfd%\xfe\xfb\xfe\x07\x00H\x00\xa3\xfe\xfd\xfc\xcd\xfd\xdb\xff\xb0\x00\\\x00/\x00\xc5\xff\xdb\xfe\xc6\xfeB\x00B\x01\xd4\x00\xec\xff\xdc\xff\n\x00\xd2\xff\xde\xffQ\x00P\x00\xf4\xff\xef\xff(\x00\xc4\xff\xea\xfe\xb7\xfeq\xff=\x00\x0f\x00J\xff\x8d\xfe\'\xfe\xab\xfe\xf0\xff\xab\x00\xdd\xffx\xfeG\xfer\xff.\x00\x0f\x00\x83\xff\x07\xff\xb8\xfe\xf9\xfe\xb3\xff\xb1\xff|\xfe\x9e\xfd\x12\xfe\xdb\xfe#\xff\x15\xff\x86\xff\x89\xff\x18\xff\x02\x00W\x02\xdc\x04X\x05\xf9\x04M\x054\x06C\x08\xab\n\x8f\x0c\xf6\x0bs\t\x8b\x08\xf5\t\x8f\x0b\xfb\n\x94\x08\x00\x06\x1f\x04\xc7\x02\x02\x02\xa3\x00:\xfe\x91\xfb\xfc\xf9|\xf9V\xf8_\xf6\xf5\xf4\xef\xf4\xcf\xf5`\xf6p\xf6T\xf6n\xf6w\xf7\xe7\xf9z\xfc\xcf\xfd\xdf\xfd-\xfe\xac\xff\x9f\x014\x03\x08\x04\x01\x04V\x03\xc7\x02\x17\x03~\x03\xd8\x02\x10\x01\xae\xff\x16\xffD\xfe\xbd\xfc\\\xfb\x8c\xfa\x92\xf9\x9c\xf8[\xf8\x96\xf8&\xf8P\xf7\x88\xf7\xa5\xf8\xc2\xf9\x8e\xfa\x9c\xfb\xa2\xfcc\xfd~\xfeX\x00\x14\x02\xd5\x02g\x03\x8d\x04\xae\x055\x06i\x06\xf3\x067\x07\x14\x07#\x07\\\x07\xf4\x06\n\x06V\x05\x14\x05\x98\x04\x03\x04\x94\x03\x00\x03\x03\x02\x11\x01\x9f\x00\x8d\x00g\x00\t\x00\x92\xff\x08\xff\xc0\xfe\xdf\xfe+\xffn\xffP\xff\x03\xff\xd5\xfe\x04\xffW\xffk\xffy\xff\x92\xff|\xffK\xff\x16\xff\x02\xff\xe2\xfe\xc5\xfe\xc1\xfe\xbf\xfe\x90\xfe2\xfe\xcf\xfd\xab\xfd\xc4\xfd\xdc\xfd\xc9\xfd\xf5\xfd\x82\xfe\x98\xfe\xe0\xfd\x92\xfd\xae\xfe\x14\x00p\x00n\x00\x96\x00L\x00\x01\x00\x8d\x01\xdc\x03\xe3\x03\x03\x02\xbd\x01e\x03\xb7\x03\x89\x02\xaa\x02\x03\x04\xb3\x03\xec\x01R\x01\xc2\x010\x01$\x00\x93\x00w\x01\x83\x00U\xfe\xc6\xfd\xe9\xfec\xff\xbb\xfe\x8c\xfe\x03\xff\xb8\xfe\xdc\xfd-\xfea\xff\xa7\xff\xf8\xfe \xff\xc5\xff\x95\xff\xf0\xfeG\xff#\x00\x00\x00i\xff\x98\xff\xbe\xff\x02\xff\x85\xfe7\xff\x92\xff\x8c\xfe\xd6\xfd\x7f\xfe\xa5\xfe\x97\xfd\xf7\xfc\xaf\xfd\xe4\xfdW\xfd~\xfd\xd6\xfd#\xfdY\xfc8\xfdc\xfe\xe9\xfd\x1c\xfdf\xfd\xd0\xfdU\xfd\x9c\xfd^\xfeO\xfeN\xfdl\xfdU\xfeY\xfem\xfd\xe5\xfcT\xfdY\xfd\xf1\xfc\x90\xfcc\xfc\x07\xfc\xcb\xfb\xcb\xfb@\xfcn\xfc*\xfdB\xfe\xdf\xffj\x02\x0f\x04\x13\x05\xb4\x053\t\xa1\x0e\xaf\x12,\x14\xd3\x13K\x14n\x16\xe4\x19\x86\x1c\xe8\x1b\'\x19\xaf\x161\x15\x01\x14\x12\x12\xed\x0ec\n\xc7\x05\x9b\x02\x8a\xffd\xfb\xb6\xf6<\xf3\r\xf1\xf9\xee\x01\xed\xd7\xea\xec\xe8\x1b\xe8\x01\xe9\x9a\xea\xb6\xeb\xbb\xec,\xee\x02\xf0{\xf2\xd1\xf5\x0b\xf9\x07\xfb\xdb\xfc\xc2\xff\xff\x02_\x04H\x04M\x05\x8f\x07\xd0\x08\x12\x08,\x07\xaf\x060\x05\x04\x03{\x02\x1c\x03V\x01\xeb\xfc&\xfa\x8e\xfa\xa0\xfa\x81\xf8\xa3\xf6\x84\xf6\x1b\xf6\x85\xf4\xc7\xf4"\xf7\xe1\xf7\x84\xf6R\xf6\xd7\xf9\xba\xfc\x9a\xfcF\xfd\xeb\xff\x1d\x02\xf6\x01L\x03G\x07\xb0\x08\x14\x08\xb3\x08y\n\x11\n\xcc\x08/\x0b\xc8\x0c\x15\n\x13\x07D\x07\xf1\x07\xf1\x05Q\x043\x04\x01\x028\xff\x9e\xfe?\xff\xb3\xfd\x0f\xfbQ\xfax\xfa\xa7\xf94\xf9\x80\xf9!\xf9\xd6\xf7\xab\xf7C\xf9e\xfa\x0c\xfa\xba\xf9w\xfaW\xfb\xb1\xfbT\xfc\x95\xfd<\xfe\xbf\xfd\xee\xfd3\xff\xfc\xff)\xffo\xfe\xea\xfeb\xffB\xff\xe5\xfe.\xfe\x0e\xfd6\xfc\xa5\xfc-\xfd\xae\xfc\x96\xfbG\xfa?\xf9I\xf9w\xfbN\xfc9\xfa\xe9\xf7C\xf8\x13\xfa3\xfa\x04\xfb\x08\xfc\xd4\xfb`\xfa\x14\xfc\x1b\x01\xfd\x02\xbc\x01*\x02-\x07\xd4\x0b\x9a\x0e\xeb\x12<\x17\xab\x17u\x15\xf6\x18\xc7"\xa3(\xe5%\xb6 \x82 \xf5"\xa6"f \xf3\x1d\x1f\x1a\xd6\x13\xfa\x0e\xfb\x0c.\ta\x01\x9d\xf9\x85\xf6\xd0\xf5\x89\xf23\xed\xd8\xe7\xac\xe4\xcc\xe3\xa6\xe4\t\xe6 
\xe6\xd1\xe5a\xe6\xfa\xe7\x8d\xeb\xde\xef\xe7\xf2\xc2\xf3Q\xf6\xad\xfb\xa0\xff\xd9\xff\xf0\xff=\x03#\x05\xf4\x04&\x06\xa8\x08\x1b\x07\x1c\x01\xb8\xff\x9e\x02\xe4\x01\x08\xfc\xa9\xf9\xee\xfb\xb0\xfa\xb4\xf4M\xf2\x9f\xf4\x8e\xf4\xb3\xf1\xae\xf2\xac\xf6\xc6\xf6\xa2\xf3^\xf4\n\xf9\xb9\xfb\xa8\xfc\x16\xff\xe9\x02\x8f\x03\x04\x03\xa5\x040\x08\xc7\t\xb0\t\x02\x0b\xa7\x0c\xee\x0c\xf2\nS\t\xdd\x08\xba\x08\xa2\x08\x91\x08\xb1\x07!\x05u\x01o\xff2\xff\xf4\xfe\xe0\xfd0\xfd\x8b\xfc&\xfb\xe8\xf8\t\xf87\xf8a\xf8\xd5\xf8Z\xfa8\xfbn\xfa!\xf9Z\xf9\x8d\xfa\xa0\xfb\x1d\xfd(\xfeM\xfeU\xfd\x1c\xfdR\xfd{\xfd\xe6\xfdu\xfe\x8b\xfeE\xfd\xdc\xfb\xa6\xfa>\xfat\xfa\x87\xfa\x9e\xf9\xd9\xf7X\xf6\xe6\xf5\x8f\xf5\x81\xf6\x03\xf7\x1c\xf6\\\xf4>\xf5\x0f\xf9B\xfa\x99\xf8\x93\xf7\xd9\xfa8\x00\x06\x04\x82\x06\x07\x07q\x06\x0c\x08&\x0e\xda\x16\xb5\x1b\x07\x1c\xe0\x1cm \'$\x11%\x8b&i)\x89+\x85*5(y&\n#J\x1d\x96\x17\xc1\x14\xb6\x12N\x0e(\x07\xd7\xff\xc6\xf9\xb8\xf4\xea\xf0\xbb\xedP\xeb\xe3\xe8\xda\xe6`\xe5^\xe4\xfc\xe30\xe4\xf0\xe5T\xe9\xc2\xec\x11\xefS\xf05\xf2\xa1\xf4K\xf7k\xfay\xfd=\xff\x0b\xff*\xff\xad\x00\xc5\x01t\x00\x9c\xfd\xaa\xfc\x84\xfd4\xfde\xfa\x00\xf7\x1b\xf5\xcb\xf3_\xf2\xa1\xf1\x8d\xf1~\xf0\xe9\xee/\xefP\xf1\x18\xf3\x06\xf3\x10\xf4\xa5\xf6O\xf9\xcc\xfb\xe2\xfe\x91\x02\xe4\x03\x80\x04\x94\x06E\n\x99\x0c\xee\x0cc\r\xe6\rr\x0ey\x0e\x8c\x0e\xf8\x0c\xbe\nw\t\xac\t\xe4\x08B\x06\xd1\x03\x1f\x02\x05\x01\xd8\xff\xe5\xfe\xff\xfdC\xfc\xf0\xfa\xc4\xfaO\xfb\xfb\xfb\xc1\xfbd\xfb\x03\xfbF\xfbd\xfct\xfd-\xfe\x14\xfe\xbc\xfd\x12\xfd\r\xfd;\xfd-\xfd\xcd\xfb\x12\xfa\xcd\xf8d\xf8Q\xf8\xf0\xf6n\xf4\xaa\xf1\x98\xf0<\xf2\xa6\xf3\xf0\xf2T\xf0\xab\xee\\\xf0\xa9\xf3O\xf6\xb9\xf6<\xf59\xf5f\xf8g\xfdK\x00\xce\xff\xe1\xfe\x0c\x01\xdd\x05\x8f\tm\nB\n)\x0b%\x0e\x9f\x11Y\x14\x7f\x15\xa5\x15|\x16G\x18\xe5\x1a\xf7\x1cF\x1e\xba\x1e%\x1e\x1d\x1eP\x1f\xf7 \x07!\xa9\x1eH\x1c\x9e\x1b\x84\x1b\x13\x1a\xf6\x16\xa2\x13\x8c\x10|\r\xb3\n\x81\x08\xd3\x05\xa0\x01\x0c\xfd\x1c\xfa\xd9\xf8\xea\xf6\x9a\xf3\x01\xf1\xaf\xefJ\xeea\xec@\xebn\xeb\xc7\xea_\xe9*\xe9g\xea\x93\xebA\xeb\x1e\xeb\xd5\xeb\xe2\xec\xfe\xed\xe0\xee\xe7\xef\x9b\xf0\xc7\xf0P\xf1`\xf2\xbe\xf3Z\xf4\xf9\xf3,\xf4@\xf5\x8f\xf6\xf8\xf6\xc8\xf6\x80\xf7\x95\xf8\x8c\xf9)\xfa!\xfb\xb0\xfcR\xfd\xb6\xfd\xd3\xfe\x87\x00\xe7\x01b\x029\x03\x7f\x04\x80\x05\x1c\x06\x0c\x07H\x08\x80\x08"\x08|\x08\xd4\t\\\n\x85\t\xb6\x08\x9b\x08\xb4\x08\x06\x08\xc5\x07\xbe\x07A\x07\x0b\x06$\x05\x06\x05\xed\x04{\x04\xb7\x03\xf7\x02@\x02\xd8\x01\xb2\x01;\x01+\x00\x1e\xffZ\xfe\xe0\xfda\xfd\xb3\xfc\xaf\xfb=\xfa\xdf\xf8!\xf8\xa1\xf7\xd4\xf6\xa8\xf5f\xf4m\xf3\xb9\xf2E\xf2\xf7\xf1\xa9\xf1(\xf1\xa5\xf0\xa6\xf0 \xf1\x91\xf1\xd8\xf1J\xf2\x05\xf3\xe7\xf3\xee\xf4\x1b\xf6A\xf7L\xf8U\xf9\xb1\xfau\xfc4\xfeq\xffi\x00\xd2\x01\xb5\x03y\x05\xae\x06\x04\x08\xcc\t\x18\x0b\xef\x0b2\r\xc1\x0eb\x10\x8f\x11\x96\x12\xe3\x13\xcd\x14\x9b\x15b\x16\x92\x17\xa4\x187\x19\x1e\x1a1\x1b/\x1c^\x1c\x90\x1cu\x1d\xd8\x1d\x97\x1d$\x1d\x1d\x1d\xc0\x1c\xf1\x1a\xe2\x182\x17o\x15\xed\x12j\x0fC\x0c\xf9\x08\x19\x05I\x01\xf0\xfd\xca\xfa\x0e\xf7\r\xf3\xde\xef\x1f\xed\x97\xeap\xe8\xaf\xe6\xfc\xe4A\xe3.\xe2\x0e\xe2\x1f\xe2\x06\xe2>\xe2\xcc\xe2\x90\xe3~\xe4\xc7\xe5?\xe7\x97\xe8\xb2\xe9\x05\xeb\xb6\xec\x81\xee 
\xf0b\xf1\xb9\xf2B\xf4\xdb\xf55\xf7q\xf8\xb6\xf9\xef\xfa\xfe\xfb\x12\xfd\x85\xfe\x16\x00=\x01+\x02\x87\x03\xfa\x04\xec\x05j\x066\x07\x8d\x08\x04\n\xf5\n9\x0b*\x0b\xdb\n\x98\n\xba\n\x03\x0b\xdb\n\x17\n%\tI\x08~\x07\xc8\x065\x06d\x05:\x04e\x03\x1b\x03\xce\x02\x19\x02C\x01\xca\x00d\x00\xda\xff\x9d\xff\x9f\xffd\xff\xa6\xfe\x00\xfe\xe8\xfd\xce\xfd;\xfdO\xfcs\xfb\xbd\xfa\xeb\xf9/\xf9`\xf8E\xf7\xf0\xf5\xa8\xf4\xa6\xf3\x03\xf3\x7f\xf2\xf2\xf1`\xf1\x06\xf1%\xf1\x8a\xf1\x05\xf2Q\xf2\xf0\xf2\xe0\xf3\t\xf5U\xf6\xa7\xf7\xf7\xf82\xfaS\xfb\xa2\xfc.\xfe\xc5\xff)\x01j\x02\xd4\x03(\x05\x84\x06\xb2\x07\xf6\x08\x87\n+\x0c\xbe\r\xe1\x0e\'\x10\x18\x11\xb8\x11\xc9\x12\n\x14]\x15\x16\x16\x0b\x16\x14\x16U\x16\xa8\x16j\x17`\x18\xdf\x18\xe4\x18\x11\x19\xbb\x19!\x1a~\x1a:\x1b\xce\x1b\xed\x1a\x1d\x19\x89\x18l\x18>\x17\x8b\x14\xf0\x11\xe3\x0f\xad\x0c~\x08\x1c\x05\x90\x02d\xff\x99\xfa\x8d\xf6?\xf4\xd7\xf1x\xee\'\xebL\xe9\x1d\xe8\xfd\xe5\x06\xe4s\xe3\xa5\xe3=\xe3=\xe2b\xe2\xbf\xe3o\xe4F\xe4\xd0\xe4\xca\xe6\x0c\xe9\xec\xe9\xba\xea\xcf\xec&\xef\xab\xf0\xb2\xf1\xaa\xf3U\xf6\x11\xf8\xae\xf8\xfa\xf9d\xfcs\xfe5\xff\xbc\xffK\x01\xe0\x02-\x03H\x03G\x04\xc8\x05.\x06\xa9\x05\r\x06\xf9\x06\x1a\x07\xff\x05\x93\x05\x83\x06\xd5\x06\t\x06F\x05\x97\x05\xb3\x05q\x04J\x03B\x03\xa5\x03\xff\x02\xe4\x01\xd3\x01$\x02\x92\x01P\x00\xb1\xff\xe6\xff\xa1\xff\xa2\xfe=\xfe\xa0\xfe\x84\xfe\x8c\xfd\xca\xfc\xbf\xfc\x9d\xfc\xb9\xfb\x10\xfb5\xfb?\xfb\x82\xfa\x9a\xf9D\xf9\xfd\xf8;\xf8=\xf7\xc0\xf6\x98\xf6\xfd\xf5N\xf5\'\xf5!\xf5\xd6\xf4\x80\xf4\xab\xf4R\xf5\xe4\xf5|\xf6\x96\xf7\xc2\xf8\xb4\xf9\xb5\xfa>\xfc\xd4\xfd\xfd\xfe*\x00\x96\x01\x17\x03\x1b\x04\xe7\x04b\x06\xb8\x07Z\x08\x06\t\xf2\tL\x0b\xea\x0b\x18\x0c9\ro\x0e\x0b\x0f;\x0f\x8d\x0f\xba\x10\x1d\x11t\x10\xf7\x10`\x11-\x11i\x10\xd3\x0fo\x10"\x10h\x0e\xce\r@\x0e8\x0ea\r1\r\xbb\x0e-\x0f\xf8\r\x86\r$\x0f\xa6\x10\xbb\x0f\xbe\x0eK\x0f\xeb\x0f\xbc\x0e2\x0c\x00\x0ca\x0c\x1b\nC\x06\x98\x03C\x03\x96\x01\xfd\xfc\xb3\xf9\xc1\xf8D\xf7"\xf3b\xef#\xef\xe1\xee\xac\xebx\xe8\xf7\xe8q\xea\x8b\xe8\xe4\xe5\x07\xe7\xc6\xe9\xab\xe9a\xe8\xe7\xe9?\xed\x18\xee\x13\xed/\xef\xf3\xf2j\xf46\xf4\xa1\xf5\xcb\xf8\x1c\xfa\xd7\xf9\x82\xfb\x16\xfe\xb1\xfe\xc3\xfd\x83\xfe\xc5\x00\x98\x01\xf2\x00\x0c\x01Q\x02\xa2\x02<\x02i\x02V\x03\x9e\x03\xee\x02\xdf\x02\x90\x03\xdc\x03_\x03\xc3\x02\xe2\x02\x14\x03\xd1\x02\x07\x02\xba\x01\x9f\x01?\x01x\x00\xd1\xff\xf7\xff\xcf\xff\x1e\xff<\xfe\x07\xfe\xe8\xfdd\xfd\xc3\xfc\xd9\xfc\x07\xfdw\xfc\xb5\xfb\x84\xfb\'\xfcA\xfc\x8d\xfb\x8f\xfb\xea\xfb3\xfcd\xfc\x8a\xfcL\xfd\xa4\xfd#\xfd\x03\xfd\xa4\xfd\x9d\xfe\xbc\xfe\xe9\xfd\xf2\xfb\xdd\xfa\xb3\xfc\x7f\x00\xd4\x03\x9a\x00\x93\xf9\x16\xf6v\xf8O\xfe\xff\x01\t\x01\xc4\xfe\xac\xfd*\xfd\x11\xfd6\xfd{\xfe\x10\x01\xb4\x02 
[Binary data omitted: this span of the diff contained a raw escaped-byte blob (non-text binary content) that is not representable or reviewable as text. No file path or other identifying information was recoverable from the span.]
\x00\x1e\x00\x17\x00\x08\x00\xfb\xff\xe3\xff\xce\xff\xb5\xff\x9b\xff\x93\xff\x8e\xff\x8a\xffz\xffe\xffW\xff^\xffa\xffa\xffe\xffs\xff\x86\xff\x88\xff\x89\xff\x92\xff\xa5\xff\xbc\xff\xcc\xff\xe2\xff\x07\x00\x10\x00\x1c\x004\x00H\x00W\x00n\x00\x8b\x00\x99\x00\xa2\x00\x9b\x00\x96\x00\x8d\x00\x88\x00\x7f\x00z\x00\x7f\x00v\x00j\x00g\x00[\x00J\x00H\x00B\x00A\x00?\x00<\x000\x00/\x00(\x00\x1c\x00\x13\x00\x06\x00\xfd\xff\xf0\xff\xf1\xff\xdd\xff\xbd\xff\xb2\xff\xab\xff\xa1\xff\x86\xffz\xff}\xffr\xffc\xffe\xffm\xfff\xff]\xffW\xfft\xff\x7f\xff\x80\xff\x86\xff\x8e\xff\x99\xff\xa2\xff\xa5\xff\xa3\xff\xab\xff\xae\xff\xb2\xff\xb3\xff\xb5\xff\xaf\xff\xaf\xff\xb1\xff\xa6\xff\xa3\xff\xb0\xff\xba\xff\xb5\xff\xbc\xff\xb8\xff\xb8\xff\xb7\xff\xc2\xff\xc6\xff\xcb\xff\xd4\xff\xda\xff\xeb\xff\xf3\xff\xff\xff\x08\x00\x14\x00\x17\x00\x1f\x00:\x00B\x00A\x00\\\x00g\x00o\x00\x7f\x00\x85\x00\x83\x00\x8f\x00\xa2\x00\x9b\x00\x91\x00\x91\x00\x8f\x00\x8b\x00\x90\x00\x98\x00\x8e\x00y\x00v\x00m\x00k\x00m\x00X\x00D\x00@\x00;\x00#\x00\x0c\x00\xf4\xff\xe1\xff\xdf\xff\xce\xff\xc0\xff\xab\xff\x96\xff\x85\xff{\xffj\xffh\xffl\xff\\\xffR\xffR\xffN\xffF\xff:\xff9\xff1\xff4\xff@\xffF\xffH\xffG\xffK\xffV\xffl\xff{\xff\x89\xff\x9d\xff\xb3\xff\xc9\xff\xd4\xff\xec\xff\x01\x00\x12\x00&\x00<\x00P\x00e\x00v\x00\x84\x00\x95\x00\x9d\x00\xa7\x00\xb2\x00\xbc\x00\xbc\x00\xb6\x00\xba\x00\xbc\x00\xb2\x00\xa8\x00\xa7\x00\xa5\x00\xa2\x00\x9e\x00\x9b\x00\x8e\x00{\x00t\x00n\x00j\x00[\x00S\x00I\x00C\x009\x00,\x00\'\x00\x1d\x00\x12\x00\x0e\x00\x04\x00\x07\x00\r\x00\x00\x00\xeb\xff\xe1\xff\xd6\xff\xd1\xff\xc4\xff\xb2\xff\xa3\xff\x8a\xff\x8a\xff\x8d\xff\x82\xff}\xff~\xffv\xffr\xff\x83\xffy\xffs\xff~\xffv\xffz\xff\x82\xff}\xffr\xff\x80\xff\x90\xff\x9b\xff\xa6\xff\xb1\xff\xbb\xff\xc8\xff\xce\xff\xd5\xff\xe1\xff\xf6\xff\x0c\x00\x15\x00\x14\x00\x1f\x008\x00G\x00W\x00[\x00b\x00t\x00\x84\x00\x7f\x00\x84\x00\x89\x00\x91\x00\x90\x00\x90\x00\x93\x00\x8d\x00\x90\x00\x81\x00{\x00d\x00c\x00\\\x00B\x00A\x00/\x00\x1c\x00\x1d\x00\t\x00\xf2\xff\xee\xff\xdc\xff\xcf\xff\xc8\xff\xb3\xff\xa5\xff\xb4\xff\xb3\xff\xb4\xff\x9c\xff\x8a\xff\xa4\xff\xa2\xff\x99\xff\xae\xff\x9b\xff\x93\xff\xab\xff\xc8\xff\xae\xff\x93\xff\xb0\xff\xe2\xff\xde\xff\xb4\xff\xb8\xff\x91\xff\x9d\xff\xb9\xff\xb1\xff\xc4\xff\xc0\xff\xad\xffa\xffC\xff\x84\xffn\xffP\xff\x92\xff\xbc\xff\xb5\xff\x9a\xff\x97\xff\xe4\xff\xc0\xff\x86\xff\xa5\xff\xae\xff\xfc\xff\x1d\x00v\x00\x04\x01\xf5\x00\x81\x01\x1e\x01\xef\x01>\x01\xa7\x01\xdd\x00\x00\xfd\xdc\x08\xea\x10G\x04\xce\xf3.\xfe\x19\x05\x10\x04\xaa\x02A\xff\xb1\xfa/\xf7\xce\x00\xc7\xf9[\xfc\xbd\xf9\xa5\xf40\xfe\xd9\x00\xc7\xfe5\xfe)\xfdy\xfb \xfb\xec\x05-\x0b\xb7\xfb\xba\xfeC\x04\xcf\x01\xe2\x03m\x03v\x03\xee\xffM\xfa\xe1\x00\x98\tR\x03`\xfad\x01\xae\x01\xfe\xf8\xb1\x00\x96\x04h\xff\xd1\xfb\xa5\xfb\xaa\x011\x00\xa1\xfc\xf5\x01+\xfeH\xf33\x02]\x05`\x00}\xfa\x9e\xfd\xee\x01\xc6\x00\xae\xfe@\xfb\x18\x07\x1b\x04a\xfa\x19\x02s\x06\xab\xfe}\x02\xd5\x02-\xff4\x02c\xfdu\x06~\x08\xcc\xfe\xf8\xf5\x18\x07a\x07\xc9\xfey\xff\x01\xff:\x01K\xff\xb0\x04\x9e\xfft\x03\x9a\xf9\xe8\xfb\xce\x01\xc3\xff\xdf\x02\xda\xf6 
\x03\xcb\x00\x9a\xf6\x96\xfeS\x00\xd5\xff2\xf7&\xfei\x04\x9d\xf9\x03\xfd\xec\x06O\xfei\xf4\xf3\x02=\x06\x90\x01\xad\xfe\x18\x02\xe4\x00\xa4\x01L\xffp\x044\x05\x19\x00\xbb\xff\xa3\xfe2\x04\xec\xff\xdf\x04{\x04c\xfa\xbc\xfd\xc0\x0bG\xf9\xaf\xf9\x8a\x06\xf5\x01n\xfc\x0c\xfap\x068\xfd\xc1\xfa\\\xfcL\x05-\x01]\xf8\xd5\xfd\x99\x039\xf9=\x00\x8b\n\xde\xfbz\xf7A\x04\xd1\np\xf2Z\xff\x9e\x10x\xfb\xb7\xf3\x02\t\xe9\t\x08\xef\xfd\x01\xad\r\xf1\xfa)\xf5\xbc\xffa\x0b\xa8\xfci\xfc\x98\x05O\xfd\x7f\xfa>\x00\x13\t-\x00\xc4\xfa|\xfc\x9a\x05\xd3\x01\x15\xfa\r\x05L\xfe\xce\xf4L\x01\xda\x05\xf3\xfb\x91\x02\x1e\x01\xc5\xefS\x03\x18\x0b\xfe\xf6C\xff\x11\x06\xad\xff\x91\xfe\xfe\xff\x05\x04\xb6\x04\xa1\xf9\x03\xfe\xdf\x05\x99\xfb\x02\x03\xb4\x07u\xf6h\xffd\x05\xe1\xfc\xa1\xfd\x87\xfe\xd1\x01\x94\xff\xe6\x00N\xf8\x1d\x04\xb8\xfeL\xfa\xba\x04\xbb\xfd\x85\xf5\x8b\x07\xc0\x08\xe2\xf5\'\x07\x88\xffD\x01\x1f\x00\xb8\x04\xa8\x05\xca\xf8E\x01z\t\xcf\x00L\xfc\xc2\x02\x06\x01\xef\xf8\xb2\xfbH\x06\xc7\x06%\xf8\x86\xf3A\x0e\x02\xfe\'\xf3\xa5\x00\x14\n\x8a\xfeJ\xf0$\x07\x12\x07\xaa\xfc\x04\xfbf\x01U\x04\xac\xff\x9b\x02\x04\x06J\x01~\xfc\x9c\x00\x1a\x06)\x05\x9a\x05 \x04\x14\xf6\x1e\xfe1\x06O\x03\x9f\x03\x05\xf9,\xfc\x8f\x04P\xfb\xc4\xf8N\x03\xe1\x03o\xf7A\xf9{\xff\xf1\xfe\xcf\x05f\xf9\xe4\xfa#\x04\x82\xfcG\x009\x02\xb3\xfe\xe4\xff\x83\x02\x9e\xfb\x0c\xf9\x97\x05\x87\x04\x02\xfd:\x01\x98\xfa\x7f\xfa\xe2\x07\xab\x05[\xf4[\xfc\xe0\x0br\xfc\xc9\xfc\xdd\t\x16\x00\x93\xf3!\xfa:\x11\xcb\x06\xf9\xf24\x01\xb7\x07:\xf5\xfc\x00\xe4\x10\xf6\xfau\xe8\xdb\x00\xab\x16`\xfd\xea\xf9v\x06\xc3\xfdw\xea\xa8\x02\xff\x13\x10\x03v\xf7\x13\xfaq\x03\xcc\xff\x83\x02J\x06\xa2\xfd\x87\xf3#\xfc\x02\x0c#\x06\xb5\xfc\x86\xfd\xba\xfa\xc3\xf8\x13\xfe`\x0c\xe3\x02\x17\xf5\xb9\x00\x85\x05\x0c\xfeP\xff\x81\x08\xee\x01\xfe\xef8\xfd\xc3\x0b\xd3\x06\xdd\x03\xa1\xfb\xdc\xf7\xbf\xffM\x06\xd0\xfc\xf8\xff\xa0\x04\xaa\xfe\xa2\xfe\x98\xfcG\x01r\x05\xea\xfc\xc3\xf7\xe9\xfc\xd2\x04a\x06x\xfe\xec\xf6c\x01\x07\x03;\xff@\x000\x01\xc3\xfe:\xff\xa0\x05\xb4\x00\xe0\xfe\x1d\x02\xf3\x00\x97\xfd\xcd\xffO\x06\xcb\x02p\xfd\x01\xffS\x02\xbe\x01\x9c\xfd\x81\xfd\xf2\x03C\xff\xcf\xfb^\x01L\x03\xb7\x00e\xfb2\xfe;\x03K\xff\xb7\xf8`\x03\x1e\x04\x04\xfc\xe1\xffO\x01\xcd\xfew\xfb\xcc\x00\xc4\x04\xef\x00\xe6\xfa\x14\x01\x82\x04\xc8\xff\x94\x00\xa1\x00\xbd\xff\xb4\xfaU\x011\x084\x02t\xfc%\xfci\xfcZ\xfe\xf5\x06c\x06q\xf8[\xf6X\x02\x89\x04\xd4\xfeM\xff;\x05\xb6\xfe3\xf8\xbb\x00\xea\x04\xa5\x04U\xff\x98\xfc`\xfa\xf4\x02\x18\n\x92\x01\xe6\xf6\x9e\xfa\x9e\x022\x01A\x02\xd5\x01\xe9\x02D\xfbi\xfbb\x01\xae\x03V\x00!\xfd\x87\x00^\x03\xa2\x00\x07\xfc\xf3\x00\xcd\x00\xd2\xfeO\x00\xca\x02\xef\xfd\x02\xfc\x14\x02\xe9\x05S\x01\x17\xfa\xaf\xfc\xf2\x00j\xff\x0e\x03\x15\x06[\xfe\xaf\xf5z\xfdu\x03Y\x06\t\x01\x14\xf6\x9a\x00\x05\x05/\x04\xbb\xfd\xd0\xfcY\xfer\xfeh\x03\x9a\x03\x98\x00\x8f\xfb\xbe\x00x\x03\x1c\xff\xf8\xffc\x00:\x00\xd4\xfa\xa3\xfd_\t\x14\x08\xb5\xfc?\xf2$\xfb\x19\x08b\x04\xd0\x00T\xfb\xe9\xfa\xa5\xfe\xf6\x03\xd1\x05M\xfc\xb9\xf8N\xfe\x08\x04V\x03.\x03\xf8\x01t\xfa\x8d\xf82\x00\x96\x06\xd9\x04y\x00\x08\xfc\x97\xfb\x99\x02e\x06u\xff\xc0\xfbG\xfb\x14\xfe\xa3\x06M\x06\xcc\xffm\xfc\xf0\xfb\xbe\xfe\x14\x01\xaf\x00&\x03\xcf\x02\xeb\xf9%\xfa2\x05B\x06\xc4\xff\x99\xfa\x9f\xf7,\xff\x9a\x06\xd1\x04e\x01B\xfb\xb7\xfc\xb7\xfd\x1e\x03\xab\x04\xce\x02\x86\xf9+\xf8\x84\x04\x0c\x08`\x05s\xfb\xd6\xf8\xc5\xfe~\x01\x1a\x02e\x05\xd3\xfe@\xf9)\x00a\x06\x1f\x00\xf8\xfe\x83\xfe\xeb\xf7f\xff,\t\xb8\x05{\xfe\xa5\xfa\x16\xfb~\xff\xc5\x02\xdf\x03\n\x01p\xfe\xbc\xfa\xf9\xff)\x05D\x02\xcd\x00H\xfb\x1c\xfa\xaa\x01\xf1\x05\xe2\x00
s\xfd\xfb\xfd\t\x01&\x01m\xff\xbf\xfdK\xff\xf1\x00\xe7\x01\x97\x02S\xfc\n\xff4\xffK\xfd%\x02\x92\x04\xac\x01\xff\xfa!\xfc\x8d\x01\x95\x02\xef\xff\x14\xfe\x83\xfeS\x00:\x026\x03%\x01\x18\xfb\xb8\xf8\x80\x00C\x07~\x05>\xfel\xfb\xf7\xff\x0c\x02\x84\x01\x98\x00}\xff\x82\xfd\x15\x00\xed\x01L\x03\xca\x01\x07\xfem\xfc5\xffW\xff\xca\xffI\x04d\x02\x89\xfc]\xfa\xda\x03\x9d\x04\x8f\xfcD\xff\x06\x00\x86\xfd\xa8\x03\x9f\x03q\xfe1\xfd_\xff;\x010\x04\xa8\x01\x87\xfe^\xfe\x0c\xfd\xec\x00I\x03\xa8\x02\xea\x01\xc7\xfd\xe2\xfa\xda\xfe \x04\xfb\x02\x04\xfeh\xfa\xfd\xfcZ\x00\xb2\x00\xf6\x01|\x02n\xfd\xf3\xf8n\xfe\xf0\x02\x9e\x04\x07\x04\xc9\xfc<\xfa`\x03\x0c\x05\x99\x00\xa7\xff\xd6\xfd[\x00s\x01\xd0\x00\x16\xffO\x00d\xffN\xfc\x93\xffw\x02O\x01\xa5\xff\xac\xfc\xae\xfeS\x04W\x00V\xfa\x82\xfdp\x00\t\x01X\x02\x93\x01\x00\xfd\x86\xfcT\x02`\x02\xdb\xfd>\x00\x8f\x02\xdc\xfeb\x00\xa2\x03I\x01\xe9\xfc\xcf\xfc\x02\x02)\x02\xe1\xfed\x00.\x02\x7f\x00\x05\xfd\xeb\xff<\xff\x7f\xfe\x9c\x03h\x00I\xfc@\xfd\x9b\x02\xa8\x04\xe9\x01-\xfd\xb5\xfb\xfa\xfc\xc3\x02\xf8\x03,\x02:\x02m\xff\x1d\xfd\x84\xfd\x88\x02@\x02\xe2\x00K\xfeb\xfe\x07\x01\xc3\x02\xe3\x01\xb1\xfd\xb3\xfc\t\xfdF\xff\xf6\xff\xb3\x00c\x03\x19\x02\xe1\xfc\xa5\xfaS\xfe\x15\x01s\x01:\x02h\xfe\xa4\xfc\x16\x01B\x03~\x00\xb4\xfey\xfd\x8f\xfc/\x01\xd7\x03\x14\x02\xb3\xfd~\xfe\x81\x000\x01\x05\x01R\xffv\xff\xba\xfd\x15\x01\x92\x03\xa2\x02\xf5\x00\n\xfe>\xfc\xe3\xfd\x19\x03E\x04\xa3\x00\x18\xfd\x06\xfe\x91\xff_\x01\x1e\x03\x9f\xfe\xb1\xfb\xc2\xfdU\x01\x81\x02@\x01\xb3\xff\xab\xfd)\xfe\xa7\xff\xca\x00w\x00~\xff\xc0\xffm\x00\xc6\xff\xb3\x00\x00\x03\xd5\xfe\x83\xfc\xa8\xfeN\x00\xfa\x00\xd0\x02\xa4\x024\xfd\x1f\xfe\xb9\x00\x9c\xffb\x00n\x00y\xff`\xff\x8a\x00\x88\x01\x06\x00\t\xff\xfa\xff\x81\xfe\x19\xfe\xf6\xff0\x01$\x01\xc0\xff\x87\xfe\xc7\xff,\x00\x11\x00\xc1\xff\x1c\xff`\xff\xca\x00*\x00>\xff\xc0\x01)\x01U\xfek\xfej\xff\xd2\x00\x9c\x02\xf3\x01j\xff\xd8\xfd\xcf\xfe\xb6\x01\xb8\x01\x94\xff\x9a\xff\xfa\xfe\x99\xff\x95\x01\xa1\x01S\x00\x87\xfe=\xfe\xe9\xfes\x00\xd7\x00\xd6\x01{\x00\xae\xfc\x85\xff\x83\x01\xad\x00!\x00\xdf\xfdU\xfe\xac\x00\xa5\x02\xb4\x01]\xffl\xfe7\xfe\xcd\xff\xeb\x00h\x00\x0b\x00\xbc\xff_\x00H\x01\x8c\x00\xdb\xff\x10\xff\x04\xfe\xe6\xffq\x01\xdf\x00\xfe\x00\x12\x01\xae\x00C\xff\xe8\xfd\x1d\xfe\xa2\xffc\x01\xad\x01\x02\x01\xd4\xff>\xff\xda\xff\xae\xff{\xff\xdb\xff#\x00\xd0\x00%\x01\x17\x00/\x01(\x02E\x00\xa6\xfd\xff\xfc\xa9\xff\xdd\x01\n\x02\x8d\x00\xb0\xfe\xe6\xfe\x0f\x00\xb1\x00#\x00\x1f\xff\x88\xfe\x19\xff\xde\x00\x90\x01\x90\x01\x95\x00\x83\xfe\xa6\xfd\x83\xfe\xbe\x00\xbf\x01\x16\x01F\x00W\xff\x8e\xff\x7f\x00T\x01\xd0\x00Q\xff\xf3\xfe\xb0\xff`\x01J\x01\xed\x00\x9e\xff\x7f\xfd\x03\xfe\x14\x00\xff\x00\x87\x00\xe7\xffY\xff\xe4\xff\xca\x00\x89\x00\x91\xff\xf3\xfe\xc5\xfe\x8a\x00\x93\x01\xaf\x00\x85\x00\x91\x00\xed\xffj\xff\xb1\xff\x93\x00c\x00\xdb\xff_\x00R\x01\x06\x01u\xff\x11\xff\'\xff\xa3\xff\xb0\xff\\\xffq\xffr\xff/\x00\xdc\x00\xd7\xff\xd2\xfe\x80\xfe\xb7\xfel\x00\xab\x01\x10\x01\x10\x00|\xffT\xff\xbc\xff>\x00\xc4\x00\xc4\x00Q\x00\x82\xff\x85\xff^\x00\xb6\x00\x1b\x00%\xff\x0b\xff\xaa\xff\x97\x004\x01\x8e\x00R\xff\xe1\xfeS\xff\xe3\xff+\x00\xff\xff\x02\x00\xe6\xff\xd5\xff\x91\xff8\xffx\xff\xf6\xffE\x00\x08\x00\xed\xff\x83\xff\xaf\xff\xb0\x00\xef\x00\xaf\xff\xcd\xfe\x98\xffs\x00\xc0\x00\x1c\x01\xa0\x00\xf7\xfe\xb4\xfe\xd1\xff\x03\x008\x00\xb7\x00\x89\xff&\xfe^\xff@\x01m\x01O\x00\xa9\xfe\xfe\xfd>\xff\xff\x00}\x01\xa0\x00L\xff\x8e\xfeD\xff8\x00\xe5\x00\xd1\x00C\xff\xfc\xfe\x00\x00\x1b\x01\x10\x01\xa3\xff\xc7\xfee\xff\x89\x00\x03\x01u\x00\xa7\xff\x99\xff\xd9\xff3\x00,
\x00\xc4\xffM\xff\x83\xff5\x00h\x00\x04\x00\xc6\xff\x92\xff`\xff\x8d\xffz\xffg\xff\xfa\xff\x82\x00{\x00\x1b\x00\xbc\xff\x93\xffs\xff\xbd\xff6\x00\x9f\x00\x8b\x00\xe1\xff\xf8\xff\x85\x00\xc1\x00,\x00*\xff\xef\xfe\x14\x00>\x01\x19\x01N\x00\xa9\xff\x7f\xfff\xff*\x00\xab\x00\x00\x00\xb1\xff\xe5\xff?\x00\xcc\x00\xcd\x00z\xff~\xfe;\xff\x91\x00\x00\x01\xc3\x00A\x00\x86\xff\xe2\xfe\x91\xff\xea\x00\xb3\x00\xfa\xff{\xff\xa1\xffM\x00\xe5\x00\xda\x00\xf2\xff\x01\xff\xcb\xfe\xc8\xff\xdb\x00\xd2\x00\\\x00\xd4\xffC\xffm\xff\xf6\xff\xf4\xff\xc7\xff\x9b\xff\xaf\xff<\x00\x9a\x00d\x00\xd9\xffT\xffQ\xff\x0e\x00\xea\x00\xc3\x001\x00V\x00\x7f\x009\x00F\x00\x11\x00}\xff\x8c\xff\x95\x006\x01\xd1\x00\x04\x00\x1e\xff\xae\xfeV\xff\x8e\x00\xfc\x00\x8a\x00\x16\x00_\xffr\xff4\x00o\x00<\x00z\xffE\xff\x17\x00/\x01$\x01F\x00h\xff\x15\xff\xb2\xff\xcf\x00M\x01\xf6\x00q\x00\xf6\xff\xcc\xff0\x00k\x00\xb8\xffM\xff\x97\xff!\x00\x97\x00o\x00\xbf\xff\x0c\xff\xef\xfea\xff\xd8\xff\x15\x00\x00\x00\xc5\xff\xba\xff\xcb\xff\xed\xff\xf8\xff\xbf\xff\x94\xff\xa4\xff\xf9\xffT\x00m\x00-\x00\xa7\xff\x8b\xff\xda\xff?\x00\xbc\x00\xc5\x00b\x00\xf4\xff\xec\xff\xf4\xff\xde\xff\xe1\xff\xe9\xff\xfa\xff\n\x00(\x00.\x00\xd2\xffS\xffx\xff\xe8\xff\x0c\x00\x10\x003\x002\x005\x00U\x00\xff\xff\xc0\xff\xdb\xff\x07\x008\x00j\x00s\x00c\x00L\x00\x18\x00\xae\xff\x96\xff\xf4\xff\x05\x001\x00\x8a\x00W\x00\xbf\xff\x88\xff\xbf\xff\xd5\xff\xfe\xff\x03\x00\xc2\xff\xc6\xff9\x00\xa5\x007\x00_\xff\xf6\xfe`\xff$\x00Y\x00\xf0\xff\xae\xff\x9e\xff\xb7\xff\xee\xff\xdf\xff\x89\xffG\xff\x93\xff\xe9\xffJ\x00g\x00\xf8\xff\x95\xff\xa6\xff\xe5\xff\x18\x00.\x00\x0b\x00\xb7\xffx\xff\xec\xffb\x009\x00\x94\xffR\xff\xa0\xff\xcf\xff\x1e\x00U\x00\xf4\xffp\xff\x92\xff\x00\x008\x001\x00\xf5\xff\x8f\xff\x86\xff\xe2\xff:\x005\x00\xe4\xff\x9f\xff\xa1\xff\xeb\xffV\x00K\x00$\x00!\x00D\x00t\x00g\x00\x1e\x00\xf4\xff\x0b\x00F\x00e\x00<\x00\xdd\xff\xb0\xff\xca\xff\xf5\xff\xff\xff\xca\xff\x9b\xff\xa1\xff\xe5\xff:\x00N\x00\xec\xfft\xffs\xff\xd3\xffA\x00;\x00\xee\xff\x98\xffw\xff\xde\xff]\x00$\x00~\xffW\xff\xaa\xff!\x00r\x002\x00\xb3\xffi\xff\xb0\xff3\x00_\x00 \x00\xd8\xff\xc3\xff\xd8\xff*\x00~\x00I\x00\xb5\xff\\\xff\x82\xff\xfc\xffi\x00w\x00\x0c\x00\x8c\xff\x84\xff\xf2\xff|\x00\x8b\x00.\x00\xe0\xff\xfd\xff\\\x00\xa5\x00\x93\x00\'\x00\xd1\xff\xce\xff\x12\x00Y\x00K\x00\x0e\x00\xe4\xff\xd0\xff\xf9\xff/\x00 \x00\xde\xff\xc2\xff\x03\x005\x00?\x00-\x00\r\x00\xee\xff\xf0\xff*\x00S\x00C\x00\r\x00\xf2\xff\x13\x00T\x00}\x00M\x00\xc4\xff\x9c\xff\x05\x00r\x00f\x00$\x00\xf5\xff\xc8\xff\xf2\xffH\x00M\x00\xf0\xff\xb7\xff\xda\xff\t\x00E\x00y\x008\x00\xb9\xff\x9a\xff\xef\xffF\x00M\x00\t\x00\xd4\xff\xdc\xff&\x00]\x00?\x00\xf0\xff\xdb\xff+\x00S\x00M\x00A\x00 
\x00\xee\xff\xff\xffA\x00:\x00\xf2\xff\xc0\xff\xcd\xff\xf5\xff\x1b\x00\x1e\x00\xfc\xff\xcf\xff\xd3\xff\x01\x00\n\x00\xdb\xff\xd5\xff\xef\xff\x00\x00\x07\x00\xfc\xff\xe6\xff\xc3\xff\xd5\xff\xe6\xff\xfb\xff\x06\x00\xf1\xff\xd3\xff\xf0\xffG\x002\x00\xf8\xff\xd4\xff\xe7\xff\x15\x00F\x008\x00\xf6\xff\xbd\xff\xcb\xff\x15\x001\x00\x08\x00\xb2\xff\x8d\xff\xb7\xff\x07\x002\x00\x08\x00\xbe\xff\x9a\xff\xb5\xff\xfa\xffD\x00%\x00\xd8\xff\xb9\xff\xda\xff:\x00{\x008\x00\xbd\xff\xa6\xff\xdf\xff8\x00h\x004\x00\xaa\xff\x8c\xff\xf6\xff3\x003\x00\x04\x00\xc7\xff\xa4\xff\xea\xffB\x000\x00\xeb\xff\xb5\xff\xa8\xff\xce\xff\x12\x004\x00\x14\x00\xd9\xff\xbe\xff\xda\xff\x04\x00\x0f\x00\x06\x00\xc5\xff\xb4\xff\xf6\xff0\x008\x00\x0f\x00\xde\xff\xa9\xff\xc2\xff\xf0\xff\x00\x00\xf2\xff\xd2\xff\xbe\xff\xb2\xff\xd6\xff\xf7\xff\xfa\xff\xd8\xff\xc6\xff\xde\xff\x08\x00"\x00\x00\x00\xdb\xff\xcd\xff\xe9\xff\xf9\xff\x1b\x00%\x00\x04\x00\xf9\xff\x06\x00\x0c\x00\xff\xff\xf4\xff\xf4\xff\xed\xff\xe5\xff\xf1\xff\xfc\xff\xe6\xff\xc2\xff\xaf\xff\xb6\xff\xc6\xff\xdc\xff\xe2\xff\xd7\xff\xd3\xff\xd3\xff\xd3\xff\xd3\xff\xdf\xff\xe4\xff\xd4\xff\xea\xff\x03\x00\x07\x00\t\x00\x0b\x00\xf5\xff\xe1\xff\xee\xff\x16\x00*\x00\x1a\x00\x0b\x00\x04\x00\x07\x00\x0c\x00\n\x00\x02\x00\xfe\xff\xf8\xff\xf4\xff\x02\x00\x12\x00\t\x00\x03\x00\x02\x00\xf7\xff\xf4\xff\xfe\xff!\x00\x1e\x00\x05\x00\x0f\x00\x11\x00\x01\x00\xfc\xff\x06\x00\x01\x00\x01\x00\x06\x00\x03\x00\x05\x00\x07\x00\x00\x00\xfe\xff\xfc\xff\x00\x00\x10\x00\x0e\x00\t\x00\n\x00\x14\x00\x19\x00\x1a\x00\x11\x00\x00\x00\xfe\xff\x08\x00\x17\x00\x15\x00\x02\x00\xf2\xff\xfa\xff\x0f\x00\x1c\x00\x04\x00\xfc\xff\x11\x00\x1d\x002\x00=\x00A\x00(\x00\x14\x00\x1d\x00-\x001\x00*\x00\x19\x00\x0e\x00\x16\x00\x1f\x00.\x00\x1d\x00\xfe\xff\xf2\xff\x05\x00,\x005\x00(\x00\t\x00\xfb\xff\r\x00%\x003\x00%\x00\x13\x00\x11\x00\x16\x00"\x00+\x00!\x00\t\x00\x00\x00\x12\x00\x05\x00\x07\x00*\x00\x11\x00\xef\xff\xe4\xff\xf4\xff\xfb\xff\xfa\xff\xeb\xff\xd5\xff\xdf\xff\x0f\x00*\x00\x0c\x00\x00\x00\x03\x00\x05\x00\xfc\xff\xfd\xff\x07\x00\x05\x00\n\x00\x14\x00\x12\x00\x01\x00\xfe\xff\xf3\xff\xe9\xff\xe2\xff\xf8\xff\x05\x00\xff\xff\xfa\xff\xf7\xff\xf4\xff\xf0\xff\xf6\xff\xfc\xff\xfb\xff\xfe\xff\x04\x00\x01\x00\xfe\xff\x02\x00\xfe\xff\xf5\xff\xeb\xff\xef\xff\xfc\xff\x00\x00\xf6\xff\xdf\xff\xd6\xff\xea\xff\xf7\xff\xf9\xff\xea\xff\xd7\xff\xdc\xff\xf8\xff\x06\x00\xfc\xff\xf2\xff\xfc\xff\xf4\xff\xfe\xff\x18\x00\x17\x00\xff\xff\xf5\xff\x01\x00\x08\x00\x0c\x00\r\x00\xfc\xff\xef\xff\xf0\xff\xf4\xff\xf4\xff\xe9\xff\xdf\xff\xcf\xff\xcf\xff\xe8\xff\xf1\xff\xf4\xff\xf0\xff\xed\xff\xe4\xff\xf1\xff\x03\x00\x06\x00\x00\x00\x04\x00\x0c\x00\xfb\xff\x0b\x00"\x00\x0e\x00\xf0\xff\xf8\xff\x0e\x00\x12\x00\x02\x00\x0e\x00\x05\x00\xdf\xff\xea\xff\xf3\xff\xec\xff\xda\xff\xd0\xff\xc9\xff\xc4\xff\xc6\xff\xc9\xff\xbc\xff\xaa\xff\xb6\xff\xc1\xff\xc8\xff\xd3\xff\xc8\xff\xce\xff\xd3\xff\xdf\xff\xec\xff\xeb\xff\xea\xff\xf2\xff\x00\x00\t\x00\x10\x00\n\x00\xf8\xff\xf6\xff\xf7\xff\xf1\xff\xf5\xff\xf9\xff\xf3\xff\xea\xff\xe4\xff\xef\xff\xe3\xff\xd8\xff\xd6\xff\xda\xff\xe9\xff\xf1\xff\xf7\xff\xf0\xff\xe5\xff\xde\xff\xeb\xff\xfe\xff\xfb\xff\xfa\xff\xfe\xff\xfd\xff\x00\x00\n\x00\x0c\x00\xfe\xff\xf2\xff\xfb\xff\x05\x00\x15\x00\x1d\x00\x0c\x00\x03\x00\x05\x00\x0c\x00\r\x00\x10\x00\x11\x00\x10\x00\x0b\x00\x12\x00\x12\x00\x08\x00\x06\x00\x00\x00\x03\x00\x0f\x00"\x00\x1e\x00\x0f\x00\t\x00\x07\x00\x06\x00\x15\x00\x16\x00\x00\x00\xf6\xff\x00\x00\x13\x00\x0b\x00\xf9\xff\xe9\xff\xe1\xff\xe3\xff\xf7\xff\x00\x00\xee\xff\xd9\xff\xd8\xff\xec\xff\xf0\xff\xf7\xff\xfa\xff\xf6\xff\xf8\xf
f\x05\x00\x16\x00\x14\x00\t\x00\x04\x00\x04\x00\x0e\x00 \x00,\x00&\x00\x1f\x00\x19\x00%\x00)\x00\x1b\x00\x10\x00\n\x00\x07\x00\r\x00\x18\x00\x04\x00\xfc\xff\t\x00\x07\x00\x05\x00\x0b\x00\t\x00\r\x00\x03\x00\x02\x00\x05\x00\r\x00\x1e\x00 \x00 \x00\x1d\x00\'\x00"\x00\x13\x00\x12\x00\x18\x00\x14\x00\x13\x00\x12\x00\x1c\x00 \x00\x1f\x00#\x00\x18\x00\x17\x00\x1a\x00\x1a\x00\x18\x00 \x00\x1c\x00\x10\x00\x13\x00\x04\x00\xf9\xff\xfe\xff\x0e\x00\x11\x00\t\x00\x05\x00\x00\x00\xfe\xff\x08\x00\t\x00\xfe\xff\xf8\xff\xf8\xff\x00\x00\x00\x00\x00\x00\x08\x00\xfb\xff\xfc\xff\x05\x00\x04\x00\x00\x00\xfe\xff\xf8\xff\xf5\xff\xf5\xff\xf7\xff\xfb\xff\x01\x00\x01\x00\xf6\xff\xf6\xff\xfc\xff\xfc\xff\xf7\xff\xf9\xff\xfc\xff\xf9\xff\xf8\xff\xff\xff\xfe\xff\xf7\xff\xf4\xff\xed\xff\xe8\xff\xed\xff\xec\xff\xe3\xff\xe8\xff\xe4\xff\xde\xff\xe1\xff\xe7\xff\xde\xff\xdc\xff\xe2\xff\xe1\xff\xe7\xff\xf6\xff\xfc\xff\xfb\xff\x00\x00\x0c\x00\x07\x00\x04\x00\x06\x00\xfd\xff\xfd\xff\xfe\xff\xfb\xff\xf8\xff\xed\xff\xee\xff\xf3\xff\xf1\xff\xef\xff\xeb\xff\xea\xff\xeb\xff\xf3\xff\xfc\xff\xfa\xff\xf7\xff\xfb\xff\x05\x00\x06\x00\x00\x00\xfe\xff\xff\xff\xfa\xff\xf3\xff\xfa\xff\x01\x00\xfe\xff\xfb\xff\x00\x00\xf7\xff\xf9\xff\xfa\xff\x02\x00\x06\x00\x05\x00\x02\x00\xff\xff\x05\x00\xfd\xff\xfd\xff\xf5\xff\xf4\xff\xf3\xff\xea\xff\xe9\xff\xe6\xff\xe4\xff\xdb\xff\xd6\xff\xdb\xff\xe5\xff\xe4\xff\xe3\xff\xd4\xff\xd4\xff\xe1\xff\xdd\xff\xe6\xff\xe8\xff\xf1\xff\xf5\xff\xf4\xff\xf6\xff\xf4\xff\xf4\xff\xe8\xff\xe5\xff\xeb\xff\xef\xff\xf1\xff\xf1\xff\xe4\xff\xdb\xff\xe0\xff\xe2\xff\xe9\xff\xec\xff\xeb\xff\xeb\xff\xe9\xff\xf0\xff\xf8\xff\xf0\xff\xee\xff\xf5\xff\xfb\xff\xf6\xff\xfc\xff\x00\x00\xfa\xff\x01\x00\x00\x00\x05\x00\x06\x00\x03\x00\xff\xff\xf6\xff\x03\x00\x06\x00\xfe\xff\x02\x00\x06\x00\xfc\xff\xfc\xff\x00\x00\xfe\xff\x00\x00\xfe\xff\xf9\xff\xfc\xff\x03\x00\x03\x00\x00\x00\xff\xff\x02\x00\x10\x00\x11\x00\x10\x00\x13\x00\x1c\x00"\x00\x1b\x00\x14\x00\x13\x00\x11\x00\x10\x00\x0b\x00\x00\x00\xfc\xff\xfd\xff\x00\x00\xfc\xff\xf5\xff\xf2\xff\xf7\xff\xf9\xff\xfe\xff\t\x00\x0c\x00\x12\x00\x10\x00\x06\x00\x02\x00\r\x00\r\x00\x08\x00\x03\x00\x04\x00\x05\x00\xfa\xff\xf9\xff\xfb\xff\xf9\xff\xf7\xff\xff\xff\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\xfd\xff\xfe\xff\x01\x00\x07\x00\x0c\x00\x12\x00\x18\x00\x11\x00\x14\x00#\x00\x1f\x00\x1f\x00(\x00\x1f\x00\x1e\x00&\x00\x1f\x00\x17\x00\x11\x00\x15\x00\x16\x00\x14\x00\x16\x00\x13\x00\r\x00\x15\x00\x1b\x00\x0f\x00\x16\x00 
\x00\x1c\x00\x16\x00\x13\x00\x0f\x00\x0f\x00\x0f\x00\x10\x00\x0e\x00\n\x00\n\x00\x02\x00\x05\x00\x04\x00\x04\x00\x00\x00\x00\x00\x04\x00\x05\x00\x04\x00\xfd\xff\xf8\xff\xfb\xff\xfa\xff\xfa\xff\xf8\xff\xf8\xff\xfb\xff\xfa\xff\xf5\xff\xf7\xff\xf7\xff\xfa\xff\xf6\xff\xf6\xff\xfa\xff\xf9\xff\xf9\xff\xf8\xff\xfb\xff\xff\xff\x00\x00\xfb\xff\xfb\xff\x04\x00\x04\x00\xfd\xff\xfa\xff\x05\x00\x01\x00\x07\x00\x13\x00\r\x00\x03\x00\x04\x00\x01\x00\xff\xff\xfc\xff\xfd\xff\xfb\xff\x02\x00\t\x00\xff\xff\xfd\xff\xfd\xff\xfd\xff\xf4\xff\xee\xff\xfb\xff\x01\x00\xfb\xff\xfb\xff\x00\x00\xff\xff\x01\x00\x06\x00\x07\x00\x00\x00\x02\x00\x07\x00\x00\x00\x01\x00\xfb\xff\xf3\xff\xee\xff\xee\xff\xf1\xff\xf4\xff\xee\xff\xf5\xff\xfb\xff\xef\xff\xea\xff\xe8\xff\xef\xff\xec\xff\xf2\xff\xf1\xff\xec\xff\xee\xff\xeb\xff\xe7\xff\xe4\xff\xed\xff\xef\xff\xec\xff\xec\xff\xea\xff\xef\xff\xeb\xff\xe7\xff\xed\xff\xf7\xff\xf9\xff\xff\xff\xfe\xff\xf3\xff\xef\xff\xee\xff\xf3\xff\xf0\xff\xee\xff\xee\xff\xea\xff\xe6\xff\xe6\xff\xe9\xff\xe7\xff\xf0\xff\xe9\xff\xe3\xff\xe7\xff\xe7\xff\xec\xff\xef\xff\xf1\xff\xf4\xff\xf5\xff\xf1\xff\xf4\xff\x00\x00\xf8\xff\xf9\xff\x00\x00\xfb\xff\xfc\xff\xfd\xff\x00\x00\xf8\xff\xf9\xff\xfb\xff\xf4\xff\xef\xff\xf8\xff\xf6\xff\xf4\xff\xf8\xff\xf9\xff\xfb\xff\xfa\xff\xfc\xff\xf8\xff\xff\xff\x00\x00\xfd\xff\xf7\xff\xf6\xff\xf8\xff\xfc\xff\x06\x00\x04\x00\x02\x00\x08\x00\x05\x00\xfd\xff\xfe\xff\xfe\xff\xfc\xff\xfb\xff\xf8\xff\xef\xff\xf2\xff\xf2\xff\xf1\xff\xf5\xff\xec\xff\xec\xff\xf0\xff\xef\xff\xec\xff\xec\xff\xf1\xff\xf6\xff\xf4\xff\xf5\xff\xfa\xff\xfe\xff\x05\x00\x04\x00\x04\x00\x05\x00\x03\x00\x03\x00\x04\x00\t\x00\x0c\x00\t\x00\t\x00\t\x00\x08\x00\x08\x00\t\x00\r\x00\r\x00\x0c\x00\n\x00\t\x00\x08\x00\x08\x00\x0c\x00\x0c\x00\x0b\x00\n\x00\x08\x00\n\x00\x0c\x00\x03\x00\x04\x00\x00\x00\x05\x00\x0e\x00\r\x00\x0c\x00\r\x00\x15\x00\x0e\x00\t\x00\x0e\x00\x11\x00\x0b\x00\x0b\x00\t\x00\x0e\x00\x15\x00\x14\x00\x15\x00\x0e\x00\x0f\x00\x14\x00\x16\x00\x12\x00\x16\x00\x19\x00\x12\x00\x11\x00\x0b\x00\x01\x00\x01\x00\n\x00\x0b\x00\x0f\x00\x14\x00\x12\x00\x10\x00\x17\x00\x14\x00\x16\x00\x15\x00\n\x00\x0b\x00\x0f\x00\r\x00\x0f\x00\n\x00\r\x00\x10\x00\r\x00\r\x00\x10\x00\r\x00\x04\x00\x00\x00\x07\x00\n\x00\n\x00\n\x00\x08\x00\x05\x00\x02\x00\x06\x00\x05\x00\x07\x00\x04\x00\x02\x00\x05\x00\x02\x00\xff\xff\xfa\xff\xfa\xff\xf9\xff\xf4\xff\xf3\xff\xf6\xff\xf4\xff\xf7\xff\xf7\xff\xf8\xff\xf6\xff\xf8\xff\xee\xff\xea\xff\xee\xff\xf2\xff\xf6\xff\xfa\xff\xfc\xff\x00\x00\x01\x00\x05\x00\x06\x00\x04\x00\x03\x00\x00\x00\x01\x00\xfd\xff\xfb\xff\xfa\xff\xf4\xff\xf4\xff\xf7\xff\xf6\xff\xf3\xff\xf3\xff\xf2\xff\xf3\xff\xf5\xff\xf4\xff\xf4\xff\xfc\xff\xfe\xff\x00\x00\x02\x00\xfe\xff\xfa\xff\xfc\xff\xfc\xff\xf7\xff\xf8\xff\xfd\xff\xfe\xff\xfd\xff\xfd\xff\xfc\xff\xfd\xff\xf6\xff\xf5\xff\xf5\xff\xfa\xff\xfc\xff\xfa\xff\xfc\xff\xf9\xff\xfa\xff\xf4\xff\xf6\xff\xf4\xff\xee\xff\xee\xff\xf0\xff\xee\xff\xed\xff\xea\xff\xea\xff\xea\xff\xe5\xff\xeb\xff\xe8\xff\xed\xff\xf2\xff\xed\xff\xf3\xff\xf1\xff\xf5\xff\xf4\xff\xf7\xff\xfa\xff\xf2\xff\xf6\xff\xf0\xff\xea\xff\xef\xff\xf2\xff\xef\xff\xec\xff\xe6\xff\xe3\xff\xe3\xff\xe1\xff\xe5\xff\xec\xff\xe6\xff\xe6\xff\xea\xff\xee\xff\xf4\xff\xf3\xff\xf5\xff\xfb\xff\xfd\xff\xf9\xff\xfd\xff\xfa\xff\xf5\xff\xfa\xff\xf9\xff\x00\x00\xfc\xff\xff\xff\xff\xff\xf3\xff\xfb\xff\x00\x00\x00\x00\x00\x00\x00\x00\xfd\xff\x00\x00\xff\xff\xfc\xff\x00\x00\xff\xff\xfe\xff\x01\x00\x04\x00\xff\xff\xfc\xff\xff\xff\x02\x00\x03\x00\x05\x00\x03\x00\x03\x00\x06\x00\x07\x00\x04\x00\x03\x00\x07\x00\x03\x00\x00\x00\xfe\xff\xf9\xff\xfb\xff\xf
9\xff\xfa\xff\xfa\xff\xf7\xff\xf4\xff\xf5\xff\xf4\xff\xf9\xff\xfe\xff\x00\x00\x07\x00\x05\x00\x04\x00\x02\x00\x04\x00\x08\x00\x03\x00\x01\x00\x01\x00\x04\x00\x01\x00\x02\x00\x05\x00\x02\x00\x00\x00\x06\x00\x08\x00\n\x00\x07\x00\t\x00\n\x00\x02\x00\x04\x00\x05\x00\x07\x00\x04\x00\x07\x00\x02\x00\x00\x00\x04\x00\x0c\x00\t\x00\x08\x00\x0c\x00\x0b\x00\r\x00\r\x00\x0c\x00\x0c\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x18\x00\x16\x00\x18\x00\x15\x00\x13\x00\x14\x00\x13\x00\x12\x00\x0f\x00\r\x00\t\x00\x07\x00\x06\x00\x06\x00\x06\x00\x04\x00\x00\x00\xfc\xff\x01\x00\x03\x00\x07\x00\x02\x00\x02\x00\x07\x00\x07\x00\t\x00\x07\x00\t\x00\t\x00\x06\x00\x08\x00\x05\x00\x03\x00\xff\xff\x00\x00\xff\xff\xff\xff\x02\x00\x04\x00\x07\x00\x06\x00\x02\x00\x02\x00\x04\x00\x01\x00\x01\x00\x05\x00\x03\x00\xff\xff\xfa\xff\xff\xff\xff\xff\xfa\xff\xf9\xff\x00\x00\xff\xff\xfd\xff\x01\x00\x00\x00\xfe\xff\x00\x00\xfd\xff\x00\x00\x00\x00\x02\x00\x02\x00\x06\x00\x08\x00\x02\x00\x01\x00\x04\x00\x07\x00\x03\x00\x00\x00\x05\x00\x03\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x02\x00\xfe\xff\xf9\xff\xfa\xff\xfb\xff\xfd\xff\xf8\xff\xf5\xff\xf3\xff\xed\xff\xf2\xff\xf5\xff\xf3\xff\xf6\xff\xf7\xff\xf6\xff\xf7\xff\xf6\xff\xf8\xff\xf5\xff\xfe\xff\xfd\xff\xff\xff\x04\x00\x00\x00\xfa\xff\xfa\xff\xfc\xff\xf6\xff\xf4\xff\xf8\xff\xf8\xff\xfa\xff\xfa\xff\xf6\xff\xfa\xff\xfc\xff\xfe\xff\x03\x00\x00\x00\xf7\xff\xf4\xff\xf4\xff\xf8\xff\xf8\xff\xf8\xff\xfa\xff\xf4\xff\xf1\xff\xf3\xff\xf4\xff\xf2\xff\xf0\xff\xef\xff\xef\xff\xeb\xff\xee\xff\xee\xff\xf0\xff\xf2\xff\xf3\xff\xf5\xff\xf2\xff\xf2\xff\xf7\xff\xf5\xff\xf1\xff\xf6\xff\xf9\xff\xfa\xff\xf3\xff\xf1\xff\xef\xff\xed\xff\xe8\xff\xe9\xff\xea\xff\xf1\xff\xf1\xff\xf0\xff\xf7\xff\xf6\xff\xfc\xff\xf7\xff\xf8\xff\xf7\xff\xfd\xff\x00\x00\xf6\xff\xf6\xff\xfb\xff\xfb\xff\xfe\xff\x04\x00\xfe\xff\xf9\xff\x01\x00\xff\xff\xfd\xff\xff\xff\xfe\xff\xfa\xff\xfb\xff\xfc\xff\xfc\xff\xff\xff\xfb\xff\xfc\xff\xfe\xff\xfb\xff\xfd\xff\xfd\xff\xfa\xff\xf9\xff\xfa\xff\xfd\xff\xfe\xff\xfd\xff\x00\x00\xff\xff\x00\x00\x03\x00\x02\x00\x03\x00\x02\x00\x03\x00\x04\x00\x04\x00\x06\x00\x05\x00\x03\x00\x01\x00\xff\xff\xff\xff\xfa\xff\xfa\xff\xfd\xff\xfe\xff\xfc\xff\xfe\xff\xfe\xff\xfd\xff\xfb\xff\xfb\xff\xfb\xff\xf9\xff\xf9\xff\xf9\xff\xfc\xff\xfd\xff\xf8\xff\xf9\xff\xf6\xff\xf3\xff\xfa\xff\xfa\xff\xf6\xff\xf8\xff\xfc\xff\xfa\xff\xfa\xff\xf9\xff\xfb\xff\xf9\xff\xff\xff\xff\xff\xfe\xff\x00\x00\xff\xff\xfe\xff\xfc\xff\x00\x00\x01\x00\x06\x00\x00\x00\x03\x00\t\x00\t\x00\t\x00\x06\x00\x04\x00\x02\x00\x0c\x00\t\x00\t\x00\x0f\x00\x12\x00\x13\x00\x17\x00\x19\x00\x1d\x00\x1d\x00\x16\x00\x16\x00\x18\x00\x19\x00\x15\x00\x16\x00\x16\x00\x14\x00\x13\x00\x17\x00\x16\x00\x10\x00\r\x00\n\x00\x0b\x00\t\x00\x04\x00\x03\x00\x04\x00\x03\x00\x03\x00\x05\x00\x02\x00\x03\x00\x04\x00\x02\x00\x02\x00\xff\xff\xfc\xff\xfb\xff\xff\xff\xff\xff\xfb\xff\xfe\xff\x01\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\xfe\xff\xfd\xff\xff\xff\x02\x00\x00\x00\x00\x00\x01\x00\x04\x00\x06\x00\x05\x00\x08\x00\x05\x00\x04\x00\x06\x00\x05\x00\x01\x00\x00\x00\x02\x00\xfc\xff\xfd\xff\xff\xff\xfe\xff\xfa\xff\xfa\xff\xf8\xff\xf4\xff\xfa\xff\xfe\xff\xfa\xff\xfe\xff\xfe\xff\x01\x00\x01\x00\xfc\xff\xfd\xff\xfd\xff\xfc\xff\xfb\xff\xfb\xff\xfd\xff\x00\x00\xfd\xff\xfc\xff\x00\x00\x00\x00\xfb\xff\xf8\xff\xf9\xff\xf9\xff\xf9\xff\xfc\xff\xfc\xff\xfb\xff\xfd\xff\xfa\xff\xf9\xff\xf8\xff\xf6\xff\xf7\xff\xf6\xff\xf7\xff\xf7\xff\xf6\xff\xf8\xff\xf7\xff\xf7\xff\xf7\xff\xfa\xff\xfb\xff\xfa\xff\xfc\xff\x01\x00\xfc\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xf7\xff\xfa\xff\xfa\xff\xf6
\xff\xf7\xff\xf9\xff\xf6\xff\xf4\xff\xf3\xff\xf0\xff\xf2\xff\xf0\xff\xed\xff\xf2\xff\xf2\xff\xf2\xff\xf6\xff\xf6\xff\xf7\xff\xf5\xff\xf3\xff\xf6\xff\xf8\xff\xf7\xff\xf4\xff\xf3\xff\xef\xff\xf2\xff\xf0\xff\xf1\xff\xef\xff\xf2\xff\xf4\xff\xeb\xff\xef\xff\xf3\xff\xf5\xff\xf3\xff\xf6\xff\xf7\xff\xf3\xff\xf5\xff\xf5\xff\xf6\xff\xf5\xff\xf4\xff\xf5\xff\xf7\xff\xf3\xff\xf3\xff\xf4\xff\xf8\xff\xf8\xff\xf6\xff\xfa\xff\xfd\xff\xfa\xff\xf8\xff\xfb\xff\xfc\xff\xfe\xff\xfa\xff\xf8\xff\xf6\xff\xf7\xff\xf9\xff\xf8\xff\xf8\xff\xf9\xff\xf9\xff\xf9\xff\xfc\xff\xf8\xff\xf9\xff\xff\xff\xfe\xff\xfd\xff\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x01\x00\x03\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x01\x00\x01\x00\x05\x00\xff\xff\xfe\xff\xff\xff\xfd\xff\xfa\xff\xfd\xff\xfc\xff\xfa\xff\xfc\xff\xfe\xff\xfb\xff\xfc\xff\xfd\xff\xfe\xff\xff\xff\xfa\xff\xf9\xff\xfb\xff\x00\x00\x00\x00\x02\x00\x01\x00\x04\x00\x06\x00\x04\x00\x05\x00\x05\x00\x06\x00\x06\x00\x07\x00\t\x00\t\x00\t\x00\x03\x00\x01\x00\x04\x00\x02\x00\x01\x00\x02\x00\x02\x00\x00\x00\x00\x00\x03\x00\x06\x00\x02\x00\x05\x00\x08\x00\x05\x00\x07\x00\t\x00\x08\x00\x05\x00\x06\x00\x0c\x00\x0c\x00\x08\x00\t\x00\x0b\x00\x0b\x00\t\x00\n\x00\x08\x00\x07\x00\n\x00\x04\x00\x06\x00\x08\x00\x08\x00\x08\x00\t\x00\x08\x00\x07\x00\x08\x00\x08\x00\x06\x00\x06\x00\t\x00\n\x00\x06\x00\x08\x00\n\x00\x07\x00\t\x00\x0e\x00\r\x00\x0c\x00\n\x00\x08\x00\x05\x00\x06\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\xff\xfb\xff\xfd\xff\xfa\xff\xf7\xff\xfd\xff\xfd\xff\xf8\xff\xfc\xff\xfc\xff\xfe\xff\xfc\xff\xfb\xff\xfd\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xfb\xff\xff\xff\xfe\xff\xff\xff\xfc\xff\xfb\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xfd\xff\xfd\xff\xfb\xff\xfb\xff\xfb\xff\xfb\xff\xfa\xff\xf8\xff\xf7\xff\xf9\xff\xff\xff\x00\x00\x02\x00\x00\x00\xff\xff\xfb\xff\xfd\xff\xfe\xff\xf8\xff\xfa\xff\xff\xff\xfc\xff\xfb\xff\xfd\xff\xf9\xff\xf5\xff\xf6\xff\xfa\xff\xf9\xff\xf7\xff\xf6\xff\xf2\xff\xf7\xff\xf7\xff\xf7\xff\xf8\xff\xf5\xff\xf5\xff\xf7\xff\xf5\xff\xf5\xff\xf5\xff\xf9\xff\xf7\xff\xf0\xff\xf1\xff\xef\xff\xef\xff\xea\xff\xe8\xff\xea\xff\xed\xff\xee\xff\xf0\xff\xf7\xff\xf6\xff\xf8\xff\xf3\xff\xf1\xff\xf2\xff\xf4\xff\xf7\xff\xf5\xff\xf4\xff\xf5\xff\xf4\xff\xf6\xff\xf7\xff\xf4\xff\xf3\xff\xf6\xff\xf5\xff\xf1\xff\xf5\xff\xfa\xff\xf7\xff\xf7\xff\xf8\xff\xfa\xff\xfe\xff\xfc\xff\xfc\xff\xfa\xff\xf7\xff\xf6\xff\xf7\xff\xf7\xff\xfa\xff\xf7\xff\xf7\xff\xfd\xff\xfb\xff\xfb\xff\xfa\xff\xfb\xff\xff\xff\xff\xff\x00\x00\x02\x00\x03\x00\x02\x00\x01\x00\x01\x00\x02\x00\x01\x00\x02\x00\x00\x00\xfe\xff\xfe\xff\xfd\xff\xff\xff\x01\x00\xfd\xff\xfb\xff\xfc\xff\xfe\xff\xfd\xff\xfb\xff\xfc\xff\xf9\xff\xfa\xff\xfb\xff\xfc\xff\xfd\xff\xf8\xff\xf6\xff\xf4\xff\xf4\xff\xf9\xff\xf9\xff\xf6\xff\xfa\xff\xf9\xff\xf8\xff\xfb\xff\xfb\xff\xfa\xff\xf8\xff\xfb\xff\xfd\xff\xfb\xff\xfd\xff\xfb\xff\xf9\xff\xf9\xff\xfd\xff\xfe\xff\xfd\xff\xfb\xff\xfb\xff\xfc\xff\xfd\xff\xfc\xff\xfc\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x03\x00\x06\x00\x06\x00\x07\x00\x08\x00\x08\x00\n\x00\x0b\x00\x08\x00\t\x00\x08\x00\n\x00\x08\x00\x08\x00\x05\x00\x08\x00\t\x00\n\x00\x0b\x00\t\x00\x05\x00\x03\x00\x04\x00\x06\x00\x04\x00\x01\x00\x02\x00\x03\x00\x03\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\xfc\xff\xfc\xff\x03\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xfe\xff\x01\x00\x01\x00\x02\x00\x01\x00\xfd\xff\x02\x00\x03\x00\x00\x00\x00\x00\x00\x00\x01\x00\x03\x00\x01\x00\x03\x00\x00\x00\x00\x00\x04\x00\x04\x00\x04\x00\x06\x00\n\x00\x0
3\x00\x03\x00\x04\x00\x05\x00\x06\x00\x03\x00\x02\x00\x03\x00\x06\x00\x08\x00\x04\x00\x07\x00\t\x00\x08\x00\x07\x00\x07\x00\x08\x00\x07\x00\x07\x00\x07\x00\x08\x00\t\x00\t\x00\x07\x00\x05\x00\x06\x00\x05\x00\x01\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\xfe\xff\xfc\xff\xfe\xff\xfc\xff\xf9\xff\xf7\xff\xf7\xff\xf8\xff\xf7\xff\xf7\xff\xf8\xff\xf5\xff\xf6\xff\xf4\xff\xf5\xff\xf6\xff\xf7\xff\xf8\xff\xfb\xff\xfd\xff\xfc\xff\xf8\xff\xfa\xff\xfc\xff\xfd\xff\xff\xff\xfb\xff\xfd\xff\xf8\xff\xf4\xff\xf6\xff\xf7\xff\xf5\xff\xf6\xff\xf4\xff\xf4\xff\xf4\xff\xf2\xff\xf4\xff\xf7\xff\xf6\xff\xf4\xff\xfa\xff\xfb\xff\xfa\xff\xf4\xff\xf7\xff\xf8\xff\xfa\xff\xf9\xff\xf6\xff\xfa\xff\xf8\xff\xf9\xff\xf8\xff\xf8\xff\xf8\xff\xfa\xff\xf8\xff\xf5\xff\xf8\xff\xf9\xff\xfc\xff\xfc\xff\xfe\xff\xff\xff\xfc\xff\xfb\xff\xfb\xff\xfc\xff\xf9\xff\xf8\xff\xf9\xff\xf8\xff\xf3\xff\xf2\xff\xf3\xff\xf3\xff\xef\xff\xf0\xff\xf2\xff\xf3\xff\xf2\xff\xf2\xff\xf7\xff\xf7\xff\xfa\xff\xf8\xff\xf7\xff\xf7\xff\xf5\xff\xf7\xff\xf8\xff\xf8\xff\xf9\xff\xfa\xff\xf9\xff\xfc\xff\xfa\xff\xfa\xff\xfc\xff\xfb\xff\xfd\xff\x00\x00\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xfb\xff\xff\xff\xfc\xff\xfb\xff\xfd\xff\xfb\xff\xfe\xff\xfd\xff\xfe\xff\x00\x00\xf8\xff\xfc\xff\xfe\xff\xfd\xff\xfb\xff\xfd\xff\xfc\xff\xf8\xff\xf9\xff\xfa\xff\xf7\xff\xf7\xff\xfa\xff\xf9\xff\xfa\xff\xf6\xff\xf6\xff\xf7\xff\xfb\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfd\xff\xfe\xff\x00\x00\x00\x00\xfb\xff\xf9\xff\xfd\xff\xfc\xff\xfb\xff\xfa\xff\xfa\xff\xf7\xff\xf7\xff\xfc\xff\xfa\xff\xfc\xff\xfd\xff\xfd\xff\x00\x00\x00\x00\xfa\xff\xf8\xff\xfa\xff\xf7\xff\xf3\xff\xf6\xff\xf7\xff\xf6\xff\xf8\xff\xfa\xff\xf7\xff\xf7\xff\xf6\xff\xfb\xff\xfb\xff\xfc\xff\xfd\xff\xfd\xff\x03\x00\x00\x00\x04\x00\t\x00\x05\x00\x07\x00\t\x00\t\x00\x08\x00\t\x00\n\x00\n\x00\x05\x00\x02\x00\x02\x00\x02\x00\x00\x00\x01\x00\x01\x00\x02\x00\x07\x00\x05\x00\x06\x00\x04\x00\x01\x00\x00\x00\x02\x00\x02\x00\x00\x00\x05\x00\t\x00\x0b\x00\n\x00\x0b\x00\x0c\x00\x0b\x00\t\x00\n\x00\t\x00\n\x00\r\x00\x0e\x00\r\x00\x08\x00\x05\x00\x02\x00\x04\x00\x02\x00\x01\x00\x03\x00\x00\x00\xff\xff\x03\x00\x05\x00\x04\x00\x03\x00\x00\x00\x00\x00\x06\x00\t\x00\x04\x00\x01\x00\x01\x00\xfe\xff\x00\x00\x00\x00\x00\x00\xfc\xff\xfd\xff\xfe\xff\xfd\xff\xfe\xff\xff\xff\xfe\xff\x01\x00\x02\x00\x00\x00\x03\x00\x02\x00\x04\x00\x05\x00\x01\x00\x02\x00\x03\x00\x04\x00\x03\x00\x04\x00\x06\x00\x06\x00\x01\x00\xfe\xff\xfd\xff\xfb\xff\xfa\xff\xfb\xff\xfc\xff\xf8\xff\xf5\xff\xf6\xff\xf6\xff\xf5\xff\xf6\xff\xf9\xff\xf8\xff\xf5\xff\xf6\xff\xf4\xff\xf1\xff\xf4\xff\xf4\xff\xf2\xff\xee\xff\xee\xff\xef\xff\xee\xff\xf0\xff\xf2\xff\xf2\xff\xf5\xff\xef\xff\xf0\xff\xee\xff\xf1\xff\xf3\xff\xf1\xff\xf0\xff\xf3\xff\xf3\xff\xf3\xff\xf0\xff\xf4\xff\xf8\xff\xee\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
-  b'<snapshot byte payload elided: raw audio sample data ending in a long run of \x00 padding>'
-# ---
-# name: test_pipeline_error
-  b'<snapshot byte payload elided: raw audio sample data; the byte string continues>
x98\xff\xad\xff\xcf\xff\xf0\xff\x02\x00\n\x00\x15\x00 \x00$\x00\x1d\x00\x11\x00\x05\x00\xfd\xff\xf5\xff\xeb\xff\xe3\xff\xe6\xff\xfb\xff\x16\x00)\x002\x00,\x00\x1b\x00\n\x00\x02\x00\xfe\xff\xf7\xff\xec\xff\xe3\xff\xe6\xff\xf1\xff\x00\x00\t\x00\x06\x00\xff\xff\x05\x00\x13\x00\x1f\x00!\x00\x1c\x00\x1a\x00\x1b\x00\x17\x00\x06\x00\xf5\xff\xef\xff\xf5\xff\x01\x00\n\x00\x0e\x00\n\x00\xfb\xff\xea\xff\xeb\xff\x04\x00%\x006\x001\x00!\x00\x0f\x00\xfb\xff\xe4\xff\xd5\xff\xdb\xff\xf4\xff\n\x00\r\x00\x07\x00\t\x00\x15\x00!\x00\x1c\x00\n\x00\xf0\xff\xdd\xff\xd7\xff\xd7\xff\xd4\xff\xcb\xff\xc9\xff\xd4\xff\xec\xff\x03\x00\x0c\x00\x0e\x00\r\x00\r\x00\x0e\x00\r\x00\t\x00\x00\x00\xf1\xff\xde\xff\xd4\xff\xde\xff\xf6\xff\x0c\x00\x10\x00\x07\x00\xff\xff\x02\x00\r\x00\x12\x00\x08\x00\xf2\xff\xe0\xff\xde\xff\xe6\xff\xee\xff\xf7\xff\x01\x00\x0f\x00\x1f\x00/\x00B\x00I\x00>\x00%\x00\x16\x00\x1b\x00\'\x00(\x00\x11\x00\xf9\xff\xef\xff\xf6\xff\xf9\xff\xee\xff\xdc\xff\xd3\xff\xda\xff\xee\xff\x07\x00\x17\x00\x18\x00\x05\x00\xf2\xff\xe7\xff\xe6\xff\xe7\xff\xee\xff\x02\x00\x1b\x00$\x00\x1c\x00\x0f\x00\x10\x00$\x009\x00>\x000\x00\x12\x00\xf2\xff\xd6\xff\xbe\xff\xaf\xff\xb7\xff\xd8\xff\x00\x00\x18\x00\x12\x00\xfd\xff\xf3\xff\xfb\xff\x06\x00\x08\x00\xff\xff\xf5\xff\xf2\xff\xf3\xff\xf3\xff\xef\xff\xea\xff\xe6\xff\xe8\xff\xf1\xff\xfd\xff\x03\x00\x04\x00\x05\x00\xff\xff\xf1\xff\xe4\xff\xde\xff\xdf\xff\xdd\xff\xdb\xff\xe0\xff\xf0\xff\x03\x00\x0f\x00\x15\x00\x18\x00\x1c\x00\x1f\x00$\x00*\x00*\x00\x18\x00\xfa\xff\xe0\xff\xdb\xff\xe3\xff\xea\xff\xea\xff\xe9\xff\xef\xff\xfc\xff\x0c\x00\x1c\x00\'\x00"\x00\x11\x00\x01\x00\xfe\xff\x03\x00\x04\x00\xff\xff\x00\x00\n\x00\x12\x00\x0f\x00\x0b\x00\x12\x00!\x00,\x00,\x00$\x00\x13\x00\xfb\xff\xe0\xff\xc8\xff\xba\xff\xc2\xff\xe1\xff\x0b\x00!\x00\x18\x00\x05\x00\x07\x00"\x00>\x00@\x00,\x00\x17\x00\x12\x00\x0e\x00\xff\xff\xe8\xff\xd9\xff\xe2\xff\xfb\xff\x15\x00\x1d\x00\x12\x00\xfe\xff\xed\xff\xe8\xff\xea\xff\xe9\xff\xe2\xff\xd7\xff\xd5\xff\xde\xff\xed\xff\xf8\xff\xfb\xff\xf8\xff\xf9\xff\x06\x00\x16\x00\x1d\x00\x1d\x00 \x00#\x00\x19\x00\x05\x00\xee\xff\xe1\xff\xdd\xff\xdc\xff\xda\xff\xdc\xff\xe0\xff\xe6\xff\xe7\xff\xf1\xff\t\x00 
\x00!\x00\r\x00\xf2\xff\xe1\xff\xd9\xff\xd6\xff\xe0\xff\xf4\xff\t\x00\x18\x00#\x00.\x006\x007\x000\x00)\x00!\x00\x13\x00\xff\xff\xe9\xff\xdb\xff\xdb\xff\xf0\xff\x0c\x00\x1c\x00\x16\x00\x08\x00\x08\x00\x16\x00%\x00#\x00\x17\x00\x0b\x00\t\x00\n\x00\x02\x00\xf2\xff\xe9\xff\xf1\xff\x03\x00\x0e\x00\n\x00\x00\x00\xff\xff\x07\x00\x11\x00\x11\x00\x01\x00\xf0\xff\xed\xff\xf1\xff\xe7\xff\xd1\xff\xc3\xff\xd1\xff\xf3\xff\x10\x00\x1b\x00\x1c\x00\x1a\x00\x15\x00\x0f\x00\x0b\x00\x03\x00\xf7\xff\xe8\xff\xe1\xff\xe9\xff\xf5\xff\xf7\xff\xee\xff\xe0\xff\xda\xff\xd8\xff\xd9\xff\xe9\xff\x04\x00\x1c\x00\x1e\x00\x0b\x00\xf4\xff\xe5\xff\xdb\xff\xd5\xff\xd5\xff\xde\xff\xf0\xff\x06\x00\x16\x00\x1c\x00\x1e\x00!\x00)\x00*\x00\x1b\x00\x01\x00\xe4\xff\xcf\xff\xc7\xff\xd1\xff\xe9\xff\x04\x00\x16\x00\x1b\x00\x1b\x00$\x001\x003\x00(\x00\x1c\x00\x1b\x00\x1e\x00\x18\x00\x06\x00\xf3\xff\xf1\xff\xfa\xff\x00\x00\x03\x00\t\x00\x14\x00\x18\x00\x13\x00\t\x00\x01\x00\xf9\xff\xf0\xff\xe4\xff\xd9\xff\xd1\xff\xd4\xff\xe9\xff\t\x00\x1c\x00\x19\x00\x11\x00\x12\x00\x1f\x00*\x00)\x00\x1e\x00\x0e\x00\x06\x00\x07\x00\x03\x00\xed\xff\xd1\xff\xc8\xff\xd7\xff\xf6\xff\x02\x00\xf8\xff\xe6\xff\xe7\xff\xf9\xff\x0c\x00\t\x00\xf5\xff\xe1\xff\xdd\xff\xe9\xff\xf4\xff\xf7\xff\xf4\xff\xf3\xff\xfc\xff\x0c\x00\x16\x00\x1c\x00#\x00,\x00+\x00\x1c\x00\x03\x00\xeb\xff\xd6\xff\xc8\xff\xc7\xff\xd9\xff\xf4\xff\x06\x00\n\x00\x0c\x00\x12\x00\x1c\x00$\x00&\x00$\x00\x1f\x00\n\x00\xe8\xff\xd0\xff\xd6\xff\xee\xff\xfa\xff\xf4\xff\xf3\xff\x07\x00&\x001\x00!\x00\x07\x00\xf8\xff\xf6\xff\xf7\xff\xef\xff\xe1\xff\xda\xff\xe0\xff\xf4\xff\t\x00\x11\x00\x11\x00\x17\x00#\x00.\x00.\x00*\x00%\x00\x19\x00\x03\x00\xe7\xff\xd3\xff\xd1\xff\xe2\xff\xfd\xff\x0e\x00\n\x00\xfa\xff\xec\xff\xed\xff\x00\x00\x13\x00\x10\x00\xff\xff\xf2\xff\xf6\xff\xfb\xff\xee\xff\xd6\xff\xd0\xff\xe4\xff\x06\x00\x1f\x00\'\x00$\x00 \x00\x1f\x00\x1f\x00\x17\x00\x06\x00\xf6\xff\xeb\xff\xe9\xff\xe7\xff\xe7\xff\xed\xff\xfb\xff\x0b\x00\r\x00\x08\x00\x08\x00\x13\x00"\x00(\x00!\x00\n\x00\xf2\xff\xdf\xff\xd9\xff\xde\xff\xe2\xff\xe2\xff\xe5\xff\xed\xff\xfe\xff\r\x00\x13\x00\x17\x00\x18\x00\x16\x00\x0b\x00\xf5\xff\xd6\xff\xbd\xff\xbc\xff\xd8\xff\xf4\xff\xf9\xff\xf6\xff\x06\x00/\x00O\x00J\x00/\x00\x18\x00\x11\x00\t\x00\xf1\xff\xd5\xff\xd0\xff\xe6\xff\t\x00\x1f\x00\x1f\x00\x11\x00\xfe\xff\xf6\xff\xff\xff\x13\x00\x1e\x00\x17\x00\x03\x00\xf0\xff\xe4\xff\xdd\xff\xd6\xff\xd8\xff\xe6\xff\xfe\xff\x13\x00\x1f\x00!\x00 \x00!\x00%\x00!\x00\x14\x00\x06\x00\xfe\xff\xf9\xff\xed\xff\xdd\xff\xdc\xff\xf3\xff\x15\x00+\x00\'\x00\x1b\x00\x17\x00\x1c\x00\x18\x00\n\x00\xfa\xff\xf1\xff\xf0\xff\xef\xff\xf0\xff\xf0\xff\xea\xff\xe2\xff\xe2\xff\xee\xff\xf9\xff\xfd\xff\x00\x00\x05\x00\x0c\x00\x0b\x00\x01\x00\xee\xff\xd7\xff\xc3\xff\xbf\xff\xcb\xff\xdd\xff\xed\xff\xff\xff\x14\x00#\x00)\x00\'\x00*\x002\x00+\x00\x0c\x00\xdf\xff\xc1\xff\xc5\xff\xde\xff\xf6\xff\xfd\xff\xfe\xff\x01\x00\x0b\x00\x15\x00\x13\x00\x0c\x00\x08\x00\x08\x00\x06\x00\xfa\xff\xe8\xff\xdb\xff\xdc\xff\xef\xff\x06\x00\x17\x00\x1d\x00\x1e\x00 \x00#\x00)\x00+\x00\'\x00 
\x00\x14\x00\x02\x00\xe6\xff\xcf\xff\xc7\xff\xd6\xff\xf6\xff\x12\x00$\x00*\x00+\x00,\x00-\x00%\x00\x15\x00\xfe\xff\xec\xff\xe3\xff\xe0\xff\xe6\xff\xf3\xff\x02\x00\x0b\x00\n\x00\x05\x00\x01\x00\xff\xff\xff\xff\xfb\xff\xf0\xff\xe4\xff\xe0\xff\xe2\xff\xdf\xff\xd5\xff\xd3\xff\xe1\xff\xf3\xff\xfc\xff\xfe\xff\xfc\xff\xfd\xff\x07\x00\x17\x00(\x00-\x00\x1e\x00\xfe\xff\xde\xff\xcb\xff\xcb\xff\xd7\xff\xe5\xff\xf8\xff\x0b\x00\x17\x00\x11\x00\x06\x00\x04\x00\r\x00\x19\x00\x19\x00\r\x00\xfc\xff\xe7\xff\xd9\xff\xda\xff\xe5\xff\xf6\xff\x03\x00\x0e\x00\x15\x00\x19\x00\x19\x00\x1a\x00\x1f\x00$\x00\x1e\x00\x0c\x00\xf4\xff\xe0\xff\xda\xff\xe1\xff\xf5\xff\x10\x00(\x006\x00:\x009\x003\x00)\x00\x1a\x00\n\x00\xf6\xff\xe3\xff\xd5\xff\xd2\xff\xdc\xff\xec\xff\xf8\xff\x02\x00\n\x00\n\x00\x04\x00\xf9\xff\xf5\xff\xf9\xff\xfc\xff\xfb\xff\xf1\xff\xdf\xff\xcb\xff\xc5\xff\xdb\xff\x01\x00\x1d\x00\x1e\x00\r\x00\x03\x00\x02\x00\x07\x00\x07\x00\x02\x00\xfc\xff\xf9\xff\xfa\xff\xf6\xff\xec\xff\xe6\xff\xea\xff\xf7\xff\t\x00\x13\x00\x0f\x00\x04\x00\x00\x00\x08\x00\x18\x00#\x00\x1b\x00\x07\x00\xee\xff\xdb\xff\xdb\xff\xe2\xff\xed\xff\xf8\xff\x05\x00\x10\x00\x17\x00\x17\x00\x17\x00\x1a\x00\x1f\x00\x1a\x00\x0f\x00\x02\x00\xf7\xff\xf2\xff\xf3\xff\xfd\xff\n\x00\x14\x00\x16\x00\x18\x00\x1c\x00!\x00"\x00\x1c\x00\x14\x00\n\x00\xfa\xff\xe7\xff\xd7\xff\xd2\xff\xd8\xff\xe3\xff\xf3\xff\x03\x00\x0e\x00\r\x00\x07\x00\x04\x00\x05\x00\x02\x00\xf8\xff\xee\xff\xe5\xff\xdd\xff\xd5\xff\xdb\xff\xee\xff\x00\x00\x08\x00\x08\x00\x0b\x00\x0e\x00\r\x00\x08\x00\x06\x00\x0b\x00\x10\x00\x0b\x00\xfe\xff\xf1\xff\xea\xff\xee\xff\xfd\xff\x0f\x00\x1f\x00"\x00\x18\x00\t\x00\x02\x00\x04\x00\x05\x00\x02\x00\x04\x00\n\x00\n\x00\xff\xff\xef\xff\xea\xff\xf3\xff\x00\x00\x06\x00\x03\x00\xfd\xff\x02\x00\r\x00\x16\x00\x19\x00\x17\x00\x14\x00\r\x00\x01\x00\xf5\xff\xee\xff\xef\xff\xf4\xff\x00\x00\x0b\x00\x12\x00\x16\x00\x19\x00\x1c\x00\x1e\x00\x1b\x00\x0b\x00\xf4\xff\xdc\xff\xce\xff\xcf\xff\xdd\xff\xee\xff\xf9\xff\xfb\xff\xf4\xff\xf1\xff\xf4\xff\xfc\xff\xfd\xff\xf8\xff\xf4\xff\xf2\xff\xed\xff\xe0\xff\xd7\xff\xd8\xff\xe8\xff\xfa\xff\x04\x00\t\x00\x08\x00\x04\x00\xfd\xff\xfd\xff\x07\x00\x0f\x00\x0c\x00\x00\x00\xee\xff\xe3\xff\xe5\xff\xf3\xff\x04\x00\x13\x00\x1d\x00!\x00\x1e\x00\x18\x00\x12\x00\x10\x00\x11\x00\x14\x00\x17\x00\x13\x00\t\x00\x00\x00\xff\xff\x08\x00\x0e\x00\r\x00\x03\x00\xfa\xff\xf8\xff\xff\xff\n\x00\x0f\x00\x10\x00\x0f\x00\x11\x00\x11\x00\t\x00\xfa\xff\xee\xff\xf1\xff\x01\x00\x12\x00\x19\x00\x17\x00\x11\x00\x12\x00\x11\x00\t\x00\xfd\xff\xf0\xff\xe6\xff\xdf\xff\xdb\xff\xdb\xff\xde\xff\xe2\xff\xe7\xff\xf2\xff\xfc\xff\x00\x00\xfe\xff\xfd\xff\x00\x00\x03\x00\x02\x00\xfb\xff\xed\xff\xdf\xff\xdb\xff\xe4\xff\xf9\xff\x08\x00\x0c\x00\x04\x00\xf9\xff\xf5\xff\xfb\xff\x02\x00\x08\x00\x07\x00\x01\x00\xf7\xff\xeb\xff\xe8\xff\xec\xff\xf6\xff\x05\x00\x14\x00 
\x00#\x00\x1b\x00\r\x00\x02\x00\x03\x00\x0b\x00\x13\x00\x12\x00\x08\x00\xfa\xff\xf3\xff\xf4\xff\xfa\xff\xff\xff\x04\x00\x0b\x00\x16\x00!\x00(\x00"\x00\x14\x00\x0e\x00\x11\x00\x12\x00\x0c\x00\xfd\xff\xf6\xff\xfe\xff\x13\x00#\x00\x1f\x00\x0e\x00\xff\xff\xfe\xff\x04\x00\x02\x00\xf6\xff\xec\xff\xe7\xff\xe5\xff\xe1\xff\xdd\xff\xe1\xff\xed\xff\xfd\xff\x08\x00\x0b\x00\x04\x00\xff\xff\xfd\xff\xfd\xff\xf8\xff\xf0\xff\xec\xff\xed\xff\xf0\xff\xee\xff\xe8\xff\xe8\xff\xf0\xff\xf7\xff\xfa\xff\xf9\xff\xf7\xff\xf6\xff\xf6\xff\xfb\xff\x02\x00\x06\x00\x02\x00\xfa\xff\xf7\xff\xfa\xff\xfd\xff\xff\xff\x04\x00\x10\x00\x1c\x00\x1d\x00\x12\x00\x06\x00\x05\x00\x0c\x00\x14\x00\x12\x00\x07\x00\xfa\xff\xee\xff\xe9\xff\xe7\xff\xe6\xff\xec\xff\xfc\xff\x12\x00#\x00&\x00\x1c\x00\r\x00\x05\x00\x03\x00\x03\x00\x02\x00\xfe\xff\xfb\xff\xfb\xff\xff\xff\x08\x00\x0f\x00\x14\x00\x18\x00\x1d\x00\x1e\x00\x16\x00\x08\x00\xfb\xff\xf5\xff\xf3\xff\xea\xff\xdb\xff\xd9\xff\xed\xff\t\x00\x1a\x00\x16\x00\t\x00\x04\x00\x07\x00\x0b\x00\x05\x00\xf9\xff\xf2\xff\xf3\xff\xf5\xff\xf0\xff\xe7\xff\xe3\xff\xee\xff\x00\x00\x0e\x00\x10\x00\x06\x00\xf9\xff\xf1\xff\xf0\xff\xf6\xff\xfa\xff\xfc\xff\xfb\xff\xfc\xff\xfa\xff\xf4\xff\xed\xff\xe9\xff\xf5\xff\x0c\x00\x1b\x00\x1c\x00\x13\x00\x0b\x00\t\x00\x07\x00\x05\x00\x03\x00\x06\x00\x05\x00\xf9\xff\xe9\xff\xe1\xff\xe9\xff\xfc\xff\x0e\x00\x16\x00\x17\x00\x15\x00\x17\x00\x1a\x00\x16\x00\x0b\x00\xfe\xff\xf6\xff\xf2\xff\xf1\xff\xef\xff\xeb\xff\xef\xff\xfb\xff\x11\x00$\x00&\x00\x18\x00\x03\x00\xf4\xff\xee\xff\xeb\xff\xe1\xff\xd6\xff\xd4\xff\xe1\xff\xf6\xff\x08\x00\x10\x00\x19\x00\x1e\x00#\x00"\x00\x1b\x00\x0f\x00\xff\xff\xf4\xff\xf0\xff\xee\xff\xea\xff\xe7\xff\xef\xff\x01\x00\x0f\x00\x10\x00\x07\x00\xfe\xff\xfc\xff\xfc\xff\xfa\xff\xf6\xff\xf8\xff\xfe\xff\x04\x00\xff\xff\xf2\xff\xe8\xff\xe7\xff\xf6\xff\x0c\x00\x1f\x00\'\x00$\x00\x1b\x00\x0f\x00\x04\x00\xfb\xff\xf7\xff\xf6\xff\xf6\xff\xf2\xff\xe9\xff\xe5\xff\xe8\xff\xee\xff\xf9\xff\x04\x00\x0f\x00\x17\x00\x19\x00\x15\x00\x0e\x00\x04\x00\xfc\xff\xfb\xff\xfb\xff\xf6\xff\xe9\xff\xe1\xff\xec\xff\x04\x00\x1e\x00%\x00\x1e\x00\x12\x00\r\x00\x0b\x00\x04\x00\xf8\xff\xe8\xff\xdd\xff\xdc\xff\xe4\xff\xef\xff\xf7\xff\xfc\xff\x04\x00\x13\x00 \x00#\x00\x18\x00\x08\x00\xfd\xff\xf6\xff\xee\xff\xe3\xff\xdc\xff\xde\xff\xe9\xff\xf7\xff\x03\x00\x0c\x00\x0e\x00\x13\x00\x18\x00\x1a\x00\x14\x00\n\x00\x04\x00\x00\x00\xfd\xff\xf9\xff\xf5\xff\xf5\xff\xfb\xff\x03\x00\x0b\x00\x10\x00\x13\x00\x14\x00\x13\x00\x0e\x00\x06\x00\xfb\xff\xf3\xff\xf3\xff\xf6\xff\xf3\xff\xea\xff\xe4\xff\xe3\xff\xe9\xff\xf2\xff\xfe\xff\x10\x00"\x00.\x00.\x00!\x00\x0e\x00\xfc\xff\xf2\xff\xee\xff\xec\xff\xe8\xff\xe9\xff\xf0\xff\xf9\xff\x06\x00\r\x00\x14\x00\x19\x00\x1a\x00\x13\x00\x02\x00\xf0\xff\xe6\xff\xe0\xff\xe0\xff\xe1\xff\xe4\xff\xed\xff\xf6\xff\x01\x00\r\x00\x18\x00 
\x00$\x00\x1f\x00\x14\x00\x06\x00\xf7\xff\xe8\xff\xe0\xff\xe3\xff\xec\xff\xf6\xff\xfd\xff\x00\x00\x04\x00\x0c\x00\x13\x00\x16\x00\x11\x00\x06\x00\x00\x00\xfe\xff\x00\x00\xfd\xff\xf7\xff\xf4\xff\xfa\xff\x01\x00\x03\x00\x04\x00\x06\x00\x0c\x00\x13\x00\x17\x00\x18\x00\x17\x00\x0e\x00\xfe\xff\xf1\xff\xec\xff\xed\xff\xec\xff\xe8\xff\xe7\xff\xec\xff\xf6\xff\x02\x00\x11\x00\x1c\x00\x1f\x00\x1b\x00\x14\x00\x0f\x00\n\x00\xff\xff\xf3\xff\xeb\xff\xed\xff\xf4\xff\xf9\xff\xfb\xff\xfd\xff\x05\x00\x0e\x00\x19\x00\x1a\x00\x13\x00\x07\x00\xf5\xff\xe9\xff\xe1\xff\xdf\xff\xe4\xff\xec\xff\xf2\xff\xf6\xff\xf7\xff\xff\xff\x0e\x00\x1f\x00%\x00\x1e\x00\x0f\x00\xfe\xff\xef\xff\xe2\xff\xdc\xff\xdf\xff\xe7\xff\xef\xff\xf6\xff\xf9\xff\xfc\xff\xff\xff\x05\x00\x0e\x00\x13\x00\x12\x00\x0e\x00\x0b\x00\t\x00\x05\x00\xfc\xff\xf5\xff\xf7\xff\xfb\xff\xfa\xff\xfb\xff\x02\x00\x10\x00\x1a\x00\x1b\x00\x11\x00\xfd\xff\xe9\xff\xda\xff\xd8\xff\xe4\xff\xf1\xff\xf8\xff\xf7\xff\xf2\xff\xf2\xff\xfa\xff\x06\x00\x13\x00 \x00%\x00&\x00"\x00\x1c\x00\x13\x00\x0b\x00\x04\x00\x00\x00\xfe\xff\xfc\xff\xfe\xff\x03\x00\x08\x00\x06\x00\x05\x00\x05\x00\x0c\x00\x11\x00\x0e\x00\x03\x00\xf2\xff\xe4\xff\xe1\xff\xe9\xff\xf3\xff\xf4\xff\xf1\xff\xf5\xff\x08\x00\x1f\x00*\x00$\x00\x15\x00\n\x00\x00\x00\xf5\xff\xec\xff\xe6\xff\xe8\xff\xed\xff\xf3\xff\xf9\xff\x01\x00\x07\x00\n\x00\n\x00\n\x00\t\x00\x0b\x00\x0b\x00\x0b\x00\x01\x00\xf0\xff\xe4\xff\xe4\xff\xed\xff\xf2\xff\xed\xff\xe8\xff\xee\xff\x00\x00\x10\x00\x12\x00\x07\x00\xf6\xff\xe8\xff\xe6\xff\xea\xff\xf2\xff\xf4\xff\xf3\xff\xf1\xff\xf3\xff\xfa\xff\x04\x00\x0f\x00\x17\x00\x1e\x00\x1d\x00\x17\x00\r\x00\x02\x00\xfa\xff\xf7\xff\xfc\xff\x06\x00\n\x00\x05\x00\xfc\xff\xf6\xff\xf9\xff\x01\x00\x0b\x00\x17\x00 \x00\x1f\x00\x14\x00\x08\x00\x03\x00\x04\x00\x02\x00\xfb\xff\xf7\xff\xf9\xff\x04\x00\x0f\x00\x15\x00\x13\x00\x0f\x00\x10\x00\x12\x00\x14\x00\x0c\x00\xfc\xff\xee\xff\xeb\xff\xef\xff\xf0\xff\xec\xff\xea\xff\xf0\xff\x00\x00\x0f\x00\x18\x00\x17\x00\x11\x00\x0f\x00\x13\x00\x13\x00\x08\x00\xf3\xff\xe5\xff\xe8\xff\xef\xff\xf3\xff\xef\xff\xf1\xff\xf8\xff\x00\x00\xff\xff\xfc\xff\xf8\xff\xf3\xff\xea\xff\xe3\xff\xe1\xff\xe5\xff\xe9\xff\xe8\xff\xe7\xff\xe5\xff\xe6\xff\xf2\xff\x03\x00\x16\x00\x1e\x00\x17\x00\x0c\x00\x06\x00\x07\x00\x06\x00\x03\x00\xfc\xff\xf6\xff\xf8\xff\xfb\xff\x02\x00\x06\x00\x04\x00\x02\x00\x07\x00\x0f\x00\x14\x00\x10\x00\x06\x00\x00\x00\xff\xff\x04\x00\x0c\x00\x10\x00\x0c\x00\x04\x00\x02\x00\x08\x00\x13\x00\x1a\x00\x19\x00\x18\x00\x1a\x00\x1a\x00\x12\x00\x04\x00\xfc\xff\xfa\xff\xf5\xff\xef\xff\xec\xff\xf7\xff\x06\x00\x0c\x00\x03\x00\xfb\xff\xfc\xff\t\x00\x16\x00\x1b\x00\x13\x00\x03\x00\xf5\xff\xef\xff\xf0\xff\xf0\xff\xec\xff\xe9\xff\xef\xff\xfb\xff\x03\x00\x02\x00\xfb\xff\xf8\xff\xf6\xff\xf3\xff\xec\xff\xe7\xff\xe7\xff\xec\xff\xf0\xff\xf1\xff\xf2\xff\xf4\xff\xfe\xff\x08\x00\x0f\x00\r\x00\x08\x00\x08\x00\x05\x00\x01\x00\xfa\xff\xf3\xff\xf0\xff\xed\xff\xed\xff\xee\xff\xed\xff\xed\xff\xf2\xff\xfa\xff\x08\x00\x0f\x00\x0f\x00\n\x00\x06\x00\t\x00\x0c\x00\t\x00\x05\x00\x04\x00\x05\x00\n\x00\x10\x00\x19\x00\x1c\x00\x18\x00\x16\x00\x13\x00\x12\x00\x0b\x00\x00\x00\xf8\xff\xf9\xff\xfe\xff\x01\x00\xff\xff\x01\x00\x05\x00\x08\x00\t\x00\x08\x00\x07\x00\n\x00\x0c\x00\x0f\x00\x12\x00\x11\x00\t\x00\x00\x00\xfa\xff\xf8\xff\xf5\xff\xf0\xff\xf1\xff\xf5\xff\xf6\xff\xf0\xff\xee\xff\xf3\xff\xfa\xff\xfb\xff\xf3\xff\xed\xff\xed\xff\xf4\xff\xf9\xff\xfa\xff\xf7\xff\xf8\xff\xfc\xff\x03\x00\x07\x00\x05\x00\x02\x00\x03\x00\x05\x00\x06\x00\x02\x00\x00\x00\x02\x00\x05\x00\x02\x00\xf9\xff\xf3\xff\xf3\xff\xf4\xff\xf5\xff\xf7\xff\xff\x
ff\t\x00\x10\x00\x11\x00\r\x00\x03\x00\xfa\xff\xf4\xff\xf9\xff\x01\x00\x05\x00\x02\x00\xfd\xff\x01\x00\t\x00\r\x00\n\x00\x04\x00\xfc\xff\xf7\xff\xf6\xff\xf9\xff\xfe\xff\x01\x00\x01\x00\x02\x00\x07\x00\r\x00\x11\x00\x10\x00\x0e\x00\t\x00\x04\x00\x01\x00\x04\x00\x07\x00\t\x00\x07\x00\x06\x00\x06\x00\x05\x00\x01\x00\xfc\xff\xfb\xff\xfa\xff\xf7\xff\xf3\xff\xf3\xff\xf2\xff\xed\xff\xe7\xff\xe9\xff\xf7\xff\x06\x00\x0e\x00\x0b\x00\x02\x00\xfd\xff\xfb\xff\x00\x00\x02\x00\x00\x00\xfc\xff\xfd\xff\x03\x00\x05\x00\x00\x00\xfc\xff\xfd\xff\x02\x00\x03\x00\x00\x00\xfc\xff\xfc\xff\xf9\xff\xf6\xff\xf3\xff\xf3\xff\xfc\xff\x08\x00\x0e\x00\x0e\x00\x08\x00\x02\x00\x02\x00\x0b\x00\x13\x00\x13\x00\x0e\x00\t\x00\x0c\x00\x0f\x00\x0c\x00\x04\x00\xfc\xff\xfd\xff\x00\x00\xfe\xff\xf8\xff\xf1\xff\xee\xff\xf3\xff\xfb\xff\x02\x00\x02\x00\xfb\xff\xf7\xff\xf6\xff\xf9\xff\xfb\xff\xf9\xff\xf8\xff\xfb\xff\xff\xff\x03\x00\x02\x00\x02\x00\x03\x00\x00\x00\xfc\xff\xfb\xff\x01\x00\x04\x00\x01\x00\xf8\xff\xf0\xff\xea\xff\xe6\xff\xe9\xff\xf3\xff\x05\x00\x12\x00\x15\x00\x12\x00\r\x00\n\x00\x06\x00\xfc\xff\xf8\xff\xf9\xff\x03\x00\x0b\x00\x08\x00\xfc\xff\xf2\xff\xf7\xff\t\x00\x1a\x00\x1a\x00\n\x00\xfa\xff\xf1\xff\xf0\xff\xf2\xff\xf3\xff\xf7\xff\x01\x00\x0c\x00\x12\x00\x11\x00\x0b\x00\x06\x00\x02\x00\x04\x00\x0b\x00\x10\x00\x11\x00\x12\x00\x11\x00\x0b\x00\x05\x00\x00\x00\x01\x00\x02\x00\x01\x00\xfd\xff\xfe\xff\x03\x00\x0b\x00\x0f\x00\x0e\x00\x06\x00\x03\x00\x04\x00\t\x00\n\x00\x02\x00\xf4\xff\xed\xff\xf2\xff\xfc\xff\xff\xff\xf8\xff\xf3\xff\xf5\xff\xfe\xff\x01\x00\xfc\xff\xf4\xff\xeb\xff\xe6\xff\xe4\xff\xe5\xff\xe5\xff\xe1\xff\xde\xff\xe2\xff\xf2\xff\x05\x00\r\x00\x0b\x00\t\x00\n\x00\n\x00\x05\x00\x00\x00\xfc\xff\xfb\xff\xfd\xff\xff\xff\xfe\xff\xf9\xff\xfc\xff\x05\x00\x11\x00\x16\x00\x13\x00\x0c\x00\x06\x00\x00\x00\xf5\xff\xeb\xff\xea\xff\xf6\xff\x05\x00\x0e\x00\x0b\x00\x05\x00\x01\x00\x02\x00\x08\x00\x10\x00\x10\x00\x0e\x00\x0e\x00\x11\x00\x11\x00\x0c\x00\x03\x00\xfd\xff\xfe\xff\xff\xff\x01\x00\x03\x00\x06\x00\x02\x00\x00\x00\x05\x00\r\x00\x15\x00\x19\x00\x17\x00\x0f\x00\x07\x00\xfe\xff\xf8\xff\xf3\xff\xef\xff\xee\xff\xef\xff\xf6\xff\x05\x00\x12\x00\x15\x00\n\x00\xfd\xff\xf5\xff\xf3\xff\xed\xff\xe5\xff\xe2\xff\xe1\xff\xe4\xff\xe7\xff\xec\xff\xf2\xff\xf9\xff\x02\x00\x0c\x00\x14\x00\x15\x00\x0c\x00\xfb\xff\xeb\xff\xe2\xff\xe5\xff\xf2\xff\xfd\xff\xfd\xff\xf7\xff\xf6\xff\xfe\xff\x07\x00\t\x00\x06\x00\x02\x00\xff\xff\xfe\xff\xfd\xff\xfc\xff\xfa\xff\xf7\xff\xf8\xff\x01\x00\x0b\x00\x14\x00\x11\x00\n\x00\x06\x00\x08\x00\x0e\x00\x12\x00\x14\x00\x14\x00\x12\x00\x0e\x00\t\x00\x05\x00\xff\xff\xf9\xff\xf5\xff\xfa\xff\xff\xff\x06\x00\x0b\x00\x13\x00\x19\x00\x19\x00\x15\x00\x0e\x00\x07\x00\xfe\xff\xf7\xff\xf0\xff\xef\xff\xf0\xff\xf3\xff\xf4\xff\xf7\xff\xfd\xff\x03\x00\n\x00\r\x00\x0c\x00\x05\x00\xfa\xff\xeb\xff\xe5\xff\xea\xff\xf3\xff\xf5\xff\xf2\xff\xf3\xff\xf9\xff\x04\x00\x0f\x00\x18\x00\x1a\x00\x19\x00\x12\x00\x04\x00\xf7\xff\xed\xff\xe9\xff\xea\xff\xee\xff\xf3\xff\xf5\xff\xf8\xff\xfc\xff\x01\x00\x07\x00\n\x00\n\x00\x07\x00\xfe\xff\xf1\xff\xe8\xff\xe7\xff\xef\xff\xfe\xff\x0c\x00\x10\x00\t\x00\x02\x00\x00\x00\x03\x00\x07\x00\t\x00\n\x00\r\x00\x13\x00\x17\x00\x12\x00\x06\x00\xfa\xff\xf4\xff\xf5\xff\xfa\xff\xff\xff\x03\x00\x06\x00\x0e\x00\x16\x00\x1a\x00\x17\x00\x10\x00\n\x00\x08\x00\x06\x00\xff\xff\xf6\xff\xee\xff\xe9\xff\xe7\xff\xe9\xff\xee\xff\xf9\xff\x05\x00\x0e\x00\x11\x00\n\x00\xfb\xff\xec\xff\xe4\xff\xe6\xff\xe9\xff\xec\xff\xf0\xff\xf5\xff\xfa\xff\xfd\xff\xff\xff\x04\x00\x10\x00\x1c\x00$\x00\x1f\x00\x0f\x00\xfc\xff\xf1\xff\xf0\xff\xf2\xff\xf3\xff
\xf3\xff\xf7\xff\x03\x00\x11\x00\x17\x00\x15\x00\x0e\x00\n\x00\t\x00\x07\x00\xfe\xff\xf5\xff\xee\xff\xf2\xff\xfa\xff\x04\x00\x06\x00\x01\x00\xfe\xff\xff\xff\x04\x00\x07\x00\x04\x00\x01\x00\x00\x00\x01\x00\xfe\xff\xfd\xff\xfc\xff\xff\xff\x00\x00\xfc\xff\xf6\xff\xf4\xff\xf8\xff\xff\xff\x08\x00\x11\x00\x17\x00\x1a\x00\x1a\x00\x18\x00\x12\x00\x05\x00\xf7\xff\xec\xff\xe6\xff\xe7\xff\xea\xff\xed\xff\xf5\xff\x01\x00\x0c\x00\x11\x00\x0e\x00\x06\x00\xfc\xff\xf5\xff\xf2\xff\xf1\xff\xf0\xff\xee\xff\xf1\xff\xf5\xff\xf9\xff\xfd\xff\x02\x00\x0b\x00\x14\x00\x1c\x00\x1b\x00\r\x00\xf9\xff\xea\xff\xe8\xff\xec\xff\xf0\xff\xf3\xff\xf7\xff\xfe\xff\x08\x00\x0c\x00\t\x00\x04\x00\x07\x00\x10\x00\x17\x00\x17\x00\x0e\x00\x04\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\xfd\xff\x00\x00\x07\x00\x0b\x00\x08\x00\x02\x00\xff\xff\x01\x00\x06\x00\x06\x00\x02\x00\x00\x00\xfc\xff\xf6\xff\xf0\xff\xed\xff\xed\xff\xf6\xff\x01\x00\n\x00\x11\x00\x15\x00\x16\x00\x11\x00\x0b\x00\xff\xff\xf2\xff\xe9\xff\xe6\xff\xe8\xff\xe6\xff\xe3\xff\xe5\xff\xf3\xff\x03\x00\x0e\x00\r\x00\t\x00\x06\x00\x03\x00\xfd\xff\xf7\xff\xf3\xff\xf0\xff\xf2\xff\xfa\xff\x03\x00\t\x00\x0c\x00\x0b\x00\r\x00\x13\x00\x15\x00\x0f\x00\x04\x00\xfd\xff\xfc\xff\xff\xff\xfd\xff\xf7\xff\xf2\xff\xf6\xff\xfd\xff\x01\x00\x01\x00\xff\xff\x00\x00\x06\x00\x0c\x00\x0f\x00\x0b\x00\x02\x00\xfd\xff\xfc\xff\x01\x00\x06\x00\n\x00\x08\x00\x02\x00\xfa\xff\xf6\xff\xf7\xff\xfb\xff\x01\x00\n\x00\x13\x00\x15\x00\x16\x00\x12\x00\n\x00\xfd\xff\xf0\xff\xe6\xff\xe8\xff\xf3\xff\x01\x00\x0c\x00\x12\x00\x17\x00\x1c\x00\x1d\x00\x19\x00\x0e\x00\x02\x00\xf3\xff\xe8\xff\xe1\xff\xe0\xff\xe0\xff\xe3\xff\xec\xff\xf8\xff\x00\x00\x02\x00\x03\x00\x01\x00\xfd\xff\xf4\xff\xed\xff\xec\xff\xed\xff\xf3\xff\xfa\xff\x00\x00\x04\x00\x05\x00\x03\x00\x05\x00\n\x00\x10\x00\x11\x00\x0e\x00\x0b\x00\x08\x00\x05\x00\xfd\xff\xf4\xff\xf2\xff\xf6\xff\xfc\xff\xff\xff\xff\xff\x03\x00\x08\x00\x0b\x00\x0b\x00\r\x00\x0e\x00\x0e\x00\x0c\x00\n\x00\x07\x00\x04\x00\x03\x00\x02\x00\x00\x00\xfa\xff\xf4\xff\xf1\xff\xf4\xff\xfa\xff\x03\x00\t\x00\x07\x00\x06\x00\x06\x00\x06\x00\x02\x00\xfb\xff\xf5\xff\xf3\xff\xf8\xff\xfc\xff\xfe\xff\x00\x00\x06\x00\x11\x00\x1e\x00%\x00"\x00\x18\x00\x08\x00\xf5\xff\xe5\xff\xdc\xff\xda\xff\xdd\xff\xe5\xff\xf1\xff\xfe\xff\x07\x00\n\x00\x0b\x00\x08\x00\x04\x00\xff\xff\xfd\xff\xfa\xff\xf7\xff\xf6\xff\xf4\xff\xf5\xff\xf9\xff\x01\x00\x06\x00\x07\x00\t\x00\x0c\x00\x11\x00\r\x00\x03\x00\xf8\xff\xee\xff\xe9\xff\xee\xff\xf6\xff\xfc\xff\xfa\xff\xf7\xff\xf9\xff\x01\x00\x06\x00\x07\x00\x08\x00\x0c\x00\x10\x00\x10\x00\x0b\x00\x03\x00\xfa\xff\xf7\xff\xfd\xff\x01\x00\x00\x00\xf9\xff\xf5\xff\xfa\xff\x02\x00\x07\x00\x06\x00\x05\x00\x07\x00\x0c\x00\x11\x00\x11\x00\t\x00\xff\xff\xf7\xff\xf5\xff\xfa\xff\xfe\xff\x00\x00\x03\x00\t\x00\x14\x00\x1a\x00\x19\x00\x11\x00\x08\x00\xfe\xff\xf1\xff\xe7\xff\xe1\xff\xe3\xff\xe9\xff\xee\xff\xf5\xff\xfb\xff\x01\x00\x07\x00\r\x00\x10\x00\r\x00\x08\x00\x02\x00\xfb\xff\xf4\xff\xee\xff\xed\xff\xf3\xff\xff\xff\t\x00\x0e\x00\x0e\x00\x10\x00\x13\x00\x13\x00\x0e\x00\x04\x00\xfc\xff\xf9\xff\xfb\xff\xfc\xff\xfa\xff\xf4\xff\xf1\xff\xf5\xff\xfe\xff\x04\x00\x04\x00\x05\x00\x07\x00\x0c\x00\x0b\x00\x03\x00\xf4\xff\xea\xff\xea\xff\xf0\xff\xf7\xff\xfa\xff\xf9\xff\xfa\xff\xfc\xff\xfe\xff\xfd\xff\xfe\xff\x03\x00\n\x00\x0e\x00\x0b\x00\x08\x00\x04\x00\xfe\xff\xf9\xff\xf7\xff\xf9\xff\xfe\xff\x03\x00\x06\x00\x0c\x00\x10\x00\x15\x00\x16\x00\x13\x00\x0e\x00\n\x00\x04\x00\xfc\xff\xf2\xff\xeb\xff\xe9\xff\xeb\xff\xf2\xff\xfb\xff\x04\x00\n\x00\x0b\x00\n\x00\x04\x00\xff\xff\xfd\xff\xfa\xff\xf7\xff\xf4\xff\xf5\xff\xfb\xff\
x03\x00\x08\x00\x08\x00\x03\x00\x00\x00\x02\x00\x0b\x00\x11\x00\x13\x00\x0e\x00\x07\x00\xff\xff\xf8\xff\xf1\xff\xee\xff\xed\xff\xef\xff\xf8\xff\x03\x00\x08\x00\x08\x00\t\x00\x11\x00\x15\x00\x10\x00\x08\x00\x00\x00\xfc\xff\xfb\xff\xf9\xff\xf4\xff\xf3\xff\xf7\xff\x00\x00\x01\x00\xfd\xff\xf8\xff\xf8\xff\xfd\xff\x01\x00\x00\x00\xfa\xff\xf4\xff\xf0\xff\xee\xff\xf0\xff\xf7\xff\xfe\xff\x04\x00\x05\x00\x07\x00\x08\x00\x0b\x00\r\x00\x10\x00\x12\x00\x0f\x00\x08\x00\xfd\xff\xf5\xff\xf1\xff\xef\xff\xf0\xff\xf2\xff\xf8\xff\xfd\xff\x02\x00\x07\x00\t\x00\n\x00\x07\x00\x04\x00\x02\x00\x02\x00\x04\x00\x02\x00\x02\x00\x02\x00\x07\x00\x0b\x00\x0b\x00\x07\x00\x05\x00\n\x00\x0e\x00\x0c\x00\x07\x00\x02\x00\xff\xff\xfa\xff\xf4\xff\xf2\xff\xf5\xff\xf9\xff\xfd\xff\xfe\xff\xfa\xff\xf7\xff\xf7\xff\xff\xff\x0c\x00\x16\x00\x18\x00\x0f\x00\x03\x00\xfa\xff\xf3\xff\xed\xff\xea\xff\xee\xff\xf8\xff\x00\x00\x03\x00\x02\x00\x01\x00\x04\x00\x08\x00\n\x00\x06\x00\x00\x00\xfc\xff\xf9\xff\xf7\xff\xf4\xff\xf6\xff\xfc\xff\x03\x00\x06\x00\x05\x00\x03\x00\x02\x00\x04\x00\x08\x00\x05\x00\xff\xff\xf6\xff\xef\xff\xec\xff\xef\xff\xf3\xff\xf3\xff\xf3\xff\xf5\xff\xfb\xff\x04\x00\x08\x00\x0b\x00\x0b\x00\r\x00\x10\x00\x10\x00\x08\x00\x00\x00\xf8\xff\xf7\xff\xfb\xff\x04\x00\x0c\x00\x0b\x00\x07\x00\x05\x00\t\x00\r\x00\x10\x00\x0c\x00\x08\x00\x03\x00\xfe\xff\xfb\xff\xf8\xff\xf7\xff\xf8\xff\xfc\xff\xfd\xff\xfe\xff\xff\xff\x03\x00\n\x00\x0e\x00\x0c\x00\x08\x00\x04\x00\x00\x00\xfa\xff\xf4\xff\xf3\xff\xf4\xff\xfb\xff\x00\x00\xff\xff\xfa\xff\xf7\xff\xfa\xff\x04\x00\x0c\x00\x0f\x00\x0b\x00\x03\x00\xfc\xff\xf5\xff\xf1\xff\xef\xff\xf1\xff\xf7\xff\x01\x00\x04\x00\x03\x00\x03\x00\x04\x00\x0b\x00\x0e\x00\r\x00\t\x00\x02\x00\xfd\xff\xf8\xff\xf4\xff\xf1\xff\xf1\xff\xf4\xff\xf9\xff\xff\xff\x01\x00\x02\x00\x04\x00\x08\x00\x0c\x00\x0c\x00\x04\x00\xfc\xff\xf6\xff\xf6\xff\xfa\xff\xfd\xff\x01\x00\x02\x00\x01\x00\x04\x00\t\x00\r\x00\x0e\x00\r\x00\r\x00\n\x00\x04\x00\xf9\xff\xf0\xff\xee\xff\xef\xff\xf5\xff\xf9\xff\xfd\xff\xff\xff\x01\x00\x04\x00\n\x00\x0e\x00\r\x00\x08\x00\x01\x00\xff\xff\xfe\xff\xfc\xff\xf9\xff\xfb\xff\xfd\xff\xff\xff\xff\xff\xfe\xff\x03\x00\x07\x00\x08\x00\x05\x00\x02\x00\x01\x00\x01\x00\xfe\xff\xf8\xff\xf4\xff\xf6\xff\xff\xff\x07\x00\x07\x00\x02\x00\xfe\xff\x01\x00\x07\x00\n\x00\t\x00\x07\x00\x04\x00\x01\x00\xfe\xff\xf8\xff\xf3\xff\xed\xff\xec\xff\xf0\xff\xf6\xff\xfc\xff\x01\x00\x03\x00\x05\x00\t\x00\r\x00\x0e\x00\x0c\x00\x07\x00\x02\x00\xf9\xff\xf3\xff\xf4\xff\xfc\xff\x02\x00\x06\x00\t\x00\x0c\x00\x0f\x00\x11\x00\r\x00\x07\x00\xfd\xff\xf2\xff\xec\xff\xeb\xff\xee\xff\xf2\xff\xf4\xff\xf8\xff\xfc\xff\x03\x00\x08\x00\x0b\x00\x0b\x00\t\x00\x05\x00\x01\x00\xff\xff\xfd\xff\xfb\xff\xf5\xff\xf4\xff\xf8\xff\xfd\xff\x02\x00\x03\x00\x05\x00\x06\x00\x08\x00\x0b\x00\r\x00\t\x00\x02\x00\xf9\xff\xf6\xff\xf6\xff\xfc\xff\x02\x00\x06\x00\x07\x00\x04\x00\x04\x00\t\x00\x0e\x00\r\x00\x08\x00\x02\x00\x04\x00\x07\x00\x06\x00\xff\xff\xf9\xff\xf6\xff\xf7\xff\xfa\xff\xfd\xff\xfc\xff\xfe\xff\x01\x00\x04\x00\t\x00\x0b\x00\x0b\x00\x08\x00\x01\x00\xfc\xff\xf7\xff\xef\xff\xeb\xff\xed\xff\xf4\xff\xfb\xff\x04\x00\r\x00\x13\x00\x12\x00\x0e\x00\x07\x00\x04\x00\xff\xff\xf8\xff\xf1\xff\xec\xff\xea\xff\xeb\xff\xf3\xff\xfc\xff\x05\x00\x0b\x00\x0c\x00\n\x00\x06\x00\x02\x00\xfd\xff\xfa\xff\xf5\xff\xf4\xff\xf5\xff\xf8\xff\xfc\xff\xfd\xff\xfe\xff\x01\x00\x07\x00\x0c\x00\x0e\x00\x0b\x00\x07\x00\x02\x00\x00\x00\xfe\xff\xfb\xff\xf5\xff\xf2\xff\xf1\xff\xf9\xff\x01\x00\x06\x00\x04\x00\x01\x00\x02\x00\x06\x00\n\x00\x0b\x00\x0b\x00\n\x00\x04\x00\xff\xff\xfd\xff\xfd\xff\xff\xff\x01\x00\x02\x00\x
03\x00\x06\x00\n\x00\r\x00\x0c\x00\x08\x00\x04\x00\x02\x00\x01\x00\x00\x00\xfc\xff\xf6\xff\xf2\xff\xf6\xff\xfe\xff\x03\x00\x02\x00\x02\x00\x06\x00\x0b\x00\r\x00\x0e\x00\x0c\x00\x04\x00\xfa\xff\xf1\xff\xee\xff\xef\xff\xef\xff\xee\xff\xf1\xff\xf8\xff\x02\x00\x06\x00\n\x00\x08\x00\x04\x00\x01\x00\xfd\xff\xfc\xff\xf8\xff\xf6\xff\xf4\xff\xf4\xff\xf8\xff\xfe\xff\x03\x00\x08\x00\x0c\x00\x0e\x00\x0e\x00\n\x00\x05\x00\x02\x00\xfe\xff\xfa\xff\xf8\xff\xf6\xff\xf7\xff\xfb\xff\x00\x00\x01\x00\xff\xff\xff\xff\x05\x00\x0b\x00\x0c\x00\t\x00\x04\x00\x05\x00\x08\x00\x05\x00\xff\xff\xf7\xff\xf5\xff\xf7\xff\xfe\xff\x07\x00\n\x00\x08\x00\x04\x00\x03\x00\x05\x00\x08\x00\x04\x00\x00\x00\xfc\xff\xfc\xff\xfb\xff\xf9\xff\xfa\xff\xfc\xff\xfd\xff\xfe\xff\xff\xff\x05\x00\x0c\x00\x0e\x00\n\x00\x06\x00\x01\x00\xfe\xff\xf9\xff\xf6\xff\xf4\xff\xf2\xff\xf4\xff\xf9\xff\xff\xff\x02\x00\x00\x00\xfd\xff\xfc\xff\x01\x00\x05\x00\x06\x00\x02\x00\xfb\xff\xf5\xff\xf3\xff\xf9\xff\xfe\xff\x00\x00\xfe\xff\xfa\xff\xfc\xff\x03\x00\t\x00\n\x00\t\x00\x05\x00\x04\x00\x02\x00\xff\xff\xfb\xff\xf6\xff\xf2\xff\xf5\xff\xfa\xff\xff\xff\x00\x00\x02\x00\x07\x00\x0b\x00\x0b\x00\x08\x00\x07\x00\x07\x00\x03\x00\xfd\xff\xf9\xff\xfb\xff\xff\xff\x04\x00\x05\x00\x04\x00\x05\x00\x08\x00\x0b\x00\x0c\x00\x0b\x00\x05\x00\x00\x00\xfe\xff\xfd\xff\xfc\xff\xf8\xff\xf4\xff\xf6\xff\xfb\xff\xff\xff\x01\x00\x03\x00\x06\x00\x08\x00\x08\x00\x06\x00\x01\x00\xfe\xff\xfb\xff\xf8\xff\xf9\xff\xfa\xff\xf7\xff\xf6\xff\xf8\xff\xfd\xff\xfd\xff\xfd\xff\xff\xff\x01\x00\x01\x00\xff\xff\xfb\xff\xf7\xff\xf8\xff\xfa\xff\xfc\xff\xfd\xff\xfe\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x05\x00\x05\x00\x06\x00\x06\x00\x03\x00\x01\x00\xfd\xff\xfb\xff\xfa\xff\xf8\xff\xf4\xff\xf2\xff\xf6\xff\xff\xff\t\x00\x0f\x00\x0e\x00\x0c\x00\x0e\x00\r\x00\x07\x00\xfe\xff\xf8\xff\xf6\xff\xf5\xff\xf6\xff\xfa\xff\x03\x00\x0c\x00\x11\x00\x10\x00\r\x00\x08\x00\x05\x00\x04\x00\xff\xff\xfa\xff\xf5\xff\xf5\xff\xf8\xff\xff\xff\x03\x00\x05\x00\x06\x00\n\x00\x12\x00\x15\x00\x10\x00\x07\x00\x00\x00\xfc\xff\xfc\xff\xfc\xff\xfa\xff\xf5\xff\xf5\xff\xf9\xff\xff\xff\x03\x00\x03\x00\x02\x00\x01\x00\xfe\xff\xf9\xff\xf6\xff\xf7\xff\xfd\xff\xfe\xff\xfd\xff\xfb\xff\xfd\xff\x01\x00\x02\x00\x01\x00\x00\x00\xff\xff\xfe\xff\x02\x00\x04\x00\x03\x00\x00\x00\xfe\xff\xff\xff\x02\x00\x01\x00\xfc\xff\xf4\xff\xf0\xff\xee\xff\xf1\xff\xf6\xff\xfd\xff\x05\x00\t\x00\n\x00\x0b\x00\r\x00\x08\x00\x03\x00\xfe\xff\xfb\xff\xf9\xff\xf7\xff\xf8\xff\xfd\xff\x04\x00\x0b\x00\x0c\x00\x0c\x00\n\x00\x07\x00\x05\x00\x01\x00\xfc\xff\xf7\xff\xf4\xff\xf4\xff\xf5\xff\xf5\xff\xf3\xff\xf8\xff\x04\x00\x11\x00\x13\x00\r\x00\x04\x00\x01\x00\x03\x00\x04\x00\x01\x00\xfd\xff\xfb\xff\xfa\xff\xfc\xff\xff\xff\x04\x00\x06\x00\t\x00\x0b\x00\t\x00\x04\x00\xff\xff\xfb\xff\xfd\xff\xfd\xff\xfa\xff\xf9\xff\xfa\xff\xfd\xff\x01\x00\x06\x00\x06\x00\x03\x00\x03\x00\x03\x00\x04\x00\x00\x00\xfd\xff\xff\xff\x06\x00\n\x00\x08\x00\x00\x00\xf8\xff\xf5\xff\xf9\xff\xfa\xff\xf9\xff\xfb\xff\xff\xff\x02\x00\x06\x00\x05\x00\x02\x00\xff\xff\xfd\xff\xff\xff\x00\x00\xfd\xff\xf9\xff\xf4\xff\xf5\xff\xfb\xff\x01\x00\x06\x00\x07\x00\t\x00\x08\x00\x06\x00\x05\x00\x02\x00\xff\xff\xfd\xff\xfc\xff\xfb\xff\xf8\xff\xf8\xff\xfb\xff\x02\x00\t\x00\n\x00\x06\x00\x02\x00\x02\x00\x06\x00\x05\x00\x01\x00\xfb\xff\xf8\xff\xf6\xff\xf4\xff\xf1\xff\xee\xff\xf3\xff\x00\x00\x0c\x00\x11\x00\t\x00\xff\xff\xfb\xff\xfd\xff\x02\x00\x00\x00\xfd\xff\xfb\xff\xff\xff\x06\x00\x0b\x00\r\x00\x0b\x00\x0b\x00\n\x00\x07\x00\x03\x00\x02\x00\x00\x00\x01\x00\x02\x00\x02\x00\xfe\xff\xf9\xff\xf7\xff\xf8\xff\xf9\xff\xf6\xff\xf7\xf
f\xfc\xff\x02\x00\x05\x00\x03\x00\x01\x00\x02\x00\x06\x00\x07\x00\x05\x00\x00\x00\xfa\xff\xf9\xff\xfc\xff\x02\x00\x07\x00\t\x00\n\x00\x07\x00\x06\x00\x03\x00\xff\xff\xfb\xff\xf7\xff\xf5\xff\xf9\xff\xfa\xff\xf9\xff\xf9\xff\xf7\xff\xf8\xff\xfc\xff\x00\x00\x02\x00\x02\x00\x05\x00\x07\x00\t\x00\t\x00\x07\x00\x05\x00\x02\x00\xfd\xff\xf7\xff\xf5\xff\xf7\xff\xfc\xff\x04\x00\x07\x00\x04\x00\xff\xff\xfd\xff\xff\xff\x01\x00\xfd\xff\xf8\xff\xf5\xff\xf8\xff\xfb\xff\xfe\xff\xff\xff\x00\x00\x05\x00\x0b\x00\x0f\x00\x0c\x00\x06\x00\x03\x00\x02\x00\x05\x00\x06\x00\x04\x00\xfe\xff\xfb\xff\xfd\xff\xfe\xff\xfc\xff\xf8\xff\xfa\xff\xff\xff\x03\x00\x04\x00\x05\x00\x04\x00\x04\x00\x04\x00\x01\x00\xfd\xff\xf9\xff\xf6\xff\xf7\xff\xf9\xff\xff\xff\x05\x00\x0b\x00\x0e\x00\x0c\x00\x06\x00\x01\x00\xfd\xff\xfb\xff\xf9\xff\xfb\xff\xfb\xff\xfc\xff\xfe\xff\x01\x00\x03\x00\x04\x00\x04\x00\x05\x00\x04\x00\x04\x00\x04\x00\x03\x00\x00\x00\xff\xff\x02\x00\x06\x00\x05\x00\x02\x00\xfc\xff\xf7\xff\xf8\xff\xfb\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\x03\x00\x05\x00\x03\x00\x01\x00\xfe\xff\x00\x00\xff\xff\xfe\xff\xfc\xff\xfc\xff\xff\xff\x02\x00\x05\x00\x05\x00\x03\x00\x02\x00\x04\x00\x04\x00\x02\x00\xff\xff\xfc\xff\xfa\xff\xf7\xff\xf3\xff\xef\xff\xee\xff\xf5\xff\xff\xff\x05\x00\x06\x00\x04\x00\x04\x00\x05\x00\x06\x00\x01\x00\xfa\xff\xf7\xff\xf6\xff\xfb\xff\x00\x00\x01\x00\x04\x00\x07\x00\n\x00\x0b\x00\t\x00\x02\x00\xfc\xff\xf8\xff\xf7\xff\xf8\xff\xfa\xff\xfb\xff\xfd\xff\xff\xff\x00\x00\x01\x00\x02\x00\x06\x00\x08\x00\x07\x00\x05\x00\x03\x00\x04\x00\x06\x00\t\x00\n\x00\n\x00\x07\x00\x04\x00\x04\x00\x02\x00\x01\x00\x01\x00\x03\x00\x03\x00\x01\x00\x00\x00\xff\xff\xfc\xff\xf9\xff\xf9\xff\xfd\xff\x02\x00\x03\x00\xff\xff\xfa\xff\xfa\xff\xfc\xff\xfe\xff\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x04\x00\x03\x00\x05\x00\x04\x00\x00\x00\xfb\xff\xf2\xff\xef\xff\xf2\xff\xf8\xff\xfd\xff\xff\xff\xff\xff\x03\x00\x07\x00\x07\x00\x01\x00\xf9\xff\xf6\xff\xf6\xff\xf9\xff\xfa\xff\xf6\xff\xf5\xff\xfa\xff\x04\x00\r\x00\x0e\x00\x08\x00\x00\x00\xff\xff\x00\x00\xff\xff\xfd\xff\xfa\xff\xf9\xff\xfc\xff\x01\x00\x04\x00\x05\x00\x06\x00\x08\x00\n\x00\t\x00\x06\x00\x05\x00\x01\x00\x00\x00\x01\x00\x03\x00\x06\x00\x06\x00\x02\x00\xfe\xff\xfc\xff\xfd\xff\x03\x00\x08\x00\x08\x00\x03\x00\x00\x00\xfe\xff\xff\xff\x02\x00\x03\x00\x01\x00\xfe\xff\xfc\xff\xfe\xff\x02\x00\x02\x00\x01\x00\x00\x00\x03\x00\x07\x00\x07\x00\x02\x00\xfa\xff\xf6\xff\xf8\xff\xfd\xff\x01\x00\x01\x00\xfc\xff\xf7\xff\xf3\xff\xf6\xff\xf9\xff\xfa\xff\xf9\xff\xfb\xff\x02\x00\x06\x00\x05\x00\x02\x00\xff\xff\x00\x00\xff\xff\xfc\xff\xf9\xff\xf8\xff\xfb\xff\x00\x00\x05\x00\x06\x00\x04\x00\x04\x00\x06\x00\x08\x00\x04\x00\xfe\xff\xf7\xff\xf7\xff\xfc\xff\x00\x00\xfe\xff\xfc\xff\xfe\xff\x06\x00\x0b\x00\x0c\x00\n\x00\t\x00\x07\x00\x05\x00\x01\x00\x01\x00\x01\x00\x03\x00\x02\x00\x02\x00\x01\x00\xff\xff\x00\x00\x05\x00\x08\x00\x04\x00\x01\x00\xfe\xff\xfe\xff\xfd\xff\xfc\xff\xfa\xff\xf7\xff\xf7\xff\xf9\xff\xfc\xff\x00\x00\x01\x00\x02\x00\x05\x00\x07\x00\x07\x00\x03\x00\xfb\xff\xf5\xff\xf6\xff\xfa\xff\xfe\xff\x01\x00\x00\x00\xff\xff\xfd\xff\xfc\xff\xfb\xff\xfb\xff\xfd\xff\xff\xff\x01\x00\x02\x00\xff\xff\xfb\xff\xfa\xff\xfc\xff\xff\xff\xff\xff\xfd\xff\xfc\xff\xfd\xff\x00\x00\x04\x00\x04\x00\x03\x00\x01\x00\x04\x00\n\x00\t\x00\x05\x00\xff\xff\xfe\xff\x00\x00\x01\x00\x01\x00\x00\x00\x03\x00\x07\x00\t\x00\x08\x00\x07\x00\x07\x00\n\x00\t\x00\x05\x00\xff\xff\xfd\xff\xfe\xff\x00\x00\x00\x00\xfd\xff\xfa\xff\xfc\xff\x02\x00\x07\x00\t\x00\x06\x00\x02\x00\x01\x00\x01\x00\xff\xff\xfa\xff\xf5\xff\xf4\xff\xf7\xff\xfd\xff\x01\x00\x03\x00\x
03\x00\x02\x00\x03\x00\x01\x00\xff\xff\xfd\xff\xfa\xff\xf8\xff\xf6\xff\xf5\xff\xf6\xff\xf7\xff\xf9\xff\xf8\xff\xf7\xff\xf6\xff\xfa\xff\xff\xff\x04\x00\x04\x00\x02\x00\xfe\xff\xf9\xff\xf5\xff\xf5\xff\xfa\xff\x00\x00\x00\x00\x02\x00\x02\x00\x04\x00\x06\x00\x07\x00\x06\x00\x05\x00\x06\x00\x07\x00\x07\x00\x04\x00\x02\x00\x01\x00\x02\x00\x02\x00\x03\x00\x03\x00\x05\x00\x06\x00\x08\x00\x08\x00\x06\x00\x07\x00\x0c\x00\x0e\x00\x0c\x00\x06\x00\x02\x00\x03\x00\x05\x00\x04\x00\x00\x00\xfc\xff\xfb\xff\x00\x00\x05\x00\x07\x00\x06\x00\x04\x00\x04\x00\x03\x00\x00\x00\xf9\xff\xf6\xff\xf5\xff\xf5\xff\xf4\xff\xf4\xff\xf9\xff\xfd\xff\x03\x00\x06\x00\x05\x00\x03\x00\x02\x00\xff\xff\xff\xff\xfc\xff\xf7\xff\xf2\xff\xf2\xff\xf7\xff\xfa\xff\xf9\xff\xf7\xff\xf8\xff\xfd\xff\x03\x00\x05\x00\x03\x00\xfb\xff\xf4\xff\xf1\xff\xee\xff\xf1\xff\xf4\xff\xf8\xff\xfb\xff\xff\xff\x00\x00\x03\x00\x05\x00\x07\x00\x07\x00\x06\x00\x06\x00\x06\x00\x06\x00\x04\x00\x02\x00\x02\x00\x04\x00\x06\x00\x05\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\t\x00\x0b\x00\r\x00\x0c\x00\x07\x00\x04\x00\x02\x00\x01\x00\x02\x00\x01\x00\x02\x00\x01\x00\x03\x00\x07\x00\n\x00\x0c\x00\x0b\x00\n\x00\x08\x00\x04\x00\xfe\xff\xfb\xff\xf8\xff\xf9\xff\xf6\xff\xf3\xff\xf3\xff\xf8\xff\x00\x00\x04\x00\x04\x00\x01\x00\xff\xff\xff\xff\xff\xff\xfd\xff\xf8\xff\xf0\xff\xee\xff\xef\xff\xf4\xff\xf7\xff\xf9\xff\xfb\xff\xfd\xff\x01\x00\x06\x00\x07\x00\x05\x00\x00\x00\xf8\xff\xf5\xff\xf4\xff\xf6\xff\xf9\xff\xfb\xff\xfe\xff\x00\x00\x01\x00\x02\x00\x04\x00\t\x00\n\x00\x06\x00\x04\x00\x02\x00\x00\x00\xfe\xff\xfc\xff\xfa\xff\xfa\xff\xfd\xff\xff\xff\x04\x00\x04\x00\x03\x00\x02\x00\x04\x00\t\x00\r\x00\x0c\x00\x05\x00\xfc\xff\xfb\xff\xff\xff\x03\x00\x06\x00\x02\x00\x00\x00\x00\x00\x04\x00\x0b\x00\r\x00\x0c\x00\t\x00\x07\x00\x05\x00\x02\x00\xff\xff\xfe\xff\xfd\xff\xfa\xff\xf8\xff\xf9\xff\xfc\xff\x01\x00\x05\x00\x04\x00\x05\x00\x06\x00\x04\x00\x03\x00\xfe\xff\xf9\xff\xf6\xff\xf5\xff\xf7\xff\xf9\xff\xf9\xff\xf6\xff\xf6\xff\xf8\xff\xfd\xff\x02\x00\x03\x00\xff\xff\xfd\xff\xfc\xff\xfb\xff\xf8\xff\xf4\xff\xf5\xff\xf8\xff\xfd\xff\x01\x00\x01\x00\x02\x00\x02\x00\x05\x00\x07\x00\x08\x00\t\x00\n\x00\x08\x00\x05\x00\x01\x00\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x01\x00\x01\x00\x02\x00\x02\x00\x03\x00\x04\x00\x05\x00\x03\x00\x00\x00\xfb\xff\xfa\xff\xf9\xff\xf7\xff\xf7\xff\xfd\xff\x02\x00\x05\x00\x08\x00\x08\x00\n\x00\x0b\x00\r\x00\n\x00\x02\x00\xfa\xff\xf7\xff\xfa\xff\xfe\xff\xff\xff\xfd\xff\xfd\xff\xfe\xff\x03\x00\x05\x00\x04\x00\x00\x00\xff\xff\xfd\xff\xfd\xff\xfc\xff\xfc\xff\xfb\xff\xfb\xff\xfc\xff\xfd\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x04\x00\x06\x00\x08\x00\x06\x00\xff\xff\xf8\xff\xf5\xff\xf5\xff\xfc\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x04\x00\x07\x00\x06\x00\x04\x00\x02\x00\x03\x00\x04\x00\x05\x00\x04\x00\xff\xff\xfd\xff\xfe\xff\x00\x00\x01\x00\x00\x00\xfe\xff\xfd\xff\xff\xff\x02\x00\x07\x00\x08\x00\x04\x00\x00\x00\xfd\xff\xfc\xff\xfc\xff\xfb\xff\xf9\xff\xf9\xff\x00\x00\x04\x00\t\x00\x0c\x00\n\x00\x08\x00\x06\x00\x02\x00\xfe\xff\xfa\xff\xf7\xff\xf4\xff\xf3\xff\xf4\xff\xf9\xff\xff\xff\x03\x00\x01\x00\x00\x00\xfd\xff\xff\xff\x00\x00\xff\xff\xfb\xff\xfa\xff\xf8\xff\xfa\xff\xfa\xff\xfd\xff\xfd\xff\xff\xff\xff\xff\x01\x00\x01\x00\x03\x00\x04\x00\x01\x00\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfd\xff\xff\xff\x01\x00\x02\x00\x03\x00\x04\x00\x06\x00\x06\x00\x08\x00\n\x00\x0b\x00\x0c\x00\x0b\x00\x07\x00\x03\x00\x02\x00\x02\x00\x03\x00\x02\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xfd\xff\xfd\xff\xfd\xff\xfd\xff\xfc\xff\xfa\xff\xfc\xff\xfe\xff\
x02\x00\x04\x00\x05\x00\x07\x00\x0b\x00\x0c\x00\r\x00\x07\x00\x01\x00\xfd\xff\xfa\xff\xf9\xff\xf9\xff\xf8\xff\xfa\xff\xfd\xff\xff\xff\x00\x00\x00\x00\xfd\xff\xfd\xff\xfc\xff\xfa\xff\xf8\xff\xf5\xff\xf4\xff\xf5\xff\xf4\xff\xf6\xff\xfa\xff\xff\xff\x01\x00\x04\x00\x04\x00\x07\x00\x04\x00\x02\x00\xfe\xff\xfd\xff\xfd\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x06\x00\x05\x00\x04\x00\x03\x00\x05\x00\x07\x00\x07\x00\x06\x00\x05\x00\x04\x00\x04\x00\x04\x00\x05\x00\x05\x00\x03\x00\x01\x00\x00\x00\x02\x00\x05\x00\x05\x00\x02\x00\xff\xff\xfd\xff\xfe\xff\xfd\xff\xfc\xff\xf9\xff\xf8\xff\xfa\xff\xfd\xff\x02\x00\x05\x00\x06\x00\x08\x00\t\x00\x07\x00\x04\x00\x01\x00\xff\xff\xfd\xff\xfa\xff\xf9\xff\xfb\xff\xff\xff\x01\x00\x02\x00\xff\xff\xfe\xff\x00\x00\x02\x00\x04\x00\x03\x00\xfd\xff\xfa\xff\xf8\xff\xf8\xff\xf8\xff\xf9\xff\xf7\xff\xf9\xff\xfd\xff\x01\x00\x05\x00\x06\x00\x04\x00\x00\x00\xfc\xff\xf9\xff\xfa\xff\xfa\xff\xfa\xff\xf8\xff\xfa\xff\xfc\xff\x00\x00\x02\x00\x04\x00\x06\x00\x07\x00\x07\x00\x06\x00\x05\x00\x04\x00\x04\x00\x06\x00\x05\x00\x03\x00\x02\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\xfd\xff\xfe\xff\xff\xff\x00\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfb\xff\xf9\xff\xfa\xff\xff\xff\x05\x00\t\x00\t\x00\t\x00\x0c\x00\x0c\x00\n\x00\x08\x00\x04\x00\xff\xff\xfa\xff\xf7\xff\xf7\xff\xf9\xff\xfc\xff\xfd\xff\xfc\xff\xfd\xff\xfe\xff\x00\x00\x02\x00\xff\xff\xfb\xff\xfa\xff\xf9\xff\xf9\xff\xfb\xff\xfb\xff\xfc\xff\xfc\xff\xfe\xff\x01\x00\x06\x00\x08\x00\x07\x00\x06\x00\x04\x00\x03\x00\x01\x00\x01\x00\xfe\xff\xfd\xff\xfc\xff\xfb\xff\xfb\xff\xfb\xff\xff\xff\x01\x00\x05\x00\x07\x00\x07\x00\x05\x00\x03\x00\x00\x00\xff\xff\xfd\xff\xfc\xff\xfb\xff\xfa\xff\xfc\xff\xff\xff\x01\x00\x03\x00\x03\x00\x00\x00\xfd\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x02\x00\xff\xff\xfb\xff\xf8\xff\xf9\xff\xfd\xff\x02\x00\x07\x00\x08\x00\t\x00\x08\x00\x08\x00\t\x00\x07\x00\x07\x00\x06\x00\x04\x00\x02\x00\xfd\xff\xfa\xff\xfb\xff\x00\x00\x04\x00\x07\x00\x06\x00\x03\x00\x02\x00\x01\x00\xfe\xff\xfe\xff\xfd\xff\xfb\xff\xf8\xff\xf8\xff\xf9\xff\xfc\xff\xfd\xff\xfc\xff\xfb\xff\xfd\xff\x01\x00\x03\x00\x04\x00\x01\x00\xfe\xff\xfe\xff\x01\x00\x01\x00\xfe\xff\xfb\xff\xf9\xff\xf9\xff\xfc\xff\xff\xff\x04\x00\x08\x00\x08\x00\t\x00\n\x00\n\x00\x07\x00\x04\x00\x00\x00\xfd\xff\xfb\xff\xfa\xff\xfa\xff\xfd\xff\x00\x00\x02\x00\x02\x00\x00\x00\xfe\xff\xfe\xff\xfc\xff\xf9\xff\xf6\xff\xf7\xff\xf9\xff\xf7\xff\xf5\xff\xf4\xff\xf6\xff\xfc\xff\x02\x00\x07\x00\x07\x00\x06\x00\x05\x00\x06\x00\x06\x00\x08\x00\t\x00\t\x00\x05\x00\x01\x00\xfe\xff\xfe\xff\x01\x00\x03\x00\x05\x00\x06\x00\x02\x00\x02\x00\x01\x00\x02\x00\x02\x00\x00\x00\xff\xff\xfc\xff\xfc\xff\xfa\xff\xfd\xff\xfe\xff\xff\xff\x02\x00\x05\x00\x07\x00\x07\x00\x05\x00\x01\x00\x01\x00\x02\x00\x06\x00\x03\x00\xfe\xff\xf8\xff\xf7\xff\xfa\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x04\x00\x04\x00\x04\x00\x02\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xfc\xff\xfa\xff\xfb\xff\xff\xff\x02\x00\x03\x00\x00\x00\xfd\xff\xfd\xff\x00\x00\x02\x00\x00\x00\xfd\xff\xfd\xff\xfd\xff\xfc\xff\xf9\xff\xf8\xff\xf9\xff\xfd\xff\xff\xff\x03\x00\x05\x00\x06\x00\x07\x00\x06\x00\x02\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xfb\xff\xf9\xff\xfb\xff\xfe\xff\x02\x00\x04\x00\x03\x00\x02\x00\x01\x00\x01\x00\x02\x00\x03\x00\x02\x00\xff\xff\xfc\xff\xfb\xff\xfd\xff\x01\x00\x03\x00\x04\x00\x04\x00\x03\x00\x04\x00\x03\x00\x02\x00\x00\x00\x01\x00\x06\x00\x07\x00\x04\x00\x00\x00\xfc\xff\xfb\xff\xfe\xff\x00\x00\x02\x00\x06\x00\x07\x00\x07\x00\x05\x00\x03\x00\x02\x00\x03\x00\x04\x00\x03\x00\xff\xff\xfc\xff\xfa\x
ff\xfb\xff\xfe\xff\x01\x00\xff\xff\xfc\xff\xfa\xff\xf9\xff\xfb\xff\xfc\xff\xfc\xff\xfb\xff\xfc\xff\xfe\xff\xfe\xff\xfc\xff\xfa\xff\xf9\xff\xfb\xff\xfe\xff\x03\x00\x04\x00\x04\x00\x06\x00\x08\x00\t\x00\n\x00\t\x00\x07\x00\x04\x00\xff\xff\xfc\xff\xfa\xff\xfd\xff\x00\x00\x02\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xfd\xff\xfb\xff\xf9\xff\xf6\xff\xf7\xff\xf7\xff\xf8\xff\xfc\xff\x00\x00\x02\x00\x02\x00\x04\x00\x03\x00\x01\x00\x01\x00\x01\x00\x04\x00\x07\x00\x08\x00\x04\x00\x02\x00\x02\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x02\x00\x03\x00\x02\x00\x04\x00\x05\x00\x05\x00\x05\x00\x04\x00\x01\x00\xfe\xff\xfd\xff\xfd\xff\x00\x00\x01\x00\x03\x00\x03\x00\x03\x00\x02\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfc\xff\xfa\xff\xf8\xff\xf9\xff\xfa\xff\xfe\xff\x01\x00\x01\x00\x00\x00\x00\x00\x02\x00\x04\x00\x03\x00\x04\x00\x04\x00\x05\x00\x03\x00\xfe\xff\xfb\xff\xf9\xff\xfc\xff\xff\xff\x03\x00\x06\x00\x04\x00\x02\x00\x01\x00\x01\x00\x02\x00\x02\x00\xff\xff\xfb\xff\xf8\xff\xf8\xff\xfc\xff\x00\x00\x02\x00\x02\x00\x02\x00\x02\x00\x02\x00\x00\x00\xfe\xff\xfb\xff\xfb\xff\xfe\xff\x00\x00\xff\xff\xfe\xff\xfc\xff\xfa\xff\xfb\xff\xff\xff\x03\x00\x05\x00\x04\x00\x03\x00\x04\x00\x04\x00\x07\x00\x07\x00\x07\x00\x04\x00\x02\x00\xfe\xff\xfd\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfd\xff\xfd\xff\xff\xff\xff\xff\xff\xff\xfd\xff\xfb\xff\xfc\xff\xfc\xff\xfd\xff\xfd\xff\xfe\xff\x00\x00\x03\x00\x07\x00\x08\x00\x07\x00\x08\x00\x06\x00\x07\x00\x08\x00\x07\x00\x04\x00\xfe\xff\xfa\xff\xf9\xff\xfa\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xfd\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfd\xff\xfb\xff\xfa\xff\xfa\xff\xfb\xff\xfc\xff\xfe\xff\x00\x00\x03\x00\x07\x00\x07\x00\x04\x00\x01\x00\x00\x00\x04\x00\x05\x00\x04\x00\x01\x00\xfb\xff\xfa\xff\xfc\xff\xfe\xff\x00\x00\x01\x00\x03\x00\x02\x00\x02\x00\x02\x00\x00\x00\xff\xff\xfe\xff\x00\x00\x00\x00\xfe\xff\xfd\xff\xfb\xff\xfc\xff\xfe\xff\x01\x00\x02\x00\x01\x00\x00\x00\x01\x00\x02\x00\x03\x00\x02\x00\xfe\xff\xfd\xff\xff\xff\xfe\xff\xfc\xff\xfc\xff\xfd\xff\x00\x00\x03\x00\x04\x00\x01\x00\x01\x00\x03\x00\x05\x00\x07\x00\x06\x00\x06\x00\x04\x00\x00\x00\xfe\xff\xfe\xff\xfe\xff\x00\x00\x03\x00\x04\x00\x05\x00\x05\x00\x02\x00\x01\x00\x01\x00\x02\x00\xff\xff\xfc\xff\xf8\xff\xf8\xff\xf9\xff\xfc\xff\xfe\xff\xfd\xff\xff\xff\xff\xff\x01\x00\x01\x00\xff\xff\xfc\xff\xfd\xff\x00\x00\x02\x00\x02\x00\x02\x00\xfe\xff\xfc\xff\xfc\xff\xff\xff\x02\x00\x04\x00\x05\x00\x05\x00\x05\x00\x06\x00\x05\x00\x05\x00\x04\x00\x03\x00\x00\x00\xfd\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\x01\x00\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\xfd\xff\xfa\xff\xfb\xff\xfd\xff\xfd\xff\xfd\xff\xfa\xff\xfa\xff\xfd\xff\xff\xff\x02\x00\x02\x00\x02\x00\x04\x00\x06\x00\x07\x00\x08\x00\x07\x00\x04\x00\x03\x00\x00\x00\xfe\xff\xfc\xff\xfc\xff\xff\xff\x02\x00\x02\x00\x01\x00\xfe\xff\xfe\xff\xff\xff\x00\x00\x01\x00\xfe\xff\xfb\xff\xf9\xff\xfa\xff\xfc\xff\xfe\xff\xff\xff\x01\x00\x03\x00\x06\x00\x05\x00\x02\x00\xff\xff\xff\xff\x00\x00\x02\x00\x01\x00\x00\x00\xfe\xff\xfe\xff\xfc\xff\xfc\xff\xfe\xff\x00\x00\x01\x00\x03\x00\x01\x00\x00\x00\xff\xff\x01\x00\x04\x00\x06\x00\x05\x00\x00\x00\xfd\xff\xfd\xff\xfd\xff\x00\x00\x02\x00\x02\x00\x02\x00\x02\x00\x03\x00\x02\x00\x02\x00\x00\x00\x01\x00\x02\x00\x00\x00\xfe\xff\xfc\xff\xfc\xff\xfe\xff\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x03\x00\x04\x00\x03\x00\x01\x00\x01\x00\x01\x00\x00\x00\xfe\xff\xfe\xff\xfc\xff\xfe\xff\x00\x00\x02\x00\x02\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\xff\xff\xfb\xff\xf9\xff\xf8\xff\xfa\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xfe\xff\x00\x0
0\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xfe\xff\x02\x00\x05\x00\x06\x00\x04\x00\x01\x00\xff\xff\x00\x00\x01\x00\x04\x00\x03\x00\x01\x00\xfe\xff\xfe\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\x01\x00\x02\x00\x02\x00\x00\x00\xff\xff\xfe\xff\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\x01\x00\x03\x00\x04\x00\x06\x00\x06\x00\x04\x00\x03\x00\x03\x00\x05\x00\x06\x00\x04\x00\x00\x00\xfe\xff\xfd\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xfd\xff\xfc\xff\xfb\xff\xfb\xff\xfb\xff\xfc\xff\xfc\xff\xfe\xff\xff\xff\x01\x00\x03\x00\x03\x00\x03\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfd\xff\xfe\xff\xff\xff\x02\x00\x02\x00\x00\x00\xfe\xff\xfc\xff\xfd\xff\x01\x00\x02\x00\x03\x00\x02\x00\x02\x00\x03\x00\x03\x00\x03\x00\x03\x00\x03\x00\x02\x00\x01\x00\x02\x00\x04\x00\x02\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xfe\xff\xfd\xff\xfe\xff\xfd\xff\xfd\xff\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x03\x00\x03\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x03\x00\x03\x00\x01\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfc\xff\xfb\xff\xfb\xff\xfc\xff\xfe\xff\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xfd\xff\xfd\xff\xfb\xff\xfb\xff\xfd\xff\xfe\xff\x00\x00\xff\xff\xff\xff\xfe\xff\x00\x00\x03\x00\x04\x00\x03\x00\x00\x00\xfe\xff\xfe\xff\x00\x00\x01\x00\x03\x00\x04\x00\x04\x00\x03\x00\x03\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x02\x00\x03\x00\x02\x00\x01\x00\xff\xff\xff\xff\xff\xff\x02\x00\x03\x00\x05\x00\x03\x00\x02\x00\x01\x00\x02\x00\x02\x00\x03\x00\x00\x00\x00\x00\xff\xff\xfd\xff\xfc\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xfd\xff\xfc\xff\xfb\xff\xfd\xff\xff\xff\x02\x00\x02\x00\x02\x00\x03\x00\x04\x00\x04\x00\x02\x00\x00\x00\xfe\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\xfd\xff\xfd\xff\xfd\xff\xff\xff\x00\x00\x02\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfd\xff\xfd\xff\xfe\xff\x00\x00\x03\x00\x04\x00\x05\x00\x04\x00\x03\x00\x02\x00\x01\x00\x02\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x01\x00\x02\x00\x02\x00\x02\x00\x01\x00\x03\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\xfd\xff\xfe\xff\x01\x00\x01\x00\x03\x00\x03\x00\x00\x00\xfe\xff\xfe\xff\xfd\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x03\x00\x04\x00\x04\x00\x02\x00\xff\xff\xff\xff\xff\xff\x01\x00\x02\x00\x04\x00\x04\x00\x02\x00\x02\x00\x01\x00\x01\x00\xff\xff\xfd\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x02\x00\x01\x00\x02\x00\xfe\xff\xfd\xff\xfe\xff\xfd\xff\x00\x00\x02\x00\x03\x00\x03\x00\x02\x00\x02\x00\x04\x00\x02\x00\x02\x00\x01\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x01\x00\x00\x00\x00\x00\xfe\xff\xfe\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xfd\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfc\xff\xfc\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\x01\x00\x02\x00\x04\x00\x05\x00\x04\x00\x03\x00\x00\x00\xff\xff\x00\x00\x03\x00\x02\x00\x03\x00\x02\x00\x01\x00\x01\x00\x01\x00\x02\x00\x03\x00\x02\x00\xff\xff\xff\xff\xfe\xff\xfd\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x02\x00\x02\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x
01\x00\x03\x00\x04\x00\x05\x00\x03\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xfd\xff\xfd\xff\xfd\xff\xfd\xff\xff\xff\x01\x00\x01\x00\x01\x00\xff\xff\xfe\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\xfe\xff\xfe\xff\xfc\xff\xfd\xff\xfe\xff\x00\x00\x02\x00\x01\x00\x00\x00\xfd\xff\xfe\xff\xfc\xff\xfc\xff\xfd\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x03\x00\x04\x00\x03\x00\x01\x00\xff\xff\xfe\xff\x00\x00\x02\x00\x04\x00\x02\x00\x03\x00\x02\x00\x02\x00\x02\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x02\x00\x03\x00\x03\x00\x04\x00\x02\x00\xff\xff\xfe\xff\xfe\xff\x00\x00\x01\x00\x01\x00\x02\x00\x04\x00\x03\x00\x02\x00\x01\x00\x01\x00\x03\x00\x03\x00\x03\x00\xff\xff\xfe\xff\xfd\xff\xff\xff\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x02\x00\x00\x00\xfe\xff\xfe\xff\xfc\xff\xfc\xff\xfc\xff\xfc\xff\xfd\xff\x00\x00\x02\x00\x01\x00\x01\x00\x00\x00\x02\x00\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfc\xff\xfd\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfc\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\x01\x00\x02\x00\x02\x00\x02\x00\x03\x00\x04\x00\x04\x00\x03\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x03\x00\x06\x00\x05\x00\x03\x00\x02\x00\x02\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfd\xff\xfd\xff\xfd\xff\xff\xff\x01\x00\x01\x00\xff\xff\xff\xff\x01\x00\x04\x00\x02\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xfc\xff\xfc\xff\xfe\xff\x00\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfd\xff\xfe\xff\x00\x00\xff\xff\xfe\xff\xfc\xff\xfe\xff\x00\x00\x03\x00\x03\x00\x03\x00\x03\x00\x01\x00\x02\x00\x00\x00\x01\x00\x01\x00\xff\xff\xff\xff\xfe\xff\xfb\xff\xfa\xff\xfb\xff\xfe\xff\x00\x00\x02\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x02\x00\x05\x00\x07\x00\x08\x00\x07\x00\x05\x00\x04\x00\x03\x00\x01\x00\xff\xff\xfd\xff\xfd\xff\xfd\xff\xfd\xff\xff\xff\x01\x00\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\x02\x00\x00\x00\xfe\xff\xfb\xff\xfa\xff\xfa\xff\xfd\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x02\x00\x01\x00\x00\x00\xfe\xff\x00\x00\x00\x00\xff\xff\xfd\xff\xfb\xff\xfd\xff\x00\x00\x02\x00\x02\x00\x02\x00\x02\x00\x01\x00\x02\x00\x01\x00\x00\x00\xff\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfd\xff\xfe\xff\x01\x00\x01\x00\x02\x00\x02\x00\x04\x00\x04\x00\x04\x00\x02\x00\x01\x00\x02\x00\x01\x00\x02\x00\xff\xff\xfc\xff\xfe\xff\xff\xff\x02\x00\x04\x00\x07\x00\x05\x00\x03\x00\x03\x00\x01\x00\xff\xff\xfd\xff\xfc\xff\xfc\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x01\x00\x03\x00\x02\x00\x02\x00\x03\x00\x02\x00\x02\x00\x00\x00\xff\xff\xfd\xff\xfb\xff\xfd\xff\xfc\xff\xfd\xff\xfd\xff\xfe\xff\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\xfd\xff\xfc\xff\xfc\xff\xfc\xff\xfd\xff\xff\xff\x01\x00\x02\x00\x03\x00\x05\x00\x05\x00\x02\x00\xff\xff\xfc\xff\xfd\xff\xfe\xff\xff\xff\xfe\xff\xfd\xff\xfd\xff\xfd\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x03\x00\x04\x00\x03\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x00\x00\xff\xff\x01\x00\x02\x00\x06\x00\x06\x00\x06\x00\x06\x00\x06\x00\x06\x00\x03\x00\x02\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfd\xff\xfc\xff\xfd\xff\xff\xff\x02\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\xfe\xff\xfe\xff\xfb\xff\xfc\xff\xfc\xff\xfc\xff\xfb\xff\xfd\xff\xff\xff\x02\x00\x03\x00\x03\x00\x03\x00\x03\x00\x01\x00\xfe\xff\xfd\xff\xfb\xff\xfa\xff\xfc\xff\xfc\xff\xfc\xff\xfe\xff\x00\
x00\x01\x00\x02\x00\x03\x00\x02\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfc\xff\xfc\xff\xfd\xff\xff\xff\x01\x00\x02\x00\x01\x00\x03\x00\x02\x00\x02\x00\x01\x00\xff\xff\xff\xff\xff\xff\x01\x00\x02\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x03\x00\x03\x00\x03\x00\x04\x00\x05\x00\x06\x00\x04\x00\x03\x00\x03\x00\x02\x00\x02\x00\x01\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x02\x00\x03\x00\x03\x00\x04\x00\x04\x00\x03\x00\x02\x00\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfc\xff\xfb\xff\xfe\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xfd\xff\xfd\xff\xfb\xff\xfb\xff\xfb\xff\xfb\xff\xfb\xff\xfa\xff\xfe\xff\x01\x00\x03\x00\x03\x00\x02\x00\x02\x00\x01\x00\xff\xff\xfd\xff\xfc\xff\xfa\xff\xfa\xff\xfc\xff\xfc\xff\xfd\xff\x00\x00\x01\x00\x03\x00\x02\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xfd\xff\xff\xff\xfe\xff\x00\x00\x01\x00\x01\x00\x02\x00\x04\x00\x06\x00\x06\x00\x06\x00\x05\x00\x03\x00\x02\x00\x01\x00\x04\x00\x04\x00\x02\x00\x00\x00\xfe\xff\xff\xff\x01\x00\x03\x00\x02\x00\x02\x00\x03\x00\x04\x00\x04\x00\x02\x00\x00\x00\xfe\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfd\xff\xff\xff\x00\x00\x02\x00\x03\x00\x02\x00\x01\x00\x02\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfd\xff\xfc\xff\xfb\xff\xfb\xff\xfb\xff\xfd\xff\xff\xff\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\xfe\xff\xfd\xff\xfd\xff\xfb\xff\xfb\xff\xfb\xff\xfb\xff\xfc\xff\xff\xff\x02\x00\x02\x00\x03\x00\x02\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xfe\xff\xfd\xff\xfc\xff\xff\xff\x01\x00\x04\x00\x06\x00\x05\x00\x04\x00\x04\x00\x02\x00\x01\x00\x01\x00\x00\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x03\x00\x03\x00\x05\x00\x05\x00\x05\x00\x05\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xfe\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xfe\xff\xfc\xff\xfc\xff\xfd\xff\xfd\xff\xfd\xff\xfb\xff\xfb\xff\xfd\xff\xff\xff\x02\x00\x03\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x00\x00\xfe\xff\xfc\xff\xfb\xff\xfd\xff\xfd\xff\xfe\xff\xfd\xff\xfe\xff\x00\x00\x00\x00\x02\x00\x01\x00\xff\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\x02\x00\x05\x00\x04\x00\x05\x00\x06\x00\x05\x00\x04\x00\x04\x00\x02\x00\x02\x00\x00\x00\xfe\xff\xfd\xff\xfc\xff\xfd\xff\xff\xff\x00\x00\x02\x00\x02\x00\x04\x00\x05\x00\x04\x00\x01\x00\xff\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x02\x00\x03\x00\x03\x00\x04\x00\x03\x00\x03\x00\x02\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfd\xff\xfb\xff\xfb\xff\xfb\xff\xfe\xff\xff\xff\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\xff\xff\xfd\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xfe\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x02\x00\x04\x00\x06\x00\x04\x00\x04\x00\x02\x00\x03\x00\x04\x00\x03\x00\x00\x00\xff\xff\xfd\xff\xfd\xff\xff\xff\xff\xff\x02\x00\x04\x00\x06\x00\x06\x00\x04\x00\x01\x00\xff\xff\xff\xff\xfe\xff\xfc\xff\xfc\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x01\x00\x02\x00\x01\x00\x00\x00\xff\xff\xfc\xff\xfb\xff\xfd\xff\xfe\xff\xfe\xff\xfd\xff\xfd\xff\xfe\xff\x00\x00\x01\x00\x02\x00\x02\x00\x02\x00\x03\x00\x02\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\xfe\xff\xfe\xff\xfd\xff\xfd\xff\xfd\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x02\x00\x02\x00\x01\x00\xff\xff\xff\xff\x01\x00\x00\x00\x02\x00\x04\x00\x06\x00\x06\x00\x06\x00\x05\x00\x03\x00\x03\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\xfd\xff\xfd\xff\xfc\xff\xff\xff\x03\x00\x04\x00\x04\x00\x03\x00\x02\x00\x00\x00
\x00\x00\x00\x00\xfe\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x03\x00\x01\x00\xfe\xff\xfc\xff\xfb\xff\xfd\xff\xfc\xff\xfc\xff\xfc\xff\xfd\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xfd\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x02\x00\x04\x00\x03\x00\x03\x00\x01\x00\x01\x00\x03\x00\x03\x00\x02\x00\x01\x00\xff\xff\xff\xff\xfe\xff\x00\x00\xff\xff\x02\x00\x03\x00\x04\x00\x04\x00\x02\x00\x01\x00\xfe\xff\xfe\xff\xfd\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\x01\x00\x02\x00\x02\x00\x01\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x02\x00\x04\x00\x03\x00\x02\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfd\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfc\xff\xfd\xff\xfe\xff\xfe\xff\x00\x00\xfe\xff\xff\xff\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x04\x00\x04\x00\x04\x00\x03\x00\x02\x00\x02\x00\x02\x00\x02\x00\x01\x00\x00\x00\xfe\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\x02\x00\x02\x00\x02\x00\x00\x00\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfd\xff\xfe\xff\x00\x00\x01\x00\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfe\xff\xfd\xff\xfd\xff\xff\xff\x01\x00\x01\x00\x01\x00\x02\x00\x03\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x02\x00\x02\x00\x01\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\x02\x00\x03\x00\x04\x00\x02\x00\x00\x00\xff\xff\x01\x00\x02\x00\x03\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xfe\xff\xff\xff\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x01\x00\x02\x00\x03\x00\x03\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\x01\x00\x00\x00\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\x01\x00\x02\x00\x01\x00\x02\x00\x01\x00\x02\x00\x03\x00\x03\x00\x02\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x01\x00\x02\x00\x03\x00\x03\x00\x02\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfd\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x03\x00\x03\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfd\xff\xfd\xff\xff\xff\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xfd\xff\xfd\xff\xfc\xff\xfd\xff\xfe\xff\x00\x00\x01\x00\x00\x00\xff\xff\xfe\xff\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x03\x00\x02\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xfe\xff\xff\xff\x00\x00\xff\xff\xfe\xff\xfd\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x0
2\x00\x02\x00\x02\x00\x03\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x02\x00\x02\x00\x02\x00\x00\x00\x00\x00\xff\xff\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x03\x00\x03\x00\x03\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\x01\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x03\x00\x03\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\xfd\xff\xfd\xff\xfd\xff\xfe\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xfd\xff\xff\xff\xff\xff\xfe\xff\xfd\xff\xfd\xff\xfe\xff\x00\x00\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x02\x00\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xfe\xff\xfd\xff\xfc\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x02\x00\x01\x00\x01\x00\x02\x00\xff\xff\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\xff\xff\x00\x00\x02\x00\x00\x00\xff\xff\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xfe\xff\xfd\xff\xfd\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xfe\xff\xfe\xff\xfd\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfd\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x03\x00\x02\x00\x01\x00\x00\x00\xfd\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x03\x00\x02\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfd\xff\xfd\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfd\xff\xfd\xff\xfd\xff\xfc\xff\xfe\xff\xfe\xff\xff\xff\xff\x
ff\x00\x00\x01\x00\x02\x00\x02\x00\x03\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xfe\xff\x00\x00\x01\x00\x03\x00\x03\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x02\x00\x03\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x03\x00\x02\x00\x02\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xfd\xff\xfc\xff\xfd\xff\xfd\xff\xfb\xff\xfd\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfd\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\xfe\xff\x00\x00\xff\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xfe\xff\xff\xff\xfe\xff\xfd\xff\xfc\xff\xfc\xff\xfe\xff\xfd\xff\xff\xff\xff\xff\xff\xff\x01\x00\x03\x00\x02\x00\x02\x00\x01\x00\x00\x00\x01\x00\x01\x00\x02\x00\x00\x00\xff\xff\xff\xff\x00\x00\x02\x00\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x02\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x01\x00\x02\x00\x03\x00\x03\x00\x02\x00\x02\x00\x01\x00\x01\x00\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x02\x00\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\x01\x00\x02\x00\x02\x00\x01\x00\x02\x00\x02\x00\x01\x00\x03\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xfe\xff\x00\x00\xff\xff\xff\xff\x00\x00\xfe\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfe\xff\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\
x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xfd\xff\xfd\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\x01\x00\x02\x00\x02\x00\x02\x00\x03\x00\x03\x00\x02\x00\x03\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x01\x00\xff\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x02\x00\x01\x00\x02\x00\x02\x00\x02\x00\x03\x00\x02\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfd\xff\xfe\xff\xfd\xff\xfe\xff\xfd\xff\xfd\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x02\x00\x01\x00\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x03\x00\x02\x00\x02\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfd\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x02\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xfd\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x02\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xfe\xff\x00
\x00\xff\xff\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x03\x00\x02\x00\x02\x00\x02\x00\x02\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x02\x00\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf
f\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x02\x00\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xfd\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x
01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x03\x00\x02\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x02\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\
x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\x
ff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xf
f\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x
00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x0
0\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x0
0\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\
x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x
00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\
x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01
\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\xff\xf
f\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x
00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
[... long run of serialized \x00/\x01/\xff audio byte data from a deleted test snapshot elided ...]
01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00' -# --- diff --git a/tests/components/voip/test_binary_sensor.py b/tests/components/voip/test_binary_sensor.py index 44ac8e4d77f..58f1e0ea53b 100644 --- a/tests/components/voip/test_binary_sensor.py +++ b/tests/components/voip/test_binary_sensor.py @@ -1,21 +1,10 @@ """Test VoIP binary sensor devices.""" -from http import HTTPStatus - -import pytest - -from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN -from homeassistant.components.voip import DOMAIN from homeassistant.components.voip.devices import VoIPDevice from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er, issue_registry as ir -from homeassistant.setup import async_setup_component - -from tests.typing import ClientSessionGenerator -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_call_in_progress( hass: HomeAssistant, config_entry: ConfigEntry, @@ -35,131 +24,3 @@ async def test_call_in_progress( state = hass.states.get("binary_sensor.192_168_1_210_call_in_progress") assert state.state == "off" - - -@pytest.mark.usefixtures("voip_device") -async def test_assist_in_progress_disabled_by_default( - hass: HomeAssistant, - config_entry: ConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test assist in progress binary sensor is added disabled.""" - - assert not hass.states.get("binary_sensor.192_168_1_210_call_in_progress") - entity_entry = entity_registry.async_get( - "binary_sensor.192_168_1_210_call_in_progress" - ) - assert entity_entry - assert entity_entry.disabled - assert entity_entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION - - 
-@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_assist_in_progress_issue( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - issue_registry: ir.IssueRegistry, - voip_device: VoIPDevice, -) -> None: - """Test assist in progress binary sensor.""" - - call_in_progress_entity_id = "binary_sensor.192_168_1_210_call_in_progress" - - state = hass.states.get(call_in_progress_entity_id) - assert state is not None - - entity_entry = entity_registry.async_get(call_in_progress_entity_id) - issue = issue_registry.async_get_issue( - DOMAIN, f"assist_in_progress_deprecated_{entity_entry.id}" - ) - assert issue is not None - - # Test issue goes away after disabling the entity - entity_registry.async_update_entity( - call_in_progress_entity_id, - disabled_by=er.RegistryEntryDisabler.USER, - ) - await hass.async_block_till_done() - issue = issue_registry.async_get_issue( - DOMAIN, f"assist_in_progress_deprecated_{entity_entry.id}" - ) - assert issue is None - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_assist_in_progress_repair_flow( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - entity_registry: er.EntityRegistry, - issue_registry: ir.IssueRegistry, - voip_device: VoIPDevice, -) -> None: - """Test assist in progress binary sensor deprecation issue flow.""" - - call_in_progress_entity_id = "binary_sensor.192_168_1_210_call_in_progress" - - state = hass.states.get(call_in_progress_entity_id) - assert state is not None - - entity_entry = entity_registry.async_get(call_in_progress_entity_id) - assert entity_entry.disabled_by is None - issue = issue_registry.async_get_issue( - DOMAIN, f"assist_in_progress_deprecated_{entity_entry.id}" - ) - assert issue is not None - assert issue.data == { - "entity_id": call_in_progress_entity_id, - "entity_uuid": entity_entry.id, - "integration_name": "VoIP", - } - assert issue.translation_key == "assist_in_progress_deprecated" - assert issue.translation_placeholders == {"integration_name": "VoIP"} - - assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) - await hass.async_block_till_done() - await hass.async_start() - - client = await hass_client() - - resp = await client.post( - "/api/repairs/issues/fix", - json={"handler": DOMAIN, "issue_id": issue.issue_id}, - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data == { - "data_schema": [], - "description_placeholders": { - "assist_satellite_domain": "assist_satellite", - "entity_id": call_in_progress_entity_id, - "integration_name": "VoIP", - }, - "errors": None, - "flow_id": flow_id, - "handler": DOMAIN, - "last_step": None, - "preview": None, - "step_id": "confirm_disable_entity", - "type": "form", - } - - resp = await client.post(f"/api/repairs/issues/fix/{flow_id}") - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data == { - "description": None, - "description_placeholders": None, - "flow_id": flow_id, - "handler": DOMAIN, - "type": "create_entry", - } - - # Test the entity is disabled - entity_entry = entity_registry.async_get(call_in_progress_entity_id) - assert entity_entry.disabled_by is er.RegistryEntryDisabler.USER diff --git a/tests/components/voip/test_repairs.py b/tests/components/voip/test_repairs.py deleted file mode 100644 index ec2a2cfed96..00000000000 --- a/tests/components/voip/test_repairs.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Test VoIP repairs.""" - -import pytest 
- -from homeassistant.components.voip import repairs -from homeassistant.core import HomeAssistant - - -async def test_create_fix_flow_raises_on_unknown_issue_id(hass: HomeAssistant) -> None: - """Test reate_fix_flow raises on unknown issue_id.""" - - with pytest.raises(ValueError): - await repairs.async_create_fix_flow(hass, "no_such_issue", None) diff --git a/tests/components/voip/test_select.py b/tests/components/voip/test_select.py index 78bb8d6c6b4..a9741b44081 100644 --- a/tests/components/voip/test_select.py +++ b/tests/components/voip/test_select.py @@ -15,7 +15,7 @@ async def test_pipeline_select( Functionality is tested in assist_pipeline/test_select.py. This test is only to ensure it is set up. """ - state = hass.states.get("select.192_168_1_210_assistant") + state = hass.states.get("select.192_168_1_210_assist_pipeline") assert state is not None assert state.state == "preferred" diff --git a/tests/components/voip/test_voip.py b/tests/components/voip/test_voip.py index 17af2748c1c..6c292241237 100644 --- a/tests/components/voip/test_voip.py +++ b/tests/components/voip/test_voip.py @@ -3,27 +3,15 @@ import asyncio import io from pathlib import Path -from typing import Any +import time from unittest.mock import AsyncMock, Mock, patch import wave import pytest -from syrupy.assertion import SnapshotAssertion -from voip_utils import CallInfo -from homeassistant.components import assist_pipeline, assist_satellite, tts, voip -from homeassistant.components.assist_satellite import AssistSatelliteEntity - -# pylint: disable-next=hass-component-root-import -from homeassistant.components.assist_satellite.entity import AssistSatelliteState -from homeassistant.components.voip import HassVoipDatagramProtocol -from homeassistant.components.voip.assist_satellite import Tones, VoipAssistSatellite -from homeassistant.components.voip.devices import VoIPDevice, VoIPDevices -from homeassistant.components.voip.voip import PreRecordMessageProtocol, make_protocol -from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.components import assist_pipeline, voip +from homeassistant.components.voip.devices import VoIPDevice from homeassistant.core import Context, HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity_component import EntityComponent from homeassistant.setup import async_setup_component _ONE_SECOND = 16000 * 2 # 16Khz 16-bit @@ -31,8 +19,9 @@ _MEDIA_ID = "12345" @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: """Mock the TTS cache dir with empty dir.""" + return mock_tts_cache_dir def _empty_wav() -> bytes: @@ -47,194 +36,30 @@ def _empty_wav() -> bytes: return wav_io.getvalue() -def async_get_satellite_entity( - hass: HomeAssistant, domain: str, unique_id_prefix: str -) -> AssistSatelliteEntity | None: - """Get Assist satellite entity.""" - ent_reg = er.async_get(hass) - satellite_entity_id = ent_reg.async_get_entity_id( - Platform.ASSIST_SATELLITE, domain, f"{unique_id_prefix}-assist_satellite" - ) - if satellite_entity_id is None: - return None - assert not satellite_entity_id.endswith("none") - - component: EntityComponent[AssistSatelliteEntity] = hass.data[ - assist_satellite.DOMAIN - ] - return component.get_entity(satellite_entity_id) - - -async def test_is_valid_call( - hass: HomeAssistant, - voip_devices: VoIPDevices, - voip_device: VoIPDevice, - call_info: CallInfo, -) -> None: - """Test that 
a call is now allowed from an unknown device.""" - assert await async_setup_component(hass, "voip", {}) - protocol = HassVoipDatagramProtocol(hass, voip_devices) - assert not protocol.is_valid_call(call_info) - - ent_reg = er.async_get(hass) - allowed_call_entity_id = ent_reg.async_get_entity_id( - "switch", voip.DOMAIN, f"{voip_device.voip_id}-allow_call" - ) - assert allowed_call_entity_id is not None - state = hass.states.get(allowed_call_entity_id) - assert state is not None - assert state.state == STATE_OFF - - # Allow calls - hass.states.async_set(allowed_call_entity_id, STATE_ON) - assert protocol.is_valid_call(call_info) - - -async def test_calls_not_allowed( - hass: HomeAssistant, - voip_devices: VoIPDevices, - voip_device: VoIPDevice, - call_info: CallInfo, - snapshot: SnapshotAssertion, -) -> None: - """Test that a pre-recorded message is played when calls aren't allowed.""" - assert await async_setup_component(hass, "voip", {}) - protocol: PreRecordMessageProtocol = make_protocol(hass, voip_devices, call_info) - assert isinstance(protocol, PreRecordMessageProtocol) - assert protocol.file_name == "problem.pcm" - - # Test the playback - done = asyncio.Event() - played_audio_bytes = b"" - - def send_audio(audio_bytes: bytes, **kwargs): - nonlocal played_audio_bytes - - # Should be problem.pcm from components/voip - played_audio_bytes = audio_bytes - done.set() - - protocol.transport = Mock() - protocol.loop_delay = 0 - with patch.object(protocol, "send_audio", send_audio): - protocol.on_chunk(bytes(_ONE_SECOND)) - - async with asyncio.timeout(1): - await done.wait() - - assert sum(played_audio_bytes) > 0 - assert played_audio_bytes == snapshot() - - -async def test_pipeline_not_found( - hass: HomeAssistant, - voip_devices: VoIPDevices, - voip_device: VoIPDevice, - call_info: CallInfo, - snapshot: SnapshotAssertion, -) -> None: - """Test that a pre-recorded message is played when a pipeline isn't found.""" - assert await async_setup_component(hass, "voip", {}) - - with patch( - "homeassistant.components.voip.voip.async_get_pipeline", return_value=None - ): - protocol: PreRecordMessageProtocol = make_protocol( - hass, voip_devices, call_info - ) - - assert isinstance(protocol, PreRecordMessageProtocol) - assert protocol.file_name == "problem.pcm" - - -async def test_satellite_prepared( - hass: HomeAssistant, - voip_devices: VoIPDevices, - voip_device: VoIPDevice, - call_info: CallInfo, - snapshot: SnapshotAssertion, -) -> None: - """Test that satellite is prepared for a call.""" - assert await async_setup_component(hass, "voip", {}) - - pipeline = assist_pipeline.Pipeline( - conversation_engine="test", - conversation_language="en", - language="en", - name="test", - stt_engine="test", - stt_language="en", - tts_engine="test", - tts_language="en", - tts_voice=None, - wake_word_entity=None, - wake_word_id=None, - ) - - satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) - assert isinstance(satellite, VoipAssistSatellite) - - with ( - patch( - "homeassistant.components.voip.voip.async_get_pipeline", - return_value=pipeline, - ), - ): - protocol = make_protocol(hass, voip_devices, call_info) - assert protocol == satellite - - async def test_pipeline( hass: HomeAssistant, - voip_devices: VoIPDevices, voip_device: VoIPDevice, - call_info: CallInfo, ) -> None: """Test that pipeline function is called from RTP protocol.""" assert await async_setup_component(hass, "voip", {}) - satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) - assert 
isinstance(satellite, VoipAssistSatellite) - voip_user_id = satellite.config_entry.data["user"] - assert voip_user_id - - # Satellite is muted until a call begins - assert satellite.state == AssistSatelliteState.IDLE + def is_speech(self, chunk): + """Anything non-zero is speech.""" + return sum(chunk) > 0 done = asyncio.Event() # Used to test that audio queue is cleared before pipeline starts bad_chunk = bytes([1, 2, 3, 4]) - async def async_pipeline_from_audio_stream( - hass: HomeAssistant, - context: Context, - *args, - device_id: str | None, - tts_audio_output: str | dict[str, Any] | None, - **kwargs, - ): - assert context.user_id == voip_user_id + async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): assert device_id == voip_device.device_id - # voip can only stream WAV - assert tts_audio_output == { - tts.ATTR_PREFERRED_FORMAT: "wav", - tts.ATTR_PREFERRED_SAMPLE_RATE: 16000, - tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 1, - tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, - } - stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - in_command = False - async for chunk in stt_stream: + async for _chunk in stt_stream: # Stream will end when VAD detects end of "speech" - assert chunk != bad_chunk - if sum(chunk) > 0: - in_command = True - elif in_command: - break # done with command + assert _chunk != bad_chunk # Test empty data event_callback( @@ -244,38 +69,6 @@ async def test_pipeline( ) ) - event_callback( - assist_pipeline.PipelineEvent( - type=assist_pipeline.PipelineEventType.STT_START, - data={"engine": "test", "metadata": {}}, - ) - ) - - assert satellite.state == AssistSatelliteState.LISTENING - - # Fake STT result - event_callback( - assist_pipeline.PipelineEvent( - type=assist_pipeline.PipelineEventType.STT_END, - data={"stt_output": {"text": "fake-text"}}, - ) - ) - - event_callback( - assist_pipeline.PipelineEvent( - type=assist_pipeline.PipelineEventType.INTENT_START, - data={ - "engine": "test", - "language": hass.config.language, - "intent_input": "fake-text", - "conversation_id": None, - "device_id": None, - }, - ) - ) - - assert satellite.state == AssistSatelliteState.PROCESSING - # Fake intent result event_callback( assist_pipeline.PipelineEvent( @@ -288,21 +81,6 @@ async def test_pipeline( ) ) - # Fake tts result - event_callback( - assist_pipeline.PipelineEvent( - type=assist_pipeline.PipelineEventType.TTS_START, - data={ - "engine": "test", - "language": hass.config.language, - "voice": "test", - "tts_input": "fake-text", - }, - ) - ) - - assert satellite.state == AssistSatelliteState.RESPONDING - # Proceed with media output event_callback( assist_pipeline.PipelineEvent( @@ -311,18 +89,6 @@ async def test_pipeline( ) ) - event_callback( - assist_pipeline.PipelineEvent( - type=assist_pipeline.PipelineEventType.RUN_END - ) - ) - - original_tts_response_finished = satellite.tts_response_finished - - def tts_response_finished(): - original_tts_response_finished() - done.set() - async def async_get_media_source_audio( hass: HomeAssistant, media_source_id: str, @@ -332,55 +98,101 @@ async def test_pipeline( with ( patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", + new=is_speech, + ), + patch( + "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.assist_satellite.tts.async_get_media_source_audio", + 
"homeassistant.components.voip.voip.tts.async_get_media_source_audio", new=async_get_media_source_audio, ), - patch.object(satellite, "tts_response_finished", tts_response_finished), ): - satellite._tones = Tones(0) - satellite.transport = Mock() - - satellite.connection_made(satellite.transport) - assert satellite.state == AssistSatelliteState.IDLE + rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( + hass, + hass.config.language, + voip_device, + Context(), + opus_payload_type=123, + listening_tone_enabled=False, + processing_tone_enabled=False, + error_tone_enabled=False, + silence_seconds=assist_pipeline.vad.VadSensitivity.to_seconds("aggressive"), + ) + rtp_protocol.transport = Mock() # Ensure audio queue is cleared before pipeline starts - satellite._audio_queue.put_nowait(bad_chunk) + rtp_protocol._audio_queue.put_nowait(bad_chunk) def send_audio(*args, **kwargs): - # Don't send audio - pass + # Test finished successfully + done.set() - satellite.send_audio = Mock(side_effect=send_audio) + rtp_protocol.send_audio = Mock(side_effect=send_audio) # silence - satellite.on_chunk(bytes(_ONE_SECOND)) + rtp_protocol.on_chunk(bytes(_ONE_SECOND)) # "speech" - satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) + rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) - # silence - satellite.on_chunk(bytes(_ONE_SECOND)) + # silence (assumes aggressive VAD sensitivity) + rtp_protocol.on_chunk(bytes(_ONE_SECOND)) # Wait for mock pipeline to exhaust the audio stream async with asyncio.timeout(1): await done.wait() - # Finished speaking - assert satellite.state == AssistSatelliteState.IDLE - -async def test_stt_stream_timeout( - hass: HomeAssistant, voip_devices: VoIPDevices, voip_device: VoIPDevice -) -> None: - """Test timeout in STT stream during pipeline run.""" +async def test_pipeline_timeout(hass: HomeAssistant, voip_device: VoIPDevice) -> None: + """Test timeout during pipeline run.""" assert await async_setup_component(hass, "voip", {}) - satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) - assert isinstance(satellite, VoipAssistSatellite) + done = asyncio.Event() + + async def async_pipeline_from_audio_stream(*args, **kwargs): + await asyncio.sleep(10) + + with ( + patch( + "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + patch( + "homeassistant.components.voip.voip.PipelineRtpDatagramProtocol._wait_for_speech", + return_value=True, + ), + ): + rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( + hass, + hass.config.language, + voip_device, + Context(), + opus_payload_type=123, + pipeline_timeout=0.001, + listening_tone_enabled=False, + processing_tone_enabled=False, + error_tone_enabled=False, + ) + transport = Mock(spec=["close"]) + rtp_protocol.connection_made(transport) + + # Closing the transport will cause the test to succeed + transport.close.side_effect = done.set + + # silence + rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + + # Wait for mock pipeline to time out + async with asyncio.timeout(1): + await done.wait() + + +async def test_stt_stream_timeout(hass: HomeAssistant, voip_device: VoIPDevice) -> None: + """Test timeout in STT stream during pipeline run.""" + assert await async_setup_component(hass, "voip", {}) done = asyncio.Event() @@ -391,19 +203,28 @@ async def test_stt_stream_timeout( pass with patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", 
new=async_pipeline_from_audio_stream, ): - satellite._tones = Tones(0) - satellite._audio_chunk_timeout = 0.001 + rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( + hass, + hass.config.language, + voip_device, + Context(), + opus_payload_type=123, + audio_timeout=0.001, + listening_tone_enabled=False, + processing_tone_enabled=False, + error_tone_enabled=False, + ) transport = Mock(spec=["close"]) - satellite.connection_made(transport) + rtp_protocol.connection_made(transport) # Closing the transport will cause the test to succeed transport.close.side_effect = done.set # silence - satellite.on_chunk(bytes(_ONE_SECOND)) + rtp_protocol.on_chunk(bytes(_ONE_SECOND)) # Wait for mock pipeline to time out async with asyncio.timeout(1): @@ -412,34 +233,23 @@ async def test_stt_stream_timeout( async def test_tts_timeout( hass: HomeAssistant, - voip_devices: VoIPDevices, voip_device: VoIPDevice, ) -> None: """Test that TTS will time out based on its length.""" assert await async_setup_component(hass, "voip", {}) - satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) - assert isinstance(satellite, VoipAssistSatellite) + def is_speech(self, chunk): + """Anything non-zero is speech.""" + return sum(chunk) > 0 done = asyncio.Event() async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - in_command = False - async for chunk in stt_stream: - if sum(chunk) > 0: - in_command = True - elif in_command: - break # done with command - - # Fake STT result - event_callback( - assist_pipeline.PipelineEvent( - type=assist_pipeline.PipelineEventType.STT_END, - data={"stt_output": {"text": "fake-text"}}, - ) - ) + async for _chunk in stt_stream: + # Stream will end when VAD detects end of "speech" + pass # Fake intent result event_callback( @@ -463,7 +273,15 @@ async def test_tts_timeout( tone_bytes = bytes([1, 2, 3, 4]) - async def async_send_audio(audio_bytes: bytes, **kwargs): + def send_audio(audio_bytes, **kwargs): + if audio_bytes == tone_bytes: + # Not TTS + return + + # Block here to force a timeout in _send_tts + time.sleep(2) + + async def async_send_audio(audio_bytes, **kwargs): if audio_bytes == tone_bytes: # Not TTS return @@ -480,22 +298,37 @@ async def test_tts_timeout( with ( patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", + new=is_speech, + ), + patch( + "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.assist_satellite.tts.async_get_media_source_audio", + "homeassistant.components.voip.voip.tts.async_get_media_source_audio", new=async_get_media_source_audio, ), ): - satellite._tts_extra_timeout = 0.001 - for tone in Tones: - satellite._tone_bytes[tone] = tone_bytes + rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( + hass, + hass.config.language, + voip_device, + Context(), + opus_payload_type=123, + tts_extra_timeout=0.001, + listening_tone_enabled=True, + processing_tone_enabled=True, + error_tone_enabled=True, + silence_seconds=assist_pipeline.vad.VadSensitivity.to_seconds("relaxed"), + ) + rtp_protocol._tone_bytes = tone_bytes + rtp_protocol._processing_bytes = tone_bytes + rtp_protocol._error_bytes = tone_bytes + rtp_protocol.transport = Mock() + rtp_protocol.send_audio = Mock() - satellite.transport = Mock() - satellite.send_audio = Mock() - - original_send_tts = 
satellite._send_tts + original_send_tts = rtp_protocol._send_tts async def send_tts(*args, **kwargs): # Call original then end test successfully @@ -504,17 +337,17 @@ async def test_tts_timeout( done.set() - satellite._async_send_audio = AsyncMock(side_effect=async_send_audio) # type: ignore[method-assign] - satellite._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] + rtp_protocol._async_send_audio = AsyncMock(side_effect=async_send_audio) # type: ignore[method-assign] + rtp_protocol._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] # silence - satellite.on_chunk(bytes(_ONE_SECOND)) + rtp_protocol.on_chunk(bytes(_ONE_SECOND)) # "speech" - satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) + rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) - # silence - satellite.on_chunk(bytes(_ONE_SECOND)) + # silence (assumes relaxed VAD sensitivity) + rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) # Wait for mock pipeline to exhaust the audio stream async with asyncio.timeout(1): @@ -523,34 +356,23 @@ async def test_tts_timeout( async def test_tts_wrong_extension( hass: HomeAssistant, - voip_devices: VoIPDevices, voip_device: VoIPDevice, ) -> None: """Test that TTS will only stream WAV audio.""" assert await async_setup_component(hass, "voip", {}) - satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) - assert isinstance(satellite, VoipAssistSatellite) + def is_speech(self, chunk): + """Anything non-zero is speech.""" + return sum(chunk) > 0 done = asyncio.Event() async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - in_command = False - async for chunk in stt_stream: - if sum(chunk) > 0: - in_command = True - elif in_command: - break # done with command - - # Fake STT result - event_callback( - assist_pipeline.PipelineEvent( - type=assist_pipeline.PipelineEventType.STT_END, - data={"stt_output": {"text": "fake-text"}}, - ) - ) + async for _chunk in stt_stream: + # Stream will end when VAD detects end of "speech" + pass # Fake intent result event_callback( @@ -581,17 +403,28 @@ async def test_tts_wrong_extension( with ( patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", + new=is_speech, + ), + patch( + "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.assist_satellite.tts.async_get_media_source_audio", + "homeassistant.components.voip.voip.tts.async_get_media_source_audio", new=async_get_media_source_audio, ), ): - satellite.transport = Mock() + rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( + hass, + hass.config.language, + voip_device, + Context(), + opus_payload_type=123, + ) + rtp_protocol.transport = Mock() - original_send_tts = satellite._send_tts + original_send_tts = rtp_protocol._send_tts async def send_tts(*args, **kwargs): # Call original then end test successfully @@ -600,16 +433,16 @@ async def test_tts_wrong_extension( done.set() - satellite._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] + rtp_protocol._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] # silence - satellite.on_chunk(bytes(_ONE_SECOND)) + rtp_protocol.on_chunk(bytes(_ONE_SECOND)) # "speech" - satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) + rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 
2)) # silence (assumes relaxed VAD sensitivity) - satellite.on_chunk(bytes(_ONE_SECOND * 4)) + rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) # Wait for mock pipeline to exhaust the audio stream async with asyncio.timeout(1): @@ -618,34 +451,23 @@ async def test_tts_wrong_extension( async def test_tts_wrong_wav_format( hass: HomeAssistant, - voip_devices: VoIPDevices, voip_device: VoIPDevice, ) -> None: """Test that TTS will only stream WAV audio with a specific format.""" assert await async_setup_component(hass, "voip", {}) - satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) - assert isinstance(satellite, VoipAssistSatellite) + def is_speech(self, chunk): + """Anything non-zero is speech.""" + return sum(chunk) > 0 done = asyncio.Event() async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - in_command = False - async for chunk in stt_stream: - if sum(chunk) > 0: - in_command = True - elif in_command: - break # done with command - - # Fake STT result - event_callback( - assist_pipeline.PipelineEvent( - type=assist_pipeline.PipelineEventType.STT_END, - data={"stt_output": {"text": "fake-text"}}, - ) - ) + async for _chunk in stt_stream: + # Stream will end when VAD detects end of "speech" + pass # Fake intent result event_callback( @@ -683,17 +505,28 @@ async def test_tts_wrong_wav_format( with ( patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", + new=is_speech, + ), + patch( + "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.assist_satellite.tts.async_get_media_source_audio", + "homeassistant.components.voip.voip.tts.async_get_media_source_audio", new=async_get_media_source_audio, ), ): - satellite.transport = Mock() + rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( + hass, + hass.config.language, + voip_device, + Context(), + opus_payload_type=123, + ) + rtp_protocol.transport = Mock() - original_send_tts = satellite._send_tts + original_send_tts = rtp_protocol._send_tts async def send_tts(*args, **kwargs): # Call original then end test successfully @@ -702,16 +535,16 @@ async def test_tts_wrong_wav_format( done.set() - satellite._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] + rtp_protocol._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] # silence - satellite.on_chunk(bytes(_ONE_SECOND)) + rtp_protocol.on_chunk(bytes(_ONE_SECOND)) # "speech" - satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) + rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) # silence (assumes relaxed VAD sensitivity) - satellite.on_chunk(bytes(_ONE_SECOND * 4)) + rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) # Wait for mock pipeline to exhaust the audio stream async with asyncio.timeout(1): @@ -720,32 +553,21 @@ async def test_tts_wrong_wav_format( async def test_empty_tts_output( hass: HomeAssistant, - voip_devices: VoIPDevices, voip_device: VoIPDevice, ) -> None: """Test that TTS will not stream when output is empty.""" assert await async_setup_component(hass, "voip", {}) - satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) - assert isinstance(satellite, VoipAssistSatellite) + def is_speech(self, chunk): + """Anything non-zero is speech.""" + return sum(chunk) > 0 async def 
async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - in_command = False - async for chunk in stt_stream: - if sum(chunk) > 0: - in_command = True - elif in_command: - break # done with command - - # Fake STT result - event_callback( - assist_pipeline.PipelineEvent( - type=assist_pipeline.PipelineEventType.STT_END, - data={"stt_output": {"text": "fake-text"}}, - ) - ) + async for _chunk in stt_stream: + # Stream will end when VAD detects end of "speech" + pass # Fake intent result event_callback( @@ -769,78 +591,37 @@ async def test_empty_tts_output( with ( patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", + new=is_speech, + ), + patch( + "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.assist_satellite.VoipAssistSatellite._send_tts", + "homeassistant.components.voip.voip.PipelineRtpDatagramProtocol._send_tts", ) as mock_send_tts, ): - satellite.transport = Mock() + rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( + hass, + hass.config.language, + voip_device, + Context(), + opus_payload_type=123, + ) + rtp_protocol.transport = Mock() # silence - satellite.on_chunk(bytes(_ONE_SECOND)) + rtp_protocol.on_chunk(bytes(_ONE_SECOND)) # "speech" - satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) + rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) # silence (assumes relaxed VAD sensitivity) - satellite.on_chunk(bytes(_ONE_SECOND * 4)) + rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) # Wait for mock pipeline to finish async with asyncio.timeout(1): - await satellite._tts_done.wait() + await rtp_protocol._tts_done.wait() mock_send_tts.assert_not_called() - - -async def test_pipeline_error( - hass: HomeAssistant, - voip_devices: VoIPDevices, - voip_device: VoIPDevice, - snapshot: SnapshotAssertion, -) -> None: - """Test that a pipeline error causes the error tone to be played.""" - assert await async_setup_component(hass, "voip", {}) - - satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) - assert isinstance(satellite, VoipAssistSatellite) - - done = asyncio.Event() - played_audio_bytes = b"" - - async def async_pipeline_from_audio_stream(*args, **kwargs): - # Fake error - event_callback = kwargs["event_callback"] - event_callback( - assist_pipeline.PipelineEvent( - type=assist_pipeline.PipelineEventType.ERROR, - data={"code": "error-code", "message": "error message"}, - ) - ) - - async def async_send_audio(audio_bytes: bytes, **kwargs): - nonlocal played_audio_bytes - - # Should be error.pcm from components/voip - played_audio_bytes = audio_bytes - done.set() - - with ( - patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - ): - satellite._tones = Tones.ERROR - satellite.transport = Mock() - satellite._async_send_audio = AsyncMock(side_effect=async_send_audio) # type: ignore[method-assign] - - satellite.on_chunk(bytes(_ONE_SECOND)) - - # Wait for error tone to be played - async with asyncio.timeout(1): - await done.wait() - - assert sum(played_audio_bytes) > 0 - assert played_audio_bytes == snapshot() diff --git a/tests/components/volvooncall/test_config_flow.py b/tests/components/volvooncall/test_config_flow.py index 5268432c17e..8bf8bcc7412 100644 --- 
a/tests/components/volvooncall/test_config_flow.py +++ b/tests/components/volvooncall/test_config_flow.py @@ -153,7 +153,13 @@ async def test_reauth(hass: HomeAssistant) -> None: ) first_entry.add_to_hass(hass) - result = await first_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": first_entry.entry_id, + }, + ) # the first form is just the confirmation prompt assert result["type"] is FlowResultType.FORM diff --git a/tests/components/vulcan/test_config_flow.py b/tests/components/vulcan/test_config_flow.py index a51d9727126..3311f3c71b2 100644 --- a/tests/components/vulcan/test_config_flow.py +++ b/tests/components/vulcan/test_config_flow.py @@ -137,13 +137,14 @@ async def test_config_flow_reauth_success( mock_student.return_value = [ Student.load(load_fixture("fake_student_1.json", "vulcan")) ] - entry = MockConfigEntry( + MockConfigEntry( domain=const.DOMAIN, unique_id="0", data={"student_id": "0"}, + ).add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + const.DOMAIN, context={"source": config_entries.SOURCE_REAUTH} ) - entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -175,13 +176,14 @@ async def test_config_flow_reauth_without_matching_entries( mock_student.return_value = [ Student.load(load_fixture("fake_student_1.json", "vulcan")) ] - entry = MockConfigEntry( + MockConfigEntry( domain=const.DOMAIN, unique_id="0", data={"student_id": "1"}, + ).add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + const.DOMAIN, context={"source": config_entries.SOURCE_REAUTH} ) - entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -204,13 +206,9 @@ async def test_config_flow_reauth_with_errors( """Test reauth config flow with errors.""" mock_keystore.return_value = fake_keystore mock_account.return_value = fake_account - entry = MockConfigEntry( - domain=const.DOMAIN, - unique_id="0", - data={"student_id": "0"}, + result = await hass.config_entries.flow.async_init( + const.DOMAIN, context={"source": config_entries.SOURCE_REAUTH} ) - entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -310,7 +308,7 @@ async def test_multiple_config_entries( unique_id="123456", data=json.loads(load_fixture("fake_config_entry_data.json", "vulcan")), ).add_to_hass(hass) - await register.register("token", "region", "000000") + await register.register(hass, "token", "region", "000000") result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -703,7 +701,7 @@ async def test_student_already_exists( | {"student_id": "0"}, ).add_to_hass(hass) - await register.register("token", "region", "000000") + await register.register(hass, "token", "region", "000000") result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_USER} diff --git a/tests/components/wake_on_lan/conftest.py b/tests/components/wake_on_lan/conftest.py index 8a1cb3f41eb..cec3076d83e 100644 --- a/tests/components/wake_on_lan/conftest.py +++ b/tests/components/wake_on_lan/conftest.py @@ -2,24 +2,14 @@ from __future__ import annotations -from 
collections.abc import Generator -from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest - -from homeassistant.components.wake_on_lan.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - -DEFAULT_MAC = "00:01:02:03:04:05" +from typing_extensions import Generator @pytest.fixture -def mock_send_magic_packet() -> Generator[AsyncMock]: +def mock_send_magic_packet() -> AsyncMock: """Mock magic packet.""" with patch("wakeonlan.send_magic_packet") as mock_send: yield mock_send @@ -37,48 +27,3 @@ def mock_subprocess_call(subprocess_call_return_value: int) -> Generator[MagicMo with patch("homeassistant.components.wake_on_lan.switch.sp.call") as mock_sp: mock_sp.return_value = subprocess_call_return_value yield mock_sp - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Automatically path uuid generator.""" - with patch( - "homeassistant.components.wake_on_lan.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture(name="get_config") -async def get_config_to_integration_load() -> dict[str, Any]: - """Return configuration. - - To override the config, tests can be marked with: - @pytest.mark.parametrize("get_config", [{...}]) - """ - return { - CONF_MAC: DEFAULT_MAC, - CONF_BROADCAST_ADDRESS: "255.255.255.255", - CONF_BROADCAST_PORT: 9, - } - - -@pytest.fixture(name="loaded_entry") -async def load_integration( - hass: HomeAssistant, get_config: dict[str, Any] -) -> MockConfigEntry: - """Set up the Statistics integration in Home Assistant.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - title=f"Wake on LAN {DEFAULT_MAC}", - source=SOURCE_USER, - options=get_config, - entry_id="1", - ) - - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry diff --git a/tests/components/wake_on_lan/test_button.py b/tests/components/wake_on_lan/test_button.py deleted file mode 100644 index abcae686a1b..00000000000 --- a/tests/components/wake_on_lan/test_button.py +++ /dev/null @@ -1,54 +0,0 @@ -"""The tests for the wake on lan button platform.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock - -from freezegun.api import FrozenDateTimeFactory - -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.util import dt as dt_util - -from tests.common import MockConfigEntry - - -async def test_state( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - loaded_entry: MockConfigEntry, -) -> None: - """Test button default state.""" - - state = hass.states.get("button.wake_on_lan_00_01_02_03_04_05") - assert state is not None - assert state.state == STATE_UNKNOWN - - entry = entity_registry.async_get("button.wake_on_lan_00_01_02_03_04_05") - assert entry - assert entry.unique_id == "00:01:02:03:04:05" - - -async def test_service_calls( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - loaded_entry: MockConfigEntry, - mock_send_magic_packet: AsyncMock, -) -> None: - """Test service call.""" - - now = dt_util.parse_datetime("2021-01-09 12:00:00+00:00") - 
freezer.move_to(now) - - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: "button.wake_on_lan_00_01_02_03_04_05"}, - blocking=True, - ) - - assert ( - hass.states.get("button.wake_on_lan_00_01_02_03_04_05").state == now.isoformat() - ) diff --git a/tests/components/wake_on_lan/test_config_flow.py b/tests/components/wake_on_lan/test_config_flow.py deleted file mode 100644 index b565fba505e..00000000000 --- a/tests/components/wake_on_lan/test_config_flow.py +++ /dev/null @@ -1,109 +0,0 @@ -"""Test the Scrape config flow.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock - -from homeassistant import config_entries -from homeassistant.components.wake_on_lan.const import DOMAIN -from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .conftest import DEFAULT_MAC - -from tests.common import MockConfigEntry - - -async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: - """Test we get the form.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_MAC: DEFAULT_MAC, - CONF_BROADCAST_ADDRESS: "255.255.255.255", - CONF_BROADCAST_PORT: 9, - }, - ) - await hass.async_block_till_done(wait_background_tasks=True) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["version"] == 1 - assert result["options"] == { - CONF_MAC: DEFAULT_MAC, - CONF_BROADCAST_ADDRESS: "255.255.255.255", - CONF_BROADCAST_PORT: 9, - } - - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: - """Test options flow.""" - - result = await hass.config_entries.options.async_init(loaded_entry.entry_id) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - CONF_BROADCAST_ADDRESS: "192.168.255.255", - CONF_BROADCAST_PORT: 10, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { - CONF_MAC: DEFAULT_MAC, - CONF_BROADCAST_ADDRESS: "192.168.255.255", - CONF_BROADCAST_PORT: 10, - } - - await hass.async_block_till_done() - - assert loaded_entry.options == { - CONF_MAC: DEFAULT_MAC, - CONF_BROADCAST_ADDRESS: "192.168.255.255", - CONF_BROADCAST_PORT: 10, - } - - # Check the entity was updated, no new entity was created - assert len(hass.states.async_all()) == 1 - - state = hass.states.get("button.wake_on_lan_00_01_02_03_04_05") - assert state is not None - - -async def test_entry_already_exist( - hass: HomeAssistant, loaded_entry: MockConfigEntry -) -> None: - """Test abort when entry already exist.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_MAC: DEFAULT_MAC, - CONF_BROADCAST_ADDRESS: "255.255.255.255", - CONF_BROADCAST_PORT: 9, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is 
FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/wake_on_lan/test_init.py b/tests/components/wake_on_lan/test_init.py index 1784f8ef12d..8cfb0e6491e 100644 --- a/tests/components/wake_on_lan/test_init.py +++ b/tests/components/wake_on_lan/test_init.py @@ -8,21 +8,9 @@ import pytest import voluptuous as vol from homeassistant.components.wake_on_lan import DOMAIN, SERVICE_SEND_MAGIC_PACKET -from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry - - -async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: - """Test unload an entry.""" - - assert loaded_entry.state is ConfigEntryState.LOADED - assert await hass.config_entries.async_unload(loaded_entry.entry_id) - await hass.async_block_till_done() - assert loaded_entry.state is ConfigEntryState.NOT_LOADED - async def test_send_magic_packet(hass: HomeAssistant) -> None: """Test of send magic packet service call.""" diff --git a/tests/components/wake_on_lan/test_switch.py b/tests/components/wake_on_lan/test_switch.py index 9a478b46175..77e1ba55519 100644 --- a/tests/components/wake_on_lan/test_switch.py +++ b/tests/components/wake_on_lan/test_switch.py @@ -13,7 +13,6 @@ from homeassistant.const import ( STATE_ON, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component from tests.common import async_mock_service @@ -65,7 +64,7 @@ async def test_broadcast_config_ip_and_port( hass: HomeAssistant, mock_send_magic_packet: AsyncMock ) -> None: """Test with broadcast address and broadcast port config.""" - mac = "00:01:02:03:04:05" + mac = "00-01-02-03-04-05" broadcast_address = "255.255.255.255" port = 999 @@ -93,7 +92,6 @@ async def test_broadcast_config_ip_and_port( blocking=True, ) - mac = dr.format_mac(mac) mock_send_magic_packet.assert_called_with( mac, ip_address=broadcast_address, port=port ) @@ -104,7 +102,7 @@ async def test_broadcast_config_ip( ) -> None: """Test with only broadcast address.""" - mac = "00:01:02:03:04:05" + mac = "00-01-02-03-04-05" broadcast_address = "255.255.255.255" assert await async_setup_component( @@ -130,7 +128,6 @@ async def test_broadcast_config_ip( blocking=True, ) - mac = dr.format_mac(mac) mock_send_magic_packet.assert_called_with(mac, ip_address=broadcast_address) @@ -139,7 +136,7 @@ async def test_broadcast_config_port( ) -> None: """Test with only broadcast port config.""" - mac = "00:01:02:03:04:05" + mac = "00-01-02-03-04-05" port = 999 assert await async_setup_component( @@ -159,7 +156,6 @@ async def test_broadcast_config_port( blocking=True, ) - mac = dr.format_mac(mac) mock_send_magic_packet.assert_called_with(mac, port=port) diff --git a/tests/components/wake_word/test_init.py b/tests/components/wake_word/test_init.py index cdaf7e0e3f0..c19d3e7032f 100644 --- a/tests/components/wake_word/test_init.py +++ b/tests/components/wake_word/test_init.py @@ -1,13 +1,14 @@ """Test wake_word component setup.""" import asyncio -from collections.abc import AsyncIterable, Generator +from collections.abc import AsyncIterable from functools import partial from pathlib import Path from unittest.mock import patch from freezegun import freeze_time import pytest +from typing_extensions import Generator from homeassistant.components import wake_word from homeassistant.config_entries import 
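# Sketch of the wake_on_lan switch tests above: set the switch up from YAML, call turn_on,
# and assert on the wakeonlan.send_magic_packet mock provided by the conftest fixture shown
# earlier. The YAML keys and the switch.wake_on_lan entity id are assumptions inferred from
# the constants in this diff; the dash-separated MAC is passed through to wakeonlan unchanged
# on this side of the diff.
from homeassistant.setup import async_setup_component


async def test_magic_packet_sketch(hass, mock_send_magic_packet) -> None:
    """Assert the magic packet call arguments after turning the switch on."""
    mac = "00-01-02-03-04-05"
    broadcast_address = "255.255.255.255"
    port = 999

    assert await async_setup_component(
        hass,
        "switch",
        {
            "switch": {
                "platform": "wake_on_lan",
                "mac": mac,
                "broadcast_address": broadcast_address,
                "broadcast_port": port,
            }
        },
    )
    await hass.async_block_till_done()

    await hass.services.async_call(
        "switch",
        "turn_on",
        {"entity_id": "switch.wake_on_lan"},
        blocking=True,
    )

    mock_send_magic_packet.assert_called_with(
        mac, ip_address=broadcast_address, port=port
    )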
ConfigEntry, ConfigEntryState, ConfigFlow diff --git a/tests/components/wallbox/__init__.py b/tests/components/wallbox/__init__.py index 9ec10dc72aa..f21e895b3a7 100644 --- a/tests/components/wallbox/__init__.py +++ b/tests/components/wallbox/__init__.py @@ -1,6 +1,7 @@ """Tests for the Wallbox integration.""" from http import HTTPStatus +import json import requests_mock @@ -13,15 +14,11 @@ from homeassistant.components.wallbox.const import ( CHARGER_CURRENT_VERSION_KEY, CHARGER_DATA_KEY, CHARGER_ENERGY_PRICE_KEY, - CHARGER_FEATURES_KEY, CHARGER_LOCKED_UNLOCKED_KEY, CHARGER_MAX_AVAILABLE_POWER_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, - CHARGER_MAX_ICP_CURRENT_KEY, CHARGER_NAME_KEY, CHARGER_PART_NUMBER_KEY, - CHARGER_PLAN_KEY, - CHARGER_POWER_BOOST_KEY, CHARGER_SERIAL_NUMBER_KEY, CHARGER_SOFTWARE_KEY, CHARGER_STATUS_ID_KEY, @@ -48,8 +45,6 @@ test_response = { CHARGER_PART_NUMBER_KEY: "PLP1-0-2-4-9-002-E", CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"}, CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"}, - CHARGER_MAX_ICP_CURRENT_KEY: 20, - CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]}, }, } @@ -69,8 +64,6 @@ test_response_bidir = { CHARGER_PART_NUMBER_KEY: "QSP1-0-2-4-9-002-E", CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"}, CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"}, - CHARGER_MAX_ICP_CURRENT_KEY: 20, - CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]}, }, } @@ -120,7 +113,7 @@ async def setup_integration(hass: HomeAssistant, entry: MockConfigEntry) -> None ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, + json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), status_code=HTTPStatus.OK, ) @@ -143,7 +136,7 @@ async def setup_integration_bidir(hass: HomeAssistant, entry: MockConfigEntry) - ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, + json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), status_code=HTTPStatus.OK, ) @@ -168,7 +161,7 @@ async def setup_integration_connection_error( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, + json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), status_code=HTTPStatus.FORBIDDEN, ) diff --git a/tests/components/wallbox/const.py b/tests/components/wallbox/const.py index a86ae9fc3b9..452b3af0af8 100644 --- a/tests/components/wallbox/const.py +++ b/tests/components/wallbox/const.py @@ -9,7 +9,6 @@ STATUS = "status" MOCK_NUMBER_ENTITY_ID = "number.wallbox_wallboxname_maximum_charging_current" MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID = "number.wallbox_wallboxname_energy_price" -MOCK_NUMBER_ENTITY_ICP_CURRENT_ID = "number.wallbox_wallboxname_maximum_icp_current" MOCK_LOCK_ENTITY_ID = "lock.wallbox_wallboxname_lock" MOCK_SENSOR_CHARGING_SPEED_ID = "sensor.wallbox_wallboxname_charging_speed" MOCK_SENSOR_CHARGING_POWER_ID = "sensor.wallbox_wallboxname_charging_power" diff --git a/tests/components/wallbox/test_config_flow.py b/tests/components/wallbox/test_config_flow.py index 467e20c51c1..c0ff0b19c94 100644 --- a/tests/components/wallbox/test_config_flow.py +++ b/tests/components/wallbox/test_config_flow.py @@ -160,7 +160,13 @@ async def test_form_reauth(hass: HomeAssistant, entry: MockConfigEntry) -> None: status_code=200, ) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": 
config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -186,15 +192,7 @@ async def test_form_reauth_invalid(hass: HomeAssistant, entry: MockConfigEntry) with requests_mock.Mocker() as mock_request: mock_request.get( "https://user-api.wall-box.com/users/signin", - json={ - "jwt": "fakekeyhere", - "refresh_token": "refresh_fakekeyhere", - "user_id": 12345, - "ttl": 145656758, - "refresh_token_ttl": 145756758, - "error": False, - "status": 200, - }, + text='{"jwt":"fakekeyhere","refresh_token": "refresh_fakekeyhere","user_id":12345,"ttl":145656758,"refresh_token_ttl":145756758,"error":false,"status":200}', status_code=200, ) mock_request.get( @@ -203,7 +201,13 @@ async def test_form_reauth_invalid(hass: HomeAssistant, entry: MockConfigEntry) status_code=200, ) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/wallbox/test_init.py b/tests/components/wallbox/test_init.py index b4b5a199243..f1362489c50 100644 --- a/tests/components/wallbox/test_init.py +++ b/tests/components/wallbox/test_init.py @@ -1,5 +1,7 @@ """Test Wallbox Init Component.""" +import json + import requests_mock from homeassistant.components.wallbox.const import ( @@ -88,7 +90,7 @@ async def test_wallbox_refresh_failed_invalid_auth( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, + json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), status_code=403, ) diff --git a/tests/components/wallbox/test_lock.py b/tests/components/wallbox/test_lock.py index 1d48e53b515..637f0c827f4 100644 --- a/tests/components/wallbox/test_lock.py +++ b/tests/components/wallbox/test_lock.py @@ -1,5 +1,7 @@ """Test Wallbox Lock component.""" +import json + import pytest import requests_mock @@ -36,7 +38,7 @@ async def test_wallbox_lock_class(hass: HomeAssistant, entry: MockConfigEntry) - ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json={CHARGER_LOCKED_UNLOCKED_KEY: False}, + json=json.loads(json.dumps({CHARGER_LOCKED_UNLOCKED_KEY: False})), status_code=200, ) @@ -58,6 +60,8 @@ async def test_wallbox_lock_class(hass: HomeAssistant, entry: MockConfigEntry) - blocking=True, ) + await hass.config_entries.async_unload(entry.entry_id) + async def test_wallbox_lock_class_connection_error( hass: HomeAssistant, entry: MockConfigEntry @@ -74,7 +78,7 @@ async def test_wallbox_lock_class_connection_error( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json={CHARGER_LOCKED_UNLOCKED_KEY: False}, + json=json.loads(json.dumps({CHARGER_LOCKED_UNLOCKED_KEY: False})), status_code=404, ) @@ -97,6 +101,8 @@ async def test_wallbox_lock_class_connection_error( blocking=True, ) + await hass.config_entries.async_unload(entry.entry_id) + async def test_wallbox_lock_class_authentication_error( hass: HomeAssistant, entry: MockConfigEntry @@ -109,6 +115,8 @@ async def test_wallbox_lock_class_authentication_error( assert state is None + await hass.config_entries.async_unload(entry.entry_id) + async def test_wallbox_lock_class_platform_not_ready( hass: HomeAssistant, entry: MockConfigEntry @@ -120,3 +128,5 @@ async def test_wallbox_lock_class_platform_not_ready( state = hass.states.get(MOCK_LOCK_ENTITY_ID) assert 
state is None + + await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/wallbox/test_number.py b/tests/components/wallbox/test_number.py index c319668c161..5d782224ce5 100644 --- a/tests/components/wallbox/test_number.py +++ b/tests/components/wallbox/test_number.py @@ -1,15 +1,14 @@ """Test Wallbox Switch component.""" +import json + import pytest import requests_mock from homeassistant.components.input_number import ATTR_VALUE, SERVICE_SET_VALUE -from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN -from homeassistant.components.wallbox import InvalidAuth from homeassistant.components.wallbox.const import ( CHARGER_ENERGY_PRICE_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, - CHARGER_MAX_ICP_CURRENT_KEY, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -21,11 +20,7 @@ from . import ( setup_integration_bidir, setup_integration_platform_not_ready, ) -from .const import ( - MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID, - MOCK_NUMBER_ENTITY_ICP_CURRENT_ID, - MOCK_NUMBER_ENTITY_ID, -) +from .const import MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID, MOCK_NUMBER_ENTITY_ID from tests.common import MockConfigEntry @@ -45,7 +40,7 @@ async def test_wallbox_number_class( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, + json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), status_code=200, ) state = hass.states.get(MOCK_NUMBER_ENTITY_ID) @@ -61,6 +56,7 @@ async def test_wallbox_number_class( }, blocking=True, ) + await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_bidir( @@ -73,6 +69,7 @@ async def test_wallbox_number_class_bidir( state = hass.states.get(MOCK_NUMBER_ENTITY_ID) assert state.attributes["min"] == -25 assert state.attributes["max"] == 25 + await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_energy_class( @@ -91,7 +88,7 @@ async def test_wallbox_number_energy_class( mock_request.post( "https://api.wall-box.com/chargers/config/12345", - json={CHARGER_ENERGY_PRICE_KEY: 1.1}, + json=json.loads(json.dumps({CHARGER_ENERGY_PRICE_KEY: 1.1})), status_code=200, ) @@ -104,6 +101,7 @@ async def test_wallbox_number_energy_class( }, blocking=True, ) + await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_connection_error( @@ -121,7 +119,7 @@ async def test_wallbox_number_class_connection_error( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, + json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), status_code=404, ) @@ -135,6 +133,7 @@ async def test_wallbox_number_class_connection_error( }, blocking=True, ) + await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_energy_price_connection_error( @@ -152,7 +151,7 @@ async def test_wallbox_number_class_energy_price_connection_error( ) mock_request.post( "https://api.wall-box.com/chargers/config/12345", - json={CHARGER_ENERGY_PRICE_KEY: 1.1}, + json=json.loads(json.dumps({CHARGER_ENERGY_PRICE_KEY: 1.1})), status_code=404, ) @@ -166,6 +165,7 @@ async def test_wallbox_number_class_energy_price_connection_error( }, blocking=True, ) + await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_energy_price_auth_error( @@ -183,7 +183,7 @@ async def test_wallbox_number_class_energy_price_auth_error( ) mock_request.post( 
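# Sketch of the wallbox test shape used repeatedly above: register the sign-in and charger
# endpoints on requests_mock, drive the entity with a service call, then unload the config
# entry so the data-update machinery is torn down. URLs and keys are the ones visible in the
# hunks; authorisation_response, setup_integration and MOCK_LOCK_ENTITY_ID are helpers from
# the wallbox test package as shown above (their exact import locations are assumed). Note
# that json=json.loads(json.dumps(payload)) in the hunks is only a round-trip and is
# equivalent to passing the dict directly, as done here.
import requests_mock

from homeassistant.components.wallbox.const import CHARGER_LOCKED_UNLOCKED_KEY

from . import authorisation_response, setup_integration
from .const import MOCK_LOCK_ENTITY_ID


async def test_lock_unlock_sketch(hass, entry) -> None:
    """Illustrative only: mirrors the mocking and unload pattern of the tests above."""
    await setup_integration(hass, entry)

    with requests_mock.Mocker() as mock_request:
        mock_request.get(
            "https://user-api.wall-box.com/users/signin",
            json=authorisation_response,
            status_code=200,
        )
        mock_request.put(
            "https://api.wall-box.com/v2/charger/12345",
            json={CHARGER_LOCKED_UNLOCKED_KEY: False},
            status_code=200,
        )

        await hass.services.async_call(
            "lock",
            "unlock",
            {"entity_id": MOCK_LOCK_ENTITY_ID},
            blocking=True,
        )

    await hass.config_entries.async_unload(entry.entry_id)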
"https://api.wall-box.com/chargers/config/12345", - json={CHARGER_ENERGY_PRICE_KEY: 1.1}, + json=json.loads(json.dumps({CHARGER_ENERGY_PRICE_KEY: 1.1})), status_code=403, ) @@ -197,6 +197,7 @@ async def test_wallbox_number_class_energy_price_auth_error( }, blocking=True, ) + await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_platform_not_ready( @@ -210,95 +211,4 @@ async def test_wallbox_number_class_platform_not_ready( assert state is None - -async def test_wallbox_number_class_icp_energy( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: - """Test wallbox sensor class.""" - - await setup_integration(hass, entry) - - with requests_mock.Mocker() as mock_request: - mock_request.get( - "https://user-api.wall-box.com/users/signin", - json=authorisation_response, - status_code=200, - ) - - mock_request.post( - "https://api.wall-box.com/chargers/config/12345", - json={CHARGER_MAX_ICP_CURRENT_KEY: 10}, - status_code=200, - ) - - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: MOCK_NUMBER_ENTITY_ICP_CURRENT_ID, - ATTR_VALUE: 10, - }, - blocking=True, - ) - - -async def test_wallbox_number_class_icp_energy_auth_error( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: - """Test wallbox sensor class.""" - - await setup_integration(hass, entry) - - with requests_mock.Mocker() as mock_request: - mock_request.get( - "https://user-api.wall-box.com/users/signin", - json=authorisation_response, - status_code=200, - ) - mock_request.post( - "https://api.wall-box.com/chargers/config/12345", - json={CHARGER_MAX_ICP_CURRENT_KEY: 10}, - status_code=403, - ) - - with pytest.raises(InvalidAuth): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: MOCK_NUMBER_ENTITY_ICP_CURRENT_ID, - ATTR_VALUE: 10, - }, - blocking=True, - ) - - -async def test_wallbox_number_class_icp_energy_connection_error( - hass: HomeAssistant, entry: MockConfigEntry -) -> None: - """Test wallbox sensor class.""" - - await setup_integration(hass, entry) - - with requests_mock.Mocker() as mock_request: - mock_request.get( - "https://user-api.wall-box.com/users/signin", - json=authorisation_response, - status_code=200, - ) - mock_request.post( - "https://api.wall-box.com/chargers/config/12345", - json={CHARGER_MAX_ICP_CURRENT_KEY: 10}, - status_code=404, - ) - - with pytest.raises(ConnectionError): - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - { - ATTR_ENTITY_ID: MOCK_NUMBER_ENTITY_ICP_CURRENT_ID, - ATTR_VALUE: 10, - }, - blocking=True, - ) + await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/wallbox/test_sensor.py b/tests/components/wallbox/test_sensor.py index 69d0cc57340..5a8b3c290c1 100644 --- a/tests/components/wallbox/test_sensor.py +++ b/tests/components/wallbox/test_sensor.py @@ -30,3 +30,5 @@ async def test_wallbox_sensor_class( # Test round with precision '0' works state = hass.states.get(MOCK_SENSOR_MAX_AVAILABLE_POWER) assert state.state == "25.0" + + await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/wallbox/test_switch.py b/tests/components/wallbox/test_switch.py index b7c3a81dc73..d06251db003 100644 --- a/tests/components/wallbox/test_switch.py +++ b/tests/components/wallbox/test_switch.py @@ -1,5 +1,7 @@ """Test Wallbox Lock component.""" +import json + import pytest import requests_mock @@ -34,7 +36,7 @@ async def test_wallbox_switch_class( ) mock_request.post( 
"https://api.wall-box.com/v3/chargers/12345/remote-action", - json={CHARGER_STATUS_ID_KEY: 193}, + json=json.loads(json.dumps({CHARGER_STATUS_ID_KEY: 193})), status_code=200, ) @@ -56,6 +58,8 @@ async def test_wallbox_switch_class( blocking=True, ) + await hass.config_entries.async_unload(entry.entry_id) + async def test_wallbox_switch_class_connection_error( hass: HomeAssistant, entry: MockConfigEntry @@ -72,7 +76,7 @@ async def test_wallbox_switch_class_connection_error( ) mock_request.post( "https://api.wall-box.com/v3/chargers/12345/remote-action", - json={CHARGER_STATUS_ID_KEY: 193}, + json=json.loads(json.dumps({CHARGER_STATUS_ID_KEY: 193})), status_code=404, ) @@ -95,6 +99,8 @@ async def test_wallbox_switch_class_connection_error( blocking=True, ) + await hass.config_entries.async_unload(entry.entry_id) + async def test_wallbox_switch_class_authentication_error( hass: HomeAssistant, entry: MockConfigEntry @@ -111,7 +117,7 @@ async def test_wallbox_switch_class_authentication_error( ) mock_request.post( "https://api.wall-box.com/v3/chargers/12345/remote-action", - json={CHARGER_STATUS_ID_KEY: 193}, + json=json.loads(json.dumps({CHARGER_STATUS_ID_KEY: 193})), status_code=403, ) @@ -133,3 +139,5 @@ async def test_wallbox_switch_class_authentication_error( }, blocking=True, ) + + await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/waqi/conftest.py b/tests/components/waqi/conftest.py index 75709d4f56e..b2e1a7d77d4 100644 --- a/tests/components/waqi/conftest.py +++ b/tests/components/waqi/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the World Air Quality Index (WAQI) tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.waqi.const import CONF_STATION_NUMBER, DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/water_heater/common.py b/tests/components/water_heater/common.py index e2fca153fe6..e0a8075f4cc 100644 --- a/tests/components/water_heater/common.py +++ b/tests/components/water_heater/common.py @@ -19,9 +19,7 @@ from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, ENTITY_MATCH_A from homeassistant.core import HomeAssistant -async def async_set_away_mode( - hass: HomeAssistant, away_mode: bool, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_set_away_mode(hass, away_mode, entity_id=ENTITY_MATCH_ALL): """Turn all or specified water_heater devices away mode on.""" data = {ATTR_AWAY_MODE: away_mode} @@ -32,11 +30,8 @@ async def async_set_away_mode( async def async_set_temperature( - hass: HomeAssistant, - temperature: float, - entity_id: str = ENTITY_MATCH_ALL, - operation_mode: str | None = None, -) -> None: + hass, temperature=None, entity_id=ENTITY_MATCH_ALL, operation_mode=None +): """Set new target temperature.""" kwargs = { key: value @@ -53,9 +48,7 @@ async def async_set_temperature( ) -async def async_set_operation_mode( - hass: HomeAssistant, operation_mode: str, entity_id: str = ENTITY_MATCH_ALL -) -> None: +async def async_set_operation_mode(hass, operation_mode, entity_id=ENTITY_MATCH_ALL): """Set new target operation mode.""" data = {ATTR_OPERATION_MODE: operation_mode} diff --git a/tests/components/water_heater/conftest.py b/tests/components/water_heater/conftest.py index df16e5cc6da..619d5e5c359 100644 --- a/tests/components/water_heater/conftest.py +++ b/tests/components/water_heater/conftest.py @@ -1,8 +1,7 @@ """Fixtures for water heater platform 
tests.""" -from collections.abc import Generator - import pytest +from typing_extensions import Generator from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant diff --git a/tests/components/water_heater/test_init.py b/tests/components/water_heater/test_init.py index 4e0f860366c..f883cf47b19 100644 --- a/tests/components/water_heater/test_init.py +++ b/tests/components/water_heater/test_init.py @@ -22,7 +22,6 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from tests.common import ( @@ -43,7 +42,7 @@ async def test_set_temp_schema_no_req( """Test the set temperature schema with missing required data.""" domain = "climate" service = "test_set_temperature" - schema = cv.make_entity_service_schema(SET_TEMPERATURE_SCHEMA) + schema = SET_TEMPERATURE_SCHEMA calls = async_mock_service(hass, domain, service, schema) data = {"hvac_mode": "off", "entity_id": ["climate.test_id"]} @@ -60,7 +59,7 @@ async def test_set_temp_schema( """Test the set temperature schema with ok required data.""" domain = "water_heater" service = "test_set_temperature" - schema = cv.make_entity_service_schema(SET_TEMPERATURE_SCHEMA) + schema = SET_TEMPERATURE_SCHEMA calls = async_mock_service(hass, domain, service, schema) data = { diff --git a/tests/components/watttime/conftest.py b/tests/components/watttime/conftest.py index 650d07b36a1..0b7403d45fc 100644 --- a/tests/components/watttime/conftest.py +++ b/tests/components/watttime/conftest.py @@ -1,7 +1,6 @@ """Define test fixtures for WattTime.""" -from collections.abc import AsyncGenerator -from typing import Any +import json from unittest.mock import AsyncMock, Mock, patch import pytest @@ -21,17 +20,13 @@ from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME, ) -from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from homeassistant.util.json import JsonObjectType -from tests.common import MockConfigEntry, load_json_object_fixture +from tests.common import MockConfigEntry, load_fixture @pytest.fixture(name="client") -def client_fixture( - get_grid_region: AsyncMock, data_realtime_emissions: JsonObjectType -) -> Mock: +def client_fixture(get_grid_region, data_realtime_emissions): """Define an aiowatttime client.""" client = Mock() client.emissions.async_get_grid_region = get_grid_region @@ -42,7 +37,7 @@ def client_fixture( @pytest.fixture(name="config_auth") -def config_auth_fixture() -> dict[str, Any]: +def config_auth_fixture(hass): """Define an auth config entry data fixture.""" return { CONF_USERNAME: "user", @@ -51,7 +46,7 @@ def config_auth_fixture() -> dict[str, Any]: @pytest.fixture(name="config_coordinates") -def config_coordinates_fixture() -> dict[str, Any]: +def config_coordinates_fixture(hass): """Define a coordinates config entry data fixture.""" return { CONF_LATITUDE: 32.87336, @@ -60,7 +55,7 @@ def config_coordinates_fixture() -> dict[str, Any]: @pytest.fixture(name="config_location_type") -def config_location_type_fixture() -> dict[str, Any]: +def config_location_type_fixture(hass): """Define a location type config entry data fixture.""" return { CONF_LOCATION_TYPE: LOCATION_TYPE_COORDINATES, @@ -68,9 +63,7 @@ def config_location_type_fixture() -> dict[str, Any]: 
@pytest.fixture(name="config_entry") -def config_entry_fixture( - hass: HomeAssistant, config_auth: dict[str, Any], config_coordinates: dict[str, Any] -) -> MockConfigEntry: +def config_entry_fixture(hass, config_auth, config_coordinates): """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -89,30 +82,25 @@ def config_entry_fixture( @pytest.fixture(name="data_grid_region", scope="package") -def data_grid_region_fixture() -> JsonObjectType: +def data_grid_region_fixture(): """Define grid region data.""" - return load_json_object_fixture("grid_region_data.json", "watttime") + return json.loads(load_fixture("grid_region_data.json", "watttime")) @pytest.fixture(name="data_realtime_emissions", scope="package") -def data_realtime_emissions_fixture() -> JsonObjectType: +def data_realtime_emissions_fixture(): """Define realtime emissions data.""" - return load_json_object_fixture("realtime_emissions_data.json", "watttime") + return json.loads(load_fixture("realtime_emissions_data.json", "watttime")) @pytest.fixture(name="get_grid_region") -def get_grid_region_fixture(data_grid_region: JsonObjectType) -> AsyncMock: +def get_grid_region_fixture(data_grid_region): """Define an aiowatttime method to get grid region data.""" return AsyncMock(return_value=data_grid_region) @pytest.fixture(name="setup_watttime") -async def setup_watttime_fixture( - hass: HomeAssistant, - client: Mock, - config_auth: dict[str, Any], - config_coordinates: dict[str, Any], -) -> AsyncGenerator[None]: +async def setup_watttime_fixture(hass, client, config_auth, config_coordinates): """Define a fixture to set up WattTime.""" with ( patch( diff --git a/tests/components/watttime/snapshots/test_diagnostics.ambr b/tests/components/watttime/snapshots/test_diagnostics.ambr index 0c137acc36b..2ed35c19ad1 100644 --- a/tests/components/watttime/snapshots/test_diagnostics.ambr +++ b/tests/components/watttime/snapshots/test_diagnostics.ambr @@ -18,8 +18,6 @@ 'username': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'watttime', 'minor_version': 1, 'options': dict({ diff --git a/tests/components/watttime/test_config_flow.py b/tests/components/watttime/test_config_flow.py index 5087717491f..f8eee6b48bf 100644 --- a/tests/components/watttime/test_config_flow.py +++ b/tests/components/watttime/test_config_flow.py @@ -25,8 +25,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from tests.common import MockConfigEntry - @pytest.mark.parametrize( ("exc", "error"), @@ -146,16 +144,21 @@ async def test_show_form_user(hass: HomeAssistant) -> None: async def test_step_reauth( - hass: HomeAssistant, - config_entry: MockConfigEntry, - setup_watttime, + hass: HomeAssistant, config_auth, config_coordinates, config_entry, setup_watttime ) -> None: """Test a full reauth flow.""" - result = await config_entry.start_reauth_flow(hass) with patch( "homeassistant.components.watttime.async_setup_entry", return_value=True, ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + data={ + **config_auth, + **config_coordinates, + }, + ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_PASSWORD: "password"}, diff --git a/tests/components/watttime/test_diagnostics.py b/tests/components/watttime/test_diagnostics.py index f4465a44d26..0526a64aedc 100644 --- a/tests/components/watttime/test_diagnostics.py +++ 
b/tests/components/watttime/test_diagnostics.py @@ -19,4 +19,4 @@ async def test_entry_diagnostics( """Test config entry diagnostics.""" assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry - ) == snapshot(exclude=props("entry_id", "created_at", "modified_at")) + ) == snapshot(exclude=props("entry_id")) diff --git a/tests/components/waze_travel_time/conftest.py b/tests/components/waze_travel_time/conftest.py index c9214ed8b71..c929fc219f9 100644 --- a/tests/components/waze_travel_time/conftest.py +++ b/tests/components/waze_travel_time/conftest.py @@ -5,7 +5,6 @@ from unittest.mock import patch import pytest from pywaze.route_calculator import CalcRoutesResponse, WRCError -from homeassistant.components.waze_travel_time.config_flow import WazeConfigFlow from homeassistant.components.waze_travel_time.const import DOMAIN from homeassistant.core import HomeAssistant @@ -20,7 +19,6 @@ async def mock_config_fixture(hass: HomeAssistant, data, options): data=data, options=options, entry_id="test", - version=WazeConfigFlow.VERSION, ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/waze_travel_time/test_config_flow.py b/tests/components/waze_travel_time/test_config_flow.py index 9ff7509a52c..5b1e3417bfc 100644 --- a/tests/components/waze_travel_time/test_config_flow.py +++ b/tests/components/waze_travel_time/test_config_flow.py @@ -3,7 +3,6 @@ import pytest from homeassistant import config_entries -from homeassistant.components.waze_travel_time.config_flow import WazeConfigFlow from homeassistant.components.waze_travel_time.const import ( CONF_AVOID_FERRIES, CONF_AVOID_SUBSCRIPTION_ROADS, @@ -61,13 +60,18 @@ async def test_reconfigure(hass: HomeAssistant) -> None: domain=DOMAIN, data=MOCK_CONFIG, options=DEFAULT_OPTIONS, - version=WazeConfigFlow.VERSION, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - reconfigure_result = await entry.start_reconfigure_flow(hass) + reconfigure_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + ) assert reconfigure_result["type"] is FlowResultType.FORM assert reconfigure_result["step_id"] == "user" @@ -99,7 +103,6 @@ async def test_options(hass: HomeAssistant) -> None: domain=DOMAIN, data=MOCK_CONFIG, options=DEFAULT_OPTIONS, - version=WazeConfigFlow.VERSION, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -116,8 +119,8 @@ async def test_options(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: ["exclude"], - CONF_INCL_FILTER: ["include"], + CONF_EXCL_FILTER: "exclude", + CONF_INCL_FILTER: "include", CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", @@ -129,8 +132,8 @@ async def test_options(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: ["exclude"], - CONF_INCL_FILTER: ["include"], + CONF_EXCL_FILTER: "exclude", + CONF_INCL_FILTER: "include", CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", @@ -140,8 +143,8 @@ async def test_options(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: ["exclude"], - CONF_INCL_FILTER: ["include"], + 
CONF_EXCL_FILTER: "exclude", + CONF_INCL_FILTER: "include", CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", @@ -206,14 +209,10 @@ async def test_invalid_config_entry( async def test_reset_filters(hass: HomeAssistant) -> None: """Test resetting inclusive and exclusive filters to empty string.""" options = {**DEFAULT_OPTIONS} - options[CONF_INCL_FILTER] = ["test"] - options[CONF_EXCL_FILTER] = ["test"] + options[CONF_INCL_FILTER] = "test" + options[CONF_EXCL_FILTER] = "test" config_entry = MockConfigEntry( - domain=DOMAIN, - data=MOCK_CONFIG, - options=options, - entry_id="test", - version=WazeConfigFlow.VERSION, + domain=DOMAIN, data=MOCK_CONFIG, options=options, entry_id="test" ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) @@ -229,6 +228,8 @@ async def test_reset_filters(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, + CONF_EXCL_FILTER: "", + CONF_INCL_FILTER: "", CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", @@ -239,8 +240,8 @@ async def test_reset_filters(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: [""], - CONF_INCL_FILTER: [""], + CONF_EXCL_FILTER: "", + CONF_INCL_FILTER: "", CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", diff --git a/tests/components/waze_travel_time/test_init.py b/tests/components/waze_travel_time/test_init.py index 9c59278ff99..58aaa8983a7 100644 --- a/tests/components/waze_travel_time/test_init.py +++ b/tests/components/waze_travel_time/test_init.py @@ -2,32 +2,11 @@ import pytest -from homeassistant.components.waze_travel_time.const import ( - CONF_AVOID_FERRIES, - CONF_AVOID_SUBSCRIPTION_ROADS, - CONF_AVOID_TOLL_ROADS, - CONF_EXCL_FILTER, - CONF_INCL_FILTER, - CONF_REALTIME, - CONF_UNITS, - CONF_VEHICLE_TYPE, - DEFAULT_AVOID_FERRIES, - DEFAULT_AVOID_SUBSCRIPTION_ROADS, - DEFAULT_AVOID_TOLL_ROADS, - DEFAULT_FILTER, - DEFAULT_OPTIONS, - DEFAULT_REALTIME, - DEFAULT_VEHICLE_TYPE, - DOMAIN, - METRIC_UNITS, -) -from homeassistant.config_entries import ConfigEntryState +from homeassistant.components.waze_travel_time.const import DEFAULT_OPTIONS from homeassistant.core import HomeAssistant from .const import MOCK_CONFIG -from tests.common import MockConfigEntry - @pytest.mark.parametrize( ("data", "options"), @@ -64,59 +43,3 @@ async def test_service_get_travel_times(hass: HomeAssistant) -> None: }, ] } - - -@pytest.mark.usefixtures("mock_update") -async def test_migrate_entry_v1_v2(hass: HomeAssistant) -> None: - """Test successful migration of entry data.""" - mock_entry = MockConfigEntry( - domain=DOMAIN, - version=1, - data=MOCK_CONFIG, - options={ - CONF_REALTIME: DEFAULT_REALTIME, - CONF_VEHICLE_TYPE: DEFAULT_VEHICLE_TYPE, - CONF_UNITS: METRIC_UNITS, - CONF_AVOID_FERRIES: DEFAULT_AVOID_FERRIES, - CONF_AVOID_SUBSCRIPTION_ROADS: DEFAULT_AVOID_SUBSCRIPTION_ROADS, - CONF_AVOID_TOLL_ROADS: DEFAULT_AVOID_TOLL_ROADS, - }, - ) - - mock_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() - - updated_entry = hass.config_entries.async_get_entry(mock_entry.entry_id) - - assert updated_entry.state is ConfigEntryState.LOADED - assert updated_entry.version == 2 - assert updated_entry.options[CONF_INCL_FILTER] == DEFAULT_FILTER - assert updated_entry.options[CONF_EXCL_FILTER] == DEFAULT_FILTER - - mock_entry 
= MockConfigEntry( - domain=DOMAIN, - version=1, - data=MOCK_CONFIG, - options={ - CONF_REALTIME: DEFAULT_REALTIME, - CONF_VEHICLE_TYPE: DEFAULT_VEHICLE_TYPE, - CONF_UNITS: METRIC_UNITS, - CONF_AVOID_FERRIES: DEFAULT_AVOID_FERRIES, - CONF_AVOID_SUBSCRIPTION_ROADS: DEFAULT_AVOID_SUBSCRIPTION_ROADS, - CONF_AVOID_TOLL_ROADS: DEFAULT_AVOID_TOLL_ROADS, - CONF_INCL_FILTER: "include", - CONF_EXCL_FILTER: "exclude", - }, - ) - - mock_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() - - updated_entry = hass.config_entries.async_get_entry(mock_entry.entry_id) - - assert updated_entry.state is ConfigEntryState.LOADED - assert updated_entry.version == 2 - assert updated_entry.options[CONF_INCL_FILTER] == ["include"] - assert updated_entry.options[CONF_EXCL_FILTER] == ["exclude"] diff --git a/tests/components/waze_travel_time/test_sensor.py b/tests/components/waze_travel_time/test_sensor.py index 94e3a0cf9d7..e09a7199ff4 100644 --- a/tests/components/waze_travel_time/test_sensor.py +++ b/tests/components/waze_travel_time/test_sensor.py @@ -3,7 +3,6 @@ import pytest from pywaze.route_calculator import WRCError -from homeassistant.components.waze_travel_time.config_flow import WazeConfigFlow from homeassistant.components.waze_travel_time.const import ( CONF_AVOID_FERRIES, CONF_AVOID_SUBSCRIPTION_ROADS, @@ -75,8 +74,6 @@ async def test_sensor(hass: HomeAssistant) -> None: CONF_AVOID_TOLL_ROADS: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_FERRIES: True, - CONF_INCL_FILTER: [""], - CONF_EXCL_FILTER: [""], }, ) ], @@ -101,8 +98,7 @@ async def test_imperial(hass: HomeAssistant) -> None: CONF_AVOID_TOLL_ROADS: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_FERRIES: True, - CONF_INCL_FILTER: ["IncludeThis"], - CONF_EXCL_FILTER: [""], + CONF_INCL_FILTER: "IncludeThis", }, ) ], @@ -125,8 +121,7 @@ async def test_incl_filter(hass: HomeAssistant) -> None: CONF_AVOID_TOLL_ROADS: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_FERRIES: True, - CONF_INCL_FILTER: [""], - CONF_EXCL_FILTER: ["ExcludeThis"], + CONF_EXCL_FILTER: "ExcludeThis", }, ) ], @@ -143,11 +138,7 @@ async def test_sensor_failed_wrcerror( ) -> None: """Test that sensor update fails with log message.""" config_entry = MockConfigEntry( - domain=DOMAIN, - data=MOCK_CONFIG, - options=DEFAULT_OPTIONS, - entry_id="test", - version=WazeConfigFlow.VERSION, + domain=DOMAIN, data=MOCK_CONFIG, options=DEFAULT_OPTIONS, entry_id="test" ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/weather/__init__.py b/tests/components/weather/__init__.py index 2dbffbbd617..c24baad5237 100644 --- a/tests/components/weather/__init__.py +++ b/tests/components/weather/__init__.py @@ -61,7 +61,7 @@ class MockWeatherTest(WeatherPlatform.MockWeather): async def create_entity( hass: HomeAssistant, - mock_weather: type[WeatherPlatform.MockWeather], + mock_weather: WeatherPlatform.MockWeather, manifest_extra: dict[str, Any] | None, **kwargs, ) -> WeatherPlatform.MockWeather: diff --git a/tests/components/weather/conftest.py b/tests/components/weather/conftest.py index 78389381ff3..e3e790300a0 100644 --- a/tests/components/weather/conftest.py +++ b/tests/components/weather/conftest.py @@ -1,8 +1,7 @@ """Fixtures for Weather platform tests.""" -from collections.abc import Generator - import pytest +from typing_extensions import Generator from homeassistant.config_entries import ConfigFlow from homeassistant.core import 
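# Several conftest hunks above (waqi, water_heater, weather, weatherflow, weatherflow_cloud)
# import Generator from typing_extensions rather than collections.abc, which permits the
# single-parameter annotation Generator[AsyncMock] used for yield-style fixtures. A minimal
# sketch of that fixture shape; asyncio.sleep is just an arbitrary stand-in patch target,
# not one used by these integrations.
from unittest.mock import AsyncMock, patch

import pytest
from typing_extensions import Generator


@pytest.fixture
def mock_sleep() -> Generator[AsyncMock]:
    """Yield a patched coroutine so every test sees a fresh AsyncMock."""
    with patch("asyncio.sleep", new_callable=AsyncMock) as mock_async_sleep:
        yield mock_async_sleep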
HomeAssistant diff --git a/tests/components/weatherflow/conftest.py b/tests/components/weatherflow/conftest.py index 21c251d39b5..c0811597228 100644 --- a/tests/components/weatherflow/conftest.py +++ b/tests/components/weatherflow/conftest.py @@ -1,12 +1,12 @@ """Fixtures for Weatherflow integration tests.""" import asyncio -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from pyweatherflowudp.client import EVENT_DEVICE_DISCOVERED from pyweatherflowudp.device import WeatherFlowDevice +from typing_extensions import Generator from homeassistant.components.weatherflow.const import DOMAIN diff --git a/tests/components/weatherflow_cloud/__init__.py b/tests/components/weatherflow_cloud/__init__.py index 31004a27f64..c251e7868cc 100644 --- a/tests/components/weatherflow_cloud/__init__.py +++ b/tests/components/weatherflow_cloud/__init__.py @@ -1,13 +1 @@ """Tests for the WeatherflowCloud integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/weatherflow_cloud/conftest.py b/tests/components/weatherflow_cloud/conftest.py index 36b42bf24a8..d47da3c7d1b 100644 --- a/tests/components/weatherflow_cloud/conftest.py +++ b/tests/components/weatherflow_cloud/conftest.py @@ -1,19 +1,10 @@ """Common fixtures for the WeatherflowCloud tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch from aiohttp import ClientResponseError import pytest -from weatherflow4py.models.rest.forecast import WeatherDataForecastREST -from weatherflow4py.models.rest.observation import ObservationStationREST -from weatherflow4py.models.rest.stations import StationsResponseREST -from weatherflow4py.models.rest.unified import WeatherFlowDataREST - -from homeassistant.components.weatherflow_cloud.const import DOMAIN -from homeassistant.const import CONF_API_TOKEN - -from tests.common import MockConfigEntry, load_fixture +from typing_extensions import Generator @pytest.fixture @@ -65,51 +56,3 @@ def mock_get_stations_401_error() -> Generator[AsyncMock]: side_effect=side_effects, ) as mock_get_stations: yield mock_get_stations - - -MOCK_API_TOKEN = "1234567890" - - -@pytest.fixture -async def mock_config_entry() -> MockConfigEntry: - """Fixture for MockConfigEntry.""" - return MockConfigEntry( - domain=DOMAIN, - data={CONF_API_TOKEN: MOCK_API_TOKEN}, - version=1, - ) - - -@pytest.fixture -def mock_api(): - """Fixture for Mock WeatherFlowRestAPI.""" - get_stations_response_data = StationsResponseREST.from_json( - load_fixture("stations.json", DOMAIN) - ) - get_forecast_response_data = WeatherDataForecastREST.from_json( - load_fixture("forecast.json", DOMAIN) - ) - get_observation_response_data = ObservationStationREST.from_json( - load_fixture("station_observation.json", DOMAIN) - ) - - data = { - 24432: WeatherFlowDataREST( - weather=get_forecast_response_data, - observation=get_observation_response_data, - station=get_stations_response_data.stations[0], - device_observations=None, - ) - } - - with patch( - "homeassistant.components.weatherflow_cloud.coordinator.WeatherFlowRestAPI", - autospec=True, - ) as mock_api_class: - # Create an instance of AsyncMock for the API - mock_api = AsyncMock() - 
mock_api.get_all_data.return_value = data - # Patch the class to return our mock_api instance - mock_api_class.return_value = mock_api - - yield mock_api diff --git a/tests/components/weatherflow_cloud/fixtures/forecast.json b/tests/components/weatherflow_cloud/fixtures/forecast.json deleted file mode 100644 index 62793983327..00000000000 --- a/tests/components/weatherflow_cloud/fixtures/forecast.json +++ /dev/null @@ -1,4783 +0,0 @@ -{ - "current_conditions": { - "air_density": 1.0, - "air_temperature": 4.0, - "brightness": 59768, - "conditions": "Clear", - "delta_t": 6.0, - "dew_point": -13.0, - "feels_like": 3.0, - "icon": "clear-day", - "is_precip_local_day_rain_check": true, - "is_precip_local_yesterday_rain_check": true, - "lightning_strike_count_last_1hr": 0, - "lightning_strike_count_last_3hr": 0, - "lightning_strike_last_distance": 39, - "lightning_strike_last_distance_msg": "37 - 41 km", - "lightning_strike_last_epoch": 1698522523, - "precip_accum_local_day": 0, - "precip_accum_local_yesterday": 0, - "precip_minutes_local_day": 0, - "precip_minutes_local_yesterday": 0, - "pressure_trend": "rising", - "relative_humidity": 27, - "sea_level_pressure": 1022.1, - "solar_radiation": 498, - "station_pressure": 795.8, - "time": 1703785918, - "uv": 2, - "wet_bulb_globe_temperature": 2.0, - "wet_bulb_temperature": -1.0, - "wind_avg": 2.0, - "wind_direction": 40, - "wind_direction_cardinal": "NE", - "wind_gust": 4.0 - }, - "forecast": { - "daily": [ - { - "air_temp_high": 5.0, - "air_temp_low": -6.0, - "conditions": "Clear", - "day_num": 28, - "day_start_local": 1703746800, - "icon": "clear-day", - "month_num": 12, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "sunrise": 1703773057, - "sunset": 1703807070 - }, - { - "air_temp_high": 7.0, - "air_temp_low": -1.0, - "conditions": "Clear", - "day_num": 29, - "day_start_local": 1703833200, - "icon": "clear-day", - "month_num": 12, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "sunrise": 1703859473, - "sunset": 1703893513 - }, - { - "air_temp_high": 10.0, - "air_temp_low": -1.0, - "conditions": "Partly Cloudy", - "day_num": 30, - "day_start_local": 1703919600, - "icon": "partly-cloudy-day", - "month_num": 12, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "sunrise": 1703945887, - "sunset": 1703979957 - }, - { - "air_temp_high": 2.0, - "air_temp_low": -3.0, - "conditions": "Partly Cloudy", - "day_num": 31, - "day_start_local": 1704006000, - "icon": "partly-cloudy-day", - "month_num": 12, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "sunrise": 1704032299, - "sunset": 1704066403 - }, - { - "air_temp_high": 5.0, - "air_temp_low": -4.0, - "conditions": "Partly Cloudy", - "day_num": 1, - "day_start_local": 1704092400, - "icon": "partly-cloudy-day", - "month_num": 1, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "sunrise": 1704118709, - "sunset": 1704152851 - }, - { - "air_temp_high": 4.0, - "air_temp_low": -4.0, - "conditions": "Partly Cloudy", - "day_num": 2, - "day_start_local": 1704178800, - "icon": "partly-cloudy-day", - "month_num": 1, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "sunrise": 1704205116, - "sunset": 1704239300 - }, - { - "air_temp_high": 3.0, - "air_temp_low": -5.0, - "conditions": "Partly Cloudy", - "day_num": 3, - "day_start_local": 1704265200, - "icon": "partly-cloudy-day", - 
"month_num": 1, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "sunrise": 1704291522, - "sunset": 1704325751 - }, - { - "air_temp_high": 4.0, - "air_temp_low": -4.0, - "conditions": "Wintry Mix Possible", - "day_num": 4, - "day_start_local": 1704351600, - "icon": "possibly-sleet-day", - "month_num": 1, - "precip_icon": "chance-sleet", - "precip_probability": 20, - "precip_type": "sleet", - "sunrise": 1704377925, - "sunset": 1704412203 - }, - { - "air_temp_high": 1.0, - "air_temp_low": -5.0, - "conditions": "Partly Cloudy", - "day_num": 5, - "day_start_local": 1704438000, - "icon": "partly-cloudy-day", - "month_num": 1, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "sunrise": 1704464327, - "sunset": 1704498656 - }, - { - "air_temp_high": 4.0, - "air_temp_low": -5.0, - "conditions": "Partly Cloudy", - "day_num": 6, - "day_start_local": 1704524400, - "icon": "partly-cloudy-day", - "month_num": 1, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "sunrise": 1704550726, - "sunset": 1704585111 - } - ], - "hourly": [ - { - "air_temperature": 4.0, - "conditions": "Clear", - "feels_like": -1.0, - "icon": "clear-day", - "local_day": 28, - "local_hour": 11, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 50, - "sea_level_pressure": 1021.3, - "time": 1703786400, - "uv": 4.0, - "wind_avg": 8.0, - "wind_direction": 350, - "wind_direction_cardinal": "N", - "wind_gust": 12.0 - }, - { - "air_temperature": 4.0, - "conditions": "Clear", - "feels_like": 0.0, - "icon": "clear-day", - "local_day": 28, - "local_hour": 12, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 50, - "sea_level_pressure": 1020.5, - "time": 1703790000, - "uv": 5.0, - "wind_avg": 7.0, - "wind_direction": 350, - "wind_direction_cardinal": "N", - "wind_gust": 11.0 - }, - { - "air_temperature": 5.0, - "conditions": "Clear", - "feels_like": 0.0, - "icon": "clear-day", - "local_day": 28, - "local_hour": 13, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 49, - "sea_level_pressure": 1019.3, - "time": 1703793600, - "uv": 5.0, - "wind_avg": 7.0, - "wind_direction": 350, - "wind_direction_cardinal": "N", - "wind_gust": 11.0 - }, - { - "air_temperature": 5.0, - "conditions": "Clear", - "feels_like": 1.0, - "icon": "clear-day", - "local_day": 28, - "local_hour": 14, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 47, - "sea_level_pressure": 1018.9, - "time": 1703797200, - "uv": 4.0, - "wind_avg": 8.0, - "wind_direction": 350, - "wind_direction_cardinal": "N", - "wind_gust": 11.0 - }, - { - "air_temperature": 5.0, - "conditions": "Clear", - "feels_like": 1.0, - "icon": "clear-day", - "local_day": 28, - "local_hour": 15, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 46, - "sea_level_pressure": 1019.9, - "time": 1703800800, - "uv": 3.0, - "wind_avg": 8.0, - "wind_direction": 350, - "wind_direction_cardinal": "N", - "wind_gust": 11.0 - }, - { - "air_temperature": 4.0, - "conditions": "Clear", - "feels_like": -1.0, - "icon": "clear-day", - "local_day": 28, - "local_hour": 16, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": 
"rain", - "relative_humidity": 52, - "sea_level_pressure": 1021.9, - "time": 1703804400, - "uv": 1.0, - "wind_avg": 6.0, - "wind_direction": 340, - "wind_direction_cardinal": "NNW", - "wind_gust": 9.0 - }, - { - "air_temperature": 1.0, - "conditions": "Clear", - "feels_like": -4.0, - "icon": "clear-night", - "local_day": 28, - "local_hour": 17, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 64, - "sea_level_pressure": 1025.4, - "time": 1703808000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 330, - "wind_direction_cardinal": "NNW", - "wind_gust": 7.0 - }, - { - "air_temperature": 0.0, - "conditions": "Clear", - "feels_like": -5.0, - "icon": "clear-night", - "local_day": 28, - "local_hour": 18, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 69, - "sea_level_pressure": 1026.1, - "time": 1703811600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 0.0, - "conditions": "Clear", - "feels_like": -4.0, - "icon": "clear-night", - "local_day": 28, - "local_hour": 19, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 65, - "sea_level_pressure": 1026.6, - "time": 1703815200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 0.0, - "conditions": "Clear", - "feels_like": -4.0, - "icon": "clear-night", - "local_day": 28, - "local_hour": 20, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 66, - "sea_level_pressure": 1026.6, - "time": 1703818800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 1.0, - "conditions": "Clear", - "feels_like": -3.0, - "icon": "clear-night", - "local_day": 28, - "local_hour": 21, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 63, - "sea_level_pressure": 1026.7, - "time": 1703822400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 1.0, - "conditions": "Clear", - "feels_like": -3.0, - "icon": "clear-night", - "local_day": 28, - "local_hour": 22, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 61, - "sea_level_pressure": 1026.6, - "time": 1703826000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 1.0, - "conditions": "Clear", - "feels_like": -3.0, - "icon": "clear-night", - "local_day": 28, - "local_hour": 23, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 61, - "sea_level_pressure": 1026.7, - "time": 1703829600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 1.0, - "conditions": "Clear", - "feels_like": -4.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 0, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 64, - 
"sea_level_pressure": 1026.2, - "time": 1703833200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 1.0, - "conditions": "Clear", - "feels_like": -3.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 1, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 63, - "sea_level_pressure": 1025.9, - "time": 1703836800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 1.0, - "conditions": "Clear", - "feels_like": -3.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 2, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 62, - "sea_level_pressure": 1026.1, - "time": 1703840400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 1.0, - "conditions": "Clear", - "feels_like": -3.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 3, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 61, - "sea_level_pressure": 1026.0, - "time": 1703844000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 310, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 0.0, - "conditions": "Clear", - "feels_like": -4.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 4, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 66, - "sea_level_pressure": 1025.9, - "time": 1703847600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 310, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 0.0, - "conditions": "Clear", - "feels_like": -4.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 5, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 67, - "sea_level_pressure": 1026.3, - "time": 1703851200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 310, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 1.0, - "conditions": "Clear", - "feels_like": -4.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 6, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 64, - "sea_level_pressure": 1026.8, - "time": 1703854800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 310, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 2.0, - "conditions": "Clear", - "feels_like": -3.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 7, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 60, - "sea_level_pressure": 1027.3, - "time": 1703858400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 310, - "wind_direction_cardinal": "NW", - "wind_gust": 5.0 - }, - { - "air_temperature": 5.0, - "conditions": "Clear", - "feels_like": 2.0, - "icon": "clear-day", - "local_day": 29, - "local_hour": 8, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 48, - "sea_level_pressure": 1026.2, - "time": 1703862000, - "uv": 0.0, - 
"wind_avg": 4.0, - "wind_direction": 310, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 6.0, - "conditions": "Clear", - "feels_like": 3.0, - "icon": "clear-day", - "local_day": 29, - "local_hour": 9, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 45, - "sea_level_pressure": 1023.4, - "time": 1703865600, - "uv": 2.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 5.0, - "conditions": "Clear", - "feels_like": 2.0, - "icon": "clear-day", - "local_day": 29, - "local_hour": 10, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 47, - "sea_level_pressure": 1021.9, - "time": 1703869200, - "uv": 3.0, - "wind_avg": 4.0, - "wind_direction": 330, - "wind_direction_cardinal": "NNW", - "wind_gust": 6.0 - }, - { - "air_temperature": 6.0, - "conditions": "Clear", - "feels_like": 3.0, - "icon": "clear-day", - "local_day": 29, - "local_hour": 11, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 44, - "sea_level_pressure": 1020.8, - "time": 1703872800, - "uv": 4.0, - "wind_avg": 4.0, - "wind_direction": 350, - "wind_direction_cardinal": "N", - "wind_gust": 6.0 - }, - { - "air_temperature": 7.0, - "conditions": "Clear", - "feels_like": 4.0, - "icon": "clear-day", - "local_day": 29, - "local_hour": 12, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 42, - "sea_level_pressure": 1019.3, - "time": 1703876400, - "uv": 5.0, - "wind_avg": 4.0, - "wind_direction": 360, - "wind_direction_cardinal": "N", - "wind_gust": 6.0 - }, - { - "air_temperature": 7.0, - "conditions": "Clear", - "feels_like": 5.0, - "icon": "clear-day", - "local_day": 29, - "local_hour": 13, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 40, - "sea_level_pressure": 1018.1, - "time": 1703880000, - "uv": 5.0, - "wind_avg": 4.0, - "wind_direction": 0, - "wind_direction_cardinal": "N", - "wind_gust": 6.0 - }, - { - "air_temperature": 7.0, - "conditions": "Clear", - "feels_like": 5.0, - "icon": "clear-day", - "local_day": 29, - "local_hour": 14, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 40, - "sea_level_pressure": 1017.8, - "time": 1703883600, - "uv": 4.0, - "wind_avg": 3.0, - "wind_direction": 10, - "wind_direction_cardinal": "N", - "wind_gust": 5.0 - }, - { - "air_temperature": 7.0, - "conditions": "Clear", - "feels_like": 5.0, - "icon": "clear-day", - "local_day": 29, - "local_hour": 15, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 41, - "sea_level_pressure": 1018.0, - "time": 1703887200, - "uv": 3.0, - "wind_avg": 3.0, - "wind_direction": 180, - "wind_direction_cardinal": "S", - "wind_gust": 4.0 - }, - { - "air_temperature": 5.0, - "conditions": "Clear", - "feels_like": 3.0, - "icon": "clear-day", - "local_day": 29, - "local_hour": 16, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 46, - "sea_level_pressure": 1018.8, - "time": 1703890800, - "uv": 1.0, - "wind_avg": 3.0, - "wind_direction": 180, - "wind_direction_cardinal": "S", - "wind_gust": 
4.0 - }, - { - "air_temperature": 3.0, - "conditions": "Partly Cloudy", - "feels_like": 0.0, - "icon": "partly-cloudy-night", - "local_day": 29, - "local_hour": 17, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 55, - "sea_level_pressure": 1020.6, - "time": 1703894400, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 250, - "wind_direction_cardinal": "WSW", - "wind_gust": 4.0 - }, - { - "air_temperature": 1.0, - "conditions": "Clear", - "feels_like": -2.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 18, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 62, - "sea_level_pressure": 1020.7, - "time": 1703898000, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 4.0 - }, - { - "air_temperature": 0.0, - "conditions": "Clear", - "feels_like": -3.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 19, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 66, - "sea_level_pressure": 1020.7, - "time": 1703901600, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 3.0 - }, - { - "air_temperature": 0.0, - "conditions": "Clear", - "feels_like": -3.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 20, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 69, - "sea_level_pressure": 1020.8, - "time": 1703905200, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 3.0 - }, - { - "air_temperature": -1.0, - "conditions": "Clear", - "feels_like": -4.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 21, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 72, - "sea_level_pressure": 1020.3, - "time": 1703908800, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 3.0 - }, - { - "air_temperature": -1.0, - "conditions": "Clear", - "feels_like": -4.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 22, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 73, - "sea_level_pressure": 1019.9, - "time": 1703912400, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 4.0 - }, - { - "air_temperature": -1.0, - "conditions": "Clear", - "feels_like": -5.0, - "icon": "clear-night", - "local_day": 29, - "local_hour": 23, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 74, - "sea_level_pressure": 1019.4, - "time": 1703916000, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 4.0 - }, - { - "air_temperature": -1.0, - "conditions": "Clear", - "feels_like": -5.0, - "icon": "clear-night", - "local_day": 30, - "local_hour": 0, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 75, - "sea_level_pressure": 1019.0, - "time": 1703919600, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 4.0 - }, - { - "air_temperature": -1.0, - 
"conditions": "Clear", - "feels_like": -5.0, - "icon": "clear-night", - "local_day": 30, - "local_hour": 1, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 75, - "sea_level_pressure": 1018.5, - "time": 1703923200, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 4.0 - }, - { - "air_temperature": -1.0, - "conditions": "Clear", - "feels_like": -5.0, - "icon": "clear-night", - "local_day": 30, - "local_hour": 2, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 74, - "sea_level_pressure": 1018.1, - "time": 1703926800, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": -1.0, - "conditions": "Clear", - "feels_like": -5.0, - "icon": "clear-night", - "local_day": 30, - "local_hour": 3, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 74, - "sea_level_pressure": 1017.7, - "time": 1703930400, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 5.0 - }, - { - "air_temperature": -1.0, - "conditions": "Clear", - "feels_like": -5.0, - "icon": "clear-night", - "local_day": 30, - "local_hour": 4, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 74, - "sea_level_pressure": 1017.4, - "time": 1703934000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 5.0 - }, - { - "air_temperature": -1.0, - "conditions": "Clear", - "feels_like": -5.0, - "icon": "clear-night", - "local_day": 30, - "local_hour": 5, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 73, - "sea_level_pressure": 1017.0, - "time": 1703937600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 5.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -5.0, - "icon": "partly-cloudy-night", - "local_day": 30, - "local_hour": 6, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 72, - "sea_level_pressure": 1016.8, - "time": 1703941200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-night", - "local_day": 30, - "local_hour": 7, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 69, - "sea_level_pressure": 1016.5, - "time": 1703944800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-day", - "local_day": 30, - "local_hour": 8, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 63, - "sea_level_pressure": 1016.3, - "time": 1703948400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": 3.0, - "conditions": 
"Partly Cloudy", - "feels_like": 0.0, - "icon": "partly-cloudy-day", - "local_day": 30, - "local_hour": 9, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 55, - "sea_level_pressure": 1015.0, - "time": 1703952000, - "uv": 2.0, - "wind_avg": 4.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": 6.0, - "conditions": "Partly Cloudy", - "feels_like": 3.0, - "icon": "partly-cloudy-day", - "local_day": 30, - "local_hour": 10, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 45, - "sea_level_pressure": 1013.7, - "time": 1703955600, - "uv": 2.0, - "wind_avg": 4.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": 8.0, - "conditions": "Partly Cloudy", - "feels_like": 5.0, - "icon": "partly-cloudy-day", - "local_day": 30, - "local_hour": 11, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 39, - "sea_level_pressure": 1012.4, - "time": 1703959200, - "uv": 2.0, - "wind_avg": 4.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": 9.0, - "conditions": "Partly Cloudy", - "feels_like": 7.0, - "icon": "partly-cloudy-day", - "local_day": 30, - "local_hour": 12, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 36, - "sea_level_pressure": 1011.5, - "time": 1703962800, - "uv": 4.0, - "wind_avg": 4.0, - "wind_direction": 210, - "wind_direction_cardinal": "SSW", - "wind_gust": 5.0 - }, - { - "air_temperature": 10.0, - "conditions": "Partly Cloudy", - "feels_like": 8.0, - "icon": "partly-cloudy-day", - "local_day": 30, - "local_hour": 13, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 35, - "sea_level_pressure": 1010.7, - "time": 1703966400, - "uv": 4.0, - "wind_avg": 4.0, - "wind_direction": 210, - "wind_direction_cardinal": "SSW", - "wind_gust": 5.0 - }, - { - "air_temperature": 9.0, - "conditions": "Partly Cloudy", - "feels_like": 7.0, - "icon": "partly-cloudy-day", - "local_day": 30, - "local_hour": 14, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 35, - "sea_level_pressure": 1009.8, - "time": 1703970000, - "uv": 4.0, - "wind_avg": 4.0, - "wind_direction": 210, - "wind_direction_cardinal": "SSW", - "wind_gust": 5.0 - }, - { - "air_temperature": 8.0, - "conditions": "Partly Cloudy", - "feels_like": 6.0, - "icon": "partly-cloudy-day", - "local_day": 30, - "local_hour": 15, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 38, - "sea_level_pressure": 1010.7, - "time": 1703973600, - "uv": 2.0, - "wind_avg": 3.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 4.0 - }, - { - "air_temperature": 6.0, - "conditions": "Partly Cloudy", - "feels_like": 4.0, - "icon": "partly-cloudy-day", - "local_day": 30, - "local_hour": 16, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 43, - "sea_level_pressure": 1011.6, - "time": 1703977200, - "uv": 2.0, - "wind_avg": 3.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 4.0 - }, 
- { - "air_temperature": 5.0, - "conditions": "Partly Cloudy", - "feels_like": 3.0, - "icon": "partly-cloudy-night", - "local_day": 30, - "local_hour": 17, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 48, - "sea_level_pressure": 1012.5, - "time": 1703980800, - "uv": 2.0, - "wind_avg": 2.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 3.0 - }, - { - "air_temperature": 4.0, - "conditions": "Partly Cloudy", - "feels_like": 1.0, - "icon": "partly-cloudy-night", - "local_day": 30, - "local_hour": 18, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 53, - "sea_level_pressure": 1013.1, - "time": 1703984400, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 3.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": 0.0, - "icon": "partly-cloudy-night", - "local_day": 30, - "local_hour": 19, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 56, - "sea_level_pressure": 1013.7, - "time": 1703988000, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 3.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-night", - "local_day": 30, - "local_hour": 20, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 60, - "sea_level_pressure": 1014.4, - "time": 1703991600, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 2.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-night", - "local_day": 30, - "local_hour": 21, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 63, - "sea_level_pressure": 1014.7, - "time": 1703995200, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 2.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-night", - "local_day": 30, - "local_hour": 22, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 66, - "sea_level_pressure": 1015.1, - "time": 1703998800, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 2.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-night", - "local_day": 30, - "local_hour": 23, - "precip": 0, - "precip_icon": "chance-snow", - "precip_probability": 0, - "precip_type": "snow", - "relative_humidity": 67, - "sea_level_pressure": 1015.5, - "time": 1704002400, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 2.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 0, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 68, - "sea_level_pressure": 1015.3, - "time": 1704006000, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 
260, - "wind_direction_cardinal": "W", - "wind_gust": 2.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 1, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 68, - "sea_level_pressure": 1015.0, - "time": 1704009600, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 3.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 2, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 69, - "sea_level_pressure": 1014.7, - "time": 1704013200, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 3.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 3, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 71, - "sea_level_pressure": 1015.1, - "time": 1704016800, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 3.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 4, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 73, - "sea_level_pressure": 1015.5, - "time": 1704020400, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 3.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 5, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 74, - "sea_level_pressure": 1015.9, - "time": 1704024000, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 3.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 6, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 74, - "sea_level_pressure": 1016.7, - "time": 1704027600, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 3.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 7, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 73, - "sea_level_pressure": 1017.4, - "time": 1704031200, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 3.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-day", - "local_day": 31, - "local_hour": 8, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 71, - "sea_level_pressure": 
1018.2, - "time": 1704034800, - "uv": 0.0, - "wind_avg": 2.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 3.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-day", - "local_day": 31, - "local_hour": 9, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 68, - "sea_level_pressure": 1018.2, - "time": 1704038400, - "uv": 1.0, - "wind_avg": 2.0, - "wind_direction": 120, - "wind_direction_cardinal": "ESE", - "wind_gust": 4.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-day", - "local_day": 31, - "local_hour": 10, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 64, - "sea_level_pressure": 1018.2, - "time": 1704042000, - "uv": 1.0, - "wind_avg": 3.0, - "wind_direction": 120, - "wind_direction_cardinal": "ESE", - "wind_gust": 4.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-day", - "local_day": 31, - "local_hour": 11, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 61, - "sea_level_pressure": 1018.1, - "time": 1704045600, - "uv": 1.0, - "wind_avg": 3.0, - "wind_direction": 120, - "wind_direction_cardinal": "ESE", - "wind_gust": 5.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-day", - "local_day": 31, - "local_hour": 12, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 59, - "sea_level_pressure": 1017.6, - "time": 1704049200, - "uv": 3.0, - "wind_avg": 3.0, - "wind_direction": 100, - "wind_direction_cardinal": "E", - "wind_gust": 5.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-day", - "local_day": 31, - "local_hour": 13, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 58, - "sea_level_pressure": 1017.0, - "time": 1704052800, - "uv": 3.0, - "wind_avg": 3.0, - "wind_direction": 100, - "wind_direction_cardinal": "E", - "wind_gust": 5.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-day", - "local_day": 31, - "local_hour": 14, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 58, - "sea_level_pressure": 1016.4, - "time": 1704056400, - "uv": 3.0, - "wind_avg": 3.0, - "wind_direction": 100, - "wind_direction_cardinal": "E", - "wind_gust": 5.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-day", - "local_day": 31, - "local_hour": 15, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 60, - "sea_level_pressure": 1017.9, - "time": 1704060000, - "uv": 2.0, - "wind_avg": 3.0, - "wind_direction": 130, - "wind_direction_cardinal": "SE", - "wind_gust": 5.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-day", - "local_day": 31, - "local_hour": 16, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - 
"precip_type": "sleet", - "relative_humidity": 63, - "sea_level_pressure": 1019.4, - "time": 1704063600, - "uv": 2.0, - "wind_avg": 3.0, - "wind_direction": 130, - "wind_direction_cardinal": "SE", - "wind_gust": 4.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 17, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 67, - "sea_level_pressure": 1021.0, - "time": 1704067200, - "uv": 2.0, - "wind_avg": 3.0, - "wind_direction": 130, - "wind_direction_cardinal": "SE", - "wind_gust": 4.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 18, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 72, - "sea_level_pressure": 1021.8, - "time": 1704070800, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 4.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 19, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 78, - "sea_level_pressure": 1022.7, - "time": 1704074400, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 4.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 20, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 82, - "sea_level_pressure": 1023.6, - "time": 1704078000, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 4.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -7.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 21, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 85, - "sea_level_pressure": 1023.6, - "time": 1704081600, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 4.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -7.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 22, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 86, - "sea_level_pressure": 1023.6, - "time": 1704085200, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 4.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -7.0, - "icon": "partly-cloudy-night", - "local_day": 31, - "local_hour": 23, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 87, - "sea_level_pressure": 1023.6, - "time": 1704088800, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 4.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 1, - 
"local_hour": 0, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 87, - "sea_level_pressure": 1024.0, - "time": 1704092400, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 250, - "wind_direction_cardinal": "WSW", - "wind_gust": 4.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 1, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 88, - "sea_level_pressure": 1024.5, - "time": 1704096000, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 250, - "wind_direction_cardinal": "WSW", - "wind_gust": 4.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 2, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 89, - "sea_level_pressure": 1024.9, - "time": 1704099600, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 250, - "wind_direction_cardinal": "WSW", - "wind_gust": 4.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 3, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 90, - "sea_level_pressure": 1024.8, - "time": 1704103200, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 4.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 4, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 91, - "sea_level_pressure": 1024.6, - "time": 1704106800, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 4.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 5, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 90, - "sea_level_pressure": 1024.5, - "time": 1704110400, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 4.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 6, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 89, - "sea_level_pressure": 1024.4, - "time": 1704114000, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 7, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 85, - "sea_level_pressure": 1024.4, - "time": 1704117600, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", 
- "feels_like": -6.0, - "icon": "partly-cloudy-day", - "local_day": 1, - "local_hour": 8, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 79, - "sea_level_pressure": 1024.4, - "time": 1704121200, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-day", - "local_day": 1, - "local_hour": 9, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 69, - "sea_level_pressure": 1022.7, - "time": 1704124800, - "uv": 1.0, - "wind_avg": 4.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 5.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-day", - "local_day": 1, - "local_hour": 10, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 59, - "sea_level_pressure": 1021.1, - "time": 1704128400, - "uv": 1.0, - "wind_avg": 4.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 6.0 - }, - { - "air_temperature": 4.0, - "conditions": "Partly Cloudy", - "feels_like": 0.0, - "icon": "partly-cloudy-day", - "local_day": 1, - "local_hour": 11, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 53, - "sea_level_pressure": 1019.5, - "time": 1704132000, - "uv": 1.0, - "wind_avg": 4.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 6.0 - }, - { - "air_temperature": 4.0, - "conditions": "Partly Cloudy", - "feels_like": 1.0, - "icon": "partly-cloudy-day", - "local_day": 1, - "local_hour": 12, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 49, - "sea_level_pressure": 1018.5, - "time": 1704135600, - "uv": 4.0, - "wind_avg": 4.0, - "wind_direction": 170, - "wind_direction_cardinal": "S", - "wind_gust": 6.0 - }, - { - "air_temperature": 5.0, - "conditions": "Partly Cloudy", - "feels_like": 2.0, - "icon": "partly-cloudy-day", - "local_day": 1, - "local_hour": 13, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 47, - "sea_level_pressure": 1017.4, - "time": 1704139200, - "uv": 4.0, - "wind_avg": 4.0, - "wind_direction": 170, - "wind_direction_cardinal": "S", - "wind_gust": 6.0 - }, - { - "air_temperature": 5.0, - "conditions": "Partly Cloudy", - "feels_like": 2.0, - "icon": "partly-cloudy-day", - "local_day": 1, - "local_hour": 14, - "precip": 0, - "precip_icon": "chance-rain", - "precip_probability": 0, - "precip_type": "rain", - "relative_humidity": 48, - "sea_level_pressure": 1016.4, - "time": 1704142800, - "uv": 4.0, - "wind_avg": 4.0, - "wind_direction": 170, - "wind_direction_cardinal": "S", - "wind_gust": 6.0 - }, - { - "air_temperature": 4.0, - "conditions": "Partly Cloudy", - "feels_like": 1.0, - "icon": "partly-cloudy-day", - "local_day": 1, - "local_hour": 15, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 50, - "sea_level_pressure": 1017.7, - "time": 1704146400, - "uv": 3.0, - "wind_avg": 4.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 6.0 - }, - { - 
"air_temperature": 3.0, - "conditions": "Partly Cloudy", - "feels_like": 0.0, - "icon": "partly-cloudy-day", - "local_day": 1, - "local_hour": 16, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 55, - "sea_level_pressure": 1018.9, - "time": 1704150000, - "uv": 3.0, - "wind_avg": 4.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 5.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 17, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 60, - "sea_level_pressure": 1020.2, - "time": 1704153600, - "uv": 3.0, - "wind_avg": 3.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 5.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 18, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 66, - "sea_level_pressure": 1020.8, - "time": 1704157200, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 250, - "wind_direction_cardinal": "WSW", - "wind_gust": 5.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 19, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 71, - "sea_level_pressure": 1021.4, - "time": 1704160800, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 250, - "wind_direction_cardinal": "WSW", - "wind_gust": 5.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -5.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 20, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 75, - "sea_level_pressure": 1022.0, - "time": 1704164400, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 250, - "wind_direction_cardinal": "WSW", - "wind_gust": 5.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 21, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 78, - "sea_level_pressure": 1021.9, - "time": 1704168000, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 22, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 78, - "sea_level_pressure": 1021.7, - "time": 1704171600, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 1, - "local_hour": 23, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 78, - "sea_level_pressure": 1021.6, - "time": 1704175200, - "uv": 0.0, - "wind_avg": 3.0, - 
"wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 0, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 78, - "sea_level_pressure": 1020.8, - "time": 1704178800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 5.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 1, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 77, - "sea_level_pressure": 1020.1, - "time": 1704182400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 5.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 2, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 77, - "sea_level_pressure": 1019.3, - "time": 1704186000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 5.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 3, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 77, - "sea_level_pressure": 1019.0, - "time": 1704189600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 310, - "wind_direction_cardinal": "NW", - "wind_gust": 5.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 4, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 78, - "sea_level_pressure": 1018.7, - "time": 1704193200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 310, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 5, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 78, - "sea_level_pressure": 1018.4, - "time": 1704196800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 310, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 6, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 76, - "sea_level_pressure": 1018.5, - "time": 1704200400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 7, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 74, - 
"sea_level_pressure": 1018.7, - "time": 1704204000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -5.0, - "icon": "partly-cloudy-day", - "local_day": 2, - "local_hour": 8, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 71, - "sea_level_pressure": 1018.9, - "time": 1704207600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-day", - "local_day": 2, - "local_hour": 9, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 67, - "sea_level_pressure": 1018.2, - "time": 1704211200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 350, - "wind_direction_cardinal": "N", - "wind_gust": 6.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-day", - "local_day": 2, - "local_hour": 10, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 62, - "sea_level_pressure": 1017.5, - "time": 1704214800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 350, - "wind_direction_cardinal": "N", - "wind_gust": 6.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-day", - "local_day": 2, - "local_hour": 11, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 58, - "sea_level_pressure": 1016.8, - "time": 1704218400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 350, - "wind_direction_cardinal": "N", - "wind_gust": 6.0 - }, - { - "air_temperature": 3.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-day", - "local_day": 2, - "local_hour": 12, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 55, - "sea_level_pressure": 1015.7, - "time": 1704222000, - "uv": 3.0, - "wind_avg": 4.0, - "wind_direction": 10, - "wind_direction_cardinal": "N", - "wind_gust": 6.0 - }, - { - "air_temperature": 4.0, - "conditions": "Partly Cloudy", - "feels_like": 0.0, - "icon": "partly-cloudy-day", - "local_day": 2, - "local_hour": 13, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 53, - "sea_level_pressure": 1014.7, - "time": 1704225600, - "uv": 3.0, - "wind_avg": 4.0, - "wind_direction": 10, - "wind_direction_cardinal": "N", - "wind_gust": 7.0 - }, - { - "air_temperature": 4.0, - "conditions": "Partly Cloudy", - "feels_like": 0.0, - "icon": "partly-cloudy-day", - "local_day": 2, - "local_hour": 14, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 53, - "sea_level_pressure": 1013.6, - "time": 1704229200, - "uv": 3.0, - "wind_avg": 4.0, - "wind_direction": 10, - "wind_direction_cardinal": "N", - "wind_gust": 7.0 - }, - { - "air_temperature": 3.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-day", - "local_day": 2, - "local_hour": 15, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - 
"precip_type": "sleet", - "relative_humidity": 56, - "sea_level_pressure": 1014.8, - "time": 1704232800, - "uv": 2.0, - "wind_avg": 4.0, - "wind_direction": 360, - "wind_direction_cardinal": "N", - "wind_gust": 6.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-day", - "local_day": 2, - "local_hour": 16, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 61, - "sea_level_pressure": 1016.1, - "time": 1704236400, - "uv": 2.0, - "wind_avg": 4.0, - "wind_direction": 360, - "wind_direction_cardinal": "N", - "wind_gust": 6.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 17, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 67, - "sea_level_pressure": 1017.4, - "time": 1704240000, - "uv": 2.0, - "wind_avg": 4.0, - "wind_direction": 360, - "wind_direction_cardinal": "N", - "wind_gust": 6.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -5.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 18, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 73, - "sea_level_pressure": 1017.7, - "time": 1704243600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 310, - "wind_direction_cardinal": "NW", - "wind_gust": 6.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 19, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 78, - "sea_level_pressure": 1018.1, - "time": 1704247200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 310, - "wind_direction_cardinal": "NW", - "wind_gust": 5.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -7.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 20, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 82, - "sea_level_pressure": 1018.5, - "time": 1704250800, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 310, - "wind_direction_cardinal": "NW", - "wind_gust": 5.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 21, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 85, - "sea_level_pressure": 1018.4, - "time": 1704254400, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 5.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 22, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 88, - "sea_level_pressure": 1018.4, - "time": 1704258000, - "uv": 0.0, - "wind_avg": 3.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 5.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 2, - "local_hour": 
23, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 90, - "sea_level_pressure": 1018.4, - "time": 1704261600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 5.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 0, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 91, - "sea_level_pressure": 1018.4, - "time": 1704265200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 1, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 91, - "sea_level_pressure": 1018.3, - "time": 1704268800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 2, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 93, - "sea_level_pressure": 1018.3, - "time": 1704272400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": -5.0, - "conditions": "Partly Cloudy", - "feels_like": -10.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 3, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 95, - "sea_level_pressure": 1018.1, - "time": 1704276000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": -5.0, - "conditions": "Partly Cloudy", - "feels_like": -10.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 4, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 97, - "sea_level_pressure": 1017.8, - "time": 1704279600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": -5.0, - "conditions": "Partly Cloudy", - "feels_like": -10.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 5, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 98, - "sea_level_pressure": 1017.6, - "time": 1704283200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 5.0 - }, - { - "air_temperature": -5.0, - "conditions": "Partly Cloudy", - "feels_like": -10.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 6, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 96, - "sea_level_pressure": 1017.7, - "time": 1704286800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 6.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - 
"feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 7, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 93, - "sea_level_pressure": 1017.8, - "time": 1704290400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 6.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-day", - "local_day": 3, - "local_hour": 8, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 86, - "sea_level_pressure": 1017.9, - "time": 1704294000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 6.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-day", - "local_day": 3, - "local_hour": 9, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 78, - "sea_level_pressure": 1016.1, - "time": 1704297600, - "uv": 1.0, - "wind_avg": 4.0, - "wind_direction": 210, - "wind_direction_cardinal": "SSW", - "wind_gust": 6.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-day", - "local_day": 3, - "local_hour": 10, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 68, - "sea_level_pressure": 1014.3, - "time": 1704301200, - "uv": 1.0, - "wind_avg": 4.0, - "wind_direction": 210, - "wind_direction_cardinal": "SSW", - "wind_gust": 6.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-day", - "local_day": 3, - "local_hour": 11, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 62, - "sea_level_pressure": 1012.5, - "time": 1704304800, - "uv": 1.0, - "wind_avg": 4.0, - "wind_direction": 210, - "wind_direction_cardinal": "SSW", - "wind_gust": 6.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-day", - "local_day": 3, - "local_hour": 12, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 57, - "sea_level_pressure": 1011.4, - "time": 1704308400, - "uv": 4.0, - "wind_avg": 4.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 6.0 - }, - { - "air_temperature": 3.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-day", - "local_day": 3, - "local_hour": 13, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 55, - "sea_level_pressure": 1010.3, - "time": 1704312000, - "uv": 4.0, - "wind_avg": 4.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 7.0 - }, - { - "air_temperature": 3.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-day", - "local_day": 3, - "local_hour": 14, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 55, - "sea_level_pressure": 1009.1, - "time": 1704315600, - "uv": 4.0, - "wind_avg": 4.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 7.0 - }, - { 
- "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-day", - "local_day": 3, - "local_hour": 15, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 57, - "sea_level_pressure": 1010.4, - "time": 1704319200, - "uv": 3.0, - "wind_avg": 4.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 7.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-day", - "local_day": 3, - "local_hour": 16, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 63, - "sea_level_pressure": 1011.7, - "time": 1704322800, - "uv": 3.0, - "wind_avg": 4.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 6.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -5.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 17, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 68, - "sea_level_pressure": 1012.9, - "time": 1704326400, - "uv": 3.0, - "wind_avg": 4.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 6.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 18, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 74, - "sea_level_pressure": 1013.2, - "time": 1704330000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 240, - "wind_direction_cardinal": "WSW", - "wind_gust": 6.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -7.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 19, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 79, - "sea_level_pressure": 1013.5, - "time": 1704333600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 240, - "wind_direction_cardinal": "WSW", - "wind_gust": 6.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 20, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 82, - "sea_level_pressure": 1013.8, - "time": 1704337200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 240, - "wind_direction_cardinal": "WSW", - "wind_gust": 6.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 21, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 85, - "sea_level_pressure": 1014.0, - "time": 1704340800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 6.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 22, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 85, - "sea_level_pressure": 1014.1, - "time": 1704344400, - "uv": 0.0, - "wind_avg": 4.0, - 
"wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 6.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 3, - "local_hour": 23, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 86, - "sea_level_pressure": 1014.3, - "time": 1704348000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 6.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 0, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 87, - "sea_level_pressure": 1014.6, - "time": 1704351600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 6.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 1, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 87, - "sea_level_pressure": 1015.0, - "time": 1704355200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 6.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 2, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 88, - "sea_level_pressure": 1015.3, - "time": 1704358800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 3, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 89, - "sea_level_pressure": 1015.7, - "time": 1704362400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 4, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 89, - "sea_level_pressure": 1016.0, - "time": 1704366000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 5, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 88, - "sea_level_pressure": 1016.4, - "time": 1704369600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 6, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 86, - 
"sea_level_pressure": 1016.9, - "time": 1704373200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 7, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 84, - "sea_level_pressure": 1017.4, - "time": 1704376800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -7.0, - "icon": "partly-cloudy-day", - "local_day": 4, - "local_hour": 8, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 78, - "sea_level_pressure": 1018.0, - "time": 1704380400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-day", - "local_day": 4, - "local_hour": 9, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 72, - "sea_level_pressure": 1016.3, - "time": 1704384000, - "uv": 1.0, - "wind_avg": 5.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-day", - "local_day": 4, - "local_hour": 10, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 63, - "sea_level_pressure": 1014.6, - "time": 1704387600, - "uv": 1.0, - "wind_avg": 5.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-day", - "local_day": 4, - "local_hour": 11, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 58, - "sea_level_pressure": 1013.0, - "time": 1704391200, - "uv": 1.0, - "wind_avg": 5.0, - "wind_direction": 280, - "wind_direction_cardinal": "W", - "wind_gust": 8.0 - }, - { - "air_temperature": 3.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-day", - "local_day": 4, - "local_hour": 12, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 54, - "sea_level_pressure": 1011.6, - "time": 1704394800, - "uv": 4.0, - "wind_avg": 5.0, - "wind_direction": 300, - "wind_direction_cardinal": "WNW", - "wind_gust": 8.0 - }, - { - "air_temperature": 4.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-day", - "local_day": 4, - "local_hour": 13, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 15, - "precip_type": "sleet", - "relative_humidity": 53, - "sea_level_pressure": 1010.2, - "time": 1704398400, - "uv": 4.0, - "wind_avg": 5.0, - "wind_direction": 300, - "wind_direction_cardinal": "WNW", - "wind_gust": 8.0 - }, - { - "air_temperature": 4.0, - "conditions": "Partly Cloudy", - "feels_like": 0.0, - "icon": "partly-cloudy-day", - "local_day": 4, - "local_hour": 14, - "precip": 0, - "precip_icon": "chance-sleet", - 
"precip_probability": 15, - "precip_type": "sleet", - "relative_humidity": 53, - "sea_level_pressure": 1008.8, - "time": 1704402000, - "uv": 4.0, - "wind_avg": 5.0, - "wind_direction": 300, - "wind_direction_cardinal": "WNW", - "wind_gust": 8.0 - }, - { - "air_temperature": 3.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-day", - "local_day": 4, - "local_hour": 15, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 15, - "precip_type": "sleet", - "relative_humidity": 55, - "sea_level_pressure": 1009.6, - "time": 1704405600, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 330, - "wind_direction_cardinal": "NNW", - "wind_gust": 7.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-day", - "local_day": 4, - "local_hour": 16, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 15, - "precip_type": "sleet", - "relative_humidity": 60, - "sea_level_pressure": 1010.3, - "time": 1704409200, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 330, - "wind_direction_cardinal": "NNW", - "wind_gust": 7.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 17, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 15, - "precip_type": "sleet", - "relative_humidity": 65, - "sea_level_pressure": 1011.0, - "time": 1704412800, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 330, - "wind_direction_cardinal": "NNW", - "wind_gust": 7.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -5.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 18, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 15, - "precip_type": "sleet", - "relative_humidity": 70, - "sea_level_pressure": 1011.2, - "time": 1704416400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 7.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 19, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 15, - "precip_type": "sleet", - "relative_humidity": 75, - "sea_level_pressure": 1011.4, - "time": 1704420000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 7.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -7.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 20, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 15, - "precip_type": "sleet", - "relative_humidity": 80, - "sea_level_pressure": 1011.6, - "time": 1704423600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 320, - "wind_direction_cardinal": "NW", - "wind_gust": 7.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 21, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 15, - "precip_type": "sleet", - "relative_humidity": 84, - "sea_level_pressure": 1011.7, - "time": 1704427200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 300, - "wind_direction_cardinal": "WNW", - "wind_gust": 7.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": 
"partly-cloudy-night", - "local_day": 4, - "local_hour": 22, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 15, - "precip_type": "sleet", - "relative_humidity": 86, - "sea_level_pressure": 1011.8, - "time": 1704430800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 300, - "wind_direction_cardinal": "WNW", - "wind_gust": 7.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 4, - "local_hour": 23, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 15, - "precip_type": "sleet", - "relative_humidity": 88, - "sea_level_pressure": 1011.9, - "time": 1704434400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 300, - "wind_direction_cardinal": "WNW", - "wind_gust": 6.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 0, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 10, - "precip_type": "sleet", - "relative_humidity": 90, - "sea_level_pressure": 1012.6, - "time": 1704438000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 6.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -10.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 1, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 92, - "sea_level_pressure": 1013.3, - "time": 1704441600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 6.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -10.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 2, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 93, - "sea_level_pressure": 1014.0, - "time": 1704445200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 6.0 - }, - { - "air_temperature": -5.0, - "conditions": "Partly Cloudy", - "feels_like": -10.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 3, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 95, - "sea_level_pressure": 1014.7, - "time": 1704448800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 6.0 - }, - { - "air_temperature": -5.0, - "conditions": "Partly Cloudy", - "feels_like": -10.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 4, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 96, - "sea_level_pressure": 1015.4, - "time": 1704452400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 6.0 - }, - { - "air_temperature": -5.0, - "conditions": "Partly Cloudy", - "feels_like": -10.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 5, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 95, - "sea_level_pressure": 1016.1, - "time": 1704456000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 290, - "wind_direction_cardinal": "WNW", - "wind_gust": 6.0 - }, 
- { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 6, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 91, - "sea_level_pressure": 1015.9, - "time": 1704459600, - "uv": 1.0, - "wind_avg": 4.0, - "wind_direction": 240, - "wind_direction_cardinal": "WSW", - "wind_gust": 7.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 7, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 85, - "sea_level_pressure": 1015.7, - "time": 1704463200, - "uv": 1.0, - "wind_avg": 4.0, - "wind_direction": 240, - "wind_direction_cardinal": "WSW", - "wind_gust": 7.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -7.0, - "icon": "partly-cloudy-day", - "local_day": 5, - "local_hour": 8, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 78, - "sea_level_pressure": 1015.4, - "time": 1704466800, - "uv": 1.0, - "wind_avg": 5.0, - "wind_direction": 240, - "wind_direction_cardinal": "WSW", - "wind_gust": 7.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-day", - "local_day": 5, - "local_hour": 9, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 71, - "sea_level_pressure": 1015.2, - "time": 1704470400, - "uv": 1.0, - "wind_avg": 5.0, - "wind_direction": 240, - "wind_direction_cardinal": "WSW", - "wind_gust": 7.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-day", - "local_day": 5, - "local_hour": 10, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 66, - "sea_level_pressure": 1015.0, - "time": 1704474000, - "uv": 1.0, - "wind_avg": 5.0, - "wind_direction": 240, - "wind_direction_cardinal": "WSW", - "wind_gust": 8.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-day", - "local_day": 5, - "local_hour": 11, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 63, - "sea_level_pressure": 1014.7, - "time": 1704477600, - "uv": 1.0, - "wind_avg": 5.0, - "wind_direction": 240, - "wind_direction_cardinal": "WSW", - "wind_gust": 8.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-day", - "local_day": 5, - "local_hour": 12, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 61, - "sea_level_pressure": 1015.0, - "time": 1704481200, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 8.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-day", - "local_day": 5, - "local_hour": 13, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 61, - "sea_level_pressure": 1015.2, - "time": 1704484800, - "uv": 3.0, - "wind_avg": 5.0, - 
"wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 7.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-day", - "local_day": 5, - "local_hour": 14, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 62, - "sea_level_pressure": 1015.5, - "time": 1704488400, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 7.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-day", - "local_day": 5, - "local_hour": 15, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 65, - "sea_level_pressure": 1015.7, - "time": 1704492000, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 7.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -5.0, - "icon": "partly-cloudy-day", - "local_day": 5, - "local_hour": 16, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 67, - "sea_level_pressure": 1015.9, - "time": 1704495600, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 7.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -5.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 17, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 70, - "sea_level_pressure": 1016.2, - "time": 1704499200, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 190, - "wind_direction_cardinal": "S", - "wind_gust": 7.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 18, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 73, - "sea_level_pressure": 1016.1, - "time": 1704502800, - "uv": 0.0, - "wind_avg": 5.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -7.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 19, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 77, - "sea_level_pressure": 1016.0, - "time": 1704506400, - "uv": 0.0, - "wind_avg": 5.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 20, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 80, - "sea_level_pressure": 1015.8, - "time": 1704510000, - "uv": 0.0, - "wind_avg": 5.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -8.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 21, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 85, - 
"sea_level_pressure": 1015.7, - "time": 1704513600, - "uv": 0.0, - "wind_avg": 5.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 22, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 89, - "sea_level_pressure": 1015.6, - "time": 1704517200, - "uv": 0.0, - "wind_avg": 5.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 6.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -10.0, - "icon": "partly-cloudy-night", - "local_day": 5, - "local_hour": 23, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 92, - "sea_level_pressure": 1015.5, - "time": 1704520800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 6.0 - }, - { - "air_temperature": -5.0, - "conditions": "Partly Cloudy", - "feels_like": -10.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 0, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 95, - "sea_level_pressure": 1015.6, - "time": 1704524400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 6.0 - }, - { - "air_temperature": -5.0, - "conditions": "Partly Cloudy", - "feels_like": -11.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 1, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 98, - "sea_level_pressure": 1015.7, - "time": 1704528000, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -5.0, - "conditions": "Partly Cloudy", - "feels_like": -11.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 2, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 99, - "sea_level_pressure": 1015.7, - "time": 1704531600, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -5.0, - "conditions": "Partly Cloudy", - "feels_like": -11.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 3, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 100, - "sea_level_pressure": 1015.8, - "time": 1704535200, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -5.0, - "conditions": "Partly Cloudy", - "feels_like": -11.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 4, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 98, - "sea_level_pressure": 1015.9, - "time": 1704538800, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -4.0, - "conditions": "Partly Cloudy", - "feels_like": -10.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 5, - "precip": 0, - "precip_icon": "chance-sleet", - 
"precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 93, - "sea_level_pressure": 1016.0, - "time": 1704542400, - "uv": 0.0, - "wind_avg": 4.0, - "wind_direction": 270, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -3.0, - "conditions": "Partly Cloudy", - "feels_like": -9.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 6, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 86, - "sea_level_pressure": 1015.8, - "time": 1704546000, - "uv": 1.0, - "wind_avg": 5.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -7.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 7, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 78, - "sea_level_pressure": 1015.7, - "time": 1704549600, - "uv": 1.0, - "wind_avg": 5.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 7.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -5.0, - "icon": "partly-cloudy-day", - "local_day": 6, - "local_hour": 8, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 69, - "sea_level_pressure": 1015.6, - "time": 1704553200, - "uv": 1.0, - "wind_avg": 5.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 8.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-day", - "local_day": 6, - "local_hour": 9, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 61, - "sea_level_pressure": 1015.5, - "time": 1704556800, - "uv": 1.0, - "wind_avg": 5.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 8.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-day", - "local_day": 6, - "local_hour": 10, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 56, - "sea_level_pressure": 1015.4, - "time": 1704560400, - "uv": 1.0, - "wind_avg": 5.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 8.0 - }, - { - "air_temperature": 3.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-day", - "local_day": 6, - "local_hour": 11, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 53, - "sea_level_pressure": 1015.2, - "time": 1704564000, - "uv": 1.0, - "wind_avg": 5.0, - "wind_direction": 260, - "wind_direction_cardinal": "W", - "wind_gust": 8.0 - }, - { - "air_temperature": 4.0, - "conditions": "Partly Cloudy", - "feels_like": 0.0, - "icon": "partly-cloudy-day", - "local_day": 6, - "local_hour": 12, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 51, - "sea_level_pressure": 1015.1, - "time": 1704567600, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 8.0 - }, - { - "air_temperature": 4.0, - "conditions": "Partly Cloudy", - "feels_like": 0.0, - "icon": "partly-cloudy-day", - "local_day": 6, - "local_hour": 
13, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 51, - "sea_level_pressure": 1015.0, - "time": 1704571200, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 8.0 - }, - { - "air_temperature": 4.0, - "conditions": "Partly Cloudy", - "feels_like": 0.0, - "icon": "partly-cloudy-day", - "local_day": 6, - "local_hour": 14, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 53, - "sea_level_pressure": 1014.8, - "time": 1704574800, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 7.0 - }, - { - "air_temperature": 3.0, - "conditions": "Partly Cloudy", - "feels_like": -1.0, - "icon": "partly-cloudy-day", - "local_day": 6, - "local_hour": 15, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 55, - "sea_level_pressure": 1014.7, - "time": 1704578400, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 7.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -2.0, - "icon": "partly-cloudy-day", - "local_day": 6, - "local_hour": 16, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 57, - "sea_level_pressure": 1014.5, - "time": 1704582000, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 7.0 - }, - { - "air_temperature": 2.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 17, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 59, - "sea_level_pressure": 1014.4, - "time": 1704585600, - "uv": 3.0, - "wind_avg": 5.0, - "wind_direction": 230, - "wind_direction_cardinal": "SW", - "wind_gust": 7.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -3.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 18, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 5, - "precip_type": "sleet", - "relative_humidity": 62, - "sea_level_pressure": 1013.9, - "time": 1704589200, - "uv": 0.0, - "wind_avg": 5.0, - "wind_direction": 250, - "wind_direction_cardinal": "WSW", - "wind_gust": 7.0 - }, - { - "air_temperature": 1.0, - "conditions": "Partly Cloudy", - "feels_like": -4.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 19, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 65, - "sea_level_pressure": 1013.4, - "time": 1704592800, - "uv": 0.0, - "wind_avg": 5.0, - "wind_direction": 250, - "wind_direction_cardinal": "WSW", - "wind_gust": 7.0 - }, - { - "air_temperature": 0.0, - "conditions": "Partly Cloudy", - "feels_like": -5.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 20, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 68, - "sea_level_pressure": 1012.9, - "time": 1704596400, - "uv": 0.0, - "wind_avg": 5.0, - "wind_direction": 250, - "wind_direction_cardinal": "WSW", - "wind_gust": 7.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - 
"feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 21, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 71, - "sea_level_pressure": 1012.4, - "time": 1704600000, - "uv": 0.0, - "wind_avg": 5.0, - "wind_direction": 250, - "wind_direction_cardinal": "WSW", - "wind_gust": 7.0 - }, - { - "air_temperature": -1.0, - "conditions": "Partly Cloudy", - "feels_like": -6.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 22, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 73, - "sea_level_pressure": 1011.9, - "time": 1704603600, - "uv": 0.0, - "wind_avg": 5.0, - "wind_direction": 250, - "wind_direction_cardinal": "WSW", - "wind_gust": 7.0 - }, - { - "air_temperature": -2.0, - "conditions": "Partly Cloudy", - "feels_like": -7.0, - "icon": "partly-cloudy-night", - "local_day": 6, - "local_hour": 23, - "precip": 0, - "precip_icon": "chance-sleet", - "precip_probability": 0, - "precip_type": "sleet", - "relative_humidity": 75, - "sea_level_pressure": 1011.4, - "time": 1704607200, - "uv": 0.0, - "wind_avg": 5.0, - "wind_direction": 250, - "wind_direction_cardinal": "WSW", - "wind_gust": 7.0 - } - ] - }, - "latitude": 43.94962, - "location_name": "My Home Station", - "longitude": -102.86831, - "source_id_conditions": 5, - "status": { - "status_code": 0, - "status_message": "SUCCESS" - }, - "timezone": "America/Denver", - "timezone_offset_minutes": -420, - "units": { - "units_air_density": "kg/m3", - "units_brightness": "lux", - "units_distance": "km", - "units_other": "metric", - "units_precip": "mm", - "units_pressure": "mb", - "units_solar_radiation": "w/m2", - "units_temp": "c", - "units_wind": "mps" - } -} diff --git a/tests/components/weatherflow_cloud/fixtures/station_observation.json b/tests/components/weatherflow_cloud/fixtures/station_observation.json deleted file mode 100644 index 148b180df73..00000000000 --- a/tests/components/weatherflow_cloud/fixtures/station_observation.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "elevation": 2063.150146484375, - "is_public": true, - "latitude": 43.94962, - "longitude": -102.86831, - "obs": [ - { - "air_density": 0.96139, - "air_temperature": 10.5, - "barometric_pressure": 782.8, - "brightness": 757, - "delta_t": 8.4, - "dew_point": -10.4, - "feels_like": 10.5, - "heat_index": 10.5, - "lightning_strike_count": 0, - "lightning_strike_count_last_1hr": 0, - "lightning_strike_count_last_3hr": 0, - "lightning_strike_last_distance": 26, - "lightning_strike_last_epoch": 1707346875, - "precip": 0.0, - "precip_accum_last_1hr": 0.0, - "precip_accum_local_day": 0.0, - "precip_accum_local_day_final": 0.0, - "precip_accum_local_yesterday": 0.0, - "precip_accum_local_yesterday_final": 0.0, - "precip_analysis_type_yesterday": 0, - "precip_minutes_local_day": 0, - "precip_minutes_local_yesterday": 0, - "precip_minutes_local_yesterday_final": 0, - "pressure_trend": "steady", - "relative_humidity": 22, - "sea_level_pressure": 1006.2, - "solar_radiation": 6, - "station_pressure": 782.8, - "timestamp": 1708994629, - "uv": 0.03, - "wet_bulb_globe_temperature": 4.6, - "wet_bulb_temperature": 2.1, - "wind_avg": 1.4, - "wind_chill": 10.5, - "wind_direction": 203, - "wind_gust": 3.2, - "wind_lull": 0.3 - } - ], - "outdoor_keys": [ - "timestamp", - "air_temperature", - "barometric_pressure", - "station_pressure", - "pressure_trend", - "sea_level_pressure", - "relative_humidity", - 
"precip", - "precip_accum_last_1hr", - "precip_accum_local_day", - "precip_accum_local_day_final", - "precip_accum_local_yesterday_final", - "precip_minutes_local_day", - "precip_minutes_local_yesterday_final", - "wind_avg", - "wind_direction", - "wind_gust", - "wind_lull", - "solar_radiation", - "uv", - "brightness", - "lightning_strike_last_epoch", - "lightning_strike_last_distance", - "lightning_strike_count", - "lightning_strike_count_last_1hr", - "lightning_strike_count_last_3hr", - "feels_like", - "heat_index", - "wind_chill", - "dew_point", - "wet_bulb_temperature", - "wet_bulb_globe_temperature", - "delta_t", - "air_density" - ], - "public_name": "My Home Station", - "station_id": 24432, - "station_name": "My Home Station", - "station_units": { - "units_direction": "degrees", - "units_distance": "mi", - "units_other": "metric", - "units_precip": "in", - "units_pressure": "hpa", - "units_temp": "f", - "units_wind": "bft" - }, - "status": { - "status_code": 0, - "status_message": "SUCCESS" - }, - "timezone": "America/Denver" -} diff --git a/tests/components/weatherflow_cloud/fixtures/station_observation_error.json b/tests/components/weatherflow_cloud/fixtures/station_observation_error.json deleted file mode 100644 index 41bb452c911..00000000000 --- a/tests/components/weatherflow_cloud/fixtures/station_observation_error.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "elevation": 2063.150146484375, - "is_public": true, - "latitude": 43.94962, - "longitude": -102.86831, - "obs": [ - { - "air_density": 0.96139, - "air_temperature": 10.5, - "barometric_pressure": 782.8, - "brightness": 757, - "delta_t": 8.4, - "dew_point": -10.4, - "feels_like": 10.5, - "heat_index": 10.5, - "lightning_strike_count": 0, - "lightning_strike_count_last_1hr": 0, - "lightning_strike_count_last_3hr": 0, - "lightning_strike_last_distance": 26, - "precip": 0.0, - "precip_accum_last_1hr": 0.0, - "precip_accum_local_day": 0.0, - "precip_accum_local_day_final": 0.0, - "precip_accum_local_yesterday": 0.0, - "precip_accum_local_yesterday_final": 0.0, - "precip_analysis_type_yesterday": 0, - "precip_minutes_local_day": 0, - "precip_minutes_local_yesterday": 0, - "precip_minutes_local_yesterday_final": 0, - "pressure_trend": "steady", - "relative_humidity": 22, - "sea_level_pressure": 1006.2, - "solar_radiation": 6, - "station_pressure": 782.8, - "timestamp": 1708994629, - "uv": 0.03, - "wet_bulb_globe_temperature": 4.6, - "wet_bulb_temperature": 2.1, - "wind_avg": 1.4, - "wind_chill": 10.5, - "wind_direction": 203, - "wind_gust": 3.2, - "wind_lull": 0.3 - } - ], - "outdoor_keys": [ - "timestamp", - "air_temperature", - "barometric_pressure", - "station_pressure", - "pressure_trend", - "sea_level_pressure", - "relative_humidity", - "precip", - "precip_accum_last_1hr", - "precip_accum_local_day", - "precip_accum_local_day_final", - "precip_accum_local_yesterday_final", - "precip_minutes_local_day", - "precip_minutes_local_yesterday_final", - "wind_avg", - "wind_direction", - "wind_gust", - "wind_lull", - "solar_radiation", - "uv", - "brightness", - "lightning_strike_last_epoch", - "lightning_strike_last_distance", - "lightning_strike_count", - "lightning_strike_count_last_1hr", - "lightning_strike_count_last_3hr", - "feels_like", - "heat_index", - "wind_chill", - "dew_point", - "wet_bulb_temperature", - "wet_bulb_globe_temperature", - "delta_t", - "air_density" - ], - "public_name": "My Home Station", - "station_id": 24432, - "station_name": "My Home Station", - "station_units": { - "units_direction": "degrees", - 
"units_distance": "mi", - "units_other": "metric", - "units_precip": "in", - "units_pressure": "hpa", - "units_temp": "f", - "units_wind": "bft" - }, - "status": { - "status_code": 0, - "status_message": "SUCCESS" - }, - "timezone": "America/Denver" -} diff --git a/tests/components/weatherflow_cloud/fixtures/stations.json b/tests/components/weatherflow_cloud/fixtures/stations.json deleted file mode 100644 index e0ca96bd240..00000000000 --- a/tests/components/weatherflow_cloud/fixtures/stations.json +++ /dev/null @@ -1,132 +0,0 @@ -{ - "stations": [ - { - "created_epoch": 1658343273, - "devices": [ - { - "device_id": 7654321, - "device_meta": { - "agl": 1.8288, - "environment": "indoor", - "name": "HB-00068123", - "wifi_network_name": "" - }, - "device_type": "HB", - "firmware_revision": "177", - "hardware_revision": "1", - "location_id": 24432, - "serial_number": "HB-00068123" - }, - { - "device_id": 123456, - "device_meta": { - "agl": 1.8288, - "environment": "outdoor", - "name": "ST-11084623", - "wifi_network_name": "" - }, - "device_settings": { - "show_precip_final": true - }, - "device_type": "ST", - "firmware_revision": "172", - "hardware_revision": "1", - "location_id": 24432, - "serial_number": "ST-11084623" - } - ], - "is_local_mode": false, - "last_modified_epoch": 1658344464, - "latitude": 43.94962, - "location_id": 24432, - "longitude": -102.86831, - "name": "My Home Station", - "public_name": "My Home Station", - "station_id": 24432, - "station_items": [ - { - "device_id": 123456, - "item": "air_temperature_humidity", - "location_id": 24432, - "location_item_id": 657904, - "sort": 0, - "station_id": 24432, - "station_item_id": 657904 - }, - { - "device_id": 123456, - "item": "barometric_pressure", - "location_id": 24432, - "location_item_id": 657906, - "sort": 3, - "station_id": 24432, - "station_item_id": 657906 - }, - { - "device_id": 7654321, - "item": "diagnostics", - "location_id": 24432, - "location_item_id": 657912, - "station_id": 24432, - "station_item_id": 657912 - }, - { - "device_id": 123456, - "item": "diagnostics", - "location_id": 24432, - "location_item_id": 657913, - "sort": 6, - "station_id": 24432, - "station_item_id": 657913 - }, - { - "device_id": 123456, - "item": "light", - "location_id": 24432, - "location_item_id": 657908, - "sort": 2, - "station_id": 24432, - "station_item_id": 657908 - }, - { - "device_id": 123456, - "item": "lightning", - "location_id": 24432, - "location_item_id": 657905, - "sort": 4, - "station_id": 24432, - "station_item_id": 657905 - }, - { - "device_id": 123456, - "item": "rain", - "location_id": 24432, - "location_item_id": 657907, - "sort": 5, - "station_id": 24432, - "station_item_id": 657907 - }, - { - "device_id": 123456, - "item": "wind", - "location_id": 24432, - "location_item_id": 657909, - "sort": 1, - "station_id": 24432, - "station_item_id": 657909 - } - ], - "station_meta": { - "elevation": 2063.150146484375, - "share_with_wf": true, - "share_with_wu": true - }, - "timezone": "America/Denver", - "timezone_offset_minutes": -420 - } - ], - "status": { - "status_code": 0, - "status_message": "SUCCESS" - } -} diff --git a/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr b/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr deleted file mode 100644 index 95be86664a2..00000000000 --- a/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr +++ /dev/null @@ -1,806 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[sensor.my_home_station_air_density-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_air_density', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 5, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Air density', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'air_density', - 'unique_id': '24432_air_density', - 'unit_of_measurement': 'kg/m³', - }) -# --- -# name: test_all_entities[sensor.my_home_station_air_density-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'friendly_name': 'My Home Station Air density', - 'state_class': , - 'unit_of_measurement': 'kg/m³', - }), - 'context': , - 'entity_id': 'sensor.my_home_station_air_density', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.96139', - }) -# --- -# name: test_all_entities[sensor.my_home_station_dew_point-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_dew_point', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Dew point', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dew_point', - 'unique_id': '24432_dew_point', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_dew_point-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Dew point', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_dew_point', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '-10.4', - }) -# --- -# name: test_all_entities[sensor.my_home_station_feels_like-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_feels_like', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Feels like', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'feels_like', - 'unique_id': '24432_feels_like', - 'unit_of_measurement': , - }) -# --- -# name: 
test_all_entities[sensor.my_home_station_feels_like-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Feels like', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_feels_like', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.5', - }) -# --- -# name: test_all_entities[sensor.my_home_station_heat_index-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_heat_index', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Heat index', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'heat_index', - 'unique_id': '24432_heat_index', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_heat_index-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Heat index', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_heat_index', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.5', - }) -# --- -# name: test_all_entities[sensor.my_home_station_lightning_count-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_lightning_count', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Lightning count', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_count', - 'unique_id': '24432_lightning_strike_count', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.my_home_station_lightning_count-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'friendly_name': 'My Home Station Lightning count', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_lightning_count', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.my_home_station_lightning_count_last_1_hr-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_lightning_count_last_1_hr', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Lightning count last 1 hr', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_count_last_1hr', - 'unique_id': '24432_lightning_strike_count_last_1hr', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.my_home_station_lightning_count_last_1_hr-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'friendly_name': 'My Home Station Lightning count last 1 hr', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_lightning_count_last_1_hr', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.my_home_station_lightning_count_last_3_hr-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_lightning_count_last_3_hr', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Lightning count last 3 hr', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_count_last_3hr', - 'unique_id': '24432_lightning_strike_count_last_3hr', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.my_home_station_lightning_count_last_3_hr-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'friendly_name': 'My Home Station Lightning count last 3 hr', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_lightning_count_last_3_hr', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.my_home_station_lightning_last_distance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_lightning_last_distance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Lightning last distance', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_last_distance', - 'unique_id': '24432_lightning_strike_last_distance', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_lightning_last_distance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'distance', - 'friendly_name': 'My Home Station Lightning last distance', - 'state_class': , - 
'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_lightning_last_distance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '26', - }) -# --- -# name: test_all_entities[sensor.my_home_station_lightning_last_strike-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_lightning_last_strike', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Lightning last strike', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_last_epoch', - 'unique_id': '24432_lightning_strike_last_epoch', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.my_home_station_lightning_last_strike-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'timestamp', - 'friendly_name': 'My Home Station Lightning last strike', - }), - 'context': , - 'entity_id': 'sensor.my_home_station_lightning_last_strike', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-02-07T23:01:15+00:00', - }) -# --- -# name: test_all_entities[sensor.my_home_station_pressure_barometric-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_pressure_barometric', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Pressure barometric', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'barometric_pressure', - 'unique_id': '24432_barometric_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_pressure_barometric-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'atmospheric_pressure', - 'friendly_name': 'My Home Station Pressure barometric', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_pressure_barometric', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '782.8', - }) -# --- -# name: test_all_entities[sensor.my_home_station_pressure_sea_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_pressure_sea_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Pressure sea level', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'sea_level_pressure', - 'unique_id': '24432_sea_level_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_pressure_sea_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'atmospheric_pressure', - 'friendly_name': 'My Home Station Pressure sea level', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_pressure_sea_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1006.2', - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'air_temperature', - 'unique_id': '24432_air_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.5', - }) -# --- -# name: test_all_entities[sensor.my_home_station_wet_bulb_globe_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_wet_bulb_globe_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Wet bulb globe temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wet_bulb_globe_temperature', - 'unique_id': '24432_wet_bulb_globe_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_wet_bulb_globe_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 
'temperature', - 'friendly_name': 'My Home Station Wet bulb globe temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_wet_bulb_globe_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4.6', - }) -# --- -# name: test_all_entities[sensor.my_home_station_wet_bulb_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_wet_bulb_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Wet bulb temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wet_bulb_temperature', - 'unique_id': '24432_wet_bulb_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_wet_bulb_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Wet bulb temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_wet_bulb_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.1', - }) -# --- -# name: test_all_entities[sensor.my_home_station_wind_chill-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_wind_chill', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Wind chill', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wind_chill', - 'unique_id': '24432_wind_chill', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_wind_chill-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Wind chill', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_wind_chill', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.5', - }) -# --- diff --git a/tests/components/weatherflow_cloud/snapshots/test_weather.ambr b/tests/components/weatherflow_cloud/snapshots/test_weather.ambr deleted file mode 100644 index 569b744529c..00000000000 --- a/tests/components/weatherflow_cloud/snapshots/test_weather.ambr +++ /dev/null @@ -1,62 +0,0 @@ -# serializer version: 1 -# name: test_weather[weather.my_home_station-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'weather', - 'entity_category': None, - 'entity_id': 'weather.my_home_station', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': 'weatherflow_forecast_24432', - 'unit_of_measurement': None, - }) -# --- -# name: test_weather[weather.my_home_station-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'dew_point': -13.0, - 'friendly_name': 'My Home Station', - 'humidity': 27, - 'precipitation_unit': , - 'pressure': 795.8, - 'pressure_unit': , - 'supported_features': , - 'temperature': 4.0, - 'temperature_unit': , - 'uv_index': 2, - 'visibility_unit': , - 'wind_bearing': 40.0, - 'wind_gust_speed': 14.4, - 'wind_speed': 7.2, - 'wind_speed_unit': , - }), - 'context': , - 'entity_id': 'weather.my_home_station', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'sunny', - }) -# --- diff --git a/tests/components/weatherflow_cloud/test_config_flow.py b/tests/components/weatherflow_cloud/test_config_flow.py index 9dc5ad1322d..7ade007ceac 100644 --- a/tests/components/weatherflow_cloud/test_config_flow.py +++ b/tests/components/weatherflow_cloud/test_config_flow.py @@ -4,7 +4,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.weatherflow_cloud.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import CONF_API_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -111,14 +111,15 @@ async def test_reauth(hass: HomeAssistant, mock_get_stations_401_error) -> None: assert not await hass.config_entries.async_setup(entry.entry_id) assert entry.state is ConfigEntryState.SETUP_ERROR - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, data=None + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_API_TOKEN: "SAME_SAME"} + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, + data={CONF_API_TOKEN: "SAME_SAME"}, ) assert result["reason"] == "reauth_successful" assert result["type"] is FlowResultType.ABORT - assert entry.data[CONF_API_TOKEN] == "SAME_SAME" diff --git a/tests/components/weatherflow_cloud/test_sensor.py b/tests/components/weatherflow_cloud/test_sensor.py deleted file mode 100644 index 4d6ff0c8c9f..00000000000 --- a/tests/components/weatherflow_cloud/test_sensor.py +++ /dev/null @@ -1,77 +0,0 @@ -"""Tests for the WeatherFlow Cloud sensor platform.""" - -from datetime import timedelta -from unittest.mock import AsyncMock, patch - -from freezegun.api import FrozenDateTimeFactory -from syrupy import SnapshotAssertion -from weatherflow4py.models.rest.observation import ObservationStationREST - -from 
homeassistant.components.weatherflow_cloud import DOMAIN -from homeassistant.const import STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_fixture, - snapshot_platform, -) - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_api: AsyncMock, -) -> None: - """Test all entities.""" - with patch( - "homeassistant.components.weatherflow_cloud.PLATFORMS", [Platform.SENSOR] - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_all_entities_with_lightning_error( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_api: AsyncMock, - freezer: FrozenDateTimeFactory, -) -> None: - """Test all entities.""" - - get_observation_response_data = ObservationStationREST.from_json( - load_fixture("station_observation_error.json", DOMAIN) - ) - - with patch( - "homeassistant.components.weatherflow_cloud.PLATFORMS", [Platform.SENSOR] - ): - await setup_integration(hass, mock_config_entry) - - assert ( - hass.states.get("sensor.my_home_station_lightning_last_strike").state - == "2024-02-07T23:01:15+00:00" - ) - - # Update the data in our API - all_data = await mock_api.get_all_data() - all_data[24432].observation = get_observation_response_data - mock_api.get_all_data.return_value = all_data - - # Move time forward - freezer.tick(timedelta(minutes=5)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert ( - hass.states.get("sensor.my_home_station_lightning_last_strike").state - == STATE_UNKNOWN - ) diff --git a/tests/components/weatherflow_cloud/test_weather.py b/tests/components/weatherflow_cloud/test_weather.py deleted file mode 100644 index 04da96df423..00000000000 --- a/tests/components/weatherflow_cloud/test_weather.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Tests for the WeatherFlow Cloud weather platform.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_weather( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - mock_api: AsyncMock, -) -> None: - """Test all entities.""" - with patch( - "homeassistant.components.weatherflow_cloud.PLATFORMS", [Platform.WEATHER] - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/weatherkit/conftest.py b/tests/components/weatherkit/conftest.py index 14d96d28347..d4b849115f6 100644 --- a/tests/components/weatherkit/conftest.py +++ b/tests/components/weatherkit/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Apple WeatherKit tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/weatherkit/snapshots/test_weather.ambr b/tests/components/weatherkit/snapshots/test_weather.ambr index f6fa2f1514b..1fbe5389e98 100644 --- a/tests/components/weatherkit/snapshots/test_weather.ambr +++ b/tests/components/weatherkit/snapshots/test_weather.ambr @@ -1,4 +1,294 @@ # serializer version: 1 +# name: test_daily_forecast + dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-09-08T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 28.6, + 'templow': 21.2, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-09T15:00:00Z', + 'precipitation': 3.6, + 'precipitation_probability': 45.0, + 'temperature': 30.6, + 'templow': 21.0, + 'uv_index': 6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2023-09-10T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 30.4, + 'templow': 23.1, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-11T15:00:00Z', + 'precipitation': 0.7, + 'precipitation_probability': 47.0, + 'temperature': 30.4, + 'templow': 23.1, + 'uv_index': 5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-12T15:00:00Z', + 'precipitation': 7.7, + 'precipitation_probability': 37.0, + 'temperature': 30.4, + 'templow': 22.1, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-13T15:00:00Z', + 'precipitation': 0.6, + 'precipitation_probability': 45.0, + 'temperature': 31.0, + 'templow': 22.6, + 'uv_index': 6, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 52.0, + 'temperature': 31.5, + 'templow': 22.4, + 'uv_index': 7, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2023-09-15T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 31.8, + 'templow': 23.3, + 'uv_index': 8, + }), + dict({ + 'condition': 'lightning', + 'datetime': '2023-09-16T15:00:00Z', + 'precipitation': 5.3, + 'precipitation_probability': 35.0, + 'temperature': 30.7, + 'templow': 23.2, + 'uv_index': 8, + }), + dict({ + 'condition': 'lightning', + 'datetime': '2023-09-17T15:00:00Z', + 'precipitation': 2.1, + 'precipitation_probability': 49.0, + 'temperature': 28.1, + 'templow': 22.5, + 'uv_index': 6, + }), + ]), + }) +# --- +# name: test_daily_forecast[forecast] + dict({ + 'weather.home': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-09-08T15:00:00Z', + 
'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 28.6, + 'templow': 21.2, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-09T15:00:00Z', + 'precipitation': 3.6, + 'precipitation_probability': 45.0, + 'temperature': 30.6, + 'templow': 21.0, + 'uv_index': 6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2023-09-10T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 30.4, + 'templow': 23.1, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-11T15:00:00Z', + 'precipitation': 0.7, + 'precipitation_probability': 47.0, + 'temperature': 30.4, + 'templow': 23.1, + 'uv_index': 5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-12T15:00:00Z', + 'precipitation': 7.7, + 'precipitation_probability': 37.0, + 'temperature': 30.4, + 'templow': 22.1, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-13T15:00:00Z', + 'precipitation': 0.6, + 'precipitation_probability': 45.0, + 'temperature': 31.0, + 'templow': 22.6, + 'uv_index': 6, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 52.0, + 'temperature': 31.5, + 'templow': 22.4, + 'uv_index': 7, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2023-09-15T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 31.8, + 'templow': 23.3, + 'uv_index': 8, + }), + dict({ + 'condition': 'lightning', + 'datetime': '2023-09-16T15:00:00Z', + 'precipitation': 5.3, + 'precipitation_probability': 35.0, + 'temperature': 30.7, + 'templow': 23.2, + 'uv_index': 8, + }), + dict({ + 'condition': 'lightning', + 'datetime': '2023-09-17T15:00:00Z', + 'precipitation': 2.1, + 'precipitation_probability': 49.0, + 'temperature': 28.1, + 'templow': 22.5, + 'uv_index': 6, + }), + ]), + }), + }) +# --- +# name: test_daily_forecast[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-09-08T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 28.6, + 'templow': 21.2, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-09T15:00:00Z', + 'precipitation': 3.6, + 'precipitation_probability': 45.0, + 'temperature': 30.6, + 'templow': 21.0, + 'uv_index': 6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2023-09-10T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 30.4, + 'templow': 23.1, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-11T15:00:00Z', + 'precipitation': 0.7, + 'precipitation_probability': 47.0, + 'temperature': 30.4, + 'templow': 23.1, + 'uv_index': 5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-12T15:00:00Z', + 'precipitation': 7.7, + 'precipitation_probability': 37.0, + 'temperature': 30.4, + 'templow': 22.1, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-13T15:00:00Z', + 'precipitation': 0.6, + 'precipitation_probability': 45.0, + 'temperature': 31.0, + 'templow': 22.6, + 'uv_index': 6, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 52.0, + 'temperature': 31.5, + 'templow': 22.4, + 'uv_index': 7, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2023-09-15T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 31.8, + 
'templow': 23.3, + 'uv_index': 8, + }), + dict({ + 'condition': 'lightning', + 'datetime': '2023-09-16T15:00:00Z', + 'precipitation': 5.3, + 'precipitation_probability': 35.0, + 'temperature': 30.7, + 'templow': 23.2, + 'uv_index': 8, + }), + dict({ + 'condition': 'lightning', + 'datetime': '2023-09-17T15:00:00Z', + 'precipitation': 2.1, + 'precipitation_probability': 49.0, + 'temperature': 28.1, + 'templow': 22.5, + 'uv_index': 6, + }), + ]), + }) +# --- # name: test_daily_forecast[get_forecasts] dict({ 'weather.home': dict({ @@ -97,6 +387,11978 @@ }), }) # --- +# name: test_hourly_forecast + dict({ + 'forecast': list([ + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 79.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T14:00:00Z', + 'dew_point': 21.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.24, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 264, + 'wind_gust_speed': 13.44, + 'wind_speed': 6.62, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 80.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T15:00:00Z', + 'dew_point': 21.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.24, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 261, + 'wind_gust_speed': 11.91, + 'wind_speed': 6.64, + }), + dict({ + 'apparent_temperature': 23.8, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T16:00:00Z', + 'dew_point': 21.1, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.12, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 252, + 'wind_gust_speed': 11.15, + 'wind_speed': 6.14, + }), + dict({ + 'apparent_temperature': 23.5, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T17:00:00Z', + 'dew_point': 20.9, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.03, + 'temperature': 21.7, + 'uv_index': 0, + 'wind_bearing': 248, + 'wind_gust_speed': 11.57, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 23.3, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T18:00:00Z', + 'dew_point': 20.8, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.05, + 'temperature': 21.6, + 'uv_index': 0, + 'wind_bearing': 237, + 'wind_gust_speed': 12.42, + 'wind_speed': 5.86, + }), + dict({ + 'apparent_temperature': 23.0, + 'cloud_coverage': 75.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T19:00:00Z', + 'dew_point': 20.6, + 'humidity': 96, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.03, + 'temperature': 21.3, + 'uv_index': 0, + 'wind_bearing': 224, + 'wind_gust_speed': 11.3, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 22.8, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T20:00:00Z', + 'dew_point': 20.4, + 'humidity': 96, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.31, + 'temperature': 21.2, + 'uv_index': 0, + 'wind_bearing': 221, + 'wind_gust_speed': 10.57, + 'wind_speed': 5.13, + }), + dict({ + 'apparent_temperature': 23.1, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-08T21:00:00Z', + 'dew_point': 20.5, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.55, + 'temperature': 21.4, + 'uv_index': 0, + 'wind_bearing': 
237, + 'wind_gust_speed': 10.63, + 'wind_speed': 5.7, + }), + dict({ + 'apparent_temperature': 24.9, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-08T22:00:00Z', + 'dew_point': 21.3, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.79, + 'temperature': 22.8, + 'uv_index': 1, + 'wind_bearing': 258, + 'wind_gust_speed': 10.47, + 'wind_speed': 5.22, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T23:00:00Z', + 'dew_point': 21.3, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.95, + 'temperature': 24.0, + 'uv_index': 2, + 'wind_bearing': 282, + 'wind_gust_speed': 12.74, + 'wind_speed': 5.71, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T00:00:00Z', + 'dew_point': 21.5, + 'humidity': 80, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.35, + 'temperature': 25.1, + 'uv_index': 3, + 'wind_bearing': 294, + 'wind_gust_speed': 13.87, + 'wind_speed': 6.53, + }), + dict({ + 'apparent_temperature': 29.0, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T01:00:00Z', + 'dew_point': 21.8, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.48, + 'temperature': 26.5, + 'uv_index': 5, + 'wind_bearing': 308, + 'wind_gust_speed': 16.04, + 'wind_speed': 6.54, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T02:00:00Z', + 'dew_point': 22.0, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.23, + 'temperature': 27.6, + 'uv_index': 6, + 'wind_bearing': 314, + 'wind_gust_speed': 18.1, + 'wind_speed': 7.32, + }), + dict({ + 'apparent_temperature': 31.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T03:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.86, + 'temperature': 28.3, + 'uv_index': 6, + 'wind_bearing': 317, + 'wind_gust_speed': 20.77, + 'wind_speed': 9.1, + }), + dict({ + 'apparent_temperature': 31.5, + 'cloud_coverage': 69.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T04:00:00Z', + 'dew_point': 22.1, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.65, + 'temperature': 28.6, + 'uv_index': 6, + 'wind_bearing': 311, + 'wind_gust_speed': 21.27, + 'wind_speed': 10.21, + }), + dict({ + 'apparent_temperature': 31.3, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T05:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.48, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 317, + 'wind_gust_speed': 19.62, + 'wind_speed': 10.53, + }), + dict({ + 'apparent_temperature': 30.8, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T06:00:00Z', + 'dew_point': 22.2, + 'humidity': 71, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.54, + 'temperature': 27.9, + 'uv_index': 3, + 'wind_bearing': 335, + 'wind_gust_speed': 18.98, + 'wind_speed': 8.63, + }), + dict({ + 'apparent_temperature': 29.9, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T07:00:00Z', + 'dew_point': 
22.2, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.76, + 'temperature': 27.1, + 'uv_index': 2, + 'wind_bearing': 338, + 'wind_gust_speed': 17.04, + 'wind_speed': 7.75, + }), + dict({ + 'apparent_temperature': 29.1, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T08:00:00Z', + 'dew_point': 22.1, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.05, + 'temperature': 26.4, + 'uv_index': 0, + 'wind_bearing': 342, + 'wind_gust_speed': 14.75, + 'wind_speed': 6.26, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T09:00:00Z', + 'dew_point': 22.0, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.38, + 'temperature': 25.4, + 'uv_index': 0, + 'wind_bearing': 344, + 'wind_gust_speed': 10.43, + 'wind_speed': 5.2, + }), + dict({ + 'apparent_temperature': 26.9, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T10:00:00Z', + 'dew_point': 21.9, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.73, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 339, + 'wind_gust_speed': 6.95, + 'wind_speed': 3.59, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T11:00:00Z', + 'dew_point': 21.8, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.3, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 326, + 'wind_gust_speed': 5.27, + 'wind_speed': 2.1, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 53.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.52, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 257, + 'wind_gust_speed': 5.48, + 'wind_speed': 0.93, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T13:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.53, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 188, + 'wind_gust_speed': 4.44, + 'wind_speed': 1.79, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T14:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.46, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 183, + 'wind_gust_speed': 4.49, + 'wind_speed': 2.19, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T15:00:00Z', + 'dew_point': 21.4, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.21, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 179, + 'wind_gust_speed': 5.32, + 'wind_speed': 2.65, + }), + dict({ + 'apparent_temperature': 24.0, + 'cloud_coverage': 42.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T16:00:00Z', + 'dew_point': 21.1, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.09, + 'temperature': 22.1, + 'uv_index': 0, + 'wind_bearing': 173, + 
'wind_gust_speed': 5.81, + 'wind_speed': 3.2, + }), + dict({ + 'apparent_temperature': 23.7, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T17:00:00Z', + 'dew_point': 20.9, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.88, + 'temperature': 21.9, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 5.53, + 'wind_speed': 3.16, + }), + dict({ + 'apparent_temperature': 23.3, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T18:00:00Z', + 'dew_point': 20.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.94, + 'temperature': 21.6, + 'uv_index': 0, + 'wind_bearing': 153, + 'wind_gust_speed': 6.09, + 'wind_speed': 3.36, + }), + dict({ + 'apparent_temperature': 23.1, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T19:00:00Z', + 'dew_point': 20.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.96, + 'temperature': 21.4, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 6.83, + 'wind_speed': 3.71, + }), + dict({ + 'apparent_temperature': 22.5, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T20:00:00Z', + 'dew_point': 20.0, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 21.0, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 7.98, + 'wind_speed': 4.27, + }), + dict({ + 'apparent_temperature': 22.8, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T21:00:00Z', + 'dew_point': 20.2, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.61, + 'temperature': 21.2, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 8.4, + 'wind_speed': 4.69, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T22:00:00Z', + 'dew_point': 21.3, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.87, + 'temperature': 23.1, + 'uv_index': 1, + 'wind_bearing': 150, + 'wind_gust_speed': 7.66, + 'wind_speed': 4.33, + }), + dict({ + 'apparent_temperature': 28.3, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T23:00:00Z', + 'dew_point': 22.3, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 25.6, + 'uv_index': 2, + 'wind_bearing': 123, + 'wind_gust_speed': 9.63, + 'wind_speed': 3.91, + }), + dict({ + 'apparent_temperature': 30.4, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T00:00:00Z', + 'dew_point': 22.6, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 27.4, + 'uv_index': 4, + 'wind_bearing': 105, + 'wind_gust_speed': 12.59, + 'wind_speed': 3.96, + }), + dict({ + 'apparent_temperature': 32.2, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T01:00:00Z', + 'dew_point': 22.9, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.79, + 'temperature': 28.9, + 'uv_index': 5, + 'wind_bearing': 99, + 'wind_gust_speed': 14.17, + 'wind_speed': 4.06, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 62.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-10T02:00:00Z', 
+ 'dew_point': 22.9, + 'humidity': 66, + 'precipitation': 0.3, + 'precipitation_probability': 7.000000000000001, + 'pressure': 1011.29, + 'temperature': 29.9, + 'uv_index': 6, + 'wind_bearing': 93, + 'wind_gust_speed': 17.75, + 'wind_speed': 4.87, + }), + dict({ + 'apparent_temperature': 34.3, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T03:00:00Z', + 'dew_point': 23.1, + 'humidity': 64, + 'precipitation': 0.3, + 'precipitation_probability': 11.0, + 'pressure': 1010.78, + 'temperature': 30.6, + 'uv_index': 6, + 'wind_bearing': 78, + 'wind_gust_speed': 17.43, + 'wind_speed': 4.54, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 74.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T04:00:00Z', + 'dew_point': 23.2, + 'humidity': 66, + 'precipitation': 0.4, + 'precipitation_probability': 15.0, + 'pressure': 1010.37, + 'temperature': 30.3, + 'uv_index': 5, + 'wind_bearing': 60, + 'wind_gust_speed': 15.24, + 'wind_speed': 4.9, + }), + dict({ + 'apparent_temperature': 33.7, + 'cloud_coverage': 79.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T05:00:00Z', + 'dew_point': 23.3, + 'humidity': 67, + 'precipitation': 0.7, + 'precipitation_probability': 17.0, + 'pressure': 1010.09, + 'temperature': 30.0, + 'uv_index': 4, + 'wind_bearing': 80, + 'wind_gust_speed': 13.53, + 'wind_speed': 5.98, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 80.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T06:00:00Z', + 'dew_point': 23.4, + 'humidity': 70, + 'precipitation': 1.0, + 'precipitation_probability': 17.0, + 'pressure': 1010.0, + 'temperature': 29.5, + 'uv_index': 3, + 'wind_bearing': 83, + 'wind_gust_speed': 12.55, + 'wind_speed': 6.84, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 88.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T07:00:00Z', + 'dew_point': 23.4, + 'humidity': 73, + 'precipitation': 0.4, + 'precipitation_probability': 16.0, + 'pressure': 1010.27, + 'temperature': 28.7, + 'uv_index': 2, + 'wind_bearing': 90, + 'wind_gust_speed': 10.16, + 'wind_speed': 6.07, + }), + dict({ + 'apparent_temperature': 30.9, + 'cloud_coverage': 92.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T08:00:00Z', + 'dew_point': 23.2, + 'humidity': 77, + 'precipitation': 0.5, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1010.71, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 101, + 'wind_gust_speed': 8.18, + 'wind_speed': 4.82, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 93.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T09:00:00Z', + 'dew_point': 23.2, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.9, + 'temperature': 26.5, + 'uv_index': 0, + 'wind_bearing': 128, + 'wind_gust_speed': 8.89, + 'wind_speed': 4.95, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T10:00:00Z', + 'dew_point': 23.0, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.12, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 134, + 'wind_gust_speed': 10.03, + 'wind_speed': 4.52, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 87.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T11:00:00Z', + 'dew_point': 22.8, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.43, + 'temperature': 25.1, + 'uv_index': 0, + 'wind_bearing': 137, + 
'wind_gust_speed': 12.4, + 'wind_speed': 5.41, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T12:00:00Z', + 'dew_point': 22.5, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.58, + 'temperature': 24.8, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 16.36, + 'wind_speed': 6.31, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T13:00:00Z', + 'dew_point': 22.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.55, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 19.66, + 'wind_speed': 7.23, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T14:00:00Z', + 'dew_point': 22.2, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.4, + 'temperature': 24.3, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 21.15, + 'wind_speed': 7.46, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T15:00:00Z', + 'dew_point': 22.0, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.23, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 22.26, + 'wind_speed': 7.84, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T16:00:00Z', + 'dew_point': 21.8, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.01, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 23.53, + 'wind_speed': 8.63, + }), + dict({ + 'apparent_temperature': 25.6, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-10T17:00:00Z', + 'dew_point': 21.6, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.78, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 22.83, + 'wind_speed': 8.61, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T18:00:00Z', + 'dew_point': 21.5, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.69, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 23.7, + 'wind_speed': 8.7, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T19:00:00Z', + 'dew_point': 21.4, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.77, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 24.24, + 'wind_speed': 8.74, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T20:00:00Z', + 'dew_point': 21.6, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.89, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 23.99, + 'wind_speed': 8.81, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T21:00:00Z', + 'dew_point': 21.6, + 
'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.1, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 25.55, + 'wind_speed': 9.05, + }), + dict({ + 'apparent_temperature': 27.0, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T22:00:00Z', + 'dew_point': 21.8, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 24.6, + 'uv_index': 1, + 'wind_bearing': 140, + 'wind_gust_speed': 29.08, + 'wind_speed': 10.37, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T23:00:00Z', + 'dew_point': 21.9, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.36, + 'temperature': 25.9, + 'uv_index': 2, + 'wind_bearing': 140, + 'wind_gust_speed': 34.13, + 'wind_speed': 12.56, + }), + dict({ + 'apparent_temperature': 30.1, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T00:00:00Z', + 'dew_point': 22.3, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 27.2, + 'uv_index': 3, + 'wind_bearing': 140, + 'wind_gust_speed': 38.2, + 'wind_speed': 15.65, + }), + dict({ + 'apparent_temperature': 31.4, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T01:00:00Z', + 'dew_point': 22.3, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.31, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 37.55, + 'wind_speed': 15.78, + }), + dict({ + 'apparent_temperature': 32.7, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T02:00:00Z', + 'dew_point': 22.4, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.98, + 'temperature': 29.6, + 'uv_index': 6, + 'wind_bearing': 143, + 'wind_gust_speed': 35.86, + 'wind_speed': 15.41, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T03:00:00Z', + 'dew_point': 22.5, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.61, + 'temperature': 30.3, + 'uv_index': 6, + 'wind_bearing': 141, + 'wind_gust_speed': 35.88, + 'wind_speed': 15.51, + }), + dict({ + 'apparent_temperature': 33.8, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T04:00:00Z', + 'dew_point': 22.6, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.36, + 'temperature': 30.4, + 'uv_index': 5, + 'wind_bearing': 140, + 'wind_gust_speed': 35.99, + 'wind_speed': 15.75, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T05:00:00Z', + 'dew_point': 22.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.11, + 'temperature': 30.1, + 'uv_index': 4, + 'wind_bearing': 137, + 'wind_gust_speed': 33.61, + 'wind_speed': 15.36, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 77.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T06:00:00Z', + 'dew_point': 22.5, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.98, + 'temperature': 30.0, + 'uv_index': 3, + 'wind_bearing': 138, + 'wind_gust_speed': 32.61, 
+ 'wind_speed': 14.98, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T07:00:00Z', + 'dew_point': 22.2, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.13, + 'temperature': 29.2, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 28.1, + 'wind_speed': 13.88, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T08:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.48, + 'temperature': 28.3, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 24.22, + 'wind_speed': 13.02, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 55.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T09:00:00Z', + 'dew_point': 21.9, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.81, + 'temperature': 27.1, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 22.5, + 'wind_speed': 11.94, + }), + dict({ + 'apparent_temperature': 28.8, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T10:00:00Z', + 'dew_point': 21.7, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 21.47, + 'wind_speed': 11.25, + }), + dict({ + 'apparent_temperature': 28.1, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T11:00:00Z', + 'dew_point': 21.8, + 'humidity': 80, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.77, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 22.71, + 'wind_speed': 12.39, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.97, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 23.67, + 'wind_speed': 12.83, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T13:00:00Z', + 'dew_point': 21.7, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.97, + 'temperature': 24.7, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 23.34, + 'wind_speed': 12.62, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T14:00:00Z', + 'dew_point': 21.7, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.83, + 'temperature': 24.4, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 22.9, + 'wind_speed': 12.07, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T15:00:00Z', + 'dew_point': 21.6, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.74, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 22.01, + 'wind_speed': 11.19, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T16:00:00Z', + 
'dew_point': 21.6, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.56, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 21.29, + 'wind_speed': 10.97, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T17:00:00Z', + 'dew_point': 21.5, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.35, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 20.52, + 'wind_speed': 10.5, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T18:00:00Z', + 'dew_point': 21.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.3, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 20.04, + 'wind_speed': 10.51, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T19:00:00Z', + 'dew_point': 21.3, + 'humidity': 88, + 'precipitation': 0.3, + 'precipitation_probability': 12.0, + 'pressure': 1011.37, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 18.07, + 'wind_speed': 10.13, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T20:00:00Z', + 'dew_point': 21.2, + 'humidity': 89, + 'precipitation': 0.2, + 'precipitation_probability': 13.0, + 'pressure': 1011.53, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 16.86, + 'wind_speed': 10.34, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T21:00:00Z', + 'dew_point': 21.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.71, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 16.66, + 'wind_speed': 10.68, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T22:00:00Z', + 'dew_point': 21.9, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.94, + 'temperature': 24.4, + 'uv_index': 1, + 'wind_bearing': 137, + 'wind_gust_speed': 17.21, + 'wind_speed': 10.61, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T23:00:00Z', + 'dew_point': 22.3, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.05, + 'temperature': 25.6, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 19.23, + 'wind_speed': 11.13, + }), + dict({ + 'apparent_temperature': 29.5, + 'cloud_coverage': 79.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T00:00:00Z', + 'dew_point': 22.6, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.07, + 'temperature': 26.6, + 'uv_index': 3, + 'wind_bearing': 140, + 'wind_gust_speed': 20.61, + 'wind_speed': 11.13, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 82.0, + 'condition': 'rainy', + 'datetime': '2023-09-12T01:00:00Z', + 'dew_point': 23.1, + 'humidity': 75, + 'precipitation': 0.2, + 'precipitation_probability': 16.0, + 'pressure': 1011.89, + 'temperature': 27.9, + 'uv_index': 4, + 'wind_bearing': 141, + 'wind_gust_speed': 
23.35, + 'wind_speed': 11.98, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T02:00:00Z', + 'dew_point': 23.5, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.53, + 'temperature': 29.0, + 'uv_index': 5, + 'wind_bearing': 143, + 'wind_gust_speed': 26.45, + 'wind_speed': 13.01, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T03:00:00Z', + 'dew_point': 23.5, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.15, + 'temperature': 29.8, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 28.95, + 'wind_speed': 13.9, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T04:00:00Z', + 'dew_point': 23.4, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.79, + 'temperature': 30.2, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 27.9, + 'wind_speed': 13.95, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T05:00:00Z', + 'dew_point': 23.1, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.43, + 'temperature': 30.4, + 'uv_index': 4, + 'wind_bearing': 140, + 'wind_gust_speed': 26.53, + 'wind_speed': 13.78, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T06:00:00Z', + 'dew_point': 22.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.21, + 'temperature': 30.1, + 'uv_index': 3, + 'wind_bearing': 138, + 'wind_gust_speed': 24.56, + 'wind_speed': 13.74, + }), + dict({ + 'apparent_temperature': 32.0, + 'cloud_coverage': 53.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T07:00:00Z', + 'dew_point': 22.1, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.26, + 'temperature': 29.1, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 22.78, + 'wind_speed': 13.21, + }), + dict({ + 'apparent_temperature': 30.9, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.51, + 'temperature': 28.1, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 19.92, + 'wind_speed': 12.0, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 50.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T09:00:00Z', + 'dew_point': 21.7, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.8, + 'temperature': 27.2, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 17.65, + 'wind_speed': 10.97, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T10:00:00Z', + 'dew_point': 21.4, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.23, + 'temperature': 26.2, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 15.87, + 'wind_speed': 10.23, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': 
'2023-09-12T11:00:00Z', + 'dew_point': 21.3, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1011.79, + 'temperature': 25.4, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 13.9, + 'wind_speed': 9.39, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T12:00:00Z', + 'dew_point': 21.2, + 'humidity': 81, + 'precipitation': 0.0, + 'precipitation_probability': 47.0, + 'pressure': 1012.12, + 'temperature': 24.7, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 13.32, + 'wind_speed': 8.9, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T13:00:00Z', + 'dew_point': 21.2, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1012.18, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 13.18, + 'wind_speed': 8.59, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T14:00:00Z', + 'dew_point': 21.3, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.09, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 13.84, + 'wind_speed': 8.87, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T15:00:00Z', + 'dew_point': 21.3, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.99, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 15.08, + 'wind_speed': 8.93, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T16:00:00Z', + 'dew_point': 21.0, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 23.2, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 16.74, + 'wind_speed': 9.49, + }), + dict({ + 'apparent_temperature': 24.7, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T17:00:00Z', + 'dew_point': 20.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.75, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 17.45, + 'wind_speed': 9.12, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T18:00:00Z', + 'dew_point': 20.7, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.77, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 17.04, + 'wind_speed': 8.68, + }), + dict({ + 'apparent_temperature': 24.1, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T19:00:00Z', + 'dew_point': 20.6, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 16.8, + 'wind_speed': 8.61, + }), + dict({ + 'apparent_temperature': 23.9, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T20:00:00Z', + 'dew_point': 20.5, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.23, + 'temperature': 22.1, + 'uv_index': 0, + 'wind_bearing': 150, 
+ 'wind_gust_speed': 15.35, + 'wind_speed': 8.36, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 75.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T21:00:00Z', + 'dew_point': 20.6, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.49, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 14.09, + 'wind_speed': 7.77, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T22:00:00Z', + 'dew_point': 21.0, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.72, + 'temperature': 23.8, + 'uv_index': 1, + 'wind_bearing': 152, + 'wind_gust_speed': 14.04, + 'wind_speed': 7.25, + }), + dict({ + 'apparent_temperature': 27.8, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T23:00:00Z', + 'dew_point': 21.4, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.85, + 'temperature': 25.5, + 'uv_index': 2, + 'wind_bearing': 149, + 'wind_gust_speed': 15.31, + 'wind_speed': 7.14, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-13T00:00:00Z', + 'dew_point': 21.8, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.89, + 'temperature': 27.1, + 'uv_index': 4, + 'wind_bearing': 141, + 'wind_gust_speed': 16.42, + 'wind_speed': 6.89, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T01:00:00Z', + 'dew_point': 22.0, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.65, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 137, + 'wind_gust_speed': 18.64, + 'wind_speed': 6.65, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T02:00:00Z', + 'dew_point': 21.9, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.26, + 'temperature': 29.4, + 'uv_index': 5, + 'wind_bearing': 128, + 'wind_gust_speed': 21.69, + 'wind_speed': 7.12, + }), + dict({ + 'apparent_temperature': 33.0, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T03:00:00Z', + 'dew_point': 21.9, + 'humidity': 62, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.88, + 'temperature': 30.1, + 'uv_index': 6, + 'wind_bearing': 111, + 'wind_gust_speed': 23.41, + 'wind_speed': 7.33, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 72.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T04:00:00Z', + 'dew_point': 22.0, + 'humidity': 61, + 'precipitation': 0.9, + 'precipitation_probability': 12.0, + 'pressure': 1011.55, + 'temperature': 30.4, + 'uv_index': 5, + 'wind_bearing': 56, + 'wind_gust_speed': 23.1, + 'wind_speed': 8.09, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 72.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T05:00:00Z', + 'dew_point': 21.9, + 'humidity': 61, + 'precipitation': 1.9, + 'precipitation_probability': 12.0, + 'pressure': 1011.29, + 'temperature': 30.2, + 'uv_index': 4, + 'wind_bearing': 20, + 'wind_gust_speed': 21.81, + 'wind_speed': 9.46, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 74.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T06:00:00Z', + 'dew_point': 21.9, + 
'humidity': 63, + 'precipitation': 2.3, + 'precipitation_probability': 11.0, + 'pressure': 1011.17, + 'temperature': 29.7, + 'uv_index': 3, + 'wind_bearing': 20, + 'wind_gust_speed': 19.72, + 'wind_speed': 9.8, + }), + dict({ + 'apparent_temperature': 31.8, + 'cloud_coverage': 69.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T07:00:00Z', + 'dew_point': 22.4, + 'humidity': 68, + 'precipitation': 1.8, + 'precipitation_probability': 10.0, + 'pressure': 1011.32, + 'temperature': 28.8, + 'uv_index': 1, + 'wind_bearing': 18, + 'wind_gust_speed': 17.55, + 'wind_speed': 9.23, + }), + dict({ + 'apparent_temperature': 30.8, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T08:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.8, + 'precipitation_probability': 10.0, + 'pressure': 1011.6, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 27, + 'wind_gust_speed': 15.08, + 'wind_speed': 8.05, + }), + dict({ + 'apparent_temperature': 29.4, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T09:00:00Z', + 'dew_point': 23.0, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.94, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 32, + 'wind_gust_speed': 12.17, + 'wind_speed': 6.68, + }), + dict({ + 'apparent_temperature': 28.5, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T10:00:00Z', + 'dew_point': 22.9, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.3, + 'temperature': 25.5, + 'uv_index': 0, + 'wind_bearing': 69, + 'wind_gust_speed': 11.64, + 'wind_speed': 6.69, + }), + dict({ + 'apparent_temperature': 27.7, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T11:00:00Z', + 'dew_point': 22.6, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.71, + 'temperature': 25.0, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 11.91, + 'wind_speed': 6.23, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T12:00:00Z', + 'dew_point': 22.3, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.96, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 12.47, + 'wind_speed': 5.73, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T13:00:00Z', + 'dew_point': 22.3, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.03, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 13.57, + 'wind_speed': 5.66, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T14:00:00Z', + 'dew_point': 22.2, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.99, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 15.07, + 'wind_speed': 5.83, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T15:00:00Z', + 'dew_point': 22.2, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.95, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 16.06, + 'wind_speed': 5.93, + }), + 
dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T16:00:00Z', + 'dew_point': 22.0, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.9, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 153, + 'wind_gust_speed': 16.05, + 'wind_speed': 5.75, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T17:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.85, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 15.52, + 'wind_speed': 5.49, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 92.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T18:00:00Z', + 'dew_point': 21.8, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.87, + 'temperature': 23.0, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 15.01, + 'wind_speed': 5.32, + }), + dict({ + 'apparent_temperature': 25.0, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T19:00:00Z', + 'dew_point': 21.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.01, + 'temperature': 22.8, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 14.39, + 'wind_speed': 5.33, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T20:00:00Z', + 'dew_point': 21.6, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.22, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 13.79, + 'wind_speed': 5.43, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T21:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.41, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 14.12, + 'wind_speed': 5.52, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 77.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T22:00:00Z', + 'dew_point': 22.1, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.59, + 'temperature': 24.3, + 'uv_index': 1, + 'wind_bearing': 147, + 'wind_gust_speed': 16.14, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T23:00:00Z', + 'dew_point': 22.4, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.74, + 'temperature': 25.7, + 'uv_index': 2, + 'wind_bearing': 146, + 'wind_gust_speed': 19.09, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 30.5, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T00:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.78, + 'temperature': 27.4, + 'uv_index': 4, + 'wind_bearing': 143, + 'wind_gust_speed': 21.6, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 32.2, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T01:00:00Z', + 'dew_point': 23.2, + 'humidity': 72, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1013.61, + 'temperature': 28.7, + 'uv_index': 5, + 'wind_bearing': 138, + 'wind_gust_speed': 23.36, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T02:00:00Z', + 'dew_point': 23.2, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.32, + 'temperature': 29.9, + 'uv_index': 6, + 'wind_bearing': 111, + 'wind_gust_speed': 24.72, + 'wind_speed': 4.99, + }), + dict({ + 'apparent_temperature': 34.4, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T03:00:00Z', + 'dew_point': 23.3, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.04, + 'temperature': 30.7, + 'uv_index': 6, + 'wind_bearing': 354, + 'wind_gust_speed': 25.23, + 'wind_speed': 4.74, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T04:00:00Z', + 'dew_point': 23.4, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.77, + 'temperature': 31.0, + 'uv_index': 6, + 'wind_bearing': 341, + 'wind_gust_speed': 24.6, + 'wind_speed': 4.79, + }), + dict({ + 'apparent_temperature': 34.5, + 'cloud_coverage': 60.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T05:00:00Z', + 'dew_point': 23.2, + 'humidity': 64, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1012.53, + 'temperature': 30.7, + 'uv_index': 5, + 'wind_bearing': 336, + 'wind_gust_speed': 23.28, + 'wind_speed': 5.07, + }), + dict({ + 'apparent_temperature': 33.8, + 'cloud_coverage': 59.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T06:00:00Z', + 'dew_point': 23.1, + 'humidity': 66, + 'precipitation': 0.2, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1012.49, + 'temperature': 30.2, + 'uv_index': 3, + 'wind_bearing': 336, + 'wind_gust_speed': 22.05, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 32.9, + 'cloud_coverage': 53.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T07:00:00Z', + 'dew_point': 23.0, + 'humidity': 68, + 'precipitation': 0.2, + 'precipitation_probability': 40.0, + 'pressure': 1012.73, + 'temperature': 29.5, + 'uv_index': 2, + 'wind_bearing': 339, + 'wind_gust_speed': 21.18, + 'wind_speed': 5.63, + }), + dict({ + 'apparent_temperature': 31.6, + 'cloud_coverage': 43.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T08:00:00Z', + 'dew_point': 22.8, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 45.0, + 'pressure': 1013.16, + 'temperature': 28.4, + 'uv_index': 0, + 'wind_bearing': 342, + 'wind_gust_speed': 20.35, + 'wind_speed': 5.93, + }), + dict({ + 'apparent_temperature': 30.0, + 'cloud_coverage': 35.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T09:00:00Z', + 'dew_point': 22.5, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1013.62, + 'temperature': 27.1, + 'uv_index': 0, + 'wind_bearing': 347, + 'wind_gust_speed': 19.42, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 29.0, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T10:00:00Z', + 'dew_point': 22.4, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.09, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 348, + 'wind_gust_speed': 18.19, + 
'wind_speed': 5.31, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T11:00:00Z', + 'dew_point': 22.4, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.56, + 'temperature': 25.5, + 'uv_index': 0, + 'wind_bearing': 177, + 'wind_gust_speed': 16.79, + 'wind_speed': 4.28, + }), + dict({ + 'apparent_temperature': 27.5, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T12:00:00Z', + 'dew_point': 22.3, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.87, + 'temperature': 24.9, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 15.61, + 'wind_speed': 3.72, + }), + dict({ + 'apparent_temperature': 26.6, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T13:00:00Z', + 'dew_point': 22.1, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.91, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 14.7, + 'wind_speed': 4.11, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T14:00:00Z', + 'dew_point': 21.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.8, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 13.81, + 'wind_speed': 4.97, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T15:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.66, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 170, + 'wind_gust_speed': 12.88, + 'wind_speed': 5.57, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 37.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T16:00:00Z', + 'dew_point': 21.5, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.54, + 'temperature': 22.7, + 'uv_index': 0, + 'wind_bearing': 168, + 'wind_gust_speed': 12.0, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 39.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T17:00:00Z', + 'dew_point': 21.3, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.45, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 165, + 'wind_gust_speed': 11.43, + 'wind_speed': 5.48, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T18:00:00Z', + 'dew_point': 21.4, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 44.0, + 'pressure': 1014.45, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 11.42, + 'wind_speed': 5.38, + }), + dict({ + 'apparent_temperature': 25.0, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T19:00:00Z', + 'dew_point': 21.6, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 52.0, + 'pressure': 1014.63, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 12.15, + 'wind_speed': 5.39, + }), + dict({ + 'apparent_temperature': 25.6, + 'cloud_coverage': 38.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T20:00:00Z', + 'dew_point': 21.8, + 'humidity': 
91, + 'precipitation': 0.0, + 'precipitation_probability': 51.0, + 'pressure': 1014.91, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 13.54, + 'wind_speed': 5.45, + }), + dict({ + 'apparent_temperature': 26.6, + 'cloud_coverage': 36.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T21:00:00Z', + 'dew_point': 22.0, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 42.0, + 'pressure': 1015.18, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 15.48, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 28.5, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T22:00:00Z', + 'dew_point': 22.5, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 28.999999999999996, + 'pressure': 1015.4, + 'temperature': 25.7, + 'uv_index': 1, + 'wind_bearing': 158, + 'wind_gust_speed': 17.86, + 'wind_speed': 5.84, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 77, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.54, + 'temperature': 27.2, + 'uv_index': 2, + 'wind_bearing': 155, + 'wind_gust_speed': 20.19, + 'wind_speed': 6.09, + }), + dict({ + 'apparent_temperature': 32.1, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-15T00:00:00Z', + 'dew_point': 23.3, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.55, + 'temperature': 28.6, + 'uv_index': 4, + 'wind_bearing': 152, + 'wind_gust_speed': 21.83, + 'wind_speed': 6.42, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-15T01:00:00Z', + 'dew_point': 23.5, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.35, + 'temperature': 29.6, + 'uv_index': 6, + 'wind_bearing': 144, + 'wind_gust_speed': 22.56, + 'wind_speed': 6.91, + }), + dict({ + 'apparent_temperature': 34.2, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T02:00:00Z', + 'dew_point': 23.5, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.0, + 'temperature': 30.4, + 'uv_index': 7, + 'wind_bearing': 336, + 'wind_gust_speed': 22.83, + 'wind_speed': 7.47, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T03:00:00Z', + 'dew_point': 23.5, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.62, + 'temperature': 30.9, + 'uv_index': 7, + 'wind_bearing': 336, + 'wind_gust_speed': 22.98, + 'wind_speed': 7.95, + }), + dict({ + 'apparent_temperature': 35.4, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T04:00:00Z', + 'dew_point': 23.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.25, + 'temperature': 31.3, + 'uv_index': 6, + 'wind_bearing': 341, + 'wind_gust_speed': 23.21, + 'wind_speed': 8.44, + }), + dict({ + 'apparent_temperature': 35.6, + 'cloud_coverage': 44.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T05:00:00Z', + 'dew_point': 23.7, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.95, + 'temperature': 31.5, + 'uv_index': 5, + 'wind_bearing': 344, + 'wind_gust_speed': 23.46, + 
'wind_speed': 8.95, + }), + dict({ + 'apparent_temperature': 35.1, + 'cloud_coverage': 42.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T06:00:00Z', + 'dew_point': 23.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.83, + 'temperature': 31.1, + 'uv_index': 3, + 'wind_bearing': 347, + 'wind_gust_speed': 23.64, + 'wind_speed': 9.13, + }), + dict({ + 'apparent_temperature': 34.1, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T07:00:00Z', + 'dew_point': 23.4, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.96, + 'temperature': 30.3, + 'uv_index': 2, + 'wind_bearing': 350, + 'wind_gust_speed': 23.66, + 'wind_speed': 8.78, + }), + dict({ + 'apparent_temperature': 32.4, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T08:00:00Z', + 'dew_point': 23.1, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.25, + 'temperature': 29.0, + 'uv_index': 0, + 'wind_bearing': 356, + 'wind_gust_speed': 23.51, + 'wind_speed': 8.13, + }), + dict({ + 'apparent_temperature': 31.1, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T09:00:00Z', + 'dew_point': 22.9, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.61, + 'temperature': 27.9, + 'uv_index': 0, + 'wind_bearing': 3, + 'wind_gust_speed': 23.21, + 'wind_speed': 7.48, + }), + dict({ + 'apparent_temperature': 30.0, + 'cloud_coverage': 43.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T10:00:00Z', + 'dew_point': 22.8, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.02, + 'temperature': 26.9, + 'uv_index': 0, + 'wind_bearing': 20, + 'wind_gust_speed': 22.68, + 'wind_speed': 6.83, + }), + dict({ + 'apparent_temperature': 29.2, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T11:00:00Z', + 'dew_point': 22.8, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.43, + 'temperature': 26.2, + 'uv_index': 0, + 'wind_bearing': 129, + 'wind_gust_speed': 22.04, + 'wind_speed': 6.1, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T12:00:00Z', + 'dew_point': 22.7, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.71, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 21.64, + 'wind_speed': 5.6, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T13:00:00Z', + 'dew_point': 23.2, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.52, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 164, + 'wind_gust_speed': 16.35, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T14:00:00Z', + 'dew_point': 22.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.37, + 'temperature': 24.6, + 'uv_index': 0, + 'wind_bearing': 168, + 'wind_gust_speed': 17.11, + 'wind_speed': 5.79, + }), + dict({ + 'apparent_temperature': 26.9, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T15:00:00Z', + 'dew_point': 22.7, + 
'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.21, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 17.32, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T16:00:00Z', + 'dew_point': 22.6, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.07, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 201, + 'wind_gust_speed': 16.6, + 'wind_speed': 5.27, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T17:00:00Z', + 'dew_point': 22.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.95, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 219, + 'wind_gust_speed': 15.52, + 'wind_speed': 4.62, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T18:00:00Z', + 'dew_point': 22.3, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.88, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 216, + 'wind_gust_speed': 14.64, + 'wind_speed': 4.32, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T19:00:00Z', + 'dew_point': 22.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.91, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 198, + 'wind_gust_speed': 14.06, + 'wind_speed': 4.73, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T20:00:00Z', + 'dew_point': 22.4, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.99, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 189, + 'wind_gust_speed': 13.7, + 'wind_speed': 5.49, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T21:00:00Z', + 'dew_point': 22.5, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.07, + 'temperature': 24.4, + 'uv_index': 0, + 'wind_bearing': 183, + 'wind_gust_speed': 13.77, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 28.3, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T22:00:00Z', + 'dew_point': 22.6, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.12, + 'temperature': 25.5, + 'uv_index': 1, + 'wind_bearing': 179, + 'wind_gust_speed': 14.38, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 29.9, + 'cloud_coverage': 52.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.13, + 'temperature': 26.9, + 'uv_index': 2, + 'wind_bearing': 170, + 'wind_gust_speed': 15.2, + 'wind_speed': 5.27, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 44.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T00:00:00Z', + 'dew_point': 22.9, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.04, + 'temperature': 28.0, + 'uv_index': 4, + 'wind_bearing': 155, + 'wind_gust_speed': 15.85, + 
'wind_speed': 4.76, + }), + dict({ + 'apparent_temperature': 32.5, + 'cloud_coverage': 24.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T01:00:00Z', + 'dew_point': 22.6, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.52, + 'temperature': 29.2, + 'uv_index': 6, + 'wind_bearing': 110, + 'wind_gust_speed': 16.27, + 'wind_speed': 6.81, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 16.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T02:00:00Z', + 'dew_point': 22.4, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.01, + 'temperature': 30.2, + 'uv_index': 8, + 'wind_bearing': 30, + 'wind_gust_speed': 16.55, + 'wind_speed': 6.86, + }), + dict({ + 'apparent_temperature': 34.2, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T03:00:00Z', + 'dew_point': 22.0, + 'humidity': 59, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.45, + 'temperature': 31.1, + 'uv_index': 8, + 'wind_bearing': 17, + 'wind_gust_speed': 16.52, + 'wind_speed': 6.8, + }), + dict({ + 'apparent_temperature': 34.7, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T04:00:00Z', + 'dew_point': 21.9, + 'humidity': 57, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.89, + 'temperature': 31.5, + 'uv_index': 8, + 'wind_bearing': 17, + 'wind_gust_speed': 16.08, + 'wind_speed': 6.62, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T05:00:00Z', + 'dew_point': 21.9, + 'humidity': 56, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.39, + 'temperature': 31.8, + 'uv_index': 6, + 'wind_bearing': 20, + 'wind_gust_speed': 15.48, + 'wind_speed': 6.45, + }), + dict({ + 'apparent_temperature': 34.5, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T06:00:00Z', + 'dew_point': 21.7, + 'humidity': 56, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.11, + 'temperature': 31.4, + 'uv_index': 4, + 'wind_bearing': 26, + 'wind_gust_speed': 15.08, + 'wind_speed': 6.43, + }), + dict({ + 'apparent_temperature': 33.6, + 'cloud_coverage': 7.000000000000001, + 'condition': 'sunny', + 'datetime': '2023-09-16T07:00:00Z', + 'dew_point': 21.7, + 'humidity': 59, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.15, + 'temperature': 30.7, + 'uv_index': 2, + 'wind_bearing': 39, + 'wind_gust_speed': 14.88, + 'wind_speed': 6.61, + }), + dict({ + 'apparent_temperature': 32.5, + 'cloud_coverage': 2.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.41, + 'temperature': 29.6, + 'uv_index': 0, + 'wind_bearing': 72, + 'wind_gust_speed': 14.82, + 'wind_speed': 6.95, + }), + dict({ + 'apparent_temperature': 31.4, + 'cloud_coverage': 2.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T09:00:00Z', + 'dew_point': 22.1, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.75, + 'temperature': 28.5, + 'uv_index': 0, + 'wind_bearing': 116, + 'wind_gust_speed': 15.13, + 'wind_speed': 7.45, + }), + dict({ + 'apparent_temperature': 30.5, + 'cloud_coverage': 13.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T10:00:00Z', + 'dew_point': 22.3, + 'humidity': 73, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1013.13, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 16.09, + 'wind_speed': 8.15, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T11:00:00Z', + 'dew_point': 22.6, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.47, + 'temperature': 26.9, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 17.37, + 'wind_speed': 8.87, + }), + dict({ + 'apparent_temperature': 29.3, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T12:00:00Z', + 'dew_point': 22.9, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.6, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 18.29, + 'wind_speed': 9.21, + }), + dict({ + 'apparent_temperature': 28.7, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T13:00:00Z', + 'dew_point': 23.0, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.41, + 'temperature': 25.7, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 18.49, + 'wind_speed': 8.96, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 55.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T14:00:00Z', + 'dew_point': 22.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.01, + 'temperature': 25.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 18.47, + 'wind_speed': 8.45, + }), + dict({ + 'apparent_temperature': 27.2, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T15:00:00Z', + 'dew_point': 22.7, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.55, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 18.79, + 'wind_speed': 8.1, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T16:00:00Z', + 'dew_point': 22.6, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.1, + 'temperature': 24.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 19.81, + 'wind_speed': 8.15, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T17:00:00Z', + 'dew_point': 22.6, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.68, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 20.96, + 'wind_speed': 8.3, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T18:00:00Z', + 'dew_point': 22.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 21.41, + 'wind_speed': 8.24, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T19:00:00Z', + 'dew_point': 22.5, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 20.42, + 'wind_speed': 7.62, + }), + 
dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T20:00:00Z', + 'dew_point': 22.6, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.31, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 18.61, + 'wind_speed': 6.66, + }), + dict({ + 'apparent_temperature': 27.7, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T21:00:00Z', + 'dew_point': 22.6, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.37, + 'temperature': 24.9, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 17.14, + 'wind_speed': 5.86, + }), + dict({ + 'apparent_temperature': 28.9, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T22:00:00Z', + 'dew_point': 22.6, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.46, + 'temperature': 26.0, + 'uv_index': 1, + 'wind_bearing': 161, + 'wind_gust_speed': 16.78, + 'wind_speed': 5.5, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 39.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.51, + 'temperature': 27.5, + 'uv_index': 2, + 'wind_bearing': 165, + 'wind_gust_speed': 17.21, + 'wind_speed': 5.56, + }), + dict({ + 'apparent_temperature': 31.7, + 'cloud_coverage': 33.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T00:00:00Z', + 'dew_point': 22.8, + 'humidity': 71, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 28.5, + 'uv_index': 4, + 'wind_bearing': 174, + 'wind_gust_speed': 17.96, + 'wind_speed': 6.04, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T01:00:00Z', + 'dew_point': 22.7, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.98, + 'temperature': 29.4, + 'uv_index': 6, + 'wind_bearing': 192, + 'wind_gust_speed': 19.15, + 'wind_speed': 7.23, + }), + dict({ + 'apparent_temperature': 33.6, + 'cloud_coverage': 28.999999999999996, + 'condition': 'sunny', + 'datetime': '2023-09-17T02:00:00Z', + 'dew_point': 22.8, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.38, + 'temperature': 30.1, + 'uv_index': 7, + 'wind_bearing': 225, + 'wind_gust_speed': 20.89, + 'wind_speed': 8.9, + }), + dict({ + 'apparent_temperature': 34.1, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T03:00:00Z', + 'dew_point': 22.8, + 'humidity': 63, + 'precipitation': 0.3, + 'precipitation_probability': 9.0, + 'pressure': 1009.75, + 'temperature': 30.7, + 'uv_index': 8, + 'wind_bearing': 264, + 'wind_gust_speed': 22.67, + 'wind_speed': 10.27, + }), + dict({ + 'apparent_temperature': 33.9, + 'cloud_coverage': 37.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T04:00:00Z', + 'dew_point': 22.5, + 'humidity': 62, + 'precipitation': 0.4, + 'precipitation_probability': 10.0, + 'pressure': 1009.18, + 'temperature': 30.5, + 'uv_index': 7, + 'wind_bearing': 293, + 'wind_gust_speed': 23.93, + 'wind_speed': 10.82, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T05:00:00Z', + 'dew_point': 22.4, + 'humidity': 63, + 
'precipitation': 0.6, + 'precipitation_probability': 12.0, + 'pressure': 1008.71, + 'temperature': 30.1, + 'uv_index': 5, + 'wind_bearing': 308, + 'wind_gust_speed': 24.39, + 'wind_speed': 10.72, + }), + dict({ + 'apparent_temperature': 32.7, + 'cloud_coverage': 50.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T06:00:00Z', + 'dew_point': 22.2, + 'humidity': 64, + 'precipitation': 0.7, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1008.46, + 'temperature': 29.6, + 'uv_index': 3, + 'wind_bearing': 312, + 'wind_gust_speed': 23.9, + 'wind_speed': 10.28, + }), + dict({ + 'apparent_temperature': 31.8, + 'cloud_coverage': 47.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T07:00:00Z', + 'dew_point': 22.1, + 'humidity': 67, + 'precipitation': 0.7, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1008.53, + 'temperature': 28.9, + 'uv_index': 1, + 'wind_bearing': 312, + 'wind_gust_speed': 22.3, + 'wind_speed': 9.59, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 70, + 'precipitation': 0.6, + 'precipitation_probability': 15.0, + 'pressure': 1008.82, + 'temperature': 27.9, + 'uv_index': 0, + 'wind_bearing': 305, + 'wind_gust_speed': 19.73, + 'wind_speed': 8.58, + }), + dict({ + 'apparent_temperature': 29.6, + 'cloud_coverage': 35.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T09:00:00Z', + 'dew_point': 22.0, + 'humidity': 74, + 'precipitation': 0.5, + 'precipitation_probability': 15.0, + 'pressure': 1009.21, + 'temperature': 27.0, + 'uv_index': 0, + 'wind_bearing': 291, + 'wind_gust_speed': 16.49, + 'wind_speed': 7.34, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 33.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T10:00:00Z', + 'dew_point': 21.9, + 'humidity': 78, + 'precipitation': 0.4, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1009.65, + 'temperature': 26.1, + 'uv_index': 0, + 'wind_bearing': 257, + 'wind_gust_speed': 12.71, + 'wind_speed': 5.91, + }), + dict({ + 'apparent_temperature': 27.8, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T11:00:00Z', + 'dew_point': 21.9, + 'humidity': 82, + 'precipitation': 0.3, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1010.04, + 'temperature': 25.3, + 'uv_index': 0, + 'wind_bearing': 212, + 'wind_gust_speed': 9.16, + 'wind_speed': 4.54, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 36.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T12:00:00Z', + 'dew_point': 21.9, + 'humidity': 85, + 'precipitation': 0.3, + 'precipitation_probability': 28.000000000000004, + 'pressure': 1010.24, + 'temperature': 24.6, + 'uv_index': 0, + 'wind_bearing': 192, + 'wind_gust_speed': 7.09, + 'wind_speed': 3.62, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T13:00:00Z', + 'dew_point': 22.0, + 'humidity': 88, + 'precipitation': 0.3, + 'precipitation_probability': 30.0, + 'pressure': 1010.15, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 185, + 'wind_gust_speed': 7.2, + 'wind_speed': 3.27, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 44.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T14:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.3, + 'precipitation_probability': 30.0, + 'pressure': 1009.87, + 'temperature': 23.6, + 'uv_index': 0, 
+ 'wind_bearing': 182, + 'wind_gust_speed': 8.37, + 'wind_speed': 3.22, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 49.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T15:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.2, + 'precipitation_probability': 31.0, + 'pressure': 1009.56, + 'temperature': 23.2, + 'uv_index': 0, + 'wind_bearing': 180, + 'wind_gust_speed': 9.21, + 'wind_speed': 3.3, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 53.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T16:00:00Z', + 'dew_point': 21.8, + 'humidity': 94, + 'precipitation': 0.2, + 'precipitation_probability': 33.0, + 'pressure': 1009.29, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 9.0, + 'wind_speed': 3.46, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T17:00:00Z', + 'dew_point': 21.7, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 35.0, + 'pressure': 1009.09, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 186, + 'wind_gust_speed': 8.37, + 'wind_speed': 3.72, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T18:00:00Z', + 'dew_point': 21.6, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 37.0, + 'pressure': 1009.01, + 'temperature': 22.5, + 'uv_index': 0, + 'wind_bearing': 201, + 'wind_gust_speed': 7.99, + 'wind_speed': 4.07, + }), + dict({ + 'apparent_temperature': 24.9, + 'cloud_coverage': 62.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T19:00:00Z', + 'dew_point': 21.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 39.0, + 'pressure': 1009.07, + 'temperature': 22.7, + 'uv_index': 0, + 'wind_bearing': 258, + 'wind_gust_speed': 8.18, + 'wind_speed': 4.55, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T20:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 39.0, + 'pressure': 1009.23, + 'temperature': 23.0, + 'uv_index': 0, + 'wind_bearing': 305, + 'wind_gust_speed': 8.77, + 'wind_speed': 5.17, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T21:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 38.0, + 'pressure': 1009.47, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 318, + 'wind_gust_speed': 9.69, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T22:00:00Z', + 'dew_point': 21.8, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 30.0, + 'pressure': 1009.77, + 'temperature': 24.2, + 'uv_index': 1, + 'wind_bearing': 324, + 'wind_gust_speed': 10.88, + 'wind_speed': 6.26, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 80.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T23:00:00Z', + 'dew_point': 21.9, + 'humidity': 83, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1010.09, + 'temperature': 25.1, + 'uv_index': 2, + 'wind_bearing': 329, + 'wind_gust_speed': 12.21, + 'wind_speed': 6.68, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 87.0, + 'condition': 'cloudy', + 'datetime': 
'2023-09-18T00:00:00Z', + 'dew_point': 21.9, + 'humidity': 80, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1010.33, + 'temperature': 25.7, + 'uv_index': 3, + 'wind_bearing': 332, + 'wind_gust_speed': 13.52, + 'wind_speed': 7.12, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 67.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T01:00:00Z', + 'dew_point': 21.7, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1007.43, + 'temperature': 27.2, + 'uv_index': 5, + 'wind_bearing': 330, + 'wind_gust_speed': 11.36, + 'wind_speed': 11.36, + }), + dict({ + 'apparent_temperature': 30.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T02:00:00Z', + 'dew_point': 21.6, + 'humidity': 70, + 'precipitation': 0.3, + 'precipitation_probability': 9.0, + 'pressure': 1007.05, + 'temperature': 27.5, + 'uv_index': 6, + 'wind_bearing': 332, + 'wind_gust_speed': 12.06, + 'wind_speed': 12.06, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T03:00:00Z', + 'dew_point': 21.6, + 'humidity': 69, + 'precipitation': 0.5, + 'precipitation_probability': 10.0, + 'pressure': 1006.67, + 'temperature': 27.8, + 'uv_index': 6, + 'wind_bearing': 333, + 'wind_gust_speed': 12.81, + 'wind_speed': 12.81, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 67.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T04:00:00Z', + 'dew_point': 21.5, + 'humidity': 68, + 'precipitation': 0.4, + 'precipitation_probability': 10.0, + 'pressure': 1006.28, + 'temperature': 28.0, + 'uv_index': 5, + 'wind_bearing': 335, + 'wind_gust_speed': 13.68, + 'wind_speed': 13.68, + }), + dict({ + 'apparent_temperature': 30.7, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T05:00:00Z', + 'dew_point': 21.4, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1005.89, + 'temperature': 28.1, + 'uv_index': 4, + 'wind_bearing': 336, + 'wind_gust_speed': 14.61, + 'wind_speed': 14.61, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T06:00:00Z', + 'dew_point': 21.2, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 27.0, + 'pressure': 1005.67, + 'temperature': 27.9, + 'uv_index': 3, + 'wind_bearing': 338, + 'wind_gust_speed': 15.25, + 'wind_speed': 15.25, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T07:00:00Z', + 'dew_point': 21.3, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 28.000000000000004, + 'pressure': 1005.74, + 'temperature': 27.4, + 'uv_index': 1, + 'wind_bearing': 339, + 'wind_gust_speed': 15.45, + 'wind_speed': 15.45, + }), + dict({ + 'apparent_temperature': 29.1, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T08:00:00Z', + 'dew_point': 21.4, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1005.98, + 'temperature': 26.7, + 'uv_index': 0, + 'wind_bearing': 341, + 'wind_gust_speed': 15.38, + 'wind_speed': 15.38, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T09:00:00Z', + 'dew_point': 21.6, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1006.22, + 
'temperature': 26.1, + 'uv_index': 0, + 'wind_bearing': 341, + 'wind_gust_speed': 15.27, + 'wind_speed': 15.27, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T10:00:00Z', + 'dew_point': 21.6, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1006.44, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 339, + 'wind_gust_speed': 15.09, + 'wind_speed': 15.09, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T11:00:00Z', + 'dew_point': 21.7, + 'humidity': 81, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1006.66, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 336, + 'wind_gust_speed': 14.88, + 'wind_speed': 14.88, + }), + dict({ + 'apparent_temperature': 27.2, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1006.79, + 'temperature': 24.8, + 'uv_index': 0, + 'wind_bearing': 333, + 'wind_gust_speed': 14.91, + 'wind_speed': 14.91, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 38.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T13:00:00Z', + 'dew_point': 21.2, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.36, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 83, + 'wind_gust_speed': 4.58, + 'wind_speed': 3.16, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T14:00:00Z', + 'dew_point': 21.2, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.96, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 4.74, + 'wind_speed': 4.52, + }), + dict({ + 'apparent_temperature': 24.5, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T15:00:00Z', + 'dew_point': 20.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.6, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 152, + 'wind_gust_speed': 5.63, + 'wind_speed': 5.63, + }), + dict({ + 'apparent_temperature': 24.0, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T16:00:00Z', + 'dew_point': 20.7, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.37, + 'temperature': 22.3, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 6.02, + 'wind_speed': 6.02, + }), + dict({ + 'apparent_temperature': 23.7, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T17:00:00Z', + 'dew_point': 20.4, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.2, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 6.15, + 'wind_speed': 6.15, + }), + dict({ + 'apparent_temperature': 23.4, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T18:00:00Z', + 'dew_point': 20.2, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.08, + 'temperature': 21.9, + 'uv_index': 0, + 'wind_bearing': 167, + 'wind_gust_speed': 6.48, + 'wind_speed': 6.48, + }), + dict({ + 'apparent_temperature': 23.2, + 'cloud_coverage': 100.0, + 
'condition': 'cloudy', + 'datetime': '2023-09-18T19:00:00Z', + 'dew_point': 19.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.04, + 'temperature': 21.8, + 'uv_index': 0, + 'wind_bearing': 165, + 'wind_gust_speed': 7.51, + 'wind_speed': 7.51, + }), + dict({ + 'apparent_temperature': 23.4, + 'cloud_coverage': 99.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T20:00:00Z', + 'dew_point': 19.6, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.05, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 8.73, + 'wind_speed': 8.73, + }), + dict({ + 'apparent_temperature': 23.9, + 'cloud_coverage': 98.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T21:00:00Z', + 'dew_point': 19.5, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.06, + 'temperature': 22.5, + 'uv_index': 0, + 'wind_bearing': 164, + 'wind_gust_speed': 9.21, + 'wind_speed': 9.11, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 96.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T22:00:00Z', + 'dew_point': 19.7, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.09, + 'temperature': 23.8, + 'uv_index': 1, + 'wind_bearing': 171, + 'wind_gust_speed': 9.03, + 'wind_speed': 7.91, + }), + ]), + }) +# --- +# name: test_hourly_forecast[forecast] + dict({ + 'weather.home': dict({ + 'forecast': list([ + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 79.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T14:00:00Z', + 'dew_point': 21.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.24, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 264, + 'wind_gust_speed': 13.44, + 'wind_speed': 6.62, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 80.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T15:00:00Z', + 'dew_point': 21.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.24, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 261, + 'wind_gust_speed': 11.91, + 'wind_speed': 6.64, + }), + dict({ + 'apparent_temperature': 23.8, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T16:00:00Z', + 'dew_point': 21.1, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.12, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 252, + 'wind_gust_speed': 11.15, + 'wind_speed': 6.14, + }), + dict({ + 'apparent_temperature': 23.5, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T17:00:00Z', + 'dew_point': 20.9, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.03, + 'temperature': 21.7, + 'uv_index': 0, + 'wind_bearing': 248, + 'wind_gust_speed': 11.57, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 23.3, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T18:00:00Z', + 'dew_point': 20.8, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.05, + 'temperature': 21.6, + 'uv_index': 0, + 'wind_bearing': 237, + 'wind_gust_speed': 12.42, + 'wind_speed': 5.86, + }), + dict({ + 'apparent_temperature': 23.0, + 'cloud_coverage': 75.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T19:00:00Z', + 'dew_point': 20.6, + 'humidity': 96, + 
'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.03, + 'temperature': 21.3, + 'uv_index': 0, + 'wind_bearing': 224, + 'wind_gust_speed': 11.3, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 22.8, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T20:00:00Z', + 'dew_point': 20.4, + 'humidity': 96, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.31, + 'temperature': 21.2, + 'uv_index': 0, + 'wind_bearing': 221, + 'wind_gust_speed': 10.57, + 'wind_speed': 5.13, + }), + dict({ + 'apparent_temperature': 23.1, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-08T21:00:00Z', + 'dew_point': 20.5, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.55, + 'temperature': 21.4, + 'uv_index': 0, + 'wind_bearing': 237, + 'wind_gust_speed': 10.63, + 'wind_speed': 5.7, + }), + dict({ + 'apparent_temperature': 24.9, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-08T22:00:00Z', + 'dew_point': 21.3, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.79, + 'temperature': 22.8, + 'uv_index': 1, + 'wind_bearing': 258, + 'wind_gust_speed': 10.47, + 'wind_speed': 5.22, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T23:00:00Z', + 'dew_point': 21.3, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.95, + 'temperature': 24.0, + 'uv_index': 2, + 'wind_bearing': 282, + 'wind_gust_speed': 12.74, + 'wind_speed': 5.71, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T00:00:00Z', + 'dew_point': 21.5, + 'humidity': 80, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.35, + 'temperature': 25.1, + 'uv_index': 3, + 'wind_bearing': 294, + 'wind_gust_speed': 13.87, + 'wind_speed': 6.53, + }), + dict({ + 'apparent_temperature': 29.0, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T01:00:00Z', + 'dew_point': 21.8, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.48, + 'temperature': 26.5, + 'uv_index': 5, + 'wind_bearing': 308, + 'wind_gust_speed': 16.04, + 'wind_speed': 6.54, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T02:00:00Z', + 'dew_point': 22.0, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.23, + 'temperature': 27.6, + 'uv_index': 6, + 'wind_bearing': 314, + 'wind_gust_speed': 18.1, + 'wind_speed': 7.32, + }), + dict({ + 'apparent_temperature': 31.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T03:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.86, + 'temperature': 28.3, + 'uv_index': 6, + 'wind_bearing': 317, + 'wind_gust_speed': 20.77, + 'wind_speed': 9.1, + }), + dict({ + 'apparent_temperature': 31.5, + 'cloud_coverage': 69.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T04:00:00Z', + 'dew_point': 22.1, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.65, + 'temperature': 28.6, + 'uv_index': 6, + 'wind_bearing': 311, + 'wind_gust_speed': 21.27, + 'wind_speed': 10.21, 
+ }), + dict({ + 'apparent_temperature': 31.3, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T05:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.48, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 317, + 'wind_gust_speed': 19.62, + 'wind_speed': 10.53, + }), + dict({ + 'apparent_temperature': 30.8, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T06:00:00Z', + 'dew_point': 22.2, + 'humidity': 71, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.54, + 'temperature': 27.9, + 'uv_index': 3, + 'wind_bearing': 335, + 'wind_gust_speed': 18.98, + 'wind_speed': 8.63, + }), + dict({ + 'apparent_temperature': 29.9, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T07:00:00Z', + 'dew_point': 22.2, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.76, + 'temperature': 27.1, + 'uv_index': 2, + 'wind_bearing': 338, + 'wind_gust_speed': 17.04, + 'wind_speed': 7.75, + }), + dict({ + 'apparent_temperature': 29.1, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T08:00:00Z', + 'dew_point': 22.1, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.05, + 'temperature': 26.4, + 'uv_index': 0, + 'wind_bearing': 342, + 'wind_gust_speed': 14.75, + 'wind_speed': 6.26, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T09:00:00Z', + 'dew_point': 22.0, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.38, + 'temperature': 25.4, + 'uv_index': 0, + 'wind_bearing': 344, + 'wind_gust_speed': 10.43, + 'wind_speed': 5.2, + }), + dict({ + 'apparent_temperature': 26.9, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T10:00:00Z', + 'dew_point': 21.9, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.73, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 339, + 'wind_gust_speed': 6.95, + 'wind_speed': 3.59, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T11:00:00Z', + 'dew_point': 21.8, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.3, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 326, + 'wind_gust_speed': 5.27, + 'wind_speed': 2.1, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 53.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.52, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 257, + 'wind_gust_speed': 5.48, + 'wind_speed': 0.93, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T13:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.53, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 188, + 'wind_gust_speed': 4.44, + 'wind_speed': 1.79, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T14:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 
'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.46, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 183, + 'wind_gust_speed': 4.49, + 'wind_speed': 2.19, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T15:00:00Z', + 'dew_point': 21.4, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.21, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 179, + 'wind_gust_speed': 5.32, + 'wind_speed': 2.65, + }), + dict({ + 'apparent_temperature': 24.0, + 'cloud_coverage': 42.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T16:00:00Z', + 'dew_point': 21.1, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.09, + 'temperature': 22.1, + 'uv_index': 0, + 'wind_bearing': 173, + 'wind_gust_speed': 5.81, + 'wind_speed': 3.2, + }), + dict({ + 'apparent_temperature': 23.7, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T17:00:00Z', + 'dew_point': 20.9, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.88, + 'temperature': 21.9, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 5.53, + 'wind_speed': 3.16, + }), + dict({ + 'apparent_temperature': 23.3, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T18:00:00Z', + 'dew_point': 20.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.94, + 'temperature': 21.6, + 'uv_index': 0, + 'wind_bearing': 153, + 'wind_gust_speed': 6.09, + 'wind_speed': 3.36, + }), + dict({ + 'apparent_temperature': 23.1, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T19:00:00Z', + 'dew_point': 20.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.96, + 'temperature': 21.4, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 6.83, + 'wind_speed': 3.71, + }), + dict({ + 'apparent_temperature': 22.5, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T20:00:00Z', + 'dew_point': 20.0, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 21.0, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 7.98, + 'wind_speed': 4.27, + }), + dict({ + 'apparent_temperature': 22.8, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T21:00:00Z', + 'dew_point': 20.2, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.61, + 'temperature': 21.2, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 8.4, + 'wind_speed': 4.69, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T22:00:00Z', + 'dew_point': 21.3, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.87, + 'temperature': 23.1, + 'uv_index': 1, + 'wind_bearing': 150, + 'wind_gust_speed': 7.66, + 'wind_speed': 4.33, + }), + dict({ + 'apparent_temperature': 28.3, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T23:00:00Z', + 'dew_point': 22.3, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 25.6, + 'uv_index': 2, + 'wind_bearing': 123, + 'wind_gust_speed': 9.63, + 
'wind_speed': 3.91, + }), + dict({ + 'apparent_temperature': 30.4, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T00:00:00Z', + 'dew_point': 22.6, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 27.4, + 'uv_index': 4, + 'wind_bearing': 105, + 'wind_gust_speed': 12.59, + 'wind_speed': 3.96, + }), + dict({ + 'apparent_temperature': 32.2, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T01:00:00Z', + 'dew_point': 22.9, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.79, + 'temperature': 28.9, + 'uv_index': 5, + 'wind_bearing': 99, + 'wind_gust_speed': 14.17, + 'wind_speed': 4.06, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 62.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-10T02:00:00Z', + 'dew_point': 22.9, + 'humidity': 66, + 'precipitation': 0.3, + 'precipitation_probability': 7.000000000000001, + 'pressure': 1011.29, + 'temperature': 29.9, + 'uv_index': 6, + 'wind_bearing': 93, + 'wind_gust_speed': 17.75, + 'wind_speed': 4.87, + }), + dict({ + 'apparent_temperature': 34.3, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T03:00:00Z', + 'dew_point': 23.1, + 'humidity': 64, + 'precipitation': 0.3, + 'precipitation_probability': 11.0, + 'pressure': 1010.78, + 'temperature': 30.6, + 'uv_index': 6, + 'wind_bearing': 78, + 'wind_gust_speed': 17.43, + 'wind_speed': 4.54, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 74.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T04:00:00Z', + 'dew_point': 23.2, + 'humidity': 66, + 'precipitation': 0.4, + 'precipitation_probability': 15.0, + 'pressure': 1010.37, + 'temperature': 30.3, + 'uv_index': 5, + 'wind_bearing': 60, + 'wind_gust_speed': 15.24, + 'wind_speed': 4.9, + }), + dict({ + 'apparent_temperature': 33.7, + 'cloud_coverage': 79.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T05:00:00Z', + 'dew_point': 23.3, + 'humidity': 67, + 'precipitation': 0.7, + 'precipitation_probability': 17.0, + 'pressure': 1010.09, + 'temperature': 30.0, + 'uv_index': 4, + 'wind_bearing': 80, + 'wind_gust_speed': 13.53, + 'wind_speed': 5.98, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 80.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T06:00:00Z', + 'dew_point': 23.4, + 'humidity': 70, + 'precipitation': 1.0, + 'precipitation_probability': 17.0, + 'pressure': 1010.0, + 'temperature': 29.5, + 'uv_index': 3, + 'wind_bearing': 83, + 'wind_gust_speed': 12.55, + 'wind_speed': 6.84, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 88.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T07:00:00Z', + 'dew_point': 23.4, + 'humidity': 73, + 'precipitation': 0.4, + 'precipitation_probability': 16.0, + 'pressure': 1010.27, + 'temperature': 28.7, + 'uv_index': 2, + 'wind_bearing': 90, + 'wind_gust_speed': 10.16, + 'wind_speed': 6.07, + }), + dict({ + 'apparent_temperature': 30.9, + 'cloud_coverage': 92.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T08:00:00Z', + 'dew_point': 23.2, + 'humidity': 77, + 'precipitation': 0.5, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1010.71, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 101, + 'wind_gust_speed': 8.18, + 'wind_speed': 4.82, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 93.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T09:00:00Z', + 'dew_point': 23.2, + 
'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.9, + 'temperature': 26.5, + 'uv_index': 0, + 'wind_bearing': 128, + 'wind_gust_speed': 8.89, + 'wind_speed': 4.95, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T10:00:00Z', + 'dew_point': 23.0, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.12, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 134, + 'wind_gust_speed': 10.03, + 'wind_speed': 4.52, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 87.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T11:00:00Z', + 'dew_point': 22.8, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.43, + 'temperature': 25.1, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 12.4, + 'wind_speed': 5.41, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T12:00:00Z', + 'dew_point': 22.5, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.58, + 'temperature': 24.8, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 16.36, + 'wind_speed': 6.31, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T13:00:00Z', + 'dew_point': 22.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.55, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 19.66, + 'wind_speed': 7.23, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T14:00:00Z', + 'dew_point': 22.2, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.4, + 'temperature': 24.3, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 21.15, + 'wind_speed': 7.46, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T15:00:00Z', + 'dew_point': 22.0, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.23, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 22.26, + 'wind_speed': 7.84, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T16:00:00Z', + 'dew_point': 21.8, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.01, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 23.53, + 'wind_speed': 8.63, + }), + dict({ + 'apparent_temperature': 25.6, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-10T17:00:00Z', + 'dew_point': 21.6, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.78, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 22.83, + 'wind_speed': 8.61, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T18:00:00Z', + 'dew_point': 21.5, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.69, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 23.7, + 'wind_speed': 8.7, + 
}), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T19:00:00Z', + 'dew_point': 21.4, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.77, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 24.24, + 'wind_speed': 8.74, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T20:00:00Z', + 'dew_point': 21.6, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.89, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 23.99, + 'wind_speed': 8.81, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T21:00:00Z', + 'dew_point': 21.6, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.1, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 25.55, + 'wind_speed': 9.05, + }), + dict({ + 'apparent_temperature': 27.0, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T22:00:00Z', + 'dew_point': 21.8, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 24.6, + 'uv_index': 1, + 'wind_bearing': 140, + 'wind_gust_speed': 29.08, + 'wind_speed': 10.37, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T23:00:00Z', + 'dew_point': 21.9, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.36, + 'temperature': 25.9, + 'uv_index': 2, + 'wind_bearing': 140, + 'wind_gust_speed': 34.13, + 'wind_speed': 12.56, + }), + dict({ + 'apparent_temperature': 30.1, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T00:00:00Z', + 'dew_point': 22.3, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 27.2, + 'uv_index': 3, + 'wind_bearing': 140, + 'wind_gust_speed': 38.2, + 'wind_speed': 15.65, + }), + dict({ + 'apparent_temperature': 31.4, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T01:00:00Z', + 'dew_point': 22.3, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.31, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 37.55, + 'wind_speed': 15.78, + }), + dict({ + 'apparent_temperature': 32.7, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T02:00:00Z', + 'dew_point': 22.4, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.98, + 'temperature': 29.6, + 'uv_index': 6, + 'wind_bearing': 143, + 'wind_gust_speed': 35.86, + 'wind_speed': 15.41, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T03:00:00Z', + 'dew_point': 22.5, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.61, + 'temperature': 30.3, + 'uv_index': 6, + 'wind_bearing': 141, + 'wind_gust_speed': 35.88, + 'wind_speed': 15.51, + }), + dict({ + 'apparent_temperature': 33.8, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T04:00:00Z', + 'dew_point': 22.6, + 'humidity': 63, + 'precipitation': 
0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.36, + 'temperature': 30.4, + 'uv_index': 5, + 'wind_bearing': 140, + 'wind_gust_speed': 35.99, + 'wind_speed': 15.75, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T05:00:00Z', + 'dew_point': 22.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.11, + 'temperature': 30.1, + 'uv_index': 4, + 'wind_bearing': 137, + 'wind_gust_speed': 33.61, + 'wind_speed': 15.36, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 77.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T06:00:00Z', + 'dew_point': 22.5, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.98, + 'temperature': 30.0, + 'uv_index': 3, + 'wind_bearing': 138, + 'wind_gust_speed': 32.61, + 'wind_speed': 14.98, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T07:00:00Z', + 'dew_point': 22.2, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.13, + 'temperature': 29.2, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 28.1, + 'wind_speed': 13.88, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T08:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.48, + 'temperature': 28.3, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 24.22, + 'wind_speed': 13.02, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 55.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T09:00:00Z', + 'dew_point': 21.9, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.81, + 'temperature': 27.1, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 22.5, + 'wind_speed': 11.94, + }), + dict({ + 'apparent_temperature': 28.8, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T10:00:00Z', + 'dew_point': 21.7, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 21.47, + 'wind_speed': 11.25, + }), + dict({ + 'apparent_temperature': 28.1, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T11:00:00Z', + 'dew_point': 21.8, + 'humidity': 80, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.77, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 22.71, + 'wind_speed': 12.39, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.97, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 23.67, + 'wind_speed': 12.83, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T13:00:00Z', + 'dew_point': 21.7, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.97, + 'temperature': 24.7, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 23.34, + 'wind_speed': 
12.62, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T14:00:00Z', + 'dew_point': 21.7, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.83, + 'temperature': 24.4, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 22.9, + 'wind_speed': 12.07, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T15:00:00Z', + 'dew_point': 21.6, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.74, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 22.01, + 'wind_speed': 11.19, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T16:00:00Z', + 'dew_point': 21.6, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.56, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 21.29, + 'wind_speed': 10.97, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T17:00:00Z', + 'dew_point': 21.5, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.35, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 20.52, + 'wind_speed': 10.5, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T18:00:00Z', + 'dew_point': 21.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.3, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 20.04, + 'wind_speed': 10.51, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T19:00:00Z', + 'dew_point': 21.3, + 'humidity': 88, + 'precipitation': 0.3, + 'precipitation_probability': 12.0, + 'pressure': 1011.37, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 18.07, + 'wind_speed': 10.13, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T20:00:00Z', + 'dew_point': 21.2, + 'humidity': 89, + 'precipitation': 0.2, + 'precipitation_probability': 13.0, + 'pressure': 1011.53, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 16.86, + 'wind_speed': 10.34, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T21:00:00Z', + 'dew_point': 21.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.71, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 16.66, + 'wind_speed': 10.68, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T22:00:00Z', + 'dew_point': 21.9, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.94, + 'temperature': 24.4, + 'uv_index': 1, + 'wind_bearing': 137, + 'wind_gust_speed': 17.21, + 'wind_speed': 10.61, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T23:00:00Z', + 'dew_point': 22.3, + 'humidity': 82, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1012.05, + 'temperature': 25.6, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 19.23, + 'wind_speed': 11.13, + }), + dict({ + 'apparent_temperature': 29.5, + 'cloud_coverage': 79.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T00:00:00Z', + 'dew_point': 22.6, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.07, + 'temperature': 26.6, + 'uv_index': 3, + 'wind_bearing': 140, + 'wind_gust_speed': 20.61, + 'wind_speed': 11.13, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 82.0, + 'condition': 'rainy', + 'datetime': '2023-09-12T01:00:00Z', + 'dew_point': 23.1, + 'humidity': 75, + 'precipitation': 0.2, + 'precipitation_probability': 16.0, + 'pressure': 1011.89, + 'temperature': 27.9, + 'uv_index': 4, + 'wind_bearing': 141, + 'wind_gust_speed': 23.35, + 'wind_speed': 11.98, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T02:00:00Z', + 'dew_point': 23.5, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.53, + 'temperature': 29.0, + 'uv_index': 5, + 'wind_bearing': 143, + 'wind_gust_speed': 26.45, + 'wind_speed': 13.01, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T03:00:00Z', + 'dew_point': 23.5, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.15, + 'temperature': 29.8, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 28.95, + 'wind_speed': 13.9, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T04:00:00Z', + 'dew_point': 23.4, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.79, + 'temperature': 30.2, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 27.9, + 'wind_speed': 13.95, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T05:00:00Z', + 'dew_point': 23.1, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.43, + 'temperature': 30.4, + 'uv_index': 4, + 'wind_bearing': 140, + 'wind_gust_speed': 26.53, + 'wind_speed': 13.78, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T06:00:00Z', + 'dew_point': 22.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.21, + 'temperature': 30.1, + 'uv_index': 3, + 'wind_bearing': 138, + 'wind_gust_speed': 24.56, + 'wind_speed': 13.74, + }), + dict({ + 'apparent_temperature': 32.0, + 'cloud_coverage': 53.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T07:00:00Z', + 'dew_point': 22.1, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.26, + 'temperature': 29.1, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 22.78, + 'wind_speed': 13.21, + }), + dict({ + 'apparent_temperature': 30.9, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.51, + 'temperature': 28.1, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 19.92, + 'wind_speed': 12.0, + }), + 
dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 50.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T09:00:00Z', + 'dew_point': 21.7, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.8, + 'temperature': 27.2, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 17.65, + 'wind_speed': 10.97, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T10:00:00Z', + 'dew_point': 21.4, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.23, + 'temperature': 26.2, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 15.87, + 'wind_speed': 10.23, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T11:00:00Z', + 'dew_point': 21.3, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1011.79, + 'temperature': 25.4, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 13.9, + 'wind_speed': 9.39, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T12:00:00Z', + 'dew_point': 21.2, + 'humidity': 81, + 'precipitation': 0.0, + 'precipitation_probability': 47.0, + 'pressure': 1012.12, + 'temperature': 24.7, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 13.32, + 'wind_speed': 8.9, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T13:00:00Z', + 'dew_point': 21.2, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1012.18, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 13.18, + 'wind_speed': 8.59, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T14:00:00Z', + 'dew_point': 21.3, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.09, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 13.84, + 'wind_speed': 8.87, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T15:00:00Z', + 'dew_point': 21.3, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.99, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 15.08, + 'wind_speed': 8.93, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T16:00:00Z', + 'dew_point': 21.0, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 23.2, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 16.74, + 'wind_speed': 9.49, + }), + dict({ + 'apparent_temperature': 24.7, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T17:00:00Z', + 'dew_point': 20.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.75, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 17.45, + 'wind_speed': 9.12, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T18:00:00Z', + 'dew_point': 20.7, + 'humidity': 89, + 
'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.77, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 17.04, + 'wind_speed': 8.68, + }), + dict({ + 'apparent_temperature': 24.1, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T19:00:00Z', + 'dew_point': 20.6, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 16.8, + 'wind_speed': 8.61, + }), + dict({ + 'apparent_temperature': 23.9, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T20:00:00Z', + 'dew_point': 20.5, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.23, + 'temperature': 22.1, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 15.35, + 'wind_speed': 8.36, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 75.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T21:00:00Z', + 'dew_point': 20.6, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.49, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 14.09, + 'wind_speed': 7.77, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T22:00:00Z', + 'dew_point': 21.0, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.72, + 'temperature': 23.8, + 'uv_index': 1, + 'wind_bearing': 152, + 'wind_gust_speed': 14.04, + 'wind_speed': 7.25, + }), + dict({ + 'apparent_temperature': 27.8, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T23:00:00Z', + 'dew_point': 21.4, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.85, + 'temperature': 25.5, + 'uv_index': 2, + 'wind_bearing': 149, + 'wind_gust_speed': 15.31, + 'wind_speed': 7.14, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-13T00:00:00Z', + 'dew_point': 21.8, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.89, + 'temperature': 27.1, + 'uv_index': 4, + 'wind_bearing': 141, + 'wind_gust_speed': 16.42, + 'wind_speed': 6.89, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T01:00:00Z', + 'dew_point': 22.0, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.65, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 137, + 'wind_gust_speed': 18.64, + 'wind_speed': 6.65, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T02:00:00Z', + 'dew_point': 21.9, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.26, + 'temperature': 29.4, + 'uv_index': 5, + 'wind_bearing': 128, + 'wind_gust_speed': 21.69, + 'wind_speed': 7.12, + }), + dict({ + 'apparent_temperature': 33.0, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T03:00:00Z', + 'dew_point': 21.9, + 'humidity': 62, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.88, + 'temperature': 30.1, + 'uv_index': 6, + 'wind_bearing': 111, + 'wind_gust_speed': 23.41, + 'wind_speed': 7.33, + }), + dict({ + 
'apparent_temperature': 33.4, + 'cloud_coverage': 72.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T04:00:00Z', + 'dew_point': 22.0, + 'humidity': 61, + 'precipitation': 0.9, + 'precipitation_probability': 12.0, + 'pressure': 1011.55, + 'temperature': 30.4, + 'uv_index': 5, + 'wind_bearing': 56, + 'wind_gust_speed': 23.1, + 'wind_speed': 8.09, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 72.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T05:00:00Z', + 'dew_point': 21.9, + 'humidity': 61, + 'precipitation': 1.9, + 'precipitation_probability': 12.0, + 'pressure': 1011.29, + 'temperature': 30.2, + 'uv_index': 4, + 'wind_bearing': 20, + 'wind_gust_speed': 21.81, + 'wind_speed': 9.46, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 74.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T06:00:00Z', + 'dew_point': 21.9, + 'humidity': 63, + 'precipitation': 2.3, + 'precipitation_probability': 11.0, + 'pressure': 1011.17, + 'temperature': 29.7, + 'uv_index': 3, + 'wind_bearing': 20, + 'wind_gust_speed': 19.72, + 'wind_speed': 9.8, + }), + dict({ + 'apparent_temperature': 31.8, + 'cloud_coverage': 69.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T07:00:00Z', + 'dew_point': 22.4, + 'humidity': 68, + 'precipitation': 1.8, + 'precipitation_probability': 10.0, + 'pressure': 1011.32, + 'temperature': 28.8, + 'uv_index': 1, + 'wind_bearing': 18, + 'wind_gust_speed': 17.55, + 'wind_speed': 9.23, + }), + dict({ + 'apparent_temperature': 30.8, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T08:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.8, + 'precipitation_probability': 10.0, + 'pressure': 1011.6, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 27, + 'wind_gust_speed': 15.08, + 'wind_speed': 8.05, + }), + dict({ + 'apparent_temperature': 29.4, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T09:00:00Z', + 'dew_point': 23.0, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.94, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 32, + 'wind_gust_speed': 12.17, + 'wind_speed': 6.68, + }), + dict({ + 'apparent_temperature': 28.5, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T10:00:00Z', + 'dew_point': 22.9, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.3, + 'temperature': 25.5, + 'uv_index': 0, + 'wind_bearing': 69, + 'wind_gust_speed': 11.64, + 'wind_speed': 6.69, + }), + dict({ + 'apparent_temperature': 27.7, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T11:00:00Z', + 'dew_point': 22.6, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.71, + 'temperature': 25.0, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 11.91, + 'wind_speed': 6.23, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T12:00:00Z', + 'dew_point': 22.3, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.96, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 12.47, + 'wind_speed': 5.73, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T13:00:00Z', + 'dew_point': 22.3, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 
'pressure': 1013.03, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 13.57, + 'wind_speed': 5.66, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T14:00:00Z', + 'dew_point': 22.2, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.99, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 15.07, + 'wind_speed': 5.83, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T15:00:00Z', + 'dew_point': 22.2, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.95, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 16.06, + 'wind_speed': 5.93, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T16:00:00Z', + 'dew_point': 22.0, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.9, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 153, + 'wind_gust_speed': 16.05, + 'wind_speed': 5.75, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T17:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.85, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 15.52, + 'wind_speed': 5.49, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 92.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T18:00:00Z', + 'dew_point': 21.8, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.87, + 'temperature': 23.0, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 15.01, + 'wind_speed': 5.32, + }), + dict({ + 'apparent_temperature': 25.0, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T19:00:00Z', + 'dew_point': 21.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.01, + 'temperature': 22.8, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 14.39, + 'wind_speed': 5.33, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T20:00:00Z', + 'dew_point': 21.6, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.22, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 13.79, + 'wind_speed': 5.43, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T21:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.41, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 14.12, + 'wind_speed': 5.52, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 77.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T22:00:00Z', + 'dew_point': 22.1, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.59, + 'temperature': 24.3, + 'uv_index': 1, + 'wind_bearing': 147, + 'wind_gust_speed': 16.14, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 65.0, + 
'condition': 'cloudy', + 'datetime': '2023-09-13T23:00:00Z', + 'dew_point': 22.4, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.74, + 'temperature': 25.7, + 'uv_index': 2, + 'wind_bearing': 146, + 'wind_gust_speed': 19.09, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 30.5, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T00:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.78, + 'temperature': 27.4, + 'uv_index': 4, + 'wind_bearing': 143, + 'wind_gust_speed': 21.6, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 32.2, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T01:00:00Z', + 'dew_point': 23.2, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.61, + 'temperature': 28.7, + 'uv_index': 5, + 'wind_bearing': 138, + 'wind_gust_speed': 23.36, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T02:00:00Z', + 'dew_point': 23.2, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.32, + 'temperature': 29.9, + 'uv_index': 6, + 'wind_bearing': 111, + 'wind_gust_speed': 24.72, + 'wind_speed': 4.99, + }), + dict({ + 'apparent_temperature': 34.4, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T03:00:00Z', + 'dew_point': 23.3, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.04, + 'temperature': 30.7, + 'uv_index': 6, + 'wind_bearing': 354, + 'wind_gust_speed': 25.23, + 'wind_speed': 4.74, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T04:00:00Z', + 'dew_point': 23.4, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.77, + 'temperature': 31.0, + 'uv_index': 6, + 'wind_bearing': 341, + 'wind_gust_speed': 24.6, + 'wind_speed': 4.79, + }), + dict({ + 'apparent_temperature': 34.5, + 'cloud_coverage': 60.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T05:00:00Z', + 'dew_point': 23.2, + 'humidity': 64, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1012.53, + 'temperature': 30.7, + 'uv_index': 5, + 'wind_bearing': 336, + 'wind_gust_speed': 23.28, + 'wind_speed': 5.07, + }), + dict({ + 'apparent_temperature': 33.8, + 'cloud_coverage': 59.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T06:00:00Z', + 'dew_point': 23.1, + 'humidity': 66, + 'precipitation': 0.2, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1012.49, + 'temperature': 30.2, + 'uv_index': 3, + 'wind_bearing': 336, + 'wind_gust_speed': 22.05, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 32.9, + 'cloud_coverage': 53.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T07:00:00Z', + 'dew_point': 23.0, + 'humidity': 68, + 'precipitation': 0.2, + 'precipitation_probability': 40.0, + 'pressure': 1012.73, + 'temperature': 29.5, + 'uv_index': 2, + 'wind_bearing': 339, + 'wind_gust_speed': 21.18, + 'wind_speed': 5.63, + }), + dict({ + 'apparent_temperature': 31.6, + 'cloud_coverage': 43.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T08:00:00Z', + 'dew_point': 22.8, + 'humidity': 72, + 'precipitation': 0.0, + 
'precipitation_probability': 45.0, + 'pressure': 1013.16, + 'temperature': 28.4, + 'uv_index': 0, + 'wind_bearing': 342, + 'wind_gust_speed': 20.35, + 'wind_speed': 5.93, + }), + dict({ + 'apparent_temperature': 30.0, + 'cloud_coverage': 35.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T09:00:00Z', + 'dew_point': 22.5, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1013.62, + 'temperature': 27.1, + 'uv_index': 0, + 'wind_bearing': 347, + 'wind_gust_speed': 19.42, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 29.0, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T10:00:00Z', + 'dew_point': 22.4, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.09, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 348, + 'wind_gust_speed': 18.19, + 'wind_speed': 5.31, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T11:00:00Z', + 'dew_point': 22.4, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.56, + 'temperature': 25.5, + 'uv_index': 0, + 'wind_bearing': 177, + 'wind_gust_speed': 16.79, + 'wind_speed': 4.28, + }), + dict({ + 'apparent_temperature': 27.5, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T12:00:00Z', + 'dew_point': 22.3, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.87, + 'temperature': 24.9, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 15.61, + 'wind_speed': 3.72, + }), + dict({ + 'apparent_temperature': 26.6, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T13:00:00Z', + 'dew_point': 22.1, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.91, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 14.7, + 'wind_speed': 4.11, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T14:00:00Z', + 'dew_point': 21.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.8, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 13.81, + 'wind_speed': 4.97, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T15:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.66, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 170, + 'wind_gust_speed': 12.88, + 'wind_speed': 5.57, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 37.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T16:00:00Z', + 'dew_point': 21.5, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.54, + 'temperature': 22.7, + 'uv_index': 0, + 'wind_bearing': 168, + 'wind_gust_speed': 12.0, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 39.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T17:00:00Z', + 'dew_point': 21.3, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.45, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 165, + 'wind_gust_speed': 11.43, + 'wind_speed': 5.48, + }), + dict({ + 'apparent_temperature': 
24.6, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T18:00:00Z', + 'dew_point': 21.4, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 44.0, + 'pressure': 1014.45, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 11.42, + 'wind_speed': 5.38, + }), + dict({ + 'apparent_temperature': 25.0, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T19:00:00Z', + 'dew_point': 21.6, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 52.0, + 'pressure': 1014.63, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 12.15, + 'wind_speed': 5.39, + }), + dict({ + 'apparent_temperature': 25.6, + 'cloud_coverage': 38.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T20:00:00Z', + 'dew_point': 21.8, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 51.0, + 'pressure': 1014.91, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 13.54, + 'wind_speed': 5.45, + }), + dict({ + 'apparent_temperature': 26.6, + 'cloud_coverage': 36.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T21:00:00Z', + 'dew_point': 22.0, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 42.0, + 'pressure': 1015.18, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 15.48, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 28.5, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T22:00:00Z', + 'dew_point': 22.5, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 28.999999999999996, + 'pressure': 1015.4, + 'temperature': 25.7, + 'uv_index': 1, + 'wind_bearing': 158, + 'wind_gust_speed': 17.86, + 'wind_speed': 5.84, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 77, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.54, + 'temperature': 27.2, + 'uv_index': 2, + 'wind_bearing': 155, + 'wind_gust_speed': 20.19, + 'wind_speed': 6.09, + }), + dict({ + 'apparent_temperature': 32.1, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-15T00:00:00Z', + 'dew_point': 23.3, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.55, + 'temperature': 28.6, + 'uv_index': 4, + 'wind_bearing': 152, + 'wind_gust_speed': 21.83, + 'wind_speed': 6.42, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-15T01:00:00Z', + 'dew_point': 23.5, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.35, + 'temperature': 29.6, + 'uv_index': 6, + 'wind_bearing': 144, + 'wind_gust_speed': 22.56, + 'wind_speed': 6.91, + }), + dict({ + 'apparent_temperature': 34.2, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T02:00:00Z', + 'dew_point': 23.5, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.0, + 'temperature': 30.4, + 'uv_index': 7, + 'wind_bearing': 336, + 'wind_gust_speed': 22.83, + 'wind_speed': 7.47, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T03:00:00Z', + 'dew_point': 23.5, + 'humidity': 65, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1014.62, + 'temperature': 30.9, + 'uv_index': 7, + 'wind_bearing': 336, + 'wind_gust_speed': 22.98, + 'wind_speed': 7.95, + }), + dict({ + 'apparent_temperature': 35.4, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T04:00:00Z', + 'dew_point': 23.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.25, + 'temperature': 31.3, + 'uv_index': 6, + 'wind_bearing': 341, + 'wind_gust_speed': 23.21, + 'wind_speed': 8.44, + }), + dict({ + 'apparent_temperature': 35.6, + 'cloud_coverage': 44.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T05:00:00Z', + 'dew_point': 23.7, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.95, + 'temperature': 31.5, + 'uv_index': 5, + 'wind_bearing': 344, + 'wind_gust_speed': 23.46, + 'wind_speed': 8.95, + }), + dict({ + 'apparent_temperature': 35.1, + 'cloud_coverage': 42.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T06:00:00Z', + 'dew_point': 23.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.83, + 'temperature': 31.1, + 'uv_index': 3, + 'wind_bearing': 347, + 'wind_gust_speed': 23.64, + 'wind_speed': 9.13, + }), + dict({ + 'apparent_temperature': 34.1, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T07:00:00Z', + 'dew_point': 23.4, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.96, + 'temperature': 30.3, + 'uv_index': 2, + 'wind_bearing': 350, + 'wind_gust_speed': 23.66, + 'wind_speed': 8.78, + }), + dict({ + 'apparent_temperature': 32.4, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T08:00:00Z', + 'dew_point': 23.1, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.25, + 'temperature': 29.0, + 'uv_index': 0, + 'wind_bearing': 356, + 'wind_gust_speed': 23.51, + 'wind_speed': 8.13, + }), + dict({ + 'apparent_temperature': 31.1, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T09:00:00Z', + 'dew_point': 22.9, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.61, + 'temperature': 27.9, + 'uv_index': 0, + 'wind_bearing': 3, + 'wind_gust_speed': 23.21, + 'wind_speed': 7.48, + }), + dict({ + 'apparent_temperature': 30.0, + 'cloud_coverage': 43.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T10:00:00Z', + 'dew_point': 22.8, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.02, + 'temperature': 26.9, + 'uv_index': 0, + 'wind_bearing': 20, + 'wind_gust_speed': 22.68, + 'wind_speed': 6.83, + }), + dict({ + 'apparent_temperature': 29.2, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T11:00:00Z', + 'dew_point': 22.8, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.43, + 'temperature': 26.2, + 'uv_index': 0, + 'wind_bearing': 129, + 'wind_gust_speed': 22.04, + 'wind_speed': 6.1, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T12:00:00Z', + 'dew_point': 22.7, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.71, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 21.64, + 'wind_speed': 
5.6, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T13:00:00Z', + 'dew_point': 23.2, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.52, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 164, + 'wind_gust_speed': 16.35, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T14:00:00Z', + 'dew_point': 22.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.37, + 'temperature': 24.6, + 'uv_index': 0, + 'wind_bearing': 168, + 'wind_gust_speed': 17.11, + 'wind_speed': 5.79, + }), + dict({ + 'apparent_temperature': 26.9, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T15:00:00Z', + 'dew_point': 22.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.21, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 17.32, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T16:00:00Z', + 'dew_point': 22.6, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.07, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 201, + 'wind_gust_speed': 16.6, + 'wind_speed': 5.27, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T17:00:00Z', + 'dew_point': 22.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.95, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 219, + 'wind_gust_speed': 15.52, + 'wind_speed': 4.62, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T18:00:00Z', + 'dew_point': 22.3, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.88, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 216, + 'wind_gust_speed': 14.64, + 'wind_speed': 4.32, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T19:00:00Z', + 'dew_point': 22.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.91, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 198, + 'wind_gust_speed': 14.06, + 'wind_speed': 4.73, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T20:00:00Z', + 'dew_point': 22.4, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.99, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 189, + 'wind_gust_speed': 13.7, + 'wind_speed': 5.49, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T21:00:00Z', + 'dew_point': 22.5, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.07, + 'temperature': 24.4, + 'uv_index': 0, + 'wind_bearing': 183, + 'wind_gust_speed': 13.77, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 28.3, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T22:00:00Z', + 'dew_point': 22.6, + 'humidity': 84, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1015.12, + 'temperature': 25.5, + 'uv_index': 1, + 'wind_bearing': 179, + 'wind_gust_speed': 14.38, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 29.9, + 'cloud_coverage': 52.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.13, + 'temperature': 26.9, + 'uv_index': 2, + 'wind_bearing': 170, + 'wind_gust_speed': 15.2, + 'wind_speed': 5.27, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 44.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T00:00:00Z', + 'dew_point': 22.9, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.04, + 'temperature': 28.0, + 'uv_index': 4, + 'wind_bearing': 155, + 'wind_gust_speed': 15.85, + 'wind_speed': 4.76, + }), + dict({ + 'apparent_temperature': 32.5, + 'cloud_coverage': 24.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T01:00:00Z', + 'dew_point': 22.6, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.52, + 'temperature': 29.2, + 'uv_index': 6, + 'wind_bearing': 110, + 'wind_gust_speed': 16.27, + 'wind_speed': 6.81, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 16.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T02:00:00Z', + 'dew_point': 22.4, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.01, + 'temperature': 30.2, + 'uv_index': 8, + 'wind_bearing': 30, + 'wind_gust_speed': 16.55, + 'wind_speed': 6.86, + }), + dict({ + 'apparent_temperature': 34.2, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T03:00:00Z', + 'dew_point': 22.0, + 'humidity': 59, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.45, + 'temperature': 31.1, + 'uv_index': 8, + 'wind_bearing': 17, + 'wind_gust_speed': 16.52, + 'wind_speed': 6.8, + }), + dict({ + 'apparent_temperature': 34.7, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T04:00:00Z', + 'dew_point': 21.9, + 'humidity': 57, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.89, + 'temperature': 31.5, + 'uv_index': 8, + 'wind_bearing': 17, + 'wind_gust_speed': 16.08, + 'wind_speed': 6.62, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T05:00:00Z', + 'dew_point': 21.9, + 'humidity': 56, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.39, + 'temperature': 31.8, + 'uv_index': 6, + 'wind_bearing': 20, + 'wind_gust_speed': 15.48, + 'wind_speed': 6.45, + }), + dict({ + 'apparent_temperature': 34.5, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T06:00:00Z', + 'dew_point': 21.7, + 'humidity': 56, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.11, + 'temperature': 31.4, + 'uv_index': 4, + 'wind_bearing': 26, + 'wind_gust_speed': 15.08, + 'wind_speed': 6.43, + }), + dict({ + 'apparent_temperature': 33.6, + 'cloud_coverage': 7.000000000000001, + 'condition': 'sunny', + 'datetime': '2023-09-16T07:00:00Z', + 'dew_point': 21.7, + 'humidity': 59, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.15, + 'temperature': 30.7, + 'uv_index': 2, + 'wind_bearing': 39, + 'wind_gust_speed': 14.88, + 'wind_speed': 6.61, + }), + dict({ + 
'apparent_temperature': 32.5, + 'cloud_coverage': 2.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.41, + 'temperature': 29.6, + 'uv_index': 0, + 'wind_bearing': 72, + 'wind_gust_speed': 14.82, + 'wind_speed': 6.95, + }), + dict({ + 'apparent_temperature': 31.4, + 'cloud_coverage': 2.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T09:00:00Z', + 'dew_point': 22.1, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.75, + 'temperature': 28.5, + 'uv_index': 0, + 'wind_bearing': 116, + 'wind_gust_speed': 15.13, + 'wind_speed': 7.45, + }), + dict({ + 'apparent_temperature': 30.5, + 'cloud_coverage': 13.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T10:00:00Z', + 'dew_point': 22.3, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.13, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 16.09, + 'wind_speed': 8.15, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T11:00:00Z', + 'dew_point': 22.6, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.47, + 'temperature': 26.9, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 17.37, + 'wind_speed': 8.87, + }), + dict({ + 'apparent_temperature': 29.3, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T12:00:00Z', + 'dew_point': 22.9, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.6, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 18.29, + 'wind_speed': 9.21, + }), + dict({ + 'apparent_temperature': 28.7, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T13:00:00Z', + 'dew_point': 23.0, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.41, + 'temperature': 25.7, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 18.49, + 'wind_speed': 8.96, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 55.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T14:00:00Z', + 'dew_point': 22.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.01, + 'temperature': 25.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 18.47, + 'wind_speed': 8.45, + }), + dict({ + 'apparent_temperature': 27.2, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T15:00:00Z', + 'dew_point': 22.7, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.55, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 18.79, + 'wind_speed': 8.1, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T16:00:00Z', + 'dew_point': 22.6, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.1, + 'temperature': 24.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 19.81, + 'wind_speed': 8.15, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T17:00:00Z', + 'dew_point': 22.6, + 'humidity': 94, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1011.68, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 20.96, + 'wind_speed': 8.3, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T18:00:00Z', + 'dew_point': 22.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 21.41, + 'wind_speed': 8.24, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T19:00:00Z', + 'dew_point': 22.5, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 20.42, + 'wind_speed': 7.62, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T20:00:00Z', + 'dew_point': 22.6, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.31, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 18.61, + 'wind_speed': 6.66, + }), + dict({ + 'apparent_temperature': 27.7, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T21:00:00Z', + 'dew_point': 22.6, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.37, + 'temperature': 24.9, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 17.14, + 'wind_speed': 5.86, + }), + dict({ + 'apparent_temperature': 28.9, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T22:00:00Z', + 'dew_point': 22.6, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.46, + 'temperature': 26.0, + 'uv_index': 1, + 'wind_bearing': 161, + 'wind_gust_speed': 16.78, + 'wind_speed': 5.5, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 39.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.51, + 'temperature': 27.5, + 'uv_index': 2, + 'wind_bearing': 165, + 'wind_gust_speed': 17.21, + 'wind_speed': 5.56, + }), + dict({ + 'apparent_temperature': 31.7, + 'cloud_coverage': 33.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T00:00:00Z', + 'dew_point': 22.8, + 'humidity': 71, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 28.5, + 'uv_index': 4, + 'wind_bearing': 174, + 'wind_gust_speed': 17.96, + 'wind_speed': 6.04, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T01:00:00Z', + 'dew_point': 22.7, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.98, + 'temperature': 29.4, + 'uv_index': 6, + 'wind_bearing': 192, + 'wind_gust_speed': 19.15, + 'wind_speed': 7.23, + }), + dict({ + 'apparent_temperature': 33.6, + 'cloud_coverage': 28.999999999999996, + 'condition': 'sunny', + 'datetime': '2023-09-17T02:00:00Z', + 'dew_point': 22.8, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.38, + 'temperature': 30.1, + 'uv_index': 7, + 'wind_bearing': 225, + 'wind_gust_speed': 20.89, + 'wind_speed': 8.9, + }), 
+ dict({ + 'apparent_temperature': 34.1, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T03:00:00Z', + 'dew_point': 22.8, + 'humidity': 63, + 'precipitation': 0.3, + 'precipitation_probability': 9.0, + 'pressure': 1009.75, + 'temperature': 30.7, + 'uv_index': 8, + 'wind_bearing': 264, + 'wind_gust_speed': 22.67, + 'wind_speed': 10.27, + }), + dict({ + 'apparent_temperature': 33.9, + 'cloud_coverage': 37.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T04:00:00Z', + 'dew_point': 22.5, + 'humidity': 62, + 'precipitation': 0.4, + 'precipitation_probability': 10.0, + 'pressure': 1009.18, + 'temperature': 30.5, + 'uv_index': 7, + 'wind_bearing': 293, + 'wind_gust_speed': 23.93, + 'wind_speed': 10.82, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T05:00:00Z', + 'dew_point': 22.4, + 'humidity': 63, + 'precipitation': 0.6, + 'precipitation_probability': 12.0, + 'pressure': 1008.71, + 'temperature': 30.1, + 'uv_index': 5, + 'wind_bearing': 308, + 'wind_gust_speed': 24.39, + 'wind_speed': 10.72, + }), + dict({ + 'apparent_temperature': 32.7, + 'cloud_coverage': 50.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T06:00:00Z', + 'dew_point': 22.2, + 'humidity': 64, + 'precipitation': 0.7, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1008.46, + 'temperature': 29.6, + 'uv_index': 3, + 'wind_bearing': 312, + 'wind_gust_speed': 23.9, + 'wind_speed': 10.28, + }), + dict({ + 'apparent_temperature': 31.8, + 'cloud_coverage': 47.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T07:00:00Z', + 'dew_point': 22.1, + 'humidity': 67, + 'precipitation': 0.7, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1008.53, + 'temperature': 28.9, + 'uv_index': 1, + 'wind_bearing': 312, + 'wind_gust_speed': 22.3, + 'wind_speed': 9.59, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 70, + 'precipitation': 0.6, + 'precipitation_probability': 15.0, + 'pressure': 1008.82, + 'temperature': 27.9, + 'uv_index': 0, + 'wind_bearing': 305, + 'wind_gust_speed': 19.73, + 'wind_speed': 8.58, + }), + dict({ + 'apparent_temperature': 29.6, + 'cloud_coverage': 35.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T09:00:00Z', + 'dew_point': 22.0, + 'humidity': 74, + 'precipitation': 0.5, + 'precipitation_probability': 15.0, + 'pressure': 1009.21, + 'temperature': 27.0, + 'uv_index': 0, + 'wind_bearing': 291, + 'wind_gust_speed': 16.49, + 'wind_speed': 7.34, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 33.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T10:00:00Z', + 'dew_point': 21.9, + 'humidity': 78, + 'precipitation': 0.4, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1009.65, + 'temperature': 26.1, + 'uv_index': 0, + 'wind_bearing': 257, + 'wind_gust_speed': 12.71, + 'wind_speed': 5.91, + }), + dict({ + 'apparent_temperature': 27.8, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T11:00:00Z', + 'dew_point': 21.9, + 'humidity': 82, + 'precipitation': 0.3, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1010.04, + 'temperature': 25.3, + 'uv_index': 0, + 'wind_bearing': 212, + 'wind_gust_speed': 9.16, + 'wind_speed': 4.54, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 36.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T12:00:00Z', + 
'dew_point': 21.9, + 'humidity': 85, + 'precipitation': 0.3, + 'precipitation_probability': 28.000000000000004, + 'pressure': 1010.24, + 'temperature': 24.6, + 'uv_index': 0, + 'wind_bearing': 192, + 'wind_gust_speed': 7.09, + 'wind_speed': 3.62, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T13:00:00Z', + 'dew_point': 22.0, + 'humidity': 88, + 'precipitation': 0.3, + 'precipitation_probability': 30.0, + 'pressure': 1010.15, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 185, + 'wind_gust_speed': 7.2, + 'wind_speed': 3.27, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 44.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T14:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.3, + 'precipitation_probability': 30.0, + 'pressure': 1009.87, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 8.37, + 'wind_speed': 3.22, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 49.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T15:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.2, + 'precipitation_probability': 31.0, + 'pressure': 1009.56, + 'temperature': 23.2, + 'uv_index': 0, + 'wind_bearing': 180, + 'wind_gust_speed': 9.21, + 'wind_speed': 3.3, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 53.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T16:00:00Z', + 'dew_point': 21.8, + 'humidity': 94, + 'precipitation': 0.2, + 'precipitation_probability': 33.0, + 'pressure': 1009.29, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 9.0, + 'wind_speed': 3.46, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T17:00:00Z', + 'dew_point': 21.7, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 35.0, + 'pressure': 1009.09, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 186, + 'wind_gust_speed': 8.37, + 'wind_speed': 3.72, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T18:00:00Z', + 'dew_point': 21.6, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 37.0, + 'pressure': 1009.01, + 'temperature': 22.5, + 'uv_index': 0, + 'wind_bearing': 201, + 'wind_gust_speed': 7.99, + 'wind_speed': 4.07, + }), + dict({ + 'apparent_temperature': 24.9, + 'cloud_coverage': 62.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T19:00:00Z', + 'dew_point': 21.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 39.0, + 'pressure': 1009.07, + 'temperature': 22.7, + 'uv_index': 0, + 'wind_bearing': 258, + 'wind_gust_speed': 8.18, + 'wind_speed': 4.55, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T20:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 39.0, + 'pressure': 1009.23, + 'temperature': 23.0, + 'uv_index': 0, + 'wind_bearing': 305, + 'wind_gust_speed': 8.77, + 'wind_speed': 5.17, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T21:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 38.0, + 'pressure': 1009.47, + 'temperature': 23.5, + 'uv_index': 0, + 
'wind_bearing': 318, + 'wind_gust_speed': 9.69, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T22:00:00Z', + 'dew_point': 21.8, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 30.0, + 'pressure': 1009.77, + 'temperature': 24.2, + 'uv_index': 1, + 'wind_bearing': 324, + 'wind_gust_speed': 10.88, + 'wind_speed': 6.26, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 80.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T23:00:00Z', + 'dew_point': 21.9, + 'humidity': 83, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1010.09, + 'temperature': 25.1, + 'uv_index': 2, + 'wind_bearing': 329, + 'wind_gust_speed': 12.21, + 'wind_speed': 6.68, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 87.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T00:00:00Z', + 'dew_point': 21.9, + 'humidity': 80, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1010.33, + 'temperature': 25.7, + 'uv_index': 3, + 'wind_bearing': 332, + 'wind_gust_speed': 13.52, + 'wind_speed': 7.12, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 67.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T01:00:00Z', + 'dew_point': 21.7, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1007.43, + 'temperature': 27.2, + 'uv_index': 5, + 'wind_bearing': 330, + 'wind_gust_speed': 11.36, + 'wind_speed': 11.36, + }), + dict({ + 'apparent_temperature': 30.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T02:00:00Z', + 'dew_point': 21.6, + 'humidity': 70, + 'precipitation': 0.3, + 'precipitation_probability': 9.0, + 'pressure': 1007.05, + 'temperature': 27.5, + 'uv_index': 6, + 'wind_bearing': 332, + 'wind_gust_speed': 12.06, + 'wind_speed': 12.06, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T03:00:00Z', + 'dew_point': 21.6, + 'humidity': 69, + 'precipitation': 0.5, + 'precipitation_probability': 10.0, + 'pressure': 1006.67, + 'temperature': 27.8, + 'uv_index': 6, + 'wind_bearing': 333, + 'wind_gust_speed': 12.81, + 'wind_speed': 12.81, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 67.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T04:00:00Z', + 'dew_point': 21.5, + 'humidity': 68, + 'precipitation': 0.4, + 'precipitation_probability': 10.0, + 'pressure': 1006.28, + 'temperature': 28.0, + 'uv_index': 5, + 'wind_bearing': 335, + 'wind_gust_speed': 13.68, + 'wind_speed': 13.68, + }), + dict({ + 'apparent_temperature': 30.7, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T05:00:00Z', + 'dew_point': 21.4, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1005.89, + 'temperature': 28.1, + 'uv_index': 4, + 'wind_bearing': 336, + 'wind_gust_speed': 14.61, + 'wind_speed': 14.61, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T06:00:00Z', + 'dew_point': 21.2, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 27.0, + 'pressure': 1005.67, + 'temperature': 27.9, + 'uv_index': 3, + 'wind_bearing': 338, + 'wind_gust_speed': 15.25, + 'wind_speed': 15.25, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 
'datetime': '2023-09-18T07:00:00Z', + 'dew_point': 21.3, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 28.000000000000004, + 'pressure': 1005.74, + 'temperature': 27.4, + 'uv_index': 1, + 'wind_bearing': 339, + 'wind_gust_speed': 15.45, + 'wind_speed': 15.45, + }), + dict({ + 'apparent_temperature': 29.1, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T08:00:00Z', + 'dew_point': 21.4, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1005.98, + 'temperature': 26.7, + 'uv_index': 0, + 'wind_bearing': 341, + 'wind_gust_speed': 15.38, + 'wind_speed': 15.38, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T09:00:00Z', + 'dew_point': 21.6, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1006.22, + 'temperature': 26.1, + 'uv_index': 0, + 'wind_bearing': 341, + 'wind_gust_speed': 15.27, + 'wind_speed': 15.27, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T10:00:00Z', + 'dew_point': 21.6, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1006.44, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 339, + 'wind_gust_speed': 15.09, + 'wind_speed': 15.09, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T11:00:00Z', + 'dew_point': 21.7, + 'humidity': 81, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1006.66, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 336, + 'wind_gust_speed': 14.88, + 'wind_speed': 14.88, + }), + dict({ + 'apparent_temperature': 27.2, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1006.79, + 'temperature': 24.8, + 'uv_index': 0, + 'wind_bearing': 333, + 'wind_gust_speed': 14.91, + 'wind_speed': 14.91, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 38.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T13:00:00Z', + 'dew_point': 21.2, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.36, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 83, + 'wind_gust_speed': 4.58, + 'wind_speed': 3.16, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T14:00:00Z', + 'dew_point': 21.2, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.96, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 4.74, + 'wind_speed': 4.52, + }), + dict({ + 'apparent_temperature': 24.5, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T15:00:00Z', + 'dew_point': 20.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.6, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 152, + 'wind_gust_speed': 5.63, + 'wind_speed': 5.63, + }), + dict({ + 'apparent_temperature': 24.0, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T16:00:00Z', + 'dew_point': 20.7, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.37, + 'temperature': 
22.3, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 6.02, + 'wind_speed': 6.02, + }), + dict({ + 'apparent_temperature': 23.7, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T17:00:00Z', + 'dew_point': 20.4, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.2, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 6.15, + 'wind_speed': 6.15, + }), + dict({ + 'apparent_temperature': 23.4, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T18:00:00Z', + 'dew_point': 20.2, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.08, + 'temperature': 21.9, + 'uv_index': 0, + 'wind_bearing': 167, + 'wind_gust_speed': 6.48, + 'wind_speed': 6.48, + }), + dict({ + 'apparent_temperature': 23.2, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T19:00:00Z', + 'dew_point': 19.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.04, + 'temperature': 21.8, + 'uv_index': 0, + 'wind_bearing': 165, + 'wind_gust_speed': 7.51, + 'wind_speed': 7.51, + }), + dict({ + 'apparent_temperature': 23.4, + 'cloud_coverage': 99.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T20:00:00Z', + 'dew_point': 19.6, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.05, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 8.73, + 'wind_speed': 8.73, + }), + dict({ + 'apparent_temperature': 23.9, + 'cloud_coverage': 98.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T21:00:00Z', + 'dew_point': 19.5, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.06, + 'temperature': 22.5, + 'uv_index': 0, + 'wind_bearing': 164, + 'wind_gust_speed': 9.21, + 'wind_speed': 9.11, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 96.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T22:00:00Z', + 'dew_point': 19.7, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.09, + 'temperature': 23.8, + 'uv_index': 1, + 'wind_bearing': 171, + 'wind_gust_speed': 9.03, + 'wind_speed': 7.91, + }), + ]), + }), + }) +# --- +# name: test_hourly_forecast[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 79.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T14:00:00Z', + 'dew_point': 21.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.24, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 264, + 'wind_gust_speed': 13.44, + 'wind_speed': 6.62, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 80.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T15:00:00Z', + 'dew_point': 21.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.24, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 261, + 'wind_gust_speed': 11.91, + 'wind_speed': 6.64, + }), + dict({ + 'apparent_temperature': 23.8, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T16:00:00Z', + 'dew_point': 21.1, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.12, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 252, + 'wind_gust_speed': 11.15, + 'wind_speed': 6.14, + }), + dict({ + 
'apparent_temperature': 23.5, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T17:00:00Z', + 'dew_point': 20.9, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.03, + 'temperature': 21.7, + 'uv_index': 0, + 'wind_bearing': 248, + 'wind_gust_speed': 11.57, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 23.3, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T18:00:00Z', + 'dew_point': 20.8, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.05, + 'temperature': 21.6, + 'uv_index': 0, + 'wind_bearing': 237, + 'wind_gust_speed': 12.42, + 'wind_speed': 5.86, + }), + dict({ + 'apparent_temperature': 23.0, + 'cloud_coverage': 75.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T19:00:00Z', + 'dew_point': 20.6, + 'humidity': 96, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.03, + 'temperature': 21.3, + 'uv_index': 0, + 'wind_bearing': 224, + 'wind_gust_speed': 11.3, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 22.8, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T20:00:00Z', + 'dew_point': 20.4, + 'humidity': 96, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.31, + 'temperature': 21.2, + 'uv_index': 0, + 'wind_bearing': 221, + 'wind_gust_speed': 10.57, + 'wind_speed': 5.13, + }), + dict({ + 'apparent_temperature': 23.1, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-08T21:00:00Z', + 'dew_point': 20.5, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.55, + 'temperature': 21.4, + 'uv_index': 0, + 'wind_bearing': 237, + 'wind_gust_speed': 10.63, + 'wind_speed': 5.7, + }), + dict({ + 'apparent_temperature': 24.9, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-08T22:00:00Z', + 'dew_point': 21.3, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.79, + 'temperature': 22.8, + 'uv_index': 1, + 'wind_bearing': 258, + 'wind_gust_speed': 10.47, + 'wind_speed': 5.22, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T23:00:00Z', + 'dew_point': 21.3, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.95, + 'temperature': 24.0, + 'uv_index': 2, + 'wind_bearing': 282, + 'wind_gust_speed': 12.74, + 'wind_speed': 5.71, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T00:00:00Z', + 'dew_point': 21.5, + 'humidity': 80, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.35, + 'temperature': 25.1, + 'uv_index': 3, + 'wind_bearing': 294, + 'wind_gust_speed': 13.87, + 'wind_speed': 6.53, + }), + dict({ + 'apparent_temperature': 29.0, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T01:00:00Z', + 'dew_point': 21.8, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.48, + 'temperature': 26.5, + 'uv_index': 5, + 'wind_bearing': 308, + 'wind_gust_speed': 16.04, + 'wind_speed': 6.54, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T02:00:00Z', + 'dew_point': 22.0, + 'humidity': 72, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1010.23, + 'temperature': 27.6, + 'uv_index': 6, + 'wind_bearing': 314, + 'wind_gust_speed': 18.1, + 'wind_speed': 7.32, + }), + dict({ + 'apparent_temperature': 31.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T03:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.86, + 'temperature': 28.3, + 'uv_index': 6, + 'wind_bearing': 317, + 'wind_gust_speed': 20.77, + 'wind_speed': 9.1, + }), + dict({ + 'apparent_temperature': 31.5, + 'cloud_coverage': 69.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T04:00:00Z', + 'dew_point': 22.1, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.65, + 'temperature': 28.6, + 'uv_index': 6, + 'wind_bearing': 311, + 'wind_gust_speed': 21.27, + 'wind_speed': 10.21, + }), + dict({ + 'apparent_temperature': 31.3, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T05:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.48, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 317, + 'wind_gust_speed': 19.62, + 'wind_speed': 10.53, + }), + dict({ + 'apparent_temperature': 30.8, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T06:00:00Z', + 'dew_point': 22.2, + 'humidity': 71, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.54, + 'temperature': 27.9, + 'uv_index': 3, + 'wind_bearing': 335, + 'wind_gust_speed': 18.98, + 'wind_speed': 8.63, + }), + dict({ + 'apparent_temperature': 29.9, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T07:00:00Z', + 'dew_point': 22.2, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.76, + 'temperature': 27.1, + 'uv_index': 2, + 'wind_bearing': 338, + 'wind_gust_speed': 17.04, + 'wind_speed': 7.75, + }), + dict({ + 'apparent_temperature': 29.1, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T08:00:00Z', + 'dew_point': 22.1, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.05, + 'temperature': 26.4, + 'uv_index': 0, + 'wind_bearing': 342, + 'wind_gust_speed': 14.75, + 'wind_speed': 6.26, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T09:00:00Z', + 'dew_point': 22.0, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.38, + 'temperature': 25.4, + 'uv_index': 0, + 'wind_bearing': 344, + 'wind_gust_speed': 10.43, + 'wind_speed': 5.2, + }), + dict({ + 'apparent_temperature': 26.9, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T10:00:00Z', + 'dew_point': 21.9, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.73, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 339, + 'wind_gust_speed': 6.95, + 'wind_speed': 3.59, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T11:00:00Z', + 'dew_point': 21.8, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.3, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 326, + 'wind_gust_speed': 5.27, + 'wind_speed': 2.1, + }), + dict({ + 'apparent_temperature': 
26.1, + 'cloud_coverage': 53.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.52, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 257, + 'wind_gust_speed': 5.48, + 'wind_speed': 0.93, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T13:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.53, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 188, + 'wind_gust_speed': 4.44, + 'wind_speed': 1.79, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T14:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.46, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 183, + 'wind_gust_speed': 4.49, + 'wind_speed': 2.19, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T15:00:00Z', + 'dew_point': 21.4, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.21, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 179, + 'wind_gust_speed': 5.32, + 'wind_speed': 2.65, + }), + dict({ + 'apparent_temperature': 24.0, + 'cloud_coverage': 42.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T16:00:00Z', + 'dew_point': 21.1, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.09, + 'temperature': 22.1, + 'uv_index': 0, + 'wind_bearing': 173, + 'wind_gust_speed': 5.81, + 'wind_speed': 3.2, + }), + dict({ + 'apparent_temperature': 23.7, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T17:00:00Z', + 'dew_point': 20.9, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.88, + 'temperature': 21.9, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 5.53, + 'wind_speed': 3.16, + }), + dict({ + 'apparent_temperature': 23.3, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T18:00:00Z', + 'dew_point': 20.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.94, + 'temperature': 21.6, + 'uv_index': 0, + 'wind_bearing': 153, + 'wind_gust_speed': 6.09, + 'wind_speed': 3.36, + }), + dict({ + 'apparent_temperature': 23.1, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T19:00:00Z', + 'dew_point': 20.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.96, + 'temperature': 21.4, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 6.83, + 'wind_speed': 3.71, + }), + dict({ + 'apparent_temperature': 22.5, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T20:00:00Z', + 'dew_point': 20.0, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 21.0, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 7.98, + 'wind_speed': 4.27, + }), + dict({ + 'apparent_temperature': 22.8, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T21:00:00Z', + 'dew_point': 20.2, + 'humidity': 94, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1011.61, + 'temperature': 21.2, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 8.4, + 'wind_speed': 4.69, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T22:00:00Z', + 'dew_point': 21.3, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.87, + 'temperature': 23.1, + 'uv_index': 1, + 'wind_bearing': 150, + 'wind_gust_speed': 7.66, + 'wind_speed': 4.33, + }), + dict({ + 'apparent_temperature': 28.3, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T23:00:00Z', + 'dew_point': 22.3, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 25.6, + 'uv_index': 2, + 'wind_bearing': 123, + 'wind_gust_speed': 9.63, + 'wind_speed': 3.91, + }), + dict({ + 'apparent_temperature': 30.4, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T00:00:00Z', + 'dew_point': 22.6, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 27.4, + 'uv_index': 4, + 'wind_bearing': 105, + 'wind_gust_speed': 12.59, + 'wind_speed': 3.96, + }), + dict({ + 'apparent_temperature': 32.2, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T01:00:00Z', + 'dew_point': 22.9, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.79, + 'temperature': 28.9, + 'uv_index': 5, + 'wind_bearing': 99, + 'wind_gust_speed': 14.17, + 'wind_speed': 4.06, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 62.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-10T02:00:00Z', + 'dew_point': 22.9, + 'humidity': 66, + 'precipitation': 0.3, + 'precipitation_probability': 7.000000000000001, + 'pressure': 1011.29, + 'temperature': 29.9, + 'uv_index': 6, + 'wind_bearing': 93, + 'wind_gust_speed': 17.75, + 'wind_speed': 4.87, + }), + dict({ + 'apparent_temperature': 34.3, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T03:00:00Z', + 'dew_point': 23.1, + 'humidity': 64, + 'precipitation': 0.3, + 'precipitation_probability': 11.0, + 'pressure': 1010.78, + 'temperature': 30.6, + 'uv_index': 6, + 'wind_bearing': 78, + 'wind_gust_speed': 17.43, + 'wind_speed': 4.54, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 74.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T04:00:00Z', + 'dew_point': 23.2, + 'humidity': 66, + 'precipitation': 0.4, + 'precipitation_probability': 15.0, + 'pressure': 1010.37, + 'temperature': 30.3, + 'uv_index': 5, + 'wind_bearing': 60, + 'wind_gust_speed': 15.24, + 'wind_speed': 4.9, + }), + dict({ + 'apparent_temperature': 33.7, + 'cloud_coverage': 79.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T05:00:00Z', + 'dew_point': 23.3, + 'humidity': 67, + 'precipitation': 0.7, + 'precipitation_probability': 17.0, + 'pressure': 1010.09, + 'temperature': 30.0, + 'uv_index': 4, + 'wind_bearing': 80, + 'wind_gust_speed': 13.53, + 'wind_speed': 5.98, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 80.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T06:00:00Z', + 'dew_point': 23.4, + 'humidity': 70, + 'precipitation': 1.0, + 'precipitation_probability': 17.0, + 'pressure': 1010.0, + 'temperature': 29.5, + 'uv_index': 3, + 'wind_bearing': 83, + 'wind_gust_speed': 12.55, + 'wind_speed': 6.84, + }), + dict({ + 
'apparent_temperature': 32.3, + 'cloud_coverage': 88.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T07:00:00Z', + 'dew_point': 23.4, + 'humidity': 73, + 'precipitation': 0.4, + 'precipitation_probability': 16.0, + 'pressure': 1010.27, + 'temperature': 28.7, + 'uv_index': 2, + 'wind_bearing': 90, + 'wind_gust_speed': 10.16, + 'wind_speed': 6.07, + }), + dict({ + 'apparent_temperature': 30.9, + 'cloud_coverage': 92.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T08:00:00Z', + 'dew_point': 23.2, + 'humidity': 77, + 'precipitation': 0.5, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1010.71, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 101, + 'wind_gust_speed': 8.18, + 'wind_speed': 4.82, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 93.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T09:00:00Z', + 'dew_point': 23.2, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.9, + 'temperature': 26.5, + 'uv_index': 0, + 'wind_bearing': 128, + 'wind_gust_speed': 8.89, + 'wind_speed': 4.95, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T10:00:00Z', + 'dew_point': 23.0, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.12, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 134, + 'wind_gust_speed': 10.03, + 'wind_speed': 4.52, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 87.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T11:00:00Z', + 'dew_point': 22.8, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.43, + 'temperature': 25.1, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 12.4, + 'wind_speed': 5.41, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T12:00:00Z', + 'dew_point': 22.5, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.58, + 'temperature': 24.8, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 16.36, + 'wind_speed': 6.31, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T13:00:00Z', + 'dew_point': 22.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.55, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 19.66, + 'wind_speed': 7.23, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T14:00:00Z', + 'dew_point': 22.2, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.4, + 'temperature': 24.3, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 21.15, + 'wind_speed': 7.46, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T15:00:00Z', + 'dew_point': 22.0, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.23, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 22.26, + 'wind_speed': 7.84, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T16:00:00Z', + 'dew_point': 21.8, + 'humidity': 89, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1011.01, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 23.53, + 'wind_speed': 8.63, + }), + dict({ + 'apparent_temperature': 25.6, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-10T17:00:00Z', + 'dew_point': 21.6, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.78, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 22.83, + 'wind_speed': 8.61, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T18:00:00Z', + 'dew_point': 21.5, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.69, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 23.7, + 'wind_speed': 8.7, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T19:00:00Z', + 'dew_point': 21.4, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.77, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 24.24, + 'wind_speed': 8.74, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T20:00:00Z', + 'dew_point': 21.6, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.89, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 23.99, + 'wind_speed': 8.81, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T21:00:00Z', + 'dew_point': 21.6, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.1, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 25.55, + 'wind_speed': 9.05, + }), + dict({ + 'apparent_temperature': 27.0, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T22:00:00Z', + 'dew_point': 21.8, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 24.6, + 'uv_index': 1, + 'wind_bearing': 140, + 'wind_gust_speed': 29.08, + 'wind_speed': 10.37, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T23:00:00Z', + 'dew_point': 21.9, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.36, + 'temperature': 25.9, + 'uv_index': 2, + 'wind_bearing': 140, + 'wind_gust_speed': 34.13, + 'wind_speed': 12.56, + }), + dict({ + 'apparent_temperature': 30.1, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T00:00:00Z', + 'dew_point': 22.3, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 27.2, + 'uv_index': 3, + 'wind_bearing': 140, + 'wind_gust_speed': 38.2, + 'wind_speed': 15.65, + }), + dict({ + 'apparent_temperature': 31.4, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T01:00:00Z', + 'dew_point': 22.3, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.31, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 37.55, + 'wind_speed': 15.78, + }), + dict({ + 
'apparent_temperature': 32.7, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T02:00:00Z', + 'dew_point': 22.4, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.98, + 'temperature': 29.6, + 'uv_index': 6, + 'wind_bearing': 143, + 'wind_gust_speed': 35.86, + 'wind_speed': 15.41, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T03:00:00Z', + 'dew_point': 22.5, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.61, + 'temperature': 30.3, + 'uv_index': 6, + 'wind_bearing': 141, + 'wind_gust_speed': 35.88, + 'wind_speed': 15.51, + }), + dict({ + 'apparent_temperature': 33.8, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T04:00:00Z', + 'dew_point': 22.6, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.36, + 'temperature': 30.4, + 'uv_index': 5, + 'wind_bearing': 140, + 'wind_gust_speed': 35.99, + 'wind_speed': 15.75, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T05:00:00Z', + 'dew_point': 22.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.11, + 'temperature': 30.1, + 'uv_index': 4, + 'wind_bearing': 137, + 'wind_gust_speed': 33.61, + 'wind_speed': 15.36, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 77.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T06:00:00Z', + 'dew_point': 22.5, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.98, + 'temperature': 30.0, + 'uv_index': 3, + 'wind_bearing': 138, + 'wind_gust_speed': 32.61, + 'wind_speed': 14.98, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T07:00:00Z', + 'dew_point': 22.2, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.13, + 'temperature': 29.2, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 28.1, + 'wind_speed': 13.88, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T08:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.48, + 'temperature': 28.3, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 24.22, + 'wind_speed': 13.02, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 55.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T09:00:00Z', + 'dew_point': 21.9, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.81, + 'temperature': 27.1, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 22.5, + 'wind_speed': 11.94, + }), + dict({ + 'apparent_temperature': 28.8, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T10:00:00Z', + 'dew_point': 21.7, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 21.47, + 'wind_speed': 11.25, + }), + dict({ + 'apparent_temperature': 28.1, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T11:00:00Z', + 'dew_point': 21.8, + 'humidity': 80, + 
'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.77, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 22.71, + 'wind_speed': 12.39, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.97, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 23.67, + 'wind_speed': 12.83, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T13:00:00Z', + 'dew_point': 21.7, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.97, + 'temperature': 24.7, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 23.34, + 'wind_speed': 12.62, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T14:00:00Z', + 'dew_point': 21.7, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.83, + 'temperature': 24.4, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 22.9, + 'wind_speed': 12.07, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T15:00:00Z', + 'dew_point': 21.6, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.74, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 22.01, + 'wind_speed': 11.19, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T16:00:00Z', + 'dew_point': 21.6, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.56, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 21.29, + 'wind_speed': 10.97, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T17:00:00Z', + 'dew_point': 21.5, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.35, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 20.52, + 'wind_speed': 10.5, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T18:00:00Z', + 'dew_point': 21.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.3, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 20.04, + 'wind_speed': 10.51, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T19:00:00Z', + 'dew_point': 21.3, + 'humidity': 88, + 'precipitation': 0.3, + 'precipitation_probability': 12.0, + 'pressure': 1011.37, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 18.07, + 'wind_speed': 10.13, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T20:00:00Z', + 'dew_point': 21.2, + 'humidity': 89, + 'precipitation': 0.2, + 'precipitation_probability': 13.0, + 'pressure': 1011.53, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 16.86, + 'wind_speed': 10.34, + }), + 
dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T21:00:00Z', + 'dew_point': 21.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.71, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 16.66, + 'wind_speed': 10.68, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T22:00:00Z', + 'dew_point': 21.9, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.94, + 'temperature': 24.4, + 'uv_index': 1, + 'wind_bearing': 137, + 'wind_gust_speed': 17.21, + 'wind_speed': 10.61, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T23:00:00Z', + 'dew_point': 22.3, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.05, + 'temperature': 25.6, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 19.23, + 'wind_speed': 11.13, + }), + dict({ + 'apparent_temperature': 29.5, + 'cloud_coverage': 79.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T00:00:00Z', + 'dew_point': 22.6, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.07, + 'temperature': 26.6, + 'uv_index': 3, + 'wind_bearing': 140, + 'wind_gust_speed': 20.61, + 'wind_speed': 11.13, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 82.0, + 'condition': 'rainy', + 'datetime': '2023-09-12T01:00:00Z', + 'dew_point': 23.1, + 'humidity': 75, + 'precipitation': 0.2, + 'precipitation_probability': 16.0, + 'pressure': 1011.89, + 'temperature': 27.9, + 'uv_index': 4, + 'wind_bearing': 141, + 'wind_gust_speed': 23.35, + 'wind_speed': 11.98, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T02:00:00Z', + 'dew_point': 23.5, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.53, + 'temperature': 29.0, + 'uv_index': 5, + 'wind_bearing': 143, + 'wind_gust_speed': 26.45, + 'wind_speed': 13.01, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T03:00:00Z', + 'dew_point': 23.5, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.15, + 'temperature': 29.8, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 28.95, + 'wind_speed': 13.9, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T04:00:00Z', + 'dew_point': 23.4, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.79, + 'temperature': 30.2, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 27.9, + 'wind_speed': 13.95, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T05:00:00Z', + 'dew_point': 23.1, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.43, + 'temperature': 30.4, + 'uv_index': 4, + 'wind_bearing': 140, + 'wind_gust_speed': 26.53, + 'wind_speed': 13.78, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T06:00:00Z', + 'dew_point': 22.6, + 'humidity': 64, + 'precipitation': 
0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.21, + 'temperature': 30.1, + 'uv_index': 3, + 'wind_bearing': 138, + 'wind_gust_speed': 24.56, + 'wind_speed': 13.74, + }), + dict({ + 'apparent_temperature': 32.0, + 'cloud_coverage': 53.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T07:00:00Z', + 'dew_point': 22.1, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.26, + 'temperature': 29.1, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 22.78, + 'wind_speed': 13.21, + }), + dict({ + 'apparent_temperature': 30.9, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.51, + 'temperature': 28.1, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 19.92, + 'wind_speed': 12.0, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 50.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T09:00:00Z', + 'dew_point': 21.7, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.8, + 'temperature': 27.2, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 17.65, + 'wind_speed': 10.97, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T10:00:00Z', + 'dew_point': 21.4, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.23, + 'temperature': 26.2, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 15.87, + 'wind_speed': 10.23, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T11:00:00Z', + 'dew_point': 21.3, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1011.79, + 'temperature': 25.4, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 13.9, + 'wind_speed': 9.39, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T12:00:00Z', + 'dew_point': 21.2, + 'humidity': 81, + 'precipitation': 0.0, + 'precipitation_probability': 47.0, + 'pressure': 1012.12, + 'temperature': 24.7, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 13.32, + 'wind_speed': 8.9, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T13:00:00Z', + 'dew_point': 21.2, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1012.18, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 13.18, + 'wind_speed': 8.59, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T14:00:00Z', + 'dew_point': 21.3, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.09, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 13.84, + 'wind_speed': 8.87, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T15:00:00Z', + 'dew_point': 21.3, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.99, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 15.08, + 
'wind_speed': 8.93, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T16:00:00Z', + 'dew_point': 21.0, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 23.2, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 16.74, + 'wind_speed': 9.49, + }), + dict({ + 'apparent_temperature': 24.7, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T17:00:00Z', + 'dew_point': 20.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.75, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 17.45, + 'wind_speed': 9.12, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T18:00:00Z', + 'dew_point': 20.7, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.77, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 17.04, + 'wind_speed': 8.68, + }), + dict({ + 'apparent_temperature': 24.1, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T19:00:00Z', + 'dew_point': 20.6, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 16.8, + 'wind_speed': 8.61, + }), + dict({ + 'apparent_temperature': 23.9, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T20:00:00Z', + 'dew_point': 20.5, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.23, + 'temperature': 22.1, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 15.35, + 'wind_speed': 8.36, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 75.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T21:00:00Z', + 'dew_point': 20.6, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.49, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 14.09, + 'wind_speed': 7.77, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T22:00:00Z', + 'dew_point': 21.0, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.72, + 'temperature': 23.8, + 'uv_index': 1, + 'wind_bearing': 152, + 'wind_gust_speed': 14.04, + 'wind_speed': 7.25, + }), + dict({ + 'apparent_temperature': 27.8, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T23:00:00Z', + 'dew_point': 21.4, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.85, + 'temperature': 25.5, + 'uv_index': 2, + 'wind_bearing': 149, + 'wind_gust_speed': 15.31, + 'wind_speed': 7.14, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-13T00:00:00Z', + 'dew_point': 21.8, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.89, + 'temperature': 27.1, + 'uv_index': 4, + 'wind_bearing': 141, + 'wind_gust_speed': 16.42, + 'wind_speed': 6.89, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T01:00:00Z', + 'dew_point': 22.0, + 'humidity': 68, + 
'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.65, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 137, + 'wind_gust_speed': 18.64, + 'wind_speed': 6.65, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T02:00:00Z', + 'dew_point': 21.9, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.26, + 'temperature': 29.4, + 'uv_index': 5, + 'wind_bearing': 128, + 'wind_gust_speed': 21.69, + 'wind_speed': 7.12, + }), + dict({ + 'apparent_temperature': 33.0, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T03:00:00Z', + 'dew_point': 21.9, + 'humidity': 62, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.88, + 'temperature': 30.1, + 'uv_index': 6, + 'wind_bearing': 111, + 'wind_gust_speed': 23.41, + 'wind_speed': 7.33, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 72.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T04:00:00Z', + 'dew_point': 22.0, + 'humidity': 61, + 'precipitation': 0.9, + 'precipitation_probability': 12.0, + 'pressure': 1011.55, + 'temperature': 30.4, + 'uv_index': 5, + 'wind_bearing': 56, + 'wind_gust_speed': 23.1, + 'wind_speed': 8.09, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 72.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T05:00:00Z', + 'dew_point': 21.9, + 'humidity': 61, + 'precipitation': 1.9, + 'precipitation_probability': 12.0, + 'pressure': 1011.29, + 'temperature': 30.2, + 'uv_index': 4, + 'wind_bearing': 20, + 'wind_gust_speed': 21.81, + 'wind_speed': 9.46, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 74.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T06:00:00Z', + 'dew_point': 21.9, + 'humidity': 63, + 'precipitation': 2.3, + 'precipitation_probability': 11.0, + 'pressure': 1011.17, + 'temperature': 29.7, + 'uv_index': 3, + 'wind_bearing': 20, + 'wind_gust_speed': 19.72, + 'wind_speed': 9.8, + }), + dict({ + 'apparent_temperature': 31.8, + 'cloud_coverage': 69.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T07:00:00Z', + 'dew_point': 22.4, + 'humidity': 68, + 'precipitation': 1.8, + 'precipitation_probability': 10.0, + 'pressure': 1011.32, + 'temperature': 28.8, + 'uv_index': 1, + 'wind_bearing': 18, + 'wind_gust_speed': 17.55, + 'wind_speed': 9.23, + }), + dict({ + 'apparent_temperature': 30.8, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T08:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.8, + 'precipitation_probability': 10.0, + 'pressure': 1011.6, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 27, + 'wind_gust_speed': 15.08, + 'wind_speed': 8.05, + }), + dict({ + 'apparent_temperature': 29.4, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T09:00:00Z', + 'dew_point': 23.0, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.94, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 32, + 'wind_gust_speed': 12.17, + 'wind_speed': 6.68, + }), + dict({ + 'apparent_temperature': 28.5, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T10:00:00Z', + 'dew_point': 22.9, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.3, + 'temperature': 25.5, + 'uv_index': 0, + 'wind_bearing': 69, + 'wind_gust_speed': 11.64, + 'wind_speed': 6.69, + }), + dict({ + 
'apparent_temperature': 27.7, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T11:00:00Z', + 'dew_point': 22.6, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.71, + 'temperature': 25.0, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 11.91, + 'wind_speed': 6.23, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T12:00:00Z', + 'dew_point': 22.3, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.96, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 12.47, + 'wind_speed': 5.73, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T13:00:00Z', + 'dew_point': 22.3, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.03, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 13.57, + 'wind_speed': 5.66, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T14:00:00Z', + 'dew_point': 22.2, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.99, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 15.07, + 'wind_speed': 5.83, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T15:00:00Z', + 'dew_point': 22.2, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.95, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 16.06, + 'wind_speed': 5.93, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T16:00:00Z', + 'dew_point': 22.0, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.9, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 153, + 'wind_gust_speed': 16.05, + 'wind_speed': 5.75, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T17:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.85, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 15.52, + 'wind_speed': 5.49, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 92.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T18:00:00Z', + 'dew_point': 21.8, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.87, + 'temperature': 23.0, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 15.01, + 'wind_speed': 5.32, + }), + dict({ + 'apparent_temperature': 25.0, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T19:00:00Z', + 'dew_point': 21.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.01, + 'temperature': 22.8, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 14.39, + 'wind_speed': 5.33, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T20:00:00Z', + 'dew_point': 21.6, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 
0.0, + 'pressure': 1013.22, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 13.79, + 'wind_speed': 5.43, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T21:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.41, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 14.12, + 'wind_speed': 5.52, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 77.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T22:00:00Z', + 'dew_point': 22.1, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.59, + 'temperature': 24.3, + 'uv_index': 1, + 'wind_bearing': 147, + 'wind_gust_speed': 16.14, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T23:00:00Z', + 'dew_point': 22.4, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.74, + 'temperature': 25.7, + 'uv_index': 2, + 'wind_bearing': 146, + 'wind_gust_speed': 19.09, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 30.5, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T00:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.78, + 'temperature': 27.4, + 'uv_index': 4, + 'wind_bearing': 143, + 'wind_gust_speed': 21.6, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 32.2, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T01:00:00Z', + 'dew_point': 23.2, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.61, + 'temperature': 28.7, + 'uv_index': 5, + 'wind_bearing': 138, + 'wind_gust_speed': 23.36, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T02:00:00Z', + 'dew_point': 23.2, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.32, + 'temperature': 29.9, + 'uv_index': 6, + 'wind_bearing': 111, + 'wind_gust_speed': 24.72, + 'wind_speed': 4.99, + }), + dict({ + 'apparent_temperature': 34.4, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T03:00:00Z', + 'dew_point': 23.3, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.04, + 'temperature': 30.7, + 'uv_index': 6, + 'wind_bearing': 354, + 'wind_gust_speed': 25.23, + 'wind_speed': 4.74, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T04:00:00Z', + 'dew_point': 23.4, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.77, + 'temperature': 31.0, + 'uv_index': 6, + 'wind_bearing': 341, + 'wind_gust_speed': 24.6, + 'wind_speed': 4.79, + }), + dict({ + 'apparent_temperature': 34.5, + 'cloud_coverage': 60.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T05:00:00Z', + 'dew_point': 23.2, + 'humidity': 64, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1012.53, + 'temperature': 30.7, + 'uv_index': 5, + 'wind_bearing': 336, + 'wind_gust_speed': 23.28, + 'wind_speed': 5.07, + }), + 
dict({ + 'apparent_temperature': 33.8, + 'cloud_coverage': 59.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T06:00:00Z', + 'dew_point': 23.1, + 'humidity': 66, + 'precipitation': 0.2, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1012.49, + 'temperature': 30.2, + 'uv_index': 3, + 'wind_bearing': 336, + 'wind_gust_speed': 22.05, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 32.9, + 'cloud_coverage': 53.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T07:00:00Z', + 'dew_point': 23.0, + 'humidity': 68, + 'precipitation': 0.2, + 'precipitation_probability': 40.0, + 'pressure': 1012.73, + 'temperature': 29.5, + 'uv_index': 2, + 'wind_bearing': 339, + 'wind_gust_speed': 21.18, + 'wind_speed': 5.63, + }), + dict({ + 'apparent_temperature': 31.6, + 'cloud_coverage': 43.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T08:00:00Z', + 'dew_point': 22.8, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 45.0, + 'pressure': 1013.16, + 'temperature': 28.4, + 'uv_index': 0, + 'wind_bearing': 342, + 'wind_gust_speed': 20.35, + 'wind_speed': 5.93, + }), + dict({ + 'apparent_temperature': 30.0, + 'cloud_coverage': 35.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T09:00:00Z', + 'dew_point': 22.5, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1013.62, + 'temperature': 27.1, + 'uv_index': 0, + 'wind_bearing': 347, + 'wind_gust_speed': 19.42, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 29.0, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T10:00:00Z', + 'dew_point': 22.4, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.09, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 348, + 'wind_gust_speed': 18.19, + 'wind_speed': 5.31, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T11:00:00Z', + 'dew_point': 22.4, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.56, + 'temperature': 25.5, + 'uv_index': 0, + 'wind_bearing': 177, + 'wind_gust_speed': 16.79, + 'wind_speed': 4.28, + }), + dict({ + 'apparent_temperature': 27.5, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T12:00:00Z', + 'dew_point': 22.3, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.87, + 'temperature': 24.9, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 15.61, + 'wind_speed': 3.72, + }), + dict({ + 'apparent_temperature': 26.6, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T13:00:00Z', + 'dew_point': 22.1, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.91, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 14.7, + 'wind_speed': 4.11, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T14:00:00Z', + 'dew_point': 21.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.8, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 13.81, + 'wind_speed': 4.97, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T15:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 
'precipitation_probability': 40.0, + 'pressure': 1014.66, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 170, + 'wind_gust_speed': 12.88, + 'wind_speed': 5.57, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 37.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T16:00:00Z', + 'dew_point': 21.5, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.54, + 'temperature': 22.7, + 'uv_index': 0, + 'wind_bearing': 168, + 'wind_gust_speed': 12.0, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 39.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T17:00:00Z', + 'dew_point': 21.3, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.45, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 165, + 'wind_gust_speed': 11.43, + 'wind_speed': 5.48, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T18:00:00Z', + 'dew_point': 21.4, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 44.0, + 'pressure': 1014.45, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 11.42, + 'wind_speed': 5.38, + }), + dict({ + 'apparent_temperature': 25.0, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T19:00:00Z', + 'dew_point': 21.6, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 52.0, + 'pressure': 1014.63, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 12.15, + 'wind_speed': 5.39, + }), + dict({ + 'apparent_temperature': 25.6, + 'cloud_coverage': 38.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T20:00:00Z', + 'dew_point': 21.8, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 51.0, + 'pressure': 1014.91, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 13.54, + 'wind_speed': 5.45, + }), + dict({ + 'apparent_temperature': 26.6, + 'cloud_coverage': 36.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T21:00:00Z', + 'dew_point': 22.0, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 42.0, + 'pressure': 1015.18, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 15.48, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 28.5, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T22:00:00Z', + 'dew_point': 22.5, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 28.999999999999996, + 'pressure': 1015.4, + 'temperature': 25.7, + 'uv_index': 1, + 'wind_bearing': 158, + 'wind_gust_speed': 17.86, + 'wind_speed': 5.84, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 77, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.54, + 'temperature': 27.2, + 'uv_index': 2, + 'wind_bearing': 155, + 'wind_gust_speed': 20.19, + 'wind_speed': 6.09, + }), + dict({ + 'apparent_temperature': 32.1, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-15T00:00:00Z', + 'dew_point': 23.3, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.55, + 'temperature': 28.6, + 'uv_index': 4, + 'wind_bearing': 152, + 'wind_gust_speed': 21.83, + 'wind_speed': 6.42, + }), 
+ dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-15T01:00:00Z', + 'dew_point': 23.5, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.35, + 'temperature': 29.6, + 'uv_index': 6, + 'wind_bearing': 144, + 'wind_gust_speed': 22.56, + 'wind_speed': 6.91, + }), + dict({ + 'apparent_temperature': 34.2, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T02:00:00Z', + 'dew_point': 23.5, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.0, + 'temperature': 30.4, + 'uv_index': 7, + 'wind_bearing': 336, + 'wind_gust_speed': 22.83, + 'wind_speed': 7.47, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T03:00:00Z', + 'dew_point': 23.5, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.62, + 'temperature': 30.9, + 'uv_index': 7, + 'wind_bearing': 336, + 'wind_gust_speed': 22.98, + 'wind_speed': 7.95, + }), + dict({ + 'apparent_temperature': 35.4, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T04:00:00Z', + 'dew_point': 23.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.25, + 'temperature': 31.3, + 'uv_index': 6, + 'wind_bearing': 341, + 'wind_gust_speed': 23.21, + 'wind_speed': 8.44, + }), + dict({ + 'apparent_temperature': 35.6, + 'cloud_coverage': 44.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T05:00:00Z', + 'dew_point': 23.7, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.95, + 'temperature': 31.5, + 'uv_index': 5, + 'wind_bearing': 344, + 'wind_gust_speed': 23.46, + 'wind_speed': 8.95, + }), + dict({ + 'apparent_temperature': 35.1, + 'cloud_coverage': 42.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T06:00:00Z', + 'dew_point': 23.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.83, + 'temperature': 31.1, + 'uv_index': 3, + 'wind_bearing': 347, + 'wind_gust_speed': 23.64, + 'wind_speed': 9.13, + }), + dict({ + 'apparent_temperature': 34.1, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T07:00:00Z', + 'dew_point': 23.4, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.96, + 'temperature': 30.3, + 'uv_index': 2, + 'wind_bearing': 350, + 'wind_gust_speed': 23.66, + 'wind_speed': 8.78, + }), + dict({ + 'apparent_temperature': 32.4, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T08:00:00Z', + 'dew_point': 23.1, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.25, + 'temperature': 29.0, + 'uv_index': 0, + 'wind_bearing': 356, + 'wind_gust_speed': 23.51, + 'wind_speed': 8.13, + }), + dict({ + 'apparent_temperature': 31.1, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T09:00:00Z', + 'dew_point': 22.9, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.61, + 'temperature': 27.9, + 'uv_index': 0, + 'wind_bearing': 3, + 'wind_gust_speed': 23.21, + 'wind_speed': 7.48, + }), + dict({ + 'apparent_temperature': 30.0, + 'cloud_coverage': 43.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T10:00:00Z', + 'dew_point': 22.8, + 'humidity': 
78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.02, + 'temperature': 26.9, + 'uv_index': 0, + 'wind_bearing': 20, + 'wind_gust_speed': 22.68, + 'wind_speed': 6.83, + }), + dict({ + 'apparent_temperature': 29.2, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T11:00:00Z', + 'dew_point': 22.8, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.43, + 'temperature': 26.2, + 'uv_index': 0, + 'wind_bearing': 129, + 'wind_gust_speed': 22.04, + 'wind_speed': 6.1, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T12:00:00Z', + 'dew_point': 22.7, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.71, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 21.64, + 'wind_speed': 5.6, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T13:00:00Z', + 'dew_point': 23.2, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.52, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 164, + 'wind_gust_speed': 16.35, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T14:00:00Z', + 'dew_point': 22.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.37, + 'temperature': 24.6, + 'uv_index': 0, + 'wind_bearing': 168, + 'wind_gust_speed': 17.11, + 'wind_speed': 5.79, + }), + dict({ + 'apparent_temperature': 26.9, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T15:00:00Z', + 'dew_point': 22.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.21, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 17.32, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T16:00:00Z', + 'dew_point': 22.6, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.07, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 201, + 'wind_gust_speed': 16.6, + 'wind_speed': 5.27, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T17:00:00Z', + 'dew_point': 22.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.95, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 219, + 'wind_gust_speed': 15.52, + 'wind_speed': 4.62, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T18:00:00Z', + 'dew_point': 22.3, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.88, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 216, + 'wind_gust_speed': 14.64, + 'wind_speed': 4.32, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T19:00:00Z', + 'dew_point': 22.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.91, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 198, + 'wind_gust_speed': 14.06, + 'wind_speed': 4.73, + }), + 
dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T20:00:00Z', + 'dew_point': 22.4, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.99, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 189, + 'wind_gust_speed': 13.7, + 'wind_speed': 5.49, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T21:00:00Z', + 'dew_point': 22.5, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.07, + 'temperature': 24.4, + 'uv_index': 0, + 'wind_bearing': 183, + 'wind_gust_speed': 13.77, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 28.3, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T22:00:00Z', + 'dew_point': 22.6, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.12, + 'temperature': 25.5, + 'uv_index': 1, + 'wind_bearing': 179, + 'wind_gust_speed': 14.38, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 29.9, + 'cloud_coverage': 52.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.13, + 'temperature': 26.9, + 'uv_index': 2, + 'wind_bearing': 170, + 'wind_gust_speed': 15.2, + 'wind_speed': 5.27, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 44.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T00:00:00Z', + 'dew_point': 22.9, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.04, + 'temperature': 28.0, + 'uv_index': 4, + 'wind_bearing': 155, + 'wind_gust_speed': 15.85, + 'wind_speed': 4.76, + }), + dict({ + 'apparent_temperature': 32.5, + 'cloud_coverage': 24.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T01:00:00Z', + 'dew_point': 22.6, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.52, + 'temperature': 29.2, + 'uv_index': 6, + 'wind_bearing': 110, + 'wind_gust_speed': 16.27, + 'wind_speed': 6.81, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 16.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T02:00:00Z', + 'dew_point': 22.4, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.01, + 'temperature': 30.2, + 'uv_index': 8, + 'wind_bearing': 30, + 'wind_gust_speed': 16.55, + 'wind_speed': 6.86, + }), + dict({ + 'apparent_temperature': 34.2, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T03:00:00Z', + 'dew_point': 22.0, + 'humidity': 59, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.45, + 'temperature': 31.1, + 'uv_index': 8, + 'wind_bearing': 17, + 'wind_gust_speed': 16.52, + 'wind_speed': 6.8, + }), + dict({ + 'apparent_temperature': 34.7, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T04:00:00Z', + 'dew_point': 21.9, + 'humidity': 57, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.89, + 'temperature': 31.5, + 'uv_index': 8, + 'wind_bearing': 17, + 'wind_gust_speed': 16.08, + 'wind_speed': 6.62, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T05:00:00Z', + 'dew_point': 21.9, + 'humidity': 56, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1012.39, + 'temperature': 31.8, + 'uv_index': 6, + 'wind_bearing': 20, + 'wind_gust_speed': 15.48, + 'wind_speed': 6.45, + }), + dict({ + 'apparent_temperature': 34.5, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T06:00:00Z', + 'dew_point': 21.7, + 'humidity': 56, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.11, + 'temperature': 31.4, + 'uv_index': 4, + 'wind_bearing': 26, + 'wind_gust_speed': 15.08, + 'wind_speed': 6.43, + }), + dict({ + 'apparent_temperature': 33.6, + 'cloud_coverage': 7.000000000000001, + 'condition': 'sunny', + 'datetime': '2023-09-16T07:00:00Z', + 'dew_point': 21.7, + 'humidity': 59, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.15, + 'temperature': 30.7, + 'uv_index': 2, + 'wind_bearing': 39, + 'wind_gust_speed': 14.88, + 'wind_speed': 6.61, + }), + dict({ + 'apparent_temperature': 32.5, + 'cloud_coverage': 2.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.41, + 'temperature': 29.6, + 'uv_index': 0, + 'wind_bearing': 72, + 'wind_gust_speed': 14.82, + 'wind_speed': 6.95, + }), + dict({ + 'apparent_temperature': 31.4, + 'cloud_coverage': 2.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T09:00:00Z', + 'dew_point': 22.1, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.75, + 'temperature': 28.5, + 'uv_index': 0, + 'wind_bearing': 116, + 'wind_gust_speed': 15.13, + 'wind_speed': 7.45, + }), + dict({ + 'apparent_temperature': 30.5, + 'cloud_coverage': 13.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T10:00:00Z', + 'dew_point': 22.3, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.13, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 16.09, + 'wind_speed': 8.15, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T11:00:00Z', + 'dew_point': 22.6, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.47, + 'temperature': 26.9, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 17.37, + 'wind_speed': 8.87, + }), + dict({ + 'apparent_temperature': 29.3, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T12:00:00Z', + 'dew_point': 22.9, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.6, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 18.29, + 'wind_speed': 9.21, + }), + dict({ + 'apparent_temperature': 28.7, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T13:00:00Z', + 'dew_point': 23.0, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.41, + 'temperature': 25.7, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 18.49, + 'wind_speed': 8.96, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 55.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T14:00:00Z', + 'dew_point': 22.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.01, + 'temperature': 25.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 18.47, + 'wind_speed': 8.45, + }), + dict({ 
+ 'apparent_temperature': 27.2, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T15:00:00Z', + 'dew_point': 22.7, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.55, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 18.79, + 'wind_speed': 8.1, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T16:00:00Z', + 'dew_point': 22.6, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.1, + 'temperature': 24.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 19.81, + 'wind_speed': 8.15, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T17:00:00Z', + 'dew_point': 22.6, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.68, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 20.96, + 'wind_speed': 8.3, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T18:00:00Z', + 'dew_point': 22.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 21.41, + 'wind_speed': 8.24, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T19:00:00Z', + 'dew_point': 22.5, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 20.42, + 'wind_speed': 7.62, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T20:00:00Z', + 'dew_point': 22.6, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.31, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 18.61, + 'wind_speed': 6.66, + }), + dict({ + 'apparent_temperature': 27.7, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T21:00:00Z', + 'dew_point': 22.6, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.37, + 'temperature': 24.9, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 17.14, + 'wind_speed': 5.86, + }), + dict({ + 'apparent_temperature': 28.9, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T22:00:00Z', + 'dew_point': 22.6, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.46, + 'temperature': 26.0, + 'uv_index': 1, + 'wind_bearing': 161, + 'wind_gust_speed': 16.78, + 'wind_speed': 5.5, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 39.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.51, + 'temperature': 27.5, + 'uv_index': 2, + 'wind_bearing': 165, + 'wind_gust_speed': 17.21, + 'wind_speed': 5.56, + }), + dict({ + 'apparent_temperature': 31.7, + 'cloud_coverage': 33.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T00:00:00Z', + 'dew_point': 22.8, + 'humidity': 71, + 'precipitation': 0.0, 
+ 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 28.5, + 'uv_index': 4, + 'wind_bearing': 174, + 'wind_gust_speed': 17.96, + 'wind_speed': 6.04, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T01:00:00Z', + 'dew_point': 22.7, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.98, + 'temperature': 29.4, + 'uv_index': 6, + 'wind_bearing': 192, + 'wind_gust_speed': 19.15, + 'wind_speed': 7.23, + }), + dict({ + 'apparent_temperature': 33.6, + 'cloud_coverage': 28.999999999999996, + 'condition': 'sunny', + 'datetime': '2023-09-17T02:00:00Z', + 'dew_point': 22.8, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.38, + 'temperature': 30.1, + 'uv_index': 7, + 'wind_bearing': 225, + 'wind_gust_speed': 20.89, + 'wind_speed': 8.9, + }), + dict({ + 'apparent_temperature': 34.1, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T03:00:00Z', + 'dew_point': 22.8, + 'humidity': 63, + 'precipitation': 0.3, + 'precipitation_probability': 9.0, + 'pressure': 1009.75, + 'temperature': 30.7, + 'uv_index': 8, + 'wind_bearing': 264, + 'wind_gust_speed': 22.67, + 'wind_speed': 10.27, + }), + dict({ + 'apparent_temperature': 33.9, + 'cloud_coverage': 37.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T04:00:00Z', + 'dew_point': 22.5, + 'humidity': 62, + 'precipitation': 0.4, + 'precipitation_probability': 10.0, + 'pressure': 1009.18, + 'temperature': 30.5, + 'uv_index': 7, + 'wind_bearing': 293, + 'wind_gust_speed': 23.93, + 'wind_speed': 10.82, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T05:00:00Z', + 'dew_point': 22.4, + 'humidity': 63, + 'precipitation': 0.6, + 'precipitation_probability': 12.0, + 'pressure': 1008.71, + 'temperature': 30.1, + 'uv_index': 5, + 'wind_bearing': 308, + 'wind_gust_speed': 24.39, + 'wind_speed': 10.72, + }), + dict({ + 'apparent_temperature': 32.7, + 'cloud_coverage': 50.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T06:00:00Z', + 'dew_point': 22.2, + 'humidity': 64, + 'precipitation': 0.7, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1008.46, + 'temperature': 29.6, + 'uv_index': 3, + 'wind_bearing': 312, + 'wind_gust_speed': 23.9, + 'wind_speed': 10.28, + }), + dict({ + 'apparent_temperature': 31.8, + 'cloud_coverage': 47.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T07:00:00Z', + 'dew_point': 22.1, + 'humidity': 67, + 'precipitation': 0.7, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1008.53, + 'temperature': 28.9, + 'uv_index': 1, + 'wind_bearing': 312, + 'wind_gust_speed': 22.3, + 'wind_speed': 9.59, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 70, + 'precipitation': 0.6, + 'precipitation_probability': 15.0, + 'pressure': 1008.82, + 'temperature': 27.9, + 'uv_index': 0, + 'wind_bearing': 305, + 'wind_gust_speed': 19.73, + 'wind_speed': 8.58, + }), + dict({ + 'apparent_temperature': 29.6, + 'cloud_coverage': 35.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T09:00:00Z', + 'dew_point': 22.0, + 'humidity': 74, + 'precipitation': 0.5, + 'precipitation_probability': 15.0, + 'pressure': 1009.21, + 'temperature': 27.0, + 'uv_index': 0, + 'wind_bearing': 291, + 'wind_gust_speed': 
16.49, + 'wind_speed': 7.34, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 33.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T10:00:00Z', + 'dew_point': 21.9, + 'humidity': 78, + 'precipitation': 0.4, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1009.65, + 'temperature': 26.1, + 'uv_index': 0, + 'wind_bearing': 257, + 'wind_gust_speed': 12.71, + 'wind_speed': 5.91, + }), + dict({ + 'apparent_temperature': 27.8, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T11:00:00Z', + 'dew_point': 21.9, + 'humidity': 82, + 'precipitation': 0.3, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1010.04, + 'temperature': 25.3, + 'uv_index': 0, + 'wind_bearing': 212, + 'wind_gust_speed': 9.16, + 'wind_speed': 4.54, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 36.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T12:00:00Z', + 'dew_point': 21.9, + 'humidity': 85, + 'precipitation': 0.3, + 'precipitation_probability': 28.000000000000004, + 'pressure': 1010.24, + 'temperature': 24.6, + 'uv_index': 0, + 'wind_bearing': 192, + 'wind_gust_speed': 7.09, + 'wind_speed': 3.62, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T13:00:00Z', + 'dew_point': 22.0, + 'humidity': 88, + 'precipitation': 0.3, + 'precipitation_probability': 30.0, + 'pressure': 1010.15, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 185, + 'wind_gust_speed': 7.2, + 'wind_speed': 3.27, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 44.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T14:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.3, + 'precipitation_probability': 30.0, + 'pressure': 1009.87, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 8.37, + 'wind_speed': 3.22, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 49.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T15:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.2, + 'precipitation_probability': 31.0, + 'pressure': 1009.56, + 'temperature': 23.2, + 'uv_index': 0, + 'wind_bearing': 180, + 'wind_gust_speed': 9.21, + 'wind_speed': 3.3, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 53.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T16:00:00Z', + 'dew_point': 21.8, + 'humidity': 94, + 'precipitation': 0.2, + 'precipitation_probability': 33.0, + 'pressure': 1009.29, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 9.0, + 'wind_speed': 3.46, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T17:00:00Z', + 'dew_point': 21.7, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 35.0, + 'pressure': 1009.09, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 186, + 'wind_gust_speed': 8.37, + 'wind_speed': 3.72, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T18:00:00Z', + 'dew_point': 21.6, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 37.0, + 'pressure': 1009.01, + 'temperature': 22.5, + 'uv_index': 0, + 'wind_bearing': 201, + 'wind_gust_speed': 7.99, + 'wind_speed': 4.07, + }), + dict({ + 'apparent_temperature': 24.9, + 'cloud_coverage': 62.0, + 'condition': 'partlycloudy', + 'datetime': 
'2023-09-17T19:00:00Z', + 'dew_point': 21.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 39.0, + 'pressure': 1009.07, + 'temperature': 22.7, + 'uv_index': 0, + 'wind_bearing': 258, + 'wind_gust_speed': 8.18, + 'wind_speed': 4.55, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T20:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 39.0, + 'pressure': 1009.23, + 'temperature': 23.0, + 'uv_index': 0, + 'wind_bearing': 305, + 'wind_gust_speed': 8.77, + 'wind_speed': 5.17, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T21:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 38.0, + 'pressure': 1009.47, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 318, + 'wind_gust_speed': 9.69, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T22:00:00Z', + 'dew_point': 21.8, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 30.0, + 'pressure': 1009.77, + 'temperature': 24.2, + 'uv_index': 1, + 'wind_bearing': 324, + 'wind_gust_speed': 10.88, + 'wind_speed': 6.26, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 80.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T23:00:00Z', + 'dew_point': 21.9, + 'humidity': 83, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1010.09, + 'temperature': 25.1, + 'uv_index': 2, + 'wind_bearing': 329, + 'wind_gust_speed': 12.21, + 'wind_speed': 6.68, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 87.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T00:00:00Z', + 'dew_point': 21.9, + 'humidity': 80, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1010.33, + 'temperature': 25.7, + 'uv_index': 3, + 'wind_bearing': 332, + 'wind_gust_speed': 13.52, + 'wind_speed': 7.12, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 67.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T01:00:00Z', + 'dew_point': 21.7, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1007.43, + 'temperature': 27.2, + 'uv_index': 5, + 'wind_bearing': 330, + 'wind_gust_speed': 11.36, + 'wind_speed': 11.36, + }), + dict({ + 'apparent_temperature': 30.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T02:00:00Z', + 'dew_point': 21.6, + 'humidity': 70, + 'precipitation': 0.3, + 'precipitation_probability': 9.0, + 'pressure': 1007.05, + 'temperature': 27.5, + 'uv_index': 6, + 'wind_bearing': 332, + 'wind_gust_speed': 12.06, + 'wind_speed': 12.06, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T03:00:00Z', + 'dew_point': 21.6, + 'humidity': 69, + 'precipitation': 0.5, + 'precipitation_probability': 10.0, + 'pressure': 1006.67, + 'temperature': 27.8, + 'uv_index': 6, + 'wind_bearing': 333, + 'wind_gust_speed': 12.81, + 'wind_speed': 12.81, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 67.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T04:00:00Z', + 'dew_point': 21.5, + 'humidity': 68, + 'precipitation': 0.4, + 'precipitation_probability': 10.0, + 'pressure': 1006.28, + 'temperature': 28.0, + 'uv_index': 5, + 'wind_bearing': 335, 
+ 'wind_gust_speed': 13.68, + 'wind_speed': 13.68, + }), + dict({ + 'apparent_temperature': 30.7, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T05:00:00Z', + 'dew_point': 21.4, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1005.89, + 'temperature': 28.1, + 'uv_index': 4, + 'wind_bearing': 336, + 'wind_gust_speed': 14.61, + 'wind_speed': 14.61, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T06:00:00Z', + 'dew_point': 21.2, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 27.0, + 'pressure': 1005.67, + 'temperature': 27.9, + 'uv_index': 3, + 'wind_bearing': 338, + 'wind_gust_speed': 15.25, + 'wind_speed': 15.25, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T07:00:00Z', + 'dew_point': 21.3, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 28.000000000000004, + 'pressure': 1005.74, + 'temperature': 27.4, + 'uv_index': 1, + 'wind_bearing': 339, + 'wind_gust_speed': 15.45, + 'wind_speed': 15.45, + }), + dict({ + 'apparent_temperature': 29.1, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T08:00:00Z', + 'dew_point': 21.4, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1005.98, + 'temperature': 26.7, + 'uv_index': 0, + 'wind_bearing': 341, + 'wind_gust_speed': 15.38, + 'wind_speed': 15.38, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T09:00:00Z', + 'dew_point': 21.6, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1006.22, + 'temperature': 26.1, + 'uv_index': 0, + 'wind_bearing': 341, + 'wind_gust_speed': 15.27, + 'wind_speed': 15.27, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T10:00:00Z', + 'dew_point': 21.6, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1006.44, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 339, + 'wind_gust_speed': 15.09, + 'wind_speed': 15.09, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T11:00:00Z', + 'dew_point': 21.7, + 'humidity': 81, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1006.66, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 336, + 'wind_gust_speed': 14.88, + 'wind_speed': 14.88, + }), + dict({ + 'apparent_temperature': 27.2, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1006.79, + 'temperature': 24.8, + 'uv_index': 0, + 'wind_bearing': 333, + 'wind_gust_speed': 14.91, + 'wind_speed': 14.91, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 38.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T13:00:00Z', + 'dew_point': 21.2, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.36, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 83, + 'wind_gust_speed': 4.58, + 'wind_speed': 3.16, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 74.0, + 'condition': 
'cloudy', + 'datetime': '2023-09-18T14:00:00Z', + 'dew_point': 21.2, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.96, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 4.74, + 'wind_speed': 4.52, + }), + dict({ + 'apparent_temperature': 24.5, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T15:00:00Z', + 'dew_point': 20.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.6, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 152, + 'wind_gust_speed': 5.63, + 'wind_speed': 5.63, + }), + dict({ + 'apparent_temperature': 24.0, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T16:00:00Z', + 'dew_point': 20.7, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.37, + 'temperature': 22.3, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 6.02, + 'wind_speed': 6.02, + }), + dict({ + 'apparent_temperature': 23.7, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T17:00:00Z', + 'dew_point': 20.4, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.2, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 6.15, + 'wind_speed': 6.15, + }), + dict({ + 'apparent_temperature': 23.4, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T18:00:00Z', + 'dew_point': 20.2, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.08, + 'temperature': 21.9, + 'uv_index': 0, + 'wind_bearing': 167, + 'wind_gust_speed': 6.48, + 'wind_speed': 6.48, + }), + dict({ + 'apparent_temperature': 23.2, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T19:00:00Z', + 'dew_point': 19.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.04, + 'temperature': 21.8, + 'uv_index': 0, + 'wind_bearing': 165, + 'wind_gust_speed': 7.51, + 'wind_speed': 7.51, + }), + dict({ + 'apparent_temperature': 23.4, + 'cloud_coverage': 99.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T20:00:00Z', + 'dew_point': 19.6, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.05, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 8.73, + 'wind_speed': 8.73, + }), + dict({ + 'apparent_temperature': 23.9, + 'cloud_coverage': 98.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T21:00:00Z', + 'dew_point': 19.5, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.06, + 'temperature': 22.5, + 'uv_index': 0, + 'wind_bearing': 164, + 'wind_gust_speed': 9.21, + 'wind_speed': 9.11, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 96.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T22:00:00Z', + 'dew_point': 19.7, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.09, + 'temperature': 23.8, + 'uv_index': 1, + 'wind_bearing': 171, + 'wind_gust_speed': 9.03, + 'wind_speed': 7.91, + }), + ]), + }) +# --- # name: test_hourly_forecast[get_forecasts] dict({ 'weather.home': dict({ diff --git a/tests/components/webhook/test_init.py b/tests/components/webhook/test_init.py index 15ec1b15ee5..6f4ae1ebefc 100644 --- a/tests/components/webhook/test_init.py +++ 
b/tests/components/webhook/test_init.py @@ -9,8 +9,8 @@ from aiohttp.test_utils import TestClient import pytest from homeassistant.components import webhook +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -319,9 +319,7 @@ async def test_ws_webhook( received = [] - async def handler( - hass: HomeAssistant, webhook_id: str, request: web.Request - ) -> web.Response: + async def handler(hass, webhook_id, request): """Handle a webhook.""" received.append(request) return web.json_response({"from": "handler"}) diff --git a/tests/components/webhook/test_trigger.py b/tests/components/webhook/test_trigger.py index 2963db70ad4..37aae47dd14 100644 --- a/tests/components/webhook/test_trigger.py +++ b/tests/components/webhook/test_trigger.py @@ -17,7 +17,7 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: @pytest.fixture(autouse=True) -async def setup_http(hass: HomeAssistant) -> None: +async def setup_http(hass): """Set up http.""" assert await async_setup_component(hass, "http", {}) assert await async_setup_component(hass, "webhook", {}) diff --git a/tests/components/webmin/conftest.py b/tests/components/webmin/conftest.py index ae0d7b26b5a..c3ad43510d5 100644 --- a/tests/components/webmin/conftest.py +++ b/tests/components/webmin/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Webmin integration tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components.webmin.const import DEFAULT_PORT, DOMAIN from homeassistant.const import ( @@ -37,21 +37,14 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup -async def async_init_integration( - hass: HomeAssistant, with_mac_address: bool = True -) -> MockConfigEntry: +async def async_init_integration(hass: HomeAssistant) -> MockConfigEntry: """Set up the Webmin integration in Home Assistant.""" entry = MockConfigEntry(domain=DOMAIN, options=TEST_USER_INPUT, title="name") entry.add_to_hass(hass) with patch( "homeassistant.components.webmin.helpers.WebminInstance.update", - return_value=load_json_object_fixture( - "webmin_update.json" - if with_mac_address - else "webmin_update_without_mac.json", - DOMAIN, - ), + return_value=load_json_object_fixture("webmin_update.json", DOMAIN), ): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/webmin/fixtures/webmin_update_without_mac.json b/tests/components/webmin/fixtures/webmin_update_without_mac.json deleted file mode 100644 index e79c54d0ff2..00000000000 --- a/tests/components/webmin/fixtures/webmin_update_without_mac.json +++ /dev/null @@ -1,108 +0,0 @@ -{ - "disk_total": 18104905818112, - "io": [0, 4], - "load": [ - 1.29, - 1.36, - 1.37, - 3589, - "Intel(R) Core(TM) i7-5820K CPU @ 3.30GHz", - "GenuineIntel", - 15728640, - 12 - ], - "disk_free": 7749321486336, - "kernel": { "os": "Linux", "arch": "x86_64", "version": "6.6.18-1-lts" }, - "disk_fs": [ - { - "device": "UUID=00000000-80b6-0000-8a06-000000000000", - "dir": "/", - "ifree": 14927206, - "total": 248431161344, - "used_percent": 80, - "type": "ext4", - "itotal": 15482880, - "iused": 555674, - "free": 49060442112, - "used": 186676502528, - "iused_percent": 4 - }, - { - "total": 11903838912512, 
- "used_percent": 38, - "iused": 3542318, - "type": "ext4", - "itotal": 366198784, - "device": "/dev/md127", - "ifree": 362656466, - "dir": "/media/disk2", - "iused_percent": 1, - "free": 7028764823552, - "used": 4275077644288 - }, - { - "dir": "/media/disk1", - "ifree": 183130757, - "device": "UUID=00000000-2bb2-0000-896c-000000000000", - "type": "ext4", - "itotal": 183140352, - "iused": 9595, - "used_percent": 89, - "total": 5952635744256, - "used": 4981066997760, - "free": 671496220672, - "iused_percent": 1 - } - ], - "drivetemps": [ - { "temp": 49, "device": "/dev/sda", "failed": "", "errors": "" }, - { "failed": "", "errors": "", "device": "/dev/sdb", "temp": 49 }, - { "device": "/dev/sdc", "temp": 51, "failed": "", "errors": "" }, - { "failed": "", "errors": "", "device": "/dev/sdd", "temp": 51 }, - { "errors": "", "failed": "", "temp": 43, "device": "/dev/sde" }, - { "device": "/dev/sdf", "temp": 40, "errors": "", "failed": "" } - ], - "mem": [32766344, 28530480, 1953088, 1944384, 27845756, ""], - "disk_used": 9442821144576, - "cputemps": [ - { "temp": 51, "core": 0 }, - { "temp": 49, "core": 1 }, - { "core": 2, "temp": 59 }, - { "temp": 51, "core": 3 }, - { "temp": 50, "core": 4 }, - { "temp": 49, "core": 5 } - ], - "procs": 310, - "cpu": [0, 8, 92, 0, 0], - "cpufans": [ - { "rpm": 0, "fan": 1 }, - { "fan": 2, "rpm": 1371 }, - { "rpm": 0, "fan": 3 }, - { "rpm": 927, "fan": 4 }, - { "rpm": 801, "fan": 5 } - ], - "load_1m": 1.29, - "load_5m": 1.36, - "load_15m": 1.37, - "mem_total": 32766344, - "mem_free": 28530480, - "swap_total": 1953088, - "swap_free": 1944384, - "uptime": { "days": 11, "minutes": 1, "seconds": 28 }, - "active_interfaces": [ - { - "scope6": ["host"], - "address": "127.0.0.1", - "address6": ["::1"], - "name": "lo", - "broadcast": 0, - "up": 1, - "index": 0, - "fullname": "lo", - "netmask6": [128], - "netmask": "255.0.0.0", - "mtu": 65536, - "edit": 1 - } - ] -} diff --git a/tests/components/webmin/snapshots/test_diagnostics.ambr b/tests/components/webmin/snapshots/test_diagnostics.ambr index 8299b0eafba..a56d6b35641 100644 --- a/tests/components/webmin/snapshots/test_diagnostics.ambr +++ b/tests/components/webmin/snapshots/test_diagnostics.ambr @@ -237,8 +237,6 @@ 'data': dict({ }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'webmin', 'entry_id': '**REDACTED**', 'minor_version': 1, diff --git a/tests/components/webmin/snapshots/test_sensor.ambr b/tests/components/webmin/snapshots/test_sensor.ambr index 6af768d63a8..8803ee684ae 100644 --- a/tests/components/webmin/snapshots/test_sensor.ambr +++ b/tests/components/webmin/snapshots/test_sensor.ambr @@ -1,4 +1,688 @@ # serializer version: 1 +# name: test_sensor[sensor.192_168_1_1_data_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_data_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data size', + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_total', + 'unique_id': 
'12:34:56:78:9a:bc_disk_total', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Data size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_data_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16861.5074996948', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_data_size_10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data size', + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_total', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_total', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Data size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_data_size_10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5543.82404708862', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_11-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_data_size_11', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data size', + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_used', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_used', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_11-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Data size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_data_size_11', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4638.98014068604', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_12-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_data_size_12', + 'has_entity_name': 
True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data size', + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_free', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_free', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_12-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Data size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_data_size_12', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '625.379589080811', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_data_size_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data size', + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_free', + 'unique_id': '12:34:56:78:9a:bc_disk_free', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Data size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_data_size_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7217.11803817749', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_data_size_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data size', + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_used', + 'unique_id': '12:34:56:78:9a:bc_disk_used', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Data size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_data_size_3', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '8794.3125', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_data_size_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data size', + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_total', + 'unique_id': '12:34:56:78:9a:bc_/_total', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Data size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_data_size_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '231.369548797607', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_data_size_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data size', + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_used', + 'unique_id': '12:34:56:78:9a:bc_/_used', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Data size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_data_size_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '173.85604095459', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_data_size_6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data size', + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'disk_fs_free', + 'unique_id': '12:34:56:78:9a:bc_/_free', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Data size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_data_size_6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45.6910972595215', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_7-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_data_size_7', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data size', + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_total', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_total', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_7-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Data size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_data_size_7', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11086.3139038086', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_8-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_data_size_8', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data size', + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_used', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_used', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_8-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Data size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_data_size_8', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3981.47631835938', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_9-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_data_size_9', 
+ 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data size', + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_free', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_free', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.192_168_1_1_data_size_9-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Data size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_data_size_9', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6546.04735183716', + }) +# --- # name: test_sensor[sensor.192_168_1_1_disk_free_inodes-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1685,6 +2369,747 @@ 'state': '31.248420715332', }) # --- +# name: test_sensor[sensor.192_168_1_1_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_itotal', + 'unique_id': '12:34:56:78:9a:bc_/_itotal', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15482880', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_iused_percent', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_iused_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_11-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_11', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_itotal', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_itotal', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_11-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_11', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '183140352', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_12-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_12', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_iused', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_iused', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_12-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_12', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9595', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_13-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_13', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_ifree', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_ifree', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_13-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_13', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '183130757', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_14-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_14', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_used_percent', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_used_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_14-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_14', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '89', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_15-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_15', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_iused_percent', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_iused_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_15-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_15', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_iused', + 'unique_id': '12:34:56:78:9a:bc_/_iused', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '555674', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_3', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_ifree', + 'unique_id': '12:34:56:78:9a:bc_/_ifree', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14927206', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_used_percent', + 'unique_id': '12:34:56:78:9a:bc_/_used_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_iused_percent', + 'unique_id': '12:34:56:78:9a:bc_/_iused_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 
None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_itotal', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_itotal', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '366198784', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_7-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_7', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_iused', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_iused', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_7-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_7', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3542318', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_8-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_8', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_ifree', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_ifree', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_8-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_8', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '362656466', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_9-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.192_168_1_1_none_9', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'webmin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disk_fs_used_percent', + 'unique_id': 
'12:34:56:78:9a:bc_/media/disk2_used_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.192_168_1_1_none_9-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '192.168.1.1 None', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.192_168_1_1_none_9', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '38', + }) +# --- # name: test_sensor[sensor.192_168_1_1_swap_free-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/webmin/test_config_flow.py b/tests/components/webmin/test_config_flow.py index 03da3340597..a9f5eafc5c7 100644 --- a/tests/components/webmin/test_config_flow.py +++ b/tests/components/webmin/test_config_flow.py @@ -33,16 +33,15 @@ async def user_flow(hass: HomeAssistant) -> str: return result["flow_id"] -@pytest.mark.parametrize( - "fixture", ["webmin_update_without_mac.json", "webmin_update.json"] -) async def test_form_user( - hass: HomeAssistant, user_flow: str, mock_setup_entry: AsyncMock, fixture: str + hass: HomeAssistant, + user_flow: str, + mock_setup_entry: AsyncMock, ) -> None: """Test a successful user initiated flow.""" with patch( "homeassistant.components.webmin.helpers.WebminInstance.update", - return_value=load_json_object_fixture(fixture, DOMAIN), + return_value=load_json_object_fixture("webmin_update.json", DOMAIN), ): result = await hass.config_entries.flow.async_configure( user_flow, TEST_USER_INPUT @@ -74,7 +73,7 @@ async def test_form_user( (Exception, "unknown"), ( Fault("5", "Webmin module net does not exist"), - "unknown", + "Fault 5: Webmin module net does not exist", ), ], ) diff --git a/tests/components/webmin/test_diagnostics.py b/tests/components/webmin/test_diagnostics.py index 98d6544bc76..5f1df44f4a8 100644 --- a/tests/components/webmin/test_diagnostics.py +++ b/tests/components/webmin/test_diagnostics.py @@ -1,7 +1,6 @@ """Tests for the diagnostics data provided by the Webmin integration.""" from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -17,6 +16,9 @@ async def test_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test diagnostics.""" - assert await get_diagnostics_for_config_entry( - hass, hass_client, await async_init_integration(hass) - ) == snapshot(exclude=props("created_at", "modified_at")) + assert ( + await get_diagnostics_for_config_entry( + hass, hass_client, await async_init_integration(hass) + ) + == snapshot + ) diff --git a/tests/components/webmin/test_init.py b/tests/components/webmin/test_init.py index 36894f00d5f..7b6282edfae 100644 --- a/tests/components/webmin/test_init.py +++ b/tests/components/webmin/test_init.py @@ -19,11 +19,3 @@ async def test_unload_entry(hass: HomeAssistant) -> None: assert entry.state is ConfigEntryState.NOT_LOADED assert not hass.data.get(DOMAIN) - - -async def test_entry_without_mac_address(hass: HomeAssistant) -> None: - """Test an entry without MAC address.""" - - entry = await async_init_integration(hass, False) - - assert entry.runtime_data.unique_id == entry.entry_id diff --git a/tests/components/webmin/test_sensor.py b/tests/components/webmin/test_sensor.py index dd68e2f9f8c..5fb874825a3 100644 --- a/tests/components/webmin/test_sensor.py +++ b/tests/components/webmin/test_sensor.py @@ -8,8 +8,6 @@ from homeassistant.helpers import entity_registry as er from .conftest import async_init_integration -from tests.common import snapshot_platform - 
 @pytest.mark.usefixtures("entity_registry_enabled_by_default")
 async def test_sensor(
@@ -21,4 +19,11 @@
     entry = await async_init_integration(hass)
 
-    await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id)
+    entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id)
+
+    assert entity_entries
+
+    for entity_entry in entity_entries:
+        assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry")
+        assert (state := hass.states.get(entity_entry.entity_id))
+        assert state == snapshot(name=f"{entity_entry.entity_id}-state")
diff --git a/tests/components/webostv/__init__.py b/tests/components/webostv/__init__.py
index d6c096f9d3a..5ef210da56d 100644
--- a/tests/components/webostv/__init__.py
+++ b/tests/components/webostv/__init__.py
@@ -2,7 +2,6 @@
 
 from homeassistant.components.webostv.const import DOMAIN
 from homeassistant.const import CONF_CLIENT_SECRET, CONF_HOST
-from homeassistant.core import HomeAssistant
 from homeassistant.setup import async_setup_component
 
 from .const import CLIENT_KEY, FAKE_UUID, HOST, TV_NAME
@@ -10,9 +9,7 @@ from .const import CLIENT_KEY, FAKE_UUID, HOST, TV_NAME
 from tests.common import MockConfigEntry
 
 
-async def setup_webostv(
-    hass: HomeAssistant, unique_id: str | None = FAKE_UUID
-) -> MockConfigEntry:
+async def setup_webostv(hass, unique_id=FAKE_UUID):
     """Initialize webostv and media_player for tests."""
     entry = MockConfigEntry(
         domain=DOMAIN,
diff --git a/tests/components/webostv/conftest.py b/tests/components/webostv/conftest.py
index a30ae933cca..2b5d701f899 100644
--- a/tests/components/webostv/conftest.py
+++ b/tests/components/webostv/conftest.py
@@ -1,14 +1,17 @@
 """Common fixtures and objects for the LG webOS integration tests."""
 
-from collections.abc import Generator
 from unittest.mock import AsyncMock, Mock, patch
 
 import pytest
+from typing_extensions import Generator
 
 from homeassistant.components.webostv.const import LIVE_TV_APP_ID
+from homeassistant.core import HomeAssistant, ServiceCall
 
 from .const import CHANNEL_1, CHANNEL_2, CLIENT_KEY, FAKE_UUID, MOCK_APPS, MOCK_INPUTS
 
+from tests.common import async_mock_service
+
 
 @pytest.fixture
 def mock_setup_entry() -> Generator[AsyncMock]:
@@ -19,6 +22,12 @@ def mock_setup_entry() -> Generator[AsyncMock]:
         yield mock_setup_entry
 
 
+@pytest.fixture
+def calls(hass: HomeAssistant) -> list[ServiceCall]:
+    """Track calls to a mock service."""
+    return async_mock_service(hass, "test", "automation")
+
+
 @pytest.fixture(name="client")
 def client_fixture():
     """Patch of client library for tests."""
diff --git a/tests/components/webostv/test_config_flow.py b/tests/components/webostv/test_config_flow.py
index 9b2983aab47..afda36d913f 100644
--- a/tests/components/webostv/test_config_flow.py
+++ b/tests/components/webostv/test_config_flow.py
@@ -295,14 +295,16 @@ async def test_form_abort_uuid_configured(hass: HomeAssistant, client) -> None:
     assert entry.data[CONF_HOST] == "new_host"
 
 
-async def test_reauth_successful(
-    hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch
-) -> None:
+async def test_reauth_successful(hass: HomeAssistant, client, monkeypatch) -> None:
     """Test that the reauthorization is successful."""
     entry = await setup_webostv(hass)
     assert client
 
-    result = await entry.start_reauth_flow(hass)
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id},
+        data=entry.data,
+    )
     assert result["step_id"] == "reauth_confirm"
 
     result = await hass.config_entries.flow.async_configure(result["flow_id"])
@@ -329,13 +331,17 @@ async def test_reauth_successful(
     ],
 )
 async def test_reauth_errors(
-    hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch, side_effect, reason
+    hass: HomeAssistant, client, monkeypatch, side_effect, reason
 ) -> None:
     """Test reauthorization errors."""
     entry = await setup_webostv(hass)
     assert client
 
-    result = await entry.start_reauth_flow(hass)
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id},
+        data=entry.data,
+    )
     assert result["step_id"] == "reauth_confirm"
 
     result = await hass.config_entries.flow.async_configure(result["flow_id"])
diff --git a/tests/components/webostv/test_device_trigger.py b/tests/components/webostv/test_device_trigger.py
index 41045969335..29c75d4440b 100644
--- a/tests/components/webostv/test_device_trigger.py
+++ b/tests/components/webostv/test_device_trigger.py
@@ -44,7 +44,7 @@ async def test_get_triggers(
 
 async def test_if_fires_on_turn_on_request(
     hass: HomeAssistant,
-    service_calls: list[ServiceCall],
+    calls: list[ServiceCall],
     device_registry: dr.DeviceRegistry,
     client,
 ) -> None:
@@ -97,11 +97,11 @@ async def test_if_fires_on_turn_on_request(
         blocking=True,
     )
 
-    assert len(service_calls) == 3
-    assert service_calls[1].data["some"] == device.id
-    assert service_calls[1].data["id"] == 0
-    assert service_calls[2].data["some"] == ENTITY_ID
-    assert service_calls[2].data["id"] == 0
+    assert len(calls) == 2
+    assert calls[0].data["some"] == device.id
+    assert calls[0].data["id"] == 0
+    assert calls[1].data["some"] == ENTITY_ID
+    assert calls[1].data["id"] == 0
 
 
 async def test_failure_scenarios(
diff --git a/tests/components/webostv/test_diagnostics.py b/tests/components/webostv/test_diagnostics.py
index 3d7cb00e021..934b59a7b83 100644
--- a/tests/components/webostv/test_diagnostics.py
+++ b/tests/components/webostv/test_diagnostics.py
@@ -58,8 +58,5 @@ async def test_diagnostics(
             "source": "user",
             "unique_id": REDACTED,
             "disabled_by": None,
-            "created_at": entry.created_at.isoformat(),
-            "modified_at": entry.modified_at.isoformat(),
-            "discovery_keys": {},
         },
     }
diff --git a/tests/components/webostv/test_init.py b/tests/components/webostv/test_init.py
index e2638c86f5e..a2961a81a4e 100644
--- a/tests/components/webostv/test_init.py
+++ b/tests/components/webostv/test_init.py
@@ -3,7 +3,6 @@
 from unittest.mock import Mock
 
 from aiowebostv import WebOsTvPairError
-import pytest
 
 from homeassistant.components.webostv.const import DOMAIN
 from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState
@@ -13,9 +12,7 @@ from homeassistant.core import HomeAssistant
 from . import setup_webostv
 
 
-async def test_reauth_setup_entry(
-    hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch
-) -> None:
+async def test_reauth_setup_entry(hass: HomeAssistant, client, monkeypatch) -> None:
     """Test reauth flow triggered by setup entry."""
     monkeypatch.setattr(client, "is_connected", Mock(return_value=False))
     monkeypatch.setattr(client, "connect", Mock(side_effect=WebOsTvPairError))
@@ -35,9 +32,7 @@ async def test_reauth_setup_entry(
     assert flow["context"].get("entry_id") == entry.entry_id
 
 
-async def test_key_update_setup_entry(
-    hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch
-) -> None:
+async def test_key_update_setup_entry(hass: HomeAssistant, client, monkeypatch) -> None:
     """Test key update from setup entry."""
     monkeypatch.setattr(client, "client_key", "new_key")
     entry = await setup_webostv(hass)
diff --git a/tests/components/webostv/test_media_player.py b/tests/components/webostv/test_media_player.py
index e4c02e680bd..6c4aeb5e984 100644
--- a/tests/components/webostv/test_media_player.py
+++ b/tests/components/webostv/test_media_player.py
@@ -144,7 +144,7 @@ async def test_media_play_pause(hass: HomeAssistant, client) -> None:
     ],
 )
 async def test_media_next_previous_track(
-    hass: HomeAssistant, client, service, client_call, monkeypatch: pytest.MonkeyPatch
+    hass: HomeAssistant, client, service, client_call, monkeypatch
 ) -> None:
     """Test media next/previous track services."""
     await setup_webostv(hass)
@@ -270,10 +270,7 @@ async def test_select_sound_output(hass: HomeAssistant, client) -> None:
 
 
 async def test_device_info_startup_off(
-    hass: HomeAssistant,
-    client,
-    monkeypatch: pytest.MonkeyPatch,
-    device_registry: dr.DeviceRegistry,
+    hass: HomeAssistant, client, monkeypatch, device_registry: dr.DeviceRegistry
 ) -> None:
     """Test device info when device is off at startup."""
     monkeypatch.setattr(client, "system_info", None)
@@ -294,10 +291,7 @@ async def test_device_info_startup_off(
 
 
 async def test_entity_attributes(
-    hass: HomeAssistant,
-    client,
-    monkeypatch: pytest.MonkeyPatch,
-    device_registry: dr.DeviceRegistry,
+    hass: HomeAssistant, client, monkeypatch, device_registry: dr.DeviceRegistry
 ) -> None:
     """Test entity attributes."""
     entry = await setup_webostv(hass)
@@ -389,7 +383,7 @@ async def test_play_media(hass: HomeAssistant, client, media_id, ch_id) -> None:
 
 
 async def test_update_sources_live_tv_find(
-    hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch
+    hass: HomeAssistant, client, monkeypatch
 ) -> None:
     """Test finding live TV app id in update sources."""
     await setup_webostv(hass)
@@ -472,9 +466,7 @@ async def test_update_sources_live_tv_find(
     assert len(sources) == 1
 
 
-async def test_client_disconnected(
-    hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch
-) -> None:
+async def test_client_disconnected(hass: HomeAssistant, client, monkeypatch) -> None:
     """Test error not raised when client is disconnected."""
     await setup_webostv(hass)
     monkeypatch.setattr(client, "is_connected", Mock(return_value=False))
@@ -485,10 +477,7 @@ async def test_client_disconnected(
 
 
 async def test_control_error_handling(
-    hass: HomeAssistant,
-    client,
-    caplog: pytest.LogCaptureFixture,
-    monkeypatch: pytest.MonkeyPatch,
+    hass: HomeAssistant, client, caplog: pytest.LogCaptureFixture, monkeypatch
 ) -> None:
     """Test control errors handling."""
     await setup_webostv(hass)
@@ -518,9 +507,7 @@ async def test_control_error_handling(
     )
 
 
-async def test_supported_features(
-    hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch
-) -> None:
+async def test_supported_features(hass: HomeAssistant, client, monkeypatch) -> None:
     """Test test supported features."""
     monkeypatch.setattr(client, "sound_output", "lineout")
     await setup_webostv(hass)
@@ -578,7 +565,7 @@ async def test_supported_features(
 
 
 async def test_cached_supported_features(
-    hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch
+    hass: HomeAssistant, client, monkeypatch
 ) -> None:
     """Test test supported features."""
     monkeypatch.setattr(client, "is_on", False)
@@ -685,7 +672,7 @@ async def test_cached_supported_features(
 
 
 async def test_supported_features_no_cache(
-    hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch
+    hass: HomeAssistant, client, monkeypatch
 ) -> None:
     """Test supported features if device is off and no cache."""
     monkeypatch.setattr(client, "is_on", False)
@@ -729,7 +716,7 @@ async def test_get_image_http(
     client,
     hass_client_no_auth: ClientSessionGenerator,
     aioclient_mock: AiohttpClientMocker,
-    monkeypatch: pytest.MonkeyPatch,
+    monkeypatch,
 ) -> None:
     """Test get image via http."""
     url = "http://something/valid_icon"
@@ -755,7 +742,7 @@ async def test_get_image_http_error(
     hass_client_no_auth: ClientSessionGenerator,
     aioclient_mock: AiohttpClientMocker,
     caplog: pytest.LogCaptureFixture,
-    monkeypatch: pytest.MonkeyPatch,
+    monkeypatch,
 ) -> None:
     """Test get image via http error."""
     url = "http://something/icon_error"
@@ -782,7 +769,7 @@ async def test_get_image_https(
     client,
     hass_client_no_auth: ClientSessionGenerator,
     aioclient_mock: AiohttpClientMocker,
-    monkeypatch: pytest.MonkeyPatch,
+    monkeypatch,
 ) -> None:
     """Test get image via http."""
     url = "https://something/valid_icon_https"
@@ -802,9 +789,7 @@ async def test_get_image_https(
     assert content == b"https_image"
 
 
-async def test_reauth_reconnect(
-    hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch
-) -> None:
+async def test_reauth_reconnect(hass: HomeAssistant, client, monkeypatch) -> None:
     """Test reauth flow triggered by reconnect."""
     entry = await setup_webostv(hass)
     monkeypatch.setattr(client, "is_connected", Mock(return_value=False))
@@ -829,9 +814,7 @@ async def test_reauth_reconnect(
     assert flow["context"].get("entry_id") == entry.entry_id
 
 
-async def test_update_media_state(
-    hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch
-) -> None:
+async def test_update_media_state(hass: HomeAssistant, client, monkeypatch) -> None:
     """Test updating media state."""
     await setup_webostv(hass)
 
@@ -849,7 +832,3 @@ async def test_update_media_state(
     monkeypatch.setattr(client, "media_state", data)
     await client.mock_state_update()
     assert hass.states.get(ENTITY_ID).state == MediaPlayerState.IDLE
-
-    monkeypatch.setattr(client, "is_on", False)
-    await client.mock_state_update()
-    assert hass.states.get(ENTITY_ID).state == STATE_OFF
diff --git a/tests/components/webostv/test_notify.py b/tests/components/webostv/test_notify.py
index 75c2e148310..a1c37b9bf97 100644
--- a/tests/components/webostv/test_notify.py
+++ b/tests/components/webostv/test_notify.py
@@ -72,9 +72,7 @@ async def test_notify(hass: HomeAssistant, client) -> None:
     )
 
 
-async def test_notify_not_connected(
-    hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch
-) -> None:
+async def test_notify_not_connected(hass: HomeAssistant, client, monkeypatch) -> None:
     """Test sending a message when client is not connected."""
     await setup_webostv(hass)
     assert hass.services.has_service(NOTIFY_DOMAIN, TV_NAME)
@@ -97,10 +95,7 @@ async def test_icon_not_found(
-    hass: HomeAssistant,
-    caplog: pytest.LogCaptureFixture,
-    client,
-    monkeypatch: pytest.MonkeyPatch,
+    hass: HomeAssistant, caplog: pytest.LogCaptureFixture, client, monkeypatch
 ) -> None:
     """Test notify icon not found error."""
     await setup_webostv(hass)
@@ -135,7 +130,7 @@ async def test_connection_errors(
     hass: HomeAssistant,
     caplog: pytest.LogCaptureFixture,
     client,
-    monkeypatch: pytest.MonkeyPatch,
+    monkeypatch,
     side_effect,
     error,
 ) -> None:
diff --git a/tests/components/webostv/test_trigger.py b/tests/components/webostv/test_trigger.py
index d7eeae28ea3..918666cf4bf 100644
--- a/tests/components/webostv/test_trigger.py
+++ b/tests/components/webostv/test_trigger.py
@@ -20,7 +20,7 @@ from tests.common import MockEntity, MockEntityPlatform
 
 async def test_webostv_turn_on_trigger_device_id(
     hass: HomeAssistant,
-    service_calls: list[ServiceCall],
+    calls: list[ServiceCall],
     device_registry: dr.DeviceRegistry,
     client,
 ) -> None:
@@ -58,14 +58,14 @@ async def test_webostv_turn_on_trigger_device_id(
         blocking=True,
     )
 
-    assert len(service_calls) == 2
-    assert service_calls[1].data["some"] == device.id
-    assert service_calls[1].data["id"] == 0
+    assert len(calls) == 1
+    assert calls[0].data["some"] == device.id
+    assert calls[0].data["id"] == 0
 
     with patch("homeassistant.config.load_yaml_dict", return_value={}):
         await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True)
 
-    service_calls.clear()
+    calls.clear()
 
     with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
@@ -75,11 +75,11 @@ async def test_webostv_turn_on_trigger_device_id(
             blocking=True,
         )
 
-    assert len(service_calls) == 1
+    assert len(calls) == 0
 
 
 async def test_webostv_turn_on_trigger_entity_id(
-    hass: HomeAssistant, service_calls: list[ServiceCall], client
+    hass: HomeAssistant, calls: list[ServiceCall], client
 ) -> None:
     """Test for turn_on triggers by entity_id firing."""
     await setup_webostv(hass)
@@ -113,9 +113,9 @@ async def test_webostv_turn_on_trigger_entity_id(
         blocking=True,
     )
 
-    assert len(service_calls) == 2
-    assert service_calls[1].data["some"] == ENTITY_ID
-    assert service_calls[1].data["id"] == 0
+    assert len(calls) == 1
+    assert calls[0].data["some"] == ENTITY_ID
+    assert calls[0].data["id"] == 0
 
 
 async def test_wrong_trigger_platform_type(
diff --git a/tests/components/websocket_api/test_auth.py b/tests/components/websocket_api/test_auth.py
index d55d2f97017..62298098adc 100644
--- a/tests/components/websocket_api/test_auth.py
+++ b/tests/components/websocket_api/test_auth.py
@@ -26,7 +26,7 @@ from tests.typing import ClientSessionGenerator
 
 
 @pytest.fixture
-def track_connected(hass: HomeAssistant) -> dict[str, list[int]]:
+def track_connected(hass):
     """Track connected and disconnected events."""
     connected_evt = []
 
@@ -293,6 +293,6 @@ async def test_auth_sending_unknown_type_disconnects(
         auth_msg = await ws.receive_json()
         assert auth_msg["type"] == TYPE_AUTH_REQUIRED
 
-        await ws._writer.send_frame(b"1" * 130, 0x30)
+        await ws._writer._send_frame(b"1" * 130, 0x30)
         auth_msg = await ws.receive()
         assert auth_msg.type == WSMsgType.close
diff --git a/tests/components/websocket_api/test_commands.py b/tests/components/websocket_api/test_commands.py
index c1a043f915b..276a383d9e9 100644
--- a/tests/components/websocket_api/test_commands.py
+++ b/tests/components/websocket_api/test_commands.py
@@ -24,7 +24,6 @@ from homeassistant.core import Context, HomeAssistant, State, SupportsResponse,
 from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
 from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.dispatcher import async_dispatcher_send
-from homeassistant.helpers.event import async_track_state_change_event
 from homeassistant.loader import async_get_integration
 from homeassistant.setup import async_setup_component
 from homeassistant.util.json import json_loads
@@ -920,7 +919,7 @@ async def test_subscribe_entities_with_unserializable_state(
     class CannotSerializeMe:
         """Cannot serialize this."""
 
-        def __init__(self) -> None:
+        def __init__(self):
             """Init cannot serialize this."""
 
     hass.states.async_set("light.permitted", "off", {"color": "red"})
@@ -1262,54 +1261,6 @@ async def test_subscribe_unsubscribe_entities_specific_entities(
     }
 
 
-async def test_subscribe_unsubscribe_entities_with_filter(
-    hass: HomeAssistant,
-    websocket_client: MockHAClientWebSocket,
-    hass_admin_user: MockUser,
-) -> None:
-    """Test subscribe/unsubscribe entities with an entity filter."""
-    hass.states.async_set("switch.not_included", "off")
-    hass.states.async_set("light.include", "off")
-    await websocket_client.send_json(
-        {"id": 7, "type": "subscribe_entities", "include": {"domains": ["light"]}}
-    )
-
-    msg = await websocket_client.receive_json()
-    assert msg["id"] == 7
-    assert msg["type"] == const.TYPE_RESULT
-    assert msg["success"]
-
-    msg = await websocket_client.receive_json()
-    assert msg["id"] == 7
-    assert msg["type"] == "event"
-    assert msg["event"] == {
-        "a": {
-            "light.include": {
-                "a": {},
-                "c": ANY,
-                "lc": ANY,
-                "s": "off",
-            }
-        }
-    }
-    hass.states.async_set("switch.not_included", "on")
-    hass.states.async_set("light.include", "on")
-    msg = await websocket_client.receive_json()
-    assert msg["id"] == 7
-    assert msg["type"] == "event"
-    assert msg["event"] == {
-        "c": {
-            "light.include": {
-                "+": {
-                    "c": ANY,
-                    "lc": ANY,
-                    "s": "on",
-                }
-            }
-        }
-    }
-
-
 async def test_render_template_renders_template(
     hass: HomeAssistant, websocket_client
 ) -> None:
@@ -2566,18 +2517,18 @@ async def test_integration_setup_info(
 @pytest.mark.parametrize(
     ("key", "config"),
     [
-        ("triggers", {"platform": "event", "event_type": "hello"}),
-        ("triggers", [{"platform": "event", "event_type": "hello"}]),
+        ("trigger", {"platform": "event", "event_type": "hello"}),
+        ("trigger", [{"platform": "event", "event_type": "hello"}]),
         (
-            "conditions",
+            "condition",
            {"condition": "state", "entity_id": "hello.world", "state": "paulus"},
         ),
         (
-            "conditions",
+            "condition",
            [{"condition": "state", "entity_id": "hello.world", "state": "paulus"}],
         ),
-        ("actions", {"service": "domain_test.test_service"}),
-        ("actions", [{"service": "domain_test.test_service"}]),
+        ("action", {"service": "domain_test.test_service"}),
+        ("action", [{"service": "domain_test.test_service"}]),
     ],
 )
 async def test_validate_config_works(
@@ -2599,13 +2550,13 @@ async def test_validate_config_works(
     [
         # Raises vol.Invalid
         (
-            "triggers",
+            "trigger",
             {"platform": "non_existing", "event_type": "hello"},
-            "Invalid trigger 'non_existing' specified",
+            "Invalid platform 'non_existing' specified",
         ),
         # Raises vol.Invalid
         (
-            "conditions",
+            "condition",
             {
                 "condition": "non_existing",
                 "entity_id": "hello.world",
@@ -2619,7 +2570,7 @@ async def test_validate_config_works(
         ),
         # Raises HomeAssistantError
         (
-            "conditions",
+            "condition",
             {
                 "above": 50,
                 "condition": "device",
@@ -2632,7 +2583,7 @@ async def test_validate_config_works(
         ),
         # Raises vol.Invalid
         (
-            "actions",
+            "action",
             {"non_existing": "domain_test.test_service"},
             "Unable to determine action @ data[0]",
         ),
@@ -2863,54 +2814,3 @@ async def test_integration_descriptions(
 
     assert response["success"]
     assert response["result"]
-
-
-async def test_subscribe_entities_chained_state_change(
-    hass: HomeAssistant,
-    websocket_client: MockHAClientWebSocket,
-    hass_admin_user: MockUser,
-) -> None:
-    """Test chaining state changed events.
-
-    Ensure the websocket sends the off state after
-    the on state.
-    """
-
-    @callback
-    def auto_off_listener(event):
-        hass.states.async_set("light.permitted", "off")
-
-    async_track_state_change_event(hass, ["light.permitted"], auto_off_listener)
-
-    await websocket_client.send_json({"id": 7, "type": "subscribe_entities"})
-
-    data = await websocket_client.receive_str()
-    msg = json_loads(data)
-    assert msg["id"] == 7
-    assert msg["type"] == const.TYPE_RESULT
-    assert msg["success"]
-
-    data = await websocket_client.receive_str()
-    msg = json_loads(data)
-    assert msg["id"] == 7
-    assert msg["type"] == "event"
-    assert msg["event"] == {"a": {}}
-
-    hass.states.async_set("light.permitted", "on")
-    data = await websocket_client.receive_str()
-    msg = json_loads(data)
-    assert msg["id"] == 7
-    assert msg["type"] == "event"
-    assert msg["event"] == {
-        "a": {"light.permitted": {"a": {}, "c": ANY, "lc": ANY, "s": "on"}}
-    }
-    data = await websocket_client.receive_str()
-    msg = json_loads(data)
-    assert msg["id"] == 7
-    assert msg["type"] == "event"
-    assert msg["event"] == {
-        "c": {"light.permitted": {"+": {"c": ANY, "lc": ANY, "s": "off"}}}
-    }
-
-    await websocket_client.close()
-    await hass.async_block_till_done()
diff --git a/tests/components/websocket_api/test_connection.py b/tests/components/websocket_api/test_connection.py
index 343575e5b4a..d6c2765522e 100644
--- a/tests/components/websocket_api/test_connection.py
+++ b/tests/components/websocket_api/test_connection.py
@@ -2,7 +2,7 @@
 
 import logging
 from typing import Any
-from unittest.mock import Mock, patch
+from unittest.mock import AsyncMock, Mock, patch
 
 from aiohttp.test_utils import make_mocked_request
 import pytest
@@ -75,17 +75,16 @@ async def test_exception_handling(
     send_messages = []
     user = MockUser()
     refresh_token = Mock()
+    current_request = AsyncMock()
     hass.data[DOMAIN] = {}
 
-    def get_extra_info(key: str) -> Any | None:
+    def get_extra_info(key: str) -> Any:
         if key == "sslcontext":
             return True
 
         if key == "peername":
             return ("127.0.0.42", 8123)
 
-        return None
-
     mocked_transport = Mock()
     mocked_transport.get_extra_info = get_extra_info
     mocked_request = make_mocked_request(
diff --git a/tests/components/websocket_api/test_decorators.py b/tests/components/websocket_api/test_decorators.py
index 81ac4b96409..0ade5329190 100644
--- a/tests/components/websocket_api/test_decorators.py
+++ b/tests/components/websocket_api/test_decorators.py
@@ -1,7 +1,5 @@
 """Test decorators."""
 
-from typing import Any
-
 import voluptuous as vol
 
 from homeassistant.components import http, websocket_api
@@ -21,40 +19,24 @@ async def test_async_response_request_context(
 
     @websocket_api.websocket_command({"type": "test-get-request-executor"})
     @websocket_api.async_response
-    async def executor_get_request(
-        hass: HomeAssistant,
-        connection: websocket_api.ActiveConnection,
-        msg: dict[str, Any],
-    ) -> None:
+    async def executor_get_request(hass, connection, msg):
         handle_request(
             await hass.async_add_executor_job(http.current_request.get), connection, msg
         )
 
     @websocket_api.websocket_command({"type": "test-get-request-async"})
     @websocket_api.async_response
-    async def async_get_request(
-        hass: HomeAssistant,
-        connection:
websocket_api.ActiveConnection, - msg: dict[str, Any], - ) -> None: + async def async_get_request(hass, connection, msg): handle_request(http.current_request.get(), connection, msg) @websocket_api.websocket_command({"type": "test-get-request"}) - def get_request( - hass: HomeAssistant, - connection: websocket_api.ActiveConnection, - msg: dict[str, Any], - ) -> None: + def get_request(hass, connection, msg): handle_request(http.current_request.get(), connection, msg) @websocket_api.websocket_command( {"type": "test-get-request-with-arg", vol.Required("arg"): str} ) - def get_with_arg_request( - hass: HomeAssistant, - connection: websocket_api.ActiveConnection, - msg: dict[str, Any], - ) -> None: + def get_with_arg_request(hass, connection, msg): handle_request(http.current_request.get(), connection, msg) websocket_api.async_register_command(hass, executor_get_request) @@ -163,11 +145,7 @@ async def test_supervisor_only(hass: HomeAssistant, websocket_client) -> None: @websocket_api.ws_require_user(only_supervisor=True) @websocket_api.websocket_command({"type": "test-require-supervisor-user"}) - def require_supervisor_request( - hass: HomeAssistant, - connection: websocket_api.ActiveConnection, - msg: dict[str, Any], - ) -> None: + def require_supervisor_request(hass, connection, msg): connection.send_result(msg["id"]) websocket_api.async_register_command(hass, require_supervisor_request) diff --git a/tests/components/websocket_api/test_http.py b/tests/components/websocket_api/test_http.py index 03e30c11ee9..794dd410661 100644 --- a/tests/components/websocket_api/test_http.py +++ b/tests/components/websocket_api/test_http.py @@ -363,12 +363,12 @@ async def test_non_json_message( assert "bad= None: - """Test failing to prepare due to timeout.""" + """Test failing to prepare.""" with ( patch( "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", @@ -381,24 +381,6 @@ async def test_prepare_fail_timeout( assert "Timeout preparing request" in caplog.text -async def test_prepare_fail_connection_reset( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test failing to prepare due to connection reset.""" - with ( - patch( - "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", - side_effect=(ConnectionResetError, web.WebSocketResponse.prepare), - ), - pytest.raises(ServerDisconnectedError), - ): - await hass_ws_client(hass) - - assert "Connection reset by peer while preparing WebSocket" in caplog.text - - async def test_enable_coalesce( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, diff --git a/tests/components/websocket_api/test_sensor.py b/tests/components/websocket_api/test_sensor.py index 2e5f0c6c605..3af02dc8f2b 100644 --- a/tests/components/websocket_api/test_sensor.py +++ b/tests/components/websocket_api/test_sensor.py @@ -1,10 +1,10 @@ """Test cases for the API stream sensor.""" from homeassistant.auth.providers.homeassistant import HassAuthProvider +from homeassistant.bootstrap import async_setup_component from homeassistant.components.websocket_api.auth import TYPE_AUTH_REQUIRED from homeassistant.components.websocket_api.http import URL from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from .test_auth import test_auth_active_with_token diff --git a/tests/components/weheat/__init__.py b/tests/components/weheat/__init__.py deleted file mode 100644 index 65c4f84ba77..00000000000 --- a/tests/components/weheat/__init__.py +++ 
/dev/null @@ -1,13 +0,0 @@ -"""Tests for the Weheat integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/weheat/conftest.py b/tests/components/weheat/conftest.py deleted file mode 100644 index 6ecb64ffdf4..00000000000 --- a/tests/components/weheat/conftest.py +++ /dev/null @@ -1,138 +0,0 @@ -"""Fixtures for Weheat tests.""" - -from collections.abc import Generator -from time import time -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -from weheat.abstractions.discovery import HeatPumpDiscovery -from weheat.abstractions.heat_pump import HeatPump - -from homeassistant.components.application_credentials import ( - DOMAIN as APPLICATION_CREDENTIALS, - ClientCredential, - async_import_client_credential, -) -from homeassistant.components.weheat.const import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -from .const import ( - CLIENT_ID, - CLIENT_SECRET, - TEST_HP_UUID, - TEST_MODEL, - TEST_SN, - USER_UUID_1, -) - -from tests.common import MockConfigEntry - - -@pytest.fixture(autouse=True) -async def setup_credentials(hass: HomeAssistant) -> None: - """Fixture to setup credentials.""" - assert await async_setup_component(hass, APPLICATION_CREDENTIALS, {}) - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential(CLIENT_ID, CLIENT_SECRET), - ) - - -@pytest.fixture -def mock_setup_entry(): - """Mock a successful setup.""" - with patch( - "homeassistant.components.weheat.async_setup_entry", return_value=True - ) as mock_setup: - yield mock_setup - - -@pytest.fixture -def mock_heat_pump_info() -> HeatPumpDiscovery.HeatPumpInfo: - """Create a HeatPumpInfo with default settings.""" - return HeatPumpDiscovery.HeatPumpInfo(TEST_HP_UUID, None, TEST_MODEL, TEST_SN, True) - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title="Weheat", - data={ - "id": "12345", - "auth_implementation": DOMAIN, - "token": { - "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", - "type": "Bearer", - "expires_in": 60, - "expires_at": time() + 60, - }, - }, - unique_id="123456789", - ) - - -@pytest.fixture -def mock_user_id() -> Generator[AsyncMock]: - """Mock the user API call.""" - with ( - patch( - "homeassistant.components.weheat.config_flow.get_user_id_from_token", - return_value=USER_UUID_1, - ) as user_mock, - ): - yield user_mock - - -@pytest.fixture -def mock_weheat_discover(mock_heat_pump_info) -> Generator[AsyncMock]: - """Mock an Weheat discovery.""" - with ( - patch( - "homeassistant.components.weheat.HeatPumpDiscovery.discover_active", - autospec=True, - ) as mock_discover, - ): - mock_discover.return_value = [mock_heat_pump_info] - - yield mock_discover - - -@pytest.fixture -def mock_weheat_heat_pump_instance() -> MagicMock: - """Mock an Weheat heat pump instance with a set of default values.""" - mock_heat_pump_instance = MagicMock(spec_set=HeatPump) - - mock_heat_pump_instance.water_inlet_temperature = 11 - mock_heat_pump_instance.water_outlet_temperature = 22 - mock_heat_pump_instance.water_house_in_temperature = 33 - 
mock_heat_pump_instance.air_inlet_temperature = 44 - mock_heat_pump_instance.power_input = 55 - mock_heat_pump_instance.power_output = 66 - mock_heat_pump_instance.dhw_top_temperature = 77 - mock_heat_pump_instance.dhw_bottom_temperature = 88 - mock_heat_pump_instance.thermostat_water_setpoint = 35 - mock_heat_pump_instance.thermostat_room_temperature = 19 - mock_heat_pump_instance.thermostat_room_temperature_setpoint = 21 - mock_heat_pump_instance.cop = 4.5 - mock_heat_pump_instance.heat_pump_state = HeatPump.State.HEATING - mock_heat_pump_instance.energy_total = 12345 - - return mock_heat_pump_instance - - -@pytest.fixture -def mock_weheat_heat_pump(mock_weheat_heat_pump_instance) -> Generator[AsyncMock]: - """Mock the coordinator HeatPump data.""" - with ( - patch( - "homeassistant.components.weheat.coordinator.HeatPump", - ) as mock_heat_pump, - ): - mock_heat_pump.return_value = mock_weheat_heat_pump_instance - - yield mock_weheat_heat_pump_instance diff --git a/tests/components/weheat/const.py b/tests/components/weheat/const.py deleted file mode 100644 index 61203259c58..00000000000 --- a/tests/components/weheat/const.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Constants for weheat tests.""" - -CLIENT_ID = "1234" -CLIENT_SECRET = "5678" - -USER_UUID_1 = "0000-1111-2222-3333" -USER_UUID_2 = "0000-1111-2222-4444" - -CONF_REFRESH_TOKEN = "refresh_token" -CONF_AUTH_IMPLEMENTATION = "auth_implementation" -MOCK_REFRESH_TOKEN = "mock_refresh_token" -MOCK_ACCESS_TOKEN = "mock_access_token" - -TEST_HP_UUID = "0000-1111-2222-3333" -TEST_NAME = "Test Heat Pump" -TEST_MODEL = "Test Model" -TEST_SN = "SN-Test-This" diff --git a/tests/components/weheat/snapshots/test_sensor.ambr b/tests/components/weheat/snapshots/test_sensor.ambr deleted file mode 100644 index 3bd4a254598..00000000000 --- a/tests/components/weheat/snapshots/test_sensor.ambr +++ /dev/null @@ -1,768 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[sensor.test_model-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'standby', - 'water_check', - 'heating', - 'cooling', - 'dhw', - 'legionella_prevention', - 'defrosting', - 'self_test', - 'manual_control', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'heat_pump_state', - 'unique_id': '0000-1111-2222-3333_heat_pump_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.test_model-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Test Model', - 'options': list([ - 'standby', - 'water_check', - 'heating', - 'cooling', - 'dhw', - 'legionella_prevention', - 'defrosting', - 'self_test', - 'manual_control', - ]), - }), - 'context': , - 'entity_id': 'sensor.test_model', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heating', - }) -# --- -# name: test_all_entities[sensor.test_model_central_heating_inlet_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 
'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model_central_heating_inlet_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Central heating inlet temperature', - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ch_inlet_temperature', - 'unique_id': '0000-1111-2222-3333_ch_inlet_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.test_model_central_heating_inlet_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Model Central heating inlet temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_model_central_heating_inlet_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '33', - }) -# --- -# name: test_all_entities[sensor.test_model_cop-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model_cop', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'COP', - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cop', - 'unique_id': '0000-1111-2222-3333_cop', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.test_model_cop-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Model COP', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.test_model_cop', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4.5', - }) -# --- -# name: test_all_entities[sensor.test_model_current_room_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model_current_room_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Current room temperature', - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'thermostat_room_temperature', - 'unique_id': '0000-1111-2222-3333_thermostat_room_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.test_model_current_room_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Model Current room temperature', - 'state_class': , - 'unit_of_measurement': , 
- }), - 'context': , - 'entity_id': 'sensor.test_model_current_room_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '19', - }) -# --- -# name: test_all_entities[sensor.test_model_dhw_bottom_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model_dhw_bottom_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'DHW bottom temperature', - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dhw_bottom_temperature', - 'unique_id': '0000-1111-2222-3333_dhw_bottom_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.test_model_dhw_bottom_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Model DHW bottom temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_model_dhw_bottom_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '88', - }) -# --- -# name: test_all_entities[sensor.test_model_dhw_top_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model_dhw_top_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'DHW top temperature', - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dhw_top_temperature', - 'unique_id': '0000-1111-2222-3333_dhw_top_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.test_model_dhw_top_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Model DHW top temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_model_dhw_top_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '77', - }) -# --- -# name: test_all_entities[sensor.test_model_electricity_used-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model_electricity_used', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Electricity used', - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'electricity_used', - 'unique_id': '0000-1111-2222-3333_electricity_used', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.test_model_electricity_used-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Test Model Electricity used', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_model_electricity_used', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '12345', - }) -# --- -# name: test_all_entities[sensor.test_model_input_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model_input_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Input power', - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_input', - 'unique_id': '0000-1111-2222-3333_power_input', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.test_model_input_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Test Model Input power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_model_input_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '55', - }) -# --- -# name: test_all_entities[sensor.test_model_output_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model_output_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Output power', - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_output', - 'unique_id': '0000-1111-2222-3333_power_output', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.test_model_output_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Test Model Output power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_model_output_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '66', - }) -# --- -# name: test_all_entities[sensor.test_model_outside_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model_outside_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - 
}), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outside temperature', - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'outside_temperature', - 'unique_id': '0000-1111-2222-3333_outside_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.test_model_outside_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Model Outside temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_model_outside_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '44', - }) -# --- -# name: test_all_entities[sensor.test_model_room_temperature_setpoint-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model_room_temperature_setpoint', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Room temperature setpoint', - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'thermostat_room_temperature_setpoint', - 'unique_id': '0000-1111-2222-3333_thermostat_room_temperature_setpoint', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.test_model_room_temperature_setpoint-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Model Room temperature setpoint', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_model_room_temperature_setpoint', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '21', - }) -# --- -# name: test_all_entities[sensor.test_model_water_inlet_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model_water_inlet_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Water inlet temperature', - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'water_inlet_temperature', - 'unique_id': '0000-1111-2222-3333_water_inlet_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.test_model_water_inlet_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Model Water inlet temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_model_water_inlet_temperature', - 'last_changed': , - 'last_reported': 
, - 'last_updated': , - 'state': '11', - }) -# --- -# name: test_all_entities[sensor.test_model_water_outlet_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model_water_outlet_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Water outlet temperature', - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'water_outlet_temperature', - 'unique_id': '0000-1111-2222-3333_water_outlet_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.test_model_water_outlet_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Model Water outlet temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_model_water_outlet_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '22', - }) -# --- -# name: test_all_entities[sensor.test_model_water_target_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_model_water_target_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Water target temperature', - 'platform': 'weheat', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'thermostat_water_setpoint', - 'unique_id': '0000-1111-2222-3333_thermostat_water_setpoint', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.test_model_water_target_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'temperature', - 'friendly_name': 'Test Model Water target temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.test_model_water_target_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '35', - }) -# --- diff --git a/tests/components/weheat/test_config_flow.py b/tests/components/weheat/test_config_flow.py deleted file mode 100644 index b33dd0a8db8..00000000000 --- a/tests/components/weheat/test_config_flow.py +++ /dev/null @@ -1,183 +0,0 @@ -"""Test the Weheat config flow.""" - -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.weheat.const import ( - DOMAIN, - ENTRY_TITLE, - OAUTH2_AUTHORIZE, - OAUTH2_TOKEN, -) -from homeassistant.config_entries import SOURCE_USER, ConfigFlowResult -from homeassistant.const import CONF_ACCESS_TOKEN, CONF_SOURCE, CONF_TOKEN -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import 
config_entry_oauth2_flow - -from .const import ( - CLIENT_ID, - CONF_AUTH_IMPLEMENTATION, - CONF_REFRESH_TOKEN, - MOCK_ACCESS_TOKEN, - MOCK_REFRESH_TOKEN, - USER_UUID_1, - USER_UUID_2, -) - -from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker -from tests.typing import ClientSessionGenerator - - -@pytest.mark.usefixtures("current_request_with_host") -async def test_full_flow( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - mock_setup_entry, -) -> None: - """Check full of adding a single heat pump.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={CONF_SOURCE: SOURCE_USER} - ) - - await handle_oauth(hass, hass_client_no_auth, aioclient_mock, result) - - with ( - patch( - "homeassistant.components.weheat.config_flow.get_user_id_from_token", - return_value=USER_UUID_1, - ) as mock_weheat, - ): - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_weheat.mock_calls) == 1 - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == USER_UUID_1 - assert result["result"].title == ENTRY_TITLE - assert result["data"][CONF_TOKEN][CONF_REFRESH_TOKEN] == MOCK_REFRESH_TOKEN - assert result["data"][CONF_TOKEN][CONF_ACCESS_TOKEN] == MOCK_ACCESS_TOKEN - assert result["data"][CONF_AUTH_IMPLEMENTATION] == DOMAIN - - -@pytest.mark.usefixtures("current_request_with_host") -async def test_duplicate_unique_id( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - mock_setup_entry, -) -> None: - """Check that the config flow is aborted when an entry with the same ID exists.""" - first_entry = MockConfigEntry( - domain=DOMAIN, - data={}, - unique_id=USER_UUID_1, - ) - - first_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={CONF_SOURCE: SOURCE_USER} - ) - - await handle_oauth(hass, hass_client_no_auth, aioclient_mock, result) - - with ( - patch( - "homeassistant.components.weheat.config_flow.get_user_id_from_token", - return_value=USER_UUID_1, - ), - ): - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - # only care that the config flow is aborted - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -@pytest.mark.usefixtures("current_request_with_host") -@pytest.mark.parametrize( - ("logged_in_user", "expected_reason"), - [(USER_UUID_1, "reauth_successful"), (USER_UUID_2, "wrong_account")], -) -async def test_reauth( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - mock_user_id: AsyncMock, - mock_weheat_discover: AsyncMock, - setup_credentials, - logged_in_user: str, - expected_reason: str, -) -> None: - """Check reauth flow both with and without the correct logged in user.""" - mock_user_id.return_value = logged_in_user - entry = MockConfigEntry( - domain=DOMAIN, - data={}, - unique_id=USER_UUID_1, - ) - - entry.add_to_hass(hass) - - result = await entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - result = await hass.config_entries.flow.async_configure( - flow_id=result["flow_id"], - user_input={}, - ) - - await handle_oauth(hass, hass_client_no_auth, aioclient_mock, result) - - 
assert result["type"] is FlowResultType.EXTERNAL_STEP - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == expected_reason - assert entry.unique_id == USER_UUID_1 - - -async def handle_oauth( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - result: ConfigFlowResult, -) -> None: - """Handle the Oauth2 part of the flow.""" - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": "https://example.com/auth/external/callback", - }, - ) - - assert result["url"] == ( - f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" - "&redirect_uri=https://example.com/auth/external/callback" - f"&state={state}" - "&scope=openid+offline_access" - ) - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.post( - OAUTH2_TOKEN, - json={ - "refresh_token": MOCK_REFRESH_TOKEN, - "access_token": MOCK_ACCESS_TOKEN, - "type": "Bearer", - "expires_in": 60, - }, - ) diff --git a/tests/components/weheat/test_sensor.py b/tests/components/weheat/test_sensor.py deleted file mode 100644 index d9055addc67..00000000000 --- a/tests/components/weheat/test_sensor.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Tests for the weheat sensor platform.""" - -from unittest.mock import AsyncMock, patch - -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy import SnapshotAssertion -from weheat.abstractions.discovery import HeatPumpDiscovery - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_weheat_discover: AsyncMock, - mock_weheat_heat_pump: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.weheat.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - await hass.async_block_till_done() - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.parametrize(("has_dhw", "nr_of_entities"), [(False, 12), (True, 14)]) -async def test_create_entities( - hass: HomeAssistant, - mock_weheat_discover: AsyncMock, - mock_weheat_heat_pump: AsyncMock, - mock_heat_pump_info: HeatPumpDiscovery.HeatPumpInfo, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - has_dhw: bool, - nr_of_entities: int, -) -> None: - """Test creating entities.""" - mock_heat_pump_info.has_dhw = has_dhw - mock_weheat_discover.return_value = [mock_heat_pump_info] - - with patch("homeassistant.components.weheat.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - await hass.async_block_till_done() - assert len(hass.states.async_all()) == nr_of_entities diff --git a/tests/components/wemo/conftest.py b/tests/components/wemo/conftest.py index fee981484ef..1316c37b62b 100644 --- a/tests/components/wemo/conftest.py +++ b/tests/components/wemo/conftest.py @@ -1,15 +1,13 @@ """Fixtures for pywemo.""" -from collections.abc import Generator import contextlib -from unittest.mock import MagicMock, create_autospec, patch +from unittest.mock import create_autospec, patch import pytest import pywemo from homeassistant.components.wemo import CONF_DISCOVERY, CONF_STATIC from homeassistant.components.wemo.const import DOMAIN -from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -24,13 +22,13 @@ MOCK_INSIGHT_STATE_THRESHOLD_POWER = 8.0 @pytest.fixture(name="pywemo_model") -def pywemo_model_fixture() -> str: +def pywemo_model_fixture(): """Fixture containing a pywemo class name used by pywemo_device_fixture.""" return "LightSwitch" @pytest.fixture(name="pywemo_registry", autouse=True) -def async_pywemo_registry_fixture() -> Generator[MagicMock]: +async def async_pywemo_registry_fixture(): """Fixture for SubscriptionRegistry instances.""" registry = create_autospec(pywemo.SubscriptionRegistry, instance=True) @@ -54,9 +52,7 @@ def pywemo_discovery_responder_fixture(): @contextlib.contextmanager -def create_pywemo_device( - pywemo_registry: MagicMock, pywemo_model: str -) -> pywemo.WeMoDevice: +def create_pywemo_device(pywemo_registry, pywemo_model): """Create a WeMoDevice instance.""" cls = getattr(pywemo, pywemo_model) device = create_autospec(cls, instance=True) @@ -65,7 +61,6 @@ def create_pywemo_device( device.name = MOCK_NAME device.serial_number = MOCK_SERIAL_NUMBER device.model_name = pywemo_model.replace("LongPress", "") - device.model = device.model_name device.udn = f"uuid:{device.model_name}-1_0-{device.serial_number}" device.firmware_version = MOCK_FIRMWARE_VERSION device.get_state.return_value = 0 # Default to Off @@ -95,18 +90,14 @@ def create_pywemo_device( @pytest.fixture(name="pywemo_device") -def pywemo_device_fixture( - 
pywemo_registry: MagicMock, pywemo_model: str -) -> Generator[pywemo.WeMoDevice]: +def pywemo_device_fixture(pywemo_registry, pywemo_model): """Fixture for WeMoDevice instances.""" with create_pywemo_device(pywemo_registry, pywemo_model) as pywemo_device: yield pywemo_device @pytest.fixture(name="pywemo_dli_device") -def pywemo_dli_device_fixture( - pywemo_registry: MagicMock, pywemo_model: str -) -> Generator[pywemo.WeMoDevice]: +def pywemo_dli_device_fixture(pywemo_registry, pywemo_model): """Fixture for Digital Loggers emulated instances.""" with create_pywemo_device(pywemo_registry, pywemo_model) as pywemo_dli_device: pywemo_dli_device.model_name = "DLI emulated Belkin Socket" @@ -115,14 +106,12 @@ def pywemo_dli_device_fixture( @pytest.fixture(name="wemo_entity_suffix") -def wemo_entity_suffix_fixture() -> str: +def wemo_entity_suffix_fixture(): """Fixture to select a specific entity for wemo_entity.""" return "" -async def async_create_wemo_entity( - hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity_suffix: str -) -> er.RegistryEntry | None: +async def async_create_wemo_entity(hass, pywemo_device, wemo_entity_suffix): """Create a hass entity for a wemo device.""" assert await async_setup_component( hass, @@ -145,16 +134,12 @@ async def async_create_wemo_entity( @pytest.fixture(name="wemo_entity") -async def async_wemo_entity_fixture( - hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity_suffix: str -) -> er.RegistryEntry | None: +async def async_wemo_entity_fixture(hass, pywemo_device, wemo_entity_suffix): """Fixture for a Wemo entity in hass.""" return await async_create_wemo_entity(hass, pywemo_device, wemo_entity_suffix) @pytest.fixture(name="wemo_dli_entity") -async def async_wemo_dli_entity_fixture( - hass: HomeAssistant, pywemo_dli_device: pywemo.WeMoDevice, wemo_entity_suffix: str -) -> er.RegistryEntry | None: +async def async_wemo_dli_entity_fixture(hass, pywemo_dli_device, wemo_entity_suffix): """Fixture for a Wemo entity in hass.""" return await async_create_wemo_entity(hass, pywemo_dli_device, wemo_entity_suffix) diff --git a/tests/components/wemo/entity_test_helpers.py b/tests/components/wemo/entity_test_helpers.py index f57dffad6f9..6700b00ec38 100644 --- a/tests/components/wemo/entity_test_helpers.py +++ b/tests/components/wemo/entity_test_helpers.py @@ -4,11 +4,7 @@ This is not a test module. These test methods are used by the platform test modu """ import asyncio -from collections.abc import Callable, Coroutine import threading -from typing import Any - -import pywemo from homeassistant.components.homeassistant import DOMAIN as HA_DOMAIN from homeassistant.components.wemo.coordinator import async_get_coordinator @@ -21,7 +17,6 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -45,12 +40,7 @@ def _perform_async_update(coordinator): return async_callback -async def _async_multiple_call_helper( - hass: HomeAssistant, - pywemo_device: pywemo.WeMoDevice, - call1: Callable[[], Coroutine[Any, Any, None]], - call2: Callable[[], Coroutine[Any, Any, None]], -) -> None: +async def _async_multiple_call_helper(hass, pywemo_device, call1, call2): """Create two calls (call1 & call2) in parallel; verify only one polls the device. There should only be one poll on the device at a time. 
Any parallel updates @@ -97,7 +87,7 @@ async def _async_multiple_call_helper( async def test_async_update_locked_callback_and_update( - hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity: er.RegistryEntry + hass: HomeAssistant, pywemo_device, wemo_entity ) -> None: """Test that a callback and a state update request can't both happen at the same time. @@ -112,7 +102,7 @@ async def test_async_update_locked_callback_and_update( async def test_async_update_locked_multiple_updates( - hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity: er.RegistryEntry + hass: HomeAssistant, pywemo_device, wemo_entity ) -> None: """Test that two hass async_update state updates do not proceed at the same time.""" coordinator = async_get_coordinator(hass, wemo_entity.device_id) @@ -122,7 +112,7 @@ async def test_async_update_locked_multiple_updates( async def test_async_update_locked_multiple_callbacks( - hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity: er.RegistryEntry + hass: HomeAssistant, pywemo_device, wemo_entity ) -> None: """Test that two device callback state updates do not proceed at the same time.""" coordinator = async_get_coordinator(hass, wemo_entity.device_id) @@ -168,33 +158,24 @@ class EntityTestHelpers: """Common state update helpers.""" async def test_async_update_locked_multiple_updates( - self, - hass: HomeAssistant, - pywemo_device: pywemo.WeMoDevice, - wemo_entity: er.RegistryEntry, - ) -> None: + self, hass, pywemo_device, wemo_entity + ): """Test that two hass async_update state updates do not proceed at the same time.""" await test_async_update_locked_multiple_updates( hass, pywemo_device, wemo_entity ) async def test_async_update_locked_multiple_callbacks( - self, - hass: HomeAssistant, - pywemo_device: pywemo.WeMoDevice, - wemo_entity: er.RegistryEntry, - ) -> None: + self, hass, pywemo_device, wemo_entity + ): """Test that two device callback state updates do not proceed at the same time.""" await test_async_update_locked_multiple_callbacks( hass, pywemo_device, wemo_entity ) async def test_async_update_locked_callback_and_update( - self, - hass: HomeAssistant, - pywemo_device: pywemo.WeMoDevice, - wemo_entity: er.RegistryEntry, - ) -> None: + self, hass, pywemo_device, wemo_entity + ): """Test that a callback and a state update request can't both happen at the same time. 
When a state update is received via a callback from the device at the same time diff --git a/tests/components/wemo/test_binary_sensor.py b/tests/components/wemo/test_binary_sensor.py index 576283577c2..99a5df47e25 100644 --- a/tests/components/wemo/test_binary_sensor.py +++ b/tests/components/wemo/test_binary_sensor.py @@ -1,7 +1,6 @@ """Tests for the Wemo binary_sensor entity.""" import pytest -import pywemo from pywemo import StandbyState from homeassistant.components.homeassistant import ( @@ -13,8 +12,6 @@ from homeassistant.components.wemo.binary_sensor import ( MakerBinarySensor, ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from .entity_test_helpers import EntityTestHelpers @@ -29,12 +26,8 @@ class TestMotion(EntityTestHelpers): return "Motion" async def test_binary_sensor_registry_state_callback( - self, - hass: HomeAssistant, - pywemo_registry: pywemo.SubscriptionRegistry, - pywemo_device: pywemo.WeMoDevice, - wemo_entity: er.RegistryEntry, - ) -> None: + self, hass, pywemo_registry, pywemo_device, wemo_entity + ): """Verify that the binary_sensor receives state updates from the registry.""" # On state. pywemo_device.get_state.return_value = 1 @@ -49,12 +42,8 @@ class TestMotion(EntityTestHelpers): assert hass.states.get(wemo_entity.entity_id).state == STATE_OFF async def test_binary_sensor_update_entity( - self, - hass: HomeAssistant, - pywemo_registry: pywemo.SubscriptionRegistry, - pywemo_device: pywemo.WeMoDevice, - wemo_entity: er.RegistryEntry, - ) -> None: + self, hass, pywemo_registry, pywemo_device, wemo_entity + ): """Verify that the binary_sensor performs state updates.""" await async_setup_component(hass, HA_DOMAIN, {}) @@ -93,12 +82,8 @@ class TestMaker(EntityTestHelpers): return MakerBinarySensor._name_suffix.lower() async def test_registry_state_callback( - self, - hass: HomeAssistant, - pywemo_registry: pywemo.SubscriptionRegistry, - pywemo_device: pywemo.WeMoDevice, - wemo_entity: er.RegistryEntry, - ) -> None: + self, hass, pywemo_registry, pywemo_device, wemo_entity + ): """Verify that the binary_sensor receives state updates from the registry.""" # On state. pywemo_device.sensor_state = 0 @@ -127,12 +112,8 @@ class TestInsight(EntityTestHelpers): return InsightBinarySensor._name_suffix.lower() async def test_registry_state_callback( - self, - hass: HomeAssistant, - pywemo_registry: pywemo.SubscriptionRegistry, - pywemo_device: pywemo.WeMoDevice, - wemo_entity: er.RegistryEntry, - ) -> None: + self, hass, pywemo_registry, pywemo_device, wemo_entity + ): """Verify that the binary_sensor receives state updates from the registry.""" # On state. 
pywemo_device.get_state.return_value = 1 diff --git a/tests/components/wemo/test_coordinator.py b/tests/components/wemo/test_coordinator.py index 17061aea2f6..198b132bbd0 100644 --- a/tests/components/wemo/test_coordinator.py +++ b/tests/components/wemo/test_coordinator.py @@ -3,10 +3,9 @@ import asyncio from dataclasses import asdict from datetime import timedelta -from unittest.mock import _Call, call, patch +from unittest.mock import call, patch import pytest -import pywemo from pywemo.exceptions import ActionException, PyWeMoException from pywemo.subscribe import EVENT_TYPE_LONG_PRESS @@ -15,7 +14,7 @@ from homeassistant.components.wemo import CONF_DISCOVERY, CONF_STATIC from homeassistant.components.wemo.const import DOMAIN, WEMO_SUBSCRIPTION_EVENT from homeassistant.components.wemo.coordinator import Options, async_get_coordinator from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow @@ -178,7 +177,6 @@ async def test_device_info( } assert device_entries[0].manufacturer == "Belkin" assert device_entries[0].model == "LightSwitch" - assert device_entries[0].model_id == "LightSwitch" assert device_entries[0].sw_version == MOCK_FIRMWARE_VERSION @@ -250,14 +248,14 @@ class TestInsight: ) async def test_should_poll( self, - hass: HomeAssistant, - subscribed: bool, - state: int, - expected_calls: list[_Call], - wemo_entity: er.RegistryEntry, - pywemo_device: pywemo.WeMoDevice, - pywemo_registry: pywemo.SubscriptionRegistry, - ) -> None: + hass, + subscribed, + state, + expected_calls, + wemo_entity, + pywemo_device, + pywemo_registry, + ): """Validate the should_poll returns the correct value.""" pywemo_registry.is_subscribed.return_value = subscribed pywemo_device.get_state.reset_mock() diff --git a/tests/components/wemo/test_device_trigger.py b/tests/components/wemo/test_device_trigger.py index 477f5ee3960..47b704dae5d 100644 --- a/tests/components/wemo/test_device_trigger.py +++ b/tests/components/wemo/test_device_trigger.py @@ -16,7 +16,6 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from tests.common import async_get_device_automations, async_mock_service @@ -30,9 +29,7 @@ def pywemo_model(): return "LightSwitchLongPress" -async def setup_automation( - hass: HomeAssistant, device_id: str, trigger_type: str -) -> None: +async def setup_automation(hass, device_id, trigger_type): """Set up an automation trigger for testing triggering.""" return await async_setup_component( hass, @@ -99,9 +96,7 @@ async def test_get_triggers(hass: HomeAssistant, wemo_entity) -> None: assert triggers == unordered(expected_triggers) -async def test_fires_on_long_press( - hass: HomeAssistant, wemo_entity: er.RegistryEntry -) -> None: +async def test_fires_on_long_press(hass: HomeAssistant, wemo_entity) -> None: """Test wemo long press trigger firing.""" assert await setup_automation(hass, wemo_entity.device_id, EVENT_TYPE_LONG_PRESS) calls = async_mock_service(hass, "test", "automation") diff --git a/tests/components/wemo/test_init.py b/tests/components/wemo/test_init.py index 4a38775d331..48d8f8eac03 100644 --- a/tests/components/wemo/test_init.py +++ 
b/tests/components/wemo/test_init.py @@ -201,7 +201,6 @@ async def test_discovery( device.name = f"{MOCK_NAME}_{counter}" device.serial_number = f"{MOCK_SERIAL_NUMBER}_{counter}" device.model_name = "Motion" - device.model = "Motion" device.udn = f"uuid:{device.model_name}-1_0-{device.serial_number}" device.firmware_version = MOCK_FIRMWARE_VERSION device.get_state.return_value = 0 # Default to Off diff --git a/tests/components/wemo/test_sensor.py b/tests/components/wemo/test_sensor.py index 2259bfbbf18..7e0c8fa72f0 100644 --- a/tests/components/wemo/test_sensor.py +++ b/tests/components/wemo/test_sensor.py @@ -2,9 +2,6 @@ import pytest -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - from .conftest import MOCK_INSIGHT_CURRENT_WATTS, MOCK_INSIGHT_TODAY_KWH from .entity_test_helpers import EntityTestHelpers @@ -27,7 +24,7 @@ class InsightTestTemplate(EntityTestHelpers): """Select the appropriate entity for the test.""" return cls.ENTITY_ID_SUFFIX - def test_state(self, hass: HomeAssistant, wemo_entity: er.RegistryEntry) -> None: + def test_state(self, hass, wemo_entity): """Test the sensor state.""" assert hass.states.get(wemo_entity.entity_id).state == self.EXPECTED_STATE_VALUE diff --git a/tests/components/whirlpool/conftest.py b/tests/components/whirlpool/conftest.py index 50620b20b8b..a5926f55a94 100644 --- a/tests/components/whirlpool/conftest.py +++ b/tests/components/whirlpool/conftest.py @@ -145,8 +145,6 @@ def side_effect_function(*args, **kwargs): if args[0] == "WashCavity_OpStatusBulkDispense1Level": return "3" - return None - def get_sensor_mock(said): """Get a mock of a sensor.""" diff --git a/tests/components/whirlpool/snapshots/test_diagnostics.ambr b/tests/components/whirlpool/snapshots/test_diagnostics.ambr index c60ce17b952..5a0beb112e6 100644 --- a/tests/components/whirlpool/snapshots/test_diagnostics.ambr +++ b/tests/components/whirlpool/snapshots/test_diagnostics.ambr @@ -29,8 +29,6 @@ 'username': '**REDACTED**', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'whirlpool', 'minor_version': 1, 'options': dict({ diff --git a/tests/components/whirlpool/test_climate.py b/tests/components/whirlpool/test_climate.py index cdae28f4432..18016bd9c67 100644 --- a/tests/components/whirlpool/test_climate.py +++ b/tests/components/whirlpool/test_climate.py @@ -264,10 +264,10 @@ async def test_service_calls( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 16}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, blocking=True, ) - mock_instance.set_temp.assert_called_once_with(16) + mock_instance.set_temp.assert_called_once_with(15) mock_instance.set_mode.reset_mock() await hass.services.async_call( diff --git a/tests/components/whirlpool/test_config_flow.py b/tests/components/whirlpool/test_config_flow.py index 1240e1303e1..e3896a436d4 100644 --- a/tests/components/whirlpool/test_config_flow.py +++ b/tests/components/whirlpool/test_config_flow.py @@ -235,7 +235,15 @@ async def test_reauth_flow(hass: HomeAssistant, region, brand) -> None: ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=CONFIG_INPUT | {"region": region[0], "brand": brand[0]}, + ) assert result["step_id"] == "reauth_confirm" assert 
result["type"] is FlowResultType.FORM @@ -286,7 +294,21 @@ async def test_reauth_flow_auth_error(hass: HomeAssistant, region, brand) -> Non ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data={ + CONF_USERNAME: "test-username", + CONF_PASSWORD: "new-password", + "region": region[0], + "brand": brand[0], + }, + ) + assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -323,7 +345,15 @@ async def test_reauth_flow_connnection_error( ) mock_entry.add_to_hass(hass) - result = await mock_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=CONFIG_INPUT | {"region": region[0], "brand": brand[0]}, + ) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM diff --git a/tests/components/whirlpool/test_diagnostics.py b/tests/components/whirlpool/test_diagnostics.py index 2a0b2e6fd18..6cfc1b76e38 100644 --- a/tests/components/whirlpool/test_diagnostics.py +++ b/tests/components/whirlpool/test_diagnostics.py @@ -29,4 +29,4 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, hass_client, mock_entry) - assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) + assert result == snapshot(exclude=props("entry_id")) diff --git a/tests/components/whirlpool/test_sensor.py b/tests/components/whirlpool/test_sensor.py index 548025e29bd..6af88c8a9f3 100644 --- a/tests/components/whirlpool/test_sensor.py +++ b/tests/components/whirlpool/test_sensor.py @@ -42,8 +42,6 @@ def side_effect_function_open_door(*args, **kwargs): if args[0] == "WashCavity_OpStatusBulkDispense1Level": return "3" - return None - async def test_dryer_sensor_values( hass: HomeAssistant, diff --git a/tests/components/whois/conftest.py b/tests/components/whois/conftest.py index 4bb18581c1a..5fe420abb92 100644 --- a/tests/components/whois/conftest.py +++ b/tests/components/whois/conftest.py @@ -2,12 +2,11 @@ from __future__ import annotations -from collections.abc import Generator from datetime import datetime -from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest +from typing_extensions import Generator from homeassistant.components.whois.const import DOMAIN from homeassistant.const import CONF_DOMAIN @@ -75,7 +74,7 @@ def mock_whois_missing_some_attrs() -> Generator[Mock]: class LimitedWhoisMock: """A limited mock of whois_query.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args, **kwargs): """Mock only attributes the library always sets being available.""" self.creation_date = datetime(2019, 1, 1, 0, 0, 0) self.dnssec = True diff --git a/tests/components/whois/snapshots/test_config_flow.ambr b/tests/components/whois/snapshots/test_config_flow.ambr index 937502d4d6c..08f3861dcd2 100644 --- a/tests/components/whois/snapshots/test_config_flow.ambr +++ b/tests/components/whois/snapshots/test_config_flow.ambr @@ -20,8 +20,6 @@ 'domain': 'example.com', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'whois', 'entry_id': , 'minor_version': 1, @@ -60,8 +58,6 @@ 'domain': 'example.com', 
}), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'whois', 'entry_id': , 'minor_version': 1, @@ -100,8 +96,6 @@ 'domain': 'example.com', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'whois', 'entry_id': , 'minor_version': 1, @@ -140,8 +134,6 @@ 'domain': 'example.com', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'whois', 'entry_id': , 'minor_version': 1, @@ -180,8 +172,6 @@ 'domain': 'example.com', }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'whois', 'entry_id': , 'minor_version': 1, diff --git a/tests/components/whois/snapshots/test_sensor.ambr b/tests/components/whois/snapshots/test_sensor.ambr index 4310bc77ebf..9bc125f204b 100644 --- a/tests/components/whois/snapshots/test_sensor.ambr +++ b/tests/components/whois/snapshots/test_sensor.ambr @@ -67,7 +67,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -146,7 +145,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -229,7 +227,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -308,7 +305,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -387,7 +383,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -465,7 +460,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -543,7 +537,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -621,7 +614,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -699,7 +691,6 @@ }), 'manufacturer': None, 'model': None, - 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/wiffi/conftest.py b/tests/components/wiffi/conftest.py index 2383906291f..5f16d676e81 100644 --- a/tests/components/wiffi/conftest.py +++ b/tests/components/wiffi/conftest.py @@ -1,9 +1,9 @@ """Configuration for Wiffi tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/wilight/test_cover.py b/tests/components/wilight/test_cover.py index a844a61fc1a..5b89293032f 100644 --- a/tests/components/wilight/test_cover.py +++ b/tests/components/wilight/test_cover.py @@ -9,7 +9,6 @@ from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN as COVER_DOMAIN, - CoverState, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -17,6 +16,10 @@ from homeassistant.const import ( SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -67,7 +70,7 @@ async def test_loading_cover( # First segment of the strip state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == CoverState.CLOSED 
+ assert state.state == STATE_CLOSED entry = entity_registry.async_get("cover.wl000000000099_1") assert entry @@ -91,7 +94,7 @@ async def test_open_close_cover_state( await hass.async_block_till_done() state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING # Close await hass.services.async_call( @@ -104,7 +107,7 @@ async def test_open_close_cover_state( await hass.async_block_till_done() state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING # Set position await hass.services.async_call( @@ -117,7 +120,7 @@ async def test_open_close_cover_state( await hass.async_block_till_done() state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 50 # Stop @@ -131,4 +134,4 @@ async def test_open_close_cover_state( await hass.async_block_till_done() state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN diff --git a/tests/components/withings/__init__.py b/tests/components/withings/__init__.py index 127bccbeb00..4b97fc48834 100644 --- a/tests/components/withings/__init__.py +++ b/tests/components/withings/__init__.py @@ -6,12 +6,12 @@ from typing import Any from urllib.parse import urlparse from aiohttp.test_utils import TestClient -from aiowithings import Activity, Device, Goals, MeasurementGroup, SleepSummary, Workout +from aiowithings import Activity, Goals, MeasurementGroup, SleepSummary, Workout from freezegun.api import FrozenDateTimeFactory from homeassistant.components.webhook import async_generate_url +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from tests.common import ( MockConfigEntry, @@ -109,11 +109,3 @@ def load_sleep_fixture( """Return sleep summaries from fixture.""" sleep_json = load_json_array_fixture("withings/sleep_summaries.json") return [SleepSummary.from_api(sleep_summary) for sleep_summary in sleep_json] - - -def load_device_fixture( - fixture: str = "withings/devices.json", -) -> list[Device]: - """Return sleep summaries from fixture.""" - devices_json = load_json_array_fixture(fixture) - return [Device.from_api(device) for device in devices_json] diff --git a/tests/components/withings/conftest.py b/tests/components/withings/conftest.py index 5b73240908a..dfb0658b64a 100644 --- a/tests/components/withings/conftest.py +++ b/tests/components/withings/conftest.py @@ -133,29 +133,6 @@ def polling_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry: ) -@pytest.fixture -def second_polling_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry: - """Create Withings entry in Home Assistant.""" - return MockConfigEntry( - domain=DOMAIN, - title="Not Henk", - unique_id="54321", - data={ - "auth_implementation": DOMAIN, - "token": { - "status": 0, - "userid": "54321", - "access_token": "mock-access-token", - "refresh_token": "mock-refresh-token", - "expires_at": expires_at, - "scope": ",".join(scopes), - }, - "profile": TITLE, - "webhook_id": WEBHOOK_ID, - }, - ) - - @pytest.fixture(name="withings") def mock_withings(): """Mock withings.""" diff --git a/tests/components/withings/snapshots/test_diagnostics.ambr 
b/tests/components/withings/snapshots/test_diagnostics.ambr index f7c704a2c49..df2a3b95388 100644 --- a/tests/components/withings/snapshots/test_diagnostics.ambr +++ b/tests/components/withings/snapshots/test_diagnostics.ambr @@ -15,7 +15,7 @@ 'extracellular_water', 'intracellular_water', 'visceral_fat', - 'basal_metabolic_rate', + 'unknown', 'fat_ratio', 'height', 'temperature', @@ -78,7 +78,7 @@ 'extracellular_water', 'intracellular_water', 'visceral_fat', - 'basal_metabolic_rate', + 'unknown', 'fat_ratio', 'height', 'temperature', @@ -141,7 +141,7 @@ 'extracellular_water', 'intracellular_water', 'visceral_fat', - 'basal_metabolic_rate', + 'unknown', 'fat_ratio', 'height', 'temperature', diff --git a/tests/components/withings/snapshots/test_init.ambr b/tests/components/withings/snapshots/test_init.ambr deleted file mode 100644 index be221cad313..00000000000 --- a/tests/components/withings/snapshots/test_init.ambr +++ /dev/null @@ -1,65 +0,0 @@ -# serializer version: 1 -# name: test_devices[12345] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'withings', - '12345', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Withings', - 'model': None, - 'model_id': None, - 'name': 'henk', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- -# name: test_devices[f998be4b9ccc9e136fd8cd8e8e344c31ec3b271d] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'withings', - 'f998be4b9ccc9e136fd8cd8e8e344c31ec3b271d', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Withings', - 'model': 'Body+', - 'model_id': None, - 'name': 'Body+', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': , - }) -# --- diff --git a/tests/components/withings/snapshots/test_sensor.ambr b/tests/components/withings/snapshots/test_sensor.ambr index cfecfb1e28e..70a86c79038 100644 --- a/tests/components/withings/snapshots/test_sensor.ambr +++ b/tests/components/withings/snapshots/test_sensor.ambr @@ -1,62 +1,4 @@ # serializer version: 1 -# name: test_all_entities[sensor.body_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'low', - 'medium', - 'high', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.body_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'withings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery', - 'unique_id': 'f998be4b9ccc9e136fd8cd8e8e344c31ec3b271d_battery', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.body_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 
'Body+ Battery', - 'options': list([ - 'low', - 'medium', - 'high', - ]), - }), - 'context': , - 'entity_id': 'sensor.body_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'high', - }) -# --- # name: test_all_entities[sensor.henk_active_calories_burnt_today-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/withings/test_config_flow.py b/tests/components/withings/test_config_flow.py index 39c8340a78e..20bef90a31e 100644 --- a/tests/components/withings/test_config_flow.py +++ b/tests/components/withings/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.withings.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -145,7 +145,14 @@ async def test_config_reauth_profile( """Test reauth an existing profile reauthenticates the config entry.""" await setup_integration(hass, polling_config_entry) - result = await polling_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": polling_config_entry.entry_id, + }, + data=polling_config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -200,7 +207,14 @@ async def test_config_reauth_wrong_account( """Test reauth with wrong account.""" await setup_integration(hass, polling_config_entry) - result = await polling_config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": polling_config_entry.entry_id, + }, + data=polling_config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/withings/test_init.py b/tests/components/withings/test_init.py index e07e1f90cb4..0375d1869d9 100644 --- a/tests/components/withings/test_init.py +++ b/tests/components/withings/test_init.py @@ -14,7 +14,6 @@ from aiowithings import ( ) from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy import SnapshotAssertion from homeassistant import config_entries from homeassistant.components import cloud @@ -23,7 +22,6 @@ from homeassistant.components.webhook import async_generate_url from homeassistant.components.withings.const import DOMAIN from homeassistant.const import CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr from homeassistant.util import dt as dt_util from . 
import call_webhook, prepare_webhook_setup, setup_integration @@ -571,21 +569,3 @@ async def test_webhook_post( resp.close() assert data["code"] == expected_code - - -async def test_devices( - hass: HomeAssistant, - withings: AsyncMock, - webhook_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, - device_registry: dr.DeviceRegistry, -) -> None: - """Test devices.""" - await setup_integration(hass, webhook_config_entry) - - await hass.async_block_till_done() - - for device_id in ("12345", "f998be4b9ccc9e136fd8cd8e8e344c31ec3b271d"): - device = device_registry.async_get_device({(DOMAIN, device_id)}) - assert device is not None - assert device == snapshot(name=device_id) diff --git a/tests/components/withings/test_sensor.py b/tests/components/withings/test_sensor.py index 20927c197a4..8966006e47f 100644 --- a/tests/components/withings/test_sensor.py +++ b/tests/components/withings/test_sensor.py @@ -8,14 +8,12 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.withings import DOMAIN from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from . import ( load_activity_fixture, - load_device_fixture, load_goals_fixture, load_measurements_fixture, load_sleep_fixture, @@ -353,83 +351,3 @@ async def test_warning_if_no_entities_created( await setup_integration(hass, polling_config_entry, False) assert "No data found for Withings entry" in caplog.text - - -async def test_device_sensors_created_when_device_data_received( - hass: HomeAssistant, - withings: AsyncMock, - polling_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - device_registry: dr.DeviceRegistry, -) -> None: - """Test device sensors will be added if we receive device data.""" - withings.get_devices.return_value = [] - await setup_integration(hass, polling_config_entry, False) - - assert hass.states.get("sensor.body_battery") is None - - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get("sensor.body_battery") is None - - withings.get_devices.return_value = load_device_fixture() - - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get("sensor.body_battery") - assert device_registry.async_get_device( - {(DOMAIN, "f998be4b9ccc9e136fd8cd8e8e344c31ec3b271d")} - ) - - withings.get_devices.return_value = [] - - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get("sensor.body_battery") is None - assert not device_registry.async_get_device( - {(DOMAIN, "f998be4b9ccc9e136fd8cd8e8e344c31ec3b271d")} - ) - - -async def test_device_two_config_entries( - hass: HomeAssistant, - withings: AsyncMock, - polling_config_entry: MockConfigEntry, - second_polling_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - device_registry: dr.DeviceRegistry, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test device sensors will be added for one config entry only at a time.""" - await setup_integration(hass, polling_config_entry, False) - - assert hass.states.get("sensor.body_battery") is not None - - second_polling_config_entry.add_to_hass(hass) - await 
hass.config_entries.async_setup(second_polling_config_entry.entry_id) - - assert hass.states.get("sensor.not_henk_temperature") is not None - - assert "Platform withings does not generate unique IDs" not in caplog.text - - await hass.config_entries.async_unload(polling_config_entry.entry_id) - await hass.async_block_till_done() - - assert hass.states.get("sensor.body_battery").state == STATE_UNAVAILABLE - - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert hass.states.get("sensor.body_battery").state != STATE_UNAVAILABLE - - await hass.config_entries.async_setup(polling_config_entry.entry_id) - await hass.async_block_till_done() - - assert "Platform withings does not generate unique IDs" not in caplog.text diff --git a/tests/components/wiz/__init__.py b/tests/components/wiz/__init__.py index d84074e37d3..e80a1ed8249 100644 --- a/tests/components/wiz/__init__.py +++ b/tests/components/wiz/__init__.py @@ -1,10 +1,9 @@ """Tests for the WiZ Platform integration.""" -from collections.abc import Callable, Generator -from contextlib import _GeneratorContextManager, contextmanager +from collections.abc import Callable +from contextlib import contextmanager from copy import deepcopy import json -from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from pywizlight import SCENES, BulbType, PilotParser, wizlight @@ -195,11 +194,7 @@ async def setup_integration(hass: HomeAssistant) -> MockConfigEntry: return entry -def _mocked_wizlight( - device: dict[str, Any] | None, - extended_white_range: list[int] | None, - bulb_type: BulbType | None, -) -> wizlight: +def _mocked_wizlight(device, extended_white_range, bulb_type) -> wizlight: bulb = MagicMock(auto_spec=wizlight, name="Mocked wizlight") async def _save_setup_callback(callback: Callable) -> None: @@ -233,13 +228,9 @@ def _mocked_wizlight( return bulb -def _patch_wizlight( - device: dict[str, Any] | None = None, - extended_white_range: list[int] | None = None, - bulb_type: BulbType | None = None, -) -> _GeneratorContextManager: +def _patch_wizlight(device=None, extended_white_range=None, bulb_type=None): @contextmanager - def _patcher() -> Generator[None]: + def _patcher(): bulb = device or _mocked_wizlight(device, extended_white_range, bulb_type) with ( patch("homeassistant.components.wiz.wizlight", return_value=bulb), @@ -253,9 +244,9 @@ def _patch_wizlight( return _patcher() -def _patch_discovery() -> _GeneratorContextManager[None]: +def _patch_discovery(): @contextmanager - def _patcher() -> Generator[None]: + def _patcher(): with patch( "homeassistant.components.wiz.discovery.find_wizlights", return_value=[DiscoveredBulb(FAKE_IP, FAKE_MAC)], @@ -266,12 +257,8 @@ def _patch_discovery() -> _GeneratorContextManager[None]: async def async_setup_integration( - hass: HomeAssistant, - wizlight: wizlight | None = None, - device: dict[str, Any] | None = None, - extended_white_range: list[int] | None = None, - bulb_type: BulbType | None = None, -) -> tuple[wizlight, MockConfigEntry]: + hass, wizlight=None, device=None, extended_white_range=None, bulb_type=None +): """Set up the integration with a mock device.""" entry = MockConfigEntry( domain=DOMAIN, @@ -286,9 +273,7 @@ async def async_setup_integration( return bulb, entry -async def async_push_update( - hass: HomeAssistant, device: wizlight, params: dict[str, Any] -) -> None: +async def async_push_update(hass, device, params): """Push an update to the device.""" device.state = PilotParser(params) device.status = 
params.get("state") diff --git a/tests/components/wled/conftest.py b/tests/components/wled/conftest.py index 301729843a2..0d839fc8666 100644 --- a/tests/components/wled/conftest.py +++ b/tests/components/wled/conftest.py @@ -1,11 +1,11 @@ """Fixtures for WLED integration tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from freezegun.api import FrozenDateTimeFactory import pytest -from wled import Device as WLEDDevice, Releases +from typing_extensions import Generator +from wled import Device as WLEDDevice from homeassistant.components.wled.const import DOMAIN from homeassistant.const import CONF_HOST @@ -51,24 +51,7 @@ def device_fixture() -> str: @pytest.fixture -def mock_wled_releases() -> Generator[MagicMock]: - """Return a mocked WLEDReleases client.""" - with patch( - "homeassistant.components.wled.coordinator.WLEDReleases", autospec=True - ) as wled_releases_mock: - wled_releases = wled_releases_mock.return_value - wled_releases.releases.return_value = Releases( - beta="1.0.0b5", - stable="0.99.0", - ) - - yield wled_releases - - -@pytest.fixture -def mock_wled( - device_fixture: str, mock_wled_releases: MagicMock -) -> Generator[MagicMock]: +def mock_wled(device_fixture: str) -> Generator[MagicMock]: """Return a mocked WLED client.""" with ( patch( @@ -77,12 +60,11 @@ def mock_wled( patch("homeassistant.components.wled.config_flow.WLED", new=wled_mock), ): wled = wled_mock.return_value - wled.update.return_value = WLEDDevice.from_dict( + wled.update.return_value = WLEDDevice( load_json_object_fixture(f"{device_fixture}.json", DOMAIN) ) wled.connected = False wled.host = "127.0.0.1" - yield wled diff --git a/tests/components/wled/fixtures/cct.json b/tests/components/wled/fixtures/cct.json deleted file mode 100644 index da36f8a5f69..00000000000 --- a/tests/components/wled/fixtures/cct.json +++ /dev/null @@ -1,383 +0,0 @@ -{ - "state": { - "on": true, - "bri": 255, - "transition": 7, - "ps": 2, - "pl": -1, - "nl": { - "on": false, - "dur": 60, - "mode": 1, - "tbri": 0, - "rem": -1 - }, - "udpn": { - "send": false, - "recv": true, - "sgrp": 1, - "rgrp": 1 - }, - "lor": 0, - "mainseg": 0, - "seg": [ - { - "id": 0, - "start": 0, - "stop": 178, - "len": 178, - "grp": 1, - "spc": 0, - "of": 0, - "on": true, - "frz": false, - "bri": 255, - "cct": 53, - "set": 0, - "col": [ - [0, 0, 0, 255], - [0, 0, 0, 0], - [0, 0, 0, 0] - ], - "fx": 0, - "sx": 128, - "ix": 128, - "pal": 0, - "c1": 128, - "c2": 128, - "c3": 16, - "sel": true, - "rev": false, - "mi": false, - "o1": false, - "o2": false, - "o3": false, - "si": 0, - "m12": 0 - } - ] - }, - "info": { - "ver": "0.15.0-b3", - "vid": 2405180, - "cn": "Kōsen", - "release": "ESP32", - "leds": { - "count": 178, - "pwr": 0, - "fps": 0, - "maxpwr": 0, - "maxseg": 32, - "bootps": 1, - "seglc": [7], - "lc": 7, - "rgbw": true, - "wv": 2, - "cct": 4 - }, - "str": false, - "name": "WLED CCT light", - "udpport": 21324, - "simplifiedui": false, - "live": false, - "liveseg": -1, - "lm": "", - "lip": "", - "ws": 1, - "fxcount": 187, - "palcount": 75, - "cpalcount": 4, - "maps": [ - { - "id": 0 - } - ], - "wifi": { - "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -44, - "signal": 100, - "channel": 11 - }, - "fs": { - "u": 20, - "t": 983, - "pmt": 1721752272 - }, - "ndc": 1, - "arch": "esp32", - "core": "v3.3.6-16-gcc5440f6a2", - "clock": 240, - "flash": 4, - "lwip": 0, - "freeheap": 164804, - "uptime": 79769, - "time": "2024-7-24, 14:34:00", - "opt": 79, - "brand": "WLED", - "product": "FOSS", - "mac": 
"aabbccddeeff", - "ip": "127.0.0.1" - }, - "effects": [ - "Solid", - "Blink", - "Breathe", - "Wipe", - "Wipe Random", - "Random Colors", - "Sweep", - "Dynamic", - "Colorloop", - "Rainbow", - "Scan", - "Scan Dual", - "Fade", - "Theater", - "Theater Rainbow", - "Running", - "Saw", - "Twinkle", - "Dissolve", - "Dissolve Rnd", - "Sparkle", - "Sparkle Dark", - "Sparkle+", - "Strobe", - "Strobe Rainbow", - "Strobe Mega", - "Blink Rainbow", - "Android", - "Chase", - "Chase Random", - "Chase Rainbow", - "Chase Flash", - "Chase Flash Rnd", - "Rainbow Runner", - "Colorful", - "Traffic Light", - "Sweep Random", - "Chase 2", - "Aurora", - "Stream", - "Scanner", - "Lighthouse", - "Fireworks", - "Rain", - "Tetrix", - "Fire Flicker", - "Gradient", - "Loading", - "Rolling Balls", - "Fairy", - "Two Dots", - "Fairytwinkle", - "Running Dual", - "RSVD", - "Chase 3", - "Tri Wipe", - "Tri Fade", - "Lightning", - "ICU", - "Multi Comet", - "Scanner Dual", - "Stream 2", - "Oscillate", - "Pride 2015", - "Juggle", - "Palette", - "Fire 2012", - "Colorwaves", - "Bpm", - "Fill Noise", - "Noise 1", - "Noise 2", - "Noise 3", - "Noise 4", - "Colortwinkles", - "Lake", - "Meteor", - "Meteor Smooth", - "Railway", - "Ripple", - "Twinklefox", - "Twinklecat", - "Halloween Eyes", - "Solid Pattern", - "Solid Pattern Tri", - "Spots", - "Spots Fade", - "Glitter", - "Candle", - "Fireworks Starburst", - "Fireworks 1D", - "Bouncing Balls", - "Sinelon", - "Sinelon Dual", - "Sinelon Rainbow", - "Popcorn", - "Drip", - "Plasma", - "Percent", - "Ripple Rainbow", - "Heartbeat", - "Pacifica", - "Candle Multi", - "Solid Glitter", - "Sunrise", - "Phased", - "Twinkleup", - "Noise Pal", - "Sine", - "Phased Noise", - "Flow", - "Chunchun", - "Dancing Shadows", - "Washing Machine", - "Rotozoomer", - "Blends", - "TV Simulator", - "Dynamic Smooth", - "Spaceships", - "Crazy Bees", - "Ghost Rider", - "Blobs", - "Scrolling Text", - "Drift Rose", - "Distortion Waves", - "Soap", - "Octopus", - "Waving Cell", - "Pixels", - "Pixelwave", - "Juggles", - "Matripix", - "Gravimeter", - "Plasmoid", - "Puddles", - "Midnoise", - "Noisemeter", - "Freqwave", - "Freqmatrix", - "GEQ", - "Waterfall", - "Freqpixels", - "RSVD", - "Noisefire", - "Puddlepeak", - "Noisemove", - "Noise2D", - "Perlin Move", - "Ripple Peak", - "Firenoise", - "Squared Swirl", - "RSVD", - "DNA", - "Matrix", - "Metaballs", - "Freqmap", - "Gravcenter", - "Gravcentric", - "Gravfreq", - "DJ Light", - "Funky Plank", - "RSVD", - "Pulser", - "Blurz", - "Drift", - "Waverly", - "Sun Radiation", - "Colored Bursts", - "Julia", - "RSVD", - "RSVD", - "RSVD", - "Game Of Life", - "Tartan", - "Polar Lights", - "Swirl", - "Lissajous", - "Frizzles", - "Plasma Ball", - "Flow Stripe", - "Hiphotic", - "Sindots", - "DNA Spiral", - "Black Hole", - "Wavesins", - "Rocktaves", - "Akemi" - ], - "palettes": [ - "Default", - "* Random Cycle", - "* Color 1", - "* Colors 1&2", - "* Color Gradient", - "* Colors Only", - "Party", - "Cloud", - "Lava", - "Ocean", - "Forest", - "Rainbow", - "Rainbow Bands", - "Sunset", - "Rivendell", - "Breeze", - "Red & Blue", - "Yellowout", - "Analogous", - "Splash", - "Pastel", - "Sunset 2", - "Beach", - "Vintage", - "Departure", - "Landscape", - "Beech", - "Sherbet", - "Hult", - "Hult 64", - "Drywet", - "Jul", - "Grintage", - "Rewhi", - "Tertiary", - "Fire", - "Icefire", - "Cyane", - "Light Pink", - "Autumn", - "Magenta", - "Magred", - "Yelmag", - "Yelblu", - "Orange & Teal", - "Tiamat", - "April Night", - "Orangery", - "C9", - "Sakura", - "Aurora", - "Atlantica", - "C9 2", - "C9 New", - 
"Temperature", - "Aurora 2", - "Retro Clown", - "Candy", - "Toxy Reaf", - "Fairy Reaf", - "Semi Blue", - "Pink Candy", - "Red Reaf", - "Aqua Flash", - "Yelblu Hot", - "Lite Light", - "Red Flash", - "Blink Red", - "Red Shift", - "Red Tide", - "Candy2" - ] -} diff --git a/tests/components/wled/fixtures/rgb.json b/tests/components/wled/fixtures/rgb.json index 50a82eb792e..21f9b005b72 100644 --- a/tests/components/wled/fixtures/rgb.json +++ b/tests/components/wled/fixtures/rgb.json @@ -1,41 +1,28 @@ { "state": { "on": true, - "bri": 128, + "bri": 127, "transition": 7, "ps": -1, "pl": -1, "nl": { "on": false, "dur": 60, - "mode": 1, - "tbri": 0, - "rem": -1 + "fade": true, + "tbri": 0 }, "udpn": { "send": false, - "recv": true, - "sgrp": 1, - "rgrp": 1 + "recv": true }, - "lor": 0, - "mainseg": 1, "seg": [ { "id": 0, "start": 0, - "stop": 15, - "len": 15, - "grp": 1, - "spc": 0, - "of": 0, - "on": true, - "frz": false, - "bri": 255, - "cct": 127, - "set": 0, + "stop": 19, + "len": 20, "col": [ - [127, 172, 255], + [255, 159, 0], [0, 0, 0], [0, 0, 0] ], @@ -43,106 +30,62 @@ "sx": 32, "ix": 128, "pal": 0, - "c1": 128, - "c2": 128, - "c3": 16, - "sel": false, + "sel": true, "rev": false, - "mi": false, - "o1": false, - "o2": false, - "o3": false, - "si": 0, - "m12": 0 + "cln": -1 }, { "id": 1, - "start": 15, + "start": 20, "stop": 30, - "len": 15, - "grp": 1, - "spc": 0, - "of": 0, - "on": true, - "frz": false, - "bri": 255, - "cct": 127, - "set": 0, + "len": 10, "col": [ - [255, 170, 0], + [0, 255, 123], [0, 0, 0], [0, 0, 0] ], - "fx": 3, + "fx": 1, "sx": 16, "ix": 64, "pal": 1, - "c1": 128, - "c2": 128, - "c3": 16, "sel": true, "rev": true, - "mi": false, - "o1": false, - "o2": false, - "o3": false, - "si": 0, - "m12": 0 + "cln": -1 } ] }, "info": { - "ver": "0.14.4", - "vid": 2405180, + "ver": "0.8.5", + "version_latest_stable": "0.12.0", + "version_latest_beta": "0.13.0b1", + "vid": 1909122, "leds": { "count": 30, - "pwr": 515, - "fps": 5, - "maxpwr": 850, - "maxseg": 32, - "seglc": [1, 1], - "lc": 1, "rgbw": false, - "wv": 0, - "cct": 0 + "pin": [2], + "pwr": 470, + "maxpwr": 850, + "maxseg": 10 }, - "str": false, "name": "WLED RGB Light", "udpport": 21324, "live": false, - "liveseg": -1, - "lm": "", - "lip": "", - "ws": -1, - "fxcount": 187, - "palcount": 71, - "cpalcount": 0, - "maps": [ - { - "id": 0 - } - ], + "fxcount": 81, + "palcount": 50, "wifi": { "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -43, - "signal": 100, + "rssi": -62, + "signal": 76, "channel": 11 }, - "fs": { - "u": 12, - "t": 983, - "pmt": 1718827787 - }, - "ndc": 1, - "arch": "esp32", - "core": "v3.3.6-16-gcc5440f6a2", - "lwip": 0, - "freeheap": 198384, - "uptime": 966, - "time": "2024-6-19, 20:10:38", - "opt": 79, + "arch": "esp8266", + "core": "2_4_2", + "freeheap": 14600, + "uptime": 32, + "opt": 119, "brand": "WLED", - "product": "FOSS", + "product": "DIY light", + "btype": "bin", "mac": "aabbccddeeff", "ip": "127.0.0.1" }, @@ -158,21 +101,21 @@ "Colorloop", "Rainbow", "Scan", - "Scan Dual", + "Dual Scan", "Fade", - "Theater", - "Theater Rainbow", + "Chase", + "Chase Rainbow", "Running", "Saw", "Twinkle", "Dissolve", "Dissolve Rnd", "Sparkle", - "Sparkle Dark", + "Dark Sparkle", "Sparkle+", "Strobe", "Strobe Rainbow", - "Strobe Mega", + "Mega Strobe", "Blink Rainbow", "Android", "Chase", @@ -184,30 +127,30 @@ "Colorful", "Traffic Light", "Sweep Random", - "Chase 2", - "Aurora", + "Running 2", + "Red & Blue", "Stream", "Scanner", "Lighthouse", "Fireworks", "Rain", - "Tetrix", + "Merry Christmas", "Fire Flicker", 
"Gradient", "Loading", - "Rolling Balls", - "Fairy", - "Two Dots", - "Fairytwinkle", - "Running Dual", - "RSVD", - "Chase 3", + "In Out", + "In In", + "Out Out", + "Out In", + "Circus", + "Halloween", + "Tri Chase", "Tri Wipe", "Tri Fade", "Lightning", "ICU", "Multi Comet", - "Scanner Dual", + "Dual Scanner", "Stream 2", "Oscillate", "Pride 2015", @@ -215,133 +158,27 @@ "Palette", "Fire 2012", "Colorwaves", - "Bpm", + "BPM", "Fill Noise", "Noise 1", "Noise 2", "Noise 3", "Noise 4", - "Colortwinkles", + "Colortwinkle", "Lake", "Meteor", - "Meteor Smooth", + "Smooth Meteor", "Railway", "Ripple", - "Twinklefox", - "Twinklecat", - "Halloween Eyes", - "Solid Pattern", - "Solid Pattern Tri", - "Spots", - "Spots Fade", - "Glitter", - "Candle", - "Fireworks Starburst", - "Fireworks 1D", - "Bouncing Balls", - "Sinelon", - "Sinelon Dual", - "Sinelon Rainbow", - "Popcorn", - "Drip", - "Plasma", - "Percent", - "Ripple Rainbow", - "Heartbeat", - "Pacifica", - "Candle Multi", - "Solid Glitter", - "Sunrise", - "Phased", - "Twinkleup", - "Noise Pal", - "Sine", - "Phased Noise", - "Flow", - "Chunchun", - "Dancing Shadows", - "Washing Machine", - "RSVD", - "Blends", - "TV Simulator", - "Dynamic Smooth", - "Spaceships", - "Crazy Bees", - "Ghost Rider", - "Blobs", - "Scrolling Text", - "Drift Rose", - "Distortion Waves", - "Soap", - "Octopus", - "Waving Cell", - "Pixels", - "Pixelwave", - "Juggles", - "Matripix", - "Gravimeter", - "Plasmoid", - "Puddles", - "Midnoise", - "Noisemeter", - "Freqwave", - "Freqmatrix", - "GEQ", - "Waterfall", - "Freqpixels", - "RSVD", - "Noisefire", - "Puddlepeak", - "Noisemove", - "Noise2D", - "Perlin Move", - "Ripple Peak", - "Firenoise", - "Squared Swirl", - "RSVD", - "DNA", - "Matrix", - "Metaballs", - "Freqmap", - "Gravcenter", - "Gravcentric", - "Gravfreq", - "DJ Light", - "Funky Plank", - "RSVD", - "Pulser", - "Blurz", - "Drift", - "Waverly", - "Sun Radiation", - "Colored Bursts", - "Julia", - "RSVD", - "RSVD", - "RSVD", - "Game Of Life", - "Tartan", - "Polar Lights", - "Swirl", - "Lissajous", - "Frizzles", - "Plasma Ball", - "Flow Stripe", - "Hiphotic", - "Sindots", - "DNA Spiral", - "Black Hole", - "Wavesins", - "Rocktaves", - "Akemi" + "Twinklefox" ], "palettes": [ "Default", - "* Random Cycle", - "* Color 1", - "* Colors 1&2", - "* Color Gradient", - "* Colors Only", + "Random Cycle", + "Primary Color", + "Based on Primary", + "Set Colors", + "Based on Set", "Party", "Cloud", "Lava", @@ -358,11 +195,11 @@ "Splash", "Pastel", "Sunset 2", - "Beach", + "Beech", "Vintage", "Departure", "Landscape", - "Beech", + "Beach", "Sherbet", "Hult", "Hult 64", @@ -385,27 +222,6 @@ "April Night", "Orangery", "C9", - "Sakura", - "Aurora", - "Atlantica", - "C9 2", - "C9 New", - "Temperature", - "Aurora 2", - "Retro Clown", - "Candy", - "Toxy Reaf", - "Fairy Reaf", - "Semi Blue", - "Pink Candy", - "Red Reaf", - "Aqua Flash", - "Yelblu Hot", - "Lite Light", - "Red Flash", - "Blink Red", - "Red Shift", - "Red Tide", - "Candy2" + "Sakura" ] } diff --git a/tests/components/wled/fixtures/rgb_no_update.json b/tests/components/wled/fixtures/rgb_no_update.json new file mode 100644 index 00000000000..c8aa902cc95 --- /dev/null +++ b/tests/components/wled/fixtures/rgb_no_update.json @@ -0,0 +1,227 @@ +{ + "state": { + "on": true, + "bri": 127, + "transition": 7, + "ps": -1, + "pl": -1, + "nl": { + "on": false, + "dur": 60, + "fade": true, + "tbri": 0 + }, + "udpn": { + "send": false, + "recv": true + }, + "seg": [ + { + "id": 0, + "start": 0, + "stop": 19, + "len": 20, + "col": [ + [255, 159, 0], + 
[0, 0, 0], + [0, 0, 0] + ], + "fx": 0, + "sx": 32, + "ix": 128, + "pal": 0, + "sel": true, + "rev": false, + "cln": -1 + }, + { + "id": 1, + "start": 20, + "stop": 30, + "len": 10, + "col": [ + [0, 255, 123], + [0, 0, 0], + [0, 0, 0] + ], + "fx": 1, + "sx": 16, + "ix": 64, + "pal": 1, + "sel": true, + "rev": true, + "cln": -1 + } + ] + }, + "info": { + "ver": null, + "version_latest_stable": null, + "version_latest_beta": null, + "vid": 1909122, + "leds": { + "count": 30, + "rgbw": false, + "pin": [2], + "pwr": 470, + "maxpwr": 850, + "maxseg": 10 + }, + "name": "WLED RGB Light", + "udpport": 21324, + "live": false, + "fxcount": 81, + "palcount": 50, + "wifi": { + "bssid": "AA:AA:AA:AA:AA:BB", + "rssi": -62, + "signal": 76, + "channel": 11 + }, + "arch": "esp8266", + "core": "2_4_2", + "freeheap": 14600, + "uptime": 32, + "opt": 119, + "brand": "WLED", + "product": "DIY light", + "btype": "bin", + "mac": "aabbccddeeff", + "ip": "127.0.0.1" + }, + "effects": [ + "Solid", + "Blink", + "Breathe", + "Wipe", + "Wipe Random", + "Random Colors", + "Sweep", + "Dynamic", + "Colorloop", + "Rainbow", + "Scan", + "Dual Scan", + "Fade", + "Chase", + "Chase Rainbow", + "Running", + "Saw", + "Twinkle", + "Dissolve", + "Dissolve Rnd", + "Sparkle", + "Dark Sparkle", + "Sparkle+", + "Strobe", + "Strobe Rainbow", + "Mega Strobe", + "Blink Rainbow", + "Android", + "Chase", + "Chase Random", + "Chase Rainbow", + "Chase Flash", + "Chase Flash Rnd", + "Rainbow Runner", + "Colorful", + "Traffic Light", + "Sweep Random", + "Running 2", + "Red & Blue", + "Stream", + "Scanner", + "Lighthouse", + "Fireworks", + "Rain", + "Merry Christmas", + "Fire Flicker", + "Gradient", + "Loading", + "In Out", + "In In", + "Out Out", + "Out In", + "Circus", + "Halloween", + "Tri Chase", + "Tri Wipe", + "Tri Fade", + "Lightning", + "ICU", + "Multi Comet", + "Dual Scanner", + "Stream 2", + "Oscillate", + "Pride 2015", + "Juggle", + "Palette", + "Fire 2012", + "Colorwaves", + "BPM", + "Fill Noise", + "Noise 1", + "Noise 2", + "Noise 3", + "Noise 4", + "Colortwinkle", + "Lake", + "Meteor", + "Smooth Meteor", + "Railway", + "Ripple", + "Twinklefox" + ], + "palettes": [ + "Default", + "Random Cycle", + "Primary Color", + "Based on Primary", + "Set Colors", + "Based on Set", + "Party", + "Cloud", + "Lava", + "Ocean", + "Forest", + "Rainbow", + "Rainbow Bands", + "Sunset", + "Rivendell", + "Breeze", + "Red & Blue", + "Yellowout", + "Analogous", + "Splash", + "Pastel", + "Sunset 2", + "Beech", + "Vintage", + "Departure", + "Landscape", + "Beach", + "Sherbet", + "Hult", + "Hult 64", + "Drywet", + "Jul", + "Grintage", + "Rewhi", + "Tertiary", + "Fire", + "Icefire", + "Cyane", + "Light Pink", + "Autumn", + "Magenta", + "Magred", + "Yelmag", + "Yelblu", + "Orange & Teal", + "Tiamat", + "April Night", + "Orangery", + "C9", + "Sakura" + ] +} diff --git a/tests/components/wled/fixtures/rgb_single_segment.json b/tests/components/wled/fixtures/rgb_single_segment.json index 512ac2a00df..aa0b79e98f5 100644 --- a/tests/components/wled/fixtures/rgb_single_segment.json +++ b/tests/components/wled/fixtures/rgb_single_segment.json @@ -1,41 +1,28 @@ { "state": { "on": true, - "bri": 128, + "bri": 127, "transition": 7, "ps": -1, "pl": -1, "nl": { "on": false, "dur": 60, - "mode": 1, - "tbri": 0, - "rem": -1 + "fade": true, + "tbri": 0 }, "udpn": { "send": false, - "recv": true, - "sgrp": 1, - "rgrp": 1 + "recv": true }, - "lor": 0, - "mainseg": 0, "seg": [ { "id": 0, "start": 0, "stop": 30, - "len": 30, - "grp": 1, - "spc": 0, - "of": 0, - "on": true, - 
"frz": false, - "bri": 255, - "cct": 127, - "set": 0, + "len": 20, "col": [ - [127, 172, 255], + [255, 159, 0], [0, 0, 0], [0, 0, 0] ], @@ -43,72 +30,44 @@ "sx": 32, "ix": 128, "pal": 0, - "c1": 128, - "c2": 128, - "c3": 16, "sel": true, "rev": false, - "mi": false, - "o1": false, - "o2": false, - "o3": false, - "si": 0, - "m12": 0 + "cln": -1 } ] }, "info": { - "ver": "1.0.0b4", - "vid": 2405180, + "ver": "0.8.6b1", + "version_latest_stable": "0.8.5", + "version_latest_beta": "0.8.6b2", + "vid": 1909122, "leds": { "count": 30, - "pwr": 536, - "fps": 5, - "maxpwr": 850, - "maxseg": 32, - "seglc": [1], - "lc": 1, "rgbw": false, - "wv": 0, - "cct": 0 + "pin": [2], + "pwr": 470, + "maxpwr": 850, + "maxseg": 10 }, - "str": false, "name": "WLED RGB Light", "udpport": 21324, "live": false, - "liveseg": -1, - "lm": "", - "lip": "", - "ws": -1, - "fxcount": 187, - "palcount": 71, - "cpalcount": 0, - "maps": [ - { - "id": 0 - } - ], + "fxcount": 81, + "palcount": 50, "wifi": { "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -44, - "signal": 100, + "rssi": -62, + "signal": 76, "channel": 11 }, - "fs": { - "u": 12, - "t": 983, - "pmt": 0 - }, - "ndc": 1, - "arch": "esp32", - "core": "v3.3.6-16-gcc5440f6a2", - "lwip": 0, - "freeheap": 196960, - "uptime": 461, - "time": "1970-1-1, 00:07:41", - "opt": 79, + "arch": "esp8266", + "core": "2_4_2", + "freeheap": 14600, + "uptime": 32, + "opt": 119, "brand": "WLED", - "product": "FOSS", + "product": "DIY light", + "btype": "bin", "mac": "aabbccddeeff", "ip": "127.0.0.1" }, @@ -124,21 +83,21 @@ "Colorloop", "Rainbow", "Scan", - "Scan Dual", + "Dual Scan", "Fade", - "Theater", - "Theater Rainbow", + "Chase", + "Chase Rainbow", "Running", "Saw", "Twinkle", "Dissolve", "Dissolve Rnd", "Sparkle", - "Sparkle Dark", + "Dark Sparkle", "Sparkle+", "Strobe", "Strobe Rainbow", - "Strobe Mega", + "Mega Strobe", "Blink Rainbow", "Android", "Chase", @@ -150,30 +109,30 @@ "Colorful", "Traffic Light", "Sweep Random", - "Chase 2", - "Aurora", + "Running 2", + "Red & Blue", "Stream", "Scanner", "Lighthouse", "Fireworks", "Rain", - "Tetrix", + "Merry Christmas", "Fire Flicker", "Gradient", "Loading", - "Rolling Balls", - "Fairy", - "Two Dots", - "Fairytwinkle", - "Running Dual", - "RSVD", - "Chase 3", + "In Out", + "In In", + "Out Out", + "Out In", + "Circus", + "Halloween", + "Tri Chase", "Tri Wipe", "Tri Fade", "Lightning", "ICU", "Multi Comet", - "Scanner Dual", + "Dual Scanner", "Stream 2", "Oscillate", "Pride 2015", @@ -181,133 +140,27 @@ "Palette", "Fire 2012", "Colorwaves", - "Bpm", + "BPM", "Fill Noise", "Noise 1", "Noise 2", "Noise 3", "Noise 4", - "Colortwinkles", + "Colortwinkle", "Lake", "Meteor", - "Meteor Smooth", + "Smooth Meteor", "Railway", "Ripple", - "Twinklefox", - "Twinklecat", - "Halloween Eyes", - "Solid Pattern", - "Solid Pattern Tri", - "Spots", - "Spots Fade", - "Glitter", - "Candle", - "Fireworks Starburst", - "Fireworks 1D", - "Bouncing Balls", - "Sinelon", - "Sinelon Dual", - "Sinelon Rainbow", - "Popcorn", - "Drip", - "Plasma", - "Percent", - "Ripple Rainbow", - "Heartbeat", - "Pacifica", - "Candle Multi", - "Solid Glitter", - "Sunrise", - "Phased", - "Twinkleup", - "Noise Pal", - "Sine", - "Phased Noise", - "Flow", - "Chunchun", - "Dancing Shadows", - "Washing Machine", - "RSVD", - "Blends", - "TV Simulator", - "Dynamic Smooth", - "Spaceships", - "Crazy Bees", - "Ghost Rider", - "Blobs", - "Scrolling Text", - "Drift Rose", - "Distortion Waves", - "Soap", - "Octopus", - "Waving Cell", - "Pixels", - "Pixelwave", - "Juggles", - "Matripix", - "Gravimeter", - 
"Plasmoid", - "Puddles", - "Midnoise", - "Noisemeter", - "Freqwave", - "Freqmatrix", - "GEQ", - "Waterfall", - "Freqpixels", - "RSVD", - "Noisefire", - "Puddlepeak", - "Noisemove", - "Noise2D", - "Perlin Move", - "Ripple Peak", - "Firenoise", - "Squared Swirl", - "RSVD", - "DNA", - "Matrix", - "Metaballs", - "Freqmap", - "Gravcenter", - "Gravcentric", - "Gravfreq", - "DJ Light", - "Funky Plank", - "RSVD", - "Pulser", - "Blurz", - "Drift", - "Waverly", - "Sun Radiation", - "Colored Bursts", - "Julia", - "RSVD", - "RSVD", - "RSVD", - "Game Of Life", - "Tartan", - "Polar Lights", - "Swirl", - "Lissajous", - "Frizzles", - "Plasma Ball", - "Flow Stripe", - "Hiphotic", - "Sindots", - "DNA Spiral", - "Black Hole", - "Wavesins", - "Rocktaves", - "Akemi" + "Twinklefox" ], "palettes": [ "Default", - "* Random Cycle", - "* Color 1", - "* Colors 1&2", - "* Color Gradient", - "* Colors Only", + "Random Cycle", + "Primary Color", + "Based on Primary", + "Set Colors", + "Based on Set", "Party", "Cloud", "Lava", @@ -324,11 +177,11 @@ "Splash", "Pastel", "Sunset 2", - "Beach", + "Beech", "Vintage", "Departure", "Landscape", - "Beech", + "Beach", "Sherbet", "Hult", "Hult 64", @@ -351,27 +204,6 @@ "April Night", "Orangery", "C9", - "Sakura", - "Aurora", - "Atlantica", - "C9 2", - "C9 New", - "Temperature", - "Aurora 2", - "Retro Clown", - "Candy", - "Toxy Reaf", - "Fairy Reaf", - "Semi Blue", - "Pink Candy", - "Red Reaf", - "Aqua Flash", - "Yelblu Hot", - "Lite Light", - "Red Flash", - "Blink Red", - "Red Shift", - "Red Tide", - "Candy2" + "Sakura" ] } diff --git a/tests/components/wled/fixtures/rgb_websocket.json b/tests/components/wled/fixtures/rgb_websocket.json index f5a3e715654..4a0ed7b1ee5 100644 --- a/tests/components/wled/fixtures/rgb_websocket.json +++ b/tests/components/wled/fixtures/rgb_websocket.json @@ -1,22 +1,26 @@ { "state": { "on": true, - "bri": 128, + "bri": 255, "transition": 7, "ps": -1, "pl": -1, + "ccnf": { + "min": 1, + "max": 5, + "time": 12 + }, "nl": { "on": false, "dur": 60, + "fade": true, "mode": 1, "tbri": 0, "rem": -1 }, "udpn": { "send": false, - "recv": true, - "sgrp": 1, - "rgrp": 1 + "recv": true }, "lor": 0, "mainseg": 0, @@ -24,89 +28,70 @@ { "id": 0, "start": 0, - "stop": 30, - "len": 30, + "stop": 13, + "len": 13, "grp": 1, "spc": 0, - "of": 0, "on": true, - "frz": false, "bri": 255, - "cct": 127, - "set": 0, "col": [ - [127, 172, 255], + [255, 181, 218], [0, 0, 0], [0, 0, 0] ], "fx": 0, - "sx": 128, + "sx": 43, "ix": 128, - "pal": 0, - "c1": 128, - "c2": 128, - "c3": 16, + "pal": 2, "sel": true, "rev": false, - "mi": false, - "o1": false, - "o2": false, - "o3": false, - "si": 0, - "m12": 0 + "mi": false } ] }, "info": { - "ver": "0.99.0", - "vid": 2405180, + "ver": "0.12.0-b2", + "version_latest_stable": "0.11.0", + "version_latest_beta": "0.12.0-b2", + "vid": 2103220, "leds": { - "count": 30, - "pwr": 536, - "fps": 5, - "maxpwr": 850, - "maxseg": 32, - "seglc": [1], - "lc": 1, + "count": 13, "rgbw": false, - "wv": 0, - "cct": 0 + "wv": false, + "pin": [2], + "pwr": 266, + "fps": 2, + "maxpwr": 1000, + "maxseg": 12, + "seglock": false }, "str": false, "name": "WLED WebSocket", "udpport": 21324, "live": false, - "liveseg": -1, "lm": "", "lip": "", "ws": 0, - "fxcount": 187, - "palcount": 71, - "cpalcount": 0, - "maps": [ - { - "id": 0 - } - ], + "fxcount": 118, + "palcount": 56, "wifi": { "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -44, - "signal": 100, - "channel": 11 + "rssi": -68, + "signal": 64, + "channel": 6 }, "fs": { - "u": 12, - "t": 983, - "pmt": 0 + "u": 40, + 
"t": 1024, + "pmt": 1623156685 }, "ndc": 1, - "arch": "esp32", - "core": "v3.3.6-16-gcc5440f6a2", - "lwip": 0, - "freeheap": 196960, - "uptime": 461, - "time": "1970-1-1, 00:07:41", - "opt": 79, + "arch": "esp8266", + "core": "2_7_4_7", + "lwip": 1, + "freeheap": 22752, + "uptime": 258411, + "opt": 127, "brand": "WLED", "product": "FOSS", "mac": "aabbccddeeff", @@ -150,7 +135,7 @@ "Colorful", "Traffic Light", "Sweep Random", - "Chase 2", + "Running 2", "Aurora", "Stream", "Scanner", @@ -161,13 +146,13 @@ "Fire Flicker", "Gradient", "Loading", - "Rolling Balls", - "Fairy", + "Police", + "Police All", "Two Dots", - "Fairytwinkle", - "Running Dual", - "RSVD", - "Chase 3", + "Two Areas", + "Circus", + "Halloween", + "Tri Chase", "Tri Wipe", "Tri Fade", "Lightning", @@ -227,79 +212,10 @@ "Chunchun", "Dancing Shadows", "Washing Machine", - "RSVD", + "Candy Cane", "Blends", "TV Simulator", - "Dynamic Smooth", - "Spaceships", - "Crazy Bees", - "Ghost Rider", - "Blobs", - "Scrolling Text", - "Drift Rose", - "Distortion Waves", - "Soap", - "Octopus", - "Waving Cell", - "Pixels", - "Pixelwave", - "Juggles", - "Matripix", - "Gravimeter", - "Plasmoid", - "Puddles", - "Midnoise", - "Noisemeter", - "Freqwave", - "Freqmatrix", - "GEQ", - "Waterfall", - "Freqpixels", - "RSVD", - "Noisefire", - "Puddlepeak", - "Noisemove", - "Noise2D", - "Perlin Move", - "Ripple Peak", - "Firenoise", - "Squared Swirl", - "RSVD", - "DNA", - "Matrix", - "Metaballs", - "Freqmap", - "Gravcenter", - "Gravcentric", - "Gravfreq", - "DJ Light", - "Funky Plank", - "RSVD", - "Pulser", - "Blurz", - "Drift", - "Waverly", - "Sun Radiation", - "Colored Bursts", - "Julia", - "RSVD", - "RSVD", - "RSVD", - "Game Of Life", - "Tartan", - "Polar Lights", - "Swirl", - "Lissajous", - "Frizzles", - "Plasma Ball", - "Flow Stripe", - "Hiphotic", - "Sindots", - "DNA Spiral", - "Black Hole", - "Wavesins", - "Rocktaves", - "Akemi" + "Dynamic Smooth" ], "palettes": [ "Default", @@ -324,11 +240,11 @@ "Splash", "Pastel", "Sunset 2", - "Beach", + "Beech", "Vintage", "Departure", "Landscape", - "Beech", + "Beach", "Sherbet", "Hult", "Hult 64", @@ -357,21 +273,6 @@ "C9 2", "C9 New", "Temperature", - "Aurora 2", - "Retro Clown", - "Candy", - "Toxy Reaf", - "Fairy Reaf", - "Semi Blue", - "Pink Candy", - "Red Reaf", - "Aqua Flash", - "Yelblu Hot", - "Lite Light", - "Red Flash", - "Blink Red", - "Red Shift", - "Red Tide", - "Candy2" + "Aurora 2" ] } diff --git a/tests/components/wled/fixtures/rgbw.json b/tests/components/wled/fixtures/rgbw.json index 285842605ae..100b3936900 100644 --- a/tests/components/wled/fixtures/rgbw.json +++ b/tests/components/wled/fixtures/rgbw.json @@ -1,115 +1,74 @@ { "state": { "on": true, - "bri": 128, + "bri": 140, "transition": 7, - "ps": -1, - "pl": -1, + "ps": 1, + "pl": 3, "nl": { "on": false, "dur": 60, - "mode": 1, - "tbri": 0, - "rem": -1 + "fade": true, + "tbri": 0 }, "udpn": { "send": false, - "recv": true, - "sgrp": 1, - "rgrp": 1 + "recv": true }, - "lor": 0, - "mainseg": 0, "seg": [ { "id": 0, "start": 0, - "stop": 30, - "len": 30, - "grp": 1, - "spc": 0, - "of": 0, - "on": true, - "frz": false, - "bri": 255, - "cct": 127, - "set": 0, + "stop": 13, + "len": 13, "col": [ [255, 0, 0, 139], [0, 0, 0, 0], [0, 0, 0, 0] ], - "fx": 0, - "sx": 128, + "fx": 9, + "sx": 165, "ix": 128, "pal": 0, - "c1": 128, - "c2": 128, - "c3": 16, "sel": true, "rev": false, - "mi": false, - "o1": false, - "o2": false, - "o3": false, - "si": 0, - "m12": 0 + "cln": -1 } ] }, "info": { - "ver": "0.99.0b1", - "vid": 2405180, + "ver": "0.8.6b4", + 
"version_latest_stable": "0.8.6", + "version_latest_beta": "0.8.6b5", + "vid": 1910255, "leds": { - "count": 30, - "pwr": 536, - "fps": 5, - "maxpwr": 850, - "maxseg": 32, - "seglc": [3], - "lc": 3, + "count": 13, "rgbw": true, - "wv": 0, - "cct": 0 + "pin": [2], + "pwr": 208, + "maxpwr": 850, + "maxseg": 10 }, - "str": false, "name": "WLED RGBW Light", "udpport": 21324, "live": false, - "liveseg": -1, - "lm": "", - "lip": "", - "ws": -1, - "fxcount": 187, - "palcount": 71, - "cpalcount": 0, - "maps": [ - { - "id": 0 - } - ], + "fxcount": 83, + "palcount": 50, "wifi": { "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -44, - "signal": 100, + "rssi": -62, + "signal": 76, "channel": 11 }, - "fs": { - "u": 12, - "t": 983, - "pmt": 0 - }, - "ndc": 1, - "arch": "esp32", - "core": "v3.3.6-16-gcc5440f6a2", - "lwip": 0, - "freeheap": 196960, - "uptime": 461, - "time": "1970-1-1, 00:07:41", - "opt": 79, + "arch": "esp8266", + "core": "2_5_2", + "freeheap": 20136, + "uptime": 5591, + "opt": 119, "brand": "WLED", - "product": "FOSS", - "mac": "aabbccddeeff", + "product": "DIY light", + "btype": "bin", + "mac": "aabbccddee11", "ip": "127.0.0.1" }, "effects": [ @@ -124,21 +83,21 @@ "Colorloop", "Rainbow", "Scan", - "Scan Dual", + "Dual Scan", "Fade", - "Theater", - "Theater Rainbow", + "Chase", + "Chase Rainbow", "Running", "Saw", "Twinkle", "Dissolve", "Dissolve Rnd", "Sparkle", - "Sparkle Dark", + "Dark Sparkle", "Sparkle+", "Strobe", "Strobe Rainbow", - "Strobe Mega", + "Mega Strobe", "Blink Rainbow", "Android", "Chase", @@ -150,30 +109,30 @@ "Colorful", "Traffic Light", "Sweep Random", - "Chase 2", - "Aurora", + "Running 2", + "Red & Blue", "Stream", "Scanner", "Lighthouse", "Fireworks", "Rain", - "Tetrix", + "Merry Christmas", "Fire Flicker", "Gradient", "Loading", - "Rolling Balls", - "Fairy", - "Two Dots", - "Fairytwinkle", - "Running Dual", - "RSVD", - "Chase 3", + "In Out", + "In In", + "Out Out", + "Out In", + "Circus", + "Halloween", + "Tri Chase", "Tri Wipe", "Tri Fade", "Lightning", "ICU", "Multi Comet", - "Scanner Dual", + "Dual Scanner", "Stream 2", "Oscillate", "Pride 2015", @@ -181,7 +140,7 @@ "Palette", "Fire 2012", "Colorwaves", - "Bpm", + "BPM", "Fill Noise", "Noise 1", "Noise 2", @@ -190,124 +149,20 @@ "Colortwinkles", "Lake", "Meteor", - "Meteor Smooth", + "Smooth Meteor", "Railway", "Ripple", "Twinklefox", "Twinklecat", - "Halloween Eyes", - "Solid Pattern", - "Solid Pattern Tri", - "Spots", - "Spots Fade", - "Glitter", - "Candle", - "Fireworks Starburst", - "Fireworks 1D", - "Bouncing Balls", - "Sinelon", - "Sinelon Dual", - "Sinelon Rainbow", - "Popcorn", - "Drip", - "Plasma", - "Percent", - "Ripple Rainbow", - "Heartbeat", - "Pacifica", - "Candle Multi", - "Solid Glitter", - "Sunrise", - "Phased", - "Twinkleup", - "Noise Pal", - "Sine", - "Phased Noise", - "Flow", - "Chunchun", - "Dancing Shadows", - "Washing Machine", - "RSVD", - "Blends", - "TV Simulator", - "Dynamic Smooth", - "Spaceships", - "Crazy Bees", - "Ghost Rider", - "Blobs", - "Scrolling Text", - "Drift Rose", - "Distortion Waves", - "Soap", - "Octopus", - "Waving Cell", - "Pixels", - "Pixelwave", - "Juggles", - "Matripix", - "Gravimeter", - "Plasmoid", - "Puddles", - "Midnoise", - "Noisemeter", - "Freqwave", - "Freqmatrix", - "GEQ", - "Waterfall", - "Freqpixels", - "RSVD", - "Noisefire", - "Puddlepeak", - "Noisemove", - "Noise2D", - "Perlin Move", - "Ripple Peak", - "Firenoise", - "Squared Swirl", - "RSVD", - "DNA", - "Matrix", - "Metaballs", - "Freqmap", - "Gravcenter", - "Gravcentric", - "Gravfreq", - "DJ Light", - 
"Funky Plank", - "RSVD", - "Pulser", - "Blurz", - "Drift", - "Waverly", - "Sun Radiation", - "Colored Bursts", - "Julia", - "RSVD", - "RSVD", - "RSVD", - "Game Of Life", - "Tartan", - "Polar Lights", - "Swirl", - "Lissajous", - "Frizzles", - "Plasma Ball", - "Flow Stripe", - "Hiphotic", - "Sindots", - "DNA Spiral", - "Black Hole", - "Wavesins", - "Rocktaves", - "Akemi" + "Halloween Eyes" ], "palettes": [ "Default", - "* Random Cycle", - "* Color 1", - "* Colors 1&2", - "* Color Gradient", - "* Colors Only", + "Random Cycle", + "Primary Color", + "Based on Primary", + "Set Colors", + "Based on Set", "Party", "Cloud", "Lava", @@ -324,11 +179,11 @@ "Splash", "Pastel", "Sunset 2", - "Beach", + "Beech", "Vintage", "Departure", "Landscape", - "Beech", + "Beach", "Sherbet", "Hult", "Hult 64", @@ -351,82 +206,36 @@ "April Night", "Orangery", "C9", - "Sakura", - "Aurora", - "Atlantica", - "C9 2", - "C9 New", - "Temperature", - "Aurora 2", - "Retro Clown", - "Candy", - "Toxy Reaf", - "Fairy Reaf", - "Semi Blue", - "Pink Candy", - "Red Reaf", - "Aqua Flash", - "Yelblu Hot", - "Lite Light", - "Red Flash", - "Blink Red", - "Red Shift", - "Red Tide", - "Candy2" + "Sakura" ], "presets": { "0": {}, "1": { - "on": true, - "bri": 128, + "on": false, + "bri": 255, "transition": 7, "mainseg": 0, "seg": [ { "id": 0, "start": 0, - "stop": 131, + "stop": 13, "grp": 1, "spc": 0, - "of": 0, "on": true, - "frz": false, "bri": 255, - "cct": 127, - "set": 0, - "n": "", "col": [ - [40, 255, 3], + [97, 144, 255], [0, 0, 0], [0, 0, 0] ], - "fx": 0, - "sx": 128, - "ix": 128, - "pal": 0, - "c1": 128, - "c2": 128, - "c3": 16, + "fx": 9, + "sx": 183, + "ix": 255, + "pal": 1, "sel": true, "rev": false, - "mi": false, - "o1": false, - "o2": false, - "o3": false, - "si": 0, - "m12": 0 - }, - { - "stop": 0 - }, - { - "stop": 0 - }, - { - "stop": 0 - }, - { - "stop": 0 + "mi": false }, { "stop": 0 @@ -465,56 +274,31 @@ "n": "Preset 1" }, "2": { - "on": true, - "bri": 128, + "on": false, + "bri": 255, "transition": 7, "mainseg": 0, "seg": [ { "id": 0, "start": 0, - "stop": 131, + "stop": 13, "grp": 1, "spc": 0, - "of": 0, "on": true, - "frz": false, "bri": 255, - "cct": 127, - "set": 0, - "n": "", "col": [ - [51, 88, 255], + [97, 144, 255], [0, 0, 0], [0, 0, 0] ], - "fx": 0, - "sx": 128, - "ix": 128, - "pal": 0, - "c1": 128, - "c2": 128, - "c3": 16, + "fx": 9, + "sx": 183, + "ix": 255, + "pal": 1, "sel": true, "rev": false, - "mi": false, - "o1": false, - "o2": false, - "o3": false, - "si": 0, - "m12": 0 - }, - { - "stop": 0 - }, - { - "stop": 0 - }, - { - "stop": 0 - }, - { - "stop": 0 + "mi": false }, { "stop": 0 @@ -555,25 +339,23 @@ "3": { "playlist": { "ps": [1, 2], - "dur": [300, 300], + "dur": [30, 30], "transition": [7, 7], "repeat": 0, - "end": 0, - "r": 0 + "r": false, + "end": 0 }, - "on": true, "n": "Playlist 1" }, "4": { "playlist": { - "ps": [2, 0], - "dur": [300, 300], + "ps": [1, 2], + "dur": [30, 30], "transition": [7, 7], "repeat": 0, - "end": 0, - "r": 0 + "r": false, + "end": 0 }, - "on": true, "n": "Playlist 2" } } diff --git a/tests/components/wled/snapshots/test_button.ambr b/tests/components/wled/snapshots/test_button.ambr index 4e6260bc9bd..9c91c0e0050 100644 --- a/tests/components/wled/snapshots/test_button.ambr +++ b/tests/components/wled/snapshots/test_button.ambr @@ -59,7 +59,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp32', + 'hw_version': 'esp8266', 'id': , 'identifiers': set({ tuple( @@ -71,14 +71,13 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'FOSS', - 
'model_id': None, + 'model': 'DIY light', 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.14.4', + 'sw_version': '0.8.5', 'via_device_id': None, }) # --- diff --git a/tests/components/wled/snapshots/test_diagnostics.ambr b/tests/components/wled/snapshots/test_diagnostics.ambr index 90732c02c36..643e5fe4ad0 100644 --- a/tests/components/wled/snapshots/test_diagnostics.ambr +++ b/tests/components/wled/snapshots/test_diagnostics.ambr @@ -5,109 +5,22 @@ '0': 'Solid', '1': 'Blink', '10': 'Scan', - '100': 'Heartbeat', - '101': 'Pacifica', - '102': 'Candle Multi', - '103': 'Solid Glitter', - '104': 'Sunrise', - '105': 'Phased', - '106': 'Twinkleup', - '107': 'Noise Pal', - '108': 'Sine', - '109': 'Phased Noise', - '11': 'Scan Dual', - '110': 'Flow', - '111': 'Chunchun', - '112': 'Dancing Shadows', - '113': 'Washing Machine', - '114': 'RSVD', - '115': 'Blends', - '116': 'TV Simulator', - '117': 'Dynamic Smooth', - '118': 'Spaceships', - '119': 'Crazy Bees', + '11': 'Dual Scan', '12': 'Fade', - '120': 'Ghost Rider', - '121': 'Blobs', - '122': 'Scrolling Text', - '123': 'Drift Rose', - '124': 'Distortion Waves', - '125': 'Soap', - '126': 'Octopus', - '127': 'Waving Cell', - '128': 'Pixels', - '129': 'Pixelwave', - '13': 'Theater', - '130': 'Juggles', - '131': 'Matripix', - '132': 'Gravimeter', - '133': 'Plasmoid', - '134': 'Puddles', - '135': 'Midnoise', - '136': 'Noisemeter', - '137': 'Freqwave', - '138': 'Freqmatrix', - '139': 'GEQ', - '14': 'Theater Rainbow', - '140': 'Waterfall', - '141': 'Freqpixels', - '142': 'RSVD', - '143': 'Noisefire', - '144': 'Puddlepeak', - '145': 'Noisemove', - '146': 'Noise2D', - '147': 'Perlin Move', - '148': 'Ripple Peak', - '149': 'Firenoise', + '13': 'Chase', + '14': 'Chase Rainbow', '15': 'Running', - '150': 'Squared Swirl', - '151': 'RSVD', - '152': 'DNA', - '153': 'Matrix', - '154': 'Metaballs', - '155': 'Freqmap', - '156': 'Gravcenter', - '157': 'Gravcentric', - '158': 'Gravfreq', - '159': 'DJ Light', '16': 'Saw', - '160': 'Funky Plank', - '161': 'RSVD', - '162': 'Pulser', - '163': 'Blurz', - '164': 'Drift', - '165': 'Waverly', - '166': 'Sun Radiation', - '167': 'Colored Bursts', - '168': 'Julia', - '169': 'RSVD', '17': 'Twinkle', - '170': 'RSVD', - '171': 'RSVD', - '172': 'Game Of Life', - '173': 'Tartan', - '174': 'Polar Lights', - '175': 'Swirl', - '176': 'Lissajous', - '177': 'Frizzles', - '178': 'Plasma Ball', - '179': 'Flow Stripe', '18': 'Dissolve', - '180': 'Hiphotic', - '181': 'Sindots', - '182': 'DNA Spiral', - '183': 'Black Hole', - '184': 'Wavesins', - '185': 'Rocktaves', - '186': 'Akemi', '19': 'Dissolve Rnd', '2': 'Breathe', '20': 'Sparkle', - '21': 'Sparkle Dark', + '21': 'Dark Sparkle', '22': 'Sparkle+', '23': 'Strobe', '24': 'Strobe Rainbow', - '25': 'Strobe Mega', + '25': 'Mega Strobe', '26': 'Blink Rainbow', '27': 'Android', '28': 'Chase', @@ -120,33 +33,33 @@ '34': 'Colorful', '35': 'Traffic Light', '36': 'Sweep Random', - '37': 'Chase 2', - '38': 'Aurora', + '37': 'Running 2', + '38': 'Red & Blue', '39': 'Stream', '4': 'Wipe Random', '40': 'Scanner', '41': 'Lighthouse', '42': 'Fireworks', '43': 'Rain', - '44': 'Tetrix', + '44': 'Merry Christmas', '45': 'Fire Flicker', '46': 'Gradient', '47': 'Loading', - '48': 'Rolling Balls', - '49': 'Fairy', + '48': 'In Out', + '49': 'In In', '5': 'Random Colors', - '50': 'Two Dots', - '51': 'Fairytwinkle', - '52': 'Running Dual', - '53': 'RSVD', - '54': 'Chase 3', + '50': 'Out Out', + '51': 'Out In', + '52': 
'Circus', + '53': 'Halloween', + '54': 'Tri Chase', '55': 'Tri Wipe', '56': 'Tri Fade', '57': 'Lightning', '58': 'ICU', '59': 'Multi Comet', '6': 'Sweep', - '60': 'Scanner Dual', + '60': 'Dual Scanner', '61': 'Stream 2', '62': 'Oscillate', '63': 'Pride 2015', @@ -154,82 +67,55 @@ '65': 'Palette', '66': 'Fire 2012', '67': 'Colorwaves', - '68': 'Bpm', + '68': 'BPM', '69': 'Fill Noise', '7': 'Dynamic', '70': 'Noise 1', '71': 'Noise 2', '72': 'Noise 3', '73': 'Noise 4', - '74': 'Colortwinkles', + '74': 'Colortwinkle', '75': 'Lake', '76': 'Meteor', - '77': 'Meteor Smooth', + '77': 'Smooth Meteor', '78': 'Railway', '79': 'Ripple', '8': 'Colorloop', '80': 'Twinklefox', - '81': 'Twinklecat', - '82': 'Halloween Eyes', - '83': 'Solid Pattern', - '84': 'Solid Pattern Tri', - '85': 'Spots', - '86': 'Spots Fade', - '87': 'Glitter', - '88': 'Candle', - '89': 'Fireworks Starburst', '9': 'Rainbow', - '90': 'Fireworks 1D', - '91': 'Bouncing Balls', - '92': 'Sinelon', - '93': 'Sinelon Dual', - '94': 'Sinelon Rainbow', - '95': 'Popcorn', - '96': 'Drip', - '97': 'Plasma', - '98': 'Percent', - '99': 'Ripple Rainbow', }), 'info': dict({ - 'arch': 'esp32', + 'architecture': 'esp8266', + 'arduino_core_version': '2.4.2', 'brand': 'WLED', - 'core': 'v3.3.6-16-gcc5440f6a2', - 'freeheap': 198384, - 'fs': dict({ - 'pmt': 1718827787.0, - 't': 983, - 'u': 12, - }), - 'fxcount': 187, + 'build_type': 'bin', + 'effect_count': 81, + 'filesystem': None, + 'free_heap': 14600, 'ip': '127.0.0.1', 'leds': dict({ - 'count': 30, - 'fps': 5, - 'light_capabilities': 1, - 'max_power': 850, - 'max_segments': 32, - 'power': 515, - 'segment_light_capabilities': list([ - 1, - 1, - ]), + '__type': "", + 'repr': 'Leds(cct=False, count=30, fps=None, light_capabilities=None, max_power=850, max_segments=10, power=470, rgbw=False, wv=True, segment_light_capabilities=None)', }), - 'lip': '', 'live': False, - 'lm': '', - 'mac': 'aabbccddeeff', + 'live_ip': 'Unknown', + 'live_mode': 'Unknown', + 'mac_address': 'aabbccddeeff', 'name': 'WLED RGB Light', - 'palcount': 71, - 'product': 'FOSS', - 'udpport': 21324, - 'uptime': 966, - 'ver': '0.14.4', - 'vid': 2405180, + 'pallet_count': 50, + 'product': 'DIY light', + 'udp_port': 21324, + 'uptime': 32, + 'version': '0.8.5', + 'version_id': 1909122, + 'version_latest_beta': '0.13.0b1', + 'version_latest_stable': '0.12.0', + 'websocket': None, 'wifi': '**REDACTED**', }), 'palettes': dict({ '0': 'Default', - '1': '* Random Cycle', + '1': 'Random Cycle', '10': 'Forest', '11': 'Rainbow', '12': 'Rainbow Bands', @@ -240,18 +126,18 @@ '17': 'Yellowout', '18': 'Analogous', '19': 'Splash', - '2': '* Color 1', + '2': 'Primary Color', '20': 'Pastel', '21': 'Sunset 2', - '22': 'Beach', + '22': 'Beech', '23': 'Vintage', '24': 'Departure', '25': 'Landscape', - '26': 'Beech', + '26': 'Beach', '27': 'Sherbet', '28': 'Hult', '29': 'Hult 64', - '3': '* Colors 1&2', + '3': 'Based on Primary', '30': 'Drywet', '31': 'Jul', '32': 'Grintage', @@ -262,7 +148,7 @@ '37': 'Cyane', '38': 'Light Pink', '39': 'Autumn', - '4': '* Color Gradient', + '4': 'Set Colors', '40': 'Magenta', '41': 'Magred', '42': 'Yelmag', @@ -273,30 +159,9 @@ '47': 'Orangery', '48': 'C9', '49': 'Sakura', - '5': '* Colors Only', - '50': 'Aurora', - '51': 'Atlantica', - '52': 'C9 2', - '53': 'C9 New', - '54': 'Temperature', - '55': 'Aurora 2', - '56': 'Retro Clown', - '57': 'Candy', - '58': 'Toxy Reaf', - '59': 'Fairy Reaf', + '5': 'Based on Set', '6': 'Party', - '60': 'Semi Blue', - '61': 'Pink Candy', - '62': 'Red Reaf', - '63': 'Aqua Flash', - '64': 'Yelblu 
Hot', - '65': 'Lite Light', - '66': 'Red Flash', - '67': 'Blink Red', - '68': 'Red Shift', - '69': 'Red Tide', '7': 'Cloud', - '70': 'Candy2', '8': 'Lava', '9': 'Ocean', }), @@ -305,90 +170,30 @@ 'presets': dict({ }), 'state': dict({ - 'bri': 128, + 'brightness': 127, 'lor': 0, - 'nl': dict({ - 'dur': 60, - 'mode': 1, - 'on': False, - 'tbri': 0, + 'nightlight': dict({ + '__type': "", + 'repr': 'Nightlight(duration=60, fade=True, on=False, mode=, target_brightness=0)', }), 'on': True, - 'seg': dict({ - '0': dict({ - 'bri': 255, - 'cct': 127, - 'cln': -1, - 'col': list([ - list([ - 127, - 172, - 255, - ]), - list([ - 0, - 0, - 0, - ]), - list([ - 0, - 0, - 0, - ]), - ]), - 'fx': 0, - 'id': 0, - 'ix': 128, - 'len': 15, - 'on': True, - 'pal': 0, - 'rev': False, - 'sel': False, - 'start': 0, - 'stop': 15, - 'sx': 32, + 'playlist': -1, + 'preset': -1, + 'segments': list([ + dict({ + '__type': "", + 'repr': "Segment(brightness=127, clones=-1, color_primary=(255, 159, 0), color_secondary=(0, 0, 0), color_tertiary=(0, 0, 0), effect=Effect(effect_id=0, name='Solid'), intensity=128, length=20, on=True, palette=Palette(name='Default', palette_id=0), reverse=False, segment_id=0, selected=True, speed=32, start=0, stop=19)", }), - '1': dict({ - 'bri': 255, - 'cct': 127, - 'cln': -1, - 'col': list([ - list([ - 255, - 170, - 0, - ]), - list([ - 0, - 0, - 0, - ]), - list([ - 0, - 0, - 0, - ]), - ]), - 'fx': 3, - 'id': 1, - 'ix': 64, - 'len': 15, - 'on': True, - 'pal': 1, - 'rev': True, - 'sel': True, - 'start': 15, - 'stop': 30, - 'sx': 16, + dict({ + '__type': "", + 'repr': "Segment(brightness=127, clones=-1, color_primary=(0, 255, 123), color_secondary=(0, 0, 0), color_tertiary=(0, 0, 0), effect=Effect(effect_id=1, name='Blink'), intensity=64, length=10, on=True, palette=Palette(name='Random Cycle', palette_id=1), reverse=True, segment_id=1, selected=True, speed=16, start=20, stop=30)", }), + ]), + 'sync': dict({ + '__type': "", + 'repr': 'Sync(receive=True, send=False)', }), 'transition': 7, - 'udpn': dict({ - 'recv': True, - 'rgrp': 1, - 'send': False, - 'sgrp': 1, - }), }), }) # --- diff --git a/tests/components/wled/snapshots/test_number.ambr b/tests/components/wled/snapshots/test_number.ambr index 0fb6cff3d51..bee3e180090 100644 --- a/tests/components/wled/snapshots/test_number.ambr +++ b/tests/components/wled/snapshots/test_number.ambr @@ -67,7 +67,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp32', + 'hw_version': 'esp8266', 'id': , 'identifiers': set({ tuple( @@ -79,14 +79,13 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'FOSS', - 'model_id': None, + 'model': 'DIY light', 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.14.4', + 'sw_version': '0.8.5', 'via_device_id': None, }) # --- @@ -158,7 +157,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp32', + 'hw_version': 'esp8266', 'id': , 'identifiers': set({ tuple( @@ -170,14 +169,182 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'FOSS', - 'model_id': None, + 'model': 'DIY light', 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.14.4', + 'sw_version': '0.8.5', + 'via_device_id': None, + }) +# --- +# name: test_speed_state[number.wled_rgb_light_segment_1_intensity-42-intensity] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'WLED RGB Light Segment 1 Intensity', + 'max': 255, + 'min': 
0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.wled_rgb_light_segment_1_intensity', + 'last_changed': , + 'last_updated': , + 'state': '64', + }) +# --- +# name: test_speed_state[number.wled_rgb_light_segment_1_intensity-42-intensity].1 + EntityRegistryEntrySnapshot({ + '_display_repr': , + '_partial_repr': , + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.wled_rgb_light_segment_1_intensity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Segment 1 Intensity', + 'platform': 'wled', + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'aabbccddeeff_intensity_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_speed_state[number.wled_rgb_light_segment_1_intensity-42-intensity].2 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://127.0.0.1', + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'esp8266', + 'id': , + 'identifiers': set({ + tuple( + 'wled', + 'aabbccddeeff', + ), + }), + 'is_new': False, + 'manufacturer': 'WLED', + 'model': 'DIY light', + 'name': 'WLED RGB Light', + 'name_by_user': None, + 'suggested_area': None, + 'sw_version': '0.8.5', + 'via_device_id': None, + }) +# --- +# name: test_speed_state[number.wled_rgb_light_segment_1_speed-42-speed] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'WLED RGB Light Segment 1 Speed', + 'icon': 'mdi:speedometer', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.wled_rgb_light_segment_1_speed', + 'last_changed': , + 'last_updated': , + 'state': '16', + }) +# --- +# name: test_speed_state[number.wled_rgb_light_segment_1_speed-42-speed].1 + EntityRegistryEntrySnapshot({ + '_display_repr': , + '_partial_repr': , + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.wled_rgb_light_segment_1_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:speedometer', + 'original_name': 'Segment 1 Speed', + 'platform': 'wled', + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'aabbccddeeff_speed_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_speed_state[number.wled_rgb_light_segment_1_speed-42-speed].2 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://127.0.0.1', + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'esp8266', + 'id': , + 'identifiers': set({ + tuple( + 'wled', + 'aabbccddeeff', + ), + }), + 'is_new': False, + 'manufacturer': 'WLED', + 'model': 'DIY light', + 'name': 'WLED RGB Light', + 'name_by_user': None, + 'suggested_area': None, + 'sw_version': '0.8.5', 'via_device_id': None, }) # --- diff --git 
a/tests/components/wled/snapshots/test_select.ambr b/tests/components/wled/snapshots/test_select.ambr index 2998583f8b3..f6447f699c9 100644 --- a/tests/components/wled/snapshots/test_select.ambr +++ b/tests/components/wled/snapshots/test_select.ambr @@ -69,7 +69,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp32', + 'hw_version': 'esp8266', 'id': , 'identifiers': set({ tuple( @@ -81,14 +81,13 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'FOSS', - 'model_id': None, + 'model': 'DIY light', 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.14.4', + 'sw_version': '0.8.5', 'via_device_id': None, }) # --- @@ -97,77 +96,56 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'WLED RGB Light Segment 1 color palette', 'options': list([ - 'Default', - '* Random Cycle', - '* Color 1', - '* Colors 1&2', - '* Color Gradient', - '* Colors Only', - 'Party', - 'Cloud', - 'Lava', - 'Ocean', - 'Forest', - 'Rainbow', - 'Rainbow Bands', - 'Sunset', - 'Rivendell', - 'Breeze', - 'Red & Blue', - 'Yellowout', 'Analogous', - 'Splash', - 'Pastel', - 'Sunset 2', + 'April Night', + 'Autumn', + 'Based on Primary', + 'Based on Set', 'Beach', - 'Vintage', - 'Departure', - 'Landscape', 'Beech', - 'Sherbet', + 'Breeze', + 'C9', + 'Cloud', + 'Cyane', + 'Default', + 'Departure', + 'Drywet', + 'Fire', + 'Forest', + 'Grintage', 'Hult', 'Hult 64', - 'Drywet', - 'Jul', - 'Grintage', - 'Rewhi', - 'Tertiary', - 'Fire', 'Icefire', - 'Cyane', + 'Jul', + 'Landscape', + 'Lava', 'Light Pink', - 'Autumn', 'Magenta', 'Magred', - 'Yelmag', - 'Yelblu', + 'Ocean', 'Orange & Teal', - 'Tiamat', - 'April Night', 'Orangery', - 'C9', + 'Party', + 'Pastel', + 'Primary Color', + 'Rainbow', + 'Rainbow Bands', + 'Random Cycle', + 'Red & Blue', + 'Rewhi', + 'Rivendell', 'Sakura', - 'Aurora', - 'Atlantica', - 'C9 2', - 'C9 New', - 'Temperature', - 'Aurora 2', - 'Retro Clown', - 'Candy', - 'Toxy Reaf', - 'Fairy Reaf', - 'Semi Blue', - 'Pink Candy', - 'Red Reaf', - 'Aqua Flash', - 'Yelblu Hot', - 'Lite Light', - 'Red Flash', - 'Blink Red', - 'Red Shift', - 'Red Tide', - 'Candy2', + 'Set Colors', + 'Sherbet', + 'Splash', + 'Sunset', + 'Sunset 2', + 'Tertiary', + 'Tiamat', + 'Vintage', + 'Yelblu', + 'Yellowout', + 'Yelmag', ]), }), 'context': , @@ -175,7 +153,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '* Random Cycle', + 'state': 'Random Cycle', }) # --- # name: test_color_palette_state[rgb-select.wled_rgb_light_segment_1_color_palette-Icefire-segment-called_with0].1 @@ -185,77 +163,56 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'Default', - '* Random Cycle', - '* Color 1', - '* Colors 1&2', - '* Color Gradient', - '* Colors Only', - 'Party', - 'Cloud', - 'Lava', - 'Ocean', - 'Forest', - 'Rainbow', - 'Rainbow Bands', - 'Sunset', - 'Rivendell', - 'Breeze', - 'Red & Blue', - 'Yellowout', 'Analogous', - 'Splash', - 'Pastel', - 'Sunset 2', + 'April Night', + 'Autumn', + 'Based on Primary', + 'Based on Set', 'Beach', - 'Vintage', - 'Departure', - 'Landscape', 'Beech', - 'Sherbet', + 'Breeze', + 'C9', + 'Cloud', + 'Cyane', + 'Default', + 'Departure', + 'Drywet', + 'Fire', + 'Forest', + 'Grintage', 'Hult', 'Hult 64', - 'Drywet', - 'Jul', - 'Grintage', - 'Rewhi', - 'Tertiary', - 'Fire', 'Icefire', - 'Cyane', + 'Jul', + 'Landscape', + 'Lava', 'Light Pink', - 'Autumn', 'Magenta', 'Magred', - 'Yelmag', - 'Yelblu', + 'Ocean', 'Orange & Teal', - 'Tiamat', - 'April Night', 'Orangery', - 'C9', + 'Party', 
+ 'Pastel', + 'Primary Color', + 'Rainbow', + 'Rainbow Bands', + 'Random Cycle', + 'Red & Blue', + 'Rewhi', + 'Rivendell', 'Sakura', - 'Aurora', - 'Atlantica', - 'C9 2', - 'C9 New', - 'Temperature', - 'Aurora 2', - 'Retro Clown', - 'Candy', - 'Toxy Reaf', - 'Fairy Reaf', - 'Semi Blue', - 'Pink Candy', - 'Red Reaf', - 'Aqua Flash', - 'Yelblu Hot', - 'Lite Light', - 'Red Flash', - 'Blink Red', - 'Red Shift', - 'Red Tide', - 'Candy2', + 'Set Colors', + 'Sherbet', + 'Splash', + 'Sunset', + 'Sunset 2', + 'Tertiary', + 'Tiamat', + 'Vintage', + 'Yelblu', + 'Yellowout', + 'Yelmag', ]), }), 'config_entry_id': , @@ -298,7 +255,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp32', + 'hw_version': 'esp8266', 'id': , 'identifiers': set({ tuple( @@ -310,14 +267,13 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'FOSS', - 'model_id': None, + 'model': 'DIY light', 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.14.4', + 'sw_version': '0.8.5', 'via_device_id': None, }) # --- @@ -335,7 +291,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'Playlist 1', }) # --- # name: test_color_palette_state[rgbw-select.wled_rgbw_light_playlist-Playlist 2-playlist-called_with2].1 @@ -372,7 +328,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'playlist', - 'unique_id': 'aabbccddeeff_playlist', + 'unique_id': 'aabbccddee11_playlist', 'unit_of_measurement': None, }) # --- @@ -384,31 +340,30 @@ 'connections': set({ tuple( 'mac', - 'aa:bb:cc:dd:ee:ff', + 'aa:bb:cc:dd:ee:11', ), }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp32', + 'hw_version': 'esp8266', 'id': , 'identifiers': set({ tuple( 'wled', - 'aabbccddeeff', + 'aabbccddee11', ), }), 'is_new': False, 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'FOSS', - 'model_id': None, + 'model': 'DIY light', 'name': 'WLED RGBW Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.99.0b1', + 'sw_version': '0.8.6b4', 'via_device_id': None, }) # --- @@ -426,7 +381,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'Preset 1', }) # --- # name: test_color_palette_state[rgbw-select.wled_rgbw_light_preset-Preset 2-preset-called_with3].1 @@ -463,7 +418,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'preset', - 'unique_id': 'aabbccddeeff_preset', + 'unique_id': 'aabbccddee11_preset', 'unit_of_measurement': None, }) # --- @@ -475,31 +430,30 @@ 'connections': set({ tuple( 'mac', - 'aa:bb:cc:dd:ee:ff', + 'aa:bb:cc:dd:ee:11', ), }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp32', + 'hw_version': 'esp8266', 'id': , 'identifiers': set({ tuple( 'wled', - 'aabbccddeeff', + 'aabbccddee11', ), }), 'is_new': False, 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'FOSS', - 'model_id': None, + 'model': 'DIY light', 'name': 'WLED RGBW Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.99.0b1', + 'sw_version': '0.8.6b4', 'via_device_id': None, }) # --- diff --git a/tests/components/wled/snapshots/test_switch.ambr b/tests/components/wled/snapshots/test_switch.ambr index ee3a72ba872..6bca0a2ed3b 100644 --- a/tests/components/wled/snapshots/test_switch.ambr +++ b/tests/components/wled/snapshots/test_switch.ambr @@ -3,6 +3,7 @@ StateSnapshot({ 'attributes': 
ReadOnlyDict({ 'duration': 60, + 'fade': True, 'friendly_name': 'WLED RGB Light Nightlight', 'target_brightness': 0, }), @@ -60,7 +61,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp32', + 'hw_version': 'esp8266', 'id': , 'identifiers': set({ tuple( @@ -72,14 +73,13 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'FOSS', - 'model_id': None, + 'model': 'DIY light', 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.14.4', + 'sw_version': '0.8.5', 'via_device_id': None, }) # --- @@ -142,7 +142,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp32', + 'hw_version': 'esp8266', 'id': , 'identifiers': set({ tuple( @@ -154,14 +154,13 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'FOSS', - 'model_id': None, + 'model': 'DIY light', 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.14.4', + 'sw_version': '0.8.5', 'via_device_id': None, }) # --- @@ -225,7 +224,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp32', + 'hw_version': 'esp8266', 'id': , 'identifiers': set({ tuple( @@ -237,14 +236,13 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'FOSS', - 'model_id': None, + 'model': 'DIY light', 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.14.4', + 'sw_version': '0.8.5', 'via_device_id': None, }) # --- @@ -308,7 +306,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp32', + 'hw_version': 'esp8266', 'id': , 'identifiers': set({ tuple( @@ -320,14 +318,13 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'FOSS', - 'model_id': None, + 'model': 'DIY light', 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.14.4', + 'sw_version': '0.8.5', 'via_device_id': None, }) # --- diff --git a/tests/components/wled/test_config_flow.py b/tests/components/wled/test_config_flow.py index a1cf515a24b..a1529eda1c7 100644 --- a/tests/components/wled/test_config_flow.py +++ b/tests/components/wled/test_config_flow.py @@ -33,7 +33,9 @@ async def test_full_user_flow_implementation(hass: HomeAssistant) -> None: assert result.get("title") == "WLED RGB Light" assert result.get("type") is FlowResultType.CREATE_ENTRY + assert "data" in result assert result["data"][CONF_HOST] == "192.168.1.123" + assert "result" in result assert result["result"].unique_id == "aabbccddeeff" @@ -165,6 +167,23 @@ async def test_user_device_exists_abort( assert result.get("reason") == "already_configured" +async def test_user_with_cct_channel_abort( + hass: HomeAssistant, + mock_wled: MagicMock, +) -> None: + """Test we abort user flow if WLED device uses a CCT channel.""" + mock_wled.update.return_value.info.leds.cct = True + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_HOST: "192.168.1.123"}, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "cct_unsupported" + + @pytest.mark.usefixtures("mock_wled") async def test_zeroconf_without_mac_device_exists_abort( hass: HomeAssistant, @@ -215,6 +234,31 @@ async def test_zeroconf_with_mac_device_exists_abort( assert result.get("reason") == "already_configured" +async def test_zeroconf_with_cct_channel_abort( + hass: HomeAssistant, + 
mock_wled: MagicMock, +) -> None: + """Test we abort zeroconf flow if WLED device uses a CCT channel.""" + mock_wled.update.return_value.info.leds.cct = True + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.123"), + ip_addresses=[ip_address("192.168.1.123")], + hostname="example.local.", + name="mock_name", + port=None, + properties={CONF_MAC: "aabbccddeeff"}, + type="mock_type", + ), + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "cct_unsupported" + + async def test_options_flow( hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: diff --git a/tests/components/wled/test_init.py b/tests/components/wled/test_init.py index 9dfcabd55e3..f6f1da0d41e 100644 --- a/tests/components/wled/test_init.py +++ b/tests/components/wled/test_init.py @@ -7,6 +7,7 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest from wled import WLEDConnectionError +from homeassistant.components.wled.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -43,6 +44,7 @@ async def test_load_unload_config_entry( # Ensure everything is cleaned up nicely and are disconnected assert mock_wled.disconnect.call_count == 1 + assert not hass.data.get(DOMAIN) @patch( @@ -67,3 +69,21 @@ async def test_setting_unique_id( """Test we set unique ID if not set yet.""" assert init_integration.runtime_data assert init_integration.unique_id == "aabbccddeeff" + + +async def test_error_config_entry_with_cct_channel( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_wled: AsyncMock, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the WLED fails entry setup with a CCT channel.""" + mock_wled.update.return_value.info.leds.cct = True + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + # Ensure config entry is errored and are connected and disconnected + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + assert "has a CCT channel, which is not supported" in caplog.text diff --git a/tests/components/wled/test_light.py b/tests/components/wled/test_light.py index 58c4aa4e8c6..2b64619e306 100644 --- a/tests/components/wled/test_light.py +++ b/tests/components/wled/test_light.py @@ -1,5 +1,6 @@ """Tests for the WLED light platform.""" +import json from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -8,24 +9,14 @@ from wled import Device as WLEDDevice, WLEDConnectionError, WLEDError from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_MODE, - ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, - ATTR_MAX_COLOR_TEMP_KELVIN, - ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, - ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, DOMAIN as LIGHT_DOMAIN, - ColorMode, -) -from homeassistant.components.wled.const import ( - CONF_KEEP_MAIN_LIGHT, - DOMAIN, - SCAN_INTERVAL, ) +from homeassistant.components.wled.const import CONF_KEEP_MAIN_LIGHT, SCAN_INTERVAL from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_ICON, @@ -39,11 +30,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_object_fixture, -) 
+from tests.common import MockConfigEntry, async_fire_time_changed, load_fixture pytestmark = pytest.mark.usefixtures("init_integration") @@ -54,9 +41,9 @@ async def test_rgb_light_state( """Test the creation and values of the WLED lights.""" # First segment of the strip assert (state := hass.states.get("light.wled_rgb_light")) - assert state.attributes.get(ATTR_BRIGHTNESS) == 255 + assert state.attributes.get(ATTR_BRIGHTNESS) == 127 assert state.attributes.get(ATTR_EFFECT) == "Solid" - assert state.attributes.get(ATTR_HS_COLOR) == (218.906, 50.196) + assert state.attributes.get(ATTR_HS_COLOR) == (37.412, 100.0) assert state.attributes.get(ATTR_ICON) is None assert state.state == STATE_ON @@ -65,9 +52,9 @@ async def test_rgb_light_state( # Second segment of the strip assert (state := hass.states.get("light.wled_rgb_light_segment_1")) - assert state.attributes.get(ATTR_BRIGHTNESS) == 255 - assert state.attributes.get(ATTR_EFFECT) == "Wipe" - assert state.attributes.get(ATTR_HS_COLOR) == (40.0, 100.0) + assert state.attributes.get(ATTR_BRIGHTNESS) == 127 + assert state.attributes.get(ATTR_EFFECT) == "Blink" + assert state.attributes.get(ATTR_HS_COLOR) == (148.941, 100.0) assert state.attributes.get(ATTR_ICON) is None assert state.state == STATE_ON @@ -76,7 +63,7 @@ async def test_rgb_light_state( # Test main control of the lightstrip assert (state := hass.states.get("light.wled_rgb_light_main")) - assert state.attributes.get(ATTR_BRIGHTNESS) == 128 + assert state.attributes.get(ATTR_BRIGHTNESS) == 127 assert state.state == STATE_ON assert (entry := entity_registry.async_get("light.wled_rgb_light_main")) @@ -201,8 +188,8 @@ async def test_dynamically_handle_segments( assert not hass.states.get("light.wled_rgb_light_segment_1") return_value = mock_wled.update.return_value - mock_wled.update.return_value = WLEDDevice.from_dict( - load_json_object_fixture("rgb.json", DOMAIN) + mock_wled.update.return_value = WLEDDevice( + json.loads(load_fixture("wled/rgb.json")) ) freezer.tick(SCAN_INTERVAL) @@ -340,8 +327,6 @@ async def test_rgbw_light(hass: HomeAssistant, mock_wled: MagicMock) -> None: """Test RGBW support for WLED.""" assert (state := hass.states.get("light.wled_rgbw_light")) assert state.state == STATE_ON - assert state.attributes.get(ATTR_SUPPORTED_COLOR_MODES) == [ColorMode.RGBW] - assert state.attributes.get(ATTR_COLOR_MODE) == ColorMode.RGBW assert state.attributes.get(ATTR_RGBW_COLOR) == (255, 0, 0, 139) await hass.services.async_call( @@ -377,34 +362,3 @@ async def test_single_segment_with_keep_main_light( assert (state := hass.states.get("light.wled_rgb_light_main")) assert state.state == STATE_ON - - -@pytest.mark.parametrize("device_fixture", ["cct"]) -async def test_cct_light(hass: HomeAssistant, mock_wled: MagicMock) -> None: - """Test CCT support for WLED.""" - assert (state := hass.states.get("light.wled_cct_light")) - assert state.state == STATE_ON - assert state.attributes.get(ATTR_SUPPORTED_COLOR_MODES) == [ - ColorMode.COLOR_TEMP, - ColorMode.RGBW, - ] - assert state.attributes.get(ATTR_COLOR_MODE) == ColorMode.COLOR_TEMP - assert state.attributes.get(ATTR_MIN_COLOR_TEMP_KELVIN) == 2000 - assert state.attributes.get(ATTR_MAX_COLOR_TEMP_KELVIN) == 6535 - assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) == 2942 - - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: "light.wled_cct_light", - ATTR_COLOR_TEMP_KELVIN: 4321, - }, - blocking=True, - ) - assert mock_wled.segment.call_count == 1 - mock_wled.segment.assert_called_with( - 
cct=130, - on=True, - segment_id=0, - ) diff --git a/tests/components/wled/test_number.py b/tests/components/wled/test_number.py index 344eb03bc06..b692de37282 100644 --- a/tests/components/wled/test_number.py +++ b/tests/components/wled/test_number.py @@ -1,5 +1,6 @@ """Tests for the WLED number platform.""" +import json from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -12,13 +13,13 @@ from homeassistant.components.number import ( DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, ) -from homeassistant.components.wled.const import DOMAIN, SCAN_INTERVAL +from homeassistant.components.wled.const import SCAN_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import async_fire_time_changed, load_json_object_fixture +from tests.common import async_fire_time_changed, load_fixture pytestmark = pytest.mark.usefixtures("init_integration") @@ -127,8 +128,8 @@ async def test_speed_dynamically_handle_segments( # Test adding a segment dynamically... return_value = mock_wled.update.return_value - mock_wled.update.return_value = WLEDDevice.from_dict( - load_json_object_fixture("rgb.json", DOMAIN) + mock_wled.update.return_value = WLEDDevice( + json.loads(load_fixture("wled/rgb.json")) ) freezer.tick(SCAN_INTERVAL) diff --git a/tests/components/wled/test_select.py b/tests/components/wled/test_select.py index 364e5fc2034..380af1a286a 100644 --- a/tests/components/wled/test_select.py +++ b/tests/components/wled/test_select.py @@ -1,5 +1,6 @@ """Tests for the WLED select platform.""" +import json from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -8,13 +9,18 @@ from syrupy.assertion import SnapshotAssertion from wled import Device as WLEDDevice, WLEDConnectionError, WLEDError from homeassistant.components.select import ATTR_OPTION, DOMAIN as SELECT_DOMAIN -from homeassistant.components.wled.const import DOMAIN, SCAN_INTERVAL -from homeassistant.const import ATTR_ENTITY_ID, SERVICE_SELECT_OPTION, STATE_UNAVAILABLE +from homeassistant.components.wled.const import SCAN_INTERVAL +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_SELECT_OPTION, + STATE_UNAVAILABLE, + STATE_UNKNOWN, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import async_fire_time_changed, load_json_object_fixture +from tests.common import async_fire_time_changed, load_fixture pytestmark = pytest.mark.usefixtures("init_integration") @@ -129,8 +135,8 @@ async def test_color_palette_dynamically_handle_segments( assert not hass.states.get("select.wled_rgb_light_segment_1_color_palette") return_value = mock_wled.update.return_value - mock_wled.update.return_value = WLEDDevice.from_dict( - load_json_object_fixture("rgb.json", DOMAIN) + mock_wled.update.return_value = WLEDDevice( + json.loads(load_fixture("wled/rgb.json")) ) freezer.tick(SCAN_INTERVAL) @@ -142,7 +148,7 @@ async def test_color_palette_dynamically_handle_segments( assert ( segment1 := hass.states.get("select.wled_rgb_light_segment_1_color_palette") ) - assert segment1.state == "* Random Cycle" + assert segment1.state == "Random Cycle" # Test adding if segment shows up again, including the master entity mock_wled.update.return_value = 
return_value @@ -168,3 +174,39 @@ async def test_playlist_unavailable_without_playlists(hass: HomeAssistant) -> No """Test WLED playlist entity is unavailable when playlists are not available.""" assert (state := hass.states.get("select.wled_rgb_light_playlist")) assert state.state == STATE_UNAVAILABLE + + +@pytest.mark.parametrize("device_fixture", ["rgbw"]) +async def test_old_style_preset_active( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_wled: MagicMock, +) -> None: + """Test unknown preset returned (when old style/unknown) preset is active.""" + # Set device preset state to a random number + mock_wled.update.return_value.state.preset = 99 + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert (state := hass.states.get("select.wled_rgbw_light_preset")) + assert state.state == STATE_UNKNOWN + + +@pytest.mark.parametrize("device_fixture", ["rgbw"]) +async def test_old_style_playlist_active( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_wled: MagicMock, +) -> None: + """Test when old style playlist cycle is active.""" + # Set device playlist to 0, which meant "cycle" previously. + mock_wled.update.return_value.state.playlist = 0 + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert (state := hass.states.get("select.wled_rgbw_light_playlist")) + assert state.state == STATE_UNKNOWN diff --git a/tests/components/wled/test_sensor.py b/tests/components/wled/test_sensor.py index 8bd5431cf59..319622e7cb3 100644 --- a/tests/components/wled/test_sensor.py +++ b/tests/components/wled/test_sensor.py @@ -44,7 +44,7 @@ async def test_sensors( == UnitOfElectricCurrent.MILLIAMPERE ) assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.CURRENT - assert state.state == "515" + assert state.state == "470" assert ( entry := entity_registry.async_get("sensor.wled_rgb_light_estimated_current") @@ -55,7 +55,7 @@ async def test_sensors( assert (state := hass.states.get("sensor.wled_rgb_light_uptime")) assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TIMESTAMP assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.state == "2019-11-11T08:54:26+00:00" + assert state.state == "2019-11-11T09:10:00+00:00" assert (entry := entity_registry.async_get("sensor.wled_rgb_light_uptime")) assert entry.unique_id == "aabbccddeeff_uptime" @@ -64,7 +64,7 @@ async def test_sensors( assert (state := hass.states.get("sensor.wled_rgb_light_free_memory")) assert state.attributes.get(ATTR_ICON) is None assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfInformation.BYTES - assert state.state == "198384" + assert state.state == "14600" assert entry.entity_category is EntityCategory.DIAGNOSTIC assert (entry := entity_registry.async_get("sensor.wled_rgb_light_free_memory")) @@ -74,7 +74,7 @@ async def test_sensors( assert (state := hass.states.get("sensor.wled_rgb_light_wi_fi_signal")) assert state.attributes.get(ATTR_ICON) is None assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "100" + assert state.state == "76" assert entry.entity_category is EntityCategory.DIAGNOSTIC assert (entry := entity_registry.async_get("sensor.wled_rgb_light_wi_fi_signal")) @@ -87,7 +87,7 @@ async def test_sensors( state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SIGNAL_STRENGTH_DECIBELS_MILLIWATT ) - assert state.state == "-43" + assert state.state == "-62" assert (entry := 
entity_registry.async_get("sensor.wled_rgb_light_wi_fi_rssi")) assert entry.unique_id == "aabbccddeeff_wifi_rssi" diff --git a/tests/components/wled/test_switch.py b/tests/components/wled/test_switch.py index 48331ffa9cc..6dfd62e363f 100644 --- a/tests/components/wled/test_switch.py +++ b/tests/components/wled/test_switch.py @@ -1,5 +1,6 @@ """Tests for the WLED switch platform.""" +import json from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -8,7 +9,7 @@ from syrupy.assertion import SnapshotAssertion from wled import Device as WLEDDevice, WLEDConnectionError, WLEDError from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.components.wled.const import DOMAIN, SCAN_INTERVAL +from homeassistant.components.wled.const import SCAN_INTERVAL from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -21,7 +22,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import async_fire_time_changed, load_json_object_fixture +from tests.common import async_fire_time_changed, load_fixture pytestmark = pytest.mark.usefixtures("init_integration") @@ -143,8 +144,8 @@ async def test_switch_dynamically_handle_segments( # Test adding a segment dynamically... return_value = mock_wled.update.return_value - mock_wled.update.return_value = WLEDDevice.from_dict( - load_json_object_fixture("rgb.json", DOMAIN) + mock_wled.update.return_value = WLEDDevice( + json.loads(load_fixture("wled/rgb.json")) ) freezer.tick(SCAN_INTERVAL) diff --git a/tests/components/wled/test_update.py b/tests/components/wled/test_update.py index a27aa918385..c576cdf16f9 100644 --- a/tests/components/wled/test_update.py +++ b/tests/components/wled/test_update.py @@ -2,9 +2,8 @@ from unittest.mock import MagicMock -from freezegun.api import FrozenDateTimeFactory import pytest -from wled import Releases, WLEDError +from wled import WLEDError from homeassistant.components.update import ( ATTR_INSTALLED_VERSION, @@ -17,7 +16,6 @@ from homeassistant.components.update import ( UpdateDeviceClass, UpdateEntityFeature, ) -from homeassistant.components.wled.const import RELEASES_SCAN_INTERVAL from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, @@ -33,8 +31,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import async_fire_time_changed - pytestmark = pytest.mark.usefixtures("init_integration") @@ -49,12 +45,12 @@ async def test_update_available( state.attributes[ATTR_ENTITY_PICTURE] == "https://brands.home-assistant.io/_/wled/icon.png" ) - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.14.4" - assert state.attributes[ATTR_LATEST_VERSION] == "0.99.0" + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.8.5" + assert state.attributes[ATTR_LATEST_VERSION] == "0.12.0" assert state.attributes[ATTR_RELEASE_SUMMARY] is None assert ( state.attributes[ATTR_RELEASE_URL] - == "https://github.com/Aircoookie/WLED/releases/tag/v0.99.0" + == "https://github.com/Aircoookie/WLED/releases/tag/v0.12.0" ) assert ( state.attributes[ATTR_SUPPORTED_FEATURES] @@ -68,26 +64,15 @@ async def test_update_available( assert entry.entity_category is EntityCategory.CONFIG +@pytest.mark.parametrize("device_fixture", ["rgb_no_update"]) async def test_update_information_available( - hass: HomeAssistant, - freezer: 
FrozenDateTimeFactory, - entity_registry: er.EntityRegistry, - mock_wled_releases: MagicMock, + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test having no update information available at all.""" - mock_wled_releases.releases.return_value = Releases( - beta=None, - stable=None, - ) - - freezer.tick(RELEASES_SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert (state := hass.states.get("update.wled_rgb_light_firmware")) assert state.attributes.get(ATTR_DEVICE_CLASS) == UpdateDeviceClass.FIRMWARE assert state.state == STATE_UNKNOWN - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.14.4" + assert state.attributes[ATTR_INSTALLED_VERSION] is None assert state.attributes[ATTR_LATEST_VERSION] is None assert state.attributes[ATTR_RELEASE_SUMMARY] is None assert state.attributes[ATTR_RELEASE_URL] is None @@ -113,12 +98,12 @@ async def test_no_update_available( assert (state := hass.states.get("update.wled_websocket_firmware")) assert state.state == STATE_OFF assert state.attributes.get(ATTR_DEVICE_CLASS) == UpdateDeviceClass.FIRMWARE - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.99.0" - assert state.attributes[ATTR_LATEST_VERSION] == "0.99.0" + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.12.0-b2" + assert state.attributes[ATTR_LATEST_VERSION] == "0.12.0-b2" assert state.attributes[ATTR_RELEASE_SUMMARY] is None assert ( state.attributes[ATTR_RELEASE_URL] - == "https://github.com/Aircoookie/WLED/releases/tag/v0.99.0" + == "https://github.com/Aircoookie/WLED/releases/tag/v0.12.0-b2" ) assert ( state.attributes[ATTR_SUPPORTED_FEATURES] @@ -166,8 +151,8 @@ async def test_update_stay_stable( """ assert (state := hass.states.get("update.wled_rgb_light_firmware")) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.14.4" - assert state.attributes[ATTR_LATEST_VERSION] == "0.99.0" + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.8.5" + assert state.attributes[ATTR_LATEST_VERSION] == "0.12.0" await hass.services.async_call( UPDATE_DOMAIN, @@ -176,7 +161,7 @@ async def test_update_stay_stable( blocking=True, ) assert mock_wled.upgrade.call_count == 1 - mock_wled.upgrade.assert_called_with(version="0.99.0") + mock_wled.upgrade.assert_called_with(version="0.12.0") @pytest.mark.parametrize("device_fixture", ["rgbw"]) @@ -192,8 +177,8 @@ async def test_update_beta_to_stable( """ assert (state := hass.states.get("update.wled_rgbw_light_firmware")) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.99.0b1" - assert state.attributes[ATTR_LATEST_VERSION] == "0.99.0" + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.8.6b4" + assert state.attributes[ATTR_LATEST_VERSION] == "0.8.6" await hass.services.async_call( UPDATE_DOMAIN, @@ -202,7 +187,7 @@ async def test_update_beta_to_stable( blocking=True, ) assert mock_wled.upgrade.call_count == 1 - mock_wled.upgrade.assert_called_with(version="0.99.0") + mock_wled.upgrade.assert_called_with(version="0.8.6") @pytest.mark.parametrize("device_fixture", ["rgb_single_segment"]) @@ -217,8 +202,8 @@ async def test_update_stay_beta( """ assert (state := hass.states.get("update.wled_rgb_light_firmware")) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0b4" - assert state.attributes[ATTR_LATEST_VERSION] == "1.0.0b5" + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.8.6b1" + assert state.attributes[ATTR_LATEST_VERSION] == "0.8.6b2" await hass.services.async_call( 
UPDATE_DOMAIN, @@ -227,4 +212,4 @@ async def test_update_stay_beta( blocking=True, ) assert mock_wled.upgrade.call_count == 1 - mock_wled.upgrade.assert_called_with(version="1.0.0b5") + mock_wled.upgrade.assert_called_with(version="0.8.6b2") diff --git a/tests/components/wmspro/__init__.py b/tests/components/wmspro/__init__.py deleted file mode 100644 index fee2fc64849..00000000000 --- a/tests/components/wmspro/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Tests for the wmspro integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_config_entry( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, -) -> bool: - """Set up a config entry.""" - mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - return result diff --git a/tests/components/wmspro/conftest.py b/tests/components/wmspro/conftest.py deleted file mode 100644 index 4b0e7eb4fef..00000000000 --- a/tests/components/wmspro/conftest.py +++ /dev/null @@ -1,127 +0,0 @@ -"""Common fixtures for the wmspro tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.wmspro.const import DOMAIN -from homeassistant.const import CONF_HOST - -from tests.common import MockConfigEntry, load_json_object_fixture - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Return a dummy config entry.""" - return MockConfigEntry( - title="WebControl", - domain=DOMAIN, - data={CONF_HOST: "webcontrol"}, - ) - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.wmspro.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_hub_ping() -> Generator[AsyncMock]: - """Override WebControlPro.ping.""" - with patch( - "wmspro.webcontrol.WebControlPro.ping", - return_value=True, - ) as mock_hub_ping: - yield mock_hub_ping - - -@pytest.fixture -def mock_hub_refresh() -> Generator[AsyncMock]: - """Override WebControlPro.refresh.""" - with patch( - "wmspro.webcontrol.WebControlPro.refresh", - return_value=True, - ) as mock_hub_refresh: - yield mock_hub_refresh - - -@pytest.fixture -def mock_hub_configuration_test() -> Generator[AsyncMock]: - """Override WebControlPro.configuration.""" - with patch( - "wmspro.webcontrol.WebControlPro._getConfiguration", - return_value=load_json_object_fixture("example_config_test.json", DOMAIN), - ) as mock_hub_configuration: - yield mock_hub_configuration - - -@pytest.fixture -def mock_hub_configuration_prod() -> Generator[AsyncMock]: - """Override WebControlPro._getConfiguration.""" - with patch( - "wmspro.webcontrol.WebControlPro._getConfiguration", - return_value=load_json_object_fixture("example_config_prod.json", DOMAIN), - ) as mock_hub_configuration: - yield mock_hub_configuration - - -@pytest.fixture -def mock_hub_status_prod_awning() -> Generator[AsyncMock]: - """Override WebControlPro._getStatus.""" - with patch( - "wmspro.webcontrol.WebControlPro._getStatus", - return_value=load_json_object_fixture( - "example_status_prod_awning.json", DOMAIN - ), - ) as mock_dest_refresh: - yield mock_dest_refresh - - -@pytest.fixture -def mock_hub_status_prod_dimmer() -> Generator[AsyncMock]: - """Override WebControlPro._getStatus.""" - with patch( - "wmspro.webcontrol.WebControlPro._getStatus", - 
return_value=load_json_object_fixture( - "example_status_prod_dimmer.json", DOMAIN - ), - ) as mock_dest_refresh: - yield mock_dest_refresh - - -@pytest.fixture -def mock_dest_refresh() -> Generator[AsyncMock]: - """Override Destination.refresh.""" - with patch( - "wmspro.destination.Destination.refresh", - return_value=True, - ) as mock_dest_refresh: - yield mock_dest_refresh - - -@pytest.fixture -def mock_action_call() -> Generator[AsyncMock]: - """Override Action.__call__.""" - - async def fake_call(self, **kwargs): - self._update_params(kwargs) - - with patch( - "wmspro.action.Action.__call__", - fake_call, - ) as mock_action_call: - yield mock_action_call - - -@pytest.fixture -def mock_scene_call() -> Generator[AsyncMock]: - """Override Scene.__call__.""" - with patch( - "wmspro.scene.Scene.__call__", - ) as mock_scene_call: - yield mock_scene_call diff --git a/tests/components/wmspro/fixtures/example_config_prod.json b/tests/components/wmspro/fixtures/example_config_prod.json deleted file mode 100644 index 6e313b566f7..00000000000 --- a/tests/components/wmspro/fixtures/example_config_prod.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "command": "getConfiguration", - "protocolVersion": "1.0.0", - "destinations": [ - { - "id": 58717, - "animationType": 1, - "names": ["Markise", "", "", ""], - "actions": [ - { - "id": 0, - "actionType": 0, - "actionDescription": 0, - "minValue": 0, - "maxValue": 100 - }, - { - "id": 16, - "actionType": 6, - "actionDescription": 12 - }, - { - "id": 22, - "actionType": 8, - "actionDescription": 13 - } - ] - }, - { - "id": 97358, - "animationType": 6, - "names": ["Licht", "", "", ""], - "actions": [ - { - "id": 0, - "actionType": 0, - "actionDescription": 8, - "minValue": 0, - "maxValue": 100 - }, - { - "id": 17, - "actionType": 6, - "actionDescription": 12 - }, - { - "id": 20, - "actionType": 4, - "actionDescription": 6 - }, - { - "id": 22, - "actionType": 8, - "actionDescription": 13 - } - ] - } - ], - "rooms": [ - { - "id": 19239, - "name": "Terrasse", - "destinations": [58717, 97358], - "scenes": [687471, 765095] - } - ], - "scenes": [ - { - "id": 687471, - "names": ["Licht an", "", "", ""] - }, - { - "id": 765095, - "names": ["Licht aus", "", "", ""] - } - ] -} diff --git a/tests/components/wmspro/fixtures/example_config_test.json b/tests/components/wmspro/fixtures/example_config_test.json deleted file mode 100644 index 1bb63e089ad..00000000000 --- a/tests/components/wmspro/fixtures/example_config_test.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "command": "getConfiguration", - "protocolVersion": "1.0.0", - "destinations": [ - { - "id": 17776, - "animationType": 0, - "names": ["Küche", "", "", ""], - "actions": [ - { - "id": 0, - "actionType": 0, - "actionDescription": 2, - "minValue": 0, - "maxValue": 100 - }, - { - "id": 6, - "actionType": 2, - "actionDescription": 3, - "minValue": -127, - "maxValue": 127 - }, - { - "id": 16, - "actionType": 6, - "actionDescription": 12 - }, - { - "id": 22, - "actionType": 8, - "actionDescription": 13 - }, - { - "id": 23, - "actionType": 7, - "actionDescription": 12 - } - ] - }, - { - "id": 200951, - "animationType": 999, - "names": ["Aktor Potentialfrei", "", "", ""], - "actions": [ - { - "id": 22, - "actionType": 8, - "actionDescription": 13 - }, - { - "id": 26, - "actionType": 9, - "actionDescription": 999, - "minValue": 0, - "maxValue": 16 - } - ] - } - ], - "rooms": [ - { - "id": 42581, - "name": "Raum 0", - "destinations": [17776, 116682, 194367, 200951], - "scenes": [688966] - } - ], - "scenes": [ - { - "id": 688966, 
- "names": ["Gute Nacht", "", "", ""] - } - ] -} diff --git a/tests/components/wmspro/fixtures/example_status_prod_awning.json b/tests/components/wmspro/fixtures/example_status_prod_awning.json deleted file mode 100644 index 6ca697a4532..00000000000 --- a/tests/components/wmspro/fixtures/example_status_prod_awning.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "command": "getStatus", - "protocolVersion": "1.0.0", - "details": [ - { - "destinationId": 58717, - "data": { - "drivingCause": 0, - "heartbeatError": false, - "blocking": false, - "productData": [ - { - "actionId": 0, - "value": { - "percentage": 100 - } - } - ] - } - } - ] -} diff --git a/tests/components/wmspro/fixtures/example_status_prod_dimmer.json b/tests/components/wmspro/fixtures/example_status_prod_dimmer.json deleted file mode 100644 index 675549f2457..00000000000 --- a/tests/components/wmspro/fixtures/example_status_prod_dimmer.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "command": "getStatus", - "protocolVersion": "1.0.0", - "details": [ - { - "destinationId": 97358, - "data": { - "drivingCause": 0, - "heartbeatError": false, - "blocking": false, - "productData": [ - { - "actionId": 0, - "value": { - "percentage": 0 - } - }, - { - "actionId": 20, - "value": { - "onOffState": false - } - } - ] - } - } - ] -} diff --git a/tests/components/wmspro/snapshots/test_cover.ambr b/tests/components/wmspro/snapshots/test_cover.ambr deleted file mode 100644 index 0456f074d49..00000000000 --- a/tests/components/wmspro/snapshots/test_cover.ambr +++ /dev/null @@ -1,50 +0,0 @@ -# serializer version: 1 -# name: test_cover_device - DeviceRegistryEntrySnapshot({ - 'area_id': 'terrasse', - 'config_entries': , - 'configuration_url': 'http://webcontrol/control', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'wmspro', - '58717', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'WAREMA Renkhoff SE', - 'model': 'Awning', - 'model_id': None, - 'name': 'Markise', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': '58717', - 'suggested_area': 'Terrasse', - 'sw_version': None, - 'via_device_id': , - }) -# --- -# name: test_cover_update - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by WMS WebControl pro API', - 'current_position': 0, - 'device_class': 'awning', - 'friendly_name': 'Markise', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.markise', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- diff --git a/tests/components/wmspro/snapshots/test_diagnostics.ambr b/tests/components/wmspro/snapshots/test_diagnostics.ambr deleted file mode 100644 index 00cb62e18c4..00000000000 --- a/tests/components/wmspro/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,244 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics - dict({ - 'config': dict({ - 'command': 'getConfiguration', - 'destinations': list([ - dict({ - 'actions': list([ - dict({ - 'actionDescription': 0, - 'actionType': 0, - 'id': 0, - 'maxValue': 100, - 'minValue': 0, - }), - dict({ - 'actionDescription': 12, - 'actionType': 6, - 'id': 16, - }), - dict({ - 'actionDescription': 13, - 'actionType': 8, - 'id': 22, - }), - ]), - 'animationType': 1, - 'id': 58717, - 'names': list([ - 'Markise', - '', - '', - '', - ]), - }), - dict({ - 'actions': list([ - dict({ - 'actionDescription': 8, - 'actionType': 0, - 'id': 0, - 'maxValue': 100, - 'minValue': 0, - }), - dict({ - 
'actionDescription': 12, - 'actionType': 6, - 'id': 17, - }), - dict({ - 'actionDescription': 6, - 'actionType': 4, - 'id': 20, - }), - dict({ - 'actionDescription': 13, - 'actionType': 8, - 'id': 22, - }), - ]), - 'animationType': 6, - 'id': 97358, - 'names': list([ - 'Licht', - '', - '', - '', - ]), - }), - ]), - 'protocolVersion': '1.0.0', - 'rooms': list([ - dict({ - 'destinations': list([ - 58717, - 97358, - ]), - 'id': 19239, - 'name': 'Terrasse', - 'scenes': list([ - 687471, - 765095, - ]), - }), - ]), - 'scenes': list([ - dict({ - 'id': 687471, - 'names': list([ - 'Licht an', - '', - '', - '', - ]), - }), - dict({ - 'id': 765095, - 'names': list([ - 'Licht aus', - '', - '', - '', - ]), - }), - ]), - }), - 'dests': dict({ - '58717': dict({ - 'actions': dict({ - '0': dict({ - 'actionDescription': 'AwningDrive', - 'actionType': 'Percentage', - 'attrs': dict({ - 'maxValue': 100, - 'minValue': 0, - }), - 'id': 0, - 'params': dict({ - }), - }), - '16': dict({ - 'actionDescription': 'ManualCommand', - 'actionType': 'Stop', - 'attrs': dict({ - }), - 'id': 16, - 'params': dict({ - }), - }), - '22': dict({ - 'actionDescription': 'Identify', - 'actionType': 'Identify', - 'attrs': dict({ - }), - 'id': 22, - 'params': dict({ - }), - }), - }), - 'animationType': 'Awning', - 'available': True, - 'blocking': None, - 'drivingCause': 'Unknown', - 'heartbeatError': None, - 'id': 58717, - 'name': 'Markise', - 'room': dict({ - '19239': 'Terrasse', - }), - 'status': dict({ - }), - 'unknownProducts': dict({ - }), - }), - '97358': dict({ - 'actions': dict({ - '0': dict({ - 'actionDescription': 'LightDimming', - 'actionType': 'Percentage', - 'attrs': dict({ - 'maxValue': 100, - 'minValue': 0, - }), - 'id': 0, - 'params': dict({ - }), - }), - '17': dict({ - 'actionDescription': 'ManualCommand', - 'actionType': 'Stop', - 'attrs': dict({ - }), - 'id': 17, - 'params': dict({ - }), - }), - '20': dict({ - 'actionDescription': 'LightSwitch', - 'actionType': 'Switch', - 'attrs': dict({ - }), - 'id': 20, - 'params': dict({ - }), - }), - '22': dict({ - 'actionDescription': 'Identify', - 'actionType': 'Identify', - 'attrs': dict({ - }), - 'id': 22, - 'params': dict({ - }), - }), - }), - 'animationType': 'Dimmer', - 'available': True, - 'blocking': None, - 'drivingCause': 'Unknown', - 'heartbeatError': None, - 'id': 97358, - 'name': 'Licht', - 'room': dict({ - '19239': 'Terrasse', - }), - 'status': dict({ - }), - 'unknownProducts': dict({ - }), - }), - }), - 'host': 'webcontrol', - 'rooms': dict({ - '19239': dict({ - 'destinations': dict({ - '58717': 'Markise', - '97358': 'Licht', - }), - 'id': 19239, - 'name': 'Terrasse', - 'scenes': dict({ - '687471': 'Licht an', - '765095': 'Licht aus', - }), - }), - }), - 'scenes': dict({ - '687471': dict({ - 'id': 687471, - 'name': 'Licht an', - 'room': dict({ - '19239': 'Terrasse', - }), - }), - '765095': dict({ - 'id': 765095, - 'name': 'Licht aus', - 'room': dict({ - '19239': 'Terrasse', - }), - }), - }), - }) -# --- diff --git a/tests/components/wmspro/snapshots/test_light.ambr b/tests/components/wmspro/snapshots/test_light.ambr deleted file mode 100644 index d13e444645d..00000000000 --- a/tests/components/wmspro/snapshots/test_light.ambr +++ /dev/null @@ -1,53 +0,0 @@ -# serializer version: 1 -# name: test_light_device - DeviceRegistryEntrySnapshot({ - 'area_id': 'terrasse', - 'config_entries': , - 'configuration_url': 'http://webcontrol/control', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ 
- tuple( - 'wmspro', - '97358', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'WAREMA Renkhoff SE', - 'model': 'Dimmer', - 'model_id': None, - 'name': 'Licht', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': '97358', - 'suggested_area': 'Terrasse', - 'sw_version': None, - 'via_device_id': , - }) -# --- -# name: test_light_update - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by WMS WebControl pro API', - 'brightness': None, - 'color_mode': None, - 'friendly_name': 'Licht', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.licht', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/wmspro/snapshots/test_scene.ambr b/tests/components/wmspro/snapshots/test_scene.ambr deleted file mode 100644 index 940d4e31e83..00000000000 --- a/tests/components/wmspro/snapshots/test_scene.ambr +++ /dev/null @@ -1,47 +0,0 @@ -# serializer version: 1 -# name: test_scene_activate - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by WMS WebControl pro API', - 'friendly_name': 'Raum 0 Gute Nacht', - }), - 'context': , - 'entity_id': 'scene.raum_0_gute_nacht', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_scene_room_device - DeviceRegistryEntrySnapshot({ - 'area_id': 'raum_0', - 'config_entries': , - 'configuration_url': 'http://webcontrol/control', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'wmspro', - '42581', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'WAREMA Renkhoff SE', - 'model': 'Room', - 'model_id': None, - 'name': 'Raum 0', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': '42581', - 'suggested_area': 'Raum 0', - 'sw_version': None, - 'via_device_id': , - }) -# --- diff --git a/tests/components/wmspro/test_config_flow.py b/tests/components/wmspro/test_config_flow.py deleted file mode 100644 index 782dc051c8c..00000000000 --- a/tests/components/wmspro/test_config_flow.py +++ /dev/null @@ -1,394 +0,0 @@ -"""Test the wmspro config flow.""" - -from unittest.mock import AsyncMock, patch - -import aiohttp - -from homeassistant.components.dhcp import DhcpServiceInfo -from homeassistant.components.wmspro.const import DOMAIN -from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER, ConfigEntryState -from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from . 
import setup_config_entry - -from tests.common import MockConfigEntry - - -async def test_config_flow( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_hub_refresh: AsyncMock -) -> None: - """Test we can handle user-input to create a config entry.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with patch( - "wmspro.webcontrol.WebControlPro.ping", - return_value=True, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.2.3.4", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "1.2.3.4" - assert result["data"] == { - CONF_HOST: "1.2.3.4", - } - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_config_flow_from_dhcp( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_hub_refresh: AsyncMock -) -> None: - """Test we can handle DHCP discovery to create a config entry.""" - info = DhcpServiceInfo( - ip="1.2.3.4", hostname="webcontrol", macaddress="00:11:22:33:44:55" - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_DHCP}, data=info - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with patch( - "wmspro.webcontrol.WebControlPro.ping", - return_value=True, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.2.3.4", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "1.2.3.4" - assert result["data"] == { - CONF_HOST: "1.2.3.4", - } - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_config_flow_from_dhcp_add_mac( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_hub_refresh: AsyncMock, -) -> None: - """Test we can use DHCP discovery to add MAC address to a config entry.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with patch( - "wmspro.webcontrol.WebControlPro.ping", - return_value=True, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.2.3.4", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "1.2.3.4" - assert result["data"] == { - CONF_HOST: "1.2.3.4", - } - assert len(mock_setup_entry.mock_calls) == 1 - assert hass.config_entries.async_entries(DOMAIN)[0].unique_id is None - - info = DhcpServiceInfo( - ip="1.2.3.4", hostname="webcontrol", macaddress="00:11:22:33:44:55" - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_DHCP}, data=info - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" - - -async def test_config_flow_from_dhcp_ip_update( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_hub_refresh: AsyncMock, -) -> None: - """Test we can use DHCP discovery to update IP in a config entry.""" - info = DhcpServiceInfo( - ip="1.2.3.4", hostname="webcontrol", macaddress="00:11:22:33:44:55" - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_DHCP}, data=info - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with patch( - 
"wmspro.webcontrol.WebControlPro.ping", - return_value=True, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.2.3.4", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "1.2.3.4" - assert result["data"] == { - CONF_HOST: "1.2.3.4", - } - assert len(mock_setup_entry.mock_calls) == 1 - assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" - - info = DhcpServiceInfo( - ip="5.6.7.8", hostname="webcontrol", macaddress="00:11:22:33:44:55" - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_DHCP}, data=info - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" - assert hass.config_entries.async_entries(DOMAIN)[0].data[CONF_HOST] == "5.6.7.8" - - -async def test_config_flow_from_dhcp_no_update( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_hub_refresh: AsyncMock, -) -> None: - """Test we do not use DHCP discovery to overwrite hostname with IP in config entry.""" - info = DhcpServiceInfo( - ip="1.2.3.4", hostname="webcontrol", macaddress="00:11:22:33:44:55" - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_DHCP}, data=info - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with patch( - "wmspro.webcontrol.WebControlPro.ping", - return_value=True, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "webcontrol", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "webcontrol" - assert result["data"] == { - CONF_HOST: "webcontrol", - } - assert len(mock_setup_entry.mock_calls) == 1 - assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" - - info = DhcpServiceInfo( - ip="5.6.7.8", hostname="webcontrol", macaddress="00:11:22:33:44:55" - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_DHCP}, data=info - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" - assert hass.config_entries.async_entries(DOMAIN)[0].data[CONF_HOST] == "webcontrol" - - -async def test_config_flow_ping_failed( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_hub_refresh: AsyncMock -) -> None: - """Test we handle ping failed error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - with patch( - "wmspro.webcontrol.WebControlPro.ping", - return_value=False, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.2.3.4", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - with patch( - "wmspro.webcontrol.WebControlPro.ping", - return_value=True, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.2.3.4", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "1.2.3.4" - assert result["data"] == { - CONF_HOST: "1.2.3.4", - } - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_config_flow_cannot_connect( - hass: HomeAssistant, mock_setup_entry: AsyncMock, 
mock_hub_refresh: AsyncMock -) -> None: - """Test we handle cannot connect error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - with patch( - "wmspro.webcontrol.WebControlPro.ping", - side_effect=aiohttp.ClientError, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.2.3.4", - }, - ) - - assert result["type"] == FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - with patch( - "wmspro.webcontrol.WebControlPro.ping", - return_value=True, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.2.3.4", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "1.2.3.4" - assert result["data"] == { - CONF_HOST: "1.2.3.4", - } - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_config_flow_unknown_error( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_hub_refresh: AsyncMock -) -> None: - """Test we handle an unknown error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - with patch( - "wmspro.webcontrol.WebControlPro.ping", - side_effect=RuntimeError, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.2.3.4", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "unknown"} - - with patch( - "wmspro.webcontrol.WebControlPro.ping", - return_value=True, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.2.3.4", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "1.2.3.4" - assert result["data"] == { - CONF_HOST: "1.2.3.4", - } - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_config_flow_duplicate_entries( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_dest_refresh: AsyncMock, - mock_hub_configuration_test: AsyncMock, -) -> None: - """Test we prevent creation of duplicate config entries.""" - await setup_config_entry(hass, mock_config_entry) - assert mock_config_entry.state is ConfigEntryState.LOADED - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "5.6.7.8", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - - -async def test_config_flow_multiple_entries( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_dest_refresh: AsyncMock, - mock_hub_configuration_test: AsyncMock, - mock_hub_configuration_prod: AsyncMock, -) -> None: - """Test we allow creation of different config entries.""" - await setup_config_entry(hass, mock_config_entry) - assert mock_config_entry.state is ConfigEntryState.LOADED - - mock_hub_configuration_prod.return_value = mock_hub_configuration_test.return_value - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "5.6.7.8", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "5.6.7.8" - assert result["data"] == { - 
CONF_HOST: "5.6.7.8", - } - assert len(hass.config_entries.async_entries(DOMAIN)) == 2 diff --git a/tests/components/wmspro/test_cover.py b/tests/components/wmspro/test_cover.py deleted file mode 100644 index 2c20ef51b64..00000000000 --- a/tests/components/wmspro/test_cover.py +++ /dev/null @@ -1,227 +0,0 @@ -"""Test the wmspro cover support.""" - -from unittest.mock import AsyncMock, patch - -from freezegun.api import FrozenDateTimeFactory -from syrupy import SnapshotAssertion - -from homeassistant.components.wmspro.const import DOMAIN -from homeassistant.components.wmspro.cover import SCAN_INTERVAL -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_CLOSE_COVER, - SERVICE_OPEN_COVER, - SERVICE_SET_COVER_POSITION, - SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_OPEN, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from . import setup_config_entry - -from tests.common import MockConfigEntry, async_fire_time_changed - - -async def test_cover_device( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_hub_configuration_prod: AsyncMock, - mock_hub_status_prod_awning: AsyncMock, - device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test that a cover device is created correctly.""" - assert await setup_config_entry(hass, mock_config_entry) - assert len(mock_hub_ping.mock_calls) == 1 - assert len(mock_hub_configuration_prod.mock_calls) == 1 - assert len(mock_hub_status_prod_awning.mock_calls) == 2 - - device_entry = device_registry.async_get_device(identifiers={(DOMAIN, "58717")}) - assert device_entry is not None - assert device_entry == snapshot - - -async def test_cover_update( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_hub_configuration_prod: AsyncMock, - mock_hub_status_prod_awning: AsyncMock, - freezer: FrozenDateTimeFactory, - snapshot: SnapshotAssertion, -) -> None: - """Test that a cover entity is created and updated correctly.""" - assert await setup_config_entry(hass, mock_config_entry) - assert len(mock_hub_ping.mock_calls) == 1 - assert len(mock_hub_configuration_prod.mock_calls) == 1 - assert len(mock_hub_status_prod_awning.mock_calls) == 2 - - entity = hass.states.get("cover.markise") - assert entity is not None - assert entity == snapshot - - # Move time to next update - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - assert len(mock_hub_status_prod_awning.mock_calls) >= 3 - - -async def test_cover_open_and_close( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_hub_configuration_prod: AsyncMock, - mock_hub_status_prod_awning: AsyncMock, - mock_action_call: AsyncMock, -) -> None: - """Test that a cover entity is opened and closed correctly.""" - assert await setup_config_entry(hass, mock_config_entry) - assert len(mock_hub_ping.mock_calls) == 1 - assert len(mock_hub_configuration_prod.mock_calls) == 1 - assert len(mock_hub_status_prod_awning.mock_calls) >= 1 - - entity = hass.states.get("cover.markise") - assert entity is not None - assert entity.state == STATE_CLOSED - assert entity.attributes["current_position"] == 0 - - with patch( - "wmspro.destination.Destination.refresh", - return_value=True, - ): - before = len(mock_hub_status_prod_awning.mock_calls) - - await hass.services.async_call( - Platform.COVER, - SERVICE_OPEN_COVER, - 
{ATTR_ENTITY_ID: entity.entity_id}, - blocking=True, - ) - - entity = hass.states.get("cover.markise") - assert entity is not None - assert entity.state == STATE_OPEN - assert entity.attributes["current_position"] == 100 - assert len(mock_hub_status_prod_awning.mock_calls) == before - - with patch( - "wmspro.destination.Destination.refresh", - return_value=True, - ): - before = len(mock_hub_status_prod_awning.mock_calls) - - await hass.services.async_call( - Platform.COVER, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: entity.entity_id}, - blocking=True, - ) - - entity = hass.states.get("cover.markise") - assert entity is not None - assert entity.state == STATE_CLOSED - assert entity.attributes["current_position"] == 0 - assert len(mock_hub_status_prod_awning.mock_calls) == before - - -async def test_cover_open_to_pos( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_hub_configuration_prod: AsyncMock, - mock_hub_status_prod_awning: AsyncMock, - mock_action_call: AsyncMock, -) -> None: - """Test that a cover entity is opened to correct position.""" - assert await setup_config_entry(hass, mock_config_entry) - assert len(mock_hub_ping.mock_calls) == 1 - assert len(mock_hub_configuration_prod.mock_calls) == 1 - assert len(mock_hub_status_prod_awning.mock_calls) >= 1 - - entity = hass.states.get("cover.markise") - assert entity is not None - assert entity.state == STATE_CLOSED - assert entity.attributes["current_position"] == 0 - - with patch( - "wmspro.destination.Destination.refresh", - return_value=True, - ): - before = len(mock_hub_status_prod_awning.mock_calls) - - await hass.services.async_call( - Platform.COVER, - SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: entity.entity_id, "position": 50}, - blocking=True, - ) - - entity = hass.states.get("cover.markise") - assert entity is not None - assert entity.state == STATE_OPEN - assert entity.attributes["current_position"] == 50 - assert len(mock_hub_status_prod_awning.mock_calls) == before - - -async def test_cover_open_and_stop( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_hub_configuration_prod: AsyncMock, - mock_hub_status_prod_awning: AsyncMock, - mock_action_call: AsyncMock, -) -> None: - """Test that a cover entity is opened and stopped correctly.""" - assert await setup_config_entry(hass, mock_config_entry) - assert len(mock_hub_ping.mock_calls) == 1 - assert len(mock_hub_configuration_prod.mock_calls) == 1 - assert len(mock_hub_status_prod_awning.mock_calls) >= 1 - - entity = hass.states.get("cover.markise") - assert entity is not None - assert entity.state == STATE_CLOSED - assert entity.attributes["current_position"] == 0 - - with patch( - "wmspro.destination.Destination.refresh", - return_value=True, - ): - before = len(mock_hub_status_prod_awning.mock_calls) - - await hass.services.async_call( - Platform.COVER, - SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: entity.entity_id, "position": 80}, - blocking=True, - ) - - entity = hass.states.get("cover.markise") - assert entity is not None - assert entity.state == STATE_OPEN - assert entity.attributes["current_position"] == 80 - assert len(mock_hub_status_prod_awning.mock_calls) == before - - with patch( - "wmspro.destination.Destination.refresh", - return_value=True, - ): - before = len(mock_hub_status_prod_awning.mock_calls) - - await hass.services.async_call( - Platform.COVER, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: entity.entity_id}, - blocking=True, - ) - - entity = 
hass.states.get("cover.markise") - assert entity is not None - assert entity.state == STATE_OPEN - assert entity.attributes["current_position"] == 80 - assert len(mock_hub_status_prod_awning.mock_calls) == before diff --git a/tests/components/wmspro/test_diagnostics.py b/tests/components/wmspro/test_diagnostics.py deleted file mode 100644 index 930c3f2898e..00000000000 --- a/tests/components/wmspro/test_diagnostics.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Test the wmspro diagnostics.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.core import HomeAssistant - -from . import setup_config_entry - -from tests.common import MockConfigEntry -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_hub_configuration_prod: AsyncMock, - mock_dest_refresh: AsyncMock, - snapshot: SnapshotAssertion, -) -> None: - """Test that a config entry can be loaded with DeviceConfig.""" - assert await setup_config_entry(hass, mock_config_entry) - assert len(mock_hub_ping.mock_calls) == 1 - assert len(mock_hub_configuration_prod.mock_calls) == 1 - assert len(mock_dest_refresh.mock_calls) == 2 - - result = await get_diagnostics_for_config_entry( - hass, hass_client, mock_config_entry - ) - assert result == snapshot diff --git a/tests/components/wmspro/test_init.py b/tests/components/wmspro/test_init.py deleted file mode 100644 index aeb5f3db152..00000000000 --- a/tests/components/wmspro/test_init.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Test the wmspro initialization.""" - -from unittest.mock import AsyncMock - -import aiohttp - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from . 
import setup_config_entry - -from tests.common import MockConfigEntry - - -async def test_config_entry_device_config_ping_failed( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, -) -> None: - """Test that a config entry will be retried due to ConfigEntryNotReady.""" - mock_hub_ping.side_effect = aiohttp.ClientError - await setup_config_entry(hass, mock_config_entry) - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - assert len(mock_hub_ping.mock_calls) == 1 - - -async def test_config_entry_device_config_refresh_failed( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_hub_refresh: AsyncMock, -) -> None: - """Test that a config entry will be retried due to ConfigEntryNotReady.""" - mock_hub_refresh.side_effect = aiohttp.ClientError - await setup_config_entry(hass, mock_config_entry) - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - assert len(mock_hub_ping.mock_calls) == 1 - assert len(mock_hub_refresh.mock_calls) == 1 diff --git a/tests/components/wmspro/test_light.py b/tests/components/wmspro/test_light.py deleted file mode 100644 index db53b54a2f6..00000000000 --- a/tests/components/wmspro/test_light.py +++ /dev/null @@ -1,206 +0,0 @@ -"""Test the wmspro light support.""" - -from unittest.mock import AsyncMock, patch - -from freezegun.api import FrozenDateTimeFactory -from syrupy import SnapshotAssertion - -from homeassistant.components.light import ATTR_BRIGHTNESS -from homeassistant.components.wmspro.const import DOMAIN -from homeassistant.components.wmspro.light import SCAN_INTERVAL -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, - STATE_OFF, - STATE_ON, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from . 
import setup_config_entry - -from tests.common import MockConfigEntry, async_fire_time_changed - - -async def test_light_device( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_hub_configuration_prod: AsyncMock, - mock_hub_status_prod_dimmer: AsyncMock, - device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test that a light device is created correctly.""" - assert await setup_config_entry(hass, mock_config_entry) - assert len(mock_hub_ping.mock_calls) == 1 - assert len(mock_hub_configuration_prod.mock_calls) == 1 - assert len(mock_hub_status_prod_dimmer.mock_calls) == 2 - - device_entry = device_registry.async_get_device(identifiers={(DOMAIN, "97358")}) - assert device_entry is not None - assert device_entry == snapshot - - -async def test_light_update( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_hub_configuration_prod: AsyncMock, - mock_hub_status_prod_dimmer: AsyncMock, - freezer: FrozenDateTimeFactory, - snapshot: SnapshotAssertion, -) -> None: - """Test that a light entity is created and updated correctly.""" - assert await setup_config_entry(hass, mock_config_entry) - assert len(mock_hub_ping.mock_calls) == 1 - assert len(mock_hub_configuration_prod.mock_calls) == 1 - assert len(mock_hub_status_prod_dimmer.mock_calls) == 2 - - entity = hass.states.get("light.licht") - assert entity is not None - assert entity == snapshot - - # Move time to next update - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - assert len(mock_hub_status_prod_dimmer.mock_calls) >= 3 - - -async def test_light_turn_on_and_off( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_hub_configuration_prod: AsyncMock, - mock_hub_status_prod_dimmer: AsyncMock, - mock_action_call: AsyncMock, -) -> None: - """Test that a light entity is turned on and off correctly.""" - assert await setup_config_entry(hass, mock_config_entry) - assert len(mock_hub_ping.mock_calls) == 1 - assert len(mock_hub_configuration_prod.mock_calls) == 1 - assert len(mock_hub_status_prod_dimmer.mock_calls) >= 1 - - entity = hass.states.get("light.licht") - assert entity is not None - assert entity.state == STATE_OFF - assert entity.attributes[ATTR_BRIGHTNESS] is None - - with patch( - "wmspro.destination.Destination.refresh", - return_value=True, - ): - before = len(mock_hub_status_prod_dimmer.mock_calls) - - await hass.services.async_call( - Platform.LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity.entity_id}, - blocking=True, - ) - - entity = hass.states.get("light.licht") - assert entity is not None - assert entity.state == STATE_ON - assert entity.attributes[ATTR_BRIGHTNESS] >= 1 - assert len(mock_hub_status_prod_dimmer.mock_calls) == before - - with patch( - "wmspro.destination.Destination.refresh", - return_value=True, - ): - before = len(mock_hub_status_prod_dimmer.mock_calls) - - await hass.services.async_call( - Platform.LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity.entity_id}, - blocking=True, - ) - - entity = hass.states.get("light.licht") - assert entity is not None - assert entity.state == STATE_OFF - assert entity.attributes[ATTR_BRIGHTNESS] is None - assert len(mock_hub_status_prod_dimmer.mock_calls) == before - - -async def test_light_dimm_on_and_off( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_hub_configuration_prod: 
AsyncMock, - mock_hub_status_prod_dimmer: AsyncMock, - mock_action_call: AsyncMock, -) -> None: - """Test that a light entity is dimmed on and off correctly.""" - assert await setup_config_entry(hass, mock_config_entry) - assert len(mock_hub_ping.mock_calls) == 1 - assert len(mock_hub_configuration_prod.mock_calls) == 1 - assert len(mock_hub_status_prod_dimmer.mock_calls) >= 1 - - entity = hass.states.get("light.licht") - assert entity is not None - assert entity.state == STATE_OFF - assert entity.attributes[ATTR_BRIGHTNESS] is None - - with patch( - "wmspro.destination.Destination.refresh", - return_value=True, - ): - before = len(mock_hub_status_prod_dimmer.mock_calls) - - await hass.services.async_call( - Platform.LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity.entity_id}, - blocking=True, - ) - - entity = hass.states.get("light.licht") - assert entity is not None - assert entity.state == STATE_ON - assert entity.attributes[ATTR_BRIGHTNESS] >= 1 - assert len(mock_hub_status_prod_dimmer.mock_calls) == before - - with patch( - "wmspro.destination.Destination.refresh", - return_value=True, - ): - before = len(mock_hub_status_prod_dimmer.mock_calls) - - await hass.services.async_call( - Platform.LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity.entity_id, ATTR_BRIGHTNESS: 128}, - blocking=True, - ) - - entity = hass.states.get("light.licht") - assert entity is not None - assert entity.state == STATE_ON - assert entity.attributes[ATTR_BRIGHTNESS] == 128 - assert len(mock_hub_status_prod_dimmer.mock_calls) == before - - with patch( - "wmspro.destination.Destination.refresh", - return_value=True, - ): - before = len(mock_hub_status_prod_dimmer.mock_calls) - - await hass.services.async_call( - Platform.LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity.entity_id}, - blocking=True, - ) - - entity = hass.states.get("light.licht") - assert entity is not None - assert entity.state == STATE_OFF - assert entity.attributes[ATTR_BRIGHTNESS] is None - assert len(mock_hub_status_prod_dimmer.mock_calls) == before diff --git a/tests/components/wmspro/test_scene.py b/tests/components/wmspro/test_scene.py deleted file mode 100644 index a6b16e5bbc9..00000000000 --- a/tests/components/wmspro/test_scene.py +++ /dev/null @@ -1,63 +0,0 @@ -"""Test the wmspro scene support.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.components.wmspro.const import DOMAIN -from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_ON -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr -from homeassistant.setup import async_setup_component - -from . 
import setup_config_entry - -from tests.common import MockConfigEntry - - -async def test_scene_room_device( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_hub_configuration_test: AsyncMock, - mock_dest_refresh: AsyncMock, - device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test that a scene room device is created correctly.""" - assert await setup_config_entry(hass, mock_config_entry) - assert len(mock_hub_ping.mock_calls) == 1 - assert len(mock_hub_configuration_test.mock_calls) == 1 - - device_entry = device_registry.async_get_device(identifiers={(DOMAIN, "42581")}) - assert device_entry is not None - assert device_entry == snapshot - - -async def test_scene_activate( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_hub_ping: AsyncMock, - mock_hub_configuration_test: AsyncMock, - mock_dest_refresh: AsyncMock, - mock_scene_call: AsyncMock, - snapshot: SnapshotAssertion, -) -> None: - """Test that a scene entity is created and activated correctly.""" - assert await setup_config_entry(hass, mock_config_entry) - assert len(mock_hub_ping.mock_calls) == 1 - assert len(mock_hub_configuration_test.mock_calls) == 1 - - entity = hass.states.get("scene.raum_0_gute_nacht") - assert entity is not None - assert entity == snapshot - - await async_setup_component(hass, "homeassistant", {}) - await hass.services.async_call( - "homeassistant", - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity.entity_id}, - blocking=True, - ) - - assert len(mock_scene_call.mock_calls) == 1 diff --git a/tests/components/wolflink/const.py b/tests/components/wolflink/const.py deleted file mode 100644 index 073faec51b2..00000000000 --- a/tests/components/wolflink/const.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Constants for the Wolf SmartSet Service tests.""" - -from homeassistant.components.wolflink.const import ( - DEVICE_GATEWAY, - DEVICE_ID, - DEVICE_NAME, -) -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME - -CONFIG = { - DEVICE_NAME: "test-device", - DEVICE_ID: 1234, - DEVICE_GATEWAY: 5678, - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", -} diff --git a/tests/components/wolflink/test_config_flow.py b/tests/components/wolflink/test_config_flow.py index d30cc046a85..bd71d9d3180 100644 --- a/tests/components/wolflink/test_config_flow.py +++ b/tests/components/wolflink/test_config_flow.py @@ -17,10 +17,16 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .const import CONFIG - from tests.common import MockConfigEntry +CONFIG = { + DEVICE_NAME: "test-device", + DEVICE_ID: 1234, + DEVICE_GATEWAY: 5678, + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", +} + INPUT_CONFIG = { CONF_USERNAME: CONFIG[CONF_USERNAME], CONF_PASSWORD: CONFIG[CONF_PASSWORD], @@ -128,7 +134,7 @@ async def test_already_configured_error(hass: HomeAssistant) -> None: patch("homeassistant.components.wolflink.async_setup_entry", return_value=True), ): MockConfigEntry( - domain=DOMAIN, unique_id=str(CONFIG[DEVICE_ID]), data=CONFIG + domain=DOMAIN, unique_id=CONFIG[DEVICE_ID], data=CONFIG ).add_to_hass(hass) result = await hass.config_entries.flow.async_init( diff --git a/tests/components/wolflink/test_init.py b/tests/components/wolflink/test_init.py deleted file mode 100644 index ec39619452f..00000000000 --- a/tests/components/wolflink/test_init.py +++ /dev/null @@ -1,59 +0,0 @@ -"""Test the Wolf 
SmartSet Service.""" - -from unittest.mock import patch - -from httpx import RequestError - -from homeassistant.components.wolflink.const import DEVICE_ID, DOMAIN, MANUFACTURER -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from .const import CONFIG - -from tests.common import MockConfigEntry - - -async def test_unique_id_migration( - hass: HomeAssistant, device_registry: dr.DeviceRegistry -) -> None: - """Test already configured while creating entry.""" - config_entry = MockConfigEntry( - domain=DOMAIN, unique_id=CONFIG[DEVICE_ID], data=CONFIG - ) - config_entry.add_to_hass(hass) - - device_id = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, CONFIG[DEVICE_ID])}, - configuration_url="https://www.wolf-smartset.com/", - manufacturer=MANUFACTURER, - ).id - - assert config_entry.version == 1 - assert config_entry.minor_version == 1 - assert config_entry.unique_id == 1234 - assert ( - hass.config_entries.async_entry_for_domain_unique_id(DOMAIN, 1234) - is config_entry - ) - assert hass.config_entries.async_entry_for_domain_unique_id(DOMAIN, "1234") is None - assert device_registry.async_get(device_id).identifiers == {(DOMAIN, 1234)} - - with ( - patch( - "homeassistant.components.wolflink.fetch_parameters", - side_effect=RequestError("Unable to fetch parameters"), - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - - assert config_entry.version == 1 - assert config_entry.minor_version == 2 - assert config_entry.unique_id == "1234" - assert ( - hass.config_entries.async_entry_for_domain_unique_id(DOMAIN, "1234") - is config_entry - ) - assert hass.config_entries.async_entry_for_domain_unique_id(DOMAIN, 1234) is None - - assert device_registry.async_get(device_id).identifiers == {(DOMAIN, "1234")} diff --git a/tests/components/workday/__init__.py b/tests/components/workday/__init__.py index 17449af8bd1..a7e26765643 100644 --- a/tests/components/workday/__init__.py +++ b/tests/components/workday/__init__.py @@ -4,8 +4,6 @@ from __future__ import annotations from typing import Any -from holidays import OPTIONAL - from homeassistant.components.workday.const import ( DEFAULT_EXCLUDES, DEFAULT_NAME, @@ -312,26 +310,3 @@ TEST_LANGUAGE_NO_CHANGE = { "remove_holidays": ["2022-12-04", "2022-12-24,2022-12-26"], "language": "de", } -TEST_NO_OPTIONAL_CATEGORY = { - "name": DEFAULT_NAME, - "country": "CH", - "province": "FR", - "excludes": DEFAULT_EXCLUDES, - "days_offset": DEFAULT_OFFSET, - "workdays": DEFAULT_WORKDAYS, - "add_holidays": [], - "remove_holidays": [], - "language": "de", -} -TEST_OPTIONAL_CATEGORY = { - "name": DEFAULT_NAME, - "country": "CH", - "province": "FR", - "excludes": DEFAULT_EXCLUDES, - "days_offset": DEFAULT_OFFSET, - "workdays": DEFAULT_WORKDAYS, - "add_holidays": [], - "remove_holidays": [], - "language": "de", - "category": [OPTIONAL], -} diff --git a/tests/components/workday/conftest.py b/tests/components/workday/conftest.py index 081d6ce90db..33bf98f90c3 100644 --- a/tests/components/workday/conftest.py +++ b/tests/components/workday/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Workday integration tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/workday/snapshots/test_binary_sensor.ambr b/tests/components/workday/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 4cf7dca4861..00000000000 
--- a/tests/components/workday/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,59 +0,0 @@ -# serializer version: 1 -# name: test_only_repairs_for_current_next_year - dict({ - tuple( - 'workday', - 'bad_date_holiday-1-2024_08_15', - ): IssueRegistryItemSnapshot({ - 'active': True, - 'breaks_in_ha_version': None, - 'created': , - 'data': dict({ - 'country': 'DE', - 'entry_id': '1', - 'named_holiday': '2024-08-15', - }), - 'dismissed_version': None, - 'domain': 'workday', - 'is_fixable': True, - 'is_persistent': False, - 'issue_domain': None, - 'issue_id': 'bad_date_holiday-1-2024_08_15', - 'learn_more_url': None, - 'severity': , - 'translation_key': 'bad_date_holiday', - 'translation_placeholders': dict({ - 'country': 'DE', - 'remove_holidays': '2024-08-15', - 'title': 'Mock Title', - }), - }), - tuple( - 'workday', - 'bad_date_holiday-1-2025_08_15', - ): IssueRegistryItemSnapshot({ - 'active': True, - 'breaks_in_ha_version': None, - 'created': , - 'data': dict({ - 'country': 'DE', - 'entry_id': '1', - 'named_holiday': '2025-08-15', - }), - 'dismissed_version': None, - 'domain': 'workday', - 'is_fixable': True, - 'is_persistent': False, - 'issue_domain': None, - 'issue_id': 'bad_date_holiday-1-2025_08_15', - 'learn_more_url': None, - 'severity': , - 'translation_key': 'bad_date_holiday', - 'translation_placeholders': dict({ - 'country': 'DE', - 'remove_holidays': '2025-08-15', - 'title': 'Mock Title', - }), - }), - }) -# --- diff --git a/tests/components/workday/snapshots/test_diagnostics.ambr b/tests/components/workday/snapshots/test_diagnostics.ambr deleted file mode 100644 index f41b86b7f6d..00000000000 --- a/tests/components/workday/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,48 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics - dict({ - 'config_entry': dict({ - 'data': dict({ - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'workday', - 'entry_id': '1', - 'minor_version': 1, - 'options': dict({ - 'add_holidays': list([ - '2022-12-01', - '2022-12-05,2022-12-15', - ]), - 'country': 'DE', - 'days_offset': 0, - 'excludes': list([ - 'sat', - 'sun', - 'holiday', - ]), - 'language': 'de', - 'name': 'Workday Sensor', - 'province': 'BW', - 'remove_holidays': list([ - '2022-12-04', - '2022-12-24,2022-12-26', - ]), - 'workdays': list([ - 'mon', - 'tue', - 'wed', - 'thu', - 'fri', - ]), - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': None, - 'version': 1, - }), - }) -# --- diff --git a/tests/components/workday/test_binary_sensor.py b/tests/components/workday/test_binary_sensor.py index 212c3e9d305..e973a9f9c28 100644 --- a/tests/components/workday/test_binary_sensor.py +++ b/tests/components/workday/test_binary_sensor.py @@ -5,18 +5,10 @@ from typing import Any from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy.assertion import SnapshotAssertion from homeassistant.components.workday.binary_sensor import SERVICE_CHECK_DATE -from homeassistant.components.workday.const import ( - DEFAULT_EXCLUDES, - DEFAULT_NAME, - DEFAULT_OFFSET, - DEFAULT_WORKDAYS, - DOMAIN, -) +from homeassistant.components.workday.const import DOMAIN from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from homeassistant.util.dt import UTC @@ -47,8 +39,6 @@ from . 
import ( TEST_CONFIG_YESTERDAY, TEST_LANGUAGE_CHANGE, TEST_LANGUAGE_NO_CHANGE, - TEST_NO_OPTIONAL_CATEGORY, - TEST_OPTIONAL_CATEGORY, init_integration, ) @@ -410,54 +400,3 @@ async def test_language_difference_no_change_other_language( """Test skipping if no difference in language naming.""" await init_integration(hass, TEST_LANGUAGE_NO_CHANGE) assert "Changing language from en to en_US" not in caplog.text - - -@pytest.mark.parametrize( - ("config", "end_state"), - [(TEST_OPTIONAL_CATEGORY, "off"), (TEST_NO_OPTIONAL_CATEGORY, "on")], -) -async def test_optional_category( - hass: HomeAssistant, - config: dict[str, Any], - end_state: str, - freezer: FrozenDateTimeFactory, -) -> None: - """Test setup from various configs.""" - # CH, subdiv FR has optional holiday Jan 2nd - freezer.move_to(datetime(2024, 1, 2, 12, tzinfo=UTC)) # Tuesday - await init_integration(hass, config) - - state = hass.states.get("binary_sensor.workday_sensor") - assert state is not None - assert state.state == end_state - - -async def test_only_repairs_for_current_next_year( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - issue_registry: ir.IssueRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test only repairs are raised for current and next year.""" - freezer.move_to(datetime(2024, 8, 15, 12, tzinfo=UTC)) - remove_dates = [ - # None of these dates are holidays - "2024-08-15", # Creates issue - "2025-08-15", # Creates issue - "2026-08-15", # No issue - ] - config = { - "name": DEFAULT_NAME, - "country": "DE", - "province": "BW", - "excludes": DEFAULT_EXCLUDES, - "days_offset": DEFAULT_OFFSET, - "workdays": DEFAULT_WORKDAYS, - "add_holidays": [], - "remove_holidays": remove_dates, - "language": "de", - } - await init_integration(hass, config) - - assert len(issue_registry.issues) == 2 - assert issue_registry.issues == snapshot diff --git a/tests/components/workday/test_config_flow.py b/tests/components/workday/test_config_flow.py index cc83cee93a2..7eb3065e576 100644 --- a/tests/components/workday/test_config_flow.py +++ b/tests/components/workday/test_config_flow.py @@ -5,13 +5,11 @@ from __future__ import annotations from datetime import datetime from freezegun.api import FrozenDateTimeFactory -from holidays import HALF_DAY, OPTIONAL import pytest from homeassistant import config_entries from homeassistant.components.workday.const import ( CONF_ADD_HOLIDAYS, - CONF_CATEGORY, CONF_EXCLUDES, CONF_OFFSET, CONF_REMOVE_HOLIDAYS, @@ -356,14 +354,13 @@ async def test_options_form_abort_duplicate(hass: HomeAssistant) -> None: hass, { "name": "Workday Sensor", - "country": "CH", + "country": "DE", "excludes": ["sat", "sun", "holiday"], "days_offset": 0, "workdays": ["mon", "tue", "wed", "thu", "fri"], "add_holidays": [], "remove_holidays": [], - "province": "FR", - "category": [OPTIONAL], + "province": None, }, entry_id="1", ) @@ -371,14 +368,13 @@ async def test_options_form_abort_duplicate(hass: HomeAssistant) -> None: hass, { "name": "Workday Sensor2", - "country": "CH", + "country": "DE", "excludes": ["sat", "sun", "holiday"], "days_offset": 0, "workdays": ["mon", "tue", "wed", "thu", "fri"], "add_holidays": ["2023-03-28"], "remove_holidays": [], - "province": "FR", - "category": [OPTIONAL], + "province": None, }, entry_id="2", ) @@ -393,8 +389,6 @@ async def test_options_form_abort_duplicate(hass: HomeAssistant) -> None: "workdays": ["mon", "tue", "wed", "thu", "fri"], "add_holidays": [], "remove_holidays": [], - "province": "FR", - "category": [OPTIONAL], }, ) @@ -608,48 +602,3 @@ async def 
test_language( state = hass.states.get("binary_sensor.workday_sensor") assert state is not None assert state.state == "on" - - -async def test_form_with_categories(hass: HomeAssistant) -> None: - """Test optional categories.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: "Workday Sensor", - CONF_COUNTRY: "CH", - }, - ) - await hass.async_block_till_done() - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - { - CONF_EXCLUDES: DEFAULT_EXCLUDES, - CONF_OFFSET: DEFAULT_OFFSET, - CONF_WORKDAYS: DEFAULT_WORKDAYS, - CONF_ADD_HOLIDAYS: [], - CONF_REMOVE_HOLIDAYS: [], - CONF_LANGUAGE: "de", - CONF_CATEGORY: [HALF_DAY], - }, - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "Workday Sensor" - assert result3["options"] == { - "name": "Workday Sensor", - "country": "CH", - "excludes": ["sat", "sun", "holiday"], - "days_offset": 0, - "workdays": ["mon", "tue", "wed", "thu", "fri"], - "add_holidays": [], - "remove_holidays": [], - "language": "de", - "category": ["half_day"], - } diff --git a/tests/components/workday/test_diagnostics.py b/tests/components/workday/test_diagnostics.py deleted file mode 100644 index 13206a361f1..00000000000 --- a/tests/components/workday/test_diagnostics.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Test Workday diagnostics.""" - -from __future__ import annotations - -from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props - -from homeassistant.core import HomeAssistant - -from . import TEST_CONFIG_ADD_REMOVE_DATE_RANGE, init_integration - -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test generating diagnostics for a config entry.""" - entry = await init_integration(hass, TEST_CONFIG_ADD_REMOVE_DATE_RANGE) - - diag = await get_diagnostics_for_config_entry(hass, hass_client, entry) - - assert diag == snapshot( - exclude=props("full_features", "created_at", "modified_at"), - ) diff --git a/tests/components/workday/test_repairs.py b/tests/components/workday/test_repairs.py index e25d4e0ca45..60a55e1a347 100644 --- a/tests/components/workday/test_repairs.py +++ b/tests/components/workday/test_repairs.py @@ -2,6 +2,12 @@ from __future__ import annotations +from http import HTTPStatus + +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) from homeassistant.components.workday.const import CONF_REMOVE_HOLIDAYS, DOMAIN from homeassistant.const import CONF_COUNTRY from homeassistant.core import HomeAssistant @@ -17,7 +23,6 @@ from . 
import ( ) from tests.common import ANY -from tests.components.repairs import process_repair_fix_flow, start_repair_fix_flow from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -47,15 +52,24 @@ async def test_bad_country( issue = i assert issue is not None - data = await start_repair_fix_flow(client, DOMAIN, "bad_country") + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "bad_country"}) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["description_placeholders"] == {"title": entry.title} assert data["step_id"] == "country" - data = await process_repair_fix_flow(client, flow_id, json={"country": "DE"}) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url, json={"country": "DE"}) + assert resp.status == HTTPStatus.OK + data = await resp.json() - data = await process_repair_fix_flow(client, flow_id, json={"province": "HB"}) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url, json={"province": "HB"}) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -100,15 +114,24 @@ async def test_bad_country_none( issue = i assert issue is not None - data = await start_repair_fix_flow(client, DOMAIN, "bad_country") + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "bad_country"}) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["description_placeholders"] == {"title": entry.title} assert data["step_id"] == "country" - data = await process_repair_fix_flow(client, flow_id, json={"country": "DE"}) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url, json={"country": "DE"}) + assert resp.status == HTTPStatus.OK + data = await resp.json() - data = await process_repair_fix_flow(client, flow_id, json={}) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url, json={}) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -153,13 +176,19 @@ async def test_bad_country_no_province( issue = i assert issue is not None - data = await start_repair_fix_flow(client, DOMAIN, "bad_country") + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "bad_country"}) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["description_placeholders"] == {"title": entry.title} assert data["step_id"] == "country" - data = await process_repair_fix_flow(client, flow_id, json={"country": "SE"}) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url, json={"country": "SE"}) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -204,7 +233,10 @@ async def test_bad_province( issue = i assert issue is not None - data = await start_repair_fix_flow(client, DOMAIN, "bad_province") + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "bad_province"}) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -213,7 +245,10 @@ async def test_bad_province( 
} assert data["step_id"] == "province" - data = await process_repair_fix_flow(client, flow_id, json={"province": "BW"}) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url, json={"province": "BW"}) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -258,7 +293,10 @@ async def test_bad_province_none( issue = i assert issue is not None - data = await start_repair_fix_flow(client, DOMAIN, "bad_province") + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "bad_province"}) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -267,7 +305,10 @@ async def test_bad_province_none( } assert data["step_id"] == "province" - data = await process_repair_fix_flow(client, flow_id, json={}) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url, json={}) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -318,9 +359,13 @@ async def test_bad_named_holiday( issue = i assert issue is not None - data = await start_repair_fix_flow( - client, DOMAIN, "bad_named_holiday-1-not_a_holiday" + url = RepairsFlowIndexView.url + resp = await client.post( + url, + json={"handler": DOMAIN, "issue_id": "bad_named_holiday-1-not_a_holiday"}, ) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -330,17 +375,23 @@ async def test_bad_named_holiday( } assert data["step_id"] == "fix_remove_holiday" - data = await process_repair_fix_flow( - client, flow_id, json={"remove_holidays": ["Christmas", "Not exist 2"]} + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post( + url, json={"remove_holidays": ["Christmas", "Not exist 2"]} ) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["errors"] == { CONF_REMOVE_HOLIDAYS: "remove_holiday_error", } - data = await process_repair_fix_flow( - client, flow_id, json={"remove_holidays": ["Christmas", "Thanksgiving"]} + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post( + url, json={"remove_holidays": ["Christmas", "Thanksgiving"]} ) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -391,7 +442,13 @@ async def test_bad_date_holiday( issue = i assert issue is not None - data = await start_repair_fix_flow(client, DOMAIN, "bad_date_holiday-1-2024_02_05") + url = RepairsFlowIndexView.url + resp = await client.post( + url, + json={"handler": DOMAIN, "issue_id": "bad_date_holiday-1-2024_02_05"}, + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["description_placeholders"] == { @@ -401,9 +458,10 @@ async def test_bad_date_holiday( } assert data["step_id"] == "fix_remove_holiday" - data = await process_repair_fix_flow( - client, flow_id, json={"remove_holidays": ["2024-02-06"]} - ) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url, json={"remove_holidays": ["2024-02-06"]}) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" await hass.async_block_till_done() @@ -485,12 +543,18 @@ async def 
test_other_fixable_issues( "ignored": False, } in results - data = await start_repair_fix_flow(client, DOMAIN, "issue_1") + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "issue_1"}) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["step_id"] == "confirm" - data = await process_repair_fix_flow(client, flow_id) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" await hass.async_block_till_done() diff --git a/tests/components/worldclock/conftest.py b/tests/components/worldclock/conftest.py deleted file mode 100644 index 74ed82f099a..00000000000 --- a/tests/components/worldclock/conftest.py +++ /dev/null @@ -1,66 +0,0 @@ -"""Fixtures for the Worldclock integration.""" - -from __future__ import annotations - -from collections.abc import Generator -from typing import Any -from unittest.mock import AsyncMock, patch - -import pytest - -from homeassistant.components.worldclock.const import ( - CONF_TIME_FORMAT, - DEFAULT_NAME, - DEFAULT_TIME_STR_FORMAT, - DOMAIN, -) -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_NAME, CONF_TIME_ZONE -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Automatically patch setup.""" - with patch( - "homeassistant.components.worldclock.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture(name="get_config") -async def get_config_to_integration_load() -> dict[str, Any]: - """Return configuration. 
- - To override the config, tests can be marked with: - @pytest.mark.parametrize("get_config", [{...}]) - """ - return { - CONF_NAME: DEFAULT_NAME, - CONF_TIME_ZONE: "America/New_York", - CONF_TIME_FORMAT: DEFAULT_TIME_STR_FORMAT, - } - - -@pytest.fixture(name="loaded_entry") -async def load_integration( - hass: HomeAssistant, get_config: dict[str, Any] -) -> MockConfigEntry: - """Set up the Worldclock integration in Home Assistant.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - title=DEFAULT_NAME, - source=SOURCE_USER, - options=get_config, - entry_id="1", - ) - - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry diff --git a/tests/components/worldclock/test_config_flow.py b/tests/components/worldclock/test_config_flow.py deleted file mode 100644 index dfdb8159b9c..00000000000 --- a/tests/components/worldclock/test_config_flow.py +++ /dev/null @@ -1,104 +0,0 @@ -"""Test the Worldclock config flow.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock - -from homeassistant import config_entries -from homeassistant.components.worldclock.const import ( - CONF_TIME_FORMAT, - DEFAULT_NAME, - DEFAULT_TIME_STR_FORMAT, - DOMAIN, -) -from homeassistant.const import CONF_NAME, CONF_TIME_ZONE -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from tests.common import MockConfigEntry - - -async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: - """Test we get the form.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_TIME_ZONE: "America/New_York", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["version"] == 1 - assert result["options"] == { - CONF_NAME: DEFAULT_NAME, - CONF_TIME_ZONE: "America/New_York", - CONF_TIME_FORMAT: DEFAULT_TIME_STR_FORMAT, - } - - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: - """Test options flow.""" - - result = await hass.config_entries.options.async_init(loaded_entry.entry_id) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - CONF_TIME_FORMAT: "%a, %b %d, %Y %I:%M %p", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { - CONF_NAME: DEFAULT_NAME, - CONF_TIME_ZONE: "America/New_York", - CONF_TIME_FORMAT: "%a, %b %d, %Y %I:%M %p", - } - - await hass.async_block_till_done() - - # Check the entity was updated, no new entity was created - assert len(hass.states.async_all()) == 1 - - state = hass.states.get("sensor.worldclock_sensor") - assert state is not None - - -async def test_entry_already_exist( - hass: HomeAssistant, loaded_entry: MockConfigEntry -) -> None: - """Test abort when entry already exist.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - 
result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_NAME: DEFAULT_NAME, - CONF_TIME_ZONE: "America/New_York", - CONF_TIME_FORMAT: DEFAULT_TIME_STR_FORMAT, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/worldclock/test_init.py b/tests/components/worldclock/test_init.py deleted file mode 100644 index 5683836c166..00000000000 --- a/tests/components/worldclock/test_init.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Test Worldclock component setup process.""" - -from __future__ import annotations - -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: - """Test unload an entry.""" - - assert loaded_entry.state is ConfigEntryState.LOADED - assert await hass.config_entries.async_unload(loaded_entry.entry_id) - await hass.async_block_till_done() - assert loaded_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/worldclock/test_sensor.py b/tests/components/worldclock/test_sensor.py index a8e3e41e649..00195a49827 100644 --- a/tests/components/worldclock/test_sensor.py +++ b/tests/components/worldclock/test_sensor.py @@ -1,32 +1,19 @@ """The test for the World clock sensor platform.""" -from datetime import tzinfo - import pytest -from homeassistant.components.worldclock.const import ( - CONF_TIME_FORMAT, - DEFAULT_NAME, - DOMAIN, -) -from homeassistant.const import CONF_NAME, CONF_TIME_ZONE -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.helpers import issue_registry as ir +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry - @pytest.fixture -async def time_zone() -> tzinfo | None: +def time_zone(): """Fixture for time zone.""" - return await dt_util.async_get_time_zone("America/New_York") + return dt_util.get_time_zone("America/New_York") -async def test_time_imported_from_yaml( - hass: HomeAssistant, time_zone: tzinfo | None, issue_registry: ir.IssueRegistry -) -> None: +async def test_time(hass: HomeAssistant, time_zone) -> None: """Test the time at a different location.""" config = {"sensor": {"platform": "worldclock", "time_zone": "America/New_York"}} @@ -42,42 +29,26 @@ async def test_time_imported_from_yaml( assert state.state == dt_util.now(time_zone=time_zone).strftime("%H:%M") - issue = issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" - ) - assert issue - assert issue.issue_domain == DOMAIN - -async def test_time_from_config_entry( - hass: HomeAssistant, time_zone: tzinfo | None, loaded_entry: MockConfigEntry -) -> None: - """Test the time at a different location.""" - - state = hass.states.get("sensor.worldclock_sensor") - assert state is not None - - assert state.state == dt_util.now(time_zone=time_zone).strftime("%H:%M") - - -@pytest.mark.parametrize( - "get_config", - [ - { - CONF_NAME: DEFAULT_NAME, - CONF_TIME_ZONE: "America/New_York", - CONF_TIME_FORMAT: "%a, %b %d, %Y %I:%M %p", - } - ], -) -async def test_time_format( - hass: HomeAssistant, time_zone: tzinfo | None, loaded_entry: MockConfigEntry -) -> None: +async def test_time_format(hass: HomeAssistant, time_zone) -> None: """Test time_format setting.""" + 
time_format = "%a, %b %d, %Y %I:%M %p" + config = { + "sensor": { + "platform": "worldclock", + "time_zone": "America/New_York", + "time_format": time_format, + } + } + + assert await async_setup_component( + hass, + "sensor", + config, + ) + await hass.async_block_till_done() state = hass.states.get("sensor.worldclock_sensor") assert state is not None - assert state.state == dt_util.now(time_zone=time_zone).strftime( - "%a, %b %d, %Y %I:%M %p" - ) + assert state.state == dt_util.now(time_zone=time_zone).strftime(time_format) diff --git a/tests/components/ws66i/test_media_player.py b/tests/components/ws66i/test_media_player.py index 23f64d7d514..a66e79bf9e0 100644 --- a/tests/components/ws66i/test_media_player.py +++ b/tests/components/ws66i/test_media_player.py @@ -1,7 +1,6 @@ """The tests for WS66i Media player platform.""" from collections import defaultdict -from typing import Any from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory @@ -74,7 +73,7 @@ class AttrDict(dict): class MockWs66i: """Mock for pyws66i object.""" - def __init__(self, fail_open=False, fail_zone_check=None) -> None: + def __init__(self, fail_open=False, fail_zone_check=None): """Init mock object.""" self.zones = defaultdict( lambda: AttrDict( @@ -171,9 +170,7 @@ async def _setup_ws66i_with_options(hass: HomeAssistant, ws66i) -> MockConfigEnt return config_entry -async def _call_media_player_service( - hass: HomeAssistant, name: str, data: dict[str, Any] -) -> None: +async def _call_media_player_service(hass, name, data): await hass.services.async_call( MEDIA_PLAYER_DOMAIN, name, service_data=data, blocking=True ) diff --git a/tests/components/wyoming/__init__.py b/tests/components/wyoming/__init__.py index 4540cdaabfd..5bfbbfe87b2 100644 --- a/tests/components/wyoming/__init__.py +++ b/tests/components/wyoming/__init__.py @@ -8,11 +8,7 @@ from wyoming.info import ( AsrModel, AsrProgram, Attribution, - HandleModel, - HandleProgram, Info, - IntentModel, - IntentProgram, Satellite, TtsProgram, TtsVoice, @@ -91,48 +87,6 @@ WAKE_WORD_INFO = Info( ) ] ) -INTENT_INFO = Info( - intent=[ - IntentProgram( - name="Test Intent", - description="Test Intent", - installed=True, - attribution=TEST_ATTR, - models=[ - IntentModel( - name="Test Model", - description="Test Model", - installed=True, - attribution=TEST_ATTR, - languages=["en-US"], - version=None, - ) - ], - version=None, - ) - ] -) -HANDLE_INFO = Info( - handle=[ - HandleProgram( - name="Test Handle", - description="Test Handle", - installed=True, - attribution=TEST_ATTR, - models=[ - HandleModel( - name="Test Model", - description="Test Model", - installed=True, - attribution=TEST_ATTR, - languages=["en-US"], - version=None, - ) - ], - version=None, - ) - ] -) SATELLITE_INFO = Info( satellite=Satellite( name="Test Satellite", @@ -196,10 +150,10 @@ async def reload_satellite( return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.run" + "homeassistant.components.wyoming.satellite.WyomingSatellite.run" ) as _run_mock, ): # _run_mock: satellite task does not actually run await hass.config_entries.async_reload(config_entry_id) - return hass.data[DOMAIN][config_entry_id].device + return hass.data[DOMAIN][config_entry_id].satellite.device diff --git a/tests/components/wyoming/conftest.py b/tests/components/wyoming/conftest.py index 018fff33821..47ef0566dc6 100644 --- a/tests/components/wyoming/conftest.py +++ b/tests/components/wyoming/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for 
the Wyoming tests.""" -from collections.abc import Generator from pathlib import Path from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator from homeassistant.components import stt from homeassistant.components.wyoming import DOMAIN @@ -13,21 +13,15 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from . import ( - HANDLE_INFO, - INTENT_INFO, - SATELLITE_INFO, - STT_INFO, - TTS_INFO, - WAKE_WORD_INFO, -) +from . import SATELLITE_INFO, STT_INFO, TTS_INFO, WAKE_WORD_INFO from tests.common import MockConfigEntry @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: """Mock the TTS cache dir with empty dir.""" + return mock_tts_cache_dir @pytest.fixture(autouse=True) @@ -90,36 +84,6 @@ def wake_word_config_entry(hass: HomeAssistant) -> ConfigEntry: return entry -@pytest.fixture -def intent_config_entry(hass: HomeAssistant) -> ConfigEntry: - """Create a config entry.""" - entry = MockConfigEntry( - domain="wyoming", - data={ - "host": "1.2.3.4", - "port": 1234, - }, - title="Test Intent", - ) - entry.add_to_hass(hass) - return entry - - -@pytest.fixture -def handle_config_entry(hass: HomeAssistant) -> ConfigEntry: - """Create a config entry.""" - entry = MockConfigEntry( - domain="wyoming", - data={ - "host": "1.2.3.4", - "port": 1234, - }, - title="Test Handle", - ) - entry.add_to_hass(hass) - return entry - - @pytest.fixture async def init_wyoming_stt(hass: HomeAssistant, stt_config_entry: ConfigEntry): """Initialize Wyoming STT.""" @@ -152,34 +116,6 @@ async def init_wyoming_wake_word( await hass.config_entries.async_setup(wake_word_config_entry.entry_id) -@pytest.fixture -async def init_wyoming_intent( - hass: HomeAssistant, intent_config_entry: ConfigEntry -) -> ConfigEntry: - """Initialize Wyoming intent recognizer.""" - with patch( - "homeassistant.components.wyoming.data.load_wyoming_info", - return_value=INTENT_INFO, - ): - await hass.config_entries.async_setup(intent_config_entry.entry_id) - - return intent_config_entry - - -@pytest.fixture -async def init_wyoming_handle( - hass: HomeAssistant, handle_config_entry: ConfigEntry -) -> ConfigEntry: - """Initialize Wyoming intent handler.""" - with patch( - "homeassistant.components.wyoming.data.load_wyoming_info", - return_value=HANDLE_INFO, - ): - await hass.config_entries.async_setup(handle_config_entry.entry_id) - - return handle_config_entry - - @pytest.fixture def metadata(hass: HomeAssistant) -> stt.SpeechMetadata: """Get default STT metadata.""" @@ -217,7 +153,7 @@ async def init_satellite(hass: HomeAssistant, satellite_config_entry: ConfigEntr return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.run" + "homeassistant.components.wyoming.satellite.WyomingSatellite.run" ) as _run_mock, ): # _run_mock: satellite task does not actually run @@ -229,4 +165,4 @@ async def satellite_device( hass: HomeAssistant, init_satellite, satellite_config_entry: ConfigEntry ) -> SatelliteDevice: """Get a satellite device fixture.""" - return hass.data[DOMAIN][satellite_config_entry.entry_id].device + return hass.data[DOMAIN][satellite_config_entry.entry_id].satellite.device diff --git a/tests/components/wyoming/snapshots/test_config_flow.ambr b/tests/components/wyoming/snapshots/test_config_flow.ambr index bdead0f2028..a0e0c7c5011 
100644 --- a/tests/components/wyoming/snapshots/test_config_flow.ambr +++ b/tests/components/wyoming/snapshots/test_config_flow.ambr @@ -1,4 +1,42 @@ # serializer version: 1 +# name: test_hassio_addon_discovery + FlowResultSnapshot({ + 'context': dict({ + 'source': 'hassio', + 'unique_id': '1234', + }), + 'data': dict({ + 'host': 'mock-piper', + 'port': 10200, + }), + 'description': None, + 'description_placeholders': None, + 'flow_id': , + 'handler': 'wyoming', + 'options': dict({ + }), + 'result': ConfigEntrySnapshot({ + 'data': dict({ + 'host': 'mock-piper', + 'port': 10200, + }), + 'disabled_by': None, + 'domain': 'wyoming', + 'entry_id': , + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'hassio', + 'title': 'Piper', + 'unique_id': '1234', + 'version': 1, + }), + 'title': 'Piper', + 'type': , + 'version': 1, + }) +# --- # name: test_hassio_addon_discovery[info0] FlowResultSnapshot({ 'context': dict({ @@ -26,8 +64,6 @@ 'port': 10200, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'wyoming', 'entry_id': , 'minor_version': 1, @@ -72,8 +108,6 @@ 'port': 10200, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'wyoming', 'entry_id': , 'minor_version': 1, @@ -94,6 +128,7 @@ # name: test_zeroconf_discovery FlowResultSnapshot({ 'context': dict({ + 'name': 'Test Satellite', 'source': 'zeroconf', 'title_placeholders': dict({ 'name': 'Test Satellite', @@ -117,8 +152,6 @@ 'port': 12345, }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'wyoming', 'entry_id': , 'minor_version': 1, diff --git a/tests/components/wyoming/snapshots/test_conversation.ambr b/tests/components/wyoming/snapshots/test_conversation.ambr deleted file mode 100644 index 24763cac441..00000000000 --- a/tests/components/wyoming/snapshots/test_conversation.ambr +++ /dev/null @@ -1,7 +0,0 @@ -# serializer version: 1 -# name: test_connection_lost - 'Connection to service was lost' -# --- -# name: test_oserror - 'Error communicating with service: Boom!' -# --- diff --git a/tests/components/wyoming/snapshots/test_tts.ambr b/tests/components/wyoming/snapshots/test_tts.ambr index 7ca5204e66c..299bddb07e5 100644 --- a/tests/components/wyoming/snapshots/test_tts.ambr +++ b/tests/components/wyoming/snapshots/test_tts.ambr @@ -32,6 +32,28 @@ }), ]) # --- +# name: test_get_tts_audio_mp3 + list([ + dict({ + 'data': dict({ + 'text': 'Hello world', + }), + 'payload': None, + 'type': 'synthesize', + }), + ]) +# --- +# name: test_get_tts_audio_raw + list([ + dict({ + 'data': dict({ + 'text': 'Hello world', + }), + 'payload': None, + 'type': 'synthesize', + }), + ]) +# --- # name: test_voice_speaker list([ dict({ diff --git a/tests/components/wyoming/test_binary_sensor.py b/tests/components/wyoming/test_binary_sensor.py index 99ed5cda58e..8d4e3c72c56 100644 --- a/tests/components/wyoming/test_binary_sensor.py +++ b/tests/components/wyoming/test_binary_sensor.py @@ -1,17 +1,13 @@ """Test Wyoming binary sensor devices.""" -import pytest - from homeassistant.components.wyoming.devices import SatelliteDevice from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from . 
import reload_satellite -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_assist_in_progress( hass: HomeAssistant, satellite_config_entry: ConfigEntry, @@ -40,19 +36,3 @@ async def test_assist_in_progress( assert state is not None assert state.state == STATE_OFF assert not satellite_device.is_active - - -async def test_assist_in_progress_disabled_by_default( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - satellite_device: SatelliteDevice, -) -> None: - """Test assist in progress binary sensor is added disabled.""" - assist_in_progress_id = satellite_device.get_assist_in_progress_entity_id(hass) - assert assist_in_progress_id - - assert not hass.states.get(assist_in_progress_id) - entity_entry = entity_registry.async_get(assist_in_progress_id) - assert entity_entry - assert entity_entry.disabled - assert entity_entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION diff --git a/tests/components/wyoming/test_config_flow.py b/tests/components/wyoming/test_config_flow.py index 6bca226d621..e363a0650bc 100644 --- a/tests/components/wyoming/test_config_flow.py +++ b/tests/components/wyoming/test_config_flow.py @@ -8,11 +8,11 @@ from syrupy.assertion import SnapshotAssertion from wyoming.info import Info from homeassistant import config_entries +from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.wyoming.const import DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.service_info.hassio import HassioServiceInfo from . import EMPTY_INFO, SATELLITE_INFO, STT_INFO, TTS_INFO diff --git a/tests/components/wyoming/test_conversation.py b/tests/components/wyoming/test_conversation.py deleted file mode 100644 index 02b04503962..00000000000 --- a/tests/components/wyoming/test_conversation.py +++ /dev/null @@ -1,224 +0,0 @@ -"""Test conversation.""" - -from __future__ import annotations - -from unittest.mock import patch - -from syrupy import SnapshotAssertion -from wyoming.asr import Transcript -from wyoming.handle import Handled, NotHandled -from wyoming.intent import Entity, Intent, NotRecognized - -from homeassistant.components import conversation -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import Context, HomeAssistant -from homeassistant.helpers import intent - -from . 
import MockAsyncTcpClient - - -async def test_intent(hass: HomeAssistant, init_wyoming_intent: ConfigEntry) -> None: - """Test when an intent is recognized.""" - agent_id = "conversation.test_intent" - - conversation_id = "conversation-1234" - test_intent = Intent( - name="TestIntent", - entities=[Entity(name="entity", value="value")], - text="success", - ) - - class TestIntentHandler(intent.IntentHandler): - """Test Intent Handler.""" - - intent_type = "TestIntent" - - async def async_handle(self, intent_obj: intent.Intent): - """Handle the intent.""" - assert intent_obj.slots.get("entity", {}).get("value") == "value" - return intent_obj.create_response() - - intent.async_register(hass, TestIntentHandler()) - - with patch( - "homeassistant.components.wyoming.conversation.AsyncTcpClient", - MockAsyncTcpClient([test_intent.event()]), - ): - result = await conversation.async_converse( - hass=hass, - text="test text", - conversation_id=conversation_id, - context=Context(), - language=hass.config.language, - agent_id=agent_id, - ) - - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert result.response.speech, "No speech" - assert result.response.speech.get("plain", {}).get("speech") == "success" - assert result.conversation_id == conversation_id - - -async def test_intent_handle_error( - hass: HomeAssistant, init_wyoming_intent: ConfigEntry -) -> None: - """Test error during handling when an intent is recognized.""" - agent_id = "conversation.test_intent" - - test_intent = Intent(name="TestIntent", entities=[], text="success") - - class TestIntentHandler(intent.IntentHandler): - """Test Intent Handler.""" - - intent_type = "TestIntent" - - async def async_handle(self, intent_obj: intent.Intent): - """Handle the intent.""" - raise intent.IntentError - - intent.async_register(hass, TestIntentHandler()) - - with patch( - "homeassistant.components.wyoming.conversation.AsyncTcpClient", - MockAsyncTcpClient([test_intent.event()]), - ): - result = await conversation.async_converse( - hass=hass, - text="test text", - conversation_id=None, - context=Context(), - language=hass.config.language, - agent_id=agent_id, - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert result.response.error_code == intent.IntentResponseErrorCode.FAILED_TO_HANDLE - - -async def test_not_recognized( - hass: HomeAssistant, init_wyoming_intent: ConfigEntry -) -> None: - """Test when an intent is not recognized.""" - agent_id = "conversation.test_intent" - - with patch( - "homeassistant.components.wyoming.conversation.AsyncTcpClient", - MockAsyncTcpClient([NotRecognized(text="failure").event()]), - ): - result = await conversation.async_converse( - hass=hass, - text="test text", - conversation_id=None, - context=Context(), - language=hass.config.language, - agent_id=agent_id, - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert result.response.error_code == intent.IntentResponseErrorCode.NO_INTENT_MATCH - assert result.response.speech, "No speech" - assert result.response.speech.get("plain", {}).get("speech") == "failure" - - -async def test_handle(hass: HomeAssistant, init_wyoming_handle: ConfigEntry) -> None: - """Test when an intent is handled.""" - agent_id = "conversation.test_handle" - - conversation_id = "conversation-1234" - - with patch( - "homeassistant.components.wyoming.conversation.AsyncTcpClient", - MockAsyncTcpClient([Handled(text="success").event()]), - ): - result = await conversation.async_converse( - 
hass=hass, - text="test text", - conversation_id=conversation_id, - context=Context(), - language=hass.config.language, - agent_id=agent_id, - ) - - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert result.response.speech, "No speech" - assert result.response.speech.get("plain", {}).get("speech") == "success" - assert result.conversation_id == conversation_id - - -async def test_not_handled( - hass: HomeAssistant, init_wyoming_handle: ConfigEntry -) -> None: - """Test when an intent is not handled.""" - agent_id = "conversation.test_handle" - - with patch( - "homeassistant.components.wyoming.conversation.AsyncTcpClient", - MockAsyncTcpClient([NotHandled(text="failure").event()]), - ): - result = await conversation.async_converse( - hass=hass, - text="test text", - conversation_id=None, - context=Context(), - language=hass.config.language, - agent_id=agent_id, - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert result.response.error_code == intent.IntentResponseErrorCode.FAILED_TO_HANDLE - assert result.response.speech, "No speech" - assert result.response.speech.get("plain", {}).get("speech") == "failure" - - -async def test_connection_lost( - hass: HomeAssistant, init_wyoming_handle: ConfigEntry, snapshot: SnapshotAssertion -) -> None: - """Test connection to client is lost.""" - agent_id = "conversation.test_handle" - - with patch( - "homeassistant.components.wyoming.conversation.AsyncTcpClient", - MockAsyncTcpClient([None]), - ): - result = await conversation.async_converse( - hass=hass, - text="test text", - conversation_id=None, - context=Context(), - language=hass.config.language, - agent_id=agent_id, - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert result.response.error_code == intent.IntentResponseErrorCode.UNKNOWN - assert result.response.speech, "No speech" - assert result.response.speech.get("plain", {}).get("speech") == snapshot() - - -async def test_oserror( - hass: HomeAssistant, init_wyoming_handle: ConfigEntry, snapshot: SnapshotAssertion -) -> None: - """Test connection error.""" - agent_id = "conversation.test_handle" - - mock_client = MockAsyncTcpClient([Transcript("success").event()]) - - with ( - patch( - "homeassistant.components.wyoming.conversation.AsyncTcpClient", mock_client - ), - patch.object(mock_client, "read_event", side_effect=OSError("Boom!")), - ): - result = await conversation.async_converse( - hass=hass, - text="test text", - conversation_id=None, - context=Context(), - language=hass.config.language, - agent_id=agent_id, - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert result.response.error_code == intent.IntentResponseErrorCode.UNKNOWN - assert result.response.speech, "No speech" - assert result.response.speech.get("plain", {}).get("speech") == snapshot() diff --git a/tests/components/wyoming/test_devices.py b/tests/components/wyoming/test_devices.py index 24423264f93..98efb76ab1d 100644 --- a/tests/components/wyoming/test_devices.py +++ b/tests/components/wyoming/test_devices.py @@ -32,8 +32,8 @@ async def test_device_registry_info( assist_in_progress_id = satellite_device.get_assist_in_progress_entity_id(hass) assert assist_in_progress_id assist_in_progress_state = hass.states.get(assist_in_progress_id) - # assist_in_progress binary sensor is disabled - assert assist_in_progress_state is None + assert assist_in_progress_state is not None + assert assist_in_progress_state.state == STATE_OFF muted_id = 
satellite_device.get_muted_entity_id(hass) assert muted_id @@ -58,8 +58,7 @@ async def test_remove_device_registry_entry( # Check associated entities assist_in_progress_id = satellite_device.get_assist_in_progress_entity_id(hass) assert assist_in_progress_id - # assist_in_progress binary sensor is disabled - assert hass.states.get(assist_in_progress_id) is None + assert hass.states.get(assist_in_progress_id) is not None muted_id = satellite_device.get_muted_entity_id(hass) assert muted_id diff --git a/tests/components/wyoming/test_satellite.py b/tests/components/wyoming/test_satellite.py index f293f976242..1a291153ad0 100644 --- a/tests/components/wyoming/test_satellite.py +++ b/tests/components/wyoming/test_satellite.py @@ -23,7 +23,6 @@ from wyoming.vad import VoiceStarted, VoiceStopped from wyoming.wake import Detect, Detection from homeassistant.components import assist_pipeline, wyoming -from homeassistant.components.wyoming.assist_satellite import WyomingAssistSatellite from homeassistant.components.wyoming.devices import SatelliteDevice from homeassistant.const import STATE_ON from homeassistant.core import HomeAssistant, State @@ -241,22 +240,23 @@ async def test_satellite_pipeline(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", + "homeassistant.components.wyoming.satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.wyoming.assist_satellite.tts.async_get_media_source_audio", + "homeassistant.components.wyoming.satellite.tts.async_get_media_source_audio", return_value=("wav", get_test_wav()), ), - patch("homeassistant.components.wyoming.assist_satellite._PING_SEND_DELAY", 0), + patch("homeassistant.components.wyoming.satellite._PING_SEND_DELAY", 0), ): entry = await setup_config_entry(hass) - device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device - assert device is not None + device: SatelliteDevice = hass.data[wyoming.DOMAIN][ + entry.entry_id + ].satellite.device async with asyncio.timeout(1): await mock_client.connect_event.wait() @@ -443,7 +443,7 @@ async def test_satellite_muted(hass: HomeAssistant) -> None: """Test callback for a satellite that has been muted.""" on_muted_event = asyncio.Event() - original_on_muted = WyomingAssistSatellite.on_muted + original_on_muted = wyoming.satellite.WyomingSatellite.on_muted async def on_muted(self): # Trigger original function @@ -462,16 +462,12 @@ async def test_satellite_muted(hass: HomeAssistant) -> None: "homeassistant.components.wyoming.data.load_wyoming_info", return_value=SATELLITE_INFO, ), - patch( - "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", - SatelliteAsyncTcpClient([]), - ), patch( "homeassistant.components.wyoming.switch.WyomingSatelliteMuteSwitch.async_get_last_state", return_value=State("switch.test_mute", STATE_ON), ), patch( - "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_muted", + "homeassistant.components.wyoming.satellite.WyomingSatellite.on_muted", on_muted, ), ): @@ -488,11 +484,11 @@ async def test_satellite_restart(hass: HomeAssistant) -> None: """Test pipeline loop restart after unexpected error.""" on_restart_event = asyncio.Event() - original_on_restart = 
WyomingAssistSatellite.on_restart + original_on_restart = wyoming.satellite.WyomingSatellite.on_restart async def on_restart(self): await original_on_restart(self) - self.stop_satellite() + self.stop() on_restart_event.set() with ( @@ -501,14 +497,14 @@ async def test_satellite_restart(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite._connect_and_loop", + "homeassistant.components.wyoming.satellite.WyomingSatellite._connect_and_loop", side_effect=RuntimeError(), ), patch( - "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_restart", + "homeassistant.components.wyoming.satellite.WyomingSatellite.on_restart", on_restart, ), - patch("homeassistant.components.wyoming.assist_satellite._RESTART_SECONDS", 0), + patch("homeassistant.components.wyoming.satellite._RESTART_SECONDS", 0), ): await setup_config_entry(hass) async with asyncio.timeout(1): @@ -521,7 +517,7 @@ async def test_satellite_reconnect(hass: HomeAssistant) -> None: reconnect_event = asyncio.Event() stopped_event = asyncio.Event() - original_on_reconnect = WyomingAssistSatellite.on_reconnect + original_on_reconnect = wyoming.satellite.WyomingSatellite.on_reconnect async def on_reconnect(self): await original_on_reconnect(self) @@ -530,7 +526,7 @@ async def test_satellite_reconnect(hass: HomeAssistant) -> None: num_reconnects += 1 if num_reconnects >= 2: reconnect_event.set() - self.stop_satellite() + self.stop() async def on_stopped(self): stopped_event.set() @@ -541,20 +537,18 @@ async def test_satellite_reconnect(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient.connect", + "homeassistant.components.wyoming.satellite.AsyncTcpClient.connect", side_effect=ConnectionRefusedError(), ), patch( - "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_reconnect", + "homeassistant.components.wyoming.satellite.WyomingSatellite.on_reconnect", on_reconnect, ), patch( - "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_stopped", + "homeassistant.components.wyoming.satellite.WyomingSatellite.on_stopped", on_stopped, ), - patch( - "homeassistant.components.wyoming.assist_satellite._RECONNECT_SECONDS", 0 - ), + patch("homeassistant.components.wyoming.satellite._RECONNECT_SECONDS", 0), ): await setup_config_entry(hass) async with asyncio.timeout(1): @@ -567,7 +561,7 @@ async def test_satellite_disconnect_before_pipeline(hass: HomeAssistant) -> None on_restart_event = asyncio.Event() async def on_restart(self): - self.stop_satellite() + self.stop() on_restart_event.set() with ( @@ -576,14 +570,14 @@ async def test_satellite_disconnect_before_pipeline(hass: HomeAssistant) -> None return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", + "homeassistant.components.wyoming.satellite.AsyncTcpClient", MockAsyncTcpClient([]), # no RunPipeline event ), patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", ) as mock_run_pipeline, patch( - "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_restart", + "homeassistant.components.wyoming.satellite.WyomingSatellite.on_restart", on_restart, ), ): @@ -609,7 +603,7 @@ async def test_satellite_disconnect_during_pipeline(hass: HomeAssistant) -> None 
async def on_restart(self): # Pretend sensor got stuck on self.device.is_active = True - self.stop_satellite() + self.stop() on_restart_event.set() async def on_stopped(self): @@ -621,23 +615,25 @@ async def test_satellite_disconnect_during_pipeline(hass: HomeAssistant) -> None return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", + "homeassistant.components.wyoming.satellite.AsyncTcpClient", MockAsyncTcpClient(events), ), patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", ) as mock_run_pipeline, patch( - "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_restart", + "homeassistant.components.wyoming.satellite.WyomingSatellite.on_restart", on_restart, ), patch( - "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_stopped", + "homeassistant.components.wyoming.satellite.WyomingSatellite.on_stopped", on_stopped, ), ): entry = await setup_config_entry(hass) - device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device + device: SatelliteDevice = hass.data[wyoming.DOMAIN][ + entry.entry_id + ].satellite.device async with asyncio.timeout(1): await on_restart_event.wait() @@ -669,11 +665,11 @@ async def test_satellite_error_during_pipeline(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", + "homeassistant.components.wyoming.satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", wraps=_async_pipeline_from_audio_stream, ) as mock_run_pipeline, ): @@ -705,7 +701,7 @@ async def test_tts_not_wav(hass: HomeAssistant) -> None: """Test satellite receiving non-WAV audio from text-to-speech.""" assert await async_setup_component(hass, assist_pipeline.DOMAIN, {}) - original_stream_tts = WyomingAssistSatellite._stream_tts + original_stream_tts = wyoming.satellite.WyomingSatellite._stream_tts error_event = asyncio.Event() async def _stream_tts(self, media_id): @@ -728,19 +724,19 @@ async def test_tts_not_wav(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", + "homeassistant.components.wyoming.satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", wraps=_async_pipeline_from_audio_stream, ) as mock_run_pipeline, patch( - "homeassistant.components.wyoming.assist_satellite.tts.async_get_media_source_audio", + "homeassistant.components.wyoming.satellite.tts.async_get_media_source_audio", return_value=("mp3", bytes(1)), ), patch( - "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite._stream_tts", + "homeassistant.components.wyoming.satellite.WyomingSatellite._stream_tts", _stream_tts, ), ): @@ -823,16 +819,18 @@ async def test_pipeline_changed(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", + "homeassistant.components.wyoming.satellite.AsyncTcpClient", 
SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", async_pipeline_from_audio_stream, ), ): entry = await setup_config_entry(hass) - device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device + device: SatelliteDevice = hass.data[wyoming.DOMAIN][ + entry.entry_id + ].satellite.device async with asyncio.timeout(1): await mock_client.connect_event.wait() @@ -895,16 +893,18 @@ async def test_audio_settings_changed(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", + "homeassistant.components.wyoming.satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", async_pipeline_from_audio_stream, ), ): entry = await setup_config_entry(hass) - device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device + device: SatelliteDevice = hass.data[wyoming.DOMAIN][ + entry.entry_id + ].satellite.device async with asyncio.timeout(1): await mock_client.connect_event.wait() @@ -938,7 +938,7 @@ async def test_invalid_stages(hass: HomeAssistant) -> None: ).event(), ] - original_run_pipeline_once = WyomingAssistSatellite._run_pipeline_once + original_run_pipeline_once = wyoming.satellite.WyomingSatellite._run_pipeline_once start_stage_event = asyncio.Event() end_stage_event = asyncio.Event() @@ -967,11 +967,11 @@ async def test_invalid_stages(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", + "homeassistant.components.wyoming.satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite._run_pipeline_once", + "homeassistant.components.wyoming.satellite.WyomingSatellite._run_pipeline_once", _run_pipeline_once, ), ): @@ -1029,11 +1029,11 @@ async def test_client_stops_pipeline(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", + "homeassistant.components.wyoming.satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", async_pipeline_from_audio_stream, ), ): @@ -1083,11 +1083,11 @@ async def test_wake_word_phrase(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", + "homeassistant.components.wyoming.satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ), patch( - "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", wraps=_async_pipeline_from_audio_stream, ) as mock_run_pipeline, ): @@ -1114,12 +1114,14 @@ async def test_timers(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", + 
"homeassistant.components.wyoming.satellite.AsyncTcpClient", SatelliteAsyncTcpClient([]), ) as mock_client, ): entry = await setup_config_entry(hass) - device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device + device: SatelliteDevice = hass.data[wyoming.DOMAIN][ + entry.entry_id + ].satellite.device async with asyncio.timeout(1): await mock_client.connect_event.wait() @@ -1283,3 +1285,104 @@ async def test_timers(hass: HomeAssistant) -> None: timer_finished = mock_client.timer_finished assert timer_finished is not None assert timer_finished.id == timer_started.id + + +async def test_satellite_conversation_id(hass: HomeAssistant) -> None: + """Test that the same conversation id is used until timeout.""" + assert await async_setup_component(hass, assist_pipeline.DOMAIN, {}) + + events = [ + RunPipeline( + start_stage=PipelineStage.WAKE, + end_stage=PipelineStage.TTS, + restart_on_end=True, + ).event(), + ] + + pipeline_kwargs: dict[str, Any] = {} + pipeline_event_callback: Callable[[assist_pipeline.PipelineEvent], None] | None = ( + None + ) + run_pipeline_called = asyncio.Event() + + async def async_pipeline_from_audio_stream( + hass: HomeAssistant, + context, + event_callback, + stt_metadata, + stt_stream, + **kwargs, + ) -> None: + nonlocal pipeline_kwargs, pipeline_event_callback + pipeline_kwargs = kwargs + pipeline_event_callback = event_callback + + run_pipeline_called.set() + + with ( + patch( + "homeassistant.components.wyoming.data.load_wyoming_info", + return_value=SATELLITE_INFO, + ), + patch( + "homeassistant.components.wyoming.satellite.AsyncTcpClient", + SatelliteAsyncTcpClient(events), + ) as mock_client, + patch( + "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + async_pipeline_from_audio_stream, + ), + patch( + "homeassistant.components.wyoming.satellite.tts.async_get_media_source_audio", + return_value=("wav", get_test_wav()), + ), + patch("homeassistant.components.wyoming.satellite._PING_SEND_DELAY", 0), + ): + entry = await setup_config_entry(hass) + satellite: wyoming.WyomingSatellite = hass.data[wyoming.DOMAIN][ + entry.entry_id + ].satellite + + async with asyncio.timeout(1): + await mock_client.connect_event.wait() + await mock_client.run_satellite_event.wait() + + async with asyncio.timeout(1): + await run_pipeline_called.wait() + + assert pipeline_event_callback is not None + + # A conversation id should have been generated + conversation_id = pipeline_kwargs.get("conversation_id") + assert conversation_id + + # Reset and run again + run_pipeline_called.clear() + pipeline_kwargs.clear() + + pipeline_event_callback( + assist_pipeline.PipelineEvent(assist_pipeline.PipelineEventType.RUN_END) + ) + + async with asyncio.timeout(1): + await run_pipeline_called.wait() + + # Should be the same conversation id + assert pipeline_kwargs.get("conversation_id") == conversation_id + + # Reset and run again, but this time "time out" + satellite._conversation_id_time = None + run_pipeline_called.clear() + pipeline_kwargs.clear() + + pipeline_event_callback( + assist_pipeline.PipelineEvent(assist_pipeline.PipelineEventType.RUN_END) + ) + + async with asyncio.timeout(1): + await run_pipeline_called.wait() + + # Should be a different conversation id + new_conversation_id = pipeline_kwargs.get("conversation_id") + assert new_conversation_id + assert new_conversation_id != conversation_id diff --git a/tests/components/wyoming/test_select.py b/tests/components/wyoming/test_select.py index 2438d25b838..e6ec2c4d432 100644 
--- a/tests/components/wyoming/test_select.py +++ b/tests/components/wyoming/test_select.py @@ -5,7 +5,6 @@ from unittest.mock import Mock, patch from homeassistant.components import assist_pipeline from homeassistant.components.assist_pipeline.pipeline import PipelineData from homeassistant.components.assist_pipeline.select import OPTION_PREFERRED -from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.components.wyoming.devices import SatelliteDevice from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -141,50 +140,3 @@ async def test_noise_suppression_level_select( ) assert satellite_device.noise_suppression_level == 2 - - -async def test_vad_sensitivity_select( - hass: HomeAssistant, - satellite_config_entry: ConfigEntry, - satellite_device: SatelliteDevice, -) -> None: - """Test VAD sensitivity select.""" - vs_entity_id = satellite_device.get_vad_sensitivity_entity_id(hass) - assert vs_entity_id - - state = hass.states.get(vs_entity_id) - assert state is not None - assert state.state == VadSensitivity.DEFAULT - assert satellite_device.vad_sensitivity == VadSensitivity.DEFAULT - - # Change setting - with patch.object(satellite_device, "set_vad_sensitivity") as mock_vs_changed: - await hass.services.async_call( - "select", - "select_option", - {"entity_id": vs_entity_id, "option": VadSensitivity.AGGRESSIVE.value}, - blocking=True, - ) - - state = hass.states.get(vs_entity_id) - assert state is not None - assert state.state == VadSensitivity.AGGRESSIVE.value - - # set function should have been called - mock_vs_changed.assert_called_once_with(VadSensitivity.AGGRESSIVE) - - # test restore - satellite_device = await reload_satellite(hass, satellite_config_entry.entry_id) - - state = hass.states.get(vs_entity_id) - assert state is not None - assert state.state == VadSensitivity.AGGRESSIVE.value - - await hass.services.async_call( - "select", - "select_option", - {"entity_id": vs_entity_id, "option": VadSensitivity.RELAXED.value}, - blocking=True, - ) - - assert satellite_device.vad_sensitivity == VadSensitivity.RELAXED diff --git a/tests/components/xiaomi/test_device_tracker.py b/tests/components/xiaomi/test_device_tracker.py index 625e6f404ad..975e666af68 100644 --- a/tests/components/xiaomi/test_device_tracker.py +++ b/tests/components/xiaomi/test_device_tracker.py @@ -6,7 +6,7 @@ from unittest.mock import MagicMock, call, patch import requests -from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN +from homeassistant.components.device_tracker import DOMAIN import homeassistant.components.xiaomi.device_tracker as xiaomi from homeassistant.components.xiaomi.device_tracker import get_scanner from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PLATFORM, CONF_USERNAME @@ -144,7 +144,6 @@ def mocked_requests(*args, **kwargs): 200, ) _LOGGER.debug("UNKNOWN ROUTE") - return None @patch( @@ -154,9 +153,9 @@ def mocked_requests(*args, **kwargs): async def test_config(xiaomi_mock, hass: HomeAssistant) -> None: """Testing minimal configuration.""" config = { - DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( + DOMAIN: xiaomi.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: xiaomi.DOMAIN, CONF_HOST: "192.168.0.1", CONF_PASSWORD: "passwordTest", } @@ -164,7 +163,7 @@ async def test_config(xiaomi_mock, hass: HomeAssistant) -> None: } xiaomi.get_scanner(hass, config) assert xiaomi_mock.call_count == 1 - assert xiaomi_mock.call_args == 
call(config[DEVICE_TRACKER_DOMAIN]) + assert xiaomi_mock.call_args == call(config[DOMAIN]) call_arg = xiaomi_mock.call_args[0][0] assert call_arg["username"] == "admin" assert call_arg["password"] == "passwordTest" @@ -179,9 +178,9 @@ async def test_config(xiaomi_mock, hass: HomeAssistant) -> None: async def test_config_full(xiaomi_mock, hass: HomeAssistant) -> None: """Testing full configuration.""" config = { - DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( + DOMAIN: xiaomi.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: xiaomi.DOMAIN, CONF_HOST: "192.168.0.1", CONF_USERNAME: "alternativeAdminName", CONF_PASSWORD: "passwordTest", @@ -190,7 +189,7 @@ async def test_config_full(xiaomi_mock, hass: HomeAssistant) -> None: } xiaomi.get_scanner(hass, config) assert xiaomi_mock.call_count == 1 - assert xiaomi_mock.call_args == call(config[DEVICE_TRACKER_DOMAIN]) + assert xiaomi_mock.call_args == call(config[DOMAIN]) call_arg = xiaomi_mock.call_args[0][0] assert call_arg["username"] == "alternativeAdminName" assert call_arg["password"] == "passwordTest" @@ -203,9 +202,9 @@ async def test_config_full(xiaomi_mock, hass: HomeAssistant) -> None: async def test_invalid_credential(mock_get, mock_post, hass: HomeAssistant) -> None: """Testing invalid credential handling.""" config = { - DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( + DOMAIN: xiaomi.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: xiaomi.DOMAIN, CONF_HOST: "192.168.0.1", CONF_USERNAME: INVALID_USERNAME, CONF_PASSWORD: "passwordTest", @@ -220,9 +219,9 @@ async def test_invalid_credential(mock_get, mock_post, hass: HomeAssistant) -> N async def test_valid_credential(mock_get, mock_post, hass: HomeAssistant) -> None: """Testing valid refresh.""" config = { - DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( + DOMAIN: xiaomi.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: xiaomi.DOMAIN, CONF_HOST: "192.168.0.1", CONF_USERNAME: "admin", CONF_PASSWORD: "passwordTest", @@ -244,9 +243,9 @@ async def test_token_timed_out(mock_get, mock_post, hass: HomeAssistant) -> None New token is requested and list is downloaded a second time. 
""" config = { - DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( + DOMAIN: xiaomi.PLATFORM_SCHEMA( { - CONF_PLATFORM: DEVICE_TRACKER_DOMAIN, + CONF_PLATFORM: xiaomi.DOMAIN, CONF_HOST: "192.168.0.1", CONF_USERNAME: TOKEN_TIMEOUT_USERNAME, CONF_PASSWORD: "passwordTest", diff --git a/tests/components/xiaomi_ble/conftest.py b/tests/components/xiaomi_ble/conftest.py index d4864cbe2f8..bb74b3c7af3 100644 --- a/tests/components/xiaomi_ble/conftest.py +++ b/tests/components/xiaomi_ble/conftest.py @@ -1,9 +1,9 @@ """Session fixtures.""" -from collections.abc import Generator from unittest import mock import pytest +from typing_extensions import Generator class MockServices: @@ -19,7 +19,7 @@ class MockBleakClient: services = MockServices() - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args, **kwargs): """Mock BleakClient.""" async def __aenter__(self, *args, **kwargs): diff --git a/tests/components/xiaomi_ble/test_config_flow.py b/tests/components/xiaomi_ble/test_config_flow.py index e25ac939a53..b61615e0f79 100644 --- a/tests/components/xiaomi_ble/test_config_flow.py +++ b/tests/components/xiaomi_ble/test_config_flow.py @@ -2,12 +2,7 @@ from unittest.mock import patch -from xiaomi_ble import ( - XiaomiBluetoothDeviceData as DeviceData, - XiaomiCloudBLEDevice, - XiaomiCloudException, - XiaomiCloudInvalidAuthenticationException, -) +from xiaomi_ble import XiaomiBluetoothDeviceData as DeviceData from homeassistant import config_entries from homeassistant.components.bluetooth import BluetoothChange @@ -101,25 +96,20 @@ async def test_async_step_bluetooth_valid_device_but_missing_payload_then_full( context={"source": config_entries.SOURCE_BLUETOOTH}, data=MISSING_PAYLOAD_ENCRYPTED, ) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "get_encryption_key_4_5_choose_method" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": "get_encryption_key_4_5"}, - ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "get_encryption_key_4_5" with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"bindkey": "a115210eed7a88e50ad52662e732a9fb"}, ) - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["data"] == {"bindkey": "a115210eed7a88e50ad52662e732a9fb"} - assert result3["result"].unique_id == "A4:C1:38:56:53:84" + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["data"] == {"bindkey": "a115210eed7a88e50ad52662e732a9fb"} + assert result2["result"].unique_id == "A4:C1:38:56:53:84" async def test_async_step_bluetooth_during_onboarding(hass: HomeAssistant) -> None: @@ -249,244 +239,21 @@ async def test_async_step_bluetooth_valid_device_v4_encryption( context={"source": config_entries.SOURCE_BLUETOOTH}, data=JTYJGD03MI_SERVICE_INFO, ) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "get_encryption_key_4_5_choose_method" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": "get_encryption_key_4_5"}, - ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "get_encryption_key_4_5" with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result3 = await hass.config_entries.flow.async_configure( - 
result2["flow_id"], + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result3["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result3["result"].unique_id == "54:EF:44:E3:9C:BC" - - -async def test_bluetooth_discovery_device_v4_encryption_from_cloud( - hass: HomeAssistant, -) -> None: - """Test discovery via bluetooth with a valid v4 device, with auth from cloud.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_BLUETOOTH}, - data=JTYJGD03MI_SERVICE_INFO, - ) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "get_encryption_key_4_5_choose_method" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": "cloud_auth"}, - ) - device = XiaomiCloudBLEDevice( - name="x", - mac="54:EF:44:E3:9C:BC", - bindkey="5b51a7c91cde6707c9ef18dfda143a58", - ) - with patch( - "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", - return_value=device, - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={"username": "x@x.x", "password": "x"}, - ) - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result3["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result3["result"].unique_id == "54:EF:44:E3:9C:BC" - - -async def test_bluetooth_discovery_device_v4_encryption_from_cloud_wrong_key( - hass: HomeAssistant, -) -> None: - """Test discovery via bluetooth with a valid v4 device, with wrong auth from cloud.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_BLUETOOTH}, - data=JTYJGD03MI_SERVICE_INFO, - ) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "get_encryption_key_4_5_choose_method" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": "cloud_auth"}, - ) - - device = XiaomiCloudBLEDevice( - name="x", - mac="54:EF:44:E3:9C:BC", - bindkey="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - ) - with patch( - "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", - return_value=device, - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={"username": "x@x.x", "password": "x"}, - ) - - assert result3["type"] is FlowResultType.FORM - assert result3["step_id"] == "get_encryption_key_4_5" - assert result3["errors"]["bindkey"] == "decryption_failed" - - # Verify we can fallback to manual key - with patch( - "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True - ): - result4 = await hass.config_entries.flow.async_configure( - result3["flow_id"], - user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, - ) - - assert result4["type"] is FlowResultType.CREATE_ENTRY - assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" - - -async def test_bluetooth_discovery_incorrect_cloud_account( - hass: HomeAssistant, -) -> None: - """Test discovery via bluetooth with incorrect cloud account.""" - 
result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_BLUETOOTH}, - data=JTYJGD03MI_SERVICE_INFO, - ) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "get_encryption_key_4_5_choose_method" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": "cloud_auth"}, - ) - - with patch( - "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", - return_value=None, - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={"username": "wrong@wrong.wrong", "password": "correct"}, - ) - - assert result3["type"] is FlowResultType.FORM - assert result3["step_id"] == "cloud_auth" - assert result3["errors"]["base"] == "api_device_not_found" - - device = XiaomiCloudBLEDevice( - name="x", - mac="54:EF:44:E3:9C:BC", - bindkey="5b51a7c91cde6707c9ef18dfda143a58", - ) - # Verify we can try again with the correct account - with patch( - "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", - return_value=device, - ): - result4 = await hass.config_entries.flow.async_configure( - result3["flow_id"], - user_input={"username": "correct@correct.correct", "password": "correct"}, - ) - - assert result4["type"] is FlowResultType.CREATE_ENTRY - assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" - - -async def test_bluetooth_discovery_incorrect_cloud_auth( - hass: HomeAssistant, -) -> None: - """Test discovery via bluetooth with incorrect cloud auth.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_BLUETOOTH}, - data=JTYJGD03MI_SERVICE_INFO, - ) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "get_encryption_key_4_5_choose_method" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": "cloud_auth"}, - ) - - with patch( - "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", - side_effect=XiaomiCloudInvalidAuthenticationException, - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={"username": "x@x.x", "password": "wrong"}, - ) - - assert result3["type"] is FlowResultType.FORM - assert result3["step_id"] == "cloud_auth" - assert result3["errors"]["base"] == "auth_failed" - - device = XiaomiCloudBLEDevice( - name="x", - mac="54:EF:44:E3:9C:BC", - bindkey="5b51a7c91cde6707c9ef18dfda143a58", - ) - # Verify we can try again with the correct password - with patch( - "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", - return_value=device, - ): - result4 = await hass.config_entries.flow.async_configure( - result3["flow_id"], - user_input={"username": "x@x.x", "password": "correct"}, - ) - - assert result4["type"] is FlowResultType.CREATE_ENTRY - assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" - - -async def test_bluetooth_discovery_cloud_offline( - hass: HomeAssistant, -) -> None: - """Test discovery via bluetooth when the cloud is offline.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": 
-        data=JTYJGD03MI_SERVICE_INFO,
-    )
-    assert result["type"] is FlowResultType.MENU
-    assert result["step_id"] == "get_encryption_key_4_5_choose_method"
-
-    result2 = await hass.config_entries.flow.async_configure(
-        result["flow_id"],
-        user_input={"next_step_id": "cloud_auth"},
-    )
-
-    with patch(
-        "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info",
-        side_effect=XiaomiCloudException,
-    ):
-        result3 = await hass.config_entries.flow.async_configure(
-            result2["flow_id"],
-            user_input={"username": "x@x.x", "password": "wrong"},
-        )
-
-    assert result3["type"] is FlowResultType.ABORT
-    assert result3["reason"] == "api_error"
+    assert result2["type"] is FlowResultType.CREATE_ENTRY
+    assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)"
+    assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}
+    assert result2["result"].unique_id == "54:EF:44:E3:9C:BC"


 async def test_async_step_bluetooth_valid_device_v4_encryption_wrong_key(
@@ -498,36 +265,31 @@ async def test_async_step_bluetooth_valid_device_v4_encryption_wrong_key(
         context={"source": config_entries.SOURCE_BLUETOOTH},
         data=JTYJGD03MI_SERVICE_INFO,
     )
-    assert result["type"] is FlowResultType.MENU
-    assert result["step_id"] == "get_encryption_key_4_5_choose_method"
+    assert result["type"] is FlowResultType.FORM
+    assert result["step_id"] == "get_encryption_key_4_5"

     result2 = await hass.config_entries.flow.async_configure(
         result["flow_id"],
-        user_input={"next_step_id": "get_encryption_key_4_5"},
-    )
-
-    result3 = await hass.config_entries.flow.async_configure(
-        result2["flow_id"],
         user_input={"bindkey": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"},
     )
-    assert result3["type"] is FlowResultType.FORM
-    assert result3["step_id"] == "get_encryption_key_4_5"
-    assert result3["errors"]["bindkey"] == "decryption_failed"
+    assert result2["type"] is FlowResultType.FORM
+    assert result2["step_id"] == "get_encryption_key_4_5"
+    assert result2["errors"]["bindkey"] == "decryption_failed"

     # Test can finish flow
     with patch(
         "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True
     ):
-        result4 = await hass.config_entries.flow.async_configure(
-            result3["flow_id"],
+        result2 = await hass.config_entries.flow.async_configure(
+            result["flow_id"],
             user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"},
         )
-    assert result4["type"] is FlowResultType.CREATE_ENTRY
-    assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)"
-    assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}
-    assert result4["result"].unique_id == "54:EF:44:E3:9C:BC"
+    assert result2["type"] is FlowResultType.CREATE_ENTRY
+    assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)"
+    assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}
+    assert result2["result"].unique_id == "54:EF:44:E3:9C:BC"


 async def test_async_step_bluetooth_valid_device_v4_encryption_wrong_key_length(
@@ -539,36 +301,31 @@ async def test_async_step_bluetooth_valid_device_v4_encryption_wrong_key_length(
         context={"source": config_entries.SOURCE_BLUETOOTH},
         data=JTYJGD03MI_SERVICE_INFO,
     )
-    assert result["type"] is FlowResultType.MENU
-    assert result["step_id"] == "get_encryption_key_4_5_choose_method"
+    assert result["type"] is FlowResultType.FORM
+    assert result["step_id"] == "get_encryption_key_4_5"

     result2 = await hass.config_entries.flow.async_configure(
         result["flow_id"],
-        user_input={"next_step_id": "get_encryption_key_4_5"},
-    )
-
-    result3 = await hass.config_entries.flow.async_configure(
-        result2["flow_id"],
         user_input={"bindkey": "5b51a7c91cde6707c9ef18fda143a58"},
     )
-    assert result3["type"] is FlowResultType.FORM
-    assert result3["step_id"] == "get_encryption_key_4_5"
-    assert result3["errors"]["bindkey"] == "expected_32_characters"
+    assert result2["type"] is FlowResultType.FORM
+    assert result2["step_id"] == "get_encryption_key_4_5"
+    assert result2["errors"]["bindkey"] == "expected_32_characters"

     # Test can finish flow
     with patch(
         "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True
     ):
-        result4 = await hass.config_entries.flow.async_configure(
-            result3["flow_id"],
+        result2 = await hass.config_entries.flow.async_configure(
+            result["flow_id"],
             user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"},
         )
-    assert result4["type"] is FlowResultType.CREATE_ENTRY
-    assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)"
-    assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}
-    assert result4["result"].unique_id == "54:EF:44:E3:9C:BC"
+    assert result2["type"] is FlowResultType.CREATE_ENTRY
+    assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)"
+    assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}
+    assert result2["result"].unique_id == "54:EF:44:E3:9C:BC"


 async def test_async_step_bluetooth_not_xiaomi(hass: HomeAssistant) -> None:
@@ -700,25 +457,20 @@ async def test_async_step_user_short_payload_then_full(hass: HomeAssistant) -> N
         result["flow_id"],
         user_input={"address": "A4:C1:38:56:53:84"},
     )
-    assert result1["type"] is FlowResultType.MENU
-    assert result1["step_id"] == "get_encryption_key_4_5_choose_method"
-
-    result2 = await hass.config_entries.flow.async_configure(
-        result1["flow_id"],
-        user_input={"next_step_id": "get_encryption_key_4_5"},
-    )
+    assert result1["type"] is FlowResultType.FORM
+    assert result1["step_id"] == "get_encryption_key_4_5"

     with patch(
         "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True
     ):
-        result3 = await hass.config_entries.flow.async_configure(
-            result2["flow_id"],
+        result2 = await hass.config_entries.flow.async_configure(
+            result["flow_id"],
             user_input={"bindkey": "a115210eed7a88e50ad52662e732a9fb"},
         )
-    assert result3["type"] is FlowResultType.CREATE_ENTRY
-    assert result3["title"] == "Temperature/Humidity Sensor 5384 (LYWSD03MMC)"
-    assert result3["data"] == {"bindkey": "a115210eed7a88e50ad52662e732a9fb"}
+    assert result2["type"] is FlowResultType.CREATE_ENTRY
+    assert result2["title"] == "Temperature/Humidity Sensor 5384 (LYWSD03MMC)"
+    assert result2["data"] == {"bindkey": "a115210eed7a88e50ad52662e732a9fb"}


 async def test_async_step_user_with_found_devices_v4_encryption(
@@ -740,26 +492,21 @@ async def test_async_step_user_with_found_devices_v4_encryption(
         result["flow_id"],
         user_input={"address": "54:EF:44:E3:9C:BC"},
     )
-    assert result1["type"] is FlowResultType.MENU
-    assert result1["step_id"] == "get_encryption_key_4_5_choose_method"
-
-    result2 = await hass.config_entries.flow.async_configure(
-        result1["flow_id"],
-        user_input={"next_step_id": "get_encryption_key_4_5"},
-    )
+    assert result1["type"] is FlowResultType.FORM
+    assert result1["step_id"] == "get_encryption_key_4_5"

     with patch(
         "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True
     ):
-        result3 = await hass.config_entries.flow.async_configure(
-            result2["flow_id"],
+        result2 = await hass.config_entries.flow.async_configure(
+            result["flow_id"],
             user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"},
"5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result3["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result3["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" async def test_async_step_user_with_found_devices_v4_encryption_wrong_key( @@ -783,36 +530,31 @@ async def test_async_step_user_with_found_devices_v4_encryption_wrong_key( result["flow_id"], user_input={"address": "54:EF:44:E3:9C:BC"}, ) - assert result1["type"] is FlowResultType.MENU - assert result1["step_id"] == "get_encryption_key_4_5_choose_method" - - result2 = await hass.config_entries.flow.async_configure( - result1["flow_id"], - user_input={"next_step_id": "get_encryption_key_4_5"}, - ) + assert result1["type"] is FlowResultType.FORM + assert result1["step_id"] == "get_encryption_key_4_5" # Try an incorrect key - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"bindkey": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"}, ) - assert result3["type"] is FlowResultType.FORM - assert result3["step_id"] == "get_encryption_key_4_5" - assert result3["errors"]["bindkey"] == "decryption_failed" + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "get_encryption_key_4_5" + assert result2["errors"]["bindkey"] == "decryption_failed" # Check can still finish flow with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result4 = await hass.config_entries.flow.async_configure( - result3["flow_id"], + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result4["type"] is FlowResultType.CREATE_ENTRY - assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" async def test_async_step_user_with_found_devices_v4_encryption_wrong_key_length( @@ -836,38 +578,33 @@ async def test_async_step_user_with_found_devices_v4_encryption_wrong_key_length result["flow_id"], user_input={"address": "54:EF:44:E3:9C:BC"}, ) - assert result1["type"] is FlowResultType.MENU - assert result1["step_id"] == "get_encryption_key_4_5_choose_method" - - result2 = await hass.config_entries.flow.async_configure( - result1["flow_id"], - user_input={"next_step_id": "get_encryption_key_4_5"}, - ) + assert result1["type"] is FlowResultType.FORM + assert result1["step_id"] == "get_encryption_key_4_5" # Try an incorrect key - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef1dfda143a58"}, ) - assert result3["type"] is FlowResultType.FORM - assert result3["type"] is FlowResultType.FORM - 
assert result3["step_id"] == "get_encryption_key_4_5" - assert result3["errors"]["bindkey"] == "expected_32_characters" + assert result2["type"] is FlowResultType.FORM + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "get_encryption_key_4_5" + assert result2["errors"]["bindkey"] == "expected_32_characters" # Check can still finish flow with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result4 = await hass.config_entries.flow.async_configure( - result3["flow_id"], + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result4["type"] is FlowResultType.CREATE_ENTRY - assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" async def test_async_step_user_with_found_devices_legacy_encryption( @@ -1266,19 +1003,14 @@ async def test_async_step_reauth_v4(hass: HomeAssistant) -> None: assert len(results) == 1 result = results[0] - assert result["step_id"] == "get_encryption_key_4_5_choose_method" + assert result["step_id"] == "get_encryption_key_4_5" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={"next_step_id": "get_encryption_key_4_5"}, - ) - - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result3["type"] is FlowResultType.ABORT - assert result3["reason"] == "reauth_successful" + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" async def test_async_step_reauth_v4_wrong_key(hass: HomeAssistant) -> None: @@ -1320,90 +1052,22 @@ async def test_async_step_reauth_v4_wrong_key(hass: HomeAssistant) -> None: assert len(results) == 1 result = results[0] - assert result["step_id"] == "get_encryption_key_4_5_choose_method" + assert result["step_id"] == "get_encryption_key_4_5" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={"next_step_id": "get_encryption_key_4_5"}, - ) - - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dada143a58"}, ) - assert result3["type"] is FlowResultType.FORM - assert result3["step_id"] == "get_encryption_key_4_5" - assert result3["errors"]["bindkey"] == "decryption_failed" - - result4 = await hass.config_entries.flow.async_configure( - result3["flow_id"], - user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, - ) - assert result4["type"] is FlowResultType.ABORT - assert result4["reason"] == "reauth_successful" - - -async def test_async_step_reauth_v4_from_cloud(hass: HomeAssistant) -> None: - """Test reauth with a v4 key from the cloud.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="54:EF:44:E3:9C:BC", - ) - entry.add_to_hass(hass) - saved_callback = None - - def _async_register_callback(_hass, _callback, _matcher, _mode): - nonlocal saved_callback - saved_callback = _callback - return lambda: None - - with patch( - 
"homeassistant.components.bluetooth.update_coordinator.async_register_callback", - _async_register_callback, - ): - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert len(hass.states.async_all()) == 0 - - # WARNING: This test data is synthetic, rather than captured from a real device - # obj type is 0x1310, payload len is 0x2 and payload is 0x6000 - saved_callback( - make_advertisement( - "54:EF:44:E3:9C:BC", - b"XY\x97\tf\xbc\x9c\xe3D\xefT\x01\x08\x12\x05\x00\x00\x00q^\xbe\x90", - ), - BluetoothChange.ADVERTISEMENT, - ) - - await hass.async_block_till_done() - - results = hass.config_entries.flow.async_progress() - assert len(results) == 1 - result = results[0] - - assert result["step_id"] == "get_encryption_key_4_5_choose_method" + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "get_encryption_key_4_5" + assert result2["errors"]["bindkey"] == "decryption_failed" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={"next_step_id": "cloud_auth"}, + user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - device = XiaomiCloudBLEDevice( - name="x", - mac="54:EF:44:E3:9C:BC", - bindkey="5b51a7c91cde6707c9ef18dfda143a58", - ) - with patch( - "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", - return_value=device, - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={"username": "x@x.x", "password": "x"}, - ) - - assert result3["type"] is FlowResultType.ABORT - assert result3["reason"] == "reauth_successful" + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" async def test_async_step_reauth_abort_early(hass: HomeAssistant) -> None: @@ -1419,7 +1083,16 @@ async def test_async_step_reauth_abort_early(hass: HomeAssistant) -> None: device = DeviceData() - result = await entry.start_reauth_flow(hass, data={"device": device}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "title_placeholders": {"name": entry.title}, + "unique_id": entry.unique_id, + }, + data=entry.data | {"device": device}, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" diff --git a/tests/components/xiaomi_ble/test_device_trigger.py b/tests/components/xiaomi_ble/test_device_trigger.py index 218a382ada5..87a4d340d8c 100644 --- a/tests/components/xiaomi_ble/test_device_trigger.py +++ b/tests/components/xiaomi_ble/test_device_trigger.py @@ -18,6 +18,7 @@ from tests.common import ( MockConfigEntry, async_capture_events, async_get_device_automations, + async_mock_service, ) from tests.components.bluetooth import inject_bluetooth_service_info_bleak @@ -28,6 +29,12 @@ def get_device_id(mac: str) -> tuple[str, str]: return (BLUETOOTH_DOMAIN, mac) +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + async def _async_setup_xiaomi_device( hass: HomeAssistant, mac: str, data: Any | None = None ): @@ -392,9 +399,7 @@ async def test_get_triggers_for_invalid_device_id( async def test_if_fires_on_button_press( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] ) -> None: """Test for button 
press event trigger firing.""" mac = "54:EF:44:E3:9C:BC" @@ -447,17 +452,15 @@ async def test_if_fires_on_button_press( ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test_trigger_button_press" + assert len(calls) == 1 + assert calls[0].data["some"] == "test_trigger_button_press" assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() async def test_if_fires_on_double_button_long_press( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] ) -> None: """Test for button press event trigger firing.""" mac = "DC:ED:83:87:12:73" @@ -510,17 +513,15 @@ async def test_if_fires_on_double_button_long_press( ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test_trigger_right_button_press" + assert len(calls) == 1 + assert calls[0].data["some"] == "test_trigger_right_button_press" assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() async def test_if_fires_on_motion_detected( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] ) -> None: """Test for motion event trigger firing.""" mac = "DE:70:E8:B2:39:0C" @@ -566,8 +567,8 @@ async def test_if_fires_on_motion_detected( ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "test_trigger_motion_detected" + assert len(calls) == 1 + assert calls[0].data["some"] == "test_trigger_motion_detected" assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() @@ -675,9 +676,7 @@ async def test_automation_with_invalid_trigger_event_property( async def test_triggers_for_invalid__model( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], + hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] ) -> None: """Test invalid model doesn't return triggers.""" mac = "DE:70:E8:B2:39:0C" diff --git a/tests/components/xiaomi_ble/test_sensor.py b/tests/components/xiaomi_ble/test_sensor.py index 11a20a62d02..4d9a29e3111 100644 --- a/tests/components/xiaomi_ble/test_sensor.py +++ b/tests/components/xiaomi_ble/test_sensor.py @@ -11,7 +11,6 @@ from homeassistant.components.xiaomi_ble.const import CONF_SLEEPY_DEVICE, DOMAIN from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT, - STATE_ON, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -466,115 +465,6 @@ async def test_xiaomi_hhccjcy01_only_some_sources_connectable( await hass.async_block_till_done() -async def test_xiaomi_xmosb01xs(hass: HomeAssistant) -> None: - """Test XMOSB01XS multiple advertisements. - - This device has multiple advertisements before all sensors are visible. 
- """ - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="DC:8E:95:23:07:B7", - data={"bindkey": "272b1c920ef435417c49228b8ab9a563"}, - ) - entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert len(hass.states.async_all()) == 0 - inject_bluetooth_service_info_bleak( - hass, - make_advertisement( - "DC:8E:95:23:07:B7", - ( - b"\x58\x59\x83\x46\x91\xb7\x07\x23\x95\x8e\xdc\xc7\x17\x61\xc1" - b"\x24\x03\x00\x25\x44\xb0\x65" - ), - connectable=False, - ), - ) - inject_bluetooth_service_info_bleak( - hass, - make_advertisement( - "DC:8E:95:23:07:B7", - b"\x10\x59\x83\x46\x90\xb7\x07\x23\x95\x8e\xdc", - connectable=False, - ), - ) - inject_bluetooth_service_info_bleak( - hass, - make_advertisement( - "DC:8E:95:23:07:B7", - b"\x48\x59\x83\x46\x9d\x34\x45\xec\xab\xda\x93\xf9\x24\x03\x00\x9e\x01\x6d\x3d", - connectable=False, - ), - ) - inject_bluetooth_service_info_bleak( - hass, - make_advertisement( - "DC:8E:95:23:07:B7", - ( - b"\x58\x59\x83\x46\xa9\xb7\x07\x23\x95\x8e\xdc\xc6\x59\xa2\xdc\xc5" - b"\x24\x03\x00\xa0\x4d\x0d\x45" - ), - connectable=False, - ), - ) - inject_bluetooth_service_info_bleak( - hass, - make_advertisement( - "DC:8E:95:23:07:B7", - ( - b"\x58\x59\x83\x46\xa4\xb7\x07\x23\x95\x8e\xdc\x77\x2a\xe2\x5c\x11" - b"\x24\x03\x00\xab\x87\x7b\xd7" - ), - connectable=False, - ), - ) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 4 - - occupancy_sensor = hass.states.get("binary_sensor.occupancy_sensor_07b7_occupancy") - occupancy_sensor_attribtes = occupancy_sensor.attributes - assert occupancy_sensor.state == STATE_ON - assert ( - occupancy_sensor_attribtes[ATTR_FRIENDLY_NAME] - == "Occupancy Sensor 07B7 Occupancy" - ) - - illum_sensor = hass.states.get("sensor.occupancy_sensor_07b7_illuminance") - illum_sensor_attr = illum_sensor.attributes - assert illum_sensor.state == "111.0" - assert illum_sensor_attr[ATTR_FRIENDLY_NAME] == "Occupancy Sensor 07B7 Illuminance" - assert illum_sensor_attr[ATTR_UNIT_OF_MEASUREMENT] == "lx" - assert illum_sensor_attr[ATTR_STATE_CLASS] == "measurement" - - illum_sensor = hass.states.get("sensor.occupancy_sensor_07b7_duration_detected") - illum_sensor_attr = illum_sensor.attributes - assert illum_sensor.state == "2" - assert ( - illum_sensor_attr[ATTR_FRIENDLY_NAME] - == "Occupancy Sensor 07B7 Duration detected" - ) - assert illum_sensor_attr[ATTR_UNIT_OF_MEASUREMENT] == "min" - assert illum_sensor_attr[ATTR_STATE_CLASS] == "measurement" - - illum_sensor = hass.states.get("sensor.occupancy_sensor_07b7_duration_cleared") - illum_sensor_attr = illum_sensor.attributes - assert illum_sensor.state == "2" - assert ( - illum_sensor_attr[ATTR_FRIENDLY_NAME] - == "Occupancy Sensor 07B7 Duration cleared" - ) - assert illum_sensor_attr[ATTR_UNIT_OF_MEASUREMENT] == "min" - assert illum_sensor_attr[ATTR_STATE_CLASS] == "measurement" - - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - assert entry.data[CONF_SLEEPY_DEVICE] is True - - async def test_xiaomi_cgdk2_bind_key(hass: HomeAssistant) -> None: """Test CGDK2 bind key. 
diff --git a/tests/components/xiaomi_miio/test_button.py b/tests/components/xiaomi_miio/test_button.py index 1f79a3ec0d0..8159d7c49e5 100644 --- a/tests/components/xiaomi_miio/test_button.py +++ b/tests/components/xiaomi_miio/test_button.py @@ -4,7 +4,7 @@ from unittest.mock import MagicMock, patch import pytest -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.button import DOMAIN, SERVICE_PRESS from homeassistant.components.xiaomi_miio.const import ( CONF_FLOW_TYPE, DOMAIN as XIAOMI_DOMAIN, @@ -68,7 +68,7 @@ async def test_vacuum_button_press(hass: HomeAssistant) -> None: pressed_at = dt_util.utcnow() await hass.services.async_call( - BUTTON_DOMAIN, + DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id + "_reset_side_brush"}, blocking=True, @@ -81,7 +81,7 @@ async def test_vacuum_button_press(hass: HomeAssistant) -> None: async def setup_component(hass: HomeAssistant, entity_name: str) -> str: """Set up vacuum component.""" - entity_id = f"{BUTTON_DOMAIN}.{entity_name}" + entity_id = f"{DOMAIN}.{entity_name}" config_entry = MockConfigEntry( domain=XIAOMI_DOMAIN, diff --git a/tests/components/xiaomi_miio/test_config_flow.py b/tests/components/xiaomi_miio/test_config_flow.py index 146526c69a5..481be189ddd 100644 --- a/tests/components/xiaomi_miio/test_config_flow.py +++ b/tests/components/xiaomi_miio/test_config_flow.py @@ -704,7 +704,7 @@ async def test_config_flow_step_device_manual_model_succes(hass: HomeAssistant) } -async def config_flow_device_success(hass: HomeAssistant, model_to_test: str) -> None: +async def config_flow_device_success(hass, model_to_test): """Test a successful config flow for a device (base class).""" result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -748,7 +748,7 @@ async def config_flow_device_success(hass: HomeAssistant, model_to_test: str) -> } -async def config_flow_generic_roborock(hass: HomeAssistant) -> None: +async def config_flow_generic_roborock(hass): """Test a successful config flow for a generic roborock vacuum.""" dummy_model = "roborock.vacuum.dummy" @@ -794,9 +794,7 @@ async def config_flow_generic_roborock(hass: HomeAssistant) -> None: } -async def zeroconf_device_success( - hass: HomeAssistant, zeroconf_name_to_test: str, model_to_test: str -) -> None: +async def zeroconf_device_success(hass, zeroconf_name_to_test, model_to_test): """Test a successful zeroconf discovery of a device (base class).""" result = await hass.config_entries.flow.async_init( const.DOMAIN, @@ -976,7 +974,11 @@ async def test_reauth(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - result = await config_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + const.DOMAIN, + context={"source": config_entries.SOURCE_REAUTH}, + data=config_entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/xiaomi_miio/test_select.py b/tests/components/xiaomi_miio/test_select.py index 566f1516fdf..f2f04127d75 100644 --- a/tests/components/xiaomi_miio/test_select.py +++ b/tests/components/xiaomi_miio/test_select.py @@ -12,7 +12,7 @@ import pytest from homeassistant.components.select import ( ATTR_OPTION, ATTR_OPTIONS, - DOMAIN as SELECT_DOMAIN, + DOMAIN, SERVICE_SELECT_OPTION, ) from homeassistant.components.xiaomi_miio import UPDATE_INTERVAL @@ -141,9 +141,9 @@ async def 
test_select_coordinator_update(hass: HomeAssistant, setup_test) -> Non assert state.state == "left" -async def setup_component(hass: HomeAssistant, entity_name: str) -> str: +async def setup_component(hass, entity_name): """Set up component.""" - entity_id = f"{SELECT_DOMAIN}.{entity_name}" + entity_id = f"{DOMAIN}.{entity_name}" config_entry = MockConfigEntry( domain=XIAOMI_DOMAIN, diff --git a/tests/components/xiaomi_miio/test_vacuum.py b/tests/components/xiaomi_miio/test_vacuum.py index 76321a1a0a8..462145d16ab 100644 --- a/tests/components/xiaomi_miio/test_vacuum.py +++ b/tests/components/xiaomi_miio/test_vacuum.py @@ -1,18 +1,18 @@ """The tests for the Xiaomi vacuum platform.""" -from collections.abc import Generator from datetime import datetime, time, timedelta from unittest import mock from unittest.mock import MagicMock, patch from miio import DeviceException import pytest +from typing_extensions import Generator from homeassistant.components.vacuum import ( ATTR_BATTERY_ICON, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, - DOMAIN as VACUUM_DOMAIN, + DOMAIN, SERVICE_CLEAN_SPOT, SERVICE_LOCATE, SERVICE_PAUSE, @@ -283,7 +283,7 @@ async def test_xiaomi_vacuum_services( # Call services await hass.services.async_call( - VACUUM_DOMAIN, SERVICE_START, {"entity_id": entity_id}, blocking=True + DOMAIN, SERVICE_START, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls( [mock.call.resume_or_start()], any_order=True @@ -292,42 +292,42 @@ async def test_xiaomi_vacuum_services( mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - VACUUM_DOMAIN, SERVICE_PAUSE, {"entity_id": entity_id}, blocking=True + DOMAIN, SERVICE_PAUSE, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.pause()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - VACUUM_DOMAIN, SERVICE_STOP, {"entity_id": entity_id}, blocking=True + DOMAIN, SERVICE_STOP, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.stop()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - VACUUM_DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": entity_id}, blocking=True + DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.home()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - VACUUM_DOMAIN, SERVICE_LOCATE, {"entity_id": entity_id}, blocking=True + DOMAIN, SERVICE_LOCATE, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.find()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - VACUUM_DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": entity_id}, blocking=True + DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.spot()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - VACUUM_DOMAIN, + DOMAIN, SERVICE_SEND_COMMAND, {"entity_id": entity_id, "command": "raw"}, blocking=True, @@ -339,7 +339,7 @@ async def 
test_xiaomi_vacuum_services( mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - VACUUM_DOMAIN, + DOMAIN, SERVICE_SEND_COMMAND, {"entity_id": entity_id, "command": "raw", "params": {"k1": 2}}, blocking=True, @@ -498,7 +498,7 @@ async def test_xiaomi_vacuum_fanspeeds( # Set speed service: await hass.services.async_call( - VACUUM_DOMAIN, + DOMAIN, SERVICE_SET_FAN_SPEED, {"entity_id": entity_id, "fan_speed": 60}, blocking=True, @@ -512,7 +512,7 @@ async def test_xiaomi_vacuum_fanspeeds( fan_speed_dict = mock_mirobo_fanspeeds.fan_speed_presets() await hass.services.async_call( - VACUUM_DOMAIN, + DOMAIN, SERVICE_SET_FAN_SPEED, {"entity_id": entity_id, "fan_speed": "Medium"}, blocking=True, @@ -525,7 +525,7 @@ async def test_xiaomi_vacuum_fanspeeds( assert "ERROR" not in caplog.text await hass.services.async_call( - VACUUM_DOMAIN, + DOMAIN, SERVICE_SET_FAN_SPEED, {"entity_id": entity_id, "fan_speed": "invent"}, blocking=True, @@ -533,9 +533,9 @@ async def test_xiaomi_vacuum_fanspeeds( assert "Fan speed step not recognized" in caplog.text -async def setup_component(hass: HomeAssistant, entity_name: str) -> str: +async def setup_component(hass, entity_name): """Set up vacuum component.""" - entity_id = f"{VACUUM_DOMAIN}.{entity_name}" + entity_id = f"{DOMAIN}.{entity_name}" config_entry = MockConfigEntry( domain=XIAOMI_DOMAIN, diff --git a/tests/components/yale/__init__.py b/tests/components/yale/__init__.py deleted file mode 100644 index 7f72d348042..00000000000 --- a/tests/components/yale/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the yale component.""" diff --git a/tests/components/yale/conftest.py b/tests/components/yale/conftest.py deleted file mode 100644 index 3e633430846..00000000000 --- a/tests/components/yale/conftest.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Yale tests conftest.""" - -from unittest.mock import patch - -import pytest -from yalexs.manager.ratelimit import _RateLimitChecker - -from homeassistant.components.yale.const import DOMAIN -from homeassistant.core import HomeAssistant - -from .mocks import mock_client_credentials, mock_config_entry - -from tests.common import MockConfigEntry, load_fixture - - -@pytest.fixture(name="mock_discovery", autouse=True) -def mock_discovery_fixture(): - """Mock discovery to avoid loading the whole bluetooth stack.""" - with patch( - "homeassistant.components.yale.data.discovery_flow.async_create_flow" - ) as mock_discovery: - yield mock_discovery - - -@pytest.fixture(name="disable_ratelimit_checks", autouse=True) -def disable_ratelimit_checks_fixture(): - """Disable rate limit checks.""" - with patch.object(_RateLimitChecker, "register_wakeup"): - yield - - -@pytest.fixture(name="mock_config_entry") -def mock_config_entry_fixture(jwt: str) -> MockConfigEntry: - """Return the default mocked config entry.""" - return mock_config_entry(jwt=jwt) - - -@pytest.fixture(name="jwt") -def load_jwt_fixture() -> str: - """Load Fixture data.""" - return load_fixture("jwt", DOMAIN).strip("\n") - - -@pytest.fixture(name="reauth_jwt") -def load_reauth_jwt_fixture() -> str: - """Load Fixture data.""" - return load_fixture("reauth_jwt", DOMAIN).strip("\n") - - -@pytest.fixture(name="reauth_jwt_wrong_account") -def load_reauth_jwt_wrong_account_fixture() -> str: - """Load Fixture data.""" - return load_fixture("reauth_jwt_wrong_account", DOMAIN).strip("\n") - - -@pytest.fixture(name="client_credentials", autouse=True) -async def mock_client_credentials_fixture(hass: HomeAssistant) -> None: - """Mock client credentials.""" - await 
mock_client_credentials(hass) - - -@pytest.fixture(name="skip_cloud", autouse=True) -def skip_cloud_fixture(): - """Skip setting up cloud. - - Cloud already has its own tests for account link. - - We do not need to test it here as we only need to test our - usage of the oauth2 helpers. - """ - with patch("homeassistant.components.cloud.async_setup", return_value=True): - yield diff --git a/tests/components/yale/fixtures/get_activity.bridge_offline.json b/tests/components/yale/fixtures/get_activity.bridge_offline.json deleted file mode 100644 index 9c2ded96665..00000000000 --- a/tests/components/yale/fixtures/get_activity.bridge_offline.json +++ /dev/null @@ -1,36 +0,0 @@ -[ - { - "entities": { - "activity": "mockActivity2", - "house": "123", - "device": "online_with_doorsense", - "callingUser": "mockUserId2", - "otherUser": "deleted" - }, - "callingUser": { - "LastName": "elven princess", - "UserID": "mockUserId2", - "FirstName": "Your favorite" - }, - "otherUser": { - "LastName": "User", - "UserName": "deleteduser", - "FirstName": "Unknown", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "deviceType": "lock", - "deviceName": "MockHouseTDoor", - "action": "associated_bridge_offline", - "dateTime": 1582007218000, - "info": { - "remote": true, - "DateLogActionID": "ABC+Time" - }, - "deviceID": "online_with_doorsense", - "house": { - "houseName": "MockHouse", - "houseID": "123" - } - } -] diff --git a/tests/components/yale/fixtures/get_activity.bridge_online.json b/tests/components/yale/fixtures/get_activity.bridge_online.json deleted file mode 100644 index 6f8b5e6a4a6..00000000000 --- a/tests/components/yale/fixtures/get_activity.bridge_online.json +++ /dev/null @@ -1,36 +0,0 @@ -[ - { - "entities": { - "activity": "mockActivity2", - "house": "123", - "device": "online_with_doorsense", - "callingUser": "mockUserId2", - "otherUser": "deleted" - }, - "callingUser": { - "LastName": "elven princess", - "UserID": "mockUserId2", - "FirstName": "Your favorite" - }, - "otherUser": { - "LastName": "User", - "UserName": "deleteduser", - "FirstName": "Unknown", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "deviceType": "lock", - "deviceName": "MockHouseTDoor", - "action": "associated_bridge_online", - "dateTime": 1582007218000, - "info": { - "remote": true, - "DateLogActionID": "ABC+Time" - }, - "deviceID": "online_with_doorsense", - "house": { - "houseName": "MockHouse", - "houseID": "123" - } - } -] diff --git a/tests/components/yale/fixtures/get_activity.doorbell_motion.json b/tests/components/yale/fixtures/get_activity.doorbell_motion.json deleted file mode 100644 index cf0f231a49a..00000000000 --- a/tests/components/yale/fixtures/get_activity.doorbell_motion.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - { - "otherUser": { - "FirstName": "Unknown", - "UserName": "deleteduser", - "LastName": "User", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "dateTime": 1582663119959, - "deviceID": "K98GiDT45GUL", - "info": { - "videoUploadProgress": "in_progress", - "image": { - "resource_type": "image", - "etag": "fdsf", - "created_at": "2020-02-25T20:38:39Z", - "type": "upload", - "format": "jpg", - "version": 1582663119, - "secure_url": "https://res.cloudinary.com/updated_image.jpg", - "signature": "fdfdfd", - "url": "http://res.cloudinary.com/updated_image.jpg", - "bytes": 48545, - "placeholder": false, - "original_filename": "file", - "width": 720, - "tags": [], - "public_id": "xnsj5gphpzij9brifpf4", - "height": 576 - }, - "dvrID": "dvr", - "videoAvailable": false, - "hasSubscription": false 
- }, - "callingUser": { - "LastName": "User", - "UserName": "deleteduser", - "FirstName": "Unknown", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "house": { - "houseName": "K98GiDT45GUL", - "houseID": "na" - }, - "action": "doorbell_motion_detected", - "deviceType": "doorbell", - "entities": { - "otherUser": "deleted", - "house": "na", - "device": "K98GiDT45GUL", - "activity": "de5585cfd4eae900bb5ba3dc", - "callingUser": "deleted" - }, - "deviceName": "Front Door" - } -] diff --git a/tests/components/yale/fixtures/get_activity.jammed.json b/tests/components/yale/fixtures/get_activity.jammed.json deleted file mode 100644 index 782a13f9c73..00000000000 --- a/tests/components/yale/fixtures/get_activity.jammed.json +++ /dev/null @@ -1,36 +0,0 @@ -[ - { - "entities": { - "activity": "mockActivity2", - "house": "123", - "device": "online_with_doorsense", - "callingUser": "mockUserId2", - "otherUser": "deleted" - }, - "callingUser": { - "LastName": "elven princess", - "UserID": "mockUserId2", - "FirstName": "Your favorite" - }, - "otherUser": { - "LastName": "User", - "UserName": "deleteduser", - "FirstName": "Unknown", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "deviceType": "lock", - "deviceName": "MockHouseTDoor", - "action": "jammed", - "dateTime": 1582007218000, - "info": { - "remote": true, - "DateLogActionID": "ABC+Time" - }, - "deviceID": "online_with_doorsense", - "house": { - "houseName": "MockHouse", - "houseID": "123" - } - } -] diff --git a/tests/components/yale/fixtures/get_activity.lock.json b/tests/components/yale/fixtures/get_activity.lock.json deleted file mode 100644 index b40e7d61ccf..00000000000 --- a/tests/components/yale/fixtures/get_activity.lock.json +++ /dev/null @@ -1,36 +0,0 @@ -[ - { - "entities": { - "activity": "mockActivity2", - "house": "123", - "device": "online_with_doorsense", - "callingUser": "mockUserId2", - "otherUser": "deleted" - }, - "callingUser": { - "LastName": "elven princess", - "UserID": "mockUserId2", - "FirstName": "Your favorite" - }, - "otherUser": { - "LastName": "User", - "UserName": "deleteduser", - "FirstName": "Unknown", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "deviceType": "lock", - "deviceName": "MockHouseTDoor", - "action": "lock", - "dateTime": 1582007218000, - "info": { - "remote": true, - "DateLogActionID": "ABC+Time" - }, - "deviceID": "online_with_doorsense", - "house": { - "houseName": "MockHouse", - "houseID": "123" - } - } -] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_autorelock.json b/tests/components/yale/fixtures/get_activity.lock_from_autorelock.json deleted file mode 100644 index 38c26ffb7dd..00000000000 --- a/tests/components/yale/fixtures/get_activity.lock_from_autorelock.json +++ /dev/null @@ -1,36 +0,0 @@ -[ - { - "entities": { - "activity": "mockActivity2", - "house": "123", - "device": "online_with_doorsense", - "callingUser": "mockUserId2", - "otherUser": "deleted" - }, - "callingUser": { - "LastName": "Relock", - "UserID": "automaticrelock", - "FirstName": "Auto" - }, - "otherUser": { - "LastName": "User", - "UserName": "deleteduser", - "FirstName": "Unknown", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "deviceType": "lock", - "deviceName": "MockHouseTDoor", - "action": "lock", - "dateTime": 1582007218000, - "info": { - "remote": false, - "DateLogActionID": "ABC+Time" - }, - "deviceID": "online_with_doorsense", - "house": { - "houseName": "MockHouse", - "houseID": "123" - } - } -] diff --git 
a/tests/components/yale/fixtures/get_activity.lock_from_bluetooth.json b/tests/components/yale/fixtures/get_activity.lock_from_bluetooth.json deleted file mode 100644 index bfbc621e064..00000000000 --- a/tests/components/yale/fixtures/get_activity.lock_from_bluetooth.json +++ /dev/null @@ -1,36 +0,0 @@ -[ - { - "entities": { - "activity": "mockActivity2", - "house": "123", - "device": "online_with_doorsense", - "callingUser": "mockUserId2", - "otherUser": "deleted" - }, - "callingUser": { - "LastName": "elven princess", - "UserID": "mockUserId2", - "FirstName": "Your favorite" - }, - "otherUser": { - "LastName": "User", - "UserName": "deleteduser", - "FirstName": "Unknown", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "deviceType": "lock", - "deviceName": "MockHouseTDoor", - "action": "lock", - "dateTime": 1582007218000, - "info": { - "remote": false, - "DateLogActionID": "ABC+Time" - }, - "deviceID": "online_with_doorsense", - "house": { - "houseName": "MockHouse", - "houseID": "123" - } - } -] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_keypad.json b/tests/components/yale/fixtures/get_activity.lock_from_keypad.json deleted file mode 100644 index 1b1e13e67dd..00000000000 --- a/tests/components/yale/fixtures/get_activity.lock_from_keypad.json +++ /dev/null @@ -1,37 +0,0 @@ -[ - { - "entities": { - "activity": "mockActivity2", - "house": "123", - "device": "online_with_doorsense", - "callingUser": "mockUserId2", - "otherUser": "deleted" - }, - "callingUser": { - "LastName": "elven princess", - "UserID": "mockUserId2", - "FirstName": "Your favorite" - }, - "otherUser": { - "LastName": "User", - "UserName": "deleteduser", - "FirstName": "Unknown", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "deviceType": "lock", - "deviceName": "MockHouseTDoor", - "action": "lock", - "dateTime": 1582007218000, - "info": { - "remote": false, - "keypad": true, - "DateLogActionID": "ABC+Time" - }, - "deviceID": "online_with_doorsense", - "house": { - "houseName": "MockHouse", - "houseID": "123" - } - } -] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_manual.json b/tests/components/yale/fixtures/get_activity.lock_from_manual.json deleted file mode 100644 index e2fc195cfda..00000000000 --- a/tests/components/yale/fixtures/get_activity.lock_from_manual.json +++ /dev/null @@ -1,39 +0,0 @@ -[ - { - "entities": { - "activity": "mockActivity2", - "house": "123", - "device": "online_with_doorsense", - "callingUser": "mockUserId2", - "otherUser": "deleted" - }, - "callingUser": { - "LastName": "elven princess", - "UserID": "mockUserId2", - "FirstName": "Your favorite" - }, - "otherUser": { - "LastName": "User", - "UserName": "deleteduser", - "FirstName": "Unknown", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "deviceType": "lock", - "deviceName": "MockHouseTDoor", - "action": "lock", - "dateTime": 1582007218000, - "info": { - "remote": false, - "keypad": false, - "manual": true, - "tag": false, - "DateLogActionID": "ABC+Time" - }, - "deviceID": "online_with_doorsense", - "house": { - "houseName": "MockHouse", - "houseID": "123" - } - } -] diff --git a/tests/components/yale/fixtures/get_activity.locking.json b/tests/components/yale/fixtures/get_activity.locking.json deleted file mode 100644 index ad2df6f7e91..00000000000 --- a/tests/components/yale/fixtures/get_activity.locking.json +++ /dev/null @@ -1,36 +0,0 @@ -[ - { - "entities": { - "activity": "mockActivity2", - "house": "123", - "device": "online_with_doorsense", - "callingUser": "mockUserId2", - 
"otherUser": "deleted" - }, - "callingUser": { - "LastName": "elven princess", - "UserID": "mockUserId2", - "FirstName": "Your favorite" - }, - "otherUser": { - "LastName": "User", - "UserName": "deleteduser", - "FirstName": "Unknown", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "deviceType": "lock", - "deviceName": "MockHouseTDoor", - "action": "locking", - "dateTime": 1582007218000, - "info": { - "remote": true, - "DateLogActionID": "ABC+Time" - }, - "deviceID": "online_with_doorsense", - "house": { - "houseName": "MockHouse", - "houseID": "123" - } - } -] diff --git a/tests/components/yale/fixtures/get_activity.unlock_from_manual.json b/tests/components/yale/fixtures/get_activity.unlock_from_manual.json deleted file mode 100644 index e8bf95818ce..00000000000 --- a/tests/components/yale/fixtures/get_activity.unlock_from_manual.json +++ /dev/null @@ -1,39 +0,0 @@ -[ - { - "entities": { - "activity": "mockActivity2", - "house": "123", - "device": "online_with_doorsense", - "callingUser": "mockUserId2", - "otherUser": "deleted" - }, - "callingUser": { - "LastName": "elven princess", - "UserID": "mockUserId2", - "FirstName": "Your favorite" - }, - "otherUser": { - "LastName": "User", - "UserName": "deleteduser", - "FirstName": "Unknown", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "deviceType": "lock", - "deviceName": "MockHouseTDoor", - "action": "unlock", - "dateTime": 1582007218000, - "info": { - "remote": false, - "keypad": false, - "manual": true, - "tag": false, - "DateLogActionID": "ABC+Time" - }, - "deviceID": "online_with_doorsense", - "house": { - "houseName": "MockHouse", - "houseID": "123" - } - } -] diff --git a/tests/components/yale/fixtures/get_activity.unlock_from_tag.json b/tests/components/yale/fixtures/get_activity.unlock_from_tag.json deleted file mode 100644 index 57876428677..00000000000 --- a/tests/components/yale/fixtures/get_activity.unlock_from_tag.json +++ /dev/null @@ -1,39 +0,0 @@ -[ - { - "entities": { - "activity": "mockActivity2", - "house": "123", - "device": "online_with_doorsense", - "callingUser": "mockUserId2", - "otherUser": "deleted" - }, - "callingUser": { - "LastName": "elven princess", - "UserID": "mockUserId2", - "FirstName": "Your favorite" - }, - "otherUser": { - "LastName": "User", - "UserName": "deleteduser", - "FirstName": "Unknown", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "deviceType": "lock", - "deviceName": "MockHouseTDoor", - "action": "unlock", - "dateTime": 1582007218000, - "info": { - "remote": false, - "keypad": false, - "manual": false, - "tag": true, - "DateLogActionID": "ABC+Time" - }, - "deviceID": "online_with_doorsense", - "house": { - "houseName": "MockHouse", - "houseID": "123" - } - } -] diff --git a/tests/components/yale/fixtures/get_activity.unlocking.json b/tests/components/yale/fixtures/get_activity.unlocking.json deleted file mode 100644 index 0fbd0be3eb8..00000000000 --- a/tests/components/yale/fixtures/get_activity.unlocking.json +++ /dev/null @@ -1,36 +0,0 @@ -[ - { - "entities": { - "activity": "mockActivity2", - "house": "123", - "device": "online_with_doorsense", - "callingUser": "mockUserId2", - "otherUser": "deleted" - }, - "callingUser": { - "LastName": "elven princess", - "UserID": "mockUserId2", - "FirstName": "Your favorite" - }, - "otherUser": { - "LastName": "User", - "UserName": "deleteduser", - "FirstName": "Unknown", - "UserID": "deleted", - "PhoneNo": "deleted" - }, - "deviceType": "lock", - "deviceName": "MockHouseTDoor", - "action": "unlocking", - "dateTime": 
1582007218000, - "info": { - "remote": true, - "DateLogActionID": "ABC+Time" - }, - "deviceID": "online_with_doorsense", - "house": { - "houseName": "MockHouse", - "houseID": "123" - } - } -] diff --git a/tests/components/yale/fixtures/get_doorbell.json b/tests/components/yale/fixtures/get_doorbell.json deleted file mode 100644 index 32714211618..00000000000 --- a/tests/components/yale/fixtures/get_doorbell.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "status_timestamp": 1512811834532, - "appID": "august-iphone", - "LockID": "BBBB1F5F11114C24CCCC97571DD6AAAA", - "recentImage": { - "original_filename": "file", - "placeholder": false, - "bytes": 24476, - "height": 640, - "format": "jpg", - "width": 480, - "version": 1512892814, - "resource_type": "image", - "etag": "54966926be2e93f77d498a55f247661f", - "tags": [], - "public_id": "qqqqt4ctmxwsysylaaaa", - "url": "http://image.com/vmk16naaaa7ibuey7sar.jpg", - "created_at": "2017-12-10T08:01:35Z", - "signature": "75z47ca21b5e8ffda21d2134e478a2307c4625da", - "secure_url": "https://image.com/vmk16naaaa7ibuey7sar.jpg", - "type": "upload" - }, - "settings": { - "keepEncoderRunning": true, - "videoResolution": "640x480", - "minACNoScaling": 40, - "irConfiguration": 8448272, - "directLink": true, - "overlayEnabled": true, - "notify_when_offline": true, - "micVolume": 100, - "bitrateCeiling": 512000, - "initialBitrate": 384000, - "IVAEnabled": false, - "turnOffCamera": false, - "ringSoundEnabled": true, - "JPGQuality": 70, - "motion_notifications": true, - "speakerVolume": 92, - "buttonpush_notifications": true, - "ABREnabled": true, - "debug": false, - "batteryLowThreshold": 3.1, - "batteryRun": false, - "IREnabled": true, - "batteryUseThreshold": 3.4 - }, - "doorbellServerURL": "https://doorbells.august.com", - "name": "Front Door", - "createdAt": "2016-11-26T22:27:11.176Z", - "installDate": "2016-11-26T22:27:11.176Z", - "serialNumber": "tBXZR0Z35E", - "dvrSubscriptionSetupDone": true, - "caps": ["reconnect"], - "doorbellID": "K98GiDT45GUL", - "HouseID": "mockhouseid1", - "telemetry": { - "signal_level": -56, - "date": "2017-12-10 08:05:12", - "battery_soc": 96, - "battery": 4.061763, - "steady_ac_in": 22.196405, - "BSSID": "88:ee:00:dd:aa:11", - "SSID": "foo_ssid", - "updated_at": "2017-12-10T08:05:13.650Z", - "temperature": 28.25, - "wifi_freq": 5745, - "load_average": "0.50 0.47 0.35 1/154 9345", - "link_quality": 54, - "battery_soh": 95, - "uptime": "16168.75 13830.49", - "ip_addr": "10.0.1.11", - "doorbell_low_battery": false, - "ac_in": 23.856874 - }, - "installUserID": "c3b2a94e-373e-aaaa-bbbb-36e996827777", - "status": "doorbell_call_status_online", - "firmwareVersion": "2.3.0-RC153+201711151527", - "pubsubChannel": "7c7a6672-59c8-3333-ffff-dcd98705cccc", - "updatedAt": "2017-12-10T08:05:13.650Z" -} diff --git a/tests/components/yale/fixtures/get_doorbell.nobattery.json b/tests/components/yale/fixtures/get_doorbell.nobattery.json deleted file mode 100644 index 2a7f1e2d3b2..00000000000 --- a/tests/components/yale/fixtures/get_doorbell.nobattery.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "status_timestamp": 1512811834532, - "appID": "august-iphone", - "LockID": "BBBB1F5F11114C24CCCC97571DD6AAAA", - "recentImage": { - "original_filename": "file", - "placeholder": false, - "bytes": 24476, - "height": 640, - "format": "jpg", - "width": 480, - "version": 1512892814, - "resource_type": "image", - "etag": "54966926be2e93f77d498a55f247661f", - "tags": [], - "public_id": "qqqqt4ctmxwsysylaaaa", - "url": "http://image.com/vmk16naaaa7ibuey7sar.jpg", - 
"created_at": "2017-12-10T08:01:35Z", - "signature": "75z47ca21b5e8ffda21d2134e478a2307c4625da", - "secure_url": "https://image.com/vmk16naaaa7ibuey7sar.jpg", - "type": "upload" - }, - "settings": { - "keepEncoderRunning": true, - "videoResolution": "640x480", - "minACNoScaling": 40, - "irConfiguration": 8448272, - "directLink": true, - "overlayEnabled": true, - "notify_when_offline": true, - "micVolume": 100, - "bitrateCeiling": 512000, - "initialBitrate": 384000, - "IVAEnabled": false, - "turnOffCamera": false, - "ringSoundEnabled": true, - "JPGQuality": 70, - "motion_notifications": true, - "speakerVolume": 92, - "buttonpush_notifications": true, - "ABREnabled": true, - "debug": false, - "batteryLowThreshold": 3.1, - "batteryRun": false, - "IREnabled": true, - "batteryUseThreshold": 3.4 - }, - "doorbellServerURL": "https://doorbells.august.com", - "name": "Front Door", - "createdAt": "2016-11-26T22:27:11.176Z", - "installDate": "2016-11-26T22:27:11.176Z", - "serialNumber": "tBXZR0Z35E", - "dvrSubscriptionSetupDone": true, - "caps": ["reconnect"], - "doorbellID": "K98GiDT45GUL", - "HouseID": "3dd2accaea08", - "telemetry": { - "signal_level": -56, - "date": "2017-12-10 08:05:12", - "steady_ac_in": 22.196405, - "BSSID": "88:ee:00:dd:aa:11", - "SSID": "foo_ssid", - "updated_at": "2017-12-10T08:05:13.650Z", - "temperature": 28.25, - "wifi_freq": 5745, - "load_average": "0.50 0.47 0.35 1/154 9345", - "link_quality": 54, - "uptime": "16168.75 13830.49", - "ip_addr": "10.0.1.11", - "doorbell_low_battery": false, - "ac_in": 23.856874 - }, - "installUserID": "c3b2a94e-373e-aaaa-bbbb-36e996827777", - "status": "doorbell_call_status_online", - "firmwareVersion": "2.3.0-RC153+201711151527", - "pubsubChannel": "7c7a6672-59c8-3333-ffff-dcd98705cccc", - "updatedAt": "2017-12-10T08:05:13.650Z" -} diff --git a/tests/components/yale/fixtures/get_doorbell.offline.json b/tests/components/yale/fixtures/get_doorbell.offline.json deleted file mode 100644 index 13a8483c995..00000000000 --- a/tests/components/yale/fixtures/get_doorbell.offline.json +++ /dev/null @@ -1,126 +0,0 @@ -{ - "recentImage": { - "tags": [], - "height": 576, - "public_id": "fdsfds", - "bytes": 50013, - "resource_type": "image", - "original_filename": "file", - "version": 1582242766, - "format": "jpg", - "signature": "fdsfdsf", - "created_at": "2020-02-20T23:52:46Z", - "type": "upload", - "placeholder": false, - "url": "http://res.cloudinary.com/august-com/image/upload/ccc/ccccc.jpg", - "secure_url": "https://res.cloudinary.com/august-com/image/upload/cc/cccc.jpg", - "etag": "zds", - "width": 720 - }, - "firmwareVersion": "3.1.0-HYDRC75+201909251139", - "doorbellServerURL": "https://doorbells.august.com", - "installUserID": "mock", - "caps": ["reconnect", "webrtc", "tcp_wakeup"], - "messagingProtocol": "pubnub", - "createdAt": "2020-02-12T03:52:28.719Z", - "invitations": [], - "appID": "august-iphone-v5", - "HouseID": "houseid1", - "doorbellID": "tmt100", - "name": "Front Door", - "settings": { - "batteryUseThreshold": 3.4, - "brightness": 50, - "batteryChargeCurrent": 60, - "overCurrentThreshold": -250, - "irLedBrightness": 40, - "videoResolution": "720x576", - "pirPulseCounter": 1, - "contrast": 50, - "micVolume": 50, - "directLink": true, - "auto_contrast_mode": 0, - "saturation": 50, - "motion_notifications": true, - "pirSensitivity": 20, - "pirBlindTime": 7, - "notify_when_offline": false, - "nightModeAlsThreshold": 10, - "minACNoScaling": 40, - "DVRRecordingTimeout": 15, - "turnOffCamera": false, - "debug": false, - 
"keepEncoderRunning": true, - "pirWindowTime": 0, - "bitrateCeiling": 2000000, - "backlight_comp": false, - "buttonpush_notifications": true, - "buttonpush_notifications_partners": false, - "minimumSnapshotInterval": 30, - "pirConfiguration": 272, - "batteryLowThreshold": 3.1, - "sharpness": 50, - "ABREnabled": true, - "hue": 50, - "initialBitrate": 1000000, - "ringSoundEnabled": true, - "IVAEnabled": false, - "overlayEnabled": true, - "speakerVolume": 92, - "ringRepetitions": 3, - "powerProfilePreset": -1, - "irConfiguration": 16836880, - "JPGQuality": 70, - "IREnabled": true - }, - "updatedAt": "2020-02-20T23:58:21.580Z", - "serialNumber": "abc", - "installDate": "2019-02-12T03:52:28.719Z", - "dvrSubscriptionSetupDone": true, - "pubsubChannel": "mock", - "chimes": [ - { - "updatedAt": "2020-02-12T03:55:38.805Z", - "_id": "cccc", - "type": 1, - "serialNumber": "ccccc", - "doorbellID": "tmt100", - "name": "Living Room", - "chimeID": "cccc", - "createdAt": "2020-02-12T03:55:38.805Z", - "firmware": "3.1.16" - } - ], - "telemetry": { - "battery": 3.985, - "battery_soc": 81, - "load_average": "0.45 0.18 0.07 4/98 831", - "ip_addr": "192.168.100.174", - "BSSID": "snp", - "uptime": "96.55 70.59", - "SSID": "bob", - "updated_at": "2020-02-20T23:53:09.586Z", - "dtim_period": 0, - "wifi_freq": 2462, - "date": "2020-02-20 11:47:36", - "BSSIDManufacturer": "Ubiquiti - Ubiquiti Networks Inc.", - "battery_temp": 22, - "battery_avg_cur": -291, - "beacon_interval": 0, - "signal_level": -49, - "battery_soh": 95, - "doorbell_low_battery": false - }, - "secChipCertSerial": "", - "tcpKeepAlive": { - "keepAliveUUID": "mock", - "wakeUp": { - "token": "wakemeup", - "lastUpdated": 1582242723931 - } - }, - "statusUpdatedAtMs": 1582243101579, - "status": "doorbell_offline", - "type": "hydra1", - "HouseName": "housename" -} diff --git a/tests/components/yale/fixtures/get_lock.doorsense_init.json b/tests/components/yale/fixtures/get_lock.doorsense_init.json deleted file mode 100644 index 1132cc61a8d..00000000000 --- a/tests/components/yale/fixtures/get_lock.doorsense_init.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "LockName": "Front Door Lock", - "Type": 2, - "Created": "2017-12-10T03:12:09.210Z", - "Updated": "2017-12-10T03:12:09.210Z", - "LockID": "A6697750D607098BAE8D6BAA11EF8063", - "HouseID": "000000000000", - "HouseName": "My House", - "Calibrated": false, - "skuNumber": "AUG-SL02-M02-S02", - "timeZone": "America/Vancouver", - "battery": 0.88, - "SerialNumber": "X2FSW05DGA", - "LockStatus": { - "status": "locked", - "doorState": "init", - "dateTime": "2017-12-10T04:48:30.272Z", - "isLockStatusChanged": false, - "valid": true - }, - "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", - "homeKitEnabled": false, - "zWaveEnabled": false, - "isGalileo": false, - "Bridge": { - "_id": "aaacab87f7efxa0015884999", - "mfgBridgeID": "AAGPP102XX", - "deviceModel": "august-doorbell", - "firmwareVersion": "2.3.0-RC153+201711151527", - "operative": true - }, - "keypad": { - "_id": "5bc65c24e6ef2a263e1450a8", - "serialNumber": "K1GXB0054Z", - "lockID": "92412D1B44004595B5DEB134E151A8D3", - "currentFirmwareVersion": "2.27.0", - "battery": {}, - "batteryLevel": "Medium", - "batteryRaw": 170 - }, - "OfflineKeys": { - "created": [], - "loaded": [], - "deleted": [], - "loadedhk": [ - { - "key": "kkk01d4300c1dcxxx1c330f794941222", - "slot": 256, - "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", - "created": "2017-12-10T03:12:09.218Z", - "loaded": "2017-12-10T03:12:55.563Z" - } - ] - }, - "parametersToSet": {}, - "users": { - 
"cccca94e-373e-aaaa-bbbb-333396827777": { - "UserType": "superuser", - "FirstName": "Foo", - "LastName": "Bar", - "identifiers": ["email:foo@bar.com", "phone:+177777777777"], - "imageInfo": { - "original": { - "width": 948, - "height": 949, - "format": "jpg", - "url": "http://www.image.com/foo.jpeg", - "secure_url": "https://www.image.com/foo.jpeg" - }, - "thumbnail": { - "width": 128, - "height": 128, - "format": "jpg", - "url": "http://www.image.com/foo.jpeg", - "secure_url": "https://www.image.com/foo.jpeg" - } - } - } - }, - "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", - "ruleHash": {}, - "cameras": [], - "geofenceLimits": { - "ios": { - "debounceInterval": 90, - "gpsAccuracyMultiplier": 2.5, - "maximumGeofence": 5000, - "minimumGeofence": 100, - "minGPSAccuracyRequired": 80 - } - } -} diff --git a/tests/components/yale/fixtures/get_lock.low_keypad_battery.json b/tests/components/yale/fixtures/get_lock.low_keypad_battery.json deleted file mode 100644 index 43b5513a527..00000000000 --- a/tests/components/yale/fixtures/get_lock.low_keypad_battery.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "LockName": "Front Door Lock", - "Type": 2, - "Created": "2017-12-10T03:12:09.210Z", - "Updated": "2017-12-10T03:12:09.210Z", - "LockID": "A6697750D607098BAE8D6BAA11EF8063", - "HouseID": "000000000000", - "HouseName": "My House", - "Calibrated": false, - "skuNumber": "AUG-SL02-M02-S02", - "timeZone": "America/Vancouver", - "battery": 0.88, - "SerialNumber": "X2FSW05DGA", - "LockStatus": { - "status": "locked", - "doorState": "closed", - "dateTime": "2017-12-10T04:48:30.272Z", - "isLockStatusChanged": true, - "valid": true - }, - "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", - "homeKitEnabled": false, - "zWaveEnabled": false, - "isGalileo": false, - "Bridge": { - "_id": "aaacab87f7efxa0015884999", - "mfgBridgeID": "AAGPP102XX", - "deviceModel": "august-doorbell", - "firmwareVersion": "2.3.0-RC153+201711151527", - "operative": true - }, - "keypad": { - "_id": "5bc65c24e6ef2a263e1450a8", - "serialNumber": "K1GXB0054Z", - "lockID": "92412D1B44004595B5DEB134E151A8D3", - "currentFirmwareVersion": "2.27.0", - "battery": {}, - "batteryLevel": "Low", - "batteryRaw": 128 - }, - "OfflineKeys": { - "created": [], - "loaded": [], - "deleted": [], - "loadedhk": [ - { - "key": "kkk01d4300c1dcxxx1c330f794941222", - "slot": 256, - "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", - "created": "2017-12-10T03:12:09.218Z", - "loaded": "2017-12-10T03:12:55.563Z" - } - ] - }, - "parametersToSet": {}, - "users": { - "cccca94e-373e-aaaa-bbbb-333396827777": { - "UserType": "superuser", - "FirstName": "Foo", - "LastName": "Bar", - "identifiers": ["email:foo@bar.com", "phone:+177777777777"], - "imageInfo": { - "original": { - "width": 948, - "height": 949, - "format": "jpg", - "url": "http://www.image.com/foo.jpeg", - "secure_url": "https://www.image.com/foo.jpeg" - }, - "thumbnail": { - "width": 128, - "height": 128, - "format": "jpg", - "url": "http://www.image.com/foo.jpeg", - "secure_url": "https://www.image.com/foo.jpeg" - } - } - } - }, - "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", - "ruleHash": {}, - "cameras": [], - "geofenceLimits": { - "ios": { - "debounceInterval": 90, - "gpsAccuracyMultiplier": 2.5, - "maximumGeofence": 5000, - "minimumGeofence": 100, - "minGPSAccuracyRequired": 80 - } - } -} diff --git a/tests/components/yale/fixtures/get_lock.offline.json b/tests/components/yale/fixtures/get_lock.offline.json deleted file mode 100644 index 50d3d345ef8..00000000000 --- 
a/tests/components/yale/fixtures/get_lock.offline.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "Calibrated": false, - "Created": "2000-00-00T00:00:00.447Z", - "HouseID": "houseid", - "HouseName": "MockName", - "LockID": "ABC", - "LockName": "Test", - "LockStatus": { - "status": "unknown" - }, - "OfflineKeys": { - "created": [], - "createdhk": [ - { - "UserID": "mock-user-id", - "created": "2000-00-00T00:00:00.447Z", - "key": "mockkey", - "slot": 12 - } - ], - "deleted": [], - "loaded": [] - }, - "SerialNumber": "ABC", - "Type": 3, - "Updated": "2000-00-00T00:00:00.447Z", - "battery": -1, - "cameras": [], - "currentFirmwareVersion": "undefined-1.59.0-1.13.2", - "geofenceLimits": { - "ios": { - "debounceInterval": 90, - "gpsAccuracyMultiplier": 2.5, - "maximumGeofence": 5000, - "minGPSAccuracyRequired": 80, - "minimumGeofence": 100 - } - }, - "homeKitEnabled": false, - "isGalileo": false, - "macAddress": "a:b:c", - "parametersToSet": {}, - "pubsubChannel": "mockpubsub", - "ruleHash": {}, - "skuNumber": "AUG-X", - "supportsEntryCodes": false, - "users": { - "mockuserid": { - "FirstName": "MockName", - "LastName": "House", - "UserType": "superuser", - "identifiers": ["phone:+15558675309", "email:mockme@mock.org"] - } - }, - "zWaveDSK": "1-2-3-4", - "zWaveEnabled": true -} diff --git a/tests/components/yale/fixtures/get_lock.online.json b/tests/components/yale/fixtures/get_lock.online.json deleted file mode 100644 index 7abadeef4b6..00000000000 --- a/tests/components/yale/fixtures/get_lock.online.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "LockName": "Front Door Lock", - "Type": 2, - "Created": "2017-12-10T03:12:09.210Z", - "Updated": "2017-12-10T03:12:09.210Z", - "LockID": "A6697750D607098BAE8D6BAA11EF8063", - "HouseID": "000000000000", - "HouseName": "My House", - "Calibrated": false, - "skuNumber": "AUG-SL02-M02-S02", - "timeZone": "America/Vancouver", - "battery": 0.88, - "SerialNumber": "X2FSW05DGA", - "LockStatus": { - "status": "locked", - "doorState": "closed", - "dateTime": "2017-12-10T04:48:30.272Z", - "isLockStatusChanged": true, - "valid": true - }, - "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", - "homeKitEnabled": false, - "zWaveEnabled": false, - "isGalileo": false, - "Bridge": { - "_id": "aaacab87f7efxa0015884999", - "mfgBridgeID": "AAGPP102XX", - "deviceModel": "august-doorbell", - "firmwareVersion": "2.3.0-RC153+201711151527", - "operative": true - }, - "keypad": { - "_id": "5bc65c24e6ef2a263e1450a8", - "serialNumber": "K1GXB0054Z", - "lockID": "92412D1B44004595B5DEB134E151A8D3", - "currentFirmwareVersion": "2.27.0", - "battery": {}, - "batteryLevel": "Medium", - "batteryRaw": 170 - }, - "OfflineKeys": { - "created": [], - "loaded": [], - "deleted": [], - "loadedhk": [ - { - "key": "kkk01d4300c1dcxxx1c330f794941222", - "slot": 256, - "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", - "created": "2017-12-10T03:12:09.218Z", - "loaded": "2017-12-10T03:12:55.563Z" - } - ] - }, - "parametersToSet": {}, - "users": { - "cccca94e-373e-aaaa-bbbb-333396827777": { - "UserType": "superuser", - "FirstName": "Foo", - "LastName": "Bar", - "identifiers": ["email:foo@bar.com", "phone:+177777777777"], - "imageInfo": { - "original": { - "width": 948, - "height": 949, - "format": "jpg", - "url": "http://www.image.com/foo.jpeg", - "secure_url": "https://www.image.com/foo.jpeg" - }, - "thumbnail": { - "width": 128, - "height": 128, - "format": "jpg", - "url": "http://www.image.com/foo.jpeg", - "secure_url": "https://www.image.com/foo.jpeg" - } - } - } - }, - "pubsubChannel": 
"3333a674-ffff-aaaa-b351-b3a4473f3333", - "ruleHash": {}, - "cameras": [], - "geofenceLimits": { - "ios": { - "debounceInterval": 90, - "gpsAccuracyMultiplier": 2.5, - "maximumGeofence": 5000, - "minimumGeofence": 100, - "minGPSAccuracyRequired": 80 - } - } -} diff --git a/tests/components/yale/fixtures/get_lock.online.unknown_state.json b/tests/components/yale/fixtures/get_lock.online.unknown_state.json deleted file mode 100644 index abc8b40a132..00000000000 --- a/tests/components/yale/fixtures/get_lock.online.unknown_state.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "LockName": "Side Door", - "Type": 1001, - "Created": "2019-10-07T01:49:06.831Z", - "Updated": "2019-10-07T01:49:06.831Z", - "LockID": "BROKENID", - "HouseID": "abc", - "HouseName": "dog", - "Calibrated": false, - "timeZone": "America/Chicago", - "battery": 0.9524716174964851, - "hostLockInfo": { - "serialNumber": "YR", - "manufacturer": "yale", - "productID": 1536, - "productTypeID": 32770 - }, - "supportsEntryCodes": true, - "skuNumber": "AUG-MD01", - "macAddress": "MAC", - "SerialNumber": "M1FXZ00EZ9", - "LockStatus": { - "status": "unknown_error_during_connect", - "dateTime": "2020-02-22T02:48:11.741Z", - "isLockStatusChanged": true, - "valid": true, - "doorState": "closed" - }, - "currentFirmwareVersion": "undefined-4.3.0-1.8.14", - "homeKitEnabled": true, - "zWaveEnabled": false, - "isGalileo": false, - "Bridge": { - "_id": "id", - "mfgBridgeID": "id", - "deviceModel": "august-connect", - "firmwareVersion": "2.2.1", - "operative": true, - "status": { - "current": "online", - "updated": "2020-02-21T15:06:47.001Z", - "lastOnline": "2020-02-21T15:06:47.001Z", - "lastOffline": "2020-02-06T17:33:21.265Z" - }, - "hyperBridge": true - }, - "parametersToSet": {}, - "ruleHash": {}, - "cameras": [], - "geofenceLimits": { - "ios": { - "debounceInterval": 90, - "gpsAccuracyMultiplier": 2.5, - "maximumGeofence": 5000, - "minimumGeofence": 100, - "minGPSAccuracyRequired": 80 - } - } -} diff --git a/tests/components/yale/fixtures/get_lock.online_missing_doorsense.json b/tests/components/yale/fixtures/get_lock.online_missing_doorsense.json deleted file mode 100644 index 84822df9b89..00000000000 --- a/tests/components/yale/fixtures/get_lock.online_missing_doorsense.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "Bridge": { - "_id": "bridgeid", - "deviceModel": "august-connect", - "firmwareVersion": "2.2.1", - "hyperBridge": true, - "mfgBridgeID": "C5WY200WSH", - "operative": true, - "status": { - "current": "online", - "lastOffline": "2000-00-00T00:00:00.447Z", - "lastOnline": "2000-00-00T00:00:00.447Z", - "updated": "2000-00-00T00:00:00.447Z" - } - }, - "Calibrated": false, - "Created": "2000-00-00T00:00:00.447Z", - "HouseID": "123", - "HouseName": "Test", - "LockID": "missing_doorsense_id", - "LockName": "Online door missing doorsense", - "LockStatus": { - "dateTime": "2017-12-10T04:48:30.272Z", - "isLockStatusChanged": false, - "status": "locked", - "valid": true - }, - "SerialNumber": "XY", - "Type": 1001, - "Updated": "2000-00-00T00:00:00.447Z", - "battery": 0.922, - "currentFirmwareVersion": "undefined-4.3.0-1.8.14", - "homeKitEnabled": true, - "hostLockInfo": { - "manufacturer": "yale", - "productID": 1536, - "productTypeID": 32770, - "serialNumber": "ABC" - }, - "isGalileo": false, - "macAddress": "12:22", - "pins": { - "created": [], - "loaded": [] - }, - "skuNumber": "AUG-MD01", - "supportsEntryCodes": true, - "timeZone": "Pacific/Hawaii", - "zWaveEnabled": false -} diff --git 
a/tests/components/yale/fixtures/get_lock.online_with_doorsense.json b/tests/components/yale/fixtures/get_lock.online_with_doorsense.json deleted file mode 100644 index d9b413708ca..00000000000 --- a/tests/components/yale/fixtures/get_lock.online_with_doorsense.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "Bridge": { - "_id": "bridgeid", - "deviceModel": "august-connect", - "firmwareVersion": "2.2.1", - "hyperBridge": true, - "mfgBridgeID": "C5WY200WSH", - "operative": true, - "status": { - "current": "online", - "lastOffline": "2000-00-00T00:00:00.447Z", - "lastOnline": "2000-00-00T00:00:00.447Z", - "updated": "2000-00-00T00:00:00.447Z" - } - }, - "pubsubChannel": "pubsub", - "Calibrated": false, - "Created": "2000-00-00T00:00:00.447Z", - "HouseID": "mockhouseid1", - "HouseName": "Test", - "LockID": "online_with_doorsense", - "LockName": "Online door with doorsense", - "LockStatus": { - "dateTime": "2017-12-10T04:48:30.272Z", - "doorState": "open", - "isLockStatusChanged": false, - "status": "locked", - "valid": true - }, - "SerialNumber": "XY", - "Type": 1001, - "Updated": "2000-00-00T00:00:00.447Z", - "battery": 0.922, - "currentFirmwareVersion": "undefined-4.3.0-1.8.14", - "homeKitEnabled": true, - "hostLockInfo": { - "manufacturer": "yale", - "productID": 1536, - "productTypeID": 32770, - "serialNumber": "ABC" - }, - "isGalileo": false, - "macAddress": "12:22", - "pins": { - "created": [], - "loaded": [] - }, - "skuNumber": "AUG-MD01", - "supportsEntryCodes": true, - "timeZone": "Pacific/Hawaii", - "zWaveEnabled": false -} diff --git a/tests/components/yale/fixtures/get_lock.online_with_keys.json b/tests/components/yale/fixtures/get_lock.online_with_keys.json deleted file mode 100644 index 4efcba44d09..00000000000 --- a/tests/components/yale/fixtures/get_lock.online_with_keys.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "LockName": "Front Door Lock", - "Type": 2, - "Created": "2017-12-10T03:12:09.210Z", - "Updated": "2017-12-10T03:12:09.210Z", - "LockID": "A6697750D607098BAE8D6BAA11EF8064", - "HouseID": "000000000000", - "HouseName": "My House", - "Calibrated": false, - "skuNumber": "AUG-SL02-M02-S02", - "timeZone": "America/Vancouver", - "battery": 0.88, - "SerialNumber": "X2FSW05DGA", - "LockStatus": { - "status": "locked", - "doorState": "closed", - "dateTime": "2017-12-10T04:48:30.272Z", - "isLockStatusChanged": true, - "valid": true - }, - "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", - "homeKitEnabled": false, - "zWaveEnabled": false, - "isGalileo": false, - "Bridge": { - "_id": "aaacab87f7efxa0015884999", - "mfgBridgeID": "AAGPP102XX", - "deviceModel": "august-doorbell", - "firmwareVersion": "2.3.0-RC153+201711151527", - "operative": true - }, - "keypad": { - "_id": "5bc65c24e6ef2a263e1450a9", - "serialNumber": "K1GXB0054L", - "lockID": "92412D1B44004595B5DEB134E151A8D4", - "currentFirmwareVersion": "2.27.0", - "battery": {}, - "batteryLevel": "Medium", - "batteryRaw": 170 - }, - "OfflineKeys": { - "created": [], - "loaded": [ - { - "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", - "slot": 1, - "key": "kkk01d4300c1dcxxx1c330f794941111", - "created": "2017-12-10T03:12:09.215Z", - "loaded": "2017-12-10T03:12:54.391Z" - } - ], - "deleted": [], - "loadedhk": [ - { - "key": "kkk01d4300c1dcxxx1c330f794941222", - "slot": 256, - "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", - "created": "2017-12-10T03:12:09.218Z", - "loaded": "2017-12-10T03:12:55.563Z" - } - ] - }, - "parametersToSet": {}, - "users": { - "cccca94e-373e-aaaa-bbbb-333396827777": { - "UserType": "superuser", - 
"FirstName": "Foo", - "LastName": "Bar", - "identifiers": ["email:foo@bar.com", "phone:+177777777777"], - "imageInfo": { - "original": { - "width": 948, - "height": 949, - "format": "jpg", - "url": "http://www.image.com/foo.jpeg", - "secure_url": "https://www.image.com/foo.jpeg" - }, - "thumbnail": { - "width": 128, - "height": 128, - "format": "jpg", - "url": "http://www.image.com/foo.jpeg", - "secure_url": "https://www.image.com/foo.jpeg" - } - } - } - }, - "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", - "ruleHash": {}, - "cameras": [], - "geofenceLimits": { - "ios": { - "debounceInterval": 90, - "gpsAccuracyMultiplier": 2.5, - "maximumGeofence": 5000, - "minimumGeofence": 100, - "minGPSAccuracyRequired": 80 - } - } -} diff --git a/tests/components/yale/fixtures/get_lock.online_with_unlatch.json b/tests/components/yale/fixtures/get_lock.online_with_unlatch.json deleted file mode 100644 index 288ab1a2f28..00000000000 --- a/tests/components/yale/fixtures/get_lock.online_with_unlatch.json +++ /dev/null @@ -1,94 +0,0 @@ -{ - "LockName": "Lock online with unlatch supported", - "Type": 17, - "Created": "2024-03-14T18:03:09.003Z", - "Updated": "2024-03-14T18:03:09.003Z", - "LockID": "online_with_unlatch", - "HouseID": "mockhouseid1", - "HouseName": "Zuhause", - "Calibrated": false, - "timeZone": "Europe/Berlin", - "battery": 0.61, - "batteryInfo": { - "level": 0.61, - "warningState": "lock_state_battery_warning_none", - "infoUpdatedDate": "2024-04-30T17:55:09.045Z", - "lastChangeDate": "2024-03-15T07:04:00.000Z", - "lastChangeVoltage": 8350, - "state": "Mittel", - "icon": "https://app-resources.aaecosystem.com/images/lock_battery_state_medium.png" - }, - "hostHardwareID": "xxx", - "supportsEntryCodes": true, - "remoteOperateSecret": "xxxx", - "skuNumber": "NONE", - "macAddress": "DE:AD:BE:00:00:00", - "SerialNumber": "LPOC000000", - "LockStatus": { - "status": "locked", - "dateTime": "2024-04-30T18:41:25.673Z", - "isLockStatusChanged": false, - "valid": true, - "doorState": "init" - }, - "currentFirmwareVersion": "1.0.4", - "homeKitEnabled": false, - "zWaveEnabled": false, - "isGalileo": false, - "Bridge": { - "_id": "65f33445529187c78a100000", - "mfgBridgeID": "LPOCH0004Y", - "deviceModel": "august-lock", - "firmwareVersion": "1.0.4", - "operative": true, - "status": { - "current": "online", - "lastOnline": "2024-04-30T18:41:27.971Z", - "updated": "2024-04-30T18:41:27.971Z", - "lastOffline": "2024-04-25T14:41:40.118Z" - }, - "locks": [ - { - "_id": "656858c182e6c7c555faf758", - "LockID": "68895DD075A1444FAD4C00B273EEEF28", - "macAddress": "DE:AD:BE:EF:0B:BC" - } - ], - "hyperBridge": true - }, - "OfflineKeys": { - "created": [], - "loaded": [ - { - "created": "2024-03-14T18:03:09.034Z", - "key": "055281d4aa9bd7b68c7b7bb78e2f34ca", - "slot": 1, - "UserID": "b4b44424-0000-0000-0000-25c224dad337", - "loaded": "2024-03-14T18:03:33.470Z" - } - ], - "deleted": [] - }, - "parametersToSet": {}, - "users": { - "b4b44424-0000-0000-0000-25c224dad337": { - "UserType": "superuser", - "FirstName": "m10x", - "LastName": "m10x", - "identifiers": ["phone:+494444444", "email:m10x@example.com"] - } - }, - "pubsubChannel": "pubsub", - "ruleHash": {}, - "cameras": [], - "geofenceLimits": { - "ios": { - "debounceInterval": 90, - "gpsAccuracyMultiplier": 2.5, - "maximumGeofence": 5000, - "minimumGeofence": 100, - "minGPSAccuracyRequired": 80 - } - }, - "accessSchedulesAllowed": true -} diff --git a/tests/components/yale/fixtures/get_locks.json b/tests/components/yale/fixtures/get_locks.json deleted file 
mode 100644 index 3fab55f82c9..00000000000 --- a/tests/components/yale/fixtures/get_locks.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "A6697750D607098BAE8D6BAA11EF8063": { - "LockName": "Front Door Lock", - "UserType": "superuser", - "macAddress": "2E:BA:C4:14:3F:09", - "HouseID": "000000000000", - "HouseName": "A House" - }, - "A6697750D607098BAE8D6BAA11EF9999": { - "LockName": "Back Door Lock", - "UserType": "user", - "macAddress": "2E:BA:C4:14:3F:88", - "HouseID": "000000000011", - "HouseName": "A House" - } -} diff --git a/tests/components/yale/fixtures/jwt b/tests/components/yale/fixtures/jwt deleted file mode 100644 index d64f31b9bb2..00000000000 --- a/tests/components/yale/fixtures/jwt +++ /dev/null @@ -1 +0,0 @@ -eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpbnN0YWxsSWQiOiIiLCJyZWdpb24iOiJpcmVsYW5kLXByb2QtYXdzIiwiYXBwbGljYXRpb25JZCI6IiIsInVzZXJJZCI6ImE3NmMyNWU1LTQ5YWEtNGMxNC1jZDBjLTQ4YTY5MzFlMjA4MSIsInZJbnN0YWxsSWQiOmZhbHNlLCJ2UGFzc3dvcmQiOnRydWUsInZFbWFpbCI6dHJ1ZSwidlBob25lIjp0cnVlLCJoYXNJbnN0YWxsSWQiOmZhbHNlLCJoYXNQYXNzd29yZCI6ZmFsc2UsImhhc0VtYWlsIjpmYWxzZSwiaGFzUGhvbmUiOmZhbHNlLCJpc0xvY2tlZE91dCI6ZmFsc2UsImNhcHRjaGEiOiIiLCJlbWFpbCI6W10sInBob25lIjpbXSwiZXhwaXJlc0F0IjoiMjAyNC0xMi0xOFQxMzo1NDowNS4xMzRaIiwidGVtcG9yYXJ5QWNjb3VudENyZWF0aW9uUGFzc3dvcmRMaW5rIjoiIiwiaWF0IjoxNzI0MTYyMDQ1LCJleHAiOjE3MzQ1MzAwNDUsIm9hdXRoIjp7ImFwcF9uYW1lIjoiSG9tZSBBc3Npc3RhbnQiLCJjbGllbnRfaWQiOiJiM2NkM2YwYi1mYjk3LTRkNmMtYmVlOS1hZjdhYjA0NzU4YzciLCJyZWRpcmVjdF91cmkiOiJodHRwczovL2FjY291bnQtbGluay5uYWJ1Y2FzYS5jb20vYXV0aG9yaXplX2NhbGxiYWNrIiwicGFydG5lcl9pZCI6IjY1Nzk3NDg4MTA2NmNhNDhjOTljMDgyNiJ9fQ.BdRo-dEr-osbDQGB2XzlI-mIj4gqULtapODt-sj-eA8 diff --git a/tests/components/yale/fixtures/lock_open.json b/tests/components/yale/fixtures/lock_open.json deleted file mode 100644 index b6cfe3c90fc..00000000000 --- a/tests/components/yale/fixtures/lock_open.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "status": "kAugLockState_Locked", - "resultsFromOperationCache": false, - "retryCount": 1, - "info": { - "wlanRSSI": -54, - "lockType": "lock_version_1001", - "lockStatusChanged": false, - "serialNumber": "ABC", - "serial": "123", - "action": "lock", - "context": { - "startDate": "2020-02-19T01:59:39.516Z", - "retryCount": 1, - "transactionID": "mock" - }, - "bridgeID": "mock", - "wlanSNR": 41, - "startTime": "2020-02-19T01:59:39.517Z", - "duration": 5149, - "lockID": "ABC", - "rssi": -77 - }, - "totalTime": 5162, - "doorState": "kAugDoorState_Open" -} diff --git a/tests/components/yale/fixtures/lock_with_doorbell.online.json b/tests/components/yale/fixtures/lock_with_doorbell.online.json deleted file mode 100644 index bb2367d1111..00000000000 --- a/tests/components/yale/fixtures/lock_with_doorbell.online.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "LockName": "Front Door Lock", - "Type": 7, - "Created": "2017-12-10T03:12:09.210Z", - "Updated": "2017-12-10T03:12:09.210Z", - "LockID": "A6697750D607098BAE8D6BAA11EF8063", - "HouseID": "000000000000", - "HouseName": "My House", - "Calibrated": false, - "skuNumber": "AUG-SL02-M02-S02", - "timeZone": "America/Vancouver", - "battery": 0.88, - "SerialNumber": "X2FSW05DGA", - "LockStatus": { - "status": "locked", - "doorState": "closed", - "dateTime": "2017-12-10T04:48:30.272Z", - "isLockStatusChanged": true, - "valid": true - }, - "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", - "homeKitEnabled": false, - "zWaveEnabled": false, - "isGalileo": false, - "Bridge": { - "_id": "aaacab87f7efxa0015884999", - "mfgBridgeID": "AAGPP102XX", - "deviceModel": "august-doorbell", - "firmwareVersion": 
"2.3.0-RC153+201711151527", - "operative": true - }, - "keypad": { - "_id": "5bc65c24e6ef2a263e1450a8", - "serialNumber": "K1GXB0054Z", - "lockID": "92412D1B44004595B5DEB134E151A8D3", - "currentFirmwareVersion": "2.27.0", - "battery": {}, - "batteryLevel": "Medium", - "batteryRaw": 170 - }, - "OfflineKeys": { - "created": [], - "loaded": [ - { - "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", - "slot": 1, - "key": "kkk01d4300c1dcxxx1c330f794941111", - "created": "2017-12-10T03:12:09.215Z", - "loaded": "2017-12-10T03:12:54.391Z" - } - ], - "deleted": [], - "loadedhk": [ - { - "key": "kkk01d4300c1dcxxx1c330f794941222", - "slot": 256, - "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", - "created": "2017-12-10T03:12:09.218Z", - "loaded": "2017-12-10T03:12:55.563Z" - } - ] - }, - "parametersToSet": {}, - "users": { - "cccca94e-373e-aaaa-bbbb-333396827777": { - "UserType": "superuser", - "FirstName": "Foo", - "LastName": "Bar", - "identifiers": ["email:foo@bar.com", "phone:+177777777777"], - "imageInfo": { - "original": { - "width": 948, - "height": 949, - "format": "jpg", - "url": "http://www.image.com/foo.jpeg", - "secure_url": "https://www.image.com/foo.jpeg" - }, - "thumbnail": { - "width": 128, - "height": 128, - "format": "jpg", - "url": "http://www.image.com/foo.jpeg", - "secure_url": "https://www.image.com/foo.jpeg" - } - } - } - }, - "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", - "ruleHash": {}, - "cameras": [], - "geofenceLimits": { - "ios": { - "debounceInterval": 90, - "gpsAccuracyMultiplier": 2.5, - "maximumGeofence": 5000, - "minimumGeofence": 100, - "minGPSAccuracyRequired": 80 - } - } -} diff --git a/tests/components/yale/fixtures/reauth_jwt b/tests/components/yale/fixtures/reauth_jwt deleted file mode 100644 index 4db8d061b68..00000000000 --- a/tests/components/yale/fixtures/reauth_jwt +++ /dev/null @@ -1 +0,0 @@ -eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpbnN0YWxsSWQiOiIiLCJyZWdpb24iOiJpcmVsYW5kLXByb2QtYXdzIiwiYXBwbGljYXRpb25JZCI6IiIsInVzZXJJZCI6ImE3NmMyNWU1LTQ5YWEtNGMxNC1jZDBjLTQ4YTY5MzFlMjA4MSIsInZJbnN0YWxsSWQiOmZhbHNlLCJ2UGFzc3dvcmQiOnRydWUsInZFbWFpbCI6dHJ1ZSwidlBob25lIjp0cnVlLCJoYXNJbnN0YWxsSWQiOmZhbHNlLCJoYXNQYXNzd29yZCI6ZmFsc2UsImhhc0VtYWlsIjpmYWxzZSwiaGFzUGhvbmUiOmZhbHNlLCJpc0xvY2tlZE91dCI6ZmFsc2UsImNhcHRjaGEiOiIiLCJlbWFpbCI6W10sInBob25lIjpbXSwiZXhwaXJlc0F0IjoiMjAyNC0xMi0xOFQxMzo1NDowNS4xMzRaIiwidGVtcG9yYXJ5QWNjb3VudENyZWF0aW9uUGFzc3dvcmRMaW5rIjoiIiwiaWF0IjoxNzI0MTYyMDQ1LCJleHAiOjI3MzQ1MzAwNDUsIm9hdXRoIjp7ImFwcF9uYW1lIjoiSG9tZSBBc3Npc3RhbnQiLCJjbGllbnRfaWQiOiJiM2NkM2YwYi1mYjk3LTRkNmMtYmVlOS1hZjdhYjA0NzU4YzciLCJyZWRpcmVjdF91cmkiOiJodHRwczovL2FjY291bnQtbGluay5uYWJ1Y2FzYS5jb20vYXV0aG9yaXplX2NhbGxiYWNrIiwicGFydG5lcl9pZCI6IjY1Nzk3NDg4MTA2NmNhNDhjOTljMDgyNiJ9fQ.DtkHscsvbTE-SyKW3RxwXFQIKMf0xJwfPZN1X3JesqA diff --git a/tests/components/yale/fixtures/reauth_jwt_wrong_account b/tests/components/yale/fixtures/reauth_jwt_wrong_account deleted file mode 100644 index b0b62438178..00000000000 --- a/tests/components/yale/fixtures/reauth_jwt_wrong_account +++ /dev/null @@ -1 +0,0 @@ 
-eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpbnN0YWxsSWQiOiIiLCJyZWdpb24iOiJpcmVsYW5kLXByb2QtYXdzIiwiYXBwbGljYXRpb25JZCI6IiIsInVzZXJJZCI6IjQ0NDQ0NDQ0LTQ5YWEtNGMxNC1jZDBjLTQ4YTY5MzFlMjA4MSIsInZJbnN0YWxsSWQiOmZhbHNlLCJ2UGFzc3dvcmQiOnRydWUsInZFbWFpbCI6dHJ1ZSwidlBob25lIjp0cnVlLCJoYXNJbnN0YWxsSWQiOmZhbHNlLCJoYXNQYXNzd29yZCI6ZmFsc2UsImhhc0VtYWlsIjpmYWxzZSwiaGFzUGhvbmUiOmZhbHNlLCJpc0xvY2tlZE91dCI6ZmFsc2UsImNhcHRjaGEiOiIiLCJlbWFpbCI6W10sInBob25lIjpbXSwiZXhwaXJlc0F0IjoiMjAyNC0xMi0xOFQxMzo1NDowNS4xMzRaIiwidGVtcG9yYXJ5QWNjb3VudENyZWF0aW9uUGFzc3dvcmRMaW5rIjoiIiwiaWF0IjoxNzI0MTYyMDQ1LCJleHAiOjE3MzQ1MzAwNDUsIm9hdXRoIjp7ImFwcF9uYW1lIjoiSG9tZSBBc3Npc3RhbnQiLCJjbGllbnRfaWQiOiJiM2NkM2YwYi1mYjk3LTRkNmMtYmVlOS1hZjdhYjA0NzU4YzciLCJyZWRpcmVjdF91cmkiOiJodHRwczovL2FjY291bnQtbGluay5uYWJ1Y2FzYS5jb20vYXV0aG9yaXplX2NhbGxiYWNrIiwicGFydG5lcl9pZCI6IjY1Nzk3NDg4MTA2NmNhNDhjOTljMDgyNiJ9fQ.PenDp4JUIBQZEx2BFxaCqV1-6yMuUPtmnB6jq1wpoX8 diff --git a/tests/components/yale/fixtures/unlock_closed.json b/tests/components/yale/fixtures/unlock_closed.json deleted file mode 100644 index f676c005a17..00000000000 --- a/tests/components/yale/fixtures/unlock_closed.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "status": "kAugLockState_Unlocked", - "resultsFromOperationCache": false, - "retryCount": 1, - "info": { - "wlanRSSI": -54, - "lockType": "lock_version_1001", - "lockStatusChanged": false, - "serialNumber": "ABC", - "serial": "123", - "action": "lock", - "context": { - "startDate": "2020-02-19T01:59:39.516Z", - "retryCount": 1, - "transactionID": "mock" - }, - "bridgeID": "mock", - "wlanSNR": 41, - "startTime": "2020-02-19T01:59:39.517Z", - "duration": 5149, - "lockID": "ABC", - "rssi": -77 - }, - "totalTime": 5162, - "doorState": "kAugDoorState_Closed" -} diff --git a/tests/components/yale/mocks.py b/tests/components/yale/mocks.py deleted file mode 100644 index 03ab3609002..00000000000 --- a/tests/components/yale/mocks.py +++ /dev/null @@ -1,515 +0,0 @@ -"""Mocks for the yale component.""" - -from __future__ import annotations - -from collections.abc import Iterable -from contextlib import contextmanager -import json -import os -import time -from typing import Any -from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch - -from yalexs.activity import ( - ACTIVITY_ACTIONS_BRIDGE_OPERATION, - ACTIVITY_ACTIONS_DOOR_OPERATION, - ACTIVITY_ACTIONS_DOORBELL_DING, - ACTIVITY_ACTIONS_DOORBELL_MOTION, - ACTIVITY_ACTIONS_DOORBELL_VIEW, - ACTIVITY_ACTIONS_LOCK_OPERATION, - SOURCE_LOCK_OPERATE, - SOURCE_LOG, - Activity, - BridgeOperationActivity, - DoorbellDingActivity, - DoorbellMotionActivity, - DoorbellViewActivity, - DoorOperationActivity, - LockOperationActivity, -) -from yalexs.api_async import ApiAsync -from yalexs.authenticator_common import Authentication, AuthenticationState -from yalexs.const import Brand -from yalexs.doorbell import Doorbell, DoorbellDetail -from yalexs.lock import Lock, LockDetail -from yalexs.manager.ratelimit import _RateLimitChecker -from yalexs.manager.socketio import SocketIORunner - -from homeassistant.components.application_credentials import ( - ClientCredential, - async_import_client_credential, -) -from homeassistant.components.yale.const import DOMAIN -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry, load_fixture - -USER_ID = "a76c25e5-49aa-4c14-cd0c-48a6931e2081" - - -def _mock_get_config( - brand: Brand = Brand.YALE_GLOBAL, jwt: str | None = None -) 
-> dict[str, Any]: - """Return a default yale config.""" - return { - DOMAIN: { - "auth_implementation": "yale", - "token": { - "access_token": jwt or "access_token", - "expires_in": 1, - "refresh_token": "refresh_token", - "expires_at": time.time() + 3600, - "service": "yale", - }, - } - } - - -def _mock_authenticator(auth_state: AuthenticationState) -> Authentication: - """Mock an yale authenticator.""" - authenticator = MagicMock() - type(authenticator).state = PropertyMock(return_value=auth_state) - return authenticator - - -def _timetoken() -> str: - return str(time.time_ns())[:-2] - - -async def mock_yale_config_entry( - hass: HomeAssistant, -) -> MockConfigEntry: - """Mock yale config entry and client credentials.""" - entry = mock_config_entry() - entry.add_to_hass(hass) - return entry - - -def mock_config_entry(jwt: str | None = None) -> MockConfigEntry: - """Return the default mocked config entry.""" - return MockConfigEntry( - domain=DOMAIN, - data=_mock_get_config(jwt=jwt)[DOMAIN], - options={}, - unique_id=USER_ID, - ) - - -async def mock_client_credentials(hass: HomeAssistant) -> ClientCredential: - """Mock client credentials.""" - assert await async_setup_component(hass, "application_credentials", {}) - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential("1", "2"), - DOMAIN, - ) - - -@contextmanager -def patch_yale_setup(): - """Patch yale setup process.""" - with ( - patch("yalexs.manager.gateway.ApiAsync") as api_mock, - patch.object(_RateLimitChecker, "register_wakeup") as authenticate_mock, - patch("yalexs.manager.data.SocketIORunner") as socketio_mock, - patch.object(socketio_mock, "run"), - patch( - "homeassistant.components.yale.config_entry_oauth2_flow.async_get_config_entry_implementation" - ), - ): - yield api_mock, authenticate_mock, socketio_mock - - -async def _mock_setup_yale( - hass: HomeAssistant, - api_instance: ApiAsync, - socketio_mock: SocketIORunner, - authenticate_side_effect: MagicMock, -) -> ConfigEntry: - """Set up yale integration.""" - entry = await mock_yale_config_entry(hass) - with patch_yale_setup() as patched_setup: - api_mock, authenticate_mock, sockio_mock_ = patched_setup - authenticate_mock.side_effect = authenticate_side_effect - sockio_mock_.return_value = socketio_mock - api_mock.return_value = api_instance - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - return entry - - -async def _create_yale_with_devices( - hass: HomeAssistant, - devices: Iterable[LockDetail | DoorbellDetail] | None = None, - api_call_side_effects: dict[str, Any] | None = None, - activities: list[Any] | None = None, - brand: Brand = Brand.YALE_GLOBAL, - authenticate_side_effect: MagicMock | None = None, -) -> tuple[ConfigEntry, SocketIORunner]: - entry, _, socketio = await _create_yale_api_with_devices( - hass, - devices, - api_call_side_effects, - activities, - brand, - authenticate_side_effect, - ) - return entry, socketio - - -async def _create_yale_api_with_devices( - hass: HomeAssistant, - devices: Iterable[LockDetail | DoorbellDetail] | None = None, - api_call_side_effects: dict[str, Any] | None = None, - activities: dict[str, Any] | None = None, - brand: Brand = Brand.YALE_GLOBAL, - authenticate_side_effect: MagicMock | None = None, -) -> tuple[ConfigEntry, ApiAsync, SocketIORunner]: - if api_call_side_effects is None: - api_call_side_effects = {} - if devices is None: - devices = () - - update_api_call_side_effects(api_call_side_effects, devices, activities) - - api_instance = await 
make_mock_api(api_call_side_effects, brand) - socketio = SocketIORunner( - MagicMock( - api=api_instance, async_get_access_token=AsyncMock(return_value="token") - ) - ) - socketio.run = AsyncMock() - - entry = await _mock_setup_yale( - hass, - api_instance, - socketio, - authenticate_side_effect=authenticate_side_effect, - ) - - return entry, api_instance, socketio - - -def update_api_call_side_effects( - api_call_side_effects: dict[str, Any], - devices: Iterable[LockDetail | DoorbellDetail], - activities: dict[str, Any] | None = None, -) -> None: - """Update side effects dict from devices and activities.""" - - device_data = {"doorbells": [], "locks": []} - for device in devices or (): - if isinstance(device, LockDetail): - device_data["locks"].append( - {"base": _mock_yale_lock(device.device_id), "detail": device} - ) - elif isinstance(device, DoorbellDetail): - device_data["doorbells"].append( - { - "base": _mock_yale_doorbell( - deviceid=device.device_id, - brand=device._data.get("brand", Brand.YALE_GLOBAL), - ), - "detail": device, - } - ) - else: - raise ValueError # noqa: TRY004 - - def _get_device_detail(device_type, device_id): - for device in device_data[device_type]: - if device["detail"].device_id == device_id: - return device["detail"] - raise ValueError - - def _get_base_devices(device_type): - return [device["base"] for device in device_data[device_type]] - - def get_lock_detail_side_effect(access_token, device_id): - return _get_device_detail("locks", device_id) - - def get_doorbell_detail_side_effect(access_token, device_id): - return _get_device_detail("doorbells", device_id) - - def get_operable_locks_side_effect(access_token): - return _get_base_devices("locks") - - def get_doorbells_side_effect(access_token): - return _get_base_devices("doorbells") - - def get_house_activities_side_effect(access_token, house_id, limit=10): - if activities is not None: - return activities - return [] - - def lock_return_activities_side_effect(access_token, device_id): - lock = _get_device_detail("locks", device_id) - return [ - # There is a check to prevent out of order events - # so we set the doorclosed & lock event in the future - # to prevent a race condition where we reject the event - # because it happened before the dooropen & unlock event. 
- _mock_lock_operation_activity(lock, "lock", 2000), - _mock_door_operation_activity(lock, "doorclosed", 2000), - ] - - def unlock_return_activities_side_effect(access_token, device_id): - lock = _get_device_detail("locks", device_id) - return [ - _mock_lock_operation_activity(lock, "unlock", 0), - _mock_door_operation_activity(lock, "dooropen", 0), - ] - - api_call_side_effects.setdefault("get_lock_detail", get_lock_detail_side_effect) - api_call_side_effects.setdefault( - "get_doorbell_detail", get_doorbell_detail_side_effect - ) - api_call_side_effects.setdefault( - "get_operable_locks", get_operable_locks_side_effect - ) - api_call_side_effects.setdefault("get_doorbells", get_doorbells_side_effect) - api_call_side_effects.setdefault( - "get_house_activities", get_house_activities_side_effect - ) - api_call_side_effects.setdefault( - "lock_return_activities", lock_return_activities_side_effect - ) - api_call_side_effects.setdefault( - "unlock_return_activities", unlock_return_activities_side_effect - ) - api_call_side_effects.setdefault( - "async_unlatch_return_activities", unlock_return_activities_side_effect - ) - - -async def make_mock_api( - api_call_side_effects: dict[str, Any], - brand: Brand = Brand.YALE_GLOBAL, -) -> ApiAsync: - """Make a mock ApiAsync instance.""" - api_instance = MagicMock(name="Api", brand=brand) - - if api_call_side_effects["get_lock_detail"]: - type(api_instance).async_get_lock_detail = AsyncMock( - side_effect=api_call_side_effects["get_lock_detail"] - ) - - if api_call_side_effects["get_operable_locks"]: - type(api_instance).async_get_operable_locks = AsyncMock( - side_effect=api_call_side_effects["get_operable_locks"] - ) - - if api_call_side_effects["get_doorbells"]: - type(api_instance).async_get_doorbells = AsyncMock( - side_effect=api_call_side_effects["get_doorbells"] - ) - - if api_call_side_effects["get_doorbell_detail"]: - type(api_instance).async_get_doorbell_detail = AsyncMock( - side_effect=api_call_side_effects["get_doorbell_detail"] - ) - - if api_call_side_effects["get_house_activities"]: - type(api_instance).async_get_house_activities = AsyncMock( - side_effect=api_call_side_effects["get_house_activities"] - ) - - if api_call_side_effects["lock_return_activities"]: - type(api_instance).async_lock_return_activities = AsyncMock( - side_effect=api_call_side_effects["lock_return_activities"] - ) - - if api_call_side_effects["unlock_return_activities"]: - type(api_instance).async_unlock_return_activities = AsyncMock( - side_effect=api_call_side_effects["unlock_return_activities"] - ) - - if api_call_side_effects["async_unlatch_return_activities"]: - type(api_instance).async_unlatch_return_activities = AsyncMock( - side_effect=api_call_side_effects["async_unlatch_return_activities"] - ) - - api_instance.async_unlock_async = AsyncMock() - api_instance.async_lock_async = AsyncMock() - api_instance.async_status_async = AsyncMock() - api_instance.async_get_user = AsyncMock(return_value={"UserID": "abc"}) - api_instance.async_unlatch_async = AsyncMock() - api_instance.async_unlatch = AsyncMock() - api_instance.async_add_websocket_subscription = AsyncMock() - - return api_instance - - -def _mock_yale_authentication( - token_text: str, token_timestamp: float, state: AuthenticationState -) -> Authentication: - authentication = MagicMock(name="yalexs.authentication") - type(authentication).state = PropertyMock(return_value=state) - type(authentication).access_token = PropertyMock(return_value=token_text) - type(authentication).access_token_expires = 
PropertyMock( - return_value=token_timestamp - ) - return authentication - - -def _mock_yale_lock(lockid: str = "mocklockid1", houseid: str = "mockhouseid1") -> Lock: - return Lock(lockid, _mock_yale_lock_data(lockid=lockid, houseid=houseid)) - - -def _mock_yale_doorbell( - deviceid="mockdeviceid1", houseid="mockhouseid1", brand=Brand.YALE_GLOBAL -) -> Doorbell: - return Doorbell( - deviceid, - _mock_yale_doorbell_data(deviceid=deviceid, houseid=houseid, brand=brand), - ) - - -def _mock_yale_doorbell_data( - deviceid: str = "mockdeviceid1", - houseid: str = "mockhouseid1", - brand: Brand = Brand.YALE_GLOBAL, -) -> dict[str, Any]: - return { - "_id": deviceid, - "DeviceID": deviceid, - "name": f"{deviceid} Name", - "HouseID": houseid, - "UserType": "owner", - "serialNumber": "mockserial", - "battery": 90, - "status": "standby", - "currentFirmwareVersion": "mockfirmware", - "Bridge": { - "_id": "bridgeid1", - "firmwareVersion": "mockfirm", - "operative": True, - }, - "LockStatus": {"doorState": "open"}, - } - - -def _mock_yale_lock_data( - lockid: str = "mocklockid1", houseid: str = "mockhouseid1" -) -> dict[str, Any]: - return { - "_id": lockid, - "LockID": lockid, - "LockName": f"{lockid} Name", - "HouseID": houseid, - "UserType": "owner", - "SerialNumber": "mockserial", - "battery": 90, - "currentFirmwareVersion": "mockfirmware", - "Bridge": { - "_id": "bridgeid1", - "firmwareVersion": "mockfirm", - "operative": True, - }, - "LockStatus": {"doorState": "open"}, - } - - -async def _mock_operative_yale_lock_detail(hass: HomeAssistant) -> LockDetail: - return await _mock_lock_from_fixture(hass, "get_lock.online.json") - - -async def _mock_lock_with_offline_key(hass: HomeAssistant) -> LockDetail: - return await _mock_lock_from_fixture(hass, "get_lock.online_with_keys.json") - - -async def _mock_inoperative_yale_lock_detail(hass: HomeAssistant) -> LockDetail: - return await _mock_lock_from_fixture(hass, "get_lock.offline.json") - - -async def _mock_activities_from_fixture( - hass: HomeAssistant, path: str -) -> list[Activity]: - json_dict = await _load_json_fixture(hass, path) - activities = [] - for activity_json in json_dict: - activity = _activity_from_dict(activity_json) - if activity: - activities.append(activity) - - return activities - - -async def _mock_lock_from_fixture(hass: HomeAssistant, path: str) -> LockDetail: - json_dict = await _load_json_fixture(hass, path) - return LockDetail(json_dict) - - -async def _mock_doorbell_from_fixture(hass: HomeAssistant, path: str) -> LockDetail: - json_dict = await _load_json_fixture(hass, path) - return DoorbellDetail(json_dict) - - -async def _load_json_fixture(hass: HomeAssistant, path: str) -> dict[str, Any]: - fixture = await hass.async_add_executor_job( - load_fixture, os.path.join("yale", path) - ) - return json.loads(fixture) - - -async def _mock_doorsense_enabled_yale_lock_detail(hass: HomeAssistant) -> LockDetail: - return await _mock_lock_from_fixture(hass, "get_lock.online_with_doorsense.json") - - -async def _mock_doorsense_missing_yale_lock_detail(hass: HomeAssistant) -> LockDetail: - return await _mock_lock_from_fixture(hass, "get_lock.online_missing_doorsense.json") - - -async def _mock_lock_with_unlatch(hass: HomeAssistant) -> LockDetail: - return await _mock_lock_from_fixture(hass, "get_lock.online_with_unlatch.json") - - -def _mock_lock_operation_activity( - lock: Lock, action: str, offset: float -) -> LockOperationActivity: - return LockOperationActivity( - SOURCE_LOCK_OPERATE, - { - "dateTime": (time.time() + offset) * 1000, 
- "deviceID": lock.device_id, - "deviceType": "lock", - "action": action, - }, - ) - - -def _mock_door_operation_activity( - lock: Lock, action: str, offset: float -) -> DoorOperationActivity: - return DoorOperationActivity( - SOURCE_LOCK_OPERATE, - { - "dateTime": (time.time() + offset) * 1000, - "deviceID": lock.device_id, - "deviceType": "lock", - "action": action, - }, - ) - - -def _activity_from_dict(activity_dict: dict[str, Any]) -> Activity | None: - action = activity_dict.get("action") - - activity_dict["dateTime"] = time.time() * 1000 - - if action in ACTIVITY_ACTIONS_DOORBELL_DING: - return DoorbellDingActivity(SOURCE_LOG, activity_dict) - if action in ACTIVITY_ACTIONS_DOORBELL_MOTION: - return DoorbellMotionActivity(SOURCE_LOG, activity_dict) - if action in ACTIVITY_ACTIONS_DOORBELL_VIEW: - return DoorbellViewActivity(SOURCE_LOG, activity_dict) - if action in ACTIVITY_ACTIONS_LOCK_OPERATION: - return LockOperationActivity(SOURCE_LOG, activity_dict) - if action in ACTIVITY_ACTIONS_DOOR_OPERATION: - return DoorOperationActivity(SOURCE_LOG, activity_dict) - if action in ACTIVITY_ACTIONS_BRIDGE_OPERATION: - return BridgeOperationActivity(SOURCE_LOG, activity_dict) - return None diff --git a/tests/components/yale/snapshots/test_binary_sensor.ambr b/tests/components/yale/snapshots/test_binary_sensor.ambr deleted file mode 100644 index e294cb7c76c..00000000000 --- a/tests/components/yale/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: test_doorbell_device_registry - DeviceRegistryEntrySnapshot({ - 'area_id': 'tmt100_name', - 'config_entries': , - 'configuration_url': 'https://account.aaecosystem.com', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'yale', - 'tmt100', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Yale Home Inc.', - 'model': 'hydra1', - 'model_id': None, - 'name': 'tmt100 Name', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': 'tmt100 Name', - 'sw_version': '3.1.0-HYDRC75+201909251139', - 'via_device_id': None, - }) -# --- diff --git a/tests/components/yale/snapshots/test_diagnostics.ambr b/tests/components/yale/snapshots/test_diagnostics.ambr deleted file mode 100644 index c3d8d8e2aaa..00000000000 --- a/tests/components/yale/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,125 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics - dict({ - 'brand': 'yale_global', - 'doorbells': dict({ - 'K98GiDT45GUL': dict({ - 'HouseID': '**REDACTED**', - 'LockID': 'BBBB1F5F11114C24CCCC97571DD6AAAA', - 'appID': 'august-iphone', - 'caps': list([ - 'reconnect', - ]), - 'createdAt': '2016-11-26T22:27:11.176Z', - 'doorbellID': 'K98GiDT45GUL', - 'doorbellServerURL': 'https://doorbells.august.com', - 'dvrSubscriptionSetupDone': True, - 'firmwareVersion': '2.3.0-RC153+201711151527', - 'installDate': '2016-11-26T22:27:11.176Z', - 'installUserID': '**REDACTED**', - 'name': 'Front Door', - 'pubsubChannel': '**REDACTED**', - 'recentImage': '**REDACTED**', - 'serialNumber': 'tBXZR0Z35E', - 'settings': dict({ - 'ABREnabled': True, - 'IREnabled': True, - 'IVAEnabled': False, - 'JPGQuality': 70, - 'batteryLowThreshold': 3.1, - 'batteryRun': False, - 'batteryUseThreshold': 3.4, - 'bitrateCeiling': 512000, - 'buttonpush_notifications': True, - 'debug': False, - 'directLink': True, - 'initialBitrate': 384000, - 'irConfiguration': 8448272, - 'keepEncoderRunning': True, - 
'micVolume': 100, - 'minACNoScaling': 40, - 'motion_notifications': True, - 'notify_when_offline': True, - 'overlayEnabled': True, - 'ringSoundEnabled': True, - 'speakerVolume': 92, - 'turnOffCamera': False, - 'videoResolution': '640x480', - }), - 'status': 'doorbell_call_status_online', - 'status_timestamp': 1512811834532, - 'telemetry': dict({ - 'BSSID': '88:ee:00:dd:aa:11', - 'SSID': 'foo_ssid', - 'ac_in': 23.856874, - 'battery': 4.061763, - 'battery_soc': 96, - 'battery_soh': 95, - 'date': '2017-12-10 08:05:12', - 'doorbell_low_battery': False, - 'ip_addr': '10.0.1.11', - 'link_quality': 54, - 'load_average': '0.50 0.47 0.35 1/154 9345', - 'signal_level': -56, - 'steady_ac_in': 22.196405, - 'temperature': 28.25, - 'updated_at': '2017-12-10T08:05:13.650Z', - 'uptime': '16168.75 13830.49', - 'wifi_freq': 5745, - }), - 'updatedAt': '2017-12-10T08:05:13.650Z', - }), - }), - 'locks': dict({ - 'online_with_doorsense': dict({ - 'Bridge': dict({ - '_id': 'bridgeid', - 'deviceModel': 'august-connect', - 'firmwareVersion': '2.2.1', - 'hyperBridge': True, - 'mfgBridgeID': 'C5WY200WSH', - 'operative': True, - 'status': dict({ - 'current': 'online', - 'lastOffline': '2000-00-00T00:00:00.447Z', - 'lastOnline': '2000-00-00T00:00:00.447Z', - 'updated': '2000-00-00T00:00:00.447Z', - }), - }), - 'Calibrated': False, - 'Created': '2000-00-00T00:00:00.447Z', - 'HouseID': '**REDACTED**', - 'HouseName': 'Test', - 'LockID': 'online_with_doorsense', - 'LockName': 'Online door with doorsense', - 'LockStatus': dict({ - 'dateTime': '2017-12-10T04:48:30.272Z', - 'doorState': 'open', - 'isLockStatusChanged': False, - 'status': 'locked', - 'valid': True, - }), - 'SerialNumber': 'XY', - 'Type': 1001, - 'Updated': '2000-00-00T00:00:00.447Z', - 'battery': 0.922, - 'currentFirmwareVersion': 'undefined-4.3.0-1.8.14', - 'homeKitEnabled': True, - 'hostLockInfo': dict({ - 'manufacturer': 'yale', - 'productID': 1536, - 'productTypeID': 32770, - 'serialNumber': 'ABC', - }), - 'isGalileo': False, - 'macAddress': '12:22', - 'pins': '**REDACTED**', - 'pubsubChannel': '**REDACTED**', - 'skuNumber': 'AUG-MD01', - 'supportsEntryCodes': True, - 'timeZone': 'Pacific/Hawaii', - 'zWaveEnabled': False, - }), - }), - }) -# --- diff --git a/tests/components/yale/snapshots/test_lock.ambr b/tests/components/yale/snapshots/test_lock.ambr deleted file mode 100644 index b1a9f6a4d86..00000000000 --- a/tests/components/yale/snapshots/test_lock.ambr +++ /dev/null @@ -1,37 +0,0 @@ -# serializer version: 1 -# name: test_lock_device_registry - DeviceRegistryEntrySnapshot({ - 'area_id': 'online_with_doorsense_name', - 'config_entries': , - 'configuration_url': 'https://account.aaecosystem.com', - 'connections': set({ - tuple( - 'bluetooth', - '12:22', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'yale', - 'online_with_doorsense', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Yale Home Inc.', - 'model': 'AUG-MD01', - 'model_id': None, - 'name': 'online_with_doorsense Name', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': 'online_with_doorsense Name', - 'sw_version': 'undefined-4.3.0-1.8.14', - 'via_device_id': None, - }) -# --- diff --git a/tests/components/yale/snapshots/test_sensor.ambr b/tests/components/yale/snapshots/test_sensor.ambr deleted file mode 100644 index a425cfa90de..00000000000 --- a/tests/components/yale/snapshots/test_sensor.ambr +++ /dev/null @@ -1,95 +0,0 @@ -# serializer version: 
1 -# name: test_lock_operator_autorelock - ReadOnlyDict({ - 'autorelock': True, - 'friendly_name': 'online_with_doorsense Name Operator', - 'keypad': False, - 'manual': False, - 'method': 'autorelock', - 'remote': False, - 'tag': False, - }) -# --- -# name: test_lock_operator_keypad - ReadOnlyDict({ - 'autorelock': False, - 'friendly_name': 'online_with_doorsense Name Operator', - 'keypad': True, - 'manual': False, - 'method': 'keypad', - 'remote': False, - 'tag': False, - }) -# --- -# name: test_lock_operator_manual - ReadOnlyDict({ - 'autorelock': False, - 'friendly_name': 'online_with_doorsense Name Operator', - 'keypad': False, - 'manual': True, - 'method': 'manual', - 'remote': False, - 'tag': False, - }) -# --- -# name: test_lock_operator_remote - ReadOnlyDict({ - 'autorelock': False, - 'friendly_name': 'online_with_doorsense Name Operator', - 'keypad': False, - 'manual': False, - 'method': 'remote', - 'remote': True, - 'tag': False, - }) -# --- -# name: test_restored_state - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'autorelock': False, - 'entity_picture': 'image.png', - 'friendly_name': 'online_with_doorsense Name Operator', - 'keypad': False, - 'manual': False, - 'method': 'tag', - 'remote': False, - 'tag': True, - }), - 'context': , - 'entity_id': 'sensor.online_with_doorsense_name_operator', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Tag Unlock', - }) -# --- -# name: test_unlock_operator_manual - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'autorelock': False, - 'friendly_name': 'online_with_doorsense Name Operator', - 'keypad': False, - 'manual': True, - 'method': 'manual', - 'remote': False, - 'tag': False, - }), - 'context': , - 'entity_id': 'sensor.online_with_doorsense_name_operator', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Your favorite elven princess', - }) -# --- -# name: test_unlock_operator_tag - ReadOnlyDict({ - 'autorelock': False, - 'friendly_name': 'online_with_doorsense Name Operator', - 'keypad': False, - 'manual': False, - 'method': 'tag', - 'remote': False, - 'tag': True, - }) -# --- diff --git a/tests/components/yale/test_binary_sensor.py b/tests/components/yale/test_binary_sensor.py deleted file mode 100644 index 811c845e359..00000000000 --- a/tests/components/yale/test_binary_sensor.py +++ /dev/null @@ -1,308 +0,0 @@ -"""The binary_sensor tests for the yale platform.""" - -import datetime - -from freezegun.api import FrozenDateTimeFactory -from syrupy import SnapshotAssertion - -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_LOCK, - SERVICE_UNLOCK, - STATE_OFF, - STATE_ON, - STATE_UNAVAILABLE, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr -import homeassistant.util.dt as dt_util - -from .mocks import ( - _create_yale_with_devices, - _mock_activities_from_fixture, - _mock_doorbell_from_fixture, - _mock_doorsense_enabled_yale_lock_detail, - _mock_lock_from_fixture, -) - -from tests.common import async_fire_time_changed - - -async def test_doorsense(hass: HomeAssistant) -> None: - """Test creation of a lock with doorsense and bridge.""" - lock_one = await _mock_lock_from_fixture( - hass, "get_lock.online_with_doorsense.json" - ) - await _create_yale_with_devices(hass, [lock_one]) - states = hass.states - assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON - - data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} 
- await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - - assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON - - await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - - assert ( - states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF - ) - - -async def test_lock_bridge_offline(hass: HomeAssistant) -> None: - """Test creation of a lock with doorsense and bridge that goes offline.""" - lock_one = await _mock_lock_from_fixture( - hass, "get_lock.online_with_doorsense.json" - ) - activities = await _mock_activities_from_fixture( - hass, "get_activity.bridge_offline.json" - ) - await _create_yale_with_devices(hass, [lock_one], activities=activities) - states = hass.states - assert ( - states.get("binary_sensor.online_with_doorsense_name_door").state - == STATE_UNAVAILABLE - ) - - -async def test_create_doorbell(hass: HomeAssistant) -> None: - """Test creation of a doorbell.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - await _create_yale_with_devices(hass, [doorbell_one]) - states = hass.states - assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF - assert ( - states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF - ) - assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON - assert ( - states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF - ) - assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF - assert ( - states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF - ) - - -async def test_create_doorbell_offline(hass: HomeAssistant) -> None: - """Test creation of a doorbell that is offline.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") - await _create_yale_with_devices(hass, [doorbell_one]) - states = hass.states - assert states.get("binary_sensor.tmt100_name_motion").state == STATE_UNAVAILABLE - assert states.get("binary_sensor.tmt100_name_connectivity").state == STATE_OFF - assert ( - states.get("binary_sensor.tmt100_name_doorbell_ding").state == STATE_UNAVAILABLE - ) - - -async def test_create_doorbell_with_motion( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: - """Test creation of a doorbell.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - activities = await _mock_activities_from_fixture( - hass, "get_activity.doorbell_motion.json" - ) - await _create_yale_with_devices(hass, [doorbell_one], activities=activities) - states = hass.states - assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON - assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON - assert ( - states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF - ) - freezer.tick(40) - async_fire_time_changed(hass) - await hass.async_block_till_done() - assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF - - -async def test_doorbell_update_via_socketio( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: - """Test creation of a doorbell that can be updated via socketio.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - - _, socketio = await _create_yale_with_devices(hass, [doorbell_one]) - assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" - states = hass.states - 
assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF - assert ( - states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF - ) - - listener = list(socketio._listeners)[0] - listener( - doorbell_one.device_id, - dt_util.utcnow(), - { - "status": "imagecapture", - "data": { - "result": { - "created_at": "2021-03-16T01:07:08.817Z", - "secure_url": ( - "https://dyu7azbnaoi74.cloudfront.net/zip/images/zip.jpeg" - ), - }, - }, - }, - ) - - await hass.async_block_till_done() - - assert states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_ON - - listener( - doorbell_one.device_id, - dt_util.utcnow(), - { - "status": "doorbell_motion_detected", - "data": { - "event": "doorbell_motion_detected", - "image": { - "height": 640, - "width": 480, - "format": "jpg", - "created_at": "2021-03-16T02:36:26.886Z", - "bytes": 14061, - "secure_url": ( - "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg" - ), - "url": "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg", - "etag": "09e839331c4ea59eef28081f2caa0e90", - }, - "doorbellName": "Front Door", - "callID": None, - "origin": "mars-api", - "mutableContent": True, - }, - }, - ) - - await hass.async_block_till_done() - - assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON - assert ( - states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF - ) - - freezer.tick(40) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert ( - states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF - ) - - listener( - doorbell_one.device_id, - dt_util.utcnow(), - { - "status": "buttonpush", - }, - ) - - await hass.async_block_till_done() - - assert states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_ON - - freezer.tick(40) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert ( - states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF - ) - - -async def test_doorbell_device_registry( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test creation of a lock with doorsense and bridge ands up in the registry.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") - await _create_yale_with_devices(hass, [doorbell_one]) - - reg_device = device_registry.async_get_device(identifiers={("yale", "tmt100")}) - assert reg_device == snapshot - - -async def test_door_sense_update_via_socketio(hass: HomeAssistant) -> None: - """Test creation of a lock with doorsense and bridge.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - assert lock_one.pubsub_channel == "pubsub" - - activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") - config_entry, socketio = await _create_yale_with_devices( - hass, [lock_one], activities=activities - ) - states = hass.states - assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON - - listener = list(socketio._listeners)[0] - listener( - lock_one.device_id, - dt_util.utcnow(), - {"status": "kAugLockState_Unlocking", "doorState": "closed"}, - ) - - await hass.async_block_till_done() - - assert ( - states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF - ) - - listener( - lock_one.device_id, - dt_util.utcnow(), - {"status": "kAugLockState_Locking", "doorState": "open"}, - ) - - await hass.async_block_till_done() - - assert 
states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON - - async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) - await hass.async_block_till_done() - assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON - - socketio.connected = True - async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) - await hass.async_block_till_done() - assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON - - # Ensure socketio status is always preserved - async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) - await hass.async_block_till_done() - assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON - - listener( - lock_one.device_id, - dt_util.utcnow(), - {"status": "kAugLockState_Unlocking", "doorState": "open"}, - ) - - await hass.async_block_till_done() - assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON - - async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) - await hass.async_block_till_done() - assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON - - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - -async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: - """Test creation of a lock with a doorbell.""" - lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") - await _create_yale_with_devices(hass, [lock_one]) - states = hass.states - assert ( - states.get( - "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding" - ).state - == STATE_OFF - ) diff --git a/tests/components/yale/test_button.py b/tests/components/yale/test_button.py deleted file mode 100644 index 92d3ecef859..00000000000 --- a/tests/components/yale/test_button.py +++ /dev/null @@ -1,23 +0,0 @@ -"""The button tests for the yale platform.""" - -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant - -from .mocks import _create_yale_api_with_devices, _mock_lock_from_fixture - - -async def test_wake_lock(hass: HomeAssistant) -> None: - """Test creation of a lock and wake it.""" - lock_one = await _mock_lock_from_fixture( - hass, "get_lock.online_with_doorsense.json" - ) - _, api_instance, _ = await _create_yale_api_with_devices(hass, [lock_one]) - entity_id = "button.online_with_doorsense_name_wake" - binary_sensor_online_with_doorsense_name = hass.states.get(entity_id) - assert binary_sensor_online_with_doorsense_name is not None - api_instance.async_status_async.reset_mock() - await hass.services.async_call( - BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) - api_instance.async_status_async.assert_called_once() diff --git a/tests/components/yale/test_camera.py b/tests/components/yale/test_camera.py deleted file mode 100644 index 122f3c65def..00000000000 --- a/tests/components/yale/test_camera.py +++ /dev/null @@ -1,93 +0,0 @@ -"""The camera tests for the yale platform.""" - -from http import HTTPStatus -from unittest.mock import patch - -from yalexs.const import Brand -from yalexs.doorbell import ContentTokenExpired - -from homeassistant.components.camera import CameraState -from homeassistant.core import HomeAssistant - -from .mocks import _create_yale_with_devices, _mock_doorbell_from_fixture - -from 
tests.typing import ClientSessionGenerator - - -async def test_create_doorbell( - hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator -) -> None: - """Test creation of a doorbell.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - - with patch.object( - doorbell_one, "async_get_doorbell_image", create=False, return_value="image" - ): - await _create_yale_with_devices(hass, [doorbell_one], brand=Brand.YALE_GLOBAL) - - camera_k98gidt45gul_name_camera = hass.states.get( - "camera.k98gidt45gul_name_camera" - ) - assert camera_k98gidt45gul_name_camera.state == CameraState.IDLE - - url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ - "entity_picture" - ] - - client = await hass_client_no_auth() - resp = await client.get(url) - assert resp.status == HTTPStatus.OK - body = await resp.text() - assert body == "image" - - -async def test_doorbell_refresh_content_token_recover( - hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator -) -> None: - """Test camera image content token expired.""" - doorbell_two = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - with patch.object( - doorbell_two, - "async_get_doorbell_image", - create=False, - side_effect=[ContentTokenExpired, "image"], - ): - await _create_yale_with_devices( - hass, - [doorbell_two], - brand=Brand.YALE_GLOBAL, - ) - url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ - "entity_picture" - ] - - client = await hass_client_no_auth() - resp = await client.get(url) - assert resp.status == HTTPStatus.OK - body = await resp.text() - assert body == "image" - - -async def test_doorbell_refresh_content_token_fail( - hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator -) -> None: - """Test camera image content token expired.""" - doorbell_two = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - with patch.object( - doorbell_two, - "async_get_doorbell_image", - create=False, - side_effect=ContentTokenExpired, - ): - await _create_yale_with_devices( - hass, - [doorbell_two], - brand=Brand.YALE_GLOBAL, - ) - url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ - "entity_picture" - ] - - client = await hass_client_no_auth() - resp = await client.get(url) - assert resp.status == HTTPStatus.INTERNAL_SERVER_ERROR diff --git a/tests/components/yale/test_config_flow.py b/tests/components/yale/test_config_flow.py deleted file mode 100644 index 004162c0ebf..00000000000 --- a/tests/components/yale/test_config_flow.py +++ /dev/null @@ -1,275 +0,0 @@ -"""Test the yale config flow.""" - -from collections.abc import Generator -from unittest.mock import ANY, Mock, patch - -import pytest - -from homeassistant.components.yale.application_credentials import ( - OAUTH2_AUTHORIZE, - OAUTH2_TOKEN, -) -from homeassistant.components.yale.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import config_entry_oauth2_flow - -from .mocks import USER_ID - -from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker -from tests.typing import ClientSessionGenerator - -CLIENT_ID = "1" - - -@pytest.fixture -def mock_setup_entry() -> Generator[Mock]: - """Patch setup entry.""" - with patch( - "homeassistant.components.yale.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - 
-@pytest.mark.usefixtures("client_credentials") -@pytest.mark.usefixtures("current_request_with_host") -async def test_full_flow( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - jwt: str, - mock_setup_entry: Mock, -) -> None: - """Check full flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": "https://example.com/auth/external/callback", - }, - ) - - assert result["url"] == ( - f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" - "&redirect_uri=https://example.com/auth/external/callback" - f"&state={state}" - ) - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.clear_requests() - aioclient_mock.post( - OAUTH2_TOKEN, - json={ - "access_token": jwt, - "scope": "any", - "expires_in": 86399, - "refresh_token": "mock-refresh-token", - "user_id": "mock-user-id", - "expires_at": 1697753347, - }, - ) - - result2 = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - entry = hass.config_entries.async_entries(DOMAIN)[0] - assert entry.unique_id == USER_ID - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["result"].unique_id == USER_ID - assert entry.data == { - "auth_implementation": "yale", - "token": { - "access_token": jwt, - "expires_at": ANY, - "expires_in": ANY, - "refresh_token": "mock-refresh-token", - "scope": "any", - "user_id": "mock-user-id", - }, - } - - -@pytest.mark.usefixtures("client_credentials") -@pytest.mark.usefixtures("current_request_with_host") -async def test_full_flow_already_exists( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - jwt: str, - mock_setup_entry: Mock, - mock_config_entry: MockConfigEntry, -) -> None: - """Check full flow for a user that already exists.""" - - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": "https://example.com/auth/external/callback", - }, - ) - - assert result["url"] == ( - f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" - "&redirect_uri=https://example.com/auth/external/callback" - f"&state={state}" - ) - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.clear_requests() - aioclient_mock.post( - OAUTH2_TOKEN, - json={ - "access_token": jwt, - "scope": "any", - "expires_in": 86399, - "refresh_token": "mock-refresh-token", - "user_id": "mock-user-id", - "expires_at": 1697753347, - }, - ) - - result2 = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "already_configured" - - -@pytest.mark.usefixtures("client_credentials") -@pytest.mark.usefixtures("current_request_with_host") -async def test_reauth( - hass: HomeAssistant, - 
hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - mock_config_entry: MockConfigEntry, - reauth_jwt: str, - mock_setup_entry: Mock, -) -> None: - """Test the reauthentication case updates the existing config entry.""" - - mock_config_entry.add_to_hass(hass) - - mock_config_entry.async_start_reauth(hass) - await hass.async_block_till_done() - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - result = flows[0] - assert result["step_id"] == "auth" - - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": "https://example.com/auth/external/callback", - }, - ) - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.post( - OAUTH2_TOKEN, - json={ - "access_token": reauth_jwt, - "expires_in": 86399, - "refresh_token": "mock-refresh-token", - "user_id": USER_ID, - "token_type": "Bearer", - "expires_at": 1697753347, - }, - ) - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - await hass.async_block_till_done() - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - - assert mock_config_entry.unique_id == USER_ID - assert "token" in mock_config_entry.data - # Verify access token is refreshed - assert mock_config_entry.data["token"]["access_token"] == reauth_jwt - - -@pytest.mark.usefixtures("client_credentials") -@pytest.mark.usefixtures("current_request_with_host") -async def test_reauth_wrong_account( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - mock_config_entry: MockConfigEntry, - reauth_jwt_wrong_account: str, - jwt: str, - mock_setup_entry: Mock, -) -> None: - """Test the reauthentication aborts, if user tries to reauthenticate with another account.""" - assert mock_config_entry.data["token"]["access_token"] == jwt - - mock_config_entry.add_to_hass(hass) - - mock_config_entry.async_start_reauth(hass) - await hass.async_block_till_done() - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - result = flows[0] - assert result["step_id"] == "auth" - - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": "https://example.com/auth/external/callback", - }, - ) - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.post( - OAUTH2_TOKEN, - json={ - "access_token": reauth_jwt_wrong_account, - "expires_in": 86399, - "refresh_token": "mock-refresh-token", - "token_type": "Bearer", - "expires_at": 1697753347, - }, - ) - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - await hass.async_block_till_done() - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_invalid_user" - - assert mock_config_entry.unique_id == USER_ID - assert "token" in mock_config_entry.data - # Verify access token is like before - assert mock_config_entry.data["token"]["access_token"] == jwt diff --git a/tests/components/yale/test_diagnostics.py 
b/tests/components/yale/test_diagnostics.py deleted file mode 100644 index e5fd6b1c1a7..00000000000 --- a/tests/components/yale/test_diagnostics.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Test yale diagnostics.""" - -from syrupy import SnapshotAssertion - -from homeassistant.core import HomeAssistant - -from .mocks import ( - _create_yale_api_with_devices, - _mock_doorbell_from_fixture, - _mock_lock_from_fixture, -) - -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test generating diagnostics for a config entry.""" - lock_one = await _mock_lock_from_fixture( - hass, "get_lock.online_with_doorsense.json" - ) - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - - entry, _, _ = await _create_yale_api_with_devices(hass, [lock_one, doorbell_one]) - diag = await get_diagnostics_for_config_entry(hass, hass_client, entry) - - assert diag == snapshot diff --git a/tests/components/yale/test_event.py b/tests/components/yale/test_event.py deleted file mode 100644 index 7aeb9d8f12b..00000000000 --- a/tests/components/yale/test_event.py +++ /dev/null @@ -1,162 +0,0 @@ -"""The event tests for the yale.""" - -from freezegun.api import FrozenDateTimeFactory - -from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN -from homeassistant.core import HomeAssistant -import homeassistant.util.dt as dt_util - -from .mocks import ( - _create_yale_with_devices, - _mock_activities_from_fixture, - _mock_doorbell_from_fixture, - _mock_lock_from_fixture, -) - -from tests.common import async_fire_time_changed - - -async def test_create_doorbell(hass: HomeAssistant) -> None: - """Test creation of a doorbell.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - await _create_yale_with_devices(hass, [doorbell_one]) - - motion_state = hass.states.get("event.k98gidt45gul_name_motion") - assert motion_state is not None - assert motion_state.state == STATE_UNKNOWN - doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") - assert doorbell_state is not None - assert doorbell_state.state == STATE_UNKNOWN - - -async def test_create_doorbell_offline(hass: HomeAssistant) -> None: - """Test creation of a doorbell that is offline.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") - await _create_yale_with_devices(hass, [doorbell_one]) - motion_state = hass.states.get("event.tmt100_name_motion") - assert motion_state is not None - assert motion_state.state == STATE_UNAVAILABLE - doorbell_state = hass.states.get("event.tmt100_name_doorbell") - assert doorbell_state is not None - assert doorbell_state.state == STATE_UNAVAILABLE - - -async def test_create_doorbell_with_motion( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: - """Test creation of a doorbell.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - activities = await _mock_activities_from_fixture( - hass, "get_activity.doorbell_motion.json" - ) - await _create_yale_with_devices(hass, [doorbell_one], activities=activities) - - motion_state = hass.states.get("event.k98gidt45gul_name_motion") - assert motion_state is not None - assert motion_state.state != STATE_UNKNOWN - isotime = motion_state.state - doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") - assert doorbell_state is not None - 
assert doorbell_state.state == STATE_UNKNOWN - - freezer.tick(40) - async_fire_time_changed(hass) - await hass.async_block_till_done() - motion_state = hass.states.get("event.k98gidt45gul_name_motion") - assert motion_state.state == isotime - - -async def test_doorbell_update_via_socketio( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> None: - """Test creation of a doorbell that can be updated via socketio.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - - _, socketio = await _create_yale_with_devices(hass, [doorbell_one]) - assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" - - motion_state = hass.states.get("event.k98gidt45gul_name_motion") - assert motion_state is not None - assert motion_state.state == STATE_UNKNOWN - doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") - assert doorbell_state is not None - assert doorbell_state.state == STATE_UNKNOWN - - listener = list(socketio._listeners)[0] - listener( - doorbell_one.device_id, - dt_util.utcnow(), - { - "status": "doorbell_motion_detected", - "data": { - "event": "doorbell_motion_detected", - "image": { - "height": 640, - "width": 480, - "format": "jpg", - "created_at": "2021-03-16T02:36:26.886Z", - "bytes": 14061, - "secure_url": ( - "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg" - ), - "url": "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg", - "etag": "09e839331c4ea59eef28081f2caa0e90", - }, - "doorbellName": "Front Door", - "callID": None, - "origin": "mars-api", - "mutableContent": True, - }, - }, - ) - - await hass.async_block_till_done() - - motion_state = hass.states.get("event.k98gidt45gul_name_motion") - assert motion_state is not None - assert motion_state.state != STATE_UNKNOWN - isotime = motion_state.state - - freezer.tick(40) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - motion_state = hass.states.get("event.k98gidt45gul_name_motion") - assert motion_state is not None - assert motion_state.state != STATE_UNKNOWN - - listener( - doorbell_one.device_id, - dt_util.utcnow(), - { - "status": "buttonpush", - }, - ) - - await hass.async_block_till_done() - - doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") - assert doorbell_state is not None - assert doorbell_state.state != STATE_UNKNOWN - isotime = motion_state.state - - freezer.tick(40) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") - assert doorbell_state is not None - assert doorbell_state.state != STATE_UNKNOWN - assert motion_state.state == isotime - - -async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: - """Test creation of a lock with a doorbell.""" - lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") - await _create_yale_with_devices(hass, [lock_one]) - - doorbell_state = hass.states.get( - "event.a6697750d607098bae8d6baa11ef8063_name_doorbell" - ) - assert doorbell_state is not None - assert doorbell_state.state == STATE_UNKNOWN diff --git a/tests/components/yale/test_init.py b/tests/components/yale/test_init.py deleted file mode 100644 index c028924199e..00000000000 --- a/tests/components/yale/test_init.py +++ /dev/null @@ -1,236 +0,0 @@ -"""The tests for the yale platform.""" - -from unittest.mock import Mock - -from aiohttp import ClientResponseError -import pytest -from yalexs.exceptions import InvalidAuth, YaleApiError - -from homeassistant.components.lock import DOMAIN 
as LOCK_DOMAIN, LockState -from homeassistant.components.yale.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_LOCK, - SERVICE_OPEN, - SERVICE_UNLOCK, - STATE_ON, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.setup import async_setup_component - -from .mocks import ( - _create_yale_with_devices, - _mock_doorsense_enabled_yale_lock_detail, - _mock_doorsense_missing_yale_lock_detail, - _mock_inoperative_yale_lock_detail, - _mock_lock_with_offline_key, - _mock_operative_yale_lock_detail, -) - -from tests.typing import WebSocketGenerator - - -async def test_yale_api_is_failing(hass: HomeAssistant) -> None: - """Config entry state is SETUP_RETRY when yale api is failing.""" - - config_entry, socketio = await _create_yale_with_devices( - hass, - authenticate_side_effect=YaleApiError( - "offline", ClientResponseError(None, None, status=500) - ), - ) - assert config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_yale_is_offline(hass: HomeAssistant) -> None: - """Config entry state is SETUP_RETRY when yale is offline.""" - - config_entry, socketio = await _create_yale_with_devices( - hass, authenticate_side_effect=TimeoutError - ) - - assert config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_yale_late_auth_failure(hass: HomeAssistant) -> None: - """Test we can detect a late auth failure.""" - config_entry, socketio = await _create_yale_with_devices( - hass, - authenticate_side_effect=InvalidAuth( - "authfailed", ClientResponseError(None, None, status=401) - ), - ) - - assert config_entry.state is ConfigEntryState.SETUP_ERROR - flows = hass.config_entries.flow.async_progress() - - assert flows[0]["step_id"] == "pick_implementation" - - -async def test_unlock_throws_yale_api_http_error(hass: HomeAssistant) -> None: - """Test unlock throws correct error on http error.""" - mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) - aiohttp_client_response_exception = ClientResponseError(None, None, status=400) - - def _unlock_return_activities_side_effect(access_token, device_id): - raise YaleApiError( - "This should bubble up as its user consumable", - aiohttp_client_response_exception, - ) - - await _create_yale_with_devices( - hass, - [mocked_lock_detail], - api_call_side_effects={ - "unlock_return_activities": _unlock_return_activities_side_effect - }, - ) - data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - with pytest.raises( - HomeAssistantError, - match=( - "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" - " consumable" - ), - ): - await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - - -async def test_lock_throws_yale_api_http_error(hass: HomeAssistant) -> None: - """Test lock throws correct error on http error.""" - mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) - aiohttp_client_response_exception = ClientResponseError(None, None, status=400) - - def _lock_return_activities_side_effect(access_token, device_id): - raise YaleApiError( - "This should bubble up as its user consumable", - aiohttp_client_response_exception, - ) - - await _create_yale_with_devices( - hass, - [mocked_lock_detail], - api_call_side_effects={ - "lock_return_activities": _lock_return_activities_side_effect - }, - ) - data = 
{ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - with pytest.raises( - HomeAssistantError, - match=( - "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" - " consumable" - ), - ): - await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - - -async def test_open_throws_hass_service_not_supported_error( - hass: HomeAssistant, -) -> None: - """Test open throws correct error on entity does not support this service error.""" - mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) - await _create_yale_with_devices(hass, [mocked_lock_detail]) - data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - with pytest.raises(HomeAssistantError): - await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) - - -async def test_inoperative_locks_are_filtered_out(hass: HomeAssistant) -> None: - """Ensure inoperative locks do not get setup.""" - yale_operative_lock = await _mock_operative_yale_lock_detail(hass) - yale_inoperative_lock = await _mock_inoperative_yale_lock_detail(hass) - await _create_yale_with_devices(hass, [yale_operative_lock, yale_inoperative_lock]) - - lock_abc_name = hass.states.get("lock.abc_name") - assert lock_abc_name is None - lock_a6697750d607098bae8d6baa11ef8063_name = hass.states.get( - "lock.a6697750d607098bae8d6baa11ef8063_name" - ) - assert lock_a6697750d607098bae8d6baa11ef8063_name.state == LockState.LOCKED - - -async def test_lock_has_doorsense(hass: HomeAssistant) -> None: - """Check to see if a lock has doorsense.""" - doorsenselock = await _mock_doorsense_enabled_yale_lock_detail(hass) - nodoorsenselock = await _mock_doorsense_missing_yale_lock_detail(hass) - await _create_yale_with_devices(hass, [doorsenselock, nodoorsenselock]) - - binary_sensor_online_with_doorsense_name_open = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name_open.state == STATE_ON - binary_sensor_missing_doorsense_id_name_open = hass.states.get( - "binary_sensor.missing_with_doorsense_name_door" - ) - assert binary_sensor_missing_doorsense_id_name_open is None - - -async def test_load_unload(hass: HomeAssistant) -> None: - """Config entry can be unloaded.""" - - yale_operative_lock = await _mock_operative_yale_lock_detail(hass) - yale_inoperative_lock = await _mock_inoperative_yale_lock_detail(hass) - config_entry, socketio = await _create_yale_with_devices( - hass, [yale_operative_lock, yale_inoperative_lock] - ) - - assert config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.NOT_LOADED - - -async def test_load_triggers_ble_discovery( - hass: HomeAssistant, mock_discovery: Mock -) -> None: - """Test that loading a lock that supports offline ble operation passes the keys to yalexe_ble.""" - - yale_lock_with_key = await _mock_lock_with_offline_key(hass) - yale_lock_without_key = await _mock_operative_yale_lock_detail(hass) - - config_entry, socketio = await _create_yale_with_devices( - hass, [yale_lock_with_key, yale_lock_without_key] - ) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - - assert len(mock_discovery.mock_calls) == 1 - assert mock_discovery.mock_calls[0].kwargs["data"] == { - "name": "Front Door Lock", - "address": None, - "serial": "X2FSW05DGA", - "key": "kkk01d4300c1dcxxx1c330f794941111", - "slot": 1, - } - - -async def 
test_device_remove_devices( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test we can only remove a device that no longer exists.""" - assert await async_setup_component(hass, "config", {}) - yale_operative_lock = await _mock_operative_yale_lock_detail(hass) - config_entry, socketio = await _create_yale_with_devices( - hass, [yale_operative_lock] - ) - entity = entity_registry.entities["lock.a6697750d607098bae8d6baa11ef8063_name"] - - device_entry = device_registry.async_get(entity.device_id) - client = await hass_ws_client(hass) - response = await client.remove_device(device_entry.id, config_entry.entry_id) - assert not response["success"] - - dead_device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, "remove-device-id")}, - ) - response = await client.remove_device(dead_device_entry.id, config_entry.entry_id) - assert response["success"] diff --git a/tests/components/yale/test_lock.py b/tests/components/yale/test_lock.py deleted file mode 100644 index f0fe018759c..00000000000 --- a/tests/components/yale/test_lock.py +++ /dev/null @@ -1,425 +0,0 @@ -"""The lock tests for the yale platform.""" - -import datetime - -from aiohttp import ClientResponseError -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy import SnapshotAssertion -from yalexs.manager.activity import INITIAL_LOCK_RESYNC_TIME - -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_LOCK, - SERVICE_OPEN, - SERVICE_UNLOCK, - STATE_UNAVAILABLE, - STATE_UNKNOWN, -) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr, entity_registry as er -import homeassistant.util.dt as dt_util - -from .mocks import ( - _create_yale_with_devices, - _mock_activities_from_fixture, - _mock_doorsense_enabled_yale_lock_detail, - _mock_lock_from_fixture, - _mock_lock_with_unlatch, - _mock_operative_yale_lock_detail, -) - -from tests.common import async_fire_time_changed - - -async def test_lock_device_registry( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion -) -> None: - """Test creation of a lock with doorsense and bridge ands up in the registry.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - await _create_yale_with_devices(hass, [lock_one]) - - reg_device = device_registry.async_get_device( - identifiers={("yale", "online_with_doorsense")} - ) - assert reg_device == snapshot - - -async def test_lock_changed_by(hass: HomeAssistant) -> None: - """Test creation of a lock with doorsense and bridge.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") - await _create_yale_with_devices(hass, [lock_one], activities=activities) - - lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.LOCKED - assert lock_state.attributes["changed_by"] == "Your favorite elven princess" - - -async def test_state_locking(hass: HomeAssistant) -> None: - """Test creation of a lock with doorsense and bridge that is locking.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - activities = await _mock_activities_from_fixture(hass, "get_activity.locking.json") - await 
_create_yale_with_devices(hass, [lock_one], activities=activities) - - assert hass.states.get("lock.online_with_doorsense_name").state == LockState.LOCKING - - -async def test_state_unlocking(hass: HomeAssistant) -> None: - """Test creation of a lock with doorsense and bridge that is unlocking.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - activities = await _mock_activities_from_fixture( - hass, "get_activity.unlocking.json" - ) - await _create_yale_with_devices(hass, [lock_one], activities=activities) - - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == LockState.UNLOCKING - - -async def test_state_jammed(hass: HomeAssistant) -> None: - """Test creation of a lock with doorsense and bridge that is jammed.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - activities = await _mock_activities_from_fixture(hass, "get_activity.jammed.json") - await _create_yale_with_devices(hass, [lock_one], activities=activities) - - assert hass.states.get("lock.online_with_doorsense_name").state == LockState.JAMMED - - -async def test_one_lock_operation( - hass: HomeAssistant, entity_registry: er.EntityRegistry -) -> None: - """Test creation of a lock with doorsense and bridge.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - await _create_yale_with_devices(hass, [lock_one]) - - lock_state = hass.states.get("lock.online_with_doorsense_name") - - assert lock_state.state == LockState.LOCKED - - assert lock_state.attributes["battery_level"] == 92 - assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" - - data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} - await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - - lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.UNLOCKED - - assert lock_state.attributes["battery_level"] == 92 - assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" - - await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - - lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.LOCKED - - # No activity means it will be unavailable until the activity feed has data - assert entity_registry.async_get("sensor.online_with_doorsense_name_operator") - operator_state = hass.states.get("sensor.online_with_doorsense_name_operator") - assert operator_state.state == STATE_UNKNOWN - - -async def test_open_lock_operation(hass: HomeAssistant) -> None: - """Test open lock operation using the open service.""" - lock_with_unlatch = await _mock_lock_with_unlatch(hass) - await _create_yale_with_devices(hass, [lock_with_unlatch]) - - assert hass.states.get("lock.online_with_unlatch_name").state == LockState.LOCKED - - data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} - await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) - - assert hass.states.get("lock.online_with_unlatch_name").state == LockState.UNLOCKED - - -async def test_open_lock_operation_socketio_connected( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test open lock operation using the open service when socketio is connected.""" - lock_with_unlatch = await _mock_lock_with_unlatch(hass) - assert lock_with_unlatch.pubsub_channel == "pubsub" - - _, socketio = await _create_yale_with_devices(hass, 
[lock_with_unlatch]) - socketio.connected = True - - assert hass.states.get("lock.online_with_unlatch_name").state == LockState.LOCKED - - data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} - await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) - - listener = list(socketio._listeners)[0] - listener( - lock_with_unlatch.device_id, - dt_util.utcnow() + datetime.timedelta(seconds=2), - { - "status": "kAugLockState_Unlocked", - }, - ) - - await hass.async_block_till_done() - await hass.async_block_till_done() - - assert hass.states.get("lock.online_with_unlatch_name").state == LockState.UNLOCKED - await hass.async_block_till_done() - - -async def test_one_lock_operation_socketio_connected( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test lock and unlock operations are async when socketio is connected.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - assert lock_one.pubsub_channel == "pubsub" - states = hass.states - - _, socketio = await _create_yale_with_devices(hass, [lock_one]) - socketio.connected = True - - lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.LOCKED - assert lock_state.attributes["battery_level"] == 92 - assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" - - data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} - await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - - listener = list(socketio._listeners)[0] - listener( - lock_one.device_id, - dt_util.utcnow() + datetime.timedelta(seconds=1), - { - "status": "kAugLockState_Unlocked", - }, - ) - - await hass.async_block_till_done() - await hass.async_block_till_done() - - lock_state = states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.UNLOCKED - assert lock_state.attributes["battery_level"] == 92 - assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" - - await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - - listener( - lock_one.device_id, - dt_util.utcnow() + datetime.timedelta(seconds=2), - { - "status": "kAugLockState_Locked", - }, - ) - - await hass.async_block_till_done() - await hass.async_block_till_done() - - assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKED - - # No activity means it will be unavailable until the activity feed has data - assert entity_registry.async_get("sensor.online_with_doorsense_name_operator") - assert ( - states.get("sensor.online_with_doorsense_name_operator").state == STATE_UNKNOWN - ) - - freezer.tick(INITIAL_LOCK_RESYNC_TIME) - - listener( - lock_one.device_id, - dt_util.utcnow() + datetime.timedelta(seconds=2), - { - "status": "kAugLockState_Unlocked", - }, - ) - - await hass.async_block_till_done() - - assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKED - - -async def test_lock_jammed(hass: HomeAssistant) -> None: - """Test lock gets jammed on unlock.""" - - def _unlock_return_activities_side_effect(access_token, device_id): - raise ClientResponseError(None, None, status=531) - - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - await _create_yale_with_devices( - hass, - [lock_one], - api_call_side_effects={ - "unlock_return_activities": _unlock_return_activities_side_effect - }, - ) - - states = hass.states - lock_state = states.get("lock.online_with_doorsense_name") - assert lock_state.state == 
LockState.LOCKED - assert lock_state.attributes["battery_level"] == 92 - assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" - - data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} - await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - - assert states.get("lock.online_with_doorsense_name").state == LockState.JAMMED - - -async def test_lock_throws_exception_on_unknown_status_code( - hass: HomeAssistant, -) -> None: - """Test lock throws exception.""" - - def _unlock_return_activities_side_effect(access_token, device_id): - raise ClientResponseError(None, None, status=500) - - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - await _create_yale_with_devices( - hass, - [lock_one], - api_call_side_effects={ - "unlock_return_activities": _unlock_return_activities_side_effect - }, - ) - - lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_state.state == LockState.LOCKED - assert lock_state.attributes["battery_level"] == 92 - assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" - - data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} - with pytest.raises(ClientResponseError): - await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - - -async def test_one_lock_unknown_state(hass: HomeAssistant) -> None: - """Test creation of a lock with doorsense and bridge.""" - lock_one = await _mock_lock_from_fixture( - hass, - "get_lock.online.unknown_state.json", - ) - await _create_yale_with_devices(hass, [lock_one]) - - assert hass.states.get("lock.brokenid_name").state == STATE_UNKNOWN - - -async def test_lock_bridge_offline(hass: HomeAssistant) -> None: - """Test creation of a lock with doorsense and bridge that goes offline.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - activities = await _mock_activities_from_fixture( - hass, "get_activity.bridge_offline.json" - ) - await _create_yale_with_devices(hass, [lock_one], activities=activities) - - states = hass.states - assert states.get("lock.online_with_doorsense_name").state == STATE_UNAVAILABLE - - -async def test_lock_bridge_online(hass: HomeAssistant) -> None: - """Test creation of a lock with doorsense and bridge that goes offline.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - activities = await _mock_activities_from_fixture( - hass, "get_activity.bridge_online.json" - ) - await _create_yale_with_devices(hass, [lock_one], activities=activities) - - states = hass.states - assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKED - - -async def test_lock_update_via_socketio(hass: HomeAssistant) -> None: - """Test creation of a lock with doorsense and bridge.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - assert lock_one.pubsub_channel == "pubsub" - - activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") - config_entry, socketio = await _create_yale_with_devices( - hass, [lock_one], activities=activities - ) - socketio.connected = True - states = hass.states - - assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKED - - listener = list(socketio._listeners)[0] - listener( - lock_one.device_id, - dt_util.utcnow(), - { - "status": "kAugLockState_Unlocking", - }, - ) - - await hass.async_block_till_done() - await hass.async_block_till_done() - - assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING - - listener( - 
lock_one.device_id, - dt_util.utcnow(), - { - "status": "kAugLockState_Locking", - }, - ) - - await hass.async_block_till_done() - await hass.async_block_till_done() - - assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING - - async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) - await hass.async_block_till_done() - assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING - - socketio.connected = True - async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) - await hass.async_block_till_done() - assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING - - # Ensure socketio status is always preserved - async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) - await hass.async_block_till_done() - assert states.get("lock.online_with_doorsense_name").state == LockState.LOCKING - - listener( - lock_one.device_id, - dt_util.utcnow() + datetime.timedelta(seconds=2), - { - "status": "kAugLockState_Unlocking", - }, - ) - - await hass.async_block_till_done() - await hass.async_block_till_done() - - assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING - - async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) - await hass.async_block_till_done() - assert states.get("lock.online_with_doorsense_name").state == LockState.UNLOCKING - - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - -async def test_open_throws_hass_service_not_supported_error( - hass: HomeAssistant, -) -> None: - """Test open throws correct error on entity does not support this service error.""" - mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) - await _create_yale_with_devices(hass, [mocked_lock_detail]) - data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - with pytest.raises(HomeAssistantError, match="does not support this service"): - await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) diff --git a/tests/components/yale/test_sensor.py b/tests/components/yale/test_sensor.py deleted file mode 100644 index 5d724b4bb9d..00000000000 --- a/tests/components/yale/test_sensor.py +++ /dev/null @@ -1,320 +0,0 @@ -"""The sensor tests for the yale platform.""" - -from typing import Any - -from syrupy import SnapshotAssertion - -from homeassistant import core as ha -from homeassistant.const import ( - ATTR_ENTITY_PICTURE, - ATTR_UNIT_OF_MEASUREMENT, - PERCENTAGE, - STATE_UNKNOWN, -) -from homeassistant.core import CoreState, HomeAssistant -from homeassistant.helpers import entity_registry as er - -from .mocks import ( - _create_yale_with_devices, - _mock_activities_from_fixture, - _mock_doorbell_from_fixture, - _mock_doorsense_enabled_yale_lock_detail, - _mock_lock_from_fixture, -) - -from tests.common import mock_restore_cache_with_extra_data - - -async def test_create_doorbell(hass: HomeAssistant) -> None: - """Test creation of a doorbell.""" - doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") - await _create_yale_with_devices(hass, [doorbell_one]) - - battery_state = hass.states.get("sensor.k98gidt45gul_name_battery") - assert battery_state.state == "96" - assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE - - -async def test_create_doorbell_offline( - hass: HomeAssistant, entity_registry: er.EntityRegistry -) -> None: - """Test creation of a doorbell that is offline.""" - doorbell_one 
= await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") - await _create_yale_with_devices(hass, [doorbell_one]) - - battery_state = hass.states.get("sensor.tmt100_name_battery") - assert battery_state.state == "81" - assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE - - entry = entity_registry.async_get("sensor.tmt100_name_battery") - assert entry - assert entry.unique_id == "tmt100_device_battery" - - -async def test_create_doorbell_hardwired(hass: HomeAssistant) -> None: - """Test creation of a doorbell that is hardwired without a battery.""" - doorbell_one = await _mock_doorbell_from_fixture( - hass, "get_doorbell.nobattery.json" - ) - await _create_yale_with_devices(hass, [doorbell_one]) - - sensor_tmt100_name_battery = hass.states.get("sensor.tmt100_name_battery") - assert sensor_tmt100_name_battery is None - - -async def test_create_lock_with_linked_keypad( - hass: HomeAssistant, entity_registry: er.EntityRegistry -) -> None: - """Test creation of a lock with a linked keypad that both have a battery.""" - lock_one = await _mock_lock_from_fixture(hass, "get_lock.doorsense_init.json") - await _create_yale_with_devices(hass, [lock_one]) - - battery_state = hass.states.get( - "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" - ) - assert battery_state.state == "88" - assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE - - entry = entity_registry.async_get( - "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" - ) - assert entry - assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" - - keypad_battery_state = hass.states.get("sensor.front_door_lock_keypad_battery") - assert keypad_battery_state.state == "62" - assert keypad_battery_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE - entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") - assert entry - assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" - - -async def test_create_lock_with_low_battery_linked_keypad( - hass: HomeAssistant, entity_registry: er.EntityRegistry -) -> None: - """Test creation of a lock with a linked keypad that both have a battery.""" - lock_one = await _mock_lock_from_fixture(hass, "get_lock.low_keypad_battery.json") - await _create_yale_with_devices(hass, [lock_one]) - - battery_state = hass.states.get( - "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" - ) - assert battery_state.state == "88" - assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE - entry = entity_registry.async_get( - "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" - ) - assert entry - assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" - - state = hass.states.get("sensor.front_door_lock_keypad_battery") - assert state.state == "10" - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE - entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") - assert entry - assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" - - # No activity means it will be unavailable until someone unlocks/locks it - lock_operator_sensor = entity_registry.async_get( - "sensor.a6697750d607098bae8d6baa11ef8063_name_operator" - ) - assert ( - lock_operator_sensor.unique_id - == "A6697750D607098BAE8D6BAA11EF8063_lock_operator" - ) - assert ( - hass.states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_operator").state - == STATE_UNKNOWN - ) - - -async def test_lock_operator_bluetooth( - hass: HomeAssistant, entity_registry: 
er.EntityRegistry -) -> None: - """Test operation of a lock with doorsense and bridge.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - activities = await _mock_activities_from_fixture( - hass, "get_activity.lock_from_bluetooth.json" - ) - await _create_yale_with_devices(hass, [lock_one], activities=activities) - - lock_operator_sensor = entity_registry.async_get( - "sensor.online_with_doorsense_name_operator" - ) - assert lock_operator_sensor - - state = hass.states.get("sensor.online_with_doorsense_name_operator") - assert state.state == "Your favorite elven princess" - assert state.attributes["manual"] is False - assert state.attributes["tag"] is False - assert state.attributes["remote"] is False - assert state.attributes["keypad"] is False - assert state.attributes["autorelock"] is False - assert state.attributes["method"] == "mobile" - - -async def test_lock_operator_keypad( - hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion -) -> None: - """Test operation of a lock with doorsense and bridge.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - activities = await _mock_activities_from_fixture( - hass, "get_activity.lock_from_keypad.json" - ) - await _create_yale_with_devices(hass, [lock_one], activities=activities) - - lock_operator_sensor = entity_registry.async_get( - "sensor.online_with_doorsense_name_operator" - ) - assert lock_operator_sensor - - state = hass.states.get("sensor.online_with_doorsense_name_operator") - assert state.state == "Your favorite elven princess" - assert state.attributes == snapshot - - -async def test_lock_operator_remote( - hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion -) -> None: - """Test operation of a lock with doorsense and bridge.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") - await _create_yale_with_devices(hass, [lock_one], activities=activities) - - lock_operator_sensor = entity_registry.async_get( - "sensor.online_with_doorsense_name_operator" - ) - assert lock_operator_sensor - - state = hass.states.get("sensor.online_with_doorsense_name_operator") - assert state.state == "Your favorite elven princess" - assert state.attributes == snapshot - - -async def test_lock_operator_manual( - hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion -) -> None: - """Test operation of a lock with doorsense and bridge.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - activities = await _mock_activities_from_fixture( - hass, "get_activity.lock_from_manual.json" - ) - await _create_yale_with_devices(hass, [lock_one], activities=activities) - - lock_operator_sensor = entity_registry.async_get( - "sensor.online_with_doorsense_name_operator" - ) - assert lock_operator_sensor - state = hass.states.get("sensor.online_with_doorsense_name_operator") - assert state.state == "Your favorite elven princess" - assert state.attributes == snapshot - - -async def test_lock_operator_autorelock( - hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion -) -> None: - """Test operation of a lock with doorsense and bridge.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - activities = await _mock_activities_from_fixture( - hass, "get_activity.lock_from_autorelock.json" - ) - await _create_yale_with_devices(hass, [lock_one], activities=activities) - - 
lock_operator_sensor = entity_registry.async_get( - "sensor.online_with_doorsense_name_operator" - ) - assert lock_operator_sensor - - state = hass.states.get("sensor.online_with_doorsense_name_operator") - assert state.state == "Auto Relock" - assert state.attributes == snapshot - - -async def test_unlock_operator_manual( - hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion -) -> None: - """Test operation of a lock manually.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - activities = await _mock_activities_from_fixture( - hass, "get_activity.unlock_from_manual.json" - ) - await _create_yale_with_devices(hass, [lock_one], activities=activities) - - lock_operator_sensor = entity_registry.async_get( - "sensor.online_with_doorsense_name_operator" - ) - assert lock_operator_sensor - - state = hass.states.get("sensor.online_with_doorsense_name_operator") - assert state.state == "Your favorite elven princess" - assert state == snapshot - - -async def test_unlock_operator_tag( - hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion -) -> None: - """Test operation of a lock with a tag.""" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - activities = await _mock_activities_from_fixture( - hass, "get_activity.unlock_from_tag.json" - ) - await _create_yale_with_devices(hass, [lock_one], activities=activities) - - lock_operator_sensor = entity_registry.async_get( - "sensor.online_with_doorsense_name_operator" - ) - assert lock_operator_sensor - - state = hass.states.get("sensor.online_with_doorsense_name_operator") - assert state.state == "Your favorite elven princess" - assert state.attributes == snapshot - - -async def test_restored_state( - hass: HomeAssistant, hass_storage: dict[str, Any], snapshot: SnapshotAssertion -) -> None: - """Test restored state.""" - - entity_id = "sensor.online_with_doorsense_name_operator" - lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) - - fake_state = ha.State( - entity_id, - state="Tag Unlock", - attributes={ - "method": "tag", - "manual": False, - "remote": False, - "keypad": False, - "tag": True, - "autorelock": False, - ATTR_ENTITY_PICTURE: "image.png", - }, - ) - - # Home assistant is not running yet - hass.set_state(CoreState.not_running) - mock_restore_cache_with_extra_data( - hass, - [ - ( - fake_state, - {"native_value": "Tag Unlock", "native_unit_of_measurement": None}, - ) - ], - ) - - await _create_yale_with_devices(hass, [lock_one]) - - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state.state == "Tag Unlock" - assert state == snapshot diff --git a/tests/components/yale_smart_alarm/conftest.py b/tests/components/yale_smart_alarm/conftest.py index 7a7abcac67c..9583df5faa6 100644 --- a/tests/components/yale_smart_alarm/conftest.py +++ b/tests/components/yale_smart_alarm/conftest.py @@ -7,7 +7,6 @@ from typing import Any from unittest.mock import Mock, patch import pytest -from yalesmartalarmclient import YaleDoorManAPI, YaleLock, YaleSmartAlarmData from yalesmartalarmclient.const import YALE_STATE_ARM_FULL from homeassistant.components.yale_smart_alarm.const import DOMAIN, PLATFORMS @@ -34,10 +33,7 @@ async def patch_platform_constant() -> list[Platform]: @pytest.fixture async def load_config_entry( - hass: HomeAssistant, - get_data: YaleSmartAlarmData, - get_all_data: YaleSmartAlarmData, - load_platforms: list[Platform], + hass: HomeAssistant, load_json: dict[str, Any], load_platforms: 
list[Platform] ) -> tuple[MockConfigEntry, Mock]: """Set up the Yale Smart Living integration in Home Assistant.""" with patch("homeassistant.components.yale_smart_alarm.PLATFORMS", load_platforms): @@ -53,79 +49,25 @@ async def load_config_entry( config_entry.add_to_hass(hass) - cycle = get_data.cycle["data"] - data = {"data": cycle["device_status"]} - with patch( "homeassistant.components.yale_smart_alarm.coordinator.YaleSmartAlarmClient", autospec=True, ) as mock_client_class: client = mock_client_class.return_value client.auth = Mock() - client.auth.get_authenticated = Mock(return_value=data) - client.auth.post_authenticated = Mock(return_value={"code": "000"}) - client.auth.put_authenticated = Mock(return_value={"code": "000"}) - client.lock_api = YaleDoorManAPI(client.auth) - locks = [ - YaleLock(device, lock_api=client.lock_api) - for device in cycle["device_status"] - if device["type"] == YaleLock.DEVICE_TYPE - ] - client.get_locks.return_value = locks - client.get_all.return_value = get_all_data - client.get_information.return_value = get_data + client.lock_api = Mock() + client.get_all.return_value = load_json client.get_armed_status.return_value = YALE_STATE_ARM_FULL - await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() return (config_entry, client) -@pytest.fixture(name="loaded_fixture", scope="package") -def get_fixture_data() -> dict[str, Any]: +@pytest.fixture(name="load_json", scope="package") +def load_json_from_fixture() -> dict[str, Any]: """Load fixture with json data and return.""" data_fixture = load_fixture("get_all.json", "yale_smart_alarm") json_data: dict[str, Any] = json.loads(data_fixture) return json_data - - -@pytest.fixture(name="get_data") -def get_update_data(loaded_fixture: dict[str, Any]) -> YaleSmartAlarmData: - """Load update data and return.""" - - status = {"data": loaded_fixture["STATUS"]} - cycle = {"data": loaded_fixture["CYCLE"]} - online = {"data": loaded_fixture["ONLINE"]} - panel_info = {"data": loaded_fixture["PANEL INFO"]} - return YaleSmartAlarmData( - status=status, - cycle=cycle, - online=online, - panel_info=panel_info, - ) - - -@pytest.fixture(name="get_all_data") -def get_diag_data(loaded_fixture: dict[str, Any]) -> YaleSmartAlarmData: - """Load all data and return.""" - - devices = {"data": loaded_fixture["DEVICES"]} - mode = {"data": loaded_fixture["MODE"]} - status = {"data": loaded_fixture["STATUS"]} - cycle = {"data": loaded_fixture["CYCLE"]} - online = {"data": loaded_fixture["ONLINE"]} - history = {"data": loaded_fixture["HISTORY"]} - panel_info = {"data": loaded_fixture["PANEL INFO"]} - auth_check = {"data": loaded_fixture["AUTH CHECK"]} - return YaleSmartAlarmData( - devices=devices, - mode=mode, - status=status, - cycle=cycle, - online=online, - history=history, - panel_info=panel_info, - auth_check=auth_check, - ) diff --git a/tests/components/yale_smart_alarm/fixtures/get_all.json b/tests/components/yale_smart_alarm/fixtures/get_all.json index 6c68e05c566..e85a93f3c3e 100644 --- a/tests/components/yale_smart_alarm/fixtures/get_all.json +++ b/tests/components/yale_smart_alarm/fixtures/get_all.json @@ -175,7 +175,7 @@ "address": "RF4", "type": "device_type.door_contact", "name": "Device4", - "status1": "device_status.dc_close,device_status.low_battery", + "status1": "device_status.dc_close", "status2": null, "status_switch": null, "status_power": null, @@ -763,7 +763,7 @@ "address": "RF4", "type": "device_type.door_contact", "name": "Device4", - "status1": 
"device_status.dc_close,device_status.low_battery", + "status1": "device_status.dc_close", "status2": null, "status_switch": null, "status_power": null, diff --git a/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr b/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr index ed7e847439c..7bb144e8d2a 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr @@ -1,51 +1,4 @@ # serializer version: 1 -# name: test_binary_sensor[load_platforms0][binary_sensor.device4_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.device4_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'RF4-battery', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[load_platforms0][binary_sensor.device4_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Device4 Battery', - }), - 'context': , - 'entity_id': 'binary_sensor.device4_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensor[load_platforms0][binary_sensor.device4_door-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -93,53 +46,6 @@ 'state': 'off', }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.device5_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.device5_battery', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'RF5-battery', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[load_platforms0][binary_sensor.device5_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Device5 Battery', - }), - 'context': , - 'entity_id': 'binary_sensor.device5_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_binary_sensor[load_platforms0][binary_sensor.device5_door-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -187,53 +93,6 @@ 'state': 'on', }) # --- -# name: test_binary_sensor[load_platforms0][binary_sensor.device6_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.device6_battery', - 'has_entity_name': 
True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Battery', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'RF6-battery', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[load_platforms0][binary_sensor.device6_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Device6 Battery', - }), - 'context': , - 'entity_id': 'binary_sensor.device6_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_binary_sensor[load_platforms0][binary_sensor.device6_door-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr b/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr index af939336677..a5dfe4b50dd 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr @@ -1,654 +1,28 @@ # serializer version: 1 # name: test_diagnostics dict({ - 'auth_check': dict({ - 'data': dict({ - 'agent': False, - 'dealer_group': 'yale', - 'dealer_id': '605', - 'first_login': '1', - 'id': '**REDACTED**', - 'is_auth': '1', - 'mac': '**REDACTED**', - 'mail_address': '**REDACTED**', - 'master': '1', - 'name': '**REDACTED**', - 'token_time': '2023-08-17 16:19:20', - 'user_id': '**REDACTED**', - 'xml_version': '2', - }), + 'AUTH CHECK': dict({ + 'agent': False, + 'dealer_group': 'yale', + 'dealer_id': '605', + 'first_login': '1', + 'id': '**REDACTED**', + 'is_auth': '1', + 'mac': '**REDACTED**', + 'mail_address': '**REDACTED**', + 'master': '1', + 'name': '**REDACTED**', + 'token_time': '2023-08-17 16:19:20', + 'user_id': '**REDACTED**', + 'xml_version': '2', }), - 'cycle': dict({ - 'data': dict({ - 'alarm_event_latest': None, - 'capture_latest': None, - 'device_status': list([ - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': '35', - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '1', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.lock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.lock', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 
'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': None, - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '2', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.unlock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': None, - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '3', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.lock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.lock', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - '_battery': True, - '_state': 'closed', - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '000', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 
'minigw_configuration_data': '', - 'minigw_lock_status': '', - 'minigw_number_of_credentials_supported': '', - 'minigw_product_data': '', - 'minigw_protocol': '', - 'minigw_syncing': '', - 'name': '**REDACTED**', - 'no': '4', - 'rf': None, - 'rssi': '0', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.dc_close,device_status.low_battery', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.dc_close', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_contact', - 'type_no': '4', - }), - dict({ - '_battery': False, - '_state': 'open', - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '000', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '', - 'minigw_lock_status': '', - 'minigw_number_of_credentials_supported': '', - 'minigw_product_data': '', - 'minigw_protocol': '', - 'minigw_syncing': '', - 'name': '**REDACTED**', - 'no': '5', - 'rf': None, - 'rssi': '0', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.dc_open', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.dc_open', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_contact', - 'type_no': '4', - }), - dict({ - '_battery': False, - '_state': 'unavailable', - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '000', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '', - 'minigw_lock_status': '', - 'minigw_number_of_credentials_supported': '', - 'minigw_product_data': '', - 'minigw_protocol': '', - 'minigw_syncing': '', - 'name': '**REDACTED**', - 'no': '6', - 'rf': None, - 'rssi': '0', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'unknwon', - 'status2': None, - 'status_dim_level': None, - 
'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_contact', - 'type_no': '4', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': '36', - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '7', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.lock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.lock', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': '4', - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '8', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.unlock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.unlock', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 
'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': '10', - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '9', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.error', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.error', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '001', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '', - 'minigw_lock_status': '', - 'minigw_number_of_credentials_supported': '', - 'minigw_product_data': '', - 'minigw_protocol': '', - 'minigw_syncing': '', - 'name': '**REDACTED**', - 'no': '8', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': '', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': 21, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.temperature_sensor', - 'type_no': '40', - }), - ]), - 'model': list([ - dict({ - 'area': '1', - 'mode': 'disarm', - }), - ]), - 'panel_status': dict({ - 'warning_snd_mute': '0', - }), - 'report_event_latest': dict({ - 'cid_code': '1807', - 'event_time': None, - 'id': '**REDACTED**', - 'report_id': '1027299996', - 'time': '1692271914', - 'utc_event_time': None, - }), - }), - }), - 'devices': dict({ - 'data': list([ + 'CYCLE': dict({ + 'alarm_event_latest': None, + 'capture_latest': None, + 'device_status': list([ dict({ + '_state': 'locked', + 
'_state2': 'closed', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -709,6 +83,8 @@ 'type_no': '72', }), dict({ + '_state': 'unlocked', + '_state2': 'unknown', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -768,6 +144,8 @@ 'type_no': '72', }), dict({ + '_state': 'locked', + '_state2': 'unknown', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -828,6 +206,7 @@ 'type_no': '72', }), dict({ + '_state': 'closed', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -858,7 +237,7 @@ 'sresp_button_2': None, 'sresp_button_3': None, 'sresp_button_4': None, - 'status1': 'device_status.dc_close,device_status.low_battery', + 'status1': 'device_status.dc_close', 'status2': None, 'status_dim_level': None, 'status_fault': list([ @@ -888,6 +267,7 @@ 'type_no': '4', }), dict({ + '_state': 'open', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -948,6 +328,7 @@ 'type_no': '4', }), dict({ + '_state': 'unavailable', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -1007,6 +388,8 @@ 'type_no': '4', }), dict({ + '_state': 'unlocked', + '_state2': 'closed', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -1067,6 +450,8 @@ 'type_no': '72', }), dict({ + '_state': 'unlocked', + '_state2': 'open', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -1127,6 +512,7 @@ 'type_no': '72', }), dict({ + '_state': 'unavailable', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -1246,193 +632,799 @@ 'type_no': '40', }), ]), - }), - 'history': dict({ - 'data': list([ - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1027299996', - 'status_temp_format': 'C', - 'time': '2023/08/17 11:31:54', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180201101', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1802', - 'name': '**REDACTED**', - 'report_id': '1027299889', - 'status_temp_format': 'C', - 'time': '2023/08/17 11:31:43', - 'type': 'device_type.door_lock', - 'user': 101, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1027299587', - 'status_temp_format': 'C', - 'time': '2023/08/17 11:31:11', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180101001', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1801', - 'name': '**REDACTED**', - 'report_id': '1027296099', - 'status_temp_format': 'C', - 'time': '2023/08/17 11:24:52', - 'type': 'device_type.door_lock', - 'user': 1, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1027273782', - 'status_temp_format': 'C', - 'time': '2023/08/17 10:43:21', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180201101', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1802', - 'name': '**REDACTED**', - 'report_id': '1027273230', - 'status_temp_format': 'C', - 'time': '2023/08/17 10:42:09', - 'type': 'device_type.door_lock', - 'user': 101, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1027100172', - 'status_temp_format': 'C', 
- 'time': '2023/08/17 05:28:57', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180101001', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1801', - 'name': '**REDACTED**', - 'report_id': '1027099978', - 'status_temp_format': 'C', - 'time': '2023/08/17 05:28:39', - 'type': 'device_type.door_lock', - 'user': 1, - 'zone': 1, - }), - dict({ - 'area': 0, - 'cid': '18160200000', - 'cid_source': 'SYSTEM', - 'event_time': None, - 'event_type': '1602', - 'name': '', - 'report_id': '1027093266', - 'status_temp_format': 'C', - 'time': '2023/08/17 05:17:12', - 'type': '', - 'user': '', - 'zone': 0, - }), - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1026912623', - 'status_temp_format': 'C', - 'time': '2023/08/16 20:29:36', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - ]), - }), - 'mode': dict({ - 'data': list([ + 'model': list([ dict({ 'area': '1', 'mode': 'disarm', }), ]), + 'panel_status': dict({ + 'warning_snd_mute': '0', + }), + 'report_event_latest': dict({ + 'cid_code': '1807', + 'event_time': None, + 'id': '**REDACTED**', + 'report_id': '1027299996', + 'time': '1692271914', + 'utc_event_time': None, + }), }), - 'online': dict({ - 'data': 'online', - }), - 'panel_info': dict({ - 'data': dict({ - 'SMS_Balance': '50', - 'contact': '', - 'dealer_name': 'Poland', + 'DEVICES': list([ + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, 'mac': '**REDACTED**', - 'mail_address': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': '35', + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '1', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.lock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.lock', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': None, + 
'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '2', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.unlock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': None, + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '3', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.lock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.lock', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '000', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '', + 'minigw_lock_status': '', + 'minigw_number_of_credentials_supported': '', + 'minigw_product_data': '', + 'minigw_protocol': '', + 'minigw_syncing': '', + 'name': '**REDACTED**', + 'no': '4', + 'rf': None, + 'rssi': '0', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.dc_close', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 
'device_status.dc_close', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_contact', + 'type_no': '4', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '000', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '', + 'minigw_lock_status': '', + 'minigw_number_of_credentials_supported': '', + 'minigw_product_data': '', + 'minigw_protocol': '', + 'minigw_syncing': '', + 'name': '**REDACTED**', + 'no': '5', + 'rf': None, + 'rssi': '0', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.dc_open', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.dc_open', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_contact', + 'type_no': '4', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '000', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '', + 'minigw_lock_status': '', + 'minigw_number_of_credentials_supported': '', + 'minigw_product_data': '', + 'minigw_protocol': '', + 'minigw_syncing': '', + 'name': '**REDACTED**', + 'no': '6', + 'rf': None, + 'rssi': '0', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'unknwon', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_contact', + 'type_no': '4', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 
'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': '36', + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '7', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.lock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.lock', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': '4', + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '8', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.unlock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.unlock', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': '10', + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '9', + 
'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.error', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.error', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '001', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '', + 'minigw_lock_status': '', + 'minigw_number_of_credentials_supported': '', + 'minigw_product_data': '', + 'minigw_protocol': '', + 'minigw_syncing': '', + 'name': '**REDACTED**', + 'no': '8', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': '', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': 21, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.temperature_sensor', + 'type_no': '40', + }), + ]), + 'HISTORY': list([ + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1027299996', + 'status_temp_format': 'C', + 'time': '2023/08/17 11:31:54', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180201101', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1802', + 'name': '**REDACTED**', + 'report_id': '1027299889', + 'status_temp_format': 'C', + 'time': '2023/08/17 11:31:43', + 'type': 'device_type.door_lock', + 'user': 101, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1027299587', + 'status_temp_format': 'C', + 'time': '2023/08/17 11:31:11', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180101001', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1801', + 'name': '**REDACTED**', + 'report_id': '1027296099', + 'status_temp_format': 'C', + 'time': '2023/08/17 11:24:52', + 'type': 'device_type.door_lock', + 'user': 1, + 'zone': 1, + }), + dict({ + 
'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1027273782', + 'status_temp_format': 'C', + 'time': '2023/08/17 10:43:21', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180201101', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1802', + 'name': '**REDACTED**', + 'report_id': '1027273230', + 'status_temp_format': 'C', + 'time': '2023/08/17 10:42:09', + 'type': 'device_type.door_lock', + 'user': 101, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1027100172', + 'status_temp_format': 'C', + 'time': '2023/08/17 05:28:57', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180101001', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1801', + 'name': '**REDACTED**', + 'report_id': '1027099978', + 'status_temp_format': 'C', + 'time': '2023/08/17 05:28:39', + 'type': 'device_type.door_lock', + 'user': 1, + 'zone': 1, + }), + dict({ + 'area': 0, + 'cid': '18160200000', + 'cid_source': 'SYSTEM', + 'event_time': None, + 'event_type': '1602', 'name': '', - 'net_version': 'MINIGW-MZ-1_G 1.0.1.29A', - 'phone': 'UK-01902364606 / Sweden-0770373710 / Demark-89887818 / Norway-81569036', - 'report_account': '**REDACTED**', - 'rf51_version': '', - 'service_time': 'UK - Mon to Fri 8:30 til 17:30 / Scandinavia - Mon to Fri 8:00 til 20:00, Sat to Sun 10:00 til 15:00', - 'version': 'MINIGW-MZ-1_G 1.0.1.29A,,4.1.2.6.2,00:1D:94:0B:5E:A7,10111112,ML_yamga', - 'voice_balance': '0', - 'xml_version': '2', - 'zb_version': '4.1.2.6.2', - 'zw_version': '', + 'report_id': '1027093266', + 'status_temp_format': 'C', + 'time': '2023/08/17 05:17:12', + 'type': '', + 'user': '', + 'zone': 0, }), + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1026912623', + 'status_temp_format': 'C', + 'time': '2023/08/16 20:29:36', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + ]), + 'MODE': list([ + dict({ + 'area': '1', + 'mode': 'disarm', + }), + ]), + 'ONLINE': 'online', + 'PANEL INFO': dict({ + 'SMS_Balance': '50', + 'contact': '', + 'dealer_name': 'Poland', + 'mac': '**REDACTED**', + 'mail_address': '**REDACTED**', + 'name': '', + 'net_version': 'MINIGW-MZ-1_G 1.0.1.29A', + 'phone': 'UK-01902364606 / Sweden-0770373710 / Demark-89887818 / Norway-81569036', + 'report_account': '**REDACTED**', + 'rf51_version': '', + 'service_time': 'UK - Mon to Fri 8:30 til 17:30 / Scandinavia - Mon to Fri 8:00 til 20:00, Sat to Sun 10:00 til 15:00', + 'version': 'MINIGW-MZ-1_G 1.0.1.29A,,4.1.2.6.2,00:1D:94:0B:5E:A7,10111112,ML_yamga', + 'voice_balance': '0', + 'xml_version': '2', + 'zb_version': '4.1.2.6.2', + 'zw_version': '', }), - 'status': dict({ - 'data': dict({ - 'acfail': 'main.normal', - 'battery': 'main.normal', - 'gsm_rssi': '0', - 'imei': '', - 'imsi': '', - 'jam': 'main.normal', - 'rssi': '1', - 'tamper': 'main.normal', - }), + 'STATUS': dict({ + 'acfail': 'main.normal', + 'battery': 'main.normal', + 'gsm_rssi': '0', + 'imei': '', + 'imsi': '', + 'jam': 'main.normal', + 'rssi': '1', + 'tamper': 'main.normal', }), }) # --- diff --git a/tests/components/yale_smart_alarm/snapshots/test_lock.ambr 
b/tests/components/yale_smart_alarm/snapshots/test_lock.ambr index 34da7db087a..da9c11e01d2 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_lock.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_lock.ambr @@ -236,7 +236,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'open', + 'state': 'unlocked', }) # --- # name: test_lock[load_platforms0][lock.device9-entry] diff --git a/tests/components/yale_smart_alarm/snapshots/test_select.ambr b/tests/components/yale_smart_alarm/snapshots/test_select.ambr deleted file mode 100644 index 52ec7a99c2c..00000000000 --- a/tests/components/yale_smart_alarm/snapshots/test_select.ambr +++ /dev/null @@ -1,343 +0,0 @@ -# serializer version: 1 -# name: test_switch[load_platforms0][select.device1_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'high', - 'low', - 'off', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.device1_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '1111-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[load_platforms0][select.device1_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device1 Volume', - 'options': list([ - 'high', - 'low', - 'off', - ]), - }), - 'context': , - 'entity_id': 'select.device1_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'low', - }) -# --- -# name: test_switch[load_platforms0][select.device2_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'high', - 'low', - 'off', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.device2_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '2222-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[load_platforms0][select.device2_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device2 Volume', - 'options': list([ - 'high', - 'low', - 'off', - ]), - }), - 'context': , - 'entity_id': 'select.device2_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'low', - }) -# --- -# name: test_switch[load_platforms0][select.device3_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'high', - 'low', - 'off', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.device3_volume', - 'has_entity_name': True, - 'hidden_by': None, - 
'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '3333-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[load_platforms0][select.device3_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device3 Volume', - 'options': list([ - 'high', - 'low', - 'off', - ]), - }), - 'context': , - 'entity_id': 'select.device3_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'low', - }) -# --- -# name: test_switch[load_platforms0][select.device7_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'high', - 'low', - 'off', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.device7_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '7777-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[load_platforms0][select.device7_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device7 Volume', - 'options': list([ - 'high', - 'low', - 'off', - ]), - }), - 'context': , - 'entity_id': 'select.device7_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'low', - }) -# --- -# name: test_switch[load_platforms0][select.device8_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'high', - 'low', - 'off', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.device8_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '8888-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[load_platforms0][select.device8_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device8 Volume', - 'options': list([ - 'high', - 'low', - 'off', - ]), - }), - 'context': , - 'entity_id': 'select.device8_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'low', - }) -# --- -# name: test_switch[load_platforms0][select.device9_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'high', - 'low', - 'off', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'select', - 'entity_category': None, - 'entity_id': 'select.device9_volume', - 'has_entity_name': True, - 'hidden_by': None, - 
'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '9999-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[load_platforms0][select.device9_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device9 Volume', - 'options': list([ - 'high', - 'low', - 'off', - ]), - }), - 'context': , - 'entity_id': 'select.device9_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'low', - }) -# --- diff --git a/tests/components/yale_smart_alarm/snapshots/test_switch.ambr b/tests/components/yale_smart_alarm/snapshots/test_switch.ambr deleted file mode 100644 index f631a6fcbfe..00000000000 --- a/tests/components/yale_smart_alarm/snapshots/test_switch.ambr +++ /dev/null @@ -1,277 +0,0 @@ -# serializer version: 1 -# name: test_switch[load_platforms0][switch.device1_autolock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.device1_autolock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Autolock', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'autolock', - 'unique_id': '1111-autolock', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[load_platforms0][switch.device1_autolock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device1 Autolock', - }), - 'context': , - 'entity_id': 'switch.device1_autolock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[load_platforms0][switch.device2_autolock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.device2_autolock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Autolock', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'autolock', - 'unique_id': '2222-autolock', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[load_platforms0][switch.device2_autolock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device2 Autolock', - }), - 'context': , - 'entity_id': 'switch.device2_autolock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[load_platforms0][switch.device3_autolock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 
'switch.device3_autolock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Autolock', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'autolock', - 'unique_id': '3333-autolock', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[load_platforms0][switch.device3_autolock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device3 Autolock', - }), - 'context': , - 'entity_id': 'switch.device3_autolock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[load_platforms0][switch.device7_autolock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.device7_autolock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Autolock', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'autolock', - 'unique_id': '7777-autolock', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[load_platforms0][switch.device7_autolock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device7 Autolock', - }), - 'context': , - 'entity_id': 'switch.device7_autolock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[load_platforms0][switch.device8_autolock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.device8_autolock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Autolock', - 'platform': 'yale_smart_alarm', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'autolock', - 'unique_id': '8888-autolock', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[load_platforms0][switch.device8_autolock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device8 Autolock', - }), - 'context': , - 'entity_id': 'switch.device8_autolock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[load_platforms0][switch.device9_autolock-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.device9_autolock', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Autolock', - 'platform': 'yale_smart_alarm', - 
'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'autolock', - 'unique_id': '9999-autolock', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[load_platforms0][switch.device9_autolock-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device9 Autolock', - }), - 'context': , - 'entity_id': 'switch.device9_autolock', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/yale_smart_alarm/test_config_flow.py b/tests/components/yale_smart_alarm/test_config_flow.py index e5b59f79463..4ef201d2122 100644 --- a/tests/components/yale_smart_alarm/test_config_flow.py +++ b/tests/components/yale_smart_alarm/test_config_flow.py @@ -132,7 +132,15 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -149,6 +157,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { + "username": "test-username", "password": "new-test-password", }, ) @@ -193,7 +202,15 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": entry.unique_id, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) with patch( "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", @@ -202,6 +219,7 @@ async def test_reauth_flow_error( result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { + "username": "test-username", "password": "wrong-password", }, ) @@ -224,6 +242,7 @@ async def test_reauth_flow_error( result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { + "username": "test-username", "password": "new-test-password", }, ) @@ -239,211 +258,6 @@ async def test_reauth_flow_error( } -async def test_reconfigure(hass: HomeAssistant) -> None: - """Test reconfigure config flow.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="test-username", - data={ - "username": "test-username", - "password": "test-password", - "name": "Yale Smart Alarm", - "area_id": "1", - }, - version=2, - ) - entry.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - - with ( - patch( - "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", - return_value="", - ), - patch( - "homeassistant.components.yale_smart_alarm.async_setup_entry", - return_value=True, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "username": "test-username", - "password": "new-test-password", - "area_id": "2", - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" - assert entry.data == { - "username": "test-username", - "password": "new-test-password", - "name": "Yale Smart Alarm", - "area_id": "2", - } - - -async def test_reconfigure_username_exist(hass: HomeAssistant) -> None: - """Test reconfigure config flow abort other 
username already exist.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="test-username", - data={ - "username": "test-username", - "password": "test-password", - "name": "Yale Smart Alarm", - "area_id": "1", - }, - version=2, - ) - entry.add_to_hass(hass) - entry2 = MockConfigEntry( - domain=DOMAIN, - unique_id="other-username", - data={ - "username": "other-username", - "password": "test-password", - "name": "Yale Smart Alarm 2", - "area_id": "1", - }, - version=2, - ) - entry2.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - - with ( - patch( - "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", - return_value="", - ), - patch( - "homeassistant.components.yale_smart_alarm.async_setup_entry", - return_value=True, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "username": "other-username", - "password": "test-password", - "area_id": "1", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "unique_id_exists"} - - with ( - patch( - "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", - return_value="", - ), - patch( - "homeassistant.components.yale_smart_alarm.async_setup_entry", - return_value=True, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "username": "other-new-username", - "password": "test-password", - "area_id": "1", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - assert entry.data == { - "username": "other-new-username", - "name": "Yale Smart Alarm", - "password": "test-password", - "area_id": "1", - } - - -@pytest.mark.parametrize( - ("sideeffect", "p_error"), - [ - (AuthenticationError, "invalid_auth"), - (ConnectionError, "cannot_connect"), - (TimeoutError, "cannot_connect"), - (UnknownError, "cannot_connect"), - ], -) -async def test_reconfigure_flow_error( - hass: HomeAssistant, sideeffect: Exception, p_error: str -) -> None: - """Test a reauthentication flow.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="test-username", - data={ - "username": "test-username", - "password": "test-password", - "name": "Yale Smart Alarm", - "area_id": "1", - }, - version=2, - ) - entry.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - - with patch( - "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", - side_effect=sideeffect, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "username": "test-username", - "password": "update-password", - "area_id": "1", - }, - ) - await hass.async_block_till_done() - - assert result["step_id"] == "reconfigure" - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": p_error} - - with ( - patch( - "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", - return_value="", - ), - patch( - "homeassistant.components.yale_smart_alarm.async_setup_entry", - return_value=True, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "username": "test-username", - "password": "new-test-password", - "area_id": "1", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - assert entry.data == { - "username": "test-username", 
- "name": "Yale Smart Alarm", - "password": "new-test-password", - "area_id": "1", - } - - async def test_options_flow(hass: HomeAssistant) -> None: """Test options config flow.""" entry = MockConfigEntry( diff --git a/tests/components/yale_smart_alarm/test_coordinator.py b/tests/components/yale_smart_alarm/test_coordinator.py index 386e4ad72f7..6f1125fcf65 100644 --- a/tests/components/yale_smart_alarm/test_coordinator.py +++ b/tests/components/yale_smart_alarm/test_coordinator.py @@ -3,20 +3,16 @@ from __future__ import annotations from datetime import timedelta +from typing import Any from unittest.mock import Mock, patch import pytest -from yalesmartalarmclient import ( - YALE_STATE_ARM_FULL, - AuthenticationError, - UnknownError, - YaleSmartAlarmData, -) +from yalesmartalarmclient.const import YALE_STATE_ARM_FULL +from yalesmartalarmclient.exceptions import AuthenticationError, UnknownError -from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.yale_smart_alarm.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.const import STATE_ALARM_ARMED_AWAY, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util @@ -36,7 +32,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed ) async def test_coordinator_setup_errors( hass: HomeAssistant, - get_data: YaleSmartAlarmData, + load_json: dict[str, Any], p_error: Exception, ) -> None: """Test the Yale Smart Living coordinator with errors.""" @@ -68,61 +64,61 @@ async def test_coordinator_setup_errors( async def test_coordinator_setup_and_update_errors( hass: HomeAssistant, load_config_entry: tuple[MockConfigEntry, Mock], - get_data: YaleSmartAlarmData, + load_json: dict[str, Any], ) -> None: """Test the Yale Smart Living coordinator with errors.""" client = load_config_entry[1] state = hass.states.get("alarm_control_panel.yale_smart_alarm") - assert state.state == AlarmControlPanelState.ARMED_AWAY + assert state.state == STATE_ALARM_ARMED_AWAY client.reset_mock() - client.get_information.side_effect = ConnectionError("Could not connect") + client.get_all.side_effect = ConnectionError("Could not connect") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=1)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_information.assert_called_once() + client.get_all.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE client.reset_mock() - client.get_information.side_effect = ConnectionError("Could not connect") + client.get_all.side_effect = ConnectionError("Could not connect") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=2)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_information.assert_called_once() + client.get_all.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE client.reset_mock() - client.get_information.side_effect = TimeoutError("Could not connect") + client.get_all.side_effect = TimeoutError("Could not connect") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=3)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_information.assert_called_once() + client.get_all.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert 
state.state == STATE_UNAVAILABLE client.reset_mock() - client.get_information.side_effect = UnknownError("info") + client.get_all.side_effect = UnknownError("info") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=4)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_information.assert_called_once() + client.get_all.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE client.reset_mock() - client.get_information.side_effect = None - client.get_information.return_value = get_data + client.get_all.side_effect = None + client.get_all.return_value = load_json client.get_armed_status.return_value = YALE_STATE_ARM_FULL async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_information.assert_called_once() + client.get_all.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") - assert state.state == AlarmControlPanelState.ARMED_AWAY + assert state.state == STATE_ALARM_ARMED_AWAY client.reset_mock() - client.get_information.side_effect = AuthenticationError("Can not authenticate") + client.get_all.side_effect = AuthenticationError("Can not authenticate") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=6)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_information.assert_called_once() + client.get_all.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/yale_smart_alarm/test_lock.py b/tests/components/yale_smart_alarm/test_lock.py index bb8c9d55053..09ce8529084 100644 --- a/tests/components/yale_smart_alarm/test_lock.py +++ b/tests/components/yale_smart_alarm/test_lock.py @@ -3,11 +3,13 @@ from __future__ import annotations from copy import deepcopy +from typing import Any from unittest.mock import Mock import pytest from syrupy.assertion import SnapshotAssertion -from yalesmartalarmclient import UnknownError, YaleDoorManAPI, YaleSmartAlarmData +from yalesmartalarmclient.exceptions import UnknownError +from yalesmartalarmclient.lock import YaleDoorManAPI from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN from homeassistant.const import ( @@ -18,7 +20,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry, snapshot_platform @@ -45,15 +47,17 @@ async def test_lock( ) async def test_lock_service_calls( hass: HomeAssistant, - get_data: YaleSmartAlarmData, + load_json: dict[str, Any], load_config_entry: tuple[MockConfigEntry, Mock], + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test the Yale Smart Alarm lock.""" client = load_config_entry[1] - data = deepcopy(get_data.cycle) - data["data"] = data["data"].pop("device_status") + data = deepcopy(load_json) + data["data"] = data.pop("DEVICES") client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(return_value={"code": "000"}) @@ -62,14 +66,6 @@ async def test_lock_service_calls( state = hass.states.get("lock.device1") assert state.state == "locked" - with pytest.raises(ServiceValidationError): - await 
hass.services.async_call( - LOCK_DOMAIN, - SERVICE_UNLOCK, - {ATTR_ENTITY_ID: "lock.device1"}, - blocking=True, - ) - await hass.services.async_call( LOCK_DOMAIN, SERVICE_UNLOCK, @@ -97,15 +93,17 @@ async def test_lock_service_calls( ) async def test_lock_service_call_fails( hass: HomeAssistant, - get_data: YaleSmartAlarmData, + load_json: dict[str, Any], load_config_entry: tuple[MockConfigEntry, Mock], + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test the Yale Smart Alarm lock service call fails.""" client = load_config_entry[1] - data = deepcopy(get_data.cycle) - data["data"] = data["data"].pop("device_status") + data = deepcopy(load_json) + data["data"] = data.pop("DEVICES") client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(side_effect=UnknownError("test_side_effect")) @@ -147,17 +145,21 @@ async def test_lock_service_call_fails( ) async def test_lock_service_call_fails_with_incorrect_status( hass: HomeAssistant, - get_data: YaleSmartAlarmData, + load_json: dict[str, Any], load_config_entry: tuple[MockConfigEntry, Mock], + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test the Yale Smart Alarm lock service call fails with incorrect return state.""" client = load_config_entry[1] - data = deepcopy(get_data.cycle) - data["data"] = data["data"].pop("device_status") + data = deepcopy(load_json) + data["data"] = data.pop("DEVICES") + client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(return_value={"code": "FFF"}) + client.lock_api = YaleDoorManAPI(client.auth) state = hass.states.get("lock.device1") assert state.state == "locked" diff --git a/tests/components/yale_smart_alarm/test_select.py b/tests/components/yale_smart_alarm/test_select.py deleted file mode 100644 index c874f83aed7..00000000000 --- a/tests/components/yale_smart_alarm/test_select.py +++ /dev/null @@ -1,66 +0,0 @@ -"""The test for the Yale smart living select.""" - -from __future__ import annotations - -from unittest.mock import Mock - -import pytest -from syrupy.assertion import SnapshotAssertion -from yalesmartalarmclient import YaleSmartAlarmData - -from homeassistant.components.select import ( - DOMAIN as SELECT_DOMAIN, - SERVICE_SELECT_OPTION, -) -from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.parametrize( - "load_platforms", - [[Platform.SELECT]], -) -async def test_switch( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - load_config_entry: tuple[MockConfigEntry, Mock], - get_data: YaleSmartAlarmData, - snapshot: SnapshotAssertion, -) -> None: - """Test the Yale Smart Living volume select.""" - client = load_config_entry[1] - - await snapshot_platform( - hass, entity_registry, snapshot, load_config_entry[0].entry_id - ) - - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: "select.device1_volume", - ATTR_OPTION: "high", - }, - blocking=True, - ) - - client.auth.post_authenticated.assert_called_once() - client.auth.put_authenticated.assert_called_once() - - state = hass.states.get("select.device1_volume") - assert state.state == "high" - - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - SELECT_DOMAIN, - 
SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: "select.device1_volume", - ATTR_OPTION: "not_exist", - }, - blocking=True, - ) diff --git a/tests/components/yale_smart_alarm/test_sensor.py b/tests/components/yale_smart_alarm/test_sensor.py index 848d31cedc3..d91ddc0e6ce 100644 --- a/tests/components/yale_smart_alarm/test_sensor.py +++ b/tests/components/yale_smart_alarm/test_sensor.py @@ -2,10 +2,9 @@ from __future__ import annotations +from typing import Any from unittest.mock import Mock -from yalesmartalarmclient import YaleSmartAlarmData - from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -14,7 +13,7 @@ from tests.common import MockConfigEntry async def test_coordinator_setup_and_update_errors( hass: HomeAssistant, load_config_entry: tuple[MockConfigEntry, Mock], - get_data: YaleSmartAlarmData, + load_json: dict[str, Any], ) -> None: """Test the Yale Smart Living coordinator with errors.""" diff --git a/tests/components/yale_smart_alarm/test_switch.py b/tests/components/yale_smart_alarm/test_switch.py deleted file mode 100644 index b189a3fd003..00000000000 --- a/tests/components/yale_smart_alarm/test_switch.py +++ /dev/null @@ -1,46 +0,0 @@ -"""The test for the Yale smart living switch.""" - -from __future__ import annotations - -from unittest.mock import Mock - -import pytest -from syrupy.assertion import SnapshotAssertion -from yalesmartalarmclient import YaleSmartAlarmData - -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_OFF -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import MockConfigEntry, snapshot_platform - - -@pytest.mark.parametrize( - "load_platforms", - [[Platform.SWITCH]], -) -async def test_switch( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - load_config_entry: tuple[MockConfigEntry, Mock], - get_data: YaleSmartAlarmData, - snapshot: SnapshotAssertion, -) -> None: - """Test the Yale Smart Living autolock switch.""" - - await snapshot_platform( - hass, entity_registry, snapshot, load_config_entry[0].entry_id - ) - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: "switch.device1_autolock", - }, - blocking=True, - ) - - state = hass.states.get("switch.device1_autolock") - assert state.state == STATE_OFF diff --git a/tests/components/yalexs_ble/test_config_flow.py b/tests/components/yalexs_ble/test_config_flow.py index c546e754239..15552fdec5f 100644 --- a/tests/components/yalexs_ble/test_config_flow.py +++ b/tests/components/yalexs_ble/test_config_flow.py @@ -513,10 +513,14 @@ async def test_integration_discovery_takes_precedence_over_bluetooth( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} - flows = list(hass.config_entries.flow._handler_progress_index[DOMAIN]) + flows = [ + flow + for flow in hass.config_entries.flow.async_progress() + if flow["handler"] == DOMAIN + ] assert len(flows) == 1 - assert flows[0].unique_id == YALE_ACCESS_LOCK_DISCOVERY_INFO.address - assert flows[0].local_name == YALE_ACCESS_LOCK_DISCOVERY_INFO.name + assert flows[0]["context"]["unique_id"] == YALE_ACCESS_LOCK_DISCOVERY_INFO.address + assert flows[0]["context"]["local_name"] == YALE_ACCESS_LOCK_DISCOVERY_INFO.name with patch( "homeassistant.components.yalexs_ble.util.async_discovered_service_info", @@ -724,10 +728,14 @@ async def 
test_integration_discovery_takes_precedence_over_bluetooth_uuid_addres assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} - flows = list(hass.config_entries.flow._handler_progress_index[DOMAIN]) + flows = [ + flow + for flow in hass.config_entries.flow.async_progress() + if flow["handler"] == DOMAIN + ] assert len(flows) == 1 - assert flows[0].unique_id == LOCK_DISCOVERY_INFO_UUID_ADDRESS.address - assert flows[0].local_name == LOCK_DISCOVERY_INFO_UUID_ADDRESS.name + assert flows[0]["context"]["unique_id"] == LOCK_DISCOVERY_INFO_UUID_ADDRESS.address + assert flows[0]["context"]["local_name"] == LOCK_DISCOVERY_INFO_UUID_ADDRESS.name with patch( "homeassistant.components.yalexs_ble.util.async_discovered_service_info", @@ -800,10 +808,14 @@ async def test_integration_discovery_takes_precedence_over_bluetooth_non_unique_ assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} - flows = list(hass.config_entries.flow._handler_progress_index[DOMAIN]) + flows = [ + flow + for flow in hass.config_entries.flow.async_progress() + if flow["handler"] == DOMAIN + ] assert len(flows) == 1 - assert flows[0].unique_id == OLD_FIRMWARE_LOCK_DISCOVERY_INFO.address - assert flows[0].local_name == OLD_FIRMWARE_LOCK_DISCOVERY_INFO.name + assert flows[0]["context"]["unique_id"] == OLD_FIRMWARE_LOCK_DISCOVERY_INFO.address + assert flows[0]["context"]["local_name"] == OLD_FIRMWARE_LOCK_DISCOVERY_INFO.name with patch( "homeassistant.components.yalexs_ble.util.async_discovered_service_info", @@ -933,7 +945,11 @@ async def test_reauth(hass: HomeAssistant) -> None: unique_id=YALE_ACCESS_LOCK_DISCOVERY_INFO.address, ) entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, + data=entry.data, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_validate" diff --git a/tests/components/yamaha/test_media_player.py b/tests/components/yamaha/test_media_player.py index 2375e7d07f4..02246e69269 100644 --- a/tests/components/yamaha/test_media_player.py +++ b/tests/components/yamaha/test_media_player.py @@ -25,7 +25,7 @@ def _create_zone_mock(name, url): class FakeYamahaDevice: """A fake Yamaha device.""" - def __init__(self, ctrl_url, name, zones=None) -> None: + def __init__(self, ctrl_url, name, zones=None): """Initialize the fake Yamaha device.""" self.ctrl_url = ctrl_url self.name = name @@ -46,27 +46,11 @@ def main_zone_fixture(): def device_fixture(main_zone): """Mock the yamaha device.""" device = FakeYamahaDevice("http://receiver", "Receiver", zones=[main_zone]) - with ( - patch("rxv.RXV", return_value=device), - patch("rxv.find", return_value=[device]), - ): + with patch("rxv.RXV", return_value=device): yield device -@pytest.fixture(name="device2") -def device2_fixture(main_zone): - """Mock the yamaha device.""" - device = FakeYamahaDevice( - "http://127.0.0.1:80/YamahaRemoteControl/ctrl", "Receiver 2", zones=[main_zone] - ) - with ( - patch("rxv.RXV", return_value=device), - patch("rxv.find", return_value=[device]), - ): - yield device - - -async def test_setup_host(hass: HomeAssistant, device, device2, main_zone) -> None: +async def test_setup_host(hass: HomeAssistant, device, main_zone) -> None: """Test set up integration with host.""" assert await async_setup_component(hass, MP_DOMAIN, CONFIG) await 
hass.async_block_till_done() @@ -76,36 +60,6 @@ async def test_setup_host(hass: HomeAssistant, device, device2, main_zone) -> No assert state is not None assert state.state == "off" - with patch("rxv.find", return_value=[device2]): - assert await async_setup_component(hass, MP_DOMAIN, CONFIG) - await hass.async_block_till_done() - - state = hass.states.get("media_player.yamaha_receiver_main_zone") - - assert state is not None - assert state.state == "off" - - -@pytest.mark.parametrize( - ("error"), - [ - AttributeError, - ValueError, - UnicodeDecodeError("", b"", 1, 0, ""), - ], -) -async def test_setup_find_errors(hass: HomeAssistant, device, main_zone, error) -> None: - """Test set up integration encountering an Error.""" - - with patch("rxv.find", side_effect=error): - assert await async_setup_component(hass, MP_DOMAIN, CONFIG) - await hass.async_block_till_done() - - state = hass.states.get("media_player.yamaha_receiver_main_zone") - - assert state is not None - assert state.state == "off" - async def test_setup_no_host(hass: HomeAssistant, device, main_zone) -> None: """Test set up integration without host.""" diff --git a/tests/components/yamaha_musiccast/test_config_flow.py b/tests/components/yamaha_musiccast/test_config_flow.py index 7629d2401c2..321e7250e5a 100644 --- a/tests/components/yamaha_musiccast/test_config_flow.py +++ b/tests/components/yamaha_musiccast/test_config_flow.py @@ -1,6 +1,5 @@ """Test config flow.""" -from collections.abc import Generator from unittest.mock import patch from aiomusiccast import MusicCastConnectionException @@ -18,7 +17,7 @@ from tests.common import MockConfigEntry @pytest.fixture(autouse=True) -def silent_ssdp_scanner() -> Generator[None]: +async def silent_ssdp_scanner(hass): """Start SSDP component and get Scanner, prevent actual SSDP traffic.""" with ( patch("homeassistant.components.ssdp.Scanner._async_start_ssdp_listeners"), diff --git a/tests/components/yandex_transport/test_sensor.py b/tests/components/yandex_transport/test_sensor.py index 13432850b2b..5ad9fa92c39 100644 --- a/tests/components/yandex_transport/test_sensor.py +++ b/tests/components/yandex_transport/test_sensor.py @@ -1,7 +1,6 @@ """Tests for the yandex transport platform.""" import json -from typing import Any from unittest.mock import AsyncMock, patch import pytest @@ -77,9 +76,7 @@ SUBURBAN_RESULT_STATE = dt_util.utc_from_timestamp(1634984640).isoformat( ) -async def assert_setup_sensor( - hass: HomeAssistant, config: dict[str, Any], count: int = 1 -) -> None: +async def assert_setup_sensor(hass, config, count=1): """Set up the sensor and assert it's been created.""" with assert_setup_component(count): assert await async_setup_component(hass, sensor.DOMAIN, config) diff --git a/tests/components/yandextts/test_tts.py b/tests/components/yandextts/test_tts.py index 77878c2be51..496c187469a 100644 --- a/tests/components/yandextts/test_tts.py +++ b/tests/components/yandextts/test_tts.py @@ -29,8 +29,9 @@ def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: """Mock the TTS cache dir with empty dir.""" + return mock_tts_cache_dir async def test_setup_component(hass: HomeAssistant) -> None: diff --git a/tests/components/yardian/conftest.py b/tests/components/yardian/conftest.py index 00e76c4c34f..26a01f889b7 100644 --- a/tests/components/yardian/conftest.py +++ 
b/tests/components/yardian/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Yardian tests.""" -from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest +from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/yeelight/__init__.py b/tests/components/yeelight/__init__.py index bdd8cdda312..2de064cf567 100644 --- a/tests/components/yeelight/__init__.py +++ b/tests/components/yeelight/__init__.py @@ -109,7 +109,7 @@ CONFIG_ENTRY_DATA = {CONF_ID: ID} class MockAsyncBulb: """A mock for yeelight.aio.AsyncBulb.""" - def __init__(self, model, bulb_type, cannot_connect) -> None: + def __init__(self, model, bulb_type, cannot_connect): """Init the mock.""" self.model = model self.bulb_type = bulb_type diff --git a/tests/components/yeelight/test_config_flow.py b/tests/components/yeelight/test_config_flow.py index 1acb553af3d..4d788ba8258 100644 --- a/tests/components/yeelight/test_config_flow.py +++ b/tests/components/yeelight/test_config_flow.py @@ -7,11 +7,7 @@ import pytest from homeassistant import config_entries from homeassistant.components import dhcp, ssdp, zeroconf -from homeassistant.components.yeelight.config_flow import ( - MODEL_UNKNOWN, - CannotConnect, - YeelightConfigFlow, -) +from homeassistant.components.yeelight.config_flow import MODEL_UNKNOWN, CannotConnect from homeassistant.components.yeelight.const import ( CONF_DETECTED_MODEL, CONF_MODE_MUSIC, @@ -507,20 +503,10 @@ async def test_discovered_by_homekit_and_dhcp(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] is None - real_is_matching = YeelightConfigFlow.is_matching - return_values = [] - - def is_matching(self, other_flow) -> bool: - return_values.append(real_is_matching(self, other_flow)) - return return_values[-1] - with ( _patch_discovery(), _patch_discovery_interval(), patch(f"{MODULE_CONFIG_FLOW}.AsyncBulb", return_value=mocked_bulb), - patch.object( - YeelightConfigFlow, "is_matching", wraps=is_matching, autospec=True - ), ): result2 = await hass.config_entries.flow.async_init( DOMAIN, @@ -532,8 +518,6 @@ async def test_discovered_by_homekit_and_dhcp(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_in_progress" - # Ensure the is_matching method returned True - assert return_values == [True] with ( _patch_discovery(), diff --git a/tests/components/yolink/test_config_flow.py b/tests/components/yolink/test_config_flow.py index 1dd71368d73..d7ba09e4269 100644 --- a/tests/components/yolink/test_config_flow.py +++ b/tests/components/yolink/test_config_flow.py @@ -172,7 +172,15 @@ async def test_reauthentication( ) old_entry.add_to_hass(hass) - result = await old_entry.start_reauth_flow(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 diff --git a/tests/components/yolink/test_device_trigger.py b/tests/components/yolink/test_device_trigger.py index c1d3a8acda8..f6aa9a28ac0 100644 --- a/tests/components/yolink/test_device_trigger.py +++ b/tests/components/yolink/test_device_trigger.py @@ -1,17 +1,27 @@ """The tests for YoLink device triggers.""" +import pytest from pytest_unordered import unordered from yolink.const import ATTR_DEVICE_DIMMER, 
ATTR_DEVICE_SMART_REMOTER from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.yolink import DOMAIN, YOLINK_EVENT -from homeassistant.components.yolink.const import DEV_MODEL_FLEX_FOB_YS3604_UC from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import ( + MockConfigEntry, + async_get_device_automations, + async_mock_service, +) + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "yolink", "automation") async def test_get_triggers( @@ -24,7 +34,6 @@ async def test_get_triggers( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, model=ATTR_DEVICE_SMART_REMOTER, - model_id=DEV_MODEL_FLEX_FOB_YS3604_UC, ) expected_triggers = [ @@ -101,7 +110,6 @@ async def test_get_triggers_exception( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, model=ATTR_DEVICE_DIMMER, - model_id=None, ) expected_triggers = [] @@ -112,9 +120,7 @@ async def test_get_triggers_exception( async def test_if_fires_on_event( - hass: HomeAssistant, - service_calls: list[ServiceCall], - device_registry: dr.DeviceRegistry, + hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry ) -> None: """Test for event triggers firing.""" mac_address = "12:34:56:AB:CD:EF" @@ -126,7 +132,6 @@ async def test_if_fires_on_event( connections={connection}, identifiers={(DOMAIN, mac_address)}, model=ATTR_DEVICE_SMART_REMOTER, - model_id=DEV_MODEL_FLEX_FOB_YS3604_UC, ) assert await async_setup_component( @@ -161,5 +166,5 @@ async def test_if_fires_on_event( }, ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["message"] == "service called" + assert len(calls) == 1 + assert calls[0].data["message"] == "service called" diff --git a/tests/components/youless/snapshots/test_sensor.ambr b/tests/components/youless/snapshots/test_sensor.ambr index bcfd0139e5c..22e480c390e 100644 --- a/tests/components/youless/snapshots/test_sensor.ambr +++ b/tests/components/youless/snapshots/test_sensor.ambr @@ -47,7 +47,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.0', + 'state': 'unavailable', }) # --- # name: test_sensors[sensor.energy_delivery_low-entry] @@ -98,7 +98,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.029', + 'state': 'unavailable', }) # --- # name: test_sensors[sensor.energy_high-entry] @@ -405,7 +405,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1624.264', + 'state': '1234.564', }) # --- # name: test_sensors[sensor.phase_1_current-entry] @@ -967,6 +967,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1234.564', + 'state': 'unavailable', }) # --- diff --git a/tests/components/youtube/__init__.py b/tests/components/youtube/__init__.py index 31125d3a71e..1b559f0f1c4 100644 --- a/tests/components/youtube/__init__.py +++ b/tests/components/youtube/__init__.py @@ -1,8 +1,8 @@ """Tests for the YouTube integration.""" -from collections.abc import AsyncGenerator import json +from typing_extensions import AsyncGenerator from youtubeaio.models import YouTubeChannel, YouTubePlaylistItem, 
YouTubeSubscription from youtubeaio.types import AuthScope diff --git a/tests/components/youtube/snapshots/test_diagnostics.ambr b/tests/components/youtube/snapshots/test_diagnostics.ambr index 50dc2757e8c..a938cb8daad 100644 --- a/tests/components/youtube/snapshots/test_diagnostics.ambr +++ b/tests/components/youtube/snapshots/test_diagnostics.ambr @@ -12,7 +12,6 @@ }), 'subscriber_count': 2290000, 'title': 'Google for Developers', - 'total_views': 214141263, }), }) # --- diff --git a/tests/components/youtube/snapshots/test_sensor.ambr b/tests/components/youtube/snapshots/test_sensor.ambr index dce546b4803..cddfa6f6a3d 100644 --- a/tests/components/youtube/snapshots/test_sensor.ambr +++ b/tests/components/youtube/snapshots/test_sensor.ambr @@ -30,21 +30,6 @@ 'state': '2290000', }) # --- -# name: test_sensor.2 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://yt3.ggpht.com/fca_HuJ99xUxflWdex0XViC3NfctBFreIl8y4i9z411asnGTWY-Ql3MeH_ybA4kNaOjY7kyA=s800-c-k-c0x00ffffff-no-rj', - 'friendly_name': 'Google for Developers Views', - 'unit_of_measurement': 'views', - }), - 'context': , - 'entity_id': 'sensor.google_for_developers_views', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '214141263', - }) -# --- # name: test_sensor_without_uploaded_video StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -73,18 +58,3 @@ 'state': '2290000', }) # --- -# name: test_sensor_without_uploaded_video.2 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'entity_picture': 'https://yt3.ggpht.com/fca_HuJ99xUxflWdex0XViC3NfctBFreIl8y4i9z411asnGTWY-Ql3MeH_ybA4kNaOjY7kyA=s800-c-k-c0x00ffffff-no-rj', - 'friendly_name': 'Google for Developers Views', - 'unit_of_measurement': 'views', - }), - 'context': , - 'entity_id': 'sensor.google_for_developers_views', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '214141263', - }) -# --- diff --git a/tests/components/youtube/test_sensor.py b/tests/components/youtube/test_sensor.py index e883347c8db..ae0c38306e4 100644 --- a/tests/components/youtube/test_sensor.py +++ b/tests/components/youtube/test_sensor.py @@ -29,9 +29,6 @@ async def test_sensor( state = hass.states.get("sensor.google_for_developers_subscribers") assert state == snapshot - state = hass.states.get("sensor.google_for_developers_views") - assert state == snapshot - async def test_sensor_without_uploaded_video( hass: HomeAssistant, snapshot: SnapshotAssertion, setup_integration: ComponentSetup @@ -55,9 +52,6 @@ async def test_sensor_without_uploaded_video( state = hass.states.get("sensor.google_for_developers_subscribers") assert state == snapshot - state = hass.states.get("sensor.google_for_developers_views") - assert state == snapshot - async def test_sensor_updating( hass: HomeAssistant, setup_integration: ComponentSetup @@ -101,9 +95,6 @@ async def test_sensor_reauth_trigger( state = hass.states.get("sensor.google_for_developers_subscribers") assert state.state == "2290000" - state = hass.states.get("sensor.google_for_developers_views") - assert state.state == "214141263" - mock.set_thrown_exception(UnauthorizedError()) future = dt_util.utcnow() + timedelta(minutes=15) async_fire_time_changed(hass, future) @@ -130,9 +121,6 @@ async def test_sensor_unavailable( state = hass.states.get("sensor.google_for_developers_subscribers") assert state.state == "2290000" - state = hass.states.get("sensor.google_for_developers_views") - assert state.state == "214141263" - mock.set_thrown_exception(YouTubeBackendError()) future = dt_util.utcnow() + 
timedelta(minutes=15) async_fire_time_changed(hass, future) @@ -143,6 +131,3 @@ async def test_sensor_unavailable( state = hass.states.get("sensor.google_for_developers_subscribers") assert state.state == "unavailable" - - state = hass.states.get("sensor.google_for_developers_views") - assert state.state == "unavailable" diff --git a/tests/components/zamg/conftest.py b/tests/components/zamg/conftest.py index 9fa4f333ef8..1795baa7fad 100644 --- a/tests/components/zamg/conftest.py +++ b/tests/components/zamg/conftest.py @@ -1,10 +1,10 @@ """Fixtures for Zamg integration tests.""" -from collections.abc import Generator import json from unittest.mock import MagicMock, patch import pytest +from typing_extensions import Generator from zamg import ZamgData as ZamgDevice from homeassistant.components.zamg.const import CONF_STATION_ID, DOMAIN diff --git a/tests/components/zeroconf/test_init.py b/tests/components/zeroconf/test_init.py index be78964f231..0a552f37aa9 100644 --- a/tests/components/zeroconf/test_init.py +++ b/tests/components/zeroconf/test_init.py @@ -12,7 +12,6 @@ from zeroconf import ( ) from zeroconf.asyncio import AsyncServiceInfo -from homeassistant import config_entries from homeassistant.components import zeroconf from homeassistant.const import ( EVENT_COMPONENT_LOADED, @@ -23,11 +22,8 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.generated import zeroconf as zc_gen -from homeassistant.helpers.discovery_flow import DiscoveryKey from homeassistant.setup import ATTR_COMPONENT, async_setup_component -from tests.common import MockConfigEntry, MockModule, mock_integration - NON_UTF8_VALUE = b"ABCDEF\x8a" NON_ASCII_KEY = b"non-ascii-key\x8a" PROPERTIES = { @@ -307,14 +303,7 @@ async def test_zeroconf_match_macaddress(hass: HomeAssistant) -> None: assert len(mock_service_browser.mock_calls) == 1 assert len(mock_config_flow.mock_calls) == 1 assert mock_config_flow.mock_calls[0][1][0] == "shelly" - assert mock_config_flow.mock_calls[0][2]["context"] == { - "discovery_key": DiscoveryKey( - domain="zeroconf", - key=("_http._tcp.local.", "Shelly108._http._tcp.local."), - version=1, - ), - "source": "zeroconf", - } + assert mock_config_flow.mock_calls[0][2]["context"] == {"source": "zeroconf"} @pytest.mark.usefixtures("mock_async_zeroconf") @@ -553,11 +542,6 @@ async def test_homekit_match_partial_space(hass: HomeAssistant) -> None: assert mock_config_flow.mock_calls[1][2]["context"] == { "source": "zeroconf", "alternative_domain": "lifx", - "discovery_key": DiscoveryKey( - domain="zeroconf", - key=("_hap._tcp.local.", "_name._hap._tcp.local."), - version=1, - ), } @@ -1397,261 +1381,3 @@ async def test_zeroconf_removed(hass: HomeAssistant) -> None: assert len(mock_service_browser.mock_calls) == 1 assert len(mock_async_progress_by_init_data_type.mock_calls) == 1 assert mock_async_abort.mock_calls[0][1][0] == "mock_flow_id" - - -@pytest.mark.usefixtures("mock_async_zeroconf") -@pytest.mark.parametrize( - ( - "entry_domain", - "entry_discovery_keys", - ), - [ - # Matching discovery key - ( - "shelly", - { - "zeroconf": ( - DiscoveryKey( - domain="zeroconf", - key=("_http._tcp.local.", "Shelly108._http._tcp.local."), - version=1, - ), - ) - }, - ), - # Matching discovery key - ( - "shelly", - { - "zeroconf": ( - DiscoveryKey( - domain="zeroconf", - key=("_http._tcp.local.", "Shelly108._http._tcp.local."), - version=1, - ), - ), - "other": ( - DiscoveryKey( - domain="other", - key="blah", - version=1, - ), - ), - }, - ), - # Matching 
discovery key, other domain - # Note: Rediscovery is not currently restricted to the domain of the removed - # entry. Such a check can be added if needed. - ( - "comp", - { - "zeroconf": ( - DiscoveryKey( - domain="zeroconf", - key=("_http._tcp.local.", "Shelly108._http._tcp.local."), - version=1, - ), - ) - }, - ), - ], -) -@pytest.mark.parametrize( - "entry_source", - [ - config_entries.SOURCE_IGNORE, - config_entries.SOURCE_USER, - config_entries.SOURCE_ZEROCONF, - ], -) -async def test_zeroconf_rediscover( - hass: HomeAssistant, - entry_domain: str, - entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], - entry_source: str, -) -> None: - """Test we reinitiate flows when an ignored config entry is removed.""" - - def http_only_service_update_mock(zeroconf, services, handlers): - """Call service update handler.""" - handlers[0]( - zeroconf, - "_http._tcp.local.", - "Shelly108._http._tcp.local.", - ServiceStateChange.Added, - ) - - entry = MockConfigEntry( - domain=entry_domain, - discovery_keys=entry_discovery_keys, - unique_id="mock-unique-id", - state=config_entries.ConfigEntryState.LOADED, - source=entry_source, - ) - entry.add_to_hass(hass) - - with ( - patch.dict( - zc_gen.ZEROCONF, - { - "_http._tcp.local.": [ - { - "domain": "shelly", - "name": "shelly*", - "properties": {"macaddress": "ffaadd*"}, - } - ] - }, - clear=True, - ), - patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, - patch.object( - zeroconf, "AsyncServiceBrowser", side_effect=http_only_service_update_mock - ) as mock_service_browser, - patch( - "homeassistant.components.zeroconf.AsyncServiceInfo", - side_effect=get_zeroconf_info_mock("FFAADDCC11DD"), - ), - ): - assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}}) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - await hass.async_block_till_done() - - expected_context = { - "discovery_key": DiscoveryKey( - domain="zeroconf", - key=("_http._tcp.local.", "Shelly108._http._tcp.local."), - version=1, - ), - "source": "zeroconf", - } - assert len(mock_service_browser.mock_calls) == 1 - assert len(mock_config_flow.mock_calls) == 1 - assert mock_config_flow.mock_calls[0][1][0] == "shelly" - assert mock_config_flow.mock_calls[0][2]["context"] == expected_context - - await hass.config_entries.async_remove(entry.entry_id) - await hass.async_block_till_done() - - assert len(mock_service_browser.mock_calls) == 1 - assert len(mock_config_flow.mock_calls) == 2 - assert mock_config_flow.mock_calls[1][1][0] == "shelly" - assert mock_config_flow.mock_calls[1][2]["context"] == expected_context - - -@pytest.mark.usefixtures("mock_async_zeroconf") -@pytest.mark.parametrize( - ( - "entry_domain", - "entry_discovery_keys", - "entry_source", - "entry_unique_id", - ), - [ - # Discovery key from other domain - ( - "shelly", - { - "bluetooth": ( - DiscoveryKey( - domain="bluetooth", - key=("_http._tcp.local.", "Shelly108._http._tcp.local."), - version=1, - ), - ) - }, - config_entries.SOURCE_IGNORE, - "mock-unique-id", - ), - # Discovery key from the future - ( - "shelly", - { - "zeroconf": ( - DiscoveryKey( - domain="zeroconf", - key=("_http._tcp.local.", "Shelly108._http._tcp.local."), - version=2, - ), - ) - }, - config_entries.SOURCE_IGNORE, - "mock-unique-id", - ), - ], -) -async def test_zeroconf_rediscover_no_match( - hass: HomeAssistant, - entry_domain: str, - entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], - entry_source: str, - entry_unique_id: str, -) -> None: - """Test we don't reinitiate flows when a non 
matching config entry is removed.""" - - def http_only_service_update_mock(zeroconf, services, handlers): - """Call service update handler.""" - handlers[0]( - zeroconf, - "_http._tcp.local.", - "Shelly108._http._tcp.local.", - ServiceStateChange.Added, - ) - - hass.config.components.add(entry_domain) - mock_integration(hass, MockModule(entry_domain)) - - entry = MockConfigEntry( - domain=entry_domain, - discovery_keys=entry_discovery_keys, - unique_id=entry_unique_id, - state=config_entries.ConfigEntryState.LOADED, - source=entry_source, - ) - entry.add_to_hass(hass) - - with ( - patch.dict( - zc_gen.ZEROCONF, - { - "_http._tcp.local.": [ - { - "domain": "shelly", - "name": "shelly*", - "properties": {"macaddress": "ffaadd*"}, - } - ] - }, - clear=True, - ), - patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, - patch.object( - zeroconf, "AsyncServiceBrowser", side_effect=http_only_service_update_mock - ) as mock_service_browser, - patch( - "homeassistant.components.zeroconf.AsyncServiceInfo", - side_effect=get_zeroconf_info_mock("FFAADDCC11DD"), - ), - ): - assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}}) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - await hass.async_block_till_done() - - expected_context = { - "discovery_key": DiscoveryKey( - domain="zeroconf", - key=("_http._tcp.local.", "Shelly108._http._tcp.local."), - version=1, - ), - "source": "zeroconf", - } - assert len(mock_service_browser.mock_calls) == 1 - assert len(mock_config_flow.mock_calls) == 1 - assert mock_config_flow.mock_calls[0][1][0] == "shelly" - assert mock_config_flow.mock_calls[0][2]["context"] == expected_context - - await hass.config_entries.async_remove(entry.entry_id) - await hass.async_block_till_done() - - assert len(mock_service_browser.mock_calls) == 1 - assert len(mock_config_flow.mock_calls) == 1 diff --git a/tests/components/zerproc/test_light.py b/tests/components/zerproc/test_light.py index 6e00cfbde4c..c47f960b182 100644 --- a/tests/components/zerproc/test_light.py +++ b/tests/components/zerproc/test_light.py @@ -35,13 +35,13 @@ from tests.common import MockConfigEntry, async_fire_time_changed @pytest.fixture -async def mock_entry() -> MockConfigEntry: +async def mock_entry(hass): """Create a mock light entity.""" return MockConfigEntry(domain=DOMAIN) @pytest.fixture -async def mock_light(hass: HomeAssistant, mock_entry: MockConfigEntry) -> MagicMock: +async def mock_light(hass, mock_entry): """Create a mock light entity.""" mock_entry.add_to_hass(hass) diff --git a/tests/components/zeversolar/snapshots/test_diagnostics.ambr b/tests/components/zeversolar/snapshots/test_diagnostics.ambr index 4090a3262ba..eebc8468076 100644 --- a/tests/components/zeversolar/snapshots/test_diagnostics.ambr +++ b/tests/components/zeversolar/snapshots/test_diagnostics.ambr @@ -10,7 +10,6 @@ # name: test_entry_diagnostics dict({ 'communication_status': 'OK', - 'energy_today': 123.4, 'hardware_version': 'M10', 'meter_status': 'OK', 'num_inverters': 1, diff --git a/tests/components/zeversolar/snapshots/test_sensor.ambr b/tests/components/zeversolar/snapshots/test_sensor.ambr index aaef2c43d79..bee522133a5 100644 --- a/tests/components/zeversolar/snapshots/test_sensor.ambr +++ b/tests/components/zeversolar/snapshots/test_sensor.ambr @@ -1,4 +1,24 @@ # serializer version: 1 +# name: test_sensors + ConfigEntrySnapshot({ + 'data': dict({ + 'host': 'zeversolar-fake-host', + 'port': 10200, + }), + 'disabled_by': None, + 'domain': 'zeversolar', + 'entry_id': , + 
'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }) +# --- # name: test_sensors[sensor.zeversolar_sensor_energy_today-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/zha/common.py b/tests/components/zha/common.py index 1dd1e5f81aa..a8bec33a23a 100644 --- a/tests/components/zha/common.py +++ b/tests/components/zha/common.py @@ -1,12 +1,19 @@ """Common test objects.""" +import asyncio from datetime import timedelta -from unittest.mock import AsyncMock, Mock +import math +from typing import Any +from unittest.mock import AsyncMock, Mock, patch import zigpy.zcl import zigpy.zcl.foundation as zcl_f -from homeassistant.components.zha.helpers import ZHADeviceProxy +import homeassistant.components.zha.core.const as zha_const +from homeassistant.components.zha.core.helpers import ( + async_get_zha_config_value, + get_zha_gateway, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util @@ -91,7 +98,7 @@ def make_attribute(attrid, value, status=0): return attr -def send_attribute_report(hass: HomeAssistant, cluster, attrid, value): +def send_attribute_report(hass, cluster, attrid, value): """Send a single attribute report.""" return send_attributes_report(hass, cluster, {attrid: value}) @@ -124,9 +131,7 @@ async def send_attributes_report( await hass.async_block_till_done() -def find_entity_id( - domain: str, zha_device: ZHADeviceProxy, hass: HomeAssistant, qualifier=None -) -> str | None: +def find_entity_id(domain, zha_device, hass, qualifier=None): """Find the entity id under the testing. This is used to get the entity id in order to get the state from the state @@ -139,13 +144,11 @@ def find_entity_id( for entity_id in entities: if qualifier in entity_id: return entity_id - return None - return entities[0] + else: + return entities[0] -def find_entity_ids( - domain: str, zha_device: ZHADeviceProxy, hass: HomeAssistant -) -> list[str]: +def find_entity_ids(domain, zha_device, hass): """Find the entity ids under the testing. 
This is used to get the entity id in order to get the state from the state @@ -160,7 +163,7 @@ def find_entity_ids( ] -def async_find_group_entity_id(hass: HomeAssistant, domain, group): +def async_find_group_entity_id(hass, domain, group): """Find the group entity id under test.""" entity_id = f"{domain}.coordinator_manufacturer_coordinator_model_{group.name.lower().replace(' ', '_')}" @@ -169,6 +172,13 @@ def async_find_group_entity_id(hass: HomeAssistant, domain, group): return entity_id +async def async_enable_traffic(hass, zha_devices, enabled=True): + """Allow traffic to flow through the gateway and the ZHA device.""" + for zha_device in zha_devices: + zha_device.update_available(enabled) + await hass.async_block_till_done() + + def make_zcl_header( command_id: int, global_command: bool = True, tsn: int = 1 ) -> zcl_f.ZCLHeader: @@ -189,8 +199,57 @@ def reset_clusters(clusters): cluster.write_attributes.reset_mock() -async def async_shift_time(hass: HomeAssistant): +async def async_test_rejoin(hass, zigpy_device, clusters, report_counts, ep_id=1): + """Test device rejoins.""" + reset_clusters(clusters) + + zha_gateway = get_zha_gateway(hass) + await zha_gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done() + for cluster, reports in zip(clusters, report_counts, strict=False): + assert cluster.bind.call_count == 1 + assert cluster.bind.await_count == 1 + if reports: + assert cluster.configure_reporting.call_count == 0 + assert cluster.configure_reporting.await_count == 0 + assert cluster.configure_reporting_multiple.call_count == math.ceil( + reports / zha_const.REPORT_CONFIG_ATTR_PER_REQ + ) + assert cluster.configure_reporting_multiple.await_count == math.ceil( + reports / zha_const.REPORT_CONFIG_ATTR_PER_REQ + ) + else: + # no reports at all + assert cluster.configure_reporting.call_count == reports + assert cluster.configure_reporting.await_count == reports + assert cluster.configure_reporting_multiple.call_count == reports + assert cluster.configure_reporting_multiple.await_count == reports + + +async def async_wait_for_updates(hass): + """Wait until all scheduled updates are executed.""" + await hass.async_block_till_done() + await asyncio.sleep(0) + await asyncio.sleep(0) + await hass.async_block_till_done() + + +async def async_shift_time(hass): """Shift time to cause call later tasks to run.""" next_update = dt_util.utcnow() + timedelta(seconds=11) async_fire_time_changed(hass, next_update) await hass.async_block_till_done() + + +def patch_zha_config(component: str, overrides: dict[tuple[str, str], Any]): + """Patch the ZHA custom configuration defaults.""" + + def new_get_config(config_entry, section, config_key, default): + if (section, config_key) in overrides: + return overrides[section, config_key] + return async_get_zha_config_value(config_entry, section, config_key, default) + + return patch( + f"homeassistant.components.zha.{component}.async_get_zha_config_value", + side_effect=new_get_config, + ) diff --git a/tests/components/zha/conftest.py b/tests/components/zha/conftest.py index a9f4c51d75d..410eaceda76 100644 --- a/tests/components/zha/conftest.py +++ b/tests/components/zha/conftest.py @@ -1,6 +1,6 @@ """Test configuration for the ZHA component.""" -from collections.abc import Generator +from collections.abc import Callable import itertools import time from typing import Any @@ -8,6 +8,7 @@ from unittest.mock import AsyncMock, MagicMock, create_autospec, patch import warnings import pytest +from typing_extensions import Generator 
import zigpy from zigpy.application import ControllerApplication import zigpy.backups @@ -24,9 +25,14 @@ from zigpy.zcl.clusters.general import Basic, Groups from zigpy.zcl.foundation import Status import zigpy.zdo.types as zdo_t -import homeassistant.components.zha.const as zha_const +import homeassistant.components.zha.core.const as zha_const +import homeassistant.components.zha.core.device as zha_core_device +from homeassistant.components.zha.core.gateway import ZHAGateway +from homeassistant.components.zha.core.helpers import get_zha_gateway from homeassistant.core import HomeAssistant +from homeassistant.helpers import restore_state from homeassistant.setup import async_setup_component +import homeassistant.util.dt as dt_util from .common import patch_cluster as common_patch_cluster @@ -38,6 +44,17 @@ FIXTURE_GRP_NAME = "fixture group" COUNTER_NAMES = ["counter_1", "counter_2", "counter_3"] +@pytest.fixture(scope="module", autouse=True) +def disable_request_retry_delay(): + """Disable ZHA request retrying delay to speed up failures.""" + + with patch( + "homeassistant.components.zha.core.cluster_handlers.RETRYABLE_REQUEST_DECORATOR", + zigpy.util.retryable_request(tries=3, delay=0), + ): + yield + + @pytest.fixture(scope="module", autouse=True) def globally_load_quirks(): """Load quirks automatically so that ZHA tests run deterministically in isolation. @@ -111,9 +128,6 @@ class _FakeApp(ControllerApplication): ) -> None: pass - def _persist_coordinator_model_strings_in_db(self) -> None: - pass - def _wrap_mock_instance(obj: Any) -> MagicMock: """Auto-mock every attribute and method in an object.""" @@ -154,8 +168,6 @@ async def zigpy_app_controller(): app.state.node_info.nwk = 0x0000 app.state.node_info.ieee = zigpy.types.EUI64.convert("00:15:8d:00:02:32:4f:32") - app.state.node_info.manufacturer = "Coordinator Manufacturer" - app.state.node_info.model = "Coordinator Model" app.state.network_info.pan_id = 0x1234 app.state.network_info.extended_pan_id = app.state.node_info.ieee app.state.network_info.channel = 15 @@ -190,14 +202,10 @@ async def zigpy_app_controller(): async def config_entry_fixture() -> MockConfigEntry: """Fixture representing a config entry.""" return MockConfigEntry( - version=4, + version=3, domain=zha_const.DOMAIN, data={ - zigpy.config.CONF_DEVICE: { - zigpy.config.CONF_DEVICE_PATH: "/dev/ttyUSB0", - zigpy.config.CONF_DEVICE_BAUDRATE: 115200, - zigpy.config.CONF_DEVICE_FLOW_CONTROL: "hardware", - }, + zigpy.config.CONF_DEVICE: {zigpy.config.CONF_DEVICE_PATH: "/dev/ttyUSB0"}, zha_const.CONF_RADIO_TYPE: "ezsp", }, options={ @@ -272,6 +280,170 @@ def cluster_handler(): return cluster_handler +@pytest.fixture +def zigpy_device_mock(zigpy_app_controller): + """Make a fake device using the specified cluster classes.""" + + def _mock_dev( + endpoints, + ieee="00:0d:6f:00:0a:90:69:e7", + manufacturer="FakeManufacturer", + model="FakeModel", + node_descriptor=b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00", + nwk=0xB79C, + patch_cluster=True, + quirk=None, + attributes=None, + ): + """Make a fake device using the specified cluster classes.""" + device = zigpy.device.Device( + zigpy_app_controller, zigpy.types.EUI64.convert(ieee), nwk + ) + device.manufacturer = manufacturer + device.model = model + device.node_desc = zdo_t.NodeDescriptor.deserialize(node_descriptor)[0] + device.last_seen = time.time() + + for epid, ep in endpoints.items(): + endpoint = device.add_endpoint(epid) + endpoint.device_type = ep[SIG_EP_TYPE] + endpoint.profile_id = ep.get(SIG_EP_PROFILE, 0x0104) + 
endpoint.request = AsyncMock() + + for cluster_id in ep.get(SIG_EP_INPUT, []): + endpoint.add_input_cluster(cluster_id) + + for cluster_id in ep.get(SIG_EP_OUTPUT, []): + endpoint.add_output_cluster(cluster_id) + + device.status = zigpy.device.Status.ENDPOINTS_INIT + + if quirk: + device = quirk(zigpy_app_controller, device.ieee, device.nwk, device) + else: + # Allow zigpy to apply quirks if we don't pass one explicitly + device = zigpy.quirks.get_device(device) + + if patch_cluster: + for endpoint in (ep for epid, ep in device.endpoints.items() if epid): + endpoint.request = AsyncMock(return_value=[0]) + for cluster in itertools.chain( + endpoint.in_clusters.values(), endpoint.out_clusters.values() + ): + common_patch_cluster(cluster) + + if attributes is not None: + for ep_id, clusters in attributes.items(): + for cluster_name, attrs in clusters.items(): + cluster = getattr(device.endpoints[ep_id], cluster_name) + + for name, value in attrs.items(): + attr_id = cluster.find_attribute(name).id + cluster._attr_cache[attr_id] = value + + return device + + return _mock_dev + + +@patch("homeassistant.components.zha.setup_quirks", MagicMock(return_value=True)) +@pytest.fixture +def zha_device_joined(hass, setup_zha): + """Return a newly joined ZHA device.""" + setup_zha_fixture = setup_zha + + async def _zha_device(zigpy_dev, *, setup_zha: bool = True): + zigpy_dev.last_seen = time.time() + + if setup_zha: + await setup_zha_fixture() + + zha_gateway = get_zha_gateway(hass) + zha_gateway.application_controller.devices[zigpy_dev.ieee] = zigpy_dev + await zha_gateway.async_device_initialized(zigpy_dev) + await hass.async_block_till_done() + return zha_gateway.get_device(zigpy_dev.ieee) + + return _zha_device + + +@patch("homeassistant.components.zha.setup_quirks", MagicMock(return_value=True)) +@pytest.fixture +def zha_device_restored(hass, zigpy_app_controller, setup_zha): + """Return a restored ZHA device.""" + setup_zha_fixture = setup_zha + + async def _zha_device(zigpy_dev, *, last_seen=None, setup_zha: bool = True): + zigpy_app_controller.devices[zigpy_dev.ieee] = zigpy_dev + + if last_seen is not None: + zigpy_dev.last_seen = last_seen + + if setup_zha: + await setup_zha_fixture() + + zha_gateway = get_zha_gateway(hass) + return zha_gateway.get_device(zigpy_dev.ieee) + + return _zha_device + + +@pytest.fixture(params=["zha_device_joined", "zha_device_restored"]) +def zha_device_joined_restored(request: pytest.FixtureRequest): + """Join or restore ZHA device.""" + named_method = request.getfixturevalue(request.param) + named_method.name = request.param + return named_method + + +@pytest.fixture +def zha_device_mock( + hass: HomeAssistant, config_entry, zigpy_device_mock +) -> Callable[..., zha_core_device.ZHADevice]: + """Return a ZHA Device factory.""" + + def _zha_device( + endpoints=None, + ieee="00:11:22:33:44:55:66:77", + manufacturer="mock manufacturer", + model="mock model", + node_desc=b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00", + patch_cluster=True, + ) -> zha_core_device.ZHADevice: + if endpoints is None: + endpoints = { + 1: { + "in_clusters": [0, 1, 8, 768], + "out_clusters": [0x19], + "device_type": 0x0105, + }, + 2: { + "in_clusters": [0], + "out_clusters": [6, 8, 0x19, 768], + "device_type": 0x0810, + }, + } + zigpy_device = zigpy_device_mock( + endpoints, ieee, manufacturer, model, node_desc, patch_cluster=patch_cluster + ) + return zha_core_device.ZHADevice( + hass, + zigpy_device, + ZHAGateway(hass, {}, config_entry), + ) + + return _zha_device + + +@pytest.fixture +def 
hass_disable_services(hass): + """Mock services.""" + with patch.object( + hass, "services", MagicMock(has_service=MagicMock(return_value=True)) + ): + yield hass + + @pytest.fixture(autouse=True) def speed_up_radio_mgr(): """Speed up the radio manager connection time by removing delays.""" @@ -351,66 +523,31 @@ def network_backup() -> zigpy.backups.NetworkBackup: @pytest.fixture -def zigpy_device_mock(zigpy_app_controller): - """Make a fake device using the specified cluster classes.""" +def core_rs(hass_storage: dict[str, Any]) -> Callable[[str, Any, dict[str, Any]], None]: + """Core.restore_state fixture.""" - def _mock_dev( - endpoints, - ieee="00:0d:6f:00:0a:90:69:e7", - manufacturer="FakeManufacturer", - model="FakeModel", - node_descriptor=b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00", - nwk=0xB79C, - patch_cluster=True, - quirk=None, - attributes=None, - ): - """Make a fake device using the specified cluster classes.""" - device = zigpy.device.Device( - zigpy_app_controller, zigpy.types.EUI64.convert(ieee), nwk - ) - device.manufacturer = manufacturer - device.model = model - device.node_desc = zdo_t.NodeDescriptor.deserialize(node_descriptor)[0] - device.last_seen = time.time() + def _storage(entity_id: str, state: str, attributes: dict[str, Any]) -> None: + now = dt_util.utcnow().isoformat() - for epid, ep in endpoints.items(): - endpoint = device.add_endpoint(epid) - endpoint.device_type = ep[SIG_EP_TYPE] - endpoint.profile_id = ep.get(SIG_EP_PROFILE, 0x0104) - endpoint.request = AsyncMock() + hass_storage[restore_state.STORAGE_KEY] = { + "version": restore_state.STORAGE_VERSION, + "key": restore_state.STORAGE_KEY, + "data": [ + { + "state": { + "entity_id": entity_id, + "state": str(state), + "attributes": attributes, + "last_changed": now, + "last_updated": now, + "context": { + "id": "3c2243ff5f30447eb12e7348cfd5b8ff", + "user_id": None, + }, + }, + "last_seen": now, + } + ], + } - for cluster_id in ep.get(SIG_EP_INPUT, []): - endpoint.add_input_cluster(cluster_id) - - for cluster_id in ep.get(SIG_EP_OUTPUT, []): - endpoint.add_output_cluster(cluster_id) - - device.status = zigpy.device.Status.ENDPOINTS_INIT - - if quirk: - device = quirk(zigpy_app_controller, device.ieee, device.nwk, device) - else: - # Allow zigpy to apply quirks if we don't pass one explicitly - device = zigpy.quirks.get_device(device) - - if patch_cluster: - for endpoint in (ep for epid, ep in device.endpoints.items() if epid): - endpoint.request = AsyncMock(return_value=[0]) - for cluster in itertools.chain( - endpoint.in_clusters.values(), endpoint.out_clusters.values() - ): - common_patch_cluster(cluster) - - if attributes is not None: - for ep_id, clusters in attributes.items(): - for cluster_name, attrs in clusters.items(): - cluster = getattr(device.endpoints[ep_id], cluster_name) - - for name, value in attrs.items(): - attr_id = cluster.find_attribute(name).id - cluster._attr_cache[attr_id] = value - - return device - - return _mock_dev + return _storage diff --git a/tests/components/zha/data.py b/tests/components/zha/data.py index 80a3df524cd..eb135c7e8fe 100644 --- a/tests/components/zha/data.py +++ b/tests/components/zha/data.py @@ -23,6 +23,12 @@ BASE_CUSTOM_CONFIGURATION = { "required": True, "default": True, }, + { + "type": "boolean", + "name": "always_prefer_xy_color_mode", + "required": True, + "default": True, + }, { "type": "boolean", "name": "group_members_assume_state", @@ -49,12 +55,6 @@ BASE_CUSTOM_CONFIGURATION = { "optional": True, "default": 21600, }, - { - "default": True, - "name": 
"enable_mains_startup_polling", - "required": True, - "type": "boolean", - }, ] }, "data": { @@ -62,9 +62,9 @@ BASE_CUSTOM_CONFIGURATION = { "enhanced_light_transition": True, "default_light_transition": 0, "light_transitioning_flag": True, + "always_prefer_xy_color_mode": True, "group_members_assume_state": False, "enable_identify_on_join": True, - "enable_mains_startup_polling": True, "consider_unavailable_mains": 7200, "consider_unavailable_battery": 21600, } @@ -94,6 +94,12 @@ CONFIG_WITH_ALARM_OPTIONS = { "required": True, "default": True, }, + { + "type": "boolean", + "name": "always_prefer_xy_color_mode", + "required": True, + "default": True, + }, { "type": "boolean", "name": "group_members_assume_state", @@ -120,12 +126,6 @@ CONFIG_WITH_ALARM_OPTIONS = { "optional": True, "default": 21600, }, - { - "default": True, - "name": "enable_mains_startup_polling", - "required": True, - "type": "boolean", - }, ], "zha_alarm_options": [ { @@ -154,9 +154,9 @@ CONFIG_WITH_ALARM_OPTIONS = { "enhanced_light_transition": True, "default_light_transition": 0, "light_transitioning_flag": True, + "always_prefer_xy_color_mode": True, "group_members_assume_state": False, "enable_identify_on_join": True, - "enable_mains_startup_polling": True, "consider_unavailable_mains": 7200, "consider_unavailable_battery": 21600, }, diff --git a/tests/components/zha/snapshots/test_diagnostics.ambr b/tests/components/zha/snapshots/test_diagnostics.ambr deleted file mode 100644 index f46a06e84b8..00000000000 --- a/tests/components/zha/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,294 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics_for_config_entry - dict({ - 'application_state': dict({ - 'broadcast_counters': dict({ - }), - 'counters': dict({ - 'ezsp_counters': dict({ - 'counter_1': dict({ - '__type': "", - 'repr': "Counter(name='counter_1', _raw_value=1, reset_count=0, _last_reset_value=0)", - }), - 'counter_2': dict({ - '__type': "", - 'repr': "Counter(name='counter_2', _raw_value=1, reset_count=0, _last_reset_value=0)", - }), - 'counter_3': dict({ - '__type': "", - 'repr': "Counter(name='counter_3', _raw_value=1, reset_count=0, _last_reset_value=0)", - }), - }), - }), - 'device_counters': dict({ - }), - 'group_counters': dict({ - }), - 'network_info': dict({ - 'channel': 15, - 'channel_mask': 0, - 'children': list([ - ]), - 'extended_pan_id': '**REDACTED**', - 'key_table': list([ - ]), - 'metadata': dict({ - }), - 'network_key': '**REDACTED**', - 'nwk_addresses': dict({ - }), - 'nwk_manager_id': 0, - 'nwk_update_id': 0, - 'pan_id': 4660, - 'security_level': 0, - 'source': None, - 'stack_specific': dict({ - }), - 'tc_link_key': dict({ - 'key': list([ - 90, - 105, - 103, - 66, - 101, - 101, - 65, - 108, - 108, - 105, - 97, - 110, - 99, - 101, - 48, - 57, - ]), - 'partner_ieee': '**REDACTED**', - 'rx_counter': 0, - 'seq': 0, - 'tx_counter': 0, - }), - }), - 'node_info': dict({ - 'ieee': '**REDACTED**', - 'logical_type': 2, - 'manufacturer': 'Coordinator Manufacturer', - 'model': 'Coordinator Model', - 'nwk': 0, - 'version': None, - }), - }), - 'config': dict({ - 'device_config': dict({ - }), - 'enable_quirks': False, - }), - 'config_entry': dict({ - 'data': dict({ - 'device': dict({ - 'baudrate': 115200, - 'flow_control': 'hardware', - 'path': '/dev/ttyUSB0', - }), - 'radio_type': 'ezsp', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'zha', - 'minor_version': 1, - 'options': dict({ - 'custom_configuration': dict({ - 'zha_alarm_options': dict({ - 'alarm_arm_requires_code': 
False, - 'alarm_failed_tries': 2, - 'alarm_master_code': '**REDACTED**', - }), - 'zha_options': dict({ - 'enhanced_light_transition': True, - 'group_members_assume_state': False, - }), - }), - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': None, - 'version': 4, - }), - 'devices': list([ - dict({ - 'logical_type': 'Coordinator', - 'manufacturer': 'Coordinator Manufacturer', - 'model': 'Coordinator Model', - }), - dict({ - 'logical_type': 'EndDevice', - 'manufacturer': 'FakeManufacturer', - 'model': 'FakeModel', - }), - ]), - 'energy_scan': dict({ - '11': 4.313725490196078, - '12': 4.705882352941177, - '13': 5.098039215686274, - '14': 5.490196078431373, - '15': 5.882352941176471, - '16': 6.2745098039215685, - '17': 6.666666666666667, - '18': 7.0588235294117645, - '19': 7.450980392156863, - '20': 7.8431372549019605, - '21': 8.235294117647058, - '22': 8.627450980392156, - '23': 9.019607843137255, - '24': 9.411764705882353, - '25': 9.803921568627452, - '26': 10.196078431372548, - }), - }) -# --- -# name: test_diagnostics_for_device - dict({ - 'active_coordinator': False, - 'area_id': None, - 'available': True, - 'cluster_details': dict({ - '1': dict({ - 'device_type': dict({ - 'id': 1025, - 'name': 'IAS_ANCILLARY_CONTROL', - }), - 'in_clusters': dict({ - '0x0500': dict({ - 'attributes': dict({ - '0x0000': dict({ - 'attribute': "ZCLAttributeDef(id=0x0000, name='zone_state', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", - 'value': None, - }), - '0x0001': dict({ - 'attribute': "ZCLAttributeDef(id=0x0001, name='zone_type', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", - 'value': None, - }), - '0x0002': dict({ - 'attribute': "ZCLAttributeDef(id=0x0002, name='zone_status', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", - 'value': None, - }), - '0x0010': dict({ - 'attribute': "ZCLAttributeDef(id=0x0010, name='cie_addr', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", - 'value': list([ - 50, - 79, - 50, - 2, - 0, - 141, - 21, - 0, - ]), - }), - '0x0011': dict({ - 'attribute': "ZCLAttributeDef(id=0x0011, name='zone_id', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", - 'value': None, - }), - '0x0012': dict({ - 'attribute': "ZCLAttributeDef(id=0x0012, name='num_zone_sensitivity_levels_supported', type=, zcl_type=, access=, mandatory=False, is_manufacturer_specific=False)", - 'value': None, - }), - '0x0013': dict({ - 'attribute': "ZCLAttributeDef(id=0x0013, name='current_zone_sensitivity_level', type=, zcl_type=, access=, mandatory=False, is_manufacturer_specific=False)", - 'value': None, - }), - }), - 'endpoint_attribute': 'ias_zone', - 'unsupported_attributes': list([ - 18, - 'current_zone_sensitivity_level', - ]), - }), - '0x0501': dict({ - 'attributes': dict({ - '0xfffd': dict({ - 'attribute': "ZCLAttributeDef(id=0xFFFD, name='cluster_revision', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", - 'value': None, - }), - '0xfffe': dict({ - 'attribute': "ZCLAttributeDef(id=0xFFFE, name='reporting_status', type=, zcl_type=, access=, mandatory=False, is_manufacturer_specific=False)", - 'value': None, - }), - }), - 'endpoint_attribute': 'ias_ace', - 'unsupported_attributes': list([ - 4096, - 'unknown_attribute_name', - ]), - }), - }), - 'out_clusters': dict({ - }), - 'profile_id': 260, - }), - }), - 'device_type': 'EndDevice', - 'endpoint_names': 
list([ - dict({ - 'name': 'IAS_ANCILLARY_CONTROL', - }), - ]), - 'entities': list([ - dict({ - 'entity_id': 'alarm_control_panel.fakemanufacturer_fakemodel_alarm_control_panel', - 'name': 'FakeManufacturer FakeModel', - }), - ]), - 'ieee': '**REDACTED**', - 'lqi': None, - 'manufacturer': 'FakeManufacturer', - 'manufacturer_code': 4098, - 'model': 'FakeModel', - 'name': 'FakeManufacturer FakeModel', - 'neighbors': list([ - ]), - 'nwk': 47004, - 'power_source': 'Mains', - 'quirk_applied': False, - 'quirk_class': 'zigpy.device.Device', - 'quirk_id': None, - 'routes': list([ - ]), - 'rssi': None, - 'signature': dict({ - 'endpoints': dict({ - '1': dict({ - 'device_type': '0x0401', - 'input_clusters': list([ - '0x0500', - '0x0501', - ]), - 'output_clusters': list([ - ]), - 'profile_id': '0x0104', - }), - }), - 'manufacturer': 'FakeManufacturer', - 'model': 'FakeModel', - 'node_descriptor': dict({ - 'aps_flags': 0, - 'complex_descriptor_available': 0, - 'descriptor_capability_field': 0, - 'frequency_band': 8, - 'logical_type': 2, - 'mac_capability_flags': 140, - 'manufacturer_code': 4098, - 'maximum_buffer_size': 82, - 'maximum_incoming_transfer_size': 82, - 'maximum_outgoing_transfer_size': 82, - 'reserved': 0, - 'server_mask': 0, - 'user_descriptor_available': 0, - }), - }), - 'user_given_name': None, - }) -# --- diff --git a/tests/components/zha/test_alarm_control_panel.py b/tests/components/zha/test_alarm_control_panel.py index 609438cd725..8d3bd76ef61 100644 --- a/tests/components/zha/test_alarm_control_panel.py +++ b/tests/components/zha/test_alarm_control_panel.py @@ -4,24 +4,23 @@ from unittest.mock import AsyncMock, call, patch, sentinel import pytest from zigpy.profiles import zha -from zigpy.zcl import Cluster from zigpy.zcl.clusters import security import zigpy.zcl.foundation as zcl_f -from homeassistant.components.alarm_control_panel import ( - DOMAIN as ALARM_DOMAIN, - AlarmControlPanelState, +from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_DISARMED, + STATE_ALARM_TRIGGERED, + STATE_UNAVAILABLE, + Platform, ) -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, -) -from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from .common import find_entity_id +from .common import async_enable_traffic, find_entity_id from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE @@ -40,41 +39,45 @@ def alarm_control_panel_platform_only(): yield +@pytest.fixture +def zigpy_device(zigpy_device_mock): + """Device tracker zigpy device.""" + endpoints = { + 1: { + SIG_EP_INPUT: [security.IasAce.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + return zigpy_device_mock( + endpoints, node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00" + ) + + @patch( "zigpy.zcl.clusters.security.IasAce.client_command", new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), ) async def test_alarm_control_panel( - hass: HomeAssistant, setup_zha, zigpy_device_mock + hass: HomeAssistant, zha_device_joined_restored, zigpy_device ) -> None: """Test ZHA alarm control panel platform.""" - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - - 
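# Note on the test below: the zigpy_device fixture's node_descriptor bytes decode to
# mac_capability_flags 0x8c (mains powered, receiver on when idle), so the joined/restored
# device reports a Mains power source and its alarm entity becomes available once
# async_enable_traffic marks it available. The cluster.client_command assertions further
# down check the IAS ACE panel_status_changed client command (id 0x04), whose arguments
# are (panel_status, seconds_remaining, audible_notification, alarm_status).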
zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [security.IasAce.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", - ) - - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - entity_id = find_entity_id(Platform.ALARM_CONTROL_PANEL, zha_device_proxy, hass) - cluster = zigpy_device.endpoints[1].ias_ace + zha_device = await zha_device_joined_restored(zigpy_device) + cluster = zigpy_device.endpoints.get(1).ias_ace + entity_id = find_entity_id(Platform.ALARM_CONTROL_PANEL, zha_device, hass) assert entity_id is not None + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the panel was created and that it is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + # test that the state has changed from unavailable to STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED # arm_away from HA cluster.client_command.reset_mock() @@ -85,7 +88,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY assert cluster.client_command.call_count == 2 assert cluster.client_command.await_count == 2 assert cluster.client_command.call_args == call( @@ -108,7 +111,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY cluster.client_command.reset_mock() await hass.services.async_call( ALARM_DOMAIN, @@ -123,7 +126,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED assert cluster.client_command.call_count == 4 assert cluster.client_command.await_count == 4 assert cluster.client_command.call_args == call( @@ -146,7 +149,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_HOME + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_HOME assert cluster.client_command.call_count == 2 assert cluster.client_command.await_count == 2 assert cluster.client_command.call_args == call( @@ -166,7 +169,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_NIGHT + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT assert cluster.client_command.call_count == 2 assert cluster.client_command.await_count == 2 assert cluster.client_command.call_args == call( @@ -185,7 +188,7 @@ async def test_alarm_control_panel( "cluster_command", 1, 0, 
[security.IasAce.ArmMode.Arm_All_Zones, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -195,7 +198,7 @@ async def test_alarm_control_panel( "cluster_command", 1, 0, [security.IasAce.ArmMode.Arm_Day_Home_Only, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_HOME + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_HOME # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -205,33 +208,33 @@ async def test_alarm_control_panel( "cluster_command", 1, 0, [security.IasAce.ArmMode.Arm_Night_Sleep_Only, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_NIGHT + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT # disarm from panel with bad code cluster.listener_event( "cluster_command", 1, 0, [security.IasAce.ArmMode.Disarm, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_NIGHT + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT # disarm from panel with bad code for 2nd time trips alarm cluster.listener_event( "cluster_command", 1, 0, [security.IasAce.ArmMode.Disarm, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED # disarm from panel with good code cluster.listener_event( "cluster_command", 1, 0, [security.IasAce.ArmMode.Disarm, "4321", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED # panic from panel cluster.listener_event("cluster_command", 1, 4, []) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -239,7 +242,7 @@ async def test_alarm_control_panel( # fire from panel cluster.listener_event("cluster_command", 1, 3, []) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -247,35 +250,13 @@ async def test_alarm_control_panel( # emergency from panel cluster.listener_event("cluster_command", 1, 2, []) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED # reset the panel await reset_alarm_panel(hass, cluster, entity_id) - await hass.services.async_call( - ALARM_DOMAIN, - "alarm_trigger", - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED - assert cluster.client_command.call_count == 1 - assert cluster.client_command.await_count == 1 - assert cluster.client_command.call_args == call( - 4, - security.IasAce.PanelStatus.In_Alarm, - 0, - security.IasAce.AudibleNotification.Default_Sound, - 
security.IasAce.AlarmStatus.Emergency_Panic, - ) - # reset the panel - await reset_alarm_panel(hass, cluster, entity_id) - cluster.client_command.reset_mock() - - -async def reset_alarm_panel(hass: HomeAssistant, cluster: Cluster, entity_id: str): +async def reset_alarm_panel(hass, cluster, entity_id): """Reset the state of the alarm panel.""" cluster.client_command.reset_mock() await hass.services.async_call( @@ -285,7 +266,7 @@ async def reset_alarm_panel(hass: HomeAssistant, cluster: Cluster, entity_id: st blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED assert cluster.client_command.call_count == 2 assert cluster.client_command.await_count == 2 assert cluster.client_command.call_args == call( diff --git a/tests/components/zha/test_api.py b/tests/components/zha/test_api.py index 7aff6d81f5d..ed3394aafba 100644 --- a/tests/components/zha/test_api.py +++ b/tests/components/zha/test_api.py @@ -6,12 +6,12 @@ from typing import TYPE_CHECKING from unittest.mock import AsyncMock, MagicMock, call, patch import pytest -from zha.application.const import RadioType import zigpy.backups import zigpy.state from homeassistant.components.zha import api -from homeassistant.components.zha.helpers import get_zha_gateway_proxy +from homeassistant.components.zha.core.const import RadioType +from homeassistant.components.zha.core.helpers import get_zha_gateway from homeassistant.core import HomeAssistant if TYPE_CHECKING: @@ -41,7 +41,7 @@ async def test_async_get_network_settings_inactive( """Test reading settings with an inactive ZHA installation.""" await setup_zha() - gateway = get_zha_gateway_proxy(hass) + gateway = get_zha_gateway(hass) await hass.config_entries.async_unload(gateway.config_entry.entry_id) backup = zigpy.backups.NetworkBackup() @@ -53,7 +53,7 @@ async def test_async_get_network_settings_inactive( controller.new = AsyncMock(return_value=zigpy_app_controller) with patch.dict( - "homeassistant.components.zha.api.RadioType._member_map_", + "homeassistant.components.zha.core.const.RadioType._member_map_", ezsp=MagicMock(controller=controller, description="EZSP"), ): settings = await api.async_get_network_settings(hass) @@ -68,7 +68,7 @@ async def test_async_get_network_settings_missing( """Test reading settings with an inactive ZHA installation, no valid channel.""" await setup_zha() - gateway = get_zha_gateway_proxy(hass) + gateway = get_zha_gateway(hass) await hass.config_entries.async_unload(gateway.config_entry.entry_id) # Network settings were never loaded for whatever reason diff --git a/tests/components/zha/test_base.py b/tests/components/zha/test_base.py new file mode 100644 index 00000000000..203df2ffda5 --- /dev/null +++ b/tests/components/zha/test_base.py @@ -0,0 +1,19 @@ +"""Test ZHA base cluster handlers module.""" + +from homeassistant.components.zha.core.cluster_handlers import parse_and_log_command + +from .test_cluster_handlers import ( # noqa: F401 + endpoint, + poll_control_ch, + zigpy_coordinator_device, +) + + +def test_parse_and_log_command(poll_control_ch) -> None: # noqa: F811 + """Test that `parse_and_log_command` correctly parses a known command.""" + assert parse_and_log_command(poll_control_ch, 0x00, 0x01, []) == "fast_poll_stop" + + +def test_parse_and_log_command_unknown(poll_control_ch) -> None: # noqa: F811 + """Test that `parse_and_log_command` correctly parses an unknown command.""" + assert 
parse_and_log_command(poll_control_ch, 0x00, 0xAB, []) == "0xAB" diff --git a/tests/components/zha/test_binary_sensor.py b/tests/components/zha/test_binary_sensor.py index a9765a1b547..8276223926d 100644 --- a/tests/components/zha/test_binary_sensor.py +++ b/tests/components/zha/test_binary_sensor.py @@ -1,26 +1,54 @@ """Test ZHA binary sensor.""" +from collections.abc import Callable +from typing import Any from unittest.mock import patch import pytest -from zigpy.profiles import zha -from zigpy.zcl.clusters import general +import zigpy.profiles.zha +from zigpy.zcl.clusters import general, measurement, security -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, -) -from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from .common import find_entity_id, send_attributes_report +from .common import ( + async_enable_traffic, + async_test_rejoin, + find_entity_id, + send_attributes_report, +) from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -ON = 1 -OFF = 0 +from tests.common import async_mock_load_restore_state_from_storage + +DEVICE_IAS = { + 1: { + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.IAS_ZONE, + SIG_EP_INPUT: [security.IasZone.cluster_id], + SIG_EP_OUTPUT: [], + } +} + + +DEVICE_OCCUPANCY = { + 1: { + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.OCCUPANCY_SENSOR, + SIG_EP_INPUT: [measurement.OccupancySensing.cluster_id], + SIG_EP_OUTPUT: [], + } +} + + +DEVICE_ONOFF = { + 1: { + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SENSOR, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [general.OnOff.cluster_id], + } +} @pytest.fixture(autouse=True) @@ -30,69 +58,121 @@ def binary_sensor_platform_only(): "homeassistant.components.zha.PLATFORMS", ( Platform.BINARY_SENSOR, - Platform.SENSOR, + Platform.DEVICE_TRACKER, + Platform.NUMBER, + Platform.SELECT, ), ): yield -async def test_binary_sensor( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - setup_zha, - zigpy_device_mock, -) -> None: - """Test ZHA binary_sensor platform.""" - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_PROFILE: zha.PROFILE_ID, - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SENSOR, - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [general.OnOff.cluster_id], - } - }, - ieee="01:2d:6f:00:0a:90:69:e8", - ) - cluster = zigpy_device.endpoints[1].out_clusters[general.OnOff.cluster_id] - - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - entity_id = find_entity_id(Platform.BINARY_SENSOR, zha_device_proxy, hass) - assert entity_id is not None - - assert hass.states.get(entity_id).state == STATE_OFF - - await send_attributes_report( - hass, cluster, {general.OnOff.AttributeDefs.on_off.id: ON} - ) +async def async_test_binary_sensor_on_off(hass, cluster, entity_id): + """Test getting on and off messages for binary sensors.""" + # binary sensor on + await 
send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 2}) assert hass.states.get(entity_id).state == STATE_ON - await send_attributes_report( - hass, cluster, {general.OnOff.AttributeDefs.on_off.id: OFF} - ) + # binary sensor off + await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2}) assert hass.states.get(entity_id).state == STATE_OFF - # test enable / disable sync w/ ZHA library - entity_entry = entity_registry.async_get(entity_id) - entity_key = (Platform.BINARY_SENSOR, entity_entry.unique_id) - assert zha_device_proxy.device.platform_entities.get(entity_key).enabled - entity_registry.async_update_entity( - entity_id=entity_id, disabled_by=er.RegistryEntryDisabler.USER - ) +async def async_test_iaszone_on_off(hass, cluster, entity_id): + """Test getting on and off messages for iaszone binary sensors.""" + # binary sensor on + cluster.listener_event("cluster_command", 1, 0, [1]) await hass.async_block_till_done() + assert hass.states.get(entity_id).state == STATE_ON - assert not zha_device_proxy.device.platform_entities.get(entity_key).enabled - - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + # binary sensor off + cluster.listener_event("cluster_command", 1, 0, [0]) await hass.async_block_till_done() + assert hass.states.get(entity_id).state == STATE_OFF - assert zha_device_proxy.device.platform_entities.get(entity_key).enabled + # check that binary sensor remains off when non-alarm bits change + cluster.listener_event("cluster_command", 1, 0, [0b1111111100]) + await hass.async_block_till_done() + assert hass.states.get(entity_id).state == STATE_OFF + + +@pytest.mark.parametrize( + ("device", "on_off_test", "cluster_name", "reporting", "name"), + [ + ( + DEVICE_IAS, + async_test_iaszone_on_off, + "ias_zone", + (0,), + "FakeManufacturer FakeModel IAS zone", + ), + ( + DEVICE_OCCUPANCY, + async_test_binary_sensor_on_off, + "occupancy", + (1,), + "FakeManufacturer FakeModel Occupancy", + ), + ], +) +async def test_binary_sensor( + hass: HomeAssistant, + zigpy_device_mock, + zha_device_joined_restored, + device, + on_off_test, + cluster_name, + reporting, + name, +) -> None: + """Test ZHA binary_sensor platform.""" + zigpy_device = zigpy_device_mock(device) + zha_device = await zha_device_joined_restored(zigpy_device) + entity_id = find_entity_id(Platform.BINARY_SENSOR, zha_device, hass) + assert entity_id is not None + + assert hass.states.get(entity_id).name == name + assert hass.states.get(entity_id).state == STATE_OFF + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the sensors exist and are in the unavailable state + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + await async_enable_traffic(hass, [zha_device]) + + # test that the sensors exist and are in the off state + assert hass.states.get(entity_id).state == STATE_OFF + + # test getting messages that trigger and reset the sensors + cluster = getattr(zigpy_device.endpoints[1], cluster_name) + await on_off_test(hass, cluster, entity_id) + + # test rejoin + await async_test_rejoin(hass, zigpy_device, [cluster], reporting) + assert hass.states.get(entity_id).state == STATE_OFF + + +@pytest.mark.parametrize( + "restored_state", + [ + STATE_ON, + STATE_OFF, + ], +) +async def test_onoff_binary_sensor_restore_state( + hass: HomeAssistant, + zigpy_device_mock, + core_rs: Callable[[str, Any, dict[str, Any]], None], + zha_device_restored, + restored_state: str, +) -> None: + """Test ZHA OnOff binary_sensor restores last state from HA.""" + + entity_id = 
"binary_sensor.fakemanufacturer_fakemodel_opening" + core_rs(entity_id, state=restored_state, attributes={}) + await async_mock_load_restore_state_from_storage(hass) + + zigpy_device = zigpy_device_mock(DEVICE_ONOFF) + zha_device = await zha_device_restored(zigpy_device) + entity_id = find_entity_id(Platform.BINARY_SENSOR, zha_device, hass) + + assert entity_id is not None + assert hass.states.get(entity_id).state == restored_state diff --git a/tests/components/zha/test_button.py b/tests/components/zha/test_button.py index 33ed004312b..fdcc0d7271c 100644 --- a/tests/components/zha/test_button.py +++ b/tests/components/zha/test_button.py @@ -1,25 +1,29 @@ """Test ZHA button.""" -from unittest.mock import patch +from typing import Final +from unittest.mock import call, patch from freezegun import freeze_time import pytest -from zigpy.const import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from zhaquirks.const import ( + DEVICE_TYPE, + ENDPOINTS, + INPUT_CLUSTERS, + OUTPUT_CLUSTERS, + PROFILE_ID, +) +from zhaquirks.tuya.ts0601_valve import ParksideTuyaValveManufCluster +from zigpy.const import SIG_EP_PROFILE +from zigpy.exceptions import ZigbeeException from zigpy.profiles import zha -from zigpy.zcl.clusters import general +from zigpy.quirks import CustomCluster, CustomDevice +from zigpy.quirks.v2 import add_to_registry_v2 +import zigpy.types as t +from zigpy.zcl.clusters import general, security +from zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster import zigpy.zcl.foundation as zcl_f -from homeassistant.components.button import ( - DOMAIN as BUTTON_DOMAIN, - SERVICE_PRESS, - ButtonDeviceClass, -) -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, -) +from homeassistant.components.button import DOMAIN, SERVICE_PRESS, ButtonDeviceClass from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, @@ -28,9 +32,11 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from .common import find_entity_id +from .common import find_entity_id, update_attribute_cache +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE @pytest.fixture(autouse=True) @@ -38,53 +44,106 @@ def button_platform_only(): """Only set up the button and required base platforms to speed up tests.""" with patch( "homeassistant.components.zha.PLATFORMS", - (Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SENSOR), + ( + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.DEVICE_TRACKER, + Platform.NUMBER, + Platform.SELECT, + Platform.SENSOR, + Platform.SWITCH, + ), ): yield @pytest.fixture -async def setup_zha_integration(hass: HomeAssistant, setup_zha): - """Set up ZHA component.""" - - # if we call this in the test itself the test hangs forever - await setup_zha() - - -@freeze_time("2021-11-04 17:37:00", tz_offset=-1) -async def test_button( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - setup_zha_integration, # pylint: disable=unused-argument - zigpy_device_mock, -) -> None: - """Test ZHA button platform.""" - - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) +async def contact_sensor( + hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored +): + """Contact sensor fixture.""" zigpy_device = zigpy_device_mock( { 1: { - SIG_EP_PROFILE: zha.PROFILE_ID, - 
SIG_EP_TYPE: zha.DeviceType.ON_OFF_SENSOR, SIG_EP_INPUT: [ general.Basic.cluster_id, general.Identify.cluster_id, + security.IasZone.cluster_id, ], - SIG_EP_OUTPUT: [general.OnOff.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.IAS_ZONE, + SIG_EP_PROFILE: zha.PROFILE_ID, } }, - ieee="01:2d:6f:00:0a:90:69:e8", ) - cluster = zigpy_device.endpoints[1].identify - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) + zha_device = await zha_device_joined_restored(zigpy_device) + return zha_device, zigpy_device.endpoints[1].identify - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - entity_id = find_entity_id(Platform.BUTTON, zha_device_proxy, hass) + +class FrostLockQuirk(CustomDevice): + """Quirk with frost lock attribute.""" + + class TuyaManufCluster(CustomCluster, ManufacturerSpecificCluster): + """Tuya manufacturer specific cluster.""" + + cluster_id = 0xEF00 + ep_attribute = "tuya_manufacturer" + + attributes = {0xEF01: ("frost_lock_reset", t.Bool)} + + replacement = { + ENDPOINTS: { + 1: { + PROFILE_ID: zha.PROFILE_ID, + DEVICE_TYPE: zha.DeviceType.ON_OFF_SWITCH, + INPUT_CLUSTERS: [general.Basic.cluster_id, TuyaManufCluster], + OUTPUT_CLUSTERS: [], + }, + } + } + + +@pytest.fixture +async def tuya_water_valve( + hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored +): + """Tuya Water Valve fixture.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + PROFILE_ID: zha.PROFILE_ID, + DEVICE_TYPE: zha.DeviceType.ON_OFF_SWITCH, + INPUT_CLUSTERS: [ + general.Basic.cluster_id, + general.Identify.cluster_id, + general.Groups.cluster_id, + general.Scenes.cluster_id, + general.OnOff.cluster_id, + ParksideTuyaValveManufCluster.cluster_id, + ], + OUTPUT_CLUSTERS: [general.Time.cluster_id, general.Ota.cluster_id], + }, + }, + manufacturer="_TZE200_htnnfasr", + model="TS0601", + ) + + zha_device = await zha_device_joined_restored(zigpy_device) + return zha_device, zigpy_device.endpoints[1].tuya_manufacturer + + +@freeze_time("2021-11-04 17:37:00", tz_offset=-1) +async def test_button( + hass: HomeAssistant, entity_registry: er.EntityRegistry, contact_sensor +) -> None: + """Test ZHA button platform.""" + + zha_device, cluster = contact_sensor + assert cluster is not None + entity_id = find_entity_id(DOMAIN, zha_device, hass) assert entity_id is not None state = hass.states.get(entity_id) @@ -101,7 +160,7 @@ async def test_button( return_value=[0x00, zcl_f.Status.SUCCESS], ): await hass.services.async_call( - BUTTON_DOMAIN, + DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -116,3 +175,198 @@ async def test_button( assert state assert state.state == "2021-11-04T16:37:00+00:00" assert state.attributes[ATTR_DEVICE_CLASS] == ButtonDeviceClass.IDENTIFY + + +async def test_frost_unlock( + hass: HomeAssistant, entity_registry: er.EntityRegistry, tuya_water_valve +) -> None: + """Test custom frost unlock ZHA button.""" + + zha_device, cluster = tuya_water_valve + assert cluster is not None + entity_id = find_entity_id(DOMAIN, zha_device, hass, qualifier="frost_lock_reset") + assert entity_id is not None + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_UNKNOWN + assert state.attributes[ATTR_DEVICE_CLASS] == ButtonDeviceClass.RESTART + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.entity_category == EntityCategory.CONFIG + + with patch( + 
"zigpy.zcl.Cluster.request", + return_value=[0x00, zcl_f.Status.SUCCESS], + ): + await hass.services.async_call( + DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + await hass.async_block_till_done() + assert cluster.write_attributes.mock_calls == [ + call({"frost_lock_reset": 0}, manufacturer=None) + ] + + state = hass.states.get(entity_id) + assert state + assert state.attributes[ATTR_DEVICE_CLASS] == ButtonDeviceClass.RESTART + + cluster.write_attributes.reset_mock() + cluster.write_attributes.side_effect = ZigbeeException + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + # There are three retries + assert cluster.write_attributes.mock_calls == [ + call({"frost_lock_reset": 0}, manufacturer=None), + call({"frost_lock_reset": 0}, manufacturer=None), + call({"frost_lock_reset": 0}, manufacturer=None), + ] + + +class FakeManufacturerCluster(CustomCluster, ManufacturerSpecificCluster): + """Fake manufacturer cluster.""" + + cluster_id: Final = 0xFFF3 + ep_attribute: Final = "mfg_identify" + + class AttributeDefs(zcl_f.BaseAttributeDefs): + """Attribute definitions.""" + + feed: Final = zcl_f.ZCLAttributeDef( + id=0x0000, type=t.uint8_t, access="rw", is_manufacturer_specific=True + ) + + class ServerCommandDefs(zcl_f.BaseCommandDefs): + """Server command definitions.""" + + self_test: Final = zcl_f.ZCLCommandDef( + id=0x00, schema={"identify_time": t.uint16_t}, direction=False + ) + + +( + add_to_registry_v2("Fake_Model", "Fake_Manufacturer") + .replaces(FakeManufacturerCluster) + .command_button( + FakeManufacturerCluster.ServerCommandDefs.self_test.name, + FakeManufacturerCluster.cluster_id, + command_args=(5,), + ) + .write_attr_button( + FakeManufacturerCluster.AttributeDefs.feed.name, + 2, + FakeManufacturerCluster.cluster_id, + ) +) + + +@pytest.fixture +async def custom_button_device( + hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored +): + """Button device fixture for quirks button tests.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.Basic.cluster_id, + FakeManufacturerCluster.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.REMOTE_CONTROL, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + manufacturer="Fake_Model", + model="Fake_Manufacturer", + ) + + zigpy_device.endpoints[1].mfg_identify.PLUGGED_ATTR_READS = { + FakeManufacturerCluster.AttributeDefs.feed.name: 0, + } + update_attribute_cache(zigpy_device.endpoints[1].mfg_identify) + zha_device = await zha_device_joined_restored(zigpy_device) + return zha_device, zigpy_device.endpoints[1].mfg_identify + + +@freeze_time("2021-11-04 17:37:00", tz_offset=-1) +async def test_quirks_command_button(hass: HomeAssistant, custom_button_device) -> None: + """Test ZHA button platform.""" + + zha_device, cluster = custom_button_device + assert cluster is not None + entity_id = find_entity_id(DOMAIN, zha_device, hass, qualifier="self_test") + assert entity_id is not None + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_UNKNOWN + + with patch( + "zigpy.zcl.Cluster.request", + return_value=[0x00, zcl_f.Status.SUCCESS], + ): + await hass.services.async_call( + DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + await hass.async_block_till_done() + assert len(cluster.request.mock_calls) == 1 + assert cluster.request.call_args[0][0] is False + assert cluster.request.call_args[0][1] == 0 
+ assert cluster.request.call_args[0][3] == 5 # duration in seconds + + state = hass.states.get(entity_id) + assert state + assert state.state == "2021-11-04T16:37:00+00:00" + + +@freeze_time("2021-11-04 17:37:00", tz_offset=-1) +async def test_quirks_write_attr_button( + hass: HomeAssistant, custom_button_device +) -> None: + """Test ZHA button platform.""" + + zha_device, cluster = custom_button_device + assert cluster is not None + entity_id = find_entity_id(DOMAIN, zha_device, hass, qualifier="feed") + assert entity_id is not None + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_UNKNOWN + assert cluster.get(cluster.AttributeDefs.feed.name) == 0 + + with patch( + "zigpy.zcl.Cluster.request", + return_value=[0x00, zcl_f.Status.SUCCESS], + ): + await hass.services.async_call( + DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + await hass.async_block_till_done() + assert cluster.write_attributes.mock_calls == [ + call({cluster.AttributeDefs.feed.name: 2}, manufacturer=None) + ] + + state = hass.states.get(entity_id) + assert state + assert state.state == "2021-11-04T16:37:00+00:00" + assert cluster.get(cluster.AttributeDefs.feed.name) == 2 diff --git a/tests/components/zha/test_climate.py b/tests/components/zha/test_climate.py index 7b94db51d04..32ef08fcd96 100644 --- a/tests/components/zha/test_climate.py +++ b/tests/components/zha/test_climate.py @@ -1,17 +1,17 @@ """Test ZHA climate.""" from typing import Literal -from unittest.mock import patch +from unittest.mock import call, patch import pytest -from zha.application.platforms.climate.const import HVAC_MODE_2_SYSTEM, SEQ_OF_OPERATION import zhaquirks.sinope.thermostat +from zhaquirks.sinope.thermostat import SinopeTechnologiesThermostatCluster import zhaquirks.tuya.ts0601_trv import zigpy.profiles -from zigpy.profiles import zha import zigpy.types import zigpy.zcl.clusters from zigpy.zcl.clusters.hvac import Thermostat +import zigpy.zcl.foundation as zcl_f from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, @@ -28,6 +28,10 @@ from homeassistant.components.climate import ( FAN_LOW, FAN_ON, PRESET_AWAY, + PRESET_BOOST, + PRESET_COMFORT, + PRESET_ECO, + PRESET_NONE, SERVICE_SET_FAN_MODE, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, @@ -35,11 +39,13 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.components.zha.helpers import ( - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, +from homeassistant.components.zha.climate import HVAC_MODE_2_SYSTEM, SEQ_OF_OPERATION +from homeassistant.components.zha.core.const import ( + PRESET_COMPLEX, + PRESET_SCHEDULE, + PRESET_TEMP_MANUAL, ) +from homeassistant.components.zha.core.device import ZHADevice from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, @@ -47,15 +53,15 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from .common import find_entity_id, send_attributes_report +from .common import async_enable_traffic, find_entity_id, send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE CLIMATE = { 1: { - SIG_EP_PROFILE: zha.PROFILE_ID, - SIG_EP_TYPE: zha.DeviceType.THERMOSTAT, + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, SIG_EP_INPUT: [ 
zigpy.zcl.clusters.general.Basic.cluster_id, zigpy.zcl.clusters.general.Identify.cluster_id, @@ -68,8 +74,8 @@ CLIMATE = { CLIMATE_FAN = { 1: { - SIG_EP_PROFILE: zha.PROFILE_ID, - SIG_EP_TYPE: zha.DeviceType.THERMOSTAT, + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, SIG_EP_INPUT: [ zigpy.zcl.clusters.general.Basic.cluster_id, zigpy.zcl.clusters.general.Identify.cluster_id, @@ -102,7 +108,72 @@ CLIMATE_SINOPE = { }, } +CLIMATE_ZEN = { + 1: { + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, + SIG_EP_INPUT: [ + zigpy.zcl.clusters.general.Basic.cluster_id, + zigpy.zcl.clusters.general.Identify.cluster_id, + zigpy.zcl.clusters.hvac.Fan.cluster_id, + zigpy.zcl.clusters.hvac.Thermostat.cluster_id, + zigpy.zcl.clusters.hvac.UserInterface.cluster_id, + ], + SIG_EP_OUTPUT: [zigpy.zcl.clusters.general.Ota.cluster_id], + } +} + +CLIMATE_MOES = { + 1: { + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, + SIG_EP_INPUT: [ + zigpy.zcl.clusters.general.Basic.cluster_id, + zigpy.zcl.clusters.general.Identify.cluster_id, + zigpy.zcl.clusters.hvac.Thermostat.cluster_id, + zigpy.zcl.clusters.hvac.UserInterface.cluster_id, + 61148, + ], + SIG_EP_OUTPUT: [zigpy.zcl.clusters.general.Ota.cluster_id], + } +} + +CLIMATE_BECA = { + 1: { + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SMART_PLUG, + SIG_EP_INPUT: [ + zigpy.zcl.clusters.general.Basic.cluster_id, + zigpy.zcl.clusters.general.Groups.cluster_id, + zigpy.zcl.clusters.general.Scenes.cluster_id, + 61148, + ], + SIG_EP_OUTPUT: [ + zigpy.zcl.clusters.general.Time.cluster_id, + zigpy.zcl.clusters.general.Ota.cluster_id, + ], + } +} + +CLIMATE_ZONNSMART = { + 1: { + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, + SIG_EP_INPUT: [ + zigpy.zcl.clusters.general.Basic.cluster_id, + zigpy.zcl.clusters.hvac.Thermostat.cluster_id, + zigpy.zcl.clusters.hvac.UserInterface.cluster_id, + 61148, + ], + SIG_EP_OUTPUT: [zigpy.zcl.clusters.general.Ota.cluster_id], + } +} + MANUF_SINOPE = "Sinope Technologies" +MANUF_ZEN = "Zen Within" +MANUF_MOES = "_TZE200_ckud7u2l" +MANUF_BECA = "_TZE200_b6wax7g0" +MANUF_ZONNSMART = "_TZE200_hue3yfsn" ZCL_ATTR_PLUG = { "abs_min_heat_setpoint_limit": 800, @@ -147,22 +218,22 @@ def climate_platform_only(): @pytest.fixture -def device_climate_mock(hass: HomeAssistant, setup_zha, zigpy_device_mock): +def device_climate_mock(hass, zigpy_device_mock, zha_device_joined): """Test regular thermostat device.""" async def _dev(clusters, plug=None, manuf=None, quirk=None): - plugged_attrs = ZCL_ATTR_PLUG if plug is None else {**ZCL_ATTR_PLUG, **plug} + if plug is None: + plugged_attrs = ZCL_ATTR_PLUG + else: + plugged_attrs = {**ZCL_ATTR_PLUG, **plug} + zigpy_device = zigpy_device_mock(clusters, manufacturer=manuf, quirk=quirk) zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 zigpy_device.endpoints[1].thermostat.PLUGGED_ATTR_READS = plugged_attrs - - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - return gateway_proxy.get_device_proxy(zigpy_device.ieee) + zha_device = await zha_device_joined(zigpy_device) + await async_enable_traffic(hass, 
[zha_device]) + await hass.async_block_till_done() + return zha_device return _dev @@ -197,6 +268,44 @@ async def device_climate_sinope(device_climate_mock): ) +@pytest.fixture +async def device_climate_zen(device_climate_mock): + """Zen Within thermostat.""" + + return await device_climate_mock(CLIMATE_ZEN, manuf=MANUF_ZEN) + + +@pytest.fixture +async def device_climate_moes(device_climate_mock): + """MOES thermostat.""" + + return await device_climate_mock( + CLIMATE_MOES, manuf=MANUF_MOES, quirk=zhaquirks.tuya.ts0601_trv.MoesHY368_Type1 + ) + + +@pytest.fixture +async def device_climate_beca(device_climate_mock) -> ZHADevice: + """Beca thermostat.""" + + return await device_climate_mock( + CLIMATE_BECA, + manuf=MANUF_BECA, + quirk=zhaquirks.tuya.ts0601_trv.MoesHY368_Type1new, + ) + + +@pytest.fixture +async def device_climate_zonnsmart(device_climate_mock): + """ZONNSMART thermostat.""" + + return await device_climate_mock( + CLIMATE_ZONNSMART, + manuf=MANUF_ZONNSMART, + quirk=zhaquirks.tuya.ts0601_trv.ZonnsmartTV01_ZG, + ) + + def test_sequence_mappings() -> None: """Test correct mapping between control sequence -> HVAC Mode -> Sysmode.""" @@ -209,7 +318,7 @@ def test_sequence_mappings() -> None: async def test_climate_local_temperature(hass: HomeAssistant, device_climate) -> None: """Test local temperature.""" - thrm_cluster = device_climate.device.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) state = hass.states.get(entity_id) @@ -225,7 +334,7 @@ async def test_climate_hvac_action_running_state( ) -> None: """Test hvac action via running state.""" - thrm_cluster = device_climate_sinope.device.device.endpoints[1].thermostat + thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass) sensor_entity_id = find_entity_id( Platform.SENSOR, device_climate_sinope, hass, "hvac" @@ -285,12 +394,101 @@ async def test_climate_hvac_action_running_state( assert hvac_sensor_state.state == HVACAction.FAN +async def test_climate_hvac_action_running_state_zen( + hass: HomeAssistant, device_climate_zen +) -> None: + """Test Zen hvac action via running state.""" + + thrm_cluster = device_climate_zen.device.endpoints[1].thermostat + entity_id = find_entity_id(Platform.CLIMATE, device_climate_zen, hass) + sensor_entity_id = find_entity_id( + Platform.SENSOR, device_climate_zen, hass, "hvac_action" + ) + + state = hass.states.get(entity_id) + assert ATTR_HVAC_ACTION not in state.attributes + hvac_sensor_state = hass.states.get(sensor_entity_id) + assert hvac_sensor_state.state == "unknown" + + await send_attributes_report( + hass, thrm_cluster, {0x0029: Thermostat.RunningState.Cool_2nd_Stage_On} + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING + hvac_sensor_state = hass.states.get(sensor_entity_id) + assert hvac_sensor_state.state == HVACAction.COOLING + + await send_attributes_report( + hass, thrm_cluster, {0x0029: Thermostat.RunningState.Fan_State_On} + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.FAN + hvac_sensor_state = hass.states.get(sensor_entity_id) + assert hvac_sensor_state.state == HVACAction.FAN + + await send_attributes_report( + hass, thrm_cluster, {0x0029: Thermostat.RunningState.Heat_2nd_Stage_On} + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_HVAC_ACTION] == 
HVACAction.HEATING + hvac_sensor_state = hass.states.get(sensor_entity_id) + assert hvac_sensor_state.state == HVACAction.HEATING + + await send_attributes_report( + hass, thrm_cluster, {0x0029: Thermostat.RunningState.Fan_2nd_Stage_On} + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.FAN + hvac_sensor_state = hass.states.get(sensor_entity_id) + assert hvac_sensor_state.state == HVACAction.FAN + + await send_attributes_report( + hass, thrm_cluster, {0x0029: Thermostat.RunningState.Cool_State_On} + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING + hvac_sensor_state = hass.states.get(sensor_entity_id) + assert hvac_sensor_state.state == HVACAction.COOLING + + await send_attributes_report( + hass, thrm_cluster, {0x0029: Thermostat.RunningState.Fan_3rd_Stage_On} + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.FAN + hvac_sensor_state = hass.states.get(sensor_entity_id) + assert hvac_sensor_state.state == HVACAction.FAN + + await send_attributes_report( + hass, thrm_cluster, {0x0029: Thermostat.RunningState.Heat_State_On} + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING + hvac_sensor_state = hass.states.get(sensor_entity_id) + assert hvac_sensor_state.state == HVACAction.HEATING + + await send_attributes_report( + hass, thrm_cluster, {0x0029: Thermostat.RunningState.Idle} + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.OFF + hvac_sensor_state = hass.states.get(sensor_entity_id) + assert hvac_sensor_state.state == HVACAction.OFF + + await send_attributes_report( + hass, thrm_cluster, {0x001C: Thermostat.SystemMode.Heat} + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + hvac_sensor_state = hass.states.get(sensor_entity_id) + assert hvac_sensor_state.state == HVACAction.IDLE + + async def test_climate_hvac_action_pi_demand( hass: HomeAssistant, device_climate ) -> None: """Test hvac action based on pi_heating/cooling_demand attrs.""" - thrm_cluster = device_climate.device.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) state = hass.states.get(entity_id) @@ -339,7 +537,7 @@ async def test_hvac_mode( ) -> None: """Test HVAC mode.""" - thrm_cluster = device_climate.device.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) state = hass.states.get(entity_id) @@ -516,7 +714,7 @@ async def test_set_hvac_mode( ) -> None: """Test setting hvac mode.""" - thrm_cluster = device_climate.device.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) state = hass.states.get(entity_id) @@ -555,11 +753,134 @@ async def test_set_hvac_mode( } +async def test_preset_setting(hass: HomeAssistant, device_climate_sinope) -> None: + """Test preset setting.""" + + entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass) + thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE + + # unsuccessful occupancy change + thrm_cluster.write_attributes.return_value = [ 
+ zcl_f.WriteAttributesResponse( + [ + zcl_f.WriteAttributesStatusRecord( + status=zcl_f.Status.FAILURE, + attrid=SinopeTechnologiesThermostatCluster.AttributeDefs.set_occupancy.id, + ) + ] + ) + ] + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, + blocking=True, + ) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE + assert thrm_cluster.write_attributes.call_count == 1 + assert thrm_cluster.write_attributes.call_args[0][0] == {"set_occupancy": 0} + + # successful occupancy change + thrm_cluster.write_attributes.reset_mock() + thrm_cluster.write_attributes.return_value = [ + zcl_f.WriteAttributesResponse( + [zcl_f.WriteAttributesStatusRecord(status=zcl_f.Status.SUCCESS)] + ) + ] + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, + blocking=True, + ) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY + assert thrm_cluster.write_attributes.call_count == 1 + assert thrm_cluster.write_attributes.call_args[0][0] == {"set_occupancy": 0} + + # unsuccessful occupancy change + thrm_cluster.write_attributes.reset_mock() + thrm_cluster.write_attributes.return_value = [ + zcl_f.WriteAttributesResponse( + [ + zcl_f.WriteAttributesStatusRecord( + status=zcl_f.Status.FAILURE, + attrid=SinopeTechnologiesThermostatCluster.AttributeDefs.set_occupancy.id, + ) + ] + ) + ] + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_NONE}, + blocking=True, + ) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY + assert thrm_cluster.write_attributes.call_count == 1 + assert thrm_cluster.write_attributes.call_args[0][0] == {"set_occupancy": 1} + + # successful occupancy change + thrm_cluster.write_attributes.reset_mock() + thrm_cluster.write_attributes.return_value = [ + zcl_f.WriteAttributesResponse( + [zcl_f.WriteAttributesStatusRecord(status=zcl_f.Status.SUCCESS)] + ) + ] + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_NONE}, + blocking=True, + ) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE + assert thrm_cluster.write_attributes.call_count == 1 + assert thrm_cluster.write_attributes.call_args[0][0] == {"set_occupancy": 1} + + +async def test_preset_setting_invalid( + hass: HomeAssistant, device_climate_sinope +) -> None: + """Test invalid preset setting.""" + + entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass) + thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: "invalid_preset"}, + blocking=True, + ) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE + assert thrm_cluster.write_attributes.call_count == 0 + + async def test_set_temperature_hvac_mode(hass: HomeAssistant, device_climate) -> None: """Test setting HVAC mode in temperature service 
call.""" entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.OFF @@ -601,7 +922,7 @@ async def test_set_temperature_heat_cool( quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat, ) entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT_COOL @@ -687,7 +1008,7 @@ async def test_set_temperature_heat(hass: HomeAssistant, device_climate_mock) -> quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat, ) entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT @@ -766,7 +1087,7 @@ async def test_set_temperature_cool(hass: HomeAssistant, device_climate_mock) -> quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat, ) entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.COOL @@ -851,7 +1172,7 @@ async def test_set_temperature_wrong_mode( manuf=MANUF_SINOPE, ) entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.DRY @@ -870,11 +1191,38 @@ async def test_set_temperature_wrong_mode( assert thrm_cluster.write_attributes.await_count == 0 +async def test_occupancy_reset(hass: HomeAssistant, device_climate_sinope) -> None: + """Test away preset reset.""" + + entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass) + thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, + blocking=True, + ) + thrm_cluster.write_attributes.reset_mock() + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY + + await send_attributes_report( + hass, thrm_cluster, {"occupied_heating_setpoint": zigpy.types.uint16_t(1950)} + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE + + async def test_fan_mode(hass: HomeAssistant, device_climate_fan) -> None: """Test fan mode.""" entity_id = find_entity_id(Platform.CLIMATE, device_climate_fan, hass) - thrm_cluster = device_climate_fan.device.device.endpoints[1].thermostat + thrm_cluster = device_climate_fan.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert set(state.attributes[ATTR_FAN_MODES]) == {FAN_AUTO, FAN_ON} @@ -905,7 +1253,7 @@ async def test_set_fan_mode_not_supported( """Test fan setting unsupported mode.""" entity_id = find_entity_id(Platform.CLIMATE, device_climate_fan, hass) - fan_cluster = 
device_climate_fan.device.device.endpoints[1].fan + fan_cluster = device_climate_fan.device.endpoints[1].fan with pytest.raises(ServiceValidationError): await hass.services.async_call( @@ -921,7 +1269,7 @@ async def test_set_fan_mode(hass: HomeAssistant, device_climate_fan) -> None: """Test fan mode setting.""" entity_id = find_entity_id(Platform.CLIMATE, device_climate_fan, hass) - fan_cluster = device_climate_fan.device.device.endpoints[1].fan + fan_cluster = device_climate_fan.device.endpoints[1].fan state = hass.states.get(entity_id) assert state.attributes[ATTR_FAN_MODE] == FAN_AUTO @@ -944,3 +1292,309 @@ async def test_set_fan_mode(hass: HomeAssistant, device_climate_fan) -> None: ) assert fan_cluster.write_attributes.await_count == 1 assert fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 5} + + +async def test_set_moes_preset(hass: HomeAssistant, device_climate_moes) -> None: + """Test setting preset for moes trv.""" + + entity_id = find_entity_id(Platform.CLIMATE, device_climate_moes, hass) + thrm_cluster = device_climate_moes.device.endpoints[1].thermostat + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, + blocking=True, + ) + + assert thrm_cluster.write_attributes.await_count == 1 + assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { + "operation_preset": 0 + } + + thrm_cluster.write_attributes.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_SCHEDULE}, + blocking=True, + ) + + assert thrm_cluster.write_attributes.await_count == 2 + assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { + "operation_preset": 2 + } + assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { + "operation_preset": 1 + } + + thrm_cluster.write_attributes.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_COMFORT}, + blocking=True, + ) + + assert thrm_cluster.write_attributes.await_count == 2 + assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { + "operation_preset": 2 + } + assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { + "operation_preset": 3 + } + + thrm_cluster.write_attributes.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_ECO}, + blocking=True, + ) + + assert thrm_cluster.write_attributes.await_count == 2 + assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { + "operation_preset": 2 + } + assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { + "operation_preset": 4 + } + + thrm_cluster.write_attributes.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_BOOST}, + blocking=True, + ) + + assert thrm_cluster.write_attributes.await_count == 2 + assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { + "operation_preset": 2 + } + assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { + "operation_preset": 5 + } + + thrm_cluster.write_attributes.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_COMPLEX}, + 
blocking=True, + ) + + assert thrm_cluster.write_attributes.await_count == 2 + assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { + "operation_preset": 2 + } + assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { + "operation_preset": 6 + } + + thrm_cluster.write_attributes.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_NONE}, + blocking=True, + ) + + assert thrm_cluster.write_attributes.await_count == 1 + assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { + "operation_preset": 2 + } + + +async def test_set_moes_operation_mode( + hass: HomeAssistant, device_climate_moes +) -> None: + """Test setting preset for moes trv.""" + + entity_id = find_entity_id(Platform.CLIMATE, device_climate_moes, hass) + thrm_cluster = device_climate_moes.device.endpoints[1].thermostat + + await send_attributes_report(hass, thrm_cluster, {"operation_preset": 0}) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY + + await send_attributes_report(hass, thrm_cluster, {"operation_preset": 1}) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_SCHEDULE + + await send_attributes_report(hass, thrm_cluster, {"operation_preset": 2}) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE + + await send_attributes_report(hass, thrm_cluster, {"operation_preset": 3}) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_COMFORT + + await send_attributes_report(hass, thrm_cluster, {"operation_preset": 4}) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_ECO + + await send_attributes_report(hass, thrm_cluster, {"operation_preset": 5}) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_BOOST + + await send_attributes_report(hass, thrm_cluster, {"operation_preset": 6}) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_COMPLEX + + +@pytest.mark.parametrize( + ("preset_attr", "preset_mode"), + [ + (0, PRESET_AWAY), + (1, PRESET_SCHEDULE), + # pylint: disable-next=fixme + # (2, PRESET_NONE), # TODO: why does this not work? 
+ (4, PRESET_ECO), + (5, PRESET_BOOST), + (7, PRESET_TEMP_MANUAL), + ], +) +async def test_beca_operation_mode_update( + hass: HomeAssistant, + device_climate_beca: ZHADevice, + preset_attr: int, + preset_mode: str, +) -> None: + """Test beca trv operation mode attribute update.""" + + entity_id = find_entity_id(Platform.CLIMATE, device_climate_beca, hass) + thrm_cluster = device_climate_beca.device.endpoints[1].thermostat + + # Test sending an attribute report + await send_attributes_report(hass, thrm_cluster, {"operation_preset": preset_attr}) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == preset_mode + + # Test setting the preset + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: preset_mode}, + blocking=True, + ) + + assert thrm_cluster.write_attributes.mock_calls == [ + call( + {"operation_preset": preset_attr}, + manufacturer=device_climate_beca.manufacturer_code, + ) + ] + + +async def test_set_zonnsmart_preset( + hass: HomeAssistant, device_climate_zonnsmart +) -> None: + """Test setting preset from homeassistant for zonnsmart trv.""" + + entity_id = find_entity_id(Platform.CLIMATE, device_climate_zonnsmart, hass) + thrm_cluster = device_climate_zonnsmart.device.endpoints[1].thermostat + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_SCHEDULE}, + blocking=True, + ) + + assert thrm_cluster.write_attributes.await_count == 1 + assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { + "operation_preset": 0 + } + + thrm_cluster.write_attributes.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: "holiday"}, + blocking=True, + ) + + assert thrm_cluster.write_attributes.await_count == 2 + assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { + "operation_preset": 1 + } + assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { + "operation_preset": 3 + } + + thrm_cluster.write_attributes.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: "frost protect"}, + blocking=True, + ) + + assert thrm_cluster.write_attributes.await_count == 2 + assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { + "operation_preset": 1 + } + assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { + "operation_preset": 4 + } + + thrm_cluster.write_attributes.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_NONE}, + blocking=True, + ) + + assert thrm_cluster.write_attributes.await_count == 1 + assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { + "operation_preset": 1 + } + + +async def test_set_zonnsmart_operation_mode( + hass: HomeAssistant, device_climate_zonnsmart +) -> None: + """Test setting preset from trv for zonnsmart trv.""" + + entity_id = find_entity_id(Platform.CLIMATE, device_climate_zonnsmart, hass) + thrm_cluster = device_climate_zonnsmart.device.endpoints[1].thermostat + + await send_attributes_report(hass, thrm_cluster, {"operation_preset": 0}) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_SCHEDULE + + await 
send_attributes_report(hass, thrm_cluster, {"operation_preset": 1}) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE + + await send_attributes_report(hass, thrm_cluster, {"operation_preset": 2}) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == "holiday" + + await send_attributes_report(hass, thrm_cluster, {"operation_preset": 3}) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == "holiday" + + await send_attributes_report(hass, thrm_cluster, {"operation_preset": 4}) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == "frost protect" diff --git a/tests/components/zha/test_cluster_handlers.py b/tests/components/zha/test_cluster_handlers.py new file mode 100644 index 00000000000..655a36a2492 --- /dev/null +++ b/tests/components/zha/test_cluster_handlers.py @@ -0,0 +1,1009 @@ +"""Test ZHA Core cluster handlers.""" + +from collections.abc import Callable +import logging +import math +import threading +from types import NoneType +from unittest import mock +from unittest.mock import AsyncMock, patch + +import pytest +import zigpy.device +import zigpy.endpoint +from zigpy.endpoint import Endpoint as ZigpyEndpoint +import zigpy.profiles.zha +import zigpy.quirks as zigpy_quirks +import zigpy.types as t +from zigpy.zcl import foundation +import zigpy.zcl.clusters +from zigpy.zcl.clusters import CLUSTERS_BY_ID +import zigpy.zdo.types as zdo_t + +from homeassistant.components.zha.core import cluster_handlers, registries +from homeassistant.components.zha.core.cluster_handlers.lighting import ( + ColorClusterHandler, +) +import homeassistant.components.zha.core.const as zha_const +from homeassistant.components.zha.core.device import ZHADevice +from homeassistant.components.zha.core.endpoint import Endpoint +from homeassistant.components.zha.core.helpers import get_zha_gateway +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .common import make_zcl_header +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE + +from tests.common import async_capture_events + + +@pytest.fixture(autouse=True) +def disable_platform_only(): + """Disable platforms to speed up tests.""" + with patch("homeassistant.components.zha.PLATFORMS", []): + yield + + +@pytest.fixture +def ieee(): + """IEEE fixture.""" + return t.EUI64.deserialize(b"ieeeaddr")[0] + + +@pytest.fixture +def nwk(): + """NWK fixture.""" + return t.NWK(0xBEEF) + + +@pytest.fixture +async def zha_gateway(hass, setup_zha): + """Return ZhaGateway fixture.""" + await setup_zha() + return get_zha_gateway(hass) + + +@pytest.fixture +def zigpy_coordinator_device(zigpy_device_mock): + """Coordinator device fixture.""" + + coordinator = zigpy_device_mock( + {1: {SIG_EP_INPUT: [0x1000], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, + "00:11:22:33:44:55:66:77", + "test manufacturer", + "test model", + ) + with patch.object(coordinator, "add_to_group", AsyncMock(return_value=[0])): + yield coordinator + + +@pytest.fixture +def endpoint(zigpy_coordinator_device): + """Endpoint fixture.""" + endpoint_mock = mock.MagicMock(spec_set=Endpoint) + endpoint_mock.zigpy_endpoint.device.application.get_device.return_value = ( + zigpy_coordinator_device + ) + type(endpoint_mock.device).skip_configuration = mock.PropertyMock( + return_value=False + ) + endpoint_mock.device.hass.loop_thread_id = threading.get_ident() + endpoint_mock.id = 1 + return endpoint_mock + + 
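Note on the `endpoint` fixture above: it builds the test double with `spec_set=Endpoint`, so any attribute a cluster handler touches during a test must actually exist on the real Endpoint API. A minimal, self-contained sketch of why that matters (the `Thing` class below is hypothetical and not part of ZHA; it only stands in for a real class such as `Endpoint`):

from unittest import mock

class Thing:
    """Hypothetical stand-in for a real API class."""

    def existing(self) -> None:
        """A method that is part of the public API."""

strict = mock.MagicMock(spec_set=Thing)
strict.existing()        # allowed: matches the spec
try:
    strict.missing()     # not defined on Thing, so the mock raises instead of silently passing
except AttributeError:
    pass

Using `spec_set` this way means a typo or a call to a removed Endpoint attribute fails the test immediately rather than being absorbed by a permissive mock.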
+@pytest.fixture +def poll_control_ch(endpoint, zigpy_device_mock): + """Poll control cluster handler fixture.""" + cluster_id = zigpy.zcl.clusters.general.PollControl.cluster_id + zigpy_dev = zigpy_device_mock( + {1: {SIG_EP_INPUT: [cluster_id], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, + "00:11:22:33:44:55:66:77", + "test manufacturer", + "test model", + ) + + cluster = zigpy_dev.endpoints[1].in_clusters[cluster_id] + cluster_handler_class = registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( + cluster_id + ).get(None) + return cluster_handler_class(cluster, endpoint) + + +@pytest.fixture +async def poll_control_device(zha_device_restored, zigpy_device_mock): + """Poll control device fixture.""" + cluster_id = zigpy.zcl.clusters.general.PollControl.cluster_id + zigpy_dev = zigpy_device_mock( + {1: {SIG_EP_INPUT: [cluster_id], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, + "00:11:22:33:44:55:66:77", + "test manufacturer", + "test model", + ) + + return await zha_device_restored(zigpy_dev) + + +@pytest.mark.parametrize( + ("cluster_id", "bind_count", "attrs"), + [ + (zigpy.zcl.clusters.general.Basic.cluster_id, 0, {}), + ( + zigpy.zcl.clusters.general.PowerConfiguration.cluster_id, + 1, + {"battery_voltage", "battery_percentage_remaining"}, + ), + ( + zigpy.zcl.clusters.general.DeviceTemperature.cluster_id, + 1, + {"current_temperature"}, + ), + (zigpy.zcl.clusters.general.Identify.cluster_id, 0, {}), + (zigpy.zcl.clusters.general.Groups.cluster_id, 0, {}), + (zigpy.zcl.clusters.general.Scenes.cluster_id, 1, {}), + (zigpy.zcl.clusters.general.OnOff.cluster_id, 1, {"on_off"}), + (zigpy.zcl.clusters.general.OnOffConfiguration.cluster_id, 1, {}), + (zigpy.zcl.clusters.general.LevelControl.cluster_id, 1, {"current_level"}), + (zigpy.zcl.clusters.general.Alarms.cluster_id, 1, {}), + (zigpy.zcl.clusters.general.AnalogInput.cluster_id, 1, {"present_value"}), + (zigpy.zcl.clusters.general.AnalogOutput.cluster_id, 1, {"present_value"}), + (zigpy.zcl.clusters.general.AnalogValue.cluster_id, 1, {"present_value"}), + (zigpy.zcl.clusters.general.BinaryOutput.cluster_id, 1, {"present_value"}), + (zigpy.zcl.clusters.general.BinaryValue.cluster_id, 1, {"present_value"}), + (zigpy.zcl.clusters.general.MultistateInput.cluster_id, 1, {"present_value"}), + (zigpy.zcl.clusters.general.MultistateOutput.cluster_id, 1, {"present_value"}), + (zigpy.zcl.clusters.general.MultistateValue.cluster_id, 1, {"present_value"}), + (zigpy.zcl.clusters.general.Commissioning.cluster_id, 1, {}), + (zigpy.zcl.clusters.general.Partition.cluster_id, 1, {}), + (zigpy.zcl.clusters.general.Ota.cluster_id, 0, {}), + (zigpy.zcl.clusters.general.PowerProfile.cluster_id, 1, {}), + (zigpy.zcl.clusters.general.ApplianceControl.cluster_id, 1, {}), + (zigpy.zcl.clusters.general.PollControl.cluster_id, 1, {}), + (zigpy.zcl.clusters.general.GreenPowerProxy.cluster_id, 0, {}), + (zigpy.zcl.clusters.closures.DoorLock.cluster_id, 1, {"lock_state"}), + ( + zigpy.zcl.clusters.hvac.Thermostat.cluster_id, + 1, + { + "local_temperature", + "occupied_cooling_setpoint", + "occupied_heating_setpoint", + "unoccupied_cooling_setpoint", + "unoccupied_heating_setpoint", + "running_mode", + "running_state", + "system_mode", + "occupancy", + "pi_cooling_demand", + "pi_heating_demand", + }, + ), + (zigpy.zcl.clusters.hvac.Fan.cluster_id, 1, {"fan_mode"}), + ( + zigpy.zcl.clusters.lighting.Color.cluster_id, + 1, + { + "current_x", + "current_y", + "color_temperature", + "current_hue", + "enhanced_current_hue", + "current_saturation", + }, + ), + ( + 
zigpy.zcl.clusters.measurement.IlluminanceMeasurement.cluster_id, + 1, + {"measured_value"}, + ), + ( + zigpy.zcl.clusters.measurement.IlluminanceLevelSensing.cluster_id, + 1, + {"level_status"}, + ), + ( + zigpy.zcl.clusters.measurement.TemperatureMeasurement.cluster_id, + 1, + {"measured_value"}, + ), + ( + zigpy.zcl.clusters.measurement.PressureMeasurement.cluster_id, + 1, + {"measured_value"}, + ), + ( + zigpy.zcl.clusters.measurement.FlowMeasurement.cluster_id, + 1, + {"measured_value"}, + ), + ( + zigpy.zcl.clusters.measurement.RelativeHumidity.cluster_id, + 1, + {"measured_value"}, + ), + (zigpy.zcl.clusters.measurement.OccupancySensing.cluster_id, 1, {"occupancy"}), + ( + zigpy.zcl.clusters.smartenergy.Metering.cluster_id, + 1, + { + "instantaneous_demand", + "current_summ_delivered", + "current_tier1_summ_delivered", + "current_tier2_summ_delivered", + "current_tier3_summ_delivered", + "current_tier4_summ_delivered", + "current_tier5_summ_delivered", + "current_tier6_summ_delivered", + "current_summ_received", + "status", + }, + ), + ( + zigpy.zcl.clusters.homeautomation.ElectricalMeasurement.cluster_id, + 1, + { + "active_power", + "active_power_max", + "apparent_power", + "rms_current", + "rms_current_max", + "rms_voltage", + "rms_voltage_max", + }, + ), + ], +) +async def test_in_cluster_handler_config( + cluster_id, bind_count, attrs, endpoint, zigpy_device_mock, zha_gateway +) -> None: + """Test ZHA core cluster handler configuration for input clusters.""" + zigpy_dev = zigpy_device_mock( + {1: {SIG_EP_INPUT: [cluster_id], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, + "00:11:22:33:44:55:66:77", + "test manufacturer", + "test model", + ) + + cluster = zigpy_dev.endpoints[1].in_clusters[cluster_id] + cluster_handler_class = registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( + cluster_id, {None, cluster_handlers.ClusterHandler} + ).get(None) + cluster_handler = cluster_handler_class(cluster, endpoint) + + await cluster_handler.async_configure() + + assert cluster.bind.call_count == bind_count + assert cluster.configure_reporting.call_count == 0 + assert cluster.configure_reporting_multiple.call_count == math.ceil(len(attrs) / 3) + reported_attrs = { + a + for a in attrs + for attr in cluster.configure_reporting_multiple.call_args_list + for attrs in attr[0][0] + } + assert set(attrs) == reported_attrs + + +@pytest.mark.parametrize( + ("cluster_id", "bind_count"), + [ + (0x0000, 0), + (0x0001, 1), + (0x0002, 1), + (0x0003, 0), + (0x0004, 0), + (0x0005, 1), + (0x0006, 1), + (0x0007, 1), + (0x0008, 1), + (0x0009, 1), + (0x0015, 1), + (0x0016, 1), + (0x0019, 0), + (0x001A, 1), + (0x001B, 1), + (0x0020, 1), + (0x0021, 0), + (0x0101, 1), + (0x0202, 1), + (0x0300, 1), + (0x0400, 1), + (0x0402, 1), + (0x0403, 1), + (0x0405, 1), + (0x0406, 1), + (0x0702, 1), + (0x0B04, 1), + ], +) +async def test_out_cluster_handler_config( + cluster_id, bind_count, endpoint, zigpy_device_mock, zha_gateway +) -> None: + """Test ZHA core cluster handler configuration for output clusters.""" + zigpy_dev = zigpy_device_mock( + {1: {SIG_EP_OUTPUT: [cluster_id], SIG_EP_INPUT: [], SIG_EP_TYPE: 0x1234}}, + "00:11:22:33:44:55:66:77", + "test manufacturer", + "test model", + ) + + cluster = zigpy_dev.endpoints[1].out_clusters[cluster_id] + cluster.bind_only = True + cluster_handler_class = registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( + cluster_id, {None: cluster_handlers.ClusterHandler} + ).get(None) + cluster_handler = cluster_handler_class(cluster, endpoint) + + await cluster_handler.async_configure() + + assert 
cluster.bind.call_count == bind_count + assert cluster.configure_reporting.call_count == 0 + + +def test_cluster_handler_registry() -> None: + """Test ZIGBEE cluster handler Registry.""" + + # get all quirk ID from zigpy quirks registry + all_quirk_ids = {} + for cluster_id in CLUSTERS_BY_ID: + all_quirk_ids[cluster_id] = {None} + # pylint: disable-next=too-many-nested-blocks + for manufacturer in zigpy_quirks._DEVICE_REGISTRY.registry.values(): + for model_quirk_list in manufacturer.values(): + for quirk in model_quirk_list: + quirk_id = getattr(quirk, zha_const.ATTR_QUIRK_ID, None) + device_description = getattr(quirk, "replacement", None) or getattr( + quirk, "signature", None + ) + + for endpoint in device_description["endpoints"].values(): + cluster_ids = set() + if "input_clusters" in endpoint: + cluster_ids.update(endpoint["input_clusters"]) + if "output_clusters" in endpoint: + cluster_ids.update(endpoint["output_clusters"]) + for cluster_id in cluster_ids: + if not isinstance(cluster_id, int): + cluster_id = cluster_id.cluster_id + if cluster_id not in all_quirk_ids: + all_quirk_ids[cluster_id] = {None} + all_quirk_ids[cluster_id].add(quirk_id) + + # pylint: disable-next=undefined-loop-variable + del quirk, model_quirk_list, manufacturer + + for ( + cluster_id, + cluster_handler_classes, + ) in registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.items(): + assert isinstance(cluster_id, int) + assert 0 <= cluster_id <= 0xFFFF + assert cluster_id in all_quirk_ids + assert isinstance(cluster_handler_classes, dict) + for quirk_id, cluster_handler in cluster_handler_classes.items(): + assert isinstance(quirk_id, (NoneType, str)) + assert issubclass(cluster_handler, cluster_handlers.ClusterHandler) + assert quirk_id in all_quirk_ids[cluster_id] + + +def test_epch_unclaimed_cluster_handlers(cluster_handler) -> None: + """Test unclaimed cluster handlers.""" + + ch_1 = cluster_handler(zha_const.CLUSTER_HANDLER_ON_OFF, 6) + ch_2 = cluster_handler(zha_const.CLUSTER_HANDLER_LEVEL, 8) + ch_3 = cluster_handler(zha_const.CLUSTER_HANDLER_COLOR, 768) + + ep_cluster_handlers = Endpoint( + mock.MagicMock(spec_set=ZigpyEndpoint), mock.MagicMock(spec_set=ZHADevice) + ) + all_cluster_handlers = {ch_1.id: ch_1, ch_2.id: ch_2, ch_3.id: ch_3} + with mock.patch.dict( + ep_cluster_handlers.all_cluster_handlers, all_cluster_handlers, clear=True + ): + available = ep_cluster_handlers.unclaimed_cluster_handlers() + assert ch_1 in available + assert ch_2 in available + assert ch_3 in available + + ep_cluster_handlers.claimed_cluster_handlers[ch_2.id] = ch_2 + available = ep_cluster_handlers.unclaimed_cluster_handlers() + assert ch_1 in available + assert ch_2 not in available + assert ch_3 in available + + ep_cluster_handlers.claimed_cluster_handlers[ch_1.id] = ch_1 + available = ep_cluster_handlers.unclaimed_cluster_handlers() + assert ch_1 not in available + assert ch_2 not in available + assert ch_3 in available + + ep_cluster_handlers.claimed_cluster_handlers[ch_3.id] = ch_3 + available = ep_cluster_handlers.unclaimed_cluster_handlers() + assert ch_1 not in available + assert ch_2 not in available + assert ch_3 not in available + + +def test_epch_claim_cluster_handlers(cluster_handler) -> None: + """Test cluster handler claiming.""" + + ch_1 = cluster_handler(zha_const.CLUSTER_HANDLER_ON_OFF, 6) + ch_2 = cluster_handler(zha_const.CLUSTER_HANDLER_LEVEL, 8) + ch_3 = cluster_handler(zha_const.CLUSTER_HANDLER_COLOR, 768) + + ep_cluster_handlers = Endpoint( + mock.MagicMock(spec_set=ZigpyEndpoint), 
mock.MagicMock(spec_set=ZHADevice) + ) + all_cluster_handlers = {ch_1.id: ch_1, ch_2.id: ch_2, ch_3.id: ch_3} + with mock.patch.dict( + ep_cluster_handlers.all_cluster_handlers, all_cluster_handlers, clear=True + ): + assert ch_1.id not in ep_cluster_handlers.claimed_cluster_handlers + assert ch_2.id not in ep_cluster_handlers.claimed_cluster_handlers + assert ch_3.id not in ep_cluster_handlers.claimed_cluster_handlers + + ep_cluster_handlers.claim_cluster_handlers([ch_2]) + assert ch_1.id not in ep_cluster_handlers.claimed_cluster_handlers + assert ch_2.id in ep_cluster_handlers.claimed_cluster_handlers + assert ep_cluster_handlers.claimed_cluster_handlers[ch_2.id] is ch_2 + assert ch_3.id not in ep_cluster_handlers.claimed_cluster_handlers + + ep_cluster_handlers.claim_cluster_handlers([ch_3, ch_1]) + assert ch_1.id in ep_cluster_handlers.claimed_cluster_handlers + assert ep_cluster_handlers.claimed_cluster_handlers[ch_1.id] is ch_1 + assert ch_2.id in ep_cluster_handlers.claimed_cluster_handlers + assert ep_cluster_handlers.claimed_cluster_handlers[ch_2.id] is ch_2 + assert ch_3.id in ep_cluster_handlers.claimed_cluster_handlers + assert ep_cluster_handlers.claimed_cluster_handlers[ch_3.id] is ch_3 + assert "1:0x0300" in ep_cluster_handlers.claimed_cluster_handlers + + +@mock.patch( + "homeassistant.components.zha.core.endpoint.Endpoint.add_client_cluster_handlers" +) +@mock.patch( + "homeassistant.components.zha.core.discovery.PROBE.discover_entities", + mock.MagicMock(), +) +def test_ep_all_cluster_handlers(m1, zha_device_mock: Callable[..., ZHADevice]) -> None: + """Test Endpoint adding all cluster handlers.""" + zha_device = zha_device_mock( + { + 1: { + SIG_EP_INPUT: [0, 1, 6, 8], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, + }, + 2: { + SIG_EP_INPUT: [0, 1, 6, 8, 768], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: 0x0000, + }, + } + ) + assert "1:0x0000" in zha_device._endpoints[1].all_cluster_handlers + assert "1:0x0001" in zha_device._endpoints[1].all_cluster_handlers + assert "1:0x0006" in zha_device._endpoints[1].all_cluster_handlers + assert "1:0x0008" in zha_device._endpoints[1].all_cluster_handlers + assert "1:0x0300" not in zha_device._endpoints[1].all_cluster_handlers + assert "2:0x0000" not in zha_device._endpoints[1].all_cluster_handlers + assert "2:0x0001" not in zha_device._endpoints[1].all_cluster_handlers + assert "2:0x0006" not in zha_device._endpoints[1].all_cluster_handlers + assert "2:0x0008" not in zha_device._endpoints[1].all_cluster_handlers + assert "2:0x0300" not in zha_device._endpoints[1].all_cluster_handlers + assert "1:0x0000" not in zha_device._endpoints[2].all_cluster_handlers + assert "1:0x0001" not in zha_device._endpoints[2].all_cluster_handlers + assert "1:0x0006" not in zha_device._endpoints[2].all_cluster_handlers + assert "1:0x0008" not in zha_device._endpoints[2].all_cluster_handlers + assert "1:0x0300" not in zha_device._endpoints[2].all_cluster_handlers + assert "2:0x0000" in zha_device._endpoints[2].all_cluster_handlers + assert "2:0x0001" in zha_device._endpoints[2].all_cluster_handlers + assert "2:0x0006" in zha_device._endpoints[2].all_cluster_handlers + assert "2:0x0008" in zha_device._endpoints[2].all_cluster_handlers + assert "2:0x0300" in zha_device._endpoints[2].all_cluster_handlers + + zha_device.async_cleanup_handles() + + +@mock.patch( + "homeassistant.components.zha.core.endpoint.Endpoint.add_client_cluster_handlers" +) +@mock.patch( + 
"homeassistant.components.zha.core.discovery.PROBE.discover_entities", + mock.MagicMock(), +) +def test_cluster_handler_power_config( + m1, zha_device_mock: Callable[..., ZHADevice] +) -> None: + """Test that cluster handlers only get a single power cluster handler.""" + in_clusters = [0, 1, 6, 8] + zha_device = zha_device_mock( + { + 1: {SIG_EP_INPUT: in_clusters, SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x0000}, + 2: { + SIG_EP_INPUT: [*in_clusters, 768], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: 0x0000, + }, + } + ) + assert "1:0x0000" in zha_device._endpoints[1].all_cluster_handlers + assert "1:0x0001" in zha_device._endpoints[1].all_cluster_handlers + assert "1:0x0006" in zha_device._endpoints[1].all_cluster_handlers + assert "1:0x0008" in zha_device._endpoints[1].all_cluster_handlers + assert "1:0x0300" not in zha_device._endpoints[1].all_cluster_handlers + assert "2:0x0000" in zha_device._endpoints[2].all_cluster_handlers + assert "2:0x0001" in zha_device._endpoints[2].all_cluster_handlers + assert "2:0x0006" in zha_device._endpoints[2].all_cluster_handlers + assert "2:0x0008" in zha_device._endpoints[2].all_cluster_handlers + assert "2:0x0300" in zha_device._endpoints[2].all_cluster_handlers + + zha_device.async_cleanup_handles() + + zha_device = zha_device_mock( + { + 1: {SIG_EP_INPUT: [], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x0000}, + 2: {SIG_EP_INPUT: in_clusters, SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x0000}, + } + ) + assert "1:0x0001" not in zha_device._endpoints[1].all_cluster_handlers + assert "2:0x0001" in zha_device._endpoints[2].all_cluster_handlers + + zha_device.async_cleanup_handles() + + zha_device = zha_device_mock( + {2: {SIG_EP_INPUT: in_clusters, SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x0000}} + ) + assert "2:0x0001" in zha_device._endpoints[2].all_cluster_handlers + + zha_device.async_cleanup_handles() + + +async def test_ep_cluster_handlers_configure(cluster_handler) -> None: + """Test unclaimed cluster handlers.""" + + ch_1 = cluster_handler(zha_const.CLUSTER_HANDLER_ON_OFF, 6) + ch_2 = cluster_handler(zha_const.CLUSTER_HANDLER_LEVEL, 8) + ch_3 = cluster_handler(zha_const.CLUSTER_HANDLER_COLOR, 768) + ch_3.async_configure = AsyncMock(side_effect=TimeoutError) + ch_3.async_initialize = AsyncMock(side_effect=TimeoutError) + ch_4 = cluster_handler(zha_const.CLUSTER_HANDLER_ON_OFF, 6) + ch_5 = cluster_handler(zha_const.CLUSTER_HANDLER_LEVEL, 8) + ch_5.async_configure = AsyncMock(side_effect=TimeoutError) + ch_5.async_initialize = AsyncMock(side_effect=TimeoutError) + + endpoint_mock = mock.MagicMock(spec_set=ZigpyEndpoint) + type(endpoint_mock).in_clusters = mock.PropertyMock(return_value={}) + type(endpoint_mock).out_clusters = mock.PropertyMock(return_value={}) + endpoint = Endpoint.new(endpoint_mock, mock.MagicMock(spec_set=ZHADevice)) + + claimed = {ch_1.id: ch_1, ch_2.id: ch_2, ch_3.id: ch_3} + client_handlers = {ch_4.id: ch_4, ch_5.id: ch_5} + + with ( + mock.patch.dict(endpoint.claimed_cluster_handlers, claimed, clear=True), + mock.patch.dict(endpoint.client_cluster_handlers, client_handlers, clear=True), + ): + await endpoint.async_configure() + await endpoint.async_initialize(mock.sentinel.from_cache) + + for ch in (*claimed.values(), *client_handlers.values()): + assert ch.async_initialize.call_count == 1 + assert ch.async_initialize.await_count == 1 + assert ch.async_initialize.call_args[0][0] is mock.sentinel.from_cache + assert ch.async_configure.call_count == 1 + assert ch.async_configure.await_count == 1 + + assert ch_3.debug.call_count == 2 + assert ch_5.debug.call_count == 2 + + 
+async def test_poll_control_configure(poll_control_ch) -> None: + """Test poll control cluster handler configuration.""" + await poll_control_ch.async_configure() + assert poll_control_ch.cluster.write_attributes.call_count == 1 + assert poll_control_ch.cluster.write_attributes.call_args[0][0] == { + "checkin_interval": poll_control_ch.CHECKIN_INTERVAL + } + + +async def test_poll_control_checkin_response(poll_control_ch) -> None: + """Test poll control cluster handler checkin response.""" + rsp_mock = AsyncMock() + set_interval_mock = AsyncMock() + fast_poll_mock = AsyncMock() + cluster = poll_control_ch.cluster + patch_1 = mock.patch.object(cluster, "checkin_response", rsp_mock) + patch_2 = mock.patch.object(cluster, "set_long_poll_interval", set_interval_mock) + patch_3 = mock.patch.object(cluster, "fast_poll_stop", fast_poll_mock) + + with patch_1, patch_2, patch_3: + await poll_control_ch.check_in_response(33) + + assert rsp_mock.call_count == 1 + assert set_interval_mock.call_count == 1 + assert fast_poll_mock.call_count == 1 + + await poll_control_ch.check_in_response(33) + assert cluster.endpoint.request.call_count == 3 + assert cluster.endpoint.request.await_count == 3 + assert cluster.endpoint.request.call_args_list[0][0][1] == 33 + assert cluster.endpoint.request.call_args_list[0][0][0] == 0x0020 + assert cluster.endpoint.request.call_args_list[1][0][0] == 0x0020 + + +async def test_poll_control_cluster_command( + hass: HomeAssistant, poll_control_device +) -> None: + """Test poll control cluster handler response to cluster command.""" + checkin_mock = AsyncMock() + poll_control_ch = poll_control_device._endpoints[1].all_cluster_handlers["1:0x0020"] + cluster = poll_control_ch.cluster + events = async_capture_events(hass, zha_const.ZHA_EVENT) + + with mock.patch.object(poll_control_ch, "check_in_response", checkin_mock): + tsn = 22 + hdr = make_zcl_header(0, global_command=False, tsn=tsn) + assert not events + cluster.handle_message( + hdr, [mock.sentinel.args, mock.sentinel.args2, mock.sentinel.args3] + ) + await hass.async_block_till_done() + + assert checkin_mock.call_count == 1 + assert checkin_mock.await_count == 1 + assert checkin_mock.await_args[0][0] == tsn + assert len(events) == 1 + data = events[0].data + assert data["command"] == "checkin" + assert data["args"][0] is mock.sentinel.args + assert data["args"][1] is mock.sentinel.args2 + assert data["args"][2] is mock.sentinel.args3 + assert data["unique_id"] == "00:11:22:33:44:55:66:77:1:0x0020" + assert data["device_id"] == poll_control_device.device_id + + +async def test_poll_control_ignore_list( + hass: HomeAssistant, poll_control_device +) -> None: + """Test poll control cluster handler ignore list.""" + set_long_poll_mock = AsyncMock() + poll_control_ch = poll_control_device._endpoints[1].all_cluster_handlers["1:0x0020"] + cluster = poll_control_ch.cluster + + with mock.patch.object(cluster, "set_long_poll_interval", set_long_poll_mock): + await poll_control_ch.check_in_response(33) + + assert set_long_poll_mock.call_count == 1 + + set_long_poll_mock.reset_mock() + poll_control_ch.skip_manufacturer_id(4151) + with mock.patch.object(cluster, "set_long_poll_interval", set_long_poll_mock): + await poll_control_ch.check_in_response(33) + + assert set_long_poll_mock.call_count == 0 + + +async def test_poll_control_ikea(hass: HomeAssistant, poll_control_device) -> None: + """Test poll control cluster handler ignore list for ikea.""" + set_long_poll_mock = AsyncMock() + poll_control_ch = 
poll_control_device._endpoints[1].all_cluster_handlers["1:0x0020"] + cluster = poll_control_ch.cluster + + poll_control_device.device.node_desc.manufacturer_code = 4476 + with mock.patch.object(cluster, "set_long_poll_interval", set_long_poll_mock): + await poll_control_ch.check_in_response(33) + + assert set_long_poll_mock.call_count == 0 + + +@pytest.fixture +def zigpy_zll_device(zigpy_device_mock): + """ZLL device fixture.""" + + return zigpy_device_mock( + {1: {SIG_EP_INPUT: [0x1000], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, + "00:11:22:33:44:55:66:77", + "test manufacturer", + "test model", + ) + + +async def test_zll_device_groups( + zigpy_zll_device, endpoint, zigpy_coordinator_device +) -> None: + """Test adding coordinator to ZLL groups.""" + + cluster = zigpy_zll_device.endpoints[1].lightlink + cluster_handler = cluster_handlers.lightlink.LightLinkClusterHandler( + cluster, endpoint + ) + + get_group_identifiers_rsp = zigpy.zcl.clusters.lightlink.LightLink.commands_by_name[ + "get_group_identifiers_rsp" + ].schema + + with patch.object( + cluster, + "command", + AsyncMock( + return_value=get_group_identifiers_rsp( + total=0, start_index=0, group_info_records=[] + ) + ), + ) as cmd_mock: + await cluster_handler.async_configure() + assert cmd_mock.await_count == 1 + assert ( + cluster.server_commands[cmd_mock.await_args[0][0]].name + == "get_group_identifiers" + ) + assert cluster.bind.call_count == 0 + assert zigpy_coordinator_device.add_to_group.await_count == 1 + assert zigpy_coordinator_device.add_to_group.await_args[0][0] == 0x0000 + + zigpy_coordinator_device.add_to_group.reset_mock() + group_1 = zigpy.zcl.clusters.lightlink.GroupInfoRecord(0xABCD, 0x00) + group_2 = zigpy.zcl.clusters.lightlink.GroupInfoRecord(0xAABB, 0x00) + with patch.object( + cluster, + "command", + AsyncMock( + return_value=get_group_identifiers_rsp( + total=2, start_index=0, group_info_records=[group_1, group_2] + ) + ), + ) as cmd_mock: + await cluster_handler.async_configure() + assert cmd_mock.await_count == 1 + assert ( + cluster.server_commands[cmd_mock.await_args[0][0]].name + == "get_group_identifiers" + ) + assert cluster.bind.call_count == 0 + assert zigpy_coordinator_device.add_to_group.await_count == 2 + assert ( + zigpy_coordinator_device.add_to_group.await_args_list[0][0][0] + == group_1.group_id + ) + assert ( + zigpy_coordinator_device.add_to_group.await_args_list[1][0][0] + == group_2.group_id + ) + + +@mock.patch( + "homeassistant.components.zha.core.discovery.PROBE.discover_entities", + mock.MagicMock(), +) +async def test_cluster_no_ep_attribute( + zha_device_mock: Callable[..., ZHADevice], +) -> None: + """Test cluster handlers for clusters without ep_attribute.""" + + zha_device = zha_device_mock( + {1: {SIG_EP_INPUT: [0x042E], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, + ) + + assert "1:0x042e" in zha_device._endpoints[1].all_cluster_handlers + assert zha_device._endpoints[1].all_cluster_handlers["1:0x042e"].name + + zha_device.async_cleanup_handles() + + +async def test_configure_reporting(hass: HomeAssistant, endpoint) -> None: + """Test setting up a cluster handler and configuring attribute reporting in two batches.""" + + class TestZigbeeClusterHandler(cluster_handlers.ClusterHandler): + BIND = True + REPORT_CONFIG = ( + # By name + cluster_handlers.AttrReportConfig(attr="current_x", config=(1, 60, 1)), + cluster_handlers.AttrReportConfig(attr="current_hue", config=(1, 60, 2)), + cluster_handlers.AttrReportConfig( + attr="color_temperature", config=(1, 60, 3) + ), + 
cluster_handlers.AttrReportConfig(attr="current_y", config=(1, 60, 4)), + ) + + mock_ep = mock.AsyncMock(spec_set=zigpy.endpoint.Endpoint) + mock_ep.device.zdo = AsyncMock() + + cluster = zigpy.zcl.clusters.lighting.Color(mock_ep) + cluster.bind = AsyncMock( + spec_set=cluster.bind, + return_value=[zdo_t.Status.SUCCESS], # ZDOCmd.Bind_rsp + ) + cluster.configure_reporting_multiple = AsyncMock( + spec_set=cluster.configure_reporting_multiple, + return_value=[ + foundation.ConfigureReportingResponseRecord( + status=foundation.Status.SUCCESS + ) + ], + ) + + cluster_handler = TestZigbeeClusterHandler(cluster, endpoint) + await cluster_handler.async_configure() + + # Since we request reporting for five attributes, we need to make two calls (3 + 1) + assert cluster.configure_reporting_multiple.mock_calls == [ + mock.call( + { + "current_x": (1, 60, 1), + "current_hue": (1, 60, 2), + "color_temperature": (1, 60, 3), + } + ), + mock.call( + { + "current_y": (1, 60, 4), + } + ), + ] + + +async def test_invalid_cluster_handler( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test setting up a cluster handler that fails to match properly.""" + + class TestZigbeeClusterHandler(cluster_handlers.ClusterHandler): + REPORT_CONFIG = ( + cluster_handlers.AttrReportConfig(attr="missing_attr", config=(1, 60, 1)), + ) + + mock_device = mock.AsyncMock(spec_set=zigpy.device.Device) + zigpy_ep = zigpy.endpoint.Endpoint(mock_device, endpoint_id=1) + + cluster = zigpy_ep.add_input_cluster(zigpy.zcl.clusters.lighting.Color.cluster_id) + cluster.configure_reporting_multiple = AsyncMock( + spec_set=cluster.configure_reporting_multiple, + return_value=[ + foundation.ConfigureReportingResponseRecord( + status=foundation.Status.SUCCESS + ) + ], + ) + + mock_zha_device = mock.AsyncMock(spec=ZHADevice) + mock_zha_device.quirk_id = None + zha_endpoint = Endpoint(zigpy_ep, mock_zha_device) + + # The cluster handler throws an error when matching this cluster + with pytest.raises(KeyError): + TestZigbeeClusterHandler(cluster, zha_endpoint) + + # And one is also logged at runtime + with ( + patch.dict( + registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY[cluster.cluster_id], + {None: TestZigbeeClusterHandler}, + ), + caplog.at_level(logging.WARNING), + ): + zha_endpoint.add_all_cluster_handlers() + + assert "missing_attr" in caplog.text + + +async def test_standard_cluster_handler( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test setting up a cluster handler that matches a standard cluster.""" + + class TestZigbeeClusterHandler(ColorClusterHandler): + pass + + mock_device = mock.AsyncMock(spec_set=zigpy.device.Device) + zigpy_ep = zigpy.endpoint.Endpoint(mock_device, endpoint_id=1) + + cluster = zigpy_ep.add_input_cluster(zigpy.zcl.clusters.lighting.Color.cluster_id) + cluster.configure_reporting_multiple = AsyncMock( + spec_set=cluster.configure_reporting_multiple, + return_value=[ + foundation.ConfigureReportingResponseRecord( + status=foundation.Status.SUCCESS + ) + ], + ) + + mock_zha_device = mock.AsyncMock(spec=ZHADevice) + mock_zha_device.quirk_id = None + zha_endpoint = Endpoint(zigpy_ep, mock_zha_device) + + with patch.dict( + registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY[cluster.cluster_id], + {"__test_quirk_id": TestZigbeeClusterHandler}, + ): + zha_endpoint.add_all_cluster_handlers() + + assert len(zha_endpoint.all_cluster_handlers) == 1 + assert isinstance( + list(zha_endpoint.all_cluster_handlers.values())[0], ColorClusterHandler + ) + + +async def 
test_quirk_id_cluster_handler( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test setting up a cluster handler that matches a standard cluster.""" + + class TestZigbeeClusterHandler(ColorClusterHandler): + pass + + mock_device = mock.AsyncMock(spec_set=zigpy.device.Device) + zigpy_ep = zigpy.endpoint.Endpoint(mock_device, endpoint_id=1) + + cluster = zigpy_ep.add_input_cluster(zigpy.zcl.clusters.lighting.Color.cluster_id) + cluster.configure_reporting_multiple = AsyncMock( + spec_set=cluster.configure_reporting_multiple, + return_value=[ + foundation.ConfigureReportingResponseRecord( + status=foundation.Status.SUCCESS + ) + ], + ) + + mock_zha_device = mock.AsyncMock(spec=ZHADevice) + mock_zha_device.quirk_id = "__test_quirk_id" + zha_endpoint = Endpoint(zigpy_ep, mock_zha_device) + + with patch.dict( + registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY[cluster.cluster_id], + {"__test_quirk_id": TestZigbeeClusterHandler}, + ): + zha_endpoint.add_all_cluster_handlers() + + assert len(zha_endpoint.all_cluster_handlers) == 1 + assert isinstance( + list(zha_endpoint.all_cluster_handlers.values())[0], TestZigbeeClusterHandler + ) + + +# parametrize side effects: +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + [ + (zigpy.exceptions.ZigbeeException(), "Failed to send request"), + ( + zigpy.exceptions.ZigbeeException("Zigbee exception"), + "Failed to send request: Zigbee exception", + ), + (TimeoutError(), "Failed to send request: device did not respond"), + ], +) +async def test_retry_request( + side_effect: Exception | None, expected_error: str | None +) -> None: + """Test the `retry_request` decorator's handling of zigpy-internal exceptions.""" + + async def func(arg1: int, arg2: int) -> int: + assert arg1 == 1 + assert arg2 == 2 + + raise side_effect + + func = mock.AsyncMock(wraps=func) + decorated_func = cluster_handlers.retry_request(func) + + with pytest.raises(HomeAssistantError) as exc: + await decorated_func(1, arg2=2) + + assert func.await_count == 3 + assert isinstance(exc.value, HomeAssistantError) + assert str(exc.value) == expected_error + + +async def test_cluster_handler_naming() -> None: + """Test that all cluster handlers are named appropriately.""" + for client_cluster_handler in registries.CLIENT_CLUSTER_HANDLER_REGISTRY.values(): + assert issubclass(client_cluster_handler, cluster_handlers.ClientClusterHandler) + assert client_cluster_handler.__name__.endswith("ClientClusterHandler") + + for cluster_handler_dict in registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.values(): + for cluster_handler in cluster_handler_dict.values(): + assert not issubclass( + cluster_handler, cluster_handlers.ClientClusterHandler + ) + assert cluster_handler.__name__.endswith("ClusterHandler") diff --git a/tests/components/zha/test_config_flow.py b/tests/components/zha/test_config_flow.py index 87ba46a4ced..0c8414f458f 100644 --- a/tests/components/zha/test_config_flow.py +++ b/tests/components/zha/test_config_flow.py @@ -1,17 +1,14 @@ """Tests for ZHA config flow.""" -from collections.abc import Callable, Coroutine, Generator import copy from datetime import timedelta from ipaddress import ip_address import json -from typing import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, create_autospec, patch import uuid import pytest -from serial.tools.list_ports_common import ListPortInfo -from zha.application.const import RadioType +import serial.tools.list_ports from zigpy.backups import BackupManager import zigpy.config from zigpy.config import 
CONF_DEVICE, CONF_DEVICE_PATH, SCHEMA_DEVICE @@ -21,15 +18,16 @@ import zigpy.types from homeassistant import config_entries from homeassistant.components import ssdp, usb, zeroconf -from homeassistant.components.hassio import AddonError, AddonState +from homeassistant.components.hassio import AddonState from homeassistant.components.ssdp import ATTR_UPNP_MANUFACTURER_URL, ATTR_UPNP_SERIAL from homeassistant.components.zha import config_flow, radio_manager -from homeassistant.components.zha.const import ( +from homeassistant.components.zha.core.const import ( CONF_BAUDRATE, CONF_FLOW_CONTROL, CONF_RADIO_TYPE, DOMAIN, EZSP_OVERWRITE_EUI64, + RadioType, ) from homeassistant.components.zha.radio_manager import ProbeResult from homeassistant.config_entries import ( @@ -38,7 +36,6 @@ from homeassistant.config_entries import ( SOURCE_USER, SOURCE_ZEROCONF, ConfigEntryState, - ConfigFlowResult, ) from homeassistant.const import CONF_SOURCE from homeassistant.core import HomeAssistant @@ -46,9 +43,6 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry -type RadioPicker = Callable[ - [RadioType], Coroutine[Any, Any, tuple[ConfigFlowResult, ListPortInfo]] -] PROBE_FUNCTION_PATH = "zigbee.application.ControllerApplication.probe" @@ -76,7 +70,7 @@ def mock_multipan_platform(): @pytest.fixture(autouse=True) -def mock_app() -> Generator[AsyncMock]: +def mock_app(): """Mock zigpy app interface.""" mock_app = AsyncMock() mock_app.backups = create_autospec(BackupManager, instance=True) @@ -121,13 +115,6 @@ def backup(make_backup): return make_backup() -@pytest.fixture(autouse=True) -def mock_supervisor_client( - supervisor_client: AsyncMock, addon_store_info: AsyncMock -) -> None: - """Mock supervisor client.""" - - def mock_detect_radio_type( radio_type: RadioType = RadioType.ezsp, ret: ProbeResult = ProbeResult.RADIO_TYPE_DETECTED, @@ -143,9 +130,9 @@ def mock_detect_radio_type( return detect -def com_port(device="/dev/ttyUSB1234") -> ListPortInfo: +def com_port(device="/dev/ttyUSB1234"): """Mock of a serial port.""" - port = ListPortInfo("/dev/ttyUSB1234") + port = serial.tools.list_ports_common.ListPortInfo("/dev/ttyUSB1234") port.serial_number = "1234" port.manufacturer = "Virtual serial port" port.device = device @@ -779,7 +766,6 @@ async def test_user_flow_show_form(hass: HomeAssistant) -> None: assert result["step_id"] == "choose_serial_port" -@pytest.mark.usefixtures("addon_not_installed") @patch("serial.tools.list_ports.comports", MagicMock(return_value=[])) async def test_user_flow_show_manual(hass: HomeAssistant) -> None: """Test user flow manual entry when no comport detected.""" @@ -1052,12 +1038,10 @@ def test_prevent_overwrite_ezsp_ieee() -> None: @pytest.fixture -def pick_radio( - hass: HomeAssistant, -) -> Generator[RadioPicker]: +def pick_radio(hass): """Fixture for the first step of the config flow (where a radio is picked).""" - async def wrapper(radio_type: RadioType) -> tuple[ConfigFlowResult, ListPortInfo]: + async def wrapper(radio_type): port = com_port() port_select = f"{port}, s/n: {port.serial_number} - {port.manufacturer}" @@ -1086,7 +1070,7 @@ def pick_radio( async def test_strategy_no_network_settings( - pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant + pick_radio, mock_app, hass: HomeAssistant ) -> None: """Test formation strategy when no network settings are present.""" mock_app.load_network_info = MagicMock(side_effect=NetworkNotFormed()) @@ -1099,7 +1083,7 @@ async def test_strategy_no_network_settings( 
async def test_formation_strategy_form_new_network( - pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant + pick_radio, mock_app, hass: HomeAssistant ) -> None: """Test forming a new network.""" result, port = await pick_radio(RadioType.ezsp) @@ -1117,7 +1101,7 @@ async def test_formation_strategy_form_new_network( async def test_formation_strategy_form_initial_network( - pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant + pick_radio, mock_app, hass: HomeAssistant ) -> None: """Test forming a new network, with no previous settings on the radio.""" mock_app.load_network_info = AsyncMock(side_effect=NetworkNotFormed()) @@ -1138,7 +1122,7 @@ async def test_formation_strategy_form_initial_network( @patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) async def test_onboarding_auto_formation_new_hardware( - mock_app: AsyncMock, hass: HomeAssistant + mock_app, hass: HomeAssistant ) -> None: """Test auto network formation with new hardware during onboarding.""" mock_app.load_network_info = AsyncMock(side_effect=NetworkNotFormed()) @@ -1173,7 +1157,7 @@ async def test_onboarding_auto_formation_new_hardware( async def test_formation_strategy_reuse_settings( - pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant + pick_radio, mock_app, hass: HomeAssistant ) -> None: """Test reusing existing network settings.""" result, port = await pick_radio(RadioType.ezsp) @@ -1206,10 +1190,7 @@ def test_parse_uploaded_backup(process_mock) -> None: @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_formation_strategy_restore_manual_backup_non_ezsp( - allow_overwrite_ieee_mock, - pick_radio: RadioPicker, - mock_app: AsyncMock, - hass: HomeAssistant, + allow_overwrite_ieee_mock, pick_radio, mock_app, hass: HomeAssistant ) -> None: """Test restoring a manual backup on non-EZSP coordinators.""" result, port = await pick_radio(RadioType.znp) @@ -1241,11 +1222,7 @@ async def test_formation_strategy_restore_manual_backup_non_ezsp( @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_formation_strategy_restore_manual_backup_overwrite_ieee_ezsp( - allow_overwrite_ieee_mock, - pick_radio: RadioPicker, - mock_app: AsyncMock, - backup, - hass: HomeAssistant, + allow_overwrite_ieee_mock, pick_radio, mock_app, backup, hass: HomeAssistant ) -> None: """Test restoring a manual backup on EZSP coordinators (overwrite IEEE).""" result, port = await pick_radio(RadioType.ezsp) @@ -1285,10 +1262,7 @@ async def test_formation_strategy_restore_manual_backup_overwrite_ieee_ezsp( @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_formation_strategy_restore_manual_backup_ezsp( - allow_overwrite_ieee_mock, - pick_radio: RadioPicker, - mock_app: AsyncMock, - hass: HomeAssistant, + allow_overwrite_ieee_mock, pick_radio, mock_app, hass: HomeAssistant ) -> None: """Test restoring a manual backup on EZSP coordinators (don't overwrite IEEE).""" result, port = await pick_radio(RadioType.ezsp) @@ -1329,7 +1303,7 @@ async def test_formation_strategy_restore_manual_backup_ezsp( async def test_formation_strategy_restore_manual_backup_invalid_upload( - pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant + pick_radio, mock_app, hass: HomeAssistant ) -> None: """Test restoring a manual backup but an invalid file is uploaded.""" result, port = await 
pick_radio(RadioType.ezsp) @@ -1381,7 +1355,7 @@ def test_format_backup_choice() -> None: ) @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) async def test_formation_strategy_restore_automatic_backup_ezsp( - pick_radio: RadioPicker, mock_app: AsyncMock, make_backup, hass: HomeAssistant + pick_radio, mock_app, make_backup, hass: HomeAssistant ) -> None: """Test restoring an automatic backup (EZSP radio).""" mock_app.backups.backups = [ @@ -1430,11 +1404,7 @@ async def test_formation_strategy_restore_automatic_backup_ezsp( @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @pytest.mark.parametrize("is_advanced", [True, False]) async def test_formation_strategy_restore_automatic_backup_non_ezsp( - is_advanced, - pick_radio: RadioPicker, - mock_app: AsyncMock, - make_backup, - hass: HomeAssistant, + is_advanced, pick_radio, mock_app, make_backup, hass: HomeAssistant ) -> None: """Test restoring an automatic backup (non-EZSP radio).""" mock_app.backups.backups = [ @@ -1487,11 +1457,7 @@ async def test_formation_strategy_restore_automatic_backup_non_ezsp( @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_ezsp_restore_without_settings_change_ieee( - allow_overwrite_ieee_mock, - pick_radio: RadioPicker, - mock_app: AsyncMock, - backup, - hass: HomeAssistant, + allow_overwrite_ieee_mock, pick_radio, mock_app, backup, hass: HomeAssistant ) -> None: """Test a manual backup on EZSP coordinators without settings (no IEEE write).""" # Fail to load settings @@ -1878,23 +1844,10 @@ async def test_config_flow_port_yellow_port_name(hass: HomeAssistant) -> None: ) -async def test_config_flow_ports_no_hassio(hass: HomeAssistant) -> None: - """Test config flow serial port name when this is not a hassio install.""" - - with ( - patch("homeassistant.components.zha.config_flow.is_hassio", return_value=False), - patch("serial.tools.list_ports.comports", MagicMock(return_value=[])), - ): - ports = await config_flow.list_serial_ports(hass) - - assert ports == [] - - async def test_config_flow_port_multiprotocol_port_name(hass: HomeAssistant) -> None: """Test config flow serial port name for multiprotocol add-on.""" with ( - patch("homeassistant.components.zha.config_flow.is_hassio", return_value=True), patch( "homeassistant.components.hassio.addon_manager.AddonManager.async_get_addon_info" ) as async_get_addon_info, @@ -1902,28 +1855,16 @@ async def test_config_flow_port_multiprotocol_port_name(hass: HomeAssistant) -> ): async_get_addon_info.return_value.state = AddonState.RUNNING async_get_addon_info.return_value.hostname = "core-silabs-multiprotocol" - ports = await config_flow.list_serial_ports(hass) - assert len(ports) == 1 - assert ports[0].description == "Multiprotocol add-on" - assert ports[0].manufacturer == "Nabu Casa" - assert ports[0].device == "socket://core-silabs-multiprotocol:9999" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={CONF_SOURCE: SOURCE_USER}, + ) - -async def test_config_flow_port_no_multiprotocol(hass: HomeAssistant) -> None: - """Test config flow serial port listing when addon info fails to load.""" - - with ( - patch("homeassistant.components.zha.config_flow.is_hassio", return_value=True), - patch( - "homeassistant.components.hassio.addon_manager.AddonManager.async_get_addon_info", - side_effect=AddonError, - ), - patch("serial.tools.list_ports.comports", MagicMock(return_value=[])), - ): - ports = await config_flow.list_serial_ports(hass) - 
- assert ports == [] + assert ( + result["data_schema"].schema["path"].container[0] + == "socket://core-silabs-multiprotocol:9999 - Multiprotocol add-on - Nabu Casa" + ) @patch("serial.tools.list_ports.comports", MagicMock(return_value=[com_port()])) diff --git a/tests/components/zha/test_cover.py b/tests/components/zha/test_cover.py index e5d588aa1bf..5f6dac885f2 100644 --- a/tests/components/zha/test_cover.py +++ b/tests/components/zha/test_cover.py @@ -1,10 +1,12 @@ """Test ZHA cover.""" +import asyncio from unittest.mock import patch import pytest -from zigpy.profiles import zha -from zigpy.zcl.clusters import closures +import zigpy.profiles.zha +import zigpy.types +from zigpy.zcl.clusters import closures, general import zigpy.zcl.foundation as zcl_f from homeassistant.components.cover import ( @@ -20,22 +22,34 @@ from homeassistant.components.cover import ( SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, - CoverState, + SERVICE_TOGGLE_COVER_TILT, ) -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, +from homeassistant.components.zha.core.const import ZHA_EVENT +from homeassistant.const import ( + ATTR_COMMAND, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, + STATE_UNAVAILABLE, + Platform, ) -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import CoreState, HomeAssistant, State from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_component import async_update_entity -from .common import find_entity_id, send_attributes_report, update_attribute_cache +from .common import ( + async_enable_traffic, + async_test_rejoin, + find_entity_id, + make_zcl_header, + send_attributes_report, + update_attribute_cache, +) from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from tests.common import async_capture_events, mock_restore_cache + Default_Response = zcl_f.GENERAL_COMMANDS[zcl_f.GeneralCommand.Default_Response].schema @@ -54,49 +68,94 @@ def cover_platform_only(): yield +@pytest.fixture +def zigpy_cover_device(zigpy_device_mock): + """Zigpy cover device.""" + + endpoints = { + 1: { + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.WINDOW_COVERING_DEVICE, + SIG_EP_INPUT: [closures.WindowCovering.cluster_id], + SIG_EP_OUTPUT: [], + } + } + return zigpy_device_mock(endpoints) + + +@pytest.fixture +def zigpy_cover_remote(zigpy_device_mock): + """Zigpy cover remote device.""" + + endpoints = { + 1: { + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.WINDOW_COVERING_CONTROLLER, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [closures.WindowCovering.cluster_id], + } + } + return zigpy_device_mock(endpoints) + + +@pytest.fixture +def zigpy_shade_device(zigpy_device_mock): + """Zigpy shade device.""" + + endpoints = { + 1: { + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SHADE, + SIG_EP_INPUT: [ + closures.Shade.cluster_id, + general.LevelControl.cluster_id, + general.OnOff.cluster_id, + ], + SIG_EP_OUTPUT: [], + } + } + return zigpy_device_mock(endpoints) + + +@pytest.fixture +def zigpy_keen_vent(zigpy_device_mock): + """Zigpy Keen Vent device.""" + + endpoints = { + 1: { + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.LEVEL_CONTROLLABLE_OUTPUT, + SIG_EP_INPUT: 
[general.LevelControl.cluster_id, general.OnOff.cluster_id], + SIG_EP_OUTPUT: [], + } + } + return zigpy_device_mock( + endpoints, manufacturer="Keen Home Inc", model="SV02-612-MP-1.3" + ) + + WCAttrs = closures.WindowCovering.AttributeDefs WCCmds = closures.WindowCovering.ServerCommandDefs WCT = closures.WindowCovering.WindowCoveringType WCCS = closures.WindowCovering.ConfigStatus -async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: +async def test_cover_non_tilt_initial_state( + hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device +) -> None: """Test ZHA cover platform.""" - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_PROFILE: zha.PROFILE_ID, - SIG_EP_TYPE: zha.DeviceType.WINDOW_COVERING_DEVICE, - SIG_EP_INPUT: [closures.WindowCovering.cluster_id], - SIG_EP_OUTPUT: [], - } - }, - ) # load up cover domain - cluster = zigpy_device.endpoints[1].window_covering + cluster = zigpy_cover_device.endpoints[1].window_covering cluster.PLUGGED_ATTR_READS = { WCAttrs.current_position_lift_percentage.name: 0, - WCAttrs.current_position_tilt_percentage.name: 42, - WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, + WCAttrs.window_covering_type.name: WCT.Drapery, WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), } update_attribute_cache(cluster) - - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - entity_id = find_entity_id(Platform.COVER, zha_device_proxy, hass) - assert entity_id is not None - + zha_device = await zha_device_joined_restored(zigpy_cover_device) assert ( - not zha_device_proxy.device.endpoints[1] + not zha_device.endpoints[1] .all_cluster_handlers[f"1:0x{cluster.cluster_id:04x}"] .inverted ) @@ -110,10 +169,75 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: in cluster.read_attributes.call_args[0][0] ) + entity_id = find_entity_id(Platform.COVER, zha_device, hass) + assert entity_id is not None + + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the cover was created and that it is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + await hass.async_block_till_done() + + # test update + prev_call_count = cluster.read_attributes.call_count await async_update_entity(hass, entity_id) + assert cluster.read_attributes.call_count == prev_call_count + 1 state = hass.states.get(entity_id) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN + assert state.attributes[ATTR_CURRENT_POSITION] == 100 + + +async def test_cover( + hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device +) -> None: + """Test ZHA cover platform.""" + + # load up cover domain + cluster = zigpy_cover_device.endpoints[1].window_covering + cluster.PLUGGED_ATTR_READS = { + WCAttrs.current_position_lift_percentage.name: 0, + WCAttrs.current_position_tilt_percentage.name: 42, + WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, + WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), + } + update_attribute_cache(cluster) + zha_device = await 
zha_device_joined_restored(zigpy_cover_device) + assert ( + not zha_device.endpoints[1] + .all_cluster_handlers[f"1:0x{cluster.cluster_id:04x}"] + .inverted + ) + assert cluster.read_attributes.call_count == 3 + assert ( + WCAttrs.current_position_lift_percentage.name + in cluster.read_attributes.call_args[0][0] + ) + assert ( + WCAttrs.current_position_tilt_percentage.name + in cluster.read_attributes.call_args[0][0] + ) + + entity_id = find_entity_id(Platform.COVER, zha_device, hass) + assert entity_id is not None + + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the cover was created and that it is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + await hass.async_block_till_done() + + # test update + prev_call_count = cluster.read_attributes.call_count + await async_update_entity(hass, entity_id) + assert cluster.read_attributes.call_count == prev_call_count + 1 + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 58 @@ -121,25 +245,25 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 100} ) - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert hass.states.get(entity_id).state == STATE_CLOSED # test to see if it opens await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 0} ) - assert hass.states.get(entity_id).state == CoverState.OPEN + assert hass.states.get(entity_id).state == STATE_OPEN # test that the state remains after tilting to 100% await send_attributes_report( hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 100} ) - assert hass.states.get(entity_id).state == CoverState.OPEN + assert hass.states.get(entity_id).state == STATE_OPEN # test to see the state remains after tilting to 0% await send_attributes_report( hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} ) - assert hass.states.get(entity_id).state == CoverState.OPEN + assert hass.states.get(entity_id).state == STATE_OPEN # close from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): @@ -152,13 +276,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][2].command.name == WCCmds.down_close.name assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == CoverState.CLOSING + assert hass.states.get(entity_id).state == STATE_CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 100} ) - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert hass.states.get(entity_id).state == STATE_CLOSED with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): await hass.services.async_call( @@ -177,13 +301,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][3] == 100 assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == CoverState.CLOSING + assert hass.states.get(entity_id).state == STATE_CLOSING await send_attributes_report( hass, cluster, 
{WCAttrs.current_position_tilt_percentage.id: 100} ) - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert hass.states.get(entity_id).state == STATE_CLOSED # open from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): @@ -196,13 +320,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][2].command.name == WCCmds.up_open.name assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == CoverState.OPENING + assert hass.states.get(entity_id).state == STATE_OPENING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 0} ) - assert hass.states.get(entity_id).state == CoverState.OPEN + assert hass.states.get(entity_id).state == STATE_OPEN with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): await hass.services.async_call( @@ -221,13 +345,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][3] == 0 assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == CoverState.OPENING + assert hass.states.get(entity_id).state == STATE_OPENING await send_attributes_report( hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} ) - assert hass.states.get(entity_id).state == CoverState.OPEN + assert hass.states.get(entity_id).state == STATE_OPEN # set position UI with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): @@ -247,19 +371,19 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][3] == 53 assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == CoverState.CLOSING + assert hass.states.get(entity_id).state == STATE_CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 35} ) - assert hass.states.get(entity_id).state == CoverState.CLOSING + assert hass.states.get(entity_id).state == STATE_CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 53} ) - assert hass.states.get(entity_id).state == CoverState.OPEN + assert hass.states.get(entity_id).state == STATE_OPEN with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): await hass.services.async_call( @@ -278,19 +402,19 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][3] == 53 assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == CoverState.CLOSING + assert hass.states.get(entity_id).state == STATE_CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 35} ) - assert hass.states.get(entity_id).state == CoverState.CLOSING + assert hass.states.get(entity_id).state == STATE_CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 53} ) - assert hass.states.get(entity_id).state == CoverState.OPEN + assert hass.states.get(entity_id).state == STATE_OPEN # stop from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x2, zcl_f.Status.SUCCESS]): @@ -316,48 +440,68 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][2].command.name == WCCmds.stop.name assert 
cluster.request.call_args[1]["expect_reply"] is True + # test rejoin + cluster.PLUGGED_ATTR_READS = {WCAttrs.current_position_lift_percentage.name: 0} + await async_test_rejoin(hass, zigpy_cover_device, [cluster], (1,)) + assert hass.states.get(entity_id).state == STATE_OPEN + + # test toggle + with patch("zigpy.zcl.Cluster.request", return_value=[0x2, zcl_f.Status.SUCCESS]): + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_TOGGLE_COVER_TILT, + {"entity_id": entity_id}, + blocking=True, + ) + assert cluster.request.call_count == 1 + assert cluster.request.call_args[0][0] is False + assert cluster.request.call_args[0][1] == 0x08 + assert ( + cluster.request.call_args[0][2].command.name + == WCCmds.go_to_tilt_percentage.name + ) + assert cluster.request.call_args[0][3] == 100 + assert cluster.request.call_args[1]["expect_reply"] is True + async def test_cover_failures( - hass: HomeAssistant, setup_zha, zigpy_device_mock + hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device ) -> None: """Test ZHA cover platform failure cases.""" - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_PROFILE: zha.PROFILE_ID, - SIG_EP_TYPE: zha.DeviceType.WINDOW_COVERING_DEVICE, - SIG_EP_INPUT: [closures.WindowCovering.cluster_id], - SIG_EP_OUTPUT: [], - } - }, - ) # load up cover domain - cluster = zigpy_device.endpoints[1].window_covering + cluster = zigpy_cover_device.endpoints[1].window_covering cluster.PLUGGED_ATTR_READS = { WCAttrs.current_position_tilt_percentage.name: 42, WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, } update_attribute_cache(cluster) + zha_device = await zha_device_joined_restored(zigpy_cover_device) - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - entity_id = find_entity_id(Platform.COVER, zha_device_proxy, hass) + entity_id = find_entity_id(Platform.COVER, zha_device, hass) assert entity_id is not None + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the cover was created and that it is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # test update returned None + prev_call_count = cluster.read_attributes.call_count + await async_update_entity(hass, entity_id) + assert cluster.read_attributes.call_count == prev_call_count + 1 + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + await hass.async_block_till_done() + # test that the state has changed from unavailable to closed await send_attributes_report(hass, cluster, {0: 0, 8: 100, 1: 1}) - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert hass.states.get(entity_id).state == STATE_CLOSED # test to see if it opens await send_attributes_report(hass, cluster, {0: 1, 8: 0, 1: 100}) - assert hass.states.get(entity_id).state == CoverState.OPEN + assert hass.states.get(entity_id).state == STATE_OPEN # close from UI with patch( @@ -526,3 +670,319 @@ async def test_cover_failures( cluster.request.call_args[0][1] == closures.WindowCovering.ServerCommandDefs.stop.id ) + + +async def test_shade( + hass: HomeAssistant, zha_device_joined_restored, zigpy_shade_device +) -> None: + """Test ZHA 
cover platform for shade device type.""" + + # load up cover domain + zha_device = await zha_device_joined_restored(zigpy_shade_device) + + cluster_on_off = zigpy_shade_device.endpoints[1].on_off + cluster_level = zigpy_shade_device.endpoints[1].level + entity_id = find_entity_id(Platform.COVER, zha_device, hass) + assert entity_id is not None + + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the cover was created and that it is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + await hass.async_block_till_done() + + # test that the state has changed from unavailable to off + await send_attributes_report(hass, cluster_on_off, {8: 0, 0: False, 1: 1}) + assert hass.states.get(entity_id).state == STATE_CLOSED + + # test to see if it opens + await send_attributes_report(hass, cluster_on_off, {8: 0, 0: True, 1: 1}) + assert hass.states.get(entity_id).state == STATE_OPEN + + # close from UI command fails + with patch( + "zigpy.zcl.Cluster.request", + return_value=Default_Response( + command_id=closures.WindowCovering.ServerCommandDefs.down_close.id, + status=zcl_f.Status.UNSUP_CLUSTER_COMMAND, + ), + ): + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {"entity_id": entity_id}, + blocking=True, + ) + assert cluster_on_off.request.call_count == 1 + assert cluster_on_off.request.call_args[0][0] is False + assert cluster_on_off.request.call_args[0][1] == 0x0000 + assert hass.states.get(entity_id).state == STATE_OPEN + + with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): + await hass.services.async_call( + COVER_DOMAIN, SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True + ) + assert cluster_on_off.request.call_count == 1 + assert cluster_on_off.request.call_args[0][0] is False + assert cluster_on_off.request.call_args[0][1] == 0x0000 + assert hass.states.get(entity_id).state == STATE_CLOSED + + # open from UI command fails + assert ATTR_CURRENT_POSITION not in hass.states.get(entity_id).attributes + await send_attributes_report(hass, cluster_level, {0: 0}) + with patch( + "zigpy.zcl.Cluster.request", + return_value=Default_Response( + command_id=closures.WindowCovering.ServerCommandDefs.up_open.id, + status=zcl_f.Status.UNSUP_CLUSTER_COMMAND, + ), + ): + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {"entity_id": entity_id}, + blocking=True, + ) + assert cluster_on_off.request.call_count == 1 + assert cluster_on_off.request.call_args[0][0] is False + assert cluster_on_off.request.call_args[0][1] == 0x0001 + assert hass.states.get(entity_id).state == STATE_CLOSED + + # stop from UI command fails + with patch( + "zigpy.zcl.Cluster.request", + return_value=Default_Response( + command_id=general.LevelControl.ServerCommandDefs.stop.id, + status=zcl_f.Status.UNSUP_CLUSTER_COMMAND, + ), + ): + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + {"entity_id": entity_id}, + blocking=True, + ) + + assert cluster_level.request.call_count == 1 + assert cluster_level.request.call_args[0][0] is False + assert ( + cluster_level.request.call_args[0][1] + == general.LevelControl.ServerCommandDefs.stop.id + ) + assert hass.states.get(entity_id).state == STATE_CLOSED + + # open from UI succeeds + with 
patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): + await hass.services.async_call( + COVER_DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True + ) + assert cluster_on_off.request.call_count == 1 + assert cluster_on_off.request.call_args[0][0] is False + assert cluster_on_off.request.call_args[0][1] == 0x0001 + assert hass.states.get(entity_id).state == STATE_OPEN + + # set position UI command fails + with patch( + "zigpy.zcl.Cluster.request", + return_value=Default_Response( + command_id=closures.WindowCovering.ServerCommandDefs.go_to_lift_percentage.id, + status=zcl_f.Status.UNSUP_CLUSTER_COMMAND, + ), + ): + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_POSITION, + {"entity_id": entity_id, "position": 47}, + blocking=True, + ) + + assert cluster_level.request.call_count == 1 + assert cluster_level.request.call_args[0][0] is False + assert cluster_level.request.call_args[0][1] == 0x0004 + assert int(cluster_level.request.call_args[0][3] * 100 / 255) == 47 + assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 0 + + # set position UI success + with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_POSITION, + {"entity_id": entity_id, "position": 47}, + blocking=True, + ) + assert cluster_level.request.call_count == 1 + assert cluster_level.request.call_args[0][0] is False + assert cluster_level.request.call_args[0][1] == 0x0004 + assert int(cluster_level.request.call_args[0][3] * 100 / 255) == 47 + assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 47 + + # report position change + await send_attributes_report(hass, cluster_level, {8: 0, 0: 100, 1: 1}) + assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == int( + 100 * 100 / 255 + ) + + # test rejoin + await async_test_rejoin( + hass, zigpy_shade_device, [cluster_level, cluster_on_off], (1,) + ) + assert hass.states.get(entity_id).state == STATE_OPEN + + # test cover stop + with patch("zigpy.zcl.Cluster.request", side_effect=TimeoutError): + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + {"entity_id": entity_id}, + blocking=True, + ) + assert cluster_level.request.call_count == 3 + assert cluster_level.request.call_args[0][0] is False + assert cluster_level.request.call_args[0][1] in (0x0003, 0x0007) + + +async def test_shade_restore_state( + hass: HomeAssistant, zha_device_restored, zigpy_shade_device +) -> None: + """Ensure states are restored on startup.""" + mock_restore_cache( + hass, + ( + State( + "cover.fakemanufacturer_fakemodel_shade", + STATE_OPEN, + {ATTR_CURRENT_POSITION: 50}, + ), + ), + ) + + hass.set_state(CoreState.starting) + + zha_device = await zha_device_restored(zigpy_shade_device) + entity_id = find_entity_id(Platform.COVER, zha_device, hass) + assert entity_id is not None + + # test that the cover was created and that it is available + assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 50 + + +async def test_cover_restore_state( + hass: HomeAssistant, zha_device_restored, zigpy_cover_device +) -> None: + """Ensure states are restored on startup.""" + cluster = zigpy_cover_device.endpoints[1].window_covering + cluster.PLUGGED_ATTR_READS = { + WCAttrs.current_position_lift_percentage.name: 50, + 
WCAttrs.current_position_tilt_percentage.name: 42, + WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, + } + update_attribute_cache(cluster) + + hass.set_state(CoreState.starting) + + zha_device = await zha_device_restored(zigpy_cover_device) + entity_id = find_entity_id(Platform.COVER, zha_device, hass) + assert entity_id is not None + + # test that the cover was created and that it is available + assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 100 - 50 + assert hass.states.get(entity_id).attributes[ATTR_CURRENT_TILT_POSITION] == 100 - 42 + + +async def test_keen_vent( + hass: HomeAssistant, zha_device_joined_restored, zigpy_keen_vent +) -> None: + """Test keen vent.""" + + # load up cover domain + zha_device = await zha_device_joined_restored(zigpy_keen_vent) + + cluster_on_off = zigpy_keen_vent.endpoints[1].on_off + cluster_level = zigpy_keen_vent.endpoints[1].level + entity_id = find_entity_id(Platform.COVER, zha_device, hass) + assert entity_id is not None + + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the cover was created and that it is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + await hass.async_block_till_done() + + # test that the state has changed from unavailable to off + await send_attributes_report(hass, cluster_on_off, {8: 0, 0: False, 1: 1}) + assert hass.states.get(entity_id).state == STATE_CLOSED + + # open from UI command fails + p1 = patch.object(cluster_on_off, "request", side_effect=TimeoutError) + p2 = patch.object(cluster_level, "request", return_value=[4, 0]) + + with p1, p2: + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {"entity_id": entity_id}, + blocking=True, + ) + assert cluster_on_off.request.call_count == 3 + assert cluster_on_off.request.call_args[0][0] is False + assert cluster_on_off.request.call_args[0][1] == 0x0001 + assert cluster_level.request.call_count == 1 + assert hass.states.get(entity_id).state == STATE_CLOSED + + # open from UI command success + p1 = patch.object(cluster_on_off, "request", return_value=[1, 0]) + p2 = patch.object(cluster_level, "request", return_value=[4, 0]) + + with p1, p2: + await hass.services.async_call( + COVER_DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True + ) + await asyncio.sleep(0) + assert cluster_on_off.request.call_count == 1 + assert cluster_on_off.request.call_args[0][0] is False + assert cluster_on_off.request.call_args[0][1] == 0x0001 + assert cluster_level.request.call_count == 1 + assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 100 + + +async def test_cover_remote( + hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_remote +) -> None: + """Test ZHA cover remote.""" + + # load up cover domain + await zha_device_joined_restored(zigpy_cover_remote) + + cluster = zigpy_cover_remote.endpoints[1].out_clusters[ + closures.WindowCovering.cluster_id + ] + zha_events = async_capture_events(hass, ZHA_EVENT) + + # up command + hdr = make_zcl_header(0, global_command=False) + cluster.handle_message(hdr, []) + await hass.async_block_till_done() + + assert len(zha_events) == 1 + assert zha_events[0].data[ATTR_COMMAND] == "up_open" + + # down command + hdr = make_zcl_header(1, 
global_command=False) + cluster.handle_message(hdr, []) + await hass.async_block_till_done() + + assert len(zha_events) == 2 + assert zha_events[1].data[ATTR_COMMAND] == "down_close" diff --git a/tests/components/zha/test_device.py b/tests/components/zha/test_device.py new file mode 100644 index 00000000000..87acdc5fd1c --- /dev/null +++ b/tests/components/zha/test_device.py @@ -0,0 +1,363 @@ +"""Test ZHA device switch.""" + +from datetime import timedelta +import logging +import time +from unittest import mock +from unittest.mock import patch + +import pytest +import zigpy.profiles.zha +import zigpy.types +from zigpy.zcl.clusters import general +import zigpy.zdo.types as zdo_t + +from homeassistant.components.zha.core.const import ( + CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, + CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, +) +from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +import homeassistant.helpers.device_registry as dr +import homeassistant.util.dt as dt_util + +from .common import async_enable_traffic, make_zcl_header +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE + +from tests.common import async_fire_time_changed + + +@pytest.fixture(autouse=True) +def required_platforms_only(): + """Only set up the required platform and required base platforms to speed up tests.""" + with patch( + "homeassistant.components.zha.PLATFORMS", + ( + Platform.DEVICE_TRACKER, + Platform.SENSOR, + Platform.SELECT, + Platform.SWITCH, + Platform.BINARY_SENSOR, + ), + ): + yield + + +@pytest.fixture +def zigpy_device(zigpy_device_mock): + """Device tracker zigpy device.""" + + def _dev(with_basic_cluster_handler: bool = True, **kwargs): + in_clusters = [general.OnOff.cluster_id] + if with_basic_cluster_handler: + in_clusters.append(general.Basic.cluster_id) + + endpoints = { + 3: { + SIG_EP_INPUT: in_clusters, + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, + } + } + return zigpy_device_mock(endpoints, **kwargs) + + return _dev + + +@pytest.fixture +def zigpy_device_mains(zigpy_device_mock): + """Device tracker zigpy device.""" + + def _dev(with_basic_cluster_handler: bool = True): + in_clusters = [general.OnOff.cluster_id] + if with_basic_cluster_handler: + in_clusters.append(general.Basic.cluster_id) + + endpoints = { + 3: { + SIG_EP_INPUT: in_clusters, + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, + } + } + return zigpy_device_mock( + endpoints, node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00" + ) + + return _dev + + +@pytest.fixture +def device_with_basic_cluster_handler(zigpy_device_mains): + """Return a ZHA device with a basic cluster handler present.""" + return zigpy_device_mains(with_basic_cluster_handler=True) + + +@pytest.fixture +def device_without_basic_cluster_handler(zigpy_device): + """Return a ZHA device without a basic cluster handler present.""" + return zigpy_device(with_basic_cluster_handler=False) + + +@pytest.fixture +async def ota_zha_device(zha_device_restored, zigpy_device_mock): + """ZHA device with OTA cluster fixture.""" + zigpy_dev = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id], + SIG_EP_OUTPUT: [general.Ota.cluster_id], + SIG_EP_TYPE: 0x1234, + } + }, + "00:11:22:33:44:55:66:77", + "test manufacturer", + "test model", + ) + + return await zha_device_restored(zigpy_dev) + + +def _send_time_changed(hass, seconds): + """Send a time changed event.""" + now = dt_util.utcnow() + 
timedelta(seconds=seconds) + async_fire_time_changed(hass, now) + + +@patch( + "homeassistant.components.zha.core.cluster_handlers.general.BasicClusterHandler.async_initialize", + new=mock.AsyncMock(), +) +async def test_check_available_success( + hass: HomeAssistant, device_with_basic_cluster_handler, zha_device_restored +) -> None: + """Check device availability success on 1st try.""" + zha_device = await zha_device_restored(device_with_basic_cluster_handler) + await async_enable_traffic(hass, [zha_device]) + basic_ch = device_with_basic_cluster_handler.endpoints[3].basic + + basic_ch.read_attributes.reset_mock() + device_with_basic_cluster_handler.last_seen = None + assert zha_device.available is True + _send_time_changed(hass, zha_device.consider_unavailable_time + 2) + await hass.async_block_till_done() + assert zha_device.available is False + assert basic_ch.read_attributes.await_count == 0 + + device_with_basic_cluster_handler.last_seen = ( + time.time() - zha_device.consider_unavailable_time - 2 + ) + _seens = [time.time(), device_with_basic_cluster_handler.last_seen] + + def _update_last_seen(*args, **kwargs): + device_with_basic_cluster_handler.last_seen = _seens.pop() + + basic_ch.read_attributes.side_effect = _update_last_seen + + # successfully ping zigpy device, but zha_device is not yet available + _send_time_changed(hass, 91) + await hass.async_block_till_done() + assert basic_ch.read_attributes.await_count == 1 + assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] + assert zha_device.available is False + + # There was traffic from the device: pings, but not yet available + _send_time_changed(hass, 91) + await hass.async_block_till_done() + assert basic_ch.read_attributes.await_count == 2 + assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] + assert zha_device.available is False + + # There was traffic from the device: don't try to ping, marked as available + _send_time_changed(hass, 91) + await hass.async_block_till_done() + assert basic_ch.read_attributes.await_count == 2 + assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] + assert zha_device.available is True + + +@patch( + "homeassistant.components.zha.core.cluster_handlers.general.BasicClusterHandler.async_initialize", + new=mock.AsyncMock(), +) +async def test_check_available_unsuccessful( + hass: HomeAssistant, device_with_basic_cluster_handler, zha_device_restored +) -> None: + """Check device availability all tries fail.""" + + zha_device = await zha_device_restored(device_with_basic_cluster_handler) + await async_enable_traffic(hass, [zha_device]) + basic_ch = device_with_basic_cluster_handler.endpoints[3].basic + + assert zha_device.available is True + assert basic_ch.read_attributes.await_count == 0 + + device_with_basic_cluster_handler.last_seen = ( + time.time() - zha_device.consider_unavailable_time - 2 + ) + + # unsuccessfully ping zigpy device, but zha_device is still available + _send_time_changed(hass, 91) + await hass.async_block_till_done() + assert basic_ch.read_attributes.await_count == 1 + assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] + assert zha_device.available is True + + # still no traffic, but zha_device is still available + _send_time_changed(hass, 91) + await hass.async_block_till_done() + assert basic_ch.read_attributes.await_count == 2 + assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] + assert zha_device.available is True + + # not even trying to update, device is unavailable + 
_send_time_changed(hass, 91) + await hass.async_block_till_done() + assert basic_ch.read_attributes.await_count == 2 + assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] + assert zha_device.available is False + + +@patch( + "homeassistant.components.zha.core.cluster_handlers.general.BasicClusterHandler.async_initialize", + new=mock.AsyncMock(), +) +async def test_check_available_no_basic_cluster_handler( + hass: HomeAssistant, + device_without_basic_cluster_handler, + zha_device_restored, + caplog: pytest.LogCaptureFixture, +) -> None: + """Check device availability for a device without basic cluster.""" + caplog.set_level(logging.DEBUG, logger="homeassistant.components.zha") + + zha_device = await zha_device_restored(device_without_basic_cluster_handler) + await async_enable_traffic(hass, [zha_device]) + + assert zha_device.available is True + + device_without_basic_cluster_handler.last_seen = ( + time.time() - zha_device.consider_unavailable_time - 2 + ) + + assert "does not have a mandatory basic cluster" not in caplog.text + _send_time_changed(hass, 91) + await hass.async_block_till_done() + assert zha_device.available is False + assert "does not have a mandatory basic cluster" in caplog.text + + +async def test_ota_sw_version( + hass: HomeAssistant, device_registry: dr.DeviceRegistry, ota_zha_device +) -> None: + """Test device entry gets sw_version updated via OTA cluster handler.""" + + ota_ch = ota_zha_device._endpoints[1].client_cluster_handlers["1:0x0019"] + entry = device_registry.async_get(ota_zha_device.device_id) + assert entry.sw_version is None + + cluster = ota_ch.cluster + hdr = make_zcl_header(1, global_command=False) + sw_version = 0x2345 + cluster.handle_message(hdr, [1, 2, 3, sw_version, None]) + await hass.async_block_till_done() + entry = device_registry.async_get(ota_zha_device.device_id) + assert int(entry.sw_version, base=16) == sw_version + + +@pytest.mark.parametrize( + ("device", "last_seen_delta", "is_available"), + [ + ("zigpy_device", 0, True), + ( + "zigpy_device", + CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS + 2, + True, + ), + ( + "zigpy_device", + CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY - 2, + True, + ), + ( + "zigpy_device", + CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY + 2, + False, + ), + ("zigpy_device_mains", 0, True), + ( + "zigpy_device_mains", + CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS - 2, + True, + ), + ( + "zigpy_device_mains", + CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS + 2, + False, + ), + ( + "zigpy_device_mains", + CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY - 2, + False, + ), + ( + "zigpy_device_mains", + CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY + 2, + False, + ), + ], +) +async def test_device_restore_availability( + hass: HomeAssistant, + request: pytest.FixtureRequest, + device, + last_seen_delta, + is_available, + zha_device_restored, +) -> None: + """Test initial availability for restored devices.""" + + zigpy_device = request.getfixturevalue(device)() + zha_device = await zha_device_restored( + zigpy_device, last_seen=time.time() - last_seen_delta + ) + entity_id = "switch.fakemanufacturer_fakemodel_switch" + + await hass.async_block_till_done() + # ensure the switch entity was created + assert hass.states.get(entity_id).state is not None + assert zha_device.available is is_available + if is_available: + assert hass.states.get(entity_id).state == STATE_OFF + else: + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + +async def test_device_is_active_coordinator( + hass: HomeAssistant, zha_device_joined, 
zigpy_device +) -> None: + """Test that the current coordinator is uniquely detected.""" + + current_coord_dev = zigpy_device(ieee="aa:bb:cc:dd:ee:ff:00:11", nwk=0x0000) + current_coord_dev.node_desc = current_coord_dev.node_desc.replace( + logical_type=zdo_t.LogicalType.Coordinator + ) + + old_coord_dev = zigpy_device(ieee="aa:bb:cc:dd:ee:ff:00:12", nwk=0x0000) + old_coord_dev.node_desc = old_coord_dev.node_desc.replace( + logical_type=zdo_t.LogicalType.Coordinator + ) + + # The two coordinators have different IEEE addresses + assert current_coord_dev.ieee != old_coord_dev.ieee + + current_coordinator = await zha_device_joined(current_coord_dev) + stale_coordinator = await zha_device_joined(old_coord_dev) + + # Ensure the current ApplicationController's IEEE matches our coordinator's + current_coordinator.gateway.application_controller.state.node_info.ieee = ( + current_coord_dev.ieee + ) + + assert current_coordinator.is_active_coordinator + assert not stale_coordinator.is_active_coordinator diff --git a/tests/components/zha/test_device_action.py b/tests/components/zha/test_device_action.py index 8bee821654d..13e9d789191 100644 --- a/tests/components/zha/test_device_action.py +++ b/tests/components/zha/test_device_action.py @@ -1,23 +1,23 @@ """The test for ZHA device automation actions.""" -from unittest.mock import patch +from unittest.mock import call, patch import pytest from pytest_unordered import unordered -from zigpy.profiles import zha +from zhaquirks.inovelli.VZM31SN import InovelliVZM31SNv11 +import zigpy.profiles.zha from zigpy.zcl.clusters import general, security import zigpy.zcl.foundation as zcl_f from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.zha import DOMAIN -from homeassistant.components.zha.helpers import get_zha_gateway from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE from tests.common import async_get_device_automations, async_mock_service @@ -52,37 +52,66 @@ def required_platforms_only(): yield -async def test_get_actions( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - setup_zha, - zigpy_device_mock, -) -> None: - """Test we get the expected actions from a ZHA device.""" +@pytest.fixture +async def device_ias(hass, zigpy_device_mock, zha_device_joined_restored): + """IAS device fixture.""" - await setup_zha() - gateway = get_zha_gateway(hass) + clusters = [general.Basic, security.IasZone, security.IasWd] + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [c.cluster_id for c in clusters], + SIG_EP_OUTPUT: [general.OnOff.cluster_id], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, + } + }, + ) + + zha_device = await zha_device_joined_restored(zigpy_device) + zha_device.update_available(True) + await hass.async_block_till_done() + return zigpy_device, zha_device + + +@pytest.fixture +async def device_inovelli(hass, zigpy_device_mock, zha_device_joined): + """Inovelli device fixture.""" zigpy_device = zigpy_device_mock( { 1: { SIG_EP_INPUT: [ general.Basic.cluster_id, - security.IasZone.cluster_id, - security.IasWd.cluster_id, + general.Identify.cluster_id, + 
general.OnOff.cluster_id, + general.LevelControl.cluster_id, + 0xFC31, ], - SIG_EP_OUTPUT: [general.OnOff.cluster_id], - SIG_EP_TYPE: zha.DeviceType.IAS_WARNING_DEVICE, - SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.DIMMABLE_LIGHT, } - } + }, + ieee="00:1d:8f:08:0c:90:69:6b", + manufacturer="Inovelli", + model="VZM31-SN", + quirk=InovelliVZM31SNv11, ) - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - ieee_address = str(zigpy_device.ieee) + zha_device = await zha_device_joined(zigpy_device) + zha_device.update_available(True) + await hass.async_block_till_done() + return zigpy_device, zha_device + + +async def test_get_actions( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + device_ias, +) -> None: + """Test we get the expected actions from a ZHA device.""" + + ieee_address = str(device_ias[0].ieee) reg_device = device_registry.async_get_device(identifiers={(DOMAIN, ieee_address)}) siren_level_select = entity_registry.async_get( @@ -139,40 +168,112 @@ async def test_get_actions( assert actions == unordered(expected_actions) -async def test_action( +async def test_get_inovelli_actions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - setup_zha, - zigpy_device_mock, + entity_registry: er.EntityRegistry, + device_inovelli, +) -> None: + """Test we get the expected actions from a ZHA device.""" + + inovelli_ieee_address = str(device_inovelli[0].ieee) + inovelli_reg_device = device_registry.async_get_device( + identifiers={(DOMAIN, inovelli_ieee_address)} + ) + inovelli_button = entity_registry.async_get("button.inovelli_vzm31_sn_identify") + inovelli_light = entity_registry.async_get("light.inovelli_vzm31_sn_light") + + actions = await async_get_device_automations( + hass, DeviceAutomationType.ACTION, inovelli_reg_device.id + ) + + expected_actions = [ + { + "device_id": inovelli_reg_device.id, + "domain": DOMAIN, + "metadata": {}, + "type": "issue_all_led_effect", + }, + { + "device_id": inovelli_reg_device.id, + "domain": DOMAIN, + "metadata": {}, + "type": "issue_individual_led_effect", + }, + { + "device_id": inovelli_reg_device.id, + "domain": Platform.BUTTON, + "entity_id": inovelli_button.id, + "metadata": {"secondary": True}, + "type": "press", + }, + { + "device_id": inovelli_reg_device.id, + "domain": Platform.LIGHT, + "entity_id": inovelli_light.id, + "metadata": {"secondary": False}, + "type": "turn_off", + }, + { + "device_id": inovelli_reg_device.id, + "domain": Platform.LIGHT, + "entity_id": inovelli_light.id, + "metadata": {"secondary": False}, + "type": "turn_on", + }, + { + "device_id": inovelli_reg_device.id, + "domain": Platform.LIGHT, + "entity_id": inovelli_light.id, + "metadata": {"secondary": False}, + "type": "toggle", + }, + { + "device_id": inovelli_reg_device.id, + "domain": Platform.LIGHT, + "entity_id": inovelli_light.id, + "metadata": {"secondary": False}, + "type": "brightness_increase", + }, + { + "device_id": inovelli_reg_device.id, + "domain": Platform.LIGHT, + "entity_id": inovelli_light.id, + "metadata": {"secondary": False}, + "type": "brightness_decrease", + }, + { + "device_id": inovelli_reg_device.id, + "domain": Platform.LIGHT, + "entity_id": inovelli_light.id, + "metadata": {"secondary": False}, + "type": "flash", + }, + ] + + assert actions == unordered(expected_actions) + + +async def test_action( + hass: HomeAssistant, 
device_registry: dr.DeviceRegistry, device_ias, device_inovelli ) -> None: """Test for executing a ZHA device action.""" - await setup_zha() - gateway = get_zha_gateway(hass) + zigpy_device, zha_device = device_ias + inovelli_zigpy_device, inovelli_zha_device = device_inovelli - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - security.IasZone.cluster_id, - security.IasWd.cluster_id, - ], - SIG_EP_OUTPUT: [general.OnOff.cluster_id], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - } - ) zigpy_device.device_automation_triggers = { (SHORT_PRESS, SHORT_PRESS): {COMMAND: COMMAND_SINGLE} } - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - ieee_address = str(zigpy_device.ieee) + ieee_address = str(zha_device.ieee) + inovelli_ieee_address = str(inovelli_zha_device.ieee) reg_device = device_registry.async_get_device(identifiers={(DOMAIN, ieee_address)}) + inovelli_reg_device = device_registry.async_get_device( + identifiers={(DOMAIN, inovelli_ieee_address)} + ) + + cluster = inovelli_zigpy_device.endpoints[1].in_clusters[0xFC31] with patch( "zigpy.zcl.Cluster.request", @@ -197,6 +298,25 @@ async def test_action( "device_id": reg_device.id, "type": "warn", }, + { + "domain": DOMAIN, + "device_id": inovelli_reg_device.id, + "type": "issue_all_led_effect", + "effect_type": "Open_Close", + "duration": 5, + "level": 10, + "color": 41, + }, + { + "domain": DOMAIN, + "device_id": inovelli_reg_device.id, + "type": "issue_individual_led_effect", + "effect_type": "Falling", + "led_number": 1, + "duration": 5, + "level": 10, + "color": 41, + }, ], } ] @@ -206,11 +326,7 @@ async def test_action( await hass.async_block_till_done() calls = async_mock_service(hass, DOMAIN, "warning_device_warn") - cluster_handler = ( - gateway.get_device(zigpy_device.ieee) - .endpoints[1] - .client_cluster_handlers["1:0x0006"] - ) + cluster_handler = zha_device.endpoints[1].client_cluster_handlers["1:0x0006"] cluster_handler.zha_send_event(COMMAND_SINGLE, []) await hass.async_block_till_done() @@ -219,41 +335,44 @@ async def test_action( assert calls[0].service == "warning_device_warn" assert calls[0].data["ieee"] == ieee_address + assert len(cluster.request.mock_calls) == 2 + assert ( + call( + False, + cluster.commands_by_name["led_effect"].id, + cluster.commands_by_name["led_effect"].schema, + 6, + 41, + 10, + 5, + expect_reply=False, + manufacturer=4151, + tsn=None, + ) + in cluster.request.call_args_list + ) + assert ( + call( + False, + cluster.commands_by_name["individual_led_effect"].id, + cluster.commands_by_name["individual_led_effect"].schema, + 1, + 6, + 41, + 10, + 5, + expect_reply=False, + manufacturer=4151, + tsn=None, + ) + in cluster.request.call_args_list + ) -async def test_invalid_zha_event_type( - hass: HomeAssistant, setup_zha, zigpy_device_mock -) -> None: + +async def test_invalid_zha_event_type(hass: HomeAssistant, device_ias) -> None: """Test that unexpected types are not passed to `zha_send_event`.""" - await setup_zha() - gateway = get_zha_gateway(hass) - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - security.IasZone.cluster_id, - security.IasWd.cluster_id, - ], - SIG_EP_OUTPUT: [general.OnOff.cluster_id], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - } - ) - zigpy_device.device_automation_triggers = { - (SHORT_PRESS, 
SHORT_PRESS): {COMMAND: COMMAND_SINGLE} - } - - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - - cluster_handler = ( - gateway.get_device(zigpy_device.ieee) - .endpoints[1] - .client_cluster_handlers["1:0x0006"] - ) + zigpy_device, zha_device = device_ias + cluster_handler = zha_device._endpoints[1].client_cluster_handlers["1:0x0006"] # `zha_send_event` accepts only zigpy responses, lists, and dicts with pytest.raises(TypeError): diff --git a/tests/components/zha/test_device_tracker.py b/tests/components/zha/test_device_tracker.py index ae96de44f17..64360c8b2ff 100644 --- a/tests/components/zha/test_device_tracker.py +++ b/tests/components/zha/test_device_tracker.py @@ -5,22 +5,23 @@ import time from unittest.mock import patch import pytest -from zha.application.registries import SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE -from zigpy.profiles import zha +import zigpy.profiles.zha from zigpy.zcl.clusters import general from homeassistant.components.device_tracker import SourceType -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, +from homeassistant.components.zha.core.registries import ( + SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE, ) -from homeassistant.const import STATE_HOME, STATE_NOT_HOME, Platform +from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util -from .common import find_entity_id, send_attributes_report +from .common import ( + async_enable_traffic, + async_test_rejoin, + find_entity_id, + send_attributes_report, +) from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE from tests.common import async_fire_time_changed @@ -43,41 +44,49 @@ def device_tracker_platforms_only(): yield +@pytest.fixture +def zigpy_device_dt(zigpy_device_mock): + """Device tracker zigpy device.""" + endpoints = { + 1: { + SIG_EP_INPUT: [ + general.Basic.cluster_id, + general.PowerConfiguration.cluster_id, + general.Identify.cluster_id, + general.PollControl.cluster_id, + general.BinaryInput.cluster_id, + ], + SIG_EP_OUTPUT: [general.Identify.cluster_id, general.Ota.cluster_id], + SIG_EP_TYPE: SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE, + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + } + } + return zigpy_device_mock(endpoints) + + async def test_device_tracker( - hass: HomeAssistant, setup_zha, zigpy_device_mock + hass: HomeAssistant, zha_device_joined_restored, zigpy_device_dt ) -> None: """Test ZHA device tracker platform.""" - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.PowerConfiguration.cluster_id, - general.Identify.cluster_id, - general.PollControl.cluster_id, - general.BinaryInput.cluster_id, - ], - SIG_EP_OUTPUT: [general.Identify.cluster_id, general.Ota.cluster_id], - SIG_EP_TYPE: SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - } - ) - - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - entity_id = find_entity_id(Platform.DEVICE_TRACKER, zha_device_proxy, hass) - 
cluster = zigpy_device.endpoints[1].power + zha_device = await zha_device_joined_restored(zigpy_device_dt) + cluster = zigpy_device_dt.endpoints.get(1).power + entity_id = find_entity_id(Platform.DEVICE_TRACKER, zha_device, hass) assert entity_id is not None + assert hass.states.get(entity_id).state == STATE_NOT_HOME + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the device tracker was created and that it is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + zigpy_device_dt.last_seen = time.time() - 120 + next_update = dt_util.utcnow() + timedelta(seconds=30) + async_fire_time_changed(hass, next_update) + await hass.async_block_till_done() + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + # test that the state has changed from unavailable to not home assert hass.states.get(entity_id).state == STATE_NOT_HOME @@ -86,7 +95,7 @@ async def test_device_tracker( hass, cluster, {0x0000: 0, 0x0020: 23, 0x0021: 200, 0x0001: 2} ) - zigpy_device.last_seen = time.time() + 10 + zigpy_device_dt.last_seen = time.time() + 10 next_update = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, next_update) await hass.async_block_till_done() @@ -98,3 +107,7 @@ async def test_device_tracker( assert entity.is_connected is True assert entity.source_type == SourceType.ROUTER assert entity.battery_level == 100 + + # test adding device tracker to the network and HA + await async_test_rejoin(hass, zigpy_device_dt, [cluster], (2,)) + assert hass.states.get(entity_id).state == STATE_HOME diff --git a/tests/components/zha/test_device_trigger.py b/tests/components/zha/test_device_trigger.py index 09b2d155547..b43392af61a 100644 --- a/tests/components/zha/test_device_trigger.py +++ b/tests/components/zha/test_device_trigger.py @@ -1,26 +1,35 @@ """ZHA device automation trigger tests.""" +from datetime import timedelta +import time from unittest.mock import patch import pytest -from zha.application.const import ATTR_ENDPOINT_ID from zigpy.application import ControllerApplication -from zigpy.device import Device as ZigpyDevice import zigpy.profiles.zha -import zigpy.types +from zigpy.zcl.clusters import general from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.device_automation.exceptions import ( InvalidDeviceAutomationConfig, ) -from homeassistant.components.zha.helpers import get_zha_gateway +from homeassistant.components.zha.core.const import ATTR_ENDPOINT_ID from homeassistant.const import Platform from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component +import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry, async_get_device_automations +from .common import async_enable_traffic +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + async_get_device_automations, + async_mock_service, +) @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -42,6 +51,16 @@ LONG_PRESS = "remote_button_long_press" LONG_RELEASE = "remote_button_long_release" +SWITCH_SIGNATURE = { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id], + SIG_EP_OUTPUT: [general.OnOff.cluster_id], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: 
zigpy.profiles.zha.PROFILE_ID, + } +} + + @pytest.fixture(autouse=True) def sensor_platforms_only(): """Only set up the sensor platform and required base platforms to speed up tests.""" @@ -56,21 +75,31 @@ def _same_lists(list_a, list_b): return all(item in list_b for item in list_a) +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + +@pytest.fixture +async def mock_devices(hass, zigpy_device_mock, zha_device_joined_restored): + """IAS device fixture.""" + + zigpy_device = zigpy_device_mock(SWITCH_SIGNATURE) + + zha_device = await zha_device_joined_restored(zigpy_device) + zha_device.update_available(True) + await hass.async_block_till_done() + return zigpy_device, zha_device + + async def test_triggers( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - setup_zha, + hass: HomeAssistant, device_registry: dr.DeviceRegistry, mock_devices ) -> None: """Test ZHA device triggers.""" - await setup_zha() - gateway = get_zha_gateway(hass) + zigpy_device, zha_device = mock_devices - zigpy_device = ZigpyDevice( - application=gateway.application_controller, - ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), - nwk=0x1234, - ) zigpy_device.device_automation_triggers = { (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, (DOUBLE_PRESS, DOUBLE_PRESS): {COMMAND: COMMAND_DOUBLE}, @@ -79,13 +108,9 @@ async def test_triggers( (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, } - zha_device = gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zha_device.device) - await hass.async_block_till_done(wait_background_tasks=True) + ieee_address = str(zha_device.ieee) - reg_device = device_registry.async_get_device( - identifiers={("zha", str(zha_device.ieee))} - ) + reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) triggers = await async_get_device_automations( hass, DeviceAutomationType.TRIGGER, reg_device.id @@ -145,26 +170,14 @@ async def test_triggers( async def test_no_triggers( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, setup_zha + hass: HomeAssistant, device_registry: dr.DeviceRegistry, mock_devices ) -> None: """Test ZHA device with no triggers.""" - await setup_zha() - gateway = get_zha_gateway(hass) - zigpy_device = ZigpyDevice( - application=gateway.application_controller, - ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), - nwk=0x1234, - ) - zigpy_device.device_automation_triggers = {} + _, zha_device = mock_devices + ieee_address = str(zha_device.ieee) - zha_device = gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zha_device.device) - await hass.async_block_till_done(wait_background_tasks=True) - - reg_device = device_registry.async_get_device( - identifiers={("zha", str(zha_device.ieee))} - ) + reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) triggers = await async_get_device_automations( hass, DeviceAutomationType.TRIGGER, reg_device.id @@ -184,21 +197,12 @@ async def test_no_triggers( async def test_if_fires_on_event( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], - setup_zha, + mock_devices, + calls: list[ServiceCall], ) -> None: """Test for remote triggers firing.""" - await setup_zha() - gateway = get_zha_gateway(hass) - - zigpy_device = ZigpyDevice( - application=gateway.application_controller, - ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), - 
nwk=0x1234, - ) - ep = zigpy_device.add_endpoint(1) - ep.add_output_cluster(0x0006) + zigpy_device, zha_device = mock_devices zigpy_device.device_automation_triggers = { (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, @@ -208,13 +212,8 @@ async def test_if_fires_on_event( (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, } - zha_device = gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zha_device.device) - await hass.async_block_till_done(wait_background_tasks=True) - - reg_device = device_registry.async_get_device( - identifiers={("zha", str(zha_device.ieee))} - ) + ieee_address = str(zha_device.ieee) + reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) assert await async_setup_component( hass, @@ -240,46 +239,35 @@ async def test_if_fires_on_event( await hass.async_block_till_done() - zha_device.emit_zha_event( - { - "unique_id": f"{zha_device.ieee}:1:0x0006", - "endpoint_id": 1, - "cluster_id": 0x0006, - "command": COMMAND_SINGLE, - "args": [], - "params": {}, - }, - ) + cluster_handler = zha_device.endpoints[1].client_cluster_handlers["1:0x0006"] + cluster_handler.zha_send_event(COMMAND_SINGLE, []) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["message"] == "service called" + assert len(calls) == 1 + assert calls[0].data["message"] == "service called" async def test_device_offline_fires( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - service_calls: list[ServiceCall], - setup_zha, + zigpy_device_mock, + zha_device_restored, + calls: list[ServiceCall], ) -> None: """Test for device offline triggers firing.""" - await setup_zha() - gateway = get_zha_gateway(hass) - - zigpy_device = ZigpyDevice( - application=gateway.application_controller, - ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), - nwk=0x1234, + zigpy_device = zigpy_device_mock( + { + 1: { + "in_clusters": [general.Basic.cluster_id], + "out_clusters": [general.OnOff.cluster_id], + "device_type": 0, + } + } ) - zha_device = gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zha_device.device) - await hass.async_block_till_done(wait_background_tasks=True) - - reg_device = device_registry.async_get_device( - identifiers={("zha", str(zha_device.ieee))} - ) + zha_device = await zha_device_restored(zigpy_device, last_seen=time.time()) + await async_enable_traffic(hass, [zha_device]) + await hass.async_block_till_done() assert await async_setup_component( hass, @@ -288,7 +276,7 @@ async def test_device_offline_fires( automation.DOMAIN: [ { "trigger": { - "device_id": reg_device.id, + "device_id": zha_device.device_id, "domain": "zha", "platform": "device", "type": "device_offline", @@ -303,39 +291,44 @@ async def test_device_offline_fires( }, ) + await hass.async_block_till_done() assert zha_device.available is True - zha_device.available = False - zha_device.emit_zha_event({"device_event_type": "device_offline"}) + + zigpy_device.last_seen = time.time() - zha_device.consider_unavailable_time - 2 + + # there are 3 checkins to perform before marking the device unavailable + future = dt_util.utcnow() + timedelta(seconds=90) + async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["message"] == "service called" + future = dt_util.utcnow() + timedelta(seconds=90) + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + future = dt_util.utcnow() + timedelta( + 
seconds=zha_device.consider_unavailable_time + 100 + ) + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert zha_device.available is False + assert len(calls) == 1 + assert calls[0].data["message"] == "service called" async def test_exception_no_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, + mock_devices, + calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, - setup_zha, ) -> None: """Test for exception when validating device triggers.""" - await setup_zha() - gateway = get_zha_gateway(hass) + _, zha_device = mock_devices - zigpy_device = ZigpyDevice( - application=gateway.application_controller, - ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), - nwk=0x1234, - ) - - zha_device = gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zha_device.device) - await hass.async_block_till_done(wait_background_tasks=True) - - reg_device = device_registry.async_get_device( - identifiers={("zha", str(zha_device.ieee))} - ) + ieee_address = str(zha_device.ieee) + reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) await async_setup_component( hass, @@ -368,19 +361,14 @@ async def test_exception_no_triggers( async def test_exception_bad_trigger( hass: HomeAssistant, device_registry: dr.DeviceRegistry, + mock_devices, + calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, - setup_zha, ) -> None: """Test for exception when validating device triggers.""" - await setup_zha() - gateway = get_zha_gateway(hass) + zigpy_device, zha_device = mock_devices - zigpy_device = ZigpyDevice( - application=gateway.application_controller, - ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), - nwk=0x1234, - ) zigpy_device.device_automation_triggers = { (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, (DOUBLE_PRESS, DOUBLE_PRESS): {COMMAND: COMMAND_DOUBLE}, @@ -389,13 +377,8 @@ async def test_exception_bad_trigger( (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, } - zha_device = gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zha_device.device) - await hass.async_block_till_done(wait_background_tasks=True) - - reg_device = device_registry.async_get_device( - identifiers={("zha", str(zha_device.ieee))} - ) + ieee_address = str(zha_device.ieee) + reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) await async_setup_component( hass, @@ -429,37 +412,23 @@ async def test_validate_trigger_config_missing_info( hass: HomeAssistant, device_registry: dr.DeviceRegistry, config_entry: MockConfigEntry, + zigpy_device_mock, + mock_zigpy_connect: ControllerApplication, + zha_device_joined, caplog: pytest.LogCaptureFixture, - setup_zha, ) -> None: """Test device triggers referring to a missing device.""" - await setup_zha() - gateway = get_zha_gateway(hass) - - zigpy_device = ZigpyDevice( - application=gateway.application_controller, - ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), - nwk=0x1234, - ) - zigpy_device.device_automation_triggers = { - (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, - (DOUBLE_PRESS, DOUBLE_PRESS): {COMMAND: COMMAND_DOUBLE}, - (SHORT_PRESS, SHORT_PRESS): {COMMAND: COMMAND_SINGLE}, - (LONG_PRESS, LONG_PRESS): {COMMAND: COMMAND_HOLD}, - (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, - } - - zha_device = gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zha_device.device) - await hass.async_block_till_done(wait_background_tasks=True) + # Join a 
device + switch = zigpy_device_mock(SWITCH_SIGNATURE) + await zha_device_joined(switch) # After we unload the config entry, trigger info was not cached on startup, nor can # it be pulled from the current device, making it impossible to validate triggers await hass.config_entries.async_unload(config_entry.entry_id) reg_device = device_registry.async_get_device( - identifiers={("zha", str(zha_device.ieee))} + identifiers={("zha", str(switch.ieee))} ) assert await async_setup_component( @@ -496,32 +465,16 @@ async def test_validate_trigger_config_unloaded_bad_info( hass: HomeAssistant, device_registry: dr.DeviceRegistry, config_entry: MockConfigEntry, + zigpy_device_mock, + mock_zigpy_connect: ControllerApplication, + zha_device_joined, caplog: pytest.LogCaptureFixture, - zigpy_app_controller: ControllerApplication, - setup_zha, ) -> None: """Test device triggers referring to a missing device.""" - await setup_zha() - gateway = get_zha_gateway(hass) - - zigpy_device = ZigpyDevice( - application=gateway.application_controller, - ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), - nwk=0x1234, - ) - zigpy_device.device_automation_triggers = { - (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, - (DOUBLE_PRESS, DOUBLE_PRESS): {COMMAND: COMMAND_DOUBLE}, - (SHORT_PRESS, SHORT_PRESS): {COMMAND: COMMAND_SINGLE}, - (LONG_PRESS, LONG_PRESS): {COMMAND: COMMAND_HOLD}, - (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, - } - - zigpy_app_controller.devices[zigpy_device.ieee] = zigpy_device - zha_device = gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zha_device.device) - await hass.async_block_till_done(wait_background_tasks=True) + # Join a device + switch = zigpy_device_mock(SWITCH_SIGNATURE) + await zha_device_joined(switch) # After we unload the config entry, trigger info was not cached on startup, nor can # it be pulled from the current device, making it impossible to validate triggers @@ -529,12 +482,11 @@ async def test_validate_trigger_config_unloaded_bad_info( # Reload ZHA to persist the device info in the cache await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - + await hass.async_block_till_done() await hass.config_entries.async_unload(config_entry.entry_id) reg_device = device_registry.async_get_device( - identifiers={("zha", str(zha_device.ieee))} + identifiers={("zha", str(switch.ieee))} ) assert await async_setup_component( diff --git a/tests/components/zha/test_diagnostics.py b/tests/components/zha/test_diagnostics.py index 0e78a9a1b5b..4bb30a5fc8c 100644 --- a/tests/components/zha/test_diagnostics.py +++ b/tests/components/zha/test_diagnostics.py @@ -3,17 +3,13 @@ from unittest.mock import patch import pytest -from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props from zigpy.profiles import zha from zigpy.zcl.clusters import security -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, -) +from homeassistant.components.diagnostics import REDACTED +from homeassistant.components.zha.core.device import ZHADevice +from homeassistant.components.zha.core.helpers import get_zha_gateway +from homeassistant.components.zha.diagnostics import KEYS_TO_REDACT from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -27,6 +23,14 @@ from tests.components.diagnostics import ( ) from tests.typing 
import ClientSessionGenerator +CONFIG_ENTRY_DIAGNOSTICS_KEYS = [ + "config", + "config_entry", + "application_state", + "versions", + "devices", +] + @pytest.fixture(autouse=True) def required_platforms_only(): @@ -37,47 +41,64 @@ def required_platforms_only(): yield +@pytest.fixture +def zigpy_device(zigpy_device_mock): + """Device tracker zigpy device.""" + endpoints = { + 1: { + SIG_EP_INPUT: [security.IasAce.cluster_id, security.IasZone.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + return zigpy_device_mock( + endpoints, node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00" + ) + + async def test_diagnostics_for_config_entry( hass: HomeAssistant, hass_client: ClientSessionGenerator, config_entry: MockConfigEntry, - setup_zha, - zigpy_device_mock, - snapshot: SnapshotAssertion, + zha_device_joined, + zigpy_device, ) -> None: """Test diagnostics for config entry.""" + await zha_device_joined(zigpy_device) - await setup_zha() gateway = get_zha_gateway(hass) - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [security.IasAce.cluster_id, security.IasZone.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee="01:2d:6f:00:0a:90:69:e8", - node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", - ) - - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - scan = {c: c for c in range(11, 26 + 1)} - gateway.application_controller.energy_scan.side_effect = None - gateway.application_controller.energy_scan.return_value = scan - diagnostics_data = await get_diagnostics_for_config_entry( - hass, hass_client, config_entry - ) + with patch.object(gateway.application_controller, "energy_scan", return_value=scan): + diagnostics_data = await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) - assert diagnostics_data == snapshot( - exclude=props("created_at", "modified_at", "entry_id", "versions") - ) + for key in CONFIG_ENTRY_DIAGNOSTICS_KEYS: + assert key in diagnostics_data + assert diagnostics_data[key] is not None + + # Energy scan results are presented as a percentage. JSON object keys also must be + # strings, not integers. 
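Illustrative aside, not part of the patch: the assertion that follows checks how the diagnostics view serialises the zigpy energy scan. JSON object keys must be strings, and the raw 0-255 channel readings are reported as percentages, so the expected payload can be rebuilt from the same `scan` dict the test constructs:

# Fake energy scan: channel -> reading; the test reuses the channel number as the value.
scan = {channel: channel for channel in range(11, 26 + 1)}

# Stringify the keys for JSON and scale the 0-255 readings to percentages.
expected = {str(channel): 100 * value / 255 for channel, value in scan.items()}

assert expected["26"] == 100 * 26 / 255  # roughly 10.2%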
+ assert diagnostics_data["energy_scan"] == { + str(k): 100 * v / 255 for k, v in scan.items() + } + + assert isinstance(diagnostics_data["devices"], list) + assert len(diagnostics_data["devices"]) == 2 + assert diagnostics_data["devices"] == [ + { + "manufacturer": "Coordinator Manufacturer", + "model": "Coordinator Model", + "logical_type": "Coordinator", + }, + { + "manufacturer": "FakeManufacturer", + "model": "FakeModel", + "logical_type": "EndDevice", + }, + ] async def test_diagnostics_for_device( @@ -85,41 +106,19 @@ async def test_diagnostics_for_device( hass_client: ClientSessionGenerator, device_registry: dr.DeviceRegistry, config_entry: MockConfigEntry, - setup_zha, - zigpy_device_mock, - snapshot: SnapshotAssertion, + zha_device_joined, + zigpy_device, ) -> None: """Test diagnostics for device.""" - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [security.IasAce.cluster_id, security.IasZone.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee="01:2d:6f:00:0a:90:69:e8", - node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", - ) - - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + zha_device: ZHADevice = await zha_device_joined(zigpy_device) # add unknown unsupported attribute with id and name - zha_device_proxy.device.device.endpoints[1].in_clusters[ + zha_device.device.endpoints[1].in_clusters[ security.IasAce.cluster_id ].unsupported_attributes.update({0x1000, "unknown_attribute_name"}) # add known unsupported attributes with id and name - zha_device_proxy.device.device.endpoints[1].in_clusters[ + zha_device.device.endpoints[1].in_clusters[ security.IasZone.cluster_id ].unsupported_attributes.update( { @@ -129,11 +128,17 @@ async def test_diagnostics_for_device( ) device = device_registry.async_get_device( - identifiers={("zha", str(zha_device_proxy.device.ieee))} + identifiers={("zha", str(zha_device.ieee))} ) assert device diagnostics_data = await get_diagnostics_for_device( hass, hass_client, config_entry, device ) - - assert diagnostics_data == snapshot(exclude=props("device_reg_id", "last_seen")) + assert diagnostics_data + device_info: dict = zha_device.zha_device_info + for key in device_info: + assert key in diagnostics_data + if key not in KEYS_TO_REDACT: + assert key in diagnostics_data + else: + assert diagnostics_data[key] == REDACTED diff --git a/tests/components/zha/test_discover.py b/tests/components/zha/test_discover.py new file mode 100644 index 00000000000..c59acc3395f --- /dev/null +++ b/tests/components/zha/test_discover.py @@ -0,0 +1,1100 @@ +"""Test ZHA device discovery.""" + +from collections.abc import Callable +import enum +import itertools +import re +from typing import Any +from unittest import mock +from unittest.mock import AsyncMock, Mock, patch + +import pytest +from zhaquirks.ikea import PowerConfig1CRCluster, ScenesCluster +from zhaquirks.xiaomi import ( + BasicCluster, + LocalIlluminanceMeasurementCluster, + XiaomiPowerConfigurationPercent, +) +from zhaquirks.xiaomi.aqara.driver_curtain_e1 import ( + WindowCoveringE1, + XiaomiAqaraDriverE1, +) +from zigpy.const import SIG_ENDPOINTS, SIG_MANUFACTURER, SIG_MODEL, 
SIG_NODE_DESC +import zigpy.profiles.zha +import zigpy.quirks +from zigpy.quirks.v2 import ( + BinarySensorMetadata, + EntityMetadata, + EntityType, + NumberMetadata, + QuirksV2RegistryEntry, + ZCLCommandButtonMetadata, + ZCLSensorMetadata, + add_to_registry_v2, +) +from zigpy.quirks.v2.homeassistant import UnitOfTime +import zigpy.types +from zigpy.zcl import ClusterType +import zigpy.zcl.clusters.closures +import zigpy.zcl.clusters.general +import zigpy.zcl.clusters.security +import zigpy.zcl.foundation as zcl_f + +from homeassistant.components.zha.core import cluster_handlers +import homeassistant.components.zha.core.const as zha_const +from homeassistant.components.zha.core.device import ZHADevice +import homeassistant.components.zha.core.discovery as disc +from homeassistant.components.zha.core.endpoint import Endpoint +from homeassistant.components.zha.core.helpers import get_zha_gateway +import homeassistant.components.zha.core.registries as zha_regs +from homeassistant.const import STATE_OFF, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.entity_platform import EntityPlatform +from homeassistant.util.json import load_json + +from .common import find_entity_id, update_attribute_cache +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from .zha_devices_list import ( + DEV_SIG_ATTRIBUTES, + DEV_SIG_CLUSTER_HANDLERS, + DEV_SIG_ENT_MAP, + DEV_SIG_ENT_MAP_CLASS, + DEV_SIG_ENT_MAP_ID, + DEV_SIG_EVT_CLUSTER_HANDLERS, + DEVICES, +) + +NO_TAIL_ID = re.compile("_\\d$") +UNIQUE_ID_HD = re.compile(r"^(([\da-fA-F]{2}:){7}[\da-fA-F]{2}-\d{1,3})", re.X) + +IGNORE_SUFFIXES = [ + zigpy.zcl.clusters.general.OnOff.StartUpOnOff.__name__, + "on_off_transition_time", + "on_level", + "on_transition_time", + "off_transition_time", + "default_move_rate", + "start_up_current_level", + "counter", +] + + +def contains_ignored_suffix(unique_id: str) -> bool: + """Return true if the unique_id ends with an ignored suffix.""" + return any(suffix.lower() in unique_id.lower() for suffix in IGNORE_SUFFIXES) + + +@patch( + "zigpy.zcl.clusters.general.Identify.request", + new=AsyncMock(return_value=[mock.sentinel.data, zcl_f.Status.SUCCESS]), +) +# We do this here because we are testing ZHA discovery logic. Point being we want to ensure that +# all discovered entities are dispatched for creation. In order to test this we need the entities +# added to HA. 
So we ensure that they are all enabled even though they won't necessarily be in reality +# at runtime +@patch( + "homeassistant.components.zha.entity.ZhaEntity.entity_registry_enabled_default", + new=Mock(return_value=True), +) +@pytest.mark.parametrize("device", DEVICES) +async def test_devices( + device, + hass_disable_services, + zigpy_device_mock, + zha_device_joined_restored, +) -> None: + """Test device discovery.""" + zigpy_device = zigpy_device_mock( + endpoints=device[SIG_ENDPOINTS], + ieee="00:11:22:33:44:55:66:77", + manufacturer=device[SIG_MANUFACTURER], + model=device[SIG_MODEL], + node_descriptor=device[SIG_NODE_DESC], + attributes=device.get(DEV_SIG_ATTRIBUTES), + patch_cluster=False, + ) + + cluster_identify = _get_first_identify_cluster(zigpy_device) + if cluster_identify: + cluster_identify.request.reset_mock() + + with patch( + "homeassistant.helpers.entity_platform.EntityPlatform._async_schedule_add_entities_for_entry", + side_effect=EntityPlatform._async_schedule_add_entities_for_entry, + autospec=True, + ) as mock_add_entities: + zha_dev = await zha_device_joined_restored(zigpy_device) + await hass_disable_services.async_block_till_done() + + if cluster_identify: + # We only identify on join + should_identify = ( + zha_device_joined_restored.name == "zha_device_joined" + and not zigpy_device.skip_configuration + ) + + if should_identify: + assert cluster_identify.request.mock_calls == [ + mock.call( + False, + cluster_identify.commands_by_name["trigger_effect"].id, + cluster_identify.commands_by_name["trigger_effect"].schema, + effect_id=zigpy.zcl.clusters.general.Identify.EffectIdentifier.Okay, + effect_variant=( + zigpy.zcl.clusters.general.Identify.EffectVariant.Default + ), + expect_reply=True, + manufacturer=None, + tsn=None, + ) + ] + else: + assert cluster_identify.request.mock_calls == [] + + event_cluster_handlers = { + ch.id + for endpoint in zha_dev._endpoints.values() + for ch in endpoint.client_cluster_handlers.values() + } + assert event_cluster_handlers == set(device[DEV_SIG_EVT_CLUSTER_HANDLERS]) + + # Keep track of unhandled entities: they should always be ones we explicitly ignore + created_entities = { + entity.entity_id: entity + for mock_call in mock_add_entities.mock_calls + for entity in mock_call.args[1] + } + unhandled_entities = set(created_entities.keys()) + entity_registry = er.async_get(hass_disable_services) + + for (platform, unique_id), ent_info in device[DEV_SIG_ENT_MAP].items(): + no_tail_id = NO_TAIL_ID.sub("", ent_info[DEV_SIG_ENT_MAP_ID]) + ha_entity_id = entity_registry.async_get_entity_id(platform, "zha", unique_id) + message1 = f"No entity found for platform[{platform}] unique_id[{unique_id}]" + message2 = f"no_tail_id[{no_tail_id}] with entity_id[{ha_entity_id}]" + assert ha_entity_id is not None, f"{message1} {message2}" + assert ha_entity_id.startswith(no_tail_id) + + entity = created_entities[ha_entity_id] + unhandled_entities.remove(ha_entity_id) + + assert entity.platform.domain == platform + assert type(entity).__name__ == ent_info[DEV_SIG_ENT_MAP_CLASS] + # unique_id used for discover is the same for "multi entities" + assert unique_id == entity.unique_id + assert {ch.name for ch in entity.cluster_handlers.values()} == set( + ent_info[DEV_SIG_CLUSTER_HANDLERS] + ) + + # All unhandled entities should be ones we explicitly ignore + for entity_id in unhandled_entities: + domain = entity_id.split(".")[0] + assert domain in zha_const.PLATFORMS + assert contains_ignored_suffix(entity_id) + + +def 
_get_first_identify_cluster(zigpy_device): + for endpoint in list(zigpy_device.endpoints.values())[1:]: + if hasattr(endpoint, "identify"): + return endpoint.identify + + +@mock.patch( + "homeassistant.components.zha.core.discovery.ProbeEndpoint.discover_by_device_type" +) +@mock.patch( + "homeassistant.components.zha.core.discovery.ProbeEndpoint.discover_by_cluster_id" +) +def test_discover_entities(m1, m2) -> None: + """Test discover endpoint class method.""" + endpoint = mock.MagicMock() + disc.PROBE.discover_entities(endpoint) + assert m1.call_count == 1 + assert m1.call_args[0][0] is endpoint + assert m2.call_count == 1 + assert m2.call_args[0][0] is endpoint + + +@pytest.mark.parametrize( + ("device_type", "platform", "hit"), + [ + (zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT, Platform.LIGHT, True), + (zigpy.profiles.zha.DeviceType.ON_OFF_BALLAST, Platform.SWITCH, True), + (zigpy.profiles.zha.DeviceType.SMART_PLUG, Platform.SWITCH, True), + (0xFFFF, None, False), + ], +) +def test_discover_by_device_type(device_type, platform, hit) -> None: + """Test entity discovery by device type.""" + + endpoint = mock.MagicMock(spec_set=Endpoint) + ep_mock = mock.PropertyMock() + ep_mock.return_value.profile_id = 0x0104 + ep_mock.return_value.device_type = device_type + type(endpoint).zigpy_endpoint = ep_mock + + get_entity_mock = mock.MagicMock( + return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed) + ) + with mock.patch( + "homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity", + get_entity_mock, + ): + disc.PROBE.discover_by_device_type(endpoint) + if hit: + assert get_entity_mock.call_count == 1 + assert endpoint.claim_cluster_handlers.call_count == 1 + assert endpoint.claim_cluster_handlers.call_args[0][0] is mock.sentinel.claimed + assert endpoint.async_new_entity.call_count == 1 + assert endpoint.async_new_entity.call_args[0][0] == platform + assert endpoint.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls + + +def test_discover_by_device_type_override() -> None: + """Test entity discovery by device type overriding.""" + + endpoint = mock.MagicMock(spec_set=Endpoint) + ep_mock = mock.PropertyMock() + ep_mock.return_value.profile_id = 0x0104 + ep_mock.return_value.device_type = 0x0100 + type(endpoint).zigpy_endpoint = ep_mock + + overrides = {endpoint.unique_id: {"type": Platform.SWITCH}} + get_entity_mock = mock.MagicMock( + return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed) + ) + with ( + mock.patch( + "homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity", + get_entity_mock, + ), + mock.patch.dict(disc.PROBE._device_configs, overrides, clear=True), + ): + disc.PROBE.discover_by_device_type(endpoint) + assert get_entity_mock.call_count == 1 + assert endpoint.claim_cluster_handlers.call_count == 1 + assert endpoint.claim_cluster_handlers.call_args[0][0] is mock.sentinel.claimed + assert endpoint.async_new_entity.call_count == 1 + assert endpoint.async_new_entity.call_args[0][0] == Platform.SWITCH + assert endpoint.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls + + +def test_discover_probe_single_cluster() -> None: + """Test entity discovery by single cluster.""" + + endpoint = mock.MagicMock(spec_set=Endpoint) + ep_mock = mock.PropertyMock() + ep_mock.return_value.profile_id = 0x0104 + ep_mock.return_value.device_type = 0x0100 + type(endpoint).zigpy_endpoint = ep_mock + + get_entity_mock = mock.MagicMock( + return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed) + ) + cluster_handler_mock = 
mock.MagicMock(spec_set=cluster_handlers.ClusterHandler) + with mock.patch( + "homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity", + get_entity_mock, + ): + disc.PROBE.probe_single_cluster(Platform.SWITCH, cluster_handler_mock, endpoint) + + assert get_entity_mock.call_count == 1 + assert endpoint.claim_cluster_handlers.call_count == 1 + assert endpoint.claim_cluster_handlers.call_args[0][0] is mock.sentinel.claimed + assert endpoint.async_new_entity.call_count == 1 + assert endpoint.async_new_entity.call_args[0][0] == Platform.SWITCH + assert endpoint.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls + assert endpoint.async_new_entity.call_args[0][3] == mock.sentinel.claimed + + +@pytest.mark.parametrize("device_info", DEVICES) +async def test_discover_endpoint( + device_info: dict[str, Any], + zha_device_mock: Callable[..., ZHADevice], + hass: HomeAssistant, +) -> None: + """Test device discovery.""" + + with mock.patch( + "homeassistant.components.zha.core.endpoint.Endpoint.async_new_entity" + ) as new_ent: + device = zha_device_mock( + device_info[SIG_ENDPOINTS], + manufacturer=device_info[SIG_MANUFACTURER], + model=device_info[SIG_MODEL], + node_desc=device_info[SIG_NODE_DESC], + patch_cluster=True, + ) + + assert device_info[DEV_SIG_EVT_CLUSTER_HANDLERS] == sorted( + ch.id + for endpoint in device._endpoints.values() + for ch in endpoint.client_cluster_handlers.values() + ) + + # build a dict of entity_class -> (platform, unique_id, cluster_handlers) tuple + ha_ent_info = {} + for call in new_ent.call_args_list: + platform, entity_cls, unique_id, cluster_handlers = call[0] + if not contains_ignored_suffix(unique_id): + unique_id_head = UNIQUE_ID_HD.match(unique_id).group( + 0 + ) # ieee + endpoint_id + ha_ent_info[(unique_id_head, entity_cls.__name__)] = ( + platform, + unique_id, + cluster_handlers, + ) + + for platform_id, ent_info in device_info[DEV_SIG_ENT_MAP].items(): + platform, unique_id = platform_id + + test_ent_class = ent_info[DEV_SIG_ENT_MAP_CLASS] + test_unique_id_head = UNIQUE_ID_HD.match(unique_id).group(0) + assert (test_unique_id_head, test_ent_class) in ha_ent_info + + entity_platform, entity_unique_id, entity_cluster_handlers = ha_ent_info[ + (test_unique_id_head, test_ent_class) + ] + assert platform is entity_platform.value + # unique_id used for discover is the same for "multi entities" + assert unique_id.startswith(entity_unique_id) + assert {ch.name for ch in entity_cluster_handlers} == set( + ent_info[DEV_SIG_CLUSTER_HANDLERS] + ) + + device.async_cleanup_handles() + + +def _ch_mock(cluster): + """Return mock of a cluster_handler with a cluster.""" + cluster_handler = mock.MagicMock() + type(cluster_handler).cluster = mock.PropertyMock( + return_value=cluster(mock.MagicMock()) + ) + return cluster_handler + + +@mock.patch( + ( + "homeassistant.components.zha.core.discovery.ProbeEndpoint" + ".handle_on_off_output_cluster_exception" + ), + new=mock.MagicMock(), +) +@mock.patch( + "homeassistant.components.zha.core.discovery.ProbeEndpoint.probe_single_cluster" +) +def _test_single_input_cluster_device_class(probe_mock): + """Test SINGLE_INPUT_CLUSTER_DEVICE_CLASS matching by cluster id or class.""" + + door_ch = _ch_mock(zigpy.zcl.clusters.closures.DoorLock) + cover_ch = _ch_mock(zigpy.zcl.clusters.closures.WindowCovering) + multistate_ch = _ch_mock(zigpy.zcl.clusters.general.MultistateInput) + + class QuirkedIAS(zigpy.quirks.CustomCluster, zigpy.zcl.clusters.security.IasZone): + pass + + ias_ch = _ch_mock(QuirkedIAS) + + class 
_Analog(zigpy.quirks.CustomCluster, zigpy.zcl.clusters.general.AnalogInput): + pass + + analog_ch = _ch_mock(_Analog) + + endpoint = mock.MagicMock(spec_set=Endpoint) + endpoint.unclaimed_cluster_handlers.return_value = [ + door_ch, + cover_ch, + multistate_ch, + ias_ch, + ] + + disc.ProbeEndpoint().discover_by_cluster_id(endpoint) + assert probe_mock.call_count == len(endpoint.unclaimed_cluster_handlers()) + probes = ( + (Platform.LOCK, door_ch), + (Platform.COVER, cover_ch), + (Platform.SENSOR, multistate_ch), + (Platform.BINARY_SENSOR, ias_ch), + (Platform.SENSOR, analog_ch), + ) + for call, details in zip(probe_mock.call_args_list, probes, strict=False): + platform, ch = details + assert call[0][0] == platform + assert call[0][1] == ch + + +def test_single_input_cluster_device_class_by_cluster_class() -> None: + """Test SINGLE_INPUT_CLUSTER_DEVICE_CLASS matching by cluster id or class.""" + mock_reg = { + zigpy.zcl.clusters.closures.DoorLock.cluster_id: Platform.LOCK, + zigpy.zcl.clusters.closures.WindowCovering.cluster_id: Platform.COVER, + zigpy.zcl.clusters.general.AnalogInput: Platform.SENSOR, + zigpy.zcl.clusters.general.MultistateInput: Platform.SENSOR, + zigpy.zcl.clusters.security.IasZone: Platform.BINARY_SENSOR, + } + + with mock.patch.dict( + zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS, mock_reg, clear=True + ): + _test_single_input_cluster_device_class() + + +@pytest.mark.parametrize( + ("override", "entity_id"), + [ + (None, "light.manufacturer_model_light"), + ("switch", "switch.manufacturer_model_switch"), + ], +) +async def test_device_override( + hass_disable_services, zigpy_device_mock, setup_zha, override, entity_id +) -> None: + """Test device discovery override.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.COLOR_DIMMABLE_LIGHT, + "endpoint_id": 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 64513], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + } + }, + "00:11:22:33:44:55:66:77", + "manufacturer", + "model", + patch_cluster=False, + ) + + if override is not None: + override = {"device_config": {"00:11:22:33:44:55:66:77-1": {"type": override}}} + + await setup_zha(override) + assert hass_disable_services.states.get(entity_id) is None + zha_gateway = get_zha_gateway(hass_disable_services) + await zha_gateway.async_device_initialized(zigpy_device) + await hass_disable_services.async_block_till_done() + assert hass_disable_services.states.get(entity_id) is not None + + +async def test_group_probe_cleanup_called( + hass_disable_services, setup_zha, config_entry +) -> None: + """Test cleanup happens when ZHA is unloaded.""" + await setup_zha() + disc.GROUP_PROBE.cleanup = mock.Mock(wraps=disc.GROUP_PROBE.cleanup) + await hass_disable_services.config_entries.async_unload(config_entry.entry_id) + await hass_disable_services.async_block_till_done() + disc.GROUP_PROBE.cleanup.assert_called() + + +async def test_quirks_v2_entity_discovery( + hass: HomeAssistant, + zigpy_device_mock, + zha_device_joined, +) -> None: + """Test quirks v2 discovery.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + zigpy.zcl.clusters.general.PowerConfiguration.cluster_id, + zigpy.zcl.clusters.general.Groups.cluster_id, + zigpy.zcl.clusters.general.OnOff.cluster_id, + ], + SIG_EP_OUTPUT: [ + zigpy.zcl.clusters.general.Scenes.cluster_id, + ], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.NON_COLOR_CONTROLLER, + } + }, + ieee="01:2d:6f:00:0a:90:69:e8", + manufacturer="Ikea of Sweden", + model="TRADFRI remote control", + ) + + 
( + add_to_registry_v2( + "Ikea of Sweden", "TRADFRI remote control", zigpy.quirks._DEVICE_REGISTRY + ) + .replaces(PowerConfig1CRCluster) + .replaces(ScenesCluster, cluster_type=ClusterType.Client) + .number( + zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, + zigpy.zcl.clusters.general.OnOff.cluster_id, + min_value=1, + max_value=100, + step=1, + unit=UnitOfTime.SECONDS, + multiplier=1, + translation_key="on_off_transition_time", + ) + ) + + zigpy_device = zigpy.quirks._DEVICE_REGISTRY.get_device(zigpy_device) + zigpy_device.endpoints[1].power.PLUGGED_ATTR_READS = { + "battery_voltage": 3, + "battery_percentage_remaining": 100, + } + update_attribute_cache(zigpy_device.endpoints[1].power) + zigpy_device.endpoints[1].on_off.PLUGGED_ATTR_READS = { + zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name: 3, + } + update_attribute_cache(zigpy_device.endpoints[1].on_off) + + zha_device = await zha_device_joined(zigpy_device) + + entity_id = find_entity_id( + Platform.NUMBER, + zha_device, + hass, + ) + assert entity_id is not None + + state = hass.states.get(entity_id) + assert state is not None + + +async def test_quirks_v2_entity_discovery_e1_curtain( + hass: HomeAssistant, + zigpy_device_mock, + zha_device_joined, +) -> None: + """Test quirks v2 discovery for e1 curtain motor.""" + aqara_E1_device = zigpy_device_mock( + { + 1: { + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.WINDOW_COVERING_DEVICE, + SIG_EP_INPUT: [ + zigpy.zcl.clusters.general.Basic.cluster_id, + zigpy.zcl.clusters.general.PowerConfiguration.cluster_id, + zigpy.zcl.clusters.general.Identify.cluster_id, + zigpy.zcl.clusters.general.Time.cluster_id, + WindowCoveringE1.cluster_id, + XiaomiAqaraDriverE1.cluster_id, + ], + SIG_EP_OUTPUT: [ + zigpy.zcl.clusters.general.Identify.cluster_id, + zigpy.zcl.clusters.general.Time.cluster_id, + zigpy.zcl.clusters.general.Ota.cluster_id, + XiaomiAqaraDriverE1.cluster_id, + ], + } + }, + ieee="01:2d:6f:00:0a:90:69:e8", + manufacturer="LUMI", + model="lumi.curtain.agl006", + ) + + class AqaraE1HookState(zigpy.types.enum8): + """Aqara hook state.""" + + Unlocked = 0x00 + Locked = 0x01 + Locking = 0x02 + Unlocking = 0x03 + + class FakeXiaomiAqaraDriverE1(XiaomiAqaraDriverE1): + """Fake XiaomiAqaraDriverE1 cluster.""" + + attributes = XiaomiAqaraDriverE1.attributes.copy() + attributes.update( + { + 0x9999: ("error_detected", zigpy.types.Bool, True), + } + ) + + ( + add_to_registry_v2("LUMI", "lumi.curtain.agl006") + .adds(LocalIlluminanceMeasurementCluster) + .replaces(BasicCluster) + .replaces(XiaomiPowerConfigurationPercent) + .replaces(WindowCoveringE1) + .replaces(FakeXiaomiAqaraDriverE1) + .removes(FakeXiaomiAqaraDriverE1, cluster_type=ClusterType.Client) + .enum( + BasicCluster.AttributeDefs.power_source.name, + BasicCluster.PowerSource, + BasicCluster.cluster_id, + entity_platform=Platform.SENSOR, + entity_type=EntityType.DIAGNOSTIC, + ) + .enum( + "hooks_state", + AqaraE1HookState, + FakeXiaomiAqaraDriverE1.cluster_id, + entity_platform=Platform.SENSOR, + entity_type=EntityType.DIAGNOSTIC, + ) + .binary_sensor( + "error_detected", + FakeXiaomiAqaraDriverE1.cluster_id, + translation_key="valve_alarm", + ) + ) + + aqara_E1_device = zigpy.quirks._DEVICE_REGISTRY.get_device(aqara_E1_device) + + aqara_E1_device.endpoints[1].opple_cluster.PLUGGED_ATTR_READS = { + "hand_open": 0, + "positions_stored": 0, + "hooks_lock": 0, + "hooks_state": AqaraE1HookState.Unlocked, + "light_level": 0, + "error_detected": 0, + } + 
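Illustrative aside, not part of the patch: before a mocked device is joined, these quirks-v2 tests appear to seed each cluster's attribute cache so the discovered entities have an initial state to report. A sketch of that recurring pattern, using only the helper this file already imports (`update_attribute_cache`) and the mock attribute it sets (`PLUGGED_ATTR_READS`):

# Give the mocked OnOff cluster the attribute values a real device would answer with,
# then push them into the cluster's attribute cache via the ZHA test helper.
on_off = zigpy_device.endpoints[1].on_off
on_off.PLUGGED_ATTR_READS = {"off_wait_time": 3}
update_attribute_cache(on_off)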
update_attribute_cache(aqara_E1_device.endpoints[1].opple_cluster) + + aqara_E1_device.endpoints[1].basic.PLUGGED_ATTR_READS = { + BasicCluster.AttributeDefs.power_source.name: BasicCluster.PowerSource.Mains_single_phase, + } + update_attribute_cache(aqara_E1_device.endpoints[1].basic) + + WCAttrs = zigpy.zcl.clusters.closures.WindowCovering.AttributeDefs + WCT = zigpy.zcl.clusters.closures.WindowCovering.WindowCoveringType + WCCS = zigpy.zcl.clusters.closures.WindowCovering.ConfigStatus + aqara_E1_device.endpoints[1].window_covering.PLUGGED_ATTR_READS = { + WCAttrs.current_position_lift_percentage.name: 0, + WCAttrs.window_covering_type.name: WCT.Drapery, + WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), + } + update_attribute_cache(aqara_E1_device.endpoints[1].window_covering) + + zha_device = await zha_device_joined(aqara_E1_device) + + power_source_entity_id = find_entity_id( + Platform.SENSOR, + zha_device, + hass, + qualifier=BasicCluster.AttributeDefs.power_source.name, + ) + assert power_source_entity_id is not None + state = hass.states.get(power_source_entity_id) + assert state is not None + assert state.state == BasicCluster.PowerSource.Mains_single_phase.name + + hook_state_entity_id = find_entity_id( + Platform.SENSOR, + zha_device, + hass, + qualifier="hooks_state", + ) + assert hook_state_entity_id is not None + state = hass.states.get(hook_state_entity_id) + assert state is not None + assert state.state == AqaraE1HookState.Unlocked.name + + error_detected_entity_id = find_entity_id( + Platform.BINARY_SENSOR, + zha_device, + hass, + ) + assert error_detected_entity_id is not None + state = hass.states.get(error_detected_entity_id) + assert state is not None + assert state.state == STATE_OFF + + +def _get_test_device( + zigpy_device_mock, + manufacturer: str, + model: str, + augment_method: Callable[[QuirksV2RegistryEntry], QuirksV2RegistryEntry] + | None = None, +): + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + zigpy.zcl.clusters.general.PowerConfiguration.cluster_id, + zigpy.zcl.clusters.general.Groups.cluster_id, + zigpy.zcl.clusters.general.OnOff.cluster_id, + ], + SIG_EP_OUTPUT: [ + zigpy.zcl.clusters.general.Scenes.cluster_id, + ], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.NON_COLOR_CONTROLLER, + } + }, + ieee="01:2d:6f:00:0a:90:69:e8", + manufacturer=manufacturer, + model=model, + ) + + v2_quirk = ( + add_to_registry_v2(manufacturer, model, zigpy.quirks._DEVICE_REGISTRY) + .replaces(PowerConfig1CRCluster) + .replaces(ScenesCluster, cluster_type=ClusterType.Client) + .number( + zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, + zigpy.zcl.clusters.general.OnOff.cluster_id, + endpoint_id=3, + min_value=1, + max_value=100, + step=1, + unit=UnitOfTime.SECONDS, + multiplier=1, + translation_key="on_off_transition_time", + ) + .number( + zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, + zigpy.zcl.clusters.general.Time.cluster_id, + min_value=1, + max_value=100, + step=1, + unit=UnitOfTime.SECONDS, + multiplier=1, + translation_key="on_off_transition_time", + ) + .sensor( + zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, + zigpy.zcl.clusters.general.OnOff.cluster_id, + entity_type=EntityType.CONFIG, + translation_key="analog_input", + ) + ) + + if augment_method: + v2_quirk = augment_method(v2_quirk) + + zigpy_device = zigpy.quirks._DEVICE_REGISTRY.get_device(zigpy_device) + zigpy_device.endpoints[1].power.PLUGGED_ATTR_READS = { + "battery_voltage": 3, + 
"battery_percentage_remaining": 100, + } + update_attribute_cache(zigpy_device.endpoints[1].power) + zigpy_device.endpoints[1].on_off.PLUGGED_ATTR_READS = { + zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name: 3, + } + update_attribute_cache(zigpy_device.endpoints[1].on_off) + return zigpy_device + + +async def test_quirks_v2_entity_no_metadata( + hass: HomeAssistant, + zigpy_device_mock, + zha_device_joined, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test quirks v2 discovery skipped - no metadata.""" + + zigpy_device = _get_test_device( + zigpy_device_mock, "Ikea of Sweden2", "TRADFRI remote control2" + ) + setattr(zigpy_device, "_exposes_metadata", {}) + zha_device = await zha_device_joined(zigpy_device) + assert ( + f"Device: {zigpy_device.ieee!s}-{zha_device.name} does not expose any quirks v2 entities" + in caplog.text + ) + + +async def test_quirks_v2_entity_discovery_errors( + hass: HomeAssistant, + zigpy_device_mock, + zha_device_joined, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test quirks v2 discovery skipped - errors.""" + + zigpy_device = _get_test_device( + zigpy_device_mock, "Ikea of Sweden3", "TRADFRI remote control3" + ) + zha_device = await zha_device_joined(zigpy_device) + + m1 = f"Device: {zigpy_device.ieee!s}-{zha_device.name} does not have an" + m2 = " endpoint with id: 3 - unable to create entity with cluster" + m3 = " details: (3, 6, )" + assert f"{m1}{m2}{m3}" in caplog.text + + time_cluster_id = zigpy.zcl.clusters.general.Time.cluster_id + + m1 = f"Device: {zigpy_device.ieee!s}-{zha_device.name} does not have a" + m2 = f" cluster with id: {time_cluster_id} - unable to create entity with " + m3 = f"cluster details: (1, {time_cluster_id}, )" + assert f"{m1}{m2}{m3}" in caplog.text + + # fmt: off + entity_details = ( + "{'cluster_details': (1, 6, ), 'entity_metadata': " + "ZCLSensorMetadata(entity_platform=, " + "entity_type=, cluster_id=6, endpoint_id=1, " + "cluster_type=, initially_disabled=False, " + "attribute_initialized_from_cache=True, translation_key='analog_input', " + "attribute_name='off_wait_time', divisor=1, multiplier=1, " + "unit=None, device_class=None, state_class=None)}" + ) + # fmt: on + + m1 = f"Device: {zigpy_device.ieee!s}-{zha_device.name} has an entity with " + m2 = f"details: {entity_details} that does not have an entity class mapping - " + m3 = "unable to create entity" + assert f"{m1}{m2}{m3}" in caplog.text + + +DEVICE_CLASS_TYPES = [NumberMetadata, BinarySensorMetadata, ZCLSensorMetadata] + + +def validate_device_class_unit( + quirk: QuirksV2RegistryEntry, + entity_metadata: EntityMetadata, + platform: Platform, + translations: dict, +) -> None: + """Ensure device class and unit are used correctly.""" + if ( + hasattr(entity_metadata, "unit") + and entity_metadata.unit is not None + and hasattr(entity_metadata, "device_class") + and entity_metadata.device_class is not None + ): + m1 = "device_class and unit are both set - unit: " + m2 = f"{entity_metadata.unit} device_class: " + m3 = f"{entity_metadata.device_class} for {platform.name} " + raise ValueError(f"{m1}{m2}{m3}{quirk}") + + +def validate_translation_keys( + quirk: QuirksV2RegistryEntry, + entity_metadata: EntityMetadata, + platform: Platform, + translations: dict, +) -> None: + """Ensure translation keys exist for all v2 quirks.""" + if isinstance(entity_metadata, ZCLCommandButtonMetadata): + default_translation_key = entity_metadata.command_name + else: + default_translation_key = entity_metadata.attribute_name + translation_key = 
entity_metadata.translation_key or default_translation_key + + if ( + translation_key is not None + and translation_key not in translations["entity"][platform] + ): + raise ValueError( + f"Missing translation key: {translation_key} for {platform.name} {quirk}" + ) + + +def validate_translation_keys_device_class( + quirk: QuirksV2RegistryEntry, + entity_metadata: EntityMetadata, + platform: Platform, + translations: dict, +) -> None: + """Validate translation keys and device class usage.""" + if isinstance(entity_metadata, ZCLCommandButtonMetadata): + default_translation_key = entity_metadata.command_name + else: + default_translation_key = entity_metadata.attribute_name + translation_key = entity_metadata.translation_key or default_translation_key + + metadata_type = type(entity_metadata) + if metadata_type in DEVICE_CLASS_TYPES: + device_class = entity_metadata.device_class + if device_class is not None and translation_key is not None: + m1 = "translation_key and device_class are both set - translation_key: " + m2 = f"{translation_key} device_class: {device_class} for {platform.name} " + raise ValueError(f"{m1}{m2}{quirk}") + + +def validate_metadata(validator: Callable) -> None: + """Ensure v2 quirks metadata does not violate HA rules.""" + all_v2_quirks = itertools.chain.from_iterable( + zigpy.quirks._DEVICE_REGISTRY._registry_v2.values() + ) + translations = load_json("homeassistant/components/zha/strings.json") + for quirk in all_v2_quirks: + for entity_metadata in quirk.entity_metadata: + platform = Platform(entity_metadata.entity_platform.value) + validator(quirk, entity_metadata, platform, translations) + + +def bad_translation_key(v2_quirk: QuirksV2RegistryEntry) -> QuirksV2RegistryEntry: + """Introduce a bad translation key.""" + return v2_quirk.sensor( + zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, + zigpy.zcl.clusters.general.OnOff.cluster_id, + entity_type=EntityType.CONFIG, + translation_key="missing_translation_key", + ) + + +def bad_device_class_unit_combination( + v2_quirk: QuirksV2RegistryEntry, +) -> QuirksV2RegistryEntry: + """Introduce a bad device class and unit combination.""" + return v2_quirk.sensor( + zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, + zigpy.zcl.clusters.general.OnOff.cluster_id, + entity_type=EntityType.CONFIG, + unit="invalid", + device_class="invalid", + translation_key="analog_input", + ) + + +def bad_device_class_translation_key_usage( + v2_quirk: QuirksV2RegistryEntry, +) -> QuirksV2RegistryEntry: + """Introduce a bad device class and translation key combination.""" + return v2_quirk.sensor( + zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, + zigpy.zcl.clusters.general.OnOff.cluster_id, + entity_type=EntityType.CONFIG, + translation_key="invalid", + device_class="invalid", + ) + + +@pytest.mark.parametrize( + ("augment_method", "validate_method", "expected_exception_string"), + [ + ( + bad_translation_key, + validate_translation_keys, + "Missing translation key: missing_translation_key", + ), + ( + bad_device_class_unit_combination, + validate_device_class_unit, + "cannot have both unit and device_class", + ), + ( + bad_device_class_translation_key_usage, + validate_translation_keys_device_class, + "cannot have both a translation_key and a device_class", + ), + ], +) +async def test_quirks_v2_metadata_errors( + hass: HomeAssistant, + zigpy_device_mock, + zha_device_joined, + augment_method: Callable[[QuirksV2RegistryEntry], QuirksV2RegistryEntry], + validate_method: Callable, + 
expected_exception_string: str, +) -> None: + """Ensure all v2 quirks translation keys exist.""" + + # no error yet + validate_metadata(validate_method) + + # ensure the error is caught and raised + try: + # introduce an error + zigpy_device = _get_test_device( + zigpy_device_mock, + "Ikea of Sweden4", + "TRADFRI remote control4", + augment_method=augment_method, + ) + await zha_device_joined(zigpy_device) + + validate_metadata(validate_method) + # if the device was created we remove it + # so we don't pollute the rest of the tests + zigpy.quirks._DEVICE_REGISTRY.remove(zigpy_device) + except ValueError: + # if the device was not created we remove it + # so we don't pollute the rest of the tests + zigpy.quirks._DEVICE_REGISTRY._registry_v2.pop( + ( + "Ikea of Sweden4", + "TRADFRI remote control4", + ) + ) + with pytest.raises(ValueError, match=expected_exception_string): + raise + + +class BadDeviceClass(enum.Enum): + """Bad device class.""" + + BAD = "bad" + + +def bad_binary_sensor_device_class( + v2_quirk: QuirksV2RegistryEntry, +) -> QuirksV2RegistryEntry: + """Introduce a bad device class on a binary sensor.""" + + return v2_quirk.binary_sensor( + zigpy.zcl.clusters.general.OnOff.AttributeDefs.on_off.name, + zigpy.zcl.clusters.general.OnOff.cluster_id, + device_class=BadDeviceClass.BAD, + ) + + +def bad_sensor_device_class( + v2_quirk: QuirksV2RegistryEntry, +) -> QuirksV2RegistryEntry: + """Introduce a bad device class on a sensor.""" + + return v2_quirk.sensor( + zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, + zigpy.zcl.clusters.general.OnOff.cluster_id, + device_class=BadDeviceClass.BAD, + ) + + +def bad_number_device_class( + v2_quirk: QuirksV2RegistryEntry, +) -> QuirksV2RegistryEntry: + """Introduce a bad device class on a number.""" + + return v2_quirk.number( + zigpy.zcl.clusters.general.OnOff.AttributeDefs.on_time.name, + zigpy.zcl.clusters.general.OnOff.cluster_id, + device_class=BadDeviceClass.BAD, + ) + + +ERROR_ROOT = "Quirks provided an invalid device class" + + +@pytest.mark.parametrize( + ("augment_method", "expected_exception_string"), + [ + ( + bad_binary_sensor_device_class, + f"{ERROR_ROOT}: BadDeviceClass.BAD for platform binary_sensor", + ), + ( + bad_sensor_device_class, + f"{ERROR_ROOT}: BadDeviceClass.BAD for platform sensor", + ), + ( + bad_number_device_class, + f"{ERROR_ROOT}: BadDeviceClass.BAD for platform number", + ), + ], +) +async def test_quirks_v2_metadata_bad_device_classes( + hass: HomeAssistant, + zigpy_device_mock, + zha_device_joined, + caplog: pytest.LogCaptureFixture, + augment_method: Callable[[QuirksV2RegistryEntry], QuirksV2RegistryEntry], + expected_exception_string: str, +) -> None: + """Test bad quirks v2 device classes.""" + + # introduce an error + zigpy_device = _get_test_device( + zigpy_device_mock, + "Ikea of Sweden4", + "TRADFRI remote control4", + augment_method=augment_method, + ) + await zha_device_joined(zigpy_device) + + assert expected_exception_string in caplog.text + + # remove the device so we don't pollute the rest of the tests + zigpy.quirks._DEVICE_REGISTRY.remove(zigpy_device) diff --git a/tests/components/zha/test_fan.py b/tests/components/zha/test_fan.py index 0105c569653..095f505876e 100644 --- a/tests/components/zha/test_fan.py +++ b/tests/components/zha/test_fan.py @@ -1,25 +1,32 @@ """Test ZHA fan.""" -from unittest.mock import call, patch +from unittest.mock import AsyncMock, call, patch import pytest -from zha.application.platforms.fan.const import PRESET_MODE_ON +import 
zhaquirks.ikea.starkvind +from zigpy.device import Device +from zigpy.exceptions import ZigbeeException from zigpy.profiles import zha from zigpy.zcl.clusters import general, hvac +import zigpy.zcl.foundation as zcl_f from homeassistant.components.fan import ( ATTR_PERCENTAGE, + ATTR_PERCENTAGE_STEP, ATTR_PRESET_MODE, DOMAIN as FAN_DOMAIN, SERVICE_SET_PERCENTAGE, SERVICE_SET_PRESET_MODE, NotValidPresetModeError, ) -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, +from homeassistant.components.zha.core.device import ZHADevice +from homeassistant.components.zha.core.discovery import GROUP_PROBE +from homeassistant.components.zha.core.group import GroupMember +from homeassistant.components.zha.core.helpers import get_zha_gateway +from homeassistant.components.zha.fan import ( + PRESET_MODE_AUTO, + PRESET_MODE_ON, + PRESET_MODE_SMART, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -27,15 +34,25 @@ from homeassistant.const import ( SERVICE_TURN_ON, STATE_OFF, STATE_ON, + STATE_UNAVAILABLE, Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.setup import async_setup_component -from .common import find_entity_id, send_attributes_report +from .common import ( + async_enable_traffic, + async_find_group_entity_id, + async_test_rejoin, + async_wait_for_updates, + find_entity_id, + send_attributes_report, +) from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -ON = 1 -OFF = 0 +IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" +IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e8" @pytest.fixture(autouse=True) @@ -58,49 +75,122 @@ def fan_platform_only(): yield -async def test_fan(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: - """Test ZHA fan platform.""" +@pytest.fixture +def zigpy_device(zigpy_device_mock): + """Fan zigpy device.""" + endpoints = { + 1: { + SIG_EP_INPUT: [hvac.Fan.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + return zigpy_device_mock( + endpoints, node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00" + ) - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + +@pytest.fixture +async def coordinator(hass, zigpy_device_mock, zha_device_joined): + """Test ZHA fan platform.""" zigpy_device = zigpy_device_mock( { 1: { - SIG_EP_INPUT: [general.Basic.cluster_id, hvac.Fan.cluster_id], + SIG_EP_INPUT: [general.Groups.cluster_id], SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, SIG_EP_PROFILE: zha.PROFILE_ID, } }, - ieee="01:2d:6f:00:0a:90:69:e8", - node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", + ieee="00:15:8d:00:02:32:4f:32", + nwk=0x0000, + node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return zha_device - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - entity_id = find_entity_id(Platform.FAN, zha_device_proxy, hass) - cluster = zigpy_device.endpoints[1].fan +@pytest.fixture +async def device_fan_1(hass, zigpy_device_mock, zha_device_joined): + 
"""Test ZHA fan platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.Groups.cluster_id, + general.OnOff.cluster_id, + hvac.Fan.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + }, + }, + ieee=IEEE_GROUPABLE_DEVICE, + ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + await hass.async_block_till_done() + return zha_device + + +@pytest.fixture +async def device_fan_2(hass, zigpy_device_mock, zha_device_joined): + """Test ZHA fan platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.Groups.cluster_id, + general.OnOff.cluster_id, + hvac.Fan.cluster_id, + general.LevelControl.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + }, + }, + ieee=IEEE_GROUPABLE_DEVICE2, + ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + await hass.async_block_till_done() + return zha_device + + +async def test_fan( + hass: HomeAssistant, zha_device_joined_restored, zigpy_device +) -> None: + """Test ZHA fan platform.""" + + zha_device = await zha_device_joined_restored(zigpy_device) + cluster = zigpy_device.endpoints.get(1).fan + entity_id = find_entity_id(Platform.FAN, zha_device, hass) assert entity_id is not None + assert hass.states.get(entity_id).state == STATE_OFF + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the fan was created and that it is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + # test that the state has changed from unavailable to off assert hass.states.get(entity_id).state == STATE_OFF # turn on at fan - await send_attributes_report( - hass, - cluster, - {hvac.Fan.AttributeDefs.fan_mode.id: hvac.FanMode.Low}, - ) + await send_attributes_report(hass, cluster, {1: 2, 0: 1, 2: 3}) assert hass.states.get(entity_id).state == STATE_ON # turn off at fan - await send_attributes_report( - hass, cluster, {hvac.Fan.AttributeDefs.fan_mode.id: hvac.FanMode.Off} - ) + await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2}) assert hass.states.get(entity_id).state == STATE_OFF # turn on from HA @@ -140,8 +230,11 @@ async def test_fan(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert exc.value.translation_key == "not_valid_preset_mode" assert len(cluster.write_attributes.mock_calls) == 0 + # test adding new fan to the network and HA + await async_test_rejoin(hass, zigpy_device, [cluster], (1,)) -async def async_turn_on(hass: HomeAssistant, entity_id, percentage=None): + +async def async_turn_on(hass, entity_id, percentage=None): """Turn fan on.""" data = { key: value @@ -152,14 +245,14 @@ async def async_turn_on(hass: HomeAssistant, entity_id, percentage=None): await hass.services.async_call(Platform.FAN, SERVICE_TURN_ON, data, blocking=True) -async def async_turn_off(hass: HomeAssistant, entity_id): +async def async_turn_off(hass, entity_id): """Turn fan off.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(Platform.FAN, SERVICE_TURN_OFF, data, blocking=True) -async def async_set_percentage(hass: HomeAssistant, entity_id, percentage=None): +async def async_set_percentage(hass, entity_id, percentage=None): """Set percentage for specified fan.""" data = { key: value @@ -172,7 +265,7 @@ async def 
async_set_percentage(hass: HomeAssistant, entity_id, percentage=None): ) -async def async_set_preset_mode(hass: HomeAssistant, entity_id, preset_mode=None): +async def async_set_preset_mode(hass, entity_id, preset_mode=None): """Set preset_mode for specified fan.""" data = { key: value @@ -183,3 +276,633 @@ async def async_set_preset_mode(hass: HomeAssistant, entity_id, preset_mode=None await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PRESET_MODE, data, blocking=True ) + + +@patch( + "zigpy.zcl.clusters.hvac.Fan.write_attributes", + new=AsyncMock(return_value=zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]), +) +@patch( + "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY", + new=0, +) +async def test_zha_group_fan_entity( + hass: HomeAssistant, device_fan_1, device_fan_2, coordinator +) -> None: + """Test the fan entity for a ZHA group.""" + zha_gateway = get_zha_gateway(hass) + assert zha_gateway is not None + zha_gateway.coordinator_zha_device = coordinator + coordinator._zha_gateway = zha_gateway + device_fan_1._zha_gateway = zha_gateway + device_fan_2._zha_gateway = zha_gateway + member_ieee_addresses = [device_fan_1.ieee, device_fan_2.ieee] + members = [GroupMember(device_fan_1.ieee, 1), GroupMember(device_fan_2.ieee, 1)] + + # test creating a group with 2 members + zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) + await hass.async_block_till_done() + + assert zha_group is not None + assert len(zha_group.members) == 2 + for member in zha_group.members: + assert member.device.ieee in member_ieee_addresses + assert member.group == zha_group + assert member.endpoint is not None + + entity_domains = GROUP_PROBE.determine_entity_domains(hass, zha_group) + assert len(entity_domains) == 2 + + assert Platform.LIGHT in entity_domains + assert Platform.FAN in entity_domains + + entity_id = async_find_group_entity_id(hass, Platform.FAN, zha_group) + assert hass.states.get(entity_id) is not None + + group_fan_cluster = zha_group.endpoint[hvac.Fan.cluster_id] + + dev1_fan_cluster = device_fan_1.device.endpoints[1].fan + dev2_fan_cluster = device_fan_2.device.endpoints[1].fan + + await async_enable_traffic(hass, [device_fan_1, device_fan_2], enabled=False) + await async_wait_for_updates(hass) + # test that the fans were created and that they are unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [device_fan_1, device_fan_2]) + await async_wait_for_updates(hass) + # test that the fan group entity was created and is off + assert hass.states.get(entity_id).state == STATE_OFF + + # turn on from HA + group_fan_cluster.write_attributes.reset_mock() + await async_turn_on(hass, entity_id) + await hass.async_block_till_done() + assert len(group_fan_cluster.write_attributes.mock_calls) == 1 + assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 2} + + # turn off from HA + group_fan_cluster.write_attributes.reset_mock() + await async_turn_off(hass, entity_id) + assert len(group_fan_cluster.write_attributes.mock_calls) == 1 + assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 0} + + # change speed from HA + group_fan_cluster.write_attributes.reset_mock() + await async_set_percentage(hass, entity_id, percentage=100) + assert len(group_fan_cluster.write_attributes.mock_calls) == 1 + assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 3} + + # change preset mode 
from HA + group_fan_cluster.write_attributes.reset_mock() + await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_ON) + assert len(group_fan_cluster.write_attributes.mock_calls) == 1 + assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 4} + + # change preset mode from HA + group_fan_cluster.write_attributes.reset_mock() + await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_AUTO) + assert len(group_fan_cluster.write_attributes.mock_calls) == 1 + assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 5} + + # change preset mode from HA + group_fan_cluster.write_attributes.reset_mock() + await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_SMART) + assert len(group_fan_cluster.write_attributes.mock_calls) == 1 + assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 6} + + # test some of the group logic to make sure we key off states correctly + await send_attributes_report(hass, dev1_fan_cluster, {0: 0}) + await send_attributes_report(hass, dev2_fan_cluster, {0: 0}) + await hass.async_block_till_done() + + # test that group fan is off + assert hass.states.get(entity_id).state == STATE_OFF + + await send_attributes_report(hass, dev2_fan_cluster, {0: 2}) + await async_wait_for_updates(hass) + + # test that group fan is speed medium + assert hass.states.get(entity_id).state == STATE_ON + + await send_attributes_report(hass, dev2_fan_cluster, {0: 0}) + await async_wait_for_updates(hass) + + # test that group fan is now off + assert hass.states.get(entity_id).state == STATE_OFF + + +@patch( + "zigpy.zcl.clusters.hvac.Fan.write_attributes", + new=AsyncMock(side_effect=ZigbeeException), +) +@patch( + "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY", + new=0, +) +async def test_zha_group_fan_entity_failure_state( + hass: HomeAssistant, + device_fan_1, + device_fan_2, + coordinator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the fan entity for a ZHA group when writing attributes generates an exception.""" + zha_gateway = get_zha_gateway(hass) + assert zha_gateway is not None + zha_gateway.coordinator_zha_device = coordinator + coordinator._zha_gateway = zha_gateway + device_fan_1._zha_gateway = zha_gateway + device_fan_2._zha_gateway = zha_gateway + member_ieee_addresses = [device_fan_1.ieee, device_fan_2.ieee] + members = [GroupMember(device_fan_1.ieee, 1), GroupMember(device_fan_2.ieee, 1)] + + # test creating a group with 2 members + zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) + await hass.async_block_till_done() + + assert zha_group is not None + assert len(zha_group.members) == 2 + for member in zha_group.members: + assert member.device.ieee in member_ieee_addresses + assert member.group == zha_group + assert member.endpoint is not None + + entity_domains = GROUP_PROBE.determine_entity_domains(hass, zha_group) + assert len(entity_domains) == 2 + + assert Platform.LIGHT in entity_domains + assert Platform.FAN in entity_domains + + entity_id = async_find_group_entity_id(hass, Platform.FAN, zha_group) + assert hass.states.get(entity_id) is not None + + group_fan_cluster = zha_group.endpoint[hvac.Fan.cluster_id] + + await async_enable_traffic(hass, [device_fan_1, device_fan_2], enabled=False) + await async_wait_for_updates(hass) + # test that the fans were created and that they are unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + 
await async_enable_traffic(hass, [device_fan_1, device_fan_2]) + await async_wait_for_updates(hass) + # test that the fan group entity was created and is off + assert hass.states.get(entity_id).state == STATE_OFF + + # turn on from HA + group_fan_cluster.write_attributes.reset_mock() + + with pytest.raises(HomeAssistantError): + await async_turn_on(hass, entity_id) + + await hass.async_block_till_done() + assert len(group_fan_cluster.write_attributes.mock_calls) == 1 + assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 2} + + +@pytest.mark.parametrize( + ("plug_read", "expected_state", "expected_percentage"), + [ + (None, STATE_OFF, None), + ({"fan_mode": 0}, STATE_OFF, 0), + ({"fan_mode": 1}, STATE_ON, 33), + ({"fan_mode": 2}, STATE_ON, 66), + ({"fan_mode": 3}, STATE_ON, 100), + ], +) +async def test_fan_init( + hass: HomeAssistant, + zha_device_joined_restored, + zigpy_device, + plug_read, + expected_state, + expected_percentage, +) -> None: + """Test ZHA fan platform.""" + + cluster = zigpy_device.endpoints.get(1).fan + cluster.PLUGGED_ATTR_READS = plug_read + + zha_device = await zha_device_joined_restored(zigpy_device) + entity_id = find_entity_id(Platform.FAN, zha_device, hass) + assert entity_id is not None + assert hass.states.get(entity_id).state == expected_state + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == expected_percentage + assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None + + +async def test_fan_update_entity( + hass: HomeAssistant, + zha_device_joined_restored, + zigpy_device, +) -> None: + """Test ZHA fan platform.""" + + cluster = zigpy_device.endpoints.get(1).fan + cluster.PLUGGED_ATTR_READS = {"fan_mode": 0} + + zha_device = await zha_device_joined_restored(zigpy_device) + entity_id = find_entity_id(Platform.FAN, zha_device, hass) + assert entity_id is not None + assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0 + assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 3 + if zha_device_joined_restored.name == "zha_device_joined": + assert cluster.read_attributes.await_count == 2 + else: + assert cluster.read_attributes.await_count == 4 + + await async_setup_component(hass, "homeassistant", {}) + await hass.async_block_till_done() + + await hass.services.async_call( + "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True + ) + assert hass.states.get(entity_id).state == STATE_OFF + if zha_device_joined_restored.name == "zha_device_joined": + assert cluster.read_attributes.await_count == 3 + else: + assert cluster.read_attributes.await_count == 5 + + cluster.PLUGGED_ATTR_READS = {"fan_mode": 1} + await hass.services.async_call( + "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True + ) + assert hass.states.get(entity_id).state == STATE_ON + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 33 + assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 3 + if zha_device_joined_restored.name == "zha_device_joined": + assert cluster.read_attributes.await_count == 4 + else: + assert cluster.read_attributes.await_count == 6 + + +@pytest.fixture +def zigpy_device_ikea(zigpy_device_mock): + """Ikea fan zigpy device.""" + endpoints = { + 1: { + SIG_EP_INPUT: [ + general.Basic.cluster_id, + 
general.Identify.cluster_id, + general.Groups.cluster_id, + general.Scenes.cluster_id, + 64637, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COMBINED_INTERFACE, + SIG_EP_PROFILE: zha.PROFILE_ID, + }, + } + return zigpy_device_mock( + endpoints, + manufacturer="IKEA of Sweden", + model="STARKVIND Air purifier", + quirk=zhaquirks.ikea.starkvind.IkeaSTARKVIND, + node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", + ) + + +async def test_fan_ikea( + hass: HomeAssistant, + zha_device_joined_restored: ZHADevice, + zigpy_device_ikea: Device, +) -> None: + """Test ZHA fan Ikea platform.""" + zha_device = await zha_device_joined_restored(zigpy_device_ikea) + cluster = zigpy_device_ikea.endpoints.get(1).ikea_airpurifier + entity_id = find_entity_id(Platform.FAN, zha_device, hass) + assert entity_id is not None + + assert hass.states.get(entity_id).state == STATE_OFF + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the fan was created and that it is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + # test that the state has changed from unavailable to off + assert hass.states.get(entity_id).state == STATE_OFF + + # turn on at fan + await send_attributes_report(hass, cluster, {6: 1}) + assert hass.states.get(entity_id).state == STATE_ON + + # turn off at fan + await send_attributes_report(hass, cluster, {6: 0}) + assert hass.states.get(entity_id).state == STATE_OFF + + # turn on from HA + cluster.write_attributes.reset_mock() + await async_turn_on(hass, entity_id) + assert cluster.write_attributes.mock_calls == [ + call({"fan_mode": 1}, manufacturer=None) + ] + + # turn off from HA + cluster.write_attributes.reset_mock() + await async_turn_off(hass, entity_id) + assert cluster.write_attributes.mock_calls == [ + call({"fan_mode": 0}, manufacturer=None) + ] + + # change speed from HA + cluster.write_attributes.reset_mock() + await async_set_percentage(hass, entity_id, percentage=100) + assert cluster.write_attributes.mock_calls == [ + call({"fan_mode": 10}, manufacturer=None) + ] + + # change preset_mode from HA + cluster.write_attributes.reset_mock() + await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_AUTO) + assert cluster.write_attributes.mock_calls == [ + call({"fan_mode": 1}, manufacturer=None) + ] + + # set invalid preset_mode from HA + cluster.write_attributes.reset_mock() + with pytest.raises(NotValidPresetModeError) as exc: + await async_set_preset_mode( + hass, entity_id, preset_mode="invalid does not exist" + ) + assert exc.value.translation_key == "not_valid_preset_mode" + assert len(cluster.write_attributes.mock_calls) == 0 + + # test adding new fan to the network and HA + await async_test_rejoin(hass, zigpy_device_ikea, [cluster], (9,)) + + +@pytest.mark.parametrize( + ( + "ikea_plug_read", + "ikea_expected_state", + "ikea_expected_percentage", + "ikea_preset_mode", + ), + [ + (None, STATE_OFF, None, None), + ({"fan_mode": 0}, STATE_OFF, 0, None), + ({"fan_mode": 1}, STATE_ON, 10, PRESET_MODE_AUTO), + ({"fan_mode": 10}, STATE_ON, 20, "Speed 1"), + ({"fan_mode": 15}, STATE_ON, 30, "Speed 1.5"), + ({"fan_mode": 20}, STATE_ON, 40, "Speed 2"), + ({"fan_mode": 25}, STATE_ON, 50, "Speed 2.5"), + ({"fan_mode": 30}, STATE_ON, 60, "Speed 3"), + ({"fan_mode": 35}, STATE_ON, 70, "Speed 3.5"), + ({"fan_mode": 40}, STATE_ON, 80, "Speed 4"), + ({"fan_mode": 45}, STATE_ON, 90, "Speed 4.5"), + 
({"fan_mode": 50}, STATE_ON, 100, "Speed 5"), + ], +) +async def test_fan_ikea_init( + hass: HomeAssistant, + zha_device_joined_restored, + zigpy_device_ikea, + ikea_plug_read, + ikea_expected_state, + ikea_expected_percentage, + ikea_preset_mode, +) -> None: + """Test ZHA fan platform.""" + cluster = zigpy_device_ikea.endpoints.get(1).ikea_airpurifier + cluster.PLUGGED_ATTR_READS = ikea_plug_read + + zha_device = await zha_device_joined_restored(zigpy_device_ikea) + entity_id = find_entity_id(Platform.FAN, zha_device, hass) + assert entity_id is not None + assert hass.states.get(entity_id).state == ikea_expected_state + assert ( + hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] + == ikea_expected_percentage + ) + assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] == ikea_preset_mode + + +async def test_fan_ikea_update_entity( + hass: HomeAssistant, + zha_device_joined_restored, + zigpy_device_ikea, +) -> None: + """Test ZHA fan platform.""" + cluster = zigpy_device_ikea.endpoints.get(1).ikea_airpurifier + cluster.PLUGGED_ATTR_READS = {"fan_mode": 0} + + zha_device = await zha_device_joined_restored(zigpy_device_ikea) + entity_id = find_entity_id(Platform.FAN, zha_device, hass) + assert entity_id is not None + assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0 + assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 10 + if zha_device_joined_restored.name == "zha_device_joined": + assert cluster.read_attributes.await_count == 3 + else: + assert cluster.read_attributes.await_count == 6 + + await async_setup_component(hass, "homeassistant", {}) + await hass.async_block_till_done() + + await hass.services.async_call( + "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True + ) + assert hass.states.get(entity_id).state == STATE_OFF + if zha_device_joined_restored.name == "zha_device_joined": + assert cluster.read_attributes.await_count == 4 + else: + assert cluster.read_attributes.await_count == 7 + + cluster.PLUGGED_ATTR_READS = {"fan_mode": 1} + await hass.services.async_call( + "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True + ) + assert hass.states.get(entity_id).state == STATE_ON + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 10 + assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is PRESET_MODE_AUTO + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 10 + if zha_device_joined_restored.name == "zha_device_joined": + assert cluster.read_attributes.await_count == 5 + else: + assert cluster.read_attributes.await_count == 8 + + +@pytest.fixture +def zigpy_device_kof(zigpy_device_mock): + """Fan by King of Fans zigpy device.""" + endpoints = { + 1: { + SIG_EP_INPUT: [ + general.Basic.cluster_id, + general.Identify.cluster_id, + general.Groups.cluster_id, + general.Scenes.cluster_id, + 64637, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COMBINED_INTERFACE, + SIG_EP_PROFILE: zha.PROFILE_ID, + }, + } + return zigpy_device_mock( + endpoints, + manufacturer="King Of Fans, Inc.", + model="HBUniversalCFRemote", + quirk=zhaquirks.kof.kof_mr101z.CeilingFan, + node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", + ) + + +async def test_fan_kof( + hass: HomeAssistant, + zha_device_joined_restored: ZHADevice, + zigpy_device_kof: Device, +) -> None: + """Test ZHA fan platform for King of Fans.""" + 
zha_device = await zha_device_joined_restored(zigpy_device_kof) + cluster = zigpy_device_kof.endpoints.get(1).fan + entity_id = find_entity_id(Platform.FAN, zha_device, hass) + assert entity_id is not None + + assert hass.states.get(entity_id).state == STATE_OFF + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the fan was created and that it is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + # test that the state has changed from unavailable to off + assert hass.states.get(entity_id).state == STATE_OFF + + # turn on at fan + await send_attributes_report(hass, cluster, {1: 2, 0: 1, 2: 3}) + assert hass.states.get(entity_id).state == STATE_ON + + # turn off at fan + await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2}) + assert hass.states.get(entity_id).state == STATE_OFF + + # turn on from HA + cluster.write_attributes.reset_mock() + await async_turn_on(hass, entity_id) + assert cluster.write_attributes.mock_calls == [ + call({"fan_mode": 2}, manufacturer=None) + ] + + # turn off from HA + cluster.write_attributes.reset_mock() + await async_turn_off(hass, entity_id) + assert cluster.write_attributes.mock_calls == [ + call({"fan_mode": 0}, manufacturer=None) + ] + + # change speed from HA + cluster.write_attributes.reset_mock() + await async_set_percentage(hass, entity_id, percentage=100) + assert cluster.write_attributes.mock_calls == [ + call({"fan_mode": 4}, manufacturer=None) + ] + + # change preset_mode from HA + cluster.write_attributes.reset_mock() + await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_SMART) + assert cluster.write_attributes.mock_calls == [ + call({"fan_mode": 6}, manufacturer=None) + ] + + # set invalid preset_mode from HA + cluster.write_attributes.reset_mock() + with pytest.raises(NotValidPresetModeError) as exc: + await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_AUTO) + assert exc.value.translation_key == "not_valid_preset_mode" + assert len(cluster.write_attributes.mock_calls) == 0 + + # test adding new fan to the network and HA + await async_test_rejoin(hass, zigpy_device_kof, [cluster], (1,)) + + +@pytest.mark.parametrize( + ("plug_read", "expected_state", "expected_percentage", "expected_preset"), + [ + (None, STATE_OFF, None, None), + ({"fan_mode": 0}, STATE_OFF, 0, None), + ({"fan_mode": 1}, STATE_ON, 25, None), + ({"fan_mode": 2}, STATE_ON, 50, None), + ({"fan_mode": 3}, STATE_ON, 75, None), + ({"fan_mode": 4}, STATE_ON, 100, None), + ({"fan_mode": 6}, STATE_ON, None, PRESET_MODE_SMART), + ], +) +async def test_fan_kof_init( + hass: HomeAssistant, + zha_device_joined_restored, + zigpy_device_kof, + plug_read, + expected_state, + expected_percentage, + expected_preset, +) -> None: + """Test ZHA fan platform for King of Fans.""" + + cluster = zigpy_device_kof.endpoints.get(1).fan + cluster.PLUGGED_ATTR_READS = plug_read + + zha_device = await zha_device_joined_restored(zigpy_device_kof) + entity_id = find_entity_id(Platform.FAN, zha_device, hass) + assert entity_id is not None + assert hass.states.get(entity_id).state == expected_state + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == expected_percentage + assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] == expected_preset + + +async def test_fan_kof_update_entity( + hass: HomeAssistant, + zha_device_joined_restored, + zigpy_device_kof, +) -> None: + """Test ZHA fan 
platform for King of Fans.""" + + cluster = zigpy_device_kof.endpoints.get(1).fan + cluster.PLUGGED_ATTR_READS = {"fan_mode": 0} + + zha_device = await zha_device_joined_restored(zigpy_device_kof) + entity_id = find_entity_id(Platform.FAN, zha_device, hass) + assert entity_id is not None + assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0 + assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 4 + if zha_device_joined_restored.name == "zha_device_joined": + assert cluster.read_attributes.await_count == 2 + else: + assert cluster.read_attributes.await_count == 4 + + await async_setup_component(hass, "homeassistant", {}) + await hass.async_block_till_done() + + await hass.services.async_call( + "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True + ) + assert hass.states.get(entity_id).state == STATE_OFF + if zha_device_joined_restored.name == "zha_device_joined": + assert cluster.read_attributes.await_count == 3 + else: + assert cluster.read_attributes.await_count == 5 + + cluster.PLUGGED_ATTR_READS = {"fan_mode": 1} + await hass.services.async_call( + "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True + ) + assert hass.states.get(entity_id).state == STATE_ON + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 25 + assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None + assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 4 + if zha_device_joined_restored.name == "zha_device_joined": + assert cluster.read_attributes.await_count == 4 + else: + assert cluster.read_attributes.await_count == 6 diff --git a/tests/components/zha/test_gateway.py b/tests/components/zha/test_gateway.py new file mode 100644 index 00000000000..3a576ed6e55 --- /dev/null +++ b/tests/components/zha/test_gateway.py @@ -0,0 +1,404 @@ +"""Test ZHA Gateway.""" + +import asyncio +from unittest.mock import MagicMock, PropertyMock, patch + +import pytest +from zigpy.application import ControllerApplication +from zigpy.profiles import zha +import zigpy.types +from zigpy.zcl.clusters import general, lighting +import zigpy.zdo.types + +from homeassistant.components.zha.core.gateway import ZHAGateway +from homeassistant.components.zha.core.group import GroupMember +from homeassistant.components.zha.core.helpers import get_zha_gateway +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .common import async_find_group_entity_id +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE + +from tests.common import MockConfigEntry + +IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" +IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e8" + + +@pytest.fixture +def zigpy_dev_basic(zigpy_device_mock): + """Zigpy device with just a basic cluster.""" + return zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + ) + + +@pytest.fixture(autouse=True) +def required_platform_only(): + """Only set up the required and required base platforms to speed up tests.""" + with patch( + "homeassistant.components.zha.PLATFORMS", + ( + Platform.SENSOR, + Platform.LIGHT, + Platform.DEVICE_TRACKER, + Platform.NUMBER, + Platform.SELECT, + ), + ): + yield + + +@pytest.fixture +async def zha_dev_basic(hass, 
zha_device_restored, zigpy_dev_basic): + """ZHA device with just a basic cluster.""" + + return await zha_device_restored(zigpy_dev_basic) + + +@pytest.fixture +async def coordinator(hass, zigpy_device_mock, zha_device_joined): + """Test ZHA light platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + ieee="00:15:8d:00:02:32:4f:32", + nwk=0x0000, + node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return zha_device + + +@pytest.fixture +async def device_light_1(hass, zigpy_device_mock, zha_device_joined): + """Test ZHA light platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.OnOff.cluster_id, + general.LevelControl.cluster_id, + lighting.Color.cluster_id, + general.Groups.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + ieee=IEEE_GROUPABLE_DEVICE, + ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return zha_device + + +@pytest.fixture +async def device_light_2(hass, zigpy_device_mock, zha_device_joined): + """Test ZHA light platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.OnOff.cluster_id, + general.LevelControl.cluster_id, + lighting.Color.cluster_id, + general.Groups.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + ieee=IEEE_GROUPABLE_DEVICE2, + ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return zha_device + + +async def test_device_left(hass: HomeAssistant, zigpy_dev_basic, zha_dev_basic) -> None: + """Device leaving the network should become unavailable.""" + + assert zha_dev_basic.available is True + + get_zha_gateway(hass).device_left(zigpy_dev_basic) + await hass.async_block_till_done() + assert zha_dev_basic.available is False + + +async def test_gateway_group_methods( + hass: HomeAssistant, device_light_1, device_light_2, coordinator +) -> None: + """Test creating a group with 2 members.""" + zha_gateway = get_zha_gateway(hass) + assert zha_gateway is not None + zha_gateway.coordinator_zha_device = coordinator + coordinator._zha_gateway = zha_gateway + device_light_1._zha_gateway = zha_gateway + device_light_2._zha_gateway = zha_gateway + member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee] + members = [GroupMember(device_light_1.ieee, 1), GroupMember(device_light_2.ieee, 1)] + + # test creating a group with 2 members + zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) + await hass.async_block_till_done() + + assert zha_group is not None + assert len(zha_group.members) == 2 + for member in zha_group.members: + assert member.device.ieee in member_ieee_addresses + + entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group) + assert hass.states.get(entity_id) is not None + + # test get group by name + assert zha_group == zha_gateway.async_get_group_by_name(zha_group.name) + + # test removing a group + await zha_gateway.async_remove_zigpy_group(zha_group.group_id) + await hass.async_block_till_done() + + # we shouldn't have the group anymore + assert zha_gateway.async_get_group_by_name(zha_group.name) is None + + # the group entity should be cleaned 
up + assert entity_id not in hass.states.async_entity_ids(Platform.LIGHT) + + # test creating a group with 1 member + zha_group = await zha_gateway.async_create_zigpy_group( + "Test Group", [GroupMember(device_light_1.ieee, 1)] + ) + await hass.async_block_till_done() + + assert zha_group is not None + assert len(zha_group.members) == 1 + for member in zha_group.members: + assert member.device.ieee in [device_light_1.ieee] + + # the group entity should not have been cleaned up + assert entity_id not in hass.states.async_entity_ids(Platform.LIGHT) + + with patch("zigpy.zcl.Cluster.request", side_effect=TimeoutError): + await zha_group.members[0].async_remove_from_group() + assert len(zha_group.members) == 1 + for member in zha_group.members: + assert member.device.ieee in [device_light_1.ieee] + + +async def test_gateway_create_group_with_id( + hass: HomeAssistant, device_light_1, coordinator +) -> None: + """Test creating a group with a specific ID.""" + zha_gateway = get_zha_gateway(hass) + assert zha_gateway is not None + zha_gateway.coordinator_zha_device = coordinator + coordinator._zha_gateway = zha_gateway + device_light_1._zha_gateway = zha_gateway + + zha_group = await zha_gateway.async_create_zigpy_group( + "Test Group", [GroupMember(device_light_1.ieee, 1)], group_id=0x1234 + ) + await hass.async_block_till_done() + + assert len(zha_group.members) == 1 + assert zha_group.members[0].device is device_light_1 + assert zha_group.group_id == 0x1234 + + +@patch( + "homeassistant.components.zha.core.gateway.ZHAGateway.async_load_devices", + MagicMock(), +) +@patch( + "homeassistant.components.zha.core.gateway.ZHAGateway.async_load_groups", + MagicMock(), +) +@pytest.mark.parametrize( + ("device_path", "thread_state", "config_override"), + [ + ("/dev/ttyUSB0", True, {}), + ("socket://192.168.1.123:9999", False, {}), + ("socket://192.168.1.123:9999", True, {"use_thread": True}), + ], +) +async def test_gateway_initialize_bellows_thread( + device_path: str, + thread_state: bool, + config_override: dict, + hass: HomeAssistant, + zigpy_app_controller: ControllerApplication, + config_entry: MockConfigEntry, +) -> None: + """Test ZHA disabling the UART thread when connecting to a TCP coordinator.""" + data = dict(config_entry.data) + data["device"]["path"] = device_path + config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry(config_entry, data=data) + + zha_gateway = ZHAGateway(hass, {"zigpy_config": config_override}, config_entry) + + with patch( + "bellows.zigbee.application.ControllerApplication.new", + return_value=zigpy_app_controller, + ) as mock_new: + await zha_gateway.async_initialize() + + assert mock_new.mock_calls[-1].kwargs["config"]["use_thread"] is thread_state + + await zha_gateway.shutdown() + + +@pytest.mark.parametrize( + ("device_path", "config_override", "expected_channel"), + [ + ("/dev/ttyUSB0", {}, None), + ("socket://192.168.1.123:9999", {}, None), + ("socket://192.168.1.123:9999", {"network": {"channel": 20}}, 20), + ("socket://core-silabs-multiprotocol:9999", {}, 15), + ("socket://core-silabs-multiprotocol:9999", {"network": {"channel": 20}}, 20), + ], +) +async def test_gateway_force_multi_pan_channel( + device_path: str, + config_override: dict, + expected_channel: int | None, + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test ZHA disabling the UART thread when connecting to a TCP coordinator.""" + data = dict(config_entry.data) + data["device"]["path"] = device_path + config_entry.add_to_hass(hass) + 
hass.config_entries.async_update_entry(config_entry, data=data) + + zha_gateway = ZHAGateway(hass, {"zigpy_config": config_override}, config_entry) + + _, config = zha_gateway.get_application_controller_data() + assert config["network"]["channel"] == expected_channel + + +async def test_single_reload_on_multiple_connection_loss( + hass: HomeAssistant, + zigpy_app_controller: ControllerApplication, + config_entry: MockConfigEntry, +) -> None: + """Test that we only reload once when we lose the connection multiple times.""" + config_entry.add_to_hass(hass) + + zha_gateway = ZHAGateway(hass, {}, config_entry) + + with patch( + "bellows.zigbee.application.ControllerApplication.new", + return_value=zigpy_app_controller, + ): + await zha_gateway.async_initialize() + + with patch.object( + hass.config_entries, "async_reload", wraps=hass.config_entries.async_reload + ) as mock_reload: + zha_gateway.connection_lost(RuntimeError()) + zha_gateway.connection_lost(RuntimeError()) + zha_gateway.connection_lost(RuntimeError()) + zha_gateway.connection_lost(RuntimeError()) + zha_gateway.connection_lost(RuntimeError()) + + assert len(mock_reload.mock_calls) == 1 + + await hass.async_block_till_done() + + +@pytest.mark.parametrize("radio_concurrency", [1, 2, 8]) +async def test_startup_concurrency_limit( + radio_concurrency: int, + hass: HomeAssistant, + zigpy_app_controller: ControllerApplication, + config_entry: MockConfigEntry, + zigpy_device_mock, +) -> None: + """Test ZHA gateway limits concurrency on startup.""" + config_entry.add_to_hass(hass) + zha_gateway = ZHAGateway(hass, {}, config_entry) + + with patch( + "bellows.zigbee.application.ControllerApplication.new", + return_value=zigpy_app_controller, + ): + await zha_gateway.async_initialize() + + for i in range(50): + zigpy_dev = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.OnOff.cluster_id, + general.LevelControl.cluster_id, + lighting.Color.cluster_id, + general.Groups.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + ieee=f"11:22:33:44:{i:08x}", + nwk=0x1234 + i, + ) + zigpy_dev.node_desc.mac_capability_flags |= ( + zigpy.zdo.types.NodeDescriptor.MACCapabilityFlags.MainsPowered + ) + + zha_gateway._async_get_or_create_device(zigpy_dev) + + # Keep track of request concurrency during initialization + current_concurrency = 0 + concurrencies = [] + + async def mock_send_packet(*args, **kwargs): + nonlocal current_concurrency + + current_concurrency += 1 + concurrencies.append(current_concurrency) + + await asyncio.sleep(0.001) + + current_concurrency -= 1 + concurrencies.append(current_concurrency) + + type(zha_gateway).radio_concurrency = PropertyMock(return_value=radio_concurrency) + assert zha_gateway.radio_concurrency == radio_concurrency + + with patch( + "homeassistant.components.zha.core.device.ZHADevice.async_initialize", + side_effect=mock_send_packet, + ): + await zha_gateway.async_fetch_updated_state_mains() + + await zha_gateway.shutdown() + + # Make sure concurrency was always limited + assert current_concurrency == 0 + assert min(concurrencies) == 0 + + if radio_concurrency > 1: + assert 1 <= max(concurrencies) < zha_gateway.radio_concurrency + else: + assert 1 == max(concurrencies) == zha_gateway.radio_concurrency diff --git a/tests/components/zha/test_helpers.py b/tests/components/zha/test_helpers.py index f6dc8291d9f..0615fefd644 100644 --- a/tests/components/zha/test_helpers.py +++ b/tests/components/zha/test_helpers.py @@ -1,34 +1,81 
@@ """Tests for ZHA helpers.""" +import enum import logging -from typing import Any +from unittest.mock import patch import pytest import voluptuous_serialize -from zigpy.application import ControllerApplication +from zigpy.profiles import zha +from zigpy.quirks.v2.homeassistant import UnitOfPower as QuirksUnitOfPower from zigpy.types.basic import uint16_t -from zigpy.zcl.clusters import lighting +from zigpy.zcl.clusters import general, lighting -import homeassistant.components.zha.const as zha_const -from homeassistant.components.zha.helpers import ( +from homeassistant.components.zha.core.helpers import ( cluster_command_schema_to_vol_schema, convert_to_zcl_values, - create_zha_config, - exclude_none_values, - get_zha_data, + validate_unit, ) +from homeassistant.const import Platform, UnitOfPower from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv -from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry +from .common import async_enable_traffic +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE _LOGGER = logging.getLogger(__name__) -async def test_zcl_schema_conversions(hass: HomeAssistant) -> None: +@pytest.fixture(autouse=True) +def light_platform_only(): + """Only set up the light and required base platforms to speed up tests.""" + with patch( + "homeassistant.components.zha.PLATFORMS", + ( + Platform.BUTTON, + Platform.LIGHT, + Platform.NUMBER, + Platform.SELECT, + ), + ): + yield + + +@pytest.fixture +async def device_light(hass: HomeAssistant, zigpy_device_mock, zha_device_joined): + """Test light.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.OnOff.cluster_id, + general.LevelControl.cluster_id, + lighting.Color.cluster_id, + general.Groups.cluster_id, + general.Identify.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + ) + color_cluster = zigpy_device.endpoints[1].light_color + color_cluster.PLUGGED_ATTR_READS = { + "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature + | lighting.Color.ColorCapabilities.XY_attributes + } + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return color_cluster, zha_device + + +async def test_zcl_schema_conversions(hass: HomeAssistant, device_light) -> None: """Test ZHA ZCL schema conversion helpers.""" - command_schema = lighting.Color.ServerCommandDefs.color_loop_set.schema + color_cluster, zha_device = device_light + await async_enable_traffic(hass, [zha_device]) + command_schema = color_cluster.commands_by_name["color_loop_set"].schema expected_schema = [ { "type": "multi_select", @@ -67,14 +114,16 @@ async def test_zcl_schema_conversions(hass: HomeAssistant) -> None: "required": True, }, { - "type": "multi_select", - "options": ["Execute if off present"], + "type": "integer", + "valueMin": 0, + "valueMax": 255, "name": "options_mask", "optional": True, }, { - "type": "multi_select", - "options": ["Execute if off"], + "type": "integer", + "valueMin": 0, + "valueMax": 255, "name": "options_override", "optional": True, }, @@ -166,53 +215,23 @@ async def test_zcl_schema_conversions(hass: HomeAssistant) -> None: assert converted_data["update_flags"] == 0 -@pytest.mark.parametrize( - ("obj", "expected_output"), - [ - ({"a": 1, "b": 2, "c": None}, {"a": 1, "b": 2}), - ({"a": 1, "b": 2, "c": 0}, {"a": 1, "b": 2, "c": 0}), - ({"a": 1, "b": 2, "c": ""}, {"a": 1, "b": 2, 
"c": ""}), - ({"a": 1, "b": 2, "c": False}, {"a": 1, "b": 2, "c": False}), - ], -) -def test_exclude_none_values( - obj: dict[str, Any], expected_output: dict[str, Any] -) -> None: - """Test exclude_none_values helper.""" - result = exclude_none_values(obj) - assert result == expected_output +def test_unit_validation() -> None: + """Test unit validation.""" - for key in expected_output: - assert expected_output[key] == obj[key] + assert validate_unit(QuirksUnitOfPower.WATT) == UnitOfPower.WATT + class FooUnit(enum.Enum): + """Foo unit.""" -async def test_create_zha_config_remove_unused( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_zigpy_connect: ControllerApplication, -) -> None: - """Test creating ZHA config data with unused keys.""" - config_entry.add_to_hass(hass) + BAR = "bar" - options = config_entry.options.copy() - options["custom_configuration"]["zha_options"]["some_random_key"] = "a value" + class UnitOfMass(enum.Enum): + """UnitOfMass.""" - hass.config_entries.async_update_entry(config_entry, options=options) + BAR = "bar" - assert ( - config_entry.options["custom_configuration"]["zha_options"]["some_random_key"] - == "a value" - ) + with pytest.raises(KeyError): + validate_unit(FooUnit.BAR) - status = await async_setup_component( - hass, - zha_const.DOMAIN, - {zha_const.DOMAIN: {zha_const.CONF_ENABLE_QUIRKS: False}}, - ) - assert status is True - await hass.async_block_till_done() - - ha_zha_data = get_zha_data(hass) - - # Does not error out - create_zha_config(hass, ha_zha_data) + with pytest.raises(ValueError): + validate_unit(UnitOfMass.BAR) diff --git a/tests/components/zha/test_init.py b/tests/components/zha/test_init.py index 887284919da..4d4956d3978 100644 --- a/tests/components/zha/test_init.py +++ b/tests/components/zha/test_init.py @@ -3,21 +3,20 @@ import asyncio import typing from unittest.mock import AsyncMock, Mock, patch -import zoneinfo import pytest from zigpy.application import ControllerApplication from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH from zigpy.exceptions import TransientConnectionError -from homeassistant.components.zha.const import ( +from homeassistant.components.zha.core.const import ( CONF_BAUDRATE, CONF_FLOW_CONTROL, CONF_RADIO_TYPE, CONF_USB_PATH, DOMAIN, ) -from homeassistant.components.zha.helpers import get_zha_data, get_zha_gateway +from homeassistant.components.zha.core.helpers import get_zha_data from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, MAJOR_VERSION, @@ -44,7 +43,7 @@ def disable_platform_only(): @pytest.fixture -def config_entry_v1(hass: HomeAssistant): +def config_entry_v1(hass): """Config entry version 1 fixture.""" return MockConfigEntry( domain=DOMAIN, @@ -140,6 +139,7 @@ async def test_config_depreciation(hass: HomeAssistant, zha_config) -> None: ("socket://[1.2.3.4]:5678 ", "socket://1.2.3.4:5678"), ], ) +@patch("homeassistant.components.zha.setup_quirks", Mock(return_value=True)) @patch( "homeassistant.components.zha.websocket_api.async_load_api", Mock(return_value=True) ) @@ -252,7 +252,7 @@ async def test_zha_retry_unique_ids( ) as mock_connect: with patch( "homeassistant.config_entries.async_call_later", - lambda hass, delay, action: async_call_later(hass, 0.01, action), + lambda hass, delay, action: async_call_later(hass, 0, action), ): await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done(wait_background_tasks=True) @@ -282,30 +282,10 @@ async def test_shutdown_on_ha_stop( zha_data = get_zha_data(hass) with patch.object( - 
zha_data.gateway_proxy, "shutdown", wraps=zha_data.gateway_proxy.shutdown + zha_data.gateway, "shutdown", wraps=zha_data.gateway.shutdown ) as mock_shutdown: hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) hass.set_state(CoreState.stopping) await hass.async_block_till_done() assert len(mock_shutdown.mock_calls) == 1 - - -async def test_timezone_update( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_zigpy_connect: ControllerApplication, -) -> None: - """Test that the ZHA gateway timezone is updated when HA timezone changes.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - gateway = get_zha_gateway(hass) - - assert hass.config.time_zone == "US/Pacific" - assert gateway.config.local_timezone == zoneinfo.ZoneInfo("US/Pacific") - - await hass.config.async_update(time_zone="America/New_York") - - assert hass.config.time_zone == "America/New_York" - assert gateway.config.local_timezone == zoneinfo.ZoneInfo("America/New_York") diff --git a/tests/components/zha/test_light.py b/tests/components/zha/test_light.py index ef2714b3b58..a9d32362863 100644 --- a/tests/components/zha/test_light.py +++ b/tests/components/zha/test_light.py @@ -1,11 +1,12 @@ """Test ZHA light.""" +from collections.abc import Callable +from datetime import timedelta +from typing import Any from unittest.mock import AsyncMock, call, patch, sentinel import pytest -from zha.application.platforms.light.const import FLASH_EFFECTS from zigpy.profiles import zha -from zigpy.zcl import Cluster from zigpy.zcl.clusters import general, lighting import zigpy.zcl.foundation as zcl_f @@ -15,23 +16,41 @@ from homeassistant.components.light import ( FLASH_SHORT, ColorMode, ) -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, +from homeassistant.components.zha.core.const import ( + CONF_ALWAYS_PREFER_XY_COLOR_MODE, + CONF_GROUP_MEMBERS_ASSUME_STATE, + ZHA_OPTIONS, ) -from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.components.zha.core.group import GroupMember +from homeassistant.components.zha.core.helpers import get_zha_gateway +from homeassistant.components.zha.light import FLASH_EFFECTS +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +import homeassistant.util.dt as dt_util from .common import ( + async_enable_traffic, + async_find_group_entity_id, async_shift_time, + async_test_rejoin, + async_wait_for_updates, find_entity_id, + patch_zha_config, send_attributes_report, update_attribute_cache, ) from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from tests.common import ( + async_fire_time_changed, + async_mock_load_restore_state_from_storage, +) + +IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" +IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e9" +IEEE_GROUPABLE_DEVICE3 = "03:2d:6f:00:0a:90:69:e7" + LIGHT_ON_OFF = { 1: { SIG_EP_PROFILE: zha.PROFILE_ID, @@ -92,6 +111,195 @@ def light_platform_only(): yield +@pytest.fixture +async def coordinator(hass, zigpy_device_mock, zha_device_joined): + """Test ZHA light platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Groups.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + ieee="00:15:8d:00:02:32:4f:32", + nwk=0x0000, + 
node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return zha_device + + +@pytest.fixture +async def device_light_1(hass, zigpy_device_mock, zha_device_joined): + """Test ZHA light platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.OnOff.cluster_id, + general.LevelControl.cluster_id, + lighting.Color.cluster_id, + general.Groups.cluster_id, + general.Identify.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + ieee=IEEE_GROUPABLE_DEVICE, + nwk=0xB79D, + ) + color_cluster = zigpy_device.endpoints[1].light_color + color_cluster.PLUGGED_ATTR_READS = { + "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature + | lighting.Color.ColorCapabilities.XY_attributes + } + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return zha_device + + +@pytest.fixture +async def device_light_2(hass, zigpy_device_mock, zha_device_joined): + """Test ZHA light platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.OnOff.cluster_id, + general.LevelControl.cluster_id, + lighting.Color.cluster_id, + general.Groups.cluster_id, + general.Identify.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + ieee=IEEE_GROUPABLE_DEVICE2, + manufacturer="sengled", + nwk=0xC79E, + ) + color_cluster = zigpy_device.endpoints[1].light_color + color_cluster.PLUGGED_ATTR_READS = { + "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature + | lighting.Color.ColorCapabilities.XY_attributes + } + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return zha_device + + +@pytest.fixture +async def device_light_3(hass, zigpy_device_mock, zha_device_joined): + """Test ZHA light platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.OnOff.cluster_id, + general.LevelControl.cluster_id, + lighting.Color.cluster_id, + general.Groups.cluster_id, + general.Identify.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + ieee=IEEE_GROUPABLE_DEVICE3, + nwk=0xB89F, + ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return zha_device + + +@pytest.fixture +async def eWeLink_light(hass, zigpy_device_mock, zha_device_joined): + """Mock eWeLink light.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.OnOff.cluster_id, + general.LevelControl.cluster_id, + lighting.Color.cluster_id, + general.Groups.cluster_id, + general.Identify.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + ieee="03:2d:6f:00:0a:90:69:e3", + manufacturer="eWeLink", + nwk=0xB79D, + ) + color_cluster = zigpy_device.endpoints[1].light_color + color_cluster.PLUGGED_ATTR_READS = { + "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature + | lighting.Color.ColorCapabilities.XY_attributes, + "color_temp_physical_min": 0, + "color_temp_physical_max": 0, + } + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return zha_device + + +async def test_light_refresh( + hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored +) -> None: + 
"""Test ZHA light platform refresh.""" + + # create zigpy devices + zigpy_device = zigpy_device_mock(LIGHT_ON_OFF) + on_off_cluster = zigpy_device.endpoints[1].on_off + on_off_cluster.PLUGGED_ATTR_READS = {"on_off": 0} + zha_device = await zha_device_joined_restored(zigpy_device) + entity_id = find_entity_id(Platform.LIGHT, zha_device, hass) + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + on_off_cluster.read_attributes.reset_mock() + + # not enough time passed + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=20)) + await hass.async_block_till_done() + assert on_off_cluster.read_attributes.call_count == 0 + assert on_off_cluster.read_attributes.await_count == 0 + assert hass.states.get(entity_id).state == STATE_OFF + + # 1 interval - 1 call + on_off_cluster.PLUGGED_ATTR_READS = {"on_off": 1} + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=80)) + await hass.async_block_till_done() + assert on_off_cluster.read_attributes.call_count == 1 + assert on_off_cluster.read_attributes.await_count == 1 + assert hass.states.get(entity_id).state == STATE_ON + + # 2 intervals - 2 calls + on_off_cluster.PLUGGED_ATTR_READS = {"on_off": 0} + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=80)) + await hass.async_block_till_done() + assert on_off_cluster.read_attributes.call_count == 2 + assert on_off_cluster.read_attributes.await_count == 2 + assert hass.states.get(entity_id).state == STATE_OFF + + @patch( "zigpy.zcl.clusters.lighting.Color.request", new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), @@ -114,42 +322,34 @@ def light_platform_only(): ) async def test_light( hass: HomeAssistant, - setup_zha, zigpy_device_mock, + zha_device_joined_restored, device, reporting, ) -> None: """Test ZHA light platform.""" - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - + # create zigpy devices zigpy_device = zigpy_device_mock(device) - cluster_color = getattr(zigpy_device.endpoints[1], "light_color", None) + zha_device = await zha_device_joined_restored(zigpy_device) + entity_id = find_entity_id(Platform.LIGHT, zha_device, hass) - if cluster_color: - cluster_color.PLUGGED_ATTR_READS = { - "color_temperature": 100, - "color_temp_physical_min": 0, - "color_temp_physical_max": 600, - "color_capabilities": lighting.ColorCapabilities.XY_attributes - | lighting.ColorCapabilities.Color_temperature, - } - update_attribute_cache(cluster_color) - - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - entity_id = find_entity_id(Platform.LIGHT, zha_device_proxy, hass) assert entity_id is not None cluster_on_off = zigpy_device.endpoints[1].on_off cluster_level = getattr(zigpy_device.endpoints[1], "level", None) + cluster_color = getattr(zigpy_device.endpoints[1], "light_color", None) cluster_identify = getattr(zigpy_device.endpoints[1], "identify", None) + assert hass.states.get(entity_id).state == STATE_OFF + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the lights were created and that they are unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + # test that the 
lights were created and are off assert hass.states.get(entity_id).state == STATE_OFF # test turning the lights on and off from the light @@ -179,6 +379,889 @@ async def test_light( hass, cluster_level, entity_id, 150, STATE_ON ) + # test rejoin + await async_test_off_from_hass(hass, cluster_on_off, entity_id) + clusters = [c for c in (cluster_on_off, cluster_level, cluster_color) if c] + await async_test_rejoin(hass, zigpy_device, clusters, reporting) + + +@pytest.mark.parametrize( + ("plugged_attr_reads", "config_override", "expected_state"), + [ + # HS light without cached hue or saturation + ( + { + "color_capabilities": ( + lighting.Color.ColorCapabilities.Hue_and_saturation + ), + }, + {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False}, + {}, + ), + # HS light with cached hue + ( + { + "color_capabilities": ( + lighting.Color.ColorCapabilities.Hue_and_saturation + ), + "current_hue": 100, + }, + {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False}, + {}, + ), + # HS light with cached saturation + ( + { + "color_capabilities": ( + lighting.Color.ColorCapabilities.Hue_and_saturation + ), + "current_saturation": 100, + }, + {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False}, + {}, + ), + # HS light with both + ( + { + "color_capabilities": ( + lighting.Color.ColorCapabilities.Hue_and_saturation + ), + "current_hue": 100, + "current_saturation": 100, + }, + {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False}, + {}, + ), + ], +) +async def test_light_initialization( + hass: HomeAssistant, + zigpy_device_mock, + zha_device_joined_restored, + plugged_attr_reads, + config_override, + expected_state, +) -> None: + """Test ZHA light initialization with cached attributes and color modes.""" + + # create zigpy devices + zigpy_device = zigpy_device_mock(LIGHT_COLOR) + + # mock attribute reads + zigpy_device.endpoints[1].light_color.PLUGGED_ATTR_READS = plugged_attr_reads + + with patch_zha_config("light", config_override): + zha_device = await zha_device_joined_restored(zigpy_device) + entity_id = find_entity_id(Platform.LIGHT, zha_device, hass) + + assert entity_id is not None + + # pylint: disable-next=fixme + # TODO ensure hue and saturation are properly set on startup + + +@patch( + "zigpy.zcl.clusters.lighting.Color.request", + new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), +) +@patch( + "zigpy.zcl.clusters.general.Identify.request", + new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), +) +@patch( + "zigpy.zcl.clusters.general.LevelControl.request", + new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), +) +@patch( + "zigpy.zcl.clusters.general.OnOff.request", + new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), +) +async def test_transitions( + hass: HomeAssistant, device_light_1, device_light_2, eWeLink_light, coordinator +) -> None: + """Test ZHA light transition code.""" + zha_gateway = get_zha_gateway(hass) + assert zha_gateway is not None + zha_gateway.coordinator_zha_device = coordinator + coordinator._zha_gateway = zha_gateway + device_light_1._zha_gateway = zha_gateway + device_light_2._zha_gateway = zha_gateway + member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee] + members = [GroupMember(device_light_1.ieee, 1), GroupMember(device_light_2.ieee, 1)] + + assert coordinator.is_coordinator + + # test creating a group with 2 members + zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) + await hass.async_block_till_done() + + assert zha_group is not None + 
assert len(zha_group.members) == 2 + for member in zha_group.members: + assert member.device.ieee in member_ieee_addresses + assert member.group == zha_group + assert member.endpoint is not None + + device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass) + device_2_entity_id = find_entity_id(Platform.LIGHT, device_light_2, hass) + eWeLink_light_entity_id = find_entity_id(Platform.LIGHT, eWeLink_light, hass) + assert device_1_entity_id != device_2_entity_id + + group_entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group) + assert hass.states.get(group_entity_id) is not None + + assert device_1_entity_id in zha_group.member_entity_ids + assert device_2_entity_id in zha_group.member_entity_ids + + dev1_cluster_on_off = device_light_1.device.endpoints[1].on_off + dev2_cluster_on_off = device_light_2.device.endpoints[1].on_off + eWeLink_cluster_on_off = eWeLink_light.device.endpoints[1].on_off + + dev1_cluster_level = device_light_1.device.endpoints[1].level + dev2_cluster_level = device_light_2.device.endpoints[1].level + eWeLink_cluster_level = eWeLink_light.device.endpoints[1].level + + dev1_cluster_color = device_light_1.device.endpoints[1].light_color + dev2_cluster_color = device_light_2.device.endpoints[1].light_color + eWeLink_cluster_color = eWeLink_light.device.endpoints[1].light_color + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [device_light_1, device_light_2]) + await async_wait_for_updates(hass) + + # test that the lights were created and are off + group_state = hass.states.get(group_entity_id) + assert group_state.state == STATE_OFF + light1_state = hass.states.get(device_1_entity_id) + assert light1_state.state == STATE_OFF + light2_state = hass.states.get(device_2_entity_id) + assert light2_state.state == STATE_OFF + + # first test 0 length transition with no color and no brightness provided + dev1_cluster_on_off.request.reset_mock() + dev1_cluster_level.request.reset_mock() + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + {"entity_id": device_1_entity_id, "transition": 0}, + blocking=True, + ) + assert dev1_cluster_on_off.request.call_count == 0 + assert dev1_cluster_on_off.request.await_count == 0 + assert dev1_cluster_color.request.call_count == 0 + assert dev1_cluster_color.request.await_count == 0 + assert dev1_cluster_level.request.call_count == 1 + assert dev1_cluster_level.request.await_count == 1 + assert dev1_cluster_level.request.call_args == call( + False, + dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, + dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, + level=254, # default "full on" brightness + transition_time=0, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + light1_state = hass.states.get(device_1_entity_id) + assert light1_state.state == STATE_ON + assert light1_state.attributes["brightness"] == 254 + + # test 0 length transition with no color and no brightness provided again, but for "force on" lights + eWeLink_cluster_on_off.request.reset_mock() + eWeLink_cluster_level.request.reset_mock() + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + {"entity_id": eWeLink_light_entity_id, "transition": 0}, + blocking=True, + ) + assert eWeLink_cluster_on_off.request.call_count == 1 + assert eWeLink_cluster_on_off.request.await_count == 1 + assert eWeLink_cluster_on_off.request.call_args_list[0] == call( + False, + eWeLink_cluster_on_off.commands_by_name["on"].id, + 
eWeLink_cluster_on_off.commands_by_name["on"].schema, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + assert eWeLink_cluster_color.request.call_count == 0 + assert eWeLink_cluster_color.request.await_count == 0 + assert eWeLink_cluster_level.request.call_count == 1 + assert eWeLink_cluster_level.request.await_count == 1 + assert eWeLink_cluster_level.request.call_args == call( + False, + eWeLink_cluster_level.commands_by_name["move_to_level_with_on_off"].id, + eWeLink_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, + level=254, # default "full on" brightness + transition_time=0, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + eWeLink_state = hass.states.get(eWeLink_light_entity_id) + assert eWeLink_state.state == STATE_ON + assert eWeLink_state.attributes["brightness"] == 254 + + eWeLink_cluster_on_off.request.reset_mock() + eWeLink_cluster_level.request.reset_mock() + + # test 0 length transition with brightness, but no color provided + dev1_cluster_on_off.request.reset_mock() + dev1_cluster_level.request.reset_mock() + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + {"entity_id": device_1_entity_id, "transition": 0, "brightness": 50}, + blocking=True, + ) + assert dev1_cluster_on_off.request.call_count == 0 + assert dev1_cluster_on_off.request.await_count == 0 + assert dev1_cluster_color.request.call_count == 0 + assert dev1_cluster_color.request.await_count == 0 + assert dev1_cluster_level.request.call_count == 1 + assert dev1_cluster_level.request.await_count == 1 + assert dev1_cluster_level.request.call_args == call( + False, + dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, + dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, + level=50, + transition_time=0, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + light1_state = hass.states.get(device_1_entity_id) + assert light1_state.state == STATE_ON + assert light1_state.attributes["brightness"] == 50 + + dev1_cluster_level.request.reset_mock() + + # test non 0 length transition with color provided while light is on + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + { + "entity_id": device_1_entity_id, + "transition": 3.5, + "brightness": 18, + "color_temp": 432, + }, + blocking=True, + ) + assert dev1_cluster_on_off.request.call_count == 0 + assert dev1_cluster_on_off.request.await_count == 0 + assert dev1_cluster_color.request.call_count == 1 + assert dev1_cluster_color.request.await_count == 1 + assert dev1_cluster_level.request.call_count == 1 + assert dev1_cluster_level.request.await_count == 1 + assert dev1_cluster_level.request.call_args == call( + False, + dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, + dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, + level=18, + transition_time=35, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + assert dev1_cluster_color.request.call_args == call( + False, + dev1_cluster_color.commands_by_name["move_to_color_temp"].id, + dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, + color_temp_mireds=432, + transition_time=35, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + light1_state = hass.states.get(device_1_entity_id) + assert light1_state.state == STATE_ON + assert light1_state.attributes["brightness"] == 18 + assert light1_state.attributes["color_temp"] == 432 + assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP + + dev1_cluster_level.request.reset_mock() + 
dev1_cluster_color.request.reset_mock() + + # test 0 length transition to turn light off + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_off", + { + "entity_id": device_1_entity_id, + "transition": 0, + }, + blocking=True, + ) + assert dev1_cluster_on_off.request.call_count == 0 + assert dev1_cluster_on_off.request.await_count == 0 + assert dev1_cluster_color.request.call_count == 0 + assert dev1_cluster_color.request.await_count == 0 + assert dev1_cluster_level.request.call_count == 1 + assert dev1_cluster_level.request.await_count == 1 + assert dev1_cluster_level.request.call_args == call( + False, + dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, + dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, + level=0, + transition_time=0, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + light1_state = hass.states.get(device_1_entity_id) + assert light1_state.state == STATE_OFF + + dev1_cluster_level.request.reset_mock() + + # test non 0 length transition and color temp while turning light on (new_color_provided_while_off) + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + { + "entity_id": device_1_entity_id, + "transition": 1, + "brightness": 25, + "color_temp": 235, + }, + blocking=True, + ) + assert dev1_cluster_on_off.request.call_count == 0 + assert dev1_cluster_on_off.request.await_count == 0 + assert dev1_cluster_color.request.call_count == 1 + assert dev1_cluster_color.request.await_count == 1 + assert dev1_cluster_level.request.call_count == 2 + assert dev1_cluster_level.request.await_count == 2 + + # first it comes on with no transition at 2 brightness + assert dev1_cluster_level.request.call_args_list[0] == call( + False, + dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, + dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, + level=2, + transition_time=0, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + assert dev1_cluster_color.request.call_args == call( + False, + dev1_cluster_color.commands_by_name["move_to_color_temp"].id, + dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, + color_temp_mireds=235, + transition_time=0, # no transition when new_color_provided_while_off + expect_reply=True, + manufacturer=None, + tsn=None, + ) + assert dev1_cluster_level.request.call_args_list[1] == call( + False, + dev1_cluster_level.commands_by_name["move_to_level"].id, + dev1_cluster_level.commands_by_name["move_to_level"].schema, + level=25, + transition_time=10, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + light1_state = hass.states.get(device_1_entity_id) + assert light1_state.state == STATE_ON + assert light1_state.attributes["brightness"] == 25 + assert light1_state.attributes["color_temp"] == 235 + assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP + + dev1_cluster_level.request.reset_mock() + dev1_cluster_color.request.reset_mock() + + # turn light 1 back off + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_off", + { + "entity_id": device_1_entity_id, + }, + blocking=True, + ) + assert dev1_cluster_on_off.request.call_count == 1 + assert dev1_cluster_on_off.request.await_count == 1 + assert dev1_cluster_color.request.call_count == 0 + assert dev1_cluster_color.request.await_count == 0 + assert dev1_cluster_level.request.call_count == 0 + assert dev1_cluster_level.request.await_count == 0 + group_state = hass.states.get(group_entity_id) + assert group_state.state == STATE_OFF + + 
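+    # A minimal sketch (illustration only, not part of the recorded change or of
+    # the ZHA light implementation) of the three-command sequence the assertions
+    # above encode for new_color_provided_while_off; the helper name is hypothetical.
+    def expected_color_while_off_sequence(level: int, transition_ds: int) -> list[tuple]:
+        """Restate the command order the test expects (transition in ZCL 1/10 s units)."""
+        return [
+            # 1. turn on at minimal brightness, no transition
+            ("move_to_level_with_on_off", {"level": 2, "transition_time": 0}),
+            # 2. apply the new color temperature immediately while "off"
+            ("move_to_color_temp", {"transition_time": 0}),
+            # 3. fade up to the requested brightness over the requested transition
+            ("move_to_level", {"level": level, "transition_time": transition_ds}),
+        ]
+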
dev1_cluster_on_off.request.reset_mock() + dev1_cluster_color.request.reset_mock() + dev1_cluster_level.request.reset_mock() + + # test no transition provided and color temp while turning light on (new_color_provided_while_off) + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + { + "entity_id": device_1_entity_id, + "brightness": 25, + "color_temp": 236, + }, + blocking=True, + ) + assert dev1_cluster_on_off.request.call_count == 0 + assert dev1_cluster_on_off.request.await_count == 0 + assert dev1_cluster_color.request.call_count == 1 + assert dev1_cluster_color.request.await_count == 1 + assert dev1_cluster_level.request.call_count == 2 + assert dev1_cluster_level.request.await_count == 2 + + # first it comes on with no transition at 2 brightness + assert dev1_cluster_level.request.call_args_list[0] == call( + False, + dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, + dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, + level=2, + transition_time=0, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + assert dev1_cluster_color.request.call_args == call( + False, + dev1_cluster_color.commands_by_name["move_to_color_temp"].id, + dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, + color_temp_mireds=236, + transition_time=0, # no transition when new_color_provided_while_off + expect_reply=True, + manufacturer=None, + tsn=None, + ) + assert dev1_cluster_level.request.call_args_list[1] == call( + False, + dev1_cluster_level.commands_by_name["move_to_level"].id, + dev1_cluster_level.commands_by_name["move_to_level"].schema, + level=25, + transition_time=0, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + light1_state = hass.states.get(device_1_entity_id) + assert light1_state.state == STATE_ON + assert light1_state.attributes["brightness"] == 25 + assert light1_state.attributes["color_temp"] == 236 + assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP + + dev1_cluster_level.request.reset_mock() + dev1_cluster_color.request.reset_mock() + + # turn light 1 back off to setup group test + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_off", + { + "entity_id": device_1_entity_id, + }, + blocking=True, + ) + assert dev1_cluster_on_off.request.call_count == 1 + assert dev1_cluster_on_off.request.await_count == 1 + assert dev1_cluster_color.request.call_count == 0 + assert dev1_cluster_color.request.await_count == 0 + assert dev1_cluster_level.request.call_count == 0 + assert dev1_cluster_level.request.await_count == 0 + group_state = hass.states.get(group_entity_id) + assert group_state.state == STATE_OFF + + dev1_cluster_on_off.request.reset_mock() + dev1_cluster_color.request.reset_mock() + dev1_cluster_level.request.reset_mock() + + # test no transition when the same color temp is provided from off + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + { + "entity_id": device_1_entity_id, + "color_temp": 236, + }, + blocking=True, + ) + assert dev1_cluster_on_off.request.call_count == 1 + assert dev1_cluster_on_off.request.await_count == 1 + assert dev1_cluster_color.request.call_count == 1 + assert dev1_cluster_color.request.await_count == 1 + assert dev1_cluster_level.request.call_count == 0 + assert dev1_cluster_level.request.await_count == 0 + + assert dev1_cluster_on_off.request.call_args == call( + False, + dev1_cluster_on_off.commands_by_name["on"].id, + dev1_cluster_on_off.commands_by_name["on"].schema, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + 
assert dev1_cluster_color.request.call_args == call( + False, + dev1_cluster_color.commands_by_name["move_to_color_temp"].id, + dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, + color_temp_mireds=236, + transition_time=0, # no transition when new_color_provided_while_off + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + light1_state = hass.states.get(device_1_entity_id) + assert light1_state.state == STATE_ON + assert light1_state.attributes["brightness"] == 25 + assert light1_state.attributes["color_temp"] == 236 + assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP + + dev1_cluster_on_off.request.reset_mock() + dev1_cluster_color.request.reset_mock() + + # turn light 1 back off to setup group test + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_off", + { + "entity_id": device_1_entity_id, + }, + blocking=True, + ) + assert dev1_cluster_on_off.request.call_count == 1 + assert dev1_cluster_on_off.request.await_count == 1 + assert dev1_cluster_color.request.call_count == 0 + assert dev1_cluster_color.request.await_count == 0 + assert dev1_cluster_level.request.call_count == 0 + assert dev1_cluster_level.request.await_count == 0 + group_state = hass.states.get(group_entity_id) + assert group_state.state == STATE_OFF + + dev1_cluster_on_off.request.reset_mock() + dev1_cluster_color.request.reset_mock() + dev1_cluster_level.request.reset_mock() + + # test sengled light uses default minimum transition time + dev2_cluster_on_off.request.reset_mock() + dev2_cluster_color.request.reset_mock() + dev2_cluster_level.request.reset_mock() + + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + {"entity_id": device_2_entity_id, "transition": 0, "brightness": 100}, + blocking=True, + ) + assert dev2_cluster_on_off.request.call_count == 0 + assert dev2_cluster_on_off.request.await_count == 0 + assert dev2_cluster_color.request.call_count == 0 + assert dev2_cluster_color.request.await_count == 0 + assert dev2_cluster_level.request.call_count == 1 + assert dev2_cluster_level.request.await_count == 1 + assert dev2_cluster_level.request.call_args == call( + False, + dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, + dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, + level=100, + transition_time=1, # transition time - sengled light uses default minimum + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + light2_state = hass.states.get(device_2_entity_id) + assert light2_state.state == STATE_ON + assert light2_state.attributes["brightness"] == 100 + + dev2_cluster_level.request.reset_mock() + + # turn the sengled light back off + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_off", + { + "entity_id": device_2_entity_id, + }, + blocking=True, + ) + assert dev2_cluster_on_off.request.call_count == 1 + assert dev2_cluster_on_off.request.await_count == 1 + assert dev2_cluster_color.request.call_count == 0 + assert dev2_cluster_color.request.await_count == 0 + assert dev2_cluster_level.request.call_count == 0 + assert dev2_cluster_level.request.await_count == 0 + light2_state = hass.states.get(device_2_entity_id) + assert light2_state.state == STATE_OFF + + dev2_cluster_on_off.request.reset_mock() + + # test non 0 length transition and color temp while turning light on and sengled (new_color_provided_while_off) + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + { + "entity_id": device_2_entity_id, + "transition": 1, + "brightness": 25, + "color_temp": 235, + }, + 
blocking=True, + ) + assert dev2_cluster_on_off.request.call_count == 0 + assert dev2_cluster_on_off.request.await_count == 0 + assert dev2_cluster_color.request.call_count == 1 + assert dev2_cluster_color.request.await_count == 1 + assert dev2_cluster_level.request.call_count == 2 + assert dev2_cluster_level.request.await_count == 2 + + # first it comes on with no transition at 2 brightness + assert dev2_cluster_level.request.call_args_list[0] == call( + False, + dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, + dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, + level=2, + transition_time=1, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + assert dev2_cluster_color.request.call_args == call( + False, + dev2_cluster_color.commands_by_name["move_to_color_temp"].id, + dev2_cluster_color.commands_by_name["move_to_color_temp"].schema, + color_temp_mireds=235, + transition_time=1, # sengled transition == 1 when new_color_provided_while_off + expect_reply=True, + manufacturer=None, + tsn=None, + ) + assert dev2_cluster_level.request.call_args_list[1] == call( + False, + dev2_cluster_level.commands_by_name["move_to_level"].id, + dev2_cluster_level.commands_by_name["move_to_level"].schema, + level=25, + transition_time=10, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + light2_state = hass.states.get(device_2_entity_id) + assert light2_state.state == STATE_ON + assert light2_state.attributes["brightness"] == 25 + assert light2_state.attributes["color_temp"] == 235 + assert light2_state.attributes["color_mode"] == ColorMode.COLOR_TEMP + + dev2_cluster_level.request.reset_mock() + dev2_cluster_color.request.reset_mock() + + # turn the sengled light back off + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_off", + { + "entity_id": device_2_entity_id, + }, + blocking=True, + ) + assert dev2_cluster_on_off.request.call_count == 1 + assert dev2_cluster_on_off.request.await_count == 1 + assert dev2_cluster_color.request.call_count == 0 + assert dev2_cluster_color.request.await_count == 0 + assert dev2_cluster_level.request.call_count == 0 + assert dev2_cluster_level.request.await_count == 0 + light2_state = hass.states.get(device_2_entity_id) + assert light2_state.state == STATE_OFF + + dev2_cluster_on_off.request.reset_mock() + + # test non 0 length transition and color temp while turning group light on (new_color_provided_while_off) + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + { + "entity_id": group_entity_id, + "transition": 1, + "brightness": 25, + "color_temp": 235, + }, + blocking=True, + ) + + group_on_off_cluster_handler = zha_group.endpoint[general.OnOff.cluster_id] + group_level_cluster_handler = zha_group.endpoint[general.LevelControl.cluster_id] + group_color_cluster_handler = zha_group.endpoint[lighting.Color.cluster_id] + assert group_on_off_cluster_handler.request.call_count == 0 + assert group_on_off_cluster_handler.request.await_count == 0 + assert group_color_cluster_handler.request.call_count == 1 + assert group_color_cluster_handler.request.await_count == 1 + assert group_level_cluster_handler.request.call_count == 1 + assert group_level_cluster_handler.request.await_count == 1 + + # groups are omitted from the 3 call dance for new_color_provided_while_off + assert group_color_cluster_handler.request.call_args == call( + False, + dev2_cluster_color.commands_by_name["move_to_color_temp"].id, + dev2_cluster_color.commands_by_name["move_to_color_temp"].schema, + color_temp_mireds=235, + 
transition_time=10, # sengled transition == 1 when new_color_provided_while_off + expect_reply=True, + manufacturer=None, + tsn=None, + ) + assert group_level_cluster_handler.request.call_args == call( + False, + dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, + dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, + level=25, + transition_time=10, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + group_state = hass.states.get(group_entity_id) + assert group_state.state == STATE_ON + assert group_state.attributes["brightness"] == 25 + assert group_state.attributes["color_temp"] == 235 + assert group_state.attributes["color_mode"] == ColorMode.COLOR_TEMP + + group_on_off_cluster_handler.request.reset_mock() + group_color_cluster_handler.request.reset_mock() + group_level_cluster_handler.request.reset_mock() + + # turn the sengled light back on + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + { + "entity_id": device_2_entity_id, + }, + blocking=True, + ) + assert dev2_cluster_on_off.request.call_count == 1 + assert dev2_cluster_on_off.request.await_count == 1 + assert dev2_cluster_color.request.call_count == 0 + assert dev2_cluster_color.request.await_count == 0 + assert dev2_cluster_level.request.call_count == 0 + assert dev2_cluster_level.request.await_count == 0 + light2_state = hass.states.get(device_2_entity_id) + assert light2_state.state == STATE_ON + + dev2_cluster_on_off.request.reset_mock() + + # turn the light off with a transition + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_off", + {"entity_id": device_2_entity_id, "transition": 2}, + blocking=True, + ) + assert dev2_cluster_on_off.request.call_count == 0 + assert dev2_cluster_on_off.request.await_count == 0 + assert dev2_cluster_color.request.call_count == 0 + assert dev2_cluster_color.request.await_count == 0 + assert dev2_cluster_level.request.call_count == 1 + assert dev2_cluster_level.request.await_count == 1 + assert dev2_cluster_level.request.call_args == call( + False, + dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, + dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, + level=0, + transition_time=20, # transition time + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + light2_state = hass.states.get(device_2_entity_id) + assert light2_state.state == STATE_OFF + + dev2_cluster_level.request.reset_mock() + + # turn the light back on with no args should use a transition and last known brightness + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + {"entity_id": device_2_entity_id}, + blocking=True, + ) + assert dev2_cluster_on_off.request.call_count == 0 + assert dev2_cluster_on_off.request.await_count == 0 + assert dev2_cluster_color.request.call_count == 0 + assert dev2_cluster_color.request.await_count == 0 + assert dev2_cluster_level.request.call_count == 1 + assert dev2_cluster_level.request.await_count == 1 + assert dev2_cluster_level.request.call_args == call( + False, + dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, + dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, + level=25, + transition_time=1, # transition time - sengled light uses default minimum + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + light2_state = hass.states.get(device_2_entity_id) + assert light2_state.state == STATE_ON + + dev2_cluster_level.request.reset_mock() + + # test eWeLink color temp while turning light on from off 
(new_color_provided_while_off) + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + { + "entity_id": eWeLink_light_entity_id, + "color_temp": 235, + }, + blocking=True, + ) + assert eWeLink_cluster_on_off.request.call_count == 1 + assert eWeLink_cluster_on_off.request.await_count == 1 + assert eWeLink_cluster_color.request.call_count == 1 + assert eWeLink_cluster_color.request.await_count == 1 + assert eWeLink_cluster_level.request.call_count == 0 + assert eWeLink_cluster_level.request.await_count == 0 + + # first it comes on + assert eWeLink_cluster_on_off.request.call_args_list[0] == call( + False, + eWeLink_cluster_on_off.commands_by_name["on"].id, + eWeLink_cluster_on_off.commands_by_name["on"].schema, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + assert dev1_cluster_color.request.call_args == call( + False, + dev1_cluster_color.commands_by_name["move_to_color_temp"].id, + dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, + color_temp_mireds=235, + transition_time=0, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + + eWeLink_state = hass.states.get(eWeLink_light_entity_id) + assert eWeLink_state.state == STATE_ON + assert eWeLink_state.attributes["color_temp"] == 235 + assert eWeLink_state.attributes["color_mode"] == ColorMode.COLOR_TEMP + assert eWeLink_state.attributes["min_mireds"] == 153 + assert eWeLink_state.attributes["max_mireds"] == 500 + @patch( "zigpy.zcl.clusters.lighting.Color.request", @@ -192,51 +1275,13 @@ async def test_light( "zigpy.zcl.clusters.general.OnOff.request", new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), ) -async def test_on_with_off_color( - hass: HomeAssistant, setup_zha, zigpy_device_mock -) -> None: +async def test_on_with_off_color(hass: HomeAssistant, device_light_1) -> None: """Test turning on the light and sending color commands before on/level commands for supporting lights.""" - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - general.Identify.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - nwk=0xB79D, - ) - - dev1_cluster_color = zigpy_device.endpoints[1].light_color - - dev1_cluster_color.PLUGGED_ATTR_READS = { - "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature - | lighting.Color.ColorCapabilities.XY_attributes - } - - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - entity_id = find_entity_id(Platform.LIGHT, zha_device_proxy, hass) - assert entity_id is not None - - device_1_entity_id = find_entity_id(Platform.LIGHT, zha_device_proxy, hass) - dev1_cluster_on_off = zigpy_device.endpoints[1].on_off - dev1_cluster_level = zigpy_device.endpoints[1].level + device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass) + dev1_cluster_on_off = device_light_1.device.endpoints[1].on_off + dev1_cluster_level = device_light_1.device.endpoints[1].level + dev1_cluster_color = device_light_1.device.endpoints[1].light_color # Execute_if_off will override the "enhanced turn on from an off-state" config option that's 
enabled here dev1_cluster_color.PLUGGED_ATTR_READS = { @@ -358,34 +1403,28 @@ async def test_on_with_off_color( assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP -async def async_test_on_off_from_light( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_on_off_from_light(hass, cluster, entity_id): """Test on off functionality from the light.""" # turn on at light await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 3}) - await hass.async_block_till_done(wait_background_tasks=True) + await async_wait_for_updates(hass) assert hass.states.get(entity_id).state == STATE_ON # turn off at light await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 3}) - await hass.async_block_till_done(wait_background_tasks=True) + await async_wait_for_updates(hass) assert hass.states.get(entity_id).state == STATE_OFF -async def async_test_on_from_light( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_on_from_light(hass, cluster, entity_id): """Test on off functionality from the light.""" # turn on at light await send_attributes_report(hass, cluster, {1: -1, 0: 1, 2: 2}) - await hass.async_block_till_done(wait_background_tasks=True) + await async_wait_for_updates(hass) assert hass.states.get(entity_id).state == STATE_ON -async def async_test_on_off_from_hass( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_on_off_from_hass(hass, cluster, entity_id): """Test on off functionality from hass.""" # turn on via UI cluster.request.reset_mock() @@ -406,9 +1445,7 @@ async def async_test_on_off_from_hass( await async_test_off_from_hass(hass, cluster, entity_id) -async def async_test_off_from_hass( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_off_from_hass(hass, cluster, entity_id): """Test turning off the light from Home Assistant.""" # turn off via UI @@ -430,9 +1467,9 @@ async def async_test_off_from_hass( async def async_test_level_on_off_from_hass( hass: HomeAssistant, - on_off_cluster: Cluster, - level_cluster: Cluster, - entity_id: str, + on_off_cluster, + level_cluster, + entity_id, expected_default_transition: int = 0, ): """Test on off functionality from hass.""" @@ -512,19 +1549,13 @@ async def async_test_level_on_off_from_hass( await async_test_off_from_hass(hass, on_off_cluster, entity_id) -async def async_test_dimmer_from_light( - hass: HomeAssistant, - cluster: Cluster, - entity_id: str, - level: int, - expected_state: str, -): +async def async_test_dimmer_from_light(hass, cluster, entity_id, level, expected_state): """Test dimmer functionality from the light.""" await send_attributes_report( hass, cluster, {1: level + 10, 0: level, 2: level - 10 or 22} ) - await hass.async_block_till_done(wait_background_tasks=True) + await async_wait_for_updates(hass) assert hass.states.get(entity_id).state == expected_state # hass uses None for brightness of 0 in state attributes if level == 0: @@ -532,9 +1563,7 @@ async def async_test_dimmer_from_light( assert hass.states.get(entity_id).attributes.get("brightness") == level -async def async_test_flash_from_hass( - hass: HomeAssistant, cluster: Cluster, entity_id: str, flash -): +async def async_test_flash_from_hass(hass, cluster, entity_id, flash): """Test flash functionality from hass.""" # turn on via UI cluster.request.reset_mock() @@ -574,23 +1603,405 @@ async def async_test_flash_from_hass( "zigpy.zcl.clusters.general.OnOff.request", new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), ) 
-async def test_light_exception_on_creation( +@patch( + "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY", + new=0, +) +async def test_zha_group_light_entity( hass: HomeAssistant, - setup_zha, - zigpy_device_mock, - caplog: pytest.LogCaptureFixture, + entity_registry: er.EntityRegistry, + device_light_1, + device_light_2, + device_light_3, + coordinator, ) -> None: - """Test ZHA light entity creation exception.""" + """Test the light entity for a ZHA group.""" + zha_gateway = get_zha_gateway(hass) + assert zha_gateway is not None + zha_gateway.coordinator_zha_device = coordinator + coordinator._zha_gateway = zha_gateway + device_light_1._zha_gateway = zha_gateway + device_light_2._zha_gateway = zha_gateway + member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee] + members = [GroupMember(device_light_1.ieee, 1), GroupMember(device_light_2.ieee, 1)] - await setup_zha() - gateway = get_zha_gateway(hass) - zigpy_device = zigpy_device_mock(LIGHT_COLOR) + assert coordinator.is_coordinator - gateway.get_or_create_device(zigpy_device) - with patch( - "homeassistant.components.zha.light.Light.__init__", side_effect=Exception + # test creating a group with 2 members + zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) + await hass.async_block_till_done() + + assert zha_group is not None + assert len(zha_group.members) == 2 + for member in zha_group.members: + assert member.device.ieee in member_ieee_addresses + assert member.group == zha_group + assert member.endpoint is not None + + device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass) + device_2_entity_id = find_entity_id(Platform.LIGHT, device_light_2, hass) + device_3_entity_id = find_entity_id(Platform.LIGHT, device_light_3, hass) + + assert device_1_entity_id not in (device_2_entity_id, device_3_entity_id) + assert device_2_entity_id != device_3_entity_id + + group_entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group) + assert hass.states.get(group_entity_id) is not None + + assert device_1_entity_id in zha_group.member_entity_ids + assert device_2_entity_id in zha_group.member_entity_ids + assert device_3_entity_id not in zha_group.member_entity_ids + + group_cluster_on_off = zha_group.endpoint[general.OnOff.cluster_id] + group_cluster_level = zha_group.endpoint[general.LevelControl.cluster_id] + group_cluster_identify = zha_group.endpoint[general.Identify.cluster_id] + + dev1_cluster_on_off = device_light_1.device.endpoints[1].on_off + dev2_cluster_on_off = device_light_2.device.endpoints[1].on_off + dev3_cluster_on_off = device_light_3.device.endpoints[1].on_off + + dev1_cluster_level = device_light_1.device.endpoints[1].level + + await async_enable_traffic( + hass, [device_light_1, device_light_2, device_light_3], enabled=False + ) + await async_wait_for_updates(hass) + # test that the lights were created and that they are unavailable + assert hass.states.get(group_entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [device_light_1, device_light_2, device_light_3]) + await async_wait_for_updates(hass) + + # test that the lights were created and are off + group_state = hass.states.get(group_entity_id) + assert group_state.state == STATE_OFF + assert group_state.attributes["supported_color_modes"] == [ + ColorMode.COLOR_TEMP, + ColorMode.XY, + ] + # Light which is off has no color mode + assert group_state.attributes["color_mode"] is None + + # test turning the 
lights on and off from the HA + await async_test_on_off_from_hass(hass, group_cluster_on_off, group_entity_id) + + await async_shift_time(hass) + + # test short flashing the lights from the HA + await async_test_flash_from_hass( + hass, group_cluster_identify, group_entity_id, FLASH_SHORT + ) + + await async_shift_time(hass) + + # test turning the lights on and off from the light + await async_test_on_off_from_light(hass, dev1_cluster_on_off, group_entity_id) + + # test turning the lights on and off from the HA + await async_test_level_on_off_from_hass( + hass, + group_cluster_on_off, + group_cluster_level, + group_entity_id, + expected_default_transition=1, # a Sengled light is in that group and needs a minimum 0.1s transition + ) + + await async_shift_time(hass) + + # test getting a brightness change from the network + await async_test_on_from_light(hass, dev1_cluster_on_off, group_entity_id) + await async_test_dimmer_from_light( + hass, dev1_cluster_level, group_entity_id, 150, STATE_ON + ) + # Check state + group_state = hass.states.get(group_entity_id) + assert group_state.state == STATE_ON + assert group_state.attributes["supported_color_modes"] == [ + ColorMode.COLOR_TEMP, + ColorMode.XY, + ] + assert group_state.attributes["color_mode"] == ColorMode.XY + + # test long flashing the lights from the HA + await async_test_flash_from_hass( + hass, group_cluster_identify, group_entity_id, FLASH_LONG + ) + + await async_shift_time(hass) + + assert len(zha_group.members) == 2 + # test some of the group logic to make sure we key off states correctly + await send_attributes_report(hass, dev1_cluster_on_off, {0: 1}) + await send_attributes_report(hass, dev2_cluster_on_off, {0: 1}) + await hass.async_block_till_done() + + # test that group light is on + assert hass.states.get(device_1_entity_id).state == STATE_ON + assert hass.states.get(device_2_entity_id).state == STATE_ON + assert hass.states.get(group_entity_id).state == STATE_ON + + await send_attributes_report(hass, dev1_cluster_on_off, {0: 0}) + await hass.async_block_till_done() + + # test that group light is still on + assert hass.states.get(device_1_entity_id).state == STATE_OFF + assert hass.states.get(device_2_entity_id).state == STATE_ON + assert hass.states.get(group_entity_id).state == STATE_ON + + await send_attributes_report(hass, dev2_cluster_on_off, {0: 0}) + await async_wait_for_updates(hass) + + # test that group light is now off + assert hass.states.get(device_1_entity_id).state == STATE_OFF + assert hass.states.get(device_2_entity_id).state == STATE_OFF + assert hass.states.get(group_entity_id).state == STATE_OFF + + await send_attributes_report(hass, dev1_cluster_on_off, {0: 1}) + await async_wait_for_updates(hass) + + # test that group light is now back on + assert hass.states.get(device_1_entity_id).state == STATE_ON + assert hass.states.get(device_2_entity_id).state == STATE_OFF + assert hass.states.get(group_entity_id).state == STATE_ON + + # turn it off to test a new member add being tracked + await send_attributes_report(hass, dev1_cluster_on_off, {0: 0}) + await async_wait_for_updates(hass) + assert hass.states.get(device_1_entity_id).state == STATE_OFF + assert hass.states.get(device_2_entity_id).state == STATE_OFF + assert hass.states.get(group_entity_id).state == STATE_OFF + + # add a new member and test that his state is also tracked + await zha_group.async_add_members([GroupMember(device_light_3.ieee, 1)]) + await send_attributes_report(hass, dev3_cluster_on_off, {0: 1}) + await async_wait_for_updates(hass) 
+ assert device_3_entity_id in zha_group.member_entity_ids + assert len(zha_group.members) == 3 + + assert hass.states.get(device_1_entity_id).state == STATE_OFF + assert hass.states.get(device_2_entity_id).state == STATE_OFF + assert hass.states.get(device_3_entity_id).state == STATE_ON + assert hass.states.get(group_entity_id).state == STATE_ON + + # make the group have only 1 member and now there should be no entity + await zha_group.async_remove_members( + [GroupMember(device_light_2.ieee, 1), GroupMember(device_light_3.ieee, 1)] + ) + assert len(zha_group.members) == 1 + assert hass.states.get(group_entity_id) is None + assert device_2_entity_id not in zha_group.member_entity_ids + assert device_3_entity_id not in zha_group.member_entity_ids + + # make sure the entity registry entry is still there + assert entity_registry.async_get(group_entity_id) is not None + + # add a member back and ensure that the group entity was created again + await zha_group.async_add_members([GroupMember(device_light_3.ieee, 1)]) + await send_attributes_report(hass, dev3_cluster_on_off, {0: 1}) + await async_wait_for_updates(hass) + assert len(zha_group.members) == 2 + assert hass.states.get(group_entity_id).state == STATE_ON + + # add a 3rd member and ensure we still have an entity and we track the new one + await send_attributes_report(hass, dev1_cluster_on_off, {0: 0}) + await send_attributes_report(hass, dev3_cluster_on_off, {0: 0}) + await async_wait_for_updates(hass) + assert hass.states.get(group_entity_id).state == STATE_OFF + + # this will test that _reprobe_group is used correctly + await zha_group.async_add_members( + [GroupMember(device_light_2.ieee, 1), GroupMember(coordinator.ieee, 1)] + ) + await send_attributes_report(hass, dev2_cluster_on_off, {0: 1}) + await async_wait_for_updates(hass) + assert len(zha_group.members) == 4 + assert hass.states.get(group_entity_id).state == STATE_ON + + await zha_group.async_remove_members([GroupMember(coordinator.ieee, 1)]) + await hass.async_block_till_done() + assert hass.states.get(group_entity_id).state == STATE_ON + assert len(zha_group.members) == 3 + + # remove the group and ensure that there is no entity and that the entity registry is cleaned up + assert entity_registry.async_get(group_entity_id) is not None + await zha_gateway.async_remove_zigpy_group(zha_group.group_id) + assert hass.states.get(group_entity_id) is None + assert entity_registry.async_get(group_entity_id) is None + + +@patch( + "zigpy.zcl.clusters.general.OnOff.request", + new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), +) +@patch( + "homeassistant.components.zha.light.ASSUME_UPDATE_GROUP_FROM_CHILD_DELAY", + new=0, +) +async def test_group_member_assume_state( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + zigpy_device_mock, + zha_device_joined, + coordinator, + device_light_1, + device_light_2, +) -> None: + """Test the group members assume state function.""" + with patch_zha_config( + "light", {(ZHA_OPTIONS, CONF_GROUP_MEMBERS_ASSUME_STATE): True} ): - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) + zha_gateway = get_zha_gateway(hass) + assert zha_gateway is not None + zha_gateway.coordinator_zha_device = coordinator + coordinator._zha_gateway = zha_gateway + device_light_1._zha_gateway = zha_gateway + device_light_2._zha_gateway = zha_gateway + member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee] + members = [ + GroupMember(device_light_1.ieee, 1), + 
GroupMember(device_light_2.ieee, 1), + ] - assert "Error while adding entity from entity data" in caplog.text + assert coordinator.is_coordinator + + # test creating a group with 2 members + zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) + await hass.async_block_till_done() + + assert zha_group is not None + assert len(zha_group.members) == 2 + for member in zha_group.members: + assert member.device.ieee in member_ieee_addresses + assert member.group == zha_group + assert member.endpoint is not None + + device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass) + device_2_entity_id = find_entity_id(Platform.LIGHT, device_light_2, hass) + + assert device_1_entity_id != device_2_entity_id + + group_entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group) + assert hass.states.get(group_entity_id) is not None + + assert device_1_entity_id in zha_group.member_entity_ids + assert device_2_entity_id in zha_group.member_entity_ids + + group_cluster_on_off = zha_group.endpoint[general.OnOff.cluster_id] + + await async_enable_traffic( + hass, [device_light_1, device_light_2], enabled=False + ) + await async_wait_for_updates(hass) + # test that the lights were created and that they are unavailable + assert hass.states.get(group_entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [device_light_1, device_light_2]) + await async_wait_for_updates(hass) + + # test that the lights were created and are off + group_state = hass.states.get(group_entity_id) + assert group_state.state == STATE_OFF + + group_cluster_on_off.request.reset_mock() + await async_shift_time(hass) + + # turn on via UI + await hass.services.async_call( + LIGHT_DOMAIN, "turn_on", {"entity_id": group_entity_id}, blocking=True + ) + + # members also instantly assume STATE_ON + assert hass.states.get(device_1_entity_id).state == STATE_ON + assert hass.states.get(device_2_entity_id).state == STATE_ON + assert hass.states.get(group_entity_id).state == STATE_ON + + # turn off via UI + await hass.services.async_call( + LIGHT_DOMAIN, "turn_off", {"entity_id": group_entity_id}, blocking=True + ) + + # members also instantly assume STATE_OFF + assert hass.states.get(device_1_entity_id).state == STATE_OFF + assert hass.states.get(device_2_entity_id).state == STATE_OFF + assert hass.states.get(group_entity_id).state == STATE_OFF + + # remove the group and ensure that there is no entity and that the entity registry is cleaned up + assert entity_registry.async_get(group_entity_id) is not None + await zha_gateway.async_remove_zigpy_group(zha_group.group_id) + assert hass.states.get(group_entity_id) is None + assert entity_registry.async_get(group_entity_id) is None + + +@pytest.mark.parametrize( + ("restored_state", "expected_state"), + [ + ( + STATE_ON, + { + "brightness": None, + "off_with_transition": None, + "off_brightness": None, + "color_mode": ColorMode.XY, # color_mode defaults to what the light supports when restored with ON state + "color_temp": None, + "xy_color": None, + "hs_color": None, + "effect": None, + }, + ), + ( + STATE_OFF, + { + "brightness": None, + "off_with_transition": None, + "off_brightness": None, + "color_mode": None, + "color_temp": None, + "xy_color": None, + "hs_color": None, + "effect": None, + }, + ), + ], +) +async def test_restore_light_state( + hass: HomeAssistant, + zigpy_device_mock, + core_rs: Callable[[str, Any, dict[str, Any]], None], + zha_device_restored, + 
restored_state: str, + expected_state: dict[str, Any], +) -> None: + """Test ZHA light restores without throwing an error when attributes are None.""" + + # restore state with None values + attributes = { + "brightness": None, + "off_with_transition": None, + "off_brightness": None, + "color_mode": None, + "color_temp": None, + "xy_color": None, + "hs_color": None, + "effect": None, + } + + entity_id = "light.fakemanufacturer_fakemodel_light" + core_rs( + entity_id, + state=restored_state, + attributes=attributes, + ) + await async_mock_load_restore_state_from_storage(hass) + + zigpy_device = zigpy_device_mock(LIGHT_COLOR) + zha_device = await zha_device_restored(zigpy_device) + entity_id = find_entity_id(Platform.LIGHT, zha_device, hass) + + assert entity_id is not None + assert hass.states.get(entity_id).state == restored_state + + # compare actual restored state to expected state + for attribute, expected_value in expected_state.items(): + assert hass.states.get(entity_id).attributes.get(attribute) == expected_value diff --git a/tests/components/zha/test_lock.py b/tests/components/zha/test_lock.py index dd4afb0ae14..b16d7a31828 100644 --- a/tests/components/zha/test_lock.py +++ b/tests/components/zha/test_lock.py @@ -3,23 +3,27 @@ from unittest.mock import patch import pytest -from zigpy.profiles import zha -from zigpy.zcl import Cluster +import zigpy.profiles.zha from zigpy.zcl.clusters import closures, general import zigpy.zcl.foundation as zcl_f -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.const import ( + STATE_LOCKED, + STATE_UNAVAILABLE, + STATE_UNLOCKED, + Platform, ) -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .common import find_entity_id, send_attributes_report -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from .common import async_enable_traffic, find_entity_id, send_attributes_report +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE + +LOCK_DOOR = 0 +UNLOCK_DOOR = 1 +SET_PIN_CODE = 5 +CLEAR_PIN_CODE = 7 +SET_USER_STATUS = 9 @pytest.fixture(autouse=True) @@ -36,52 +40,49 @@ def lock_platform_only(): yield -async def test_lock(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: - """Test ZHA lock platform.""" - - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) +@pytest.fixture +async def lock(hass, zigpy_device_mock, zha_device_joined_restored): + """Lock cluster fixture.""" zigpy_device = zigpy_device_mock( { 1: { SIG_EP_INPUT: [closures.DoorLock.cluster_id, general.Basic.cluster_id], SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.DOOR_LOCK, - SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.DOOR_LOCK, } }, - ieee="01:2d:6f:00:0a:90:69:e8", - node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", ) - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) + zha_device = await zha_device_joined_restored(zigpy_device) + return zha_device, zigpy_device.endpoints[1].door_lock - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - entity_id = find_entity_id(Platform.LOCK, zha_device_proxy, hass) 
- cluster = zigpy_device.endpoints[1].door_lock + +async def test_lock(hass: HomeAssistant, lock) -> None: + """Test ZHA lock platform.""" + + zha_device, cluster = lock + entity_id = find_entity_id(Platform.LOCK, zha_device, hass) assert entity_id is not None - assert hass.states.get(entity_id).state == LockState.UNLOCKED + assert hass.states.get(entity_id).state == STATE_UNLOCKED + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the lock was created and that it is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + # test that the state has changed from unavailable to unlocked + assert hass.states.get(entity_id).state == STATE_UNLOCKED # set state to locked - await send_attributes_report( - hass, - cluster, - {closures.DoorLock.AttributeDefs.lock_state.id: closures.LockState.Locked}, - ) - assert hass.states.get(entity_id).state == LockState.LOCKED + await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 2}) + assert hass.states.get(entity_id).state == STATE_LOCKED # set state to unlocked - await send_attributes_report( - hass, - cluster, - {closures.DoorLock.AttributeDefs.lock_state.id: closures.LockState.Unlocked}, - ) - assert hass.states.get(entity_id).state == LockState.UNLOCKED + await send_attributes_report(hass, cluster, {1: 0, 0: 2, 2: 3}) + assert hass.states.get(entity_id).state == STATE_UNLOCKED # lock from HA await async_lock(hass, cluster, entity_id) @@ -102,7 +103,7 @@ async def test_lock(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: await async_disable_user_code(hass, cluster, entity_id) -async def async_lock(hass: HomeAssistant, cluster: Cluster, entity_id: str): +async def async_lock(hass, cluster, entity_id): """Test lock functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # lock via UI @@ -111,13 +112,10 @@ async def async_lock(hass: HomeAssistant, cluster: Cluster, entity_id: str): ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert ( - cluster.request.call_args[0][1] - == closures.DoorLock.ServerCommandDefs.lock_door.id - ) + assert cluster.request.call_args[0][1] == LOCK_DOOR -async def async_unlock(hass: HomeAssistant, cluster: Cluster, entity_id: str): +async def async_unlock(hass, cluster, entity_id): """Test lock functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # lock via UI @@ -126,13 +124,10 @@ async def async_unlock(hass: HomeAssistant, cluster: Cluster, entity_id: str): ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert ( - cluster.request.call_args[0][1] - == closures.DoorLock.ServerCommandDefs.unlock_door.id - ) + assert cluster.request.call_args[0][1] == UNLOCK_DOOR -async def async_set_user_code(hass: HomeAssistant, cluster: Cluster, entity_id: str): +async def async_set_user_code(hass, cluster, entity_id): """Test set lock code functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # set lock code via service call @@ -144,10 +139,7 @@ async def async_set_user_code(hass: HomeAssistant, cluster: Cluster, entity_id: ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert ( - cluster.request.call_args[0][1] - == closures.DoorLock.ServerCommandDefs.set_pin_code.id - ) + assert 
cluster.request.call_args[0][1] == SET_PIN_CODE assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2 assert cluster.request.call_args[0][4] == closures.DoorLock.UserStatus.Enabled assert ( @@ -156,7 +148,7 @@ async def async_set_user_code(hass: HomeAssistant, cluster: Cluster, entity_id: assert cluster.request.call_args[0][6] == "13246579" -async def async_clear_user_code(hass: HomeAssistant, cluster: Cluster, entity_id: str): +async def async_clear_user_code(hass, cluster, entity_id): """Test clear lock code functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # set lock code via service call @@ -171,14 +163,11 @@ async def async_clear_user_code(hass: HomeAssistant, cluster: Cluster, entity_id ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert ( - cluster.request.call_args[0][1] - == closures.DoorLock.ServerCommandDefs.clear_pin_code.id - ) + assert cluster.request.call_args[0][1] == CLEAR_PIN_CODE assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2 -async def async_enable_user_code(hass: HomeAssistant, cluster: Cluster, entity_id: str): +async def async_enable_user_code(hass, cluster, entity_id): """Test enable lock code functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # set lock code via service call @@ -193,17 +182,12 @@ async def async_enable_user_code(hass: HomeAssistant, cluster: Cluster, entity_i ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert ( - cluster.request.call_args[0][1] - == closures.DoorLock.ServerCommandDefs.set_user_status.id - ) + assert cluster.request.call_args[0][1] == SET_USER_STATUS assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2 assert cluster.request.call_args[0][4] == closures.DoorLock.UserStatus.Enabled -async def async_disable_user_code( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_disable_user_code(hass, cluster, entity_id): """Test disable lock code functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # set lock code via service call @@ -218,9 +202,6 @@ async def async_disable_user_code( ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert ( - cluster.request.call_args[0][1] - == closures.DoorLock.ServerCommandDefs.set_user_status.id - ) + assert cluster.request.call_args[0][1] == SET_USER_STATUS assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2 assert cluster.request.call_args[0][4] == closures.DoorLock.UserStatus.Disabled diff --git a/tests/components/zha/test_logbook.py b/tests/components/zha/test_logbook.py index 0b27cd095a9..19a6f9d359f 100644 --- a/tests/components/zha/test_logbook.py +++ b/tests/components/zha/test_logbook.py @@ -3,16 +3,10 @@ from unittest.mock import patch import pytest -from zha.application.const import ZHA_EVENT import zigpy.profiles.zha from zigpy.zcl.clusters import general -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, -) +from homeassistant.components.zha.core.const import ZHA_EVENT from homeassistant.const import CONF_DEVICE_ID, CONF_UNIQUE_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -46,13 +40,9 @@ def sensor_platform_only(): 
@pytest.fixture -async def mock_devices(hass: HomeAssistant, setup_zha, zigpy_device_mock): +async def mock_devices(hass, zigpy_device_mock, zha_device_joined): """IAS device fixture.""" - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - zigpy_device = zigpy_device_mock( { 1: { @@ -64,13 +54,10 @@ async def mock_devices(hass: HomeAssistant, setup_zha, zigpy_device_mock): } ) - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - - return zigpy_device, zha_device_proxy + zha_device = await zha_device_joined(zigpy_device) + zha_device.update_available(True) + await hass.async_block_till_done() + return zigpy_device, zha_device async def test_zha_logbook_event_device_with_triggers( @@ -89,7 +76,7 @@ async def test_zha_logbook_event_device_with_triggers( (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, } - ieee_address = str(zha_device.device.ieee) + ieee_address = str(zha_device.ieee) reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) @@ -166,7 +153,7 @@ async def test_zha_logbook_event_device_no_triggers( """Test ZHA logbook events with device and without triggers.""" zigpy_device, zha_device = mock_devices - ieee_address = str(zha_device.device.ieee) + ieee_address = str(zha_device.ieee) reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) hass.config.components.add("recorder") diff --git a/tests/components/zha/test_number.py b/tests/components/zha/test_number.py index 180f16e9ae2..6b302f9cbd9 100644 --- a/tests/components/zha/test_number.py +++ b/tests/components/zha/test_number.py @@ -3,22 +3,26 @@ from unittest.mock import call, patch import pytest +from zigpy.exceptions import ZigbeeException from zigpy.profiles import zha -from zigpy.zcl.clusters import general +from zigpy.zcl.clusters import general, lighting import zigpy.zcl.foundation as zcl_f from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, -) -from homeassistant.const import Platform +from homeassistant.components.zha.core.device import ZHADevice +from homeassistant.const import STATE_UNAVAILABLE, EntityCategory, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component -from .common import find_entity_id, send_attributes_report, update_attribute_cache +from .common import ( + async_enable_traffic, + async_test_rejoin, + find_entity_id, + send_attributes_report, + update_attribute_cache, +) from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE @@ -39,28 +43,49 @@ def number_platform_only(): yield -async def test_number(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: - """Test ZHA number platform.""" +@pytest.fixture +def zigpy_analog_output_device(zigpy_device_mock): + """Zigpy analog_output device.""" - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + endpoints = { + 1: { + SIG_EP_TYPE: zha.DeviceType.LEVEL_CONTROL_SWITCH, + SIG_EP_INPUT: [general.AnalogOutput.cluster_id, 
general.Basic.cluster_id],
+            SIG_EP_OUTPUT: [],
+        }
+    }
+    return zigpy_device_mock(endpoints)
 
-    zigpy_device = zigpy_device_mock(
+
+@pytest.fixture
+async def light(zigpy_device_mock):
+    """Light fixture."""
+
+    return zigpy_device_mock(
         {
             1: {
-                SIG_EP_TYPE: zha.DeviceType.LEVEL_CONTROL_SWITCH,
-                SIG_EP_INPUT: [
-                    general.AnalogOutput.cluster_id,
-                    general.Basic.cluster_id,
-                ],
-                SIG_EP_OUTPUT: [],
                 SIG_EP_PROFILE: zha.PROFILE_ID,
+                SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT,
+                SIG_EP_INPUT: [
+                    general.Basic.cluster_id,
+                    general.Identify.cluster_id,
+                    general.OnOff.cluster_id,
+                    general.LevelControl.cluster_id,
+                    lighting.Color.cluster_id,
+                ],
+                SIG_EP_OUTPUT: [general.Ota.cluster_id],
             }
-        }
+        },
+        node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00",
     )
 
-    cluster = zigpy_device.endpoints[1].analog_output
+
+async def test_number(
+    hass: HomeAssistant, zha_device_joined_restored, zigpy_analog_output_device
+) -> None:
+    """Test ZHA number platform."""
+
+    cluster = zigpy_analog_output_device.endpoints.get(1).analog_output
     cluster.PLUGGED_ATTR_READS = {
         "max_present_value": 100.0,
         "min_present_value": 1.0,
@@ -73,14 +98,34 @@ async def test_number(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None
     update_attribute_cache(cluster)
     cluster.PLUGGED_ATTR_READS["present_value"] = 15.0
 
-    gateway.get_or_create_device(zigpy_device)
-    await gateway.async_device_initialized(zigpy_device)
-    await hass.async_block_till_done(wait_background_tasks=True)
+    zha_device = await zha_device_joined_restored(zigpy_analog_output_device)
+    # one read for present_value and two for the remaining configuration attributes
+    assert cluster.read_attributes.call_count == 3
+    attr_reads = set()
+    for call_args in cluster.read_attributes.call_args_list:
+        attr_reads |= set(call_args[0][0])
+    assert "max_present_value" in attr_reads
+    assert "min_present_value" in attr_reads
+    assert "relinquish_default" in attr_reads
+    assert "resolution" in attr_reads
+    assert "description" in attr_reads
+    assert "engineering_units" in attr_reads
+    assert "application_type" in attr_reads
 
-    zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee)
-    entity_id = find_entity_id(Platform.NUMBER, zha_device_proxy, hass)
+    entity_id = find_entity_id(Platform.NUMBER, zha_device, hass)
     assert entity_id is not None
 
+    await async_enable_traffic(hass, [zha_device], enabled=False)
+    # test that the number was created and that it is unavailable
+    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
+
+    # allow traffic to flow through the gateway and device
+    assert cluster.read_attributes.call_count == 3
+    await async_enable_traffic(hass, [zha_device])
+    await hass.async_block_till_done()
+    assert cluster.read_attributes.call_count == 6
+
+    # test that the state has changed from unavailable to 15.0
     assert hass.states.get(entity_id).state == "15.0"
 
     # test attributes
@@ -89,13 +134,13 @@ async def test_number(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None
     assert hass.states.get(entity_id).attributes.get("step") == 1.1
     assert hass.states.get(entity_id).attributes.get("icon") == "mdi:percent"
     assert hass.states.get(entity_id).attributes.get("unit_of_measurement") == "%"
-
     assert (
         hass.states.get(entity_id).attributes.get("friendly_name")
         == "FakeManufacturer FakeModel Number PWM1"
     )
 
     # change value from device
+    assert cluster.read_attributes.call_count == 6
     await send_attributes_report(hass, cluster, {0x0055: 15})
     assert hass.states.get(entity_id).state == "15.0"
 
@@ -120,8 +165,16 @@ async def
test_number(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None ] cluster.PLUGGED_ATTR_READS["present_value"] = 30.0 + # test rejoin + assert cluster.read_attributes.call_count == 6 + await async_test_rejoin(hass, zigpy_analog_output_device, [cluster], (1,)) + assert hass.states.get(entity_id).state == "30.0" + assert cluster.read_attributes.call_count == 9 + # update device value with failed attribute report cluster.PLUGGED_ATTR_READS["present_value"] = 40.0 + # validate the entity still contains old value + assert hass.states.get(entity_id).state == "30.0" await async_setup_component(hass, "homeassistant", {}) await hass.async_block_till_done() @@ -130,4 +183,251 @@ async def test_number(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True ) assert hass.states.get(entity_id).state == "40.0" + assert cluster.read_attributes.call_count == 10 assert "present_value" in cluster.read_attributes.call_args[0][0] + + +@pytest.mark.parametrize( + ("attr", "initial_value", "new_value"), + [ + ("on_off_transition_time", 20, 5), + ("on_level", 255, 50), + ("on_transition_time", 5, 1), + ("off_transition_time", 5, 1), + ("default_move_rate", 1, 5), + ("start_up_current_level", 254, 125), + ], +) +async def test_level_control_number( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + light: ZHADevice, + zha_device_joined, + attr: str, + initial_value: int, + new_value: int, +) -> None: + """Test ZHA level control number entities - new join.""" + level_control_cluster = light.endpoints[1].level + level_control_cluster.PLUGGED_ATTR_READS = { + attr: initial_value, + } + zha_device = await zha_device_joined(light) + + entity_id = find_entity_id( + Platform.NUMBER, + zha_device, + hass, + qualifier=attr, + ) + assert entity_id is not None + + assert level_control_cluster.read_attributes.mock_calls == [ + call( + [ + "on_off_transition_time", + "on_level", + "on_transition_time", + "off_transition_time", + "default_move_rate", + ], + allow_cache=True, + only_cache=False, + manufacturer=None, + ), + call( + ["start_up_current_level"], + allow_cache=True, + only_cache=False, + manufacturer=None, + ), + call( + [ + "current_level", + ], + allow_cache=False, + only_cache=False, + manufacturer=None, + ), + ] + + state = hass.states.get(entity_id) + assert state + assert state.state == str(initial_value) + + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry + assert entity_entry.entity_category == EntityCategory.CONFIG + + # Test number set_value + await hass.services.async_call( + "number", + "set_value", + { + "entity_id": entity_id, + "value": new_value, + }, + blocking=True, + ) + + assert level_control_cluster.write_attributes.mock_calls == [ + call({attr: new_value}, manufacturer=None) + ] + + state = hass.states.get(entity_id) + assert state + assert state.state == str(new_value) + + level_control_cluster.read_attributes.reset_mock() + await async_setup_component(hass, "homeassistant", {}) + await hass.async_block_till_done() + + await hass.services.async_call( + "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True + ) + # the mocking doesn't update the attr cache so this flips back to initial value + assert hass.states.get(entity_id).state == str(initial_value) + assert level_control_cluster.read_attributes.mock_calls == [ + call( + [attr], + allow_cache=False, + only_cache=False, + manufacturer=None, + ) + ] + + level_control_cluster.write_attributes.reset_mock() 
+ level_control_cluster.write_attributes.side_effect = ZigbeeException + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + "number", + "set_value", + { + "entity_id": entity_id, + "value": new_value, + }, + blocking=True, + ) + + assert level_control_cluster.write_attributes.mock_calls == [ + call({attr: new_value}, manufacturer=None), + call({attr: new_value}, manufacturer=None), + call({attr: new_value}, manufacturer=None), + ] + assert hass.states.get(entity_id).state == str(initial_value) + + +@pytest.mark.parametrize( + ("attr", "initial_value", "new_value"), + [("start_up_color_temperature", 500, 350)], +) +async def test_color_number( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + light: ZHADevice, + zha_device_joined, + attr: str, + initial_value: int, + new_value: int, +) -> None: + """Test ZHA color number entities - new join.""" + color_cluster = light.endpoints[1].light_color + color_cluster.PLUGGED_ATTR_READS = { + attr: initial_value, + } + zha_device = await zha_device_joined(light) + + entity_id = find_entity_id( + Platform.NUMBER, + zha_device, + hass, + qualifier=attr, + ) + assert entity_id is not None + + assert color_cluster.read_attributes.call_count == 3 + assert ( + call( + [ + "color_temp_physical_min", + "color_temp_physical_max", + "color_capabilities", + "start_up_color_temperature", + "options", + ], + allow_cache=True, + only_cache=False, + manufacturer=None, + ) + in color_cluster.read_attributes.call_args_list + ) + + state = hass.states.get(entity_id) + assert state + assert state.state == str(initial_value) + + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry + assert entity_entry.entity_category == EntityCategory.CONFIG + + # Test number set_value + await hass.services.async_call( + "number", + "set_value", + { + "entity_id": entity_id, + "value": new_value, + }, + blocking=True, + ) + + assert color_cluster.write_attributes.call_count == 1 + assert color_cluster.write_attributes.call_args[0][0] == { + attr: new_value, + } + + state = hass.states.get(entity_id) + assert state + assert state.state == str(new_value) + + color_cluster.read_attributes.reset_mock() + await async_setup_component(hass, "homeassistant", {}) + await hass.async_block_till_done() + + await hass.services.async_call( + "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True + ) + # the mocking doesn't update the attr cache so this flips back to initial value + assert hass.states.get(entity_id).state == str(initial_value) + assert color_cluster.read_attributes.call_count == 1 + assert ( + call( + [attr], + allow_cache=False, + only_cache=False, + manufacturer=None, + ) + in color_cluster.read_attributes.call_args_list + ) + + color_cluster.write_attributes.reset_mock() + color_cluster.write_attributes.side_effect = ZigbeeException + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + "number", + "set_value", + { + "entity_id": entity_id, + "value": new_value, + }, + blocking=True, + ) + + assert color_cluster.write_attributes.mock_calls == [ + call({attr: new_value}, manufacturer=None), + call({attr: new_value}, manufacturer=None), + call({attr: new_value}, manufacturer=None), + ] + assert hass.states.get(entity_id).state == str(initial_value) diff --git a/tests/components/zha/test_radio_manager.py b/tests/components/zha/test_radio_manager.py index 0a51aaa6dba..280b3d05daf 100644 --- a/tests/components/zha/test_radio_manager.py +++ 
b/tests/components/zha/test_radio_manager.py @@ -1,11 +1,10 @@ """Tests for ZHA config flow.""" -from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, create_autospec, patch import pytest import serial.tools.list_ports -from zha.application.const import RadioType +from typing_extensions import Generator from zigpy.backups import BackupManager import zigpy.config from zigpy.config import CONF_DEVICE_PATH @@ -13,7 +12,7 @@ import zigpy.types from homeassistant.components.usb import UsbServiceInfo from homeassistant.components.zha import radio_manager -from homeassistant.components.zha.const import DOMAIN +from homeassistant.components.zha.core.const import DOMAIN, RadioType from homeassistant.components.zha.radio_manager import ProbeResult, ZhaRadioManager from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant diff --git a/tests/components/zha/test_registries.py b/tests/components/zha/test_registries.py new file mode 100644 index 00000000000..2b1c0dcc561 --- /dev/null +++ b/tests/components/zha/test_registries.py @@ -0,0 +1,602 @@ +"""Test ZHA registries.""" + +from __future__ import annotations + +from unittest import mock + +import pytest +from typing_extensions import Generator +import zigpy.quirks as zigpy_quirks + +from homeassistant.components.zha.binary_sensor import IASZone +from homeassistant.components.zha.core import registries +from homeassistant.components.zha.core.const import ATTR_QUIRK_ID +from homeassistant.components.zha.entity import ZhaEntity +from homeassistant.helpers import entity_registry as er + +MANUFACTURER = "mock manufacturer" +MODEL = "mock model" +QUIRK_CLASS = "mock.test.quirk.class" +QUIRK_ID = "quirk_id" + + +@pytest.fixture +def zha_device(): + """Return a mock of ZHA device.""" + dev = mock.MagicMock() + dev.manufacturer = MANUFACTURER + dev.model = MODEL + dev.quirk_class = QUIRK_CLASS + dev.quirk_id = QUIRK_ID + return dev + + +@pytest.fixture +def cluster_handlers(cluster_handler): + """Return a mock of cluster_handlers.""" + + return [cluster_handler("level", 8), cluster_handler("on_off", 6)] + + +@pytest.mark.parametrize( + ("rule", "matched"), + [ + (registries.MatchRule(), False), + (registries.MatchRule(cluster_handler_names={"level"}), True), + (registries.MatchRule(cluster_handler_names={"level", "no match"}), False), + (registries.MatchRule(cluster_handler_names={"on_off"}), True), + (registries.MatchRule(cluster_handler_names={"on_off", "no match"}), False), + (registries.MatchRule(cluster_handler_names={"on_off", "level"}), True), + ( + registries.MatchRule(cluster_handler_names={"on_off", "level", "no match"}), + False, + ), + # test generic_id matching + (registries.MatchRule(generic_ids={"cluster_handler_0x0006"}), True), + (registries.MatchRule(generic_ids={"cluster_handler_0x0008"}), True), + ( + registries.MatchRule( + generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"} + ), + True, + ), + ( + registries.MatchRule( + generic_ids={ + "cluster_handler_0x0006", + "cluster_handler_0x0008", + "cluster_handler_0x0009", + } + ), + False, + ), + ( + registries.MatchRule( + generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"}, + cluster_handler_names={"on_off", "level"}, + ), + True, + ), + # manufacturer matching + (registries.MatchRule(manufacturers="no match"), False), + (registries.MatchRule(manufacturers=MANUFACTURER), True), + ( + registries.MatchRule( + manufacturers="no match", aux_cluster_handlers="aux_cluster_handler" + ), + False, + 
), + ( + registries.MatchRule( + manufacturers=MANUFACTURER, aux_cluster_handlers="aux_cluster_handler" + ), + True, + ), + (registries.MatchRule(models=MODEL), True), + (registries.MatchRule(models="no match"), False), + ( + registries.MatchRule( + models=MODEL, aux_cluster_handlers="aux_cluster_handler" + ), + True, + ), + ( + registries.MatchRule( + models="no match", aux_cluster_handlers="aux_cluster_handler" + ), + False, + ), + (registries.MatchRule(quirk_ids=QUIRK_ID), True), + (registries.MatchRule(quirk_ids="no match"), False), + ( + registries.MatchRule( + quirk_ids=QUIRK_ID, aux_cluster_handlers="aux_cluster_handler" + ), + True, + ), + ( + registries.MatchRule( + quirk_ids="no match", aux_cluster_handlers="aux_cluster_handler" + ), + False, + ), + # match everything + ( + registries.MatchRule( + generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"}, + cluster_handler_names={"on_off", "level"}, + manufacturers=MANUFACTURER, + models=MODEL, + quirk_ids=QUIRK_ID, + ), + True, + ), + ( + registries.MatchRule( + cluster_handler_names="on_off", + manufacturers={"random manuf", MANUFACTURER}, + ), + True, + ), + ( + registries.MatchRule( + cluster_handler_names="on_off", + manufacturers={"random manuf", "Another manuf"}, + ), + False, + ), + ( + registries.MatchRule( + cluster_handler_names="on_off", + manufacturers=lambda x: x == MANUFACTURER, + ), + True, + ), + ( + registries.MatchRule( + cluster_handler_names="on_off", + manufacturers=lambda x: x != MANUFACTURER, + ), + False, + ), + ( + registries.MatchRule( + cluster_handler_names="on_off", models={"random model", MODEL} + ), + True, + ), + ( + registries.MatchRule( + cluster_handler_names="on_off", models={"random model", "Another model"} + ), + False, + ), + ( + registries.MatchRule( + cluster_handler_names="on_off", models=lambda x: x == MODEL + ), + True, + ), + ( + registries.MatchRule( + cluster_handler_names="on_off", models=lambda x: x != MODEL + ), + False, + ), + ( + registries.MatchRule( + cluster_handler_names="on_off", + quirk_ids={"random quirk", QUIRK_ID}, + ), + True, + ), + ( + registries.MatchRule( + cluster_handler_names="on_off", + quirk_ids={"random quirk", "another quirk"}, + ), + False, + ), + ( + registries.MatchRule( + cluster_handler_names="on_off", quirk_ids=lambda x: x == QUIRK_ID + ), + True, + ), + ( + registries.MatchRule( + cluster_handler_names="on_off", quirk_ids=lambda x: x != QUIRK_ID + ), + False, + ), + ( + registries.MatchRule(cluster_handler_names="on_off", quirk_ids=QUIRK_ID), + True, + ), + ], +) +def test_registry_matching(rule, matched, cluster_handlers) -> None: + """Test strict rule matching.""" + assert ( + rule.strict_matched(MANUFACTURER, MODEL, cluster_handlers, QUIRK_ID) is matched + ) + + +@pytest.mark.parametrize( + ("rule", "matched"), + [ + (registries.MatchRule(), False), + (registries.MatchRule(cluster_handler_names={"level"}), True), + (registries.MatchRule(cluster_handler_names={"level", "no match"}), False), + (registries.MatchRule(cluster_handler_names={"on_off"}), True), + (registries.MatchRule(cluster_handler_names={"on_off", "no match"}), False), + (registries.MatchRule(cluster_handler_names={"on_off", "level"}), True), + ( + registries.MatchRule(cluster_handler_names={"on_off", "level", "no match"}), + False, + ), + ( + registries.MatchRule( + cluster_handler_names={"on_off", "level"}, models="no match" + ), + True, + ), + ( + registries.MatchRule( + cluster_handler_names={"on_off", "level"}, + models="no match", + manufacturers="no match", + ), + True, 
+ ), + ( + registries.MatchRule( + cluster_handler_names={"on_off", "level"}, + models="no match", + manufacturers=MANUFACTURER, + ), + True, + ), + # test generic_id matching + (registries.MatchRule(generic_ids={"cluster_handler_0x0006"}), True), + (registries.MatchRule(generic_ids={"cluster_handler_0x0008"}), True), + ( + registries.MatchRule( + generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"} + ), + True, + ), + ( + registries.MatchRule( + generic_ids={ + "cluster_handler_0x0006", + "cluster_handler_0x0008", + "cluster_handler_0x0009", + } + ), + False, + ), + ( + registries.MatchRule( + generic_ids={ + "cluster_handler_0x0006", + "cluster_handler_0x0008", + "cluster_handler_0x0009", + }, + models="mo match", + ), + False, + ), + ( + registries.MatchRule( + generic_ids={ + "cluster_handler_0x0006", + "cluster_handler_0x0008", + "cluster_handler_0x0009", + }, + models=MODEL, + ), + True, + ), + ( + registries.MatchRule( + generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"}, + cluster_handler_names={"on_off", "level"}, + ), + True, + ), + # manufacturer matching + (registries.MatchRule(manufacturers="no match"), False), + (registries.MatchRule(manufacturers=MANUFACTURER), True), + (registries.MatchRule(models=MODEL), True), + (registries.MatchRule(models="no match"), False), + (registries.MatchRule(quirk_ids=QUIRK_ID), True), + (registries.MatchRule(quirk_ids="no match"), False), + # match everything + ( + registries.MatchRule( + generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"}, + cluster_handler_names={"on_off", "level"}, + manufacturers=MANUFACTURER, + models=MODEL, + quirk_ids=QUIRK_ID, + ), + True, + ), + ], +) +def test_registry_loose_matching(rule, matched, cluster_handlers) -> None: + """Test loose rule matching.""" + assert ( + rule.loose_matched(MANUFACTURER, MODEL, cluster_handlers, QUIRK_ID) is matched + ) + + +def test_match_rule_claim_cluster_handlers_color(cluster_handler) -> None: + """Test cluster handler claiming.""" + ch_color = cluster_handler("color", 0x300) + ch_level = cluster_handler("level", 8) + ch_onoff = cluster_handler("on_off", 6) + + rule = registries.MatchRule( + cluster_handler_names="on_off", aux_cluster_handlers={"color", "level"} + ) + claimed = rule.claim_cluster_handlers([ch_color, ch_level, ch_onoff]) + assert {"color", "level", "on_off"} == {ch.name for ch in claimed} + + +@pytest.mark.parametrize( + ("rule", "match"), + [ + (registries.MatchRule(cluster_handler_names={"level"}), {"level"}), + (registries.MatchRule(cluster_handler_names={"level", "no match"}), {"level"}), + (registries.MatchRule(cluster_handler_names={"on_off"}), {"on_off"}), + (registries.MatchRule(generic_ids="cluster_handler_0x0000"), {"basic"}), + ( + registries.MatchRule( + cluster_handler_names="level", generic_ids="cluster_handler_0x0000" + ), + {"basic", "level"}, + ), + ( + registries.MatchRule(cluster_handler_names={"level", "power"}), + {"level", "power"}, + ), + ( + registries.MatchRule( + cluster_handler_names={"level", "on_off"}, + aux_cluster_handlers={"basic", "power"}, + ), + {"basic", "level", "on_off", "power"}, + ), + (registries.MatchRule(cluster_handler_names={"color"}), set()), + ], +) +def test_match_rule_claim_cluster_handlers( + rule, match, cluster_handler, cluster_handlers +) -> None: + """Test cluster handler claiming.""" + ch_basic = cluster_handler("basic", 0) + cluster_handlers.append(ch_basic) + ch_power = cluster_handler("power", 1) + cluster_handlers.append(ch_power) + + claimed = 
rule.claim_cluster_handlers(cluster_handlers)
+    assert match == {ch.name for ch in claimed}
+
+
+@pytest.fixture
+def entity_registry():
+    """Registry fixture."""
+    return registries.ZHAEntityRegistry()
+
+
+@pytest.mark.parametrize(
+    ("manufacturer", "model", "quirk_id", "match_name"),
+    [
+        ("random manufacturer", "random model", "random.class", "OnOff"),
+        ("random manufacturer", MODEL, "random.class", "OnOffModel"),
+        (MANUFACTURER, "random model", "random.class", "OnOffManufacturer"),
+        ("random manufacturer", "random model", QUIRK_ID, "OnOffQuirk"),
+        (MANUFACTURER, MODEL, "random.class", "OnOffModelManufacturer"),
+        (MANUFACTURER, "some model", "random.class", "OnOffMultimodel"),
+    ],
+)
+def test_weighted_match(
+    cluster_handler,
+    entity_registry: er.EntityRegistry,
+    manufacturer,
+    model,
+    quirk_id,
+    match_name,
+) -> None:
+    """Test weighted match."""
+
+    s = mock.sentinel
+
+    @entity_registry.strict_match(
+        s.component,
+        cluster_handler_names="on_off",
+        models={MODEL, "another model", "some model"},
+    )
+    class OnOffMultimodel:
+        pass
+
+    @entity_registry.strict_match(s.component, cluster_handler_names="on_off")
+    class OnOff:
+        pass
+
+    @entity_registry.strict_match(
+        s.component, cluster_handler_names="on_off", manufacturers=MANUFACTURER
+    )
+    class OnOffManufacturer:
+        pass
+
+    @entity_registry.strict_match(
+        s.component, cluster_handler_names="on_off", models=MODEL
+    )
+    class OnOffModel:
+        pass
+
+    @entity_registry.strict_match(
+        s.component,
+        cluster_handler_names="on_off",
+        models=MODEL,
+        manufacturers=MANUFACTURER,
+    )
+    class OnOffModelManufacturer:
+        pass
+
+    @entity_registry.strict_match(
+        s.component, cluster_handler_names="on_off", quirk_ids=QUIRK_ID
+    )
+    class OnOffQuirk:
+        pass
+
+    ch_on_off = cluster_handler("on_off", 6)
+    ch_level = cluster_handler("level", 8)
+
+    match, claimed = entity_registry.get_entity(
+        s.component, manufacturer, model, [ch_on_off, ch_level], quirk_id
+    )
+
+    assert match.__name__ == match_name
+    assert claimed == [ch_on_off]
+
+
+def test_multi_sensor_match(
+    cluster_handler, entity_registry: er.EntityRegistry
+) -> None:
+    """Test multi-entity match."""
+
+    s = mock.sentinel
+
+    @entity_registry.multipass_match(
+        s.binary_sensor,
+        cluster_handler_names="smartenergy_metering",
+    )
+    class SmartEnergySensor2:
+        pass
+
+    ch_se = cluster_handler("smartenergy_metering", 0x0702)
+    ch_illuminati = cluster_handler("illuminance", 0x0401)
+
+    match, claimed = entity_registry.get_multi_entity(
+        "manufacturer",
+        "model",
+        cluster_handlers=[ch_se, ch_illuminati],
+        quirk_id="quirk_id",
+    )
+
+    assert s.binary_sensor in match
+    assert s.component not in match
+    assert set(claimed) == {ch_se}
+    assert {cls.entity_class.__name__ for cls in match[s.binary_sensor]} == {
+        SmartEnergySensor2.__name__
+    }
+
+    @entity_registry.multipass_match(
+        s.component,
+        cluster_handler_names="smartenergy_metering",
+        aux_cluster_handlers="illuminance",
+    )
+    class SmartEnergySensor1:
+        pass
+
+    @entity_registry.multipass_match(
+        s.binary_sensor,
+        cluster_handler_names="smartenergy_metering",
+        aux_cluster_handlers="illuminance",
+    )
+    class SmartEnergySensor3:
+        pass
+
+    match, claimed = entity_registry.get_multi_entity(
+        "manufacturer",
+        "model",
+        cluster_handlers={ch_se, ch_illuminati},
+        quirk_id="quirk_id",
+    )
+
+    assert s.binary_sensor in match
+    assert s.component in match
+    assert set(claimed) == {ch_se, ch_illuminati}
+    assert {cls.entity_class.__name__ for cls in match[s.binary_sensor]} == {
+        SmartEnergySensor2.__name__,
SmartEnergySensor3.__name__, + } + assert {cls.entity_class.__name__ for cls in match[s.component]} == { + SmartEnergySensor1.__name__ + } + + +def iter_all_rules() -> Generator[tuple[registries.MatchRule, list[type[ZhaEntity]]]]: + """Iterate over all match rules and their corresponding entities.""" + + for rules in registries.ZHA_ENTITIES._strict_registry.values(): + for rule, entity in rules.items(): + yield rule, [entity] + + for rules in registries.ZHA_ENTITIES._multi_entity_registry.values(): + for multi in rules.values(): + for rule, entities in multi.items(): + yield rule, entities + + for rules in registries.ZHA_ENTITIES._config_diagnostic_entity_registry.values(): + for multi in rules.values(): + for rule, entities in multi.items(): + yield rule, entities + + +def test_quirk_classes() -> None: + """Make sure that all quirk IDs in components matches exist.""" + + def quirk_class_validator(value): + """Validate quirk IDs during self test.""" + if callable(value): + # Callables cannot be tested + return + + if isinstance(value, (frozenset, set, list)): + for v in value: + # Unpack the value if needed + quirk_class_validator(v) + return + + if value not in all_quirk_ids: + raise ValueError(f"Quirk ID '{value}' does not exist.") + + # get all quirk ID from zigpy quirks registry + all_quirk_ids = [] + for manufacturer in zigpy_quirks._DEVICE_REGISTRY._registry.values(): + for model_quirk_list in manufacturer.values(): + for quirk in model_quirk_list: + quirk_id = getattr(quirk, ATTR_QUIRK_ID, None) + if quirk_id is not None and quirk_id not in all_quirk_ids: + all_quirk_ids.append(quirk_id) + # pylint: disable-next=undefined-loop-variable + del quirk, model_quirk_list, manufacturer + + # validate all quirk IDs used in component match rules + for rule, _ in iter_all_rules(): + quirk_class_validator(rule.quirk_ids) + + +def test_entity_names() -> None: + """Make sure that all handlers expose entities with valid names.""" + + for _, entity_classes in iter_all_rules(): + for entity_class in entity_classes: + if hasattr(entity_class, "__attr_name"): + # The entity has a name + assert (name := entity_class.__attr_name) and isinstance(name, str) + elif hasattr(entity_class, "__attr_translation_key"): + assert ( + isinstance(entity_class.__attr_translation_key, str) + and entity_class.__attr_translation_key + ) + elif hasattr(entity_class, "__attr_device_class"): + assert entity_class.__attr_device_class + else: + # The only exception (for now) is IASZone + assert entity_class is IASZone diff --git a/tests/components/zha/test_repairs.py b/tests/components/zha/test_repairs.py index c2925161748..c093fe266bd 100644 --- a/tests/components/zha/test_repairs.py +++ b/tests/components/zha/test_repairs.py @@ -16,7 +16,7 @@ from homeassistant.components.homeassistant_sky_connect.const import ( # pylint DOMAIN as SKYCONNECT_DOMAIN, ) from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN -from homeassistant.components.zha.const import DOMAIN +from homeassistant.components.zha.core.const import DOMAIN from homeassistant.components.zha.repairs.network_settings_inconsistent import ( ISSUE_INCONSISTENT_NETWORK_SETTINGS, ) @@ -148,7 +148,7 @@ async def test_multipan_firmware_repair( autospec=True, ), patch( - "homeassistant.components.zha.Gateway.async_initialize", + "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", side_effect=RuntimeError(), ), patch( @@ -199,7 +199,7 @@ async def test_multipan_firmware_no_repair_on_probe_failure( autospec=True, ), patch( - 
"homeassistant.components.zha.Gateway.async_initialize", + "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", side_effect=RuntimeError(), ), ): @@ -236,7 +236,7 @@ async def test_multipan_firmware_retry_on_probe_ezsp( autospec=True, ), patch( - "homeassistant.components.zha.Gateway.async_initialize", + "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", side_effect=RuntimeError(), ), ): @@ -311,7 +311,7 @@ async def test_inconsistent_settings_keep_new( old_state = network_backup with patch( - "homeassistant.components.zha.Gateway.async_initialize", + "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", side_effect=NetworkSettingsInconsistent( message="Network settings are inconsistent", new_state=new_state, @@ -390,7 +390,7 @@ async def test_inconsistent_settings_restore_old( old_state = network_backup with patch( - "homeassistant.components.zha.Gateway.async_initialize", + "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", side_effect=NetworkSettingsInconsistent( message="Network settings are inconsistent", new_state=new_state, diff --git a/tests/components/zha/test_select.py b/tests/components/zha/test_select.py index f0f742503e3..70f58ee4e6d 100644 --- a/tests/components/zha/test_select.py +++ b/tests/components/zha/test_select.py @@ -1,30 +1,34 @@ """Test ZHA select entities.""" -from unittest.mock import patch +from typing import Any +from unittest.mock import call, patch import pytest -from zigpy.const import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from zhaquirks import ( + DEVICE_TYPE, + ENDPOINTS, + INPUT_CLUSTERS, + OUTPUT_CLUSTERS, + PROFILE_ID, +) +from zigpy.const import SIG_EP_PROFILE from zigpy.profiles import zha +from zigpy.quirks import CustomCluster, CustomDevice +from zigpy.quirks.v2 import CustomDeviceV2, add_to_registry_v2 +import zigpy.types as t from zigpy.zcl.clusters import general, security +from zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, -) -from homeassistant.const import ( - STATE_UNAVAILABLE, - STATE_UNKNOWN, - EntityCategory, - Platform, -) -from homeassistant.core import HomeAssistant, State -from homeassistant.helpers import entity_registry as er +from homeassistant.components.zha.select import AqaraMotionSensitivities +from homeassistant.const import STATE_UNKNOWN, EntityCategory, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, restore_state +from homeassistant.util import dt as dt_util -from .common import find_entity_id +from .common import async_enable_traffic, find_entity_id, send_attributes_report +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE -from tests.common import mock_restore_cache +from tests.common import async_mock_load_restore_state_from_storage @pytest.fixture(autouse=True) @@ -46,17 +50,9 @@ def select_select_only(): yield -async def test_select( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - setup_zha, - zigpy_device_mock, -) -> None: - """Test ZHA select platform.""" - - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) +@pytest.fixture +async def siren(hass, zigpy_device_mock, zha_device_joined_restored): + """Siren fixture.""" zigpy_device = zigpy_device_mock( { @@ -66,16 +62,75 @@ async def test_select( 
SIG_EP_TYPE: zha.DeviceType.IAS_WARNING_DEVICE, SIG_EP_PROFILE: zha.PROFILE_ID, } - } + }, ) - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) + zha_device = await zha_device_joined_restored(zigpy_device) + return zha_device, zigpy_device.endpoints[1].ias_wd - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + +@pytest.fixture +async def light(hass, zigpy_device_mock): + """Siren fixture.""" + + return zigpy_device_mock( + { + 1: { + SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.ON_OFF_LIGHT, + SIG_EP_INPUT: [ + general.Basic.cluster_id, + general.Identify.cluster_id, + general.OnOff.cluster_id, + ], + SIG_EP_OUTPUT: [general.Ota.cluster_id], + } + }, + node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", + ) + + +@pytest.fixture +def core_rs(hass_storage: dict[str, Any]): + """Core.restore_state fixture.""" + + def _storage(entity_id, state): + now = dt_util.utcnow().isoformat() + + hass_storage[restore_state.STORAGE_KEY] = { + "version": restore_state.STORAGE_VERSION, + "key": restore_state.STORAGE_KEY, + "data": [ + { + "state": { + "entity_id": entity_id, + "state": str(state), + "last_changed": now, + "last_updated": now, + "context": { + "id": "3c2243ff5f30447eb12e7348cfd5b8ff", + "user_id": None, + }, + }, + "last_seen": now, + } + ], + } + + return _storage + + +async def test_select( + hass: HomeAssistant, entity_registry: er.EntityRegistry, siren +) -> None: + """Test ZHA select platform.""" + zha_device, cluster = siren + assert cluster is not None entity_id = find_entity_id( - Platform.SELECT, zha_device_proxy, hass, qualifier="tone" + Platform.SELECT, + zha_device, + hass, + qualifier="tone", ) assert entity_id is not None @@ -112,32 +167,17 @@ async def test_select( assert state.state == security.IasWd.Warning.WarningMode.Burglar.name -@pytest.mark.parametrize( - ("restored_state", "expected_state"), - [ - # Unavailable is not restored - (STATE_UNAVAILABLE, STATE_UNKNOWN), - # Normal state is - ( - security.IasWd.Warning.WarningMode.Burglar.name, - security.IasWd.Warning.WarningMode.Burglar.name, - ), - ], -) async def test_select_restore_state( hass: HomeAssistant, - entity_registry: er.EntityRegistry, - setup_zha, zigpy_device_mock, - restored_state: str, - expected_state: str, + core_rs, + zha_device_restored, ) -> None: - """Test ZHA select platform restore state.""" + """Test ZHA select entity restore state.""" + entity_id = "select.fakemanufacturer_fakemodel_default_siren_tone" - - mock_restore_cache(hass, [State(entity_id, restored_state)]) - - await setup_zha() + core_rs(entity_id, state="Burglar") + await async_mock_load_restore_state_from_storage(hass) zigpy_device = zigpy_device_mock( { @@ -147,14 +187,307 @@ async def test_select_restore_state( SIG_EP_TYPE: zha.DeviceType.IAS_WARNING_DEVICE, SIG_EP_PROFILE: zha.PROFILE_ID, } - } + }, ) - gateway = get_zha_gateway(hass) - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) + zha_device = await zha_device_restored(zigpy_device) + cluster = zigpy_device.endpoints[1].ias_wd + assert cluster is not None + entity_id = find_entity_id( + Platform.SELECT, + zha_device, + hass, + qualifier="tone", + ) + + assert entity_id is not None + state = hass.states.get(entity_id) + assert state + assert state.state == 
security.IasWd.Warning.WarningMode.Burglar.name + + +async def test_on_off_select_new_join( + hass: HomeAssistant, entity_registry: er.EntityRegistry, light, zha_device_joined +) -> None: + """Test ZHA on off select - new join.""" + on_off_cluster = light.endpoints[1].on_off + on_off_cluster.PLUGGED_ATTR_READS = { + "start_up_on_off": general.OnOff.StartUpOnOff.On + } + zha_device = await zha_device_joined(light) + select_name = "start_up_behavior" + entity_id = find_entity_id( + Platform.SELECT, + zha_device, + hass, + qualifier=select_name, + ) + assert entity_id is not None + + assert on_off_cluster.read_attributes.call_count == 2 + assert ( + call(["start_up_on_off"], allow_cache=True, only_cache=False, manufacturer=None) + in on_off_cluster.read_attributes.call_args_list + ) + assert ( + call(["on_off"], allow_cache=False, only_cache=False, manufacturer=None) + in on_off_cluster.read_attributes.call_args_list + ) state = hass.states.get(entity_id) assert state - assert state.state == expected_state + assert state.state == general.OnOff.StartUpOnOff.On.name + + assert state.attributes["options"] == ["Off", "On", "Toggle", "PreviousValue"] + + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry + assert entity_entry.entity_category == EntityCategory.CONFIG + + # Test select option with string value + await hass.services.async_call( + "select", + "select_option", + { + "entity_id": entity_id, + "option": general.OnOff.StartUpOnOff.Off.name, + }, + blocking=True, + ) + + assert on_off_cluster.write_attributes.call_count == 1 + assert on_off_cluster.write_attributes.call_args[0][0] == { + "start_up_on_off": general.OnOff.StartUpOnOff.Off + } + + state = hass.states.get(entity_id) + assert state + assert state.state == general.OnOff.StartUpOnOff.Off.name + + +async def test_on_off_select_restored( + hass: HomeAssistant, entity_registry: er.EntityRegistry, light, zha_device_restored +) -> None: + """Test ZHA on off select - restored.""" + on_off_cluster = light.endpoints[1].on_off + on_off_cluster.PLUGGED_ATTR_READS = { + "start_up_on_off": general.OnOff.StartUpOnOff.On + } + zha_device = await zha_device_restored(light) + + assert zha_device.is_mains_powered + + assert on_off_cluster.read_attributes.call_count == 4 + # first 2 calls hit cache only + assert ( + call(["start_up_on_off"], allow_cache=True, only_cache=True, manufacturer=None) + in on_off_cluster.read_attributes.call_args_list + ) + assert ( + call(["on_off"], allow_cache=True, only_cache=True, manufacturer=None) + in on_off_cluster.read_attributes.call_args_list + ) + + # 2nd set of calls can actually read from the device + assert ( + call(["start_up_on_off"], allow_cache=True, only_cache=False, manufacturer=None) + in on_off_cluster.read_attributes.call_args_list + ) + assert ( + call(["on_off"], allow_cache=False, only_cache=False, manufacturer=None) + in on_off_cluster.read_attributes.call_args_list + ) + + select_name = "start_up_behavior" + entity_id = find_entity_id( + Platform.SELECT, + zha_device, + hass, + qualifier=select_name, + ) + assert entity_id is not None + + state = hass.states.get(entity_id) + assert state + assert state.state == general.OnOff.StartUpOnOff.On.name + assert state.attributes["options"] == ["Off", "On", "Toggle", "PreviousValue"] + + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry + assert entity_entry.entity_category == EntityCategory.CONFIG + + +async def test_on_off_select_unsupported( + hass: HomeAssistant, light, zha_device_joined_restored +) 
-> None: + """Test ZHA on off select unsupported.""" + + on_off_cluster = light.endpoints[1].on_off + on_off_cluster.add_unsupported_attribute("start_up_on_off") + zha_device = await zha_device_joined_restored(light) + select_name = general.OnOff.StartUpOnOff.__name__ + entity_id = find_entity_id( + Platform.SELECT, + zha_device, + hass, + qualifier=select_name.lower(), + ) + assert entity_id is None + + +class MotionSensitivityQuirk(CustomDevice): + """Quirk with motion sensitivity attribute.""" + + class OppleCluster(CustomCluster, ManufacturerSpecificCluster): + """Aqara manufacturer specific cluster.""" + + cluster_id = 0xFCC0 + ep_attribute = "opple_cluster" + attributes = { + 0x010C: ("motion_sensitivity", t.uint8_t, True), + 0x020C: ("motion_sensitivity_disabled", t.uint8_t, True), + } + + def __init__(self, *args, **kwargs): + """Initialize.""" + super().__init__(*args, **kwargs) + # populate cache to create config entity + self._attr_cache.update( + { + 0x010C: AqaraMotionSensitivities.Medium, + 0x020C: AqaraMotionSensitivities.Medium, + } + ) + + replacement = { + ENDPOINTS: { + 1: { + PROFILE_ID: zha.PROFILE_ID, + DEVICE_TYPE: zha.DeviceType.OCCUPANCY_SENSOR, + INPUT_CLUSTERS: [general.Basic.cluster_id, OppleCluster], + OUTPUT_CLUSTERS: [], + }, + } + } + + +@pytest.fixture +async def zigpy_device_aqara_sensor(hass, zigpy_device_mock, zha_device_joined): + """Device tracker zigpy Aqara motion sensor device.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.OCCUPANCY_SENSOR, + } + }, + manufacturer="LUMI", + model="lumi.motion.ac02", + quirk=MotionSensitivityQuirk, + ) + + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + await hass.async_block_till_done() + return zigpy_device + + +async def test_on_off_select_attribute_report( + hass: HomeAssistant, light, zha_device_restored, zigpy_device_aqara_sensor +) -> None: + """Test ZHA attribute report parsing for select platform.""" + + zha_device = await zha_device_restored(zigpy_device_aqara_sensor) + cluster = zigpy_device_aqara_sensor.endpoints.get(1).opple_cluster + entity_id = find_entity_id(Platform.SELECT, zha_device, hass) + assert entity_id is not None + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + # test that the state is in default medium state + assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Medium.name + + # send attribute report from device + await send_attributes_report( + hass, cluster, {"motion_sensitivity": AqaraMotionSensitivities.Low} + ) + assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Low.name + + +( + add_to_registry_v2("Fake_Manufacturer", "Fake_Model") + .replaces(MotionSensitivityQuirk.OppleCluster) + .enum( + "motion_sensitivity", + AqaraMotionSensitivities, + MotionSensitivityQuirk.OppleCluster.cluster_id, + ) + .enum( + "motion_sensitivity_disabled", + AqaraMotionSensitivities, + MotionSensitivityQuirk.OppleCluster.cluster_id, + translation_key="motion_sensitivity", + initially_disabled=True, + ) +) + + +@pytest.fixture +async def zigpy_device_aqara_sensor_v2( + hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored +): + """Device tracker zigpy Aqara motion sensor device.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.Basic.cluster_id, + MotionSensitivityQuirk.OppleCluster.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: 
zha.DeviceType.OCCUPANCY_SENSOR, + } + }, + manufacturer="Fake_Manufacturer", + model="Fake_Model", + ) + + zha_device = await zha_device_joined_restored(zigpy_device) + return zha_device, zigpy_device.endpoints[1].opple_cluster + + +async def test_on_off_select_attribute_report_v2( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + zigpy_device_aqara_sensor_v2, +) -> None: + """Test ZHA attribute report parsing for select platform.""" + + zha_device, cluster = zigpy_device_aqara_sensor_v2 + assert isinstance(zha_device.device, CustomDeviceV2) + entity_id = find_entity_id( + Platform.SELECT, zha_device, hass, qualifier="motion_sensitivity" + ) + assert entity_id is not None + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + # test that the state is in default medium state + assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Medium.name + + # send attribute report from device + await send_attributes_report( + hass, cluster, {"motion_sensitivity": AqaraMotionSensitivities.Low} + ) + assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Low.name + + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry + assert entity_entry.entity_category == EntityCategory.CONFIG + assert entity_entry.disabled is False + assert entity_entry.translation_key == "motion_sensitivity" diff --git a/tests/components/zha/test_sensor.py b/tests/components/zha/test_sensor.py index 2d69cf1ff36..8443c4ced07 100644 --- a/tests/components/zha/test_sensor.py +++ b/tests/components/zha/test_sensor.py @@ -1,20 +1,33 @@ """Test ZHA sensor.""" -from unittest.mock import patch +from collections.abc import Callable +from datetime import timedelta +import math +from typing import Any +from unittest.mock import MagicMock, patch import pytest -from zigpy.profiles import zha -from zigpy.zcl import Cluster +from zhaquirks.danfoss import thermostat as danfoss_thermostat +import zigpy.profiles.zha +from zigpy.quirks import CustomCluster +from zigpy.quirks.v2 import CustomDeviceV2, add_to_registry_v2 +from zigpy.quirks.v2.homeassistant import UnitOfMass +import zigpy.types as t from zigpy.zcl.clusters import general, homeautomation, hvac, measurement, smartenergy from zigpy.zcl.clusters.hvac import Thermostat +from zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.components.zha.helpers import get_zha_gateway +from homeassistant.components.zha.core import ZHADevice +from homeassistant.components.zha.core.const import ZHA_CLUSTER_HANDLER_READS_PER_REQ +import homeassistant.config as config_util from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_UNIT_OF_MEASUREMENT, + CONF_UNIT_SYSTEM, LIGHT_LUX, PERCENTAGE, + STATE_UNAVAILABLE, STATE_UNKNOWN, Platform, UnitOfApparentPower, @@ -24,12 +37,29 @@ from homeassistant.const import ( UnitOfPower, UnitOfPressure, UnitOfTemperature, + UnitOfVolume, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, restore_state +from homeassistant.helpers.entity_component import async_update_entity +from homeassistant.util import dt as dt_util -from .common import send_attributes_report +from .common import ( + async_enable_traffic, + async_test_rejoin, + find_entity_id, + find_entity_ids, + send_attribute_report, + send_attributes_report, +) from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from 
tests.common import ( + MockConfigEntry, + async_fire_time_changed, + async_mock_load_restore_state_from_storage, +) + ENTITY_ID_PREFIX = "sensor.fakemanufacturer_fakemodel_{}" @@ -46,19 +76,60 @@ def sensor_platform_only(): yield -async def async_test_humidity(hass: HomeAssistant, cluster: Cluster, entity_id: str): +@pytest.fixture +async def elec_measurement_zigpy_dev(hass: HomeAssistant, zigpy_device_mock): + """Electric Measurement zigpy device.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.Basic.cluster_id, + homeautomation.ElectricalMeasurement.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SIMPLE_SENSOR, + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + } + }, + ) + zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 + zigpy_device.endpoints[1].electrical_measurement.PLUGGED_ATTR_READS = { + "ac_current_divisor": 10, + "ac_current_multiplier": 1, + "ac_power_divisor": 10, + "ac_power_multiplier": 1, + "ac_voltage_divisor": 10, + "ac_voltage_multiplier": 1, + "measurement_type": 8, + "power_divisor": 10, + "power_multiplier": 1, + } + return zigpy_device + + +@pytest.fixture +async def elec_measurement_zha_dev(elec_measurement_zigpy_dev, zha_device_joined): + """Electric Measurement ZHA device.""" + + zha_dev = await zha_device_joined(elec_measurement_zigpy_dev) + zha_dev.available = True + return zha_dev + + +async def async_test_humidity(hass: HomeAssistant, cluster, entity_id): """Test humidity sensor.""" await send_attributes_report(hass, cluster, {1: 1, 0: 1000, 2: 100}) assert_state(hass, entity_id, "10.0", PERCENTAGE) -async def async_test_temperature(hass: HomeAssistant, cluster: Cluster, entity_id: str): +async def async_test_temperature(hass: HomeAssistant, cluster, entity_id): """Test temperature sensor.""" await send_attributes_report(hass, cluster, {1: 1, 0: 2900, 2: 100}) assert_state(hass, entity_id, "29.0", UnitOfTemperature.CELSIUS) -async def async_test_pressure(hass: HomeAssistant, cluster: Cluster, entity_id: str): +async def async_test_pressure(hass: HomeAssistant, cluster, entity_id): """Test pressure sensor.""" await send_attributes_report(hass, cluster, {1: 1, 0: 1000, 2: 10000}) assert_state(hass, entity_id, "1000", UnitOfPressure.HPA) @@ -67,7 +138,7 @@ async def async_test_pressure(hass: HomeAssistant, cluster: Cluster, entity_id: assert_state(hass, entity_id, "1000", UnitOfPressure.HPA) -async def async_test_illuminance(hass: HomeAssistant, cluster: Cluster, entity_id: str): +async def async_test_illuminance(hass: HomeAssistant, cluster, entity_id): """Test illuminance sensor.""" await send_attributes_report(hass, cluster, {1: 1, 0: 10, 2: 20}) assert_state(hass, entity_id, "1", LIGHT_LUX) @@ -79,7 +150,7 @@ async def async_test_illuminance(hass: HomeAssistant, cluster: Cluster, entity_i assert_state(hass, entity_id, "unknown", LIGHT_LUX) -async def async_test_metering(hass: HomeAssistant, cluster: Cluster, entity_id: str): +async def async_test_metering(hass: HomeAssistant, cluster, entity_id): """Test Smart Energy metering sensor.""" await send_attributes_report(hass, cluster, {1025: 1, 1024: 12345, 1026: 100}) assert_state(hass, entity_id, "12345.0", None) @@ -88,14 +159,13 @@ async def async_test_metering(hass: HomeAssistant, cluster: Cluster, entity_id: await send_attributes_report(hass, cluster, {1024: 12346, "status": 64 + 8}) assert_state(hass, entity_id, "12346.0", None) - assert hass.states.get(entity_id).attributes["status"] in ( "SERVICE_DISCONNECT|POWER_FAILURE", 
"POWER_FAILURE|SERVICE_DISCONNECT", ) await send_attributes_report( - hass, cluster, {"metering_device_type": 1, "status": 64 + 8} + hass, cluster, {"status": 64 + 8, "metering_device_type": 1} ) assert hass.states.get(entity_id).attributes["status"] in ( "SERVICE_DISCONNECT|NOT_DEFINED", @@ -103,7 +173,7 @@ async def async_test_metering(hass: HomeAssistant, cluster: Cluster, entity_id: ) await send_attributes_report( - hass, cluster, {"metering_device_type": 2, "status": 64 + 8} + hass, cluster, {"status": 64 + 8, "metering_device_type": 2} ) assert hass.states.get(entity_id).attributes["status"] in ( "SERVICE_DISCONNECT|PIPE_EMPTY", @@ -111,7 +181,7 @@ async def async_test_metering(hass: HomeAssistant, cluster: Cluster, entity_id: ) await send_attributes_report( - hass, cluster, {"metering_device_type": 5, "status": 64 + 8} + hass, cluster, {"status": 64 + 8, "metering_device_type": 5} ) assert hass.states.get(entity_id).attributes["status"] in ( "SERVICE_DISCONNECT|TEMPERATURE_SENSOR", @@ -120,13 +190,13 @@ async def async_test_metering(hass: HomeAssistant, cluster: Cluster, entity_id: # Status for other meter types await send_attributes_report( - hass, cluster, {"metering_device_type": 4, "status": 32} + hass, cluster, {"status": 32, "metering_device_type": 4} ) assert hass.states.get(entity_id).attributes["status"] in ("", "32") async def async_test_smart_energy_summation_delivered( - hass: HomeAssistant, cluster: Cluster, entity_id: str + hass: HomeAssistant, cluster, entity_id ): """Test SmartEnergy Summation delivered sensor.""" @@ -143,7 +213,7 @@ async def async_test_smart_energy_summation_delivered( async def async_test_smart_energy_summation_received( - hass: HomeAssistant, cluster: Cluster, entity_id: str + hass: HomeAssistant, cluster, entity_id ): """Test SmartEnergy Summation received sensor.""" @@ -159,9 +229,7 @@ async def async_test_smart_energy_summation_received( ) -async def async_test_electrical_measurement( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_electrical_measurement(hass: HomeAssistant, cluster, entity_id): """Test electrical measurement sensor.""" # update divisor cached value await send_attributes_report(hass, cluster, {"ac_power_divisor": 1}) @@ -180,12 +248,10 @@ async def async_test_electrical_measurement( assert "active_power_max" not in hass.states.get(entity_id).attributes await send_attributes_report(hass, cluster, {0: 1, 0x050D: 88, 10: 5000}) - assert hass.states.get(entity_id).attributes["active_power_max"] == 8.8 + assert hass.states.get(entity_id).attributes["active_power_max"] == "8.8" -async def async_test_em_apparent_power( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_em_apparent_power(hass: HomeAssistant, cluster, entity_id): """Test electrical measurement Apparent Power sensor.""" # update divisor cached value await send_attributes_report(hass, cluster, {"ac_power_divisor": 1}) @@ -203,9 +269,7 @@ async def async_test_em_apparent_power( assert_state(hass, entity_id, "9.9", UnitOfApparentPower.VOLT_AMPERE) -async def async_test_em_power_factor( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_em_power_factor(hass: HomeAssistant, cluster, entity_id): """Test electrical measurement Power Factor sensor.""" # update divisor cached value await send_attributes_report(hass, cluster, {"ac_power_divisor": 1}) @@ -223,9 +287,7 @@ async def async_test_em_power_factor( assert_state(hass, entity_id, "99", PERCENTAGE) -async def 
async_test_em_rms_current( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_em_rms_current(hass: HomeAssistant, cluster, entity_id): """Test electrical measurement RMS Current sensor.""" await send_attributes_report(hass, cluster, {0: 1, 0x0508: 1234, 10: 1000}) @@ -240,12 +302,10 @@ async def async_test_em_rms_current( assert "rms_current_max" not in hass.states.get(entity_id).attributes await send_attributes_report(hass, cluster, {0: 1, 0x050A: 88, 10: 5000}) - assert hass.states.get(entity_id).attributes["rms_current_max"] == 8.8 + assert hass.states.get(entity_id).attributes["rms_current_max"] == "8.8" -async def async_test_em_rms_voltage( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_em_rms_voltage(hass: HomeAssistant, cluster, entity_id): """Test electrical measurement RMS Voltage sensor.""" await send_attributes_report(hass, cluster, {0: 1, 0x0505: 1234, 10: 1000}) @@ -260,12 +320,10 @@ async def async_test_em_rms_voltage( assert "rms_voltage_max" not in hass.states.get(entity_id).attributes await send_attributes_report(hass, cluster, {0: 1, 0x0507: 888, 10: 5000}) - assert hass.states.get(entity_id).attributes["rms_voltage_max"] == 8.9 + assert hass.states.get(entity_id).attributes["rms_voltage_max"] == "8.9" -async def async_test_powerconfiguration( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_powerconfiguration(hass: HomeAssistant, cluster, entity_id): """Test powerconfiguration/battery sensor.""" await send_attributes_report(hass, cluster, {33: 98}) assert_state(hass, entity_id, "49", "%") @@ -276,9 +334,7 @@ async def async_test_powerconfiguration( assert hass.states.get(entity_id).attributes["battery_voltage"] == 2.0 -async def async_test_powerconfiguration2( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_powerconfiguration2(hass: HomeAssistant, cluster, entity_id): """Test powerconfiguration/battery sensor.""" await send_attributes_report(hass, cluster, {33: -1}) assert_state(hass, entity_id, STATE_UNKNOWN, "%") @@ -290,17 +346,13 @@ async def async_test_powerconfiguration2( assert_state(hass, entity_id, "49", "%") -async def async_test_device_temperature( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_device_temperature(hass: HomeAssistant, cluster, entity_id): """Test temperature sensor.""" await send_attributes_report(hass, cluster, {0: 2900}) assert_state(hass, entity_id, "29.0", UnitOfTemperature.CELSIUS) -async def async_test_setpoint_change_source( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_setpoint_change_source(hass, cluster, entity_id): """Test the translation of numerical state into enum text.""" await send_attributes_report( hass, cluster, {Thermostat.AttributeDefs.setpoint_change_source.id: 0x01} @@ -309,9 +361,7 @@ async def async_test_setpoint_change_source( assert hass_state.state == "Schedule" -async def async_test_pi_heating_demand( - hass: HomeAssistant, cluster: Cluster, entity_id: str -): +async def async_test_pi_heating_demand(hass, cluster, entity_id): """Test pi heating demand is correctly returned.""" await send_attributes_report( hass, cluster, {Thermostat.AttributeDefs.pi_heating_demand.id: 1} @@ -518,8 +568,8 @@ async def async_test_pi_heating_demand( ) async def test_sensor( hass: HomeAssistant, - setup_zha, zigpy_device_mock, + zha_device_joined_restored, cluster_id, entity_suffix, test_func, @@ -530,18 +580,14 @@ async def 
test_sensor( ) -> None: """Test ZHA sensor platform.""" - await setup_zha() - gateway = get_zha_gateway(hass) - zigpy_device = zigpy_device_mock( { 1: { SIG_EP_INPUT: [cluster_id, general.Basic.cluster_id], SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, } - }, + } ) cluster = zigpy_device.endpoints[1].in_clusters[cluster_id] if unsupported_attrs: @@ -554,27 +600,26 @@ async def test_sensor( # this one is mains powered zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 cluster.PLUGGED_ATTR_READS = read_plug - - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) + zha_device = await zha_device_joined_restored(zigpy_device) entity_id = ENTITY_ID_PREFIX.format(entity_suffix) - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [cluster_id, general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - } - ) + await async_enable_traffic(hass, [zha_device], enabled=False) + await hass.async_block_till_done() + # ensure the sensor entity was created + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + # allow traffic to flow through the gateway and devices + await async_enable_traffic(hass, [zha_device]) + + # test that the sensor now have their correct initial state (mostly unknown) assert hass.states.get(entity_id).state == initial_sensor_state # test sensor associated logic await test_func(hass, cluster, entity_id) + # test rejoin + await async_test_rejoin(hass, zigpy_device, [cluster], (report_count,)) + def assert_state(hass: HomeAssistant, entity_id, state, unit_of_measurement): """Check that the state is what is expected. 
@@ -585,3 +630,748 @@ def assert_state(hass: HomeAssistant, entity_id, state, unit_of_measurement): hass_state = hass.states.get(entity_id) assert hass_state.state == state assert hass_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == unit_of_measurement + + +@pytest.fixture +def hass_ms(hass: HomeAssistant) -> Callable[[str], HomeAssistant]: + """Hass instance with measurement system.""" + + async def _hass_ms(meas_sys: str) -> HomeAssistant: + await config_util.async_process_ha_core_config( + hass, {CONF_UNIT_SYSTEM: meas_sys} + ) + await hass.async_block_till_done() + return hass + + return _hass_ms + + +@pytest.fixture +def core_rs(hass_storage: dict[str, Any]): + """Core.restore_state fixture.""" + + def _storage(entity_id, uom, state): + now = dt_util.utcnow().isoformat() + + hass_storage[restore_state.STORAGE_KEY] = { + "version": restore_state.STORAGE_VERSION, + "key": restore_state.STORAGE_KEY, + "data": [ + { + "state": { + "entity_id": entity_id, + "state": str(state), + "attributes": {ATTR_UNIT_OF_MEASUREMENT: uom}, + "last_changed": now, + "last_updated": now, + "context": { + "id": "3c2243ff5f30447eb12e7348cfd5b8ff", + "user_id": None, + }, + }, + "last_seen": now, + } + ], + } + + return _storage + + +@pytest.mark.parametrize( + ("uom", "raw_temp", "expected", "restore"), + [ + (UnitOfTemperature.CELSIUS, 2900, 29, False), + (UnitOfTemperature.CELSIUS, 2900, 29, True), + (UnitOfTemperature.FAHRENHEIT, 2900, 84, False), + (UnitOfTemperature.FAHRENHEIT, 2900, 84, True), + ], +) +async def test_temp_uom( + hass: HomeAssistant, + uom: UnitOfTemperature, + raw_temp: int, + expected: int, + restore: bool, + hass_ms: Callable[[str], HomeAssistant], + core_rs, + zigpy_device_mock, + zha_device_restored, +) -> None: + """Test ZHA temperature sensor unit of measurement.""" + + entity_id = "sensor.fake1026_fakemodel1026_004f3202_temperature" + if restore: + core_rs(entity_id, uom, state=(expected - 2)) + await async_mock_load_restore_state_from_storage(hass) + + hass = await hass_ms("metric" if uom == UnitOfTemperature.CELSIUS else "imperial") + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + measurement.TemperatureMeasurement.cluster_id, + general.Basic.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, + } + } + ) + cluster = zigpy_device.endpoints[1].temperature + zha_device = await zha_device_restored(zigpy_device) + entity_id = find_entity_id(Platform.SENSOR, zha_device, hass) + + if not restore: + await async_enable_traffic(hass, [zha_device], enabled=False) + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and devices + await async_enable_traffic(hass, [zha_device]) + + # test that the sensors now have a state of unknown + if not restore: + assert hass.states.get(entity_id).state == STATE_UNKNOWN + + await send_attribute_report(hass, cluster, 0, raw_temp) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state is not None + assert round(float(state.state)) == expected + assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == uom + + +@patch( + "zigpy.zcl.ClusterPersistingListener", + MagicMock(), +) +async def test_electrical_measurement_init( + hass: HomeAssistant, + zigpy_device_mock, + zha_device_joined, +) -> None: + """Test proper initialization of the electrical measurement cluster.""" + + cluster_id = homeautomation.ElectricalMeasurement.cluster_id + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: 
[cluster_id, general.Basic.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, + } + } + ) + cluster = zigpy_device.endpoints[1].in_clusters[cluster_id] + zha_device = await zha_device_joined(zigpy_device) + entity_id = "sensor.fakemanufacturer_fakemodel_power" + + # allow traffic to flow through the gateway and devices + await async_enable_traffic(hass, [zha_device]) + + # test that the sensor now have a state of unknown + assert hass.states.get(entity_id).state == STATE_UNKNOWN + + await send_attributes_report(hass, cluster, {0: 1, 1291: 100, 10: 1000}) + assert int(hass.states.get(entity_id).state) == 100 + + cluster_handler = zha_device._endpoints[1].all_cluster_handlers["1:0x0b04"] + assert cluster_handler.ac_power_divisor == 1 + assert cluster_handler.ac_power_multiplier == 1 + + # update power divisor + await send_attributes_report(hass, cluster, {0: 1, 1291: 20, 0x0403: 5, 10: 1000}) + assert cluster_handler.ac_power_divisor == 5 + assert cluster_handler.ac_power_multiplier == 1 + assert hass.states.get(entity_id).state == "4.0" + + await send_attributes_report(hass, cluster, {0: 1, 1291: 30, 0x0605: 10, 10: 1000}) + assert cluster_handler.ac_power_divisor == 10 + assert cluster_handler.ac_power_multiplier == 1 + assert hass.states.get(entity_id).state == "3.0" + + # update power multiplier + await send_attributes_report(hass, cluster, {0: 1, 1291: 20, 0x0402: 6, 10: 1000}) + assert cluster_handler.ac_power_divisor == 10 + assert cluster_handler.ac_power_multiplier == 6 + assert hass.states.get(entity_id).state == "12.0" + + await send_attributes_report(hass, cluster, {0: 1, 1291: 30, 0x0604: 20, 10: 1000}) + assert cluster_handler.ac_power_divisor == 10 + assert cluster_handler.ac_power_multiplier == 20 + assert hass.states.get(entity_id).state == "60.0" + + +@pytest.mark.parametrize( + ("cluster_id", "unsupported_attributes", "entity_ids", "missing_entity_ids"), + [ + ( + homeautomation.ElectricalMeasurement.cluster_id, + {"apparent_power", "rms_voltage", "rms_current"}, + { + "power", + "ac_frequency", + "power_factor", + }, + { + "apparent_power", + "voltage", + "current", + }, + ), + ( + homeautomation.ElectricalMeasurement.cluster_id, + {"apparent_power", "rms_current", "ac_frequency", "power_factor"}, + {"voltage", "power"}, + { + "apparent_power", + "current", + "ac_frequency", + "power_factor", + }, + ), + ( + homeautomation.ElectricalMeasurement.cluster_id, + set(), + { + "voltage", + "power", + "apparent_power", + "current", + "ac_frequency", + "power_factor", + }, + set(), + ), + ( + smartenergy.Metering.cluster_id, + { + "instantaneous_demand", + }, + { + "summation_delivered", + }, + { + "instantaneous_demand", + }, + ), + ( + smartenergy.Metering.cluster_id, + {"instantaneous_demand", "current_summ_delivered"}, + {}, + { + "instantaneous_demand", + "summation_delivered", + }, + ), + ( + smartenergy.Metering.cluster_id, + {}, + { + "instantaneous_demand", + "summation_delivered", + }, + {}, + ), + ], +) +async def test_unsupported_attributes_sensor( + hass: HomeAssistant, + zigpy_device_mock, + zha_device_joined_restored, + cluster_id, + unsupported_attributes, + entity_ids, + missing_entity_ids, +) -> None: + """Test ZHA sensor platform.""" + + entity_ids = {ENTITY_ID_PREFIX.format(e) for e in entity_ids} + missing_entity_ids = {ENTITY_ID_PREFIX.format(e) for e in missing_entity_ids} + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [cluster_id, general.Basic.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: 
zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, + } + } + ) + cluster = zigpy_device.endpoints[1].in_clusters[cluster_id] + if cluster_id == smartenergy.Metering.cluster_id: + # this one is mains powered + zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 + for attr in unsupported_attributes: + cluster.add_unsupported_attribute(attr) + zha_device = await zha_device_joined_restored(zigpy_device) + + await async_enable_traffic(hass, [zha_device], enabled=False) + await hass.async_block_till_done() + present_entity_ids = set(find_entity_ids(Platform.SENSOR, zha_device, hass)) + assert present_entity_ids == entity_ids + assert missing_entity_ids not in present_entity_ids + + +@pytest.mark.parametrize( + ("raw_uom", "raw_value", "expected_state", "expected_uom"), + [ + ( + 1, + 12320, + "1.23", + UnitOfVolume.CUBIC_METERS, + ), + ( + 1, + 1232000, + "123.2", + UnitOfVolume.CUBIC_METERS, + ), + ( + 3, + 2340, + "0.65", + UnitOfVolume.CUBIC_METERS, + ), + ( + 3, + 2360, + "0.68", + UnitOfVolume.CUBIC_METERS, + ), + ( + 8, + 23660, + "2.37", + UnitOfPressure.KPA, + ), + ( + 0, + 9366, + "0.937", + UnitOfEnergy.KILO_WATT_HOUR, + ), + ( + 0, + 999, + "0.1", + UnitOfEnergy.KILO_WATT_HOUR, + ), + ( + 0, + 10091, + "1.009", + UnitOfEnergy.KILO_WATT_HOUR, + ), + ( + 0, + 10099, + "1.01", + UnitOfEnergy.KILO_WATT_HOUR, + ), + ( + 0, + 100999, + "10.1", + UnitOfEnergy.KILO_WATT_HOUR, + ), + ( + 0, + 100023, + "10.002", + UnitOfEnergy.KILO_WATT_HOUR, + ), + ( + 0, + 102456, + "10.246", + UnitOfEnergy.KILO_WATT_HOUR, + ), + ( + 5, + 102456, + "10.25", + "IMP gal", + ), + ( + 7, + 50124, + "5.01", + UnitOfVolume.LITERS, + ), + ], +) +async def test_se_summation_uom( + hass: HomeAssistant, + zigpy_device_mock, + zha_device_joined, + raw_uom, + raw_value, + expected_state, + expected_uom, +) -> None: + """Test ZHA smart energy summation.""" + + entity_id = ENTITY_ID_PREFIX.format("summation_delivered") + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + smartenergy.Metering.cluster_id, + general.Basic.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SIMPLE_SENSOR, + } + } + ) + zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 + + cluster = zigpy_device.endpoints[1].in_clusters[smartenergy.Metering.cluster_id] + for attr in ("instanteneous_demand",): + cluster.add_unsupported_attribute(attr) + cluster.PLUGGED_ATTR_READS = { + "current_summ_delivered": raw_value, + "demand_formatting": 0xF9, + "divisor": 10000, + "metering_device_type": 0x00, + "multiplier": 1, + "status": 0x00, + "summation_formatting": 0b1_0111_010, + "unit_of_measure": raw_uom, + } + await zha_device_joined(zigpy_device) + + assert_state(hass, entity_id, expected_state, expected_uom) + + +@pytest.mark.parametrize( + ("raw_measurement_type", "expected_type"), + [ + (1, "ACTIVE_MEASUREMENT"), + (8, "PHASE_A_MEASUREMENT"), + (9, "ACTIVE_MEASUREMENT, PHASE_A_MEASUREMENT"), + ( + 15, + ( + "ACTIVE_MEASUREMENT, REACTIVE_MEASUREMENT, APPARENT_MEASUREMENT," + " PHASE_A_MEASUREMENT" + ), + ), + ], +) +async def test_elec_measurement_sensor_type( + hass: HomeAssistant, + elec_measurement_zigpy_dev, + raw_measurement_type, + expected_type, + zha_device_joined, +) -> None: + """Test ZHA electrical measurement sensor type.""" + + entity_id = ENTITY_ID_PREFIX.format("power") + zigpy_dev = elec_measurement_zigpy_dev + zigpy_dev.endpoints[1].electrical_measurement.PLUGGED_ATTR_READS[ + "measurement_type" + ] = raw_measurement_type + + await zha_device_joined(zigpy_dev) + + state = 
hass.states.get(entity_id) + assert state is not None + assert state.attributes["measurement_type"] == expected_type + + +async def test_elec_measurement_sensor_polling( + hass: HomeAssistant, + elec_measurement_zigpy_dev, + zha_device_joined_restored, +) -> None: + """Test ZHA electrical measurement sensor polling.""" + + entity_id = ENTITY_ID_PREFIX.format("power") + zigpy_dev = elec_measurement_zigpy_dev + zigpy_dev.endpoints[1].electrical_measurement.PLUGGED_ATTR_READS["active_power"] = ( + 20 + ) + + await zha_device_joined_restored(zigpy_dev) + + # test that the sensor has an initial state of 2.0 + state = hass.states.get(entity_id) + assert state.state == "2.0" + + # update the value for the power reading + zigpy_dev.endpoints[1].electrical_measurement.PLUGGED_ATTR_READS["active_power"] = ( + 60 + ) + + # ensure the state is still 2.0 + state = hass.states.get(entity_id) + assert state.state == "2.0" + + # let the polling happen + future = dt_util.utcnow() + timedelta(seconds=90) + async_fire_time_changed(hass, future) + await hass.async_block_till_done(wait_background_tasks=True) + + # ensure the state has been updated to 6.0 + state = hass.states.get(entity_id) + assert state.state == "6.0" + + +@pytest.mark.parametrize( + "supported_attributes", + [ + set(), + { + "active_power", + "active_power_max", + "rms_current", + "rms_current_max", + "rms_voltage", + "rms_voltage_max", + }, + { + "active_power", + }, + { + "active_power", + "active_power_max", + }, + { + "rms_current", + "rms_current_max", + }, + { + "rms_voltage", + "rms_voltage_max", + }, + ], +) +async def test_elec_measurement_skip_unsupported_attribute( + hass: HomeAssistant, + elec_measurement_zha_dev, + supported_attributes, +) -> None: + """Test ZHA electrical measurement skipping update of unsupported attributes.""" + + entity_id = ENTITY_ID_PREFIX.format("power") + zha_dev = elec_measurement_zha_dev + + cluster = zha_dev.device.endpoints[1].electrical_measurement + + all_attrs = { + "active_power", + "active_power_max", + "apparent_power", + "rms_current", + "rms_current_max", + "rms_voltage", + "rms_voltage_max", + "power_factor", + "ac_frequency", + "ac_frequency_max", + } + for attr in all_attrs - supported_attributes: + cluster.add_unsupported_attribute(attr) + cluster.read_attributes.reset_mock() + + await async_update_entity(hass, entity_id) + await hass.async_block_till_done() + assert cluster.read_attributes.call_count == math.ceil( + len(supported_attributes) / ZHA_CLUSTER_HANDLER_READS_PER_REQ + ) + read_attrs = { + a for call in cluster.read_attributes.call_args_list for a in call[0][0] + } + assert read_attrs == supported_attributes + + +class OppleCluster(CustomCluster, ManufacturerSpecificCluster): + """Aqara manufacturer specific cluster.""" + + cluster_id = 0xFCC0 + ep_attribute = "opple_cluster" + attributes = { + 0x010C: ("last_feeding_size", t.uint16_t, True), + } + + def __init__(self, *args, **kwargs) -> None: + """Initialize.""" + super().__init__(*args, **kwargs) + # populate cache to create config entity + self._attr_cache.update({0x010C: 10}) + + +( + add_to_registry_v2("Fake_Manufacturer_sensor", "Fake_Model_sensor") + .replaces(OppleCluster) + .sensor( + "last_feeding_size", + OppleCluster.cluster_id, + divisor=1, + multiplier=1, + unit=UnitOfMass.GRAMS, + ) +) + + +@pytest.fixture +async def zigpy_device_aqara_sensor_v2( + hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored +): + """Device tracker zigpy Aqara motion sensor device.""" + + zigpy_device = zigpy_device_mock( 
+ { + 1: { + SIG_EP_INPUT: [ + general.Basic.cluster_id, + OppleCluster.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.OCCUPANCY_SENSOR, + } + }, + manufacturer="Fake_Manufacturer_sensor", + model="Fake_Model_sensor", + ) + + zha_device = await zha_device_joined_restored(zigpy_device) + return zha_device, zigpy_device.endpoints[1].opple_cluster + + +async def test_last_feeding_size_sensor_v2( + hass: HomeAssistant, zigpy_device_aqara_sensor_v2 +) -> None: + """Test quirks defined sensor.""" + + zha_device, cluster = zigpy_device_aqara_sensor_v2 + assert isinstance(zha_device.device, CustomDeviceV2) + entity_id = find_entity_id( + Platform.SENSOR, zha_device, hass, qualifier="last_feeding_size" + ) + assert entity_id is not None + + await send_attributes_report(hass, cluster, {0x010C: 1}) + assert_state(hass, entity_id, "1.0", UnitOfMass.GRAMS.value) + + await send_attributes_report(hass, cluster, {0x010C: 5}) + assert_state(hass, entity_id, "5.0", UnitOfMass.GRAMS.value) + + +@pytest.fixture +async def coordinator(hass: HomeAssistant, zigpy_device_mock, zha_device_joined): + """Test ZHA fan platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Groups.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.CONTROL_BRIDGE, + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + } + }, + ieee="00:15:8d:00:02:32:4f:32", + nwk=0x0000, + node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return zha_device + + +async def test_device_counter_sensors( + hass: HomeAssistant, + coordinator: ZHADevice, + entity_registry: er.EntityRegistry, + config_entry: MockConfigEntry, +) -> None: + """Test quirks defined sensor.""" + + entity_id = "sensor.coordinator_manufacturer_coordinator_model_counter_1" + state = hass.states.get(entity_id) + assert state is None + + # Enable the entity. 
+    entity_registry.async_update_entity(entity_id, disabled_by=None)
+    await hass.config_entries.async_reload(config_entry.entry_id)
+    await hass.async_block_till_done()
+
+    state = hass.states.get(entity_id)
+    assert state is not None
+    assert state.state == "1"
+
+    # simulate counter increment on application
+    coordinator.device.application.state.counters["ezsp_counters"][
+        "counter_1"
+    ].increment()
+
+    next_update = dt_util.utcnow() + timedelta(seconds=60)
+    async_fire_time_changed(hass, next_update)
+    await hass.async_block_till_done()
+
+    state = hass.states.get(entity_id)
+    assert state is not None
+    assert state.state == "2"
+
+
+@pytest.fixture
+async def zigpy_device_danfoss_thermostat(
+    hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored
+):
+    """Device tracker zigpy danfoss thermostat device."""
+
+    zigpy_device = zigpy_device_mock(
+        {
+            1: {
+                SIG_EP_INPUT: [
+                    general.Basic.cluster_id,
+                    general.PowerConfiguration.cluster_id,
+                    general.Identify.cluster_id,
+                    general.Time.cluster_id,
+                    general.PollControl.cluster_id,
+                    Thermostat.cluster_id,
+                    hvac.UserInterface.cluster_id,
+                    homeautomation.Diagnostic.cluster_id,
+                ],
+                SIG_EP_OUTPUT: [general.Basic.cluster_id, general.Ota.cluster_id],
+                SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT,
+            }
+        },
+        manufacturer="Danfoss",
+        model="eTRV0100",
+    )
+
+    zha_device = await zha_device_joined_restored(zigpy_device)
+    return zha_device, zigpy_device
+
+
+async def test_danfoss_thermostat_sw_error(
+    hass: HomeAssistant, zigpy_device_danfoss_thermostat
+) -> None:
+    """Test quirks defined thermostat."""
+
+    zha_device, zigpy_device = zigpy_device_danfoss_thermostat
+
+    entity_id = find_entity_id(
+        Platform.SENSOR, zha_device, hass, qualifier="software_error"
+    )
+    assert entity_id is not None
+
+    cluster = zigpy_device.endpoints[1].diagnostic
+
+    await send_attributes_report(
+        hass,
+        cluster,
+        {
+            danfoss_thermostat.DanfossDiagnosticCluster.AttributeDefs.sw_error_code.id: 0x0001
+        },
+    )
+
+    hass_state = hass.states.get(entity_id)
+    assert hass_state.state == "something"
+    assert hass_state.attributes["Top_pcb_sensor_error"]
diff --git a/tests/components/zha/test_silabs_multiprotocol.py b/tests/components/zha/test_silabs_multiprotocol.py
index a5f2db22ce5..03c845269e0 100644
--- a/tests/components/zha/test_silabs_multiprotocol.py
+++ b/tests/components/zha/test_silabs_multiprotocol.py
@@ -11,7 +11,7 @@ import zigpy.state
 from homeassistant.components import zha
 from homeassistant.components.zha import silabs_multiprotocol
-from homeassistant.components.zha.helpers import get_zha_data
+from homeassistant.components.zha.core.helpers import get_zha_gateway
 from homeassistant.core import HomeAssistant
 if TYPE_CHECKING:
@@ -38,7 +38,8 @@ async def test_async_get_channel_missing(
     """Test reading channel with an inactive ZHA installation, no valid channel."""
     await setup_zha()
-    await zha.async_unload_entry(hass, get_zha_data(hass).config_entry)
+    gateway = get_zha_gateway(hass)
+    await zha.async_unload_entry(hass, gateway.config_entry)
     # Network settings were never loaded for whatever reason
     zigpy_app_controller.state.network_info = zigpy.state.NetworkInfo()
diff --git a/tests/components/zha/test_siren.py b/tests/components/zha/test_siren.py
index f9837a7d016..652955ef98d 100644
--- a/tests/components/zha/test_siren.py
+++ b/tests/components/zha/test_siren.py
@@ -4,11 +4,7 @@ from datetime import timedelta
 from unittest.mock import ANY, call, patch
 import pytest
-from zha.application.const import (
-
WARNING_DEVICE_MODE_EMERGENCY_PANIC, - WARNING_DEVICE_SOUND_MEDIUM, -) -from zigpy.const import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from zigpy.const import SIG_EP_PROFILE from zigpy.profiles import zha import zigpy.zcl from zigpy.zcl.clusters import general, security @@ -20,17 +16,16 @@ from homeassistant.components.siren import ( ATTR_VOLUME_LEVEL, DOMAIN as SIREN_DOMAIN, ) -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, +from homeassistant.components.zha.core.const import ( + WARNING_DEVICE_MODE_EMERGENCY_PANIC, + WARNING_DEVICE_SOUND_MEDIUM, ) -from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util -from .common import find_entity_id +from .common import async_enable_traffic, find_entity_id +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE from tests.common import async_fire_time_changed @@ -51,12 +46,9 @@ def siren_platform_only(): yield -async def test_siren(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: - """Test zha siren platform.""" - - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) +@pytest.fixture +async def siren(hass, zigpy_device_mock, zha_device_joined_restored): + """Siren fixture.""" zigpy_device = zigpy_device_mock( { @@ -66,18 +58,30 @@ async def test_siren(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: SIG_EP_TYPE: zha.DeviceType.IAS_WARNING_DEVICE, SIG_EP_PROFILE: zha.PROFILE_ID, } - } + }, ) - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) + zha_device = await zha_device_joined_restored(zigpy_device) + return zha_device, zigpy_device.endpoints[1].ias_wd - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - entity_id = find_entity_id(Platform.SIREN, zha_device_proxy, hass) - cluster = zigpy_device.endpoints[1].ias_wd + +async def test_siren(hass: HomeAssistant, siren) -> None: + """Test zha siren platform.""" + + zha_device, cluster = siren + assert cluster is not None + entity_id = find_entity_id(Platform.SIREN, zha_device, hass) assert entity_id is not None + assert hass.states.get(entity_id).state == STATE_OFF + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the switch was created and that its state is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + # test that the state has changed from unavailable to off assert hass.states.get(entity_id).state == STATE_OFF # turn on from HA diff --git a/tests/components/zha/test_switch.py b/tests/components/zha/test_switch.py index cc4e41485f9..c8c2842c400 100644 --- a/tests/components/zha/test_switch.py +++ b/tests/components/zha/test_switch.py @@ -1,28 +1,51 @@ """Test ZHA switch.""" -from unittest.mock import call, patch +from unittest.mock import AsyncMock, call, patch import pytest +from zhaquirks.const import ( + DEVICE_TYPE, + ENDPOINTS, + INPUT_CLUSTERS, + OUTPUT_CLUSTERS, + PROFILE_ID, +) +from zigpy.exceptions import ZigbeeException from zigpy.profiles import zha -from zigpy.zcl.clusters import general +from zigpy.quirks import 
_DEVICE_REGISTRY, CustomCluster, CustomDevice +from zigpy.quirks.v2 import CustomDeviceV2, add_to_registry_v2 +import zigpy.types as t +from zigpy.zcl.clusters import closures, general +from zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster import zigpy.zcl.foundation as zcl_f from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, -) -from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.components.zha.core.group import GroupMember +from homeassistant.components.zha.core.helpers import get_zha_gateway +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.entity_component import async_update_entity from homeassistant.setup import async_setup_component -from .common import find_entity_id, send_attributes_report +from .common import ( + async_enable_traffic, + async_find_group_entity_id, + async_test_rejoin, + async_wait_for_updates, + find_entity_id, + send_attributes_report, + update_attribute_cache, +) from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from tests.common import MockConfigEntry + ON = 1 OFF = 0 +IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" +IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e8" @pytest.fixture(autouse=True) @@ -40,51 +63,104 @@ def switch_platform_only(): yield -async def test_switch(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: - """Test ZHA switch platform.""" +@pytest.fixture +def zigpy_device(zigpy_device_mock): + """Device tracker zigpy device.""" + endpoints = { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id, general.OnOff.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + } + } + return zigpy_device_mock(endpoints) - await setup_zha() - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + +@pytest.fixture +def zigpy_cover_device(zigpy_device_mock): + """Zigpy cover device.""" + + endpoints = { + 1: { + SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.WINDOW_COVERING_DEVICE, + SIG_EP_INPUT: [ + general.Basic.cluster_id, + closures.WindowCovering.cluster_id, + ], + SIG_EP_OUTPUT: [], + } + } + return zigpy_device_mock(endpoints) + + +@pytest.fixture +async def device_switch_1(hass, zigpy_device_mock, zha_device_joined): + """Test ZHA switch platform.""" zigpy_device = zigpy_device_mock( { 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.OnOff.cluster_id, - general.Groups.cluster_id, - ], + SIG_EP_INPUT: [general.OnOff.cluster_id, general.Groups.cluster_id], SIG_EP_OUTPUT: [], SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - SIG_EP_PROFILE: zha.PROFILE_ID, } }, - ieee="01:2d:6f:00:0a:90:69:e8", - node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", + ieee=IEEE_GROUPABLE_DEVICE, ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + await hass.async_block_till_done() + return zha_device - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - entity_id = 
find_entity_id(Platform.SWITCH, zha_device_proxy, hass) +@pytest.fixture +async def device_switch_2(hass, zigpy_device_mock, zha_device_joined): + """Test ZHA switch platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.OnOff.cluster_id, general.Groups.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + } + }, + ieee=IEEE_GROUPABLE_DEVICE2, + ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + await hass.async_block_till_done() + return zha_device + + +async def test_switch( + hass: HomeAssistant, zha_device_joined_restored, zigpy_device +) -> None: + """Test ZHA switch platform.""" + + zha_device = await zha_device_joined_restored(zigpy_device) cluster = zigpy_device.endpoints[1].on_off + entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) assert entity_id is not None + assert hass.states.get(entity_id).state == STATE_OFF + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the switch was created and that its state is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + # test that the state has changed from unavailable to off assert hass.states.get(entity_id).state == STATE_OFF # turn on at switch - await send_attributes_report( - hass, cluster, {general.OnOff.AttributeDefs.on_off.id: ON} - ) + await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 2}) assert hass.states.get(entity_id).state == STATE_ON # turn off at switch - await send_attributes_report( - hass, cluster, {general.OnOff.AttributeDefs.on_off.id: OFF} - ) + await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2}) assert hass.states.get(entity_id).state == STATE_OFF # turn on from HA @@ -141,3 +217,765 @@ async def test_switch(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None assert cluster.read_attributes.call_args == call( ["on_off"], allow_cache=False, only_cache=False, manufacturer=None ) + + # test joining a new switch to the network and HA + await async_test_rejoin(hass, zigpy_device, [cluster], (1,)) + + +class WindowDetectionFunctionQuirk(CustomDevice): + """Quirk with window detection function attribute.""" + + class TuyaManufCluster(CustomCluster, ManufacturerSpecificCluster): + """Tuya manufacturer specific cluster.""" + + cluster_id = 0xEF00 + ep_attribute = "tuya_manufacturer" + + attributes = { + 0xEF01: ("window_detection_function", t.Bool), + 0xEF02: ("window_detection_function_inverter", t.Bool), + } + + def __init__(self, *args, **kwargs): + """Initialize with task.""" + super().__init__(*args, **kwargs) + self._attr_cache.update( + {0xEF01: False} + ) # entity won't be created without this + + replacement = { + ENDPOINTS: { + 1: { + PROFILE_ID: zha.PROFILE_ID, + DEVICE_TYPE: zha.DeviceType.ON_OFF_SWITCH, + INPUT_CLUSTERS: [general.Basic.cluster_id, TuyaManufCluster], + OUTPUT_CLUSTERS: [], + }, + } + } + + +@pytest.fixture +async def zigpy_device_tuya(hass, zigpy_device_mock, zha_device_joined): + """Device tracker zigpy tuya device.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + } + }, + manufacturer="_TZE200_b6wax7g0", + quirk=WindowDetectionFunctionQuirk, + ) + + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + await hass.async_block_till_done() + return zigpy_device + + 
+@patch( + "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY", + new=0, +) +async def test_zha_group_switch_entity( + hass: HomeAssistant, + device_switch_1, + device_switch_2, + entity_registry: er.EntityRegistry, + config_entry: MockConfigEntry, +) -> None: + """Test the switch entity for a ZHA group.""" + + # make sure we can still get groups when counter entities exist + entity_id = "sensor.coordinator_manufacturer_coordinator_model_counter_1" + state = hass.states.get(entity_id) + assert state is None + + # Enable the entity. + entity_registry.async_update_entity(entity_id, disabled_by=None) + await hass.config_entries.async_reload(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state is not None + assert state.state == "1" + + zha_gateway = get_zha_gateway(hass) + assert zha_gateway is not None + device_switch_1._zha_gateway = zha_gateway + device_switch_2._zha_gateway = zha_gateway + member_ieee_addresses = [ + device_switch_1.ieee, + device_switch_2.ieee, + zha_gateway.coordinator_zha_device.ieee, + ] + members = [ + GroupMember(device_switch_1.ieee, 1), + GroupMember(device_switch_2.ieee, 1), + GroupMember(zha_gateway.coordinator_zha_device.ieee, 1), + ] + + # test creating a group with 2 members + zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) + await hass.async_block_till_done() + + assert zha_group is not None + assert len(zha_group.members) == 3 + for member in zha_group.members: + assert member.device.ieee in member_ieee_addresses + assert member.group == zha_group + assert member.endpoint is not None + + entity_id = async_find_group_entity_id(hass, Platform.SWITCH, zha_group) + assert hass.states.get(entity_id) is not None + + group_cluster_on_off = zha_group.endpoint[general.OnOff.cluster_id] + dev1_cluster_on_off = device_switch_1.device.endpoints[1].on_off + dev2_cluster_on_off = device_switch_2.device.endpoints[1].on_off + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [device_switch_1, device_switch_2]) + await async_wait_for_updates(hass) + + # test that the switches were created and are off + assert hass.states.get(entity_id).state == STATE_OFF + + # turn on from HA + with patch( + "zigpy.zcl.Cluster.request", + return_value=[0x00, zcl_f.Status.SUCCESS], + ): + # turn on via UI + await hass.services.async_call( + SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True + ) + assert len(group_cluster_on_off.request.mock_calls) == 1 + assert group_cluster_on_off.request.call_args == call( + False, + ON, + group_cluster_on_off.commands_by_name["on"].schema, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + assert hass.states.get(entity_id).state == STATE_ON + + # test turn off failure case + hold_off = group_cluster_on_off.off + group_cluster_on_off.off = AsyncMock(return_value=[0x01, zcl_f.Status.FAILURE]) + # turn off via UI + await hass.services.async_call( + SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True + ) + assert len(group_cluster_on_off.off.mock_calls) == 1 + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_ON + group_cluster_on_off.off = hold_off + + # turn off from HA + with patch( + "zigpy.zcl.Cluster.request", + return_value=[0x01, zcl_f.Status.SUCCESS], + ): + # turn off via UI + await hass.services.async_call( + SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True + ) + assert 
len(group_cluster_on_off.request.mock_calls) == 1 + assert group_cluster_on_off.request.call_args == call( + False, + OFF, + group_cluster_on_off.commands_by_name["off"].schema, + expect_reply=True, + manufacturer=None, + tsn=None, + ) + assert hass.states.get(entity_id).state == STATE_OFF + + # test turn on failure case + hold_on = group_cluster_on_off.on + group_cluster_on_off.on = AsyncMock(return_value=[0x01, zcl_f.Status.FAILURE]) + # turn on via UI + await hass.services.async_call( + SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True + ) + assert len(group_cluster_on_off.on.mock_calls) == 1 + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OFF + group_cluster_on_off.on = hold_on + + # test some of the group logic to make sure we key off states correctly + await send_attributes_report(hass, dev1_cluster_on_off, {0: 1}) + await send_attributes_report(hass, dev2_cluster_on_off, {0: 1}) + await async_wait_for_updates(hass) + + # test that group switch is on + assert hass.states.get(entity_id).state == STATE_ON + + await send_attributes_report(hass, dev1_cluster_on_off, {0: 0}) + await async_wait_for_updates(hass) + + # test that group switch is still on + assert hass.states.get(entity_id).state == STATE_ON + + await send_attributes_report(hass, dev2_cluster_on_off, {0: 0}) + await async_wait_for_updates(hass) + + # test that group switch is now off + assert hass.states.get(entity_id).state == STATE_OFF + + await send_attributes_report(hass, dev1_cluster_on_off, {0: 1}) + await async_wait_for_updates(hass) + + # test that group switch is now back on + assert hass.states.get(entity_id).state == STATE_ON + + +async def test_switch_configurable( + hass: HomeAssistant, zha_device_joined_restored, zigpy_device_tuya +) -> None: + """Test ZHA configurable switch platform.""" + + zha_device = await zha_device_joined_restored(zigpy_device_tuya) + cluster = zigpy_device_tuya.endpoints[1].tuya_manufacturer + entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) + assert entity_id is not None + + assert hass.states.get(entity_id).state == STATE_OFF + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the switch was created and that its state is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + # test that the state has changed from unavailable to off + assert hass.states.get(entity_id).state == STATE_OFF + + # turn on at switch + await send_attributes_report(hass, cluster, {"window_detection_function": True}) + assert hass.states.get(entity_id).state == STATE_ON + + # turn off at switch + await send_attributes_report(hass, cluster, {"window_detection_function": False}) + assert hass.states.get(entity_id).state == STATE_OFF + + # turn on from HA + with patch( + "zigpy.zcl.Cluster.write_attributes", + return_value=[zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS], + ): + # turn on via UI + await hass.services.async_call( + SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True + ) + assert cluster.write_attributes.mock_calls == [ + call({"window_detection_function": True}, manufacturer=None) + ] + + cluster.write_attributes.reset_mock() + + # turn off from HA + with patch( + "zigpy.zcl.Cluster.write_attributes", + return_value=[zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS], + ): + # turn off via UI + await hass.services.async_call( + SWITCH_DOMAIN, "turn_off", {"entity_id": 
entity_id}, blocking=True + ) + assert cluster.write_attributes.mock_calls == [ + call({"window_detection_function": False}, manufacturer=None) + ] + + cluster.read_attributes.reset_mock() + await async_setup_component(hass, "homeassistant", {}) + await hass.async_block_till_done() + + await hass.services.async_call( + "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True + ) + # the mocking doesn't update the attr cache so this flips back to initial value + assert cluster.read_attributes.call_count == 2 + assert [ + call( + [ + "window_detection_function", + ], + allow_cache=False, + only_cache=False, + manufacturer=None, + ), + call( + [ + "window_detection_function_inverter", + ], + allow_cache=False, + only_cache=False, + manufacturer=None, + ), + ] == cluster.read_attributes.call_args_list + + cluster.write_attributes.reset_mock() + cluster.write_attributes.side_effect = ZigbeeException + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True + ) + + assert cluster.write_attributes.mock_calls == [ + call({"window_detection_function": False}, manufacturer=None), + call({"window_detection_function": False}, manufacturer=None), + call({"window_detection_function": False}, manufacturer=None), + ] + + cluster.write_attributes.side_effect = None + + # test inverter + cluster.write_attributes.reset_mock() + cluster._attr_cache.update({0xEF02: True}) + + await hass.services.async_call( + SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True + ) + assert cluster.write_attributes.mock_calls == [ + call({"window_detection_function": True}, manufacturer=None) + ] + + cluster.write_attributes.reset_mock() + await hass.services.async_call( + SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True + ) + assert cluster.write_attributes.mock_calls == [ + call({"window_detection_function": False}, manufacturer=None) + ] + + # test joining a new switch to the network and HA + await async_test_rejoin(hass, zigpy_device_tuya, [cluster], (0,)) + + +async def test_switch_configurable_custom_on_off_values( + hass: HomeAssistant, zha_device_joined_restored, zigpy_device_mock +) -> None: + """Test ZHA configurable switch platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + } + }, + manufacturer="manufacturer", + model="model", + ) + + ( + add_to_registry_v2(zigpy_device.manufacturer, zigpy_device.model) + .adds(WindowDetectionFunctionQuirk.TuyaManufCluster) + .switch( + "window_detection_function", + WindowDetectionFunctionQuirk.TuyaManufCluster.cluster_id, + on_value=3, + off_value=5, + ) + ) + + zigpy_device = _DEVICE_REGISTRY.get_device(zigpy_device) + + assert isinstance(zigpy_device, CustomDeviceV2) + cluster = zigpy_device.endpoints[1].tuya_manufacturer + cluster.PLUGGED_ATTR_READS = {"window_detection_function": 5} + update_attribute_cache(cluster) + + zha_device = await zha_device_joined_restored(zigpy_device) + + entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) + assert entity_id is not None + + assert hass.states.get(entity_id).state == STATE_OFF + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the switch was created and that its state is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, 
[zha_device]) + + # test that the state has changed from unavailable to off + assert hass.states.get(entity_id).state == STATE_OFF + + # turn on at switch + await send_attributes_report(hass, cluster, {"window_detection_function": 3}) + assert hass.states.get(entity_id).state == STATE_ON + + # turn off at switch + await send_attributes_report(hass, cluster, {"window_detection_function": 5}) + assert hass.states.get(entity_id).state == STATE_OFF + + # turn on from HA + with patch( + "zigpy.zcl.Cluster.write_attributes", + return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], + ): + # turn on via UI + await hass.services.async_call( + SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True + ) + assert cluster.write_attributes.mock_calls == [ + call({"window_detection_function": 3}, manufacturer=None) + ] + cluster.write_attributes.reset_mock() + + # turn off from HA + with patch( + "zigpy.zcl.Cluster.write_attributes", + return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], + ): + # turn off via UI + await hass.services.async_call( + SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True + ) + assert cluster.write_attributes.mock_calls == [ + call({"window_detection_function": 5}, manufacturer=None) + ] + + +async def test_switch_configurable_custom_on_off_values_force_inverted( + hass: HomeAssistant, zha_device_joined_restored, zigpy_device_mock +) -> None: + """Test ZHA configurable switch platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + } + }, + manufacturer="manufacturer2", + model="model2", + ) + + ( + add_to_registry_v2(zigpy_device.manufacturer, zigpy_device.model) + .adds(WindowDetectionFunctionQuirk.TuyaManufCluster) + .switch( + "window_detection_function", + WindowDetectionFunctionQuirk.TuyaManufCluster.cluster_id, + on_value=3, + off_value=5, + force_inverted=True, + ) + ) + + zigpy_device = _DEVICE_REGISTRY.get_device(zigpy_device) + + assert isinstance(zigpy_device, CustomDeviceV2) + cluster = zigpy_device.endpoints[1].tuya_manufacturer + cluster.PLUGGED_ATTR_READS = {"window_detection_function": 5} + update_attribute_cache(cluster) + + zha_device = await zha_device_joined_restored(zigpy_device) + + entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) + assert entity_id is not None + + assert hass.states.get(entity_id).state == STATE_ON + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the switch was created and that its state is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + # test that the state has changed from unavailable to off + assert hass.states.get(entity_id).state == STATE_ON + + # turn on at switch + await send_attributes_report(hass, cluster, {"window_detection_function": 3}) + assert hass.states.get(entity_id).state == STATE_OFF + + # turn off at switch + await send_attributes_report(hass, cluster, {"window_detection_function": 5}) + assert hass.states.get(entity_id).state == STATE_ON + + # turn on from HA + with patch( + "zigpy.zcl.Cluster.write_attributes", + return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], + ): + # turn on via UI + await hass.services.async_call( + SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True + ) + assert cluster.write_attributes.mock_calls == [ + 
call({"window_detection_function": 5}, manufacturer=None) + ] + cluster.write_attributes.reset_mock() + + # turn off from HA + with patch( + "zigpy.zcl.Cluster.write_attributes", + return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], + ): + # turn off via UI + await hass.services.async_call( + SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True + ) + assert cluster.write_attributes.mock_calls == [ + call({"window_detection_function": 3}, manufacturer=None) + ] + + +async def test_switch_configurable_custom_on_off_values_inverter_attribute( + hass: HomeAssistant, zha_device_joined_restored, zigpy_device_mock +) -> None: + """Test ZHA configurable switch platform.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + } + }, + manufacturer="manufacturer3", + model="model3", + ) + + ( + add_to_registry_v2(zigpy_device.manufacturer, zigpy_device.model) + .adds(WindowDetectionFunctionQuirk.TuyaManufCluster) + .switch( + "window_detection_function", + WindowDetectionFunctionQuirk.TuyaManufCluster.cluster_id, + on_value=3, + off_value=5, + invert_attribute_name="window_detection_function_inverter", + ) + ) + + zigpy_device = _DEVICE_REGISTRY.get_device(zigpy_device) + + assert isinstance(zigpy_device, CustomDeviceV2) + cluster = zigpy_device.endpoints[1].tuya_manufacturer + cluster.PLUGGED_ATTR_READS = { + "window_detection_function": 5, + "window_detection_function_inverter": t.Bool(True), + } + update_attribute_cache(cluster) + + zha_device = await zha_device_joined_restored(zigpy_device) + + entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) + assert entity_id is not None + + assert hass.states.get(entity_id).state == STATE_ON + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the switch was created and that its state is unavailable + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + # test that the state has changed from unavailable to off + assert hass.states.get(entity_id).state == STATE_ON + + # turn on at switch + await send_attributes_report(hass, cluster, {"window_detection_function": 3}) + assert hass.states.get(entity_id).state == STATE_OFF + + # turn off at switch + await send_attributes_report(hass, cluster, {"window_detection_function": 5}) + assert hass.states.get(entity_id).state == STATE_ON + + # turn on from HA + with patch( + "zigpy.zcl.Cluster.write_attributes", + return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], + ): + # turn on via UI + await hass.services.async_call( + SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True + ) + assert cluster.write_attributes.mock_calls == [ + call({"window_detection_function": 5}, manufacturer=None) + ] + cluster.write_attributes.reset_mock() + + # turn off from HA + with patch( + "zigpy.zcl.Cluster.write_attributes", + return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], + ): + # turn off via UI + await hass.services.async_call( + SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True + ) + assert cluster.write_attributes.mock_calls == [ + call({"window_detection_function": 3}, manufacturer=None) + ] + + +WCAttrs = closures.WindowCovering.AttributeDefs +WCT = closures.WindowCovering.WindowCoveringType +WCCS = closures.WindowCovering.ConfigStatus +WCM = 
closures.WindowCovering.WindowCoveringMode + + +async def test_cover_inversion_switch( + hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device +) -> None: + """Test ZHA cover platform.""" + + # load up cover domain + cluster = zigpy_cover_device.endpoints[1].window_covering + cluster.PLUGGED_ATTR_READS = { + WCAttrs.current_position_lift_percentage.name: 65, + WCAttrs.current_position_tilt_percentage.name: 42, + WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, + WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), + WCAttrs.window_covering_mode.name: WCM(WCM.LEDs_display_feedback), + } + update_attribute_cache(cluster) + zha_device = await zha_device_joined_restored(zigpy_cover_device) + assert ( + not zha_device.endpoints[1] + .all_cluster_handlers[f"1:0x{cluster.cluster_id:04x}"] + .inverted + ) + assert cluster.read_attributes.call_count == 3 + assert ( + WCAttrs.current_position_lift_percentage.name + in cluster.read_attributes.call_args[0][0] + ) + assert ( + WCAttrs.current_position_tilt_percentage.name + in cluster.read_attributes.call_args[0][0] + ) + + entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) + assert entity_id is not None + + await async_enable_traffic(hass, [zha_device], enabled=False) + # test that the cover was created and that it is unavailable + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_UNAVAILABLE + + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + await hass.async_block_till_done() + + # test update + prev_call_count = cluster.read_attributes.call_count + await async_update_entity(hass, entity_id) + assert cluster.read_attributes.call_count == prev_call_count + 1 + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OFF + + # test to see the state remains after tilting to 0% + await send_attributes_report( + hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} + ) + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OFF + + with patch( + "zigpy.zcl.Cluster.write_attributes", return_value=[0x1, zcl_f.Status.SUCCESS] + ): + cluster.PLUGGED_ATTR_READS = { + WCAttrs.config_status.name: WCCS.Operational + | WCCS.Open_up_commands_reversed, + } + # turn on from UI + await hass.services.async_call( + SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True + ) + assert cluster.write_attributes.call_count == 1 + assert cluster.write_attributes.call_args_list[0] == call( + { + WCAttrs.window_covering_mode.name: WCM.Motor_direction_reversed + | WCM.LEDs_display_feedback + }, + manufacturer=None, + ) + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_ON + + cluster.write_attributes.reset_mock() + + # turn off from UI + cluster.PLUGGED_ATTR_READS = { + WCAttrs.config_status.name: WCCS.Operational, + } + await hass.services.async_call( + SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True + ) + assert cluster.write_attributes.call_count == 1 + assert cluster.write_attributes.call_args_list[0] == call( + {WCAttrs.window_covering_mode.name: WCM.LEDs_display_feedback}, + manufacturer=None, + ) + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OFF + + cluster.write_attributes.reset_mock() + + # test that sending the command again does not result in a write + await hass.services.async_call( + SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True + ) + assert 
cluster.write_attributes.call_count == 0 + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OFF + + +async def test_cover_inversion_switch_not_created( + hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device +) -> None: + """Test ZHA cover platform.""" + + # load up cover domain + cluster = zigpy_cover_device.endpoints[1].window_covering + cluster.PLUGGED_ATTR_READS = { + WCAttrs.current_position_lift_percentage.name: 65, + WCAttrs.current_position_tilt_percentage.name: 42, + WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), + } + update_attribute_cache(cluster) + zha_device = await zha_device_joined_restored(zigpy_cover_device) + + assert cluster.read_attributes.call_count == 3 + assert ( + WCAttrs.current_position_lift_percentage.name + in cluster.read_attributes.call_args[0][0] + ) + assert ( + WCAttrs.current_position_tilt_percentage.name + in cluster.read_attributes.call_args[0][0] + ) + + # entity should not be created when mode or config status aren't present + entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) + assert entity_id is None diff --git a/tests/components/zha/test_update.py b/tests/components/zha/test_update.py index 4b6dff4fc6b..32be013e673 100644 --- a/tests/components/zha/test_update.py +++ b/tests/components/zha/test_update.py @@ -3,11 +3,8 @@ from unittest.mock import AsyncMock, call, patch import pytest -from zha.application.platforms.update import ( - FirmwareUpdateEntity as ZhaFirmwareUpdateEntity, -) from zigpy.exceptions import DeliveryError -from zigpy.ota import OtaImagesResult, OtaImageWithMetadata +from zigpy.ota import OtaImageWithMetadata import zigpy.ota.image as firmware from zigpy.ota.providers import BaseOtaImageMetadata from zigpy.profiles import zha @@ -23,31 +20,16 @@ from homeassistant.components.update import ( ATTR_IN_PROGRESS, ATTR_INSTALLED_VERSION, ATTR_LATEST_VERSION, - ATTR_UPDATE_PERCENTAGE, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, ) -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, -) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_OFF, - STATE_ON, - STATE_UNKNOWN, - Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component -from .common import find_entity_id, update_attribute_cache -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE - -from tests.typing import WebSocketGenerator +from .common import async_enable_traffic, find_entity_id, update_attribute_cache +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE @pytest.fixture(autouse=True) @@ -65,32 +47,28 @@ def update_platform_only(): yield +@pytest.fixture +def zigpy_device(zigpy_device_mock): + """Device tracker zigpy device.""" + endpoints = { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id, general.OnOff.cluster_id], + SIG_EP_OUTPUT: [general.Ota.cluster_id], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + } + } + return zigpy_device_mock( + endpoints, node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00" + ) + + async def setup_test_data( - hass: HomeAssistant, - zigpy_device_mock, + zha_device_joined_restored, + zigpy_device, skip_attribute_plugs=False, file_not_found=False, ): """Set up test data for the tests.""" - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = 
get_zha_gateway_proxy(hass) - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id, general.OnOff.cluster_id], - SIG_EP_OUTPUT: [general.Ota.cluster_id], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - ) - - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - fw_version = 0x12345678 installed_fw_version = fw_version - 10 cluster = zigpy_device.endpoints[1].out_clusters[general.Ota.cluster_id] @@ -125,34 +103,34 @@ async def setup_test_data( ), ) - cluster.endpoint.device.application.ota.get_ota_images = AsyncMock( - return_value=OtaImagesResult( - upgrades=() if file_not_found else (fw_image,), - downgrades=(), - ) + cluster.endpoint.device.application.ota.get_ota_image = AsyncMock( + return_value=None if file_not_found else fw_image ) - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - zha_device_proxy.device.async_update_sw_build_id(installed_fw_version) - return zha_device_proxy, cluster, fw_image, installed_fw_version + zha_device = await zha_device_joined_restored(zigpy_device) + zha_device.async_update_sw_build_id(installed_fw_version) + + return zha_device, cluster, fw_image, installed_fw_version async def test_firmware_update_notification_from_zigpy( hass: HomeAssistant, - setup_zha, - zigpy_device_mock, + zha_device_joined_restored, + zigpy_device, ) -> None: """Test ZHA update platform - firmware update notification.""" - await setup_zha() zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - hass, - zigpy_device_mock, + zha_device_joined_restored, + zigpy_device, ) entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_UNKNOWN + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + assert hass.states.get(entity_id).state == STATE_OFF # simulate an image available notification await cluster._handle_query_next_image( @@ -161,7 +139,7 @@ async def test_firmware_update_notification_from_zigpy( ), general.QueryNextImageCommand( fw_image.firmware.header.field_control, - zha_device.device.manufacturer_code, + zha_device.manufacturer_code, fw_image.firmware.header.image_type, installed_fw_version, fw_image.firmware.header.header_version, @@ -173,28 +151,27 @@ async def test_firmware_update_notification_from_zigpy( assert state.state == STATE_ON attrs = state.attributes assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert attrs[ATTR_IN_PROGRESS] is False - assert attrs[ATTR_UPDATE_PERCENTAGE] is None + assert not attrs[ATTR_IN_PROGRESS] assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" ) async def test_firmware_update_notification_from_service_call( - hass: HomeAssistant, - setup_zha, - zigpy_device_mock, + hass: HomeAssistant, zha_device_joined_restored, zigpy_device ) -> None: """Test ZHA update platform - firmware update manual check.""" - await setup_zha() zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - hass, - zigpy_device_mock, + zha_device_joined_restored, zigpy_device ) entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_UNKNOWN + + # allow traffic to 
flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + assert hass.states.get(entity_id).state == STATE_OFF async def _async_image_notify_side_effect(*args, **kwargs): await cluster._handle_query_next_image( @@ -203,7 +180,7 @@ async def test_firmware_update_notification_from_service_call( ), general.QueryNextImageCommand( fw_image.firmware.header.field_control, - zha_device.device.manufacturer_code, + zha_device.manufacturer_code, fw_image.firmware.header.image_type, installed_fw_version, fw_image.firmware.header.header_version, @@ -233,8 +210,7 @@ async def test_firmware_update_notification_from_service_call( assert state.state == STATE_ON attrs = state.attributes assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert attrs[ATTR_IN_PROGRESS] is False - assert attrs[ATTR_UPDATE_PERCENTAGE] is None + assert not attrs[ATTR_IN_PROGRESS] assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" @@ -269,14 +245,11 @@ def make_packet(zigpy_device, cluster, cmd_name: str, **kwargs): @patch("zigpy.device.AFTER_OTA_ATTR_READ_DELAY", 0.01) async def test_firmware_update_success( - hass: HomeAssistant, - setup_zha, - zigpy_device_mock, + hass: HomeAssistant, zha_device_joined_restored, zigpy_device ) -> None: """Test ZHA update platform - firmware update success.""" - await setup_zha() - zha_device, ota_cluster, fw_image, installed_fw_version = await setup_test_data( - hass, zigpy_device_mock + zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( + zha_device_joined_restored, zigpy_device ) assert installed_fw_version < fw_image.firmware.header.file_version @@ -284,16 +257,19 @@ async def test_firmware_update_success( entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_UNKNOWN + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + assert hass.states.get(entity_id).state == STATE_OFF # simulate an image available notification - await ota_cluster._handle_query_next_image( + await cluster._handle_query_next_image( foundation.ZCLHeader.cluster( tsn=0x12, command_id=general.Ota.ServerCommandDefs.query_next_image.id ), general.QueryNextImageCommand( field_control=fw_image.firmware.header.field_control, - manufacturer_code=zha_device.device.manufacturer_code, + manufacturer_code=zha_device.manufacturer_code, image_type=fw_image.firmware.header.image_type, current_file_version=installed_fw_version, ), @@ -304,20 +280,19 @@ async def test_firmware_update_success( assert state.state == STATE_ON attrs = state.attributes assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert attrs[ATTR_IN_PROGRESS] is False - assert attrs[ATTR_UPDATE_PERCENTAGE] is None + assert not attrs[ATTR_IN_PROGRESS] assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" ) - async def endpoint_reply(cluster, sequence, data, **kwargs): - if cluster == general.Ota.cluster_id: - hdr, cmd = ota_cluster.deserialize(data) + async def endpoint_reply(cluster_id, tsn, data, command_id): + if cluster_id == general.Ota.cluster_id: + hdr, cmd = cluster.deserialize(data) if isinstance(cmd, general.Ota.ImageNotifyCommand): - zha_device.device.device.packet_received( + zigpy_device.packet_received( make_packet( - zha_device.device.device, - ota_cluster, + zigpy_device, + cluster, general.Ota.ServerCommandDefs.query_next_image.name, 
field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -334,10 +309,10 @@ async def test_firmware_update_success( assert cmd.image_type == fw_image.firmware.header.image_type assert cmd.file_version == fw_image.firmware.header.file_version assert cmd.image_size == fw_image.firmware.header.image_size - zha_device.device.device.packet_received( + zigpy_device.packet_received( make_packet( - zha_device.device.device, - ota_cluster, + zigpy_device, + cluster, general.Ota.ServerCommandDefs.image_block.name, field_control=general.Ota.ImageBlockCommand.FieldControl.RequestNodeAddr, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -345,7 +320,7 @@ async def test_firmware_update_success( file_version=fw_image.firmware.header.file_version, file_offset=0, maximum_data_size=40, - request_node_addr=zha_device.device.device.ieee, + request_node_addr=zigpy_device.ieee, ) ) elif isinstance( @@ -361,10 +336,10 @@ async def test_firmware_update_success( assert cmd.file_version == fw_image.firmware.header.file_version assert cmd.file_offset == 0 assert cmd.image_data == fw_image.firmware.serialize()[0:40] - zha_device.device.device.packet_received( + zigpy_device.packet_received( make_packet( - zha_device.device.device, - ota_cluster, + zigpy_device, + cluster, general.Ota.ServerCommandDefs.image_block.name, field_control=general.Ota.ImageBlockCommand.FieldControl.RequestNodeAddr, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -372,7 +347,7 @@ async def test_firmware_update_success( file_version=fw_image.firmware.header.file_version, file_offset=40, maximum_data_size=40, - request_node_addr=zha_device.device.device.ieee, + request_node_addr=zigpy_device.ieee, ) ) elif cmd.file_offset == 40: @@ -393,17 +368,16 @@ async def test_firmware_update_success( assert ( attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" ) - assert attrs[ATTR_IN_PROGRESS] is True - assert attrs[ATTR_UPDATE_PERCENTAGE] == 58 + assert attrs[ATTR_IN_PROGRESS] == 58 assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" ) - zha_device.device.device.packet_received( + zigpy_device.packet_received( make_packet( - zha_device.device.device, - ota_cluster, + zigpy_device, + cluster, general.Ota.ServerCommandDefs.upgrade_end.name, status=foundation.Status.SUCCESS, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -422,7 +396,7 @@ async def test_firmware_update_success( assert cmd.upgrade_time == 0 def read_new_fw_version(*args, **kwargs): - ota_cluster.update_attribute( + cluster.update_attribute( attrid=general.Ota.AttributeDefs.current_file_version.id, value=fw_image.firmware.header.file_version, ) @@ -432,9 +406,9 @@ async def test_firmware_update_success( ) }, {} - ota_cluster.read_attributes.side_effect = read_new_fw_version + cluster.read_attributes.side_effect = read_new_fw_version - ota_cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) + cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, @@ -451,44 +425,42 @@ async def test_firmware_update_success( attrs[ATTR_INSTALLED_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" ) - assert attrs[ATTR_IN_PROGRESS] is False - assert attrs[ATTR_UPDATE_PERCENTAGE] is None + assert not attrs[ATTR_IN_PROGRESS] assert attrs[ATTR_LATEST_VERSION] == attrs[ATTR_INSTALLED_VERSION] # If we send a progress notification incorrectly, it won't 
be handled entity = hass.data[UPDATE_DOMAIN].get_entity(entity_id) - entity.entity_data.entity._update_progress(50, 100, 0.50) + entity._update_progress(50, 100, 0.50) state = hass.states.get(entity_id) - assert attrs[ATTR_IN_PROGRESS] is False - assert attrs[ATTR_UPDATE_PERCENTAGE] is None + assert not attrs[ATTR_IN_PROGRESS] assert state.state == STATE_OFF async def test_firmware_update_raises( - hass: HomeAssistant, - setup_zha, - zigpy_device_mock, + hass: HomeAssistant, zha_device_joined_restored, zigpy_device ) -> None: """Test ZHA update platform - firmware update raises.""" - await setup_zha() - zha_device, ota_cluster, fw_image, installed_fw_version = await setup_test_data( - hass, zigpy_device_mock + zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( + zha_device_joined_restored, zigpy_device ) entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_UNKNOWN + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + assert hass.states.get(entity_id).state == STATE_OFF # simulate an image available notification - await ota_cluster._handle_query_next_image( + await cluster._handle_query_next_image( foundation.ZCLHeader.cluster( tsn=0x12, command_id=general.Ota.ServerCommandDefs.query_next_image.id ), general.QueryNextImageCommand( fw_image.firmware.header.field_control, - zha_device.device.manufacturer_code, + zha_device.manufacturer_code, fw_image.firmware.header.image_type, installed_fw_version, fw_image.firmware.header.header_version, @@ -500,20 +472,19 @@ async def test_firmware_update_raises( assert state.state == STATE_ON attrs = state.attributes assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert attrs[ATTR_IN_PROGRESS] is False - assert attrs[ATTR_UPDATE_PERCENTAGE] is None + assert not attrs[ATTR_IN_PROGRESS] assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" ) - async def endpoint_reply(cluster, sequence, data, **kwargs): - if cluster == general.Ota.cluster_id: - hdr, cmd = ota_cluster.deserialize(data) + async def endpoint_reply(cluster_id, tsn, data, command_id): + if cluster_id == general.Ota.cluster_id: + hdr, cmd = cluster.deserialize(data) if isinstance(cmd, general.Ota.ImageNotifyCommand): - zha_device.device.device.packet_received( + zigpy_device.packet_received( make_packet( - zha_device.device.device, - ota_cluster, + zigpy_device, + cluster, general.Ota.ServerCommandDefs.query_next_image.name, field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -532,7 +503,7 @@ async def test_firmware_update_raises( assert cmd.image_size == fw_image.firmware.header.image_size raise DeliveryError("failed to deliver") - ota_cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) + cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) with pytest.raises(HomeAssistantError): await hass.services.async_call( UPDATE_DOMAIN, @@ -560,56 +531,81 @@ async def test_firmware_update_raises( ) -async def test_update_release_notes( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - setup_zha, - zigpy_device_mock, +async def test_firmware_update_no_longer_compatible( + hass: HomeAssistant, zha_device_joined_restored, zigpy_device ) -> None: - """Test ZHA update platform release notes.""" - await setup_zha() - - gateway = get_zha_gateway(hass) - 
gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id, general.OnOff.cluster_id], - SIG_EP_OUTPUT: [general.Ota.cluster_id], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", + """Test ZHA update platform - firmware update is no longer valid.""" + zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( + zha_device_joined_restored, zigpy_device ) - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - - zha_device: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) - zha_lib_entity = next( - e - for e in zha_device.device.platform_entities.values() - if isinstance(e, ZhaFirmwareUpdateEntity) - ) - zha_lib_entity._attr_release_notes = "Some lengthy release notes" - zha_lib_entity.maybe_emit_state_changed_event() - await hass.async_block_till_done() - entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - ws_client = await hass_ws_client(hass) - await ws_client.send_json( - { - "id": 1, - "type": "update/release_notes", - "entity_id": entity_id, - } + # allow traffic to flow through the gateway and device + await async_enable_traffic(hass, [zha_device]) + + assert hass.states.get(entity_id).state == STATE_OFF + + # simulate an image available notification + await cluster._handle_query_next_image( + foundation.ZCLHeader.cluster( + tsn=0x12, command_id=general.Ota.ServerCommandDefs.query_next_image.id + ), + general.QueryNextImageCommand( + fw_image.firmware.header.field_control, + zha_device.manufacturer_code, + fw_image.firmware.header.image_type, + installed_fw_version, + fw_image.firmware.header.header_version, + ), ) - result = await ws_client.receive_json() - assert result["success"] is True - assert result["result"] == "Some lengthy release notes" + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_ON + attrs = state.attributes + assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" + assert not attrs[ATTR_IN_PROGRESS] + assert ( + attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" + ) + + new_version = 0x99999999 + + async def endpoint_reply(cluster_id, tsn, data, command_id): + if cluster_id == general.Ota.cluster_id: + hdr, cmd = cluster.deserialize(data) + if isinstance(cmd, general.Ota.ImageNotifyCommand): + zigpy_device.packet_received( + make_packet( + zigpy_device, + cluster, + general.Ota.ServerCommandDefs.query_next_image.name, + field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, + manufacturer_code=fw_image.firmware.header.manufacturer_id, + image_type=fw_image.firmware.header.image_type, + # The device reports that it is no longer compatible! 
+ current_file_version=new_version, + hardware_version=1, + ) + ) + + cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + + # We updated the currently installed firmware version, as it is no longer valid + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + attrs = state.attributes + assert attrs[ATTR_INSTALLED_VERSION] == f"0x{new_version:08x}" + assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_LATEST_VERSION] == f"0x{new_version:08x}" diff --git a/tests/components/zha/test_websocket_api.py b/tests/components/zha/test_websocket_api.py index f6afee9eb83..80b9f6accd0 100644 --- a/tests/components/zha/test_websocket_api.py +++ b/tests/components/zha/test_websocket_api.py @@ -10,27 +10,12 @@ from unittest.mock import ANY, AsyncMock, MagicMock, call, patch from freezegun import freeze_time import pytest import voluptuous as vol -from zha.application.const import ( - ATTR_CLUSTER_ID, - ATTR_CLUSTER_TYPE, - ATTR_ENDPOINT_ID, - ATTR_ENDPOINT_NAMES, - ATTR_IEEE, - ATTR_MANUFACTURER, - ATTR_NEIGHBORS, - ATTR_QUIRK_APPLIED, - ATTR_TYPE, - CLUSTER_TYPE_IN, -) -from zha.zigbee.cluster_handlers import ClusterBindEvent, ClusterConfigureReportingEvent -from zha.zigbee.device import ClusterHandlerConfigurationComplete import zigpy.backups -from zigpy.const import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE import zigpy.profiles.zha import zigpy.types from zigpy.types.named import EUI64 import zigpy.util -from zigpy.zcl.clusters import closures, general, security +from zigpy.zcl.clusters import general, security from zigpy.zcl.clusters.general import Groups import zigpy.zdo.types as zdo_types @@ -40,12 +25,23 @@ from homeassistant.components.websocket_api import ( TYPE_RESULT, ) from homeassistant.components.zha import DOMAIN -from homeassistant.components.zha.const import EZSP_OVERWRITE_EUI64 -from homeassistant.components.zha.helpers import ( - ZHADeviceProxy, - ZHAGatewayProxy, - get_zha_gateway, - get_zha_gateway_proxy, +from homeassistant.components.zha.core.const import ( + ATTR_CLUSTER_ID, + ATTR_CLUSTER_TYPE, + ATTR_ENDPOINT_ID, + ATTR_ENDPOINT_NAMES, + ATTR_IEEE, + ATTR_MANUFACTURER, + ATTR_MODEL, + ATTR_NEIGHBORS, + ATTR_QUIRK_APPLIED, + ATTR_TYPE, + BINDINGS, + CLUSTER_TYPE_IN, + EZSP_OVERWRITE_EUI64, + GROUP_ID, + GROUP_IDS, + GROUP_NAME, ) from homeassistant.components.zha.websocket_api import ( ATTR_DURATION, @@ -53,19 +49,22 @@ from homeassistant.components.zha.websocket_api import ( ATTR_QR_CODE, ATTR_SOURCE_IEEE, ATTR_TARGET_IEEE, - BINDINGS, - GROUP_ID, - GROUP_IDS, - GROUP_NAME, ID, SERVICE_PERMIT, TYPE, async_load_api, ) -from homeassistant.const import ATTR_MODEL, ATTR_NAME, Platform +from homeassistant.const import ATTR_NAME, Platform from homeassistant.core import Context, HomeAssistant -from .conftest import FIXTURE_GRP_ID, FIXTURE_GRP_NAME +from .conftest import ( + FIXTURE_GRP_ID, + FIXTURE_GRP_NAME, + SIG_EP_INPUT, + SIG_EP_OUTPUT, + SIG_EP_PROFILE, + SIG_EP_TYPE, +) from .data import BASE_CUSTOM_CONFIGURATION, CONFIG_WITH_ALARM_OPTIONS from tests.common import MockConfigEntry, MockUser @@ -94,18 +93,10 @@ def required_platform_only(): @pytest.fixture -async def zha_client( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - setup_zha, - zigpy_device_mock, -) -> MockHAClientWebSocket: - """Get ZHA WebSocket client.""" +async def device_switch(hass, 
zigpy_device_mock, zha_device_joined): + """Test ZHA switch platform.""" - await setup_zha() - gateway = get_zha_gateway(hass) - - zigpy_device_switch = zigpy_device_mock( + zigpy_device = zigpy_device_mock( { 1: { SIG_EP_INPUT: [general.OnOff.cluster_id, general.Basic.cluster_id], @@ -116,8 +107,35 @@ async def zha_client( }, ieee=IEEE_SWITCH_DEVICE, ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return zha_device - zigpy_device_groupable = zigpy_device_mock( + +@pytest.fixture +async def device_ias_ace(hass, zigpy_device_mock, zha_device_joined): + """Test alarm control panel device.""" + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [security.IasAce.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.IAS_ANCILLARY_CONTROL, + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + } + }, + ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return zha_device + + +@pytest.fixture +async def device_groupable(hass, zigpy_device_mock, zha_device_joined): + """Test ZHA light platform.""" + + zigpy_device = zigpy_device_mock( { 1: { SIG_EP_INPUT: [ @@ -132,14 +150,19 @@ async def zha_client( }, ieee=IEEE_GROUPABLE_DEVICE, ) + zha_device = await zha_device_joined(zigpy_device) + zha_device.available = True + return zha_device - gateway.get_or_create_device(zigpy_device_switch) - await gateway.async_device_initialized(zigpy_device_switch) - await hass.async_block_till_done(wait_background_tasks=True) - gateway.get_or_create_device(zigpy_device_groupable) - await gateway.async_device_initialized(zigpy_device_groupable) - await hass.async_block_till_done(wait_background_tasks=True) +@pytest.fixture +async def zha_client( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + device_switch, + device_groupable, +) -> MockHAClientWebSocket: + """Get ZHA WebSocket client.""" # load the ZHA API async_load_api(hass) @@ -224,7 +247,7 @@ async def test_list_devices(zha_client) -> None: msg = await zha_client.receive_json() devices = msg["result"] - assert len(devices) == 3 # the coordinator is included as well + assert len(devices) == 2 + 1 # the coordinator is included as well msg_id = 100 for device in devices: @@ -261,31 +284,9 @@ async def test_get_zha_config(zha_client) -> None: async def test_get_zha_config_with_alarm( - hass: HomeAssistant, zha_client, zigpy_device_mock + hass: HomeAssistant, zha_client, device_ias_ace ) -> None: """Test getting ZHA custom configuration.""" - - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - - zigpy_device_ias = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [security.IasAce.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.IAS_ANCILLARY_CONTROL, - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - } - }, - ) - - gateway.get_or_create_device(zigpy_device_ias) - await gateway.async_device_initialized(zigpy_device_ias) - await hass.async_block_till_done(wait_background_tasks=True) - zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy( - zigpy_device_ias.ieee - ) - await zha_client.send_json({ID: 5, TYPE: "zha/configuration"}) msg = await zha_client.receive_json() @@ -294,7 +295,7 @@ async def test_get_zha_config_with_alarm( assert configuration == CONFIG_WITH_ALARM_OPTIONS # test that the alarm options are not in the config when we remove the device - zha_device_proxy.gateway_proxy.gateway.device_removed(zha_device_proxy.device) + 
device_ias_ace.gateway.device_removed(device_ias_ace.device) await hass.async_block_till_done() await zha_client.send_json({ID: 6, TYPE: "zha/configuration"}) @@ -389,12 +390,11 @@ async def test_get_group_not_found(zha_client) -> None: async def test_list_groupable_devices( - hass: HomeAssistant, zha_client, zigpy_app_controller + zha_client, device_groupable, zigpy_app_controller ) -> None: """Test getting ZHA devices that have a group cluster.""" # Ensure the coordinator doesn't have a group cluster coordinator = zigpy_app_controller.get_device(nwk=0x0000) - del coordinator.endpoints[1].in_clusters[Groups.cluster_id] await zha_client.send_json({ID: 10, TYPE: "zha/devices/groupable"}) @@ -425,10 +425,7 @@ async def test_list_groupable_devices( # Make sure there are no groupable devices when the device is unavailable # Make device unavailable - get_zha_gateway_proxy(hass).device_proxies[ - EUI64.convert(IEEE_GROUPABLE_DEVICE) - ].device.available = False - await hass.async_block_till_done(wait_background_tasks=True) + device_groupable.available = False await zha_client.send_json({ID: 11, TYPE: "zha/devices/groupable"}) @@ -440,16 +437,9 @@ async def test_list_groupable_devices( assert len(device_endpoints) == 0 -async def test_add_group(hass: HomeAssistant, zha_client) -> None: +async def test_add_group(zha_client) -> None: """Test adding and getting a new ZHA zigbee group.""" - await zha_client.send_json( - { - ID: 12, - TYPE: "zha/group/add", - GROUP_NAME: "new_group", - "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], - } - ) + await zha_client.send_json({ID: 12, TYPE: "zha/group/add", GROUP_NAME: "new_group"}) msg = await zha_client.receive_json() assert msg["id"] == 12 @@ -457,17 +447,8 @@ async def test_add_group(hass: HomeAssistant, zha_client) -> None: added_group = msg["result"] - groupable_device = get_zha_gateway_proxy(hass).device_proxies[ - EUI64.convert(IEEE_GROUPABLE_DEVICE) - ] - assert added_group["name"] == "new_group" - assert len(added_group["members"]) == 1 - assert added_group["members"][0]["device"]["ieee"] == IEEE_GROUPABLE_DEVICE - assert ( - added_group["members"][0]["device"]["device_reg_id"] - == groupable_device.device_id - ) + assert added_group["members"] == [] await zha_client.send_json({ID: 13, TYPE: "zha/groups"}) @@ -515,82 +496,6 @@ async def test_remove_group(zha_client) -> None: assert len(groups) == 0 -async def test_add_group_member(hass: HomeAssistant, zha_client) -> None: - """Test adding a ZHA zigbee group member.""" - await zha_client.send_json( - { - ID: 12, - TYPE: "zha/group/add", - GROUP_NAME: "new_group", - } - ) - - msg = await zha_client.receive_json() - assert msg["id"] == 12 - assert msg["type"] == TYPE_RESULT - - added_group = msg["result"] - - assert len(added_group["members"]) == 0 - - await zha_client.send_json( - { - ID: 13, - TYPE: "zha/group/members/add", - GROUP_ID: added_group["group_id"], - "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], - } - ) - - msg = await zha_client.receive_json() - assert msg["id"] == 13 - assert msg["type"] == TYPE_RESULT - - added_group = msg["result"] - - assert len(added_group["members"]) == 1 - assert added_group["name"] == "new_group" - assert added_group["members"][0]["device"]["ieee"] == IEEE_GROUPABLE_DEVICE - - -async def test_remove_group_member(hass: HomeAssistant, zha_client) -> None: - """Test removing a ZHA zigbee group member.""" - await zha_client.send_json( - { - ID: 12, - TYPE: "zha/group/add", - GROUP_NAME: "new_group", - "members": [{"ieee": 
IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], - } - ) - - msg = await zha_client.receive_json() - assert msg["id"] == 12 - assert msg["type"] == TYPE_RESULT - - added_group = msg["result"] - - assert added_group["name"] == "new_group" - assert len(added_group["members"]) == 1 - assert added_group["members"][0]["device"]["ieee"] == IEEE_GROUPABLE_DEVICE - - await zha_client.send_json( - { - ID: 13, - TYPE: "zha/group/members/remove", - GROUP_ID: added_group["group_id"], - "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], - } - ) - - msg = await zha_client.receive_json() - assert msg["id"] == 13 - assert msg["type"] == TYPE_RESULT - - added_group = msg["result"] - assert len(added_group["members"]) == 0 - - @pytest.fixture async def app_controller( hass: HomeAssistant, setup_zha, zigpy_app_controller: ControllerApplication @@ -1132,101 +1037,3 @@ async def test_websocket_bind_unbind_group( assert bind_mock.mock_calls == [call(test_group_id, ANY)] elif command_type == "unbind": assert unbind_mock.mock_calls == [call(test_group_id, ANY)] - - -async def test_websocket_reconfigure( - hass: HomeAssistant, zha_client: MockHAClientWebSocket, zigpy_device_mock -) -> None: - """Test websocket API to reconfigure a device.""" - gateway = get_zha_gateway(hass) - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [closures.WindowCovering.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SHADE, - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - } - }, - ) - - zha_device = gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - - zha_device_proxy = get_zha_gateway_proxy(hass).get_device_proxy(zha_device.ieee) - - def mock_reconfigure() -> None: - zha_device_proxy.handle_zha_channel_configure_reporting( - ClusterConfigureReportingEvent( - cluster_name="Window Covering", - cluster_id=258, - attributes={ - "current_position_lift_percentage": { - "min": 0, - "max": 900, - "id": "current_position_lift_percentage", - "name": "current_position_lift_percentage", - "change": 1, - "status": "SUCCESS", - }, - "current_position_tilt_percentage": { - "min": 0, - "max": 900, - "id": "current_position_tilt_percentage", - "name": "current_position_tilt_percentage", - "change": 1, - "status": "SUCCESS", - }, - }, - cluster_handler_unique_id="28:2c:02:bf:ff:ea:05:68:1:0x0102", - event_type="zha_channel_message", - event="zha_channel_configure_reporting", - ) - ) - - zha_device_proxy.handle_zha_channel_bind( - ClusterBindEvent( - cluster_name="Window Covering", - cluster_id=1, - success=True, - cluster_handler_unique_id="28:2c:02:bf:ff:ea:05:68:1:0x0012", - event_type="zha_channel_message", - event="zha_channel_bind", - ) - ) - - zha_device_proxy.handle_zha_channel_cfg_done( - ClusterHandlerConfigurationComplete( - device_ieee="28:2c:02:bf:ff:ea:05:68", - unique_id="28:2c:02:bf:ff:ea:05:68", - event_type="zha_channel_message", - event="zha_channel_cfg_done", - ) - ) - - with patch.object( - zha_device_proxy.device, "async_configure", side_effect=mock_reconfigure - ): - await zha_client.send_json( - { - ID: 6, - TYPE: "zha/devices/reconfigure", - ATTR_IEEE: str(zha_device_proxy.device.ieee), - } - ) - - messages = [] - - while len(messages) != 3: - msg = await zha_client.receive_json() - - if msg[ID] == 6: - messages.append(msg) - - # Ensure the frontend receives progress events - assert {m["event"]["type"] for m in messages} == { - "zha_channel_configure_reporting", - 
"zha_channel_bind", - "zha_channel_cfg_done", - } diff --git a/tests/components/zha/zha_devices_list.py b/tests/components/zha/zha_devices_list.py new file mode 100644 index 00000000000..4c23244c5e0 --- /dev/null +++ b/tests/components/zha/zha_devices_list.py @@ -0,0 +1,5922 @@ +"""Example Zigbee Devices.""" + +from zigpy.const import ( + SIG_ENDPOINTS, + SIG_EP_INPUT, + SIG_EP_OUTPUT, + SIG_EP_PROFILE, + SIG_EP_TYPE, + SIG_MANUFACTURER, + SIG_MODEL, + SIG_NODE_DESC, +) +from zigpy.profiles import zha, zll +from zigpy.types import Bool, uint8_t +from zigpy.zcl.clusters.closures import DoorLock +from zigpy.zcl.clusters.general import ( + Basic, + Groups, + Identify, + LevelControl, + MultistateInput, + OnOff, + Ota, + PowerConfiguration, + Scenes, +) +from zigpy.zcl.clusters.lighting import Color +from zigpy.zcl.clusters.measurement import ( + IlluminanceMeasurement, + OccupancySensing, + TemperatureMeasurement, +) + +DEV_SIG_CLUSTER_HANDLERS = "cluster_handlers" +DEV_SIG_DEV_NO = "device_no" +DEV_SIG_ENT_MAP = "entity_map" +DEV_SIG_ENT_MAP_CLASS = "entity_class" +DEV_SIG_ENT_MAP_ID = "entity_id" +DEV_SIG_EP_ID = "endpoint_id" +DEV_SIG_EVT_CLUSTER_HANDLERS = "event_cluster_handlers" +DEV_SIG_ZHA_QUIRK = "zha_quirk" +DEV_SIG_ATTRIBUTES = "attributes" + + +PROFILE_ID = SIG_EP_PROFILE +DEVICE_TYPE = SIG_EP_TYPE +INPUT_CLUSTERS = SIG_EP_INPUT +OUTPUT_CLUSTERS = SIG_EP_OUTPUT + +DEVICES = [ + { + DEV_SIG_DEV_NO: 0, + SIG_MANUFACTURER: "ADUROLIGHT", + SIG_MODEL: "Adurolight_NCC", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 2080, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4096, 64716], + SIG_EP_OUTPUT: [3, 4, 6, 8, 4096, 64716], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.adurolight_adurolight_ncc_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.adurolight_adurolight_ncc_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.adurolight_adurolight_ncc_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 1, + SIG_MANUFACTURER: "Bosch", + SIG_MODEL: "ISW-ZPR1-WP13", + SIG_NODE_DESC: b"\x02@\x08\x00\x00l\x00\x00\x00\x00\x00\x00\x00", + SIG_ENDPOINTS: { + 5: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 5, + SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["5:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-5-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.bosch_isw_zpr1_wp13_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-5-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.bosch_isw_zpr1_wp13_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-5-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.bosch_isw_zpr1_wp13_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-5-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + 
DEV_SIG_ENT_MAP_ID: "sensor.bosch_isw_zpr1_wp13_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-5-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.bosch_isw_zpr1_wp13_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-5-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.bosch_isw_zpr1_wp13_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-5-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.bosch_isw_zpr1_wp13_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 2, + SIG_MANUFACTURER: "CentraLite", + SIG_MODEL: "3130", + SIG_NODE_DESC: b"\x02@\x80N\x10RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 2821], + SIG_EP_OUTPUT: [3, 6, 8, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.centralite_3130_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3130_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3130_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3130_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.centralite_3130_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 3, + SIG_MANUFACTURER: "CentraLite", + SIG_MODEL: "3210-L", + SIG_NODE_DESC: b"\x01@\x8eN\x10RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 81, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 1794, 2820, 2821, 64515], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("switch", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.centralite_3210_l_switch", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.centralite_3210_l_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_power", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_apparent_power", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_current", + }, + ("sensor", 
"00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_voltage", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_ac_frequency", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_power_factor", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_instantaneous_demand", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_summation_delivered", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.centralite_3210_l_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 4, + SIG_MANUFACTURER: "CentraLite", + SIG_MODEL: "3310-S", + SIG_NODE_DESC: b"\x02@\x80\xdf\xc2RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 770, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 1026, 2821, 64581], + SIG_EP_OUTPUT: [3, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.centralite_3310_s_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_lqi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-64581"): { + DEV_SIG_CLUSTER_HANDLERS: ["humidity"], + DEV_SIG_ENT_MAP_CLASS: "Humidity", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_humidity", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: 
"update.centralite_3310_s_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 5, + SIG_MANUFACTURER: "CentraLite", + SIG_MODEL: "3315-S", + SIG_NODE_DESC: b"\x02@\x80\xdf\xc2RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 12, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [0, 3, 2821, 64527], + SIG_EP_OUTPUT: [3], + SIG_EP_PROFILE: 49887, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.centralite_3315_s_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.centralite_3315_s_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3315_s_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3315_s_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3315_s_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3315_s_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.centralite_3315_s_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 6, + SIG_MANUFACTURER: "CentraLite", + SIG_MODEL: "3320-L", + SIG_NODE_DESC: b"\x02@\x80\xdf\xc2RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 12, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [0, 3, 2821, 64527], + SIG_EP_OUTPUT: [3], + SIG_EP_PROFILE: 49887, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.centralite_3320_l_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.centralite_3320_l_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3320_l_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3320_l_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3320_l_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: 
["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3320_l_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.centralite_3320_l_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 7, + SIG_MANUFACTURER: "CentraLite", + SIG_MODEL: "3326-L", + SIG_NODE_DESC: b"\x02@\x80\xdf\xc2RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 263, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [0, 3, 2821, 64582], + SIG_EP_OUTPUT: [3], + SIG_EP_PROFILE: 49887, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.centralite_3326_l_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.centralite_3326_l_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3326_l_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3326_l_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3326_l_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_3326_l_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.centralite_3326_l_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 8, + SIG_MANUFACTURER: "CentraLite", + SIG_MODEL: "Motion Sensor-A", + SIG_NODE_DESC: b"\x02@\x80N\x10RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 263, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [0, 3, 1030, 2821], + SIG_EP_OUTPUT: [3], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.centralite_motion_sensor_a_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.centralite_motion_sensor_a_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_motion_sensor_a_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: 
"sensor.centralite_motion_sensor_a_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_motion_sensor_a_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.centralite_motion_sensor_a_lqi", + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-2-1030"): { + DEV_SIG_CLUSTER_HANDLERS: ["occupancy"], + DEV_SIG_ENT_MAP_CLASS: "Occupancy", + DEV_SIG_ENT_MAP_ID: ( + "binary_sensor.centralite_motion_sensor_a_occupancy" + ), + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.centralite_motion_sensor_a_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 9, + SIG_MANUFACTURER: "ClimaxTechnology", + SIG_MODEL: "PSMP5_00.00.02.02TC", + SIG_NODE_DESC: b"\x01@\x8e\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 81, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 1794], + SIG_EP_OUTPUT: [0], + SIG_EP_PROFILE: 260, + }, + 4: { + SIG_EP_TYPE: 9, + DEV_SIG_EP_ID: 4, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["4:0x0019"], + DEV_SIG_ENT_MAP: { + ("switch", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: ( + "switch.climaxtechnology_psmp5_00_00_02_02tc_switch" + ), + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.climaxtechnology_psmp5_00_00_02_02tc_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", + DEV_SIG_ENT_MAP_ID: ( + "sensor.climaxtechnology_psmp5_00_00_02_02tc_instantaneous_demand" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", + DEV_SIG_ENT_MAP_ID: ( + "sensor.climaxtechnology_psmp5_00_00_02_02tc_summation_delivered" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_psmp5_00_00_02_02tc_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_psmp5_00_00_02_02tc_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-4-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.climaxtechnology_psmp5_00_00_02_02tc_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 10, + SIG_MANUFACTURER: "ClimaxTechnology", + SIG_MODEL: "SD8SC_00.00.03.12TC", + SIG_NODE_DESC: b"\x02@\x80\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 1280, 1282], + SIG_EP_OUTPUT: [0], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + 
DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: ( + "binary_sensor.climaxtechnology_sd8sc_00_00_03_12tc_ias_zone" + ), + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.climaxtechnology_sd8sc_00_00_03_12tc_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_sd8sc_00_00_03_12tc_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_sd8sc_00_00_03_12tc_lqi", + }, + ("select", "00:11:22:33:44:55:66:77-1-1282-WarningMode"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], + DEV_SIG_ENT_MAP_CLASS: "ZHADefaultToneSelectEntity", + DEV_SIG_ENT_MAP_ID: ( + "select.climaxtechnology_sd8sc_00_00_03_12tc_default_siren_tone" + ), + }, + ("select", "00:11:22:33:44:55:66:77-1-1282-SirenLevel"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], + DEV_SIG_ENT_MAP_CLASS: "ZHADefaultSirenLevelSelectEntity", + DEV_SIG_ENT_MAP_ID: ( + "select.climaxtechnology_sd8sc_00_00_03_12tc_default_siren_level" + ), + }, + ("select", "00:11:22:33:44:55:66:77-1-1282-StrobeLevel"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], + DEV_SIG_ENT_MAP_CLASS: "ZHADefaultStrobeLevelSelectEntity", + DEV_SIG_ENT_MAP_ID: ( + "select.climaxtechnology_sd8sc_00_00_03_12tc_default_strobe_level" + ), + }, + ("select", "00:11:22:33:44:55:66:77-1-1282-Strobe"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], + DEV_SIG_ENT_MAP_CLASS: "ZHADefaultStrobeSelectEntity", + DEV_SIG_ENT_MAP_ID: ( + "select.climaxtechnology_sd8sc_00_00_03_12tc_default_strobe" + ), + }, + ("siren", "00:11:22:33:44:55:66:77-1-1282"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], + DEV_SIG_ENT_MAP_CLASS: "ZHASiren", + DEV_SIG_ENT_MAP_ID: "siren.climaxtechnology_sd8sc_00_00_03_12tc_siren", + }, + }, + }, + { + DEV_SIG_DEV_NO: 11, + SIG_MANUFACTURER: "ClimaxTechnology", + SIG_MODEL: "WS15_00.00.03.03TC", + SIG_NODE_DESC: b"\x02@\x80\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 1280], + SIG_EP_OUTPUT: [0], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: ( + "binary_sensor.climaxtechnology_ws15_00_00_03_03tc_ias_zone" + ), + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.climaxtechnology_ws15_00_00_03_03tc_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_ws15_00_00_03_03tc_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_ws15_00_00_03_03tc_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 12, + SIG_MANUFACTURER: "Feibit Inc co.", + SIG_MODEL: "FB56-ZCW08KU1.1", + SIG_NODE_DESC: b"\x01@\x8e\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", + SIG_ENDPOINTS: { + 11: { + SIG_EP_TYPE: 528, + DEV_SIG_EP_ID: 11, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 
768], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49246, + }, + 13: { + SIG_EP_TYPE: 57694, + DEV_SIG_EP_ID: 13, + SIG_EP_INPUT: [4096], + SIG_EP_OUTPUT: [4096], + SIG_EP_PROFILE: 49246, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-11"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "light_color", "level"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.feibit_inc_co_fb56_zcw08ku1_1_light", + }, + ("button", "00:11:22:33:44:55:66:77-11-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.feibit_inc_co_fb56_zcw08ku1_1_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-11-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.feibit_inc_co_fb56_zcw08ku1_1_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-11-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.feibit_inc_co_fb56_zcw08ku1_1_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 13, + SIG_MANUFACTURER: "HEIMAN", + SIG_MODEL: "SmokeSensor-EM", + SIG_NODE_DESC: b"\x02@\x80\x0b\x12RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 1280, 1282], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.heiman_smokesensor_em_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.heiman_smokesensor_em_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.heiman_smokesensor_em_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.heiman_smokesensor_em_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.heiman_smokesensor_em_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.heiman_smokesensor_em_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 14, + SIG_MANUFACTURER: "Heiman", + SIG_MODEL: "CO_V16", + SIG_NODE_DESC: b"\x02@\x84\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 9, 1280], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.heiman_co_v16_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.heiman_co_v16_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + 
DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.heiman_co_v16_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.heiman_co_v16_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.heiman_co_v16_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 15, + SIG_MANUFACTURER: "Heiman", + SIG_MODEL: "WarningDevice", + SIG_NODE_DESC: b"\x01@\x8e\x0b\x12RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1027, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 4, 9, 1280, 1282], + SIG_EP_OUTPUT: [3, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("select", "00:11:22:33:44:55:66:77-1-1282-WarningMode"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], + DEV_SIG_ENT_MAP_CLASS: "ZHADefaultToneSelectEntity", + DEV_SIG_ENT_MAP_ID: "select.heiman_warningdevice_default_siren_tone", + }, + ("select", "00:11:22:33:44:55:66:77-1-1282-SirenLevel"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], + DEV_SIG_ENT_MAP_CLASS: "ZHADefaultSirenLevelSelectEntity", + DEV_SIG_ENT_MAP_ID: "select.heiman_warningdevice_default_siren_level", + }, + ("select", "00:11:22:33:44:55:66:77-1-1282-StrobeLevel"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], + DEV_SIG_ENT_MAP_CLASS: "ZHADefaultStrobeLevelSelectEntity", + DEV_SIG_ENT_MAP_ID: "select.heiman_warningdevice_default_strobe_level", + }, + ("select", "00:11:22:33:44:55:66:77-1-1282-Strobe"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], + DEV_SIG_ENT_MAP_CLASS: "ZHADefaultStrobeSelectEntity", + DEV_SIG_ENT_MAP_ID: "select.heiman_warningdevice_default_strobe", + }, + ("siren", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], + DEV_SIG_ENT_MAP_CLASS: "ZHASiren", + DEV_SIG_ENT_MAP_ID: "siren.heiman_warningdevice_siren", + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.heiman_warningdevice_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.heiman_warningdevice_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.heiman_warningdevice_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.heiman_warningdevice_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.heiman_warningdevice_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 16, + SIG_MANUFACTURER: "HiveHome.com", + SIG_MODEL: "MOT003", + SIG_NODE_DESC: b"\x02@\x809\x10PP\x00\x00\x00P\x00\x00", + SIG_ENDPOINTS: { + 6: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 6, + SIG_EP_INPUT: [0, 1, 3, 32, 1024, 1026, 1280], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["6:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-6-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", 
+ DEV_SIG_ENT_MAP_ID: "binary_sensor.hivehome_com_mot003_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-6-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.hivehome_com_mot003_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-6-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-6-1024"): { + DEV_SIG_CLUSTER_HANDLERS: ["illuminance"], + DEV_SIG_ENT_MAP_CLASS: "Illuminance", + DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_illuminance", + }, + ("sensor", "00:11:22:33:44:55:66:77-6-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-6-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-6-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-6-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.hivehome_com_mot003_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 17, + SIG_MANUFACTURER: "IKEA of Sweden", + SIG_MODEL: "TRADFRI bulb E12 WS opal 600lm", + SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00,R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 268, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 4096, 64636], + SIG_EP_OUTPUT: [5, 25, 32, 4096], + SIG_EP_PROFILE: 260, + }, + 242: { + SIG_EP_TYPE: 97, + DEV_SIG_EP_ID: 242, + SIG_EP_INPUT: [33], + SIG_EP_OUTPUT: [33], + SIG_EP_PROFILE: 41440, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: ( + "light.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_light" + ), + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_rssi" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_lqi" + ), + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 18, + SIG_MANUFACTURER: "IKEA of Sweden", + SIG_MODEL: "TRADFRI bulb E26 CWS opal 600lm", + SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 512, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 4096], + SIG_EP_OUTPUT: [5, 25, 32, 4096], + 
SIG_EP_PROFILE: 49246, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: ( + "light.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_light" + ), + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_rssi" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_lqi" + ), + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 19, + SIG_MANUFACTURER: "IKEA of Sweden", + SIG_MODEL: "TRADFRI bulb E26 W opal 1000lm", + SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 256, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 2821, 4096], + SIG_EP_OUTPUT: [5, 25, 32, 4096], + SIG_EP_PROFILE: 49246, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: ( + "light.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_light" + ), + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_rssi" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_lqi" + ), + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 20, + SIG_MANUFACTURER: "IKEA of Sweden", + SIG_MODEL: "TRADFRI bulb E26 WS opal 980lm", + SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 544, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 4096], + SIG_EP_OUTPUT: [5, 25, 32, 4096], + SIG_EP_PROFILE: 49246, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: ( + 
"light.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_light" + ), + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_rssi" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_lqi" + ), + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 21, + SIG_MANUFACTURER: "IKEA of Sweden", + SIG_MODEL: "TRADFRI bulb E26 opal 1000lm", + SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 256, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 2821, 4096], + SIG_EP_OUTPUT: [5, 25, 32, 4096], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: ( + "light.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_light" + ), + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_rssi" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_lqi" + ), + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 22, + SIG_MANUFACTURER: "IKEA of Sweden", + SIG_MODEL: "TRADFRI control outlet", + SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00,R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 266, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 64636], + SIG_EP_OUTPUT: [5, 25, 32], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("switch", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: ( + "switch.ikea_of_sweden_tradfri_control_outlet_switch" + ), + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.ikea_of_sweden_tradfri_control_outlet_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: 
"RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_control_outlet_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_control_outlet_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_control_outlet_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 23, + SIG_MANUFACTURER: "IKEA of Sweden", + SIG_MODEL: "TRADFRI motion sensor", + SIG_NODE_DESC: b"\x02@\x80|\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 2128, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 9, 2821, 4096], + SIG_EP_OUTPUT: [3, 4, 6, 25, 4096], + SIG_EP_PROFILE: 49246, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.ikea_of_sweden_tradfri_motion_sensor_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_motion_sensor_battery" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_motion_sensor_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_motion_sensor_lqi", + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-1-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Motion", + DEV_SIG_ENT_MAP_ID: ( + "binary_sensor.ikea_of_sweden_tradfri_motion_sensor_motion" + ), + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_motion_sensor_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 24, + SIG_MANUFACTURER: "IKEA of Sweden", + SIG_MODEL: "TRADFRI on/off switch", + SIG_NODE_DESC: b"\x02@\x80|\x11RR\x00\x00,R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 2080, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 9, 32, 4096, 64636], + SIG_EP_OUTPUT: [3, 4, 6, 8, 25, 258, 4096], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019", "1:0x0102"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.ikea_of_sweden_tradfri_on_off_switch_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_on_off_switch_battery" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_on_off_switch_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: 
"sensor.ikea_of_sweden_tradfri_on_off_switch_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_on_off_switch_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 25, + SIG_MANUFACTURER: "IKEA of Sweden", + SIG_MODEL: "TRADFRI remote control", + SIG_NODE_DESC: b"\x02@\x80|\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 2096, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 9, 2821, 4096], + SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 25, 4096], + SIG_EP_PROFILE: 49246, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0008", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.ikea_of_sweden_tradfri_remote_control_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_remote_control_battery" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_remote_control_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_remote_control_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_remote_control_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 26, + SIG_MANUFACTURER: "IKEA of Sweden", + SIG_MODEL: "TRADFRI signal repeater", + SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00,R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 8, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 9, 2821, 4096, 64636], + SIG_EP_OUTPUT: [25, 32, 4096], + SIG_EP_PROFILE: 260, + }, + 242: { + SIG_EP_TYPE: 97, + DEV_SIG_EP_ID: 242, + SIG_EP_INPUT: [33], + SIG_EP_OUTPUT: [33], + SIG_EP_PROFILE: 41440, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.ikea_of_sweden_tradfri_signal_repeater_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_signal_repeater_rssi" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_signal_repeater_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_signal_repeater_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 27, + SIG_MANUFACTURER: "IKEA of Sweden", + SIG_MODEL: "TRADFRI wireless dimmer", + SIG_NODE_DESC: b"\x02@\x80|\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 2064, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 9, 2821, 
4096], + SIG_EP_OUTPUT: [3, 4, 6, 8, 25, 4096], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.ikea_of_sweden_tradfri_wireless_dimmer_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_wireless_dimmer_battery" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: ( + "sensor.ikea_of_sweden_tradfri_wireless_dimmer_rssi" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_wireless_dimmer_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_wireless_dimmer_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 28, + SIG_MANUFACTURER: "Jasco Products", + SIG_MODEL: "45852", + SIG_NODE_DESC: b"\x01@\x8e$\x11R\xff\x00\x00\x00\xff\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 257, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 1794, 2821], + SIG_EP_OUTPUT: [10, 25], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 260, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [0, 3, 2821], + SIG_EP_OUTPUT: [3, 6, 8], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019", "2:0x0006", "2:0x0008"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], + DEV_SIG_ENT_MAP_CLASS: "ForceOnLight", + DEV_SIG_ENT_MAP_ID: "light.jasco_products_45852_light", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.jasco_products_45852_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", + DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45852_instantaneous_demand", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", + DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45852_summation_delivered", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45852_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45852_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.jasco_products_45852_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 29, + SIG_MANUFACTURER: "Jasco Products", + SIG_MODEL: "45856", + SIG_NODE_DESC: b"\x01@\x8e$\x11R\xff\x00\x00\x00\xff\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 256, + DEV_SIG_EP_ID: 1, + 
SIG_EP_INPUT: [0, 3, 4, 5, 6, 1794, 2821], + SIG_EP_OUTPUT: [10, 25], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 259, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [0, 3, 2821], + SIG_EP_OUTPUT: [3, 6], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019", "2:0x0006"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "ForceOnLight", + DEV_SIG_ENT_MAP_ID: "light.jasco_products_45856_light", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.jasco_products_45856_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", + DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45856_instantaneous_demand", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", + DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45856_summation_delivered", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45856_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45856_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.jasco_products_45856_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 30, + SIG_MANUFACTURER: "Jasco Products", + SIG_MODEL: "45857", + SIG_NODE_DESC: b"\x01@\x8e$\x11R\xff\x00\x00\x00\xff\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 257, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 1794, 2821], + SIG_EP_OUTPUT: [10, 25], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 260, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [0, 3, 2821], + SIG_EP_OUTPUT: [3, 6, 8], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019", "2:0x0006", "2:0x0008"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], + DEV_SIG_ENT_MAP_CLASS: "ForceOnLight", + DEV_SIG_ENT_MAP_ID: "light.jasco_products_45857_light", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.jasco_products_45857_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", + DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45857_instantaneous_demand", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", + DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45857_summation_delivered", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45857_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + 
DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45857_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.jasco_products_45857_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 31, + SIG_MANUFACTURER: "Keen Home Inc", + SIG_MODEL: "SV02-610-MP-1.3", + SIG_NODE_DESC: b"\x02@\x80[\x11RR\x00\x00*R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 3, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 8, 32, 1026, 1027, 2821, 64513, 64514], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.keen_home_inc_sv02_610_mp_1_3_identify", + }, + ("cover", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["level", "on_off"], + DEV_SIG_ENT_MAP_CLASS: "KeenVent", + DEV_SIG_ENT_MAP_ID: "cover.keen_home_inc_sv02_610_mp_1_3_keen_vent", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1027"): { + DEV_SIG_CLUSTER_HANDLERS: ["pressure"], + DEV_SIG_ENT_MAP_CLASS: "Pressure", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_pressure", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.keen_home_inc_sv02_610_mp_1_3_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 32, + SIG_MANUFACTURER: "Keen Home Inc", + SIG_MODEL: "SV02-612-MP-1.2", + SIG_NODE_DESC: b"\x02@\x80[\x11RR\x00\x00*R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 3, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 8, 32, 1026, 1027, 2821, 64513, 64514], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.keen_home_inc_sv02_612_mp_1_2_identify", + }, + ("cover", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["level", "on_off"], + DEV_SIG_ENT_MAP_CLASS: "KeenVent", + DEV_SIG_ENT_MAP_ID: "cover.keen_home_inc_sv02_612_mp_1_2_keen_vent", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1027"): { + DEV_SIG_CLUSTER_HANDLERS: ["pressure"], + DEV_SIG_ENT_MAP_CLASS: "Pressure", + DEV_SIG_ENT_MAP_ID: 
"sensor.keen_home_inc_sv02_612_mp_1_2_pressure", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.keen_home_inc_sv02_612_mp_1_2_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 33, + SIG_MANUFACTURER: "Keen Home Inc", + SIG_MODEL: "SV02-612-MP-1.3", + SIG_NODE_DESC: b"\x02@\x80[\x11RR\x00\x00*R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 3, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 8, 32, 1026, 1027, 2821, 64513, 64514], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.keen_home_inc_sv02_612_mp_1_3_identify", + }, + ("cover", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["level", "on_off"], + DEV_SIG_ENT_MAP_CLASS: "KeenVent", + DEV_SIG_ENT_MAP_ID: "cover.keen_home_inc_sv02_612_mp_1_3_keen_vent", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1027"): { + DEV_SIG_CLUSTER_HANDLERS: ["pressure"], + DEV_SIG_ENT_MAP_CLASS: "Pressure", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_pressure", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.keen_home_inc_sv02_612_mp_1_3_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 34, + SIG_MANUFACTURER: "King Of Fans, Inc.", + SIG_MODEL: "HBUniversalCFRemote", + SIG_NODE_DESC: b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 257, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 514], + SIG_EP_OUTPUT: [3, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: 
"light.king_of_fans_inc_hbuniversalcfremote_light", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: ( + "button.king_of_fans_inc_hbuniversalcfremote_identify" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.king_of_fans_inc_hbuniversalcfremote_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.king_of_fans_inc_hbuniversalcfremote_lqi", + }, + ("fan", "00:11:22:33:44:55:66:77-1-514"): { + DEV_SIG_CLUSTER_HANDLERS: ["fan"], + DEV_SIG_ENT_MAP_CLASS: "KofFan", + DEV_SIG_ENT_MAP_ID: "fan.king_of_fans_inc_hbuniversalcfremote_fan", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.king_of_fans_inc_hbuniversalcfremote_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 35, + SIG_MANUFACTURER: "LDS", + SIG_MODEL: "ZBT-CCTSwitch-D0001", + SIG_NODE_DESC: b"\x02@\x80h\x11RR\x00\x00,R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 2048, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 4096, 64769], + SIG_EP_OUTPUT: [3, 4, 6, 8, 25, 768, 4096], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019", "1:0x0300"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lds_zbt_cctswitch_d0001_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lds_zbt_cctswitch_d0001_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lds_zbt_cctswitch_d0001_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lds_zbt_cctswitch_d0001_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lds_zbt_cctswitch_d0001_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 36, + SIG_MANUFACTURER: "LEDVANCE", + SIG_MODEL: "A19 RGBW", + SIG_NODE_DESC: b"\x01@\x8e\x89\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 258, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 64513], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.ledvance_a19_rgbw_light", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.ledvance_a19_rgbw_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: 
"sensor.ledvance_a19_rgbw_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ledvance_a19_rgbw_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ledvance_a19_rgbw_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 37, + SIG_MANUFACTURER: "LEDVANCE", + SIG_MODEL: "FLEX RGBW", + SIG_NODE_DESC: b"\x01@\x8e\x89\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 258, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 64513], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.ledvance_flex_rgbw_light", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.ledvance_flex_rgbw_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ledvance_flex_rgbw_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ledvance_flex_rgbw_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ledvance_flex_rgbw_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 38, + SIG_MANUFACTURER: "LEDVANCE", + SIG_MODEL: "PLUG", + SIG_NODE_DESC: b"\x01@\x8e\x89\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 81, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 2821, 64513, 64520], + SIG_EP_OUTPUT: [3, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("switch", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.ledvance_plug_switch", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.ledvance_plug_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ledvance_plug_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ledvance_plug_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ledvance_plug_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 39, + SIG_MANUFACTURER: "LEDVANCE", + SIG_MODEL: "RT RGBW", + SIG_NODE_DESC: b"\x01@\x8e\x89\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 258, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 64513], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: 
["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.ledvance_rt_rgbw_light", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.ledvance_rt_rgbw_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ledvance_rt_rgbw_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.ledvance_rt_rgbw_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.ledvance_rt_rgbw_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 40, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.plug.maus01", + SIG_NODE_DESC: b"\x01@\x8e_\x11\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 81, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 2, 3, 4, 5, 6, 10, 16, 2820], + SIG_EP_OUTPUT: [10, 25], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 9, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [12], + SIG_EP_OUTPUT: [4, 12], + SIG_EP_PROFILE: 260, + }, + 3: { + SIG_EP_TYPE: 83, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [12], + SIG_EP_OUTPUT: [12], + SIG_EP_PROFILE: 260, + }, + 100: { + SIG_EP_TYPE: 263, + DEV_SIG_EP_ID: 100, + SIG_EP_INPUT: [15], + SIG_EP_OUTPUT: [4, 15], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("switch", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.lumi_lumi_plug_maus01_switch", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2"): { + DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], + DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_device_temperature", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_plug_maus01_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_power", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_voltage", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_lqi", + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-100-15"): { + DEV_SIG_CLUSTER_HANDLERS: ["binary_input"], + DEV_SIG_ENT_MAP_CLASS: "BinaryInput", + DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_plug_maus01_binary_input", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): 
{ + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_summation_delivered", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_plug_maus01_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 41, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.relay.c2acn01", + SIG_NODE_DESC: b"\x01@\x8e7\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 257, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 2, 3, 4, 5, 6, 10, 12, 16, 2820], + SIG_EP_OUTPUT: [10, 25], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 257, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [4, 5, 6, 16], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_relay_c2acn01_light", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2"): { + DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], + DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_device_temperature", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_relay_c2acn01_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_power", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_apparent_power", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_current", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_voltage", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_ac_frequency", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_power_factor", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_summation_delivered", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_instantaneous_demand", + }, + ("sensor", 
"00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_lqi", + }, + ("light", "00:11:22:33:44:55:66:77-2"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_relay_c2acn01_light_2", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_relay_c2acn01_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 42, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.remote.b186acn01", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 24321, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 18, 25, 65535], + SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25, 65535], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 24322, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [3, 18], + SIG_EP_OUTPUT: [3, 4, 5, 18], + SIG_EP_PROFILE: 260, + }, + 3: { + SIG_EP_TYPE: 24323, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [3, 18], + SIG_EP_OUTPUT: [3, 4, 5, 12, 18], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b186acn01_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b186acn01_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b186acn01_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b186acn01_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_remote_b186acn01_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 43, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.remote.b286acn01", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 24321, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 18, 25, 65535], + SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25, 65535], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 24322, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [3, 18], + SIG_EP_OUTPUT: [3, 4, 5, 18], + SIG_EP_PROFILE: 260, + }, + 3: { + SIG_EP_TYPE: 24323, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [3, 18], + SIG_EP_OUTPUT: [3, 4, 5, 12, 18], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b286acn01_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: 
["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286acn01_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286acn01_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286acn01_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_remote_b286acn01_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 44, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.remote.b286opcn01", + SIG_NODE_DESC: b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 261, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3], + SIG_EP_OUTPUT: [3, 6, 8, 768], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: -1, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: -1, + }, + 3: { + SIG_EP_TYPE: -1, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: -1, + }, + 4: { + SIG_EP_TYPE: -1, + DEV_SIG_EP_ID: 4, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: -1, + }, + 5: { + SIG_EP_TYPE: -1, + DEV_SIG_EP_ID: 5, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: -1, + }, + 6: { + SIG_EP_TYPE: -1, + DEV_SIG_EP_ID: 6, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: -1, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0300", "2:0x0006"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b286opcn01_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286opcn01_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286opcn01_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286opcn01_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 45, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.remote.b486opcn01", + SIG_NODE_DESC: b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 261, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3], + SIG_EP_OUTPUT: [3, 6, 8, 768], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 259, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [3], + SIG_EP_OUTPUT: [3, 6], + SIG_EP_PROFILE: 260, + }, + 3: { + SIG_EP_TYPE: -1, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: -1, + }, + 4: { + SIG_EP_TYPE: -1, + DEV_SIG_EP_ID: 4, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: -1, + }, + 5: { + SIG_EP_TYPE: -1, + DEV_SIG_EP_ID: 5, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: -1, + }, + 6: { + SIG_EP_TYPE: -1, + DEV_SIG_EP_ID: 6, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: -1, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0300", "2:0x0006"], + DEV_SIG_ENT_MAP: { + ("button", 
"00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b486opcn01_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b486opcn01_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b486opcn01_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b486opcn01_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 46, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.remote.b686opcn01", + SIG_NODE_DESC: b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 261, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3], + SIG_EP_OUTPUT: [3, 6, 8, 768], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0300", "2:0x0006"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b686opcn01_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 47, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.remote.b686opcn01", + SIG_NODE_DESC: b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 261, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3], + SIG_EP_OUTPUT: [3, 6, 8, 768], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 259, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [3], + SIG_EP_OUTPUT: [3, 6], + SIG_EP_PROFILE: 260, + }, + 3: { + SIG_EP_TYPE: None, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: None, + }, + 4: { + SIG_EP_TYPE: None, + DEV_SIG_EP_ID: 4, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: None, + }, + 5: { + SIG_EP_TYPE: None, + DEV_SIG_EP_ID: 5, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: None, + }, + 6: { + SIG_EP_TYPE: None, + DEV_SIG_EP_ID: 6, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: None, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0300", "2:0x0006"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b686opcn01_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: 
"RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 48, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.router", + SIG_NODE_DESC: b"\x01@\x8e_\x11P\xa0\x00\x00\x00\xa0\x00\x00", + SIG_ENDPOINTS: { + 8: { + SIG_EP_TYPE: 256, + DEV_SIG_EP_ID: 8, + SIG_EP_INPUT: [0, 6], + SIG_EP_OUTPUT: [0, 6], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["8:0x0006"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-8"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_router_light", + }, + ("sensor", "00:11:22:33:44:55:66:77-8-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-8-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_lqi", + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-8-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Opening", + DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_router_opening", + }, + }, + }, + { + DEV_SIG_DEV_NO: 49, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.router", + SIG_NODE_DESC: b"\x01@\x8e_\x11P\xa0\x00\x00\x00\xa0\x00\x00", + SIG_ENDPOINTS: { + 8: { + SIG_EP_TYPE: 256, + DEV_SIG_EP_ID: 8, + SIG_EP_INPUT: [0, 6, 11, 17], + SIG_EP_OUTPUT: [0, 6], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["8:0x0006"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-8"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_router_light", + }, + ("sensor", "00:11:22:33:44:55:66:77-8-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-8-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_lqi", + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-8-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Opening", + DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_router_opening", + }, + }, + }, + { + DEV_SIG_DEV_NO: 50, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.router", + SIG_NODE_DESC: b"\x01@\x8e_\x11P\xa0\x00\x00\x00\xa0\x00\x00", + SIG_ENDPOINTS: { + 8: { + SIG_EP_TYPE: 256, + DEV_SIG_EP_ID: 8, + SIG_EP_INPUT: [0, 6, 17], + SIG_EP_OUTPUT: [0, 6], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["8:0x0006"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-8"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_router_light", + }, + ("sensor", "00:11:22:33:44:55:66:77-8-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-8-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_lqi", + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-8-6"): { + DEV_SIG_CLUSTER_HANDLERS: 
["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Opening", + DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_router_opening", + }, + }, + }, + { + DEV_SIG_DEV_NO: 51, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.sen_ill.mgl01", + SIG_NODE_DESC: b"\x02@\x84n\x12\x7fd\x00\x00,d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 262, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 1024], + SIG_EP_OUTPUT: [3], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: { + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sen_ill_mgl01_battery", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sen_ill_mgl01_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1024"): { + DEV_SIG_CLUSTER_HANDLERS: ["illuminance"], + DEV_SIG_ENT_MAP_CLASS: "Illuminance", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sen_ill_mgl01_illuminance", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sen_ill_mgl01_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sen_ill_mgl01_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 52, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.sensor_86sw1", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 24321, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 18, 25, 65535], + SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25, 65535], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 24322, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [3, 18], + SIG_EP_OUTPUT: [3, 4, 5, 18], + SIG_EP_PROFILE: 260, + }, + 3: { + SIG_EP_TYPE: 24323, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [3, 18], + SIG_EP_OUTPUT: [3, 4, 5, 12, 18], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_86sw1_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_86sw1_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_86sw1_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_86sw1_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_86sw1_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 53, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.sensor_cube.aqgl01", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 28417, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 25], + SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25], + SIG_EP_PROFILE: 260, + }, + 2: { + 
SIG_EP_TYPE: 28418, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [3, 18], + SIG_EP_OUTPUT: [3, 4, 5, 18], + SIG_EP_PROFILE: 260, + }, + 3: { + SIG_EP_TYPE: 28419, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [3, 12], + SIG_EP_OUTPUT: [3, 4, 5, 12], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_cube_aqgl01_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_cube_aqgl01_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_cube_aqgl01_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_cube_aqgl01_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_cube_aqgl01_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 54, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.sensor_ht", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 24322, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 25, 1026, 1029, 65535], + SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25, 65535], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 24322, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [3], + SIG_EP_OUTPUT: [3, 4, 5, 18], + SIG_EP_PROFILE: 260, + }, + 3: { + SIG_EP_TYPE: 24323, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [3], + SIG_EP_OUTPUT: [3, 4, 5, 12], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_ht_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_lqi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1029"): { + DEV_SIG_CLUSTER_HANDLERS: ["humidity"], + DEV_SIG_ENT_MAP_CLASS: "Humidity", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_humidity", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_ht_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 55, + 
SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.sensor_magnet", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 2128, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 25, 65535], + SIG_EP_OUTPUT: [0, 3, 4, 5, 6, 8, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0008", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_magnet_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_lqi", + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-1-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Opening", + DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_magnet_opening", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_magnet_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 56, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.sensor_magnet.aq2", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 24321, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 65535], + SIG_EP_OUTPUT: [0, 4, 6, 65535], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_magnet_aq2_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_aq2_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_aq2_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_aq2_lqi", + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-1-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Opening", + DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_magnet_aq2_opening", + }, + }, + }, + { + DEV_SIG_DEV_NO: 57, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.sensor_motion.aq2", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 263, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 1024, 1030, 1280, 65535], + SIG_EP_OUTPUT: [0, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1030"): { + DEV_SIG_CLUSTER_HANDLERS: ["occupancy"], + 
DEV_SIG_ENT_MAP_CLASS: "Occupancy", + DEV_SIG_ENT_MAP_ID: ( + "binary_sensor.lumi_lumi_sensor_motion_aq2_occupancy" + ), + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_motion_aq2_motion", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_motion_aq2_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_motion_aq2_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1024"): { + DEV_SIG_CLUSTER_HANDLERS: ["illuminance"], + DEV_SIG_ENT_MAP_CLASS: "Illuminance", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_motion_aq2_illuminance", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2"): { + DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], + DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", + DEV_SIG_ENT_MAP_ID: ( + "sensor.lumi_lumi_sensor_motion_aq2_device_temperature" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_motion_aq2_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_motion_aq2_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_motion_aq2_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 58, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.sensor_smoke", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 12, 18, 1280], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_smoke_smoke", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_smoke_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_smoke_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2"): { + DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], + DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", + DEV_SIG_ENT_MAP_ID: ( + "sensor.lumi_lumi_sensor_smoke_device_temperature" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_smoke_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_smoke_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: 
"ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_smoke_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 59, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.sensor_switch", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 6, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3], + SIG_EP_OUTPUT: [0, 4, 5, 6, 8, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0008", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_switch_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_switch_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 60, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.sensor_switch.aq2", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 6, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 65535], + SIG_EP_OUTPUT: [0, 4, 6, 65535], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006"], + DEV_SIG_ENT_MAP: { + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq2_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq2_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq2_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 61, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.sensor_switch.aq3", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 6, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 18], + SIG_EP_OUTPUT: [0, 6], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006"], + DEV_SIG_ENT_MAP: { + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq3_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq3_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq3_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 62, + SIG_MANUFACTURER: "LUMI", 
+ SIG_MODEL: "lumi.sensor_wleak.aq1", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 2, 3, 1280], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_wleak_aq1_ias_zone", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2"): { + DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], + DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", + DEV_SIG_ENT_MAP_ID: ( + "sensor.lumi_lumi_sensor_wleak_aq1_device_temperature" + ), + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_wleak_aq1_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_wleak_aq1_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_wleak_aq1_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_wleak_aq1_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_wleak_aq1_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 63, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.vibration.aq1", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + PROFILE_ID: zha.PROFILE_ID, + DEVICE_TYPE: zha.DeviceType.DOOR_LOCK, + INPUT_CLUSTERS: [ + Basic.cluster_id, + Identify.cluster_id, + Ota.cluster_id, + DoorLock.cluster_id, + ], + OUTPUT_CLUSTERS: [ + Basic.cluster_id, + Identify.cluster_id, + Groups.cluster_id, + Scenes.cluster_id, + Ota.cluster_id, + DoorLock.cluster_id, + ], + }, + 2: { + PROFILE_ID: zha.PROFILE_ID, + DEVICE_TYPE: 0x5F02, + INPUT_CLUSTERS: [Identify.cluster_id, MultistateInput.cluster_id], + OUTPUT_CLUSTERS: [ + Identify.cluster_id, + Groups.cluster_id, + Scenes.cluster_id, + MultistateInput.cluster_id, + ], + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_vibration_aq1_vibration", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_vibration_aq1_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_vibration_aq1_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_vibration_aq1_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + 
DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_vibration_aq1_lqi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2"): { + DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], + DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_vibration_aq1_device_temperature", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_vibration_aq1_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 64, + SIG_MANUFACTURER: "LUMI", + SIG_MODEL: "lumi.weather", + SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 24321, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 1026, 1027, 1029, 65535], + SIG_EP_OUTPUT: [0, 4, 65535], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_weather_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1027"): { + DEV_SIG_CLUSTER_HANDLERS: ["pressure"], + DEV_SIG_ENT_MAP_CLASS: "Pressure", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_pressure", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_lqi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1029"): { + DEV_SIG_CLUSTER_HANDLERS: ["humidity"], + DEV_SIG_ENT_MAP_CLASS: "Humidity", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_humidity", + }, + }, + }, + { + DEV_SIG_DEV_NO: 65, + SIG_MANUFACTURER: "NYCE", + SIG_MODEL: "3010", + SIG_NODE_DESC: b"\x02@\x80\xb9\x10RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 1280], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.nyce_3010_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.nyce_3010_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.nyce_3010_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.nyce_3010_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + 
DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.nyce_3010_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 66, + SIG_MANUFACTURER: "NYCE", + SIG_MODEL: "3014", + SIG_NODE_DESC: b"\x02@\x80\xb9\x10RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 1280], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.nyce_3014_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.nyce_3014_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.nyce_3014_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.nyce_3014_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.nyce_3014_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 67, + SIG_MANUFACTURER: None, + SIG_MODEL: None, + SIG_NODE_DESC: b"\x10@\x0f5\x11Y=\x00@\x00=\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 5, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [10, 25], + SIG_EP_OUTPUT: [1280], + SIG_EP_PROFILE: 260, + }, + 242: { + SIG_EP_TYPE: 100, + DEV_SIG_EP_ID: 242, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [33], + SIG_EP_PROFILE: 41440, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: {}, + }, + { + DEV_SIG_DEV_NO: 68, + SIG_MANUFACTURER: None, + SIG_MODEL: None, + SIG_NODE_DESC: b"\x00@\x8f\xcd\xabR\x80\x00\x00\x00\x80\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 48879, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [], + SIG_EP_OUTPUT: [1280], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: {}, + }, + { + DEV_SIG_DEV_NO: 69, + SIG_MANUFACTURER: "OSRAM", + SIG_MODEL: "LIGHTIFY A19 RGBW", + SIG_NODE_DESC: b"\x01@\x8e\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", + SIG_ENDPOINTS: { + 3: { + SIG_EP_TYPE: 258, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 64527], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["3:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "light_color", "level"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.osram_lightify_a19_rgbw_light", + }, + ("button", "00:11:22:33:44:55:66:77-3-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.osram_lightify_a19_rgbw_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-3-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_a19_rgbw_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-3-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_a19_rgbw_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-3-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + 
DEV_SIG_ENT_MAP_ID: "update.osram_lightify_a19_rgbw_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 70, + SIG_MANUFACTURER: "OSRAM", + SIG_MODEL: "LIGHTIFY Dimming Switch", + SIG_NODE_DESC: b"\x02@\x80\x0c\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 2821], + SIG_EP_OUTPUT: [3, 6, 8, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.osram_lightify_dimming_switch_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_dimming_switch_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_dimming_switch_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_dimming_switch_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.osram_lightify_dimming_switch_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 71, + SIG_MANUFACTURER: "OSRAM", + SIG_MODEL: "LIGHTIFY Flex RGBW", + SIG_NODE_DESC: b"\x19@\x8e\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", + SIG_ENDPOINTS: { + 3: { + SIG_EP_TYPE: 258, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 64527], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["3:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "light_color", "level"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.osram_lightify_flex_rgbw_light", + }, + ("button", "00:11:22:33:44:55:66:77-3-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.osram_lightify_flex_rgbw_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-3-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_flex_rgbw_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-3-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_flex_rgbw_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-3-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.osram_lightify_flex_rgbw_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 72, + SIG_MANUFACTURER: "OSRAM", + SIG_MODEL: "LIGHTIFY RT Tunable White", + SIG_NODE_DESC: b"\x01@\x8e\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", + SIG_ENDPOINTS: { + 3: { + SIG_EP_TYPE: 258, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2820, 64527], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["3:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "light_color", "level"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: 
"light.osram_lightify_rt_tunable_white_light", + }, + ("button", "00:11:22:33:44:55:66:77-3-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.osram_lightify_rt_tunable_white_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-3-2820"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", + DEV_SIG_ENT_MAP_ID: ("sensor.osram_lightify_rt_tunable_white_power"), + }, + ("sensor", "00:11:22:33:44:55:66:77-3-2820-apparent_power"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", + DEV_SIG_ENT_MAP_ID: ( + "sensor.osram_lightify_rt_tunable_white_apparent_power" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-3-2820-rms_current"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", + DEV_SIG_ENT_MAP_ID: ("sensor.osram_lightify_rt_tunable_white_current"), + }, + ("sensor", "00:11:22:33:44:55:66:77-3-2820-rms_voltage"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", + DEV_SIG_ENT_MAP_ID: ("sensor.osram_lightify_rt_tunable_white_voltage"), + }, + ("sensor", "00:11:22:33:44:55:66:77-3-2820-ac_frequency"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", + DEV_SIG_ENT_MAP_ID: ( + "sensor.osram_lightify_rt_tunable_white_ac_frequency" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-3-2820-power_factor"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", + DEV_SIG_ENT_MAP_ID: ( + "sensor.osram_lightify_rt_tunable_white_power_factor" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-3-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_rt_tunable_white_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-3-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_rt_tunable_white_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-3-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.osram_lightify_rt_tunable_white_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 73, + SIG_MANUFACTURER: "OSRAM", + SIG_MODEL: "Plug 01", + SIG_NODE_DESC: b"\x01@\x8e\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", + SIG_ENDPOINTS: { + 3: { + SIG_EP_TYPE: 16, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 2820, 4096, 64527], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 49246, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["3:0x0019"], + DEV_SIG_ENT_MAP: { + ("switch", "00:11:22:33:44:55:66:77-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.osram_plug_01_switch", + }, + ("button", "00:11:22:33:44:55:66:77-3-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.osram_plug_01_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-3-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.osram_plug_01_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-3-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", 
+ DEV_SIG_ENT_MAP_ID: "sensor.osram_plug_01_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-3-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.osram_plug_01_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 74, + SIG_MANUFACTURER: "OSRAM", + SIG_MODEL: "Switch 4x-LIGHTIFY", + SIG_NODE_DESC: b"\x02@\x80\x0c\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 2064, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 32, 4096, 64768], + SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 25, 768, 4096], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 2064, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [0, 4096, 64768], + SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], + SIG_EP_PROFILE: 260, + }, + 3: { + SIG_EP_TYPE: 2064, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [0, 4096, 64768], + SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], + SIG_EP_PROFILE: 260, + }, + 4: { + SIG_EP_TYPE: 2064, + DEV_SIG_EP_ID: 4, + SIG_EP_INPUT: [0, 4096, 64768], + SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], + SIG_EP_PROFILE: 260, + }, + 5: { + SIG_EP_TYPE: 2064, + DEV_SIG_EP_ID: 5, + SIG_EP_INPUT: [0, 4096, 64768], + SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], + SIG_EP_PROFILE: 260, + }, + 6: { + SIG_EP_TYPE: 2064, + DEV_SIG_EP_ID: 6, + SIG_EP_INPUT: [0, 4096, 64768], + SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [ + "1:0x0005", + "1:0x0006", + "1:0x0008", + "1:0x0019", + "1:0x0300", + "2:0x0005", + "2:0x0006", + "2:0x0008", + "2:0x0300", + "3:0x0005", + "3:0x0006", + "3:0x0008", + "3:0x0300", + "4:0x0005", + "4:0x0006", + "4:0x0008", + "4:0x0300", + "5:0x0005", + "5:0x0006", + "5:0x0008", + "5:0x0300", + "6:0x0005", + "6:0x0006", + "6:0x0008", + "6:0x0300", + ], + DEV_SIG_ENT_MAP: { + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.osram_switch_4x_lightify_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.osram_switch_4x_lightify_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.osram_switch_4x_lightify_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.osram_switch_4x_lightify_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 75, + SIG_MANUFACTURER: "Philips", + SIG_MODEL: "RWL020", + SIG_NODE_DESC: b"\x02@\x80\x0b\x10G-\x00\x00\x00-\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 2096, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0], + SIG_EP_OUTPUT: [0, 3, 4, 5, 6, 8], + SIG_EP_PROFILE: 49246, + }, + 2: { + SIG_EP_TYPE: 12, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [0, 1, 3, 15, 64512], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0008", "2:0x0019"], + DEV_SIG_ENT_MAP: { + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.philips_rwl020_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.philips_rwl020_lqi", + }, + ("binary_sensor", 
"00:11:22:33:44:55:66:77-2-15"): { + DEV_SIG_CLUSTER_HANDLERS: ["binary_input"], + DEV_SIG_ENT_MAP_CLASS: "BinaryInput", + DEV_SIG_ENT_MAP_ID: "binary_sensor.philips_rwl020_binary_input", + }, + ("button", "00:11:22:33:44:55:66:77-2-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.philips_rwl020_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-2-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.philips_rwl020_battery", + }, + ("update", "00:11:22:33:44:55:66:77-2-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.philips_rwl020_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 76, + SIG_MANUFACTURER: "Samjin", + SIG_MODEL: "button", + SIG_NODE_DESC: b"\x02@\x80A\x12RR\x00\x00,R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], + SIG_EP_OUTPUT: [3, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.samjin_button_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.samjin_button_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.samjin_button_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.samjin_button_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.samjin_button_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.samjin_button_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.samjin_button_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 77, + SIG_MANUFACTURER: "Samjin", + SIG_MODEL: "multi", + SIG_NODE_DESC: b"\x02@\x80A\x12RR\x00\x00,R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 64514], + SIG_EP_OUTPUT: [3, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.samjin_multi_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.samjin_multi_identify", + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-1-64514"): { + DEV_SIG_CLUSTER_HANDLERS: ["accelerometer"], + DEV_SIG_ENT_MAP_CLASS: "Accelerometer", + DEV_SIG_ENT_MAP_ID: "binary_sensor.samjin_multi_accelerometer", + }, + ("sensor", 
"00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.samjin_multi_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.samjin_multi_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.samjin_multi_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.samjin_multi_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.samjin_multi_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 78, + SIG_MANUFACTURER: "Samjin", + SIG_MODEL: "water", + SIG_NODE_DESC: b"\x02@\x80A\x12RR\x00\x00,R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280], + SIG_EP_OUTPUT: [3, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.samjin_water_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.samjin_water_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.samjin_water_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.samjin_water_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.samjin_water_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.samjin_water_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.samjin_water_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 79, + SIG_MANUFACTURER: "Securifi Ltd.", + SIG_MODEL: None, + SIG_NODE_DESC: b"\x01@\x8e\x02\x10RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 0, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 2820, 2821], + SIG_EP_OUTPUT: [0, 1, 3, 4, 5, 6, 25, 2820, 2821], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.securifi_ltd_unk_model_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", + DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_power", + }, + 
("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", + DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_apparent_power", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", + DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_current", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", + DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_voltage", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", + DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_ac_frequency", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", + DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_power_factor", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_lqi", + }, + ("switch", "00:11:22:33:44:55:66:77-1-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.securifi_ltd_unk_model_switch", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.securifi_ltd_unk_model_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 80, + SIG_MANUFACTURER: "Sercomm Corp.", + SIG_MODEL: "SZ-DWS04N_SF", + SIG_NODE_DESC: b"\x02@\x801\x11R\xff\x00\x00\x00\xff\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], + SIG_EP_OUTPUT: [3, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.sercomm_corp_sz_dws04n_sf_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.sercomm_corp_sz_dws04n_sf_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_dws04n_sf_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_dws04n_sf_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_dws04n_sf_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + 
DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_dws04n_sf_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.sercomm_corp_sz_dws04n_sf_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 81, + SIG_MANUFACTURER: "Sercomm Corp.", + SIG_MODEL: "SZ-ESW01", + SIG_NODE_DESC: b"\x01@\x8e1\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 256, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 1794, 2820, 2821], + SIG_EP_OUTPUT: [3, 10, 25, 2821], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 259, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [0, 1, 3], + SIG_EP_OUTPUT: [3, 6], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019", "2:0x0006"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.sercomm_corp_sz_esw01_light", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.sercomm_corp_sz_esw01_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_power", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_apparent_power", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_current", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_voltage", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_ac_frequency", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_power_factor", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_instantaneous_demand", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_summation_delivered", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + 
DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.sercomm_corp_sz_esw01_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 82, + SIG_MANUFACTURER: "Sercomm Corp.", + SIG_MODEL: "SZ-PIR04", + SIG_NODE_DESC: b"\x02@\x801\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 1024, 1026, 1280, 2821], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.sercomm_corp_sz_pir04_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.sercomm_corp_sz_pir04_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1024"): { + DEV_SIG_CLUSTER_HANDLERS: ["illuminance"], + DEV_SIG_ENT_MAP_CLASS: "Illuminance", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_illuminance", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.sercomm_corp_sz_pir04_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 83, + SIG_MANUFACTURER: "Sinope Technologies", + SIG_MODEL: "RM3250ZB", + SIG_NODE_DESC: b"\x11@\x8e\x9c\x11G+\x00\x00*+\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 2, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 2820, 2821, 65281], + SIG_EP_OUTPUT: [3, 4, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.sinope_technologies_rm3250zb_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_power", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", + DEV_SIG_ENT_MAP_ID: ( + "sensor.sinope_technologies_rm3250zb_apparent_power" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { + 
DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_current", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_voltage", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_ac_frequency", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_power_factor", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_lqi", + }, + ("switch", "00:11:22:33:44:55:66:77-1-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.sinope_technologies_rm3250zb_switch", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.sinope_technologies_rm3250zb_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 84, + SIG_MANUFACTURER: "Sinope Technologies", + SIG_MODEL: "TH1123ZB", + SIG_NODE_DESC: b"\x12@\x8c\x9c\x11G+\x00\x00\x00+\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 769, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 513, 516, 1026, 2820, 2821, 65281], + SIG_EP_OUTPUT: [25, 65281], + SIG_EP_PROFILE: 260, + }, + 196: { + SIG_EP_TYPE: 769, + DEV_SIG_EP_ID: 196, + SIG_EP_INPUT: [1], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49757, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.sinope_technologies_th1123zb_identify", + }, + ("climate", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: [ + "thermostat", + "sinope_manufacturer_specific", + ], + DEV_SIG_ENT_MAP_CLASS: "SinopeTechnologiesThermostat", + DEV_SIG_ENT_MAP_ID: "climate.sinope_technologies_th1123zb_thermostat", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_power", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", + DEV_SIG_ENT_MAP_ID: ( + "sensor.sinope_technologies_th1123zb_apparent_power" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", + DEV_SIG_ENT_MAP_ID: 
"sensor.sinope_technologies_th1123zb_current", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_voltage", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_ac_frequency", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_power_factor", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_lqi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-513-hvac_action"): { + DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], + DEV_SIG_ENT_MAP_CLASS: "SinopeHVACAction", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_hvac_action", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-513-pi_heating_demand"): { + DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], + DEV_SIG_ENT_MAP_CLASS: "PiHeatingDemand", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_pi_heating_demand", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-513-setpoint_change_source"): { + DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], + DEV_SIG_ENT_MAP_CLASS: "SetpointChangeSource", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_setpoint_change_source", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.sinope_technologies_th1123zb_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 85, + SIG_MANUFACTURER: "Sinope Technologies", + SIG_MODEL: "TH1124ZB", + SIG_NODE_DESC: b"\x11@\x8e\x9c\x11G+\x00\x00\x00+\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 769, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 513, 516, 1026, 2820, 2821, 65281], + SIG_EP_OUTPUT: [25, 65281], + SIG_EP_PROFILE: 260, + }, + 196: { + SIG_EP_TYPE: 769, + DEV_SIG_EP_ID: 196, + SIG_EP_INPUT: [1], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49757, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.sinope_technologies_th1124zb_identify", + }, + ("climate", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: [ + "thermostat", + "sinope_manufacturer_specific", + ], + DEV_SIG_ENT_MAP_CLASS: "SinopeTechnologiesThermostat", + DEV_SIG_ENT_MAP_ID: "climate.sinope_technologies_th1124zb_thermostat", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: 
"PolledElectricalMeasurement", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_power", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", + DEV_SIG_ENT_MAP_ID: ( + "sensor.sinope_technologies_th1124zb_apparent_power" + ), + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_current", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_voltage", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_ac_frequency", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_power_factor", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_lqi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-513-hvac_action"): { + DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], + DEV_SIG_ENT_MAP_CLASS: "SinopeHVACAction", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_hvac_action", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-513-pi_heating_demand"): { + DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], + DEV_SIG_ENT_MAP_CLASS: "PiHeatingDemand", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_pi_heating_demand", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-513-setpoint_change_source"): { + DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], + DEV_SIG_ENT_MAP_CLASS: "SetpointChangeSource", + DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_setpoint_change_source", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.sinope_technologies_th1124zb_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 86, + SIG_MANUFACTURER: "SmartThings", + SIG_MODEL: "outletv4", + SIG_NODE_DESC: b"\x01@\x8e\n\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 2, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 9, 15, 2820], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-15"): { + DEV_SIG_CLUSTER_HANDLERS: ["binary_input"], + DEV_SIG_ENT_MAP_CLASS: "BinaryInput", + DEV_SIG_ENT_MAP_ID: 
"binary_sensor.smartthings_outletv4_binary_input", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.smartthings_outletv4_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurement", + DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_power", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", + DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_apparent_power", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", + DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_current", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", + DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_voltage", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", + DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_ac_frequency", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { + DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], + DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", + DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_power_factor", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_lqi", + }, + ("switch", "00:11:22:33:44:55:66:77-1-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.smartthings_outletv4_switch", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.smartthings_outletv4_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 87, + SIG_MANUFACTURER: "SmartThings", + SIG_MODEL: "tagv4", + SIG_NODE_DESC: b"\x02@\x80\n\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 32768, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 15, 32], + SIG_EP_OUTPUT: [3, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("device_tracker", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "ZHADeviceScannerEntity", + DEV_SIG_ENT_MAP_ID: "device_tracker.smartthings_tagv4_device_scanner", + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-1-15"): { + DEV_SIG_CLUSTER_HANDLERS: ["binary_input"], + DEV_SIG_ENT_MAP_CLASS: "BinaryInput", + DEV_SIG_ENT_MAP_ID: "binary_sensor.smartthings_tagv4_binary_input", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.smartthings_tagv4_identify", + }, + ("sensor", 
"00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.smartthings_tagv4_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.smartthings_tagv4_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.smartthings_tagv4_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 88, + SIG_MANUFACTURER: "Third Reality, Inc", + SIG_MODEL: "3RSS007Z", + SIG_NODE_DESC: b"\x02@\x803\x12\x7fd\x00\x00,d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 2, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 25], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.third_reality_inc_3rss007z_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss007z_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss007z_lqi", + }, + ("switch", "00:11:22:33:44:55:66:77-1-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.third_reality_inc_3rss007z_switch", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.third_reality_inc_3rss007z_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 89, + SIG_MANUFACTURER: "Third Reality, Inc", + SIG_MODEL: "3RSS008Z", + SIG_NODE_DESC: b"\x02@\x803\x12\x7fd\x00\x00,d\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 2, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 25], + SIG_EP_OUTPUT: [1], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.third_reality_inc_3rss008z_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss008z_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss008z_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss008z_lqi", + }, + ("switch", "00:11:22:33:44:55:66:77-1-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.third_reality_inc_3rss008z_switch", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: 
"update.third_reality_inc_3rss008z_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 90, + SIG_MANUFACTURER: "Visonic", + SIG_MODEL: "MCT-340 E", + SIG_NODE_DESC: b"\x02@\x80\x11\x10RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.visonic_mct_340_e_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.visonic_mct_340_e_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.visonic_mct_340_e_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.visonic_mct_340_e_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.visonic_mct_340_e_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.visonic_mct_340_e_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.visonic_mct_340_e_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 91, + SIG_MANUFACTURER: "Zen Within", + SIG_MODEL: "Zen-01", + SIG_NODE_DESC: b"\x02@\x80X\x11R\x80\x00\x00\x00\x80\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 769, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 4, 5, 32, 513, 514, 516, 2821], + SIG_EP_OUTPUT: [10, 25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.zen_within_zen_01_identify", + }, + ("climate", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["thermostat", "fan"], + DEV_SIG_ENT_MAP_CLASS: "ZenWithinThermostat", + DEV_SIG_ENT_MAP_ID: "climate.zen_within_zen_01_thermostat", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_lqi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-513-hvac_action"): { + DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], + DEV_SIG_ENT_MAP_CLASS: "ThermostatHVACAction", + DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_hvac_action", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-513-pi_heating_demand"): { + DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], + 
DEV_SIG_ENT_MAP_CLASS: "PiHeatingDemand", + DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_pi_heating_demand", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-513-setpoint_change_source"): { + DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], + DEV_SIG_ENT_MAP_CLASS: "SetpointChangeSource", + DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_setpoint_change_source", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.zen_within_zen_01_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 92, + SIG_MANUFACTURER: "_TYZB01_ns1ndbww", + SIG_MODEL: "TS0004", + SIG_NODE_DESC: b"\x01@\x8e\x02\x10R\x00\x02\x00,\x00\x02\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 256, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 4, 5, 6, 10], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + 2: { + SIG_EP_TYPE: 256, + DEV_SIG_EP_ID: 2, + SIG_EP_INPUT: [4, 5, 6], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 260, + }, + 3: { + SIG_EP_TYPE: 256, + DEV_SIG_EP_ID: 3, + SIG_EP_INPUT: [4, 5, 6], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 260, + }, + 4: { + SIG_EP_TYPE: 256, + DEV_SIG_EP_ID: 4, + SIG_EP_INPUT: [4, 5, 6], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.tyzb01_ns1ndbww_ts0004_light", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.tyzb01_ns1ndbww_ts0004_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.tyzb01_ns1ndbww_ts0004_lqi", + }, + ("light", "00:11:22:33:44:55:66:77-2"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.tyzb01_ns1ndbww_ts0004_light_2", + }, + ("light", "00:11:22:33:44:55:66:77-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.tyzb01_ns1ndbww_ts0004_light_3", + }, + ("light", "00:11:22:33:44:55:66:77-4"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_ID: "light.tyzb01_ns1ndbww_ts0004_light_4", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.tyzb01_ns1ndbww_ts0004_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 93, + SIG_MANUFACTURER: "netvox", + SIG_MODEL: "Z308E3ED", + SIG_NODE_DESC: b"\x02@\x80\x9f\x10RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 1026, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 3, 21, 32, 1280, 2821], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: { + ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { + DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], + DEV_SIG_ENT_MAP_CLASS: "IASZone", + DEV_SIG_ENT_MAP_ID: "binary_sensor.netvox_z308e3ed_ias_zone", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.netvox_z308e3ed_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + 
DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.netvox_z308e3ed_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.netvox_z308e3ed_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.netvox_z308e3ed_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 94, + SIG_MANUFACTURER: "sengled", + SIG_MODEL: "E11-G13", + SIG_NODE_DESC: b"\x02@\x8c`\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 257, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 1794, 2821], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], + DEV_SIG_ENT_MAP_CLASS: "MinTransitionLight", + DEV_SIG_ENT_MAP_ID: "light.sengled_e11_g13_light", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.sengled_e11_g13_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", + DEV_SIG_ENT_MAP_ID: "sensor.sengled_e11_g13_instantaneous_demand", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", + DEV_SIG_ENT_MAP_ID: "sensor.sengled_e11_g13_summation_delivered", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sengled_e11_g13_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sengled_e11_g13_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.sengled_e11_g13_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 95, + SIG_MANUFACTURER: "sengled", + SIG_MODEL: "E12-N14", + SIG_NODE_DESC: b"\x02@\x8c`\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 257, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 1794, 2821], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], + DEV_SIG_ENT_MAP_CLASS: "MinTransitionLight", + DEV_SIG_ENT_MAP_ID: "light.sengled_e12_n14_light", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.sengled_e12_n14_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", + DEV_SIG_ENT_MAP_ID: "sensor.sengled_e12_n14_instantaneous_demand", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", + DEV_SIG_ENT_MAP_ID: 
"sensor.sengled_e12_n14_summation_delivered", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sengled_e12_n14_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sengled_e12_n14_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.sengled_e12_n14_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 96, + SIG_MANUFACTURER: "sengled", + SIG_MODEL: "Z01-A19NAE26", + SIG_NODE_DESC: b"\x02@\x8c`\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 257, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 1794, 2821], + SIG_EP_OUTPUT: [25], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], + DEV_SIG_ENT_MAP: { + ("light", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], + DEV_SIG_ENT_MAP_CLASS: "MinTransitionLight", + DEV_SIG_ENT_MAP_ID: "light.sengled_z01_a19nae26_light", + }, + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.sengled_z01_a19nae26_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", + DEV_SIG_ENT_MAP_ID: "sensor.sengled_z01_a19nae26_instantaneous_demand", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", + DEV_SIG_ENT_MAP_ID: "sensor.sengled_z01_a19nae26_summation_delivered", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sengled_z01_a19nae26_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.sengled_z01_a19nae26_lqi", + }, + ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.sengled_z01_a19nae26_firmware", + }, + }, + }, + { + DEV_SIG_DEV_NO: 97, + SIG_MANUFACTURER: "unk_manufacturer", + SIG_MODEL: "unk_model", + SIG_NODE_DESC: b"\x01@\x8e\x10\x11RR\x00\x00\x00R\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 512, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 10, 21, 256, 64544, 64545], + SIG_EP_OUTPUT: [3, 64544], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.unk_manufacturer_unk_model_identify", + }, + ("cover", "00:11:22:33:44:55:66:77-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["level", "on_off", "shade"], + DEV_SIG_ENT_MAP_CLASS: "Shade", + DEV_SIG_ENT_MAP_ID: "cover.unk_manufacturer_unk_model_shade", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: 
"sensor.unk_manufacturer_unk_model_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.unk_manufacturer_unk_model_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 98, + SIG_MANUFACTURER: "Digi", + SIG_MODEL: "XBee3", + SIG_NODE_DESC: b"\x01@\x8e\x1e\x10R\xff\x00\x00,\xff\x00\x00", + SIG_ENDPOINTS: { + 208: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 208, + SIG_EP_INPUT: [6, 12], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 209: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 209, + SIG_EP_INPUT: [6, 12], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 210: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 210, + SIG_EP_INPUT: [6, 12], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 211: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 211, + SIG_EP_INPUT: [6, 12], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 212: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 212, + SIG_EP_INPUT: [6], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 213: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 213, + SIG_EP_INPUT: [6], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 214: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 214, + SIG_EP_INPUT: [6], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 215: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 215, + SIG_EP_INPUT: [6, 12], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 216: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 216, + SIG_EP_INPUT: [6], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 217: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 217, + SIG_EP_INPUT: [6], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 218: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 218, + SIG_EP_INPUT: [6, 13], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 219: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 219, + SIG_EP_INPUT: [6, 13], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 220: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 220, + SIG_EP_INPUT: [6], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 221: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 221, + SIG_EP_INPUT: [6], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 222: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 222, + SIG_EP_INPUT: [6], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 49413, + }, + 232: { + SIG_EP_TYPE: 1, + DEV_SIG_EP_ID: 232, + SIG_EP_INPUT: [17, 146], + SIG_EP_OUTPUT: [8, 17], + SIG_EP_PROFILE: 49413, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: ["232:0x0008"], + DEV_SIG_ENT_MAP: { + ("sensor", "00:11:22:33:44:55:66:77-208-12"): { + DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], + DEV_SIG_ENT_MAP_CLASS: "AnalogInput", + DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input", + }, + ("switch", "00:11:22:33:44:55:66:77-208-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch", + }, + ("sensor", "00:11:22:33:44:55:66:77-209-12"): { + DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], + DEV_SIG_ENT_MAP_CLASS: "AnalogInput", + DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input_2", + }, + ("switch", "00:11:22:33:44:55:66:77-209-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_2", + }, + ("sensor", "00:11:22:33:44:55:66:77-210-12"): { + DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], + DEV_SIG_ENT_MAP_CLASS: "AnalogInput", + DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input_3", + }, + ("switch", "00:11:22:33:44:55:66:77-210-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + 
DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_3", + }, + ("sensor", "00:11:22:33:44:55:66:77-211-12"): { + DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], + DEV_SIG_ENT_MAP_CLASS: "AnalogInput", + DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input_4", + }, + ("switch", "00:11:22:33:44:55:66:77-211-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_4", + }, + ("switch", "00:11:22:33:44:55:66:77-212-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_5", + }, + ("switch", "00:11:22:33:44:55:66:77-213-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_6", + }, + ("switch", "00:11:22:33:44:55:66:77-214-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_7", + }, + ("sensor", "00:11:22:33:44:55:66:77-215-12"): { + DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], + DEV_SIG_ENT_MAP_CLASS: "AnalogInput", + DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input_5", + }, + ("switch", "00:11:22:33:44:55:66:77-215-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_8", + }, + ("switch", "00:11:22:33:44:55:66:77-216-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_9", + }, + ("switch", "00:11:22:33:44:55:66:77-217-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_10", + }, + ("number", "00:11:22:33:44:55:66:77-218-13"): { + DEV_SIG_CLUSTER_HANDLERS: ["analog_output"], + DEV_SIG_ENT_MAP_CLASS: "ZhaNumber", + DEV_SIG_ENT_MAP_ID: "number.digi_xbee3_number", + }, + ("switch", "00:11:22:33:44:55:66:77-218-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_11", + }, + ("switch", "00:11:22:33:44:55:66:77-219-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_12", + }, + ("number", "00:11:22:33:44:55:66:77-219-13"): { + DEV_SIG_CLUSTER_HANDLERS: ["analog_output"], + DEV_SIG_ENT_MAP_CLASS: "ZhaNumber", + DEV_SIG_ENT_MAP_ID: "number.digi_xbee3_number_2", + }, + ("switch", "00:11:22:33:44:55:66:77-220-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_13", + }, + ("switch", "00:11:22:33:44:55:66:77-221-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_14", + }, + ("switch", "00:11:22:33:44:55:66:77-222-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Switch", + DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_15", + }, + }, + }, + { + DEV_SIG_DEV_NO: 99, + SIG_MANUFACTURER: "efektalab.ru", + SIG_MODEL: "EFEKTA_PWS", + SIG_NODE_DESC: b"\x02@\x80\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", + SIG_ENDPOINTS: { + 1: { + SIG_EP_TYPE: 12, + DEV_SIG_EP_ID: 1, + SIG_EP_INPUT: [0, 1, 1026, 1032], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: 260, + }, + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: { + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: 
"sensor.efektalab_ru_efekta_pws_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1032"): { + DEV_SIG_CLUSTER_HANDLERS: ["soil_moisture"], + DEV_SIG_ENT_MAP_CLASS: "SoilMoisture", + DEV_SIG_ENT_MAP_ID: "sensor.efektalab_ru_efekta_pws_soil_moisture", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.efektalab_ru_efekta_pws_temperature", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.efektalab_ru_efekta_pws_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.efektalab_ru_efekta_pws_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 100, + SIG_MANUFACTURER: "Konke", + SIG_MODEL: "3AFE170100510001", + SIG_NODE_DESC: b"\x02@\x80\x02\x10RR\x00\x00,R\x00\x00", + SIG_ENDPOINTS: { + 1: { + PROFILE_ID: 260, + DEVICE_TYPE: zha.DeviceType.ON_OFF_OUTPUT, + INPUT_CLUSTERS: [ + Basic.cluster_id, + PowerConfiguration.cluster_id, + Identify.cluster_id, + Groups.cluster_id, + Scenes.cluster_id, + OnOff.cluster_id, + ], + OUTPUT_CLUSTERS: [ + Identify.cluster_id, + ], + } + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-1-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.konke_3afe170100510001_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.konke_3afe170100510001_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.konke_3afe170100510001_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.konke_3afe170100510001_lqi", + }, + }, + }, + { + DEV_SIG_DEV_NO: 101, + SIG_MANUFACTURER: "Philips", + SIG_MODEL: "SML001", + SIG_NODE_DESC: b"\x02@\x80\x0b\x10Y?\x00\x00\x00?\x00\x00", + SIG_ENDPOINTS: { + 1: { + PROFILE_ID: zll.PROFILE_ID, + DEVICE_TYPE: zll.DeviceType.ON_OFF_SENSOR, + INPUT_CLUSTERS: [Basic.cluster_id], + OUTPUT_CLUSTERS: [ + Basic.cluster_id, + Identify.cluster_id, + Groups.cluster_id, + Scenes.cluster_id, + OnOff.cluster_id, + LevelControl.cluster_id, + Color.cluster_id, + ], + }, + 2: { + PROFILE_ID: zha.PROFILE_ID, + DEVICE_TYPE: zha.DeviceType.OCCUPANCY_SENSOR, + INPUT_CLUSTERS: [ + Basic.cluster_id, + PowerConfiguration.cluster_id, + Identify.cluster_id, + IlluminanceMeasurement.cluster_id, + TemperatureMeasurement.cluster_id, + OccupancySensing.cluster_id, + ], + OUTPUT_CLUSTERS: [ + Ota.cluster_id, + ], + }, + }, + DEV_SIG_ATTRIBUTES: { + 2: { + "basic": { + "trigger_indicator": Bool(False), + }, + "philips_occupancy": { + "sensitivity": uint8_t(1), + }, + } + }, + DEV_SIG_EVT_CLUSTER_HANDLERS: [ + "1:0x0005", + "1:0x0006", + "1:0x0008", + "1:0x0300", + "2:0x0019", + ], + DEV_SIG_ENT_MAP: { + ("button", "00:11:22:33:44:55:66:77-2-3"): { + DEV_SIG_CLUSTER_HANDLERS: ["identify"], + DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", + DEV_SIG_ENT_MAP_ID: "button.philips_sml001_identify", + }, + ("sensor", "00:11:22:33:44:55:66:77-2-1"): { + DEV_SIG_CLUSTER_HANDLERS: ["power"], + 
DEV_SIG_ENT_MAP_CLASS: "Battery", + DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_battery", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "RSSISensor", + DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_rssi", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "LQISensor", + DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_lqi", + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-1-6"): { + DEV_SIG_CLUSTER_HANDLERS: ["on_off"], + DEV_SIG_ENT_MAP_CLASS: "Motion", + DEV_SIG_ENT_MAP_ID: "binary_sensor.philips_sml001_motion", + }, + ("sensor", "00:11:22:33:44:55:66:77-2-1024"): { + DEV_SIG_CLUSTER_HANDLERS: ["illuminance"], + DEV_SIG_ENT_MAP_CLASS: "Illuminance", + DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_illuminance", + }, + ("binary_sensor", "00:11:22:33:44:55:66:77-2-1030"): { + DEV_SIG_CLUSTER_HANDLERS: ["philips_occupancy"], + DEV_SIG_ENT_MAP_CLASS: "HueOccupancy", + DEV_SIG_ENT_MAP_ID: "binary_sensor.philips_sml001_occupancy", + }, + ("sensor", "00:11:22:33:44:55:66:77-2-1026"): { + DEV_SIG_CLUSTER_HANDLERS: ["temperature"], + DEV_SIG_ENT_MAP_CLASS: "Temperature", + DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_temperature", + }, + ("switch", "00:11:22:33:44:55:66:77-2-0-trigger_indicator"): { + DEV_SIG_CLUSTER_HANDLERS: ["basic"], + DEV_SIG_ENT_MAP_CLASS: "HueMotionTriggerIndicatorSwitch", + DEV_SIG_ENT_MAP_ID: "switch.philips_sml001_led_trigger_indicator", + }, + ("select", "00:11:22:33:44:55:66:77-2-1030-motion_sensitivity"): { + DEV_SIG_CLUSTER_HANDLERS: ["philips_occupancy"], + DEV_SIG_ENT_MAP_CLASS: "HueV1MotionSensitivity", + DEV_SIG_ENT_MAP_ID: "select.philips_sml001_motion_sensitivity", + }, + ("update", "00:11:22:33:44:55:66:77-2-25-firmware_update"): { + DEV_SIG_CLUSTER_HANDLERS: ["ota"], + DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", + DEV_SIG_ENT_MAP_ID: "update.philips_sml001_firmware", + }, + }, + }, +] diff --git a/tests/components/zone/test_trigger.py b/tests/components/zone/test_trigger.py index a28b3c0592a..6ec5e2fd894 100644 --- a/tests/components/zone/test_trigger.py +++ b/tests/components/zone/test_trigger.py @@ -8,7 +8,7 @@ from homeassistant.core import Context, HomeAssistant, ServiceCall from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component -from tests.common import mock_component +from tests.common import async_mock_service, mock_component @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -16,8 +16,14 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") + + @pytest.fixture(autouse=True) -def setup_comp(hass: HomeAssistant) -> None: +def setup_comp(hass): """Initialize components.""" mock_component(hass, "group") hass.loop.run_until_complete( @@ -37,7 +43,7 @@ def setup_comp(hass: HomeAssistant) -> None: async def test_if_fires_on_zone_enter( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on zone enter.""" context = Context() @@ -82,11 +88,9 @@ async def test_if_fires_on_zone_enter( ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].context.parent_id == context.id - assert ( - 
service_calls[0].data["some"] == "zone - test.entity - hello - hello - test - 0" - ) + assert len(calls) == 1 + assert calls[0].context.parent_id == context.id + assert calls[0].data["some"] == "zone - test.entity - hello - hello - test - 0" # Set out of zone again so we can trigger call hass.states.async_set( @@ -100,20 +104,17 @@ async def test_if_fires_on_zone_enter( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 hass.states.async_set( "test.entity", "hello", {"latitude": 32.880586, "longitude": -117.237564} ) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 1 async def test_if_fires_on_zone_enter_uuid( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - service_calls: list[ServiceCall], + hass: HomeAssistant, entity_registry: er.EntityRegistry, calls: list[ServiceCall] ) -> None: """Test for firing on zone enter when device is specified by entity registry id.""" context = Context() @@ -164,11 +165,9 @@ async def test_if_fires_on_zone_enter_uuid( ) await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].context.parent_id == context.id - assert ( - service_calls[0].data["some"] == "zone - test.entity - hello - hello - test - 0" - ) + assert len(calls) == 1 + assert calls[0].context.parent_id == context.id + assert calls[0].data["some"] == "zone - test.entity - hello - hello - test - 0" # Set out of zone again so we can trigger call hass.states.async_set( @@ -182,18 +181,17 @@ async def test_if_fires_on_zone_enter_uuid( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) - assert len(service_calls) == 2 hass.states.async_set( "test.entity", "hello", {"latitude": 32.880586, "longitude": -117.237564} ) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 1 async def test_if_not_fires_for_enter_on_zone_leave( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for not firing on zone leave.""" hass.states.async_set( @@ -222,11 +220,11 @@ async def test_if_not_fires_for_enter_on_zone_leave( ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_if_fires_on_zone_leave( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for firing on zone leave.""" hass.states.async_set( @@ -255,11 +253,11 @@ async def test_if_fires_on_zone_leave( ) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_if_not_fires_for_leave_on_zone_enter( - hass: HomeAssistant, service_calls: list[ServiceCall] + hass: HomeAssistant, calls: list[ServiceCall] ) -> None: """Test for not firing on zone enter.""" hass.states.async_set( @@ -288,12 +286,10 @@ async def test_if_not_fires_for_leave_on_zone_enter( ) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 -async def test_zone_condition( - hass: HomeAssistant, service_calls: list[ServiceCall] -) -> None: +async def test_zone_condition(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test for zone condition.""" hass.states.async_set( "test.entity", "hello", {"latitude": 32.880586, "longitude": -117.237564} @@ -318,11 +314,11 @@ async def test_zone_condition( hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 async def test_unknown_zone( - 
hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture ) -> None: """Test for firing on zone enter.""" context = Context() diff --git a/tests/components/zwave_js/conftest.py b/tests/components/zwave_js/conftest.py index 37b1dde7316..a2a4c217b8b 100644 --- a/tests/components/zwave_js/conftest.py +++ b/tests/components/zwave_js/conftest.py @@ -3,43 +3,258 @@ import asyncio import copy import io -from typing import Any, cast -from unittest.mock import DEFAULT, AsyncMock, MagicMock, patch +import json +from unittest.mock import DEFAULT, AsyncMock, patch import pytest from zwave_js_server.event import Event from zwave_js_server.model.driver import Driver from zwave_js_server.model.node import Node -from zwave_js_server.model.node.data_model import NodeDataType from zwave_js_server.version import VersionInfo -from homeassistant.components.zwave_js.const import DOMAIN from homeassistant.core import HomeAssistant -from homeassistant.util.json import JsonArrayType -from tests.common import ( - MockConfigEntry, - load_json_array_fixture, - load_json_object_fixture, -) +from tests.common import MockConfigEntry, load_fixture + +# Add-on fixtures + + +@pytest.fixture(name="addon_info_side_effect") +def addon_info_side_effect_fixture(): + """Return the add-on info side effect.""" + return None + + +@pytest.fixture(name="addon_info") +def mock_addon_info(addon_info_side_effect): + """Mock Supervisor add-on info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_info", + side_effect=addon_info_side_effect, + ) as addon_info: + addon_info.return_value = { + "available": False, + "hostname": None, + "options": {}, + "state": None, + "update_available": False, + "version": None, + } + yield addon_info + + +@pytest.fixture(name="addon_store_info_side_effect") +def addon_store_info_side_effect_fixture(): + """Return the add-on store info side effect.""" + return None + + +@pytest.fixture(name="addon_store_info") +def mock_addon_store_info(addon_store_info_side_effect): + """Mock Supervisor add-on info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_store_info", + side_effect=addon_store_info_side_effect, + ) as addon_store_info: + addon_store_info.return_value = { + "available": False, + "installed": None, + "state": None, + "version": "1.0.0", + } + yield addon_store_info + + +@pytest.fixture(name="addon_running") +def mock_addon_running(addon_store_info, addon_info): + """Mock add-on already running.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "started", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["state"] = "started" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +@pytest.fixture(name="addon_installed") +def mock_addon_installed(addon_store_info, addon_info): + """Mock add-on already installed but not running.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +@pytest.fixture(name="addon_not_installed") +def mock_addon_not_installed(addon_store_info, addon_info): + """Mock add-on not installed.""" + addon_store_info.return_value["available"] = True + return addon_info + + 
+@pytest.fixture(name="addon_options") +def mock_addon_options(addon_info): + """Mock add-on options.""" + return addon_info.return_value["options"] + + +@pytest.fixture(name="set_addon_options_side_effect") +def set_addon_options_side_effect_fixture(addon_options): + """Return the set add-on options side effect.""" + + async def set_addon_options(hass: HomeAssistant, slug, options): + """Mock set add-on options.""" + addon_options.update(options["options"]) + + return set_addon_options + + +@pytest.fixture(name="set_addon_options") +def mock_set_addon_options(set_addon_options_side_effect): + """Mock set add-on options.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_set_addon_options", + side_effect=set_addon_options_side_effect, + ) as set_options: + yield set_options + + +@pytest.fixture(name="install_addon_side_effect") +def install_addon_side_effect_fixture(addon_store_info, addon_info): + """Return the install add-on side effect.""" + + async def install_addon(hass: HomeAssistant, slug): + """Mock install add-on.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + + return install_addon + + +@pytest.fixture(name="install_addon") +def mock_install_addon(install_addon_side_effect): + """Mock install add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_install_addon", + side_effect=install_addon_side_effect, + ) as install_addon: + yield install_addon + + +@pytest.fixture(name="update_addon") +def mock_update_addon(): + """Mock update add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_update_addon" + ) as update_addon: + yield update_addon + + +@pytest.fixture(name="start_addon_side_effect") +def start_addon_side_effect_fixture(addon_store_info, addon_info): + """Return the start add-on options side effect.""" + + async def start_addon(hass: HomeAssistant, slug): + """Mock start add-on.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "started", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["state"] = "started" + + return start_addon + + +@pytest.fixture(name="start_addon") +def mock_start_addon(start_addon_side_effect): + """Mock start add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_start_addon", + side_effect=start_addon_side_effect, + ) as start_addon: + yield start_addon + + +@pytest.fixture(name="stop_addon") +def stop_addon_fixture(): + """Mock stop add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_stop_addon" + ) as stop_addon: + yield stop_addon + + +@pytest.fixture(name="restart_addon_side_effect") +def restart_addon_side_effect_fixture(): + """Return the restart add-on options side effect.""" + return None + + +@pytest.fixture(name="restart_addon") +def mock_restart_addon(restart_addon_side_effect): + """Mock restart add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_restart_addon", + side_effect=restart_addon_side_effect, + ) as restart_addon: + yield restart_addon + + +@pytest.fixture(name="uninstall_addon") +def uninstall_addon_fixture(): + """Mock uninstall add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_uninstall_addon" + ) as uninstall_addon: + yield 
uninstall_addon + + +@pytest.fixture(name="create_backup") +def create_backup_fixture(): + """Mock create backup.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_create_backup" + ) as create_backup: + yield create_backup + # State fixtures @pytest.fixture(name="controller_state", scope="package") -def controller_state_fixture() -> dict[str, Any]: +def controller_state_fixture(): """Load the controller state fixture data.""" - return load_json_object_fixture("controller_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/controller_state.json")) @pytest.fixture(name="controller_node_state", scope="package") -def controller_node_state_fixture() -> dict[str, Any]: +def controller_node_state_fixture(): """Load the controller node state fixture data.""" - return load_json_object_fixture("controller_node_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/controller_node_state.json")) @pytest.fixture(name="version_state", scope="package") -def version_state_fixture() -> dict[str, Any]: +def version_state_fixture(): """Load the version state fixture data.""" return { "type": "version", @@ -50,7 +265,7 @@ def version_state_fixture() -> dict[str, Any]: @pytest.fixture(name="log_config_state") -def log_config_state_fixture() -> dict[str, Any]: +def log_config_state_fixture(): """Return log config state fixture data.""" return { "enabled": True, @@ -62,70 +277,70 @@ def log_config_state_fixture() -> dict[str, Any]: @pytest.fixture(name="config_entry_diagnostics", scope="package") -def config_entry_diagnostics_fixture() -> JsonArrayType: +def config_entry_diagnostics_fixture(): """Load the config entry diagnostics fixture data.""" - return load_json_array_fixture("config_entry_diagnostics.json", DOMAIN) + return json.loads(load_fixture("zwave_js/config_entry_diagnostics.json")) @pytest.fixture(name="config_entry_diagnostics_redacted", scope="package") -def config_entry_diagnostics_redacted_fixture() -> dict[str, Any]: +def config_entry_diagnostics_redacted_fixture(): """Load the redacted config entry diagnostics fixture data.""" - return load_json_object_fixture("config_entry_diagnostics_redacted.json", DOMAIN) + return json.loads(load_fixture("zwave_js/config_entry_diagnostics_redacted.json")) @pytest.fixture(name="multisensor_6_state", scope="package") -def multisensor_6_state_fixture() -> dict[str, Any]: +def multisensor_6_state_fixture(): """Load the multisensor 6 node state fixture data.""" - return load_json_object_fixture("multisensor_6_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/multisensor_6_state.json")) @pytest.fixture(name="ecolink_door_sensor_state", scope="package") -def ecolink_door_sensor_state_fixture() -> dict[str, Any]: +def ecolink_door_sensor_state_fixture(): """Load the Ecolink Door/Window Sensor node state fixture data.""" - return load_json_object_fixture("ecolink_door_sensor_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/ecolink_door_sensor_state.json")) @pytest.fixture(name="hank_binary_switch_state", scope="package") -def binary_switch_state_fixture() -> dict[str, Any]: +def binary_switch_state_fixture(): """Load the hank binary switch node state fixture data.""" - return load_json_object_fixture("hank_binary_switch_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/hank_binary_switch_state.json")) @pytest.fixture(name="bulb_6_multi_color_state", scope="package") -def bulb_6_multi_color_state_fixture() -> dict[str, Any]: +def bulb_6_multi_color_state_fixture(): """Load the bulb 
6 multi-color node state fixture data.""" - return load_json_object_fixture("bulb_6_multi_color_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/bulb_6_multi_color_state.json")) @pytest.fixture(name="light_color_null_values_state", scope="package") -def light_color_null_values_state_fixture() -> dict[str, Any]: +def light_color_null_values_state_fixture(): """Load the light color null values node state fixture data.""" - return load_json_object_fixture("light_color_null_values_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/light_color_null_values_state.json")) @pytest.fixture(name="eaton_rf9640_dimmer_state", scope="package") -def eaton_rf9640_dimmer_state_fixture() -> dict[str, Any]: +def eaton_rf9640_dimmer_state_fixture(): """Load the eaton rf9640 dimmer node state fixture data.""" - return load_json_object_fixture("eaton_rf9640_dimmer_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/eaton_rf9640_dimmer_state.json")) @pytest.fixture(name="lock_schlage_be469_state", scope="package") -def lock_schlage_be469_state_fixture() -> dict[str, Any]: +def lock_schlage_be469_state_fixture(): """Load the schlage lock node state fixture data.""" - return load_json_object_fixture("lock_schlage_be469_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/lock_schlage_be469_state.json")) @pytest.fixture(name="lock_august_asl03_state", scope="package") -def lock_august_asl03_state_fixture() -> dict[str, Any]: +def lock_august_asl03_state_fixture(): """Load the August Pro lock node state fixture data.""" - return load_json_object_fixture("lock_august_asl03_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/lock_august_asl03_state.json")) @pytest.fixture(name="climate_radio_thermostat_ct100_plus_state", scope="package") -def climate_radio_thermostat_ct100_plus_state_fixture() -> dict[str, Any]: +def climate_radio_thermostat_ct100_plus_state_fixture(): """Load the climate radio thermostat ct100 plus node state fixture data.""" - return load_json_object_fixture( - "climate_radio_thermostat_ct100_plus_state.json", DOMAIN + return json.loads( + load_fixture("zwave_js/climate_radio_thermostat_ct100_plus_state.json") ) @@ -133,215 +348,211 @@ def climate_radio_thermostat_ct100_plus_state_fixture() -> dict[str, Any]: name="climate_radio_thermostat_ct100_plus_different_endpoints_state", scope="package", ) -def climate_radio_thermostat_ct100_plus_different_endpoints_state_fixture() -> ( - dict[str, Any] -): +def climate_radio_thermostat_ct100_plus_different_endpoints_state_fixture(): """Load the thermostat fixture state with values on different endpoints. This device is a radio thermostat ct100. 
""" - return load_json_object_fixture( - "climate_radio_thermostat_ct100_plus_different_endpoints_state.json", DOMAIN + return json.loads( + load_fixture( + "zwave_js/climate_radio_thermostat_ct100_plus_different_endpoints_state.json" + ) ) @pytest.fixture(name="climate_adc_t3000_state", scope="package") -def climate_adc_t3000_state_fixture() -> dict[str, Any]: +def climate_adc_t3000_state_fixture(): """Load the climate ADC-T3000 node state fixture data.""" - return load_json_object_fixture("climate_adc_t3000_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/climate_adc_t3000_state.json")) @pytest.fixture(name="climate_airzone_aidoo_control_hvac_unit_state", scope="package") -def climate_airzone_aidoo_control_hvac_unit_state_fixture() -> dict[str, Any]: +def climate_airzone_aidoo_control_hvac_unit_state_fixture(): """Load the climate Airzone Aidoo Control HVAC Unit state fixture data.""" - return load_json_object_fixture( - "climate_airzone_aidoo_control_hvac_unit_state.json", DOMAIN + return json.loads( + load_fixture("zwave_js/climate_airzone_aidoo_control_hvac_unit_state.json") ) @pytest.fixture(name="climate_danfoss_lc_13_state", scope="package") -def climate_danfoss_lc_13_state_fixture() -> dict[str, Any]: +def climate_danfoss_lc_13_state_fixture(): """Load Danfoss (LC-13) electronic radiator thermostat node state fixture data.""" - return load_json_object_fixture("climate_danfoss_lc_13_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/climate_danfoss_lc_13_state.json")) @pytest.fixture(name="climate_eurotronic_spirit_z_state", scope="package") -def climate_eurotronic_spirit_z_state_fixture() -> dict[str, Any]: +def climate_eurotronic_spirit_z_state_fixture(): """Load the climate Eurotronic Spirit Z thermostat node state fixture data.""" - return load_json_object_fixture("climate_eurotronic_spirit_z_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/climate_eurotronic_spirit_z_state.json")) @pytest.fixture(name="climate_heatit_z_trm6_state", scope="package") -def climate_heatit_z_trm6_state_fixture() -> dict[str, Any]: +def climate_heatit_z_trm6_state_fixture(): """Load the climate HEATIT Z-TRM6 thermostat node state fixture data.""" - return load_json_object_fixture("climate_heatit_z_trm6_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/climate_heatit_z_trm6_state.json")) @pytest.fixture(name="climate_heatit_z_trm3_state", scope="package") -def climate_heatit_z_trm3_state_fixture() -> dict[str, Any]: +def climate_heatit_z_trm3_state_fixture(): """Load the climate HEATIT Z-TRM3 thermostat node state fixture data.""" - return load_json_object_fixture("climate_heatit_z_trm3_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/climate_heatit_z_trm3_state.json")) @pytest.fixture(name="climate_heatit_z_trm2fx_state", scope="package") -def climate_heatit_z_trm2fx_state_fixture() -> dict[str, Any]: +def climate_heatit_z_trm2fx_state_fixture(): """Load the climate HEATIT Z-TRM2fx thermostat node state fixture data.""" - return load_json_object_fixture("climate_heatit_z_trm2fx_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/climate_heatit_z_trm2fx_state.json")) @pytest.fixture(name="climate_heatit_z_trm3_no_value_state", scope="package") -def climate_heatit_z_trm3_no_value_state_fixture() -> dict[str, Any]: +def climate_heatit_z_trm3_no_value_state_fixture(): """Load the climate HEATIT Z-TRM3 thermostat node w/no value state fixture data.""" - return 
load_json_object_fixture("climate_heatit_z_trm3_no_value_state.json", DOMAIN) + return json.loads( + load_fixture("zwave_js/climate_heatit_z_trm3_no_value_state.json") + ) @pytest.fixture(name="nortek_thermostat_state", scope="package") -def nortek_thermostat_state_fixture() -> dict[str, Any]: +def nortek_thermostat_state_fixture(): """Load the nortek thermostat node state fixture data.""" - return load_json_object_fixture("nortek_thermostat_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/nortek_thermostat_state.json")) @pytest.fixture(name="srt321_hrt4_zw_state", scope="package") -def srt321_hrt4_zw_state_fixture() -> dict[str, Any]: +def srt321_hrt4_zw_state_fixture(): """Load the climate HRT4-ZW / SRT321 / SRT322 thermostat node state fixture data.""" - return load_json_object_fixture("srt321_hrt4_zw_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/srt321_hrt4_zw_state.json")) @pytest.fixture(name="chain_actuator_zws12_state", scope="package") -def window_cover_state_fixture() -> dict[str, Any]: +def window_cover_state_fixture(): """Load the window cover node state fixture data.""" - return load_json_object_fixture("chain_actuator_zws12_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/chain_actuator_zws12_state.json")) @pytest.fixture(name="fan_generic_state", scope="package") -def fan_generic_state_fixture() -> dict[str, Any]: +def fan_generic_state_fixture(): """Load the fan node state fixture data.""" - return load_json_object_fixture("fan_generic_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/fan_generic_state.json")) @pytest.fixture(name="hs_fc200_state", scope="package") -def hs_fc200_state_fixture() -> dict[str, Any]: +def hs_fc200_state_fixture(): """Load the HS FC200+ node state fixture data.""" - return load_json_object_fixture("fan_hs_fc200_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/fan_hs_fc200_state.json")) @pytest.fixture(name="leviton_zw4sf_state", scope="package") -def leviton_zw4sf_state_fixture() -> dict[str, Any]: +def leviton_zw4sf_state_fixture(): """Load the Leviton ZW4SF node state fixture data.""" - return load_json_object_fixture("leviton_zw4sf_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/leviton_zw4sf_state.json")) @pytest.fixture(name="fan_honeywell_39358_state", scope="package") -def fan_honeywell_39358_state_fixture() -> dict[str, Any]: +def fan_honeywell_39358_state_fixture(): """Load the fan node state fixture data.""" - return load_json_object_fixture("fan_honeywell_39358_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/fan_honeywell_39358_state.json")) @pytest.fixture(name="gdc_zw062_state", scope="package") -def motorized_barrier_cover_state_fixture() -> dict[str, Any]: +def motorized_barrier_cover_state_fixture(): """Load the motorized barrier cover node state fixture data.""" - return load_json_object_fixture("cover_zw062_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/cover_zw062_state.json")) @pytest.fixture(name="iblinds_v2_state", scope="package") -def iblinds_v2_state_fixture() -> dict[str, Any]: +def iblinds_v2_state_fixture(): """Load the iBlinds v2 node state fixture data.""" - return load_json_object_fixture("cover_iblinds_v2_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/cover_iblinds_v2_state.json")) @pytest.fixture(name="iblinds_v3_state", scope="package") -def iblinds_v3_state_fixture() -> dict[str, Any]: +def iblinds_v3_state_fixture(): """Load the iBlinds v3 node state fixture data.""" - 
return load_json_object_fixture("cover_iblinds_v3_state.json", DOMAIN) - - -@pytest.fixture(name="zvidar_state", scope="package") -def zvidar_state_fixture() -> dict[str, Any]: - """Load the ZVIDAR node state fixture data.""" - return load_json_object_fixture("cover_zvidar_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/cover_iblinds_v3_state.json")) @pytest.fixture(name="qubino_shutter_state", scope="package") -def qubino_shutter_state_fixture() -> dict[str, Any]: +def qubino_shutter_state_fixture(): """Load the Qubino Shutter node state fixture data.""" - return load_json_object_fixture("cover_qubino_shutter_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/cover_qubino_shutter_state.json")) @pytest.fixture(name="aeotec_nano_shutter_state", scope="package") -def aeotec_nano_shutter_state_fixture() -> dict[str, Any]: +def aeotec_nano_shutter_state_fixture(): """Load the Aeotec Nano Shutter node state fixture data.""" - return load_json_object_fixture("cover_aeotec_nano_shutter_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/cover_aeotec_nano_shutter_state.json")) @pytest.fixture(name="fibaro_fgr222_shutter_state", scope="package") -def fibaro_fgr222_shutter_state_fixture() -> dict[str, Any]: +def fibaro_fgr222_shutter_state_fixture(): """Load the Fibaro FGR222 node state fixture data.""" - return load_json_object_fixture("cover_fibaro_fgr222_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/cover_fibaro_fgr222_state.json")) @pytest.fixture(name="fibaro_fgr223_shutter_state", scope="package") -def fibaro_fgr223_shutter_state_fixture() -> dict[str, Any]: +def fibaro_fgr223_shutter_state_fixture(): """Load the Fibaro FGR223 node state fixture data.""" - return load_json_object_fixture("cover_fibaro_fgr223_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/cover_fibaro_fgr223_state.json")) @pytest.fixture(name="shelly_europe_ltd_qnsh_001p10_state", scope="package") -def shelly_europe_ltd_qnsh_001p10_state_fixture() -> dict[str, Any]: +def shelly_europe_ltd_qnsh_001p10_state_fixture(): """Load the Shelly QNSH 001P10 node state fixture data.""" - return load_json_object_fixture("shelly_europe_ltd_qnsh_001p10_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/shelly_europe_ltd_qnsh_001p10_state.json")) @pytest.fixture(name="merten_507801_state", scope="package") -def merten_507801_state_fixture() -> dict[str, Any]: +def merten_507801_state_fixture(): """Load the Merten 507801 Shutter node state fixture data.""" - return load_json_object_fixture("cover_merten_507801_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/cover_merten_507801_state.json")) @pytest.fixture(name="aeon_smart_switch_6_state", scope="package") -def aeon_smart_switch_6_state_fixture() -> dict[str, Any]: +def aeon_smart_switch_6_state_fixture(): """Load the AEON Labs (ZW096) Smart Switch 6 node state fixture data.""" - return load_json_object_fixture("aeon_smart_switch_6_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/aeon_smart_switch_6_state.json")) @pytest.fixture(name="ge_12730_state", scope="package") -def ge_12730_state_fixture() -> dict[str, Any]: +def ge_12730_state_fixture(): """Load the GE 12730 node state fixture data.""" - return load_json_object_fixture("fan_ge_12730_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/fan_ge_12730_state.json")) @pytest.fixture(name="aeotec_radiator_thermostat_state", scope="package") -def aeotec_radiator_thermostat_state_fixture() -> dict[str, Any]: +def 
aeotec_radiator_thermostat_state_fixture(): """Load the Aeotec Radiator Thermostat node state fixture data.""" - return load_json_object_fixture("aeotec_radiator_thermostat_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/aeotec_radiator_thermostat_state.json")) @pytest.fixture(name="inovelli_lzw36_state", scope="package") -def inovelli_lzw36_state_fixture() -> dict[str, Any]: +def inovelli_lzw36_state_fixture(): """Load the Inovelli LZW36 node state fixture data.""" - return load_json_object_fixture("inovelli_lzw36_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/inovelli_lzw36_state.json")) @pytest.fixture(name="null_name_check_state", scope="package") -def null_name_check_state_fixture() -> dict[str, Any]: +def null_name_check_state_fixture(): """Load the null name check node state fixture data.""" - return load_json_object_fixture("null_name_check_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/null_name_check_state.json")) @pytest.fixture(name="lock_id_lock_as_id150_state", scope="package") -def lock_id_lock_as_id150_state_fixture() -> dict[str, Any]: +def lock_id_lock_as_id150_state_fixture(): """Load the id lock id-150 lock node state fixture data.""" - return load_json_object_fixture("lock_id_lock_as_id150_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/lock_id_lock_as_id150_state.json")) @pytest.fixture( name="climate_radio_thermostat_ct101_multiple_temp_units_state", scope="package" ) -def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture() -> ( - dict[str, Any] -): +def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture(): """Load the climate multiple temp units node state fixture data.""" - return load_json_object_fixture( - "climate_radio_thermostat_ct101_multiple_temp_units_state.json", DOMAIN + return json.loads( + load_fixture( + "zwave_js/climate_radio_thermostat_ct101_multiple_temp_units_state.json" + ) ) @@ -351,160 +562,135 @@ def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture() -> ( ), scope="package", ) -def climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state_fixture() -> ( - dict[str, Any] -): +def climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state_fixture(): """Load climate device w/ mode+setpoint on diff endpoints node state fixture data.""" - return load_json_object_fixture( - "climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state.json", - DOMAIN, + return json.loads( + load_fixture( + "zwave_js/climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state.json" + ) ) @pytest.fixture(name="vision_security_zl7432_state", scope="package") -def vision_security_zl7432_state_fixture() -> dict[str, Any]: +def vision_security_zl7432_state_fixture(): """Load the vision security zl7432 switch node state fixture data.""" - return load_json_object_fixture("vision_security_zl7432_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/vision_security_zl7432_state.json")) @pytest.fixture(name="zen_31_state", scope="package") -def zem_31_state_fixture() -> dict[str, Any]: +def zem_31_state_fixture(): """Load the zen_31 node state fixture data.""" - return load_json_object_fixture("zen_31_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/zen_31_state.json")) @pytest.fixture(name="wallmote_central_scene_state", scope="package") -def wallmote_central_scene_state_fixture() -> dict[str, Any]: +def wallmote_central_scene_state_fixture(): """Load the wallmote 
central scene node state fixture data.""" - return load_json_object_fixture("wallmote_central_scene_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/wallmote_central_scene_state.json")) @pytest.fixture(name="ge_in_wall_dimmer_switch_state", scope="package") -def ge_in_wall_dimmer_switch_state_fixture() -> dict[str, Any]: +def ge_in_wall_dimmer_switch_state_fixture(): """Load the ge in-wall dimmer switch node state fixture data.""" - return load_json_object_fixture("ge_in_wall_dimmer_switch_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/ge_in_wall_dimmer_switch_state.json")) @pytest.fixture(name="aeotec_zw164_siren_state", scope="package") -def aeotec_zw164_siren_state_fixture() -> dict[str, Any]: +def aeotec_zw164_siren_state_fixture(): """Load the aeotec zw164 siren node state fixture data.""" - return load_json_object_fixture("aeotec_zw164_siren_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/aeotec_zw164_siren_state.json")) @pytest.fixture(name="lock_popp_electric_strike_lock_control_state", scope="package") -def lock_popp_electric_strike_lock_control_state_fixture() -> dict[str, Any]: +def lock_popp_electric_strike_lock_control_state_fixture(): """Load the popp electric strike lock control node state fixture data.""" - return load_json_object_fixture( - "lock_popp_electric_strike_lock_control_state.json", DOMAIN + return json.loads( + load_fixture("zwave_js/lock_popp_electric_strike_lock_control_state.json") ) @pytest.fixture(name="fortrezz_ssa1_siren_state", scope="package") -def fortrezz_ssa1_siren_state_fixture() -> dict[str, Any]: +def fortrezz_ssa1_siren_state_fixture(): """Load the fortrezz ssa1 siren node state fixture data.""" - return load_json_object_fixture("fortrezz_ssa1_siren_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/fortrezz_ssa1_siren_state.json")) @pytest.fixture(name="fortrezz_ssa3_siren_state", scope="package") -def fortrezz_ssa3_siren_state_fixture() -> dict[str, Any]: +def fortrezz_ssa3_siren_state_fixture(): """Load the fortrezz ssa3 siren node state fixture data.""" - return load_json_object_fixture("fortrezz_ssa3_siren_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/fortrezz_ssa3_siren_state.json")) @pytest.fixture(name="zp3111_not_ready_state", scope="package") -def zp3111_not_ready_state_fixture() -> dict[str, Any]: +def zp3111_not_ready_state_fixture(): """Load the zp3111 4-in-1 sensor not-ready node state fixture data.""" - return load_json_object_fixture("zp3111-5_not_ready_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/zp3111-5_not_ready_state.json")) @pytest.fixture(name="zp3111_state", scope="package") -def zp3111_state_fixture() -> dict[str, Any]: +def zp3111_state_fixture(): """Load the zp3111 4-in-1 sensor node state fixture data.""" - return load_json_object_fixture("zp3111-5_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/zp3111-5_state.json")) @pytest.fixture(name="express_controls_ezmultipli_state", scope="package") -def light_express_controls_ezmultipli_state_fixture() -> dict[str, Any]: +def light_express_controls_ezmultipli_state_fixture(): """Load the Express Controls EZMultiPli node state fixture data.""" - return load_json_object_fixture("express_controls_ezmultipli_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/express_controls_ezmultipli_state.json")) @pytest.fixture(name="lock_home_connect_620_state", scope="package") -def lock_home_connect_620_state_fixture() -> dict[str, Any]: +def 
lock_home_connect_620_state_fixture(): """Load the Home Connect 620 lock node state fixture data.""" - return load_json_object_fixture("lock_home_connect_620_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/lock_home_connect_620_state.json")) @pytest.fixture(name="switch_zooz_zen72_state", scope="package") -def switch_zooz_zen72_state_fixture() -> dict[str, Any]: +def switch_zooz_zen72_state_fixture(): """Load the Zooz Zen72 switch node state fixture data.""" - return load_json_object_fixture("switch_zooz_zen72_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/switch_zooz_zen72_state.json")) @pytest.fixture(name="indicator_test_state", scope="package") -def indicator_test_state_fixture() -> dict[str, Any]: +def indicator_test_state_fixture(): """Load the indicator CC test node state fixture data.""" - return load_json_object_fixture("indicator_test_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/indicator_test_state.json")) @pytest.fixture(name="energy_production_state", scope="package") -def energy_production_state_fixture() -> dict[str, Any]: +def energy_production_state_fixture(): """Load a mock node with energy production CC state fixture data.""" - return load_json_object_fixture("energy_production_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/energy_production_state.json")) @pytest.fixture(name="nice_ibt4zwave_state", scope="package") -def nice_ibt4zwave_state_fixture() -> dict[str, Any]: +def nice_ibt4zwave_state_fixture(): """Load a Nice IBT4ZWAVE cover node state fixture data.""" - return load_json_object_fixture("cover_nice_ibt4zwave_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/cover_nice_ibt4zwave_state.json")) @pytest.fixture(name="logic_group_zdb5100_state", scope="package") -def logic_group_zdb5100_state_fixture() -> dict[str, Any]: +def logic_group_zdb5100_state_fixture(): """Load the Logic Group ZDB5100 node state fixture data.""" - return load_json_object_fixture("logic_group_zdb5100_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/logic_group_zdb5100_state.json")) @pytest.fixture(name="central_scene_node_state", scope="package") -def central_scene_node_state_fixture() -> dict[str, Any]: +def central_scene_node_state_fixture(): """Load node with Central Scene CC node state fixture data.""" - return load_json_object_fixture("central_scene_node_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/central_scene_node_state.json")) @pytest.fixture(name="light_device_class_is_null_state", scope="package") -def light_device_class_is_null_state_fixture() -> dict[str, Any]: +def light_device_class_is_null_state_fixture(): """Load node with device class is None state fixture data.""" - return load_json_object_fixture("light_device_class_is_null_state.json", DOMAIN) + return json.loads(load_fixture("zwave_js/light_device_class_is_null_state.json")) @pytest.fixture(name="basic_cc_sensor_state", scope="package") -def basic_cc_sensor_state_fixture() -> dict[str, Any]: +def basic_cc_sensor_state_fixture(): """Load node with Basic CC sensor fixture data.""" - return load_json_object_fixture("basic_cc_sensor_state.json", DOMAIN) - - -@pytest.fixture(name="window_covering_outbound_bottom_state", scope="package") -def window_covering_outbound_bottom_state_fixture() -> dict[str, Any]: - """Load node with Window Covering CC fixture data, with only the outbound bottom position supported.""" - return load_json_object_fixture("window_covering_outbound_bottom.json", DOMAIN) - - 
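The state-fixture hunks in this conftest.py all make the same swap: the typed load_json_object_fixture(<file>, DOMAIN) helper on the removed lines is replaced by the older json.loads(load_fixture("zwave_js/<file>")) form on the added lines. The sketch below is only an illustration of those two equivalent loading styles under the standard tests.common helpers from the Home Assistant test suite; the fixture names example_state / example_state_legacy and the file example_state.json are hypothetical and not part of this change.

import json
from typing import Any

import pytest

from tests.common import load_fixture, load_json_object_fixture

# Assumption: mirrors the integration domain constant used by the real conftest.
DOMAIN = "zwave_js"


@pytest.fixture(name="example_state", scope="package")
def example_state_fixture() -> dict[str, Any]:
    """Typed style (the removed lines): the helper resolves the file in the integration's fixtures dir and parses it."""
    return load_json_object_fixture("example_state.json", DOMAIN)


@pytest.fixture(name="example_state_legacy", scope="package")
def example_state_legacy_fixture() -> dict[str, Any]:
    """Plain style (the added lines): read the raw fixture text and parse it manually."""
    return json.loads(load_fixture("zwave_js/example_state.json"))

Both forms yield the same parsed dictionary; the typed helper simply spares each fixture from repeating the "zwave_js/" prefix and the json.loads boilerplate.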
-@pytest.fixture(name="siren_neo_coolcam_state") -def siren_neo_coolcam_state_state_fixture() -> NodeDataType: - """Load node with siren_neo_coolcam_state fixture data.""" - return cast( - NodeDataType, - load_json_object_fixture("siren_neo_coolcam_nas-ab01z_state.json", DOMAIN), - ) - - -@pytest.fixture(name="aeotec_smart_switch_7_state") -def aeotec_smart_switch_7_state_fixture() -> NodeDataType: - """Load node with fixture data for Aeotec Smart Switch 7.""" - return cast( - NodeDataType, - load_json_object_fixture("aeotec_smart_switch_7_state.json", DOMAIN), - ) + return json.loads(load_fixture("zwave_js/basic_cc_sensor_state.json")) # model fixtures @@ -568,7 +754,7 @@ def mock_client_fixture( @pytest.fixture(name="multisensor_6") -def multisensor_6_fixture(client, multisensor_6_state) -> Node: +def multisensor_6_fixture(client, multisensor_6_state): """Mock a multisensor 6 node.""" node = Node(client, copy.deepcopy(multisensor_6_state)) client.driver.controller.nodes[node.node_id] = node @@ -576,7 +762,7 @@ def multisensor_6_fixture(client, multisensor_6_state) -> Node: @pytest.fixture(name="ecolink_door_sensor") -def legacy_binary_sensor_fixture(client, ecolink_door_sensor_state) -> Node: +def legacy_binary_sensor_fixture(client, ecolink_door_sensor_state): """Mock a legacy_binary_sensor node.""" node = Node(client, copy.deepcopy(ecolink_door_sensor_state)) client.driver.controller.nodes[node.node_id] = node @@ -584,7 +770,7 @@ def legacy_binary_sensor_fixture(client, ecolink_door_sensor_state) -> Node: @pytest.fixture(name="hank_binary_switch") -def hank_binary_switch_fixture(client, hank_binary_switch_state) -> Node: +def hank_binary_switch_fixture(client, hank_binary_switch_state): """Mock a binary switch node.""" node = Node(client, copy.deepcopy(hank_binary_switch_state)) client.driver.controller.nodes[node.node_id] = node @@ -592,7 +778,7 @@ def hank_binary_switch_fixture(client, hank_binary_switch_state) -> Node: @pytest.fixture(name="bulb_6_multi_color") -def bulb_6_multi_color_fixture(client, bulb_6_multi_color_state) -> Node: +def bulb_6_multi_color_fixture(client, bulb_6_multi_color_state): """Mock a bulb 6 multi-color node.""" node = Node(client, copy.deepcopy(bulb_6_multi_color_state)) client.driver.controller.nodes[node.node_id] = node @@ -600,7 +786,7 @@ def bulb_6_multi_color_fixture(client, bulb_6_multi_color_state) -> Node: @pytest.fixture(name="light_color_null_values") -def light_color_null_values_fixture(client, light_color_null_values_state) -> Node: +def light_color_null_values_fixture(client, light_color_null_values_state): """Mock a node with current color value item being null.""" node = Node(client, copy.deepcopy(light_color_null_values_state)) client.driver.controller.nodes[node.node_id] = node @@ -608,7 +794,7 @@ def light_color_null_values_fixture(client, light_color_null_values_state) -> No @pytest.fixture(name="eaton_rf9640_dimmer") -def eaton_rf9640_dimmer_fixture(client, eaton_rf9640_dimmer_state) -> Node: +def eaton_rf9640_dimmer_fixture(client, eaton_rf9640_dimmer_state): """Mock a Eaton RF9640 (V4 compatible) dimmer node.""" node = Node(client, copy.deepcopy(eaton_rf9640_dimmer_state)) client.driver.controller.nodes[node.node_id] = node @@ -616,7 +802,7 @@ def eaton_rf9640_dimmer_fixture(client, eaton_rf9640_dimmer_state) -> Node: @pytest.fixture(name="lock_schlage_be469") -def lock_schlage_be469_fixture(client, lock_schlage_be469_state) -> Node: +def lock_schlage_be469_fixture(client, lock_schlage_be469_state): """Mock a schlage lock node.""" 
node = Node(client, copy.deepcopy(lock_schlage_be469_state)) client.driver.controller.nodes[node.node_id] = node @@ -624,7 +810,7 @@ def lock_schlage_be469_fixture(client, lock_schlage_be469_state) -> Node: @pytest.fixture(name="lock_august_pro") -def lock_august_asl03_fixture(client, lock_august_asl03_state) -> Node: +def lock_august_asl03_fixture(client, lock_august_asl03_state): """Mock a August Pro lock node.""" node = Node(client, copy.deepcopy(lock_august_asl03_state)) client.driver.controller.nodes[node.node_id] = node @@ -634,7 +820,7 @@ def lock_august_asl03_fixture(client, lock_august_asl03_state) -> Node: @pytest.fixture(name="climate_radio_thermostat_ct100_plus") def climate_radio_thermostat_ct100_plus_fixture( client, climate_radio_thermostat_ct100_plus_state -) -> Node: +): """Mock a climate radio thermostat ct100 plus node.""" node = Node(client, copy.deepcopy(climate_radio_thermostat_ct100_plus_state)) client.driver.controller.nodes[node.node_id] = node @@ -644,7 +830,7 @@ def climate_radio_thermostat_ct100_plus_fixture( @pytest.fixture(name="climate_radio_thermostat_ct100_plus_different_endpoints") def climate_radio_thermostat_ct100_plus_different_endpoints_fixture( client, climate_radio_thermostat_ct100_plus_different_endpoints_state -) -> Node: +): """Mock climate radio thermostat ct100 plus node w/ values on diff endpoints.""" node = Node( client, @@ -655,7 +841,7 @@ def climate_radio_thermostat_ct100_plus_different_endpoints_fixture( @pytest.fixture(name="climate_adc_t3000") -def climate_adc_t3000_fixture(client, climate_adc_t3000_state) -> Node: +def climate_adc_t3000_fixture(client, climate_adc_t3000_state): """Mock a climate ADC-T3000 node.""" node = Node(client, copy.deepcopy(climate_adc_t3000_state)) client.driver.controller.nodes[node.node_id] = node @@ -663,7 +849,7 @@ def climate_adc_t3000_fixture(client, climate_adc_t3000_state) -> Node: @pytest.fixture(name="climate_adc_t3000_missing_setpoint") -def climate_adc_t3000_missing_setpoint_fixture(client, climate_adc_t3000_state) -> Node: +def climate_adc_t3000_missing_setpoint_fixture(client, climate_adc_t3000_state): """Mock a climate ADC-T3000 node with missing de-humidify setpoint.""" data = copy.deepcopy(climate_adc_t3000_state) data["name"] = f"{data['name']} missing setpoint" @@ -679,7 +865,7 @@ def climate_adc_t3000_missing_setpoint_fixture(client, climate_adc_t3000_state) @pytest.fixture(name="climate_adc_t3000_missing_mode") -def climate_adc_t3000_missing_mode_fixture(client, climate_adc_t3000_state) -> Node: +def climate_adc_t3000_missing_mode_fixture(client, climate_adc_t3000_state): """Mock a climate ADC-T3000 node with missing mode setpoint.""" data = copy.deepcopy(climate_adc_t3000_state) data["name"] = f"{data['name']} missing mode" @@ -695,9 +881,7 @@ def climate_adc_t3000_missing_mode_fixture(client, climate_adc_t3000_state) -> N @pytest.fixture(name="climate_adc_t3000_missing_fan_mode_states") -def climate_adc_t3000_missing_fan_mode_states_fixture( - client, climate_adc_t3000_state -) -> Node: +def climate_adc_t3000_missing_fan_mode_states_fixture(client, climate_adc_t3000_state): """Mock ADC-T3000 node w/ missing 'states' metadata on Thermostat Fan Mode.""" data = copy.deepcopy(climate_adc_t3000_state) data["name"] = f"{data['name']} missing fan mode states" @@ -723,7 +907,7 @@ def climate_airzone_aidoo_control_hvac_unit_fixture( @pytest.fixture(name="climate_danfoss_lc_13") -def climate_danfoss_lc_13_fixture(client, climate_danfoss_lc_13_state) -> Node: +def 
climate_danfoss_lc_13_fixture(client, climate_danfoss_lc_13_state): """Mock a climate radio danfoss LC-13 node.""" node = Node(client, copy.deepcopy(climate_danfoss_lc_13_state)) client.driver.controller.nodes[node.node_id] = node @@ -731,9 +915,7 @@ def climate_danfoss_lc_13_fixture(client, climate_danfoss_lc_13_state) -> Node: @pytest.fixture(name="climate_eurotronic_spirit_z") -def climate_eurotronic_spirit_z_fixture( - client, climate_eurotronic_spirit_z_state -) -> Node: +def climate_eurotronic_spirit_z_fixture(client, climate_eurotronic_spirit_z_state): """Mock a climate Eurotronic Spirit Z node.""" node = Node(client, climate_eurotronic_spirit_z_state) client.driver.controller.nodes[node.node_id] = node @@ -741,7 +923,7 @@ def climate_eurotronic_spirit_z_fixture( @pytest.fixture(name="climate_heatit_z_trm6") -def climate_heatit_z_trm6_fixture(client, climate_heatit_z_trm6_state) -> Node: +def climate_heatit_z_trm6_fixture(client, climate_heatit_z_trm6_state): """Mock a climate radio HEATIT Z-TRM6 node.""" node = Node(client, copy.deepcopy(climate_heatit_z_trm6_state)) client.driver.controller.nodes[node.node_id] = node @@ -751,7 +933,7 @@ def climate_heatit_z_trm6_fixture(client, climate_heatit_z_trm6_state) -> Node: @pytest.fixture(name="climate_heatit_z_trm3_no_value") def climate_heatit_z_trm3_no_value_fixture( client, climate_heatit_z_trm3_no_value_state -) -> Node: +): """Mock a climate radio HEATIT Z-TRM3 node.""" node = Node(client, copy.deepcopy(climate_heatit_z_trm3_no_value_state)) client.driver.controller.nodes[node.node_id] = node @@ -759,7 +941,7 @@ def climate_heatit_z_trm3_no_value_fixture( @pytest.fixture(name="climate_heatit_z_trm3") -def climate_heatit_z_trm3_fixture(client, climate_heatit_z_trm3_state) -> Node: +def climate_heatit_z_trm3_fixture(client, climate_heatit_z_trm3_state): """Mock a climate radio HEATIT Z-TRM3 node.""" node = Node(client, copy.deepcopy(climate_heatit_z_trm3_state)) client.driver.controller.nodes[node.node_id] = node @@ -767,7 +949,7 @@ def climate_heatit_z_trm3_fixture(client, climate_heatit_z_trm3_state) -> Node: @pytest.fixture(name="climate_heatit_z_trm2fx") -def climate_heatit_z_trm2fx_fixture(client, climate_heatit_z_trm2fx_state) -> Node: +def climate_heatit_z_trm2fx_fixture(client, climate_heatit_z_trm2fx_state): """Mock a climate radio HEATIT Z-TRM2fx node.""" node = Node(client, copy.deepcopy(climate_heatit_z_trm2fx_state)) client.driver.controller.nodes[node.node_id] = node @@ -775,7 +957,7 @@ def climate_heatit_z_trm2fx_fixture(client, climate_heatit_z_trm2fx_state) -> No @pytest.fixture(name="nortek_thermostat") -def nortek_thermostat_fixture(client, nortek_thermostat_state) -> Node: +def nortek_thermostat_fixture(client, nortek_thermostat_state): """Mock a nortek thermostat node.""" node = Node(client, copy.deepcopy(nortek_thermostat_state)) client.driver.controller.nodes[node.node_id] = node @@ -783,7 +965,7 @@ def nortek_thermostat_fixture(client, nortek_thermostat_state) -> Node: @pytest.fixture(name="srt321_hrt4_zw") -def srt321_hrt4_zw_fixture(client, srt321_hrt4_zw_state) -> Node: +def srt321_hrt4_zw_fixture(client, srt321_hrt4_zw_state): """Mock an HRT4-ZW / SRT321 / SRT322 thermostat node.""" node = Node(client, copy.deepcopy(srt321_hrt4_zw_state)) client.driver.controller.nodes[node.node_id] = node @@ -791,9 +973,7 @@ def srt321_hrt4_zw_fixture(client, srt321_hrt4_zw_state) -> Node: @pytest.fixture(name="aeotec_radiator_thermostat") -def aeotec_radiator_thermostat_fixture( - client, aeotec_radiator_thermostat_state
-) -> Node: +def aeotec_radiator_thermostat_fixture(client, aeotec_radiator_thermostat_state): """Mock a Aeotec thermostat node.""" node = Node(client, aeotec_radiator_thermostat_state) client.driver.controller.nodes[node.node_id] = node @@ -801,23 +981,23 @@ def aeotec_radiator_thermostat_fixture( @pytest.fixture(name="nortek_thermostat_added_event") -def nortek_thermostat_added_event_fixture(client) -> Node: +def nortek_thermostat_added_event_fixture(client): """Mock a Nortek thermostat node added event.""" - event_data = load_json_object_fixture("nortek_thermostat_added_event.json", DOMAIN) + event_data = json.loads(load_fixture("zwave_js/nortek_thermostat_added_event.json")) return Event("node added", event_data) @pytest.fixture(name="nortek_thermostat_removed_event") -def nortek_thermostat_removed_event_fixture(client) -> Node: +def nortek_thermostat_removed_event_fixture(client): """Mock a Nortek thermostat node removed event.""" - event_data = load_json_object_fixture( - "nortek_thermostat_removed_event.json", DOMAIN + event_data = json.loads( + load_fixture("zwave_js/nortek_thermostat_removed_event.json") ) return Event("node removed", event_data) @pytest.fixture(name="integration") -async def integration_fixture(hass: HomeAssistant, client) -> MockConfigEntry: +async def integration_fixture(hass: HomeAssistant, client): """Set up the zwave_js integration.""" entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"}) entry.add_to_hass(hass) @@ -830,7 +1010,7 @@ async def integration_fixture(hass: HomeAssistant, client) -> MockConfigEntry: @pytest.fixture(name="chain_actuator_zws12") -def window_cover_fixture(client, chain_actuator_zws12_state) -> Node: +def window_cover_fixture(client, chain_actuator_zws12_state): """Mock a window cover node.""" node = Node(client, copy.deepcopy(chain_actuator_zws12_state)) client.driver.controller.nodes[node.node_id] = node @@ -838,7 +1018,7 @@ def window_cover_fixture(client, chain_actuator_zws12_state) -> Node: @pytest.fixture(name="fan_generic") -def fan_generic_fixture(client, fan_generic_state) -> Node: +def fan_generic_fixture(client, fan_generic_state): """Mock a fan node.""" node = Node(client, copy.deepcopy(fan_generic_state)) client.driver.controller.nodes[node.node_id] = node @@ -846,7 +1026,7 @@ def fan_generic_fixture(client, fan_generic_state) -> Node: @pytest.fixture(name="hs_fc200") -def hs_fc200_fixture(client, hs_fc200_state) -> Node: +def hs_fc200_fixture(client, hs_fc200_state): """Mock a fan node.""" node = Node(client, copy.deepcopy(hs_fc200_state)) client.driver.controller.nodes[node.node_id] = node @@ -854,7 +1034,7 @@ def hs_fc200_fixture(client, hs_fc200_state) -> Node: @pytest.fixture(name="leviton_zw4sf") -def leviton_zw4sf_fixture(client, leviton_zw4sf_state) -> Node: +def leviton_zw4sf_fixture(client, leviton_zw4sf_state): """Mock a fan node.""" node = Node(client, copy.deepcopy(leviton_zw4sf_state)) client.driver.controller.nodes[node.node_id] = node @@ -862,7 +1042,7 @@ def leviton_zw4sf_fixture(client, leviton_zw4sf_state) -> Node: @pytest.fixture(name="fan_honeywell_39358") -def fan_honeywell_39358_fixture(client, fan_honeywell_39358_state) -> Node: +def fan_honeywell_39358_fixture(client, fan_honeywell_39358_state): """Mock a fan node.""" node = Node(client, copy.deepcopy(fan_honeywell_39358_state)) client.driver.controller.nodes[node.node_id] = node @@ -870,7 +1050,7 @@ def fan_honeywell_39358_fixture(client, fan_honeywell_39358_state) -> Node: @pytest.fixture(name="null_name_check") -def 
null_name_check_fixture(client, null_name_check_state) -> Node: +def null_name_check_fixture(client, null_name_check_state): """Mock a node with no name.""" node = Node(client, copy.deepcopy(null_name_check_state)) client.driver.controller.nodes[node.node_id] = node @@ -878,7 +1058,7 @@ def null_name_check_fixture(client, null_name_check_state) -> Node: @pytest.fixture(name="gdc_zw062") -def motorized_barrier_cover_fixture(client, gdc_zw062_state) -> Node: +def motorized_barrier_cover_fixture(client, gdc_zw062_state): """Mock a motorized barrier node.""" node = Node(client, copy.deepcopy(gdc_zw062_state)) client.driver.controller.nodes[node.node_id] = node @@ -886,7 +1066,7 @@ def motorized_barrier_cover_fixture(client, gdc_zw062_state) -> Node: @pytest.fixture(name="iblinds_v2") -def iblinds_v2_cover_fixture(client, iblinds_v2_state) -> Node: +def iblinds_v2_cover_fixture(client, iblinds_v2_state): """Mock an iBlinds v2.0 window cover node.""" node = Node(client, copy.deepcopy(iblinds_v2_state)) client.driver.controller.nodes[node.node_id] = node @@ -894,23 +1074,15 @@ def iblinds_v2_cover_fixture(client, iblinds_v2_state) -> Node: @pytest.fixture(name="iblinds_v3") -def iblinds_v3_cover_fixture(client, iblinds_v3_state) -> Node: +def iblinds_v3_cover_fixture(client, iblinds_v3_state): """Mock an iBlinds v3 window cover node.""" node = Node(client, copy.deepcopy(iblinds_v3_state)) client.driver.controller.nodes[node.node_id] = node return node -@pytest.fixture(name="zvidar") -def zvidar_cover_fixture(client, zvidar_state) -> Node: - """Mock a ZVIDAR window cover node.""" - node = Node(client, copy.deepcopy(zvidar_state)) - client.driver.controller.nodes[node.node_id] = node - return node - - @pytest.fixture(name="qubino_shutter") -def qubino_shutter_cover_fixture(client, qubino_shutter_state) -> Node: +def qubino_shutter_cover_fixture(client, qubino_shutter_state): """Mock a Qubino flush shutter node.""" node = Node(client, copy.deepcopy(qubino_shutter_state)) client.driver.controller.nodes[node.node_id] = node @@ -918,7 +1090,7 @@ def qubino_shutter_cover_fixture(client, qubino_shutter_state) -> Node: @pytest.fixture(name="aeotec_nano_shutter") -def aeotec_nano_shutter_cover_fixture(client, aeotec_nano_shutter_state) -> Node: +def aeotec_nano_shutter_cover_fixture(client, aeotec_nano_shutter_state): """Mock a Aeotec Nano Shutter node.""" node = Node(client, copy.deepcopy(aeotec_nano_shutter_state)) client.driver.controller.nodes[node.node_id] = node @@ -926,7 +1098,7 @@ def aeotec_nano_shutter_cover_fixture(client, aeotec_nano_shutter_state) -> Node @pytest.fixture(name="fibaro_fgr222_shutter") -def fibaro_fgr222_shutter_cover_fixture(client, fibaro_fgr222_shutter_state) -> Node: +def fibaro_fgr222_shutter_cover_fixture(client, fibaro_fgr222_shutter_state): """Mock a Fibaro FGR222 Shutter node.""" node = Node(client, copy.deepcopy(fibaro_fgr222_shutter_state)) client.driver.controller.nodes[node.node_id] = node @@ -934,7 +1106,7 @@ def fibaro_fgr222_shutter_cover_fixture(client, fibaro_fgr222_shutter_state) -> @pytest.fixture(name="fibaro_fgr223_shutter") -def fibaro_fgr223_shutter_cover_fixture(client, fibaro_fgr223_shutter_state) -> Node: +def fibaro_fgr223_shutter_cover_fixture(client, fibaro_fgr223_shutter_state): """Mock a Fibaro FGR223 Shutter node.""" node = Node(client, copy.deepcopy(fibaro_fgr223_shutter_state)) client.driver.controller.nodes[node.node_id] = node @@ -944,7 +1116,7 @@ def fibaro_fgr223_shutter_cover_fixture(client, fibaro_fgr223_shutter_state) -> 
@pytest.fixture(name="shelly_qnsh_001P10_shutter") def shelly_qnsh_001P10_cover_shutter_fixture( client, shelly_europe_ltd_qnsh_001p10_state -) -> Node: +): """Mock a Shelly QNSH 001P10 Shutter node.""" node = Node(client, copy.deepcopy(shelly_europe_ltd_qnsh_001p10_state)) client.driver.controller.nodes[node.node_id] = node @@ -952,7 +1124,7 @@ def shelly_qnsh_001P10_cover_shutter_fixture( @pytest.fixture(name="merten_507801") -def merten_507801_cover_fixture(client, merten_507801_state) -> Node: +def merten_507801_cover_fixture(client, merten_507801_state): """Mock a Merten 507801 Shutter node.""" node = Node(client, copy.deepcopy(merten_507801_state)) client.driver.controller.nodes[node.node_id] = node @@ -960,7 +1132,7 @@ def merten_507801_cover_fixture(client, merten_507801_state) -> Node: @pytest.fixture(name="aeon_smart_switch_6") -def aeon_smart_switch_6_fixture(client, aeon_smart_switch_6_state) -> Node: +def aeon_smart_switch_6_fixture(client, aeon_smart_switch_6_state): """Mock an AEON Labs (ZW096) Smart Switch 6 node.""" node = Node(client, aeon_smart_switch_6_state) client.driver.controller.nodes[node.node_id] = node @@ -968,7 +1140,7 @@ def aeon_smart_switch_6_fixture(client, aeon_smart_switch_6_state) -> Node: @pytest.fixture(name="ge_12730") -def ge_12730_fixture(client, ge_12730_state) -> Node: +def ge_12730_fixture(client, ge_12730_state): """Mock a GE 12730 fan controller node.""" node = Node(client, copy.deepcopy(ge_12730_state)) client.driver.controller.nodes[node.node_id] = node @@ -976,7 +1148,7 @@ def ge_12730_fixture(client, ge_12730_state) -> Node: @pytest.fixture(name="inovelli_lzw36") -def inovelli_lzw36_fixture(client, inovelli_lzw36_state) -> Node: +def inovelli_lzw36_fixture(client, inovelli_lzw36_state): """Mock a Inovelli LZW36 fan controller node.""" node = Node(client, copy.deepcopy(inovelli_lzw36_state)) client.driver.controller.nodes[node.node_id] = node @@ -984,7 +1156,7 @@ def inovelli_lzw36_fixture(client, inovelli_lzw36_state) -> Node: @pytest.fixture(name="lock_id_lock_as_id150") -def lock_id_lock_as_id150_fixture(client, lock_id_lock_as_id150_state) -> Node: +def lock_id_lock_as_id150(client, lock_id_lock_as_id150_state): """Mock an id lock id-150 lock node.""" node = Node(client, copy.deepcopy(lock_id_lock_as_id150_state)) client.driver.controller.nodes[node.node_id] = node @@ -992,7 +1164,7 @@ def lock_id_lock_as_id150_fixture(client, lock_id_lock_as_id150_state) -> Node: @pytest.fixture(name="lock_id_lock_as_id150_not_ready") -def node_not_ready_fixture(client, lock_id_lock_as_id150_state) -> Node: +def node_not_ready(client, lock_id_lock_as_id150_state): """Mock an id lock id-150 lock node that's not ready.""" state = copy.deepcopy(lock_id_lock_as_id150_state) state["ready"] = False @@ -1004,7 +1176,7 @@ def node_not_ready_fixture(client, lock_id_lock_as_id150_state) -> Node: @pytest.fixture(name="climate_radio_thermostat_ct101_multiple_temp_units") def climate_radio_thermostat_ct101_multiple_temp_units_fixture( client, climate_radio_thermostat_ct101_multiple_temp_units_state -) -> Node: +): """Mock a climate device with multiple temp units node.""" node = Node( client, copy.deepcopy(climate_radio_thermostat_ct101_multiple_temp_units_state) @@ -1019,7 +1191,7 @@ def climate_radio_thermostat_ct101_multiple_temp_units_fixture( def climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_fixture( client, climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state, -) -> Node: +): """Mock a climate device with mode 
and setpoint on different endpoints node.""" node = Node( client, @@ -1032,7 +1204,7 @@ def climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_fixt @pytest.fixture(name="vision_security_zl7432") -def vision_security_zl7432_fixture(client, vision_security_zl7432_state) -> Node: +def vision_security_zl7432_fixture(client, vision_security_zl7432_state): """Mock a vision security zl7432 node.""" node = Node(client, copy.deepcopy(vision_security_zl7432_state)) client.driver.controller.nodes[node.node_id] = node @@ -1040,7 +1212,7 @@ def vision_security_zl7432_fixture(client, vision_security_zl7432_state) -> Node @pytest.fixture(name="zen_31") -def zen_31_fixture(client, zen_31_state) -> Node: +def zen_31_fixture(client, zen_31_state): """Mock a zen_31 node.""" node = Node(client, copy.deepcopy(zen_31_state)) client.driver.controller.nodes[node.node_id] = node @@ -1048,7 +1220,7 @@ def zen_31_fixture(client, zen_31_state) -> Node: @pytest.fixture(name="wallmote_central_scene") -def wallmote_central_scene_fixture(client, wallmote_central_scene_state) -> Node: +def wallmote_central_scene_fixture(client, wallmote_central_scene_state): """Mock a wallmote central scene node.""" node = Node(client, copy.deepcopy(wallmote_central_scene_state)) client.driver.controller.nodes[node.node_id] = node @@ -1056,7 +1228,7 @@ def wallmote_central_scene_fixture(client, wallmote_central_scene_state) -> Node @pytest.fixture(name="ge_in_wall_dimmer_switch") -def ge_in_wall_dimmer_switch_fixture(client, ge_in_wall_dimmer_switch_state) -> Node: +def ge_in_wall_dimmer_switch_fixture(client, ge_in_wall_dimmer_switch_state): """Mock a ge in-wall dimmer switch node.""" node = Node(client, copy.deepcopy(ge_in_wall_dimmer_switch_state)) client.driver.controller.nodes[node.node_id] = node @@ -1064,7 +1236,7 @@ def ge_in_wall_dimmer_switch_fixture(client, ge_in_wall_dimmer_switch_state) -> @pytest.fixture(name="aeotec_zw164_siren") -def aeotec_zw164_siren_fixture(client, aeotec_zw164_siren_state) -> Node: +def aeotec_zw164_siren_fixture(client, aeotec_zw164_siren_state): """Mock an aeotec zw164 siren node.""" node = Node(client, copy.deepcopy(aeotec_zw164_siren_state)) client.driver.controller.nodes[node.node_id] = node @@ -1074,7 +1246,7 @@ def aeotec_zw164_siren_fixture(client, aeotec_zw164_siren_state) -> Node: @pytest.fixture(name="lock_popp_electric_strike_lock_control") def lock_popp_electric_strike_lock_control_fixture( client, lock_popp_electric_strike_lock_control_state -) -> Node: +): """Mock a popp electric strike lock control node.""" node = Node(client, copy.deepcopy(lock_popp_electric_strike_lock_control_state)) client.driver.controller.nodes[node.node_id] = node @@ -1082,7 +1254,7 @@ def lock_popp_electric_strike_lock_control_fixture( @pytest.fixture(name="fortrezz_ssa1_siren") -def fortrezz_ssa1_siren_fixture(client, fortrezz_ssa1_siren_state) -> Node: +def fortrezz_ssa1_siren_fixture(client, fortrezz_ssa1_siren_state): """Mock a fortrezz ssa1 siren node.""" node = Node(client, copy.deepcopy(fortrezz_ssa1_siren_state)) client.driver.controller.nodes[node.node_id] = node @@ -1090,7 +1262,7 @@ def fortrezz_ssa1_siren_fixture(client, fortrezz_ssa1_siren_state) -> Node: @pytest.fixture(name="fortrezz_ssa3_siren") -def fortrezz_ssa3_siren_fixture(client, fortrezz_ssa3_siren_state) -> Node: +def fortrezz_ssa3_siren_fixture(client, fortrezz_ssa3_siren_state): """Mock a fortrezz ssa3 siren node.""" node = Node(client, copy.deepcopy(fortrezz_ssa3_siren_state))
client.driver.controller.nodes[node.node_id] = node @@ -1098,13 +1270,13 @@ def fortrezz_ssa3_siren_fixture(client, fortrezz_ssa3_siren_state) -> Node: @pytest.fixture(name="firmware_file") -def firmware_file_fixture() -> io.BytesIO: +def firmware_file_fixture(): """Return mock firmware file stream.""" return io.BytesIO(bytes(10)) @pytest.fixture(name="zp3111_not_ready") -def zp3111_not_ready_fixture(client, zp3111_not_ready_state) -> Node: +def zp3111_not_ready_fixture(client, zp3111_not_ready_state): """Mock a zp3111 4-in-1 sensor node in a not-ready state.""" node = Node(client, copy.deepcopy(zp3111_not_ready_state)) client.driver.controller.nodes[node.node_id] = node @@ -1112,7 +1284,7 @@ def zp3111_not_ready_fixture(client, zp3111_not_ready_state) -> Node: @pytest.fixture(name="zp3111") -def zp3111_fixture(client, zp3111_state) -> Node: +def zp3111_fixture(client, zp3111_state): """Mock a zp3111 4-in-1 sensor node.""" node = Node(client, copy.deepcopy(zp3111_state)) client.driver.controller.nodes[node.node_id] = node @@ -1120,9 +1292,7 @@ def zp3111_fixture(client, zp3111_state) -> Node: @pytest.fixture(name="express_controls_ezmultipli") -def express_controls_ezmultipli_fixture( - client, express_controls_ezmultipli_state -) -> Node: +def express_controls_ezmultipli_fixture(client, express_controls_ezmultipli_state): """Mock a Express Controls EZMultiPli node.""" node = Node(client, copy.deepcopy(express_controls_ezmultipli_state)) client.driver.controller.nodes[node.node_id] = node @@ -1130,7 +1300,7 @@ def express_controls_ezmultipli_fixture( @pytest.fixture(name="lock_home_connect_620") -def lock_home_connect_620_fixture(client, lock_home_connect_620_state) -> Node: +def lock_home_connect_620_fixture(client, lock_home_connect_620_state): """Mock a Home Connect 620 lock node.""" node = Node(client, copy.deepcopy(lock_home_connect_620_state)) client.driver.controller.nodes[node.node_id] = node @@ -1138,7 +1308,7 @@ def lock_home_connect_620_fixture(client, lock_home_connect_620_state) -> Node: @pytest.fixture(name="switch_zooz_zen72") -def switch_zooz_zen72_fixture(client, switch_zooz_zen72_state) -> Node: +def switch_zooz_zen72_fixture(client, switch_zooz_zen72_state): """Mock a Zooz Zen72 switch node.""" node = Node(client, copy.deepcopy(switch_zooz_zen72_state)) client.driver.controller.nodes[node.node_id] = node @@ -1146,7 +1316,7 @@ def switch_zooz_zen72_fixture(client, switch_zooz_zen72_state) -> Node: @pytest.fixture(name="indicator_test") -def indicator_test_fixture(client, indicator_test_state) -> Node: +def indicator_test_fixture(client, indicator_test_state): """Mock a indicator CC test node.""" node = Node(client, copy.deepcopy(indicator_test_state)) client.driver.controller.nodes[node.node_id] = node @@ -1154,7 +1324,7 @@ def indicator_test_fixture(client, indicator_test_state) -> Node: @pytest.fixture(name="energy_production") -def energy_production_fixture(client, energy_production_state) -> Node: +def energy_production_fixture(client, energy_production_state): """Mock a mock node with Energy Production CC.""" node = Node(client, copy.deepcopy(energy_production_state)) client.driver.controller.nodes[node.node_id] = node @@ -1162,7 +1332,7 @@ def energy_production_fixture(client, energy_production_state) -> Node: @pytest.fixture(name="nice_ibt4zwave") -def nice_ibt4zwave_fixture(client, nice_ibt4zwave_state) -> Node: +def nice_ibt4zwave_fixture(client, nice_ibt4zwave_state): """Mock a Nice IBT4ZWAVE cover node.""" node = Node(client, 
copy.deepcopy(nice_ibt4zwave_state)) client.driver.controller.nodes[node.node_id] = node @@ -1170,7 +1340,7 @@ def nice_ibt4zwave_fixture(client, nice_ibt4zwave_state) -> Node: @pytest.fixture(name="logic_group_zdb5100") -def logic_group_zdb5100_fixture(client, logic_group_zdb5100_state) -> Node: +def logic_group_zdb5100_fixture(client, logic_group_zdb5100_state): """Mock a ZDB5100 light node.""" node = Node(client, copy.deepcopy(logic_group_zdb5100_state)) client.driver.controller.nodes[node.node_id] = node @@ -1178,7 +1348,7 @@ def logic_group_zdb5100_fixture(client, logic_group_zdb5100_state) -> Node: @pytest.fixture(name="central_scene_node") -def central_scene_node_fixture(client, central_scene_node_state) -> Node: +def central_scene_node_fixture(client, central_scene_node_state): """Mock a node with the Central Scene CC.""" node = Node(client, copy.deepcopy(central_scene_node_state)) client.driver.controller.nodes[node.node_id] = node @@ -1186,9 +1356,7 @@ def central_scene_node_fixture(client, central_scene_node_state) -> Node: @pytest.fixture(name="light_device_class_is_null") -def light_device_class_is_null_fixture( - client, light_device_class_is_null_state -) -> Node: +def light_device_class_is_null_fixture(client, light_device_class_is_null_state): """Mock a node when device class is null.""" node = Node(client, copy.deepcopy(light_device_class_is_null_state)) client.driver.controller.nodes[node.node_id] = node @@ -1196,38 +1364,8 @@ def light_device_class_is_null_fixture( @pytest.fixture(name="basic_cc_sensor") -def basic_cc_sensor_fixture(client, basic_cc_sensor_state) -> Node: +def basic_cc_sensor_fixture(client, basic_cc_sensor_state): """Mock a node with a Basic CC.""" node = Node(client, copy.deepcopy(basic_cc_sensor_state)) client.driver.controller.nodes[node.node_id] = node return node - - -@pytest.fixture(name="window_covering_outbound_bottom") -def window_covering_outbound_bottom_fixture( - client, window_covering_outbound_bottom_state -) -> Node: - """Load node with Window Covering CC fixture data, with only the outbound bottom position supported.""" - node = Node(client, copy.deepcopy(window_covering_outbound_bottom_state)) - client.driver.controller.nodes[node.node_id] = node - return node - - -@pytest.fixture(name="siren_neo_coolcam") -def siren_neo_coolcam_fixture( - client: MagicMock, siren_neo_coolcam_state: NodeDataType -) -> Node: - """Load node for neo coolcam siren.""" - node = Node(client, siren_neo_coolcam_state) - client.driver.controller.nodes[node.node_id] = node - return node - - -@pytest.fixture(name="aeotec_smart_switch_7") -def aeotec_smart_switch_7_fixture( - client: MagicMock, aeotec_smart_switch_7_state: NodeDataType -) -> Node: - """Load node for Aeotec Smart Switch 7.""" - node = Node(client, aeotec_smart_switch_7_state) - client.driver.controller.nodes[node.node_id] = node - return node diff --git a/tests/components/zwave_js/fixtures/aeotec_smart_switch_7_state.json b/tests/components/zwave_js/fixtures/aeotec_smart_switch_7_state.json deleted file mode 100644 index ea7bbe8b16c..00000000000 --- a/tests/components/zwave_js/fixtures/aeotec_smart_switch_7_state.json +++ /dev/null @@ -1,1863 +0,0 @@ -{ - "nodeId": 9, - "index": 0, - "installerIcon": 1792, - "userIcon": 1792, - "status": 4, - "ready": true, - "isListening": true, - "isRouting": true, - "isSecure": true, - "manufacturerId": 881, - "productId": 175, - "productType": 3, - "firmwareVersion": "1.3", - "zwavePlusVersion": 1, - "deviceConfig": { - "filename": 
"/data/db/devices/0x0371/zw175.json", - "isEmbedded": true, - "manufacturer": "Aeotec Ltd.", - "manufacturerId": 881, - "label": "ZW175", - "description": "Smart Switch 7", - "devices": [ - { - "productType": 3, - "productId": 175 - } - ], - "firmwareVersion": { - "min": "0.0", - "max": "255.255" - }, - "preferred": false, - "associations": {}, - "paramInformation": { - "_map": {} - }, - "metadata": { - "inclusion": "This product supports Security 2 Command Class. While a Security S2 enabled Controller is needed in order to fully use the security feature. This product can be included and operated in any Z-Wave network with other Z-Wave certified devices from other manufacturers and/or other applications. All non-battery operated nodes within the network will act as repeaters regardless of vendor to increase reliability of the network.\n\n(1) SmartStart Learn Mode\nSmartStart enabled products can be added into a Z-Wave network by scanning the Z-Wave QR Code present on the product with a controller providing SmartStart inclusion. No further action is required and the SmartStart product will be added automatically within 10 minutes of being switched on in the network vicinity.\nIndicator Light will become flash white light for 1s indicating the product has been powered, and then become flash blue light indicating SmartStart Learn Mode starts. It will become constantly bright yellow light after being assigned a NodeID.\nIf Adding succeeds, it will bright blue light for 2s and become Load Indicator Mode.\nIf Adding fails, it will bright red light for 2s and turn back to breathing blue light and then start SmartStart Learn Mode again.\nNote:\nThe label of QR Code on the product and package are used for SmartStart Inclusion. The Z-Wave DSK Code is at bottom of the package. Please do not remove or damage them.\n\n(2) Classic Inclusion Learn Mode\n1. Set your Z-Wave Controller into its 'Add Device' mode in order to add the product into your Z-Wave system. Refer to the Controller's manual if you are unsure of how to perform this step.\n2. Make sure the product is powered. If not, plug it into a wall socket and power on; its LED will be breathing blue light all the time. \n3. Click Action Button once, it will quickly flash blue light for 30 seconds until it is added into the network. It will become constantly bright yellow light after being assigned a NodeID.\n4. If your Z-Wave Controller supports S2 encryption, enter the first 5 digits of DSK into your Controller's interface if /when requested. The DSK is printed on its housing.\n5. If Adding fails, it will bright red light for 2s and then become breathing blue light; repeat steps 1 to 4. Contact us for further support if needed.\n6. If Adding succeeds, it will bright blue light for 2s and then turn to Load Indicator Mode. Now, this product is a part of your Z-Wave home control system. You can configure it and its automations via your Z-Wave system; please refer to your software's user guide for precise instructions.\nNote:\nIf Action Button is clicked again during the Classic Inclusion Learn Mode, the Classic Inclusion Learn Mode will exit. At the same time, Indicator Light will bright red light for 2s, and then become breathing blue light", - "exclusion": "1. Set your Z-Wave Controller into its 'Remove Device' mode in order to remove the product from your Z-Wave system. Refer to the Controller's manual if you are unsure of how to perform this step.\n2. Make sure the product is powered. If not, plug it into a wall socket and power on. \n3. 
Click Action Button 2 times quickly; it will bright violet light, up to 2s.\n4. If Removing fails, it will bright red light for 2s and then turn back to Load Indicator Mode; repeat steps 1 to 3. Contact us for further support if needed.\n5. If Removing succeeds, it will become breathing blue light. Now, it is removed from Z-Wave network successfully", - "reset": "If the primary controller is missing or inoperable, you may need to reset the device to factory settings.\nMake sure the product is powered. If not, plug it into a wall socket and power on. To complete the reset process manually, press and hold the Action Button for at least 15s and then release. The LED indicator will become breathing blue light, which indicates the reset operation is successful. Otherwise, please try again. Contact us for further support if needed. \nNote: \n1. This procedure should only be used when the primary controller is missing or inoperable.\n2. Factory Reset will:\n(a) Remove the product from Z-Wave network;\n(b) Delete the Association setting;\n(c) Restore the configuration settings to the default.", - "manual": "https://products.z-wavealliance.org/ProductManual/File?folder=&filename=MarketCertificationFiles/3437/Smart%20Switch%207%20product%20manual.pdf" - } - }, - "label": "ZW175", - "interviewAttempts": 1, - "isFrequentListening": false, - "maxDataRate": 100000, - "supportedDataRates": [40000, 100000], - "protocolVersion": 3, - "supportsBeaming": true, - "supportsSecurity": false, - "nodeType": 1, - "zwavePlusNodeType": 0, - "zwavePlusRoleType": 5, - "deviceClass": { - "basic": { - "key": 4, - "label": "Routing End Node" - }, - "generic": { - "key": 16, - "label": "Binary Switch" - }, - "specific": { - "key": 1, - "label": "Binary Power Switch" - } - }, - "interviewStage": "Complete", - "deviceDatabaseUrl": "https://devices.zwave-js.io/?jumpTo=0x0371:0x0003:0x00af:1.3", - "statistics": { - "commandsTX": 221, - "commandsRX": 1452, - "commandsDroppedRX": 22, - "commandsDroppedTX": 0, - "timeoutResponse": 3, - "rtt": 29.9, - "lastSeen": "2024-10-01T13:21:14.968Z" - }, - "highestSecurityClass": 1, - "isControllerNode": false, - "keepAwake": false, - "lastSeen": "2024-10-01T13:12:41.805Z", - "protocol": 0, - "values": [ - { - "endpoint": 0, - "commandClass": 37, - "commandClassName": "Binary Switch", - "property": "currentValue", - "propertyName": "currentValue", - "ccVersion": 1, - "metadata": { - "type": "boolean", - "readable": true, - "writeable": false, - "label": "Current value", - "stateful": true, - "secret": false - }, - "value": true - }, - { - "endpoint": 0, - "commandClass": 37, - "commandClassName": "Binary Switch", - "property": "targetValue", - "propertyName": "targetValue", - "ccVersion": 1, - "metadata": { - "type": "boolean", - "readable": true, - "writeable": true, - "label": "Target value", - "valueChangeOptions": ["transitionDuration"], - "stateful": true, - "secret": false - }, - "value": true - }, - { - "endpoint": 0, - "commandClass": 38, - "commandClassName": "Multilevel Switch", - "property": "targetValue", - "propertyName": "targetValue", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Target value", - "valueChangeOptions": ["transitionDuration"], - "min": 0, - "max": 99, - "stateful": true, - "secret": false - }, - "value": 50 - }, - { - "endpoint": 0, - "commandClass": 38, - "commandClassName": "Multilevel Switch", - "property": "currentValue", - "propertyName": "currentValue", - "ccVersion": 2, - "metadata": { - 
"type": "number", - "readable": true, - "writeable": false, - "label": "Current value", - "min": 0, - "max": 99, - "stateful": true, - "secret": false - }, - "value": 50 - }, - { - "endpoint": 0, - "commandClass": 38, - "commandClassName": "Multilevel Switch", - "property": "Up", - "propertyName": "Up", - "ccVersion": 2, - "metadata": { - "type": "boolean", - "readable": false, - "writeable": true, - "label": "Perform a level change (Up)", - "ccSpecific": { - "switchType": 2 - }, - "valueChangeOptions": ["transitionDuration"], - "states": { - "true": "Start", - "false": "Stop" - }, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 38, - "commandClassName": "Multilevel Switch", - "property": "Down", - "propertyName": "Down", - "ccVersion": 2, - "metadata": { - "type": "boolean", - "readable": false, - "writeable": true, - "label": "Perform a level change (Down)", - "ccSpecific": { - "switchType": 2 - }, - "valueChangeOptions": ["transitionDuration"], - "states": { - "true": "Start", - "false": "Stop" - }, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 38, - "commandClassName": "Multilevel Switch", - "property": "duration", - "propertyName": "duration", - "ccVersion": 2, - "metadata": { - "type": "duration", - "readable": true, - "writeable": false, - "label": "Remaining duration", - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 38, - "commandClassName": "Multilevel Switch", - "property": "restorePrevious", - "propertyName": "restorePrevious", - "ccVersion": 2, - "metadata": { - "type": "boolean", - "readable": false, - "writeable": true, - "label": "Restore previous value", - "states": { - "true": "Restore" - }, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 43, - "commandClassName": "Scene Activation", - "property": "sceneId", - "propertyName": "sceneId", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Scene ID", - "valueChangeOptions": ["transitionDuration"], - "min": 1, - "max": 255, - "stateful": false, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 43, - "commandClassName": "Scene Activation", - "property": "dimmingDuration", - "propertyName": "dimmingDuration", - "ccVersion": 1, - "metadata": { - "type": "duration", - "readable": true, - "writeable": true, - "label": "Dimming duration", - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 50, - "commandClassName": "Meter", - "property": "value", - "propertyKey": 65537, - "propertyName": "value", - "propertyKeyName": "Electric_kWh_Consumed", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Electric Consumption [kWh]", - "ccSpecific": { - "meterType": 1, - "scale": 0, - "rateType": 1 - }, - "unit": "kWh", - "stateful": true, - "secret": false - }, - "value": 1.259 - }, - { - "endpoint": 0, - "commandClass": 50, - "commandClassName": "Meter", - "property": "value", - "propertyKey": 66049, - "propertyName": "value", - "propertyKeyName": "Electric_W_Consumed", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Electric Consumption [W]", - "ccSpecific": { - "meterType": 1, - "scale": 2, - "rateType": 1 - }, - "unit": "W", - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 50, - "commandClassName": "Meter", - "property": 
"value", - "propertyKey": 66561, - "propertyName": "value", - "propertyKeyName": "Electric_V_Consumed", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Electric Consumption [V]", - "ccSpecific": { - "meterType": 1, - "scale": 4, - "rateType": 1 - }, - "unit": "V", - "stateful": true, - "secret": false - }, - "value": 232.895 - }, - { - "endpoint": 0, - "commandClass": 50, - "commandClassName": "Meter", - "property": "value", - "propertyKey": 66817, - "propertyName": "value", - "propertyKeyName": "Electric_A_Consumed", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Electric Consumption [A]", - "ccSpecific": { - "meterType": 1, - "scale": 5, - "rateType": 1 - }, - "unit": "A", - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 50, - "commandClassName": "Meter", - "property": "reset", - "propertyName": "reset", - "ccVersion": 4, - "metadata": { - "type": "boolean", - "readable": false, - "writeable": true, - "label": "Reset accumulated values", - "states": { - "true": "Reset" - }, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 51, - "commandClassName": "Color Switch", - "property": "currentColor", - "propertyKey": 2, - "propertyName": "currentColor", - "propertyKeyName": "Red", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "description": "The current value of the Red channel.", - "label": "Current value (Red)", - "min": 0, - "max": 255, - "stateful": true, - "secret": false - }, - "value": 255 - }, - { - "endpoint": 0, - "commandClass": 51, - "commandClassName": "Color Switch", - "property": "currentColor", - "propertyKey": 3, - "propertyName": "currentColor", - "propertyKeyName": "Green", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "description": "The current value of the Green channel.", - "label": "Current value (Green)", - "min": 0, - "max": 255, - "stateful": true, - "secret": false - }, - "value": 251 - }, - { - "endpoint": 0, - "commandClass": 51, - "commandClassName": "Color Switch", - "property": "currentColor", - "propertyKey": 4, - "propertyName": "currentColor", - "propertyKeyName": "Blue", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "description": "The current value of the Blue channel.", - "label": "Current value (Blue)", - "min": 0, - "max": 255, - "stateful": true, - "secret": false - }, - "value": 246 - }, - { - "endpoint": 0, - "commandClass": 51, - "commandClassName": "Color Switch", - "property": "currentColor", - "propertyName": "currentColor", - "ccVersion": 1, - "metadata": { - "type": "any", - "readable": true, - "writeable": false, - "label": "Current color", - "stateful": true, - "secret": false - }, - "value": { - "red": 255, - "green": 251, - "blue": 246 - } - }, - { - "endpoint": 0, - "commandClass": 51, - "commandClassName": "Color Switch", - "property": "targetColor", - "propertyName": "targetColor", - "ccVersion": 1, - "metadata": { - "type": "any", - "readable": true, - "writeable": true, - "label": "Target color", - "valueChangeOptions": ["transitionDuration"], - "stateful": true, - "secret": false - }, - "value": { - "red": 255, - "green": 251, - "blue": 246 - } - }, - { - "endpoint": 0, - "commandClass": 51, - "commandClassName": "Color Switch", - "property": "hexColor", - "propertyName": 
"hexColor", - "ccVersion": 1, - "metadata": { - "type": "color", - "readable": true, - "writeable": true, - "label": "RGB Color", - "valueChangeOptions": ["transitionDuration"], - "minLength": 6, - "maxLength": 7, - "stateful": true, - "secret": false - }, - "value": "fffbf6" - }, - { - "endpoint": 0, - "commandClass": 51, - "commandClassName": "Color Switch", - "property": "targetColor", - "propertyKey": 2, - "propertyName": "targetColor", - "propertyKeyName": "Red", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "description": "The target value of the Red channel.", - "label": "Target value (Red)", - "valueChangeOptions": ["transitionDuration"], - "min": 0, - "max": 255, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 51, - "commandClassName": "Color Switch", - "property": "targetColor", - "propertyKey": 3, - "propertyName": "targetColor", - "propertyKeyName": "Green", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "description": "The target value of the Green channel.", - "label": "Target value (Green)", - "valueChangeOptions": ["transitionDuration"], - "min": 0, - "max": 255, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 51, - "commandClassName": "Color Switch", - "property": "targetColor", - "propertyKey": 4, - "propertyName": "targetColor", - "propertyKeyName": "Blue", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "description": "The target value of the Blue channel.", - "label": "Target value (Blue)", - "valueChangeOptions": ["transitionDuration"], - "min": 0, - "max": 255, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 51, - "commandClassName": "Color Switch", - "property": "duration", - "propertyName": "duration", - "ccVersion": 1, - "metadata": { - "type": "duration", - "readable": true, - "writeable": false, - "label": "Remaining duration", - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 4, - "propertyName": "Current Overload Protection Threshold", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Current Overload Protection Threshold", - "default": 2415, - "min": 0, - "max": 2415, - "states": { - "0": "Disable" - }, - "unit": "W", - "valueSize": 2, - "format": 1, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 2415 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 9, - "propertyKey": 1, - "propertyName": "Alarm Trigger State", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Alarm Trigger State", - "default": 0, - "min": 0, - "max": 1, - "states": { - "0": "Trigger on open state", - "1": "Trigger on closed state" - }, - "valueSize": 2, - "format": 0, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 9, - "propertyKey": 256, - "propertyName": "React to Alarm Type: Smoke Alarms", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "React to Alarm Type: Smoke Alarms", - "default": 0, - "min": 0, - "max": 1, - "states": { - "0": "Disable", - "1": "Enable" - }, - 
"valueSize": 2, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 9, - "propertyKey": 512, - "propertyName": "React to Alarm Type: CO Alarms", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "React to Alarm Type: CO Alarms", - "default": 0, - "min": 0, - "max": 1, - "states": { - "0": "Disable", - "1": "Enable" - }, - "valueSize": 2, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 9, - "propertyKey": 1024, - "propertyName": "React to Alarm Type: CO2 Alarms", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "description": "React to CO2 Alarms from other Z-Wave devices.", - "label": "React to Alarm Type: CO2 Alarms", - "default": 0, - "min": 0, - "max": 1, - "states": { - "0": "Disable", - "1": "Enable" - }, - "valueSize": 2, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 9, - "propertyKey": 2048, - "propertyName": "React to Alarm Type: Heart Alarms", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "React to Alarm Type: Heart Alarms", - "default": 0, - "min": 0, - "max": 1, - "states": { - "0": "Disable", - "1": "Enable" - }, - "valueSize": 2, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 9, - "propertyKey": 4096, - "propertyName": "React to Alarm Type: Water Alarms", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "React to Alarm Type: Water Alarms", - "default": 0, - "min": 0, - "max": 1, - "states": { - "0": "Disable", - "1": "Enable" - }, - "valueSize": 2, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 9, - "propertyKey": 8192, - "propertyName": "React to Alarm Type: Access Control Alarms", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "React to Alarm Type: Access Control Alarms", - "default": 0, - "min": 0, - "max": 1, - "states": { - "0": "Disable", - "1": "Enable" - }, - "valueSize": 2, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 9, - "propertyKey": 16384, - "propertyName": "React to Alarm Type: Home Security Alarms", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "React to Alarm Type: Home Security Alarms", - "default": 0, - "min": 0, - "max": 1, - "states": { - "0": "Disable", - "1": "Enable" - }, - "valueSize": 2, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 8, - "propertyName": "Switch Action on Alarm", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - 
"writeable": true, - "label": "Switch Action on Alarm", - "default": 0, - "min": 0, - "max": 3, - "states": { - "0": "Disable", - "1": "Turn on", - "2": "Turn off", - "3": "Cyclce on/off in 5 second intervals" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 10, - "propertyName": "Method to Disable Alarm", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "description": "Allowable range: 10-255 - Sets the method to disable the alarm or alarm duration", - "label": "Method to Disable Alarm", - "default": 0, - "min": 0, - "max": 255, - "states": { - "0": "Tap action button 3x", - "1": "Idle state from corresponding alarm" - }, - "unit": "seconds", - "valueSize": 2, - "format": 0, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 18, - "propertyName": "LED Blinking Frequency", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "LED Blinking Frequency", - "default": 2, - "min": 0, - "max": 9, - "unit": "Hz", - "valueSize": 1, - "format": 0, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 2 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 20, - "propertyName": "State After Power Failure", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "State After Power Failure", - "default": 0, - "min": 0, - "max": 2, - "states": { - "0": "Previous state", - "1": "Always on", - "2": "Always off" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 80, - "propertyName": "Report Type To Send", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Report Type To Send", - "default": 2, - "min": 0, - "max": 2, - "states": { - "0": "Disable", - "1": "Basic CC Report", - "2": "Binary Switch CC Report" - }, - "valueSize": 1, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 2 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 81, - "propertyName": "LED Indicator", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "LED Indicator", - "default": 2, - "min": 0, - "max": 2, - "states": { - "0": "Disable", - "1": "Night light mode", - "2": "On/off mode" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 2 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 82, - "propertyKey": 4278190080, - "propertyName": "Night Light (Enable): Hour", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "description": "Allowable range: 0-23", - "label": "Night Light (Enable): Hour", - "default": 18, - "min": 0, - "max": 23, - "valueSize": 4, - "format": 0, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 18 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 82, - "propertyKey": 
16711680, - "propertyName": "Night Light (Enable): Minute", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "description": "Allowable range: 0-59", - "label": "Night Light (Enable): Minute", - "default": 0, - "min": 0, - "max": 59, - "valueSize": 4, - "format": 0, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 82, - "propertyKey": 65280, - "propertyName": "Night Light (Disable): Hour", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "description": "Allowable range: 0-23", - "label": "Night Light (Disable): Hour", - "default": 8, - "min": 0, - "max": 23, - "valueSize": 4, - "format": 0, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 8 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 82, - "propertyKey": 255, - "propertyName": "Night Light (Disable): Minute", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "description": "Allowable range: 0-59", - "label": "Night Light (Disable): Minute", - "default": 0, - "min": 0, - "max": 59, - "valueSize": 4, - "format": 0, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 91, - "propertyName": "Power Change Threshold", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "description": "Threshold change in power consumption to induce an automatic report", - "label": "Power Change Threshold", - "default": 0, - "min": 0, - "max": 2300, - "states": { - "0": "Disable" - }, - "unit": "W", - "valueSize": 2, - "format": 1, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 92, - "propertyName": "Power (kWh) Change Threshold", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Power (kWh) Change Threshold", - "default": 0, - "min": 0, - "max": 10000, - "states": { - "0": "Disable" - }, - "unit": "KwH", - "valueSize": 2, - "format": 1, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 93, - "propertyName": "Current Change Threshold", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Current Change Threshold", - "default": 0, - "min": 0, - "max": 100, - "states": { - "0": "Disable" - }, - "unit": "A", - "valueSize": 1, - "format": 1, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 101, - "propertyKey": 1, - "propertyName": "Automatic Report: kWh", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Automatic Report: kWh", - "default": 1, - "min": 0, - "max": 1, - "states": { - "0": "Disable", - "1": "Enable" - }, - "valueSize": 4, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 1 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 101, - "propertyKey": 2, - 
"propertyName": "Automatic Report: Power", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Automatic Report: Power", - "default": 1, - "min": 0, - "max": 1, - "states": { - "0": "Disable", - "1": "Enable" - }, - "valueSize": 4, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 1 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 101, - "propertyKey": 4, - "propertyName": "Automatic Report: Voltage", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Automatic Report: Voltage", - "default": 1, - "min": 0, - "max": 1, - "states": { - "0": "Disable", - "1": "Enable" - }, - "valueSize": 4, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 1 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 101, - "propertyKey": 8, - "propertyName": "Automatic Report: Current", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Automatic Report: Current", - "default": 1, - "min": 0, - "max": 1, - "states": { - "0": "Disable", - "1": "Enable" - }, - "valueSize": 4, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 1 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 111, - "propertyName": "Automatic Reporting Interval", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Automatic Reporting Interval", - "default": 600, - "min": 0, - "max": 2592000, - "states": { - "0": "Disable" - }, - "unit": "seconds", - "valueSize": 4, - "format": 0, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 600 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 19, - "propertyName": "LED Blink Duration", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": false, - "writeable": true, - "label": "LED Blink Duration", - "default": 0, - "min": 0, - "max": 255, - "unit": "seconds", - "valueSize": 2, - "format": 1, - "allowManualEntry": true, - "isFromConfig": true - } - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 255, - "propertyName": "Reset to Factory Default Setting", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": false, - "writeable": true, - "label": "Reset to Factory Default Setting", - "default": 0, - "min": 0, - "max": 1431655765, - "states": { - "0": "Normal Operation", - "1": "Resets all configuration parameters to default setting", - "1431655765": "Reset the product to factory default setting and exclude from Z-Wave network" - }, - "valueSize": 4, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - } - }, - { - "endpoint": 0, - "commandClass": 113, - "commandClassName": "Notification", - "property": "Power Management", - "propertyKey": "Over-current status", - "propertyName": "Power Management", - "propertyKeyName": "Over-current status", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Over-current status", - "ccSpecific": { - "notificationType": 8 - }, - "min": 0, - "max": 255, - "states": { - "0": "idle", - "6": "Over-current detected" - }, - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - 
"commandClass": 113, - "commandClassName": "Notification", - "property": "Power Management", - "propertyKey": "Over-load status", - "propertyName": "Power Management", - "propertyKeyName": "Over-load status", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Over-load status", - "ccSpecific": { - "notificationType": 8 - }, - "min": 0, - "max": 255, - "states": { - "0": "idle", - "8": "Over-load detected" - }, - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 113, - "commandClassName": "Notification", - "property": "System", - "propertyKey": "Hardware status", - "propertyName": "System", - "propertyKeyName": "Hardware status", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Hardware status", - "ccSpecific": { - "notificationType": 9 - }, - "min": 0, - "max": 255, - "states": { - "0": "idle", - "3": "System hardware failure (with failure code)" - }, - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 113, - "commandClassName": "Notification", - "property": "alarmType", - "propertyName": "alarmType", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Alarm Type", - "min": 0, - "max": 255, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 113, - "commandClassName": "Notification", - "property": "alarmLevel", - "propertyName": "alarmLevel", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Alarm Level", - "min": 0, - "max": 255, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 114, - "commandClassName": "Manufacturer Specific", - "property": "manufacturerId", - "propertyName": "manufacturerId", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Manufacturer ID", - "min": 0, - "max": 65535, - "stateful": true, - "secret": false - }, - "value": 881 - }, - { - "endpoint": 0, - "commandClass": 114, - "commandClassName": "Manufacturer Specific", - "property": "productType", - "propertyName": "productType", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Product type", - "min": 0, - "max": 65535, - "stateful": true, - "secret": false - }, - "value": 3 - }, - { - "endpoint": 0, - "commandClass": 114, - "commandClassName": "Manufacturer Specific", - "property": "productId", - "propertyName": "productId", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Product ID", - "min": 0, - "max": 65535, - "stateful": true, - "secret": false - }, - "value": 175 - }, - { - "endpoint": 0, - "commandClass": 117, - "commandClassName": "Protection", - "property": "local", - "propertyName": "local", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Local protection state", - "states": { - "0": "Unprotected", - "2": "NoOperationPossible" - }, - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 117, - "commandClassName": "Protection", - "property": "rf", - "propertyName": "rf", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "RF protection state", - "states": { - "0": "Unprotected", - "1": 
"NoControl" - }, - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 117, - "commandClassName": "Protection", - "property": "exclusiveControlNodeId", - "propertyName": "exclusiveControlNodeId", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Node ID with exclusive control", - "min": 1, - "max": 232, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 117, - "commandClassName": "Protection", - "property": "timeout", - "propertyName": "timeout", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "RF protection timeout", - "min": 0, - "max": 255, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "libraryType", - "propertyName": "libraryType", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Library type", - "states": { - "0": "Unknown", - "1": "Static Controller", - "2": "Controller", - "3": "Enhanced Slave", - "4": "Slave", - "5": "Installer", - "6": "Routing Slave", - "7": "Bridge Controller", - "8": "Device under Test", - "9": "N/A", - "10": "AV Remote", - "11": "AV Device" - }, - "stateful": true, - "secret": false - }, - "value": 3 - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "protocolVersion", - "propertyName": "protocolVersion", - "ccVersion": 2, - "metadata": { - "type": "string", - "readable": true, - "writeable": false, - "label": "Z-Wave protocol version", - "stateful": true, - "secret": false - }, - "value": "6.4" - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "firmwareVersions", - "propertyName": "firmwareVersions", - "ccVersion": 2, - "metadata": { - "type": "string[]", - "readable": true, - "writeable": false, - "label": "Z-Wave chip firmware versions", - "stateful": true, - "secret": false - }, - "value": ["1.3"] - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "hardwareVersion", - "propertyName": "hardwareVersion", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Z-Wave chip hardware version", - "stateful": true, - "secret": false - }, - "value": 175 - } - ], - "endpoints": [ - { - "nodeId": 9, - "index": 0, - "installerIcon": 1792, - "userIcon": 1792, - "deviceClass": { - "basic": { - "key": 4, - "label": "Routing End Node" - }, - "generic": { - "key": 16, - "label": "Binary Switch" - }, - "specific": { - "key": 1, - "label": "Binary Power Switch" - } - }, - "commandClasses": [ - { - "id": 94, - "name": "Z-Wave Plus Info", - "version": 2, - "isSecure": false - }, - { - "id": 85, - "name": "Transport Service", - "version": 2, - "isSecure": false - }, - { - "id": 152, - "name": "Security", - "version": 1, - "isSecure": true - }, - { - "id": 159, - "name": "Security 2", - "version": 1, - "isSecure": true - }, - { - "id": 108, - "name": "Supervision", - "version": 1, - "isSecure": false - }, - { - "id": 133, - "name": "Association", - "version": 2, - "isSecure": true - }, - { - "id": 89, - "name": "Association Group Information", - "version": 1, - "isSecure": true - }, - { - "id": 112, - "name": "Configuration", - "version": 1, - "isSecure": true - }, - { - "id": 44, - "name": "Scene Actuator Configuration", - "version": 1, - "isSecure": 
true - }, - { - "id": 43, - "name": "Scene Activation", - "version": 1, - "isSecure": true - }, - { - "id": 129, - "name": "Clock", - "version": 1, - "isSecure": true - }, - { - "id": 113, - "name": "Notification", - "version": 4, - "isSecure": true - }, - { - "id": 50, - "name": "Meter", - "version": 4, - "isSecure": true - }, - { - "id": 37, - "name": "Binary Switch", - "version": 1, - "isSecure": true - }, - { - "id": 51, - "name": "Color Switch", - "version": 1, - "isSecure": true - }, - { - "id": 38, - "name": "Multilevel Switch", - "version": 2, - "isSecure": true - }, - { - "id": 117, - "name": "Protection", - "version": 2, - "isSecure": true - }, - { - "id": 115, - "name": "Powerlevel", - "version": 1, - "isSecure": true - }, - { - "id": 122, - "name": "Firmware Update Meta Data", - "version": 4, - "isSecure": true - }, - { - "id": 134, - "name": "Version", - "version": 2, - "isSecure": true - }, - { - "id": 90, - "name": "Device Reset Locally", - "version": 1, - "isSecure": true - }, - { - "id": 114, - "name": "Manufacturer Specific", - "version": 2, - "isSecure": true - } - ] - } - ] -} diff --git a/tests/components/zwave_js/fixtures/cover_zvidar_state.json b/tests/components/zwave_js/fixtures/cover_zvidar_state.json deleted file mode 100644 index 05118931026..00000000000 --- a/tests/components/zwave_js/fixtures/cover_zvidar_state.json +++ /dev/null @@ -1,1120 +0,0 @@ -{ - "nodeId": 270, - "index": 0, - "installerIcon": 6656, - "userIcon": 6656, - "status": 4, - "ready": true, - "isListening": false, - "isRouting": false, - "isSecure": true, - "manufacturerId": 1114, - "productId": 1287, - "productType": 2308, - "firmwareVersion": "1.10.0", - "zwavePlusVersion": 2, - "name": "Window Blind Controller", - "location": "**REDACTED**", - "deviceConfig": { - "filename": "/snapshot/build/node_modules/@zwave-js/config/config/devices/0x045a/Z-CM-V01.json", - "isEmbedded": true, - "manufacturer": "ZVIDAR", - "manufacturerId": 1114, - "label": "Z-CM-V01", - "description": "Smart Curtain Motor", - "devices": [ - { - "productType": 2308, - "productId": 1287 - } - ], - "firmwareVersion": { - "min": "0.0", - "max": "255.255" - }, - "preferred": false, - "paramInformation": { - "_map": {} - }, - "compat": { - "removeCCs": {} - } - }, - "label": "Z-CM-V01", - "interviewAttempts": 0, - "isFrequentListening": "1000ms", - "maxDataRate": 100000, - "supportedDataRates": [100000], - "protocolVersion": 3, - "supportsBeaming": false, - "supportsSecurity": true, - "nodeType": 1, - "zwavePlusNodeType": 0, - "zwavePlusRoleType": 7, - "deviceClass": { - "basic": { - "key": 3, - "label": "End Node" - }, - "generic": { - "key": 17, - "label": "Multilevel Switch" - }, - "specific": { - "key": 0, - "label": "Unused" - } - }, - "interviewStage": "Complete", - "deviceDatabaseUrl": "https://devices.zwave-js.io/?jumpTo=0x045a:0x0904:0x0507:1.10.0", - "statistics": { - "commandsTX": 2, - "commandsRX": 1, - "commandsDroppedRX": 1, - "commandsDroppedTX": 0, - "timeoutResponse": 0, - "rtt": 357.6, - "lastSeen": "2024-07-21T16:42:38.086Z", - "rssi": -89, - "lwr": { - "protocolDataRate": 4, - "repeaters": [], - "rssi": -91, - "repeaterRSSI": [] - } - }, - "highestSecurityClass": 1, - "isControllerNode": false, - "keepAwake": false, - "lastSeen": "2024-07-21T16:42:38.086Z", - "protocol": 1, - "values": [ - { - "endpoint": 0, - "commandClass": 38, - "commandClassName": "Multilevel Switch", - "property": "targetValue", - "propertyName": "targetValue", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, 
- "writeable": true, - "label": "Target value", - "valueChangeOptions": ["transitionDuration"], - "min": 0, - "max": 99, - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 38, - "commandClassName": "Multilevel Switch", - "property": "duration", - "propertyName": "duration", - "ccVersion": 4, - "metadata": { - "type": "duration", - "readable": true, - "writeable": false, - "label": "Remaining duration", - "stateful": true, - "secret": false - }, - "value": "unknown" - }, - { - "endpoint": 0, - "commandClass": 38, - "commandClassName": "Multilevel Switch", - "property": "currentValue", - "propertyName": "currentValue", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Current value", - "min": 0, - "max": 99, - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 38, - "commandClassName": "Multilevel Switch", - "property": "Up", - "propertyName": "Up", - "ccVersion": 4, - "metadata": { - "type": "boolean", - "readable": false, - "writeable": true, - "label": "Perform a level change (Up)", - "ccSpecific": { - "switchType": 2 - }, - "valueChangeOptions": ["transitionDuration"], - "states": { - "true": "Start", - "false": "Stop" - }, - "stateful": true, - "secret": false - }, - "value": true - }, - { - "endpoint": 0, - "commandClass": 38, - "commandClassName": "Multilevel Switch", - "property": "Down", - "propertyName": "Down", - "ccVersion": 4, - "metadata": { - "type": "boolean", - "readable": false, - "writeable": true, - "label": "Perform a level change (Down)", - "ccSpecific": { - "switchType": 2 - }, - "valueChangeOptions": ["transitionDuration"], - "states": { - "true": "Start", - "false": "Stop" - }, - "stateful": true, - "secret": false - }, - "value": true - }, - { - "endpoint": 0, - "commandClass": 38, - "commandClassName": "Multilevel Switch", - "property": "restorePrevious", - "propertyName": "restorePrevious", - "ccVersion": 4, - "metadata": { - "type": "boolean", - "readable": false, - "writeable": true, - "label": "Restore previous value", - "states": { - "true": "Restore" - }, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 1, - "propertyName": "Hand Button Action", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Hand Button Action", - "default": 1, - "min": 0, - "max": 1, - "states": { - "0": "Close", - "1": "Open" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 1 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 2, - "propertyName": "Motor Direction", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Motor Direction", - "default": 1, - "min": 1, - "max": 3, - "states": { - "1": "Forward", - "2": "Opposite", - "3": "Reverse" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 1 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 3, - "propertyName": "Manually Set Open Boundary", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Manually Set Open Boundary", - "default": 0, - "min": 0, - "max": 1, - "states": { - "0": "Cancel", - "1": "Start" - }, - 
"valueSize": 1, - "format": 0, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 4, - "propertyName": "Manually Set Closed Boundary", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Manually Set Closed Boundary", - "default": 0, - "min": 0, - "max": 1, - "states": { - "0": "Cancel", - "1": "Start" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 5, - "propertyName": "Control Motor", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Control Motor", - "default": 3, - "min": 1, - "max": 3, - "states": { - "1": "Open (Up)", - "2": "Close (Down)", - "3": "Stop" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 3 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 6, - "propertyName": "Calibrate Limit Position", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Calibrate Limit Position", - "default": 1, - "min": 1, - "max": 3, - "states": { - "1": "Upper limit", - "2": "Lower limit", - "3": "Third limit" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 1 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 7, - "propertyName": "Delete Limit Position", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Delete Limit Position", - "default": 0, - "min": 0, - "max": 3, - "states": { - "0": "All limits", - "1": "Only upper limit", - "2": "Only lower limit", - "3": "Only third limit" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 8, - "propertyName": "Low Battery Level Alarm Threshold", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Low Battery Level Alarm Threshold", - "default": 10, - "min": 0, - "max": 50, - "unit": "%", - "valueSize": 1, - "format": 0, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 10 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 9, - "propertyName": "Battery Report Interval", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Battery Report Interval", - "default": 3600, - "min": 0, - "max": 2678400, - "unit": "seconds", - "valueSize": 4, - "format": 0, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 3600 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 10, - "propertyName": "Battery Change Report Threshold", - "ccVersion": 4, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Battery Change Report Threshold", - "default": 5, - "min": 0, - "max": 50, - "unit": "%", - "valueSize": 1, - "format": 0, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 5 - }, - { - "endpoint": 0, - "commandClass": 113, - 
"commandClassName": "Notification", - "property": "Power Management", - "propertyKey": "Mains status", - "propertyName": "Power Management", - "propertyKeyName": "Mains status", - "ccVersion": 8, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Mains status", - "ccSpecific": { - "notificationType": 8 - }, - "min": 0, - "max": 255, - "states": { - "2": "AC mains disconnected", - "3": "AC mains re-connected" - }, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 113, - "commandClassName": "Notification", - "property": "alarmType", - "propertyName": "alarmType", - "ccVersion": 8, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Alarm Type", - "min": 0, - "max": 255, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 113, - "commandClassName": "Notification", - "property": "alarmLevel", - "propertyName": "alarmLevel", - "ccVersion": 8, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Alarm Level", - "min": 0, - "max": 255, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 114, - "commandClassName": "Manufacturer Specific", - "property": "manufacturerId", - "propertyName": "manufacturerId", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Manufacturer ID", - "min": 0, - "max": 65535, - "stateful": true, - "secret": false - }, - "value": 1114 - }, - { - "endpoint": 0, - "commandClass": 114, - "commandClassName": "Manufacturer Specific", - "property": "productType", - "propertyName": "productType", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Product type", - "min": 0, - "max": 65535, - "stateful": true, - "secret": false - }, - "value": 2308 - }, - { - "endpoint": 0, - "commandClass": 114, - "commandClassName": "Manufacturer Specific", - "property": "productId", - "propertyName": "productId", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Product ID", - "min": 0, - "max": 65535, - "stateful": true, - "secret": false - }, - "value": 1287 - }, - { - "endpoint": 0, - "commandClass": 128, - "commandClassName": "Battery", - "property": "level", - "propertyName": "level", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Battery level", - "min": 0, - "max": 100, - "unit": "%", - "stateful": true, - "secret": false - }, - "value": 86 - }, - { - "endpoint": 0, - "commandClass": 128, - "commandClassName": "Battery", - "property": "isLow", - "propertyName": "isLow", - "ccVersion": 1, - "metadata": { - "type": "boolean", - "readable": true, - "writeable": false, - "label": "Low battery level", - "stateful": true, - "secret": false - }, - "value": false - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "libraryType", - "propertyName": "libraryType", - "ccVersion": 3, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Library type", - "states": { - "0": "Unknown", - "1": "Static Controller", - "2": "Controller", - "3": "Enhanced Slave", - "4": "Slave", - "5": "Installer", - "6": "Routing Slave", - "7": "Bridge Controller", - "8": "Device under Test", - "9": "N/A", - "10": "AV Remote", - "11": "AV Device" - }, - "stateful": true, - "secret": false - }, - "value": 
3 - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "protocolVersion", - "propertyName": "protocolVersion", - "ccVersion": 3, - "metadata": { - "type": "string", - "readable": true, - "writeable": false, - "label": "Z-Wave protocol version", - "stateful": true, - "secret": false - }, - "value": "7.16" - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "firmwareVersions", - "propertyName": "firmwareVersions", - "ccVersion": 3, - "metadata": { - "type": "string[]", - "readable": true, - "writeable": false, - "label": "Z-Wave chip firmware versions", - "stateful": true, - "secret": false - }, - "value": ["1.10"] - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "hardwareVersion", - "propertyName": "hardwareVersion", - "ccVersion": 3, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Z-Wave chip hardware version", - "stateful": true, - "secret": false - }, - "value": 1 - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "sdkVersion", - "propertyName": "sdkVersion", - "ccVersion": 3, - "metadata": { - "type": "string", - "readable": true, - "writeable": false, - "label": "SDK version", - "stateful": true, - "secret": false - }, - "value": "7.16.3" - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "applicationFrameworkAPIVersion", - "propertyName": "applicationFrameworkAPIVersion", - "ccVersion": 3, - "metadata": { - "type": "string", - "readable": true, - "writeable": false, - "label": "Z-Wave application framework API version", - "stateful": true, - "secret": false - }, - "value": "10.16.3" - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "applicationFrameworkBuildNumber", - "propertyName": "applicationFrameworkBuildNumber", - "ccVersion": 3, - "metadata": { - "type": "string", - "readable": true, - "writeable": false, - "label": "Z-Wave application framework API build number", - "stateful": true, - "secret": false - }, - "value": 297 - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "hostInterfaceVersion", - "propertyName": "hostInterfaceVersion", - "ccVersion": 3, - "metadata": { - "type": "string", - "readable": true, - "writeable": false, - "label": "Serial API version", - "stateful": true, - "secret": false - }, - "value": "unused" - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "hostInterfaceBuildNumber", - "propertyName": "hostInterfaceBuildNumber", - "ccVersion": 3, - "metadata": { - "type": "string", - "readable": true, - "writeable": false, - "label": "Serial API build number", - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "zWaveProtocolVersion", - "propertyName": "zWaveProtocolVersion", - "ccVersion": 3, - "metadata": { - "type": "string", - "readable": true, - "writeable": false, - "label": "Z-Wave protocol version", - "stateful": true, - "secret": false - }, - "value": "7.16.3" - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "zWaveProtocolBuildNumber", - "propertyName": "zWaveProtocolBuildNumber", - "ccVersion": 3, - "metadata": { - "type": "string", - "readable": true, - "writeable": false, - "label": "Z-Wave protocol build 
number", - "stateful": true, - "secret": false - }, - "value": 297 - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "applicationVersion", - "propertyName": "applicationVersion", - "ccVersion": 3, - "metadata": { - "type": "string", - "readable": true, - "writeable": false, - "label": "Application version", - "stateful": true, - "secret": false - }, - "value": "1.10.0" - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "applicationBuildNumber", - "propertyName": "applicationBuildNumber", - "ccVersion": 3, - "metadata": { - "type": "string", - "readable": true, - "writeable": false, - "label": "Application build number", - "stateful": true, - "secret": false - }, - "value": 43707 - }, - { - "endpoint": 0, - "commandClass": 135, - "commandClassName": "Indicator", - "property": 80, - "propertyKey": 3, - "propertyName": "Node Identify", - "propertyKeyName": "On/Off Period: Duration", - "ccVersion": 3, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "description": "Sets the duration of an on/off period in 1/10th seconds. Must be set together with \"On/Off Cycle Count\"", - "label": "Node Identify - On/Off Period: Duration", - "ccSpecific": { - "indicatorId": 80, - "propertyId": 3 - }, - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 135, - "commandClassName": "Indicator", - "property": 80, - "propertyKey": 4, - "propertyName": "Node Identify", - "propertyKeyName": "On/Off Cycle Count", - "ccVersion": 3, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "description": "Sets the number of on/off periods. 0xff means infinite. Must be set together with \"On/Off Period duration\"", - "label": "Node Identify - On/Off Cycle Count", - "ccSpecific": { - "indicatorId": 80, - "propertyId": 4 - }, - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 135, - "commandClassName": "Indicator", - "property": 80, - "propertyKey": 5, - "propertyName": "Node Identify", - "propertyKeyName": "On/Off Period: On time", - "ccVersion": 3, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "description": "This property is used to set the length of the On time during an On/Off period. It allows asymmetric On/Off periods. 
The value 0x00 MUST represent symmetric On/Off period (On time equal to Off time)", - "label": "Node Identify - On/Off Period: On time", - "ccSpecific": { - "indicatorId": 80, - "propertyId": 5 - }, - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 135, - "commandClassName": "Indicator", - "property": "value", - "propertyName": "value", - "ccVersion": 3, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Indicator value", - "ccSpecific": { - "indicatorId": 0 - }, - "min": 0, - "max": 255, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 135, - "commandClassName": "Indicator", - "property": "identify", - "propertyName": "identify", - "ccVersion": 3, - "metadata": { - "type": "boolean", - "readable": false, - "writeable": true, - "label": "Identify", - "states": { - "true": "Identify" - }, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 135, - "commandClassName": "Indicator", - "property": "timeout", - "propertyName": "timeout", - "ccVersion": 3, - "metadata": { - "type": "string", - "readable": true, - "writeable": true, - "label": "Timeout", - "stateful": true, - "secret": false - } - } - ], - "endpoints": [ - { - "nodeId": 261, - "index": 0, - "installerIcon": 6656, - "userIcon": 6656, - "deviceClass": { - "basic": { - "key": 3, - "label": "End Node" - }, - "generic": { - "key": 17, - "label": "Multilevel Switch" - }, - "specific": { - "key": 0, - "label": "Unused" - } - }, - "commandClasses": [ - { - "id": 94, - "name": "Z-Wave Plus Info", - "version": 2, - "isSecure": false - }, - { - "id": 85, - "name": "Transport Service", - "version": 2, - "isSecure": false - }, - { - "id": 159, - "name": "Security 2", - "version": 1, - "isSecure": true - }, - { - "id": 108, - "name": "Supervision", - "version": 1, - "isSecure": false - }, - { - "id": 38, - "name": "Multilevel Switch", - "version": 4, - "isSecure": true - }, - { - "id": 112, - "name": "Configuration", - "version": 4, - "isSecure": true - }, - { - "id": 133, - "name": "Association", - "version": 2, - "isSecure": true - }, - { - "id": 89, - "name": "Association Group Information", - "version": 3, - "isSecure": true - }, - { - "id": 142, - "name": "Multi Channel Association", - "version": 3, - "isSecure": true - }, - { - "id": 134, - "name": "Version", - "version": 3, - "isSecure": true - }, - { - "id": 114, - "name": "Manufacturer Specific", - "version": 2, - "isSecure": true - }, - { - "id": 90, - "name": "Device Reset Locally", - "version": 1, - "isSecure": true - }, - { - "id": 128, - "name": "Battery", - "version": 1, - "isSecure": true - }, - { - "id": 113, - "name": "Notification", - "version": 8, - "isSecure": true - }, - { - "id": 122, - "name": "Firmware Update Meta Data", - "version": 5, - "isSecure": true - }, - { - "id": 115, - "name": "Powerlevel", - "version": 1, - "isSecure": true - }, - { - "id": 135, - "name": "Indicator", - "version": 3, - "isSecure": true - } - ] - } - ] -} diff --git a/tests/components/zwave_js/fixtures/siren_neo_coolcam_nas-ab01z_state.json b/tests/components/zwave_js/fixtures/siren_neo_coolcam_nas-ab01z_state.json deleted file mode 100644 index 41fc9e37423..00000000000 --- a/tests/components/zwave_js/fixtures/siren_neo_coolcam_nas-ab01z_state.json +++ /dev/null @@ -1,746 +0,0 @@ -{ - "nodeId": 36, - "index": 0, - "installerIcon": 3840, - "userIcon": 3840, - "status": 4, - "ready": true, - "isListening": false, - "isRouting": true, - 
"manufacturerId": 600, - "productId": 4232, - "productType": 3, - "firmwareVersion": "2.94", - "zwavePlusVersion": 1, - "deviceConfig": { - "filename": "/usr/src/app/store/.config-db/devices/0x0258/nas-ab01z.json", - "isEmbedded": true, - "manufacturer": "Shenzhen Neo Electronics Co., Ltd.", - "manufacturerId": 600, - "label": "NAS-AB01Z", - "description": "Siren Alarm", - "devices": [ - { - "productType": 3, - "productId": 136 - }, - { - "productType": 3, - "productId": 4232 - }, - { - "productType": 3, - "productId": 8328 - }, - { - "productType": 3, - "productId": 24712 - } - ], - "firmwareVersion": { - "min": "0.0", - "max": "255.255" - }, - "preferred": false, - "associations": {}, - "paramInformation": { - "_map": {} - } - }, - "label": "NAS-AB01Z", - "interviewAttempts": 0, - "isFrequentListening": "1000ms", - "maxDataRate": 100000, - "supportedDataRates": [40000, 100000], - "protocolVersion": 3, - "supportsBeaming": true, - "supportsSecurity": false, - "nodeType": 1, - "zwavePlusNodeType": 0, - "zwavePlusRoleType": 7, - "deviceClass": { - "basic": { - "key": 4, - "label": "Routing End Node" - }, - "generic": { - "key": 16, - "label": "Binary Switch" - }, - "specific": { - "key": 5, - "label": "Siren" - } - }, - "interviewStage": "Complete", - "deviceDatabaseUrl": "https://devices.zwave-js.io/?jumpTo=0x0258:0x0003:0x1088:2.94", - "statistics": { - "commandsTX": 15, - "commandsRX": 7, - "commandsDroppedRX": 0, - "commandsDroppedTX": 0, - "timeoutResponse": 0, - "rtt": 582.5, - "lastSeen": "2024-10-01T10:22:24.457Z", - "lwr": { - "repeaters": [], - "protocolDataRate": 2 - } - }, - "isControllerNode": false, - "keepAwake": false, - "lastSeen": "2024-09-30T15:07:11.320Z", - "protocol": 0, - "values": [ - { - "endpoint": 0, - "commandClass": 37, - "commandClassName": "Binary Switch", - "property": "currentValue", - "propertyName": "currentValue", - "ccVersion": 1, - "metadata": { - "type": "boolean", - "readable": true, - "writeable": false, - "label": "Current value", - "stateful": true, - "secret": false - }, - "value": false - }, - { - "endpoint": 0, - "commandClass": 37, - "commandClassName": "Binary Switch", - "property": "targetValue", - "propertyName": "targetValue", - "ccVersion": 1, - "metadata": { - "type": "boolean", - "readable": true, - "writeable": true, - "label": "Target value", - "valueChangeOptions": ["transitionDuration"], - "stateful": true, - "secret": false - }, - "value": false - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 1, - "propertyName": "Alarm Volume", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Alarm Volume", - "default": 2, - "min": 1, - "max": 3, - "states": { - "1": "Low", - "2": "Middle", - "3": "High" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 1 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 2, - "propertyName": "Alarm Duration", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Alarm Duration", - "default": 2, - "min": 0, - "max": 255, - "states": { - "0": "Off", - "1": "30 seconds", - "2": "1 minute", - "3": "5 minutes", - "255": "Always on" - }, - "valueSize": 1, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 1 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 3, - 
"propertyName": "Doorbell Duration", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Doorbell Duration", - "default": 1, - "min": 0, - "max": 255, - "states": { - "0": "Off", - "255": "Always" - }, - "valueSize": 1, - "format": 1, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 16 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 4, - "propertyName": "Doorbell Volume", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Doorbell Volume", - "default": 2, - "min": 1, - "max": 3, - "states": { - "1": "Low", - "2": "Middle", - "3": "High" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 1 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 5, - "propertyName": "Alarm Sound Selection", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Alarm Sound Selection", - "default": 10, - "min": 1, - "max": 10, - "states": { - "1": "Doorbell", - "2": "F\u00fcr Elise", - "3": "Westminster Chimes", - "4": "Ding Dong", - "5": "William Tell", - "6": "Rondo Alla Turca", - "7": "Police Siren", - "8": "Evacuation", - "9": "Beep Beep", - "10": "Beep" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 10 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 6, - "propertyName": "Doorbell Sound Selection", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Doorbell Sound Selection", - "default": 9, - "min": 1, - "max": 10, - "states": { - "1": "Doorbell", - "2": "F\u00fcr Elise", - "3": "Westminster Chimes", - "4": "Ding Dong", - "5": "William Tell", - "6": "Rondo Alla Turca", - "7": "Police Siren", - "8": "Evacuation", - "9": "Beep Beep", - "10": "Beep" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 10 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 7, - "propertyName": "Default Siren Sound", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Default Siren Sound", - "default": 1, - "min": 1, - "max": 2, - "states": { - "1": "Alarm Sound", - "2": "Doorbell Sound" - }, - "valueSize": 1, - "format": 0, - "allowManualEntry": true, - "isFromConfig": true - }, - "value": 2 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 8, - "propertyName": "Alarm LED", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Alarm LED", - "default": 1, - "min": 0, - "max": 1, - "states": { - "0": "Disable", - "1": "Enable" - }, - "valueSize": 1, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - "value": 1 - }, - { - "endpoint": 0, - "commandClass": 112, - "commandClassName": "Configuration", - "property": 9, - "propertyName": "Doorbell LED", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Doorbell LED", - "default": 0, - "min": 0, - "max": 1, - "states": { - "0": "Disable", - "1": "Enable" - }, - "valueSize": 1, - "format": 1, - "allowManualEntry": false, - "isFromConfig": true - }, - 
"value": 0 - }, - { - "endpoint": 0, - "commandClass": 113, - "commandClassName": "Notification", - "property": "Siren", - "propertyKey": "Siren status", - "propertyName": "Siren", - "propertyKeyName": "Siren status", - "ccVersion": 8, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Siren status", - "ccSpecific": { - "notificationType": 14 - }, - "min": 0, - "max": 255, - "states": { - "0": "idle", - "1": "Siren active" - }, - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 113, - "commandClassName": "Notification", - "property": "alarmType", - "propertyName": "alarmType", - "ccVersion": 8, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Alarm Type", - "min": 0, - "max": 255, - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 113, - "commandClassName": "Notification", - "property": "alarmLevel", - "propertyName": "alarmLevel", - "ccVersion": 8, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Alarm Level", - "min": 0, - "max": 255, - "stateful": true, - "secret": false - }, - "value": 0 - }, - { - "endpoint": 0, - "commandClass": 114, - "commandClassName": "Manufacturer Specific", - "property": "manufacturerId", - "propertyName": "manufacturerId", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Manufacturer ID", - "min": 0, - "max": 65535, - "stateful": true, - "secret": false - }, - "value": 600 - }, - { - "endpoint": 0, - "commandClass": 114, - "commandClassName": "Manufacturer Specific", - "property": "productType", - "propertyName": "productType", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Product type", - "min": 0, - "max": 65535, - "stateful": true, - "secret": false - }, - "value": 3 - }, - { - "endpoint": 0, - "commandClass": 114, - "commandClassName": "Manufacturer Specific", - "property": "productId", - "propertyName": "productId", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Product ID", - "min": 0, - "max": 65535, - "stateful": true, - "secret": false - }, - "value": 4232 - }, - { - "endpoint": 0, - "commandClass": 128, - "commandClassName": "Battery", - "property": "level", - "propertyName": "level", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Battery level", - "min": 0, - "max": 100, - "unit": "%", - "stateful": true, - "secret": false - }, - "value": 89 - }, - { - "endpoint": 0, - "commandClass": 128, - "commandClassName": "Battery", - "property": "isLow", - "propertyName": "isLow", - "ccVersion": 1, - "metadata": { - "type": "boolean", - "readable": true, - "writeable": false, - "label": "Low battery level", - "stateful": true, - "secret": false - }, - "value": false - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "libraryType", - "propertyName": "libraryType", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Library type", - "states": { - "0": "Unknown", - "1": "Static Controller", - "2": "Controller", - "3": "Enhanced Slave", - "4": "Slave", - "5": "Installer", - "6": "Routing Slave", - "7": "Bridge Controller", - "8": "Device under Test", - "9": "N/A", - "10": "AV Remote", - "11": "AV Device" - }, - 
"stateful": true, - "secret": false - }, - "value": 6 - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "protocolVersion", - "propertyName": "protocolVersion", - "ccVersion": 2, - "metadata": { - "type": "string", - "readable": true, - "writeable": false, - "label": "Z-Wave protocol version", - "stateful": true, - "secret": false - }, - "value": "4.38" - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "firmwareVersions", - "propertyName": "firmwareVersions", - "ccVersion": 2, - "metadata": { - "type": "string[]", - "readable": true, - "writeable": false, - "label": "Z-Wave chip firmware versions", - "stateful": true, - "secret": false - }, - "value": ["2.94"] - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "hardwareVersion", - "propertyName": "hardwareVersion", - "ccVersion": 2, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Z-Wave chip hardware version", - "stateful": true, - "secret": false - }, - "value": 48 - }, - { - "endpoint": 0, - "commandClass": 135, - "commandClassName": "Indicator", - "property": "value", - "propertyName": "value", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Indicator value", - "ccSpecific": { - "indicatorId": 0 - }, - "min": 0, - "max": 255, - "stateful": true, - "secret": false - }, - "value": 0 - } - ], - "endpoints": [ - { - "nodeId": 36, - "index": 0, - "installerIcon": 3840, - "userIcon": 3840, - "deviceClass": { - "basic": { - "key": 4, - "label": "Routing End Node" - }, - "generic": { - "key": 16, - "label": "Binary Switch" - }, - "specific": { - "key": 5, - "label": "Siren" - } - }, - "commandClasses": [ - { - "id": 37, - "name": "Binary Switch", - "version": 1, - "isSecure": false - }, - { - "id": 133, - "name": "Association", - "version": 2, - "isSecure": false - }, - { - "id": 89, - "name": "Association Group Information", - "version": 1, - "isSecure": false - }, - { - "id": 128, - "name": "Battery", - "version": 1, - "isSecure": false - }, - { - "id": 114, - "name": "Manufacturer Specific", - "version": 2, - "isSecure": false - }, - { - "id": 115, - "name": "Powerlevel", - "version": 1, - "isSecure": false - }, - { - "id": 134, - "name": "Version", - "version": 2, - "isSecure": false - }, - { - "id": 94, - "name": "Z-Wave Plus Info", - "version": 2, - "isSecure": false - }, - { - "id": 90, - "name": "Device Reset Locally", - "version": 1, - "isSecure": false - }, - { - "id": 112, - "name": "Configuration", - "version": 1, - "isSecure": false - }, - { - "id": 113, - "name": "Notification", - "version": 8, - "isSecure": false - }, - { - "id": 135, - "name": "Indicator", - "version": 1, - "isSecure": false - } - ] - } - ] -} diff --git a/tests/components/zwave_js/fixtures/window_covering_outbound_bottom.json b/tests/components/zwave_js/fixtures/window_covering_outbound_bottom.json deleted file mode 100644 index 4791e0d9486..00000000000 --- a/tests/components/zwave_js/fixtures/window_covering_outbound_bottom.json +++ /dev/null @@ -1,282 +0,0 @@ -{ - "nodeId": 2, - "index": 0, - "status": 4, - "ready": true, - "isListening": true, - "isRouting": true, - "isSecure": false, - "interviewAttempts": 1, - "isFrequentListening": false, - "maxDataRate": 100000, - "supportedDataRates": [40000, 9600, 100000], - "protocolVersion": 3, - "supportsBeaming": true, - "supportsSecurity": false, - "nodeType": 1, - "deviceClass": { 
- "basic": { - "key": 4, - "label": "Routing End Node" - }, - "generic": { - "key": 6, - "label": "Appliance" - }, - "specific": { - "key": 1, - "label": "General Appliance" - } - }, - "interviewStage": "Complete", - "statistics": { - "commandsTX": 8, - "commandsRX": 5, - "commandsDroppedRX": 0, - "commandsDroppedTX": 0, - "timeoutResponse": 2, - "rtt": 96.3, - "lastSeen": "2024-09-12T11:46:43.065Z" - }, - "highestSecurityClass": -1, - "isControllerNode": false, - "keepAwake": false, - "lastSeen": "2024-09-12T11:46:43.065Z", - "protocol": 0, - "values": [ - { - "endpoint": 0, - "commandClass": 106, - "commandClassName": "Window Covering", - "property": "levelChangeUp", - "propertyKey": 13, - "propertyName": "levelChangeUp", - "propertyKeyName": "Outbound Bottom", - "ccVersion": 1, - "metadata": { - "type": "boolean", - "readable": false, - "writeable": true, - "label": "Open - Outbound Bottom", - "ccSpecific": { - "parameter": 13 - }, - "valueChangeOptions": ["transitionDuration"], - "states": { - "true": "Start", - "false": "Stop" - }, - "stateful": true, - "secret": false - }, - "value": true - }, - { - "endpoint": 0, - "commandClass": 106, - "commandClassName": "Window Covering", - "property": "levelChangeDown", - "propertyKey": 13, - "propertyName": "levelChangeDown", - "propertyKeyName": "Outbound Bottom", - "ccVersion": 1, - "metadata": { - "type": "boolean", - "readable": false, - "writeable": true, - "label": "Close - Outbound Bottom", - "ccSpecific": { - "parameter": 13 - }, - "valueChangeOptions": ["transitionDuration"], - "states": { - "true": "Start", - "false": "Stop" - }, - "stateful": true, - "secret": false - }, - "value": true - }, - { - "endpoint": 0, - "commandClass": 106, - "commandClassName": "Window Covering", - "property": "targetValue", - "propertyKey": 13, - "propertyName": "targetValue", - "propertyKeyName": "Outbound Bottom", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": true, - "label": "Target value - Outbound Bottom", - "ccSpecific": { - "parameter": 13 - }, - "valueChangeOptions": ["transitionDuration"], - "min": 0, - "max": 99, - "states": { - "0": "Closed", - "99": "Open" - }, - "stateful": true, - "secret": false - }, - "value": 52 - }, - { - "endpoint": 0, - "commandClass": 106, - "commandClassName": "Window Covering", - "property": "currentValue", - "propertyKey": 13, - "propertyName": "currentValue", - "propertyKeyName": "Outbound Bottom", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Current value - Outbound Bottom", - "ccSpecific": { - "parameter": 13 - }, - "min": 0, - "max": 99, - "states": { - "0": "Closed", - "99": "Open" - }, - "stateful": true, - "secret": false - }, - "value": 52 - }, - { - "endpoint": 0, - "commandClass": 106, - "commandClassName": "Window Covering", - "property": "duration", - "propertyKey": 13, - "propertyName": "duration", - "propertyKeyName": "Outbound Bottom", - "ccVersion": 1, - "metadata": { - "type": "duration", - "readable": true, - "writeable": false, - "label": "Remaining duration - Outbound Bottom", - "ccSpecific": { - "parameter": 13 - }, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "firmwareVersions", - "propertyName": "firmwareVersions", - "ccVersion": 1, - "metadata": { - "type": "string[]", - "readable": true, - "writeable": false, - "label": "Z-Wave chip firmware versions", - "stateful": true, - "secret": false - } 
- }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "libraryType", - "propertyName": "libraryType", - "ccVersion": 1, - "metadata": { - "type": "number", - "readable": true, - "writeable": false, - "label": "Library type", - "states": { - "0": "Unknown", - "1": "Static Controller", - "2": "Controller", - "3": "Enhanced Slave", - "4": "Slave", - "5": "Installer", - "6": "Routing Slave", - "7": "Bridge Controller", - "8": "Device under Test", - "9": "N/A", - "10": "AV Remote", - "11": "AV Device" - }, - "stateful": true, - "secret": false - } - }, - { - "endpoint": 0, - "commandClass": 134, - "commandClassName": "Version", - "property": "protocolVersion", - "propertyName": "protocolVersion", - "ccVersion": 1, - "metadata": { - "type": "string", - "readable": true, - "writeable": false, - "label": "Z-Wave protocol version", - "stateful": true, - "secret": false - } - } - ], - "endpoints": [ - { - "nodeId": 2, - "index": 0, - "deviceClass": { - "basic": { - "key": 4, - "label": "Routing End Node" - }, - "generic": { - "key": 6, - "label": "Appliance" - }, - "specific": { - "key": 1, - "label": "General Appliance" - } - }, - "commandClasses": [ - { - "id": 134, - "name": "Version", - "version": 1, - "isSecure": false - }, - { - "id": 108, - "name": "Supervision", - "version": 1, - "isSecure": false - }, - { - "id": 106, - "name": "Window Covering", - "version": 1, - "isSecure": false - } - ] - } - ] -} diff --git a/tests/components/zwave_js/snapshots/test_diagnostics.ambr b/tests/components/zwave_js/snapshots/test_diagnostics.ambr deleted file mode 100644 index dc0dbba59b5..00000000000 --- a/tests/components/zwave_js/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,3428 +0,0 @@ -# serializer version: 1 -# name: test_device_diagnostics - dict({ - 'entities': list([ - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.multisensor_6_any', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Any', - 'primary_value': dict({ - 'command_class': 48, - 'command_class_name': 'Binary Sensor', - 'endpoint': 0, - 'property': 'Any', - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Any', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-48-0-Any', - }), - dict({ - 'disabled': False, - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': 'diagnostic', - 'entity_id': 'binary_sensor.multisensor_6_low_battery_level', - 'hidden_by': None, - 'original_device_class': 'battery', - 'original_icon': None, - 'original_name': 'Low battery level', - 'primary_value': dict({ - 'command_class': 128, - 'command_class_name': 'Battery', - 'endpoint': 0, - 'property': 'isLow', - 'property_key': None, - 'property_key_name': None, - 'property_name': 'isLow', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-128-0-isLow', - }), - dict({ - 'disabled': False, - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.multisensor_6_motion_detection', - 'hidden_by': None, - 'original_device_class': 'motion', - 'original_icon': None, - 'original_name': 'Motion detection', - 'primary_value': dict({ - 'command_class': 113, - 'command_class_name': 'Notification', - 'endpoint': 0, - 'property': 'Home Security', - 'property_key': 'Motion sensor status', - 'property_key_name': 'Motion sensor status', - 
'property_name': 'Home Security', - 'state_key': 8, - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-113-0-Home Security-Motion sensor status', - }), - dict({ - 'disabled': False, - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': 'diagnostic', - 'entity_id': 'binary_sensor.multisensor_6_tampering_product_cover_removed', - 'hidden_by': None, - 'original_device_class': 'tamper', - 'original_icon': None, - 'original_name': 'Tampering, product cover removed', - 'primary_value': dict({ - 'command_class': 113, - 'command_class_name': 'Notification', - 'endpoint': 0, - 'property': 'Home Security', - 'property_key': 'Cover status', - 'property_key_name': 'Cover status', - 'property_name': 'Home Security', - 'state_key': 3, - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-113-0-Home Security-Cover status', - }), - dict({ - 'disabled': False, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': 'config', - 'entity_id': 'button.multisensor_6_idle_home_security_cover_status', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Idle Home Security Cover status', - 'primary_value': dict({ - 'command_class': 113, - 'command_class_name': 'Notification', - 'endpoint': 0, - 'property': 'Home Security', - 'property_key': 'Cover status', - 'property_key_name': 'Cover status', - 'property_name': 'Home Security', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-113-0-Home Security-Cover status', - }), - dict({ - 'disabled': False, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': 'config', - 'entity_id': 'button.multisensor_6_idle_home_security_motion_sensor_status', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Idle Home Security Motion sensor status', - 'primary_value': dict({ - 'command_class': 113, - 'command_class_name': 'Notification', - 'endpoint': 0, - 'property': 'Home Security', - 'property_key': 'Motion sensor status', - 'property_key_name': 'Motion sensor status', - 'property_name': 'Home Security', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-113-0-Home Security-Motion sensor status', - }), - dict({ - 'disabled': False, - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.multisensor_6_basic', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Basic', - 'primary_value': dict({ - 'command_class': 32, - 'command_class_name': 'Basic', - 'endpoint': 0, - 'property': 'currentValue', - 'property_key': None, - 'property_key_name': None, - 'property_name': 'currentValue', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-32-0-currentValue', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_battery_threshold', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Battery Threshold', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 44, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Battery Threshold', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-44', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 
'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_default_unit_of_the_automatic_temperature_report', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Default unit of the automatic temperature report', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 64, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Default unit of the automatic temperature report', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-64', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_group_1_report_interval', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 1 Report Interval', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 111, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Group 1 Report Interval', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-111', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_group_2_report_interval', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 2 Report Interval', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 112, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Group 2 Report Interval', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-112', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_group_3_report_interval', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 3 Report Interval', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 113, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Group 3 Report Interval', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-113', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_humidity_sensor_calibration', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Humidity Sensor Calibration', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 202, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Humidity Sensor Calibration', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-202', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_humidity_threshold', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Humidity Threshold', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 
'endpoint': 0, - 'property': 42, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Humidity Threshold', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-42', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_low_battery_report', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Low Battery Report', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 39, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Low Battery Report', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-39', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_lower_limit_value_of_humidity_sensor', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Lower limit value of humidity sensor', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 52, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Lower limit value of humidity sensor', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-52', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_lower_limit_value_of_lighting_sensor', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Lower limit value of Lighting sensor', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 54, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Lower limit value of Lighting sensor', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-54', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_lower_limit_value_of_ultraviolet_sensor', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Lower limit value of ultraviolet sensor', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 56, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Lower limit value of ultraviolet sensor', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-56', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_lower_temperature_limit', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Lower temperature limit', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 50, - 'property_key': 4294901760, - 'property_key_name': None, - 'property_name': 'Lower temperature limit', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-50-4294901760', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 
'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_luminance_sensor_calibration', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Luminance Sensor Calibration', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 203, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Luminance Sensor Calibration', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-203', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_luminance_threshold', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Luminance Threshold', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 43, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Luminance Threshold', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-43', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_motion_sensor_reset_timeout', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Motion Sensor reset timeout', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 3, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Motion Sensor reset timeout', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-3', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_recover_limit_value_of_humidity_sensor', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Recover limit value of humidity sensor', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 58, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Recover limit value of humidity sensor', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-58', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_recover_limit_value_of_lighting_sensor', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Recover limit value of Lighting sensor.', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 59, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Recover limit value of Lighting sensor.', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-59', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_recover_limit_value_of_temperature_sensor', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Recover limit value of temperature sensor', - 'primary_value': dict({ - 
'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 57, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Recover limit value of temperature sensor', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-57', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_recover_limit_value_of_ultraviolet_sensor', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Recover limit value of Ultraviolet sensor', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 60, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Recover limit value of Ultraviolet sensor', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-60', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_send_a_report_if_the_measurement_is_out_of_limits', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Send a report if the measurement is out of limits', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 48, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Send a report if the measurement is out of limits', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-48', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_temperature_calibration', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Temperature Calibration', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 201, - 'property_key': 65280, - 'property_key_name': None, - 'property_name': 'Temperature Calibration', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-201-65280', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_temperature_threshold', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Temperature Threshold', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 41, - 'property_key': 16776960, - 'property_key_name': None, - 'property_name': 'Temperature Threshold', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-41-16776960', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_timeout_after_wake_up', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Timeout after wake up', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 8, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Timeout after wake up', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 
'value_id': '52-112-0-8', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_ultraviolet_sensor_calibration', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Ultraviolet Sensor Calibration', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 204, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Ultraviolet Sensor Calibration', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-204', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_ultraviolet_threshold', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Ultraviolet Threshold', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 45, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Ultraviolet Threshold', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-45', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_upper_limit_value_of_humidity_sensor', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Upper limit value of humidity sensor', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 51, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Upper limit value of humidity sensor', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-51', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_upper_limit_value_of_lighting_sensor', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Upper limit value of Lighting sensor', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 53, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Upper limit value of Lighting sensor', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-53', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_upper_limit_value_of_ultraviolet_sensor', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Upper limit value of ultraviolet sensor', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 55, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Upper limit value of ultraviolet sensor', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-55', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'number', - 'entity_category': 'config', - 'entity_id': 'number.multisensor_6_upper_temperature_limit', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': 
None, - 'original_name': 'Upper temperature limit', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 49, - 'property_key': 4294901760, - 'property_key_name': None, - 'property_name': 'Upper temperature limit', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-49-4294901760', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'select', - 'entity_category': 'config', - 'entity_id': 'select.multisensor_6_disable_enable_configuration_lock', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Disable/Enable Configuration Lock', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 252, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Disable/Enable Configuration Lock', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-252', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'select', - 'entity_category': 'config', - 'entity_id': 'select.multisensor_6_led_function', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'LED function', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 81, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'LED function', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-81', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'select', - 'entity_category': 'config', - 'entity_id': 'select.multisensor_6_motion_sensor_sensitivity', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Motion sensor sensitivity', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 4, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Motion sensor sensitivity', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-4', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'select', - 'entity_category': 'config', - 'entity_id': 'select.multisensor_6_motion_sensor_triggered_command', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Motion Sensor Triggered Command', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 5, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Motion Sensor Triggered Command', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-5', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'select', - 'entity_category': 'config', - 'entity_id': 'select.multisensor_6_selective_reporting', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Selective Reporting', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 40, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Selective Reporting', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-40', - }), - dict({ - 
'disabled': True, - 'disabled_by': 'integration', - 'domain': 'select', - 'entity_category': 'config', - 'entity_id': 'select.multisensor_6_send_alarm_report_if_low_temperature', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Send Alarm Report if low temperature', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 46, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Send Alarm Report if low temperature', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-46', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'select', - 'entity_category': 'config', - 'entity_id': 'select.multisensor_6_stay_awake_in_battery_mode', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Stay Awake in Battery Mode', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 2, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Stay Awake in Battery Mode', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-2', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'select', - 'entity_category': 'config', - 'entity_id': 'select.multisensor_6_temperature_calibration_unit', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Temperature Calibration (Unit)', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 201, - 'property_key': 255, - 'property_key_name': None, - 'property_name': 'Temperature Calibration (Unit)', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-201-255', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'select', - 'entity_category': 'config', - 'entity_id': 'select.multisensor_6_temperature_threshold_unit', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Temperature Threshold (Unit)', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 41, - 'property_key': 15, - 'property_key_name': None, - 'property_name': 'Temperature Threshold (Unit)', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-41-15', - }), - dict({ - 'disabled': False, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.multisensor_6_air_temperature', - 'hidden_by': None, - 'original_device_class': 'temperature', - 'original_icon': None, - 'original_name': 'Air temperature', - 'primary_value': dict({ - 'command_class': 49, - 'command_class_name': 'Multilevel Sensor', - 'endpoint': 0, - 'property': 'Air temperature', - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Air temperature', - }), - 'supported_features': 0, - 'unit_of_measurement': '°C', - 'value_id': '52-49-0-Air temperature', - }), - dict({ - 'disabled': False, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': 'diagnostic', - 'entity_id': 'sensor.multisensor_6_battery_level', - 'hidden_by': None, - 'original_device_class': 'battery', - 'original_icon': None, - 'original_name': 'Battery level', - 'primary_value': dict({ - 'command_class': 128, - 
'command_class_name': 'Battery', - 'endpoint': 0, - 'property': 'level', - 'property_key': None, - 'property_key_name': None, - 'property_name': 'level', - }), - 'supported_features': 0, - 'unit_of_measurement': '%', - 'value_id': '52-128-0-level', - }), - dict({ - 'disabled': False, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.multisensor_6_humidity', - 'hidden_by': None, - 'original_device_class': 'humidity', - 'original_icon': None, - 'original_name': 'Humidity', - 'primary_value': dict({ - 'command_class': 49, - 'command_class_name': 'Multilevel Sensor', - 'endpoint': 0, - 'property': 'Humidity', - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Humidity', - }), - 'supported_features': 0, - 'unit_of_measurement': '%', - 'value_id': '52-49-0-Humidity', - }), - dict({ - 'disabled': False, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.multisensor_6_illuminance', - 'hidden_by': None, - 'original_device_class': 'illuminance', - 'original_icon': None, - 'original_name': 'Illuminance', - 'primary_value': dict({ - 'command_class': 49, - 'command_class_name': 'Multilevel Sensor', - 'endpoint': 0, - 'property': 'Illuminance', - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Illuminance', - }), - 'supported_features': 0, - 'unit_of_measurement': 'lx', - 'value_id': '52-49-0-Illuminance', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': 'diagnostic', - 'entity_id': 'sensor.multisensor_6_out_of_limit_state_of_the_sensors', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Out-of-limit state of the Sensors', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 61, - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Out-of-limit state of the Sensors', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-61', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': 'diagnostic', - 'entity_id': 'sensor.multisensor_6_power_mode', - 'hidden_by': None, - 'original_device_class': 'enum', - 'original_icon': None, - 'original_name': 'Power Mode', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 9, - 'property_key': 256, - 'property_key_name': None, - 'property_name': 'Power Mode', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-9-256', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': 'diagnostic', - 'entity_id': 'sensor.multisensor_6_sleep_state', - 'hidden_by': None, - 'original_device_class': 'enum', - 'original_icon': None, - 'original_name': 'Sleep State', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 9, - 'property_key': 1, - 'property_key_name': None, - 'property_name': 'Sleep State', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-9-1', - }), - dict({ - 'disabled': False, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.multisensor_6_ultraviolet', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 
'Ultraviolet', - 'primary_value': dict({ - 'command_class': 49, - 'command_class_name': 'Multilevel Sensor', - 'endpoint': 0, - 'property': 'Ultraviolet', - 'property_key': None, - 'property_key_name': None, - 'property_name': 'Ultraviolet', - }), - 'supported_features': 0, - 'unit_of_measurement': 'UV index', - 'value_id': '52-49-0-Ultraviolet', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_1_send_battery_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 1: Send battery reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 101, - 'property_key': 1, - 'property_key_name': None, - 'property_name': 'Group 1: Send battery reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-101-1', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_1_send_humidity_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 1: Send humidity reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 101, - 'property_key': 64, - 'property_key_name': None, - 'property_name': 'Group 1: Send humidity reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-101-64', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_1_send_luminance_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 1: Send luminance reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 101, - 'property_key': 128, - 'property_key_name': None, - 'property_name': 'Group 1: Send luminance reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-101-128', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_1_send_temperature_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 1: Send temperature reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 101, - 'property_key': 32, - 'property_key_name': None, - 'property_name': 'Group 1: Send temperature reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-101-32', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_1_send_ultraviolet_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 1: Send ultraviolet reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 101, - 'property_key': 16, - 'property_key_name': None, - 'property_name': 'Group 1: Send ultraviolet reports', - }), - 'supported_features': 0, - 
'unit_of_measurement': None, - 'value_id': '52-112-0-101-16', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_2_send_battery_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 2: Send battery reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 102, - 'property_key': 1, - 'property_key_name': None, - 'property_name': 'Group 2: Send battery reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-102-1', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_2_send_humidity_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 2: Send humidity reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 102, - 'property_key': 64, - 'property_key_name': None, - 'property_name': 'Group 2: Send humidity reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-102-64', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_2_send_luminance_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 2: Send luminance reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 102, - 'property_key': 128, - 'property_key_name': None, - 'property_name': 'Group 2: Send luminance reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-102-128', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_2_send_temperature_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 2: Send temperature reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 102, - 'property_key': 32, - 'property_key_name': None, - 'property_name': 'Group 2: Send temperature reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-102-32', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_2_send_ultraviolet_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 2: Send ultraviolet reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 102, - 'property_key': 16, - 'property_key_name': None, - 'property_name': 'Group 2: Send ultraviolet reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-102-16', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_3_send_battery_reports', - 'hidden_by': None, - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 3: Send battery reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 103, - 'property_key': 1, - 'property_key_name': None, - 'property_name': 'Group 3: Send battery reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-103-1', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_3_send_humidity_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 3: Send humidity reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 103, - 'property_key': 64, - 'property_key_name': None, - 'property_name': 'Group 3: Send humidity reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-103-64', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_3_send_luminance_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 3: Send luminance reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 103, - 'property_key': 128, - 'property_key_name': None, - 'property_name': 'Group 3: Send luminance reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-103-128', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_3_send_temperature_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 3: Send temperature reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 103, - 'property_key': 32, - 'property_key_name': None, - 'property_name': 'Group 3: Send temperature reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-103-32', - }), - dict({ - 'disabled': True, - 'disabled_by': 'integration', - 'domain': 'switch', - 'entity_category': 'config', - 'entity_id': 'switch.multisensor_6_group_3_send_ultraviolet_reports', - 'hidden_by': None, - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Group 3: Send ultraviolet reports', - 'primary_value': dict({ - 'command_class': 112, - 'command_class_name': 'Configuration', - 'endpoint': 0, - 'property': 103, - 'property_key': 16, - 'property_key_name': None, - 'property_name': 'Group 3: Send ultraviolet reports', - }), - 'supported_features': 0, - 'unit_of_measurement': None, - 'value_id': '52-112-0-103-16', - }), - ]), - 'state': dict({ - 'deviceClass': dict({ - 'basic': dict({ - 'key': 2, - 'label': 'Static Controller', - }), - 'generic': dict({ - 'key': 21, - 'label': 'Multilevel Sensor', - }), - 'mandatoryControlledCCs': list([ - ]), - 'mandatorySupportedCCs': list([ - ]), - 'specific': dict({ - 'key': 1, - 'label': 'Routing Multilevel Sensor', - }), - }), - 'deviceConfig': dict({ - 'description': 'Multisensor 6', - 'devices': list([ - dict({ - 'productId': '0x0064', - 'productType': '0x0002', - 
}), - dict({ - 'productId': '0x0064', - 'productType': '0x0102', - }), - dict({ - 'productId': '0x0064', - 'productType': '0x0202', - }), - ]), - 'firmwareVersion': dict({ - 'max': '255.255', - 'min': '1.10', - }), - 'label': 'ZW100', - 'manufacturer': 'AEON Labs', - 'manufacturerId': 134, - 'paramInformation': dict({ - '_map': dict({ - }), - }), - }), - 'endpoints': dict({ - '0': dict({ - 'commandClasses': list([ - dict({ - 'id': 113, - 'isSecure': False, - 'name': 'Notification', - 'version': 8, - }), - ]), - 'index': 0, - 'installerIcon': 3079, - 'nodeId': 52, - 'userIcon': 3079, - }), - }), - 'firmwareVersion': '1.12', - 'highestSecurityClass': 7, - 'index': 0, - 'installerIcon': 3079, - 'interviewAttempts': 1, - 'isBeaming': True, - 'isControllerNode': False, - 'isFrequentListening': False, - 'isListening': True, - 'isRouting': True, - 'isSecure': False, - 'label': 'ZW100', - 'manufacturerId': 134, - 'maxBaudRate': 40000, - 'neighbors': list([ - 1, - 32, - ]), - 'nodeId': 52, - 'nodeType': 0, - 'productId': 100, - 'productType': 258, - 'ready': True, - 'roleType': 5, - 'status': 1, - 'userIcon': 3079, - 'values': dict({ - '52-112-0-100': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 0, - 'description': 'Reset 101-103 to defaults', - 'format': 0, - 'isFromConfig': True, - 'label': 'Set parameters 101-103 to default.', - 'max': 1, - 'min': 0, - 'readable': False, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 100, - 'propertyName': 'Set parameters 101-103 to default.', - }), - '52-112-0-101-1': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include battery information in periodic reports to Group 1', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 1: Send battery reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 101, - 'propertyKey': 1, - 'propertyName': 'Group 1: Send battery reports', - 'value': 1, - }), - '52-112-0-101-128': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include luminance information in periodic reports to Group 1', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 1: Send luminance reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 101, - 'propertyKey': 128, - 'propertyName': 'Group 1: Send luminance reports', - 'value': 1, - }), - '52-112-0-101-16': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include ultraviolet information in periodic reports to Group 1', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 1: Send ultraviolet reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 101, - 'propertyKey': 16, - 'propertyName': 'Group 1: Send ultraviolet reports', - 'value': 1, - }), - '52-112-0-101-32': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include temperature information in periodic 
reports to Group 1', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 1: Send temperature reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 101, - 'propertyKey': 32, - 'propertyName': 'Group 1: Send temperature reports', - 'value': 1, - }), - '52-112-0-101-64': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include humidity information in periodic reports to Group 1', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 1: Send humidity reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 101, - 'propertyKey': 64, - 'propertyName': 'Group 1: Send humidity reports', - 'value': 1, - }), - '52-112-0-102-1': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include battery information in periodic reports to Group 2', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 2: Send battery reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 102, - 'propertyKey': 1, - 'propertyName': 'Group 2: Send battery reports', - 'value': 0, - }), - '52-112-0-102-128': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include luminance information in periodic reports to Group 2', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 2: Send luminance reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 102, - 'propertyKey': 128, - 'propertyName': 'Group 2: Send luminance reports', - 'value': 0, - }), - '52-112-0-102-16': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include ultraviolet information in periodic reports to Group 2', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 2: Send ultraviolet reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 102, - 'propertyKey': 16, - 'propertyName': 'Group 2: Send ultraviolet reports', - 'value': 0, - }), - '52-112-0-102-32': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include temperature information in periodic reports to Group 2', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 2: Send temperature reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 102, - 'propertyKey': 32, - 'propertyName': 'Group 2: Send temperature reports', - 'value': 0, - }), - '52-112-0-102-64': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include humidity information in periodic reports to Group 2', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 2: Send humidity reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 
'writeable': True, - }), - 'property': 102, - 'propertyKey': 64, - 'propertyName': 'Group 2: Send humidity reports', - 'value': 0, - }), - '52-112-0-103-1': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include battery information in periodic reports to Group 3', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 3: Send battery reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 103, - 'propertyKey': 1, - 'propertyName': 'Group 3: Send battery reports', - 'value': 0, - }), - '52-112-0-103-128': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include luminance information in periodic reports to Group 3', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 3: Send luminance reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 103, - 'propertyKey': 128, - 'propertyName': 'Group 3: Send luminance reports', - 'value': 0, - }), - '52-112-0-103-16': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include ultraviolet information in periodic reports to Group 3', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 3: Send ultraviolet reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 103, - 'propertyKey': 16, - 'propertyName': 'Group 3: Send ultraviolet reports', - 'value': 0, - }), - '52-112-0-103-32': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include temperature information in periodic reports to Group 3', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 3: Send temperature reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 103, - 'propertyKey': 32, - 'propertyName': 'Group 3: Send temperature reports', - 'value': 0, - }), - '52-112-0-103-64': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Include humidity information in periodic reports to Group 3', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 3: Send humidity reports', - 'max': 1, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 103, - 'propertyKey': 64, - 'propertyName': 'Group 3: Send humidity reports', - 'value': 0, - }), - '52-112-0-110': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 0, - 'description': 'Set parameters 111-113 to default.', - 'format': 0, - 'isFromConfig': True, - 'label': 'Set parameters 111-113 to default.', - 'max': 1, - 'min': 0, - 'readable': False, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 110, - 'propertyName': 'Set parameters 111-113 to default.', - }), - '52-112-0-111': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 
'metadata': dict({ - 'allowManualEntry': True, - 'default': 3600, - 'description': 'How often to update Group 1', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 1 Report Interval', - 'max': 2678400, - 'min': 5, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 111, - 'propertyName': 'Group 1 Report Interval', - 'value': 3600, - }), - '52-112-0-112': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 3600, - 'description': 'Group 2 Report Interval', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 2 Report Interval', - 'max': 2678400, - 'min': 5, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 112, - 'propertyName': 'Group 2 Report Interval', - 'value': 3600, - }), - '52-112-0-113': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 3600, - 'description': 'Group 3 Report Interval', - 'format': 0, - 'isFromConfig': True, - 'label': 'Group 3 Report Interval', - 'max': 2678400, - 'min': 5, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 113, - 'propertyName': 'Group 3 Report Interval', - 'value': 3600, - }), - '52-112-0-2': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 0, - 'description': 'Stay awake for 10 minutes at power on', - 'format': 0, - 'isFromConfig': True, - 'label': 'Stay Awake in Battery Mode', - 'max': 1, - 'min': 0, - 'readable': True, - 'states': dict({ - '0': 'Disable', - '1': 'Enable', - }), - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 2, - 'propertyName': 'Stay Awake in Battery Mode', - 'value': 0, - }), - '52-112-0-201-255': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 1, - 'format': 0, - 'isFromConfig': True, - 'label': 'Temperature Calibration (Unit)', - 'max': 2, - 'min': 1, - 'readable': True, - 'states': dict({ - '1': 'Celsius', - '2': 'Fahrenheit', - }), - 'type': 'number', - 'valueSize': 2, - 'writeable': True, - }), - 'property': 201, - 'propertyKey': 255, - 'propertyName': 'Temperature Calibration (Unit)', - 'value': 2, - }), - '52-112-0-201-65280': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 0, - 'format': 0, - 'isFromConfig': True, - 'label': 'Temperature Calibration', - 'max': 127, - 'min': -127, - 'readable': True, - 'type': 'number', - 'valueSize': 2, - 'writeable': True, - }), - 'property': 201, - 'propertyKey': 65280, - 'propertyName': 'Temperature Calibration', - 'value': 0, - }), - '52-112-0-202': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 0, - 'description': 'Humidity Sensor Calibration', - 'format': 0, - 'isFromConfig': True, - 'label': 'Humidity Sensor Calibration', - 'max': 50, - 'min': -50, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 202, - 'propertyName': 'Humidity Sensor Calibration', - 'value': 0, - }), - '52-112-0-203': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 
'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 0, - 'description': 'Luminance Sensor Calibration', - 'format': 0, - 'isFromConfig': True, - 'label': 'Luminance Sensor Calibration', - 'max': 1000, - 'min': -1000, - 'readable': True, - 'type': 'number', - 'valueSize': 2, - 'writeable': True, - }), - 'property': 203, - 'propertyName': 'Luminance Sensor Calibration', - 'value': 0, - }), - '52-112-0-204': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 0, - 'description': 'Ultraviolet Sensor Calibration', - 'format': 0, - 'isFromConfig': True, - 'label': 'Ultraviolet Sensor Calibration', - 'max': 10, - 'min': -10, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 204, - 'propertyName': 'Ultraviolet Sensor Calibration', - 'value': 0, - }), - '52-112-0-252': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 0, - 'description': 'Disable/Enable Configuration Lock (0=Disable, 1=Enable)', - 'format': 0, - 'isFromConfig': True, - 'label': 'Disable/Enable Configuration Lock', - 'max': 1, - 'min': 0, - 'readable': True, - 'states': dict({ - '0': 'Disable', - '1': 'Enable', - }), - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 252, - 'propertyName': 'Disable/Enable Configuration Lock', - 'value': 0, - }), - '52-112-0-255': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 0, - 'format': 0, - 'isFromConfig': True, - 'label': 'Reset to default factory settings', - 'max': 1431655765, - 'min': 0, - 'readable': False, - 'states': dict({ - '1': 'Resets all configuration parameters to defaults', - '1431655765': 'Reset to default factory settings and be excluded', - }), - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 255, - 'propertyName': 'Reset to default factory settings', - }), - '52-112-0-3': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 240, - 'description': 'Motion Sensor reset timeout', - 'format': 0, - 'isFromConfig': True, - 'label': 'Motion Sensor reset timeout', - 'max': 3600, - 'min': 10, - 'readable': True, - 'type': 'number', - 'valueSize': 2, - 'writeable': True, - }), - 'property': 3, - 'propertyName': 'Motion Sensor reset timeout', - 'value': 240, - }), - '52-112-0-39': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 20, - 'description': 'Report Low Battery if below this value', - 'format': 0, - 'isFromConfig': True, - 'label': 'Low Battery Report', - 'max': 50, - 'min': 10, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 39, - 'propertyName': 'Low Battery Report', - 'value': 20, - }), - '52-112-0-4': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 5, - 'description': 'Sensitivity level of PIR sensor (1=minimum, 5=maximum)', - 'format': 1, - 'isFromConfig': True, - 'label': 'Motion sensor sensitivity', - 'max': 255, - 'min': 0, - 'readable': True, - 'states': dict({ - '0': 'Disable', - '1': 'Enable, sensitivity level 
1 (minimum)', - '2': 'Enable, sensitivity level 2', - '3': 'Enable, sensitivity level 3', - '4': 'Enable, sensitivity level 4', - '5': 'Enable, sensitivity level 5 (maximum)', - }), - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 4, - 'propertyName': 'Motion sensor sensitivity', - 'value': 5, - }), - '52-112-0-40': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 0, - 'description': 'Select to report on thresholds', - 'format': 0, - 'isFromConfig': True, - 'label': 'Selective Reporting', - 'max': 1, - 'min': 0, - 'readable': True, - 'states': dict({ - '0': 'Disable', - '1': 'Enable', - }), - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 40, - 'propertyName': 'Selective Reporting', - 'value': 0, - }), - '52-112-0-41-15': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 1, - 'format': 0, - 'isFromConfig': True, - 'label': 'Temperature Threshold (Unit)', - 'max': 2, - 'min': 1, - 'readable': True, - 'states': dict({ - '1': 'Celsius', - '2': 'Fahrenheit', - }), - 'type': 'number', - 'valueSize': 3, - 'writeable': True, - }), - 'property': 41, - 'propertyKey': 15, - 'propertyName': 'Temperature Threshold (Unit)', - 'value': 0, - }), - '52-112-0-41-16776960': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 20, - 'description': 'Threshold change in temperature to induce an automatic report.', - 'format': 0, - 'isFromConfig': True, - 'label': 'Temperature Threshold', - 'max': 100, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 3, - 'writeable': True, - }), - 'property': 41, - 'propertyKey': 16776960, - 'propertyName': 'Temperature Threshold', - 'value': 5122, - }), - '52-112-0-42': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 10, - 'description': 'Humidity percent change threshold', - 'format': 0, - 'isFromConfig': True, - 'label': 'Humidity Threshold', - 'max': 100, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 42, - 'propertyName': 'Humidity Threshold', - 'value': 10, - }), - '52-112-0-43': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 100, - 'description': 'Luminance change threshold', - 'format': 0, - 'isFromConfig': True, - 'label': 'Luminance Threshold', - 'max': 1000, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 2, - 'writeable': True, - }), - 'property': 43, - 'propertyName': 'Luminance Threshold', - 'value': 100, - }), - '52-112-0-44': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 10, - 'description': 'Battery level threshold', - 'format': 0, - 'isFromConfig': True, - 'label': 'Battery Threshold', - 'max': 100, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 44, - 'propertyName': 'Battery Threshold', - 'value': 10, - }), - '52-112-0-45': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': 
True, - 'default': 2, - 'description': 'Ultraviolet change threshold', - 'format': 0, - 'isFromConfig': True, - 'label': 'Ultraviolet Threshold', - 'max': 100, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 45, - 'propertyName': 'Ultraviolet Threshold', - 'value': 2, - }), - '52-112-0-46': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 0, - 'description': 'Send an alarm report if temperature is less than -15 °C', - 'format': 1, - 'isFromConfig': True, - 'label': 'Send Alarm Report if low temperature', - 'max': 255, - 'min': 0, - 'readable': True, - 'states': dict({ - '0': 'Disable', - '1': 'Enable', - }), - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 46, - 'propertyName': 'Send Alarm Report if low temperature', - 'value': 0, - }), - '52-112-0-48': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 0, - 'description': 'Send report when measurement is at upper/lower limit', - 'format': 1, - 'isFromConfig': True, - 'label': 'Send a report if the measurement is out of limits', - 'max': 255, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 48, - 'propertyName': 'Send a report if the measurement is out of limits', - 'value': 0, - }), - '52-112-0-49-4294901760': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 280, - 'format': 0, - 'isFromConfig': True, - 'label': 'Upper temperature limit', - 'max': 2120, - 'min': -400, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 49, - 'propertyKey': 4294901760, - 'propertyName': 'Upper temperature limit', - 'value': 824, - }), - '52-112-0-49-65280': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 1, - 'format': 0, - 'isFromConfig': True, - 'label': 'Upper temperature limit (Unit)', - 'max': 2, - 'min': 1, - 'readable': False, - 'states': dict({ - '1': 'Celsius', - '2': 'Fahrenheit', - }), - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 49, - 'propertyKey': 65280, - 'propertyName': 'Upper temperature limit (Unit)', - 'value': 2, - }), - '52-112-0-5': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 1, - 'format': 1, - 'isFromConfig': True, - 'label': 'Motion Sensor Triggered Command', - 'max': 255, - 'min': 0, - 'readable': True, - 'states': dict({ - '1': 'Send Basic Set CC', - '2': 'Send Sensor Binary Report CC', - }), - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 5, - 'propertyName': 'Motion Sensor Triggered Command', - 'value': 1, - }), - '52-112-0-50-4294901760': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 0, - 'format': 0, - 'isFromConfig': True, - 'label': 'Lower temperature limit', - 'max': 2120, - 'min': -400, - 'readable': True, - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 50, - 'propertyKey': 4294901760, - 'propertyName': 'Lower temperature limit', - 
'value': 320, - }), - '52-112-0-50-65280': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 1, - 'format': 0, - 'isFromConfig': True, - 'label': 'Lower temperature limit (Unit)', - 'max': 2, - 'min': 1, - 'readable': False, - 'states': dict({ - '1': 'Celsius', - '2': 'Fahrenheit', - }), - 'type': 'number', - 'valueSize': 4, - 'writeable': True, - }), - 'property': 50, - 'propertyKey': 65280, - 'propertyName': 'Lower temperature limit (Unit)', - 'value': 2, - }), - '52-112-0-51': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 60, - 'description': 'Upper limit value of humidity sensor', - 'format': 0, - 'isFromConfig': True, - 'label': 'Upper limit value of humidity sensor', - 'max': 100, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 51, - 'propertyName': 'Upper limit value of humidity sensor', - 'value': 60, - }), - '52-112-0-52': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 50, - 'description': 'Lower limit value of humidity sensor', - 'format': 0, - 'isFromConfig': True, - 'label': 'Lower limit value of humidity sensor', - 'max': 100, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 52, - 'propertyName': 'Lower limit value of humidity sensor', - 'value': 50, - }), - '52-112-0-53': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1000, - 'description': 'Upper limit value of Lighting sensor', - 'format': 0, - 'isFromConfig': True, - 'label': 'Upper limit value of Lighting sensor', - 'max': 30000, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 2, - 'writeable': True, - }), - 'property': 53, - 'propertyName': 'Upper limit value of Lighting sensor', - 'value': 1000, - }), - '52-112-0-54': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 100, - 'description': 'Lower limit value of Lighting sensor', - 'format': 0, - 'isFromConfig': True, - 'label': 'Lower limit value of Lighting sensor', - 'max': 30000, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 2, - 'writeable': True, - }), - 'property': 54, - 'propertyName': 'Lower limit value of Lighting sensor', - 'value': 100, - }), - '52-112-0-55': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 8, - 'description': 'Upper limit value of ultraviolet sensor', - 'format': 0, - 'isFromConfig': True, - 'label': 'Upper limit value of ultraviolet sensor', - 'max': 11, - 'min': 1, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 55, - 'propertyName': 'Upper limit value of ultraviolet sensor', - 'value': 8, - }), - '52-112-0-56': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 4, - 'description': 'Lower limit value of ultraviolet sensor', - 'format': 0, - 'isFromConfig': True, - 'label': 'Lower limit value of ultraviolet sensor', - 'max': 11, - 'min': 1, - 
'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 56, - 'propertyName': 'Lower limit value of ultraviolet sensor', - 'value': 4, - }), - '52-112-0-57': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 0, - 'description': 'Recover limit value of temperature sensor', - 'format': 1, - 'isFromConfig': True, - 'label': 'Recover limit value of temperature sensor', - 'max': 65535, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 2, - 'writeable': True, - }), - 'property': 57, - 'propertyName': 'Recover limit value of temperature sensor', - 'value': 5122, - }), - '52-112-0-58': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 5, - 'description': 'Recover limit value of humidity sensor', - 'format': 0, - 'isFromConfig': True, - 'label': 'Recover limit value of humidity sensor', - 'max': 50, - 'min': 1, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 58, - 'propertyName': 'Recover limit value of humidity sensor', - 'value': 5, - }), - '52-112-0-59': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 10, - 'description': 'Recover limit value of Lighting sensor.', - 'format': 1, - 'isFromConfig': True, - 'label': 'Recover limit value of Lighting sensor.', - 'max': 255, - 'min': 1, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 59, - 'propertyName': 'Recover limit value of Lighting sensor.', - 'value': 10, - }), - '52-112-0-60': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 2, - 'description': 'Recover limit value of Ultraviolet sensor', - 'format': 0, - 'isFromConfig': True, - 'label': 'Recover limit value of Ultraviolet sensor', - 'max': 5, - 'min': 1, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 60, - 'propertyName': 'Recover limit value of Ultraviolet sensor', - 'value': 2, - }), - '52-112-0-61': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 0, - 'description': 'Out-of-limit state of the Sensors', - 'format': 1, - 'isFromConfig': True, - 'label': 'Out-of-limit state of the Sensors', - 'max': 255, - 'min': 0, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': False, - }), - 'property': 61, - 'propertyName': 'Out-of-limit state of the Sensors', - 'value': 0, - }), - '52-112-0-64': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 1, - 'description': 'Default unit of the automatic temperature report', - 'format': 0, - 'isFromConfig': True, - 'label': 'Default unit of the automatic temperature report', - 'max': 2, - 'min': 1, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 64, - 'propertyName': 'Default unit of the automatic temperature report', - 'value': 2, - }), - '52-112-0-8': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': True, - 'default': 
30, - 'description': 'Set the timeout of awake after the Wake Up CC is sent out...', - 'format': 1, - 'isFromConfig': True, - 'label': 'Timeout after wake up', - 'max': 255, - 'min': 8, - 'readable': True, - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 8, - 'propertyName': 'Timeout after wake up', - 'value': 15, - }), - '52-112-0-81': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 0, - 'description': 'Disable/Enable LED function', - 'format': 0, - 'isFromConfig': True, - 'label': 'LED function', - 'max': 2, - 'min': 0, - 'readable': True, - 'states': dict({ - '0': 'Enable LED blinking', - '1': 'Disable PIR LED', - '2': 'Disable ALL', - }), - 'type': 'number', - 'valueSize': 1, - 'writeable': True, - }), - 'property': 81, - 'propertyName': 'LED function', - 'value': 0, - }), - '52-112-0-9-1': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 0, - 'format': 0, - 'isFromConfig': True, - 'label': 'Sleep State', - 'max': 1, - 'min': 0, - 'readable': True, - 'states': dict({ - '0': 'Asleep', - '1': 'Awake', - }), - 'type': 'number', - 'valueSize': 2, - 'writeable': False, - }), - 'property': 9, - 'propertyKey': 1, - 'propertyName': 'Sleep State', - 'value': 0, - }), - '52-112-0-9-256': dict({ - 'commandClass': 112, - 'commandClassName': 'Configuration', - 'endpoint': 0, - 'metadata': dict({ - 'allowManualEntry': False, - 'default': 0, - 'format': 0, - 'isFromConfig': True, - 'label': 'Power Mode', - 'max': 1, - 'min': 0, - 'readable': True, - 'states': dict({ - '0': 'USB', - '1': 'Battery', - }), - 'type': 'number', - 'valueSize': 2, - 'writeable': False, - }), - 'property': 9, - 'propertyKey': 256, - 'propertyName': 'Power Mode', - 'value': 0, - }), - '52-113-0-Home Security-Cover status': dict({ - 'commandClass': 113, - 'commandClassName': 'Notification', - 'endpoint': 0, - 'metadata': dict({ - 'ccSpecific': dict({ - 'notificationType': 7, - }), - 'label': 'Cover status', - 'max': 255, - 'min': 0, - 'readable': True, - 'states': dict({ - '0': 'idle', - '3': 'Tampering, product cover removed', - }), - 'type': 'number', - 'writeable': False, - }), - 'property': 'Home Security', - 'propertyKey': 'Cover status', - 'propertyKeyName': 'Cover status', - 'propertyName': 'Home Security', - 'value': 0, - }), - '52-113-0-Home Security-Motion sensor status': dict({ - 'commandClass': 113, - 'commandClassName': 'Notification', - 'endpoint': 0, - 'metadata': dict({ - 'ccSpecific': dict({ - 'notificationType': 7, - }), - 'label': 'Motion sensor status', - 'max': 255, - 'min': 0, - 'readable': True, - 'states': dict({ - '0': 'idle', - '8': 'Motion detection', - }), - 'type': 'number', - 'writeable': False, - }), - 'property': 'Home Security', - 'propertyKey': 'Motion sensor status', - 'propertyKeyName': 'Motion sensor status', - 'propertyName': 'Home Security', - 'value': 8, - }), - '52-114-0-manufacturerId': dict({ - 'commandClass': 114, - 'commandClassName': 'Manufacturer Specific', - 'endpoint': 0, - 'metadata': dict({ - 'label': 'Manufacturer ID', - 'max': 65535, - 'min': 0, - 'readable': True, - 'type': 'number', - 'writeable': False, - }), - 'property': 'manufacturerId', - 'propertyName': 'manufacturerId', - 'value': 134, - }), - '52-114-0-productId': dict({ - 'commandClass': 114, - 'commandClassName': 'Manufacturer Specific', - 'endpoint': 0, - 'metadata': dict({ - 'label': 
'Product ID', - 'max': 65535, - 'min': 0, - 'readable': True, - 'type': 'number', - 'writeable': False, - }), - 'property': 'productId', - 'propertyName': 'productId', - 'value': 100, - }), - '52-114-0-productType': dict({ - 'commandClass': 114, - 'commandClassName': 'Manufacturer Specific', - 'endpoint': 0, - 'metadata': dict({ - 'label': 'Product type', - 'max': 65535, - 'min': 0, - 'readable': True, - 'type': 'number', - 'writeable': False, - }), - 'property': 'productType', - 'propertyName': 'productType', - 'value': 258, - }), - '52-128-0-isLow': dict({ - 'commandClass': 128, - 'commandClassName': 'Battery', - 'endpoint': 0, - 'metadata': dict({ - 'label': 'Low battery level', - 'readable': True, - 'type': 'boolean', - 'writeable': False, - }), - 'property': 'isLow', - 'propertyName': 'isLow', - 'value': False, - }), - '52-128-0-level': dict({ - 'commandClass': 128, - 'commandClassName': 'Battery', - 'endpoint': 0, - 'metadata': dict({ - 'label': 'Battery level', - 'max': 100, - 'min': 0, - 'readable': True, - 'type': 'number', - 'unit': '%', - 'writeable': False, - }), - 'property': 'level', - 'propertyName': 'level', - 'value': 100, - }), - '52-132-0-controllerNodeId': dict({ - 'commandClass': 132, - 'commandClassName': 'Wake Up', - 'endpoint': 0, - 'metadata': dict({ - 'label': 'Node ID of the controller', - 'readable': True, - 'type': 'any', - 'writeable': False, - }), - 'property': 'controllerNodeId', - 'propertyName': 'controllerNodeId', - 'value': 1, - }), - '52-132-0-wakeUpInterval': dict({ - 'commandClass': 132, - 'commandClassName': 'Wake Up', - 'endpoint': 0, - 'metadata': dict({ - 'default': 3600, - 'label': 'Wake Up interval', - 'max': 3600, - 'min': 240, - 'readable': False, - 'steps': 60, - 'type': 'number', - 'writeable': True, - }), - 'property': 'wakeUpInterval', - 'propertyName': 'wakeUpInterval', - 'value': 3600, - }), - '52-134-0-firmwareVersions': dict({ - 'commandClass': 134, - 'commandClassName': 'Version', - 'endpoint': 0, - 'metadata': dict({ - 'label': 'Z-Wave chip firmware versions', - 'readable': True, - 'type': 'any', - 'writeable': False, - }), - 'property': 'firmwareVersions', - 'propertyName': 'firmwareVersions', - 'value': list([ - '1.12', - ]), - }), - '52-134-0-hardwareVersion': dict({ - 'commandClass': 134, - 'commandClassName': 'Version', - 'endpoint': 0, - 'metadata': dict({ - 'label': 'Z-Wave chip hardware version', - 'readable': True, - 'type': 'any', - 'writeable': False, - }), - 'property': 'hardwareVersion', - 'propertyName': 'hardwareVersion', - }), - '52-134-0-libraryType': dict({ - 'commandClass': 134, - 'commandClassName': 'Version', - 'endpoint': 0, - 'metadata': dict({ - 'label': 'Libary type', - 'readable': True, - 'type': 'any', - 'writeable': False, - }), - 'property': 'libraryType', - 'propertyName': 'libraryType', - 'value': 3, - }), - '52-134-0-protocolVersion': dict({ - 'commandClass': 134, - 'commandClassName': 'Version', - 'endpoint': 0, - 'metadata': dict({ - 'label': 'Z-Wave protocol version', - 'readable': True, - 'type': 'any', - 'writeable': False, - }), - 'property': 'protocolVersion', - 'propertyName': 'protocolVersion', - 'value': '4.54', - }), - '52-32-0-currentValue': dict({ - 'commandClass': 32, - 'commandClassName': 'Basic', - 'endpoint': 0, - 'metadata': dict({ - 'label': 'Current value', - 'max': 99, - 'min': 0, - 'readable': True, - 'type': 'number', - 'writeable': False, - }), - 'property': 'currentValue', - 'propertyName': 'currentValue', - 'value': 255, - }), - '52-32-0-targetValue': dict({ - 'commandClass': 
32, - 'commandClassName': 'Basic', - 'endpoint': 0, - 'metadata': dict({ - 'label': 'Target value', - 'max': 99, - 'min': 0, - 'readable': True, - 'type': 'number', - 'writeable': True, - }), - 'property': 'targetValue', - 'propertyName': 'targetValue', - }), - '52-48-0-Any': dict({ - 'commandClass': 48, - 'commandClassName': 'Binary Sensor', - 'endpoint': 0, - 'metadata': dict({ - 'ccSpecific': dict({ - 'sensorType': 255, - }), - 'label': 'Any', - 'readable': True, - 'type': 'boolean', - 'writeable': False, - }), - 'property': 'Any', - 'propertyName': 'Any', - 'value': False, - }), - '52-49-0-Air temperature': dict({ - 'commandClass': 49, - 'commandClassName': 'Multilevel Sensor', - 'endpoint': 0, - 'metadata': dict({ - 'ccSpecific': dict({ - 'scale': 0, - 'sensorType': 1, - }), - 'label': 'Air temperature', - 'readable': True, - 'type': 'number', - 'unit': '°C', - 'writeable': False, - }), - 'property': 'Air temperature', - 'propertyName': 'Air temperature', - 'value': 9, - }), - '52-49-0-Humidity': dict({ - 'commandClass': 49, - 'commandClassName': 'Multilevel Sensor', - 'endpoint': 0, - 'metadata': dict({ - 'ccSpecific': dict({ - 'scale': 0, - 'sensorType': 5, - }), - 'label': 'Humidity', - 'readable': True, - 'type': 'number', - 'unit': '%', - 'writeable': False, - }), - 'property': 'Humidity', - 'propertyName': 'Humidity', - 'value': 65, - }), - '52-49-0-Illuminance': dict({ - 'commandClass': 49, - 'commandClassName': 'Multilevel Sensor', - 'endpoint': 0, - 'metadata': dict({ - 'ccSpecific': dict({ - 'scale': 1, - 'sensorType': 3, - }), - 'label': 'Illuminance', - 'readable': True, - 'type': 'number', - 'unit': 'Lux', - 'writeable': False, - }), - 'property': 'Illuminance', - 'propertyName': 'Illuminance', - 'value': 0, - }), - '52-49-0-Ultraviolet': dict({ - 'commandClass': 49, - 'commandClassName': 'Multilevel Sensor', - 'endpoint': 0, - 'metadata': dict({ - 'ccSpecific': dict({ - 'scale': 0, - 'sensorType': 27, - }), - 'label': 'Ultraviolet', - 'readable': True, - 'type': 'number', - 'writeable': False, - }), - 'property': 'Ultraviolet', - 'propertyName': 'Ultraviolet', - 'value': 1, - }), - }), - 'version': 4, - 'zwavePlusVersion': 1, - }), - 'versionInfo': dict({ - 'driverVersion': '6.0.0-beta.0', - 'maxSchemaVersion': 0, - 'minSchemaVersion': 0, - 'serverVersion': '1.0.0', - }), - }) -# --- diff --git a/tests/components/zwave_js/test_api.py b/tests/components/zwave_js/test_api.py index df1adbc98e5..0437f9d9085 100644 --- a/tests/components/zwave_js/test_api.py +++ b/tests/components/zwave_js/test_api.py @@ -5,7 +5,7 @@ from http import HTTPStatus from io import BytesIO import json from typing import Any -from unittest.mock import PropertyMock, patch +from unittest.mock import patch import pytest from zwave_js_server.const import ( @@ -78,16 +78,9 @@ from homeassistant.components.zwave_js.api import ( TYPE, UUID, VALUE, - VALUE_FORMAT, - VALUE_SIZE, VERSION, ) from homeassistant.components.zwave_js.const import ( - ATTR_COMMAND_CLASS, - ATTR_ENDPOINT, - ATTR_METHOD_NAME, - ATTR_PARAMETERS, - ATTR_WAIT_FOR_RESULT, CONF_DATA_COLLECTION_OPTED_IN, DOMAIN, ) @@ -95,7 +88,7 @@ from homeassistant.components.zwave_js.helpers import get_device_id from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from tests.common import MockConfigEntry, MockUser +from tests.common import MockUser from tests.typing import ClientSessionGenerator, WebSocketGenerator CONTROLLER_PATCH_PREFIX = "zwave_js_server.model.controller.Controller" @@ -496,7 +489,6 @@ 
async def test_node_alerts( async def test_add_node( hass: HomeAssistant, - nortek_thermostat, nortek_thermostat_added_event, integration, client, @@ -532,7 +524,7 @@ async def test_add_node( data={ "source": "controller", "event": "inclusion started", - "strategy": 2, + "secure": False, }, ) client.driver.receive_event(event) @@ -598,7 +590,6 @@ async def test_add_node( "status": 0, "ready": False, "low_security": False, - "low_security_reason": None, } assert msg["event"]["node"] == node_details @@ -944,46 +935,12 @@ async def test_add_node( assert msg["error"]["code"] == "zwave_error" assert msg["error"]["message"] == "zwave_error: Z-Wave error 1 - error message" - # Test inclusion already in progress - client.async_send_command.reset_mock() - type(client.driver.controller).inclusion_state = PropertyMock( - return_value=InclusionState.INCLUDING - ) - - # Create a node that's not ready - node_data = deepcopy(nortek_thermostat.data) # Copy to allow modification in tests. - node_data["ready"] = False - node_data["values"] = {} - node_data["endpoints"] = {} - node = Node(client, node_data) - client.driver.controller.nodes[node.node_id] = node - - await ws_client.send_json( - { - ID: 11, - TYPE: "zwave_js/add_node", - ENTRY_ID: entry.entry_id, - INCLUSION_STRATEGY: InclusionStrategy.DEFAULT.value, - } - ) - - msg = await ws_client.receive_json() - assert msg["success"] - - # Verify no command was sent since inclusion is already in progress - assert len(client.async_send_command.call_args_list) == 0 - - # Verify we got a node added event - msg = await ws_client.receive_json() - assert msg["event"]["event"] == "node added" - assert msg["event"]["node"]["node_id"] == node.node_id - # Test sending command with not loaded entry fails await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() await ws_client.send_json( - {ID: 12, TYPE: "zwave_js/add_node", ENTRY_ID: entry.entry_id} + {ID: 11, TYPE: "zwave_js/add_node", ENTRY_ID: entry.entry_id} ) msg = await ws_client.receive_json() @@ -1865,7 +1822,7 @@ async def test_replace_failed_node( data={ "source": "controller", "event": "inclusion started", - "strategy": 2, + "secure": False, }, ) client.driver.receive_event(event) @@ -3091,21 +3048,9 @@ async def test_get_config_parameters( assert result[key]["property"] == 2 assert result[key]["property_key"] is None assert result[key]["endpoint"] == 0 + assert result[key]["metadata"]["type"] == "number" assert result[key]["configuration_value_type"] == "enumerated" assert result[key]["metadata"]["states"] - assert ( - result[key]["metadata"]["description"] - == "Stay awake for 10 minutes at power on" - ) - assert result[key]["metadata"]["label"] == "Stay Awake in Battery Mode" - assert result[key]["metadata"]["type"] == "number" - assert result[key]["metadata"]["min"] == 0 - assert result[key]["metadata"]["max"] == 1 - assert result[key]["metadata"]["unit"] is None - assert result[key]["metadata"]["writeable"] is True - assert result[key]["metadata"]["readable"] is True - assert result[key]["metadata"]["default"] == 0 - assert result[key]["value"] == 0 key = "52-112-0-201-255" assert result[key]["property_key"] == 255 @@ -3139,180 +3084,6 @@ async def test_get_config_parameters( assert msg["error"]["code"] == ERR_NOT_LOADED -async def test_set_raw_config_parameter( - hass: HomeAssistant, - client, - multisensor_6, - integration, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test that the set_raw_config_parameter WS API call works.""" - entry = integration - 
ws_client = await hass_ws_client(hass) - device = get_device(hass, multisensor_6) - - # Change from async_send_command to async_send_command_no_wait - client.async_send_command_no_wait.return_value = None - - # Test setting a raw config parameter value - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/set_raw_config_parameter", - DEVICE_ID: device.id, - PROPERTY: 102, - VALUE: 1, - VALUE_SIZE: 2, - VALUE_FORMAT: 1, - } - ) - - msg = await ws_client.receive_json() - assert msg["success"] - assert msg["result"]["status"] == "queued" - - assert len(client.async_send_command_no_wait.call_args_list) == 1 - args = client.async_send_command_no_wait.call_args[0][0] - assert args["command"] == "endpoint.set_raw_config_parameter_value" - assert args["nodeId"] == multisensor_6.node_id - assert args["options"]["parameter"] == 102 - assert args["options"]["value"] == 1 - assert args["options"]["valueSize"] == 2 - assert args["options"]["valueFormat"] == 1 - - # Reset the mock for async_send_command_no_wait instead - client.async_send_command_no_wait.reset_mock() - - # Test getting non-existent node fails - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/set_raw_config_parameter", - DEVICE_ID: "fake_device", - PROPERTY: 102, - VALUE: 1, - VALUE_SIZE: 2, - VALUE_FORMAT: 1, - } - ) - msg = await ws_client.receive_json() - assert not msg["success"] - assert msg["error"]["code"] == ERR_NOT_FOUND - - # Test sending command with not loaded entry fails - await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/set_raw_config_parameter", - DEVICE_ID: device.id, - PROPERTY: 102, - VALUE: 1, - VALUE_SIZE: 2, - VALUE_FORMAT: 1, - } - ) - msg = await ws_client.receive_json() - - assert not msg["success"] - assert msg["error"]["code"] == ERR_NOT_LOADED - - -async def test_get_raw_config_parameter( - hass: HomeAssistant, - multisensor_6, - integration, - client, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test the get_raw_config_parameter websocket command.""" - entry = integration - ws_client = await hass_ws_client(hass) - device = get_device(hass, multisensor_6) - - client.async_send_command.return_value = {"value": 1} - - # Test getting a raw config parameter value - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/get_raw_config_parameter", - DEVICE_ID: device.id, - PROPERTY: 102, - } - ) - - msg = await ws_client.receive_json() - assert msg["success"] - assert msg["result"]["value"] == 1 - - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args[0][0] - assert args["command"] == "endpoint.get_raw_config_parameter_value" - assert args["nodeId"] == multisensor_6.node_id - assert args["options"]["parameter"] == 102 - - client.async_send_command.reset_mock() - - # Test FailedZWaveCommand is caught - with patch( - "zwave_js_server.model.node.Node.async_get_raw_config_parameter_value", - side_effect=FailedZWaveCommand("failed_command", 1, "error message"), - ): - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/get_raw_config_parameter", - DEVICE_ID: device.id, - PROPERTY: 102, - } - ) - msg = await ws_client.receive_json() - - assert not msg["success"] - assert msg["error"]["code"] == "zwave_error" - assert msg["error"]["message"] == "zwave_error: Z-Wave error 1 - error message" - - # Test getting non-existent node fails - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/get_raw_config_parameter", - DEVICE_ID: 
"fake_device", - PROPERTY: 102, - } - ) - msg = await ws_client.receive_json() - assert not msg["success"] - assert msg["error"]["code"] == ERR_NOT_FOUND - - # Test FailedCommand exception - client.async_send_command.side_effect = FailedCommand("test", "test") - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/get_raw_config_parameter", - DEVICE_ID: device.id, - PROPERTY: 102, - } - ) - msg = await ws_client.receive_json() - assert not msg["success"] - assert msg["error"]["code"] == "test" - assert msg["error"]["message"] == "Command failed: test" - - # Test sending command with not loaded entry fails - await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/get_raw_config_parameter", - DEVICE_ID: device.id, - PROPERTY: 102, - } - ) - msg = await ws_client.receive_json() - - assert not msg["success"] - assert msg["error"]["code"] == ERR_NOT_LOADED - - @pytest.mark.parametrize( ("firmware_data", "expected_data"), [({"target": "1"}, {"firmware_target": 1}), ({}, {})], @@ -5009,157 +4780,3 @@ async def test_hard_reset_controller( assert not msg["success"] assert msg["error"]["code"] == ERR_NOT_FOUND - - -async def test_node_capabilities( - hass: HomeAssistant, - multisensor_6: Node, - integration: MockConfigEntry, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test the node_capabilities websocket command.""" - entry = integration - ws_client = await hass_ws_client(hass) - - node = multisensor_6 - device = get_device(hass, node) - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/node_capabilities", - DEVICE_ID: device.id, - } - ) - msg = await ws_client.receive_json() - assert msg["result"] == { - "0": [ - { - "id": 113, - "name": "Notification", - "version": 8, - "isSecure": False, - "is_secure": False, - } - ] - } - - # Test getting non-existent node fails - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/node_status", - DEVICE_ID: "fake_device", - } - ) - msg = await ws_client.receive_json() - assert not msg["success"] - assert msg["error"]["code"] == ERR_NOT_FOUND - - # Test sending command with not loaded entry fails - await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/node_status", - DEVICE_ID: device.id, - } - ) - msg = await ws_client.receive_json() - - assert not msg["success"] - assert msg["error"]["code"] == ERR_NOT_LOADED - - -async def test_invoke_cc_api( - hass: HomeAssistant, - client, - climate_radio_thermostat_ct100_plus_different_endpoints: Node, - integration: MockConfigEntry, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test the invoke_cc_api websocket command.""" - ws_client = await hass_ws_client(hass) - - device_radio_thermostat = get_device( - hass, climate_radio_thermostat_ct100_plus_different_endpoints - ) - assert device_radio_thermostat - - # Test successful invoke_cc_api call with a static endpoint - client.async_send_command.return_value = {"response": True} - client.async_send_command_no_wait.return_value = {"response": True} - - # Test with wait_for_result=False (default) - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/invoke_cc_api", - DEVICE_ID: device_radio_thermostat.id, - ATTR_COMMAND_CLASS: 67, - ATTR_METHOD_NAME: "someMethod", - ATTR_PARAMETERS: [1, 2], - } - ) - msg = await ws_client.receive_json() - assert msg["success"] - assert msg["result"] is None # We did not specify wait_for_result=True - - await 
hass.async_block_till_done() - - assert len(client.async_send_command_no_wait.call_args_list) == 1 - args = client.async_send_command_no_wait.call_args[0][0] - assert args == { - "command": "endpoint.invoke_cc_api", - "nodeId": 26, - "endpoint": 0, - "commandClass": 67, - "methodName": "someMethod", - "args": [1, 2], - } - - client.async_send_command_no_wait.reset_mock() - - # Test with wait_for_result=True - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/invoke_cc_api", - DEVICE_ID: device_radio_thermostat.id, - ATTR_COMMAND_CLASS: 67, - ATTR_ENDPOINT: 0, - ATTR_METHOD_NAME: "someMethod", - ATTR_PARAMETERS: [1, 2], - ATTR_WAIT_FOR_RESULT: True, - } - ) - msg = await ws_client.receive_json() - assert msg["success"] - assert msg["result"] is True - - await hass.async_block_till_done() - - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args[0][0] - assert args == { - "command": "endpoint.invoke_cc_api", - "nodeId": 26, - "endpoint": 0, - "commandClass": 67, - "methodName": "someMethod", - "args": [1, 2], - } - - client.async_send_command.side_effect = NotFoundError - - # Ensure an error is returned - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/invoke_cc_api", - DEVICE_ID: device_radio_thermostat.id, - ATTR_COMMAND_CLASS: 67, - ATTR_ENDPOINT: 0, - ATTR_METHOD_NAME: "someMethod", - ATTR_PARAMETERS: [1, 2], - ATTR_WAIT_FOR_RESULT: True, - } - ) - msg = await ws_client.receive_json() - assert not msg["success"] - assert msg["error"] == {"code": "NotFoundError", "message": ""} diff --git a/tests/components/zwave_js/test_climate.py b/tests/components/zwave_js/test_climate.py index 5d711528a28..9a4559de1a5 100644 --- a/tests/components/zwave_js/test_climate.py +++ b/tests/components/zwave_js/test_climate.py @@ -812,8 +812,8 @@ async def test_thermostat_heatit_z_trm2fx( | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - assert state.attributes[ATTR_MIN_TEMP] == 0 - assert state.attributes[ATTR_MAX_TEMP] == 50 + assert state.attributes[ATTR_MIN_TEMP] == 7 + assert state.attributes[ATTR_MAX_TEMP] == 35 # Try switching to external sensor event = Event( diff --git a/tests/components/zwave_js/test_config_flow.py b/tests/components/zwave_js/test_config_flow.py index b60515cacd4..10fd5edfabb 100644 --- a/tests/components/zwave_js/test_config_flow.py +++ b/tests/components/zwave_js/test_config_flow.py @@ -1,28 +1,25 @@ """Test the Z-Wave JS config flow.""" import asyncio -from collections.abc import Generator from copy import copy from ipaddress import ip_address -from typing import Any -from unittest.mock import AsyncMock, MagicMock, call, patch -from uuid import uuid4 +from unittest.mock import DEFAULT, MagicMock, call, patch -from aiohasupervisor import SupervisorError -from aiohasupervisor.models import AddonsOptions, Discovery import aiohttp import pytest from serial.tools.list_ports_common import ListPortInfo +from typing_extensions import Generator from zwave_js_server.version import VersionInfo from homeassistant import config_entries from homeassistant.components import usb +from homeassistant.components.hassio import HassioServiceInfo +from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.components.zwave_js.config_flow import SERVER_VERSION_TIMEOUT, TITLE from homeassistant.components.zwave_js.const import ADDON_SLUG, DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import 
FlowResultType -from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry @@ -62,7 +59,7 @@ CP2652_ZIGBEE_DISCOVERY_INFO = usb.UsbServiceInfo( @pytest.fixture(name="setup_entry") -def setup_entry_fixture() -> Generator[AsyncMock]: +def setup_entry_fixture(): """Mock entry setup.""" with patch( "homeassistant.components.zwave_js.async_setup_entry", return_value=True @@ -71,7 +68,7 @@ def setup_entry_fixture() -> Generator[AsyncMock]: @pytest.fixture(name="supervisor") -def mock_supervisor_fixture() -> Generator[None]: +def mock_supervisor_fixture(): """Mock Supervisor.""" with patch( "homeassistant.components.zwave_js.config_flow.is_hassio", return_value=True @@ -79,16 +76,37 @@ def mock_supervisor_fixture() -> Generator[None]: yield +@pytest.fixture(name="discovery_info") +def discovery_info_fixture(): + """Return the discovery info from the supervisor.""" + return DEFAULT + + +@pytest.fixture(name="discovery_info_side_effect") +def discovery_info_side_effect_fixture(): + """Return the discovery info from the supervisor.""" + return None + + +@pytest.fixture(name="get_addon_discovery_info") +def mock_get_addon_discovery_info(discovery_info, discovery_info_side_effect): + """Mock get add-on discovery info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info", + side_effect=discovery_info_side_effect, + return_value=discovery_info, + ) as get_addon_discovery_info: + yield get_addon_discovery_info + + @pytest.fixture(name="server_version_side_effect") -def server_version_side_effect_fixture() -> Any | None: +def server_version_side_effect_fixture(): """Return the server version side effect.""" return None @pytest.fixture(name="get_server_version", autouse=True) -def mock_get_server_version( - server_version_side_effect: Any | None, server_version_timeout: int -) -> Generator[AsyncMock]: +def mock_get_server_version(server_version_side_effect, server_version_timeout): """Mock server version.""" version_info = VersionInfo( driver_version="mock-driver-version", @@ -112,18 +130,18 @@ def mock_get_server_version( @pytest.fixture(name="server_version_timeout") -def mock_server_version_timeout() -> int: +def mock_server_version_timeout(): """Patch the timeout for getting server version.""" return SERVER_VERSION_TIMEOUT @pytest.fixture(name="addon_setup_time", autouse=True) -def mock_addon_setup_time() -> Generator[None]: +def mock_addon_setup_time(): """Mock add-on setup sleep time.""" with patch( "homeassistant.components.zwave_js.config_flow.ADDON_SETUP_TIMEOUT", new=0 - ): - yield + ) as addon_setup_time: + yield addon_setup_time @pytest.fixture(name="serial_port") @@ -556,19 +574,7 @@ async def test_abort_hassio_discovery_for_other_addon( assert result2["reason"] == "not_zwave_js_addon" -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_usb_discovery( hass: HomeAssistant, supervisor, @@ -598,7 +604,7 @@ async def test_usb_discovery( result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call("core_zwave_js") + assert install_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_addon" @@ -616,9 +622,10 @@ async def test_usb_discovery( ) assert 
set_addon_options.call_args == call( + hass, "core_zwave_js", - AddonsOptions( - config={ + { + "options": { "device": USB_DISCOVERY_INFO.device, "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -627,7 +634,7 @@ async def test_usb_discovery( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - ), + }, ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -646,7 +653,7 @@ async def test_usb_discovery( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call("core_zwave_js") + assert start_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == TITLE @@ -666,19 +673,7 @@ async def test_usb_discovery( assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_usb_discovery_addon_not_running( hass: HomeAssistant, supervisor, @@ -728,9 +723,10 @@ async def test_usb_discovery_addon_not_running( ) assert set_addon_options.call_args == call( + hass, "core_zwave_js", - AddonsOptions( - config={ + { + "options": { "device": USB_DISCOVERY_INFO.device, "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -739,7 +735,7 @@ async def test_usb_discovery_addon_not_running( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - ), + }, ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -758,7 +754,7 @@ async def test_usb_discovery_addon_not_running( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call("core_zwave_js") + assert start_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == TITLE @@ -822,9 +818,10 @@ async def test_discovery_addon_not_running( ) assert set_addon_options.call_args == call( + hass, "core_zwave_js", - AddonsOptions( - config={ + { + "options": { "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -833,7 +830,7 @@ async def test_discovery_addon_not_running( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - ), + }, ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -852,7 +849,7 @@ async def test_discovery_addon_not_running( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call("core_zwave_js") + assert start_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == TITLE @@ -905,7 +902,7 @@ async def test_discovery_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call("core_zwave_js") + assert install_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_addon" @@ -924,9 +921,10 @@ async def test_discovery_addon_not_installed( ) assert set_addon_options.call_args == call( + hass, "core_zwave_js", - AddonsOptions( - config={ + { + "options": { "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -935,7 
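# The hunks above switch between two calling conventions for configuring the add-on. A compact
# sketch of the assertion style used on this side of the diff (mock and placeholder objects are
# illustrative; in the tests, hass and set_addon_options come from fixtures):
from unittest.mock import MagicMock, call

set_addon_options = MagicMock()   # stands in for the Supervisor helper mock
hass = object()                   # stands in for the HomeAssistant instance
new_options = {"device": "/test", "s0_legacy_key": "new123"}

# Reverted (handler-based) convention: hass first, options in a plain dict under "options".
set_addon_options(hass, "core_zwave_js", {"options": new_options})
assert set_addon_options.call_args == call(hass, "core_zwave_js", {"options": new_options})

# Convention on the other side of the diff: no hass argument, options wrapped in the
# aiohasupervisor model:
#     from aiohasupervisor.models import AddonsOptions
#     call("core_zwave_js", AddonsOptions(config=new_options))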
+933,7 @@ async def test_discovery_addon_not_installed( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - ), + }, ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -954,7 +952,7 @@ async def test_discovery_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call("core_zwave_js") + assert start_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == TITLE @@ -1115,19 +1113,7 @@ async def test_not_addon(hass: HomeAssistant, supervisor) -> None: assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_addon_running( hass: HomeAssistant, supervisor, @@ -1193,52 +1179,31 @@ async def test_addon_running( ), [ ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], - SupervisorError(), + {"config": ADDON_DISCOVERY_INFO}, + HassioAPIError(), None, None, "addon_get_discovery_info_failed", ), ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, None, TimeoutError, None, "cannot_connect", ), ( - [], + None, None, None, None, "addon_get_discovery_info_failed", ), ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, None, None, - SupervisorError(), + HassioAPIError(), "addon_info_failed", ), ], @@ -1270,19 +1235,7 @@ async def test_addon_running_failures( assert result["reason"] == abort_reason -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_addon_running_already_configured( hass: HomeAssistant, supervisor, @@ -1341,19 +1294,7 @@ async def test_addon_running_already_configured( assert entry.data["lr_s2_authenticated_key"] == "new321" -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_addon_installed( hass: HomeAssistant, supervisor, @@ -1393,9 +1334,10 @@ async def test_addon_installed( ) assert set_addon_options.call_args == call( + hass, "core_zwave_js", - AddonsOptions( - config={ + { + "options": { "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1404,7 +1346,7 @@ async def test_addon_installed( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - ), + }, ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1423,7 +1365,7 @@ async def test_addon_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call("core_zwave_js") + assert start_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert 
result["title"] == TITLE @@ -1445,17 +1387,7 @@ async def test_addon_installed( @pytest.mark.parametrize( ("discovery_info", "start_addon_side_effect"), - [ - ( - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ), - SupervisorError(), - ) - ], + [({"config": ADDON_DISCOVERY_INFO}, HassioAPIError())], ) async def test_addon_installed_start_failure( hass: HomeAssistant, @@ -1496,9 +1428,10 @@ async def test_addon_installed_start_failure( ) assert set_addon_options.call_args == call( + hass, "core_zwave_js", - AddonsOptions( - config={ + { + "options": { "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1507,7 +1440,7 @@ async def test_addon_installed_start_failure( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - ), + }, ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1516,7 +1449,7 @@ async def test_addon_installed_start_failure( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert start_addon.call_args == call("core_zwave_js") + assert start_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_start_failed" @@ -1526,18 +1459,11 @@ async def test_addon_installed_start_failure( ("discovery_info", "server_version_side_effect"), [ ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, TimeoutError, ), ( - [], + None, None, ), ], @@ -1581,9 +1507,10 @@ async def test_addon_installed_failures( ) assert set_addon_options.call_args == call( + hass, "core_zwave_js", - AddonsOptions( - config={ + { + "options": { "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1592,7 +1519,7 @@ async def test_addon_installed_failures( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - ), + }, ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1601,7 +1528,7 @@ async def test_addon_installed_failures( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert start_addon.call_args == call("core_zwave_js") + assert start_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_start_failed" @@ -1609,19 +1536,7 @@ async def test_addon_installed_failures( @pytest.mark.parametrize( ("set_addon_options_side_effect", "discovery_info"), - [ - ( - SupervisorError(), - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], - ) - ], + [(HassioAPIError(), {"config": ADDON_DISCOVERY_INFO})], ) async def test_addon_installed_set_options_failure( hass: HomeAssistant, @@ -1662,9 +1577,10 @@ async def test_addon_installed_set_options_failure( ) assert set_addon_options.call_args == call( + hass, "core_zwave_js", - AddonsOptions( - config={ + { + "options": { "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1673,7 +1589,7 @@ async def test_addon_installed_set_options_failure( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - ), + }, ) assert result["type"] is FlowResultType.ABORT @@ -1682,19 +1598,7 @@ async def test_addon_installed_set_options_failure( assert start_addon.call_count == 0 -@pytest.mark.parametrize( - "discovery_info", - 
[ - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_addon_installed_already_configured( hass: HomeAssistant, supervisor, @@ -1751,9 +1655,10 @@ async def test_addon_installed_already_configured( ) assert set_addon_options.call_args == call( + hass, "core_zwave_js", - AddonsOptions( - config={ + { + "options": { "device": "/new", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1762,7 +1667,7 @@ async def test_addon_installed_already_configured( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - ), + }, ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1771,7 +1676,7 @@ async def test_addon_installed_already_configured( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert start_addon.call_args == call("core_zwave_js") + assert start_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" @@ -1785,19 +1690,7 @@ async def test_addon_installed_already_configured( assert entry.data["lr_s2_authenticated_key"] == "new321" -@pytest.mark.parametrize( - "discovery_info", - [ - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ] - ], -) +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) async def test_addon_not_installed( hass: HomeAssistant, supervisor, @@ -1828,7 +1721,7 @@ async def test_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call("core_zwave_js") + assert install_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_addon" @@ -1847,9 +1740,10 @@ async def test_addon_not_installed( ) assert set_addon_options.call_args == call( + hass, "core_zwave_js", - AddonsOptions( - config={ + { + "options": { "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1858,7 +1752,7 @@ async def test_addon_not_installed( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - ), + }, ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1877,7 +1771,7 @@ async def test_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert start_addon.call_args == call("core_zwave_js") + assert start_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == TITLE @@ -1901,7 +1795,7 @@ async def test_install_addon_failure( hass: HomeAssistant, supervisor, addon_not_installed, install_addon ) -> None: """Test add-on install failure.""" - install_addon.side_effect = SupervisorError() + install_addon.side_effect = HassioAPIError() result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -1921,7 +1815,7 @@ async def test_install_addon_failure( result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call("core_zwave_js") + assert install_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" @@ -2022,14 
+1916,7 @@ async def test_options_not_addon( ), [ ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, {}, { "device": "/test", @@ -2055,14 +1942,7 @@ async def test_options_not_addon( 0, ), ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, {"use_addon": True}, { "device": "/test", @@ -2135,8 +2015,9 @@ async def test_options_addon_running( new_addon_options["device"] = new_addon_options.pop("usb_path") assert set_addon_options.call_args == call( + hass, "core_zwave_js", - AddonsOptions(config=new_addon_options), + {"options": new_addon_options}, ) assert client.disconnect.call_count == disconnect_calls @@ -2147,7 +2028,7 @@ async def test_options_addon_running( result = await hass.config_entries.options.async_configure(result["flow_id"]) await hass.async_block_till_done() - assert restart_addon.call_args == call("core_zwave_js") + assert restart_addon.call_args == call(hass, "core_zwave_js") assert result["type"] is FlowResultType.CREATE_ENTRY assert entry.data["url"] == "ws://host1:3001" @@ -2182,14 +2063,7 @@ async def test_options_addon_running( ("discovery_info", "entry_data", "old_addon_options", "new_addon_options"), [ ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, {}, { "device": "/test", @@ -2316,14 +2190,7 @@ async def different_device_server_version(*args): ), [ ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, {}, { "device": "/test", @@ -2352,14 +2219,7 @@ async def different_device_server_version(*args): different_device_server_version, ), ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, {}, { "device": "/test", @@ -2436,7 +2296,9 @@ async def test_options_different_device( assert set_addon_options.call_count == 1 new_addon_options["device"] = new_addon_options.pop("usb_path") assert set_addon_options.call_args == call( - "core_zwave_js", AddonsOptions(config=new_addon_options) + hass, + "core_zwave_js", + {"options": new_addon_options}, ) assert client.disconnect.call_count == disconnect_calls assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -2445,7 +2307,7 @@ async def test_options_different_device( await hass.async_block_till_done() assert restart_addon.call_count == 1 - assert restart_addon.call_args == call("core_zwave_js") + assert restart_addon.call_args == call(hass, "core_zwave_js") result = await hass.config_entries.options.async_configure(result["flow_id"]) await hass.async_block_till_done() @@ -2457,7 +2319,9 @@ async def test_options_different_device( assert set_addon_options.call_count == 2 assert set_addon_options.call_args == call( - "core_zwave_js", AddonsOptions(config=addon_options) + hass, + "core_zwave_js", + {"options": addon_options}, ) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -2465,7 +2329,7 @@ async def test_options_different_device( await hass.async_block_till_done() assert restart_addon.call_count == 2 - assert restart_addon.call_args == call("core_zwave_js") + assert restart_addon.call_args == call(hass, "core_zwave_js") result = await 
hass.config_entries.options.async_configure(result["flow_id"]) await hass.async_block_till_done() @@ -2488,14 +2352,7 @@ async def test_options_different_device( ), [ ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, {}, { "device": "/test", @@ -2521,17 +2378,10 @@ async def test_options_different_device( "emulate_hardware": False, }, 0, - [SupervisorError(), None], + [HassioAPIError(), None], ), ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, {}, { "device": "/test", @@ -2558,8 +2408,8 @@ async def test_options_different_device( }, 0, [ - SupervisorError(), - SupervisorError(), + HassioAPIError(), + HassioAPIError(), ], ), ], @@ -2612,7 +2462,9 @@ async def test_options_addon_restart_failed( assert set_addon_options.call_count == 1 new_addon_options["device"] = new_addon_options.pop("usb_path") assert set_addon_options.call_args == call( - "core_zwave_js", AddonsOptions(config=new_addon_options) + hass, + "core_zwave_js", + {"options": new_addon_options}, ) assert client.disconnect.call_count == disconnect_calls assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -2621,7 +2473,7 @@ async def test_options_addon_restart_failed( await hass.async_block_till_done() assert restart_addon.call_count == 1 - assert restart_addon.call_args == call("core_zwave_js") + assert restart_addon.call_args == call(hass, "core_zwave_js") result = await hass.config_entries.options.async_configure(result["flow_id"]) await hass.async_block_till_done() @@ -2630,7 +2482,9 @@ async def test_options_addon_restart_failed( old_addon_options.pop("network_key") assert set_addon_options.call_count == 2 assert set_addon_options.call_args == call( - "core_zwave_js", AddonsOptions(config=old_addon_options) + hass, + "core_zwave_js", + {"options": old_addon_options}, ) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -2638,7 +2492,7 @@ async def test_options_addon_restart_failed( await hass.async_block_till_done() assert restart_addon.call_count == 2 - assert restart_addon.call_args == call("core_zwave_js") + assert restart_addon.call_args == call(hass, "core_zwave_js") result = await hass.config_entries.options.async_configure(result["flow_id"]) await hass.async_block_till_done() @@ -2661,14 +2515,7 @@ async def test_options_addon_restart_failed( ), [ ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, {}, { "device": "/test", @@ -2761,14 +2608,7 @@ async def test_options_addon_running_server_info_failure( ), [ ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, {}, { "device": "/test", @@ -2794,14 +2634,7 @@ async def test_options_addon_running_server_info_failure( 0, ), ( - [ - Discovery( - addon="core_zwave_js", - service="zwave_js", - uuid=uuid4(), - config=ADDON_DISCOVERY_INFO, - ) - ], + {"config": ADDON_DISCOVERY_INFO}, {"use_addon": True}, { "device": "/test", @@ -2873,7 +2706,7 @@ async def test_options_addon_not_installed( result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert install_addon.call_args == call("core_zwave_js") + assert install_addon.call_args == call(hass, "core_zwave_js") assert 
result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_addon" @@ -2885,7 +2718,9 @@ async def test_options_addon_not_installed( new_addon_options["device"] = new_addon_options.pop("usb_path") assert set_addon_options.call_args == call( - "core_zwave_js", AddonsOptions(config=new_addon_options) + hass, + "core_zwave_js", + {"options": new_addon_options}, ) assert client.disconnect.call_count == disconnect_calls @@ -2895,7 +2730,7 @@ async def test_options_addon_not_installed( await hass.async_block_till_done() assert start_addon.call_count == 1 - assert start_addon.call_args == call("core_zwave_js") + assert start_addon.call_args == call(hass, "core_zwave_js") result = await hass.config_entries.options.async_configure(result["flow_id"]) await hass.async_block_till_done() @@ -2911,6 +2746,104 @@ async def test_options_addon_not_installed( assert client.disconnect.call_count == 1 +@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +async def test_import_addon_installed( + hass: HomeAssistant, + supervisor, + addon_installed, + addon_options, + set_addon_options, + start_addon, + get_addon_discovery_info, + serial_port, +) -> None: + """Test import step while add-on already installed on Supervisor.""" + serial_port.device = "/test/imported" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={"usb_path": "/test/imported", "network_key": "imported123"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "on_supervisor" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"use_addon": True} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "configure_addon" + + # the default input should be the imported data + default_input = result["data_schema"]({}) + + assert default_input == { + "usb_path": "/test/imported", + "s0_legacy_key": "imported123", + "s2_access_control_key": "", + "s2_authenticated_key": "", + "s2_unauthenticated_key": "", + "lr_s2_access_control_key": "", + "lr_s2_authenticated_key": "", + } + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], default_input + ) + + assert set_addon_options.call_args == call( + hass, + "core_zwave_js", + { + "options": { + "device": "/test/imported", + "s0_legacy_key": "imported123", + "s2_access_control_key": "", + "s2_authenticated_key": "", + "s2_unauthenticated_key": "", + "lr_s2_access_control_key": "", + "lr_s2_authenticated_key": "", + } + }, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_addon" + + with ( + patch( + "homeassistant.components.zwave_js.async_setup", return_value=True + ) as mock_setup, + patch( + "homeassistant.components.zwave_js.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() + + assert start_addon.call_args == call(hass, "core_zwave_js") + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TITLE + assert result["data"] == { + "url": "ws://host1:3001", + "usb_path": "/test/imported", + "s0_legacy_key": "imported123", + "s2_access_control_key": "", + "s2_authenticated_key": "", + "s2_unauthenticated_key": "", + "lr_s2_access_control_key": "", + "lr_s2_authenticated_key": "", + "use_addon": True, + "integration_created_addon": 
False, + } + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + async def test_zeroconf(hass: HomeAssistant) -> None: """Test zeroconf discovery.""" diff --git a/tests/components/zwave_js/test_config_validation.py b/tests/components/zwave_js/test_config_validation.py index cebbde3c9b1..8428972bde1 100644 --- a/tests/components/zwave_js/test_config_validation.py +++ b/tests/components/zwave_js/test_config_validation.py @@ -1,31 +1,27 @@ """Test the Z-Wave JS config validation helpers.""" -from typing import Any - import pytest import voluptuous as vol -from homeassistant.components.zwave_js.config_validation import VALUE_SCHEMA, boolean +from homeassistant.components.zwave_js.config_validation import boolean -@pytest.mark.parametrize( - ("test_cases", "expected_value"), - [ - ([True, "true", "yes", "on", "ON", "enable"], True), - ([False, "false", "no", "off", "NO", "disable"], False), - ([1.1, "1.1"], 1.1), - ([1.0, "1.0"], 1.0), - ([1, "1"], 1), - ], -) -def test_validation(test_cases: list[Any], expected_value: Any) -> None: - """Test config validation.""" - for case in test_cases: - assert VALUE_SCHEMA(case) == expected_value - - -@pytest.mark.parametrize("value", ["invalid", "1", "0", 1, 0]) -def test_invalid_boolean_validation(value: str | int) -> None: - """Test invalid cases for boolean config validator.""" +def test_boolean_validation() -> None: + """Test boolean config validator.""" + # test bool + assert boolean(True) + assert not boolean(False) + # test strings + assert boolean("TRUE") + assert not boolean("FALSE") + assert boolean("ON") + assert not boolean("NO") + # ensure 1's and 0's don't get converted to bool with pytest.raises(vol.Invalid): - boolean(value) + boolean("1") + with pytest.raises(vol.Invalid): + boolean("0") + with pytest.raises(vol.Invalid): + boolean(1) + with pytest.raises(vol.Invalid): + boolean(0) diff --git a/tests/components/zwave_js/test_cover.py b/tests/components/zwave_js/test_cover.py index b13d4f9787f..4ecd697f4d1 100644 --- a/tests/components/zwave_js/test_cover.py +++ b/tests/components/zwave_js/test_cover.py @@ -15,7 +15,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN as COVER_DOMAIN, + DOMAIN, SERVICE_CLOSE_COVER, SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, @@ -26,7 +26,6 @@ from homeassistant.components.cover import ( SERVICE_STOP_COVER_TILT, CoverDeviceClass, CoverEntityFeature, - CoverState, ) from homeassistant.components.zwave_js.const import LOGGER from homeassistant.components.zwave_js.helpers import ZwaveValueMatcher @@ -34,6 +33,10 @@ from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -60,12 +63,12 @@ async def test_window_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.WINDOW - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 # Test setting position await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY, ATTR_POSITION: 50}, blocking=True, @@ -86,7 +89,7 @@ async def test_window_cover( # Test setting position await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY, ATTR_POSITION: 0}, blocking=True, @@ -107,7 
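# Quick reference for the Z-Wave JS boolean config validator exercised in the hunk above:
# common textual true/false values coerce, but numeric 0/1 (as int or string) are rejected so
# they are never silently turned into booleans.
import pytest
import voluptuous as vol

from homeassistant.components.zwave_js.config_validation import boolean

assert boolean("on")
assert not boolean("no")
with pytest.raises(vol.Invalid):
    boolean(1)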
+110,7 @@ async def test_window_cover( # Test opening await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY}, blocking=True, @@ -128,7 +131,7 @@ async def test_window_cover( # Test stop after opening await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY}, blocking=True, @@ -167,11 +170,11 @@ async def test_window_cover( client.async_send_command.reset_mock() state = hass.states.get(WINDOW_COVER_ENTITY) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN # Test closing await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY}, blocking=True, @@ -191,7 +194,7 @@ async def test_window_cover( # Test stop after closing await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY}, blocking=True, @@ -230,7 +233,7 @@ async def test_window_cover( node.receive_event(event) state = hass.states.get(WINDOW_COVER_ENTITY) - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED async def test_fibaro_fgr222_shutter_cover( @@ -241,12 +244,12 @@ async def test_fibaro_fgr222_shutter_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.SHUTTER - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 # Test opening tilts await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: FIBARO_FGR_222_SHUTTER_COVER_ENTITY}, blocking=True, @@ -268,7 +271,7 @@ async def test_fibaro_fgr222_shutter_cover( # Test closing tilts await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: FIBARO_FGR_222_SHUTTER_COVER_ENTITY}, blocking=True, @@ -290,7 +293,7 @@ async def test_fibaro_fgr222_shutter_cover( # Test setting tilt position await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: FIBARO_FGR_222_SHUTTER_COVER_ENTITY, ATTR_TILT_POSITION: 12}, blocking=True, @@ -342,12 +345,12 @@ async def test_fibaro_fgr223_shutter_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.SHUTTER - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 # Test opening tilts await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: FIBARO_FGR_223_SHUTTER_COVER_ENTITY}, blocking=True, @@ -367,7 +370,7 @@ async def test_fibaro_fgr223_shutter_cover( client.async_send_command.reset_mock() # Test closing tilts await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: FIBARO_FGR_223_SHUTTER_COVER_ENTITY}, blocking=True, @@ -387,7 +390,7 @@ async def test_fibaro_fgr223_shutter_cover( client.async_send_command.reset_mock() # Test setting tilt position await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: FIBARO_FGR_223_SHUTTER_COVER_ENTITY, ATTR_TILT_POSITION: 12}, blocking=True, @@ -438,12 +441,12 @@ async def test_aeotec_nano_shutter_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.WINDOW - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 # Test opening await hass.services.async_call( - 
COVER_DOMAIN, + DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: AEOTEC_SHUTTER_COVER_ENTITY}, blocking=True, @@ -464,7 +467,7 @@ async def test_aeotec_nano_shutter_cover( # Test stop after opening await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: AEOTEC_SHUTTER_COVER_ENTITY}, blocking=True, @@ -504,11 +507,11 @@ async def test_aeotec_nano_shutter_cover( client.async_send_command.reset_mock() state = hass.states.get(AEOTEC_SHUTTER_COVER_ENTITY) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN # Test closing await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: AEOTEC_SHUTTER_COVER_ENTITY}, blocking=True, @@ -528,7 +531,7 @@ async def test_aeotec_nano_shutter_cover( # Test stop after closing await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: AEOTEC_SHUTTER_COVER_ENTITY}, blocking=True, @@ -576,14 +579,11 @@ async def test_motor_barrier_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.GARAGE - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED # Test open await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: GDC_COVER_ENTITY}, - blocking=True, + DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: GDC_COVER_ENTITY}, blocking=True ) assert len(client.async_send_command.call_args_list) == 1 @@ -599,16 +599,13 @@ async def test_motor_barrier_cover( # state doesn't change until currentState value update is received state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED client.async_send_command.reset_mock() # Test close await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: GDC_COVER_ENTITY}, - blocking=True, + DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: GDC_COVER_ENTITY}, blocking=True ) assert len(client.async_send_command.call_args_list) == 1 @@ -624,7 +621,7 @@ async def test_motor_barrier_cover( # state doesn't change until currentState value update is received state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED client.async_send_command.reset_mock() @@ -649,7 +646,7 @@ async def test_motor_barrier_cover( node.receive_event(event) state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == CoverState.OPENING + assert state.state == STATE_OPENING # Barrier sends an opened state event = Event( @@ -672,7 +669,7 @@ async def test_motor_barrier_cover( node.receive_event(event) state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN # Barrier sends a closing state event = Event( @@ -695,7 +692,7 @@ async def test_motor_barrier_cover( node.receive_event(event) state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == CoverState.CLOSING + assert state.state == STATE_CLOSING # Barrier sends a closed state event = Event( @@ -718,7 +715,7 @@ async def test_motor_barrier_cover( node.receive_event(event) state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED # Barrier sends a stopped state event = Event( @@ -824,7 +821,7 @@ async def test_fibaro_fgr223_shutter_cover_no_tilt( state = hass.states.get(FIBARO_FGR_223_SHUTTER_COVER_ENTITY) assert state - assert state.state == CoverState.OPEN + assert state.state == STATE_OPEN assert ATTR_CURRENT_POSITION in 
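# On this side of the diff the cover domain is imported plainly as DOMAIN and states are
# compared against the STATE_* constants; the other side aliases it as COVER_DOMAIN and uses
# the CoverState enum. The service-call pattern the assertions hang off is the same either
# way; a minimal sketch (entity id illustrative):
from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, SERVICE_OPEN_COVER
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant


async def open_cover(hass: HomeAssistant, entity_id: str) -> None:
    """Open a cover and wait for the service call to finish."""
    await hass.services.async_call(
        COVER_DOMAIN,
        SERVICE_OPEN_COVER,
        {ATTR_ENTITY_ID: entity_id},
        blocking=True,
    )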
state.attributes assert ATTR_CURRENT_TILT_POSITION not in state.attributes @@ -849,7 +846,7 @@ async def test_iblinds_v3_cover( assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -870,7 +867,7 @@ async def test_iblinds_v3_cover( client.async_send_command.reset_mock() await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -891,7 +888,7 @@ async def test_iblinds_v3_cover( client.async_send_command.reset_mock() await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: entity_id, ATTR_TILT_POSITION: 12}, blocking=True, @@ -912,7 +909,7 @@ async def test_iblinds_v3_cover( client.async_send_command.reset_mock() await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_STOP_COVER_TILT, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -941,7 +938,7 @@ async def test_nice_ibt4zwave_cover( state = hass.states.get(entity_id) assert state # This device has no state because there is no position value - assert state.state == CoverState.CLOSED + assert state.state == STATE_CLOSED assert state.attributes[ATTR_SUPPORTED_FEATURES] == ( CoverEntityFeature.CLOSE | CoverEntityFeature.OPEN @@ -953,7 +950,7 @@ async def test_nice_ibt4zwave_cover( assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.GATE await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -973,7 +970,7 @@ async def test_nice_ibt4zwave_cover( client.async_send_command.reset_mock() await hass.services.async_call( - COVER_DOMAIN, + DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -991,106 +988,3 @@ async def test_nice_ibt4zwave_cover( assert args["value"] == 99 client.async_send_command.reset_mock() - - -async def test_window_covering_open_close( - hass: HomeAssistant, client, window_covering_outbound_bottom, integration -) -> None: - """Test Window Covering device open and close commands. - - A Window Covering device with position support - should be able to open/close with the start/stop level change properties. 
- """ - entity_id = "cover.node_2_outbound_bottom" - state = hass.states.get(entity_id) - - # The entity has position support, but not tilt - assert state - assert ATTR_CURRENT_POSITION in state.attributes - assert ATTR_CURRENT_TILT_POSITION not in state.attributes - - # Test opening - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args[0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == 2 - assert args["valueId"] == { - "commandClass": 106, - "endpoint": 0, - "property": "levelChangeUp", - "propertyKey": 13, - } - assert args["value"] is True - - client.async_send_command.reset_mock() - - # Test stop after opening - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args[0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == 2 - assert args["valueId"] == { - "commandClass": 106, - "endpoint": 0, - "property": "levelChangeUp", - "propertyKey": 13, - } - assert args["value"] is False - - client.async_send_command.reset_mock() - - # Test closing - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args[0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == 2 - assert args["valueId"] == { - "commandClass": 106, - "endpoint": 0, - "property": "levelChangeDown", - "propertyKey": 13, - } - assert args["value"] is True - - client.async_send_command.reset_mock() - - # Test stop after closing - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args[0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == 2 - assert args["valueId"] == { - "commandClass": 106, - "endpoint": 0, - "property": "levelChangeUp", - "propertyKey": 13, - } - assert args["value"] is False - - client.async_send_command.reset_mock() diff --git a/tests/components/zwave_js/test_device_condition.py b/tests/components/zwave_js/test_device_condition.py index 17bc4cf0f5d..61ed2bb35fb 100644 --- a/tests/components/zwave_js/test_device_condition.py +++ b/tests/components/zwave_js/test_device_condition.py @@ -25,7 +25,13 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.setup import async_setup_component -from tests.common import async_get_device_automations +from tests.common import async_get_device_automations, async_mock_service + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") async def test_get_conditions( @@ -93,7 +99,7 @@ async def test_node_status_state( client, lock_schlage_be469, integration, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, ) -> None: """Test for node_status conditions.""" @@ -200,8 +206,8 @@ async def test_node_status_state( hass.bus.async_fire("test_event3") 
hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "alive - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "alive - event - test_event1" event = Event( "wake up", @@ -219,8 +225,8 @@ async def test_node_status_state( hass.bus.async_fire("test_event3") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "awake - event - test_event2" + assert len(calls) == 2 + assert calls[1].data["some"] == "awake - event - test_event2" event = Event( "sleep", @@ -234,8 +240,8 @@ async def test_node_status_state( hass.bus.async_fire("test_event3") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(service_calls) == 3 - assert service_calls[2].data["some"] == "asleep - event - test_event3" + assert len(calls) == 3 + assert calls[2].data["some"] == "asleep - event - test_event3" event = Event( "dead", @@ -249,8 +255,8 @@ async def test_node_status_state( hass.bus.async_fire("test_event3") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(service_calls) == 4 - assert service_calls[3].data["some"] == "dead - event - test_event4" + assert len(calls) == 4 + assert calls[3].data["some"] == "dead - event - test_event4" async def test_config_parameter_state( @@ -258,7 +264,7 @@ async def test_config_parameter_state( client, lock_schlage_be469, integration, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, ) -> None: """Test for config_parameter conditions.""" @@ -325,8 +331,8 @@ async def test_config_parameter_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "Beeper - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "Beeper - event - test_event1" # Flip Beeper state to not match condition event = Event( @@ -369,8 +375,8 @@ async def test_config_parameter_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[1].data["some"] == "User Slot Status - event - test_event2" + assert len(calls) == 2 + assert calls[1].data["some"] == "User Slot Status - event - test_event2" async def test_value_state( @@ -378,7 +384,7 @@ async def test_value_state( client, lock_schlage_be469, integration, - service_calls: list[ServiceCall], + calls: list[ServiceCall], device_registry: dr.DeviceRegistry, ) -> None: """Test for value conditions.""" @@ -421,8 +427,8 @@ async def test_value_state( hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(service_calls) == 1 - assert service_calls[0].data["some"] == "value - event - test_event1" + assert len(calls) == 1 + assert calls[0].data["some"] == "value - event - test_event1" async def test_get_condition_capabilities_node_status( diff --git a/tests/components/zwave_js/test_device_trigger.py b/tests/components/zwave_js/test_device_trigger.py index ccc69f7723d..0fa228288ec 100644 --- a/tests/components/zwave_js/test_device_trigger.py +++ b/tests/components/zwave_js/test_device_trigger.py @@ -28,7 +28,13 @@ from homeassistant.helpers import ( ) from homeassistant.setup import async_setup_component -from tests.common import async_get_device_automations +from 
tests.common import async_get_device_automations, async_mock_service + + +@pytest.fixture +def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Track calls to a mock service.""" + return async_mock_service(hass, "test", "automation") async def test_no_controller_triggers( @@ -79,7 +85,7 @@ async def test_if_notification_notification_fires( client, lock_schlage_be469, integration, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for event.notification.notification trigger firing.""" node: Node = lock_schlage_be469 @@ -162,13 +168,13 @@ async def test_if_notification_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"event.notification.notification - device - zwave_js_notification - {CommandClass.NOTIFICATION}" ) assert ( - service_calls[1].data["some"] + calls[1].data["some"] == f"event.notification.notification2 - device - zwave_js_notification - {CommandClass.NOTIFICATION}" ) @@ -215,7 +221,7 @@ async def test_if_entry_control_notification_fires( client, lock_schlage_be469, integration, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for notification.entry_control trigger firing.""" node: Node = lock_schlage_be469 @@ -297,13 +303,13 @@ async def test_if_entry_control_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"event.notification.notification - device - zwave_js_notification - {CommandClass.ENTRY_CONTROL}" ) assert ( - service_calls[1].data["some"] + calls[1].data["some"] == f"event.notification.notification2 - device - zwave_js_notification - {CommandClass.ENTRY_CONTROL}" ) @@ -383,7 +389,7 @@ async def test_if_node_status_change_fires( client, lock_schlage_be469, integration, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for node_status trigger firing.""" node: Node = lock_schlage_be469 @@ -454,9 +460,9 @@ async def test_if_node_status_change_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[0].data["some"] == "state.node_status - device - alive" - assert service_calls[1].data["some"] == "state.node_status2 - device - alive" + assert len(calls) == 2 + assert calls[0].data["some"] == "state.node_status - device - alive" + assert calls[1].data["some"] == "state.node_status2 - device - alive" async def test_if_node_status_change_fires_legacy( @@ -466,7 +472,7 @@ async def test_if_node_status_change_fires_legacy( client, lock_schlage_be469, integration, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for node_status trigger firing.""" node: Node = lock_schlage_be469 @@ -537,9 +543,9 @@ async def test_if_node_status_change_fires_legacy( ) node.receive_event(event) await hass.async_block_till_done() - assert len(service_calls) == 2 - assert service_calls[0].data["some"] == "state.node_status - device - alive" - assert service_calls[1].data["some"] == "state.node_status2 - device - alive" + assert len(calls) == 2 + assert calls[0].data["some"] == "state.node_status - device - alive" + assert calls[1].data["some"] == "state.node_status2 - device - alive" async def test_get_trigger_capabilities_node_status( @@ -639,7 +645,7 @@ async def 
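# The "calls" fixture restored in these hunks registers a throwaway test.automation service
# via tests.common.async_mock_service and hands back the live list that every invocation
# appends to, which is what the len(calls) / calls[n].data["some"] assertions read. Minimal
# usage sketch:
from homeassistant.core import HomeAssistant, ServiceCall

from tests.common import async_mock_service


async def example(hass: HomeAssistant) -> None:
    """Fire the mock service once and inspect the recorded call."""
    calls: list[ServiceCall] = async_mock_service(hass, "test", "automation")
    await hass.services.async_call(
        "test", "automation", {"some": "payload"}, blocking=True
    )
    assert len(calls) == 1
    assert calls[0].data["some"] == "payload"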
test_if_basic_value_notification_fires( client, ge_in_wall_dimmer_switch, integration, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for event.value_notification.basic trigger firing.""" node: Node = ge_in_wall_dimmer_switch @@ -736,13 +742,13 @@ async def test_if_basic_value_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"event.value_notification.basic - device - zwave_js_value_notification - {CommandClass.BASIC}" ) assert ( - service_calls[1].data["some"] + calls[1].data["some"] == f"event.value_notification.basic2 - device - zwave_js_value_notification - {CommandClass.BASIC}" ) @@ -824,7 +830,7 @@ async def test_if_central_scene_value_notification_fires( client, wallmote_central_scene, integration, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for event.value_notification.central_scene trigger firing.""" node: Node = wallmote_central_scene @@ -927,13 +933,13 @@ async def test_if_central_scene_value_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"event.value_notification.central_scene - device - zwave_js_value_notification - {CommandClass.CENTRAL_SCENE}" ) assert ( - service_calls[1].data["some"] + calls[1].data["some"] == f"event.value_notification.central_scene2 - device - zwave_js_value_notification - {CommandClass.CENTRAL_SCENE}" ) @@ -1014,7 +1020,7 @@ async def test_if_scene_activation_value_notification_fires( client, hank_binary_switch, integration, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for event.value_notification.scene_activation trigger firing.""" node: Node = hank_binary_switch @@ -1111,13 +1117,13 @@ async def test_if_scene_activation_value_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(service_calls) == 2 + assert len(calls) == 2 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == f"event.value_notification.scene_activation - device - zwave_js_value_notification - {CommandClass.SCENE_ACTIVATION}" ) assert ( - service_calls[1].data["some"] + calls[1].data["some"] == f"event.value_notification.scene_activation2 - device - zwave_js_value_notification - {CommandClass.SCENE_ACTIVATION}" ) @@ -1194,7 +1200,7 @@ async def test_if_value_updated_value_fires( client, lock_schlage_be469, integration, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for zwave_js.value_updated.value trigger firing.""" node: Node = lock_schlage_be469 @@ -1255,7 +1261,7 @@ async def test_if_value_updated_value_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 # Publish fake value update that should trigger event = Event( @@ -1277,9 +1283,9 @@ async def test_if_value_updated_value_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == "zwave_js.value_updated.value - zwave_js.value_updated - open" ) @@ -1290,7 +1296,7 @@ async def test_value_updated_value_no_driver( client, lock_schlage_be469, integration, - service_calls: list[ServiceCall], + calls: 
list[ServiceCall], ) -> None: """Test zwave_js.value_updated.value trigger with missing driver.""" node: Node = lock_schlage_be469 @@ -1356,7 +1362,7 @@ async def test_value_updated_value_no_driver( ) node.receive_event(event) await hass.async_block_till_done() - assert len(service_calls) == 0 + assert len(calls) == 0 async def test_get_trigger_capabilities_value_updated_value( @@ -1449,7 +1455,7 @@ async def test_if_value_updated_config_parameter_fires( client, lock_schlage_be469, integration, - service_calls: list[ServiceCall], + calls: list[ServiceCall], ) -> None: """Test for zwave_js.value_updated.config_parameter trigger firing.""" node: Node = lock_schlage_be469 @@ -1511,9 +1517,9 @@ async def test_if_value_updated_config_parameter_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(service_calls) == 1 + assert len(calls) == 1 assert ( - service_calls[0].data["some"] + calls[0].data["some"] == "zwave_js.value_updated.config_parameter - zwave_js.value_updated - 255" ) diff --git a/tests/components/zwave_js/test_diagnostics.py b/tests/components/zwave_js/test_diagnostics.py index 835b85177fe..0e6645d9d61 100644 --- a/tests/components/zwave_js/test_diagnostics.py +++ b/tests/components/zwave_js/test_diagnostics.py @@ -1,11 +1,9 @@ """Test the Z-Wave JS diagnostics.""" import copy -from typing import Any, cast from unittest.mock import patch import pytest -from syrupy.assertion import SnapshotAssertion from zwave_js_server.const import CommandClass from zwave_js_server.event import Event from zwave_js_server.model.node import Node @@ -15,6 +13,7 @@ from homeassistant.components.zwave_js.diagnostics import ( ZwaveValueMatcher, async_get_device_diagnostics, ) +from homeassistant.components.zwave_js.discovery import async_discover_node_values from homeassistant.components.zwave_js.helpers import ( get_device_id, get_value_id_from_unique_id, @@ -59,7 +58,6 @@ async def test_device_diagnostics( integration, hass_client: ClientSessionGenerator, version_state, - snapshot: SnapshotAssertion, ) -> None: """Test the device level diagnostics data dump.""" device = device_registry.async_get_device( @@ -115,18 +113,18 @@ async def test_device_diagnostics( # Entities that are created outside of discovery (e.g. node status sensor and # ping button) as well as helper entities created from other integrations should # not be in dump. - assert diagnostics_data == snapshot - + assert len(diagnostics_data["entities"]) == len( + list(async_discover_node_values(multisensor_6, device, {device.id: set()})) + ) assert any( - entity_entry.entity_id == "test.unrelated_entity" - for entity_entry in er.async_entries_for_device(entity_registry, device.id) + entity.entity_id == "test.unrelated_entity" + for entity in er.async_entries_for_device(entity_registry, device.id) ) # Explicitly check that the entity that is not part of this config entry is not # in the dump. 
- diagnostics_entities = cast(list[dict[str, Any]], diagnostics_data["entities"]) assert not any( entity["entity_id"] == "test.unrelated_entity" - for entity in diagnostics_entities + for entity in diagnostics_data["entities"] ) assert diagnostics_data["state"] == { **multisensor_6.data, @@ -173,7 +171,6 @@ async def test_device_diagnostics_missing_primary_value( entity_id = "sensor.multisensor_6_air_temperature" entry = entity_registry.async_get(entity_id) - assert entry # check that the primary value for the entity exists in the diagnostics diagnostics_data = await get_diagnostics_for_device( @@ -183,8 +180,9 @@ async def test_device_diagnostics_missing_primary_value( value = multisensor_6.values.get(get_value_id_from_unique_id(entry.unique_id)) assert value - diagnostics_entities = cast(list[dict[str, Any]], diagnostics_data["entities"]) - air_entity = next(x for x in diagnostics_entities if x["entity_id"] == entity_id) + air_entity = next( + x for x in diagnostics_data["entities"] if x["entity_id"] == entity_id + ) assert air_entity["value_id"] == value.value_id assert air_entity["primary_value"] == { @@ -220,8 +218,9 @@ async def test_device_diagnostics_missing_primary_value( hass, hass_client, integration, device ) - diagnostics_entities = cast(list[dict[str, Any]], diagnostics_data["entities"]) - air_entity = next(x for x in diagnostics_entities if x["entity_id"] == entity_id) + air_entity = next( + x for x in diagnostics_data["entities"] if x["entity_id"] == entity_id + ) assert air_entity["value_id"] == value.value_id assert air_entity["primary_value"] is None @@ -267,6 +266,5 @@ async def test_device_diagnostics_secret_value( diagnostics_data = await get_diagnostics_for_device( hass, hass_client, integration, device ) - diagnostics_node_state = cast(dict[str, Any], diagnostics_data["state"]) - test_value = _find_ultraviolet_val(diagnostics_node_state) + test_value = _find_ultraviolet_val(diagnostics_data["state"]) assert test_value["value"] == REDACTED diff --git a/tests/components/zwave_js/test_discovery.py b/tests/components/zwave_js/test_discovery.py index 0be0cca78c8..1179d8e843c 100644 --- a/tests/components/zwave_js/test_discovery.py +++ b/tests/components/zwave_js/test_discovery.py @@ -1,12 +1,9 @@ """Test entity discovery for device-specific schemas for the Z-Wave JS integration.""" import pytest -from zwave_js_server.event import Event -from zwave_js_server.model.node import Node from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.components.light import ATTR_SUPPORTED_COLOR_MODES, ColorMode from homeassistant.components.number import ( ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, @@ -31,8 +28,6 @@ from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_UNKNOWN, Entity from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import MockConfigEntry - async def test_aeon_smart_switch_6_state( hass: HomeAssistant, client, aeon_smart_switch_6, integration @@ -54,18 +49,6 @@ async def test_iblinds_v2(hass: HomeAssistant, client, iblinds_v2, integration) assert state -async def test_zvidar_state(hass: HomeAssistant, client, zvidar, integration) -> None: - """Test that an ZVIDAR Z-CM-V01 multilevel switch value is discovered as a cover.""" - node = zvidar - assert node.device_class.specific.label == "Unused" - - state = 
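# The device diagnostics payload asserted on above is a JSON-safe dict with an "entities" list
# and a "state" node dump (secret values appear as REDACTED); entity lookups use a plain
# generator expression. A reduced, illustrative shape (keys taken from the assertions above,
# values made up):
diagnostics_data = {
    "entities": [
        {
            "entity_id": "sensor.multisensor_6_air_temperature",
            "value_id": "mock-value-id",
            "primary_value": None,
        }
    ],
    "state": {},
}
air_entity = next(
    entity
    for entity in diagnostics_data["entities"]
    if entity["entity_id"] == "sensor.multisensor_6_air_temperature"
)
assert air_entity["primary_value"] is None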
hass.states.get("light.window_blind_controller") - assert not state - - state = hass.states.get("cover.window_blind_controller") - assert state - - async def test_ge_12730(hass: HomeAssistant, client, ge_12730, integration) -> None: """Test GE 12730 Fan Controller v2.0 multilevel switch is discovered as a fan.""" node = ge_12730 @@ -385,61 +368,3 @@ async def test_light_device_class_is_null( node = light_device_class_is_null assert node.device_class is None assert hass.states.get("light.bar_display_cases") - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_rediscovery( - hass: HomeAssistant, - siren_neo_coolcam: Node, - integration: MockConfigEntry, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test that we don't rediscover known values.""" - node = siren_neo_coolcam - entity_id = "select.siren_alarm_doorbell_sound_selection" - state = hass.states.get(entity_id) - - assert state - assert state.state == "Beep" - - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": 36, - "args": { - "commandClassName": "Configuration", - "commandClass": 112, - "endpoint": 0, - "property": 6, - "newValue": 9, - "prevValue": 10, - "propertyName": "Doorbell Sound Selection", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - - assert state - assert state.state == "Beep Beep" - assert "Platform zwave_js does not generate unique IDs" not in caplog.text - - -async def test_aeotec_smart_switch_7( - hass: HomeAssistant, - aeotec_smart_switch_7: Node, - integration: MockConfigEntry, -) -> None: - """Test that Smart Switch 7 has a light and a switch entity.""" - state = hass.states.get("light.smart_switch_7") - assert state - assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ - ColorMode.HS, - ] - - state = hass.states.get("switch.smart_switch_7") - assert state diff --git a/tests/components/zwave_js/test_fan.py b/tests/components/zwave_js/test_fan.py index 2551fc7b34a..03cd6bfb704 100644 --- a/tests/components/zwave_js/test_fan.py +++ b/tests/components/zwave_js/test_fan.py @@ -653,12 +653,7 @@ async def test_thermostat_fan( assert state.state == STATE_ON assert state.attributes.get(ATTR_FAN_STATE) == "Idle / off" assert state.attributes.get(ATTR_PRESET_MODE) == "Auto low" - assert ( - state.attributes.get(ATTR_SUPPORTED_FEATURES) - == FanEntityFeature.PRESET_MODE - | FanEntityFeature.TURN_OFF - | FanEntityFeature.TURN_ON - ) + assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == FanEntityFeature.PRESET_MODE # Test setting preset mode await hass.services.async_call( diff --git a/tests/components/zwave_js/test_helpers.py b/tests/components/zwave_js/test_helpers.py index 2df2e134f49..016a2d718ac 100644 --- a/tests/components/zwave_js/test_helpers.py +++ b/tests/components/zwave_js/test_helpers.py @@ -42,4 +42,4 @@ async def test_get_value_state_schema_boolean_config_value( aeon_smart_switch_6.values["102-112-0-255"] ) assert isinstance(schema_validator, vol.Coerce) - assert schema_validator.type is bool + assert schema_validator.type == bool diff --git a/tests/components/zwave_js/test_init.py b/tests/components/zwave_js/test_init.py index 4f858f3e545..51aeee72c1d 100644 --- a/tests/components/zwave_js/test_init.py +++ b/tests/components/zwave_js/test_init.py @@ -5,8 +5,6 @@ from copy import deepcopy import logging from unittest.mock import AsyncMock, call, patch -from aiohasupervisor import SupervisorError -from aiohasupervisor.models import 
AddonsOptions import pytest from zwave_js_server.client import Client from zwave_js_server.event import Event @@ -14,7 +12,7 @@ from zwave_js_server.exceptions import BaseZwaveJSServerError, InvalidServerVers from zwave_js_server.model.node import Node from zwave_js_server.model.version import VersionInfo -from homeassistant.components.hassio import HassioAPIError +from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.logger import DOMAIN as LOGGER_DOMAIN, SERVICE_SET_LEVEL from homeassistant.components.persistent_notification import async_dismiss from homeassistant.components.zwave_js import DOMAIN @@ -555,10 +553,10 @@ async def test_start_addon( assert install_addon.call_count == 0 assert set_addon_options.call_count == 1 assert set_addon_options.call_args == call( - "core_zwave_js", AddonsOptions(config=addon_options) + hass, "core_zwave_js", {"options": addon_options} ) assert start_addon.call_count == 1 - assert start_addon.call_args == call("core_zwave_js") + assert start_addon.call_args == call(hass, "core_zwave_js") async def test_install_addon( @@ -601,16 +599,16 @@ async def test_install_addon( assert entry.state is ConfigEntryState.SETUP_RETRY assert install_addon.call_count == 1 - assert install_addon.call_args == call("core_zwave_js") + assert install_addon.call_args == call(hass, "core_zwave_js") assert set_addon_options.call_count == 1 assert set_addon_options.call_args == call( - "core_zwave_js", AddonsOptions(config=addon_options) + hass, "core_zwave_js", {"options": addon_options} ) assert start_addon.call_count == 1 - assert start_addon.call_args == call("core_zwave_js") + assert start_addon.call_args == call(hass, "core_zwave_js") -@pytest.mark.parametrize("addon_info_side_effect", [SupervisorError("Boom")]) +@pytest.mark.parametrize("addon_info_side_effect", [HassioAPIError("Boom")]) async def test_addon_info_failure( hass: HomeAssistant, addon_installed, @@ -748,7 +746,7 @@ async def test_addon_options_changed( [ ("1.0.0", True, 1, 1, None, None), ("1.0.0", False, 0, 0, None, None), - ("1.0.0", True, 1, 1, SupervisorError("Boom"), None), + ("1.0.0", True, 1, 1, HassioAPIError("Boom"), None), ("1.0.0", True, 0, 1, None, HassioAPIError("Boom")), ], ) @@ -774,8 +772,8 @@ async def test_update_addon( network_key = "abc123" addon_options["device"] = device addon_options["network_key"] = network_key - addon_info.return_value.version = addon_version - addon_info.return_value.update_available = update_available + addon_info.return_value["version"] = addon_version + addon_info.return_value["update_available"] = update_available create_backup.side_effect = create_backup_side_effect update_addon.side_effect = update_addon_side_effect client.connect.side_effect = InvalidServerVersion( @@ -847,7 +845,7 @@ async def test_issue_registry( ("stop_addon_side_effect", "entry_state"), [ (None, ConfigEntryState.NOT_LOADED), - (SupervisorError("Boom"), ConfigEntryState.LOADED), + (HassioAPIError("Boom"), ConfigEntryState.LOADED), ], ) async def test_stop_addon( @@ -890,7 +888,7 @@ async def test_stop_addon( assert entry.state == entry_state assert stop_addon.call_count == 1 - assert stop_addon.call_args == call("core_zwave_js") + assert stop_addon.call_args == call(hass, "core_zwave_js") async def test_remove_entry( @@ -929,7 +927,7 @@ async def test_remove_entry( await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call("core_zwave_js") + assert stop_addon.call_args == 
call(hass, "core_zwave_js") assert create_backup.call_count == 1 assert create_backup.call_args == call( hass, @@ -937,7 +935,7 @@ async def test_remove_entry( partial=True, ) assert uninstall_addon.call_count == 1 - assert uninstall_addon.call_args == call("core_zwave_js") + assert uninstall_addon.call_args == call(hass, "core_zwave_js") assert entry.state is ConfigEntryState.NOT_LOADED assert len(hass.config_entries.async_entries(DOMAIN)) == 0 stop_addon.reset_mock() @@ -947,12 +945,12 @@ async def test_remove_entry( # test add-on stop failure entry.add_to_hass(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - stop_addon.side_effect = SupervisorError() + stop_addon.side_effect = HassioAPIError() await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call("core_zwave_js") + assert stop_addon.call_args == call(hass, "core_zwave_js") assert create_backup.call_count == 0 assert uninstall_addon.call_count == 0 assert entry.state is ConfigEntryState.NOT_LOADED @@ -971,7 +969,7 @@ async def test_remove_entry( await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call("core_zwave_js") + assert stop_addon.call_args == call(hass, "core_zwave_js") assert create_backup.call_count == 1 assert create_backup.call_args == call( hass, @@ -990,12 +988,12 @@ async def test_remove_entry( # test add-on uninstall failure entry.add_to_hass(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - uninstall_addon.side_effect = SupervisorError() + uninstall_addon.side_effect = HassioAPIError() await hass.config_entries.async_remove(entry.entry_id) assert stop_addon.call_count == 1 - assert stop_addon.call_args == call("core_zwave_js") + assert stop_addon.call_args == call(hass, "core_zwave_js") assert create_backup.call_count == 1 assert create_backup.call_args == call( hass, @@ -1003,7 +1001,7 @@ async def test_remove_entry( partial=True, ) assert uninstall_addon.call_count == 1 - assert uninstall_addon.call_args == call("core_zwave_js") + assert uninstall_addon.call_args == call(hass, "core_zwave_js") assert entry.state is ConfigEntryState.NOT_LOADED assert len(hass.config_entries.async_entries(DOMAIN)) == 0 assert "Failed to uninstall the Z-Wave JS add-on" in caplog.text @@ -1575,9 +1573,13 @@ async def test_disabled_entity_on_value_removed( hass: HomeAssistant, entity_registry: er.EntityRegistry, zp3111, client, integration ) -> None: """Test that when entity primary values are removed the entity is removed.""" + # re-enable this default-disabled entity + sensor_cover_entity = "sensor.4_in_1_sensor_home_security_cover_status" idle_cover_status_button_entity = ( "button.4_in_1_sensor_idle_home_security_cover_status" ) + entity_registry.async_update_entity(entity_id=sensor_cover_entity, disabled_by=None) + await hass.async_block_till_done() # must reload the integration when enabling an entity await hass.config_entries.async_unload(integration.entry_id) @@ -1588,6 +1590,10 @@ async def test_disabled_entity_on_value_removed( await hass.async_block_till_done() assert integration.state is ConfigEntryState.LOADED + state = hass.states.get(sensor_cover_entity) + assert state + assert state.state != STATE_UNAVAILABLE + state = hass.states.get(idle_cover_status_button_entity) assert state assert state.state != STATE_UNAVAILABLE @@ -1681,6 +1687,10 @@ async def test_disabled_entity_on_value_removed( assert state assert state.state == STATE_UNAVAILABLE + state 
= hass.states.get(sensor_cover_entity) + assert state + assert state.state == STATE_UNAVAILABLE + state = hass.states.get(idle_cover_status_button_entity) assert state assert state.state == STATE_UNAVAILABLE @@ -1696,6 +1706,7 @@ async def test_disabled_entity_on_value_removed( | { battery_level_entity, binary_cover_entity, + sensor_cover_entity, idle_cover_status_button_entity, } == new_unavailable_entities diff --git a/tests/components/zwave_js/test_light.py b/tests/components/zwave_js/test_light.py index 4c725c6dc29..376bd700a2a 100644 --- a/tests/components/zwave_js/test_light.py +++ b/tests/components/zwave_js/test_light.py @@ -8,7 +8,6 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_TEMP, - ATTR_HS_COLOR, ATTR_MAX_MIREDS, ATTR_MIN_MIREDS, ATTR_RGB_COLOR, @@ -38,8 +37,8 @@ from .common import ( ZEN_31_ENTITY, ) -ZDB5100_ENTITY = "light.matrix_office" HSM200_V1_ENTITY = "light.hsm200" +ZDB5100_ENTITY = "light.matrix_office" async def test_light( @@ -511,388 +510,14 @@ async def test_light_none_color_value( assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["hs"] -async def test_light_on_off_color( - hass: HomeAssistant, client, logic_group_zdb5100, integration -) -> None: - """Test the light entity for RGB lights without dimming support.""" - node = logic_group_zdb5100 - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_OFF - - async def update_color(red: int, green: int, blue: int) -> None: - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 1, - "property": "currentColor", - "propertyKey": 2, # red - "newValue": red, - "prevValue": None, - "propertyName": "currentColor", - "propertyKeyName": "red", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 1, - "property": "currentColor", - "propertyKey": 3, # green - "newValue": green, - "prevValue": None, - "propertyName": "currentColor", - "propertyKeyName": "green", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 1, - "property": "currentColor", - "propertyKey": 4, # blue - "newValue": blue, - "prevValue": None, - "propertyName": "currentColor", - "propertyKeyName": "blue", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 1, - "property": "currentColor", - "newValue": { - "red": red, - "green": green, - "blue": blue, - }, - "prevValue": None, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - - async def update_switch_state(state: bool) -> None: - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Binary Switch", - "commandClass": 37, - 
"endpoint": 1, - "property": "currentValue", - "newValue": state, - "prevValue": None, - "propertyName": "currentValue", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - - # Turn on the light. Since this is the first call, the light should default to white - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ZDB5100_ENTITY}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 2 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 1, - "property": "targetColor", - } - assert args["value"] == { - "red": 255, - "green": 255, - "blue": 255, - } - - args = client.async_send_command.call_args_list[1][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 37, - "endpoint": 1, - "property": "targetValue", - } - assert args["value"] is True - - # Force the light to turn off - await update_switch_state(False) - - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_OFF - - # Force the light to turn on (green) - await update_color(0, 255, 0) - await update_switch_state(True) - - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_ON - - client.async_send_command.reset_mock() - - # Set the brightness to 128. This should be encoded in the color value - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ZDB5100_ENTITY, ATTR_BRIGHTNESS: 128}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 2 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 1, - "property": "targetColor", - } - assert args["value"] == { - "red": 0, - "green": 128, - "blue": 0, - } - - args = client.async_send_command.call_args_list[1][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 37, - "endpoint": 1, - "property": "targetValue", - } - assert args["value"] is True - - client.async_send_command.reset_mock() - - # Force the light to turn on (green, 50%) - await update_color(0, 128, 0) - - # Set the color to red. This should preserve the previous brightness value - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ZDB5100_ENTITY, ATTR_HS_COLOR: (0, 100)}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 2 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 1, - "property": "targetColor", - } - assert args["value"] == { - "red": 128, - "green": 0, - "blue": 0, - } - - args = client.async_send_command.call_args_list[1][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 37, - "endpoint": 1, - "property": "targetValue", - } - assert args["value"] is True - - client.async_send_command.reset_mock() - - # Force the light to turn on (red, 50%) - await update_color(128, 0, 0) - - # Turn the device off. 
This should only affect the binary switch, not the color - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: ZDB5100_ENTITY}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 37, - "endpoint": 1, - "property": "targetValue", - } - assert args["value"] is False - - client.async_send_command.reset_mock() - - # Force the light to turn off - await update_switch_state(False) - - # Turn the device on again. This should only affect the binary switch, not the color - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ZDB5100_ENTITY}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 37, - "endpoint": 1, - "property": "targetValue", - } - assert args["value"] is True - - -async def test_light_color_only( +async def test_black_is_off( hass: HomeAssistant, client, express_controls_ezmultipli, integration ) -> None: - """Test the light entity for RGB lights with Color Switch CC only.""" + """Test the black is off light entity.""" node = express_controls_ezmultipli state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_ON - async def update_color(red: int, green: int, blue: int) -> None: - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 0, - "property": "currentColor", - "propertyKey": 2, # red - "newValue": red, - "prevValue": None, - "propertyName": "currentColor", - "propertyKeyName": "red", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 0, - "property": "currentColor", - "propertyKey": 3, # green - "newValue": green, - "prevValue": None, - "propertyName": "currentColor", - "propertyKeyName": "green", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 0, - "property": "currentColor", - "propertyKey": 4, # blue - "newValue": blue, - "prevValue": None, - "propertyName": "currentColor", - "propertyKeyName": "blue", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 0, - "property": "currentColor", - "newValue": { - "red": red, - "green": green, - "blue": blue, - }, - "prevValue": None, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - # Attempt to turn on the light and ensure it defaults to white await hass.services.async_call( LIGHT_DOMAIN, @@ 
-914,14 +539,64 @@ async def test_light_color_only( client.async_send_command.reset_mock() # Force the light to turn off - await update_color(0, 0, 0) - + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "newValue": { + "red": 0, + "green": 0, + "blue": 0, + }, + "prevValue": { + "red": 0, + "green": 255, + "blue": 0, + }, + "propertyName": "currentColor", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_OFF - # Force the light to turn on (50% green) - await update_color(0, 128, 0) - + # Force the light to turn on + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "newValue": { + "red": 0, + "green": 255, + "blue": 0, + }, + "prevValue": { + "red": 0, + "green": 0, + "blue": 0, + }, + "propertyName": "currentColor", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_ON @@ -944,9 +619,6 @@ async def test_light_color_only( client.async_send_command.reset_mock() - # Force the light to turn off - await update_color(0, 0, 0) - # Assert that the last color is restored await hass.services.async_call( LIGHT_DOMAIN, @@ -963,131 +635,11 @@ async def test_light_color_only( "endpoint": 0, "property": "targetColor", } - assert args["value"] == {"red": 0, "green": 128, "blue": 0} + assert args["value"] == {"red": 0, "green": 255, "blue": 0} client.async_send_command.reset_mock() - # Force the light to turn on (50% green) - await update_color(0, 128, 0) - - state = hass.states.get(HSM200_V1_ENTITY) - assert state.state == STATE_ON - - client.async_send_command.reset_mock() - - # Assert that the brightness is preserved when changing colors - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_RGB_COLOR: (255, 0, 0)}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 0, - "property": "targetColor", - } - assert args["value"] == {"red": 128, "green": 0, "blue": 0} - - client.async_send_command.reset_mock() - - # Force the light to turn on (50% red) - await update_color(128, 0, 0) - - state = hass.states.get(HSM200_V1_ENTITY) - assert state.state == STATE_ON - - # Assert that the color is preserved when changing brightness - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_BRIGHTNESS: 69}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 0, - "property": "targetColor", - } - assert args["value"] == {"red": 69, "green": 0, "blue": 0} - - client.async_send_command.reset_mock() - - await update_color(69, 0, 0) - - # Turn off again - await 
hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: HSM200_V1_ENTITY}, - blocking=True, - ) - await update_color(0, 0, 0) - - client.async_send_command.reset_mock() - - # Assert that the color is preserved when turning on with brightness - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_BRIGHTNESS: 123}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 0, - "property": "targetColor", - } - assert args["value"] == {"red": 123, "green": 0, "blue": 0} - - client.async_send_command.reset_mock() - - await update_color(123, 0, 0) - - # Turn off again - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: HSM200_V1_ENTITY}, - blocking=True, - ) - await update_color(0, 0, 0) - - client.async_send_command.reset_mock() - - # Assert that the brightness is preserved when turning on with color - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_HS_COLOR: (240, 100)}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 0, - "property": "targetColor", - } - assert args["value"] == {"red": 0, "green": 0, "blue": 123} - - client.async_send_command.reset_mock() - - # Clear the color value to trigger an unknown state + # Force the light to turn on event = Event( type="value updated", data={ @@ -1100,14 +652,17 @@ async def test_light_color_only( "endpoint": 0, "property": "currentColor", "newValue": None, - "prevValue": None, + "prevValue": { + "red": 0, + "green": 255, + "blue": 0, + }, "propertyName": "currentColor", }, }, ) node.receive_event(event) await hass.async_block_till_done() - state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_UNKNOWN @@ -1132,6 +687,183 @@ async def test_light_color_only( assert args["value"] == {"red": 255, "green": 76, "blue": 255} +async def test_black_is_off_zdb5100( + hass: HomeAssistant, client, logic_group_zdb5100, integration +) -> None: + """Test the black is off light entity.""" + node = logic_group_zdb5100 + state = hass.states.get(ZDB5100_ENTITY) + assert state.state == STATE_OFF + + # Attempt to turn on the light and ensure it defaults to white + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 1, + "property": "targetColor", + } + assert args["value"] == {"red": 255, "green": 255, "blue": 255} + + client.async_send_command.reset_mock() + + # Force the light to turn off + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "newValue": { + "red": 0, + "green": 0, + 
"blue": 0, + }, + "prevValue": { + "red": 0, + "green": 255, + "blue": 0, + }, + "propertyName": "currentColor", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + state = hass.states.get(ZDB5100_ENTITY) + assert state.state == STATE_OFF + + # Force the light to turn on + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "newValue": { + "red": 0, + "green": 255, + "blue": 0, + }, + "prevValue": { + "red": 0, + "green": 0, + "blue": 0, + }, + "propertyName": "currentColor", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + state = hass.states.get(ZDB5100_ENTITY) + assert state.state == STATE_ON + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 1, + "property": "targetColor", + } + assert args["value"] == {"red": 0, "green": 0, "blue": 0} + + client.async_send_command.reset_mock() + + # Assert that the last color is restored + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 1, + "property": "targetColor", + } + assert args["value"] == {"red": 0, "green": 255, "blue": 0} + + client.async_send_command.reset_mock() + + # Force the light to turn on + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "newValue": None, + "prevValue": { + "red": 0, + "green": 255, + "blue": 0, + }, + "propertyName": "currentColor", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + state = hass.states.get(ZDB5100_ENTITY) + assert state.state == STATE_UNKNOWN + + client.async_send_command.reset_mock() + + # Assert that call fails if attribute is added to service call + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY, ATTR_RGBW_COLOR: (255, 76, 255, 0)}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 1, + "property": "targetColor", + } + assert args["value"] == {"red": 255, "green": 76, "blue": 255} + + async def test_basic_cc_light( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/zwave_js/test_lock.py b/tests/components/zwave_js/test_lock.py index 47e680570f0..e8a8a2035d8 100644 --- a/tests/components/zwave_js/test_lock.py +++ b/tests/components/zwave_js/test_lock.py @@ -15,7 +15,6 @@ from homeassistant.components.lock import ( DOMAIN as 
LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, - LockState, ) from homeassistant.components.zwave_js.const import ( ATTR_LOCK_TIMEOUT, @@ -28,7 +27,13 @@ from homeassistant.components.zwave_js.lock import ( SERVICE_SET_LOCK_CONFIGURATION, SERVICE_SET_LOCK_USERCODE, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_LOCKED, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + STATE_UNLOCKED, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -47,7 +52,7 @@ async def test_door_lock( state = hass.states.get(SCHLAGE_BE469_LOCK_ENTITY) assert state - assert state.state == LockState.UNLOCKED + assert state.state == STATE_UNLOCKED # Test locking await hass.services.async_call( @@ -90,9 +95,7 @@ async def test_door_lock( ) node.receive_event(event) - state = hass.states.get(SCHLAGE_BE469_LOCK_ENTITY) - assert state - assert state.state == LockState.LOCKED + assert hass.states.get(SCHLAGE_BE469_LOCK_ENTITY).state == STATE_LOCKED client.async_send_command.reset_mock() @@ -191,7 +194,6 @@ async def test_door_lock( "insideHandlesCanOpenDoorConfiguration": [True, True, True, True], "operationType": 2, "outsideHandlesCanOpenDoorConfiguration": [True, True, True, True], - "lockTimeoutConfiguration": 1, } ] assert args["commandClass"] == 98 @@ -237,7 +239,6 @@ async def test_door_lock( "insideHandlesCanOpenDoorConfiguration": [True, True, True, True], "operationType": 2, "outsideHandlesCanOpenDoorConfiguration": [True, True, True, True], - "lockTimeoutConfiguration": 1, } ] assert args["commandClass"] == 98 @@ -293,9 +294,7 @@ async def test_door_lock( node.receive_event(event) assert node.status == NodeStatus.DEAD - state = hass.states.get(SCHLAGE_BE469_LOCK_ENTITY) - assert state - assert state.state == STATE_UNAVAILABLE + assert hass.states.get(SCHLAGE_BE469_LOCK_ENTITY).state == STATE_UNAVAILABLE async def test_only_one_lock( diff --git a/tests/components/zwave_js/test_repairs.py b/tests/components/zwave_js/test_repairs.py index 2f10b70b48a..c103a06c5fa 100644 --- a/tests/components/zwave_js/test_repairs.py +++ b/tests/components/zwave_js/test_repairs.py @@ -1,22 +1,25 @@ """Test the Z-Wave JS repairs module.""" from copy import deepcopy +from http import HTTPStatus from unittest.mock import patch from zwave_js_server.event import Event from zwave_js_server.model.node import Node +from homeassistant.components.repairs.issue_handler import ( + async_process_repairs_platforms, +) +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) from homeassistant.components.zwave_js import DOMAIN from homeassistant.components.zwave_js.helpers import get_device_id from homeassistant.core import HomeAssistant import homeassistant.helpers.device_registry as dr import homeassistant.helpers.issue_registry as ir -from tests.components.repairs import ( - async_process_repairs_platforms, - process_repair_fix_flow, - start_repair_fix_flow, -) from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -81,21 +84,30 @@ async def test_device_config_file_changed_confirm_step( assert issue["issue_id"] == issue_id assert issue["translation_placeholders"] == {"device_name": device.name} - data = await start_repair_fix_flow(http_client, DOMAIN, issue_id) + url = RepairsFlowIndexView.url + resp = await http_client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) + assert resp.status == HTTPStatus.OK + data = await 
resp.json() flow_id = data["flow_id"] assert data["step_id"] == "init" assert data["description_placeholders"] == {"device_name": device.name} + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + # Show menu - data = await process_repair_fix_flow(http_client, flow_id) + resp = await http_client.post(url) + + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "menu" # Apply fix - data = await process_repair_fix_flow( - http_client, flow_id, json={"next_step_id": "confirm"} - ) + resp = await http_client.post(url, json={"next_step_id": "confirm"}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" @@ -147,21 +159,30 @@ async def test_device_config_file_changed_ignore_step( assert issue["issue_id"] == issue_id assert issue["translation_placeholders"] == {"device_name": device.name} - data = await start_repair_fix_flow(http_client, DOMAIN, issue_id) + url = RepairsFlowIndexView.url + resp = await http_client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["step_id"] == "init" assert data["description_placeholders"] == {"device_name": device.name} + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + # Show menu - data = await process_repair_fix_flow(http_client, flow_id) + resp = await http_client.post(url) + + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "menu" # Ignore the issue - data = await process_repair_fix_flow( - http_client, flow_id, json={"next_step_id": "ignore"} - ) + resp = await http_client.post(url, json={"next_step_id": "ignore"}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "abort" assert data["reason"] == "issue_ignored" @@ -207,13 +228,22 @@ async def test_invalid_issue( issue = msg["result"]["issues"][0] assert issue["issue_id"] == "invalid_issue_id" - data = await start_repair_fix_flow(http_client, DOMAIN, "invalid_issue_id") + url = RepairsFlowIndexView.url + resp = await http_client.post( + url, json={"handler": DOMAIN, "issue_id": "invalid_issue_id"} + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["step_id"] == "confirm" # Apply fix - data = await process_repair_fix_flow(http_client, flow_id) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await http_client.post(url) + + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "create_entry" @@ -248,7 +278,10 @@ async def test_abort_confirm( await hass_ws_client(hass) http_client = await hass_client() - data = await start_repair_fix_flow(http_client, DOMAIN, issue_id) + url = RepairsFlowIndexView.url + resp = await http_client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) + assert resp.status == HTTPStatus.OK + data = await resp.json() flow_id = data["flow_id"] assert data["step_id"] == "init" @@ -257,9 +290,11 @@ async def test_abort_confirm( await hass.config_entries.async_unload(integration.entry_id) # Apply fix - data = await process_repair_fix_flow( - http_client, flow_id, json={"next_step_id": "confirm"} - ) + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await http_client.post(url, json={"next_step_id": "confirm"}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() assert data["type"] == "abort" assert data["reason"] == "cannot_connect" diff --git 
a/tests/components/zwave_js/test_sensor.py b/tests/components/zwave_js/test_sensor.py index c93b722334b..02b3df17e22 100644 --- a/tests/components/zwave_js/test_sensor.py +++ b/tests/components/zwave_js/test_sensor.py @@ -9,6 +9,7 @@ from zwave_js_server.exceptions import FailedZWaveCommand from zwave_js_server.model.node import Node from homeassistant.components.sensor import ( + ATTR_OPTIONS, ATTR_STATE_CLASS, SensorDeviceClass, SensorStateClass, @@ -22,10 +23,6 @@ from homeassistant.components.zwave_js.const import ( SERVICE_RESET_METER, ) from homeassistant.components.zwave_js.helpers import get_valueless_base_unique_id -from homeassistant.components.zwave_js.sensor import ( - CONTROLLER_STATISTICS_KEY_MAP, - NODE_STATISTICS_KEY_MAP, -) from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, @@ -53,12 +50,11 @@ from .common import ( ENERGY_SENSOR, HUMIDITY_SENSOR, METER_ENERGY_SENSOR, + NOTIFICATION_MOTION_SENSOR, POWER_SENSOR, VOLTAGE_SENSOR, ) -from tests.common import MockConfigEntry - async def test_numeric_sensor( hass: HomeAssistant, @@ -225,6 +221,60 @@ async def test_basic_cc_sensor( assert state.state == "255.0" +async def test_disabled_notification_sensor( + hass: HomeAssistant, entity_registry: er.EntityRegistry, multisensor_6, integration +) -> None: + """Test sensor is created from Notification CC and is disabled.""" + entity_entry = entity_registry.async_get(NOTIFICATION_MOTION_SENSOR) + + assert entity_entry + assert entity_entry.disabled + assert entity_entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + + # Test enabling entity + updated_entry = entity_registry.async_update_entity( + entity_entry.entity_id, disabled_by=None + ) + assert updated_entry != entity_entry + assert updated_entry.disabled is False + + # reload integration and check if entity is correctly there + await hass.config_entries.async_reload(integration.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(NOTIFICATION_MOTION_SENSOR) + assert state.state == "Motion detection" + assert state.attributes[ATTR_VALUE] == 8 + assert state.attributes[ATTR_DEVICE_CLASS] == SensorDeviceClass.ENUM + assert state.attributes[ATTR_OPTIONS] == ["idle", "Motion detection"] + + event = Event( + "value updated", + { + "source": "node", + "event": "value updated", + "nodeId": multisensor_6.node_id, + "args": { + "commandClassName": "Notification", + "commandClass": 113, + "endpoint": 0, + "property": "Home Security", + "propertyKey": "Motion sensor status", + "newValue": None, + "prevValue": 0, + "propertyName": "Home Security", + "propertyKeyName": "Motion sensor status", + }, + }, + ) + + multisensor_6.receive_event(event) + await hass.async_block_till_done() + state = hass.states.get(NOTIFICATION_MOTION_SENSOR) + assert state + assert state.state == STATE_UNKNOWN + + async def test_config_parameter_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -472,7 +522,7 @@ async def test_reset_meter( "test", 1, "test" ) - with pytest.raises(HomeAssistantError) as err: + with pytest.raises(HomeAssistantError): await hass.services.async_call( DOMAIN, SERVICE_RESET_METER, @@ -480,11 +530,6 @@ async def test_reset_meter( blocking=True, ) - assert str(err.value) == ( - "Failed to reset meters on node Node(node_id=102) endpoint 0: " - "zwave_error: Z-Wave error 1 - test" - ) - async def test_meter_attributes( hass: HomeAssistant, client, aeon_smart_switch_6, integration @@ -706,54 +751,6 @@ NODE_STATISTICS_SUFFIXES_UNKNOWN = { } -async def 
test_statistics_sensors_migration( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - zp3111_state, - client, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test statistics migration sensor.""" - node = Node(client, copy.deepcopy(zp3111_state)) - client.driver.controller.nodes[node.node_id] = node - - entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"}) - entry.add_to_hass(hass) - - controller_base_unique_id = f"{client.driver.controller.home_id}.1.statistics" - node_base_unique_id = f"{client.driver.controller.home_id}.22.statistics" - - # Create entity registry records for the old statistics keys - for base_unique_id, key_map in ( - (controller_base_unique_id, CONTROLLER_STATISTICS_KEY_MAP), - (node_base_unique_id, NODE_STATISTICS_KEY_MAP), - ): - # old key - for key in key_map.values(): - entity_registry.async_get_or_create( - "sensor", DOMAIN, f"{base_unique_id}_{key}" - ) - - # Set up integration - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - # Validate that entity unique ID's have changed - for base_unique_id, key_map in ( - (controller_base_unique_id, CONTROLLER_STATISTICS_KEY_MAP), - (node_base_unique_id, NODE_STATISTICS_KEY_MAP), - ): - for new_key, old_key in key_map.items(): - # If the key has changed, the old entity should not exist - if new_key != old_key: - assert not entity_registry.async_get_entity_id( - "sensor", DOMAIN, f"{base_unique_id}_{old_key}" - ) - assert entity_registry.async_get_entity_id( - "sensor", DOMAIN, f"{base_unique_id}_{new_key}" - ) - - async def test_statistics_sensors_no_last_seen( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/zwave_js/test_services.py b/tests/components/zwave_js/test_services.py index 41477f18b97..ec13d0262f8 100644 --- a/tests/components/zwave_js/test_services.py +++ b/tests/components/zwave_js/test_services.py @@ -497,12 +497,13 @@ async def test_set_config_parameter( caplog.clear() + config_value = aeotec_zw164_siren.values["2-112-0-32"] cmd_result = SetConfigParameterResult("accepted", {"status": 255}) # Test accepted return with patch( "homeassistant.components.zwave_js.services.Endpoint.async_set_raw_config_parameter_value", - return_value=cmd_result, + return_value=(config_value, cmd_result), ) as mock_set_raw_config_parameter_value: await hass.services.async_call( DOMAIN, @@ -533,7 +534,7 @@ async def test_set_config_parameter( cmd_result.status = "queued" with patch( "homeassistant.components.zwave_js.services.Endpoint.async_set_raw_config_parameter_value", - return_value=cmd_result, + return_value=(config_value, cmd_result), ) as mock_set_raw_config_parameter_value: await hass.services.async_call( DOMAIN, diff --git a/tests/components/zwave_js/test_switch.py b/tests/components/zwave_js/test_switch.py index 30486186a4e..c18c0c4359e 100644 --- a/tests/components/zwave_js/test_switch.py +++ b/tests/components/zwave_js/test_switch.py @@ -6,11 +6,7 @@ from zwave_js_server.event import Event from zwave_js_server.exceptions import FailedZWaveCommand from zwave_js_server.model.node import Node -from homeassistant.components.switch import ( - DOMAIN as SWITCH_DOMAIN, - SERVICE_TURN_OFF, - SERVICE_TURN_ON, -) +from homeassistant.components.switch import DOMAIN, SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.components.zwave_js.helpers import ZwaveValueMatcher from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN, EntityCategory from homeassistant.core import HomeAssistant @@ -99,7 +95,7 
@@ async def test_barrier_signaling_switch( # Test turning off await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_OFF, {"entity_id": entity}, blocking=True + DOMAIN, SERVICE_TURN_OFF, {"entity_id": entity}, blocking=True ) assert len(client.async_send_command.call_args_list) == 1 @@ -124,7 +120,7 @@ async def test_barrier_signaling_switch( # Test turning on await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, {"entity_id": entity}, blocking=True + DOMAIN, SERVICE_TURN_ON, {"entity_id": entity}, blocking=True ) # Note: the valueId's value is still 255 because we never @@ -254,7 +250,7 @@ async def test_config_parameter_switch( # Test turning on await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, {"entity_id": switch_entity_id}, blocking=True + DOMAIN, SERVICE_TURN_ON, {"entity_id": switch_entity_id}, blocking=True ) assert len(client.async_send_command.call_args_list) == 1 @@ -272,7 +268,7 @@ async def test_config_parameter_switch( # Test turning off await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_OFF, {"entity_id": switch_entity_id}, blocking=True + DOMAIN, SERVICE_TURN_OFF, {"entity_id": switch_entity_id}, blocking=True ) assert len(client.async_send_command.call_args_list) == 1 @@ -290,14 +286,7 @@ async def test_config_parameter_switch( client.async_send_command.side_effect = FailedZWaveCommand("test", 1, "test") # Test turning off error raises proper exception - with pytest.raises(HomeAssistantError) as err: + with pytest.raises(HomeAssistantError): await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {"entity_id": switch_entity_id}, - blocking=True, + DOMAIN, SERVICE_TURN_OFF, {"entity_id": switch_entity_id}, blocking=True ) - - assert str(err.value) == ( - "Unable to set value 32-112-0-20: zwave_error: Z-Wave error 1 - test" - ) diff --git a/tests/components/zwave_js/test_trigger.py b/tests/components/zwave_js/test_trigger.py index 8c345619a90..5822afe7b9f 100644 --- a/tests/components/zwave_js/test_trigger.py +++ b/tests/components/zwave_js/test_trigger.py @@ -549,7 +549,7 @@ async def test_zwave_js_event( "config_entry_id": integration.entry_id, "event_source": "controller", "event": "inclusion started", - "event_data": {"strategy": 0}, + "event_data": {"secure": True}, }, "action": { "event": "controller_event_data_filter", @@ -667,7 +667,7 @@ async def test_zwave_js_event( data={ "source": "controller", "event": "inclusion started", - "strategy": 2, + "secure": False, }, ) client.driver.controller.receive_event(event) @@ -691,7 +691,7 @@ async def test_zwave_js_event( data={ "source": "controller", "event": "inclusion started", - "strategy": 0, + "secure": True, }, ) client.driver.controller.receive_event(event) diff --git a/tests/components/zwave_js/test_update.py b/tests/components/zwave_js/test_update.py index d6683fa24cb..abdceb155f7 100644 --- a/tests/components/zwave_js/test_update.py +++ b/tests/components/zwave_js/test_update.py @@ -16,7 +16,6 @@ from homeassistant.components.update import ( ATTR_LATEST_VERSION, ATTR_RELEASE_URL, ATTR_SKIPPED_VERSION, - ATTR_UPDATE_PERCENTAGE, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, SERVICE_SKIP, @@ -156,10 +155,9 @@ async def test_update_entity_states( attrs = state.attributes assert not attrs[ATTR_AUTO_UPDATE] assert attrs[ATTR_INSTALLED_VERSION] == "10.7" - assert attrs[ATTR_IN_PROGRESS] is False + assert not attrs[ATTR_IN_PROGRESS] assert attrs[ATTR_LATEST_VERSION] == "11.2.4" assert attrs[ATTR_RELEASE_URL] is None - assert attrs[ATTR_UPDATE_PERCENTAGE] is None 
await ws_client.send_json( { @@ -419,7 +417,6 @@ async def test_update_entity_progress( assert state attrs = state.attributes assert attrs[ATTR_IN_PROGRESS] is True - assert attrs[ATTR_UPDATE_PERCENTAGE] is None event = Event( type="firmware update progress", @@ -442,8 +439,7 @@ async def test_update_entity_progress( state = hass.states.get(UPDATE_ENTITY) assert state attrs = state.attributes - assert attrs[ATTR_IN_PROGRESS] is True - assert attrs[ATTR_UPDATE_PERCENTAGE] == 5 + assert attrs[ATTR_IN_PROGRESS] == 5 event = Event( type="firmware update finished", @@ -467,7 +463,6 @@ async def test_update_entity_progress( assert state attrs = state.attributes assert attrs[ATTR_IN_PROGRESS] is False - assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert attrs[ATTR_INSTALLED_VERSION] == "11.2.4" assert attrs[ATTR_LATEST_VERSION] == "11.2.4" assert state.state == STATE_OFF @@ -537,8 +532,7 @@ async def test_update_entity_install_failed( state = hass.states.get(UPDATE_ENTITY) assert state attrs = state.attributes - assert attrs[ATTR_IN_PROGRESS] is True - assert attrs[ATTR_UPDATE_PERCENTAGE] == 5 + assert attrs[ATTR_IN_PROGRESS] == 5 event = Event( type="firmware update finished", @@ -562,7 +556,6 @@ async def test_update_entity_install_failed( assert state attrs = state.attributes assert attrs[ATTR_IN_PROGRESS] is False - assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert attrs[ATTR_INSTALLED_VERSION] == "10.7" assert attrs[ATTR_LATEST_VERSION] == "11.2.4" assert state.state == STATE_ON @@ -601,8 +594,7 @@ async def test_update_entity_reload( attrs = state.attributes assert not attrs[ATTR_AUTO_UPDATE] assert attrs[ATTR_INSTALLED_VERSION] == "10.7" - assert attrs[ATTR_IN_PROGRESS] is False - assert attrs[ATTR_UPDATE_PERCENTAGE] is None + assert not attrs[ATTR_IN_PROGRESS] assert attrs[ATTR_LATEST_VERSION] == "11.2.4" assert attrs[ATTR_RELEASE_URL] is None @@ -841,7 +833,6 @@ async def test_update_entity_full_restore_data_update_available( assert state attrs = state.attributes assert attrs[ATTR_IN_PROGRESS] is True - assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert len(client.async_send_command.call_args_list) == 2 assert client.async_send_command.call_args_list[1][0][0] == { diff --git a/tests/conftest.py b/tests/conftest.py index 35b65c5653c..161ff458ac0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,8 +3,8 @@ from __future__ import annotations import asyncio -from collections.abc import AsyncGenerator, Callable, Coroutine, Generator -from contextlib import AsyncExitStack, asynccontextmanager, contextmanager +from collections.abc import Callable, Coroutine +from contextlib import asynccontextmanager, contextmanager import datetime import functools import gc @@ -12,7 +12,6 @@ import itertools import logging import os import reprlib -from shutil import rmtree import sqlite3 import ssl import threading @@ -34,17 +33,12 @@ import multidict import pytest import pytest_socket import requests_mock -import respx from syrupy.assertion import SnapshotAssertion -from syrupy.session import SnapshotSession +from typing_extensions import AsyncGenerator, Generator from homeassistant import block_async_io -from homeassistant.exceptions import ServiceNotFound -# Setup patching of recorder functions before any other Home Assistant imports -from . import patch_recorder # noqa: F401, isort:skip - -# Setup patching of dt_util time functions before any other Home Assistant imports +# Setup patching if dt_util time functions before any other Home Assistant imports from . 
import patch_time # noqa: F401, isort:skip from homeassistant import core as ha, loader, runner @@ -52,21 +46,16 @@ from homeassistant.auth.const import GROUP_ID_ADMIN, GROUP_ID_READ_ONLY from homeassistant.auth.models import Credentials from homeassistant.auth.providers import homeassistant from homeassistant.components.device_tracker.legacy import Device - -# pylint: disable-next=hass-component-root-import from homeassistant.components.websocket_api.auth import ( TYPE_AUTH, TYPE_AUTH_OK, TYPE_AUTH_REQUIRED, ) - -# pylint: disable-next=hass-component-root-import from homeassistant.components.websocket_api.http import URL from homeassistant.config import YAML_CONFIG_FILE from homeassistant.config_entries import ConfigEntries, ConfigEntry, ConfigEntryState -from homeassistant.const import BASE_PLATFORMS, HASSIO_USER_NAME +from homeassistant.const import HASSIO_USER_NAME from homeassistant.core import ( - Context, CoreState, HassJob, HomeAssistant, @@ -87,13 +76,13 @@ from homeassistant.helpers import ( from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.translation import _TranslationsCacheData from homeassistant.helpers.typing import ConfigType -from homeassistant.setup import async_setup_component +from homeassistant.setup import BASE_PLATFORMS, async_setup_component from homeassistant.util import dt as dt_util, location -from homeassistant.util.async_ import create_eager_task, get_scheduled_timer_handles +from homeassistant.util.async_ import create_eager_task from homeassistant.util.json import json_loads from .ignore_uncaught_exceptions import IGNORE_UNCAUGHT_EXCEPTIONS -from .syrupy import HomeAssistantSnapshotExtension, override_syrupy_finish +from .syrupy import HomeAssistantSnapshotExtension from .typing import ( ClientSessionGenerator, MockHAClientWebSocket, @@ -150,11 +139,6 @@ def pytest_configure(config: pytest.Config) -> None: if config.getoption("verbose") > 0: logging.getLogger().setLevel(logging.DEBUG) - # Override default finish to detect unused snapshots despite xdist - # Temporary workaround until it is finalised inside syrupy - # See https://github.com/syrupy-project/syrupy/pull/901 - SnapshotSession.finish = override_syrupy_finish - def pytest_runtest_setup() -> None: """Prepare pytest_socket and freezegun. 
@@ -382,7 +366,7 @@ def verify_cleanup( if tasks: event_loop.run_until_complete(asyncio.wait(tasks)) - for handle in get_scheduled_timer_handles(event_loop): + for handle in event_loop._scheduled: # type: ignore[attr-defined] if not handle.cancelled(): with long_repr_strings(): if expected_lingering_timers: @@ -409,13 +393,6 @@ def verify_cleanup( # Restore the default time zone to not break subsequent tests dt_util.DEFAULT_TIME_ZONE = datetime.UTC - try: - # Verify respx.mock has been cleaned up - assert not respx.mock.routes, "respx.mock routes not cleaned up, maybe the test needs to be decorated with @respx.mock" - finally: - # Clear mock routes not break subsequent tests - respx.mock.clear() - @pytest.fixture(autouse=True) def reset_hass_threading_local_object() -> Generator[None]: @@ -424,7 +401,7 @@ def reset_hass_threading_local_object() -> Generator[None]: ha._hass.__dict__.clear() -@pytest.fixture(autouse=True, scope="session") +@pytest.fixture(scope="session", autouse=True) def bcrypt_cost() -> Generator[None]: """Run with reduced rounds during tests, to speed up uses.""" gensalt_orig = bcrypt.gensalt @@ -915,7 +892,7 @@ def fail_on_log_exception( return def log_exception(format_err, *args): - raise # noqa: PLE0704 + raise # pylint: disable=misplaced-bare-raise monkeypatch.setattr("homeassistant.util.logging.log_exception", log_exception) @@ -1272,16 +1249,6 @@ def enable_statistics() -> bool: return False -@pytest.fixture -def enable_missing_statistics() -> bool: - """Fixture to control enabling of recorder's statistics compilation. - - To enable statistics, tests can be marked with: - @pytest.mark.parametrize("enable_missing_statistics", [True]) - """ - return False - - @pytest.fixture def enable_schema_validation() -> bool: """Fixture to control enabling of recorder's statistics table validation. @@ -1303,21 +1270,11 @@ def enable_nightly_purge() -> bool: @pytest.fixture -def enable_migrate_event_context_ids() -> bool: +def enable_migrate_context_ids() -> bool: """Fixture to control enabling of recorder's context id migration. To enable context id migration, tests can be marked with: - @pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) - """ - return False - - -@pytest.fixture -def enable_migrate_state_context_ids() -> bool: - """Fixture to control enabling of recorder's context id migration. - - To enable context id migration, tests can be marked with: - @pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) + @pytest.mark.parametrize("enable_migrate_context_ids", [True]) """ return False @@ -1342,16 +1299,6 @@ def enable_migrate_entity_ids() -> bool: return False -@pytest.fixture -def enable_migrate_event_ids() -> bool: - """Fixture to control enabling of recorder's event id migration. - - To enable context id migration, tests can be marked with: - @pytest.mark.parametrize("enable_migrate_event_ids", [True]) - """ - return False - - @pytest.fixture def recorder_config() -> dict[str, Any] | None: """Fixture to override recorder config. @@ -1362,36 +1309,16 @@ def recorder_config() -> dict[str, Any] | None: return None -@pytest.fixture -def persistent_database() -> bool: - """Fixture to control if database should persist when recorder is shut down in test. - - When using sqlite, this uses on disk database instead of in memory database. - This does nothing when using mysql or postgresql. - - Note that the database is always destroyed in between tests. 
- - To use a persistent database, tests can be marked with: - @pytest.mark.parametrize("persistent_database", [True]) - """ - return False - - @pytest.fixture def recorder_db_url( pytestconfig: pytest.Config, hass_fixture_setup: list[bool], - persistent_database: str, - tmp_path_factory: pytest.TempPathFactory, ) -> Generator[str]: """Prepare a default database for tests and return a connection URL.""" assert not hass_fixture_setup db_url = cast(str, pytestconfig.getoption("dburl")) - if db_url == "sqlite://" and persistent_database: - tmp_path = tmp_path_factory.mktemp("recorder") - db_url = "sqlite:///" + str(tmp_path / "pytest.db") - elif db_url.startswith("mysql://"): + if db_url.startswith("mysql://"): # pylint: disable-next=import-outside-toplevel import sqlalchemy_utils @@ -1405,9 +1332,7 @@ def recorder_db_url( assert not sqlalchemy_utils.database_exists(db_url) sqlalchemy_utils.create_database(db_url, encoding="utf8") yield db_url - if db_url == "sqlite://" and persistent_database: - rmtree(tmp_path, ignore_errors=True) - elif db_url.startswith("mysql://"): + if db_url.startswith("mysql://"): # pylint: disable-next=import-outside-toplevel import sqlalchemy as sa @@ -1435,9 +1360,6 @@ async def _async_init_recorder_component( hass: HomeAssistant, add_config: dict[str, Any] | None = None, db_url: str | None = None, - *, - expected_setup_result: bool, - wait_setup: bool, ) -> None: """Initialize the recorder asynchronously.""" # pylint: disable-next=import-outside-toplevel @@ -1452,46 +1374,27 @@ async def _async_init_recorder_component( with patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True): if recorder.DOMAIN not in hass.data: recorder_helper.async_initialize_recorder(hass) - setup_task = asyncio.ensure_future( - async_setup_component(hass, recorder.DOMAIN, {recorder.DOMAIN: config}) + assert await async_setup_component( + hass, recorder.DOMAIN, {recorder.DOMAIN: config} ) - if wait_setup: - # Wait for recorder integration to setup - setup_result = await setup_task - assert setup_result == expected_setup_result - assert (recorder.DOMAIN in hass.config.components) == expected_setup_result - else: - # Wait for recorder to connect to the database - await recorder_helper.async_wait_recorder(hass) + assert recorder.DOMAIN in hass.config.components _LOGGER.info( "Test recorder successfully started, database location: %s", config[recorder.CONF_DB_URL], ) -class ThreadSession(threading.local): - """Keep track of session per thread.""" - - has_session = False - - -thread_session = ThreadSession() - - @pytest.fixture -async def async_test_recorder( +async def async_setup_recorder_instance( recorder_db_url: str, enable_nightly_purge: bool, enable_statistics: bool, - enable_missing_statistics: bool, enable_schema_validation: bool, - enable_migrate_event_context_ids: bool, - enable_migrate_state_context_ids: bool, + enable_migrate_context_ids: bool, enable_migrate_event_type_ids: bool, enable_migrate_entity_ids: bool, - enable_migrate_event_ids: bool, ) -> AsyncGenerator[RecorderInstanceGenerator]: - """Yield context manager to setup recorder instance.""" + """Yield callable to setup recorder instance.""" # pylint: disable-next=import-outside-toplevel from homeassistant.components import recorder @@ -1501,39 +1404,6 @@ async def async_test_recorder( # pylint: disable-next=import-outside-toplevel from .components.recorder.common import async_recorder_block_till_done - # pylint: disable-next=import-outside-toplevel - from .patch_recorder import real_session_scope - - if 
TYPE_CHECKING: - # pylint: disable-next=import-outside-toplevel - from sqlalchemy.orm.session import Session - - @contextmanager - def debug_session_scope( - *, - hass: HomeAssistant | None = None, - session: Session | None = None, - exception_filter: Callable[[Exception], bool] | None = None, - read_only: bool = False, - ) -> Generator[Session]: - """Wrap session_scope to bark if we create nested sessions.""" - if thread_session.has_session: - raise RuntimeError( - f"Thread '{threading.current_thread().name}' already has an " - "active session" - ) - thread_session.has_session = True - try: - with real_session_scope( - hass=hass, - session=session, - exception_filter=exception_filter, - read_only=read_only, - ) as ses: - yield ses - finally: - thread_session.has_session = False - nightly = recorder.Recorder.async_nightly_tasks if enable_nightly_purge else None stats = recorder.Recorder.async_periodic_statistics if enable_statistics else None schema_validate = ( @@ -1543,31 +1413,26 @@ async def async_test_recorder( ) compile_missing = ( recorder.Recorder._schedule_compile_missing_statistics - if enable_missing_statistics + if enable_statistics else None ) migrate_states_context_ids = ( - migration.StatesContextIDMigration.migrate_data - if enable_migrate_state_context_ids + recorder.Recorder._migrate_states_context_ids + if enable_migrate_context_ids else None ) migrate_events_context_ids = ( - migration.EventsContextIDMigration.migrate_data - if enable_migrate_event_context_ids + recorder.Recorder._migrate_events_context_ids + if enable_migrate_context_ids else None ) migrate_event_type_ids = ( - migration.EventTypeIDMigration.migrate_data + recorder.Recorder._migrate_event_type_ids if enable_migrate_event_type_ids else None ) migrate_entity_ids = ( - migration.EntityIDMigration.migrate_data if enable_migrate_entity_ids else None - ) - legacy_event_id_foreign_key_exists = ( - migration.EventIDPostMigration._legacy_event_id_foreign_key_exists - if enable_migrate_event_ids - else lambda _: None + recorder.Recorder._migrate_entity_ids if enable_migrate_entity_ids else None ) with ( patch( @@ -1586,101 +1451,43 @@ async def async_test_recorder( autospec=True, ), patch( - "homeassistant.components.recorder.migration.EventsContextIDMigration.migrate_data", + "homeassistant.components.recorder.Recorder._migrate_events_context_ids", side_effect=migrate_events_context_ids, autospec=True, ), patch( - "homeassistant.components.recorder.migration.StatesContextIDMigration.migrate_data", + "homeassistant.components.recorder.Recorder._migrate_states_context_ids", side_effect=migrate_states_context_ids, autospec=True, ), patch( - "homeassistant.components.recorder.migration.EventTypeIDMigration.migrate_data", + "homeassistant.components.recorder.Recorder._migrate_event_type_ids", side_effect=migrate_event_type_ids, autospec=True, ), patch( - "homeassistant.components.recorder.migration.EntityIDMigration.migrate_data", + "homeassistant.components.recorder.Recorder._migrate_entity_ids", side_effect=migrate_entity_ids, autospec=True, ), - patch( - "homeassistant.components.recorder.migration.EventIDPostMigration._legacy_event_id_foreign_key_exists", - side_effect=legacy_event_id_foreign_key_exists, - autospec=True, - ), patch( "homeassistant.components.recorder.Recorder._schedule_compile_missing_statistics", side_effect=compile_missing, autospec=True, ), - patch.object( - patch_recorder, - "real_session_scope", - side_effect=debug_session_scope, - autospec=True, - ), ): - @asynccontextmanager - async def 
async_test_recorder( - hass: HomeAssistant, - config: ConfigType | None = None, - *, - expected_setup_result: bool = True, - wait_recorder: bool = True, - wait_recorder_setup: bool = True, - ) -> AsyncGenerator[recorder.Recorder]: + async def async_setup_recorder( + hass: HomeAssistant, config: ConfigType | None = None + ) -> recorder.Recorder: """Setup and return recorder instance.""" # noqa: D401 - await _async_init_recorder_component( - hass, - config, - recorder_db_url, - expected_setup_result=expected_setup_result, - wait_setup=wait_recorder_setup, - ) + await _async_init_recorder_component(hass, config, recorder_db_url) await hass.async_block_till_done() instance = hass.data[recorder.DATA_INSTANCE] # The recorder's worker is not started until Home Assistant is running - if hass.state is CoreState.running and wait_recorder: + if hass.state is CoreState.running: await async_recorder_block_till_done(hass) - try: - yield instance - finally: - if instance.is_alive(): - await instance._async_shutdown(None) - - yield async_test_recorder - - -@pytest.fixture -async def async_setup_recorder_instance( - async_test_recorder: RecorderInstanceGenerator, -) -> AsyncGenerator[RecorderInstanceGenerator]: - """Yield callable to setup recorder instance.""" - - async with AsyncExitStack() as stack: - - async def async_setup_recorder( - hass: HomeAssistant, - config: ConfigType | None = None, - *, - expected_setup_result: bool = True, - wait_recorder: bool = True, - wait_recorder_setup: bool = True, - ) -> AsyncGenerator[recorder.Recorder]: - """Set up and return recorder instance.""" - - return await stack.enter_async_context( - async_test_recorder( - hass, - config, - expected_setup_result=expected_setup_result, - wait_recorder=wait_recorder, - wait_recorder_setup=wait_recorder_setup, - ) - ) + return instance yield async_setup_recorder @@ -1688,12 +1495,11 @@ async def async_setup_recorder_instance( @pytest.fixture async def recorder_mock( recorder_config: dict[str, Any] | None, - async_test_recorder: RecorderInstanceGenerator, + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, -) -> AsyncGenerator[recorder.Recorder]: +) -> recorder.Recorder: """Fixture with in-memory recorder.""" - async with async_test_recorder(hass, recorder_config) as instance: - yield instance + return await async_setup_recorder_instance(hass, recorder_config) @pytest.fixture @@ -1721,7 +1527,7 @@ async def mock_enable_bluetooth( await hass.async_block_till_done() -@pytest.fixture(autouse=True, scope="session") +@pytest.fixture(scope="session") def mock_bluetooth_adapters() -> Generator[None]: """Fixture to mock bluetooth adapters.""" with ( @@ -1772,30 +1578,10 @@ def mock_bleak_scanner_start() -> Generator[MagicMock]: @pytest.fixture -def integration_frame_path() -> str: - """Return the path to the integration frame. - - Can be parametrized with - `@pytest.mark.parametrize("integration_frame_path", ["path_to_frame"])` - - - "custom_components/XYZ" for a custom integration - - "homeassistant/components/XYZ" for a core integration - - "homeassistant/XYZ" for core (no integration) - - Defaults to core component `hue` - """ - return "homeassistant/components/hue" - - -@pytest.fixture -def mock_integration_frame(integration_frame_path: str) -> Generator[Mock]: - """Mock where we are calling code from. - - Defaults to calling from `hue` core integration, and can be parametrized - with `integration_frame_path`. 
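Usage note (not part of the patch): the parametrizable `integration_frame_path` variant shown in the removed lines above was meant to be combined with `mock_integration_frame` roughly as follows. This is a hypothetical sketch; the custom-integration path is only an example value, and it assumes the fixture yields the `correct_frame` Mock it builds:

import pytest


@pytest.mark.parametrize(
    "integration_frame_path", ["custom_components/my_integration"]
)
def test_example_from_custom_integration(mock_integration_frame) -> None:
    """Illustrative test only: the mocked frame points at a custom integration."""
    # The fixture builds the Mock's filename from integration_frame_path,
    # so the parametrized path shows up in the reported frame.
    assert "custom_components/my_integration" in mock_integration_frame.filename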
- """ +def mock_integration_frame() -> Generator[Mock]: + """Mock as if we're calling code from inside an integration.""" correct_frame = Mock( - filename=f"/home/paulus/{integration_frame_path}/light.py", + filename="/home/paulus/homeassistant/components/hue/light.py", lineno="23", line="self.light.is_on", ) @@ -1876,7 +1662,7 @@ def label_registry(hass: HomeAssistant) -> lr.LabelRegistry: @pytest.fixture -def service_calls(hass: HomeAssistant) -> Generator[list[ServiceCall]]: +def service_calls(hass: HomeAssistant) -> Generator[None, None, list[ServiceCall]]: """Track all service calls.""" calls = [] @@ -1887,25 +1673,17 @@ def service_calls(hass: HomeAssistant) -> Generator[list[ServiceCall]]: domain: str, service: str, service_data: dict[str, Any] | None = None, - blocking: bool = False, - context: Context | None = None, - target: dict[str, Any] | None = None, - return_response: bool = False, + **kwargs: Any, ) -> ServiceResponse: - calls.append( - ServiceCall(domain, service, service_data, context, return_response) - ) + calls.append(ServiceCall(domain, service, service_data)) try: return await _original_async_call( domain, service, service_data, - blocking, - context, - target, - return_response, + **kwargs, ) - except ServiceNotFound: + except ha.ServiceNotFound: _LOGGER.debug("Ignoring unknown service call to %s.%s", domain, service) return None @@ -1920,7 +1698,7 @@ def snapshot(snapshot: SnapshotAssertion) -> SnapshotAssertion: @pytest.fixture -def disable_block_async_io() -> Generator[None]: +def disable_block_async_io() -> Generator[Any, Any, None]: """Fixture to disable the loop protection from block_async_io.""" yield calls = block_async_io._BLOCKED_CALLS.calls diff --git a/tests/hassfest/test_requirements.py b/tests/hassfest/test_requirements.py index e70bee104c9..f3b008a6113 100644 --- a/tests/hassfest/test_requirements.py +++ b/tests/hassfest/test_requirements.py @@ -4,7 +4,7 @@ from pathlib import Path import pytest -from script.hassfest.model import Config, Integration +from script.hassfest.model import Integration from script.hassfest.requirements import validate_requirements_format @@ -13,13 +13,6 @@ def integration(): """Fixture for hassfest integration model.""" return Integration( path=Path("homeassistant/components/test"), - _config=Config( - root=Path(".").absolute(), - specific_integrations=None, - action="validate", - requirements=True, - core_integrations_path=Path("homeassistant/components"), - ), _manifest={ "domain": "test", "documentation": "https://example.com", @@ -87,22 +80,3 @@ def test_validate_requirements_format_successful(integration: Integration) -> No ] assert validate_requirements_format(integration) assert len(integration.errors) == 0 - - -def test_validate_requirements_format_github_core(integration: Integration) -> None: - """Test requirement that points to github fails with core component.""" - integration.manifest["requirements"] = [ - "git+https://github.com/user/project.git@1.2.3", - ] - assert not validate_requirements_format(integration) - assert len(integration.errors) == 1 - - -def test_validate_requirements_format_github_custom(integration: Integration) -> None: - """Test requirement that points to github succeeds with custom component.""" - integration.manifest["requirements"] = [ - "git+https://github.com/user/project.git@1.2.3", - ] - integration.path = Path("") - assert validate_requirements_format(integration) - assert len(integration.errors) == 0 diff --git a/tests/hassfest/test_version.py b/tests/hassfest/test_version.py index 
30677356101..bfe15018fe2 100644 --- a/tests/hassfest/test_version.py +++ b/tests/hassfest/test_version.py @@ -1,7 +1,5 @@ """Tests for hassfest version.""" -from pathlib import Path - import pytest import voluptuous as vol @@ -9,22 +7,13 @@ from script.hassfest.manifest import ( CUSTOM_INTEGRATION_MANIFEST_SCHEMA, validate_version, ) -from script.hassfest.model import Config, Integration +from script.hassfest.model import Integration @pytest.fixture def integration(): """Fixture for hassfest integration model.""" - integration = Integration( - "", - _config=Config( - root=Path(".").absolute(), - specific_integrations=None, - action="validate", - requirements=True, - core_integrations_path=Path("homeassistant/components"), - ), - ) + integration = Integration("") integration._manifest = { "domain": "test", "documentation": "https://example.com", diff --git a/tests/helpers/snapshots/test_entity_platform.ambr b/tests/helpers/snapshots/test_entity_platform.ambr deleted file mode 100644 index 84cbb07bd73..00000000000 --- a/tests/helpers/snapshots/test_entity_platform.ambr +++ /dev/null @@ -1,37 +0,0 @@ -# serializer version: 1 -# name: test_device_info_called - DeviceRegistryEntrySnapshot({ - 'area_id': 'heliport', - 'config_entries': , - 'configuration_url': 'http://192.168.0.100/config', - 'connections': set({ - tuple( - 'mac', - 'abcd', - ), - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': 'test-hw', - 'id': , - 'identifiers': set({ - tuple( - 'hue', - '1234', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'test-manuf', - 'model': 'test-model', - 'model_id': None, - 'name': 'test-name', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': 'Heliport', - 'sw_version': 'test-sw', - 'via_device_id': , - }) -# --- diff --git a/tests/helpers/snapshots/test_template.ambr b/tests/helpers/snapshots/test_template.ambr deleted file mode 100644 index af38433f1a4..00000000000 --- a/tests/helpers/snapshots/test_template.ambr +++ /dev/null @@ -1,337 +0,0 @@ -# serializer version: 1 -# name: test_merge_response[calendar][a_response] - dict({ - 'calendar.local_furry_events': dict({ - 'events': list([ - ]), - }), - 'calendar.sports': dict({ - 'events': list([ - dict({ - 'description': '', - 'end': '2024-02-27T18:00:00-06:00', - 'start': '2024-02-27T17:00:00-06:00', - 'summary': 'Basketball vs. Rockets', - }), - ]), - }), - 'calendar.yap_house_schedules': dict({ - 'events': list([ - dict({ - 'description': '', - 'end': '2024-02-26T09:00:00-06:00', - 'start': '2024-02-26T08:00:00-06:00', - 'summary': 'Dr. Appt', - }), - dict({ - 'description': 'something good', - 'end': '2024-02-28T21:00:00-06:00', - 'start': '2024-02-28T20:00:00-06:00', - 'summary': 'Bake a cake', - }), - ]), - }), - }) -# --- -# name: test_merge_response[calendar][b_rendered] - Wrapper([ - dict({ - 'description': '', - 'end': '2024-02-27T18:00:00-06:00', - 'entity_id': 'calendar.sports', - 'start': '2024-02-27T17:00:00-06:00', - 'summary': 'Basketball vs. Rockets', - 'value_key': 'events', - }), - dict({ - 'description': '', - 'end': '2024-02-26T09:00:00-06:00', - 'entity_id': 'calendar.yap_house_schedules', - 'start': '2024-02-26T08:00:00-06:00', - 'summary': 'Dr. 
Appt', - 'value_key': 'events', - }), - dict({ - 'description': 'something good', - 'end': '2024-02-28T21:00:00-06:00', - 'entity_id': 'calendar.yap_house_schedules', - 'start': '2024-02-28T20:00:00-06:00', - 'summary': 'Bake a cake', - 'value_key': 'events', - }), - ]) -# --- -# name: test_merge_response[vacuum][a_response] - dict({ - 'vacuum.deebot_n8_plus_1': dict({ - 'header': dict({ - 'ver': '0.0.1', - }), - 'payloadType': 'j', - 'resp': dict({ - 'body': dict({ - 'msg': 'ok', - }), - }), - }), - 'vacuum.deebot_n8_plus_2': dict({ - 'header': dict({ - 'ver': '0.0.1', - }), - 'payloadType': 'j', - 'resp': dict({ - 'body': dict({ - 'msg': 'ok', - }), - }), - }), - }) -# --- -# name: test_merge_response[vacuum][b_rendered] - Wrapper([ - dict({ - 'entity_id': 'vacuum.deebot_n8_plus_1', - 'header': dict({ - 'ver': '0.0.1', - }), - 'payloadType': 'j', - 'resp': dict({ - 'body': dict({ - 'msg': 'ok', - }), - }), - }), - dict({ - 'entity_id': 'vacuum.deebot_n8_plus_2', - 'header': dict({ - 'ver': '0.0.1', - }), - 'payloadType': 'j', - 'resp': dict({ - 'body': dict({ - 'msg': 'ok', - }), - }), - }), - ]) -# --- -# name: test_merge_response[weather][a_response] - dict({ - 'weather.forecast_home': dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2024-03-31T10:00:00+00:00', - 'humidity': 71, - 'precipitation': 0, - 'precipitation_probability': 6.6, - 'temperature': 10.9, - 'templow': 6.5, - 'wind_bearing': 71.8, - 'wind_gust_speed': 24.1, - 'wind_speed': 13.7, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2024-04-01T10:00:00+00:00', - 'humidity': 79, - 'precipitation': 0, - 'precipitation_probability': 8, - 'temperature': 10.2, - 'templow': 3.4, - 'wind_bearing': 350.6, - 'wind_gust_speed': 38.2, - 'wind_speed': 21.6, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2024-04-02T10:00:00+00:00', - 'humidity': 77, - 'precipitation': 2.3, - 'precipitation_probability': 67.4, - 'temperature': 3, - 'templow': 0, - 'wind_bearing': 24.5, - 'wind_gust_speed': 64.8, - 'wind_speed': 37.4, - }), - ]), - }), - 'weather.smhi_home': dict({ - 'forecast': list([ - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2024-03-31T16:00:00', - 'humidity': 87, - 'precipitation': 0.2, - 'pressure': 998, - 'temperature': 10, - 'templow': 4, - 'wind_bearing': 79, - 'wind_gust_speed': 21.6, - 'wind_speed': 11.88, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'rainy', - 'datetime': '2024-04-01T12:00:00', - 'humidity': 88, - 'precipitation': 2.2, - 'pressure': 999, - 'temperature': 6, - 'templow': 1, - 'wind_bearing': 17, - 'wind_gust_speed': 20.52, - 'wind_speed': 8.64, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2024-04-02T12:00:00', - 'humidity': 71, - 'precipitation': 1.3, - 'pressure': 1003, - 'temperature': 0, - 'templow': -3, - 'wind_bearing': 17, - 'wind_gust_speed': 57.24, - 'wind_speed': 30.6, - }), - ]), - }), - }) -# --- -# name: test_merge_response[weather][b_rendered] - Wrapper([ - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2024-03-31T16:00:00', - 'entity_id': 'weather.smhi_home', - 'humidity': 87, - 'precipitation': 0.2, - 'pressure': 998, - 'temperature': 10, - 'templow': 4, - 'value_key': 'forecast', - 'wind_bearing': 79, - 'wind_gust_speed': 21.6, - 'wind_speed': 11.88, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'rainy', - 'datetime': '2024-04-01T12:00:00', - 'entity_id': 'weather.smhi_home', - 'humidity': 88, - 'precipitation': 2.2, - 'pressure': 999, - 
'temperature': 6, - 'templow': 1, - 'value_key': 'forecast', - 'wind_bearing': 17, - 'wind_gust_speed': 20.52, - 'wind_speed': 8.64, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2024-04-02T12:00:00', - 'entity_id': 'weather.smhi_home', - 'humidity': 71, - 'precipitation': 1.3, - 'pressure': 1003, - 'temperature': 0, - 'templow': -3, - 'value_key': 'forecast', - 'wind_bearing': 17, - 'wind_gust_speed': 57.24, - 'wind_speed': 30.6, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2024-03-31T10:00:00+00:00', - 'entity_id': 'weather.forecast_home', - 'humidity': 71, - 'precipitation': 0, - 'precipitation_probability': 6.6, - 'temperature': 10.9, - 'templow': 6.5, - 'value_key': 'forecast', - 'wind_bearing': 71.8, - 'wind_gust_speed': 24.1, - 'wind_speed': 13.7, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2024-04-01T10:00:00+00:00', - 'entity_id': 'weather.forecast_home', - 'humidity': 79, - 'precipitation': 0, - 'precipitation_probability': 8, - 'temperature': 10.2, - 'templow': 3.4, - 'value_key': 'forecast', - 'wind_bearing': 350.6, - 'wind_gust_speed': 38.2, - 'wind_speed': 21.6, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2024-04-02T10:00:00+00:00', - 'entity_id': 'weather.forecast_home', - 'humidity': 77, - 'precipitation': 2.3, - 'precipitation_probability': 67.4, - 'temperature': 3, - 'templow': 0, - 'value_key': 'forecast', - 'wind_bearing': 24.5, - 'wind_gust_speed': 64.8, - 'wind_speed': 37.4, - }), - ]) -# --- -# name: test_merge_response[workday][a_response] - dict({ - 'binary_sensor.workday': dict({ - 'workday': True, - }), - 'binary_sensor.workday2': dict({ - 'workday': False, - }), - }) -# --- -# name: test_merge_response[workday][b_rendered] - Wrapper([ - dict({ - 'entity_id': 'binary_sensor.workday', - 'workday': True, - }), - dict({ - 'entity_id': 'binary_sensor.workday2', - 'workday': False, - }), - ]) -# --- -# name: test_merge_response_with_empty_response[a_response] - dict({ - 'calendar.local_furry_events': dict({ - 'events': list([ - ]), - }), - 'calendar.sports': dict({ - 'events': list([ - ]), - }), - 'calendar.yap_house_schedules': dict({ - 'events': list([ - ]), - }), - }) -# --- -# name: test_merge_response_with_empty_response[b_rendered] - Wrapper([ - ]) -# --- diff --git a/tests/helpers/test_aiohttp_client.py b/tests/helpers/test_aiohttp_client.py index 126ed3f9287..7dd34fd2c64 100644 --- a/tests/helpers/test_aiohttp_client.py +++ b/tests/helpers/test_aiohttp_client.py @@ -1,6 +1,5 @@ """Test the aiohttp client helper.""" -import socket from unittest.mock import Mock, patch import aiohttp @@ -17,13 +16,11 @@ from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME, CONF_VERIFY_SSL, - EVENT_HOMEASSISTANT_CLOSE, HTTP_BASIC_AUTHENTICATION, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import EVENT_HOMEASSISTANT_CLOSE, HomeAssistant import homeassistant.helpers.aiohttp_client as client from homeassistant.util.color import RGBColor -from homeassistant.util.ssl import SSLCipherList from tests.common import ( MockConfigEntry, @@ -63,14 +60,11 @@ async def test_get_clientsession_with_ssl(hass: HomeAssistant) -> None: """Test init clientsession with ssl.""" client.async_get_clientsession(hass) verify_ssl = True - ssl_cipher = SSLCipherList.PYTHON_DEFAULT family = 0 - client_session = hass.data[client.DATA_CLIENTSESSION][ - (verify_ssl, family, ssl_cipher) - ] + client_session = hass.data[client.DATA_CLIENTSESSION][(verify_ssl, family)] assert isinstance(client_session, 
aiohttp.ClientSession) - connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family, ssl_cipher)] + connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family)] assert isinstance(connector, aiohttp.TCPConnector) @@ -78,63 +72,26 @@ async def test_get_clientsession_without_ssl(hass: HomeAssistant) -> None: """Test init clientsession without ssl.""" client.async_get_clientsession(hass, verify_ssl=False) verify_ssl = False - ssl_cipher = SSLCipherList.PYTHON_DEFAULT family = 0 - client_session = hass.data[client.DATA_CLIENTSESSION][ - (verify_ssl, family, ssl_cipher) - ] + client_session = hass.data[client.DATA_CLIENTSESSION][(verify_ssl, family)] assert isinstance(client_session, aiohttp.ClientSession) - connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family, ssl_cipher)] + connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family)] assert isinstance(connector, aiohttp.TCPConnector) @pytest.mark.parametrize( - ("verify_ssl", "expected_family", "ssl_cipher"), - [ - (True, socket.AF_UNSPEC, SSLCipherList.PYTHON_DEFAULT), - (True, socket.AF_INET, SSLCipherList.PYTHON_DEFAULT), - (True, socket.AF_INET6, SSLCipherList.PYTHON_DEFAULT), - (True, socket.AF_UNSPEC, SSLCipherList.INTERMEDIATE), - (True, socket.AF_INET, SSLCipherList.INTERMEDIATE), - (True, socket.AF_INET6, SSLCipherList.INTERMEDIATE), - (True, socket.AF_UNSPEC, SSLCipherList.MODERN), - (True, socket.AF_INET, SSLCipherList.MODERN), - (True, socket.AF_INET6, SSLCipherList.MODERN), - (True, socket.AF_UNSPEC, SSLCipherList.INSECURE), - (True, socket.AF_INET, SSLCipherList.INSECURE), - (True, socket.AF_INET6, SSLCipherList.INSECURE), - (False, socket.AF_UNSPEC, SSLCipherList.PYTHON_DEFAULT), - (False, socket.AF_INET, SSLCipherList.PYTHON_DEFAULT), - (False, socket.AF_INET6, SSLCipherList.PYTHON_DEFAULT), - (False, socket.AF_UNSPEC, SSLCipherList.INTERMEDIATE), - (False, socket.AF_INET, SSLCipherList.INTERMEDIATE), - (False, socket.AF_INET6, SSLCipherList.INTERMEDIATE), - (False, socket.AF_UNSPEC, SSLCipherList.MODERN), - (False, socket.AF_INET, SSLCipherList.MODERN), - (False, socket.AF_INET6, SSLCipherList.MODERN), - (False, socket.AF_UNSPEC, SSLCipherList.INSECURE), - (False, socket.AF_INET, SSLCipherList.INSECURE), - (False, socket.AF_INET6, SSLCipherList.INSECURE), - ], + ("verify_ssl", "expected_family"), + [(True, 0), (False, 0), (True, 4), (False, 4), (True, 6), (False, 6)], ) async def test_get_clientsession( - hass: HomeAssistant, - verify_ssl: bool, - expected_family: int, - ssl_cipher: SSLCipherList, + hass: HomeAssistant, verify_ssl: bool, expected_family: int ) -> None: """Test init clientsession combinations.""" - client.async_get_clientsession( - hass, verify_ssl=verify_ssl, family=expected_family, ssl_cipher=ssl_cipher - ) - client_session = hass.data[client.DATA_CLIENTSESSION][ - (verify_ssl, expected_family, ssl_cipher) - ] + client.async_get_clientsession(hass, verify_ssl=verify_ssl, family=expected_family) + client_session = hass.data[client.DATA_CLIENTSESSION][(verify_ssl, expected_family)] assert isinstance(client_session, aiohttp.ClientSession) - connector = hass.data[client.DATA_CONNECTOR][ - (verify_ssl, expected_family, ssl_cipher) - ] + connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, expected_family)] assert isinstance(connector, aiohttp.TCPConnector) @@ -144,11 +101,10 @@ async def test_create_clientsession_with_ssl_and_cookies(hass: HomeAssistant) -> assert isinstance(session, aiohttp.ClientSession) verify_ssl = True - ssl_cipher = SSLCipherList.PYTHON_DEFAULT family = 0 
assert client.DATA_CLIENTSESSION not in hass.data - connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family, ssl_cipher)] + connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family)] assert isinstance(connector, aiohttp.TCPConnector) @@ -160,61 +116,26 @@ async def test_create_clientsession_without_ssl_and_cookies( assert isinstance(session, aiohttp.ClientSession) verify_ssl = False - ssl_cipher = SSLCipherList.PYTHON_DEFAULT family = 0 assert client.DATA_CLIENTSESSION not in hass.data - connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family, ssl_cipher)] + connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, family)] assert isinstance(connector, aiohttp.TCPConnector) @pytest.mark.parametrize( - ("verify_ssl", "expected_family", "ssl_cipher"), - [ - (True, 0, SSLCipherList.PYTHON_DEFAULT), - (True, 4, SSLCipherList.PYTHON_DEFAULT), - (True, 6, SSLCipherList.PYTHON_DEFAULT), - (True, 0, SSLCipherList.INTERMEDIATE), - (True, 4, SSLCipherList.INTERMEDIATE), - (True, 6, SSLCipherList.INTERMEDIATE), - (True, 0, SSLCipherList.MODERN), - (True, 4, SSLCipherList.MODERN), - (True, 6, SSLCipherList.MODERN), - (True, 0, SSLCipherList.INSECURE), - (True, 4, SSLCipherList.INSECURE), - (True, 6, SSLCipherList.INSECURE), - (False, 0, SSLCipherList.PYTHON_DEFAULT), - (False, 4, SSLCipherList.PYTHON_DEFAULT), - (False, 6, SSLCipherList.PYTHON_DEFAULT), - (False, 0, SSLCipherList.INTERMEDIATE), - (False, 4, SSLCipherList.INTERMEDIATE), - (False, 6, SSLCipherList.INTERMEDIATE), - (False, 0, SSLCipherList.MODERN), - (False, 4, SSLCipherList.MODERN), - (False, 6, SSLCipherList.MODERN), - (False, 0, SSLCipherList.INSECURE), - (False, 4, SSLCipherList.INSECURE), - (False, 6, SSLCipherList.INSECURE), - ], + ("verify_ssl", "expected_family"), + [(True, 0), (False, 0), (True, 4), (False, 4), (True, 6), (False, 6)], ) async def test_get_clientsession_cleanup( - hass: HomeAssistant, - verify_ssl: bool, - expected_family: int, - ssl_cipher: SSLCipherList, + hass: HomeAssistant, verify_ssl: bool, expected_family: int ) -> None: """Test init clientsession cleanup.""" - client.async_get_clientsession( - hass, verify_ssl=verify_ssl, family=expected_family, ssl_cipher=ssl_cipher - ) + client.async_get_clientsession(hass, verify_ssl=verify_ssl, family=expected_family) - client_session = hass.data[client.DATA_CLIENTSESSION][ - (verify_ssl, expected_family, ssl_cipher) - ] + client_session = hass.data[client.DATA_CLIENTSESSION][(verify_ssl, expected_family)] assert isinstance(client_session, aiohttp.ClientSession) - connector = hass.data[client.DATA_CONNECTOR][ - (verify_ssl, expected_family, ssl_cipher) - ] + connector = hass.data[client.DATA_CONNECTOR][(verify_ssl, expected_family)] assert isinstance(connector, aiohttp.TCPConnector) hass.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE) @@ -228,19 +149,17 @@ async def test_get_clientsession_patched_close(hass: HomeAssistant) -> None: """Test closing clientsession does not work.""" verify_ssl = True - ssl_cipher = SSLCipherList.PYTHON_DEFAULT family = 0 with patch("aiohttp.ClientSession.close") as mock_close: session = client.async_get_clientsession(hass) assert isinstance( - hass.data[client.DATA_CLIENTSESSION][(verify_ssl, family, ssl_cipher)], + hass.data[client.DATA_CLIENTSESSION][(verify_ssl, family)], aiohttp.ClientSession, ) assert isinstance( - hass.data[client.DATA_CONNECTOR][(verify_ssl, family, ssl_cipher)], - aiohttp.TCPConnector, + hass.data[client.DATA_CONNECTOR][(verify_ssl, family)], aiohttp.TCPConnector ) with 
pytest.raises(RuntimeError): diff --git a/tests/helpers/test_area_registry.py b/tests/helpers/test_area_registry.py index 74f55c86a6c..e6d637d1a99 100644 --- a/tests/helpers/test_area_registry.py +++ b/tests/helpers/test_area_registry.py @@ -1,10 +1,8 @@ """Tests for the Area Registry.""" -from datetime import datetime, timedelta from functools import partial from typing import Any -from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.core import HomeAssistant @@ -13,7 +11,6 @@ from homeassistant.helpers import ( floor_registry as fr, label_registry as lr, ) -from homeassistant.util.dt import utcnow from tests.common import ANY, async_capture_events, flush_store @@ -27,11 +24,7 @@ async def test_list_areas(area_registry: ar.AreaRegistry) -> None: assert len(areas) == len(area_registry.areas) -async def test_create_area( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - area_registry: ar.AreaRegistry, -) -> None: +async def test_create_area(hass: HomeAssistant, area_registry: ar.AreaRegistry) -> None: """Make sure that we can create an area.""" update_events = async_capture_events(hass, ar.EVENT_AREA_REGISTRY_UPDATED) @@ -45,14 +38,11 @@ async def test_create_area( id=ANY, labels=set(), name="mock", + normalized_name=ANY, picture=None, - created_at=utcnow(), - modified_at=utcnow(), ) assert len(area_registry.areas) == 1 - freezer.tick(timedelta(minutes=5)) - await hass.async_block_till_done() assert len(update_events) == 1 @@ -62,34 +52,31 @@ async def test_create_area( } # Create area with all parameters - area2 = area_registry.async_create( + area = area_registry.async_create( "mock 2", aliases={"alias_1", "alias_2"}, labels={"label1", "label2"}, picture="/image/example.png", ) - assert area2 == ar.AreaEntry( + assert area == ar.AreaEntry( aliases={"alias_1", "alias_2"}, floor_id=None, icon=None, id=ANY, labels={"label1", "label2"}, name="mock 2", + normalized_name=ANY, picture="/image/example.png", - created_at=utcnow(), - modified_at=utcnow(), ) assert len(area_registry.areas) == 2 - assert area.created_at != area2.created_at - assert area.modified_at != area2.modified_at await hass.async_block_till_done() assert len(update_events) == 2 assert update_events[-1].data == { "action": "create", - "area_id": area2.id, + "area_id": area.id, } @@ -163,18 +150,11 @@ async def test_update_area( area_registry: ar.AreaRegistry, floor_registry: fr.FloorRegistry, label_registry: lr.LabelRegistry, - freezer: FrozenDateTimeFactory, ) -> None: """Make sure that we can read areas.""" - created_at = datetime.fromisoformat("2024-01-01T01:00:00+00:00") - freezer.move_to(created_at) update_events = async_capture_events(hass, ar.EVENT_AREA_REGISTRY_UPDATED) floor_registry.async_create("first") area = area_registry.async_create("mock") - assert area.modified_at == created_at - - modified_at = datetime.fromisoformat("2024-02-01T01:00:00+00:00") - freezer.move_to(modified_at) updated_area = area_registry.async_update( area.id, @@ -194,9 +174,8 @@ async def test_update_area( id=ANY, labels={"label1", "label2"}, name="mock1", + normalized_name=ANY, picture="/image/example.png", - created_at=created_at, - modified_at=modified_at, ) assert len(area_registry.areas) == 1 @@ -239,12 +218,9 @@ async def test_update_area_with_same_name_change_case( async def test_update_area_with_name_already_in_use( area_registry: ar.AreaRegistry, - floor_registry: fr.FloorRegistry, ) -> None: """Make sure that we can't update an area with a name already in use.""" - floor = 
floor_registry.async_create("mock") - floor_id = floor.floor_id - area1 = area_registry.async_create("mock1", floor_id=floor_id) + area1 = area_registry.async_create("mock1") area2 = area_registry.async_create("mock2") with pytest.raises(ValueError) as e_info: @@ -255,8 +231,6 @@ async def test_update_area_with_name_already_in_use( assert area2.name == "mock2" assert len(area_registry.areas) == 2 - assert area_registry.areas.get_areas_for_floor(floor_id) == [area1] - async def test_update_area_with_normalized_name_already_in_use( area_registry: ar.AreaRegistry, @@ -311,8 +285,6 @@ async def test_loading_area_from_storage( "labels": ["mock-label1", "mock-label2"], "name": "mock", "picture": "blah", - "created_at": utcnow().isoformat(), - "modified_at": utcnow().isoformat(), } ] }, @@ -357,8 +329,6 @@ async def test_migration_from_1_1( "labels": [], "name": "mock", "picture": None, - "created_at": "1970-01-01T00:00:00+00:00", - "modified_at": "1970-01-01T00:00:00+00:00", } ] }, diff --git a/tests/helpers/test_category_registry.py b/tests/helpers/test_category_registry.py index cad997fd50f..1317750ebec 100644 --- a/tests/helpers/test_category_registry.py +++ b/tests/helpers/test_category_registry.py @@ -1,16 +1,13 @@ """Tests for the category registry.""" -from datetime import datetime from functools import partial import re from typing import Any -from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.core import HomeAssistant from homeassistant.helpers import category_registry as cr -from homeassistant.util.dt import UTC from tests.common import async_capture_events, flush_store @@ -155,13 +152,9 @@ async def test_delete_non_existing_category( async def test_update_category( - hass: HomeAssistant, - category_registry: cr.CategoryRegistry, - freezer: FrozenDateTimeFactory, + hass: HomeAssistant, category_registry: cr.CategoryRegistry ) -> None: """Make sure that we can update categories.""" - created = datetime(2024, 2, 14, 12, 0, 0, tzinfo=UTC) - freezer.move_to(created) update_events = async_capture_events(hass, cr.EVENT_CATEGORY_REGISTRY_UPDATED) category = category_registry.async_create( scope="automation", @@ -169,16 +162,9 @@ async def test_update_category( ) assert len(category_registry.categories["automation"]) == 1 - assert category == cr.CategoryEntry( - category_id=category.category_id, - created_at=created, - modified_at=created, - name="Energy saving", - icon=None, - ) - - modified = datetime(2024, 3, 14, 12, 0, 0, tzinfo=UTC) - freezer.move_to(modified) + assert category.category_id + assert category.name == "Energy saving" + assert category.icon is None updated_category = category_registry.async_update( scope="automation", @@ -188,13 +174,9 @@ async def test_update_category( ) assert updated_category != category - assert updated_category == cr.CategoryEntry( - category_id=category.category_id, - created_at=created, - modified_at=modified, - name="ENERGY SAVING", - icon="mdi:leaf", - ) + assert updated_category.category_id == category.category_id + assert updated_category.name == "ENERGY SAVING" + assert updated_category.icon == "mdi:leaf" assert len(category_registry.categories["automation"]) == 1 @@ -361,25 +343,18 @@ async def test_loading_categories_from_storage( hass: HomeAssistant, hass_storage: dict[str, Any] ) -> None: """Test loading stored categories on start.""" - date_1 = datetime(2024, 2, 14, 12, 0, 0) - date_2 = datetime(2024, 2, 14, 12, 0, 0) hass_storage[cr.STORAGE_KEY] = { "version": cr.STORAGE_VERSION_MAJOR, - "minor_version": 
cr.STORAGE_VERSION_MINOR, "data": { "categories": { "automation": [ { "category_id": "uuid1", - "created_at": date_1.isoformat(), - "modified_at": date_1.isoformat(), "name": "Energy saving", "icon": "mdi:leaf", }, { "category_id": "uuid2", - "created_at": date_1.isoformat(), - "modified_at": date_2.isoformat(), "name": "Something else", "icon": None, }, @@ -387,8 +362,6 @@ async def test_loading_categories_from_storage( "zone": [ { "category_id": "uuid3", - "created_at": date_2.isoformat(), - "modified_at": date_2.isoformat(), "name": "Grocery stores", "icon": "mdi:store", }, @@ -407,33 +380,21 @@ async def test_loading_categories_from_storage( category1 = category_registry.async_get_category( scope="automation", category_id="uuid1" ) - assert category1 == cr.CategoryEntry( - category_id="uuid1", - created_at=date_1, - modified_at=date_1, - name="Energy saving", - icon="mdi:leaf", - ) + assert category1.category_id == "uuid1" + assert category1.name == "Energy saving" + assert category1.icon == "mdi:leaf" category2 = category_registry.async_get_category( scope="automation", category_id="uuid2" ) - assert category2 == cr.CategoryEntry( - category_id="uuid2", - created_at=date_1, - modified_at=date_2, - name="Something else", - icon=None, - ) + assert category2.category_id == "uuid2" + assert category2.name == "Something else" + assert category2.icon is None category3 = category_registry.async_get_category(scope="zone", category_id="uuid3") - assert category3 == cr.CategoryEntry( - category_id="uuid3", - created_at=date_2, - modified_at=date_2, - name="Grocery stores", - icon="mdi:store", - ) + assert category3.category_id == "uuid3" + assert category3.name == "Grocery stores" + assert category3.icon == "mdi:store" async def test_async_create_thread_safety( @@ -486,83 +447,3 @@ async def test_async_update_thread_safety( name="new name", ) ) - - -@pytest.mark.parametrize("load_registries", [False]) -async def test_migration_from_1_1( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test migration from version 1.1.""" - hass_storage[cr.STORAGE_KEY] = { - "version": 1, - "data": { - "categories": { - "automation": [ - { - "category_id": "uuid1", - "name": "Energy saving", - "icon": "mdi:leaf", - }, - { - "category_id": "uuid2", - "name": "Something else", - "icon": None, - }, - ], - "zone": [ - { - "category_id": "uuid3", - "name": "Grocery stores", - "icon": "mdi:store", - }, - ], - } - }, - } - - await cr.async_load(hass) - registry = cr.async_get(hass) - - # Test data was loaded - assert len(registry.categories) == 2 - assert len(registry.categories["automation"]) == 2 - assert len(registry.categories["zone"]) == 1 - - assert registry.async_get_category(scope="automation", category_id="uuid1") - - # Check we store migrated data - await flush_store(registry._store) - assert hass_storage[cr.STORAGE_KEY] == { - "version": cr.STORAGE_VERSION_MAJOR, - "minor_version": cr.STORAGE_VERSION_MINOR, - "key": cr.STORAGE_KEY, - "data": { - "categories": { - "automation": [ - { - "category_id": "uuid1", - "created_at": "1970-01-01T00:00:00+00:00", - "modified_at": "1970-01-01T00:00:00+00:00", - "name": "Energy saving", - "icon": "mdi:leaf", - }, - { - "category_id": "uuid2", - "created_at": "1970-01-01T00:00:00+00:00", - "modified_at": "1970-01-01T00:00:00+00:00", - "name": "Something else", - "icon": None, - }, - ], - "zone": [ - { - "category_id": "uuid3", - "created_at": "1970-01-01T00:00:00+00:00", - "modified_at": "1970-01-01T00:00:00+00:00", - "name": "Grocery stores", - "icon": 
"mdi:store", - }, - ], - } - }, - } diff --git a/tests/helpers/test_collection.py b/tests/helpers/test_collection.py index f564f85ec3b..f0287218d7f 100644 --- a/tests/helpers/test_collection.py +++ b/tests/helpers/test_collection.py @@ -2,10 +2,8 @@ from __future__ import annotations -from datetime import timedelta import logging -from freezegun.api import FrozenDateTimeFactory import pytest import voluptuous as vol @@ -17,7 +15,6 @@ from homeassistant.helpers import ( storage, ) from homeassistant.helpers.typing import ConfigType -from homeassistant.util.dt import utcnow from tests.common import flush_store from tests.typing import WebSocketGenerator @@ -257,84 +254,6 @@ async def test_storage_collection(hass: HomeAssistant) -> None: } -async def test_storage_collection_update_modifiet_at( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test that updating a storage collection will update the modified_at datetime in the entity registry.""" - - entities: dict[str, TestEntity] = {} - - class TestEntity(MockEntity): - """Entity that is config based.""" - - def __init__(self, config: ConfigType) -> None: - """Initialize entity.""" - super().__init__(config) - self._state = "initial" - - @classmethod - def from_storage(cls, config: ConfigType) -> TestEntity: - """Create instance from storage.""" - obj = super().from_storage(config) - entities[obj.unique_id] = obj - return obj - - @property - def state(self) -> str: - """Return state of entity.""" - return self._state - - def set_state(self, value: str) -> None: - """Set value.""" - self._state = value - self.async_write_ha_state() - - store = storage.Store(hass, 1, "test-data") - data = {"id": "mock-1", "name": "Mock 1", "data": 1} - await store.async_save( - { - "items": [ - data, - ] - } - ) - id_manager = collection.IDManager() - ent_comp = entity_component.EntityComponent(_LOGGER, "test", hass) - await ent_comp.async_setup({}) - coll = MockStorageCollection(store, id_manager) - collection.sync_entity_lifecycle(hass, "test", "test", ent_comp, coll, TestEntity) - changes = track_changes(coll) - - await coll.async_load() - assert id_manager.has_id("mock-1") - assert len(changes) == 1 - assert changes[0] == (collection.CHANGE_ADDED, "mock-1", data) - - modified_1 = entity_registry.async_get("test.mock_1").modified_at - assert modified_1 == utcnow() - - freezer.tick(timedelta(minutes=1)) - - updated_item = await coll.async_update_item("mock-1", {"data": 2}) - assert id_manager.has_id("mock-1") - assert updated_item == {"id": "mock-1", "name": "Mock 1", "data": 2} - assert len(changes) == 2 - assert changes[1] == (collection.CHANGE_UPDATED, "mock-1", updated_item) - - modified_2 = entity_registry.async_get("test.mock_1").modified_at - assert modified_2 > modified_1 - assert modified_2 == utcnow() - - freezer.tick(timedelta(minutes=1)) - - entities["mock-1"].set_state("second") - - modified_3 = entity_registry.async_get("test.mock_1").modified_at - assert modified_3 == modified_2 - - async def test_attach_entity_component_collection(hass: HomeAssistant) -> None: """Test attaching collection to entity component.""" ent_comp = entity_component.EntityComponent(_LOGGER, "test", hass) diff --git a/tests/helpers/test_config_entry_flow.py b/tests/helpers/test_config_entry_flow.py index 13e28bb8840..6a198b7a297 100644 --- a/tests/helpers/test_config_entry_flow.py +++ b/tests/helpers/test_config_entry_flow.py @@ -1,13 +1,13 @@ """Tests for the Config Entry Flow helper.""" -from collections.abc 
import Generator from unittest.mock import Mock, PropertyMock, patch import pytest +from typing_extensions import Generator from homeassistant import config_entries, data_entry_flow, setup +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import config_entry_flow from tests.common import MockConfigEntry, MockModule, mock_integration, mock_platform diff --git a/tests/helpers/test_config_entry_oauth2_flow.py b/tests/helpers/test_config_entry_oauth2_flow.py index 52def52f3f0..132a0b41707 100644 --- a/tests/helpers/test_config_entry_oauth2_flow.py +++ b/tests/helpers/test_config_entry_oauth2_flow.py @@ -1,6 +1,5 @@ """Tests for the Somfy config flow.""" -from collections.abc import Generator from http import HTTPStatus import logging import time @@ -9,6 +8,7 @@ from unittest.mock import patch import aiohttp import pytest +from typing_extensions import Generator from homeassistant import config_entries, data_entry_flow, setup from homeassistant.core import HomeAssistant @@ -873,9 +873,7 @@ async def test_implementation_provider(hass: HomeAssistant, local_impl) -> None: provider_source = [] - async def async_provide_implementation( - hass: HomeAssistant, domain: str - ) -> list[config_entry_oauth2_flow.AbstractOAuth2Implementation]: + async def async_provide_implementation(hass, domain): """Mock implementation provider.""" return provider_source diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index 7202cef6f5f..6df29eefaff 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -3,17 +3,12 @@ from collections import OrderedDict from datetime import date, datetime, timedelta import enum -from functools import partial import logging import os -import re from socket import _GLOBAL_DEFAULT_TIMEOUT -import threading -from typing import Any -from unittest.mock import ANY, Mock, patch +from unittest.mock import Mock, patch import uuid -import py import pytest import voluptuous as vol @@ -26,7 +21,6 @@ from homeassistant.helpers import ( selector, template, ) -from homeassistant.helpers.config_validation import TRIGGER_SCHEMA def test_boolean() -> None: @@ -38,7 +32,7 @@ def test_boolean() -> None: "T", "negative", "lock", - "tr ue", # codespell:ignore ue + "tr ue", [], [1, 2], {"one": "two"}, @@ -199,12 +193,12 @@ def test_platform_config() -> None: def test_ensure_list() -> None: """Test ensure_list.""" schema = vol.Schema(cv.ensure_list) - assert schema(None) == [] - assert schema(1) == [1] - assert schema([1]) == [1] - assert schema("1") == ["1"] - assert schema(["1"]) == ["1"] - assert schema({"1": "2"}) == [{"1": "2"}] + assert [] == schema(None) + assert [1] == schema(1) + assert [1] == schema([1]) + assert ["1"] == schema("1") + assert ["1"] == schema(["1"]) + assert [{"1": "2"}] == schema({"1": "2"}) def test_entity_id() -> None: @@ -422,9 +416,27 @@ def test_service() -> None: schema("homeassistant.turn_on") -@pytest.mark.parametrize( - "config", - [ +def test_service_schema(hass: HomeAssistant) -> None: + """Test service_schema validation.""" + options = ( + {}, + None, + { + "service": "homeassistant.turn_on", + "service_template": "homeassistant.turn_on", + }, + {"data": {"entity_id": "light.kitchen"}}, + {"service": "homeassistant.turn_on", "data": None}, + { + "service": "homeassistant.turn_on", + "data_template": {"brightness": "{{ no_end"}, + }, 
+ ) + for value in options: + with pytest.raises(vol.MultipleInvalid): + cv.SERVICE_SCHEMA(value) + + options = ( {"service": "homeassistant.turn_on"}, {"service": "homeassistant.turn_on", "entity_id": "light.kitchen"}, {"service": "light.turn_on", "entity_id": "all"}, @@ -438,70 +450,14 @@ def test_service() -> None: "alias": "turn on kitchen lights", }, {"service": "scene.turn_on", "metadata": {}}, - {"action": "homeassistant.turn_on"}, - {"action": "homeassistant.turn_on", "entity_id": "light.kitchen"}, - {"action": "light.turn_on", "entity_id": "all"}, - { - "action": "homeassistant.turn_on", - "entity_id": ["light.kitchen", "light.ceiling"], - }, - { - "action": "light.turn_on", - "entity_id": "all", - "alias": "turn on kitchen lights", - }, - {"action": "scene.turn_on", "metadata": {}}, - ], -) -def test_service_schema(hass: HomeAssistant, config: dict[str, Any]) -> None: - """Test service_schema validation.""" - validated = cv.SERVICE_SCHEMA(config) + ) + for value in options: + cv.SERVICE_SCHEMA(value) - # Ensure metadata is removed from the validated output - assert "metadata" not in validated - - # Ensure service is migrated to action - assert "service" not in validated - assert "action" in validated - assert validated["action"] == config.get("service", config["action"]) - - -@pytest.mark.parametrize( - "config", - [ - {}, - None, - {"data": {"entity_id": "light.kitchen"}}, - { - "service": "homeassistant.turn_on", - "service_template": "homeassistant.turn_on", - }, - {"service": "homeassistant.turn_on", "data": None}, - { - "service": "homeassistant.turn_on", - "data_template": {"brightness": "{{ no_end"}, - }, - { - "service": "homeassistant.turn_on", - "action": "homeassistant.turn_on", - }, - { - "action": "homeassistant.turn_on", - "service_template": "homeassistant.turn_on", - }, - {"action": "homeassistant.turn_on", "data": None}, - { - "action": "homeassistant.turn_on", - "data_template": {"brightness": "{{ no_end"}, - }, - ], -) -def test_invalid_service_schema( - hass: HomeAssistant, config: dict[str, Any] | None -) -> None: - """Test service_schema validation fails.""" - with pytest.raises(vol.MultipleInvalid): - cv.SERVICE_SCHEMA(config) + # Check metadata is removed from the validated output + assert cv.SERVICE_SCHEMA({"service": "scene.turn_on", "metadata": {}}) == { + "service": "scene.turn_on" + } def test_entity_service_schema() -> None: @@ -673,12 +629,10 @@ def test_template(hass: HomeAssistant) -> None: "Hello", "{{ beer }}", "{% if 1 == 1 %}Hello{% else %}World{% endif %}", - # Function 'expand' added as an extension by Home Assistant + # Function added as an extension by Home Assistant "{{ expand('group.foo')|map(attribute='entity_id')|list }}", - # Filter 'expand' added as an extension by Home Assistant + # Filter added as an extension by Home Assistant "{{ ['group.foo']|expand|map(attribute='entity_id')|list }}", - # Non existing function 'no_such_function' is not detected by Jinja2 - "{{ no_such_function('group.foo')|map(attribute='entity_id')|list }}", ) for value in options: schema(value) @@ -704,11 +658,8 @@ async def test_template_no_hass(hass: HomeAssistant) -> None: "Hello", "{{ beer }}", "{% if 1 == 1 %}Hello{% else %}World{% endif %}", - # Function 'expand' added as an extension by Home Assistant, no error - # because non existing functions are not detected by Jinja2 + # Function added as an extension by Home Assistant "{{ expand('group.foo')|map(attribute='entity_id')|list }}", - # Non existing function 'no_such_function' is not detected by 
Jinja2 - "{{ no_such_function('group.foo')|map(attribute='entity_id')|list }}", ) for value in options: await hass.async_add_executor_job(schema, value) @@ -732,12 +683,10 @@ def test_dynamic_template(hass: HomeAssistant) -> None: options = ( "{{ beer }}", "{% if 1 == 1 %}Hello{% else %}World{% endif %}", - # Function 'expand' added as an extension by Home Assistant + # Function added as an extension by Home Assistant "{{ expand('group.foo')|map(attribute='entity_id')|list }}", - # Filter 'expand' added as an extension by Home Assistant + # Filter added as an extension by Home Assistant "{{ ['group.foo']|expand|map(attribute='entity_id')|list }}", - # Non existing function 'no_such_function' is not detected by Jinja2 - "{{ no_such_function('group.foo')|map(attribute='entity_id')|list }}", ) for value in options: schema(value) @@ -763,11 +712,8 @@ async def test_dynamic_template_no_hass(hass: HomeAssistant) -> None: options = ( "{{ beer }}", "{% if 1 == 1 %}Hello{% else %}World{% endif %}", - # Function 'expand' added as an extension by Home Assistant, no error - # because non existing functions are not detected by Jinja2 + # Function added as an extension by Home Assistant "{{ expand('group.foo')|map(attribute='entity_id')|list }}", - # Non existing function 'no_such_function' is not detected by Jinja2 - "{{ no_such_function('group.foo')|map(attribute='entity_id')|list }}", ) for value in options: await hass.async_add_executor_job(schema, value) @@ -919,7 +865,7 @@ def schema(): @pytest.fixture -def version(monkeypatch: pytest.MonkeyPatch) -> None: +def version(monkeypatch): """Patch the version used for testing to 0.5.0.""" monkeypatch.setattr(homeassistant.const, "__version__", "0.5.0") @@ -1019,7 +965,7 @@ def test_deprecated_with_replacement_key( assert ( "The 'mars' option is deprecated, please replace it with 'jupiter'" ) in caplog.text - assert output == {"jupiter": True} + assert {"jupiter": True} == output caplog.clear() assert len(caplog.records) == 0 @@ -1090,7 +1036,7 @@ def test_deprecated_with_replacement_key_and_default( assert ( "The 'mars' option is deprecated, please replace it with 'jupiter'" ) in caplog.text - assert output == {"jupiter": True} + assert {"jupiter": True} == output caplog.clear() assert len(caplog.records) == 0 @@ -1103,7 +1049,7 @@ def test_deprecated_with_replacement_key_and_default( test_data = {"venus": True} output = deprecated_schema(test_data.copy()) assert len(caplog.records) == 0 - assert output == {"venus": True, "jupiter": False} + assert {"venus": True, "jupiter": False} == output deprecated_schema_with_default = vol.All( vol.Schema( @@ -1122,7 +1068,7 @@ def test_deprecated_with_replacement_key_and_default( assert ( "The 'mars' option is deprecated, please replace it with 'jupiter'" ) in caplog.text - assert output == {"jupiter": True} + assert {"jupiter": True} == output def test_deprecated_cant_find_module() -> None: @@ -1507,7 +1453,7 @@ def test_whitespace() -> None: "T", "negative", "lock", - "tr ue", # codespell:ignore ue + "tr ue", [], [1, 2], {"one": "two"}, @@ -1753,199 +1699,3 @@ def test_determine_script_action_ambiguous() -> None: def test_determine_script_action_non_ambiguous() -> None: """Test determine script action with a non ambiguous action.""" assert cv.determine_script_action({"delay": "00:00:05"}) == "delay" - - -async def test_async_validate(hass: HomeAssistant, tmpdir: py.path.local) -> None: - """Test the async_validate helper.""" - validator_calls: dict[str, list[int]] = {} - - def _mock_validator_schema(real_func, 
*args): - calls = validator_calls.setdefault(real_func.__name__, []) - calls.append(threading.get_ident()) - return real_func(*args) - - CV_PREFIX = "homeassistant.helpers.config_validation" - with ( - patch(f"{CV_PREFIX}.isdir", wraps=partial(_mock_validator_schema, cv.isdir)), - patch(f"{CV_PREFIX}.string", wraps=partial(_mock_validator_schema, cv.string)), - ): - # Assert validation in event loop when not decorated with not_async_friendly - await cv.async_validate(hass, cv.string, "abcd") - assert validator_calls == {"string": [hass.loop_thread_id]} - validator_calls = {} - - # Assert validation in executor when decorated with not_async_friendly - await cv.async_validate(hass, cv.isdir, tmpdir) - assert validator_calls == {"isdir": [hass.loop_thread_id, ANY]} - assert validator_calls["isdir"][1] != hass.loop_thread_id - validator_calls = {} - - # Assert validation in executor when decorated with not_async_friendly - await cv.async_validate(hass, vol.All(cv.isdir, cv.string), tmpdir) - assert validator_calls == {"isdir": [hass.loop_thread_id, ANY], "string": [ANY]} - assert validator_calls["isdir"][1] != hass.loop_thread_id - assert validator_calls["string"][0] != hass.loop_thread_id - validator_calls = {} - - # Assert validation in executor when decorated with not_async_friendly - await cv.async_validate(hass, vol.All(cv.string, cv.isdir), tmpdir) - assert validator_calls == { - "isdir": [hass.loop_thread_id, ANY], - "string": [hass.loop_thread_id, ANY], - } - assert validator_calls["isdir"][1] != hass.loop_thread_id - assert validator_calls["string"][1] != hass.loop_thread_id - validator_calls = {} - - # Assert validation in event loop when not using cv.async_validate - cv.isdir(tmpdir) - assert validator_calls == {"isdir": [hass.loop_thread_id]} - validator_calls = {} - - # Assert validation in event loop when not using cv.async_validate - vol.All(cv.isdir, cv.string)(tmpdir) - assert validator_calls == { - "isdir": [hass.loop_thread_id], - "string": [hass.loop_thread_id], - } - validator_calls = {} - - # Assert validation in event loop when not using cv.async_validate - vol.All(cv.string, cv.isdir)(tmpdir) - assert validator_calls == { - "isdir": [hass.loop_thread_id], - "string": [hass.loop_thread_id], - } - validator_calls = {} - - -async def test_nested_trigger_list() -> None: - """Test triggers within nested lists are flattened.""" - - trigger_config = [ - { - "triggers": { - "platform": "event", - "event_type": "trigger_1", - }, - }, - { - "platform": "event", - "event_type": "trigger_2", - }, - {"triggers": []}, - {"triggers": None}, - { - "triggers": [ - { - "platform": "event", - "event_type": "trigger_3", - }, - { - "trigger": "event", - "event_type": "trigger_4", - }, - ], - }, - ] - - validated_triggers = TRIGGER_SCHEMA(trigger_config) - - assert validated_triggers == [ - { - "platform": "event", - "event_type": "trigger_1", - }, - { - "platform": "event", - "event_type": "trigger_2", - }, - { - "platform": "event", - "event_type": "trigger_3", - }, - { - "platform": "event", - "event_type": "trigger_4", - }, - ] - - -async def test_nested_trigger_list_extra() -> None: - """Test triggers key with extra keys is not modified.""" - - trigger_config = [ - { - "platform": "other", - "triggers": [ - { - "platform": "event", - "event_type": "trigger_1", - }, - { - "platform": "event", - "event_type": "trigger_2", - }, - ], - }, - ] - - validated_triggers = TRIGGER_SCHEMA(trigger_config) - - assert validated_triggers == [ - { - "platform": "other", - "triggers": [ - { - 
"platform": "event", - "event_type": "trigger_1", - }, - { - "platform": "event", - "event_type": "trigger_2", - }, - ], - }, - ] - - -async def test_trigger_backwards_compatibility() -> None: - """Test triggers with backwards compatibility.""" - - assert cv._trigger_pre_validator("str") == "str" - assert cv._trigger_pre_validator({"platform": "abc"}) == {"platform": "abc"} - assert cv._trigger_pre_validator({"trigger": "abc"}) == {"platform": "abc"} - with pytest.raises( - vol.Invalid, - match="Cannot specify both 'platform' and 'trigger'. Please use 'trigger' only.", - ): - cv._trigger_pre_validator({"trigger": "abc", "platform": "def"}) - with pytest.raises( - vol.Invalid, - match=re.escape("required key not provided @ data['trigger']"), - ): - cv._trigger_pre_validator({}) - - -async def test_is_entity_service_schema( - hass: HomeAssistant, -) -> None: - """Test cv.is_entity_service_schema.""" - for schema in ( - vol.Schema({"some": str}), - vol.All(vol.Schema({"some": str})), - vol.Any(vol.Schema({"some": str})), - vol.Any(cv.make_entity_service_schema({"some": str})), - ): - assert cv.is_entity_service_schema(schema) is False - - for schema in ( - cv.make_entity_service_schema({"some": str}), - vol.Schema(cv.make_entity_service_schema({"some": str})), - vol.Schema(vol.All(cv.make_entity_service_schema({"some": str}))), - vol.Schema(vol.Schema(cv.make_entity_service_schema({"some": str}))), - vol.All(cv.make_entity_service_schema({"some": str})), - vol.All(vol.All(cv.make_entity_service_schema({"some": str}))), - vol.All(vol.Schema(cv.make_entity_service_schema({"some": str}))), - ): - assert cv.is_entity_service_schema(schema) is True diff --git a/tests/helpers/test_debounce.py b/tests/helpers/test_debounce.py index 6fa758aec6e..84b3d19b6d7 100644 --- a/tests/helpers/test_debounce.py +++ b/tests/helpers/test_debounce.py @@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import debounce from homeassistant.util.dt import utcnow -from tests.common import async_fire_time_changed +from ..common import async_fire_time_changed _LOGGER = logging.getLogger(__name__) diff --git a/tests/helpers/test_deprecation.py b/tests/helpers/test_deprecation.py index 4cf7e851af3..b48e70eff82 100644 --- a/tests/helpers/test_deprecation.py +++ b/tests/helpers/test_deprecation.py @@ -13,7 +13,6 @@ from homeassistant.helpers.deprecation import ( DeprecatedAlias, DeprecatedConstant, DeprecatedConstantEnum, - EnumWithDeprecatedMembers, check_if_deprecated_constant, deprecated_class, deprecated_function, @@ -521,119 +520,3 @@ def test_dir_with_deprecated_constants( ) -> None: """Test dir() with deprecated constants.""" assert dir_with_deprecated_constants([*module_globals.keys()]) == expected - - -@pytest.mark.parametrize( - ("module_name", "extra_extra_msg"), - [ - ("homeassistant.components.hue.light", ""), # builtin integration - ( - "config.custom_components.hue.light", - ", please report it to the author of the 'hue' custom integration", - ), # custom component integration - ], -) -def test_enum_with_deprecated_members( - caplog: pytest.LogCaptureFixture, - module_name: str, - extra_extra_msg: str, -) -> None: - """Test EnumWithDeprecatedMembers.""" - filename = f"/home/paulus/{module_name.replace('.', '/')}.py" - - class TestEnum( - StrEnum, - metaclass=EnumWithDeprecatedMembers, - deprecated={ - "CATS": ("TestEnum.CATS_PER_CM", "2025.11.0"), - "DOGS": ("TestEnum.DOGS_PER_CM", None), - }, - ): - """Zoo units.""" - - CATS_PER_CM = "cats/cm" - DOGS_PER_CM = 
"dogs/cm" - CATS = "cats/cm" - DOGS = "dogs/cm" - - # mock sys.modules for homeassistant/helpers/frame.py#get_integration_frame - with ( - patch.dict(sys.modules, {module_name: Mock(__file__=filename)}), - patch( - "homeassistant.helpers.frame.linecache.getline", - return_value="await session.close()", - ), - patch( - "homeassistant.helpers.frame.get_current_frame", - return_value=extract_stack_to_frame( - [ - Mock( - filename="/home/paulus/homeassistant/core.py", - lineno="23", - line="do_something()", - ), - Mock( - filename=filename, - lineno="23", - line="await session.close()", - ), - Mock( - filename="/home/paulus/aiohue/lights.py", - lineno="2", - line="something()", - ), - ] - ), - ), - ): - TestEnum.CATS # noqa: B018 - TestEnum.DOGS # noqa: B018 - - assert len(caplog.record_tuples) == 2 - assert ( - "tests.helpers.test_deprecation", - logging.WARNING, - ( - "TestEnum.CATS was used from hue, this is a deprecated enum member which " - "will be removed in HA Core 2025.11.0. Use TestEnum.CATS_PER_CM instead" - f"{extra_extra_msg}" - ), - ) in caplog.record_tuples - assert ( - "tests.helpers.test_deprecation", - logging.WARNING, - ( - "TestEnum.DOGS was used from hue, this is a deprecated enum member. Use " - f"TestEnum.DOGS_PER_CM instead{extra_extra_msg}" - ), - ) in caplog.record_tuples - - -def test_enum_with_deprecated_members_integration_not_found( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test check_if_deprecated_constant.""" - - class TestEnum( - StrEnum, - metaclass=EnumWithDeprecatedMembers, - deprecated={ - "CATS": ("TestEnum.CATS_PER_CM", "2025.11.0"), - "DOGS": ("TestEnum.DOGS_PER_CM", None), - }, - ): - """Zoo units.""" - - CATS_PER_CM = "cats/cm" - DOGS_PER_CM = "dogs/cm" - CATS = "cats/cm" - DOGS = "dogs/cm" - - with patch( - "homeassistant.helpers.frame.get_current_frame", - side_effect=MissingIntegrationFrame, - ): - TestEnum.CATS # noqa: B018 - TestEnum.DOGS # noqa: B018 - - assert len(caplog.record_tuples) == 0 diff --git a/tests/helpers/test_device_registry.py b/tests/helpers/test_device_registry.py index 837400d502d..3a525f00870 100644 --- a/tests/helpers/test_device_registry.py +++ b/tests/helpers/test_device_registry.py @@ -2,13 +2,11 @@ from collections.abc import Iterable from contextlib import AbstractContextManager, nullcontext -from datetime import datetime from functools import partial import time from typing import Any from unittest.mock import patch -from freezegun.api import FrozenDateTimeFactory import pytest from yarl import URL @@ -21,7 +19,6 @@ from homeassistant.helpers import ( device_registry as dr, entity_registry as er, ) -from homeassistant.util.dt import utcnow from tests.common import ( MockConfigEntry, @@ -180,15 +177,12 @@ async def test_multiple_config_entries( @pytest.mark.parametrize("load_registries", [False]) -@pytest.mark.usefixtures("freezer") async def test_loading_from_storage( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: """Test loading stored devices on start.""" - created_at = "2024-01-01T00:00:00+00:00" - modified_at = "2024-02-01T00:00:00+00:00" hass_storage[dr.STORAGE_KEY] = { "version": dr.STORAGE_VERSION_MAJOR, "minor_version": dr.STORAGE_VERSION_MINOR, @@ -199,7 +193,6 @@ async def test_loading_from_storage( "config_entries": [mock_config_entry.entry_id], "configuration_url": "https://example.com/config", "connections": [["Zigbee", "01.23.45.67.89"]], - "created_at": created_at, "disabled_by": dr.DeviceEntryDisabler.USER, "entry_type": 
dr.DeviceEntryType.SERVICE, "hw_version": "hw_version", @@ -208,8 +201,6 @@ async def test_loading_from_storage( "labels": {"label1", "label2"}, "manufacturer": "manufacturer", "model": "model", - "model_id": "model_id", - "modified_at": modified_at, "name_by_user": "Test Friendly Name", "name": "name", "primary_config_entry": mock_config_entry.entry_id, @@ -222,10 +213,8 @@ async def test_loading_from_storage( { "config_entries": [mock_config_entry.entry_id], "connections": [["Zigbee", "23.45.67.89.01"]], - "created_at": created_at, "id": "bcdefghijklmn", "identifiers": [["serial", "3456ABCDEF12"]], - "modified_at": modified_at, "orphaned_timestamp": None, } ], @@ -237,16 +226,6 @@ async def test_loading_from_storage( assert len(registry.devices) == 1 assert len(registry.deleted_devices) == 1 - assert registry.deleted_devices["bcdefghijklmn"] == dr.DeletedDeviceEntry( - config_entries={mock_config_entry.entry_id}, - connections={("Zigbee", "23.45.67.89.01")}, - created_at=datetime.fromisoformat(created_at), - id="bcdefghijklmn", - identifiers={("serial", "3456ABCDEF12")}, - modified_at=datetime.fromisoformat(modified_at), - orphaned_timestamp=None, - ) - entry = registry.async_get_or_create( config_entry_id=mock_config_entry.entry_id, connections={("Zigbee", "01.23.45.67.89")}, @@ -259,7 +238,6 @@ async def test_loading_from_storage( config_entries={mock_config_entry.entry_id}, configuration_url="https://example.com/config", connections={("Zigbee", "01.23.45.67.89")}, - created_at=datetime.fromisoformat(created_at), disabled_by=dr.DeviceEntryDisabler.USER, entry_type=dr.DeviceEntryType.SERVICE, hw_version="hw_version", @@ -268,8 +246,6 @@ async def test_loading_from_storage( labels={"label1", "label2"}, manufacturer="manufacturer", model="model", - model_id="model_id", - modified_at=datetime.fromisoformat(modified_at), name_by_user="Test Friendly Name", name="name", primary_config_entry=mock_config_entry.entry_id, @@ -292,12 +268,10 @@ async def test_loading_from_storage( assert entry == dr.DeviceEntry( config_entries={mock_config_entry.entry_id}, connections={("Zigbee", "23.45.67.89.01")}, - created_at=datetime.fromisoformat(created_at), id="bcdefghijklmn", identifiers={("serial", "3456ABCDEF12")}, manufacturer="manufacturer", model="model", - modified_at=utcnow(), primary_config_entry=mock_config_entry.entry_id, ) assert entry.id == "bcdefghijklmn" @@ -307,13 +281,12 @@ async def test_loading_from_storage( @pytest.mark.parametrize("load_registries", [False]) -@pytest.mark.usefixtures("freezer") -async def test_migration_from_1_1( +async def test_migration_1_1_to_1_6( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.1.""" + """Test migration from version 1.1 to 1.6.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 1, @@ -332,7 +305,7 @@ async def test_migration_from_1_1( }, # Invalid entry type { - "config_entries": ["234567"], + "config_entries": [None], "connections": [], "entry_type": "INVALID_VALUE", "id": "invalid-entry-type", @@ -392,7 +365,6 @@ async def test_migration_from_1_1( "config_entries": [mock_config_entry.entry_id], "configuration_url": None, "connections": [["Zigbee", "01.23.45.67.89"]], - "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": "service", "hw_version": None, @@ -401,8 +373,6 @@ async def test_migration_from_1_1( "labels": [], "manufacturer": "manufacturer", "model": "model", - "model_id": None, - "modified_at": 
utcnow().isoformat(), "name": "name", "name_by_user": None, "primary_config_entry": mock_config_entry.entry_id, @@ -412,10 +382,9 @@ async def test_migration_from_1_1( }, { "area_id": None, - "config_entries": ["234567"], + "config_entries": [None], "configuration_url": None, "connections": [], - "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": None, "hw_version": None, @@ -424,8 +393,6 @@ async def test_migration_from_1_1( "labels": [], "manufacturer": None, "model": None, - "model_id": None, - "modified_at": "1970-01-01T00:00:00+00:00", "name_by_user": None, "name": None, "primary_config_entry": None, @@ -438,10 +405,8 @@ async def test_migration_from_1_1( { "config_entries": ["123456"], "connections": [], - "created_at": "1970-01-01T00:00:00+00:00", "id": "deletedid", "identifiers": [["serial", "123456ABCDFF"]], - "modified_at": "1970-01-01T00:00:00+00:00", "orphaned_timestamp": None, } ], @@ -450,13 +415,12 @@ async def test_migration_from_1_1( @pytest.mark.parametrize("load_registries", [False]) -@pytest.mark.usefixtures("freezer") -async def test_migration_from_1_2( +async def test_migration_1_2_to_1_6( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.2.""" + """Test migration from version 1.2 to 1.6.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 2, @@ -474,7 +438,6 @@ async def test_migration_from_1_2( "identifiers": [["serial", "123456ABCDEF"]], "manufacturer": "manufacturer", "model": "model", - "modified_at": utcnow().isoformat(), "name": "name", "name_by_user": None, "sw_version": "version", @@ -482,7 +445,7 @@ async def test_migration_from_1_2( }, { "area_id": None, - "config_entries": ["234567"], + "config_entries": [None], "configuration_url": None, "connections": [], "disabled_by": None, @@ -491,7 +454,6 @@ async def test_migration_from_1_2( "identifiers": [["serial", "mock-id-invalid-entry"]], "manufacturer": None, "model": None, - "modified_at": "1970-01-01T00:00:00+00:00", "name_by_user": None, "name": None, "sw_version": None, @@ -536,7 +498,6 @@ async def test_migration_from_1_2( "config_entries": [mock_config_entry.entry_id], "configuration_url": None, "connections": [["Zigbee", "01.23.45.67.89"]], - "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": "service", "hw_version": None, @@ -545,8 +506,6 @@ async def test_migration_from_1_2( "labels": [], "manufacturer": "manufacturer", "model": "model", - "model_id": None, - "modified_at": utcnow().isoformat(), "name": "name", "name_by_user": None, "primary_config_entry": mock_config_entry.entry_id, @@ -556,10 +515,9 @@ async def test_migration_from_1_2( }, { "area_id": None, - "config_entries": ["234567"], + "config_entries": [None], "configuration_url": None, "connections": [], - "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": None, "hw_version": None, @@ -568,8 +526,6 @@ async def test_migration_from_1_2( "labels": [], "manufacturer": None, "model": None, - "model_id": None, - "modified_at": "1970-01-01T00:00:00+00:00", "name_by_user": None, "name": None, "primary_config_entry": None, @@ -584,13 +540,12 @@ async def test_migration_from_1_2( @pytest.mark.parametrize("load_registries", [False]) -@pytest.mark.usefixtures("freezer") -async def test_migration_fom_1_3( +async def test_migration_1_3_to_1_6( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 
1.3.""" + """Test migration from version 1.3 to 1.6.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 3, @@ -616,7 +571,7 @@ async def test_migration_fom_1_3( }, { "area_id": None, - "config_entries": ["234567"], + "config_entries": [None], "configuration_url": None, "connections": [], "disabled_by": None, @@ -670,7 +625,6 @@ async def test_migration_fom_1_3( "config_entries": [mock_config_entry.entry_id], "configuration_url": None, "connections": [["Zigbee", "01.23.45.67.89"]], - "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": "service", "hw_version": "hw_version", @@ -679,8 +633,6 @@ async def test_migration_fom_1_3( "labels": [], "manufacturer": "manufacturer", "model": "model", - "model_id": None, - "modified_at": utcnow().isoformat(), "name": "name", "name_by_user": None, "primary_config_entry": mock_config_entry.entry_id, @@ -690,10 +642,9 @@ async def test_migration_fom_1_3( }, { "area_id": None, - "config_entries": ["234567"], + "config_entries": [None], "configuration_url": None, "connections": [], - "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": None, "hw_version": None, @@ -702,8 +653,6 @@ async def test_migration_fom_1_3( "labels": [], "manufacturer": None, "model": None, - "model_id": None, - "modified_at": "1970-01-01T00:00:00+00:00", "name": None, "name_by_user": None, "primary_config_entry": None, @@ -718,13 +667,12 @@ async def test_migration_fom_1_3( @pytest.mark.parametrize("load_registries", [False]) -@pytest.mark.usefixtures("freezer") -async def test_migration_from_1_4( +async def test_migration_1_4_to_1_6( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.4.""" + """Test migration from version 1.4 to 1.6.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 4, @@ -751,7 +699,7 @@ async def test_migration_from_1_4( }, { "area_id": None, - "config_entries": ["234567"], + "config_entries": [None], "configuration_url": None, "connections": [], "disabled_by": None, @@ -806,7 +754,6 @@ async def test_migration_from_1_4( "config_entries": [mock_config_entry.entry_id], "configuration_url": None, "connections": [["Zigbee", "01.23.45.67.89"]], - "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": "service", "hw_version": "hw_version", @@ -815,8 +762,6 @@ async def test_migration_from_1_4( "labels": [], "manufacturer": "manufacturer", "model": "model", - "model_id": None, - "modified_at": utcnow().isoformat(), "name": "name", "name_by_user": None, "primary_config_entry": mock_config_entry.entry_id, @@ -826,10 +771,9 @@ async def test_migration_from_1_4( }, { "area_id": None, - "config_entries": ["234567"], + "config_entries": [None], "configuration_url": None, "connections": [], - "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": None, "hw_version": None, @@ -838,8 +782,6 @@ async def test_migration_from_1_4( "labels": [], "manufacturer": None, "model": None, - "model_id": None, - "modified_at": "1970-01-01T00:00:00+00:00", "name_by_user": None, "name": None, "primary_config_entry": None, @@ -854,13 +796,12 @@ async def test_migration_from_1_4( @pytest.mark.parametrize("load_registries", [False]) -@pytest.mark.usefixtures("freezer") -async def test_migration_from_1_5( +async def test_migration_1_5_to_1_6( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.5.""" + """Test 
migration from version 1.5 to 1.6.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 5, @@ -888,7 +829,7 @@ async def test_migration_from_1_5( }, { "area_id": None, - "config_entries": ["234567"], + "config_entries": [None], "configuration_url": None, "connections": [], "disabled_by": None, @@ -937,72 +878,6 @@ async def test_migration_from_1_5( "version": dr.STORAGE_VERSION_MAJOR, "minor_version": dr.STORAGE_VERSION_MINOR, "key": dr.STORAGE_KEY, - "data": { - "devices": [ - { - "area_id": None, - "config_entries": [mock_config_entry.entry_id], - "configuration_url": None, - "connections": [["Zigbee", "01.23.45.67.89"]], - "created_at": "1970-01-01T00:00:00+00:00", - "disabled_by": None, - "entry_type": "service", - "hw_version": "hw_version", - "id": "abcdefghijklm", - "identifiers": [["serial", "123456ABCDEF"]], - "labels": ["blah"], - "manufacturer": "manufacturer", - "model": "model", - "name": "name", - "model_id": None, - "modified_at": utcnow().isoformat(), - "name_by_user": None, - "primary_config_entry": mock_config_entry.entry_id, - "serial_number": None, - "sw_version": "new_version", - "via_device_id": None, - }, - { - "area_id": None, - "config_entries": ["234567"], - "configuration_url": None, - "connections": [], - "created_at": "1970-01-01T00:00:00+00:00", - "disabled_by": None, - "entry_type": None, - "hw_version": None, - "id": "invalid-entry-type", - "identifiers": [["serial", "mock-id-invalid-entry"]], - "labels": ["blah"], - "manufacturer": None, - "model": None, - "model_id": None, - "modified_at": "1970-01-01T00:00:00+00:00", - "name_by_user": None, - "name": None, - "primary_config_entry": None, - "serial_number": None, - "sw_version": None, - "via_device_id": None, - }, - ], - "deleted_devices": [], - }, - } - - -@pytest.mark.parametrize("load_registries", [False]) -@pytest.mark.usefixtures("freezer") -async def test_migration_from_1_6( - hass: HomeAssistant, - hass_storage: dict[str, Any], - mock_config_entry: MockConfigEntry, -) -> None: - """Test migration from version 1.6.""" - hass_storage[dr.STORAGE_KEY] = { - "version": 1, - "minor_version": 6, - "key": dr.STORAGE_KEY, "data": { "devices": [ { @@ -1027,7 +902,7 @@ async def test_migration_from_1_6( }, { "area_id": None, - "config_entries": ["234567"], + "config_entries": [None], "configuration_url": None, "connections": [], "disabled_by": None, @@ -1039,228 +914,6 @@ async def test_migration_from_1_6( "manufacturer": None, "model": None, "name_by_user": None, - "primary_config_entry": None, - "name": None, - "serial_number": None, - "sw_version": None, - "via_device_id": None, - }, - ], - "deleted_devices": [], - }, - } - - await dr.async_load(hass) - registry = dr.async_get(hass) - - # Test data was loaded - entry = registry.async_get_or_create( - config_entry_id=mock_config_entry.entry_id, - connections={("Zigbee", "01.23.45.67.89")}, - identifiers={("serial", "123456ABCDEF")}, - ) - assert entry.id == "abcdefghijklm" - - # Update to trigger a store - entry = registry.async_get_or_create( - config_entry_id=mock_config_entry.entry_id, - connections={("Zigbee", "01.23.45.67.89")}, - identifiers={("serial", "123456ABCDEF")}, - sw_version="new_version", - ) - assert entry.id == "abcdefghijklm" - - # Check we store migrated data - await flush_store(registry._store) - - assert hass_storage[dr.STORAGE_KEY] == { - "version": dr.STORAGE_VERSION_MAJOR, - "minor_version": dr.STORAGE_VERSION_MINOR, - "key": dr.STORAGE_KEY, - "data": { - "devices": [ - { - "area_id": None, - "config_entries": 
[mock_config_entry.entry_id], - "configuration_url": None, - "connections": [["Zigbee", "01.23.45.67.89"]], - "created_at": "1970-01-01T00:00:00+00:00", - "disabled_by": None, - "entry_type": "service", - "hw_version": "hw_version", - "id": "abcdefghijklm", - "identifiers": [["serial", "123456ABCDEF"]], - "labels": ["blah"], - "manufacturer": "manufacturer", - "model": "model", - "name": "name", - "model_id": None, - "modified_at": "1970-01-01T00:00:00+00:00", - "name_by_user": None, - "primary_config_entry": mock_config_entry.entry_id, - "serial_number": None, - "sw_version": "new_version", - "via_device_id": None, - }, - { - "area_id": None, - "config_entries": ["234567"], - "configuration_url": None, - "connections": [], - "created_at": "1970-01-01T00:00:00+00:00", - "disabled_by": None, - "entry_type": None, - "hw_version": None, - "id": "invalid-entry-type", - "identifiers": [["serial", "mock-id-invalid-entry"]], - "labels": ["blah"], - "manufacturer": None, - "model": None, - "model_id": None, - "modified_at": "1970-01-01T00:00:00+00:00", - "name_by_user": None, - "name": None, - "primary_config_entry": None, - "serial_number": None, - "sw_version": None, - "via_device_id": None, - }, - ], - "deleted_devices": [], - }, - } - - -@pytest.mark.parametrize("load_registries", [False]) -@pytest.mark.usefixtures("freezer") -async def test_migration_from_1_7( - hass: HomeAssistant, - hass_storage: dict[str, Any], - mock_config_entry: MockConfigEntry, -) -> None: - """Test migration from version 1.7.""" - hass_storage[dr.STORAGE_KEY] = { - "version": 1, - "minor_version": 7, - "key": dr.STORAGE_KEY, - "data": { - "devices": [ - { - "area_id": None, - "config_entries": [mock_config_entry.entry_id], - "configuration_url": None, - "connections": [["Zigbee", "01.23.45.67.89"]], - "disabled_by": None, - "entry_type": "service", - "hw_version": "hw_version", - "id": "abcdefghijklm", - "identifiers": [["serial", "123456ABCDEF"]], - "labels": ["blah"], - "manufacturer": "manufacturer", - "model": "model", - "model_id": None, - "name": "name", - "name_by_user": None, - "primary_config_entry": mock_config_entry.entry_id, - "serial_number": None, - "sw_version": "new_version", - "via_device_id": None, - }, - { - "area_id": None, - "config_entries": ["234567"], - "configuration_url": None, - "connections": [], - "disabled_by": None, - "entry_type": None, - "hw_version": None, - "id": "invalid-entry-type", - "identifiers": [["serial", "mock-id-invalid-entry"]], - "labels": ["blah"], - "manufacturer": None, - "model": None, - "model_id": None, - "name_by_user": None, - "primary_config_entry": None, - "name": None, - "serial_number": None, - "sw_version": None, - "via_device_id": None, - }, - ], - "deleted_devices": [], - }, - } - - await dr.async_load(hass) - registry = dr.async_get(hass) - - # Test data was loaded - entry = registry.async_get_or_create( - config_entry_id=mock_config_entry.entry_id, - connections={("Zigbee", "01.23.45.67.89")}, - identifiers={("serial", "123456ABCDEF")}, - ) - assert entry.id == "abcdefghijklm" - - # Update to trigger a store - entry = registry.async_get_or_create( - config_entry_id=mock_config_entry.entry_id, - connections={("Zigbee", "01.23.45.67.89")}, - identifiers={("serial", "123456ABCDEF")}, - sw_version="new_version", - ) - assert entry.id == "abcdefghijklm" - - # Check we store migrated data - await flush_store(registry._store) - - assert hass_storage[dr.STORAGE_KEY] == { - "version": dr.STORAGE_VERSION_MAJOR, - "minor_version": dr.STORAGE_VERSION_MINOR, - "key": 
dr.STORAGE_KEY, - "data": { - "devices": [ - { - "area_id": None, - "config_entries": [mock_config_entry.entry_id], - "configuration_url": None, - "connections": [["Zigbee", "01.23.45.67.89"]], - "created_at": "1970-01-01T00:00:00+00:00", - "disabled_by": None, - "entry_type": "service", - "hw_version": "hw_version", - "id": "abcdefghijklm", - "identifiers": [["serial", "123456ABCDEF"]], - "labels": ["blah"], - "manufacturer": "manufacturer", - "model": "model", - "name": "name", - "model_id": None, - "modified_at": "1970-01-01T00:00:00+00:00", - "name_by_user": None, - "primary_config_entry": mock_config_entry.entry_id, - "serial_number": None, - "sw_version": "new_version", - "via_device_id": None, - }, - { - "area_id": None, - "config_entries": ["234567"], - "configuration_url": None, - "connections": [], - "created_at": "1970-01-01T00:00:00+00:00", - "disabled_by": None, - "entry_type": None, - "hw_version": None, - "id": "invalid-entry-type", - "identifiers": [["serial", "mock-id-invalid-entry"]], - "labels": ["blah"], - "manufacturer": None, - "model": None, - "model_id": None, - "modified_at": "1970-01-01T00:00:00+00:00", - "name_by_user": None, "name": None, "primary_config_entry": None, "serial_number": None, @@ -1746,11 +1399,8 @@ async def test_update( hass: HomeAssistant, device_registry: dr.DeviceRegistry, mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, ) -> None: """Verify that we can update some attributes of a device.""" - created_at = datetime.fromisoformat("2024-01-01T01:00:00+00:00") - freezer.move_to(created_at) update_events = async_capture_events(hass, dr.EVENT_DEVICE_REGISTRY_UPDATED) entry = device_registry.async_get_or_create( config_entry_id=mock_config_entry.entry_id, @@ -1762,11 +1412,7 @@ async def test_update( assert not entry.area_id assert not entry.labels assert not entry.name_by_user - assert entry.created_at == created_at - assert entry.modified_at == created_at - modified_at = datetime.fromisoformat("2024-02-01T01:00:00+00:00") - freezer.move_to(modified_at) with patch.object(device_registry, "async_schedule_save") as mock_save: updated_entry = device_registry.async_update_device( entry.id, @@ -1778,7 +1424,6 @@ async def test_update( labels={"label1", "label2"}, manufacturer="Test Producer", model="Test Model", - model_id="Test Model Name", name_by_user="Test Friendly Name", name="name", new_connections=new_connections, @@ -1796,7 +1441,6 @@ async def test_update( config_entries={mock_config_entry.entry_id}, configuration_url="https://example.com/config", connections={("mac", "65:43:21:fe:dc:ba")}, - created_at=created_at, disabled_by=dr.DeviceEntryDisabler.USER, entry_type=dr.DeviceEntryType.SERVICE, hw_version="hw_version", @@ -1805,8 +1449,6 @@ async def test_update( labels={"label1", "label2"}, manufacturer="Test Producer", model="Test Model", - model_id="Test Model Name", - modified_at=modified_at, name_by_user="Test Friendly Name", name="name", serial_number="serial_no", @@ -1861,7 +1503,6 @@ async def test_update( "labels": set(), "manufacturer": None, "model": None, - "model_id": None, "name": None, "name_by_user": None, "serial_number": None, @@ -2825,7 +2466,6 @@ async def test_loading_invalid_configuration_url_from_storage( "config_entries": ["1234"], "configuration_url": "invalid", "connections": [], - "created_at": "2024-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": dr.DeviceEntryType.SERVICE, "hw_version": None, @@ -2834,8 +2474,6 @@ async def test_loading_invalid_configuration_url_from_storage( "labels": 
[], "manufacturer": None, "model": None, - "model_id": None, - "modified_at": "2024-02-01T00:00:00+00:00", "name_by_user": None, "name": None, "primary_config_entry": "1234", diff --git a/tests/helpers/test_discovery.py b/tests/helpers/test_discovery.py index a66ac7474e3..100b50e2749 100644 --- a/tests/helpers/test_discovery.py +++ b/tests/helpers/test_discovery.py @@ -9,8 +9,6 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import discovery from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from tests.common import MockModule, MockPlatform, mock_integration, mock_platform @@ -117,7 +115,7 @@ async def test_circular_import(hass: HomeAssistant) -> None: component_calls = [] platform_calls = [] - def component_setup(hass: HomeAssistant, config: ConfigType) -> bool: + def component_setup(hass, config): """Set up mock component.""" discovery.load_platform( hass, Platform.SWITCH, "test_circular", {"key": "value"}, config @@ -125,12 +123,7 @@ async def test_circular_import(hass: HomeAssistant) -> None: component_calls.append(1) return True - def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities_callback: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, - ) -> None: + def setup_platform(hass, config, add_entities_callback, discovery_info=None): """Set up mock platform.""" platform_calls.append("disc" if discovery_info else "component") @@ -169,14 +162,14 @@ async def test_1st_discovers_2nd_component(hass: HomeAssistant) -> None: """ component_calls = [] - async def component1_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def component1_setup(hass, config): """Set up mock component.""" await discovery.async_discover( hass, "test_component2", {}, "test_component2", {} ) return True - def component2_setup(hass: HomeAssistant, config: ConfigType) -> bool: + def component2_setup(hass, config): """Set up mock component.""" component_calls.append(1) return True diff --git a/tests/helpers/test_discovery_flow.py b/tests/helpers/test_discovery_flow.py index dde0f209706..9c2249ac17f 100644 --- a/tests/helpers/test_discovery_flow.py +++ b/tests/helpers/test_discovery_flow.py @@ -1,15 +1,13 @@ """Test the discovery flow helper.""" -from collections.abc import Generator from unittest.mock import AsyncMock, call, patch import pytest +from typing_extensions import Generator from homeassistant import config_entries -from homeassistant.const import EVENT_HOMEASSISTANT_STARTED -from homeassistant.core import CoreState, HomeAssistant -from homeassistant.helpers import discovery_flow, json as json_helper -from homeassistant.helpers.discovery_flow import DiscoveryKey +from homeassistant.core import EVENT_HOMEASSISTANT_STARTED, CoreState, HomeAssistant +from homeassistant.helpers import discovery_flow @pytest.fixture @@ -21,29 +19,8 @@ def mock_flow_init(hass: HomeAssistant) -> Generator[AsyncMock]: yield mock_init -@pytest.mark.parametrize( - ("discovery_key", "context"), - [ - (None, {}), - ( - DiscoveryKey(domain="test", key="string_key", version=1), - {"discovery_key": DiscoveryKey(domain="test", key="string_key", version=1)}, - ), - ( - DiscoveryKey(domain="test", key=("one", "two"), version=1), - { - "discovery_key": DiscoveryKey( - domain="test", key=("one", "two"), version=1 - ) - }, - ), - ], -) async def 
test_async_create_flow( - hass: HomeAssistant, - mock_flow_init: AsyncMock, - discovery_key: DiscoveryKey | None, - context: {}, + hass: HomeAssistant, mock_flow_init: AsyncMock ) -> None: """Test we can create a flow.""" discovery_flow.async_create_flow( @@ -51,12 +28,11 @@ async def test_async_create_flow( "hue", {"source": config_entries.SOURCE_HOMEKIT}, {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - discovery_key=discovery_key, ) assert mock_flow_init.mock_calls == [ call( "hue", - context={"source": "homekit"} | context, + context={"source": "homekit"}, data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, ) ] @@ -91,7 +67,7 @@ async def test_async_create_flow_checks_existing_flows_after_startup( """Test existing flows prevent an identical ones from being after startup.""" hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) with patch( - "homeassistant.config_entries.ConfigEntriesFlowManager.async_has_matching_discovery_flow", + "homeassistant.data_entry_flow.FlowManager.async_has_matching_flow", return_value=True, ): discovery_flow.async_create_flow( @@ -141,16 +117,3 @@ async def test_async_create_flow_does_nothing_after_stop( {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, ) assert len(mock_flow_init.mock_calls) == 0 - - -@pytest.mark.parametrize("key", ["test", ("blah", "bleh")]) -def test_discovery_key_serialize_deserialize(key: str | tuple[str]) -> None: - """Test serialize and deserialize discovery key.""" - discovery_key_1 = discovery_flow.DiscoveryKey( - domain="test_domain", key=key, version=1 - ) - serialized = json_helper.json_dumps(discovery_key_1) - assert ( - discovery_flow.DiscoveryKey.from_json_dict(json_helper.json_loads(serialized)) - == discovery_key_1 - ) diff --git a/tests/helpers/test_dispatcher.py b/tests/helpers/test_dispatcher.py index edd18d54db4..c2c8663f47c 100644 --- a/tests/helpers/test_dispatcher.py +++ b/tests/helpers/test_dispatcher.py @@ -73,7 +73,7 @@ async def test_signal_type_format(hass: HomeAssistant) -> None: assert calls == [("Hello", 2)] # Test compatibility with string keys - async_dispatcher_send(hass, "test-unique-id", "x", 4) + async_dispatcher_send(hass, "test-{}".format("unique-id"), "x", 4) await hass.async_block_till_done() assert calls == [("Hello", 2), ("x", 4)] @@ -188,7 +188,8 @@ async def test_callback_exception_gets_logged( @callback def bad_handler(*args): """Record calls.""" - raise Exception("This is a bad message callback") # noqa: TRY002 + # pylint: disable-next=broad-exception-raised + raise Exception("This is a bad message callback") # wrap in partial to test message logging. async_dispatcher_connect(hass, "test", partial(bad_handler)) @@ -208,7 +209,8 @@ async def test_coro_exception_gets_logged( async def bad_async_handler(*args): """Record calls.""" - raise Exception("This is a bad message in a coro") # noqa: TRY002 + # pylint: disable-next=broad-exception-raised + raise Exception("This is a bad message in a coro") # wrap in partial to test message logging. 
async_dispatcher_connect(hass, "test", bad_async_handler) diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index 2bf441f70fd..f76b8555580 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -5,37 +5,34 @@ from collections.abc import Iterable import dataclasses from datetime import timedelta from enum import IntFlag +from functools import cached_property import logging import threading from typing import Any from unittest.mock import MagicMock, PropertyMock, patch from freezegun.api import FrozenDateTimeFactory -from propcache import cached_property import pytest from syrupy.assertion import SnapshotAssertion import voluptuous as vol -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, STATE_UNAVAILABLE, STATE_UNKNOWN, - EntityCategory, ) from homeassistant.core import ( Context, HassJobType, HomeAssistant, + HomeAssistantError, ReleaseChannel, callback, ) -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity -from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import UNDEFINED, UndefinedType from tests.common import ( @@ -925,13 +922,13 @@ async def test_entity_category_property(hass: HomeAssistant) -> None: key="abc", entity_category="ignore_me" ) mock_entity1.entity_id = "hello.world" - mock_entity1._attr_entity_category = EntityCategory.CONFIG + mock_entity1._attr_entity_category = entity.EntityCategory.CONFIG assert mock_entity1.entity_category == "config" mock_entity2 = entity.Entity() mock_entity2.hass = hass mock_entity2.entity_description = entity.EntityDescription( - key="abc", entity_category=EntityCategory.CONFIG + key="abc", entity_category=entity.EntityCategory.CONFIG ) mock_entity2.entity_id = "hello.world" assert mock_entity2.entity_category == "config" @@ -940,8 +937,8 @@ async def test_entity_category_property(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("value", "expected"), [ - ("config", EntityCategory.CONFIG), - ("diagnostic", EntityCategory.DIAGNOSTIC), + ("config", entity.EntityCategory.CONFIG), + ("diagnostic", entity.EntityCategory.DIAGNOSTIC), ], ) def test_entity_category_schema(value, expected) -> None: @@ -949,7 +946,7 @@ def test_entity_category_schema(value, expected) -> None: schema = vol.Schema(entity.ENTITY_CATEGORIES_SCHEMA) result = schema(value) assert result == expected - assert isinstance(result, EntityCategory) + assert isinstance(result, entity.EntityCategory) @pytest.mark.parametrize("value", [None, "non_existing"]) @@ -983,13 +980,10 @@ async def _test_friendly_name( ) -> None: """Test friendly name.""" - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities([ent]) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1311,13 +1305,10 @@ async def test_entity_name_translation_placeholder_errors( """Return all backend translations.""" return translations[language] - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + 
async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities([ent]) + return True ent = MockEntity( unique_id="qwer", @@ -1539,11 +1530,7 @@ async def test_friendly_name_updated( ) -> None: """Test friendly name is updated when device or entity registry updates.""" - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities( [ @@ -1559,6 +1546,7 @@ async def test_friendly_name_updated( ), ] ) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2314,12 +2302,7 @@ async def test_update_capabilities_too_often_cooldown( @pytest.mark.parametrize( - ("property", "default_value", "values"), - [ - ("attribution", None, ["abcd", "efgh"]), - ("attribution", None, [True, 1]), - ("attribution", None, [1.0, 1]), - ], + ("property", "default_value", "values"), [("attribution", None, ["abcd", "efgh"])] ) async def test_cached_entity_properties( hass: HomeAssistant, property: str, default_value: Any, values: Any @@ -2328,30 +2311,22 @@ async def test_cached_entity_properties( ent1 = entity.Entity() ent2 = entity.Entity() assert getattr(ent1, property) == default_value - assert type(getattr(ent1, property)) is type(default_value) assert getattr(ent2, property) == default_value - assert type(getattr(ent2, property)) is type(default_value) # Test set setattr(ent1, f"_attr_{property}", values[0]) assert getattr(ent1, property) == values[0] - assert type(getattr(ent1, property)) is type(values[0]) assert getattr(ent2, property) == default_value - assert type(getattr(ent2, property)) is type(default_value) # Test update setattr(ent1, f"_attr_{property}", values[1]) assert getattr(ent1, property) == values[1] - assert type(getattr(ent1, property)) is type(values[1]) assert getattr(ent2, property) == default_value - assert type(getattr(ent2, property)) is type(default_value) # Test delete delattr(ent1, f"_attr_{property}") assert getattr(ent1, property) == default_value - assert type(getattr(ent1, property)) is type(default_value) assert getattr(ent2, property) == default_value - assert type(getattr(ent2, property)) is type(default_value) async def test_cached_entity_property_delete_attr(hass: HomeAssistant) -> None: diff --git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index 9723b91eb9a..32ce740edb2 100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -23,7 +23,7 @@ from homeassistant.core import ( callback, ) from homeassistant.exceptions import HomeAssistantError, PlatformNotReady -from homeassistant.helpers import config_validation as cv, discovery +from homeassistant.helpers import discovery from homeassistant.helpers.entity_component import EntityComponent, async_update_entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -117,7 +117,7 @@ async def test_setup_does_discovery( await hass.async_block_till_done() assert mock_setup.called - assert mock_setup.call_args[0] == ("platform_test", {}, {"msg": "discovery_info"}) + assert ("platform_test", {}, {"msg": "discovery_info"}) == mock_setup.call_args[0] async def test_set_scan_interval_via_config(hass: HomeAssistant) -> None: @@ -191,9 +191,9 
@@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non call_1 = ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) - assert sorted( + assert ["test_domain.test_1", "test_domain.test_3"] == sorted( ent.entity_id for ent in (await component.async_extract_from_service(call_1)) - ) == ["test_domain.test_1", "test_domain.test_3"] + ) call_2 = ServiceCall( "test", @@ -201,9 +201,9 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) - assert sorted( + assert ["test_domain.test_3"] == sorted( ent.entity_id for ent in (await component.async_extract_from_service(call_2)) - ) == ["test_domain.test_3"] + ) async def test_platform_not_ready(hass: HomeAssistant) -> None: @@ -288,9 +288,9 @@ async def test_extract_from_service_filter_out_non_existing_entities( {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) - assert [ + assert ["test_domain.test_2"] == [ ent.entity_id for ent in await component.async_extract_from_service(call) - ] == ["test_domain.test_2"] + ] async def test_extract_from_service_no_group_expand(hass: HomeAssistant) -> None: @@ -467,11 +467,8 @@ async def test_extract_all_omit_entity_id( call = ServiceCall("test", "service") - assert ( - sorted( - ent.entity_id for ent in await component.async_extract_from_service(call) - ) - == [] + assert [] == sorted( + ent.entity_id for ent in await component.async_extract_from_service(call) ) @@ -487,27 +484,15 @@ async def test_extract_all_use_match_all( call = ServiceCall("test", "service", {"entity_id": "all"}) - assert sorted( + assert ["test_domain.test_1", "test_domain.test_2"] == sorted( ent.entity_id for ent in await component.async_extract_from_service(call) - ) == ["test_domain.test_1", "test_domain.test_2"] + ) assert ( "Not passing an entity ID to a service to target all entities is deprecated" ) not in caplog.text -@pytest.mark.parametrize( - ("schema", "service_data"), - [ - ({"some": str}, {"some": "data"}), - ({}, {}), - (None, {}), - ], -) -async def test_register_entity_service( - hass: HomeAssistant, - schema: dict | None, - service_data: dict, -) -> None: +async def test_register_entity_service(hass: HomeAssistant) -> None: """Test registering an enttiy service and calling it.""" entity = MockEntity(entity_id=f"{DOMAIN}.entity") calls = [] @@ -522,7 +507,9 @@ async def test_register_entity_service( await component.async_setup({}) await component.async_add_entities([entity]) - component.async_register_entity_service("hello", schema, "async_called_by_service") + component.async_register_entity_service( + "hello", {"some": str}, "async_called_by_service" + ) with pytest.raises(vol.Invalid): await hass.services.async_call( @@ -534,57 +521,28 @@ async def test_register_entity_service( assert len(calls) == 0 await hass.services.async_call( - DOMAIN, "hello", {"entity_id": entity.entity_id} | service_data, blocking=True + DOMAIN, "hello", {"entity_id": entity.entity_id, "some": "data"}, blocking=True ) assert len(calls) == 1 - assert calls[0] == service_data + assert calls[0] == {"some": "data"} await hass.services.async_call( - DOMAIN, "hello", {"entity_id": ENTITY_MATCH_ALL} | service_data, blocking=True + DOMAIN, "hello", {"entity_id": ENTITY_MATCH_ALL, "some": "data"}, blocking=True ) assert len(calls) == 2 - assert calls[1] == service_data + assert calls[1] == {"some": "data"} await hass.services.async_call( - DOMAIN, "hello", {"entity_id": ENTITY_MATCH_NONE} | 
service_data, blocking=True + DOMAIN, "hello", {"entity_id": ENTITY_MATCH_NONE, "some": "data"}, blocking=True ) assert len(calls) == 2 await hass.services.async_call( - DOMAIN, "hello", {"area_id": ENTITY_MATCH_NONE} | service_data, blocking=True + DOMAIN, "hello", {"area_id": ENTITY_MATCH_NONE, "some": "data"}, blocking=True ) assert len(calls) == 2 -async def test_register_entity_service_non_entity_service_schema( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test attempting to register a service with a non entity service schema.""" - component = EntityComponent(_LOGGER, DOMAIN, hass) - expected_message = "registers an entity service with a non entity service schema" - - for idx, schema in enumerate( - ( - vol.Schema({"some": str}), - vol.All(vol.Schema({"some": str})), - vol.Any(vol.Schema({"some": str})), - ) - ): - component.async_register_entity_service(f"hello_{idx}", schema, Mock()) - assert expected_message in caplog.text - caplog.clear() - - for idx, schema in enumerate( - ( - cv.make_entity_service_schema({"some": str}), - vol.Schema(cv.make_entity_service_schema({"some": str})), - vol.All(cv.make_entity_service_schema({"some": str})), - ) - ): - component.async_register_entity_service(f"test_service_{idx}", schema, Mock()) - assert expected_message not in caplog.text - - async def test_register_entity_service_response_data(hass: HomeAssistant) -> None: """Test an entity service that does support response data.""" entity = MockEntity(entity_id=f"{DOMAIN}.entity") diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index e80006dff84..4e761a21e8c 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -8,11 +8,8 @@ from typing import Any from unittest.mock import ANY, AsyncMock, Mock, patch import pytest -from syrupy.assertion import SnapshotAssertion -import voluptuous as vol -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, PERCENTAGE, EntityCategory +from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, PERCENTAGE from homeassistant.core import ( CoreState, HomeAssistant, @@ -24,19 +21,21 @@ from homeassistant.core import ( from homeassistant.exceptions import HomeAssistantError, PlatformNotReady from homeassistant.helpers import ( area_registry as ar, - config_validation as cv, device_registry as dr, entity_platform, entity_registry as er, issue_registry as ir, ) -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity, async_generate_entity_id +from homeassistant.helpers.entity import ( + DeviceInfo, + Entity, + EntityCategory, + async_generate_entity_id, +) from homeassistant.helpers.entity_component import ( DEFAULT_SCAN_INTERVAL, EntityComponent, ) -from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util @@ -859,13 +858,10 @@ async def test_setup_entry( ) -> None: """Test we can setup an entry.""" - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities([MockEntity(name="test1", unique_id="unique")]) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = 
MockConfigEntry(entry_id="super-mock-id") @@ -879,9 +875,9 @@ async def test_setup_entry( assert full_name in hass.config.components assert len(hass.states.async_entity_ids()) == 1 assert len(entity_registry.entities) == 1 - - entity_registry_entry = entity_registry.entities["test_domain.test1"] - assert entity_registry_entry.config_entry_id == "super-mock-id" + assert ( + entity_registry.entities["test_domain.test1"].config_entry_id == "super-mock-id" + ) async def test_setup_entry_platform_not_ready( @@ -1132,9 +1128,7 @@ async def test_add_entity_with_invalid_id( async def test_device_info_called( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, + hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: """Test device info is forwarded correctly.""" config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1147,11 +1141,7 @@ async def test_device_info_called( model="via", ) - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities( [ @@ -1176,6 +1166,7 @@ async def test_device_info_called( ), ] ) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1188,9 +1179,18 @@ async def test_device_info_called( assert len(hass.states.async_entity_ids()) == 2 device = device_registry.async_get_device(identifiers={("hue", "1234")}) - assert device == snapshot - assert device.config_entries == {config_entry.entry_id} + assert device is not None + assert device.identifiers == {("hue", "1234")} + assert device.configuration_url == "http://192.168.0.100/config" + assert device.connections == {(dr.CONNECTION_NETWORK_MAC, "abcd")} + assert device.entry_type is dr.DeviceEntryType.SERVICE + assert device.manufacturer == "test-manuf" + assert device.model == "test-model" + assert device.name == "test-name" assert device.primary_config_entry == config_entry.entry_id + assert device.suggested_area == "Heliport" + assert device.sw_version == "test-sw" + assert device.hw_version == "test-hw" assert device.via_device_id == via.id @@ -1210,11 +1210,7 @@ async def test_device_info_not_overrides( assert device.manufacturer == "test-manufacturer" assert device.model == "test-model" - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities( [ @@ -1229,6 +1225,7 @@ async def test_device_info_not_overrides( ) ] ) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1263,11 +1260,7 @@ async def test_device_info_homeassistant_url( model="via", ) - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities( [ @@ -1281,6 +1274,7 @@ async def test_device_info_homeassistant_url( ), ] ) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1315,11 +1309,7 @@ async def test_device_info_change_to_no_url( configuration_url="homeassistant://config/mqtt", ) - async def async_setup_entry( - hass: 
HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities( [ @@ -1333,6 +1323,7 @@ async def test_device_info_change_to_no_url( ), ] ) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1387,13 +1378,10 @@ async def test_entity_disabled_by_device( unique_id="disabled", device_info=DeviceInfo(connections=connections) ) - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities([entity_disabled]) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id", domain=DOMAIN) @@ -1438,7 +1426,6 @@ async def test_entity_hidden_by_integration( assert entry_hidden.hidden_by is er.RegistryEntryHider.INTEGRATION -@pytest.mark.usefixtures("freezer") async def test_entity_info_added_to_entity_registry( hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: @@ -1467,13 +1454,11 @@ async def test_entity_info_added_to_entity_registry( "default", "test_domain", capabilities={"max": 100}, - created_at=dt_util.utcnow(), device_class=None, entity_category=EntityCategory.CONFIG, has_entity_name=True, icon=None, id=ANY, - modified_at=dt_util.utcnow(), name=None, original_device_class="mock-device-class", original_icon="nice:icon", @@ -1776,67 +1761,6 @@ async def test_register_entity_service_limited_to_matching_platforms( } -async def test_register_entity_service_none_schema( - hass: HomeAssistant, -) -> None: - """Test registering a service with schema set to None.""" - entity_platform = MockEntityPlatform( - hass, domain="mock_integration", platform_name="mock_platform", platform=None - ) - entity1 = SlowEntity(name="entity_1") - entity2 = SlowEntity(name="entity_1") - await entity_platform.async_add_entities([entity1, entity2]) - - entities = [] - - @callback - def handle_service(entity, *_): - entities.append(entity) - - entity_platform.async_register_entity_service("hello", None, handle_service) - - await hass.services.async_call( - "mock_platform", "hello", {"entity_id": "all"}, blocking=True - ) - - assert len(entities) == 2 - assert entity1 in entities - assert entity2 in entities - - -async def test_register_entity_service_non_entity_service_schema( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test attempting to register a service with a non entity service schema.""" - entity_platform = MockEntityPlatform( - hass, domain="mock_integration", platform_name="mock_platform", platform=None - ) - expected_message = "registers an entity service with a non entity service schema" - - for idx, schema in enumerate( - ( - vol.Schema({"some": str}), - vol.All(vol.Schema({"some": str})), - vol.Any(vol.Schema({"some": str})), - ) - ): - entity_platform.async_register_entity_service(f"hello_{idx}", schema, Mock()) - assert expected_message in caplog.text - caplog.clear() - - for idx, schema in enumerate( - ( - cv.make_entity_service_schema({"some": str}), - vol.Schema(cv.make_entity_service_schema({"some": str})), - vol.All(cv.make_entity_service_schema({"some": str})), - ) - ): - entity_platform.async_register_entity_service( - f"test_service_{idx}", schema, Mock() - ) - assert 
expected_message not in caplog.text - - @pytest.mark.parametrize("update_before_add", [True, False]) async def test_invalid_entity_id( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, update_before_add: bool @@ -1873,16 +1797,13 @@ async def test_setup_entry_with_entities_that_block_forever( ) -> None: """Test we cancel adding entities when we reach the timeout.""" - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities( [MockBlockingEntity(name="test1", unique_id="unique")], update_before_add=update_before_add, ) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1921,16 +1842,13 @@ async def test_cancellation_is_not_blocked( ) -> None: """Test cancellation is not blocked while adding entities.""" - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities( [MockCancellingEntity(name="test1", unique_id="unique")], update_before_add=update_before_add, ) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2018,11 +1936,7 @@ async def test_entity_name_influences_entity_id( ) -> None: """Test entity_id is influenced by entity name.""" - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities( [ @@ -2039,6 +1953,7 @@ async def test_entity_name_influences_entity_id( ], update_before_add=update_before_add, ) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2106,15 +2021,12 @@ async def test_translated_entity_name_influences_entity_id( """Return all backend translations.""" return translations[language] - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities( [TranslatedEntity(has_entity_name)], update_before_add=update_before_add ) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2194,13 +2106,10 @@ async def test_translated_device_class_name_influences_entity_id( """Return all backend translations.""" return translations[language] - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities([TranslatedDeviceClassEntity(device_class, has_entity_name)]) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2256,13 +2165,10 @@ async def test_device_name_defaulting_config_entry( _attr_unique_id = "qwer" _attr_device_info = device_info - async def async_setup_entry( - hass: 
HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities([DeviceNameEntity()]) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(title=config_entry_title, entry_id="super-mock-id") @@ -2312,13 +2218,10 @@ async def test_device_type_error_checking( _attr_unique_id = "qwer" _attr_device_info = device_info - async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: + async def async_setup_entry(hass, config_entry, async_add_entities): """Mock setup entry method.""" async_add_entities([DeviceNameEntity()]) + return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry( diff --git a/tests/helpers/test_entity_registry.py b/tests/helpers/test_entity_registry.py index 97f7e1dcc56..4dc8d79be3f 100644 --- a/tests/helpers/test_entity_registry.py +++ b/tests/helpers/test_entity_registry.py @@ -1,6 +1,6 @@ """Tests for the Entity Registry.""" -from datetime import datetime, timedelta +from datetime import timedelta from functools import partial from typing import Any from unittest.mock import patch @@ -19,10 +19,8 @@ from homeassistant.const import ( from homeassistant.core import CoreState, HomeAssistant, callback from homeassistant.exceptions import MaxLengthExceeded from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.util.dt import utc_from_timestamp from tests.common import ( - ANY, MockConfigEntry, async_capture_events, async_fire_time_changed, @@ -71,14 +69,9 @@ def test_get_or_create_suggested_object_id(entity_registry: er.EntityRegistry) - assert entry.entity_id == "light.beer" -def test_get_or_create_updates_data( - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, -) -> None: +def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: """Test that we update data in get_or_create.""" orig_config_entry = MockConfigEntry(domain="light") - created = datetime.fromisoformat("2024-02-14T12:00:00.0+00:00") - freezer.move_to(created) orig_entry = entity_registry.async_get_or_create( "light", @@ -107,7 +100,6 @@ def test_get_or_create_updates_data( "hue", capabilities={"max": 100}, config_entry_id=orig_config_entry.entry_id, - created_at=created, device_class=None, device_id="mock-dev-id", disabled_by=er.RegistryEntryDisabler.HASS, @@ -116,7 +108,6 @@ def test_get_or_create_updates_data( hidden_by=er.RegistryEntryHider.INTEGRATION, icon=None, id=orig_entry.id, - modified_at=created, name=None, original_device_class="mock-device-class", original_icon="initial-original_icon", @@ -127,8 +118,6 @@ def test_get_or_create_updates_data( ) new_config_entry = MockConfigEntry(domain="light") - modified = created + timedelta(minutes=5) - freezer.move_to(modified) new_entry = entity_registry.async_get_or_create( "light", @@ -157,7 +146,6 @@ def test_get_or_create_updates_data( area_id=None, capabilities={"new-max": 150}, config_entry_id=new_config_entry.entry_id, - created_at=created, device_class=None, device_id="new-mock-dev-id", disabled_by=er.RegistryEntryDisabler.HASS, # Should not be updated @@ -166,7 +154,6 @@ def test_get_or_create_updates_data( hidden_by=er.RegistryEntryHider.INTEGRATION, # Should not be updated icon=None, id=orig_entry.id, - modified_at=modified, name=None, 
original_device_class="new-mock-device-class", original_icon="updated-original_icon", @@ -177,8 +164,6 @@ def test_get_or_create_updates_data( ) assert set(entity_registry.async_device_ids()) == {"new-mock-dev-id"} - modified = created + timedelta(minutes=5) - freezer.move_to(modified) new_entry = entity_registry.async_get_or_create( "light", @@ -207,7 +192,6 @@ def test_get_or_create_updates_data( area_id=None, capabilities=None, config_entry_id=None, - created_at=created, device_class=None, device_id=None, disabled_by=er.RegistryEntryDisabler.HASS, # Should not be updated @@ -216,7 +200,6 @@ def test_get_or_create_updates_data( hidden_by=er.RegistryEntryHider.INTEGRATION, # Should not be updated icon=None, id=orig_entry.id, - modified_at=modified, name=None, original_device_class=None, original_icon=None, @@ -326,12 +309,8 @@ async def test_loading_saving_data( assert orig_entry1 == new_entry1 assert orig_entry2 == new_entry2 - - # By converting a deleted device to a active device, the modified_at will be updated - assert orig_entry3.modified_at < new_entry3.modified_at - assert attr.evolve(orig_entry3, modified_at=new_entry3.modified_at) == new_entry3 - assert orig_entry4.modified_at < new_entry4.modified_at - assert attr.evolve(orig_entry4, modified_at=new_entry4.modified_at) == new_entry4 + assert orig_entry3 == new_entry3 + assert orig_entry4 == new_entry4 assert new_entry2.area_id == "mock-area-id" assert new_entry2.categories == {"scope", "id"} @@ -443,8 +422,6 @@ async def test_filter_on_load( assert entry_with_name.name == "registry override" assert entry_without_name.name is None assert not entry_with_name.disabled - assert entry_with_name.created_at == utc_from_timestamp(0) - assert entry_with_name.modified_at == utc_from_timestamp(0) entry_disabled_hass = registry.async_get_or_create( "test", "super_platform", "disabled-hass" @@ -476,7 +453,6 @@ async def test_load_bad_data( "capabilities": None, "categories": {}, "config_entry_id": None, - "created_at": "2024-02-14T12:00:00.900075+00:00", "device_class": None, "device_id": None, "disabled_by": None, @@ -487,7 +463,6 @@ async def test_load_bad_data( "icon": None, "id": "00001", "labels": [], - "modified_at": "2024-02-14T12:00:00.900075+00:00", "name": None, "options": None, "original_device_class": None, @@ -506,7 +481,6 @@ async def test_load_bad_data( "capabilities": None, "categories": {}, "config_entry_id": None, - "created_at": "2024-02-14T12:00:00.900075+00:00", "device_class": None, "device_id": None, "disabled_by": None, @@ -517,7 +491,6 @@ async def test_load_bad_data( "icon": None, "id": "00002", "labels": [], - "modified_at": "2024-02-14T12:00:00.900075+00:00", "name": None, "options": None, "original_device_class": None, @@ -534,20 +507,16 @@ async def test_load_bad_data( "deleted_entities": [ { "config_entry_id": None, - "created_at": "2024-02-14T12:00:00.900075+00:00", "entity_id": "test.test3", "id": "00003", - "modified_at": "2024-02-14T12:00:00.900075+00:00", "orphaned_timestamp": None, "platform": "super_platform", "unique_id": 234, # Should not load }, { "config_entry_id": None, - "created_at": "2024-02-14T12:00:00.900075+00:00", "entity_id": "test.test4", "id": "00004", - "modified_at": "2024-02-14T12:00:00.900075+00:00", "orphaned_timestamp": None, "platform": "super_platform", "unique_id": ["also", "not", "valid"], # Should not load @@ -653,36 +622,36 @@ async def test_deleted_entity_removing_config_entry_id( entity_registry: er.EntityRegistry, ) -> None: """Test that we update config entry id in registry 
on deleted entity.""" - mock_config1 = MockConfigEntry(domain="light", entry_id="mock-id-1") - mock_config2 = MockConfigEntry(domain="light", entry_id="mock-id-2") + mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1") - entry1 = entity_registry.async_get_or_create( - "light", "hue", "5678", config_entry=mock_config1 + entry = entity_registry.async_get_or_create( + "light", "hue", "5678", config_entry=mock_config ) - assert entry1.config_entry_id == "mock-id-1" - entry2 = entity_registry.async_get_or_create( - "light", "hue", "1234", config_entry=mock_config2 - ) - assert entry2.config_entry_id == "mock-id-2" - entity_registry.async_remove(entry1.entity_id) - entity_registry.async_remove(entry2.entity_id) + assert entry.config_entry_id == "mock-id-1" + entity_registry.async_remove(entry.entity_id) assert len(entity_registry.entities) == 0 - assert len(entity_registry.deleted_entities) == 2 - deleted_entry1 = entity_registry.deleted_entities[("light", "hue", "5678")] - assert deleted_entry1.config_entry_id == "mock-id-1" - assert deleted_entry1.orphaned_timestamp is None - deleted_entry2 = entity_registry.deleted_entities[("light", "hue", "1234")] - assert deleted_entry2.config_entry_id == "mock-id-2" - assert deleted_entry2.orphaned_timestamp is None + assert len(entity_registry.deleted_entities) == 1 + assert ( + entity_registry.deleted_entities[("light", "hue", "5678")].config_entry_id + == "mock-id-1" + ) + assert ( + entity_registry.deleted_entities[("light", "hue", "5678")].orphaned_timestamp + is None + ) entity_registry.async_clear_config_entry("mock-id-1") assert len(entity_registry.entities) == 0 - assert len(entity_registry.deleted_entities) == 2 - deleted_entry1 = entity_registry.deleted_entities[("light", "hue", "5678")] - assert deleted_entry1.config_entry_id is None - assert deleted_entry1.orphaned_timestamp is not None - assert entity_registry.deleted_entities[("light", "hue", "1234")] == deleted_entry2 + assert len(entity_registry.deleted_entities) == 1 + assert ( + entity_registry.deleted_entities[("light", "hue", "5678")].config_entry_id + is None + ) + assert ( + entity_registry.deleted_entities[("light", "hue", "5678")].orphaned_timestamp + is not None + ) async def test_removing_area_id(entity_registry: er.EntityRegistry) -> None: @@ -726,49 +695,6 @@ async def test_migration_1_1(hass: HomeAssistant, hass_storage: dict[str, Any]) assert entry.device_class is None assert entry.original_device_class == "best_class" - # Check we store migrated data - await flush_store(registry._store) - assert hass_storage[er.STORAGE_KEY] == { - "version": er.STORAGE_VERSION_MAJOR, - "minor_version": er.STORAGE_VERSION_MINOR, - "key": er.STORAGE_KEY, - "data": { - "entities": [ - { - "aliases": [], - "area_id": None, - "capabilities": {}, - "categories": {}, - "config_entry_id": None, - "created_at": "1970-01-01T00:00:00+00:00", - "device_id": None, - "disabled_by": None, - "entity_category": None, - "entity_id": "test.entity", - "has_entity_name": False, - "hidden_by": None, - "icon": None, - "id": ANY, - "labels": [], - "modified_at": "1970-01-01T00:00:00+00:00", - "name": None, - "options": {}, - "original_device_class": "best_class", - "original_icon": None, - "original_name": None, - "platform": "super_platform", - "previous_unique_id": None, - "supported_features": 0, - "translation_key": None, - "unique_id": "very_unique", - "unit_of_measurement": None, - "device_class": None, - } - ], - "deleted_entities": [], - }, - } - @pytest.mark.parametrize("load_registries", 
[False]) async def test_migration_1_7(hass: HomeAssistant, hass_storage: dict[str, Any]) -> None: @@ -842,123 +768,6 @@ async def test_migration_1_7(hass: HomeAssistant, hass_storage: dict[str, Any]) assert entry.original_device_class == "class_by_integration" -@pytest.mark.parametrize("load_registries", [False]) -async def test_migration_1_11( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test migration from version 1.11. - - This is the first version which has deleted entities, make sure deleted entities - are updated. - """ - hass_storage[er.STORAGE_KEY] = { - "version": 1, - "minor_version": 11, - "data": { - "entities": [ - { - "aliases": [], - "area_id": None, - "capabilities": {}, - "config_entry_id": None, - "device_id": None, - "disabled_by": None, - "entity_category": None, - "entity_id": "test.entity", - "has_entity_name": False, - "hidden_by": None, - "icon": None, - "id": "12345", - "modified_at": "1970-01-01T00:00:00+00:00", - "name": None, - "options": {}, - "original_device_class": "best_class", - "original_icon": None, - "original_name": None, - "platform": "super_platform", - "supported_features": 0, - "translation_key": None, - "unique_id": "very_unique", - "unit_of_measurement": None, - "device_class": None, - } - ], - "deleted_entities": [ - { - "config_entry_id": None, - "entity_id": "test.deleted_entity", - "id": "23456", - "orphaned_timestamp": None, - "platform": "super_duper_platform", - "unique_id": "very_very_unique", - } - ], - }, - } - - await er.async_load(hass) - registry = er.async_get(hass) - - entry = registry.async_get_or_create("test", "super_platform", "very_unique") - - assert entry.device_class is None - assert entry.original_device_class == "best_class" - - # Check migrated data - await flush_store(registry._store) - assert hass_storage[er.STORAGE_KEY] == { - "version": er.STORAGE_VERSION_MAJOR, - "minor_version": er.STORAGE_VERSION_MINOR, - "key": er.STORAGE_KEY, - "data": { - "entities": [ - { - "aliases": [], - "area_id": None, - "capabilities": {}, - "categories": {}, - "config_entry_id": None, - "created_at": "1970-01-01T00:00:00+00:00", - "device_id": None, - "disabled_by": None, - "entity_category": None, - "entity_id": "test.entity", - "has_entity_name": False, - "hidden_by": None, - "icon": None, - "id": ANY, - "labels": [], - "modified_at": "1970-01-01T00:00:00+00:00", - "name": None, - "options": {}, - "original_device_class": "best_class", - "original_icon": None, - "original_name": None, - "platform": "super_platform", - "previous_unique_id": None, - "supported_features": 0, - "translation_key": None, - "unique_id": "very_unique", - "unit_of_measurement": None, - "device_class": None, - } - ], - "deleted_entities": [ - { - "config_entry_id": None, - "created_at": "1970-01-01T00:00:00+00:00", - "entity_id": "test.deleted_entity", - "id": "23456", - "modified_at": "1970-01-01T00:00:00+00:00", - "orphaned_timestamp": None, - "platform": "super_duper_platform", - "unique_id": "very_very_unique", - } - ], - }, - } - - async def test_update_entity_unique_id(entity_registry: er.EntityRegistry) -> None: """Test entity's unique_id is updated.""" mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1") @@ -1147,17 +956,14 @@ async def test_disabled_by(entity_registry: er.EntityRegistry) -> None: "light", "hue", "5678", disabled_by=er.RegistryEntryDisabler.HASS ) assert entry.disabled_by is er.RegistryEntryDisabler.HASS - assert entry.disabled is True entry = entity_registry.async_get_or_create( "light", "hue", 
"5678", disabled_by=er.RegistryEntryDisabler.INTEGRATION ) assert entry.disabled_by is er.RegistryEntryDisabler.HASS - assert entry.disabled is True entry2 = entity_registry.async_get_or_create("light", "hue", "1234") assert entry2.disabled_by is None - assert entry2.disabled is False async def test_disabled_by_config_entry_pref( @@ -1184,25 +990,6 @@ async def test_disabled_by_config_entry_pref( assert entry2.disabled_by is er.RegistryEntryDisabler.USER -async def test_hidden_by(entity_registry: er.EntityRegistry) -> None: - """Test that we can hide an entry when we create it.""" - entry = entity_registry.async_get_or_create( - "light", "hue", "5678", hidden_by=er.RegistryEntryHider.USER - ) - assert entry.hidden_by is er.RegistryEntryHider.USER - assert entry.hidden is True - - entry = entity_registry.async_get_or_create( - "light", "hue", "5678", disabled_by=er.RegistryEntryHider.INTEGRATION - ) - assert entry.hidden_by is er.RegistryEntryHider.USER - assert entry.hidden is True - - entry2 = entity_registry.async_get_or_create("light", "hue", "1234") - assert entry2.hidden_by is None - assert entry2.hidden is False - - async def test_restore_states( hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: diff --git a/tests/helpers/test_event.py b/tests/helpers/test_event.py index a45b418c526..4bb4c1a1967 100644 --- a/tests/helpers/test_event.py +++ b/tests/helpers/test_event.py @@ -1476,7 +1476,7 @@ async def test_track_template_result_super_template_2( wildercard_runs = [] wildercard_runs_availability = [] - template_availability = Template(availability_template, hass) + template_availability = Template(availability_template) template_condition = Template("{{states.sensor.test.state}}", hass) template_condition_var = Template( "{{(states.sensor.test.state|int) + test }}", hass @@ -1628,7 +1628,7 @@ async def test_track_template_result_super_template_2_initially_false( wildercard_runs = [] wildercard_runs_availability = [] - template_availability = Template(availability_template, hass) + template_availability = Template(availability_template) template_condition = Template("{{states.sensor.test.state}}", hass) template_condition_var = Template( "{{(states.sensor.test.state|int) + test }}", hass @@ -1892,10 +1892,10 @@ async def test_track_template_result_complex(hass: HomeAssistant) -> None: "time": False, } - hass.states.async_set("binary_sensor.single", "on") + hass.states.async_set("binary_sensor.single", "binary_sensor_on") await hass.async_block_till_done() assert len(specific_runs) == 9 - assert specific_runs[8] == "on" + assert specific_runs[8] == "binary_sensor_on" assert info.listeners == { "all": False, "domains": set(), @@ -3124,11 +3124,11 @@ async def test_async_track_template_result_multiple_templates( ) -> None: """Test tracking multiple templates.""" - template_1 = Template("{{ states.switch.test.state == 'on' }}", hass) - template_2 = Template("{{ states.switch.test.state == 'on' }}", hass) - template_3 = Template("{{ states.switch.test.state == 'off' }}", hass) + template_1 = Template("{{ states.switch.test.state == 'on' }}") + template_2 = Template("{{ states.switch.test.state == 'on' }}") + template_3 = Template("{{ states.switch.test.state == 'off' }}") template_4 = Template( - "{{ states.binary_sensor | map(attribute='entity_id') | list }}", hass + "{{ states.binary_sensor | map(attribute='entity_id') | list }}" ) refresh_runs = [] @@ -3188,12 +3188,11 @@ async def test_async_track_template_result_multiple_templates_mixing_domain( ) -> None: """Test 
tracking multiple templates when tracking entities and an entire domain.""" - template_1 = Template("{{ states.switch.test.state == 'on' }}", hass) - template_2 = Template("{{ states.switch.test.state == 'on' }}", hass) - template_3 = Template("{{ states.switch.test.state == 'off' }}", hass) + template_1 = Template("{{ states.switch.test.state == 'on' }}") + template_2 = Template("{{ states.switch.test.state == 'on' }}") + template_3 = Template("{{ states.switch.test.state == 'off' }}") template_4 = Template( - "{{ states.switch | sort(attribute='entity_id') | map(attribute='entity_id') | list }}", - hass, + "{{ states.switch | sort(attribute='entity_id') | map(attribute='entity_id') | list }}" ) refresh_runs = [] @@ -3418,8 +3417,8 @@ async def test_async_track_template_result_multiple_templates_mixing_listeners( ) -> None: """Test tracking multiple templates with mixing listener types.""" - template_1 = Template("{{ states.switch.test.state == 'on' }}", hass) - template_2 = Template("{{ now() and True }}", hass) + template_1 = Template("{{ states.switch.test.state == 'on' }}") + template_2 = Template("{{ now() and True }}") refresh_runs = [] @@ -4938,43 +4937,3 @@ async def test_async_track_state_report_event(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(tracker_called) == 2 unsub() - - -async def test_async_track_template_no_hass_deprecated( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test async_track_template with a template without hass is deprecated.""" - message = ( - "Detected code that calls async_track_template_result with template without " - "hass, which will stop working in HA Core 2025.10. Please report this issue." - ) - - async_track_template(hass, Template("blah"), lambda x, y, z: None) - assert message in caplog.text - caplog.clear() - - async_track_template(hass, Template("blah", hass), lambda x, y, z: None) - assert message not in caplog.text - caplog.clear() - - -async def test_async_track_template_result_no_hass_deprecated( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test async_track_template_result with a template without hass is deprecated.""" - message = ( - "Detected code that calls async_track_template_result with template without " - "hass, which will stop working in HA Core 2025.10. Please report this issue." 
- ) - - async_track_template_result( - hass, [TrackTemplate(Template("blah"), None)], lambda x, y, z: None - ) - assert message in caplog.text - caplog.clear() - - async_track_template_result( - hass, [TrackTemplate(Template("blah", hass), None)], lambda x, y, z: None - ) - assert message not in caplog.text - caplog.clear() diff --git a/tests/helpers/test_floor_registry.py b/tests/helpers/test_floor_registry.py index 6a672399522..3b07563fd11 100644 --- a/tests/helpers/test_floor_registry.py +++ b/tests/helpers/test_floor_registry.py @@ -1,16 +1,13 @@ """Tests for the floor registry.""" -from datetime import datetime from functools import partial import re from typing import Any -from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.core import HomeAssistant from homeassistant.helpers import area_registry as ar, floor_registry as fr -from homeassistant.util.dt import utcnow from tests.common import async_capture_events, flush_store @@ -21,10 +18,8 @@ async def test_list_floors(floor_registry: fr.FloorRegistry) -> None: assert len(list(floors)) == len(floor_registry.floors) -@pytest.mark.usefixtures("freezer") async def test_create_floor( - hass: HomeAssistant, - floor_registry: fr.FloorRegistry, + hass: HomeAssistant, floor_registry: fr.FloorRegistry ) -> None: """Make sure that we can create floors.""" update_events = async_capture_events(hass, fr.EVENT_FLOOR_REGISTRY_UPDATED) @@ -35,15 +30,11 @@ async def test_create_floor( level=1, ) - assert floor == fr.FloorEntry( - floor_id="first_floor", - name="First floor", - icon="mdi:home-floor-1", - aliases={"first", "ground", "ground floor"}, - level=1, - created_at=utcnow(), - modified_at=utcnow(), - ) + assert floor.floor_id == "first_floor" + assert floor.name == "First floor" + assert floor.icon == "mdi:home-floor-1" + assert floor.aliases == {"first", "ground", "ground floor"} + assert floor.level == 1 assert len(floor_registry.floors) == 1 @@ -125,30 +116,18 @@ async def test_delete_non_existing_floor(floor_registry: fr.FloorRegistry) -> No async def test_update_floor( - hass: HomeAssistant, - floor_registry: fr.FloorRegistry, - freezer: FrozenDateTimeFactory, + hass: HomeAssistant, floor_registry: fr.FloorRegistry ) -> None: """Make sure that we can update floors.""" - created_at = datetime.fromisoformat("2024-01-01T01:00:00+00:00") - freezer.move_to(created_at) - update_events = async_capture_events(hass, fr.EVENT_FLOOR_REGISTRY_UPDATED) floor = floor_registry.async_create("First floor") - assert floor == fr.FloorEntry( - floor_id="first_floor", - name="First floor", - icon=None, - aliases=set(), - level=None, - created_at=created_at, - modified_at=created_at, - ) assert len(floor_registry.floors) == 1 - - modified_at = datetime.fromisoformat("2024-02-01T01:00:00+00:00") - freezer.move_to(modified_at) + assert floor.floor_id == "first_floor" + assert floor.name == "First floor" + assert floor.icon is None + assert floor.aliases == set() + assert floor.level is None updated_floor = floor_registry.async_update( floor.floor_id, @@ -159,15 +138,11 @@ async def test_update_floor( ) assert updated_floor != floor - assert updated_floor == fr.FloorEntry( - floor_id="first_floor", - name="Second floor", - icon="mdi:home-floor-2", - aliases={"ground", "downstairs"}, - level=2, - created_at=created_at, - modified_at=modified_at, - ) + assert updated_floor.floor_id == "first_floor" + assert updated_floor.name == "Second floor" + assert updated_floor.icon == "mdi:home-floor-2" + assert updated_floor.aliases == {"ground", 
"downstairs"} + assert updated_floor.level == 2 assert len(floor_registry.floors) == 1 @@ -261,22 +236,15 @@ async def test_update_floor_with_normalized_name_already_in_use( async def test_load_floors( - hass: HomeAssistant, - floor_registry: fr.FloorRegistry, - freezer: FrozenDateTimeFactory, + hass: HomeAssistant, floor_registry: fr.FloorRegistry ) -> None: """Make sure that we can load/save data correctly.""" - floor1_created = datetime.fromisoformat("2024-01-01T00:00:00+00:00") - freezer.move_to(floor1_created) floor1 = floor_registry.async_create( "First floor", icon="mdi:home-floor-1", aliases={"first", "ground"}, level=1, ) - - floor2_created = datetime.fromisoformat("2024-02-01T00:00:00+00:00") - freezer.move_to(floor2_created) floor2 = floor_registry.async_create( "Second floor", icon="mdi:home-floor-2", @@ -294,16 +262,25 @@ async def test_load_floors( assert list(floor_registry.floors) == list(registry2.floors) floor1_registry2 = registry2.async_get_floor_by_name("First floor") - assert floor1_registry2 == floor1 + assert floor1_registry2.floor_id == floor1.floor_id + assert floor1_registry2.name == floor1.name + assert floor1_registry2.icon == floor1.icon + assert floor1_registry2.aliases == floor1.aliases + assert floor1_registry2.level == floor1.level + assert floor1_registry2.normalized_name == floor1.normalized_name floor2_registry2 = registry2.async_get_floor_by_name("Second floor") - assert floor2_registry2 == floor2 + assert floor2_registry2.floor_id == floor2.floor_id + assert floor2_registry2.name == floor2.name + assert floor2_registry2.icon == floor2.icon + assert floor2_registry2.aliases == floor2.aliases + assert floor2_registry2.level == floor2.level + assert floor2_registry2.normalized_name == floor2.normalized_name @pytest.mark.parametrize("load_registries", [False]) async def test_loading_floors_from_storage( - hass: HomeAssistant, - hass_storage: dict[str, Any], + hass: HomeAssistant, hass_storage: dict[str, Any] ) -> None: """Test loading stored floors on start.""" hass_storage[fr.STORAGE_KEY] = { @@ -415,52 +392,3 @@ async def test_async_update_thread_safety( await hass.async_add_executor_job( partial(floor_registry.async_update, any_floor.floor_id, name="new name") ) - - -@pytest.mark.parametrize("load_registries", [False]) -async def test_migration_from_1_1( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test migration from version 1.1.""" - hass_storage[fr.STORAGE_KEY] = { - "version": 1, - "data": { - "floors": [ - { - "floor_id": "12345A", - "name": "mock", - "aliases": [], - "icon": None, - "level": None, - } - ] - }, - } - - await fr.async_load(hass) - registry = fr.async_get(hass) - - # Test data was loaded - entry = registry.async_get_floor_by_name("mock") - assert entry.floor_id == "12345A" - - # Check we store migrated data - await flush_store(registry._store) - assert hass_storage[fr.STORAGE_KEY] == { - "version": fr.STORAGE_VERSION_MAJOR, - "minor_version": fr.STORAGE_VERSION_MINOR, - "key": fr.STORAGE_KEY, - "data": { - "floors": [ - { - "aliases": [], - "icon": None, - "floor_id": "12345A", - "level": None, - "name": "mock", - "created_at": "1970-01-01T00:00:00+00:00", - "modified_at": "1970-01-01T00:00:00+00:00", - } - ] - }, - } diff --git a/tests/helpers/test_frame.py b/tests/helpers/test_frame.py index a2a4890810b..b3fbb0faaf4 100644 --- a/tests/helpers/test_frame.py +++ b/tests/helpers/test_frame.py @@ -1,6 +1,5 @@ """Test the frame helper.""" -from typing import Any from unittest.mock import ANY, Mock, patch import 
pytest @@ -157,97 +156,6 @@ async def test_get_integration_logger_no_integration( assert logger.name == __name__ -@pytest.mark.parametrize( - ("integration_frame_path", "keywords", "expected_error", "expected_log"), - [ - pytest.param( - "homeassistant/test_core", - {}, - True, - 0, - id="core default", - ), - pytest.param( - "homeassistant/components/test_core_integration", - {}, - False, - 1, - id="core integration default", - ), - pytest.param( - "custom_components/test_custom_integration", - {}, - False, - 1, - id="custom integration default", - ), - pytest.param( - "custom_components/test_custom_integration", - {"custom_integration_behavior": frame.ReportBehavior.IGNORE}, - False, - 0, - id="custom integration ignore", - ), - pytest.param( - "custom_components/test_custom_integration", - {"custom_integration_behavior": frame.ReportBehavior.ERROR}, - True, - 1, - id="custom integration error", - ), - pytest.param( - "homeassistant/components/test_integration_frame", - {"core_integration_behavior": frame.ReportBehavior.IGNORE}, - False, - 0, - id="core_integration_behavior ignore", - ), - pytest.param( - "homeassistant/components/test_integration_frame", - {"core_integration_behavior": frame.ReportBehavior.ERROR}, - True, - 1, - id="core_integration_behavior error", - ), - pytest.param( - "homeassistant/test_integration_frame", - {"core_behavior": frame.ReportBehavior.IGNORE}, - False, - 0, - id="core_behavior ignore", - ), - pytest.param( - "homeassistant/test_integration_frame", - {"core_behavior": frame.ReportBehavior.LOG}, - False, - 1, - id="core_behavior log", - ), - ], -) -@pytest.mark.usefixtures("mock_integration_frame") -async def test_report_usage( - caplog: pytest.LogCaptureFixture, - keywords: dict[str, Any], - expected_error: bool, - expected_log: int, -) -> None: - """Test report.""" - - what = "test_report_string" - - errored = False - try: - with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): - frame.report_usage(what, **keywords) - except RuntimeError: - errored = True - - assert errored == expected_error - - assert caplog.text.count(what) == expected_log - - @patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_prevent_flooding( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock @@ -339,87 +247,3 @@ async def test_report_error_if_integration( ), ): frame.report("did a bad thing", error_if_integration=True) - - -@pytest.mark.parametrize( - ("integration_frame_path", "keywords", "expected_error", "expected_log"), - [ - pytest.param( - "homeassistant/test_core", - {}, - True, - 0, - id="core default", - ), - pytest.param( - "homeassistant/components/test_core_integration", - {}, - False, - 1, - id="core integration default", - ), - pytest.param( - "custom_components/test_custom_integration", - {}, - False, - 1, - id="custom integration default", - ), - pytest.param( - "custom_components/test_integration_frame", - {"log_custom_component_only": True}, - False, - 1, - id="log_custom_component_only with custom integration", - ), - pytest.param( - "homeassistant/components/test_integration_frame", - {"log_custom_component_only": True}, - False, - 0, - id="log_custom_component_only with core integration", - ), - pytest.param( - "homeassistant/test_integration_frame", - {"error_if_core": False}, - False, - 1, - id="disable error_if_core", - ), - pytest.param( - "custom_components/test_integration_frame", - {"error_if_integration": True}, - True, - 1, - id="error_if_integration with custom integration", - ), - pytest.param( 
- "homeassistant/components/test_integration_frame", - {"error_if_integration": True}, - True, - 1, - id="error_if_integration with core integration", - ), - ], -) -@pytest.mark.usefixtures("mock_integration_frame") -async def test_report( - caplog: pytest.LogCaptureFixture, - keywords: dict[str, Any], - expected_error: bool, - expected_log: int, -) -> None: - """Test report.""" - - what = "test_report_string" - - errored = False - try: - with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): - frame.report(what, **keywords) - except RuntimeError: - errored = True - - assert errored == expected_error - - assert caplog.text.count(what) == expected_log diff --git a/tests/helpers/test_httpx_client.py b/tests/helpers/test_httpx_client.py index ccfccb3d698..60bdbe607e3 100644 --- a/tests/helpers/test_httpx_client.py +++ b/tests/helpers/test_httpx_client.py @@ -5,8 +5,7 @@ from unittest.mock import Mock, patch import httpx import pytest -from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE -from homeassistant.core import HomeAssistant +from homeassistant.core import EVENT_HOMEASSISTANT_CLOSE, HomeAssistant import homeassistant.helpers.httpx_client as client from tests.common import MockModule, extract_stack_to_frame, mock_integration diff --git a/tests/helpers/test_icon.py b/tests/helpers/test_icon.py index ad5c852ded9..732f9971ac0 100644 --- a/tests/helpers/test_icon.py +++ b/tests/helpers/test_icon.py @@ -25,8 +25,12 @@ def test_battery_icon() -> None: iconbase = "mdi:battery" for level in range(0, 100, 5): print( # noqa: T201 - f"Level: {level}. icon: {icon.icon_for_battery_level(level, False)}, " - f"charging: {icon.icon_for_battery_level(level, True)}" + "Level: %d. icon: %s, charging: %s" + % ( + level, + icon.icon_for_battery_level(level, False), + icon.icon_for_battery_level(level, True), + ) ) if level <= 10: postfix_charging = "-outline" @@ -97,7 +101,7 @@ async def test_get_icons(hass: HomeAssistant) -> None: # Test services icons are available icons = await icon.async_get_icons(hass, "services") assert len(icons) == 1 - assert icons["switch"]["turn_off"] == {"service": "mdi:toggle-switch-variant-off"} + assert icons["switch"]["turn_off"] == "mdi:toggle-switch-variant-off" # Ensure icons file for platform isn't loaded, as that isn't supported icons = await icon.async_get_icons(hass, "entity") @@ -122,7 +126,7 @@ async def test_get_icons(hass: HomeAssistant) -> None: icons = await icon.async_get_icons(hass, "services") assert len(icons) == 2 - assert icons["test_package"]["enable_god_mode"] == {"service": "mdi:shield"} + assert icons["test_package"]["enable_god_mode"] == "mdi:shield" # Load another one hass.config.components.add("test_embedded") diff --git a/tests/helpers/test_integration_platform.py b/tests/helpers/test_integration_platform.py index 93bfeb2da5b..81eb1f2fd38 100644 --- a/tests/helpers/test_integration_platform.py +++ b/tests/helpers/test_integration_platform.py @@ -2,19 +2,17 @@ from collections.abc import Callable from types import ModuleType -from typing import Any from unittest.mock import Mock, patch import pytest from homeassistant import loader -from homeassistant.const import EVENT_COMPONENT_LOADED from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) -from homeassistant.setup import ATTR_COMPONENT +from homeassistant.setup import ATTR_COMPONENT, EVENT_COMPONENT_LOADED from tests.common import 
mock_platform @@ -30,9 +28,7 @@ async def test_process_integration_platforms_with_wait(hass: HomeAssistant) -> N processed = [] - async def _process_platform( - hass: HomeAssistant, domain: str, platform: Any - ) -> None: + async def _process_platform(hass, domain, platform): """Process platform.""" processed.append((domain, platform)) @@ -70,9 +66,7 @@ async def test_process_integration_platforms(hass: HomeAssistant) -> None: processed = [] - async def _process_platform( - hass: HomeAssistant, domain: str, platform: Any - ) -> None: + async def _process_platform(hass, domain, platform): """Process platform.""" processed.append((domain, platform)) @@ -112,9 +106,7 @@ async def test_process_integration_platforms_import_fails( processed = [] - async def _process_platform( - hass: HomeAssistant, domain: str, platform: Any - ) -> None: + async def _process_platform(hass, domain, platform): """Process platform.""" processed.append((domain, platform)) @@ -157,9 +149,7 @@ async def test_process_integration_platforms_import_fails_after_registered( processed = [] - async def _process_platform( - hass: HomeAssistant, domain: str, platform: Any - ) -> None: + async def _process_platform(hass, domain, platform): """Process platform.""" processed.append((domain, platform)) @@ -251,9 +241,7 @@ async def test_broken_integration( processed = [] - async def _process_platform( - hass: HomeAssistant, domain: str, platform: Any - ) -> None: + async def _process_platform(hass, domain, platform): """Process platform.""" processed.append((domain, platform)) @@ -276,9 +264,7 @@ async def test_process_integration_platforms_no_integrations( processed = [] - async def _process_platform( - hass: HomeAssistant, domain: str, platform: Any - ) -> None: + async def _process_platform(hass, domain, platform): """Process platform.""" processed.append((domain, platform)) diff --git a/tests/helpers/test_intent.py b/tests/helpers/test_intent.py index ae8c2ed65d0..c592fc50c0a 100644 --- a/tests/helpers/test_intent.py +++ b/tests/helpers/test_intent.py @@ -765,7 +765,7 @@ async def test_service_intent_handler_required_domains(hass: HomeAssistant) -> N ) # Still fails even if we provide the domain - with pytest.raises(intent.InvalidSlotInfo): + with pytest.raises(intent.MatchFailedError): await intent.async_handle( hass, "test", @@ -777,10 +777,7 @@ async def test_service_intent_handler_required_domains(hass: HomeAssistant) -> N async def test_service_handler_empty_strings(hass: HomeAssistant) -> None: """Test that passing empty strings for filters fails in ServiceIntentHandler.""" handler = intent.ServiceIntentHandler( - "TestType", - "light", - "turn_on", - "Turned {} on", + "TestType", "light", "turn_on", "Turned {} on" ) intent.async_register(hass, handler) @@ -817,55 +814,3 @@ async def test_service_handler_no_filter(hass: HomeAssistant) -> None: "test", "TestType", ) - - -async def test_service_handler_device_classes( - hass: HomeAssistant, entity_registry: er.EntityRegistry -) -> None: - """Test that passing empty strings for filters fails in ServiceIntentHandler.""" - - # Register a fake service and a switch intent handler - call_done = asyncio.Event() - calls = [] - - # Register a service that takes 0.1 seconds to execute - async def mock_service(call): - """Mock service.""" - call_done.set() - calls.append(call) - - hass.services.async_register("switch", "turn_on", mock_service) - - handler = intent.ServiceIntentHandler( - "TestType", - "switch", - "turn_on", - "Turned {} on", - 
device_classes={switch.SwitchDeviceClass}, - ) - intent.async_register(hass, handler) - - # Create a switch enttiy and match by device class - hass.states.async_set( - "switch.bedroom", "off", attributes={"device_class": "outlet"} - ) - hass.states.async_set("switch.living_room", "off") - - await intent.async_handle( - hass, - "test", - "TestType", - slots={"device_class": {"value": "outlet"}}, - ) - await call_done.wait() - assert [call.data.get("entity_id") for call in calls] == ["switch.bedroom"] - calls.clear() - - # Validate which device classes are allowed - with pytest.raises(intent.InvalidSlotInfo): - await intent.async_handle( - hass, - "test", - "TestType", - slots={"device_class": {"value": "light"}}, - ) diff --git a/tests/helpers/test_json.py b/tests/helpers/test_json.py index 94f21da1781..061faed6f93 100644 --- a/tests/helpers/test_json.py +++ b/tests/helpers/test_json.py @@ -13,12 +13,10 @@ from unittest.mock import Mock, patch import pytest from homeassistant.core import Event, HomeAssistant, State -from homeassistant.helpers import json as json_helper from homeassistant.helpers.json import ( ExtendedJSONEncoder, JSONEncoder as DefaultHASSJSONEncoder, find_paths_unserializable_data, - json_bytes_sorted, json_bytes_strip_null, json_dumps, json_dumps_sorted, @@ -27,14 +25,9 @@ from homeassistant.helpers.json import ( ) from homeassistant.util import dt as dt_util from homeassistant.util.color import RGBColor -from homeassistant.util.json import ( - JSON_DECODE_EXCEPTIONS, - JSON_ENCODE_EXCEPTIONS, - SerializationError, - load_json, -) +from homeassistant.util.json import SerializationError, load_json -from tests.common import import_and_test_deprecated_constant, json_round_trip +from tests.common import json_round_trip # Test data that can be saved as JSON TEST_JSON_A = {"a": 1, "B": "two"} @@ -108,14 +101,6 @@ def test_json_dumps_sorted() -> None: ) -def test_json_bytes_sorted() -> None: - """Test the json bytes sorted function.""" - data = {"c": 3, "a": 1, "b": 2} - assert json_bytes_sorted(data) == json.dumps( - data, sort_keys=True, separators=(",", ":") - ).encode("utf-8") - - def test_json_dumps_float_subclass() -> None: """Test the json dumps a float subclass.""" @@ -350,50 +335,3 @@ def test_find_unserializable_data() -> None: BadData(), dump=partial(json.dumps, cls=MockJSONEncoder), ) == {"$(BadData).bla": bad_data} - - -def test_deprecated_json_loads(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated json_loads function. - - It was moved from helpers to util in #88099 - """ - json_helper.json_loads("{}") - assert ( - "json_loads is a deprecated function which will be removed in " - "HA Core 2025.8. Use homeassistant.util.json.json_loads instead" - ) in caplog.text - - -@pytest.mark.parametrize( - ("constant_name", "replacement_name", "replacement"), - [ - ( - "JSON_DECODE_EXCEPTIONS", - "homeassistant.util.json.JSON_DECODE_EXCEPTIONS", - JSON_DECODE_EXCEPTIONS, - ), - ( - "JSON_ENCODE_EXCEPTIONS", - "homeassistant.util.json.JSON_ENCODE_EXCEPTIONS", - JSON_ENCODE_EXCEPTIONS, - ), - ], -) -def test_deprecated_aliases( - caplog: pytest.LogCaptureFixture, - constant_name: str, - replacement_name: str, - replacement: Any, -) -> None: - """Test deprecated JSON_DECODE_EXCEPTIONS and JSON_ENCODE_EXCEPTIONS constants. 
- - They were moved from helpers to util in #88099 - """ - import_and_test_deprecated_constant( - caplog, - json_helper, - constant_name, - replacement_name, - replacement, - "2025.8", - ) diff --git a/tests/helpers/test_label_registry.py b/tests/helpers/test_label_registry.py index ca1d4ac6fd3..445319a4b62 100644 --- a/tests/helpers/test_label_registry.py +++ b/tests/helpers/test_label_registry.py @@ -1,11 +1,9 @@ """Tests for the Label Registry.""" -from datetime import datetime from functools import partial import re from typing import Any -from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.core import HomeAssistant @@ -14,7 +12,6 @@ from homeassistant.helpers import ( entity_registry as er, label_registry as lr, ) -from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, async_capture_events, flush_store @@ -25,7 +22,6 @@ async def test_list_labels(label_registry: lr.LabelRegistry) -> None: assert len(list(labels)) == len(label_registry.labels) -@pytest.mark.usefixtures("freezer") async def test_create_label( hass: HomeAssistant, label_registry: lr.LabelRegistry ) -> None: @@ -38,15 +34,11 @@ async def test_create_label( description="This label is for testing", ) - assert label == lr.LabelEntry( - label_id="my_label", - name="My Label", - color="#FF0000", - icon="mdi:test", - description="This label is for testing", - created_at=utcnow(), - modified_at=utcnow(), - ) + assert label.label_id == "my_label" + assert label.name == "My Label" + assert label.color == "#FF0000" + assert label.icon == "mdi:test" + assert label.description == "This label is for testing" assert len(label_registry.labels) == 1 @@ -127,29 +119,19 @@ async def test_delete_non_existing_label(label_registry: lr.LabelRegistry) -> No async def test_update_label( - hass: HomeAssistant, - label_registry: lr.LabelRegistry, - freezer: FrozenDateTimeFactory, + hass: HomeAssistant, label_registry: lr.LabelRegistry ) -> None: """Make sure that we can update labels.""" - created_at = datetime.fromisoformat("2024-01-01T01:00:00+00:00") - freezer.move_to(created_at) update_events = async_capture_events(hass, lr.EVENT_LABEL_REGISTRY_UPDATED) label = label_registry.async_create("Mock") assert len(label_registry.labels) == 1 - assert label == lr.LabelEntry( - label_id="mock", - name="Mock", - color=None, - icon=None, - description=None, - created_at=created_at, - modified_at=created_at, - ) + assert label.label_id == "mock" + assert label.name == "Mock" + assert label.color is None + assert label.icon is None + assert label.description is None - modified_at = datetime.fromisoformat("2024-02-01T01:00:00+00:00") - freezer.move_to(modified_at) updated_label = label_registry.async_update( label.label_id, name="Updated", @@ -159,15 +141,12 @@ async def test_update_label( ) assert updated_label != label - assert updated_label == lr.LabelEntry( - label_id="mock", - name="Updated", - color="#FFFFFF", - icon="mdi:update", - description="Updated description", - created_at=created_at, - modified_at=modified_at, - ) + assert updated_label.label_id == "mock" + assert updated_label.name == "Updated" + assert updated_label.color == "#FFFFFF" + assert updated_label.icon == "mdi:update" + assert updated_label.description == "Updated description" + assert len(label_registry.labels) == 1 await hass.async_block_till_done() @@ -263,21 +242,15 @@ async def test_update_label_with_normalized_name_already_in_use( async def test_load_labels( - hass: HomeAssistant, - label_registry: lr.LabelRegistry, - 
freezer: FrozenDateTimeFactory, + hass: HomeAssistant, label_registry: lr.LabelRegistry ) -> None: """Make sure that we can load/save data correctly.""" - label1_created = datetime.fromisoformat("2024-01-01T00:00:00+00:00") - freezer.move_to(label1_created) label1 = label_registry.async_create( "Label One", color="#FF000", icon="mdi:one", description="This label is label one", ) - label2_created = datetime.fromisoformat("2024-02-01T00:00:00+00:00") - freezer.move_to(label2_created) label2 = label_registry.async_create( "Label Two", color="#000FF", @@ -295,10 +268,19 @@ async def test_load_labels( assert list(label_registry.labels) == list(registry2.labels) label1_registry2 = registry2.async_get_label_by_name("Label One") - assert label1_registry2 == label1 + assert label1_registry2.label_id == label1.label_id + assert label1_registry2.name == label1.name + assert label1_registry2.color == label1.color + assert label1_registry2.description == label1.description + assert label1_registry2.icon == label1.icon + assert label1_registry2.normalized_name == label1.normalized_name label2_registry2 = registry2.async_get_label_by_name("Label Two") - assert label2_registry2 == label2 + assert label2_registry2.name == label2.name + assert label2_registry2.color == label2.color + assert label2_registry2.description == label2.description + assert label2_registry2.icon == label2.icon + assert label2_registry2.normalized_name == label2.normalized_name @pytest.mark.parametrize("load_registries", [False]) @@ -316,8 +298,6 @@ async def test_loading_label_from_storage( "icon": "mdi:test", "label_id": "one", "name": "One", - "created_at": "2024-01-01T00:00:00+00:00", - "modified_at": "2024-02-01T00:00:00+00:00", } ] }, @@ -509,52 +489,3 @@ async def test_async_update_thread_safety( await hass.async_add_executor_job( partial(label_registry.async_update, any_label.label_id, name="new name") ) - - -@pytest.mark.parametrize("load_registries", [False]) -async def test_migration_from_1_1( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test migration from version 1.1.""" - hass_storage[lr.STORAGE_KEY] = { - "version": 1, - "data": { - "labels": [ - { - "color": None, - "description": None, - "icon": None, - "label_id": "12345A", - "name": "mock", - } - ] - }, - } - - await lr.async_load(hass) - registry = lr.async_get(hass) - - # Test data was loaded - entry = registry.async_get_label_by_name("mock") - assert entry.label_id == "12345A" - - # Check we store migrated data - await flush_store(registry._store) - assert hass_storage[lr.STORAGE_KEY] == { - "version": lr.STORAGE_VERSION_MAJOR, - "minor_version": lr.STORAGE_VERSION_MINOR, - "key": lr.STORAGE_KEY, - "data": { - "labels": [ - { - "color": None, - "description": None, - "icon": None, - "label_id": "12345A", - "name": "mock", - "created_at": "1970-01-01T00:00:00+00:00", - "modified_at": "1970-01-01T00:00:00+00:00", - } - ] - }, - } diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index cd36fe18933..ad18aa53071 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -22,6 +22,7 @@ from homeassistant.helpers import ( selector, ) from homeassistant.setup import async_setup_component +from homeassistant.util import yaml from tests.common import MockConfigEntry @@ -374,16 +375,11 @@ async def test_assist_api_prompt( "beer": {"description": "Number of beers"}, "wine": {}, }, - }, - "script_with_no_fields": { - "description": "This is another test script", - "sequence": [], - }, + } } }, ) 
async_expose_entity(hass, "conversation", "script.test_script", True) - async_expose_entity(hass, "conversation", "script.script_with_no_fields", True) entry = MockConfigEntry(title=None) entry.add_to_hass(hass) @@ -412,13 +408,11 @@ async def test_assist_api_prompt( hass.states.async_set( entry1.entity_id, "on", - {"friendly_name": "Kitchen", "temperature": Decimal("0.9"), "humidity": 65}, + {"friendly_name": "Kitchen", "temperature": Decimal("0.9")}, ) hass.states.async_set(entry2.entity_id, "on", {"friendly_name": "Living Room"}) - def create_entity( - device: dr.DeviceEntry, write_state=True, aliases: set[str] | None = None - ) -> None: + def create_entity(device: dr.DeviceEntry, write_state=True) -> None: """Create an entity for a device and track entity_id.""" entity = entity_registry.async_get_or_create( "light", @@ -428,8 +422,6 @@ async def test_assist_api_prompt( original_name=str(device.name or "Unnamed Device"), suggested_object_id=str(device.name or "unnamed_device"), ) - if aliases: - entity_registry.async_update_entity(entity.entity_id, aliases=aliases) if write_state: entity.write_unavailable_state(hass) @@ -441,8 +433,7 @@ async def test_assist_api_prompt( manufacturer="Test Manufacturer", model="Test Model", suggested_area="Test Area", - ), - aliases={"my test light"}, + ) ) for i in range(3): create_entity( @@ -515,62 +506,76 @@ async def test_assist_api_prompt( suggested_area="Test Area 2", ) ) - exposed_entities_prompt = """An overview of the areas and the devices in this smart home: -- names: script_with_no_fields - domain: script - state: 'off' - description: This is another test script -- names: Kitchen - domain: light - state: 'on' - attributes: - temperature: '0.9' - humidity: '65' -- names: Living Room - domain: light - state: 'on' - areas: Test Area, Alternative name -- names: Test Device, my test light - domain: light - state: unavailable - areas: Test Area, Alternative name -- names: Test Service - domain: light - state: unavailable - areas: Test Area, Alternative name -- names: Test Service - domain: light - state: unavailable - areas: Test Area, Alternative name -- names: Test Service - domain: light - state: unavailable - areas: Test Area, Alternative name -- names: Test Device 2 - domain: light - state: unavailable - areas: Test Area 2 -- names: Test Device 3 - domain: light - state: unavailable - areas: Test Area 2 -- names: Test Device 4 - domain: light - state: unavailable - areas: Test Area 2 -- names: Unnamed Device - domain: light - state: unavailable - areas: Test Area 2 -- names: '1' - domain: light - state: unavailable - areas: Test Area 2 -""" + + exposed_entities = llm._get_exposed_entities(hass, llm_context.assistant) + assert exposed_entities == { + "light.1": { + "areas": "Test Area 2", + "names": "1", + "state": "unavailable", + }, + entry1.entity_id: { + "names": "Kitchen", + "state": "on", + "attributes": { + "temperature": "0.9", + }, + }, + entry2.entity_id: { + "areas": "Test Area, Alternative name", + "names": "Living Room", + "state": "on", + }, + "light.test_device": { + "areas": "Test Area, Alternative name", + "names": "Test Device", + "state": "unavailable", + }, + "light.test_device_2": { + "areas": "Test Area 2", + "names": "Test Device 2", + "state": "unavailable", + }, + "light.test_device_3": { + "areas": "Test Area 2", + "names": "Test Device 3", + "state": "unavailable", + }, + "light.test_device_4": { + "areas": "Test Area 2", + "names": "Test Device 4", + "state": "unavailable", + }, + "light.test_service": { + "areas": 
"Test Area, Alternative name", + "names": "Test Service", + "state": "unavailable", + }, + "light.test_service_2": { + "areas": "Test Area, Alternative name", + "names": "Test Service", + "state": "unavailable", + }, + "light.test_service_3": { + "areas": "Test Area, Alternative name", + "names": "Test Service", + "state": "unavailable", + }, + "light.unnamed_device": { + "areas": "Test Area 2", + "names": "Unnamed Device", + "state": "unavailable", + }, + } + exposed_entities_prompt = ( + "An overview of the areas and the devices in this smart home:\n" + + yaml.dump(exposed_entities) + ) first_part_prompt = ( "When controlling Home Assistant always call the intent tools. " "Use HassTurnOn to lock and HassTurnOff to unlock a lock. " - "When controlling a device, prefer passing just name and domain. " + "When controlling a device, prefer passing just its name and its domain " + "(what comes before the dot in its entity id). " "When controlling an area, prefer passing just area name and domain." ) no_timer_prompt = "This device is not able to start timers." @@ -630,7 +635,6 @@ async def test_assist_api_prompt( async def test_script_tool( hass: HomeAssistant, - entity_registry: er.EntityRegistry, area_registry: ar.AreaRegistry, floor_registry: fr.FloorRegistry, ) -> None: @@ -666,10 +670,6 @@ async def test_script_tool( "extra_field": {"selector": {"area": {}}}, }, }, - "script_with_no_fields": { - "description": "This is another test script", - "sequence": [], - }, "unexposed_script": { "sequence": [], }, @@ -677,11 +677,6 @@ async def test_script_tool( }, ) async_expose_entity(hass, "conversation", "script.test_script", True) - async_expose_entity(hass, "conversation", "script.script_with_no_fields", True) - - entity_registry.async_update_entity( - "script.test_script", name="script name", aliases={"script alias"} - ) area = area_registry.async_create("Living room") floor = floor_registry.async_create("2") @@ -695,10 +690,7 @@ async def test_script_tool( tool = tools[0] assert tool.name == "test_script" - assert ( - tool.description - == "This is a test script. Aliases: ['script name', 'script alias']" - ) + assert tool.description == "This is a test script" schema = { vol.Required("beer", description="Number of beers"): cv.string, vol.Optional("wine"): selector.NumberSelector({"min": 0, "max": 3}), @@ -711,11 +703,7 @@ async def test_script_tool( assert tool.parameters.schema == schema assert hass.data[llm.SCRIPT_PARAMETERS_CACHE] == { - "test_script": ( - "This is a test script. Aliases: ['script name', 'script alias']", - vol.Schema(schema), - ), - "script_with_no_fields": ("This is another test script", vol.Schema({})), + "test_script": ("This is a test script", vol.Schema(schema)) } tool_input = llm.ToolInput( @@ -785,62 +773,15 @@ async def test_script_tool( tool = tools[0] assert tool.name == "test_script" - assert ( - tool.description - == "This is a new test script. Aliases: ['script name', 'script alias']" - ) + assert tool.description == "This is a new test script" schema = {vol.Required("beer", description="Number of beers"): cv.string} assert tool.parameters.schema == schema assert hass.data[llm.SCRIPT_PARAMETERS_CACHE] == { - "test_script": ( - "This is a new test script. 
Aliases: ['script name', 'script alias']", - vol.Schema(schema), - ), - "script_with_no_fields": ("This is another test script", vol.Schema({})), + "test_script": ("This is a new test script", vol.Schema(schema)) } -async def test_script_tool_name(hass: HomeAssistant) -> None: - """Test that script tool name is not started with a digit.""" - assert await async_setup_component(hass, "homeassistant", {}) - context = Context() - llm_context = llm.LLMContext( - platform="test_platform", - context=context, - user_prompt="test_text", - language="*", - assistant="conversation", - device_id=None, - ) - - # Create a script with a unique ID - assert await async_setup_component( - hass, - "script", - { - "script": { - "123456": { - "description": "This is a test script", - "sequence": [], - "fields": { - "beer": {"description": "Number of beers", "required": True}, - }, - }, - } - }, - ) - async_expose_entity(hass, "conversation", "script.123456", True) - - api = await llm.async_get_api(hass, "assist", llm_context) - - tools = [tool for tool in api.tools if isinstance(tool, llm.ScriptTool)] - assert len(tools) == 1 - - tool = tools[0] - assert tool.name == "_123456" - - async def test_selector_serializer( hass: HomeAssistant, llm_context: llm.LLMContext ) -> None: @@ -880,22 +821,13 @@ async def test_selector_serializer( assert selector_serializer( selector.ColorTempSelector({"min_mireds": 100, "max_mireds": 1000}) ) == {"type": "number", "minimum": 100, "maximum": 1000} - assert selector_serializer(selector.ConditionSelector()) == { - "type": "array", - "items": {"nullable": True, "type": "string"}, - } assert selector_serializer(selector.ConfigEntrySelector()) == {"type": "string"} assert selector_serializer(selector.ConstantSelector({"value": "test"})) == { - "type": "string", - "enum": ["test"], - } - assert selector_serializer(selector.ConstantSelector({"value": 1})) == { - "type": "integer", - "enum": [1], + "enum": ["test"] } + assert selector_serializer(selector.ConstantSelector({"value": 1})) == {"enum": [1]} assert selector_serializer(selector.ConstantSelector({"value": True})) == { - "type": "boolean", - "enum": [True], + "enum": [True] } assert selector_serializer(selector.QrCodeSelector({"data": "test"})) == { "type": "string" @@ -923,17 +855,6 @@ async def test_selector_serializer( "type": "array", "items": {"type": "string"}, } - assert selector_serializer(selector.DurationSelector()) == { - "type": "object", - "properties": { - "days": {"type": "number"}, - "hours": {"type": "number"}, - "minutes": {"type": "number"}, - "seconds": {"type": "number"}, - "milliseconds": {"type": "number"}, - }, - "required": [], - } assert selector_serializer(selector.EntitySelector()) == { "type": "string", "format": "entity_id", @@ -987,10 +908,7 @@ async def test_selector_serializer( "minimum": 30, "maximum": 100, } - assert selector_serializer(selector.ObjectSelector()) == { - "type": "object", - "additionalProperties": True, - } + assert selector_serializer(selector.ObjectSelector()) == {"type": "object"} assert selector_serializer( selector.SelectSelector( { @@ -1012,48 +930,6 @@ async def test_selector_serializer( assert selector_serializer( selector.StateSelector({"entity_id": "sensor.test"}) ) == {"type": "string"} - target_schema = selector_serializer(selector.TargetSelector()) - target_schema["properties"]["entity_id"]["anyOf"][0][ - "enum" - ].sort() # Order is not deterministic - assert target_schema == { - "type": "object", - "properties": { - "area_id": { - "anyOf": [ - {"type": "string", 
"enum": ["none"]}, - {"type": "array", "items": {"type": "string", "nullable": True}}, - ] - }, - "device_id": { - "anyOf": [ - {"type": "string", "enum": ["none"]}, - {"type": "array", "items": {"type": "string", "nullable": True}}, - ] - }, - "entity_id": { - "anyOf": [ - {"type": "string", "enum": ["all", "none"], "format": "lower"}, - {"type": "string", "nullable": True}, - {"type": "array", "items": {"type": "string"}}, - ] - }, - "floor_id": { - "anyOf": [ - {"type": "string", "enum": ["none"]}, - {"type": "array", "items": {"type": "string", "nullable": True}}, - ] - }, - "label_id": { - "anyOf": [ - {"type": "string", "enum": ["none"]}, - {"type": "array", "items": {"type": "string", "nullable": True}}, - ] - }, - }, - "required": [], - } - assert selector_serializer(selector.TemplateSelector()) == { "type": "string", "format": "jinja2", diff --git a/tests/helpers/test_network.py b/tests/helpers/test_network.py index 3064b215f2f..3c9594bca38 100644 --- a/tests/helpers/test_network.py +++ b/tests/helpers/test_network.py @@ -2,14 +2,11 @@ from unittest.mock import Mock, patch -from aiohttp import hdrs -from multidict import CIMultiDict, CIMultiDictProxy import pytest -from yarl import URL from homeassistant.components import cloud +from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant -from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers.network import ( NoURLAvailableError, _get_cloud_url, @@ -587,82 +584,19 @@ async def test_get_url(hass: HomeAssistant) -> None: assert get_url(hass, allow_internal=False) -async def test_get_request_host_with_port(hass: HomeAssistant) -> None: +async def test_get_request_host(hass: HomeAssistant) -> None: """Test getting the host of the current web request from the request context.""" with pytest.raises(NoURLAvailableError): _get_request_host() with patch("homeassistant.components.http.current_request") as mock_request_context: mock_request = Mock() - mock_request.headers = CIMultiDictProxy( - CIMultiDict({hdrs.HOST: "example.com:8123"}) - ) - mock_request.url = URL("http://example.com:8123/test/request") - mock_request.host = "example.com:8123" + mock_request.url = "http://example.com:8123/test/request" mock_request_context.get = Mock(return_value=mock_request) assert _get_request_host() == "example.com" -async def test_get_request_host_without_port(hass: HomeAssistant) -> None: - """Test getting the host of the current web request from the request context.""" - with pytest.raises(NoURLAvailableError): - _get_request_host() - - with patch("homeassistant.components.http.current_request") as mock_request_context: - mock_request = Mock() - mock_request.headers = CIMultiDictProxy(CIMultiDict({hdrs.HOST: "example.com"})) - mock_request.url = URL("http://example.com/test/request") - mock_request.host = "example.com" - mock_request_context.get = Mock(return_value=mock_request) - - assert _get_request_host() == "example.com" - - -async def test_get_request_ipv6_address(hass: HomeAssistant) -> None: - """Test getting the ipv6 host of the current web request from the request context.""" - with pytest.raises(NoURLAvailableError): - _get_request_host() - - with patch("homeassistant.components.http.current_request") as mock_request_context: - mock_request = Mock() - mock_request.headers = CIMultiDictProxy(CIMultiDict({hdrs.HOST: "[::1]:8123"})) - mock_request.url = URL("http://[::1]:8123/test/request") - mock_request.host = "[::1]:8123" - mock_request_context.get = 
Mock(return_value=mock_request) - - assert _get_request_host() == "::1" - - -async def test_get_request_ipv6_address_without_port(hass: HomeAssistant) -> None: - """Test getting the ipv6 host of the current web request from the request context.""" - with pytest.raises(NoURLAvailableError): - _get_request_host() - - with patch("homeassistant.components.http.current_request") as mock_request_context: - mock_request = Mock() - mock_request.headers = CIMultiDictProxy(CIMultiDict({hdrs.HOST: "[::1]"})) - mock_request.url = URL("http://[::1]/test/request") - mock_request.host = "[::1]" - mock_request_context.get = Mock(return_value=mock_request) - - assert _get_request_host() == "::1" - - -async def test_get_request_host_no_host_header(hass: HomeAssistant) -> None: - """Test getting the host of the current web request from the request context.""" - with pytest.raises(NoURLAvailableError): - _get_request_host() - - with patch("homeassistant.components.http.current_request") as mock_request_context: - mock_request = Mock() - mock_request.headers = CIMultiDictProxy(CIMultiDict()) - mock_request.url = URL("/test/request") - mock_request_context.get = Mock(return_value=mock_request) - - assert _get_request_host() is None - - @patch("homeassistant.components.hassio.is_hassio", Mock(return_value=True)) @patch( "homeassistant.components.hassio.get_host_info", @@ -727,7 +661,7 @@ async def test_get_current_request_url_with_known_host( @patch( - "homeassistant.helpers.network.is_hassio", + "homeassistant.components.hassio.is_hassio", Mock(return_value={"hostname": "homeassistant"}), ) @patch( @@ -748,20 +682,10 @@ async def test_is_internal_request(hass: HomeAssistant, mock_current_request) -> mock_current_request.return_value = None assert not is_internal_request(hass) - mock_current_request.return_value = Mock( - headers=CIMultiDictProxy(CIMultiDict({hdrs.HOST: "example.local:8123"})), - host="example.local:8123", - url=URL("http://example.local:8123"), - ) + mock_current_request.return_value = Mock(url="http://example.local:8123") assert is_internal_request(hass) - mock_current_request.return_value = Mock( - headers=CIMultiDictProxy( - CIMultiDict({hdrs.HOST: "no_match.example.local:8123"}) - ), - host="no_match.example.local:8123", - url=URL("http://no_match.example.local:8123"), - ) + mock_current_request.return_value = Mock(url="http://no_match.example.local:8123") assert not is_internal_request(hass) # Test with internal URL: http://192.168.0.1:8123 @@ -773,30 +697,18 @@ async def test_is_internal_request(hass: HomeAssistant, mock_current_request) -> assert hass.config.internal_url == "http://192.168.0.1:8123" assert not is_internal_request(hass) - mock_current_request.return_value = Mock( - headers=CIMultiDictProxy(CIMultiDict({hdrs.HOST: "192.168.0.1:8123"})), - host="192.168.0.1:8123", - url=URL("http://192.168.0.1:8123"), - ) + mock_current_request.return_value = Mock(url="http://192.168.0.1:8123") assert is_internal_request(hass) # Test for matching against local IP hass.config.api = Mock(use_ssl=False, local_ip="192.168.123.123", port=8123) for allowed in ("127.0.0.1", "192.168.123.123"): - mock_current_request.return_value = Mock( - headers=CIMultiDictProxy(CIMultiDict({hdrs.HOST: f"{allowed}:8123"})), - host=f"{allowed}:8123", - url=URL(f"http://{allowed}:8123"), - ) + mock_current_request.return_value = Mock(url=f"http://{allowed}:8123") assert is_internal_request(hass), mock_current_request.return_value.url # Test for matching against HassOS hostname for allowed in ("hellohost", 
"hellohost.local"): - mock_current_request.return_value = Mock( - headers=CIMultiDictProxy(CIMultiDict({hdrs.HOST: f"{allowed}:8123"})), - host=f"{allowed}:8123", - url=URL(f"http://{allowed}:8123"), - ) + mock_current_request.return_value = Mock(url=f"http://{allowed}:8123") assert is_internal_request(hass), mock_current_request.return_value.url diff --git a/tests/helpers/test_normalized_name_base_registry.py b/tests/helpers/test_normalized_name_base_registry.py index 4795c759f9f..9783e64eeff 100644 --- a/tests/helpers/test_normalized_name_base_registry.py +++ b/tests/helpers/test_normalized_name_base_registry.py @@ -26,14 +26,18 @@ def test_registry_items( registry_items: NormalizedNameBaseRegistryItems[NormalizedNameBaseRegistryEntry], ) -> None: """Test registry items.""" - entry = NormalizedNameBaseRegistryEntry(name="Hello World") + entry = NormalizedNameBaseRegistryEntry( + name="Hello World", normalized_name="helloworld" + ) registry_items["key"] = entry assert registry_items["key"] == entry assert list(registry_items.values()) == [entry] assert registry_items.get_by_name("Hello World") == entry # test update entry - entry2 = NormalizedNameBaseRegistryEntry(name="Hello World 2") + entry2 = NormalizedNameBaseRegistryEntry( + name="Hello World 2", normalized_name="helloworld2" + ) registry_items["key"] = entry2 assert registry_items["key"] == entry2 assert list(registry_items.values()) == [entry2] @@ -49,12 +53,16 @@ def test_key_already_in_use( registry_items: NormalizedNameBaseRegistryItems[NormalizedNameBaseRegistryEntry], ) -> None: """Test key already in use.""" - entry = NormalizedNameBaseRegistryEntry(name="Hello World") + entry = NormalizedNameBaseRegistryEntry( + name="Hello World", normalized_name="helloworld" + ) registry_items["key"] = entry # should raise ValueError if we update a # key with a entry with the same normalized name - entry = NormalizedNameBaseRegistryEntry(name="Hello World 2") + entry = NormalizedNameBaseRegistryEntry( + name="Hello World 2", normalized_name="helloworld2" + ) registry_items["key2"] = entry with pytest.raises(ValueError): registry_items["key"] = entry diff --git a/tests/helpers/test_recorder.py b/tests/helpers/test_recorder.py index 8fb8450bcb8..94f30d812bc 100644 --- a/tests/helpers/test_recorder.py +++ b/tests/helpers/test_recorder.py @@ -18,25 +18,18 @@ async def test_async_migration_in_progress( ): assert recorder.async_migration_in_progress(hass) is False + # The recorder is not loaded + with patch( + "homeassistant.components.recorder.util.async_migration_in_progress", + return_value=True, + ): + assert recorder.async_migration_in_progress(hass) is False + + await async_setup_recorder_instance(hass) + + # The recorder is now loaded with patch( "homeassistant.components.recorder.util.async_migration_in_progress", return_value=True, ): assert recorder.async_migration_in_progress(hass) is True - - -async def test_async_migration_is_live( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: - """Test async_migration_in_progress wraps the recorder.""" - with patch( - "homeassistant.components.recorder.util.async_migration_is_live", - return_value=False, - ): - assert recorder.async_migration_is_live(hass) is False - - with patch( - "homeassistant.components.recorder.util.async_migration_is_live", - return_value=True, - ): - assert recorder.async_migration_is_live(hass) is True diff --git a/tests/helpers/test_schema_config_entry_flow.py b/tests/helpers/test_schema_config_entry_flow.py index 
e67525253bc..877e3762d3b 100644 --- a/tests/helpers/test_schema_config_entry_flow.py +++ b/tests/helpers/test_schema_config_entry_flow.py @@ -648,10 +648,6 @@ async def test_options_flow_state(hass: HomeAssistant) -> None: options_handler = hass.config_entries.options._progress[result["flow_id"]] assert options_handler._common_handler.flow_state == {"idx": None} - # Ensure that self.options and self._common_handler.options refer to the - # same mutable copy of the options - assert options_handler.options is options_handler._common_handler.options - # In step 1, flow state is updated with user input result = await hass.config_entries.options.async_configure( result["flow_id"], {"option1": "blublu"} diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index f67519905a1..08c196a04d3 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -249,7 +249,7 @@ async def test_calling_service_basic( alias = "service step" sequence = cv.SCRIPT_SCHEMA( - {"alias": alias, "action": "test.script", "data": {"hello": "world"}} + {"alias": alias, "service": "test.script", "data": {"hello": "world"}} ) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") @@ -352,13 +352,13 @@ async def test_calling_service_response_data( [ { "alias": "service step1", - "action": "test.script", + "service": "test.script", # Store the result of the service call as a variable "response_variable": "my_response", }, { "alias": "service step2", - "action": "test.script", + "service": "test.script", "data_template": { # Result of previous service call "key": "{{ my_response.data }}" @@ -441,7 +441,7 @@ async def test_service_response_data_errors( [ { "alias": "service step1", - "action": "test.script", + "service": "test.script", **params, }, ] @@ -458,7 +458,7 @@ async def test_data_template_with_templated_key(hass: HomeAssistant) -> None: calls = async_mock_service(hass, "test", "script") sequence = cv.SCRIPT_SCHEMA( - {"action": "test.script", "data_template": {"{{ hello_var }}": "world"}} + {"service": "test.script", "data_template": {"{{ hello_var }}": "world"}} ) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") @@ -525,11 +525,11 @@ async def test_multiple_runs_no_wait(hass: HomeAssistant) -> None: sequence = cv.SCRIPT_SCHEMA( [ { - "action": "test.script", + "service": "test.script", "data_template": {"fire": "{{ fire1 }}", "listen": "{{ listen1 }}"}, }, { - "action": "test.script", + "service": "test.script", "data_template": {"fire": "{{ fire2 }}", "listen": "{{ listen2 }}"}, }, ] @@ -605,7 +605,7 @@ async def test_stop_no_wait(hass: HomeAssistant, count) -> None: hass.services.async_register("test", "script", async_simulate_long_service) - sequence = cv.SCRIPT_SCHEMA([{"action": "test.script"}, {"event": event}]) + sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}]) script_obj = script.Script( hass, sequence, @@ -943,9 +943,18 @@ async def test_wait_basic(hass: HomeAssistant, action_type) -> None: assert not script_obj.is_running assert script_obj.last_action is None - expected_var = {"completed": True, "remaining": None} - - if action_type == "trigger": + if action_type == "template": + assert_action_trace( + { + "0": [ + { + "result": {"wait": {"completed": True, "remaining": None}}, + "variables": {"wait": {"completed": True, "remaining": None}}, + } + ], + } + ) + else: expected_trigger = { "alias": None, "attribute": None, @@ -958,18 +967,23 @@ async def test_wait_basic(hass: HomeAssistant, action_type) -> 
None: "platform": "state", "to_state": ANY, } - expected_var["trigger"] = expected_trigger - - assert_action_trace( - { - "0": [ - { - "result": {"wait": expected_var}, - "variables": {"wait": expected_var}, - } - ], - } - ) + assert_action_trace( + { + "0": [ + { + "result": { + "wait": { + "trigger": expected_trigger, + "remaining": None, + } + }, + "variables": { + "wait": {"remaining": None, "trigger": expected_trigger} + }, + } + ], + } + ) async def test_wait_for_trigger_variables(hass: HomeAssistant) -> None: @@ -1045,21 +1059,28 @@ async def test_wait_basic_times_out(hass: HomeAssistant, action_type) -> None: assert timed_out - expected_var = {"completed": False, "remaining": None} - - if action_type == "trigger": - expected_var["trigger"] = None - - assert_action_trace( - { - "0": [ - { - "result": {"wait": expected_var}, - "variables": {"wait": expected_var}, - } - ], - } - ) + if action_type == "template": + assert_action_trace( + { + "0": [ + { + "result": {"wait": {"completed": False, "remaining": None}}, + "variables": {"wait": {"completed": False, "remaining": None}}, + } + ], + } + ) + else: + assert_action_trace( + { + "0": [ + { + "result": {"wait": {"trigger": None, "remaining": None}}, + "variables": {"wait": {"remaining": None, "trigger": None}}, + } + ], + } + ) @pytest.mark.parametrize("action_type", ["template", "trigger"]) @@ -1162,22 +1183,30 @@ async def test_cancel_wait(hass: HomeAssistant, action_type) -> None: assert not script_obj.is_running assert len(events) == 0 - expected_var = {"completed": False, "remaining": None} - - if action_type == "trigger": - expected_var["trigger"] = None - - assert_action_trace( - { - "0": [ - { - "result": {"wait": expected_var}, - "variables": {"wait": expected_var}, - } - ], - }, - expected_script_execution="cancelled", - ) + if action_type == "template": + assert_action_trace( + { + "0": [ + { + "result": {"wait": {"completed": False, "remaining": None}}, + "variables": {"wait": {"completed": False, "remaining": None}}, + } + ], + }, + expected_script_execution="cancelled", + ) + else: + assert_action_trace( + { + "0": [ + { + "result": {"wait": {"trigger": None, "remaining": None}}, + "variables": {"wait": {"remaining": None, "trigger": None}}, + } + ], + }, + expected_script_execution="cancelled", + ) async def test_wait_template_not_schedule(hass: HomeAssistant) -> None: @@ -1265,11 +1294,10 @@ async def test_wait_timeout( assert len(events) == 1 assert "(timeout: 0:00:05)" in caplog.text - variable_wait = {"wait": {"completed": False, "remaining": 0.0}} - - if action_type == "trigger": - variable_wait["wait"]["trigger"] = None - + if action_type == "template": + variable_wait = {"wait": {"completed": False, "remaining": 0.0}} + else: + variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} expected_trace = { "0": [ { @@ -1317,7 +1345,7 @@ async def test_wait_trigger_with_zero_timeout( assert len(events) == 1 assert "(timeout: 0:00:00)" in caplog.text - variable_wait = {"wait": {"completed": False, "trigger": None, "remaining": 0.0}} + variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} expected_trace = { "0": [ { @@ -1365,7 +1393,7 @@ async def test_wait_trigger_matches_with_zero_timeout( assert len(events) == 1 assert "(timeout: 0:00:00)" in caplog.text - variable_wait = {"wait": {"completed": False, "trigger": None, "remaining": 0.0}} + variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} expected_trace = { "0": [ { @@ -1505,11 +1533,12 @@ async def test_wait_continue_on_timeout( assert not 
script_obj.is_running assert len(events) == n_events - result_wait = {"wait": {"completed": False, "remaining": 0.0}} - if action_type == "trigger": - result_wait["wait"]["trigger"] = None - - variable_wait = dict(result_wait) + if action_type == "template": + result_wait = {"wait": {"completed": False, "remaining": 0.0}} + variable_wait = dict(result_wait) + else: + result_wait = {"wait": {"trigger": None, "remaining": 0.0}} + variable_wait = dict(result_wait) expected_trace = { "0": [{"result": result_wait, "variables": variable_wait}], } @@ -1737,12 +1766,8 @@ async def test_wait_for_trigger_bad( { "0": [ { - "result": { - "wait": {"completed": False, "trigger": None, "remaining": None} - }, - "variables": { - "wait": {"completed": False, "remaining": None, "trigger": None} - }, + "result": {"wait": {"trigger": None, "remaining": None}}, + "variables": {"wait": {"remaining": None, "trigger": None}}, } ], } @@ -1782,12 +1807,8 @@ async def test_wait_for_trigger_generated_exception( { "0": [ { - "result": { - "wait": {"completed": False, "trigger": None, "remaining": None} - }, - "variables": { - "wait": {"completed": False, "remaining": None, "trigger": None} - }, + "result": {"wait": {"trigger": None, "remaining": None}}, + "variables": {"wait": {"remaining": None, "trigger": None}}, } ], } @@ -3696,18 +3717,11 @@ async def test_parallel(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - { "result": { "wait": { - "completed": True, - "remaining": None, - "trigger": expected_trigger, - } - }, - "variables": { - "wait": { - "completed": True, "remaining": None, "trigger": expected_trigger, } }, + "variables": {"wait": {"remaining": None, "trigger": expected_trigger}}, } ], "0/parallel/1/sequence/0": [ @@ -3880,7 +3894,7 @@ async def test_parallel_error( sequence = cv.SCRIPT_SCHEMA( { "parallel": [ - {"action": "epic.failure"}, + {"service": "epic.failure"}, ] } ) @@ -3892,10 +3906,10 @@ async def test_parallel_error( assert len(events) == 0 expected_trace = { - "0": [{"error": "Action epic.failure not found"}], + "0": [{"error": "Service epic.failure not found"}], "0/parallel/0/sequence/0": [ { - "error": "Action epic.failure not found", + "error": "Service epic.failure not found", "result": { "params": { "domain": "epic", @@ -3932,7 +3946,7 @@ async def test_propagate_error_service_not_found(hass: HomeAssistant) -> None: await async_setup_component(hass, "homeassistant", {}) event = "test_event" events = async_capture_events(hass, event) - sequence = cv.SCRIPT_SCHEMA([{"action": "test.script"}, {"event": event}]) + sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}]) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") with pytest.raises(exceptions.ServiceNotFound): @@ -3944,7 +3958,7 @@ async def test_propagate_error_service_not_found(hass: HomeAssistant) -> None: expected_trace = { "0": [ { - "error": "Action test.script not found", + "error": "Service test.script not found", "result": { "params": { "domain": "test", @@ -3966,7 +3980,7 @@ async def test_propagate_error_invalid_service_data(hass: HomeAssistant) -> None events = async_capture_events(hass, event) calls = async_mock_service(hass, "test", "script", vol.Schema({"text": str})) sequence = cv.SCRIPT_SCHEMA( - [{"action": "test.script", "data": {"text": 1}}, {"event": event}] + [{"service": "test.script", "data": {"text": 1}}, {"event": event}] ) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") @@ -4008,7 +4022,7 @@ async def 
test_propagate_error_service_exception(hass: HomeAssistant) -> None: hass.services.async_register("test", "script", record_call) - sequence = cv.SCRIPT_SCHEMA([{"action": "test.script"}, {"event": event}]) + sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}]) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") with pytest.raises(ValueError): @@ -4043,35 +4057,35 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "action": "test.script", + "service": "test.script", "data": {"label_id": "label_service_not_list"}, }, { - "action": "test.script", + "service": "test.script", "data": { "label_id": ["label_service_list_1", "label_service_list_2"] }, }, { - "action": "test.script", + "service": "test.script", "data": {"label_id": "{{ 'label_service_template' }}"}, }, { - "action": "test.script", + "service": "test.script", "target": {"label_id": "label_in_target"}, }, { - "action": "test.script", + "service": "test.script", "data_template": {"label_id": "label_in_data_template"}, }, - {"action": "test.script", "data": {"without": "label_id"}}, + {"service": "test.script", "data": {"without": "label_id"}}, { "choose": [ { "conditions": "{{ true == false }}", "sequence": [ { - "action": "test.script", + "service": "test.script", "data": {"label_id": "label_choice_1_seq"}, } ], @@ -4080,7 +4094,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: "conditions": "{{ true == false }}", "sequence": [ { - "action": "test.script", + "service": "test.script", "data": {"label_id": "label_choice_2_seq"}, } ], @@ -4088,7 +4102,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: ], "default": [ { - "action": "test.script", + "service": "test.script", "data": {"label_id": "label_default_seq"}, } ], @@ -4099,13 +4113,13 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: "if": [], "then": [ { - "action": "test.script", + "service": "test.script", "data": {"label_id": "label_if_then"}, } ], "else": [ { - "action": "test.script", + "service": "test.script", "data": {"label_id": "label_if_else"}, } ], @@ -4113,7 +4127,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: { "parallel": [ { - "action": "test.script", + "service": "test.script", "data": {"label_id": "label_parallel"}, } ], @@ -4147,33 +4161,33 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "action": "test.script", + "service": "test.script", "data": {"floor_id": "floor_service_not_list"}, }, { - "action": "test.script", + "service": "test.script", "data": {"floor_id": ["floor_service_list"]}, }, { - "action": "test.script", + "service": "test.script", "data": {"floor_id": "{{ 'floor_service_template' }}"}, }, { - "action": "test.script", + "service": "test.script", "target": {"floor_id": "floor_in_target"}, }, { - "action": "test.script", + "service": "test.script", "data_template": {"floor_id": "floor_in_data_template"}, }, - {"action": "test.script", "data": {"without": "floor_id"}}, + {"service": "test.script", "data": {"without": "floor_id"}}, { "choose": [ { "conditions": "{{ true == false }}", "sequence": [ { - "action": "test.script", + "service": "test.script", "data": {"floor_id": "floor_choice_1_seq"}, } ], @@ -4182,7 +4196,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: "conditions": "{{ true == false }}", "sequence": [ { - "action": "test.script", + "service": "test.script", "data": {"floor_id": "floor_choice_2_seq"}, } ], @@ -4190,7 
+4204,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: ], "default": [ { - "action": "test.script", + "service": "test.script", "data": {"floor_id": "floor_default_seq"}, } ], @@ -4201,13 +4215,13 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: "if": [], "then": [ { - "action": "test.script", + "service": "test.script", "data": {"floor_id": "floor_if_then"}, } ], "else": [ { - "action": "test.script", + "service": "test.script", "data": {"floor_id": "floor_if_else"}, } ], @@ -4215,7 +4229,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: { "parallel": [ { - "action": "test.script", + "service": "test.script", "data": {"floor_id": "floor_parallel"}, } ], @@ -4248,33 +4262,33 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "action": "test.script", + "service": "test.script", "data": {"area_id": "area_service_not_list"}, }, { - "action": "test.script", + "service": "test.script", "data": {"area_id": ["area_service_list"]}, }, { - "action": "test.script", + "service": "test.script", "data": {"area_id": "{{ 'area_service_template' }}"}, }, { - "action": "test.script", + "service": "test.script", "target": {"area_id": "area_in_target"}, }, { - "action": "test.script", + "service": "test.script", "data_template": {"area_id": "area_in_data_template"}, }, - {"action": "test.script", "data": {"without": "area_id"}}, + {"service": "test.script", "data": {"without": "area_id"}}, { "choose": [ { "conditions": "{{ true == false }}", "sequence": [ { - "action": "test.script", + "service": "test.script", "data": {"area_id": "area_choice_1_seq"}, } ], @@ -4283,7 +4297,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: "conditions": "{{ true == false }}", "sequence": [ { - "action": "test.script", + "service": "test.script", "data": {"area_id": "area_choice_2_seq"}, } ], @@ -4291,7 +4305,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: ], "default": [ { - "action": "test.script", + "service": "test.script", "data": {"area_id": "area_default_seq"}, } ], @@ -4302,13 +4316,13 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: "if": [], "then": [ { - "action": "test.script", + "service": "test.script", "data": {"area_id": "area_if_then"}, } ], "else": [ { - "action": "test.script", + "service": "test.script", "data": {"area_id": "area_if_else"}, } ], @@ -4316,7 +4330,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: { "parallel": [ { - "action": "test.script", + "service": "test.script", "data": {"area_id": "area_parallel"}, } ], @@ -4350,27 +4364,27 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.service_not_list"}, }, { - "action": "test.script", + "service": "test.script", "data": {"entity_id": ["light.service_list"]}, }, { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "{{ 'light.service_template' }}"}, }, { - "action": "test.script", + "service": "test.script", "entity_id": "light.direct_entity_referenced", }, { - "action": "test.script", + "service": "test.script", "target": {"entity_id": "light.entity_in_target"}, }, { - "action": "test.script", + "service": "test.script", "data_template": {"entity_id": "light.entity_in_data_template"}, }, { @@ -4378,7 +4392,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: "entity_id": "sensor.condition", "state": "100", }, - {"action": 
"test.script", "data": {"without": "entity_id"}}, + {"service": "test.script", "data": {"without": "entity_id"}}, {"scene": "scene.hello"}, { "choose": [ @@ -4386,7 +4400,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: "conditions": "{{ states.light.choice_1_cond == 'on' }}", "sequence": [ { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.choice_1_seq"}, } ], @@ -4399,7 +4413,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: }, "sequence": [ { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.choice_2_seq"}, } ], @@ -4407,7 +4421,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: ], "default": [ { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.default_seq"}, } ], @@ -4418,13 +4432,13 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: "if": [], "then": [ { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.if_then"}, } ], "else": [ { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.if_else"}, } ], @@ -4432,7 +4446,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: { "parallel": [ { - "action": "test.script", + "service": "test.script", "data": {"entity_id": "light.parallel"}, } ], @@ -4477,19 +4491,19 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: "domain": "switch", }, { - "action": "test.script", + "service": "test.script", "data": {"device_id": "data-string-id"}, }, { - "action": "test.script", + "service": "test.script", "data_template": {"device_id": "data-template-string-id"}, }, { - "action": "test.script", + "service": "test.script", "target": {"device_id": "target-string-id"}, }, { - "action": "test.script", + "service": "test.script", "target": {"device_id": ["target-list-id-1", "target-list-id-2"]}, }, { @@ -4501,7 +4515,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: ), "sequence": [ { - "action": "test.script", + "service": "test.script", "target": { "device_id": "choice-1-seq-device-target" }, @@ -4516,7 +4530,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: }, "sequence": [ { - "action": "test.script", + "service": "test.script", "target": { "device_id": "choice-2-seq-device-target" }, @@ -4526,7 +4540,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: ], "default": [ { - "action": "test.script", + "service": "test.script", "target": {"device_id": "default-device-target"}, } ], @@ -4535,13 +4549,13 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: "if": [], "then": [ { - "action": "test.script", + "service": "test.script", "data": {"device_id": "if-then"}, } ], "else": [ { - "action": "test.script", + "service": "test.script", "data": {"device_id": "if-else"}, } ], @@ -4549,7 +4563,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: { "parallel": [ { - "action": "test.script", + "service": "test.script", "target": {"device_id": "parallel-device"}, } ], @@ -5090,7 +5104,7 @@ async def test_set_variable( sequence = cv.SCRIPT_SCHEMA( [ {"alias": alias, "variables": {"variable": "value"}}, - {"action": "test.script", "data": {"value": "{{ variable }}"}}, + {"service": "test.script", "data": {"value": "{{ variable }}"}}, ] ) script_obj = script.Script(hass, sequence, "test script", "test_domain") @@ -5129,9 +5143,9 @@ async def test_set_redefines_variable( sequence = cv.SCRIPT_SCHEMA( [ {"variables": 
{"variable": "1"}}, - {"action": "test.script", "data": {"value": "{{ variable }}"}}, + {"service": "test.script", "data": {"value": "{{ variable }}"}}, {"variables": {"variable": "{{ variable | int + 1 }}"}}, - {"action": "test.script", "data": {"value": "{{ variable }}"}}, + {"service": "test.script", "data": {"value": "{{ variable }}"}}, ] ) script_obj = script.Script(hass, sequence, "test script", "test_domain") @@ -5200,7 +5214,7 @@ async def test_validate_action_config( } configs = { - cv.SCRIPT_ACTION_CALL_SERVICE: {"action": "light.turn_on"}, + cv.SCRIPT_ACTION_CALL_SERVICE: {"service": "light.turn_on"}, cv.SCRIPT_ACTION_DELAY: {"delay": 5}, cv.SCRIPT_ACTION_WAIT_TEMPLATE: { "wait_template": "{{ states.light.kitchen.state == 'on' }}" @@ -5335,7 +5349,7 @@ async def test_embedded_wait_for_trigger_in_automation(hass: HomeAssistant) -> N } ] }, - {"action": "test.script"}, + {"service": "test.script"}, ], } }, @@ -5690,12 +5704,12 @@ async def test_continue_on_error(hass: HomeAssistant) -> None: {"event": "test_event"}, { "continue_on_error": True, - "action": "broken.service", + "service": "broken.service", }, {"event": "test_event"}, { "continue_on_error": False, - "action": "broken.service", + "service": "broken.service", }, {"event": "test_event"}, ] @@ -5772,7 +5786,7 @@ async def test_continue_on_error_automation_issue(hass: HomeAssistant) -> None: [ { "continue_on_error": True, - "action": "service.not_found", + "service": "service.not_found", }, ] ) @@ -5785,7 +5799,7 @@ async def test_continue_on_error_automation_issue(hass: HomeAssistant) -> None: { "0": [ { - "error": "Action service.not_found not found", + "error": "Service service.not_found not found", "result": { "params": { "domain": "service", @@ -5820,7 +5834,7 @@ async def test_continue_on_error_unknown_error(hass: HomeAssistant) -> None: [ { "continue_on_error": True, - "action": "some.service", + "service": "some.service", }, ] ) @@ -5870,7 +5884,7 @@ async def test_disabled_actions( { "alias": "Hello", "enabled": enabled_value, - "action": "broken.service", + "service": "broken.service", }, { "alias": "World", @@ -6241,7 +6255,7 @@ async def test_disallowed_recursion( context = Context() calls = 0 alias = "event step" - sequence1 = cv.SCRIPT_SCHEMA({"alias": alias, "action": "test.call_script_2"}) + sequence1 = cv.SCRIPT_SCHEMA({"alias": alias, "service": "test.call_script_2"}) script1_obj = script.Script( hass, sequence1, @@ -6251,7 +6265,7 @@ async def test_disallowed_recursion( running_description="test script1", ) - sequence2 = cv.SCRIPT_SCHEMA({"alias": alias, "action": "test.call_script_3"}) + sequence2 = cv.SCRIPT_SCHEMA({"alias": alias, "service": "test.call_script_3"}) script2_obj = script.Script( hass, sequence2, @@ -6261,7 +6275,7 @@ async def test_disallowed_recursion( running_description="test script2", ) - sequence3 = cv.SCRIPT_SCHEMA({"alias": alias, "action": "test.call_script_1"}) + sequence3 = cv.SCRIPT_SCHEMA({"alias": alias, "service": "test.call_script_1"}) script3_obj = script.Script( hass, sequence3, @@ -6301,43 +6315,3 @@ async def test_disallowed_recursion( "- test_domain2.Test Name2\n" "- test_domain3.Test Name3" ) in caplog.text - - -async def test_calling_service_backwards_compatible( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test the calling of a service with the service instead of the action key.""" - context = Context() - calls = async_mock_service(hass, "test", "script") - - alias = "service step" - sequence = cv.SCRIPT_SCHEMA( - {"alias": alias, 
"service": "test.script", "data": {"hello": "{{ 'world' }}"}} - ) - script_obj = script.Script(hass, sequence, "Test Name", "test_domain") - - await script_obj.async_run(context=context) - await hass.async_block_till_done() - - assert len(calls) == 1 - assert calls[0].context is context - assert calls[0].data.get("hello") == "world" - assert f"Executing step {alias}" in caplog.text - - assert_action_trace( - { - "0": [ - { - "result": { - "params": { - "domain": "test", - "service": "script", - "service_data": {"hello": "world"}, - "target": {}, - }, - "running_script": False, - } - } - ], - } - ) diff --git a/tests/helpers/test_script_variables.py b/tests/helpers/test_script_variables.py index 3675c857279..ca942acdf66 100644 --- a/tests/helpers/test_script_variables.py +++ b/tests/helpers/test_script_variables.py @@ -3,8 +3,7 @@ import pytest from homeassistant.core import HomeAssistant -from homeassistant.exceptions import TemplateError -from homeassistant.helpers import config_validation as cv +from homeassistant.helpers import config_validation as cv, template async def test_static_vars() -> None: @@ -111,5 +110,5 @@ async def test_template_vars_run_args_no_default(hass: HomeAssistant) -> None: async def test_template_vars_error(hass: HomeAssistant) -> None: """Test template vars.""" var = cv.SCRIPT_VARIABLES_SCHEMA({"hello": "{{ canont.work }}"}) - with pytest.raises(TemplateError): + with pytest.raises(template.TemplateError): var.async_render(hass, None) diff --git a/tests/helpers/test_selector.py b/tests/helpers/test_selector.py index f73808a0625..e93ec3b8c22 100644 --- a/tests/helpers/test_selector.py +++ b/tests/helpers/test_selector.py @@ -1,7 +1,6 @@ """Test selectors.""" from enum import Enum -from typing import Any import pytest import voluptuous as vol @@ -740,13 +739,12 @@ def test_attribute_selector_schema( ( {"seconds": 10}, {"days": 10}, # Days is allowed also if `enable_day` is not set - {"milliseconds": 500}, ), (None, {}), ), ( - {"enable_day": True, "enable_millisecond": True}, - ({"seconds": 10}, {"days": 10}, {"milliseconds": 500}), + {"enable_day": True}, + ({"seconds": 10}, {"days": 10}), (None, {}), ), ( @@ -1108,13 +1106,6 @@ def test_condition_selector_schema( ( {}, ( - [ - { - "platform": "numeric_state", - "entity_id": ["sensor.temperature"], - "below": 20, - } - ], [ { "platform": "numeric_state", @@ -1130,24 +1121,7 @@ def test_condition_selector_schema( ) def test_trigger_selector_schema(schema, valid_selections, invalid_selections) -> None: """Test trigger sequence selector.""" - - def _custom_trigger_serializer( - triggers: list[dict[str, Any]], - ) -> list[dict[str, Any]]: - res = [] - for trigger in triggers: - if "trigger" in trigger: - trigger["platform"] = trigger.pop("trigger") - res.append(trigger) - return res - - _test_selector( - "trigger", - schema, - valid_selections, - invalid_selections, - _custom_trigger_serializer, - ) + _test_selector("trigger", schema, valid_selections, invalid_selections) @pytest.mark.parametrize( diff --git a/tests/helpers/test_service.py b/tests/helpers/test_service.py index d0e1aa34340..9c5cda67725 100644 --- a/tests/helpers/test_service.py +++ b/tests/helpers/test_service.py @@ -39,6 +39,7 @@ from homeassistant.helpers import ( device_registry as dr, entity_registry as er, service, + template, ) import homeassistant.helpers.config_validation as cv from homeassistant.loader import async_get_integration @@ -119,6 +120,7 @@ def floor_area_mock(hass: HomeAssistant) -> None: id="test-area", name="Test area", 
aliases={}, + normalized_name="test-area", floor_id="test-floor", icon=None, picture=None, @@ -127,6 +129,7 @@ def floor_area_mock(hass: HomeAssistant) -> None: id="area-a", name="Area A", aliases={}, + normalized_name="area-a", floor_id="floor-a", icon=None, picture=None, @@ -280,6 +283,7 @@ def label_mock(hass: HomeAssistant) -> None: id="area-with-labels", name="Area with labels", aliases={}, + normalized_name="with_labels", floor_id=None, icon=None, labels={"label_area"}, @@ -289,6 +293,7 @@ def label_mock(hass: HomeAssistant) -> None: id="area-no-labels", name="Area without labels", aliases={}, + normalized_name="without_labels", floor_id=None, icon=None, labels=set(), @@ -347,13 +352,6 @@ def label_mock(hass: HomeAssistant) -> None: platform="test", device_id=device_has_label1.id, ) - entity_with_label1_from_device_and_different_area = er.RegistryEntry( - entity_id="light.with_label1_from_device_diff_area", - unique_id="with_label1_from_device_diff_area", - platform="test", - device_id=device_has_label1.id, - area_id=area_without_labels.id, - ) entity_with_label1_and_label2_from_device = er.RegistryEntry( entity_id="light.with_label1_and_label2_from_device", unique_id="with_label1_and_label2_from_device", @@ -380,7 +378,6 @@ def label_mock(hass: HomeAssistant) -> None: config_entity_with_my_label.entity_id: config_entity_with_my_label, entity_with_label1_and_label2_from_device.entity_id: entity_with_label1_and_label2_from_device, entity_with_label1_from_device.entity_id: entity_with_label1_from_device, - entity_with_label1_from_device_and_different_area.entity_id: entity_with_label1_from_device_and_different_area, entity_with_labels_from_device.entity_id: entity_with_labels_from_device, entity_with_my_label.entity_id: entity_with_my_label, entity_with_no_labels.entity_id: entity_with_no_labels, @@ -408,7 +405,7 @@ async def test_service_call(hass: HomeAssistant) -> None: """Test service call with templating.""" calls = async_mock_service(hass, "test_domain", "test_service") config = { - "action": "{{ 'test_domain.test_service' }}", + "service": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data": { "hello": "{{ 'goodbye' }}", @@ -438,7 +435,7 @@ async def test_service_call(hass: HomeAssistant) -> None: } config = { - "action": "{{ 'test_domain.test_service' }}", + "service": "{{ 'test_domain.test_service' }}", "target": { "area_id": ["area-42", "{{ 'area-51' }}"], "device_id": ["abcdef", "{{ 'fedcba' }}"], @@ -458,7 +455,7 @@ async def test_service_call(hass: HomeAssistant) -> None: } config = { - "action": "{{ 'test_domain.test_service' }}", + "service": "{{ 'test_domain.test_service' }}", "target": "{{ var_target }}", } @@ -545,19 +542,19 @@ async def test_split_entity_string(hass: HomeAssistant) -> None: await service.async_call_from_config( hass, { - "action": "test_domain.test_service", + "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) await hass.async_block_till_done() - assert calls[-1].data.get("entity_id") == ["hello.world", "sensor.beer"] + assert ["hello.world", "sensor.beer"] == calls[-1].data.get("entity_id") async def test_not_mutate_input(hass: HomeAssistant) -> None: """Test for immutable input.""" async_mock_service(hass, "test_domain", "test_service") config = { - "action": "test_domain.test_service", + "service": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, @@ -568,6 +565,9 @@ async def 
test_not_mutate_input(hass: HomeAssistant) -> None: config = cv.SERVICE_SCHEMA(config) orig = cv.SERVICE_SCHEMA(orig) + # Only change after call is each template getting hass attached + template.attach(hass, orig) + await service.async_call_from_config(hass, config, validate_config=False) assert orig == config @@ -581,7 +581,7 @@ async def test_fail_silently_if_no_service(mock_log, hass: HomeAssistant) -> Non await service.async_call_from_config(hass, {}) assert mock_log.call_count == 2 - await service.async_call_from_config(hass, {"action": "invalid"}) + await service.async_call_from_config(hass, {"service": "invalid"}) assert mock_log.call_count == 3 @@ -597,7 +597,7 @@ async def test_service_call_entry_id( assert entry.entity_id == "hello.world" config = { - "action": "test_domain.test_service", + "service": "test_domain.test_service", "target": {"entity_id": entry.id}, } @@ -613,7 +613,7 @@ async def test_service_call_all_none(hass: HomeAssistant, target) -> None: calls = async_mock_service(hass, "test_domain", "test_service") config = { - "action": "test_domain.test_service", + "service": "test_domain.test_service", "target": {"entity_id": target}, } @@ -762,7 +762,6 @@ async def test_extract_entity_ids_from_labels(hass: HomeAssistant) -> None: assert { "light.with_label1_from_device", - "light.with_label1_from_device_diff_area", "light.with_labels_from_device", "light.with_label1_and_label2_from_device", } == await service.async_extract_entity_ids(hass, call) @@ -1793,10 +1792,10 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non call_1 = ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) - assert [ + assert ["test_domain.test_1", "test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) - ] == ["test_domain.test_1", "test_domain.test_3"] + ] call_2 = ServiceCall( "test", @@ -1804,10 +1803,10 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) - assert [ + assert ["test_domain.test_3"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) - ] == ["test_domain.test_3"] + ] assert ( await service.async_extract_entities( @@ -1831,10 +1830,10 @@ async def test_extract_from_service_empty_if_no_entity_id(hass: HomeAssistant) - ] call = ServiceCall("test", "service") - assert [ + assert [] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) - ] == [] + ] async def test_extract_from_service_filter_out_non_existing_entities( @@ -1852,10 +1851,10 @@ async def test_extract_from_service_filter_out_non_existing_entities( {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) - assert [ + assert ["test_domain.test_2"] == [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) - ] == ["test_domain.test_2"] + ] async def test_extract_from_service_area_id( diff --git a/tests/helpers/test_significant_change.py b/tests/helpers/test_significant_change.py index 577ea5907e5..f9dca5b6034 100644 --- a/tests/helpers/test_significant_change.py +++ b/tests/helpers/test_significant_change.py @@ -1,8 +1,5 @@ """Test significant change helper.""" -from types import MappingProxyType -from typing import Any - import pytest from homeassistant.components.sensor import SensorDeviceClass @@ -70,14 +67,8 @@ async def test_significant_change_extra( assert 
checker.async_is_significant_change(State(ent_id, "100", attrs), extra_arg=1) def extra_significant_check( - hass: HomeAssistant, - old_state: str, - old_attrs: dict | MappingProxyType, - old_extra_arg: Any, - new_state: str, - new_attrs: dict | MappingProxyType, - new_extra_arg: Any, - ) -> bool | None: + hass, old_state, old_attrs, old_extra_arg, new_state, new_attrs, new_extra_arg + ): return old_extra_arg != new_extra_arg checker.extra_significant_check = extra_significant_check diff --git a/tests/helpers/test_singleton.py b/tests/helpers/test_singleton.py index 4722c58dc9f..dcda1e2db3a 100644 --- a/tests/helpers/test_singleton.py +++ b/tests/helpers/test_singleton.py @@ -1,11 +1,9 @@ """Test singleton helper.""" -from typing import Any from unittest.mock import Mock import pytest -from homeassistant.core import HomeAssistant from homeassistant.helpers import singleton @@ -16,11 +14,11 @@ def mock_hass(): @pytest.mark.parametrize("result", [object(), {}, []]) -async def test_singleton_async(mock_hass: HomeAssistant, result: Any) -> None: +async def test_singleton_async(mock_hass, result) -> None: """Test singleton with async function.""" @singleton.singleton("test_key") - async def something(hass: HomeAssistant) -> Any: + async def something(hass): return result result1 = await something(mock_hass) @@ -32,11 +30,11 @@ async def test_singleton_async(mock_hass: HomeAssistant, result: Any) -> None: @pytest.mark.parametrize("result", [object(), {}, []]) -def test_singleton(mock_hass: HomeAssistant, result: Any) -> None: +def test_singleton(mock_hass, result) -> None: """Test singleton with function.""" @singleton.singleton("test_key") - def something(hass: HomeAssistant) -> Any: + def something(hass): return result result1 = something(mock_hass) diff --git a/tests/helpers/test_start.py b/tests/helpers/test_start.py index bd6b328a2c7..d9c6bbf441c 100644 --- a/tests/helpers/test_start.py +++ b/tests/helpers/test_start.py @@ -14,7 +14,7 @@ async def test_at_start_when_running_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass: HomeAssistant) -> None: + async def cb_at_start(hass): """Home Assistant is started.""" calls.append(1) @@ -40,7 +40,7 @@ async def test_at_start_when_running_callback( calls = [] @callback - def cb_at_start(hass: HomeAssistant) -> None: + def cb_at_start(hass): """Home Assistant is started.""" calls.append(1) @@ -65,7 +65,7 @@ async def test_at_start_when_starting_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass: HomeAssistant) -> None: + async def cb_at_start(hass): """Home Assistant is started.""" calls.append(1) @@ -88,7 +88,7 @@ async def test_at_start_when_starting_callback( calls = [] @callback - def cb_at_start(hass: HomeAssistant) -> None: + def cb_at_start(hass): """Home Assistant is started.""" calls.append(1) @@ -116,7 +116,7 @@ async def test_cancelling_at_start_when_running( calls = [] - async def cb_at_start(hass: HomeAssistant) -> None: + async def cb_at_start(hass): """Home Assistant is started.""" calls.append(1) @@ -137,7 +137,7 @@ async def test_cancelling_at_start_when_starting(hass: HomeAssistant) -> None: calls = [] @callback - def cb_at_start(hass: HomeAssistant) -> None: + def cb_at_start(hass): """Home Assistant is started.""" calls.append(1) @@ -156,7 +156,7 @@ async def test_at_started_when_running_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass: HomeAssistant) -> None: + async def cb_at_start(hass): """Home Assistant is started.""" calls.append(1) @@ 
-181,7 +181,7 @@ async def test_at_started_when_running_callback( calls = [] @callback - def cb_at_start(hass: HomeAssistant) -> None: + def cb_at_start(hass): """Home Assistant is started.""" calls.append(1) @@ -205,7 +205,7 @@ async def test_at_started_when_starting_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass: HomeAssistant) -> None: + async def cb_at_start(hass): """Home Assistant is started.""" calls.append(1) @@ -231,7 +231,7 @@ async def test_at_started_when_starting_callback( calls = [] @callback - def cb_at_start(hass: HomeAssistant) -> None: + def cb_at_start(hass): """Home Assistant is started.""" calls.append(1) @@ -263,7 +263,7 @@ async def test_cancelling_at_started_when_running( calls = [] - async def cb_at_start(hass: HomeAssistant) -> None: + async def cb_at_start(hass): """Home Assistant is started.""" calls.append(1) @@ -284,7 +284,7 @@ async def test_cancelling_at_started_when_starting(hass: HomeAssistant) -> None: calls = [] @callback - def cb_at_start(hass: HomeAssistant) -> None: + def cb_at_start(hass): """Home Assistant is started.""" calls.append(1) diff --git a/tests/helpers/test_state.py b/tests/helpers/test_state.py index ea7c1f6827f..150f31f5fe9 100644 --- a/tests/helpers/test_state.py +++ b/tests/helpers/test_state.py @@ -5,17 +5,18 @@ from unittest.mock import patch import pytest -from homeassistant.components.lock import LockState from homeassistant.components.sun import STATE_ABOVE_HORIZON, STATE_BELOW_HORIZON from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_CLOSED, STATE_HOME, + STATE_LOCKED, STATE_NOT_HOME, STATE_OFF, STATE_ON, STATE_OPEN, + STATE_UNLOCKED, ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers import state @@ -142,17 +143,11 @@ async def test_as_number_states(hass: HomeAssistant) -> None: zero_states = ( STATE_OFF, STATE_CLOSED, - LockState.UNLOCKED, + STATE_UNLOCKED, STATE_BELOW_HORIZON, STATE_NOT_HOME, ) - one_states = ( - STATE_ON, - STATE_OPEN, - LockState.LOCKED, - STATE_ABOVE_HORIZON, - STATE_HOME, - ) + one_states = (STATE_ON, STATE_OPEN, STATE_LOCKED, STATE_ABOVE_HORIZON, STATE_HOME) for _state in zero_states: assert state.state_as_number(State("domain.test", _state, {})) == 0 for _state in one_states: diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index b8c6b5a25af..3123c01f500 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -15,7 +15,6 @@ from unittest.mock import patch from freezegun import freeze_time import orjson import pytest -from syrupy import SnapshotAssertion import voluptuous as vol from homeassistant import config_entries @@ -4549,7 +4548,7 @@ async def test_async_render_to_info_with_wildcard_matching_state( hass.states.async_set("cover.office_window", "closed") hass.states.async_set("cover.office_skylight", "open") hass.states.async_set("cover.x_skylight", "open") - hass.states.async_set("binary_sensor.door", "on") + hass.states.async_set("binary_sensor.door", "open") await hass.async_block_till_done() info = render_to_info(hass, template_complex_str) @@ -4559,7 +4558,7 @@ async def test_async_render_to_info_with_wildcard_matching_state( assert info.all_states is True assert info.rate_limit == template.ALL_STATES_RATE_LIMIT - hass.states.async_set("binary_sensor.door", "off") + hass.states.async_set("binary_sensor.door", "closed") info = render_to_info(hass, template_complex_str) assert not info.domains @@ -6237,348 +6236,3 @@ async def 
test_template_thread_safety_checks(hass: HomeAssistant) -> None: await hass.async_add_executor_job(template_obj.async_render_to_info) assert template_obj.async_render_to_info().result() == 23 - - -@pytest.mark.parametrize( - ("cola", "colb", "expected"), - [ - ([1, 2], [3, 4], [(1, 3), (2, 4)]), - ([1, 2], [3, 4, 5], [(1, 3), (2, 4)]), - ([1, 2, 3, 4], [3, 4], [(1, 3), (2, 4)]), - ], -) -def test_zip(hass: HomeAssistant, cola, colb, expected) -> None: - """Test zip.""" - assert ( - template.Template("{{ zip(cola, colb) | list }}", hass).async_render( - {"cola": cola, "colb": colb} - ) - == expected - ) - assert ( - template.Template( - "[{% for a, b in zip(cola, colb) %}({{a}}, {{b}}), {% endfor %}]", hass - ).async_render({"cola": cola, "colb": colb}) - == expected - ) - - -@pytest.mark.parametrize( - ("col", "expected"), - [ - ([(1, 3), (2, 4)], [(1, 2), (3, 4)]), - (["ax", "by", "cz"], [("a", "b", "c"), ("x", "y", "z")]), - ], -) -def test_unzip(hass: HomeAssistant, col, expected) -> None: - """Test unzipping using zip.""" - assert ( - template.Template("{{ zip(*col) | list }}", hass).async_render({"col": col}) - == expected - ) - assert ( - template.Template( - "{% set a, b = zip(*col) %}[{{a}}, {{b}}]", hass - ).async_render({"col": col}) - == expected - ) - - -def test_template_output_exceeds_maximum_size(hass: HomeAssistant) -> None: - """Test template output exceeds maximum size.""" - tpl = template.Template("{{ 'a' * 1024 * 257 }}", hass) - with pytest.raises(TemplateError): - tpl.async_render() - - -@pytest.mark.parametrize( - ("service_response"), - [ - { - "calendar.sports": { - "events": [ - { - "start": "2024-02-27T17:00:00-06:00", - "end": "2024-02-27T18:00:00-06:00", - "summary": "Basketball vs. Rockets", - "description": "", - } - ] - }, - "calendar.local_furry_events": {"events": []}, - "calendar.yap_house_schedules": { - "events": [ - { - "start": "2024-02-26T08:00:00-06:00", - "end": "2024-02-26T09:00:00-06:00", - "summary": "Dr. 
Appt", - "description": "", - }, - { - "start": "2024-02-28T20:00:00-06:00", - "end": "2024-02-28T21:00:00-06:00", - "summary": "Bake a cake", - "description": "something good", - }, - ] - }, - }, - { - "binary_sensor.workday": {"workday": True}, - "binary_sensor.workday2": {"workday": False}, - }, - { - "weather.smhi_home": { - "forecast": [ - { - "datetime": "2024-03-31T16:00:00", - "condition": "cloudy", - "wind_bearing": 79, - "cloud_coverage": 100, - "temperature": 10, - "templow": 4, - "pressure": 998, - "wind_gust_speed": 21.6, - "wind_speed": 11.88, - "precipitation": 0.2, - "humidity": 87, - }, - { - "datetime": "2024-04-01T12:00:00", - "condition": "rainy", - "wind_bearing": 17, - "cloud_coverage": 100, - "temperature": 6, - "templow": 1, - "pressure": 999, - "wind_gust_speed": 20.52, - "wind_speed": 8.64, - "precipitation": 2.2, - "humidity": 88, - }, - { - "datetime": "2024-04-02T12:00:00", - "condition": "cloudy", - "wind_bearing": 17, - "cloud_coverage": 100, - "temperature": 0, - "templow": -3, - "pressure": 1003, - "wind_gust_speed": 57.24, - "wind_speed": 30.6, - "precipitation": 1.3, - "humidity": 71, - }, - ] - }, - "weather.forecast_home": { - "forecast": [ - { - "condition": "cloudy", - "precipitation_probability": 6.6, - "datetime": "2024-03-31T10:00:00+00:00", - "wind_bearing": 71.8, - "temperature": 10.9, - "templow": 6.5, - "wind_gust_speed": 24.1, - "wind_speed": 13.7, - "precipitation": 0, - "humidity": 71, - }, - { - "condition": "cloudy", - "precipitation_probability": 8, - "datetime": "2024-04-01T10:00:00+00:00", - "wind_bearing": 350.6, - "temperature": 10.2, - "templow": 3.4, - "wind_gust_speed": 38.2, - "wind_speed": 21.6, - "precipitation": 0, - "humidity": 79, - }, - { - "condition": "snowy", - "precipitation_probability": 67.4, - "datetime": "2024-04-02T10:00:00+00:00", - "wind_bearing": 24.5, - "temperature": 3, - "templow": 0, - "wind_gust_speed": 64.8, - "wind_speed": 37.4, - "precipitation": 2.3, - "humidity": 77, - }, - ] - }, - }, - { - "vacuum.deebot_n8_plus_1": { - "payloadType": "j", - "resp": { - "body": { - "msg": "ok", - } - }, - "header": { - "ver": "0.0.1", - }, - }, - "vacuum.deebot_n8_plus_2": { - "payloadType": "j", - "resp": { - "body": { - "msg": "ok", - } - }, - "header": { - "ver": "0.0.1", - }, - }, - }, - ], - ids=["calendar", "workday", "weather", "vacuum"], -) -async def test_merge_response( - hass: HomeAssistant, - service_response: dict, - snapshot: SnapshotAssertion, -) -> None: - """Test the merge_response function/filter.""" - - _template = "{{ merge_response(" + str(service_response) + ") }}" - - tpl = template.Template(_template, hass) - assert service_response == snapshot(name="a_response") - assert tpl.async_render() == snapshot(name="b_rendered") - - -async def test_merge_response_with_entity_id_in_response( - hass: HomeAssistant, - snapshot: SnapshotAssertion, -) -> None: - """Test the merge_response function/filter with empty lists.""" - - service_response = { - "test.response": {"some_key": True, "entity_id": "test.response"}, - "test.response2": {"some_key": False, "entity_id": "test.response2"}, - } - _template = "{{ merge_response(" + str(service_response) + ") }}" - with pytest.raises( - TemplateError, - match="ValueError: Response dictionary already contains key 'entity_id'", - ): - template.Template(_template, hass).async_render() - - service_response = { - "test.response": { - "happening": [ - { - "start": "2024-02-27T17:00:00-06:00", - "end": "2024-02-27T18:00:00-06:00", - "summary": "Magic day", - 
"entity_id": "test.response", - } - ] - } - } - _template = "{{ merge_response(" + str(service_response) + ") }}" - with pytest.raises( - TemplateError, - match="ValueError: Response dictionary already contains key 'entity_id'", - ): - template.Template(_template, hass).async_render() - - -async def test_merge_response_with_empty_response( - hass: HomeAssistant, - snapshot: SnapshotAssertion, -) -> None: - """Test the merge_response function/filter with empty lists.""" - - service_response = { - "calendar.sports": {"events": []}, - "calendar.local_furry_events": {"events": []}, - "calendar.yap_house_schedules": {"events": []}, - } - _template = "{{ merge_response(" + str(service_response) + ") }}" - tpl = template.Template(_template, hass) - assert service_response == snapshot(name="a_response") - assert tpl.async_render() == snapshot(name="b_rendered") - - -async def test_response_empty_dict( - hass: HomeAssistant, - snapshot: SnapshotAssertion, -) -> None: - """Test the merge_response function/filter with empty dict.""" - - service_response = {} - _template = "{{ merge_response(" + str(service_response) + ") }}" - tpl = template.Template(_template, hass) - assert tpl.async_render() == [] - - -async def test_response_incorrect_value( - hass: HomeAssistant, - snapshot: SnapshotAssertion, -) -> None: - """Test the merge_response function/filter with incorrect response.""" - - service_response = "incorrect" - _template = "{{ merge_response(" + str(service_response) + ") }}" - with pytest.raises(TemplateError, match="TypeError: Response is not a dictionary"): - template.Template(_template, hass).async_render() - - -async def test_merge_response_with_incorrect_response(hass: HomeAssistant) -> None: - """Test the merge_response function/filter with empty response should raise.""" - - service_response = {"calendar.sports": []} - _template = "{{ merge_response(" + str(service_response) + ") }}" - tpl = template.Template(_template, hass) - with pytest.raises(TemplateError, match="TypeError: Response is not a dictionary"): - tpl.async_render() - - service_response = { - "binary_sensor.workday": [], - } - _template = "{{ merge_response(" + str(service_response) + ") }}" - tpl = template.Template(_template, hass) - with pytest.raises(TemplateError, match="TypeError: Response is not a dictionary"): - tpl.async_render() - - -def test_warn_no_hass(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) -> None: - """Test deprecation warning when instantiating Template without hass.""" - - message = "Detected code that creates a template object without passing hass" - template.Template("blah") - assert message in caplog.text - caplog.clear() - - template.Template("blah", None) - assert message in caplog.text - caplog.clear() - - template.Template("blah", hass) - assert message not in caplog.text - caplog.clear() - - -async def test_merge_response_not_mutate_original_object( - hass: HomeAssistant, snapshot: SnapshotAssertion -) -> None: - """Test the merge_response does not mutate original service response value.""" - - value = '{"calendar.family": {"events": [{"summary": "An event"}]}' - _template = ( - "{% set calendar_response = " + value + "} %}" - "{{ merge_response(calendar_response) }}" - # We should be able to merge the same response again - # as the merge is working on a copy of the original object (response) - "{{ merge_response(calendar_response) }}" - ) - - tpl = template.Template(_template, hass) - assert tpl.async_render() diff --git a/tests/helpers/test_translation.py 
b/tests/helpers/test_translation.py index 3b60c7f695b..73cd243a0c6 100644 --- a/tests/helpers/test_translation.py +++ b/tests/helpers/test_translation.py @@ -425,10 +425,10 @@ async def test_caching(hass: HomeAssistant) -> None: side_effect=translation.build_resources, ) as mock_build_resources: load1 = await translation.async_get_translations(hass, "en", "entity_component") - assert len(mock_build_resources.mock_calls) == 7 + assert len(mock_build_resources.mock_calls) == 6 load2 = await translation.async_get_translations(hass, "en", "entity_component") - assert len(mock_build_resources.mock_calls) == 7 + assert len(mock_build_resources.mock_calls) == 6 assert load1 == load2 diff --git a/tests/helpers/test_trigger.py b/tests/helpers/test_trigger.py index 77f48be170b..0bd5da0707c 100644 --- a/tests/helpers/test_trigger.py +++ b/tests/helpers/test_trigger.py @@ -20,7 +20,7 @@ async def test_bad_trigger_platform(hass: HomeAssistant) -> None: """Test bad trigger platform.""" with pytest.raises(vol.Invalid) as ex: await async_validate_trigger_config(hass, [{"platform": "not_a_platform"}]) - assert "Invalid trigger 'not_a_platform' specified" in str(ex) + assert "Invalid platform 'not_a_platform' specified" in str(ex) async def test_trigger_subtype(hass: HomeAssistant) -> None: @@ -159,70 +159,6 @@ async def test_trigger_enabled_templates( assert len(service_calls) == 2 -async def test_nested_trigger_list( - hass: HomeAssistant, service_calls: list[ServiceCall] -) -> None: - """Test triggers within nested list.""" - - assert await async_setup_component( - hass, - "automation", - { - "automation": { - "trigger": [ - { - "triggers": { - "platform": "event", - "event_type": "trigger_1", - }, - }, - { - "platform": "event", - "event_type": "trigger_2", - }, - {"triggers": []}, - {"triggers": None}, - { - "triggers": [ - { - "platform": "event", - "event_type": "trigger_3", - }, - { - "platform": "event", - "event_type": "trigger_4", - }, - ], - }, - ], - "action": { - "service": "test.automation", - }, - } - }, - ) - - hass.bus.async_fire("trigger_1") - await hass.async_block_till_done() - assert len(service_calls) == 1 - - hass.bus.async_fire("trigger_2") - await hass.async_block_till_done() - assert len(service_calls) == 2 - - hass.bus.async_fire("trigger_none") - await hass.async_block_till_done() - assert len(service_calls) == 2 - - hass.bus.async_fire("trigger_3") - await hass.async_block_till_done() - assert len(service_calls) == 3 - - hass.bus.async_fire("trigger_4") - await hass.async_block_till_done() - assert len(service_calls) == 4 - - async def test_trigger_enabled_template_limited( hass: HomeAssistant, service_calls: list[ServiceCall], diff --git a/tests/helpers/test_update_coordinator.py b/tests/helpers/test_update_coordinator.py index 50da0ab6332..8633bf862a5 100644 --- a/tests/helpers/test_update_coordinator.py +++ b/tests/helpers/test_update_coordinator.py @@ -13,12 +13,8 @@ import requests from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import CoreState, HomeAssistant, callback -from homeassistant.exceptions import ( - ConfigEntryAuthFailed, - ConfigEntryError, - ConfigEntryNotReady, -) -from homeassistant.helpers import frame, update_coordinator +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import update_coordinator from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, async_fire_time_changed @@ -57,9 +53,7 @@ KNOWN_ERRORS: 
list[tuple[Exception, type[Exception], str]] = [ def get_crd( - hass: HomeAssistant, - update_interval: timedelta | None, - config_entry: config_entries.ConfigEntry | None = None, + hass: HomeAssistant, update_interval: timedelta | None ) -> update_coordinator.DataUpdateCoordinator[int]: """Make coordinator mocks.""" calls = 0 @@ -72,7 +66,6 @@ def get_crd( return update_coordinator.DataUpdateCoordinator[int]( hass, _LOGGER, - config_entry=config_entry, name="test", update_method=refresh, update_interval=update_interval, @@ -124,7 +117,8 @@ async def test_async_refresh( async def test_shutdown( - hass: HomeAssistant, crd: update_coordinator.DataUpdateCoordinator[int] + hass: HomeAssistant, + crd: update_coordinator.DataUpdateCoordinator[int], ) -> None: """Test async_shutdown for update coordinator.""" assert crd.data is None @@ -160,7 +154,8 @@ async def test_shutdown( async def test_shutdown_on_entry_unload( - hass: HomeAssistant, crd: update_coordinator.DataUpdateCoordinator[int] + hass: HomeAssistant, + crd: update_coordinator.DataUpdateCoordinator[int], ) -> None: """Test shutdown is requested on entry unload.""" entry = MockConfigEntry() @@ -192,7 +187,8 @@ async def test_shutdown_on_entry_unload( async def test_shutdown_on_hass_stop( - hass: HomeAssistant, crd: update_coordinator.DataUpdateCoordinator[int] + hass: HomeAssistant, + crd: update_coordinator.DataUpdateCoordinator[int], ) -> None: """Test shutdown can be shutdown on STOP event.""" calls = 0 @@ -529,19 +525,11 @@ async def test_stop_refresh_on_ha_stop( @pytest.mark.parametrize( "err_msg", - [ - *KNOWN_ERRORS, - (Exception(), Exception, "Unknown exception"), - ], -) -@pytest.mark.parametrize( - "method", - ["update_method", "setup_method"], + KNOWN_ERRORS, ) async def test_async_config_entry_first_refresh_failure( - hass: HomeAssistant, err_msg: tuple[Exception, type[Exception], str], - method: str, + crd: update_coordinator.DataUpdateCoordinator[int], caplog: pytest.LogCaptureFixture, ) -> None: """Test async_config_entry_first_refresh raises ConfigEntryNotReady on failure. @@ -550,12 +538,7 @@ async def test_async_config_entry_first_refresh_failure( will be caught by config_entries.async_setup which will log it with a decreasing level of logging once the first message is logged. """ - entry = MockConfigEntry() - entry._async_set_state( - hass, config_entries.ConfigEntryState.SETUP_IN_PROGRESS, None - ) - crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) - setattr(crd, method, AsyncMock(side_effect=err_msg[0])) + crd.update_method = AsyncMock(side_effect=err_msg[0]) with pytest.raises(ConfigEntryNotReady): await crd.async_config_entry_first_refresh() @@ -565,113 +548,13 @@ async def test_async_config_entry_first_refresh_failure( assert err_msg[2] not in caplog.text -@pytest.mark.parametrize( - "err_msg", - [ - (ConfigEntryError(), ConfigEntryError, "Config entry error"), - (ConfigEntryAuthFailed(), ConfigEntryAuthFailed, "Config entry error"), - ], -) -@pytest.mark.parametrize( - "method", - ["update_method", "setup_method"], -) -async def test_async_config_entry_first_refresh_failure_passed_through( - hass: HomeAssistant, - err_msg: tuple[Exception, type[Exception], str], - method: str, - caplog: pytest.LogCaptureFixture, +async def test_async_config_entry_first_refresh_success( + crd: update_coordinator.DataUpdateCoordinator[int], caplog: pytest.LogCaptureFixture ) -> None: - """Test async_config_entry_first_refresh passes through ConfigEntryError & ConfigEntryAuthFailed. 
- - Verify we do not log the exception since it - will be caught by config_entries.async_setup which will log it with - a decreasing level of logging once the first message is logged. - """ - entry = MockConfigEntry() - entry._async_set_state( - hass, config_entries.ConfigEntryState.SETUP_IN_PROGRESS, None - ) - crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) - setattr(crd, method, AsyncMock(side_effect=err_msg[0])) - - with pytest.raises(err_msg[1]): - await crd.async_config_entry_first_refresh() - - assert crd.last_update_success is False - assert isinstance(crd.last_exception, err_msg[1]) - assert err_msg[2] not in caplog.text - - -async def test_async_config_entry_first_refresh_success(hass: HomeAssistant) -> None: """Test first refresh successfully.""" - entry = MockConfigEntry() - entry._async_set_state( - hass, config_entries.ConfigEntryState.SETUP_IN_PROGRESS, None - ) - crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) - crd.setup_method = AsyncMock() await crd.async_config_entry_first_refresh() assert crd.last_update_success is True - crd.setup_method.assert_called_once() - - -async def test_async_config_entry_first_refresh_invalid_state( - hass: HomeAssistant, -) -> None: - """Test first refresh fails due to invalid state.""" - entry = MockConfigEntry() - crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) - crd.setup_method = AsyncMock() - with pytest.raises( - RuntimeError, - match="Detected code that uses `async_config_entry_first_refresh`, which " - "is only supported when entry state is ConfigEntryState.SETUP_IN_PROGRESS, " - "but it is in state ConfigEntryState.NOT_LOADED. This will stop working " - "in Home Assistant 2025.11. Please report this issue.", - ): - await crd.async_config_entry_first_refresh() - - assert crd.last_update_success is True - crd.setup_method.assert_not_called() - - -@pytest.mark.usefixtures("mock_integration_frame") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) -async def test_async_config_entry_first_refresh_invalid_state_in_integration( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test first refresh successfully, despite wrong state.""" - entry = MockConfigEntry() - crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) - crd.setup_method = AsyncMock() - - await crd.async_config_entry_first_refresh() - assert crd.last_update_success is True - crd.setup_method.assert_called() - assert ( - "Detected that integration 'hue' uses `async_config_entry_first_refresh`, which " - "is only supported when entry state is ConfigEntryState.SETUP_IN_PROGRESS, " - "but it is in state ConfigEntryState.NOT_LOADED, This will stop working " - "in Home Assistant 2025.11" - ) in caplog.text - - -async def test_async_config_entry_first_refresh_no_entry(hass: HomeAssistant) -> None: - """Test first refresh successfully.""" - crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, None) - crd.setup_method = AsyncMock() - with pytest.raises( - RuntimeError, - match="Detected code that uses `async_config_entry_first_refresh`, " - "which is only supported for coordinators with a config entry and will " - "stop working in Home Assistant 2025.11. 
Please report this issue.", - ): - await crd.async_config_entry_first_refresh() - - assert crd.last_update_success is True - crd.setup_method.assert_not_called() async def test_not_schedule_refresh_if_system_option_disable_polling( @@ -679,7 +562,8 @@ async def test_not_schedule_refresh_if_system_option_disable_polling( ) -> None: """Test we do not schedule a refresh if disable polling in config entry.""" entry = MockConfigEntry(pref_disable_polling=True) - crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) + config_entries.current_entry.set(entry) + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL) crd.async_add_listener(lambda: None) assert crd._unsub_refresh is None @@ -719,7 +603,7 @@ async def test_async_set_update_error( async def test_only_callback_on_change_when_always_update_is_false( - crd: update_coordinator.DataUpdateCoordinator[int], + crd: update_coordinator.DataUpdateCoordinator[int], caplog: pytest.LogCaptureFixture ) -> None: """Test we do not callback listeners unless something has actually changed when always_update is false.""" update_callback = Mock() @@ -789,7 +673,7 @@ async def test_only_callback_on_change_when_always_update_is_false( async def test_always_callback_when_always_update_is_true( - crd: update_coordinator.DataUpdateCoordinator[int], + crd: update_coordinator.DataUpdateCoordinator[int], caplog: pytest.LogCaptureFixture ) -> None: """Test we callback listeners even though the data is the same when always_update is True.""" update_callback = Mock() @@ -863,38 +747,3 @@ async def test_timestamp_date_update_coordinator(hass: HomeAssistant) -> None: unsub() await crd.async_refresh() assert len(last_update_success_times) == 1 - - -async def test_config_entry(hass: HomeAssistant) -> None: - """Test behavior of coordinator.entry.""" - entry = MockConfigEntry() - - # Default without context should be None - crd = update_coordinator.DataUpdateCoordinator[int](hass, _LOGGER, name="test") - assert crd.config_entry is None - - # Explicit None is OK - crd = update_coordinator.DataUpdateCoordinator[int]( - hass, _LOGGER, name="test", config_entry=None - ) - assert crd.config_entry is None - - # Explicit entry is OK - crd = update_coordinator.DataUpdateCoordinator[int]( - hass, _LOGGER, name="test", config_entry=entry - ) - assert crd.config_entry is entry - - # set ContextVar - config_entries.current_entry.set(entry) - - # Default with ContextVar should match the ContextVar - crd = update_coordinator.DataUpdateCoordinator[int](hass, _LOGGER, name="test") - assert crd.config_entry is entry - - # Explicit entry different from ContextVar not recommended, but should work - another_entry = MockConfigEntry() - crd = update_coordinator.DataUpdateCoordinator[int]( - hass, _LOGGER, name="test", config_entry=another_entry - ) - assert crd.config_entry is another_entry diff --git a/tests/patch_recorder.py b/tests/patch_recorder.py deleted file mode 100644 index 4993e84fc30..00000000000 --- a/tests/patch_recorder.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Patch recorder related functions.""" - -from __future__ import annotations - -from contextlib import contextmanager -import sys - -# Patch recorder util session scope -from homeassistant.helpers import recorder as recorder_helper # noqa: E402 - -# Make sure homeassistant.components.recorder.util is not already imported -assert "homeassistant.components.recorder.util" not in sys.modules - -real_session_scope = recorder_helper.session_scope - - -@contextmanager -def _session_scope_wrapper(*args, **kwargs): - """Make session_scope patchable. 
-
-    This function will be imported by recorder modules.
-    """
-    with real_session_scope(*args, **kwargs) as ses:
-        yield ses
-
-
-recorder_helper.session_scope = _session_scope_wrapper
diff --git a/tests/patch_time.py b/tests/patch_time.py
index 362296ab8b2..a93d3c8ec4f 100644
--- a/tests/patch_time.py
+++ b/tests/patch_time.py
@@ -5,7 +5,9 @@ from __future__ import annotations
 import datetime
 import time
 
-# Do not add any Home Assistant import here
+from homeassistant import runner, util
+from homeassistant.helpers import event as event_helper
+from homeassistant.util import dt as dt_util
 
 
 def _utcnow() -> datetime.datetime:
@@ -18,21 +20,10 @@ def _monotonic() -> float:
     return time.monotonic()
 
 
-# Before importing any other Home Assistant functionality, import and replace
-# partial dt_util.utcnow with a regular function which can be found by freezegun
-from homeassistant import util  # noqa: E402
-from homeassistant.util import dt as dt_util  # noqa: E402
-
-dt_util.utcnow = _utcnow  # type: ignore[assignment]
-util.utcnow = _utcnow  # type: ignore[assignment]
-
-
-# Import other Home Assistant functionality which we need to patch
-from homeassistant import runner  # noqa: E402
-from homeassistant.helpers import event as event_helper  # noqa: E402
-
 # Replace partial functions which are not found by freezegun
+dt_util.utcnow = _utcnow  # type: ignore[assignment]
 event_helper.time_tracker_utcnow = _utcnow  # type: ignore[assignment]
+util.utcnow = _utcnow  # type: ignore[assignment]
 
 # Replace bound methods which are not found by freezegun
 runner.monotonic = _monotonic  # type: ignore[assignment]
diff --git a/tests/pylint/conftest.py b/tests/pylint/conftest.py
index 8ae291ac0b7..90e535a7b0e 100644
--- a/tests/pylint/conftest.py
+++ b/tests/pylint/conftest.py
@@ -104,37 +104,22 @@ def enforce_sorted_platforms_checker_fixture(
     return enforce_sorted_platforms_checker
 
 
-@pytest.fixture(name="hass_enforce_class_module", scope="package")
-def hass_enforce_class_module_fixture() -> ModuleType:
-    """Fixture to the content for the hass_enforce_class_module check."""
+@pytest.fixture(name="hass_enforce_coordinator_module", scope="package")
+def hass_enforce_coordinator_module_fixture() -> ModuleType:
+    """Fixture to the content for the hass_enforce_coordinator_module check."""
     return _load_plugin_from_file(
-        "hass_enforce_class_module",
-        "pylint/plugins/hass_enforce_class_module.py",
+        "hass_enforce_coordinator_module",
+        "pylint/plugins/hass_enforce_coordinator_module.py",
     )
 
 
-@pytest.fixture(name="enforce_class_module_checker")
-def enforce_class_module_fixture(hass_enforce_class_module, linter) -> BaseChecker:
-    """Fixture to provide a hass_enforce_class_module checker."""
-    enforce_class_module_checker = hass_enforce_class_module.HassEnforceClassModule(
-        linter
+@pytest.fixture(name="enforce_coordinator_module_checker")
+def enforce_coordinator_module_fixture(
+    hass_enforce_coordinator_module, linter
+) -> BaseChecker:
+    """Fixture to provide a hass_enforce_coordinator_module checker."""
+    enforce_coordinator_module_checker = (
+        hass_enforce_coordinator_module.HassEnforceCoordinatorModule(linter)
     )
-    enforce_class_module_checker.module = "homeassistant.components.pylint_test"
-    return enforce_class_module_checker
-
-
-@pytest.fixture(name="hass_decorator", scope="package")
-def hass_decorator_fixture() -> ModuleType:
-    """Fixture to provide a pylint plugin."""
-    return _load_plugin_from_file(
-        "hass_imports",
-        "pylint/plugins/hass_decorator.py",
-    )
-
-
-@pytest.fixture(name="decorator_checker")
-def
decorator_checker_fixture(hass_decorator, linter) -> BaseChecker: - """Fixture to provide a pylint checker.""" - type_hint_checker = hass_decorator.HassDecoratorChecker(linter) - type_hint_checker.module = "homeassistant.components.pylint_test" - return type_hint_checker + enforce_coordinator_module_checker.module = "homeassistant.components.pylint_test" + return enforce_coordinator_module_checker diff --git a/tests/pylint/test_decorator.py b/tests/pylint/test_decorator.py deleted file mode 100644 index c2e45e5a433..00000000000 --- a/tests/pylint/test_decorator.py +++ /dev/null @@ -1,268 +0,0 @@ -"""Tests for pylint hass_enforce_type_hints plugin.""" - -from __future__ import annotations - -import astroid -from pylint.checkers import BaseChecker -from pylint.interfaces import UNDEFINED -from pylint.testutils import MessageTest -from pylint.testutils.unittest_linter import UnittestLinter -from pylint.utils.ast_walker import ASTWalker -import pytest - -from . import assert_adds_messages, assert_no_messages - - -def test_good_callback(linter: UnittestLinter, decorator_checker: BaseChecker) -> None: - """Test good `@callback` decorator.""" - code = """ - from homeassistant.core import callback - - @callback - def setup( - arg1, arg2 - ): - pass - """ - - root_node = astroid.parse(code) - walker = ASTWalker(linter) - walker.add_checker(decorator_checker) - - with assert_no_messages(linter): - walker.walk(root_node) - - -def test_bad_callback(linter: UnittestLinter, decorator_checker: BaseChecker) -> None: - """Test bad `@callback` decorator.""" - code = """ - from homeassistant.core import callback - - @callback - async def setup( - arg1, arg2 - ): - pass - """ - - root_node = astroid.parse(code) - walker = ASTWalker(linter) - walker.add_checker(decorator_checker) - - with assert_adds_messages( - linter, - MessageTest( - msg_id="hass-async-callback-decorator", - line=5, - node=root_node.body[1], - args=None, - confidence=UNDEFINED, - col_offset=0, - end_line=5, - end_col_offset=15, - ), - ): - walker.walk(root_node) - - -@pytest.mark.parametrize( - ("keywords", "path"), - [ - ('scope="function"', "tests.test_bootstrap"), - ('scope="class"', "tests.test_bootstrap"), - ('scope="module"', "tests.test_bootstrap"), - ('scope="package"', "tests.test_bootstrap"), - ('scope="session", autouse=True', "tests.test_bootstrap"), - ('scope="function"', "tests.components.conftest"), - ('scope="class"', "tests.components.conftest"), - ('scope="module"', "tests.components.conftest"), - ('scope="package"', "tests.components.conftest"), - ('scope="session", autouse=True', "tests.components.conftest"), - ( - 'scope="session", autouse=find_spec("zeroconf") is not None', - "tests.components.conftest", - ), - ('scope="function"', "tests.components.pylint_tests.conftest"), - ('scope="class"', "tests.components.pylint_tests.conftest"), - ('scope="module"', "tests.components.pylint_tests.conftest"), - ('scope="package"', "tests.components.pylint_tests.conftest"), - ('scope="function"', "tests.components.pylint_test"), - ('scope="class"', "tests.components.pylint_test"), - ('scope="module"', "tests.components.pylint_test"), - ], -) -def test_good_fixture( - linter: UnittestLinter, decorator_checker: BaseChecker, keywords: str, path: str -) -> None: - """Test good `@pytest.fixture` decorator.""" - code = f""" - import pytest - - @pytest.fixture - def setup( - arg1, arg2 - ): - pass - - @pytest.fixture({keywords}) - def setup_session( - arg1, arg2 - ): - pass - """ - - root_node = astroid.parse(code, path) - walker = 
ASTWalker(linter) - walker.add_checker(decorator_checker) - - with assert_no_messages(linter): - walker.walk(root_node) - - -@pytest.mark.parametrize( - "path", - [ - "tests.components.pylint_test", - "tests.components.pylint_test.conftest", - "tests.components.pylint_test.module", - ], -) -def test_bad_fixture_session_scope( - linter: UnittestLinter, decorator_checker: BaseChecker, path: str -) -> None: - """Test bad `@pytest.fixture` decorator.""" - code = """ - import pytest - - @pytest.fixture - def setup( - arg1, arg2 - ): - pass - - @pytest.fixture(scope="session") - def setup_session( - arg1, arg2 - ): - pass - """ - - root_node = astroid.parse(code, path) - walker = ASTWalker(linter) - walker.add_checker(decorator_checker) - - with assert_adds_messages( - linter, - MessageTest( - msg_id="hass-pytest-fixture-decorator", - line=10, - node=root_node.body[2].decorators.nodes[0], - args=("scope `session`", "use `package` or lower"), - confidence=UNDEFINED, - col_offset=1, - end_line=10, - end_col_offset=32, - ), - ): - walker.walk(root_node) - - -@pytest.mark.parametrize( - "path", - [ - "tests.components.pylint_test", - "tests.components.pylint_test.module", - ], -) -def test_bad_fixture_package_scope( - linter: UnittestLinter, decorator_checker: BaseChecker, path: str -) -> None: - """Test bad `@pytest.fixture` decorator.""" - code = """ - import pytest - - @pytest.fixture - def setup( - arg1, arg2 - ): - pass - - @pytest.fixture(scope="package") - def setup_session( - arg1, arg2 - ): - pass - """ - - root_node = astroid.parse(code, path) - walker = ASTWalker(linter) - walker.add_checker(decorator_checker) - - with assert_adds_messages( - linter, - MessageTest( - msg_id="hass-pytest-fixture-decorator", - line=10, - node=root_node.body[2].decorators.nodes[0], - args=("scope `package`", "use `module` or lower"), - confidence=UNDEFINED, - col_offset=1, - end_line=10, - end_col_offset=32, - ), - ): - walker.walk(root_node) - - -@pytest.mark.parametrize( - "keywords", - [ - 'scope="session"', - 'scope="session", autouse=False', - ], -) -@pytest.mark.parametrize( - "path", - [ - "tests.test_bootstrap", - "tests.components.conftest", - ], -) -def test_bad_fixture_autouse( - linter: UnittestLinter, decorator_checker: BaseChecker, keywords: str, path: str -) -> None: - """Test bad `@pytest.fixture` decorator.""" - code = f""" - import pytest - - @pytest.fixture - def setup( - arg1, arg2 - ): - pass - - @pytest.fixture({keywords}) - def setup_session( - arg1, arg2 - ): - pass - """ - - root_node = astroid.parse(code, path) - walker = ASTWalker(linter) - walker.add_checker(decorator_checker) - - with assert_adds_messages( - linter, - MessageTest( - msg_id="hass-pytest-fixture-decorator", - line=10, - node=root_node.body[2].decorators.nodes[0], - args=("scope/autouse combination", "set `autouse=True` or reduce scope"), - confidence=UNDEFINED, - col_offset=1, - end_line=10, - end_col_offset=17 + len(keywords), - ), - ): - walker.walk(root_node) diff --git a/tests/pylint/test_enforce_class_module.py b/tests/pylint/test_enforce_class_module.py deleted file mode 100644 index 8b3ac563c6a..00000000000 --- a/tests/pylint/test_enforce_class_module.py +++ /dev/null @@ -1,286 +0,0 @@ -"""Tests for pylint hass_enforce_class_module plugin.""" - -from __future__ import annotations - -import astroid -from pylint.checkers import BaseChecker -from pylint.interfaces import UNDEFINED -from pylint.testutils import MessageTest -from pylint.testutils.unittest_linter import UnittestLinter -from pylint.utils.ast_walker 
import ASTWalker -import pytest - -from . import assert_adds_messages, assert_no_messages - - -@pytest.mark.parametrize( - "code", - [ - pytest.param( - """ - class DataUpdateCoordinator: - pass - - class TestCoordinator(DataUpdateCoordinator): - pass - """, - id="simple", - ), - pytest.param( - """ - class DataUpdateCoordinator: - pass - - class TestCoordinator(DataUpdateCoordinator): - pass - - class TestCoordinator2(TestCoordinator): - pass - """, - id="nested", - ), - ], -) -@pytest.mark.parametrize( - "path", - [ - "homeassistant.components.pylint_test.coordinator", - "homeassistant.components.pylint_test.coordinator.my_coordinator", - ], -) -def test_enforce_class_module_good( - linter: UnittestLinter, - enforce_class_module_checker: BaseChecker, - code: str, - path: str, -) -> None: - """Good test cases.""" - root_node = astroid.parse(code, path) - walker = ASTWalker(linter) - walker.add_checker(enforce_class_module_checker) - - with assert_no_messages(linter): - walker.walk(root_node) - - -@pytest.mark.parametrize( - "path", - [ - "homeassistant.components.sensor", - "homeassistant.components.sensor.entity", - "homeassistant.components.pylint_test.sensor", - "homeassistant.components.pylint_test.sensor.entity", - ], -) -def test_enforce_class_platform_good( - linter: UnittestLinter, - enforce_class_module_checker: BaseChecker, - path: str, -) -> None: - """Good test cases.""" - code = """ - class SensorEntity: - pass - - class CustomSensorEntity(SensorEntity): - pass - - class CoordinatorEntity: - pass - - class CustomCoordinatorSensorEntity(CoordinatorEntity, SensorEntity): - pass - """ - root_node = astroid.parse(code, path) - walker = ASTWalker(linter) - walker.add_checker(enforce_class_module_checker) - - with assert_no_messages(linter): - walker.walk(root_node) - - -@pytest.mark.parametrize( - "path", - [ - "homeassistant.components.pylint_test", - "homeassistant.components.pylint_test.my_coordinator", - "homeassistant.components.pylint_test.coordinator_other", - "homeassistant.components.pylint_test.sensor", - ], -) -def test_enforce_class_module_bad_simple( - linter: UnittestLinter, - enforce_class_module_checker: BaseChecker, - path: str, -) -> None: - """Bad test case with coordinator extending directly.""" - root_node = astroid.parse( - """ - class DataUpdateCoordinator: - pass - - class TestCoordinator(DataUpdateCoordinator): - pass - - class CoordinatorEntity: - pass - - class CustomCoordinatorSensorEntity(CoordinatorEntity): - pass - """, - path, - ) - walker = ASTWalker(linter) - walker.add_checker(enforce_class_module_checker) - - with assert_adds_messages( - linter, - MessageTest( - msg_id="hass-enforce-class-module", - line=5, - node=root_node.body[1], - args=("DataUpdateCoordinator", "coordinator"), - confidence=UNDEFINED, - col_offset=0, - end_line=5, - end_col_offset=21, - ), - MessageTest( - msg_id="hass-enforce-class-module", - line=11, - node=root_node.body[3], - args=("CoordinatorEntity", "entity"), - confidence=UNDEFINED, - col_offset=0, - end_line=11, - end_col_offset=35, - ), - ): - walker.walk(root_node) - - -@pytest.mark.parametrize( - "path", - [ - "homeassistant.components.pylint_test", - "homeassistant.components.pylint_test.my_coordinator", - "homeassistant.components.pylint_test.coordinator_other", - "homeassistant.components.pylint_test.sensor", - ], -) -def test_enforce_class_module_bad_nested( - linter: UnittestLinter, - enforce_class_module_checker: BaseChecker, - path: str, -) -> None: - """Bad test case with nested coordinators.""" - 
root_node = astroid.parse( - """ - class DataUpdateCoordinator: - pass - - class TestCoordinator(DataUpdateCoordinator): - pass - - class NopeCoordinator(TestCoordinator): - pass - """, - path, - ) - walker = ASTWalker(linter) - walker.add_checker(enforce_class_module_checker) - - with assert_adds_messages( - linter, - MessageTest( - msg_id="hass-enforce-class-module", - line=5, - node=root_node.body[1], - args=("DataUpdateCoordinator", "coordinator"), - confidence=UNDEFINED, - col_offset=0, - end_line=5, - end_col_offset=21, - ), - MessageTest( - msg_id="hass-enforce-class-module", - line=8, - node=root_node.body[2], - args=("DataUpdateCoordinator", "coordinator"), - confidence=UNDEFINED, - col_offset=0, - end_line=8, - end_col_offset=21, - ), - ): - walker.walk(root_node) - - -@pytest.mark.parametrize( - "path", - [ - "homeassistant.components.sensor", - "homeassistant.components.sensor.entity", - "homeassistant.components.pylint_test.entity", - ], -) -def test_enforce_entity_good( - linter: UnittestLinter, - enforce_class_module_checker: BaseChecker, - path: str, -) -> None: - """Good test cases.""" - code = """ - class Entity: - pass - - class CustomEntity(Entity): - pass - """ - root_node = astroid.parse(code, path) - walker = ASTWalker(linter) - walker.add_checker(enforce_class_module_checker) - - with assert_no_messages(linter): - walker.walk(root_node) - - -@pytest.mark.parametrize( - "path", - [ - "homeassistant.components.pylint_test", - "homeassistant.components.pylint_test.select", - "homeassistant.components.pylint_test.select.entity", - ], -) -def test_enforce_entity_bad( - linter: UnittestLinter, - enforce_class_module_checker: BaseChecker, - path: str, -) -> None: - """Good test cases.""" - code = """ - class Entity: - pass - - class CustomEntity(Entity): - pass - """ - root_node = astroid.parse(code, path) - walker = ASTWalker(linter) - walker.add_checker(enforce_class_module_checker) - - with assert_adds_messages( - linter, - MessageTest( - msg_id="hass-enforce-class-module", - line=5, - node=root_node.body[1], - args=("Entity", "entity"), - confidence=UNDEFINED, - col_offset=0, - end_line=5, - end_col_offset=18, - ), - ): - walker.walk(root_node) diff --git a/tests/pylint/test_enforce_coordinator_module.py b/tests/pylint/test_enforce_coordinator_module.py new file mode 100644 index 00000000000..90d88246974 --- /dev/null +++ b/tests/pylint/test_enforce_coordinator_module.py @@ -0,0 +1,134 @@ +"""Tests for pylint hass_enforce_coordinator_module plugin.""" + +from __future__ import annotations + +import astroid +from pylint.checkers import BaseChecker +from pylint.interfaces import UNDEFINED +from pylint.testutils import MessageTest +from pylint.testutils.unittest_linter import UnittestLinter +from pylint.utils.ast_walker import ASTWalker +import pytest + +from . 
import assert_adds_messages, assert_no_messages + + +@pytest.mark.parametrize( + "code", + [ + pytest.param( + """ + class DataUpdateCoordinator: + pass + + class TestCoordinator(DataUpdateCoordinator): + pass + """, + id="simple", + ), + pytest.param( + """ + class DataUpdateCoordinator: + pass + + class TestCoordinator(DataUpdateCoordinator): + pass + + class TestCoordinator2(TestCoordinator): + pass + """, + id="nested", + ), + ], +) +def test_enforce_coordinator_module_good( + linter: UnittestLinter, enforce_coordinator_module_checker: BaseChecker, code: str +) -> None: + """Good test cases.""" + root_node = astroid.parse(code, "homeassistant.components.pylint_test.coordinator") + walker = ASTWalker(linter) + walker.add_checker(enforce_coordinator_module_checker) + + with assert_no_messages(linter): + walker.walk(root_node) + + +def test_enforce_coordinator_module_bad_simple( + linter: UnittestLinter, + enforce_coordinator_module_checker: BaseChecker, +) -> None: + """Bad test case with coordinator extending directly.""" + root_node = astroid.parse( + """ + class DataUpdateCoordinator: + pass + + class TestCoordinator(DataUpdateCoordinator): + pass + """, + "homeassistant.components.pylint_test", + ) + walker = ASTWalker(linter) + walker.add_checker(enforce_coordinator_module_checker) + + with assert_adds_messages( + linter, + MessageTest( + msg_id="hass-enforce-coordinator-module", + line=5, + node=root_node.body[1], + args=None, + confidence=UNDEFINED, + col_offset=0, + end_line=5, + end_col_offset=21, + ), + ): + walker.walk(root_node) + + +def test_enforce_coordinator_module_bad_nested( + linter: UnittestLinter, + enforce_coordinator_module_checker: BaseChecker, +) -> None: + """Bad test case with nested coordinators.""" + root_node = astroid.parse( + """ + class DataUpdateCoordinator: + pass + + class TestCoordinator(DataUpdateCoordinator): + pass + + class NopeCoordinator(TestCoordinator): + pass + """, + "homeassistant.components.pylint_test", + ) + walker = ASTWalker(linter) + walker.add_checker(enforce_coordinator_module_checker) + + with assert_adds_messages( + linter, + MessageTest( + msg_id="hass-enforce-coordinator-module", + line=5, + node=root_node.body[1], + args=None, + confidence=UNDEFINED, + col_offset=0, + end_line=5, + end_col_offset=21, + ), + MessageTest( + msg_id="hass-enforce-coordinator-module", + line=8, + node=root_node.body[2], + args=None, + confidence=UNDEFINED, + col_offset=0, + end_line=8, + end_col_offset=21, + ), + ): + walker.walk(root_node) diff --git a/tests/pylint/test_enforce_type_hints.py b/tests/pylint/test_enforce_type_hints.py index 6c53e9832d9..b1692d1d60d 100644 --- a/tests/pylint/test_enforce_type_hints.py +++ b/tests/pylint/test_enforce_type_hints.py @@ -313,9 +313,7 @@ def test_invalid_config_flow_step( linter: UnittestLinter, type_hint_checker: BaseChecker ) -> None: """Ensure invalid hints are rejected for ConfigFlow step.""" - type_hint_checker.linter.config.ignore_missing_annotations = True - - class_node, func_node, arg_node, func_node2 = astroid.extract_node( + class_node, func_node, arg_node = astroid.extract_node( """ class FlowHandler(): pass @@ -331,12 +329,6 @@ def test_invalid_config_flow_step( device_config: dict #@ ): pass - - async def async_step_custom( #@ - self, - user_input - ): - pass """, "homeassistant.components.pylint_test.config_flow", ) @@ -362,15 +354,6 @@ def test_invalid_config_flow_step( end_line=11, end_col_offset=33, ), - pylint.testutils.MessageTest( - msg_id="hass-return-type", - node=func_node2, - 
args=("ConfigFlowResult", "async_step_custom"), - line=17, - col_offset=4, - end_line=17, - end_col_offset=31, - ), ): type_hint_checker.visit_classdef(class_node) diff --git a/tests/pylint/test_imports.py b/tests/pylint/test_imports.py index 5044e73d253..e53b8206848 100644 --- a/tests/pylint/test_imports.py +++ b/tests/pylint/test_imports.py @@ -208,10 +208,6 @@ def test_good_root_import( "from homeassistant.components.climate.const import ClimateEntityFeature", "homeassistant.components.pylint_test.climate", ), - ( - "from homeassistant.components.climate.entity import ClimateEntityFeature", - "homeassistant.components.pylint_test.climate", - ), ( "from homeassistant.components.climate import const", "tests.components.pylint_test.climate", @@ -224,10 +220,6 @@ def test_good_root_import( "import homeassistant.components.climate.const as climate", "tests.components.pylint_test.climate", ), - ( - "import homeassistant.components.climate.entity as climate", - "tests.components.pylint_test.climate", - ), ], ) def test_bad_root_import( @@ -317,54 +309,3 @@ def test_bad_namespace_import( ), ): imports_checker.visit_importfrom(node) - - -@pytest.mark.parametrize( - ("module_name", "import_string", "end_col_offset"), - [ - ( - "homeassistant.components.pylint_test.sensor", - "from homeassistant.components.other import DOMAIN as OTHER_DOMAIN", - -1, - ), - ( - "homeassistant.components.pylint_test.sensor", - "from homeassistant.components.other import DOMAIN", - 49, - ), - ], -) -def test_domain_alias( - linter: UnittestLinter, - imports_checker: BaseChecker, - module_name: str, - import_string: str, - end_col_offset: int, -) -> None: - """Ensure good imports pass through ok.""" - - import_node = astroid.extract_node( - f"{import_string} #@", - module_name, - ) - imports_checker.visit_module(import_node.parent) - - expected_messages = [] - if end_col_offset > 0: - expected_messages.append( - pylint.testutils.MessageTest( - msg_id="hass-import-constant-alias", - node=import_node, - args=("DOMAIN", "DOMAIN", "OTHER_DOMAIN"), - line=1, - col_offset=0, - end_line=1, - end_col_offset=end_col_offset, - ) - ) - - with assert_adds_messages(linter, *expected_messages): - if import_string.startswith("import"): - imports_checker.visit_import(import_node) - else: - imports_checker.visit_importfrom(import_node) diff --git a/tests/script/test_gen_requirements_all.py b/tests/script/test_gen_requirements_all.py index 519a5c21855..793b3de63c5 100644 --- a/tests/script/test_gen_requirements_all.py +++ b/tests/script/test_gen_requirements_all.py @@ -1,7 +1,5 @@ """Tests for the gen_requirements_all script.""" -from unittest.mock import patch - from script import gen_requirements_all @@ -25,27 +23,3 @@ def test_include_overrides_subsets() -> None: for overrides in gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS.values(): for req in overrides["include"]: assert req in gen_requirements_all.EXCLUDED_REQUIREMENTS_ALL - - -def test_requirement_override_markers() -> None: - """Test override markers are applied to the correct requirements.""" - data = { - "pytest": { - "exclude": set(), - "include": set(), - "markers": {"env-canada": "python_version<'3.13'"}, - } - } - with patch.dict( - gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS, data, clear=True - ): - assert ( - gen_requirements_all.process_action_requirement( - "env-canada==0.7.2", "pytest" - ) - == "env-canada==0.7.2;python_version<'3.13'" - ) - assert ( - gen_requirements_all.process_action_requirement("other==1.0", "pytest") - == "other==1.0" - ) diff 
--git a/tests/scripts/test_auth.py b/tests/scripts/test_auth.py index e52a2cc6567..19a9277a36a 100644 --- a/tests/scripts/test_auth.py +++ b/tests/scripts/test_auth.py @@ -1,13 +1,12 @@ """Test the auth script to manage local users.""" -import argparse from asyncio import AbstractEventLoop -from collections.abc import Generator import logging from typing import Any from unittest.mock import Mock, patch import pytest +from typing_extensions import Generator from homeassistant.auth.providers import homeassistant as hass_auth from homeassistant.core import HomeAssistant @@ -149,9 +148,7 @@ def test_parsing_args(event_loop: AbstractEventLoop) -> None: """Test we parse args correctly.""" called = False - async def mock_func( - hass: HomeAssistant, provider: hass_auth.AuthProvider, args2: argparse.Namespace - ) -> None: + async def mock_func(hass, provider, args2): """Mock function to be called.""" nonlocal called called = True diff --git a/tests/snapshots/test_config_entries.ambr b/tests/snapshots/test_config_entries.ambr index 51e56f4874e..bfb583ba8db 100644 --- a/tests/snapshots/test_config_entries.ambr +++ b/tests/snapshots/test_config_entries.ambr @@ -1,16 +1,12 @@ # serializer version: 1 # name: test_as_dict dict({ - 'created_at': '2024-02-14T12:00:00+00:00', 'data': dict({ }), 'disabled_by': None, - 'discovery_keys': dict({ - }), 'domain': 'test', 'entry_id': 'mock-entry', 'minor_version': 1, - 'modified_at': '2024-02-14T12:00:00+00:00', 'options': dict({ }), 'pref_disable_new_entities': False, @@ -21,83 +17,3 @@ 'version': 1, }) # --- -# name: test_unique_id_collision_issues - IssueRegistryItemSnapshot({ - 'active': True, - 'breaks_in_ha_version': '2025.11.0', - 'created': , - 'data': dict({ - 'issue_type': 'config_entry_unique_id_collision', - 'unique_id': 'group_1', - }), - 'dismissed_version': None, - 'domain': 'homeassistant', - 'is_fixable': False, - 'is_persistent': False, - 'issue_domain': 'test2', - 'issue_id': 'config_entry_unique_id_collision_test2_group_1', - 'learn_more_url': None, - 'severity': , - 'translation_key': 'config_entry_unique_id_collision', - 'translation_placeholders': dict({ - 'configure_url': '/config/integrations/integration/test2', - 'domain': 'test2', - 'titles': "'Mock Title', 'Mock Title', 'Mock Title'", - 'unique_id': 'group_1', - }), - }) -# --- -# name: test_unique_id_collision_issues.1 - IssueRegistryItemSnapshot({ - 'active': True, - 'breaks_in_ha_version': '2025.11.0', - 'created': , - 'data': dict({ - 'issue_type': 'config_entry_unique_id_collision', - 'unique_id': 'not_unique', - }), - 'dismissed_version': None, - 'domain': 'homeassistant', - 'is_fixable': False, - 'is_persistent': False, - 'issue_domain': 'test3', - 'issue_id': 'config_entry_unique_id_collision_test3_not_unique', - 'learn_more_url': None, - 'severity': , - 'translation_key': 'config_entry_unique_id_collision_many', - 'translation_placeholders': dict({ - 'configure_url': '/config/integrations/integration/test3', - 'domain': 'test3', - 'number_of_entries': '6', - 'title_limit': '5', - 'titles': "'Mock Title', 'Mock Title', 'Mock Title', 'Mock Title', 'Mock Title'", - 'unique_id': 'not_unique', - }), - }) -# --- -# name: test_unique_id_collision_issues.2 - IssueRegistryItemSnapshot({ - 'active': True, - 'breaks_in_ha_version': '2025.11.0', - 'created': , - 'data': dict({ - 'issue_type': 'config_entry_unique_id_collision', - 'unique_id': 'not_unique', - }), - 'dismissed_version': None, - 'domain': 'homeassistant', - 'is_fixable': False, - 'is_persistent': False, - 'issue_domain': 
'test3',
-    'issue_id': 'config_entry_unique_id_collision_test3_not_unique',
-    'learn_more_url': None,
-    'severity': ,
-    'translation_key': 'config_entry_unique_id_collision',
-    'translation_placeholders': dict({
-      'configure_url': '/config/integrations/integration/test3',
-      'domain': 'test3',
-      'titles': "'Mock Title', 'Mock Title', 'Mock Title', 'Mock Title', 'Mock Title'",
-      'unique_id': 'not_unique',
-    }),
-  })
-# ---
diff --git a/tests/syrupy.py b/tests/syrupy.py
index a3b3f763063..52bd5756798 100644
--- a/tests/syrupy.py
+++ b/tests/syrupy.py
@@ -5,22 +5,20 @@ from __future__ import annotations
 from contextlib import suppress
 import dataclasses
 from enum import IntFlag
-import json
-import os
 from pathlib import Path
 from typing import Any
 
 import attr
 import attrs
-import pytest
-from syrupy.constants import EXIT_STATUS_FAIL_UNUSED
-from syrupy.data import Snapshot, SnapshotCollection, SnapshotCollections
 from syrupy.extensions.amber import AmberDataSerializer, AmberSnapshotExtension
 from syrupy.location import PyTestLocation
-from syrupy.report import SnapshotReport
-from syrupy.session import ItemStatus, SnapshotSession
-from syrupy.types import PropertyFilter, PropertyMatcher, PropertyPath, SerializableData
-from syrupy.utils import is_xdist_controller, is_xdist_worker
+from syrupy.types import (
+    PropertyFilter,
+    PropertyMatcher,
+    PropertyPath,
+    SerializableData,
+    SerializedData,
+)
 import voluptuous as vol
 import voluptuous_serialize
 
@@ -92,7 +90,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer):
         matcher: PropertyMatcher | None = None,
         path: PropertyPath = (),
         visited: set[Any] | None = None,
-    ) -> str:
+    ) -> SerializedData:
         """Pre-process data before serializing.
 
         This allows us to handle specific cases for Home Assistant data structures.
@@ -113,7 +111,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): serializable_data = voluptuous_serialize.convert(data) elif isinstance(data, ConfigEntry): serializable_data = cls._serializable_config_entry(data) - elif dataclasses.is_dataclass(type(data)): + elif dataclasses.is_dataclass(data): serializable_data = dataclasses.asdict(data) elif isinstance(data, IntFlag): # The repr of an enum.IntFlag has changed between Python 3.10 and 3.11 @@ -122,7 +120,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): else: serializable_data = data with suppress(TypeError): - if attr.has(type(data)): + if attr.has(data): serializable_data = attrs.asdict(data) return super()._serialize( @@ -138,16 +136,14 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): @classmethod def _serializable_area_registry_entry(cls, data: ar.AreaEntry) -> SerializableData: """Prepare a Home Assistant area registry entry for serialization.""" - serialized = AreaRegistryEntrySnapshot(dataclasses.asdict(data) | {"id": ANY}) + serialized = AreaRegistryEntrySnapshot(attrs.asdict(data) | {"id": ANY}) serialized.pop("_json_repr") - serialized.pop("_cache") return serialized @classmethod def _serializable_config_entry(cls, data: ConfigEntry) -> SerializableData: """Prepare a Home Assistant config entry for serialization.""" - entry = ConfigEntrySnapshot(data.as_dict() | {"entry_id": ANY}) - return cls._remove_created_and_modified_at(entry) + return ConfigEntrySnapshot(data.as_dict() | {"entry_id": ANY}) @classmethod def _serializable_device_registry_entry( @@ -165,17 +161,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): serialized["via_device_id"] = ANY if serialized["primary_config_entry"] is not None: serialized["primary_config_entry"] = ANY - serialized.pop("_cache") - return cls._remove_created_and_modified_at(serialized) - - @classmethod - def _remove_created_and_modified_at( - cls, data: SerializableData - ) -> SerializableData: - """Remove created_at and modified_at from the data.""" - data.pop("created_at", None) - data.pop("modified_at", None) - return data + return serialized @classmethod def _serializable_entity_registry_entry( @@ -192,8 +178,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): } ) serialized.pop("categories") - serialized.pop("_cache") - return cls._remove_created_and_modified_at(serialized) + return serialized @classmethod def _serializable_flow_result(cls, data: FlowResult) -> SerializableData: @@ -205,7 +190,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): cls, data: ir.IssueEntry ) -> SerializableData: """Prepare a Home Assistant issue registry entry for serialization.""" - return IssueRegistryItemSnapshot(dataclasses.asdict(data) | {"created": ANY}) + return IssueRegistryItemSnapshot(data.to_json() | {"created": ANY}) @classmethod def _serializable_state(cls, data: State) -> SerializableData: @@ -254,164 +239,3 @@ class HomeAssistantSnapshotExtension(AmberSnapshotExtension): """ test_dir = Path(test_location.filepath).parent return str(test_dir.joinpath("snapshots")) - - -# Classes and Methods to override default finish behavior in syrupy -# This is needed to handle the xdist plugin in pytest -# The default implementation does not handle the xdist plugin -# and will not work correctly when running tests in parallel -# with pytest-xdist. 
-# Temporary workaround until it is finalised inside syrupy -# See https://github.com/syrupy-project/syrupy/pull/901 - - -class _FakePytestObject: - """Fake object.""" - - def __init__(self, collected_item: dict[str, str]) -> None: - """Initialise fake object.""" - self.__module__ = collected_item["modulename"] - self.__name__ = collected_item["methodname"] - - -class _FakePytestItem: - """Fake pytest.Item object.""" - - def __init__(self, collected_item: dict[str, str]) -> None: - """Initialise fake pytest.Item object.""" - self.nodeid = collected_item["nodeid"] - self.name = collected_item["name"] - self.path = Path(collected_item["path"]) - self.obj = _FakePytestObject(collected_item) - - -def _serialize_collections(collections: SnapshotCollections) -> dict[str, Any]: - return { - k: [c.name for c in v] for k, v in collections._snapshot_collections.items() - } - - -def _serialize_report( - report: SnapshotReport, - collected_items: set[pytest.Item], - selected_items: dict[str, ItemStatus], -) -> dict[str, Any]: - return { - "discovered": _serialize_collections(report.discovered), - "created": _serialize_collections(report.created), - "failed": _serialize_collections(report.failed), - "matched": _serialize_collections(report.matched), - "updated": _serialize_collections(report.updated), - "used": _serialize_collections(report.used), - "_collected_items": [ - { - "nodeid": c.nodeid, - "name": c.name, - "path": str(c.path), - "modulename": c.obj.__module__, - "methodname": c.obj.__name__, - } - for c in list(collected_items) - ], - "_selected_items": { - key: status.value for key, status in selected_items.items() - }, - } - - -def _merge_serialized_collections( - collections: SnapshotCollections, json_data: dict[str, list[str]] -) -> None: - if not json_data: - return - for location, names in json_data.items(): - snapshot_collection = SnapshotCollection(location=location) - for name in names: - snapshot_collection.add(Snapshot(name)) - collections.update(snapshot_collection) - - -def _merge_serialized_report(report: SnapshotReport, json_data: dict[str, Any]) -> None: - _merge_serialized_collections(report.discovered, json_data["discovered"]) - _merge_serialized_collections(report.created, json_data["created"]) - _merge_serialized_collections(report.failed, json_data["failed"]) - _merge_serialized_collections(report.matched, json_data["matched"]) - _merge_serialized_collections(report.updated, json_data["updated"]) - _merge_serialized_collections(report.used, json_data["used"]) - for collected_item in json_data["_collected_items"]: - custom_item = _FakePytestItem(collected_item) - if not any( - t.nodeid == custom_item.nodeid and t.name == custom_item.nodeid - for t in report.collected_items - ): - report.collected_items.add(custom_item) - for key, selected_item in json_data["_selected_items"].items(): - if key in report.selected_items: - status = ItemStatus(selected_item) - if status != ItemStatus.NOT_RUN: - report.selected_items[key] = status - else: - report.selected_items[key] = ItemStatus(selected_item) - - -def override_syrupy_finish(self: SnapshotSession) -> int: - """Override the finish method to allow for custom handling.""" - exitstatus = 0 - self.flush_snapshot_write_queue() - self.report = SnapshotReport( - base_dir=self.pytest_session.config.rootpath, - collected_items=self._collected_items, - selected_items=self._selected_items, - assertions=self._assertions, - options=self.pytest_session.config.option, - ) - - needs_xdist_merge = self.update_snapshots or bool( - 
self.pytest_session.config.option.include_snapshot_details
-        )
-
-        if is_xdist_worker():
-            if not needs_xdist_merge:
-                return exitstatus
-            with open(".pytest_syrupy_worker_count", "w", encoding="utf-8") as f:
-                f.write(os.getenv("PYTEST_XDIST_WORKER_COUNT"))
-            with open(
-                f".pytest_syrupy_{os.getenv("PYTEST_XDIST_WORKER")}_result",
-                "w",
-                encoding="utf-8",
-            ) as f:
-                json.dump(
-                    _serialize_report(
-                        self.report, self._collected_items, self._selected_items
-                    ),
-                    f,
-                    indent=2,
-                )
-            return exitstatus
-        if is_xdist_controller():
-            return exitstatus
-
-        if needs_xdist_merge:
-            worker_count = None
-            try:
-                with open(".pytest_syrupy_worker_count", encoding="utf-8") as f:
-                    worker_count = f.read()
-                os.remove(".pytest_syrupy_worker_count")
-            except FileNotFoundError:
-                pass
-
-            if worker_count:
-                for i in range(int(worker_count)):
-                    with open(f".pytest_syrupy_gw{i}_result", encoding="utf-8") as f:
-                        _merge_serialized_report(self.report, json.load(f))
-                    os.remove(f".pytest_syrupy_gw{i}_result")
-
-        if self.report.num_unused:
-            if self.update_snapshots:
-                self.remove_unused_snapshots(
-                    unused_snapshot_collections=self.report.unused,
-                    used_snapshot_collections=self.report.used,
-                )
-            elif not self.warn_unused_snapshots:
-                exitstatus |= EXIT_STATUS_FAIL_UNUSED
-        return exitstatus
diff --git a/tests/test_backports.py b/tests/test_backports.py
index af485abbc36..4df0a9e3f57 100644
--- a/tests/test_backports.py
+++ b/tests/test_backports.py
@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 from enum import StrEnum
-from functools import cached_property  # pylint: disable=hass-deprecated-import
+from functools import cached_property
 from types import ModuleType
 from typing import Any
 
diff --git a/tests/test_backup_restore.py b/tests/test_backup_restore.py
deleted file mode 100644
index 44a05c0540e..00000000000
--- a/tests/test_backup_restore.py
+++ /dev/null
@@ -1,215 +0,0 @@
-"""Test methods in backup_restore."""
-
-from pathlib import Path
-import tarfile
-from unittest import mock
-
-import pytest
-
-from homeassistant import backup_restore
-
-from .common import get_test_config_dir
-
-
-@pytest.mark.parametrize(
-    ("side_effect", "content", "expected"),
-    [
-        (FileNotFoundError, "", None),
-        (None, "", None),
-        (
-            None,
-            '{"path": "test"}',
-            backup_restore.RestoreBackupFileContent(backup_file_path=Path("test")),
-        ),
-    ],
-)
-def test_reading_the_instruction_contents(
-    side_effect: Exception | None,
-    content: str,
-    expected: backup_restore.RestoreBackupFileContent | None,
-) -> None:
-    """Test reading the content of the .HA_RESTORE file."""
-    with (
-        mock.patch(
-            "pathlib.Path.read_text",
-            return_value=content,
-            side_effect=side_effect,
-        ),
-    ):
-        read_content = backup_restore.restore_backup_file_content(
-            Path(get_test_config_dir())
-        )
-        assert read_content == expected
-
-
-def test_restoring_backup_that_does_not_exist() -> None:
-    """Test restoring a backup that does not exist."""
-    backup_file_path = Path(get_test_config_dir("backups", "test"))
-    with (
-        mock.patch(
-            "homeassistant.backup_restore.restore_backup_file_content",
-            return_value=backup_restore.RestoreBackupFileContent(
-                backup_file_path=backup_file_path
-            ),
-        ),
-        mock.patch("pathlib.Path.read_text", side_effect=FileNotFoundError),
-        pytest.raises(
-            ValueError, match=f"Backup file {backup_file_path} does not exist"
-        ),
-    ):
-        assert backup_restore.restore_backup(Path(get_test_config_dir())) is False
-
-
-def test_restoring_backup_when_instructions_can_not_be_read() -> None:
-    """Test restoring a backup when instructions can not be read."""
-    with (
-        mock.patch(
-            "homeassistant.backup_restore.restore_backup_file_content",
-            return_value=None,
-        ),
-    ):
-        assert backup_restore.restore_backup(Path(get_test_config_dir())) is False
-
-
-def test_restoring_backup_that_is_not_a_file() -> None:
-    """Test restoring a backup that is not a file."""
-    backup_file_path = Path(get_test_config_dir("backups", "test"))
-    with (
-        mock.patch(
-            "homeassistant.backup_restore.restore_backup_file_content",
-            return_value=backup_restore.RestoreBackupFileContent(
-                backup_file_path=backup_file_path
-            ),
-        ),
-        mock.patch("pathlib.Path.exists", return_value=True),
-        mock.patch("pathlib.Path.is_file", return_value=False),
-        pytest.raises(
-            ValueError, match=f"Backup file {backup_file_path} does not exist"
-        ),
-    ):
-        assert backup_restore.restore_backup(Path(get_test_config_dir())) is False
-
-
-def test_aborting_for_older_versions() -> None:
-    """Test that we abort for older versions."""
-    config_dir = Path(get_test_config_dir())
-    backup_file_path = Path(config_dir, "backups", "test.tar")
-
-    def _patched_path_read_text(path: Path, **kwargs):
-        return '{"homeassistant": {"version": "9999.99.99"}, "compressed": false}'
-
-    with (
-        mock.patch(
-            "homeassistant.backup_restore.restore_backup_file_content",
-            return_value=backup_restore.RestoreBackupFileContent(
-                backup_file_path=backup_file_path
-            ),
-        ),
-        mock.patch("securetar.SecureTarFile"),
-        mock.patch("homeassistant.backup_restore.TemporaryDirectory"),
-        mock.patch("pathlib.Path.read_text", _patched_path_read_text),
-        mock.patch("homeassistant.backup_restore.HA_VERSION", "2013.09.17"),
-        pytest.raises(
-            ValueError,
-            match="You need at least Home Assistant version 9999.99.99 to restore this backup",
-        ),
-    ):
-        assert backup_restore.restore_backup(config_dir) is True
-
-
-def test_removal_of_current_configuration_when_restoring() -> None:
-    """Test that we are removing the current configuration directory."""
-    config_dir = Path(get_test_config_dir())
-    backup_file_path = Path(config_dir, "backups", "test.tar")
-    mock_config_dir = [
-        {"path": Path(config_dir, ".HA_RESTORE"), "is_file": True},
-        {"path": Path(config_dir, ".HA_VERSION"), "is_file": True},
-        {"path": Path(config_dir, "backups"), "is_file": False},
-        {"path": Path(config_dir, "www"), "is_file": False},
-    ]
-
-    def _patched_path_read_text(path: Path, **kwargs):
-        return '{"homeassistant": {"version": "2013.09.17"}, "compressed": false}'
-
-    def _patched_path_is_file(path: Path, **kwargs):
-        return [x for x in mock_config_dir if x["path"] == path][0]["is_file"]
-
-    def _patched_path_is_dir(path: Path, **kwargs):
-        return not [x for x in mock_config_dir if x["path"] == path][0]["is_file"]
-
-    with (
-        mock.patch(
-            "homeassistant.backup_restore.restore_backup_file_content",
-            return_value=backup_restore.RestoreBackupFileContent(
-                backup_file_path=backup_file_path
-            ),
-        ),
-        mock.patch("securetar.SecureTarFile"),
-        mock.patch("homeassistant.backup_restore.TemporaryDirectory"),
-        mock.patch("homeassistant.backup_restore.HA_VERSION", "2013.09.17"),
-        mock.patch("pathlib.Path.read_text", _patched_path_read_text),
-        mock.patch("pathlib.Path.is_file", _patched_path_is_file),
-        mock.patch("pathlib.Path.is_dir", _patched_path_is_dir),
-        mock.patch(
-            "pathlib.Path.iterdir",
-            return_value=[x["path"] for x in mock_config_dir],
-        ),
-        mock.patch("pathlib.Path.unlink") as unlink_mock,
-        mock.patch("shutil.rmtree") as rmtreemock,
-    ):
-        assert backup_restore.restore_backup(config_dir) is True
-        assert unlink_mock.call_count == 2
-        assert (
-            rmtreemock.call_count == 1
-        )  # We have 2 directories in the config directory, but backups is kept
-
-        removed_directories = {Path(call.args[0]) for call in rmtreemock.mock_calls}
-        assert removed_directories == {Path(config_dir, "www")}
-
-
-def test_extracting_the_contents_of_a_backup_file() -> None:
-    """Test extracting the contents of a backup file."""
-    config_dir = Path(get_test_config_dir())
-    backup_file_path = Path(config_dir, "backups", "test.tar")
-
-    def _patched_path_read_text(path: Path, **kwargs):
-        return '{"homeassistant": {"version": "2013.09.17"}, "compressed": false}'
-
-    getmembers_mock = mock.MagicMock(
-        return_value=[
-            tarfile.TarInfo(name="data"),
-            tarfile.TarInfo(name="data/../test"),
-            tarfile.TarInfo(name="data/.HA_VERSION"),
-            tarfile.TarInfo(name="data/.storage"),
-            tarfile.TarInfo(name="data/www"),
-        ]
-    )
-    extractall_mock = mock.MagicMock()
-
-    with (
-        mock.patch(
-            "homeassistant.backup_restore.restore_backup_file_content",
-            return_value=backup_restore.RestoreBackupFileContent(
-                backup_file_path=backup_file_path
-            ),
-        ),
-        mock.patch(
-            "tarfile.open",
-            return_value=mock.MagicMock(
-                getmembers=getmembers_mock,
-                extractall=extractall_mock,
-                __iter__=lambda x: iter(getmembers_mock.return_value),
-            ),
-        ),
-        mock.patch("homeassistant.backup_restore.TemporaryDirectory"),
-        mock.patch("pathlib.Path.read_text", _patched_path_read_text),
-        mock.patch("pathlib.Path.is_file", return_value=False),
-        mock.patch("pathlib.Path.iterdir", return_value=[]),
-    ):
-        assert backup_restore.restore_backup(config_dir) is True
-        assert getmembers_mock.call_count == 1
-        assert extractall_mock.call_count == 2
-
-        assert {
-            member.name for member in extractall_mock.mock_calls[-1].kwargs["members"]
-        } == {".HA_VERSION", ".storage", "www"}
diff --git a/tests/test_block_async_io.py b/tests/test_block_async_io.py
index dc2b096f595..ae77fbee217 100644
--- a/tests/test_block_async_io.py
+++ b/tests/test_block_async_io.py
@@ -5,7 +5,6 @@ import glob
 import importlib
 import os
 from pathlib import Path, PurePosixPath
-import ssl
 import time
 from typing import Any
 from unittest.mock import Mock, patch
@@ -45,7 +44,7 @@ async def test_protect_loop_debugger_sleep(caplog: pytest.LogCaptureFixture) ->
             return_value=frames,
         ),
     ):
-        time.sleep(0)  # noqa: ASYNC251
+        time.sleep(0)
 
     assert "Detected blocking call inside the event loop" not in caplog.text
 
@@ -72,7 +71,7 @@ async def test_protect_loop_sleep() -> None:
             return_value=frames,
         ),
     ):
-        time.sleep(0)  # noqa: ASYNC251
+        time.sleep(0)
 
 
 async def test_protect_loop_sleep_get_current_frame_raises() -> None:
@@ -98,7 +97,7 @@ async def test_protect_loop_sleep_get_current_frame_raises() -> None:
             return_value=frames,
         ),
    ):
-        time.sleep(0)  # noqa: ASYNC251
+        time.sleep(0)
 
 
 async def test_protect_loop_importlib_import_module_non_integration(
@@ -212,18 +211,7 @@ async def test_protect_loop_open(caplog: pytest.LogCaptureFixture) -> None:
     block_async_io.enable()
     with (
         contextlib.suppress(FileNotFoundError),
-        open("/proc/does_not_exist", encoding="utf8"),  # noqa: ASYNC230
-    ):
-        pass
-    assert "Detected blocking call to open with args" not in caplog.text
-
-
-async def test_protect_loop_path_open(caplog: pytest.LogCaptureFixture) -> None:
-    """Test opening a file in /proc is not reported."""
-    block_async_io.enable()
-    with (
-        contextlib.suppress(FileNotFoundError),
-        Path("/proc/does_not_exist").open(encoding="utf8"),  # noqa: ASYNC230
+        open("/proc/does_not_exist", encoding="utf8"),
     ):
         pass
     assert
"Detected blocking call to open with args" not in caplog.text @@ -235,78 +223,13 @@ async def test_protect_open(caplog: pytest.LogCaptureFixture) -> None: block_async_io.enable() with ( contextlib.suppress(FileNotFoundError), - open("/config/data_not_exist", encoding="utf8"), # noqa: ASYNC230 + open("/config/data_not_exist", encoding="utf8"), ): pass assert "Detected blocking call to open with args" in caplog.text -async def test_protect_path_open(caplog: pytest.LogCaptureFixture) -> None: - """Test opening a file in the event loop logs.""" - with patch.object(block_async_io, "_IN_TESTS", False): - block_async_io.enable() - with ( - contextlib.suppress(FileNotFoundError), - Path("/config/data_not_exist").open(encoding="utf8"), # noqa: ASYNC230 - ): - pass - - assert "Detected blocking call to open with args" in caplog.text - - -async def test_protect_path_read_bytes(caplog: pytest.LogCaptureFixture) -> None: - """Test reading file bytes in the event loop logs.""" - with patch.object(block_async_io, "_IN_TESTS", False): - block_async_io.enable() - with ( - contextlib.suppress(FileNotFoundError), - Path("/config/data_not_exist").read_bytes(), # noqa: ASYNC230 - ): - pass - - assert "Detected blocking call to read_bytes with args" in caplog.text - - -async def test_protect_path_read_text(caplog: pytest.LogCaptureFixture) -> None: - """Test reading a file text in the event loop logs.""" - with patch.object(block_async_io, "_IN_TESTS", False): - block_async_io.enable() - with ( - contextlib.suppress(FileNotFoundError), - Path("/config/data_not_exist").read_text(encoding="utf8"), # noqa: ASYNC230 - ): - pass - - assert "Detected blocking call to read_text with args" in caplog.text - - -async def test_protect_path_write_bytes(caplog: pytest.LogCaptureFixture) -> None: - """Test writing file bytes in the event loop logs.""" - with patch.object(block_async_io, "_IN_TESTS", False): - block_async_io.enable() - with ( - contextlib.suppress(FileNotFoundError), - Path("/config/data/not/exist").write_bytes(b"xxx"), # noqa: ASYNC230 - ): - pass - - assert "Detected blocking call to write_bytes with args" in caplog.text - - -async def test_protect_path_write_text(caplog: pytest.LogCaptureFixture) -> None: - """Test writing file text in the event loop logs.""" - with patch.object(block_async_io, "_IN_TESTS", False): - block_async_io.enable() - with ( - contextlib.suppress(FileNotFoundError), - Path("/config/data/not/exist").write_text("xxx", encoding="utf8"), # noqa: ASYNC230 - ): - pass - - assert "Detected blocking call to write_text with args" in caplog.text - - async def test_enable_multiple_times(caplog: pytest.LogCaptureFixture) -> None: """Test trying to enable multiple times.""" with patch.object(block_async_io, "_IN_TESTS", False): @@ -330,7 +253,7 @@ async def test_protect_open_path(path: Any, caplog: pytest.LogCaptureFixture) -> """Test opening a file by path in the event loop logs.""" with patch.object(block_async_io, "_IN_TESTS", False): block_async_io.enable() - with contextlib.suppress(FileNotFoundError), open(path, encoding="utf8"): # noqa: ASYNC230 + with contextlib.suppress(FileNotFoundError), open(path, encoding="utf8"): pass assert "Detected blocking call to open with args" in caplog.text @@ -407,48 +330,13 @@ async def test_protect_loop_walk( assert "Detected blocking call to walk with args" not in caplog.text -async def test_protect_loop_load_default_certs( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test SSLContext.load_default_certs calls in the loop are 
logged.""" - with patch.object(block_async_io, "_IN_TESTS", False): - block_async_io.enable() - context = ssl.create_default_context() - assert "Detected blocking call to load_default_certs" in caplog.text - assert context - - -async def test_protect_loop_load_verify_locations( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test SSLContext.load_verify_locations calls in the loop are logged.""" - with patch.object(block_async_io, "_IN_TESTS", False): - block_async_io.enable() - context = ssl.create_default_context() - with pytest.raises(OSError): - context.load_verify_locations("/dev/null") - assert "Detected blocking call to load_verify_locations" in caplog.text - - -async def test_protect_loop_load_cert_chain( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test SSLContext.load_cert_chain calls in the loop are logged.""" - with patch.object(block_async_io, "_IN_TESTS", False): - block_async_io.enable() - context = ssl.create_default_context() - with pytest.raises(OSError): - context.load_cert_chain("/dev/null") - assert "Detected blocking call to load_cert_chain" in caplog.text - - async def test_open_calls_ignored_in_tests(caplog: pytest.LogCaptureFixture) -> None: """Test opening a file in tests is ignored.""" assert block_async_io._IN_TESTS block_async_io.enable() with ( contextlib.suppress(FileNotFoundError), - open("/config/data_not_exist", encoding="utf8"), # noqa: ASYNC230 + open("/config/data_not_exist", encoding="utf8"), ): pass diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index a32d7d1e50b..ca864006852 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -1,7 +1,7 @@ """Test the bootstrapping.""" import asyncio -from collections.abc import Generator, Iterable +from collections.abc import Iterable import contextlib import glob import logging @@ -11,21 +11,19 @@ from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest +from typing_extensions import Generator from homeassistant import bootstrap, loader, runner import homeassistant.config as config_util from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - BASE_PLATFORMS, - CONF_DEBUG, - SIGNAL_BOOTSTRAP_INTEGRATIONS, -) +from homeassistant.const import CONF_DEBUG, SIGNAL_BOOTSTRAP_INTEGRATIONS from homeassistant.core import CoreState, HomeAssistant, async_get_hass, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.translation import async_translations_loaded from homeassistant.helpers.typing import ConfigType from homeassistant.loader import Integration +from homeassistant.setup import BASE_PLATFORMS from .common import ( MockConfigEntry, @@ -72,7 +70,7 @@ def mock_http_start_stop() -> Generator[None]: yield -@patch("homeassistant.bootstrap.async_enable_logging", AsyncMock()) +@patch("homeassistant.bootstrap.async_enable_logging", Mock()) async def test_home_assistant_core_config_validation(hass: HomeAssistant) -> None: """Test if we pass in wrong information for HA conf.""" # Extensive HA conf validation testing is done @@ -96,10 +94,10 @@ async def test_async_enable_logging( side_effect=OSError, ), ): - await bootstrap.async_enable_logging(hass) + bootstrap.async_enable_logging(hass) mock_async_activate_log_queue_handler.assert_called_once() mock_async_activate_log_queue_handler.reset_mock() - await bootstrap.async_enable_logging( + bootstrap.async_enable_logging( hass, 
log_rotate_days=5, log_file="test.log", @@ -143,7 +141,7 @@ async def test_config_does_not_turn_off_debug(hass: HomeAssistant) -> None: @pytest.mark.parametrize("hass_config", [{"frontend": {}}]) @pytest.mark.usefixtures("mock_hass_config") async def test_asyncio_debug_on_turns_hass_debug_on( - mock_enable_logging: AsyncMock, + mock_enable_logging: Mock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -213,7 +211,7 @@ async def test_setup_after_deps_all_present(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): order.append(domain) return True @@ -260,7 +258,7 @@ async def test_setup_after_deps_in_stage_1_ignored(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): order.append(domain) return True @@ -315,7 +313,7 @@ async def test_setup_after_deps_manifests_are_loaded_even_if_not_setup( order = [] def gen_domain_setup(domain): - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): order.append(domain) return True @@ -392,7 +390,7 @@ async def test_setup_frontend_before_recorder(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): order.append(domain) return True @@ -436,6 +434,9 @@ async def test_setup_frontend_before_recorder(hass: HomeAssistant) -> None: MockModule( domain="recorder", async_setup=gen_domain_setup("recorder"), + partial_manifest={ + "after_dependencies": ["http"], + }, ), ) @@ -471,7 +472,7 @@ async def test_setup_after_deps_via_platform(hass: HomeAssistant) -> None: after_dep_event = asyncio.Event() def gen_domain_setup(domain): - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): if domain == "after_dep_of_platform_int": await after_dep_event.wait() @@ -520,7 +521,7 @@ async def test_setup_after_deps_not_trigger_load(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): order.append(domain) return True @@ -559,7 +560,7 @@ async def test_setup_after_deps_not_present(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): order.append(domain) return True @@ -597,7 +598,7 @@ def mock_is_virtual_env() -> Generator[Mock]: @pytest.fixture -def mock_enable_logging() -> Generator[AsyncMock]: +def mock_enable_logging() -> Generator[Mock]: """Mock enable logging.""" with patch("homeassistant.bootstrap.async_enable_logging") as enable_logging: yield enable_logging @@ -633,7 +634,7 @@ def mock_ensure_config_exists() -> Generator[AsyncMock]: @pytest.mark.parametrize("hass_config", [{"browser": {}, "frontend": {}}]) @pytest.mark.usefixtures("mock_hass_config") async def test_setup_hass( - mock_enable_logging: AsyncMock, + mock_enable_logging: Mock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -686,7 +687,7 @@ async def test_setup_hass( @pytest.mark.parametrize("hass_config", [{"browser": {}, "frontend": {}}]) 
@pytest.mark.usefixtures("mock_hass_config") async def test_setup_hass_takes_longer_than_log_slow_startup( - mock_enable_logging: AsyncMock, + mock_enable_logging: Mock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -727,7 +728,7 @@ async def test_setup_hass_takes_longer_than_log_slow_startup( async def test_setup_hass_invalid_yaml( - mock_enable_logging: AsyncMock, + mock_enable_logging: Mock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -754,7 +755,7 @@ async def test_setup_hass_invalid_yaml( async def test_setup_hass_config_dir_nonexistent( - mock_enable_logging: AsyncMock, + mock_enable_logging: Mock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -780,7 +781,7 @@ async def test_setup_hass_config_dir_nonexistent( async def test_setup_hass_recovery_mode( - mock_enable_logging: AsyncMock, + mock_enable_logging: Mock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -816,7 +817,7 @@ async def test_setup_hass_recovery_mode( @pytest.mark.usefixtures("mock_hass_config") async def test_setup_hass_safe_mode( - mock_enable_logging: AsyncMock, + mock_enable_logging: Mock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -851,7 +852,7 @@ async def test_setup_hass_safe_mode( @pytest.mark.usefixtures("mock_hass_config") async def test_setup_hass_recovery_mode_and_safe_mode( - mock_enable_logging: AsyncMock, + mock_enable_logging: Mock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -887,7 +888,7 @@ async def test_setup_hass_recovery_mode_and_safe_mode( @pytest.mark.parametrize("hass_config", [{"homeassistant": {"non-existing": 1}}]) @pytest.mark.usefixtures("mock_hass_config") async def test_setup_hass_invalid_core_config( - mock_enable_logging: AsyncMock, + mock_enable_logging: Mock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -926,7 +927,7 @@ async def test_setup_hass_invalid_core_config( ) @pytest.mark.usefixtures("mock_hass_config") async def test_setup_recovery_mode_if_no_frontend( - mock_enable_logging: AsyncMock, + mock_enable_logging: Mock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -969,7 +970,7 @@ async def test_empty_integrations_list_is_only_sent_at_the_end_of_bootstrap( order = [] def gen_domain_setup(domain): - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): order.append(domain) await asyncio.sleep(0.05) @@ -1029,7 +1030,7 @@ async def test_warning_logged_on_wrap_up_timeout( task: asyncio.Task | None = None def gen_domain_setup(domain): - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): nonlocal task async def _not_marked_background_task(): @@ -1067,7 +1068,7 @@ async def test_tasks_logged_that_block_stage_1( """Test we log tasks that delay stage 1 startup.""" def gen_domain_setup(domain): - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): async def _not_marked_background_task(): await asyncio.sleep(0.2) @@ -1110,7 +1111,7 @@ async def test_tasks_logged_that_block_stage_2( done_future = hass.loop.create_future() def 
gen_domain_setup(domain): - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): async def _not_marked_background_task(): await done_future @@ -1326,34 +1327,6 @@ async def test_bootstrap_dependencies( ) -@pytest.mark.parametrize("load_registries", [False]) -async def test_bootstrap_dependency_not_found( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test setup when an integration has missing dependencies.""" - mock_integration( - hass, - MockModule("good_integration", dependencies=[]), - ) - # Simulate an integration with missing dependencies. While a core integration - # can't have missing dependencies thanks to checks by hassfest, there's no such - # guarantee for custom integrations. - mock_integration( - hass, - MockModule("bad_integration", dependencies=["hahaha_crash_and_burn"]), - ) - - assert await bootstrap.async_from_config_dict( - {"good_integration": {}, "bad_integration": {}}, hass - ) - - assert "good_integration" in hass.config.components - assert "bad_integration" not in hass.config.components - - assert "Unable to resolve dependencies for bad_integration" in caplog.text - - async def test_pre_import_no_requirements(hass: HomeAssistant) -> None: """Test pre-imported and do not have any requirements.""" pre_imports = [ @@ -1452,7 +1425,7 @@ async def test_setup_does_base_platforms_first(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): order.append(domain) return True diff --git a/tests/test_config.py b/tests/test_config.py index c8c5b081119..7f94317afea 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -2,34 +2,62 @@ import asyncio from collections import OrderedDict -from collections.abc import Generator import contextlib +import copy import logging import os -from pathlib import Path +from typing import Any from unittest import mock from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator import voluptuous as vol +from voluptuous import Invalid, MultipleInvalid import yaml from homeassistant import loader import homeassistant.config as config_util -from homeassistant.const import CONF_PACKAGES, __version__ -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.exceptions import ConfigValidationError, HomeAssistantError -from homeassistant.helpers import check_config, config_validation as cv +from homeassistant.const import ( + ATTR_ASSUMED_STATE, + ATTR_FRIENDLY_NAME, + CONF_AUTH_MFA_MODULES, + CONF_AUTH_PROVIDERS, + CONF_CUSTOMIZE, + CONF_LATITUDE, + CONF_LONGITUDE, + CONF_NAME, + __version__, +) +from homeassistant.core import ( + DOMAIN as HA_DOMAIN, + ConfigSource, + HomeAssistant, + HomeAssistantError, +) +from homeassistant.exceptions import ConfigValidationError +from homeassistant.helpers import ( + check_config, + config_validation as cv, + issue_registry as ir, +) +from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType from homeassistant.loader import Integration, async_get_integration from homeassistant.setup import async_setup_component +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) from homeassistant.util.yaml import SECRET_YAML from homeassistant.util.yaml.objects import 
NodeDictClass from .common import ( MockModule, MockPlatform, + MockUser, get_test_config_dir, mock_integration, mock_platform, @@ -384,10 +412,11 @@ async def test_ensure_config_exists_creates_config(hass: HomeAssistant) -> None: async def test_ensure_config_exists_uses_existing_config(hass: HomeAssistant) -> None: """Test that calling ensure_config_exists uses existing config.""" - await hass.async_add_executor_job(create_file, YAML_PATH) + create_file(YAML_PATH) await config_util.async_ensure_config_exists(hass) - content = await hass.async_add_executor_job(Path(YAML_PATH).read_text) + with open(YAML_PATH, encoding="utf8") as fp: + content = fp.read() # File created with create_file are empty assert content == "" @@ -395,11 +424,12 @@ async def test_ensure_config_exists_uses_existing_config(hass: HomeAssistant) -> async def test_ensure_existing_files_is_not_overwritten(hass: HomeAssistant) -> None: """Test that calling async_create_default_config does not overwrite existing files.""" - await hass.async_add_executor_job(create_file, SECRET_PATH) + create_file(SECRET_PATH) await config_util.async_create_default_config(hass) - content = await hass.async_add_executor_job(Path(SECRET_PATH).read_text) + with open(SECRET_PATH, encoding="utf8") as fp: + content = fp.read() # File created with create_file are empty assert content == "" @@ -460,10 +490,9 @@ def test_load_yaml_config_preserves_key_order() -> None: fp.write("hello: 2\n") fp.write("world: 1\n") - assert list(config_util.load_yaml_config_file(YAML_PATH).items()) == [ - ("hello", 2), - ("world", 1), - ] + assert [("hello", 2), ("world", 1)] == list( + config_util.load_yaml_config_file(YAML_PATH).items() + ) async def test_create_default_config_returns_none_if_write_error( @@ -479,6 +508,104 @@ async def test_create_default_config_returns_none_if_write_error( assert mock_print.called +def test_core_config_schema() -> None: + """Test core config schema.""" + for value in ( + {"unit_system": "K"}, + {"time_zone": "non-exist"}, + {"latitude": "91"}, + {"longitude": -181}, + {"external_url": "not an url"}, + {"internal_url": "not an url"}, + {"currency", 100}, + {"customize": "bla"}, + {"customize": {"light.sensor": 100}}, + {"customize": {"entity_id": []}}, + {"country": "xx"}, + {"language": "xx"}, + {"radius": -10}, + ): + with pytest.raises(MultipleInvalid): + config_util.CORE_CONFIG_SCHEMA(value) + + config_util.CORE_CONFIG_SCHEMA( + { + "name": "Test name", + "latitude": "-23.45", + "longitude": "123.45", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "unit_system": "metric", + "currency": "USD", + "customize": {"sensor.temperature": {"hidden": True}}, + "country": "SE", + "language": "sv", + "radius": "10", + } + ) + + +def test_core_config_schema_internal_external_warning( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that we warn for internal/external URL with path.""" + config_util.CORE_CONFIG_SCHEMA( + { + "external_url": "https://www.example.com/bla", + "internal_url": "http://example.local/yo", + } + ) + + assert "Invalid external_url set" in caplog.text + assert "Invalid internal_url set" in caplog.text + + +def test_customize_dict_schema() -> None: + """Test basic customize config validation.""" + values = ({ATTR_FRIENDLY_NAME: None}, {ATTR_ASSUMED_STATE: "2"}) + + for val in values: + with pytest.raises(MultipleInvalid): + config_util.CUSTOMIZE_DICT_SCHEMA(val) + + assert config_util.CUSTOMIZE_DICT_SCHEMA( + {ATTR_FRIENDLY_NAME: 2, ATTR_ASSUMED_STATE: "0"} + ) == 
{ATTR_FRIENDLY_NAME: "2", ATTR_ASSUMED_STATE: False} + + +def test_customize_glob_is_ordered() -> None: + """Test that customize_glob preserves order.""" + conf = config_util.CORE_CONFIG_SCHEMA({"customize_glob": OrderedDict()}) + assert isinstance(conf["customize_glob"], OrderedDict) + + +async def _compute_state(hass, config): + await config_util.async_process_ha_core_config(hass, config) + + entity = Entity() + entity.entity_id = "test.test" + entity.hass = hass + entity.schedule_update_ha_state() + + await hass.async_block_till_done() + + return hass.states.get("test.test") + + +async def test_entity_customization(hass: HomeAssistant) -> None: + """Test entity customization through configuration.""" + config = { + CONF_LATITUDE: 50, + CONF_LONGITUDE: 50, + CONF_NAME: "Test", + CONF_CUSTOMIZE: {"test.test": {"hidden": True}}, + } + + state = await _compute_state(hass, config) + + assert state.attributes["hidden"] + + @patch("homeassistant.config.shutil") @patch("homeassistant.config.os") @patch("homeassistant.config.is_docker_env", return_value=False) @@ -568,6 +695,361 @@ def test_config_upgrade_no_file(hass: HomeAssistant) -> None: assert opened_file.write.call_args == mock.call(__version__) +async def test_loading_configuration_from_storage( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test loading core config onto hass object.""" + hass_storage["core.config"] = { + "data": { + "elevation": 10, + "latitude": 55, + "location_name": "Home", + "longitude": 13, + "time_zone": "Europe/Copenhagen", + "unit_system": "metric", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "currency": "EUR", + "country": "SE", + "language": "sv", + "radius": 150, + }, + "key": "core.config", + "version": 1, + "minor_version": 4, + } + await config_util.async_process_ha_core_config( + hass, {"allowlist_external_dirs": "/etc"} + ) + + assert hass.config.latitude == 55 + assert hass.config.longitude == 13 + assert hass.config.elevation == 10 + assert hass.config.location_name == "Home" + assert hass.config.units is METRIC_SYSTEM + assert hass.config.time_zone == "Europe/Copenhagen" + assert hass.config.external_url == "https://www.example.com" + assert hass.config.internal_url == "http://example.local" + assert hass.config.currency == "EUR" + assert hass.config.country == "SE" + assert hass.config.language == "sv" + assert hass.config.radius == 150 + assert len(hass.config.allowlist_external_dirs) == 3 + assert "/etc" in hass.config.allowlist_external_dirs + assert hass.config.config_source is ConfigSource.STORAGE + + +async def test_loading_configuration_from_storage_with_yaml_only( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test loading core and YAML config onto hass object.""" + hass_storage["core.config"] = { + "data": { + "elevation": 10, + "latitude": 55, + "location_name": "Home", + "longitude": 13, + "time_zone": "Europe/Copenhagen", + "unit_system": "metric", + }, + "key": "core.config", + "version": 1, + } + await config_util.async_process_ha_core_config( + hass, {"media_dirs": {"mymedia": "/usr"}, "allowlist_external_dirs": "/etc"} + ) + + assert hass.config.latitude == 55 + assert hass.config.longitude == 13 + assert hass.config.elevation == 10 + assert hass.config.location_name == "Home" + assert hass.config.units is METRIC_SYSTEM + assert hass.config.time_zone == "Europe/Copenhagen" + assert len(hass.config.allowlist_external_dirs) == 3 + assert "/etc" in hass.config.allowlist_external_dirs + assert 
hass.config.media_dirs == {"mymedia": "/usr"} + assert hass.config.config_source is ConfigSource.STORAGE + + +async def test_migration_and_updating_configuration( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test updating configuration stores the new configuration.""" + core_data = { + "data": { + "elevation": 10, + "latitude": 55, + "location_name": "Home", + "longitude": 13, + "time_zone": "Europe/Copenhagen", + "unit_system": "imperial", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "currency": "BTC", + }, + "key": "core.config", + "version": 1, + "minor_version": 1, + } + hass_storage["core.config"] = dict(core_data) + await config_util.async_process_ha_core_config( + hass, {"allowlist_external_dirs": "/etc"} + ) + await hass.config.async_update(latitude=50, currency="USD") + + expected_new_core_data = copy.deepcopy(core_data) + # From async_update above + expected_new_core_data["data"]["latitude"] = 50 + expected_new_core_data["data"]["currency"] = "USD" + # 1.1 -> 1.2 store migration with migrated unit system + expected_new_core_data["data"]["unit_system_v2"] = "us_customary" + # 1.1 -> 1.3 defaults for country and language + expected_new_core_data["data"]["country"] = None + expected_new_core_data["data"]["language"] = "en" + # 1.1 -> 1.4 defaults for zone radius + expected_new_core_data["data"]["radius"] = 100 + # Bumped minor version + expected_new_core_data["minor_version"] = 4 + assert hass_storage["core.config"] == expected_new_core_data + assert hass.config.latitude == 50 + assert hass.config.currency == "USD" + assert hass.config.country is None + assert hass.config.language == "en" + assert hass.config.radius == 100 + + +async def test_override_stored_configuration( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test loading core and YAML config onto hass object.""" + hass_storage["core.config"] = { + "data": { + "elevation": 10, + "latitude": 55, + "location_name": "Home", + "longitude": 13, + "time_zone": "Europe/Copenhagen", + "unit_system": "metric", + }, + "key": "core.config", + "version": 1, + } + await config_util.async_process_ha_core_config( + hass, {"latitude": 60, "allowlist_external_dirs": "/etc"} + ) + + assert hass.config.latitude == 60 + assert hass.config.longitude == 13 + assert hass.config.elevation == 10 + assert hass.config.location_name == "Home" + assert hass.config.units is METRIC_SYSTEM + assert hass.config.time_zone == "Europe/Copenhagen" + assert len(hass.config.allowlist_external_dirs) == 3 + assert "/etc" in hass.config.allowlist_external_dirs + assert hass.config.config_source is ConfigSource.YAML + + +async def test_loading_configuration(hass: HomeAssistant) -> None: + """Test loading core config onto hass object.""" + await config_util.async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "America/New_York", + "allowlist_external_dirs": "/etc", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "media_dirs": {"mymedia": "/usr"}, + "debug": True, + "currency": "EUR", + "country": "SE", + "language": "sv", + "radius": 150, + }, + ) + + assert hass.config.latitude == 60 + assert hass.config.longitude == 50 + assert hass.config.elevation == 25 + assert hass.config.location_name == "Huis" + assert hass.config.units is US_CUSTOMARY_SYSTEM + assert hass.config.time_zone == "America/New_York" + assert 
hass.config.external_url == "https://www.example.com" + assert hass.config.internal_url == "http://example.local" + assert len(hass.config.allowlist_external_dirs) == 3 + assert "/etc" in hass.config.allowlist_external_dirs + assert "/usr" in hass.config.allowlist_external_dirs + assert hass.config.media_dirs == {"mymedia": "/usr"} + assert hass.config.config_source is ConfigSource.YAML + assert hass.config.debug is True + assert hass.config.currency == "EUR" + assert hass.config.country == "SE" + assert hass.config.language == "sv" + assert hass.config.radius == 150 + + +@pytest.mark.parametrize( + ("minor_version", "users", "user_data", "default_language"), + [ + (2, (), {}, "en"), + (2, ({"is_owner": True},), {}, "en"), + ( + 2, + ({"id": "user1", "is_owner": True},), + {"user1": {"language": {"language": "sv"}}}, + "sv", + ), + ( + 2, + ({"id": "user1", "is_owner": False},), + {"user1": {"language": {"language": "sv"}}}, + "en", + ), + (3, (), {}, "en"), + (3, ({"is_owner": True},), {}, "en"), + ( + 3, + ({"id": "user1", "is_owner": True},), + {"user1": {"language": {"language": "sv"}}}, + "en", + ), + ( + 3, + ({"id": "user1", "is_owner": False},), + {"user1": {"language": {"language": "sv"}}}, + "en", + ), + ], +) +async def test_language_default( + hass: HomeAssistant, + hass_storage: dict[str, Any], + minor_version, + users, + user_data, + default_language, +) -> None: + """Test language config default to owner user's language during migration. + + This should only happen if the core store version < 1.3 + """ + core_data = { + "data": {}, + "key": "core.config", + "version": 1, + "minor_version": minor_version, + } + hass_storage["core.config"] = dict(core_data) + + for user_config in users: + user = MockUser(**user_config).add_to_hass(hass) + if user.id not in user_data: + continue + storage_key = f"frontend.user_data_{user.id}" + hass_storage[storage_key] = { + "key": storage_key, + "version": 1, + "data": user_data[user.id], + } + + await config_util.async_process_ha_core_config( + hass, + {}, + ) + assert hass.config.language == default_language + + +async def test_loading_configuration_default_media_dirs_docker( + hass: HomeAssistant, +) -> None: + """Test loading core config onto hass object.""" + with patch("homeassistant.config.is_docker_env", return_value=True): + await config_util.async_process_ha_core_config( + hass, + { + "name": "Huis", + }, + ) + + assert hass.config.location_name == "Huis" + assert len(hass.config.allowlist_external_dirs) == 2 + assert "/media" in hass.config.allowlist_external_dirs + assert hass.config.media_dirs == {"local": "/media"} + + +async def test_loading_configuration_from_packages(hass: HomeAssistant) -> None: + """Test loading packages config onto hass object config.""" + await config_util.async_process_ha_core_config( + hass, + { + "latitude": 39, + "longitude": -1, + "elevation": 500, + "name": "Huis", + "unit_system": "metric", + "time_zone": "Europe/Madrid", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "packages": { + "package_1": {"wake_on_lan": None}, + "package_2": { + "light": {"platform": "hue"}, + "media_extractor": None, + "sun": None, + }, + }, + }, + ) + + # Empty packages not allowed + with pytest.raises(MultipleInvalid): + await config_util.async_process_ha_core_config( + hass, + { + "latitude": 39, + "longitude": -1, + "elevation": 500, + "name": "Huis", + "unit_system": "metric", + "time_zone": "Europe/Madrid", + "packages": {"empty_package": None}, + }, + ) + + 
+@pytest.mark.parametrize( + ("unit_system_name", "expected_unit_system"), + [ + ("metric", METRIC_SYSTEM), + ("imperial", US_CUSTOMARY_SYSTEM), + ("us_customary", US_CUSTOMARY_SYSTEM), + ], +) +async def test_loading_configuration_unit_system( + hass: HomeAssistant, unit_system_name: str, expected_unit_system: UnitSystem +) -> None: + """Test backward compatibility when loading core config.""" + await config_util.async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": unit_system_name, + "time_zone": "America/New_York", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + }, + ) + + assert hass.config.units is expected_unit_system + + @patch("homeassistant.helpers.check_config.async_check_ha_config_file") async def test_check_ha_config_file_correct(mock_check, hass: HomeAssistant) -> None: """Check that restart propagates to stop.""" @@ -588,8 +1070,10 @@ async def test_check_ha_config_file_wrong(mock_check, hass: HomeAssistant) -> No "hass_config", [ { - HOMEASSISTANT_DOMAIN: { - CONF_PACKAGES: {"pack_dict": {"input_boolean": {"ib1": None}}} + HA_DOMAIN: { + config_util.CONF_PACKAGES: { + "pack_dict": {"input_boolean": {"ib1": None}} + } }, "input_boolean": {"ib2": None}, "light": {"platform": "test"}, @@ -604,7 +1088,7 @@ async def test_async_hass_config_yaml_merge( conf = await config_util.async_hass_config_yaml(hass) assert merge_log_err.call_count == 0 - assert conf[HOMEASSISTANT_DOMAIN].get(CONF_PACKAGES) is not None + assert conf[HA_DOMAIN].get(config_util.CONF_PACKAGES) is not None assert len(conf) == 3 assert len(conf["input_boolean"]) == 2 assert len(conf["light"]) == 1 @@ -632,7 +1116,7 @@ async def test_merge(merge_log_err: MagicMock, hass: HomeAssistant) -> None: }, } config = { - HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, + HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, "input_boolean": {"ib2": None}, "light": {"platform": "test"}, "automation": [], @@ -659,7 +1143,7 @@ async def test_merge_try_falsy(merge_log_err: MagicMock, hass: HomeAssistant) -> "pack_list2": {"light": OrderedDict()}, } config = { - HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, + HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, "automation": {"do": "something"}, "light": {"some": "light"}, } @@ -682,7 +1166,7 @@ async def test_merge_new(merge_log_err: MagicMock, hass: HomeAssistant) -> None: "api": {}, }, } - config = {HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}} + config = {HA_DOMAIN: {config_util.CONF_PACKAGES: packages}} await config_util.merge_packages_config(hass, config, packages) assert merge_log_err.call_count == 0 @@ -702,7 +1186,7 @@ async def test_merge_type_mismatch( "pack_2": {"light": {"ib1": None}}, # light gets merged - ensure_list } config = { - HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, + HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, "input_boolean": {"ib2": None}, "input_select": [{"ib2": None}], "light": [{"platform": "two"}], @@ -720,13 +1204,13 @@ async def test_merge_once_only_keys( ) -> None: """Test if we have a merge for a comp that may occur only once. 
Keys.""" packages = {"pack_2": {"api": None}} - config = {HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "api": None} + config = {HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, "api": None} await config_util.merge_packages_config(hass, config, packages) assert config["api"] == OrderedDict() packages = {"pack_2": {"api": {"key_3": 3}}} config = { - HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, + HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, "api": {"key_1": 1, "key_2": 2}, } await config_util.merge_packages_config(hass, config, packages) @@ -735,7 +1219,7 @@ async def test_merge_once_only_keys( # Duplicate keys error packages = {"pack_2": {"api": {"key": 2}}} config = { - HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, + HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, "api": {"key": 1}, } await config_util.merge_packages_config(hass, config, packages) @@ -750,7 +1234,7 @@ async def test_merge_once_only_lists(hass: HomeAssistant) -> None: } } config = { - HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, + HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, "api": {"list_1": ["item_1"]}, } await config_util.merge_packages_config(hass, config, packages) @@ -773,7 +1257,7 @@ async def test_merge_once_only_dictionaries(hass: HomeAssistant) -> None: } } config = { - HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, + HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, "api": {"dict_1": {"key_1": 1, "dict_1.1": {"key_1.1": 1.1}}}, } await config_util.merge_packages_config(hass, config, packages) @@ -809,7 +1293,7 @@ async def test_merge_duplicate_keys( """Test if keys in dicts are duplicates.""" packages = {"pack_1": {"input_select": {"ib1": None}}} config = { - HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, + HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, "input_select": {"ib1": 1}, } await config_util.merge_packages_config(hass, config, packages) @@ -819,13 +1303,155 @@ async def test_merge_duplicate_keys( assert len(config["input_select"]) == 1 +async def test_merge_customize(hass: HomeAssistant) -> None: + """Test loading core config onto hass object.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + "customize": {"a.a": {"friendly_name": "A"}}, + "packages": { + "pkg1": {"homeassistant": {"customize": {"b.b": {"friendly_name": "BB"}}}} + }, + } + await config_util.async_process_ha_core_config(hass, core_config) + + assert hass.data[config_util.DATA_CUSTOMIZE].get("b.b") == {"friendly_name": "BB"} + + +async def test_auth_provider_config(hass: HomeAssistant) -> None: + """Test loading auth provider config onto hass object.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + CONF_AUTH_PROVIDERS: [ + {"type": "homeassistant"}, + ], + CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp", "id": "second"}], + } + if hasattr(hass, "auth"): + del hass.auth + await config_util.async_process_ha_core_config(hass, core_config) + + assert len(hass.auth.auth_providers) == 1 + assert hass.auth.auth_providers[0].type == "homeassistant" + assert len(hass.auth.auth_mfa_modules) == 2 + assert hass.auth.auth_mfa_modules[0].id == "totp" + assert hass.auth.auth_mfa_modules[1].id == "second" + + +async def test_auth_provider_config_default(hass: HomeAssistant) -> None: + """Test loading default auth provider config.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": 
"Huis", + "unit_system": "imperial", + "time_zone": "GMT", + } + if hasattr(hass, "auth"): + del hass.auth + await config_util.async_process_ha_core_config(hass, core_config) + + assert len(hass.auth.auth_providers) == 1 + assert hass.auth.auth_providers[0].type == "homeassistant" + assert len(hass.auth.auth_mfa_modules) == 1 + assert hass.auth.auth_mfa_modules[0].id == "totp" + + +async def test_disallowed_auth_provider_config(hass: HomeAssistant) -> None: + """Test loading insecure example auth provider is disallowed.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + CONF_AUTH_PROVIDERS: [ + { + "type": "insecure_example", + "users": [ + { + "username": "test-user", + "password": "test-pass", + "name": "Test Name", + } + ], + } + ], + } + with pytest.raises(Invalid): + await config_util.async_process_ha_core_config(hass, core_config) + + +async def test_disallowed_duplicated_auth_provider_config(hass: HomeAssistant) -> None: + """Test loading insecure example auth provider is disallowed.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + CONF_AUTH_PROVIDERS: [{"type": "homeassistant"}, {"type": "homeassistant"}], + } + with pytest.raises(Invalid): + await config_util.async_process_ha_core_config(hass, core_config) + + +async def test_disallowed_auth_mfa_module_config(hass: HomeAssistant) -> None: + """Test loading insecure example auth mfa module is disallowed.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + CONF_AUTH_MFA_MODULES: [ + { + "type": "insecure_example", + "data": [{"user_id": "mock-user", "pin": "test-pin"}], + } + ], + } + with pytest.raises(Invalid): + await config_util.async_process_ha_core_config(hass, core_config) + + +async def test_disallowed_duplicated_auth_mfa_module_config( + hass: HomeAssistant, +) -> None: + """Test loading insecure example auth mfa module is disallowed.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp"}], + } + with pytest.raises(Invalid): + await config_util.async_process_ha_core_config(hass, core_config) + + async def test_merge_split_component_definition(hass: HomeAssistant) -> None: """Test components with trailing description in packages are merged.""" packages = { "pack_1": {"light one": {"l1": None}}, "pack_2": {"light two": {"l2": None}, "light three": {"l3": None}}, } - config = {HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}} + config = {HA_DOMAIN: {config_util.CONF_PACKAGES: packages}} await config_util.merge_packages_config(hass, config, packages) assert len(config) == 4 @@ -1370,6 +1996,74 @@ def test_identify_config_schema(domain, schema, expected) -> None: ) +async def test_core_config_schema_historic_currency( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test core config schema.""" + await config_util.async_process_ha_core_config(hass, {"currency": "LTT"}) + + issue = issue_registry.async_get_issue("homeassistant", "historic_currency") + assert issue + assert issue.translation_placeholders == {"currency": "LTT"} + + +async def test_core_store_historic_currency( + hass: HomeAssistant, hass_storage: dict[str, Any], issue_registry: ir.IssueRegistry +) -> None: + 
"""Test core config store.""" + core_data = { + "data": { + "currency": "LTT", + }, + "key": "core.config", + "version": 1, + "minor_version": 1, + } + hass_storage["core.config"] = dict(core_data) + await config_util.async_process_ha_core_config(hass, {}) + + issue_id = "historic_currency" + issue = issue_registry.async_get_issue("homeassistant", issue_id) + assert issue + assert issue.translation_placeholders == {"currency": "LTT"} + + await hass.config.async_update(currency="EUR") + issue = issue_registry.async_get_issue("homeassistant", issue_id) + assert not issue + + +async def test_core_config_schema_no_country( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test core config schema.""" + await config_util.async_process_ha_core_config(hass, {}) + + issue = issue_registry.async_get_issue("homeassistant", "country_not_configured") + assert issue + + +async def test_core_store_no_country( + hass: HomeAssistant, hass_storage: dict[str, Any], issue_registry: ir.IssueRegistry +) -> None: + """Test core config store.""" + core_data = { + "data": {}, + "key": "core.config", + "version": 1, + "minor_version": 1, + } + hass_storage["core.config"] = dict(core_data) + await config_util.async_process_ha_core_config(hass, {}) + + issue_id = "country_not_configured" + issue = issue_registry.async_get_issue("homeassistant", issue_id) + assert issue + + await hass.config.async_update(country="SE") + issue = issue_registry.async_get_issue("homeassistant", issue_id) + assert not issue + + async def test_safe_mode(hass: HomeAssistant) -> None: """Test safe mode.""" assert config_util.safe_mode_enabled(hass.config.config_dir) is False @@ -1646,7 +2340,7 @@ async def test_packages_schema_validation_error( ] assert error_records == snapshot - assert len(config[HOMEASSISTANT_DOMAIN][CONF_PACKAGES]) == 0 + assert len(config[HA_DOMAIN][config_util.CONF_PACKAGES]) == 0 def test_extract_domain_configs() -> None: @@ -1789,3 +2483,30 @@ async def test_loading_platforms_gathers(hass: HomeAssistant) -> None: ("platform_int", "sensor"), ("platform_int2", "sensor"), ] + + +async def test_configuration_legacy_template_is_removed(hass: HomeAssistant) -> None: + """Test loading core config onto hass object.""" + await config_util.async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "America/New_York", + "allowlist_external_dirs": "/etc", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "media_dirs": {"mymedia": "/usr"}, + "legacy_templates": True, + "debug": True, + "currency": "EUR", + "country": "SE", + "language": "sv", + "radius": 150, + }, + ) + + assert not getattr(hass.config, "legacy_templates") diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index 41af8af3f21..cba7ad8f215 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -3,33 +3,26 @@ from __future__ import annotations import asyncio -from collections.abc import Generator from datetime import timedelta +from functools import cached_property import logging -import re -from typing import Any, Self +from typing import Any from unittest.mock import ANY, AsyncMock, Mock, patch -from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion +from typing_extensions import Generator from homeassistant import config_entries, data_entry_flow, loader from 
homeassistant.components import dhcp -from homeassistant.config_entries import ConfigEntry +from homeassistant.components.hassio import HassioServiceInfo from homeassistant.const import ( - CONF_NAME, EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, ) -from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, - CoreState, - HomeAssistant, - callback, -) +from homeassistant.core import DOMAIN as HA_DOMAIN, CoreState, HomeAssistant, callback from homeassistant.data_entry_flow import BaseServiceInfo, FlowResult, FlowResultType from homeassistant.exceptions import ( ConfigEntryAuthFailed, @@ -37,17 +30,13 @@ from homeassistant.exceptions import ( ConfigEntryNotReady, HomeAssistantError, ) -from homeassistant.helpers import entity_registry as er, frame, issue_registry as ir -from homeassistant.helpers.discovery_flow import DiscoveryKey +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.json import json_dumps -from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from homeassistant.setup import async_set_domains_to_be_loaded, async_setup_component from homeassistant.util.async_ import create_eager_task import homeassistant.util.dt as dt_util -from homeassistant.util.json import json_loads from .common import ( MockConfigEntry, @@ -57,7 +46,6 @@ from .common import ( async_capture_events, async_fire_time_changed, async_get_persistent_notifications, - flush_store, mock_config_flow, mock_integration, mock_platform, @@ -87,27 +75,8 @@ def mock_handlers() -> Generator[None]: """Mock Reauth.""" return await self.async_step_reauth_confirm() - class MockFlowHandler2(config_entries.ConfigFlow): - """Define a second mock flow handler.""" - - VERSION = 1 - - async def async_step_reauth(self, data): - """Mock Reauth.""" - return await self.async_step_reauth_confirm() - - async def async_step_reauth_confirm(self, user_input=None): - """Test reauth confirm step.""" - if user_input is None: - return self.async_show_form( - step_id="reauth_confirm", - description_placeholders={CONF_NAME: "Custom title"}, - ) - return self.async_abort(reason="test") - with patch.dict( - config_entries.HANDLERS, - {"comp": MockFlowHandler, "test": MockFlowHandler, "test2": MockFlowHandler2}, + config_entries.HANDLERS, {"comp": MockFlowHandler, "test": MockFlowHandler} ): yield @@ -128,12 +97,12 @@ async def test_setup_race_only_setup_once(hass: HomeAssistant) -> None: fast_config_entry_setup_future = hass.loop.create_future() slow_setup_future = hass.loop.create_future() - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): """Mock setup.""" await slow_setup_future return True - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + async def async_setup_entry(hass, entry): """Mock setup entry.""" slow = entry.data["slow"] if slow: @@ -146,7 +115,7 @@ async def test_setup_race_only_setup_once(hass: HomeAssistant) -> None: await fast_config_entry_setup_future return True - async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + async def async_unload_entry(hass, entry): """Mock unload entry.""" return True @@ -534,41 +503,6 @@ async def test_remove_entry( assert not entity_entry_list -async def 
test_remove_entry_non_unique_unique_id( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - entity_registry: er.EntityRegistry, -) -> None: - """Test that we can remove entry with colliding unique_id.""" - entry_1 = MockConfigEntry( - domain="test_other", entry_id="test1", unique_id="not_unique" - ) - entry_1.add_to_manager(manager) - entry_2 = MockConfigEntry( - domain="test_other", entry_id="test2", unique_id="not_unique" - ) - entry_2.add_to_manager(manager) - entry_3 = MockConfigEntry( - domain="test_other", entry_id="test3", unique_id="not_unique" - ) - entry_3.add_to_manager(manager) - - # Check all config entries exist - assert manager.async_entry_ids() == [ - "test1", - "test2", - "test3", - ] - - # Remove entries - assert await manager.async_remove("test1") == {"require_restart": False} - await hass.async_block_till_done() - assert await manager.async_remove("test2") == {"require_restart": False} - await hass.async_block_till_done() - assert await manager.async_remove("test3") == {"require_restart": False} - await hass.async_block_till_done() - - async def test_remove_entry_cancels_reauth( hass: HomeAssistant, manager: config_entries.ConfigEntries, @@ -592,13 +526,13 @@ async def test_remove_entry_cancels_reauth( assert entry.state is config_entries.ConfigEntryState.SETUP_ERROR issue_id = f"config_entry_reauth_test_{entry.entry_id}" - assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) + assert issue_registry.async_get_issue(HA_DOMAIN, issue_id) await manager.async_remove(entry.entry_id) flows = hass.config_entries.flow.async_progress_by_handler("test") assert len(flows) == 0 - assert not issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) + assert not issue_registry.async_get_issue(HA_DOMAIN, issue_id) async def test_remove_entry_handles_callback_error( @@ -641,9 +575,9 @@ async def test_remove_entry_raises( ) -> None: """Test if a component raises while removing entry.""" - async def mock_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + async def mock_unload_entry(hass, entry): """Mock unload entry function.""" - raise Exception("BROKEN") # noqa: TRY002 + raise Exception("BROKEN") # pylint: disable=broad-exception-raised mock_integration(hass, MockModule("comp", async_unload_entry=mock_unload_entry)) @@ -733,7 +667,7 @@ async def test_add_entry_calls_setup_entry( """Test user step.""" return self.async_create_entry(title="title", data={"token": "supersecret"}) - with mock_config_flow("comp", TestFlow), mock_config_flow("invalid_flow", 5): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}): await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} ) @@ -924,7 +858,7 @@ async def test_saving_and_loading( await self.async_set_unique_id("unique") return self.async_create_entry(title="Test Title", data={"token": "abcd"}) - with mock_config_flow("test", TestFlow): + with patch.dict(config_entries.HANDLERS, {"test": TestFlow}): await hass.config_entries.flow.async_init( "test", context={"source": config_entries.SOURCE_USER} ) @@ -942,21 +876,10 @@ async def test_saving_and_loading( with patch("homeassistant.config_entries.HANDLERS.get", return_value=Test2Flow): await hass.config_entries.flow.async_init( - "test", - context={ - "source": config_entries.SOURCE_USER, - "discovery_key": DiscoveryKey(domain="test", key=("blah"), version=1), - }, - ) - await hass.config_entries.flow.async_init( - "test", - context={ - "source": config_entries.SOURCE_USER, - "discovery_key": 
DiscoveryKey(domain="test", key=("a", "b"), version=1), - }, + "test", context={"source": config_entries.SOURCE_USER} ) - assert len(hass.config_entries.async_entries()) == 3 + assert len(hass.config_entries.async_entries()) == 2 entry_1 = hass.config_entries.async_entries()[0] hass.config_entries.async_update_entry( @@ -975,7 +898,7 @@ async def test_saving_and_loading( manager = config_entries.ConfigEntries(hass, {}) await manager.async_initialize() - assert len(manager.async_entries()) == 3 + assert len(manager.async_entries()) == 2 # Ensure same order for orig, loaded in zip( @@ -984,7 +907,6 @@ async def test_saving_and_loading( assert orig.as_dict() == loaded.as_dict() -@freeze_time("2024-02-14 12:00:00") async def test_as_dict(snapshot: SnapshotAssertion) -> None: """Test ConfigEntry.as_dict.""" @@ -1010,6 +932,7 @@ async def test_as_dict(snapshot: SnapshotAssertion) -> None: "_tries", "_setup_again_job", "_supports_options", + "_reconfigure_lock", "supports_reconfigure", } @@ -1022,7 +945,7 @@ async def test_as_dict(snapshot: SnapshotAssertion) -> None: if ( key.startswith("__") or callable(func) - or type(func).__name__ in ("cached_property", "property") + or type(func) in (cached_property, property) ): continue assert key in dict_repr or key in excluded_from_dict @@ -1127,20 +1050,23 @@ async def test_discovery_notification( mock_integration(hass, MockModule("test")) mock_platform(hass, "test.config_flow", None) - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" + with patch.dict(config_entries.HANDLERS): - VERSION = 5 + class TestFlow(config_entries.ConfigFlow, domain="test"): + """Test flow.""" - async def async_step_discovery(self, discovery_info): - """Test discovery step.""" - return self.async_show_form(step_id="discovery_confirm") + VERSION = 5 - async def async_step_discovery_confirm(self, discovery_info): - """Test discovery confirm step.""" - return self.async_create_entry(title="Test Title", data={"token": "abcd"}) + async def async_step_discovery(self, discovery_info): + """Test discovery step.""" + return self.async_show_form(step_id="discovery_confirm") + + async def async_step_discovery_confirm(self, discovery_info): + """Test discovery confirm step.""" + return self.async_create_entry( + title="Test Title", data={"token": "abcd"} + ) - with mock_config_flow("test", TestFlow): notifications = async_get_persistent_notifications(hass) assert "config_entry_discovery" not in notifications @@ -1178,31 +1104,29 @@ async def test_reauth_notification(hass: HomeAssistant) -> None: mock_integration(hass, MockModule("test")) mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(title="test_title", domain="test") - entry.add_to_hass(hass) + with patch.dict(config_entries.HANDLERS): - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" + class TestFlow(config_entries.ConfigFlow, domain="test"): + """Test flow.""" - VERSION = 5 + VERSION = 5 - async def async_step_user(self, user_input): - """Test user step.""" - return self.async_show_form(step_id="user_confirm") + async def async_step_user(self, user_input): + """Test user step.""" + return self.async_show_form(step_id="user_confirm") - async def async_step_user_confirm(self, user_input): - """Test user confirm step.""" - return self.async_show_form(step_id="user_confirm") + async def async_step_user_confirm(self, user_input): + """Test user confirm step.""" + return self.async_show_form(step_id="user_confirm") - async def async_step_reauth(self, user_input): - """Test reauth step.""" - 
return self.async_show_form(step_id="reauth_confirm") + async def async_step_reauth(self, user_input): + """Test reauth step.""" + return self.async_show_form(step_id="reauth_confirm") - async def async_step_reauth_confirm(self, user_input): - """Test reauth confirm step.""" - return self.async_abort(reason="test") + async def async_step_reauth_confirm(self, user_input): + """Test reauth confirm step.""" + return self.async_abort(reason="test") - with mock_config_flow("test", TestFlow): # Start user flow to assert that reconfigure notification doesn't fire await hass.config_entries.flow.async_init( "test", context={"source": config_entries.SOURCE_USER} @@ -1214,11 +1138,7 @@ async def test_reauth_notification(hass: HomeAssistant) -> None: # Start first reauth flow to assert that reconfigure notification fires flow1 = await hass.config_entries.flow.async_init( - "test", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, + "test", context={"source": config_entries.SOURCE_REAUTH} ) await hass.async_block_till_done() @@ -1228,11 +1148,7 @@ async def test_reauth_notification(hass: HomeAssistant) -> None: # Start a second reauth flow so we can finish the first and assert that # the reconfigure notification persists until the second one is complete flow2 = await hass.config_entries.flow.async_init( - "test", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, + "test", context={"source": config_entries.SOURCE_REAUTH} ) flow1 = await hass.config_entries.flow.async_configure(flow1["flow_id"], {}) @@ -1273,14 +1189,14 @@ async def test_reauth_issue( assert len(issue_registry.issues) == 1 issue_id = f"config_entry_reauth_test_{entry.entry_id}" - issue = issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) + issue = issue_registry.async_get_issue(HA_DOMAIN, issue_id) assert issue == ir.IssueEntry( active=True, breaks_in_ha_version=None, created=ANY, data={"flow_id": flows[0]["flow_id"]}, dismissed_version=None, - domain=HOMEASSISTANT_DOMAIN, + domain=HA_DOMAIN, is_fixable=False, is_persistent=False, issue_domain="test", @@ -1310,7 +1226,7 @@ async def test_discovery_notification_not_created(hass: HomeAssistant) -> None: """Test discovery step.""" return self.async_abort(reason="test") - with mock_config_flow("test", TestFlow): + with patch.dict(config_entries.HANDLERS, {"test": TestFlow}): await hass.config_entries.flow.async_init( "test", context={"source": config_entries.SOURCE_DISCOVERY} ) @@ -1330,11 +1246,8 @@ async def test_loading_default_config(hass: HomeAssistant) -> None: assert len(manager.async_entries()) == 0 -async def test_updating_entry_data( - manager: config_entries.ConfigEntries, freezer: FrozenDateTimeFactory -) -> None: +async def test_updating_entry_data(manager: config_entries.ConfigEntries) -> None: """Test that we can update an entry data.""" - created = dt_util.utcnow() entry = MockConfigEntry( domain="test", data={"first": True}, @@ -1342,32 +1255,17 @@ async def test_updating_entry_data( ) entry.add_to_manager(manager) - assert len(manager.async_entries()) == 1 - assert manager.async_entries()[0] == entry - assert entry.created_at == created - assert entry.modified_at == created - - freezer.tick() - assert manager.async_update_entry(entry) is False assert entry.data == {"first": True} - assert entry.modified_at == created - assert manager.async_entries()[0].modified_at == created - - freezer.tick() - modified = dt_util.utcnow() assert manager.async_update_entry(entry, data={"second": True}) 
is True assert entry.data == {"second": True} - assert entry.modified_at == modified - assert manager.async_entries()[0].modified_at == modified async def test_updating_entry_system_options( - manager: config_entries.ConfigEntries, freezer: FrozenDateTimeFactory + manager: config_entries.ConfigEntries, ) -> None: """Test that we can update an entry data.""" - created = dt_util.utcnow() entry = MockConfigEntry( domain="test", data={"first": True}, @@ -1378,11 +1276,6 @@ async def test_updating_entry_system_options( assert entry.pref_disable_new_entities is True assert entry.pref_disable_polling is False - assert entry.created_at == created - assert entry.modified_at == created - - freezer.tick() - modified = dt_util.utcnow() manager.async_update_entry( entry, pref_disable_new_entities=False, pref_disable_polling=True @@ -1390,8 +1283,6 @@ async def test_updating_entry_system_options( assert entry.pref_disable_new_entities is False assert entry.pref_disable_polling is True - assert entry.created_at == created - assert entry.modified_at == modified async def test_update_entry_options_and_trigger_listener( @@ -1402,7 +1293,7 @@ async def test_update_entry_options_and_trigger_listener( entry.add_to_manager(manager) update_listener_calls = [] - async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + async def update_listener(hass, entry): """Test function.""" assert entry.options == {"second": True} update_listener_calls.append(None) @@ -1567,7 +1458,7 @@ async def test_reload_during_setup_retrying_waits(hass: HomeAssistant) -> None: load_attempts = [] sleep_duration = 0 - async def _mock_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + async def _mock_setup_entry(hass, entry): """Mock setup entry.""" nonlocal sleep_duration await asyncio.sleep(sleep_duration) @@ -1612,7 +1503,7 @@ async def test_create_entry_options( ) -> None: """Test a config entry being created with options.""" - async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def mock_async_setup(hass, config): """Mock setup.""" hass.async_create_task( hass.config_entries.flow.async_init( @@ -1645,7 +1536,7 @@ async def test_create_entry_options( options={"example": user_input["option"]}, ) - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): assert await async_setup_component(hass, "comp", {}) await hass.async_block_till_done() @@ -2392,7 +2283,7 @@ async def test_unique_id_persisted( await self.async_set_unique_id("mock-unique-id") return self.async_create_entry(title="mock-title", data={}) - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} ) @@ -2443,7 +2334,7 @@ async def test_unique_id_existing_entry( return self.async_create_entry(title="mock-title", data={"via": "flow"}) - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} ) @@ -2489,7 +2380,7 @@ async def test_entry_id_existing_entry( with ( pytest.raises(HomeAssistantError), - mock_config_flow("comp", TestFlow), + patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch( "homeassistant.config_entries.ulid_util.ulid_now", return_value=collide_entry_id, @@ -2532,7 +2423,7 @@ async def test_unique_id_update_existing_entry_without_reload( ) with ( - 
mock_config_flow("comp", TestFlow), + patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2582,7 +2473,7 @@ async def test_unique_id_update_existing_entry_with_reload( ) with ( - mock_config_flow("comp", TestFlow), + patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2602,7 +2493,7 @@ async def test_unique_id_update_existing_entry_with_reload( updates["host"] = "2.2.2.2" entry._async_set_state(hass, config_entries.ConfigEntryState.NOT_LOADED, None) with ( - mock_config_flow("comp", TestFlow), + patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2659,7 +2550,7 @@ async def test_unique_id_from_discovery_in_setup_retry( # Verify we do not reload from a user source with ( - mock_config_flow("comp", TestFlow), + patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2675,7 +2566,7 @@ async def test_unique_id_from_discovery_in_setup_retry( # Verify do reload from a discovery source with ( - mock_config_flow("comp", TestFlow), + patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2727,7 +2618,7 @@ async def test_unique_id_not_update_existing_entry( ) with ( - mock_config_flow("comp", TestFlow), + patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2761,7 +2652,7 @@ async def test_unique_id_in_progress( await self.async_set_unique_id("mock-unique-id") return self.async_show_form(step_id="discovery") - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): # Create one to be in progress result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} @@ -2801,7 +2692,7 @@ async def test_finish_flow_aborts_progress( return self.async_create_entry(title="yo", data={}) - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): # Create one to be in progress result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} @@ -2818,24 +2709,8 @@ async def test_finish_flow_aborts_progress( assert len(hass.config_entries.flow.async_progress()) == 0 -@pytest.mark.parametrize( - ("extra_context", "expected_entry_discovery_keys"), - [ - ( - {}, - {}, - ), - ( - {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, - {"test": (DiscoveryKey(domain="test", key="blah", version=1),)}, - ), - ], -) async def test_unique_id_ignore( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - extra_context: dict, - expected_entry_discovery_keys: dict, + hass: HomeAssistant, manager: config_entries.ConfigEntries ) -> None: """Test that we can ignore flows that are in progress and have a unique ID.""" async_setup_entry = AsyncMock(return_value=False) @@ -2852,7 +2727,7 @@ async def test_unique_id_ignore( await self.async_set_unique_id("mock-unique-id") return self.async_show_form(step_id="discovery") - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): # Create one to be in progress result = await manager.flow.async_init( "comp", 
context={"source": config_entries.SOURCE_USER} @@ -2861,7 +2736,7 @@ async def test_unique_id_ignore( result2 = await manager.flow.async_init( "comp", - context={"source": config_entries.SOURCE_IGNORE} | extra_context, + context={"source": config_entries.SOURCE_IGNORE}, data={"unique_id": "mock-unique-id", "title": "Ignored Title"}, ) @@ -2877,8 +2752,6 @@ async def test_unique_id_ignore( assert entry.source == "ignore" assert entry.unique_id == "mock-unique-id" assert entry.title == "Ignored Title" - assert entry.data == {} - assert entry.discovery_keys == expected_entry_discovery_keys async def test_manual_add_overrides_ignored_entry( @@ -2918,7 +2791,7 @@ async def test_manual_add_overrides_ignored_entry( raise NotImplementedError with ( - mock_config_flow("comp", TestFlow), + patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -2962,7 +2835,7 @@ async def test_manual_add_overrides_ignored_entry_singleton( return self.async_abort(reason="single_instance_allowed") return self.async_create_entry(title="title", data={"token": "supersecret"}) - with mock_config_flow("comp", TestFlow), mock_config_flow("invalid_flow", 5): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}): await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} ) @@ -2975,254 +2848,6 @@ async def test_manual_add_overrides_ignored_entry_singleton( assert p_entry.data == {"token": "supersecret"} -@pytest.mark.parametrize( - ( - "discovery_keys", - "entry_unique_id", - "flow_context", - "flow_source", - "flow_result", - "updated_discovery_keys", - ), - [ - # No discovery key - ( - {}, - "mock-unique-id", - {}, - config_entries.SOURCE_ZEROCONF, - data_entry_flow.FlowResultType.ABORT, - {}, - ), - # Discovery key added to ignored entry data - ( - {}, - "mock-unique-id", - {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, - config_entries.SOURCE_ZEROCONF, - data_entry_flow.FlowResultType.ABORT, - {"test": (DiscoveryKey(domain="test", key="blah", version=1),)}, - ), - # Discovery key added to ignored entry data - ( - {"test": (DiscoveryKey(domain="test", key="bleh", version=1),)}, - "mock-unique-id", - {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, - config_entries.SOURCE_ZEROCONF, - data_entry_flow.FlowResultType.ABORT, - { - "test": ( - DiscoveryKey(domain="test", key="bleh", version=1), - DiscoveryKey(domain="test", key="blah", version=1), - ) - }, - ), - # Discovery key added to ignored entry data - ( - { - "test": ( - DiscoveryKey(domain="test", key="1", version=1), - DiscoveryKey(domain="test", key="2", version=1), - DiscoveryKey(domain="test", key="3", version=1), - DiscoveryKey(domain="test", key="4", version=1), - DiscoveryKey(domain="test", key="5", version=1), - DiscoveryKey(domain="test", key="6", version=1), - DiscoveryKey(domain="test", key="7", version=1), - DiscoveryKey(domain="test", key="8", version=1), - DiscoveryKey(domain="test", key="9", version=1), - DiscoveryKey(domain="test", key="10", version=1), - ) - }, - "mock-unique-id", - {"discovery_key": DiscoveryKey(domain="test", key="11", version=1)}, - config_entries.SOURCE_ZEROCONF, - data_entry_flow.FlowResultType.ABORT, - { - "test": ( - DiscoveryKey(domain="test", key="2", version=1), - DiscoveryKey(domain="test", key="3", version=1), - DiscoveryKey(domain="test", key="4", version=1), - DiscoveryKey(domain="test", key="5", version=1), - DiscoveryKey(domain="test", 
key="6", version=1), - DiscoveryKey(domain="test", key="7", version=1), - DiscoveryKey(domain="test", key="8", version=1), - DiscoveryKey(domain="test", key="9", version=1), - DiscoveryKey(domain="test", key="10", version=1), - DiscoveryKey(domain="test", key="11", version=1), - ) - }, - ), - # Discovery key already in ignored entry data - ( - {"test": (DiscoveryKey(domain="test", key="blah", version=1),)}, - "mock-unique-id", - {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, - config_entries.SOURCE_ZEROCONF, - data_entry_flow.FlowResultType.ABORT, - {"test": (DiscoveryKey(domain="test", key="blah", version=1),)}, - ), - # Flow not aborted when unique id is not matching - ( - {}, - "mock-unique-id-2", - {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, - config_entries.SOURCE_ZEROCONF, - data_entry_flow.FlowResultType.FORM, - {}, - ), - ], -) -@pytest.mark.parametrize( - "entry_source", - [ - config_entries.SOURCE_IGNORE, - config_entries.SOURCE_USER, - config_entries.SOURCE_ZEROCONF, - ], -) -async def test_update_discovery_keys( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - discovery_keys: tuple, - entry_source: str, - entry_unique_id: str, - flow_context: dict, - flow_source: str, - flow_result: data_entry_flow.FlowResultType, - updated_discovery_keys: tuple, -) -> None: - """Test that discovery keys of an entry can be updated.""" - hass.config.components.add("comp") - entry = MockConfigEntry( - domain="comp", - discovery_keys=discovery_keys, - unique_id=entry_unique_id, - state=config_entries.ConfigEntryState.LOADED, - source=entry_source, - ) - entry.add_to_hass(hass) - - mock_integration(hass, MockModule("comp")) - mock_platform(hass, "comp.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - VERSION = 1 - - async def async_step_user(self, user_input=None): - """Test user step.""" - await self.async_set_unique_id("mock-unique-id") - self._abort_if_unique_id_configured(reload_on_update=False) - return self.async_show_form(step_id="step2") - - async def async_step_step2(self, user_input=None): - raise NotImplementedError - - async def async_step_zeroconf(self, discovery_info=None): - """Test zeroconf step.""" - return await self.async_step_user(discovery_info) - - with ( - mock_config_flow("comp", TestFlow), - patch( - "homeassistant.config_entries.ConfigEntries.async_reload" - ) as async_reload, - ): - result = await manager.flow.async_init( - "comp", context={"source": flow_source} | flow_context - ) - await hass.async_block_till_done() - - assert result["type"] == flow_result - assert entry.data == {} - assert entry.discovery_keys == updated_discovery_keys - assert len(async_reload.mock_calls) == 0 - - -@pytest.mark.parametrize( - ( - "discovery_keys", - "entry_source", - "entry_unique_id", - "flow_context", - "flow_source", - "flow_result", - "updated_discovery_keys", - ), - [ - # Flow not aborted when user initiated flow - ( - {}, - config_entries.SOURCE_IGNORE, - "mock-unique-id-2", - {"discovery_key": DiscoveryKey(domain="test", key="blah", version=1)}, - config_entries.SOURCE_USER, - data_entry_flow.FlowResultType.FORM, - {}, - ), - ], -) -async def test_update_discovery_keys_2( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - discovery_keys: tuple, - entry_source: str, - entry_unique_id: str, - flow_context: dict, - flow_source: str, - flow_result: data_entry_flow.FlowResultType, - updated_discovery_keys: tuple, -) -> None: - """Test that discovery keys of an 
entry can be updated.""" - hass.config.components.add("comp") - entry = MockConfigEntry( - domain="comp", - discovery_keys=discovery_keys, - unique_id=entry_unique_id, - state=config_entries.ConfigEntryState.LOADED, - source=entry_source, - ) - entry.add_to_hass(hass) - - mock_integration(hass, MockModule("comp")) - mock_platform(hass, "comp.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - VERSION = 1 - - async def async_step_user(self, user_input=None): - """Test user step.""" - await self.async_set_unique_id("mock-unique-id") - self._abort_if_unique_id_configured(reload_on_update=False) - return self.async_show_form(step_id="step2") - - async def async_step_step2(self, user_input=None): - raise NotImplementedError - - async def async_step_zeroconf(self, discovery_info=None): - """Test zeroconf step.""" - return await self.async_step_user(discovery_info) - - with ( - mock_config_flow("comp", TestFlow), - patch( - "homeassistant.config_entries.ConfigEntries.async_reload" - ) as async_reload, - ): - result = await manager.flow.async_init( - "comp", context={"source": flow_source} | flow_context - ) - await hass.async_block_till_done() - - assert result["type"] == flow_result - assert entry.data == {} - assert entry.discovery_keys == updated_discovery_keys - assert len(async_reload.mock_calls) == 0 - - async def test_async_current_entries_does_not_skip_ignore_non_user( hass: HomeAssistant, manager: config_entries.ConfigEntries ) -> None: @@ -3251,7 +2876,7 @@ async def test_async_current_entries_does_not_skip_ignore_non_user( return self.async_abort(reason="single_instance_allowed") return self.async_create_entry(title="title", data={"token": "supersecret"}) - with mock_config_flow("comp", TestFlow), mock_config_flow("invalid_flow", 5): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}): await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_IMPORT} ) @@ -3288,7 +2913,7 @@ async def test_async_current_entries_explicit_skip_ignore( return self.async_abort(reason="single_instance_allowed") return self.async_create_entry(title="title", data={"token": "supersecret"}) - with mock_config_flow("comp", TestFlow), mock_config_flow("invalid_flow", 5): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}): await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_IMPORT} ) @@ -3329,7 +2954,7 @@ async def test_async_current_entries_explicit_include_ignore( return self.async_abort(reason="single_instance_allowed") return self.async_create_entry(title="title", data={"token": "supersecret"}) - with mock_config_flow("comp", TestFlow), mock_config_flow("invalid_flow", 5): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}): await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_IMPORT} ) @@ -3338,6 +2963,129 @@ async def test_async_current_entries_explicit_include_ignore( assert len(mock_setup_entry.mock_calls) == 0 +async def test_unignore_step_form( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that we can ignore flows that are in progress and have a unique ID, then rediscover them.""" + async_setup_entry = AsyncMock(return_value=True) + mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry)) + mock_platform(hass, "comp.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + VERSION = 1 + + async def async_step_unignore(self, user_input): 
+ """Test unignore step.""" + unique_id = user_input["unique_id"] + await self.async_set_unique_id(unique_id) + return self.async_show_form(step_id="discovery") + + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + result = await manager.flow.async_init( + "comp", + context={"source": config_entries.SOURCE_IGNORE}, + data={"unique_id": "mock-unique-id", "title": "Ignored Title"}, + ) + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + + entry = hass.config_entries.async_entries("comp")[0] + assert entry.source == "ignore" + assert entry.unique_id == "mock-unique-id" + assert entry.domain == "comp" + assert entry.title == "Ignored Title" + + await manager.async_remove(entry.entry_id) + + # But after a 'tick' the unignore step has run and we can see an active flow again. + await hass.async_block_till_done() + assert len(hass.config_entries.flow.async_progress_by_handler("comp")) == 1 + + # and still no config entries + assert len(hass.config_entries.async_entries("comp")) == 0 + + +async def test_unignore_create_entry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that we can ignore flows that are in progress and have a unique ID, then rediscover them.""" + async_setup_entry = AsyncMock(return_value=True) + mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry)) + mock_platform(hass, "comp.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + VERSION = 1 + + async def async_step_unignore(self, user_input): + """Test unignore step.""" + unique_id = user_input["unique_id"] + await self.async_set_unique_id(unique_id) + return self.async_create_entry(title="yo", data={}) + + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + result = await manager.flow.async_init( + "comp", + context={"source": config_entries.SOURCE_IGNORE}, + data={"unique_id": "mock-unique-id", "title": "Ignored Title"}, + ) + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + + entry = hass.config_entries.async_entries("comp")[0] + assert entry.source == "ignore" + assert entry.unique_id == "mock-unique-id" + assert entry.domain == "comp" + assert entry.title == "Ignored Title" + + await manager.async_remove(entry.entry_id) + + # But after a 'tick' the unignore step has run and we can see a config entry.
+ await hass.async_block_till_done() + entry = hass.config_entries.async_entries("comp")[0] + assert entry.source == config_entries.SOURCE_UNIGNORE + assert entry.unique_id == "mock-unique-id" + assert entry.title == "yo" + + # And still no active flow + assert len(hass.config_entries.flow.async_progress_by_handler("comp")) == 0 + + +async def test_unignore_default_impl( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that rediscovery is a no-op by default.""" + async_setup_entry = AsyncMock(return_value=True) + mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry)) + mock_platform(hass, "comp.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + VERSION = 1 + + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + result = await manager.flow.async_init( + "comp", + context={"source": config_entries.SOURCE_IGNORE}, + data={"unique_id": "mock-unique-id", "title": "Ignored Title"}, + ) + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + + entry = hass.config_entries.async_entries("comp")[0] + assert entry.source == "ignore" + assert entry.unique_id == "mock-unique-id" + assert entry.domain == "comp" + assert entry.title == "Ignored Title" + + await manager.async_remove(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.config_entries.async_entries("comp")) == 0 + assert len(hass.config_entries.flow.async_progress()) == 0 + + async def test_partial_flows_hidden( hass: HomeAssistant, manager: config_entries.ConfigEntries ) -> None: @@ -3369,7 +3117,7 @@ async def test_partial_flows_hidden( async def async_step_someform(self, user_input=None): raise NotImplementedError - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): # Start a config entry flow and wait for it to be blocked init_task = asyncio.ensure_future( manager.flow.async_init( @@ -3435,7 +3183,7 @@ async def test_async_setup_init_entry( """Test import step creating entry.""" return self.async_create_entry(title="title", data={}) - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): assert await async_setup_component(hass, "comp", {}) await hass.async_block_till_done() @@ -3453,7 +3201,7 @@ async def test_async_setup_init_entry_completes_before_loaded_event_fires( """Test a config entry being initialized during integration setup before the loaded event fires.""" load_events = async_capture_events(hass, EVENT_COMPONENT_LOADED) - async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def mock_async_setup(hass, config): """Mock setup.""" hass.async_create_task( hass.config_entries.flow.async_init( @@ -3496,7 +3244,7 @@ async def test_async_setup_init_entry_completes_before_loaded_event_fires( # This test must not use hass.async_block_till_done() # as it's explicitly testing what happens without it - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): assert await async_setup_component(hass, "comp", {}) assert len(async_setup_entry.mock_calls) == 1 assert load_events[0].event_type == EVENT_COMPONENT_LOADED @@ -3511,7 +3259,7 @@ async def test_async_setup_update_entry(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain="comp", data={"value": "initial"}) entry.add_to_hass(hass) - async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def mock_async_setup(hass, config):
"""Mock setup.""" hass.async_create_task( hass.config_entries.flow.async_init( @@ -3522,7 +3270,7 @@ async def test_async_setup_update_entry(hass: HomeAssistant) -> None: ) return True - async def mock_async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + async def mock_async_setup_entry(hass, entry): """Mock setting up an entry.""" assert entry.data["value"] == "updated" return True @@ -3552,7 +3300,7 @@ async def test_async_setup_update_entry(hass: HomeAssistant) -> None: ) return self.async_abort(reason="yo") - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): assert await async_setup_component(hass, "comp", {}) entries = hass.config_entries.async_entries("comp") @@ -3601,7 +3349,7 @@ async def test_flow_with_default_discovery( return self.async_create_entry(title="yo", data={}) - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): # Create one to be in progress result = await manager.flow.async_init( "comp", context={"source": discovery_source[0]}, data=discovery_source[1] @@ -3651,7 +3399,7 @@ async def test_flow_with_default_discovery_with_unique_id( async def async_step_mock(self, user_input=None): raise NotImplementedError - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_DISCOVERY} ) @@ -3678,7 +3426,7 @@ async def test_default_discovery_abort_existing_entries( VERSION = 1 - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_DISCOVERY} ) @@ -3707,7 +3455,7 @@ async def test_default_discovery_in_progress( async def async_step_mock(self, user_input=None): raise NotImplementedError - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_DISCOVERY}, @@ -3747,7 +3495,7 @@ async def test_default_discovery_abort_on_new_unique_flow( async def async_step_mock(self, user_input=None): raise NotImplementedError - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): # First discovery with default, no unique ID result2 = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_DISCOVERY}, data={} @@ -3794,7 +3542,7 @@ async def test_default_discovery_abort_on_user_flow_complete( async def async_step_mock(self, user_input=None): raise NotImplementedError - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): # First discovery with default, no unique ID flow1 = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_DISCOVERY}, data={} @@ -3858,7 +3606,7 @@ async def test_flow_same_device_multiple_sources( return self.async_show_form(step_id="link") return self.async_create_entry(title="title", data={"token": "supersecret"}) - with mock_config_flow("comp", TestFlow): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): # Create one to be in progress flow1 = manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_ZEROCONF} @@ -4010,7 +3758,7 @@ async def test_setup_raise_entry_error_from_first_coordinator_update( entry = MockConfigEntry(title="test_title", domain="test") 
entry.add_to_hass(hass) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + async def async_setup_entry(hass, entry): """Mock setup entry with a simple coordinator.""" async def _async_update_data(): @@ -4050,7 +3798,7 @@ async def test_setup_not_raise_entry_error_from_future_coordinator_update( entry = MockConfigEntry(title="test_title", domain="test") entry.add_to_hass(hass) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + async def async_setup_entry(hass, entry): """Mock setup entry with a simple coordinator.""" async def _async_update_data(): @@ -4129,7 +3877,7 @@ async def test_setup_raise_auth_failed_from_first_coordinator_update( entry = MockConfigEntry(title="test_title", domain="test") entry.add_to_hass(hass) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + async def async_setup_entry(hass, entry): """Mock setup entry with a simple coordinator.""" async def _async_update_data(): @@ -4181,7 +3929,7 @@ async def test_setup_raise_auth_failed_from_future_coordinator_update( entry = MockConfigEntry(title="test_title", domain="test") entry.add_to_hass(hass) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + async def async_setup_entry(hass, entry): """Mock setup entry with a simple coordinator.""" async def _async_update_data(): @@ -4377,7 +4125,7 @@ async def test_async_abort_entries_match( self._async_abort_entries_match(matchers) return self.async_abort(reason="no_match") - with mock_config_flow("comp", TestFlow), mock_config_flow("invalid_flow", 5): + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}): result = await manager.flow.async_init( "comp", context={"source": config_entries.SOURCE_USER} ) @@ -4515,28 +4263,29 @@ async def test_loading_old_data( assert entry.pref_disable_new_entities is True -async def test_deprecated_disabled_by_str_ctor() -> None: +async def test_deprecated_disabled_by_str_ctor( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: """Test deprecated str disabled_by constructor enumizes and logs a warning.""" - with pytest.raises( - TypeError, match="disabled_by must be a ConfigEntryDisabler value, got user" - ): - MockConfigEntry(disabled_by=config_entries.ConfigEntryDisabler.USER.value) + entry = MockConfigEntry(disabled_by=config_entries.ConfigEntryDisabler.USER.value) + assert entry.disabled_by is config_entries.ConfigEntryDisabler.USER + assert " str for config entry disabled_by. This is deprecated " in caplog.text async def test_deprecated_disabled_by_str_set( hass: HomeAssistant, manager: config_entries.ConfigEntries, + caplog: pytest.LogCaptureFixture, ) -> None: """Test deprecated str set disabled_by enumizes and logs a warning.""" entry = MockConfigEntry(domain="comp") entry.add_to_manager(manager) hass.config.components.add("comp") - with pytest.raises( - TypeError, match="disabled_by must be a ConfigEntryDisabler value, got user" - ): - await manager.async_set_disabled_by( - entry.entry_id, config_entries.ConfigEntryDisabler.USER.value - ) + assert await manager.async_set_disabled_by( + entry.entry_id, config_entries.ConfigEntryDisabler.USER.value + ) + assert entry.disabled_by is config_entries.ConfigEntryDisabler.USER + assert " str for config entry disabled_by. 
This is deprecated " in caplog.text async def test_entry_reload_concurrency( @@ -4627,12 +4376,12 @@ async def test_unique_id_update_while_setup_in_progress( ) -> None: """Test we handle the case where the config entry is updated while setup is in progress.""" - async def mock_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + async def mock_setup_entry(hass, entry): """Mock setting up entry.""" await asyncio.sleep(0.1) return True - async def mock_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + async def mock_unload_entry(hass, entry): """Mock unloading an entry.""" return True @@ -4673,7 +4422,7 @@ async def test_unique_id_update_while_setup_in_progress( ) with ( - mock_config_flow("comp", TestFlow), + patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch( "homeassistant.config_entries.ConfigEntries.async_reload" ) as async_reload, @@ -4779,75 +4528,6 @@ async def test_reauth( assert len(hass.config_entries.flow.async_progress()) == 1 -@pytest.mark.parametrize( - "source", [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE] -) -async def test_reauth_reconfigure_missing_entry( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - source: str, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the async_reauth_helper.""" - entry = MockConfigEntry(title="test_title", domain="test") - entry.add_to_hass(hass) - - mock_setup_entry = AsyncMock(return_value=True) - mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_platform(hass, "test.config_flow", None) - - await manager.async_setup(entry.entry_id) - await hass.async_block_till_done() - - with pytest.raises( - RuntimeError, - match=f"Detected code that initialises a {source} flow without a link " - "to the config entry. 
Please report this issue.", - ): - await manager.flow.async_init("test", context={"source": source}) - await hass.async_block_till_done() - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 0 - - -@pytest.mark.usefixtures("mock_integration_frame") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) -@pytest.mark.parametrize( - "source", [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE] -) -async def test_reauth_reconfigure_missing_entry_component( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - source: str, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the async_reauth_helper.""" - entry = MockConfigEntry(title="test_title", domain="test") - entry.add_to_hass(hass) - - mock_setup_entry = AsyncMock(return_value=True) - mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_platform(hass, "test.config_flow", None) - - await manager.async_setup(entry.entry_id) - await hass.async_block_till_done() - - with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): - await manager.flow.async_init("test", context={"source": source}) - await hass.async_block_till_done() - - # Flow still created, but deprecation logged - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - assert flows[0]["context"]["source"] == source - - assert ( - f"Detected that integration 'hue' initialises a {source} flow" - " without a link to the config entry at homeassistant/components" in caplog.text - ) - - async def test_reconfigure( hass: HomeAssistant, manager: config_entries.ConfigEntries ) -> None: @@ -4864,68 +4544,67 @@ async def test_reconfigure( await manager.async_setup(entry.entry_id) await hass.async_block_till_done() - def _async_start_reconfigure(config_entry: MockConfigEntry) -> None: - hass.async_create_task( - manager.flow.async_init( - config_entry.domain, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": config_entry.entry_id, - }, - ), - f"config entry reconfigure {config_entry.title} " - f"{config_entry.domain} {config_entry.entry_id}", + flow = hass.config_entries.flow + with patch.object(flow, "async_init", wraps=flow.async_init) as mock_init: + entry.async_start_reconfigure( + hass, + context={"extra_context": "some_extra_context"}, + data={"extra_data": 1234}, ) - - _async_start_reconfigure(entry) - await hass.async_block_till_done() + await hass.async_block_till_done() flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 assert flows[0]["context"]["entry_id"] == entry.entry_id assert flows[0]["context"]["source"] == config_entries.SOURCE_RECONFIGURE + assert flows[0]["context"]["title_placeholders"] == {"name": "test_title"} + assert flows[0]["context"]["extra_context"] == "some_extra_context" + + assert mock_init.call_args.kwargs["data"]["extra_data"] == 1234 assert entry.entry_id != entry2.entry_id - # Check that we can start duplicate reconfigure flows - # (may need revisiting) - _async_start_reconfigure(entry) + # Check that we can't start duplicate reconfigure flows + entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) + await hass.async_block_till_done() + assert len(hass.config_entries.flow.async_progress()) == 1 + + # Check that we can't start duplicate reconfigure flows when the context is different + entry.async_start_reconfigure(hass, {"diff": "diff"}) + await hass.async_block_till_done() + assert len(hass.config_entries.flow.async_progress()) == 1 + + # Check that we can start a reconfigure 
flow for a different entry + entry2.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) await hass.async_block_till_done() assert len(hass.config_entries.flow.async_progress()) == 2 - # Check that we can start a reconfigure flow for a different entry - _async_start_reconfigure(entry2) - await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress()) == 3 - # Abort all existing flows for flow in hass.config_entries.flow.async_progress(): hass.config_entries.flow.async_abort(flow["flow_id"]) await hass.async_block_till_done() - # Check that we can start duplicate reconfigure flows + # Check that we can't start duplicate reconfigure flows # without blocking between flows - # (may need revisiting) - _async_start_reconfigure(entry) - _async_start_reconfigure(entry) - _async_start_reconfigure(entry) - _async_start_reconfigure(entry) + entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) + entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) + entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) + entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress()) == 4 + assert len(hass.config_entries.flow.async_progress()) == 1 # Abort all existing flows for flow in hass.config_entries.flow.async_progress(): hass.config_entries.flow.async_abort(flow["flow_id"]) await hass.async_block_till_done() - # Check that we can start reconfigure flows with active reauth flow - # (may need revisiting) + # Check that we can't start reconfigure flows with active reauth flow entry.async_start_reauth(hass, {"extra_context": "some_extra_context"}) await hass.async_block_till_done() assert len(hass.config_entries.flow.async_progress()) == 1 - _async_start_reconfigure(entry) + entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress()) == 2 + assert len(hass.config_entries.flow.async_progress()) == 1 # Abort all existing flows for flow in hass.config_entries.flow.async_progress(): @@ -4933,7 +4612,7 @@ async def test_reconfigure( await hass.async_block_till_done() # Check that we can't start reauth flows with active reconfigure flow - _async_start_reconfigure(entry) + entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) await hass.async_block_till_done() assert len(hass.config_entries.flow.async_progress()) == 1 entry.async_start_reauth(hass, {"extra_context": "some_extra_context"}) @@ -5040,46 +4719,20 @@ async def test_async_wait_component_startup(hass: HomeAssistant) -> None: assert "test" in hass.config.components -@pytest.mark.parametrize( - "integration_frame_path", - ["homeassistant/components/my_integration", "homeassistant.core"], -) -@pytest.mark.usefixtures("mock_integration_frame") -async def test_options_flow_with_config_entry_core() -> None: - """Test that OptionsFlowWithConfigEntry cannot be used in core.""" - entry = MockConfigEntry( - domain="hue", - data={"first": True}, - options={"sub_dict": {"1": "one"}, "sub_list": ["one"]}, - ) - - with pytest.raises(RuntimeError, match="inherits from OptionsFlowWithConfigEntry"): - _ = config_entries.OptionsFlowWithConfigEntry(entry) - - -@pytest.mark.parametrize("integration_frame_path", ["custom_components/my_integration"]) -@pytest.mark.usefixtures("mock_integration_frame") -@patch.object(frame, 
"_REPORTED_INTEGRATIONS", set()) -async def test_options_flow_with_config_entry(caplog: pytest.LogCaptureFixture) -> None: +async def test_options_flow_options_not_mutated() -> None: """Test that OptionsFlowWithConfigEntry doesn't mutate entry options.""" entry = MockConfigEntry( - domain="hue", + domain="test", data={"first": True}, options={"sub_dict": {"1": "one"}, "sub_list": ["one"]}, ) options_flow = config_entries.OptionsFlowWithConfigEntry(entry) - assert caplog.text == "" # No deprecation warning for custom components - # Ensure available at startup - assert options_flow.config_entry is entry - assert options_flow.options == entry.options + options_flow._options["sub_dict"]["2"] = "two" + options_flow._options["sub_list"].append("two") - options_flow.options["sub_dict"]["2"] = "two" - options_flow.options["sub_list"].append("two") - - # Ensure it does not mutate the entry options - assert options_flow.options == { + assert options_flow._options == { "sub_dict": {"1": "one", "2": "two"}, "sub_list": ["one", "two"], } @@ -5107,9 +4760,7 @@ async def test_initializing_flows_canceled_on_shutdown( config_entries.HANDLERS, {"comp": MockFlowHandler, "test": MockFlowHandler} ): task = asyncio.create_task( - manager.flow.async_init( - "test", context={"source": "reauth", "entry_id": "abc"} - ) + manager.flow.async_init("test", context={"source": "reauth"}) ) await hass.async_block_till_done() manager.flow.async_shutdown() @@ -5245,153 +4896,71 @@ def test_raise_trying_to_add_same_config_entry_twice( @pytest.mark.parametrize( ( + "title", + "unique_id", + "data_vendor", + "options_vendor", "kwargs", - "expected_title", - "expected_unique_id", - "expected_data", - "expected_options", "calls_entry_load_unload", - "raises", ), [ ( - { - "unique_id": "5678", - "title": "Updated title", - "data": {"vendor": "data2"}, - "options": {"vendor": "options2"}, - }, - "Updated title", - "5678", - {"vendor": "data2"}, - {"vendor": "options2"}, - (2, 1), - None, - ), - ( - { - "unique_id": "1234", - "title": "Test", - "data": {"vendor": "data"}, - "options": {"vendor": "options"}, - }, - "Test", - "1234", - {"vendor": "data"}, - {"vendor": "options"}, - (2, 1), - None, - ), - ( - { - "unique_id": "5678", - "title": "Updated title", - "data": {"vendor": "data2"}, - "options": {"vendor": "options2"}, - "reload_even_if_entry_is_unchanged": True, - }, - "Updated title", - "5678", - {"vendor": "data2"}, - {"vendor": "options2"}, - (2, 1), - None, - ), - ( - { - "unique_id": "1234", - "title": "Test", - "data": {"vendor": "data"}, - "options": {"vendor": "options"}, - "reload_even_if_entry_is_unchanged": False, - }, - "Test", - "1234", - {"vendor": "data"}, - {"vendor": "options"}, - (1, 0), - None, - ), - ( - {}, - "Test", - "1234", - {"vendor": "data"}, - {"vendor": "options"}, - (2, 1), - None, - ), - ( - {"data": {"buyer": "me"}, "options": {}}, - "Test", - "1234", - {"buyer": "me"}, + ("Test", "Updated title"), + ("1234", "5678"), + ("data", "data2"), + ("options", "options2"), {}, (2, 1), - None, ), ( - {"data_updates": {"buyer": "me"}}, - "Test", - "1234", - {"vendor": "data", "buyer": "me"}, - {"vendor": "options"}, + ("Test", "Test"), + ("1234", "1234"), + ("data", "data"), + ("options", "options"), + {}, (2, 1), - None, ), ( - { - "unique_id": "5678", - "title": "Updated title", - "data": {"vendor": "data2"}, - "options": {"vendor": "options2"}, - "data_updates": {"buyer": "me"}, - }, - "Test", - "1234", - {"vendor": "data"}, - {"vendor": "options"}, + ("Test", "Updated title"), + ("1234", "5678"), + 
("data", "data2"), + ("options", "options2"), + {"reload_even_if_entry_is_unchanged": True}, + (2, 1), + ), + ( + ("Test", "Test"), + ("1234", "1234"), + ("data", "data"), + ("options", "options"), + {"reload_even_if_entry_is_unchanged": False}, (1, 0), - ValueError, ), ], ids=[ "changed_entry_default", "unchanged_entry_default", "changed_entry_explicit_reload", - "unchanged_entry_no_reload", - "no_kwargs", - "replace_data", - "update_data", - "update_and_data_raises", - ], -) -@pytest.mark.parametrize( - ("source", "reason"), - [ - (config_entries.SOURCE_REAUTH, "reauth_successful"), - (config_entries.SOURCE_RECONFIGURE, "reconfigure_successful"), + "changed_entry_no_reload", ], ) async def test_update_entry_and_reload( hass: HomeAssistant, - source: str, - reason: str, - expected_title: str, - expected_unique_id: str, - expected_data: dict[str, Any], - expected_options: dict[str, Any], + manager: config_entries.ConfigEntries, + title: tuple[str, str], + unique_id: tuple[str, str], + data_vendor: tuple[str, str], + options_vendor: tuple[str, str], kwargs: dict[str, Any], calls_entry_load_unload: tuple[int, int], - raises: type[Exception] | None, ) -> None: """Test updating an entry and reloading.""" entry = MockConfigEntry( domain="comp", - unique_id="1234", - title="Test", - data={"vendor": "data"}, - options={"vendor": "options"}, + unique_id=unique_id[0], + title=title[0], + data={"vendor": data_vendor[0]}, + options={"vendor": options_vendor[0]}, ) entry.add_to_hass(hass) @@ -5412,155 +4981,39 @@ async def test_update_entry_and_reload( async def async_step_reauth(self, data): """Mock Reauth.""" - return self.async_update_reload_and_abort(entry, **kwargs) + return self.async_update_reload_and_abort( + entry=entry, + unique_id=unique_id[1], + title=title[1], + data={"vendor": data_vendor[1]}, + options={"vendor": options_vendor[1]}, + **kwargs, + ) - async def async_step_reconfigure(self, data): - """Mock Reconfigure.""" - return self.async_update_reload_and_abort(entry, **kwargs) + with patch.dict(config_entries.HANDLERS, {"comp": MockFlowHandler}): + task = await manager.flow.async_init("comp", context={"source": "reauth"}) + await hass.async_block_till_done() - err: Exception - with mock_config_flow("comp", MockFlowHandler): - try: - if source == config_entries.SOURCE_REAUTH: - result = await entry.start_reauth_flow(hass) - elif source == config_entries.SOURCE_RECONFIGURE: - result = await entry.start_reconfigure_flow(hass) - except Exception as ex: # noqa: BLE001 - err = ex - - await hass.async_block_till_done() - - assert entry.title == expected_title - assert entry.unique_id == expected_unique_id - assert entry.data == expected_data - assert entry.options == expected_options - assert entry.state == config_entries.ConfigEntryState.LOADED - if raises: - assert isinstance(err, raises) - else: - assert result["type"] == FlowResultType.ABORT - assert result["reason"] == reason - # Assert entry was reloaded - assert len(comp.async_setup_entry.mock_calls) == calls_entry_load_unload[0] - assert len(comp.async_unload_entry.mock_calls) == calls_entry_load_unload[1] + assert entry.title == title[1] + assert entry.unique_id == unique_id[1] + assert entry.data == {"vendor": data_vendor[1]} + assert entry.options == {"vendor": options_vendor[1]} + assert entry.state == config_entries.ConfigEntryState.LOADED + assert task["type"] == FlowResultType.ABORT + assert task["reason"] == "reauth_successful" + # Assert entry was reloaded + assert len(comp.async_setup_entry.mock_calls) == 
calls_entry_load_unload[0] + assert len(comp.async_unload_entry.mock_calls) == calls_entry_load_unload[1] @pytest.mark.parametrize("unique_id", [["blah", "bleh"], {"key": "value"}]) -async def test_unhashable_unique_id_fails( +async def test_unhashable_unique_id( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, unique_id: Any ) -> None: - """Test the ConfigEntryItems user dict fails unhashable unique_id.""" + """Test the ConfigEntryItems user dict handles unhashable unique_id.""" entries = config_entries.ConfigEntryItems(hass) entry = config_entries.ConfigEntry( data={}, - discovery_keys={}, - domain="test", - entry_id="mock_id", - minor_version=1, - options={}, - source="test", - title="title", - unique_id=unique_id, - version=1, - ) - - unique_id_string = re.escape(str(unique_id)) - with pytest.raises( - HomeAssistantError, - match=f"The entry unique id {unique_id_string} is not a string.", - ): - entries[entry.entry_id] = entry - - assert entry.entry_id not in entries - - with pytest.raises( - HomeAssistantError, - match=f"The entry unique id {unique_id_string} is not a string.", - ): - entries.get_entry_by_domain_and_unique_id("test", unique_id) - - -@pytest.mark.parametrize("unique_id", [["blah", "bleh"], {"key": "value"}]) -async def test_unhashable_unique_id_fails_on_update( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, unique_id: Any -) -> None: - """Test the ConfigEntryItems user dict fails non-hashable unique_id on update.""" - entries = config_entries.ConfigEntryItems(hass) - entry = config_entries.ConfigEntry( - data={}, - discovery_keys={}, - domain="test", - entry_id="mock_id", - minor_version=1, - options={}, - source="test", - title="title", - unique_id="123", - version=1, - ) - - entries[entry.entry_id] = entry - assert entry.entry_id in entries - - unique_id_string = re.escape(str(unique_id)) - with pytest.raises( - HomeAssistantError, - match=f"The entry unique id {unique_id_string} is not a string.", - ): - entries.update_unique_id(entry, unique_id) - - -async def test_string_unique_id_no_warning( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the ConfigEntryItems user dict string unique id doesn't log warning.""" - entries = config_entries.ConfigEntryItems(hass) - entry = config_entries.ConfigEntry( - data={}, - discovery_keys={}, - domain="test", - entry_id="mock_id", - minor_version=1, - options={}, - source="test", - title="title", - unique_id="123", - version=1, - ) - - entries[entry.entry_id] = entry - - assert ( - "Config entry 'title' from integration test has an invalid unique_id" - ) not in caplog.text - - assert entry.entry_id in entries - assert entries[entry.entry_id] is entry - assert entries.get_entry_by_domain_and_unique_id("test", "123") == entry - del entries[entry.entry_id] - assert not entries - assert entries.get_entry_by_domain_and_unique_id("test", "123") is None - - -@pytest.mark.parametrize( - ("unique_id", "type_name"), - [ - (123, "int"), - (2.3, "float"), - ], -) -async def test_hashable_unique_id( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - unique_id: Any, - type_name: str, -) -> None: - """Test the ConfigEntryItems user dict handles hashable non string unique_id.""" - entries = config_entries.ConfigEntryItems(hass) - entry = config_entries.ConfigEntry( - data={}, - discovery_keys={}, domain="test", entry_id="mock_id", minor_version=1, @@ -5572,10 +5025,9 @@ async def test_hashable_unique_id( ) entries[entry.entry_id] = entry - assert ( - "Config entry 'title' from 
integration test has an invalid unique_id" - f" '{unique_id}' of type {type_name} when a string is expected" + "Config entry 'title' from integration test has an invalid unique_id " + f"'{unique_id!s}'" ) in caplog.text assert entry.entry_id in entries @@ -5586,60 +5038,67 @@ async def test_hashable_unique_id( assert entries.get_entry_by_domain_and_unique_id("test", unique_id) is None -async def test_no_unique_id_no_warning( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, +@pytest.mark.parametrize("unique_id", [123]) +async def test_hashable_non_string_unique_id( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, unique_id: Any ) -> None: - """Test the ConfigEntryItems user dict don't log warning with no unique id.""" + """Test the ConfigEntryItems user dict handles hashable non string unique_id.""" entries = config_entries.ConfigEntryItems(hass) entry = config_entries.ConfigEntry( data={}, - discovery_keys={}, domain="test", entry_id="mock_id", minor_version=1, options={}, source="test", title="title", - unique_id=None, + unique_id=unique_id, version=1, ) entries[entry.entry_id] = entry - assert ( "Config entry 'title' from integration test has an invalid unique_id" ) not in caplog.text assert entry.entry_id in entries assert entries[entry.entry_id] is entry + assert entries.get_entry_by_domain_and_unique_id("test", unique_id) == entry + del entries[entry.entry_id] + assert not entries + assert entries.get_entry_by_domain_and_unique_id("test", unique_id) is None @pytest.mark.parametrize( - ("context", "user_input", "expected_result"), + ("source", "user_input", "expected_result"), [ ( - {"source": config_entries.SOURCE_IGNORE}, + config_entries.SOURCE_IGNORE, {"unique_id": "blah", "title": "blah"}, {"type": data_entry_flow.FlowResultType.CREATE_ENTRY}, ), ( - {"source": config_entries.SOURCE_REAUTH, "entry_id": "1234"}, + config_entries.SOURCE_REAUTH, None, {"type": data_entry_flow.FlowResultType.FORM, "step_id": "reauth_confirm"}, ), ( - {"source": config_entries.SOURCE_RECONFIGURE, "entry_id": "1234"}, + config_entries.SOURCE_RECONFIGURE, None, {"type": data_entry_flow.FlowResultType.FORM, "step_id": "reauth_confirm"}, ), ( - {"source": config_entries.SOURCE_USER}, + config_entries.SOURCE_UNIGNORE, + None, + {"type": data_entry_flow.FlowResultType.ABORT, "reason": "not_implemented"}, + ), + ( + config_entries.SOURCE_USER, None, { "type": data_entry_flow.FlowResultType.ABORT, "reason": "single_instance_allowed", - "translation_domain": HOMEASSISTANT_DOMAIN, + "translation_domain": HA_DOMAIN, }, ), ], @@ -5647,7 +5106,7 @@ async def test_no_unique_id_no_warning( async def test_starting_config_flow_on_single_config_entry( hass: HomeAssistant, manager: config_entries.ConfigEntries, - context: dict[str, Any], + source: str, user_input: dict, expected_result: dict, ) -> None: @@ -5670,7 +5129,6 @@ async def test_starting_config_flow_on_single_config_entry( entry = MockConfigEntry( domain="comp", unique_id="1234", - entry_id="1234", title="Test", data={"vendor": "data"}, options={"vendor": "options"}, @@ -5679,7 +5137,6 @@ async def test_starting_config_flow_on_single_config_entry( ignored_entry = MockConfigEntry( domain="comp", unique_id="2345", - entry_id="2345", title="Test", data={"vendor": "data"}, options={"vendor": "options"}, @@ -5694,7 +5151,7 @@ async def test_starting_config_flow_on_single_config_entry( return_value=integration, ): result = await hass.config_entries.flow.async_init( - "comp", context=context, data=user_input + "comp", context={"source": source}, 
data=user_input ) for key in expected_result: @@ -5702,42 +5159,39 @@ async def test_starting_config_flow_on_single_config_entry( @pytest.mark.parametrize( - ("context", "user_input", "expected_result"), + ("source", "user_input", "expected_result"), [ ( - {"source": config_entries.SOURCE_IGNORE}, + config_entries.SOURCE_IGNORE, {"unique_id": "blah", "title": "blah"}, {"type": data_entry_flow.FlowResultType.CREATE_ENTRY}, ), ( - {"source": config_entries.SOURCE_REAUTH, "entry_id": "2345"}, + config_entries.SOURCE_REAUTH, None, {"type": data_entry_flow.FlowResultType.FORM, "step_id": "reauth_confirm"}, ), ( - {"source": config_entries.SOURCE_RECONFIGURE, "entry_id": "2345"}, + config_entries.SOURCE_RECONFIGURE, None, {"type": data_entry_flow.FlowResultType.FORM, "step_id": "reauth_confirm"}, ), ( - {"source": config_entries.SOURCE_USER}, + config_entries.SOURCE_UNIGNORE, None, {"type": data_entry_flow.FlowResultType.ABORT, "reason": "not_implemented"}, ), ( - {"source": config_entries.SOURCE_ZEROCONF}, + config_entries.SOURCE_USER, None, - { - "type": data_entry_flow.FlowResultType.ABORT, - "reason": "single_instance_allowed", - }, + {"type": data_entry_flow.FlowResultType.ABORT, "reason": "not_implemented"}, ), ], ) async def test_starting_config_flow_on_single_config_entry_2( hass: HomeAssistant, manager: config_entries.ConfigEntries, - context: dict[str, Any], + source: str, user_input: dict, expected_result: dict, ) -> None: @@ -5760,7 +5214,6 @@ async def test_starting_config_flow_on_single_config_entry_2( ignored_entry = MockConfigEntry( domain="comp", unique_id="2345", - entry_id="2345", title="Test", data={"vendor": "data"}, options={"vendor": "options"}, @@ -5775,7 +5228,7 @@ async def test_starting_config_flow_on_single_config_entry_2( return_value=integration, ): result = await hass.config_entries.flow.async_init( - "comp", context=context, data=user_input + "comp", context={"source": source}, data=user_input ) for key in expected_result: @@ -5819,7 +5272,7 @@ async def test_avoid_adding_second_config_entry_on_single_config_entry( "homeassistant.loader.async_get_integration", return_value=integration, ), - mock_config_flow("comp", TestFlow), + patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), ): # Start a flow result = await manager.flow.async_init( @@ -5843,23 +5296,11 @@ async def test_avoid_adding_second_config_entry_on_single_config_entry( ) assert result["type"] == data_entry_flow.FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" - assert result["translation_domain"] == HOMEASSISTANT_DOMAIN + assert result["translation_domain"] == HA_DOMAIN -@pytest.mark.parametrize( - ("flow_1_unique_id", "flow_2_unique_id"), - [ - (None, None), - ("very_unique", "very_unique"), - (None, config_entries.DEFAULT_DISCOVERY_UNIQUE_ID), - ("very_unique", config_entries.DEFAULT_DISCOVERY_UNIQUE_ID), - ], -) async def test_in_progress_get_canceled_when_entry_is_created( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - flow_1_unique_id: str | None, - flow_2_unique_id: str | None, + hass: HomeAssistant, manager: config_entries.ConfigEntries ) -> None: """Test that we abort all in progress flows when a new entry is created on a single instance only integration.""" integration = loader.Integration( @@ -5887,19 +5328,10 @@ async def test_in_progress_get_canceled_when_entry_is_created( if user_input is not None: return self.async_create_entry(title="Test Title", data=user_input) - await self.async_set_unique_id(flow_1_unique_id, raise_on_progress=False) - 
return self.async_show_form(step_id="user") - - async def async_step_zeroconfg(self, user_input=None): - """Test user step.""" - if user_input is not None: - return self.async_create_entry(title="Test Title", data=user_input) - - await self.async_set_unique_id(flow_2_unique_id, raise_on_progress=False) return self.async_show_form(step_id="user") with ( - mock_config_flow("comp", TestFlow), + patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch( "homeassistant.loader.async_get_integration", return_value=integration, @@ -5971,8 +5403,13 @@ async def test_report_direct_mutation_of_config_entry( entry = MockConfigEntry(domain="test") entry.add_to_hass(hass) - with pytest.raises(AttributeError): - setattr(entry, field, "new_value") + setattr(entry, field, "new_value") + + assert ( + f'Detected code that sets "{field}" directly to update a config entry. ' + "This is deprecated and will stop working in Home Assistant 2024.9, " + "it should be updated to use async_update_entry instead. Please report this issue." + ) in caplog.text async def test_updating_non_added_entry_raises(hass: HomeAssistant) -> None: @@ -5993,7 +5430,7 @@ async def test_reload_during_setup(hass: HomeAssistant) -> None: in_setup = False setup_calls = 0 - async def mock_async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + async def mock_async_setup_entry(hass, entry): """Mock setting up an entry.""" nonlocal in_setup nonlocal setup_calls @@ -6466,1148 +5903,3 @@ async def test_config_entry_late_platform_setup( "entry_id test2 cannot forward setup for light because it is " "not loaded in the ConfigEntryState.NOT_LOADED state" ) not in caplog.text - - -@pytest.mark.parametrize("load_registries", [False]) -async def test_migration_from_1_2( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test migration from version 1.2.""" - hass_storage[config_entries.STORAGE_KEY] = { - "version": 1, - "minor_version": 2, - "data": { - "entries": [ - { - "data": {}, - "disabled_by": None, - "domain": "sun", - "entry_id": "0a8bd02d0d58c7debf5daf7941c9afe2", - "minor_version": 1, - "options": {}, - "pref_disable_new_entities": False, - "pref_disable_polling": False, - "source": "import", - "title": "Sun", - "unique_id": None, - "version": 1, - }, - ] - }, - } - - manager = config_entries.ConfigEntries(hass, {}) - await manager.async_initialize() - - # Test data was loaded - entries = manager.async_entries() - assert len(entries) == 1 - - # Check we store migrated data - await flush_store(manager._store) - assert hass_storage[config_entries.STORAGE_KEY] == { - "version": config_entries.STORAGE_VERSION, - "minor_version": config_entries.STORAGE_VERSION_MINOR, - "key": config_entries.STORAGE_KEY, - "data": { - "entries": [ - { - "created_at": "1970-01-01T00:00:00+00:00", - "data": {}, - "disabled_by": None, - "discovery_keys": {}, - "domain": "sun", - "entry_id": "0a8bd02d0d58c7debf5daf7941c9afe2", - "minor_version": 1, - "modified_at": "1970-01-01T00:00:00+00:00", - "options": {}, - "pref_disable_new_entities": False, - "pref_disable_polling": False, - "source": "import", - "title": "Sun", - "unique_id": None, - "version": 1, - }, - ] - }, - } - - -async def test_async_loaded_entries( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test that we can get loaded config entries.""" - entry1 = MockConfigEntry(domain="comp") - entry1.add_to_hass(hass) - entry2 = MockConfigEntry(domain="comp", source=config_entries.SOURCE_IGNORE) - entry2.add_to_hass(hass) - entry3 = 
MockConfigEntry( - domain="comp", disabled_by=config_entries.ConfigEntryDisabler.USER - ) - entry3.add_to_hass(hass) - - mock_setup = AsyncMock(return_value=True) - mock_setup_entry = AsyncMock(return_value=True) - mock_unload_entry = AsyncMock(return_value=True) - - mock_integration( - hass, - MockModule( - "comp", - async_setup=mock_setup, - async_setup_entry=mock_setup_entry, - async_unload_entry=mock_unload_entry, - ), - ) - mock_platform(hass, "comp.config_flow", None) - - assert hass.config_entries.async_loaded_entries("comp") == [] - - assert await manager.async_setup(entry1.entry_id) - assert not await manager.async_setup(entry2.entry_id) - assert not await manager.async_setup(entry3.entry_id) - - assert hass.config_entries.async_loaded_entries("comp") == [entry1] - - assert await hass.config_entries.async_unload(entry1.entry_id) - - assert hass.config_entries.async_loaded_entries("comp") == [] - - -async def test_async_has_matching_discovery_flow( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test we can check for matching discovery flows.""" - assert ( - manager.flow.async_has_matching_discovery_flow( - "test", - {"source": config_entries.SOURCE_HOMEKIT}, - {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - is False - ) - - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - VERSION = 5 - - async def async_step_init(self, user_input=None): - return self.async_show_progress( - step_id="init", - progress_action="task_one", - ) - - async def async_step_homekit(self, discovery_info=None): - return await self.async_step_init(discovery_info) - - with mock_config_flow("test", TestFlow): - result = await manager.flow.async_init( - "test", - context={"source": config_entries.SOURCE_HOMEKIT}, - data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - assert result["type"] == data_entry_flow.FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "task_one" - assert len(manager.flow.async_progress()) == 1 - assert len(manager.flow.async_progress_by_handler("test")) == 1 - assert ( - len( - manager.flow.async_progress_by_handler( - "test", match_context={"source": config_entries.SOURCE_HOMEKIT} - ) - ) - == 1 - ) - assert ( - len( - manager.flow.async_progress_by_handler( - "test", match_context={"source": config_entries.SOURCE_BLUETOOTH} - ) - ) - == 0 - ) - assert manager.flow.async_get(result["flow_id"])["handler"] == "test" - - assert ( - manager.flow.async_has_matching_discovery_flow( - "test", - {"source": config_entries.SOURCE_HOMEKIT}, - {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - is True - ) - assert ( - manager.flow.async_has_matching_discovery_flow( - "test", - {"source": config_entries.SOURCE_SSDP}, - {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - is False - ) - assert ( - manager.flow.async_has_matching_discovery_flow( - "other", - {"source": config_entries.SOURCE_HOMEKIT}, - {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - is False - ) - - -async def test_async_has_matching_flow( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test check for matching flows when there is no active flow.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - VERSION = 5 - - async def async_step_init(self, user_input=None): - return self.async_show_progress( - step_id="init", - progress_action="task_one", - ) - - async def 
async_step_homekit(self, discovery_info=None): - return await self.async_step_init(discovery_info) - - def is_matching(self, other_flow: Self) -> bool: - """Return True if other_flow is matching this flow.""" - return True - - # Initiate a flow - with mock_config_flow("test", TestFlow): - await manager.flow.async_init( - "test", - context={"source": config_entries.SOURCE_HOMEKIT}, - data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - flow = list(manager.flow._handler_progress_index.get("test"))[0] - - assert manager.flow.async_has_matching_flow(flow) is False - - # Initiate another flow - with mock_config_flow("test", TestFlow): - await manager.flow.async_init( - "test", - context={"source": config_entries.SOURCE_HOMEKIT}, - data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - - assert manager.flow.async_has_matching_flow(flow) is True - - -async def test_async_has_matching_flow_no_flows( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test check for matching flows when there is no active flow.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - VERSION = 5 - - async def async_step_init(self, user_input=None): - return self.async_show_progress( - step_id="init", - progress_action="task_one", - ) - - async def async_step_homekit(self, discovery_info=None): - return await self.async_step_init(discovery_info) - - with mock_config_flow("test", TestFlow): - result = await manager.flow.async_init( - "test", - context={"source": config_entries.SOURCE_HOMEKIT}, - data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - flow = list(manager.flow._handler_progress_index.get("test"))[0] - - # Abort the flow before checking for matching flows - manager.flow.async_abort(result["flow_id"]) - - assert manager.flow.async_has_matching_flow(flow) is False - - -async def test_async_has_matching_flow_not_implemented( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test check for matching flows when there is no active flow.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - VERSION = 5 - - async def async_step_init(self, user_input=None): - return self.async_show_progress( - step_id="init", - progress_action="task_one", - ) - - async def async_step_homekit(self, discovery_info=None): - return await self.async_step_init(discovery_info) - - # Initiate a flow - with mock_config_flow("test", TestFlow): - await manager.flow.async_init( - "test", - context={"source": config_entries.SOURCE_HOMEKIT}, - data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - flow = list(manager.flow._handler_progress_index.get("test"))[0] - - # Initiate another flow - with mock_config_flow("test", TestFlow): - await manager.flow.async_init( - "test", - context={"source": config_entries.SOURCE_HOMEKIT}, - data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, - ) - - # The flow does not implement is_matching - with pytest.raises(NotImplementedError): - manager.flow.async_has_matching_flow(flow) - - -async def test_get_reauth_entry( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test _get_context_entry behavior.""" - entry = MockConfigEntry( - title="test_title", - domain="test", - entry_id="01J915Q6T9F6G5V0QJX6HBC94T", - data={"host": "any", "port": 123}, - unique_id=None, - ) - entry.add_to_hass(hass) - - mock_integration(hass, MockModule("test")) - 
mock_platform(hass, "test.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - VERSION = 1 - - async def async_step_user(self, user_input=None): - """Test user step.""" - return await self._async_step_confirm() - - async def async_step_reauth(self, entry_data): - """Test reauth step.""" - return await self._async_step_confirm() - - async def async_step_reconfigure(self, user_input=None): - """Test reauth step.""" - return await self._async_step_confirm() - - async def _async_step_confirm(self): - """Confirm input.""" - try: - entry = self._get_reauth_entry() - except ValueError as err: - reason = str(err) - except config_entries.UnknownEntry: - reason = "Entry not found" - else: - reason = f"Found entry {entry.title}" - try: - entry_id = self._reauth_entry_id - except ValueError: - reason = f"{reason}: -" - else: - reason = f"{reason}: {entry_id}" - return self.async_abort(reason=reason) - - # A reauth flow finds the config entry from context - with mock_config_flow("test", TestFlow): - result = await entry.start_reauth_flow(hass) - assert result["reason"] == "Found entry test_title: 01J915Q6T9F6G5V0QJX6HBC94T" - - # The config entry is removed before the reauth flow is aborted - with mock_config_flow("test", TestFlow): - result = await entry.start_reauth_flow(hass, context={"entry_id": "01JRemoved"}) - assert result["reason"] == "Entry not found: 01JRemoved" - - # A reconfigure flow does not have access to the config entry - with mock_config_flow("test", TestFlow): - result = await entry.start_reconfigure_flow(hass) - assert result["reason"] == "Source is reconfigure, expected reauth: -" - - # A user flow does not have access to the config entry - with mock_config_flow("test", TestFlow): - result = await manager.flow.async_init( - "test", context={"source": config_entries.SOURCE_USER} - ) - assert result["reason"] == "Source is user, expected reauth: -" - - -async def test_get_reconfigure_entry( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test _get_context_entry behavior.""" - entry = MockConfigEntry( - title="test_title", - domain="test", - entry_id="01J915Q6T9F6G5V0QJX6HBC94T", - data={"host": "any", "port": 123}, - unique_id=None, - ) - entry.add_to_hass(hass) - - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - VERSION = 1 - - async def async_step_user(self, user_input=None): - """Test user step.""" - return await self._async_step_confirm() - - async def async_step_reauth(self, entry_data): - """Test reauth step.""" - return await self._async_step_confirm() - - async def async_step_reconfigure(self, user_input=None): - """Test reauth step.""" - return await self._async_step_confirm() - - async def _async_step_confirm(self): - """Confirm input.""" - try: - entry = self._get_reconfigure_entry() - except ValueError as err: - reason = str(err) - except config_entries.UnknownEntry: - reason = "Entry not found" - else: - reason = f"Found entry {entry.title}" - try: - entry_id = self._reconfigure_entry_id - except ValueError: - reason = f"{reason}: -" - else: - reason = f"{reason}: {entry_id}" - return self.async_abort(reason=reason) - - # A reauth flow does not have access to the config entry from context - with mock_config_flow("test", TestFlow): - result = await entry.start_reauth_flow(hass) - assert result["reason"] == "Source is reauth, expected reconfigure: -" - - # A reconfigure flow finds the config entry - with mock_config_flow("test", 
TestFlow): - result = await entry.start_reconfigure_flow(hass) - assert result["reason"] == "Found entry test_title: 01J915Q6T9F6G5V0QJX6HBC94T" - - # The entry_id no longer exists - with mock_config_flow("test", TestFlow): - result = await manager.flow.async_init( - "test", - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": "01JRemoved", - }, - ) - assert result["reason"] == "Entry not found: 01JRemoved" - - # A user flow does not have access to the config entry - with mock_config_flow("test", TestFlow): - result = await manager.flow.async_init( - "test", context={"source": config_entries.SOURCE_USER} - ) - assert result["reason"] == "Source is user, expected reconfigure: -" - - -async def test_reauth_helper_alignment( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test `start_reauth_flow` helper alignment. - - It should be aligned with `ConfigEntry._async_init_reauth`. - """ - entry = MockConfigEntry( - title="test_title", - domain="test", - entry_id="01J915Q6T9F6G5V0QJX6HBC94T", - data={"host": "any", "port": 123}, - unique_id=None, - ) - entry.add_to_hass(hass) - - mock_setup_entry = AsyncMock( - side_effect=ConfigEntryAuthFailed("The password is no longer valid") - ) - mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_platform(hass, "test.config_flow", None) - - # Check context via auto-generated reauth - await manager.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert "could not authenticate: The password is no longer valid" in caplog.text - - assert entry.state is config_entries.ConfigEntryState.SETUP_ERROR - assert entry.reason == "The password is no longer valid" - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - - reauth_flow_context = flows[0]["context"] - reauth_flow_init_data = hass.config_entries.flow._progress[ - flows[0]["flow_id"] - ].init_data - - # Clear to make way for `start_reauth_flow` helper - manager.flow.async_abort(flows[0]["flow_id"]) - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 0 - - # Check context via `start_reauth_flow` helper - await entry.start_reauth_flow(hass) - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - helper_flow_context = flows[0]["context"] - helper_flow_init_data = hass.config_entries.flow._progress[ - flows[0]["flow_id"] - ].init_data - - # Ensure context and init data are aligned - assert helper_flow_context == reauth_flow_context - assert helper_flow_init_data == reauth_flow_init_data - - -@pytest.mark.parametrize( - ("original_unique_id", "new_unique_id", "reason"), - [ - ("unique", "unique", "success"), - (None, None, "success"), - ("unique", "new", "unique_id_mismatch"), - ("unique", None, "unique_id_mismatch"), - (None, "new", "unique_id_mismatch"), - ], -) -@pytest.mark.parametrize( - "source", - [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE], -) -async def test_abort_if_unique_id_mismatch( - hass: HomeAssistant, - source: str, - original_unique_id: str | None, - new_unique_id: str | None, - reason: str, -) -> None: - """Test to check if_unique_id_mismatch behavior.""" - entry = MockConfigEntry( - title="From config flow", - domain="test", - entry_id="01J915Q6T9F6G5V0QJX6HBC94T", - data={"host": "any", "port": 123}, - unique_id=original_unique_id, - ) - entry.add_to_hass(hass) - - mock_setup_entry = AsyncMock(return_value=True) - - mock_integration(hass, MockModule("test", 
async_setup_entry=mock_setup_entry)) - mock_platform(hass, "test.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - VERSION = 1 - - async def async_step_user(self, user_input=None): - """Test user step.""" - return await self._async_step_confirm() - - async def async_step_reauth(self, entry_data): - """Test reauth step.""" - return await self._async_step_confirm() - - async def async_step_reconfigure(self, user_input=None): - """Test reauth step.""" - return await self._async_step_confirm() - - async def _async_step_confirm(self): - """Confirm input.""" - await self.async_set_unique_id(new_unique_id) - self._abort_if_unique_id_mismatch() - return self.async_abort(reason="success") - - with mock_config_flow("test", TestFlow): - if source == config_entries.SOURCE_REAUTH: - result = await entry.start_reauth_flow(hass) - elif source == config_entries.SOURCE_RECONFIGURE: - result = await entry.start_reconfigure_flow(hass) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == reason - - -def test_state_not_stored_in_storage() -> None: - """Test that state is not stored in storage. - - Verify we don't start accidentally storing state in storage. - """ - entry = MockConfigEntry(domain="test") - loaded = json_loads(json_dumps(entry.as_storage_fragment)) - for key in config_entries.STATE_KEYS: - assert key not in loaded - - -def test_storage_cache_is_cleared_on_entry_update(hass: HomeAssistant) -> None: - """Test that the storage cache is cleared when an entry is updated.""" - entry = MockConfigEntry(domain="test") - entry.add_to_hass(hass) - _ = entry.as_storage_fragment - hass.config_entries.async_update_entry(entry, data={"new": "data"}) - loaded = json_loads(json_dumps(entry.as_storage_fragment)) - assert "new" in loaded["data"] - - -async def test_storage_cache_is_cleared_on_entry_disable(hass: HomeAssistant) -> None: - """Test that the storage cache is cleared when an entry is disabled.""" - entry = MockConfigEntry(domain="test") - entry.add_to_hass(hass) - _ = entry.as_storage_fragment - await hass.config_entries.async_set_disabled_by( - entry.entry_id, config_entries.ConfigEntryDisabler.USER - ) - loaded = json_loads(json_dumps(entry.as_storage_fragment)) - assert loaded["disabled_by"] == "user" - - -async def test_state_cache_is_cleared_on_entry_disable(hass: HomeAssistant) -> None: - """Test that the state cache is cleared when an entry is disabled.""" - entry = MockConfigEntry(domain="test") - entry.add_to_hass(hass) - _ = entry.as_storage_fragment - await hass.config_entries.async_set_disabled_by( - entry.entry_id, config_entries.ConfigEntryDisabler.USER - ) - loaded = json_loads(json_dumps(entry.as_json_fragment)) - assert loaded["disabled_by"] == "user" - - -@pytest.mark.parametrize( - ("original_unique_id", "new_unique_id", "count"), - [ - ("unique", "unique", 1), - ("unique", "new", 2), - ("unique", None, 2), - (None, "unique", 2), - ], -) -@pytest.mark.parametrize( - "source", - [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE], -) -async def test_create_entry_reauth_reconfigure( - hass: HomeAssistant, - source: str, - original_unique_id: str | None, - new_unique_id: str | None, - count: int, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test to highlight unexpected behavior on create_entry.""" - entry = MockConfigEntry( - title="From config flow", - domain="test", - entry_id="01J915Q6T9F6G5V0QJX6HBC94T", - data={"host": "any", "port": 123}, - unique_id=original_unique_id, - ) - 
entry.add_to_hass(hass) - - mock_setup_entry = AsyncMock(return_value=True) - - mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_platform(hass, "test.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - VERSION = 1 - - async def async_step_user(self, user_input=None): - """Test user step.""" - return await self._async_step_confirm() - - async def async_step_reauth(self, entry_data): - """Test reauth step.""" - return await self._async_step_confirm() - - async def async_step_reconfigure(self, user_input=None): - """Test reauth step.""" - return await self._async_step_confirm() - - async def _async_step_confirm(self): - """Confirm input.""" - await self.async_set_unique_id(new_unique_id) - return self.async_create_entry( - title="From config flow", - data={"token": "supersecret"}, - ) - - assert len(hass.config_entries.async_entries("test")) == 1 - - with mock_config_flow("test", TestFlow): - result = await getattr(entry, f"start_{source}_flow")(hass) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.CREATE_ENTRY - - entries = hass.config_entries.async_entries("test") - assert len(entries) == count - if count == 1: - # Show that the previous entry got binned and recreated - assert entries[0].entry_id != entry.entry_id - - assert ( - f"Detected {source} config flow creating a new entry, when it is expected " - "to update an existing entry and abort. This will stop working in " - "2025.11, please create a bug report at https://github.com/home" - "-assistant/core/issues?q=is%3Aopen+is%3Aissue+" - "label%3A%22integration%3A+test%22" - ) in caplog.text - - -async def test_async_update_entry_unique_id_collision( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - caplog: pytest.LogCaptureFixture, - issue_registry: ir.IssueRegistry, -) -> None: - """Test we warn when async_update_entry creates a unique_id collision. - - Also test an issue registry issue is created. - """ - assert len(issue_registry.issues) == 0 - - entry1 = MockConfigEntry(domain="test", unique_id=None) - entry2 = MockConfigEntry(domain="test", unique_id="not none") - entry3 = MockConfigEntry(domain="test", unique_id="very unique") - entry4 = MockConfigEntry(domain="test", unique_id="also very unique") - entry1.add_to_manager(manager) - entry2.add_to_manager(manager) - entry3.add_to_manager(manager) - entry4.add_to_manager(manager) - - manager.async_update_entry(entry2, unique_id=None) - assert len(issue_registry.issues) == 0 - assert len(caplog.record_tuples) == 0 - - manager.async_update_entry(entry4, unique_id="very unique") - assert len(issue_registry.issues) == 1 - assert len(caplog.record_tuples) == 1 - - assert ( - "Unique id of config entry 'Mock Title' from integration test changed to " - "'very unique' which is already in use" - ) in caplog.text - - issue_id = "config_entry_unique_id_collision_test_very unique" - assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) - - -@pytest.mark.parametrize("domain", ["flipr"]) -async def test_async_update_entry_unique_id_collision_allowed_domain( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - caplog: pytest.LogCaptureFixture, - issue_registry: ir.IssueRegistry, - domain: str, -) -> None: - """Test we warn when async_update_entry creates a unique_id collision. - - This tests we don't warn and don't create issues for domains which have - their own migration path. 
- """ - assert len(issue_registry.issues) == 0 - - entry1 = MockConfigEntry(domain=domain, unique_id=None) - entry2 = MockConfigEntry(domain=domain, unique_id="not none") - entry3 = MockConfigEntry(domain=domain, unique_id="very unique") - entry4 = MockConfigEntry(domain=domain, unique_id="also very unique") - entry1.add_to_manager(manager) - entry2.add_to_manager(manager) - entry3.add_to_manager(manager) - entry4.add_to_manager(manager) - - manager.async_update_entry(entry2, unique_id=None) - assert len(issue_registry.issues) == 0 - assert len(caplog.record_tuples) == 0 - - manager.async_update_entry(entry4, unique_id="very unique") - assert len(issue_registry.issues) == 0 - assert len(caplog.record_tuples) == 0 - - assert ("already in use") not in caplog.text - - -async def test_unique_id_collision_issues( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - caplog: pytest.LogCaptureFixture, - issue_registry: ir.IssueRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test issue registry issues are created and remove on unique id collision.""" - assert len(issue_registry.issues) == 0 - - mock_setup_entry = AsyncMock(return_value=True) - for i in range(3): - mock_integration( - hass, MockModule(f"test{i+1}", async_setup_entry=mock_setup_entry) - ) - mock_platform(hass, f"test{i+1}.config_flow", None) - - test2_group_1: list[MockConfigEntry] = [] - test2_group_2: list[MockConfigEntry] = [] - test3: list[MockConfigEntry] = [] - for _ in range(3): - await manager.async_add(MockConfigEntry(domain="test1", unique_id=None)) - test2_group_1.append(MockConfigEntry(domain="test2", unique_id="group_1")) - test2_group_2.append(MockConfigEntry(domain="test2", unique_id="group_2")) - await manager.async_add(test2_group_1[-1]) - await manager.async_add(test2_group_2[-1]) - for _ in range(6): - test3.append(MockConfigEntry(domain="test3", unique_id="not_unique")) - await manager.async_add(test3[-1]) - # Add an ignored config entry - await manager.async_add( - MockConfigEntry( - domain="test2", unique_id="group_1", source=config_entries.SOURCE_IGNORE - ) - ) - - # Check we get one issue for domain test2 and one issue for domain test3 - assert len(issue_registry.issues) == 2 - issue_id = "config_entry_unique_id_collision_test2_group_1" - assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) == snapshot - issue_id = "config_entry_unique_id_collision_test3_not_unique" - assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) == snapshot - - # Remove one config entry for domain test3, the translations should be updated - await manager.async_remove(test3[0].entry_id) - assert set(issue_registry.issues) == { - (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_1"), - (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test3_not_unique"), - } - assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) == snapshot - - # Remove all but two config entries for domain test 3 - for i in range(3): - await manager.async_remove(test3[1 + i].entry_id) - assert set(issue_registry.issues) == { - (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_1"), - (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test3_not_unique"), - } - - # Remove the last test3 duplicate, the issue is cleared - await manager.async_remove(test3[-1].entry_id) - assert set(issue_registry.issues) == { - (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_1"), - } - - await manager.async_remove(test2_group_1[0].entry_id) - assert 
set(issue_registry.issues) == { - (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_1"), - } - - # Remove the last test2 group1 duplicate, a new issue is created - await manager.async_remove(test2_group_1[1].entry_id) - assert set(issue_registry.issues) == { - (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_2"), - } - - await manager.async_remove(test2_group_2[0].entry_id) - assert set(issue_registry.issues) == { - (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_2"), - } - - # Remove the last test2 group2 duplicate, the issue is cleared - await manager.async_remove(test2_group_2[1].entry_id) - assert not issue_registry.issues - - -async def test_context_no_leak(hass: HomeAssistant) -> None: - """Test ensure that config entry context does not leak. - - Unlikely to happen in real world, but occurs often in tests. - """ - - connected_future = asyncio.Future() - bg_tasks = [] - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Mock setup entry.""" - - async def _async_set_runtime_data(): - # Show that config_entries.current_entry is preserved for child tasks - await connected_future - entry.runtime_data = config_entries.current_entry.get() - - bg_tasks.append(hass.loop.create_task(_async_set_runtime_data())) - - return True - - async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Mock unload entry.""" - return True - - mock_integration( - hass, - MockModule( - "comp", - async_setup_entry=async_setup_entry, - async_unload_entry=async_unload_entry, - ), - ) - mock_platform(hass, "comp.config_flow", None) - - entry1 = MockConfigEntry(domain="comp") - entry1.add_to_hass(hass) - - await hass.config_entries.async_setup(entry1.entry_id) - assert entry1.state is config_entries.ConfigEntryState.LOADED - assert config_entries.current_entry.get() is None - - # Load an existing config entry - entry2 = MockConfigEntry(domain="comp") - entry2.add_to_hass(hass) - await hass.config_entries.async_setup(entry2.entry_id) - assert entry2.state is config_entries.ConfigEntryState.LOADED - assert config_entries.current_entry.get() is None - - # Add a new config entry (eg. 
from config flow) - entry3 = MockConfigEntry(domain="comp") - await hass.config_entries.async_add(entry3) - assert entry3.state is config_entries.ConfigEntryState.LOADED - assert config_entries.current_entry.get() is None - - for entry in (entry1, entry2, entry3): - assert entry.state is config_entries.ConfigEntryState.LOADED - assert not hasattr(entry, "runtime_data") - assert config_entries.current_entry.get() is None - - connected_future.set_result(None) - await asyncio.gather(*bg_tasks) - - for entry in (entry1, entry2, entry3): - assert entry.state is config_entries.ConfigEntryState.LOADED - assert entry.runtime_data is entry - assert config_entries.current_entry.get() is None - - -async def test_options_flow_config_entry( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test _config_entry_id and config_entry properties in options flow.""" - original_entry = MockConfigEntry(domain="test", data={}) - original_entry.add_to_hass(hass) - - mock_setup_entry = AsyncMock(return_value=True) - - mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_platform(hass, "test.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_options_flow(config_entry): - """Test options flow.""" - - class _OptionsFlow(config_entries.OptionsFlow): - """Test flow.""" - - def __init__(self) -> None: - """Test initialisation.""" - try: - self.init_entry_id = self._config_entry_id - except ValueError as err: - self.init_entry_id = err - try: - self.init_entry = self.config_entry - except ValueError as err: - self.init_entry = err - - async def async_step_init(self, user_input=None): - """Test user step.""" - errors = {} - if user_input is not None: - if user_input.get("abort"): - return self.async_abort(reason="abort") - - errors["entry_id"] = self._config_entry_id - try: - errors["entry"] = self.config_entry - except config_entries.UnknownEntry as err: - errors["entry"] = err - - return self.async_show_form(step_id="init", errors=errors) - - return _OptionsFlow() - - with mock_config_flow("test", TestFlow): - result = await hass.config_entries.options.async_init(original_entry.entry_id) - - options_flow = hass.config_entries.options._progress.get(result["flow_id"]) - assert isinstance(options_flow, config_entries.OptionsFlow) - assert options_flow.handler == original_entry.entry_id - assert isinstance(options_flow.init_entry_id, ValueError) - assert ( - str(options_flow.init_entry_id) - == "The config entry id is not available during initialisation" - ) - assert isinstance(options_flow.init_entry, ValueError) - assert ( - str(options_flow.init_entry) - == "The config entry is not available during initialisation" - ) - - assert result["type"] == FlowResultType.FORM - assert result["step_id"] == "init" - assert result["errors"] == {} - - result = await hass.config_entries.options.async_configure(result["flow_id"], {}) - assert result["type"] == FlowResultType.FORM - assert result["step_id"] == "init" - assert result["errors"]["entry_id"] == original_entry.entry_id - assert result["errors"]["entry"] is original_entry - - # Bad handler - not linked to a config entry - options_flow.handler = "123" - result = await hass.config_entries.options.async_configure(result["flow_id"], {}) - result = await hass.config_entries.options.async_configure(result["flow_id"], {}) - assert result["type"] == FlowResultType.FORM - assert result["step_id"] == "init" - assert result["errors"]["entry_id"] == 
"123" - assert isinstance(result["errors"]["entry"], config_entries.UnknownEntry) - # Reset handler - options_flow.handler = original_entry.entry_id - - result = await hass.config_entries.options.async_configure( - result["flow_id"], {"abort": True} - ) - assert result["type"] == FlowResultType.ABORT - assert result["reason"] == "abort" - - -@pytest.mark.parametrize("integration_frame_path", ["custom_components/my_integration"]) -@pytest.mark.usefixtures("mock_integration_frame") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) -async def test_options_flow_deprecated_config_entry_setter( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test that setting config_entry explicitly still works.""" - original_entry = MockConfigEntry(domain="my_integration", data={}) - original_entry.add_to_hass(hass) - - mock_setup_entry = AsyncMock(return_value=True) - - mock_integration( - hass, MockModule("my_integration", async_setup_entry=mock_setup_entry) - ) - mock_platform(hass, "my_integration.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_options_flow(config_entry): - """Test options flow.""" - - class _OptionsFlow(config_entries.OptionsFlow): - """Test flow.""" - - def __init__(self, entry) -> None: - """Test initialisation.""" - self.config_entry = entry - - async def async_step_init(self, user_input=None): - """Test user step.""" - errors = {} - if user_input is not None: - if user_input.get("abort"): - return self.async_abort(reason="abort") - - errors["entry_id"] = self._config_entry_id - try: - errors["entry"] = self.config_entry - except config_entries.UnknownEntry as err: - errors["entry"] = err - - return self.async_show_form(step_id="init", errors=errors) - - return _OptionsFlow(config_entry) - - with mock_config_flow("my_integration", TestFlow): - result = await hass.config_entries.options.async_init(original_entry.entry_id) - - options_flow = hass.config_entries.options._progress.get(result["flow_id"]) - assert options_flow.config_entry is original_entry - - assert ( - "Detected that custom integration 'my_integration' sets option flow " - "config_entry explicitly, which is deprecated and will stop working " - "in 2025.12" in caplog.text - ) - - -async def test_add_description_placeholder_automatically( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, -) -> None: - """Test entry title is added automatically to reauth flows description placeholder.""" - - entry = MockConfigEntry(title="test_title", domain="test") - - mock_setup_entry = AsyncMock(side_effect=ConfigEntryAuthFailed()) - mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_platform(hass, "test.config_flow", None) - - entry.add_to_hass(hass) - await manager.async_setup(entry.entry_id) - await hass.async_block_till_done() - - flows = hass.config_entries.flow.async_progress_by_handler("test") - assert len(flows) == 1 - - result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], None) - assert result["type"] == FlowResultType.FORM - assert result["description_placeholders"] == {"name": "test_title"} - - -async def test_add_description_placeholder_automatically_not_overwrites( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, -) -> None: - """Test entry title is not added automatically to reauth flows when custom name exist.""" - - entry = MockConfigEntry(title="test_title", domain="test2") - - 
mock_setup_entry = AsyncMock(side_effect=ConfigEntryAuthFailed()) - mock_integration(hass, MockModule("test2", async_setup_entry=mock_setup_entry)) - mock_platform(hass, "test2.config_flow", None) - - entry.add_to_hass(hass) - await manager.async_setup(entry.entry_id) - await hass.async_block_till_done() - - flows = hass.config_entries.flow.async_progress_by_handler("test2") - assert len(flows) == 1 - - result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], None) - assert result["type"] == FlowResultType.FORM - assert result["description_placeholders"] == {"name": "Custom title"} diff --git a/tests/test_const.py b/tests/test_const.py index 87a14ecfe9c..a6a2387b091 100644 --- a/tests/test_const.py +++ b/tests/test_const.py @@ -1,17 +1,13 @@ """Test const module.""" from enum import Enum -import logging -import sys -from unittest.mock import Mock, patch import pytest from homeassistant import const -from homeassistant.components import alarm_control_panel, lock, sensor +from homeassistant.components import sensor from .common import ( - extract_stack_to_frame, help_test_all, import_and_test_deprecated_constant, import_and_test_deprecated_constant_enum, @@ -19,7 +15,7 @@ from .common import ( def _create_tuples( - value: type[Enum] | list[Enum], constant_prefix: str + value: Enum | list[Enum], constant_prefix: str ) -> list[tuple[Enum, str]]: return [(enum, constant_prefix) for enum in value] @@ -66,14 +62,7 @@ def test_all() -> None: "DEVICE_CLASS_", ) + _create_tuples(const.UnitOfApparentPower, "POWER_") - + _create_tuples( - [ - const.UnitOfPower.WATT, - const.UnitOfPower.KILO_WATT, - const.UnitOfPower.BTU_PER_HOUR, - ], - "POWER_", - ) + + _create_tuples(const.UnitOfPower, "POWER_") + _create_tuples( [ const.UnitOfEnergy.KILO_WATT_HOUR, @@ -193,140 +182,3 @@ def test_deprecated_constant_name_changes( replacement, "2025.1", ) - - -def _create_tuples_lock_states( - enum: type[Enum], constant_prefix: str, remove_in_version: str -) -> list[tuple[Enum, str]]: - return [ - (enum_field, constant_prefix, remove_in_version) - for enum_field in enum - if enum_field - not in [ - lock.LockState.OPEN, - lock.LockState.OPENING, - ] - ] - - -@pytest.mark.parametrize( - ("enum", "constant_prefix", "remove_in_version"), - _create_tuples_lock_states(lock.LockState, "STATE_", "2025.10"), -) -def test_deprecated_constants_lock( - caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, - remove_in_version: str, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, const, enum, constant_prefix, remove_in_version - ) - - -def _create_tuples_alarm_states( - enum: type[Enum], constant_prefix: str, remove_in_version: str -) -> list[tuple[Enum, str]]: - return [ - (enum_field, constant_prefix, remove_in_version) - for enum_field in enum - if enum_field - not in [ - lock.LockState.OPEN, - lock.LockState.OPENING, - ] - ] - - -@pytest.mark.parametrize( - ("enum", "constant_prefix", "remove_in_version"), - _create_tuples_lock_states( - alarm_control_panel.AlarmControlPanelState, "STATE_ALARM_", "2025.11" - ), -) -def test_deprecated_constants_alarm( - caplog: pytest.LogCaptureFixture, - enum: Enum, - constant_prefix: str, - remove_in_version: str, -) -> None: - """Test deprecated constants.""" - import_and_test_deprecated_constant_enum( - caplog, const, enum, constant_prefix, remove_in_version - ) - - -def test_deprecated_unit_of_conductivity_alias() -> None: - """Test UnitOfConductivity deprecation.""" - - # Test the deprecated 
members are aliases - assert set(const.UnitOfConductivity) == {"S/cm", "µS/cm", "mS/cm"} - - -def test_deprecated_unit_of_conductivity_members( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test UnitOfConductivity deprecation.""" - - module_name = "config.custom_components.hue.light" - filename = f"/home/paulus/{module_name.replace('.', '/')}.py" - - with ( - patch.dict(sys.modules, {module_name: Mock(__file__=filename)}), - patch( - "homeassistant.helpers.frame.linecache.getline", - return_value="await session.close()", - ), - patch( - "homeassistant.helpers.frame.get_current_frame", - return_value=extract_stack_to_frame( - [ - Mock( - filename="/home/paulus/homeassistant/core.py", - lineno="23", - line="do_something()", - ), - Mock( - filename=filename, - lineno="23", - line="await session.close()", - ), - Mock( - filename="/home/paulus/aiohue/lights.py", - lineno="2", - line="something()", - ), - ] - ), - ), - ): - const.UnitOfConductivity.SIEMENS # noqa: B018 - const.UnitOfConductivity.MICROSIEMENS # noqa: B018 - const.UnitOfConductivity.MILLISIEMENS # noqa: B018 - - assert len(caplog.record_tuples) == 3 - - def deprecation_message(member: str, replacement: str) -> str: - return ( - f"UnitOfConductivity.{member} was used from hue, this is a deprecated enum " - "member which will be removed in HA Core 2025.11.0. Use UnitOfConductivity." - f"{replacement} instead, please report it to the author of the 'hue' custom" - " integration" - ) - - assert ( - const.__name__, - logging.WARNING, - deprecation_message("SIEMENS", "SIEMENS_PER_CM"), - ) in caplog.record_tuples - assert ( - const.__name__, - logging.WARNING, - deprecation_message("MICROSIEMENS", "MICROSIEMENS_PER_CM"), - ) in caplog.record_tuples - assert ( - const.__name__, - logging.WARNING, - deprecation_message("MILLISIEMENS", "MILLISIEMENS_PER_CM"), - ) in caplog.record_tuples diff --git a/tests/test_core.py b/tests/test_core.py index 67ed99daa09..5e6b51cc39e 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -10,10 +10,11 @@ import gc import logging import os import re +from tempfile import TemporaryDirectory import threading import time from typing import Any -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock, Mock, PropertyMock, patch from freezegun import freeze_time import pytest @@ -22,6 +23,7 @@ import voluptuous as vol from homeassistant.const import ( ATTR_FRIENDLY_NAME, + CONF_UNIT_SYSTEM, EVENT_CALL_SERVICE, EVENT_CORE_CONFIG_UPDATE, EVENT_HOMEASSISTANT_CLOSE, @@ -34,6 +36,7 @@ from homeassistant.const import ( EVENT_STATE_CHANGED, EVENT_STATE_REPORTED, MATCH_ALL, + __version__, ) import homeassistant.core as ha from homeassistant.core import ( @@ -48,7 +51,6 @@ from homeassistant.core import ( callback, get_release_channel, ) -from homeassistant.core_config import Config from homeassistant.exceptions import ( HomeAssistantError, InvalidEntityFormatError, @@ -62,12 +64,12 @@ from homeassistant.setup import async_setup_component from homeassistant.util.async_ import create_eager_task import homeassistant.util.dt as dt_util from homeassistant.util.read_only_dict import ReadOnlyDict +from homeassistant.util.unit_system import METRIC_SYSTEM from .common import ( async_capture_events, async_mock_service, help_test_all, - import_and_test_deprecated_alias, import_and_test_deprecated_constant_enum, ) @@ -421,11 +423,11 @@ async def test_async_get_hass_can_be_called(hass: HomeAssistant) -> None: try: if ha.async_get_hass() is hass: return True - raise Exception # noqa: TRY002 + raise 
Exception # pylint: disable=broad-exception-raised except HomeAssistantError: return False - raise Exception # noqa: TRY002 + raise Exception # pylint: disable=broad-exception-raised # Test scheduling a coroutine which calls async_get_hass via hass.async_create_task async def _async_create_task() -> None: @@ -918,14 +920,6 @@ def test_event_repr() -> None: ) -def test_event_origin_idx() -> None: - """Test the EventOrigin idx.""" - assert ha.EventOrigin.remote is ha.EventOrigin.remote - assert ha.EventOrigin.local is ha.EventOrigin.local - assert ha.EventOrigin.local.idx == 0 - assert ha.EventOrigin.remote.idx == 1 - - def test_event_as_dict() -> None: """Test an Event as dictionary.""" event_type = "some_type" @@ -1626,7 +1620,7 @@ async def test_serviceregistry_call_non_existing_with_blocking( hass: HomeAssistant, ) -> None: """Test non-existing with blocking.""" - with pytest.raises(ServiceNotFound): + with pytest.raises(ha.ServiceNotFound): await hass.services.async_call("test_domain", "i_do_not_exist", blocking=True) @@ -1712,7 +1706,7 @@ async def test_serviceregistry_service_that_not_exists(hass: HomeAssistant) -> N assert exc.value.domain == "test_do_not" assert exc.value.service == "exist" - assert str(exc.value) == "Action test_do_not.exist not found" + assert str(exc.value) == "Service test_do_not.exist not found" async def test_serviceregistry_async_service_raise_exception( @@ -1803,7 +1797,7 @@ async def test_services_call_return_response_requires_blocking( return_response=True, ) assert str(exc.value) == ( - "A non blocking action call with argument blocking=False " + "A non blocking service call with argument blocking=False " "can't be used together with argument return_response=True" ) @@ -1849,7 +1843,7 @@ async def test_serviceregistry_return_response_invalid( ("supports_response", "return_response", "expected_error"), [ (SupportsResponse.NONE, True, "does not return responses"), - (SupportsResponse.ONLY, False, "action requires responses"), + (SupportsResponse.ONLY, False, "call requires responses"), ], ) async def test_serviceregistry_return_response_arguments( @@ -1915,6 +1909,172 @@ async def test_serviceregistry_return_response_optional( assert response_data == expected_response_data +async def test_config_defaults() -> None: + """Test config defaults.""" + hass = Mock() + hass.data = {} + config = ha.Config(hass, "/test/ha-config") + assert config.hass is hass + assert config.latitude == 0 + assert config.longitude == 0 + assert config.elevation == 0 + assert config.location_name == "Home" + assert config.time_zone == "UTC" + assert config.internal_url is None + assert config.external_url is None + assert config.config_source is ha.ConfigSource.DEFAULT + assert config.skip_pip is False + assert config.skip_pip_packages == [] + assert config.components == set() + assert config.api is None + assert config.config_dir == "/test/ha-config" + assert config.allowlist_external_dirs == set() + assert config.allowlist_external_urls == set() + assert config.media_dirs == {} + assert config.recovery_mode is False + assert config.legacy_templates is False + assert config.currency == "EUR" + assert config.country is None + assert config.language == "en" + assert config.radius == 100 + + +async def test_config_path_with_file() -> None: + """Test get_config_path method.""" + hass = Mock() + hass.data = {} + config = ha.Config(hass, "/test/ha-config") + assert config.path("test.conf") == "/test/ha-config/test.conf" + + +async def test_config_path_with_dir_and_file() -> None: + 
"""Test get_config_path method.""" + hass = Mock() + hass.data = {} + config = ha.Config(hass, "/test/ha-config") + assert config.path("dir", "test.conf") == "/test/ha-config/dir/test.conf" + + +async def test_config_as_dict() -> None: + """Test as dict.""" + hass = Mock() + hass.data = {} + config = ha.Config(hass, "/test/ha-config") + type(config.hass.state).value = PropertyMock(return_value="RUNNING") + expected = { + "latitude": 0, + "longitude": 0, + "elevation": 0, + CONF_UNIT_SYSTEM: METRIC_SYSTEM.as_dict(), + "location_name": "Home", + "time_zone": "UTC", + "components": [], + "config_dir": "/test/ha-config", + "whitelist_external_dirs": [], + "allowlist_external_dirs": [], + "allowlist_external_urls": [], + "version": __version__, + "config_source": ha.ConfigSource.DEFAULT, + "recovery_mode": False, + "state": "RUNNING", + "external_url": None, + "internal_url": None, + "currency": "EUR", + "country": None, + "language": "en", + "safe_mode": False, + "debug": False, + "radius": 100, + } + + assert expected == config.as_dict() + + +async def test_config_is_allowed_path() -> None: + """Test is_allowed_path method.""" + hass = Mock() + hass.data = {} + config = ha.Config(hass, "/test/ha-config") + with TemporaryDirectory() as tmp_dir: + # The created dir is in /tmp. This is a symlink on OS X + # causing this test to fail unless we resolve path first. + config.allowlist_external_dirs = {os.path.realpath(tmp_dir)} + + test_file = os.path.join(tmp_dir, "test.jpg") + with open(test_file, "w", encoding="utf8") as tmp_file: + tmp_file.write("test") + + valid = [test_file, tmp_dir, os.path.join(tmp_dir, "notfound321")] + for path in valid: + assert config.is_allowed_path(path) + + config.allowlist_external_dirs = {"/home", "/var"} + + invalid = [ + "/hass/config/secure", + "/etc/passwd", + "/root/secure_file", + "/var/../etc/passwd", + test_file, + ] + for path in invalid: + assert not config.is_allowed_path(path) + + with pytest.raises(AssertionError): + config.is_allowed_path(None) + + +async def test_config_is_allowed_external_url() -> None: + """Test is_allowed_external_url method.""" + hass = Mock() + hass.data = {} + config = ha.Config(hass, "/test/ha-config") + config.allowlist_external_urls = [ + "http://x.com/", + "https://y.com/bla/", + "https://z.com/images/1.jpg/", + ] + + valid = [ + "http://x.com/1.jpg", + "http://x.com", + "https://y.com/bla/", + "https://y.com/bla/2.png", + "https://z.com/images/1.jpg", + ] + for url in valid: + assert config.is_allowed_external_url(url) + + invalid = [ + "https://a.co", + "https://y.com/bla_wrong", + "https://y.com/bla/../image.jpg", + "https://z.com/images", + ] + for url in invalid: + assert not config.is_allowed_external_url(url) + + +async def test_event_on_update(hass: HomeAssistant) -> None: + """Test that event is fired on update.""" + events = async_capture_events(hass, EVENT_CORE_CONFIG_UPDATE) + + assert hass.config.latitude != 12 + + await hass.config.async_update(latitude=12) + await hass.async_block_till_done() + + assert hass.config.latitude == 12 + assert len(events) == 1 + assert events[0].data == {"latitude": 12} + + +async def test_bad_timezone_raises_value_error(hass: HomeAssistant) -> None: + """Test bad timezone raises ValueError.""" + with pytest.raises(ValueError): + await hass.config.async_update(time_zone="not_a_timezone") + + async def test_start_taking_too_long(caplog: pytest.LogCaptureFixture) -> None: """Test when async_start takes too long.""" hass = ha.HomeAssistant("/test/ha-config") @@ -2024,7 +2184,7 @@ 
async def test_async_functions_with_callback(hass: HomeAssistant) -> None: runs = [] @ha.callback - async def test(): # pylint: disable=hass-async-callback-decorator + async def test(): runs.append(True) await hass.async_add_job(test) @@ -2035,7 +2195,7 @@ async def test_async_functions_with_callback(hass: HomeAssistant) -> None: assert len(runs) == 2 @ha.callback - async def service_handler(call): # pylint: disable=hass-async-callback-decorator + async def service_handler(call): runs.append(True) hass.services.async_register("test_domain", "test_service", service_handler) @@ -2129,6 +2289,53 @@ def test_valid_domain() -> None: assert ha.valid_domain(valid), valid +async def test_additional_data_in_core_config( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test that we can handle additional data in core configuration.""" + config = ha.Config(hass, "/test/ha-config") + config.async_initialize() + hass_storage[ha.CORE_STORAGE_KEY] = { + "version": 1, + "data": {"location_name": "Test Name", "additional_valid_key": "value"}, + } + await config.async_load() + assert config.location_name == "Test Name" + + +async def test_incorrect_internal_external_url( + hass: HomeAssistant, hass_storage: dict[str, Any], caplog: pytest.LogCaptureFixture +) -> None: + """Test that we warn when detecting invalid internal/external url.""" + config = ha.Config(hass, "/test/ha-config") + config.async_initialize() + + hass_storage[ha.CORE_STORAGE_KEY] = { + "version": 1, + "data": { + "internal_url": None, + "external_url": None, + }, + } + await config.async_load() + assert "Invalid external_url set" not in caplog.text + assert "Invalid internal_url set" not in caplog.text + + config = ha.Config(hass, "/test/ha-config") + config.async_initialize() + + hass_storage[ha.CORE_STORAGE_KEY] = { + "version": 1, + "data": { + "internal_url": "https://community.home-assistant.io/profile", + "external_url": "https://www.home-assistant.io/blue", + }, + } + await config.async_load() + assert "Invalid external_url set" in caplog.text + assert "Invalid internal_url set" in caplog.text + + async def test_start_events(hass: HomeAssistant) -> None: """Test events fired when starting Home Assistant.""" hass.state = ha.CoreState.not_running @@ -2312,14 +2519,14 @@ async def test_reserving_states(hass: HomeAssistant) -> None: hass.states.async_set("light.bedroom", "on") assert hass.states.async_available("light.bedroom") is False - with pytest.raises(HomeAssistantError): + with pytest.raises(ha.HomeAssistantError): hass.states.async_reserve("light.bedroom") hass.states.async_remove("light.bedroom") assert hass.states.async_available("light.bedroom") is True hass.states.async_set("light.bedroom", "on") - with pytest.raises(HomeAssistantError): + with pytest.raises(ha.HomeAssistantError): hass.states.async_reserve("light.bedroom") assert hass.states.async_available("light.bedroom") is False @@ -2623,7 +2830,7 @@ async def test_state_change_events_context_id_match_state_time( hass: HomeAssistant, ) -> None: """Test last_updated, timed_fired, and the ulid all have the same time.""" - events = async_capture_events(hass, EVENT_STATE_CHANGED) + events = async_capture_events(hass, ha.EVENT_STATE_CHANGED) hass.states.async_set("light.bedroom", "on") await hass.async_block_till_done() state: State = hass.states.get("light.bedroom") @@ -2642,7 +2849,7 @@ async def test_state_change_events_match_time_with_limits_of_precision( a bit better than the precision of datetime.now() which is used for last_updated on some 
platforms. """ - events = async_capture_events(hass, EVENT_STATE_CHANGED) + events = async_capture_events(hass, ha.EVENT_STATE_CHANGED) hass.states.async_set("light.bedroom", "on") await hass.async_block_till_done() state: State = hass.states.get("light.bedroom") @@ -2996,11 +3203,6 @@ def test_deprecated_constants( import_and_test_deprecated_constant_enum(caplog, ha, enum, "SOURCE_", "2025.1") -def test_deprecated_config(caplog: pytest.LogCaptureFixture) -> None: - """Test deprecated Config class.""" - import_and_test_deprecated_alias(caplog, ha, "Config", Config, "2025.11") - - def test_one_time_listener_repr(hass: HomeAssistant) -> None: """Test one time listener repr.""" @@ -3019,7 +3221,7 @@ async def test_async_add_import_executor_job(hass: HomeAssistant) -> None: evt = threading.Event() loop = asyncio.get_running_loop() - def executor_func() -> threading.Event: + def executor_func() -> None: evt.set() return evt @@ -3250,6 +3452,28 @@ async def test_async_listen_with_run_immediately_deprecated( ) in caplog.text +async def test_top_level_components(hass: HomeAssistant) -> None: + """Test top level components are updated when components change.""" + hass.config.components.add("homeassistant") + assert hass.config.components == {"homeassistant"} + assert hass.config.top_level_components == {"homeassistant"} + hass.config.components.add("homeassistant.scene") + assert hass.config.components == {"homeassistant", "homeassistant.scene"} + assert hass.config.top_level_components == {"homeassistant"} + hass.config.components.remove("homeassistant") + assert hass.config.components == {"homeassistant.scene"} + assert hass.config.top_level_components == set() + with pytest.raises(ValueError): + hass.config.components.remove("homeassistant.scene") + with pytest.raises(NotImplementedError): + hass.config.components.discard("homeassistant") + + +async def test_debug_mode_defaults_to_off(hass: HomeAssistant) -> None: + """Test debug mode defaults to off.""" + assert not hass.config.debug + + async def test_async_fire_thread_safety(hass: HomeAssistant) -> None: """Test async_fire thread safety.""" events = async_capture_events(hass, "test_event") @@ -3316,6 +3540,19 @@ async def test_thread_safety_message(hass: HomeAssistant) -> None: await hass.async_add_executor_job(hass.verify_event_loop_thread, "test") +async def test_set_time_zone_deprecated(hass: HomeAssistant) -> None: + """Test set_time_zone is deprecated.""" + with pytest.raises( + RuntimeError, + match=re.escape( + "Detected code that set the time zone using set_time_zone instead of " + "async_set_time_zone which will stop working in Home Assistant 2025.6. 
" + "Please report this issue.", + ), + ): + await hass.config.set_time_zone("America/New_York") + + async def test_async_set_updates_last_reported(hass: HomeAssistant) -> None: """Test async_set method updates last_reported AND last_reported_timestamp.""" hass.states.async_set("light.bowl", "on", {}) diff --git a/tests/test_core_config.py b/tests/test_core_config.py deleted file mode 100644 index 3e0c0999ad3..00000000000 --- a/tests/test_core_config.py +++ /dev/null @@ -1,1083 +0,0 @@ -"""Test core_config.""" - -import asyncio -from collections import OrderedDict -import copy -import os -from pathlib import Path -import re -from tempfile import TemporaryDirectory -from typing import Any -from unittest.mock import Mock, PropertyMock, patch - -import pytest -from voluptuous import Invalid, MultipleInvalid -from webrtc_models import RTCConfiguration, RTCIceServer - -from homeassistant.const import ( - ATTR_ASSUMED_STATE, - ATTR_FRIENDLY_NAME, - CONF_AUTH_MFA_MODULES, - CONF_AUTH_PROVIDERS, - CONF_CUSTOMIZE, - CONF_LATITUDE, - CONF_LONGITUDE, - CONF_NAME, - CONF_UNIT_SYSTEM, - EVENT_CORE_CONFIG_UPDATE, - __version__, -) -from homeassistant.core import HomeAssistant, State -from homeassistant.core_config import ( - _CUSTOMIZE_DICT_SCHEMA, - CORE_CONFIG_SCHEMA, - CORE_STORAGE_KEY, - DATA_CUSTOMIZE, - Config, - ConfigSource, - _validate_stun_or_turn_url, - async_process_ha_core_config, -) -from homeassistant.helpers import issue_registry as ir -from homeassistant.helpers.entity import Entity -from homeassistant.util.unit_system import ( - METRIC_SYSTEM, - US_CUSTOMARY_SYSTEM, - UnitSystem, -) - -from .common import MockUser, async_capture_events - - -def test_core_config_schema() -> None: - """Test core config schema.""" - for value in ( - {"unit_system": "K"}, - {"time_zone": "non-exist"}, - {"latitude": "91"}, - {"longitude": -181}, - {"external_url": "not an url"}, - {"internal_url": "not an url"}, - {"currency", 100}, - {"customize": "bla"}, - {"customize": {"light.sensor": 100}}, - {"customize": {"entity_id": []}}, - {"country": "xx"}, - {"language": "xx"}, - {"radius": -10}, - {"webrtc": "bla"}, - {"webrtc": {}}, - ): - with pytest.raises(MultipleInvalid): - CORE_CONFIG_SCHEMA(value) - - CORE_CONFIG_SCHEMA( - { - "name": "Test name", - "latitude": "-23.45", - "longitude": "123.45", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "unit_system": "metric", - "currency": "USD", - "customize": {"sensor.temperature": {"hidden": True}}, - "country": "SE", - "language": "sv", - "radius": "10", - "webrtc": {"ice_servers": [{"url": "stun:custom_stun_server:3478"}]}, - } - ) - - -def test_core_config_schema_internal_external_warning( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test that we warn for internal/external URL with path.""" - CORE_CONFIG_SCHEMA( - { - "external_url": "https://www.example.com/bla", - "internal_url": "http://example.local/yo", - } - ) - - assert "Invalid external_url set" in caplog.text - assert "Invalid internal_url set" in caplog.text - - -def test_customize_dict_schema() -> None: - """Test basic customize config validation.""" - values = ({ATTR_FRIENDLY_NAME: None}, {ATTR_ASSUMED_STATE: "2"}) - - for val in values: - with pytest.raises(MultipleInvalid): - _CUSTOMIZE_DICT_SCHEMA(val) - - assert _CUSTOMIZE_DICT_SCHEMA({ATTR_FRIENDLY_NAME: 2, ATTR_ASSUMED_STATE: "0"}) == { - ATTR_FRIENDLY_NAME: "2", - ATTR_ASSUMED_STATE: False, - } - - -def test_webrtc_schema() -> None: - """Test webrtc config validation.""" - 
invalid_webrtc_configs = ( - "bla", - {}, - {"ice_servers": [], "unknown_key": 123}, - {"ice_servers": [{}]}, - {"ice_servers": [{"invalid_key": 123}]}, - ) - - valid_webrtc_configs = ( - ( - {"ice_servers": []}, - {"ice_servers": []}, - ), - ( - {"ice_servers": {"url": "stun:custom_stun_server:3478"}}, - {"ice_servers": [{"url": ["stun:custom_stun_server:3478"]}]}, - ), - ( - {"ice_servers": [{"url": "stun:custom_stun_server:3478"}]}, - {"ice_servers": [{"url": ["stun:custom_stun_server:3478"]}]}, - ), - ( - {"ice_servers": [{"url": ["stun:custom_stun_server:3478"]}]}, - {"ice_servers": [{"url": ["stun:custom_stun_server:3478"]}]}, - ), - ( - { - "ice_servers": [ - { - "url": ["stun:custom_stun_server:3478"], - "username": "bla", - "credential": "hunter2", - } - ] - }, - { - "ice_servers": [ - { - "url": ["stun:custom_stun_server:3478"], - "username": "bla", - "credential": "hunter2", - } - ] - }, - ), - ) - - for config in invalid_webrtc_configs: - with pytest.raises(MultipleInvalid): - CORE_CONFIG_SCHEMA({"webrtc": config}) - - for config, validated_webrtc in valid_webrtc_configs: - validated = CORE_CONFIG_SCHEMA({"webrtc": config}) - assert validated["webrtc"] == validated_webrtc - - -def test_validate_stun_or_turn_url() -> None: - """Test _validate_stun_or_turn_url.""" - invalid_urls = ( - "custom_stun_server", - "custom_stun_server:3478", - "bum:custom_stun_server:3478" "http://blah.com:80", - ) - - valid_urls = ( - "stun:custom_stun_server:3478", - "turn:custom_stun_server:3478", - "stuns:custom_stun_server:3478", - "turns:custom_stun_server:3478", - # The validator does not reject urls with path - "stun:custom_stun_server:3478/path", - "turn:custom_stun_server:3478/path", - "stuns:custom_stun_server:3478/path", - "turns:custom_stun_server:3478/path", - # The validator allows any query - "stun:custom_stun_server:3478?query", - "turn:custom_stun_server:3478?query", - "stuns:custom_stun_server:3478?query", - "turns:custom_stun_server:3478?query", - ) - - for url in invalid_urls: - with pytest.raises(Invalid): - _validate_stun_or_turn_url(url) - - for url in valid_urls: - assert _validate_stun_or_turn_url(url) == url - - -def test_customize_glob_is_ordered() -> None: - """Test that customize_glob preserves order.""" - conf = CORE_CONFIG_SCHEMA({"customize_glob": OrderedDict()}) - assert isinstance(conf["customize_glob"], OrderedDict) - - -async def _compute_state(hass: HomeAssistant, config: dict[str, Any]) -> State | None: - await async_process_ha_core_config(hass, config) - - entity = Entity() - entity.entity_id = "test.test" - entity.hass = hass - entity.schedule_update_ha_state() - - await hass.async_block_till_done() - - return hass.states.get("test.test") - - -async def test_entity_customization(hass: HomeAssistant) -> None: - """Test entity customization through configuration.""" - config = { - CONF_LATITUDE: 50, - CONF_LONGITUDE: 50, - CONF_NAME: "Test", - CONF_CUSTOMIZE: {"test.test": {"hidden": True}}, - } - - state = await _compute_state(hass, config) - - assert state.attributes["hidden"] - - -async def test_loading_configuration_from_storage( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test loading core config onto hass object.""" - hass_storage["core.config"] = { - "data": { - "elevation": 10, - "latitude": 55, - "location_name": "Home", - "longitude": 13, - "time_zone": "Europe/Copenhagen", - "unit_system": "metric", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "currency": "EUR", - "country": "SE", - 
"language": "sv", - "radius": 150, - }, - "key": "core.config", - "version": 1, - "minor_version": 4, - } - await async_process_ha_core_config(hass, {"allowlist_external_dirs": "/etc"}) - - assert hass.config.latitude == 55 - assert hass.config.longitude == 13 - assert hass.config.elevation == 10 - assert hass.config.location_name == "Home" - assert hass.config.units is METRIC_SYSTEM - assert hass.config.time_zone == "Europe/Copenhagen" - assert hass.config.external_url == "https://www.example.com" - assert hass.config.internal_url == "http://example.local" - assert hass.config.currency == "EUR" - assert hass.config.country == "SE" - assert hass.config.language == "sv" - assert hass.config.radius == 150 - assert len(hass.config.allowlist_external_dirs) == 3 - assert "/etc" in hass.config.allowlist_external_dirs - assert hass.config.config_source is ConfigSource.STORAGE - - -async def test_loading_configuration_from_storage_with_yaml_only( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test loading core and YAML config onto hass object.""" - hass_storage["core.config"] = { - "data": { - "elevation": 10, - "latitude": 55, - "location_name": "Home", - "longitude": 13, - "time_zone": "Europe/Copenhagen", - "unit_system": "metric", - }, - "key": "core.config", - "version": 1, - } - await async_process_ha_core_config( - hass, {"media_dirs": {"mymedia": "/usr"}, "allowlist_external_dirs": "/etc"} - ) - - assert hass.config.latitude == 55 - assert hass.config.longitude == 13 - assert hass.config.elevation == 10 - assert hass.config.location_name == "Home" - assert hass.config.units is METRIC_SYSTEM - assert hass.config.time_zone == "Europe/Copenhagen" - assert len(hass.config.allowlist_external_dirs) == 3 - assert "/etc" in hass.config.allowlist_external_dirs - assert hass.config.media_dirs == {"mymedia": "/usr"} - assert hass.config.config_source is ConfigSource.STORAGE - - -async def test_migration_and_updating_configuration( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test updating configuration stores the new configuration.""" - core_data = { - "data": { - "elevation": 10, - "latitude": 55, - "location_name": "Home", - "longitude": 13, - "time_zone": "Europe/Copenhagen", - "unit_system": "imperial", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "currency": "BTC", - }, - "key": "core.config", - "version": 1, - "minor_version": 1, - } - hass_storage["core.config"] = dict(core_data) - await async_process_ha_core_config(hass, {"allowlist_external_dirs": "/etc"}) - await hass.config.async_update(latitude=50, currency="USD") - - expected_new_core_data = copy.deepcopy(core_data) - # From async_update above - expected_new_core_data["data"]["latitude"] = 50 - expected_new_core_data["data"]["currency"] = "USD" - # 1.1 -> 1.2 store migration with migrated unit system - expected_new_core_data["data"]["unit_system_v2"] = "us_customary" - # 1.1 -> 1.3 defaults for country and language - expected_new_core_data["data"]["country"] = None - expected_new_core_data["data"]["language"] = "en" - # 1.1 -> 1.4 defaults for zone radius - expected_new_core_data["data"]["radius"] = 100 - # Bumped minor version - expected_new_core_data["minor_version"] = 4 - assert hass_storage["core.config"] == expected_new_core_data - assert hass.config.latitude == 50 - assert hass.config.currency == "USD" - assert hass.config.country is None - assert hass.config.language == "en" - assert hass.config.radius == 100 - - -async def 
test_override_stored_configuration( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test loading core and YAML config onto hass object.""" - hass_storage["core.config"] = { - "data": { - "elevation": 10, - "latitude": 55, - "location_name": "Home", - "longitude": 13, - "time_zone": "Europe/Copenhagen", - "unit_system": "metric", - }, - "key": "core.config", - "version": 1, - } - await async_process_ha_core_config( - hass, {"latitude": 60, "allowlist_external_dirs": "/etc"} - ) - - assert hass.config.latitude == 60 - assert hass.config.longitude == 13 - assert hass.config.elevation == 10 - assert hass.config.location_name == "Home" - assert hass.config.units is METRIC_SYSTEM - assert hass.config.time_zone == "Europe/Copenhagen" - assert len(hass.config.allowlist_external_dirs) == 3 - assert "/etc" in hass.config.allowlist_external_dirs - assert hass.config.config_source is ConfigSource.YAML - - -async def test_loading_configuration(hass: HomeAssistant) -> None: - """Test loading core config onto hass object.""" - await async_process_ha_core_config( - hass, - { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "America/New_York", - "allowlist_external_dirs": "/etc", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "media_dirs": {"mymedia": "/usr"}, - "debug": True, - "currency": "EUR", - "country": "SE", - "language": "sv", - "radius": 150, - "webrtc": {"ice_servers": [{"url": "stun:custom_stun_server:3478"}]}, - }, - ) - - assert hass.config.latitude == 60 - assert hass.config.longitude == 50 - assert hass.config.elevation == 25 - assert hass.config.location_name == "Huis" - assert hass.config.units is US_CUSTOMARY_SYSTEM - assert hass.config.time_zone == "America/New_York" - assert hass.config.external_url == "https://www.example.com" - assert hass.config.internal_url == "http://example.local" - assert len(hass.config.allowlist_external_dirs) == 3 - assert "/etc" in hass.config.allowlist_external_dirs - assert "/usr" in hass.config.allowlist_external_dirs - assert hass.config.media_dirs == {"mymedia": "/usr"} - assert hass.config.config_source is ConfigSource.YAML - assert hass.config.debug is True - assert hass.config.currency == "EUR" - assert hass.config.country == "SE" - assert hass.config.language == "sv" - assert hass.config.radius == 150 - assert hass.config.webrtc == RTCConfiguration( - [RTCIceServer(urls=["stun:custom_stun_server:3478"])] - ) - - -@pytest.mark.parametrize( - ("minor_version", "users", "user_data", "default_language"), - [ - (2, (), {}, "en"), - (2, ({"is_owner": True},), {}, "en"), - ( - 2, - ({"id": "user1", "is_owner": True},), - {"user1": {"language": {"language": "sv"}}}, - "sv", - ), - ( - 2, - ({"id": "user1", "is_owner": False},), - {"user1": {"language": {"language": "sv"}}}, - "en", - ), - (3, (), {}, "en"), - (3, ({"is_owner": True},), {}, "en"), - ( - 3, - ({"id": "user1", "is_owner": True},), - {"user1": {"language": {"language": "sv"}}}, - "en", - ), - ( - 3, - ({"id": "user1", "is_owner": False},), - {"user1": {"language": {"language": "sv"}}}, - "en", - ), - ], -) -async def test_language_default( - hass: HomeAssistant, - hass_storage: dict[str, Any], - minor_version, - users, - user_data, - default_language, -) -> None: - """Test language config default to owner user's language during migration. 
- - This should only happen if the core store version < 1.3 - """ - core_data = { - "data": {}, - "key": "core.config", - "version": 1, - "minor_version": minor_version, - } - hass_storage["core.config"] = dict(core_data) - - for user_config in users: - user = MockUser(**user_config).add_to_hass(hass) - if user.id not in user_data: - continue - storage_key = f"frontend.user_data_{user.id}" - hass_storage[storage_key] = { - "key": storage_key, - "version": 1, - "data": user_data[user.id], - } - - await async_process_ha_core_config( - hass, - {}, - ) - assert hass.config.language == default_language - - -async def test_loading_configuration_default_media_dirs_docker( - hass: HomeAssistant, -) -> None: - """Test loading core config onto hass object.""" - with patch("homeassistant.core_config.is_docker_env", return_value=True): - await async_process_ha_core_config( - hass, - { - "name": "Huis", - }, - ) - - assert hass.config.location_name == "Huis" - assert len(hass.config.allowlist_external_dirs) == 2 - assert "/media" in hass.config.allowlist_external_dirs - assert hass.config.media_dirs == {"local": "/media"} - - -async def test_loading_configuration_from_packages(hass: HomeAssistant) -> None: - """Test loading packages config onto hass object config.""" - await async_process_ha_core_config( - hass, - { - "latitude": 39, - "longitude": -1, - "elevation": 500, - "name": "Huis", - "unit_system": "metric", - "time_zone": "Europe/Madrid", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "packages": { - "package_1": {"wake_on_lan": None}, - "package_2": { - "light": {"platform": "hue"}, - "media_extractor": None, - "sun": None, - }, - }, - }, - ) - - # Empty packages not allowed - with pytest.raises(MultipleInvalid): - await async_process_ha_core_config( - hass, - { - "latitude": 39, - "longitude": -1, - "elevation": 500, - "name": "Huis", - "unit_system": "metric", - "time_zone": "Europe/Madrid", - "packages": {"empty_package": None}, - }, - ) - - -@pytest.mark.parametrize( - ("unit_system_name", "expected_unit_system"), - [ - ("metric", METRIC_SYSTEM), - ("imperial", US_CUSTOMARY_SYSTEM), - ("us_customary", US_CUSTOMARY_SYSTEM), - ], -) -async def test_loading_configuration_unit_system( - hass: HomeAssistant, unit_system_name: str, expected_unit_system: UnitSystem -) -> None: - """Test backward compatibility when loading core config.""" - await async_process_ha_core_config( - hass, - { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": unit_system_name, - "time_zone": "America/New_York", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - }, - ) - - assert hass.config.units is expected_unit_system - - -async def test_merge_customize(hass: HomeAssistant) -> None: - """Test loading core config onto hass object.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - "customize": {"a.a": {"friendly_name": "A"}}, - "packages": { - "pkg1": {"homeassistant": {"customize": {"b.b": {"friendly_name": "BB"}}}} - }, - } - await async_process_ha_core_config(hass, core_config) - - assert hass.data[DATA_CUSTOMIZE].get("b.b") == {"friendly_name": "BB"} - - -async def test_auth_provider_config(hass: HomeAssistant) -> None: - """Test loading auth provider config onto hass object.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - 
"time_zone": "GMT", - CONF_AUTH_PROVIDERS: [ - {"type": "homeassistant"}, - ], - CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp", "id": "second"}], - } - if hasattr(hass, "auth"): - del hass.auth - await async_process_ha_core_config(hass, core_config) - - assert len(hass.auth.auth_providers) == 1 - assert hass.auth.auth_providers[0].type == "homeassistant" - assert len(hass.auth.auth_mfa_modules) == 2 - assert hass.auth.auth_mfa_modules[0].id == "totp" - assert hass.auth.auth_mfa_modules[1].id == "second" - - -async def test_auth_provider_config_default(hass: HomeAssistant) -> None: - """Test loading default auth provider config.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - } - if hasattr(hass, "auth"): - del hass.auth - await async_process_ha_core_config(hass, core_config) - - assert len(hass.auth.auth_providers) == 1 - assert hass.auth.auth_providers[0].type == "homeassistant" - assert len(hass.auth.auth_mfa_modules) == 1 - assert hass.auth.auth_mfa_modules[0].id == "totp" - - -async def test_disallowed_auth_provider_config(hass: HomeAssistant) -> None: - """Test loading insecure example auth provider is disallowed.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_PROVIDERS: [ - { - "type": "insecure_example", - "users": [ - { - "username": "test-user", - "password": "test-pass", - "name": "Test Name", - } - ], - } - ], - } - with pytest.raises(Invalid): - await async_process_ha_core_config(hass, core_config) - - -async def test_disallowed_duplicated_auth_provider_config(hass: HomeAssistant) -> None: - """Test loading insecure example auth provider is disallowed.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_PROVIDERS: [{"type": "homeassistant"}, {"type": "homeassistant"}], - } - with pytest.raises(Invalid): - await async_process_ha_core_config(hass, core_config) - - -async def test_disallowed_auth_mfa_module_config(hass: HomeAssistant) -> None: - """Test loading insecure example auth mfa module is disallowed.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_MFA_MODULES: [ - { - "type": "insecure_example", - "data": [{"user_id": "mock-user", "pin": "test-pin"}], - } - ], - } - with pytest.raises(Invalid): - await async_process_ha_core_config(hass, core_config) - - -async def test_disallowed_duplicated_auth_mfa_module_config( - hass: HomeAssistant, -) -> None: - """Test loading insecure example auth mfa module is disallowed.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp"}], - } - with pytest.raises(Invalid): - await async_process_ha_core_config(hass, core_config) - - -async def test_core_config_schema_historic_currency( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test core config schema.""" - await async_process_ha_core_config(hass, {"currency": "LTT"}) - - issue = issue_registry.async_get_issue("homeassistant", "historic_currency") - assert issue - assert issue.translation_placeholders == {"currency": "LTT"} - - -async def test_core_store_historic_currency( - hass: 
HomeAssistant, hass_storage: dict[str, Any], issue_registry: ir.IssueRegistry -) -> None: - """Test core config store.""" - core_data = { - "data": { - "currency": "LTT", - }, - "key": "core.config", - "version": 1, - "minor_version": 1, - } - hass_storage["core.config"] = dict(core_data) - await async_process_ha_core_config(hass, {}) - - issue_id = "historic_currency" - issue = issue_registry.async_get_issue("homeassistant", issue_id) - assert issue - assert issue.translation_placeholders == {"currency": "LTT"} - - await hass.config.async_update(currency="EUR") - issue = issue_registry.async_get_issue("homeassistant", issue_id) - assert not issue - - -async def test_core_config_schema_no_country( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test core config schema.""" - await async_process_ha_core_config(hass, {}) - - issue = issue_registry.async_get_issue("homeassistant", "country_not_configured") - assert issue - - -async def test_core_store_no_country( - hass: HomeAssistant, hass_storage: dict[str, Any], issue_registry: ir.IssueRegistry -) -> None: - """Test core config store.""" - core_data = { - "data": {}, - "key": "core.config", - "version": 1, - "minor_version": 1, - } - hass_storage["core.config"] = dict(core_data) - await async_process_ha_core_config(hass, {}) - - issue_id = "country_not_configured" - issue = issue_registry.async_get_issue("homeassistant", issue_id) - assert issue - - await hass.config.async_update(country="SE") - issue = issue_registry.async_get_issue("homeassistant", issue_id) - assert not issue - - -async def test_configuration_legacy_template_is_removed(hass: HomeAssistant) -> None: - """Test loading core config onto hass object.""" - await async_process_ha_core_config( - hass, - { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "America/New_York", - "allowlist_external_dirs": "/etc", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "media_dirs": {"mymedia": "/usr"}, - "legacy_templates": True, - "debug": True, - "currency": "EUR", - "country": "SE", - "language": "sv", - "radius": 150, - }, - ) - - assert not getattr(hass.config, "legacy_templates") - - -async def test_config_defaults() -> None: - """Test config defaults.""" - hass = Mock() - hass.data = {} - config = Config(hass, "/test/ha-config") - assert config.hass is hass - assert config.latitude == 0 - assert config.longitude == 0 - assert config.elevation == 0 - assert config.location_name == "Home" - assert config.time_zone == "UTC" - assert config.internal_url is None - assert config.external_url is None - assert config.config_source is ConfigSource.DEFAULT - assert config.skip_pip is False - assert config.skip_pip_packages == [] - assert config.components == set() - assert config.api is None - assert config.config_dir == "/test/ha-config" - assert config.allowlist_external_dirs == set() - assert config.allowlist_external_urls == set() - assert config.media_dirs == {} - assert config.recovery_mode is False - assert config.legacy_templates is False - assert config.currency == "EUR" - assert config.country is None - assert config.language == "en" - assert config.radius == 100 - - -async def test_config_path_with_file() -> None: - """Test get_config_path method.""" - hass = Mock() - hass.data = {} - config = Config(hass, "/test/ha-config") - assert config.path("test.conf") == "/test/ha-config/test.conf" - - -async def test_config_path_with_dir_and_file() -> None: - 
"""Test get_config_path method.""" - hass = Mock() - hass.data = {} - config = Config(hass, "/test/ha-config") - assert config.path("dir", "test.conf") == "/test/ha-config/dir/test.conf" - - -async def test_config_as_dict() -> None: - """Test as dict.""" - hass = Mock() - hass.data = {} - config = Config(hass, "/test/ha-config") - type(config.hass.state).value = PropertyMock(return_value="RUNNING") - expected = { - "latitude": 0, - "longitude": 0, - "elevation": 0, - CONF_UNIT_SYSTEM: METRIC_SYSTEM.as_dict(), - "location_name": "Home", - "time_zone": "UTC", - "components": [], - "config_dir": "/test/ha-config", - "whitelist_external_dirs": [], - "allowlist_external_dirs": [], - "allowlist_external_urls": [], - "version": __version__, - "config_source": ConfigSource.DEFAULT, - "recovery_mode": False, - "state": "RUNNING", - "external_url": None, - "internal_url": None, - "currency": "EUR", - "country": None, - "language": "en", - "safe_mode": False, - "debug": False, - "radius": 100, - } - - assert expected == config.as_dict() - - -async def test_config_is_allowed_path() -> None: - """Test is_allowed_path method.""" - hass = Mock() - hass.data = {} - config = Config(hass, "/test/ha-config") - with TemporaryDirectory() as tmp_dir: - # The created dir is in /tmp. This is a symlink on OS X - # causing this test to fail unless we resolve path first. - config.allowlist_external_dirs = {os.path.realpath(tmp_dir)} - - test_file = os.path.join(tmp_dir, "test.jpg") - await asyncio.get_running_loop().run_in_executor( - None, Path(test_file).write_text, "test" - ) - - valid = [test_file, tmp_dir, os.path.join(tmp_dir, "notfound321")] - for path in valid: - assert config.is_allowed_path(path) - - config.allowlist_external_dirs = {"/home", "/var"} - - invalid = [ - "/hass/config/secure", - "/etc/passwd", - "/root/secure_file", - "/var/../etc/passwd", - test_file, - ] - for path in invalid: - assert not config.is_allowed_path(path) - - with pytest.raises(AssertionError): - config.is_allowed_path(None) - - -async def test_config_is_allowed_external_url() -> None: - """Test is_allowed_external_url method.""" - hass = Mock() - hass.data = {} - config = Config(hass, "/test/ha-config") - config.allowlist_external_urls = [ - "http://x.com/", - "https://y.com/bla/", - "https://z.com/images/1.jpg/", - ] - - valid = [ - "http://x.com/1.jpg", - "http://x.com", - "https://y.com/bla/", - "https://y.com/bla/2.png", - "https://z.com/images/1.jpg", - ] - for url in valid: - assert config.is_allowed_external_url(url) - - invalid = [ - "https://a.co", - "https://y.com/bla_wrong", - "https://y.com/bla/../image.jpg", - "https://z.com/images", - ] - for url in invalid: - assert not config.is_allowed_external_url(url) - - -async def test_event_on_update(hass: HomeAssistant) -> None: - """Test that event is fired on update.""" - events = async_capture_events(hass, EVENT_CORE_CONFIG_UPDATE) - - assert hass.config.latitude != 12 - - await hass.config.async_update(latitude=12) - await hass.async_block_till_done() - - assert hass.config.latitude == 12 - assert len(events) == 1 - assert events[0].data == {"latitude": 12} - - -async def test_bad_timezone_raises_value_error(hass: HomeAssistant) -> None: - """Test bad timezone raises ValueError.""" - with pytest.raises(ValueError): - await hass.config.async_update(time_zone="not_a_timezone") - - -async def test_additional_data_in_core_config( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test that we can handle additional data in core configuration.""" - config 
= Config(hass, "/test/ha-config") - config.async_initialize() - hass_storage[CORE_STORAGE_KEY] = { - "version": 1, - "data": {"location_name": "Test Name", "additional_valid_key": "value"}, - } - await config.async_load() - assert config.location_name == "Test Name" - - -async def test_incorrect_internal_external_url( - hass: HomeAssistant, hass_storage: dict[str, Any], caplog: pytest.LogCaptureFixture -) -> None: - """Test that we warn when detecting invalid internal/external url.""" - config = Config(hass, "/test/ha-config") - config.async_initialize() - - hass_storage[CORE_STORAGE_KEY] = { - "version": 1, - "data": { - "internal_url": None, - "external_url": None, - }, - } - await config.async_load() - assert "Invalid external_url set" not in caplog.text - assert "Invalid internal_url set" not in caplog.text - - config = Config(hass, "/test/ha-config") - config.async_initialize() - - hass_storage[CORE_STORAGE_KEY] = { - "version": 1, - "data": { - "internal_url": "https://community.home-assistant.io/profile", - "external_url": "https://www.home-assistant.io/blue", - }, - } - await config.async_load() - assert "Invalid external_url set" in caplog.text - assert "Invalid internal_url set" in caplog.text - - -async def test_top_level_components(hass: HomeAssistant) -> None: - """Test top level components are updated when components change.""" - hass.config.components.add("homeassistant") - assert hass.config.components == {"homeassistant"} - assert hass.config.top_level_components == {"homeassistant"} - hass.config.components.add("homeassistant.scene") - assert hass.config.components == {"homeassistant", "homeassistant.scene"} - assert hass.config.top_level_components == {"homeassistant"} - hass.config.components.remove("homeassistant") - assert hass.config.components == {"homeassistant.scene"} - assert hass.config.top_level_components == set() - with pytest.raises(ValueError): - hass.config.components.remove("homeassistant.scene") - with pytest.raises(NotImplementedError): - hass.config.components.discard("homeassistant") - - -async def test_debug_mode_defaults_to_off(hass: HomeAssistant) -> None: - """Test debug mode defaults to off.""" - assert not hass.config.debug - - -async def test_set_time_zone_deprecated(hass: HomeAssistant) -> None: - """Test set_time_zone is deprecated.""" - with pytest.raises( - RuntimeError, - match=re.escape( - "Detected code that set the time zone using set_time_zone instead of " - "async_set_time_zone which will stop working in Home Assistant 2025.6. 
" - "Please report this issue.", - ), - ): - await hass.config.set_time_zone("America/New_York") diff --git a/tests/test_data_entry_flow.py b/tests/test_data_entry_flow.py index 32020ac0d76..967b2565206 100644 --- a/tests/test_data_entry_flow.py +++ b/tests/test_data_entry_flow.py @@ -781,6 +781,83 @@ async def test_async_get_unknown_flow(manager: MockFlowManager) -> None: await manager.async_get("does_not_exist") +async def test_async_has_matching_flow( + hass: HomeAssistant, manager: MockFlowManager +) -> None: + """Test we can check for matching flows.""" + manager.hass = hass + assert ( + manager.async_has_matching_flow( + "test", + {"source": config_entries.SOURCE_HOMEKIT}, + {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + is False + ) + + @manager.mock_reg_handler("test") + class TestFlow(data_entry_flow.FlowHandler): + VERSION = 5 + + async def async_step_init(self, user_input=None): + return self.async_show_progress( + step_id="init", + progress_action="task_one", + ) + + result = await manager.async_init( + "test", + context={"source": config_entries.SOURCE_HOMEKIT}, + data={"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + assert result["type"] == data_entry_flow.FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "task_one" + assert len(manager.async_progress()) == 1 + assert len(manager.async_progress_by_handler("test")) == 1 + assert ( + len( + manager.async_progress_by_handler( + "test", match_context={"source": config_entries.SOURCE_HOMEKIT} + ) + ) + == 1 + ) + assert ( + len( + manager.async_progress_by_handler( + "test", match_context={"source": config_entries.SOURCE_BLUETOOTH} + ) + ) + == 0 + ) + assert manager.async_get(result["flow_id"])["handler"] == "test" + + assert ( + manager.async_has_matching_flow( + "test", + {"source": config_entries.SOURCE_HOMEKIT}, + {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + is True + ) + assert ( + manager.async_has_matching_flow( + "test", + {"source": config_entries.SOURCE_SSDP}, + {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + is False + ) + assert ( + manager.async_has_matching_flow( + "other", + {"source": config_entries.SOURCE_HOMEKIT}, + {"properties": {"id": "aa:bb:cc:dd:ee:ff"}}, + ) + is False + ) + + async def test_move_to_unknown_step_raises_and_removes_from_in_progress( manager: MockFlowManager, ) -> None: @@ -1021,27 +1098,3 @@ def test_section_in_serializer() -> None: ], "type": "expandable", } - - -def test_nested_section_in_serializer() -> None: - """Test section with custom_serializer.""" - with pytest.raises( - ValueError, match="Nesting expandable sections is not supported" - ): - cv.custom_serializer( - data_entry_flow.section( - vol.Schema( - { - vol.Required("section_1"): data_entry_flow.section( - vol.Schema( - { - vol.Optional("option_1", default=False): bool, - vol.Required("option_2"): int, - } - ) - ) - } - ), - {"collapsed": False}, - ) - ) diff --git a/tests/test_loader.py b/tests/test_loader.py index 57d3d6fa832..ae5280b2dcd 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -6,7 +6,7 @@ import pathlib import sys import threading from typing import Any -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock, Mock, patch from awesomeversion import AwesomeVersion import pytest @@ -583,7 +583,6 @@ def test_integration_properties(hass: HomeAssistant) -> None: assert integration.dependencies == ["test-dep"] assert integration.requirements == ["test-req==1.0.0"] assert integration.is_built_in is True - assert integration.overwrites_built_in is 
False assert integration.version == "1.0.0" integration = loader.Integration( @@ -598,7 +597,6 @@ def test_integration_properties(hass: HomeAssistant) -> None: }, ) assert integration.is_built_in is False - assert integration.overwrites_built_in is True assert integration.homekit is None assert integration.zeroconf is None assert integration.dhcp is None @@ -621,7 +619,6 @@ def test_integration_properties(hass: HomeAssistant) -> None: }, ) assert integration.is_built_in is False - assert integration.overwrites_built_in is True assert integration.homekit is None assert integration.zeroconf == [{"type": "_hue._tcp.local.", "name": "hue*"}] assert integration.dhcp is None @@ -661,9 +658,7 @@ def _get_test_integration( ) -def _get_test_integration_with_application_credentials( - hass: HomeAssistant, name: str -) -> loader.Integration: +def _get_test_integration_with_application_credentials(hass, name): """Return a generated test integration with application_credentials support.""" return loader.Integration( hass, @@ -683,9 +678,7 @@ def _get_test_integration_with_application_credentials( ) -def _get_test_integration_with_zeroconf_matcher( - hass: HomeAssistant, name: str, config_flow: bool -) -> loader.Integration: +def _get_test_integration_with_zeroconf_matcher(hass, name, config_flow): """Return a generated test integration with a zeroconf matcher.""" return loader.Integration( hass, @@ -704,9 +697,7 @@ def _get_test_integration_with_zeroconf_matcher( ) -def _get_test_integration_with_legacy_zeroconf_matcher( - hass: HomeAssistant, name: str, config_flow: bool -) -> loader.Integration: +def _get_test_integration_with_legacy_zeroconf_matcher(hass, name, config_flow): """Return a generated test integration with a legacy zeroconf matcher.""" return loader.Integration( hass, @@ -733,9 +724,7 @@ def _get_test_integration_with_legacy_zeroconf_matcher( ) -def _get_test_integration_with_dhcp_matcher( - hass: HomeAssistant, name: str, config_flow: bool -) -> loader.Integration: +def _get_test_integration_with_dhcp_matcher(hass, name, config_flow): """Return a generated test integration with a dhcp matcher.""" return loader.Integration( hass, @@ -759,9 +748,7 @@ def _get_test_integration_with_dhcp_matcher( ) -def _get_test_integration_with_bluetooth_matcher( - hass: HomeAssistant, name: str, config_flow: bool -) -> loader.Integration: +def _get_test_integration_with_bluetooth_matcher(hass, name, config_flow): """Return a generated test integration with a bluetooth matcher.""" return loader.Integration( hass, @@ -780,9 +767,7 @@ def _get_test_integration_with_bluetooth_matcher( ) -def _get_test_integration_with_usb_matcher( - hass: HomeAssistant, name: str, config_flow: bool -) -> loader.Integration: +def _get_test_integration_with_usb_matcher(hass, name, config_flow): """Return a generated test integration with a usb matcher.""" return loader.Integration( hass, @@ -818,7 +803,7 @@ async def test_get_custom_components(hass: HomeAssistant) -> None: test_1_integration = _get_test_integration(hass, "test_1", False) test_2_integration = _get_test_integration(hass, "test_2", True) - name = "homeassistant.loader._get_custom_components" + name = "homeassistant.loader._async_get_custom_components" with patch(name) as mock_get: mock_get.return_value = { "test_1": test_1_integration, @@ -831,29 +816,6 @@ async def test_get_custom_components(hass: HomeAssistant) -> None: mock_get.assert_called_once_with(hass) -@pytest.mark.usefixtures("enable_custom_integrations") -async def 
test_custom_component_overwriting_core(hass: HomeAssistant) -> None: - """Test loading a custom component that overwrites a core component.""" - # First load the core 'light' component - core_light = await loader.async_get_integration(hass, "light") - assert core_light.is_built_in is True - - # create a mock custom 'light' component - mock_integration( - hass, - MockModule("light", partial_manifest={"version": "1.0.0"}), - built_in=False, - ) - - # Try to load the 'light' component again - custom_light = await loader.async_get_integration(hass, "light") - - # Assert that we got the custom component instead of the core one - assert custom_light.is_built_in is False - assert custom_light.overwrites_built_in is True - assert custom_light.version == "1.0.0" - - async def test_get_config_flows(hass: HomeAssistant) -> None: """Verify that custom components with config_flow are available.""" test_1_integration = _get_test_integration(hass, "test_1", False) @@ -1295,29 +1257,26 @@ async def test_config_folder_not_in_path() -> None: import tests.testing_config.check_config_not_in_path # noqa: F401 -@pytest.mark.parametrize( - ("integration_frame_path", "expected"), - [ - pytest.param( - "custom_components/test_integration_frame", True, id="custom integration" - ), - pytest.param( - "homeassistant/components/test_integration_frame", - False, - id="core integration", - ), - pytest.param("homeassistant/test_integration_frame", False, id="core"), - ], -) -@pytest.mark.usefixtures("mock_integration_frame") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_hass_components_use_reported( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - expected: bool, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock ) -> None: - """Test whether use of hass.components is reported.""" + """Test that use of hass.components is reported.""" + mock_integration_frame.filename = ( + "/home/paulus/homeassistant/custom_components/demo/light.py" + ) + integration_frame = frame.IntegrationFrame( + custom_integration=True, + frame=mock_integration_frame, + integration="test_integration_frame", + module="custom_components.test_integration_frame", + relative_filename="custom_components/test_integration_frame/__init__.py", + ) + with ( + patch( + "homeassistant.helpers.frame.get_integration_frame", + return_value=integration_frame, + ), patch( "homeassistant.components.http.start_http_server_and_save_config", return_value=None, @@ -1325,11 +1284,10 @@ async def test_hass_components_use_reported( ): await hass.components.http.start_http_server_and_save_config(hass, [], None) - reported = ( + assert ( "Detected that custom integration 'test_integration_frame'" " accesses hass.components.http. 
This is deprecated" ) in caplog.text - assert reported == expected async def test_async_get_component_preloads_config_and_config_flow( @@ -1991,29 +1949,24 @@ async def test_has_services(hass: HomeAssistant) -> None: assert integration.has_services is True -@pytest.mark.parametrize( - ("integration_frame_path", "expected"), - [ - pytest.param( - "custom_components/test_integration_frame", True, id="custom integration" - ), - pytest.param( - "homeassistant/components/test_integration_frame", - False, - id="core integration", - ), - pytest.param("homeassistant/test_integration_frame", False, id="core"), - ], -) -@pytest.mark.usefixtures("mock_integration_frame") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_hass_helpers_use_reported( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - expected: bool, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock ) -> None: - """Test whether use of hass.helpers is reported.""" + """Test that use of hass.components is reported.""" + integration_frame = frame.IntegrationFrame( + custom_integration=True, + frame=mock_integration_frame, + integration="test_integration_frame", + module="custom_components.test_integration_frame", + relative_filename="custom_components/test_integration_frame/__init__.py", + ) + with ( + patch.object(frame, "_REPORTED_INTEGRATIONS", new=set()), + patch( + "homeassistant.helpers.frame.get_integration_frame", + return_value=integration_frame, + ), patch( "homeassistant.helpers.aiohttp_client.async_get_clientsession", return_value=None, @@ -2021,11 +1974,10 @@ async def test_hass_helpers_use_reported( ): hass.helpers.aiohttp_client.async_get_clientsession() - reported = ( + assert ( "Detected that custom integration 'test_integration_frame' " "accesses hass.helpers.aiohttp_client. 
This is deprecated" ) in caplog.text - assert reported == expected async def test_manifest_json_fragment_round_trip(hass: HomeAssistant) -> None: diff --git a/tests/test_main.py b/tests/test_main.py index d32ca59a846..080787311a0 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -3,7 +3,7 @@ from unittest.mock import PropertyMock, patch from homeassistant import __main__ as main -from homeassistant.const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE +from homeassistant.const import REQUIRED_PYTHON_VER @patch("sys.exit") @@ -86,13 +86,3 @@ def test_skip_pip_mutually_exclusive(mock_exit) -> None: assert mock_exit.called is False args = parse_args("--skip-pip", "--skip-pip-packages", "foo") assert mock_exit.called is True - - -def test_restart_after_backup_restore() -> None: - """Test restarting if we restored a backup.""" - with ( - patch("sys.argv", ["python"]), - patch("homeassistant.__main__.restore_backup", return_value=True), - ): - exit_code = main.main() - assert exit_code == RESTART_EXIT_CODE diff --git a/tests/test_requirements.py b/tests/test_requirements.py index 191e1b7368c..161214160aa 100644 --- a/tests/test_requirements.py +++ b/tests/test_requirements.py @@ -585,8 +585,7 @@ async def test_discovery_requirements_mqtt(hass: HomeAssistant) -> None: ) as mock_process: await async_get_integration_with_requirements(hass, "mqtt_comp") - assert len(mock_process.mock_calls) == 2 - # one for mqtt and one for hassio + assert len(mock_process.mock_calls) == 1 assert mock_process.mock_calls[0][1][1] == mqtt.requirements @@ -603,12 +602,12 @@ async def test_discovery_requirements_ssdp(hass: HomeAssistant) -> None: ) as mock_process: await async_get_integration_with_requirements(hass, "ssdp_comp") - assert len(mock_process.mock_calls) == 2 + assert len(mock_process.mock_calls) == 3 assert mock_process.mock_calls[0][1][1] == ssdp.requirements assert { - mock_process.mock_calls[0][1][0], mock_process.mock_calls[1][1][0], - } == {"network", "ssdp"} + mock_process.mock_calls[2][1][0], + } == {"network", "recorder"} @pytest.mark.parametrize( @@ -632,7 +631,7 @@ async def test_discovery_requirements_zeroconf( ) as mock_process: await async_get_integration_with_requirements(hass, "comp") - assert len(mock_process.mock_calls) == 2 + assert len(mock_process.mock_calls) == 3 assert mock_process.mock_calls[0][1][1] == zeroconf.requirements diff --git a/tests/test_runner.py b/tests/test_runner.py index c61b8ed5628..90678454adf 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -2,7 +2,6 @@ import asyncio from collections.abc import Iterator -import subprocess import threading from unittest.mock import patch @@ -105,7 +104,7 @@ def test_run_does_not_block_forever_with_shielded_task( try: await asyncio.sleep(2) except asyncio.CancelledError: - raise Exception # noqa: TRY002 + raise Exception # pylint: disable=broad-exception-raised async def async_shielded(*_): try: @@ -142,7 +141,8 @@ async def test_unhandled_exception_traceback( async def _unhandled_exception(): raised.set() - raise Exception("This is unhandled") # noqa: TRY002 + # pylint: disable-next=broad-exception-raised + raise Exception("This is unhandled") try: hass.loop.set_debug(True) @@ -169,21 +169,21 @@ def test_enable_posix_spawn() -> None: yield from packaging.tags.parse_tag("cp311-cp311-musllinux_1_1_x86_64") with ( - patch.object(subprocess, "_USE_POSIX_SPAWN", False), + patch.object(runner.subprocess, "_USE_POSIX_SPAWN", False), patch( "homeassistant.runner.packaging.tags.sys_tags", 
side_effect=_mock_sys_tags_musl, ), ): runner._enable_posix_spawn() - assert subprocess._USE_POSIX_SPAWN is True + assert runner.subprocess._USE_POSIX_SPAWN is True with ( - patch.object(subprocess, "_USE_POSIX_SPAWN", False), + patch.object(runner.subprocess, "_USE_POSIX_SPAWN", False), patch( "homeassistant.runner.packaging.tags.sys_tags", side_effect=_mock_sys_tags_any, ), ): runner._enable_posix_spawn() - assert subprocess._USE_POSIX_SPAWN is False + assert runner.subprocess._USE_POSIX_SPAWN is False diff --git a/tests/test_setup.py b/tests/test_setup.py index 2d15c670cf7..1e19f1a7b76 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -9,22 +9,14 @@ import pytest import voluptuous as vol from homeassistant import config_entries, loader, setup -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_START -from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, - CoreState, - HomeAssistant, - callback, -) +from homeassistant.core import CoreState, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, discovery, translation from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) -from homeassistant.helpers.issue_registry import IssueRegistry -from homeassistant.helpers.typing import ConfigType from .common import ( MockConfigEntry, @@ -244,43 +236,9 @@ async def test_validate_platform_config_4(hass: HomeAssistant) -> None: hass.config.components.remove("platform_conf") -async def test_component_not_found( - hass: HomeAssistant, issue_registry: IssueRegistry -) -> None: - """setup_component should raise a repair issue if component doesn't exist.""" - MockConfigEntry(domain="non_existing").add_to_hass(hass) +async def test_component_not_found(hass: HomeAssistant) -> None: + """setup_component should not crash if component doesn't exist.""" assert await setup.async_setup_component(hass, "non_existing", {}) is False - assert len(issue_registry.issues) == 1 - assert ( - HOMEASSISTANT_DOMAIN, - "integration_not_found.non_existing", - ) in issue_registry.issues - - -async def test_yaml_component_not_found( - hass: HomeAssistant, issue_registry: IssueRegistry -) -> None: - """setup_component should only raise an exception for missing config entry integrations.""" - assert await setup.async_setup_component(hass, "non_existing", {}) is False - assert len(issue_registry.issues) == 0 - assert ( - HOMEASSISTANT_DOMAIN, - "integration_not_found.non_existing", - ) not in issue_registry.issues - - -async def test_component_missing_not_raising_in_safe_mode( - hass: HomeAssistant, issue_registry: IssueRegistry -) -> None: - """setup_component should not raise an issue if component doesn't exist in safe.""" - MockConfigEntry(domain="non_existing").add_to_hass(hass) - hass.config.safe_mode = True - assert await setup.async_setup_component(hass, "non_existing", {}) is False - assert len(issue_registry.issues) == 0 - assert ( - HOMEASSISTANT_DOMAIN, - "integration_not_found.non_existing", - ) not in issue_registry.issues async def test_component_not_double_initialized(hass: HomeAssistant) -> None: @@ -317,10 +275,9 @@ async def test_component_not_setup_twice_if_loaded_during_other_setup( """Test component setup while waiting for lock is not set up twice.""" result = [] - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def async_setup(hass, config): """Tracking 
Setup.""" result.append(1) - return True mock_integration(hass, MockModule("comp", async_setup=async_setup)) @@ -365,9 +322,9 @@ async def test_component_exception_setup(hass: HomeAssistant) -> None: """Test component that raises exception during setup.""" setup.async_set_domains_to_be_loaded(hass, {"comp"}) - def exception_setup(hass: HomeAssistant, config: ConfigType) -> bool: + def exception_setup(hass, config): """Raise exception.""" - raise Exception("fail!") # noqa: TRY002 + raise Exception("fail!") # pylint: disable=broad-exception-raised mock_integration(hass, MockModule("comp", setup=exception_setup)) @@ -379,9 +336,9 @@ async def test_component_base_exception_setup(hass: HomeAssistant) -> None: """Test component that raises exception during setup.""" setup.async_set_domains_to_be_loaded(hass, {"comp"}) - def exception_setup(hass: HomeAssistant, config: ConfigType) -> bool: + def exception_setup(hass, config): """Raise exception.""" - raise BaseException("fail!") # noqa: TRY002 + raise BaseException("fail!") # pylint: disable=broad-exception-raised mock_integration(hass, MockModule("comp", setup=exception_setup)) @@ -397,11 +354,12 @@ async def test_component_setup_with_validation_and_dependency( ) -> None: """Test all config is passed to dependencies.""" - def config_check_setup(hass: HomeAssistant, config: ConfigType) -> bool: + def config_check_setup(hass, config): """Test that config is passed in.""" if config.get("comp_a", {}).get("valid", False): return True - raise Exception(f"Config not passed in: {config}") # noqa: TRY002 + # pylint: disable-next=broad-exception-raised + raise Exception(f"Config not passed in: {config}") platform = MockPlatform() @@ -519,7 +477,7 @@ async def test_all_work_done_before_start(hass: HomeAssistant) -> None: """Test all init work done till start.""" call_order = [] - async def component1_setup(hass: HomeAssistant, config: ConfigType) -> bool: + async def component1_setup(hass, config): """Set up mock component.""" await discovery.async_discover( hass, "test_component2", {}, "test_component2", {} @@ -529,7 +487,7 @@ async def test_all_work_done_before_start(hass: HomeAssistant) -> None: ) return True - def component_track_setup(hass: HomeAssistant, config: ConfigType) -> bool: + def component_track_setup(hass, config): """Set up mock component.""" call_order.append(1) return True @@ -605,7 +563,7 @@ async def test_when_setup_already_loaded(hass: HomeAssistant) -> None: """Test when setup.""" calls = [] - async def mock_callback(hass: HomeAssistant, component: str) -> None: + async def mock_callback(hass, component): """Mock callback.""" calls.append(component) @@ -633,7 +591,7 @@ async def test_async_when_setup_or_start_already_loaded(hass: HomeAssistant) -> """Test when setup or start.""" calls = [] - async def mock_callback(hass: HomeAssistant, component: str) -> None: + async def mock_callback(hass, component): """Mock callback.""" calls.append(component) @@ -679,7 +637,7 @@ async def test_parallel_entry_setup(hass: HomeAssistant, mock_handlers) -> None: calls = [] - async def mock_async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + async def mock_async_setup_entry(hass, entry): """Mock setting up an entry.""" calls.append(entry.data["value"]) await asyncio.sleep(0) diff --git a/tests/test_util/aiohttp.py b/tests/test_util/aiohttp.py index 633f98dc5b3..b4b8cfa4b6d 100644 --- a/tests/test_util/aiohttp.py +++ b/tests/test_util/aiohttp.py @@ -1,12 +1,9 @@ """Aiohttp test utils.""" import asyncio -from collections.abc import 
Iterator from contextlib import contextmanager from http import HTTPStatus import re -from types import TracebackType -from typing import Any from unittest import mock from urllib.parse import parse_qs @@ -21,7 +18,6 @@ from multidict import CIMultiDict from yarl import URL from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE -from homeassistant.core import HomeAssistant from homeassistant.helpers.json import json_dumps from homeassistant.util.json import json_loads @@ -40,7 +36,7 @@ def mock_stream(data): class AiohttpClientMocker: """Mock Aiohttp client requests.""" - def __init__(self) -> None: + def __init__(self): """Initialize the request mocker.""" self._mocks = [] self._cookies = {} @@ -167,7 +163,7 @@ class AiohttpClientMockResponse: def __init__( self, method, - url: URL, + url, status=HTTPStatus.OK, response=None, json=None, @@ -177,7 +173,7 @@ class AiohttpClientMockResponse: headers=None, side_effect=None, closing=None, - ) -> None: + ): """Initialize a fake response.""" if json is not None: text = json_dumps(json) @@ -298,25 +294,13 @@ class AiohttpClientMockResponse: raise ClientConnectionError("Connection closed") return self._response - async def __aenter__(self): - """Enter the context manager.""" - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - """Exit the context manager.""" - @contextmanager -def mock_aiohttp_client() -> Iterator[AiohttpClientMocker]: +def mock_aiohttp_client(): """Context manager to mock aiohttp client.""" mocker = AiohttpClientMocker() - def create_session(hass: HomeAssistant, *args: Any, **kwargs: Any) -> ClientSession: + def create_session(hass, *args, **kwargs): session = mocker.create_session(hass.loop) async def close_session(event): @@ -342,7 +326,7 @@ class MockLongPollSideEffect: If queue is empty, will await until done. 
""" - def __init__(self) -> None: + def __init__(self): """Initialize the queue.""" self.semaphore = asyncio.Semaphore(0) self.response_list = [] diff --git a/tests/testing_config/blueprints/automation/test_event_service.yaml b/tests/testing_config/blueprints/automation/test_event_service.yaml index ec11f24fc63..ba7462ed2e0 100644 --- a/tests/testing_config/blueprints/automation/test_event_service.yaml +++ b/tests/testing_config/blueprints/automation/test_event_service.yaml @@ -10,9 +10,9 @@ blueprint: selector: number: mode: "box" -triggers: - trigger: event +trigger: + platform: event event_type: !input trigger_event -actions: +action: service: !input service_to_call entity_id: light.kitchen diff --git a/tests/testing_config/blueprints/automation/test_event_service_legacy_schema.yaml b/tests/testing_config/blueprints/automation/test_event_service_legacy_schema.yaml deleted file mode 100644 index ba7462ed2e0..00000000000 --- a/tests/testing_config/blueprints/automation/test_event_service_legacy_schema.yaml +++ /dev/null @@ -1,18 +0,0 @@ -blueprint: - name: "Call service based on event" - domain: automation - input: - trigger_event: - selector: - text: - service_to_call: - a_number: - selector: - number: - mode: "box" -trigger: - platform: event - event_type: !input trigger_event -action: - service: !input service_to_call - entity_id: light.kitchen diff --git a/tests/util/test_async.py b/tests/util/test_async.py index cda10b69c3f..ac927b1375a 100644 --- a/tests/util/test_async.py +++ b/tests/util/test_async.py @@ -14,26 +14,24 @@ from tests.common import extract_stack_to_frame @patch("concurrent.futures.Future") @patch("threading.get_ident") -def test_run_callback_threadsafe_from_inside_event_loop( - mock_ident: MagicMock, mock_future: MagicMock -) -> None: +def test_run_callback_threadsafe_from_inside_event_loop(mock_ident, _) -> None: """Testing calling run_callback_threadsafe from inside an event loop.""" callback = MagicMock() loop = Mock(spec=["call_soon_threadsafe"]) - loop._thread_id = None + loop._thread_ident = None mock_ident.return_value = 5 hasync.run_callback_threadsafe(loop, callback) assert len(loop.call_soon_threadsafe.mock_calls) == 1 - loop._thread_id = 5 + loop._thread_ident = 5 mock_ident.return_value = 5 with pytest.raises(RuntimeError): hasync.run_callback_threadsafe(loop, callback) assert len(loop.call_soon_threadsafe.mock_calls) == 1 - loop._thread_id = 1 + loop._thread_ident = 1 mock_ident.return_value = 5 hasync.run_callback_threadsafe(loop, callback) assert len(loop.call_soon_threadsafe.mock_calls) == 2 @@ -78,7 +76,7 @@ async def test_run_callback_threadsafe(hass: HomeAssistant) -> None: nonlocal it_ran it_ran = True - with patch.dict(hass.loop.__dict__, {"_thread_id": -1}): + with patch.dict(hass.loop.__dict__, {"_thread_ident": -1}): assert hasync.run_callback_threadsafe(hass.loop, callback) assert it_ran is False @@ -98,7 +96,7 @@ async def test_callback_is_always_scheduled(hass: HomeAssistant) -> None: hasync.shutdown_run_callback_threadsafe(hass.loop) with ( - patch.dict(hass.loop.__dict__, {"_thread_id": -1}), + patch.dict(hass.loop.__dict__, {"_thread_ident": -1}), patch.object(hass.loop, "call_soon_threadsafe") as mock_call_soon_threadsafe, pytest.raises(RuntimeError), ): @@ -199,17 +197,3 @@ async def test_create_eager_task_from_thread_in_integration( "from a thread at homeassistant/components/hue/light.py, line 23: " "self.light.is_on" ) in caplog.text - - -async def test_get_scheduled_timer_handles(hass: HomeAssistant) -> None: - """Test 
get_scheduled_timer_handles returns all scheduled timer handles.""" - loop = hass.loop - timer_handle = loop.call_later(10, lambda: None) - timer_handle2 = loop.call_later(5, lambda: None) - timer_handle3 = loop.call_later(15, lambda: None) - - handles = hasync.get_scheduled_timer_handles(loop) - assert set(handles).issuperset({timer_handle, timer_handle2, timer_handle3}) - timer_handle.cancel() - timer_handle2.cancel() - timer_handle3.cancel() diff --git a/tests/util/test_color.py b/tests/util/test_color.py index c8a5e0c8587..53c243a1e4f 100644 --- a/tests/util/test_color.py +++ b/tests/util/test_color.py @@ -200,17 +200,17 @@ def test_color_hs_to_xy() -> None: def test_rgb_hex_to_rgb_list() -> None: """Test rgb_hex_to_rgb_list.""" - assert color_util.rgb_hex_to_rgb_list("ffffff") == [255, 255, 255] + assert [255, 255, 255] == color_util.rgb_hex_to_rgb_list("ffffff") - assert color_util.rgb_hex_to_rgb_list("000000") == [0, 0, 0] + assert [0, 0, 0] == color_util.rgb_hex_to_rgb_list("000000") - assert color_util.rgb_hex_to_rgb_list("ffffffff") == [255, 255, 255, 255] + assert [255, 255, 255, 255] == color_util.rgb_hex_to_rgb_list("ffffffff") - assert color_util.rgb_hex_to_rgb_list("00000000") == [0, 0, 0, 0] + assert [0, 0, 0, 0] == color_util.rgb_hex_to_rgb_list("00000000") - assert color_util.rgb_hex_to_rgb_list("3399ff") == [51, 153, 255] + assert [51, 153, 255] == color_util.rgb_hex_to_rgb_list("3399ff") - assert color_util.rgb_hex_to_rgb_list("3399ff00") == [51, 153, 255, 0] + assert [51, 153, 255, 0] == color_util.rgb_hex_to_rgb_list("3399ff00") def test_color_name_to_rgb_valid_name() -> None: diff --git a/tests/util/test_dt.py b/tests/util/test_dt.py index 0e8432bbb83..6caca092517 100644 --- a/tests/util/test_dt.py +++ b/tests/util/test_dt.py @@ -294,12 +294,12 @@ def test_parse_time_expression() -> None: assert list(range(0, 60, 5)) == dt_util.parse_time_expression("/5", 0, 59) - assert dt_util.parse_time_expression([2, 1, 3], 0, 59) == [1, 2, 3] + assert [1, 2, 3] == dt_util.parse_time_expression([2, 1, 3], 0, 59) assert list(range(24)) == dt_util.parse_time_expression("*", 0, 23) - assert dt_util.parse_time_expression(42, 0, 59) == [42] - assert dt_util.parse_time_expression("42", 0, 59) == [42] + assert [42] == dt_util.parse_time_expression(42, 0, 59) + assert [42] == dt_util.parse_time_expression("42", 0, 59) with pytest.raises(ValueError): dt_util.parse_time_expression(61, 0, 60) diff --git a/tests/util/test_json.py b/tests/util/test_json.py index 05dab46002d..3a314bb5a1b 100644 --- a/tests/util/test_json.py +++ b/tests/util/test_json.py @@ -131,6 +131,34 @@ def test_json_loads_object() -> None: json_loads_object("null") +async def test_deprecated_test_find_unserializable_data( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test deprecated test_find_unserializable_data logs a warning.""" + # pylint: disable-next=hass-deprecated-import,import-outside-toplevel + from homeassistant.util.json import find_paths_unserializable_data + + find_paths_unserializable_data(1) + assert ( + "uses find_paths_unserializable_data from homeassistant.util.json" + in caplog.text + ) + assert "should be updated to use homeassistant.helpers.json module" in caplog.text + + +async def test_deprecated_save_json( + caplog: pytest.LogCaptureFixture, tmp_path: Path +) -> None: + """Test deprecated save_json logs a warning.""" + # pylint: disable-next=hass-deprecated-import,import-outside-toplevel + from homeassistant.util.json import save_json + + fname = tmp_path / "test1.json" + save_json(fname, 
TEST_JSON_A) + assert "uses save_json from homeassistant.util.json" in caplog.text + assert "should be updated to use homeassistant.helpers.json module" in caplog.text + + async def test_loading_derived_class() -> None: """Test loading data from classes derived from str.""" diff --git a/tests/util/test_logging.py b/tests/util/test_logging.py index 795444c89bd..4667dbcbec8 100644 --- a/tests/util/test_logging.py +++ b/tests/util/test_logging.py @@ -80,7 +80,8 @@ async def test_async_create_catching_coro( """Test exception logging of wrapped coroutine.""" async def job(): - raise Exception("This is a bad coroutine") # noqa: TRY002 + # pylint: disable-next=broad-exception-raised + raise Exception("This is a bad coroutine") hass.async_create_task(logging_util.async_create_catching_coro(job())) await hass.async_block_till_done() diff --git a/tests/util/test_loop.py b/tests/util/test_loop.py index 3ff7128938f..585f32a965f 100644 --- a/tests/util/test_loop.py +++ b/tests/util/test_loop.py @@ -1,7 +1,5 @@ """Tests for async util methods from Python source.""" -from collections.abc import Generator -import contextlib import threading from unittest.mock import Mock, patch @@ -17,14 +15,57 @@ def banned_function(): """Mock banned function.""" -@contextlib.contextmanager -def patch_get_current_frame(stack: list[Mock]) -> Generator[None]: - """Patch get_current_frame.""" - frames = extract_stack_to_frame(stack) +async def test_raise_for_blocking_call_async() -> None: + """Test raise_for_blocking_call detects when called from event loop without integration context.""" + with pytest.raises(RuntimeError): + haloop.raise_for_blocking_call(banned_function) + + +async def test_raise_for_blocking_call_async_non_strict_core( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test non_strict_core raise_for_blocking_call detects from event loop without integration context.""" + haloop.raise_for_blocking_call(banned_function, strict_core=False) + assert "Detected blocking call to banned_function" in caplog.text + assert "Traceback (most recent call last)" in caplog.text + assert ( + "Please create a bug report at https://github.com/home-assistant/core/issues" + in caplog.text + ) + assert ( + "For developers, please see " + "https://developers.home-assistant.io/docs/asyncio_blocking_operations/#banned_function" + ) in caplog.text + + +async def test_raise_for_blocking_call_async_integration( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test raise_for_blocking_call detects and raises when called from event loop from integration context.""" + frames = extract_stack_to_frame( + [ + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="23", + line="do_something()", + ), + Mock( + filename="/home/paulus/homeassistant/components/hue/light.py", + lineno="23", + line="self.light.is_on", + ), + Mock( + filename="/home/paulus/aiohue/lights.py", + lineno="2", + line="something()", + ), + ] + ) with ( + pytest.raises(RuntimeError), patch( "homeassistant.helpers.frame.linecache.getline", - return_value=stack[1].line, + return_value="self.light.is_on", ), patch( "homeassistant.util.loop._get_line_from_cache", @@ -38,104 +79,13 @@ def patch_get_current_frame(stack: list[Mock]) -> Generator[None]: "homeassistant.helpers.frame.get_current_frame", return_value=frames, ), - ): - yield - - -async def test_raise_for_blocking_call_async() -> None: - """Test raise_for_blocking_call detects when called from event loop without integration context.""" - with pytest.raises(RuntimeError): - 
haloop.raise_for_blocking_call(banned_function) - - -async def test_raise_for_blocking_call_async_non_strict_core( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test non_strict_core raise_for_blocking_call detects from event loop without integration context.""" - stack = [ - Mock( - filename="/home/paulus/homeassistant/core.py", - lineno="12", - line="do_something()", - ), - Mock( - filename="/home/paulus/homeassistant/core.py", - lineno="12", - line="self.light.is_on", - ), - Mock( - filename="/home/paulus/aiohue/lights.py", - lineno="2", - line="something()", - ), - ] - with patch_get_current_frame(stack): - haloop.raise_for_blocking_call(banned_function, strict_core=False) - assert "Detected blocking call to banned_function" in caplog.text - assert "Traceback (most recent call last)" in caplog.text - assert ( - "Please create a bug report at https://github.com/home-assistant/core/issues" - in caplog.text - ) - assert ( - "For developers, please see " - "https://developers.home-assistant.io/docs/asyncio_blocking_operations/#banned_function" - ) in caplog.text - - warnings = [ - record for record in caplog.get_records("call") if record.levelname == "WARNING" - ] - assert len(warnings) == 1 - caplog.clear() - - # Second call should log at debug - with patch_get_current_frame(stack): - haloop.raise_for_blocking_call(banned_function, strict_core=False) - - warnings = [ - record for record in caplog.get_records("call") if record.levelname == "WARNING" - ] - assert len(warnings) == 0 - assert ( - "For developers, please see " - "https://developers.home-assistant.io/docs/asyncio_blocking_operations/#banned_function" - ) in caplog.text - - # no expensive traceback on debug - assert "Traceback (most recent call last)" not in caplog.text - - -async def test_raise_for_blocking_call_async_integration( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test raise_for_blocking_call detects and raises when called from event loop from integration context.""" - stack = [ - Mock( - filename="/home/paulus/homeassistant/core.py", - lineno="18", - line="do_something()", - ), - Mock( - filename="/home/paulus/homeassistant/components/hue/light.py", - lineno="18", - line="self.light.is_on", - ), - Mock( - filename="/home/paulus/aiohue/lights.py", - lineno="8", - line="something()", - ), - ] - with ( - pytest.raises(RuntimeError), - patch_get_current_frame(stack), ): haloop.raise_for_blocking_call(banned_function) assert ( "Detected blocking call to banned_function with args None" " inside the event loop by integration" - " 'hue' at homeassistant/components/hue/light.py, line 18: self.light.is_on " - "(offender: /home/paulus/aiohue/lights.py, line 8: mock_line), please create " + " 'hue' at homeassistant/components/hue/light.py, line 23: self.light.is_on " + "(offender: /home/paulus/aiohue/lights.py, line 2: mock_line), please create " "a bug report at https://github.com/home-assistant/core/issues?" 
"q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+hue%22" in caplog.text ) @@ -149,37 +99,55 @@ async def test_raise_for_blocking_call_async_integration_non_strict( caplog: pytest.LogCaptureFixture, ) -> None: """Test raise_for_blocking_call detects when called from event loop from integration context.""" - stack = [ - Mock( - filename="/home/paulus/homeassistant/core.py", - lineno="15", - line="do_something()", + frames = extract_stack_to_frame( + [ + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="23", + line="do_something()", + ), + Mock( + filename="/home/paulus/homeassistant/components/hue/light.py", + lineno="23", + line="self.light.is_on", + ), + Mock( + filename="/home/paulus/aiohue/lights.py", + lineno="2", + line="something()", + ), + ] + ) + with ( + patch( + "homeassistant.helpers.frame.linecache.getline", + return_value="self.light.is_on", ), - Mock( - filename="/home/paulus/homeassistant/components/hue/light.py", - lineno="15", - line="self.light.is_on", + patch( + "homeassistant.util.loop._get_line_from_cache", + return_value="mock_line", ), - Mock( - filename="/home/paulus/aiohue/lights.py", - lineno="1", - line="something()", + patch( + "homeassistant.util.loop.get_current_frame", + return_value=frames, ), - ] - with patch_get_current_frame(stack): + patch( + "homeassistant.helpers.frame.get_current_frame", + return_value=frames, + ), + ): haloop.raise_for_blocking_call(banned_function, strict=False) - assert ( "Detected blocking call to banned_function with args None" " inside the event loop by integration" - " 'hue' at homeassistant/components/hue/light.py, line 15: self.light.is_on " - "(offender: /home/paulus/aiohue/lights.py, line 1: mock_line), " + " 'hue' at homeassistant/components/hue/light.py, line 23: self.light.is_on " + "(offender: /home/paulus/aiohue/lights.py, line 2: mock_line), " "please create a bug report at https://github.com/home-assistant/core/issues?" 
"q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+hue%22" in caplog.text ) assert "Traceback (most recent call last)" in caplog.text assert ( - 'File "/home/paulus/homeassistant/components/hue/light.py", line 15' + 'File "/home/paulus/homeassistant/components/hue/light.py", line 23' in caplog.text ) assert ( @@ -190,62 +158,62 @@ async def test_raise_for_blocking_call_async_integration_non_strict( "For developers, please see " "https://developers.home-assistant.io/docs/asyncio_blocking_operations/#banned_function" ) in caplog.text - warnings = [ - record for record in caplog.get_records("call") if record.levelname == "WARNING" - ] - assert len(warnings) == 1 - caplog.clear() - - # Second call should log at debug - with patch_get_current_frame(stack): - haloop.raise_for_blocking_call(banned_function, strict=False) - - warnings = [ - record for record in caplog.get_records("call") if record.levelname == "WARNING" - ] - assert len(warnings) == 0 - assert ( - "For developers, please see " - "https://developers.home-assistant.io/docs/asyncio_blocking_operations/#banned_function" - ) in caplog.text - # no expensive traceback on debug - assert "Traceback (most recent call last)" not in caplog.text async def test_raise_for_blocking_call_async_custom( caplog: pytest.LogCaptureFixture, ) -> None: """Test raise_for_blocking_call detects when called from event loop with custom component context.""" - stack = [ - Mock( - filename="/home/paulus/homeassistant/core.py", - lineno="12", - line="do_something()", + frames = extract_stack_to_frame( + [ + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="23", + line="do_something()", + ), + Mock( + filename="/home/paulus/config/custom_components/hue/light.py", + lineno="23", + line="self.light.is_on", + ), + Mock( + filename="/home/paulus/aiohue/lights.py", + lineno="2", + line="something()", + ), + ] + ) + with ( + pytest.raises(RuntimeError), + patch( + "homeassistant.helpers.frame.linecache.getline", + return_value="self.light.is_on", ), - Mock( - filename="/home/paulus/config/custom_components/hue/light.py", - lineno="12", - line="self.light.is_on", + patch( + "homeassistant.util.loop._get_line_from_cache", + return_value="mock_line", ), - Mock( - filename="/home/paulus/aiohue/lights.py", - lineno="3", - line="something()", + patch( + "homeassistant.util.loop.get_current_frame", + return_value=frames, ), - ] - with pytest.raises(RuntimeError), patch_get_current_frame(stack): + patch( + "homeassistant.helpers.frame.get_current_frame", + return_value=frames, + ), + ): haloop.raise_for_blocking_call(banned_function) assert ( "Detected blocking call to banned_function with args None" " inside the event loop by custom " - "integration 'hue' at custom_components/hue/light.py, line 12: self.light.is_on" - " (offender: /home/paulus/aiohue/lights.py, line 3: mock_line), " + "integration 'hue' at custom_components/hue/light.py, line 23: self.light.is_on" + " (offender: /home/paulus/aiohue/lights.py, line 2: mock_line), " "please create a bug report at https://github.com/home-assistant/core/issues?" 
"q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+hue%22" ) in caplog.text assert "Traceback (most recent call last)" in caplog.text assert ( - 'File "/home/paulus/config/custom_components/hue/light.py", line 12' + 'File "/home/paulus/config/custom_components/hue/light.py", line 23' in caplog.text ) assert ( diff --git a/tests/util/test_package.py b/tests/util/test_package.py index b7497d620cd..2ead327bf10 100644 --- a/tests/util/test_package.py +++ b/tests/util/test_package.py @@ -1,13 +1,12 @@ """Test Home Assistant package util methods.""" import asyncio -from collections.abc import Generator from importlib.metadata import metadata import logging import os from subprocess import PIPE import sys -from unittest.mock import MagicMock, Mock, call, patch +from unittest.mock import MagicMock, call, patch import pytest @@ -19,11 +18,13 @@ RESOURCE_DIR = os.path.abspath( TEST_NEW_REQ = "pyhelloworld3==1.0.0" -TEST_ZIP_REQ = f"file://{RESOURCE_DIR}/pyhelloworld3.zip#{TEST_NEW_REQ}" +TEST_ZIP_REQ = "file://{}#{}".format( + os.path.join(RESOURCE_DIR, "pyhelloworld3.zip"), TEST_NEW_REQ +) @pytest.fixture -def mock_sys() -> Generator[MagicMock]: +def mock_sys(): """Mock sys.""" with patch("homeassistant.util.package.sys", spec=object) as sys_mock: sys_mock.executable = "python3" @@ -31,19 +32,19 @@ def mock_sys() -> Generator[MagicMock]: @pytest.fixture -def deps_dir() -> str: +def deps_dir(): """Return path to deps directory.""" return os.path.abspath("/deps_dir") @pytest.fixture -def lib_dir(deps_dir) -> str: +def lib_dir(deps_dir): """Return path to lib directory.""" return os.path.join(deps_dir, "lib_dir") @pytest.fixture -def mock_popen(lib_dir) -> Generator[MagicMock]: +def mock_popen(lib_dir): """Return a Popen mock.""" with patch("homeassistant.util.package.Popen") as popen_mock: popen_mock.return_value.__enter__ = popen_mock @@ -56,7 +57,7 @@ def mock_popen(lib_dir) -> Generator[MagicMock]: @pytest.fixture -def mock_env_copy() -> Generator[Mock]: +def mock_env_copy(): """Mock os.environ.copy.""" with patch("homeassistant.util.package.os.environ.copy") as env_copy: env_copy.return_value = {} @@ -64,14 +65,14 @@ def mock_env_copy() -> Generator[Mock]: @pytest.fixture -def mock_venv() -> Generator[MagicMock]: +def mock_venv(): """Mock homeassistant.util.package.is_virtual_env.""" with patch("homeassistant.util.package.is_virtual_env") as mock: mock.return_value = True yield mock -def mock_async_subprocess() -> Generator[MagicMock]: +def mock_async_subprocess(): """Return an async Popen mock.""" async_popen = MagicMock() @@ -84,26 +85,13 @@ def mock_async_subprocess() -> Generator[MagicMock]: return async_popen -@pytest.mark.usefixtures("mock_venv") -def test_install( - mock_popen: MagicMock, mock_env_copy: MagicMock, mock_sys: MagicMock -) -> None: +def test_install(mock_sys, mock_popen, mock_env_copy, mock_venv) -> None: """Test an install attempt on a package that doesn't exist.""" env = mock_env_copy() assert package.install_package(TEST_NEW_REQ, False) assert mock_popen.call_count == 2 assert mock_popen.mock_calls[0] == call( - [ - mock_sys.executable, - "-m", - "uv", - "pip", - "install", - "--quiet", - TEST_NEW_REQ, - "--index-strategy", - "unsafe-first-match", - ], + [mock_sys.executable, "-m", "pip", "install", "--quiet", TEST_NEW_REQ], stdin=PIPE, stdout=PIPE, stderr=PIPE, @@ -113,38 +101,7 @@ def test_install( assert mock_popen.return_value.communicate.call_count == 1 -@pytest.mark.usefixtures("mock_venv") -def test_install_with_timeout( - mock_popen: MagicMock, mock_env_copy: 
MagicMock, mock_sys: MagicMock -) -> None: - """Test an install attempt on a package that doesn't exist with a timeout set.""" - env = mock_env_copy() - assert package.install_package(TEST_NEW_REQ, False, timeout=10) - assert mock_popen.call_count == 2 - env["HTTP_TIMEOUT"] = "10" - assert mock_popen.mock_calls[0] == call( - [ - mock_sys.executable, - "-m", - "uv", - "pip", - "install", - "--quiet", - TEST_NEW_REQ, - "--index-strategy", - "unsafe-first-match", - ], - stdin=PIPE, - stdout=PIPE, - stderr=PIPE, - env=env, - close_fds=False, - ) - assert mock_popen.return_value.communicate.call_count == 1 - - -@pytest.mark.usefixtures("mock_venv") -def test_install_upgrade(mock_popen, mock_env_copy, mock_sys) -> None: +def test_install_upgrade(mock_sys, mock_popen, mock_env_copy, mock_venv) -> None: """Test an upgrade attempt on a package.""" env = mock_env_copy() assert package.install_package(TEST_NEW_REQ) @@ -153,13 +110,10 @@ def test_install_upgrade(mock_popen, mock_env_copy, mock_sys) -> None: [ mock_sys.executable, "-m", - "uv", "pip", "install", "--quiet", TEST_NEW_REQ, - "--index-strategy", - "unsafe-first-match", "--upgrade", ], stdin=PIPE, @@ -171,39 +125,21 @@ def test_install_upgrade(mock_popen, mock_env_copy, mock_sys) -> None: assert mock_popen.return_value.communicate.call_count == 1 -@pytest.mark.parametrize( - "is_venv", - [ - True, - False, - ], -) -def test_install_target( - mock_sys: MagicMock, - mock_popen: MagicMock, - mock_env_copy: MagicMock, - mock_venv: MagicMock, - is_venv: bool, -) -> None: +def test_install_target(mock_sys, mock_popen, mock_env_copy, mock_venv) -> None: """Test an install with a target.""" target = "target_folder" env = mock_env_copy() - abs_target = os.path.abspath(target) - env["PYTHONUSERBASE"] = abs_target - mock_venv.return_value = is_venv + env["PYTHONUSERBASE"] = os.path.abspath(target) + mock_venv.return_value = False mock_sys.platform = "linux" args = [ mock_sys.executable, "-m", - "uv", "pip", "install", "--quiet", TEST_NEW_REQ, - "--index-strategy", - "unsafe-first-match", - "--target", - abs_target, + "--user", ] assert package.install_package(TEST_NEW_REQ, False, target=target) @@ -214,91 +150,16 @@ def test_install_target( assert mock_popen.return_value.communicate.call_count == 1 -@pytest.mark.parametrize( - ("in_venv", "additional_env_vars"), - [ - (True, {}), - (False, {"UV_SYSTEM_PYTHON": "true"}), - (False, {"UV_PYTHON": "python3"}), - (False, {"UV_SYSTEM_PYTHON": "true", "UV_PYTHON": "python3"}), - ], - ids=["in_venv", "UV_SYSTEM_PYTHON", "UV_PYTHON", "UV_SYSTEM_PYTHON and UV_PYTHON"], -) -def test_install_pip_compatibility_no_workaround( - mock_sys: MagicMock, - mock_popen: MagicMock, - mock_env_copy: MagicMock, - mock_venv: MagicMock, - in_venv: bool, - additional_env_vars: dict[str, str], +def test_install_target_venv(mock_sys, mock_popen, mock_env_copy, mock_venv) -> None: + """Test an install with a target in a virtual environment.""" + target = "target_folder" + with pytest.raises(AssertionError): + package.install_package(TEST_NEW_REQ, False, target=target) + + +def test_install_error( + caplog: pytest.LogCaptureFixture, mock_sys, mock_popen, mock_venv ) -> None: - """Test install will not use pip fallback.""" - env = mock_env_copy() - env.update(additional_env_vars) - mock_venv.return_value = in_venv - mock_sys.platform = "linux" - args = [ - mock_sys.executable, - "-m", - "uv", - "pip", - "install", - "--quiet", - TEST_NEW_REQ, - "--index-strategy", - "unsafe-first-match", - ] - - assert 
package.install_package(TEST_NEW_REQ, False) - assert mock_popen.call_count == 2 - assert mock_popen.mock_calls[0] == call( - args, stdin=PIPE, stdout=PIPE, stderr=PIPE, env=env, close_fds=False - ) - assert mock_popen.return_value.communicate.call_count == 1 - - -def test_install_pip_compatibility_use_workaround( - mock_sys: MagicMock, - mock_popen: MagicMock, - mock_env_copy: MagicMock, - mock_venv: MagicMock, -) -> None: - """Test install will use pip compatibility fallback.""" - env = mock_env_copy() - mock_venv.return_value = False - mock_sys.platform = "linux" - python = "python3" - mock_sys.executable = python - site_dir = "/site_dir" - args = [ - mock_sys.executable, - "-m", - "uv", - "pip", - "install", - "--quiet", - TEST_NEW_REQ, - "--index-strategy", - "unsafe-first-match", - "--python", - python, - "--target", - site_dir, - ] - - with patch("homeassistant.util.package.site", autospec=True) as site_mock: - site_mock.getusersitepackages.return_value = site_dir - assert package.install_package(TEST_NEW_REQ, False) - - assert mock_popen.call_count == 2 - assert mock_popen.mock_calls[0] == call( - args, stdin=PIPE, stdout=PIPE, stderr=PIPE, env=env, close_fds=False - ) - assert mock_popen.return_value.communicate.call_count == 1 - - -@pytest.mark.usefixtures("mock_sys", "mock_venv") -def test_install_error(caplog: pytest.LogCaptureFixture, mock_popen) -> None: """Test an install that errors out.""" caplog.set_level(logging.WARNING) mock_popen.return_value.returncode = 1 @@ -308,8 +169,7 @@ def test_install_error(caplog: pytest.LogCaptureFixture, mock_popen) -> None: assert record.levelname == "ERROR" -@pytest.mark.usefixtures("mock_venv") -def test_install_constraint(mock_popen, mock_env_copy, mock_sys) -> None: +def test_install_constraint(mock_sys, mock_popen, mock_env_copy, mock_venv) -> None: """Test install with constraint file on not installed package.""" env = mock_env_copy() constraints = "constraints_file.txt" @@ -319,13 +179,10 @@ def test_install_constraint(mock_popen, mock_env_copy, mock_sys) -> None: [ mock_sys.executable, "-m", - "uv", "pip", "install", "--quiet", TEST_NEW_REQ, - "--index-strategy", - "unsafe-first-match", "--constraint", constraints, ], diff --git a/tests/util/test_process.py b/tests/util/test_process.py index 999abe0476f..ae28f5d82fc 100644 --- a/tests/util/test_process.py +++ b/tests/util/test_process.py @@ -1,25 +1,20 @@ """Test process util.""" -from functools import partial import os import subprocess import pytest -from homeassistant.core import HomeAssistant from homeassistant.util import process -async def test_kill_process(hass: HomeAssistant) -> None: +async def test_kill_process() -> None: """Test killing a process.""" - sleeper = await hass.async_add_executor_job( - partial( # noqa: S604 # shell by design - subprocess.Popen, - "sleep 1000", - shell=True, - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - ) + sleeper = subprocess.Popen( + "sleep 1000", + shell=True, # noqa: S602 # shell by design + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, ) pid = sleeper.pid diff --git a/tests/util/test_ssl.py b/tests/util/test_ssl.py index c0cd2fdba10..d0c7ce3bfb6 100644 --- a/tests/util/test_ssl.py +++ b/tests/util/test_ssl.py @@ -5,6 +5,7 @@ from unittest.mock import MagicMock, Mock, patch import pytest from homeassistant.util.ssl import ( + SSL_CIPHER_LISTS, SSLCipherList, client_context, create_no_verify_ssl_context, @@ -24,13 +25,14 @@ def test_client_context(mock_sslcontext) -> None: 
mock_sslcontext.set_ciphers.assert_not_called() client_context(SSLCipherList.MODERN) - mock_sslcontext.set_ciphers.assert_not_called() + mock_sslcontext.set_ciphers.assert_called_with( + SSL_CIPHER_LISTS[SSLCipherList.MODERN] + ) client_context(SSLCipherList.INTERMEDIATE) - mock_sslcontext.set_ciphers.assert_not_called() - - client_context(SSLCipherList.INSECURE) - mock_sslcontext.set_ciphers.assert_not_called() + mock_sslcontext.set_ciphers.assert_called_with( + SSL_CIPHER_LISTS[SSLCipherList.INTERMEDIATE] + ) def test_no_verify_ssl_context(mock_sslcontext) -> None: @@ -40,13 +42,14 @@ def test_no_verify_ssl_context(mock_sslcontext) -> None: mock_sslcontext.set_ciphers.assert_not_called() create_no_verify_ssl_context(SSLCipherList.MODERN) - mock_sslcontext.set_ciphers.assert_not_called() + mock_sslcontext.set_ciphers.assert_called_with( + SSL_CIPHER_LISTS[SSLCipherList.MODERN] + ) create_no_verify_ssl_context(SSLCipherList.INTERMEDIATE) - mock_sslcontext.set_ciphers.assert_not_called() - - create_no_verify_ssl_context(SSLCipherList.INSECURE) - mock_sslcontext.set_ciphers.assert_not_called() + mock_sslcontext.set_ciphers.assert_called_with( + SSL_CIPHER_LISTS[SSLCipherList.INTERMEDIATE] + ) def test_ssl_context_caching() -> None: diff --git a/tests/util/test_timeout.py b/tests/util/test_timeout.py index 5e8261c4c02..797c849db3c 100644 --- a/tests/util/test_timeout.py +++ b/tests/util/test_timeout.py @@ -25,7 +25,7 @@ async def test_simple_global_timeout_with_executor_job(hass: HomeAssistant) -> N with pytest.raises(TimeoutError): async with timeout.async_timeout(0.1): - await hass.async_add_executor_job(time.sleep, 0.2) + await hass.async_add_executor_job(lambda: time.sleep(0.2)) async def test_simple_global_timeout_freeze() -> None: @@ -133,7 +133,7 @@ async def test_mix_global_timeout_freeze_and_zone_freeze_inside_executor_job_sec async with timeout.async_timeout(0.1): async with timeout.async_timeout(0.2, zone_name="recorder"): await hass.async_add_executor_job(_some_sync_work) - await hass.async_add_executor_job(time.sleep, 0.2) + await hass.async_add_executor_job(lambda: time.sleep(0.2)) async def test_simple_global_timeout_freeze_with_executor_job( @@ -143,63 +143,7 @@ async def test_simple_global_timeout_freeze_with_executor_job( timeout = TimeoutManager() async with timeout.async_timeout(0.2), timeout.async_freeze(): - await hass.async_add_executor_job(time.sleep, 0.3) - - -async def test_simple_global_timeout_does_not_leak_upward( - hass: HomeAssistant, -) -> None: - """Test a global timeout does not leak upward.""" - timeout = TimeoutManager() - current_task = asyncio.current_task() - assert current_task is not None - cancelling_inside_timeout = None - - with pytest.raises(asyncio.TimeoutError): # noqa: PT012 - async with timeout.async_timeout(0.1): - cancelling_inside_timeout = current_task.cancelling() - await asyncio.sleep(0.3) - - assert cancelling_inside_timeout == 0 - # After the context manager exits, the task should no longer be cancelling - assert current_task.cancelling() == 0 - - -async def test_simple_global_timeout_does_swallow_cancellation( - hass: HomeAssistant, -) -> None: - """Test a global timeout does not swallow cancellation.""" - timeout = TimeoutManager() - current_task = asyncio.current_task() - assert current_task is not None - cancelling_inside_timeout = None - - async def task_with_timeout() -> None: - nonlocal cancelling_inside_timeout - new_task = asyncio.current_task() - assert new_task is not None - with pytest.raises(asyncio.TimeoutError): # noqa: 
PT012 - cancelling_inside_timeout = new_task.cancelling() - async with timeout.async_timeout(0.1): - await asyncio.sleep(0.3) - - # After the context manager exits, the task should no longer be cancelling - assert current_task.cancelling() == 0 - - task = asyncio.create_task(task_with_timeout()) - await asyncio.sleep(0) - task.cancel() - assert task.cancelling() == 1 - - assert cancelling_inside_timeout == 0 - # Cancellation should not leak into the current task - assert current_task.cancelling() == 0 - # Cancellation should not be swallowed if the task is cancelled - # and it also times out - await asyncio.sleep(0) - with pytest.raises(asyncio.CancelledError): - await task - assert task.cancelling() == 1 + await hass.async_add_executor_job(lambda: time.sleep(0.3)) async def test_simple_global_timeout_freeze_reset() -> None: @@ -222,62 +166,6 @@ async def test_simple_zone_timeout() -> None: await asyncio.sleep(0.3) -async def test_simple_zone_timeout_does_not_leak_upward( - hass: HomeAssistant, -) -> None: - """Test a zone timeout does not leak upward.""" - timeout = TimeoutManager() - current_task = asyncio.current_task() - assert current_task is not None - cancelling_inside_timeout = None - - with pytest.raises(asyncio.TimeoutError): # noqa: PT012 - async with timeout.async_timeout(0.1, "test"): - cancelling_inside_timeout = current_task.cancelling() - await asyncio.sleep(0.3) - - assert cancelling_inside_timeout == 0 - # After the context manager exits, the task should no longer be cancelling - assert current_task.cancelling() == 0 - - -async def test_simple_zone_timeout_does_swallow_cancellation( - hass: HomeAssistant, -) -> None: - """Test a zone timeout does not swallow cancellation.""" - timeout = TimeoutManager() - current_task = asyncio.current_task() - assert current_task is not None - cancelling_inside_timeout = None - - async def task_with_timeout() -> None: - nonlocal cancelling_inside_timeout - new_task = asyncio.current_task() - assert new_task is not None - with pytest.raises(asyncio.TimeoutError): # noqa: PT012 - async with timeout.async_timeout(0.1, "test"): - cancelling_inside_timeout = current_task.cancelling() - await asyncio.sleep(0.3) - - # After the context manager exits, the task should no longer be cancelling - assert current_task.cancelling() == 0 - - task = asyncio.create_task(task_with_timeout()) - await asyncio.sleep(0) - task.cancel() - assert task.cancelling() == 1 - - # Cancellation should not leak into the current task - assert cancelling_inside_timeout == 0 - assert current_task.cancelling() == 0 - # Cancellation should not be swallowed if the task is cancelled - # and it also times out - await asyncio.sleep(0) - with pytest.raises(asyncio.CancelledError): - await task - assert task.cancelling() == 1 - - async def test_multiple_zone_timeout() -> None: """Test a simple zone timeout.""" timeout = TimeoutManager() @@ -439,7 +327,7 @@ async def test_simple_zone_timeout_freeze_without_timeout_exeption() -> None: await asyncio.sleep(0.4) -async def test_simple_zone_timeout_zone_with_timeout_exception() -> None: +async def test_simple_zone_timeout_zone_with_timeout_exeption() -> None: """Test a simple zone timeout freeze on a zone that does not have a timeout set.""" timeout = TimeoutManager() @@ -450,24 +338,3 @@ async def test_simple_zone_timeout_zone_with_timeout_exception() -> None: raise RuntimeError await asyncio.sleep(0.3) - - -async def test_multiple_global_freezes(hass: HomeAssistant) -> None: - """Test multiple global freezes.""" - timeout = 
TimeoutManager() - - async def background(delay: float) -> None: - async with timeout.async_freeze(): - await asyncio.sleep(delay) - - async with timeout.async_timeout(0.1): - task = hass.async_create_task(background(0.2)) - async with timeout.async_freeze(): - await asyncio.sleep(0.1) - await task - - async with timeout.async_timeout(0.1): - task = hass.async_create_task(background(0.2)) - async with timeout.async_freeze(): - await asyncio.sleep(0.3) - await task diff --git a/tests/util/test_unit_conversion.py b/tests/util/test_unit_conversion.py index 609809a96e8..98a6a1da5a6 100644 --- a/tests/util/test_unit_conversion.py +++ b/tests/util/test_unit_conversion.py @@ -11,7 +11,6 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_BILLION, CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, - UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -33,7 +32,6 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.util import unit_conversion from homeassistant.util.unit_conversion import ( BaseUnitConverter, - BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -61,7 +59,6 @@ INVALID_SYMBOL = "bob" _ALL_CONVERTERS: dict[type[BaseUnitConverter], list[str | None]] = { converter: sorted(converter.VALID_UNITS, key=lambda x: (x is None, x)) for converter in ( - BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -83,14 +80,9 @@ _ALL_CONVERTERS: dict[type[BaseUnitConverter], list[str | None]] = { # Dict containing all converters with a corresponding unit ratio. _GET_UNIT_RATIO: dict[type[BaseUnitConverter], tuple[str | None, str | None, float]] = { - BloodGlucoseConcentrationConverter: ( - UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, - UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, - 18, - ), ConductivityConverter: ( - UnitOfConductivity.MICROSIEMENS_PER_CM, - UnitOfConductivity.MILLISIEMENS_PER_CM, + UnitOfConductivity.MICROSIEMENS, + UnitOfConductivity.MILLISIEMENS, 1000, ), DataRateConverter: ( @@ -138,99 +130,13 @@ _GET_UNIT_RATIO: dict[type[BaseUnitConverter], tuple[str | None, str | None, flo _CONVERTED_VALUE: dict[ type[BaseUnitConverter], list[tuple[float, str | None, float, str | None]] ] = { - BloodGlucoseConcentrationConverter: [ - ( - 90, - UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, - 5, - UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, - ), - ( - 1, - UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, - 18, - UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, - ), - ], ConductivityConverter: [ - # Deprecated to deprecated (5, UnitOfConductivity.SIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS), (5, UnitOfConductivity.SIEMENS, 5e6, UnitOfConductivity.MICROSIEMENS), (5, UnitOfConductivity.MILLISIEMENS, 5e3, UnitOfConductivity.MICROSIEMENS), (5, UnitOfConductivity.MILLISIEMENS, 5e-3, UnitOfConductivity.SIEMENS), (5e6, UnitOfConductivity.MICROSIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS), (5e6, UnitOfConductivity.MICROSIEMENS, 5, UnitOfConductivity.SIEMENS), - # Deprecated to new - (5, UnitOfConductivity.SIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS_PER_CM), - (5, UnitOfConductivity.SIEMENS, 5e6, UnitOfConductivity.MICROSIEMENS_PER_CM), - ( - 5, - UnitOfConductivity.MILLISIEMENS, - 5e3, - UnitOfConductivity.MICROSIEMENS_PER_CM, - ), - (5, UnitOfConductivity.MILLISIEMENS, 5e-3, UnitOfConductivity.SIEMENS_PER_CM), - ( - 5e6, - UnitOfConductivity.MICROSIEMENS, - 5e3, - 
UnitOfConductivity.MILLISIEMENS_PER_CM, - ), - (5e6, UnitOfConductivity.MICROSIEMENS, 5, UnitOfConductivity.SIEMENS_PER_CM), - # New to deprecated - (5, UnitOfConductivity.SIEMENS_PER_CM, 5e3, UnitOfConductivity.MILLISIEMENS), - (5, UnitOfConductivity.SIEMENS_PER_CM, 5e6, UnitOfConductivity.MICROSIEMENS), - ( - 5, - UnitOfConductivity.MILLISIEMENS_PER_CM, - 5e3, - UnitOfConductivity.MICROSIEMENS, - ), - (5, UnitOfConductivity.MILLISIEMENS_PER_CM, 5e-3, UnitOfConductivity.SIEMENS), - ( - 5e6, - UnitOfConductivity.MICROSIEMENS_PER_CM, - 5e3, - UnitOfConductivity.MILLISIEMENS, - ), - (5e6, UnitOfConductivity.MICROSIEMENS_PER_CM, 5, UnitOfConductivity.SIEMENS), - # New to new - ( - 5, - UnitOfConductivity.SIEMENS_PER_CM, - 5e3, - UnitOfConductivity.MILLISIEMENS_PER_CM, - ), - ( - 5, - UnitOfConductivity.SIEMENS_PER_CM, - 5e6, - UnitOfConductivity.MICROSIEMENS_PER_CM, - ), - ( - 5, - UnitOfConductivity.MILLISIEMENS_PER_CM, - 5e3, - UnitOfConductivity.MICROSIEMENS_PER_CM, - ), - ( - 5, - UnitOfConductivity.MILLISIEMENS_PER_CM, - 5e-3, - UnitOfConductivity.SIEMENS_PER_CM, - ), - ( - 5e6, - UnitOfConductivity.MICROSIEMENS_PER_CM, - 5e3, - UnitOfConductivity.MILLISIEMENS_PER_CM, - ), - ( - 5e6, - UnitOfConductivity.MICROSIEMENS_PER_CM, - 5, - UnitOfConductivity.SIEMENS_PER_CM, - ), ], DataRateConverter: [ (8e3, UnitOfDataRate.BITS_PER_SECOND, 8, UnitOfDataRate.KILOBITS_PER_SECOND), @@ -267,13 +173,6 @@ _CONVERTED_VALUE: dict[ (5, UnitOfLength.MILES, 8800.0, UnitOfLength.YARDS), (5, UnitOfLength.MILES, 26400.0008448, UnitOfLength.FEET), (5, UnitOfLength.MILES, 316800.171072, UnitOfLength.INCHES), - (5, UnitOfLength.NAUTICAL_MILES, 9.26, UnitOfLength.KILOMETERS), - (5, UnitOfLength.NAUTICAL_MILES, 9260.0, UnitOfLength.METERS), - (5, UnitOfLength.NAUTICAL_MILES, 926000.0, UnitOfLength.CENTIMETERS), - (5, UnitOfLength.NAUTICAL_MILES, 9260000.0, UnitOfLength.MILLIMETERS), - (5, UnitOfLength.NAUTICAL_MILES, 10126.859142607176, UnitOfLength.YARDS), - (5, UnitOfLength.NAUTICAL_MILES, 30380.57742782153, UnitOfLength.FEET), - (5, UnitOfLength.NAUTICAL_MILES, 364566.9291338583, UnitOfLength.INCHES), (5, UnitOfLength.YARDS, 0.004572, UnitOfLength.KILOMETERS), (5, UnitOfLength.YARDS, 4.572, UnitOfLength.METERS), (5, UnitOfLength.YARDS, 457.2, UnitOfLength.CENTIMETERS), @@ -379,32 +278,14 @@ _CONVERTED_VALUE: dict[ EnergyConverter: [ (10, UnitOfEnergy.WATT_HOUR, 0.01, UnitOfEnergy.KILO_WATT_HOUR), (10, UnitOfEnergy.WATT_HOUR, 0.00001, UnitOfEnergy.MEGA_WATT_HOUR), - (10, UnitOfEnergy.WATT_HOUR, 0.00000001, UnitOfEnergy.GIGA_WATT_HOUR), - (10, UnitOfEnergy.WATT_HOUR, 0.00000000001, UnitOfEnergy.TERA_WATT_HOUR), (10, UnitOfEnergy.KILO_WATT_HOUR, 10000, UnitOfEnergy.WATT_HOUR), (10, UnitOfEnergy.KILO_WATT_HOUR, 0.01, UnitOfEnergy.MEGA_WATT_HOUR), (10, UnitOfEnergy.MEGA_WATT_HOUR, 10000000, UnitOfEnergy.WATT_HOUR), (10, UnitOfEnergy.MEGA_WATT_HOUR, 10000, UnitOfEnergy.KILO_WATT_HOUR), - (10, UnitOfEnergy.GIGA_WATT_HOUR, 10e6, UnitOfEnergy.KILO_WATT_HOUR), - (10, UnitOfEnergy.GIGA_WATT_HOUR, 10e9, UnitOfEnergy.WATT_HOUR), - (10, UnitOfEnergy.TERA_WATT_HOUR, 10e9, UnitOfEnergy.KILO_WATT_HOUR), - (10, UnitOfEnergy.TERA_WATT_HOUR, 10e12, UnitOfEnergy.WATT_HOUR), - (10, UnitOfEnergy.GIGA_JOULE, 2777.78, UnitOfEnergy.KILO_WATT_HOUR), - (10, UnitOfEnergy.GIGA_JOULE, 2.77778, UnitOfEnergy.MEGA_WATT_HOUR), - (10, UnitOfEnergy.MEGA_JOULE, 2.77778, UnitOfEnergy.KILO_WATT_HOUR), - (10, UnitOfEnergy.MEGA_JOULE, 2.77778e-3, UnitOfEnergy.MEGA_WATT_HOUR), - (10, UnitOfEnergy.KILO_JOULE, 2.77778, UnitOfEnergy.WATT_HOUR), - (10, 
UnitOfEnergy.KILO_JOULE, 2.77778e-6, UnitOfEnergy.MEGA_WATT_HOUR), - (10, UnitOfEnergy.JOULE, 2.77778e-3, UnitOfEnergy.WATT_HOUR), - (10, UnitOfEnergy.JOULE, 2.390057, UnitOfEnergy.CALORIE), - (10, UnitOfEnergy.CALORIE, 0.01, UnitOfEnergy.KILO_CALORIE), - (10, UnitOfEnergy.CALORIE, 0.011622222, UnitOfEnergy.WATT_HOUR), - (10, UnitOfEnergy.KILO_CALORIE, 0.01, UnitOfEnergy.MEGA_CALORIE), - (10, UnitOfEnergy.KILO_CALORIE, 0.011622222, UnitOfEnergy.KILO_WATT_HOUR), - (10, UnitOfEnergy.MEGA_CALORIE, 0.01, UnitOfEnergy.GIGA_CALORIE), - (10, UnitOfEnergy.MEGA_CALORIE, 0.011622222, UnitOfEnergy.MEGA_WATT_HOUR), - (10, UnitOfEnergy.GIGA_CALORIE, 10000, UnitOfEnergy.MEGA_CALORIE), - (10, UnitOfEnergy.GIGA_CALORIE, 11.622222, UnitOfEnergy.MEGA_WATT_HOUR), + (10, UnitOfEnergy.GIGA_JOULE, 10000 / 3.6, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.GIGA_JOULE, 10 / 3.6, UnitOfEnergy.MEGA_WATT_HOUR), + (10, UnitOfEnergy.MEGA_JOULE, 10 / 3.6, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.MEGA_JOULE, 0.010 / 3.6, UnitOfEnergy.MEGA_WATT_HOUR), ], InformationConverter: [ (8e3, UnitOfInformation.BITS, 8, UnitOfInformation.KILOBITS), @@ -467,9 +348,6 @@ _CONVERTED_VALUE: dict[ ], PowerConverter: [ (10, UnitOfPower.KILO_WATT, 10000, UnitOfPower.WATT), - (10, UnitOfPower.MEGA_WATT, 10e6, UnitOfPower.WATT), - (10, UnitOfPower.GIGA_WATT, 10e9, UnitOfPower.WATT), - (10, UnitOfPower.TERA_WATT, 10e12, UnitOfPower.WATT), (10, UnitOfPower.WATT, 0.01, UnitOfPower.KILO_WATT), ], PressureConverter: [ @@ -541,20 +419,6 @@ _CONVERTED_VALUE: dict[ 708661.42, UnitOfVolumetricFlux.INCHES_PER_HOUR, ), - # 5 m/s * 1000 = 5000 mm/s - ( - 5, - UnitOfSpeed.METERS_PER_SECOND, - 5000, - UnitOfSpeed.MILLIMETERS_PER_SECOND, - ), - # 5 m/s ÷ 0.0254 = 196.8503937 in/s - ( - 5, - UnitOfSpeed.METERS_PER_SECOND, - 5 / 0.0254, - UnitOfSpeed.INCHES_PER_SECOND, - ), # 5000 in/h / 39.3701 in/m / 3600 s/h = 0.03528 m/s ( 5000, diff --git a/tests/util/test_unit_system.py b/tests/util/test_unit_system.py index c08555840bb..033631563f4 100644 --- a/tests/util/test_unit_system.py +++ b/tests/util/test_unit_system.py @@ -15,7 +15,6 @@ from homeassistant.const import ( WIND_SPEED, UnitOfLength, UnitOfMass, - UnitOfPrecipitationDepth, UnitOfPressure, UnitOfSpeed, UnitOfTemperature, @@ -43,7 +42,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + accumulated_precipitation=UnitOfLength.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -56,7 +55,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + accumulated_precipitation=UnitOfLength.MILLIMETERS, conversions={}, length=INVALID_UNIT, mass=UnitOfMass.GRAMS, @@ -69,7 +68,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + accumulated_precipitation=UnitOfLength.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -82,7 +81,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + accumulated_precipitation=UnitOfLength.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -95,7 +94,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - 
accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + accumulated_precipitation=UnitOfLength.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=INVALID_UNIT, @@ -108,7 +107,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + accumulated_precipitation=UnitOfLength.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -413,11 +412,6 @@ def test_get_unit_system_invalid(key: str) -> None: UnitOfSpeed.FEET_PER_SECOND, UnitOfSpeed.KILOMETERS_PER_HOUR, ), - ( - SensorDeviceClass.SPEED, - UnitOfSpeed.INCHES_PER_SECOND, - UnitOfSpeed.MILLIMETERS_PER_SECOND, - ), ( SensorDeviceClass.SPEED, UnitOfSpeed.MILES_PER_HOUR, @@ -525,7 +519,6 @@ UNCONVERTED_UNITS_METRIC_SYSTEM = { UnitOfSpeed.KILOMETERS_PER_HOUR, UnitOfSpeed.KNOTS, UnitOfSpeed.METERS_PER_SECOND, - UnitOfSpeed.MILLIMETERS_PER_SECOND, UnitOfVolumetricFlux.MILLIMETERS_PER_DAY, UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, ), @@ -667,11 +660,6 @@ def test_metric_converted_units(device_class: SensorDeviceClass) -> None: ), (SensorDeviceClass.SPEED, UnitOfVolumetricFlux.INCHES_PER_DAY, None), (SensorDeviceClass.SPEED, UnitOfVolumetricFlux.INCHES_PER_HOUR, None), - ( - SensorDeviceClass.SPEED, - UnitOfSpeed.MILLIMETERS_PER_SECOND, - UnitOfSpeed.INCHES_PER_SECOND, - ), (SensorDeviceClass.SPEED, "very_fast", None), # Test volume conversion (SensorDeviceClass.VOLUME, UnitOfVolume.CUBIC_METERS, UnitOfVolume.CUBIC_FEET), @@ -725,7 +713,6 @@ UNCONVERTED_UNITS_US_SYSTEM = { SensorDeviceClass.DISTANCE: ( UnitOfLength.FEET, UnitOfLength.INCHES, - UnitOfLength.NAUTICAL_MILES, UnitOfLength.MILES, UnitOfLength.YARDS, ), @@ -741,7 +728,6 @@ UNCONVERTED_UNITS_US_SYSTEM = { UnitOfSpeed.FEET_PER_SECOND, UnitOfSpeed.KNOTS, UnitOfSpeed.MILES_PER_HOUR, - UnitOfSpeed.INCHES_PER_SECOND, UnitOfVolumetricFlux.INCHES_PER_DAY, UnitOfVolumetricFlux.INCHES_PER_HOUR, ), diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index 12a7eca5f9d..6ea3f1437af 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -1,14 +1,15 @@ """Test Home Assistant yaml loader.""" -from collections.abc import Generator import importlib import io import os import pathlib from typing import Any +import unittest from unittest.mock import Mock, patch import pytest +from typing_extensions import Generator import voluptuous as vol import yaml as pyyaml @@ -18,7 +19,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.util import yaml from homeassistant.util.yaml import loader as yaml_loader -from tests.common import extract_stack_to_frame +from tests.common import extract_stack_to_frame, get_test_config_dir, patch_yaml_files @pytest.fixture(params=["enable_c_loader", "disable_c_loader"]) @@ -395,6 +396,145 @@ def test_dump_unicode() -> None: assert yaml.dump({"a": None, "b": "привет"}) == "a:\nb: привет\n" +FILES = {} + + +def load_yaml(fname, string, secrets=None): + """Write a string to file and return the parsed yaml.""" + FILES[fname] = string + with patch_yaml_files(FILES): + return load_yaml_config_file(fname, secrets) + + +class TestSecrets(unittest.TestCase): + """Test the secrets parameter in the yaml utility.""" + + def setUp(self): + """Create & load secrets file.""" + config_dir = get_test_config_dir() + self._yaml_path = os.path.join(config_dir, YAML_CONFIG_FILE) + self._secret_path = os.path.join(config_dir, yaml.SECRET_YAML) + self._sub_folder_path = 
os.path.join(config_dir, "subFolder") + self._unrelated_path = os.path.join(config_dir, "unrelated") + + load_yaml( + self._secret_path, + ( + "http_pw: pwhttp\n" + "comp1_un: un1\n" + "comp1_pw: pw1\n" + "stale_pw: not_used\n" + "logger: debug\n" + ), + ) + self._yaml = load_yaml( + self._yaml_path, + ( + "http:\n" + " api_password: !secret http_pw\n" + "component:\n" + " username: !secret comp1_un\n" + " password: !secret comp1_pw\n" + "" + ), + yaml_loader.Secrets(config_dir), + ) + + def tearDown(self): + """Clean up secrets.""" + FILES.clear() + + def test_secrets_from_yaml(self): + """Did secrets load ok.""" + expected = {"api_password": "pwhttp"} + assert expected == self._yaml["http"] + + expected = {"username": "un1", "password": "pw1"} + assert expected == self._yaml["component"] + + def test_secrets_from_parent_folder(self): + """Test loading secrets from parent folder.""" + expected = {"api_password": "pwhttp"} + self._yaml = load_yaml( + os.path.join(self._sub_folder_path, "sub.yaml"), + ( + "http:\n" + " api_password: !secret http_pw\n" + "component:\n" + " username: !secret comp1_un\n" + " password: !secret comp1_pw\n" + "" + ), + yaml_loader.Secrets(get_test_config_dir()), + ) + + assert expected == self._yaml["http"] + + def test_secret_overrides_parent(self): + """Test loading current directory secret overrides the parent.""" + expected = {"api_password": "override"} + load_yaml( + os.path.join(self._sub_folder_path, yaml.SECRET_YAML), "http_pw: override" + ) + self._yaml = load_yaml( + os.path.join(self._sub_folder_path, "sub.yaml"), + ( + "http:\n" + " api_password: !secret http_pw\n" + "component:\n" + " username: !secret comp1_un\n" + " password: !secret comp1_pw\n" + "" + ), + yaml_loader.Secrets(get_test_config_dir()), + ) + + assert expected == self._yaml["http"] + + def test_secrets_from_unrelated_fails(self): + """Test loading secrets from unrelated folder fails.""" + load_yaml(os.path.join(self._unrelated_path, yaml.SECRET_YAML), "test: failure") + with pytest.raises(HomeAssistantError): + load_yaml( + os.path.join(self._sub_folder_path, "sub.yaml"), + "http:\n api_password: !secret test", + ) + + def test_secrets_logger_removed(self): + """Ensure logger: debug was removed.""" + with pytest.raises(HomeAssistantError): + load_yaml(self._yaml_path, "api_password: !secret logger") + + @patch("homeassistant.util.yaml.loader._LOGGER.error") + def test_bad_logger_value(self, mock_error): + """Ensure logger: debug was removed.""" + load_yaml(self._secret_path, "logger: info\npw: abc") + load_yaml( + self._yaml_path, + "api_password: !secret pw", + yaml_loader.Secrets(get_test_config_dir()), + ) + assert mock_error.call_count == 1, "Expected an error about logger: value" + + def test_secrets_are_not_dict(self): + """Did secrets handle non-dict file.""" + FILES[self._secret_path] = ( + "- http_pw: pwhttp\n comp1_un: un1\n comp1_pw: pw1\n" + ) + with pytest.raises(HomeAssistantError): + load_yaml( + self._yaml_path, + ( + "http:\n" + " api_password: !secret http_pw\n" + "component:\n" + " username: !secret comp1_un\n" + " password: !secret comp1_pw\n" + "" + ), + ) + + @pytest.mark.parametrize("hass_config_yaml", ['key: [1, "2", 3]']) @pytest.mark.usefixtures("try_both_dumpers", "mock_hass_config_yaml") def test_representing_yaml_loaded_data() -> None: @@ -426,8 +566,8 @@ def test_no_recursive_secrets() -> None: def test_input_class() -> None: """Test input class.""" - yaml_input = yaml.Input("hello") - yaml_input2 = yaml.Input("hello") + yaml_input = 
yaml_loader.Input("hello")
+    yaml_input2 = yaml_loader.Input("hello")
     assert yaml_input.name == "hello"
     assert yaml_input == yaml_input2
 
@@ -494,6 +634,31 @@ def mock_integration_frame() -> Generator[Mock]:
     yield correct_frame
 
 
+@pytest.mark.parametrize(
+    ("loader_class", "message"),
+    [
+        (yaml.loader.SafeLoader, "'SafeLoader' instead of 'FastSafeLoader'"),
+        (
+            yaml.loader.SafeLineLoader,
+            "'SafeLineLoader' instead of 'PythonSafeLoader'",
+        ),
+    ],
+)
+@pytest.mark.usefixtures("mock_integration_frame")
+async def test_deprecated_loaders(
+    caplog: pytest.LogCaptureFixture,
+    loader_class: type,
+    message: str,
+) -> None:
+    """Test instantiating the deprecated yaml loaders logs a warning."""
+    with (
+        pytest.raises(TypeError),
+        patch("homeassistant.helpers.frame._REPORTED_INTEGRATIONS", set()),
+    ):
+        loader_class()
+    assert (f"Detected that integration 'hue' uses deprecated {message}") in caplog.text
+
+
 @pytest.mark.usefixtures("try_both_loaders")
 def test_string_annotated() -> None:
     """Test strings are annotated with file + line."""
@@ -561,44 +726,3 @@ def test_load_yaml_dict_fail() -> None:
     """Test item without a key."""
     with pytest.raises(yaml_loader.YamlTypeError):
         yaml_loader.load_yaml_dict(YAML_CONFIG_FILE)
-
-
-@pytest.mark.parametrize(
-    "tag",
-    [
-        "!include",
-        "!include_dir_named",
-        "!include_dir_merge_named",
-        "!include_dir_list",
-        "!include_dir_merge_list",
-    ],
-)
-@pytest.mark.usefixtures("try_both_loaders")
-def test_include_without_parameter(tag: str) -> None:
-    """Test include extensions without parameters."""
-    with (
-        io.StringIO(f"key: {tag}") as file,
-        pytest.raises(HomeAssistantError, match=f"{tag} needs an argument"),
-    ):
-        yaml_loader.parse_yaml(file)
-
-
-@pytest.mark.parametrize(
-    ("open_exception", "load_yaml_exception"),
-    [
-        (FileNotFoundError, OSError),
-        (NotADirectoryError, HomeAssistantError),
-        (PermissionError, HomeAssistantError),
-    ],
-)
-@pytest.mark.usefixtures("try_both_loaders")
-def test_load_yaml_wrap_oserror(
-    open_exception: Exception,
-    load_yaml_exception: Exception,
-) -> None:
-    """Test load_yaml wraps OSError in HomeAssistantError."""
-    with (
-        patch("homeassistant.util.yaml.loader.open", side_effect=open_exception),
-        pytest.raises(load_yaml_exception),
-    ):
-        yaml_loader.load_yaml("bla")
diff --git a/tests/util/yaml/test_secrets.py b/tests/util/yaml/test_secrets.py
deleted file mode 100644
index 35b5ae319c4..00000000000
--- a/tests/util/yaml/test_secrets.py
+++ /dev/null
@@ -1,185 +0,0 @@
-"""Test Home Assistant secret substitution in YAML files."""
-
-from dataclasses import dataclass
-import logging
-from pathlib import Path
-
-import pytest
-
-from homeassistant.config import YAML_CONFIG_FILE, load_yaml_config_file
-from homeassistant.exceptions import HomeAssistantError
-from homeassistant.util import yaml
-from homeassistant.util.yaml import loader as yaml_loader
-
-from tests.common import get_test_config_dir, patch_yaml_files
-
-
-@dataclass(frozen=True)
-class YamlFile:
-    """Represents a .yaml file used for testing."""
-
-    path: Path
-    contents: str
-
-
-def load_config_file(config_file_path: Path, files: list[YamlFile]):
-    """Patch secret files and return the loaded config file."""
-    patch_files = {x.path.as_posix(): x.contents for x in files}
-    with patch_yaml_files(patch_files):
-        return load_yaml_config_file(
-            config_file_path.as_posix(),
-            yaml_loader.Secrets(Path(get_test_config_dir())),
-        )
-
-
-@pytest.fixture
-def filepaths() -> dict[str, Path]:
-    """Return a dictionary of filepaths for testing."""
-    config_dir = Path(get_test_config_dir())
-    return {
-        "config": config_dir,
-        "sub_folder": config_dir / "subFolder",
-        "unrelated": config_dir / "unrelated",
-    }
-
-
-@pytest.fixture
-def default_config(filepaths: dict[str, Path]) -> YamlFile:
-    """Return the default config file for testing."""
-    return YamlFile(
-        path=filepaths["config"] / YAML_CONFIG_FILE,
-        contents=(
-            "http:\n"
-            "  api_password: !secret http_pw\n"
-            "component:\n"
-            "  username: !secret comp1_un\n"
-            "  password: !secret comp1_pw\n"
-            ""
-        ),
-    )
-
-
-@pytest.fixture
-def default_secrets(filepaths: dict[str, Path]) -> YamlFile:
-    """Return the default secrets file for testing."""
-    return YamlFile(
-        path=filepaths["config"] / yaml.SECRET_YAML,
-        contents=(
-            "http_pw: pwhttp\n"
-            "comp1_un: un1\n"
-            "comp1_pw: pw1\n"
-            "stale_pw: not_used\n"
-            "logger: debug\n"
-        ),
-    )
-
-
-def test_secrets_from_yaml(default_config: YamlFile, default_secrets: YamlFile) -> None:
-    """Did secrets load ok."""
-    loaded_file = load_config_file(
-        default_config.path, [default_config, default_secrets]
-    )
-    expected = {"api_password": "pwhttp"}
-    assert expected == loaded_file["http"]
-
-    expected = {"username": "un1", "password": "pw1"}
-    assert expected == loaded_file["component"]
-
-
-def test_secrets_from_parent_folder(
-    filepaths: dict[str, Path],
-    default_config: YamlFile,
-    default_secrets: YamlFile,
-) -> None:
-    """Test loading secrets from parent folder."""
-    config_file = YamlFile(
-        path=filepaths["sub_folder"] / "sub.yaml",
-        contents=default_config.contents,
-    )
-    loaded_file = load_config_file(config_file.path, [config_file, default_secrets])
-    expected = {"api_password": "pwhttp"}
-
-    assert expected == loaded_file["http"]
-
-
-def test_secret_overrides_parent(
-    filepaths: dict[str, Path],
-    default_config: YamlFile,
-    default_secrets: YamlFile,
-) -> None:
-    """Test loading current directory secret overrides the parent."""
-    config_file = YamlFile(
-        path=filepaths["sub_folder"] / "sub.yaml", contents=default_config.contents
-    )
-    sub_secrets = YamlFile(
-        path=filepaths["sub_folder"] / yaml.SECRET_YAML, contents="http_pw: override"
-    )
-
-    loaded_file = load_config_file(
-        config_file.path, [config_file, default_secrets, sub_secrets]
-    )
-
-    expected = {"api_password": "override"}
-    assert loaded_file["http"] == expected
-
-
-def test_secrets_from_unrelated_fails(
-    filepaths: dict[str, Path],
-    default_secrets: YamlFile,
-) -> None:
-    """Test loading secrets from unrelated folder fails."""
-    config_file = YamlFile(
-        path=filepaths["sub_folder"] / "sub.yaml",
-        contents="http:\n  api_password: !secret test",
-    )
-    unrelated_secrets = YamlFile(
-        path=filepaths["unrelated"] / yaml.SECRET_YAML, contents="test: failure"
-    )
-    with pytest.raises(HomeAssistantError, match="Secret test not defined"):
-        load_config_file(
-            config_file.path, [config_file, default_secrets, unrelated_secrets]
-        )
-
-
-def test_secrets_logger_removed(
-    filepaths: dict[str, Path],
-    default_secrets: YamlFile,
-) -> None:
-    """Ensure logger: debug gets removed from secrets file once logger is configured."""
-    config_file = YamlFile(
-        path=filepaths["config"] / YAML_CONFIG_FILE,
-        contents="api_password: !secret logger",
-    )
-    with pytest.raises(HomeAssistantError, match="Secret logger not defined"):
-        load_config_file(config_file.path, [config_file, default_secrets])
-
-
-def test_bad_logger_value(
-    caplog: pytest.LogCaptureFixture, filepaths: dict[str, Path]
-) -> None:
-    """Ensure only logger: debug is allowed in secret file."""
-    config_file = YamlFile(
-        path=filepaths["config"] / YAML_CONFIG_FILE, contents="api_password: !secret pw"
-    )
-    secrets_file = YamlFile(
-        path=filepaths["config"] / yaml.SECRET_YAML, contents="logger: info\npw: abc"
-    )
-    with caplog.at_level(logging.ERROR):
-        load_config_file(config_file.path, [config_file, secrets_file])
-    assert (
-        "Error in secrets.yaml: 'logger: debug' expected, but 'logger: info' found"
-        in caplog.messages
-    )
-
-
-def test_secrets_are_not_dict(
-    filepaths: dict[str, Path],
-    default_config: YamlFile,
-) -> None:
-    """Did secrets handle non-dict file."""
-    non_dict_secrets = YamlFile(
-        path=filepaths["config"] / yaml.SECRET_YAML,
-        contents="- http_pw: pwhttp\n  comp1_un: un1\n  comp1_pw: pw1\n",
-    )
-    with pytest.raises(HomeAssistantError, match="Secrets is not a dictionary"):
-        load_config_file(default_config.path, [default_config, non_dict_secrets])
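# --- Illustrative sketch (editor's addition, not part of the patch above) ---
# The deleted module exercised !secret substitution through yaml_loader.Secrets.
# Below is a minimal, hedged sketch of that pattern, reusing only helpers that the
# removed file itself imported (load_yaml_config_file, yaml_loader.Secrets,
# patch_yaml_files, get_test_config_dir). The test name and the http_pw/pwhttp
# values are hypothetical example data, not taken from the repository.
from pathlib import Path

from homeassistant.config import YAML_CONFIG_FILE, load_yaml_config_file
from homeassistant.util import yaml
from homeassistant.util.yaml import loader as yaml_loader

from tests.common import get_test_config_dir, patch_yaml_files


def test_secret_resolution_sketch() -> None:
    """Resolve a !secret reference against secrets.yaml (pattern of the removed tests)."""
    config_dir = Path(get_test_config_dir())
    files = {
        # configuration.yaml refers to a secret ...
        (config_dir / YAML_CONFIG_FILE).as_posix(): "http:\n  api_password: !secret http_pw\n",
        # ... which secrets.yaml in the same directory defines.
        (config_dir / yaml.SECRET_YAML).as_posix(): "http_pw: pwhttp\n",
    }
    with patch_yaml_files(files):
        loaded = load_yaml_config_file(
            (config_dir / YAML_CONFIG_FILE).as_posix(),
            yaml_loader.Secrets(config_dir),
        )
    # The !secret tag should be replaced by the plain value from secrets.yaml.
    assert loaded["http"]["api_password"] == "pwhttp"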